Diffstat (limited to 'thirdparty')
-rw-r--r--thirdparty/README.md97
-rw-r--r--thirdparty/astcenc/LICENSE.txt175
-rw-r--r--thirdparty/astcenc/astcenc.h815
-rw-r--r--thirdparty/astcenc/astcenc_averages_and_directions.cpp995
-rw-r--r--thirdparty/astcenc/astcenc_block_sizes.cpp1184
-rw-r--r--thirdparty/astcenc/astcenc_color_quantize.cpp2071
-rw-r--r--thirdparty/astcenc/astcenc_color_unquantize.cpp941
-rw-r--r--thirdparty/astcenc/astcenc_compress_symbolic.cpp1455
-rw-r--r--thirdparty/astcenc/astcenc_compute_variance.cpp472
-rw-r--r--thirdparty/astcenc/astcenc_decompress_symbolic.cpp623
-rw-r--r--thirdparty/astcenc/astcenc_diagnostic_trace.cpp230
-rw-r--r--thirdparty/astcenc/astcenc_diagnostic_trace.h219
-rw-r--r--thirdparty/astcenc/astcenc_entry.cpp1427
-rw-r--r--thirdparty/astcenc/astcenc_find_best_partitioning.cpp780
-rw-r--r--thirdparty/astcenc/astcenc_ideal_endpoints_and_weights.cpp1663
-rw-r--r--thirdparty/astcenc/astcenc_image.cpp558
-rw-r--r--thirdparty/astcenc/astcenc_integer_sequence.cpp739
-rw-r--r--thirdparty/astcenc/astcenc_internal.h2196
-rw-r--r--thirdparty/astcenc/astcenc_internal_entry.h273
-rw-r--r--thirdparty/astcenc/astcenc_mathlib.cpp48
-rw-r--r--thirdparty/astcenc/astcenc_mathlib.h476
-rw-r--r--thirdparty/astcenc/astcenc_mathlib_softfloat.cpp411
-rw-r--r--thirdparty/astcenc/astcenc_partition_tables.cpp481
-rw-r--r--thirdparty/astcenc/astcenc_percentile_tables.cpp1251
-rw-r--r--thirdparty/astcenc/astcenc_pick_best_endpoint_format.cpp1350
-rw-r--r--thirdparty/astcenc/astcenc_platform_isa_detection.cpp166
-rw-r--r--thirdparty/astcenc/astcenc_quantization.cpp904
-rw-r--r--thirdparty/astcenc/astcenc_symbolic_physical.cpp534
-rw-r--r--thirdparty/astcenc/astcenc_vecmathlib.h570
-rw-r--r--thirdparty/astcenc/astcenc_vecmathlib_avx2_8.h1204
-rw-r--r--thirdparty/astcenc/astcenc_vecmathlib_common_4.h423
-rw-r--r--thirdparty/astcenc/astcenc_vecmathlib_neon_4.h1072
-rw-r--r--thirdparty/astcenc/astcenc_vecmathlib_none_4.h1169
-rw-r--r--thirdparty/astcenc/astcenc_vecmathlib_sse_4.h1283
-rw-r--r--thirdparty/astcenc/astcenc_weight_align.cpp479
-rw-r--r--thirdparty/astcenc/astcenc_weight_quant_xfer_tables.cpp147
-rw-r--r--thirdparty/astcenc/patches/fix-build-no-ssse3.patch81
-rw-r--r--thirdparty/basis_universal/encoder/basisu_comp.cpp178
-rw-r--r--thirdparty/basis_universal/encoder/basisu_comp.h21
-rw-r--r--thirdparty/basis_universal/encoder/basisu_enc.cpp4
-rw-r--r--thirdparty/basis_universal/encoder/basisu_frontend.cpp4
-rw-r--r--thirdparty/basis_universal/transcoder/basisu_transcoder.cpp2
-rw-r--r--thirdparty/certs/ca-certificates.crt50
-rw-r--r--thirdparty/cvtt/ConvectionKernels_BC67.cpp14
-rw-r--r--thirdparty/embree/common/algorithms/parallel_for.h1
-rw-r--r--thirdparty/embree/common/algorithms/parallel_for_for.h23
-rw-r--r--thirdparty/embree/common/algorithms/parallel_for_for_prefix_sum.h50
-rw-r--r--thirdparty/embree/common/algorithms/parallel_reduce.h2
-rw-r--r--thirdparty/embree/common/math/bbox.h10
-rw-r--r--thirdparty/embree/common/math/color.h19
-rw-r--r--thirdparty/embree/common/math/constants.cpp19
-rw-r--r--thirdparty/embree/common/math/constants.h54
-rw-r--r--thirdparty/embree/common/math/math.h104
-rw-r--r--thirdparty/embree/common/math/quaternion.h12
-rw-r--r--thirdparty/embree/common/math/transcendental.h6
-rw-r--r--thirdparty/embree/common/math/vec2.h8
-rw-r--r--thirdparty/embree/common/math/vec2fa.h29
-rw-r--r--thirdparty/embree/common/math/vec3.h11
-rw-r--r--thirdparty/embree/common/math/vec3fa.h92
-rw-r--r--thirdparty/embree/common/math/vec3ia.h46
-rw-r--r--thirdparty/embree/common/math/vec4.h6
-rw-r--r--thirdparty/embree/common/simd/arm/avx2neon.h1196
-rw-r--r--thirdparty/embree/common/simd/arm/emulation.h71
-rw-r--r--thirdparty/embree/common/simd/arm/sse2neon.h10961
-rw-r--r--thirdparty/embree/common/simd/simd.h2
-rw-r--r--thirdparty/embree/common/simd/sse.h2
-rw-r--r--thirdparty/embree/common/simd/vboold4_avx.h7
-rw-r--r--thirdparty/embree/common/simd/vboolf16_avx512.h2
-rw-r--r--thirdparty/embree/common/simd/vboolf4_sse2.h24
-rw-r--r--thirdparty/embree/common/simd/vboolf8_avx.h2
-rw-r--r--thirdparty/embree/common/simd/vdouble4_avx.h9
-rw-r--r--thirdparty/embree/common/simd/vfloat16_avx512.h5
-rw-r--r--thirdparty/embree/common/simd/vfloat4_sse2.h135
-rw-r--r--thirdparty/embree/common/simd/vfloat8_avx.h87
-rw-r--r--thirdparty/embree/common/simd/vint4_sse2.h100
-rw-r--r--thirdparty/embree/common/simd/vint8_avx.h4
-rw-r--r--thirdparty/embree/common/simd/vint8_avx2.h4
-rw-r--r--thirdparty/embree/common/simd/vuint4_sse2.h38
-rw-r--r--thirdparty/embree/common/simd/vuint8_avx.h4
-rw-r--r--thirdparty/embree/common/simd/vuint8_avx2.h2
-rw-r--r--thirdparty/embree/common/simd/wasm/emulation.h13
-rw-r--r--thirdparty/embree/common/sys/array.h12
-rw-r--r--thirdparty/embree/common/sys/barrier.h4
-rw-r--r--thirdparty/embree/common/sys/intrinsics.h124
-rw-r--r--thirdparty/embree/common/sys/mutex.cpp1
-rw-r--r--thirdparty/embree/common/sys/mutex.h6
-rw-r--r--thirdparty/embree/common/sys/platform.h20
-rw-r--r--thirdparty/embree/common/sys/sysinfo.cpp56
-rw-r--r--thirdparty/embree/common/sys/sysinfo.h13
-rw-r--r--thirdparty/embree/common/sys/thread.cpp35
-rw-r--r--thirdparty/embree/common/sys/vector.h14
-rw-r--r--thirdparty/embree/common/tasking/taskschedulerinternal.h2
-rw-r--r--thirdparty/embree/common/tasking/taskschedulertbb.h8
-rw-r--r--thirdparty/embree/include/embree3/rtcore_common.h4
-rw-r--r--thirdparty/embree/include/embree3/rtcore_config.h17
-rw-r--r--thirdparty/embree/include/embree3/rtcore_quaternion.h2
-rw-r--r--thirdparty/embree/include/embree3/rtcore_scene.h5
-rw-r--r--thirdparty/embree/kernels/builders/bvh_builder_morton.h2
-rw-r--r--thirdparty/embree/kernels/builders/bvh_builder_msmblur.h4
-rw-r--r--thirdparty/embree/kernels/builders/bvh_builder_sah.h2
-rw-r--r--thirdparty/embree/kernels/builders/heuristic_binning.h6
-rw-r--r--thirdparty/embree/kernels/builders/heuristic_openmerge_array.h2
-rw-r--r--thirdparty/embree/kernels/builders/heuristic_spatial.h111
-rw-r--r--thirdparty/embree/kernels/builders/heuristic_spatial_array.h15
-rw-r--r--thirdparty/embree/kernels/builders/primrefgen.cpp4
-rw-r--r--thirdparty/embree/kernels/builders/primrefgen_presplit.h13
-rw-r--r--thirdparty/embree/kernels/builders/splitter.h28
-rw-r--r--thirdparty/embree/kernels/bvh/bvh.cpp2
-rw-r--r--thirdparty/embree/kernels/bvh/bvh_intersector_hybrid.cpp4
-rw-r--r--thirdparty/embree/kernels/bvh/bvh_intersector_stream.h11
-rw-r--r--thirdparty/embree/kernels/bvh/bvh_node_aabb.h16
-rw-r--r--thirdparty/embree/kernels/bvh/bvh_node_aabb_mb.h8
-rw-r--r--thirdparty/embree/kernels/bvh/bvh_node_aabb_mb4d.h8
-rw-r--r--thirdparty/embree/kernels/bvh/bvh_node_qaabb.h8
-rw-r--r--thirdparty/embree/kernels/bvh/bvh_statistics.cpp2
-rw-r--r--thirdparty/embree/kernels/bvh/node_intersector1.h117
-rw-r--r--thirdparty/embree/kernels/bvh/node_intersector_frustum.h22
-rw-r--r--thirdparty/embree/kernels/bvh/node_intersector_packet.h53
-rw-r--r--thirdparty/embree/kernels/bvh/node_intersector_packet_stream.h26
-rw-r--r--thirdparty/embree/kernels/common/accel.h4
-rw-r--r--thirdparty/embree/kernels/common/acceln.cpp6
-rw-r--r--thirdparty/embree/kernels/common/accelset.h23
-rw-r--r--thirdparty/embree/kernels/common/alloc.cpp3
-rw-r--r--thirdparty/embree/kernels/common/alloc.h72
-rw-r--r--thirdparty/embree/kernels/common/device.cpp4
-rw-r--r--thirdparty/embree/kernels/common/geometry.h4
-rw-r--r--thirdparty/embree/kernels/common/isa.h2
-rw-r--r--thirdparty/embree/kernels/common/ray.h2
-rw-r--r--thirdparty/embree/kernels/common/rtcore.cpp26
-rw-r--r--thirdparty/embree/kernels/common/rtcore.h141
-rw-r--r--thirdparty/embree/kernels/common/rtcore_builder.cpp2
-rw-r--r--thirdparty/embree/kernels/common/scene.cpp4
-rw-r--r--thirdparty/embree/kernels/common/scene_curves.h8
-rw-r--r--thirdparty/embree/kernels/common/state.cpp15
-rw-r--r--thirdparty/embree/kernels/config.h4
-rw-r--r--thirdparty/embree/kernels/geometry/curve_intersector_oriented.h2
-rw-r--r--thirdparty/embree/kernels/geometry/curve_intersector_sweep.h2
-rw-r--r--thirdparty/embree/kernels/geometry/disc_intersector.h28
-rw-r--r--thirdparty/embree/kernels/geometry/filter.h34
-rw-r--r--thirdparty/embree/kernels/geometry/object_intersector.h8
-rw-r--r--thirdparty/embree/kernels/geometry/quadv.h2
-rw-r--r--thirdparty/embree/kernels/geometry/roundline_intersector.h4
-rw-r--r--thirdparty/embree/kernels/geometry/subgrid_intersector.h10
-rw-r--r--thirdparty/embree/kernels/hash.h5
-rw-r--r--thirdparty/embree/kernels/subdiv/bezier_patch.h2
-rw-r--r--thirdparty/embree/kernels/subdiv/catmullclark_ring.h8
-rw-r--r--thirdparty/embree/kernels/subdiv/catmullrom_curve.h6
-rw-r--r--thirdparty/embree/kernels/subdiv/linear_bezier_patch.h10
-rw-r--r--thirdparty/embree/patches/emscripten-nthreads.patch42
-rw-r--r--thirdparty/embree/patches/godot-changes-android.patch103
-rw-r--r--thirdparty/embree/patches/godot-changes-misc.patch105
-rw-r--r--thirdparty/embree/patches/godot-changes-noexcept.patch193
-rw-r--r--thirdparty/embree/patches/godot-changes-ubsan.patch24
-rw-r--r--thirdparty/enet/enet/godot.h59
-rw-r--r--thirdparty/enet/godot.cpp59
-rw-r--r--thirdparty/freetype/src/gzip/adler32.c192
-rw-r--r--thirdparty/freetype/src/gzip/crc32.c1116
-rw-r--r--thirdparty/freetype/src/gzip/crc32.h9446
-rw-r--r--thirdparty/freetype/src/gzip/gzguts.h219
-rw-r--r--thirdparty/freetype/src/gzip/infback.c641
-rw-r--r--thirdparty/freetype/src/gzip/inffast.c323
-rw-r--r--thirdparty/freetype/src/gzip/inffast.h11
-rw-r--r--thirdparty/freetype/src/gzip/inffixed.h94
-rw-r--r--thirdparty/freetype/src/gzip/inflate.c1610
-rw-r--r--thirdparty/freetype/src/gzip/inflate.h131
-rw-r--r--thirdparty/freetype/src/gzip/inftrees.c304
-rw-r--r--thirdparty/freetype/src/gzip/inftrees.h67
-rw-r--r--thirdparty/freetype/src/gzip/zlib.h1968
-rw-r--r--thirdparty/freetype/src/gzip/zutil.c325
-rw-r--r--thirdparty/freetype/src/gzip/zutil.h278
-rw-r--r--thirdparty/glad/KHR/khrplatform.h27
-rw-r--r--thirdparty/glad/LICENSE83
-rw-r--r--thirdparty/glad/gl.c1995
-rw-r--r--thirdparty/glad/glad.c1939
-rw-r--r--thirdparty/glad/glad/gl.h3884
-rw-r--r--thirdparty/glad/glad/glad.h3784
-rw-r--r--thirdparty/glad/glad/glx.h605
-rw-r--r--thirdparty/glad/glx.c395
-rw-r--r--thirdparty/glslang/SPIRV/GLSL.ext.EXT.h1
-rw-r--r--thirdparty/glslang/SPIRV/GLSL.ext.KHR.h4
-rw-r--r--thirdparty/glslang/SPIRV/GlslangToSpv.cpp350
-rw-r--r--thirdparty/glslang/SPIRV/NonSemanticShaderDebugInfo100.h171
-rw-r--r--thirdparty/glslang/SPIRV/SPVRemapper.cpp35
-rw-r--r--thirdparty/glslang/SPIRV/SPVRemapper.h8
-rw-r--r--thirdparty/glslang/SPIRV/SpvBuilder.cpp746
-rw-r--r--thirdparty/glslang/SPIRV/SpvBuilder.h110
-rw-r--r--thirdparty/glslang/SPIRV/SpvTools.cpp2
-rw-r--r--thirdparty/glslang/SPIRV/SpvTools.h16
-rw-r--r--thirdparty/glslang/SPIRV/doc.cpp77
-rw-r--r--thirdparty/glslang/SPIRV/spirv.hpp42
-rw-r--r--thirdparty/glslang/SPIRV/spvIR.h18
-rw-r--r--thirdparty/glslang/glslang/Include/BaseTypes.h24
-rw-r--r--thirdparty/glslang/glslang/Include/Common.h4
-rw-r--r--thirdparty/glslang/glslang/Include/ResourceLimits.h9
-rw-r--r--thirdparty/glslang/glslang/Include/Types.h560
-rw-r--r--thirdparty/glslang/glslang/Include/glslang_c_interface.h26
-rw-r--r--thirdparty/glslang/glslang/Include/glslang_c_shader_types.h53
-rw-r--r--thirdparty/glslang/glslang/Include/intermediate.h7
-rw-r--r--thirdparty/glslang/glslang/MachineIndependent/Initialize.cpp274
-rw-r--r--thirdparty/glslang/glslang/MachineIndependent/Intermediate.cpp12
-rw-r--r--thirdparty/glslang/glslang/MachineIndependent/ParseContextBase.cpp3
-rw-r--r--thirdparty/glslang/glslang/MachineIndependent/ParseHelper.cpp383
-rw-r--r--thirdparty/glslang/glslang/MachineIndependent/Scan.cpp19
-rw-r--r--thirdparty/glslang/glslang/MachineIndependent/ShaderLang.cpp35
-rw-r--r--thirdparty/glslang/glslang/MachineIndependent/SymbolTable.cpp2
-rw-r--r--thirdparty/glslang/glslang/MachineIndependent/SymbolTable.h3
-rw-r--r--thirdparty/glslang/glslang/MachineIndependent/Versions.cpp37
-rw-r--r--thirdparty/glslang/glslang/MachineIndependent/Versions.h14
-rw-r--r--thirdparty/glslang/glslang/MachineIndependent/glslang.y45
-rw-r--r--thirdparty/glslang/glslang/MachineIndependent/glslang_tab.cpp7866
-rw-r--r--thirdparty/glslang/glslang/MachineIndependent/glslang_tab.cpp.h15
-rw-r--r--thirdparty/glslang/glslang/MachineIndependent/intermOut.cpp11
-rw-r--r--thirdparty/glslang/glslang/MachineIndependent/iomapper.cpp28
-rw-r--r--thirdparty/glslang/glslang/MachineIndependent/iomapper.h9
-rw-r--r--thirdparty/glslang/glslang/MachineIndependent/linkValidate.cpp353
-rw-r--r--thirdparty/glslang/glslang/MachineIndependent/localintermediate.h54
-rw-r--r--thirdparty/glslang/glslang/OSDependent/Unix/ossource.cpp9
-rw-r--r--thirdparty/glslang/glslang/Public/ShaderLang.h23
-rw-r--r--thirdparty/glslang/glslang/build_info.h26
-rw-r--r--thirdparty/glslang/patches/unused_cleanup.diff61
-rw-r--r--thirdparty/graphite/include/graphite2/Font.h28
-rw-r--r--thirdparty/graphite/include/graphite2/Log.h28
-rw-r--r--thirdparty/graphite/include/graphite2/Segment.h28
-rw-r--r--thirdparty/graphite/include/graphite2/Types.h28
-rw-r--r--thirdparty/graphite/src/CmapCache.cpp29
-rw-r--r--thirdparty/graphite/src/Code.cpp29
-rw-r--r--thirdparty/graphite/src/Collider.cpp29
-rw-r--r--thirdparty/graphite/src/Decompressor.cpp29
-rw-r--r--thirdparty/graphite/src/Face.cpp29
-rw-r--r--thirdparty/graphite/src/FeatureMap.cpp29
-rw-r--r--thirdparty/graphite/src/FileFace.cpp29
-rw-r--r--thirdparty/graphite/src/Font.cpp27
-rw-r--r--thirdparty/graphite/src/GlyphCache.cpp37
-rw-r--r--thirdparty/graphite/src/GlyphFace.cpp27
-rw-r--r--thirdparty/graphite/src/Intervals.cpp29
-rw-r--r--thirdparty/graphite/src/Justifier.cpp27
-rw-r--r--thirdparty/graphite/src/NameTable.cpp27
-rw-r--r--thirdparty/graphite/src/Pass.cpp35
-rw-r--r--thirdparty/graphite/src/Position.cpp27
-rw-r--r--thirdparty/graphite/src/Segment.cpp29
-rw-r--r--thirdparty/graphite/src/Silf.cpp29
-rw-r--r--thirdparty/graphite/src/Slot.cpp29
-rw-r--r--thirdparty/graphite/src/Sparse.cpp29
-rw-r--r--thirdparty/graphite/src/TtfUtil.cpp33
-rw-r--r--thirdparty/graphite/src/UtfCodec.cpp27
-rw-r--r--thirdparty/graphite/src/call_machine.cpp29
-rw-r--r--thirdparty/graphite/src/direct_machine.cpp29
-rw-r--r--thirdparty/graphite/src/gr_char_info.cpp27
-rw-r--r--thirdparty/graphite/src/gr_face.cpp29
-rw-r--r--thirdparty/graphite/src/gr_features.cpp29
-rw-r--r--thirdparty/graphite/src/gr_font.cpp27
-rw-r--r--thirdparty/graphite/src/gr_logging.cpp27
-rw-r--r--thirdparty/graphite/src/gr_segment.cpp29
-rw-r--r--thirdparty/graphite/src/gr_slot.cpp29
-rw-r--r--thirdparty/graphite/src/inc/CharInfo.h27
-rw-r--r--thirdparty/graphite/src/inc/CmapCache.h27
-rw-r--r--thirdparty/graphite/src/inc/Code.h29
-rw-r--r--thirdparty/graphite/src/inc/Collider.h29
-rw-r--r--thirdparty/graphite/src/inc/Compression.h27
-rw-r--r--thirdparty/graphite/src/inc/Decompressor.h27
-rw-r--r--thirdparty/graphite/src/inc/Endian.h29
-rw-r--r--thirdparty/graphite/src/inc/Error.h33
-rw-r--r--thirdparty/graphite/src/inc/Face.h29
-rw-r--r--thirdparty/graphite/src/inc/FeatureMap.h29
-rw-r--r--thirdparty/graphite/src/inc/FeatureVal.h27
-rw-r--r--thirdparty/graphite/src/inc/FileFace.h29
-rw-r--r--thirdparty/graphite/src/inc/Font.h27
-rw-r--r--thirdparty/graphite/src/inc/GlyphCache.h29
-rw-r--r--thirdparty/graphite/src/inc/GlyphFace.h27
-rw-r--r--thirdparty/graphite/src/inc/Intervals.h29
-rw-r--r--thirdparty/graphite/src/inc/List.h29
-rw-r--r--thirdparty/graphite/src/inc/Machine.h29
-rw-r--r--thirdparty/graphite/src/inc/Main.h29
-rw-r--r--thirdparty/graphite/src/inc/NameTable.h27
-rw-r--r--thirdparty/graphite/src/inc/Pass.h27
-rw-r--r--thirdparty/graphite/src/inc/Position.h27
-rw-r--r--thirdparty/graphite/src/inc/Rule.h29
-rw-r--r--thirdparty/graphite/src/inc/Segment.h29
-rw-r--r--thirdparty/graphite/src/inc/Silf.h29
-rw-r--r--thirdparty/graphite/src/inc/Slot.h29
-rw-r--r--thirdparty/graphite/src/inc/Sparse.h29
-rw-r--r--thirdparty/graphite/src/inc/TtfTypes.h35
-rw-r--r--thirdparty/graphite/src/inc/TtfUtil.h36
-rw-r--r--thirdparty/graphite/src/inc/UtfCodec.h29
-rw-r--r--thirdparty/graphite/src/inc/bits.h29
-rw-r--r--thirdparty/graphite/src/inc/debug.h29
-rw-r--r--thirdparty/graphite/src/inc/json.h29
-rw-r--r--thirdparty/graphite/src/inc/locale2lcid.h27
-rw-r--r--thirdparty/graphite/src/inc/opcode_table.h27
-rw-r--r--thirdparty/graphite/src/inc/opcodes.h29
-rw-r--r--thirdparty/graphite/src/json.cpp29
-rw-r--r--thirdparty/harfbuzz/src/OT/Layout/Common/Coverage.hh42
-rw-r--r--thirdparty/harfbuzz/src/OT/Layout/Common/CoverageFormat1.hh9
-rw-r--r--thirdparty/harfbuzz/src/OT/Layout/Common/CoverageFormat2.hh35
-rw-r--r--thirdparty/harfbuzz/src/OT/Layout/GPOS/GPOS.hh2
-rw-r--r--thirdparty/harfbuzz/src/OT/Layout/GPOS/MarkBasePos.hh4
-rw-r--r--thirdparty/harfbuzz/src/OT/Layout/GPOS/MarkLigPos.hh4
-rw-r--r--thirdparty/harfbuzz/src/OT/Layout/GPOS/MarkMarkPos.hh4
-rw-r--r--thirdparty/harfbuzz/src/OT/Layout/GPOS/PairPos.hh4
-rw-r--r--thirdparty/harfbuzz/src/OT/Layout/GPOS/PairPosFormat1.hh26
-rw-r--r--thirdparty/harfbuzz/src/OT/Layout/GPOS/PairPosFormat2.hh27
-rw-r--r--thirdparty/harfbuzz/src/OT/Layout/GPOS/PairSet.hh20
-rw-r--r--thirdparty/harfbuzz/src/OT/Layout/GPOS/SinglePosFormat1.hh18
-rw-r--r--thirdparty/harfbuzz/src/OT/Layout/GPOS/SinglePosFormat2.hh24
-rw-r--r--thirdparty/harfbuzz/src/OT/Layout/GSUB/AlternateSet.hh2
-rw-r--r--thirdparty/harfbuzz/src/OT/Layout/GSUB/AlternateSubst.hh4
-rw-r--r--thirdparty/harfbuzz/src/OT/Layout/GSUB/GSUB.hh2
-rw-r--r--thirdparty/harfbuzz/src/OT/Layout/GSUB/Ligature.hh2
-rw-r--r--thirdparty/harfbuzz/src/OT/Layout/GSUB/LigatureSubst.hh4
-rw-r--r--thirdparty/harfbuzz/src/OT/Layout/GSUB/MultipleSubst.hh4
-rw-r--r--thirdparty/harfbuzz/src/OT/Layout/GSUB/Sequence.hh2
-rw-r--r--thirdparty/harfbuzz/src/OT/Layout/GSUB/SingleSubst.hh8
-rw-r--r--thirdparty/harfbuzz/src/OT/Layout/GSUB/SingleSubstFormat1.hh2
-rw-r--r--thirdparty/harfbuzz/src/OT/Layout/GSUB/SingleSubstFormat2.hh20
-rw-r--r--thirdparty/harfbuzz/src/OT/glyf/CompositeGlyph.hh67
-rw-r--r--thirdparty/harfbuzz/src/OT/glyf/Glyph.hh181
-rw-r--r--thirdparty/harfbuzz/src/OT/glyf/SimpleGlyph.hh65
-rw-r--r--thirdparty/harfbuzz/src/OT/glyf/SubsetGlyph.hh11
-rw-r--r--thirdparty/harfbuzz/src/OT/glyf/VarCompositeGlyph.hh353
-rw-r--r--thirdparty/harfbuzz/src/OT/glyf/composite-iter.hh68
-rw-r--r--thirdparty/harfbuzz/src/OT/glyf/coord-setter.hh34
-rw-r--r--thirdparty/harfbuzz/src/OT/glyf/glyf.hh111
-rw-r--r--thirdparty/harfbuzz/src/graph/classdef-graph.hh2
-rw-r--r--thirdparty/harfbuzz/src/graph/coverage-graph.hh2
-rw-r--r--thirdparty/harfbuzz/src/graph/graph.hh178
-rw-r--r--thirdparty/harfbuzz/src/graph/gsubgpos-graph.hh15
-rw-r--r--thirdparty/harfbuzz/src/graph/markbasepos-graph.hh33
-rw-r--r--thirdparty/harfbuzz/src/graph/pairpos-graph.hh77
-rw-r--r--thirdparty/harfbuzz/src/graph/serialize.hh23
-rw-r--r--thirdparty/harfbuzz/src/hb-aat-layout-just-table.hh20
-rw-r--r--thirdparty/harfbuzz/src/hb-aat-layout-morx-table.hh8
-rw-r--r--thirdparty/harfbuzz/src/hb-aat-layout-trak-table.hh8
-rw-r--r--thirdparty/harfbuzz/src/hb-aat-layout.cc3
-rw-r--r--thirdparty/harfbuzz/src/hb-aat-map.hh2
-rw-r--r--thirdparty/harfbuzz/src/hb-algs.hh137
-rw-r--r--thirdparty/harfbuzz/src/hb-array.hh94
-rw-r--r--thirdparty/harfbuzz/src/hb-bit-page.hh90
-rw-r--r--thirdparty/harfbuzz/src/hb-bit-set-invertible.hh6
-rw-r--r--thirdparty/harfbuzz/src/hb-bit-set.hh72
-rw-r--r--thirdparty/harfbuzz/src/hb-blob.cc2
-rw-r--r--thirdparty/harfbuzz/src/hb-buffer-deserialize-json.hh12
-rw-r--r--thirdparty/harfbuzz/src/hb-buffer-deserialize-text.hh490
-rw-r--r--thirdparty/harfbuzz/src/hb-buffer-serialize.cc8
-rw-r--r--thirdparty/harfbuzz/src/hb-buffer-verify.cc5
-rw-r--r--thirdparty/harfbuzz/src/hb-buffer.cc99
-rw-r--r--thirdparty/harfbuzz/src/hb-buffer.hh11
-rw-r--r--thirdparty/harfbuzz/src/hb-cache.hh2
-rw-r--r--thirdparty/harfbuzz/src/hb-cff-interp-common.hh95
-rw-r--r--thirdparty/harfbuzz/src/hb-cff2-interp-cs.hh16
-rw-r--r--thirdparty/harfbuzz/src/hb-common.cc14
-rw-r--r--thirdparty/harfbuzz/src/hb-config.hh15
-rw-r--r--thirdparty/harfbuzz/src/hb-coretext.cc13
-rw-r--r--thirdparty/harfbuzz/src/hb-cplusplus.hh6
-rw-r--r--thirdparty/harfbuzz/src/hb-draw.cc91
-rw-r--r--thirdparty/harfbuzz/src/hb-face.cc85
-rw-r--r--thirdparty/harfbuzz/src/hb-face.h4
-rw-r--r--thirdparty/harfbuzz/src/hb-face.hh2
-rw-r--r--thirdparty/harfbuzz/src/hb-fallback-shape.cc10
-rw-r--r--thirdparty/harfbuzz/src/hb-font.cc154
-rw-r--r--thirdparty/harfbuzz/src/hb-font.hh6
-rw-r--r--thirdparty/harfbuzz/src/hb-ft.cc32
-rw-r--r--thirdparty/harfbuzz/src/hb-glib.cc77
-rw-r--r--thirdparty/harfbuzz/src/hb-graphite2.cc2
-rw-r--r--thirdparty/harfbuzz/src/hb-iter.hh16
-rw-r--r--thirdparty/harfbuzz/src/hb-machinery.hh14
-rw-r--r--thirdparty/harfbuzz/src/hb-map.hh219
-rw-r--r--thirdparty/harfbuzz/src/hb-meta.hh15
-rw-r--r--thirdparty/harfbuzz/src/hb-ms-feature-ranges.hh2
-rw-r--r--thirdparty/harfbuzz/src/hb-multimap.hh92
-rw-r--r--thirdparty/harfbuzz/src/hb-mutex.hh7
-rw-r--r--thirdparty/harfbuzz/src/hb-number-parser.hh8
-rw-r--r--thirdparty/harfbuzz/src/hb-object.hh4
-rw-r--r--thirdparty/harfbuzz/src/hb-open-file.hh4
-rw-r--r--thirdparty/harfbuzz/src/hb-open-type.hh94
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-cff-common.hh156
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-cff1-table.hh10
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-cff2-table.hh11
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-cmap-table.hh224
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-color-cbdt-table.hh8
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-color-colr-table.hh127
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-color.cc4
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-face-table-list.hh28
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-font.cc10
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-layout-common.hh807
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-layout-gdef-table.hh4
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-layout-gsubgpos.hh280
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-layout.cc185
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-layout.h11
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-layout.hh7
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-map.cc79
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-map.hh16
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-math-table.hh8
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-meta-table.hh2
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-name-table.hh211
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-name.cc51
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-os2-table.hh23
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-post-table-v2subset.hh4
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-post-table.hh4
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-shape-normalize.cc2
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-shape.cc12
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-shape.hh2
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-shaper-arabic.cc19
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-shaper-default.cc2
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-shaper-indic-machine.hh610
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-shaper-indic-table.cc21
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-shaper-indic.cc73
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-shaper-khmer-machine.hh16
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-shaper-khmer.cc21
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-shaper-myanmar-machine.hh16
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-shaper-myanmar.cc48
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-shaper-syllabic.cc12
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-shaper-syllabic.hh4
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-shaper-thai.cc6
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-shaper-use-machine.hh16
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-shaper-use-table.hh390
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-shaper-use.cc33
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-shaper-vowel-constraints.cc34
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-shaper.hh2
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-stat-table.hh16
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-tag.cc2
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-var-avar-table.hh6
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-var-common.hh21
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-var-fvar-table.hh106
-rw-r--r--thirdparty/harfbuzz/src/hb-ot-var-gvar-table.hh229
-rw-r--r--thirdparty/harfbuzz/src/hb-pool.hh12
-rw-r--r--thirdparty/harfbuzz/src/hb-priority-queue.hh14
-rw-r--r--thirdparty/harfbuzz/src/hb-repacker.hh29
-rw-r--r--thirdparty/harfbuzz/src/hb-sanitize.hh15
-rw-r--r--thirdparty/harfbuzz/src/hb-serialize.hh64
-rw-r--r--thirdparty/harfbuzz/src/hb-set-digest.hh35
-rw-r--r--thirdparty/harfbuzz/src/hb-set.hh6
-rw-r--r--thirdparty/harfbuzz/src/hb-shape-plan.cc7
-rw-r--r--thirdparty/harfbuzz/src/hb-shape.cc5
-rw-r--r--thirdparty/harfbuzz/src/hb-shaper.cc2
-rw-r--r--thirdparty/harfbuzz/src/hb-static.cc2
-rw-r--r--thirdparty/harfbuzz/src/hb-subset-accelerator.hh121
-rw-r--r--thirdparty/harfbuzz/src/hb-subset-cff-common.cc11
-rw-r--r--thirdparty/harfbuzz/src/hb-subset-cff-common.hh491
-rw-r--r--thirdparty/harfbuzz/src/hb-subset-cff1.cc38
-rw-r--r--thirdparty/harfbuzz/src/hb-subset-cff2.cc23
-rw-r--r--thirdparty/harfbuzz/src/hb-subset-input.cc184
-rw-r--r--thirdparty/harfbuzz/src/hb-subset-input.hh56
-rw-r--r--thirdparty/harfbuzz/src/hb-subset-plan.cc305
-rw-r--r--thirdparty/harfbuzz/src/hb-subset-plan.hh66
-rw-r--r--thirdparty/harfbuzz/src/hb-subset.cc117
-rw-r--r--thirdparty/harfbuzz/src/hb-subset.h28
-rw-r--r--thirdparty/harfbuzz/src/hb-ucd-table.hh2564
-rw-r--r--thirdparty/harfbuzz/src/hb-ucd.cc16
-rw-r--r--thirdparty/harfbuzz/src/hb-unicode.cc26
-rw-r--r--thirdparty/harfbuzz/src/hb-uniscribe.cc6
-rw-r--r--thirdparty/harfbuzz/src/hb-vector.hh100
-rw-r--r--thirdparty/harfbuzz/src/hb-version.h6
-rw-r--r--thirdparty/harfbuzz/src/hb.hh1
-rw-r--r--thirdparty/icu4c/common/appendable.cpp8
-rw-r--r--thirdparty/icu4c/common/bmpset.cpp4
-rw-r--r--thirdparty/icu4c/common/brkeng.cpp4
-rw-r--r--thirdparty/icu4c/common/brkiter.cpp6
-rw-r--r--thirdparty/icu4c/common/bytesinkutil.cpp16
-rw-r--r--thirdparty/icu4c/common/bytesinkutil.h5
-rw-r--r--thirdparty/icu4c/common/bytestream.cpp8
-rw-r--r--thirdparty/icu4c/common/bytestrie.cpp14
-rw-r--r--thirdparty/icu4c/common/bytestriebuilder.cpp10
-rw-r--r--thirdparty/icu4c/common/bytestrieiterator.cpp12
-rw-r--r--thirdparty/icu4c/common/caniter.cpp10
-rw-r--r--thirdparty/icu4c/common/characterproperties.cpp10
-rw-r--r--thirdparty/icu4c/common/charstr.cpp6
-rw-r--r--thirdparty/icu4c/common/charstr.h4
-rw-r--r--thirdparty/icu4c/common/cmemory.cpp2
-rw-r--r--thirdparty/icu4c/common/cstr.h4
-rw-r--r--thirdparty/icu4c/common/dictbe.cpp8
-rw-r--r--thirdparty/icu4c/common/edits.cpp64
-rw-r--r--thirdparty/icu4c/common/emojiprops.cpp2
-rw-r--r--thirdparty/icu4c/common/filteredbrk.cpp38
-rw-r--r--thirdparty/icu4c/common/filterednormalizer2.cpp16
-rw-r--r--thirdparty/icu4c/common/hash.h4
-rw-r--r--thirdparty/icu4c/common/icuplug.cpp28
-rw-r--r--thirdparty/icu4c/common/loadednormalizer2impl.cpp12
-rw-r--r--thirdparty/icu4c/common/localebuilder.cpp4
-rw-r--r--thirdparty/icu4c/common/localefallback_data.h632
-rw-r--r--thirdparty/icu4c/common/localematcher.cpp22
-rw-r--r--thirdparty/icu4c/common/localeprioritylist.cpp2
-rw-r--r--thirdparty/icu4c/common/locavailable.cpp8
-rw-r--r--thirdparty/icu4c/common/locdispnames.cpp16
-rw-r--r--thirdparty/icu4c/common/locdistance.cpp4
-rw-r--r--thirdparty/icu4c/common/locdspnm.cpp40
-rw-r--r--thirdparty/icu4c/common/locid.cpp53
-rw-r--r--thirdparty/icu4c/common/loclikely.cpp50
-rw-r--r--thirdparty/icu4c/common/loclikelysubtags.cpp6
-rw-r--r--thirdparty/icu4c/common/locmap.cpp8
-rw-r--r--thirdparty/icu4c/common/locutil.cpp4
-rw-r--r--thirdparty/icu4c/common/messageimpl.h2
-rw-r--r--thirdparty/icu4c/common/messagepattern.cpp74
-rw-r--r--thirdparty/icu4c/common/norm2_nfc_data.h130
-rw-r--r--thirdparty/icu4c/common/normalizer2.cpp20
-rw-r--r--thirdparty/icu4c/common/normalizer2impl.cpp134
-rw-r--r--thirdparty/icu4c/common/normalizer2impl.h6
-rw-r--r--thirdparty/icu4c/common/normlzr.cpp6
-rw-r--r--thirdparty/icu4c/common/patternprops.cpp22
-rw-r--r--thirdparty/icu4c/common/patternprops.h2
-rw-r--r--thirdparty/icu4c/common/propname.cpp4
-rw-r--r--thirdparty/icu4c/common/propname_data.h1392
-rw-r--r--thirdparty/icu4c/common/propsvec.cpp6
-rw-r--r--thirdparty/icu4c/common/punycode.cpp2
-rw-r--r--thirdparty/icu4c/common/putil.cpp34
-rw-r--r--thirdparty/icu4c/common/rbbi.cpp32
-rw-r--r--thirdparty/icu4c/common/rbbi_cache.cpp150
-rw-r--r--thirdparty/icu4c/common/rbbidata.cpp6
-rw-r--r--thirdparty/icu4c/common/rbbinode.cpp14
-rw-r--r--thirdparty/icu4c/common/rbbirb.cpp6
-rw-r--r--thirdparty/icu4c/common/rbbirb.h8
-rw-r--r--thirdparty/icu4c/common/rbbirpt.h208
-rw-r--r--thirdparty/icu4c/common/rbbiscan.cpp78
-rw-r--r--thirdparty/icu4c/common/rbbiscan.h4
-rw-r--r--thirdparty/icu4c/common/rbbisetb.cpp4
-rw-r--r--thirdparty/icu4c/common/rbbisetb.h4
-rw-r--r--thirdparty/icu4c/common/rbbistbl.cpp4
-rw-r--r--thirdparty/icu4c/common/rbbitblb.cpp24
-rw-r--r--thirdparty/icu4c/common/rbbitblb.h4
-rw-r--r--thirdparty/icu4c/common/resbund.cpp10
-rw-r--r--thirdparty/icu4c/common/resource.h4
-rw-r--r--thirdparty/icu4c/common/ruleiter.cpp4
-rw-r--r--thirdparty/icu4c/common/ruleiter.h8
-rw-r--r--thirdparty/icu4c/common/serv.cpp18
-rw-r--r--thirdparty/icu4c/common/servlk.cpp8
-rw-r--r--thirdparty/icu4c/common/servlkf.cpp2
-rw-r--r--thirdparty/icu4c/common/servls.cpp4
-rw-r--r--thirdparty/icu4c/common/sharedobject.h4
-rw-r--r--thirdparty/icu4c/common/simpleformatter.cpp18
-rw-r--r--thirdparty/icu4c/common/static_unicode_sets.cpp12
-rw-r--r--thirdparty/icu4c/common/stringtriebuilder.cpp20
-rw-r--r--thirdparty/icu4c/common/uarrsort.cpp8
-rw-r--r--thirdparty/icu4c/common/ubidi.cpp64
-rw-r--r--thirdparty/icu4c/common/ubidi_props.cpp2
-rw-r--r--thirdparty/icu4c/common/ubidi_props_data.h253
-rw-r--r--thirdparty/icu4c/common/ubidiln.cpp16
-rw-r--r--thirdparty/icu4c/common/ubiditransform.cpp28
-rw-r--r--thirdparty/icu4c/common/ucase.cpp60
-rw-r--r--thirdparty/icu4c/common/ucase.h4
-rw-r--r--thirdparty/icu4c/common/ucase_props_data.h1373
-rw-r--r--thirdparty/icu4c/common/ucasemap.cpp18
-rw-r--r--thirdparty/icu4c/common/uchar.cpp12
-rw-r--r--thirdparty/icu4c/common/uchar_props_data.h7155
-rw-r--r--thirdparty/icu4c/common/ucharstrie.cpp14
-rw-r--r--thirdparty/icu4c/common/ucharstriebuilder.cpp10
-rw-r--r--thirdparty/icu4c/common/ucharstrieiterator.cpp20
-rw-r--r--thirdparty/icu4c/common/uchriter.cpp4
-rw-r--r--thirdparty/icu4c/common/ucln_cmn.cpp2
-rw-r--r--thirdparty/icu4c/common/ucnv.cpp110
-rw-r--r--thirdparty/icu4c/common/ucnv2022.cpp76
-rw-r--r--thirdparty/icu4c/common/ucnv_bld.cpp50
-rw-r--r--thirdparty/icu4c/common/ucnv_cb.cpp4
-rw-r--r--thirdparty/icu4c/common/ucnv_ct.cpp14
-rw-r--r--thirdparty/icu4c/common/ucnv_ext.cpp38
-rw-r--r--thirdparty/icu4c/common/ucnv_io.cpp42
-rw-r--r--thirdparty/icu4c/common/ucnv_lmb.cpp18
-rw-r--r--thirdparty/icu4c/common/ucnv_u16.cpp8
-rw-r--r--thirdparty/icu4c/common/ucnv_u32.cpp8
-rw-r--r--thirdparty/icu4c/common/ucnv_u7.cpp42
-rw-r--r--thirdparty/icu4c/common/ucnv_u8.cpp6
-rw-r--r--thirdparty/icu4c/common/ucnvbocu.cpp4
-rw-r--r--thirdparty/icu4c/common/ucnvhz.cpp34
-rw-r--r--thirdparty/icu4c/common/ucnvisci.cpp40
-rw-r--r--thirdparty/icu4c/common/ucnvlat1.cpp4
-rw-r--r--thirdparty/icu4c/common/ucnvmbcs.cpp48
-rw-r--r--thirdparty/icu4c/common/ucnvscsu.cpp52
-rw-r--r--thirdparty/icu4c/common/ucnvsel.cpp4
-rw-r--r--thirdparty/icu4c/common/ucol_swp.cpp12
-rw-r--r--thirdparty/icu4c/common/ucptrie_impl.h108
-rw-r--r--thirdparty/icu4c/common/ucurr.cpp91
-rw-r--r--thirdparty/icu4c/common/udata.cpp52
-rw-r--r--thirdparty/icu4c/common/udatamem.cpp2
-rw-r--r--thirdparty/icu4c/common/udatamem.h2
-rw-r--r--thirdparty/icu4c/common/uhash.cpp30
-rw-r--r--thirdparty/icu4c/common/uidna.cpp78
-rw-r--r--thirdparty/icu4c/common/uinit.cpp4
-rw-r--r--thirdparty/icu4c/common/uinvchar.cpp12
-rw-r--r--thirdparty/icu4c/common/uiter.cpp12
-rw-r--r--thirdparty/icu4c/common/ulist.cpp8
-rw-r--r--thirdparty/icu4c/common/uloc.cpp54
-rw-r--r--thirdparty/icu4c/common/uloc_keytype.cpp38
-rw-r--r--thirdparty/icu4c/common/uloc_tag.cpp234
-rw-r--r--thirdparty/icu4c/common/umapfile.cpp56
-rw-r--r--thirdparty/icu4c/common/umapfile.h2
-rw-r--r--thirdparty/icu4c/common/umutex.h10
-rw-r--r--thirdparty/icu4c/common/unames.cpp56
-rw-r--r--thirdparty/icu4c/common/unicode/bytestrie.h2
-rw-r--r--thirdparty/icu4c/common/unicode/bytestriebuilder.h4
-rw-r--r--thirdparty/icu4c/common/unicode/caniter.h6
-rw-r--r--thirdparty/icu4c/common/unicode/dtintrv.h2
-rw-r--r--thirdparty/icu4c/common/unicode/idna.h4
-rw-r--r--thirdparty/icu4c/common/unicode/normlzr.h4
-rw-r--r--thirdparty/icu4c/common/unicode/platform.h17
-rw-r--r--thirdparty/icu4c/common/unicode/rbbi.h7
-rw-r--r--thirdparty/icu4c/common/unicode/resbund.h2
-rw-r--r--thirdparty/icu4c/common/unicode/schriter.h8
-rw-r--r--thirdparty/icu4c/common/unicode/ubiditransform.h2
-rw-r--r--thirdparty/icu4c/common/unicode/uchar.h43
-rw-r--r--thirdparty/icu4c/common/unicode/ucharstrie.h2
-rw-r--r--thirdparty/icu4c/common/unicode/ucharstriebuilder.h4
-rw-r--r--thirdparty/icu4c/common/unicode/ucnv_cb.h4
-rw-r--r--thirdparty/icu4c/common/unicode/ucnv_err.h2
-rw-r--r--thirdparty/icu4c/common/unicode/ucnvsel.h1
-rw-r--r--thirdparty/icu4c/common/unicode/ucpmap.h3
-rw-r--r--thirdparty/icu4c/common/unicode/ucptrie.h3
-rw-r--r--thirdparty/icu4c/common/unicode/uloc.h4
-rw-r--r--thirdparty/icu4c/common/unicode/umachine.h8
-rw-r--r--thirdparty/icu4c/common/unicode/umisc.h4
-rw-r--r--thirdparty/icu4c/common/unicode/umutablecptrie.h3
-rw-r--r--thirdparty/icu4c/common/unicode/uniset.h11
-rw-r--r--thirdparty/icu4c/common/unicode/urename.h7
-rw-r--r--thirdparty/icu4c/common/unicode/uscript.h7
-rw-r--r--thirdparty/icu4c/common/unicode/uset.h8
-rw-r--r--thirdparty/icu4c/common/unicode/usetiter.h4
-rw-r--r--thirdparty/icu4c/common/unicode/utf_old.h6
-rw-r--r--thirdparty/icu4c/common/unicode/uvernum.h25
-rw-r--r--thirdparty/icu4c/common/unifiedcache.cpp24
-rw-r--r--thirdparty/icu4c/common/unifiedcache.h4
-rw-r--r--thirdparty/icu4c/common/uniset.cpp32
-rw-r--r--thirdparty/icu4c/common/uniset_closure.cpp4
-rw-r--r--thirdparty/icu4c/common/uniset_props.cpp70
-rw-r--r--thirdparty/icu4c/common/unisetspan.cpp28
-rw-r--r--thirdparty/icu4c/common/unistr.cpp72
-rw-r--r--thirdparty/icu4c/common/unistr_case.cpp12
-rw-r--r--thirdparty/icu4c/common/unistr_cnv.cpp16
-rw-r--r--thirdparty/icu4c/common/unorm.cpp6
-rw-r--r--thirdparty/icu4c/common/unormcmp.cpp8
-rw-r--r--thirdparty/icu4c/common/uprops.cpp34
-rw-r--r--thirdparty/icu4c/common/uresbund.cpp380
-rw-r--r--thirdparty/icu4c/common/uresdata.cpp28
-rw-r--r--thirdparty/icu4c/common/usc_impl.cpp4
-rw-r--r--thirdparty/icu4c/common/uscript.cpp4
-rw-r--r--thirdparty/icu4c/common/uscript_props.cpp2
-rw-r--r--thirdparty/icu4c/common/uset.cpp36
-rw-r--r--thirdparty/icu4c/common/usetiter.cpp16
-rw-r--r--thirdparty/icu4c/common/ushape.cpp12
-rw-r--r--thirdparty/icu4c/common/usprep.cpp50
-rw-r--r--thirdparty/icu4c/common/ustr_cnv.cpp4
-rw-r--r--thirdparty/icu4c/common/ustr_titlecase_brkiter.cpp2
-rw-r--r--thirdparty/icu4c/common/ustrcase.cpp14
-rw-r--r--thirdparty/icu4c/common/ustring.cpp44
-rw-r--r--thirdparty/icu4c/common/ustrtrns.cpp2
-rw-r--r--thirdparty/icu4c/common/utext.cpp126
-rw-r--r--thirdparty/icu4c/common/utf_impl.cpp8
-rw-r--r--thirdparty/icu4c/common/util.cpp10
-rw-r--r--thirdparty/icu4c/common/util.h2
-rw-r--r--thirdparty/icu4c/common/utrace.cpp2
-rw-r--r--thirdparty/icu4c/common/utrie.cpp48
-rw-r--r--thirdparty/icu4c/common/utrie2.cpp8
-rw-r--r--thirdparty/icu4c/common/utrie2.h2
-rw-r--r--thirdparty/icu4c/common/utrie2_builder.cpp44
-rw-r--r--thirdparty/icu4c/common/utrie_swap.cpp6
-rw-r--r--thirdparty/icu4c/common/uts46.cpp100
-rw-r--r--thirdparty/icu4c/common/uvector.cpp34
-rw-r--r--thirdparty/icu4c/common/uvectr32.cpp38
-rw-r--r--thirdparty/icu4c/common/uvectr32.h4
-rw-r--r--thirdparty/icu4c/common/uvectr64.cpp16
-rw-r--r--thirdparty/icu4c/common/uvectr64.h4
-rw-r--r--thirdparty/icu4c/common/wintz.cpp2
-rw-r--r--thirdparty/icu4c/i18n/scriptset.cpp14
-rw-r--r--thirdparty/icu4c/i18n/scriptset.h5
-rw-r--r--thirdparty/icu4c/i18n/ucln_in.cpp2
-rw-r--r--thirdparty/icu4c/i18n/unicode/uspoof.h2
-rw-r--r--thirdparty/icu4c/i18n/uspoof.cpp45
-rw-r--r--thirdparty/icu4c/i18n/uspoof_impl.cpp20
-rw-r--r--thirdparty/icu4c/i18n/uspoof_impl.h1
-rw-r--r--thirdparty/icu4c/icudt72l.dat (renamed from thirdparty/icu4c/icudt71l.dat)bin4271680 -> 4298880 bytes
-rw-r--r--thirdparty/libpng/arm/arm_init.c5
-rw-r--r--thirdparty/libpng/png.c6
-rw-r--r--thirdparty/libpng/png.h16
-rw-r--r--thirdparty/libpng/pngconf.h2
-rw-r--r--thirdparty/libpng/pnglibconf.h2
-rw-r--r--thirdparty/libpng/pngpriv.h2
-rw-r--r--thirdparty/libpng/pngread.c4
-rw-r--r--thirdparty/libpng/pngrutil.c2
-rw-r--r--thirdparty/libpng/pngwrite.c8
-rw-r--r--thirdparty/libpng/pngwutil.c6
-rw-r--r--thirdparty/libtheora/LICENSE8
-rw-r--r--thirdparty/libtheora/analyze.c2283
-rw-r--r--thirdparty/libtheora/apiwrapper.c2
-rw-r--r--thirdparty/libtheora/apiwrapper.h2
-rw-r--r--thirdparty/libtheora/bitpack.c23
-rw-r--r--thirdparty/libtheora/bitpack.h27
-rw-r--r--thirdparty/libtheora/collect.c974
-rw-r--r--thirdparty/libtheora/collect.h109
-rw-r--r--thirdparty/libtheora/dct.h2
-rw-r--r--thirdparty/libtheora/decinfo.c40
-rw-r--r--thirdparty/libtheora/decint.h152
-rw-r--r--thirdparty/libtheora/decode.c1993
-rw-r--r--thirdparty/libtheora/dequant.c2
-rw-r--r--thirdparty/libtheora/dequant.h2
-rw-r--r--thirdparty/libtheora/encfrag.c159
-rw-r--r--thirdparty/libtheora/encinfo.c2
-rw-r--r--thirdparty/libtheora/encint.h502
-rw-r--r--thirdparty/libtheora/encode.c428
-rw-r--r--thirdparty/libtheora/encoder_disabled.c5
-rw-r--r--thirdparty/libtheora/enquant.c198
-rw-r--r--thirdparty/libtheora/enquant.h7
-rw-r--r--thirdparty/libtheora/fdct.c9
-rw-r--r--thirdparty/libtheora/fragment.c43
-rw-r--r--thirdparty/libtheora/huffdec.c694
-rw-r--r--thirdparty/libtheora/huffdec.h72
-rw-r--r--thirdparty/libtheora/huffenc.c112
-rw-r--r--thirdparty/libtheora/huffenc.h3
-rw-r--r--thirdparty/libtheora/huffman.h4
-rw-r--r--thirdparty/libtheora/idct.c59
-rw-r--r--thirdparty/libtheora/info.c10
-rw-r--r--thirdparty/libtheora/internal.c92
-rw-r--r--thirdparty/libtheora/internal.h445
-rw-r--r--thirdparty/libtheora/mathops.c68
-rw-r--r--thirdparty/libtheora/mathops.h44
-rw-r--r--thirdparty/libtheora/mcenc.c215
-rw-r--r--thirdparty/libtheora/modedec.h4389
-rw-r--r--thirdparty/libtheora/ocintrin.h2
-rw-r--r--thirdparty/libtheora/patches/theora.git-0ae66d565e6bead8604d312bc1a4e9dccf245c88.patch38
-rw-r--r--thirdparty/libtheora/quant.c10
-rw-r--r--thirdparty/libtheora/quant.h2
-rw-r--r--thirdparty/libtheora/rate.c26
-rw-r--r--thirdparty/libtheora/state.c298
-rw-r--r--thirdparty/libtheora/state.h552
-rw-r--r--thirdparty/libtheora/theora/codec.h77
-rw-r--r--thirdparty/libtheora/theora/theora.h114
-rw-r--r--thirdparty/libtheora/theora/theoradec.h22
-rw-r--r--thirdparty/libtheora/theora/theoraenc.h112
-rw-r--r--thirdparty/libtheora/tokenize.c1006
-rw-r--r--thirdparty/libtheora/x86/mmxencfrag.c284
-rw-r--r--thirdparty/libtheora/x86/mmxfdct.c127
-rw-r--r--thirdparty/libtheora/x86/mmxfrag.c81
-rw-r--r--thirdparty/libtheora/x86/mmxfrag.h64
-rw-r--r--thirdparty/libtheora/x86/mmxidct.c292
-rw-r--r--thirdparty/libtheora/x86/mmxloop.h271
-rw-r--r--thirdparty/libtheora/x86/mmxstate.c162
-rw-r--r--thirdparty/libtheora/x86/sse2encfrag.c501
-rw-r--r--thirdparty/libtheora/x86/sse2fdct.c111
-rw-r--r--thirdparty/libtheora/x86/sse2idct.c456
-rw-r--r--thirdparty/libtheora/x86/sse2trans.h242
-rw-r--r--thirdparty/libtheora/x86/x86cpu.c182
-rw-r--r--thirdparty/libtheora/x86/x86cpu.h (renamed from thirdparty/libtheora/cpu.h)10
-rw-r--r--thirdparty/libtheora/x86/x86enc.c34
-rw-r--r--thirdparty/libtheora/x86/x86enc.h93
-rw-r--r--thirdparty/libtheora/x86/x86enquant.c149
-rw-r--r--thirdparty/libtheora/x86/x86int.h96
-rw-r--r--thirdparty/libtheora/x86/x86state.c47
-rw-r--r--thirdparty/libtheora/x86/x86zigzag.h244
-rw-r--r--thirdparty/libtheora/x86_vc/mmxencfrag.c122
-rw-r--r--thirdparty/libtheora/x86_vc/mmxfdct.c128
-rw-r--r--thirdparty/libtheora/x86_vc/mmxfrag.c83
-rw-r--r--thirdparty/libtheora/x86_vc/mmxfrag.h61
-rw-r--r--thirdparty/libtheora/x86_vc/mmxidct.c230
-rw-r--r--thirdparty/libtheora/x86_vc/mmxstate.c75
-rw-r--r--thirdparty/libtheora/x86_vc/x86cpu.c (renamed from thirdparty/libtheora/cpu.c)60
-rw-r--r--thirdparty/libtheora/x86_vc/x86cpu.h36
-rw-r--r--thirdparty/libtheora/x86_vc/x86enc.c14
-rw-r--r--thirdparty/libtheora/x86_vc/x86enc.h22
-rw-r--r--thirdparty/libtheora/x86_vc/x86int.h23
-rw-r--r--thirdparty/libtheora/x86_vc/x86state.c11
-rw-r--r--thirdparty/libtheora/x86_vc/x86zigzag.h244
-rw-r--r--thirdparty/mbedtls/include/mbedtls/asn1write.h2
-rw-r--r--thirdparty/mbedtls/include/mbedtls/bignum.h69
-rw-r--r--thirdparty/mbedtls/include/mbedtls/bn_mul.h32
-rw-r--r--thirdparty/mbedtls/include/mbedtls/check_config.h30
-rw-r--r--thirdparty/mbedtls/include/mbedtls/config.h15
-rw-r--r--thirdparty/mbedtls/include/mbedtls/ecdsa.h4
-rw-r--r--thirdparty/mbedtls/include/mbedtls/md.h2
-rw-r--r--thirdparty/mbedtls/include/mbedtls/platform.h7
-rw-r--r--thirdparty/mbedtls/include/mbedtls/ripemd160.h2
-rw-r--r--thirdparty/mbedtls/include/mbedtls/rsa.h2
-rw-r--r--thirdparty/mbedtls/include/mbedtls/ssl.h6
-rw-r--r--thirdparty/mbedtls/include/mbedtls/ssl_internal.h2
-rw-r--r--thirdparty/mbedtls/include/mbedtls/version.h8
-rw-r--r--thirdparty/mbedtls/library/aes.c12
-rw-r--r--thirdparty/mbedtls/library/arc4.c7
-rw-r--r--thirdparty/mbedtls/library/aria.c61
-rw-r--r--thirdparty/mbedtls/library/asn1parse.c6
-rw-r--r--thirdparty/mbedtls/library/asn1write.c12
-rw-r--r--thirdparty/mbedtls/library/base64.c5
-rw-r--r--thirdparty/mbedtls/library/bignum.c230
-rw-r--r--thirdparty/mbedtls/library/camellia.c7
-rw-r--r--thirdparty/mbedtls/library/ccm.c7
-rw-r--r--thirdparty/mbedtls/library/chacha20.c12
-rw-r--r--thirdparty/mbedtls/library/chachapoly.c7
-rw-r--r--thirdparty/mbedtls/library/cipher.c5
-rw-r--r--thirdparty/mbedtls/library/cipher_wrap.c6
-rw-r--r--thirdparty/mbedtls/library/common.h45
-rw-r--r--thirdparty/mbedtls/library/constant_time.c11
-rw-r--r--thirdparty/mbedtls/library/constant_time_internal.h9
-rw-r--r--thirdparty/mbedtls/library/ctr_drbg.c7
-rw-r--r--thirdparty/mbedtls/library/debug.c14
-rw-r--r--thirdparty/mbedtls/library/des.c7
-rw-r--r--thirdparty/mbedtls/library/dhm.c8
-rw-r--r--thirdparty/mbedtls/library/ecdh.c6
-rw-r--r--thirdparty/mbedtls/library/ecdsa.c6
-rw-r--r--thirdparty/mbedtls/library/ecjpake.c5
-rw-r--r--thirdparty/mbedtls/library/ecp.c55
-rw-r--r--thirdparty/mbedtls/library/ecp_curves.c5
-rw-r--r--thirdparty/mbedtls/library/entropy.c9
-rw-r--r--thirdparty/mbedtls/library/entropy_poll.c2
-rw-r--r--thirdparty/mbedtls/library/error.c6
-rw-r--r--thirdparty/mbedtls/library/gcm.c10
-rw-r--r--thirdparty/mbedtls/library/hmac_drbg.c7
-rw-r--r--thirdparty/mbedtls/library/md.c6
-rw-r--r--thirdparty/mbedtls/library/md2.c7
-rw-r--r--thirdparty/mbedtls/library/md4.c7
-rw-r--r--thirdparty/mbedtls/library/md5.c7
-rw-r--r--thirdparty/mbedtls/library/mps_reader.c7
-rw-r--r--thirdparty/mbedtls/library/mps_trace.h6
-rw-r--r--thirdparty/mbedtls/library/net_sockets.c4
-rw-r--r--thirdparty/mbedtls/library/nist_kw.c7
-rw-r--r--thirdparty/mbedtls/library/oid.c4
-rw-r--r--thirdparty/mbedtls/library/pem.c6
-rw-r--r--thirdparty/mbedtls/library/pk_wrap.c8
-rw-r--r--thirdparty/mbedtls/library/pkcs11.c6
-rw-r--r--thirdparty/mbedtls/library/pkcs5.c5
-rw-r--r--thirdparty/mbedtls/library/pkparse.c6
-rw-r--r--thirdparty/mbedtls/library/pkwrite.c6
-rw-r--r--thirdparty/mbedtls/library/poly1305.c12
-rw-r--r--thirdparty/mbedtls/library/ripemd160.c7
-rw-r--r--thirdparty/mbedtls/library/rsa.c7
-rw-r--r--thirdparty/mbedtls/library/sha1.c7
-rw-r--r--thirdparty/mbedtls/library/sha256.c10
-rw-r--r--thirdparty/mbedtls/library/sha512.c14
-rw-r--r--thirdparty/mbedtls/library/ssl_cache.c6
-rw-r--r--thirdparty/mbedtls/library/ssl_ciphersuites.c4
-rw-r--r--thirdparty/mbedtls/library/ssl_cli.c94
-rw-r--r--thirdparty/mbedtls/library/ssl_cookie.c5
-rw-r--r--thirdparty/mbedtls/library/ssl_msg.c25
-rw-r--r--thirdparty/mbedtls/library/ssl_srv.c24
-rw-r--r--thirdparty/mbedtls/library/ssl_ticket.c42
-rw-r--r--thirdparty/mbedtls/library/ssl_tls.c79
-rw-r--r--thirdparty/mbedtls/library/ssl_tls13_keys.c8
-rw-r--r--thirdparty/mbedtls/library/timing.c7
-rw-r--r--thirdparty/mbedtls/library/x509.c52
-rw-r--r--thirdparty/mbedtls/library/x509_crl.c10
-rw-r--r--thirdparty/mbedtls/library/x509_crt.c46
-rw-r--r--thirdparty/mbedtls/library/x509_csr.c8
-rw-r--r--thirdparty/mbedtls/library/x509write_csr.c6
-rw-r--r--thirdparty/mbedtls/library/xtea.c9
-rw-r--r--thirdparty/mbedtls/patches/1453.diff28
-rw-r--r--thirdparty/mbedtls/patches/windows-arm64-hardclock.diff16
-rw-r--r--thirdparty/miniupnpc/LICENSE39
-rw-r--r--thirdparty/miniupnpc/include/miniupnpc.h8
-rw-r--r--thirdparty/miniupnpc/src/miniupnpcstrings.h2
-rw-r--r--thirdparty/miniupnpc/src/minixml.c2
-rw-r--r--thirdparty/minizip/MiniZip64_info.txt (renamed from thirdparty/minizip/MiniZip_info.txt)0
-rw-r--r--thirdparty/minizip/crypt.h2
-rw-r--r--thirdparty/minizip/ioapi.c22
-rw-r--r--thirdparty/minizip/ioapi.h2
-rw-r--r--thirdparty/minizip/patches/godot-seek.patch14
-rw-r--r--thirdparty/minizip/patches/unbreak-gentoo.patch4
-rw-r--r--thirdparty/minizip/unzip.c4
-rw-r--r--thirdparty/minizip/zip.c7
-rw-r--r--thirdparty/openxr/include/openxr/openxr.h177
-rw-r--r--thirdparty/openxr/include/openxr/openxr_reflection.h490
-rw-r--r--thirdparty/openxr/include/openxr/openxr_reflection_parent_structs.h261
-rw-r--r--thirdparty/openxr/include/openxr/openxr_reflection_structs.h427
-rw-r--r--thirdparty/openxr/src/loader/loader_core.cpp4
-rw-r--r--thirdparty/openxr/src/loader/manifest_file.cpp12
-rw-r--r--thirdparty/openxr/src/xr_generated_dispatch_table.c14
-rw-r--r--thirdparty/openxr/src/xr_generated_dispatch_table.h14
-rw-r--r--thirdparty/recastnavigation/Recast/Include/Recast.h25
-rw-r--r--thirdparty/recastnavigation/Recast/Include/RecastAlloc.h6
-rw-r--r--thirdparty/recastnavigation/Recast/Source/Recast.cpp5
-rw-r--r--thirdparty/recastnavigation/Recast/Source/RecastMesh.cpp2
-rw-r--r--thirdparty/recastnavigation/Recast/Source/RecastMeshDetail.cpp2
-rw-r--r--thirdparty/recastnavigation/Recast/Source/RecastRasterization.cpp12
-rw-r--r--thirdparty/spirv-reflect/patches/specialization-constants.patch30
-rw-r--r--thirdparty/spirv-reflect/patches/zero-calloc.patch4
-rw-r--r--thirdparty/spirv-reflect/spirv_reflect.c51
-rw-r--r--thirdparty/spirv-reflect/spirv_reflect.h10
-rw-r--r--thirdparty/thorvg/inc/config.h2
-rw-r--r--thirdparty/thorvg/inc/thorvg.h18
-rw-r--r--thirdparty/thorvg/src/lib/sw_engine/tvgSwCommon.h2
-rw-r--r--thirdparty/thorvg/src/lib/sw_engine/tvgSwFill.cpp2
-rw-r--r--thirdparty/thorvg/src/lib/sw_engine/tvgSwMath.cpp2
-rw-r--r--thirdparty/thorvg/src/lib/sw_engine/tvgSwRaster.cpp8
-rw-r--r--thirdparty/thorvg/src/lib/sw_engine/tvgSwRasterTexmapInternal.h2
-rw-r--r--thirdparty/thorvg/src/lib/sw_engine/tvgSwStroke.cpp2
-rw-r--r--thirdparty/thorvg/src/lib/tvgLoadModule.h1
-rw-r--r--thirdparty/thorvg/src/loaders/external_jpg/tvgJpgLoader.cpp164
-rw-r--r--thirdparty/thorvg/src/loaders/external_jpg/tvgJpgLoader.h52
-rw-r--r--thirdparty/thorvg/src/loaders/jpg/tvgJpgLoader.cpp6
-rw-r--r--thirdparty/thorvg/src/loaders/jpg/tvgJpgd.cpp2
-rw-r--r--thirdparty/thorvg/src/loaders/png/tvgLodePng.cpp2647
-rw-r--r--thirdparty/thorvg/src/loaders/png/tvgLodePng.h174
-rw-r--r--thirdparty/thorvg/src/loaders/png/tvgPngLoader.cpp194
-rw-r--r--thirdparty/thorvg/src/loaders/png/tvgPngLoader.h54
-rw-r--r--thirdparty/thorvg/src/loaders/raw/tvgRawLoader.cpp6
-rw-r--r--thirdparty/thorvg/src/loaders/svg/tvgSvgCssStyle.cpp20
-rw-r--r--thirdparty/thorvg/src/loaders/svg/tvgSvgLoader.cpp164
-rw-r--r--thirdparty/thorvg/src/loaders/svg/tvgSvgLoader.h3
-rw-r--r--thirdparty/thorvg/src/loaders/svg/tvgSvgLoaderCommon.h26
-rw-r--r--thirdparty/thorvg/src/loaders/svg/tvgSvgSceneBuilder.cpp148
-rw-r--r--thirdparty/thorvg/src/loaders/svg/tvgSvgSceneBuilder.h2
-rw-r--r--thirdparty/thorvg/src/loaders/svg/tvgXmlParser.cpp8
-rw-r--r--thirdparty/thorvg/src/loaders/tvg/tvgTvgBinInterpreter.cpp6
-rw-r--r--thirdparty/thorvg/src/savers/tvg/tvgTvgSaver.cpp6
-rwxr-xr-xthirdparty/thorvg/update-thorvg.sh11
-rw-r--r--thirdparty/volk/LICENSE.md2
-rw-r--r--thirdparty/volk/volk.c265
-rw-r--r--thirdparty/volk/volk.h181
-rw-r--r--thirdparty/vulkan/include/vk_video/vulkan_video_codec_h264std.h166
-rw-r--r--thirdparty/vulkan/include/vk_video/vulkan_video_codec_h264std_decode.h45
-rw-r--r--thirdparty/vulkan/include/vk_video/vulkan_video_codec_h264std_encode.h89
-rw-r--r--thirdparty/vulkan/include/vk_video/vulkan_video_codec_h265std.h369
-rw-r--r--thirdparty/vulkan/include/vk_video/vulkan_video_codec_h265std_decode.h21
-rw-r--r--thirdparty/vulkan/include/vk_video/vulkan_video_codec_h265std_encode.h113
-rw-r--r--thirdparty/vulkan/include/vulkan/vulkan.h1
-rw-r--r--thirdparty/vulkan/include/vulkan/vulkan.hpp7670
-rw-r--r--thirdparty/vulkan/include/vulkan/vulkan_android.h2
-rw-r--r--thirdparty/vulkan/include/vulkan/vulkan_beta.h741
-rw-r--r--thirdparty/vulkan/include/vulkan/vulkan_core.h1959
-rw-r--r--thirdparty/vulkan/include/vulkan/vulkan_enums.hpp17360
-rw-r--r--thirdparty/vulkan/include/vulkan/vulkan_format_traits.hpp7357
-rw-r--r--thirdparty/vulkan/include/vulkan/vulkan_funcs.hpp19963
-rw-r--r--thirdparty/vulkan/include/vulkan/vulkan_handles.hpp9880
-rw-r--r--thirdparty/vulkan/include/vulkan/vulkan_hash.hpp5010
-rw-r--r--thirdparty/vulkan/include/vulkan/vulkan_metal.h141
-rw-r--r--thirdparty/vulkan/include/vulkan/vulkan_raii.hpp16206
-rw-r--r--thirdparty/vulkan/include/vulkan/vulkan_static_assertions.hpp6105
-rw-r--r--thirdparty/vulkan/include/vulkan/vulkan_structs.hpp60108
-rw-r--r--thirdparty/vulkan/include/vulkan/vulkan_to_string.hpp8177
-rw-r--r--thirdparty/vulkan/patches/VKEnumStringHelper-use-volk.patch4
-rw-r--r--thirdparty/vulkan/vk_enum_string_helper.h1798
-rw-r--r--thirdparty/zlib/LICENSE22
-rw-r--r--thirdparty/zlib/compress.c6
-rw-r--r--thirdparty/zlib/crc32.c33
-rw-r--r--thirdparty/zlib/deflate.c218
-rw-r--r--thirdparty/zlib/deflate.h4
-rw-r--r--thirdparty/zlib/gzlib.c2
-rw-r--r--thirdparty/zlib/gzread.c8
-rw-r--r--thirdparty/zlib/gzwrite.c2
-rw-r--r--thirdparty/zlib/infback.c17
-rw-r--r--thirdparty/zlib/inflate.c7
-rw-r--r--thirdparty/zlib/inftrees.c4
-rw-r--r--thirdparty/zlib/inftrees.h2
-rw-r--r--thirdparty/zlib/trees.c123
-rw-r--r--thirdparty/zlib/uncompr.c4
-rw-r--r--thirdparty/zlib/zconf.h19
-rw-r--r--thirdparty/zlib/zlib.h20
-rw-r--r--thirdparty/zlib/zutil.c16
-rw-r--r--thirdparty/zlib/zutil.h1
940 files changed, 172258 insertions, 133833 deletions
diff --git a/thirdparty/README.md b/thirdparty/README.md
index ac97d9c2a2..38001b8782 100644
--- a/thirdparty/README.md
+++ b/thirdparty/README.md
@@ -17,10 +17,22 @@ Files extracted from upstream source:
- `license.txt`
+## astcenc
+
+- Upstream: https://github.com/ARM-software/astc-encoder
+- Version: 4.3.0 (ec83dda79fcefe07f69cdae7ed980d169bf2c4d4, 2023)
+- License: Apache 2.0
+
+Files extracted from upstream source:
+
+- `astcenc_*` and `astcenc.h` files from `Source`
+- `LICENSE.txt`
+
+
## basis_universal
- Upstream: https://github.com/BinomialLLC/basis_universal
-- Version: git (1531cfaf9ed5232248a0a45736686a849ca3befc, 2022)
+- Version: git (a91e94c8495d7f470d3df326a364d49324cfd4a3, 2022)
- License: Apache 2.0
Files extracted from upstream source:
@@ -44,14 +56,14 @@ Files extracted from upstream source:
## certs
- Upstream: Mozilla, via https://github.com/bagder/ca-bundle
-- Version: git (7f33e7eb8472dbcf31fdcf50cd216c89a282825d, 2022)
+- Version: git (b2f7415648411b6fd7c298c6c92d6552f0165f60, 2022)
- License: MPL 2.0
## cvtt
- Upstream: https://github.com/elasota/ConvectionKernels
-- Version: git (dc2dbbe0ae2cf2be06ef56d1021e2222a56c7fe2, 2021)
+- Version: git (350416daa4e98f1c17ffc273b134d0120a2ef230, 2022)
- License: MIT
Files extracted from upstream source:
@@ -74,7 +86,7 @@ Files extracted from upstream source:
## embree
- Upstream: https://github.com/embree/embree
-- Version: 3.13.1 (12b99393438a4cc9e478e33459eed78bec6233fd, 2021)
+- Version: 3.13.5 (698442324ccddd11725fb8875275dc1384f7fb40, 2022)
- License: Apache 2.0
Files extracted from upstream:
@@ -164,6 +176,7 @@ Files extracted from upstream source:
- `src/` folder, minus the `dlg` and `tools` subfolders
* These files can be removed: `.dat`, `.diff`, `.mk`, `.rc`, `README*`
+ * In `src/gzip/`, remove zlib files (everything but `ftgzip.c` and `ftzconf.h`)
- `include/` folder, minus the `dlg` subfolder
- `LICENSE.TXT` and `docs/FTL.TXT`
@@ -173,10 +186,27 @@ comments. Apply the patches in the `patches/` folder when syncing on newer upstr
commits.
+## glad
+
+- Upstream: https://github.com/Dav1dde/glad
+- Version: 2.0.2 (f237a2bfcec0d9b82b90ec9af4af265c40de7183, 2022)
+- License: CC0 1.0 and Apache 2.0
+
+Files extracted from upstream source:
+- `LICENSE`
+
+Files generated from [upstream web instance](https://gen.glad.sh/):
+- `KHR/khrplatform.h`
+- `gl.c`
+- `glad/gl.h`
+- `glx.c`
+- `glad/glx.h`
+
+
## glslang
- Upstream: https://github.com/KhronosGroup/glslang
-- Version: 11.8.0 (c34bb3b6c55f6ab084124ad964be95a699700d34, 2022)
+- Version: 11.12.0 / sdk-1.3.231.1 (5755de46b07e4374c05fb1081f65f7ae1f8cca81, 2022)
- License: glslang
Version should be kept in sync with the one of the used Vulkan SDK (see `vulkan`
@@ -195,14 +225,13 @@ Files extracted from upstream source:
to `glslang/build_info.h`
- `LICENSE.txt`
- Unnecessary files like `CMakeLists.txt`, `*.m4` and `updateGrammar` removed.
-- Patch in `patches/unused_cleanup.diff` must be applied.
## graphite
- Upstream: https://github.com/silnrsi/graphite
-- Version: 1.3.14 (80c52493ef42e6fe605a69dcddd2a691cd8a1380, 2021)
-- License: MPL-2.0
+- Version: 1.3.14 (27572742003b93dc53dc02c01c237b72c6c25f54, 2022)
+- License: MIT
Files extracted from upstream source:
@@ -214,7 +243,7 @@ Files extracted from upstream source:
## harfbuzz
- Upstream: https://github.com/harfbuzz/harfbuzz
-- Version: 5.2.0 (4a1d891c6317d2c83e5f3c2607ec5f5ccedffcde, 2022)
+- Version: 6.0.0 (afcae83a064843d71d47624bc162e121cc56c08b, 2022)
- License: MIT
Files extracted from upstream source:
@@ -226,7 +255,7 @@ Files extracted from upstream source:
## icu4c
- Upstream: https://github.com/unicode-org/icu
-- Version: 71.1 (c205e7ee49a7086a28b9c275fcfdac9ca3dc815d, 2022)
+- Version: 72.1 (ff3514f257ea10afe7e710e9f946f68d256704b1, 2022)
- License: Unicode
Files extracted from upstream source:
@@ -238,14 +267,14 @@ Files extracted from upstream source:
Files generated from upstream source:
-- the `icudt71l.dat` built with the provided `godot_data.json` config file (see
+- the `icudt72l.dat` built with the provided `godot_data.json` config file (see
https://github.com/unicode-org/icu/blob/master/docs/userguide/icu_data/buildtool.md
for instructions).
- Step 1: Build ICU with default options - `./runConfigureICU {PLATFORM} && make`.
- Step 2: Reconfigure ICU with custom data config - `ICU_DATA_FILTER_FILE={GODOT_SOURCE}/thirdparty/icu4c/godot_data.json ./runConfigureICU {PLATFORM} --with-data-packaging=common`.
- Step 3: Delete `data/out` folder and rebuild data - `cd data && rm -rf ./out && make`.
-- Step 4: Copy `source/data/out/icudt71l.dat` to the `{GODOT_SOURCE}/thirdparty/icu4c/icudt71l.dat`.
+- Step 4: Copy `source/data/out/icudt72l.dat` to the `{GODOT_SOURCE}/thirdparty/icu4c/icudt72l.dat`.
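+
+Collected into a single shell sketch for convenience (same placeholders as the
+steps above; substitute `{PLATFORM}` and `{GODOT_SOURCE}` before running, and
+issue the commands from the ICU `source/` directory):
+
+```sh
+# Step 1: default build.
+./runConfigureICU {PLATFORM} && make
+
+# Step 2: reconfigure with Godot's data filter config.
+ICU_DATA_FILTER_FILE={GODOT_SOURCE}/thirdparty/icu4c/godot_data.json \
+  ./runConfigureICU {PLATFORM} --with-data-packaging=common
+
+# Step 3: delete the generated data and rebuild it.
+(cd data && rm -rf ./out && make)
+
+# Step 4: copy the rebuilt bundle into the Godot tree.
+cp data/out/icudt72l.dat {GODOT_SOURCE}/thirdparty/icu4c/icudt72l.dat
+```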
## jpeg-compressor
@@ -291,18 +320,15 @@ Files extracted from upstream source:
## libtheora
- Upstream: https://www.theora.org
-- Version: 1.1.1 (2010)
+- Version: git (7180717276af1ebc7da15c83162d6c5d6203aabf, 2020)
- License: BSD-3-Clause
Files extracted from upstream source:
-- all .c, .h in lib/
+- all .c, .h in lib/, except arm/ and c64x/ folders
- all .h files in include/theora/ as theora/
- COPYING and LICENSE
-Upstream patches included in the `patches` directory have been applied
-on top of the 1.1.1 source (not included in any stable release yet).
-
## libvorbis
@@ -332,7 +358,7 @@ Files extracted from upstream source:
## mbedtls
- Upstream: https://github.com/Mbed-TLS/mbedtls
-- Version: 2.18.1 (dd79db10014d85b26d11fe57218431f2e5ede6f2, 2022)
+- Version: 2.18.2 (89f040a5c938985c5f30728baed21e49d0846a53, 2022)
- License: Apache 2.0
File extracted from upstream release tarball:
@@ -342,12 +368,10 @@ File extracted from upstream release tarball:
- The `LICENSE` file.
- Applied the patch in `patches/1453.diff` (upstream PR:
https://github.com/ARMmbed/mbedtls/pull/1453).
+- Applied the patch in `patches/windows-arm64-hardclock.diff`.
- Added 2 files `godot_core_mbedtls_platform.c` and `godot_core_mbedtls_config.h`
providing configuration for light bundling with core.
-Some changes have been made in order to fix Windows on ARM build errors.
-They are marked with `// -- GODOT start --` and `// -- GODOT end --`
-
## meshoptimizer
@@ -385,7 +409,7 @@ to solve some MSVC warnings. See the patches in the `patches` directory.
## miniupnpc
- Upstream: https://github.com/miniupnp/miniupnp
-- Version: 2.2.3 (2df8120326ed4246e049a7a6de707539604cd514, 2021)
+- Version: 2.2.4 (7d1d8bc3868b08ad003bad235eee57562b95b76d, 2022)
- License: BSD-3-Clause
Files extracted from upstream source:
@@ -403,12 +427,14 @@ that file when upgrading.
## minizip
- Upstream: https://www.zlib.net
-- Version: 1.2.12 (zlib contrib, 2022)
+- Version: 1.2.13 (zlib contrib, 2022)
- License: zlib
Files extracted from the upstream source:
-- contrib/minizip/{crypt.h,ioapi.{c,h},unzip.{c,h},zip.{c,h}}
+- From `contrib/minizip`:
+ `{crypt.h,ioapi.{c,h},unzip.{c,h},zip.{c,h}}`
+ `MiniZip64_info.txt`
Important: Some files have Godot-made changes for use in core/io.
They are marked with `/* GODOT start */` and `/* GODOT end */`
@@ -535,7 +561,7 @@ Patch files are provided in `oidn/patches/`.
## openxr
- Upstream: https://github.com/KhronosGroup/OpenXR-SDK
-- Version: 1.0.25 (c16a18c99740ea5dd251e3af117e0e5aea4ceaa9, 2022)
+- Version: 1.0.26 (e2da9ce83a4388c9622da328bf48548471261290, 2022)
- License: Apache 2.0
Files extracted from upstream source:
@@ -578,7 +604,7 @@ in 10.40, it can be found in the `patches` folder.
## recastnavigation
- Upstream: https://github.com/recastnavigation/recastnavigation
-- Version: git (5a870d427e47abd4a8e4ce58a95582ec049434d5, 2022)
+- Version: git (4fef0446609b23d6ac180ed822817571525528a1, 2022)
- License: zlib
Files extracted from upstream source:
@@ -606,11 +632,10 @@ Godot. See the patch in the `patches` folder for details.
## spirv-reflect
- Upstream: https://github.com/KhronosGroup/SPIRV-Reflect
-- Version: git (1ef99b09fa7ce5aee2c5cf70c61a4f7458d27e09, 2022)
+- Version: sdk-1.3.231.1 (b68b5a8a5d8ab5fce79e6596f3a731291046393a, 2022)
- License: Apache 2.0
-Does not track Vulkan SDK releases closely, but try to package a commit newer
-than the matching glslang and Vulkan headers, just in case.
+Now tracks Vulkan SDK releases, so keep it in sync with volk / vulkan.
Files extracted from upstream source:
@@ -620,7 +645,7 @@ Files extracted from upstream source:
Some downstream changes have been made and are identified by
`// -- GODOT begin --` and `// -- GODOT end --` comments.
-They can be reapplied using the patch included in the `patches`
+They can be reapplied using the patches included in the `patches`
folder.
@@ -656,7 +681,7 @@ instead of `miniz.h` as an external dependency.
## thorvg
- Upstream: https://github.com/Samsung/thorvg
-- Version: 0.8.1 (c4ccb1078f4390ec749ab8e05ba7e9e35f81285f, 2022)
+- Version: 0.8.3 (a0fcf51f80a75f63a066df085f60cdaf715188b6, 2022)
- License: MIT
Files extracted from upstream source:
@@ -685,7 +710,7 @@ folder.
## volk
- Upstream: https://github.com/zeux/volk
-- Version: 1.3.204 (92ba7c9f112a82cecf452ebf4b7c46f149a5799e, 2022)
+- Version: sdk-1.3.231.1 (f29df7d2834c434b39169d5b2e4dde8c05a5adc1, 2022)
- License: MIT
Unless there is a specific reason to package a more recent version, please stick
@@ -695,7 +720,6 @@ to tagged releases. All Vulkan libraries and headers should be kept in sync so:
- Update glslang (see "glslang").
- Update spirv-reflect (see "spirv-reflect").
-
Files extracted from upstream source:
- `volk.h`, `volk.c`
@@ -705,7 +729,7 @@ Files extracted from upstream source:
## vulkan
- Upstream: https://github.com/KhronosGroup/Vulkan-Headers
-- Version: 1.3.204 (1dace16d8044758d32736eb59802d171970e9448, 2022)
+- Version: sdk-1.3.231.1 (98f440ce6868c94f5ec6e198cc1adda4760e8849, 2022)
- License: Apache 2.0
The vendored version should be kept in sync with volk, see above.
@@ -757,12 +781,13 @@ Files extracted from upstream source:
## zlib
- Upstream: https://www.zlib.net
-- Version: 1.2.12 (2022)
+- Version: 1.2.13 (2022)
- License: zlib
Files extracted from upstream source:
-- all .c and .h files
+- All `*.c` and `*.h` files
+- `LICENSE`
## zstd
diff --git a/thirdparty/astcenc/LICENSE.txt b/thirdparty/astcenc/LICENSE.txt
new file mode 100644
index 0000000000..b82735a310
--- /dev/null
+++ b/thirdparty/astcenc/LICENSE.txt
@@ -0,0 +1,175 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
diff --git a/thirdparty/astcenc/astcenc.h b/thirdparty/astcenc/astcenc.h
new file mode 100644
index 0000000000..70ae783373
--- /dev/null
+++ b/thirdparty/astcenc/astcenc.h
@@ -0,0 +1,815 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2020-2023 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+/**
+ * @brief The core astcenc codec library interface.
+ *
+ * This interface is the entry point to the core astcenc codec. It aims to be easy to use for
+ * non-experts, but also to allow experts to have fine control over the compressor heuristics if
+ * needed. The core codec only handles compression and decompression, transferring all inputs and
+ * outputs via memory buffers. To catch obvious input/output buffer sizing issues, which can cause
+ * security and stability problems, all transfer buffers are explicitly sized.
+ *
+ * While the aim is that we keep this interface mostly stable, it should be viewed as a mutable
+ * interface tied to a specific source version. We are not trying to maintain backwards
+ * compatibility across codec versions.
+ *
+ * The API state management is based around an explicit context object, which owns all of the
+ * allocated memory resources needed to compress and decompress a single image. A context can be
+ * used to sequentially compress multiple images using the same configuration, allowing setup
+ * overheads to be amortized over multiple images, which is particularly important when images are
+ * small.
+ *
+ * Multi-threading can be used two ways.
+ *
+ * * An application wishing to process multiple images in parallel can allocate multiple
+ * contexts and assign each context to a thread.
+ * * An application wishing to process a single image using multiple threads can configure
+ * contexts for multi-threaded use, and invoke astcenc_compress/decompress() once per thread
+ * for faster processing. The caller is responsible for creating the worker threads, and
+ * synchronizing between images.
+ *
+ * Threading
+ * =========
+ *
+ * In pseudo-code, the usage for manual user threading looks like this:
+ *
+ * // Configure the compressor run
+ * astcenc_config my_config;
+ * astcenc_config_init(..., &my_config);
+ *
+ * // Power users can tweak <my_config> settings here ...
+ *
+ * // Allocate working state given config and thread_count
+ * astcenc_context* my_context;
+ * astcenc_context_alloc(&my_config, thread_count, &my_context);
+ *
+ * // Compress each image using these config settings
+ * foreach image:
+ * // For each thread in the thread pool
+ * for i in range(0, thread_count):
+ * astcenc_compress_image(my_context, &my_input, my_output, i);
+ *
+ * astcenc_compress_reset(my_context);
+ *
+ * // Clean up
+ * astcenc_context_free(my_context);
+ *
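+ * A concrete C++ sketch of the per-image, per-thread loop above (illustrative only; the image,
+ * swizzle, and buffer variables are placeholders, and error handling is omitted):
+ *
+ *     std::vector<std::thread> workers;
+ *     for (unsigned int i = 0; i < thread_count; i++)
+ *     {
+ *         workers.emplace_back([&, i] {
+ *             astcenc_compress_image(my_context, &my_image, &swizzle, data_out, data_len, i);
+ *         });
+ *     }
+ *
+ *     for (auto& worker : workers)
+ *     {
+ *         worker.join();
+ *     }
+ *
+ *     astcenc_compress_reset(my_context);
+ *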
+ * Images
+ * ======
+ *
+ * The codec supports compressing single images, which can be either 2D images or volumetric 3D
+ * images. Calling code is responsible for any handling of aggregate types, such as mipmap chains,
+ * texture arrays, or sliced 3D textures.
+ *
+ * Images are passed in as an astcenc_image structure. Inputs can be either 8-bit unorm, 16-bit
+ * half-float, or 32-bit float, as indicated by the data_type field.
+ *
+ * Images can be any dimension; there is no requirement to be a multiple of the ASTC block size.
+ *
+ * Data is always passed in as 4 color components, and accessed as an array of 2D image slices. Data
+ * within an image slice is always tightly packed without padding. Addressing looks like this:
+ *
+ * data[z_coord][y_coord * x_dim * 4 + x_coord * 4 ] // Red
+ * data[z_coord][y_coord * x_dim * 4 + x_coord * 4 + 1] // Green
+ * data[z_coord][y_coord * x_dim * 4 + x_coord * 4 + 2] // Blue
+ * data[z_coord][y_coord * x_dim * 4 + x_coord * 4 + 3] // Alpha
+ *
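+ * For example (an illustrative sketch, not upstream documentation), fetching the green component
+ * of one texel from an @c ASTCENC_TYPE_U8 image follows the addressing scheme above:
+ *
+ *     const uint8_t* slice = static_cast<const uint8_t*>(image.data[z_coord]);
+ *     uint8_t green = slice[y_coord * image.dim_x * 4 + x_coord * 4 + 1];
+ *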
+ * Common compressor usage
+ * =======================
+ *
+ * One of the most important things for coding image quality is to align the input data component
+ * count with the ASTC color endpoint mode. This avoids wasting bits encoding components you don't
+ * actually need in the endpoint colors.
+ *
+ * | Input data | Encoding swizzle | Sampling swizzle |
+ * | ------------ | ---------------- | ---------------- |
+ * | 1 component | RRR1 | .[rgb] |
+ * | 2 components | RRRG | .[rgb]a |
+ * | 3 components | RGB1 | .rgb |
+ * | 4 components | RGBA | .rgba |
+ *
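+ * As a small illustrative sketch (not upstream documentation), the RRRG encoding swizzle for two
+ * component data maps onto the @c astcenc_swizzle type declared later in this header as:
+ *
+ *     astcenc_swizzle swz_encode { ASTCENC_SWZ_R, ASTCENC_SWZ_R, ASTCENC_SWZ_R, ASTCENC_SWZ_G };
+ *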
+ * The 1 and 2 component modes recommend sampling from "g" to recover the luminance value as this
+ * provides best compatibility with other texture formats where the green component may be stored at
+ * higher precision than the others, such as RGB565. For ASTC any of the RGB components can be used;
+ * the luminance endpoint component will be returned for all three.
+ *
+ * When using the normal map compression mode ASTC will store normals as a two component X+Y map.
+ * Input images must contain unit-length normalized vectors and should be passed in using a two
+ * component swizzle. The astcenc command line tool defaults to an RRRG swizzle, but some
+ * developers prefer to use GGGR for compatibility with BC5n, which works just as well. The Z
+ * component can be recovered programmatically in shader code, using knowledge that the vector is
+ * unit length and that Z must be positive for a tangent-space normal map.
+ *
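+ * As an illustrative sketch (not upstream documentation), assuming an RRRG encoding swizzle and a
+ * unorm texture that maps [0,1] back to [-1,1], the reconstruction in C-like code is:
+ *
+ *     float x = texel.g * 2.0f - 1.0f;
+ *     float y = texel.a * 2.0f - 1.0f;
+ *     float z = sqrtf(fmaxf(0.0f, 1.0f - x * x - y * y));
+ *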
+ * Decompress-only usage
+ * =====================
+ *
+ * For some use cases it is useful to have a cut-down context and/or library which supports
+ * decompression but not compression.
+ *
+ * A context can be made decompress-only using the ASTCENC_FLG_DECOMPRESS_ONLY flag when the context
+ * is allocated. These contexts have a lower dynamic memory footprint than a full context.
+ *
+ * The entire library can be made decompress-only by building the files with the define
+ * ASTCENC_DECOMPRESS_ONLY set. In this build the context will be smaller, and the library will
+ * exclude the functionality which is only needed for compression. This reduces the binary size by
+ * ~180KB. For these builds contexts must be created with the ASTCENC_FLG_DECOMPRESS_ONLY flag.
+ *
+ * Note that context structures returned by a library built as decompress-only are incompatible with
+ * a library built with compression included, and vice versa, as they have different sizes and
+ * memory layout.
+ *
+ * Self-decompress-only usage
+ * ==========================
+ *
+ * ASTC is a complex format with a large search space. The parts of this search space that are
+ * searched is determined by heuristics that are, in part, tied to the quality level used when
+ * creating the context.
+ *
+ * A normal context is capable of decompressing any ASTC texture, including those generated by other
+ * compressors with unknown heuristics. This is the most flexible implementation, but forces the
+ * data tables used by the codec to include entries that are not needed during compression. This
+ * can slow down context creation by a significant amount, especially for the faster compression
+ * modes where few data table entries are actually used. To optimize this use case the context can
+ * be created with the ASTCENC_FLG_SELF_DECOMPRESS_ONLY flag. This tells the compressor that it will
+ * only be asked to decompress images that it compressed itself, allowing the data tables to
+ * exclude entries that are not needed by the current compression configuration. This reduces the
+ * size of the context data tables in memory and improves context creation performance. Note that,
+ * as of the 3.6 release, this flag no longer affects compression performance.
+ *
+ * Using this flag while attempting to decompress a valid image which was created by another
+ * compressor, or even another astcenc compressor version or configuration, may result in blocks
+ * returning as solid magenta or NaN value error blocks.
+ */
+
+#ifndef ASTCENC_INCLUDED
+#define ASTCENC_INCLUDED
+
+#include <cstddef>
+#include <cstdint>
+
+#if defined(ASTCENC_DYNAMIC_LIBRARY)
+ #if defined(_MSC_VER)
+ #define ASTCENC_PUBLIC extern "C" __declspec(dllexport)
+ #else
+ #define ASTCENC_PUBLIC extern "C" __attribute__ ((visibility ("default")))
+ #endif
+#else
+ #define ASTCENC_PUBLIC
+#endif
+
+/* ============================================================================
+ Data declarations
+============================================================================ */
+
+/**
+ * @brief An opaque structure; see astcenc_internal.h for definition.
+ */
+struct astcenc_context;
+
+/**
+ * @brief A codec API error code.
+ */
+enum astcenc_error {
+ /** @brief The call was successful. */
+ ASTCENC_SUCCESS = 0,
+ /** @brief The call failed due to low memory, or undersized I/O buffers. */
+ ASTCENC_ERR_OUT_OF_MEM,
+ /** @brief The call failed due to the build using fast math. */
+ ASTCENC_ERR_BAD_CPU_FLOAT,
+ /** @brief The call failed due to the build using an unsupported ISA. */
+ ASTCENC_ERR_BAD_CPU_ISA,
+ /** @brief The call failed due to an out-of-spec parameter. */
+ ASTCENC_ERR_BAD_PARAM,
+ /** @brief The call failed due to an out-of-spec block size. */
+ ASTCENC_ERR_BAD_BLOCK_SIZE,
+ /** @brief The call failed due to an out-of-spec color profile. */
+ ASTCENC_ERR_BAD_PROFILE,
+ /** @brief The call failed due to an out-of-spec quality value. */
+ ASTCENC_ERR_BAD_QUALITY,
+ /** @brief The call failed due to an out-of-spec component swizzle. */
+ ASTCENC_ERR_BAD_SWIZZLE,
+ /** @brief The call failed due to an out-of-spec flag set. */
+ ASTCENC_ERR_BAD_FLAGS,
+ /** @brief The call failed due to the context not supporting the operation. */
+ ASTCENC_ERR_BAD_CONTEXT,
+ /** @brief The call failed due to unimplemented functionality. */
+ ASTCENC_ERR_NOT_IMPLEMENTED,
+#if defined(ASTCENC_DIAGNOSTICS)
+ /** @brief The call failed due to an issue with diagnostic tracing. */
+ ASTCENC_ERR_DTRACE_FAILURE,
+#endif
+};
+
+/**
+ * @brief A codec color profile.
+ */
+enum astcenc_profile {
+ /** @brief The LDR sRGB color profile. */
+ ASTCENC_PRF_LDR_SRGB = 0,
+ /** @brief The LDR linear color profile. */
+ ASTCENC_PRF_LDR,
+ /** @brief The HDR RGB with LDR alpha color profile. */
+ ASTCENC_PRF_HDR_RGB_LDR_A,
+ /** @brief The HDR RGBA color profile. */
+ ASTCENC_PRF_HDR
+};
+
+/** @brief The fastest, lowest quality, search preset. */
+static const float ASTCENC_PRE_FASTEST = 0.0f;
+
+/** @brief The fast search preset. */
+static const float ASTCENC_PRE_FAST = 10.0f;
+
+/** @brief The medium quality search preset. */
+static const float ASTCENC_PRE_MEDIUM = 60.0f;
+
+/** @brief The thorough quality search preset. */
+static const float ASTCENC_PRE_THOROUGH = 98.0f;
+
+/** @brief The thorough quality search preset. */
+static const float ASTCENC_PRE_VERYTHOROUGH = 99.0f;
+
+/** @brief The exhaustive, highest quality, search preset. */
+static const float ASTCENC_PRE_EXHAUSTIVE = 100.0f;
+
+/**
+ * @brief A codec component swizzle selector.
+ */
+enum astcenc_swz
+{
+ /** @brief Select the red component. */
+ ASTCENC_SWZ_R = 0,
+ /** @brief Select the green component. */
+ ASTCENC_SWZ_G = 1,
+ /** @brief Select the blue component. */
+ ASTCENC_SWZ_B = 2,
+ /** @brief Select the alpha component. */
+ ASTCENC_SWZ_A = 3,
+ /** @brief Use a constant zero component. */
+ ASTCENC_SWZ_0 = 4,
+ /** @brief Use a constant one component. */
+ ASTCENC_SWZ_1 = 5,
+ /** @brief Use a reconstructed normal vector Z component. */
+ ASTCENC_SWZ_Z = 6
+};
+
+/**
+ * @brief A texel component swizzle.
+ */
+struct astcenc_swizzle
+{
+ /** @brief The red component selector. */
+ astcenc_swz r;
+ /** @brief The green component selector. */
+ astcenc_swz g;
+ /** @brief The blue component selector. */
+ astcenc_swz b;
+ /** @brief The alpha component selector. */
+ astcenc_swz a;
+};
+
+/**
+ * @brief A texel component data format.
+ */
+enum astcenc_type
+{
+ /** @brief Unorm 8-bit data per component. */
+ ASTCENC_TYPE_U8 = 0,
+ /** @brief 16-bit float per component. */
+ ASTCENC_TYPE_F16 = 1,
+ /** @brief 32-bit float per component. */
+ ASTCENC_TYPE_F32 = 2
+};
+
+/**
+ * @brief Enable normal map compression.
+ *
+ * Input data will be treated as a two component normal map, storing X and Y, and the codec will
+ * optimize for angular error rather than simple linear PSNR. In this mode the input swizzle should
+ * be e.g. rrrg (the default ordering for ASTC normals on the command line) or gggr (the ordering
+ * used by BC5n).
+ */
+static const unsigned int ASTCENC_FLG_MAP_NORMAL = 1 << 0;
+
+/**
+ * @brief Enable alpha weighting.
+ *
+ * The input alpha value is used for transparency, so errors in the RGB components are weighted by
+ * the transparency level. This allows the codec to more accurately encode the alpha value in areas
+ * where the color value is less significant.
+ */
+static const unsigned int ASTCENC_FLG_USE_ALPHA_WEIGHT = 1 << 2;
+
+/**
+ * @brief Enable perceptual error metrics.
+ *
+ * This mode enables perceptual compression mode, which will optimize for perceptual error rather
+ * than best PSNR. Only some input modes support perceptual error metrics.
+ */
+static const unsigned int ASTCENC_FLG_USE_PERCEPTUAL = 1 << 3;
+
+/**
+ * @brief Create a decompression-only context.
+ *
+ * This mode disables support for compression. This enables context allocation to skip some
+ * transient buffer allocation, resulting in lower memory usage.
+ */
+static const unsigned int ASTCENC_FLG_DECOMPRESS_ONLY = 1 << 4;
+
+/**
+ * @brief Create a self-decompression context.
+ *
+ * This mode configures the compressor so that it is only guaranteed to be able to decompress images
+ * that were actually created using the current context. This is the common case for compression use
+ * cases, and setting this flag enables additional optimizations, but does mean that the context
+ * cannot reliably decompress arbitrary ASTC images.
+ */
+static const unsigned int ASTCENC_FLG_SELF_DECOMPRESS_ONLY = 1 << 5;
+
+/**
+ * @brief Enable RGBM map compression.
+ *
+ * Input data will be treated as HDR data that has been stored in an LDR RGBM-encoded wrapper
+ * format. Data must be preprocessed by the user to be in LDR RGBM format before calling the
+ * compression function, this flag is only used to control the use of RGBM-specific heuristics and
+ * error metrics.
+ *
+ * IMPORTANT: The ASTC format is prone to bad failure modes with unconstrained RGBM data; very small
+ * M values can round to zero due to quantization and result in black or white pixels. It is highly
+ * recommended that the minimum value of M used in the encoding is kept above a lower threshold (try
+ * 16 or 32). Applying this threshold reduces the number of very dark colors that can be
+ * represented, but is still higher precision than 8-bit LDR.
+ *
+ * When this flag is set the value of @c rgbm_m_scale in the context must be set to the RGBM scale
+ * factor used during reconstruction. This defaults to 5 when in RGBM mode.
+ *
+ * It is recommended that the value of @c cw_a_weight is set to twice the value of the multiplier
+ * scale, ensuring that the M value is accurately encoded. This defaults to 10 when in RGBM mode,
+ * matching the default scale factor.
+ */
+static const unsigned int ASTCENC_FLG_MAP_RGBM = 1 << 6;
+
+/**
+ * @brief The bit mask of all valid flags.
+ */
+static const unsigned int ASTCENC_ALL_FLAGS =
+ ASTCENC_FLG_MAP_NORMAL |
+ ASTCENC_FLG_MAP_RGBM |
+ ASTCENC_FLG_USE_ALPHA_WEIGHT |
+ ASTCENC_FLG_USE_PERCEPTUAL |
+ ASTCENC_FLG_DECOMPRESS_ONLY |
+ ASTCENC_FLG_SELF_DECOMPRESS_ONLY;
+
+/**
+ * @brief The config structure.
+ *
+ * This structure will initially be populated by a call to astcenc_config_init, but power users may
+ * modify it before calling astcenc_context_alloc. See astcenccli_toplevel_help.cpp for full user
+ * documentation of the power-user settings.
+ *
+ * Note for any settings which are associated with a specific color component, the value in the
+ * config applies to the component that exists after any compression data swizzle is applied.
+ */
+struct astcenc_config
+{
+ /** @brief The color profile. */
+ astcenc_profile profile;
+
+	/** @brief The set of enabled flags. */
+ unsigned int flags;
+
+ /** @brief The ASTC block size X dimension. */
+ unsigned int block_x;
+
+ /** @brief The ASTC block size Y dimension. */
+ unsigned int block_y;
+
+ /** @brief The ASTC block size Z dimension. */
+ unsigned int block_z;
+
+ /** @brief The red component weight scale for error weighting (-cw). */
+ float cw_r_weight;
+
+ /** @brief The green component weight scale for error weighting (-cw). */
+ float cw_g_weight;
+
+ /** @brief The blue component weight scale for error weighting (-cw). */
+ float cw_b_weight;
+
+ /** @brief The alpha component weight scale for error weighting (-cw). */
+ float cw_a_weight;
+
+ /**
+ * @brief The radius for any alpha-weight scaling (-a).
+ *
+ * It is recommended that this is set to 1 when using FLG_USE_ALPHA_WEIGHT on a texture that
+ * will be sampled using linear texture filtering to minimize color bleed out of transparent
+ * texels that are adjacent to non-transparent texels.
+ */
+ unsigned int a_scale_radius;
+
+ /** @brief The RGBM scale factor for the shared multiplier (-rgbm). */
+ float rgbm_m_scale;
+
+ /**
+ * @brief The maximum number of partitions searched (-partitioncountlimit).
+ *
+ * Valid values are between 1 and 4.
+ */
+ unsigned int tune_partition_count_limit;
+
+ /**
+ * @brief The maximum number of partitions searched (-2partitionindexlimit).
+ *
+ * Valid values are between 1 and 1024.
+ */
+ unsigned int tune_2partition_index_limit;
+
+ /**
+ * @brief The maximum number of partitions searched (-3partitionindexlimit).
+ *
+ * Valid values are between 1 and 1024.
+ */
+ unsigned int tune_3partition_index_limit;
+
+ /**
+ * @brief The maximum number of partitions searched (-4partitionindexlimit).
+ *
+ * Valid values are between 1 and 1024.
+ */
+ unsigned int tune_4partition_index_limit;
+
+ /**
+ * @brief The maximum centile for block modes searched (-blockmodelimit).
+ *
+ * Valid values are between 1 and 100.
+ */
+ unsigned int tune_block_mode_limit;
+
+ /**
+ * @brief The maximum iterative refinements applied (-refinementlimit).
+ *
+ * Valid values are between 1 and N; there is no technical upper limit
+ * but little benefit is expected after N=4.
+ */
+ unsigned int tune_refinement_limit;
+
+ /**
+ * @brief The number of trial candidates per mode search (-candidatelimit).
+ *
+ * Valid values are between 1 and TUNE_MAX_TRIAL_CANDIDATES (default 4).
+ */
+ unsigned int tune_candidate_limit;
+
+ /**
+ * @brief The number of trial partitionings per search (-2partitioncandidatelimit).
+ *
+ * Valid values are between 1 and TUNE_MAX_PARTITIONING_CANDIDATES.
+ */
+ unsigned int tune_2partitioning_candidate_limit;
+
+ /**
+ * @brief The number of trial partitionings per search (-3partitioncandidatelimit).
+ *
+ * Valid values are between 1 and TUNE_MAX_PARTITIONING_CANDIDATES.
+ */
+ unsigned int tune_3partitioning_candidate_limit;
+
+ /**
+ * @brief The number of trial partitionings per search (-4partitioncandidatelimit).
+ *
+ * Valid values are between 1 and TUNE_MAX_PARTITIONING_CANDIDATES.
+ */
+ unsigned int tune_4partitioning_candidate_limit;
+
+ /**
+ * @brief The dB threshold for stopping block search (-dblimit).
+ *
+ * This option is ineffective for HDR textures.
+ */
+ float tune_db_limit;
+
+ /**
+ * @brief The amount of MSE overshoot needed to early-out trials.
+ *
+ * The first early-out is for 1 partition, 1 plane trials, where we try a minimal encode using
+ * the high probability block modes. This can short-cut compression for simple blocks.
+ *
+ * The second early-out is for refinement trials, where we can exit refinement once quality is
+ * reached.
+ */
+ float tune_mse_overshoot;
+
+ /**
+ * @brief The threshold for skipping 3.1/4.1 trials (-2partitionlimitfactor).
+ *
+ * This option is further scaled for normal maps, so it skips less often.
+ */
+ float tune_2_partition_early_out_limit_factor;
+
+ /**
+ * @brief The threshold for skipping 4.1 trials (-3partitionlimitfactor).
+ *
+ * This option is further scaled for normal maps, so it skips less often.
+ */
+ float tune_3_partition_early_out_limit_factor;
+
+ /**
+ * @brief The threshold for skipping two weight planes (-2planelimitcorrelation).
+ *
+ * This option is ineffective for normal maps.
+ */
+ float tune_2_plane_early_out_limit_correlation;
+
+#if defined(ASTCENC_DIAGNOSTICS)
+ /**
+ * @brief The path to save the diagnostic trace data to.
+ *
+ * This option is not part of the public API, and requires special builds
+ * of the library.
+ */
+ const char* trace_file_path;
+#endif
+};
+
+/**
+ * @brief An uncompressed 2D or 3D image.
+ *
+ * 3D images are passed in as an array of 2D slices. Each slice has identical
+ * size and color format.
+ */
+struct astcenc_image
+{
+ /** @brief The X dimension of the image, in texels. */
+ unsigned int dim_x;
+
+ /** @brief The Y dimension of the image, in texels. */
+ unsigned int dim_y;
+
+ /** @brief The Z dimension of the image, in texels. */
+ unsigned int dim_z;
+
+ /** @brief The data type per component. */
+ astcenc_type data_type;
+
+ /** @brief The array of 2D slices, of length @c dim_z. */
+ void** data;
+};
+
+/**
+ * @brief A block encoding metadata query result.
+ *
+ * If the block is an error block or a constant color block, all fields other than
+ * the profile, block dimensions, and error/constant indicator will be zero.
+ */
+struct astcenc_block_info
+{
+ /** @brief The block encoding color profile. */
+ astcenc_profile profile;
+
+ /** @brief The number of texels in the X dimension. */
+ unsigned int block_x;
+
+ /** @brief The number of texels in the Y dimension. */
+ unsigned int block_y;
+
+	/** @brief The number of texels in the Z dimension. */
+ unsigned int block_z;
+
+ /** @brief The number of texels in the block. */
+ unsigned int texel_count;
+
+ /** @brief True if this block is an error block. */
+ bool is_error_block;
+
+ /** @brief True if this block is a constant color block. */
+ bool is_constant_block;
+
+ /** @brief True if this block is an HDR block. */
+ bool is_hdr_block;
+
+ /** @brief True if this block uses two weight planes. */
+ bool is_dual_plane_block;
+
+ /** @brief The number of partitions if not constant color. */
+ unsigned int partition_count;
+
+ /** @brief The partition index if 2 - 4 partitions used. */
+ unsigned int partition_index;
+
+ /** @brief The component index of the second plane if dual plane. */
+ unsigned int dual_plane_component;
+
+ /** @brief The color endpoint encoding mode for each partition. */
+ unsigned int color_endpoint_modes[4];
+
+ /** @brief The number of color endpoint quantization levels. */
+ unsigned int color_level_count;
+
+ /** @brief The number of weight quantization levels. */
+ unsigned int weight_level_count;
+
+ /** @brief The number of weights in the X dimension. */
+ unsigned int weight_x;
+
+ /** @brief The number of weights in the Y dimension. */
+ unsigned int weight_y;
+
+ /** @brief The number of weights in the Z dimension. */
+ unsigned int weight_z;
+
+ /** @brief The unpacked color endpoints for each partition. */
+ float color_endpoints[4][2][4];
+
+ /** @brief The per-texel interpolation weights for the block. */
+ float weight_values_plane1[216];
+
+ /** @brief The per-texel interpolation weights for the block. */
+ float weight_values_plane2[216];
+
+ /** @brief The per-texel partition assignments for the block. */
+ uint8_t partition_assignment[216];
+};
+
+/**
+ * Populate a codec config based on default settings.
+ *
+ * Power users can edit the returned config struct to fine tune before allocating the context.
+ *
+ * @param profile Color profile.
+ * @param block_x ASTC block size X dimension.
+ * @param block_y ASTC block size Y dimension.
+ * @param block_z ASTC block size Z dimension.
+ * @param quality Search quality preset / effort level. Either an
+ * @c ASTCENC_PRE_* value, or an effort level between 0
+ * and 100. Performance is not linear between 0 and 100.
+ *
+ * @param flags A valid set of @c ASTCENC_FLG_* flag bits.
+ * @param[out] config Output config struct to populate.
+ *
+ * @return @c ASTCENC_SUCCESS on success, or an error if the inputs are invalid
+ * either individually, or in combination.
+ */
+ASTCENC_PUBLIC astcenc_error astcenc_config_init(
+ astcenc_profile profile,
+ unsigned int block_x,
+ unsigned int block_y,
+ unsigned int block_z,
+ float quality,
+ unsigned int flags,
+ astcenc_config* config);
+
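+// A minimal usage sketch (not part of the upstream header): populate a config for 6x6 LDR blocks
+// at medium quality; power users can then tweak the struct before allocating a context.
+//
+//     astcenc_config config;
+//     astcenc_error status = astcenc_config_init(
+//         ASTCENC_PRF_LDR, 6, 6, 1, ASTCENC_PRE_MEDIUM, 0, &config);
+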
+/**
+ * @brief Allocate a new codec context based on a config.
+ *
+ * This function allocates all of the memory resources and threads needed by the codec. This can be
+ * slow, so it is recommended that contexts are reused to serially compress or decompress multiple
+ * images to amortize setup cost.
+ *
+ * Contexts can be allocated to support only decompression using the @c ASTCENC_FLG_DECOMPRESS_ONLY
+ * flag when creating the configuration. The compression functions will fail if invoked. For a
+ * decompress-only library build the @c ASTCENC_FLG_DECOMPRESS_ONLY flag must be set when creating
+ * any context.
+ *
+ * @param[in] config Codec config.
+ * @param thread_count Thread count to configure for.
+ * @param[out] context Location to store an opaque context pointer.
+ *
+ * @return @c ASTCENC_SUCCESS on success, or an error if context creation failed.
+ */
+ASTCENC_PUBLIC astcenc_error astcenc_context_alloc(
+ const astcenc_config* config,
+ unsigned int thread_count,
+ astcenc_context** context);
+
+/**
+ * @brief Compress an image.
+ *
+ * A single context can only compress or decompress a single image at a time.
+ *
+ * For a context configured for multi-threading, any set of the N threads can call this function.
+ * Work will be dynamically scheduled across the threads available. Each thread must have a unique
+ * @c thread_index.
+ *
+ * @param context Codec context.
+ * @param[in,out] image An input image, in 2D slices.
+ * @param swizzle Compression data swizzle, applied before compression.
+ * @param[out] data_out Pointer to output data array.
+ * @param data_len Length of the output data array.
+ * @param thread_index Thread index [0..N-1] of calling thread.
+ *
+ * @return @c ASTCENC_SUCCESS on success, or an error if compression failed.
+ */
+ASTCENC_PUBLIC astcenc_error astcenc_compress_image(
+ astcenc_context* context,
+ astcenc_image* image,
+ const astcenc_swizzle* swizzle,
+ uint8_t* data_out,
+ size_t data_len,
+ unsigned int thread_index);
+
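+// A buffer sizing sketch (not part of the upstream header): each compressed ASTC block occupies
+// 16 bytes, so a 2D image needs one block per block-sized tile of the image.
+//
+//     size_t blocks_x = (image.dim_x + config.block_x - 1) / config.block_x;
+//     size_t blocks_y = (image.dim_y + config.block_y - 1) / config.block_y;
+//     size_t data_len = blocks_x * blocks_y * 16;
+//     std::vector<uint8_t> data_out(data_len);
+//     astcenc_compress_image(context, &image, &swizzle, data_out.data(), data_len, 0);
+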
+/**
+ * @brief Reset the codec state for a new compression.
+ *
+ * The caller is responsible for synchronizing threads in the worker thread pool. This function must
+ * only be called when all threads have exited the @c astcenc_compress_image() function for image N,
+ * but before any thread enters it for image N + 1.
+ *
+ * Calling this is not required (but won't hurt) if the context was created for single threaded use.
+ *
+ * @param context Codec context.
+ *
+ * @return @c ASTCENC_SUCCESS on success, or an error if reset failed.
+ */
+ASTCENC_PUBLIC astcenc_error astcenc_compress_reset(
+ astcenc_context* context);
+
+/**
+ * @brief Decompress an image.
+ *
+ * @param context Codec context.
+ * @param[in] data Pointer to compressed data.
+ * @param data_len Length of the compressed data, in bytes.
+ * @param[in,out] image_out Output image.
+ * @param swizzle Decompression data swizzle, applied after decompression.
+ * @param thread_index Thread index [0..N-1] of calling thread.
+ *
+ * @return @c ASTCENC_SUCCESS on success, or an error if decompression failed.
+ */
+ASTCENC_PUBLIC astcenc_error astcenc_decompress_image(
+ astcenc_context* context,
+ const uint8_t* data,
+ size_t data_len,
+ astcenc_image* image_out,
+ const astcenc_swizzle* swizzle,
+ unsigned int thread_index);
+
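+// A decompression sketch (not part of the upstream header): the caller allocates and describes
+// the output image, including its slice buffers, before calling the decompressor.
+//
+//     astcenc_decompress_image(context, data, data_len, &image_out, &swizzle, 0);
+//     astcenc_decompress_reset(context);
+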
+/**
+ * @brief Reset the codec state for a new decompression.
+ *
+ * The caller is responsible for synchronizing threads in the worker thread pool. This function must
+ * only be called when all threads have exited the @c astcenc_decompress_image() function for image
+ * N, but before any thread enters it for image N + 1.
+ *
+ * Calling this is not required (but won't hurt) if the context was created for single threaded use.
+ *
+ * @param context Codec context.
+ *
+ * @return @c ASTCENC_SUCCESS on success, or an error if reset failed.
+ */
+ASTCENC_PUBLIC astcenc_error astcenc_decompress_reset(
+ astcenc_context* context);
+
+/**
+ * Free the compressor context.
+ *
+ * @param context The codec context.
+ */
+ASTCENC_PUBLIC void astcenc_context_free(
+ astcenc_context* context);
+
+/**
+ * @brief Provide a high level summary of a block's encoding.
+ *
+ * This feature is primarily useful for codec developers but may be useful for developers building
+ * advanced content packaging pipelines.
+ *
+ * @param context Codec context.
+ * @param data One block of compressed ASTC data.
+ * @param info The output info structure to populate.
+ *
+ * @return @c ASTCENC_SUCCESS if the block was decoded, or an error otherwise. Note that this
+ * function will return success even if the block itself was an error block encoding, as the
+ * decode was correctly handled.
+ */
+ASTCENC_PUBLIC astcenc_error astcenc_get_block_info(
+ astcenc_context* context,
+ const uint8_t data[16],
+ astcenc_block_info* info);
+
+/**
+ * @brief Get a printable string for specific status code.
+ *
+ * @param status The status value.
+ *
+ * @return A human readable nul-terminated string.
+ */
+ASTCENC_PUBLIC const char* astcenc_get_error_string(
+ astcenc_error status);
+
+#endif
diff --git a/thirdparty/astcenc/astcenc_averages_and_directions.cpp b/thirdparty/astcenc/astcenc_averages_and_directions.cpp
new file mode 100644
index 0000000000..d1f003844a
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_averages_and_directions.cpp
@@ -0,0 +1,995 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2011-2022 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+/**
+ * @brief Functions for finding dominant direction of a set of colors.
+ */
+#if !defined(ASTCENC_DECOMPRESS_ONLY)
+
+#include "astcenc_internal.h"
+
+#include <cassert>
+
+/**
+ * @brief Compute the average RGB color of each partition.
+ *
+ * The algorithm here uses a vectorized sequential scan and per-partition
+ * color accumulators, using select() to mask texel lanes in other partitions.
+ *
+ * We only accumulate sums for N-1 partitions during the scan; the value for
+ * the last partition can be computed given that we know the block-wide average
+ * already.
+ *
+ * Because of this we could reduce the loop iteration count so it "just" spans
+ * the max texel index needed for the N-1 partitions, which could need fewer
+ * iterations than the full block texel count. However, this makes the loop
+ * count erratic and causes more branch mispredictions so is a net loss.
+ *
+ * @param pi The partitioning to use.
+ * @param blk The block data to process.
+ * @param[out] averages The output averages. Unused partition indices will
+ * not be initialized, and lane<3> will be zero.
+ */
+static void compute_partition_averages_rgb(
+ const partition_info& pi,
+ const image_block& blk,
+ vfloat4 averages[BLOCK_MAX_PARTITIONS]
+) {
+ unsigned int partition_count = pi.partition_count;
+ unsigned int texel_count = blk.texel_count;
+ promise(texel_count > 0);
+
+ // For 1 partition just use the precomputed mean
+ if (partition_count == 1)
+ {
+ averages[0] = blk.data_mean.swz<0, 1, 2>();
+ }
+ // For 2 partitions scan results for partition 0, compute partition 1
+ else if (partition_count == 2)
+ {
+ vfloatacc pp_avg_rgb[3] {};
+
+ vint lane_id = vint::lane_id();
+ for (unsigned int i = 0; i < texel_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ vint texel_partition(pi.partition_of_texel + i);
+
+ vmask lane_mask = lane_id < vint(texel_count);
+ lane_id += vint(ASTCENC_SIMD_WIDTH);
+
+ vmask p0_mask = lane_mask & (texel_partition == vint(0));
+
+ vfloat data_r = loada(blk.data_r + i);
+ haccumulate(pp_avg_rgb[0], data_r, p0_mask);
+
+ vfloat data_g = loada(blk.data_g + i);
+ haccumulate(pp_avg_rgb[1], data_g, p0_mask);
+
+ vfloat data_b = loada(blk.data_b + i);
+ haccumulate(pp_avg_rgb[2], data_b, p0_mask);
+ }
+
+ vfloat4 block_total = blk.data_mean.swz<0, 1, 2>() * static_cast<float>(blk.texel_count);
+
+ vfloat4 p0_total = vfloat3(hadd_s(pp_avg_rgb[0]),
+ hadd_s(pp_avg_rgb[1]),
+ hadd_s(pp_avg_rgb[2]));
+
+ vfloat4 p1_total = block_total - p0_total;
+
+ averages[0] = p0_total / static_cast<float>(pi.partition_texel_count[0]);
+ averages[1] = p1_total / static_cast<float>(pi.partition_texel_count[1]);
+ }
+ // For 3 partitions scan results for partition 0/1, compute partition 2
+ else if (partition_count == 3)
+ {
+ vfloatacc pp_avg_rgb[2][3] {};
+
+ vint lane_id = vint::lane_id();
+ for (unsigned int i = 0; i < texel_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ vint texel_partition(pi.partition_of_texel + i);
+
+ vmask lane_mask = lane_id < vint(texel_count);
+ lane_id += vint(ASTCENC_SIMD_WIDTH);
+
+ vmask p0_mask = lane_mask & (texel_partition == vint(0));
+ vmask p1_mask = lane_mask & (texel_partition == vint(1));
+
+ vfloat data_r = loada(blk.data_r + i);
+ haccumulate(pp_avg_rgb[0][0], data_r, p0_mask);
+ haccumulate(pp_avg_rgb[1][0], data_r, p1_mask);
+
+ vfloat data_g = loada(blk.data_g + i);
+ haccumulate(pp_avg_rgb[0][1], data_g, p0_mask);
+ haccumulate(pp_avg_rgb[1][1], data_g, p1_mask);
+
+ vfloat data_b = loada(blk.data_b + i);
+ haccumulate(pp_avg_rgb[0][2], data_b, p0_mask);
+ haccumulate(pp_avg_rgb[1][2], data_b, p1_mask);
+ }
+
+ vfloat4 block_total = blk.data_mean.swz<0, 1, 2>() * static_cast<float>(blk.texel_count);
+
+ vfloat4 p0_total = vfloat3(hadd_s(pp_avg_rgb[0][0]),
+ hadd_s(pp_avg_rgb[0][1]),
+ hadd_s(pp_avg_rgb[0][2]));
+
+ vfloat4 p1_total = vfloat3(hadd_s(pp_avg_rgb[1][0]),
+ hadd_s(pp_avg_rgb[1][1]),
+ hadd_s(pp_avg_rgb[1][2]));
+
+ vfloat4 p2_total = block_total - p0_total - p1_total;
+
+ averages[0] = p0_total / static_cast<float>(pi.partition_texel_count[0]);
+ averages[1] = p1_total / static_cast<float>(pi.partition_texel_count[1]);
+ averages[2] = p2_total / static_cast<float>(pi.partition_texel_count[2]);
+ }
+ else
+ {
+ // For 4 partitions scan results for partition 0/1/2, compute partition 3
+ vfloatacc pp_avg_rgb[3][3] {};
+
+ vint lane_id = vint::lane_id();
+ for (unsigned int i = 0; i < texel_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ vint texel_partition(pi.partition_of_texel + i);
+
+ vmask lane_mask = lane_id < vint(texel_count);
+ lane_id += vint(ASTCENC_SIMD_WIDTH);
+
+ vmask p0_mask = lane_mask & (texel_partition == vint(0));
+ vmask p1_mask = lane_mask & (texel_partition == vint(1));
+ vmask p2_mask = lane_mask & (texel_partition == vint(2));
+
+ vfloat data_r = loada(blk.data_r + i);
+ haccumulate(pp_avg_rgb[0][0], data_r, p0_mask);
+ haccumulate(pp_avg_rgb[1][0], data_r, p1_mask);
+ haccumulate(pp_avg_rgb[2][0], data_r, p2_mask);
+
+ vfloat data_g = loada(blk.data_g + i);
+ haccumulate(pp_avg_rgb[0][1], data_g, p0_mask);
+ haccumulate(pp_avg_rgb[1][1], data_g, p1_mask);
+ haccumulate(pp_avg_rgb[2][1], data_g, p2_mask);
+
+ vfloat data_b = loada(blk.data_b + i);
+ haccumulate(pp_avg_rgb[0][2], data_b, p0_mask);
+ haccumulate(pp_avg_rgb[1][2], data_b, p1_mask);
+ haccumulate(pp_avg_rgb[2][2], data_b, p2_mask);
+ }
+
+ vfloat4 block_total = blk.data_mean.swz<0, 1, 2>() * static_cast<float>(blk.texel_count);
+
+ vfloat4 p0_total = vfloat3(hadd_s(pp_avg_rgb[0][0]),
+ hadd_s(pp_avg_rgb[0][1]),
+ hadd_s(pp_avg_rgb[0][2]));
+
+ vfloat4 p1_total = vfloat3(hadd_s(pp_avg_rgb[1][0]),
+ hadd_s(pp_avg_rgb[1][1]),
+ hadd_s(pp_avg_rgb[1][2]));
+
+ vfloat4 p2_total = vfloat3(hadd_s(pp_avg_rgb[2][0]),
+ hadd_s(pp_avg_rgb[2][1]),
+ hadd_s(pp_avg_rgb[2][2]));
+
+		vfloat4 p3_total = block_total - p0_total - p1_total - p2_total;
+
+ averages[0] = p0_total / static_cast<float>(pi.partition_texel_count[0]);
+ averages[1] = p1_total / static_cast<float>(pi.partition_texel_count[1]);
+ averages[2] = p2_total / static_cast<float>(pi.partition_texel_count[2]);
+ averages[3] = p3_total / static_cast<float>(pi.partition_texel_count[3]);
+ }
+}
+
+/**
+ * @brief Compute the average RGBA color of each partition.
+ *
+ * The algorithm here uses a vectorized sequential scan and per-partition
+ * color accumulators, using select() to mask texel lanes in other partitions.
+ *
+ * We only accumulate sums for N-1 partitions during the scan; the value for
+ * the last partition can be computed given that we know the block-wide average
+ * already.
+ *
+ * Because of this we could reduce the loop iteration count so it "just" spans
+ * the max texel index needed for the N-1 partitions, which could need fewer
+ * iterations than the full block texel count. However, this makes the loop
+ * count erratic and causes more branch mispredictions so is a net loss.
+ *
+ * @param pi The partitioning to use.
+ * @param blk The block data to process.
+ * @param[out] averages The output averages. Unused partition indices will
+ * not be initialized.
+ */
+static void compute_partition_averages_rgba(
+ const partition_info& pi,
+ const image_block& blk,
+ vfloat4 averages[BLOCK_MAX_PARTITIONS]
+) {
+ unsigned int partition_count = pi.partition_count;
+ unsigned int texel_count = blk.texel_count;
+ promise(texel_count > 0);
+
+ // For 1 partition just use the precomputed mean
+ if (partition_count == 1)
+ {
+ averages[0] = blk.data_mean;
+ }
+ // For 2 partitions scan results for partition 0, compute partition 1
+ else if (partition_count == 2)
+ {
+ vfloat4 pp_avg_rgba[4] {};
+
+ vint lane_id = vint::lane_id();
+ for (unsigned int i = 0; i < texel_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ vint texel_partition(pi.partition_of_texel + i);
+
+ vmask lane_mask = lane_id < vint(texel_count);
+ lane_id += vint(ASTCENC_SIMD_WIDTH);
+
+ vmask p0_mask = lane_mask & (texel_partition == vint(0));
+
+ vfloat data_r = loada(blk.data_r + i);
+ haccumulate(pp_avg_rgba[0], data_r, p0_mask);
+
+ vfloat data_g = loada(blk.data_g + i);
+ haccumulate(pp_avg_rgba[1], data_g, p0_mask);
+
+ vfloat data_b = loada(blk.data_b + i);
+ haccumulate(pp_avg_rgba[2], data_b, p0_mask);
+
+ vfloat data_a = loada(blk.data_a + i);
+ haccumulate(pp_avg_rgba[3], data_a, p0_mask);
+ }
+
+ vfloat4 block_total = blk.data_mean * static_cast<float>(blk.texel_count);
+
+ vfloat4 p0_total = vfloat4(hadd_s(pp_avg_rgba[0]),
+ hadd_s(pp_avg_rgba[1]),
+ hadd_s(pp_avg_rgba[2]),
+ hadd_s(pp_avg_rgba[3]));
+
+ vfloat4 p1_total = block_total - p0_total;
+
+ averages[0] = p0_total / static_cast<float>(pi.partition_texel_count[0]);
+ averages[1] = p1_total / static_cast<float>(pi.partition_texel_count[1]);
+ }
+ // For 3 partitions scan results for partition 0/1, compute partition 2
+ else if (partition_count == 3)
+ {
+ vfloat4 pp_avg_rgba[2][4] {};
+
+ vint lane_id = vint::lane_id();
+ for (unsigned int i = 0; i < texel_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ vint texel_partition(pi.partition_of_texel + i);
+
+ vmask lane_mask = lane_id < vint(texel_count);
+ lane_id += vint(ASTCENC_SIMD_WIDTH);
+
+ vmask p0_mask = lane_mask & (texel_partition == vint(0));
+ vmask p1_mask = lane_mask & (texel_partition == vint(1));
+
+ vfloat data_r = loada(blk.data_r + i);
+ haccumulate(pp_avg_rgba[0][0], data_r, p0_mask);
+ haccumulate(pp_avg_rgba[1][0], data_r, p1_mask);
+
+ vfloat data_g = loada(blk.data_g + i);
+ haccumulate(pp_avg_rgba[0][1], data_g, p0_mask);
+ haccumulate(pp_avg_rgba[1][1], data_g, p1_mask);
+
+ vfloat data_b = loada(blk.data_b + i);
+ haccumulate(pp_avg_rgba[0][2], data_b, p0_mask);
+ haccumulate(pp_avg_rgba[1][2], data_b, p1_mask);
+
+ vfloat data_a = loada(blk.data_a + i);
+ haccumulate(pp_avg_rgba[0][3], data_a, p0_mask);
+ haccumulate(pp_avg_rgba[1][3], data_a, p1_mask);
+ }
+
+ vfloat4 block_total = blk.data_mean * static_cast<float>(blk.texel_count);
+
+ vfloat4 p0_total = vfloat4(hadd_s(pp_avg_rgba[0][0]),
+ hadd_s(pp_avg_rgba[0][1]),
+ hadd_s(pp_avg_rgba[0][2]),
+ hadd_s(pp_avg_rgba[0][3]));
+
+ vfloat4 p1_total = vfloat4(hadd_s(pp_avg_rgba[1][0]),
+ hadd_s(pp_avg_rgba[1][1]),
+ hadd_s(pp_avg_rgba[1][2]),
+ hadd_s(pp_avg_rgba[1][3]));
+
+ vfloat4 p2_total = block_total - p0_total - p1_total;
+
+ averages[0] = p0_total / static_cast<float>(pi.partition_texel_count[0]);
+ averages[1] = p1_total / static_cast<float>(pi.partition_texel_count[1]);
+ averages[2] = p2_total / static_cast<float>(pi.partition_texel_count[2]);
+ }
+ else
+ {
+ // For 4 partitions scan results for partition 0/1/2, compute partition 3
+ vfloat4 pp_avg_rgba[3][4] {};
+
+ vint lane_id = vint::lane_id();
+ for (unsigned int i = 0; i < texel_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ vint texel_partition(pi.partition_of_texel + i);
+
+ vmask lane_mask = lane_id < vint(texel_count);
+ lane_id += vint(ASTCENC_SIMD_WIDTH);
+
+ vmask p0_mask = lane_mask & (texel_partition == vint(0));
+ vmask p1_mask = lane_mask & (texel_partition == vint(1));
+ vmask p2_mask = lane_mask & (texel_partition == vint(2));
+
+ vfloat data_r = loada(blk.data_r + i);
+ haccumulate(pp_avg_rgba[0][0], data_r, p0_mask);
+ haccumulate(pp_avg_rgba[1][0], data_r, p1_mask);
+ haccumulate(pp_avg_rgba[2][0], data_r, p2_mask);
+
+ vfloat data_g = loada(blk.data_g + i);
+ haccumulate(pp_avg_rgba[0][1], data_g, p0_mask);
+ haccumulate(pp_avg_rgba[1][1], data_g, p1_mask);
+ haccumulate(pp_avg_rgba[2][1], data_g, p2_mask);
+
+ vfloat data_b = loada(blk.data_b + i);
+ haccumulate(pp_avg_rgba[0][2], data_b, p0_mask);
+ haccumulate(pp_avg_rgba[1][2], data_b, p1_mask);
+ haccumulate(pp_avg_rgba[2][2], data_b, p2_mask);
+
+ vfloat data_a = loada(blk.data_a + i);
+ haccumulate(pp_avg_rgba[0][3], data_a, p0_mask);
+ haccumulate(pp_avg_rgba[1][3], data_a, p1_mask);
+ haccumulate(pp_avg_rgba[2][3], data_a, p2_mask);
+ }
+
+ vfloat4 block_total = blk.data_mean * static_cast<float>(blk.texel_count);
+
+ vfloat4 p0_total = vfloat4(hadd_s(pp_avg_rgba[0][0]),
+ hadd_s(pp_avg_rgba[0][1]),
+ hadd_s(pp_avg_rgba[0][2]),
+ hadd_s(pp_avg_rgba[0][3]));
+
+ vfloat4 p1_total = vfloat4(hadd_s(pp_avg_rgba[1][0]),
+ hadd_s(pp_avg_rgba[1][1]),
+ hadd_s(pp_avg_rgba[1][2]),
+ hadd_s(pp_avg_rgba[1][3]));
+
+ vfloat4 p2_total = vfloat4(hadd_s(pp_avg_rgba[2][0]),
+ hadd_s(pp_avg_rgba[2][1]),
+ hadd_s(pp_avg_rgba[2][2]),
+ hadd_s(pp_avg_rgba[2][3]));
+
+		vfloat4 p3_total = block_total - p0_total - p1_total - p2_total;
+
+ averages[0] = p0_total / static_cast<float>(pi.partition_texel_count[0]);
+ averages[1] = p1_total / static_cast<float>(pi.partition_texel_count[1]);
+ averages[2] = p2_total / static_cast<float>(pi.partition_texel_count[2]);
+ averages[3] = p3_total / static_cast<float>(pi.partition_texel_count[3]);
+ }
+}
+
+/* See header for documentation. */
+void compute_avgs_and_dirs_4_comp(
+ const partition_info& pi,
+ const image_block& blk,
+ partition_metrics pm[BLOCK_MAX_PARTITIONS]
+) {
+ int partition_count = pi.partition_count;
+ promise(partition_count > 0);
+
+ // Pre-compute partition_averages
+ vfloat4 partition_averages[BLOCK_MAX_PARTITIONS];
+ compute_partition_averages_rgba(pi, blk, partition_averages);
+
+ for (int partition = 0; partition < partition_count; partition++)
+ {
+ const uint8_t *texel_indexes = pi.texels_of_partition[partition];
+ unsigned int texel_count = pi.partition_texel_count[partition];
+ promise(texel_count > 0);
+
+ vfloat4 average = partition_averages[partition];
+ pm[partition].avg = average;
+
+ vfloat4 sum_xp = vfloat4::zero();
+ vfloat4 sum_yp = vfloat4::zero();
+ vfloat4 sum_zp = vfloat4::zero();
+ vfloat4 sum_wp = vfloat4::zero();
+
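+		// Accumulate, per component, the residuals whose component is positive. The sum
+		// with the largest squared length is then used as the direction estimate - in
+		// effect a cheap approximation of the principal axis that avoids building and
+		// decomposing a covariance matrix.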
+ for (unsigned int i = 0; i < texel_count; i++)
+ {
+ unsigned int iwt = texel_indexes[i];
+ vfloat4 texel_datum = blk.texel(iwt);
+ texel_datum = texel_datum - average;
+
+ vfloat4 zero = vfloat4::zero();
+
+ vmask4 tdm0 = texel_datum.swz<0,0,0,0>() > zero;
+ sum_xp += select(zero, texel_datum, tdm0);
+
+ vmask4 tdm1 = texel_datum.swz<1,1,1,1>() > zero;
+ sum_yp += select(zero, texel_datum, tdm1);
+
+ vmask4 tdm2 = texel_datum.swz<2,2,2,2>() > zero;
+ sum_zp += select(zero, texel_datum, tdm2);
+
+ vmask4 tdm3 = texel_datum.swz<3,3,3,3>() > zero;
+ sum_wp += select(zero, texel_datum, tdm3);
+ }
+
+ vfloat4 prod_xp = dot(sum_xp, sum_xp);
+ vfloat4 prod_yp = dot(sum_yp, sum_yp);
+ vfloat4 prod_zp = dot(sum_zp, sum_zp);
+ vfloat4 prod_wp = dot(sum_wp, sum_wp);
+
+ vfloat4 best_vector = sum_xp;
+ vfloat4 best_sum = prod_xp;
+
+ vmask4 mask = prod_yp > best_sum;
+ best_vector = select(best_vector, sum_yp, mask);
+ best_sum = select(best_sum, prod_yp, mask);
+
+ mask = prod_zp > best_sum;
+ best_vector = select(best_vector, sum_zp, mask);
+ best_sum = select(best_sum, prod_zp, mask);
+
+ mask = prod_wp > best_sum;
+ best_vector = select(best_vector, sum_wp, mask);
+
+ pm[partition].dir = best_vector;
+ }
+}
+
+/* See header for documentation. */
+void compute_avgs_and_dirs_3_comp(
+ const partition_info& pi,
+ const image_block& blk,
+ unsigned int omitted_component,
+ partition_metrics pm[BLOCK_MAX_PARTITIONS]
+) {
+ // Pre-compute partition_averages
+ vfloat4 partition_averages[BLOCK_MAX_PARTITIONS];
+ compute_partition_averages_rgba(pi, blk, partition_averages);
+
+ const float* data_vr = blk.data_r;
+ const float* data_vg = blk.data_g;
+ const float* data_vb = blk.data_b;
+
+ // TODO: Data-driven permute would be useful to avoid this ...
+ if (omitted_component == 0)
+ {
+ partition_averages[0] = partition_averages[0].swz<1, 2, 3>();
+ partition_averages[1] = partition_averages[1].swz<1, 2, 3>();
+ partition_averages[2] = partition_averages[2].swz<1, 2, 3>();
+ partition_averages[3] = partition_averages[3].swz<1, 2, 3>();
+
+ data_vr = blk.data_g;
+ data_vg = blk.data_b;
+ data_vb = blk.data_a;
+ }
+ else if (omitted_component == 1)
+ {
+ partition_averages[0] = partition_averages[0].swz<0, 2, 3>();
+ partition_averages[1] = partition_averages[1].swz<0, 2, 3>();
+ partition_averages[2] = partition_averages[2].swz<0, 2, 3>();
+ partition_averages[3] = partition_averages[3].swz<0, 2, 3>();
+
+ data_vg = blk.data_b;
+ data_vb = blk.data_a;
+ }
+ else if (omitted_component == 2)
+ {
+ partition_averages[0] = partition_averages[0].swz<0, 1, 3>();
+ partition_averages[1] = partition_averages[1].swz<0, 1, 3>();
+ partition_averages[2] = partition_averages[2].swz<0, 1, 3>();
+ partition_averages[3] = partition_averages[3].swz<0, 1, 3>();
+
+ data_vb = blk.data_a;
+ }
+ else
+ {
+ partition_averages[0] = partition_averages[0].swz<0, 1, 2>();
+ partition_averages[1] = partition_averages[1].swz<0, 1, 2>();
+ partition_averages[2] = partition_averages[2].swz<0, 1, 2>();
+ partition_averages[3] = partition_averages[3].swz<0, 1, 2>();
+ }
+
+ unsigned int partition_count = pi.partition_count;
+ promise(partition_count > 0);
+
+ for (unsigned int partition = 0; partition < partition_count; partition++)
+ {
+ const uint8_t *texel_indexes = pi.texels_of_partition[partition];
+ unsigned int texel_count = pi.partition_texel_count[partition];
+ promise(texel_count > 0);
+
+ vfloat4 average = partition_averages[partition];
+ pm[partition].avg = average;
+
+ vfloat4 sum_xp = vfloat4::zero();
+ vfloat4 sum_yp = vfloat4::zero();
+ vfloat4 sum_zp = vfloat4::zero();
+
+ for (unsigned int i = 0; i < texel_count; i++)
+ {
+ unsigned int iwt = texel_indexes[i];
+
+ vfloat4 texel_datum = vfloat3(data_vr[iwt],
+ data_vg[iwt],
+ data_vb[iwt]);
+ texel_datum = texel_datum - average;
+
+ vfloat4 zero = vfloat4::zero();
+
+ vmask4 tdm0 = texel_datum.swz<0,0,0,0>() > zero;
+ sum_xp += select(zero, texel_datum, tdm0);
+
+ vmask4 tdm1 = texel_datum.swz<1,1,1,1>() > zero;
+ sum_yp += select(zero, texel_datum, tdm1);
+
+ vmask4 tdm2 = texel_datum.swz<2,2,2,2>() > zero;
+ sum_zp += select(zero, texel_datum, tdm2);
+ }
+
+ vfloat4 prod_xp = dot(sum_xp, sum_xp);
+ vfloat4 prod_yp = dot(sum_yp, sum_yp);
+ vfloat4 prod_zp = dot(sum_zp, sum_zp);
+
+ vfloat4 best_vector = sum_xp;
+ vfloat4 best_sum = prod_xp;
+
+ vmask4 mask = prod_yp > best_sum;
+ best_vector = select(best_vector, sum_yp, mask);
+ best_sum = select(best_sum, prod_yp, mask);
+
+ mask = prod_zp > best_sum;
+ best_vector = select(best_vector, sum_zp, mask);
+
+ pm[partition].dir = best_vector;
+ }
+}
+
+/* See header for documentation. */
+void compute_avgs_and_dirs_3_comp_rgb(
+ const partition_info& pi,
+ const image_block& blk,
+ partition_metrics pm[BLOCK_MAX_PARTITIONS]
+) {
+ unsigned int partition_count = pi.partition_count;
+ promise(partition_count > 0);
+
+ // Pre-compute partition_averages
+ vfloat4 partition_averages[BLOCK_MAX_PARTITIONS];
+ compute_partition_averages_rgb(pi, blk, partition_averages);
+
+ for (unsigned int partition = 0; partition < partition_count; partition++)
+ {
+ const uint8_t *texel_indexes = pi.texels_of_partition[partition];
+ unsigned int texel_count = pi.partition_texel_count[partition];
+ promise(texel_count > 0);
+
+ vfloat4 average = partition_averages[partition];
+ pm[partition].avg = average;
+
+ vfloat4 sum_xp = vfloat4::zero();
+ vfloat4 sum_yp = vfloat4::zero();
+ vfloat4 sum_zp = vfloat4::zero();
+
+ for (unsigned int i = 0; i < texel_count; i++)
+ {
+ unsigned int iwt = texel_indexes[i];
+
+ vfloat4 texel_datum = blk.texel3(iwt);
+ texel_datum = texel_datum - average;
+
+ vfloat4 zero = vfloat4::zero();
+
+ vmask4 tdm0 = texel_datum.swz<0,0,0,0>() > zero;
+ sum_xp += select(zero, texel_datum, tdm0);
+
+ vmask4 tdm1 = texel_datum.swz<1,1,1,1>() > zero;
+ sum_yp += select(zero, texel_datum, tdm1);
+
+ vmask4 tdm2 = texel_datum.swz<2,2,2,2>() > zero;
+ sum_zp += select(zero, texel_datum, tdm2);
+ }
+
+ vfloat4 prod_xp = dot(sum_xp, sum_xp);
+ vfloat4 prod_yp = dot(sum_yp, sum_yp);
+ vfloat4 prod_zp = dot(sum_zp, sum_zp);
+
+ vfloat4 best_vector = sum_xp;
+ vfloat4 best_sum = prod_xp;
+
+ vmask4 mask = prod_yp > best_sum;
+ best_vector = select(best_vector, sum_yp, mask);
+ best_sum = select(best_sum, prod_yp, mask);
+
+ mask = prod_zp > best_sum;
+ best_vector = select(best_vector, sum_zp, mask);
+
+ pm[partition].dir = best_vector;
+ }
+}
+
+/* See header for documentation. */
+void compute_avgs_and_dirs_2_comp(
+ const partition_info& pt,
+ const image_block& blk,
+ unsigned int component1,
+ unsigned int component2,
+ partition_metrics pm[BLOCK_MAX_PARTITIONS]
+) {
+ vfloat4 average;
+
+ const float* data_vr = nullptr;
+ const float* data_vg = nullptr;
+
+ if (component1 == 0 && component2 == 1)
+ {
+ average = blk.data_mean.swz<0, 1>();
+
+ data_vr = blk.data_r;
+ data_vg = blk.data_g;
+ }
+ else if (component1 == 0 && component2 == 2)
+ {
+ average = blk.data_mean.swz<0, 2>();
+
+ data_vr = blk.data_r;
+ data_vg = blk.data_b;
+ }
+ else // (component1 == 1 && component2 == 2)
+ {
+ assert(component1 == 1 && component2 == 2);
+
+ average = blk.data_mean.swz<1, 2>();
+
+ data_vr = blk.data_g;
+ data_vg = blk.data_b;
+ }
+
+ unsigned int partition_count = pt.partition_count;
+ promise(partition_count > 0);
+
+ for (unsigned int partition = 0; partition < partition_count; partition++)
+ {
+ const uint8_t *texel_indexes = pt.texels_of_partition[partition];
+ unsigned int texel_count = pt.partition_texel_count[partition];
+ promise(texel_count > 0);
+
+ // Only compute a partition mean if more than one partition
+ if (partition_count > 1)
+ {
+ average = vfloat4::zero();
+ for (unsigned int i = 0; i < texel_count; i++)
+ {
+ unsigned int iwt = texel_indexes[i];
+ average += vfloat2(data_vr[iwt], data_vg[iwt]);
+ }
+
+ average = average / static_cast<float>(texel_count);
+ }
+
+ pm[partition].avg = average;
+
+ vfloat4 sum_xp = vfloat4::zero();
+ vfloat4 sum_yp = vfloat4::zero();
+
+ for (unsigned int i = 0; i < texel_count; i++)
+ {
+ unsigned int iwt = texel_indexes[i];
+ vfloat4 texel_datum = vfloat2(data_vr[iwt], data_vg[iwt]);
+ texel_datum = texel_datum - average;
+
+ vfloat4 zero = vfloat4::zero();
+
+ vmask4 tdm0 = texel_datum.swz<0,0,0,0>() > zero;
+ sum_xp += select(zero, texel_datum, tdm0);
+
+ vmask4 tdm1 = texel_datum.swz<1,1,1,1>() > zero;
+ sum_yp += select(zero, texel_datum, tdm1);
+ }
+
+ vfloat4 prod_xp = dot(sum_xp, sum_xp);
+ vfloat4 prod_yp = dot(sum_yp, sum_yp);
+
+ vfloat4 best_vector = sum_xp;
+ vfloat4 best_sum = prod_xp;
+
+ vmask4 mask = prod_yp > best_sum;
+ best_vector = select(best_vector, sum_yp, mask);
+
+ pm[partition].dir = best_vector;
+ }
+}
+
+/* See header for documentation. */
+void compute_error_squared_rgba(
+ const partition_info& pi,
+ const image_block& blk,
+ const processed_line4 uncor_plines[BLOCK_MAX_PARTITIONS],
+ const processed_line4 samec_plines[BLOCK_MAX_PARTITIONS],
+ float uncor_lengths[BLOCK_MAX_PARTITIONS],
+ float samec_lengths[BLOCK_MAX_PARTITIONS],
+ float& uncor_error,
+ float& samec_error
+) {
+ unsigned int partition_count = pi.partition_count;
+ promise(partition_count > 0);
+
+ vfloatacc uncor_errorsumv = vfloatacc::zero();
+ vfloatacc samec_errorsumv = vfloatacc::zero();
+
+ for (unsigned int partition = 0; partition < partition_count; partition++)
+ {
+ const uint8_t *texel_indexes = pi.texels_of_partition[partition];
+
+ float uncor_loparam = 1e10f;
+ float uncor_hiparam = -1e10f;
+
+ float samec_loparam = 1e10f;
+ float samec_hiparam = -1e10f;
+
+ processed_line4 l_uncor = uncor_plines[partition];
+ processed_line4 l_samec = samec_plines[partition];
+
+ unsigned int texel_count = pi.partition_texel_count[partition];
+ promise(texel_count > 0);
+
+ // Vectorize some useful scalar inputs
+ vfloat l_uncor_bs0(l_uncor.bs.lane<0>());
+ vfloat l_uncor_bs1(l_uncor.bs.lane<1>());
+ vfloat l_uncor_bs2(l_uncor.bs.lane<2>());
+ vfloat l_uncor_bs3(l_uncor.bs.lane<3>());
+
+ vfloat l_uncor_amod0(l_uncor.amod.lane<0>());
+ vfloat l_uncor_amod1(l_uncor.amod.lane<1>());
+ vfloat l_uncor_amod2(l_uncor.amod.lane<2>());
+ vfloat l_uncor_amod3(l_uncor.amod.lane<3>());
+
+ vfloat l_samec_bs0(l_samec.bs.lane<0>());
+ vfloat l_samec_bs1(l_samec.bs.lane<1>());
+ vfloat l_samec_bs2(l_samec.bs.lane<2>());
+ vfloat l_samec_bs3(l_samec.bs.lane<3>());
+
+ assert(all(l_samec.amod == vfloat4(0.0f)));
+
+ vfloat uncor_loparamv(1e10f);
+ vfloat uncor_hiparamv(-1e10f);
+
+ vfloat samec_loparamv(1e10f);
+ vfloat samec_hiparamv(-1e10f);
+
+ vfloat ew_r(blk.channel_weight.lane<0>());
+ vfloat ew_g(blk.channel_weight.lane<1>());
+ vfloat ew_b(blk.channel_weight.lane<2>());
+ vfloat ew_a(blk.channel_weight.lane<3>());
+
+ // This implementation over-shoots, but this is safe as we initialize the texel_indexes
+ // array to extend the last value. This means min/max are not impacted, but we need to mask
+ // out the dummy values when we compute the line weighting.
+ vint lane_ids = vint::lane_id();
+ for (unsigned int i = 0; i < texel_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ vmask mask = lane_ids < vint(texel_count);
+ vint texel_idxs(texel_indexes + i);
+
+ vfloat data_r = gatherf(blk.data_r, texel_idxs);
+ vfloat data_g = gatherf(blk.data_g, texel_idxs);
+ vfloat data_b = gatherf(blk.data_b, texel_idxs);
+ vfloat data_a = gatherf(blk.data_a, texel_idxs);
+
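+			// Conceptually: uncor_param is the texel's position along the candidate line,
+			// amod + param * bs is the closest point on that line, and the dist terms are
+			// the per-channel residuals. The samec line passes through the origin (amod is
+			// asserted to be zero above), so its amod terms drop out.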
+ vfloat uncor_param = (data_r * l_uncor_bs0)
+ + (data_g * l_uncor_bs1)
+ + (data_b * l_uncor_bs2)
+ + (data_a * l_uncor_bs3);
+
+ uncor_loparamv = min(uncor_param, uncor_loparamv);
+ uncor_hiparamv = max(uncor_param, uncor_hiparamv);
+
+ vfloat uncor_dist0 = (l_uncor_amod0 - data_r)
+ + (uncor_param * l_uncor_bs0);
+ vfloat uncor_dist1 = (l_uncor_amod1 - data_g)
+ + (uncor_param * l_uncor_bs1);
+ vfloat uncor_dist2 = (l_uncor_amod2 - data_b)
+ + (uncor_param * l_uncor_bs2);
+ vfloat uncor_dist3 = (l_uncor_amod3 - data_a)
+ + (uncor_param * l_uncor_bs3);
+
+ vfloat uncor_err = (ew_r * uncor_dist0 * uncor_dist0)
+ + (ew_g * uncor_dist1 * uncor_dist1)
+ + (ew_b * uncor_dist2 * uncor_dist2)
+ + (ew_a * uncor_dist3 * uncor_dist3);
+
+ haccumulate(uncor_errorsumv, uncor_err, mask);
+
+ // Process samechroma data
+ vfloat samec_param = (data_r * l_samec_bs0)
+ + (data_g * l_samec_bs1)
+ + (data_b * l_samec_bs2)
+ + (data_a * l_samec_bs3);
+
+ samec_loparamv = min(samec_param, samec_loparamv);
+ samec_hiparamv = max(samec_param, samec_hiparamv);
+
+ vfloat samec_dist0 = samec_param * l_samec_bs0 - data_r;
+ vfloat samec_dist1 = samec_param * l_samec_bs1 - data_g;
+ vfloat samec_dist2 = samec_param * l_samec_bs2 - data_b;
+ vfloat samec_dist3 = samec_param * l_samec_bs3 - data_a;
+
+ vfloat samec_err = (ew_r * samec_dist0 * samec_dist0)
+ + (ew_g * samec_dist1 * samec_dist1)
+ + (ew_b * samec_dist2 * samec_dist2)
+ + (ew_a * samec_dist3 * samec_dist3);
+
+ haccumulate(samec_errorsumv, samec_err, mask);
+
+ lane_ids += vint(ASTCENC_SIMD_WIDTH);
+ }
+
+ uncor_loparam = hmin_s(uncor_loparamv);
+ uncor_hiparam = hmax_s(uncor_hiparamv);
+
+ samec_loparam = hmin_s(samec_loparamv);
+ samec_hiparam = hmax_s(samec_hiparamv);
+
+ float uncor_linelen = uncor_hiparam - uncor_loparam;
+ float samec_linelen = samec_hiparam - samec_loparam;
+
+ // Turn very small numbers and NaNs into a small number
+ uncor_lengths[partition] = astc::max(uncor_linelen, 1e-7f);
+ samec_lengths[partition] = astc::max(samec_linelen, 1e-7f);
+ }
+
+ uncor_error = hadd_s(uncor_errorsumv);
+ samec_error = hadd_s(samec_errorsumv);
+}
+
+/* See header for documentation. */
+void compute_error_squared_rgb(
+ const partition_info& pi,
+ const image_block& blk,
+ partition_lines3 plines[BLOCK_MAX_PARTITIONS],
+ float& uncor_error,
+ float& samec_error
+) {
+ unsigned int partition_count = pi.partition_count;
+ promise(partition_count > 0);
+
+ vfloatacc uncor_errorsumv = vfloatacc::zero();
+ vfloatacc samec_errorsumv = vfloatacc::zero();
+
+ for (unsigned int partition = 0; partition < partition_count; partition++)
+ {
+ partition_lines3& pl = plines[partition];
+ const uint8_t *texel_indexes = pi.texels_of_partition[partition];
+ unsigned int texel_count = pi.partition_texel_count[partition];
+ promise(texel_count > 0);
+
+ float uncor_loparam = 1e10f;
+ float uncor_hiparam = -1e10f;
+
+ float samec_loparam = 1e10f;
+ float samec_hiparam = -1e10f;
+
+ processed_line3 l_uncor = pl.uncor_pline;
+ processed_line3 l_samec = pl.samec_pline;
+
+ // This implementation is an example vectorization of this function.
+		// It works - the codec is 2-4% faster than the non-vectorized version - but
+		// the benefit is limited by the use of gathers and register pressure.
+
+ // Vectorize some useful scalar inputs
+ vfloat l_uncor_bs0(l_uncor.bs.lane<0>());
+ vfloat l_uncor_bs1(l_uncor.bs.lane<1>());
+ vfloat l_uncor_bs2(l_uncor.bs.lane<2>());
+
+ vfloat l_uncor_amod0(l_uncor.amod.lane<0>());
+ vfloat l_uncor_amod1(l_uncor.amod.lane<1>());
+ vfloat l_uncor_amod2(l_uncor.amod.lane<2>());
+
+ vfloat l_samec_bs0(l_samec.bs.lane<0>());
+ vfloat l_samec_bs1(l_samec.bs.lane<1>());
+ vfloat l_samec_bs2(l_samec.bs.lane<2>());
+
+ assert(all(l_samec.amod == vfloat4(0.0f)));
+
+ vfloat uncor_loparamv(1e10f);
+ vfloat uncor_hiparamv(-1e10f);
+
+ vfloat samec_loparamv(1e10f);
+ vfloat samec_hiparamv(-1e10f);
+
+ vfloat ew_r(blk.channel_weight.lane<0>());
+ vfloat ew_g(blk.channel_weight.lane<1>());
+ vfloat ew_b(blk.channel_weight.lane<2>());
+
+		// This implementation over-shoots, but this is safe as we initialize the texel_indexes
+		// array to extend the last value. This means min/max are not impacted, but we need to mask
+ // out the dummy values when we compute the line weighting.
+ vint lane_ids = vint::lane_id();
+ for (unsigned int i = 0; i < texel_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ vmask mask = lane_ids < vint(texel_count);
+ vint texel_idxs(texel_indexes + i);
+
+ vfloat data_r = gatherf(blk.data_r, texel_idxs);
+ vfloat data_g = gatherf(blk.data_g, texel_idxs);
+ vfloat data_b = gatherf(blk.data_b, texel_idxs);
+
+ vfloat uncor_param = (data_r * l_uncor_bs0)
+ + (data_g * l_uncor_bs1)
+ + (data_b * l_uncor_bs2);
+
+ uncor_loparamv = min(uncor_param, uncor_loparamv);
+ uncor_hiparamv = max(uncor_param, uncor_hiparamv);
+
+ vfloat uncor_dist0 = (l_uncor_amod0 - data_r)
+ + (uncor_param * l_uncor_bs0);
+ vfloat uncor_dist1 = (l_uncor_amod1 - data_g)
+ + (uncor_param * l_uncor_bs1);
+ vfloat uncor_dist2 = (l_uncor_amod2 - data_b)
+ + (uncor_param * l_uncor_bs2);
+
+ vfloat uncor_err = (ew_r * uncor_dist0 * uncor_dist0)
+ + (ew_g * uncor_dist1 * uncor_dist1)
+ + (ew_b * uncor_dist2 * uncor_dist2);
+
+ haccumulate(uncor_errorsumv, uncor_err, mask);
+
+ // Process samechroma data
+ vfloat samec_param = (data_r * l_samec_bs0)
+ + (data_g * l_samec_bs1)
+ + (data_b * l_samec_bs2);
+
+ samec_loparamv = min(samec_param, samec_loparamv);
+ samec_hiparamv = max(samec_param, samec_hiparamv);
+
+ vfloat samec_dist0 = samec_param * l_samec_bs0 - data_r;
+ vfloat samec_dist1 = samec_param * l_samec_bs1 - data_g;
+ vfloat samec_dist2 = samec_param * l_samec_bs2 - data_b;
+
+ vfloat samec_err = (ew_r * samec_dist0 * samec_dist0)
+ + (ew_g * samec_dist1 * samec_dist1)
+ + (ew_b * samec_dist2 * samec_dist2);
+
+ haccumulate(samec_errorsumv, samec_err, mask);
+
+ lane_ids += vint(ASTCENC_SIMD_WIDTH);
+ }
+
+ uncor_loparam = hmin_s(uncor_loparamv);
+ uncor_hiparam = hmax_s(uncor_hiparamv);
+
+ samec_loparam = hmin_s(samec_loparamv);
+ samec_hiparam = hmax_s(samec_hiparamv);
+
+ float uncor_linelen = uncor_hiparam - uncor_loparam;
+ float samec_linelen = samec_hiparam - samec_loparam;
+
+ // Turn very small numbers and NaNs into a small number
+ pl.uncor_line_len = astc::max(uncor_linelen, 1e-7f);
+ pl.samec_line_len = astc::max(samec_linelen, 1e-7f);
+ }
+
+ uncor_error = hadd_s(uncor_errorsumv);
+ samec_error = hadd_s(samec_errorsumv);
+}
+
+#endif
diff --git a/thirdparty/astcenc/astcenc_block_sizes.cpp b/thirdparty/astcenc/astcenc_block_sizes.cpp
new file mode 100644
index 0000000000..1c22d06a5c
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_block_sizes.cpp
@@ -0,0 +1,1184 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2011-2023 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+/**
+ * @brief Functions to generate block size descriptor and decimation tables.
+ */
+
+#include "astcenc_internal.h"
+
+/**
+ * @brief Decode the properties of an encoded 2D block mode.
+ *
+ * @param block_mode The encoded block mode.
+ * @param[out] x_weights The number of weights in the X dimension.
+ * @param[out] y_weights The number of weights in the Y dimension.
+ * @param[out] is_dual_plane True if this block mode has two weight planes.
+ * @param[out] quant_mode The quantization level for the weights.
+ * @param[out] weight_bits The storage bit count for the weights.
+ *
+ * @return Returns true if a valid mode, false otherwise.
+ */
+static bool decode_block_mode_2d(
+ unsigned int block_mode,
+ unsigned int& x_weights,
+ unsigned int& y_weights,
+ bool& is_dual_plane,
+ unsigned int& quant_mode,
+ unsigned int& weight_bits
+) {
+ unsigned int base_quant_mode = (block_mode >> 4) & 1;
+ unsigned int H = (block_mode >> 9) & 1;
+ unsigned int D = (block_mode >> 10) & 1;
+ unsigned int A = (block_mode >> 5) & 0x3;
+
+ x_weights = 0;
+ y_weights = 0;
+
+ if ((block_mode & 3) != 0)
+ {
+ base_quant_mode |= (block_mode & 3) << 1;
+ unsigned int B = (block_mode >> 7) & 3;
+ switch ((block_mode >> 2) & 3)
+ {
+ case 0:
+ x_weights = B + 4;
+ y_weights = A + 2;
+ break;
+ case 1:
+ x_weights = B + 8;
+ y_weights = A + 2;
+ break;
+ case 2:
+ x_weights = A + 2;
+ y_weights = B + 8;
+ break;
+ case 3:
+ B &= 1;
+ if (block_mode & 0x100)
+ {
+ x_weights = B + 2;
+ y_weights = A + 2;
+ }
+ else
+ {
+ x_weights = A + 2;
+ y_weights = B + 6;
+ }
+ break;
+ }
+ }
+ else
+ {
+ base_quant_mode |= ((block_mode >> 2) & 3) << 1;
+ if (((block_mode >> 2) & 3) == 0)
+ {
+ return false;
+ }
+
+ unsigned int B = (block_mode >> 9) & 3;
+ switch ((block_mode >> 7) & 3)
+ {
+ case 0:
+ x_weights = 12;
+ y_weights = A + 2;
+ break;
+ case 1:
+ x_weights = A + 2;
+ y_weights = 12;
+ break;
+ case 2:
+ x_weights = A + 6;
+ y_weights = B + 6;
+ D = 0;
+ H = 0;
+ break;
+ case 3:
+ switch ((block_mode >> 5) & 3)
+ {
+ case 0:
+ x_weights = 6;
+ y_weights = 10;
+ break;
+ case 1:
+ x_weights = 10;
+ y_weights = 6;
+ break;
+ case 2:
+ case 3:
+ return false;
+ }
+ break;
+ }
+ }
+
+ unsigned int weight_count = x_weights * y_weights * (D + 1);
+ quant_mode = (base_quant_mode - 2) + 6 * H;
+ is_dual_plane = D != 0;
+
+ weight_bits = get_ise_sequence_bitcount(weight_count, static_cast<quant_method>(quant_mode));
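+	// A block mode is usable only if it fits the ASTC limits: at most BLOCK_MAX_WEIGHTS
+	// weights per block, and a weight bit count inside the specification's allowed
+	// [BLOCK_MIN_WEIGHT_BITS, BLOCK_MAX_WEIGHT_BITS] range.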
+ return (weight_count <= BLOCK_MAX_WEIGHTS &&
+ weight_bits >= BLOCK_MIN_WEIGHT_BITS &&
+ weight_bits <= BLOCK_MAX_WEIGHT_BITS);
+}
+
+/**
+ * @brief Decode the properties of an encoded 3D block mode.
+ *
+ * @param block_mode The encoded block mode.
+ * @param[out] x_weights The number of weights in the X dimension.
+ * @param[out] y_weights The number of weights in the Y dimension.
+ * @param[out] z_weights The number of weights in the Z dimension.
+ * @param[out] is_dual_plane True if this block mode has two weight planes.
+ * @param[out] quant_mode The quantization level for the weights.
+ * @param[out] weight_bits The storage bit count for the weights.
+ *
+ * @return Returns true if a valid mode, false otherwise.
+ */
+static bool decode_block_mode_3d(
+ unsigned int block_mode,
+ unsigned int& x_weights,
+ unsigned int& y_weights,
+ unsigned int& z_weights,
+ bool& is_dual_plane,
+ unsigned int& quant_mode,
+ unsigned int& weight_bits
+) {
+ unsigned int base_quant_mode = (block_mode >> 4) & 1;
+ unsigned int H = (block_mode >> 9) & 1;
+ unsigned int D = (block_mode >> 10) & 1;
+ unsigned int A = (block_mode >> 5) & 0x3;
+
+ x_weights = 0;
+ y_weights = 0;
+ z_weights = 0;
+
+ if ((block_mode & 3) != 0)
+ {
+ base_quant_mode |= (block_mode & 3) << 1;
+ unsigned int B = (block_mode >> 7) & 3;
+ unsigned int C = (block_mode >> 2) & 0x3;
+ x_weights = A + 2;
+ y_weights = B + 2;
+ z_weights = C + 2;
+ }
+ else
+ {
+ base_quant_mode |= ((block_mode >> 2) & 3) << 1;
+ if (((block_mode >> 2) & 3) == 0)
+ {
+ return false;
+ }
+
+ int B = (block_mode >> 9) & 3;
+ if (((block_mode >> 7) & 3) != 3)
+ {
+ D = 0;
+ H = 0;
+ }
+ switch ((block_mode >> 7) & 3)
+ {
+ case 0:
+ x_weights = 6;
+ y_weights = B + 2;
+ z_weights = A + 2;
+ break;
+ case 1:
+ x_weights = A + 2;
+ y_weights = 6;
+ z_weights = B + 2;
+ break;
+ case 2:
+ x_weights = A + 2;
+ y_weights = B + 2;
+ z_weights = 6;
+ break;
+ case 3:
+ x_weights = 2;
+ y_weights = 2;
+ z_weights = 2;
+ switch ((block_mode >> 5) & 3)
+ {
+ case 0:
+ x_weights = 6;
+ break;
+ case 1:
+ y_weights = 6;
+ break;
+ case 2:
+ z_weights = 6;
+ break;
+ case 3:
+ return false;
+ }
+ break;
+ }
+ }
+
+ unsigned int weight_count = x_weights * y_weights * z_weights * (D + 1);
+ quant_mode = (base_quant_mode - 2) + 6 * H;
+ is_dual_plane = D != 0;
+
+ weight_bits = get_ise_sequence_bitcount(weight_count, static_cast<quant_method>(quant_mode));
+ return (weight_count <= BLOCK_MAX_WEIGHTS &&
+ weight_bits >= BLOCK_MIN_WEIGHT_BITS &&
+ weight_bits <= BLOCK_MAX_WEIGHT_BITS);
+}
+
+/**
+ * @brief Create a 2D decimation entry for a block-size and weight-decimation pair.
+ *
+ * @param x_texels The number of texels in the X dimension.
+ * @param y_texels The number of texels in the Y dimension.
+ * @param x_weights The number of weights in the X dimension.
+ * @param y_weights The number of weights in the Y dimension.
+ * @param[out] di The decimation info structure to populate.
+ * @param[out] wb The decimation table init scratch working buffers.
+ */
+static void init_decimation_info_2d(
+ unsigned int x_texels,
+ unsigned int y_texels,
+ unsigned int x_weights,
+ unsigned int y_weights,
+ decimation_info& di,
+ dt_init_working_buffers& wb
+) {
+ unsigned int texels_per_block = x_texels * y_texels;
+ unsigned int weights_per_block = x_weights * y_weights;
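+	// A decimation table maps between the full texel grid and the (usually smaller)
+	// weight grid: for each texel it stores which grid weights influence it and their
+	// interpolation weights, plus the transposed per-weight view used by the SIMD paths.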
+
+ uint8_t max_texel_count_of_weight = 0;
+
+ promise(weights_per_block > 0);
+ promise(texels_per_block > 0);
+ promise(x_texels > 0);
+ promise(y_texels > 0);
+
+ for (unsigned int i = 0; i < weights_per_block; i++)
+ {
+ wb.texel_count_of_weight[i] = 0;
+ }
+
+ for (unsigned int i = 0; i < texels_per_block; i++)
+ {
+ wb.weight_count_of_texel[i] = 0;
+ }
+
+ for (unsigned int y = 0; y < y_texels; y++)
+ {
+ for (unsigned int x = 0; x < x_texels; x++)
+ {
+ unsigned int texel = y * x_texels + x;
+
+ unsigned int x_weight = (((1024 + x_texels / 2) / (x_texels - 1)) * x * (x_weights - 1) + 32) >> 6;
+ unsigned int y_weight = (((1024 + y_texels / 2) / (y_texels - 1)) * y * (y_weights - 1) + 32) >> 6;
+
+ unsigned int x_weight_frac = x_weight & 0xF;
+ unsigned int y_weight_frac = y_weight & 0xF;
+ unsigned int x_weight_int = x_weight >> 4;
+ unsigned int y_weight_int = y_weight >> 4;
+
+ unsigned int qweight[4];
+ qweight[0] = x_weight_int + y_weight_int * x_weights;
+ qweight[1] = qweight[0] + 1;
+ qweight[2] = qweight[0] + x_weights;
+ qweight[3] = qweight[2] + 1;
+
+ // Truncated-precision bilinear interpolation
+ unsigned int prod = x_weight_frac * y_weight_frac;
+
+ unsigned int weight[4];
+ weight[3] = (prod + 8) >> 4;
+ weight[1] = x_weight_frac - weight[3];
+ weight[2] = y_weight_frac - weight[3];
+ weight[0] = 16 - x_weight_frac - y_weight_frac + weight[3];
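+			// Worked example: for an 8x8 block with a 6x6 weight grid, texel x = 3 gives
+			// x_weight = ((1028 / 7) * 3 * 5 + 32) >> 6 = 34, i.e. grid position 2 + 2/16,
+			// close to the ideal 3 * 5 / 7 = 2.14. The four bilinear taps always sum to 16.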
+
+ for (unsigned int i = 0; i < 4; i++)
+ {
+ if (weight[i] != 0)
+ {
+ wb.grid_weights_of_texel[texel][wb.weight_count_of_texel[texel]] = static_cast<uint8_t>(qweight[i]);
+ wb.weights_of_texel[texel][wb.weight_count_of_texel[texel]] = static_cast<uint8_t>(weight[i]);
+ wb.weight_count_of_texel[texel]++;
+ wb.texels_of_weight[qweight[i]][wb.texel_count_of_weight[qweight[i]]] = static_cast<uint8_t>(texel);
+ wb.texel_weights_of_weight[qweight[i]][wb.texel_count_of_weight[qweight[i]]] = static_cast<uint8_t>(weight[i]);
+ wb.texel_count_of_weight[qweight[i]]++;
+ max_texel_count_of_weight = astc::max(max_texel_count_of_weight, wb.texel_count_of_weight[qweight[i]]);
+ }
+ }
+ }
+ }
+
+ uint8_t max_texel_weight_count = 0;
+ for (unsigned int i = 0; i < texels_per_block; i++)
+ {
+ di.texel_weight_count[i] = wb.weight_count_of_texel[i];
+ max_texel_weight_count = astc::max(max_texel_weight_count, di.texel_weight_count[i]);
+
+ for (unsigned int j = 0; j < wb.weight_count_of_texel[i]; j++)
+ {
+ di.texel_weight_contribs_int_tr[j][i] = wb.weights_of_texel[i][j];
+ di.texel_weight_contribs_float_tr[j][i] = static_cast<float>(wb.weights_of_texel[i][j]) * (1.0f / WEIGHTS_TEXEL_SUM);
+ di.texel_weights_tr[j][i] = wb.grid_weights_of_texel[i][j];
+ }
+
+ // Init all 4 entries so we can rely on zeros for vectorization
+ for (unsigned int j = wb.weight_count_of_texel[i]; j < 4; j++)
+ {
+ di.texel_weight_contribs_int_tr[j][i] = 0;
+ di.texel_weight_contribs_float_tr[j][i] = 0.0f;
+ di.texel_weights_tr[j][i] = 0;
+ }
+ }
+
+ di.max_texel_weight_count = max_texel_weight_count;
+
+ for (unsigned int i = 0; i < weights_per_block; i++)
+ {
+ unsigned int texel_count_wt = wb.texel_count_of_weight[i];
+ di.weight_texel_count[i] = static_cast<uint8_t>(texel_count_wt);
+
+ for (unsigned int j = 0; j < texel_count_wt; j++)
+ {
+ uint8_t texel = wb.texels_of_weight[i][j];
+
+ // Create transposed versions of these for better vectorization
+ di.weight_texels_tr[j][i] = texel;
+ di.weights_texel_contribs_tr[j][i] = static_cast<float>(wb.texel_weights_of_weight[i][j]);
+
+ // Store the per-texel contribution of this weight for each texel it contributes to
+ di.texel_contrib_for_weight[j][i] = 0.0f;
+ for (unsigned int k = 0; k < 4; k++)
+ {
+ uint8_t dttw = di.texel_weights_tr[k][texel];
+ float dttwf = di.texel_weight_contribs_float_tr[k][texel];
+ if (dttw == i && dttwf != 0.0f)
+ {
+ di.texel_contrib_for_weight[j][i] = di.texel_weight_contribs_float_tr[k][texel];
+ break;
+ }
+ }
+ }
+
+ // Initialize array tail so we can over-fetch with SIMD later to avoid loop tails
+ // Match last texel in active lane in SIMD group, for better gathers
+ uint8_t last_texel = di.weight_texels_tr[texel_count_wt - 1][i];
+ for (unsigned int j = texel_count_wt; j < max_texel_count_of_weight; j++)
+ {
+ di.weight_texels_tr[j][i] = last_texel;
+ di.weights_texel_contribs_tr[j][i] = 0.0f;
+ }
+ }
+
+ // Initialize array tail so we can over-fetch with SIMD later to avoid loop tails
+ unsigned int texels_per_block_simd = round_up_to_simd_multiple_vla(texels_per_block);
+ for (unsigned int i = texels_per_block; i < texels_per_block_simd; i++)
+ {
+ di.texel_weight_count[i] = 0;
+
+ for (unsigned int j = 0; j < 4; j++)
+ {
+ di.texel_weight_contribs_float_tr[j][i] = 0;
+ di.texel_weights_tr[j][i] = 0;
+ di.texel_weight_contribs_int_tr[j][i] = 0;
+ }
+ }
+
+ // Initialize array tail so we can over-fetch with SIMD later to avoid loop tails
+ // Match last texel in active lane in SIMD group, for better gathers
+ unsigned int last_texel_count_wt = wb.texel_count_of_weight[weights_per_block - 1];
+ uint8_t last_texel = di.weight_texels_tr[last_texel_count_wt - 1][weights_per_block - 1];
+
+ unsigned int weights_per_block_simd = round_up_to_simd_multiple_vla(weights_per_block);
+ for (unsigned int i = weights_per_block; i < weights_per_block_simd; i++)
+ {
+ di.weight_texel_count[i] = 0;
+
+ for (unsigned int j = 0; j < max_texel_count_of_weight; j++)
+ {
+ di.weight_texels_tr[j][i] = last_texel;
+ di.weights_texel_contribs_tr[j][i] = 0.0f;
+ }
+ }
+
+ di.texel_count = static_cast<uint8_t>(texels_per_block);
+ di.weight_count = static_cast<uint8_t>(weights_per_block);
+ di.weight_x = static_cast<uint8_t>(x_weights);
+ di.weight_y = static_cast<uint8_t>(y_weights);
+ di.weight_z = 1;
+}
+
+/**
+ * @brief Create a 3D decimation entry for a block-size and weight-decimation pair.
+ *
+ * @param x_texels The number of texels in the X dimension.
+ * @param y_texels The number of texels in the Y dimension.
+ * @param z_texels The number of texels in the Z dimension.
+ * @param x_weights The number of weights in the X dimension.
+ * @param y_weights The number of weights in the Y dimension.
+ * @param z_weights The number of weights in the Z dimension.
+ * @param[out] di The decimation info structure to populate.
+ * @param[out]      wb                The decimation table init scratch working buffers.
+ */
+static void init_decimation_info_3d(
+ unsigned int x_texels,
+ unsigned int y_texels,
+ unsigned int z_texels,
+ unsigned int x_weights,
+ unsigned int y_weights,
+ unsigned int z_weights,
+ decimation_info& di,
+ dt_init_working_buffers& wb
+) {
+ unsigned int texels_per_block = x_texels * y_texels * z_texels;
+ unsigned int weights_per_block = x_weights * y_weights * z_weights;
+
+ uint8_t max_texel_count_of_weight = 0;
+
+ promise(weights_per_block > 0);
+ promise(texels_per_block > 0);
+
+ for (unsigned int i = 0; i < weights_per_block; i++)
+ {
+ wb.texel_count_of_weight[i] = 0;
+ }
+
+ for (unsigned int i = 0; i < texels_per_block; i++)
+ {
+ wb.weight_count_of_texel[i] = 0;
+ }
+
+ for (unsigned int z = 0; z < z_texels; z++)
+ {
+ for (unsigned int y = 0; y < y_texels; y++)
+ {
+ for (unsigned int x = 0; x < x_texels; x++)
+ {
+ int texel = (z * y_texels + y) * x_texels + x;
+
+ int x_weight = (((1024 + x_texels / 2) / (x_texels - 1)) * x * (x_weights - 1) + 32) >> 6;
+ int y_weight = (((1024 + y_texels / 2) / (y_texels - 1)) * y * (y_weights - 1) + 32) >> 6;
+ int z_weight = (((1024 + z_texels / 2) / (z_texels - 1)) * z * (z_weights - 1) + 32) >> 6;
+
+ int x_weight_frac = x_weight & 0xF;
+ int y_weight_frac = y_weight & 0xF;
+ int z_weight_frac = z_weight & 0xF;
+ int x_weight_int = x_weight >> 4;
+ int y_weight_int = y_weight >> 4;
+ int z_weight_int = z_weight >> 4;
+ int qweight[4];
+ int weight[4];
+ qweight[0] = (z_weight_int * y_weights + y_weight_int) * x_weights + x_weight_int;
+ qweight[3] = ((z_weight_int + 1) * y_weights + (y_weight_int + 1)) * x_weights + (x_weight_int + 1);
+
+ // simplex interpolation
+ int fs = x_weight_frac;
+ int ft = y_weight_frac;
+ int fp = z_weight_frac;
+
+ int cas = ((fs > ft) << 2) + ((ft > fp) << 1) + ((fs > fp));
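+				// cas encodes the ordering of the fractional offsets: bit 2 = fs > ft,
+				// bit 1 = ft > fp, bit 0 = fs > fp. The s1/s2 strides then step first along
+				// the axis with the largest fraction, then the next largest, and the four
+				// simplex weights always sum to 16 as in the 2D bilinear case.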
+ int N = x_weights;
+ int NM = x_weights * y_weights;
+
+ int s1, s2, w0, w1, w2, w3;
+ switch (cas)
+ {
+ case 7:
+ s1 = 1;
+ s2 = N;
+ w0 = 16 - fs;
+ w1 = fs - ft;
+ w2 = ft - fp;
+ w3 = fp;
+ break;
+ case 3:
+ s1 = N;
+ s2 = 1;
+ w0 = 16 - ft;
+ w1 = ft - fs;
+ w2 = fs - fp;
+ w3 = fp;
+ break;
+ case 5:
+ s1 = 1;
+ s2 = NM;
+ w0 = 16 - fs;
+ w1 = fs - fp;
+ w2 = fp - ft;
+ w3 = ft;
+ break;
+ case 4:
+ s1 = NM;
+ s2 = 1;
+ w0 = 16 - fp;
+ w1 = fp - fs;
+ w2 = fs - ft;
+ w3 = ft;
+ break;
+ case 2:
+ s1 = N;
+ s2 = NM;
+ w0 = 16 - ft;
+ w1 = ft - fp;
+ w2 = fp - fs;
+ w3 = fs;
+ break;
+ case 0:
+ s1 = NM;
+ s2 = N;
+ w0 = 16 - fp;
+ w1 = fp - ft;
+ w2 = ft - fs;
+ w3 = fs;
+ break;
+ default:
+ s1 = NM;
+ s2 = N;
+ w0 = 16 - fp;
+ w1 = fp - ft;
+ w2 = ft - fs;
+ w3 = fs;
+ break;
+ }
+
+ qweight[1] = qweight[0] + s1;
+ qweight[2] = qweight[1] + s2;
+ weight[0] = w0;
+ weight[1] = w1;
+ weight[2] = w2;
+ weight[3] = w3;
+
+ for (unsigned int i = 0; i < 4; i++)
+ {
+ if (weight[i] != 0)
+ {
+ wb.grid_weights_of_texel[texel][wb.weight_count_of_texel[texel]] = static_cast<uint8_t>(qweight[i]);
+ wb.weights_of_texel[texel][wb.weight_count_of_texel[texel]] = static_cast<uint8_t>(weight[i]);
+ wb.weight_count_of_texel[texel]++;
+ wb.texels_of_weight[qweight[i]][wb.texel_count_of_weight[qweight[i]]] = static_cast<uint8_t>(texel);
+ wb.texel_weights_of_weight[qweight[i]][wb.texel_count_of_weight[qweight[i]]] = static_cast<uint8_t>(weight[i]);
+ wb.texel_count_of_weight[qweight[i]]++;
+ max_texel_count_of_weight = astc::max(max_texel_count_of_weight, wb.texel_count_of_weight[qweight[i]]);
+ }
+ }
+ }
+ }
+ }
+
+ uint8_t max_texel_weight_count = 0;
+ for (unsigned int i = 0; i < texels_per_block; i++)
+ {
+ di.texel_weight_count[i] = wb.weight_count_of_texel[i];
+ max_texel_weight_count = astc::max(max_texel_weight_count, di.texel_weight_count[i]);
+
+ // Init all 4 entries so we can rely on zeros for vectorization
+ for (unsigned int j = 0; j < 4; j++)
+ {
+ di.texel_weight_contribs_int_tr[j][i] = 0;
+ di.texel_weight_contribs_float_tr[j][i] = 0.0f;
+ di.texel_weights_tr[j][i] = 0;
+ }
+
+ for (unsigned int j = 0; j < wb.weight_count_of_texel[i]; j++)
+ {
+ di.texel_weight_contribs_int_tr[j][i] = wb.weights_of_texel[i][j];
+ di.texel_weight_contribs_float_tr[j][i] = static_cast<float>(wb.weights_of_texel[i][j]) * (1.0f / WEIGHTS_TEXEL_SUM);
+ di.texel_weights_tr[j][i] = wb.grid_weights_of_texel[i][j];
+ }
+ }
+
+ di.max_texel_weight_count = max_texel_weight_count;
+
+ for (unsigned int i = 0; i < weights_per_block; i++)
+ {
+ unsigned int texel_count_wt = wb.texel_count_of_weight[i];
+ di.weight_texel_count[i] = static_cast<uint8_t>(texel_count_wt);
+
+ for (unsigned int j = 0; j < texel_count_wt; j++)
+ {
+ unsigned int texel = wb.texels_of_weight[i][j];
+
+ // Create transposed versions of these for better vectorization
+ di.weight_texels_tr[j][i] = static_cast<uint8_t>(texel);
+ di.weights_texel_contribs_tr[j][i] = static_cast<float>(wb.texel_weights_of_weight[i][j]);
+
+ // Store the per-texel contribution of this weight for each texel it contributes to
+ di.texel_contrib_for_weight[j][i] = 0.0f;
+ for (unsigned int k = 0; k < 4; k++)
+ {
+ uint8_t dttw = di.texel_weights_tr[k][texel];
+ float dttwf = di.texel_weight_contribs_float_tr[k][texel];
+ if (dttw == i && dttwf != 0.0f)
+ {
+ di.texel_contrib_for_weight[j][i] = di.texel_weight_contribs_float_tr[k][texel];
+ break;
+ }
+ }
+ }
+
+ // Initialize array tail so we can over-fetch with SIMD later to avoid loop tails
+ // Match last texel in active lane in SIMD group, for better gathers
+ uint8_t last_texel = di.weight_texels_tr[texel_count_wt - 1][i];
+ for (unsigned int j = texel_count_wt; j < max_texel_count_of_weight; j++)
+ {
+ di.weight_texels_tr[j][i] = last_texel;
+ di.weights_texel_contribs_tr[j][i] = 0.0f;
+ }
+ }
+
+ // Initialize array tail so we can over-fetch with SIMD later to avoid loop tails
+ unsigned int texels_per_block_simd = round_up_to_simd_multiple_vla(texels_per_block);
+ for (unsigned int i = texels_per_block; i < texels_per_block_simd; i++)
+ {
+ di.texel_weight_count[i] = 0;
+
+ for (unsigned int j = 0; j < 4; j++)
+ {
+ di.texel_weight_contribs_float_tr[j][i] = 0;
+ di.texel_weights_tr[j][i] = 0;
+ di.texel_weight_contribs_int_tr[j][i] = 0;
+ }
+ }
+
+ // Initialize array tail so we can over-fetch with SIMD later to avoid loop tails
+ // Match last texel in active lane in SIMD group, for better gathers
+ int last_texel_count_wt = wb.texel_count_of_weight[weights_per_block - 1];
+ uint8_t last_texel = di.weight_texels_tr[last_texel_count_wt - 1][weights_per_block - 1];
+
+ unsigned int weights_per_block_simd = round_up_to_simd_multiple_vla(weights_per_block);
+ for (unsigned int i = weights_per_block; i < weights_per_block_simd; i++)
+ {
+ di.weight_texel_count[i] = 0;
+
+ for (int j = 0; j < max_texel_count_of_weight; j++)
+ {
+ di.weight_texels_tr[j][i] = last_texel;
+ di.weights_texel_contribs_tr[j][i] = 0.0f;
+ }
+ }
+
+ di.texel_count = static_cast<uint8_t>(texels_per_block);
+ di.weight_count = static_cast<uint8_t>(weights_per_block);
+ di.weight_x = static_cast<uint8_t>(x_weights);
+ di.weight_y = static_cast<uint8_t>(y_weights);
+ di.weight_z = static_cast<uint8_t>(z_weights);
+}
+
+/**
+ * @brief Assign the texels to use for kmeans clustering.
+ *
+ * The max limit is @c BLOCK_MAX_KMEANS_TEXELS; above this a random selection is used.
+ * The @c bsd.texel_count is an input and must be populated beforehand.
+ *
+ * @param[in,out] bsd The block size descriptor to populate.
+ */
+static void assign_kmeans_texels(
+ block_size_descriptor& bsd
+) {
+ // Use all texels for kmeans on a small block
+ if (bsd.texel_count <= BLOCK_MAX_KMEANS_TEXELS)
+ {
+ for (uint8_t i = 0; i < bsd.texel_count; i++)
+ {
+ bsd.kmeans_texels[i] = i;
+ }
+
+ return;
+ }
+
+ // Select a random subset of BLOCK_MAX_KMEANS_TEXELS for kmeans on a large block
+ uint64_t rng_state[2];
+ astc::rand_init(rng_state);
+
+ // Initialize array used for tracking used indices
+ bool seen[BLOCK_MAX_TEXELS];
+ for (uint8_t i = 0; i < bsd.texel_count; i++)
+ {
+ seen[i] = false;
+ }
+
+ // Assign 64 random indices, retrying if we see repeats
+ unsigned int arr_elements_set = 0;
+ while (arr_elements_set < BLOCK_MAX_KMEANS_TEXELS)
+ {
+ uint8_t texel = static_cast<uint8_t>(astc::rand(rng_state));
+ texel = texel % bsd.texel_count;
+ if (!seen[texel])
+ {
+ bsd.kmeans_texels[arr_elements_set++] = texel;
+ seen[texel] = true;
+ }
+ }
+}
+
+/**
+ * @brief Allocate a single 2D decimation table entry.
+ *
+ * @param x_texels The number of texels in the X dimension.
+ * @param y_texels The number of texels in the Y dimension.
+ * @param x_weights The number of weights in the X dimension.
+ * @param y_weights The number of weights in the Y dimension.
+ * @param bsd The block size descriptor we are populating.
+ * @param wb The decimation table init scratch working buffers.
+ * @param index The packed array index to populate.
+ */
+static void construct_dt_entry_2d(
+ unsigned int x_texels,
+ unsigned int y_texels,
+ unsigned int x_weights,
+ unsigned int y_weights,
+ block_size_descriptor& bsd,
+ dt_init_working_buffers& wb,
+ unsigned int index
+) {
+ unsigned int weight_count = x_weights * y_weights;
+ assert(weight_count <= BLOCK_MAX_WEIGHTS);
+
+ bool try_2planes = (2 * weight_count) <= BLOCK_MAX_WEIGHTS;
+
+ decimation_info& di = bsd.decimation_tables[index];
+ init_decimation_info_2d(x_texels, y_texels, x_weights, y_weights, di, wb);
+
+ int maxprec_1plane = -1;
+ int maxprec_2planes = -1;
+ for (int i = 0; i < 12; i++)
+ {
+ unsigned int bits_1plane = get_ise_sequence_bitcount(weight_count, static_cast<quant_method>(i));
+ if (bits_1plane >= BLOCK_MIN_WEIGHT_BITS && bits_1plane <= BLOCK_MAX_WEIGHT_BITS)
+ {
+ maxprec_1plane = i;
+ }
+
+ if (try_2planes)
+ {
+ unsigned int bits_2planes = get_ise_sequence_bitcount(2 * weight_count, static_cast<quant_method>(i));
+ if (bits_2planes >= BLOCK_MIN_WEIGHT_BITS && bits_2planes <= BLOCK_MAX_WEIGHT_BITS)
+ {
+ maxprec_2planes = i;
+ }
+ }
+ }
+
+ // At least one of the two should be valid ...
+ assert(maxprec_1plane >= 0 || maxprec_2planes >= 0);
+ bsd.decimation_modes[index].maxprec_1plane = static_cast<int8_t>(maxprec_1plane);
+ bsd.decimation_modes[index].maxprec_2planes = static_cast<int8_t>(maxprec_2planes);
+ bsd.decimation_modes[index].refprec_1_plane = 0;
+ bsd.decimation_modes[index].refprec_2_planes = 0;
+}
+
+/**
+ * @brief Allocate block modes and decimation tables for a single 2D block size.
+ *
+ * @param x_texels The number of texels in the X dimension.
+ * @param y_texels The number of texels in the Y dimension.
+ * @param can_omit_modes Can we discard modes that astcenc won't use, even if legal?
+ * @param mode_cutoff Percentile cutoff in range [0,1]. Low values more likely to be used.
+ * @param[out] bsd The block size descriptor to populate.
+ */
+static void construct_block_size_descriptor_2d(
+ unsigned int x_texels,
+ unsigned int y_texels,
+ bool can_omit_modes,
+ float mode_cutoff,
+ block_size_descriptor& bsd
+) {
+	// Store a remap table for packed decimation modes.
+ // Indexing uses [Y * 16 + X] and max size for each axis is 12.
+ static const unsigned int MAX_DMI = 12 * 16 + 12;
+ int decimation_mode_index[MAX_DMI];
+
+ dt_init_working_buffers* wb = new dt_init_working_buffers;
+
+ bsd.xdim = static_cast<uint8_t>(x_texels);
+ bsd.ydim = static_cast<uint8_t>(y_texels);
+ bsd.zdim = 1;
+ bsd.texel_count = static_cast<uint8_t>(x_texels * y_texels);
+
+ for (unsigned int i = 0; i < MAX_DMI; i++)
+ {
+ decimation_mode_index[i] = -1;
+ }
+
+ // Gather all the decimation grids that can be used with the current block
+#if !defined(ASTCENC_DECOMPRESS_ONLY)
+ const float *percentiles = get_2d_percentile_table(x_texels, y_texels);
+ float always_cutoff = 0.0f;
+#else
+ // Unused in decompress-only builds
+ (void)can_omit_modes;
+ (void)mode_cutoff;
+#endif
+
+ // Construct the list of block formats referencing the decimation tables
+ unsigned int packed_bm_idx = 0;
+ unsigned int packed_dm_idx = 0;
+
+ // Trackers
+ unsigned int bm_counts[4] { 0 };
+ unsigned int dm_counts[4] { 0 };
+
+ // Clear the list to a known-bad value
+ for (unsigned int i = 0; i < WEIGHTS_MAX_BLOCK_MODES; i++)
+ {
+ bsd.block_mode_packed_index[i] = BLOCK_BAD_BLOCK_MODE;
+ }
+
+ // Iterate four times to build a usefully ordered list:
+ // - Pass 0 - keep selected single plane "always" block modes
+ // - Pass 1 - keep selected single plane "non-always" block modes
+	//   - Pass 2 - keep selected dual plane block modes
+ // - Pass 3 - keep everything else that's legal
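+	// This ordering means later compression passes can iterate just a prefix of the
+	// packed block_modes array; the prefix lengths are recorded in the
+	// block_mode_count_* fields populated after these loops.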
+ unsigned int limit = can_omit_modes ? 3 : 4;
+ for (unsigned int j = 0; j < limit; j ++)
+ {
+ for (unsigned int i = 0; i < WEIGHTS_MAX_BLOCK_MODES; i++)
+ {
+ // Skip modes we've already included in a previous pass
+ if (bsd.block_mode_packed_index[i] != BLOCK_BAD_BLOCK_MODE)
+ {
+ continue;
+ }
+
+ // Decode parameters
+ unsigned int x_weights;
+ unsigned int y_weights;
+ bool is_dual_plane;
+ unsigned int quant_mode;
+ unsigned int weight_bits;
+ bool valid = decode_block_mode_2d(i, x_weights, y_weights, is_dual_plane, quant_mode, weight_bits);
+
+ // Always skip invalid encodings for the current block size
+ if (!valid || (x_weights > x_texels) || (y_weights > y_texels))
+ {
+ continue;
+ }
+
+ // Selectively skip dual plane encodings
+ if (((j <= 1) && is_dual_plane) || (j == 2 && !is_dual_plane))
+ {
+ continue;
+ }
+
+ // Always skip encodings we can't physically encode based on
+ // generic encoding bit availability
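+			// (A 128-bit block spends 11 bits on the block mode, 2 bits on the partition
+			// count, and 4 bits on the CEM for a single partition, leaving 111 bits; a
+			// dual plane mode also needs 2 bits for the plane 2 channel selector, so 109.)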
+ if (is_dual_plane)
+ {
+				// This is the only check we need, as dual plane modes are only used with a single partition
+ if ((109 - weight_bits) <= 0)
+ {
+ continue;
+ }
+ }
+ else
+ {
+ // This is conservative - fewer bits may be available for > 1 partition
+ if ((111 - weight_bits) <= 0)
+ {
+ continue;
+ }
+ }
+
+ // Selectively skip encodings based on percentile
+ bool percentile_hit = false;
+ #if !defined(ASTCENC_DECOMPRESS_ONLY)
+ if (j == 0)
+ {
+ percentile_hit = percentiles[i] <= always_cutoff;
+ }
+ else
+ {
+ percentile_hit = percentiles[i] <= mode_cutoff;
+ }
+ #endif
+
+ if (j != 3 && !percentile_hit)
+ {
+ continue;
+ }
+
+ // Allocate and initialize the decimation table entry if we've not used it yet
+ int decimation_mode = decimation_mode_index[y_weights * 16 + x_weights];
+ if (decimation_mode < 0)
+ {
+ construct_dt_entry_2d(x_texels, y_texels, x_weights, y_weights, bsd, *wb, packed_dm_idx);
+ decimation_mode_index[y_weights * 16 + x_weights] = packed_dm_idx;
+ decimation_mode = packed_dm_idx;
+
+ dm_counts[j]++;
+ packed_dm_idx++;
+ }
+
+ auto& bm = bsd.block_modes[packed_bm_idx];
+
+ bm.decimation_mode = static_cast<uint8_t>(decimation_mode);
+ bm.quant_mode = static_cast<uint8_t>(quant_mode);
+ bm.is_dual_plane = static_cast<uint8_t>(is_dual_plane);
+ bm.weight_bits = static_cast<uint8_t>(weight_bits);
+ bm.mode_index = static_cast<uint16_t>(i);
+
+ auto& dm = bsd.decimation_modes[decimation_mode];
+
+ if (is_dual_plane)
+ {
+ dm.set_ref_2_plane(bm.get_weight_quant_mode());
+ }
+ else
+ {
+ dm.set_ref_1_plane(bm.get_weight_quant_mode());
+ }
+
+ bsd.block_mode_packed_index[i] = static_cast<uint16_t>(packed_bm_idx);
+
+ packed_bm_idx++;
+ bm_counts[j]++;
+ }
+ }
+
+ bsd.block_mode_count_1plane_always = bm_counts[0];
+ bsd.block_mode_count_1plane_selected = bm_counts[0] + bm_counts[1];
+ bsd.block_mode_count_1plane_2plane_selected = bm_counts[0] + bm_counts[1] + bm_counts[2];
+ bsd.block_mode_count_all = bm_counts[0] + bm_counts[1] + bm_counts[2] + bm_counts[3];
+
+ bsd.decimation_mode_count_always = dm_counts[0];
+ bsd.decimation_mode_count_selected = dm_counts[0] + dm_counts[1] + dm_counts[2];
+ bsd.decimation_mode_count_all = dm_counts[0] + dm_counts[1] + dm_counts[2] + dm_counts[3];
+
+#if !defined(ASTCENC_DECOMPRESS_ONLY)
+ assert(bsd.block_mode_count_1plane_always > 0);
+ assert(bsd.decimation_mode_count_always > 0);
+
+ delete[] percentiles;
+#endif
+
+ // Ensure the end of the array contains valid data (should never get read)
+ for (unsigned int i = bsd.decimation_mode_count_all; i < WEIGHTS_MAX_DECIMATION_MODES; i++)
+ {
+ bsd.decimation_modes[i].maxprec_1plane = -1;
+ bsd.decimation_modes[i].maxprec_2planes = -1;
+ bsd.decimation_modes[i].refprec_1_plane = 0;
+ bsd.decimation_modes[i].refprec_2_planes = 0;
+ }
+
+ // Determine the texels to use for kmeans clustering.
+ assign_kmeans_texels(bsd);
+
+ delete wb;
+}
+
+/**
+ * @brief Allocate block modes and decimation tables for a single 3D block size.
+ *
+ * TODO: This function doesn't include all of the heuristics that we use for 2D block sizes such as
+ * the percentile mode cutoffs. If 3D becomes more widely used we should look at this.
+ *
+ * @param x_texels The number of texels in the X dimension.
+ * @param y_texels The number of texels in the Y dimension.
+ * @param z_texels The number of texels in the Z dimension.
+ * @param[out] bsd The block size descriptor to populate.
+ */
+static void construct_block_size_descriptor_3d(
+ unsigned int x_texels,
+ unsigned int y_texels,
+ unsigned int z_texels,
+ block_size_descriptor& bsd
+) {
+	// Store a remap table for packed decimation modes.
+ // Indexing uses [Z * 64 + Y * 8 + X] and max size for each axis is 6.
+ static constexpr unsigned int MAX_DMI = 6 * 64 + 6 * 8 + 6;
+ int decimation_mode_index[MAX_DMI];
+ unsigned int decimation_mode_count = 0;
+
+ dt_init_working_buffers* wb = new dt_init_working_buffers;
+
+ bsd.xdim = static_cast<uint8_t>(x_texels);
+ bsd.ydim = static_cast<uint8_t>(y_texels);
+ bsd.zdim = static_cast<uint8_t>(z_texels);
+ bsd.texel_count = static_cast<uint8_t>(x_texels * y_texels * z_texels);
+
+ for (unsigned int i = 0; i < MAX_DMI; i++)
+ {
+ decimation_mode_index[i] = -1;
+ }
+
+	// Gather all the infill modes that can be used with the current block size
+ for (unsigned int x_weights = 2; x_weights <= x_texels; x_weights++)
+ {
+ for (unsigned int y_weights = 2; y_weights <= y_texels; y_weights++)
+ {
+ for (unsigned int z_weights = 2; z_weights <= z_texels; z_weights++)
+ {
+ unsigned int weight_count = x_weights * y_weights * z_weights;
+ if (weight_count > BLOCK_MAX_WEIGHTS)
+ {
+ continue;
+ }
+
+ decimation_info& di = bsd.decimation_tables[decimation_mode_count];
+ decimation_mode_index[z_weights * 64 + y_weights * 8 + x_weights] = decimation_mode_count;
+ init_decimation_info_3d(x_texels, y_texels, z_texels, x_weights, y_weights, z_weights, di, *wb);
+
+ int maxprec_1plane = -1;
+ int maxprec_2planes = -1;
+ for (unsigned int i = 0; i < 12; i++)
+ {
+ unsigned int bits_1plane = get_ise_sequence_bitcount(weight_count, static_cast<quant_method>(i));
+ if (bits_1plane >= BLOCK_MIN_WEIGHT_BITS && bits_1plane <= BLOCK_MAX_WEIGHT_BITS)
+ {
+ maxprec_1plane = i;
+ }
+
+ unsigned int bits_2planes = get_ise_sequence_bitcount(2 * weight_count, static_cast<quant_method>(i));
+ if (bits_2planes >= BLOCK_MIN_WEIGHT_BITS && bits_2planes <= BLOCK_MAX_WEIGHT_BITS)
+ {
+ maxprec_2planes = i;
+ }
+ }
+
+ if ((2 * weight_count) > BLOCK_MAX_WEIGHTS)
+ {
+ maxprec_2planes = -1;
+ }
+
+ bsd.decimation_modes[decimation_mode_count].maxprec_1plane = static_cast<int8_t>(maxprec_1plane);
+ bsd.decimation_modes[decimation_mode_count].maxprec_2planes = static_cast<int8_t>(maxprec_2planes);
+ bsd.decimation_modes[decimation_mode_count].refprec_1_plane = maxprec_1plane == -1 ? 0 : 0xFFFF;
+ bsd.decimation_modes[decimation_mode_count].refprec_2_planes = maxprec_2planes == -1 ? 0 : 0xFFFF;
+ decimation_mode_count++;
+ }
+ }
+ }
+
+ // Ensure the end of the array contains valid data (should never get read)
+ for (unsigned int i = decimation_mode_count; i < WEIGHTS_MAX_DECIMATION_MODES; i++)
+ {
+ bsd.decimation_modes[i].maxprec_1plane = -1;
+ bsd.decimation_modes[i].maxprec_2planes = -1;
+ bsd.decimation_modes[i].refprec_1_plane = 0;
+ bsd.decimation_modes[i].refprec_2_planes = 0;
+ }
+
+ bsd.decimation_mode_count_always = 0; // Skipped for 3D modes
+ bsd.decimation_mode_count_selected = decimation_mode_count;
+ bsd.decimation_mode_count_all = decimation_mode_count;
+
+ // Construct the list of block formats referencing the decimation tables
+
+ // Clear the list to a known-bad value
+ for (unsigned int i = 0; i < WEIGHTS_MAX_BLOCK_MODES; i++)
+ {
+ bsd.block_mode_packed_index[i] = BLOCK_BAD_BLOCK_MODE;
+ }
+
+ unsigned int packed_idx = 0;
+ unsigned int bm_counts[2] { 0 };
+
+ // Iterate two times to build a usefully ordered list:
+ // - Pass 0 - keep valid single plane block modes
+ // - Pass 1 - keep valid dual plane block modes
+ for (unsigned int j = 0; j < 2; j++)
+ {
+ for (unsigned int i = 0; i < WEIGHTS_MAX_BLOCK_MODES; i++)
+ {
+ // Skip modes we've already included in a previous pass
+ if (bsd.block_mode_packed_index[i] != BLOCK_BAD_BLOCK_MODE)
+ {
+ continue;
+ }
+
+ unsigned int x_weights;
+ unsigned int y_weights;
+ unsigned int z_weights;
+ bool is_dual_plane;
+ unsigned int quant_mode;
+ unsigned int weight_bits;
+
+ bool valid = decode_block_mode_3d(i, x_weights, y_weights, z_weights, is_dual_plane, quant_mode, weight_bits);
+ // Skip invalid encodings
+ if (!valid || x_weights > x_texels || y_weights > y_texels || z_weights > z_texels)
+ {
+ continue;
+ }
+
+ // Skip encodings in the wrong iteration
+ if ((j == 0 && is_dual_plane) || (j == 1 && !is_dual_plane))
+ {
+ continue;
+ }
+
+ // Always skip encodings we can't physically encode based on bit availability
+ if (is_dual_plane)
+ {
+				// This is the only check we need, as dual plane modes are only used with a single partition
+ if ((109 - weight_bits) <= 0)
+ {
+ continue;
+ }
+ }
+ else
+ {
+ // This is conservative - fewer bits may be available for > 1 partition
+ if ((111 - weight_bits) <= 0)
+ {
+ continue;
+ }
+ }
+
+ int decimation_mode = decimation_mode_index[z_weights * 64 + y_weights * 8 + x_weights];
+ bsd.block_modes[packed_idx].decimation_mode = static_cast<uint8_t>(decimation_mode);
+ bsd.block_modes[packed_idx].quant_mode = static_cast<uint8_t>(quant_mode);
+ bsd.block_modes[packed_idx].weight_bits = static_cast<uint8_t>(weight_bits);
+ bsd.block_modes[packed_idx].is_dual_plane = static_cast<uint8_t>(is_dual_plane);
+ bsd.block_modes[packed_idx].mode_index = static_cast<uint16_t>(i);
+
+ bsd.block_mode_packed_index[i] = static_cast<uint16_t>(packed_idx);
+ bm_counts[j]++;
+ packed_idx++;
+ }
+ }
+
+ bsd.block_mode_count_1plane_always = 0; // Skipped for 3D modes
+ bsd.block_mode_count_1plane_selected = bm_counts[0];
+ bsd.block_mode_count_1plane_2plane_selected = bm_counts[0] + bm_counts[1];
+ bsd.block_mode_count_all = bm_counts[0] + bm_counts[1];
+
+ // Determine the texels to use for kmeans clustering.
+ assign_kmeans_texels(bsd);
+
+ delete wb;
+}
+
+/* See header for documentation. */
+void init_block_size_descriptor(
+ unsigned int x_texels,
+ unsigned int y_texels,
+ unsigned int z_texels,
+ bool can_omit_modes,
+ unsigned int partition_count_cutoff,
+ float mode_cutoff,
+ block_size_descriptor& bsd
+) {
+ if (z_texels > 1)
+ {
+ construct_block_size_descriptor_3d(x_texels, y_texels, z_texels, bsd);
+ }
+ else
+ {
+ construct_block_size_descriptor_2d(x_texels, y_texels, can_omit_modes, mode_cutoff, bsd);
+ }
+
+ init_partition_tables(bsd, can_omit_modes, partition_count_cutoff);
+}
diff --git a/thirdparty/astcenc/astcenc_color_quantize.cpp b/thirdparty/astcenc/astcenc_color_quantize.cpp
new file mode 100644
index 0000000000..edcfe4f853
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_color_quantize.cpp
@@ -0,0 +1,2071 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2011-2021 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+#if !defined(ASTCENC_DECOMPRESS_ONLY)
+
+/**
+ * @brief Functions for color quantization.
+ *
+ * The design of the color quantization functionality requires the caller to use higher level error
+ * analysis to determine the base encoding that should be used. This earlier analysis will select
+ * the basic type of the endpoint that should be used:
+ *
+ * * Mode: LDR or HDR
+ * * Quantization level
+ * * Channel count: L, LA, RGB, or RGBA
+ * * Endpoint 2 type: Direct color encoding, or scaled from endpoint 1.
+ *
+ * However, this leaves a number of decisions about exactly how to pack the endpoints open. In
+ * particular we need to determine if blue contraction and/or delta encoding can be used. If
+ * they can be applied, these allow us to maintain higher precision in the endpoints
+ * without needing additional storage.
+ */
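+
+// For illustration only: once the earlier analysis has picked a format and quantization level,
+// packing a single endpoint pair reduces to one call into this file. The names ep0, ep1,
+// rgbs_vec and rgbo_vec below are placeholders for values produced by that analysis:
+//
+//     uint8_t packed[8];
+//     uint8_t actual_fmt = pack_color_endpoints(
+//         ep0, ep1, rgbs_vec, rgbo_vec, FMT_RGB, packed, QUANT_32);
+//
+// Note that the returned format may differ from the requested one; for example FMT_RGB is
+// upgraded to FMT_RGB_DELTA when one of the delta packings in this file succeeds.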
+
+#include <stdio.h>
+#include <assert.h>
+
+#include "astcenc_internal.h"
+
+/**
+ * @brief Determine the quantized value given a quantization level.
+ *
+ * @param quant_level The quantization level to use.
+ * @param value The value to convert. This may be outside of the 0-255 range and will be
+ * clamped before the value is looked up.
+ *
+ * @return The encoded quantized value. These are not necessarily in order; the compressor
+ * scrambles the values slightly to make hardware implementation easier.
+ */
+static inline uint8_t quant_color(
+ quant_method quant_level,
+ int value
+) {
+ return color_unquant_to_uquant_tables[quant_level - QUANT_6][value];
+}
+
+/**
+ * @brief Quantize an LDR RGB color.
+ *
+ * Since this is a fall-back encoding, we cannot actually fail but must produce a sensible result.
+ * For this encoding @c color0 cannot be larger than @c color1. If @c color0 is actually larger
+ * than @c color1, @c color0 is reduced and @c color1 is increased until the constraint is met.
+ *
+ * @param color0 The input unquantized color0 endpoint.
+ * @param color1 The input unquantized color1 endpoint.
+ * @param[out] output The output endpoints, returned as (r0, r1, g0, g1, b0, b1).
+ * @param quant_level The quantization level to use.
+ */
+static void quantize_rgb(
+ vfloat4 color0,
+ vfloat4 color1,
+ uint8_t output[6],
+ quant_method quant_level
+) {
+ float scale = 1.0f / 257.0f;
+
+ float r0 = astc::clamp255f(color0.lane<0>() * scale);
+ float g0 = astc::clamp255f(color0.lane<1>() * scale);
+ float b0 = astc::clamp255f(color0.lane<2>() * scale);
+
+ float r1 = astc::clamp255f(color1.lane<0>() * scale);
+ float g1 = astc::clamp255f(color1.lane<1>() * scale);
+ float b1 = astc::clamp255f(color1.lane<2>() * scale);
+
+ int ri0, gi0, bi0, ri1, gi1, bi1;
+ float rgb0_addon = 0.5f;
+ float rgb1_addon = 0.5f;
+ do
+ {
+ ri0 = quant_color(quant_level, astc::max(astc::flt2int_rd(r0 + rgb0_addon), 0));
+ gi0 = quant_color(quant_level, astc::max(astc::flt2int_rd(g0 + rgb0_addon), 0));
+ bi0 = quant_color(quant_level, astc::max(astc::flt2int_rd(b0 + rgb0_addon), 0));
+ ri1 = quant_color(quant_level, astc::min(astc::flt2int_rd(r1 + rgb1_addon), 255));
+ gi1 = quant_color(quant_level, astc::min(astc::flt2int_rd(g1 + rgb1_addon), 255));
+ bi1 = quant_color(quant_level, astc::min(astc::flt2int_rd(b1 + rgb1_addon), 255));
+
+ rgb0_addon -= 0.2f;
+ rgb1_addon += 0.2f;
+ } while (ri0 + gi0 + bi0 > ri1 + gi1 + bi1);
+
+ output[0] = static_cast<uint8_t>(ri0);
+ output[1] = static_cast<uint8_t>(ri1);
+ output[2] = static_cast<uint8_t>(gi0);
+ output[3] = static_cast<uint8_t>(gi1);
+ output[4] = static_cast<uint8_t>(bi0);
+ output[5] = static_cast<uint8_t>(bi1);
+}
+
+/**
+ * @brief Quantize an LDR RGBA color.
+ *
+ * Since this is a fall-back encoding, we cannot actually fail but must produce a sensible result.
+ * For this encoding @c color0.rgb cannot be larger than @c color1.rgb (this indicates blue
+ * contraction). If @c color0.rgb is actually larger than @c color1.rgb, @c color0.rgb is reduced
+ * and @c color1.rgb is increased until the constraint is met.
+ *
+ * @param color0 The input unquantized color0 endpoint.
+ * @param color1 The input unquantized color1 endpoint.
+ * @param[out] output The output endpoints, returned as (r0, r1, g0, g1, b0, b1, a0, a1).
+ * @param quant_level The quantization level to use.
+ */
+static void quantize_rgba(
+ vfloat4 color0,
+ vfloat4 color1,
+ uint8_t output[8],
+ quant_method quant_level
+) {
+ float scale = 1.0f / 257.0f;
+
+ float a0 = astc::clamp255f(color0.lane<3>() * scale);
+ float a1 = astc::clamp255f(color1.lane<3>() * scale);
+
+ output[6] = quant_color(quant_level, astc::flt2int_rtn(a0));
+ output[7] = quant_color(quant_level, astc::flt2int_rtn(a1));
+
+ quantize_rgb(color0, color1, output, quant_level);
+}
+
+/**
+ * @brief Try to quantize an LDR RGB color using blue-contraction.
+ *
+ * Blue-contraction is only usable if encoded color 1 is larger than color 0.
+ *
+ * @param color0 The input unquantized color0 endpoint.
+ * @param color1 The input unquantized color1 endpoint.
+ * @param[out] output The output endpoints, returned as (r1, r0, g1, g0, b1, b0).
+ * @param quant_level The quantization level to use.
+ *
+ * @return Returns @c false on failure, @c true on success.
+ */
+static bool try_quantize_rgb_blue_contract(
+ vfloat4 color0,
+ vfloat4 color1,
+ uint8_t output[6],
+ quant_method quant_level
+) {
+ float scale = 1.0f / 257.0f;
+
+ float r0 = color0.lane<0>() * scale;
+ float g0 = color0.lane<1>() * scale;
+ float b0 = color0.lane<2>() * scale;
+
+ float r1 = color1.lane<0>() * scale;
+ float g1 = color1.lane<1>() * scale;
+ float b1 = color1.lane<2>() * scale;
+
+ // Apply inverse blue-contraction. This can produce an overflow, which means BC cannot be used.
+ r0 += (r0 - b0);
+ g0 += (g0 - b0);
+ r1 += (r1 - b1);
+ g1 += (g1 - b1);
+
+ if (r0 < 0.0f || r0 > 255.0f || g0 < 0.0f || g0 > 255.0f || b0 < 0.0f || b0 > 255.0f ||
+ r1 < 0.0f || r1 > 255.0f || g1 < 0.0f || g1 > 255.0f || b1 < 0.0f || b1 > 255.0f)
+ {
+ return false;
+ }
+
+ // Quantize the inverse-blue-contracted color
+ int ri0 = quant_color(quant_level, astc::flt2int_rtn(r0));
+ int gi0 = quant_color(quant_level, astc::flt2int_rtn(g0));
+ int bi0 = quant_color(quant_level, astc::flt2int_rtn(b0));
+
+ int ri1 = quant_color(quant_level, astc::flt2int_rtn(r1));
+ int gi1 = quant_color(quant_level, astc::flt2int_rtn(g1));
+ int bi1 = quant_color(quant_level, astc::flt2int_rtn(b1));
+
+ // If color #1 is not larger than color #0 then blue-contraction cannot be used. Note that
+ // blue-contraction and quantization change this order, which is why we must test afterwards.
+ if (ri1 + gi1 + bi1 <= ri0 + gi0 + bi0)
+ {
+ return false;
+ }
+
+ output[0] = static_cast<uint8_t>(ri1);
+ output[1] = static_cast<uint8_t>(ri0);
+ output[2] = static_cast<uint8_t>(gi1);
+ output[3] = static_cast<uint8_t>(gi0);
+ output[4] = static_cast<uint8_t>(bi1);
+ output[5] = static_cast<uint8_t>(bi0);
+
+ return true;
+}
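+
+// A small worked note on the transform above: the encoder stores (2r - b, 2g - b, b), and the
+// decoder undoes this by averaging each stored value with blue, e.g. ((2r - b) + b) >> 1 == r.
+// Because the decode averages, any quantization error left in the stored red and green values
+// is roughly halved after decoding, which is why blue contraction is worth attempting whenever
+// the expanded values still fit in the 0..255 range.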
+
+/**
+ * @brief Try to quantize an LDR RGBA color using blue-contraction.
+ *
+ * Blue-contraction is only usable if encoded color 1 RGB is larger than color 0 RGB.
+ *
+ * @param color0 The input unquantized color0 endpoint.
+ * @param color1 The input unquantized color1 endpoint.
+ * @param[out] output The output endpoints, returned as (r1, r0, g1, g0, b1, b0, a1, a0).
+ * @param quant_level The quantization level to use.
+ *
+ * @return Returns @c false on failure, @c true on success.
+ */
+static bool try_quantize_rgba_blue_contract(
+ vfloat4 color0,
+ vfloat4 color1,
+ uint8_t output[8],
+ quant_method quant_level
+) {
+ float scale = 1.0f / 257.0f;
+
+ float a0 = astc::clamp255f(color0.lane<3>() * scale);
+ float a1 = astc::clamp255f(color1.lane<3>() * scale);
+
+ output[6] = quant_color(quant_level, astc::flt2int_rtn(a1));
+ output[7] = quant_color(quant_level, astc::flt2int_rtn(a0));
+
+ return try_quantize_rgb_blue_contract(color0, color1, output, quant_level);
+}
+
+/**
+ * @brief Try to quantize an LDR RGB color using delta encoding.
+ *
+ * At decode time we move one bit from the offset to the base and seize another bit as a sign bit;
+ * we then unquantize both values as if they contain one extra bit. If the sum of the offsets is
+ * non-negative, then we encode a regular delta.
+ *
+ * @param color0 The input unquantized color0 endpoint.
+ * @param color1 The input unquantized color1 endpoint.
+ * @param[out] output The output endpoints, returned as (r0, r1, g0, g1, b0, b1).
+ * @param quant_level The quantization level to use.
+ *
+ * @return Returns @c false on failure, @c true on success.
+ */
+static bool try_quantize_rgb_delta(
+ vfloat4 color0,
+ vfloat4 color1,
+ uint8_t output[6],
+ quant_method quant_level
+) {
+ float scale = 1.0f / 257.0f;
+
+ float r0 = astc::clamp255f(color0.lane<0>() * scale);
+ float g0 = astc::clamp255f(color0.lane<1>() * scale);
+ float b0 = astc::clamp255f(color0.lane<2>() * scale);
+
+ float r1 = astc::clamp255f(color1.lane<0>() * scale);
+ float g1 = astc::clamp255f(color1.lane<1>() * scale);
+ float b1 = astc::clamp255f(color1.lane<2>() * scale);
+
+ // Transform r0 to unorm9
+ int r0a = astc::flt2int_rtn(r0);
+ int g0a = astc::flt2int_rtn(g0);
+ int b0a = astc::flt2int_rtn(b0);
+
+ r0a <<= 1;
+ g0a <<= 1;
+ b0a <<= 1;
+
+ // Mask off the top bit
+ int r0b = r0a & 0xFF;
+ int g0b = g0a & 0xFF;
+ int b0b = b0a & 0xFF;
+
+ // Quantize then unquantize in order to get a value that we take differences against
+ int r0be = quant_color(quant_level, r0b);
+ int g0be = quant_color(quant_level, g0b);
+ int b0be = quant_color(quant_level, b0b);
+
+ r0b = r0be | (r0a & 0x100);
+ g0b = g0be | (g0a & 0x100);
+ b0b = b0be | (b0a & 0x100);
+
+ // Get hold of the second value
+ int r1d = astc::flt2int_rtn(r1);
+ int g1d = astc::flt2int_rtn(g1);
+ int b1d = astc::flt2int_rtn(b1);
+
+ r1d <<= 1;
+ g1d <<= 1;
+ b1d <<= 1;
+
+ // ... and take differences
+ r1d -= r0b;
+ g1d -= g0b;
+ b1d -= b0b;
+
+ // Check if the difference is too large to be encodable
+ if (r1d > 63 || g1d > 63 || b1d > 63 || r1d < -64 || g1d < -64 || b1d < -64)
+ {
+ return false;
+ }
+
+ // Insert top bit of the base into the offset
+ r1d &= 0x7F;
+ g1d &= 0x7F;
+ b1d &= 0x7F;
+
+ r1d |= (r0b & 0x100) >> 1;
+ g1d |= (g0b & 0x100) >> 1;
+ b1d |= (b0b & 0x100) >> 1;
+
+ // Then quantize and unquantize; if this causes either of the top two bits to flip, then
+ // encoding fails since we have corrupted either the top bit of the base or the sign bit of the offset
+ int r1de = quant_color(quant_level, r1d);
+ int g1de = quant_color(quant_level, g1d);
+ int b1de = quant_color(quant_level, b1d);
+
+ if (((r1d ^ r1de) | (g1d ^ g1de) | (b1d ^ b1de)) & 0xC0)
+ {
+ return false;
+ }
+
+ // If the sum of offsets triggers blue-contraction then encoding fails
+ vint4 ep0(r0be, g0be, b0be, 0);
+ vint4 ep1(r1de, g1de, b1de, 0);
+ bit_transfer_signed(ep1, ep0);
+ if (hadd_rgb_s(ep1) < 0)
+ {
+ return false;
+ }
+
+ // Check that the offsets produce legitimate sums as well
+ ep0 = ep0 + ep1;
+ if (any((ep0 < vint4(0)) | (ep0 > vint4(0xFF))))
+ {
+ return false;
+ }
+
+ output[0] = static_cast<uint8_t>(r0be);
+ output[1] = static_cast<uint8_t>(r1de);
+ output[2] = static_cast<uint8_t>(g0be);
+ output[3] = static_cast<uint8_t>(g1de);
+ output[4] = static_cast<uint8_t>(b0be);
+ output[5] = static_cast<uint8_t>(b1de);
+
+ return true;
+}
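+
+// Worked example for the packing above, assuming a quant level that reproduces the base's low
+// byte exactly: a red base of 200 becomes 400 (0x190) in unorm9; its low eight bits (0x90) are
+// quantized and its ninth bit is parked in bit 7 of the offset byte. Pairing it with a second
+// endpoint of 215 gives an offset of 430 - 400 = 30, comfortably inside the signed range
+// [-64, 63] that the range check above enforces.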
+
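+/**
+ * @brief Try to quantize an LDR RGB color using delta and blue-contract encoding.
+ *
+ * Note that the endpoint order is swapped on entry, so the caller passes the endpoints in their
+ * natural order and the stored base corresponds to the original @c color1.
+ *
+ * @param color0 The input unquantized color0 endpoint.
+ * @param color1 The input unquantized color1 endpoint.
+ * @param[out] output The output endpoints, returned as (r0, r1, g0, g1, b0, b1).
+ * @param quant_level The quantization level to use.
+ *
+ * @return Returns @c false on failure, @c true on success.
+ */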
+static bool try_quantize_rgb_delta_blue_contract(
+ vfloat4 color0,
+ vfloat4 color1,
+ uint8_t output[6],
+ quant_method quant_level
+) {
+ // Note: the endpoint colors are deliberately swapped at the start
+ float scale = 1.0f / 257.0f;
+
+ float r1 = color0.lane<0>() * scale;
+ float g1 = color0.lane<1>() * scale;
+ float b1 = color0.lane<2>() * scale;
+
+ float r0 = color1.lane<0>() * scale;
+ float g0 = color1.lane<1>() * scale;
+ float b0 = color1.lane<2>() * scale;
+
+ // Apply inverse blue-contraction. This can produce an overflow, which means BC cannot be used.
+ r0 += (r0 - b0);
+ g0 += (g0 - b0);
+ r1 += (r1 - b1);
+ g1 += (g1 - b1);
+
+ if (r0 < 0.0f || r0 > 255.0f || g0 < 0.0f || g0 > 255.0f || b0 < 0.0f || b0 > 255.0f ||
+ r1 < 0.0f || r1 > 255.0f || g1 < 0.0f || g1 > 255.0f || b1 < 0.0f || b1 > 255.0f)
+ {
+ return false;
+ }
+
+ // Transform r0 to unorm9
+ int r0a = astc::flt2int_rtn(r0);
+ int g0a = astc::flt2int_rtn(g0);
+ int b0a = astc::flt2int_rtn(b0);
+ r0a <<= 1;
+ g0a <<= 1;
+ b0a <<= 1;
+
+ // Mask off the top bit
+ int r0b = r0a & 0xFF;
+ int g0b = g0a & 0xFF;
+ int b0b = b0a & 0xFF;
+
+ // Quantize, then unquantize in order to get a value that we take differences against.
+ int r0be = quant_color(quant_level, r0b);
+ int g0be = quant_color(quant_level, g0b);
+ int b0be = quant_color(quant_level, b0b);
+
+ r0b = r0be | (r0a & 0x100);
+ g0b = g0be | (g0a & 0x100);
+ b0b = b0be | (b0a & 0x100);
+
+ // Get hold of the second value
+ int r1d = astc::flt2int_rtn(r1);
+ int g1d = astc::flt2int_rtn(g1);
+ int b1d = astc::flt2int_rtn(b1);
+
+ r1d <<= 1;
+ g1d <<= 1;
+ b1d <<= 1;
+
+ // .. and take differences!
+ r1d -= r0b;
+ g1d -= g0b;
+ b1d -= b0b;
+
+ // Check if the difference is too large to be encodable
+ if (r1d > 63 || g1d > 63 || b1d > 63 || r1d < -64 || g1d < -64 || b1d < -64)
+ {
+ return false;
+ }
+
+ // Insert top bit of the base into the offset
+ r1d &= 0x7F;
+ g1d &= 0x7F;
+ b1d &= 0x7F;
+
+ r1d |= (r0b & 0x100) >> 1;
+ g1d |= (g0b & 0x100) >> 1;
+ b1d |= (b0b & 0x100) >> 1;
+
+ // Then quantize and unquantize; if this causes any of the top two bits to flip,
+ // then encoding fails, since we have then corrupted either the top bit of the base
+ // or the sign bit of the offset.
+ int r1de = quant_color(quant_level, r1d);
+ int g1de = quant_color(quant_level, g1d);
+ int b1de = quant_color(quant_level, b1d);
+
+ if (((r1d ^ r1de) | (g1d ^ g1de) | (b1d ^ b1de)) & 0xC0)
+ {
+ return false;
+ }
+
+ // If the sum of offsets does not trigger blue-contraction then encoding fails
+ vint4 ep0(r0be, g0be, b0be, 0);
+ vint4 ep1(r1de, g1de, b1de, 0);
+ bit_transfer_signed(ep1, ep0);
+ if (hadd_rgb_s(ep1) >= 0)
+ {
+ return false;
+ }
+
+ // Check that the offsets produce legitimate sums as well
+ ep0 = ep0 + ep1;
+ if (any((ep0 < vint4(0)) | (ep0 > vint4(0xFF))))
+ {
+ return false;
+ }
+
+ output[0] = static_cast<uint8_t>(r0be);
+ output[1] = static_cast<uint8_t>(r1de);
+ output[2] = static_cast<uint8_t>(g0be);
+ output[3] = static_cast<uint8_t>(g1de);
+ output[4] = static_cast<uint8_t>(b0be);
+ output[5] = static_cast<uint8_t>(b1de);
+
+ return true;
+}
+
+/**
+ * @brief Try to quantize an LDR A color using delta encoding.
+ *
+ * At decode time we move one bit from the offset to the base and seize another bit as a sign bit;
+ * we then unquantize both values as if they contain one extra bit. If the sum of the offsets is
+ * non-negative, then we encode a regular delta.
+ *
+ * This function only compresses the alpha - the other elements in the output array are not touched.
+ *
+ * @param color0 The input unquantized color0 endpoint.
+ * @param color1 The input unquantized color1 endpoint.
+ * @param[out] output The output endpoints, returned as (x, x, x, x, x, x, a0, a1).
+ * @param quant_level The quantization level to use.
+ *
+ * @return Returns @c false on failure, @c true on success.
+ */
+static bool try_quantize_alpha_delta(
+ vfloat4 color0,
+ vfloat4 color1,
+ uint8_t output[8],
+ quant_method quant_level
+) {
+ float scale = 1.0f / 257.0f;
+
+ float a0 = astc::clamp255f(color0.lane<3>() * scale);
+ float a1 = astc::clamp255f(color1.lane<3>() * scale);
+
+ int a0a = astc::flt2int_rtn(a0);
+ a0a <<= 1;
+ int a0b = a0a & 0xFF;
+ int a0be = quant_color(quant_level, a0b);
+ a0b = a0be;
+ a0b |= a0a & 0x100;
+ int a1d = astc::flt2int_rtn(a1);
+ a1d <<= 1;
+ a1d -= a0b;
+
+ if (a1d > 63 || a1d < -64)
+ {
+ return false;
+ }
+
+ a1d &= 0x7F;
+ a1d |= (a0b & 0x100) >> 1;
+
+ int a1de = quant_color(quant_level, a1d);
+ int a1du = a1de;
+ if ((a1d ^ a1du) & 0xC0)
+ {
+ return false;
+ }
+
+ a1du &= 0x7F;
+ if (a1du & 0x40)
+ {
+ a1du -= 0x80;
+ }
+
+ a1du += a0b;
+ if (a1du < 0 || a1du > 0x1FF)
+ {
+ return false;
+ }
+
+ output[6] = static_cast<uint8_t>(a0be);
+ output[7] = static_cast<uint8_t>(a1de);
+
+ return true;
+}
+
+/**
+ * @brief Try to quantize an LDR LA color using delta encoding.
+ *
+ * At decode time we move one bit from the offset to the base and seize another bit as a sign bit;
+ * we then unquantize both values as if they contain one extra bit. If the sum of the offsets is
+ * non-negative, then we encode a regular delta.
+ *
+ * This function quantizes both the luminance and alpha channels.
+ *
+ * @param color0 The input unquantized color0 endpoint.
+ * @param color1 The input unquantized color1 endpoint.
+ * @param[out] output The output endpoints, returned as (l0, l1, a0, a1).
+ * @param quant_level The quantization level to use.
+ *
+ * @return Returns @c false on failure, @c true on success.
+ */
+static bool try_quantize_luminance_alpha_delta(
+ vfloat4 color0,
+ vfloat4 color1,
+ uint8_t output[4],
+ quant_method quant_level
+) {
+ float scale = 1.0f / 257.0f;
+
+ float l0 = astc::clamp255f(hadd_rgb_s(color0) * ((1.0f / 3.0f) * scale));
+ float l1 = astc::clamp255f(hadd_rgb_s(color1) * ((1.0f / 3.0f) * scale));
+
+ float a0 = astc::clamp255f(color0.lane<3>() * scale);
+ float a1 = astc::clamp255f(color1.lane<3>() * scale);
+
+ int l0a = astc::flt2int_rtn(l0);
+ int a0a = astc::flt2int_rtn(a0);
+ l0a <<= 1;
+ a0a <<= 1;
+
+ int l0b = l0a & 0xFF;
+ int a0b = a0a & 0xFF;
+ int l0be = quant_color(quant_level, l0b);
+ int a0be = quant_color(quant_level, a0b);
+ l0b = l0be;
+ a0b = a0be;
+ l0b |= l0a & 0x100;
+ a0b |= a0a & 0x100;
+
+ int l1d = astc::flt2int_rtn(l1);
+ int a1d = astc::flt2int_rtn(a1);
+ l1d <<= 1;
+ a1d <<= 1;
+ l1d -= l0b;
+ a1d -= a0b;
+
+ if (l1d > 63 || l1d < -64)
+ {
+ return false;
+ }
+
+ if (a1d > 63 || a1d < -64)
+ {
+ return false;
+ }
+
+ l1d &= 0x7F;
+ a1d &= 0x7F;
+ l1d |= (l0b & 0x100) >> 1;
+ a1d |= (a0b & 0x100) >> 1;
+
+ int l1de = quant_color(quant_level, l1d);
+ int a1de = quant_color(quant_level, a1d);
+ int l1du = l1de;
+ int a1du = a1de;
+
+ if ((l1d ^ l1du) & 0xC0)
+ {
+ return false;
+ }
+
+ if ((a1d ^ a1du) & 0xC0)
+ {
+ return false;
+ }
+
+ l1du &= 0x7F;
+ a1du &= 0x7F;
+
+ if (l1du & 0x40)
+ {
+ l1du -= 0x80;
+ }
+
+ if (a1du & 0x40)
+ {
+ a1du -= 0x80;
+ }
+
+ l1du += l0b;
+ a1du += a0b;
+
+ if (l1du < 0 || l1du > 0x1FF)
+ {
+ return false;
+ }
+
+ if (a1du < 0 || a1du > 0x1FF)
+ {
+ return false;
+ }
+
+ output[0] = static_cast<uint8_t>(l0be);
+ output[1] = static_cast<uint8_t>(l1de);
+ output[2] = static_cast<uint8_t>(a0be);
+ output[3] = static_cast<uint8_t>(a1de);
+
+ return true;
+}
+
+/**
+ * @brief Try to quantize an LDR RGBA color using delta encoding.
+ *
+ * At decode time we move one bit from the offset to the base and seize another bit as a sign bit;
+ * we then unquantize both values as if they contain one extra bit. If the sum of the offsets is
+ * non-negative, then we encode a regular delta.
+ *
+ * This function quantizes both the RGB and alpha channels.
+ *
+ * @param color0 The input unquantized color0 endpoint.
+ * @param color1 The input unquantized color1 endpoint.
+ * @param[out] output The output endpoints, returned as (r0, r1, g0, g1, b0, b1, a0, a1).
+ * @param quant_level The quantization level to use.
+ *
+ * @return Returns @c false on failure, @c true on success.
+ */
+static bool try_quantize_rgba_delta(
+ vfloat4 color0,
+ vfloat4 color1,
+ uint8_t output[8],
+ quant_method quant_level
+) {
+ return try_quantize_rgb_delta(color0, color1, output, quant_level) &&
+ try_quantize_alpha_delta(color0, color1, output, quant_level);
+}
+
+
+/**
+ * @brief Try to quantize an LDR RGBA color using delta and blue contract encoding.
+ *
+ * At decode time we move one bit from the offset to the base and seize another bit as a sign bit;
+ * we then unquantize both values as if they contain one extra bit. If the sum of the offsets is
+ * non-negative, then we encode a regular delta.
+ *
+ * This function quantizes both the RGB and alpha channels.
+ *
+ * @param color0 The input unquantized color0 endpoint.
+ * @param color1 The input unquantized color1 endpoint.
+ * @param[out] output The output endpoints, returned as (r0, r1, g0, g1, b0, b1, a0, a1).
+ * @param quant_level The quantization level to use.
+ *
+ * @return Returns @c false on failure, @c true on success.
+ */
+static bool try_quantize_rgba_delta_blue_contract(
+ vfloat4 color0,
+ vfloat4 color1,
+ uint8_t output[8],
+ quant_method quant_level
+) {
+ // Note that we swap the color0 and color1 ordering for alpha to match RGB blue-contract
+ return try_quantize_rgb_delta_blue_contract(color0, color1, output, quant_level) &&
+ try_quantize_alpha_delta(color1, color0, output, quant_level);
+}
+
+/**
+ * @brief Quantize an LDR RGB color using scale encoding.
+ *
+ * @param color The input unquantized color endpoint and scale factor.
+ * @param[out] output The output endpoints, returned as (r0, g0, b0, s).
+ * @param quant_level The quantization level to use.
+ */
+static void quantize_rgbs(
+ vfloat4 color,
+ uint8_t output[4],
+ quant_method quant_level
+) {
+ float scale = 1.0f / 257.0f;
+
+ float r = astc::clamp255f(color.lane<0>() * scale);
+ float g = astc::clamp255f(color.lane<1>() * scale);
+ float b = astc::clamp255f(color.lane<2>() * scale);
+
+ int ri = quant_color(quant_level, astc::flt2int_rtn(r));
+ int gi = quant_color(quant_level, astc::flt2int_rtn(g));
+ int bi = quant_color(quant_level, astc::flt2int_rtn(b));
+
+ float oldcolorsum = hadd_rgb_s(color) * scale;
+ float newcolorsum = static_cast<float>(ri + gi + bi);
+
+ float scalea = astc::clamp1f(color.lane<3>() * (oldcolorsum + 1e-10f) / (newcolorsum + 1e-10f));
+ int scale_idx = astc::flt2int_rtn(scalea * 256.0f);
+ scale_idx = astc::clamp(scale_idx, 0, 255);
+
+ output[0] = static_cast<uint8_t>(ri);
+ output[1] = static_cast<uint8_t>(gi);
+ output[2] = static_cast<uint8_t>(bi);
+ output[3] = quant_color(quant_level, scale_idx);
+}
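+
+// For reference, the decoder treats the fourth byte as a 0..255 scale applied to the stored RGB
+// endpoint: endpoint 1 is the stored color and endpoint 0 is (color * scale) >> 8, mirroring
+// rgb_scale_unpack() in astcenc_color_unquantize.cpp. The error feedback above nudges the scale
+// so that the decoded endpoint 0 still tracks the requested one after RGB quantization error.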
+
+/**
+ * @brief Quantize an LDR RGBA color using scale encoding.
+ *
+ * @param color0 The input unquantized color0 endpoint.
+ * @param color1 The input unquantized color1 endpoint.
+ * @param color The input unquantized color endpoint and scale factor.
+ * @param[out] output The output endpoints, returned as (r0, g0, b0, s, a0, a1).
+ * @param quant_level The quantization level to use.
+ */
+static void quantize_rgbs_alpha(
+ vfloat4 color0,
+ vfloat4 color1,
+ vfloat4 color,
+ uint8_t output[6],
+ quant_method quant_level
+) {
+ float scale = 1.0f / 257.0f;
+
+ float a0 = astc::clamp255f(color0.lane<3>() * scale);
+ float a1 = astc::clamp255f(color1.lane<3>() * scale);
+
+ output[4] = quant_color(quant_level, astc::flt2int_rtn(a0));
+ output[5] = quant_color(quant_level, astc::flt2int_rtn(a1));
+
+ quantize_rgbs(color, output, quant_level);
+}
+
+/**
+ * @brief Quantize a LDR L color.
+ *
+ * @param color0 The input unquantized color0 endpoint.
+ * @param color1 The input unquantized color1 endpoint.
+ * @param[out] output The output endpoints, returned as (l0, l1).
+ * @param quant_level The quantization level to use.
+ */
+static void quantize_luminance(
+ vfloat4 color0,
+ vfloat4 color1,
+ uint8_t output[2],
+ quant_method quant_level
+) {
+ float scale = 1.0f / 257.0f;
+
+ color0 = color0 * scale;
+ color1 = color1 * scale;
+
+ float lum0 = astc::clamp255f(hadd_rgb_s(color0) * (1.0f / 3.0f));
+ float lum1 = astc::clamp255f(hadd_rgb_s(color1) * (1.0f / 3.0f));
+
+ if (lum0 > lum1)
+ {
+ float avg = (lum0 + lum1) * 0.5f;
+ lum0 = avg;
+ lum1 = avg;
+ }
+
+ output[0] = quant_color(quant_level, astc::flt2int_rtn(lum0));
+ output[1] = quant_color(quant_level, astc::flt2int_rtn(lum1));
+}
+
+/**
+ * @brief Quantize a LDR LA color.
+ *
+ * @param color0 The input unquantized color0 endpoint.
+ * @param color1 The input unquantized color1 endpoint.
+ * @param[out] output The output endpoints, returned as (l0, l1, a0, a1).
+ * @param quant_level The quantization level to use.
+ */
+static void quantize_luminance_alpha(
+ vfloat4 color0,
+ vfloat4 color1,
+ uint8_t output[4],
+ quant_method quant_level
+) {
+ float scale = 1.0f / 257.0f;
+
+ color0 = color0 * scale;
+ color1 = color1 * scale;
+
+ float lum0 = astc::clamp255f(hadd_rgb_s(color0) * (1.0f / 3.0f));
+ float lum1 = astc::clamp255f(hadd_rgb_s(color1) * (1.0f / 3.0f));
+
+ float a0 = astc::clamp255f(color0.lane<3>());
+ float a1 = astc::clamp255f(color1.lane<3>());
+
+ // If endpoints are close then pull apart slightly; this gives > 8 bit normal map precision.
+ if (quant_level > 18)
+ {
+ if (fabsf(lum0 - lum1) < 3.0f)
+ {
+ if (lum0 < lum1)
+ {
+ lum0 -= 0.5f;
+ lum1 += 0.5f;
+ }
+ else
+ {
+ lum0 += 0.5f;
+ lum1 -= 0.5f;
+ }
+
+ lum0 = astc::clamp255f(lum0);
+ lum1 = astc::clamp255f(lum1);
+ }
+
+ if (fabsf(a0 - a1) < 3.0f)
+ {
+ if (a0 < a1)
+ {
+ a0 -= 0.5f;
+ a1 += 0.5f;
+ }
+ else
+ {
+ a0 += 0.5f;
+ a1 -= 0.5f;
+ }
+
+ a0 = astc::clamp255f(a0);
+ a1 = astc::clamp255f(a1);
+ }
+ }
+
+ output[0] = quant_color(quant_level, astc::flt2int_rtn(lum0));
+ output[1] = quant_color(quant_level, astc::flt2int_rtn(lum1));
+ output[2] = quant_color(quant_level, astc::flt2int_rtn(a0));
+ output[3] = quant_color(quant_level, astc::flt2int_rtn(a1));
+}
+
+/**
+ * @brief Quantize and unquantize a value ensuring top two bits are the same.
+ *
+ * @param quant_level The quantization level to use.
+ * @param value The input unquantized value.
+ * @param[out] quant_value The quantized value.
+ */
+static inline void quantize_and_unquantize_retain_top_two_bits(
+ quant_method quant_level,
+ uint8_t value,
+ uint8_t& quant_value
+) {
+ int perform_loop;
+ uint8_t quantval;
+
+ do
+ {
+ quantval = quant_color(quant_level, value);
+
+ // Perform looping if the top two bits were modified by quant/unquant
+ perform_loop = (value & 0xC0) != (quantval & 0xC0);
+
+ if ((quantval & 0xC0) > (value & 0xC0))
+ {
+ // Quant/unquant rounded UP so that the top two bits changed;
+ // decrement the input in hopes that this will avoid rounding up.
+ value--;
+ }
+ else if ((quantval & 0xC0) < (value & 0xC0))
+ {
+ // Quant/unquant rounded DOWN so that the top two bits changed;
+ // decrement the input in hopes that this will avoid rounding down.
+ value--;
+ }
+ } while (perform_loop);
+
+ quant_value = quantval;
+}
+
+/**
+ * @brief Quantize and unquantize a value ensuring top four bits are the same.
+ *
+ * @param quant_level The quantization level to use.
+ * @param value The input unquantized value.
+ * @param[out] quant_value The quantized value in 0-255 range.
+ */
+static inline void quantize_and_unquantize_retain_top_four_bits(
+ quant_method quant_level,
+ uint8_t value,
+ uint8_t& quant_value
+) {
+ uint8_t perform_loop;
+ uint8_t quantval;
+
+ do
+ {
+ quantval = quant_color(quant_level, value);
+ // Perform looping if the top four bits were modified by quant/unquant
+ perform_loop = (value & 0xF0) != (quantval & 0xF0);
+
+ if ((quantval & 0xF0) > (value & 0xF0))
+ {
+ // Quant/unquant rounded UP so that the top four bits changed;
+ // decrement the input value in hopes that this will avoid rounding up.
+ value--;
+ }
+ else if ((quantval & 0xF0) < (value & 0xF0))
+ {
+ // Quant/unquant rounded DOWN so that the top four bits changed;
+ // decrement the input value in hopes that this will avoid rounding down.
+ value--;
+ }
+ } while (perform_loop);
+
+ quant_value = quantval;
+}
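+
+// These two helpers matter because, in the HDR packings below, the top bits of each byte carry
+// mode flags or high-order magnitude bits rather than plain color data; for example the RGBO
+// path ORs two mode bits into bits 6-7 of the red field before quantizing it, so a quantization
+// that disturbed those bits would silently change the decoded mode.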
+
+/**
+ * @brief Quantize a HDR RGB color using RGB + offset.
+ *
+ * @param color The input unquantized color endpoint and offset.
+ * @param[out] output The output endpoints, returned as packed RGBS with some mode bits.
+ * @param quant_level The quantization level to use.
+ */
+static void quantize_hdr_rgbo(
+ vfloat4 color,
+ uint8_t output[4],
+ quant_method quant_level
+) {
+ color.set_lane<0>(color.lane<0>() + color.lane<3>());
+ color.set_lane<1>(color.lane<1>() + color.lane<3>());
+ color.set_lane<2>(color.lane<2>() + color.lane<3>());
+
+ color = clamp(0.0f, 65535.0f, color);
+
+ vfloat4 color_bak = color;
+
+ int majcomp;
+ if (color.lane<0>() > color.lane<1>() && color.lane<0>() > color.lane<2>())
+ {
+ majcomp = 0; // red is largest component
+ }
+ else if (color.lane<1>() > color.lane<2>())
+ {
+ majcomp = 1; // green is largest component
+ }
+ else
+ {
+ majcomp = 2; // blue is largest component
+ }
+
+ // swap around the red component and the largest component.
+ switch (majcomp)
+ {
+ case 1:
+ color = color.swz<1, 0, 2, 3>();
+ break;
+ case 2:
+ color = color.swz<2, 1, 0, 3>();
+ break;
+ default:
+ break;
+ }
+
+ static const int mode_bits[5][3] {
+ {11, 5, 7},
+ {11, 6, 5},
+ {10, 5, 8},
+ {9, 6, 7},
+ {8, 7, 6}
+ };
+
+ static const float mode_cutoffs[5][2] {
+ {1024, 4096},
+ {2048, 1024},
+ {2048, 16384},
+ {8192, 16384},
+ {32768, 16384}
+ };
+
+ static const float mode_rscales[5] {
+ 32.0f,
+ 32.0f,
+ 64.0f,
+ 128.0f,
+ 256.0f,
+ };
+
+ static const float mode_scales[5] {
+ 1.0f / 32.0f,
+ 1.0f / 32.0f,
+ 1.0f / 64.0f,
+ 1.0f / 128.0f,
+ 1.0f / 256.0f,
+ };
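+
+ // For illustration, the float cutoffs are just the integer field capacities mapped back to
+ // the 0..65535 range: mode 0 stores the green/blue offsets in 5 bits and the scale in 7
+ // bits, and with a reconstruction step of 32.0f that gives cutoffs of 32 * 32 = 1024 and
+ // 128 * 32 = 4096, matching the first row of mode_cutoffs above.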
+
+ float r_base = color.lane<0>();
+ float g_base = color.lane<0>() - color.lane<1>();
+ float b_base = color.lane<0>() - color.lane<2>();
+ float s_base = color.lane<3>();
+
+ for (int mode = 0; mode < 5; mode++)
+ {
+ if (g_base > mode_cutoffs[mode][0] || b_base > mode_cutoffs[mode][0] || s_base > mode_cutoffs[mode][1])
+ {
+ continue;
+ }
+
+ // Encode the mode into a 4-bit vector
+ int mode_enc = mode < 4 ? (mode | (majcomp << 2)) : (majcomp | 0xC);
+
+ float mode_scale = mode_scales[mode];
+ float mode_rscale = mode_rscales[mode];
+
+ int gb_intcutoff = 1 << mode_bits[mode][1];
+ int s_intcutoff = 1 << mode_bits[mode][2];
+
+ // Quantize and unquantize R
+ int r_intval = astc::flt2int_rtn(r_base * mode_scale);
+
+ int r_lowbits = r_intval & 0x3f;
+
+ r_lowbits |= (mode_enc & 3) << 6;
+
+ uint8_t r_quantval;
+ quantize_and_unquantize_retain_top_two_bits(
+ quant_level, static_cast<uint8_t>(r_lowbits), r_quantval);
+
+ r_intval = (r_intval & ~0x3f) | (r_quantval & 0x3f);
+ float r_fval = static_cast<float>(r_intval) * mode_rscale;
+
+ // Recompute G and B, then quantize and unquantize them
+ float g_fval = r_fval - color.lane<1>();
+ float b_fval = r_fval - color.lane<2>();
+
+ g_fval = astc::clamp(g_fval, 0.0f, 65535.0f);
+ b_fval = astc::clamp(b_fval, 0.0f, 65535.0f);
+
+ int g_intval = astc::flt2int_rtn(g_fval * mode_scale);
+ int b_intval = astc::flt2int_rtn(b_fval * mode_scale);
+
+ if (g_intval >= gb_intcutoff || b_intval >= gb_intcutoff)
+ {
+ continue;
+ }
+
+ int g_lowbits = g_intval & 0x1f;
+ int b_lowbits = b_intval & 0x1f;
+
+ int bit0 = 0;
+ int bit1 = 0;
+ int bit2 = 0;
+ int bit3 = 0;
+
+ switch (mode)
+ {
+ case 0:
+ case 2:
+ bit0 = (r_intval >> 9) & 1;
+ break;
+ case 1:
+ case 3:
+ bit0 = (r_intval >> 8) & 1;
+ break;
+ case 4:
+ case 5:
+ bit0 = (g_intval >> 6) & 1;
+ break;
+ }
+
+ switch (mode)
+ {
+ case 0:
+ case 1:
+ case 2:
+ case 3:
+ bit2 = (r_intval >> 7) & 1;
+ break;
+ case 4:
+ case 5:
+ bit2 = (b_intval >> 6) & 1;
+ break;
+ }
+
+ switch (mode)
+ {
+ case 0:
+ case 2:
+ bit1 = (r_intval >> 8) & 1;
+ break;
+ case 1:
+ case 3:
+ case 4:
+ case 5:
+ bit1 = (g_intval >> 5) & 1;
+ break;
+ }
+
+ switch (mode)
+ {
+ case 0:
+ bit3 = (r_intval >> 10) & 1;
+ break;
+ case 2:
+ bit3 = (r_intval >> 6) & 1;
+ break;
+ case 1:
+ case 3:
+ case 4:
+ case 5:
+ bit3 = (b_intval >> 5) & 1;
+ break;
+ }
+
+ g_lowbits |= (mode_enc & 0x4) << 5;
+ b_lowbits |= (mode_enc & 0x8) << 4;
+
+ g_lowbits |= bit0 << 6;
+ g_lowbits |= bit1 << 5;
+ b_lowbits |= bit2 << 6;
+ b_lowbits |= bit3 << 5;
+
+ uint8_t g_quantval;
+ uint8_t b_quantval;
+
+ quantize_and_unquantize_retain_top_four_bits(
+ quant_level, static_cast<uint8_t>(g_lowbits), g_quantval);
+ quantize_and_unquantize_retain_top_four_bits(
+ quant_level, static_cast<uint8_t>(b_lowbits), b_quantval);
+
+ g_intval = (g_intval & ~0x1f) | (g_quantval & 0x1f);
+ b_intval = (b_intval & ~0x1f) | (b_quantval & 0x1f);
+
+ g_fval = static_cast<float>(g_intval) * mode_rscale;
+ b_fval = static_cast<float>(b_intval) * mode_rscale;
+
+ // Recompute the scale value, based on the errors introduced to red, green and blue
+
+ // If the error is positive, then the R,G,B errors combined have raised the color
+ // value overall; as such, the scale value needs to be increased.
+ float rgb_errorsum = (r_fval - color.lane<0>()) + (r_fval - g_fval - color.lane<1>()) + (r_fval - b_fval - color.lane<2>());
+
+ float s_fval = s_base + rgb_errorsum * (1.0f / 3.0f);
+ s_fval = astc::clamp(s_fval, 0.0f, 1e9f);
+
+ int s_intval = astc::flt2int_rtn(s_fval * mode_scale);
+
+ if (s_intval >= s_intcutoff)
+ {
+ continue;
+ }
+
+ int s_lowbits = s_intval & 0x1f;
+
+ int bit4;
+ int bit5;
+ int bit6;
+ switch (mode)
+ {
+ case 1:
+ bit6 = (r_intval >> 9) & 1;
+ break;
+ default:
+ bit6 = (s_intval >> 5) & 1;
+ break;
+ }
+
+ switch (mode)
+ {
+ case 4:
+ bit5 = (r_intval >> 7) & 1;
+ break;
+ case 1:
+ bit5 = (r_intval >> 10) & 1;
+ break;
+ default:
+ bit5 = (s_intval >> 6) & 1;
+ break;
+ }
+
+ switch (mode)
+ {
+ case 2:
+ bit4 = (s_intval >> 7) & 1;
+ break;
+ default:
+ bit4 = (r_intval >> 6) & 1;
+ break;
+ }
+
+ s_lowbits |= bit6 << 5;
+ s_lowbits |= bit5 << 6;
+ s_lowbits |= bit4 << 7;
+
+ uint8_t s_quantval;
+
+ quantize_and_unquantize_retain_top_four_bits(
+ quant_level, static_cast<uint8_t>(s_lowbits), s_quantval);
+
+ output[0] = r_quantval;
+ output[1] = g_quantval;
+ output[2] = b_quantval;
+ output[3] = s_quantval;
+ return;
+ }
+
+ // Failed to encode any of the modes above? In that case encode using mode #5
+ float vals[4];
+ vals[0] = color_bak.lane<0>();
+ vals[1] = color_bak.lane<1>();
+ vals[2] = color_bak.lane<2>();
+ vals[3] = color_bak.lane<3>();
+
+ int ivals[4];
+ float cvals[3];
+
+ for (int i = 0; i < 3; i++)
+ {
+ vals[i] = astc::clamp(vals[i], 0.0f, 65020.0f);
+ ivals[i] = astc::flt2int_rtn(vals[i] * (1.0f / 512.0f));
+ cvals[i] = static_cast<float>(ivals[i]) * 512.0f;
+ }
+
+ float rgb_errorsum = (cvals[0] - vals[0]) + (cvals[1] - vals[1]) + (cvals[2] - vals[2]);
+ vals[3] += rgb_errorsum * (1.0f / 3.0f);
+
+ vals[3] = astc::clamp(vals[3], 0.0f, 65020.0f);
+ ivals[3] = astc::flt2int_rtn(vals[3] * (1.0f / 512.0f));
+
+ int encvals[4];
+ encvals[0] = (ivals[0] & 0x3f) | 0xC0;
+ encvals[1] = (ivals[1] & 0x7f) | 0x80;
+ encvals[2] = (ivals[2] & 0x7f) | 0x80;
+ encvals[3] = (ivals[3] & 0x7f) | ((ivals[0] & 0x40) << 1);
+
+ for (uint8_t i = 0; i < 4; i++)
+ {
+ quantize_and_unquantize_retain_top_four_bits(
+ quant_level, static_cast<uint8_t>(encvals[i]), output[i]);
+ }
+
+ return;
+}
+
+/**
+ * @brief Quantize a HDR RGB color using direct RGB encoding.
+ *
+ * @param color0 The input unquantized color0 endpoint.
+ * @param color1 The input unquantized color1 endpoint.
+ * @param[out] output The output endpoints, returned as packed RGB+RGB pairs with mode bits.
+ * @param quant_level The quantization level to use.
+ */
+static void quantize_hdr_rgb(
+ vfloat4 color0,
+ vfloat4 color1,
+ uint8_t output[6],
+ quant_method quant_level
+) {
+ // Note: color*.lane<3> is not used so we can ignore it
+ color0 = clamp(0.0f, 65535.0f, color0);
+ color1 = clamp(0.0f, 65535.0f, color1);
+
+ vfloat4 color0_bak = color0;
+ vfloat4 color1_bak = color1;
+
+ int majcomp;
+ if (color1.lane<0>() > color1.lane<1>() && color1.lane<0>() > color1.lane<2>())
+ {
+ majcomp = 0;
+ }
+ else if (color1.lane<1>() > color1.lane<2>())
+ {
+ majcomp = 1;
+ }
+ else
+ {
+ majcomp = 2;
+ }
+
+ // Swizzle the components
+ switch (majcomp)
+ {
+ case 1: // red-green swap
+ color0 = color0.swz<1, 0, 2, 3>();
+ color1 = color1.swz<1, 0, 2, 3>();
+ break;
+ case 2: // red-blue swap
+ color0 = color0.swz<2, 1, 0, 3>();
+ color1 = color1.swz<2, 1, 0, 3>();
+ break;
+ default:
+ break;
+ }
+
+ float a_base = color1.lane<0>();
+ a_base = astc::clamp(a_base, 0.0f, 65535.0f);
+
+ float b0_base = a_base - color1.lane<1>();
+ float b1_base = a_base - color1.lane<2>();
+ float c_base = a_base - color0.lane<0>();
+ float d0_base = a_base - b0_base - c_base - color0.lane<1>();
+ float d1_base = a_base - b1_base - c_base - color0.lane<2>();
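+
+ // In other words: A is the major component of endpoint 1, B0/B1 measure how far endpoint 1's
+ // other two components fall below A, C measures how far endpoint 0's major component falls
+ // below A, and D0/D1 are the remaining residuals for endpoint 0's other two components.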
+
+ // Number of bits in the various fields in the various modes
+ static const int mode_bits[8][4] {
+ {9, 7, 6, 7},
+ {9, 8, 6, 6},
+ {10, 6, 7, 7},
+ {10, 7, 7, 6},
+ {11, 8, 6, 5},
+ {11, 6, 8, 6},
+ {12, 7, 7, 5},
+ {12, 6, 7, 6}
+ };
+
+ // Cutoffs to use for the computed values of a,b,c,d, assuming the
+ // range 0..65535 are LNS values corresponding to fp16.
+ static const float mode_cutoffs[8][4] {
+ {16384, 8192, 8192, 8}, // mode 0: 9,7,6,7
+ {32768, 8192, 4096, 8}, // mode 1: 9,8,6,6
+ {4096, 8192, 4096, 4}, // mode 2: 10,6,7,7
+ {8192, 8192, 2048, 4}, // mode 3: 10,7,7,6
+ {8192, 2048, 512, 2}, // mode 4: 11,8,6,5
+ {2048, 8192, 1024, 2}, // mode 5: 11,6,8,6
+ {2048, 2048, 256, 1}, // mode 6: 12,7,7,5
+ {1024, 2048, 512, 1}, // mode 7: 12,6,7,6
+ };
+
+ static const float mode_scales[8] {
+ 1.0f / 128.0f,
+ 1.0f / 128.0f,
+ 1.0f / 64.0f,
+ 1.0f / 64.0f,
+ 1.0f / 32.0f,
+ 1.0f / 32.0f,
+ 1.0f / 16.0f,
+ 1.0f / 16.0f,
+ };
+
+ // Scaling factors when going from what was encoded in the mode to 16 bits.
+ static const float mode_rscales[8] {
+ 128.0f,
+ 128.0f,
+ 64.0f,
+ 64.0f,
+ 32.0f,
+ 32.0f,
+ 16.0f,
+ 16.0f
+ };
+
+ // Try modes one by one, with the highest-precision mode first.
+ for (int mode = 7; mode >= 0; mode--)
+ {
+ // For each mode, test if we can in fact accommodate the computed b, c, and d values.
+ // If we clearly can't, then we skip to the next mode.
+
+ float b_cutoff = mode_cutoffs[mode][0];
+ float c_cutoff = mode_cutoffs[mode][1];
+ float d_cutoff = mode_cutoffs[mode][2];
+
+ if (b0_base > b_cutoff || b1_base > b_cutoff || c_base > c_cutoff || fabsf(d0_base) > d_cutoff || fabsf(d1_base) > d_cutoff)
+ {
+ continue;
+ }
+
+ float mode_scale = mode_scales[mode];
+ float mode_rscale = mode_rscales[mode];
+
+ int b_intcutoff = 1 << mode_bits[mode][1];
+ int c_intcutoff = 1 << mode_bits[mode][2];
+ int d_intcutoff = 1 << (mode_bits[mode][3] - 1);
+
+ // Quantize and unquantize A, with the assumption that its high bits can be handled safely.
+ int a_intval = astc::flt2int_rtn(a_base * mode_scale);
+ int a_lowbits = a_intval & 0xFF;
+
+ int a_quantval = quant_color(quant_level, a_lowbits);
+ int a_uquantval = a_quantval;
+ a_intval = (a_intval & ~0xFF) | a_uquantval;
+ float a_fval = static_cast<float>(a_intval) * mode_rscale;
+
+ // Recompute C, then quantize and unquantize it
+ float c_fval = a_fval - color0.lane<0>();
+ c_fval = astc::clamp(c_fval, 0.0f, 65535.0f);
+
+ int c_intval = astc::flt2int_rtn(c_fval * mode_scale);
+
+ if (c_intval >= c_intcutoff)
+ {
+ continue;
+ }
+
+ int c_lowbits = c_intval & 0x3f;
+
+ c_lowbits |= (mode & 1) << 7;
+ c_lowbits |= (a_intval & 0x100) >> 2;
+
+ uint8_t c_quantval;
+
+ quantize_and_unquantize_retain_top_two_bits(
+ quant_level, static_cast<uint8_t>(c_lowbits), c_quantval);
+
+ c_intval = (c_intval & ~0x3F) | (c_quantval & 0x3F);
+ c_fval = static_cast<float>(c_intval) * mode_rscale;
+
+ // Recompute B0 and B1, then quantize and unquantize them
+ float b0_fval = a_fval - color1.lane<1>();
+ float b1_fval = a_fval - color1.lane<2>();
+
+ b0_fval = astc::clamp(b0_fval, 0.0f, 65535.0f);
+ b1_fval = astc::clamp(b1_fval, 0.0f, 65535.0f);
+ int b0_intval = astc::flt2int_rtn(b0_fval * mode_scale);
+ int b1_intval = astc::flt2int_rtn(b1_fval * mode_scale);
+
+ if (b0_intval >= b_intcutoff || b1_intval >= b_intcutoff)
+ {
+ continue;
+ }
+
+ int b0_lowbits = b0_intval & 0x3f;
+ int b1_lowbits = b1_intval & 0x3f;
+
+ int bit0 = 0;
+ int bit1 = 0;
+ switch (mode)
+ {
+ case 0:
+ case 1:
+ case 3:
+ case 4:
+ case 6:
+ bit0 = (b0_intval >> 6) & 1;
+ break;
+ case 2:
+ case 5:
+ case 7:
+ bit0 = (a_intval >> 9) & 1;
+ break;
+ }
+
+ switch (mode)
+ {
+ case 0:
+ case 1:
+ case 3:
+ case 4:
+ case 6:
+ bit1 = (b1_intval >> 6) & 1;
+ break;
+ case 2:
+ bit1 = (c_intval >> 6) & 1;
+ break;
+ case 5:
+ case 7:
+ bit1 = (a_intval >> 10) & 1;
+ break;
+ }
+
+ b0_lowbits |= bit0 << 6;
+ b1_lowbits |= bit1 << 6;
+
+ b0_lowbits |= ((mode >> 1) & 1) << 7;
+ b1_lowbits |= ((mode >> 2) & 1) << 7;
+
+ uint8_t b0_quantval;
+ uint8_t b1_quantval;
+
+ quantize_and_unquantize_retain_top_two_bits(
+ quant_level, static_cast<uint8_t>(b0_lowbits), b0_quantval);
+ quantize_and_unquantize_retain_top_two_bits(
+ quant_level, static_cast<uint8_t>(b1_lowbits), b1_quantval);
+
+ b0_intval = (b0_intval & ~0x3f) | (b0_quantval & 0x3f);
+ b1_intval = (b1_intval & ~0x3f) | (b1_quantval & 0x3f);
+ b0_fval = static_cast<float>(b0_intval) * mode_rscale;
+ b1_fval = static_cast<float>(b1_intval) * mode_rscale;
+
+ // Recompute D0 and D1, then quantize and unquantize them
+ float d0_fval = a_fval - b0_fval - c_fval - color0.lane<1>();
+ float d1_fval = a_fval - b1_fval - c_fval - color0.lane<2>();
+
+ d0_fval = astc::clamp(d0_fval, -65535.0f, 65535.0f);
+ d1_fval = astc::clamp(d1_fval, -65535.0f, 65535.0f);
+
+ int d0_intval = astc::flt2int_rtn(d0_fval * mode_scale);
+ int d1_intval = astc::flt2int_rtn(d1_fval * mode_scale);
+
+ if (abs(d0_intval) >= d_intcutoff || abs(d1_intval) >= d_intcutoff)
+ {
+ continue;
+ }
+
+ int d0_lowbits = d0_intval & 0x1f;
+ int d1_lowbits = d1_intval & 0x1f;
+
+ int bit2 = 0;
+ int bit3 = 0;
+ int bit4;
+ int bit5;
+ switch (mode)
+ {
+ case 0:
+ case 2:
+ bit2 = (d0_intval >> 6) & 1;
+ break;
+ case 1:
+ case 4:
+ bit2 = (b0_intval >> 7) & 1;
+ break;
+ case 3:
+ bit2 = (a_intval >> 9) & 1;
+ break;
+ case 5:
+ bit2 = (c_intval >> 7) & 1;
+ break;
+ case 6:
+ case 7:
+ bit2 = (a_intval >> 11) & 1;
+ break;
+ }
+ switch (mode)
+ {
+ case 0:
+ case 2:
+ bit3 = (d1_intval >> 6) & 1;
+ break;
+ case 1:
+ case 4:
+ bit3 = (b1_intval >> 7) & 1;
+ break;
+ case 3:
+ case 5:
+ case 6:
+ case 7:
+ bit3 = (c_intval >> 6) & 1;
+ break;
+ }
+
+ switch (mode)
+ {
+ case 4:
+ case 6:
+ bit4 = (a_intval >> 9) & 1;
+ bit5 = (a_intval >> 10) & 1;
+ break;
+ default:
+ bit4 = (d0_intval >> 5) & 1;
+ bit5 = (d1_intval >> 5) & 1;
+ break;
+ }
+
+ d0_lowbits |= bit2 << 6;
+ d1_lowbits |= bit3 << 6;
+ d0_lowbits |= bit4 << 5;
+ d1_lowbits |= bit5 << 5;
+
+ d0_lowbits |= (majcomp & 1) << 7;
+ d1_lowbits |= ((majcomp >> 1) & 1) << 7;
+
+ uint8_t d0_quantval;
+ uint8_t d1_quantval;
+
+ quantize_and_unquantize_retain_top_four_bits(
+ quant_level, static_cast<uint8_t>(d0_lowbits), d0_quantval);
+ quantize_and_unquantize_retain_top_four_bits(
+ quant_level, static_cast<uint8_t>(d1_lowbits), d1_quantval);
+
+ output[0] = static_cast<uint8_t>(a_quantval);
+ output[1] = c_quantval;
+ output[2] = b0_quantval;
+ output[3] = b1_quantval;
+ output[4] = d0_quantval;
+ output[5] = d1_quantval;
+ return;
+ }
+
+ // If none of the modes fit we will use a flat representation for storing data, using 8 bits
+ // for red and green, and 7 bits for blue. This gives color accuracy roughly similar to LDR
+ // 4:4:3 which is not at all great but usable. This representation is used if the light color is
+ // more than 4x the color value of the dark color.
+ float vals[6];
+ vals[0] = color0_bak.lane<0>();
+ vals[1] = color1_bak.lane<0>();
+ vals[2] = color0_bak.lane<1>();
+ vals[3] = color1_bak.lane<1>();
+ vals[4] = color0_bak.lane<2>();
+ vals[5] = color1_bak.lane<2>();
+
+ for (int i = 0; i < 6; i++)
+ {
+ vals[i] = astc::clamp(vals[i], 0.0f, 65020.0f);
+ }
+
+ for (int i = 0; i < 4; i++)
+ {
+ int idx = astc::flt2int_rtn(vals[i] * 1.0f / 256.0f);
+ output[i] = quant_color(quant_level, idx);
+ }
+
+ for (int i = 4; i < 6; i++)
+ {
+ int idx = astc::flt2int_rtn(vals[i] * 1.0f / 512.0f) + 128;
+ quantize_and_unquantize_retain_top_two_bits(
+ quant_level, static_cast<uint8_t>(idx), output[i]);
+ }
+
+ return;
+}
+
+/**
+ * @brief Quantize a HDR RGB + LDR A color using direct RGBA encoding.
+ *
+ * @param color0 The input unquantized color0 endpoint.
+ * @param color1 The input unquantized color1 endpoint.
+ * @param[out] output The output endpoints, returned as packed RGBA+RGBA pairs with mode bits.
+ * @param quant_level The quantization level to use.
+ */
+static void quantize_hdr_rgb_ldr_alpha(
+ vfloat4 color0,
+ vfloat4 color1,
+ uint8_t output[8],
+ quant_method quant_level
+) {
+ float scale = 1.0f / 257.0f;
+
+ float a0 = astc::clamp255f(color0.lane<3>() * scale);
+ float a1 = astc::clamp255f(color1.lane<3>() * scale);
+
+ output[6] = quant_color(quant_level, astc::flt2int_rtn(a0));
+ output[7] = quant_color(quant_level, astc::flt2int_rtn(a1));
+
+ quantize_hdr_rgb(color0, color1, output, quant_level);
+}
+
+/**
+ * @brief Quantize a HDR L color using the large range encoding.
+ *
+ * @param color0 The input unquantized color0 endpoint.
+ * @param color1 The input unquantized color1 endpoint.
+ * @param[out] output The output endpoints, returned as packed (l0, l1).
+ * @param quant_level The quantization level to use.
+ */
+static void quantize_hdr_luminance_large_range(
+ vfloat4 color0,
+ vfloat4 color1,
+ uint8_t output[2],
+ quant_method quant_level
+) {
+ float lum0 = hadd_rgb_s(color0) * (1.0f / 3.0f);
+ float lum1 = hadd_rgb_s(color1) * (1.0f / 3.0f);
+
+ if (lum1 < lum0)
+ {
+ float avg = (lum0 + lum1) * 0.5f;
+ lum0 = avg;
+ lum1 = avg;
+ }
+
+ int ilum1 = astc::flt2int_rtn(lum1);
+ int ilum0 = astc::flt2int_rtn(lum0);
+
+ // Find the closest encodable point in the upper half of the code-point space
+ int upper_v0 = (ilum0 + 128) >> 8;
+ int upper_v1 = (ilum1 + 128) >> 8;
+
+ upper_v0 = astc::clamp(upper_v0, 0, 255);
+ upper_v1 = astc::clamp(upper_v1, 0, 255);
+
+ // Find the closest encodable point in the lower half of the code-point space
+ int lower_v0 = (ilum1 + 256) >> 8;
+ int lower_v1 = ilum0 >> 8;
+
+ lower_v0 = astc::clamp(lower_v0, 0, 255);
+ lower_v1 = astc::clamp(lower_v1, 0, 255);
+
+ // Determine the distance between the point in code-point space and the input value
+ int upper0_dec = upper_v0 << 8;
+ int upper1_dec = upper_v1 << 8;
+ int lower0_dec = (lower_v1 << 8) + 128;
+ int lower1_dec = (lower_v0 << 8) - 128;
+
+ int upper0_diff = upper0_dec - ilum0;
+ int upper1_diff = upper1_dec - ilum1;
+ int lower0_diff = lower0_dec - ilum0;
+ int lower1_diff = lower1_dec - ilum1;
+
+ int upper_error = (upper0_diff * upper0_diff) + (upper1_diff * upper1_diff);
+ int lower_error = (lower0_diff * lower0_diff) + (lower1_diff * lower1_diff);
+
+ int v0, v1;
+ if (upper_error < lower_error)
+ {
+ v0 = upper_v0;
+ v1 = upper_v1;
+ }
+ else
+ {
+ v0 = lower_v0;
+ v1 = lower_v1;
+ }
+
+ // OK; encode
+ output[0] = quant_color(quant_level, v0);
+ output[1] = quant_color(quant_level, v1);
+}
+
+/**
+ * @brief Quantize a HDR L color using the small range encoding.
+ *
+ * @param color0 The input unquantized color0 endpoint.
+ * @param color1 The input unquantized color1 endpoint.
+ * @param[out] output The output endpoints, returned as packed (l0, l1) with mode bits.
+ * @param quant_level The quantization level to use.
+ *
+ * @return Returns @c false on failure, @c true on success.
+ */
+static bool try_quantize_hdr_luminance_small_range(
+ vfloat4 color0,
+ vfloat4 color1,
+ uint8_t output[2],
+ quant_method quant_level
+) {
+ float lum0 = hadd_rgb_s(color0) * (1.0f / 3.0f);
+ float lum1 = hadd_rgb_s(color1) * (1.0f / 3.0f);
+
+ if (lum1 < lum0)
+ {
+ float avg = (lum0 + lum1) * 0.5f;
+ lum0 = avg;
+ lum1 = avg;
+ }
+
+ int ilum1 = astc::flt2int_rtn(lum1);
+ int ilum0 = astc::flt2int_rtn(lum0);
+
+ // Difference of more than a factor-of-2 results in immediate failure
+ if (ilum1 - ilum0 > 2048)
+ {
+ return false;
+ }
+
+ int lowval, highval, diffval;
+ int v0, v1;
+ int v0e, v1e;
+ int v0d, v1d;
+
+ // Try to encode the high-precision submode
+ lowval = (ilum0 + 16) >> 5;
+ highval = (ilum1 + 16) >> 5;
+
+ lowval = astc::clamp(lowval, 0, 2047);
+ highval = astc::clamp(highval, 0, 2047);
+
+ v0 = lowval & 0x7F;
+ v0e = quant_color(quant_level, v0);
+ v0d = v0e;
+
+ if (v0d < 0x80)
+ {
+ lowval = (lowval & ~0x7F) | v0d;
+ diffval = highval - lowval;
+ if (diffval >= 0 && diffval <= 15)
+ {
+ v1 = ((lowval >> 3) & 0xF0) | diffval;
+ v1e = quant_color(quant_level, v1);
+ v1d = v1e;
+ if ((v1d & 0xF0) == (v1 & 0xF0))
+ {
+ output[0] = static_cast<uint8_t>(v0e);
+ output[1] = static_cast<uint8_t>(v1e);
+ return true;
+ }
+ }
+ }
+
+ // Try to encode the low-precision submode
+ lowval = (ilum0 + 32) >> 6;
+ highval = (ilum1 + 32) >> 6;
+
+ lowval = astc::clamp(lowval, 0, 1023);
+ highval = astc::clamp(highval, 0, 1023);
+
+ v0 = (lowval & 0x7F) | 0x80;
+ v0e = quant_color(quant_level, v0);
+ v0d = v0e;
+ if ((v0d & 0x80) == 0)
+ {
+ return false;
+ }
+
+ lowval = (lowval & ~0x7F) | (v0d & 0x7F);
+ diffval = highval - lowval;
+ if (diffval < 0 || diffval > 31)
+ {
+ return false;
+ }
+
+ v1 = ((lowval >> 2) & 0xE0) | diffval;
+ v1e = quant_color(quant_level, v1);
+ v1d = v1e;
+ if ((v1d & 0xE0) != (v1 & 0xE0))
+ {
+ return false;
+ }
+
+ output[0] = static_cast<uint8_t>(v0e);
+ output[1] = static_cast<uint8_t>(v1e);
+ return true;
+}
+
+/**
+ * @brief Quantize a HDR A color using either delta or direct encoding.
+ *
+ * @param alpha0 The input unquantized alpha0 endpoint.
+ * @param alpha1 The input unquantized alpha1 endpoint.
+ * @param[out] output The output endpoints, returned as packed (a0, a1) with mode bits.
+ * @param quant_level The quantization level to use.
+ */
+static void quantize_hdr_alpha(
+ float alpha0,
+ float alpha1,
+ uint8_t output[2],
+ quant_method quant_level
+) {
+ alpha0 = astc::clamp(alpha0, 0.0f, 65280.0f);
+ alpha1 = astc::clamp(alpha1, 0.0f, 65280.0f);
+
+ int ialpha0 = astc::flt2int_rtn(alpha0);
+ int ialpha1 = astc::flt2int_rtn(alpha1);
+
+ int val0, val1, diffval;
+ int v6, v7;
+ int v6e, v7e;
+ int v6d, v7d;
+
+ // Try to encode one of the delta submodes, in decreasing-precision order
+ for (int i = 2; i >= 0; i--)
+ {
+ val0 = (ialpha0 + (128 >> i)) >> (8 - i);
+ val1 = (ialpha1 + (128 >> i)) >> (8 - i);
+
+ v6 = (val0 & 0x7F) | ((i & 1) << 7);
+ v6e = quant_color(quant_level, v6);
+ v6d = v6e;
+
+ if ((v6 ^ v6d) & 0x80)
+ {
+ continue;
+ }
+
+ val0 = (val0 & ~0x7f) | (v6d & 0x7f);
+ diffval = val1 - val0;
+ int cutoff = 32 >> i;
+ int mask = 2 * cutoff - 1;
+
+ if (diffval < -cutoff || diffval >= cutoff)
+ {
+ continue;
+ }
+
+ v7 = ((i & 2) << 6) | ((val0 >> 7) << (6 - i)) | (diffval & mask);
+ v7e = quant_color(quant_level, v7);
+ v7d = v7e;
+
+ static const int testbits[3] { 0xE0, 0xF0, 0xF8 };
+
+ if ((v7 ^ v7d) & testbits[i])
+ {
+ continue;
+ }
+
+ output[0] = static_cast<uint8_t>(v6e);
+ output[1] = static_cast<uint8_t>(v7e);
+ return;
+ }
+
+ // Could not encode any of the delta modes; instead encode a flat value
+ val0 = (ialpha0 + 256) >> 9;
+ val1 = (ialpha1 + 256) >> 9;
+ v6 = val0 | 0x80;
+ v7 = val1 | 0x80;
+
+ output[0] = quant_color(quant_level, v6);
+ output[1] = quant_color(quant_level, v7);
+
+ return;
+}
+
+/**
+ * @brief Quantize a HDR RGBA color using either delta or direct RGBA encoding.
+ *
+ * @param color0 The input unquantized color0 endpoint.
+ * @param color1 The input unquantized color1 endpoint.
+ * @param[out] output The output endpoints, returned as packed RGBA+RGBA pairs with mode bits.
+ * @param quant_level The quantization level to use.
+ */
+static void quantize_hdr_rgb_alpha(
+ vfloat4 color0,
+ vfloat4 color1,
+ uint8_t output[8],
+ quant_method quant_level
+) {
+ quantize_hdr_rgb(color0, color1, output, quant_level);
+ quantize_hdr_alpha(color0.lane<3>(), color1.lane<3>(), output + 6, quant_level);
+}
+
+/* See header for documentation. */
+uint8_t pack_color_endpoints(
+ vfloat4 color0,
+ vfloat4 color1,
+ vfloat4 rgbs_color,
+ vfloat4 rgbo_color,
+ int format,
+ uint8_t* output,
+ quant_method quant_level
+) {
+ assert(QUANT_6 <= quant_level && quant_level <= QUANT_256);
+
+ // We do not support negative colors
+ color0 = max(color0, 0.0f);
+ color1 = max(color1, 0.0f);
+
+ uint8_t retval = 0;
+
+ switch (format)
+ {
+ case FMT_RGB:
+ if (quant_level <= QUANT_160)
+ {
+ if (try_quantize_rgb_delta_blue_contract(color0, color1, output, quant_level))
+ {
+ retval = FMT_RGB_DELTA;
+ break;
+ }
+ if (try_quantize_rgb_delta(color0, color1, output, quant_level))
+ {
+ retval = FMT_RGB_DELTA;
+ break;
+ }
+ }
+ if (quant_level < QUANT_256 && try_quantize_rgb_blue_contract(color0, color1, output, quant_level))
+ {
+ retval = FMT_RGB;
+ break;
+ }
+ quantize_rgb(color0, color1, output, quant_level);
+ retval = FMT_RGB;
+ break;
+
+ case FMT_RGBA:
+ if (quant_level <= QUANT_160)
+ {
+ if (try_quantize_rgba_delta_blue_contract(color0, color1, output, quant_level))
+ {
+ retval = FMT_RGBA_DELTA;
+ break;
+ }
+ if (try_quantize_rgba_delta(color0, color1, output, quant_level))
+ {
+ retval = FMT_RGBA_DELTA;
+ break;
+ }
+ }
+ if (quant_level < QUANT_256 && try_quantize_rgba_blue_contract(color0, color1, output, quant_level))
+ {
+ retval = FMT_RGBA;
+ break;
+ }
+ quantize_rgba(color0, color1, output, quant_level);
+ retval = FMT_RGBA;
+ break;
+
+ case FMT_RGB_SCALE:
+ quantize_rgbs(rgbs_color, output, quant_level);
+ retval = FMT_RGB_SCALE;
+ break;
+
+ case FMT_HDR_RGB_SCALE:
+ quantize_hdr_rgbo(rgbo_color, output, quant_level);
+ retval = FMT_HDR_RGB_SCALE;
+ break;
+
+ case FMT_HDR_RGB:
+ quantize_hdr_rgb(color0, color1, output, quant_level);
+ retval = FMT_HDR_RGB;
+ break;
+
+ case FMT_RGB_SCALE_ALPHA:
+ quantize_rgbs_alpha(color0, color1, rgbs_color, output, quant_level);
+ retval = FMT_RGB_SCALE_ALPHA;
+ break;
+
+ case FMT_HDR_LUMINANCE_SMALL_RANGE:
+ case FMT_HDR_LUMINANCE_LARGE_RANGE:
+ if (try_quantize_hdr_luminance_small_range(color0, color1, output, quant_level))
+ {
+ retval = FMT_HDR_LUMINANCE_SMALL_RANGE;
+ break;
+ }
+ quantize_hdr_luminance_large_range(color0, color1, output, quant_level);
+ retval = FMT_HDR_LUMINANCE_LARGE_RANGE;
+ break;
+
+ case FMT_LUMINANCE:
+ quantize_luminance(color0, color1, output, quant_level);
+ retval = FMT_LUMINANCE;
+ break;
+
+ case FMT_LUMINANCE_ALPHA:
+ if (quant_level <= 18)
+ {
+ if (try_quantize_luminance_alpha_delta(color0, color1, output, quant_level))
+ {
+ retval = FMT_LUMINANCE_ALPHA_DELTA;
+ break;
+ }
+ }
+ quantize_luminance_alpha(color0, color1, output, quant_level);
+ retval = FMT_LUMINANCE_ALPHA;
+ break;
+
+ case FMT_HDR_RGB_LDR_ALPHA:
+ quantize_hdr_rgb_ldr_alpha(color0, color1, output, quant_level);
+ retval = FMT_HDR_RGB_LDR_ALPHA;
+ break;
+
+ case FMT_HDR_RGBA:
+ quantize_hdr_rgb_alpha(color0, color1, output, quant_level);
+ retval = FMT_HDR_RGBA;
+ break;
+ }
+
+ return retval;
+}
+
+#endif
diff --git a/thirdparty/astcenc/astcenc_color_unquantize.cpp b/thirdparty/astcenc/astcenc_color_unquantize.cpp
new file mode 100644
index 0000000000..d31895a627
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_color_unquantize.cpp
@@ -0,0 +1,941 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2011-2021 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+#include <utility>
+
+/**
+ * @brief Functions for color unquantization.
+ */
+
+#include "astcenc_internal.h"
+
+/**
+ * @brief Un-blue-contract a color.
+ *
+ * This function reverses any applied blue contraction.
+ *
+ * @param input The input color that has been blue-contracted.
+ *
+ * @return The uncontracted color.
+ */
+static ASTCENC_SIMD_INLINE vint4 uncontract_color(
+ vint4 input
+) {
+ vmask4 mask(true, true, false, false);
+ vint4 bc0 = asr<1>(input + input.lane<2>());
+ return select(input, bc0, mask);
+}
+
+/**
+ * @brief Unpack an LDR RGBA color that uses delta encoding.
+ *
+ * @param input0 The packed endpoint 0 color.
+ * @param input1 The packed endpoint 1 color deltas.
+ * @param[out] output0 The unpacked endpoint 0 color.
+ * @param[out] output1 The unpacked endpoint 1 color.
+ */
+static void rgba_delta_unpack(
+ vint4 input0,
+ vint4 input1,
+ vint4& output0,
+ vint4& output1
+) {
+ // Apply bit transfer
+ bit_transfer_signed(input1, input0);
+
+ // Apply blue-uncontraction if needed
+ int rgb_sum = hadd_rgb_s(input1);
+ input1 = input1 + input0;
+ if (rgb_sum < 0)
+ {
+ input0 = uncontract_color(input0);
+ input1 = uncontract_color(input1);
+ std::swap(input0, input1);
+ }
+
+ output0 = clamp(0, 255, input0);
+ output1 = clamp(0, 255, input1);
+}
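+
+// Note that the sign of the summed RGB deltas is the implicit flag the encoder used to signal
+// blue contraction: the try_quantize_*_delta packers in astcenc_color_quantize.cpp reject any
+// encoding whose offset sum has the wrong sign, so the decoder can branch on hadd_rgb_s() alone.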
+
+/**
+ * @brief Unpack an LDR RGB color that uses delta encoding.
+ *
+ * Output alpha set to 255.
+ *
+ * @param input0 The packed endpoint 0 color.
+ * @param input1 The packed endpoint 1 color deltas.
+ * @param[out] output0 The unpacked endpoint 0 color.
+ * @param[out] output1 The unpacked endpoint 1 color.
+ */
+static void rgb_delta_unpack(
+ vint4 input0,
+ vint4 input1,
+ vint4& output0,
+ vint4& output1
+) {
+ rgba_delta_unpack(input0, input1, output0, output1);
+ output0.set_lane<3>(255);
+ output1.set_lane<3>(255);
+}
+
+/**
+ * @brief Unpack an LDR RGBA color that uses direct encoding.
+ *
+ * @param input0 The packed endpoint 0 color.
+ * @param input1 The packed endpoint 1 color.
+ * @param[out] output0 The unpacked endpoint 0 color.
+ * @param[out] output1 The unpacked endpoint 1 color.
+ */
+static void rgba_unpack(
+ vint4 input0,
+ vint4 input1,
+ vint4& output0,
+ vint4& output1
+) {
+ // Apply blue-uncontraction if needed
+ if (hadd_rgb_s(input0) > hadd_rgb_s(input1))
+ {
+ input0 = uncontract_color(input0);
+ input1 = uncontract_color(input1);
+ std::swap(input0, input1);
+ }
+
+ output0 = input0;
+ output1 = input1;
+}
+
+/**
+ * @brief Unpack an LDR RGB color that uses direct encoding.
+ *
+ * Output alpha set to 255.
+ *
+ * @param input0 The packed endpoint 0 color.
+ * @param input1 The packed endpoint 1 color.
+ * @param[out] output0 The unpacked endpoint 0 color.
+ * @param[out] output1 The unpacked endpoint 1 color.
+ */
+static void rgb_unpack(
+ vint4 input0,
+ vint4 input1,
+ vint4& output0,
+ vint4& output1
+) {
+ rgba_unpack(input0, input1, output0, output1);
+ output0.set_lane<3>(255);
+ output1.set_lane<3>(255);
+}
+
+/**
+ * @brief Unpack an LDR RGBA color that uses scaled encoding.
+ *
+ * Note that only the RGB channels use the scaled encoding; alpha uses direct encoding.
+ *
+ * @param input0 The packed endpoint 0 color.
+ * @param alpha1 The packed endpoint 1 alpha value.
+ * @param scale The packed quantized scale.
+ * @param[out] output0 The unpacked endpoint 0 color.
+ * @param[out] output1 The unpacked endpoint 1 color.
+ */
+static void rgb_scale_alpha_unpack(
+ vint4 input0,
+ uint8_t alpha1,
+ uint8_t scale,
+ vint4& output0,
+ vint4& output1
+) {
+ output1 = input0;
+ output1.set_lane<3>(alpha1);
+
+ output0 = asr<8>(input0 * scale);
+ output0.set_lane<3>(input0.lane<3>());
+}
+
+/**
+ * @brief Unpack an LDR RGB color that uses scaled encoding.
+ *
+ * Output alpha is 255.
+ *
+ * @param input0 The packed endpoint 0 color.
+ * @param scale The packed scale.
+ * @param[out] output0 The unpacked endpoint 0 color.
+ * @param[out] output1 The unpacked endpoint 1 color.
+ */
+static void rgb_scale_unpack(
+ vint4 input0,
+ int scale,
+ vint4& output0,
+ vint4& output1
+) {
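+	// Endpoint 1 is the stored RGB value; endpoint 0 is derived as (RGB * scale) >> 8.
+	// For example (illustrative values) RGB = (200, 100, 40) with scale = 128 gives
+	// endpoint 0 = (100, 50, 20)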
+ output1 = input0;
+ output1.set_lane<3>(255);
+
+ output0 = asr<8>(input0 * scale);
+ output0.set_lane<3>(255);
+}
+
+/**
+ * @brief Unpack an LDR L color that uses direct encoding.
+ *
+ * Output alpha is 255.
+ *
+ * @param input The packed endpoints.
+ * @param[out] output0 The unpacked endpoint 0 color.
+ * @param[out] output1 The unpacked endpoint 1 color.
+ */
+static void luminance_unpack(
+ const uint8_t input[2],
+ vint4& output0,
+ vint4& output1
+) {
+ int lum0 = input[0];
+ int lum1 = input[1];
+ output0 = vint4(lum0, lum0, lum0, 255);
+ output1 = vint4(lum1, lum1, lum1, 255);
+}
+
+/**
+ * @brief Unpack an LDR L color that uses delta encoding.
+ *
+ * Output alpha is 255.
+ *
+ * @param input The packed endpoints (L0, L1).
+ * @param[out] output0 The unpacked endpoint 0 color.
+ * @param[out] output1 The unpacked endpoint 1 color.
+ */
+static void luminance_delta_unpack(
+ const uint8_t input[2],
+ vint4& output0,
+ vint4& output1
+) {
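+	// L0's low six bits come from the top of input[0] and its top two bits from the top
+	// of input[1]; the low six bits of input[1] are an upward delta added to L0.
+	// For example (illustrative values) input = { 0x90, 0x45 } gives
+	// l0 = (0x90 >> 2) | (0x45 & 0xC0) = 36 | 64 = 100 and l1 = 100 + 5 = 105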
+ int v0 = input[0];
+ int v1 = input[1];
+ int l0 = (v0 >> 2) | (v1 & 0xC0);
+ int l1 = l0 + (v1 & 0x3F);
+
+ l1 = astc::min(l1, 255);
+
+ output0 = vint4(l0, l0, l0, 255);
+ output1 = vint4(l1, l1, l1, 255);
+}
+
+/**
+ * @brief Unpack an LDR LA color that uses direct encoding.
+ *
+ * @param input The packed endpoints (L0, L1, A0, A1).
+ * @param[out] output0 The unpacked endpoint 0 color.
+ * @param[out] output1 The unpacked endpoint 1 color.
+ */
+static void luminance_alpha_unpack(
+ const uint8_t input[4],
+ vint4& output0,
+ vint4& output1
+) {
+ int lum0 = input[0];
+ int lum1 = input[1];
+ int alpha0 = input[2];
+ int alpha1 = input[3];
+ output0 = vint4(lum0, lum0, lum0, alpha0);
+ output1 = vint4(lum1, lum1, lum1, alpha1);
+}
+
+/**
+ * @brief Unpack an LDR LA color that uses delta encoding.
+ *
+ * @param input The packed endpoints (L0, L1, A0, A1).
+ * @param[out] output0 The unpacked endpoint 0 color.
+ * @param[out] output1 The unpacked endpoint 1 color.
+ */
+static void luminance_alpha_delta_unpack(
+ const uint8_t input[4],
+ vint4& output0,
+ vint4& output1
+) {
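+	// The second byte of each pair is a bit-transferred delta: its top bit becomes bit 8
+	// of the base value, the remaining seven bits form a signed offset, and both values
+	// are then halved back to 8-bit range before the offset is applied and clamped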
+ int lum0 = input[0];
+ int lum1 = input[1];
+ int alpha0 = input[2];
+ int alpha1 = input[3];
+
+ lum0 |= (lum1 & 0x80) << 1;
+ alpha0 |= (alpha1 & 0x80) << 1;
+ lum1 &= 0x7F;
+ alpha1 &= 0x7F;
+
+ if (lum1 & 0x40)
+ {
+ lum1 -= 0x80;
+ }
+
+ if (alpha1 & 0x40)
+ {
+ alpha1 -= 0x80;
+ }
+
+ lum0 >>= 1;
+ lum1 >>= 1;
+ alpha0 >>= 1;
+ alpha1 >>= 1;
+ lum1 += lum0;
+ alpha1 += alpha0;
+
+ lum1 = astc::clamp(lum1, 0, 255);
+ alpha1 = astc::clamp(alpha1, 0, 255);
+
+ output0 = vint4(lum0, lum0, lum0, alpha0);
+ output1 = vint4(lum1, lum1, lum1, alpha1);
+}
+
+/**
+ * @brief Unpack an HDR RGB + offset encoding.
+ *
+ * @param input The packed endpoints (packed and modal).
+ * @param[out] output0 The unpacked endpoint 0 color.
+ * @param[out] output1 The unpacked endpoint 1 color.
+ */
+static void hdr_rgbo_unpack(
+ const uint8_t input[4],
+ vint4& output0,
+ vint4& output1
+) {
+ int v0 = input[0];
+ int v1 = input[1];
+ int v2 = input[2];
+ int v3 = input[3];
+
+ int modeval = ((v0 & 0xC0) >> 6) | (((v1 & 0x80) >> 7) << 2) | (((v2 & 0x80) >> 7) << 3);
+
+ int majcomp;
+ int mode;
+ if ((modeval & 0xC) != 0xC)
+ {
+ majcomp = modeval >> 2;
+ mode = modeval & 3;
+ }
+ else if (modeval != 0xF)
+ {
+ majcomp = modeval & 3;
+ mode = 4;
+ }
+ else
+ {
+ majcomp = 0;
+ mode = 5;
+ }
+
+ int red = v0 & 0x3F;
+ int green = v1 & 0x1F;
+ int blue = v2 & 0x1F;
+ int scale = v3 & 0x1F;
+
+ int bit0 = (v1 >> 6) & 1;
+ int bit1 = (v1 >> 5) & 1;
+ int bit2 = (v2 >> 6) & 1;
+ int bit3 = (v2 >> 5) & 1;
+ int bit4 = (v3 >> 7) & 1;
+ int bit5 = (v3 >> 6) & 1;
+ int bit6 = (v3 >> 5) & 1;
+
+ int ohcomp = 1 << mode;
+
+ if (ohcomp & 0x30)
+ green |= bit0 << 6;
+ if (ohcomp & 0x3A)
+ green |= bit1 << 5;
+ if (ohcomp & 0x30)
+ blue |= bit2 << 6;
+ if (ohcomp & 0x3A)
+ blue |= bit3 << 5;
+
+ if (ohcomp & 0x3D)
+ scale |= bit6 << 5;
+ if (ohcomp & 0x2D)
+ scale |= bit5 << 6;
+ if (ohcomp & 0x04)
+ scale |= bit4 << 7;
+
+ if (ohcomp & 0x3B)
+ red |= bit4 << 6;
+ if (ohcomp & 0x04)
+ red |= bit3 << 6;
+
+ if (ohcomp & 0x10)
+ red |= bit5 << 7;
+ if (ohcomp & 0x0F)
+ red |= bit2 << 7;
+
+ if (ohcomp & 0x05)
+ red |= bit1 << 8;
+ if (ohcomp & 0x0A)
+ red |= bit0 << 8;
+
+ if (ohcomp & 0x05)
+ red |= bit0 << 9;
+ if (ohcomp & 0x02)
+ red |= bit6 << 9;
+
+ if (ohcomp & 0x01)
+ red |= bit3 << 10;
+ if (ohcomp & 0x02)
+ red |= bit5 << 10;
+
+ // expand to 12 bits.
+ static const int shamts[6] { 1, 1, 2, 3, 4, 5 };
+ int shamt = shamts[mode];
+ red <<= shamt;
+ green <<= shamt;
+ blue <<= shamt;
+ scale <<= shamt;
+
+ // on modes 0 to 4, the values stored for "green" and "blue" are differentials,
+ // not absolute values.
+ if (mode != 5)
+ {
+ green = red - green;
+ blue = red - blue;
+ }
+
+ // switch around components.
+ int temp;
+ switch (majcomp)
+ {
+ case 1:
+ temp = red;
+ red = green;
+ green = temp;
+ break;
+ case 2:
+ temp = red;
+ red = blue;
+ blue = temp;
+ break;
+ default:
+ break;
+ }
+
+ int red0 = red - scale;
+ int green0 = green - scale;
+ int blue0 = blue - scale;
+
+ // clamp to [0,0xFFF].
+ if (red < 0)
+ red = 0;
+ if (green < 0)
+ green = 0;
+ if (blue < 0)
+ blue = 0;
+
+ if (red0 < 0)
+ red0 = 0;
+ if (green0 < 0)
+ green0 = 0;
+ if (blue0 < 0)
+ blue0 = 0;
+
+ output0 = vint4(red0 << 4, green0 << 4, blue0 << 4, 0x7800);
+ output1 = vint4(red << 4, green << 4, blue << 4, 0x7800);
+}
+
+/**
+ * @brief Unpack an HDR RGB direct encoding.
+ *
+ * @param input The packed endpoints (packed and modal).
+ * @param[out] output0 The unpacked endpoint 0 color.
+ * @param[out] output1 The unpacked endpoint 1 color.
+ */
+static void hdr_rgb_unpack(
+ const uint8_t input[6],
+ vint4& output0,
+ vint4& output1
+) {
+
+ int v0 = input[0];
+ int v1 = input[1];
+ int v2 = input[2];
+ int v3 = input[3];
+ int v4 = input[4];
+ int v5 = input[5];
+
+ // extract all the fixed-placement bitfields
+ int modeval = ((v1 & 0x80) >> 7) | (((v2 & 0x80) >> 7) << 1) | (((v3 & 0x80) >> 7) << 2);
+
+ int majcomp = ((v4 & 0x80) >> 7) | (((v5 & 0x80) >> 7) << 1);
+
+ if (majcomp == 3)
+ {
+ output0 = vint4(v0 << 8, v2 << 8, (v4 & 0x7F) << 9, 0x7800);
+ output1 = vint4(v1 << 8, v3 << 8, (v5 & 0x7F) << 9, 0x7800);
+ return;
+ }
+
+ int a = v0 | ((v1 & 0x40) << 2);
+ int b0 = v2 & 0x3f;
+ int b1 = v3 & 0x3f;
+ int c = v1 & 0x3f;
+ int d0 = v4 & 0x7f;
+ int d1 = v5 & 0x7f;
+
+ // get hold of the number of bits in 'd0' and 'd1'
+ static const int dbits_tab[8] { 7, 6, 7, 6, 5, 6, 5, 6 };
+ int dbits = dbits_tab[modeval];
+
+ // extract six variable-placement bits
+ int bit0 = (v2 >> 6) & 1;
+ int bit1 = (v3 >> 6) & 1;
+ int bit2 = (v4 >> 6) & 1;
+ int bit3 = (v5 >> 6) & 1;
+ int bit4 = (v4 >> 5) & 1;
+ int bit5 = (v5 >> 5) & 1;
+
+ // and prepend the variable-placement bits depending on mode.
+ int ohmod = 1 << modeval; // one-hot-mode
+ if (ohmod & 0xA4)
+ a |= bit0 << 9;
+ if (ohmod & 0x8)
+ a |= bit2 << 9;
+ if (ohmod & 0x50)
+ a |= bit4 << 9;
+
+ if (ohmod & 0x50)
+ a |= bit5 << 10;
+ if (ohmod & 0xA0)
+ a |= bit1 << 10;
+
+ if (ohmod & 0xC0)
+ a |= bit2 << 11;
+
+ if (ohmod & 0x4)
+ c |= bit1 << 6;
+ if (ohmod & 0xE8)
+ c |= bit3 << 6;
+
+ if (ohmod & 0x20)
+ c |= bit2 << 7;
+
+ if (ohmod & 0x5B)
+ {
+ b0 |= bit0 << 6;
+ b1 |= bit1 << 6;
+ }
+
+ if (ohmod & 0x12)
+ {
+ b0 |= bit2 << 7;
+ b1 |= bit3 << 7;
+ }
+
+ if (ohmod & 0xAF)
+ {
+ d0 |= bit4 << 5;
+ d1 |= bit5 << 5;
+ }
+
+ if (ohmod & 0x5)
+ {
+ d0 |= bit2 << 6;
+ d1 |= bit3 << 6;
+ }
+
+ // sign-extend 'd0' and 'd1'
+ // note: this code assumes that signed right-shift actually sign-fills, not zero-fills.
+ int32_t d0x = d0;
+ int32_t d1x = d1;
+ int sx_shamt = 32 - dbits;
+ d0x <<= sx_shamt;
+ d0x >>= sx_shamt;
+ d1x <<= sx_shamt;
+ d1x >>= sx_shamt;
+ d0 = d0x;
+ d1 = d1x;
+
+ // expand all values to 12 bits, with left-shift as needed.
+ int val_shamt = (modeval >> 1) ^ 3;
+ a <<= val_shamt;
+ b0 <<= val_shamt;
+ b1 <<= val_shamt;
+ c <<= val_shamt;
+ d0 <<= val_shamt;
+ d1 <<= val_shamt;
+
+ // then compute the actual color values.
+ int red1 = a;
+ int green1 = a - b0;
+ int blue1 = a - b1;
+ int red0 = a - c;
+ int green0 = a - b0 - c - d0;
+ int blue0 = a - b1 - c - d1;
+
+ // clamp the color components to [0,2^12 - 1]
+ red0 = astc::clamp(red0, 0, 4095);
+ green0 = astc::clamp(green0, 0, 4095);
+ blue0 = astc::clamp(blue0, 0, 4095);
+
+ red1 = astc::clamp(red1, 0, 4095);
+ green1 = astc::clamp(green1, 0, 4095);
+ blue1 = astc::clamp(blue1, 0, 4095);
+
+ // switch around the color components
+ int temp0, temp1;
+ switch (majcomp)
+ {
+ case 1: // switch around red and green
+ temp0 = red0;
+ temp1 = red1;
+ red0 = green0;
+ red1 = green1;
+ green0 = temp0;
+ green1 = temp1;
+ break;
+ case 2: // switch around red and blue
+ temp0 = red0;
+ temp1 = red1;
+ red0 = blue0;
+ red1 = blue1;
+ blue0 = temp0;
+ blue1 = temp1;
+ break;
+ case 0: // no switch
+ break;
+ }
+
+ output0 = vint4(red0 << 4, green0 << 4, blue0 << 4, 0x7800);
+ output1 = vint4(red1 << 4, green1 << 4, blue1 << 4, 0x7800);
+}
+
+/**
+ * @brief Unpack an HDR RGB + LDR A direct encoding.
+ *
+ * @param input The packed endpoints (packed and modal).
+ * @param[out] output0 The unpacked endpoint 0 color.
+ * @param[out] output1 The unpacked endpoint 1 color.
+ */
+static void hdr_rgb_ldr_alpha_unpack(
+ const uint8_t input[8],
+ vint4& output0,
+ vint4& output1
+) {
+ hdr_rgb_unpack(input, output0, output1);
+
+ int v6 = input[6];
+ int v7 = input[7];
+ output0.set_lane<3>(v6);
+ output1.set_lane<3>(v7);
+}
+
+/**
+ * @brief Unpack an HDR L (small range) direct encoding.
+ *
+ * @param input The packed endpoints (packed and modal).
+ * @param[out] output0 The unpacked endpoint 0 color.
+ * @param[out] output1 The unpacked endpoint 1 color.
+ */
+static void hdr_luminance_small_range_unpack(
+ const uint8_t input[2],
+ vint4& output0,
+ vint4& output1
+) {
+ int v0 = input[0];
+ int v1 = input[1];
+
+ int y0, y1;
+ if (v0 & 0x80)
+ {
+ y0 = ((v1 & 0xE0) << 4) | ((v0 & 0x7F) << 2);
+ y1 = (v1 & 0x1F) << 2;
+ }
+ else
+ {
+ y0 = ((v1 & 0xF0) << 4) | ((v0 & 0x7F) << 1);
+ y1 = (v1 & 0xF) << 1;
+ }
+
+ y1 += y0;
+ if (y1 > 0xFFF)
+ {
+ y1 = 0xFFF;
+ }
+
+ output0 = vint4(y0 << 4, y0 << 4, y0 << 4, 0x7800);
+ output1 = vint4(y1 << 4, y1 << 4, y1 << 4, 0x7800);
+}
+
+/**
+ * @brief Unpack an HDR L (large range) direct encoding.
+ *
+ * @param input The packed endpoints (packed and modal).
+ * @param[out] output0 The unpacked endpoint 0 color.
+ * @param[out] output1 The unpacked endpoint 1 color.
+ */
+static void hdr_luminance_large_range_unpack(
+ const uint8_t input[2],
+ vint4& output0,
+ vint4& output1
+) {
+ int v0 = input[0];
+ int v1 = input[1];
+
+ int y0, y1;
+ if (v1 >= v0)
+ {
+ y0 = v0 << 4;
+ y1 = v1 << 4;
+ }
+ else
+ {
+ y0 = (v1 << 4) + 8;
+ y1 = (v0 << 4) - 8;
+ }
+
+ output0 = vint4(y0 << 4, y0 << 4, y0 << 4, 0x7800);
+ output1 = vint4(y1 << 4, y1 << 4, y1 << 4, 0x7800);
+}
+
+/**
+ * @brief Unpack an HDR A direct encoding.
+ *
+ * @param input The packed endpoints (packed and modal).
+ * @param[out] output0 The unpacked endpoint 0 color.
+ * @param[out] output1 The unpacked endpoint 1 color.
+ */
+static void hdr_alpha_unpack(
+ const uint8_t input[2],
+ int& output0,
+ int& output1
+) {
+
+ int v6 = input[0];
+ int v7 = input[1];
+
+ int selector = ((v6 >> 7) & 1) | ((v7 >> 6) & 2);
+ v6 &= 0x7F;
+ v7 &= 0x7F;
+ if (selector == 3)
+ {
+ output0 = v6 << 5;
+ output1 = v7 << 5;
+ }
+ else
+ {
+ v6 |= (v7 << (selector + 1)) & 0x780;
+ v7 &= (0x3f >> selector);
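+		// Sign-extend the remaining (6 - selector) bit field using (x ^ m) - m,
+		// where m = 32 >> selector is the mask for the field's top bit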
+ v7 ^= 32 >> selector;
+ v7 -= 32 >> selector;
+ v6 <<= (4 - selector);
+ v7 <<= (4 - selector);
+ v7 += v6;
+
+ if (v7 < 0)
+ {
+ v7 = 0;
+ }
+ else if (v7 > 0xFFF)
+ {
+ v7 = 0xFFF;
+ }
+
+ output0 = v6;
+ output1 = v7;
+ }
+
+ output0 <<= 4;
+ output1 <<= 4;
+}
+
+/**
+ * @brief Unpack an HDR RGBA direct encoding.
+ *
+ * @param input The packed endpoints (packed and modal).
+ * @param[out] output0 The unpacked endpoint 0 color.
+ * @param[out] output1 The unpacked endpoint 1 color.
+ */
+static void hdr_rgb_hdr_alpha_unpack(
+ const uint8_t input[8],
+ vint4& output0,
+ vint4& output1
+) {
+ hdr_rgb_unpack(input, output0, output1);
+
+ int alpha0, alpha1;
+ hdr_alpha_unpack(input + 6, alpha0, alpha1);
+
+ output0.set_lane<3>(alpha0);
+ output1.set_lane<3>(alpha1);
+}
+
+/* See header for documentation. */
+void unpack_color_endpoints(
+ astcenc_profile decode_mode,
+ int format,
+ const uint8_t* input,
+ bool& rgb_hdr,
+ bool& alpha_hdr,
+ vint4& output0,
+ vint4& output1
+) {
+ // Assume no NaNs and LDR endpoints unless set later
+ rgb_hdr = false;
+ alpha_hdr = false;
+
+ bool alpha_hdr_default = false;
+
+ switch (format)
+ {
+ case FMT_LUMINANCE:
+ luminance_unpack(input, output0, output1);
+ break;
+
+ case FMT_LUMINANCE_DELTA:
+ luminance_delta_unpack(input, output0, output1);
+ break;
+
+ case FMT_HDR_LUMINANCE_SMALL_RANGE:
+ rgb_hdr = true;
+ alpha_hdr_default = true;
+ hdr_luminance_small_range_unpack(input, output0, output1);
+ break;
+
+ case FMT_HDR_LUMINANCE_LARGE_RANGE:
+ rgb_hdr = true;
+ alpha_hdr_default = true;
+ hdr_luminance_large_range_unpack(input, output0, output1);
+ break;
+
+ case FMT_LUMINANCE_ALPHA:
+ luminance_alpha_unpack(input, output0, output1);
+ break;
+
+ case FMT_LUMINANCE_ALPHA_DELTA:
+ luminance_alpha_delta_unpack(input, output0, output1);
+ break;
+
+ case FMT_RGB_SCALE:
+ {
+ vint4 input0q(input[0], input[1], input[2], 0);
+ uint8_t scale = input[3];
+ rgb_scale_unpack(input0q, scale, output0, output1);
+ }
+ break;
+
+ case FMT_RGB_SCALE_ALPHA:
+ {
+ vint4 input0q(input[0], input[1], input[2], input[4]);
+ uint8_t alpha1q = input[5];
+ uint8_t scaleq = input[3];
+ rgb_scale_alpha_unpack(input0q, alpha1q, scaleq, output0, output1);
+ }
+ break;
+
+ case FMT_HDR_RGB_SCALE:
+ rgb_hdr = true;
+ alpha_hdr_default = true;
+ hdr_rgbo_unpack(input, output0, output1);
+ break;
+
+ case FMT_RGB:
+ {
+ vint4 input0q(input[0], input[2], input[4], 0);
+ vint4 input1q(input[1], input[3], input[5], 0);
+ rgb_unpack(input0q, input1q, output0, output1);
+ }
+ break;
+
+ case FMT_RGB_DELTA:
+ {
+ vint4 input0q(input[0], input[2], input[4], 0);
+ vint4 input1q(input[1], input[3], input[5], 0);
+ rgb_delta_unpack(input0q, input1q, output0, output1);
+ }
+ break;
+
+ case FMT_HDR_RGB:
+ rgb_hdr = true;
+ alpha_hdr_default = true;
+ hdr_rgb_unpack(input, output0, output1);
+ break;
+
+ case FMT_RGBA:
+ {
+ vint4 input0q(input[0], input[2], input[4], input[6]);
+ vint4 input1q(input[1], input[3], input[5], input[7]);
+ rgba_unpack(input0q, input1q, output0, output1);
+ }
+ break;
+
+ case FMT_RGBA_DELTA:
+ {
+ vint4 input0q(input[0], input[2], input[4], input[6]);
+ vint4 input1q(input[1], input[3], input[5], input[7]);
+ rgba_delta_unpack(input0q, input1q, output0, output1);
+ }
+ break;
+
+ case FMT_HDR_RGB_LDR_ALPHA:
+ rgb_hdr = true;
+ hdr_rgb_ldr_alpha_unpack(input, output0, output1);
+ break;
+
+ case FMT_HDR_RGBA:
+ rgb_hdr = true;
+ alpha_hdr = true;
+ hdr_rgb_hdr_alpha_unpack(input, output0, output1);
+ break;
+ }
+
+ // Assign a correct default alpha
+ if (alpha_hdr_default)
+ {
+ if (decode_mode == ASTCENC_PRF_HDR)
+ {
+ output0.set_lane<3>(0x7800);
+ output1.set_lane<3>(0x7800);
+ alpha_hdr = true;
+ }
+ else
+ {
+ output0.set_lane<3>(0x00FF);
+ output1.set_lane<3>(0x00FF);
+ alpha_hdr = false;
+ }
+ }
+
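+	// LDR endpoints are expanded from 8 bits to 16 bits by scaling with 257, since
+	// 255 * 257 = 65535; HDR endpoints are already 16-bit values so they scale by 1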
+ vint4 ldr_scale(257);
+ vint4 hdr_scale(1);
+ vint4 output_scale = ldr_scale;
+
+ // An LDR profile image
+ if ((decode_mode == ASTCENC_PRF_LDR) ||
+ (decode_mode == ASTCENC_PRF_LDR_SRGB))
+ {
+		// Also matches HDR alpha, as we cannot have HDR alpha without HDR RGB
+ if (rgb_hdr == true)
+ {
+ output0 = vint4(0xFF00, 0x0000, 0xFF00, 0xFF00);
+ output1 = vint4(0xFF00, 0x0000, 0xFF00, 0xFF00);
+ output_scale = hdr_scale;
+
+ rgb_hdr = false;
+ alpha_hdr = false;
+ }
+ }
+ // An HDR profile image
+ else
+ {
+ vmask4 hdr_lanes(rgb_hdr, rgb_hdr, rgb_hdr, alpha_hdr);
+ output_scale = select(ldr_scale, hdr_scale, hdr_lanes);
+ }
+
+ output0 = output0 * output_scale;
+ output1 = output1 * output_scale;
+}
diff --git a/thirdparty/astcenc/astcenc_compress_symbolic.cpp b/thirdparty/astcenc/astcenc_compress_symbolic.cpp
new file mode 100644
index 0000000000..afb76246e7
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_compress_symbolic.cpp
@@ -0,0 +1,1455 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2011-2023 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+#if !defined(ASTCENC_DECOMPRESS_ONLY)
+
+/**
+ * @brief Functions to compress a symbolic block.
+ */
+
+#include "astcenc_internal.h"
+#include "astcenc_diagnostic_trace.h"
+
+#include <cassert>
+
+/**
+ * @brief Merge two planes of endpoints into a single vector.
+ *
+ * @param ep_plane1 The endpoints for plane 1.
+ * @param ep_plane2 The endpoints for plane 2.
+ * @param component_plane2 The color component for plane 2.
+ * @param[out] result The merged output.
+ */
+static void merge_endpoints(
+ const endpoints& ep_plane1,
+ const endpoints& ep_plane2,
+ unsigned int component_plane2,
+ endpoints& result
+) {
+ unsigned int partition_count = ep_plane1.partition_count;
+ assert(partition_count == 1);
+
+ vmask4 sep_mask = vint4::lane_id() == vint4(component_plane2);
+
+ result.partition_count = partition_count;
+ result.endpt0[0] = select(ep_plane1.endpt0[0], ep_plane2.endpt0[0], sep_mask);
+ result.endpt1[0] = select(ep_plane1.endpt1[0], ep_plane2.endpt1[0], sep_mask);
+}
+
+/**
+ * @brief Attempt to improve weights given a chosen configuration.
+ *
+ * Given a fixed weight grid decimation and weight value quantization, iterate over all weights (per
+ * partition and per plane) and attempt to improve image quality by moving each weight up by one or
+ * down by one quantization step.
+ *
+ * This is a specialized function which only supports operating on undecimated weight grids,
+ * and therefore primarily improves the performance of 4x4 and 5x5 blocks, where grid
+ * decimation is needed less often.
+ *
+ * @param decode_mode The decode mode (LDR, HDR).
+ * @param bsd The block size information.
+ * @param blk The image block color data to compress.
+ * @param[out] scb The symbolic compressed block output.
+ */
+static bool realign_weights_undecimated(
+ astcenc_profile decode_mode,
+ const block_size_descriptor& bsd,
+ const image_block& blk,
+ symbolic_compressed_block& scb
+) {
+ // Get the partition descriptor
+ unsigned int partition_count = scb.partition_count;
+ const auto& pi = bsd.get_partition_info(partition_count, scb.partition_index);
+
+ // Get the quantization table
+ const block_mode& bm = bsd.get_block_mode(scb.block_mode);
+ unsigned int weight_quant_level = bm.quant_mode;
+ const quant_and_transfer_table& qat = quant_and_xfer_tables[weight_quant_level];
+
+ unsigned int max_plane = bm.is_dual_plane;
+ int plane2_component = scb.plane2_component;
+ vmask4 plane_mask = vint4::lane_id() == vint4(plane2_component);
+
+ // Decode the color endpoints
+ bool rgb_hdr;
+ bool alpha_hdr;
+ vint4 endpnt0[BLOCK_MAX_PARTITIONS];
+ vint4 endpnt1[BLOCK_MAX_PARTITIONS];
+ vfloat4 endpnt0f[BLOCK_MAX_PARTITIONS];
+ vfloat4 offset[BLOCK_MAX_PARTITIONS];
+
+ promise(partition_count > 0);
+
+ for (unsigned int pa_idx = 0; pa_idx < partition_count; pa_idx++)
+ {
+ unpack_color_endpoints(decode_mode,
+ scb.color_formats[pa_idx],
+ scb.color_values[pa_idx],
+ rgb_hdr, alpha_hdr,
+ endpnt0[pa_idx],
+ endpnt1[pa_idx]);
+ }
+
+ uint8_t* dec_weights_uquant = scb.weights;
+ bool adjustments = false;
+
+ // For each plane and partition ...
+ for (unsigned int pl_idx = 0; pl_idx <= max_plane; pl_idx++)
+ {
+ for (unsigned int pa_idx = 0; pa_idx < partition_count; pa_idx++)
+ {
+ // Compute the endpoint delta for all components in current plane
+ vint4 epd = endpnt1[pa_idx] - endpnt0[pa_idx];
+ epd = select(epd, vint4::zero(), plane_mask);
+
+ endpnt0f[pa_idx] = int_to_float(endpnt0[pa_idx]);
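+			// The 1/64 factor matches ASTC weight interpolation, where unquantized weights
+			// span 0..64 and color = ep0 + (ep1 - ep0) * w / 64 (ignoring rounding), so
+			// offset is the color change per unquantized weight step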
+ offset[pa_idx] = int_to_float(epd) * (1.0f / 64.0f);
+ }
+
+ // For each weight compute previous, current, and next errors
+ promise(bsd.texel_count > 0);
+ for (unsigned int texel = 0; texel < bsd.texel_count; texel++)
+ {
+ int uqw = dec_weights_uquant[texel];
+
+ uint32_t prev_and_next = qat.prev_next_values[uqw];
+ int uqw_down = prev_and_next & 0xFF;
+ int uqw_up = (prev_and_next >> 8) & 0xFF;
+
+ // Interpolate the colors to create the diffs
+ float weight_base = static_cast<float>(uqw);
+ float weight_down = static_cast<float>(uqw_down - uqw);
+ float weight_up = static_cast<float>(uqw_up - uqw);
+
+ unsigned int partition = pi.partition_of_texel[texel];
+ vfloat4 color_offset = offset[partition];
+ vfloat4 color_base = endpnt0f[partition];
+
+ vfloat4 color = color_base + color_offset * weight_base;
+ vfloat4 orig_color = blk.texel(texel);
+ vfloat4 error_weight = blk.channel_weight;
+
+ vfloat4 color_diff = color - orig_color;
+ vfloat4 color_diff_down = color_diff + color_offset * weight_down;
+ vfloat4 color_diff_up = color_diff + color_offset * weight_up;
+
+ float error_base = dot_s(color_diff * color_diff, error_weight);
+ float error_down = dot_s(color_diff_down * color_diff_down, error_weight);
+ float error_up = dot_s(color_diff_up * color_diff_up, error_weight);
+
+ // Check if the prev or next error is better, and if so use it
+ if ((error_up < error_base) && (error_up < error_down) && (uqw < 64))
+ {
+ dec_weights_uquant[texel] = static_cast<uint8_t>(uqw_up);
+ adjustments = true;
+ }
+ else if ((error_down < error_base) && (uqw > 0))
+ {
+ dec_weights_uquant[texel] = static_cast<uint8_t>(uqw_down);
+ adjustments = true;
+ }
+ }
+
+ // Prepare iteration for plane 2
+ dec_weights_uquant += WEIGHTS_PLANE2_OFFSET;
+ plane_mask = ~plane_mask;
+ }
+
+ return adjustments;
+}
+
+/**
+ * @brief Attempt to improve weights given a chosen configuration.
+ *
+ * Given a fixed weight grid decimation and weight value quantization, iterate over all weights (per
+ * partition and per plane) and attempt to improve image quality by moving each weight up by one or
+ * down by one quantization step.
+ *
+ * @param decode_mode The decode mode (LDR, HDR).
+ * @param bsd The block size information.
+ * @param blk The image block color data to compress.
+ * @param[out] scb The symbolic compressed block output.
+ */
+static bool realign_weights_decimated(
+ astcenc_profile decode_mode,
+ const block_size_descriptor& bsd,
+ const image_block& blk,
+ symbolic_compressed_block& scb
+) {
+ // Get the partition descriptor
+ unsigned int partition_count = scb.partition_count;
+ const auto& pi = bsd.get_partition_info(partition_count, scb.partition_index);
+
+ // Get the quantization table
+ const block_mode& bm = bsd.get_block_mode(scb.block_mode);
+ unsigned int weight_quant_level = bm.quant_mode;
+ const quant_and_transfer_table& qat = quant_and_xfer_tables[weight_quant_level];
+
+ // Get the decimation table
+ const decimation_info& di = bsd.get_decimation_info(bm.decimation_mode);
+ unsigned int weight_count = di.weight_count;
+ assert(weight_count != bsd.texel_count);
+
+ unsigned int max_plane = bm.is_dual_plane;
+ int plane2_component = scb.plane2_component;
+ vmask4 plane_mask = vint4::lane_id() == vint4(plane2_component);
+
+ // Decode the color endpoints
+ bool rgb_hdr;
+ bool alpha_hdr;
+ vint4 endpnt0[BLOCK_MAX_PARTITIONS];
+ vint4 endpnt1[BLOCK_MAX_PARTITIONS];
+ vfloat4 endpnt0f[BLOCK_MAX_PARTITIONS];
+ vfloat4 offset[BLOCK_MAX_PARTITIONS];
+
+ promise(partition_count > 0);
+ promise(weight_count > 0);
+
+ for (unsigned int pa_idx = 0; pa_idx < partition_count; pa_idx++)
+ {
+ unpack_color_endpoints(decode_mode,
+ scb.color_formats[pa_idx],
+ scb.color_values[pa_idx],
+ rgb_hdr, alpha_hdr,
+ endpnt0[pa_idx],
+ endpnt1[pa_idx]);
+ }
+
+ uint8_t* dec_weights_uquant = scb.weights;
+ bool adjustments = false;
+
+ // For each plane and partition ...
+ for (unsigned int pl_idx = 0; pl_idx <= max_plane; pl_idx++)
+ {
+ for (unsigned int pa_idx = 0; pa_idx < partition_count; pa_idx++)
+ {
+ // Compute the endpoint delta for all components in current plane
+ vint4 epd = endpnt1[pa_idx] - endpnt0[pa_idx];
+ epd = select(epd, vint4::zero(), plane_mask);
+
+ endpnt0f[pa_idx] = int_to_float(endpnt0[pa_idx]);
+ offset[pa_idx] = int_to_float(epd) * (1.0f / 64.0f);
+ }
+
+ // Create an unquantized weight grid for this decimation level
+ alignas(ASTCENC_VECALIGN) float uq_weightsf[BLOCK_MAX_WEIGHTS];
+ for (unsigned int we_idx = 0; we_idx < weight_count; we_idx += ASTCENC_SIMD_WIDTH)
+ {
+ vint unquant_value(dec_weights_uquant + we_idx);
+ vfloat unquant_valuef = int_to_float(unquant_value);
+ storea(unquant_valuef, uq_weightsf + we_idx);
+ }
+
+ // For each weight compute previous, current, and next errors
+ for (unsigned int we_idx = 0; we_idx < weight_count; we_idx++)
+ {
+ int uqw = dec_weights_uquant[we_idx];
+ uint32_t prev_and_next = qat.prev_next_values[uqw];
+
+ float uqw_base = uq_weightsf[we_idx];
+ float uqw_down = static_cast<float>(prev_and_next & 0xFF);
+ float uqw_up = static_cast<float>((prev_and_next >> 8) & 0xFF);
+
+ float uqw_diff_down = uqw_down - uqw_base;
+ float uqw_diff_up = uqw_up - uqw_base;
+
+ vfloat4 error_basev = vfloat4::zero();
+ vfloat4 error_downv = vfloat4::zero();
+ vfloat4 error_upv = vfloat4::zero();
+
+ // Interpolate the colors to create the diffs
+ unsigned int texels_to_evaluate = di.weight_texel_count[we_idx];
+ promise(texels_to_evaluate > 0);
+ for (unsigned int te_idx = 0; te_idx < texels_to_evaluate; te_idx++)
+ {
+ unsigned int texel = di.weight_texels_tr[te_idx][we_idx];
+
+ float tw_base = di.texel_contrib_for_weight[te_idx][we_idx];
+
+ float weight_base = (uq_weightsf[di.texel_weights_tr[0][texel]] * di.texel_weight_contribs_float_tr[0][texel]
+ + uq_weightsf[di.texel_weights_tr[1][texel]] * di.texel_weight_contribs_float_tr[1][texel])
+ + (uq_weightsf[di.texel_weights_tr[2][texel]] * di.texel_weight_contribs_float_tr[2][texel]
+ + uq_weightsf[di.texel_weights_tr[3][texel]] * di.texel_weight_contribs_float_tr[3][texel]);
+
+				// Ideally this would be integer rounded, but the IQ gain isn't worth the overhead
+ // float weight = astc::flt_rd(weight_base + 0.5f);
+ // float weight_down = astc::flt_rd(weight_base + 0.5f + uqw_diff_down * tw_base) - weight;
+ // float weight_up = astc::flt_rd(weight_base + 0.5f + uqw_diff_up * tw_base) - weight;
+ float weight_down = weight_base + uqw_diff_down * tw_base - weight_base;
+ float weight_up = weight_base + uqw_diff_up * tw_base - weight_base;
+
+ unsigned int partition = pi.partition_of_texel[texel];
+ vfloat4 color_offset = offset[partition];
+ vfloat4 color_base = endpnt0f[partition];
+
+ vfloat4 color = color_base + color_offset * weight_base;
+ vfloat4 orig_color = blk.texel(texel);
+
+ vfloat4 color_diff = color - orig_color;
+ vfloat4 color_down_diff = color_diff + color_offset * weight_down;
+ vfloat4 color_up_diff = color_diff + color_offset * weight_up;
+
+ error_basev += color_diff * color_diff;
+ error_downv += color_down_diff * color_down_diff;
+ error_upv += color_up_diff * color_up_diff;
+ }
+
+ vfloat4 error_weight = blk.channel_weight;
+ float error_base = hadd_s(error_basev * error_weight);
+ float error_down = hadd_s(error_downv * error_weight);
+ float error_up = hadd_s(error_upv * error_weight);
+
+ // Check if the prev or next error is better, and if so use it
+ if ((error_up < error_base) && (error_up < error_down) && (uqw < 64))
+ {
+ uq_weightsf[we_idx] = uqw_up;
+ dec_weights_uquant[we_idx] = static_cast<uint8_t>(uqw_up);
+ adjustments = true;
+ }
+ else if ((error_down < error_base) && (uqw > 0))
+ {
+ uq_weightsf[we_idx] = uqw_down;
+ dec_weights_uquant[we_idx] = static_cast<uint8_t>(uqw_down);
+ adjustments = true;
+ }
+ }
+
+ // Prepare iteration for plane 2
+ dec_weights_uquant += WEIGHTS_PLANE2_OFFSET;
+ plane_mask = ~plane_mask;
+ }
+
+ return adjustments;
+}
+
+/**
+ * @brief Compress a block using a chosen partitioning and 1 plane of weights.
+ *
+ * @param config The compressor configuration.
+ * @param bsd The block size information.
+ * @param blk The image block color data to compress.
+ * @param only_always True if we only use "always" percentile block modes.
+ * @param tune_errorval_threshold The error value threshold.
+ * @param partition_count The partition count.
+ * @param partition_index The partition index if @c partition_count is 2-4.
+ * @param[out] scb The symbolic compressed block output.
+ * @param[out] tmpbuf The preallocated scratch working buffers.
+ * @param quant_limit The maximum weight quantization level to consider.
+ *
+ * @return The lowest error value found across the trialed candidate modes.
+ */
+static float compress_symbolic_block_for_partition_1plane(
+ const astcenc_config& config,
+ const block_size_descriptor& bsd,
+ const image_block& blk,
+ bool only_always,
+ float tune_errorval_threshold,
+ unsigned int partition_count,
+ unsigned int partition_index,
+ symbolic_compressed_block& scb,
+ compression_working_buffers& tmpbuf,
+ int quant_limit
+) {
+ promise(partition_count > 0);
+ promise(config.tune_candidate_limit > 0);
+ promise(config.tune_refinement_limit > 0);
+
+ int max_weight_quant = astc::min(static_cast<int>(QUANT_32), quant_limit);
+
+ auto compute_difference = &compute_symbolic_block_difference_1plane;
+ if ((partition_count == 1) && !(config.flags & ASTCENC_FLG_MAP_RGBM))
+ {
+ compute_difference = &compute_symbolic_block_difference_1plane_1partition;
+ }
+
+ const auto& pi = bsd.get_partition_info(partition_count, partition_index);
+
+ // Compute ideal weights and endpoint colors, with no quantization or decimation
+ endpoints_and_weights& ei = tmpbuf.ei1;
+ compute_ideal_colors_and_weights_1plane(blk, pi, ei);
+
+ // Compute ideal weights and endpoint colors for every decimation
+ float* dec_weights_ideal = tmpbuf.dec_weights_ideal;
+ uint8_t* dec_weights_uquant = tmpbuf.dec_weights_uquant;
+
+ // For each decimation mode, compute an ideal set of weights with no quantization
+ unsigned int max_decimation_modes = only_always ? bsd.decimation_mode_count_always
+ : bsd.decimation_mode_count_selected;
+ promise(max_decimation_modes > 0);
+ for (unsigned int i = 0; i < max_decimation_modes; i++)
+ {
+ const auto& dm = bsd.get_decimation_mode(i);
+ if (!dm.is_ref_1_plane(static_cast<quant_method>(max_weight_quant)))
+ {
+ continue;
+ }
+
+ const auto& di = bsd.get_decimation_info(i);
+
+ compute_ideal_weights_for_decimation(
+ ei,
+ di,
+ dec_weights_ideal + i * BLOCK_MAX_WEIGHTS);
+ }
+
+ // Compute maximum colors for the endpoints and ideal weights, then for each endpoint and ideal
+ // weight pair, compute the smallest weight that will result in a color value greater than 1
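+	// Solving ep0 + w * (ep1 - ep0) = 1 gives w = (1 - ep0) / (ep1 - ep0), the smallest
+	// weight at which each component of the interpolated color reaches 1.0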
+ vfloat4 min_ep(10.0f);
+ for (unsigned int i = 0; i < partition_count; i++)
+ {
+ vfloat4 ep = (vfloat4(1.0f) - ei.ep.endpt0[i]) / (ei.ep.endpt1[i] - ei.ep.endpt0[i]);
+
+ vmask4 use_ep = (ep > vfloat4(0.5f)) & (ep < min_ep);
+ min_ep = select(min_ep, ep, use_ep);
+ }
+
+ float min_wt_cutoff = hmin_s(min_ep);
+
+ // For each mode, use the angular method to compute a shift
+ compute_angular_endpoints_1plane(
+ only_always, bsd, dec_weights_ideal, max_weight_quant, tmpbuf);
+
+ float* weight_low_value = tmpbuf.weight_low_value1;
+ float* weight_high_value = tmpbuf.weight_high_value1;
+ int8_t* qwt_bitcounts = tmpbuf.qwt_bitcounts;
+ float* qwt_errors = tmpbuf.qwt_errors;
+
+ // For each mode (which specifies a decimation and a quantization):
+ // * Compute number of bits needed for the quantized weights
+ // * Generate an optimized set of quantized weights
+ // * Compute quantization errors for the mode
+
+ static const int8_t free_bits_for_partition_count[4] {
+ 115 - 4, 111 - 4 - PARTITION_INDEX_BITS, 108 - 4 - PARTITION_INDEX_BITS, 105 - 4 - PARTITION_INDEX_BITS
+ };
+
+ unsigned int max_block_modes = only_always ? bsd.block_mode_count_1plane_always
+ : bsd.block_mode_count_1plane_selected;
+ promise(max_block_modes > 0);
+ for (unsigned int i = 0; i < max_block_modes; i++)
+ {
+ const block_mode& bm = bsd.block_modes[i];
+
+ if (bm.quant_mode > max_weight_quant)
+ {
+ qwt_errors[i] = 1e38f;
+ continue;
+ }
+
+ assert(!bm.is_dual_plane);
+ int bitcount = free_bits_for_partition_count[partition_count - 1] - bm.weight_bits;
+ if (bitcount <= 0)
+ {
+ qwt_errors[i] = 1e38f;
+ continue;
+ }
+
+ if (weight_high_value[i] > 1.02f * min_wt_cutoff)
+ {
+ weight_high_value[i] = 1.0f;
+ }
+
+ int decimation_mode = bm.decimation_mode;
+ const auto& di = bsd.get_decimation_info(decimation_mode);
+
+ qwt_bitcounts[i] = static_cast<int8_t>(bitcount);
+
+ alignas(ASTCENC_VECALIGN) float dec_weights_uquantf[BLOCK_MAX_WEIGHTS];
+
+ // Generate the optimized set of weights for the weight mode
+ compute_quantized_weights_for_decimation(
+ di,
+ weight_low_value[i], weight_high_value[i],
+ dec_weights_ideal + BLOCK_MAX_WEIGHTS * decimation_mode,
+ dec_weights_uquantf,
+ dec_weights_uquant + BLOCK_MAX_WEIGHTS * i,
+ bm.get_weight_quant_mode());
+
+ // Compute weight quantization errors for the block mode
+ qwt_errors[i] = compute_error_of_weight_set_1plane(
+ ei,
+ di,
+ dec_weights_uquantf);
+ }
+
+ // Decide the optimal combination of color endpoint encodings and weight encodings
+ uint8_t partition_format_specifiers[TUNE_MAX_TRIAL_CANDIDATES][BLOCK_MAX_PARTITIONS];
+ int block_mode_index[TUNE_MAX_TRIAL_CANDIDATES];
+
+ quant_method color_quant_level[TUNE_MAX_TRIAL_CANDIDATES];
+ quant_method color_quant_level_mod[TUNE_MAX_TRIAL_CANDIDATES];
+
+ unsigned int candidate_count = compute_ideal_endpoint_formats(
+ pi, blk, ei.ep, qwt_bitcounts, qwt_errors,
+ config.tune_candidate_limit, 0, max_block_modes,
+ partition_format_specifiers, block_mode_index,
+ color_quant_level, color_quant_level_mod, tmpbuf);
+
+ // Iterate over the N believed-to-be-best modes to find out which one is actually best
+ float best_errorval_in_mode = ERROR_CALC_DEFAULT;
+ float best_errorval_in_scb = scb.errorval;
+
+ for (unsigned int i = 0; i < candidate_count; i++)
+ {
+ TRACE_NODE(node0, "candidate");
+
+ const int bm_packed_index = block_mode_index[i];
+ assert(bm_packed_index >= 0 && bm_packed_index < static_cast<int>(bsd.block_mode_count_1plane_selected));
+ const block_mode& qw_bm = bsd.block_modes[bm_packed_index];
+
+ int decimation_mode = qw_bm.decimation_mode;
+ const auto& di = bsd.get_decimation_info(decimation_mode);
+ promise(di.weight_count > 0);
+
+ trace_add_data("weight_x", di.weight_x);
+ trace_add_data("weight_y", di.weight_y);
+ trace_add_data("weight_z", di.weight_z);
+ trace_add_data("weight_quant", qw_bm.quant_mode);
+
+ // Recompute the ideal color endpoints before storing them
+ vfloat4 rgbs_colors[BLOCK_MAX_PARTITIONS];
+ vfloat4 rgbo_colors[BLOCK_MAX_PARTITIONS];
+
+ symbolic_compressed_block workscb;
+ endpoints workep = ei.ep;
+
+ uint8_t* u8_weight_src = dec_weights_uquant + BLOCK_MAX_WEIGHTS * bm_packed_index;
+
+ for (unsigned int j = 0; j < di.weight_count; j++)
+ {
+ workscb.weights[j] = u8_weight_src[j];
+ }
+
+ for (unsigned int l = 0; l < config.tune_refinement_limit; l++)
+ {
+ recompute_ideal_colors_1plane(
+ blk, pi, di, workscb.weights,
+ workep, rgbs_colors, rgbo_colors);
+
+ // Quantize the chosen color, tracking if worth trying the mod value
+ bool all_same = color_quant_level[i] != color_quant_level_mod[i];
+ for (unsigned int j = 0; j < partition_count; j++)
+ {
+ workscb.color_formats[j] = pack_color_endpoints(
+ workep.endpt0[j],
+ workep.endpt1[j],
+ rgbs_colors[j],
+ rgbo_colors[j],
+ partition_format_specifiers[i][j],
+ workscb.color_values[j],
+ color_quant_level[i]);
+
+ all_same = all_same && workscb.color_formats[j] == workscb.color_formats[0];
+ }
+
+ // If all the color endpoint modes are the same, we get a few more bits to store colors;
+ // let's see if we can take advantage of this: requantize all the colors and see if the
+ // endpoint modes remain the same.
+ workscb.color_formats_matched = 0;
+ if (partition_count >= 2 && all_same)
+ {
+ uint8_t colorvals[BLOCK_MAX_PARTITIONS][12];
+ uint8_t color_formats_mod[BLOCK_MAX_PARTITIONS] { 0 };
+ bool all_same_mod = true;
+ for (unsigned int j = 0; j < partition_count; j++)
+ {
+ color_formats_mod[j] = pack_color_endpoints(
+ workep.endpt0[j],
+ workep.endpt1[j],
+ rgbs_colors[j],
+ rgbo_colors[j],
+ partition_format_specifiers[i][j],
+ colorvals[j],
+ color_quant_level_mod[i]);
+
+ // Early out as soon as it's no longer possible to use mod
+ if (color_formats_mod[j] != color_formats_mod[0])
+ {
+ all_same_mod = false;
+ break;
+ }
+ }
+
+ if (all_same_mod)
+ {
+ workscb.color_formats_matched = 1;
+ for (unsigned int j = 0; j < BLOCK_MAX_PARTITIONS; j++)
+ {
+ for (unsigned int k = 0; k < 8; k++)
+ {
+ workscb.color_values[j][k] = colorvals[j][k];
+ }
+
+ workscb.color_formats[j] = color_formats_mod[j];
+ }
+ }
+ }
+
+ // Store header fields
+ workscb.partition_count = static_cast<uint8_t>(partition_count);
+ workscb.partition_index = static_cast<uint16_t>(partition_index);
+ workscb.plane2_component = -1;
+ workscb.quant_mode = workscb.color_formats_matched ? color_quant_level_mod[i] : color_quant_level[i];
+ workscb.block_mode = qw_bm.mode_index;
+ workscb.block_type = SYM_BTYPE_NONCONST;
+
+ // Pre-realign test
+ if (l == 0)
+ {
+ float errorval = compute_difference(config, bsd, workscb, blk);
+ if (errorval == -ERROR_CALC_DEFAULT)
+ {
+ errorval = -errorval;
+ workscb.block_type = SYM_BTYPE_ERROR;
+ }
+
+ trace_add_data("error_prerealign", errorval);
+ best_errorval_in_mode = astc::min(errorval, best_errorval_in_mode);
+
+				// Average refinement improvement is 3.5% per iteration (allow 4.5%), but the first
+				// iteration can help more, so we give it an extra 8% leeway. Use this knowledge to
+				// drive a heuristic to skip blocks that are unlikely to catch up with the best
+				// block we have already.
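+				// For example, with tune_refinement_limit = 3 and l = 0 there are 3 iterations
+				// remaining, so threshold = 0.045 * 3 + 1.08 = 1.215 and any candidate more than
+				// ~21.5% worse than the current best block is abandoned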
+ unsigned int iters_remaining = config.tune_refinement_limit - l;
+ float threshold = (0.045f * static_cast<float>(iters_remaining)) + 1.08f;
+ if (errorval > (threshold * best_errorval_in_scb))
+ {
+ break;
+ }
+
+ if (errorval < best_errorval_in_scb)
+ {
+ best_errorval_in_scb = errorval;
+ workscb.errorval = errorval;
+ scb = workscb;
+
+ if (errorval < tune_errorval_threshold)
+ {
+ // Skip remaining candidates - this is "good enough"
+ i = candidate_count;
+ break;
+ }
+ }
+ }
+
+ bool adjustments;
+ if (di.weight_count != bsd.texel_count)
+ {
+ adjustments = realign_weights_decimated(
+ config.profile, bsd, blk, workscb);
+ }
+ else
+ {
+ adjustments = realign_weights_undecimated(
+ config.profile, bsd, blk, workscb);
+ }
+
+ // Post-realign test
+ float errorval = compute_difference(config, bsd, workscb, blk);
+ if (errorval == -ERROR_CALC_DEFAULT)
+ {
+ errorval = -errorval;
+ workscb.block_type = SYM_BTYPE_ERROR;
+ }
+
+ trace_add_data("error_postrealign", errorval);
+ best_errorval_in_mode = astc::min(errorval, best_errorval_in_mode);
+
+			// Average refinement improvement is 3.5% per iteration, so skip blocks that are
+			// unlikely to catch up with the best block we have already. Assume 4.5% per step to
+			// give the benefit of the doubt ...
+ unsigned int iters_remaining = config.tune_refinement_limit - 1 - l;
+ float threshold = (0.045f * static_cast<float>(iters_remaining)) + 1.0f;
+ if (errorval > (threshold * best_errorval_in_scb))
+ {
+ break;
+ }
+
+ if (errorval < best_errorval_in_scb)
+ {
+ best_errorval_in_scb = errorval;
+ workscb.errorval = errorval;
+ scb = workscb;
+
+ if (errorval < tune_errorval_threshold)
+ {
+ // Skip remaining candidates - this is "good enough"
+ i = candidate_count;
+ break;
+ }
+ }
+
+ if (!adjustments)
+ {
+ break;
+ }
+ }
+ }
+
+ return best_errorval_in_mode;
+}
+
+/**
+ * @brief Compress a block using a chosen partitioning and 2 planes of weights.
+ *
+ * @param config The compressor configuration.
+ * @param bsd The block size information.
+ * @param blk The image block color data to compress.
+ * @param tune_errorval_threshold The error value threshold.
+ * @param plane2_component The component index for the second plane of weights.
+ * @param[out] scb The symbolic compressed block output.
+ * @param[out] tmpbuf The preallocated scratch working buffers.
+ * @param quant_limit The maximum weight quantization level to consider.
+ *
+ * @return The lowest error value found across the trialed candidate modes.
+ */
+static float compress_symbolic_block_for_partition_2planes(
+ const astcenc_config& config,
+ const block_size_descriptor& bsd,
+ const image_block& blk,
+ float tune_errorval_threshold,
+ unsigned int plane2_component,
+ symbolic_compressed_block& scb,
+ compression_working_buffers& tmpbuf,
+ int quant_limit
+) {
+ promise(config.tune_candidate_limit > 0);
+ promise(config.tune_refinement_limit > 0);
+ promise(bsd.decimation_mode_count_selected > 0);
+
+ int max_weight_quant = astc::min(static_cast<int>(QUANT_32), quant_limit);
+
+ // Compute ideal weights and endpoint colors, with no quantization or decimation
+ endpoints_and_weights& ei1 = tmpbuf.ei1;
+ endpoints_and_weights& ei2 = tmpbuf.ei2;
+
+ compute_ideal_colors_and_weights_2planes(bsd, blk, plane2_component, ei1, ei2);
+
+ // Compute ideal weights and endpoint colors for every decimation
+ float* dec_weights_ideal = tmpbuf.dec_weights_ideal;
+ uint8_t* dec_weights_uquant = tmpbuf.dec_weights_uquant;
+
+ // For each decimation mode, compute an ideal set of weights with no quantization
+ for (unsigned int i = 0; i < bsd.decimation_mode_count_selected; i++)
+ {
+ const auto& dm = bsd.get_decimation_mode(i);
+ if (!dm.is_ref_2_plane(static_cast<quant_method>(max_weight_quant)))
+ {
+ continue;
+ }
+
+ const auto& di = bsd.get_decimation_info(i);
+
+ compute_ideal_weights_for_decimation(
+ ei1,
+ di,
+ dec_weights_ideal + i * BLOCK_MAX_WEIGHTS);
+
+ compute_ideal_weights_for_decimation(
+ ei2,
+ di,
+ dec_weights_ideal + i * BLOCK_MAX_WEIGHTS + WEIGHTS_PLANE2_OFFSET);
+ }
+
+ // Compute maximum colors for the endpoints and ideal weights, then for each endpoint and ideal
+ // weight pair, compute the smallest weight that will result in a color value greater than 1
+ vfloat4 min_ep1(10.0f);
+ vfloat4 min_ep2(10.0f);
+
+ vfloat4 ep1 = (vfloat4(1.0f) - ei1.ep.endpt0[0]) / (ei1.ep.endpt1[0] - ei1.ep.endpt0[0]);
+ vmask4 use_ep1 = (ep1 > vfloat4(0.5f)) & (ep1 < min_ep1);
+ min_ep1 = select(min_ep1, ep1, use_ep1);
+
+ vfloat4 ep2 = (vfloat4(1.0f) - ei2.ep.endpt0[0]) / (ei2.ep.endpt1[0] - ei2.ep.endpt0[0]);
+ vmask4 use_ep2 = (ep2 > vfloat4(0.5f)) & (ep2 < min_ep2);
+ min_ep2 = select(min_ep2, ep2, use_ep2);
+
+ vfloat4 err_max(ERROR_CALC_DEFAULT);
+ vmask4 err_mask = vint4::lane_id() == vint4(plane2_component);
+
+ // Set the plane2 component to max error in ep1
+ min_ep1 = select(min_ep1, err_max, err_mask);
+
+ float min_wt_cutoff1 = hmin_s(min_ep1);
+
+ // Set the minwt2 to the plane2 component min in ep2
+ float min_wt_cutoff2 = hmin_s(select(err_max, min_ep2, err_mask));
+
+ compute_angular_endpoints_2planes(
+ bsd, dec_weights_ideal, max_weight_quant, tmpbuf);
+
+ // For each mode (which specifies a decimation and a quantization):
+ // * Compute number of bits needed for the quantized weights
+ // * Generate an optimized set of quantized weights
+ // * Compute quantization errors for the mode
+
+ float* weight_low_value1 = tmpbuf.weight_low_value1;
+ float* weight_high_value1 = tmpbuf.weight_high_value1;
+ float* weight_low_value2 = tmpbuf.weight_low_value2;
+ float* weight_high_value2 = tmpbuf.weight_high_value2;
+
+ int8_t* qwt_bitcounts = tmpbuf.qwt_bitcounts;
+ float* qwt_errors = tmpbuf.qwt_errors;
+
+ unsigned int start_2plane = bsd.block_mode_count_1plane_selected;
+ unsigned int end_2plane = bsd.block_mode_count_1plane_2plane_selected;
+
+ for (unsigned int i = start_2plane; i < end_2plane; i++)
+ {
+ const block_mode& bm = bsd.block_modes[i];
+ assert(bm.is_dual_plane);
+
+ if (bm.quant_mode > max_weight_quant)
+ {
+ qwt_errors[i] = 1e38f;
+ continue;
+ }
+
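+		// 109 = 128 block bits - 11 block mode bits - 2 partition count bits
+		// - 4 CEM bits - 2 plane 2 component selector bits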
+ qwt_bitcounts[i] = static_cast<int8_t>(109 - bm.weight_bits);
+
+ if (weight_high_value1[i] > 1.02f * min_wt_cutoff1)
+ {
+ weight_high_value1[i] = 1.0f;
+ }
+
+ if (weight_high_value2[i] > 1.02f * min_wt_cutoff2)
+ {
+ weight_high_value2[i] = 1.0f;
+ }
+
+ unsigned int decimation_mode = bm.decimation_mode;
+ const auto& di = bsd.get_decimation_info(decimation_mode);
+
+ alignas(ASTCENC_VECALIGN) float dec_weights_uquantf[BLOCK_MAX_WEIGHTS];
+
+ // Generate the optimized set of weights for the mode
+ compute_quantized_weights_for_decimation(
+ di,
+ weight_low_value1[i],
+ weight_high_value1[i],
+ dec_weights_ideal + BLOCK_MAX_WEIGHTS * decimation_mode,
+ dec_weights_uquantf,
+ dec_weights_uquant + BLOCK_MAX_WEIGHTS * i,
+ bm.get_weight_quant_mode());
+
+ compute_quantized_weights_for_decimation(
+ di,
+ weight_low_value2[i],
+ weight_high_value2[i],
+ dec_weights_ideal + BLOCK_MAX_WEIGHTS * decimation_mode + WEIGHTS_PLANE2_OFFSET,
+ dec_weights_uquantf + WEIGHTS_PLANE2_OFFSET,
+ dec_weights_uquant + BLOCK_MAX_WEIGHTS * i + WEIGHTS_PLANE2_OFFSET,
+ bm.get_weight_quant_mode());
+
+ // Compute weight quantization errors for the block mode
+ qwt_errors[i] = compute_error_of_weight_set_2planes(
+ ei1,
+ ei2,
+ di,
+ dec_weights_uquantf,
+ dec_weights_uquantf + WEIGHTS_PLANE2_OFFSET);
+ }
+
+ // Decide the optimal combination of color endpoint encodings and weight encodings
+ uint8_t partition_format_specifiers[TUNE_MAX_TRIAL_CANDIDATES][BLOCK_MAX_PARTITIONS];
+ int block_mode_index[TUNE_MAX_TRIAL_CANDIDATES];
+
+ quant_method color_quant_level[TUNE_MAX_TRIAL_CANDIDATES];
+ quant_method color_quant_level_mod[TUNE_MAX_TRIAL_CANDIDATES];
+
+ endpoints epm;
+ merge_endpoints(ei1.ep, ei2.ep, plane2_component, epm);
+
+ const auto& pi = bsd.get_partition_info(1, 0);
+ unsigned int candidate_count = compute_ideal_endpoint_formats(
+ pi, blk, epm, qwt_bitcounts, qwt_errors,
+ config.tune_candidate_limit,
+ bsd.block_mode_count_1plane_selected, bsd.block_mode_count_1plane_2plane_selected,
+ partition_format_specifiers, block_mode_index,
+ color_quant_level, color_quant_level_mod, tmpbuf);
+
+ // Iterate over the N believed-to-be-best modes to find out which one is actually best
+ float best_errorval_in_mode = ERROR_CALC_DEFAULT;
+ float best_errorval_in_scb = scb.errorval;
+
+ for (unsigned int i = 0; i < candidate_count; i++)
+ {
+ TRACE_NODE(node0, "candidate");
+
+ const int bm_packed_index = block_mode_index[i];
+ assert(bm_packed_index >= static_cast<int>(bsd.block_mode_count_1plane_selected) &&
+ bm_packed_index < static_cast<int>(bsd.block_mode_count_1plane_2plane_selected));
+ const block_mode& qw_bm = bsd.block_modes[bm_packed_index];
+
+ int decimation_mode = qw_bm.decimation_mode;
+ const auto& di = bsd.get_decimation_info(decimation_mode);
+ promise(di.weight_count > 0);
+
+ trace_add_data("weight_x", di.weight_x);
+ trace_add_data("weight_y", di.weight_y);
+ trace_add_data("weight_z", di.weight_z);
+ trace_add_data("weight_quant", qw_bm.quant_mode);
+
+ vfloat4 rgbs_color;
+ vfloat4 rgbo_color;
+
+ symbolic_compressed_block workscb;
+ endpoints workep = epm;
+
+ uint8_t* u8_weight1_src = dec_weights_uquant + BLOCK_MAX_WEIGHTS * bm_packed_index;
+ uint8_t* u8_weight2_src = dec_weights_uquant + BLOCK_MAX_WEIGHTS * bm_packed_index + WEIGHTS_PLANE2_OFFSET;
+
+ for (int j = 0; j < di.weight_count; j++)
+ {
+ workscb.weights[j] = u8_weight1_src[j];
+ workscb.weights[j + WEIGHTS_PLANE2_OFFSET] = u8_weight2_src[j];
+ }
+
+ for (unsigned int l = 0; l < config.tune_refinement_limit; l++)
+ {
+ recompute_ideal_colors_2planes(
+ blk, bsd, di,
+ workscb.weights, workscb.weights + WEIGHTS_PLANE2_OFFSET,
+ workep, rgbs_color, rgbo_color, plane2_component);
+
+ // Quantize the chosen color
+ workscb.color_formats[0] = pack_color_endpoints(
+ workep.endpt0[0],
+ workep.endpt1[0],
+ rgbs_color, rgbo_color,
+ partition_format_specifiers[i][0],
+ workscb.color_values[0],
+ color_quant_level[i]);
+
+ // Store header fields
+ workscb.partition_count = 1;
+ workscb.partition_index = 0;
+ workscb.quant_mode = color_quant_level[i];
+ workscb.color_formats_matched = 0;
+ workscb.block_mode = qw_bm.mode_index;
+ workscb.plane2_component = static_cast<int8_t>(plane2_component);
+ workscb.block_type = SYM_BTYPE_NONCONST;
+
+ // Pre-realign test
+ if (l == 0)
+ {
+ float errorval = compute_symbolic_block_difference_2plane(config, bsd, workscb, blk);
+ if (errorval == -ERROR_CALC_DEFAULT)
+ {
+ errorval = -errorval;
+ workscb.block_type = SYM_BTYPE_ERROR;
+ }
+
+ trace_add_data("error_prerealign", errorval);
+ best_errorval_in_mode = astc::min(errorval, best_errorval_in_mode);
+
+				// Average refinement improvement is 3.5% per iteration (allow 4.5%), but the first
+				// iteration can help more, so we give it an extra 8% leeway. Use this knowledge to
+				// drive a heuristic to skip blocks that are unlikely to catch up with the best
+				// block we have already.
+ unsigned int iters_remaining = config.tune_refinement_limit - l;
+ float threshold = (0.045f * static_cast<float>(iters_remaining)) + 1.08f;
+ if (errorval > (threshold * best_errorval_in_scb))
+ {
+ break;
+ }
+
+ if (errorval < best_errorval_in_scb)
+ {
+ best_errorval_in_scb = errorval;
+ workscb.errorval = errorval;
+ scb = workscb;
+
+ if (errorval < tune_errorval_threshold)
+ {
+ // Skip remaining candidates - this is "good enough"
+ i = candidate_count;
+ break;
+ }
+ }
+ }
+
+ // Perform a final pass over the weights to try to improve them.
+ bool adjustments;
+ if (di.weight_count != bsd.texel_count)
+ {
+ adjustments = realign_weights_decimated(
+ config.profile, bsd, blk, workscb);
+ }
+ else
+ {
+ adjustments = realign_weights_undecimated(
+ config.profile, bsd, blk, workscb);
+ }
+
+ // Post-realign test
+ float errorval = compute_symbolic_block_difference_2plane(config, bsd, workscb, blk);
+ if (errorval == -ERROR_CALC_DEFAULT)
+ {
+ errorval = -errorval;
+ workscb.block_type = SYM_BTYPE_ERROR;
+ }
+
+ trace_add_data("error_postrealign", errorval);
+ best_errorval_in_mode = astc::min(errorval, best_errorval_in_mode);
+
+			// Average refinement improvement is 3.5% per iteration, so skip blocks that are
+			// unlikely to catch up with the best block we have already. Assume 4.5% per step to
+			// give the benefit of the doubt ...
+ unsigned int iters_remaining = config.tune_refinement_limit - 1 - l;
+ float threshold = (0.045f * static_cast<float>(iters_remaining)) + 1.0f;
+ if (errorval > (threshold * best_errorval_in_scb))
+ {
+ break;
+ }
+
+ if (errorval < best_errorval_in_scb)
+ {
+ best_errorval_in_scb = errorval;
+ workscb.errorval = errorval;
+ scb = workscb;
+
+ if (errorval < tune_errorval_threshold)
+ {
+ // Skip remaining candidates - this is "good enough"
+ i = candidate_count;
+ break;
+ }
+ }
+
+ if (!adjustments)
+ {
+ break;
+ }
+ }
+ }
+
+ return best_errorval_in_mode;
+}
+
+/**
+ * @brief Determine the lowest cross-channel correlation factor.
+ *
+ * @param texels_per_block The number of texels in a block.
+ * @param blk The image block color data to compress.
+ *
+ * @return Return the lowest correlation factor.
+ */
+static float prepare_block_statistics(
+ int texels_per_block,
+ const image_block& blk
+) {
+ // Compute covariance matrix, as a collection of 10 scalars that form the upper-triangular row
+ // of the matrix. The matrix is symmetric, so this is all we need for this use case.
+ float rs = 0.0f;
+ float gs = 0.0f;
+ float bs = 0.0f;
+ float as = 0.0f;
+ float rr_var = 0.0f;
+ float gg_var = 0.0f;
+ float bb_var = 0.0f;
+ float aa_var = 0.0f;
+ float rg_cov = 0.0f;
+ float rb_cov = 0.0f;
+ float ra_cov = 0.0f;
+ float gb_cov = 0.0f;
+ float ga_cov = 0.0f;
+ float ba_cov = 0.0f;
+
+ float weight_sum = 0.0f;
+
+ promise(texels_per_block > 0);
+ for (int i = 0; i < texels_per_block; i++)
+ {
+ float weight = hadd_s(blk.channel_weight) / 4.0f;
+ assert(weight >= 0.0f);
+ weight_sum += weight;
+
+ float r = blk.data_r[i];
+ float g = blk.data_g[i];
+ float b = blk.data_b[i];
+ float a = blk.data_a[i];
+
+ float rw = r * weight;
+ rs += rw;
+ rr_var += r * rw;
+ rg_cov += g * rw;
+ rb_cov += b * rw;
+ ra_cov += a * rw;
+
+ float gw = g * weight;
+ gs += gw;
+ gg_var += g * gw;
+ gb_cov += b * gw;
+ ga_cov += a * gw;
+
+ float bw = b * weight;
+ bs += bw;
+ bb_var += b * bw;
+ ba_cov += a * bw;
+
+ float aw = a * weight;
+ as += aw;
+ aa_var += a * aw;
+ }
+
+ float rpt = 1.0f / astc::max(weight_sum, 1e-7f);
+
+ rr_var -= rs * (rs * rpt);
+ rg_cov -= gs * (rs * rpt);
+ rb_cov -= bs * (rs * rpt);
+ ra_cov -= as * (rs * rpt);
+
+ gg_var -= gs * (gs * rpt);
+ gb_cov -= bs * (gs * rpt);
+ ga_cov -= as * (gs * rpt);
+
+ bb_var -= bs * (bs * rpt);
+ ba_cov -= as * (bs * rpt);
+
+ aa_var -= as * (as * rpt);
+
+ // These will give a NaN if a channel is constant - these are fixed up in the next step
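+	// Dividing each covariance by sqrt(var_a * var_b), via the rsqrt multiplies below,
+	// converts it into a Pearson correlation coefficient in the range [-1, 1]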
+ rg_cov *= astc::rsqrt(rr_var * gg_var);
+ rb_cov *= astc::rsqrt(rr_var * bb_var);
+ ra_cov *= astc::rsqrt(rr_var * aa_var);
+ gb_cov *= astc::rsqrt(gg_var * bb_var);
+ ga_cov *= astc::rsqrt(gg_var * aa_var);
+ ba_cov *= astc::rsqrt(bb_var * aa_var);
+
+ if (astc::isnan(rg_cov)) rg_cov = 1.0f;
+ if (astc::isnan(rb_cov)) rb_cov = 1.0f;
+ if (astc::isnan(ra_cov)) ra_cov = 1.0f;
+ if (astc::isnan(gb_cov)) gb_cov = 1.0f;
+ if (astc::isnan(ga_cov)) ga_cov = 1.0f;
+ if (astc::isnan(ba_cov)) ba_cov = 1.0f;
+
+ float lowest_correlation = astc::min(fabsf(rg_cov), fabsf(rb_cov));
+ lowest_correlation = astc::min(lowest_correlation, fabsf(ra_cov));
+ lowest_correlation = astc::min(lowest_correlation, fabsf(gb_cov));
+ lowest_correlation = astc::min(lowest_correlation, fabsf(ga_cov));
+ lowest_correlation = astc::min(lowest_correlation, fabsf(ba_cov));
+
+ // Diagnostic trace points
+ trace_add_data("min_r", blk.data_min.lane<0>());
+ trace_add_data("max_r", blk.data_max.lane<0>());
+ trace_add_data("min_g", blk.data_min.lane<1>());
+ trace_add_data("max_g", blk.data_max.lane<1>());
+ trace_add_data("min_b", blk.data_min.lane<2>());
+ trace_add_data("max_b", blk.data_max.lane<2>());
+ trace_add_data("min_a", blk.data_min.lane<3>());
+ trace_add_data("max_a", blk.data_max.lane<3>());
+ trace_add_data("cov_rg", fabsf(rg_cov));
+ trace_add_data("cov_rb", fabsf(rb_cov));
+ trace_add_data("cov_ra", fabsf(ra_cov));
+ trace_add_data("cov_gb", fabsf(gb_cov));
+ trace_add_data("cov_ga", fabsf(ga_cov));
+ trace_add_data("cov_ba", fabsf(ba_cov));
+
+ return lowest_correlation;
+}
+
+/* See header for documentation. */
+void compress_block(
+ const astcenc_contexti& ctx,
+ const image_block& blk,
+ physical_compressed_block& pcb,
+ compression_working_buffers& tmpbuf)
+{
+ astcenc_profile decode_mode = ctx.config.profile;
+ symbolic_compressed_block scb;
+ const block_size_descriptor& bsd = *ctx.bsd;
+ float lowest_correl;
+
+ TRACE_NODE(node0, "block");
+ trace_add_data("pos_x", blk.xpos);
+ trace_add_data("pos_y", blk.ypos);
+ trace_add_data("pos_z", blk.zpos);
+
+ // Set stricter block targets for luminance data as we have more bits to play with
+ bool block_is_l = blk.is_luminance();
+ float block_is_l_scale = block_is_l ? 1.0f / 1.5f : 1.0f;
+
+ // Set slightly stricter block targets for lumalpha data as we have more bits to play with
+ bool block_is_la = blk.is_luminancealpha();
+ float block_is_la_scale = block_is_la ? 1.0f / 1.05f : 1.0f;
+
+ bool block_skip_two_plane = false;
+ int max_partitions = ctx.config.tune_partition_count_limit;
+
+ unsigned int requested_partition_indices[3] {
+ ctx.config.tune_2partition_index_limit,
+ ctx.config.tune_3partition_index_limit,
+ ctx.config.tune_4partition_index_limit
+ };
+
+ unsigned int requested_partition_trials[3] {
+ ctx.config.tune_2partitioning_candidate_limit,
+ ctx.config.tune_3partitioning_candidate_limit,
+ ctx.config.tune_4partitioning_candidate_limit
+ };
+
+#if defined(ASTCENC_DIAGNOSTICS)
+ // Do this early in diagnostic builds so we can dump uniform metrics
+ // for every block. Do it later in release builds to avoid redundant work!
+ float error_weight_sum = hadd_s(blk.channel_weight) * bsd.texel_count;
+ float error_threshold = ctx.config.tune_db_limit
+ * error_weight_sum
+ * block_is_l_scale
+ * block_is_la_scale;
+
+ lowest_correl = prepare_block_statistics(bsd.texel_count, blk);
+ trace_add_data("lowest_correl", lowest_correl);
+ trace_add_data("tune_error_threshold", error_threshold);
+#endif
+
+ // Detected a constant-color block
+ if (all(blk.data_min == blk.data_max))
+ {
+ TRACE_NODE(node1, "pass");
+ trace_add_data("partition_count", 0);
+ trace_add_data("plane_count", 1);
+
+ scb.partition_count = 0;
+
+ // Encode as FP16 if using HDR
+ if ((decode_mode == ASTCENC_PRF_HDR) ||
+ (decode_mode == ASTCENC_PRF_HDR_RGB_LDR_A))
+ {
+ scb.block_type = SYM_BTYPE_CONST_F16;
+ vint4 color_f16 = float_to_float16(blk.origin_texel);
+ store(color_f16, scb.constant_color);
+ }
+ // Encode as UNORM16 if NOT using HDR
+ else
+ {
+ scb.block_type = SYM_BTYPE_CONST_U16;
+ vfloat4 color_f32 = clamp(0.0f, 1.0f, blk.origin_texel) * 65535.0f;
+ vint4 color_u16 = float_to_int_rtn(color_f32);
+ store(color_u16, scb.constant_color);
+ }
+
+ trace_add_data("exit", "quality hit");
+
+ symbolic_to_physical(bsd, scb, pcb);
+ return;
+ }
+
+#if !defined(ASTCENC_DIAGNOSTICS)
+ float error_weight_sum = hadd_s(blk.channel_weight) * bsd.texel_count;
+ float error_threshold = ctx.config.tune_db_limit
+ * error_weight_sum
+ * block_is_l_scale
+ * block_is_la_scale;
+#endif
+
+ // Set SCB and mode errors to a very high error value
+ scb.errorval = ERROR_CALC_DEFAULT;
+ scb.block_type = SYM_BTYPE_ERROR;
+
+ float best_errorvals_for_pcount[BLOCK_MAX_PARTITIONS] {
+ ERROR_CALC_DEFAULT, ERROR_CALC_DEFAULT, ERROR_CALC_DEFAULT, ERROR_CALC_DEFAULT
+ };
+
+ float exit_thresholds_for_pcount[BLOCK_MAX_PARTITIONS] {
+ 0.0f,
+ ctx.config.tune_2_partition_early_out_limit_factor,
+ ctx.config.tune_3_partition_early_out_limit_factor,
+ 0.0f
+ };
+
+ // Trial using 1 plane of weights and 1 partition.
+
+ // Most of the time we test it twice, first with a mode cutoff of 0 and then with the specified
+ // mode cutoff. This causes an early-out that speeds up encoding of easy blocks. However, this
+ // optimization is disabled for 4x4 and 5x4 blocks where it nearly always slows down the
+ // compression and slightly reduces image quality.
+
+ float errorval_mult[2] {
+ 1.0f / ctx.config.tune_mse_overshoot,
+ 1.0f
+ };
+
+ float errorval_overshoot = 1.0f / ctx.config.tune_mse_overshoot;
+
+ // Only enable MODE0 fast path (trial 0) if 2D, and more than 25 texels
+ int start_trial = 1;
+ if ((bsd.texel_count >= TUNE_MIN_TEXELS_MODE0_FASTPATH) && (bsd.zdim == 1))
+ {
+ start_trial = 0;
+ }
+
+ int quant_limit = QUANT_32;
+ for (int i = start_trial; i < 2; i++)
+ {
+ TRACE_NODE(node1, "pass");
+ trace_add_data("partition_count", 1);
+ trace_add_data("plane_count", 1);
+ trace_add_data("search_mode", i);
+
+ float errorval = compress_symbolic_block_for_partition_1plane(
+ ctx.config, bsd, blk, i == 0,
+ error_threshold * errorval_mult[i] * errorval_overshoot,
+ 1, 0, scb, tmpbuf, QUANT_32);
+
+ // Record the quant level so we can use it to filter later searches
+ const auto& bm = bsd.get_block_mode(scb.block_mode);
+ quant_limit = bm.get_weight_quant_mode();
+
+ best_errorvals_for_pcount[0] = astc::min(best_errorvals_for_pcount[0], errorval);
+ if (errorval < (error_threshold * errorval_mult[i]))
+ {
+ trace_add_data("exit", "quality hit");
+ goto END_OF_TESTS;
+ }
+ }
+
+#if !defined(ASTCENC_DIAGNOSTICS)
+ lowest_correl = prepare_block_statistics(bsd.texel_count, blk);
+#endif
+
+ block_skip_two_plane = lowest_correl > ctx.config.tune_2_plane_early_out_limit_correlation;
+
+ // Test the four possible 1-partition, 2-planes modes. Do this in reverse, as
+ // alpha is the most likely to be non-correlated if it is present in the data.
+ for (int i = BLOCK_MAX_COMPONENTS - 1; i >= 0; i--)
+ {
+ TRACE_NODE(node1, "pass");
+ trace_add_data("partition_count", 1);
+ trace_add_data("plane_count", 2);
+ trace_add_data("plane_component", i);
+
+ if (block_skip_two_plane)
+ {
+ trace_add_data("skip", "tune_2_plane_early_out_limit_correlation");
+ continue;
+ }
+
+ if (blk.grayscale && i != 3)
+ {
+ trace_add_data("skip", "grayscale block");
+ continue;
+ }
+
+ if (blk.is_constant_channel(i))
+ {
+ trace_add_data("skip", "constant component");
+ continue;
+ }
+
+ float errorval = compress_symbolic_block_for_partition_2planes(
+ ctx.config, bsd, blk, error_threshold * errorval_overshoot,
+ i, scb, tmpbuf, quant_limit);
+
+ // If attempting two planes is much worse than the best one plane result
+ // then further two plane searches are unlikely to help so move on ...
+ if (errorval > (best_errorvals_for_pcount[0] * 1.85f))
+ {
+ break;
+ }
+
+ if (errorval < error_threshold)
+ {
+ trace_add_data("exit", "quality hit");
+ goto END_OF_TESTS;
+ }
+ }
+
+ // Find best blocks for 2, 3 and 4 partitions
+ for (int partition_count = 2; partition_count <= max_partitions; partition_count++)
+ {
+ unsigned int partition_indices[TUNE_MAX_PARTITIONING_CANDIDATES];
+
+ unsigned int requested_indices = requested_partition_indices[partition_count - 2];
+
+ unsigned int requested_trials = requested_partition_trials[partition_count - 2];
+ requested_trials = astc::min(requested_trials, requested_indices);
+
+ unsigned int actual_trials = find_best_partition_candidates(
+ bsd, blk, partition_count, requested_indices, partition_indices, requested_trials);
+
+ float best_error_in_prev = best_errorvals_for_pcount[partition_count - 2];
+
+ for (unsigned int i = 0; i < actual_trials; i++)
+ {
+ TRACE_NODE(node1, "pass");
+ trace_add_data("partition_count", partition_count);
+ trace_add_data("partition_index", partition_indices[i]);
+ trace_add_data("plane_count", 1);
+ trace_add_data("search_mode", i);
+
+ float errorval = compress_symbolic_block_for_partition_1plane(
+ ctx.config, bsd, blk, false,
+ error_threshold * errorval_overshoot,
+ partition_count, partition_indices[i],
+ scb, tmpbuf, quant_limit);
+
+ best_errorvals_for_pcount[partition_count - 1] = astc::min(best_errorvals_for_pcount[partition_count - 1], errorval);
+
+ // If using N partitions doesn't improve much over using N-1 partitions then skip trying
+ // N+1. Error can dramatically improve if the data is correlated or non-correlated and
+ // aligns with a partitioning that suits that encoding, so for this inner loop check add
+ // a large error scale because the "other" trial could be a lot better.
+ float best_error = best_errorvals_for_pcount[partition_count - 1];
+ float best_error_scale = exit_thresholds_for_pcount[partition_count - 1] * 1.85f;
+ if (best_error > (best_error_in_prev * best_error_scale))
+ {
+ trace_add_data("skip", "tune_partition_early_out_limit_factor");
+ goto END_OF_TESTS;
+ }
+
+ if (errorval < error_threshold)
+ {
+ trace_add_data("exit", "quality hit");
+ goto END_OF_TESTS;
+ }
+ }
+
+ // If using N partitions doesn't improve much over using N-1 partitions then skip trying N+1
+ float best_error = best_errorvals_for_pcount[partition_count - 1];
+ float best_error_scale = exit_thresholds_for_pcount[partition_count - 1];
+ if (best_error > (best_error_in_prev * best_error_scale))
+ {
+ trace_add_data("skip", "tune_partition_early_out_limit_factor");
+ goto END_OF_TESTS;
+ }
+ }
+
+ trace_add_data("exit", "quality not hit");
+
+END_OF_TESTS:
+ // If we still have an error block then convert to something we can encode
+ // TODO: Do something more sensible here, such as average color block
+ if (scb.block_type == SYM_BTYPE_ERROR)
+ {
+#if defined(ASTCENC_DIAGNOSTICS)
+ static bool printed_once = false;
+ if (!printed_once)
+ {
+ printed_once = true;
+ printf("WARN: At least one block failed to find a valid encoding.\n"
+ " Try increasing compression quality settings.\n\n");
+ }
+#endif
+
+ scb.block_type = SYM_BTYPE_CONST_U16;
+ vfloat4 color_f32 = clamp(0.0f, 1.0f, blk.origin_texel) * 65535.0f;
+ vint4 color_u16 = float_to_int_rtn(color_f32);
+ store(color_u16, scb.constant_color);
+ }
+
+ // Compress to a physical block
+ symbolic_to_physical(bsd, scb, pcb);
+}
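+
+// In summary, the search above runs progressively more expensive trials and stops as soon
+// as a candidate beats the tuned error threshold:
+//
+//   1. Constant-color blocks are emitted directly as SYM_BTYPE_CONST_U16 / CONST_F16.
+//   2. 1 partition, 1 plane: optional MODE0 fast path, then the full block mode search.
+//   3. 1 partition, 2 planes: skipped entirely if channel correlation exceeds the cutoff.
+//   4. 2 to 4 partitions: candidates supplied by find_best_partition_candidates().
+//
+// Any block still marked SYM_BTYPE_ERROR afterwards falls back to a constant-color
+// encoding before symbolic_to_physical() packs the result into the physical block.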
+
+#endif
diff --git a/thirdparty/astcenc/astcenc_compute_variance.cpp b/thirdparty/astcenc/astcenc_compute_variance.cpp
new file mode 100644
index 0000000000..48a4af8cef
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_compute_variance.cpp
@@ -0,0 +1,472 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2011-2022 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+#if !defined(ASTCENC_DECOMPRESS_ONLY)
+
+/**
+ * @brief Functions to calculate variance per component in an NxN footprint.
+ *
+ * We need N to be parametric, so the routine below uses summed area tables in order to execute in
+ * O(1) time independent of how big N is.
+ *
+ * The addition uses a Brent-Kung-based parallel prefix adder. This uses the prefix tree to first
+ * perform a binary reduction, and then distributes the results. This method means that there is no
+ * serial dependency between a given element and the next one, and also significantly improves
+ * numerical stability allowing us to use floats rather than doubles.
+ */
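+
+// As a concrete sketch of the summed-area table query used later in this file: once
+// S(y, x) holds the sum of all texels at coordinates up to and including (y, x), the sum
+// over the window covering rows y_low+1 .. y_high and columns x_low+1 .. x_high is
+// recovered in O(1) by inclusion-exclusion:
+//
+//     window_sum =   S(y_high, x_high) - S(y_high, x_low)
+//                  - S(y_low,  x_high) + S(y_low,  x_low)
+//
+// This is exactly the VARBUF1 lookup pattern in compute_pixel_region_variance() below,
+// with a third axis added for 3D images.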
+
+#include "astcenc_internal.h"
+
+#include <cassert>
+
+/**
+ * @brief Generate a prefix-sum array using the Brent-Kung algorithm.
+ *
+ * This will take an input array of the form:
+ * v0, v1, v2, ...
+ * ... and modify in-place to turn it into a prefix-sum array of the form:
+ * v0, v0+v1, v0+v1+v2, ...
+ *
+ * @param d The array to prefix-sum.
+ * @param items The number of items in the array.
+ * @param stride The item spacing in the array; i.e. dense arrays should use 1.
+ */
+static void brent_kung_prefix_sum(
+ vfloat4* d,
+ size_t items,
+ int stride
+) {
+ if (items < 2)
+ return;
+
+ size_t lc_stride = 2;
+ size_t log2_stride = 1;
+
+ // The reduction-tree loop
+ do {
+ size_t step = lc_stride >> 1;
+ size_t start = lc_stride - 1;
+ size_t iters = items >> log2_stride;
+
+ vfloat4 *da = d + (start * stride);
+ ptrdiff_t ofs = -static_cast<ptrdiff_t>(step * stride);
+ size_t ofs_stride = stride << log2_stride;
+
+ while (iters)
+ {
+ *da = *da + da[ofs];
+ da += ofs_stride;
+ iters--;
+ }
+
+ log2_stride += 1;
+ lc_stride <<= 1;
+ } while (lc_stride <= items);
+
+ // The expansion-tree loop
+ do {
+ log2_stride -= 1;
+ lc_stride >>= 1;
+
+ size_t step = lc_stride >> 1;
+ size_t start = step + lc_stride - 1;
+ size_t iters = (items - step) >> log2_stride;
+
+ vfloat4 *da = d + (start * stride);
+ ptrdiff_t ofs = -static_cast<ptrdiff_t>(step * stride);
+ size_t ofs_stride = stride << log2_stride;
+
+ while (iters)
+ {
+ *da = *da + da[ofs];
+ da += ofs_stride;
+ iters--;
+ }
+ } while (lc_stride > 2);
+}
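+
+// Example usage, as done for the SAT generation further down in this file: a dense row of
+// the padded work buffer uses stride 1, while summing down a column reuses the same
+// routine with the row pitch as the stride.
+//
+//     brent_kung_prefix_sum(&(VARBUF1(z, y, 1)), padsize_x - 1, 1);    // along X
+//     brent_kung_prefix_sum(&(VARBUF1(z, 1, x)), padsize_y - 1, yst);  // along Y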
+
+/* See header for documentation. */
+void compute_pixel_region_variance(
+ astcenc_contexti& ctx,
+ const pixel_region_args& arg
+) {
+ // Unpack the memory structure into local variables
+ const astcenc_image* img = arg.img;
+ astcenc_swizzle swz = arg.swz;
+ bool have_z = arg.have_z;
+
+ int size_x = arg.size_x;
+ int size_y = arg.size_y;
+ int size_z = arg.size_z;
+
+ int offset_x = arg.offset_x;
+ int offset_y = arg.offset_y;
+ int offset_z = arg.offset_z;
+
+ int alpha_kernel_radius = arg.alpha_kernel_radius;
+
+ float* input_alpha_averages = ctx.input_alpha_averages;
+ vfloat4* work_memory = arg.work_memory;
+
+ // Compute memory sizes and dimensions that we need
+ int kernel_radius = alpha_kernel_radius;
+ int kerneldim = 2 * kernel_radius + 1;
+ int kernel_radius_xy = kernel_radius;
+ int kernel_radius_z = have_z ? kernel_radius : 0;
+
+ int padsize_x = size_x + kerneldim;
+ int padsize_y = size_y + kerneldim;
+ int padsize_z = size_z + (have_z ? kerneldim : 0);
+ int sizeprod = padsize_x * padsize_y * padsize_z;
+
+ int zd_start = have_z ? 1 : 0;
+
+ vfloat4 *varbuf1 = work_memory;
+ vfloat4 *varbuf2 = work_memory + sizeprod;
+
+ // Scaling factors to apply to Y and Z for accesses into the work buffers
+ int yst = padsize_x;
+ int zst = padsize_x * padsize_y;
+
+ // Scaling factors to apply to Y and Z for accesses into result buffers
+ int ydt = img->dim_x;
+ int zdt = img->dim_x * img->dim_y;
+
+ // Macros to act as accessor functions for the work-memory
+ #define VARBUF1(z, y, x) varbuf1[z * zst + y * yst + x]
+ #define VARBUF2(z, y, x) varbuf2[z * zst + y * yst + x]
+
+ // Load N and N^2 values into the work buffers
+ if (img->data_type == ASTCENC_TYPE_U8)
+ {
+ // Swizzle data structure 4 = ZERO, 5 = ONE
+ uint8_t data[6];
+ data[ASTCENC_SWZ_0] = 0;
+ data[ASTCENC_SWZ_1] = 255;
+
+ for (int z = zd_start; z < padsize_z; z++)
+ {
+ int z_src = (z - zd_start) + offset_z - kernel_radius_z;
+ z_src = astc::clamp(z_src, 0, static_cast<int>(img->dim_z - 1));
+ uint8_t* data8 = static_cast<uint8_t*>(img->data[z_src]);
+
+ for (int y = 1; y < padsize_y; y++)
+ {
+ int y_src = (y - 1) + offset_y - kernel_radius_xy;
+ y_src = astc::clamp(y_src, 0, static_cast<int>(img->dim_y - 1));
+
+ for (int x = 1; x < padsize_x; x++)
+ {
+ int x_src = (x - 1) + offset_x - kernel_radius_xy;
+ x_src = astc::clamp(x_src, 0, static_cast<int>(img->dim_x - 1));
+
+ data[0] = data8[(4 * img->dim_x * y_src) + (4 * x_src )];
+ data[1] = data8[(4 * img->dim_x * y_src) + (4 * x_src + 1)];
+ data[2] = data8[(4 * img->dim_x * y_src) + (4 * x_src + 2)];
+ data[3] = data8[(4 * img->dim_x * y_src) + (4 * x_src + 3)];
+
+ uint8_t r = data[swz.r];
+ uint8_t g = data[swz.g];
+ uint8_t b = data[swz.b];
+ uint8_t a = data[swz.a];
+
+ vfloat4 d = vfloat4 (r * (1.0f / 255.0f),
+ g * (1.0f / 255.0f),
+ b * (1.0f / 255.0f),
+ a * (1.0f / 255.0f));
+
+ VARBUF1(z, y, x) = d;
+ VARBUF2(z, y, x) = d * d;
+ }
+ }
+ }
+ }
+ else if (img->data_type == ASTCENC_TYPE_F16)
+ {
+ // Swizzle data structure 4 = ZERO, 5 = ONE (in FP16)
+ uint16_t data[6];
+ data[ASTCENC_SWZ_0] = 0;
+ data[ASTCENC_SWZ_1] = 0x3C00;
+
+ for (int z = zd_start; z < padsize_z; z++)
+ {
+ int z_src = (z - zd_start) + offset_z - kernel_radius_z;
+ z_src = astc::clamp(z_src, 0, static_cast<int>(img->dim_z - 1));
+ uint16_t* data16 = static_cast<uint16_t*>(img->data[z_src]);
+
+ for (int y = 1; y < padsize_y; y++)
+ {
+ int y_src = (y - 1) + offset_y - kernel_radius_xy;
+ y_src = astc::clamp(y_src, 0, static_cast<int>(img->dim_y - 1));
+
+ for (int x = 1; x < padsize_x; x++)
+ {
+ int x_src = (x - 1) + offset_x - kernel_radius_xy;
+ x_src = astc::clamp(x_src, 0, static_cast<int>(img->dim_x - 1));
+
+ data[0] = data16[(4 * img->dim_x * y_src) + (4 * x_src )];
+ data[1] = data16[(4 * img->dim_x * y_src) + (4 * x_src + 1)];
+ data[2] = data16[(4 * img->dim_x * y_src) + (4 * x_src + 2)];
+ data[3] = data16[(4 * img->dim_x * y_src) + (4 * x_src + 3)];
+
+ vint4 di(data[swz.r], data[swz.g], data[swz.b], data[swz.a]);
+ vfloat4 d = float16_to_float(di);
+
+ VARBUF1(z, y, x) = d;
+ VARBUF2(z, y, x) = d * d;
+ }
+ }
+ }
+ }
+ else // if (img->data_type == ASTCENC_TYPE_F32)
+ {
+ assert(img->data_type == ASTCENC_TYPE_F32);
+
+ // Swizzle data structure 4 = ZERO, 5 = ONE (in FP32)
+ float data[6];
+ data[ASTCENC_SWZ_0] = 0.0f;
+ data[ASTCENC_SWZ_1] = 1.0f;
+
+ for (int z = zd_start; z < padsize_z; z++)
+ {
+ int z_src = (z - zd_start) + offset_z - kernel_radius_z;
+ z_src = astc::clamp(z_src, 0, static_cast<int>(img->dim_z - 1));
+ float* data32 = static_cast<float*>(img->data[z_src]);
+
+ for (int y = 1; y < padsize_y; y++)
+ {
+ int y_src = (y - 1) + offset_y - kernel_radius_xy;
+ y_src = astc::clamp(y_src, 0, static_cast<int>(img->dim_y - 1));
+
+ for (int x = 1; x < padsize_x; x++)
+ {
+ int x_src = (x - 1) + offset_x - kernel_radius_xy;
+ x_src = astc::clamp(x_src, 0, static_cast<int>(img->dim_x - 1));
+
+ data[0] = data32[(4 * img->dim_x * y_src) + (4 * x_src )];
+ data[1] = data32[(4 * img->dim_x * y_src) + (4 * x_src + 1)];
+ data[2] = data32[(4 * img->dim_x * y_src) + (4 * x_src + 2)];
+ data[3] = data32[(4 * img->dim_x * y_src) + (4 * x_src + 3)];
+
+ float r = data[swz.r];
+ float g = data[swz.g];
+ float b = data[swz.b];
+ float a = data[swz.a];
+
+ vfloat4 d(r, g, b, a);
+
+ VARBUF1(z, y, x) = d;
+ VARBUF2(z, y, x) = d * d;
+ }
+ }
+ }
+ }
+
+ // Pad with an extra layer of 0s; this forms the edge of the SAT tables
+ vfloat4 vbz = vfloat4::zero();
+ for (int z = 0; z < padsize_z; z++)
+ {
+ for (int y = 0; y < padsize_y; y++)
+ {
+ VARBUF1(z, y, 0) = vbz;
+ VARBUF2(z, y, 0) = vbz;
+ }
+
+ for (int x = 0; x < padsize_x; x++)
+ {
+ VARBUF1(z, 0, x) = vbz;
+ VARBUF2(z, 0, x) = vbz;
+ }
+ }
+
+ if (have_z)
+ {
+ for (int y = 0; y < padsize_y; y++)
+ {
+ for (int x = 0; x < padsize_x; x++)
+ {
+ VARBUF1(0, y, x) = vbz;
+ VARBUF2(0, y, x) = vbz;
+ }
+ }
+ }
+
+ // Generate summed-area tables for N and N^2; this is done in-place, using
+ // a Brent-Kung parallel-prefix based algorithm to minimize precision loss
+ for (int z = zd_start; z < padsize_z; z++)
+ {
+ for (int y = 1; y < padsize_y; y++)
+ {
+ brent_kung_prefix_sum(&(VARBUF1(z, y, 1)), padsize_x - 1, 1);
+ brent_kung_prefix_sum(&(VARBUF2(z, y, 1)), padsize_x - 1, 1);
+ }
+ }
+
+ for (int z = zd_start; z < padsize_z; z++)
+ {
+ for (int x = 1; x < padsize_x; x++)
+ {
+ brent_kung_prefix_sum(&(VARBUF1(z, 1, x)), padsize_y - 1, yst);
+ brent_kung_prefix_sum(&(VARBUF2(z, 1, x)), padsize_y - 1, yst);
+ }
+ }
+
+ if (have_z)
+ {
+ for (int y = 1; y < padsize_y; y++)
+ {
+ for (int x = 1; x < padsize_x; x++)
+ {
+ brent_kung_prefix_sum(&(VARBUF1(1, y, x)), padsize_z - 1, zst);
+ brent_kung_prefix_sum(&(VARBUF2(1, y, x)), padsize_z - 1, zst);
+ }
+ }
+ }
+
+ // Compute a few constants used in the variance-calculation.
+ float alpha_kdim = static_cast<float>(2 * alpha_kernel_radius + 1);
+ float alpha_rsamples;
+
+ if (have_z)
+ {
+ alpha_rsamples = 1.0f / (alpha_kdim * alpha_kdim * alpha_kdim);
+ }
+ else
+ {
+ alpha_rsamples = 1.0f / (alpha_kdim * alpha_kdim);
+ }
+
+ // Use the summed-area tables to compute variance for each neighborhood
+ if (have_z)
+ {
+ for (int z = 0; z < size_z; z++)
+ {
+ int z_src = z + kernel_radius_z;
+ int z_dst = z + offset_z;
+ int z_low = z_src - alpha_kernel_radius;
+ int z_high = z_src + alpha_kernel_radius + 1;
+
+ for (int y = 0; y < size_y; y++)
+ {
+ int y_src = y + kernel_radius_xy;
+ int y_dst = y + offset_y;
+ int y_low = y_src - alpha_kernel_radius;
+ int y_high = y_src + alpha_kernel_radius + 1;
+
+ for (int x = 0; x < size_x; x++)
+ {
+ int x_src = x + kernel_radius_xy;
+ int x_dst = x + offset_x;
+ int x_low = x_src - alpha_kernel_radius;
+ int x_high = x_src + alpha_kernel_radius + 1;
+
+ // Summed-area table lookups for alpha average
+ float vasum = ( VARBUF1(z_high, y_low, x_low).lane<3>()
+ - VARBUF1(z_high, y_low, x_high).lane<3>()
+ - VARBUF1(z_high, y_high, x_low).lane<3>()
+ + VARBUF1(z_high, y_high, x_high).lane<3>()) -
+ ( VARBUF1(z_low, y_low, x_low).lane<3>()
+ - VARBUF1(z_low, y_low, x_high).lane<3>()
+ - VARBUF1(z_low, y_high, x_low).lane<3>()
+ + VARBUF1(z_low, y_high, x_high).lane<3>());
+
+ int out_index = z_dst * zdt + y_dst * ydt + x_dst;
+ input_alpha_averages[out_index] = (vasum * alpha_rsamples);
+ }
+ }
+ }
+ }
+ else
+ {
+ for (int y = 0; y < size_y; y++)
+ {
+ int y_src = y + kernel_radius_xy;
+ int y_dst = y + offset_y;
+ int y_low = y_src - alpha_kernel_radius;
+ int y_high = y_src + alpha_kernel_radius + 1;
+
+ for (int x = 0; x < size_x; x++)
+ {
+ int x_src = x + kernel_radius_xy;
+ int x_dst = x + offset_x;
+ int x_low = x_src - alpha_kernel_radius;
+ int x_high = x_src + alpha_kernel_radius + 1;
+
+ // Summed-area table lookups for alpha average
+ float vasum = VARBUF1(0, y_low, x_low).lane<3>()
+ - VARBUF1(0, y_low, x_high).lane<3>()
+ - VARBUF1(0, y_high, x_low).lane<3>()
+ + VARBUF1(0, y_high, x_high).lane<3>();
+
+ int out_index = y_dst * ydt + x_dst;
+ input_alpha_averages[out_index] = (vasum * alpha_rsamples);
+ }
+ }
+ }
+}
+
+/* See header for documentation. */
+unsigned int init_compute_averages(
+ const astcenc_image& img,
+ unsigned int alpha_kernel_radius,
+ const astcenc_swizzle& swz,
+ avg_args& ag
+) {
+ unsigned int size_x = img.dim_x;
+ unsigned int size_y = img.dim_y;
+ unsigned int size_z = img.dim_z;
+
+ // Compute maximum block size and from that the working memory buffer size
+ unsigned int kernel_radius = alpha_kernel_radius;
+ unsigned int kerneldim = 2 * kernel_radius + 1;
+
+ bool have_z = (size_z > 1);
+ unsigned int max_blk_size_xy = have_z ? 16 : 32;
+ unsigned int max_blk_size_z = astc::min(size_z, have_z ? 16u : 1u);
+
+ unsigned int max_padsize_xy = max_blk_size_xy + kerneldim;
+ unsigned int max_padsize_z = max_blk_size_z + (have_z ? kerneldim : 0);
+
+ // Perform block-wise average calculations across the image
+ // Initialize fields which are not populated until later
+ ag.arg.size_x = 0;
+ ag.arg.size_y = 0;
+ ag.arg.size_z = 0;
+ ag.arg.offset_x = 0;
+ ag.arg.offset_y = 0;
+ ag.arg.offset_z = 0;
+ ag.arg.work_memory = nullptr;
+
+ ag.arg.img = &img;
+ ag.arg.swz = swz;
+ ag.arg.have_z = have_z;
+ ag.arg.alpha_kernel_radius = alpha_kernel_radius;
+
+ ag.img_size_x = size_x;
+ ag.img_size_y = size_y;
+ ag.img_size_z = size_z;
+ ag.blk_size_xy = max_blk_size_xy;
+ ag.blk_size_z = max_blk_size_z;
+ ag.work_memory_size = 2 * max_padsize_xy * max_padsize_xy * max_padsize_z;
+
+ // The parallel task count
+ unsigned int z_tasks = (size_z + max_blk_size_z - 1) / max_blk_size_z;
+ unsigned int y_tasks = (size_y + max_blk_size_xy - 1) / max_blk_size_xy;
+ return z_tasks * y_tasks;
+}
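+
+// Worked example of the task count returned above: a 1024x512 2D image (size_z == 1) uses
+// a 32 texel block height, so y_tasks = ceil(512 / 32) = 16 and z_tasks = 1, giving 16
+// independent tasks, one per horizontal slice of blocks (and per Z slice for 3D images).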
+
+#endif
diff --git a/thirdparty/astcenc/astcenc_decompress_symbolic.cpp b/thirdparty/astcenc/astcenc_decompress_symbolic.cpp
new file mode 100644
index 0000000000..39e5525c3b
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_decompress_symbolic.cpp
@@ -0,0 +1,623 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2011-2023 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+/**
+ * @brief Functions to decompress a symbolic block.
+ */
+
+#include "astcenc_internal.h"
+
+#include <stdio.h>
+#include <assert.h>
+
+/**
+ * @brief Compute the integer linear interpolation of two color endpoints.
+ *
+ * @param decode_mode The ASTC profile (linear or sRGB)
+ * @param color0 The endpoint0 color.
+ * @param color1 The endpoint1 color.
+ * @param weights The interpolation weights (between 0 and 64).
+ *
+ * @return The interpolated color.
+ */
+static vint4 lerp_color_int(
+ astcenc_profile decode_mode,
+ vint4 color0,
+ vint4 color1,
+ vint4 weights
+) {
+ vint4 weight1 = weights;
+ vint4 weight0 = vint4(64) - weight1;
+
+ if (decode_mode == ASTCENC_PRF_LDR_SRGB)
+ {
+ color0 = asr<8>(color0);
+ color1 = asr<8>(color1);
+ }
+
+ vint4 color = (color0 * weight0) + (color1 * weight1) + vint4(32);
+ color = asr<6>(color);
+
+ if (decode_mode == ASTCENC_PRF_LDR_SRGB)
+ {
+ color = color * vint4(257);
+ }
+
+ return color;
+}
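+
+// Worked example of the fixed-point blend above for the linear LDR profile: with one
+// channel having color0 = 0 and color1 = 65535, a weight of 32 gives
+//
+//     (0 * 32 + 65535 * 32 + 32) >> 6 = 2097152 >> 6 = 32768
+//
+// i.e. weight 0 returns color0, weight 64 returns color1, and the +32 term rounds the
+// 6-bit fixed-point product. For sRGB only the top 8 bits of each endpoint are blended,
+// and the * 257 step expands the 8-bit result back to the 16-bit range.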
+
+
+/**
+ * @brief Convert integer color value into a float value for the decoder.
+ *
+ * @param data The integer color value post-interpolation.
+ * @param lns_mask If set treat lane as HDR (LNS) else LDR (unorm16).
+ *
+ * @return The float color value.
+ */
+static inline vfloat4 decode_texel(
+ vint4 data,
+ vmask4 lns_mask
+) {
+ vint4 color_lns = vint4::zero();
+ vint4 color_unorm = vint4::zero();
+
+ if (any(lns_mask))
+ {
+ color_lns = lns_to_sf16(data);
+ }
+
+ if (!all(lns_mask))
+ {
+ color_unorm = unorm16_to_sf16(data);
+ }
+
+ // Pick components and then convert to FP16
+ vint4 datai = select(color_unorm, color_lns, lns_mask);
+ return float16_to_float(datai);
+}
+
+/* See header for documentation. */
+void unpack_weights(
+ const block_size_descriptor& bsd,
+ const symbolic_compressed_block& scb,
+ const decimation_info& di,
+ bool is_dual_plane,
+ int weights_plane1[BLOCK_MAX_TEXELS],
+ int weights_plane2[BLOCK_MAX_TEXELS]
+) {
+ // Safe to overshoot as all arrays are allocated to full size
+ if (!is_dual_plane)
+ {
+ // Build full 64-entry weight lookup table
+ vint4 tab0(reinterpret_cast<const int*>(scb.weights + 0));
+ vint4 tab1(reinterpret_cast<const int*>(scb.weights + 16));
+ vint4 tab2(reinterpret_cast<const int*>(scb.weights + 32));
+ vint4 tab3(reinterpret_cast<const int*>(scb.weights + 48));
+
+ vint tab0p, tab1p, tab2p, tab3p;
+ vtable_prepare(tab0, tab1, tab2, tab3, tab0p, tab1p, tab2p, tab3p);
+
+ for (unsigned int i = 0; i < bsd.texel_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ vint summed_value(8);
+ vint weight_count(di.texel_weight_count + i);
+ int max_weight_count = hmax(weight_count).lane<0>();
+
+ promise(max_weight_count > 0);
+ for (int j = 0; j < max_weight_count; j++)
+ {
+ vint texel_weights(di.texel_weights_tr[j] + i);
+ vint texel_weights_int(di.texel_weight_contribs_int_tr[j] + i);
+
+ summed_value += vtable_8bt_32bi(tab0p, tab1p, tab2p, tab3p, texel_weights) * texel_weights_int;
+ }
+
+ store(lsr<4>(summed_value), weights_plane1 + i);
+ }
+ }
+ else
+ {
+ // Build a 32-entry weight lookup table per plane
+ // Plane 1
+ vint4 tab0_plane1(reinterpret_cast<const int*>(scb.weights + 0));
+ vint4 tab1_plane1(reinterpret_cast<const int*>(scb.weights + 16));
+ vint tab0_plane1p, tab1_plane1p;
+ vtable_prepare(tab0_plane1, tab1_plane1, tab0_plane1p, tab1_plane1p);
+
+ // Plane 2
+ vint4 tab0_plane2(reinterpret_cast<const int*>(scb.weights + 32));
+ vint4 tab1_plane2(reinterpret_cast<const int*>(scb.weights + 48));
+ vint tab0_plane2p, tab1_plane2p;
+ vtable_prepare(tab0_plane2, tab1_plane2, tab0_plane2p, tab1_plane2p);
+
+ for (unsigned int i = 0; i < bsd.texel_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ vint sum_plane1(8);
+ vint sum_plane2(8);
+
+ vint weight_count(di.texel_weight_count + i);
+ int max_weight_count = hmax(weight_count).lane<0>();
+
+ promise(max_weight_count > 0);
+ for (int j = 0; j < max_weight_count; j++)
+ {
+ vint texel_weights(di.texel_weights_tr[j] + i);
+ vint texel_weights_int(di.texel_weight_contribs_int_tr[j] + i);
+
+ sum_plane1 += vtable_8bt_32bi(tab0_plane1p, tab1_plane1p, texel_weights) * texel_weights_int;
+ sum_plane2 += vtable_8bt_32bi(tab0_plane2p, tab1_plane2p, texel_weights) * texel_weights_int;
+ }
+
+ store(lsr<4>(sum_plane1), weights_plane1 + i);
+ store(lsr<4>(sum_plane2), weights_plane2 + i);
+ }
+ }
+}
+
+/**
+ * @brief Return an FP32 NaN value for use in error colors.
+ *
+ * This NaN encoding will turn into 0xFFFF when converted to an FP16 NaN.
+ *
+ * @return The float color value.
+ */
+static float error_color_nan()
+{
+ if32 v;
+ v.u = 0xFFFFE000U;
+ return v.f;
+}
+
+/* See header for documentation. */
+void decompress_symbolic_block(
+ astcenc_profile decode_mode,
+ const block_size_descriptor& bsd,
+ int xpos,
+ int ypos,
+ int zpos,
+ const symbolic_compressed_block& scb,
+ image_block& blk
+) {
+ blk.xpos = xpos;
+ blk.ypos = ypos;
+ blk.zpos = zpos;
+
+ blk.data_min = vfloat4::zero();
+ blk.data_mean = vfloat4::zero();
+ blk.data_max = vfloat4::zero();
+ blk.grayscale = false;
+
+ // If we detected an error-block, blow up immediately.
+ if (scb.block_type == SYM_BTYPE_ERROR)
+ {
+ for (unsigned int i = 0; i < bsd.texel_count; i++)
+ {
+ blk.data_r[i] = error_color_nan();
+ blk.data_g[i] = error_color_nan();
+ blk.data_b[i] = error_color_nan();
+ blk.data_a[i] = error_color_nan();
+ blk.rgb_lns[i] = 0;
+ blk.alpha_lns[i] = 0;
+ }
+
+ return;
+ }
+
+ if ((scb.block_type == SYM_BTYPE_CONST_F16) ||
+ (scb.block_type == SYM_BTYPE_CONST_U16))
+ {
+ vfloat4 color;
+ uint8_t use_lns = 0;
+
+ // UNORM16 constant color block
+ if (scb.block_type == SYM_BTYPE_CONST_U16)
+ {
+ vint4 colori(scb.constant_color);
+
+ // For sRGB decoding a real decoder would just use the top 8 bits for color conversion.
+ // We don't color convert, so rescale the top 8 bits into the full 16 bit dynamic range.
+ if (decode_mode == ASTCENC_PRF_LDR_SRGB)
+ {
+ colori = asr<8>(colori) * 257;
+ }
+
+ vint4 colorf16 = unorm16_to_sf16(colori);
+ color = float16_to_float(colorf16);
+ }
+ // FLOAT16 constant color block
+ else
+ {
+ switch (decode_mode)
+ {
+ case ASTCENC_PRF_LDR_SRGB:
+ case ASTCENC_PRF_LDR:
+ color = vfloat4(error_color_nan());
+ break;
+ case ASTCENC_PRF_HDR_RGB_LDR_A:
+ case ASTCENC_PRF_HDR:
+ // Constant-color block; unpack from FP16 to FP32.
+ color = float16_to_float(vint4(scb.constant_color));
+ use_lns = 1;
+ break;
+ }
+ }
+
+ for (unsigned int i = 0; i < bsd.texel_count; i++)
+ {
+ blk.data_r[i] = color.lane<0>();
+ blk.data_g[i] = color.lane<1>();
+ blk.data_b[i] = color.lane<2>();
+ blk.data_a[i] = color.lane<3>();
+ blk.rgb_lns[i] = use_lns;
+ blk.alpha_lns[i] = use_lns;
+ }
+
+ return;
+ }
+
+ // Get the appropriate partition-table entry
+ int partition_count = scb.partition_count;
+ const auto& pi = bsd.get_partition_info(partition_count, scb.partition_index);
+
+ // Get the appropriate block descriptors
+ const auto& bm = bsd.get_block_mode(scb.block_mode);
+ const auto& di = bsd.get_decimation_info(bm.decimation_mode);
+
+ bool is_dual_plane = static_cast<bool>(bm.is_dual_plane);
+
+ // Unquantize and undecimate the weights
+ int plane1_weights[BLOCK_MAX_TEXELS];
+ int plane2_weights[BLOCK_MAX_TEXELS];
+ unpack_weights(bsd, scb, di, is_dual_plane, plane1_weights, plane2_weights);
+
+ // Now that we have endpoint colors and weights, we can unpack texel colors
+ int plane2_component = scb.plane2_component;
+ vmask4 plane2_mask = vint4::lane_id() == vint4(plane2_component);
+
+ for (int i = 0; i < partition_count; i++)
+ {
+ // Decode the color endpoints for this partition
+ vint4 ep0;
+ vint4 ep1;
+ bool rgb_lns;
+ bool a_lns;
+
+ unpack_color_endpoints(decode_mode,
+ scb.color_formats[i],
+ scb.color_values[i],
+ rgb_lns, a_lns,
+ ep0, ep1);
+
+ vmask4 lns_mask(rgb_lns, rgb_lns, rgb_lns, a_lns);
+
+ int texel_count = pi.partition_texel_count[i];
+ for (int j = 0; j < texel_count; j++)
+ {
+ int tix = pi.texels_of_partition[i][j];
+ vint4 weight = select(vint4(plane1_weights[tix]), vint4(plane2_weights[tix]), plane2_mask);
+ vint4 color = lerp_color_int(decode_mode, ep0, ep1, weight);
+ vfloat4 colorf = decode_texel(color, lns_mask);
+
+ blk.data_r[tix] = colorf.lane<0>();
+ blk.data_g[tix] = colorf.lane<1>();
+ blk.data_b[tix] = colorf.lane<2>();
+ blk.data_a[tix] = colorf.lane<3>();
+ }
+ }
+}
+
+#if !defined(ASTCENC_DECOMPRESS_ONLY)
+
+/* See header for documentation. */
+float compute_symbolic_block_difference_2plane(
+ const astcenc_config& config,
+ const block_size_descriptor& bsd,
+ const symbolic_compressed_block& scb,
+ const image_block& blk
+) {
+ // If we detected an error-block, blow up immediately.
+ if (scb.block_type == SYM_BTYPE_ERROR)
+ {
+ return ERROR_CALC_DEFAULT;
+ }
+
+ assert(scb.block_mode >= 0);
+ assert(scb.partition_count == 1);
+ assert(bsd.get_block_mode(scb.block_mode).is_dual_plane == 1);
+
+ // Get the appropriate block descriptor
+ const block_mode& bm = bsd.get_block_mode(scb.block_mode);
+ const decimation_info& di = bsd.get_decimation_info(bm.decimation_mode);
+
+ // Unquantize and undecimate the weights
+ int plane1_weights[BLOCK_MAX_TEXELS];
+ int plane2_weights[BLOCK_MAX_TEXELS];
+ unpack_weights(bsd, scb, di, true, plane1_weights, plane2_weights);
+
+ vmask4 plane2_mask = vint4::lane_id() == vint4(scb.plane2_component);
+
+ vfloat4 summa = vfloat4::zero();
+
+ // Decode the color endpoints for this partition
+ vint4 ep0;
+ vint4 ep1;
+ bool rgb_lns;
+ bool a_lns;
+
+ unpack_color_endpoints(config.profile,
+ scb.color_formats[0],
+ scb.color_values[0],
+ rgb_lns, a_lns,
+ ep0, ep1);
+
+ // Unpack and compute error for each texel in the partition
+ unsigned int texel_count = bsd.texel_count;
+ for (unsigned int i = 0; i < texel_count; i++)
+ {
+ vint4 weight = select(vint4(plane1_weights[i]), vint4(plane2_weights[i]), plane2_mask);
+ vint4 colori = lerp_color_int(config.profile, ep0, ep1, weight);
+
+ vfloat4 color = int_to_float(colori);
+ vfloat4 oldColor = blk.texel(i);
+
+ // Compare error using a perceptual decode metric for RGBM textures
+ if (config.flags & ASTCENC_FLG_MAP_RGBM)
+ {
+ // Fail encodings that result in zero weight M pixels. Note that this can cause
+ // "interesting" artifacts if we reject all useful encodings - we typically get max
+ // brightness encodings instead which look just as bad. We recommend users apply a
+ // bias to their stored M value, limiting the lower value to 16 or 32 to avoid
+ // getting small M values post-quantization, but we can't prove it would never
+ // happen, especially at low bit rates ...
+ if (color.lane<3>() == 0.0f)
+ {
+ return -ERROR_CALC_DEFAULT;
+ }
+
+ // Compute error based on decoded RGBM color
+ color = vfloat4(
+ color.lane<0>() * color.lane<3>() * config.rgbm_m_scale,
+ color.lane<1>() * color.lane<3>() * config.rgbm_m_scale,
+ color.lane<2>() * color.lane<3>() * config.rgbm_m_scale,
+ 1.0f
+ );
+
+ oldColor = vfloat4(
+ oldColor.lane<0>() * oldColor.lane<3>() * config.rgbm_m_scale,
+ oldColor.lane<1>() * oldColor.lane<3>() * config.rgbm_m_scale,
+ oldColor.lane<2>() * oldColor.lane<3>() * config.rgbm_m_scale,
+ 1.0f
+ );
+ }
+
+ vfloat4 error = oldColor - color;
+ error = min(abs(error), 1e15f);
+ error = error * error;
+
+ summa += min(dot(error, blk.channel_weight), ERROR_CALC_DEFAULT);
+ }
+
+ return summa.lane<0>();
+}
+
+/* See header for documentation. */
+float compute_symbolic_block_difference_1plane(
+ const astcenc_config& config,
+ const block_size_descriptor& bsd,
+ const symbolic_compressed_block& scb,
+ const image_block& blk
+) {
+ assert(bsd.get_block_mode(scb.block_mode).is_dual_plane == 0);
+
+ // If we detected an error-block, blow up immediately.
+ if (scb.block_type == SYM_BTYPE_ERROR)
+ {
+ return ERROR_CALC_DEFAULT;
+ }
+
+ assert(scb.block_mode >= 0);
+
+ // Get the appropriate partition-table entry
+ unsigned int partition_count = scb.partition_count;
+ const auto& pi = bsd.get_partition_info(partition_count, scb.partition_index);
+
+ // Get the appropriate block descriptor
+ const block_mode& bm = bsd.get_block_mode(scb.block_mode);
+ const decimation_info& di = bsd.get_decimation_info(bm.decimation_mode);
+
+ // Unquantize and undecimate the weights
+ int plane1_weights[BLOCK_MAX_TEXELS];
+ unpack_weights(bsd, scb, di, false, plane1_weights, nullptr);
+
+ vfloat4 summa = vfloat4::zero();
+ for (unsigned int i = 0; i < partition_count; i++)
+ {
+ // Decode the color endpoints for this partition
+ vint4 ep0;
+ vint4 ep1;
+ bool rgb_lns;
+ bool a_lns;
+
+ unpack_color_endpoints(config.profile,
+ scb.color_formats[i],
+ scb.color_values[i],
+ rgb_lns, a_lns,
+ ep0, ep1);
+
+ // Unpack and compute error for each texel in the partition
+ unsigned int texel_count = pi.partition_texel_count[i];
+ for (unsigned int j = 0; j < texel_count; j++)
+ {
+ unsigned int tix = pi.texels_of_partition[i][j];
+ vint4 colori = lerp_color_int(config.profile, ep0, ep1,
+ vint4(plane1_weights[tix]));
+
+ vfloat4 color = int_to_float(colori);
+ vfloat4 oldColor = blk.texel(tix);
+
+ // Compare error using a perceptual decode metric for RGBM textures
+ if (config.flags & ASTCENC_FLG_MAP_RGBM)
+ {
+ // Fail encodings that result in zero weight M pixels. Note that this can cause
+ // "interesting" artifacts if we reject all useful encodings - we typically get max
+ // brightness encodings instead which look just as bad. We recommend users apply a
+ // bias to their stored M value, limiting the lower value to 16 or 32 to avoid
+ // getting small M values post-quantization, but we can't prove it would never
+ // happen, especially at low bit rates ...
+ if (color.lane<3>() == 0.0f)
+ {
+ return -ERROR_CALC_DEFAULT;
+ }
+
+ // Compute error based on decoded RGBM color
+ color = vfloat4(
+ color.lane<0>() * color.lane<3>() * config.rgbm_m_scale,
+ color.lane<1>() * color.lane<3>() * config.rgbm_m_scale,
+ color.lane<2>() * color.lane<3>() * config.rgbm_m_scale,
+ 1.0f
+ );
+
+ oldColor = vfloat4(
+ oldColor.lane<0>() * oldColor.lane<3>() * config.rgbm_m_scale,
+ oldColor.lane<1>() * oldColor.lane<3>() * config.rgbm_m_scale,
+ oldColor.lane<2>() * oldColor.lane<3>() * config.rgbm_m_scale,
+ 1.0f
+ );
+ }
+
+ vfloat4 error = oldColor - color;
+ error = min(abs(error), 1e15f);
+ error = error * error;
+
+ summa += min(dot(error, blk.channel_weight), ERROR_CALC_DEFAULT);
+ }
+ }
+
+ return summa.lane<0>();
+}
+
+/* See header for documentation. */
+float compute_symbolic_block_difference_1plane_1partition(
+ const astcenc_config& config,
+ const block_size_descriptor& bsd,
+ const symbolic_compressed_block& scb,
+ const image_block& blk
+) {
+ // If we detected an error-block, blow up immediately.
+ if (scb.block_type == SYM_BTYPE_ERROR)
+ {
+ return ERROR_CALC_DEFAULT;
+ }
+
+ assert(scb.block_mode >= 0);
+ assert(bsd.get_partition_info(scb.partition_count, scb.partition_index).partition_count == 1);
+
+ // Get the appropriate block descriptor
+ const block_mode& bm = bsd.get_block_mode(scb.block_mode);
+ const decimation_info& di = bsd.get_decimation_info(bm.decimation_mode);
+
+ // Unquantize and undecimate the weights
+ alignas(ASTCENC_VECALIGN) int plane1_weights[BLOCK_MAX_TEXELS];
+ unpack_weights(bsd, scb, di, false, plane1_weights, nullptr);
+
+ // Decode the color endpoints for this partition
+ vint4 ep0;
+ vint4 ep1;
+ bool rgb_lns;
+ bool a_lns;
+
+ unpack_color_endpoints(config.profile,
+ scb.color_formats[0],
+ scb.color_values[0],
+ rgb_lns, a_lns,
+ ep0, ep1);
+
+
+ // Pre-shift sRGB so things round correctly
+ if (config.profile == ASTCENC_PRF_LDR_SRGB)
+ {
+ ep0 = asr<8>(ep0);
+ ep1 = asr<8>(ep1);
+ }
+
+ // Unpack and compute error for each texel in the partition
+ vfloatacc summav = vfloatacc::zero();
+
+ vint lane_id = vint::lane_id();
+ vint srgb_scale(config.profile == ASTCENC_PRF_LDR_SRGB ? 257 : 1);
+
+ unsigned int texel_count = bsd.texel_count;
+ for (unsigned int i = 0; i < texel_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ // Compute EP1 contribution
+ vint weight1 = vint::loada(plane1_weights + i);
+ vint ep1_r = vint(ep1.lane<0>()) * weight1;
+ vint ep1_g = vint(ep1.lane<1>()) * weight1;
+ vint ep1_b = vint(ep1.lane<2>()) * weight1;
+ vint ep1_a = vint(ep1.lane<3>()) * weight1;
+
+ // Compute EP0 contribution
+ vint weight0 = vint(64) - weight1;
+ vint ep0_r = vint(ep0.lane<0>()) * weight0;
+ vint ep0_g = vint(ep0.lane<1>()) * weight0;
+ vint ep0_b = vint(ep0.lane<2>()) * weight0;
+ vint ep0_a = vint(ep0.lane<3>()) * weight0;
+
+ // Shift so things round correctly
+ vint colori_r = asr<6>(ep0_r + ep1_r + vint(32)) * srgb_scale;
+ vint colori_g = asr<6>(ep0_g + ep1_g + vint(32)) * srgb_scale;
+ vint colori_b = asr<6>(ep0_b + ep1_b + vint(32)) * srgb_scale;
+ vint colori_a = asr<6>(ep0_a + ep1_a + vint(32)) * srgb_scale;
+
+ // Compute color diff
+ vfloat color_r = int_to_float(colori_r);
+ vfloat color_g = int_to_float(colori_g);
+ vfloat color_b = int_to_float(colori_b);
+ vfloat color_a = int_to_float(colori_a);
+
+ vfloat color_orig_r = loada(blk.data_r + i);
+ vfloat color_orig_g = loada(blk.data_g + i);
+ vfloat color_orig_b = loada(blk.data_b + i);
+ vfloat color_orig_a = loada(blk.data_a + i);
+
+ vfloat color_error_r = min(abs(color_orig_r - color_r), vfloat(1e15f));
+ vfloat color_error_g = min(abs(color_orig_g - color_g), vfloat(1e15f));
+ vfloat color_error_b = min(abs(color_orig_b - color_b), vfloat(1e15f));
+ vfloat color_error_a = min(abs(color_orig_a - color_a), vfloat(1e15f));
+
+ // Compute squared error metric
+ color_error_r = color_error_r * color_error_r;
+ color_error_g = color_error_g * color_error_g;
+ color_error_b = color_error_b * color_error_b;
+ color_error_a = color_error_a * color_error_a;
+
+ vfloat metric = color_error_r * blk.channel_weight.lane<0>()
+ + color_error_g * blk.channel_weight.lane<1>()
+ + color_error_b * blk.channel_weight.lane<2>()
+ + color_error_a * blk.channel_weight.lane<3>();
+
+ // Mask off bad lanes
+ vmask mask = lane_id < vint(texel_count);
+ lane_id += vint(ASTCENC_SIMD_WIDTH);
+ haccumulate(summav, metric, mask);
+ }
+
+ return hadd_s(summav);
+}
+
+#endif
diff --git a/thirdparty/astcenc/astcenc_diagnostic_trace.cpp b/thirdparty/astcenc/astcenc_diagnostic_trace.cpp
new file mode 100644
index 0000000000..7fa7ab1a8b
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_diagnostic_trace.cpp
@@ -0,0 +1,230 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2021-2022 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+/**
+ * @brief Functions for the library entrypoint.
+ */
+
+#if defined(ASTCENC_DIAGNOSTICS)
+
+#include <cassert>
+#include <cstdarg>
+#include <cstdio>
+#include <string>
+
+#include "astcenc_diagnostic_trace.h"
+
+/** @brief The global trace logger. */
+static TraceLog* g_TraceLog = nullptr;
+
+/** @brief The JSON indentation level. */
+static const size_t g_trace_indent = 2;
+
+TraceLog::TraceLog(
+ const char* file_name):
+ m_file(file_name, std::ofstream::out | std::ofstream::binary)
+{
+ assert(!g_TraceLog);
+ g_TraceLog = this;
+ m_root = new TraceNode("root");
+}
+
+/* See header for documentation. */
+TraceNode* TraceLog::get_current_leaf()
+{
+ if (m_stack.size())
+ {
+ return m_stack.back();
+ }
+
+ return nullptr;
+}
+
+/* See header for documentation. */
+size_t TraceLog::get_depth()
+{
+ return m_stack.size();
+}
+
+/* See header for documentation. */
+TraceLog::~TraceLog()
+{
+ assert(g_TraceLog == this);
+ delete m_root;
+ g_TraceLog = nullptr;
+}
+
+/* See header for documentation. */
+TraceNode::TraceNode(
+ const char* format,
+ ...
+) {
+ // Format the name string
+ constexpr size_t bufsz = 256;
+ char buffer[bufsz];
+
+ va_list args;
+ va_start (args, format);
+ vsnprintf (buffer, bufsz, format, args);
+ va_end (args);
+
+ // Guarantee there is a nul terminator
+ buffer[bufsz - 1] = 0;
+
+ // Generate the node
+ TraceNode* parent = g_TraceLog->get_current_leaf();
+ size_t depth = g_TraceLog->get_depth();
+ g_TraceLog->m_stack.push_back(this);
+
+ bool comma = parent && parent->m_attrib_count;
+ auto& out = g_TraceLog->m_file;
+
+ if (parent)
+ {
+ parent->m_attrib_count++;
+ }
+
+ if (comma)
+ {
+ out << ',';
+ }
+
+ if (depth)
+ {
+ out << '\n';
+ }
+
+ size_t out_indent = (depth * 2) * g_trace_indent;
+ size_t in_indent = (depth * 2 + 1) * g_trace_indent;
+
+ std::string out_indents("");
+ if (out_indent)
+ {
+ out_indents = std::string(out_indent, ' ');
+ }
+
+ std::string in_indents(in_indent, ' ');
+
+ out << out_indents << "[ \"node\", \"" << buffer << "\",\n";
+ out << in_indents << "[";
+}
+
+/* See header for documentation. */
+void TraceNode::add_attrib(
+ std::string type,
+ std::string key,
+ std::string value
+) {
+ (void)type;
+
+ size_t depth = g_TraceLog->get_depth();
+ size_t indent = (depth * 2) * g_trace_indent;
+ auto& out = g_TraceLog->m_file;
+ bool comma = m_attrib_count;
+ m_attrib_count++;
+
+ if (comma)
+ {
+ out << ',';
+ }
+
+ out << '\n';
+ out << std::string(indent, ' ') << "[ "
+ << "\"" << key << "\", "
+ << value << " ]";
+}
+
+/* See header for documentation. */
+TraceNode::~TraceNode()
+{
+ g_TraceLog->m_stack.pop_back();
+
+ auto& out = g_TraceLog->m_file;
+ size_t depth = g_TraceLog->get_depth();
+ size_t out_indent = (depth * 2) * g_trace_indent;
+ size_t in_indent = (depth * 2 + 1) * g_trace_indent;
+
+ std::string out_indents("");
+ if (out_indent)
+ {
+ out_indents = std::string(out_indent, ' ');
+ }
+
+ std::string in_indents(in_indent, ' ');
+
+ if (m_attrib_count)
+ {
+ out << "\n" << in_indents;
+ }
+ out << "]\n";
+
+ out << out_indents << "]";
+}
+
+/* See header for documentation. */
+void trace_add_data(
+ const char* key,
+ const char* format,
+ ...
+) {
+ constexpr size_t bufsz = 256;
+ char buffer[bufsz];
+
+ va_list args;
+ va_start (args, format);
+ vsnprintf (buffer, bufsz, format, args);
+ va_end (args);
+
+ // Guarantee there is a nul terminator
+ buffer[bufsz - 1] = 0;
+
+ std::string value = "\"" + std::string(buffer) + "\"";
+
+ TraceNode* node = g_TraceLog->get_current_leaf();
+ node->add_attrib("str", key, value);
+}
+
+/* See header for documentation. */
+void trace_add_data(
+ const char* key,
+ float value
+) {
+ char buffer[256];
+ sprintf(buffer, "%.20g", (double)value);
+ TraceNode* node = g_TraceLog->get_current_leaf();
+ node->add_attrib("float", key, buffer);
+}
+
+/* See header for documentation. */
+void trace_add_data(
+ const char* key,
+ int value
+) {
+ TraceNode* node = g_TraceLog->get_current_leaf();
+ node->add_attrib("int", key, std::to_string(value));
+}
+
+/* See header for documentation. */
+void trace_add_data(
+ const char* key,
+ unsigned int value
+) {
+ TraceNode* node = g_TraceLog->get_current_leaf();
+ node->add_attrib("int", key, std::to_string(value));
+}
+
+#endif
diff --git a/thirdparty/astcenc/astcenc_diagnostic_trace.h b/thirdparty/astcenc/astcenc_diagnostic_trace.h
new file mode 100644
index 0000000000..f5586b0ad5
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_diagnostic_trace.h
@@ -0,0 +1,219 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2021-2022 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+/**
+ * @brief This module provides a set of diagnostic tracing utilities.
+ *
+ * Overview
+ * ========
+ *
+ * The built-in diagnostic trace tool generates a hierarchical JSON tree structure. The tree
+ * hierarchy contains three levels:
+ *
+ * - block
+ * - pass
+ * - candidate
+ *
+ * One block node exists for each compressed block in the image. One pass node exists for each major
+ * pass (N partition, M planes, O components) applied to a block. One candidate node exists for each
+ * encoding candidate trialed for a pass.
+ *
+ * Each node contains both the hierarchy and a number of attributes which explain the behavior.
+ * For example, the block node contains the block coordinates in the image, the pass explains the
+ * pass configuration, and the candidate will explain the candidate encoding such as weight
+ * decimation, refinement error, etc.
+ *
+ * Trace Nodes are designed as scope-managed C++ objects with stack-like push/pop behavior.
+ * Constructing a trace node on the stack will automatically add it to the current node as a child,
+ * and then make it the current node. Destroying the current node will pop the stack and set the
+ * parent to the current node. This provides a robust mechanism for ensuring reliable nesting in the
+ * tree structure.
+ *
+ * A set of utility macros are provided to add attribute annotations to the current trace node.
+ *
+ * Usage
+ * =====
+ *
+ * Create Trace Nodes on the stack using the @c TRACE_NODE() macro. This will compile-out completely
+ * in builds with diagnostics disabled.
+ *
+ * Add annotations to the current trace node using the @c trace_add_data() macro. This will
+ * similarly compile out completely in builds with diagnostics disabled.
+ *
+ * If you need to add additional code to support diagnostics-only behavior wrap
+ * it in preprocessor guards:
+ *
+ * #if defined(ASTCENC_DIAGNOSTICS)
+ * #endif
+ */
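+
+// A minimal usage sketch, mirroring how the compressor annotates a block-level node:
+//
+//     TRACE_NODE(node0, "block");
+//     trace_add_data("pos_x", blk.xpos);
+//     trace_add_data("exit", "quality hit");
+//
+// Both TRACE_NODE() and trace_add_data() compile to nothing when ASTCENC_DIAGNOSTICS is
+// not defined, so call sites need no additional guards for simple annotations.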
+
+#ifndef ASTCENC_DIAGNOSTIC_TRACE_INCLUDED
+#define ASTCENC_DIAGNOSTIC_TRACE_INCLUDED
+
+#if defined(ASTCENC_DIAGNOSTICS)
+
+#include <iostream>
+#include <fstream>
+#include <vector>
+
+/**
+ * @brief Class representing a single node in the trace hierarchy.
+ */
+class TraceNode
+{
+public:
+ /**
+ * @brief Construct a new node.
+ *
+ * Constructing a node will push to the top of the stack, automatically making it a child of
+ * the current node, and then setting it to become the current node.
+ *
+ * @param format The format template for the node name.
+ * @param ... The format parameters.
+ */
+ TraceNode(const char* format, ...);
+
+ /**
+ * @brief Add an attribute to this node.
+ *
+ * Note that no quoting is applied to the @c value, so if quoting is needed it must be done by
+ * the caller.
+ *
+ * @param type The type of the attribute.
+ * @param key The key of the attribute.
+ * @param value The value of the attribute.
+ */
+ void add_attrib(std::string type, std::string key, std::string value);
+
+ /**
+ * @brief Destroy this node.
+ *
+ * Destroying a node will pop it from the top of the stack, making its parent the current node.
+ * It is invalid behavior to destroy a node that is not the current node; usage must conform to
+ * stack push-pop semantics.
+ */
+ ~TraceNode();
+
+ /**
+ * @brief The number of attributes and child nodes in this node.
+ */
+ unsigned int m_attrib_count { 0 };
+};
+
+/**
+ * @brief Class representing the trace log file being written.
+ */
+class TraceLog
+{
+public:
+ /**
+ * @brief Create a new trace log.
+ *
+ * The trace log is global; there can be only one at a time.
+ *
+ * @param file_name The name of the file to write.
+ */
+ TraceLog(const char* file_name);
+
+ /**
+ * @brief Destroy the trace log.
+ *
+ * Trace logs MUST be cleanly destroyed to ensure the file gets written.
+ */
+ ~TraceLog();
+
+ /**
+ * @brief Get the current child node.
+ *
+ * @return The current leaf node.
+ */
+ TraceNode* get_current_leaf();
+
+ /**
+ * @brief Get the stack depth of the current child node.
+ *
+ * @return The current leaf node stack depth.
+ */
+ size_t get_depth();
+
+ /**
+ * @brief The file stream to write to.
+ */
+ std::ofstream m_file;
+
+ /**
+ * @brief The stack of nodes (newest at the back).
+ */
+ std::vector<TraceNode*> m_stack;
+
+private:
+ /**
+ * @brief The root node in the JSON file.
+ */
+ TraceNode* m_root;
+};
+
+/**
+ * @brief Utility macro to create a trace node on the stack.
+ *
+ * @param name The variable name to use.
+ * @param ... The name template and format parameters.
+ */
+#define TRACE_NODE(name, ...) TraceNode name(__VA_ARGS__);
+
+/**
+ * @brief Add a string annotation to the current node.
+ *
+ * @param key The name of the attribute.
+ * @param format The format template for the attribute value.
+ * @param ... The format parameters.
+ */
+void trace_add_data(const char* key, const char* format, ...);
+
+/**
+ * @brief Add a float annotation to the current node.
+ *
+ * @param key The name of the attribute.
+ * @param value The value of the attribute.
+ */
+void trace_add_data(const char* key, float value);
+
+/**
+ * @brief Add an integer annotation to the current node.
+ *
+ * @param key The name of the attribute.
+ * @param value The value of the attribute.
+ */
+void trace_add_data(const char* key, int value);
+
+/**
+ * @brief Add an unsigned integer annotation to the current node.
+ *
+ * @param key The name of the attribute.
+ * @param value The value of the attribute.
+ */
+void trace_add_data(const char* key, unsigned int value);
+
+#else
+
+#define TRACE_NODE(name, ...)
+
+#define trace_add_data(...)
+
+#endif
+
+#endif
diff --git a/thirdparty/astcenc/astcenc_entry.cpp b/thirdparty/astcenc/astcenc_entry.cpp
new file mode 100644
index 0000000000..e59f1fe61a
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_entry.cpp
@@ -0,0 +1,1427 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2011-2023 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+/**
+ * @brief Functions for the library entrypoint.
+ */
+
+#include <array>
+#include <cstring>
+#include <new>
+
+#include "astcenc.h"
+#include "astcenc_internal_entry.h"
+#include "astcenc_diagnostic_trace.h"
+
+/**
+ * @brief Record of the quality tuning parameter values.
+ *
+ * See the @c astcenc_config structure for detailed parameter documentation.
+ *
+ * Note that the mse_overshoot entries are scaling factors relative to the base MSE to hit db_limit.
+ * A 20% overshoot is harder to hit for a higher base db_limit, so we may actually use lower ratios
+ * for the more thorough search presets because the underlying db_limit is so much higher.
+ */
+struct astcenc_preset_config
+{
+ float quality;
+ unsigned int tune_partition_count_limit;
+ unsigned int tune_2partition_index_limit;
+ unsigned int tune_3partition_index_limit;
+ unsigned int tune_4partition_index_limit;
+ unsigned int tune_block_mode_limit;
+ unsigned int tune_refinement_limit;
+ unsigned int tune_candidate_limit;
+ unsigned int tune_2partitioning_candidate_limit;
+ unsigned int tune_3partitioning_candidate_limit;
+ unsigned int tune_4partitioning_candidate_limit;
+ float tune_db_limit_a_base;
+ float tune_db_limit_b_base;
+ float tune_mse_overshoot;
+ float tune_2_partition_early_out_limit_factor;
+ float tune_3_partition_early_out_limit_factor;
+ float tune_2_plane_early_out_limit_correlation;
+};
+
+/**
+ * @brief The static presets for high bandwidth encodings (x < 25 texels per block).
+ */
+static const std::array<astcenc_preset_config, 6> preset_configs_high {{
+ {
+ ASTCENC_PRE_FASTEST,
+ 2, 10, 6, 4, 43, 2, 2, 2, 2, 2, 85.2f, 63.2f, 3.5f, 1.0f, 1.0f, 0.85f
+ }, {
+ ASTCENC_PRE_FAST,
+ 3, 18, 10, 8, 55, 3, 3, 2, 2, 2, 85.2f, 63.2f, 3.5f, 1.0f, 1.0f, 0.90f
+ }, {
+ ASTCENC_PRE_MEDIUM,
+ 4, 34, 28, 16, 77, 3, 3, 2, 2, 2, 95.0f, 70.0f, 2.5f, 1.1f, 1.05f, 0.95f
+ }, {
+ ASTCENC_PRE_THOROUGH,
+ 4, 82, 60, 30, 94, 4, 4, 3, 2, 2, 105.0f, 77.0f, 10.0f, 1.35f, 1.15f, 0.97f
+ }, {
+ ASTCENC_PRE_VERYTHOROUGH,
+ 4, 256, 128, 64, 98, 4, 6, 20, 14, 8, 200.0f, 200.0f, 10.0f, 1.6f, 1.4f, 0.98f
+ }, {
+ ASTCENC_PRE_EXHAUSTIVE,
+ 4, 512, 512, 512, 100, 4, 8, 32, 32, 32, 200.0f, 200.0f, 10.0f, 2.0f, 2.0f, 0.99f
+ }
+}};
+
+/**
+ * @brief The static presets for medium bandwidth encodings (25 <= x < 64 texels per block).
+ */
+static const std::array<astcenc_preset_config, 6> preset_configs_mid {{
+ {
+ ASTCENC_PRE_FASTEST,
+ 2, 10, 6, 4, 43, 2, 2, 2, 2, 2, 85.2f, 63.2f, 3.5f, 1.0f, 1.0f, 0.80f
+ }, {
+ ASTCENC_PRE_FAST,
+ 3, 18, 12, 10, 55, 3, 3, 2, 2, 2, 85.2f, 63.2f, 3.5f, 1.0f, 1.0f, 0.85f
+ }, {
+ ASTCENC_PRE_MEDIUM,
+ 4, 34, 28, 16, 77, 3, 3, 2, 2, 2, 95.0f, 70.0f, 3.0f, 1.1f, 1.05f, 0.90f
+ }, {
+ ASTCENC_PRE_THOROUGH,
+ 4, 82, 60, 30, 94, 4, 4, 3, 2, 2, 105.0f, 77.0f, 10.0f, 1.4f, 1.2f, 0.95f
+ }, {
+ ASTCENC_PRE_VERYTHOROUGH,
+ 4, 256, 128, 64, 98, 4, 6, 12, 8, 3, 200.0f, 200.0f, 10.0f, 1.6f, 1.4f, 0.98f
+ }, {
+ ASTCENC_PRE_EXHAUSTIVE,
+ 4, 256, 256, 256, 100, 4, 8, 32, 32, 32, 200.0f, 200.0f, 10.0f, 2.0f, 2.0f, 0.99f
+ }
+}};
+
+/**
+ * @brief The static presets for low bandwidth encodings (64 <= x texels per block).
+ */
+static const std::array<astcenc_preset_config, 6> preset_configs_low {{
+ {
+ ASTCENC_PRE_FASTEST,
+ 2, 10, 6, 4, 40, 2, 2, 2, 2, 2, 85.0f, 63.0f, 3.5f, 1.0f, 1.0f, 0.80f
+ }, {
+ ASTCENC_PRE_FAST,
+ 2, 18, 12, 10, 55, 3, 3, 2, 2, 2, 85.0f, 63.0f, 3.5f, 1.0f, 1.0f, 0.85f
+ }, {
+ ASTCENC_PRE_MEDIUM,
+ 3, 34, 28, 16, 77, 3, 3, 2, 2, 2, 95.0f, 70.0f, 3.5f, 1.1f, 1.05f, 0.90f
+ }, {
+ ASTCENC_PRE_THOROUGH,
+ 4, 82, 60, 30, 93, 4, 4, 3, 2, 2, 105.0f, 77.0f, 10.0f, 1.3f, 1.2f, 0.97f
+ }, {
+ ASTCENC_PRE_VERYTHOROUGH,
+ 4, 256, 128, 64, 98, 4, 6, 9, 5, 2, 200.0f, 200.0f, 10.0f, 1.6f, 1.4f, 0.98f
+ }, {
+ ASTCENC_PRE_EXHAUSTIVE,
+ 4, 256, 256, 256, 100, 4, 8, 32, 32, 32, 200.0f, 200.0f, 10.0f, 2.0f, 2.0f, 0.99f
+ }
+}};
+
+/**
+ * @brief Validate CPU floating point meets assumptions made in the codec.
+ *
+ * The codec is written with the assumption that a float threaded through the @c if32 union will be
+ * stored and reloaded as a 32-bit IEEE-754 float with round-to-nearest rounding. This is always the
+ * case on an IEEE-754 compliant system; however, not every system or compilation mode is actually
+ * IEEE-754 compliant. This check normally fails if the code is compiled with fast math enabled.
+ *
+ * @return Return @c ASTCENC_SUCCESS if validated, otherwise an error on failure.
+ */
+static astcenc_error validate_cpu_float()
+{
+ if32 p;
+ volatile float xprec_testval = 2.51f;
+ p.f = xprec_testval + 12582912.0f;
+ float q = p.f - 12582912.0f;
+
+ if (q != 3.0f)
+ {
+ return ASTCENC_ERR_BAD_CPU_FLOAT;
+ }
+
+ return ASTCENC_SUCCESS;
+}
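+
+// Worked example of the check above (informal note, not part of the upstream source):
+// 12582912.0f is 1.5 * 2^23, so float spacing at that magnitude is exactly 1.0. Adding
+// 2.51f therefore forces a rounding step; round-to-nearest gives 12582915.0f, and
+// subtracting 12582912.0f again yields exactly 3.0f. If the compiler keeps the
+// intermediate in extended precision or contracts the expression (e.g. fast math),
+// q keeps a fractional part and the function returns ASTCENC_ERR_BAD_CPU_FLOAT.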
+
+/**
+ * @brief Validate CPU ISA support meets the requirements of this build of the library.
+ *
+ * Each library build is statically compiled for a particular set of CPU ISA features, such as the
+ * SIMD support or other ISA extensions such as POPCNT. This function checks that the host CPU
+ * actually supports everything this build needs.
+ *
+ * @return Return @c ASTCENC_SUCCESS if validated, otherwise an error on failure.
+ */
+static astcenc_error validate_cpu_isa()
+{
+ #if ASTCENC_SSE >= 41
+ if (!cpu_supports_sse41())
+ {
+ return ASTCENC_ERR_BAD_CPU_ISA;
+ }
+ #endif
+
+ #if ASTCENC_POPCNT >= 1
+ if (!cpu_supports_popcnt())
+ {
+ return ASTCENC_ERR_BAD_CPU_ISA;
+ }
+ #endif
+
+ #if ASTCENC_F16C >= 1
+ if (!cpu_supports_f16c())
+ {
+ return ASTCENC_ERR_BAD_CPU_ISA;
+ }
+ #endif
+
+ #if ASTCENC_AVX >= 2
+ if (!cpu_supports_avx2())
+ {
+ return ASTCENC_ERR_BAD_CPU_ISA;
+ }
+ #endif
+
+ return ASTCENC_SUCCESS;
+}
+
+/**
+ * @brief Validate config profile.
+ *
+ * @param profile The profile to check.
+ *
+ * @return Return @c ASTCENC_SUCCESS if validated, otherwise an error on failure.
+ */
+static astcenc_error validate_profile(
+ astcenc_profile profile
+) {
+ // Values in this enum are from an external user, so not guaranteed to be
+ // bounded to the enum values
+ switch (static_cast<int>(profile))
+ {
+ case ASTCENC_PRF_LDR_SRGB:
+ case ASTCENC_PRF_LDR:
+ case ASTCENC_PRF_HDR_RGB_LDR_A:
+ case ASTCENC_PRF_HDR:
+ return ASTCENC_SUCCESS;
+ default:
+ return ASTCENC_ERR_BAD_PROFILE;
+ }
+}
+
+/**
+ * @brief Validate block size.
+ *
+ * @param block_x The block x dimensions.
+ * @param block_y The block y dimensions.
+ * @param block_z The block z dimensions.
+ *
+ * @return Return @c ASTCENC_SUCCESS if validated, otherwise an error on failure.
+ */
+static astcenc_error validate_block_size(
+ unsigned int block_x,
+ unsigned int block_y,
+ unsigned int block_z
+) {
+ // Test if this is a legal block size at all
+ bool is_legal = (((block_z <= 1) && is_legal_2d_block_size(block_x, block_y)) ||
+ ((block_z >= 2) && is_legal_3d_block_size(block_x, block_y, block_z)));
+ if (!is_legal)
+ {
+ return ASTCENC_ERR_BAD_BLOCK_SIZE;
+ }
+
+ // Test if this build has sufficient capacity for this block size
+ bool have_capacity = (block_x * block_y * block_z) <= BLOCK_MAX_TEXELS;
+ if (!have_capacity)
+ {
+ return ASTCENC_ERR_NOT_IMPLEMENTED;
+ }
+
+ return ASTCENC_SUCCESS;
+}
+
+/**
+ * @brief Validate flags.
+ *
+ * @param flags The flags to check.
+ *
+ * @return Return @c ASTCENC_SUCCESS if validated, otherwise an error on failure.
+ */
+static astcenc_error validate_flags(
+ unsigned int flags
+) {
+ // Flags field must not contain any unknown flag bits
+ unsigned int exMask = ~ASTCENC_ALL_FLAGS;
+ if (popcount(flags & exMask) != 0)
+ {
+ return ASTCENC_ERR_BAD_FLAGS;
+ }
+
+ // Flags field must only contain at most a single map type
+ exMask = ASTCENC_FLG_MAP_NORMAL
+ | ASTCENC_FLG_MAP_RGBM;
+ if (popcount(flags & exMask) > 1)
+ {
+ return ASTCENC_ERR_BAD_FLAGS;
+ }
+
+ return ASTCENC_SUCCESS;
+}
+
+#if !defined(ASTCENC_DECOMPRESS_ONLY)
+
+/**
+ * @brief Validate single channel compression swizzle.
+ *
+ * @param swizzle The swizzle to check.
+ *
+ * @return Return @c ASTCENC_SUCCESS if validated, otherwise an error on failure.
+ */
+static astcenc_error validate_compression_swz(
+ astcenc_swz swizzle
+) {
+ // Not all enum values are handled; SWZ_Z is invalid for compression
+ switch (static_cast<int>(swizzle))
+ {
+ case ASTCENC_SWZ_R:
+ case ASTCENC_SWZ_G:
+ case ASTCENC_SWZ_B:
+ case ASTCENC_SWZ_A:
+ case ASTCENC_SWZ_0:
+ case ASTCENC_SWZ_1:
+ return ASTCENC_SUCCESS;
+ default:
+ return ASTCENC_ERR_BAD_SWIZZLE;
+ }
+}
+
+/**
+ * @brief Validate overall compression swizzle.
+ *
+ * @param swizzle The swizzle to check.
+ *
+ * @return Return @c ASTCENC_SUCCESS if validated, otherwise an error on failure.
+ */
+static astcenc_error validate_compression_swizzle(
+ const astcenc_swizzle& swizzle
+) {
+ if (validate_compression_swz(swizzle.r) ||
+ validate_compression_swz(swizzle.g) ||
+ validate_compression_swz(swizzle.b) ||
+ validate_compression_swz(swizzle.a))
+ {
+ return ASTCENC_ERR_BAD_SWIZZLE;
+ }
+
+ return ASTCENC_SUCCESS;
+}
+#endif
+
+/**
+ * @brief Validate single channel decompression swizzle.
+ *
+ * @param swizzle The swizzle to check.
+ *
+ * @return Return @c ASTCENC_SUCCESS if validated, otherwise an error on failure.
+ */
+static astcenc_error validate_decompression_swz(
+ astcenc_swz swizzle
+) {
+ // Values in this enum are from an external user, so not guaranteed to be
+ // bounded to the enum values
+ switch (static_cast<int>(swizzle))
+ {
+ case ASTCENC_SWZ_R:
+ case ASTCENC_SWZ_G:
+ case ASTCENC_SWZ_B:
+ case ASTCENC_SWZ_A:
+ case ASTCENC_SWZ_0:
+ case ASTCENC_SWZ_1:
+ case ASTCENC_SWZ_Z:
+ return ASTCENC_SUCCESS;
+ default:
+ return ASTCENC_ERR_BAD_SWIZZLE;
+ }
+}
+
+/**
+ * @brief Validate overall decompression swizzle.
+ *
+ * @param swizzle The swizzle to check.
+ *
+ * @return Return @c ASTCENC_SUCCESS if validated, otherwise an error on failure.
+ */
+static astcenc_error validate_decompression_swizzle(
+ const astcenc_swizzle& swizzle
+) {
+ if (validate_decompression_swz(swizzle.r) ||
+ validate_decompression_swz(swizzle.g) ||
+ validate_decompression_swz(swizzle.b) ||
+ validate_decompression_swz(swizzle.a))
+ {
+ return ASTCENC_ERR_BAD_SWIZZLE;
+ }
+
+ return ASTCENC_SUCCESS;
+}
+
+/**
+ * Validate that an incoming configuration is in-spec.
+ *
+ * This function can respond in two ways:
+ *
+ * * Numerical inputs that have valid ranges are clamped to those valid ranges. No error is thrown
+ * for out-of-range inputs in this case.
+ * * Numerical inputs and logic inputs that are logically invalid, and which make no sense
+ *   algorithmically, will return an error.
+ *
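+ * For example, a @c tune_block_mode_limit above 100 is simply clamped to 100, whereas setting
+ * all four @c cw_*_weight channel weights to zero returns @c ASTCENC_ERR_BAD_PARAM.
+ *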
+ * @param[in,out] config The input compressor configuration.
+ *
+ * @return Return @c ASTCENC_SUCCESS if validated, otherwise an error on failure.
+ */
+static astcenc_error validate_config(
+ astcenc_config &config
+) {
+ astcenc_error status;
+
+ status = validate_profile(config.profile);
+ if (status != ASTCENC_SUCCESS)
+ {
+ return status;
+ }
+
+ status = validate_flags(config.flags);
+ if (status != ASTCENC_SUCCESS)
+ {
+ return status;
+ }
+
+ status = validate_block_size(config.block_x, config.block_y, config.block_z);
+ if (status != ASTCENC_SUCCESS)
+ {
+ return status;
+ }
+
+#if defined(ASTCENC_DECOMPRESS_ONLY)
+ // Decompress-only builds only support decompress-only contexts
+ if (!(config.flags & ASTCENC_FLG_DECOMPRESS_ONLY))
+ {
+ return ASTCENC_ERR_BAD_PARAM;
+ }
+#endif
+
+ config.rgbm_m_scale = astc::max(config.rgbm_m_scale, 1.0f);
+
+ config.tune_partition_count_limit = astc::clamp(config.tune_partition_count_limit, 1u, 4u);
+ config.tune_2partition_index_limit = astc::clamp(config.tune_2partition_index_limit, 1u, BLOCK_MAX_PARTITIONINGS);
+ config.tune_3partition_index_limit = astc::clamp(config.tune_3partition_index_limit, 1u, BLOCK_MAX_PARTITIONINGS);
+ config.tune_4partition_index_limit = astc::clamp(config.tune_4partition_index_limit, 1u, BLOCK_MAX_PARTITIONINGS);
+ config.tune_block_mode_limit = astc::clamp(config.tune_block_mode_limit, 1u, 100u);
+ config.tune_refinement_limit = astc::max(config.tune_refinement_limit, 1u);
+ config.tune_candidate_limit = astc::clamp(config.tune_candidate_limit, 1u, TUNE_MAX_TRIAL_CANDIDATES);
+ config.tune_2partitioning_candidate_limit = astc::clamp(config.tune_2partitioning_candidate_limit, 1u, TUNE_MAX_PARTITIONING_CANDIDATES);
+ config.tune_3partitioning_candidate_limit = astc::clamp(config.tune_3partitioning_candidate_limit, 1u, TUNE_MAX_PARTITIONING_CANDIDATES);
+ config.tune_4partitioning_candidate_limit = astc::clamp(config.tune_4partitioning_candidate_limit, 1u, TUNE_MAX_PARTITIONING_CANDIDATES);
+ config.tune_db_limit = astc::max(config.tune_db_limit, 0.0f);
+ config.tune_mse_overshoot = astc::max(config.tune_mse_overshoot, 1.0f);
+ config.tune_2_partition_early_out_limit_factor = astc::max(config.tune_2_partition_early_out_limit_factor, 0.0f);
+ config.tune_3_partition_early_out_limit_factor = astc::max(config.tune_3_partition_early_out_limit_factor, 0.0f);
+ config.tune_2_plane_early_out_limit_correlation = astc::max(config.tune_2_plane_early_out_limit_correlation, 0.0f);
+
+ // Specifying a zero weight color component is not allowed; force to small value
+ float max_weight = astc::max(astc::max(config.cw_r_weight, config.cw_g_weight),
+ astc::max(config.cw_b_weight, config.cw_a_weight));
+ if (max_weight > 0.0f)
+ {
+ max_weight /= 1000.0f;
+ config.cw_r_weight = astc::max(config.cw_r_weight, max_weight);
+ config.cw_g_weight = astc::max(config.cw_g_weight, max_weight);
+ config.cw_b_weight = astc::max(config.cw_b_weight, max_weight);
+ config.cw_a_weight = astc::max(config.cw_a_weight, max_weight);
+ }
+	// If all color component error weights are zero then return an error
+ else
+ {
+ return ASTCENC_ERR_BAD_PARAM;
+ }
+
+ return ASTCENC_SUCCESS;
+}
+
+/* See header for documentation. */
+astcenc_error astcenc_config_init(
+ astcenc_profile profile,
+ unsigned int block_x,
+ unsigned int block_y,
+ unsigned int block_z,
+ float quality,
+ unsigned int flags,
+ astcenc_config* configp
+) {
+ astcenc_error status;
+
+ // Check basic library compatibility options here so they are checked early. Note, these checks
+ // are repeated in context_alloc for cases where callers use a manually defined config struct
+ status = validate_cpu_isa();
+ if (status != ASTCENC_SUCCESS)
+ {
+ return status;
+ }
+
+ status = validate_cpu_float();
+ if (status != ASTCENC_SUCCESS)
+ {
+ return status;
+ }
+
+	// Zero init all config fields, although most of them will be overwritten
+ astcenc_config& config = *configp;
+ std::memset(&config, 0, sizeof(config));
+
+ // Process the block size
+ block_z = astc::max(block_z, 1u); // For 2D blocks Z==0 is accepted, but convert to 1
+ status = validate_block_size(block_x, block_y, block_z);
+ if (status != ASTCENC_SUCCESS)
+ {
+ return status;
+ }
+
+ config.block_x = block_x;
+ config.block_y = block_y;
+ config.block_z = block_z;
+
+ float texels = static_cast<float>(block_x * block_y * block_z);
+ float ltexels = logf(texels) / logf(10.0f);
+
+ // Process the performance quality level or preset; note that this must be done before we
+ // process any additional settings, such as color profile and flags, which may replace some of
+ // these settings with more use case tuned values
+ if (quality < ASTCENC_PRE_FASTEST ||
+ quality > ASTCENC_PRE_EXHAUSTIVE)
+ {
+ return ASTCENC_ERR_BAD_QUALITY;
+ }
+
+ static const std::array<astcenc_preset_config, 6>* preset_configs;
+ int texels_int = block_x * block_y * block_z;
+ if (texels_int < 25)
+ {
+ preset_configs = &preset_configs_high;
+ }
+ else if (texels_int < 64)
+ {
+ preset_configs = &preset_configs_mid;
+ }
+ else
+ {
+ preset_configs = &preset_configs_low;
+ }
+
+ // Determine which preset to use, or which pair to interpolate
+ size_t start;
+ size_t end;
+ for (end = 0; end < preset_configs->size(); end++)
+ {
+ if ((*preset_configs)[end].quality >= quality)
+ {
+ break;
+ }
+ }
+
+ start = end == 0 ? 0 : end - 1;
+
+ // Start and end node are the same - so just transfer the values.
+ if (start == end)
+ {
+ config.tune_partition_count_limit = (*preset_configs)[start].tune_partition_count_limit;
+ config.tune_2partition_index_limit = (*preset_configs)[start].tune_2partition_index_limit;
+ config.tune_3partition_index_limit = (*preset_configs)[start].tune_3partition_index_limit;
+ config.tune_4partition_index_limit = (*preset_configs)[start].tune_4partition_index_limit;
+ config.tune_block_mode_limit = (*preset_configs)[start].tune_block_mode_limit;
+ config.tune_refinement_limit = (*preset_configs)[start].tune_refinement_limit;
+ config.tune_candidate_limit = astc::min((*preset_configs)[start].tune_candidate_limit, TUNE_MAX_TRIAL_CANDIDATES);
+ config.tune_2partitioning_candidate_limit = astc::min((*preset_configs)[start].tune_2partitioning_candidate_limit, TUNE_MAX_PARTITIONING_CANDIDATES);
+ config.tune_3partitioning_candidate_limit = astc::min((*preset_configs)[start].tune_3partitioning_candidate_limit, TUNE_MAX_PARTITIONING_CANDIDATES);
+ config.tune_4partitioning_candidate_limit = astc::min((*preset_configs)[start].tune_4partitioning_candidate_limit, TUNE_MAX_PARTITIONING_CANDIDATES);
+ config.tune_db_limit = astc::max((*preset_configs)[start].tune_db_limit_a_base - 35 * ltexels,
+ (*preset_configs)[start].tune_db_limit_b_base - 19 * ltexels);
+
+ config.tune_mse_overshoot = (*preset_configs)[start].tune_mse_overshoot;
+
+ config.tune_2_partition_early_out_limit_factor = (*preset_configs)[start].tune_2_partition_early_out_limit_factor;
+		config.tune_3_partition_early_out_limit_factor = (*preset_configs)[start].tune_3_partition_early_out_limit_factor;
+ config.tune_2_plane_early_out_limit_correlation = (*preset_configs)[start].tune_2_plane_early_out_limit_correlation;
+ }
+ // Start and end node are not the same - so interpolate between them
+ else
+ {
+ auto& node_a = (*preset_configs)[start];
+ auto& node_b = (*preset_configs)[end];
+
+ float wt_range = node_b.quality - node_a.quality;
+ assert(wt_range > 0);
+
+ // Compute interpolation factors
+ float wt_node_a = (node_b.quality - quality) / wt_range;
+ float wt_node_b = (quality - node_a.quality) / wt_range;
+
+ #define LERP(param) ((node_a.param * wt_node_a) + (node_b.param * wt_node_b))
+ #define LERPI(param) astc::flt2int_rtn(\
+ (static_cast<float>(node_a.param) * wt_node_a) + \
+ (static_cast<float>(node_b.param) * wt_node_b))
+ #define LERPUI(param) static_cast<unsigned int>(LERPI(param))
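+
+		// Worked example (illustrative, with made-up numbers): if node_a.quality == 10 and
+		// node_b.quality == 60 and the requested quality is 30, then wt_node_a = 0.6 and
+		// wt_node_b = 0.4, so a LERPI() of block mode limits 55 and 77 evaluates to
+		// astc::flt2int_rtn(55 * 0.6f + 77 * 0.4f) = 64.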
+
+ config.tune_partition_count_limit = LERPI(tune_partition_count_limit);
+ config.tune_2partition_index_limit = LERPI(tune_2partition_index_limit);
+ config.tune_3partition_index_limit = LERPI(tune_3partition_index_limit);
+ config.tune_4partition_index_limit = LERPI(tune_4partition_index_limit);
+ config.tune_block_mode_limit = LERPI(tune_block_mode_limit);
+ config.tune_refinement_limit = LERPI(tune_refinement_limit);
+ config.tune_candidate_limit = astc::min(LERPUI(tune_candidate_limit),
+ TUNE_MAX_TRIAL_CANDIDATES);
+ config.tune_2partitioning_candidate_limit = astc::min(LERPUI(tune_2partitioning_candidate_limit),
+ BLOCK_MAX_PARTITIONINGS);
+ config.tune_3partitioning_candidate_limit = astc::min(LERPUI(tune_3partitioning_candidate_limit),
+ BLOCK_MAX_PARTITIONINGS);
+ config.tune_4partitioning_candidate_limit = astc::min(LERPUI(tune_4partitioning_candidate_limit),
+ BLOCK_MAX_PARTITIONINGS);
+ config.tune_db_limit = astc::max(LERP(tune_db_limit_a_base) - 35 * ltexels,
+ LERP(tune_db_limit_b_base) - 19 * ltexels);
+
+ config.tune_mse_overshoot = LERP(tune_mse_overshoot);
+
+ config.tune_2_partition_early_out_limit_factor = LERP(tune_2_partition_early_out_limit_factor);
+ config.tune_3_partition_early_out_limit_factor = LERP(tune_3_partition_early_out_limit_factor);
+ config.tune_2_plane_early_out_limit_correlation = LERP(tune_2_plane_early_out_limit_correlation);
+ #undef LERP
+ #undef LERPI
+ #undef LERPUI
+ }
+
+ // Set heuristics to the defaults for each color profile
+ config.cw_r_weight = 1.0f;
+ config.cw_g_weight = 1.0f;
+ config.cw_b_weight = 1.0f;
+ config.cw_a_weight = 1.0f;
+
+ config.a_scale_radius = 0;
+
+ config.rgbm_m_scale = 0.0f;
+
+ config.profile = profile;
+
+ // Values in this enum are from an external user, so not guaranteed to be
+ // bounded to the enum values
+ switch (static_cast<int>(profile))
+ {
+ case ASTCENC_PRF_LDR:
+ case ASTCENC_PRF_LDR_SRGB:
+ break;
+ case ASTCENC_PRF_HDR_RGB_LDR_A:
+ case ASTCENC_PRF_HDR:
+ config.tune_db_limit = 999.0f;
+ break;
+ default:
+ return ASTCENC_ERR_BAD_PROFILE;
+ }
+
+ // Flags field must not contain any unknown flag bits
+ status = validate_flags(flags);
+ if (status != ASTCENC_SUCCESS)
+ {
+ return status;
+ }
+
+ if (flags & ASTCENC_FLG_MAP_NORMAL)
+ {
+ // Normal map encoding uses L+A blocks, so allow one more partitioning
+		// than normal. We need fewer bits for endpoints, so we are more likely
+ // to be able to use more partitions than an RGB/RGBA block
+ config.tune_partition_count_limit = astc::min(config.tune_partition_count_limit + 1u, 4u);
+
+ config.cw_g_weight = 0.0f;
+ config.cw_b_weight = 0.0f;
+ config.tune_2_partition_early_out_limit_factor *= 1.5f;
+ config.tune_3_partition_early_out_limit_factor *= 1.5f;
+ config.tune_2_plane_early_out_limit_correlation = 0.99f;
+
+ // Normals are prone to blocking artifacts on smooth curves
+ // so force compressor to try harder here ...
+ config.tune_db_limit *= 1.03f;
+ }
+ else if (flags & ASTCENC_FLG_MAP_RGBM)
+ {
+ config.rgbm_m_scale = 5.0f;
+ config.cw_a_weight = 2.0f * config.rgbm_m_scale;
+ }
+ else // (This is color data)
+ {
+ // This is a very basic perceptual metric for RGB color data, which weights error
+ // significance by the perceptual luminance contribution of each color channel. For
+ // luminance the usual weights to compute luminance from a linear RGB value are as
+ // follows:
+ //
+ // l = r * 0.3 + g * 0.59 + b * 0.11
+ //
+ // ... but we scale these up to keep a better balance between color and alpha. Note
+ // that if the content is using alpha we'd recommend using the -a option to weight
+ // the color contribution by the alpha transparency.
+ if (flags & ASTCENC_FLG_USE_PERCEPTUAL)
+ {
+ config.cw_r_weight = 0.30f * 2.25f;
+ config.cw_g_weight = 0.59f * 2.25f;
+ config.cw_b_weight = 0.11f * 2.25f;
+ }
+ }
+ config.flags = flags;
+
+ return ASTCENC_SUCCESS;
+}
+
+/* See header for documentation. */
+astcenc_error astcenc_context_alloc(
+ const astcenc_config* configp,
+ unsigned int thread_count,
+ astcenc_context** context
+) {
+ astcenc_error status;
+ const astcenc_config& config = *configp;
+
+ status = validate_cpu_isa();
+ if (status != ASTCENC_SUCCESS)
+ {
+ return status;
+ }
+
+ status = validate_cpu_float();
+ if (status != ASTCENC_SUCCESS)
+ {
+ return status;
+ }
+
+ if (thread_count == 0)
+ {
+ return ASTCENC_ERR_BAD_PARAM;
+ }
+
+#if defined(ASTCENC_DIAGNOSTICS)
+ // Force single threaded compressor use in diagnostic mode.
+ if (thread_count != 1)
+ {
+ return ASTCENC_ERR_BAD_PARAM;
+ }
+#endif
+
+ astcenc_context* ctxo = new astcenc_context;
+ astcenc_contexti* ctx = &ctxo->context;
+ ctx->thread_count = thread_count;
+ ctx->config = config;
+ ctx->working_buffers = nullptr;
+
+ // These are allocated per-compress, as they depend on image size
+ ctx->input_alpha_averages = nullptr;
+
+ // Copy the config first and validate the copy (we may modify it)
+ status = validate_config(ctx->config);
+ if (status != ASTCENC_SUCCESS)
+ {
+ delete ctxo;
+ return status;
+ }
+
+ ctx->bsd = aligned_malloc<block_size_descriptor>(sizeof(block_size_descriptor), ASTCENC_VECALIGN);
+ bool can_omit_modes = static_cast<bool>(config.flags & ASTCENC_FLG_SELF_DECOMPRESS_ONLY);
+ init_block_size_descriptor(config.block_x, config.block_y, config.block_z,
+ can_omit_modes,
+ config.tune_partition_count_limit,
+ static_cast<float>(config.tune_block_mode_limit) / 100.0f,
+ *ctx->bsd);
+
+#if !defined(ASTCENC_DECOMPRESS_ONLY)
+ // Do setup only needed by compression
+	if (!(config.flags & ASTCENC_FLG_DECOMPRESS_ONLY))
+ {
+ // Turn a dB limit into a per-texel error for faster use later
+ if ((ctx->config.profile == ASTCENC_PRF_LDR) || (ctx->config.profile == ASTCENC_PRF_LDR_SRGB))
+ {
+ ctx->config.tune_db_limit = astc::pow(0.1f, ctx->config.tune_db_limit * 0.1f) * 65535.0f * 65535.0f;
+ }
+ else
+ {
+ ctx->config.tune_db_limit = 0.0f;
+ }
+
+ size_t worksize = sizeof(compression_working_buffers) * thread_count;
+ ctx->working_buffers = aligned_malloc<compression_working_buffers>(worksize, ASTCENC_VECALIGN);
+ static_assert((sizeof(compression_working_buffers) % ASTCENC_VECALIGN) == 0,
+ "compression_working_buffers size must be multiple of vector alignment");
+ if (!ctx->working_buffers)
+ {
+ aligned_free<block_size_descriptor>(ctx->bsd);
+ delete ctxo;
+ *context = nullptr;
+ return ASTCENC_ERR_OUT_OF_MEM;
+ }
+ }
+#endif
+
+#if defined(ASTCENC_DIAGNOSTICS)
+ ctx->trace_log = new TraceLog(ctx->config.trace_file_path);
+ if (!ctx->trace_log->m_file)
+ {
+ return ASTCENC_ERR_DTRACE_FAILURE;
+ }
+
+ trace_add_data("block_x", config.block_x);
+ trace_add_data("block_y", config.block_y);
+ trace_add_data("block_z", config.block_z);
+#endif
+
+ *context = ctxo;
+
+#if !defined(ASTCENC_DECOMPRESS_ONLY)
+ prepare_angular_tables();
+#endif
+
+ return ASTCENC_SUCCESS;
+}
+
+/* See header for documentation. */
+void astcenc_context_free(
+ astcenc_context* ctxo
+) {
+ if (ctxo)
+ {
+ astcenc_contexti* ctx = &ctxo->context;
+ aligned_free<compression_working_buffers>(ctx->working_buffers);
+ aligned_free<block_size_descriptor>(ctx->bsd);
+#if defined(ASTCENC_DIAGNOSTICS)
+ delete ctx->trace_log;
+#endif
+ delete ctxo;
+ }
+}
+
+#if !defined(ASTCENC_DECOMPRESS_ONLY)
+
+/**
+ * @brief Compress an image, after any preflight has completed.
+ *
+ * @param[out] ctxo The compressor context.
+ * @param thread_index The thread index.
+ * @param        image         The input image.
+ * @param swizzle The input swizzle.
+ * @param[out] buffer The output array for the compressed data.
+ */
+static void compress_image(
+ astcenc_context& ctxo,
+ unsigned int thread_index,
+ const astcenc_image& image,
+ const astcenc_swizzle& swizzle,
+ uint8_t* buffer
+) {
+ astcenc_contexti& ctx = ctxo.context;
+ const block_size_descriptor& bsd = *ctx.bsd;
+ astcenc_profile decode_mode = ctx.config.profile;
+
+ image_block blk;
+
+ int block_x = bsd.xdim;
+ int block_y = bsd.ydim;
+ int block_z = bsd.zdim;
+ blk.texel_count = static_cast<uint8_t>(block_x * block_y * block_z);
+
+ int dim_x = image.dim_x;
+ int dim_y = image.dim_y;
+ int dim_z = image.dim_z;
+
+ int xblocks = (dim_x + block_x - 1) / block_x;
+ int yblocks = (dim_y + block_y - 1) / block_y;
+ int zblocks = (dim_z + block_z - 1) / block_z;
+ int block_count = zblocks * yblocks * xblocks;
+
+ int row_blocks = xblocks;
+ int plane_blocks = xblocks * yblocks;
+
+ // Populate the block channel weights
+ blk.channel_weight = vfloat4(ctx.config.cw_r_weight,
+ ctx.config.cw_g_weight,
+ ctx.config.cw_b_weight,
+ ctx.config.cw_a_weight);
+
+ // Use preallocated scratch buffer
+ auto& temp_buffers = ctx.working_buffers[thread_index];
+
+ // Only the first thread actually runs the initializer
+ ctxo.manage_compress.init(block_count);
+
+ // Determine if we can use an optimized load function
+ bool needs_swz = (swizzle.r != ASTCENC_SWZ_R) || (swizzle.g != ASTCENC_SWZ_G) ||
+ (swizzle.b != ASTCENC_SWZ_B) || (swizzle.a != ASTCENC_SWZ_A);
+
+ bool needs_hdr = (decode_mode == ASTCENC_PRF_HDR) ||
+ (decode_mode == ASTCENC_PRF_HDR_RGB_LDR_A);
+
+ bool use_fast_load = !needs_swz && !needs_hdr &&
+ block_z == 1 && image.data_type == ASTCENC_TYPE_U8;
+
+ auto load_func = load_image_block;
+ if (use_fast_load)
+ {
+ load_func = load_image_block_fast_ldr;
+ }
+
+ // All threads run this processing loop until there is no work remaining
+ while (true)
+ {
+ unsigned int count;
+ unsigned int base = ctxo.manage_compress.get_task_assignment(16, count);
+ if (!count)
+ {
+ break;
+ }
+
+ for (unsigned int i = base; i < base + count; i++)
+ {
+ // Decode i into x, y, z block indices
+ int z = i / plane_blocks;
+ unsigned int rem = i - (z * plane_blocks);
+ int y = rem / row_blocks;
+ int x = rem - (y * row_blocks);
+
+ // Test if we can apply some basic alpha-scale RDO
+ bool use_full_block = true;
+ if (ctx.config.a_scale_radius != 0 && block_z == 1)
+ {
+ int start_x = x * block_x;
+ int end_x = astc::min(dim_x, start_x + block_x);
+
+ int start_y = y * block_y;
+ int end_y = astc::min(dim_y, start_y + block_y);
+
+ // SATs accumulate error, so don't test exactly zero. Test for
+ // less than 1 alpha in the expanded block footprint that
+ // includes the alpha radius.
+ int x_footprint = block_x + 2 * (ctx.config.a_scale_radius - 1);
+
+ int y_footprint = block_y + 2 * (ctx.config.a_scale_radius - 1);
+
+ float footprint = static_cast<float>(x_footprint * y_footprint);
+ float threshold = 0.9f / (255.0f * footprint);
+
+ // Do we have any alpha values?
+ use_full_block = false;
+ for (int ay = start_y; ay < end_y; ay++)
+ {
+ for (int ax = start_x; ax < end_x; ax++)
+ {
+ float a_avg = ctx.input_alpha_averages[ay * dim_x + ax];
+ if (a_avg > threshold)
+ {
+ use_full_block = true;
+ ax = end_x;
+ ay = end_y;
+ }
+ }
+ }
+ }
+
+ // Fetch the full block for compression
+ if (use_full_block)
+ {
+ load_func(decode_mode, image, blk, bsd, x * block_x, y * block_y, z * block_z, swizzle);
+
+ // Scale RGB error contribution by the maximum alpha in the block
+ // This encourages preserving alpha accuracy in regions with high
+ // transparency, and can buy up to 0.5 dB PSNR.
+ if (ctx.config.flags & ASTCENC_FLG_USE_ALPHA_WEIGHT)
+ {
+ float alpha_scale = blk.data_max.lane<3>() * (1.0f / 65535.0f);
+ blk.channel_weight = vfloat4(ctx.config.cw_r_weight * alpha_scale,
+ ctx.config.cw_g_weight * alpha_scale,
+ ctx.config.cw_b_weight * alpha_scale,
+ ctx.config.cw_a_weight);
+ }
+ }
+ // Apply alpha scale RDO - substitute constant color block
+ else
+ {
+ blk.origin_texel = vfloat4::zero();
+ blk.data_min = vfloat4::zero();
+ blk.data_mean = vfloat4::zero();
+ blk.data_max = vfloat4::zero();
+ blk.grayscale = true;
+ }
+
+ int offset = ((z * yblocks + y) * xblocks + x) * 16;
+ uint8_t *bp = buffer + offset;
+ physical_compressed_block* pcb = reinterpret_cast<physical_compressed_block*>(bp);
+ compress_block(ctx, blk, *pcb, temp_buffers);
+ }
+
+ ctxo.manage_compress.complete_task_assignment(count);
+ }
+}
+
+/**
+ * @brief Compute regional averages in an image.
+ *
+ * This function can be called by multiple threads, but only after a single
+ * thread calls the setup function @c init_compute_averages().
+ *
+ * Results are written back into the context's @c input_alpha_averages buffer.
+ *
+ * @param[out] ctx The context.
+ * @param ag The average and variance arguments created during setup.
+ */
+static void compute_averages(
+ astcenc_context& ctx,
+ const avg_args &ag
+) {
+ pixel_region_args arg = ag.arg;
+ arg.work_memory = new vfloat4[ag.work_memory_size];
+
+ int size_x = ag.img_size_x;
+ int size_y = ag.img_size_y;
+ int size_z = ag.img_size_z;
+
+ int step_xy = ag.blk_size_xy;
+ int step_z = ag.blk_size_z;
+
+ int y_tasks = (size_y + step_xy - 1) / step_xy;
+
+ // All threads run this processing loop until there is no work remaining
+ while (true)
+ {
+ unsigned int count;
+ unsigned int base = ctx.manage_avg.get_task_assignment(16, count);
+ if (!count)
+ {
+ break;
+ }
+
+ for (unsigned int i = base; i < base + count; i++)
+ {
+ int z = (i / (y_tasks)) * step_z;
+ int y = (i - (z * y_tasks)) * step_xy;
+
+ arg.size_z = astc::min(step_z, size_z - z);
+ arg.offset_z = z;
+
+ arg.size_y = astc::min(step_xy, size_y - y);
+ arg.offset_y = y;
+
+ for (int x = 0; x < size_x; x += step_xy)
+ {
+ arg.size_x = astc::min(step_xy, size_x - x);
+ arg.offset_x = x;
+ compute_pixel_region_variance(ctx.context, arg);
+ }
+ }
+
+ ctx.manage_avg.complete_task_assignment(count);
+ }
+
+ delete[] arg.work_memory;
+}
+
+#endif
+
+/* See header for documentation. */
+astcenc_error astcenc_compress_image(
+ astcenc_context* ctxo,
+ astcenc_image* imagep,
+ const astcenc_swizzle* swizzle,
+ uint8_t* data_out,
+ size_t data_len,
+ unsigned int thread_index
+) {
+#if defined(ASTCENC_DECOMPRESS_ONLY)
+ (void)ctxo;
+ (void)imagep;
+ (void)swizzle;
+ (void)data_out;
+ (void)data_len;
+ (void)thread_index;
+ return ASTCENC_ERR_BAD_CONTEXT;
+#else
+ astcenc_contexti* ctx = &ctxo->context;
+ astcenc_error status;
+ astcenc_image& image = *imagep;
+
+ if (ctx->config.flags & ASTCENC_FLG_DECOMPRESS_ONLY)
+ {
+ return ASTCENC_ERR_BAD_CONTEXT;
+ }
+
+ status = validate_compression_swizzle(*swizzle);
+ if (status != ASTCENC_SUCCESS)
+ {
+ return status;
+ }
+
+ if (thread_index >= ctx->thread_count)
+ {
+ return ASTCENC_ERR_BAD_PARAM;
+ }
+
+ unsigned int block_x = ctx->config.block_x;
+ unsigned int block_y = ctx->config.block_y;
+ unsigned int block_z = ctx->config.block_z;
+
+ unsigned int xblocks = (image.dim_x + block_x - 1) / block_x;
+ unsigned int yblocks = (image.dim_y + block_y - 1) / block_y;
+ unsigned int zblocks = (image.dim_z + block_z - 1) / block_z;
+
+ // Check we have enough output space (16 bytes per block)
+ size_t size_needed = xblocks * yblocks * zblocks * 16;
+ if (data_len < size_needed)
+ {
+ return ASTCENC_ERR_OUT_OF_MEM;
+ }
+
+ // If context thread count is one then implicitly reset
+ if (ctx->thread_count == 1)
+ {
+ astcenc_compress_reset(ctxo);
+ }
+
+ if (ctx->config.a_scale_radius != 0)
+ {
+ // First thread to enter will do setup, other threads will subsequently
+ // enter the critical section but simply skip over the initialization
+ auto init_avg = [ctx, &image, swizzle]() {
+ // Perform memory allocations for the destination buffers
+ size_t texel_count = image.dim_x * image.dim_y * image.dim_z;
+ ctx->input_alpha_averages = new float[texel_count];
+
+ return init_compute_averages(
+ image, ctx->config.a_scale_radius, *swizzle,
+ ctx->avg_preprocess_args);
+ };
+
+ // Only the first thread actually runs the initializer
+ ctxo->manage_avg.init(init_avg);
+
+ // All threads will enter this function and dynamically grab work
+ compute_averages(*ctxo, ctx->avg_preprocess_args);
+ }
+
+ // Wait for compute_averages to complete before compressing
+ ctxo->manage_avg.wait();
+
+ compress_image(*ctxo, thread_index, image, *swizzle, data_out);
+
+ // Wait for compress to complete before freeing memory
+ ctxo->manage_compress.wait();
+
+ auto term_compress = [ctx]() {
+ delete[] ctx->input_alpha_averages;
+ ctx->input_alpha_averages = nullptr;
+ };
+
+ // Only the first thread to arrive actually runs the term
+ ctxo->manage_compress.term(term_compress);
+
+ return ASTCENC_SUCCESS;
+#endif
+}
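+
+// Minimal single-threaded usage sketch for the entry points above (illustrative only,
+// not part of the upstream library; error handling omitted, hypothetical buffer names,
+// and the astcenc_image field layout is assumed to match the declarations in astcenc.h):
+//
+//     astcenc_config config;
+//     astcenc_config_init(ASTCENC_PRF_LDR, 4, 4, 1, ASTCENC_PRE_MEDIUM, 0, &config);
+//
+//     astcenc_context* context;
+//     astcenc_context_alloc(&config, 1, &context);
+//
+//     // 8-bit RGBA pixel data for a single 2D slice (hypothetical buffer)
+//     void* slices[1] = { pixel_data };
+//     astcenc_image image {};
+//     image.dim_x = width;
+//     image.dim_y = height;
+//     image.dim_z = 1;
+//     image.data_type = ASTCENC_TYPE_U8;
+//     image.data = slices;
+//
+//     astcenc_swizzle swizzle { ASTCENC_SWZ_R, ASTCENC_SWZ_G, ASTCENC_SWZ_B, ASTCENC_SWZ_A };
+//     astcenc_compress_image(context, &image, &swizzle, output, output_len, 0);
+//     astcenc_context_free(context);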
+
+/* See header for documentation. */
+astcenc_error astcenc_compress_reset(
+ astcenc_context* ctxo
+) {
+#if defined(ASTCENC_DECOMPRESS_ONLY)
+ (void)ctxo;
+ return ASTCENC_ERR_BAD_CONTEXT;
+#else
+ astcenc_contexti* ctx = &ctxo->context;
+ if (ctx->config.flags & ASTCENC_FLG_DECOMPRESS_ONLY)
+ {
+ return ASTCENC_ERR_BAD_CONTEXT;
+ }
+
+ ctxo->manage_avg.reset();
+ ctxo->manage_compress.reset();
+ return ASTCENC_SUCCESS;
+#endif
+}
+
+/* See header for documentation. */
+astcenc_error astcenc_decompress_image(
+ astcenc_context* ctxo,
+ const uint8_t* data,
+ size_t data_len,
+ astcenc_image* image_outp,
+ const astcenc_swizzle* swizzle,
+ unsigned int thread_index
+) {
+ astcenc_error status;
+ astcenc_image& image_out = *image_outp;
+ astcenc_contexti* ctx = &ctxo->context;
+
+ // Today this doesn't matter (working set on stack) but might in future ...
+ if (thread_index >= ctx->thread_count)
+ {
+ return ASTCENC_ERR_BAD_PARAM;
+ }
+
+ status = validate_decompression_swizzle(*swizzle);
+ if (status != ASTCENC_SUCCESS)
+ {
+ return status;
+ }
+
+ unsigned int block_x = ctx->config.block_x;
+ unsigned int block_y = ctx->config.block_y;
+ unsigned int block_z = ctx->config.block_z;
+
+ unsigned int xblocks = (image_out.dim_x + block_x - 1) / block_x;
+ unsigned int yblocks = (image_out.dim_y + block_y - 1) / block_y;
+ unsigned int zblocks = (image_out.dim_z + block_z - 1) / block_z;
+
+ int row_blocks = xblocks;
+ int plane_blocks = xblocks * yblocks;
+
+	// Check we have enough input data (16 bytes per block)
+ size_t size_needed = xblocks * yblocks * zblocks * 16;
+ if (data_len < size_needed)
+ {
+ return ASTCENC_ERR_OUT_OF_MEM;
+ }
+
+ image_block blk;
+ blk.texel_count = static_cast<uint8_t>(block_x * block_y * block_z);
+
+ // If context thread count is one then implicitly reset
+ if (ctx->thread_count == 1)
+ {
+ astcenc_decompress_reset(ctxo);
+ }
+
+ // Only the first thread actually runs the initializer
+ ctxo->manage_decompress.init(zblocks * yblocks * xblocks);
+
+ // All threads run this processing loop until there is no work remaining
+ while (true)
+ {
+ unsigned int count;
+ unsigned int base = ctxo->manage_decompress.get_task_assignment(128, count);
+ if (!count)
+ {
+ break;
+ }
+
+ for (unsigned int i = base; i < base + count; i++)
+ {
+ // Decode i into x, y, z block indices
+ int z = i / plane_blocks;
+ unsigned int rem = i - (z * plane_blocks);
+ int y = rem / row_blocks;
+ int x = rem - (y * row_blocks);
+
+ unsigned int offset = (((z * yblocks + y) * xblocks) + x) * 16;
+ const uint8_t* bp = data + offset;
+
+ const physical_compressed_block& pcb = *reinterpret_cast<const physical_compressed_block*>(bp);
+ symbolic_compressed_block scb;
+
+ physical_to_symbolic(*ctx->bsd, pcb, scb);
+
+ decompress_symbolic_block(ctx->config.profile, *ctx->bsd,
+ x * block_x, y * block_y, z * block_z,
+ scb, blk);
+
+ store_image_block(image_out, blk, *ctx->bsd,
+ x * block_x, y * block_y, z * block_z, *swizzle);
+ }
+
+ ctxo->manage_decompress.complete_task_assignment(count);
+ }
+
+ return ASTCENC_SUCCESS;
+}
+
+/* See header for documentation. */
+astcenc_error astcenc_decompress_reset(
+ astcenc_context* ctxo
+) {
+ ctxo->manage_decompress.reset();
+ return ASTCENC_SUCCESS;
+}
+
+/* See header for documentation. */
+astcenc_error astcenc_get_block_info(
+ astcenc_context* ctxo,
+ const uint8_t data[16],
+ astcenc_block_info* info
+) {
+#if defined(ASTCENC_DECOMPRESS_ONLY)
+ (void)ctxo;
+ (void)data;
+ (void)info;
+ return ASTCENC_ERR_BAD_CONTEXT;
+#else
+ astcenc_contexti* ctx = &ctxo->context;
+
+ // Decode the compressed data into a symbolic form
+	const physical_compressed_block& pcb = *reinterpret_cast<const physical_compressed_block*>(data);
+ symbolic_compressed_block scb;
+ physical_to_symbolic(*ctx->bsd, pcb, scb);
+
+ // Fetch the appropriate partition and decimation tables
+ block_size_descriptor& bsd = *ctx->bsd;
+
+ // Start from a clean slate
+ memset(info, 0, sizeof(*info));
+
+ // Basic info we can always populate
+ info->profile = ctx->config.profile;
+
+ info->block_x = ctx->config.block_x;
+ info->block_y = ctx->config.block_y;
+ info->block_z = ctx->config.block_z;
+ info->texel_count = bsd.texel_count;
+
+ // Check for error blocks first
+ info->is_error_block = scb.block_type == SYM_BTYPE_ERROR;
+ if (info->is_error_block)
+ {
+ return ASTCENC_SUCCESS;
+ }
+
+ // Check for constant color blocks second
+ info->is_constant_block = scb.block_type == SYM_BTYPE_CONST_F16 ||
+ scb.block_type == SYM_BTYPE_CONST_U16;
+ if (info->is_constant_block)
+ {
+ return ASTCENC_SUCCESS;
+ }
+
+	// Otherwise handle a full block; known to be valid after the conditions above have been checked
+ int partition_count = scb.partition_count;
+ const auto& pi = bsd.get_partition_info(partition_count, scb.partition_index);
+
+ const block_mode& bm = bsd.get_block_mode(scb.block_mode);
+ const decimation_info& di = bsd.get_decimation_info(bm.decimation_mode);
+
+ info->weight_x = di.weight_x;
+ info->weight_y = di.weight_y;
+ info->weight_z = di.weight_z;
+
+ info->is_dual_plane_block = bm.is_dual_plane != 0;
+
+ info->partition_count = scb.partition_count;
+ info->partition_index = scb.partition_index;
+ info->dual_plane_component = scb.plane2_component;
+
+ info->color_level_count = get_quant_level(scb.get_color_quant_mode());
+ info->weight_level_count = get_quant_level(bm.get_weight_quant_mode());
+
+ // Unpack color endpoints for each active partition
+ for (unsigned int i = 0; i < scb.partition_count; i++)
+ {
+ bool rgb_hdr;
+ bool a_hdr;
+ vint4 endpnt[2];
+
+ unpack_color_endpoints(ctx->config.profile,
+ scb.color_formats[i],
+ scb.color_values[i],
+ rgb_hdr, a_hdr,
+ endpnt[0], endpnt[1]);
+
+ // Store the color endpoint mode info
+ info->color_endpoint_modes[i] = scb.color_formats[i];
+ info->is_hdr_block = info->is_hdr_block || rgb_hdr || a_hdr;
+
+ // Store the unpacked and decoded color endpoint
+ vmask4 hdr_mask(rgb_hdr, rgb_hdr, rgb_hdr, a_hdr);
+ for (int j = 0; j < 2; j++)
+ {
+ vint4 color_lns = lns_to_sf16(endpnt[j]);
+ vint4 color_unorm = unorm16_to_sf16(endpnt[j]);
+ vint4 datai = select(color_unorm, color_lns, hdr_mask);
+ store(float16_to_float(datai), info->color_endpoints[i][j]);
+ }
+ }
+
+ // Unpack weights for each texel
+ int weight_plane1[BLOCK_MAX_TEXELS];
+ int weight_plane2[BLOCK_MAX_TEXELS];
+
+ unpack_weights(bsd, scb, di, bm.is_dual_plane, weight_plane1, weight_plane2);
+ for (unsigned int i = 0; i < bsd.texel_count; i++)
+ {
+ info->weight_values_plane1[i] = static_cast<float>(weight_plane1[i]) * (1.0f / WEIGHTS_TEXEL_SUM);
+ if (info->is_dual_plane_block)
+ {
+ info->weight_values_plane2[i] = static_cast<float>(weight_plane2[i]) * (1.0f / WEIGHTS_TEXEL_SUM);
+ }
+ }
+
+ // Unpack partition assignments for each texel
+ for (unsigned int i = 0; i < bsd.texel_count; i++)
+ {
+ info->partition_assignment[i] = pi.partition_of_texel[i];
+ }
+
+ return ASTCENC_SUCCESS;
+#endif
+}
+
+/* See header for documentation. */
+const char* astcenc_get_error_string(
+ astcenc_error status
+) {
+ // Values in this enum are from an external user, so not guaranteed to be
+ // bounded to the enum values
+ switch (static_cast<int>(status))
+ {
+ case ASTCENC_SUCCESS:
+ return "ASTCENC_SUCCESS";
+ case ASTCENC_ERR_OUT_OF_MEM:
+ return "ASTCENC_ERR_OUT_OF_MEM";
+ case ASTCENC_ERR_BAD_CPU_FLOAT:
+ return "ASTCENC_ERR_BAD_CPU_FLOAT";
+ case ASTCENC_ERR_BAD_CPU_ISA:
+ return "ASTCENC_ERR_BAD_CPU_ISA";
+ case ASTCENC_ERR_BAD_PARAM:
+ return "ASTCENC_ERR_BAD_PARAM";
+ case ASTCENC_ERR_BAD_BLOCK_SIZE:
+ return "ASTCENC_ERR_BAD_BLOCK_SIZE";
+ case ASTCENC_ERR_BAD_PROFILE:
+ return "ASTCENC_ERR_BAD_PROFILE";
+ case ASTCENC_ERR_BAD_QUALITY:
+ return "ASTCENC_ERR_BAD_QUALITY";
+ case ASTCENC_ERR_BAD_FLAGS:
+ return "ASTCENC_ERR_BAD_FLAGS";
+ case ASTCENC_ERR_BAD_SWIZZLE:
+ return "ASTCENC_ERR_BAD_SWIZZLE";
+ case ASTCENC_ERR_BAD_CONTEXT:
+ return "ASTCENC_ERR_BAD_CONTEXT";
+ case ASTCENC_ERR_NOT_IMPLEMENTED:
+ return "ASTCENC_ERR_NOT_IMPLEMENTED";
+#if defined(ASTCENC_DIAGNOSTICS)
+ case ASTCENC_ERR_DTRACE_FAILURE:
+ return "ASTCENC_ERR_DTRACE_FAILURE";
+#endif
+ default:
+ return nullptr;
+ }
+}
diff --git a/thirdparty/astcenc/astcenc_find_best_partitioning.cpp b/thirdparty/astcenc/astcenc_find_best_partitioning.cpp
new file mode 100644
index 0000000000..ffde3c7060
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_find_best_partitioning.cpp
@@ -0,0 +1,780 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2011-2023 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+#if !defined(ASTCENC_DECOMPRESS_ONLY)
+
+/**
+ * @brief Functions for finding best partition for a block.
+ *
+ * The partition search operates in two stages. The first pass uses kmeans clustering to group
+ * texels into an ideal partitioning for the requested partition count, and then compares that
+ * against the 1024 partitionings generated by the ASTC partition hash function. The generated
+ * partitions are then ranked by the number of texels in the wrong partition, compared to the ideal
+ * clustering. All 1024 partitionings are tested for similarity and ranked, apart from duplicates
+ * and partitionings that generate fewer than the requested partition count, but only the top
+ * N candidates are put through a more detailed search. N is determined by the compressor
+ * quality preset.
+ *
+ * For the detailed search, each candidate is checked against two possible encoding methods:
+ *
+ * - The best partitioning assuming different chroma colors (RGB + RGB or RGB + delta endpoints).
+ * - The best partitioning assuming same chroma colors (RGB + scale endpoints).
+ *
+ * This is implemented by computing the mean color and dominant direction for each
+ * partition. This defines two lines, both of which go through the mean color value.
+ *
+ * - One line has a direction defined by the dominant direction; this is used to assess the error
+ * from using an uncorrelated color representation.
+ * - The other line goes through (0,0,0,1) and is used to assess the error from using a same chroma
+ * (RGB + scale) color representation.
+ *
+ * The best candidate is selected by computing the squared-errors that result from using these
+ * lines for endpoint selection.
+ */
+
+#include <limits>
+#include "astcenc_internal.h"
+
+/**
+ * @brief Pick some initial kmeans cluster centers.
+ *
+ * @param blk The image block color data to compress.
+ * @param texel_count The number of texels in the block.
+ * @param partition_count The number of partitions in the block.
+ * @param[out] cluster_centers The initial partition cluster center colors.
+ */
+static void kmeans_init(
+ const image_block& blk,
+ unsigned int texel_count,
+ unsigned int partition_count,
+ vfloat4 cluster_centers[BLOCK_MAX_PARTITIONS]
+) {
+ promise(texel_count > 0);
+ promise(partition_count > 0);
+
+ unsigned int clusters_selected = 0;
+ float distances[BLOCK_MAX_TEXELS];
+
+ // Pick a random sample as first cluster center; 145897 from random.org
+ unsigned int sample = 145897 % texel_count;
+ vfloat4 center_color = blk.texel(sample);
+ cluster_centers[clusters_selected] = center_color;
+ clusters_selected++;
+
+ // Compute the distance to the first cluster center
+ float distance_sum = 0.0f;
+ for (unsigned int i = 0; i < texel_count; i++)
+ {
+ vfloat4 color = blk.texel(i);
+ vfloat4 diff = color - center_color;
+ float distance = dot_s(diff * diff, blk.channel_weight);
+ distance_sum += distance;
+ distances[i] = distance;
+ }
+
+ // More numbers from random.org for weighted-random center selection
+ const float cluster_cutoffs[9] {
+ 0.626220f, 0.932770f, 0.275454f,
+ 0.318558f, 0.240113f, 0.009190f,
+ 0.347661f, 0.731960f, 0.156391f
+ };
+
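+	// Indexing into cluster_cutoffs: each partition count owns a group of up to three cutoff
+	// values (partition_count 2 starts at index 0, 3 at index 3, 4 at index 6), and cutoff is
+	// advanced by one for every additional center selected in the loop below.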
+ unsigned int cutoff = (clusters_selected - 1) + 3 * (partition_count - 2);
+
+ // Pick the remaining samples as needed
+ while (true)
+ {
+ // Pick the next center in a weighted-random fashion.
+ float summa = 0.0f;
+ float distance_cutoff = distance_sum * cluster_cutoffs[cutoff++];
+ for (sample = 0; sample < texel_count; sample++)
+ {
+ summa += distances[sample];
+ if (summa >= distance_cutoff)
+ {
+ break;
+ }
+ }
+
+ // Clamp to a valid range and store the selected cluster center
+ sample = astc::min(sample, texel_count - 1);
+
+ center_color = blk.texel(sample);
+ cluster_centers[clusters_selected++] = center_color;
+ if (clusters_selected >= partition_count)
+ {
+ break;
+ }
+
+ // Compute the distance to the new cluster center, keep the min dist
+ distance_sum = 0.0f;
+ for (unsigned int i = 0; i < texel_count; i++)
+ {
+ vfloat4 color = blk.texel(i);
+ vfloat4 diff = color - center_color;
+ float distance = dot_s(diff * diff, blk.channel_weight);
+ distance = astc::min(distance, distances[i]);
+ distance_sum += distance;
+ distances[i] = distance;
+ }
+ }
+}
+
+/**
+ * @brief Assign texels to clusters, based on a set of chosen center points.
+ *
+ * @param blk The image block color data to compress.
+ * @param texel_count The number of texels in the block.
+ * @param partition_count The number of partitions in the block.
+ * @param cluster_centers The partition cluster center colors.
+ * @param[out] partition_of_texel The partition assigned for each texel.
+ */
+static void kmeans_assign(
+ const image_block& blk,
+ unsigned int texel_count,
+ unsigned int partition_count,
+ const vfloat4 cluster_centers[BLOCK_MAX_PARTITIONS],
+ uint8_t partition_of_texel[BLOCK_MAX_TEXELS]
+) {
+ promise(texel_count > 0);
+ promise(partition_count > 0);
+
+ uint8_t partition_texel_count[BLOCK_MAX_PARTITIONS] { 0 };
+
+ // Find the best partition for every texel
+ for (unsigned int i = 0; i < texel_count; i++)
+ {
+ float best_distance = std::numeric_limits<float>::max();
+ unsigned int best_partition = 0;
+
+ vfloat4 color = blk.texel(i);
+ for (unsigned int j = 0; j < partition_count; j++)
+ {
+ vfloat4 diff = color - cluster_centers[j];
+ float distance = dot_s(diff * diff, blk.channel_weight);
+ if (distance < best_distance)
+ {
+ best_distance = distance;
+ best_partition = j;
+ }
+ }
+
+ partition_of_texel[i] = static_cast<uint8_t>(best_partition);
+ partition_texel_count[best_partition]++;
+ }
+
+ // It is possible to get a situation where a partition ends up without any texels. In this case,
+ // assign texel N to partition N. This is silly, but ensures that every partition retains at
+ // least one texel. Reassigning a texel in this manner may cause another partition to go empty,
+ // so if we actually did a reassignment, run the whole loop over again.
+ bool problem_case;
+ do
+ {
+ problem_case = false;
+ for (unsigned int i = 0; i < partition_count; i++)
+ {
+ if (partition_texel_count[i] == 0)
+ {
+ partition_texel_count[partition_of_texel[i]]--;
+ partition_texel_count[i]++;
+ partition_of_texel[i] = static_cast<uint8_t>(i);
+ problem_case = true;
+ }
+ }
+ } while (problem_case);
+}
+
+/**
+ * @brief Compute new cluster centers based on their center of gravity.
+ *
+ * @param blk The image block color data to compress.
+ * @param texel_count The number of texels in the block.
+ * @param partition_count The number of partitions in the block.
+ * @param[out] cluster_centers The new cluster center colors.
+ * @param partition_of_texel The partition assigned for each texel.
+ */
+static void kmeans_update(
+ const image_block& blk,
+ unsigned int texel_count,
+ unsigned int partition_count,
+ vfloat4 cluster_centers[BLOCK_MAX_PARTITIONS],
+ const uint8_t partition_of_texel[BLOCK_MAX_TEXELS]
+) {
+ promise(texel_count > 0);
+ promise(partition_count > 0);
+
+ vfloat4 color_sum[BLOCK_MAX_PARTITIONS] {
+ vfloat4::zero(),
+ vfloat4::zero(),
+ vfloat4::zero(),
+ vfloat4::zero()
+ };
+
+ uint8_t partition_texel_count[BLOCK_MAX_PARTITIONS] { 0 };
+
+ // Find the center-of-gravity in each cluster
+ for (unsigned int i = 0; i < texel_count; i++)
+ {
+ uint8_t partition = partition_of_texel[i];
+ color_sum[partition] += blk.texel(i);
+ partition_texel_count[partition]++;
+ }
+
+ // Set the center of gravity to be the new cluster center
+ for (unsigned int i = 0; i < partition_count; i++)
+ {
+ float scale = 1.0f / static_cast<float>(partition_texel_count[i]);
+ cluster_centers[i] = color_sum[i] * scale;
+ }
+}
+
+/**
+ * @brief Compute bit-mismatch for partitioning in 2-partition mode.
+ *
+ * @param a The texel assignment bitvector for the block.
+ * @param b The texel assignment bitvector for the partition table.
+ *
+ * @return The number of bit mismatches.
+ */
+static inline unsigned int partition_mismatch2(
+ const uint64_t a[2],
+ const uint64_t b[2]
+) {
+ int v1 = popcount(a[0] ^ b[0]) + popcount(a[1] ^ b[1]);
+ int v2 = popcount(a[0] ^ b[1]) + popcount(a[1] ^ b[0]);
+ return astc::min(v1, v2);
+}
+
+/**
+ * @brief Compute bit-mismatch for partitioning in 3-partition mode.
+ *
+ * @param a The texel assignment bitvector for the block.
+ * @param b The texel assignment bitvector for the partition table.
+ *
+ * @return The number of bit mismatches.
+ */
+static inline unsigned int partition_mismatch3(
+ const uint64_t a[3],
+ const uint64_t b[3]
+) {
+ int p00 = popcount(a[0] ^ b[0]);
+ int p01 = popcount(a[0] ^ b[1]);
+ int p02 = popcount(a[0] ^ b[2]);
+
+ int p10 = popcount(a[1] ^ b[0]);
+ int p11 = popcount(a[1] ^ b[1]);
+ int p12 = popcount(a[1] ^ b[2]);
+
+ int p20 = popcount(a[2] ^ b[0]);
+ int p21 = popcount(a[2] ^ b[1]);
+ int p22 = popcount(a[2] ^ b[2]);
+
+ int s0 = p11 + p22;
+ int s1 = p12 + p21;
+ int v0 = astc::min(s0, s1) + p00;
+
+ int s2 = p10 + p22;
+ int s3 = p12 + p20;
+ int v1 = astc::min(s2, s3) + p01;
+
+ int s4 = p10 + p21;
+ int s5 = p11 + p20;
+ int v2 = astc::min(s4, s5) + p02;
+
+ return astc::min(v0, v1, v2);
+}
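+
+// Note on the computation above (informal): each pXY counts the texel positions whose
+// membership differs between block cluster X and table partition Y, and v0/v1/v2 enumerate
+// the 3! ways of pairing the two sets of partition labels, grouped by which table partition
+// a[0] is matched with. Taking the minimum over all pairings makes the metric invariant to
+// partition index ordering.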
+
+/**
+ * @brief Compute bit-mismatch for partitioning in 4-partition mode.
+ *
+ * @param a The texel assignment bitvector for the block.
+ * @param b The texel assignment bitvector for the partition table.
+ *
+ * @return The number of bit mismatches.
+ */
+static inline unsigned int partition_mismatch4(
+ const uint64_t a[4],
+ const uint64_t b[4]
+) {
+ int p00 = popcount(a[0] ^ b[0]);
+ int p01 = popcount(a[0] ^ b[1]);
+ int p02 = popcount(a[0] ^ b[2]);
+ int p03 = popcount(a[0] ^ b[3]);
+
+ int p10 = popcount(a[1] ^ b[0]);
+ int p11 = popcount(a[1] ^ b[1]);
+ int p12 = popcount(a[1] ^ b[2]);
+ int p13 = popcount(a[1] ^ b[3]);
+
+ int p20 = popcount(a[2] ^ b[0]);
+ int p21 = popcount(a[2] ^ b[1]);
+ int p22 = popcount(a[2] ^ b[2]);
+ int p23 = popcount(a[2] ^ b[3]);
+
+ int p30 = popcount(a[3] ^ b[0]);
+ int p31 = popcount(a[3] ^ b[1]);
+ int p32 = popcount(a[3] ^ b[2]);
+ int p33 = popcount(a[3] ^ b[3]);
+
+ int mx23 = astc::min(p22 + p33, p23 + p32);
+ int mx13 = astc::min(p21 + p33, p23 + p31);
+ int mx12 = astc::min(p21 + p32, p22 + p31);
+ int mx03 = astc::min(p20 + p33, p23 + p30);
+ int mx02 = astc::min(p20 + p32, p22 + p30);
+ int mx01 = astc::min(p21 + p30, p20 + p31);
+
+ int v0 = p00 + astc::min(p11 + mx23, p12 + mx13, p13 + mx12);
+ int v1 = p01 + astc::min(p10 + mx23, p12 + mx03, p13 + mx02);
+ int v2 = p02 + astc::min(p11 + mx03, p10 + mx13, p13 + mx01);
+ int v3 = p03 + astc::min(p11 + mx02, p12 + mx01, p10 + mx12);
+
+ return astc::min(v0, v1, v2, v3);
+}
+
+using mismatch_dispatch = unsigned int (*)(const uint64_t*, const uint64_t*);
+
+/**
+ * @brief Count the partition table mismatches vs the data clustering.
+ *
+ * @param bsd The block size information.
+ * @param partition_count The number of partitions in the block.
+ * @param bitmaps The block texel partition assignment patterns.
+ * @param[out] mismatch_counts The array storing per partitioning mismatch counts.
+ */
+static void count_partition_mismatch_bits(
+ const block_size_descriptor& bsd,
+ unsigned int partition_count,
+ const uint64_t bitmaps[BLOCK_MAX_PARTITIONS],
+ unsigned int mismatch_counts[BLOCK_MAX_PARTITIONINGS]
+) {
+ unsigned int active_count = bsd.partitioning_count_selected[partition_count - 1];
+ promise(active_count > 0);
+
+ if (partition_count == 2)
+ {
+ for (unsigned int i = 0; i < active_count; i++)
+ {
+ mismatch_counts[i] = partition_mismatch2(bitmaps, bsd.coverage_bitmaps_2[i]);
+ }
+ }
+ else if (partition_count == 3)
+ {
+ for (unsigned int i = 0; i < active_count; i++)
+ {
+ mismatch_counts[i] = partition_mismatch3(bitmaps, bsd.coverage_bitmaps_3[i]);
+ }
+ }
+ else
+ {
+ for (unsigned int i = 0; i < active_count; i++)
+ {
+ mismatch_counts[i] = partition_mismatch4(bitmaps, bsd.coverage_bitmaps_4[i]);
+ }
+ }
+}
+
+/**
+ * @brief Use counting sort on the mismatch array to sort partition candidates.
+ *
+ * @param partitioning_count The number of packed partitionings.
+ * @param mismatch_count Partitioning mismatch counts, in index order.
+ * @param[out] partition_ordering Partition index values, in mismatch order.
+ *
+ * @return The number of active partitions in this selection.
+ */
+static unsigned int get_partition_ordering_by_mismatch_bits(
+ unsigned int partitioning_count,
+ const unsigned int mismatch_count[BLOCK_MAX_PARTITIONINGS],
+ unsigned int partition_ordering[BLOCK_MAX_PARTITIONINGS]
+) {
+ promise(partitioning_count > 0);
+ unsigned int mscount[256] { 0 };
+
+ // Create the histogram of mismatch counts
+ for (unsigned int i = 0; i < partitioning_count; i++)
+ {
+ mscount[mismatch_count[i]]++;
+ }
+
+ unsigned int active_count = partitioning_count - mscount[255];
+
+ // Create a running sum from the histogram array
+ // Cells store previous values only; i.e. exclude self after sum
+ unsigned int summa = 0;
+ for (unsigned int i = 0; i < 256; i++)
+ {
+ unsigned int cnt = mscount[i];
+ mscount[i] = summa;
+ summa += cnt;
+ }
+
+ // Use the running sum as the index, incrementing after read to allow
+ // sequential entries with the same count
+ for (unsigned int i = 0; i < partitioning_count; i++)
+ {
+ unsigned int idx = mscount[mismatch_count[i]]++;
+ partition_ordering[idx] = i;
+ }
+
+ return active_count;
+}
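+
+// Worked example of the counting sort above (illustrative, made-up values): for mismatch
+// counts {2, 0, 2, 1} the histogram is mscount[0]=1, mscount[1]=1, mscount[2]=2, which the
+// exclusive prefix sum turns into start offsets {0, 1, 2}. Scattering each index to
+// mscount[count]++ then yields the ordering {1, 3, 0, 2}, i.e. the best-matching
+// partitionings come first.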
+
+/**
+ * @brief Use k-means clustering to compute a partition ordering for a block.
+ *
+ * @param bsd The block size information.
+ * @param blk The image block color data to compress.
+ * @param partition_count The desired number of partitions in the block.
+ * @param[out] partition_ordering The list of recommended partition indices, in priority order.
+ *
+ * @return The number of active partitionings in this selection.
+ */
+static unsigned int compute_kmeans_partition_ordering(
+ const block_size_descriptor& bsd,
+ const image_block& blk,
+ unsigned int partition_count,
+ unsigned int partition_ordering[BLOCK_MAX_PARTITIONINGS]
+) {
+ vfloat4 cluster_centers[BLOCK_MAX_PARTITIONS];
+ uint8_t texel_partitions[BLOCK_MAX_TEXELS];
+
+ // Use three passes of k-means clustering to partition the block data
+ for (unsigned int i = 0; i < 3; i++)
+ {
+ if (i == 0)
+ {
+ kmeans_init(blk, bsd.texel_count, partition_count, cluster_centers);
+ }
+ else
+ {
+ kmeans_update(blk, bsd.texel_count, partition_count, cluster_centers, texel_partitions);
+ }
+
+ kmeans_assign(blk, bsd.texel_count, partition_count, cluster_centers, texel_partitions);
+ }
+
+ // Construct the block bitmaps of texel assignments to each partition
+ uint64_t bitmaps[BLOCK_MAX_PARTITIONS] { 0 };
+ unsigned int texels_to_process = astc::min(bsd.texel_count, BLOCK_MAX_KMEANS_TEXELS);
+ promise(texels_to_process > 0);
+ for (unsigned int i = 0; i < texels_to_process; i++)
+ {
+ unsigned int idx = bsd.kmeans_texels[i];
+ bitmaps[texel_partitions[idx]] |= 1ULL << i;
+ }
+
+ // Count the mismatch between the block and the format's partition tables
+ unsigned int mismatch_counts[BLOCK_MAX_PARTITIONINGS];
+ count_partition_mismatch_bits(bsd, partition_count, bitmaps, mismatch_counts);
+
+ // Sort the partitions based on the number of mismatched bits
+ return get_partition_ordering_by_mismatch_bits(
+ bsd.partitioning_count_selected[partition_count - 1],
+ mismatch_counts, partition_ordering);
+}
+
+/**
+ * @brief Insert a partitioning into an ordered list of results, sorted by error.
+ *
+ * @param max_values The max number of entries in the best result arrays.
+ * @param this_error The error of the new entry.
+ * @param this_partition The partition ID of the new entry.
+ * @param[out] best_errors The array of best error values.
+ * @param[out] best_partitions The array of best partition values.
+ */
+static void insert_result(
+ unsigned int max_values,
+ float this_error,
+ unsigned int this_partition,
+ float* best_errors,
+ unsigned int* best_partitions)
+{
+ promise(max_values > 0);
+
+ // Don't bother searching if the current worst error beats the new error
+ if (this_error >= best_errors[max_values - 1])
+ {
+ return;
+ }
+
+ // Else insert into the list in error-order
+ for (unsigned int i = 0; i < max_values; i++)
+ {
+ // Existing result is better - move on ...
+ if (this_error > best_errors[i])
+ {
+ continue;
+ }
+
+ // Move existing results down one
+ for (unsigned int j = max_values - 1; j > i; j--)
+ {
+ best_errors[j] = best_errors[j - 1];
+ best_partitions[j] = best_partitions[j - 1];
+ }
+
+ // Insert new result
+ best_errors[i] = this_error;
+ best_partitions[i] = this_partition;
+ break;
+ }
+}
+
+/* See header for documentation. */
+unsigned int find_best_partition_candidates(
+ const block_size_descriptor& bsd,
+ const image_block& blk,
+ unsigned int partition_count,
+ unsigned int partition_search_limit,
+ unsigned int best_partitions[TUNE_MAX_PARTITIONING_CANDIDATES],
+ unsigned int requested_candidates
+) {
+ // Constant used to estimate quantization error for a given partitioning; the optimal value for
+ // this depends on bitrate. These values have been determined empirically.
+ unsigned int texels_per_block = bsd.texel_count;
+ float weight_imprecision_estim = 0.055f;
+ if (texels_per_block <= 20)
+ {
+ weight_imprecision_estim = 0.03f;
+ }
+ else if (texels_per_block <= 31)
+ {
+ weight_imprecision_estim = 0.04f;
+ }
+ else if (texels_per_block <= 41)
+ {
+ weight_imprecision_estim = 0.05f;
+ }
+
+ promise(partition_count > 0);
+ promise(partition_search_limit > 0);
+
+ weight_imprecision_estim = weight_imprecision_estim * weight_imprecision_estim;
+
+ unsigned int partition_sequence[BLOCK_MAX_PARTITIONINGS];
+ unsigned int sequence_len = compute_kmeans_partition_ordering(bsd, blk, partition_count, partition_sequence);
+ partition_search_limit = astc::min(partition_search_limit, sequence_len);
+ requested_candidates = astc::min(partition_search_limit, requested_candidates);
+
+ bool uses_alpha = !blk.is_constant_channel(3);
+
+ // Partitioning errors assuming uncorrelated-chrominance endpoints
+ float uncor_best_errors[TUNE_MAX_PARTITIONING_CANDIDATES];
+ unsigned int uncor_best_partitions[TUNE_MAX_PARTITIONING_CANDIDATES];
+
+ // Partitioning errors assuming same-chrominance endpoints
+ float samec_best_errors[TUNE_MAX_PARTITIONING_CANDIDATES];
+ unsigned int samec_best_partitions[TUNE_MAX_PARTITIONING_CANDIDATES];
+
+ for (unsigned int i = 0; i < requested_candidates; i++)
+ {
+ uncor_best_errors[i] = ERROR_CALC_DEFAULT;
+ samec_best_errors[i] = ERROR_CALC_DEFAULT;
+ }
+
+ if (uses_alpha)
+ {
+ for (unsigned int i = 0; i < partition_search_limit; i++)
+ {
+ unsigned int partition = partition_sequence[i];
+ const auto& pi = bsd.get_raw_partition_info(partition_count, partition);
+
+ // Compute weighting to give to each component in each partition
+ partition_metrics pms[BLOCK_MAX_PARTITIONS];
+
+ compute_avgs_and_dirs_4_comp(pi, blk, pms);
+
+ line4 uncor_lines[BLOCK_MAX_PARTITIONS];
+ line4 samec_lines[BLOCK_MAX_PARTITIONS];
+
+ processed_line4 uncor_plines[BLOCK_MAX_PARTITIONS];
+ processed_line4 samec_plines[BLOCK_MAX_PARTITIONS];
+
+ float uncor_line_lens[BLOCK_MAX_PARTITIONS];
+ float samec_line_lens[BLOCK_MAX_PARTITIONS];
+
+ for (unsigned int j = 0; j < partition_count; j++)
+ {
+ partition_metrics& pm = pms[j];
+
+ uncor_lines[j].a = pm.avg;
+ uncor_lines[j].b = normalize_safe(pm.dir, unit4());
+
+ uncor_plines[j].amod = uncor_lines[j].a - uncor_lines[j].b * dot(uncor_lines[j].a, uncor_lines[j].b);
+ uncor_plines[j].bs = uncor_lines[j].b;
+
+ samec_lines[j].a = vfloat4::zero();
+ samec_lines[j].b = normalize_safe(pm.avg, unit4());
+
+ samec_plines[j].amod = vfloat4::zero();
+ samec_plines[j].bs = samec_lines[j].b;
+ }
+
+ float uncor_error = 0.0f;
+ float samec_error = 0.0f;
+
+ compute_error_squared_rgba(pi,
+ blk,
+ uncor_plines,
+ samec_plines,
+ uncor_line_lens,
+ samec_line_lens,
+ uncor_error,
+ samec_error);
+
+ // Compute an estimate of error introduced by weight quantization imprecision.
+ // This error is computed as follows, for each partition
+ // 1: compute the principal-axis vector (full length) in error-space
+ // 2: convert the principal-axis vector to regular RGB-space
+ // 3: scale the vector by a constant that estimates average quantization error
+ // 4: for each texel, square the vector, then do a dot-product with the texel's
+ // error weight; sum up the results across all texels.
+ // 4(optimized): square the vector once, then do a dot-product with the average
+ // texel error, then multiply by the number of texels.
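+ //
+ // Editor's note (not an upstream comment): the step 4 optimization holds here
+ // because the per-texel error weight is modelled by the constant
+ // weight_imprecision_estim, so the per-texel sum collapses to a single
+ // dot-product scaled by the partition texel count, folded into error_weights
+ // below.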
+
+ for (unsigned int j = 0; j < partition_count; j++)
+ {
+ float tpp = static_cast<float>(pi.partition_texel_count[j]);
+ vfloat4 error_weights(tpp * weight_imprecision_estim);
+
+ vfloat4 uncor_vector = uncor_lines[j].b * uncor_line_lens[j];
+ vfloat4 samec_vector = samec_lines[j].b * samec_line_lens[j];
+
+ uncor_error += dot_s(uncor_vector * uncor_vector, error_weights);
+ samec_error += dot_s(samec_vector * samec_vector, error_weights);
+ }
+
+ insert_result(requested_candidates, uncor_error, partition, uncor_best_errors, uncor_best_partitions);
+ insert_result(requested_candidates, samec_error, partition, samec_best_errors, samec_best_partitions);
+ }
+ }
+ else
+ {
+ for (unsigned int i = 0; i < partition_search_limit; i++)
+ {
+ unsigned int partition = partition_sequence[i];
+ const auto& pi = bsd.get_raw_partition_info(partition_count, partition);
+
+ // Compute weighting to give to each component in each partition
+ partition_metrics pms[BLOCK_MAX_PARTITIONS];
+ compute_avgs_and_dirs_3_comp_rgb(pi, blk, pms);
+
+ partition_lines3 plines[BLOCK_MAX_PARTITIONS];
+
+ for (unsigned int j = 0; j < partition_count; j++)
+ {
+ partition_metrics& pm = pms[j];
+ partition_lines3& pl = plines[j];
+
+ pl.uncor_line.a = pm.avg;
+ pl.uncor_line.b = normalize_safe(pm.dir, unit3());
+
+ pl.samec_line.a = vfloat4::zero();
+ pl.samec_line.b = normalize_safe(pm.avg, unit3());
+
+ pl.uncor_pline.amod = pl.uncor_line.a - pl.uncor_line.b * dot3(pl.uncor_line.a, pl.uncor_line.b);
+ pl.uncor_pline.bs = pl.uncor_line.b;
+
+ pl.samec_pline.amod = vfloat4::zero();
+ pl.samec_pline.bs = pl.samec_line.b;
+ }
+
+ float uncor_error = 0.0f;
+ float samec_error = 0.0f;
+
+ compute_error_squared_rgb(pi,
+ blk,
+ plines,
+ uncor_error,
+ samec_error);
+
+ // Compute an estimate of error introduced by weight quantization imprecision.
+ // This error is computed as follows, for each partition
+ // 1: compute the principal-axis vector (full length) in error-space
+ // 2: convert the principal-axis vector to regular RGB-space
+ // 3: scale the vector by a constant that estimates average quantization error
+ // 4: for each texel, square the vector, then do a dot-product with the texel's
+ // error weight; sum up the results across all texels.
+ // 4(optimized): square the vector once, then do a dot-product with the average
+ // texel error, then multiply by the number of texels.
+
+ for (unsigned int j = 0; j < partition_count; j++)
+ {
+ partition_lines3& pl = plines[j];
+
+ float tpp = static_cast<float>(pi.partition_texel_count[j]);
+ vfloat4 error_weights(tpp * weight_imprecision_estim);
+
+ vfloat4 uncor_vector = pl.uncor_line.b * pl.uncor_line_len;
+ vfloat4 samec_vector = pl.samec_line.b * pl.samec_line_len;
+
+ uncor_error += dot3_s(uncor_vector * uncor_vector, error_weights);
+ samec_error += dot3_s(samec_vector * samec_vector, error_weights);
+ }
+
+ insert_result(requested_candidates, uncor_error, partition, uncor_best_errors, uncor_best_partitions);
+ insert_result(requested_candidates, samec_error, partition, samec_best_errors, samec_best_partitions);
+ }
+ }
+
+ bool best_is_uncor = uncor_best_partitions[0] > samec_best_partitions[0];
+
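+ // Interleave the uncorrelated-chroma and same-chroma candidate lists so that
+ // both error models contribute to the emitted candidate set; duplicates are
+ // removed by the bitmask pass below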
+ unsigned int interleave[2 * TUNE_MAX_PARTITIONING_CANDIDATES];
+ for (unsigned int i = 0; i < requested_candidates; i++)
+ {
+ if (best_is_uncor)
+ {
+ interleave[2 * i] = bsd.get_raw_partition_info(partition_count, uncor_best_partitions[i]).partition_index;
+ interleave[2 * i + 1] = bsd.get_raw_partition_info(partition_count, samec_best_partitions[i]).partition_index;
+ }
+ else
+ {
+ interleave[2 * i] = bsd.get_raw_partition_info(partition_count, samec_best_partitions[i]).partition_index;
+ interleave[2 * i + 1] = bsd.get_raw_partition_info(partition_count, uncor_best_partitions[i]).partition_index;
+ }
+ }
+
+ uint64_t bitmasks[1024/64] { 0 };
+ unsigned int emitted = 0;
+
+ // Deduplicate the first "requested" entries
+ for (unsigned int i = 0; i < requested_candidates * 2; i++)
+ {
+ unsigned int partition = interleave[i];
+
+ unsigned int word = partition / 64;
+ unsigned int bit = partition % 64;
+
+ bool written = bitmasks[word] & (1ull << bit);
+
+ if (!written)
+ {
+ best_partitions[emitted] = partition;
+ bitmasks[word] |= 1ull << bit;
+ emitted++;
+
+ if (emitted == requested_candidates)
+ {
+ break;
+ }
+ }
+ }
+
+ return emitted;
+}
+
+#endif
diff --git a/thirdparty/astcenc/astcenc_ideal_endpoints_and_weights.cpp b/thirdparty/astcenc/astcenc_ideal_endpoints_and_weights.cpp
new file mode 100644
index 0000000000..5145e08693
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_ideal_endpoints_and_weights.cpp
@@ -0,0 +1,1663 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2011-2023 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+#if !defined(ASTCENC_DECOMPRESS_ONLY)
+
+/**
+ * @brief Functions for computing color endpoints and texel weights.
+ */
+
+#include <cassert>
+
+#include "astcenc_internal.h"
+#include "astcenc_vecmathlib.h"
+
+/**
+ * @brief Compute the infilled weight for N texel indices in a decimated grid.
+ *
+ * @param di The weight grid decimation to use.
+ * @param weights The decimated weight values to use.
+ * @param index The first texel index to interpolate.
+ *
+ * @return The interpolated weight for the given set of SIMD_WIDTH texels.
+ */
+static vfloat bilinear_infill_vla(
+ const decimation_info& di,
+ const float* weights,
+ unsigned int index
+) {
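+ // Each texel weight is reconstructed as a weighted sum of up to four stored
+ // weights: w(texel) = sum_k contrib_k(texel) * weights[index_k(texel)]
+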
+ // Load the bilinear filter texel weight indexes in the decimated grid
+ vint weight_idx0 = vint(di.texel_weights_tr[0] + index);
+ vint weight_idx1 = vint(di.texel_weights_tr[1] + index);
+ vint weight_idx2 = vint(di.texel_weights_tr[2] + index);
+ vint weight_idx3 = vint(di.texel_weights_tr[3] + index);
+
+ // Load the bilinear filter weights from the decimated grid
+ vfloat weight_val0 = gatherf(weights, weight_idx0);
+ vfloat weight_val1 = gatherf(weights, weight_idx1);
+ vfloat weight_val2 = gatherf(weights, weight_idx2);
+ vfloat weight_val3 = gatherf(weights, weight_idx3);
+
+ // Load the weight contribution factors for each decimated weight
+ vfloat tex_weight_float0 = loada(di.texel_weight_contribs_float_tr[0] + index);
+ vfloat tex_weight_float1 = loada(di.texel_weight_contribs_float_tr[1] + index);
+ vfloat tex_weight_float2 = loada(di.texel_weight_contribs_float_tr[2] + index);
+ vfloat tex_weight_float3 = loada(di.texel_weight_contribs_float_tr[3] + index);
+
+ // Compute the bilinear interpolation to generate the per-texel weight
+ return (weight_val0 * tex_weight_float0 + weight_val1 * tex_weight_float1) +
+ (weight_val2 * tex_weight_float2 + weight_val3 * tex_weight_float3);
+}
+
+/**
+ * @brief Compute the infilled weight for N texel indices in a decimated grid.
+ *
+ * This is a specialized version which computes only two weights per texel for
+ * encodings that are only decimated in a single axis.
+ *
+ * @param di The weight grid decimation to use.
+ * @param weights The decimated weight values to use.
+ * @param index The first texel index to interpolate.
+ *
+ * @return The interpolated weight for the given set of SIMD_WIDTH texels.
+ */
+static vfloat bilinear_infill_vla_2(
+ const decimation_info& di,
+ const float* weights,
+ unsigned int index
+) {
+ // Load the bilinear filter texel weight indexes in the decimated grid
+ vint weight_idx0 = vint(di.texel_weights_tr[0] + index);
+ vint weight_idx1 = vint(di.texel_weights_tr[1] + index);
+
+ // Load the bilinear filter weights from the decimated grid
+ vfloat weight_val0 = gatherf(weights, weight_idx0);
+ vfloat weight_val1 = gatherf(weights, weight_idx1);
+
+ // Load the weight contribution factors for each decimated weight
+ vfloat tex_weight_float0 = loada(di.texel_weight_contribs_float_tr[0] + index);
+ vfloat tex_weight_float1 = loada(di.texel_weight_contribs_float_tr[1] + index);
+
+ // Compute the bilinear interpolation to generate the per-texel weight
+ return (weight_val0 * tex_weight_float0 + weight_val1 * tex_weight_float1);
+}
+
+/**
+ * @brief Compute the ideal endpoints and weights for 1 color component.
+ *
+ * @param blk The image block color data to compress.
+ * @param pi The partition info for the current trial.
+ * @param[out] ei The computed ideal endpoints and weights.
+ * @param component The color component to compute.
+ */
+static void compute_ideal_colors_and_weights_1_comp(
+ const image_block& blk,
+ const partition_info& pi,
+ endpoints_and_weights& ei,
+ unsigned int component
+) {
+ unsigned int partition_count = pi.partition_count;
+ ei.ep.partition_count = partition_count;
+ promise(partition_count > 0);
+
+ unsigned int texel_count = blk.texel_count;
+ promise(texel_count > 0);
+
+ float error_weight;
+ const float* data_vr = nullptr;
+
+ assert(component < BLOCK_MAX_COMPONENTS);
+ switch (component)
+ {
+ case 0:
+ error_weight = blk.channel_weight.lane<0>();
+ data_vr = blk.data_r;
+ break;
+ case 1:
+ error_weight = blk.channel_weight.lane<1>();
+ data_vr = blk.data_g;
+ break;
+ case 2:
+ error_weight = blk.channel_weight.lane<2>();
+ data_vr = blk.data_b;
+ break;
+ default:
+ assert(component == 3);
+ error_weight = blk.channel_weight.lane<3>();
+ data_vr = blk.data_a;
+ break;
+ }
+
+ vmask4 sep_mask = vint4::lane_id() == vint4(component);
+ bool is_constant_wes { true };
+ float partition0_len_sq { 0.0f };
+
+ for (unsigned int i = 0; i < partition_count; i++)
+ {
+ float lowvalue { 1e10f };
+ float highvalue { -1e10f };
+
+ unsigned int partition_texel_count = pi.partition_texel_count[i];
+ for (unsigned int j = 0; j < partition_texel_count; j++)
+ {
+ unsigned int tix = pi.texels_of_partition[i][j];
+ float value = data_vr[tix];
+ lowvalue = astc::min(value, lowvalue);
+ highvalue = astc::max(value, highvalue);
+ }
+
+ if (highvalue <= lowvalue)
+ {
+ lowvalue = 0.0f;
+ highvalue = 1e-7f;
+ }
+
+ float length = highvalue - lowvalue;
+ float length_squared = length * length;
+ float scale = 1.0f / length;
+
+ if (i == 0)
+ {
+ partition0_len_sq = length_squared;
+ }
+ else
+ {
+ is_constant_wes = is_constant_wes && length_squared == partition0_len_sq;
+ }
+
+ for (unsigned int j = 0; j < partition_texel_count; j++)
+ {
+ unsigned int tix = pi.texels_of_partition[i][j];
+ float value = (data_vr[tix] - lowvalue) * scale;
+ value = astc::clamp1f(value);
+
+ ei.weights[tix] = value;
+ ei.weight_error_scale[tix] = length_squared * error_weight;
+ assert(!astc::isnan(ei.weight_error_scale[tix]));
+ }
+
+ ei.ep.endpt0[i] = select(blk.data_min, vfloat4(lowvalue), sep_mask);
+ ei.ep.endpt1[i] = select(blk.data_max, vfloat4(highvalue), sep_mask);
+ }
+
+ // Zero initialize any SIMD over-fetch
+ unsigned int texel_count_simd = round_up_to_simd_multiple_vla(texel_count);
+ for (unsigned int i = texel_count; i < texel_count_simd; i++)
+ {
+ ei.weights[i] = 0.0f;
+ ei.weight_error_scale[i] = 0.0f;
+ }
+
+ ei.is_constant_weight_error_scale = is_constant_wes;
+}
+
+/**
+ * @brief Compute the ideal endpoints and weights for 2 color components.
+ *
+ * @param blk The image block color data to compress.
+ * @param pi The partition info for the current trial.
+ * @param[out] ei The computed ideal endpoints and weights.
+ * @param component1 The first color component to compute.
+ * @param component2 The second color component to compute.
+ */
+static void compute_ideal_colors_and_weights_2_comp(
+ const image_block& blk,
+ const partition_info& pi,
+ endpoints_and_weights& ei,
+ int component1,
+ int component2
+) {
+ unsigned int partition_count = pi.partition_count;
+ ei.ep.partition_count = partition_count;
+ promise(partition_count > 0);
+
+ unsigned int texel_count = blk.texel_count;
+ promise(texel_count > 0);
+
+ partition_metrics pms[BLOCK_MAX_PARTITIONS];
+
+ float error_weight;
+ const float* data_vr = nullptr;
+ const float* data_vg = nullptr;
+
+ if (component1 == 0 && component2 == 1)
+ {
+ error_weight = hadd_s(blk.channel_weight.swz<0, 1>()) / 2.0f;
+
+ data_vr = blk.data_r;
+ data_vg = blk.data_g;
+ }
+ else if (component1 == 0 && component2 == 2)
+ {
+ error_weight = hadd_s(blk.channel_weight.swz<0, 2>()) / 2.0f;
+
+ data_vr = blk.data_r;
+ data_vg = blk.data_b;
+ }
+ else // (component1 == 1 && component2 == 2)
+ {
+ assert(component1 == 1 && component2 == 2);
+
+ error_weight = hadd_s(blk.channel_weight.swz<1, 2>()) / 2.0f;
+
+ data_vr = blk.data_g;
+ data_vg = blk.data_b;
+ }
+
+ compute_avgs_and_dirs_2_comp(pi, blk, component1, component2, pms);
+
+ bool is_constant_wes { true };
+ float partition0_len_sq { 0.0f };
+
+ vmask4 comp1_mask = vint4::lane_id() == vint4(component1);
+ vmask4 comp2_mask = vint4::lane_id() == vint4(component2);
+
+ for (unsigned int i = 0; i < partition_count; i++)
+ {
+ vfloat4 dir = pms[i].dir;
+ if (hadd_s(dir) < 0.0f)
+ {
+ dir = vfloat4::zero() - dir;
+ }
+
+ line2 line { pms[i].avg, normalize_safe(dir, unit2()) };
+ float lowparam { 1e10f };
+ float highparam { -1e10f };
+
+ unsigned int partition_texel_count = pi.partition_texel_count[i];
+ for (unsigned int j = 0; j < partition_texel_count; j++)
+ {
+ unsigned int tix = pi.texels_of_partition[i][j];
+ vfloat4 point = vfloat2(data_vr[tix], data_vg[tix]);
+ float param = dot_s(point - line.a, line.b);
+ ei.weights[tix] = param;
+
+ lowparam = astc::min(param, lowparam);
+ highparam = astc::max(param, highparam);
+ }
+
+ // It is possible for a uniform-color partition to produce length=0;
+ // this causes NaN issues, so set to a small value to avoid the problem
+ if (highparam <= lowparam)
+ {
+ lowparam = 0.0f;
+ highparam = 1e-7f;
+ }
+
+ float length = highparam - lowparam;
+ float length_squared = length * length;
+ float scale = 1.0f / length;
+
+ if (i == 0)
+ {
+ partition0_len_sq = length_squared;
+ }
+ else
+ {
+ is_constant_wes = is_constant_wes && length_squared == partition0_len_sq;
+ }
+
+ for (unsigned int j = 0; j < partition_texel_count; j++)
+ {
+ unsigned int tix = pi.texels_of_partition[i][j];
+ float idx = (ei.weights[tix] - lowparam) * scale;
+ idx = astc::clamp1f(idx);
+
+ ei.weights[tix] = idx;
+ ei.weight_error_scale[tix] = length_squared * error_weight;
+ assert(!astc::isnan(ei.weight_error_scale[tix]));
+ }
+
+ vfloat4 lowvalue = line.a + line.b * lowparam;
+ vfloat4 highvalue = line.a + line.b * highparam;
+
+ vfloat4 ep0 = select(blk.data_min, vfloat4(lowvalue.lane<0>()), comp1_mask);
+ vfloat4 ep1 = select(blk.data_max, vfloat4(highvalue.lane<0>()), comp1_mask);
+
+ ei.ep.endpt0[i] = select(ep0, vfloat4(lowvalue.lane<1>()), comp2_mask);
+ ei.ep.endpt1[i] = select(ep1, vfloat4(highvalue.lane<1>()), comp2_mask);
+ }
+
+ // Zero initialize any SIMD over-fetch
+ unsigned int texel_count_simd = round_up_to_simd_multiple_vla(texel_count);
+ for (unsigned int i = texel_count; i < texel_count_simd; i++)
+ {
+ ei.weights[i] = 0.0f;
+ ei.weight_error_scale[i] = 0.0f;
+ }
+
+ ei.is_constant_weight_error_scale = is_constant_wes;
+}
+
+/**
+ * @brief Compute the ideal endpoints and weights for 3 color components.
+ *
+ * @param blk The image block color data to compress.
+ * @param pi The partition info for the current trial.
+ * @param[out] ei The computed ideal endpoints and weights.
+ * @param omitted_component The color component excluded from the calculation.
+ */
+static void compute_ideal_colors_and_weights_3_comp(
+ const image_block& blk,
+ const partition_info& pi,
+ endpoints_and_weights& ei,
+ unsigned int omitted_component
+) {
+ unsigned int partition_count = pi.partition_count;
+ ei.ep.partition_count = partition_count;
+ promise(partition_count > 0);
+
+ unsigned int texel_count = blk.texel_count;
+ promise(texel_count > 0);
+
+ partition_metrics pms[BLOCK_MAX_PARTITIONS];
+
+ float error_weight;
+ const float* data_vr = nullptr;
+ const float* data_vg = nullptr;
+ const float* data_vb = nullptr;
+ if (omitted_component == 0)
+ {
+ error_weight = hadd_s(blk.channel_weight.swz<0, 1, 2>());
+ data_vr = blk.data_g;
+ data_vg = blk.data_b;
+ data_vb = blk.data_a;
+ }
+ else if (omitted_component == 1)
+ {
+ error_weight = hadd_s(blk.channel_weight.swz<0, 2, 3>());
+ data_vr = blk.data_r;
+ data_vg = blk.data_b;
+ data_vb = blk.data_a;
+ }
+ else if (omitted_component == 2)
+ {
+ error_weight = hadd_s(blk.channel_weight.swz<0, 1, 3>());
+ data_vr = blk.data_r;
+ data_vg = blk.data_g;
+ data_vb = blk.data_a;
+ }
+ else
+ {
+ assert(omitted_component == 3);
+
+ error_weight = hadd_s(blk.channel_weight.swz<0, 1, 2>());
+ data_vr = blk.data_r;
+ data_vg = blk.data_g;
+ data_vb = blk.data_b;
+ }
+
+ error_weight = error_weight * (1.0f / 3.0f);
+
+ if (omitted_component == 3)
+ {
+ compute_avgs_and_dirs_3_comp_rgb(pi, blk, pms);
+ }
+ else
+ {
+ compute_avgs_and_dirs_3_comp(pi, blk, omitted_component, pms);
+ }
+
+ bool is_constant_wes { true };
+ float partition0_len_sq { 0.0f };
+
+ for (unsigned int i = 0; i < partition_count; i++)
+ {
+ vfloat4 dir = pms[i].dir;
+ if (hadd_rgb_s(dir) < 0.0f)
+ {
+ dir = vfloat4::zero() - dir;
+ }
+
+ line3 line { pms[i].avg, normalize_safe(dir, unit3()) };
+ float lowparam { 1e10f };
+ float highparam { -1e10f };
+
+ unsigned int partition_texel_count = pi.partition_texel_count[i];
+ for (unsigned int j = 0; j < partition_texel_count; j++)
+ {
+ unsigned int tix = pi.texels_of_partition[i][j];
+ vfloat4 point = vfloat3(data_vr[tix], data_vg[tix], data_vb[tix]);
+ float param = dot3_s(point - line.a, line.b);
+ ei.weights[tix] = param;
+
+ lowparam = astc::min(param, lowparam);
+ highparam = astc::max(param, highparam);
+ }
+
+ // It is possible for a uniform-color partition to produce length=0;
+ // this causes NaN issues, so set to a small value to avoid the problem
+ if (highparam <= lowparam)
+ {
+ lowparam = 0.0f;
+ highparam = 1e-7f;
+ }
+
+ float length = highparam - lowparam;
+ float length_squared = length * length;
+ float scale = 1.0f / length;
+
+ if (i == 0)
+ {
+ partition0_len_sq = length_squared;
+ }
+ else
+ {
+ is_constant_wes = is_constant_wes && length_squared == partition0_len_sq;
+ }
+
+ for (unsigned int j = 0; j < partition_texel_count; j++)
+ {
+ unsigned int tix = pi.texels_of_partition[i][j];
+ float idx = (ei.weights[tix] - lowparam) * scale;
+ idx = astc::clamp1f(idx);
+
+ ei.weights[tix] = idx;
+ ei.weight_error_scale[tix] = length_squared * error_weight;
+ assert(!astc::isnan(ei.weight_error_scale[tix]));
+ }
+
+ vfloat4 ep0 = line.a + line.b * lowparam;
+ vfloat4 ep1 = line.a + line.b * highparam;
+
+ vfloat4 bmin = blk.data_min;
+ vfloat4 bmax = blk.data_max;
+
+ assert(omitted_component < BLOCK_MAX_COMPONENTS);
+ switch (omitted_component)
+ {
+ case 0:
+ ei.ep.endpt0[i] = vfloat4(bmin.lane<0>(), ep0.lane<0>(), ep0.lane<1>(), ep0.lane<2>());
+ ei.ep.endpt1[i] = vfloat4(bmax.lane<0>(), ep1.lane<0>(), ep1.lane<1>(), ep1.lane<2>());
+ break;
+ case 1:
+ ei.ep.endpt0[i] = vfloat4(ep0.lane<0>(), bmin.lane<1>(), ep0.lane<1>(), ep0.lane<2>());
+ ei.ep.endpt1[i] = vfloat4(ep1.lane<0>(), bmax.lane<1>(), ep1.lane<1>(), ep1.lane<2>());
+ break;
+ case 2:
+ ei.ep.endpt0[i] = vfloat4(ep0.lane<0>(), ep0.lane<1>(), bmin.lane<2>(), ep0.lane<2>());
+ ei.ep.endpt1[i] = vfloat4(ep1.lane<0>(), ep1.lane<1>(), bmax.lane<2>(), ep1.lane<2>());
+ break;
+ default:
+ ei.ep.endpt0[i] = vfloat4(ep0.lane<0>(), ep0.lane<1>(), ep0.lane<2>(), bmin.lane<3>());
+ ei.ep.endpt1[i] = vfloat4(ep1.lane<0>(), ep1.lane<1>(), ep1.lane<2>(), bmax.lane<3>());
+ break;
+ }
+ }
+
+ // Zero initialize any SIMD over-fetch
+ unsigned int texel_count_simd = round_up_to_simd_multiple_vla(texel_count);
+ for (unsigned int i = texel_count; i < texel_count_simd; i++)
+ {
+ ei.weights[i] = 0.0f;
+ ei.weight_error_scale[i] = 0.0f;
+ }
+
+ ei.is_constant_weight_error_scale = is_constant_wes;
+}
+
+/**
+ * @brief Compute the ideal endpoints and weights for 4 color components.
+ *
+ * @param blk The image block color data to compress.
+ * @param pi The partition info for the current trial.
+ * @param[out] ei The computed ideal endpoints and weights.
+ */
+static void compute_ideal_colors_and_weights_4_comp(
+ const image_block& blk,
+ const partition_info& pi,
+ endpoints_and_weights& ei
+) {
+ const float error_weight = hadd_s(blk.channel_weight) / 4.0f;
+
+ unsigned int partition_count = pi.partition_count;
+
+ unsigned int texel_count = blk.texel_count;
+ promise(texel_count > 0);
+ promise(partition_count > 0);
+
+ partition_metrics pms[BLOCK_MAX_PARTITIONS];
+
+ compute_avgs_and_dirs_4_comp(pi, blk, pms);
+
+ bool is_constant_wes { true };
+ float partition0_len_sq { 0.0f };
+
+ for (unsigned int i = 0; i < partition_count; i++)
+ {
+ vfloat4 dir = pms[i].dir;
+ if (hadd_rgb_s(dir) < 0.0f)
+ {
+ dir = vfloat4::zero() - dir;
+ }
+
+ line4 line { pms[i].avg, normalize_safe(dir, unit4()) };
+ float lowparam { 1e10f };
+ float highparam { -1e10f };
+
+ unsigned int partition_texel_count = pi.partition_texel_count[i];
+ for (unsigned int j = 0; j < partition_texel_count; j++)
+ {
+ unsigned int tix = pi.texels_of_partition[i][j];
+ vfloat4 point = blk.texel(tix);
+ float param = dot_s(point - line.a, line.b);
+ ei.weights[tix] = param;
+
+ lowparam = astc::min(param, lowparam);
+ highparam = astc::max(param, highparam);
+ }
+
+ // It is possible for a uniform-color partition to produce length=0;
+ // this causes NaN issues, so set to a small value to avoid the problem
+ if (highparam <= lowparam)
+ {
+ lowparam = 0.0f;
+ highparam = 1e-7f;
+ }
+
+ float length = highparam - lowparam;
+ float length_squared = length * length;
+ float scale = 1.0f / length;
+
+ if (i == 0)
+ {
+ partition0_len_sq = length_squared;
+ }
+ else
+ {
+ is_constant_wes = is_constant_wes && length_squared == partition0_len_sq;
+ }
+
+ ei.ep.endpt0[i] = line.a + line.b * lowparam;
+ ei.ep.endpt1[i] = line.a + line.b * highparam;
+
+ for (unsigned int j = 0; j < partition_texel_count; j++)
+ {
+ unsigned int tix = pi.texels_of_partition[i][j];
+ float idx = (ei.weights[tix] - lowparam) * scale;
+ idx = astc::clamp1f(idx);
+
+ ei.weights[tix] = idx;
+ ei.weight_error_scale[tix] = length_squared * error_weight;
+ assert(!astc::isnan(ei.weight_error_scale[tix]));
+ }
+ }
+
+ // Zero initialize any SIMD over-fetch
+ unsigned int texel_count_simd = round_up_to_simd_multiple_vla(texel_count);
+ for (unsigned int i = texel_count; i < texel_count_simd; i++)
+ {
+ ei.weights[i] = 0.0f;
+ ei.weight_error_scale[i] = 0.0f;
+ }
+
+ ei.is_constant_weight_error_scale = is_constant_wes;
+}
+
+/* See header for documentation. */
+void compute_ideal_colors_and_weights_1plane(
+ const image_block& blk,
+ const partition_info& pi,
+ endpoints_and_weights& ei
+) {
+ bool uses_alpha = !blk.is_constant_channel(3);
+
+ if (uses_alpha)
+ {
+ compute_ideal_colors_and_weights_4_comp(blk, pi, ei);
+ }
+ else
+ {
+ compute_ideal_colors_and_weights_3_comp(blk, pi, ei, 3);
+ }
+}
+
+/* See header for documentation. */
+void compute_ideal_colors_and_weights_2planes(
+ const block_size_descriptor& bsd,
+ const image_block& blk,
+ unsigned int plane2_component,
+ endpoints_and_weights& ei1,
+ endpoints_and_weights& ei2
+) {
+ const auto& pi = bsd.get_partition_info(1, 0);
+ bool uses_alpha = !blk.is_constant_channel(3);
+
+ assert(plane2_component < BLOCK_MAX_COMPONENTS);
+ switch (plane2_component)
+ {
+ case 0: // Separate weights for red
+ if (uses_alpha)
+ {
+ compute_ideal_colors_and_weights_3_comp(blk, pi, ei1, 0);
+ }
+ else
+ {
+ compute_ideal_colors_and_weights_2_comp(blk, pi, ei1, 1, 2);
+ }
+ compute_ideal_colors_and_weights_1_comp(blk, pi, ei2, 0);
+ break;
+
+ case 1: // Separate weights for green
+ if (uses_alpha)
+ {
+ compute_ideal_colors_and_weights_3_comp(blk, pi, ei1, 1);
+ }
+ else
+ {
+ compute_ideal_colors_and_weights_2_comp(blk, pi, ei1, 0, 2);
+ }
+ compute_ideal_colors_and_weights_1_comp(blk, pi, ei2, 1);
+ break;
+
+ case 2: // Separate weights for blue
+ if (uses_alpha)
+ {
+ compute_ideal_colors_and_weights_3_comp(blk, pi, ei1, 2);
+ }
+ else
+ {
+ compute_ideal_colors_and_weights_2_comp(blk, pi, ei1, 0, 1);
+ }
+ compute_ideal_colors_and_weights_1_comp(blk, pi, ei2, 2);
+ break;
+
+ default: // Separate weights for alpha
+ assert(uses_alpha);
+ compute_ideal_colors_and_weights_3_comp(blk, pi, ei1, 3);
+ compute_ideal_colors_and_weights_1_comp(blk, pi, ei2, 3);
+ break;
+ }
+}
+
+/* See header for documentation. */
+float compute_error_of_weight_set_1plane(
+ const endpoints_and_weights& eai,
+ const decimation_info& di,
+ const float* dec_weight_quant_uvalue
+) {
+ vfloatacc error_summav = vfloatacc::zero();
+ unsigned int texel_count = di.texel_count;
+ promise(texel_count > 0);
+
+ // Process SIMD-width chunks, safe to over-fetch - the extra space is zero initialized
+ if (di.max_texel_weight_count > 2)
+ {
+ for (unsigned int i = 0; i < texel_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ // Compute the bilinear interpolation of the decimated weight grid
+ vfloat current_values = bilinear_infill_vla(di, dec_weight_quant_uvalue, i);
+
+ // Compute the error between the computed value and the ideal weight
+ vfloat actual_values = loada(eai.weights + i);
+ vfloat diff = current_values - actual_values;
+ vfloat significance = loada(eai.weight_error_scale + i);
+ vfloat error = diff * diff * significance;
+
+ haccumulate(error_summav, error);
+ }
+ }
+ else if (di.max_texel_weight_count > 1)
+ {
+ for (unsigned int i = 0; i < texel_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ // Compute the bilinear interpolation of the decimated weight grid
+ vfloat current_values = bilinear_infill_vla_2(di, dec_weight_quant_uvalue, i);
+
+ // Compute the error between the computed value and the ideal weight
+ vfloat actual_values = loada(eai.weights + i);
+ vfloat diff = current_values - actual_values;
+ vfloat significance = loada(eai.weight_error_scale + i);
+ vfloat error = diff * diff * significance;
+
+ haccumulate(error_summav, error);
+ }
+ }
+ else
+ {
+ for (unsigned int i = 0; i < texel_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ // Load the weight set directly, without interpolation
+ vfloat current_values = loada(dec_weight_quant_uvalue + i);
+
+ // Compute the error between the computed value and the ideal weight
+ vfloat actual_values = loada(eai.weights + i);
+ vfloat diff = current_values - actual_values;
+ vfloat significance = loada(eai.weight_error_scale + i);
+ vfloat error = diff * diff * significance;
+
+ haccumulate(error_summav, error);
+ }
+ }
+
+ // Resolve the final scalar accumulator sum
+ return hadd_s(error_summav);
+}
+
+/* See header for documentation. */
+float compute_error_of_weight_set_2planes(
+ const endpoints_and_weights& eai1,
+ const endpoints_and_weights& eai2,
+ const decimation_info& di,
+ const float* dec_weight_quant_uvalue_plane1,
+ const float* dec_weight_quant_uvalue_plane2
+) {
+ vfloatacc error_summav = vfloatacc::zero();
+ unsigned int texel_count = di.texel_count;
+ promise(texel_count > 0);
+
+ // Process SIMD-width chunks, safe to over-fetch - the extra space is zero initialized
+ if (di.max_texel_weight_count > 2)
+ {
+ for (unsigned int i = 0; i < texel_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ // Plane 1
+ // Compute the bilinear interpolation of the decimated weight grid
+ vfloat current_values1 = bilinear_infill_vla(di, dec_weight_quant_uvalue_plane1, i);
+
+ // Compute the error between the computed value and the ideal weight
+ vfloat actual_values1 = loada(eai1.weights + i);
+ vfloat diff = current_values1 - actual_values1;
+ vfloat error1 = diff * diff * loada(eai1.weight_error_scale + i);
+
+ // Plane 2
+ // Compute the bilinear interpolation of the decimated weight grid
+ vfloat current_values2 = bilinear_infill_vla(di, dec_weight_quant_uvalue_plane2, i);
+
+ // Compute the error between the computed value and the ideal weight
+ vfloat actual_values2 = loada(eai2.weights + i);
+ diff = current_values2 - actual_values2;
+ vfloat error2 = diff * diff * loada(eai2.weight_error_scale + i);
+
+ haccumulate(error_summav, error1 + error2);
+ }
+ }
+ else if (di.max_texel_weight_count > 1)
+ {
+ for (unsigned int i = 0; i < texel_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ // Plane 1
+ // Compute the bilinear interpolation of the decimated weight grid
+ vfloat current_values1 = bilinear_infill_vla_2(di, dec_weight_quant_uvalue_plane1, i);
+
+ // Compute the error between the computed value and the ideal weight
+ vfloat actual_values1 = loada(eai1.weights + i);
+ vfloat diff = current_values1 - actual_values1;
+ vfloat error1 = diff * diff * loada(eai1.weight_error_scale + i);
+
+ // Plane 2
+ // Compute the bilinear interpolation of the decimated weight grid
+ vfloat current_values2 = bilinear_infill_vla_2(di, dec_weight_quant_uvalue_plane2, i);
+
+ // Compute the error between the computed value and the ideal weight
+ vfloat actual_values2 = loada(eai2.weights + i);
+ diff = current_values2 - actual_values2;
+ vfloat error2 = diff * diff * loada(eai2.weight_error_scale + i);
+
+ haccumulate(error_summav, error1 + error2);
+ }
+ }
+ else
+ {
+ for (unsigned int i = 0; i < texel_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ // Plane 1
+ // Load the weight set directly, without interpolation
+ vfloat current_values1 = loada(dec_weight_quant_uvalue_plane1 + i);
+
+ // Compute the error between the computed value and the ideal weight
+ vfloat actual_values1 = loada(eai1.weights + i);
+ vfloat diff = current_values1 - actual_values1;
+ vfloat error1 = diff * diff * loada(eai1.weight_error_scale + i);
+
+ // Plane 2
+ // Load the weight set directly, without interpolation
+ vfloat current_values2 = loada(dec_weight_quant_uvalue_plane2 + i);
+
+ // Compute the error between the computed value and the ideal weight
+ vfloat actual_values2 = loada(eai2.weights + i);
+ diff = current_values2 - actual_values2;
+ vfloat error2 = diff * diff * loada(eai2.weight_error_scale + i);
+
+ haccumulate(error_summav, error1 + error2);
+ }
+ }
+
+ // Resolve the final scalar accumulator sum
+ return hadd_s(error_summav);
+}
+
+/* See header for documentation. */
+void compute_ideal_weights_for_decimation(
+ const endpoints_and_weights& ei,
+ const decimation_info& di,
+ float* dec_weight_ideal_value
+) {
+ unsigned int texel_count = di.texel_count;
+ unsigned int weight_count = di.weight_count;
+ bool is_direct = texel_count == weight_count;
+ promise(texel_count > 0);
+ promise(weight_count > 0);
+
+ // Ensure that the ends of the output arrays used by the SIMD paths later are filled, so we
+ // can safely run SIMD elsewhere without a loop tail. Note that this is always safe as weight
+ // arrays always contain space for 64 elements
+ unsigned int prev_weight_count_simd = round_down_to_simd_multiple_vla(weight_count - 1);
+ storea(vfloat::zero(), dec_weight_ideal_value + prev_weight_count_simd);
+
+ // If we have a 1:1 mapping just shortcut the computation. Transfer enough to also copy the
+ // zero-initialized SIMD over-fetch region
+ if (is_direct)
+ {
+ for (unsigned int i = 0; i < texel_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ vfloat weight(ei.weights + i);
+ storea(weight, dec_weight_ideal_value + i);
+ }
+
+ return;
+ }
+
+ // Otherwise compute an estimate and perform single refinement iteration
+ alignas(ASTCENC_VECALIGN) float infilled_weights[BLOCK_MAX_TEXELS];
+
+ // Compute an initial average for each decimated weight
+ bool constant_wes = ei.is_constant_weight_error_scale;
+ vfloat weight_error_scale(ei.weight_error_scale[0]);
+
+ // This loop overshoots, which is OK as we initialize the array tails in the
+ // decimation table structures to safe values ...
+ for (unsigned int i = 0; i < weight_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ // Start with a small value to avoid div-by-zero later
+ vfloat weight_weight(1e-10f);
+ vfloat initial_weight = vfloat::zero();
+
+ // Accumulate error weighting of all the texels using this weight
+ vint weight_texel_count(di.weight_texel_count + i);
+ unsigned int max_texel_count = hmax(weight_texel_count).lane<0>();
+ promise(max_texel_count > 0);
+
+ for (unsigned int j = 0; j < max_texel_count; j++)
+ {
+ vint texel(di.weight_texels_tr[j] + i);
+ vfloat weight = loada(di.weights_texel_contribs_tr[j] + i);
+
+ if (!constant_wes)
+ {
+ weight_error_scale = gatherf(ei.weight_error_scale, texel);
+ }
+
+ vfloat contrib_weight = weight * weight_error_scale;
+
+ weight_weight += contrib_weight;
+ initial_weight += gatherf(ei.weights, texel) * contrib_weight;
+ }
+
+ storea(initial_weight / weight_weight, dec_weight_ideal_value + i);
+ }
+
+ // Populate the interpolated weight grid based on the initial average
+ // Process SIMD-width texel coordinates at a time while we can. Safe to
+ // over-process full SIMD vectors - the tail is zeroed.
+ if (di.max_texel_weight_count <= 2)
+ {
+ for (unsigned int i = 0; i < texel_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ vfloat weight = bilinear_infill_vla_2(di, dec_weight_ideal_value, i);
+ storea(weight, infilled_weights + i);
+ }
+ }
+ else
+ {
+ for (unsigned int i = 0; i < texel_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ vfloat weight = bilinear_infill_vla(di, dec_weight_ideal_value, i);
+ storea(weight, infilled_weights + i);
+ }
+ }
+
+ // Perform a single iteration of refinement
+ // Empirically determined step size; larger values don't help but smaller values drop image quality
+ constexpr float stepsize = 0.25f;
+ constexpr float chd_scale = -WEIGHTS_TEXEL_SUM;
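+
+ // Editor's note (not an upstream comment): each weight receives a damped,
+ // gradient-style correction; error_change1 accumulates the signed error of the
+ // current infilled weights against the ideal weights, error_change0 the
+ // corresponding positive weighting, and their clamped ratio nudges the weight
+ // towards lower error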
+
+ for (unsigned int i = 0; i < weight_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ vfloat weight_val = loada(dec_weight_ideal_value + i);
+
+ // Start the accumulators with a small value to avoid div-by-zero later
+ vfloat error_change0(1e-10f);
+ vfloat error_change1(0.0f);
+
+ // Accumulate error weighting of all the texels using this weight
+ vint weight_texel_count(di.weight_texel_count + i);
+ unsigned int max_texel_count = hmax(weight_texel_count).lane<0>();
+ promise(max_texel_count > 0);
+
+ for (unsigned int j = 0; j < max_texel_count; j++)
+ {
+ vint texel(di.weight_texels_tr[j] + i);
+ vfloat contrib_weight = loada(di.weights_texel_contribs_tr[j] + i);
+
+ if (!constant_wes)
+ {
+ weight_error_scale = gatherf(ei.weight_error_scale, texel);
+ }
+
+ vfloat scale = weight_error_scale * contrib_weight;
+ vfloat old_weight = gatherf(infilled_weights, texel);
+ vfloat ideal_weight = gatherf(ei.weights, texel);
+
+ error_change0 += contrib_weight * scale;
+ error_change1 += (old_weight - ideal_weight) * scale;
+ }
+
+ vfloat step = (error_change1 * chd_scale) / error_change0;
+ step = clamp(-stepsize, stepsize, step);
+
+ // Update the weight; note this can store negative values
+ storea(weight_val + step, dec_weight_ideal_value + i);
+ }
+}
+
+/* See header for documentation. */
+void compute_quantized_weights_for_decimation(
+ const decimation_info& di,
+ float low_bound,
+ float high_bound,
+ const float* dec_weight_ideal_value,
+ float* weight_set_out,
+ uint8_t* quantized_weight_set,
+ quant_method quant_level
+) {
+ int weight_count = di.weight_count;
+ promise(weight_count > 0);
+ const quant_and_transfer_table& qat = quant_and_xfer_tables[quant_level];
+
+ // The available quant levels, stored with a minus 1 bias
+ static const float quant_levels_m1[12] {
+ 1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 7.0f, 9.0f, 11.0f, 15.0f, 19.0f, 23.0f, 31.0f
+ };
+
+ vint steps_m1(get_quant_level(quant_level) - 1);
+ float quant_level_m1 = quant_levels_m1[quant_level];
+
+ // Quantize the weight set using both the specified low/high bounds and standard 0..1 bounds
+
+ // TODO: Oddity to investigate; triggered by test in issue #265.
+ if (high_bound <= low_bound)
+ {
+ low_bound = 0.0f;
+ high_bound = 1.0f;
+ }
+
+ float rscale = high_bound - low_bound;
+ float scale = 1.0f / rscale;
+
+ float scaled_low_bound = low_bound * scale;
+ rscale *= 1.0f / 64.0f;
+
+ vfloat scalev(scale);
+ vfloat scaled_low_boundv(scaled_low_bound);
+ vfloat quant_level_m1v(quant_level_m1);
+ vfloat rscalev(rscale);
+ vfloat low_boundv(low_bound);
+
+ // This runs to the rounded-up SIMD size, which is safe as the loop tail is filled with known
+ // safe data in compute_ideal_weights_for_decimation and arrays are always 64 elements
+ if (get_quant_level(quant_level) <= 16)
+ {
+ vint4 tab0(reinterpret_cast<const int*>(qat.quant_to_unquant));
+ vint tab0p;
+ vtable_prepare(tab0, tab0p);
+
+ for (int i = 0; i < weight_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ vfloat ix = loada(dec_weight_ideal_value + i) * scalev - scaled_low_boundv;
+ ix = clampzo(ix);
+
+ // Look up the two closest indexes and return the one that was closest
+ vfloat ix1 = ix * quant_level_m1v;
+
+ vint weightl = float_to_int(ix1);
+ vint weighth = min(weightl + vint(1), steps_m1);
+
+ vint ixli = vtable_8bt_32bi(tab0p, weightl);
+ vint ixhi = vtable_8bt_32bi(tab0p, weighth);
+
+ vfloat ixl = int_to_float(ixli);
+ vfloat ixh = int_to_float(ixhi);
+
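+ // The comparison tests the target (ix scaled to the 0..64 unquantized range)
+ // against the midpoint of the two candidate unquantized values, selecting the
+ // nearer one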
+ vmask mask = (ixl + ixh) < (vfloat(128.0f) * ix);
+ vint weight = select(ixli, ixhi, mask);
+ ixl = select(ixl, ixh, mask);
+
+ // Invert the weight-scaling that was done initially
+ storea(ixl * rscalev + low_boundv, weight_set_out + i);
+ vint scn = pack_low_bytes(weight);
+ store_nbytes(scn, quantized_weight_set + i);
+ }
+ }
+ else
+ {
+ vint4 tab0(reinterpret_cast<const int*>(qat.quant_to_unquant));
+ vint4 tab1(reinterpret_cast<const int*>(qat.quant_to_unquant + 16));
+ vint tab0p, tab1p;
+ vtable_prepare(tab0, tab1, tab0p, tab1p);
+
+ for (int i = 0; i < weight_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ vfloat ix = loada(dec_weight_ideal_value + i) * scalev - scaled_low_boundv;
+ ix = clampzo(ix);
+
+ // Look up the two closest indexes and return the one that was closest
+ vfloat ix1 = ix * quant_level_m1v;
+
+ vint weightl = float_to_int(ix1);
+ vint weighth = min(weightl + vint(1), steps_m1);
+
+ vint ixli = vtable_8bt_32bi(tab0p, tab1p, weightl);
+ vint ixhi = vtable_8bt_32bi(tab0p, tab1p, weighth);
+
+ vfloat ixl = int_to_float(ixli);
+ vfloat ixh = int_to_float(ixhi);
+
+ vmask mask = (ixl + ixh) < (vfloat(128.0f) * ix);
+ vint weight = select(ixli, ixhi, mask);
+ ixl = select(ixl, ixh, mask);
+
+ // Invert the weight-scaling that was done initially
+ storea(ixl * rscalev + low_boundv, weight_set_out + i);
+ vint scn = pack_low_bytes(weight);
+ store_nbytes(scn, quantized_weight_set + i);
+ }
+ }
+}
+
+/**
+ * @brief Compute the RGB + offset for HDR endpoint mode #7.
+ *
+ * Since the matrix needed has a regular structure we can simplify the inverse calculation. This
+ * gives us ~24 multiplications vs. 96 for a generic inverse.
+ *
+ * mat[0] = vfloat4(rgba_ws.x, 0.0f, 0.0f, wght_ws.x);
+ * mat[1] = vfloat4( 0.0f, rgba_ws.y, 0.0f, wght_ws.y);
+ * mat[2] = vfloat4( 0.0f, 0.0f, rgba_ws.z, wght_ws.z);
+ * mat[3] = vfloat4(wght_ws.x, wght_ws.y, wght_ws.z, psum);
+ * mat = invert(mat);
+ *
+ * @param rgba_weight_sum Sum of partition component error weights.
+ * @param weight_weight_sum Sum of partition component error weights * texel weight.
+ * @param rgbq_sum Sum of partition component error weights * texel weight * color data.
+ * @param psum Sum of RGB color weights * texel weight^2.
+ */
+static inline vfloat4 compute_rgbo_vector(
+ vfloat4 rgba_weight_sum,
+ vfloat4 weight_weight_sum,
+ vfloat4 rgbq_sum,
+ float psum
+) {
+ float X = rgba_weight_sum.lane<0>();
+ float Y = rgba_weight_sum.lane<1>();
+ float Z = rgba_weight_sum.lane<2>();
+ float P = weight_weight_sum.lane<0>();
+ float Q = weight_weight_sum.lane<1>();
+ float R = weight_weight_sum.lane<2>();
+ float S = psum;
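+
+ // The matrix described above is a symmetric arrowhead matrix (diagonal plus a
+ // coupled final row/column), which is why the closed-form adjugate below is cheap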
+
+ float PP = P * P;
+ float QQ = Q * Q;
+ float RR = R * R;
+
+ float SZmRR = S * Z - RR;
+ float DT = SZmRR * Y - Z * QQ;
+ float YP = Y * P;
+ float QX = Q * X;
+ float YX = Y * X;
+ float mZYP = -Z * YP;
+ float mZQX = -Z * QX;
+ float mRYX = -R * YX;
+ float ZQP = Z * Q * P;
+ float RYP = R * YP;
+ float RQX = R * QX;
+
+ // Compute the reciprocal of matrix determinant
+ float rdet = 1.0f / (DT * X + mZYP * P);
+
+ // Actually compute the adjugate, and then apply 1/det separately
+ vfloat4 mat0(DT, ZQP, RYP, mZYP);
+ vfloat4 mat1(ZQP, SZmRR * X - Z * PP, RQX, mZQX);
+ vfloat4 mat2(RYP, RQX, (S * Y - QQ) * X - Y * PP, mRYX);
+ vfloat4 mat3(mZYP, mZQX, mRYX, Z * YX);
+ vfloat4 vect = rgbq_sum * rdet;
+
+ return vfloat4(dot_s(mat0, vect),
+ dot_s(mat1, vect),
+ dot_s(mat2, vect),
+ dot_s(mat3, vect));
+}
+
+/* See header for documentation. */
+void recompute_ideal_colors_1plane(
+ const image_block& blk,
+ const partition_info& pi,
+ const decimation_info& di,
+ const uint8_t* dec_weights_uquant,
+ endpoints& ep,
+ vfloat4 rgbs_vectors[BLOCK_MAX_PARTITIONS],
+ vfloat4 rgbo_vectors[BLOCK_MAX_PARTITIONS]
+) {
+ unsigned int weight_count = di.weight_count;
+ unsigned int total_texel_count = blk.texel_count;
+ unsigned int partition_count = pi.partition_count;
+
+ promise(weight_count > 0);
+ promise(total_texel_count > 0);
+ promise(partition_count > 0);
+
+ alignas(ASTCENC_VECALIGN) float dec_weight[BLOCK_MAX_WEIGHTS];
+ for (unsigned int i = 0; i < weight_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ vint unquant_value(dec_weights_uquant + i);
+ vfloat unquant_valuef = int_to_float(unquant_value) * vfloat(1.0f / 64.0f);
+ storea(unquant_valuef, dec_weight + i);
+ }
+
+ alignas(ASTCENC_VECALIGN) float undec_weight[BLOCK_MAX_TEXELS];
+ float* undec_weight_ref;
+ if (di.max_texel_weight_count == 1)
+ {
+ undec_weight_ref = dec_weight;
+ }
+ else if (di.max_texel_weight_count <= 2)
+ {
+ for (unsigned int i = 0; i < total_texel_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ vfloat weight = bilinear_infill_vla_2(di, dec_weight, i);
+ storea(weight, undec_weight + i);
+ }
+
+ undec_weight_ref = undec_weight;
+ }
+ else
+ {
+ for (unsigned int i = 0; i < total_texel_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ vfloat weight = bilinear_infill_vla(di, dec_weight, i);
+ storea(weight, undec_weight + i);
+ }
+
+ undec_weight_ref = undec_weight;
+ }
+
+ vfloat4 rgba_sum(blk.data_mean * static_cast<float>(blk.texel_count));
+
+ for (unsigned int i = 0; i < partition_count; i++)
+ {
+ unsigned int texel_count = pi.partition_texel_count[i];
+ const uint8_t *texel_indexes = pi.texels_of_partition[i];
+
+ // Only compute a partition mean if there is more than one partition
+ if (partition_count > 1)
+ {
+ rgba_sum = vfloat4::zero();
+ promise(texel_count > 0);
+ for (unsigned int j = 0; j < texel_count; j++)
+ {
+ unsigned int tix = texel_indexes[j];
+ rgba_sum += blk.texel(tix);
+ }
+ }
+
+ rgba_sum = rgba_sum * blk.channel_weight;
+ vfloat4 rgba_weight_sum = max(blk.channel_weight * static_cast<float>(texel_count), 1e-17f);
+ vfloat4 scale_dir = normalize((rgba_sum / rgba_weight_sum).swz<0, 1, 2>());
+
+ float scale_max = 0.0f;
+ float scale_min = 1e10f;
+
+ float wmin1 = 1.0f;
+ float wmax1 = 0.0f;
+
+ float left_sum_s = 0.0f;
+ float middle_sum_s = 0.0f;
+ float right_sum_s = 0.0f;
+
+ vfloat4 color_vec_x = vfloat4::zero();
+ vfloat4 color_vec_y = vfloat4::zero();
+
+ vfloat4 scale_vec = vfloat4::zero();
+
+ float weight_weight_sum_s = 1e-17f;
+
+ vfloat4 color_weight = blk.channel_weight;
+ float ls_weight = hadd_rgb_s(color_weight);
+
+ for (unsigned int j = 0; j < texel_count; j++)
+ {
+ unsigned int tix = texel_indexes[j];
+ vfloat4 rgba = blk.texel(tix);
+
+ float idx0 = undec_weight_ref[tix];
+
+ float om_idx0 = 1.0f - idx0;
+ wmin1 = astc::min(idx0, wmin1);
+ wmax1 = astc::max(idx0, wmax1);
+
+ float scale = dot3_s(scale_dir, rgba);
+ scale_min = astc::min(scale, scale_min);
+ scale_max = astc::max(scale, scale_max);
+
+ left_sum_s += om_idx0 * om_idx0;
+ middle_sum_s += om_idx0 * idx0;
+ right_sum_s += idx0 * idx0;
+ weight_weight_sum_s += idx0;
+
+ vfloat4 color_idx(idx0);
+ vfloat4 cwprod = rgba;
+ vfloat4 cwiprod = cwprod * color_idx;
+
+ color_vec_y += cwiprod;
+ color_vec_x += cwprod - cwiprod;
+
+ scale_vec += vfloat2(om_idx0, idx0) * (scale * ls_weight);
+ }
+
+ vfloat4 left_sum = vfloat4(left_sum_s) * color_weight;
+ vfloat4 middle_sum = vfloat4(middle_sum_s) * color_weight;
+ vfloat4 right_sum = vfloat4(right_sum_s) * color_weight;
+ vfloat4 lmrs_sum = vfloat3(left_sum_s, middle_sum_s, right_sum_s) * ls_weight;
+
+ color_vec_x = color_vec_x * color_weight;
+ color_vec_y = color_vec_y * color_weight;
+
+ // Initialize the luminance and scale vectors with a reasonable default
+ float scalediv = scale_min / astc::max(scale_max, 1e-10f);
+ scalediv = astc::clamp1f(scalediv);
+
+ vfloat4 sds = scale_dir * scale_max;
+
+ rgbs_vectors[i] = vfloat4(sds.lane<0>(), sds.lane<1>(), sds.lane<2>(), scalediv);
+
+ if (wmin1 >= wmax1 * 0.999f)
+ {
+ // If all weights in the partition were equal, then just take the average of all colors in
+ // the partition and use that as both endpoint colors
+ vfloat4 avg = (color_vec_x + color_vec_y) / rgba_weight_sum;
+
+ vmask4 notnan_mask = avg == avg;
+ ep.endpt0[i] = select(ep.endpt0[i], avg, notnan_mask);
+ ep.endpt1[i] = select(ep.endpt1[i], avg, notnan_mask);
+
+ rgbs_vectors[i] = vfloat4(sds.lane<0>(), sds.lane<1>(), sds.lane<2>(), 1.0f);
+ }
+ else
+ {
+ // Otherwise, complete the analytic calculation of ideal-endpoint-values for the given
+ // set of texel weights and pixel colors
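+ //
+ // Editor's note (not an upstream comment): per channel this solves the 2x2
+ // normal equations for c ~= (1 - w) * ep0 + w * ep1, using the left/middle/right
+ // sums as the matrix and color_vec_x / color_vec_y as the right-hand side, via
+ // Cramer's rule with determinant color_det1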
+ vfloat4 color_det1 = (left_sum * right_sum) - (middle_sum * middle_sum);
+ vfloat4 color_rdet1 = 1.0f / color_det1;
+
+ float ls_det1 = (lmrs_sum.lane<0>() * lmrs_sum.lane<2>()) - (lmrs_sum.lane<1>() * lmrs_sum.lane<1>());
+ float ls_rdet1 = 1.0f / ls_det1;
+
+ vfloat4 color_mss1 = (left_sum * left_sum)
+ + (2.0f * middle_sum * middle_sum)
+ + (right_sum * right_sum);
+
+ float ls_mss1 = (lmrs_sum.lane<0>() * lmrs_sum.lane<0>())
+ + (2.0f * lmrs_sum.lane<1>() * lmrs_sum.lane<1>())
+ + (lmrs_sum.lane<2>() * lmrs_sum.lane<2>());
+
+ vfloat4 ep0 = (right_sum * color_vec_x - middle_sum * color_vec_y) * color_rdet1;
+ vfloat4 ep1 = (left_sum * color_vec_y - middle_sum * color_vec_x) * color_rdet1;
+
+ vmask4 det_mask = abs(color_det1) > (color_mss1 * 1e-4f);
+ vmask4 notnan_mask = (ep0 == ep0) & (ep1 == ep1);
+ vmask4 full_mask = det_mask & notnan_mask;
+
+ ep.endpt0[i] = select(ep.endpt0[i], ep0, full_mask);
+ ep.endpt1[i] = select(ep.endpt1[i], ep1, full_mask);
+
+ float scale_ep0 = (lmrs_sum.lane<2>() * scale_vec.lane<0>() - lmrs_sum.lane<1>() * scale_vec.lane<1>()) * ls_rdet1;
+ float scale_ep1 = (lmrs_sum.lane<0>() * scale_vec.lane<1>() - lmrs_sum.lane<1>() * scale_vec.lane<0>()) * ls_rdet1;
+
+ if (fabsf(ls_det1) > (ls_mss1 * 1e-4f) && scale_ep0 == scale_ep0 && scale_ep1 == scale_ep1 && scale_ep0 < scale_ep1)
+ {
+ float scalediv2 = scale_ep0 / scale_ep1;
+ vfloat4 sdsm = scale_dir * scale_ep1;
+ rgbs_vectors[i] = vfloat4(sdsm.lane<0>(), sdsm.lane<1>(), sdsm.lane<2>(), scalediv2);
+ }
+ }
+
+ // Calculations specific to mode #7, the HDR RGB-scale mode - skip if known LDR
+ if (blk.rgb_lns[0] || blk.alpha_lns[0])
+ {
+ vfloat4 weight_weight_sum = vfloat4(weight_weight_sum_s) * color_weight;
+ float psum = right_sum_s * hadd_rgb_s(color_weight);
+
+ vfloat4 rgbq_sum = color_vec_x + color_vec_y;
+ rgbq_sum.set_lane<3>(hadd_rgb_s(color_vec_y));
+
+ vfloat4 rgbovec = compute_rgbo_vector(rgba_weight_sum, weight_weight_sum, rgbq_sum, psum);
+ rgbo_vectors[i] = rgbovec;
+
+ // We can get a failure due to the use of a singular (non-invertible) matrix
+ // If it failed, compute rgbo_vectors[] with a different method ...
+ if (astc::isnan(dot_s(rgbovec, rgbovec)))
+ {
+ vfloat4 v0 = ep.endpt0[i];
+ vfloat4 v1 = ep.endpt1[i];
+
+ float avgdif = hadd_rgb_s(v1 - v0) * (1.0f / 3.0f);
+ avgdif = astc::max(avgdif, 0.0f);
+
+ vfloat4 avg = (v0 + v1) * 0.5f;
+ vfloat4 ep0 = avg - vfloat4(avgdif) * 0.5f;
+ rgbo_vectors[i] = vfloat4(ep0.lane<0>(), ep0.lane<1>(), ep0.lane<2>(), avgdif);
+ }
+ }
+ }
+}
+
+/* See header for documentation. */
+void recompute_ideal_colors_2planes(
+ const image_block& blk,
+ const block_size_descriptor& bsd,
+ const decimation_info& di,
+ const uint8_t* dec_weights_uquant_plane1,
+ const uint8_t* dec_weights_uquant_plane2,
+ endpoints& ep,
+ vfloat4& rgbs_vector,
+ vfloat4& rgbo_vector,
+ int plane2_component
+) {
+ unsigned int weight_count = di.weight_count;
+ unsigned int total_texel_count = blk.texel_count;
+
+ promise(total_texel_count > 0);
+ promise(weight_count > 0);
+
+ alignas(ASTCENC_VECALIGN) float dec_weight_plane1[BLOCK_MAX_WEIGHTS_2PLANE];
+ alignas(ASTCENC_VECALIGN) float dec_weight_plane2[BLOCK_MAX_WEIGHTS_2PLANE];
+
+ assert(weight_count <= BLOCK_MAX_WEIGHTS_2PLANE);
+
+ for (unsigned int i = 0; i < weight_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ vint unquant_value1(dec_weights_uquant_plane1 + i);
+ vfloat unquant_value1f = int_to_float(unquant_value1) * vfloat(1.0f / 64.0f);
+ storea(unquant_value1f, dec_weight_plane1 + i);
+
+ vint unquant_value2(dec_weights_uquant_plane2 + i);
+ vfloat unquant_value2f = int_to_float(unquant_value2) * vfloat(1.0f / 64.0f);
+ storea(unquant_value2f, dec_weight_plane2 + i);
+ }
+
+ alignas(ASTCENC_VECALIGN) float undec_weight_plane1[BLOCK_MAX_TEXELS];
+ alignas(ASTCENC_VECALIGN) float undec_weight_plane2[BLOCK_MAX_TEXELS];
+
+ float* undec_weight_plane1_ref;
+ float* undec_weight_plane2_ref;
+
+ if (di.max_texel_weight_count == 1)
+ {
+ undec_weight_plane1_ref = dec_weight_plane1;
+ undec_weight_plane2_ref = dec_weight_plane2;
+ }
+ else if (di.max_texel_weight_count <= 2)
+ {
+ for (unsigned int i = 0; i < total_texel_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ vfloat weight = bilinear_infill_vla_2(di, dec_weight_plane1, i);
+ storea(weight, undec_weight_plane1 + i);
+
+ weight = bilinear_infill_vla_2(di, dec_weight_plane2, i);
+ storea(weight, undec_weight_plane2 + i);
+ }
+
+ undec_weight_plane1_ref = undec_weight_plane1;
+ undec_weight_plane2_ref = undec_weight_plane2;
+ }
+ else
+ {
+ for (unsigned int i = 0; i < total_texel_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ vfloat weight = bilinear_infill_vla(di, dec_weight_plane1, i);
+ storea(weight, undec_weight_plane1 + i);
+
+ weight = bilinear_infill_vla(di, dec_weight_plane2, i);
+ storea(weight, undec_weight_plane2 + i);
+ }
+
+ undec_weight_plane1_ref = undec_weight_plane1;
+ undec_weight_plane2_ref = undec_weight_plane2;
+ }
+
+ unsigned int texel_count = bsd.texel_count;
+ vfloat4 rgba_weight_sum = max(blk.channel_weight * static_cast<float>(texel_count), 1e-17f);
+ vfloat4 scale_dir = normalize(blk.data_mean.swz<0, 1, 2>());
+
+ float scale_max = 0.0f;
+ float scale_min = 1e10f;
+
+ float wmin1 = 1.0f;
+ float wmax1 = 0.0f;
+
+ float wmin2 = 1.0f;
+ float wmax2 = 0.0f;
+
+ float left1_sum_s = 0.0f;
+ float middle1_sum_s = 0.0f;
+ float right1_sum_s = 0.0f;
+
+ float left2_sum_s = 0.0f;
+ float middle2_sum_s = 0.0f;
+ float right2_sum_s = 0.0f;
+
+ vfloat4 color_vec_x = vfloat4::zero();
+ vfloat4 color_vec_y = vfloat4::zero();
+
+ vfloat4 scale_vec = vfloat4::zero();
+
+ vfloat4 weight_weight_sum = vfloat4(1e-17f);
+
+ vmask4 p2_mask = vint4::lane_id() == vint4(plane2_component);
+ vfloat4 color_weight = blk.channel_weight;
+ float ls_weight = hadd_rgb_s(color_weight);
+
+ for (unsigned int j = 0; j < texel_count; j++)
+ {
+ vfloat4 rgba = blk.texel(j);
+
+ float idx0 = undec_weight_plane1_ref[j];
+
+ float om_idx0 = 1.0f - idx0;
+ wmin1 = astc::min(idx0, wmin1);
+ wmax1 = astc::max(idx0, wmax1);
+
+ float scale = dot3_s(scale_dir, rgba);
+ scale_min = astc::min(scale, scale_min);
+ scale_max = astc::max(scale, scale_max);
+
+ left1_sum_s += om_idx0 * om_idx0;
+ middle1_sum_s += om_idx0 * idx0;
+ right1_sum_s += idx0 * idx0;
+
+ float idx1 = undec_weight_plane2_ref[j];
+
+ float om_idx1 = 1.0f - idx1;
+ wmin2 = astc::min(idx1, wmin2);
+ wmax2 = astc::max(idx1, wmax2);
+
+ left2_sum_s += om_idx1 * om_idx1;
+ middle2_sum_s += om_idx1 * idx1;
+ right2_sum_s += idx1 * idx1;
+
+ vfloat4 color_idx = select(vfloat4(idx0), vfloat4(idx1), p2_mask);
+
+ vfloat4 cwprod = rgba;
+ vfloat4 cwiprod = cwprod * color_idx;
+
+ color_vec_y += cwiprod;
+ color_vec_x += cwprod - cwiprod;
+
+ scale_vec += vfloat2(om_idx0, idx0) * (ls_weight * scale);
+ weight_weight_sum += color_idx;
+ }
+
+ vfloat4 left1_sum = vfloat4(left1_sum_s) * color_weight;
+ vfloat4 middle1_sum = vfloat4(middle1_sum_s) * color_weight;
+ vfloat4 right1_sum = vfloat4(right1_sum_s) * color_weight;
+ vfloat4 lmrs_sum = vfloat3(left1_sum_s, middle1_sum_s, right1_sum_s) * ls_weight;
+
+ vfloat4 left2_sum = vfloat4(left2_sum_s) * color_weight;
+ vfloat4 middle2_sum = vfloat4(middle2_sum_s) * color_weight;
+ vfloat4 right2_sum = vfloat4(right2_sum_s) * color_weight;
+
+ color_vec_x = color_vec_x * color_weight;
+ color_vec_y = color_vec_y * color_weight;
+
+ // Initialize the luminance and scale vectors with a reasonable default
+ float scalediv = scale_min / astc::max(scale_max, 1e-10f);
+ scalediv = astc::clamp1f(scalediv);
+
+ vfloat4 sds = scale_dir * scale_max;
+
+ rgbs_vector = vfloat4(sds.lane<0>(), sds.lane<1>(), sds.lane<2>(), scalediv);
+
+ if (wmin1 >= wmax1 * 0.999f)
+ {
+ // If all weights in the partition were equal, then just take average of all colors in
+ // the partition and use that as both endpoint colors
+ vfloat4 avg = (color_vec_x + color_vec_y) / rgba_weight_sum;
+
+ vmask4 p1_mask = vint4::lane_id() != vint4(plane2_component);
+ vmask4 notnan_mask = avg == avg;
+ vmask4 full_mask = p1_mask & notnan_mask;
+
+ ep.endpt0[0] = select(ep.endpt0[0], avg, full_mask);
+ ep.endpt1[0] = select(ep.endpt1[0], avg, full_mask);
+
+ rgbs_vector = vfloat4(sds.lane<0>(), sds.lane<1>(), sds.lane<2>(), 1.0f);
+ }
+ else
+ {
+ // Otherwise, complete the analytic calculation of ideal-endpoint-values for the given
+ // set of texel weights and pixel colors
+ vfloat4 color_det1 = (left1_sum * right1_sum) - (middle1_sum * middle1_sum);
+ vfloat4 color_rdet1 = 1.0f / color_det1;
+
+ float ls_det1 = (lmrs_sum.lane<0>() * lmrs_sum.lane<2>()) - (lmrs_sum.lane<1>() * lmrs_sum.lane<1>());
+ float ls_rdet1 = 1.0f / ls_det1;
+
+ vfloat4 color_mss1 = (left1_sum * left1_sum)
+ + (2.0f * middle1_sum * middle1_sum)
+ + (right1_sum * right1_sum);
+
+ float ls_mss1 = (lmrs_sum.lane<0>() * lmrs_sum.lane<0>())
+ + (2.0f * lmrs_sum.lane<1>() * lmrs_sum.lane<1>())
+ + (lmrs_sum.lane<2>() * lmrs_sum.lane<2>());
+
+ vfloat4 ep0 = (right1_sum * color_vec_x - middle1_sum * color_vec_y) * color_rdet1;
+ vfloat4 ep1 = (left1_sum * color_vec_y - middle1_sum * color_vec_x) * color_rdet1;
+
+ float scale_ep0 = (lmrs_sum.lane<2>() * scale_vec.lane<0>() - lmrs_sum.lane<1>() * scale_vec.lane<1>()) * ls_rdet1;
+ float scale_ep1 = (lmrs_sum.lane<0>() * scale_vec.lane<1>() - lmrs_sum.lane<1>() * scale_vec.lane<0>()) * ls_rdet1;
+
+ vmask4 p1_mask = vint4::lane_id() != vint4(plane2_component);
+ vmask4 det_mask = abs(color_det1) > (color_mss1 * 1e-4f);
+ vmask4 notnan_mask = (ep0 == ep0) & (ep1 == ep1);
+ vmask4 full_mask = p1_mask & det_mask & notnan_mask;
+
+ ep.endpt0[0] = select(ep.endpt0[0], ep0, full_mask);
+ ep.endpt1[0] = select(ep.endpt1[0], ep1, full_mask);
+
+ if (fabsf(ls_det1) > (ls_mss1 * 1e-4f) && scale_ep0 == scale_ep0 && scale_ep1 == scale_ep1 && scale_ep0 < scale_ep1)
+ {
+ float scalediv2 = scale_ep0 / scale_ep1;
+ vfloat4 sdsm = scale_dir * scale_ep1;
+ rgbs_vector = vfloat4(sdsm.lane<0>(), sdsm.lane<1>(), sdsm.lane<2>(), scalediv2);
+ }
+ }
+
+ if (wmin2 >= wmax2 * 0.999f)
+ {
+ // If all weights in the partition were equal, then just take average of all colors in
+ // the partition and use that as both endpoint colors
+ vfloat4 avg = (color_vec_x + color_vec_y) / rgba_weight_sum;
+
+ vmask4 notnan_mask = avg == avg;
+ vmask4 full_mask = p2_mask & notnan_mask;
+
+ ep.endpt0[0] = select(ep.endpt0[0], avg, full_mask);
+ ep.endpt1[0] = select(ep.endpt1[0], avg, full_mask);
+ }
+ else
+ {
+ // Otherwise, complete the analytic calculation of ideal-endpoint-values for the given
+ // set of texel weights and pixel colors
+ vfloat4 color_det2 = (left2_sum * right2_sum) - (middle2_sum * middle2_sum);
+ vfloat4 color_rdet2 = 1.0f / color_det2;
+
+ vfloat4 color_mss2 = (left2_sum * left2_sum)
+ + (2.0f * middle2_sum * middle2_sum)
+ + (right2_sum * right2_sum);
+
+ vfloat4 ep0 = (right2_sum * color_vec_x - middle2_sum * color_vec_y) * color_rdet2;
+ vfloat4 ep1 = (left2_sum * color_vec_y - middle2_sum * color_vec_x) * color_rdet2;
+
+ vmask4 det_mask = abs(color_det2) > (color_mss2 * 1e-4f);
+ vmask4 notnan_mask = (ep0 == ep0) & (ep1 == ep1);
+ vmask4 full_mask = p2_mask & det_mask & notnan_mask;
+
+ ep.endpt0[0] = select(ep.endpt0[0], ep0, full_mask);
+ ep.endpt1[0] = select(ep.endpt1[0], ep1, full_mask);
+ }
+
+ // Calculations specific to mode #7, the HDR RGB-scale mode - skip if known LDR
+ if (blk.rgb_lns[0] || blk.alpha_lns[0])
+ {
+ weight_weight_sum = weight_weight_sum * color_weight;
+ float psum = dot3_s(select(right1_sum, right2_sum, p2_mask), color_weight);
+
+ vfloat4 rgbq_sum = color_vec_x + color_vec_y;
+ rgbq_sum.set_lane<3>(hadd_rgb_s(color_vec_y));
+
+ rgbo_vector = compute_rgbo_vector(rgba_weight_sum, weight_weight_sum, rgbq_sum, psum);
+
+ // We can get a failure due to the use of a singular (non-invertible) matrix
+ // If it failed, compute rgbo_vectors[] with a different method ...
+ if (astc::isnan(dot_s(rgbo_vector, rgbo_vector)))
+ {
+ vfloat4 v0 = ep.endpt0[0];
+ vfloat4 v1 = ep.endpt1[0];
+
+ float avgdif = hadd_rgb_s(v1 - v0) * (1.0f / 3.0f);
+ avgdif = astc::max(avgdif, 0.0f);
+
+ vfloat4 avg = (v0 + v1) * 0.5f;
+ vfloat4 ep0 = avg - vfloat4(avgdif) * 0.5f;
+
+ rgbo_vector = vfloat4(ep0.lane<0>(), ep0.lane<1>(), ep0.lane<2>(), avgdif);
+ }
+ }
+}
+
+#endif
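Per channel, the endpoint recomputation above reduces to a weighted 2x2 least-squares fit of the interpolant (1 - w) * ep0 + w * ep1 against the texel colors, solved with Cramer's rule and rejected when the determinant is near-singular or the result is NaN. A minimal scalar sketch of that solve, illustrative only and not part of astcenc (the per-channel error weights cancel within a single channel, so they are omitted):

#include <cmath>

// Scalar sketch of the 2x2 least-squares endpoint solve used above. 'w' holds
// the per-texel interpolation weights and 'c' the per-texel colors for one
// channel. Returns false when the system is near-singular or non-finite,
// mirroring the det_mask / notnan_mask guards in the vectorized code.
static bool solve_endpoints_1d(const float* w, const float* c, int n,
                               float& ep0, float& ep1)
{
    float left = 0.0f, middle = 0.0f, right = 0.0f; // sums of (1-w)^2, (1-w)w, w^2
    float cx = 0.0f, cy = 0.0f;                     // sums of (1-w)*c and w*c
    for (int i = 0; i < n; i++)
    {
        float om = 1.0f - w[i];
        left   += om * om;
        middle += om * w[i];
        right  += w[i] * w[i];
        cx     += om * c[i];
        cy     += w[i] * c[i];
    }

    float det = left * right - middle * middle;
    float mss = left * left + 2.0f * middle * middle + right * right;
    if (!(std::fabs(det) > mss * 1e-4f))
    {
        return false;
    }

    ep0 = (right * cx - middle * cy) / det;
    ep1 = (left  * cy - middle * cx) / det;
    return std::isfinite(ep0) && std::isfinite(ep1);
}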
diff --git a/thirdparty/astcenc/astcenc_image.cpp b/thirdparty/astcenc/astcenc_image.cpp
new file mode 100644
index 0000000000..9c0d6727d0
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_image.cpp
@@ -0,0 +1,558 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2011-2022 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+/**
+ * @brief Functions for creating in-memory ASTC image structures.
+ */
+
+#include <cassert>
+#include <cstring>
+
+#include "astcenc_internal.h"
+
+/**
+ * @brief Loader pipeline function type for data fetch from memory.
+ */
+using pixel_loader = vfloat4(*)(const void*, int);
+
+/**
+ * @brief Loader pipeline function type for swizzling data in a vector.
+ */
+using pixel_swizzler = vfloat4(*)(vfloat4, const astcenc_swizzle&);
+
+/**
+ * @brief Loader pipeline function type for converting data in a vector to LNS.
+ */
+using pixel_converter = vfloat4(*)(vfloat4, vmask4);
+
+/**
+ * @brief Load an 8-bit UNORM texel from a data array.
+ *
+ * @param data The data pointer.
+ * @param base_offset The index offset to the start of the pixel.
+ */
+static vfloat4 load_texel_u8(
+ const void* data,
+ int base_offset
+) {
+ const uint8_t* data8 = static_cast<const uint8_t*>(data);
+ return int_to_float(vint4(data8 + base_offset)) / 255.0f;
+}
+
+/**
+ * @brief Load a 16-bit fp16 texel from a data array.
+ *
+ * @param data The data pointer.
+ * @param base_offset The index offset to the start of the pixel.
+ */
+static vfloat4 load_texel_f16(
+ const void* data,
+ int base_offset
+) {
+ const uint16_t* data16 = static_cast<const uint16_t*>(data);
+ int r = data16[base_offset ];
+ int g = data16[base_offset + 1];
+ int b = data16[base_offset + 2];
+ int a = data16[base_offset + 3];
+ return float16_to_float(vint4(r, g, b, a));
+}
+
+/**
+ * @brief Load a 32-bit float texel from a data array.
+ *
+ * @param data The data pointer.
+ * @param base_offset The index offset to the start of the pixel.
+ */
+static vfloat4 load_texel_f32(
+ const void* data,
+ int base_offset
+) {
+ const float* data32 = static_cast<const float*>(data);
+ return vfloat4(data32 + base_offset);
+}
+
+/**
+ * @brief Dummy no-op swizzle function.
+ *
+ * @param data The source RGBA vector to swizzle.
+ * @param swz The swizzle to use.
+ */
+static vfloat4 swz_texel_skip(
+ vfloat4 data,
+ const astcenc_swizzle& swz
+) {
+ (void)swz;
+ return data;
+}
+
+/**
+ * @brief Swizzle a texel into a new arrangement.
+ *
+ * @param data The source RGBA vector to swizzle.
+ * @param swz The swizzle to use.
+ */
+static vfloat4 swz_texel(
+ vfloat4 data,
+ const astcenc_swizzle& swz
+) {
+ alignas(16) float datas[6];
+
+ storea(data, datas);
+ datas[ASTCENC_SWZ_0] = 0.0f;
+ datas[ASTCENC_SWZ_1] = 1.0f;
+
+ return vfloat4(datas[swz.r], datas[swz.g], datas[swz.b], datas[swz.a]);
+}
+
+/**
+ * @brief Encode a texel that is entirely LDR linear.
+ *
+ * @param data The RGBA data to encode.
+ * @param lns_mask The mask for the HDR channels that need LNS encoding.
+ */
+static vfloat4 encode_texel_unorm(
+ vfloat4 data,
+ vmask4 lns_mask
+) {
+ (void)lns_mask;
+ return data * 65535.0f;
+}
+
+/**
+ * @brief Encode a texel that includes at least some HDR LNS texels.
+ *
+ * @param data The RGBA data to encode.
+ * @param lns_mask The mask for the HDR channels that need LNS encoding.
+ */
+static vfloat4 encode_texel_lns(
+ vfloat4 data,
+ vmask4 lns_mask
+) {
+ vfloat4 datav_unorm = data * 65535.0f;
+ vfloat4 datav_lns = float_to_lns(data);
+ return select(datav_unorm, datav_lns, lns_mask);
+}
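
The three function pointer types declared at the top of this file are filled in once per block and then composed per texel. A minimal sketch of that composition, illustrative only; plane, offset, swz and lns_mask stand in for the values load_image_block() computes below:

// Illustrative composition of the loader pipeline for a single texel.
pixel_loader    loader    = load_texel_u8;     // or load_texel_f16 / load_texel_f32
pixel_swizzler  swizzler  = swz_texel;         // or swz_texel_skip for identity swizzles
pixel_converter converter = encode_texel_lns;  // or encode_texel_unorm for pure LDR blocks

vfloat4 texel = converter(swizzler(loader(plane, offset), swz), lns_mask);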
+
+/* See header for documentation. */
+void load_image_block(
+ astcenc_profile decode_mode,
+ const astcenc_image& img,
+ image_block& blk,
+ const block_size_descriptor& bsd,
+ unsigned int xpos,
+ unsigned int ypos,
+ unsigned int zpos,
+ const astcenc_swizzle& swz
+) {
+ unsigned int xsize = img.dim_x;
+ unsigned int ysize = img.dim_y;
+ unsigned int zsize = img.dim_z;
+
+ blk.xpos = xpos;
+ blk.ypos = ypos;
+ blk.zpos = zpos;
+
+ // True if any non-identity swizzle
+ bool needs_swz = (swz.r != ASTCENC_SWZ_R) || (swz.g != ASTCENC_SWZ_G) ||
+ (swz.b != ASTCENC_SWZ_B) || (swz.a != ASTCENC_SWZ_A);
+
+ int idx = 0;
+
+ vfloat4 data_min(1e38f);
+ vfloat4 data_mean(0.0f);
+ vfloat4 data_mean_scale(1.0f / static_cast<float>(bsd.texel_count));
+ vfloat4 data_max(-1e38f);
+ vmask4 grayscalev(true);
+
+ // This works because we impose the same choice everywhere during encode
+ uint8_t rgb_lns = (decode_mode == ASTCENC_PRF_HDR) ||
+ (decode_mode == ASTCENC_PRF_HDR_RGB_LDR_A) ? 1 : 0;
+ uint8_t a_lns = decode_mode == ASTCENC_PRF_HDR ? 1 : 0;
+ vint4 use_lns(rgb_lns, rgb_lns, rgb_lns, a_lns);
+ vmask4 lns_mask = use_lns != vint4::zero();
+
+ // Set up the function pointers for loading pipeline as needed
+ pixel_loader loader = load_texel_u8;
+ if (img.data_type == ASTCENC_TYPE_F16)
+ {
+ loader = load_texel_f16;
+ }
+ else if (img.data_type == ASTCENC_TYPE_F32)
+ {
+ loader = load_texel_f32;
+ }
+
+ pixel_swizzler swizzler = swz_texel_skip;
+ if (needs_swz)
+ {
+ swizzler = swz_texel;
+ }
+
+ pixel_converter converter = encode_texel_unorm;
+ if (any(lns_mask))
+ {
+ converter = encode_texel_lns;
+ }
+
+ for (unsigned int z = 0; z < bsd.zdim; z++)
+ {
+ unsigned int zi = astc::min(zpos + z, zsize - 1);
+ void* plane = img.data[zi];
+
+ for (unsigned int y = 0; y < bsd.ydim; y++)
+ {
+ unsigned int yi = astc::min(ypos + y, ysize - 1);
+
+ for (unsigned int x = 0; x < bsd.xdim; x++)
+ {
+ unsigned int xi = astc::min(xpos + x, xsize - 1);
+
+ vfloat4 datav = loader(plane, (4 * xsize * yi) + (4 * xi));
+ datav = swizzler(datav, swz);
+ datav = converter(datav, lns_mask);
+
+ // Compute block metadata
+ data_min = min(data_min, datav);
+ data_mean += datav * data_mean_scale;
+ data_max = max(data_max, datav);
+
+ grayscalev = grayscalev & (datav.swz<0,0,0,0>() == datav.swz<1,1,2,2>());
+
+ blk.data_r[idx] = datav.lane<0>();
+ blk.data_g[idx] = datav.lane<1>();
+ blk.data_b[idx] = datav.lane<2>();
+ blk.data_a[idx] = datav.lane<3>();
+
+ blk.rgb_lns[idx] = rgb_lns;
+ blk.alpha_lns[idx] = a_lns;
+
+ idx++;
+ }
+ }
+ }
+
+ // Reverse the encoding so we store the origin texel in its original input format
+ vfloat4 data_enc = blk.texel(0);
+ vfloat4 data_enc_unorm = data_enc / 65535.0f;
+ vfloat4 data_enc_lns = vfloat4::zero();
+
+ if (rgb_lns || a_lns)
+ {
+ data_enc_lns = float16_to_float(lns_to_sf16(float_to_int(data_enc)));
+ }
+
+ blk.origin_texel = select(data_enc_unorm, data_enc_lns, lns_mask);
+
+ // Store block metadata
+ blk.data_min = data_min;
+ blk.data_mean = data_mean;
+ blk.data_max = data_max;
+ blk.grayscale = all(grayscalev);
+}
+
+/* See header for documentation. */
+void load_image_block_fast_ldr(
+ astcenc_profile decode_mode,
+ const astcenc_image& img,
+ image_block& blk,
+ const block_size_descriptor& bsd,
+ unsigned int xpos,
+ unsigned int ypos,
+ unsigned int zpos,
+ const astcenc_swizzle& swz
+) {
+ (void)swz;
+ (void)decode_mode;
+
+ unsigned int xsize = img.dim_x;
+ unsigned int ysize = img.dim_y;
+
+ blk.xpos = xpos;
+ blk.ypos = ypos;
+ blk.zpos = zpos;
+
+ vfloat4 data_min(1e38f);
+ vfloat4 data_mean = vfloat4::zero();
+ vfloat4 data_max(-1e38f);
+ vmask4 grayscalev(true);
+ int idx = 0;
+
+ const uint8_t* plane = static_cast<const uint8_t*>(img.data[0]);
+ for (unsigned int y = ypos; y < ypos + bsd.ydim; y++)
+ {
+ unsigned int yi = astc::min(y, ysize - 1);
+
+ for (unsigned int x = xpos; x < xpos + bsd.xdim; x++)
+ {
+ unsigned int xi = astc::min(x, xsize - 1);
+
+ vint4 datavi = vint4(plane + (4 * xsize * yi) + (4 * xi));
+ vfloat4 datav = int_to_float(datavi) * (65535.0f / 255.0f);
+
+ // Compute block metadata
+ data_min = min(data_min, datav);
+ data_mean += datav;
+ data_max = max(data_max, datav);
+
+ grayscalev = grayscalev & (datav.swz<0,0,0,0>() == datav.swz<1,1,2,2>());
+
+ blk.data_r[idx] = datav.lane<0>();
+ blk.data_g[idx] = datav.lane<1>();
+ blk.data_b[idx] = datav.lane<2>();
+ blk.data_a[idx] = datav.lane<3>();
+
+ idx++;
+ }
+ }
+
+ // Reverse the encoding so we store the origin texel in its original input format
+ blk.origin_texel = blk.texel(0) / 65535.0f;
+
+ // Store block metadata
+ blk.rgb_lns[0] = 0;
+ blk.alpha_lns[0] = 0;
+ blk.data_min = data_min;
+ blk.data_mean = data_mean / static_cast<float>(bsd.texel_count);
+ blk.data_max = data_max;
+ blk.grayscale = all(grayscalev);
+}
+
+/* See header for documentation. */
+void store_image_block(
+ astcenc_image& img,
+ const image_block& blk,
+ const block_size_descriptor& bsd,
+ unsigned int xpos,
+ unsigned int ypos,
+ unsigned int zpos,
+ const astcenc_swizzle& swz
+) {
+ unsigned int x_size = img.dim_x;
+ unsigned int x_start = xpos;
+ unsigned int x_end = astc::min(x_size, xpos + bsd.xdim);
+ unsigned int x_count = x_end - x_start;
+ unsigned int x_nudge = bsd.xdim - x_count;
+
+ unsigned int y_size = img.dim_y;
+ unsigned int y_start = ypos;
+ unsigned int y_end = astc::min(y_size, ypos + bsd.ydim);
+ unsigned int y_count = y_end - y_start;
+ unsigned int y_nudge = (bsd.ydim - y_count) * bsd.xdim;
+
+ unsigned int z_size = img.dim_z;
+ unsigned int z_start = zpos;
+ unsigned int z_end = astc::min(z_size, zpos + bsd.zdim);
+
+ // True if any non-identity swizzle
+ bool needs_swz = (swz.r != ASTCENC_SWZ_R) || (swz.g != ASTCENC_SWZ_G) ||
+ (swz.b != ASTCENC_SWZ_B) || (swz.a != ASTCENC_SWZ_A);
+
+ // True if any swizzle uses Z reconstruct
+ bool needs_z = (swz.r == ASTCENC_SWZ_Z) || (swz.g == ASTCENC_SWZ_Z) ||
+ (swz.b == ASTCENC_SWZ_Z) || (swz.a == ASTCENC_SWZ_Z);
+
+ int idx = 0;
+ if (img.data_type == ASTCENC_TYPE_U8)
+ {
+ for (unsigned int z = z_start; z < z_end; z++)
+ {
+ // Fetch the image plane
+ uint8_t* data8 = static_cast<uint8_t*>(img.data[z]);
+
+ for (unsigned int y = y_start; y < y_end; y++)
+ {
+ uint8_t* data8_row = data8 + (4 * x_size * y) + (4 * x_start);
+
+ for (unsigned int x = 0; x < x_count; x += ASTCENC_SIMD_WIDTH)
+ {
+ unsigned int max_texels = ASTCENC_SIMD_WIDTH;
+ unsigned int used_texels = astc::min(x_count - x, max_texels);
+
+ // Unaligned load as rows are not always SIMD_WIDTH long
+ vfloat data_r(blk.data_r + idx);
+ vfloat data_g(blk.data_g + idx);
+ vfloat data_b(blk.data_b + idx);
+ vfloat data_a(blk.data_a + idx);
+
+ vint data_ri = float_to_int_rtn(min(data_r, 1.0f) * 255.0f);
+ vint data_gi = float_to_int_rtn(min(data_g, 1.0f) * 255.0f);
+ vint data_bi = float_to_int_rtn(min(data_b, 1.0f) * 255.0f);
+ vint data_ai = float_to_int_rtn(min(data_a, 1.0f) * 255.0f);
+
+ if (needs_swz)
+ {
+ vint swizzle_table[7];
+ swizzle_table[ASTCENC_SWZ_0] = vint(0);
+ swizzle_table[ASTCENC_SWZ_1] = vint(255);
+ swizzle_table[ASTCENC_SWZ_R] = data_ri;
+ swizzle_table[ASTCENC_SWZ_G] = data_gi;
+ swizzle_table[ASTCENC_SWZ_B] = data_bi;
+ swizzle_table[ASTCENC_SWZ_A] = data_ai;
+
+ if (needs_z)
+ {
+ vfloat data_x = (data_r * vfloat(2.0f)) - vfloat(1.0f);
+ vfloat data_y = (data_a * vfloat(2.0f)) - vfloat(1.0f);
+ vfloat data_z = vfloat(1.0f) - (data_x * data_x) - (data_y * data_y);
+ data_z = max(data_z, 0.0f);
+ data_z = (sqrt(data_z) * vfloat(0.5f)) + vfloat(0.5f);
+
+ swizzle_table[ASTCENC_SWZ_Z] = float_to_int_rtn(min(data_z, 1.0f) * 255.0f);
+ }
+
+ data_ri = swizzle_table[swz.r];
+ data_gi = swizzle_table[swz.g];
+ data_bi = swizzle_table[swz.b];
+ data_ai = swizzle_table[swz.a];
+ }
+
+ // Errors are NaN encoded - convert to magenta error color
+ // Branch is OK here - it is almost never true so predicts well
+ vmask nan_mask = data_r != data_r;
+ if (any(nan_mask))
+ {
+ data_ri = select(data_ri, vint(0xFF), nan_mask);
+ data_gi = select(data_gi, vint(0x00), nan_mask);
+ data_bi = select(data_bi, vint(0xFF), nan_mask);
+ data_ai = select(data_ai, vint(0xFF), nan_mask);
+ }
+
+ vint data_rgbai = interleave_rgba8(data_ri, data_gi, data_bi, data_ai);
+ vmask store_mask = vint::lane_id() < vint(used_texels);
+ store_lanes_masked(reinterpret_cast<int*>(data8_row), data_rgbai, store_mask);
+
+ data8_row += ASTCENC_SIMD_WIDTH * 4;
+ idx += used_texels;
+ }
+ idx += x_nudge;
+ }
+ idx += y_nudge;
+ }
+ }
+ else if (img.data_type == ASTCENC_TYPE_F16)
+ {
+ for (unsigned int z = z_start; z < z_end; z++)
+ {
+ // Fetch the image plane
+ uint16_t* data16 = static_cast<uint16_t*>(img.data[z]);
+
+ for (unsigned int y = y_start; y < y_end; y++)
+ {
+ uint16_t* data16_row = data16 + (4 * x_size * y) + (4 * x_start);
+
+ for (unsigned int x = 0; x < x_count; x++)
+ {
+ vint4 color;
+
+ // NaNs are handled inline - no need to special case
+ if (needs_swz)
+ {
+ float data[7];
+ data[ASTCENC_SWZ_0] = 0.0f;
+ data[ASTCENC_SWZ_1] = 1.0f;
+ data[ASTCENC_SWZ_R] = blk.data_r[idx];
+ data[ASTCENC_SWZ_G] = blk.data_g[idx];
+ data[ASTCENC_SWZ_B] = blk.data_b[idx];
+ data[ASTCENC_SWZ_A] = blk.data_a[idx];
+
+ if (needs_z)
+ {
+ float xN = (data[0] * 2.0f) - 1.0f;
+ float yN = (data[3] * 2.0f) - 1.0f;
+ float zN = 1.0f - xN * xN - yN * yN;
+ if (zN < 0.0f)
+ {
+ zN = 0.0f;
+ }
+ data[ASTCENC_SWZ_Z] = (astc::sqrt(zN) * 0.5f) + 0.5f;
+ }
+
+ vfloat4 colorf(data[swz.r], data[swz.g], data[swz.b], data[swz.a]);
+ color = float_to_float16(colorf);
+ }
+ else
+ {
+ vfloat4 colorf = blk.texel(idx);
+ color = float_to_float16(colorf);
+ }
+
+ // TODO: Vectorize with store N shorts?
+ data16_row[0] = static_cast<uint16_t>(color.lane<0>());
+ data16_row[1] = static_cast<uint16_t>(color.lane<1>());
+ data16_row[2] = static_cast<uint16_t>(color.lane<2>());
+ data16_row[3] = static_cast<uint16_t>(color.lane<3>());
+ data16_row += 4;
+ idx++;
+ }
+ idx += x_nudge;
+ }
+ idx += y_nudge;
+ }
+ }
+ else // if (img.data_type == ASTCENC_TYPE_F32)
+ {
+ assert(img.data_type == ASTCENC_TYPE_F32);
+
+ for (unsigned int z = z_start; z < z_end; z++)
+ {
+ // Fetch the image plane
+ float* data32 = static_cast<float*>(img.data[z]);
+
+ for (unsigned int y = y_start; y < y_end; y++)
+ {
+ float* data32_row = data32 + (4 * x_size * y) + (4 * x_start);
+
+ for (unsigned int x = 0; x < x_count; x++)
+ {
+ vfloat4 color = blk.texel(idx);
+
+ // NaNs are handled inline - no need to special case
+ if (needs_swz)
+ {
+ float data[7];
+ data[ASTCENC_SWZ_0] = 0.0f;
+ data[ASTCENC_SWZ_1] = 1.0f;
+ data[ASTCENC_SWZ_R] = color.lane<0>();
+ data[ASTCENC_SWZ_G] = color.lane<1>();
+ data[ASTCENC_SWZ_B] = color.lane<2>();
+ data[ASTCENC_SWZ_A] = color.lane<3>();
+
+ if (needs_z)
+ {
+ float xN = (data[0] * 2.0f) - 1.0f;
+ float yN = (data[3] * 2.0f) - 1.0f;
+ float zN = 1.0f - xN * xN - yN * yN;
+ if (zN < 0.0f)
+ {
+ zN = 0.0f;
+ }
+ data[ASTCENC_SWZ_Z] = (astc::sqrt(zN) * 0.5f) + 0.5f;
+ }
+
+ color = vfloat4(data[swz.r], data[swz.g], data[swz.b], data[swz.a]);
+ }
+
+ store(color, data32_row);
+ data32_row += 4;
+ idx++;
+ }
+ idx += x_nudge;
+ }
+ idx += y_nudge;
+ }
+ }
+}
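The ASTCENC_SWZ_Z paths above reconstruct the Z component of a unit-length normal from X and Y stored in the R and A channels. A self-contained scalar form of that mapping, for illustration:

#include <algorithm>
#include <cmath>

// Reconstruct a [0,1]-encoded normal Z from [0,1]-encoded X (in R) and Y (in A),
// matching the ASTCENC_SWZ_Z reconstruction used in store_image_block().
static float reconstruct_normal_z(float r, float a)
{
    float x = r * 2.0f - 1.0f;
    float y = a * 2.0f - 1.0f;
    float z2 = std::max(1.0f - x * x - y * y, 0.0f);
    return std::sqrt(z2) * 0.5f + 0.5f;
}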
diff --git a/thirdparty/astcenc/astcenc_integer_sequence.cpp b/thirdparty/astcenc/astcenc_integer_sequence.cpp
new file mode 100644
index 0000000000..416750374d
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_integer_sequence.cpp
@@ -0,0 +1,739 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2011-2021 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+/**
+ * @brief Functions for encoding/decoding Bounded Integer Sequence Encoding.
+ */
+
+#include "astcenc_internal.h"
+
+#include <array>
+
+/** @brief Unpacked quint triplets <low,middle,high> for each packed value */
+// TODO: Bitpack these into a uint16_t?
+static const uint8_t quints_of_integer[128][3] {
+ {0, 0, 0}, {1, 0, 0}, {2, 0, 0}, {3, 0, 0},
+ {4, 0, 0}, {0, 4, 0}, {4, 4, 0}, {4, 4, 4},
+ {0, 1, 0}, {1, 1, 0}, {2, 1, 0}, {3, 1, 0},
+ {4, 1, 0}, {1, 4, 0}, {4, 4, 1}, {4, 4, 4},
+ {0, 2, 0}, {1, 2, 0}, {2, 2, 0}, {3, 2, 0},
+ {4, 2, 0}, {2, 4, 0}, {4, 4, 2}, {4, 4, 4},
+ {0, 3, 0}, {1, 3, 0}, {2, 3, 0}, {3, 3, 0},
+ {4, 3, 0}, {3, 4, 0}, {4, 4, 3}, {4, 4, 4},
+ {0, 0, 1}, {1, 0, 1}, {2, 0, 1}, {3, 0, 1},
+ {4, 0, 1}, {0, 4, 1}, {4, 0, 4}, {0, 4, 4},
+ {0, 1, 1}, {1, 1, 1}, {2, 1, 1}, {3, 1, 1},
+ {4, 1, 1}, {1, 4, 1}, {4, 1, 4}, {1, 4, 4},
+ {0, 2, 1}, {1, 2, 1}, {2, 2, 1}, {3, 2, 1},
+ {4, 2, 1}, {2, 4, 1}, {4, 2, 4}, {2, 4, 4},
+ {0, 3, 1}, {1, 3, 1}, {2, 3, 1}, {3, 3, 1},
+ {4, 3, 1}, {3, 4, 1}, {4, 3, 4}, {3, 4, 4},
+ {0, 0, 2}, {1, 0, 2}, {2, 0, 2}, {3, 0, 2},
+ {4, 0, 2}, {0, 4, 2}, {2, 0, 4}, {3, 0, 4},
+ {0, 1, 2}, {1, 1, 2}, {2, 1, 2}, {3, 1, 2},
+ {4, 1, 2}, {1, 4, 2}, {2, 1, 4}, {3, 1, 4},
+ {0, 2, 2}, {1, 2, 2}, {2, 2, 2}, {3, 2, 2},
+ {4, 2, 2}, {2, 4, 2}, {2, 2, 4}, {3, 2, 4},
+ {0, 3, 2}, {1, 3, 2}, {2, 3, 2}, {3, 3, 2},
+ {4, 3, 2}, {3, 4, 2}, {2, 3, 4}, {3, 3, 4},
+ {0, 0, 3}, {1, 0, 3}, {2, 0, 3}, {3, 0, 3},
+ {4, 0, 3}, {0, 4, 3}, {0, 0, 4}, {1, 0, 4},
+ {0, 1, 3}, {1, 1, 3}, {2, 1, 3}, {3, 1, 3},
+ {4, 1, 3}, {1, 4, 3}, {0, 1, 4}, {1, 1, 4},
+ {0, 2, 3}, {1, 2, 3}, {2, 2, 3}, {3, 2, 3},
+ {4, 2, 3}, {2, 4, 3}, {0, 2, 4}, {1, 2, 4},
+ {0, 3, 3}, {1, 3, 3}, {2, 3, 3}, {3, 3, 3},
+ {4, 3, 3}, {3, 4, 3}, {0, 3, 4}, {1, 3, 4}
+};
+
+/** @brief Packed quint values for each unpacked value, indexed [hi][mid][lo]. */
+static const uint8_t integer_of_quints[5][5][5] {
+ {
+ {0, 1, 2, 3, 4},
+ {8, 9, 10, 11, 12},
+ {16, 17, 18, 19, 20},
+ {24, 25, 26, 27, 28},
+ {5, 13, 21, 29, 6}
+ },
+ {
+ {32, 33, 34, 35, 36},
+ {40, 41, 42, 43, 44},
+ {48, 49, 50, 51, 52},
+ {56, 57, 58, 59, 60},
+ {37, 45, 53, 61, 14}
+ },
+ {
+ {64, 65, 66, 67, 68},
+ {72, 73, 74, 75, 76},
+ {80, 81, 82, 83, 84},
+ {88, 89, 90, 91, 92},
+ {69, 77, 85, 93, 22}
+ },
+ {
+ {96, 97, 98, 99, 100},
+ {104, 105, 106, 107, 108},
+ {112, 113, 114, 115, 116},
+ {120, 121, 122, 123, 124},
+ {101, 109, 117, 125, 30}
+ },
+ {
+ {102, 103, 70, 71, 38},
+ {110, 111, 78, 79, 46},
+ {118, 119, 86, 87, 54},
+ {126, 127, 94, 95, 62},
+ {39, 47, 55, 63, 31}
+ }
+};
+
+/** @brief Unpacked trit quintuplets <low,...,high> for each packed value */
+// TODO: Bitpack these into a uint16_t?
+static const uint8_t trits_of_integer[256][5] {
+ {0, 0, 0, 0, 0}, {1, 0, 0, 0, 0}, {2, 0, 0, 0, 0}, {0, 0, 2, 0, 0},
+ {0, 1, 0, 0, 0}, {1, 1, 0, 0, 0}, {2, 1, 0, 0, 0}, {1, 0, 2, 0, 0},
+ {0, 2, 0, 0, 0}, {1, 2, 0, 0, 0}, {2, 2, 0, 0, 0}, {2, 0, 2, 0, 0},
+ {0, 2, 2, 0, 0}, {1, 2, 2, 0, 0}, {2, 2, 2, 0, 0}, {2, 0, 2, 0, 0},
+ {0, 0, 1, 0, 0}, {1, 0, 1, 0, 0}, {2, 0, 1, 0, 0}, {0, 1, 2, 0, 0},
+ {0, 1, 1, 0, 0}, {1, 1, 1, 0, 0}, {2, 1, 1, 0, 0}, {1, 1, 2, 0, 0},
+ {0, 2, 1, 0, 0}, {1, 2, 1, 0, 0}, {2, 2, 1, 0, 0}, {2, 1, 2, 0, 0},
+ {0, 0, 0, 2, 2}, {1, 0, 0, 2, 2}, {2, 0, 0, 2, 2}, {0, 0, 2, 2, 2},
+ {0, 0, 0, 1, 0}, {1, 0, 0, 1, 0}, {2, 0, 0, 1, 0}, {0, 0, 2, 1, 0},
+ {0, 1, 0, 1, 0}, {1, 1, 0, 1, 0}, {2, 1, 0, 1, 0}, {1, 0, 2, 1, 0},
+ {0, 2, 0, 1, 0}, {1, 2, 0, 1, 0}, {2, 2, 0, 1, 0}, {2, 0, 2, 1, 0},
+ {0, 2, 2, 1, 0}, {1, 2, 2, 1, 0}, {2, 2, 2, 1, 0}, {2, 0, 2, 1, 0},
+ {0, 0, 1, 1, 0}, {1, 0, 1, 1, 0}, {2, 0, 1, 1, 0}, {0, 1, 2, 1, 0},
+ {0, 1, 1, 1, 0}, {1, 1, 1, 1, 0}, {2, 1, 1, 1, 0}, {1, 1, 2, 1, 0},
+ {0, 2, 1, 1, 0}, {1, 2, 1, 1, 0}, {2, 2, 1, 1, 0}, {2, 1, 2, 1, 0},
+ {0, 1, 0, 2, 2}, {1, 1, 0, 2, 2}, {2, 1, 0, 2, 2}, {1, 0, 2, 2, 2},
+ {0, 0, 0, 2, 0}, {1, 0, 0, 2, 0}, {2, 0, 0, 2, 0}, {0, 0, 2, 2, 0},
+ {0, 1, 0, 2, 0}, {1, 1, 0, 2, 0}, {2, 1, 0, 2, 0}, {1, 0, 2, 2, 0},
+ {0, 2, 0, 2, 0}, {1, 2, 0, 2, 0}, {2, 2, 0, 2, 0}, {2, 0, 2, 2, 0},
+ {0, 2, 2, 2, 0}, {1, 2, 2, 2, 0}, {2, 2, 2, 2, 0}, {2, 0, 2, 2, 0},
+ {0, 0, 1, 2, 0}, {1, 0, 1, 2, 0}, {2, 0, 1, 2, 0}, {0, 1, 2, 2, 0},
+ {0, 1, 1, 2, 0}, {1, 1, 1, 2, 0}, {2, 1, 1, 2, 0}, {1, 1, 2, 2, 0},
+ {0, 2, 1, 2, 0}, {1, 2, 1, 2, 0}, {2, 2, 1, 2, 0}, {2, 1, 2, 2, 0},
+ {0, 2, 0, 2, 2}, {1, 2, 0, 2, 2}, {2, 2, 0, 2, 2}, {2, 0, 2, 2, 2},
+ {0, 0, 0, 0, 2}, {1, 0, 0, 0, 2}, {2, 0, 0, 0, 2}, {0, 0, 2, 0, 2},
+ {0, 1, 0, 0, 2}, {1, 1, 0, 0, 2}, {2, 1, 0, 0, 2}, {1, 0, 2, 0, 2},
+ {0, 2, 0, 0, 2}, {1, 2, 0, 0, 2}, {2, 2, 0, 0, 2}, {2, 0, 2, 0, 2},
+ {0, 2, 2, 0, 2}, {1, 2, 2, 0, 2}, {2, 2, 2, 0, 2}, {2, 0, 2, 0, 2},
+ {0, 0, 1, 0, 2}, {1, 0, 1, 0, 2}, {2, 0, 1, 0, 2}, {0, 1, 2, 0, 2},
+ {0, 1, 1, 0, 2}, {1, 1, 1, 0, 2}, {2, 1, 1, 0, 2}, {1, 1, 2, 0, 2},
+ {0, 2, 1, 0, 2}, {1, 2, 1, 0, 2}, {2, 2, 1, 0, 2}, {2, 1, 2, 0, 2},
+ {0, 2, 2, 2, 2}, {1, 2, 2, 2, 2}, {2, 2, 2, 2, 2}, {2, 0, 2, 2, 2},
+ {0, 0, 0, 0, 1}, {1, 0, 0, 0, 1}, {2, 0, 0, 0, 1}, {0, 0, 2, 0, 1},
+ {0, 1, 0, 0, 1}, {1, 1, 0, 0, 1}, {2, 1, 0, 0, 1}, {1, 0, 2, 0, 1},
+ {0, 2, 0, 0, 1}, {1, 2, 0, 0, 1}, {2, 2, 0, 0, 1}, {2, 0, 2, 0, 1},
+ {0, 2, 2, 0, 1}, {1, 2, 2, 0, 1}, {2, 2, 2, 0, 1}, {2, 0, 2, 0, 1},
+ {0, 0, 1, 0, 1}, {1, 0, 1, 0, 1}, {2, 0, 1, 0, 1}, {0, 1, 2, 0, 1},
+ {0, 1, 1, 0, 1}, {1, 1, 1, 0, 1}, {2, 1, 1, 0, 1}, {1, 1, 2, 0, 1},
+ {0, 2, 1, 0, 1}, {1, 2, 1, 0, 1}, {2, 2, 1, 0, 1}, {2, 1, 2, 0, 1},
+ {0, 0, 1, 2, 2}, {1, 0, 1, 2, 2}, {2, 0, 1, 2, 2}, {0, 1, 2, 2, 2},
+ {0, 0, 0, 1, 1}, {1, 0, 0, 1, 1}, {2, 0, 0, 1, 1}, {0, 0, 2, 1, 1},
+ {0, 1, 0, 1, 1}, {1, 1, 0, 1, 1}, {2, 1, 0, 1, 1}, {1, 0, 2, 1, 1},
+ {0, 2, 0, 1, 1}, {1, 2, 0, 1, 1}, {2, 2, 0, 1, 1}, {2, 0, 2, 1, 1},
+ {0, 2, 2, 1, 1}, {1, 2, 2, 1, 1}, {2, 2, 2, 1, 1}, {2, 0, 2, 1, 1},
+ {0, 0, 1, 1, 1}, {1, 0, 1, 1, 1}, {2, 0, 1, 1, 1}, {0, 1, 2, 1, 1},
+ {0, 1, 1, 1, 1}, {1, 1, 1, 1, 1}, {2, 1, 1, 1, 1}, {1, 1, 2, 1, 1},
+ {0, 2, 1, 1, 1}, {1, 2, 1, 1, 1}, {2, 2, 1, 1, 1}, {2, 1, 2, 1, 1},
+ {0, 1, 1, 2, 2}, {1, 1, 1, 2, 2}, {2, 1, 1, 2, 2}, {1, 1, 2, 2, 2},
+ {0, 0, 0, 2, 1}, {1, 0, 0, 2, 1}, {2, 0, 0, 2, 1}, {0, 0, 2, 2, 1},
+ {0, 1, 0, 2, 1}, {1, 1, 0, 2, 1}, {2, 1, 0, 2, 1}, {1, 0, 2, 2, 1},
+ {0, 2, 0, 2, 1}, {1, 2, 0, 2, 1}, {2, 2, 0, 2, 1}, {2, 0, 2, 2, 1},
+ {0, 2, 2, 2, 1}, {1, 2, 2, 2, 1}, {2, 2, 2, 2, 1}, {2, 0, 2, 2, 1},
+ {0, 0, 1, 2, 1}, {1, 0, 1, 2, 1}, {2, 0, 1, 2, 1}, {0, 1, 2, 2, 1},
+ {0, 1, 1, 2, 1}, {1, 1, 1, 2, 1}, {2, 1, 1, 2, 1}, {1, 1, 2, 2, 1},
+ {0, 2, 1, 2, 1}, {1, 2, 1, 2, 1}, {2, 2, 1, 2, 1}, {2, 1, 2, 2, 1},
+ {0, 2, 1, 2, 2}, {1, 2, 1, 2, 2}, {2, 2, 1, 2, 2}, {2, 1, 2, 2, 2},
+ {0, 0, 0, 1, 2}, {1, 0, 0, 1, 2}, {2, 0, 0, 1, 2}, {0, 0, 2, 1, 2},
+ {0, 1, 0, 1, 2}, {1, 1, 0, 1, 2}, {2, 1, 0, 1, 2}, {1, 0, 2, 1, 2},
+ {0, 2, 0, 1, 2}, {1, 2, 0, 1, 2}, {2, 2, 0, 1, 2}, {2, 0, 2, 1, 2},
+ {0, 2, 2, 1, 2}, {1, 2, 2, 1, 2}, {2, 2, 2, 1, 2}, {2, 0, 2, 1, 2},
+ {0, 0, 1, 1, 2}, {1, 0, 1, 1, 2}, {2, 0, 1, 1, 2}, {0, 1, 2, 1, 2},
+ {0, 1, 1, 1, 2}, {1, 1, 1, 1, 2}, {2, 1, 1, 1, 2}, {1, 1, 2, 1, 2},
+ {0, 2, 1, 1, 2}, {1, 2, 1, 1, 2}, {2, 2, 1, 1, 2}, {2, 1, 2, 1, 2},
+ {0, 2, 2, 2, 2}, {1, 2, 2, 2, 2}, {2, 2, 2, 2, 2}, {2, 1, 2, 2, 2}
+};
+
+/** @brief Packed trit values for each unpacked value, indexed [hi][][][][lo]. */
+static const uint8_t integer_of_trits[3][3][3][3][3] {
+ {
+ {
+ {
+ {0, 1, 2},
+ {4, 5, 6},
+ {8, 9, 10}
+ },
+ {
+ {16, 17, 18},
+ {20, 21, 22},
+ {24, 25, 26}
+ },
+ {
+ {3, 7, 15},
+ {19, 23, 27},
+ {12, 13, 14}
+ }
+ },
+ {
+ {
+ {32, 33, 34},
+ {36, 37, 38},
+ {40, 41, 42}
+ },
+ {
+ {48, 49, 50},
+ {52, 53, 54},
+ {56, 57, 58}
+ },
+ {
+ {35, 39, 47},
+ {51, 55, 59},
+ {44, 45, 46}
+ }
+ },
+ {
+ {
+ {64, 65, 66},
+ {68, 69, 70},
+ {72, 73, 74}
+ },
+ {
+ {80, 81, 82},
+ {84, 85, 86},
+ {88, 89, 90}
+ },
+ {
+ {67, 71, 79},
+ {83, 87, 91},
+ {76, 77, 78}
+ }
+ }
+ },
+ {
+ {
+ {
+ {128, 129, 130},
+ {132, 133, 134},
+ {136, 137, 138}
+ },
+ {
+ {144, 145, 146},
+ {148, 149, 150},
+ {152, 153, 154}
+ },
+ {
+ {131, 135, 143},
+ {147, 151, 155},
+ {140, 141, 142}
+ }
+ },
+ {
+ {
+ {160, 161, 162},
+ {164, 165, 166},
+ {168, 169, 170}
+ },
+ {
+ {176, 177, 178},
+ {180, 181, 182},
+ {184, 185, 186}
+ },
+ {
+ {163, 167, 175},
+ {179, 183, 187},
+ {172, 173, 174}
+ }
+ },
+ {
+ {
+ {192, 193, 194},
+ {196, 197, 198},
+ {200, 201, 202}
+ },
+ {
+ {208, 209, 210},
+ {212, 213, 214},
+ {216, 217, 218}
+ },
+ {
+ {195, 199, 207},
+ {211, 215, 219},
+ {204, 205, 206}
+ }
+ }
+ },
+ {
+ {
+ {
+ {96, 97, 98},
+ {100, 101, 102},
+ {104, 105, 106}
+ },
+ {
+ {112, 113, 114},
+ {116, 117, 118},
+ {120, 121, 122}
+ },
+ {
+ {99, 103, 111},
+ {115, 119, 123},
+ {108, 109, 110}
+ }
+ },
+ {
+ {
+ {224, 225, 226},
+ {228, 229, 230},
+ {232, 233, 234}
+ },
+ {
+ {240, 241, 242},
+ {244, 245, 246},
+ {248, 249, 250}
+ },
+ {
+ {227, 231, 239},
+ {243, 247, 251},
+ {236, 237, 238}
+ }
+ },
+ {
+ {
+ {28, 29, 30},
+ {60, 61, 62},
+ {92, 93, 94}
+ },
+ {
+ {156, 157, 158},
+ {188, 189, 190},
+ {220, 221, 222}
+ },
+ {
+ {31, 63, 127},
+ {159, 191, 255},
+ {252, 253, 254}
+ }
+ }
+ }
+};
+
+/**
+ * @brief The number of bits, trits, and quints needed for a quant level.
+ */
+struct btq_count
+{
+ /** @brief The number of bits. */
+ uint8_t bits:6;
+
+ /** @brief The number of trits. */
+ uint8_t trits:1;
+
+ /** @brief The number of quints. */
+ uint8_t quints:1;
+};
+
+/**
+ * @brief The table of bits, trits, and quints needed for a quant encode.
+ */
+static const std::array<btq_count, 21> btq_counts {{
+ { 1, 0, 0 }, // QUANT_2
+ { 0, 1, 0 }, // QUANT_3
+ { 2, 0, 0 }, // QUANT_4
+ { 0, 0, 1 }, // QUANT_5
+ { 1, 1, 0 }, // QUANT_6
+ { 3, 0, 0 }, // QUANT_8
+ { 1, 0, 1 }, // QUANT_10
+ { 2, 1, 0 }, // QUANT_12
+ { 4, 0, 0 }, // QUANT_16
+ { 2, 0, 1 }, // QUANT_20
+ { 3, 1, 0 }, // QUANT_24
+ { 5, 0, 0 }, // QUANT_32
+ { 3, 0, 1 }, // QUANT_40
+ { 4, 1, 0 }, // QUANT_48
+ { 6, 0, 0 }, // QUANT_64
+ { 4, 0, 1 }, // QUANT_80
+ { 5, 1, 0 }, // QUANT_96
+ { 7, 0, 0 }, // QUANT_128
+ { 5, 0, 1 }, // QUANT_160
+ { 6, 1, 0 }, // QUANT_192
+ { 8, 0, 0 } // QUANT_256
+}};
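
Each entry above encodes the fact that a quant level with B bits, T trits, and Q quints represents 2^B * 3^T * 5^Q values; for example QUANT_12 is 2 bits plus one trit (4 * 3 = 12) and QUANT_20 is 2 bits plus one quint (4 * 5 = 20). A small helper that checks this relationship, illustrative only:

// Illustrative check: levels = 2^bits * 3^trits * 5^quints for btq_counts above.
static unsigned int btq_level_count(const btq_count& c)
{
    unsigned int levels = 1u << c.bits;
    if (c.trits)  levels *= 3;
    if (c.quints) levels *= 5;
    return levels;   // e.g. btq_counts[QUANT_12] -> (1 << 2) * 3 == 12
}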
+
+/**
+ * @brief The sequence scale, round, and divisors needed to compute sizing.
+ *
+ * The length of a quantized sequence in bits is:
+ * (scale * <sequence_len> + round) / divisor
+ */
+struct ise_size
+{
+ /** @brief The scaling parameter. */
+ uint8_t scale:6;
+
+ /** @brief The divisor parameter. */
+ uint8_t divisor:2;
+};
+
+/**
+ * @brief The table of scale, round, and divisors needed for quant sizing.
+ */
+static const std::array<ise_size, 21> ise_sizes {{
+ { 1, 0 }, // QUANT_2
+ { 8, 2 }, // QUANT_3
+ { 2, 0 }, // QUANT_4
+ { 7, 1 }, // QUANT_5
+ { 13, 2 }, // QUANT_6
+ { 3, 0 }, // QUANT_8
+ { 10, 1 }, // QUANT_10
+ { 18, 2 }, // QUANT_12
+ { 4, 0 }, // QUANT_16
+ { 13, 1 }, // QUANT_20
+ { 23, 2 }, // QUANT_24
+ { 5, 0 }, // QUANT_32
+ { 16, 1 }, // QUANT_40
+ { 28, 2 }, // QUANT_48
+ { 6, 0 }, // QUANT_64
+ { 19, 1 }, // QUANT_80
+ { 33, 2 }, // QUANT_96
+ { 7, 0 }, // QUANT_128
+ { 22, 1 }, // QUANT_160
+ { 38, 2 }, // QUANT_192
+ { 8, 0 } // QUANT_256
+}};
+
+/* See header for documentation. */
+unsigned int get_ise_sequence_bitcount(
+ unsigned int character_count,
+ quant_method quant_level
+) {
+ // Cope with out-of-bounds values - input might be invalid
+ if (static_cast<size_t>(quant_level) >= ise_sizes.size())
+ {
+ // Arbitrarily large number that's more than an ASTC block can hold
+ return 1024;
+ }
+
+ auto& entry = ise_sizes[quant_level];
+ unsigned int divisor = (entry.divisor << 1) + 1;
+ return (entry.scale * character_count + divisor - 1) / divisor;
+}
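
A worked example of the sizing formula: for QUANT_6 (one bit plus a trit per value) the table entry is { 13, 2 }, so the effective divisor is (2 << 1) + 1 = 5 and five values cost (13 * 5 + 4) / 5 = 13 bits, which is exactly five payload bits plus the 8 bits of one packed trit block. A partial run of three values costs (13 * 3 + 4) / 5 = 8 bits, matching the (bits + 2) + (bits + 2) + (bits + 1) layout written by encode_ise() below.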
+
+/**
+ * @brief Write up to 8 bits at an arbitrary bit offset.
+ *
+ * The stored value is at most 8 bits, but can be stored at an offset of between 0 and 7 bits so may
+ * span two separate bytes in memory.
+ *
+ * @param value The value to write.
+ * @param bitcount The number of bits to write, starting from LSB.
+ * @param bitoffset The bit offset to store at, between 0 and 7.
+ * @param[in,out] ptr The data pointer to write to.
+ */
+static inline void write_bits(
+ unsigned int value,
+ unsigned int bitcount,
+ unsigned int bitoffset,
+ uint8_t ptr[2]
+) {
+ unsigned int mask = (1 << bitcount) - 1;
+ value &= mask;
+ ptr += bitoffset >> 3;
+ bitoffset &= 7;
+ value <<= bitoffset;
+ mask <<= bitoffset;
+ mask = ~mask;
+
+ ptr[0] &= mask;
+ ptr[0] |= value;
+ ptr[1] &= mask >> 8;
+ ptr[1] |= value >> 8;
+}
+
+/**
+ * @brief Read up to 8 bits at an arbitrary bit offset.
+ *
+ * The stored value is at most 8 bits, but can be stored at an offset of between 0 and 7 bits so may
+ * span two separate bytes in memory.
+ *
+ * @param bitcount The number of bits to read.
+ * @param bitoffset The bit offset to read from, between 0 and 7.
+ * @param ptr The data pointer to read from.
+ *
+ * @return The read value.
+ */
+static inline unsigned int read_bits(
+ unsigned int bitcount,
+ unsigned int bitoffset,
+ const uint8_t* ptr
+) {
+ unsigned int mask = (1 << bitcount) - 1;
+ ptr += bitoffset >> 3;
+ bitoffset &= 7;
+ unsigned int value = ptr[0] | (ptr[1] << 8);
+ value >>= bitoffset;
+ value &= mask;
+ return value;
+}
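
A short round-trip sketch of the two helpers above, illustrative only; note that both always access ptr[1], so keep a spare byte beyond the last bit offset you touch:

// Illustrative round trip through write_bits() / read_bits().
static void bit_packing_example()
{
    uint8_t buf[3] { 0, 0, 0 };              // deliberately one byte of slack
    write_bits(0x5u, 3, 0, buf);             // 3 bits at bit offset 0
    write_bits(0x13u, 5, 3, buf);            // 5 bits starting at bit offset 3
    unsigned int a = read_bits(3, 0, buf);   // 0x5
    unsigned int b = read_bits(5, 3, buf);   // 0x13
    (void)a;
    (void)b;
}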
+
+/* See header for documentation. */
+void encode_ise(
+ quant_method quant_level,
+ unsigned int character_count,
+ const uint8_t* input_data,
+ uint8_t* output_data,
+ unsigned int bit_offset
+) {
+ promise(character_count > 0);
+
+ unsigned int bits = btq_counts[quant_level].bits;
+ unsigned int trits = btq_counts[quant_level].trits;
+ unsigned int quints = btq_counts[quant_level].quints;
+ unsigned int mask = (1 << bits) - 1;
+
+ // Write out trits and bits
+ if (trits)
+ {
+ unsigned int i = 0;
+ unsigned int full_trit_blocks = character_count / 5;
+
+ for (unsigned int j = 0; j < full_trit_blocks; j++)
+ {
+ unsigned int i4 = input_data[i + 4] >> bits;
+ unsigned int i3 = input_data[i + 3] >> bits;
+ unsigned int i2 = input_data[i + 2] >> bits;
+ unsigned int i1 = input_data[i + 1] >> bits;
+ unsigned int i0 = input_data[i + 0] >> bits;
+
+ uint8_t T = integer_of_trits[i4][i3][i2][i1][i0];
+
+ // The max size of a trit bit count is 6, so we can always safely
+ // pack a single MX value with the following 1 or 2 T bits.
+ uint8_t pack;
+
+ // Element 0 + T0 + T1
+ pack = (input_data[i++] & mask) | (((T >> 0) & 0x3) << bits);
+ write_bits(pack, bits + 2, bit_offset, output_data);
+ bit_offset += bits + 2;
+
+ // Element 1 + T2 + T3
+ pack = (input_data[i++] & mask) | (((T >> 2) & 0x3) << bits);
+ write_bits(pack, bits + 2, bit_offset, output_data);
+ bit_offset += bits + 2;
+
+ // Element 2 + T4
+ pack = (input_data[i++] & mask) | (((T >> 4) & 0x1) << bits);
+ write_bits(pack, bits + 1, bit_offset, output_data);
+ bit_offset += bits + 1;
+
+ // Element 3 + T5 + T6
+ pack = (input_data[i++] & mask) | (((T >> 5) & 0x3) << bits);
+ write_bits(pack, bits + 2, bit_offset, output_data);
+ bit_offset += bits + 2;
+
+ // Element 4 + T7
+ pack = (input_data[i++] & mask) | (((T >> 7) & 0x1) << bits);
+ write_bits(pack, bits + 1, bit_offset, output_data);
+ bit_offset += bits + 1;
+ }
+
+ // Loop tail for a partial block
+ if (i != character_count)
+ {
+ // i4 cannot be present - we know the block is partial
+ // i0 must be present - we know the block isn't empty
+ unsigned int i4 = 0;
+ unsigned int i3 = i + 3 >= character_count ? 0 : input_data[i + 3] >> bits;
+ unsigned int i2 = i + 2 >= character_count ? 0 : input_data[i + 2] >> bits;
+ unsigned int i1 = i + 1 >= character_count ? 0 : input_data[i + 1] >> bits;
+ unsigned int i0 = input_data[i + 0] >> bits;
+
+ uint8_t T = integer_of_trits[i4][i3][i2][i1][i0];
+
+ for (unsigned int j = 0; i < character_count; i++, j++)
+ {
+ // Truncated table as this iteration is always partial
+ static const uint8_t tbits[4] { 2, 2, 1, 2 };
+ static const uint8_t tshift[4] { 0, 2, 4, 5 };
+
+ uint8_t pack = (input_data[i] & mask) |
+ (((T >> tshift[j]) & ((1 << tbits[j]) - 1)) << bits);
+
+ write_bits(pack, bits + tbits[j], bit_offset, output_data);
+ bit_offset += bits + tbits[j];
+ }
+ }
+ }
+ // Write out quints and bits
+ else if (quints)
+ {
+ unsigned int i = 0;
+ unsigned int full_quint_blocks = character_count / 3;
+
+ for (unsigned int j = 0; j < full_quint_blocks; j++)
+ {
+ unsigned int i2 = input_data[i + 2] >> bits;
+ unsigned int i1 = input_data[i + 1] >> bits;
+ unsigned int i0 = input_data[i + 0] >> bits;
+
+ uint8_t T = integer_of_quints[i2][i1][i0];
+
+ // The max size of a quint bit count is 5, so we can always safely
+ // pack a single M value with the following 2 or 3 T bits.
+ uint8_t pack;
+
+ // Element 0
+ pack = (input_data[i++] & mask) | (((T >> 0) & 0x7) << bits);
+ write_bits(pack, bits + 3, bit_offset, output_data);
+ bit_offset += bits + 3;
+
+ // Element 1
+ pack = (input_data[i++] & mask) | (((T >> 3) & 0x3) << bits);
+ write_bits(pack, bits + 2, bit_offset, output_data);
+ bit_offset += bits + 2;
+
+ // Element 2
+ pack = (input_data[i++] & mask) | (((T >> 5) & 0x3) << bits);
+ write_bits(pack, bits + 2, bit_offset, output_data);
+ bit_offset += bits + 2;
+ }
+
+ // Loop tail for a partial block
+ if (i != character_count)
+ {
+ // i2 cannot be present - we know the block is partial
+ // i0 must be present - we know the block isn't empty
+ unsigned int i2 = 0;
+ unsigned int i1 = i + 1 >= character_count ? 0 : input_data[i + 1] >> bits;
+ unsigned int i0 = input_data[i + 0] >> bits;
+
+ uint8_t T = integer_of_quints[i2][i1][i0];
+
+ for (unsigned int j = 0; i < character_count; i++, j++)
+ {
+ // Truncated table as this iteration is always partial
+ static const uint8_t tbits[2] { 3, 2 };
+ static const uint8_t tshift[2] { 0, 3 };
+
+ uint8_t pack = (input_data[i] & mask) |
+ (((T >> tshift[j]) & ((1 << tbits[j]) - 1)) << bits);
+
+ write_bits(pack, bits + tbits[j], bit_offset, output_data);
+ bit_offset += bits + tbits[j];
+ }
+ }
+ }
+ // Write out just bits
+ else
+ {
+ for (unsigned int i = 0; i < character_count; i++)
+ {
+ write_bits(input_data[i], bits, bit_offset, output_data);
+ bit_offset += bits;
+ }
+ }
+}
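
As a cross-check of the interleaving above: a full trit block writes (bits + 2) + (bits + 2) + (bits + 1) + (bits + 2) + (bits + 1) = 5 * bits + 8 bits, i.e. the five payload values plus the 8-bit packed trit value T, and a full quint block writes (bits + 3) + (bits + 2) + (bits + 2) = 3 * bits + 7 bits for the 7-bit packed quint value. This agrees with the ise_sizes table: QUANT_6 costs 13 bits per five weights (5 * 1 + 8) and QUANT_10 costs 10 bits per three weights (3 * 1 + 7).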
+
+/* See header for documentation. */
+void decode_ise(
+ quant_method quant_level,
+ unsigned int character_count,
+ const uint8_t* input_data,
+ uint8_t* output_data,
+ unsigned int bit_offset
+) {
+ promise(character_count > 0);
+
+ // Note: due to how the trit/quint-block unpacking is done in this function, we may write more
+ // temporary results than the number of outputs. The maximum actual number of results is 64,
+ // but we keep 4 additional entries of padding.
+ uint8_t results[68];
+ uint8_t tq_blocks[22] { 0 }; // Trit-blocks or quint-blocks, must be zeroed
+
+ unsigned int bits = btq_counts[quant_level].bits;
+ unsigned int trits = btq_counts[quant_level].trits;
+ unsigned int quints = btq_counts[quant_level].quints;
+
+ unsigned int lcounter = 0;
+ unsigned int hcounter = 0;
+
+ // Collect bits for each element, as well as bits for any trit-blocks and quint-blocks.
+ for (unsigned int i = 0; i < character_count; i++)
+ {
+ results[i] = static_cast<uint8_t>(read_bits(bits, bit_offset, input_data));
+ bit_offset += bits;
+
+ if (trits)
+ {
+ static const uint8_t bits_to_read[5] { 2, 2, 1, 2, 1 };
+ static const uint8_t block_shift[5] { 0, 2, 4, 5, 7 };
+ static const uint8_t next_lcounter[5] { 1, 2, 3, 4, 0 };
+ static const uint8_t hcounter_incr[5] { 0, 0, 0, 0, 1 };
+ unsigned int tdata = read_bits(bits_to_read[lcounter], bit_offset, input_data);
+ bit_offset += bits_to_read[lcounter];
+ tq_blocks[hcounter] |= tdata << block_shift[lcounter];
+ hcounter += hcounter_incr[lcounter];
+ lcounter = next_lcounter[lcounter];
+ }
+
+ if (quints)
+ {
+ static const uint8_t bits_to_read[3] { 3, 2, 2 };
+ static const uint8_t block_shift[3] { 0, 3, 5 };
+ static const uint8_t next_lcounter[3] { 1, 2, 0 };
+ static const uint8_t hcounter_incr[3] { 0, 0, 1 };
+ unsigned int tdata = read_bits(bits_to_read[lcounter], bit_offset, input_data);
+ bit_offset += bits_to_read[lcounter];
+ tq_blocks[hcounter] |= tdata << block_shift[lcounter];
+ hcounter += hcounter_incr[lcounter];
+ lcounter = next_lcounter[lcounter];
+ }
+ }
+
+ // Unpack trit-blocks or quint-blocks as needed
+ if (trits)
+ {
+ unsigned int trit_blocks = (character_count + 4) / 5;
+ promise(trit_blocks > 0);
+ for (unsigned int i = 0; i < trit_blocks; i++)
+ {
+ const uint8_t *tritptr = trits_of_integer[tq_blocks[i]];
+ results[5 * i ] |= tritptr[0] << bits;
+ results[5 * i + 1] |= tritptr[1] << bits;
+ results[5 * i + 2] |= tritptr[2] << bits;
+ results[5 * i + 3] |= tritptr[3] << bits;
+ results[5 * i + 4] |= tritptr[4] << bits;
+ }
+ }
+
+ if (quints)
+ {
+ unsigned int quint_blocks = (character_count + 2) / 3;
+ promise(quint_blocks > 0);
+ for (unsigned int i = 0; i < quint_blocks; i++)
+ {
+ const uint8_t *quintptr = quints_of_integer[tq_blocks[i]];
+ results[3 * i ] |= quintptr[0] << bits;
+ results[3 * i + 1] |= quintptr[1] << bits;
+ results[3 * i + 2] |= quintptr[2] << bits;
+ }
+ }
+
+ for (unsigned int i = 0; i < character_count; i++)
+ {
+ output_data[i] = results[i];
+ }
+}
diff --git a/thirdparty/astcenc/astcenc_internal.h b/thirdparty/astcenc/astcenc_internal.h
new file mode 100644
index 0000000000..0aa8fa0f81
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_internal.h
@@ -0,0 +1,2196 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2011-2023 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+/**
+ * @brief Functions and data declarations.
+ */
+
+#ifndef ASTCENC_INTERNAL_INCLUDED
+#define ASTCENC_INTERNAL_INCLUDED
+
+#include <algorithm>
+#include <cstddef>
+#include <cstdint>
+#if defined(ASTCENC_DIAGNOSTICS)
+ #include <cstdio>
+#endif
+#include <cstdlib>
+
+#include "astcenc.h"
+#include "astcenc_mathlib.h"
+#include "astcenc_vecmathlib.h"
+
+/**
+ * @brief Make a promise to the compiler's optimizer.
+ *
+ * A promise is an expression that the optimizer can assume is true, to help it generate faster
+ * code. Common use cases for this are to promise that a for loop will iterate more than
+ * once, or that the loop iteration count is a multiple of a vector length, which avoids pre-loop
+ * checks and can avoid loop tails if loops are unrolled by the auto-vectorizer.
+ */
+#if defined(NDEBUG)
+ #if !defined(__clang__) && defined(_MSC_VER)
+ #define promise(cond) __assume(cond)
+ #elif defined(__clang__)
+ #if __has_builtin(__builtin_assume)
+ #define promise(cond) __builtin_assume(cond)
+ #elif __has_builtin(__builtin_unreachable)
+ #define promise(cond) if (!(cond)) { __builtin_unreachable(); }
+ #else
+ #define promise(cond)
+ #endif
+ #else // Assume GCC
+ #define promise(cond) if (!(cond)) { __builtin_unreachable(); }
+ #endif
+#else
+ #define promise(cond) assert(cond)
+#endif
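
A typical use, sketched with a hypothetical weight_count variable: assert the condition in debug builds, and let release builds drop pre-loop checks and loop tails:

// Sketch: promising a non-zero, SIMD-multiple trip count before a hot loop.
promise(weight_count > 0);
promise((weight_count % ASTCENC_SIMD_WIDTH) == 0);
for (unsigned int i = 0; i < weight_count; i += ASTCENC_SIMD_WIDTH)
{
    // ... vectorized body ...
}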
+
+/* ============================================================================
+ Constants
+============================================================================ */
+#if !defined(ASTCENC_BLOCK_MAX_TEXELS)
+ #define ASTCENC_BLOCK_MAX_TEXELS 216 // A 3D 6x6x6 block
+#endif
+
+/** @brief The maximum number of texels a block can support (6x6x6 block). */
+static constexpr unsigned int BLOCK_MAX_TEXELS { ASTCENC_BLOCK_MAX_TEXELS };
+
+/** @brief The maximum number of components a block can support. */
+static constexpr unsigned int BLOCK_MAX_COMPONENTS { 4 };
+
+/** @brief The maximum number of partitions a block can support. */
+static constexpr unsigned int BLOCK_MAX_PARTITIONS { 4 };
+
+/** @brief The number of partitionings, per partition count, supported by the ASTC format. */
+static constexpr unsigned int BLOCK_MAX_PARTITIONINGS { 1024 };
+
+/** @brief The maximum number of weights used during partition selection for texel clustering. */
+static constexpr uint8_t BLOCK_MAX_KMEANS_TEXELS { 64 };
+
+/** @brief The maximum number of weights a block can support. */
+static constexpr unsigned int BLOCK_MAX_WEIGHTS { 64 };
+
+/** @brief The maximum number of weights a block can support per plane in 2 plane mode. */
+static constexpr unsigned int BLOCK_MAX_WEIGHTS_2PLANE { BLOCK_MAX_WEIGHTS / 2 };
+
+/** @brief The minimum number of weight bits a candidate encoding must encode. */
+static constexpr unsigned int BLOCK_MIN_WEIGHT_BITS { 24 };
+
+/** @brief The maximum number of weight bits a candidate encoding can encode. */
+static constexpr unsigned int BLOCK_MAX_WEIGHT_BITS { 96 };
+
+/** @brief The index indicating a bad (unused) block mode in the remap array. */
+static constexpr uint16_t BLOCK_BAD_BLOCK_MODE { 0xFFFFu };
+
+/** @brief The index indicating a bad (unused) partitioning in the remap array. */
+static constexpr uint16_t BLOCK_BAD_PARTITIONING { 0xFFFFu };
+
+/** @brief The number of partition index bits supported by the ASTC format. */
+static constexpr unsigned int PARTITION_INDEX_BITS { 10 };
+
+/** @brief The offset of the plane 2 weights in shared weight arrays. */
+static constexpr unsigned int WEIGHTS_PLANE2_OFFSET { BLOCK_MAX_WEIGHTS_2PLANE };
+
+/** @brief The sum of quantized weights for one texel. */
+static constexpr float WEIGHTS_TEXEL_SUM { 16.0f };
+
+/** @brief The number of block modes supported by the ASTC format. */
+static constexpr unsigned int WEIGHTS_MAX_BLOCK_MODES { 2048 };
+
+/** @brief The number of weight grid decimation modes supported by the ASTC format. */
+static constexpr unsigned int WEIGHTS_MAX_DECIMATION_MODES { 87 };
+
+/** @brief The high default error used to initialize error trackers. */
+static constexpr float ERROR_CALC_DEFAULT { 1e30f };
+
+/**
+ * @brief The minimum texel count for a block to use the one partition fast path.
+ *
+ * This setting skips 4x4 and 5x4 block sizes.
+ */
+static constexpr unsigned int TUNE_MIN_TEXELS_MODE0_FASTPATH { 24 };
+
+/**
+ * @brief The maximum number of candidate encodings tested for each encoding mode.
+ *
+ * This can be dynamically reduced by the compression quality preset.
+ */
+static constexpr unsigned int TUNE_MAX_TRIAL_CANDIDATES { 8 };
+
+/**
+ * @brief The maximum number of candidate partitionings tested for each encoding mode.
+ *
+ * This can be dynamically reduced by the compression quality preset.
+ */
+static constexpr unsigned int TUNE_MAX_PARTITIONING_CANDIDATES { 32 };
+
+/**
+ * @brief The maximum quant level using full angular endpoint search method.
+ *
+ * The angular endpoint search is used to find the min/max weight that should
+ * be used for a given quantization level. It is effective but expensive, so
+ * we only use it where it has the most value - low quant levels with wide
+ * spacing. It is used below TUNE_MAX_ANGULAR_QUANT (inclusive). Above this we
+ * assume the min weight is 0.0f, and the max weight is 1.0f.
+ *
+ * Note the angular algorithm is vectorized, and using QUANT_12 exactly fills
+ * one 8-wide vector. Decreasing by one doesn't buy much performance, and
+ * increasing by one is disproportionately expensive.
+ */
+static constexpr unsigned int TUNE_MAX_ANGULAR_QUANT { 7 }; /* QUANT_12 */
+
+static_assert((BLOCK_MAX_TEXELS % ASTCENC_SIMD_WIDTH) == 0,
+ "BLOCK_MAX_TEXELS must be multiple of ASTCENC_SIMD_WIDTH");
+
+static_assert(BLOCK_MAX_TEXELS <= 216,
+ "BLOCK_MAX_TEXELS must not be greater than 216");
+
+static_assert((BLOCK_MAX_WEIGHTS % ASTCENC_SIMD_WIDTH) == 0,
+ "BLOCK_MAX_WEIGHTS must be multiple of ASTCENC_SIMD_WIDTH");
+
+static_assert((WEIGHTS_MAX_BLOCK_MODES % ASTCENC_SIMD_WIDTH) == 0,
+ "WEIGHTS_MAX_BLOCK_MODES must be multiple of ASTCENC_SIMD_WIDTH");
+
+
+/* ============================================================================
+ Commonly used data structures
+============================================================================ */
+
+/**
+ * @brief The ASTC endpoint formats.
+ *
+ * Note, the values here are used directly in the encoding in the format so do not rearrange.
+ */
+enum endpoint_formats
+{
+ FMT_LUMINANCE = 0,
+ FMT_LUMINANCE_DELTA = 1,
+ FMT_HDR_LUMINANCE_LARGE_RANGE = 2,
+ FMT_HDR_LUMINANCE_SMALL_RANGE = 3,
+ FMT_LUMINANCE_ALPHA = 4,
+ FMT_LUMINANCE_ALPHA_DELTA = 5,
+ FMT_RGB_SCALE = 6,
+ FMT_HDR_RGB_SCALE = 7,
+ FMT_RGB = 8,
+ FMT_RGB_DELTA = 9,
+ FMT_RGB_SCALE_ALPHA = 10,
+ FMT_HDR_RGB = 11,
+ FMT_RGBA = 12,
+ FMT_RGBA_DELTA = 13,
+ FMT_HDR_RGB_LDR_ALPHA = 14,
+ FMT_HDR_RGBA = 15
+};
+
+/**
+ * @brief The ASTC quantization methods.
+ *
+ * Note, the values here are used directly in the encoding in the format so do not rearrange.
+ */
+enum quant_method
+{
+ QUANT_2 = 0,
+ QUANT_3 = 1,
+ QUANT_4 = 2,
+ QUANT_5 = 3,
+ QUANT_6 = 4,
+ QUANT_8 = 5,
+ QUANT_10 = 6,
+ QUANT_12 = 7,
+ QUANT_16 = 8,
+ QUANT_20 = 9,
+ QUANT_24 = 10,
+ QUANT_32 = 11,
+ QUANT_40 = 12,
+ QUANT_48 = 13,
+ QUANT_64 = 14,
+ QUANT_80 = 15,
+ QUANT_96 = 16,
+ QUANT_128 = 17,
+ QUANT_160 = 18,
+ QUANT_192 = 19,
+ QUANT_256 = 20
+};
+
+/**
+ * @brief The number of levels used by an ASTC quantization method.
+ *
+ * @param method The quantization method
+ *
+ * @return The number of levels used by @c method.
+ */
+static inline unsigned int get_quant_level(quant_method method)
+{
+ switch (method)
+ {
+ case QUANT_2: return 2;
+ case QUANT_3: return 3;
+ case QUANT_4: return 4;
+ case QUANT_5: return 5;
+ case QUANT_6: return 6;
+ case QUANT_8: return 8;
+ case QUANT_10: return 10;
+ case QUANT_12: return 12;
+ case QUANT_16: return 16;
+ case QUANT_20: return 20;
+ case QUANT_24: return 24;
+ case QUANT_32: return 32;
+ case QUANT_40: return 40;
+ case QUANT_48: return 48;
+ case QUANT_64: return 64;
+ case QUANT_80: return 80;
+ case QUANT_96: return 96;
+ case QUANT_128: return 128;
+ case QUANT_160: return 160;
+ case QUANT_192: return 192;
+ case QUANT_256: return 256;
+ }
+
+ // Unreachable - the enum is fully described
+ return 0;
+}
+
+/**
+ * @brief Computed metrics about a partition in a block.
+ */
+struct partition_metrics
+{
+ /** @brief The error-weighted average color in the partition. */
+ vfloat4 avg;
+
+ /** @brief The dominant error-weighted direction in the partition. */
+ vfloat4 dir;
+};
+
+/**
+ * @brief Computed lines for a three-component analysis.
+ */
+struct partition_lines3
+{
+ /** @brief Line for uncorrelated chroma. */
+ line3 uncor_line;
+
+ /** @brief Line for correlated chroma, passing through the origin. */
+ line3 samec_line;
+
+ /** @brief Post-processed line for uncorrelated chroma. */
+ processed_line3 uncor_pline;
+
+ /** @brief Post-processed line for correlated chroma, passing through the origin. */
+ processed_line3 samec_pline;
+
+ /** @brief The length of the line for uncorrelated chroma. */
+ float uncor_line_len;
+
+ /** @brief The length of the line for correlated chroma. */
+ float samec_line_len;
+};
+
+/**
+ * @brief The partition information for a single partition.
+ *
+ * ASTC has a total of 1024 candidate partitions for each of 2/3/4 partition counts, although this
+ * 1024 includes seeds that generate duplicates of other seeds and seeds that generate completely
+ * empty partitions. These are both valid encodings, but astcenc will skip both during compression
+ * as they are not useful.
+ */
+struct partition_info
+{
+ /** @brief The number of partitions in this partitioning. */
+ uint16_t partition_count;
+
+ /** @brief The index (seed) of this partitioning. */
+ uint16_t partition_index;
+
+ /**
+ * @brief The number of texels in each partition.
+ *
+ * Note that some seeds assign zero texels to a partition. These are valid encodings, but are
+ * skipped by this compressor as there is no point spending bits encoding an unused color endpoint.
+ */
+ uint8_t partition_texel_count[BLOCK_MAX_PARTITIONS];
+
+ /** @brief The partition of each texel in the block. */
+ uint8_t partition_of_texel[BLOCK_MAX_TEXELS];
+
+ /** @brief The list of texels in each partition. */
+ uint8_t texels_of_partition[BLOCK_MAX_PARTITIONS][BLOCK_MAX_TEXELS];
+};
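
The texels_of_partition lists are what keep per-partition loops tight; a minimal usage sketch, illustrative only:

// Visit every texel assigned to partition 'part' of partitioning 'pi'.
static void for_each_partition_texel(const partition_info& pi, unsigned int part)
{
    for (unsigned int i = 0; i < pi.partition_texel_count[part]; i++)
    {
        uint8_t tix = pi.texels_of_partition[part][i];
        (void)tix;   // ... process texel 'tix' ...
    }
}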
+
+/**
+ * @brief The weight grid information for a single decimation pattern.
+ *
+ * ASTC can store one weight per texel, but is also capable of storing lower resolution weight grids
+ * that are interpolated during decompression to assign a weight to a texel. Storing fewer weights
+ * can free up a substantial amount of bits that we can then spend on more useful things, such as
+ * more accurate endpoints and weights, or additional partitions.
+ *
+ * This data structure is used to store information about a single weight grid decimation pattern,
+ * for a single block size.
+ */
+struct decimation_info
+{
+ /** @brief The total number of texels in the block. */
+ uint8_t texel_count;
+
+ /** @brief The maximum number of stored weights that contribute to each texel, between 1 and 4. */
+ uint8_t max_texel_weight_count;
+
+ /** @brief The total number of weights stored. */
+ uint8_t weight_count;
+
+ /** @brief The number of stored weights in the X dimension. */
+ uint8_t weight_x;
+
+ /** @brief The number of stored weights in the Y dimension. */
+ uint8_t weight_y;
+
+ /** @brief The number of stored weights in the Z dimension. */
+ uint8_t weight_z;
+
+ /**
+ * @brief The number of weights that contribute to each texel.
+ * Value is between 1 and 4.
+ */
+ uint8_t texel_weight_count[BLOCK_MAX_TEXELS];
+
+ /**
+ * @brief The weight index of the N weights that are interpolated for each texel.
+ * Stored transposed to improve vectorization.
+ */
+ uint8_t texel_weights_tr[4][BLOCK_MAX_TEXELS];
+
+ /**
+ * @brief The bilinear contribution of the N weights that are interpolated for each texel.
+ * Value is between 0 and 16, stored transposed to improve vectorization.
+ */
+ uint8_t texel_weight_contribs_int_tr[4][BLOCK_MAX_TEXELS];
+
+ /**
+ * @brief The bilinear contribution of the N weights that are interpolated for each texel.
+ * Value is between 0 and 1, stored transposed to improve vectorization.
+ */
+ alignas(ASTCENC_VECALIGN) float texel_weight_contribs_float_tr[4][BLOCK_MAX_TEXELS];
+
+ /** @brief The number of texels that each stored weight contributes to. */
+ uint8_t weight_texel_count[BLOCK_MAX_WEIGHTS];
+
+ /**
+ * @brief The list of texels that use a specific weight index.
+ * Stored transposed to improve vectorization.
+ */
+ uint8_t weight_texels_tr[BLOCK_MAX_TEXELS][BLOCK_MAX_WEIGHTS];
+
+ /**
+ * @brief The bilinear contribution to the N texels that use each weight.
+ * Value is between 0 and 1, stored transposed to improve vectorization.
+ */
+ alignas(ASTCENC_VECALIGN) float weights_texel_contribs_tr[BLOCK_MAX_TEXELS][BLOCK_MAX_WEIGHTS];
+
+ /**
+ * @brief The bilinear contribution to the Nth texel that uses each weight.
+ * Value is between 0 and 1, stored transposed to improve vectorization.
+ */
+ float texel_contrib_for_weight[BLOCK_MAX_TEXELS][BLOCK_MAX_WEIGHTS];
+};
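+
+/*
+ * Interpolation sketch (illustrative only, not part of the library API): the effective weight of
+ * texel @c t on a decimated grid is the contribution-weighted sum of the (up to) four stored
+ * weights that cover it. Using the integer contribution tables, whose entries for a texel sum to
+ * 16, and assuming @c weights holds the stored weight values and @c di is a @c decimation_info:
+ *
+ *     unsigned int sum = 8; // rounding bias for the final >> 4
+ *     for (unsigned int k = 0; k < 4; k++)
+ *     {
+ *         sum += weights[di.texel_weights_tr[k][t]] * di.texel_weight_contribs_int_tr[k][t];
+ *     }
+ *     unsigned int texel_weight = sum >> 4;
+ */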
+
+/**
+ * @brief Metadata for single block mode for a specific block size.
+ */
+struct block_mode
+{
+ /** @brief The block mode index in the ASTC encoded form. */
+ uint16_t mode_index;
+
+ /** @brief The decimation mode index in the compressor reindexed list. */
+ uint8_t decimation_mode;
+
+ /** @brief The weight quantization used by this block mode. */
+ uint8_t quant_mode;
+
+	/** @brief The number of weight bits used by this block mode. */
+ uint8_t weight_bits;
+
+ /** @brief Is a dual weight plane used by this block mode? */
+ uint8_t is_dual_plane : 1;
+
+ /**
+ * @brief Get the weight quantization used by this block mode.
+ *
+ * @return The quantization level.
+ */
+ inline quant_method get_weight_quant_mode() const
+ {
+ return static_cast<quant_method>(this->quant_mode);
+ }
+};
+
+/**
+ * @brief Metadata for single decimation mode for a specific block size.
+ */
+struct decimation_mode
+{
+ /** @brief The max weight precision for 1 plane, or -1 if not supported. */
+ int8_t maxprec_1plane;
+
+ /** @brief The max weight precision for 2 planes, or -1 if not supported. */
+ int8_t maxprec_2planes;
+
+ /**
+ * @brief Bitvector indicating weight quant modes used by active 1 plane block modes.
+ *
+ * Bit 0 = QUANT_2, Bit 1 = QUANT_3, etc.
+ */
+ uint16_t refprec_1_plane;
+
+ /**
+ * @brief Bitvector indicating weight quant methods used by active 2 plane block modes.
+ *
+ * Bit 0 = QUANT_2, Bit 1 = QUANT_3, etc.
+ */
+ uint16_t refprec_2_planes;
+
+ /**
+ * @brief Set a 1 plane weight quant as active.
+ *
+ * @param weight_quant The quant method to set.
+ */
+ void set_ref_1_plane(quant_method weight_quant)
+ {
+ refprec_1_plane |= (1 << weight_quant);
+ }
+
+ /**
+ * @brief Test if this mode is active below a given 1 plane weight quant (inclusive).
+ *
+ * @param max_weight_quant The max quant method to test.
+ */
+ bool is_ref_1_plane(quant_method max_weight_quant) const
+ {
+ uint16_t mask = static_cast<uint16_t>((1 << (max_weight_quant + 1)) - 1);
+ return (refprec_1_plane & mask) != 0;
+ }
+
+ /**
+ * @brief Set a 2 plane weight quant as active.
+ *
+ * @param weight_quant The quant method to set.
+ */
+ void set_ref_2_plane(quant_method weight_quant)
+ {
+ refprec_2_planes |= static_cast<uint16_t>(1 << weight_quant);
+ }
+
+ /**
+ * @brief Test if this mode is active below a given 2 plane weight quant (inclusive).
+ *
+ * @param max_weight_quant The max quant method to test.
+ */
+ bool is_ref_2_plane(quant_method max_weight_quant) const
+ {
+ uint16_t mask = static_cast<uint16_t>((1 << (max_weight_quant + 1)) - 1);
+ return (refprec_2_planes & mask) != 0;
+ }
+};
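+
+/*
+ * Usage sketch (illustrative only; @c dm is a hypothetical, zero-initialized decimation_mode):
+ * the refprec bitvectors record one bit per weight quant level, so activity at or below a given
+ * level can be tested with a simple mask.
+ *
+ *     dm.set_ref_1_plane(QUANT_4);              // Sets bit 2 in refprec_1_plane
+ *     bool active = dm.is_ref_1_plane(QUANT_6); // True: the mask covers QUANT_2 .. QUANT_6
+ */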
+
+/**
+ * @brief Data tables for a single block size.
+ *
+ * The decimation tables store the information to apply weight grid dimension reductions. We only
+ * store the decimation modes that are actually needed by the current context; many of the possible
+ * modes will be unused (too many weights for the current block size or disabled by heuristics). The
+ * actual number of decimation modes stored is @c decimation_mode_count_all, and the @c decimation_modes
+ * and @c decimation_tables arrays store the active modes contiguously at the start of the array. These
+ * entries are not stored in any particular order.
+ *
+ * The block mode tables store the unpacked block mode settings. Block modes are stored in the
+ * compressed block as an 11 bit field, but for any given block size and set of compressor
+ * heuristics, only a subset of the block modes will be used. The actual number of block modes
+ * stored is indicated in @c block_mode_count_all, and the @c block_modes array stores the active modes
+ * contiguously at the start of the array. These entries are stored in incrementing "packed" value
+ * order, which doesn't mean much once unpacked. To allow decompressors to reference the packed data
+ * efficiently the @c block_mode_packed_index array stores the mapping between physical ID and the
+ * actual remapped array index.
+ */
+struct block_size_descriptor
+{
+ /** @brief The block X dimension, in texels. */
+ uint8_t xdim;
+
+ /** @brief The block Y dimension, in texels. */
+ uint8_t ydim;
+
+ /** @brief The block Z dimension, in texels. */
+ uint8_t zdim;
+
+ /** @brief The block total texel count. */
+ uint8_t texel_count;
+
+ /**
+ * @brief The number of stored decimation modes which are "always" modes.
+ *
+ * Always modes are stored at the start of the decimation_modes list.
+ */
+ unsigned int decimation_mode_count_always;
+
+ /** @brief The number of stored decimation modes for selected encodings. */
+ unsigned int decimation_mode_count_selected;
+
+ /** @brief The number of stored decimation modes for any encoding. */
+ unsigned int decimation_mode_count_all;
+
+ /**
+ * @brief The number of stored block modes which are "always" modes.
+ *
+ * Always modes are stored at the start of the block_modes list.
+ */
+ unsigned int block_mode_count_1plane_always;
+
+ /** @brief The number of stored block modes for active 1 plane encodings. */
+ unsigned int block_mode_count_1plane_selected;
+
+ /** @brief The number of stored block modes for active 1 and 2 plane encodings. */
+ unsigned int block_mode_count_1plane_2plane_selected;
+
+ /** @brief The number of stored block modes for any encoding. */
+ unsigned int block_mode_count_all;
+
+	/** @brief The number of selected partitionings for each of the 1/2/3/4 partition counts. */
+ unsigned int partitioning_count_selected[BLOCK_MAX_PARTITIONS];
+
+	/** @brief The total number of partitionings for each of the 1/2/3/4 partition counts. */
+ unsigned int partitioning_count_all[BLOCK_MAX_PARTITIONS];
+
+ /** @brief The active decimation modes, stored in low indices. */
+ decimation_mode decimation_modes[WEIGHTS_MAX_DECIMATION_MODES];
+
+ /** @brief The active decimation tables, stored in low indices. */
+ alignas(ASTCENC_VECALIGN) decimation_info decimation_tables[WEIGHTS_MAX_DECIMATION_MODES];
+
+ /** @brief The packed block mode array index, or @c BLOCK_BAD_BLOCK_MODE if not active. */
+ uint16_t block_mode_packed_index[WEIGHTS_MAX_BLOCK_MODES];
+
+ /** @brief The active block modes, stored in low indices. */
+ block_mode block_modes[WEIGHTS_MAX_BLOCK_MODES];
+
+ /** @brief The active partition tables, stored in low indices per-count. */
+ partition_info partitionings[(3 * BLOCK_MAX_PARTITIONINGS) + 1];
+
+ /**
+ * @brief The packed partition table array index, or @c BLOCK_BAD_PARTITIONING if not active.
+ *
+ * Indexed by partition_count - 2, containing 2, 3 and 4 partitions.
+ */
+ uint16_t partitioning_packed_index[3][BLOCK_MAX_PARTITIONINGS];
+
+ /** @brief The active texels for k-means partition selection. */
+ uint8_t kmeans_texels[BLOCK_MAX_KMEANS_TEXELS];
+
+ /**
+ * @brief The canonical 2-partition coverage pattern used during block partition search.
+ *
+ * Indexed by remapped index, not physical index.
+ */
+ uint64_t coverage_bitmaps_2[BLOCK_MAX_PARTITIONINGS][2];
+
+ /**
+ * @brief The canonical 3-partition coverage pattern used during block partition search.
+ *
+ * Indexed by remapped index, not physical index.
+ */
+ uint64_t coverage_bitmaps_3[BLOCK_MAX_PARTITIONINGS][3];
+
+ /**
+ * @brief The canonical 4-partition coverage pattern used during block partition search.
+ *
+ * Indexed by remapped index, not physical index.
+ */
+ uint64_t coverage_bitmaps_4[BLOCK_MAX_PARTITIONINGS][4];
+
+ /**
+ * @brief Get the block mode structure for index @c block_mode.
+ *
+ * This function can only return block modes that are enabled by the current compressor config.
+ * Decompression from an arbitrary source should not use this without first checking that the
+ * packed block mode index is not @c BLOCK_BAD_BLOCK_MODE.
+ *
+ * @param block_mode The packed block mode index.
+ *
+ * @return The block mode structure.
+ */
+ const block_mode& get_block_mode(unsigned int block_mode) const
+ {
+ unsigned int packed_index = this->block_mode_packed_index[block_mode];
+ assert(packed_index != BLOCK_BAD_BLOCK_MODE && packed_index < this->block_mode_count_all);
+ return this->block_modes[packed_index];
+ }
+
+ /**
+ * @brief Get the decimation mode structure for index @c decimation_mode.
+ *
+ * This function can only return decimation modes that are enabled by the current compressor
+ * config. The mode array is stored packed, but this is only ever indexed by the packed index
+ * stored in the @c block_mode and never exists in an unpacked form.
+ *
+ * @param decimation_mode The packed decimation mode index.
+ *
+ * @return The decimation mode structure.
+ */
+ const decimation_mode& get_decimation_mode(unsigned int decimation_mode) const
+ {
+ return this->decimation_modes[decimation_mode];
+ }
+
+ /**
+ * @brief Get the decimation info structure for index @c decimation_mode.
+ *
+ * This function can only return decimation modes that are enabled by the current compressor
+ * config. The mode array is stored packed, but this is only ever indexed by the packed index
+ * stored in the @c block_mode and never exists in an unpacked form.
+ *
+ * @param decimation_mode The packed decimation mode index.
+ *
+ * @return The decimation info structure.
+ */
+ const decimation_info& get_decimation_info(unsigned int decimation_mode) const
+ {
+ return this->decimation_tables[decimation_mode];
+ }
+
+ /**
+ * @brief Get the partition info table for a given partition count.
+ *
+ * @param partition_count The number of partitions we want the table for.
+ *
+ * @return The pointer to the table of 1024 entries (for 2/3/4 parts) or 1 entry (for 1 part).
+ */
+ const partition_info* get_partition_table(unsigned int partition_count) const
+ {
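+		// The single-partition "table" is stored as the final entry after the 2/3/4 partition
+		// tables, so remap a count of 1 to a virtual count of 5 to index that last slot.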
+ if (partition_count == 1)
+ {
+ partition_count = 5;
+ }
+ unsigned int index = (partition_count - 2) * BLOCK_MAX_PARTITIONINGS;
+ return this->partitionings + index;
+ }
+
+ /**
+ * @brief Get the partition info structure for a given partition count and seed.
+ *
+ * @param partition_count The number of partitions we want the info for.
+ * @param index The partition seed (between 0 and 1023).
+ *
+ * @return The partition info structure.
+ */
+ const partition_info& get_partition_info(unsigned int partition_count, unsigned int index) const
+ {
+ unsigned int packed_index = 0;
+ if (partition_count >= 2)
+ {
+ packed_index = this->partitioning_packed_index[partition_count - 2][index];
+ }
+
+ assert(packed_index != BLOCK_BAD_PARTITIONING && packed_index < this->partitioning_count_all[partition_count - 1]);
+ auto& result = get_partition_table(partition_count)[packed_index];
+ assert(index == result.partition_index);
+ return result;
+ }
+
+ /**
+ * @brief Get the partition info structure for a given partition count and seed.
+ *
+ * @param partition_count The number of partitions we want the info for.
+ * @param packed_index The raw array offset.
+ *
+ * @return The partition info structure.
+ */
+ const partition_info& get_raw_partition_info(unsigned int partition_count, unsigned int packed_index) const
+ {
+ assert(packed_index != BLOCK_BAD_PARTITIONING && packed_index < this->partitioning_count_all[partition_count - 1]);
+ auto& result = get_partition_table(partition_count)[packed_index];
+ return result;
+ }
+};
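+
+/*
+ * Lookup sketch (illustrative only; variable names are hypothetical): a decompressor handling
+ * arbitrary input should validate the packed index before calling @c get_block_mode(), since
+ * modes that are not encodable for this block size are flagged rather than stored.
+ *
+ *     uint16_t packed = bsd.block_mode_packed_index[raw_mode];
+ *     if (packed != BLOCK_BAD_BLOCK_MODE)
+ *     {
+ *         const block_mode& bm = bsd.get_block_mode(raw_mode);
+ *         // ... use bm ...
+ *     }
+ */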
+
+/**
+ * @brief The image data for a single block.
+ *
+ * The @c data_[rgba] fields store the image data in an encoded SoA float form designed for easy
+ * vectorization. Input data is converted to float and stored as values between 0 and 65535. LDR
+ * data is stored as direct UNORM data; HDR data is stored as LNS data.
+ *
+ * The @c rgb_lns and @c alpha_lns fields, which indicate per-texel use of HDR, are only used during
+ * decompression. The current compressor will always use HDR endpoint formats when in HDR mode.
+ */
+struct image_block
+{
+ /** @brief The input (compress) or output (decompress) data for the red color component. */
+ alignas(ASTCENC_VECALIGN) float data_r[BLOCK_MAX_TEXELS];
+
+ /** @brief The input (compress) or output (decompress) data for the green color component. */
+ alignas(ASTCENC_VECALIGN) float data_g[BLOCK_MAX_TEXELS];
+
+ /** @brief The input (compress) or output (decompress) data for the blue color component. */
+ alignas(ASTCENC_VECALIGN) float data_b[BLOCK_MAX_TEXELS];
+
+ /** @brief The input (compress) or output (decompress) data for the alpha color component. */
+ alignas(ASTCENC_VECALIGN) float data_a[BLOCK_MAX_TEXELS];
+
+ /** @brief The number of texels in the block. */
+ uint8_t texel_count;
+
+ /** @brief The original data for texel 0 for constant color block encoding. */
+ vfloat4 origin_texel;
+
+ /** @brief The min component value of all texels in the block. */
+ vfloat4 data_min;
+
+ /** @brief The mean component value of all texels in the block. */
+ vfloat4 data_mean;
+
+ /** @brief The max component value of all texels in the block. */
+ vfloat4 data_max;
+
+ /** @brief The relative error significance of the color channels. */
+ vfloat4 channel_weight;
+
+ /** @brief Is this grayscale block where R == G == B for all texels? */
+ bool grayscale;
+
+ /** @brief Set to 1 if a texel is using HDR RGB endpoints (decompression only). */
+ uint8_t rgb_lns[BLOCK_MAX_TEXELS];
+
+ /** @brief Set to 1 if a texel is using HDR alpha endpoints (decompression only). */
+ uint8_t alpha_lns[BLOCK_MAX_TEXELS];
+
+ /** @brief The X position of this block in the input or output image. */
+ unsigned int xpos;
+
+ /** @brief The Y position of this block in the input or output image. */
+ unsigned int ypos;
+
+ /** @brief The Z position of this block in the input or output image. */
+ unsigned int zpos;
+
+ /**
+ * @brief Get an RGBA texel value from the data.
+ *
+ * @param index The texel index.
+ *
+ * @return The texel in RGBA component ordering.
+ */
+ inline vfloat4 texel(unsigned int index) const
+ {
+ return vfloat4(data_r[index],
+ data_g[index],
+ data_b[index],
+ data_a[index]);
+ }
+
+ /**
+ * @brief Get an RGB texel value from the data.
+ *
+ * @param index The texel index.
+ *
+ * @return The texel in RGB0 component ordering.
+ */
+ inline vfloat4 texel3(unsigned int index) const
+ {
+ return vfloat3(data_r[index],
+ data_g[index],
+ data_b[index]);
+ }
+
+ /**
+ * @brief Get the default alpha value for endpoints that don't store it.
+ *
+ * The default depends on whether the alpha endpoint is LDR or HDR.
+ *
+ * @return The alpha value in the scaled range used by the compressor.
+ */
+ inline float get_default_alpha() const
+ {
+ return this->alpha_lns[0] ? static_cast<float>(0x7800) : static_cast<float>(0xFFFF);
+ }
+
+ /**
+ * @brief Test if a single color channel is constant across the block.
+ *
+ * Constant color channels are easier to compress as interpolating between two identical colors
+ * always returns the same value, irrespective of the weight used. They therefore can be ignored
+ * for the purposes of weight selection and use of a second weight plane.
+ *
+ * @return @c true if the channel is constant across the block, @c false otherwise.
+ */
+ inline bool is_constant_channel(int channel) const
+ {
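+		// Build a mask selecting the requested component lane, and a mask of lanes where the block
+		// min equals the block max; the channel is constant if the two masks overlap.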
+ vmask4 lane_mask = vint4::lane_id() == vint4(channel);
+ vmask4 color_mask = this->data_min == this->data_max;
+ return any(lane_mask & color_mask);
+ }
+
+ /**
+ * @brief Test if this block is a luminance block with constant 1.0 alpha.
+ *
+	 * @return @c true if the block is a luminance block, @c false otherwise.
+ */
+ inline bool is_luminance() const
+ {
+ float default_alpha = this->get_default_alpha();
+ bool alpha1 = (this->data_min.lane<3>() == default_alpha) &&
+ (this->data_max.lane<3>() == default_alpha);
+ return this->grayscale && alpha1;
+ }
+
+ /**
+ * @brief Test if this block is a luminance block with variable alpha.
+ *
+	 * @return @c true if the block is a luminance + alpha block, @c false otherwise.
+ */
+ inline bool is_luminancealpha() const
+ {
+ float default_alpha = this->get_default_alpha();
+ bool alpha1 = (this->data_min.lane<3>() == default_alpha) &&
+ (this->data_max.lane<3>() == default_alpha);
+ return this->grayscale && !alpha1;
+ }
+};
+
+/**
+ * @brief Data structure storing the color endpoints for a block.
+ */
+struct endpoints
+{
+ /** @brief The number of partition endpoints stored. */
+ unsigned int partition_count;
+
+ /** @brief The colors for endpoint 0. */
+ vfloat4 endpt0[BLOCK_MAX_PARTITIONS];
+
+ /** @brief The colors for endpoint 1. */
+ vfloat4 endpt1[BLOCK_MAX_PARTITIONS];
+};
+
+/**
+ * @brief Data structure storing the color endpoints and weights.
+ */
+struct endpoints_and_weights
+{
+ /** @brief True if all active values in weight_error_scale are the same. */
+ bool is_constant_weight_error_scale;
+
+ /** @brief The color endpoints. */
+ endpoints ep;
+
+ /** @brief The ideal weight for each texel; may be undecimated or decimated. */
+ alignas(ASTCENC_VECALIGN) float weights[BLOCK_MAX_TEXELS];
+
+ /** @brief The ideal weight error scaling for each texel; may be undecimated or decimated. */
+ alignas(ASTCENC_VECALIGN) float weight_error_scale[BLOCK_MAX_TEXELS];
+};
+
+/**
+ * @brief Utility storing estimated errors from choosing particular endpoint encodings.
+ */
+struct encoding_choice_errors
+{
+ /** @brief Error of using LDR RGB-scale instead of complete endpoints. */
+ float rgb_scale_error;
+
+ /** @brief Error of using HDR RGB-scale instead of complete endpoints. */
+ float rgb_luma_error;
+
+ /** @brief Error of using luminance instead of RGB. */
+ float luminance_error;
+
+ /** @brief Error of discarding alpha and using a constant 1.0 alpha. */
+ float alpha_drop_error;
+
+ /** @brief Can we use delta offset encoding? */
+ bool can_offset_encode;
+
+ /** @brief Can we use blue contraction encoding? */
+ bool can_blue_contract;
+};
+
+/**
+ * @brief Preallocated working buffers, allocated per thread during context creation.
+ */
+struct alignas(ASTCENC_VECALIGN) compression_working_buffers
+{
+ /** @brief Ideal endpoints and weights for plane 1. */
+ endpoints_and_weights ei1;
+
+ /** @brief Ideal endpoints and weights for plane 2. */
+ endpoints_and_weights ei2;
+
+ /**
+ * @brief Decimated ideal weight values in the ~0-1 range.
+ *
+ * Note that values can be slightly below zero or higher than one due to
+ * endpoint extents being inside the ideal color representation.
+ *
+	 * For two planes, the second plane starts at offset @c WEIGHTS_PLANE2_OFFSET.
+ */
+ alignas(ASTCENC_VECALIGN) float dec_weights_ideal[WEIGHTS_MAX_DECIMATION_MODES * BLOCK_MAX_WEIGHTS];
+
+ /**
+ * @brief Decimated quantized weight values in the unquantized 0-64 range.
+ *
+	 * For two planes, the second plane starts at offset @c WEIGHTS_PLANE2_OFFSET.
+ */
+ uint8_t dec_weights_uquant[WEIGHTS_MAX_BLOCK_MODES * BLOCK_MAX_WEIGHTS];
+
+ /** @brief Error of the best encoding combination for each block mode. */
+ alignas(ASTCENC_VECALIGN) float errors_of_best_combination[WEIGHTS_MAX_BLOCK_MODES];
+
+ /** @brief The best color quant for each block mode. */
+ uint8_t best_quant_levels[WEIGHTS_MAX_BLOCK_MODES];
+
+ /** @brief The best color quant for each block mode if modes are the same and we have spare bits. */
+ uint8_t best_quant_levels_mod[WEIGHTS_MAX_BLOCK_MODES];
+
+ /** @brief The best endpoint format for each partition. */
+ uint8_t best_ep_formats[WEIGHTS_MAX_BLOCK_MODES][BLOCK_MAX_PARTITIONS];
+
+ /** @brief The total bit storage needed for quantized weights for each block mode. */
+ int8_t qwt_bitcounts[WEIGHTS_MAX_BLOCK_MODES];
+
+ /** @brief The cumulative error for quantized weights for each block mode. */
+ float qwt_errors[WEIGHTS_MAX_BLOCK_MODES];
+
+ /** @brief The low weight value in plane 1 for each block mode. */
+ float weight_low_value1[WEIGHTS_MAX_BLOCK_MODES];
+
+ /** @brief The high weight value in plane 1 for each block mode. */
+ float weight_high_value1[WEIGHTS_MAX_BLOCK_MODES];
+
+ /** @brief The low weight value in plane 1 for each quant level and decimation mode. */
+ float weight_low_values1[WEIGHTS_MAX_DECIMATION_MODES][TUNE_MAX_ANGULAR_QUANT + 1];
+
+ /** @brief The high weight value in plane 1 for each quant level and decimation mode. */
+ float weight_high_values1[WEIGHTS_MAX_DECIMATION_MODES][TUNE_MAX_ANGULAR_QUANT + 1];
+
+ /** @brief The low weight value in plane 2 for each block mode. */
+ float weight_low_value2[WEIGHTS_MAX_BLOCK_MODES];
+
+ /** @brief The high weight value in plane 2 for each block mode. */
+ float weight_high_value2[WEIGHTS_MAX_BLOCK_MODES];
+
+ /** @brief The low weight value in plane 2 for each quant level and decimation mode. */
+ float weight_low_values2[WEIGHTS_MAX_DECIMATION_MODES][TUNE_MAX_ANGULAR_QUANT + 1];
+
+ /** @brief The high weight value in plane 2 for each quant level and decimation mode. */
+ float weight_high_values2[WEIGHTS_MAX_DECIMATION_MODES][TUNE_MAX_ANGULAR_QUANT + 1];
+};
+
+struct dt_init_working_buffers
+{
+ uint8_t weight_count_of_texel[BLOCK_MAX_TEXELS];
+ uint8_t grid_weights_of_texel[BLOCK_MAX_TEXELS][4];
+ uint8_t weights_of_texel[BLOCK_MAX_TEXELS][4];
+
+ uint8_t texel_count_of_weight[BLOCK_MAX_WEIGHTS];
+ uint8_t texels_of_weight[BLOCK_MAX_WEIGHTS][BLOCK_MAX_TEXELS];
+ uint8_t texel_weights_of_weight[BLOCK_MAX_WEIGHTS][BLOCK_MAX_TEXELS];
+};
+
+/**
+ * @brief Weight quantization transfer table.
+ *
+ * ASTC can store texel weights at many quantization levels, so for performance we store essential
+ * information about each level as a precomputed data structure. Unquantized weights are integers
+ * or floats in the range [0, 64].
+ *
+ * This structure provides a table used to estimate the closest quantized weight for a given
+ * floating-point weight. For each quantized weight it stores the corresponding unquantized value,
+ * and for each unquantized value it stores the previous and next values on the quantization grid.
+ */
+struct quant_and_transfer_table
+{
+ /** @brief The unscrambled unquantized value. */
+ int8_t quant_to_unquant[32];
+
+ /** @brief The scrambling order: scrambled_quant = map[unscrambled_quant]. */
+ int8_t scramble_map[32];
+
+ /** @brief The unscrambling order: unscrambled_unquant = map[scrambled_quant]. */
+ int8_t unscramble_and_unquant_map[32];
+
+ /**
+ * @brief A table of previous-and-next weights, indexed by the current unquantized value.
+ * * bits 7:0 = previous-index, unquantized
+ * * bits 15:8 = next-index, unquantized
+ */
+ uint16_t prev_next_values[65];
+};
+
+/** @brief The precomputed quant and transfer table. */
+extern const quant_and_transfer_table quant_and_xfer_tables[12];
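+
+/*
+ * Lookup sketch (illustrative only; it assumes the weight quant levels QUANT_2 through QUANT_32
+ * map to table indices 0 through 11): for an unquantized weight value in the 0-64 range, the
+ * nearest representable neighbours below and above can be read out of @c prev_next_values.
+ *
+ *     const quant_and_transfer_table& qat = quant_and_xfer_tables[quant_level];
+ *     uint16_t pn = qat.prev_next_values[current_unquant];
+ *     uint8_t prev_unquant = pn & 0xFF;         // bits 7:0
+ *     uint8_t next_unquant = (pn >> 8) & 0xFF;  // bits 15:8
+ */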
+
+/** @brief The block is an error block, and will return error color or NaN. */
+static constexpr uint8_t SYM_BTYPE_ERROR { 0 };
+
+/** @brief The block is a constant color block using FP16 colors. */
+static constexpr uint8_t SYM_BTYPE_CONST_F16 { 1 };
+
+/** @brief The block is a constant color block using UNORM16 colors. */
+static constexpr uint8_t SYM_BTYPE_CONST_U16 { 2 };
+
+/** @brief The block is a normal non-constant color block. */
+static constexpr uint8_t SYM_BTYPE_NONCONST { 3 };
+
+/**
+ * @brief A symbolic representation of a compressed block.
+ *
+ * The symbolic representation stores the unpacked content of a single
+ * @c physical_compressed_block, in a form which is much easier to access for
+ * the rest of the compressor code.
+ */
+struct symbolic_compressed_block
+{
+ /** @brief The block type, one of the @c SYM_BTYPE_* constants. */
+ uint8_t block_type;
+
+ /** @brief The number of partitions; valid for @c NONCONST blocks. */
+ uint8_t partition_count;
+
+ /** @brief Non-zero if the color formats matched; valid for @c NONCONST blocks. */
+ uint8_t color_formats_matched;
+
+ /** @brief The plane 2 color component, or -1 if single plane; valid for @c NONCONST blocks. */
+ int8_t plane2_component;
+
+ /** @brief The block mode; valid for @c NONCONST blocks. */
+ uint16_t block_mode;
+
+ /** @brief The partition index; valid for @c NONCONST blocks if 2 or more partitions. */
+ uint16_t partition_index;
+
+ /** @brief The endpoint color formats for each partition; valid for @c NONCONST blocks. */
+ uint8_t color_formats[BLOCK_MAX_PARTITIONS];
+
+ /** @brief The endpoint color quant mode; valid for @c NONCONST blocks. */
+ quant_method quant_mode;
+
+ /** @brief The error of the current encoding; valid for @c NONCONST blocks. */
+ float errorval;
+
+ // We can't have both of these at the same time
+ union {
+ /** @brief The constant color; valid for @c CONST blocks. */
+ int constant_color[BLOCK_MAX_COMPONENTS];
+
+ /** @brief The quantized endpoint color pairs; valid for @c NONCONST blocks. */
+ uint8_t color_values[BLOCK_MAX_PARTITIONS][8];
+ };
+
+ /** @brief The quantized and decimated weights.
+ *
+ * Weights are stored in the 0-64 unpacked range allowing them to be used
+ * directly in encoding passes without per-use unpacking. Packing happens
+ * when converting to/from the physical bitstream encoding.
+ *
+ * If dual plane, the second plane starts at @c weights[WEIGHTS_PLANE2_OFFSET].
+ */
+ uint8_t weights[BLOCK_MAX_WEIGHTS];
+
+ /**
+ * @brief Get the weight quantization used by this block mode.
+ *
+ * @return The quantization level.
+ */
+ inline quant_method get_color_quant_mode() const
+ {
+ return this->quant_mode;
+ }
+};
+
+/**
+ * @brief A physical representation of a compressed block.
+ *
+ * The physical representation stores the raw bytes of the format in memory.
+ */
+struct physical_compressed_block
+{
+ /** @brief The ASTC encoded data for a single block. */
+ uint8_t data[16];
+};
+
+/**
+ * @brief Parameter structure for @c compute_pixel_region_variance().
+ *
+ * This function takes a structure to avoid spilling arguments to the stack on every function
+ * invocation, as there are a lot of parameters.
+ */
+struct pixel_region_args
+{
+ /** @brief The image to analyze. */
+ const astcenc_image* img;
+
+ /** @brief The component swizzle pattern. */
+ astcenc_swizzle swz;
+
+ /** @brief Should the algorithm bother with Z axis processing? */
+ bool have_z;
+
+ /** @brief The kernel radius for alpha processing. */
+ unsigned int alpha_kernel_radius;
+
+ /** @brief The X dimension of the working data to process. */
+ unsigned int size_x;
+
+ /** @brief The Y dimension of the working data to process. */
+ unsigned int size_y;
+
+ /** @brief The Z dimension of the working data to process. */
+ unsigned int size_z;
+
+ /** @brief The X position of first src and dst data in the data set. */
+ unsigned int offset_x;
+
+ /** @brief The Y position of first src and dst data in the data set. */
+ unsigned int offset_y;
+
+ /** @brief The Z position of first src and dst data in the data set. */
+ unsigned int offset_z;
+
+ /** @brief The working memory buffer. */
+ vfloat4 *work_memory;
+};
+
+/**
+ * @brief Parameter structure for @c compute_averages_proc().
+ */
+struct avg_args
+{
+ /** @brief The arguments for the nested variance computation. */
+ pixel_region_args arg;
+
+ /** @brief The image X dimensions. */
+ unsigned int img_size_x;
+
+ /** @brief The image Y dimensions. */
+ unsigned int img_size_y;
+
+ /** @brief The image Z dimensions. */
+ unsigned int img_size_z;
+
+ /** @brief The maximum working block dimensions in X and Y dimensions. */
+ unsigned int blk_size_xy;
+
+ /** @brief The maximum working block dimensions in Z dimensions. */
+ unsigned int blk_size_z;
+
+ /** @brief The working block memory size. */
+ unsigned int work_memory_size;
+};
+
+#if defined(ASTCENC_DIAGNOSTICS)
+/* See astcenc_diagnostic_trace header for details. */
+class TraceLog;
+#endif
+
+/**
+ * @brief The astcenc compression context.
+ */
+struct astcenc_contexti
+{
+ /** @brief The configuration this context was created with. */
+ astcenc_config config;
+
+ /** @brief The thread count supported by this context. */
+ unsigned int thread_count;
+
+ /** @brief The block size descriptor this context was created with. */
+ block_size_descriptor* bsd;
+
+ /*
+ * Fields below here are not needed in a decompress-only build, but some remain as they are
+ * small and it avoids littering the code with #ifdefs. The most significant contributors to
+ * large structure size are omitted.
+ */
+
+ /** @brief The input image alpha channel averages table, may be @c nullptr if not needed. */
+ float* input_alpha_averages;
+
+ /** @brief The scratch working buffers, one per thread (see @c thread_count). */
+ compression_working_buffers* working_buffers;
+
+#if !defined(ASTCENC_DECOMPRESS_ONLY)
+ /** @brief The pixel region and variance worker arguments. */
+ avg_args avg_preprocess_args;
+#endif
+
+#if defined(ASTCENC_DIAGNOSTICS)
+ /**
+ * @brief The diagnostic trace logger.
+ *
+ * Note that this is a singleton, so can only be used in single threaded mode. It only exists
+ * here so we have a reference to close the file at the end of the capture.
+ */
+ TraceLog* trace_log;
+#endif
+};
+
+/* ============================================================================
+ Functionality for managing block sizes and partition tables.
+============================================================================ */
+
+/**
+ * @brief Populate the block size descriptor for the target block size.
+ *
+ * This will also initialize the partition table metadata, which is stored as part of the BSD
+ * structure.
+ *
+ * @param x_texels The number of texels in the block X dimension.
+ * @param y_texels The number of texels in the block Y dimension.
+ * @param z_texels The number of texels in the block Z dimension.
+ * @param can_omit_modes Can we discard modes and partitionings that astcenc won't use?
+ * @param partition_count_cutoff The partition count cutoff to use, if we can omit partitionings.
+ * @param mode_cutoff The block mode percentile cutoff [0-1].
+ * @param[out] bsd The descriptor to initialize.
+ */
+void init_block_size_descriptor(
+ unsigned int x_texels,
+ unsigned int y_texels,
+ unsigned int z_texels,
+ bool can_omit_modes,
+ unsigned int partition_count_cutoff,
+ float mode_cutoff,
+ block_size_descriptor& bsd);
+
+/**
+ * @brief Populate the partition tables for the target block size.
+ *
+ * Note the @c bsd descriptor must be initialized by calling @c init_block_size_descriptor() before
+ * calling this function.
+ *
+ * @param[out] bsd The block size information structure to populate.
+ * @param can_omit_partitionings True if we can drop partitionings that astcenc won't use.
+ * @param partition_count_cutoff The partition count cutoff to use, if we can omit partitionings.
+ */
+void init_partition_tables(
+ block_size_descriptor& bsd,
+ bool can_omit_partitionings,
+ unsigned int partition_count_cutoff);
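+
+/*
+ * Ordering sketch (illustrative values only): the descriptor must be populated before any block
+ * mode or partition table query, and @c init_partition_tables() may only be called on a descriptor
+ * that has already been initialized. For a 6x6 2D block (heap allocated, as the structure is large):
+ *
+ *     block_size_descriptor* bsd = new block_size_descriptor;
+ *     init_block_size_descriptor(6, 6, 1, true, 4, 0.95f, *bsd);
+ */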
+
+/**
+ * @brief Get the percentile table for 2D block modes.
+ *
+ * This is an empirically determined prioritization of which block modes to use in the search in
+ * terms of their centile (lower centiles = more useful).
+ *
+ * Returns a dynamically allocated array; caller must free with delete[].
+ *
+ * @param xdim The block x size.
+ * @param ydim The block y size.
+ *
+ * @return The unpacked table.
+ */
+const float* get_2d_percentile_table(
+ unsigned int xdim,
+ unsigned int ydim);
+
+/**
+ * @brief Query if a 2D block size is legal.
+ *
+ * @return True if legal, false otherwise.
+ */
+bool is_legal_2d_block_size(
+ unsigned int xdim,
+ unsigned int ydim);
+
+/**
+ * @brief Query if a 3D block size is legal.
+ *
+ * @return True if legal, false otherwise.
+ */
+bool is_legal_3d_block_size(
+ unsigned int xdim,
+ unsigned int ydim,
+ unsigned int zdim);
+
+/* ============================================================================
+ Functionality for managing BISE quantization and unquantization.
+============================================================================ */
+
+/**
+ * @brief The precomputed table for quantizing color values.
+ *
+ * Converts unquant value in 0-255 range into quant value in 0-255 range.
+ * No BISE scrambling is applied at this stage.
+ *
+ * Indexed by [quant_mode - 4][data_value].
+ */
+extern const uint8_t color_unquant_to_uquant_tables[17][256];
+
+/**
+ * @brief The precomputed table for packing quantized color values.
+ *
+ * Converts quant value in 0-255 range into packed quant value in 0-N range,
+ * with BISE scrambling applied.
+ *
+ * Indexed by [quant_mode - 4][data_value].
+ */
+extern const uint8_t color_uquant_to_scrambled_pquant_tables[17][256];
+
+/**
+ * @brief The precomputed table for unpacking color values.
+ *
+ * Converts quant value in 0-N range into unpacked value in 0-255 range,
+ * with BISE unscrambling applied.
+ *
+ * Indexed by [quant_mode - 4][data_value].
+ */
+extern const uint8_t* color_scrambled_pquant_to_uquant_tables[17];
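+
+/*
+ * Round-trip sketch (illustrative only; @c quant_level and @c value are hypothetical): quantize an
+ * unquantized color value, pack and scramble it for the physical encoding, then unscramble it
+ * again. The lowest color quant level is QUANT_6, hence the "- 4" row offset.
+ *
+ *     uint8_t uq  = color_unquant_to_uquant_tables[quant_level - 4][value];
+ *     uint8_t pq  = color_uquant_to_scrambled_pquant_tables[quant_level - 4][uq];
+ *     uint8_t uq2 = color_scrambled_pquant_to_uquant_tables[quant_level - 4][pq];
+ *     // uq2 == uq
+ */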
+
+/**
+ * @brief The precomputed quant mode storage table.
+ *
+ * Indexing by [integer_count/2][bits] gives us the quantization level for a given integer count and
+ * number of compressed storage bits. Returns -1 for cases where the requested integer count cannot
+ * ever fit in the supplied storage size.
+ */
+extern const int8_t quant_mode_table[10][128];
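+
+/*
+ * Lookup sketch (illustrative values): the quant level able to fit eight ISE-encoded integers into
+ * 30 bits of storage would be read as @c quant_mode_table[8 / 2][30]; a result of -1 means that
+ * integer count can never fit in the available bits.
+ */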
+
+/**
+ * @brief Encode a packed string using BISE.
+ *
+ * Note that BISE can return strings that are not a whole number of bytes in length, and ASTC can
+ * start storing strings in a block at arbitrary bit offsets in the encoded data.
+ *
+ * @param quant_level The BISE alphabet size.
+ * @param character_count The number of characters in the string.
+ * @param input_data The unpacked string, one byte per character.
+ * @param[in,out] output_data The output packed string.
+ * @param bit_offset The starting offset in the output storage.
+ */
+void encode_ise(
+ quant_method quant_level,
+ unsigned int character_count,
+ const uint8_t* input_data,
+ uint8_t* output_data,
+ unsigned int bit_offset);
+
+/**
+ * @brief Decode a packed string using BISE.
+ *
+ * Note that BISE input strings are not a whole number of bytes in length, and ASTC can start
+ * strings at arbitrary bit offsets in the encoded data.
+ *
+ * @param quant_level The BISE alphabet size.
+ * @param character_count The number of characters in the string.
+ * @param input_data The packed string.
+ * @param[in,out] output_data The output storage, one byte per character.
+ * @param bit_offset The starting offset in the output storage.
+ */
+void decode_ise(
+ quant_method quant_level,
+ unsigned int character_count,
+ const uint8_t* input_data,
+ uint8_t* output_data,
+ unsigned int bit_offset);
+
+/**
+ * @brief Return the number of bits needed to encode an ISE sequence.
+ *
+ * This implementation assumes that the @c quant level is untrusted, given it may come from random
+ * data being decompressed, so we return an arbitrary unencodable size if that is the case.
+ *
+ * @param character_count The number of items in the sequence.
+ * @param quant_level The desired quantization level.
+ *
+ * @return The number of bits needed to encode the BISE string.
+ */
+unsigned int get_ise_sequence_bitcount(
+ unsigned int character_count,
+ quant_method quant_level);
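+
+/*
+ * Usage sketch (illustrative only; the values and buffer size are hypothetical): size the packed
+ * buffer from the BISE bit count, encode, and decode back into a scratch buffer.
+ *
+ *     uint8_t values[8] = { 0, 1, 2, 3, 4, 3, 2, 1 };  // Alphabet size 5
+ *     unsigned int bits = get_ise_sequence_bitcount(8, QUANT_5);
+ *     uint8_t packed[16] { };                           // (bits + 7) / 8 fits easily
+ *     encode_ise(QUANT_5, 8, values, packed, 0);
+ *
+ *     uint8_t unpacked[8];
+ *     decode_ise(QUANT_5, 8, packed, unpacked, 0);
+ */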
+
+/* ============================================================================
+ Functionality for managing color partitioning.
+============================================================================ */
+
+/**
+ * @brief Compute averages and dominant directions for each partition in a 2 component texture.
+ *
+ * @param pi The partition info for the current trial.
+ * @param blk The image block color data to be compressed.
+ * @param component1 The first component included in the analysis.
+ * @param component2 The second component included in the analysis.
+ * @param[out] pm The output partition metrics.
+ * - Only pi.partition_count array entries actually get initialized.
+ * - Direction vectors @c pm.dir are not normalized.
+ */
+void compute_avgs_and_dirs_2_comp(
+ const partition_info& pi,
+ const image_block& blk,
+ unsigned int component1,
+ unsigned int component2,
+ partition_metrics pm[BLOCK_MAX_PARTITIONS]);
+
+/**
+ * @brief Compute averages and dominant directions for each partition in a 3 component texture.
+ *
+ * @param pi The partition info for the current trial.
+ * @param blk The image block color data to be compressed.
+ * @param omitted_component The component excluded from the analysis.
+ * @param[out] pm The output partition metrics.
+ * - Only pi.partition_count array entries actually get initialized.
+ * - Direction vectors @c pm.dir are not normalized.
+ */
+void compute_avgs_and_dirs_3_comp(
+ const partition_info& pi,
+ const image_block& blk,
+ unsigned int omitted_component,
+ partition_metrics pm[BLOCK_MAX_PARTITIONS]);
+
+/**
+ * @brief Compute averages and dominant directions for each partition in a 3 component texture.
+ *
+ * This is a specialization of @c compute_avgs_and_dirs_3_comp where the omitted component is
+ * always alpha, a common case during partition search.
+ *
+ * @param pi The partition info for the current trial.
+ * @param blk The image block color data to be compressed.
+ * @param[out] pm The output partition metrics.
+ * - Only pi.partition_count array entries actually get initialized.
+ * - Direction vectors @c pm.dir are not normalized.
+ */
+void compute_avgs_and_dirs_3_comp_rgb(
+ const partition_info& pi,
+ const image_block& blk,
+ partition_metrics pm[BLOCK_MAX_PARTITIONS]);
+
+/**
+ * @brief Compute averages and dominant directions for each partition in a 4 component texture.
+ *
+ * @param pi The partition info for the current trial.
+ * @param blk The image block color data to be compressed.
+ * @param[out] pm The output partition metrics.
+ * - Only pi.partition_count array entries actually get initialized.
+ * - Direction vectors @c pm.dir are not normalized.
+ */
+void compute_avgs_and_dirs_4_comp(
+ const partition_info& pi,
+ const image_block& blk,
+ partition_metrics pm[BLOCK_MAX_PARTITIONS]);
+
+/**
+ * @brief Compute the RGB error for uncorrelated and same chroma projections.
+ *
+ * The output of compute averages and dirs is post processed to define two lines, both of which go
+ * through the mean-color-value. One line has a direction defined by the dominant direction; this
+ * is used to assess the error from using an uncorrelated color representation. The other line goes
+ * through (0,0,0) and is used to assess the error from using an RGBS color representation.
+ *
+ * This function computes the squared error when using these two representations.
+ *
+ * @param pi The partition info for the current trial.
+ * @param blk The image block color data to be compressed.
+ * @param[in,out] plines Processed line inputs, and line length outputs.
+ * @param[out] uncor_error The cumulative error for using the uncorrelated line.
+ * @param[out] samec_error The cumulative error for using the same chroma line.
+ */
+void compute_error_squared_rgb(
+ const partition_info& pi,
+ const image_block& blk,
+ partition_lines3 plines[BLOCK_MAX_PARTITIONS],
+ float& uncor_error,
+ float& samec_error);
+
+/**
+ * @brief Compute the RGBA error for uncorrelated and same chroma projections.
+ *
+ * The output of compute averages and dirs is post processed to define two lines, both of which go
+ * through the mean-color-value. One line has a direction defined by the dominant direction; this
+ * is used to assess the error from using an uncorrelated color representation. The other line goes
+ * through (0,0,0,1) and is used to assess the error from using an RGBS color representation.
+ *
+ * This function computes the squared error when using these two representations.
+ *
+ * @param pi The partition info for the current trial.
+ * @param blk The image block color data to be compressed.
+ * @param uncor_plines Processed uncorrelated partition lines for each partition.
+ * @param samec_plines Processed same chroma partition lines for each partition.
+ * @param[out] uncor_lengths The length of each partition's uncorrelated line.
+ * @param[out] samec_lengths The length of each partition's same chroma line.
+ * @param[out] uncor_error The cumulative error for using the uncorrelated line.
+ * @param[out] samec_error The cumulative error for using the same chroma line.
+ */
+void compute_error_squared_rgba(
+ const partition_info& pi,
+ const image_block& blk,
+ const processed_line4 uncor_plines[BLOCK_MAX_PARTITIONS],
+ const processed_line4 samec_plines[BLOCK_MAX_PARTITIONS],
+ float uncor_lengths[BLOCK_MAX_PARTITIONS],
+ float samec_lengths[BLOCK_MAX_PARTITIONS],
+ float& uncor_error,
+ float& samec_error);
+
+/**
+ * @brief Find the best set of partitions to trial for a given block.
+ *
+ * On return the @c best_partitions list will contain the two best partition
+ * candidates; one assuming data has uncorrelated chroma and one assuming the
+ * data has correlated chroma. The best candidate is returned first in the list.
+ *
+ * @param bsd The block size information.
+ * @param blk The image block color data to compress.
+ * @param partition_count The number of partitions in the block.
+ * @param partition_search_limit The number of candidate partition encodings to trial.
+ * @param[out] best_partitions The best partition candidates.
+ * @param requested_candidates The number of requested partitionings. May return fewer if
+ * candidates are not available.
+ *
+ * @return The actual number of candidates returned.
+ */
+unsigned int find_best_partition_candidates(
+ const block_size_descriptor& bsd,
+ const image_block& blk,
+ unsigned int partition_count,
+ unsigned int partition_search_limit,
+ unsigned int best_partitions[TUNE_MAX_PARTITIONING_CANDIDATES],
+ unsigned int requested_candidates);
+
+/* ============================================================================
+ Functionality for managing images and image related data.
+============================================================================ */
+
+/**
+ * @brief Setup computation of regional averages in an image.
+ *
+ * This must be done by only a single thread per image, before any thread calls
+ * @c compute_averages().
+ *
+ * Results are written back into @c img->input_alpha_averages.
+ *
+ * @param img The input image data, also holds output data.
+ * @param alpha_kernel_radius The kernel radius (in pixels) for alpha mods.
+ * @param swz Input data component swizzle.
+ * @param[out] ag The average variance arguments to init.
+ *
+ * @return The number of tasks in the processing stage.
+ */
+unsigned int init_compute_averages(
+ const astcenc_image& img,
+ unsigned int alpha_kernel_radius,
+ const astcenc_swizzle& swz,
+ avg_args& ag);
+
+/**
+ * @brief Compute averages for a pixel region.
+ *
+ * The routine computes the averages in a single pass, using a summed-area table to decouple the
+ * running time from the averaging kernel size.
+ *
+ * @param[out] ctx The compressor context storing the output data.
+ * @param arg The input parameter structure.
+ */
+void compute_pixel_region_variance(
+ astcenc_contexti& ctx,
+ const pixel_region_args& arg);
+
+/**
+ * @brief Load a single image block from the input image.
+ *
+ * @param decode_mode The compression color profile.
+ * @param img The input image data.
+ * @param[out] blk The image block to populate.
+ * @param bsd The block size information.
+ * @param xpos The block X coordinate in the input image.
+ * @param ypos The block Y coordinate in the input image.
+ * @param zpos The block Z coordinate in the input image.
+ * @param swz The swizzle to apply on load.
+ */
+void load_image_block(
+ astcenc_profile decode_mode,
+ const astcenc_image& img,
+ image_block& blk,
+ const block_size_descriptor& bsd,
+ unsigned int xpos,
+ unsigned int ypos,
+ unsigned int zpos,
+ const astcenc_swizzle& swz);
+
+/**
+ * @brief Load a single image block from the input image.
+ *
+ * This specialized variant can be used only if the block is 2D LDR U8 data,
+ * with no swizzle.
+ *
+ * @param decode_mode The compression color profile.
+ * @param img The input image data.
+ * @param[out] blk The image block to populate.
+ * @param bsd The block size information.
+ * @param xpos The block X coordinate in the input image.
+ * @param ypos The block Y coordinate in the input image.
+ * @param zpos The block Z coordinate in the input image.
+ * @param swz The swizzle to apply on load.
+ */
+void load_image_block_fast_ldr(
+ astcenc_profile decode_mode,
+ const astcenc_image& img,
+ image_block& blk,
+ const block_size_descriptor& bsd,
+ unsigned int xpos,
+ unsigned int ypos,
+ unsigned int zpos,
+ const astcenc_swizzle& swz);
+
+/**
+ * @brief Store a single image block to the output image.
+ *
+ * @param[out] img The output image data.
+ * @param blk The image block to export.
+ * @param bsd The block size information.
+ * @param xpos The block X coordinate in the input image.
+ * @param ypos The block Y coordinate in the input image.
+ * @param zpos The block Z coordinate in the input image.
+ * @param swz The swizzle to apply on store.
+ */
+void store_image_block(
+ astcenc_image& img,
+ const image_block& blk,
+ const block_size_descriptor& bsd,
+ unsigned int xpos,
+ unsigned int ypos,
+ unsigned int zpos,
+ const astcenc_swizzle& swz);
+
+/* ============================================================================
+ Functionality for computing endpoint colors and weights for a block.
+============================================================================ */
+
+/**
+ * @brief Compute ideal endpoint colors and weights for 1 plane of weights.
+ *
+ * The ideal endpoints define a color line for the partition. For each texel the ideal weight
+ * defines an exact position on the partition color line. We can then use these to assess the error
+ * introduced by removing and quantizing the weight grid.
+ *
+ * @param blk The image block color data to compress.
+ * @param pi The partition info for the current trial.
+ * @param[out] ei The endpoint and weight values.
+ */
+void compute_ideal_colors_and_weights_1plane(
+ const image_block& blk,
+ const partition_info& pi,
+ endpoints_and_weights& ei);
+
+/**
+ * @brief Compute ideal endpoint colors and weights for 2 planes of weights.
+ *
+ * The ideal endpoints define a color line for the partition. For each texel the ideal weight
+ * defines an exact position on the partition color line. We can then use these to assess the error
+ * introduced by removing and quantizing the weight grid.
+ *
+ * @param bsd The block size information.
+ * @param blk The image block color data to compress.
+ * @param plane2_component The component assigned to plane 2.
+ * @param[out] ei1 The endpoint and weight values for plane 1.
+ * @param[out] ei2 The endpoint and weight values for plane 2.
+ */
+void compute_ideal_colors_and_weights_2planes(
+ const block_size_descriptor& bsd,
+ const image_block& blk,
+ unsigned int plane2_component,
+ endpoints_and_weights& ei1,
+ endpoints_and_weights& ei2);
+
+/**
+ * @brief Compute the optimal unquantized weights for a decimation table.
+ *
+ * After computing ideal weights for the case of a complete weight grid, we want to compute the
+ * ideal weights for the case where weights exist only for some texels. We do this with a
+ * steepest-descent grid solver which works as follows:
+ *
+ * First, for each actual weight, perform a weighted averaging of the texels affected by the weight.
+ * Then, set step size to <some initial value> and attempt one step towards the original ideal
+ * weight if it helps to reduce error.
+ *
+ * @param ei The non-decimated endpoints and weights.
+ * @param di The selected weight decimation.
+ * @param[out] dec_weight_ideal_value The ideal values for the decimated weight set.
+ */
+void compute_ideal_weights_for_decimation(
+ const endpoints_and_weights& ei,
+ const decimation_info& di,
+ float* dec_weight_ideal_value);
+
+/**
+ * @brief Compute the optimal quantized weights for a decimation table.
+ *
+ * We test the two closest weight indices in the allowed quantization range and keep the weight that
+ * is the closest match.
+ *
+ * @param di The selected weight decimation.
+ * @param low_bound The lowest weight allowed.
+ * @param high_bound The highest weight allowed.
+ * @param dec_weight_ideal_value The ideal weight set.
+ * @param[out] dec_weight_quant_uvalue The output quantized weight as a float.
+ * @param[out] dec_weight_uquant The output quantized weight as encoded int.
+ * @param quant_level The desired weight quant level.
+ */
+void compute_quantized_weights_for_decimation(
+ const decimation_info& di,
+ float low_bound,
+ float high_bound,
+ const float* dec_weight_ideal_value,
+ float* dec_weight_quant_uvalue,
+ uint8_t* dec_weight_uquant,
+ quant_method quant_level);
+
+/**
+ * @brief Compute the error of a decimated weight set for 1 plane.
+ *
+ * After computing ideal weights for the case with one weight per texel, we want to compute the
+ * error for decimated weight grids where weights are stored at a lower resolution. This function
+ * computes the error of the reduced grid, compared to the full grid.
+ *
+ * @param eai The ideal weights for the full grid.
+ * @param di The selected weight decimation.
+ * @param dec_weight_quant_uvalue The quantized weights for the decimated grid.
+ *
+ * @return The accumulated error.
+ */
+float compute_error_of_weight_set_1plane(
+ const endpoints_and_weights& eai,
+ const decimation_info& di,
+ const float* dec_weight_quant_uvalue);
+
+/**
+ * @brief Compute the error of a decimated weight set for 2 planes.
+ *
+ * After computing ideal weights for the case with one weight per texel, we want to compute the
+ * error for decimated weight grids where weights are stored at a lower resolution. This function
+ * computes the error of the reduced grid, compared to the full grid.
+ *
+ * @param eai1 The ideal weights for the full grid and plane 1.
+ * @param eai2 The ideal weights for the full grid and plane 2.
+ * @param di The selected weight decimation.
+ * @param dec_weight_quant_uvalue_plane1 The quantized weights for the decimated grid plane 1.
+ * @param dec_weight_quant_uvalue_plane2 The quantized weights for the decimated grid plane 2.
+ *
+ * @return The accumulated error.
+ */
+float compute_error_of_weight_set_2planes(
+ const endpoints_and_weights& eai1,
+ const endpoints_and_weights& eai2,
+ const decimation_info& di,
+ const float* dec_weight_quant_uvalue_plane1,
+ const float* dec_weight_quant_uvalue_plane2);
+
+/**
+ * @brief Pack a single pair of color endpoints as effectively as possible.
+ *
+ * The user requests a base color endpoint mode in @c format, but the quantizer may choose a
+ * delta-based representation. It will report back the format variant it actually used.
+ *
+ * @param color0 The input unquantized color0 endpoint for absolute endpoint pairs.
+ * @param color1 The input unquantized color1 endpoint for absolute endpoint pairs.
+ * @param rgbs_color The input unquantized RGBS variant endpoint for same chroma endpoints.
+ * @param rgbo_color The input unquantized RGBS variant endpoint for HDR endpoints.
+ * @param format The desired base format.
+ * @param[out] output The output storage for the quantized colors.
+ * @param quant_level The quantization level requested.
+ *
+ * @return The actual endpoint mode used.
+ */
+uint8_t pack_color_endpoints(
+ vfloat4 color0,
+ vfloat4 color1,
+ vfloat4 rgbs_color,
+ vfloat4 rgbo_color,
+ int format,
+ uint8_t* output,
+ quant_method quant_level);
+
+/**
+ * @brief Unpack a single pair of encoded endpoints.
+ *
+ * Endpoints must be unscrambled and converted into the 0-255 range before calling this function.
+ *
+ * @param decode_mode The decode mode (LDR, HDR).
+ * @param format The color endpoint mode used.
+ * @param input The raw array of encoded input integers. The length of this array
+ * depends on @c format; it can be safely assumed to be large enough.
+ * @param[out] rgb_hdr Is the endpoint using HDR for the RGB channels?
+ * @param[out] alpha_hdr Is the endpoint using HDR for the A channel?
+ * @param[out] output0 The output color for endpoint 0.
+ * @param[out] output1 The output color for endpoint 1.
+ */
+void unpack_color_endpoints(
+ astcenc_profile decode_mode,
+ int format,
+ const uint8_t* input,
+ bool& rgb_hdr,
+ bool& alpha_hdr,
+ vint4& output0,
+ vint4& output1);
+
+/**
+ * @brief Unpack a set of quantized and decimated weights.
+ *
+ * TODO: Can we skip this for non-decimated weights now that the @c scb is
+ * already storing unquantized weights?
+ *
+ * @param bsd The block size information.
+ * @param scb The symbolic compressed encoding.
+ * @param di The weight grid decimation table.
+ * @param is_dual_plane @c true if this is a dual plane block, @c false otherwise.
+ * @param[out] weights_plane1 The output array for storing the plane 1 weights.
+ * @param[out] weights_plane2 The output array for storing the plane 2 weights.
+ */
+void unpack_weights(
+ const block_size_descriptor& bsd,
+ const symbolic_compressed_block& scb,
+ const decimation_info& di,
+ bool is_dual_plane,
+ int weights_plane1[BLOCK_MAX_TEXELS],
+ int weights_plane2[BLOCK_MAX_TEXELS]);
+
+/**
+ * @brief Identify, for each mode, which set of color endpoint formats produces the best result.
+ *
+ * Returns the @c tune_candidate_limit best looking modes, along with the ideal color encoding
+ * combination for each. The modified quantization level can be used when all formats are the same,
+ * as this frees up two additional bits of storage.
+ *
+ * @param pi The partition info for the current trial.
+ * @param blk The image block color data to compress.
+ * @param ep The ideal endpoints.
+ * @param qwt_bitcounts Bit counts for different quantization methods.
+ * @param qwt_errors Errors for different quantization methods.
+ * @param tune_candidate_limit The max number of candidates to return, may be less.
+ * @param start_block_mode The first block mode to inspect.
+ * @param end_block_mode The last block mode to inspect.
+ * @param[out] partition_format_specifiers The best formats per partition.
+ * @param[out] block_mode The best packed block mode indexes.
+ * @param[out] quant_level The best color quant level.
+ * @param[out] quant_level_mod The best color quant level if endpoints are the same.
+ * @param[out] tmpbuf Preallocated scratch buffers for the compressor.
+ *
+ * @return The actual number of candidate matches returned.
+ */
+unsigned int compute_ideal_endpoint_formats(
+ const partition_info& pi,
+ const image_block& blk,
+ const endpoints& ep,
+ const int8_t* qwt_bitcounts,
+ const float* qwt_errors,
+ unsigned int tune_candidate_limit,
+ unsigned int start_block_mode,
+ unsigned int end_block_mode,
+ uint8_t partition_format_specifiers[TUNE_MAX_TRIAL_CANDIDATES][BLOCK_MAX_PARTITIONS],
+ int block_mode[TUNE_MAX_TRIAL_CANDIDATES],
+ quant_method quant_level[TUNE_MAX_TRIAL_CANDIDATES],
+ quant_method quant_level_mod[TUNE_MAX_TRIAL_CANDIDATES],
+ compression_working_buffers& tmpbuf);
+
+/**
+ * @brief For a given 1 plane weight set recompute the endpoint colors.
+ *
+ * As we quantize and decimate weights the optimal endpoint colors may change slightly, so we must
+ * recompute the ideal colors for a specific weight set.
+ *
+ * @param blk The image block color data to compress.
+ * @param pi The partition info for the current trial.
+ * @param di The weight grid decimation table.
+ * @param dec_weights_uquant The quantized weight set.
+ * @param[in,out] ep The color endpoints (modified in place).
+ * @param[out] rgbs_vectors The RGB+scale vectors for LDR blocks.
+ * @param[out] rgbo_vectors The RGB+offset vectors for HDR blocks.
+ */
+void recompute_ideal_colors_1plane(
+ const image_block& blk,
+ const partition_info& pi,
+ const decimation_info& di,
+ const uint8_t* dec_weights_uquant,
+ endpoints& ep,
+ vfloat4 rgbs_vectors[BLOCK_MAX_PARTITIONS],
+ vfloat4 rgbo_vectors[BLOCK_MAX_PARTITIONS]);
+
+/**
+ * @brief For a given 2 plane weight set recompute the endpoint colors.
+ *
+ * As we quantize and decimate weights the optimal endpoint colors may change slightly, so we must
+ * recompute the ideal colors for a specific weight set.
+ *
+ * @param blk The image block color data to compress.
+ * @param bsd The block size descriptor.
+ * @param di The weight grid decimation table.
+ * @param dec_weights_uquant_plane1 The quantized weight set for plane 1.
+ * @param dec_weights_uquant_plane2 The quantized weight set for plane 2.
+ * @param[in,out] ep The color endpoints (modified in place).
+ * @param[out] rgbs_vector The RGB+scale color for LDR blocks.
+ * @param[out] rgbo_vector The RGB+offset color for HDR blocks.
+ * @param plane2_component The component assigned to plane 2.
+ */
+void recompute_ideal_colors_2planes(
+ const image_block& blk,
+ const block_size_descriptor& bsd,
+ const decimation_info& di,
+ const uint8_t* dec_weights_uquant_plane1,
+ const uint8_t* dec_weights_uquant_plane2,
+ endpoints& ep,
+ vfloat4& rgbs_vector,
+ vfloat4& rgbo_vector,
+ int plane2_component);
+
+/**
+ * @brief Expand the angular tables needed for the alternative to PCA that we use.
+ */
+void prepare_angular_tables();
+
+/**
+ * @brief Compute the angular endpoints for one plane for each block mode.
+ *
+ * @param only_always Only consider block modes that are always enabled.
+ * @param bsd The block size descriptor for the current trial.
+ * @param dec_weight_ideal_value The ideal decimated unquantized weight values.
+ * @param max_weight_quant The maximum block mode weight quantization allowed.
+ * @param[out] tmpbuf Preallocated scratch buffers for the compressor.
+ */
+void compute_angular_endpoints_1plane(
+ bool only_always,
+ const block_size_descriptor& bsd,
+ const float* dec_weight_ideal_value,
+ unsigned int max_weight_quant,
+ compression_working_buffers& tmpbuf);
+
+/**
+ * @brief Compute the angular endpoints for two planes for each block mode.
+ *
+ * @param bsd The block size descriptor for the current trial.
+ * @param dec_weight_ideal_value The ideal decimated unquantized weight values.
+ * @param max_weight_quant The maximum block mode weight quantization allowed.
+ * @param[out] tmpbuf Preallocated scratch buffers for the compressor.
+ */
+void compute_angular_endpoints_2planes(
+ const block_size_descriptor& bsd,
+ const float* dec_weight_ideal_value,
+ unsigned int max_weight_quant,
+ compression_working_buffers& tmpbuf);
+
+/* ============================================================================
+ Functionality for high level compression and decompression access.
+============================================================================ */
+
+/**
+ * @brief Compress an image block into a physical block.
+ *
+ * @param ctx The compressor context and configuration.
+ * @param blk The image block color data to compress.
+ * @param[out] pcb The physical compressed block output.
+ * @param[out] tmpbuf Preallocated scratch buffers for the compressor.
+ */
+void compress_block(
+ const astcenc_contexti& ctx,
+ const image_block& blk,
+ physical_compressed_block& pcb,
+ compression_working_buffers& tmpbuf);
+
+/**
+ * @brief Decompress a symbolic block into an image block.
+ *
+ * @param decode_mode The decode mode (LDR, HDR, etc).
+ * @param bsd The block size information.
+ * @param xpos The X coordinate of the block in the overall image.
+ * @param ypos The Y coordinate of the block in the overall image.
+ * @param zpos The Z coordinate of the block in the overall image.
+ * @param scb The symbolic block data to decompress.
+ * @param[out] blk The decompressed image block color data.
+ */
+void decompress_symbolic_block(
+ astcenc_profile decode_mode,
+ const block_size_descriptor& bsd,
+ int xpos,
+ int ypos,
+ int zpos,
+ const symbolic_compressed_block& scb,
+ image_block& blk);
+
+/**
+ * @brief Compute the error between a symbolic block and the original input data.
+ *
+ * This function is specialized for 2 plane and 1 partition search.
+ *
+ * In RGBM mode this will reject blocks that attempt to encode a zero M value.
+ *
+ * @param config The compressor config.
+ * @param bsd The block size information.
+ * @param scb The symbolic compressed encoding.
+ * @param blk The original image block color data.
+ *
+ * @return Returns the computed error, or a negative value if the encoding
+ * should be rejected for any reason.
+ */
+float compute_symbolic_block_difference_2plane(
+ const astcenc_config& config,
+ const block_size_descriptor& bsd,
+ const symbolic_compressed_block& scb,
+ const image_block& blk);
+
+/**
+ * @brief Compute the error between a symbolic block and the original input data.
+ *
+ * This function is specialized for 1 plane and N partition search.
+ *
+ * In RGBM mode this will reject blocks that attempt to encode a zero M value.
+ *
+ * @param config The compressor config.
+ * @param bsd The block size information.
+ * @param scb The symbolic compressed encoding.
+ * @param blk The original image block color data.
+ *
+ * @return Returns the computed error, or a negative value if the encoding
+ * should be rejected for any reason.
+ */
+float compute_symbolic_block_difference_1plane(
+ const astcenc_config& config,
+ const block_size_descriptor& bsd,
+ const symbolic_compressed_block& scb,
+ const image_block& blk);
+
+/**
+ * @brief Compute the error between a symbolic block and the original input data.
+ *
+ * This function is specialized for 1 plane and 1 partition search.
+ *
+ * In RGBM mode this will reject blocks that attempt to encode a zero M value.
+ *
+ * @param config The compressor config.
+ * @param bsd The block size information.
+ * @param scb The symbolic compressed encoding.
+ * @param blk The original image block color data.
+ *
+ * @return Returns the computed error, or a negative value if the encoding
+ * should be rejected for any reason.
+ */
+float compute_symbolic_block_difference_1plane_1partition(
+ const astcenc_config& config,
+ const block_size_descriptor& bsd,
+ const symbolic_compressed_block& scb,
+ const image_block& blk);
+
+/**
+ * @brief Convert a symbolic representation into a binary physical encoding.
+ *
+ * It is assumed that the symbolic encoding is valid and encodable, or
+ * previously flagged as an error block if an error color is to be encoded.
+ *
+ * @param bsd The block size information.
+ * @param scb The symbolic representation.
+ * @param[out] pcb The binary encoded data.
+ */
+void symbolic_to_physical(
+ const block_size_descriptor& bsd,
+ const symbolic_compressed_block& scb,
+ physical_compressed_block& pcb);
+
+/**
+ * @brief Convert a binary physical encoding into a symbolic representation.
+ *
+ * This function can cope with arbitrary input data; output blocks will be
+ * flagged as an error block if the encoding is invalid.
+ *
+ * @param bsd The block size information.
+ * @param pcb The binary encoded data.
+ * @param[out] scb The output symbolic representation.
+ */
+void physical_to_symbolic(
+ const block_size_descriptor& bsd,
+ const physical_compressed_block& pcb,
+ symbolic_compressed_block& scb);
+
+/* ============================================================================
+ Platform-specific functions.
+============================================================================ */
+/**
+ * @brief Run-time detection of whether the host CPU supports the POPCNT extension.
+ *
+ * @return @c true if supported, @c false if not.
+ */
+bool cpu_supports_popcnt();
+
+/**
+ * @brief Run-time detection of whether the host CPU supports the F16C extension.
+ *
+ * @return @c true if supported, @c false if not.
+ */
+bool cpu_supports_f16c();
+
+/**
+ * @brief Run-time detection of whether the host CPU supports the SSE 4.1 extension.
+ *
+ * @return @c true if supported, @c false if not.
+ */
+bool cpu_supports_sse41();
+
+/**
+ * @brief Run-time detection of whether the host CPU supports the AVX 2 extension.
+ *
+ * @return @c true if supported, @c false if not.
+ */
+bool cpu_supports_avx2();
+
+/**
+ * @brief Allocate an aligned memory buffer.
+ *
+ * Allocated memory must be freed by aligned_free().
+ *
+ * @param size The desired buffer size.
+ * @param align The desired buffer alignment; must be 2^N.
+ *
+ * @return The memory buffer pointer or nullptr on allocation failure.
+ */
+template<typename T>
+T* aligned_malloc(size_t size, size_t align)
+{
+ void* ptr;
+ int error = 0;
+
+#if defined(_WIN32)
+ ptr = _aligned_malloc(size, align);
+#else
+ error = posix_memalign(&ptr, align, size);
+#endif
+
+ if (error || (!ptr))
+ {
+ return nullptr;
+ }
+
+ return static_cast<T*>(ptr);
+}
+
+/**
+ * @brief Free an aligned memory buffer.
+ *
+ * @param ptr The buffer to free.
+ */
+template<typename T>
+void aligned_free(T* ptr)
+{
+#if defined(_WIN32)
+ _aligned_free(reinterpret_cast<void*>(ptr));
+#else
+ free(reinterpret_cast<void*>(ptr));
+#endif
+}
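+
+// A minimal usage sketch for the helpers above (illustrative only; the element type
+// and buffer size are assumptions, not values used by the codec):
+//
+//     float* scratch = aligned_malloc<float>(1024 * sizeof(float), ASTCENC_VECALIGN);
+//     if (scratch)
+//     {
+//         // ... use scratch as SIMD-aligned storage ...
+//         aligned_free(scratch);
+//     }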
+
+#endif
diff --git a/thirdparty/astcenc/astcenc_internal_entry.h b/thirdparty/astcenc/astcenc_internal_entry.h
new file mode 100644
index 0000000000..4e8794547a
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_internal_entry.h
@@ -0,0 +1,273 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2011-2022 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+/**
+ * @brief Functions and data declarations for the outer context.
+ *
+ * The outer context includes thread-pool management, which is slower to
+ * compile due to increased use of C++ stdlib. The inner context used in the
+ * majority of the codec library does not include this.
+ */
+
+#ifndef ASTCENC_INTERNAL_ENTRY_INCLUDED
+#define ASTCENC_INTERNAL_ENTRY_INCLUDED
+
+#include <atomic>
+#include <condition_variable>
+#include <functional>
+#include <mutex>
+
+#include "astcenc_internal.h"
+
+/* ============================================================================
+ Parallel execution control
+============================================================================ */
+
+/**
+ * @brief A simple counter-based manager for parallel task execution.
+ *
+ * The task processing execution consists of:
+ *
+ * * A single-threaded init stage.
+ * * A multi-threaded processing stage.
+ * * A condition variable so threads can wait for processing completion.
+ *
+ * The init stage will be executed by the first thread to arrive in the critical section; there is
+ * no main thread in the thread pool.
+ *
+ * The processing stage uses dynamic dispatch to assign task tickets to threads on an on-demand
+ * basis. Threads may therefore each execute different numbers of tasks, depending on their
+ * processing complexity. The task queue and the task tickets are just counters; the caller must map
+ * these integers to an actual processing partition in a specific problem domain.
+ *
+ * The exit wait condition is needed to ensure processing has finished before a worker thread can
+ * progress to the next stage of the pipeline. Specifically, a worker may exit the processing stage
+ * because there are no new tasks to assign to it while other worker threads are still processing.
+ * Calling @c wait() will ensure that all other workers have finished before the thread can proceed.
+ *
+ * The basic usage model:
+ *
+ * // --------- From single-threaded code ---------
+ *
+ * // Reset the tracker state
+ * manager->reset()
+ *
+ * // --------- From multi-threaded code ---------
+ *
+ * // Run the stage init; only first thread actually runs the lambda
+ * manager->init(<lambda>)
+ *
+ * do
+ * {
+ * // Request a task assignment
+ * uint task_count;
+ * uint base_index = manager->get_task_assignment(<granule>, task_count);
+ *
+ * // Process any tasks we were given (task_count <= granule size)
+ * if (task_count)
+ * {
+ * // Run the user task processing code for N tasks here
+ * ...
+ *
+ * // Flag these tasks as complete
+ * manager->complete_task_assignment(task_count);
+ * }
+ * } while (task_count);
+ *
+ * // Wait for all threads to complete tasks before progressing
+ * manager->wait()
+ *
+ * // Run the stage term; only first thread actually runs the lambda
+ * manager->term(<lambda>)
+ */
+class ParallelManager
+{
+private:
+ /** @brief Lock used for critical section and condition synchronization. */
+ std::mutex m_lock;
+
+ /** @brief True if the stage init() step has been executed. */
+ bool m_init_done;
+
+ /** @brief True if the stage term() step has been executed. */
+ bool m_term_done;
+
+ /** @brief Condition variable for tracking stage processing completion. */
+ std::condition_variable m_complete;
+
+ /** @brief Number of tasks started, but not necessarily finished. */
+ std::atomic<unsigned int> m_start_count;
+
+ /** @brief Number of tasks finished. */
+ unsigned int m_done_count;
+
+ /** @brief Number of tasks that need to be processed. */
+ unsigned int m_task_count;
+
+public:
+ /** @brief Create a new ParallelManager. */
+ ParallelManager()
+ {
+ reset();
+ }
+
+ /**
+ * @brief Reset the tracker for a new processing batch.
+ *
+ * This must be called from single-threaded code before starting the multi-threaded processing
+ * operations.
+ */
+ void reset()
+ {
+ m_init_done = false;
+ m_term_done = false;
+ m_start_count = 0;
+ m_done_count = 0;
+ m_task_count = 0;
+ }
+
+ /**
+ * @brief Trigger the pipeline stage init step.
+ *
+ * This can be called from multi-threaded code. The first thread to hit this will process the
+ * initialization. Other threads will block and wait for it to complete.
+ *
+ * @param init_func Callable which executes the stage initialization. It must return the
+ * total number of tasks in the stage.
+ */
+ void init(std::function<unsigned int(void)> init_func)
+ {
+ std::lock_guard<std::mutex> lck(m_lock);
+ if (!m_init_done)
+ {
+ m_task_count = init_func();
+ m_init_done = true;
+ }
+ }
+
+ /**
+ * @brief Trigger the pipeline stage init step.
+ *
+ * This can be called from multi-threaded code. The first thread to hit this will process the
+ * initialization. Other threads will block and wait for it to complete.
+ *
+ * @param task_count Total number of tasks needing processing.
+ */
+ void init(unsigned int task_count)
+ {
+ std::lock_guard<std::mutex> lck(m_lock);
+ if (!m_init_done)
+ {
+ m_task_count = task_count;
+ m_init_done = true;
+ }
+ }
+
+ /**
+ * @brief Request a task assignment.
+ *
+ * Assign up to @c granule tasks to the caller for processing.
+ *
+ * @param granule Maximum number of tasks that can be assigned.
+ * @param[out] count Actual number of tasks assigned, or zero if no tasks were assigned.
+ *
+ * @return Task index of the first assigned task; assigned tasks increment from this.
+ */
+ unsigned int get_task_assignment(unsigned int granule, unsigned int& count)
+ {
+ unsigned int base = m_start_count.fetch_add(granule, std::memory_order_relaxed);
+ if (base >= m_task_count)
+ {
+ count = 0;
+ return 0;
+ }
+
+ count = astc::min(m_task_count - base, granule);
+ return base;
+ }
+
+ /**
+ * @brief Complete a task assignment.
+ *
+ * Mark @c count tasks as complete. This will notify all threads blocked on @c wait() if this
+ * completes the processing of the stage.
+ *
+ * @param count The number of completed tasks.
+ */
+ void complete_task_assignment(unsigned int count)
+ {
+ // Note: m_done_count cannot simply be an atomic, as without the mutex there would be a
+ // race between the update here and the predicate check inside wait() on other threads
+ std::unique_lock<std::mutex> lck(m_lock);
+ this->m_done_count += count;
+ if (m_done_count == m_task_count)
+ {
+ lck.unlock();
+ m_complete.notify_all();
+ }
+ }
+
+ /**
+ * @brief Wait for stage processing to complete.
+ */
+ void wait()
+ {
+ std::unique_lock<std::mutex> lck(m_lock);
+ m_complete.wait(lck, [this]{ return m_done_count == m_task_count; });
+ }
+
+ /**
+ * @brief Trigger the pipeline stage term step.
+ *
+ * This can be called from multi-threaded code. The first thread to hit this will process the
+ * work pool termination. Caller must have called @c wait() prior to calling this function to
+ * ensure that processing is complete.
+ *
+ * @param term_func Callable which executes the stage termination.
+ */
+ void term(std::function<void(void)> term_func)
+ {
+ std::lock_guard<std::mutex> lck(m_lock);
+ if (!m_term_done)
+ {
+ term_func();
+ m_term_done = true;
+ }
+ }
+};
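+
+// A minimal worker-loop sketch for the manager above (illustrative only; the granule
+// size of 16 and the task body are assumptions):
+//
+//     manager.init(task_total);
+//
+//     unsigned int count;
+//     unsigned int base = manager.get_task_assignment(16, count);
+//     while (count)
+//     {
+//         // ... process tasks [base, base + count) ...
+//         manager.complete_task_assignment(count);
+//         base = manager.get_task_assignment(16, count);
+//     }
+//
+//     manager.wait();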
+
+/**
+ * @brief The astcenc compression context.
+ */
+struct astcenc_context
+{
+ /** @brief The context internal state. */
+ astcenc_contexti context;
+
+#if !defined(ASTCENC_DECOMPRESS_ONLY)
+ /** @brief The parallel manager for averages computation. */
+ ParallelManager manage_avg;
+
+ /** @brief The parallel manager for compression. */
+ ParallelManager manage_compress;
+#endif
+
+ /** @brief The parallel manager for decompression. */
+ ParallelManager manage_decompress;
+};
+
+#endif
diff --git a/thirdparty/astcenc/astcenc_mathlib.cpp b/thirdparty/astcenc/astcenc_mathlib.cpp
new file mode 100644
index 0000000000..f276ac7e3d
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_mathlib.cpp
@@ -0,0 +1,48 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2011-2021 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+#include "astcenc_mathlib.h"
+
+/**
+ * @brief 64-bit rotate left.
+ *
+ * @param val The value to rotate.
+ * @param count The rotation, in bits.
+ */
+static inline uint64_t rotl(uint64_t val, int count)
+{
+ return (val << count) | (val >> (64 - count));
+}
+
+/* See header for documentation. */
+void astc::rand_init(uint64_t state[2])
+{
+ state[0] = 0xfaf9e171cea1ec6bULL;
+ state[1] = 0xf1b318cc06af5d71ULL;
+}
+
+/* See header for documentation. */
+uint64_t astc::rand(uint64_t state[2])
+{
+ uint64_t s0 = state[0];
+ uint64_t s1 = state[1];
+ uint64_t res = s0 + s1;
+ s1 ^= s0;
+ state[0] = rotl(s0, 24) ^ s1 ^ (s1 << 16);
+ state[1] = rotl(s1, 37);
+ return res;
+}
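+
+// A small usage sketch for the PRNG above (illustrative only; reducing the value to a
+// 16-entry bucket is an assumption, not codec behavior):
+//
+//     uint64_t state[2];
+//     astc::rand_init(state);
+//     uint64_t value = astc::rand(state);
+//     unsigned int bucket = value % 16;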
diff --git a/thirdparty/astcenc/astcenc_mathlib.h b/thirdparty/astcenc/astcenc_mathlib.h
new file mode 100644
index 0000000000..0540c4fedd
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_mathlib.h
@@ -0,0 +1,476 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2011-2021 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+/*
+ * This module implements a variety of mathematical data types and library
+ * functions used by the codec.
+ */
+
+#ifndef ASTC_MATHLIB_H_INCLUDED
+#define ASTC_MATHLIB_H_INCLUDED
+
+#include <cassert>
+#include <cstdint>
+#include <cmath>
+
+#ifndef ASTCENC_POPCNT
+ #if defined(__POPCNT__)
+ #define ASTCENC_POPCNT 1
+ #else
+ #define ASTCENC_POPCNT 0
+ #endif
+#endif
+
+#ifndef ASTCENC_F16C
+ #if defined(__F16C__)
+ #define ASTCENC_F16C 1
+ #else
+ #define ASTCENC_F16C 0
+ #endif
+#endif
+
+#ifndef ASTCENC_SSE
+ #if defined(__SSE4_2__)
+ #define ASTCENC_SSE 42
+ #elif defined(__SSE4_1__)
+ #define ASTCENC_SSE 41
+ #elif defined(__SSE2__)
+ #define ASTCENC_SSE 20
+ #else
+ #define ASTCENC_SSE 0
+ #endif
+#endif
+
+#ifndef ASTCENC_AVX
+ #if defined(__AVX2__)
+ #define ASTCENC_AVX 2
+ #elif defined(__AVX__)
+ #define ASTCENC_AVX 1
+ #else
+ #define ASTCENC_AVX 0
+ #endif
+#endif
+
+#ifndef ASTCENC_NEON
+ #if defined(__aarch64__)
+ #define ASTCENC_NEON 1
+ #else
+ #define ASTCENC_NEON 0
+ #endif
+#endif
+
+#if ASTCENC_AVX
+ #define ASTCENC_VECALIGN 32
+#else
+ #define ASTCENC_VECALIGN 16
+#endif
+
+#if ASTCENC_SSE != 0 || ASTCENC_AVX != 0 || ASTCENC_POPCNT != 0
+ #include <immintrin.h>
+#endif
+
+/* ============================================================================
+ Fast math library; note that many of the higher-order functions in this set
+ use approximations which are less accurate, but faster, than <cmath> standard
+ library equivalents.
+
+ Note: Many of these are not necessarily faster than simple C versions when
+ used on a single scalar value, but are included for testing purposes as most
+ have an option based on SSE intrinsics and therefore provide an obvious route
+ to future vectorization.
+============================================================================ */
+
+// Union for manipulation of float bit patterns
+typedef union
+{
+ uint32_t u;
+ int32_t s;
+ float f;
+} if32;
+
+// These are namespaced to avoid colliding with C standard library functions.
+namespace astc
+{
+
+static const float PI = 3.14159265358979323846f;
+static const float PI_OVER_TWO = 1.57079632679489661923f;
+
+/**
+ * @brief SP float absolute value.
+ *
+ * @param v The value to make absolute.
+ *
+ * @return The absolute value.
+ */
+static inline float fabs(float v)
+{
+ return std::fabs(v);
+}
+
+/**
+ * @brief Test if a float value is a nan.
+ *
+ * @param v The value to test.
+ *
+ * @return Zero if not a NaN, non-zero otherwise.
+ */
+static inline bool isnan(float v)
+{
+ return v != v;
+}
+
+/**
+ * @brief Return the minimum of two values.
+ *
+ * For floats, NaNs are turned into @c q.
+ *
+ * @param p The first value to compare.
+ * @param q The second value to compare.
+ *
+ * @return The smallest value.
+ */
+template<typename T>
+static inline T min(T p, T q)
+{
+ return p < q ? p : q;
+}
+
+/**
+ * @brief Return the minimum of three values.
+ *
+ * For floats, NaNs are turned into @c r.
+ *
+ * @param p The first value to compare.
+ * @param q The second value to compare.
+ * @param r The third value to compare.
+ *
+ * @return The smallest value.
+ */
+template<typename T>
+static inline T min(T p, T q, T r)
+{
+ return min(min(p, q), r);
+}
+
+/**
+ * @brief Return the minimum of four values.
+ *
+ * For floats, NaNs are turned into @c s.
+ *
+ * @param p The first value to compare.
+ * @param q The second value to compare.
+ * @param r The third value to compare.
+ * @param s The fourth value to compare.
+ *
+ * @return The smallest value.
+ */
+template<typename T>
+static inline T min(T p, T q, T r, T s)
+{
+ return min(min(p, q), min(r, s));
+}
+
+/**
+ * @brief Return the maximum of two values.
+ *
+ * For floats, NaNs are turned into @c q.
+ *
+ * @param p The first value to compare.
+ * @param q The second value to compare.
+ *
+ * @return The largest value.
+ */
+template<typename T>
+static inline T max(T p, T q)
+{
+ return p > q ? p : q;
+}
+
+/**
+ * @brief Return the maximum of three values.
+ *
+ * For floats, NaNs are turned into @c r.
+ *
+ * @param p The first value to compare.
+ * @param q The second value to compare.
+ * @param r The third value to compare.
+ *
+ * @return The largest value.
+ */
+template<typename T>
+static inline T max(T p, T q, T r)
+{
+ return max(max(p, q), r);
+}
+
+/**
+ * @brief Return the maximum of four values.
+ *
+ * For floats, NaNs are turned into @c s.
+ *
+ * @param p The first value to compare.
+ * @param q The second value to compare.
+ * @param r The third value to compare.
+ * @param s The fourth value to compare.
+ *
+ * @return The largest value.
+ */
+template<typename T>
+static inline T max(T p, T q, T r, T s)
+{
+ return max(max(p, q), max(r, s));
+}
+
+/**
+ * @brief Clamp a value between @c mn and @c mx.
+ *
+ * For floats, NaNs are turned into @c mn.
+ *
+ * @param v The value to clamp.
+ * @param mn The min value (inclusive).
+ * @param mx The max value (inclusive).
+ *
+ * @return The clamped value.
+ */
+template<typename T>
+inline T clamp(T v, T mn, T mx)
+{
+ // Do not reorder; correct NaN handling relies on the fact that comparison
+ // with NaN returns false and will fall through to the "min" value.
+ if (v > mx) return mx;
+ if (v > mn) return v;
+ return mn;
+}
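+
+// Worked examples of the fall-through behavior: clamp(2.0f, 0.0f, 1.0f) returns 1.0f,
+// clamp(0.5f, 0.0f, 1.0f) returns 0.5f, and clamp(NaN, 0.0f, 1.0f) returns 0.0f.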
+
+/**
+ * @brief Clamp a float value between 0.0f and 1.0f.
+ *
+ * NaNs are turned into 0.0f.
+ *
+ * @param v The value to clamp.
+ *
+ * @return The clamped value.
+ */
+static inline float clamp1f(float v)
+{
+ return astc::clamp(v, 0.0f, 1.0f);
+}
+
+/**
+ * @brief Clamp a float value between 0.0f and 255.0f.
+ *
+ * NaNs are turned into 0.0f.
+ *
+ * @param v The value to clamp.
+ *
+ * @return The clamped value.
+ */
+static inline float clamp255f(float v)
+{
+ return astc::clamp(v, 0.0f, 255.0f);
+}
+
+/**
+ * @brief SP float round-down.
+ *
+ * @param v The value to round.
+ *
+ * @return The rounded value.
+ */
+static inline float flt_rd(float v)
+{
+ return std::floor(v);
+}
+
+/**
+ * @brief SP float round-to-nearest and convert to integer.
+ *
+ * @param v The value to round.
+ *
+ * @return The rounded value.
+ */
+static inline int flt2int_rtn(float v)
+{
+
+ return static_cast<int>(v + 0.5f);
+}
+
+/**
+ * @brief SP float round down and convert to integer.
+ *
+ * @param v The value to round.
+ *
+ * @return The rounded value.
+ */
+static inline int flt2int_rd(float v)
+{
+ return static_cast<int>(v);
+}
+
+/**
+ * @brief SP float bit-interpreted as an integer.
+ *
+ * @param v The value to bitcast.
+ *
+ * @return The converted value.
+ */
+static inline int float_as_int(float v)
+{
+ union { int a; float b; } u;
+ u.b = v;
+ return u.a;
+}
+
+/**
+ * @brief Integer bit-interpreted as an SP float.
+ *
+ * @param v The value to bitcast.
+ *
+ * @return The converted value.
+ */
+static inline float int_as_float(int v)
+{
+ union { int a; float b; } u;
+ u.a = v;
+ return u.b;
+}
+
+/**
+ * @brief Fast approximation of 1.0 / sqrt(val).
+ *
+ * @param v The input value.
+ *
+ * @return The approximated result.
+ */
+static inline float rsqrt(float v)
+{
+ return 1.0f / std::sqrt(v);
+}
+
+/**
+ * @brief Fast approximation of sqrt(val).
+ *
+ * @param v The input value.
+ *
+ * @return The approximated result.
+ */
+static inline float sqrt(float v)
+{
+ return std::sqrt(v);
+}
+
+/**
+ * @brief Extract mantissa and exponent of a float value.
+ *
+ * @param v The input value.
+ * @param[out] expo The output exponent.
+ *
+ * @return The mantissa.
+ */
+static inline float frexp(float v, int* expo)
+{
+ if32 p;
+ p.f = v;
+ *expo = ((p.u >> 23) & 0xFF) - 126;
+ p.u = (p.u & 0x807fffff) | 0x3f000000;
+ return p.f;
+}
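+
+// Worked example: for v == 6.0f the biased exponent field is 129, so *expo becomes
+// 129 - 126 = 3 and the returned mantissa is 0.75f, satisfying 0.75f * 2^3 == 6.0f.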
+
+/**
+ * @brief Initialize the seed structure for a random number generator.
+ *
+ * Important note: For the purposes of ASTC we want sets of random numbers to
+ * use in the codec, but we want the same seed values across instances and threads
+ * to ensure that image output is stable across compressor runs and across
+ * platforms. Every PRNG created by this call will therefore return the same
+ * sequence of values ...
+ *
+ * @param state The state structure to initialize.
+ */
+void rand_init(uint64_t state[2]);
+
+/**
+ * @brief Return the next random number from the generator.
+ *
+ * This RNG is an implementation of the "xoroshiro128+ 1.0" PRNG, based on the
+ * public-domain implementation given by David Blackman & Sebastiano Vigna at
+ * http://vigna.di.unimi.it/xorshift/xoroshiro128plus.c
+ *
+ * @param state The state structure to use/update.
+ *
+ * @return The next value in the pseudo-random sequence.
+ */
+uint64_t rand(uint64_t state[2]);
+
+}
+
+/* ============================================================================
+ Softfloat library with fp32 and fp16 conversion functionality.
+============================================================================ */
+#if (ASTCENC_F16C == 0) && (ASTCENC_NEON == 0)
+ /* narrowing float->float conversions */
+ uint16_t float_to_sf16(float val);
+ float sf16_to_float(uint16_t val);
+#endif
+
+/*********************************
+ Vector library
+*********************************/
+#include "astcenc_vecmathlib.h"
+
+/*********************************
+ Declaration of line types
+*********************************/
+// parametric line, 2D: The line is given by line = a + b * t.
+
+struct line2
+{
+ vfloat4 a;
+ vfloat4 b;
+};
+
+// parametric line, 3D
+struct line3
+{
+ vfloat4 a;
+ vfloat4 b;
+};
+
+struct line4
+{
+ vfloat4 a;
+ vfloat4 b;
+};
+
+
+struct processed_line2
+{
+ vfloat4 amod;
+ vfloat4 bs;
+};
+
+struct processed_line3
+{
+ vfloat4 amod;
+ vfloat4 bs;
+};
+
+struct processed_line4
+{
+ vfloat4 amod;
+ vfloat4 bs;
+};
+
+#endif
diff --git a/thirdparty/astcenc/astcenc_mathlib_softfloat.cpp b/thirdparty/astcenc/astcenc_mathlib_softfloat.cpp
new file mode 100644
index 0000000000..42db764549
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_mathlib_softfloat.cpp
@@ -0,0 +1,411 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2011-2021 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+/**
+ * @brief Soft-float library for IEEE-754.
+ */
+#if (ASTCENC_F16C == 0) && (ASTCENC_NEON == 0)
+
+#include "astcenc_mathlib.h"
+
+/* sized soft-float types. These are mapped to the sized integer
+ types of C99, instead of C's floating-point types; this is because
+ the library needs to maintain exact, bit-level control on all
+ operations on these data types. */
+typedef uint16_t sf16;
+typedef uint32_t sf32;
+
+/******************************************
+ helper functions and their lookup tables
+ ******************************************/
+/* count leading zeros functions. Only used when the input is nonzero. */
+
+#if defined(__GNUC__) && (defined(__i386) || defined(__amd64))
+#elif defined(__arm__) && defined(__ARMCC_VERSION)
+#elif defined(__arm__) && defined(__GNUC__)
+#else
+ /* table used for the slow default versions. */
+ static const uint8_t clz_table[256] =
+ {
+ 8, 7, 6, 6, 5, 5, 5, 5, 4, 4, 4, 4, 4, 4, 4, 4,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ };
+#endif
+
+/*
+ 32-bit count-leading-zeros function: use the assembly instruction whenever possible. */
+static uint32_t clz32(uint32_t inp)
+{
+ #if defined(__GNUC__) && (defined(__i386) || defined(__amd64))
+ uint32_t bsr;
+ __asm__("bsrl %1, %0": "=r"(bsr):"r"(inp | 1));
+ return 31 - bsr;
+ #else
+ #if defined(__arm__) && defined(__ARMCC_VERSION)
+ return __clz(inp); /* armcc builtin */
+ #else
+ #if defined(__arm__) && defined(__GNUC__)
+ uint32_t lz;
+ __asm__("clz %0, %1": "=r"(lz):"r"(inp));
+ return lz;
+ #else
+ /* slow default version */
+ uint32_t summa = 24;
+ if (inp >= UINT32_C(0x10000))
+ {
+ inp >>= 16;
+ summa -= 16;
+ }
+ if (inp >= UINT32_C(0x100))
+ {
+ inp >>= 8;
+ summa -= 8;
+ }
+ return summa + clz_table[inp];
+ #endif
+ #endif
+ #endif
+}
+
+/* the five rounding modes that IEEE-754r defines */
+typedef enum
+{
+ SF_UP = 0, /* round towards positive infinity */
+ SF_DOWN = 1, /* round towards negative infinity */
+ SF_TOZERO = 2, /* round towards zero */
+ SF_NEARESTEVEN = 3, /* round toward nearest value; if mid-between, round to even value */
+ SF_NEARESTAWAY = 4 /* round toward nearest value; if mid-between, round away from zero */
+} roundmode;
+
+
+static uint32_t rtne_shift32(uint32_t inp, uint32_t shamt)
+{
+ uint32_t vl1 = UINT32_C(1) << shamt;
+ uint32_t inp2 = inp + (vl1 >> 1); /* added 0.5 ULP */
+ uint32_t msk = (inp | UINT32_C(1)) & vl1; /* nonzero if odd. '| 1' forces it to 1 if the shamt is 0. */
+ msk--; /* negative if even, nonnegative if odd. */
+ inp2 -= (msk >> 31); /* subtract epsilon before shift if even. */
+ inp2 >>= shamt;
+ return inp2;
+}
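+
+// Worked example: rtne_shift32(6, 2) evaluates 6 / 4 = 1.5 and rounds to the even
+// neighbor 2; rtne_shift32(10, 2) evaluates 2.5 and also rounds to the even value 2.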
+
+static uint32_t rtna_shift32(uint32_t inp, uint32_t shamt)
+{
+ uint32_t vl1 = (UINT32_C(1) << shamt) >> 1;
+ inp += vl1;
+ inp >>= shamt;
+ return inp;
+}
+
+static uint32_t rtup_shift32(uint32_t inp, uint32_t shamt)
+{
+ uint32_t vl1 = UINT32_C(1) << shamt;
+ inp += vl1;
+ inp--;
+ inp >>= shamt;
+ return inp;
+}
+
+/* convert from FP16 to FP32. */
+static sf32 sf16_to_sf32(sf16 inp)
+{
+ uint32_t inpx = inp;
+
+ /*
+ This table contains, for every FP16 sign/exponent value combination,
+ the difference between the input FP16 value and the value obtained
+ by shifting the correct FP32 result right by 13 bits.
+ This table allows us to handle every case except denormals and NaN
+ with just 1 table lookup, 2 shifts and 1 add.
+ */
+
+ #define WITH_MSB(a) (UINT32_C(a) | (1u << 31))
+ static const uint32_t tbl[64] =
+ {
+ WITH_MSB(0x00000), 0x1C000, 0x1C000, 0x1C000, 0x1C000, 0x1C000, 0x1C000, 0x1C000,
+ 0x1C000, 0x1C000, 0x1C000, 0x1C000, 0x1C000, 0x1C000, 0x1C000, 0x1C000,
+ 0x1C000, 0x1C000, 0x1C000, 0x1C000, 0x1C000, 0x1C000, 0x1C000, 0x1C000,
+ 0x1C000, 0x1C000, 0x1C000, 0x1C000, 0x1C000, 0x1C000, 0x1C000, WITH_MSB(0x38000),
+ WITH_MSB(0x38000), 0x54000, 0x54000, 0x54000, 0x54000, 0x54000, 0x54000, 0x54000,
+ 0x54000, 0x54000, 0x54000, 0x54000, 0x54000, 0x54000, 0x54000, 0x54000,
+ 0x54000, 0x54000, 0x54000, 0x54000, 0x54000, 0x54000, 0x54000, 0x54000,
+ 0x54000, 0x54000, 0x54000, 0x54000, 0x54000, 0x54000, 0x54000, WITH_MSB(0x70000)
+ };
+
+ uint32_t res = tbl[inpx >> 10];
+ res += inpx;
+
+ /* Normal cases: MSB of 'res' not set. */
+ if ((res & WITH_MSB(0)) == 0)
+ {
+ return res << 13;
+ }
+
+ /* Infinity and Zero: 10 LSB of 'res' not set. */
+ if ((res & 0x3FF) == 0)
+ {
+ return res << 13;
+ }
+
+ /* NaN: the exponent field of 'inp' is non-zero. */
+ if ((inpx & 0x7C00) != 0)
+ {
+ /* All NaNs are quietened. */
+ return (res << 13) | 0x400000;
+ }
+
+ /* Denormal cases */
+ uint32_t sign = (inpx & 0x8000) << 16;
+ uint32_t mskval = inpx & 0x7FFF;
+ uint32_t leadingzeroes = clz32(mskval);
+ mskval <<= leadingzeroes;
+ return (mskval >> 8) + ((0x85 - leadingzeroes) << 23) + sign;
+}
+
+/* Conversion routine that converts from FP32 to FP16. It supports denormals and all rounding modes. If a NaN is given as input, it is quietened. */
+static sf16 sf32_to_sf16(sf32 inp, roundmode rmode)
+{
+ /* for each possible sign/exponent combination, store a case index. This gives a 512-byte table */
+ static const uint8_t tab[512] {
+ 0, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
+ 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
+ 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
+ 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
+ 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
+ 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
+ 10, 10, 10, 10, 10, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
+ 20, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
+ 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 40,
+ 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40,
+ 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40,
+ 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40,
+ 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40,
+ 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40,
+ 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40,
+ 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 50,
+
+ 5, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
+ 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
+ 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
+ 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
+ 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
+ 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
+ 15, 15, 15, 15, 15, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
+ 25, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 45,
+ 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
+ 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
+ 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
+ 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
+ 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
+ 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
+ 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 55,
+ };
+
+ /* many of the cases below use a case-dependent magic constant. So we look up a magic constant before actually performing the switch. This table allows us to group cases, thereby minimizing code
+ size. */
+ static const uint32_t tabx[60] {
+ UINT32_C(0), UINT32_C(0), UINT32_C(0), UINT32_C(0), UINT32_C(0), UINT32_C(0x8000), UINT32_C(0x80000000), UINT32_C(0x8000), UINT32_C(0x8000), UINT32_C(0x8000),
+ UINT32_C(1), UINT32_C(0), UINT32_C(0), UINT32_C(0), UINT32_C(0), UINT32_C(0x8000), UINT32_C(0x8001), UINT32_C(0x8000), UINT32_C(0x8000), UINT32_C(0x8000),
+ UINT32_C(0), UINT32_C(0), UINT32_C(0), UINT32_C(0), UINT32_C(0), UINT32_C(0x8000), UINT32_C(0x8000), UINT32_C(0x8000), UINT32_C(0x8000), UINT32_C(0x8000),
+ UINT32_C(0xC8001FFF), UINT32_C(0xC8000000), UINT32_C(0xC8000000), UINT32_C(0xC8000FFF), UINT32_C(0xC8001000),
+ UINT32_C(0x58000000), UINT32_C(0x38001FFF), UINT32_C(0x58000000), UINT32_C(0x58000FFF), UINT32_C(0x58001000),
+ UINT32_C(0x7C00), UINT32_C(0x7BFF), UINT32_C(0x7BFF), UINT32_C(0x7C00), UINT32_C(0x7C00),
+ UINT32_C(0xFBFF), UINT32_C(0xFC00), UINT32_C(0xFBFF), UINT32_C(0xFC00), UINT32_C(0xFC00),
+ UINT32_C(0x90000000), UINT32_C(0x90000000), UINT32_C(0x90000000), UINT32_C(0x90000000), UINT32_C(0x90000000),
+ UINT32_C(0x20000000), UINT32_C(0x20000000), UINT32_C(0x20000000), UINT32_C(0x20000000), UINT32_C(0x20000000)
+ };
+
+ uint32_t p;
+ uint32_t idx = rmode + tab[inp >> 23];
+ uint32_t vlx = tabx[idx];
+ switch (idx)
+ {
+ /*
+ Positive number which may be Infinity or NaN.
+ We need to check whether it is NaN; if it is, quieten it by setting the top bit of the mantissa.
+ (If we don't do this quieting, then a NaN that is distinguished only by having
+ its low-order bits set would be turned into an INF.) */
+ case 50:
+ case 51:
+ case 52:
+ case 53:
+ case 54:
+ case 55:
+ case 56:
+ case 57:
+ case 58:
+ case 59:
+ /*
+ the input value is 0x7F800000 or 0xFF800000 if it is INF.
+ By subtracting 1, we get 7F7FFFFF or FF7FFFFF, that is, bit 23 becomes zero.
+ For NaNs, however, this operation will keep bit 23 with the value 1.
+ We can then extract bit 23, and logical-OR bit 9 of the result with this
+ bit in order to quieten the NaN (a Quiet NaN is a NaN where the top bit
+ of the mantissa is set.)
+ */
+ p = (inp - 1) & UINT32_C(0x800000); /* zero if INF, nonzero if NaN. */
+ return static_cast<sf16>(((inp + vlx) >> 13) | (p >> 14));
+ /*
+ positive, exponent = 0, round-mode == UP; need to check whether number actually is 0.
+ If it is, then return 0, else return 1 (the smallest representable nonzero number)
+ */
+ case 0:
+ /*
+ -inp will set the MSB if the input number is nonzero.
+ Thus (-inp) >> 31 will turn into 0 if the input number is 0 and 1 otherwise.
+ */
+ return static_cast<sf16>(static_cast<uint32_t>((-static_cast<int32_t>(inp))) >> 31);
+
+ /*
+ negative, exponent = 0, round-mode == DOWN; need to check whether number is
+ actually 0. If it is, return 0x8000 ( float -0.0 )
+ Else return the smallest negative number ( 0x8001 ) */
+ case 6:
+ /*
+ in this case 'vlx' is 0x80000000. By subtracting the input value from it,
+ we obtain a value that is 0 if the input value is in fact zero and has
+ the MSB set if it isn't. We then right-shift the value by 31 places to
+ get a value that is 0 if the input is -0.0 and 1 otherwise.
+ */
+ return static_cast<sf16>(((vlx - inp) >> 31) + UINT32_C(0x8000));
+
+ /*
+ for all other cases involving underflow/overflow, we don't need to
+ do actual tests; we just return 'vlx'.
+ */
+ case 1:
+ case 2:
+ case 3:
+ case 4:
+ case 5:
+ case 7:
+ case 8:
+ case 9:
+ case 10:
+ case 11:
+ case 12:
+ case 13:
+ case 14:
+ case 15:
+ case 16:
+ case 17:
+ case 18:
+ case 19:
+ case 40:
+ case 41:
+ case 42:
+ case 43:
+ case 44:
+ case 45:
+ case 46:
+ case 47:
+ case 48:
+ case 49:
+ return static_cast<sf16>(vlx);
+
+ /*
+ for normal numbers, 'vlx' is the difference between the FP32 value of a number and the
+ FP16 representation of the same number left-shifted by 13 places. In addition, a rounding constant is
+ baked into 'vlx': for rounding-away-from zero, the constant is 2^13 - 1, causing roundoff away
+ from zero. for round-to-nearest away, the constant is 2^12, causing roundoff away from zero.
+ for round-to-nearest-even, the constant is 2^12 - 1. This causes correct round-to-nearest-even
+ except for odd input numbers. For odd input numbers, we need to add 1 to the constant. */
+
+ /* normal number, all rounding modes except round-to-nearest-even: */
+ case 30:
+ case 31:
+ case 32:
+ case 34:
+ case 35:
+ case 36:
+ case 37:
+ case 39:
+ return static_cast<sf16>((inp + vlx) >> 13);
+
+ /* normal number, round-to-nearest-even. */
+ case 33:
+ case 38:
+ p = inp + vlx;
+ p += (inp >> 13) & 1;
+ return static_cast<sf16>(p >> 13);
+
+ /*
+ the various denormal cases. These are not expected to be common, so their performance is a bit
+ less important. For each of these cases, we need to extract an exponent and a mantissa
+ (including the implicit '1'!), and then right-shift the mantissa by a shift-amount that
+ depends on the exponent. The shift must apply the correct rounding mode. 'vlx' is used to supply the
+ sign of the resulting denormal number.
+ */
+ case 21:
+ case 22:
+ case 25:
+ case 27:
+ /* denormal, round towards zero. */
+ p = 126 - ((inp >> 23) & 0xFF);
+ return static_cast<sf16>((((inp & UINT32_C(0x7FFFFF)) + UINT32_C(0x800000)) >> p) | vlx);
+ case 20:
+ case 26:
+ /* denormal, round away from zero. */
+ p = 126 - ((inp >> 23) & 0xFF);
+ return static_cast<sf16>(rtup_shift32((inp & UINT32_C(0x7FFFFF)) + UINT32_C(0x800000), p) | vlx);
+ case 24:
+ case 29:
+ /* denormal, round to nearest-away */
+ p = 126 - ((inp >> 23) & 0xFF);
+ return static_cast<sf16>(rtna_shift32((inp & UINT32_C(0x7FFFFF)) + UINT32_C(0x800000), p) | vlx);
+ case 23:
+ case 28:
+ /* denormal, round to nearest-even. */
+ p = 126 - ((inp >> 23) & 0xFF);
+ return static_cast<sf16>(rtne_shift32((inp & UINT32_C(0x7FFFFF)) + UINT32_C(0x800000), p) | vlx);
+ }
+
+ return 0;
+}
+
+/* convert from soft-float to native-float */
+float sf16_to_float(uint16_t p)
+{
+ if32 i;
+ i.u = sf16_to_sf32(p);
+ return i.f;
+}
+
+/* convert from native-float to soft-float */
+uint16_t float_to_sf16(float p)
+{
+ if32 i;
+ i.f = p;
+ return sf32_to_sf16(i.u, SF_NEARESTEVEN);
+}
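+
+// A round-trip usage sketch (illustrative only; the test value is an assumption):
+//
+//     uint16_t h = float_to_sf16(1.5f); // 1.5f is exactly representable in FP16
+//     float back = sf16_to_float(h); // back == 1.5f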
+
+#endif
diff --git a/thirdparty/astcenc/astcenc_partition_tables.cpp b/thirdparty/astcenc/astcenc_partition_tables.cpp
new file mode 100644
index 0000000000..cad42384d7
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_partition_tables.cpp
@@ -0,0 +1,481 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2011-2023 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+/**
+ * @brief Functions for generating partition tables on demand.
+ */
+
+#include "astcenc_internal.h"
+
+/** @brief The number of 64-bit words needed to represent a canonical partition bit pattern. */
+#define BIT_PATTERN_WORDS (((ASTCENC_BLOCK_MAX_TEXELS * 2) + 63) / 64)
+
+/**
+ * @brief Generate a canonical representation of a partition pattern.
+ *
+ * The returned value stores two bits per texel, for up to 6x6x6 texels, where the two bits store
+ * the remapped texel index. Remapping ensures that we only match on the partition pattern,
+ * independent of the partition order generated by the hash.
+ *
+ * @param texel_count The number of texels in the block.
+ * @param partition_of_texel The partition assignments, in hash order.
+ * @param[out] bit_pattern The output bit pattern representation.
+ */
+static void generate_canonical_partitioning(
+ unsigned int texel_count,
+ const uint8_t* partition_of_texel,
+ uint64_t bit_pattern[BIT_PATTERN_WORDS]
+) {
+ // Clear the pattern
+ for (unsigned int i = 0; i < BIT_PATTERN_WORDS; i++)
+ {
+ bit_pattern[i] = 0;
+ }
+
+ // Store a mapping to reorder the raw partitions so that the partitions are ordered such
+ // that the lowest texel index in partition N is smaller than the lowest texel index in
+ // partition N + 1.
+ int mapped_index[BLOCK_MAX_PARTITIONS];
+ int map_weight_count = 0;
+
+ for (unsigned int i = 0; i < BLOCK_MAX_PARTITIONS; i++)
+ {
+ mapped_index[i] = -1;
+ }
+
+ for (unsigned int i = 0; i < texel_count; i++)
+ {
+ int index = partition_of_texel[i];
+ if (mapped_index[index] < 0)
+ {
+ mapped_index[index] = map_weight_count++;
+ }
+
+ uint64_t xlat_index = mapped_index[index];
+ bit_pattern[i >> 5] |= xlat_index << (2 * (i & 0x1F));
+ }
+}
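+
+// Worked example: if the hash assigns partitions [2, 2, 0, 1] to the first four texels,
+// partition 2 is seen first and remapped to 0, then partition 0 to 1, then partition 1
+// to 2, so the canonical pattern stores the indices [0, 0, 1, 2] for those texels.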
+
+/**
+ * @brief Compare two canonical patterns to see if they are the same.
+ *
+ * @param part1 The first canonical bit pattern to check.
+ * @param part2 The second canonical bit pattern to check.
+ *
+ * @return @c true if the patterns are the same, @c false otherwise.
+ */
+static bool compare_canonical_partitionings(
+ const uint64_t part1[BIT_PATTERN_WORDS],
+ const uint64_t part2[BIT_PATTERN_WORDS]
+) {
+ return (part1[0] == part2[0])
+#if BIT_PATTERN_WORDS > 1
+ && (part1[1] == part2[1])
+#endif
+#if BIT_PATTERN_WORDS > 2
+ && (part1[2] == part2[2])
+#endif
+#if BIT_PATTERN_WORDS > 3
+ && (part1[3] == part2[3])
+#endif
+#if BIT_PATTERN_WORDS > 4
+ && (part1[4] == part2[4])
+#endif
+#if BIT_PATTERN_WORDS > 5
+ && (part1[5] == part2[5])
+#endif
+#if BIT_PATTERN_WORDS > 6
+ && (part1[6] == part2[6])
+#endif
+ ;
+}
+
+/**
+ * @brief Hash function used for procedural partition assignment.
+ *
+ * @param inp The hash seed.
+ *
+ * @return The hashed value.
+ */
+static uint32_t hash52(
+ uint32_t inp
+) {
+ inp ^= inp >> 15;
+
+ // (2^4 + 1) * (2^7 + 1) * (2^17 - 1)
+ inp *= 0xEEDE0891;
+ inp ^= inp >> 5;
+ inp += inp << 16;
+ inp ^= inp >> 7;
+ inp ^= inp >> 3;
+ inp ^= inp << 6;
+ inp ^= inp >> 17;
+ return inp;
+}
+
+/**
+ * @brief Select texel assignment for a single coordinate.
+ *
+ * @param seed The seed - the partition index from the block.
+ * @param x The texel X coordinate in the block.
+ * @param y The texel Y coordinate in the block.
+ * @param z The texel Z coordinate in the block.
+ * @param partition_count The total partition count of this encoding.
+ * @param small_block @c true if the block has fewer than 32 texels.
+ *
+ * @return The assigned partition index for this texel.
+ */
+static uint8_t select_partition(
+ int seed,
+ int x,
+ int y,
+ int z,
+ int partition_count,
+ bool small_block
+) {
+ // For small blocks bias the coordinates to get better distribution
+ if (small_block)
+ {
+ x <<= 1;
+ y <<= 1;
+ z <<= 1;
+ }
+
+ seed += (partition_count - 1) * 1024;
+
+ uint32_t rnum = hash52(seed);
+
+ uint8_t seed1 = rnum & 0xF;
+ uint8_t seed2 = (rnum >> 4) & 0xF;
+ uint8_t seed3 = (rnum >> 8) & 0xF;
+ uint8_t seed4 = (rnum >> 12) & 0xF;
+ uint8_t seed5 = (rnum >> 16) & 0xF;
+ uint8_t seed6 = (rnum >> 20) & 0xF;
+ uint8_t seed7 = (rnum >> 24) & 0xF;
+ uint8_t seed8 = (rnum >> 28) & 0xF;
+ uint8_t seed9 = (rnum >> 18) & 0xF;
+ uint8_t seed10 = (rnum >> 22) & 0xF;
+ uint8_t seed11 = (rnum >> 26) & 0xF;
+ uint8_t seed12 = ((rnum >> 30) | (rnum << 2)) & 0xF;
+
+ // Squaring all the seeds in order to bias their distribution towards lower values.
+ seed1 *= seed1;
+ seed2 *= seed2;
+ seed3 *= seed3;
+ seed4 *= seed4;
+ seed5 *= seed5;
+ seed6 *= seed6;
+ seed7 *= seed7;
+ seed8 *= seed8;
+ seed9 *= seed9;
+ seed10 *= seed10;
+ seed11 *= seed11;
+ seed12 *= seed12;
+
+ int sh1, sh2;
+ if (seed & 1)
+ {
+ sh1 = (seed & 2 ? 4 : 5);
+ sh2 = (partition_count == 3 ? 6 : 5);
+ }
+ else
+ {
+ sh1 = (partition_count == 3 ? 6 : 5);
+ sh2 = (seed & 2 ? 4 : 5);
+ }
+
+ int sh3 = (seed & 0x10) ? sh1 : sh2;
+
+ seed1 >>= sh1;
+ seed2 >>= sh2;
+ seed3 >>= sh1;
+ seed4 >>= sh2;
+ seed5 >>= sh1;
+ seed6 >>= sh2;
+ seed7 >>= sh1;
+ seed8 >>= sh2;
+
+ seed9 >>= sh3;
+ seed10 >>= sh3;
+ seed11 >>= sh3;
+ seed12 >>= sh3;
+
+ int a = seed1 * x + seed2 * y + seed11 * z + (rnum >> 14);
+ int b = seed3 * x + seed4 * y + seed12 * z + (rnum >> 10);
+ int c = seed5 * x + seed6 * y + seed9 * z + (rnum >> 6);
+ int d = seed7 * x + seed8 * y + seed10 * z + (rnum >> 2);
+
+ // Apply the saw
+ a &= 0x3F;
+ b &= 0x3F;
+ c &= 0x3F;
+ d &= 0x3F;
+
+ // Remove some of the components if we are to output < 4 partitions.
+ if (partition_count <= 3)
+ {
+ d = 0;
+ }
+
+ if (partition_count <= 2)
+ {
+ c = 0;
+ }
+
+ if (partition_count <= 1)
+ {
+ b = 0;
+ }
+
+ uint8_t partition;
+ if (a >= b && a >= c && a >= d)
+ {
+ partition = 0;
+ }
+ else if (b >= c && b >= d)
+ {
+ partition = 1;
+ }
+ else if (c >= d)
+ {
+ partition = 2;
+ }
+ else
+ {
+ partition = 3;
+ }
+
+ return partition;
+}
+
+/**
+ * @brief Generate a single partition info structure.
+ *
+ * @param[out] bsd The block size information.
+ * @param partition_count The partition count of this partitioning.
+ * @param partition_index The partition index / seed of this partitioning.
+ * @param partition_remap_index The remapped partition index of this partitioning.
+ * @param[out] pi The partition info structure to populate.
+ *
+ * @return True if this is a useful partition index, False if we can skip it.
+ */
+static bool generate_one_partition_info_entry(
+ block_size_descriptor& bsd,
+ unsigned int partition_count,
+ unsigned int partition_index,
+ unsigned int partition_remap_index,
+ partition_info& pi
+) {
+ int texels_per_block = bsd.texel_count;
+ bool small_block = texels_per_block < 32;
+
+ uint8_t *partition_of_texel = pi.partition_of_texel;
+
+ // Assign texels to partitions
+ int texel_idx = 0;
+ int counts[BLOCK_MAX_PARTITIONS] { 0 };
+ for (unsigned int z = 0; z < bsd.zdim; z++)
+ {
+ for (unsigned int y = 0; y < bsd.ydim; y++)
+ {
+ for (unsigned int x = 0; x < bsd.xdim; x++)
+ {
+ uint8_t part = select_partition(partition_index, x, y, z, partition_count, small_block);
+ pi.texels_of_partition[part][counts[part]++] = static_cast<uint8_t>(texel_idx++);
+ *partition_of_texel++ = part;
+ }
+ }
+ }
+
+ // Fill loop tail so we can overfetch later
+ for (unsigned int i = 0; i < partition_count; i++)
+ {
+ int ptex_count = counts[i];
+ int ptex_count_simd = round_up_to_simd_multiple_vla(ptex_count);
+ for (int j = ptex_count; j < ptex_count_simd; j++)
+ {
+ pi.texels_of_partition[i][j] = pi.texels_of_partition[i][ptex_count - 1];
+ }
+ }
+
+ // Populate the actual procedural partition count
+ if (counts[0] == 0)
+ {
+ pi.partition_count = 0;
+ }
+ else if (counts[1] == 0)
+ {
+ pi.partition_count = 1;
+ }
+ else if (counts[2] == 0)
+ {
+ pi.partition_count = 2;
+ }
+ else if (counts[3] == 0)
+ {
+ pi.partition_count = 3;
+ }
+ else
+ {
+ pi.partition_count = 4;
+ }
+
+ // Populate the partition index
+ pi.partition_index = static_cast<uint16_t>(partition_index);
+
+ // Populate the coverage bitmaps for 2/3/4 partitions
+ uint64_t* bitmaps { nullptr };
+ if (partition_count == 2)
+ {
+ bitmaps = bsd.coverage_bitmaps_2[partition_remap_index];
+ }
+ else if (partition_count == 3)
+ {
+ bitmaps = bsd.coverage_bitmaps_3[partition_remap_index];
+ }
+ else if (partition_count == 4)
+ {
+ bitmaps = bsd.coverage_bitmaps_4[partition_remap_index];
+ }
+
+ for (unsigned int i = 0; i < BLOCK_MAX_PARTITIONS; i++)
+ {
+ pi.partition_texel_count[i] = static_cast<uint8_t>(counts[i]);
+ }
+
+ // Valid partitionings have texels in all of the requested partitions
+ bool valid = pi.partition_count == partition_count;
+
+ if (bitmaps)
+ {
+ // Populate the partition coverage bitmap
+ for (unsigned int i = 0; i < partition_count; i++)
+ {
+ bitmaps[i] = 0ULL;
+ }
+
+ unsigned int texels_to_process = astc::min(bsd.texel_count, BLOCK_MAX_KMEANS_TEXELS);
+ for (unsigned int i = 0; i < texels_to_process; i++)
+ {
+ unsigned int idx = bsd.kmeans_texels[i];
+ bitmaps[pi.partition_of_texel[idx]] |= 1ULL << i;
+ }
+ }
+
+ return valid;
+}
+
+static void build_partition_table_for_one_partition_count(
+ block_size_descriptor& bsd,
+ bool can_omit_partitionings,
+ unsigned int partition_count_cutoff,
+ unsigned int partition_count,
+ partition_info* ptab,
+ uint64_t* canonical_patterns
+) {
+ unsigned int next_index = 0;
+ bsd.partitioning_count_selected[partition_count - 1] = 0;
+ bsd.partitioning_count_all[partition_count - 1] = 0;
+
+ // Skip tables larger than config max partition count if we can omit modes
+ if (can_omit_partitionings && (partition_count > partition_count_cutoff))
+ {
+ return;
+ }
+
+ // Iterate through twice
+ // - Pass 0: Keep selected partitionings
+ // - Pass 1: Keep non-selected partitionings (skip if in omit mode)
+ unsigned int max_iter = can_omit_partitionings ? 1 : 2;
+
+ // Tracker for things we built in the first iteration
+ uint8_t build[BLOCK_MAX_PARTITIONINGS] { 0 };
+ for (unsigned int x = 0; x < max_iter; x++)
+ {
+ for (unsigned int i = 0; i < BLOCK_MAX_PARTITIONINGS; i++)
+ {
+ // Don't include things we built in the first pass
+ if ((x == 1) && build[i])
+ {
+ continue;
+ }
+
+ bool keep_useful = generate_one_partition_info_entry(bsd, partition_count, i, next_index, ptab[next_index]);
+ if ((x == 0) && !keep_useful)
+ {
+ continue;
+ }
+
+ generate_canonical_partitioning(bsd.texel_count, ptab[next_index].partition_of_texel, canonical_patterns + next_index * BIT_PATTERN_WORDS);
+ bool keep_canonical = true;
+ for (unsigned int j = 0; j < next_index; j++)
+ {
+ bool match = compare_canonical_partitionings(canonical_patterns + next_index * BIT_PATTERN_WORDS, canonical_patterns + j * BIT_PATTERN_WORDS);
+ if (match)
+ {
+ keep_canonical = false;
+ break;
+ }
+ }
+
+ if (keep_useful && keep_canonical)
+ {
+ if (x == 0)
+ {
+ bsd.partitioning_packed_index[partition_count - 2][i] = static_cast<uint16_t>(next_index);
+ bsd.partitioning_count_selected[partition_count - 1]++;
+ bsd.partitioning_count_all[partition_count - 1]++;
+ build[i] = 1;
+ next_index++;
+ }
+ }
+ else
+ {
+ if (x == 1)
+ {
+ bsd.partitioning_packed_index[partition_count - 2][i] = static_cast<uint16_t>(next_index);
+ bsd.partitioning_count_all[partition_count - 1]++;
+ next_index++;
+ }
+ }
+ }
+ }
+}
+
+/* See header for documentation. */
+void init_partition_tables(
+ block_size_descriptor& bsd,
+ bool can_omit_partitionings,
+ unsigned int partition_count_cutoff
+) {
+ partition_info* par_tab2 = bsd.partitionings;
+ partition_info* par_tab3 = par_tab2 + BLOCK_MAX_PARTITIONINGS;
+ partition_info* par_tab4 = par_tab3 + BLOCK_MAX_PARTITIONINGS;
+ partition_info* par_tab1 = par_tab4 + BLOCK_MAX_PARTITIONINGS;
+
+ generate_one_partition_info_entry(bsd, 1, 0, 0, *par_tab1);
+ bsd.partitioning_count_selected[0] = 1;
+ bsd.partitioning_count_all[0] = 1;
+
+ uint64_t* canonical_patterns = new uint64_t[BLOCK_MAX_PARTITIONINGS * BIT_PATTERN_WORDS];
+
+ build_partition_table_for_one_partition_count(bsd, can_omit_partitionings, partition_count_cutoff, 2, par_tab2, canonical_patterns);
+ build_partition_table_for_one_partition_count(bsd, can_omit_partitionings, partition_count_cutoff, 3, par_tab3, canonical_patterns);
+ build_partition_table_for_one_partition_count(bsd, can_omit_partitionings, partition_count_cutoff, 4, par_tab4, canonical_patterns);
+
+ delete[] canonical_patterns;
+}
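Editor's note: the layout built by init_partition_tables() above stores the 2-, 3- and 4-partition tables contiguously in bsd.partitionings, with the single 1-partition entry placed last. A lookup consistent with that layout is sketched below; the function name, the template stand-in type, and the parameter list are illustrative assumptions only, not the accessor the library actually declares (that lives in astcenc_internal.h, outside this hunk).

// Hypothetical lookup sketch, assuming only the layout visible above:
// 2-partition tables at offset 0, 3-partition tables at BLOCK_MAX_PARTITIONINGS,
// 4-partition tables at 2 * BLOCK_MAX_PARTITIONINGS, and the single
// 1-partition entry stored last. PartitionInfo stands in for partition_info.
template <typename PartitionInfo>
const PartitionInfo* lookup_partition_table_sketch(
	const PartitionInfo* partitionings,      // bsd.partitionings base pointer
	unsigned int block_max_partitionings,    // BLOCK_MAX_PARTITIONINGS
	unsigned int partition_count             // requested partition count, 1..4
) {
	// The single 1-partition entry sits after the three fixed-size tables
	if (partition_count == 1)
	{
		return partitionings + 3 * block_max_partitionings;
	}

	return partitionings + (partition_count - 2) * block_max_partitionings;
}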
diff --git a/thirdparty/astcenc/astcenc_percentile_tables.cpp b/thirdparty/astcenc/astcenc_percentile_tables.cpp
new file mode 100644
index 0000000000..448ddcc968
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_percentile_tables.cpp
@@ -0,0 +1,1251 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2011-2022 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+/**
+ * @brief Percentile data tables for different block encodings.
+ *
+ * To reduce binary size the tables are stored using a packed differential encoding.
+ */
+
+#include "astcenc_internal.h"
+
+#if !defined(ASTCENC_DECOMPRESS_ONLY)
+/**
+ * @brief Structure containing packed percentile metadata.
+ *
+ * Note that percentile tables do not exist for 3D textures, so no zdim is stored.
+ */
+struct packed_percentile_table
+{
+ /** The block X dimension. */
+ uint8_t xdim;
+
+ /** The block Y dimension. */
+ uint8_t ydim;
+
+ /** The number of packed items in the 1 and 2 plane data. */
+ uint16_t item_count[2];
+
+ /** The accumulator divisor for 1 and 2 plane data. */
+ uint16_t difscales[2];
+
+ /** The initial accumulator values for 1 and 2 plane data. */
+ uint16_t initial_percs[2];
+
+ /** The packed data for the 1 and 2 plane data. */
+ const uint16_t *items[2];
+};
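Editor's note: taken together with the file comment above, the difscales and initial_percs fields imply a simple decode loop — walk a packed item array, accumulate per-item deltas onto the initial value, and divide by the scale to recover a percentile per block mode. The sketch below is illustrative only; in particular the 11-bit index / 5-bit delta split inside each uint16_t item is an assumption, and unpack_percentile_table_sketch is not a function this file declares.

#include <cstdint>

// Illustrative decoder for one packed percentile array, under the assumption
// that each uint16_t item packs a block-mode index in its low 11 bits and a
// small cumulative delta in its high 5 bits. Only the accumulator/divisor
// behaviour is grounded in the struct fields documented above.
static void unpack_percentile_table_sketch(
	const uint16_t* items,       // e.g. percentile_arr_4x4_0
	unsigned int item_count,     // matching packed_percentile_table::item_count entry
	uint16_t difscale,           // accumulator divisor for this plane count
	uint16_t initial_perc,       // initial accumulator value for this plane count
	float* percentiles           // output table indexed by block-mode index
) {
	unsigned int accum = initial_perc;
	for (unsigned int i = 0; i < item_count; i++)
	{
		unsigned int index = items[i] & 0x7FFu;        // assumed 11-bit mode index
		unsigned int delta = (items[i] >> 11) & 0x1Fu; // assumed 5-bit delta

		// Differential encoding: each item nudges the running accumulator,
		// and the divisor rescales the sum into a percentile value.
		accum += delta;
		percentiles[index] = static_cast<float>(accum) / static_cast<float>(difscale);
	}
}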
+
+#if ASTCENC_BLOCK_MAX_TEXELS >= (4 * 4)
+static const uint16_t percentile_arr_4x4_0[61] {
+ 0x0242, 0x7243, 0x6A51, 0x6A52, 0x5A41, 0x4A53, 0x8851, 0x3842,
+ 0x3852, 0x3853, 0x3043, 0xFA33, 0x1BDF, 0x2022, 0x1032, 0x29CE,
+ 0x21DE, 0x2823, 0x0813, 0x0A13, 0x0A31, 0x0A23, 0x09CF, 0x0833,
+ 0x0A32, 0x01DF, 0x0BDD, 0x0BCF, 0x0221, 0x095F, 0x0A01, 0x0BDE,
+ 0x0BCD, 0x0A22, 0x09AF, 0x0B5F, 0x0B4D, 0x0BCE, 0x0BBF, 0x0A11,
+ 0x01BF, 0x0202, 0x0B5D, 0x1203, 0x034E, 0x0B8E, 0x035E, 0x0212,
+ 0x032E, 0x0B4F, 0x03AF, 0x03AD, 0x03BD, 0x0BBE, 0x03AE, 0x039F,
+ 0x039E, 0x033E, 0x033F, 0x038F, 0x032F
+};
+
+static const uint16_t percentile_arr_4x4_1[84] {
+ 0x0452, 0xFFAE, 0x2433, 0x1DDF, 0x17CD, 0x1E21, 0x1C43, 0x1442,
+ 0x3FBE, 0x1FDD, 0x0E31, 0x0F4F, 0x1423, 0x0FBD, 0x1451, 0x0E03,
+ 0x05CF, 0x0C32, 0x0DDE, 0x27AD, 0x274E, 0x0E02, 0x0F5E, 0x07AF,
+ 0x0F5F, 0x0DCE, 0x0C41, 0x0422, 0x0613, 0x0E12, 0x0611, 0x0F3F,
+ 0x0601, 0x0DBF, 0x05DD, 0x075D, 0x0C02, 0x054E, 0x0431, 0x0413,
+ 0x079F, 0x05BE, 0x0F4D, 0x0403, 0x05AF, 0x055F, 0x05AE, 0x054F,
+ 0x0421, 0x05BD, 0x0DCD, 0x0411, 0x0412, 0x055E, 0x055D, 0x073D,
+ 0x058E, 0x072F, 0x072D, 0x079D, 0x0D2E, 0x0453, 0x078D, 0x053E,
+ 0x053F, 0x059E, 0x052F, 0x058F, 0x072E, 0x078F, 0x059F, 0x078E,
+ 0x071F, 0x073E, 0x051F, 0x070D, 0x079E, 0x070E, 0x071D, 0x0622,
+ 0x070F, 0x071E, 0x07BF, 0x07CE
+};
+
+static const packed_percentile_table block_pcd_4x4 {
+ 4, 4,
+ { 61, 84 },
+ { 184, 141 },
+ { 0, 53 },
+ { percentile_arr_4x4_0, percentile_arr_4x4_1 }
+};
+#endif
+
+#if ASTCENC_BLOCK_MAX_TEXELS >= (5 * 4)
+static const uint16_t percentile_arr_5x4_0[91] {
+ 0x02C1, 0xFAD1, 0xE8D3, 0xDAC2, 0xA8D2, 0x70D1, 0x50C2, 0x80C3,
+ 0xD2C3, 0x4AA2, 0x2AD2, 0x2242, 0x2251, 0x42A3, 0x1A43, 0x4A52,
+ 0x32B3, 0x2A41, 0x1042, 0x1851, 0x5892, 0x10A2, 0x2253, 0x10B2,
+ 0x10B3, 0x13DF, 0x3083, 0x08B1, 0x1043, 0x12B1, 0x0AB2, 0x1A93,
+ 0x1852, 0x1A33, 0x09CE, 0x08A3, 0x1022, 0x1283, 0x0853, 0x1AA1,
+ 0x1093, 0x11DE, 0x135F, 0x1832, 0x195F, 0x0A81, 0x11CF, 0x0A31,
+ 0x09DF, 0x0B4D, 0x09AF, 0x03CF, 0x0813, 0x03DD, 0x0A92, 0x0A82,
+ 0x03CD, 0x0023, 0x0BDE, 0x0BBF, 0x1232, 0x0221, 0x0291, 0x0A23,
+ 0x0833, 0x035D, 0x0BCE, 0x01BF, 0x0222, 0x134E, 0x0213, 0x0A01,
+ 0x0B4F, 0x0B5E, 0x038E, 0x032E, 0x03AF, 0x0A11, 0x03AD, 0x0203,
+ 0x0202, 0x0BBD, 0x033E, 0x03AE, 0x03BE, 0x0212, 0x033F, 0x039E,
+ 0x039F, 0x032F, 0x038F
+};
+
+static const uint16_t percentile_arr_5x4_1[104] {
+ 0x0433, 0xB621, 0x5452, 0x4443, 0x7FAE, 0xFCA3, 0x7CC2, 0x24B2,
+ 0x45DF, 0x44B3, 0x7631, 0x27CD, 0x1CD1, 0x1E03, 0x4FBE, 0x774F,
+ 0x1C42, 0x7691, 0x24A2, 0x2681, 0x3C23, 0x3C93, 0x0FBD, 0x1C32,
+ 0x1E82, 0x1E12, 0x0F4E, 0x1602, 0x0FAD, 0x0C51, 0x1FDD, 0x0E13,
+ 0x0DCF, 0x175E, 0x0C22, 0x175F, 0x15DE, 0x0CB1, 0x17AF, 0x1CC1,
+ 0x1F3F, 0x1483, 0x0441, 0x0C91, 0x04D2, 0x0DCE, 0x154E, 0x079F,
+ 0x0CA1, 0x0F5D, 0x0431, 0x15DD, 0x05BF, 0x0C92, 0x0611, 0x0C82,
+ 0x0402, 0x074D, 0x0DBD, 0x055E, 0x05BE, 0x0DCD, 0x0421, 0x05AF,
+ 0x0403, 0x0D4F, 0x055F, 0x05AE, 0x0413, 0x0E01, 0x055D, 0x073D,
+ 0x0C12, 0x0692, 0x0411, 0x072D, 0x078D, 0x079D, 0x058E, 0x0D2E,
+ 0x0453, 0x072F, 0x059E, 0x052F, 0x071F, 0x053F, 0x053E, 0x078F,
+ 0x058F, 0x051F, 0x0F2E, 0x059F, 0x078E, 0x073E, 0x071D, 0x070D,
+ 0x070E, 0x079E, 0x0622, 0x0683, 0x070F, 0x071E, 0x07BF, 0x07CE
+};
+
+static const packed_percentile_table block_pcd_5x4 {
+ 5, 4,
+ { 91, 104 },
+ { 322, 464 },
+ { 0, 202 },
+ { percentile_arr_5x4_0, percentile_arr_5x4_1 }
+};
+#endif
+
+#if ASTCENC_BLOCK_MAX_TEXELS >= (5 * 5)
+static const uint16_t percentile_arr_5x5_0[129] {
+ 0x00F3, 0xF8F2, 0x70E3, 0x62E1, 0x60E1, 0x4AC1, 0x3261, 0x38D3,
+ 0x3271, 0x5AF1, 0x5873, 0x2AD1, 0x28E2, 0x28F1, 0x2262, 0x9AC2,
+ 0x18D2, 0x1072, 0x1071, 0x22A2, 0x2062, 0x1A51, 0x10C2, 0x0892,
+ 0x08D1, 0x1AA3, 0x23EE, 0x08C3, 0x0BEF, 0x2242, 0x0863, 0x0AB3,
+ 0x0BFF, 0x0A93, 0x08A2, 0x0A41, 0x1083, 0x0842, 0x10B3, 0x21EE,
+ 0x10B2, 0x00B1, 0x1263, 0x12C3, 0x0A83, 0x0851, 0x11FE, 0x0253,
+ 0x09FD, 0x0A72, 0x09FF, 0x1AB2, 0x0BDF, 0x0A33, 0x0243, 0x0B7F,
+ 0x0AB1, 0x12D2, 0x0252, 0x096F, 0x00A3, 0x0893, 0x0822, 0x0843,
+ 0x097E, 0x097F, 0x01EF, 0x09CE, 0x03FE, 0x0A81, 0x036F, 0x0052,
+ 0x13FD, 0x0AA1, 0x1853, 0x036D, 0x0A92, 0x0832, 0x01DE, 0x0A82,
+ 0x0BED, 0x0231, 0x0BBF, 0x03DD, 0x0B6E, 0x01AF, 0x0813, 0x0023,
+ 0x0A91, 0x015F, 0x037E, 0x01CF, 0x0232, 0x0BCD, 0x0221, 0x0BDE,
+ 0x0213, 0x035F, 0x0B7D, 0x0223, 0x01BF, 0x0BCF, 0x01DF, 0x0033,
+ 0x0222, 0x03CE, 0x0A01, 0x03AF, 0x034D, 0x0B8E, 0x032E, 0x0203,
+ 0x0211, 0x0202, 0x0B5D, 0x03AD, 0x034E, 0x03AE, 0x034F, 0x033F,
+ 0x039F, 0x03BD, 0x03BE, 0x035E, 0x0212, 0x033E, 0x039E, 0x032F,
+ 0x038F
+};
+
+static const uint16_t percentile_arr_5x5_1[126] {
+ 0x0443, 0x6452, 0xFE21, 0x27AE, 0x2433, 0x1FCD, 0x25DF, 0x6CC2,
+ 0x2C62, 0x1F4F, 0x4C42, 0x1FBE, 0x0DEF, 0x34A3, 0x0E03, 0x54B2,
+ 0x1F7D, 0x17DD, 0x0DFF, 0x0CD1, 0x0E31, 0x0C71, 0x1CF1, 0x15FE,
+ 0x1691, 0x1681, 0x24B3, 0x174E, 0x0F6E, 0x0493, 0x175E, 0x1C51,
+ 0x17BD, 0x076D, 0x2CA2, 0x05EE, 0x1472, 0x2423, 0x0DCF, 0x0432,
+ 0x15DE, 0x0612, 0x0CD2, 0x0682, 0x0F5F, 0x07AD, 0x0602, 0x0CE1,
+ 0x0C91, 0x0FAF, 0x073F, 0x0E13, 0x0D7F, 0x0DCE, 0x0422, 0x0D7D,
+ 0x0441, 0x05FD, 0x0CB1, 0x0C83, 0x04C1, 0x0461, 0x0F9F, 0x0DDD,
+ 0x056E, 0x0C92, 0x0482, 0x0431, 0x05ED, 0x0D6F, 0x075D, 0x0402,
+ 0x057E, 0x0DBF, 0x04A1, 0x054E, 0x0F4D, 0x0403, 0x05CD, 0x0453,
+ 0x05AE, 0x0421, 0x0F1F, 0x05BE, 0x0601, 0x0611, 0x05BD, 0x05AF,
+ 0x078D, 0x072D, 0x073D, 0x055E, 0x0F9D, 0x0411, 0x0413, 0x0412,
+ 0x055F, 0x077E, 0x055D, 0x052E, 0x054F, 0x053E, 0x058E, 0x078F,
+ 0x059E, 0x071D, 0x0E92, 0x053F, 0x059F, 0x051F, 0x072F, 0x052F,
+ 0x070D, 0x079E, 0x058F, 0x072E, 0x070E, 0x078E, 0x070F, 0x073E,
+ 0x0622, 0x0683, 0x071E, 0x076F, 0x07BF, 0x07CE
+};
+
+static const packed_percentile_table block_pcd_5x5 {
+ 5, 5,
+ { 129, 126 },
+ { 258, 291 },
+ { 0, 116 },
+ { percentile_arr_5x5_0, percentile_arr_5x5_1 }
+};
+#endif
+
+#if ASTCENC_BLOCK_MAX_TEXELS >= (6 * 5)
+static const uint16_t percentile_arr_6x5_0[165] {
+ 0x0163, 0xF8F3, 0x9962, 0x8972, 0x7961, 0x7173, 0x6953, 0x5943,
+ 0x4B41, 0x3AE1, 0x38E3, 0x6971, 0x32C1, 0x28D3, 0x2A61, 0xC8F2,
+ 0x2271, 0x4873, 0x5B21, 0x3AD1, 0x1B13, 0x1952, 0x1B51, 0x12F1,
+ 0x1A62, 0x1322, 0x1951, 0x10E2, 0x1B31, 0x20F1, 0x2102, 0x2072,
+ 0x10D2, 0x1142, 0x2912, 0x3871, 0x2BEE, 0x0862, 0x1123, 0x0AC2,
+ 0x12A2, 0x0A51, 0x1922, 0x0941, 0x1BEF, 0x0B42, 0x08D1, 0x13FF,
+ 0x1933, 0x08C3, 0x08C2, 0x1131, 0x08E1, 0x2903, 0x0863, 0x0B32,
+ 0x1132, 0x1AC3, 0x0A42, 0x1A41, 0x0042, 0x21EE, 0x09FF, 0x03DF,
+ 0x0AA3, 0x11FE, 0x02B3, 0x0B11, 0x10B3, 0x0B03, 0x11FD, 0x0913,
+ 0x0A53, 0x037F, 0x1263, 0x0051, 0x0A33, 0x0B01, 0x016F, 0x0A72,
+ 0x1312, 0x08A2, 0x10B1, 0x0BFE, 0x11EF, 0x0B02, 0x0A52, 0x0043,
+ 0x0822, 0x01CE, 0x0A43, 0x097F, 0x036F, 0x08B2, 0x03FD, 0x0A83,
+ 0x0B33, 0x0AB1, 0x017E, 0x0B23, 0x0852, 0x02D2, 0x0BBF, 0x0BDD,
+ 0x03ED, 0x0AB2, 0x02A1, 0x0853, 0x036D, 0x0892, 0x0032, 0x0A31,
+ 0x0083, 0x09DE, 0x0A93, 0x08A3, 0x1213, 0x0BDE, 0x03CD, 0x036E,
+ 0x037E, 0x0A21, 0x0023, 0x0BCF, 0x01CF, 0x0013, 0x01AF, 0x0A92,
+ 0x0232, 0x035F, 0x0093, 0x0B7D, 0x015F, 0x0282, 0x01BF, 0x09DF,
+ 0x03CE, 0x0223, 0x0833, 0x0222, 0x03AF, 0x0A01, 0x0291, 0x0B4D,
+ 0x032E, 0x038E, 0x0203, 0x0281, 0x035D, 0x03AD, 0x0B9F, 0x0202,
+ 0x034F, 0x03BE, 0x0211, 0x03AE, 0x03BD, 0x0212, 0x034E, 0x033F,
+ 0x033E, 0x035E, 0x039E, 0x032F, 0x038F
+};
+
+static const uint16_t percentile_arr_6x5_1[145] {
+ 0x0443, 0xEFAE, 0x2CC2, 0x2E21, 0x2C52, 0x7C33, 0x47CD, 0x25DF,
+ 0x3CA3, 0xFFBE, 0x2551, 0x24B3, 0x474F, 0x1513, 0x2691, 0x1603,
+ 0x1462, 0x1D32, 0x14B2, 0x5442, 0x2CD2, 0x35EF, 0x0CD1, 0x3D22,
+ 0x17BD, 0x0FDD, 0x0DFF, 0x2631, 0x177D, 0x0CF1, 0x1E81, 0x0E82,
+ 0x1DFE, 0x0F5E, 0x0701, 0x2CA2, 0x1D03, 0x0F4E, 0x1471, 0x0C51,
+ 0x1F6E, 0x2FAF, 0x0561, 0x0C72, 0x176D, 0x0FAD, 0x0DEE, 0x05CF,
+ 0x0E13, 0x0F5F, 0x0E12, 0x0C23, 0x1E02, 0x1D12, 0x0CB1, 0x0C32,
+ 0x0C93, 0x15DE, 0x0F9F, 0x0F3F, 0x0D41, 0x0C41, 0x0CC1, 0x0D31,
+ 0x0C22, 0x05FD, 0x057F, 0x0D01, 0x0461, 0x04E1, 0x0D7D, 0x05CE,
+ 0x0502, 0x0C31, 0x05ED, 0x05DD, 0x0511, 0x0F11, 0x0491, 0x0D6F,
+ 0x0521, 0x056E, 0x0C83, 0x0D23, 0x04A1, 0x0C02, 0x075D, 0x05BF,
+ 0x0C21, 0x079D, 0x0482, 0x05BD, 0x0DBE, 0x05CD, 0x054E, 0x057E,
+ 0x0DAE, 0x074D, 0x078D, 0x0542, 0x0492, 0x05AF, 0x0611, 0x0F3D,
+ 0x0601, 0x071F, 0x055E, 0x059E, 0x0571, 0x054F, 0x0412, 0x0453,
+ 0x058E, 0x0413, 0x0D3E, 0x077E, 0x072D, 0x052E, 0x059F, 0x055D,
+ 0x072F, 0x0403, 0x0411, 0x058F, 0x055F, 0x0692, 0x078E, 0x053F,
+ 0x0D2F, 0x078F, 0x070D, 0x071D, 0x051F, 0x072E, 0x079E, 0x070E,
+ 0x070F, 0x073E, 0x0622, 0x0683, 0x0702, 0x071E, 0x076F, 0x07BF,
+ 0x07CE
+};
+
+static const packed_percentile_table block_pcd_6x5 {
+ 6, 5,
+ { 165, 145 },
+ { 388, 405 },
+ { 0, 156 },
+ { percentile_arr_6x5_0, percentile_arr_6x5_1 }
+};
+#endif
+
+#if ASTCENC_BLOCK_MAX_TEXELS >= (6 * 6)
+static const uint16_t percentile_arr_6x6_0[206] {
+ 0x006F, 0xF908, 0xF104, 0xE918, 0xE963, 0xD114, 0xB0F3, 0xA07E,
+ 0x7972, 0x705F, 0x687F, 0x6162, 0x5953, 0x586E, 0x610C, 0x524D,
+ 0x5973, 0x9943, 0x98E3, 0x904F, 0x8341, 0x7AC1, 0x3A61, 0x70D3,
+ 0xA073, 0x6AE1, 0x30F2, 0x3313, 0x2B21, 0x9A2E, 0x4322, 0x225D,
+ 0x2331, 0x2271, 0x22D1, 0x1A2D, 0x221F, 0x22F1, 0x1971, 0x6952,
+ 0x1951, 0x187D, 0x18F1, 0x1902, 0x185E, 0x1B51, 0x105D, 0x1A3D,
+ 0x30E2, 0x10D2, 0x1961, 0x12A2, 0x6072, 0x3942, 0x386D, 0x33EE,
+ 0x104E, 0x4923, 0x101E, 0x2122, 0x1251, 0x1141, 0x182F, 0x3133,
+ 0x080E, 0x1262, 0x123E, 0x1B32, 0x102E, 0x1931, 0x10D1, 0x1912,
+ 0x0871, 0x12C2, 0x08C2, 0x1103, 0x0B03, 0x1062, 0x083D, 0x08E1,
+ 0x1132, 0x184D, 0x0863, 0x08C3, 0x303F, 0x083E, 0x10B3, 0x12A3,
+ 0x0BEF, 0x0B11, 0x1A42, 0x2233, 0x13FF, 0x080F, 0x0A41, 0x0AC3,
+ 0x0842, 0x1A63, 0x0BDF, 0x09FF, 0x12B3, 0x124E, 0x0B12, 0x0B42,
+ 0x0A2F, 0x1253, 0x0913, 0x1051, 0x0B01, 0x120F, 0x0B02, 0x08A2,
+ 0x0BBF, 0x00B1, 0x22B1, 0x01EE, 0x1B33, 0x0B23, 0x0283, 0x13FD,
+ 0x0AB2, 0x11FD, 0x09FE, 0x0A43, 0x08B2, 0x0A1D, 0x0A52, 0x023F,
+ 0x101F, 0x01CE, 0x0A31, 0x0BDD, 0x0293, 0x1822, 0x12A1, 0x03FE,
+ 0x121E, 0x0843, 0x0272, 0x0B6F, 0x0052, 0x0A0D, 0x0BED, 0x12D2,
+ 0x1B7F, 0x1053, 0x0032, 0x01DE, 0x08A3, 0x020E, 0x0883, 0x09EF,
+ 0x0892, 0x0A21, 0x03CD, 0x0B5F, 0x0213, 0x0A32, 0x016F, 0x1292,
+ 0x03DE, 0x017E, 0x0BAF, 0x0223, 0x1093, 0x0BCF, 0x037E, 0x01DF,
+ 0x09CF, 0x015F, 0x09AF, 0x0023, 0x01BF, 0x0222, 0x0282, 0x03CE,
+ 0x1013, 0x036E, 0x097F, 0x0033, 0x0A01, 0x0B6D, 0x03BE, 0x037D,
+ 0x0281, 0x0BAE, 0x0203, 0x032E, 0x034D, 0x034F, 0x0291, 0x0211,
+ 0x038E, 0x03BD, 0x039E, 0x0BAD, 0x033E, 0x034E, 0x039F, 0x0202,
+ 0x035D, 0x0212, 0x033F, 0x035E, 0x038F, 0x032F
+};
+
+static const uint16_t percentile_arr_6x6_1[164] {
+ 0x07AE, 0x8443, 0x7E21, 0x77CD, 0x6C62, 0x9433, 0x6452, 0x34C2,
+ 0x5DDF, 0xC7BE, 0x25EF, 0x24A3, 0x3CF1, 0xFDFF, 0x177D, 0x1F4F,
+ 0xC551, 0x5CB3, 0x1532, 0x1513, 0x143E, 0x245D, 0x14B2, 0x2472,
+ 0x14D2, 0x1FBD, 0x1631, 0x2DFE, 0x1691, 0x17DD, 0x2E03, 0x376E,
+ 0x2442, 0x0F6D, 0x3C71, 0x2CD1, 0x2522, 0x6C51, 0x260D, 0x17AF,
+ 0x0DEE, 0x1C1F, 0x2F01, 0x142E, 0x0CA2, 0x0FAD, 0x3D03, 0x275E,
+ 0x1681, 0x274E, 0x1682, 0x1C23, 0x273F, 0x0F5F, 0x05DE, 0x15FD,
+ 0x0DCF, 0x1E02, 0x04B1, 0x144D, 0x0E12, 0x0D12, 0x1CC1, 0x0E13,
+ 0x1C6D, 0x0C32, 0x043D, 0x0C61, 0x0F9F, 0x04E1, 0x0DCE, 0x0D41,
+ 0x1C93, 0x0C22, 0x061D, 0x0D7F, 0x0C41, 0x0561, 0x0531, 0x0D21,
+ 0x0711, 0x0C91, 0x0501, 0x0C1E, 0x040F, 0x15DD, 0x0431, 0x0C2F,
+ 0x057D, 0x0C2D, 0x0DBE, 0x040E, 0x0D02, 0x0D11, 0x054E, 0x040D,
+ 0x0D23, 0x0DBF, 0x04A1, 0x05ED, 0x0C1D, 0x05BD, 0x072D, 0x056E,
+ 0x0483, 0x0F3D, 0x0482, 0x078D, 0x0F5D, 0x0453, 0x0D9E, 0x0C4E,
+ 0x05CD, 0x079D, 0x0402, 0x05AE, 0x0F1F, 0x0542, 0x074D, 0x056F,
+ 0x0421, 0x0D4F, 0x0601, 0x0571, 0x0492, 0x059F, 0x053F, 0x05AF,
+ 0x0611, 0x055E, 0x0D8E, 0x053E, 0x055D, 0x047D, 0x0411, 0x052E,
+ 0x058F, 0x051F, 0x055F, 0x0D7E, 0x072F, 0x052F, 0x0412, 0x078F,
+ 0x0403, 0x077E, 0x070D, 0x070E, 0x078E, 0x0F1D, 0x072E, 0x0413,
+ 0x070F, 0x0692, 0x079E, 0x060E, 0x0622, 0x0683, 0x0702, 0x071E,
+ 0x073E, 0x076F, 0x07BF, 0x07CE
+};
+
+static const packed_percentile_table block_pcd_6x6 {
+ 6, 6,
+ { 206, 164 },
+ { 769, 644 },
+ { 0, 256 },
+ { percentile_arr_6x6_0, percentile_arr_6x6_1 }
+};
+#endif
+
+#if ASTCENC_BLOCK_MAX_TEXELS >= (8 * 5)
+static const uint16_t percentile_arr_8x5_0[226] {
+ 0x0066, 0xF865, 0xE963, 0xA856, 0xA1F2, 0x9875, 0x91C3, 0x91E2,
+ 0x80F3, 0x8076, 0x61E3, 0x6153, 0x5172, 0x59D2, 0x51D3, 0x5047,
+ 0xA943, 0x49B3, 0x4846, 0x4962, 0xC037, 0x4173, 0x39F1, 0x7027,
+ 0xA2C1, 0x3AE1, 0x9341, 0x30D3, 0x5225, 0x2A61, 0x33C1, 0x28E3,
+ 0x53A1, 0x49C2, 0x2A06, 0x4055, 0x2006, 0x21D1, 0x2271, 0x4321,
+ 0x3873, 0x18F2, 0x2015, 0x1A15, 0x1857, 0x52D1, 0x3045, 0x4835,
+ 0x1952, 0x29E1, 0x3207, 0x1036, 0x1816, 0x2A16, 0x2971, 0x13B1,
+ 0x2A17, 0x2351, 0x1025, 0x1826, 0x30E2, 0x1262, 0x20F1, 0x1007,
+ 0x1072, 0x1151, 0x10D2, 0x1235, 0x1205, 0x1062, 0x4AF1, 0x1251,
+ 0x0B31, 0x1381, 0x13EE, 0x1B92, 0x13EF, 0x0942, 0x1AA2, 0x13FF,
+ 0x1161, 0x0B93, 0x19A2, 0x11B1, 0x08D1, 0x12C2, 0x0B13, 0x1B22,
+ 0x2123, 0x09A3, 0x2071, 0x1B7F, 0x1817, 0x0A42, 0x10C2, 0x1233,
+ 0x08C3, 0x0A41, 0x0B42, 0x09C1, 0x0933, 0x1AB3, 0x1382, 0x1BDF,
+ 0x2122, 0x0A53, 0x0AC3, 0x20E1, 0x0941, 0x0931, 0x0042, 0x0BA2,
+ 0x0AA3, 0x0992, 0x0863, 0x08B3, 0x11B2, 0x0902, 0x1283, 0x09FF,
+ 0x0B83, 0x0982, 0x0932, 0x0BFE, 0x0B32, 0x0BBF, 0x11FE, 0x036F,
+ 0x0851, 0x08B1, 0x18A2, 0x11EE, 0x0A52, 0x0BB2, 0x01FD, 0x0A43,
+ 0x1A63, 0x1193, 0x0B91, 0x0043, 0x1231, 0x0A26, 0x0AB1, 0x03FD,
+ 0x096F, 0x00B2, 0x0983, 0x0A72, 0x01CE, 0x0BDD, 0x0022, 0x0B11,
+ 0x1213, 0x0B6D, 0x017E, 0x1333, 0x0112, 0x0852, 0x02D2, 0x097F,
+ 0x01EF, 0x0AB2, 0x0293, 0x0853, 0x0BED, 0x0B12, 0x1303, 0x02A1,
+ 0x0892, 0x0032, 0x0883, 0x0B6E, 0x0292, 0x0A32, 0x037E, 0x0B23,
+ 0x0103, 0x0A21, 0x0B01, 0x0302, 0x0BCD, 0x00A3, 0x0BCF, 0x0BDE,
+ 0x0113, 0x01DE, 0x0B5F, 0x0013, 0x0BAF, 0x0223, 0x0222, 0x0A82,
+ 0x0833, 0x0023, 0x09CF, 0x037D, 0x01AF, 0x095F, 0x03CE, 0x09DF,
+ 0x01BF, 0x0893, 0x0203, 0x0201, 0x0B4D, 0x03BE, 0x032E, 0x03AE,
+ 0x0291, 0x0A02, 0x0211, 0x039F, 0x0281, 0x038E, 0x03AD, 0x033F,
+ 0x035D, 0x033E, 0x034E, 0x034F, 0x0212, 0x03BD, 0x032F, 0x035E,
+ 0x038F, 0x039E
+};
+
+static const uint16_t percentile_arr_8x5_1[167] {
+ 0x0621, 0xFCC2, 0x3443, 0xA433, 0x5532, 0x2551, 0x6CA3, 0x27AE,
+ 0x6452, 0x8E03, 0x3CB3, 0x4DA2, 0x6DDF, 0x37CD, 0x6F01, 0x1691,
+ 0x2E82, 0x27BE, 0x1513, 0x34D2, 0x1D22, 0x3E31, 0x2593, 0x2CB2,
+ 0x1C16, 0x374F, 0x0DD1, 0x2583, 0x6613, 0x0CD1, 0x0C35, 0x1462,
+ 0x3E81, 0x2612, 0x2C42, 0x3407, 0x14A2, 0x0E02, 0x1CF1, 0x0C06,
+ 0x17BD, 0x0F7D, 0x1D23, 0x35B1, 0x179F, 0x0D92, 0x0F5E, 0x1451,
+ 0x04B1, 0x1F6E, 0x0DEF, 0x0D31, 0x374E, 0x15C1, 0x0541, 0x2405,
+ 0x17AD, 0x0471, 0x1472, 0x0DFE, 0x0711, 0x0FDD, 0x0DFF, 0x0432,
+ 0x1D82, 0x0423, 0x0F6D, 0x07AF, 0x0F5F, 0x04C1, 0x1542, 0x0561,
+ 0x0DCF, 0x1D03, 0x1493, 0x0422, 0x0445, 0x0D12, 0x0C25, 0x0415,
+ 0x0DA1, 0x1591, 0x0DEE, 0x05DE, 0x0C31, 0x0491, 0x0441, 0x0D21,
+ 0x078D, 0x057D, 0x0C61, 0x0F3F, 0x0581, 0x0D6E, 0x0501, 0x0CA1,
+ 0x04E1, 0x0DFD, 0x057F, 0x0502, 0x0511, 0x0C82, 0x0483, 0x0C03,
+ 0x079D, 0x0402, 0x0DDD, 0x0611, 0x05AE, 0x0DCE, 0x056F, 0x0421,
+ 0x057E, 0x071F, 0x0DBF, 0x05BE, 0x0412, 0x059F, 0x054E, 0x077E,
+ 0x0C26, 0x05ED, 0x073D, 0x0601, 0x0492, 0x0453, 0x075D, 0x058E,
+ 0x0F2D, 0x05CD, 0x0571, 0x053E, 0x0692, 0x05BD, 0x054F, 0x055E,
+ 0x0411, 0x0F1D, 0x074D, 0x059E, 0x05AF, 0x070D, 0x053F, 0x058F,
+ 0x0413, 0x070F, 0x055D, 0x070E, 0x078F, 0x052E, 0x072F, 0x055F,
+ 0x078E, 0x0F2E, 0x052F, 0x051F, 0x0417, 0x071E, 0x0781, 0x0622,
+ 0x0683, 0x0702, 0x073E, 0x076F, 0x079E, 0x07BF, 0x07CE
+};
+
+static const packed_percentile_table block_pcd_8x5 {
+ 8, 5,
+ { 226, 167 },
+ { 763, 517 },
+ { 0, 178 },
+ { percentile_arr_8x5_0, percentile_arr_8x5_1 }
+};
+#endif
+
+#if ASTCENC_BLOCK_MAX_TEXELS >= (8 * 6)
+static const uint16_t percentile_arr_8x6_0[273] {
+ 0x0154, 0xF944, 0xE066, 0xA128, 0x9963, 0x8118, 0x806F, 0x79F2,
+ 0x79E2, 0x7108, 0xD934, 0x6056, 0x69C3, 0x60F3, 0x5972, 0x59E3,
+ 0x5075, 0x91B3, 0xC9D2, 0x807E, 0x385F, 0x4153, 0x3943, 0x4162,
+ 0x3837, 0x3847, 0x7173, 0x31D3, 0x6948, 0x3046, 0x307F, 0x5827,
+ 0x3114, 0x32C1, 0x3076, 0x2A4D, 0x58E3, 0x306E, 0x2924, 0x2A61,
+ 0x29F1, 0x50D3, 0x704F, 0x210C, 0x2BA1, 0x2225, 0x2873, 0x4865,
+ 0x2206, 0x8341, 0x2006, 0x3B21, 0x18F2, 0x21C2, 0x1A1F, 0x23C1,
+ 0x3AE1, 0x1855, 0x19D1, 0x1A15, 0x3815, 0x1207, 0x1835, 0x2A2E,
+ 0x1A16, 0x1836, 0x2271, 0x2845, 0x1A2D, 0x11E1, 0x1816, 0x1171,
+ 0x2217, 0x1952, 0x12D1, 0x3904, 0x125D, 0x4BB1, 0x207D, 0x10E2,
+ 0x1026, 0x2025, 0x12F1, 0x28F1, 0x105D, 0x1235, 0x12A2, 0x1007,
+ 0x123D, 0x1A05, 0x1072, 0x1331, 0x101E, 0x0951, 0x10D2, 0x1057,
+ 0x1B92, 0x185E, 0x1251, 0x19A2, 0x186D, 0x0B81, 0x2BEE, 0x080E,
+ 0x1A33, 0x1942, 0x0B13, 0x0B51, 0x11A3, 0x0923, 0x2322, 0x09B1,
+ 0x184E, 0x1161, 0x18D1, 0x0933, 0x0B93, 0x4A62, 0x1017, 0x082F,
+ 0x0A42, 0x0B82, 0x0AA3, 0x0A41, 0x08C2, 0x08B3, 0x0A3E, 0x22B3,
+ 0x0871, 0x1BBF, 0x09C1, 0x0AC2, 0x09B2, 0x0BEF, 0x082E, 0x1062,
+ 0x0922, 0x08C3, 0x1063, 0x0A53, 0x0BDF, 0x080F, 0x0B42, 0x0A83,
+ 0x084D, 0x103F, 0x0931, 0x08E1, 0x0A0F, 0x1BA2, 0x09FF, 0x1332,
+ 0x03FF, 0x0941, 0x12C3, 0x0A63, 0x003D, 0x0842, 0x083E, 0x0B83,
+ 0x0BB2, 0x0A31, 0x0932, 0x1102, 0x0992, 0x0982, 0x1051, 0x08B1,
+ 0x0A2F, 0x121E, 0x02B1, 0x0A4E, 0x11EE, 0x00A2, 0x1022, 0x0043,
+ 0x0A52, 0x0A1D, 0x0226, 0x1193, 0x03DD, 0x08B2, 0x0BFD, 0x0A43,
+ 0x0A13, 0x0AB2, 0x01FD, 0x09FE, 0x020D, 0x081F, 0x0B33, 0x0053,
+ 0x0B91, 0x0293, 0x0B11, 0x0B7F, 0x0AA1, 0x0B03, 0x0A0E, 0x03FE,
+ 0x01CE, 0x0B6F, 0x0183, 0x0912, 0x023F, 0x0852, 0x0A21, 0x0323,
+ 0x03ED, 0x0A32, 0x13AF, 0x0272, 0x08A3, 0x0B12, 0x0083, 0x0832,
+ 0x13CD, 0x0223, 0x0A92, 0x0092, 0x0AD2, 0x0301, 0x0302, 0x0BDE,
+ 0x0A22, 0x01EF, 0x0B5F, 0x0103, 0x0BCF, 0x096F, 0x017E, 0x0113,
+ 0x01DE, 0x0823, 0x0282, 0x0B6E, 0x015F, 0x0813, 0x01AF, 0x01CF,
+ 0x0B7E, 0x0033, 0x01DF, 0x0BCE, 0x01BF, 0x036D, 0x0A03, 0x017F,
+ 0x03BE, 0x0201, 0x0893, 0x038E, 0x034D, 0x03AE, 0x0202, 0x039F,
+ 0x0291, 0x0A11, 0x032E, 0x033F, 0x034F, 0x0281, 0x037D, 0x03BD,
+ 0x0212, 0x033E, 0x035E, 0x034E, 0x035D, 0x03AD, 0x032F, 0x038F,
+ 0x039E
+};
+
+static const uint16_t percentile_arr_8x6_1[186] {
+ 0x0621, 0xFC33, 0x37AE, 0x1CC2, 0x2C43, 0xAD32, 0x34A3, 0x4551,
+ 0x6452, 0x5C62, 0x1FCD, 0x14F1, 0x4CB3, 0x24D2, 0x15DF, 0x0FBE,
+ 0x2603, 0x3DA2, 0x2E31, 0x25D1, 0x25EF, 0x0D22, 0x2E91, 0x1E82,
+ 0x0FBD, 0x1513, 0x0CB2, 0x0CD1, 0x0F4F, 0x1F7D, 0x1701, 0x0C16,
+ 0x2593, 0x2C42, 0x0C72, 0x14A2, 0x0F6E, 0x0C35, 0x0C71, 0x0D83,
+ 0x0C07, 0x1DFF, 0x043E, 0x1613, 0x07DD, 0x0FAD, 0x1451, 0x076D,
+ 0x0E81, 0x05FE, 0x0406, 0x0E0D, 0x045D, 0x2612, 0x0E02, 0x07AF,
+ 0x0DB1, 0x0F5E, 0x15C1, 0x0C23, 0x1523, 0x0C1F, 0x0D92, 0x04B1,
+ 0x0D31, 0x0432, 0x0D61, 0x0F4E, 0x0D41, 0x0DEE, 0x0D42, 0x04C1,
+ 0x0CE1, 0x079F, 0x0C2E, 0x0405, 0x0C22, 0x0461, 0x0E1D, 0x0582,
+ 0x073F, 0x0571, 0x0C4D, 0x0DFD, 0x05CE, 0x0C6D, 0x05DE, 0x0415,
+ 0x0C45, 0x075F, 0x0C41, 0x0D03, 0x05A1, 0x0711, 0x05CF, 0x0425,
+ 0x0C93, 0x0D21, 0x0591, 0x043D, 0x0D12, 0x0501, 0x040F, 0x0511,
+ 0x0431, 0x0C03, 0x04A1, 0x078D, 0x0581, 0x041E, 0x040D, 0x0C02,
+ 0x040E, 0x05DD, 0x057F, 0x079D, 0x042D, 0x0D9F, 0x0502, 0x056E,
+ 0x0412, 0x071F, 0x044E, 0x05BF, 0x0C1D, 0x0482, 0x05AE, 0x042F,
+ 0x057D, 0x0491, 0x054E, 0x047D, 0x0DBE, 0x0611, 0x0492, 0x0601,
+ 0x05BD, 0x05CD, 0x0426, 0x05ED, 0x072D, 0x073D, 0x0483, 0x0F5D,
+ 0x0421, 0x056F, 0x053F, 0x058E, 0x054F, 0x078F, 0x053E, 0x059E,
+ 0x057E, 0x051F, 0x055D, 0x0413, 0x070D, 0x05AF, 0x0411, 0x0453,
+ 0x0D5E, 0x077E, 0x052F, 0x070F, 0x074D, 0x0692, 0x070E, 0x072F,
+ 0x072E, 0x058F, 0x071D, 0x052E, 0x0417, 0x073E, 0x0781, 0x078E,
+ 0x055F, 0x060E, 0x0622, 0x0683, 0x0702, 0x071E, 0x076F, 0x079E,
+ 0x07BF, 0x07CE
+};
+
+static const packed_percentile_table block_pcd_8x6 {
+ 8, 6,
+ { 273, 186 },
+ { 880, 300 },
+ { 0, 64 },
+ { percentile_arr_8x6_0, percentile_arr_8x6_1 }
+};
+#endif
+
+#if ASTCENC_BLOCK_MAX_TEXELS >= (8 * 8)
+static const uint16_t percentile_arr_8x8_0[347] {
+ 0x0334, 0xFD44, 0xDD14, 0x9154, 0x9B08, 0x906A, 0x8928, 0x8108,
+ 0xE866, 0xC918, 0x606F, 0xC0FE, 0x5963, 0x58EE, 0x6534, 0x505A,
+ 0x51E2, 0xA8CF, 0x5354, 0x5314, 0x5134, 0x5524, 0x48F3, 0x504B,
+ 0x487E, 0x5344, 0x49C3, 0x4972, 0x49F2, 0x4856, 0xD0EF, 0x81D2,
+ 0x78DE, 0x4261, 0x3AC1, 0x71E3, 0x6879, 0x390C, 0x3143, 0x31B3,
+ 0x385F, 0x3153, 0x306E, 0x3037, 0x30DF, 0x3162, 0x304F, 0x3075,
+ 0xB03B, 0x2847, 0x28E3, 0x2914, 0x507F, 0x28BF, 0x5173, 0x5073,
+ 0x20D3, 0x2A06, 0x2827, 0x2508, 0x2229, 0x29D3, 0x204A, 0x207A,
+ 0x2046, 0x4148, 0x20FD, 0x4225, 0x23A1, 0x3944, 0x2065, 0x1924,
+ 0x2324, 0x1806, 0x19F1, 0x2215, 0x1876, 0x22AD, 0x502B, 0x1B04,
+ 0x18F2, 0x3A4D, 0x3216, 0x3504, 0x18DD, 0x1B21, 0x10CE, 0x1869,
+ 0x1B41, 0x1855, 0x1207, 0x1AE1, 0x2845, 0x19D1, 0x2A0A, 0x1A2D,
+ 0x2A1A, 0x11C2, 0x1A0B, 0x1217, 0x2816, 0x121B, 0x1271, 0x2AD1,
+ 0x1035, 0x1015, 0x287D, 0x12F1, 0x43C1, 0x1171, 0x1A05, 0x08E2,
+ 0x11E1, 0x3251, 0x2049, 0x20F1, 0x12CD, 0x0A39, 0x1219, 0x1059,
+ 0x1104, 0x1036, 0x1872, 0x3007, 0x08ED, 0x205E, 0x1026, 0x0952,
+ 0x1392, 0x1019, 0x0951, 0x100A, 0x13EE, 0x08D2, 0x1242, 0x0ABD,
+ 0x22A2, 0x0BDF, 0x2B81, 0x0A35, 0x13B1, 0x0839, 0x13BF, 0x0A33,
+ 0x1B31, 0x205D, 0x1241, 0x183A, 0x2025, 0x0B93, 0x0A3D, 0x1017,
+ 0x1313, 0x1253, 0x082A, 0x204E, 0x09A2, 0x080B, 0x0A1F, 0x125D,
+ 0x0A2E, 0x081A, 0x08D1, 0x082F, 0x086D, 0x1B82, 0x0A09, 0x0B22,
+ 0x1062, 0x11A3, 0x2161, 0x0923, 0x129F, 0x1A62, 0x0871, 0x0942,
+ 0x081B, 0x1133, 0x18AE, 0x0A9E, 0x0863, 0x09FF, 0x18C2, 0x0B51,
+ 0x08BD, 0x0AA3, 0x09B1, 0x1AC2, 0x08B3, 0x0829, 0x0BEF, 0x0B83,
+ 0x0AAE, 0x0A8D, 0x1857, 0x185B, 0x08AF, 0x103F, 0x08C3, 0x09B2,
+ 0x0A4E, 0x11C1, 0x0A31, 0x0B42, 0x0A83, 0x0BFF, 0x13DD, 0x00CD,
+ 0x0AB3, 0x0842, 0x08BE, 0x0922, 0x1A8E, 0x08E1, 0x002E, 0x0BA2,
+ 0x0A8F, 0x2263, 0x0252, 0x0B32, 0x0AC3, 0x0941, 0x0A43, 0x083D,
+ 0x083E, 0x0A3E, 0x084D, 0x1131, 0x136F, 0x0AB1, 0x0193, 0x0BFD,
+ 0x0391, 0x0851, 0x13AF, 0x0843, 0x0213, 0x1226, 0x0932, 0x03B2,
+ 0x0902, 0x0BCD, 0x0221, 0x089E, 0x00B1, 0x0BDE, 0x03FE, 0x02A1,
+ 0x0982, 0x009F, 0x080E, 0x0B5F, 0x02BE, 0x0A32, 0x0A2A, 0x01EE,
+ 0x0053, 0x0AB2, 0x0192, 0x09FD, 0x0052, 0x0B03, 0x0293, 0x00A2,
+ 0x0B7F, 0x0BED, 0x0311, 0x08B2, 0x0A72, 0x088E, 0x0333, 0x0B12,
+ 0x0A23, 0x0822, 0x0083, 0x11CE, 0x021D, 0x08A3, 0x088F, 0x029D,
+ 0x0A22, 0x0A3F, 0x01FE, 0x020F, 0x0983, 0x02D2, 0x0292, 0x0B23,
+ 0x001E, 0x0BCF, 0x03CE, 0x09AF, 0x0B02, 0x0301, 0x022F, 0x137E,
+ 0x021E, 0x09EF, 0x016F, 0x0112, 0x097E, 0x080F, 0x020D, 0x0092,
+ 0x01DE, 0x09DF, 0x0032, 0x0033, 0x0A82, 0x03BE, 0x0B6E, 0x001F,
+ 0x020E, 0x0023, 0x09CF, 0x0113, 0x0103, 0x0013, 0x0BAE, 0x0203,
+ 0x0BAD, 0x01BF, 0x034F, 0x095F, 0x036D, 0x0202, 0x017F, 0x0093,
+ 0x0201, 0x034D, 0x0212, 0x035D, 0x03BD, 0x0B3F, 0x035E, 0x0211,
+ 0x0281, 0x0291, 0x032E, 0x037D, 0x034E, 0x038E, 0x039F, 0x032F,
+ 0x033E, 0x038F, 0x039E
+};
+
+static const uint16_t percentile_arr_8x8_1[208] {
+ 0x0621, 0x3443, 0x47CD, 0x97AE, 0xFC62, 0x14F1, 0x24C2, 0x25DF,
+ 0x3C33, 0x1C52, 0x9C72, 0x0FBE, 0x0C5D, 0x343E, 0x24A3, 0x1551,
+ 0x5D32, 0x1CD2, 0x15EF, 0x4E31, 0x04DD, 0x1FDD, 0x174F, 0x0DD1,
+ 0x3E0D, 0x15FF, 0x0DA2, 0x1E03, 0x17BD, 0x177D, 0x14B3, 0x0471,
+ 0x0CAE, 0x1C1F, 0x04D1, 0x0F6E, 0x0DFE, 0x1C42, 0x0C16, 0x0D22,
+ 0x0C9F, 0x2C2E, 0x0FAD, 0x0571, 0x147D, 0x0C07, 0x04B2, 0x0F6D,
+ 0x0F5E, 0x07AF, 0x146D, 0x0C51, 0x0593, 0x2583, 0x0C4E, 0x040B,
+ 0x0C35, 0x0513, 0x0E91, 0x0406, 0x073F, 0x144D, 0x0561, 0x048F,
+ 0x0F01, 0x0F4E, 0x0CA2, 0x075F, 0x1682, 0x04E1, 0x0C1A, 0x04BD,
+ 0x0542, 0x0D41, 0x0DEE, 0x04CD, 0x0DCF, 0x04B1, 0x0C15, 0x0C3D,
+ 0x0423, 0x0592, 0x0DDE, 0x0422, 0x0432, 0x05FD, 0x0DC1, 0x05B1,
+ 0x0DCE, 0x0612, 0x0C2F, 0x0445, 0x0602, 0x0531, 0x0439, 0x0E81,
+ 0x0582, 0x0C61, 0x061D, 0x049E, 0x0405, 0x0409, 0x0DBE, 0x079F,
+ 0x0D21, 0x04C1, 0x0C0A, 0x0E13, 0x04AD, 0x040E, 0x0581, 0x0419,
+ 0x05DD, 0x0D03, 0x049D, 0x0449, 0x0429, 0x048E, 0x0DA1, 0x0425,
+ 0x0512, 0x0501, 0x0431, 0x0523, 0x0441, 0x042D, 0x040F, 0x0D7D,
+ 0x0511, 0x0502, 0x05BF, 0x04A1, 0x0C03, 0x0402, 0x079D, 0x05AE,
+ 0x075D, 0x057F, 0x041D, 0x048D, 0x042A, 0x0453, 0x05AF, 0x078D,
+ 0x0C0D, 0x073D, 0x0491, 0x0591, 0x05BD, 0x072D, 0x057E, 0x051F,
+ 0x0482, 0x0492, 0x041E, 0x0412, 0x0D9F, 0x0421, 0x0493, 0x0711,
+ 0x056E, 0x059E, 0x054E, 0x0611, 0x05ED, 0x074D, 0x070F, 0x056F,
+ 0x052F, 0x053F, 0x071F, 0x054F, 0x05CD, 0x0483, 0x055E, 0x072F,
+ 0x0E01, 0x0426, 0x058F, 0x0413, 0x078F, 0x071D, 0x055F, 0x058E,
+ 0x0411, 0x053E, 0x071E, 0x055D, 0x077E, 0x052E, 0x0692, 0x0417,
+ 0x070D, 0x078E, 0x070E, 0x072E, 0x041B, 0x060E, 0x0622, 0x0683,
+ 0x068D, 0x0702, 0x073E, 0x076F, 0x0781, 0x079E, 0x07BF, 0x07CE
+};
+
+static const packed_percentile_table block_pcd_8x8 {
+ 8, 8,
+ { 347, 208 },
+ { 1144, 267 },
+ { 0, 38 },
+ { percentile_arr_8x8_0, percentile_arr_8x8_1 }
+};
+#endif
+
+#if ASTCENC_BLOCK_MAX_TEXELS >= (10 * 5)
+static const uint16_t percentile_arr_10x5_0[274] {
+ 0x0165, 0xF975, 0xD866, 0xC056, 0xA946, 0x90C6, 0x90F5, 0x8963,
+ 0x80D6, 0x80E6, 0x60F3, 0x61C3, 0x59F2, 0xA927, 0x5075, 0x4847,
+ 0x5153, 0x4955, 0x49E2, 0x48B6, 0x41D2, 0x4943, 0x8305, 0x8172,
+ 0x4046, 0x4037, 0x40A7, 0x70B7, 0x7AC1, 0x31E3, 0x7027, 0x30E5,
+ 0x69D3, 0x99B3, 0x3315, 0x6115, 0x3136, 0x3076, 0x3173, 0x30D5,
+ 0x3106, 0x8962, 0x2916, 0x30C7, 0x5126, 0x30D3, 0x2956, 0x5117,
+ 0x2B41, 0x2AE1, 0x2A61, 0x29F1, 0x2306, 0x2145, 0x4A85, 0x2057,
+ 0x40E3, 0x4137, 0x3B21, 0x23C1, 0x2065, 0x1925, 0x51C2, 0x5225,
+ 0x4935, 0x1AD1, 0x23A1, 0x19D1, 0x1A71, 0x4055, 0x1873, 0x1A86,
+ 0x1295, 0x18F2, 0x28A6, 0x1952, 0x4AA5, 0x20B5, 0x10C5, 0x2AA2,
+ 0x11E1, 0x1107, 0x10D2, 0x2171, 0x1351, 0x3036, 0x1331, 0x1BEE,
+ 0x2035, 0x1045, 0x1313, 0x0A15, 0x1087, 0x1296, 0x13EF, 0x18E2,
+ 0x1151, 0x1086, 0x10F1, 0x08A5, 0x12C2, 0x1BFF, 0x1095, 0x1A62,
+ 0x1322, 0x0942, 0x1026, 0x1872, 0x1062, 0x0897, 0x1123, 0x08D1,
+ 0x1A06, 0x0806, 0x137F, 0x13B1, 0x13DF, 0x1A51, 0x09B1, 0x0A83,
+ 0x1015, 0x22F1, 0x0961, 0x0B81, 0x12B3, 0x0A35, 0x0AA3, 0x20B3,
+ 0x08C3, 0x2342, 0x0933, 0x0A33, 0x09A2, 0x10C2, 0x0896, 0x2205,
+ 0x0825, 0x20E1, 0x0922, 0x1242, 0x0B16, 0x0B32, 0x09A3, 0x0AC3,
+ 0x0BBF, 0x0B93, 0x0071, 0x0931, 0x0A41, 0x2392, 0x13FE, 0x09C1,
+ 0x0B07, 0x0016, 0x1182, 0x09B2, 0x0A26, 0x0132, 0x0941, 0x0A93,
+ 0x0992, 0x1063, 0x1217, 0x01FF, 0x11EE, 0x1216, 0x0B23, 0x0B82,
+ 0x0042, 0x1102, 0x0213, 0x0B6F, 0x09FE, 0x1207, 0x0807, 0x18B1,
+ 0x0253, 0x0AB1, 0x08A2, 0x13FD, 0x01FD, 0x1983, 0x0AB2, 0x0A31,
+ 0x016F, 0x0B11, 0x00B2, 0x0851, 0x0AD2, 0x0993, 0x0BDD, 0x12A1,
+ 0x017F, 0x0A97, 0x1022, 0x0383, 0x0843, 0x0A52, 0x03A2, 0x097E,
+ 0x0817, 0x03B2, 0x0A43, 0x09EF, 0x0A63, 0x0B33, 0x0B03, 0x0292,
+ 0x0272, 0x09CE, 0x0287, 0x136D, 0x0053, 0x0B12, 0x0083, 0x0892,
+ 0x0112, 0x1282, 0x03ED, 0x0852, 0x0301, 0x1391, 0x0232, 0x0B7E,
+ 0x0221, 0x08A3, 0x0BCD, 0x0BCF, 0x036E, 0x09DE, 0x0103, 0x03DE,
+ 0x0832, 0x0BAF, 0x0302, 0x13CE, 0x035F, 0x0093, 0x0A23, 0x01DF,
+ 0x0013, 0x0A22, 0x0023, 0x0113, 0x09AF, 0x01BF, 0x0033, 0x095F,
+ 0x0203, 0x0281, 0x09CF, 0x037D, 0x0201, 0x0B4D, 0x03AE, 0x03BE,
+ 0x0291, 0x035E, 0x038E, 0x0B9F, 0x03AD, 0x0202, 0x034F, 0x0211,
+ 0x035D, 0x0212, 0x032E, 0x039E, 0x033F, 0x034E, 0x03BD, 0x032F,
+ 0x033E, 0x038F
+};
+
+static const uint16_t percentile_arr_10x5_1[180] {
+ 0x0532, 0xFCA3, 0x3621, 0x6E82, 0x2CC2, 0x3D51, 0x3F01, 0x2691,
+ 0x17AE, 0x35A2, 0x74B3, 0x1603, 0x4433, 0x3C43, 0x6C35, 0x25D1,
+ 0x1D13, 0x15DF, 0x37CD, 0x0D93, 0x1D22, 0x0E81, 0x1452, 0x0CD2,
+ 0x37BE, 0x0CB2, 0x3407, 0x1523, 0x0C16, 0x0CB5, 0x0C96, 0x1486,
+ 0x2631, 0x1506, 0x0F4F, 0x1583, 0x0CD1, 0x2CA2, 0x2612, 0x1613,
+ 0x1602, 0x1F11, 0x179F, 0x17BD, 0x15B1, 0x0406, 0x1D41, 0x0CF1,
+ 0x0D31, 0x0442, 0x1C62, 0x0F6E, 0x077D, 0x0C51, 0x0445, 0x0D15,
+ 0x2592, 0x0CB1, 0x05EF, 0x0542, 0x17AF, 0x1425, 0x075E, 0x0FAD,
+ 0x0CC1, 0x0503, 0x0512, 0x15C1, 0x0C95, 0x0415, 0x0505, 0x0F4E,
+ 0x04A5, 0x0493, 0x0C32, 0x0F5F, 0x04E1, 0x0521, 0x0C85, 0x07DD,
+ 0x0582, 0x15FF, 0x05CF, 0x0405, 0x0D91, 0x05A1, 0x05FE, 0x0C23,
+ 0x0561, 0x0472, 0x0471, 0x0C22, 0x0DEE, 0x076D, 0x0502, 0x0426,
+ 0x0C61, 0x0D7D, 0x0525, 0x05DE, 0x0DCE, 0x079D, 0x0692, 0x0441,
+ 0x0C91, 0x05DD, 0x0511, 0x057F, 0x0611, 0x0DFD, 0x078D, 0x056E,
+ 0x0492, 0x04A1, 0x073F, 0x0C31, 0x05BE, 0x0483, 0x0571, 0x056F,
+ 0x0D9F, 0x0581, 0x0501, 0x057E, 0x05BF, 0x078F, 0x0516, 0x05ED,
+ 0x0402, 0x0F7E, 0x0482, 0x054E, 0x075D, 0x071F, 0x05CD, 0x0535,
+ 0x05AE, 0x0C11, 0x058F, 0x05AF, 0x0421, 0x0413, 0x0601, 0x054F,
+ 0x073D, 0x059E, 0x0487, 0x070F, 0x078E, 0x0781, 0x053E, 0x0403,
+ 0x072D, 0x055D, 0x05BD, 0x079E, 0x0D8E, 0x0412, 0x052E, 0x074D,
+ 0x053F, 0x051F, 0x070E, 0x055F, 0x072F, 0x052F, 0x070D, 0x055E,
+ 0x0417, 0x0453, 0x072E, 0x0622, 0x0683, 0x0702, 0x071D, 0x071E,
+ 0x073E, 0x076F, 0x07BF, 0x07CE
+};
+
+static const packed_percentile_table block_pcd_10x5 {
+ 10, 5,
+ { 274, 180 },
+ { 954, 324 },
+ { 0, 79 },
+ { percentile_arr_10x5_0, percentile_arr_10x5_1 }
+};
+#endif
+
+#if ASTCENC_BLOCK_MAX_TEXELS >= (10 * 6)
+static const uint16_t percentile_arr_10x6_0[325] {
+ 0x01A4, 0xF954, 0xA066, 0x9975, 0x80F5, 0x7056, 0x6918, 0x6963,
+ 0x58C6, 0x5946, 0x5928, 0x5174, 0x586F, 0xA0E6, 0x5108, 0x48D6,
+ 0x49E2, 0x40F3, 0x9172, 0x41F2, 0xB875, 0x3927, 0x39C3, 0xA953,
+ 0x3934, 0x3305, 0x30B6, 0x6943, 0x31D2, 0x3876, 0x3037, 0x2955,
+ 0x30A7, 0x32C1, 0x29B3, 0x3027, 0x287E, 0x30B7, 0x29E3, 0x5846,
+ 0x2B15, 0x2847, 0x3162, 0x5173, 0x4936, 0x285F, 0x48D3, 0x2164,
+ 0x4906, 0x20E5, 0x2915, 0x2116, 0x407F, 0x20D5, 0x2A61, 0x4117,
+ 0x20E3, 0x2126, 0x4148, 0x206E, 0x39D3, 0x2145, 0x41B4, 0x1B06,
+ 0x2114, 0x2165, 0x5321, 0x5A85, 0x1A4D, 0x1A1F, 0x19F1, 0x3341,
+ 0x184F, 0x1956, 0x3125, 0x30C7, 0x28F2, 0x1937, 0x1AE1, 0x1073,
+ 0x1BA1, 0x1935, 0x110C, 0x1BC1, 0x3A25, 0x19C2, 0x1295, 0x122E,
+ 0x1944, 0x11D1, 0x1124, 0x1857, 0x22D1, 0x2286, 0x1A2D, 0x12A2,
+ 0x2107, 0x1055, 0x2065, 0x0A71, 0x2152, 0x10C5, 0x10D2, 0x1331,
+ 0x08B5, 0x1171, 0x2836, 0x10A6, 0x0904, 0x123D, 0x20F1, 0x12A5,
+ 0x10E2, 0x107D, 0x1AF1, 0x1313, 0x0951, 0x11E1, 0x1B22, 0x1B51,
+ 0x0835, 0x101E, 0x0A5D, 0x0A15, 0x3045, 0x0A96, 0x08A5, 0x1142,
+ 0x12A3, 0x1872, 0x085D, 0x09B1, 0x100E, 0x0887, 0x0886, 0x086D,
+ 0x0933, 0x12B3, 0x0897, 0x08B3, 0x0A33, 0x0923, 0x1095, 0x0BEE,
+ 0x2BB1, 0x085E, 0x1283, 0x0A51, 0x1026, 0x0A06, 0x12C2, 0x08D1,
+ 0x11A2, 0x13BF, 0x08C3, 0x10C2, 0x0A3E, 0x0BDF, 0x0B81, 0x13EF,
+ 0x0A35, 0x0B16, 0x082F, 0x2161, 0x1B32, 0x0806, 0x084E, 0x11A3,
+ 0x1015, 0x1122, 0x2931, 0x0342, 0x0825, 0x0A0F, 0x0896, 0x0A05,
+ 0x0241, 0x09C1, 0x083F, 0x0A42, 0x0071, 0x0B07, 0x082E, 0x0393,
+ 0x12B1, 0x0A62, 0x0226, 0x0A2F, 0x0B92, 0x0063, 0x0932, 0x0862,
+ 0x09FF, 0x0A31, 0x00E1, 0x12B2, 0x09B2, 0x0AC3, 0x0941, 0x0293,
+ 0x1323, 0x104D, 0x003E, 0x083D, 0x0992, 0x1382, 0x03FF, 0x0A13,
+ 0x1016, 0x0A53, 0x0182, 0x1007, 0x0AA1, 0x080F, 0x0A16, 0x0A1E,
+ 0x0042, 0x0902, 0x13DD, 0x0BB2, 0x0A63, 0x00A2, 0x08B1, 0x03FE,
+ 0x1207, 0x08B2, 0x0B83, 0x09EE, 0x0311, 0x0A87, 0x0BAF, 0x03A2,
+ 0x09FD, 0x0051, 0x0B33, 0x020D, 0x09CE, 0x0217, 0x021D, 0x0817,
+ 0x020E, 0x0A4E, 0x001F, 0x0BFD, 0x0297, 0x0983, 0x0A92, 0x0252,
+ 0x0243, 0x0B03, 0x0193, 0x036F, 0x0B12, 0x0043, 0x0822, 0x0A21,
+ 0x01FE, 0x0853, 0x037F, 0x023F, 0x0BED, 0x02D2, 0x0B91, 0x0232,
+ 0x0282, 0x0912, 0x08A3, 0x0852, 0x0223, 0x0BCD, 0x0083, 0x0301,
+ 0x0832, 0x01EF, 0x0892, 0x0302, 0x0A72, 0x03DE, 0x0893, 0x0BCF,
+ 0x09DE, 0x03CE, 0x035F, 0x0833, 0x0023, 0x0103, 0x017E, 0x0813,
+ 0x01CF, 0x01BF, 0x016F, 0x0A22, 0x037E, 0x0113, 0x01AF, 0x0B6E,
+ 0x03BE, 0x0201, 0x0A03, 0x01DF, 0x036D, 0x03AE, 0x015F, 0x0281,
+ 0x033E, 0x0A02, 0x038E, 0x017F, 0x0291, 0x034D, 0x03BD, 0x0B7D,
+ 0x03AD, 0x0211, 0x0212, 0x034F, 0x032E, 0x039F, 0x034E, 0x035D,
+ 0x035E, 0x033F, 0x039E, 0x032F, 0x038F
+};
+
+static const uint16_t percentile_arr_10x6_1[199] {
+ 0x0621, 0xBD32, 0x5CA3, 0x1FAE, 0x64C2, 0x1D51, 0x6C33, 0xFC43,
+ 0x5CB3, 0x25A2, 0x2E82, 0x35D1, 0x4F01, 0x3FBE, 0x3691, 0x2DDF,
+ 0x2E03, 0x3FCD, 0x14D2, 0x1CF1, 0x0C52, 0x3C35, 0x2D22, 0x1513,
+ 0x1462, 0x54B2, 0x0E31, 0x4E81, 0x1593, 0x1D23, 0x1CD1, 0x14B5,
+ 0x2FBD, 0x0C07, 0x1D06, 0x0DEF, 0x14A2, 0x1612, 0x1F4F, 0x0C16,
+ 0x1F7D, 0x0C96, 0x0486, 0x1F9F, 0x0D42, 0x4583, 0x0E02, 0x0472,
+ 0x0DB1, 0x1613, 0x0FAD, 0x0D41, 0x0F11, 0x0E0D, 0x1C42, 0x143E,
+ 0x076E, 0x04B1, 0x0FAF, 0x0D61, 0x0531, 0x0C71, 0x0DFF, 0x0DFE,
+ 0x0406, 0x0C45, 0x0451, 0x0D15, 0x05C1, 0x2CC1, 0x141F, 0x0CE1,
+ 0x0FDD, 0x0C22, 0x0582, 0x0D92, 0x0571, 0x0F6D, 0x0C93, 0x045D,
+ 0x0F5E, 0x044D, 0x0423, 0x0D05, 0x0425, 0x0C95, 0x04A5, 0x0DCE,
+ 0x075F, 0x0E1D, 0x0503, 0x042E, 0x0D91, 0x0512, 0x0DDE, 0x05A1,
+ 0x074E, 0x0C32, 0x0431, 0x0415, 0x0D21, 0x05EE, 0x040E, 0x0DDD,
+ 0x0485, 0x1525, 0x0491, 0x0C26, 0x046D, 0x0C05, 0x05CF, 0x05FD,
+ 0x0E92, 0x073F, 0x0C0D, 0x043D, 0x0502, 0x0C1E, 0x041D, 0x0461,
+ 0x04A1, 0x0511, 0x0581, 0x05BD, 0x0C41, 0x059F, 0x05BF, 0x040F,
+ 0x0C7D, 0x0402, 0x054E, 0x057D, 0x0403, 0x078D, 0x05AE, 0x042D,
+ 0x0483, 0x079D, 0x0D7F, 0x0482, 0x0611, 0x056E, 0x0516, 0x05BE,
+ 0x0535, 0x044E, 0x05AF, 0x0DED, 0x042F, 0x0492, 0x058E, 0x078F,
+ 0x0412, 0x057E, 0x053E, 0x0F1F, 0x073D, 0x0601, 0x0501, 0x075D,
+ 0x059E, 0x05CD, 0x053F, 0x054F, 0x055E, 0x055D, 0x0421, 0x074D,
+ 0x051F, 0x072F, 0x0781, 0x0411, 0x0D6F, 0x077E, 0x0487, 0x070E,
+ 0x070F, 0x072D, 0x058F, 0x078E, 0x079E, 0x052E, 0x0413, 0x072E,
+ 0x071D, 0x052F, 0x055F, 0x073E, 0x0417, 0x0453, 0x060E, 0x0622,
+ 0x0683, 0x0702, 0x070D, 0x071E, 0x076F, 0x07BF, 0x07CE
+};
+
+static const packed_percentile_table block_pcd_10x6 {
+ 10, 6,
+ { 325, 199 },
+ { 922, 381 },
+ { 0, 78 },
+ { percentile_arr_10x6_0, percentile_arr_10x6_1 }
+};
+#endif
+
+#if ASTCENC_BLOCK_MAX_TEXELS >= (10 * 8)
+static const uint16_t percentile_arr_10x8_0[400] {
+ 0x0154, 0xAB34, 0xAD44, 0x8308, 0x7866, 0x7B64, 0x79A4, 0x7975,
+ 0x686A, 0x6908, 0xC514, 0x6174, 0x6128, 0x6118, 0x5B54, 0x5163,
+ 0xF856, 0x50F5, 0x986F, 0xDD34, 0x48FE, 0x4972, 0x48E6, 0x4146,
+ 0x48EE, 0x40F3, 0x4AC1, 0x38C6, 0x41E2, 0xBB05, 0x707E, 0x38D6,
+ 0x3927, 0x6B14, 0x384B, 0x3948, 0x3153, 0x385A, 0x3134, 0x6B15,
+ 0x39F2, 0x30CF, 0x3143, 0x91D2, 0x31C3, 0x60EF, 0x5973, 0x3076,
+ 0x28D3, 0x3261, 0x2875, 0x28DE, 0x290C, 0x51E3, 0x28A7, 0x20E3,
+ 0x2962, 0x2B06, 0x2917, 0x483B, 0x20B6, 0x2D24, 0x206E, 0x285F,
+ 0x20B7, 0x2936, 0x4047, 0x2037, 0x20DF, 0x28BF, 0x21B4, 0x21B3,
+ 0x1D08, 0x2027, 0x404F, 0x3846, 0x2116, 0x187F, 0x1879, 0x2285,
+ 0x1A29, 0x3915, 0x4873, 0x1955, 0x3114, 0x1B44, 0x2165, 0x107A,
+ 0x1956, 0x6137, 0x1106, 0x3145, 0x1B21, 0x19D3, 0x12AD, 0x1B41,
+ 0x1AD1, 0x1126, 0x18F2, 0x282B, 0x40E5, 0x20D5, 0x2A0A, 0x284A,
+ 0x1286, 0x1295, 0x121A, 0x2A0B, 0x321B, 0x122D, 0x10FD, 0x13A1,
+ 0x32A2, 0x12E1, 0x1164, 0x13C1, 0x124D, 0x1239, 0x4504, 0x10C7,
+ 0x22F1, 0x11F1, 0x0AC2, 0x2125, 0x1225, 0x0B04, 0x1107, 0x1069,
+ 0x1A19, 0x13BF, 0x2A96, 0x08D2, 0x1271, 0x0952, 0x2BDF, 0x0B31,
+ 0x1251, 0x2124, 0x0B13, 0x12BD, 0x1233, 0x13EE, 0x2144, 0x0B16,
+ 0x0A15, 0x18E2, 0x08DD, 0x1097, 0x0857, 0x0B24, 0x0AA5, 0x12A3,
+ 0x11C2, 0x11D1, 0x10CE, 0x0865, 0x123D, 0x08B3, 0x0B51, 0x1971,
+ 0x0A41, 0x0A06, 0x1039, 0x080A, 0x0B22, 0x0923, 0x0836, 0x08C3,
+ 0x0A1F, 0x1072, 0x080B, 0x0935, 0x0855, 0x18A6, 0x0A42, 0x1133,
+ 0x0A83, 0x0A09, 0x0ACD, 0x0A2E, 0x0887, 0x083A, 0x10C5, 0x085E,
+ 0x13B1, 0x087D, 0x0819, 0x0A9F, 0x0049, 0x08F1, 0x0BEF, 0x1161,
+ 0x0B42, 0x09E1, 0x0A05, 0x0904, 0x12AE, 0x029E, 0x0A31, 0x09FF,
+ 0x0951, 0x0859, 0x001A, 0x082F, 0x0B81, 0x08B5, 0x0A35, 0x082A,
+ 0x08ED, 0x1142, 0x1262, 0x0B32, 0x08A5, 0x12D2, 0x03DD, 0x0B07,
+ 0x18AE, 0x083F, 0x00AF, 0x0AB3, 0x086D, 0x0287, 0x0A93, 0x025D,
+ 0x0816, 0x13FF, 0x0A8D, 0x005D, 0x08D1, 0x0392, 0x0845, 0x0AC3,
+ 0x08C2, 0x01A3, 0x0AB1, 0x09A2, 0x005B, 0x0B93, 0x02B2, 0x1086,
+ 0x001B, 0x0863, 0x0216, 0x0AA1, 0x0896, 0x0A8F, 0x084E, 0x0A8E,
+ 0x0A53, 0x0026, 0x0A26, 0x0382, 0x0807, 0x0862, 0x0029, 0x0871,
+ 0x00BD, 0x0835, 0x024E, 0x0806, 0x0941, 0x0895, 0x03AF, 0x0A13,
+ 0x0932, 0x03ED, 0x0BFD, 0x0207, 0x0B83, 0x0993, 0x09B1, 0x03CD,
+ 0x0A3E, 0x03FE, 0x0A21, 0x0015, 0x0B11, 0x0A43, 0x00E1, 0x136F,
+ 0x00BE, 0x00A2, 0x0842, 0x0043, 0x0825, 0x082E, 0x0A2A, 0x03DE,
+ 0x0BA2, 0x0122, 0x0BCF, 0x004D, 0x0323, 0x09C1, 0x0292, 0x083E,
+ 0x0252, 0x0017, 0x0A72, 0x00CD, 0x0182, 0x0A63, 0x0131, 0x09B2,
+ 0x0303, 0x0902, 0x0053, 0x035F, 0x0A32, 0x003D, 0x0992, 0x0A2F,
+ 0x03B2, 0x0ABE, 0x009F, 0x0183, 0x0312, 0x08B1, 0x0B02, 0x0A17,
+ 0x0B7F, 0x0333, 0x0297, 0x0A23, 0x020F, 0x0282, 0x0851, 0x0822,
+ 0x03CE, 0x01EE, 0x000E, 0x08B2, 0x0083, 0x0A1D, 0x00A3, 0x0222,
+ 0x088F, 0x0112, 0x029D, 0x0092, 0x0A3F, 0x0391, 0x089E, 0x0301,
+ 0x01FD, 0x09BF, 0x01CE, 0x0852, 0x01FE, 0x0013, 0x0903, 0x088E,
+ 0x037E, 0x021E, 0x01EF, 0x095F, 0x016F, 0x09DE, 0x03BE, 0x020E,
+ 0x0113, 0x01DF, 0x080F, 0x020D, 0x0833, 0x03AE, 0x0032, 0x03BD,
+ 0x0823, 0x001E, 0x01AF, 0x0203, 0x034F, 0x0093, 0x0A81, 0x036E,
+ 0x0291, 0x038E, 0x0A01, 0x001F, 0x017F, 0x01CF, 0x017E, 0x0202,
+ 0x0BAD, 0x0211, 0x035D, 0x035E, 0x039F, 0x0212, 0x032E, 0x033F,
+ 0x034D, 0x034E, 0x036D, 0x032F, 0x033E, 0x037D, 0x038F, 0x039E
+};
+
+static const uint16_t percentile_arr_10x8_1[221] {
+ 0x0621, 0xDFAE, 0x2443, 0x54C2, 0x37CD, 0x1CF1, 0xFCA3, 0x14D2,
+ 0x2D32, 0x5551, 0x7DDF, 0x5C33, 0x15D1, 0x3462, 0x24B3, 0x7452,
+ 0x5FBE, 0x6472, 0x65A2, 0x1D06, 0x445D, 0x15EF, 0x0E31, 0x1D71,
+ 0x343E, 0x0D42, 0x0CDD, 0x1F01, 0x4691, 0x1435, 0x0E82, 0x0DFF,
+ 0x17DD, 0x0D22, 0x24B2, 0x1603, 0x04B5, 0x24AE, 0x060D, 0x2D13,
+ 0x0C7D, 0x0496, 0x17BD, 0x1F4F, 0x1F7D, 0x1486, 0x0593, 0x1C16,
+ 0x0C07, 0x15FE, 0x041F, 0x14D1, 0x0C9F, 0x0E81, 0x0D15, 0x27AF,
+ 0x0C2E, 0x0D23, 0x176E, 0x0FAD, 0x1C06, 0x1561, 0x0DB1, 0x040B,
+ 0x1C4E, 0x0D83, 0x1711, 0x0C42, 0x0C71, 0x1C1A, 0x0D25, 0x04A2,
+ 0x0C45, 0x076D, 0x0F9F, 0x075F, 0x0E12, 0x046D, 0x048F, 0x1D92,
+ 0x0602, 0x0C39, 0x174E, 0x0C51, 0x0CA1, 0x075E, 0x05C1, 0x14BD,
+ 0x0D31, 0x0423, 0x0F3F, 0x0495, 0x0C93, 0x049E, 0x0D05, 0x04E1,
+ 0x0DEE, 0x0415, 0x04B1, 0x0503, 0x0CCD, 0x042F, 0x0DCF, 0x044D,
+ 0x0541, 0x1582, 0x05DE, 0x0D01, 0x0487, 0x040A, 0x0516, 0x0CA5,
+ 0x05FD, 0x05BF, 0x057D, 0x0DA1, 0x0426, 0x040F, 0x071F, 0x0613,
+ 0x0432, 0x0D12, 0x043D, 0x0425, 0x0461, 0x061D, 0x0D21, 0x0591,
+ 0x079D, 0x048D, 0x0429, 0x0C49, 0x04C1, 0x042A, 0x040E, 0x0485,
+ 0x0511, 0x0405, 0x0502, 0x0441, 0x0C19, 0x0692, 0x0535, 0x058F,
+ 0x041D, 0x059F, 0x072D, 0x04AD, 0x049D, 0x05CE, 0x048E, 0x0C31,
+ 0x057F, 0x078D, 0x0409, 0x041E, 0x05AE, 0x0611, 0x058E, 0x05DD,
+ 0x05CD, 0x056E, 0x0483, 0x073D, 0x054E, 0x0D9E, 0x0402, 0x0491,
+ 0x040D, 0x056F, 0x042D, 0x0581, 0x0421, 0x057E, 0x0781, 0x053E,
+ 0x0482, 0x078F, 0x0413, 0x052E, 0x0601, 0x0422, 0x0492, 0x055E,
+ 0x05BE, 0x0F9E, 0x072F, 0x074D, 0x0412, 0x070F, 0x075D, 0x05BD,
+ 0x051F, 0x071D, 0x073E, 0x077E, 0x0403, 0x0411, 0x078E, 0x055D,
+ 0x05AF, 0x05ED, 0x052F, 0x053F, 0x070D, 0x070E, 0x072E, 0x054F,
+ 0x0417, 0x041B, 0x0453, 0x055F, 0x060E, 0x0622, 0x0683, 0x068D,
+ 0x0702, 0x071E, 0x076F, 0x07BF, 0x07CE
+};
+
+static const packed_percentile_table block_pcd_10x8 =
+{
+ 10, 8,
+ { 400, 221 },
+ { 1119, 376 },
+ { 0, 52 },
+ { percentile_arr_10x8_0, percentile_arr_10x8_1 }
+};
+#endif
+
+#if ASTCENC_BLOCK_MAX_TEXELS >= (10 * 10)
+static const uint16_t percentile_arr_10x10_0[453] {
+ 0x0334, 0x9514, 0x8954, 0x806A, 0x6F14, 0x6724, 0x6108, 0x6364,
+ 0x5175, 0x5D44, 0x5866, 0x5118, 0x5308, 0xA179, 0x5128, 0xF534,
+ 0x49A4, 0x5354, 0x9174, 0x486F, 0x48EA, 0x40F3, 0x4963, 0x414A,
+ 0xF8F9, 0x3984, 0x4172, 0x387E, 0x405A, 0x38DA, 0x38F5, 0x9B05,
+ 0x30EE, 0x32C1, 0x3261, 0x3D08, 0x31E2, 0x3056, 0x292B, 0x3146,
+ 0x3127, 0x3315, 0x58CA, 0x58E6, 0x290C, 0x3314, 0x8134, 0x28E3,
+ 0x28FE, 0x2948, 0x28C6, 0x78DE, 0x28BB, 0x68D6, 0x286E, 0x2173,
+ 0x2962, 0x21D2, 0x205F, 0x49F2, 0x2917, 0x2306, 0x207F, 0x404F,
+ 0x2153, 0x2943, 0x20CF, 0x21C3, 0x2073, 0x20D3, 0x2136, 0x183B,
+ 0x430A, 0x40A7, 0x18B6, 0x2079, 0x2309, 0x2075, 0x184B, 0x20EF,
+ 0x187A, 0x7837, 0x1B19, 0x20AB, 0x18BA, 0x20B7, 0x1994, 0x19E3,
+ 0x21B4, 0x49B3, 0x38BF, 0x193B, 0x1876, 0x182B, 0x30F2, 0x193A,
+ 0x1827, 0x1965, 0x1914, 0x184A, 0x4047, 0x1916, 0x1285, 0x1937,
+ 0x122D, 0x1915, 0x1321, 0x1955, 0x1046, 0x191B, 0x2106, 0x2919,
+ 0x1344, 0x1524, 0x12E1, 0x3926, 0x10E5, 0x2295, 0x1159, 0x1145,
+ 0x10DF, 0x124D, 0x1271, 0x092A, 0x2169, 0x1704, 0x22A2, 0x1164,
+ 0x13EE, 0x12F1, 0x0AD1, 0x128A, 0x110A, 0x11D3, 0x1286, 0x115A,
+ 0x2BA1, 0x0BBF, 0x3956, 0x2A89, 0x12AD, 0x10E9, 0x0B41, 0x1A29,
+ 0x2225, 0x08FD, 0x1107, 0x08D5, 0x191A, 0x1125, 0x1A96, 0x0B04,
+ 0x18D9, 0x2B16, 0x11F1, 0x0A33, 0x0924, 0x131A, 0x1149, 0x1324,
+ 0x0BEF, 0x0A99, 0x08CB, 0x123D, 0x1331, 0x0BDF, 0x0872, 0x22A3,
+ 0x0AC2, 0x1144, 0x0D04, 0x08D2, 0x08CE, 0x0AA9, 0x0A9A, 0x0B13,
+ 0x1251, 0x0865, 0x1069, 0x0897, 0x1215, 0x18B3, 0x1A62, 0x08C7,
+ 0x185E, 0x10E2, 0x0AA5, 0x21FF, 0x090B, 0x0952, 0x09E1, 0x0A42,
+ 0x08F1, 0x0A06, 0x0B22, 0x087D, 0x1139, 0x021F, 0x122E, 0x082F,
+ 0x09C2, 0x0887, 0x0A0A, 0x03C1, 0x0929, 0x0A5D, 0x0A83, 0x0BFF,
+ 0x0935, 0x085B, 0x0104, 0x08DD, 0x0923, 0x083F, 0x0241, 0x09D1,
+ 0x0A39, 0x0863, 0x0A8B, 0x08A6, 0x008B, 0x1133, 0x13B1, 0x089B,
+ 0x0AB3, 0x0036, 0x0BDD, 0x08ED, 0x0857, 0x0971, 0x0219, 0x1235,
+ 0x0AB1, 0x0ACD, 0x036F, 0x0A31, 0x08AA, 0x003A, 0x08C3, 0x0A05,
+ 0x02BD, 0x0B92, 0x0B07, 0x12B2, 0x08C5, 0x0B51, 0x0381, 0x0A8D,
+ 0x01A3, 0x0896, 0x0855, 0x0BFD, 0x005D, 0x0BFE, 0x023E, 0x08AF,
+ 0x00B9, 0x0A93, 0x00B5, 0x0862, 0x0A0B, 0x0A09, 0x0A72, 0x0332,
+ 0x0AA1, 0x08C9, 0x024E, 0x1382, 0x0951, 0x00A5, 0x0A2A, 0x0059,
+ 0x0A9E, 0x0B42, 0x004E, 0x0942, 0x03ED, 0x09B2, 0x02D2, 0x0849,
+ 0x0035, 0x0216, 0x0961, 0x0BAF, 0x00AE, 0x0826, 0x0287, 0x0A1A,
+ 0x0393, 0x0221, 0x09A2, 0x086D, 0x0226, 0x0871, 0x0039, 0x082A,
+ 0x08C2, 0x08E1, 0x0845, 0x0207, 0x0B23, 0x0015, 0x00D1, 0x0B83,
+ 0x037F, 0x0252, 0x08A9, 0x0099, 0x0A13, 0x0053, 0x0807, 0x03CD,
+ 0x0BDE, 0x0016, 0x089A, 0x0232, 0x035F, 0x0A8E, 0x0AC3, 0x022F,
+ 0x0263, 0x0829, 0x004D, 0x0132, 0x0806, 0x0311, 0x01B1, 0x0941,
+ 0x0086, 0x000B, 0x1122, 0x0025, 0x0842, 0x00BD, 0x0BCF, 0x03A2,
+ 0x0043, 0x0B03, 0x0895, 0x0A8F, 0x008A, 0x09EF, 0x0253, 0x0A1B,
+ 0x0182, 0x0243, 0x0A92, 0x00CD, 0x083E, 0x030B, 0x0223, 0x081A,
+ 0x0A9F, 0x0193, 0x00BE, 0x0017, 0x0931, 0x0391, 0x037E, 0x09C1,
+ 0x0312, 0x0333, 0x03B2, 0x083D, 0x08B1, 0x00B2, 0x002E, 0x021D,
+ 0x0A9D, 0x0192, 0x02AE, 0x0102, 0x0022, 0x081B, 0x0222, 0x009E,
+ 0x021E, 0x000A, 0x089F, 0x0217, 0x0BCE, 0x0052, 0x020F, 0x0A97,
+ 0x0282, 0x008E, 0x0A3F, 0x01FD, 0x00A3, 0x0019, 0x08A2, 0x0301,
+ 0x036E, 0x01FE, 0x03BE, 0x0ABE, 0x01CE, 0x0302, 0x029B, 0x0051,
+ 0x0883, 0x008F, 0x0BAE, 0x01DF, 0x0183, 0x0912, 0x000E, 0x020D,
+ 0x01EE, 0x0B4F, 0x0033, 0x0103, 0x020E, 0x0832, 0x01AF, 0x0913,
+ 0x01DE, 0x0203, 0x001E, 0x0092, 0x0093, 0x000F, 0x015F, 0x0291,
+ 0x0281, 0x0813, 0x001F, 0x01CF, 0x033F, 0x0023, 0x01BF, 0x0202,
+ 0x016F, 0x017E, 0x03AD, 0x0201, 0x034E, 0x0BBD, 0x036D, 0x017F,
+ 0x0211, 0x038E, 0x0212, 0x032E, 0x034D, 0x035E, 0x037D, 0x039E,
+ 0x032F, 0x033E, 0x035D, 0x038F, 0x039F
+};
+
+static const uint16_t percentile_arr_10x10_1[234] {
+ 0x07CD, 0x6E21, 0x24F1, 0x8443, 0xD7AE, 0x24C2, 0x1C62, 0xCCA3,
+ 0x1C33, 0xFDEF, 0x2532, 0x55DF, 0x1472, 0x6C3E, 0x14D2, 0x34DD,
+ 0x1452, 0x745D, 0x4D51, 0x8DD1, 0x247D, 0x75FF, 0x0CB3, 0x17BE,
+ 0x6CAE, 0x17DD, 0x1571, 0x3D06, 0x4E31, 0x0DA2, 0x67BD, 0x160D,
+ 0x2C4E, 0x0D22, 0x176E, 0x3CB2, 0x142E, 0x4DFE, 0x0F4F, 0x1435,
+ 0x0F01, 0x0D42, 0x0F7D, 0x0CB5, 0x1E03, 0x149F, 0x1C96, 0x141F,
+ 0x14B9, 0x0FAF, 0x0439, 0x0E91, 0x2682, 0x1D13, 0x1FAD, 0x0407,
+ 0x3471, 0x0C86, 0x0F6D, 0x0D15, 0x0D61, 0x040B, 0x0C6D, 0x0C16,
+ 0x0C9A, 0x0D0A, 0x0593, 0x0CD1, 0x248F, 0x0C2F, 0x3C42, 0x1523,
+ 0x0445, 0x0E81, 0x0CA2, 0x1525, 0x0406, 0x1C8A, 0x0C1A, 0x04BD,
+ 0x0F5E, 0x0F3F, 0x1F4E, 0x0E1D, 0x0423, 0x0DCF, 0x044D, 0x0D92,
+ 0x0583, 0x0DB1, 0x1449, 0x15EE, 0x0F5F, 0x079F, 0x0D19, 0x0409,
+ 0x04CD, 0x05FD, 0x143D, 0x0612, 0x0D03, 0x0D82, 0x04B1, 0x0C95,
+ 0x0C2A, 0x049E, 0x05AF, 0x0D31, 0x05BE, 0x04E1, 0x0D05, 0x0516,
+ 0x0711, 0x05C1, 0x0509, 0x0D41, 0x0493, 0x048E, 0x0602, 0x05BF,
+ 0x0CA5, 0x0529, 0x0535, 0x0D12, 0x0539, 0x0451, 0x0C29, 0x071F,
+ 0x040A, 0x0F3D, 0x0432, 0x059F, 0x0425, 0x0C99, 0x05DE, 0x05CE,
+ 0x0C0F, 0x0489, 0x051A, 0x0501, 0x0415, 0x057F, 0x0431, 0x0E13,
+ 0x040D, 0x041D, 0x075D, 0x0C53, 0x0502, 0x04C1, 0x049D, 0x0426,
+ 0x040E, 0x05A1, 0x055F, 0x0781, 0x0591, 0x04A9, 0x048B, 0x0D8E,
+ 0x052E, 0x0412, 0x0521, 0x0405, 0x04AD, 0x074D, 0x0611, 0x077E,
+ 0x078F, 0x078D, 0x048D, 0x041E, 0x0487, 0x0461, 0x0C85, 0x05ED,
+ 0x0402, 0x0483, 0x0419, 0x0511, 0x0491, 0x0482, 0x059E, 0x068D,
+ 0x055D, 0x072E, 0x05DD, 0x054E, 0x0441, 0x0422, 0x052F, 0x057D,
+ 0x072D, 0x079D, 0x0CA1, 0x072F, 0x079E, 0x0581, 0x042D, 0x055E,
+ 0x0601, 0x0413, 0x0692, 0x0403, 0x051F, 0x053F, 0x054F, 0x05CD,
+ 0x070F, 0x071D, 0x05AE, 0x05BD, 0x0492, 0x056E, 0x0411, 0x0417,
+ 0x041B, 0x0421, 0x053E, 0x056F, 0x057E, 0x058F, 0x060E, 0x0622,
+ 0x0683, 0x0702, 0x070D, 0x070E, 0x071E, 0x073E, 0x076F, 0x078E,
+ 0x07BF, 0x07CE
+};
+
+static const packed_percentile_table block_pcd_10x10 {
+ 10, 10,
+ { 453, 234 },
+ { 1095, 472 },
+ { 0, 70 },
+ { percentile_arr_10x10_0, percentile_arr_10x10_1 }
+};
+#endif
+
+#if ASTCENC_BLOCK_MAX_TEXELS >= (12 * 10)
+static const uint16_t percentile_arr_12x10_0[491] {
+ 0x0334, 0x9954, 0x8514, 0x7128, 0x6364, 0xC174, 0x5D34, 0x5866,
+ 0x5975, 0x5354, 0xAF14, 0x506A, 0x5108, 0x5724, 0x5308, 0x4544,
+ 0x4918, 0x4064, 0x49E2, 0x4179, 0x8163, 0x4054, 0xF81C, 0x394A,
+ 0x38F3, 0x4172, 0x38F5, 0xA06F, 0x68EA, 0x69F2, 0x3134, 0x31A4,
+ 0x305A, 0x68DA, 0x3056, 0x3146, 0x31F5, 0x3148, 0x5A61, 0x32C1,
+ 0x31D2, 0x307E, 0x29E3, 0x30E6, 0x59C3, 0x2984, 0x29B6, 0x28F9,
+ 0x5204, 0x28EE, 0x50CA, 0x2997, 0x48C6, 0x4838, 0x2953, 0x200C,
+ 0x2943, 0x2173, 0x2D08, 0x4162, 0x29B4, 0x2314, 0x21B3, 0x212B,
+ 0x210C, 0x48E3, 0x60DE, 0x205F, 0x20FE, 0x2028, 0x21A6, 0x404F,
+ 0x20D6, 0x2214, 0x2127, 0x1873, 0x40CF, 0x206E, 0x1B09, 0x21C6,
+ 0x2075, 0x19D5, 0x2305, 0x18D3, 0x2076, 0x1804, 0x230A, 0x304B,
+ 0x20BB, 0x18B6, 0x1936, 0x1B19, 0x3037, 0x187F, 0x18A7, 0x1B85,
+ 0x30BA, 0x183B, 0x1027, 0x18EF, 0x1B21, 0x1879, 0x10AB, 0x1917,
+ 0x1114, 0x18BF, 0x1074, 0x1994, 0x2847, 0x111B, 0x28F2, 0x11E5,
+ 0x19A7, 0x113A, 0x1046, 0x28B7, 0x207A, 0x182B, 0x1155, 0x104A,
+ 0x1344, 0x293B, 0x11D3, 0x2014, 0x1044, 0x1018, 0x13A1, 0x1315,
+ 0x2524, 0x20DF, 0x10E5, 0x1126, 0x12A2, 0x1824, 0x2271, 0x11F1,
+ 0x2964, 0x12D1, 0x115A, 0x092A, 0x2341, 0x1A2D, 0x12E1, 0x090A,
+ 0x13BF, 0x0A4D, 0x2119, 0x0BC1, 0x1233, 0x1A8A, 0x2008, 0x1159,
+ 0x1A89, 0x08D5, 0x1156, 0x0834, 0x13EE, 0x1169, 0x1187, 0x1AA3,
+ 0x1229, 0x1331, 0x0A85, 0x0937, 0x1704, 0x08FD, 0x2124, 0x0B13,
+ 0x1251, 0x0AAD, 0x082C, 0x091A, 0x18D9, 0x0A99, 0x1848, 0x18E9,
+ 0x0B95, 0x1144, 0x0AF1, 0x1A25, 0x131A, 0x09C5, 0x0986, 0x1BDF,
+ 0x0B24, 0x0965, 0x1262, 0x0949, 0x0872, 0x09C2, 0x12C2, 0x0916,
+ 0x085E, 0x0B06, 0x08CB, 0x08C7, 0x1242, 0x1BEF, 0x0A9A, 0x1152,
+ 0x08B3, 0x0AA9, 0x090B, 0x08D2, 0x1B22, 0x0B04, 0x0865, 0x0A15,
+ 0x1286, 0x0A83, 0x0A95, 0x09D1, 0x0A06, 0x0196, 0x1139, 0x0A3D,
+ 0x0933, 0x13B1, 0x0123, 0x0D04, 0x08E2, 0x122E, 0x08A6, 0x00CE,
+ 0x0A31, 0x1241, 0x0B51, 0x1057, 0x1171, 0x007D, 0x1145, 0x0A0A,
+ 0x0129, 0x09FF, 0x089B, 0x085B, 0x0063, 0x0AB1, 0x0A1F, 0x0A5D,
+ 0x0AA5, 0x0036, 0x0904, 0x0B86, 0x0A8B, 0x0897, 0x11E1, 0x0332,
+ 0x083F, 0x0A19, 0x02B3, 0x0859, 0x08C3, 0x0855, 0x11B5, 0x01A5,
+ 0x0AB2, 0x0392, 0x10DD, 0x09A3, 0x00ED, 0x0907, 0x1161, 0x002F,
+ 0x0887, 0x0216, 0x0ABD, 0x0B81, 0x0A93, 0x0A21, 0x003A, 0x0ACD,
+ 0x0AA1, 0x0A35, 0x0272, 0x0BDD, 0x03FE, 0x0BAF, 0x0869, 0x0213,
+ 0x088B, 0x020B, 0x00B5, 0x1035, 0x08F1, 0x0151, 0x0A4E, 0x0239,
+ 0x0BA2, 0x00AA, 0x0896, 0x0382, 0x0A08, 0x0A05, 0x0A09, 0x0142,
+ 0x086D, 0x004E, 0x0B23, 0x0106, 0x0807, 0x036F, 0x0995, 0x03FD,
+ 0x08AF, 0x08C5, 0x0062, 0x0053, 0x0B42, 0x0826, 0x021A, 0x01A2,
+ 0x09B1, 0x00C9, 0x09B2, 0x0045, 0x0207, 0x08B9, 0x00A5, 0x0AD2,
+ 0x0095, 0x003E, 0x0A32, 0x0383, 0x0849, 0x0135, 0x029E, 0x0A26,
+ 0x023E, 0x0BFF, 0x0A52, 0x0311, 0x001B, 0x0915, 0x0A8D, 0x0223,
+ 0x022A, 0x0BED, 0x0086, 0x0A96, 0x0222, 0x035F, 0x0A43, 0x085D,
+ 0x0303, 0x0393, 0x0A63, 0x082A, 0x037F, 0x0932, 0x0043, 0x0292,
+ 0x03CD, 0x0BDE, 0x009F, 0x0125, 0x08A9, 0x0253, 0x0015, 0x0192,
+ 0x0A17, 0x08C2, 0x0316, 0x00D1, 0x0282, 0x0871, 0x0312, 0x0122,
+ 0x0A9F, 0x02AE, 0x0006, 0x0A8E, 0x08E1, 0x0016, 0x0B0B, 0x00AE,
+ 0x0025, 0x0193, 0x0AC3, 0x0017, 0x0307, 0x00BD, 0x08BE, 0x0039,
+ 0x0BB2, 0x021B, 0x01FD, 0x084D, 0x03CE, 0x00A3, 0x0302, 0x0BCF,
+ 0x0033, 0x0391, 0x028F, 0x0852, 0x0287, 0x008A, 0x0333, 0x080B,
+ 0x0131, 0x01C1, 0x037E, 0x0A0F, 0x00B1, 0x002E, 0x0099, 0x0902,
+ 0x009A, 0x003D, 0x0982, 0x0301, 0x00CD, 0x0941, 0x0042, 0x0183,
+ 0x029D, 0x08A2, 0x021D, 0x001A, 0x0A97, 0x01EF, 0x01CE, 0x0051,
+ 0x0BAE, 0x022F, 0x03BE, 0x021E, 0x000A, 0x09DF, 0x0029, 0x020D,
+ 0x02BE, 0x029B, 0x09EE, 0x00B2, 0x0912, 0x036E, 0x009E, 0x0022,
+ 0x0019, 0x0892, 0x0032, 0x01FE, 0x0083, 0x023F, 0x0B96, 0x000E,
+ 0x008F, 0x0113, 0x0103, 0x001E, 0x0A0E, 0x0013, 0x008E, 0x0281,
+ 0x09AF, 0x017E, 0x0203, 0x016F, 0x0291, 0x0023, 0x0093, 0x03BD,
+ 0x001F, 0x01CF, 0x01DE, 0x0201, 0x01BF, 0x0B4F, 0x000F, 0x0202,
+ 0x037D, 0x038E, 0x0211, 0x0212, 0x034E, 0x039F, 0x03AD, 0x015F,
+ 0x017F, 0x032E, 0x033F, 0x034D, 0x035E, 0x036D, 0x032F, 0x033E,
+ 0x035D, 0x038F, 0x039E
+};
+
+static const uint16_t percentile_arr_12x10_1[240] {
+ 0x0621, 0xA443, 0xFCC2, 0x3CA3, 0x1D32, 0x14F1, 0x7462, 0x1433,
+ 0x27CD, 0x2571, 0x57AE, 0x5DD1, 0x64B3, 0x44D2, 0x2C72, 0x25A2,
+ 0x1E31, 0x55DF, 0x4C52, 0x1DEF, 0x0D51, 0x3C5D, 0x3C3E, 0x74DD,
+ 0x347D, 0x27BE, 0x5CB5, 0x17DD, 0x2C14, 0x0CAE, 0x24B2, 0x15FF,
+ 0x2701, 0x0D42, 0x1FBD, 0x0C35, 0x1603, 0x060D, 0x1D93, 0x0C96,
+ 0x1C07, 0x1522, 0x0D06, 0x0F4F, 0x0C9F, 0x1F6E, 0x0D86, 0x0C2E,
+ 0x1DFE, 0x0682, 0x1E91, 0x0F7D, 0x0C86, 0x040B, 0x1513, 0x044E,
+ 0x14D1, 0x0C39, 0x14B9, 0x1C71, 0x05B1, 0x0C1F, 0x0681, 0x1445,
+ 0x0C16, 0x0D95, 0x1583, 0x0D61, 0x0FAD, 0x1442, 0x048F, 0x0D0A,
+ 0x049A, 0x0F6D, 0x146D, 0x0C2F, 0x0D25, 0x0406, 0x0C1A, 0x0D23,
+ 0x0612, 0x0FAF, 0x0F11, 0x0592, 0x0515, 0x14E1, 0x0602, 0x048A,
+ 0x0E1D, 0x0CBD, 0x0F9F, 0x0423, 0x075E, 0x174E, 0x0426, 0x0404,
+ 0x0C22, 0x0CA2, 0x0DEE, 0x0CA5, 0x0F3F, 0x05C1, 0x0CCD, 0x0503,
+ 0x044D, 0x0D16, 0x0449, 0x0D82, 0x0613, 0x0585, 0x0519, 0x0C95,
+ 0x075F, 0x0D35, 0x04B1, 0x0509, 0x0531, 0x0DA1, 0x049E, 0x040A,
+ 0x05CF, 0x0D41, 0x0415, 0x0692, 0x05FD, 0x0C25, 0x04A1, 0x0529,
+ 0x0591, 0x0C93, 0x057F, 0x04C1, 0x0512, 0x051A, 0x078D, 0x0451,
+ 0x0C0F, 0x0487, 0x0611, 0x0432, 0x042A, 0x05AF, 0x0461, 0x072D,
+ 0x0409, 0x0405, 0x0D39, 0x05DE, 0x048E, 0x0499, 0x0483, 0x04A9,
+ 0x0491, 0x042D, 0x049D, 0x0429, 0x040E, 0x05AE, 0x0521, 0x043D,
+ 0x0581, 0x05DD, 0x0492, 0x0CAD, 0x041E, 0x058F, 0x071F, 0x072F,
+ 0x0419, 0x073D, 0x057D, 0x0511, 0x05CE, 0x041D, 0x0485, 0x056E,
+ 0x0412, 0x0431, 0x05BF, 0x0441, 0x054E, 0x0489, 0x0421, 0x0502,
+ 0x0408, 0x040D, 0x051F, 0x059F, 0x073E, 0x078F, 0x0482, 0x079D,
+ 0x0C02, 0x05BE, 0x048B, 0x0411, 0x0505, 0x057E, 0x052E, 0x074D,
+ 0x077E, 0x054F, 0x0601, 0x055F, 0x068D, 0x070D, 0x070F, 0x071E,
+ 0x072E, 0x05CD, 0x0403, 0x0501, 0x055D, 0x059E, 0x0781, 0x0413,
+ 0x0417, 0x041B, 0x0453, 0x048D, 0x052F, 0x053E, 0x053F, 0x055E,
+ 0x056F, 0x058E, 0x05BD, 0x05ED, 0x060E, 0x0622, 0x0683, 0x0702,
+ 0x070E, 0x071D, 0x075D, 0x076F, 0x078E, 0x079E, 0x07BF, 0x07CE
+};
+
+static const packed_percentile_table block_pcd_12x10 =
+{
+ 12, 10,
+ { 491, 240 },
+ { 1099, 341 },
+ { 0, 23 },
+ { percentile_arr_12x10_0, percentile_arr_12x10_1 }
+};
+#endif
+
+#if ASTCENC_BLOCK_MAX_TEXELS >= (12 * 12)
+static const uint16_t percentile_arr_12x12_0[529] {
+ 0x0334, 0xF534, 0x8514, 0x8954, 0x7F14, 0xFB54, 0x7B08, 0x7128,
+ 0x7974, 0x6179, 0x6B64, 0x6908, 0x606A, 0x6724, 0xB544, 0xB066,
+ 0xA14A, 0x5118, 0x9975, 0x51F9, 0x981C, 0x49CA, 0x4854, 0x886F,
+ 0x88D4, 0x48EE, 0x41E2, 0x4163, 0x40F3, 0x4261, 0x4064, 0x407E,
+ 0x385A, 0x42C1, 0x4172, 0x38EA, 0x3946, 0x78CF, 0xA056, 0x38DE,
+ 0x3D08, 0x38F9, 0x3B14, 0x38FE, 0xA134, 0x38B8, 0x31A4, 0x71D2,
+ 0x60DA, 0x39C3, 0x99BA, 0x60CA, 0x39F2, 0x30F5, 0x304F, 0x31B6,
+ 0x31F5, 0x3204, 0x3148, 0x305F, 0x2953, 0x3194, 0x3184, 0x310C,
+ 0x889C, 0x300C, 0x2943, 0x30EF, 0x28C6, 0x2997, 0x2838, 0x58E6,
+ 0x20E4, 0x28E3, 0x2873, 0x29E3, 0x2A84, 0x28D3, 0x492B, 0x2962,
+ 0x286E, 0x20BF, 0x21AA, 0x29A6, 0x6A14, 0x2828, 0x89C6, 0x21B3,
+ 0x2305, 0x29B4, 0x2173, 0x2127, 0x20D6, 0x407F, 0x2294, 0x21D9,
+ 0x21D5, 0x2004, 0x404B, 0x18DF, 0x2079, 0x219B, 0x18A8, 0x2385,
+ 0x1936, 0x21AB, 0x188C, 0x1B09, 0x18BA, 0x203B, 0x187A, 0x1875,
+ 0x2344, 0x18BB, 0x18B6, 0x193A, 0x1837, 0x1914, 0x1846, 0x1876,
+ 0x1884, 0x1D24, 0x182B, 0x284A, 0x18A7, 0x18AB, 0x1917, 0x322D,
+ 0x1047, 0x1874, 0x1818, 0x18F2, 0x1164, 0x1B89, 0x2959, 0x1B21,
+ 0x39E5, 0x1827, 0x10F4, 0x18B7, 0x11D3, 0x1A4D, 0x1315, 0x12AD,
+ 0x1AD1, 0x3A71, 0x1319, 0x11A7, 0x2044, 0x2F04, 0x2341, 0x10E5,
+ 0x1155, 0x195A, 0x1024, 0x111B, 0x1251, 0x1233, 0x12E1, 0x13A1,
+ 0x13BF, 0x212A, 0x22A2, 0x113B, 0x23DF, 0x10D5, 0x2399, 0x0814,
+ 0x1126, 0x13EE, 0x1285, 0x10C4, 0x18FD, 0x20D9, 0x0987, 0x1242,
+ 0x29C5, 0x2313, 0x0898, 0x13C1, 0x08C8, 0x11F1, 0x1034, 0x1B24,
+ 0x0B0A, 0x11E9, 0x0808, 0x125D, 0x18E9, 0x0848, 0x1395, 0x0965,
+ 0x123D, 0x2186, 0x1295, 0x18CE, 0x098B, 0x0BEF, 0x1504, 0x082C,
+ 0x0A41, 0x1144, 0x0A89, 0x0956, 0x1331, 0x085E, 0x0B04, 0x128A,
+ 0x12A3, 0x1937, 0x19C2, 0x0952, 0x0872, 0x08B4, 0x1262, 0x1124,
+ 0x1969, 0x1063, 0x0AF1, 0x1225, 0x0894, 0x11C9, 0x18D2, 0x0ACD,
+ 0x0A29, 0x0B06, 0x09B5, 0x18C7, 0x0916, 0x1088, 0x09FF, 0x2206,
+ 0x0A15, 0x08B3, 0x0B51, 0x0A1F, 0x18CB, 0x0AC2, 0x0A2E, 0x1865,
+ 0x08AC, 0x0A31, 0x08A4, 0x138A, 0x0A99, 0x09D1, 0x0A86, 0x189B,
+ 0x0283, 0x0BDD, 0x0ABD, 0x1933, 0x083F, 0x1386, 0x0923, 0x0322,
+ 0x0869, 0x10DD, 0x13B1, 0x082F, 0x087D, 0x11B9, 0x085B, 0x08ED,
+ 0x00C3, 0x08E2, 0x084E, 0x0887, 0x0855, 0x0A0A, 0x0857, 0x0B92,
+ 0x1036, 0x12A5, 0x0293, 0x0945, 0x08A6, 0x0196, 0x19A3, 0x036F,
+ 0x0904, 0x1205, 0x09E1, 0x0381, 0x0971, 0x1219, 0x0BAF, 0x0949,
+ 0x00AF, 0x0AA9, 0x018A, 0x0907, 0x0BFD, 0x003A, 0x0BCD, 0x0AB2,
+ 0x088B, 0x0252, 0x0A4E, 0x03FF, 0x0845, 0x0897, 0x0059, 0x090B,
+ 0x0B42, 0x0807, 0x0A16, 0x0853, 0x0A8D, 0x01B2, 0x0AB1, 0x091A,
+ 0x0195, 0x0A35, 0x00B5, 0x10AA, 0x0115, 0x0A21, 0x0096, 0x0A08,
+ 0x03FE, 0x0B7F, 0x08B9, 0x12B3, 0x023E, 0x0A23, 0x029E, 0x08F1,
+ 0x01A9, 0x0BDE, 0x0843, 0x02D2, 0x0A1A, 0x08C5, 0x0151, 0x0A43,
+ 0x0332, 0x0383, 0x0826, 0x0BED, 0x10C2, 0x00AE, 0x0B82, 0x0213,
+ 0x0232, 0x085D, 0x02A1, 0x101B, 0x035F, 0x0303, 0x0A39, 0x0207,
+ 0x0A53, 0x0142, 0x01A5, 0x082A, 0x0099, 0x0A17, 0x03CF, 0x0906,
+ 0x0125, 0x0A96, 0x0A9A, 0x0209, 0x0393, 0x0961, 0x0131, 0x0A88,
+ 0x0139, 0x099A, 0x0292, 0x0272, 0x0862, 0x08BE, 0x0141, 0x02C3,
+ 0x0886, 0x0039, 0x08A9, 0x01A2, 0x01B1, 0x0851, 0x020B, 0x086D,
+ 0x0312, 0x08CD, 0x020F, 0x0311, 0x0BCE, 0x0135, 0x0006, 0x0849,
+ 0x0132, 0x0A8F, 0x022F, 0x022A, 0x0AAE, 0x0A8E, 0x0263, 0x03A2,
+ 0x083E, 0x009A, 0x021B, 0x0835, 0x0323, 0x0871, 0x0993, 0x0226,
+ 0x0302, 0x0922, 0x0119, 0x0222, 0x021D, 0x0B07, 0x08C9, 0x037E,
+ 0x08BD, 0x0042, 0x00D1, 0x0B33, 0x01C1, 0x0B9A, 0x0282, 0x088A,
+ 0x0182, 0x083D, 0x004D, 0x010A, 0x0A1E, 0x0019, 0x00B2, 0x0999,
+ 0x00A5, 0x0095, 0x0817, 0x0022, 0x031A, 0x0902, 0x00A3, 0x01BF,
+ 0x029F, 0x0816, 0x03B2, 0x0015, 0x0391, 0x0BBE, 0x01FE, 0x1129,
+ 0x002E, 0x01DF, 0x0301, 0x0033, 0x0B6E, 0x00E1, 0x0297, 0x00B1,
+ 0x009F, 0x0B16, 0x000A, 0x001A, 0x0052, 0x080B, 0x030B, 0x029D,
+ 0x0BAE, 0x01FD, 0x020E, 0x00A2, 0x0A3F, 0x0192, 0x0ABE, 0x020D,
+ 0x008F, 0x028B, 0x0083, 0x0025, 0x09EE, 0x01EF, 0x0029, 0x0291,
+ 0x0B4F, 0x0396, 0x0287, 0x008E, 0x0092, 0x0B4E, 0x017E, 0x001E,
+ 0x009E, 0x0103, 0x080F, 0x000E, 0x0113, 0x0203, 0x01CF, 0x0183,
+ 0x01CE, 0x001F, 0x0112, 0x01DE, 0x038E, 0x0832, 0x033E, 0x0212,
+ 0x029B, 0x0023, 0x016F, 0x0201, 0x09AF, 0x0202, 0x0281, 0x035E,
+ 0x034D, 0x037D, 0x03AD, 0x0013, 0x0093, 0x015F, 0x0211, 0x033F,
+ 0x036D, 0x039F, 0x03BD, 0x017F, 0x032E, 0x032F, 0x035D, 0x038F,
+ 0x039E
+};
+
+static const uint16_t percentile_arr_12x12_1[246] {
+ 0x0443, 0xFFCD, 0x2C62, 0x2E21, 0x3CF1, 0x34C2, 0x4CDD, 0x2452,
+ 0xD5DF, 0x1DD1, 0x0FAE, 0x64A3, 0x0C7D, 0x3433, 0x1CD2, 0x2DEF,
+ 0x0C3E, 0x1D71, 0xA472, 0x0D32, 0x54B3, 0x4D51, 0x445D, 0x0E31,
+ 0x1FDD, 0x0DFF, 0x0CAE, 0x45A2, 0x2FBE, 0xA4B9, 0x1C4E, 0x2C9F,
+ 0x160D, 0x0D42, 0x342E, 0x074F, 0x1414, 0x0F6E, 0x0CB2, 0x34B5,
+ 0x0DFE, 0x0D86, 0x1496, 0x1D22, 0x0691, 0x140B, 0x041F, 0x0C35,
+ 0x1D93, 0x1506, 0x1439, 0x0C9A, 0x0F01, 0x2442, 0x0C8F, 0x04D1,
+ 0x1486, 0x0C6D, 0x0513, 0x0C71, 0x0E82, 0x177D, 0x0E03, 0x07BD,
+ 0x0C2F, 0x0D83, 0x07AF, 0x0D61, 0x1407, 0x0DB1, 0x050A, 0x0C94,
+ 0x07AD, 0x0D8A, 0x0C04, 0x0416, 0x0C49, 0x0445, 0x15C1, 0x0C1A,
+ 0x0525, 0x0595, 0x0C8A, 0x075E, 0x0CBD, 0x0681, 0x0F4E, 0x075F,
+ 0x061D, 0x1541, 0x0CB1, 0x0F3F, 0x0406, 0x076D, 0x0DCF, 0x05EE,
+ 0x0D23, 0x0599, 0x0CCD, 0x0711, 0x0C23, 0x079F, 0x0D15, 0x0585,
+ 0x04A2, 0x042A, 0x0D31, 0x05BF, 0x0D92, 0x0C26, 0x043D, 0x0C93,
+ 0x0502, 0x0C15, 0x048B, 0x0D03, 0x0613, 0x0516, 0x0495, 0x0C29,
+ 0x04A5, 0x040F, 0x0425, 0x0539, 0x0D19, 0x04E1, 0x05BE, 0x0422,
+ 0x0432, 0x0C0A, 0x0431, 0x041E, 0x0492, 0x04A9, 0x0582, 0x0529,
+ 0x0487, 0x0C4D, 0x0512, 0x049E, 0x0505, 0x0451, 0x0D7F, 0x0489,
+ 0x0602, 0x05DE, 0x0591, 0x0535, 0x074D, 0x055E, 0x04C1, 0x0612,
+ 0x05DD, 0x05FD, 0x0C61, 0x0521, 0x0484, 0x05CE, 0x0581, 0x0491,
+ 0x051A, 0x04A1, 0x048E, 0x040D, 0x0499, 0x071F, 0x072E, 0x075D,
+ 0x0441, 0x0589, 0x057E, 0x0CAD, 0x0501, 0x054F, 0x0692, 0x0511,
+ 0x049D, 0x0509, 0x056E, 0x040E, 0x0409, 0x0601, 0x048D, 0x0413,
+ 0x053E, 0x0419, 0x072D, 0x0408, 0x0485, 0x042D, 0x041D, 0x05A1,
+ 0x0781, 0x0402, 0x05ED, 0x0C82, 0x0403, 0x057D, 0x05CD, 0x0611,
+ 0x0488, 0x0411, 0x054E, 0x051F, 0x053F, 0x056F, 0x059F, 0x070F,
+ 0x071D, 0x073D, 0x073E, 0x077E, 0x078F, 0x0405, 0x079D, 0x079E,
+ 0x058E, 0x0412, 0x055D, 0x05AE, 0x041B, 0x0421, 0x0453, 0x0417,
+ 0x0483, 0x052E, 0x052F, 0x055F, 0x058F, 0x059E, 0x05AF, 0x05BD,
+ 0x060E, 0x0622, 0x0683, 0x068D, 0x0702, 0x070D, 0x070E, 0x071E,
+ 0x072F, 0x076F, 0x078D, 0x078E, 0x07BF, 0x07CE
+};
+
+static const packed_percentile_table block_pcd_12x12 {
+ 12, 12,
+ { 529, 246 },
+ { 1435, 335 },
+ { 0, 22 },
+ { percentile_arr_12x12_0, percentile_arr_12x12_1 }
+};
+#endif
+
+/**
+ * @brief Fetch the packed percentile table for the given 2D block size.
+ *
+ * @param xdim The block x size.
+ * @param ydim The block y size.
+ *
+ * @return The packed table.
+ */
+static const packed_percentile_table *get_packed_table(
+ int xdim,
+ int ydim
+) {
+ int idx = (ydim << 8) | xdim;
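+ // Note the y-major packing here: for example, a 12x10 block maps to idx 0x0A0C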
+ switch (idx)
+ {
+#if ASTCENC_BLOCK_MAX_TEXELS >= (4 * 4)
+ case 0x0404: return &block_pcd_4x4;
+#endif
+#if ASTCENC_BLOCK_MAX_TEXELS >= (5 * 4)
+ case 0x0405: return &block_pcd_5x4;
+#endif
+#if ASTCENC_BLOCK_MAX_TEXELS >= (5 * 5)
+ case 0x0505: return &block_pcd_5x5;
+#endif
+#if ASTCENC_BLOCK_MAX_TEXELS >= (6 * 5)
+ case 0x0506: return &block_pcd_6x5;
+#endif
+#if ASTCENC_BLOCK_MAX_TEXELS >= (6 * 6)
+ case 0x0606: return &block_pcd_6x6;
+#endif
+#if ASTCENC_BLOCK_MAX_TEXELS >= (8 * 5)
+ case 0x0508: return &block_pcd_8x5;
+#endif
+#if ASTCENC_BLOCK_MAX_TEXELS >= (8 * 6)
+ case 0x0608: return &block_pcd_8x6;
+#endif
+#if ASTCENC_BLOCK_MAX_TEXELS >= (8 * 8)
+ case 0x0808: return &block_pcd_8x8;
+#endif
+#if ASTCENC_BLOCK_MAX_TEXELS >= (10 * 5)
+ case 0x050A: return &block_pcd_10x5;
+#endif
+#if ASTCENC_BLOCK_MAX_TEXELS >= (10 * 6)
+ case 0x060A: return &block_pcd_10x6;
+#endif
+#if ASTCENC_BLOCK_MAX_TEXELS >= (10 * 8)
+ case 0x080A: return &block_pcd_10x8;
+#endif
+#if ASTCENC_BLOCK_MAX_TEXELS >= (10 * 10)
+ case 0x0A0A: return &block_pcd_10x10;
+#endif
+#if ASTCENC_BLOCK_MAX_TEXELS >= (12 * 10)
+ case 0x0A0C: return &block_pcd_12x10;
+#endif
+#if ASTCENC_BLOCK_MAX_TEXELS >= (12 * 12)
+ case 0x0C0C: return &block_pcd_12x12;
+#endif
+ }
+
+ // Should never hit this with a valid 2D block size
+ return nullptr;
+}
+
+/* See header for documentation. */
+const float *get_2d_percentile_table(
+ unsigned int xdim,
+ unsigned int ydim
+) {
+ float* unpacked_table = new float[WEIGHTS_MAX_BLOCK_MODES];
+ const packed_percentile_table *apt = get_packed_table(xdim, ydim);
+
+ // Set the default percentile
+ for (unsigned int i = 0; i < WEIGHTS_MAX_BLOCK_MODES; i++)
+ {
+ unpacked_table[i] = 1.0f;
+ }
+
+ // Populate the unpacked percentile values
+ for (int i = 0; i < 2; i++)
+ {
+ unsigned int itemcount = apt->item_count[i];
+ unsigned int difscale = apt->difscales[i];
+ unsigned int accum = apt->initial_percs[i];
+ const uint16_t *item_ptr = apt->items[i];
+
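+ // Each packed item stores an 11-bit block mode index in its low bits and a 5-bit
+ // weight delta in its high bits; deltas are accumulated and divided by difscale.
+ // For example, item 0xF534 decodes to index 0x134 with a delta of 30.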
+ for (unsigned int j = 0; j < itemcount; j++)
+ {
+ uint16_t item = item_ptr[j];
+ unsigned int idx = item & 0x7FF;
+ unsigned int weight = (item >> 11) & 0x1F;
+ accum += weight;
+ unpacked_table[idx] = static_cast<float>(accum) / static_cast<float>(difscale);
+ }
+ }
+
+ return unpacked_table;
+}
+#endif
+
+/* See header for documentation. */
+bool is_legal_2d_block_size(
+ unsigned int xdim,
+ unsigned int ydim
+) {
+ unsigned int idx = (xdim << 8) | ydim;
+ switch (idx)
+ {
+ case 0x0404:
+ case 0x0504:
+ case 0x0505:
+ case 0x0605:
+ case 0x0606:
+ case 0x0805:
+ case 0x0806:
+ case 0x0808:
+ case 0x0A05:
+ case 0x0A06:
+ case 0x0A08:
+ case 0x0A0A:
+ case 0x0C0A:
+ case 0x0C0C:
+ return true;
+ }
+
+ return false;
+}
+
+/* See header for documentation. */
+bool is_legal_3d_block_size(
+ unsigned int xdim,
+ unsigned int ydim,
+ unsigned int zdim
+) {
+ unsigned int idx = (xdim << 16) | (ydim << 8) | zdim;
+ switch (idx)
+ {
+ case 0x030303:
+ case 0x040303:
+ case 0x040403:
+ case 0x040404:
+ case 0x050404:
+ case 0x050504:
+ case 0x050505:
+ case 0x060505:
+ case 0x060605:
+ case 0x060606:
+ return true;
+ }
+
+ return false;
+}
diff --git a/thirdparty/astcenc/astcenc_pick_best_endpoint_format.cpp b/thirdparty/astcenc/astcenc_pick_best_endpoint_format.cpp
new file mode 100644
index 0000000000..f25140d4c7
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_pick_best_endpoint_format.cpp
@@ -0,0 +1,1350 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2011-2022 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+#if !defined(ASTCENC_DECOMPRESS_ONLY)
+
+/**
+ * @brief Functions for finding best endpoint format.
+ *
+ * We assume there are two independent sources of error in any given partition:
+ *
+ * - Encoding choice errors
+ * - Quantization errors
+ *
+ * Encoding choice errors are caused by encoder decisions. For example:
+ *
+ * - Using luminance instead of separate RGB components.
+ * - Using a constant 1.0 alpha instead of storing an alpha component.
+ * - Using RGB+scale instead of storing two full RGB endpoints.
+ *
+ * Quantization errors occur due to the limited precision we use for storage. These errors generally
+ * scale with quantization level, but are not actually independent of color encoding. In particular:
+ *
+ * - If we can use offset encoding then quantization error is halved.
+ * - If we can use blue-contraction then quantization error for RG is halved.
+ * - If we use HDR endpoints the quantization error is higher.
+ *
+ * Apart from these effects, we assume the error is proportional to the quantization step size.
+ */
+
+
+#include "astcenc_internal.h"
+#include "astcenc_vecmathlib.h"
+
+#include <assert.h>
+
+/**
+ * @brief Compute the errors of the endpoint line options for one partition.
+ *
+ * Uncorrelated data assumes storing completely independent RGBA channels for each endpoint. Same
+ * chroma data assumes storing RGBA endpoints which pass through the origin (LDR only). RGBL data
+ * assumes storing RGB + lumashift (HDR only). Luminance error assumes storing RGB channels as a
+ * single value.
+ *
+ *
+ * @param pi The partition info data.
+ * @param partition_index The partition index to compute the error for.
+ * @param blk The image block.
+ * @param uncor_pline The endpoint line assuming uncorrelated endpoints.
+ * @param[out] uncor_err The computed error for the uncorrelated endpoint line.
+ * @param samec_pline The endpoint line assuming the same chroma for both endpoints.
+ * @param[out] samec_err The computed error for the same chroma endpoint line.
+ * @param rgbl_pline The endpoint line assuming RGB + lumashift data.
+ * @param[out] rgbl_err The computed error for the RGB + lumashift endpoint line.
+ * @param l_pline The endpoint line assuming luminance data.
+ * @param[out] l_err The computed error for the luminance endpoint line.
+ * @param[out] a_drop_err The computed error for dropping the alpha component.
+ */
+static void compute_error_squared_rgb_single_partition(
+ const partition_info& pi,
+ int partition_index,
+ const image_block& blk,
+ const processed_line3& uncor_pline,
+ float& uncor_err,
+ const processed_line3& samec_pline,
+ float& samec_err,
+ const processed_line3& rgbl_pline,
+ float& rgbl_err,
+ const processed_line3& l_pline,
+ float& l_err,
+ float& a_drop_err
+) {
+ vfloat4 ews = blk.channel_weight;
+
+ unsigned int texel_count = pi.partition_texel_count[partition_index];
+ const uint8_t* texel_indexes = pi.texels_of_partition[partition_index];
+ promise(texel_count > 0);
+
+ vfloatacc a_drop_errv = vfloatacc::zero();
+ vfloat default_a(blk.get_default_alpha());
+
+ vfloatacc uncor_errv = vfloatacc::zero();
+ vfloat uncor_bs0(uncor_pline.bs.lane<0>());
+ vfloat uncor_bs1(uncor_pline.bs.lane<1>());
+ vfloat uncor_bs2(uncor_pline.bs.lane<2>());
+
+ vfloat uncor_amod0(uncor_pline.amod.lane<0>());
+ vfloat uncor_amod1(uncor_pline.amod.lane<1>());
+ vfloat uncor_amod2(uncor_pline.amod.lane<2>());
+
+ vfloatacc samec_errv = vfloatacc::zero();
+ vfloat samec_bs0(samec_pline.bs.lane<0>());
+ vfloat samec_bs1(samec_pline.bs.lane<1>());
+ vfloat samec_bs2(samec_pline.bs.lane<2>());
+
+ vfloatacc rgbl_errv = vfloatacc::zero();
+ vfloat rgbl_bs0(rgbl_pline.bs.lane<0>());
+ vfloat rgbl_bs1(rgbl_pline.bs.lane<1>());
+ vfloat rgbl_bs2(rgbl_pline.bs.lane<2>());
+
+ vfloat rgbl_amod0(rgbl_pline.amod.lane<0>());
+ vfloat rgbl_amod1(rgbl_pline.amod.lane<1>());
+ vfloat rgbl_amod2(rgbl_pline.amod.lane<2>());
+
+ vfloatacc l_errv = vfloatacc::zero();
+ vfloat l_bs0(l_pline.bs.lane<0>());
+ vfloat l_bs1(l_pline.bs.lane<1>());
+ vfloat l_bs2(l_pline.bs.lane<2>());
+
+ vint lane_ids = vint::lane_id();
+ for (unsigned int i = 0; i < texel_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ vint tix(texel_indexes + i);
+
+ vmask mask = lane_ids < vint(texel_count);
+ lane_ids += vint(ASTCENC_SIMD_WIDTH);
+
+ // Compute the error that arises from just ditching alpha
+ vfloat data_a = gatherf(blk.data_a, tix);
+ vfloat alpha_diff = data_a - default_a;
+ alpha_diff = alpha_diff * alpha_diff;
+
+ haccumulate(a_drop_errv, alpha_diff, mask);
+
+ vfloat data_r = gatherf(blk.data_r, tix);
+ vfloat data_g = gatherf(blk.data_g, tix);
+ vfloat data_b = gatherf(blk.data_b, tix);
+
+ // Compute uncorrelated error
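+ // Here param is the texel color's signed distance along the line direction bs, so
+ // (amod + param * bs) is the closest point on the endpoint line and dist0/1/2 are
+ // the per-channel residuals of that projection.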
+ vfloat param = data_r * uncor_bs0
+ + data_g * uncor_bs1
+ + data_b * uncor_bs2;
+
+ vfloat dist0 = (uncor_amod0 + param * uncor_bs0) - data_r;
+ vfloat dist1 = (uncor_amod1 + param * uncor_bs1) - data_g;
+ vfloat dist2 = (uncor_amod2 + param * uncor_bs2) - data_b;
+
+ vfloat error = dist0 * dist0 * ews.lane<0>()
+ + dist1 * dist1 * ews.lane<1>()
+ + dist2 * dist2 * ews.lane<2>();
+
+ haccumulate(uncor_errv, error, mask);
+
+ // Compute same chroma error - no "amod", it's always zero
+ param = data_r * samec_bs0
+ + data_g * samec_bs1
+ + data_b * samec_bs2;
+
+ dist0 = (param * samec_bs0) - data_r;
+ dist1 = (param * samec_bs1) - data_g;
+ dist2 = (param * samec_bs2) - data_b;
+
+ error = dist0 * dist0 * ews.lane<0>()
+ + dist1 * dist1 * ews.lane<1>()
+ + dist2 * dist2 * ews.lane<2>();
+
+ haccumulate(samec_errv, error, mask);
+
+ // Compute rgbl error
+ param = data_r * rgbl_bs0
+ + data_g * rgbl_bs1
+ + data_b * rgbl_bs2;
+
+ dist0 = (rgbl_amod0 + param * rgbl_bs0) - data_r;
+ dist1 = (rgbl_amod1 + param * rgbl_bs1) - data_g;
+ dist2 = (rgbl_amod2 + param * rgbl_bs2) - data_b;
+
+ error = dist0 * dist0 * ews.lane<0>()
+ + dist1 * dist1 * ews.lane<1>()
+ + dist2 * dist2 * ews.lane<2>();
+
+ haccumulate(rgbl_errv, error, mask);
+
+ // Compute luma error - no "amod", it's always zero
+ param = data_r * l_bs0
+ + data_g * l_bs1
+ + data_b * l_bs2;
+
+ dist0 = (param * l_bs0) - data_r;
+ dist1 = (param * l_bs1) - data_g;
+ dist2 = (param * l_bs2) - data_b;
+
+ error = dist0 * dist0 * ews.lane<0>()
+ + dist1 * dist1 * ews.lane<1>()
+ + dist2 * dist2 * ews.lane<2>();
+
+ haccumulate(l_errv, error, mask);
+ }
+
+ a_drop_err = hadd_s(a_drop_errv) * ews.lane<3>();
+ uncor_err = hadd_s(uncor_errv);
+ samec_err = hadd_s(samec_errv);
+ rgbl_err = hadd_s(rgbl_errv);
+ l_err = hadd_s(l_errv);
+}
+
+/**
+ * @brief For a given set of input colors and partitioning determine endpoint encode errors.
+ *
+ * This function determines the color error that results from RGB-scale encoding (LDR only),
+ * RGB-lumashift encoding (HDR only), luminance-encoding, and alpha drop. Also determines whether
+ * the endpoints are eligible for offset encoding or blue-contraction.
+ *
+ * @param blk The image block.
+ * @param pi The partition info data.
+ * @param ep The idealized endpoints.
+ * @param[out] eci The resulting encoding choice error metrics.
+ */
+static void compute_encoding_choice_errors(
+ const image_block& blk,
+ const partition_info& pi,
+ const endpoints& ep,
+ encoding_choice_errors eci[BLOCK_MAX_PARTITIONS])
+{
+ int partition_count = pi.partition_count;
+ promise(partition_count > 0);
+
+ partition_metrics pms[BLOCK_MAX_PARTITIONS];
+
+ compute_avgs_and_dirs_3_comp_rgb(pi, blk, pms);
+
+ for (int i = 0; i < partition_count; i++)
+ {
+ partition_metrics& pm = pms[i];
+
+ line3 uncor_rgb_lines;
+ line3 samec_rgb_lines; // for LDR-RGB-scale
+ line3 rgb_luma_lines; // for HDR-RGB-scale
+
+ processed_line3 uncor_rgb_plines;
+ processed_line3 samec_rgb_plines;
+ processed_line3 rgb_luma_plines;
+ processed_line3 luminance_plines;
+
+ float uncorr_rgb_error;
+ float samechroma_rgb_error;
+ float rgb_luma_error;
+ float luminance_rgb_error;
+ float alpha_drop_error;
+
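+ // Build the candidate lines: the uncorrelated line runs through the partition average
+ // along its dominant direction, the same-chroma line runs through the origin along the
+ // average color, and the RGB+luma line runs through the average along the gray axis.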
+ uncor_rgb_lines.a = pm.avg;
+ uncor_rgb_lines.b = normalize_safe(pm.dir, unit3());
+
+ samec_rgb_lines.a = vfloat4::zero();
+ samec_rgb_lines.b = normalize_safe(pm.avg, unit3());
+
+ rgb_luma_lines.a = pm.avg;
+ rgb_luma_lines.b = unit3();
+
+ uncor_rgb_plines.amod = uncor_rgb_lines.a - uncor_rgb_lines.b * dot3(uncor_rgb_lines.a, uncor_rgb_lines.b);
+ uncor_rgb_plines.bs = uncor_rgb_lines.b;
+
+ // Same chroma always goes through zero, so this is simpler than the others
+ samec_rgb_plines.amod = vfloat4::zero();
+ samec_rgb_plines.bs = samec_rgb_lines.b;
+
+ rgb_luma_plines.amod = rgb_luma_lines.a - rgb_luma_lines.b * dot3(rgb_luma_lines.a, rgb_luma_lines.b);
+ rgb_luma_plines.bs = rgb_luma_lines.b;
+
+ // Luminance always goes through zero, so this is simpler than the others
+ luminance_plines.amod = vfloat4::zero();
+ luminance_plines.bs = unit3();
+
+ compute_error_squared_rgb_single_partition(
+ pi, i, blk,
+ uncor_rgb_plines, uncorr_rgb_error,
+ samec_rgb_plines, samechroma_rgb_error,
+ rgb_luma_plines, rgb_luma_error,
+ luminance_plines, luminance_rgb_error,
+ alpha_drop_error);
+
+ // Determine if we can offset encode RGB lanes
+ vfloat4 endpt0 = ep.endpt0[i];
+ vfloat4 endpt1 = ep.endpt1[i];
+ vfloat4 endpt_diff = abs(endpt1 - endpt0);
+ vmask4 endpt_can_offset = endpt_diff < vfloat4(0.12f * 65535.0f);
+ bool can_offset_encode = (mask(endpt_can_offset) & 0x7) == 0x7;
+
+ // Store out the settings
+ eci[i].rgb_scale_error = (samechroma_rgb_error - uncorr_rgb_error) * 0.7f; // empirical
+ eci[i].rgb_luma_error = (rgb_luma_error - uncorr_rgb_error) * 1.5f; // wild guess
+ eci[i].luminance_error = (luminance_rgb_error - uncorr_rgb_error) * 3.0f; // empirical
+ eci[i].alpha_drop_error = alpha_drop_error * 3.0f;
+ eci[i].can_offset_encode = can_offset_encode;
+ eci[i].can_blue_contract = !blk.is_luminance();
+ }
+}
+
+/**
+ * @brief For a given partition compute the error for every endpoint integer count and quant level.
+ *
+ * @param encode_hdr_rgb @c true if using HDR for RGB, @c false for LDR.
+ * @param encode_hdr_alpha @c true if using HDR for alpha, @c false for LDR.
+ * @param partition_index The partition index.
+ * @param pi The partition info.
+ * @param eci The encoding choice error metrics.
+ * @param ep The idealized endpoints.
+ * @param error_weight The error weight to apply to each RGBA channel.
+ * @param[out] best_error The best error for each integer count and quant level.
+ * @param[out] format_of_choice The preferred endpoint format for each integer count and quant level.
+ */
+static void compute_color_error_for_every_integer_count_and_quant_level(
+ bool encode_hdr_rgb,
+ bool encode_hdr_alpha,
+ int partition_index,
+ const partition_info& pi,
+ const encoding_choice_errors& eci,
+ const endpoints& ep,
+ vfloat4 error_weight,
+ float best_error[21][4],
+ uint8_t format_of_choice[21][4]
+) {
+ int partition_size = pi.partition_texel_count[partition_index];
+
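+ // Baseline squared quantization error per quant level: each divisor is the squared
+ // step count (levels - 1)^2 for QUANT_6 through QUANT_256, so the error falls with
+ // the square of the quantization step size; 65536^2 / 18 is a fixed scaling constant.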
+ static const float baseline_quant_error[21 - QUANT_6] {
+ (65536.0f * 65536.0f / 18.0f) / (5 * 5),
+ (65536.0f * 65536.0f / 18.0f) / (7 * 7),
+ (65536.0f * 65536.0f / 18.0f) / (9 * 9),
+ (65536.0f * 65536.0f / 18.0f) / (11 * 11),
+ (65536.0f * 65536.0f / 18.0f) / (15 * 15),
+ (65536.0f * 65536.0f / 18.0f) / (19 * 19),
+ (65536.0f * 65536.0f / 18.0f) / (23 * 23),
+ (65536.0f * 65536.0f / 18.0f) / (31 * 31),
+ (65536.0f * 65536.0f / 18.0f) / (39 * 39),
+ (65536.0f * 65536.0f / 18.0f) / (47 * 47),
+ (65536.0f * 65536.0f / 18.0f) / (63 * 63),
+ (65536.0f * 65536.0f / 18.0f) / (79 * 79),
+ (65536.0f * 65536.0f / 18.0f) / (95 * 95),
+ (65536.0f * 65536.0f / 18.0f) / (127 * 127),
+ (65536.0f * 65536.0f / 18.0f) / (159 * 159),
+ (65536.0f * 65536.0f / 18.0f) / (191 * 191),
+ (65536.0f * 65536.0f / 18.0f) / (255 * 255)
+ };
+
+ vfloat4 ep0 = ep.endpt0[partition_index];
+ vfloat4 ep1 = ep.endpt1[partition_index];
+
+ float ep1_min = hmin_rgb_s(ep1);
+ ep1_min = astc::max(ep1_min, 0.0f);
+
+ float error_weight_rgbsum = hadd_rgb_s(error_weight);
+
+ float range_upper_limit_rgb = encode_hdr_rgb ? 61440.0f : 65535.0f;
+ float range_upper_limit_alpha = encode_hdr_alpha ? 61440.0f : 65535.0f;
+
+ // It is possible to get endpoint colors significantly outside [0,upper-limit] even if the
+ // input data are safely contained in [0,upper-limit]; we need to add an error term for this
+ vfloat4 offset(range_upper_limit_rgb, range_upper_limit_rgb, range_upper_limit_rgb, range_upper_limit_alpha);
+ vfloat4 ep0_range_error_high = max(ep0 - offset, 0.0f);
+ vfloat4 ep1_range_error_high = max(ep1 - offset, 0.0f);
+
+ vfloat4 ep0_range_error_low = min(ep0, 0.0f);
+ vfloat4 ep1_range_error_low = min(ep1, 0.0f);
+
+ vfloat4 sum_range_error =
+ (ep0_range_error_low * ep0_range_error_low) +
+ (ep1_range_error_low * ep1_range_error_low) +
+ (ep0_range_error_high * ep0_range_error_high) +
+ (ep1_range_error_high * ep1_range_error_high);
+
+ float rgb_range_error = dot3_s(sum_range_error, error_weight)
+ * 0.5f * static_cast<float>(partition_size);
+ float alpha_range_error = sum_range_error.lane<3>() * error_weight.lane<3>()
+ * 0.5f * static_cast<float>(partition_size);
+
+ if (encode_hdr_rgb)
+ {
+
+ // Collect some statistics
+ float af, cf;
+ if (ep1.lane<0>() > ep1.lane<1>() && ep1.lane<0>() > ep1.lane<2>())
+ {
+ af = ep1.lane<0>();
+ cf = ep1.lane<0>() - ep0.lane<0>();
+ }
+ else if (ep1.lane<1>() > ep1.lane<2>())
+ {
+ af = ep1.lane<1>();
+ cf = ep1.lane<1>() - ep0.lane<1>();
+ }
+ else
+ {
+ af = ep1.lane<2>();
+ cf = ep1.lane<2>() - ep0.lane<2>();
+ }
+
+ // Estimate of color-component spread in high endpoint color
+ float bf = af - ep1_min;
+ vfloat4 prd = (ep1 - vfloat4(cf)).swz<0, 1, 2>();
+ vfloat4 pdif = prd - ep0.swz<0, 1, 2>();
+ // Estimate of color-component spread in low endpoint color
+ float df = hmax_s(abs(pdif));
+
+ int b = static_cast<int>(bf);
+ int c = static_cast<int>(cf);
+ int d = static_cast<int>(df);
+
+ // Determine which one of the 6 submodes is likely to be used in case of an RGBO-mode
+ int rgbo_mode = 5; // 7 bits per component
+ // mode 4: 8 7 6
+ if (b < 32768 && c < 16384)
+ {
+ rgbo_mode = 4;
+ }
+
+ // mode 3: 9 6 7
+ if (b < 8192 && c < 16384)
+ {
+ rgbo_mode = 3;
+ }
+
+ // mode 2: 10 5 8
+ if (b < 2048 && c < 16384)
+ {
+ rgbo_mode = 2;
+ }
+
+ // mode 1: 11 6 5
+ if (b < 2048 && c < 1024)
+ {
+ rgbo_mode = 1;
+ }
+
+ // mode 0: 11 5 7
+ if (b < 1024 && c < 4096)
+ {
+ rgbo_mode = 0;
+ }
+
+ // Determine which one of the 9 submodes is likely to be used in case of an RGB-mode.
+ int rgb_mode = 8; // 8 bits per component, except 7 bits for blue
+
+ // mode 0: 9 7 6 7
+ if (b < 16384 && c < 8192 && d < 8192)
+ {
+ rgb_mode = 0;
+ }
+
+ // mode 1: 9 8 6 6
+ if (b < 32768 && c < 8192 && d < 4096)
+ {
+ rgb_mode = 1;
+ }
+
+ // mode 2: 10 6 7 7
+ if (b < 4096 && c < 8192 && d < 4096)
+ {
+ rgb_mode = 2;
+ }
+
+ // mode 3: 10 7 7 6
+ if (b < 8192 && c < 8192 && d < 2048)
+ {
+ rgb_mode = 3;
+ }
+
+ // mode 4: 11 8 6 5
+ if (b < 8192 && c < 2048 && d < 512)
+ {
+ rgb_mode = 4;
+ }
+
+ // mode 5: 11 6 8 6
+ if (b < 2048 && c < 8192 && d < 1024)
+ {
+ rgb_mode = 5;
+ }
+
+ // mode 6: 12 7 7 5
+ if (b < 2048 && c < 2048 && d < 256)
+ {
+ rgb_mode = 6;
+ }
+
+ // mode 7: 12 6 7 6
+ if (b < 1024 && c < 2048 && d < 512)
+ {
+ rgb_mode = 7;
+ }
+
+ static const float rgbo_error_scales[6] { 4.0f, 4.0f, 16.0f, 64.0f, 256.0f, 1024.0f };
+ static const float rgb_error_scales[9] { 64.0f, 64.0f, 16.0f, 16.0f, 4.0f, 4.0f, 1.0f, 1.0f, 384.0f };
+
+ float mode7mult = rgbo_error_scales[rgbo_mode] * 0.0015f; // Empirically determined ....
+ float mode11mult = rgb_error_scales[rgb_mode] * 0.010f; // Empirically determined ....
+
+
+ float lum_high = hadd_rgb_s(ep1) * (1.0f / 3.0f);
+ float lum_low = hadd_rgb_s(ep0) * (1.0f / 3.0f);
+ float lumdif = lum_high - lum_low;
+ float mode23mult = lumdif < 960 ? 4.0f : lumdif < 3968 ? 16.0f : 128.0f;
+
+ mode23mult *= 0.0005f; // Empirically determined ....
+
+ // Pick among the available HDR endpoint modes
+ for (int i = QUANT_2; i < QUANT_16; i++)
+ {
+ best_error[i][3] = ERROR_CALC_DEFAULT;
+ best_error[i][2] = ERROR_CALC_DEFAULT;
+ best_error[i][1] = ERROR_CALC_DEFAULT;
+ best_error[i][0] = ERROR_CALC_DEFAULT;
+
+ format_of_choice[i][3] = static_cast<uint8_t>(encode_hdr_alpha ? FMT_HDR_RGBA : FMT_HDR_RGB_LDR_ALPHA);
+ format_of_choice[i][2] = FMT_HDR_RGB;
+ format_of_choice[i][1] = FMT_HDR_RGB_SCALE;
+ format_of_choice[i][0] = FMT_HDR_LUMINANCE_LARGE_RANGE;
+ }
+
+ for (int i = QUANT_16; i <= QUANT_256; i++)
+ {
+ // The base_quant_error should depend on the scale-factor that would be used during
+ // actual encode of the color value
+
+ float base_quant_error = baseline_quant_error[i - QUANT_6] * static_cast<float>(partition_size);
+ float rgb_quantization_error = error_weight_rgbsum * base_quant_error * 2.0f;
+ float alpha_quantization_error = error_weight.lane<3>() * base_quant_error * 2.0f;
+ float rgba_quantization_error = rgb_quantization_error + alpha_quantization_error;
+
+ // For 8 integers, we have two encodings: one with HDR A and another one with LDR A
+
+ float full_hdr_rgba_error = rgba_quantization_error + rgb_range_error + alpha_range_error;
+ best_error[i][3] = full_hdr_rgba_error;
+ format_of_choice[i][3] = static_cast<uint8_t>(encode_hdr_alpha ? FMT_HDR_RGBA : FMT_HDR_RGB_LDR_ALPHA);
+
+ // For 6 integers, we have one HDR-RGB encoding
+ float full_hdr_rgb_error = (rgb_quantization_error * mode11mult) + rgb_range_error + eci.alpha_drop_error;
+ best_error[i][2] = full_hdr_rgb_error;
+ format_of_choice[i][2] = FMT_HDR_RGB;
+
+ // For 4 integers, we have one HDR-RGB-Scale encoding
+ float hdr_rgb_scale_error = (rgb_quantization_error * mode7mult) + rgb_range_error + eci.alpha_drop_error + eci.rgb_luma_error;
+
+ best_error[i][1] = hdr_rgb_scale_error;
+ format_of_choice[i][1] = FMT_HDR_RGB_SCALE;
+
+ // For 2 integers, we assume luminance-with-large-range
+ float hdr_luminance_error = (rgb_quantization_error * mode23mult) + rgb_range_error + eci.alpha_drop_error + eci.luminance_error;
+ best_error[i][0] = hdr_luminance_error;
+ format_of_choice[i][0] = FMT_HDR_LUMINANCE_LARGE_RANGE;
+ }
+ }
+ else
+ {
+ for (int i = QUANT_2; i < QUANT_6; i++)
+ {
+ best_error[i][3] = ERROR_CALC_DEFAULT;
+ best_error[i][2] = ERROR_CALC_DEFAULT;
+ best_error[i][1] = ERROR_CALC_DEFAULT;
+ best_error[i][0] = ERROR_CALC_DEFAULT;
+
+ format_of_choice[i][3] = FMT_RGBA;
+ format_of_choice[i][2] = FMT_RGB;
+ format_of_choice[i][1] = FMT_RGB_SCALE;
+ format_of_choice[i][0] = FMT_LUMINANCE;
+ }
+
+ float base_quant_error_rgb = error_weight_rgbsum * static_cast<float>(partition_size);
+ float base_quant_error_a = error_weight.lane<3>() * static_cast<float>(partition_size);
+ float base_quant_error_rgba = base_quant_error_rgb + base_quant_error_a;
+
+ float error_scale_bc_rgba = eci.can_blue_contract ? 0.625f : 1.0f;
+ float error_scale_oe_rgba = eci.can_offset_encode ? 0.5f : 1.0f;
+
+ float error_scale_bc_rgb = eci.can_blue_contract ? 0.5f : 1.0f;
+ float error_scale_oe_rgb = eci.can_offset_encode ? 0.25f : 1.0f;
+
+ // Pick among the available LDR endpoint modes
+ for (int i = QUANT_6; i <= QUANT_256; i++)
+ {
+ // Offset encoding not possible at higher quant levels
+ if (i >= QUANT_192)
+ {
+ error_scale_oe_rgba = 1.0f;
+ error_scale_oe_rgb = 1.0f;
+ }
+
+ float base_quant_error = baseline_quant_error[i - QUANT_6];
+ float quant_error_rgb = base_quant_error_rgb * base_quant_error;
+ float quant_error_rgba = base_quant_error_rgba * base_quant_error;
+
+ // 8 integers can encode as RGBA+RGBA
+ float full_ldr_rgba_error = quant_error_rgba
+ * error_scale_bc_rgba
+ * error_scale_oe_rgba
+ + rgb_range_error
+ + alpha_range_error;
+
+ best_error[i][3] = full_ldr_rgba_error;
+ format_of_choice[i][3] = FMT_RGBA;
+
+ // 6 integers can encode as RGB+RGB or RGBS+AA
+ float full_ldr_rgb_error = quant_error_rgb
+ * error_scale_bc_rgb
+ * error_scale_oe_rgb
+ + rgb_range_error
+ + eci.alpha_drop_error;
+
+ float rgbs_alpha_error = quant_error_rgba
+ + eci.rgb_scale_error
+ + rgb_range_error
+ + alpha_range_error;
+
+ if (rgbs_alpha_error < full_ldr_rgb_error)
+ {
+ best_error[i][2] = rgbs_alpha_error;
+ format_of_choice[i][2] = FMT_RGB_SCALE_ALPHA;
+ }
+ else
+ {
+ best_error[i][2] = full_ldr_rgb_error;
+ format_of_choice[i][2] = FMT_RGB;
+ }
+
+ // 4 integers can encode as RGBS or LA+LA
+ float ldr_rgbs_error = quant_error_rgb
+ + rgb_range_error
+ + eci.alpha_drop_error
+ + eci.rgb_scale_error;
+
+ float lum_alpha_error = quant_error_rgba
+ + rgb_range_error
+ + alpha_range_error
+ + eci.luminance_error;
+
+ if (ldr_rgbs_error < lum_alpha_error)
+ {
+ best_error[i][1] = ldr_rgbs_error;
+ format_of_choice[i][1] = FMT_RGB_SCALE;
+ }
+ else
+ {
+ best_error[i][1] = lum_alpha_error;
+ format_of_choice[i][1] = FMT_LUMINANCE_ALPHA;
+ }
+
+ // 2 integers can encode as L+L
+ float luminance_error = quant_error_rgb
+ + rgb_range_error
+ + eci.alpha_drop_error
+ + eci.luminance_error;
+
+ best_error[i][0] = luminance_error;
+ format_of_choice[i][0] = FMT_LUMINANCE;
+ }
+ }
+}
+
+/**
+ * @brief For one partition compute the best format and quantization for a given bit count.
+ *
+ * @param best_combined_error The best error for each quant level and integer count.
+ * @param best_combined_format The best format for each quant level and integer count.
+ * @param bits_available The number of bits available for encoding.
+ * @param[out] best_quant_level The output best color quant level.
+ * @param[out] best_format The output best color format.
+ *
+ * @return The output error for the best pairing.
+ */
+static float one_partition_find_best_combination_for_bitcount(
+ const float best_combined_error[21][4],
+ const uint8_t best_combined_format[21][4],
+ int bits_available,
+ uint8_t& best_quant_level,
+ uint8_t& best_format
+) {
+ int best_integer_count = 0;
+ float best_integer_count_error = ERROR_CALC_DEFAULT;
+
+ for (int integer_count = 1; integer_count <= 4; integer_count++)
+ {
+ // Compute the quantization level for a given number of integers and a given number of bits
+ int quant_level = quant_mode_table[integer_count][bits_available];
+
+ // Don't have enough bits to represent a given endpoint format at all!
+ if (quant_level < QUANT_6)
+ {
+ continue;
+ }
+
+ float integer_count_error = best_combined_error[quant_level][integer_count - 1];
+ if (integer_count_error < best_integer_count_error)
+ {
+ best_integer_count_error = integer_count_error;
+ best_integer_count = integer_count - 1;
+ }
+ }
+
+ int ql = quant_mode_table[best_integer_count + 1][bits_available];
+
+ best_quant_level = static_cast<uint8_t>(ql);
+ best_format = FMT_LUMINANCE;
+
+ if (ql >= QUANT_6)
+ {
+ best_format = best_combined_format[ql][best_integer_count];
+ }
+
+ return best_integer_count_error;
+}
+
+/**
+ * @brief For 2 partitions compute the best format combinations for every pair of quant mode and integer count.
+ *
+ * @param best_error The best error for a single endpoint quant level and integer count.
+ * @param best_format The best format for a single endpoint quant level and integer count.
+ * @param[out] best_combined_error The best combined error pairings for the 2 partitions.
+ * @param[out] best_combined_format The best combined format pairings for the 2 partitions.
+ */
+static void two_partitions_find_best_combination_for_every_quantization_and_integer_count(
+ const float best_error[2][21][4], // indexed by (partition, quant-level, integer-pair-count-minus-1)
+ const uint8_t best_format[2][21][4],
+ float best_combined_error[21][7], // indexed by (quant-level, integer-pair-count-minus-2)
+ uint8_t best_combined_format[21][7][2]
+) {
+ for (int i = QUANT_2; i <= QUANT_256; i++)
+ {
+ for (int j = 0; j < 7; j++)
+ {
+ best_combined_error[i][j] = ERROR_CALC_DEFAULT;
+ }
+ }
+
+ for (int quant = QUANT_6; quant <= QUANT_256; quant++)
+ {
+ for (int i = 0; i < 4; i++) // integer-count for first endpoint-pair
+ {
+ for (int j = 0; j < 4; j++) // integer-count for second endpoint-pair
+ {
+ int low2 = astc::min(i, j);
+ int high2 = astc::max(i, j);
+ if ((high2 - low2) > 1)
+ {
+ continue;
+ }
+
+ int intcnt = i + j;
+ float errorterm = astc::min(best_error[0][quant][i] + best_error[1][quant][j], 1e10f);
+ if (errorterm <= best_combined_error[quant][intcnt])
+ {
+ best_combined_error[quant][intcnt] = errorterm;
+ best_combined_format[quant][intcnt][0] = best_format[0][quant][i];
+ best_combined_format[quant][intcnt][1] = best_format[1][quant][j];
+ }
+ }
+ }
+ }
+}
+
+/**
+ * @brief For 2 partitions compute the best format and quantization for a given bit count.
+ *
+ * @param best_combined_error The best error for each quant level and integer count.
+ * @param best_combined_format The best format for each quant level and integer count.
+ * @param bits_available The number of bits available for encoding.
+ * @param[out] best_quant_level The output best color quant level.
+ * @param[out] best_quant_level_mod The output best color quant level assuming two more bits are available.
+ * @param[out] best_formats The output best color formats.
+ *
+ * @return The output error for the best pairing.
+ */
+static float two_partitions_find_best_combination_for_bitcount(
+ float best_combined_error[21][7],
+ uint8_t best_combined_format[21][7][2],
+ int bits_available,
+ uint8_t& best_quant_level,
+ uint8_t& best_quant_level_mod,
+ uint8_t* best_formats
+) {
+ int best_integer_count = 0;
+ float best_integer_count_error = ERROR_CALC_DEFAULT;
+
+ for (int integer_count = 2; integer_count <= 8; integer_count++)
+ {
+ // Compute the quantization level for a given number of integers and a given number of bits
+ int quant_level = quant_mode_table[integer_count][bits_available];
+
+ // Don't have enough bits to represent a given endpoint format at all!
+ if (quant_level < QUANT_6)
+ {
+ break;
+ }
+
+ float integer_count_error = best_combined_error[quant_level][integer_count - 2];
+ if (integer_count_error < best_integer_count_error)
+ {
+ best_integer_count_error = integer_count_error;
+ best_integer_count = integer_count;
+ }
+ }
+
+ int ql = quant_mode_table[best_integer_count][bits_available];
+ int ql_mod = quant_mode_table[best_integer_count][bits_available + 2];
+
+ best_quant_level = static_cast<uint8_t>(ql);
+ best_quant_level_mod = static_cast<uint8_t>(ql_mod);
+
+ if (ql >= QUANT_6)
+ {
+ for (int i = 0; i < 2; i++)
+ {
+ best_formats[i] = best_combined_format[ql][best_integer_count - 2][i];
+ }
+ }
+ else
+ {
+ for (int i = 0; i < 2; i++)
+ {
+ best_formats[i] = FMT_LUMINANCE;
+ }
+ }
+
+ return best_integer_count_error;
+}
+
+/**
+ * @brief For 3 partitions compute the best format combinations for every pair of quant mode and integer count.
+ *
+ * @param best_error The best error for a single endpoint quant level and integer count.
+ * @param best_format The best format for a single endpoint quant level and integer count.
+ * @param[out] best_combined_error The best combined error pairings for the 3 partitions.
+ * @param[out] best_combined_format The best combined format pairings for the 3 partitions.
+ */
+static void three_partitions_find_best_combination_for_every_quantization_and_integer_count(
+ const float best_error[3][21][4], // indexed by (partition, quant-level, integer-count)
+ const uint8_t best_format[3][21][4],
+ float best_combined_error[21][10],
+ uint8_t best_combined_format[21][10][3]
+) {
+ for (int i = QUANT_2; i <= QUANT_256; i++)
+ {
+ for (int j = 0; j < 10; j++)
+ {
+ best_combined_error[i][j] = ERROR_CALC_DEFAULT;
+ }
+ }
+
+ for (int quant = QUANT_6; quant <= QUANT_256; quant++)
+ {
+ for (int i = 0; i < 4; i++) // integer-count for first endpoint-pair
+ {
+ for (int j = 0; j < 4; j++) // integer-count for second endpoint-pair
+ {
+ int low2 = astc::min(i, j);
+ int high2 = astc::max(i, j);
+ if ((high2 - low2) > 1)
+ {
+ continue;
+ }
+
+ for (int k = 0; k < 4; k++) // integer-count for third endpoint-pair
+ {
+ int low3 = astc::min(k, low2);
+ int high3 = astc::max(k, high2);
+ if ((high3 - low3) > 1)
+ {
+ continue;
+ }
+
+ int intcnt = i + j + k;
+ float errorterm = astc::min(best_error[0][quant][i] + best_error[1][quant][j] + best_error[2][quant][k], 1e10f);
+ if (errorterm <= best_combined_error[quant][intcnt])
+ {
+ best_combined_error[quant][intcnt] = errorterm;
+ best_combined_format[quant][intcnt][0] = best_format[0][quant][i];
+ best_combined_format[quant][intcnt][1] = best_format[1][quant][j];
+ best_combined_format[quant][intcnt][2] = best_format[2][quant][k];
+ }
+ }
+ }
+ }
+ }
+}
+
+/**
+ * @brief For 3 partitions compute the best format and quantization for a given bit count.
+ *
+ * @param best_combined_error The best error for each quant level and integer count.
+ * @param best_combined_format The best format for each quant level and integer count.
+ * @param bits_available The number of bits available for encoding.
+ * @param[out] best_quant_level The output best color quant level.
+ * @param[out] best_quant_level_mod The output best color quant level assuming two more bits are available.
+ * @param[out] best_formats The output best color formats.
+ *
+ * @return The output error for the best pairing.
+ */
+static float three_partitions_find_best_combination_for_bitcount(
+ const float best_combined_error[21][10],
+ const uint8_t best_combined_format[21][10][3],
+ int bits_available,
+ uint8_t& best_quant_level,
+ uint8_t& best_quant_level_mod,
+ uint8_t* best_formats
+) {
+ int best_integer_count = 0;
+ float best_integer_count_error = ERROR_CALC_DEFAULT;
+
+ for (int integer_count = 3; integer_count <= 9; integer_count++)
+ {
+ // Compute the quantization level for a given number of integers and a given number of bits
+ int quant_level = quant_mode_table[integer_count][bits_available];
+
+ // Don't have enough bits to represent a given endpoint format at all!
+ if (quant_level < QUANT_6)
+ {
+ break;
+ }
+
+ float integer_count_error = best_combined_error[quant_level][integer_count - 3];
+ if (integer_count_error < best_integer_count_error)
+ {
+ best_integer_count_error = integer_count_error;
+ best_integer_count = integer_count;
+ }
+ }
+
+ int ql = quant_mode_table[best_integer_count][bits_available];
+ int ql_mod = quant_mode_table[best_integer_count][bits_available + 5];
+
+ best_quant_level = static_cast<uint8_t>(ql);
+ best_quant_level_mod = static_cast<uint8_t>(ql_mod);
+
+ if (ql >= QUANT_6)
+ {
+ for (int i = 0; i < 3; i++)
+ {
+ best_formats[i] = best_combined_format[ql][best_integer_count - 3][i];
+ }
+ }
+ else
+ {
+ for (int i = 0; i < 3; i++)
+ {
+ best_formats[i] = FMT_LUMINANCE;
+ }
+ }
+
+ return best_integer_count_error;
+}
+
+/**
+ * @brief For 4 partitions compute the best format combinations for every pair of quant mode and integer count.
+ *
+ * @param best_error The best error for a single endpoint quant level and integer count.
+ * @param best_format The best format for a single endpoint quant level and integer count.
+ * @param[out] best_combined_error The best combined error pairings for the 4 partitions.
+ * @param[out] best_combined_format The best combined format pairings for the 4 partitions.
+ */
+static void four_partitions_find_best_combination_for_every_quantization_and_integer_count(
+ const float best_error[4][21][4], // indexed by (partition, quant-level, integer-count)
+ const uint8_t best_format[4][21][4],
+ float best_combined_error[21][13],
+ uint8_t best_combined_format[21][13][4]
+) {
+ for (int i = QUANT_2; i <= QUANT_256; i++)
+ {
+ for (int j = 0; j < 13; j++)
+ {
+ best_combined_error[i][j] = ERROR_CALC_DEFAULT;
+ }
+ }
+
+ for (int quant = QUANT_6; quant <= QUANT_256; quant++)
+ {
+ for (int i = 0; i < 4; i++) // integer-count for first endpoint-pair
+ {
+ for (int j = 0; j < 4; j++) // integer-count for second endpoint-pair
+ {
+ int low2 = astc::min(i, j);
+ int high2 = astc::max(i, j);
+ if ((high2 - low2) > 1)
+ {
+ continue;
+ }
+
+ for (int k = 0; k < 4; k++) // integer-count for third endpoint-pair
+ {
+ int low3 = astc::min(k, low2);
+ int high3 = astc::max(k, high2);
+ if ((high3 - low3) > 1)
+ {
+ continue;
+ }
+
+ for (int l = 0; l < 4; l++) // integer-count for fourth endpoint-pair
+ {
+ int low4 = astc::min(l, low3);
+ int high4 = astc::max(l, high3);
+ if ((high4 - low4) > 1)
+ {
+ continue;
+ }
+
+ int intcnt = i + j + k + l;
+ float errorterm = astc::min(best_error[0][quant][i] + best_error[1][quant][j] + best_error[2][quant][k] + best_error[3][quant][l], 1e10f);
+ if (errorterm <= best_combined_error[quant][intcnt])
+ {
+ best_combined_error[quant][intcnt] = errorterm;
+ best_combined_format[quant][intcnt][0] = best_format[0][quant][i];
+ best_combined_format[quant][intcnt][1] = best_format[1][quant][j];
+ best_combined_format[quant][intcnt][2] = best_format[2][quant][k];
+ best_combined_format[quant][intcnt][3] = best_format[3][quant][l];
+ }
+ }
+ }
+ }
+ }
+ }
+}
+
+/**
+ * @brief For 4 partitions compute the best format and quantization for a given bit count.
+ *
+ * @param best_combined_error The best error for each quant level and integer count.
+ * @param best_combined_format The best format for each quant level and integer count.
+ * @param bits_available The number of bits available for encoding.
+ * @param[out] best_quant_level The output best color quant level.
+ * @param[out] best_quant_level_mod The output best color quant level assuming two more bits are available.
+ * @param[out] best_formats The output best color formats.
+ *
+ * @return The output error for the best pairing.
+ */
+static float four_partitions_find_best_combination_for_bitcount(
+ const float best_combined_error[21][13],
+ const uint8_t best_combined_format[21][13][4],
+ int bits_available,
+ uint8_t& best_quant_level,
+ uint8_t& best_quant_level_mod,
+ uint8_t* best_formats
+) {
+ int best_integer_count = 0;
+ float best_integer_count_error = ERROR_CALC_DEFAULT;
+
+ for (int integer_count = 4; integer_count <= 9; integer_count++)
+ {
+ // Compute the quantization level for a given number of integers and a given number of bits
+ int quant_level = quant_mode_table[integer_count][bits_available];
+
+ // Don't have enough bits to represent a given endpoint format at all!
+ if (quant_level < QUANT_6)
+ {
+ break;
+ }
+
+ float integer_count_error = best_combined_error[quant_level][integer_count - 4];
+ if (integer_count_error < best_integer_count_error)
+ {
+ best_integer_count_error = integer_count_error;
+ best_integer_count = integer_count;
+ }
+ }
+
+ int ql = quant_mode_table[best_integer_count][bits_available];
+ int ql_mod = quant_mode_table[best_integer_count][bits_available + 8];
+
+ best_quant_level = static_cast<uint8_t>(ql);
+ best_quant_level_mod = static_cast<uint8_t>(ql_mod);
+
+ if (ql >= QUANT_6)
+ {
+ for (int i = 0; i < 4; i++)
+ {
+ best_formats[i] = best_combined_format[ql][best_integer_count - 4][i];
+ }
+ }
+ else
+ {
+ for (int i = 0; i < 4; i++)
+ {
+ best_formats[i] = FMT_LUMINANCE;
+ }
+ }
+
+ return best_integer_count_error;
+}
+
+/* See header for documentation. */
+unsigned int compute_ideal_endpoint_formats(
+ const partition_info& pi,
+ const image_block& blk,
+ const endpoints& ep,
+ // bitcounts and errors computed for the various quantization methods
+ const int8_t* qwt_bitcounts,
+ const float* qwt_errors,
+ unsigned int tune_candidate_limit,
+ unsigned int start_block_mode,
+ unsigned int end_block_mode,
+ // output data
+ uint8_t partition_format_specifiers[TUNE_MAX_TRIAL_CANDIDATES][BLOCK_MAX_PARTITIONS],
+ int block_mode[TUNE_MAX_TRIAL_CANDIDATES],
+ quant_method quant_level[TUNE_MAX_TRIAL_CANDIDATES],
+ quant_method quant_level_mod[TUNE_MAX_TRIAL_CANDIDATES],
+ compression_working_buffers& tmpbuf
+) {
+ int partition_count = pi.partition_count;
+
+ promise(partition_count > 0);
+
+ bool encode_hdr_rgb = static_cast<bool>(blk.rgb_lns[0]);
+ bool encode_hdr_alpha = static_cast<bool>(blk.alpha_lns[0]);
+
+ // Compute the errors that result from various encoding choices (such as using luminance instead
+ // of RGB, discarding Alpha, using RGB-scale in place of two separate RGB endpoints and so on)
+ encoding_choice_errors eci[BLOCK_MAX_PARTITIONS];
+ compute_encoding_choice_errors(blk, pi, ep, eci);
+
+ float best_error[BLOCK_MAX_PARTITIONS][21][4];
+ uint8_t format_of_choice[BLOCK_MAX_PARTITIONS][21][4];
+ for (int i = 0; i < partition_count; i++)
+ {
+ compute_color_error_for_every_integer_count_and_quant_level(
+ encode_hdr_rgb, encode_hdr_alpha, i,
+ pi, eci[i], ep, blk.channel_weight, best_error[i],
+ format_of_choice[i]);
+ }
+
+ float* errors_of_best_combination = tmpbuf.errors_of_best_combination;
+ uint8_t* best_quant_levels = tmpbuf.best_quant_levels;
+ uint8_t* best_quant_levels_mod = tmpbuf.best_quant_levels_mod;
+ uint8_t (&best_ep_formats)[WEIGHTS_MAX_BLOCK_MODES][BLOCK_MAX_PARTITIONS] = tmpbuf.best_ep_formats;
+
+ // Ensure that the first iteration understep contains data that will never be picked
+ vfloat clear_error(ERROR_CALC_DEFAULT);
+ vint clear_quant(0);
+
+ unsigned int packed_start_block_mode = round_down_to_simd_multiple_vla(start_block_mode);
+ storea(clear_error, errors_of_best_combination + packed_start_block_mode);
+ store_nbytes(clear_quant, best_quant_levels + packed_start_block_mode);
+ store_nbytes(clear_quant, best_quant_levels_mod + packed_start_block_mode);
+
+ // Ensure that the last iteration overstep contains data that will never be picked
+ unsigned int packed_end_block_mode = round_down_to_simd_multiple_vla(end_block_mode - 1);
+ storea(clear_error, errors_of_best_combination + packed_end_block_mode);
+ store_nbytes(clear_quant, best_quant_levels + packed_end_block_mode);
+ store_nbytes(clear_quant, best_quant_levels_mod + packed_end_block_mode);
+
+ // Track a scalar best to avoid expensive search at least once ...
+ float error_of_best_combination = ERROR_CALC_DEFAULT;
+ int index_of_best_combination = -1;
+
+ // The block contains 1 partition
+ if (partition_count == 1)
+ {
+ for (unsigned int i = start_block_mode; i < end_block_mode; i++)
+ {
+ if (qwt_errors[i] >= ERROR_CALC_DEFAULT)
+ {
+ errors_of_best_combination[i] = ERROR_CALC_DEFAULT;
+ continue;
+ }
+
+ float error_of_best = one_partition_find_best_combination_for_bitcount(
+ best_error[0], format_of_choice[0], qwt_bitcounts[i],
+ best_quant_levels[i], best_ep_formats[i][0]);
+
+ float total_error = error_of_best + qwt_errors[i];
+ errors_of_best_combination[i] = total_error;
+ best_quant_levels_mod[i] = best_quant_levels[i];
+
+ if (total_error < error_of_best_combination)
+ {
+ error_of_best_combination = total_error;
+ index_of_best_combination = i;
+ }
+ }
+ }
+ // The block contains 2 partitions
+ else if (partition_count == 2)
+ {
+ float combined_best_error[21][7];
+ uint8_t formats_of_choice[21][7][2];
+
+ two_partitions_find_best_combination_for_every_quantization_and_integer_count(
+ best_error, format_of_choice, combined_best_error, formats_of_choice);
+
+ assert(start_block_mode == 0);
+ for (unsigned int i = 0; i < end_block_mode; i++)
+ {
+ if (qwt_errors[i] >= ERROR_CALC_DEFAULT)
+ {
+ errors_of_best_combination[i] = ERROR_CALC_DEFAULT;
+ continue;
+ }
+
+ float error_of_best = two_partitions_find_best_combination_for_bitcount(
+ combined_best_error, formats_of_choice, qwt_bitcounts[i],
+ best_quant_levels[i], best_quant_levels_mod[i],
+ best_ep_formats[i]);
+
+ float total_error = error_of_best + qwt_errors[i];
+ errors_of_best_combination[i] = total_error;
+
+ if (total_error < error_of_best_combination)
+ {
+ error_of_best_combination = total_error;
+ index_of_best_combination = i;
+ }
+ }
+ }
+ // The block contains 3 partitions
+ else if (partition_count == 3)
+ {
+ float combined_best_error[21][10];
+ uint8_t formats_of_choice[21][10][3];
+
+ three_partitions_find_best_combination_for_every_quantization_and_integer_count(
+ best_error, format_of_choice, combined_best_error, formats_of_choice);
+
+ assert(start_block_mode == 0);
+ for (unsigned int i = 0; i < end_block_mode; i++)
+ {
+ if (qwt_errors[i] >= ERROR_CALC_DEFAULT)
+ {
+ errors_of_best_combination[i] = ERROR_CALC_DEFAULT;
+ continue;
+ }
+
+ float error_of_best = three_partitions_find_best_combination_for_bitcount(
+ combined_best_error, formats_of_choice, qwt_bitcounts[i],
+ best_quant_levels[i], best_quant_levels_mod[i],
+ best_ep_formats[i]);
+
+ float total_error = error_of_best + qwt_errors[i];
+ errors_of_best_combination[i] = total_error;
+
+ if (total_error < error_of_best_combination)
+ {
+ error_of_best_combination = total_error;
+ index_of_best_combination = i;
+ }
+ }
+ }
+ // The block contains 4 partitions
+ else // if (partition_count == 4)
+ {
+ assert(partition_count == 4);
+ float combined_best_error[21][13];
+ uint8_t formats_of_choice[21][13][4];
+
+ four_partitions_find_best_combination_for_every_quantization_and_integer_count(
+ best_error, format_of_choice, combined_best_error, formats_of_choice);
+
+ assert(start_block_mode == 0);
+ for (unsigned int i = 0; i < end_block_mode; i++)
+ {
+ if (qwt_errors[i] >= ERROR_CALC_DEFAULT)
+ {
+ errors_of_best_combination[i] = ERROR_CALC_DEFAULT;
+ continue;
+ }
+
+ float error_of_best = four_partitions_find_best_combination_for_bitcount(
+ combined_best_error, formats_of_choice, qwt_bitcounts[i],
+ best_quant_levels[i], best_quant_levels_mod[i],
+ best_ep_formats[i]);
+
+ float total_error = error_of_best + qwt_errors[i];
+ errors_of_best_combination[i] = total_error;
+
+ if (total_error < error_of_best_combination)
+ {
+ error_of_best_combination = total_error;
+ index_of_best_combination = i;
+ }
+ }
+ }
+
+ int best_error_weights[TUNE_MAX_TRIAL_CANDIDATES];
+
+ // Fast path the first result and avoid the list search for trial 0
+ best_error_weights[0] = index_of_best_combination;
+ if (index_of_best_combination >= 0)
+ {
+ errors_of_best_combination[index_of_best_combination] = ERROR_CALC_DEFAULT;
+ }
+
+ // Search the remaining results and pick the best candidate modes for trial 1+
+ for (unsigned int i = 1; i < tune_candidate_limit; i++)
+ {
+ vint vbest_error_index(-1);
+ vfloat vbest_ep_error(ERROR_CALC_DEFAULT);
+
+ start_block_mode = round_down_to_simd_multiple_vla(start_block_mode);
+ vint lane_ids = vint::lane_id() + vint(start_block_mode);
+ for (unsigned int j = start_block_mode; j < end_block_mode; j += ASTCENC_SIMD_WIDTH)
+ {
+ vfloat err = vfloat(errors_of_best_combination + j);
+ vmask mask = err < vbest_ep_error;
+ vbest_ep_error = select(vbest_ep_error, err, mask);
+ vbest_error_index = select(vbest_error_index, lane_ids, mask);
+ lane_ids += vint(ASTCENC_SIMD_WIDTH);
+ }
+
+ // Pick best mode from the SIMD result, using lowest matching index to ensure invariance
+ vmask lanes_min_error = vbest_ep_error == hmin(vbest_ep_error);
+ vbest_error_index = select(vint(0x7FFFFFFF), vbest_error_index, lanes_min_error);
+ vbest_error_index = hmin(vbest_error_index);
+ int best_error_index = vbest_error_index.lane<0>();
+
+ best_error_weights[i] = best_error_index;
+
+ // Max the error for this candidate so we don't pick it again
+ if (best_error_index >= 0)
+ {
+ errors_of_best_combination[best_error_index] = ERROR_CALC_DEFAULT;
+ }
+ // Early-out if no more candidates are valid
+ else
+ {
+ break;
+ }
+ }
+
+ for (unsigned int i = 0; i < tune_candidate_limit; i++)
+ {
+ if (best_error_weights[i] < 0)
+ {
+ return i;
+ }
+
+ block_mode[i] = best_error_weights[i];
+
+ quant_level[i] = static_cast<quant_method>(best_quant_levels[best_error_weights[i]]);
+ quant_level_mod[i] = static_cast<quant_method>(best_quant_levels_mod[best_error_weights[i]]);
+
+ assert(quant_level[i] >= QUANT_6 && quant_level[i] <= QUANT_256);
+ assert(quant_level_mod[i] >= QUANT_6 && quant_level_mod[i] <= QUANT_256);
+
+ for (int j = 0; j < partition_count; j++)
+ {
+ partition_format_specifiers[i][j] = best_ep_formats[best_error_weights[i]][j];
+ }
+ }
+
+ return tune_candidate_limit;
+}
+
+#endif
diff --git a/thirdparty/astcenc/astcenc_platform_isa_detection.cpp b/thirdparty/astcenc/astcenc_platform_isa_detection.cpp
new file mode 100644
index 0000000000..8ed98437ea
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_platform_isa_detection.cpp
@@ -0,0 +1,166 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2020-2022 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+/**
+ * @brief Platform-specific function implementations.
+ *
+ * This module contains functions for querying the host extended ISA support.
+ */
+
+// Include before the defines below to pick up any auto-setup based on compiler
+// built-in config, if not being set explicitly by the build system
+#include "astcenc_internal.h"
+
+#if (ASTCENC_SSE > 0) || (ASTCENC_AVX > 0) || \
+ (ASTCENC_POPCNT > 0) || (ASTCENC_F16C > 0)
+
+static bool g_init { false };
+
+/** Does this CPU support SSE 4.1? Valid only after @c g_init is set. */
+static bool g_cpu_has_sse41 { false };
+
+/** Does this CPU support AVX2? Valid only after @c g_init is set. */
+static bool g_cpu_has_avx2 { false };
+
+/** Does this CPU support POPCNT? Valid only after @c g_init is set. */
+static bool g_cpu_has_popcnt { false };
+
+/** Does this CPU support F16C? Valid only after @c g_init is set. */
+static bool g_cpu_has_f16c { false };
+
+/* ============================================================================
+ Platform code for Visual Studio
+============================================================================ */
+#if !defined(__clang__) && defined(_MSC_VER)
+#define WIN32_LEAN_AND_MEAN
+#include <windows.h>
+#include <intrin.h>
+
+/**
+ * @brief Detect platform CPU ISA support and update global trackers.
+ */
+static void detect_cpu_isa()
+{
+ int data[4];
+
+ __cpuid(data, 0);
+ int num_id = data[0];
+
+ if (num_id >= 1)
+ {
+ __cpuidex(data, 1, 0);
+ // SSE41 = Bank 1, ECX, bit 19
+ g_cpu_has_sse41 = data[2] & (1 << 19) ? true : false;
+ // POPCNT = Bank 1, ECX, bit 23
+ g_cpu_has_popcnt = data[2] & (1 << 23) ? true : false;
+ // F16C = Bank 1, ECX, bit 29
+ g_cpu_has_f16c = data[2] & (1 << 29) ? true : false;
+ }
+
+ if (num_id >= 7)
+ {
+ __cpuidex(data, 7, 0);
+ // AVX2 = Bank 7, EBX, bit 5
+ g_cpu_has_avx2 = data[1] & (1 << 5) ? true : false;
+ }
+
+ // Ensure state bits are updated before init flag is updated
+ MemoryBarrier();
+ g_init = true;
+}
+
+/* ============================================================================
+ Platform code for GCC and Clang
+============================================================================ */
+#else
+#include <cpuid.h>
+
+/**
+ * @brief Detect platform CPU ISA support and update global trackers.
+ */
+static void detect_cpu_isa()
+{
+ unsigned int data[4];
+
+ if (__get_cpuid_count(1, 0, &data[0], &data[1], &data[2], &data[3]))
+ {
+ // SSE41 = Bank 1, ECX, bit 19
+ g_cpu_has_sse41 = data[2] & (1 << 19) ? true : false;
+ // POPCNT = Bank 1, ECX, bit 23
+ g_cpu_has_popcnt = data[2] & (1 << 23) ? true : false;
+ // F16C = Bank 1, ECX, bit 29
+ g_cpu_has_f16c = data[2] & (1 << 29) ? true : false;
+ }
+
+ g_cpu_has_avx2 = false;
+ if (__get_cpuid_count(7, 0, &data[0], &data[1], &data[2], &data[3]))
+ {
+ // AVX2 = Bank 7, EBX, bit 5
+ g_cpu_has_avx2 = data[1] & (1 << 5) ? true : false;
+ }
+
+ // Ensure state bits are updated before init flag is updated
+ __sync_synchronize();
+ g_init = true;
+}
+#endif
+
+/* See header for documentation. */
+bool cpu_supports_popcnt()
+{
+ if (!g_init)
+ {
+ detect_cpu_isa();
+ }
+
+ return g_cpu_has_popcnt;
+}
+
+/* See header for documentation. */
+bool cpu_supports_f16c()
+{
+ if (!g_init)
+ {
+ detect_cpu_isa();
+ }
+
+ return g_cpu_has_f16c;
+}
+
+/* See header for documentation. */
+bool cpu_supports_sse41()
+{
+ if (!g_init)
+ {
+ detect_cpu_isa();
+ }
+
+ return g_cpu_has_sse41;
+}
+
+/* See header for documentation. */
+bool cpu_supports_avx2()
+{
+ if (!g_init)
+ {
+ detect_cpu_isa();
+ }
+
+ return g_cpu_has_avx2;
+}
+
+#endif
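A brief illustration of how the query functions above are typically consumed: the caller probes for the widest ISA it was built against and falls back in order. This is a sketch of the dispatch pattern only; backend_fn and the three per-ISA function arguments are hypothetical placeholders, not symbols defined in this file or in the astcenc API.

    // Sketch only: select the widest supported backend at runtime.
    // avx2_fn / sse41_fn / scalar_fn are hypothetical per-ISA variants
    // of the same routine supplied by the caller.
    using backend_fn = void (*)();

    static backend_fn select_backend(backend_fn avx2_fn, backend_fn sse41_fn, backend_fn scalar_fn)
    {
        // The AVX2 path also assumes the POPCNT and F16C helpers it is built with
        if (cpu_supports_avx2() && cpu_supports_popcnt() && cpu_supports_f16c())
        {
            return avx2_fn;
        }

        if (cpu_supports_sse41())
        {
            return sse41_fn;
        }

        // No extended ISA detected: use the portable path
        return scalar_fn;
    }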
diff --git a/thirdparty/astcenc/astcenc_quantization.cpp b/thirdparty/astcenc/astcenc_quantization.cpp
new file mode 100644
index 0000000000..478a21ead7
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_quantization.cpp
@@ -0,0 +1,904 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2011-2021 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+/**
+ * @brief Functions and data tables for numeric quantization.
+ */
+
+#include "astcenc_internal.h"
+
+#if !defined(ASTCENC_DECOMPRESS_ONLY)
+
+// Starts from QUANT_6
+// Not scrambled
+const uint8_t color_unquant_to_uquant_tables[17][256] {
+ {
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 51, 51, 51, 51, 51, 51,
+ 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51,
+ 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51,
+ 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 102, 102, 102,
+ 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102,
+ 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102,
+ 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102,
+ 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153,
+ 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153,
+ 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153,
+ 153, 153, 153, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204,
+ 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204,
+ 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204,
+ 204, 204, 204, 204, 204, 204, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
+ 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255
+ },
+ {
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36,
+ 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36,
+ 36, 36, 36, 36, 36, 36, 36, 73, 73, 73, 73, 73, 73, 73, 73, 73,
+ 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73,
+ 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 109, 109, 109, 109,
+ 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109,
+ 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109,
+ 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146,
+ 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146,
+ 146, 146, 146, 146, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182,
+ 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182,
+ 182, 182, 182, 182, 182, 182, 182, 182, 182, 219, 219, 219, 219, 219, 219, 219,
+ 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219,
+ 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 255, 255, 255,
+ 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255
+ },
+ {
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28,
+ 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
+ 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 56, 56, 56, 56, 56,
+ 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56,
+ 56, 56, 56, 56, 56, 56, 56, 84, 84, 84, 84, 84, 84, 84, 84, 84,
+ 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84,
+ 84, 84, 84, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113,
+ 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113,
+ 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142,
+ 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 171, 171, 171,
+ 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171,
+ 171, 171, 171, 171, 171, 171, 171, 171, 171, 199, 199, 199, 199, 199, 199, 199,
+ 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199,
+ 199, 199, 199, 199, 199, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227,
+ 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227,
+ 227, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255
+ },
+ {
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, 23, 23, 23,
+ 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23,
+ 23, 23, 23, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46,
+ 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 69, 69, 69, 69, 69, 69,
+ 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
+ 69, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92,
+ 92, 92, 92, 92, 92, 92, 92, 92, 92, 116, 116, 116, 116, 116, 116, 116,
+ 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116,
+ 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139,
+ 139, 139, 139, 139, 139, 139, 139, 163, 163, 163, 163, 163, 163, 163, 163, 163,
+ 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 186,
+ 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186,
+ 186, 186, 186, 186, 186, 186, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209,
+ 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 232, 232, 232,
+ 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232,
+ 232, 232, 232, 232, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255
+ },
+ {
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 17, 17, 17, 17, 17, 17, 17,
+ 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 34, 34, 34, 34, 34, 34,
+ 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 51, 51, 51, 51, 51,
+ 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 85, 85, 85,
+ 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 102, 102,
+ 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 119,
+ 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119,
+ 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136,
+ 136, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153,
+ 153, 153, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170,
+ 170, 170, 170, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187,
+ 187, 187, 187, 187, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204,
+ 204, 204, 204, 204, 204, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221,
+ 221, 221, 221, 221, 221, 221, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238,
+ 238, 238, 238, 238, 238, 238, 238, 255, 255, 255, 255, 255, 255, 255, 255, 255
+ },
+ {
+ 0, 0, 0, 0, 0, 0, 0, 13, 13, 13, 13, 13, 13, 13, 13, 13,
+ 13, 13, 13, 13, 13, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27,
+ 27, 27, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40,
+ 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 67, 67, 67,
+ 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 80, 80, 80, 80, 80, 80,
+ 80, 80, 80, 80, 80, 80, 80, 80, 94, 94, 94, 94, 94, 94, 94, 94,
+ 94, 94, 94, 94, 94, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107,
+ 107, 107, 107, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121,
+ 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 148, 148, 148,
+ 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 161, 161, 161, 161, 161,
+ 161, 161, 161, 161, 161, 161, 161, 161, 175, 175, 175, 175, 175, 175, 175, 175,
+ 175, 175, 175, 175, 175, 175, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188,
+ 188, 188, 188, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201,
+ 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 228, 228,
+ 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 242, 242, 242, 242, 242,
+ 242, 242, 242, 242, 242, 242, 242, 242, 242, 255, 255, 255, 255, 255, 255, 255
+ },
+ {
+ 0, 0, 0, 0, 0, 0, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
+ 11, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 33, 33, 33, 33,
+ 33, 33, 33, 33, 33, 33, 33, 44, 44, 44, 44, 44, 44, 44, 44, 44,
+ 44, 44, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 66, 66, 66,
+ 66, 66, 66, 66, 66, 66, 66, 66, 77, 77, 77, 77, 77, 77, 77, 77,
+ 77, 77, 77, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 99, 99,
+ 99, 99, 99, 99, 99, 99, 99, 99, 99, 110, 110, 110, 110, 110, 110, 110,
+ 110, 110, 110, 110, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121,
+ 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 145, 145, 145, 145,
+ 145, 145, 145, 145, 145, 145, 145, 156, 156, 156, 156, 156, 156, 156, 156, 156,
+ 156, 156, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 178, 178, 178,
+ 178, 178, 178, 178, 178, 178, 178, 178, 189, 189, 189, 189, 189, 189, 189, 189,
+ 189, 189, 189, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 211, 211,
+ 211, 211, 211, 211, 211, 211, 211, 211, 211, 222, 222, 222, 222, 222, 222, 222,
+ 222, 222, 222, 222, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 244,
+ 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 255, 255, 255, 255, 255, 255
+ },
+ {
+ 0, 0, 0, 0, 0, 8, 8, 8, 8, 8, 8, 8, 8, 16, 16, 16,
+ 16, 16, 16, 16, 16, 24, 24, 24, 24, 24, 24, 24, 24, 33, 33, 33,
+ 33, 33, 33, 33, 33, 33, 41, 41, 41, 41, 41, 41, 41, 41, 49, 49,
+ 49, 49, 49, 49, 49, 49, 57, 57, 57, 57, 57, 57, 57, 57, 66, 66,
+ 66, 66, 66, 66, 66, 66, 66, 74, 74, 74, 74, 74, 74, 74, 74, 82,
+ 82, 82, 82, 82, 82, 82, 82, 90, 90, 90, 90, 90, 90, 90, 90, 99,
+ 99, 99, 99, 99, 99, 99, 99, 99, 107, 107, 107, 107, 107, 107, 107, 107,
+ 115, 115, 115, 115, 115, 115, 115, 115, 123, 123, 123, 123, 123, 123, 123, 123,
+ 132, 132, 132, 132, 132, 132, 132, 132, 140, 140, 140, 140, 140, 140, 140, 140,
+ 148, 148, 148, 148, 148, 148, 148, 148, 156, 156, 156, 156, 156, 156, 156, 156,
+ 156, 165, 165, 165, 165, 165, 165, 165, 165, 173, 173, 173, 173, 173, 173, 173,
+ 173, 181, 181, 181, 181, 181, 181, 181, 181, 189, 189, 189, 189, 189, 189, 189,
+ 189, 189, 198, 198, 198, 198, 198, 198, 198, 198, 206, 206, 206, 206, 206, 206,
+ 206, 206, 214, 214, 214, 214, 214, 214, 214, 214, 222, 222, 222, 222, 222, 222,
+ 222, 222, 222, 231, 231, 231, 231, 231, 231, 231, 231, 239, 239, 239, 239, 239,
+ 239, 239, 239, 247, 247, 247, 247, 247, 247, 247, 247, 255, 255, 255, 255, 255
+ },
+ {
+ 0, 0, 0, 0, 6, 6, 6, 6, 6, 6, 13, 13, 13, 13, 13, 13,
+ 13, 19, 19, 19, 19, 19, 19, 26, 26, 26, 26, 26, 26, 26, 32, 32,
+ 32, 32, 32, 32, 39, 39, 39, 39, 39, 39, 39, 45, 45, 45, 45, 45,
+ 45, 52, 52, 52, 52, 52, 52, 52, 58, 58, 58, 58, 58, 58, 65, 65,
+ 65, 65, 65, 65, 65, 71, 71, 71, 71, 71, 71, 78, 78, 78, 78, 78,
+ 78, 78, 84, 84, 84, 84, 84, 84, 91, 91, 91, 91, 91, 91, 91, 97,
+ 97, 97, 97, 97, 97, 104, 104, 104, 104, 104, 104, 104, 110, 110, 110, 110,
+ 110, 110, 117, 117, 117, 117, 117, 117, 117, 123, 123, 123, 123, 123, 123, 123,
+ 132, 132, 132, 132, 132, 132, 132, 138, 138, 138, 138, 138, 138, 138, 145, 145,
+ 145, 145, 145, 145, 151, 151, 151, 151, 151, 151, 151, 158, 158, 158, 158, 158,
+ 158, 164, 164, 164, 164, 164, 164, 164, 171, 171, 171, 171, 171, 171, 177, 177,
+ 177, 177, 177, 177, 177, 184, 184, 184, 184, 184, 184, 190, 190, 190, 190, 190,
+ 190, 190, 197, 197, 197, 197, 197, 197, 203, 203, 203, 203, 203, 203, 203, 210,
+ 210, 210, 210, 210, 210, 216, 216, 216, 216, 216, 216, 216, 223, 223, 223, 223,
+ 223, 223, 229, 229, 229, 229, 229, 229, 229, 236, 236, 236, 236, 236, 236, 242,
+ 242, 242, 242, 242, 242, 242, 249, 249, 249, 249, 249, 249, 255, 255, 255, 255
+ },
+ {
+ 0, 0, 0, 5, 5, 5, 5, 5, 5, 11, 11, 11, 11, 11, 16, 16,
+ 16, 16, 16, 21, 21, 21, 21, 21, 21, 27, 27, 27, 27, 27, 32, 32,
+ 32, 32, 32, 32, 38, 38, 38, 38, 38, 43, 43, 43, 43, 43, 48, 48,
+ 48, 48, 48, 48, 54, 54, 54, 54, 54, 59, 59, 59, 59, 59, 59, 65,
+ 65, 65, 65, 65, 70, 70, 70, 70, 70, 70, 76, 76, 76, 76, 76, 81,
+ 81, 81, 81, 81, 86, 86, 86, 86, 86, 86, 92, 92, 92, 92, 92, 97,
+ 97, 97, 97, 97, 97, 103, 103, 103, 103, 103, 108, 108, 108, 108, 108, 113,
+ 113, 113, 113, 113, 113, 119, 119, 119, 119, 119, 124, 124, 124, 124, 124, 124,
+ 131, 131, 131, 131, 131, 131, 136, 136, 136, 136, 136, 142, 142, 142, 142, 142,
+ 142, 147, 147, 147, 147, 147, 152, 152, 152, 152, 152, 158, 158, 158, 158, 158,
+ 158, 163, 163, 163, 163, 163, 169, 169, 169, 169, 169, 169, 174, 174, 174, 174,
+ 174, 179, 179, 179, 179, 179, 185, 185, 185, 185, 185, 185, 190, 190, 190, 190,
+ 190, 196, 196, 196, 196, 196, 196, 201, 201, 201, 201, 201, 207, 207, 207, 207,
+ 207, 207, 212, 212, 212, 212, 212, 217, 217, 217, 217, 217, 223, 223, 223, 223,
+ 223, 223, 228, 228, 228, 228, 228, 234, 234, 234, 234, 234, 234, 239, 239, 239,
+ 239, 239, 244, 244, 244, 244, 244, 250, 250, 250, 250, 250, 250, 255, 255, 255
+ },
+ {
+ 0, 0, 0, 4, 4, 4, 4, 8, 8, 8, 8, 12, 12, 12, 12, 16,
+ 16, 16, 16, 20, 20, 20, 20, 24, 24, 24, 24, 28, 28, 28, 28, 32,
+ 32, 32, 32, 36, 36, 36, 36, 40, 40, 40, 40, 44, 44, 44, 44, 48,
+ 48, 48, 48, 52, 52, 52, 52, 56, 56, 56, 56, 60, 60, 60, 60, 65,
+ 65, 65, 65, 65, 69, 69, 69, 69, 73, 73, 73, 73, 77, 77, 77, 77,
+ 81, 81, 81, 81, 85, 85, 85, 85, 89, 89, 89, 89, 93, 93, 93, 93,
+ 97, 97, 97, 97, 101, 101, 101, 101, 105, 105, 105, 105, 109, 109, 109, 109,
+ 113, 113, 113, 113, 117, 117, 117, 117, 121, 121, 121, 121, 125, 125, 125, 125,
+ 130, 130, 130, 130, 134, 134, 134, 134, 138, 138, 138, 138, 142, 142, 142, 142,
+ 146, 146, 146, 146, 150, 150, 150, 150, 154, 154, 154, 154, 158, 158, 158, 158,
+ 162, 162, 162, 162, 166, 166, 166, 166, 170, 170, 170, 170, 174, 174, 174, 174,
+ 178, 178, 178, 178, 182, 182, 182, 182, 186, 186, 186, 186, 190, 190, 190, 190,
+ 190, 195, 195, 195, 195, 199, 199, 199, 199, 203, 203, 203, 203, 207, 207, 207,
+ 207, 211, 211, 211, 211, 215, 215, 215, 215, 219, 219, 219, 219, 223, 223, 223,
+ 223, 227, 227, 227, 227, 231, 231, 231, 231, 235, 235, 235, 235, 239, 239, 239,
+ 239, 243, 243, 243, 243, 247, 247, 247, 247, 251, 251, 251, 251, 255, 255, 255
+ },
+ {
+ 0, 0, 3, 3, 3, 6, 6, 6, 9, 9, 9, 9, 13, 13, 13, 16,
+ 16, 16, 19, 19, 19, 22, 22, 22, 25, 25, 25, 25, 29, 29, 29, 32,
+ 32, 32, 35, 35, 35, 38, 38, 38, 38, 42, 42, 42, 45, 45, 45, 48,
+ 48, 48, 51, 51, 51, 54, 54, 54, 54, 58, 58, 58, 61, 61, 61, 64,
+ 64, 64, 67, 67, 67, 67, 71, 71, 71, 74, 74, 74, 77, 77, 77, 80,
+ 80, 80, 83, 83, 83, 83, 87, 87, 87, 90, 90, 90, 93, 93, 93, 96,
+ 96, 96, 96, 100, 100, 100, 103, 103, 103, 106, 106, 106, 109, 109, 109, 112,
+ 112, 112, 112, 116, 116, 116, 119, 119, 119, 122, 122, 122, 125, 125, 125, 125,
+ 130, 130, 130, 130, 133, 133, 133, 136, 136, 136, 139, 139, 139, 143, 143, 143,
+ 143, 146, 146, 146, 149, 149, 149, 152, 152, 152, 155, 155, 155, 159, 159, 159,
+ 159, 162, 162, 162, 165, 165, 165, 168, 168, 168, 172, 172, 172, 172, 175, 175,
+ 175, 178, 178, 178, 181, 181, 181, 184, 184, 184, 188, 188, 188, 188, 191, 191,
+ 191, 194, 194, 194, 197, 197, 197, 201, 201, 201, 201, 204, 204, 204, 207, 207,
+ 207, 210, 210, 210, 213, 213, 213, 217, 217, 217, 217, 220, 220, 220, 223, 223,
+ 223, 226, 226, 226, 230, 230, 230, 230, 233, 233, 233, 236, 236, 236, 239, 239,
+ 239, 242, 242, 242, 246, 246, 246, 246, 249, 249, 249, 252, 252, 252, 255, 255
+ },
+ {
+ 0, 0, 2, 2, 5, 5, 5, 8, 8, 8, 10, 10, 13, 13, 13, 16,
+ 16, 16, 18, 18, 21, 21, 21, 24, 24, 24, 26, 26, 29, 29, 29, 32,
+ 32, 32, 35, 35, 35, 37, 37, 40, 40, 40, 43, 43, 43, 45, 45, 48,
+ 48, 48, 51, 51, 51, 53, 53, 56, 56, 56, 59, 59, 59, 61, 61, 64,
+ 64, 64, 67, 67, 67, 70, 70, 70, 72, 72, 75, 75, 75, 78, 78, 78,
+ 80, 80, 83, 83, 83, 86, 86, 86, 88, 88, 91, 91, 91, 94, 94, 94,
+ 96, 96, 99, 99, 99, 102, 102, 102, 104, 104, 107, 107, 107, 110, 110, 110,
+ 112, 112, 115, 115, 115, 118, 118, 118, 120, 120, 123, 123, 123, 126, 126, 126,
+ 129, 129, 129, 132, 132, 132, 135, 135, 137, 137, 137, 140, 140, 140, 143, 143,
+ 145, 145, 145, 148, 148, 148, 151, 151, 153, 153, 153, 156, 156, 156, 159, 159,
+ 161, 161, 161, 164, 164, 164, 167, 167, 169, 169, 169, 172, 172, 172, 175, 175,
+ 177, 177, 177, 180, 180, 180, 183, 183, 185, 185, 185, 188, 188, 188, 191, 191,
+ 191, 194, 194, 196, 196, 196, 199, 199, 199, 202, 202, 204, 204, 204, 207, 207,
+ 207, 210, 210, 212, 212, 212, 215, 215, 215, 218, 218, 220, 220, 220, 223, 223,
+ 223, 226, 226, 226, 229, 229, 231, 231, 231, 234, 234, 234, 237, 237, 239, 239,
+ 239, 242, 242, 242, 245, 245, 247, 247, 247, 250, 250, 250, 253, 253, 255, 255
+ },
+ {
+ 0, 0, 2, 2, 4, 4, 6, 6, 8, 8, 10, 10, 12, 12, 14, 14,
+ 16, 16, 18, 18, 20, 20, 22, 22, 24, 24, 26, 26, 28, 28, 30, 30,
+ 32, 32, 34, 34, 36, 36, 38, 38, 40, 40, 42, 42, 44, 44, 46, 46,
+ 48, 48, 50, 50, 52, 52, 54, 54, 56, 56, 58, 58, 60, 60, 62, 62,
+ 64, 64, 66, 66, 68, 68, 70, 70, 72, 72, 74, 74, 76, 76, 78, 78,
+ 80, 80, 82, 82, 84, 84, 86, 86, 88, 88, 90, 90, 92, 92, 94, 94,
+ 96, 96, 98, 98, 100, 100, 102, 102, 104, 104, 106, 106, 108, 108, 110, 110,
+ 112, 112, 114, 114, 116, 116, 118, 118, 120, 120, 122, 122, 124, 124, 126, 126,
+ 129, 129, 131, 131, 133, 133, 135, 135, 137, 137, 139, 139, 141, 141, 143, 143,
+ 145, 145, 147, 147, 149, 149, 151, 151, 153, 153, 155, 155, 157, 157, 159, 159,
+ 161, 161, 163, 163, 165, 165, 167, 167, 169, 169, 171, 171, 173, 173, 175, 175,
+ 177, 177, 179, 179, 181, 181, 183, 183, 185, 185, 187, 187, 189, 189, 191, 191,
+ 193, 193, 195, 195, 197, 197, 199, 199, 201, 201, 203, 203, 205, 205, 207, 207,
+ 209, 209, 211, 211, 213, 213, 215, 215, 217, 217, 219, 219, 221, 221, 223, 223,
+ 225, 225, 227, 227, 229, 229, 231, 231, 233, 233, 235, 235, 237, 237, 239, 239,
+ 241, 241, 243, 243, 245, 245, 247, 247, 249, 249, 251, 251, 253, 253, 255, 255
+ },
+ {
+ 0, 1, 1, 3, 4, 4, 6, 6, 8, 9, 9, 11, 12, 12, 14, 14,
+ 16, 17, 17, 19, 20, 20, 22, 22, 24, 25, 25, 27, 28, 28, 30, 30,
+ 32, 33, 33, 35, 36, 36, 38, 38, 40, 41, 41, 43, 44, 44, 46, 46,
+ 48, 49, 49, 51, 52, 52, 54, 54, 56, 57, 57, 59, 60, 60, 62, 62,
+ 64, 65, 65, 67, 68, 68, 70, 70, 72, 73, 73, 75, 76, 76, 78, 78,
+ 80, 81, 81, 83, 84, 84, 86, 86, 88, 89, 89, 91, 92, 92, 94, 94,
+ 96, 97, 97, 99, 100, 100, 102, 102, 104, 105, 105, 107, 108, 108, 110, 110,
+ 112, 113, 113, 115, 116, 116, 118, 118, 120, 121, 121, 123, 124, 124, 126, 126,
+ 129, 129, 131, 131, 132, 134, 134, 135, 137, 137, 139, 139, 140, 142, 142, 143,
+ 145, 145, 147, 147, 148, 150, 150, 151, 153, 153, 155, 155, 156, 158, 158, 159,
+ 161, 161, 163, 163, 164, 166, 166, 167, 169, 169, 171, 171, 172, 174, 174, 175,
+ 177, 177, 179, 179, 180, 182, 182, 183, 185, 185, 187, 187, 188, 190, 190, 191,
+ 193, 193, 195, 195, 196, 198, 198, 199, 201, 201, 203, 203, 204, 206, 206, 207,
+ 209, 209, 211, 211, 212, 214, 214, 215, 217, 217, 219, 219, 220, 222, 222, 223,
+ 225, 225, 227, 227, 228, 230, 230, 231, 233, 233, 235, 235, 236, 238, 238, 239,
+ 241, 241, 243, 243, 244, 246, 246, 247, 249, 249, 251, 251, 252, 254, 254, 255
+ },
+ {
+ 0, 1, 2, 2, 4, 5, 6, 6, 8, 9, 10, 10, 12, 13, 14, 14,
+ 16, 17, 18, 18, 20, 21, 22, 22, 24, 25, 26, 26, 28, 29, 30, 30,
+ 32, 33, 34, 34, 36, 37, 38, 38, 40, 41, 42, 42, 44, 45, 46, 46,
+ 48, 49, 50, 50, 52, 53, 54, 54, 56, 57, 58, 58, 60, 61, 62, 62,
+ 64, 65, 66, 66, 68, 69, 70, 70, 72, 73, 74, 74, 76, 77, 78, 78,
+ 80, 81, 82, 82, 84, 85, 86, 86, 88, 89, 90, 90, 92, 93, 94, 94,
+ 96, 97, 98, 98, 100, 101, 102, 102, 104, 105, 106, 106, 108, 109, 110, 110,
+ 112, 113, 114, 114, 116, 117, 118, 118, 120, 121, 122, 122, 124, 125, 126, 126,
+ 129, 129, 130, 131, 133, 133, 134, 135, 137, 137, 138, 139, 141, 141, 142, 143,
+ 145, 145, 146, 147, 149, 149, 150, 151, 153, 153, 154, 155, 157, 157, 158, 159,
+ 161, 161, 162, 163, 165, 165, 166, 167, 169, 169, 170, 171, 173, 173, 174, 175,
+ 177, 177, 178, 179, 181, 181, 182, 183, 185, 185, 186, 187, 189, 189, 190, 191,
+ 193, 193, 194, 195, 197, 197, 198, 199, 201, 201, 202, 203, 205, 205, 206, 207,
+ 209, 209, 210, 211, 213, 213, 214, 215, 217, 217, 218, 219, 221, 221, 222, 223,
+ 225, 225, 226, 227, 229, 229, 230, 231, 233, 233, 234, 235, 237, 237, 238, 239,
+ 241, 241, 242, 243, 245, 245, 246, 247, 249, 249, 250, 251, 253, 253, 254, 255
+ },
+ {
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
+ 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
+ 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+ 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
+ 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
+ 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95,
+ 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111,
+ 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127,
+ 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143,
+ 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159,
+ 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175,
+ 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191,
+ 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207,
+ 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223,
+ 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239,
+ 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255
+ }
+};
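Each row above maps an 8-bit input to the nearest value representable at that quantization level; the first (QUANT_6) row, for example, snaps every input onto {0, 51, 102, 153, 204, 255}. The sketch below illustrates that nearest-value construction for a single row, taking the level's representable values as an argument. It is an illustration of the table semantics, not the upstream generator (the shipped tables are precomputed); the strict comparison resolves ties toward the lower value, which matches the rows spot-checked here.

    // Sketch only: rebuild one nearest-value row from a level's representable
    // values, e.g. { 0, 51, 102, 153, 204, 255 } for QUANT_6.
    #include <cstdint>
    #include <cstdlib>
    #include <vector>

    std::vector<uint8_t> build_unquant_to_uquant_row(const std::vector<uint8_t>& values)
    {
        std::vector<uint8_t> row(256);
        for (int i = 0; i < 256; i++)
        {
            uint8_t best = values[0];
            for (uint8_t v : values)
            {
                // Strict comparison keeps the lower value on a tie
                if (std::abs(i - static_cast<int>(v)) < std::abs(i - static_cast<int>(best)))
                {
                    best = v;
                }
            }
            row[static_cast<size_t>(i)] = best;
        }
        return row;
    }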
+
+// Starts from QUANT_6
+// Scrambled
+const uint8_t color_uquant_to_scrambled_pquant_tables[17][256] {
+ {
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
+ },
+ {
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 6, 6,
+ 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
+ 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 7, 7, 7,
+ 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
+ },
+ {
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 6, 6, 6, 6, 6, 6, 6, 6, 6,
+ 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
+ 6, 6, 6, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
+ 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
+ 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
+ 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 7, 7, 7,
+ 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
+ 7, 7, 7, 7, 7, 7, 7, 7, 7, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
+ },
+ {
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
+ 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
+ 6, 6, 6, 6, 6, 6, 6, 6, 6, 10, 10, 10, 10, 10, 10, 10,
+ 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
+ 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
+ 11, 11, 11, 11, 11, 11, 11, 7, 7, 7, 7, 7, 7, 7, 7, 7,
+ 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
+ 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
+ },
+ {
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 6, 6,
+ 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 7,
+ 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
+ 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
+ 8, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
+ 9, 9, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
+ 10, 10, 10, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
+ 11, 11, 11, 11, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
+ 12, 12, 12, 12, 12, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
+ 13, 13, 13, 13, 13, 13, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
+ 14, 14, 14, 14, 14, 14, 14, 15, 15, 15, 15, 15, 15, 15, 15, 15
+ },
+ {
+ 0, 0, 0, 0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
+ 8, 8, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
+ 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 6, 6, 6, 6, 6, 6,
+ 6, 6, 6, 6, 6, 6, 6, 6, 10, 10, 10, 10, 10, 10, 10, 10,
+ 10, 10, 10, 10, 10, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
+ 14, 14, 14, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18,
+ 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 15, 15, 15,
+ 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 11, 11, 11, 11, 11,
+ 11, 11, 11, 11, 11, 11, 11, 11, 7, 7, 7, 7, 7, 7, 7, 7,
+ 7, 7, 7, 7, 7, 7, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
+ 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 9, 9,
+ 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 1, 1, 1, 1, 1, 1, 1
+ },
+ {
+ 0, 0, 0, 0, 0, 0, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
+ 8, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 10, 10, 10, 10, 10, 10, 10, 10, 10,
+ 10, 10, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 12, 12, 12, 12, 12, 12, 12, 12,
+ 12, 12, 12, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 6, 6,
+ 6, 6, 6, 6, 6, 6, 6, 6, 6, 14, 14, 14, 14, 14, 14, 14,
+ 14, 14, 14, 14, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22,
+ 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 15, 15, 15, 15,
+ 15, 15, 15, 15, 15, 15, 15, 7, 7, 7, 7, 7, 7, 7, 7, 7,
+ 7, 7, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 13, 13, 13,
+ 13, 13, 13, 13, 13, 13, 13, 13, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 11, 11,
+ 11, 11, 11, 11, 11, 11, 11, 11, 11, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 9,
+ 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 1, 1, 1, 1, 1, 1
+ },
+ {
+ 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2,
+ 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5, 6, 6,
+ 6, 6, 6, 6, 6, 6, 7, 7, 7, 7, 7, 7, 7, 7, 8, 8,
+ 8, 8, 8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 9, 9, 10,
+ 10, 10, 10, 10, 10, 10, 10, 11, 11, 11, 11, 11, 11, 11, 11, 12,
+ 12, 12, 12, 12, 12, 12, 12, 12, 13, 13, 13, 13, 13, 13, 13, 13,
+ 14, 14, 14, 14, 14, 14, 14, 14, 15, 15, 15, 15, 15, 15, 15, 15,
+ 16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 17, 17, 17, 17,
+ 18, 18, 18, 18, 18, 18, 18, 18, 19, 19, 19, 19, 19, 19, 19, 19,
+ 19, 20, 20, 20, 20, 20, 20, 20, 20, 21, 21, 21, 21, 21, 21, 21,
+ 21, 22, 22, 22, 22, 22, 22, 22, 22, 23, 23, 23, 23, 23, 23, 23,
+ 23, 23, 24, 24, 24, 24, 24, 24, 24, 24, 25, 25, 25, 25, 25, 25,
+ 25, 25, 26, 26, 26, 26, 26, 26, 26, 26, 27, 27, 27, 27, 27, 27,
+ 27, 27, 27, 28, 28, 28, 28, 28, 28, 28, 28, 29, 29, 29, 29, 29,
+ 29, 29, 29, 30, 30, 30, 30, 30, 30, 30, 30, 31, 31, 31, 31, 31
+ },
+ {
+ 0, 0, 0, 0, 8, 8, 8, 8, 8, 8, 16, 16, 16, 16, 16, 16,
+ 16, 24, 24, 24, 24, 24, 24, 32, 32, 32, 32, 32, 32, 32, 2, 2,
+ 2, 2, 2, 2, 10, 10, 10, 10, 10, 10, 10, 18, 18, 18, 18, 18,
+ 18, 26, 26, 26, 26, 26, 26, 26, 34, 34, 34, 34, 34, 34, 4, 4,
+ 4, 4, 4, 4, 4, 12, 12, 12, 12, 12, 12, 20, 20, 20, 20, 20,
+ 20, 20, 28, 28, 28, 28, 28, 28, 36, 36, 36, 36, 36, 36, 36, 6,
+ 6, 6, 6, 6, 6, 14, 14, 14, 14, 14, 14, 14, 22, 22, 22, 22,
+ 22, 22, 30, 30, 30, 30, 30, 30, 30, 38, 38, 38, 38, 38, 38, 38,
+ 39, 39, 39, 39, 39, 39, 39, 31, 31, 31, 31, 31, 31, 31, 23, 23,
+ 23, 23, 23, 23, 15, 15, 15, 15, 15, 15, 15, 7, 7, 7, 7, 7,
+ 7, 37, 37, 37, 37, 37, 37, 37, 29, 29, 29, 29, 29, 29, 21, 21,
+ 21, 21, 21, 21, 21, 13, 13, 13, 13, 13, 13, 5, 5, 5, 5, 5,
+ 5, 5, 35, 35, 35, 35, 35, 35, 27, 27, 27, 27, 27, 27, 27, 19,
+ 19, 19, 19, 19, 19, 11, 11, 11, 11, 11, 11, 11, 3, 3, 3, 3,
+ 3, 3, 33, 33, 33, 33, 33, 33, 33, 25, 25, 25, 25, 25, 25, 17,
+ 17, 17, 17, 17, 17, 17, 9, 9, 9, 9, 9, 9, 1, 1, 1, 1
+ },
+ {
+ 0, 0, 0, 16, 16, 16, 16, 16, 16, 32, 32, 32, 32, 32, 2, 2,
+ 2, 2, 2, 18, 18, 18, 18, 18, 18, 34, 34, 34, 34, 34, 4, 4,
+ 4, 4, 4, 4, 20, 20, 20, 20, 20, 36, 36, 36, 36, 36, 6, 6,
+ 6, 6, 6, 6, 22, 22, 22, 22, 22, 38, 38, 38, 38, 38, 38, 8,
+ 8, 8, 8, 8, 24, 24, 24, 24, 24, 24, 40, 40, 40, 40, 40, 10,
+ 10, 10, 10, 10, 26, 26, 26, 26, 26, 26, 42, 42, 42, 42, 42, 12,
+ 12, 12, 12, 12, 12, 28, 28, 28, 28, 28, 44, 44, 44, 44, 44, 14,
+ 14, 14, 14, 14, 14, 30, 30, 30, 30, 30, 46, 46, 46, 46, 46, 46,
+ 47, 47, 47, 47, 47, 47, 31, 31, 31, 31, 31, 15, 15, 15, 15, 15,
+ 15, 45, 45, 45, 45, 45, 29, 29, 29, 29, 29, 13, 13, 13, 13, 13,
+ 13, 43, 43, 43, 43, 43, 27, 27, 27, 27, 27, 27, 11, 11, 11, 11,
+ 11, 41, 41, 41, 41, 41, 25, 25, 25, 25, 25, 25, 9, 9, 9, 9,
+ 9, 39, 39, 39, 39, 39, 39, 23, 23, 23, 23, 23, 7, 7, 7, 7,
+ 7, 7, 37, 37, 37, 37, 37, 21, 21, 21, 21, 21, 5, 5, 5, 5,
+ 5, 5, 35, 35, 35, 35, 35, 19, 19, 19, 19, 19, 19, 3, 3, 3,
+ 3, 3, 33, 33, 33, 33, 33, 17, 17, 17, 17, 17, 17, 1, 1, 1
+ },
+ {
+ 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4,
+ 4, 4, 4, 5, 5, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7, 8,
+ 8, 8, 8, 9, 9, 9, 9, 10, 10, 10, 10, 11, 11, 11, 11, 12,
+ 12, 12, 12, 13, 13, 13, 13, 14, 14, 14, 14, 15, 15, 15, 15, 16,
+ 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 18, 19, 19, 19, 19,
+ 20, 20, 20, 20, 21, 21, 21, 21, 22, 22, 22, 22, 23, 23, 23, 23,
+ 24, 24, 24, 24, 25, 25, 25, 25, 26, 26, 26, 26, 27, 27, 27, 27,
+ 28, 28, 28, 28, 29, 29, 29, 29, 30, 30, 30, 30, 31, 31, 31, 31,
+ 32, 32, 32, 32, 33, 33, 33, 33, 34, 34, 34, 34, 35, 35, 35, 35,
+ 36, 36, 36, 36, 37, 37, 37, 37, 38, 38, 38, 38, 39, 39, 39, 39,
+ 40, 40, 40, 40, 41, 41, 41, 41, 42, 42, 42, 42, 43, 43, 43, 43,
+ 44, 44, 44, 44, 45, 45, 45, 45, 46, 46, 46, 46, 47, 47, 47, 47,
+ 47, 48, 48, 48, 48, 49, 49, 49, 49, 50, 50, 50, 50, 51, 51, 51,
+ 51, 52, 52, 52, 52, 53, 53, 53, 53, 54, 54, 54, 54, 55, 55, 55,
+ 55, 56, 56, 56, 56, 57, 57, 57, 57, 58, 58, 58, 58, 59, 59, 59,
+ 59, 60, 60, 60, 60, 61, 61, 61, 61, 62, 62, 62, 62, 63, 63, 63
+ },
+ {
+ 0, 0, 16, 16, 16, 32, 32, 32, 48, 48, 48, 48, 64, 64, 64, 2,
+ 2, 2, 18, 18, 18, 34, 34, 34, 50, 50, 50, 50, 66, 66, 66, 4,
+ 4, 4, 20, 20, 20, 36, 36, 36, 36, 52, 52, 52, 68, 68, 68, 6,
+ 6, 6, 22, 22, 22, 38, 38, 38, 38, 54, 54, 54, 70, 70, 70, 8,
+ 8, 8, 24, 24, 24, 24, 40, 40, 40, 56, 56, 56, 72, 72, 72, 10,
+ 10, 10, 26, 26, 26, 26, 42, 42, 42, 58, 58, 58, 74, 74, 74, 12,
+ 12, 12, 12, 28, 28, 28, 44, 44, 44, 60, 60, 60, 76, 76, 76, 14,
+ 14, 14, 14, 30, 30, 30, 46, 46, 46, 62, 62, 62, 78, 78, 78, 78,
+ 79, 79, 79, 79, 63, 63, 63, 47, 47, 47, 31, 31, 31, 15, 15, 15,
+ 15, 77, 77, 77, 61, 61, 61, 45, 45, 45, 29, 29, 29, 13, 13, 13,
+ 13, 75, 75, 75, 59, 59, 59, 43, 43, 43, 27, 27, 27, 27, 11, 11,
+ 11, 73, 73, 73, 57, 57, 57, 41, 41, 41, 25, 25, 25, 25, 9, 9,
+ 9, 71, 71, 71, 55, 55, 55, 39, 39, 39, 39, 23, 23, 23, 7, 7,
+ 7, 69, 69, 69, 53, 53, 53, 37, 37, 37, 37, 21, 21, 21, 5, 5,
+ 5, 67, 67, 67, 51, 51, 51, 51, 35, 35, 35, 19, 19, 19, 3, 3,
+ 3, 65, 65, 65, 49, 49, 49, 49, 33, 33, 33, 17, 17, 17, 1, 1
+ },
+ {
+ 0, 0, 32, 32, 64, 64, 64, 2, 2, 2, 34, 34, 66, 66, 66, 4,
+ 4, 4, 36, 36, 68, 68, 68, 6, 6, 6, 38, 38, 70, 70, 70, 8,
+ 8, 8, 40, 40, 40, 72, 72, 10, 10, 10, 42, 42, 42, 74, 74, 12,
+ 12, 12, 44, 44, 44, 76, 76, 14, 14, 14, 46, 46, 46, 78, 78, 16,
+ 16, 16, 48, 48, 48, 80, 80, 80, 18, 18, 50, 50, 50, 82, 82, 82,
+ 20, 20, 52, 52, 52, 84, 84, 84, 22, 22, 54, 54, 54, 86, 86, 86,
+ 24, 24, 56, 56, 56, 88, 88, 88, 26, 26, 58, 58, 58, 90, 90, 90,
+ 28, 28, 60, 60, 60, 92, 92, 92, 30, 30, 62, 62, 62, 94, 94, 94,
+ 95, 95, 95, 63, 63, 63, 31, 31, 93, 93, 93, 61, 61, 61, 29, 29,
+ 91, 91, 91, 59, 59, 59, 27, 27, 89, 89, 89, 57, 57, 57, 25, 25,
+ 87, 87, 87, 55, 55, 55, 23, 23, 85, 85, 85, 53, 53, 53, 21, 21,
+ 83, 83, 83, 51, 51, 51, 19, 19, 81, 81, 81, 49, 49, 49, 17, 17,
+ 17, 79, 79, 47, 47, 47, 15, 15, 15, 77, 77, 45, 45, 45, 13, 13,
+ 13, 75, 75, 43, 43, 43, 11, 11, 11, 73, 73, 41, 41, 41, 9, 9,
+ 9, 71, 71, 71, 39, 39, 7, 7, 7, 69, 69, 69, 37, 37, 5, 5,
+ 5, 67, 67, 67, 35, 35, 3, 3, 3, 65, 65, 65, 33, 33, 1, 1
+ },
+ {
+ 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7,
+ 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14, 15, 15,
+ 16, 16, 17, 17, 18, 18, 19, 19, 20, 20, 21, 21, 22, 22, 23, 23,
+ 24, 24, 25, 25, 26, 26, 27, 27, 28, 28, 29, 29, 30, 30, 31, 31,
+ 32, 32, 33, 33, 34, 34, 35, 35, 36, 36, 37, 37, 38, 38, 39, 39,
+ 40, 40, 41, 41, 42, 42, 43, 43, 44, 44, 45, 45, 46, 46, 47, 47,
+ 48, 48, 49, 49, 50, 50, 51, 51, 52, 52, 53, 53, 54, 54, 55, 55,
+ 56, 56, 57, 57, 58, 58, 59, 59, 60, 60, 61, 61, 62, 62, 63, 63,
+ 64, 64, 65, 65, 66, 66, 67, 67, 68, 68, 69, 69, 70, 70, 71, 71,
+ 72, 72, 73, 73, 74, 74, 75, 75, 76, 76, 77, 77, 78, 78, 79, 79,
+ 80, 80, 81, 81, 82, 82, 83, 83, 84, 84, 85, 85, 86, 86, 87, 87,
+ 88, 88, 89, 89, 90, 90, 91, 91, 92, 92, 93, 93, 94, 94, 95, 95,
+ 96, 96, 97, 97, 98, 98, 99, 99, 100, 100, 101, 101, 102, 102, 103, 103,
+ 104, 104, 105, 105, 106, 106, 107, 107, 108, 108, 109, 109, 110, 110, 111, 111,
+ 112, 112, 113, 113, 114, 114, 115, 115, 116, 116, 117, 117, 118, 118, 119, 119,
+ 120, 120, 121, 121, 122, 122, 123, 123, 124, 124, 125, 125, 126, 126, 127, 127
+ },
+ {
+ 0, 32, 32, 64, 96, 96, 128, 128, 2, 34, 34, 66, 98, 98, 130, 130,
+ 4, 36, 36, 68, 100, 100, 132, 132, 6, 38, 38, 70, 102, 102, 134, 134,
+ 8, 40, 40, 72, 104, 104, 136, 136, 10, 42, 42, 74, 106, 106, 138, 138,
+ 12, 44, 44, 76, 108, 108, 140, 140, 14, 46, 46, 78, 110, 110, 142, 142,
+ 16, 48, 48, 80, 112, 112, 144, 144, 18, 50, 50, 82, 114, 114, 146, 146,
+ 20, 52, 52, 84, 116, 116, 148, 148, 22, 54, 54, 86, 118, 118, 150, 150,
+ 24, 56, 56, 88, 120, 120, 152, 152, 26, 58, 58, 90, 122, 122, 154, 154,
+ 28, 60, 60, 92, 124, 124, 156, 156, 30, 62, 62, 94, 126, 126, 158, 158,
+ 159, 159, 127, 127, 95, 63, 63, 31, 157, 157, 125, 125, 93, 61, 61, 29,
+ 155, 155, 123, 123, 91, 59, 59, 27, 153, 153, 121, 121, 89, 57, 57, 25,
+ 151, 151, 119, 119, 87, 55, 55, 23, 149, 149, 117, 117, 85, 53, 53, 21,
+ 147, 147, 115, 115, 83, 51, 51, 19, 145, 145, 113, 113, 81, 49, 49, 17,
+ 143, 143, 111, 111, 79, 47, 47, 15, 141, 141, 109, 109, 77, 45, 45, 13,
+ 139, 139, 107, 107, 75, 43, 43, 11, 137, 137, 105, 105, 73, 41, 41, 9,
+ 135, 135, 103, 103, 71, 39, 39, 7, 133, 133, 101, 101, 69, 37, 37, 5,
+ 131, 131, 99, 99, 67, 35, 35, 3, 129, 129, 97, 97, 65, 33, 33, 1
+ },
+ {
+ 0, 64, 128, 128, 2, 66, 130, 130, 4, 68, 132, 132, 6, 70, 134, 134,
+ 8, 72, 136, 136, 10, 74, 138, 138, 12, 76, 140, 140, 14, 78, 142, 142,
+ 16, 80, 144, 144, 18, 82, 146, 146, 20, 84, 148, 148, 22, 86, 150, 150,
+ 24, 88, 152, 152, 26, 90, 154, 154, 28, 92, 156, 156, 30, 94, 158, 158,
+ 32, 96, 160, 160, 34, 98, 162, 162, 36, 100, 164, 164, 38, 102, 166, 166,
+ 40, 104, 168, 168, 42, 106, 170, 170, 44, 108, 172, 172, 46, 110, 174, 174,
+ 48, 112, 176, 176, 50, 114, 178, 178, 52, 116, 180, 180, 54, 118, 182, 182,
+ 56, 120, 184, 184, 58, 122, 186, 186, 60, 124, 188, 188, 62, 126, 190, 190,
+ 191, 191, 127, 63, 189, 189, 125, 61, 187, 187, 123, 59, 185, 185, 121, 57,
+ 183, 183, 119, 55, 181, 181, 117, 53, 179, 179, 115, 51, 177, 177, 113, 49,
+ 175, 175, 111, 47, 173, 173, 109, 45, 171, 171, 107, 43, 169, 169, 105, 41,
+ 167, 167, 103, 39, 165, 165, 101, 37, 163, 163, 99, 35, 161, 161, 97, 33,
+ 159, 159, 95, 31, 157, 157, 93, 29, 155, 155, 91, 27, 153, 153, 89, 25,
+ 151, 151, 87, 23, 149, 149, 85, 21, 147, 147, 83, 19, 145, 145, 81, 17,
+ 143, 143, 79, 15, 141, 141, 77, 13, 139, 139, 75, 11, 137, 137, 73, 9,
+ 135, 135, 71, 7, 133, 133, 69, 5, 131, 131, 67, 3, 129, 129, 65, 1
+ },
+ {
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
+ 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
+ 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+ 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
+ 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
+ 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95,
+ 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111,
+ 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127,
+ 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143,
+ 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159,
+ 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175,
+ 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191,
+ 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207,
+ 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223,
+ 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239,
+ 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255
+ }
+};
+
+#endif
+
+// Starts from QUANT_6
+// Scrambled
+static const uint8_t color_scrambled_pquant_to_uquant_q6[6] {
+ 0, 255, 51, 204, 102, 153
+};
+
+static const uint8_t color_scrambled_pquant_to_uquant_q8[8] {
+ 0, 36, 73, 109, 146, 182, 219, 255
+};
+
+static const uint8_t color_scrambled_pquant_to_uquant_q10[10] {
+ 0, 255, 28, 227, 56, 199, 84, 171, 113, 142
+};
+
+static const uint8_t color_scrambled_pquant_to_uquant_q12[12] {
+ 0, 255, 69, 186, 23, 232, 92, 163, 46, 209, 116, 139
+};
+
+static const uint8_t color_scrambled_pquant_to_uquant_q16[16] {
+ 0, 17, 34, 51, 68, 85, 102, 119, 136, 153, 170, 187, 204, 221, 238, 255
+};
+
+static const uint8_t color_scrambled_pquant_to_uquant_q20[20] {
+ 0, 255, 67, 188, 13, 242, 80, 175, 27, 228, 94, 161, 40, 215, 107, 148,
+ 54, 201, 121, 134
+};
+
+static const uint8_t color_scrambled_pquant_to_uquant_q24[24] {
+ 0, 255, 33, 222, 66, 189, 99, 156, 11, 244, 44, 211, 77, 178, 110, 145,
+ 22, 233, 55, 200, 88, 167, 121, 134
+};
+
+static const uint8_t color_scrambled_pquant_to_uquant_q32[32] {
+ 0, 8, 16, 24, 33, 41, 49, 57, 66, 74, 82, 90, 99, 107, 115, 123,
+ 132, 140, 148, 156, 165, 173, 181, 189, 198, 206, 214, 222, 231, 239, 247, 255
+};
+
+static const uint8_t color_scrambled_pquant_to_uquant_q40[40] {
+ 0, 255, 32, 223, 65, 190, 97, 158, 6, 249, 39, 216, 71, 184, 104, 151,
+ 13, 242, 45, 210, 78, 177, 110, 145, 19, 236, 52, 203, 84, 171, 117, 138,
+ 26, 229, 58, 197, 91, 164, 123, 132
+};
+
+static const uint8_t color_scrambled_pquant_to_uquant_q48[48] {
+ 0, 255, 16, 239, 32, 223, 48, 207, 65, 190, 81, 174, 97, 158, 113, 142,
+ 5, 250, 21, 234, 38, 217, 54, 201, 70, 185, 86, 169, 103, 152, 119, 136,
+ 11, 244, 27, 228, 43, 212, 59, 196, 76, 179, 92, 163, 108, 147, 124, 131
+};
+
+static const uint8_t color_scrambled_pquant_to_uquant_q64[64] {
+ 0, 4, 8, 12, 16, 20, 24, 28, 32, 36, 40, 44, 48, 52, 56, 60,
+ 65, 69, 73, 77, 81, 85, 89, 93, 97, 101, 105, 109, 113, 117, 121, 125,
+ 130, 134, 138, 142, 146, 150, 154, 158, 162, 166, 170, 174, 178, 182, 186, 190,
+	195, 199, 203, 207, 211, 215, 219, 223, 227, 231, 235, 239, 243, 247, 251, 255
+};
+
+static const uint8_t color_scrambled_pquant_to_uquant_q80[80] {
+ 0, 255, 16, 239, 32, 223, 48, 207, 64, 191, 80, 175, 96, 159, 112, 143,
+ 3, 252, 19, 236, 35, 220, 51, 204, 67, 188, 83, 172, 100, 155, 116, 139,
+ 6, 249, 22, 233, 38, 217, 54, 201, 71, 184, 87, 168, 103, 152, 119, 136,
+ 9, 246, 25, 230, 42, 213, 58, 197, 74, 181, 90, 165, 106, 149, 122, 133,
+ 13, 242, 29, 226, 45, 210, 61, 194, 77, 178, 93, 162, 109, 146, 125, 130
+};
+
+static const uint8_t color_scrambled_pquant_to_uquant_q96[96] {
+ 0, 255, 8, 247, 16, 239, 24, 231, 32, 223, 40, 215, 48, 207, 56, 199,
+ 64, 191, 72, 183, 80, 175, 88, 167, 96, 159, 104, 151, 112, 143, 120, 135,
+ 2, 253, 10, 245, 18, 237, 26, 229, 35, 220, 43, 212, 51, 204, 59, 196,
+ 67, 188, 75, 180, 83, 172, 91, 164, 99, 156, 107, 148, 115, 140, 123, 132,
+ 5, 250, 13, 242, 21, 234, 29, 226, 37, 218, 45, 210, 53, 202, 61, 194,
+ 70, 185, 78, 177, 86, 169, 94, 161, 102, 153, 110, 145, 118, 137, 126, 129
+};
+
+static const uint8_t color_scrambled_pquant_to_uquant_q128[128] {
+ 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30,
+ 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62,
+ 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94,
+ 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126,
+ 129, 131, 133, 135, 137, 139, 141, 143, 145, 147, 149, 151, 153, 155, 157, 159,
+ 161, 163, 165, 167, 169, 171, 173, 175, 177, 179, 181, 183, 185, 187, 189, 191,
+ 193, 195, 197, 199, 201, 203, 205, 207, 209, 211, 213, 215, 217, 219, 221, 223,
+ 225, 227, 229, 231, 233, 235, 237, 239, 241, 243, 245, 247, 249, 251, 253, 255
+};
+
+static const uint8_t color_scrambled_pquant_to_uquant_q160[160] {
+ 0, 255, 8, 247, 16, 239, 24, 231, 32, 223, 40, 215, 48, 207, 56, 199,
+ 64, 191, 72, 183, 80, 175, 88, 167, 96, 159, 104, 151, 112, 143, 120, 135,
+ 1, 254, 9, 246, 17, 238, 25, 230, 33, 222, 41, 214, 49, 206, 57, 198,
+ 65, 190, 73, 182, 81, 174, 89, 166, 97, 158, 105, 150, 113, 142, 121, 134,
+ 3, 252, 11, 244, 19, 236, 27, 228, 35, 220, 43, 212, 51, 204, 59, 196,
+ 67, 188, 75, 180, 83, 172, 91, 164, 99, 156, 107, 148, 115, 140, 123, 132,
+ 4, 251, 12, 243, 20, 235, 28, 227, 36, 219, 44, 211, 52, 203, 60, 195,
+ 68, 187, 76, 179, 84, 171, 92, 163, 100, 155, 108, 147, 116, 139, 124, 131,
+ 6, 249, 14, 241, 22, 233, 30, 225, 38, 217, 46, 209, 54, 201, 62, 193,
+ 70, 185, 78, 177, 86, 169, 94, 161, 102, 153, 110, 145, 118, 137, 126, 129
+};
+
+static const uint8_t color_scrambled_pquant_to_uquant_q192[192] {
+ 0, 255, 4, 251, 8, 247, 12, 243, 16, 239, 20, 235, 24, 231, 28, 227,
+ 32, 223, 36, 219, 40, 215, 44, 211, 48, 207, 52, 203, 56, 199, 60, 195,
+ 64, 191, 68, 187, 72, 183, 76, 179, 80, 175, 84, 171, 88, 167, 92, 163,
+ 96, 159, 100, 155, 104, 151, 108, 147, 112, 143, 116, 139, 120, 135, 124, 131,
+ 1, 254, 5, 250, 9, 246, 13, 242, 17, 238, 21, 234, 25, 230, 29, 226,
+ 33, 222, 37, 218, 41, 214, 45, 210, 49, 206, 53, 202, 57, 198, 61, 194,
+ 65, 190, 69, 186, 73, 182, 77, 178, 81, 174, 85, 170, 89, 166, 93, 162,
+ 97, 158, 101, 154, 105, 150, 109, 146, 113, 142, 117, 138, 121, 134, 125, 130,
+ 2, 253, 6, 249, 10, 245, 14, 241, 18, 237, 22, 233, 26, 229, 30, 225,
+ 34, 221, 38, 217, 42, 213, 46, 209, 50, 205, 54, 201, 58, 197, 62, 193,
+ 66, 189, 70, 185, 74, 181, 78, 177, 82, 173, 86, 169, 90, 165, 94, 161,
+ 98, 157, 102, 153, 106, 149, 110, 145, 114, 141, 118, 137, 122, 133, 126, 129
+};
+
+static const uint8_t color_scrambled_pquant_to_uquant_q256[256] {
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
+ 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
+ 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+ 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
+ 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
+ 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95,
+ 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111,
+ 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127,
+ 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143,
+ 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159,
+ 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175,
+ 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191,
+ 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207,
+ 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223,
+ 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239,
+ 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255
+};
+
+const uint8_t* color_scrambled_pquant_to_uquant_tables[17] {
+ color_scrambled_pquant_to_uquant_q6,
+ color_scrambled_pquant_to_uquant_q8,
+ color_scrambled_pquant_to_uquant_q10,
+ color_scrambled_pquant_to_uquant_q12,
+ color_scrambled_pquant_to_uquant_q16,
+ color_scrambled_pquant_to_uquant_q20,
+ color_scrambled_pquant_to_uquant_q24,
+ color_scrambled_pquant_to_uquant_q32,
+ color_scrambled_pquant_to_uquant_q40,
+ color_scrambled_pquant_to_uquant_q48,
+ color_scrambled_pquant_to_uquant_q64,
+ color_scrambled_pquant_to_uquant_q80,
+ color_scrambled_pquant_to_uquant_q96,
+ color_scrambled_pquant_to_uquant_q128,
+ color_scrambled_pquant_to_uquant_q160,
+ color_scrambled_pquant_to_uquant_q192,
+ color_scrambled_pquant_to_uquant_q256
+};
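The three table families in this file are tied together by a simple invariant: packing an 8-bit value through color_uquant_to_scrambled_pquant_tables and then unpacking it through the matching row here lands on the same value that color_unquant_to_uquant_tables would snap it to. A hedged verification sketch (compress-only, since the packing tables sit behind the ASTCENC_DECOMPRESS_ONLY guard above; the function name and 17-level loop bound follow the layout shown, nothing more):

    #if !defined(ASTCENC_DECOMPRESS_ONLY)
    // Sketch only: check the pack/unpack round trip for every level and input value.
    static bool check_quant_table_round_trip()
    {
        for (int level = 0; level < 17; level++)
        {
            for (int v = 0; v < 256; v++)
            {
                uint8_t packed = color_uquant_to_scrambled_pquant_tables[level][v];
                uint8_t unpacked = color_scrambled_pquant_to_uquant_tables[level][packed];
                if (unpacked != color_unquant_to_uquant_tables[level][v])
                {
                    return false;
                }
            }
        }
        return true;
    }
    #endif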
+
+// quant_mode_table[integer_count/2][bits] gives the quantization level to use for a given integer
+// count and the number of bits those integers must fit into; -1 marks combinations that do not fit.
+const int8_t quant_mode_table[10][128] {
+ {
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1
+ },
+ {
+ -1, -1, 0, 0, 2, 3, 5, 6, 8, 9, 11, 12, 14, 15, 17, 18,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20
+ },
+ {
+ -1, -1, -1, -1, 0, 0, 0, 1, 2, 2, 3, 4, 5, 5, 6, 7,
+ 8, 8, 9, 10, 11, 11, 12, 13, 14, 14, 15, 16, 17, 17, 18, 19,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20
+ },
+ {
+ -1, -1, -1, -1, -1, -1, 0, 0, 0, 0, 1, 1, 2, 2, 3, 3,
+ 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11,
+ 12, 12, 13, 13, 14, 14, 15, 15, 16, 16, 17, 17, 18, 18, 19, 19,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20
+ },
+ {
+ -1, -1, -1, -1, -1, -1, -1, -1, 0, 0, 0, 0, 0, 1, 1, 1,
+ 2, 2, 2, 3, 3, 4, 4, 4, 5, 5, 5, 6, 6, 7, 7, 7,
+ 8, 8, 8, 9, 9, 10, 10, 10, 11, 11, 11, 12, 12, 13, 13, 13,
+ 14, 14, 14, 15, 15, 16, 16, 16, 17, 17, 17, 18, 18, 19, 19, 19,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20
+ },
+ {
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 0, 0, 0, 0, 0, 0,
+ 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 4, 4, 4, 4, 5, 5,
+ 5, 5, 6, 6, 7, 7, 7, 7, 8, 8, 8, 8, 9, 9, 10, 10,
+ 10, 10, 11, 11, 11, 11, 12, 12, 13, 13, 13, 13, 14, 14, 14, 14,
+ 15, 15, 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 19, 19, 19, 19,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20
+ },
+ {
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 0, 0, 0, 0,
+ 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3,
+ 4, 4, 4, 4, 5, 5, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7,
+ 8, 8, 8, 8, 9, 9, 9, 9, 10, 10, 10, 10, 11, 11, 11, 11,
+ 12, 12, 12, 12, 13, 13, 13, 13, 14, 14, 14, 14, 15, 15, 15, 15,
+ 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 18, 19, 19, 19, 19,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20
+ },
+ {
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 2, 2, 2, 2,
+ 2, 3, 3, 3, 3, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 6,
+ 6, 6, 6, 7, 7, 7, 7, 7, 8, 8, 8, 8, 8, 9, 9, 9,
+ 9, 10, 10, 10, 10, 10, 11, 11, 11, 11, 11, 12, 12, 12, 12, 13,
+ 13, 13, 13, 13, 14, 14, 14, 14, 14, 15, 15, 15, 15, 16, 16, 16,
+ 16, 16, 17, 17, 17, 17, 17, 18, 18, 18, 18, 19, 19, 19, 19, 19,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20
+ },
+ {
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1,
+ 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4,
+ 5, 5, 5, 5, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7, 7, 7,
+ 8, 8, 8, 8, 8, 8, 9, 9, 9, 9, 10, 10, 10, 10, 10, 10,
+ 11, 11, 11, 11, 11, 11, 12, 12, 12, 12, 13, 13, 13, 13, 13, 13,
+ 14, 14, 14, 14, 14, 14, 15, 15, 15, 15, 16, 16, 16, 16, 16, 16,
+ 17, 17, 17, 17, 17, 17, 18, 18, 18, 18, 19, 19, 19, 19, 19, 19
+ },
+ {
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1,
+ 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 4,
+ 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 6, 6, 6, 6,
+ 6, 7, 7, 7, 7, 7, 7, 7, 8, 8, 8, 8, 8, 8, 9, 9,
+ 9, 9, 9, 10, 10, 10, 10, 10, 10, 10, 11, 11, 11, 11, 11, 11,
+ 12, 12, 12, 12, 12, 13, 13, 13, 13, 13, 13, 13, 14, 14, 14, 14,
+ 14, 14, 15, 15, 15, 15, 15, 16, 16, 16, 16, 16, 16, 16, 17, 17
+ }
+};
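A small worked lookup for the table above, under the assumption that the quant_method enum follows the usual astcenc ladder (QUANT_2 == 0 up to QUANT_256 == 20, so index 8 is QUANT_16); this is an illustrative sketch, not code from the upstream file.

    // Sketch only: choose a level for two color integers and an 8-bit budget.
    static int level_for_two_ints_in_8_bits()
    {
        // quant_mode_table[2 / 2][8] == 8; under the assumed enum ordering that
        // is QUANT_16, i.e. 4 bits per value, exactly filling the 8 bits.
        // A budget of 1 bit would instead return -1: nothing fits.
        return quant_mode_table[2 / 2][8];
    }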
diff --git a/thirdparty/astcenc/astcenc_symbolic_physical.cpp b/thirdparty/astcenc/astcenc_symbolic_physical.cpp
new file mode 100644
index 0000000000..80221a6013
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_symbolic_physical.cpp
@@ -0,0 +1,534 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2011-2021 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+/**
+ * @brief Functions for converting between symbolic and physical encodings.
+ */
+
+#include "astcenc_internal.h"
+
+#include <cassert>
+
+/**
+ * @brief Write up to 8 bits at an arbitrary bit offset.
+ *
+ * The stored value is at most 8 bits, but can be stored at an offset of between 0 and 7 bits so
+ * may span two separate bytes in memory.
+ *
+ * @param value The value to write.
+ * @param bitcount The number of bits to write, starting from LSB.
+ * @param bitoffset The bit offset to store at, between 0 and 7.
+ * @param[in,out] ptr The data pointer to write to.
+ */
+static inline void write_bits(
+ int value,
+ int bitcount,
+ int bitoffset,
+ uint8_t* ptr
+) {
+ int mask = (1 << bitcount) - 1;
+ value &= mask;
+ ptr += bitoffset >> 3;
+ bitoffset &= 7;
+ value <<= bitoffset;
+ mask <<= bitoffset;
+ mask = ~mask;
+
+ ptr[0] &= mask;
+ ptr[0] |= value;
+ ptr[1] &= mask >> 8;
+ ptr[1] |= value >> 8;
+}
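To make the straddling behaviour concrete, here is a hedged worked example of a write that crosses a byte boundary: five bits written at bit offset 6 land in the top two bits of the first byte and the low three bits of the second. The wrapper function is illustrative only.

    // Sketch only: a write_bits() call that straddles two bytes.
    static void check_write_bits_straddle()
    {
        uint8_t data[2] { 0, 0 };
        write_bits(0x15, 5, 6, data);   // five bits of 0b10101 at offset 6
        assert(data[0] == 0x40);        // low two value bits (0b01) in bits 6..7
        assert(data[1] == 0x05);        // high three value bits (0b101) in bits 0..2
    }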
+
+/**
+ * @brief Read up to 8 bits at an arbitrary bit offset.
+ *
+ * The stored value is at most 8 bits, but can be stored at an offset of between 0 and 7 bits so may
+ * span two separate bytes in memory.
+ *
+ * @param bitcount The number of bits to read.
+ * @param bitoffset The bit offset to read from, between 0 and 7.
+ * @param[in] ptr The data pointer to read from.
+ *
+ * @return The read value.
+ */
+static inline int read_bits(
+ int bitcount,
+ int bitoffset,
+ const uint8_t* ptr
+) {
+ int mask = (1 << bitcount) - 1;
+ ptr += bitoffset >> 3;
+ bitoffset &= 7;
+ int value = ptr[0] | (ptr[1] << 8);
+ value >>= bitoffset;
+ value &= mask;
+ return value;
+}
+
+/**
+ * @brief Reverse bits in a byte.
+ *
+ * @param p The value to reverse.
+ *
+ * @return The reversed result.
+ */
+static inline int bitrev8(int p)
+{
+ p = ((p & 0x0F) << 4) | ((p >> 4) & 0x0F);
+ p = ((p & 0x33) << 2) | ((p >> 2) & 0x33);
+ p = ((p & 0x55) << 1) | ((p >> 1) & 0x55);
+ return p;
+}
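bitrev8() is what allows the weight bits to be mirrored into the top of the 128-bit block in the packing loop further down. A couple of illustrative spot checks (the helper function name is not from the upstream file; <cassert> is already included above):

    // Sketch only: the nibble/pair/bit swaps reverse the whole byte,
    // and applying the function twice returns the original value.
    static void check_bitrev8()
    {
        assert(bitrev8(0x01) == 0x80);
        assert(bitrev8(0xD0) == 0x0B);
        assert(bitrev8(bitrev8(0x37)) == 0x37);
    }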
+
+/* See header for documentation. */
+void symbolic_to_physical(
+ const block_size_descriptor& bsd,
+ const symbolic_compressed_block& scb,
+ physical_compressed_block& pcb
+) {
+ assert(scb.block_type != SYM_BTYPE_ERROR);
+
+ // Constant color block using UNORM16 colors
+ if (scb.block_type == SYM_BTYPE_CONST_U16)
+ {
+ // There is currently no attempt to coalesce larger void-extents
+ static const uint8_t cbytes[8] { 0xFC, 0xFD, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF };
+ for (unsigned int i = 0; i < 8; i++)
+ {
+ pcb.data[i] = cbytes[i];
+ }
+
+ for (unsigned int i = 0; i < BLOCK_MAX_COMPONENTS; i++)
+ {
+ pcb.data[2 * i + 8] = scb.constant_color[i] & 0xFF;
+ pcb.data[2 * i + 9] = (scb.constant_color[i] >> 8) & 0xFF;
+ }
+
+ return;
+ }
+
+ // Constant color block using FP16 colors
+ if (scb.block_type == SYM_BTYPE_CONST_F16)
+ {
+ // There is currently no attempt to coalesce larger void-extents
+ static const uint8_t cbytes[8] { 0xFC, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF };
+ for (unsigned int i = 0; i < 8; i++)
+ {
+ pcb.data[i] = cbytes[i];
+ }
+
+ for (unsigned int i = 0; i < BLOCK_MAX_COMPONENTS; i++)
+ {
+ pcb.data[2 * i + 8] = scb.constant_color[i] & 0xFF;
+ pcb.data[2 * i + 9] = (scb.constant_color[i] >> 8) & 0xFF;
+ }
+
+ return;
+ }
+
+ unsigned int partition_count = scb.partition_count;
+
+ // Compress the weights.
+ // They are encoded as an ordinary integer-sequence, then bit-reversed
+ uint8_t weightbuf[16] { 0 };
+
+ const auto& bm = bsd.get_block_mode(scb.block_mode);
+ const auto& di = bsd.get_decimation_info(bm.decimation_mode);
+ int weight_count = di.weight_count;
+ quant_method weight_quant_method = bm.get_weight_quant_mode();
+ float weight_quant_levels = static_cast<float>(get_quant_level(weight_quant_method));
+ int is_dual_plane = bm.is_dual_plane;
+
+ const auto& qat = quant_and_xfer_tables[weight_quant_method];
+
+ int real_weight_count = is_dual_plane ? 2 * weight_count : weight_count;
+
+ int bits_for_weights = get_ise_sequence_bitcount(real_weight_count, weight_quant_method);
+
+ uint8_t weights[64];
+ if (is_dual_plane)
+ {
+ for (int i = 0; i < weight_count; i++)
+ {
+ float uqw = static_cast<float>(scb.weights[i]);
+ float qw = (uqw / 64.0f) * (weight_quant_levels - 1.0f);
+ int qwi = static_cast<int>(qw + 0.5f);
+ weights[2 * i] = qat.scramble_map[qwi];
+
+ uqw = static_cast<float>(scb.weights[i + WEIGHTS_PLANE2_OFFSET]);
+ qw = (uqw / 64.0f) * (weight_quant_levels - 1.0f);
+ qwi = static_cast<int>(qw + 0.5f);
+ weights[2 * i + 1] = qat.scramble_map[qwi];
+ }
+ }
+ else
+ {
+ for (int i = 0; i < weight_count; i++)
+ {
+ float uqw = static_cast<float>(scb.weights[i]);
+ float qw = (uqw / 64.0f) * (weight_quant_levels - 1.0f);
+ int qwi = static_cast<int>(qw + 0.5f);
+ weights[i] = qat.scramble_map[qwi];
+ }
+ }
+
+ encode_ise(weight_quant_method, real_weight_count, weights, weightbuf, 0);
+
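+ // The weight bitstream occupies the top of the 128-bit block, growing
+ // downwards from the MSB in reverse bit order, so reverse the ISE byte
+ // stream and the bits within each byte while copying it into place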
+ for (int i = 0; i < 16; i++)
+ {
+ pcb.data[i] = static_cast<uint8_t>(bitrev8(weightbuf[15 - i]));
+ }
+
+ write_bits(scb.block_mode, 11, 0, pcb.data);
+ write_bits(partition_count - 1, 2, 11, pcb.data);
+
+ int below_weights_pos = 128 - bits_for_weights;
+
+ // Encode partition index and color endpoint types for blocks with 2+ partitions
+ if (partition_count > 1)
+ {
+ write_bits(scb.partition_index, 6, 13, pcb.data);
+ write_bits(scb.partition_index >> 6, PARTITION_INDEX_BITS - 6, 19, pcb.data);
+
+ if (scb.color_formats_matched)
+ {
+ write_bits(scb.color_formats[0] << 2, 6, 13 + PARTITION_INDEX_BITS, pcb.data);
+ }
+ else
+ {
+ // Check endpoint types for each partition to determine the lowest class present
+ int low_class = 4;
+
+ for (unsigned int i = 0; i < partition_count; i++)
+ {
+ int class_of_format = scb.color_formats[i] >> 2;
+ low_class = astc::min(class_of_format, low_class);
+ }
+
+ if (low_class == 3)
+ {
+ low_class = 2;
+ }
+
+ int encoded_type = low_class + 1;
+ int bitpos = 2;
+
+ for (unsigned int i = 0; i < partition_count; i++)
+ {
+ int classbit_of_format = (scb.color_formats[i] >> 2) - low_class;
+ encoded_type |= classbit_of_format << bitpos;
+ bitpos++;
+ }
+
+ for (unsigned int i = 0; i < partition_count; i++)
+ {
+ int lowbits_of_format = scb.color_formats[i] & 3;
+ encoded_type |= lowbits_of_format << bitpos;
+ bitpos += 2;
+ }
+
+ int encoded_type_lowpart = encoded_type & 0x3F;
+ int encoded_type_highpart = encoded_type >> 6;
+ int encoded_type_highpart_size = (3 * partition_count) - 4;
+ int encoded_type_highpart_pos = 128 - bits_for_weights - encoded_type_highpart_size;
+ write_bits(encoded_type_lowpart, 6, 13 + PARTITION_INDEX_BITS, pcb.data);
+ write_bits(encoded_type_highpart, encoded_type_highpart_size, encoded_type_highpart_pos, pcb.data);
+ below_weights_pos -= encoded_type_highpart_size;
+ }
+ }
+ else
+ {
+ write_bits(scb.color_formats[0], 4, 13, pcb.data);
+ }
+
+ // In dual-plane mode, encode the color component of the second plane of weights
+ if (is_dual_plane)
+ {
+ write_bits(scb.plane2_component, 2, below_weights_pos - 2, pcb.data);
+ }
+
+ // Encode the color components
+ uint8_t values_to_encode[32];
+ int valuecount_to_encode = 0;
+
+ const uint8_t* pack_table = color_uquant_to_scrambled_pquant_tables[scb.quant_mode - QUANT_6];
+ for (unsigned int i = 0; i < scb.partition_count; i++)
+ {
+ int vals = 2 * (scb.color_formats[i] >> 2) + 2;
+ assert(vals <= 8);
+ for (int j = 0; j < vals; j++)
+ {
+ values_to_encode[j + valuecount_to_encode] = pack_table[scb.color_values[i][j]];
+ }
+ valuecount_to_encode += vals;
+ }
+
+ encode_ise(scb.get_color_quant_mode(), valuecount_to_encode, values_to_encode, pcb.data,
+ scb.partition_count == 1 ? 17 : 19 + PARTITION_INDEX_BITS);
+}
+
+/* See header for documentation. */
+void physical_to_symbolic(
+ const block_size_descriptor& bsd,
+ const physical_compressed_block& pcb,
+ symbolic_compressed_block& scb
+) {
+ uint8_t bswapped[16];
+
+ scb.block_type = SYM_BTYPE_NONCONST;
+
+ // Extract header fields
+ int block_mode = read_bits(11, 0, pcb.data);
+ if ((block_mode & 0x1FF) == 0x1FC)
+ {
+ // Constant color block
+
+ // Check what format the data has
+ if (block_mode & 0x200)
+ {
+ scb.block_type = SYM_BTYPE_CONST_F16;
+ }
+ else
+ {
+ scb.block_type = SYM_BTYPE_CONST_U16;
+ }
+
+ scb.partition_count = 0;
+ for (int i = 0; i < 4; i++)
+ {
+ scb.constant_color[i] = pcb.data[2 * i + 8] | (pcb.data[2 * i + 9] << 8);
+ }
+
+ // Additionally, check that the void-extent coordinates are valid
+ if (bsd.zdim == 1)
+ {
+ // 2D void-extent
+ int rsvbits = read_bits(2, 10, pcb.data);
+ if (rsvbits != 3)
+ {
+ scb.block_type = SYM_BTYPE_ERROR;
+ return;
+ }
+
+ int vx_low_s = read_bits(8, 12, pcb.data) | (read_bits(5, 12 + 8, pcb.data) << 8);
+ int vx_high_s = read_bits(8, 25, pcb.data) | (read_bits(5, 25 + 8, pcb.data) << 8);
+ int vx_low_t = read_bits(8, 38, pcb.data) | (read_bits(5, 38 + 8, pcb.data) << 8);
+ int vx_high_t = read_bits(8, 51, pcb.data) | (read_bits(5, 51 + 8, pcb.data) << 8);
+
+ int all_ones = vx_low_s == 0x1FFF && vx_high_s == 0x1FFF && vx_low_t == 0x1FFF && vx_high_t == 0x1FFF;
+
+ if ((vx_low_s >= vx_high_s || vx_low_t >= vx_high_t) && !all_ones)
+ {
+ scb.block_type = SYM_BTYPE_ERROR;
+ return;
+ }
+ }
+ else
+ {
+ // 3D void-extent
+ int vx_low_s = read_bits(9, 10, pcb.data);
+ int vx_high_s = read_bits(9, 19, pcb.data);
+ int vx_low_t = read_bits(9, 28, pcb.data);
+ int vx_high_t = read_bits(9, 37, pcb.data);
+ int vx_low_p = read_bits(9, 46, pcb.data);
+ int vx_high_p = read_bits(9, 55, pcb.data);
+
+ int all_ones = vx_low_s == 0x1FF && vx_high_s == 0x1FF && vx_low_t == 0x1FF && vx_high_t == 0x1FF && vx_low_p == 0x1FF && vx_high_p == 0x1FF;
+
+ if ((vx_low_s >= vx_high_s || vx_low_t >= vx_high_t || vx_low_p >= vx_high_p) && !all_ones)
+ {
+ scb.block_type = SYM_BTYPE_ERROR;
+ return;
+ }
+ }
+
+ return;
+ }
+
+ unsigned int packed_index = bsd.block_mode_packed_index[block_mode];
+ if (packed_index == BLOCK_BAD_BLOCK_MODE)
+ {
+ scb.block_type = SYM_BTYPE_ERROR;
+ return;
+ }
+
+ const auto& bm = bsd.get_block_mode(block_mode);
+ const auto& di = bsd.get_decimation_info(bm.decimation_mode);
+
+ int weight_count = di.weight_count;
+ promise(weight_count > 0);
+
+ quant_method weight_quant_method = static_cast<quant_method>(bm.quant_mode);
+ int is_dual_plane = bm.is_dual_plane;
+
+ int real_weight_count = is_dual_plane ? 2 * weight_count : weight_count;
+
+ int partition_count = read_bits(2, 11, pcb.data) + 1;
+ promise(partition_count > 0);
+
+ scb.block_mode = static_cast<uint16_t>(block_mode);
+ scb.partition_count = static_cast<uint8_t>(partition_count);
+
+ for (int i = 0; i < 16; i++)
+ {
+ bswapped[i] = static_cast<uint8_t>(bitrev8(pcb.data[15 - i]));
+ }
+
+ int bits_for_weights = get_ise_sequence_bitcount(real_weight_count, weight_quant_method);
+
+ int below_weights_pos = 128 - bits_for_weights;
+
+ uint8_t indices[64];
+ const auto& qat = quant_and_xfer_tables[weight_quant_method];
+
+ decode_ise(weight_quant_method, real_weight_count, bswapped, indices, 0);
+
+ if (is_dual_plane)
+ {
+ for (int i = 0; i < weight_count; i++)
+ {
+ scb.weights[i] = qat.unscramble_and_unquant_map[indices[2 * i]];
+ scb.weights[i + WEIGHTS_PLANE2_OFFSET] = qat.unscramble_and_unquant_map[indices[2 * i + 1]];
+ }
+ }
+ else
+ {
+ for (int i = 0; i < weight_count; i++)
+ {
+ scb.weights[i] = qat.unscramble_and_unquant_map[indices[i]];
+ }
+ }
+
+ if (is_dual_plane && partition_count == 4)
+ {
+ scb.block_type = SYM_BTYPE_ERROR;
+ return;
+ }
+
+ scb.color_formats_matched = 0;
+
+ // Determine the format of each endpoint pair
+ int color_formats[BLOCK_MAX_PARTITIONS];
+ int encoded_type_highpart_size = 0;
+ if (partition_count == 1)
+ {
+ color_formats[0] = read_bits(4, 13, pcb.data);
+ scb.partition_index = 0;
+ }
+ else
+ {
+ encoded_type_highpart_size = (3 * partition_count) - 4;
+ below_weights_pos -= encoded_type_highpart_size;
+ int encoded_type = read_bits(6, 13 + PARTITION_INDEX_BITS, pcb.data) | (read_bits(encoded_type_highpart_size, below_weights_pos, pcb.data) << 6);
+ int baseclass = encoded_type & 0x3;
+ if (baseclass == 0)
+ {
+ for (int i = 0; i < partition_count; i++)
+ {
+ color_formats[i] = (encoded_type >> 2) & 0xF;
+ }
+
+ below_weights_pos += encoded_type_highpart_size;
+ scb.color_formats_matched = 1;
+ encoded_type_highpart_size = 0;
+ }
+ else
+ {
+ int bitpos = 2;
+ baseclass--;
+
+ for (int i = 0; i < partition_count; i++)
+ {
+ color_formats[i] = (((encoded_type >> bitpos) & 1) + baseclass) << 2;
+ bitpos++;
+ }
+
+ for (int i = 0; i < partition_count; i++)
+ {
+ color_formats[i] |= (encoded_type >> bitpos) & 3;
+ bitpos += 2;
+ }
+ }
+ scb.partition_index = static_cast<uint16_t>(read_bits(6, 13, pcb.data) | (read_bits(PARTITION_INDEX_BITS - 6, 19, pcb.data) << 6));
+ }
+
+ for (int i = 0; i < partition_count; i++)
+ {
+ scb.color_formats[i] = static_cast<uint8_t>(color_formats[i]);
+ }
+
+ // Determine number of color endpoint integers
+ int color_integer_count = 0;
+ for (int i = 0; i < partition_count; i++)
+ {
+ int endpoint_class = color_formats[i] >> 2;
+ color_integer_count += (endpoint_class + 1) * 2;
+ }
+
+ if (color_integer_count > 18)
+ {
+ scb.block_type = SYM_BTYPE_ERROR;
+ return;
+ }
+
+ // Determine the color endpoint quantization level to use
+ static const int color_bits_arr[5] { -1, 115 - 4, 113 - 4 - PARTITION_INDEX_BITS, 113 - 4 - PARTITION_INDEX_BITS, 113 - 4 - PARTITION_INDEX_BITS };
+ int color_bits = color_bits_arr[partition_count] - bits_for_weights - encoded_type_highpart_size;
+ if (is_dual_plane)
+ {
+ color_bits -= 2;
+ }
+
+ if (color_bits < 0)
+ {
+ color_bits = 0;
+ }
+
+ int color_quant_level = quant_mode_table[color_integer_count >> 1][color_bits];
+ if (color_quant_level < QUANT_6)
+ {
+ scb.block_type = SYM_BTYPE_ERROR;
+ return;
+ }
+
+ // Unpack the integer color values and assign to endpoints
+ scb.quant_mode = static_cast<quant_method>(color_quant_level);
+
+ uint8_t values_to_decode[32];
+ decode_ise(static_cast<quant_method>(color_quant_level), color_integer_count, pcb.data,
+ values_to_decode, (partition_count == 1 ? 17 : 19 + PARTITION_INDEX_BITS));
+
+ int valuecount_to_decode = 0;
+ const uint8_t* unpack_table = color_scrambled_pquant_to_uquant_tables[scb.quant_mode - QUANT_6];
+ for (int i = 0; i < partition_count; i++)
+ {
+ int vals = 2 * (color_formats[i] >> 2) + 2;
+ for (int j = 0; j < vals; j++)
+ {
+ scb.color_values[i][j] = unpack_table[values_to_decode[j + valuecount_to_decode]];
+ }
+ valuecount_to_decode += vals;
+ }
+
+ // Fetch the component used by the second plane of weights when dual-plane mode is active.
+ scb.plane2_component = -1;
+ if (is_dual_plane)
+ {
+ scb.plane2_component = static_cast<int8_t>(read_bits(2, below_weights_pos - 2, pcb.data));
+ }
+}
diff --git a/thirdparty/astcenc/astcenc_vecmathlib.h b/thirdparty/astcenc/astcenc_vecmathlib.h
new file mode 100644
index 0000000000..d48f1d73ea
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_vecmathlib.h
@@ -0,0 +1,570 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2019-2022 Arm Limited
+// Copyright 2008 Jose Fonseca
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+/*
+ * This module implements vector support for floats, ints, and vector lane
+ * control masks. It provides access to both explicit vector width types, and
+ * flexible N-wide types where N can be determined at compile time.
+ *
+ * The design of this module encourages use of vector length agnostic code, via
+ * the vint, vfloat, and vmask types. These will take on the widest SIMD vector
+ * width that is available at compile time. The current vector width is
+ * accessible for e.g. loop strides via the ASTCENC_SIMD_WIDTH constant.
+ *
+ * Explicit scalar types are accessible via the vint1, vfloat1, vmask1 types.
+ * These are provided primarily for prototyping and algorithm debug of VLA
+ * implementations.
+ *
+ * Explicit 4-wide types are accessible via the vint4, vfloat4, and vmask4
+ * types. These are provided for use by VLA code, but are also expected to be
+ * used as a fixed-width type and are supported by a reference C++ fallback for
+ * use on platforms without SIMD intrinsics.
+ *
+ * Explicit 8-wide types are accessible via the vint8, vfloat8, and vmask8
+ * types. These are provided for use by VLA code, and are not expected to be
+ * used as a fixed-width type in normal code. No reference C implementation is
+ * provided on platforms without underlying SIMD intrinsics.
+ *
+ * With the current implementation ISA support is provided for:
+ *
+ * * 1-wide for scalar reference.
+ * * 4-wide for Armv8-A NEON.
+ * * 4-wide for x86-64 SSE2.
+ * * 4-wide for x86-64 SSE4.1.
+ * * 8-wide for x86-64 AVX2.
+ */
+
+#ifndef ASTC_VECMATHLIB_H_INCLUDED
+#define ASTC_VECMATHLIB_H_INCLUDED
+
+#if ASTCENC_SSE != 0 || ASTCENC_AVX != 0
+ #include <immintrin.h>
+#elif ASTCENC_NEON != 0
+ #include <arm_neon.h>
+#endif
+
+#if !defined(__clang__) && defined(_MSC_VER)
+ #define ASTCENC_SIMD_INLINE __forceinline
+ #define ASTCENC_NO_INLINE
+#elif defined(__GNUC__) && !defined(__clang__)
+ #define ASTCENC_SIMD_INLINE __attribute__((always_inline)) inline
+ #define ASTCENC_NO_INLINE __attribute__ ((noinline))
+#else
+ #define ASTCENC_SIMD_INLINE __attribute__((always_inline, nodebug)) inline
+ #define ASTCENC_NO_INLINE __attribute__ ((noinline))
+#endif
+
+#if ASTCENC_AVX >= 2
+ /* If we have AVX2 expose 8-wide VLA. */
+ #include "astcenc_vecmathlib_sse_4.h"
+ #include "astcenc_vecmathlib_common_4.h"
+ #include "astcenc_vecmathlib_avx2_8.h"
+
+ #define ASTCENC_SIMD_WIDTH 8
+
+ using vfloat = vfloat8;
+
+ #if defined(ASTCENC_NO_INVARIANCE)
+ using vfloatacc = vfloat8;
+ #else
+ using vfloatacc = vfloat4;
+ #endif
+
+ using vint = vint8;
+ using vmask = vmask8;
+
+ constexpr auto loada = vfloat8::loada;
+ constexpr auto load1 = vfloat8::load1;
+
+#elif ASTCENC_SSE >= 20
+ /* If we have SSE expose 4-wide VLA, and 4-wide fixed width. */
+ #include "astcenc_vecmathlib_sse_4.h"
+ #include "astcenc_vecmathlib_common_4.h"
+
+ #define ASTCENC_SIMD_WIDTH 4
+
+ using vfloat = vfloat4;
+ using vfloatacc = vfloat4;
+ using vint = vint4;
+ using vmask = vmask4;
+
+ constexpr auto loada = vfloat4::loada;
+ constexpr auto load1 = vfloat4::load1;
+
+#elif ASTCENC_NEON > 0
+ /* If we have NEON expose 4-wide VLA. */
+ #include "astcenc_vecmathlib_neon_4.h"
+ #include "astcenc_vecmathlib_common_4.h"
+
+ #define ASTCENC_SIMD_WIDTH 4
+
+ using vfloat = vfloat4;
+ using vfloatacc = vfloat4;
+ using vint = vint4;
+ using vmask = vmask4;
+
+ constexpr auto loada = vfloat4::loada;
+ constexpr auto load1 = vfloat4::load1;
+
+#else
+ // If we have nothing expose 4-wide VLA, and 4-wide fixed width.
+
+ // Note: We no longer expose the 1-wide scalar fallback because it is not
+ // invariant with the 4-wide path due to algorithms that use horizontal
+ // operations that accumulate a local vector sum before accumulating into
+ // a running sum.
+ //
+ // For 4 items adding into an accumulator using 1-wide vectors the sum is:
+ //
+ // result = ((((sum + l0) + l1) + l2) + l3)
+ //
+ // ... whereas the accumulator for a 4-wide vector sum is:
+ //
+ // result = sum + ((l0 + l2) + (l1 + l3))
+ //
+ // In "normal maths" this is the same, but the floating point reassociation
+ // differences mean that these will not produce the same result.
+
+ #include "astcenc_vecmathlib_none_4.h"
+ #include "astcenc_vecmathlib_common_4.h"
+
+ #define ASTCENC_SIMD_WIDTH 4
+
+ using vfloat = vfloat4;
+ using vfloatacc = vfloat4;
+ using vint = vint4;
+ using vmask = vmask4;
+
+ constexpr auto loada = vfloat4::loada;
+ constexpr auto load1 = vfloat4::load1;
+#endif
+
+/**
+ * @brief Round a count down to the largest multiple of 8.
+ *
+ * @param count The unrounded value.
+ *
+ * @return The rounded value.
+ */
+ASTCENC_SIMD_INLINE unsigned int round_down_to_simd_multiple_8(unsigned int count)
+{
+ return count & static_cast<unsigned int>(~(8 - 1));
+}
+
+/**
+ * @brief Round a count down to the largest multiple of 4.
+ *
+ * @param count The unrounded value.
+ *
+ * @return The rounded value.
+ */
+ASTCENC_SIMD_INLINE unsigned int round_down_to_simd_multiple_4(unsigned int count)
+{
+ return count & static_cast<unsigned int>(~(4 - 1));
+}
+
+/**
+ * @brief Round a count down to the largest multiple of the SIMD width.
+ *
+ * Assumes that the vector width is a power of two.
+ *
+ * @param count The unrounded value.
+ *
+ * @return The rounded value.
+ */
+ASTCENC_SIMD_INLINE unsigned int round_down_to_simd_multiple_vla(unsigned int count)
+{
+ return count & static_cast<unsigned int>(~(ASTCENC_SIMD_WIDTH - 1));
+}
+
+/**
+ * @brief Round a count up to the next multiple of the SIMD width.
+ *
+ * Assumes that the vector width is a power of two.
+ *
+ * @param count The unrounded value.
+ *
+ * @return The rounded value.
+ */
+ASTCENC_SIMD_INLINE unsigned int round_up_to_simd_multiple_vla(unsigned int count)
+{
+ unsigned int multiples = (count + ASTCENC_SIMD_WIDTH - 1) / ASTCENC_SIMD_WIDTH;
+ return multiples * ASTCENC_SIMD_WIDTH;
+}
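+
+// Usage sketch (informal, not used by the library itself): a vector-length
+// agnostic accumulation loop written against the VLA types, with the tail
+// handled in scalar code. Assumes @c data is aligned to the vector length so
+// that loada() is safe, and relies on the haccumulate() and hadd_s() helpers
+// provided by the other vecmathlib headers.
+//
+//     vfloatacc accum = vfloatacc::zero();
+//     unsigned int clipped = round_down_to_simd_multiple_vla(count);
+//     for (unsigned int i = 0; i < clipped; i += ASTCENC_SIMD_WIDTH)
+//     {
+//         haccumulate(accum, loada(data + i));
+//     }
+//
+//     float sum = hadd_s(accum);
+//     for (unsigned int i = clipped; i < count; i++)
+//     {
+//         sum += data[i];
+//     }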
+
+/**
+ * @brief Return @c a with lanes negated if the @c b lane is negative.
+ */
+ASTCENC_SIMD_INLINE vfloat change_sign(vfloat a, vfloat b)
+{
+ vint ia = float_as_int(a);
+ vint ib = float_as_int(b);
+ vint sign_mask(static_cast<int>(0x80000000));
+ vint r = ia ^ (ib & sign_mask);
+ return int_as_float(r);
+}
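+
+// Worked example (informal): change_sign(vfloat(1.0f), vfloat(-2.0f)) returns
+// -1.0f in every lane; a @c b with a clear sign bit leaves @c a unchanged.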
+
+/**
+ * @brief Return fast, but approximate, vector atan(x).
+ *
+ * Max error of this implementation is 0.004883.
+ */
+ASTCENC_SIMD_INLINE vfloat atan(vfloat x)
+{
+ vmask c = abs(x) > vfloat(1.0f);
+ vfloat z = change_sign(vfloat(astc::PI_OVER_TWO), x);
+ vfloat y = select(x, vfloat(1.0f) / x, c);
+ y = y / (y * y * vfloat(0.28f) + vfloat(1.0f));
+ return select(y, z - y, c);
+}
+
+/**
+ * @brief Return fast, but approximate, vector atan2(y, x).
+ */
+ASTCENC_SIMD_INLINE vfloat atan2(vfloat y, vfloat x)
+{
+ vfloat z = atan(abs(y / x));
+ vmask xmask = vmask(float_as_int(x).m);
+ return change_sign(select_msb(z, vfloat(astc::PI) - z, xmask), y);
+}
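+
+// Worked example (informal): atan2(vfloat(1.0f), vfloat(1.0f)) returns roughly
+// 0.781 per lane, close to the exact PI / 4 (~0.785) within the atan() error bound.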
+
+/**
+ * @brief Factory that returns a unit length 4 component vfloat4.
+ */
+static ASTCENC_SIMD_INLINE vfloat4 unit4()
+{
+ return vfloat4(0.5f);
+}
+
+/**
+ * @brief Factory that returns a unit length 3 component vfloat4.
+ */
+static ASTCENC_SIMD_INLINE vfloat4 unit3()
+{
+ float val = 0.577350258827209473f;
+ return vfloat4(val, val, val, 0.0f);
+}
+
+/**
+ * @brief Factory that returns a unit length 2 component vfloat4.
+ */
+static ASTCENC_SIMD_INLINE vfloat4 unit2()
+{
+ float val = 0.707106769084930420f;
+ return vfloat4(val, val, 0.0f, 0.0f);
+}
+
+/**
+ * @brief Factory that returns a 3 component vfloat4.
+ */
+static ASTCENC_SIMD_INLINE vfloat4 vfloat3(float a, float b, float c)
+{
+ return vfloat4(a, b, c, 0.0f);
+}
+
+/**
+ * @brief Factory that returns a 2 component vfloat4.
+ */
+static ASTCENC_SIMD_INLINE vfloat4 vfloat2(float a, float b)
+{
+ return vfloat4(a, b, 0.0f, 0.0f);
+}
+
+/**
+ * @brief Normalize a non-zero length vector to unit length.
+ */
+static ASTCENC_SIMD_INLINE vfloat4 normalize(vfloat4 a)
+{
+ vfloat4 length = dot(a, a);
+ return a / sqrt(length);
+}
+
+/**
+ * @brief Normalize a vector, returning @c safe if len is zero.
+ */
+static ASTCENC_SIMD_INLINE vfloat4 normalize_safe(vfloat4 a, vfloat4 safe)
+{
+ vfloat4 length = dot(a, a);
+ if (length.lane<0>() != 0.0f)
+ {
+ return a / sqrt(length);
+ }
+
+ return safe;
+}
+
+
+
+#define POLY0(x, c0) ( c0)
+#define POLY1(x, c0, c1) ((POLY0(x, c1) * x) + c0)
+#define POLY2(x, c0, c1, c2) ((POLY1(x, c1, c2) * x) + c0)
+#define POLY3(x, c0, c1, c2, c3) ((POLY2(x, c1, c2, c3) * x) + c0)
+#define POLY4(x, c0, c1, c2, c3, c4) ((POLY3(x, c1, c2, c3, c4) * x) + c0)
+#define POLY5(x, c0, c1, c2, c3, c4, c5) ((POLY4(x, c1, c2, c3, c4, c5) * x) + c0)
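+
+// These macros expand to a Horner-form evaluation of the polynomial; for
+// example POLY2(x, c0, c1, c2) evaluates ((c2 * x + c1) * x) + c0.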
+
+/**
+ * @brief Compute an approximate exp2(x) for each lane in the vector.
+ *
+ * Based on 5th degree minimax polynomials, ported from this blog
+ * https://jrfonseca.blogspot.com/2008/09/fast-sse2-pow-tables-or-polynomials.html
+ */
+static ASTCENC_SIMD_INLINE vfloat4 exp2(vfloat4 x)
+{
+ x = clamp(-126.99999f, 129.0f, x);
+
+ vint4 ipart = float_to_int(x - 0.5f);
+ vfloat4 fpart = x - int_to_float(ipart);
+
+ // Integer contrib, using 1 << ipart
+ vfloat4 iexp = int_as_float(lsl<23>(ipart + 127));
+
+ // Fractional contrib, using polynomial fit of 2^x in range [-0.5, 0.5)
+ vfloat4 fexp = POLY5(fpart,
+ 9.9999994e-1f,
+ 6.9315308e-1f,
+ 2.4015361e-1f,
+ 5.5826318e-2f,
+ 8.9893397e-3f,
+ 1.8775767e-3f);
+
+ return iexp * fexp;
+}
+
+/**
+ * @brief Compute an approximate log2(x) for each lane in the vector.
+ *
+ * Based on 5th degree minimax polynomials, ported from this blog
+ * https://jrfonseca.blogspot.com/2008/09/fast-sse2-pow-tables-or-polynomials.html
+ */
+static ASTCENC_SIMD_INLINE vfloat4 log2(vfloat4 x)
+{
+ vint4 exp(0x7F800000);
+ vint4 mant(0x007FFFFF);
+ vint4 one(0x3F800000);
+
+ vint4 i = float_as_int(x);
+
+ vfloat4 e = int_to_float(lsr<23>(i & exp) - 127);
+
+ vfloat4 m = int_as_float((i & mant) | one);
+
+ // Polynomial fit of log2(x)/(x - 1), for x in range [1, 2)
+ vfloat4 p = POLY4(m,
+ 2.8882704548164776201f,
+ -2.52074962577807006663f,
+ 1.48116647521213171641f,
+ -0.465725644288844778798f,
+ 0.0596515482674574969533f);
+
+ // Increases the polynomial degree, but ensures that log2(1) == 0
+ p = p * (m - 1.0f);
+
+ return p + e;
+}
+
+/**
+ * @brief Compute an approximate pow(x, y) for each lane in the vector.
+ *
+ * Power function based on the exp2(log2(x) * y) transform.
+ */
+static ASTCENC_SIMD_INLINE vfloat4 pow(vfloat4 x, vfloat4 y)
+{
+ vmask4 zero_mask = y == vfloat4(0.0f);
+ vfloat4 estimate = exp2(log2(x) * y);
+
+ // Guarantee that y == 0 returns exactly 1.0f
+ return select(estimate, vfloat4(1.0f), zero_mask);
+}
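+
+// Worked example (informal): pow(vfloat4(2.0f), vfloat4(3.0f)) evaluates
+// exp2(log2(2) * 3) and returns approximately 8.0f per lane, while a zero
+// exponent short-circuits to exactly 1.0f via the select above.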
+
+/**
+ * @brief Count the leading zeros for each lane in @c a.
+ *
+ * Valid for all data values of @c a; will return a per-lane value [0, 32].
+ */
+static ASTCENC_SIMD_INLINE vint4 clz(vint4 a)
+{
+ // This function is a horrible abuse of floating point exponents to convert
+ // the original integer value into a 2^N encoding we can recover easily.
+
+ // Convert to float without risk of rounding up by keeping only top 8 bits.
+ // This trick is guaranteed to keep the top 8 bits and clear the 9th.
+ a = (~lsr<8>(a)) & a;
+ a = float_as_int(int_to_float(a));
+
+ // Extract and unbias exponent
+ a = vint4(127 + 31) - lsr<23>(a);
+
+ // Clamp result to a valid 32-bit range
+ return clamp(0, 32, a);
+}
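+
+// Worked examples (informal): clz(vint4(0)) returns 32 and clz(vint4(1))
+// returns 31 in every lane, matching the documented [0, 32] result range.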
+
+/**
+ * @brief Return lanewise 2^a for each lane in @c a.
+ *
+ * Use of signed int means that this is only valid for values in range [0, 31].
+ */
+static ASTCENC_SIMD_INLINE vint4 two_to_the_n(vint4 a)
+{
+ // 2^30 is the largest power of two that a signed int can represent
+ assert(all(a < vint4(31)));
+
+ // This function is a horrible abuse of floating point to use the exponent
+ // and float conversion to generate a 2^N multiple.
+
+ // Bias the exponent
+ vint4 exp = a + 127;
+ exp = lsl<23>(exp);
+
+ // Reinterpret the bits as a float, and then convert to an int
+ vfloat4 f = int_as_float(exp);
+ return float_to_int(f);
+}
+
+/**
+ * @brief Convert unorm16 [0, 65535] to float16 in range [0, 1].
+ */
+static ASTCENC_SIMD_INLINE vint4 unorm16_to_sf16(vint4 p)
+{
+ vint4 fp16_one = vint4(0x3C00);
+ vint4 fp16_small = lsl<8>(p);
+
+ vmask4 is_one = p == vint4(0xFFFF);
+ vmask4 is_small = p < vint4(4);
+
+ // Manually inline clz() on Visual Studio to avoid release build codegen bug
+ // see https://github.com/ARM-software/astc-encoder/issues/259
+#if !defined(__clang__) && defined(_MSC_VER)
+ vint4 a = (~lsr<8>(p)) & p;
+ a = float_as_int(int_to_float(a));
+ a = vint4(127 + 31) - lsr<23>(a);
+ vint4 lz = clamp(0, 32, a) - 16;
+#else
+ vint4 lz = clz(p) - 16;
+#endif
+
+ p = p * two_to_the_n(lz + 1);
+ p = p & vint4(0xFFFF);
+
+ p = lsr<6>(p);
+
+ p = p | lsl<10>(vint4(14) - lz);
+
+ vint4 r = select(p, fp16_one, is_one);
+ r = select(r, fp16_small, is_small);
+ return r;
+}
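+
+// Worked examples (informal): unorm16_to_sf16(vint4(0xFFFF)) returns 0x3C00
+// (FP16 1.0) via the is_one path, and unorm16_to_sf16(vint4(0)) returns
+// 0x0000 via the is_small path.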
+
+/**
+ * @brief Convert 16-bit LNS to float16.
+ */
+static ASTCENC_SIMD_INLINE vint4 lns_to_sf16(vint4 p)
+{
+ vint4 mc = p & 0x7FF;
+ vint4 ec = lsr<11>(p);
+
+ vint4 mc_512 = mc * 3;
+ vmask4 mask_512 = mc < vint4(512);
+
+ vint4 mc_1536 = mc * 4 - 512;
+ vmask4 mask_1536 = mc < vint4(1536);
+
+ vint4 mc_else = mc * 5 - 2048;
+
+ vint4 mt = mc_else;
+ mt = select(mt, mc_1536, mask_1536);
+ mt = select(mt, mc_512, mask_512);
+
+ vint4 res = lsl<10>(ec) | lsr<3>(mt);
+ return min(res, vint4(0x7BFF));
+}
+
+/**
+ * @brief Extract mantissa and exponent of a float value.
+ *
+ * @param a The input value.
+ * @param[out] exp The output exponent.
+ *
+ * @return The mantissa.
+ */
+static ASTCENC_SIMD_INLINE vfloat4 frexp(vfloat4 a, vint4& exp)
+{
+ // Interpret the bits as an integer
+ vint4 ai = float_as_int(a);
+
+ // Extract and unbias the exponent
+ exp = (lsr<23>(ai) & 0xFF) - 126;
+
+ // Extract and unbias the mantissa
+ vint4 manti = (ai & static_cast<int>(0x807FFFFF)) | 0x3F000000;
+ return int_as_float(manti);
+}
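+
+// Worked example (informal): frexp(vfloat4(8.0f), exp) returns 0.5f per lane
+// and sets exp to 4, i.e. 8.0 == 0.5 * 2^4 with the mantissa in [0.5, 1.0).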
+
+/**
+ * @brief Convert float to 16-bit LNS.
+ */
+static ASTCENC_SIMD_INLINE vfloat4 float_to_lns(vfloat4 a)
+{
+ vint4 exp;
+ vfloat4 mant = frexp(a, exp);
+
+ // Do these early before we start messing about ...
+ vmask4 mask_underflow_nan = ~(a > vfloat4(1.0f / 67108864.0f));
+ vmask4 mask_infinity = a >= vfloat4(65536.0f);
+
+ // If input is smaller than 2^-14, multiply by 2^25 and don't bias.
+ vmask4 exp_lt_m13 = exp < vint4(-13);
+
+ vfloat4 a1a = a * 33554432.0f;
+ vint4 expa = vint4::zero();
+
+ vfloat4 a1b = (mant - 0.5f) * 4096;
+ vint4 expb = exp + 14;
+
+ a = select(a1b, a1a, exp_lt_m13);
+ exp = select(expb, expa, exp_lt_m13);
+
+ vmask4 a_lt_384 = a < vfloat4(384.0f);
+ vmask4 a_lt_1408 = a <= vfloat4(1408.0f);
+
+ vfloat4 a2a = a * (4.0f / 3.0f);
+ vfloat4 a2b = a + 128.0f;
+ vfloat4 a2c = (a + 512.0f) * (4.0f / 5.0f);
+
+ a = a2c;
+ a = select(a, a2b, a_lt_1408);
+ a = select(a, a2a, a_lt_384);
+
+ a = a + (int_to_float(exp) * 2048.0f) + 1.0f;
+
+ a = select(a, vfloat4(65535.0f), mask_infinity);
+ a = select(a, vfloat4::zero(), mask_underflow_nan);
+
+ return a;
+}
+
+namespace astc
+{
+
+static ASTCENC_SIMD_INLINE float pow(float x, float y)
+{
+ return pow(vfloat4(x), vfloat4(y)).lane<0>();
+}
+
+}
+
+#endif // #ifndef ASTC_VECMATHLIB_H_INCLUDED
diff --git a/thirdparty/astcenc/astcenc_vecmathlib_avx2_8.h b/thirdparty/astcenc/astcenc_vecmathlib_avx2_8.h
new file mode 100644
index 0000000000..a785aca75b
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_vecmathlib_avx2_8.h
@@ -0,0 +1,1204 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2019-2022 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+/**
+ * @brief 8x32-bit vectors, implemented using AVX2.
+ *
+ * This module implements 8-wide 32-bit float, int, and mask vectors for x86
+ * AVX2.
+ *
+ * There is a baseline level of functionality provided by all vector widths and
+ * implementations. This is implemented using identical function signatures,
+ * modulo data type, so we can use them as substitutable implementations in VLA
+ * code.
+ */
+
+#ifndef ASTC_VECMATHLIB_AVX2_8_H_INCLUDED
+#define ASTC_VECMATHLIB_AVX2_8_H_INCLUDED
+
+#ifndef ASTCENC_SIMD_INLINE
+ #error "Include astcenc_vecmathlib.h, do not include directly"
+#endif
+
+#include <cstdio>
+
+// Define convenience intrinsics that are missing on older compilers
+#define astcenc_mm256_set_m128i(m, n) _mm256_insertf128_si256(_mm256_castsi128_si256((n)), (m), 1)
+
+// ============================================================================
+// vfloat8 data type
+// ============================================================================
+
+/**
+ * @brief Data type for 8-wide floats.
+ */
+struct vfloat8
+{
+ /**
+ * @brief Construct from zero-initialized value.
+ */
+ ASTCENC_SIMD_INLINE vfloat8() = default;
+
+ /**
+ * @brief Construct from 8 values loaded from an unaligned address.
+ *
+ * Consider using loada() instead if the data is aligned to the vector
+ * length.
+ */
+ ASTCENC_SIMD_INLINE explicit vfloat8(const float *p)
+ {
+ m = _mm256_loadu_ps(p);
+ }
+
+ /**
+ * @brief Construct from 1 scalar value replicated across all lanes.
+ *
+ * Consider using zero() for constexpr zeros.
+ */
+ ASTCENC_SIMD_INLINE explicit vfloat8(float a)
+ {
+ m = _mm256_set1_ps(a);
+ }
+
+ /**
+ * @brief Construct from 8 scalar values.
+ *
+ * The value of @c a is stored to lane 0 (LSB) in the SIMD register.
+ */
+ ASTCENC_SIMD_INLINE explicit vfloat8(
+ float a, float b, float c, float d,
+ float e, float f, float g, float h)
+ {
+ m = _mm256_set_ps(h, g, f, e, d, c, b, a);
+ }
+
+ /**
+ * @brief Construct from an existing SIMD register.
+ */
+ ASTCENC_SIMD_INLINE explicit vfloat8(__m256 a)
+ {
+ m = a;
+ }
+
+ /**
+ * @brief Get the scalar value of a single lane.
+ */
+ template <int l> ASTCENC_SIMD_INLINE float lane() const
+ {
+ #if !defined(__clang__) && defined(_MSC_VER)
+ return m.m256_f32[l];
+ #else
+ union { __m256 m; float f[8]; } cvt;
+ cvt.m = m;
+ return cvt.f[l];
+ #endif
+ }
+
+ /**
+ * @brief Factory that returns a vector of zeros.
+ */
+ static ASTCENC_SIMD_INLINE vfloat8 zero()
+ {
+ return vfloat8(_mm256_setzero_ps());
+ }
+
+ /**
+ * @brief Factory that returns a replicated scalar loaded from memory.
+ */
+ static ASTCENC_SIMD_INLINE vfloat8 load1(const float* p)
+ {
+ return vfloat8(_mm256_broadcast_ss(p));
+ }
+
+ /**
+ * @brief Factory that returns a vector loaded from 32B aligned memory.
+ */
+ static ASTCENC_SIMD_INLINE vfloat8 loada(const float* p)
+ {
+ return vfloat8(_mm256_load_ps(p));
+ }
+
+ /**
+ * @brief Factory that returns a vector containing the lane IDs.
+ */
+ static ASTCENC_SIMD_INLINE vfloat8 lane_id()
+ {
+ return vfloat8(_mm256_set_ps(7, 6, 5, 4, 3, 2, 1, 0));
+ }
+
+ /**
+ * @brief The underlying vector data.
+ */
+ __m256 m;
+};
+
+// ============================================================================
+// vint8 data type
+// ============================================================================
+
+/**
+ * @brief Data type for 8-wide ints.
+ */
+struct vint8
+{
+ /**
+ * @brief Construct from zero-initialized value.
+ */
+ ASTCENC_SIMD_INLINE vint8() = default;
+
+ /**
+ * @brief Construct from 8 values loaded from an unaligned address.
+ *
+ * Consider using loada() instead if the data is aligned to the vector
+ * length.
+ */
+ ASTCENC_SIMD_INLINE explicit vint8(const int *p)
+ {
+ m = _mm256_loadu_si256(reinterpret_cast<const __m256i*>(p));
+ }
+
+ /**
+ * @brief Construct from 8 uint8_t loaded from an unaligned address.
+ */
+ ASTCENC_SIMD_INLINE explicit vint8(const uint8_t *p)
+ {
+ // _mm_loadu_si64 would be nicer syntax, but missing on older GCC
+ m = _mm256_cvtepu8_epi32(_mm_cvtsi64_si128(*reinterpret_cast<const long long*>(p)));
+ }
+
+ /**
+ * @brief Construct from 1 scalar value replicated across all lanes.
+ *
+ * Consider using vint8::zero() for constexpr zeros.
+ */
+ ASTCENC_SIMD_INLINE explicit vint8(int a)
+ {
+ m = _mm256_set1_epi32(a);
+ }
+
+ /**
+ * @brief Construct from 8 scalar values.
+ *
+ * The value of @c a is stored to lane 0 (LSB) in the SIMD register.
+ */
+ ASTCENC_SIMD_INLINE explicit vint8(
+ int a, int b, int c, int d,
+ int e, int f, int g, int h)
+ {
+ m = _mm256_set_epi32(h, g, f, e, d, c, b, a);
+ }
+
+ /**
+ * @brief Construct from an existing SIMD register.
+ */
+ ASTCENC_SIMD_INLINE explicit vint8(__m256i a)
+ {
+ m = a;
+ }
+
+ /**
+ * @brief Get the scalar from a single lane.
+ */
+ template <int l> ASTCENC_SIMD_INLINE int lane() const
+ {
+ #if !defined(__clang__) && defined(_MSC_VER)
+ return m.m256i_i32[l];
+ #else
+ union { __m256i m; int f[8]; } cvt;
+ cvt.m = m;
+ return cvt.f[l];
+ #endif
+ }
+
+ /**
+ * @brief Factory that returns a vector of zeros.
+ */
+ static ASTCENC_SIMD_INLINE vint8 zero()
+ {
+ return vint8(_mm256_setzero_si256());
+ }
+
+ /**
+ * @brief Factory that returns a replicated scalar loaded from memory.
+ */
+ static ASTCENC_SIMD_INLINE vint8 load1(const int* p)
+ {
+ __m128i a = _mm_set1_epi32(*p);
+ return vint8(_mm256_broadcastd_epi32(a));
+ }
+
+ /**
+ * @brief Factory that returns a vector loaded from 32B aligned memory.
+ */
+ static ASTCENC_SIMD_INLINE vint8 loada(const int* p)
+ {
+ return vint8(_mm256_load_si256(reinterpret_cast<const __m256i*>(p)));
+ }
+
+ /**
+ * @brief Factory that returns a vector containing the lane IDs.
+ */
+ static ASTCENC_SIMD_INLINE vint8 lane_id()
+ {
+ return vint8(_mm256_set_epi32(7, 6, 5, 4, 3, 2, 1, 0));
+ }
+
+ /**
+ * @brief The underlying vector data.
+ */
+ __m256i m;
+};
+
+// ============================================================================
+// vmask8 data type
+// ============================================================================
+
+/**
+ * @brief Data type for 8-wide control plane masks.
+ */
+struct vmask8
+{
+ /**
+ * @brief Construct from an existing SIMD register.
+ */
+ ASTCENC_SIMD_INLINE explicit vmask8(__m256 a)
+ {
+ m = a;
+ }
+
+ /**
+ * @brief Construct from an existing SIMD register.
+ */
+ ASTCENC_SIMD_INLINE explicit vmask8(__m256i a)
+ {
+ m = _mm256_castsi256_ps(a);
+ }
+
+ /**
+ * @brief Construct from 1 scalar value.
+ */
+ ASTCENC_SIMD_INLINE explicit vmask8(bool a)
+ {
+ vint8 mask(a == false ? 0 : -1);
+ m = _mm256_castsi256_ps(mask.m);
+ }
+
+ /**
+ * @brief The underlying vector data.
+ */
+ __m256 m;
+};
+
+// ============================================================================
+// vmask8 operators and functions
+// ============================================================================
+
+/**
+ * @brief Overload: mask union (or).
+ */
+ASTCENC_SIMD_INLINE vmask8 operator|(vmask8 a, vmask8 b)
+{
+ return vmask8(_mm256_or_ps(a.m, b.m));
+}
+
+/**
+ * @brief Overload: mask intersect (and).
+ */
+ASTCENC_SIMD_INLINE vmask8 operator&(vmask8 a, vmask8 b)
+{
+ return vmask8(_mm256_and_ps(a.m, b.m));
+}
+
+/**
+ * @brief Overload: mask difference (xor).
+ */
+ASTCENC_SIMD_INLINE vmask8 operator^(vmask8 a, vmask8 b)
+{
+ return vmask8(_mm256_xor_ps(a.m, b.m));
+}
+
+/**
+ * @brief Overload: mask invert (not).
+ */
+ASTCENC_SIMD_INLINE vmask8 operator~(vmask8 a)
+{
+ return vmask8(_mm256_xor_si256(_mm256_castps_si256(a.m), _mm256_set1_epi32(-1)));
+}
+
+/**
+ * @brief Return an 8-bit mask code indicating mask status.
+ *
+ * bit0 = lane 0
+ */
+ASTCENC_SIMD_INLINE unsigned int mask(vmask8 a)
+{
+ return static_cast<unsigned int>(_mm256_movemask_ps(a.m));
+}
+
+/**
+ * @brief True if any lanes are enabled, false otherwise.
+ */
+ASTCENC_SIMD_INLINE bool any(vmask8 a)
+{
+ return mask(a) != 0;
+}
+
+/**
+ * @brief True if all lanes are enabled, false otherwise.
+ */
+ASTCENC_SIMD_INLINE bool all(vmask8 a)
+{
+ return mask(a) == 0xFF;
+}
+
+// ============================================================================
+// vint8 operators and functions
+// ============================================================================
+/**
+ * @brief Overload: vector by vector addition.
+ */
+ASTCENC_SIMD_INLINE vint8 operator+(vint8 a, vint8 b)
+{
+ return vint8(_mm256_add_epi32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector incremental addition.
+ */
+ASTCENC_SIMD_INLINE vint8& operator+=(vint8& a, const vint8& b)
+{
+ a = a + b;
+ return a;
+}
+
+/**
+ * @brief Overload: vector by vector subtraction.
+ */
+ASTCENC_SIMD_INLINE vint8 operator-(vint8 a, vint8 b)
+{
+ return vint8(_mm256_sub_epi32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector multiplication.
+ */
+ASTCENC_SIMD_INLINE vint8 operator*(vint8 a, vint8 b)
+{
+ return vint8(_mm256_mullo_epi32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector bit invert.
+ */
+ASTCENC_SIMD_INLINE vint8 operator~(vint8 a)
+{
+ return vint8(_mm256_xor_si256(a.m, _mm256_set1_epi32(-1)));
+}
+
+/**
+ * @brief Overload: vector by vector bitwise or.
+ */
+ASTCENC_SIMD_INLINE vint8 operator|(vint8 a, vint8 b)
+{
+ return vint8(_mm256_or_si256(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector bitwise and.
+ */
+ASTCENC_SIMD_INLINE vint8 operator&(vint8 a, vint8 b)
+{
+ return vint8(_mm256_and_si256(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector bitwise xor.
+ */
+ASTCENC_SIMD_INLINE vint8 operator^(vint8 a, vint8 b)
+{
+ return vint8(_mm256_xor_si256(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector equality.
+ */
+ASTCENC_SIMD_INLINE vmask8 operator==(vint8 a, vint8 b)
+{
+ return vmask8(_mm256_cmpeq_epi32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector inequality.
+ */
+ASTCENC_SIMD_INLINE vmask8 operator!=(vint8 a, vint8 b)
+{
+ return ~vmask8(_mm256_cmpeq_epi32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector less than.
+ */
+ASTCENC_SIMD_INLINE vmask8 operator<(vint8 a, vint8 b)
+{
+ return vmask8(_mm256_cmpgt_epi32(b.m, a.m));
+}
+
+/**
+ * @brief Overload: vector by vector greater than.
+ */
+ASTCENC_SIMD_INLINE vmask8 operator>(vint8 a, vint8 b)
+{
+ return vmask8(_mm256_cmpgt_epi32(a.m, b.m));
+}
+
+/**
+ * @brief Logical shift left.
+ */
+template <int s> ASTCENC_SIMD_INLINE vint8 lsl(vint8 a)
+{
+ return vint8(_mm256_slli_epi32(a.m, s));
+}
+
+/**
+ * @brief Arithmetic shift right.
+ */
+template <int s> ASTCENC_SIMD_INLINE vint8 asr(vint8 a)
+{
+ return vint8(_mm256_srai_epi32(a.m, s));
+}
+
+/**
+ * @brief Logical shift right.
+ */
+template <int s> ASTCENC_SIMD_INLINE vint8 lsr(vint8 a)
+{
+ return vint8(_mm256_srli_epi32(a.m, s));
+}
+
+/**
+ * @brief Return the min vector of two vectors.
+ */
+ASTCENC_SIMD_INLINE vint8 min(vint8 a, vint8 b)
+{
+ return vint8(_mm256_min_epi32(a.m, b.m));
+}
+
+/**
+ * @brief Return the max vector of two vectors.
+ */
+ASTCENC_SIMD_INLINE vint8 max(vint8 a, vint8 b)
+{
+ return vint8(_mm256_max_epi32(a.m, b.m));
+}
+
+/**
+ * @brief Return the horizontal minimum of a vector.
+ */
+ASTCENC_SIMD_INLINE vint8 hmin(vint8 a)
+{
+ __m128i m = _mm_min_epi32(_mm256_extracti128_si256(a.m, 0), _mm256_extracti128_si256(a.m, 1));
+ m = _mm_min_epi32(m, _mm_shuffle_epi32(m, _MM_SHUFFLE(0,0,3,2)));
+ m = _mm_min_epi32(m, _mm_shuffle_epi32(m, _MM_SHUFFLE(0,0,0,1)));
+ m = _mm_shuffle_epi32(m, _MM_SHUFFLE(0,0,0,0));
+
+ __m256i r = astcenc_mm256_set_m128i(m, m);
+ vint8 vmin(r);
+ return vmin;
+}
+
+/**
+ * @brief Return the horizontal maximum of a vector.
+ */
+ASTCENC_SIMD_INLINE vint8 hmax(vint8 a)
+{
+ __m128i m = _mm_max_epi32(_mm256_extracti128_si256(a.m, 0), _mm256_extracti128_si256(a.m, 1));
+ m = _mm_max_epi32(m, _mm_shuffle_epi32(m, _MM_SHUFFLE(0,0,3,2)));
+ m = _mm_max_epi32(m, _mm_shuffle_epi32(m, _MM_SHUFFLE(0,0,0,1)));
+ m = _mm_shuffle_epi32(m, _MM_SHUFFLE(0,0,0,0));
+
+ __m256i r = astcenc_mm256_set_m128i(m, m);
+ vint8 vmax(r);
+ return vmax;
+}
+
+/**
+ * @brief Store a vector to a 32B aligned memory address.
+ */
+ASTCENC_SIMD_INLINE void storea(vint8 a, int* p)
+{
+ _mm256_store_si256(reinterpret_cast<__m256i*>(p), a.m);
+}
+
+/**
+ * @brief Store a vector to an unaligned memory address.
+ */
+ASTCENC_SIMD_INLINE void store(vint8 a, int* p)
+{
+ _mm256_storeu_si256(reinterpret_cast<__m256i*>(p), a.m);
+}
+
+/**
+ * @brief Store lowest N (vector width) bytes into an unaligned address.
+ */
+ASTCENC_SIMD_INLINE void store_nbytes(vint8 a, uint8_t* p)
+{
+ // This is the most logical implementation, but the convenience intrinsic
+ // is missing on older compilers (supported in g++ 9 and clang++ 9).
+ // _mm_storeu_si64(p, _mm256_extracti128_si256(a.m, 0))
+ _mm_storel_epi64(reinterpret_cast<__m128i*>(p), _mm256_extracti128_si256(a.m, 0));
+}
+
+/**
+ * @brief Gather N (vector width) indices from the array.
+ */
+ASTCENC_SIMD_INLINE vint8 gatheri(const int* base, vint8 indices)
+{
+ return vint8(_mm256_i32gather_epi32(base, indices.m, 4));
+}
+
+/**
+ * @brief Pack low 8 bits of N (vector width) lanes into bottom of vector.
+ */
+ASTCENC_SIMD_INLINE vint8 pack_low_bytes(vint8 v)
+{
+ __m256i shuf = _mm256_set_epi8(0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 28, 24, 20, 16,
+ 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 12, 8, 4, 0);
+ __m256i a = _mm256_shuffle_epi8(v.m, shuf);
+ __m128i a0 = _mm256_extracti128_si256(a, 0);
+ __m128i a1 = _mm256_extracti128_si256(a, 1);
+ __m128i b = _mm_unpacklo_epi32(a0, a1);
+
+ __m256i r = astcenc_mm256_set_m128i(b, b);
+ return vint8(r);
+}
+
+/**
+ * @brief Return lanes from @c b if @c cond is set, else @c a.
+ */
+ASTCENC_SIMD_INLINE vint8 select(vint8 a, vint8 b, vmask8 cond)
+{
+ __m256i condi = _mm256_castps_si256(cond.m);
+ return vint8(_mm256_blendv_epi8(a.m, b.m, condi));
+}
+
+// ============================================================================
+// vfloat8 operators and functions
+// ============================================================================
+
+/**
+ * @brief Overload: vector by vector addition.
+ */
+ASTCENC_SIMD_INLINE vfloat8 operator+(vfloat8 a, vfloat8 b)
+{
+ return vfloat8(_mm256_add_ps(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector incremental addition.
+ */
+ASTCENC_SIMD_INLINE vfloat8& operator+=(vfloat8& a, const vfloat8& b)
+{
+ a = a + b;
+ return a;
+}
+
+/**
+ * @brief Overload: vector by vector subtraction.
+ */
+ASTCENC_SIMD_INLINE vfloat8 operator-(vfloat8 a, vfloat8 b)
+{
+ return vfloat8(_mm256_sub_ps(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector multiplication.
+ */
+ASTCENC_SIMD_INLINE vfloat8 operator*(vfloat8 a, vfloat8 b)
+{
+ return vfloat8(_mm256_mul_ps(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by scalar multiplication.
+ */
+ASTCENC_SIMD_INLINE vfloat8 operator*(vfloat8 a, float b)
+{
+ return vfloat8(_mm256_mul_ps(a.m, _mm256_set1_ps(b)));
+}
+
+/**
+ * @brief Overload: scalar by vector multiplication.
+ */
+ASTCENC_SIMD_INLINE vfloat8 operator*(float a, vfloat8 b)
+{
+ return vfloat8(_mm256_mul_ps(_mm256_set1_ps(a), b.m));
+}
+
+/**
+ * @brief Overload: vector by vector division.
+ */
+ASTCENC_SIMD_INLINE vfloat8 operator/(vfloat8 a, vfloat8 b)
+{
+ return vfloat8(_mm256_div_ps(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by scalar division.
+ */
+ASTCENC_SIMD_INLINE vfloat8 operator/(vfloat8 a, float b)
+{
+ return vfloat8(_mm256_div_ps(a.m, _mm256_set1_ps(b)));
+}
+
+
+/**
+ * @brief Overload: scalar by vector division.
+ */
+ASTCENC_SIMD_INLINE vfloat8 operator/(float a, vfloat8 b)
+{
+ return vfloat8(_mm256_div_ps(_mm256_set1_ps(a), b.m));
+}
+
+
+/**
+ * @brief Overload: vector by vector equality.
+ */
+ASTCENC_SIMD_INLINE vmask8 operator==(vfloat8 a, vfloat8 b)
+{
+ return vmask8(_mm256_cmp_ps(a.m, b.m, _CMP_EQ_OQ));
+}
+
+/**
+ * @brief Overload: vector by vector inequality.
+ */
+ASTCENC_SIMD_INLINE vmask8 operator!=(vfloat8 a, vfloat8 b)
+{
+ return vmask8(_mm256_cmp_ps(a.m, b.m, _CMP_NEQ_OQ));
+}
+
+/**
+ * @brief Overload: vector by vector less than.
+ */
+ASTCENC_SIMD_INLINE vmask8 operator<(vfloat8 a, vfloat8 b)
+{
+ return vmask8(_mm256_cmp_ps(a.m, b.m, _CMP_LT_OQ));
+}
+
+/**
+ * @brief Overload: vector by vector greater than.
+ */
+ASTCENC_SIMD_INLINE vmask8 operator>(vfloat8 a, vfloat8 b)
+{
+ return vmask8(_mm256_cmp_ps(a.m, b.m, _CMP_GT_OQ));
+}
+
+/**
+ * @brief Overload: vector by vector less than or equal.
+ */
+ASTCENC_SIMD_INLINE vmask8 operator<=(vfloat8 a, vfloat8 b)
+{
+ return vmask8(_mm256_cmp_ps(a.m, b.m, _CMP_LE_OQ));
+}
+
+/**
+ * @brief Overload: vector by vector greater than or equal.
+ */
+ASTCENC_SIMD_INLINE vmask8 operator>=(vfloat8 a, vfloat8 b)
+{
+ return vmask8(_mm256_cmp_ps(a.m, b.m, _CMP_GE_OQ));
+}
+
+/**
+ * @brief Return the min vector of two vectors.
+ *
+ * If either lane value is NaN, @c b will be returned for that lane.
+ */
+ASTCENC_SIMD_INLINE vfloat8 min(vfloat8 a, vfloat8 b)
+{
+ return vfloat8(_mm256_min_ps(a.m, b.m));
+}
+
+/**
+ * @brief Return the min vector of a vector and a scalar.
+ *
+ * If either lane value is NaN, @c b will be returned for that lane.
+ */
+ASTCENC_SIMD_INLINE vfloat8 min(vfloat8 a, float b)
+{
+ return min(a, vfloat8(b));
+}
+
+/**
+ * @brief Return the max vector of two vectors.
+ *
+ * If either lane value is NaN, @c b will be returned for that lane.
+ */
+ASTCENC_SIMD_INLINE vfloat8 max(vfloat8 a, vfloat8 b)
+{
+ return vfloat8(_mm256_max_ps(a.m, b.m));
+}
+
+/**
+ * @brief Return the max vector of a vector and a scalar.
+ *
+ * If either lane value is NaN, @c b will be returned for that lane.
+ */
+ASTCENC_SIMD_INLINE vfloat8 max(vfloat8 a, float b)
+{
+ return max(a, vfloat8(b));
+}
+
+/**
+ * @brief Return the clamped value between min and max.
+ *
+ * It is assumed that neither @c min nor @c max are NaN values. If @c a is NaN
+ * then @c min will be returned for that lane.
+ */
+ASTCENC_SIMD_INLINE vfloat8 clamp(float min, float max, vfloat8 a)
+{
+ // Do not reorder - the second operand is returned if either operand is NaN
+ a.m = _mm256_max_ps(a.m, _mm256_set1_ps(min));
+ a.m = _mm256_min_ps(a.m, _mm256_set1_ps(max));
+ return a;
+}
+
+/**
+ * @brief Return a clamped value between 0.0f and max.
+ *
+ * It is assumed that @c max is not a NaN value. If @c a is NaN then zero will
+ * be returned for that lane.
+ */
+ASTCENC_SIMD_INLINE vfloat8 clampz(float max, vfloat8 a)
+{
+ a.m = _mm256_max_ps(a.m, _mm256_setzero_ps());
+ a.m = _mm256_min_ps(a.m, _mm256_set1_ps(max));
+ return a;
+}
+
+/**
+ * @brief Return a clamped value between 0.0f and 1.0f.
+ *
+ * If @c a is NaN then zero will be returned for that lane.
+ */
+ASTCENC_SIMD_INLINE vfloat8 clampzo(vfloat8 a)
+{
+ a.m = _mm256_max_ps(a.m, _mm256_setzero_ps());
+ a.m = _mm256_min_ps(a.m, _mm256_set1_ps(1.0f));
+ return a;
+}
+
+/**
+ * @brief Return the absolute value of the float vector.
+ */
+ASTCENC_SIMD_INLINE vfloat8 abs(vfloat8 a)
+{
+ __m256 msk = _mm256_castsi256_ps(_mm256_set1_epi32(0x7fffffff));
+ return vfloat8(_mm256_and_ps(a.m, msk));
+}
+
+/**
+ * @brief Return a float rounded to the nearest integer value.
+ */
+ASTCENC_SIMD_INLINE vfloat8 round(vfloat8 a)
+{
+ constexpr int flags = _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC;
+ return vfloat8(_mm256_round_ps(a.m, flags));
+}
+
+/**
+ * @brief Return the horizontal minimum of a vector.
+ */
+ASTCENC_SIMD_INLINE vfloat8 hmin(vfloat8 a)
+{
+ __m128 vlow = _mm256_castps256_ps128(a.m);
+ __m128 vhigh = _mm256_extractf128_ps(a.m, 1);
+ vlow = _mm_min_ps(vlow, vhigh);
+
+ // First do a horizontal reduction.
+ __m128 shuf = _mm_shuffle_ps(vlow, vlow, _MM_SHUFFLE(2, 3, 0, 1));
+ __m128 mins = _mm_min_ps(vlow, shuf);
+ shuf = _mm_movehl_ps(shuf, mins);
+ mins = _mm_min_ss(mins, shuf);
+
+ // This is the most logical implementation, but the convenience intrinsic
+ // is missing on older compilers (supported in g++ 9 and clang++ 9).
+ //__m256 r = _mm256_set_m128(mins, mins)
+ __m256 r = _mm256_insertf128_ps(_mm256_castps128_ps256(mins), mins, 1);
+
+ return vfloat8(_mm256_permute_ps(r, 0));
+}
+
+/**
+ * @brief Return the horizontal minimum of a vector.
+ */
+ASTCENC_SIMD_INLINE float hmin_s(vfloat8 a)
+{
+ return hmin(a).lane<0>();
+}
+
+/**
+ * @brief Return the horizontal maximum of a vector.
+ */
+ASTCENC_SIMD_INLINE vfloat8 hmax(vfloat8 a)
+{
+ __m128 vlow = _mm256_castps256_ps128(a.m);
+ __m128 vhigh = _mm256_extractf128_ps(a.m, 1);
+ vhigh = _mm_max_ps(vlow, vhigh);
+
+ // First do a horizontal reduction.
+ __m128 shuf = _mm_shuffle_ps(vhigh, vhigh, _MM_SHUFFLE(2, 3, 0, 1));
+ __m128 maxs = _mm_max_ps(vhigh, shuf);
+ shuf = _mm_movehl_ps(shuf,maxs);
+ maxs = _mm_max_ss(maxs, shuf);
+
+ // This is the most logical implementation, but the convenience intrinsic
+ // is missing on older compilers (supported in g++ 9 and clang++ 9).
+ //__m256 r = _mm256_set_m128(maxs, maxs)
+ __m256 r = _mm256_insertf128_ps(_mm256_castps128_ps256(maxs), maxs, 1);
+ return vfloat8(_mm256_permute_ps(r, 0));
+}
+
+/**
+ * @brief Return the horizontal maximum of a vector.
+ */
+ASTCENC_SIMD_INLINE float hmax_s(vfloat8 a)
+{
+ return hmax(a).lane<0>();
+}
+
+/**
+ * @brief Return the horizontal sum of a vector.
+ */
+ASTCENC_SIMD_INLINE float hadd_s(vfloat8 a)
+{
+ // Two sequential 4-wide adds give invariance with 4-wide code
+ vfloat4 lo(_mm256_extractf128_ps(a.m, 0));
+ vfloat4 hi(_mm256_extractf128_ps(a.m, 1));
+ return hadd_s(lo) + hadd_s(hi);
+}
+
+/**
+ * @brief Return lanes from @c b if @c cond is set, else @c a.
+ */
+ASTCENC_SIMD_INLINE vfloat8 select(vfloat8 a, vfloat8 b, vmask8 cond)
+{
+ return vfloat8(_mm256_blendv_ps(a.m, b.m, cond.m));
+}
+
+/**
+ * @brief Return lanes from @c b if MSB of @c cond is set, else @c a.
+ */
+ASTCENC_SIMD_INLINE vfloat8 select_msb(vfloat8 a, vfloat8 b, vmask8 cond)
+{
+ return vfloat8(_mm256_blendv_ps(a.m, b.m, cond.m));
+}
+
+/**
+ * @brief Accumulate lane-wise sums for a vector, folded 4-wide.
+ *
+ * This is invariant with 4-wide implementations.
+ */
+ASTCENC_SIMD_INLINE void haccumulate(vfloat4& accum, vfloat8 a)
+{
+ vfloat4 lo(_mm256_extractf128_ps(a.m, 0));
+ haccumulate(accum, lo);
+
+ vfloat4 hi(_mm256_extractf128_ps(a.m, 1));
+ haccumulate(accum, hi);
+}
+
+/**
+ * @brief Accumulate lane-wise sums for a vector.
+ *
+ * This is NOT invariant with 4-wide implementations.
+ */
+ASTCENC_SIMD_INLINE void haccumulate(vfloat8& accum, vfloat8 a)
+{
+ accum += a;
+}
+
+/**
+ * @brief Accumulate masked lane-wise sums for a vector, folded 4-wide.
+ *
+ * This is invariant with 4-wide implementations.
+ */
+ASTCENC_SIMD_INLINE void haccumulate(vfloat4& accum, vfloat8 a, vmask8 m)
+{
+ a = select(vfloat8::zero(), a, m);
+ haccumulate(accum, a);
+}
+
+/**
+ * @brief Accumulate masked lane-wise sums for a vector.
+ *
+ * This is NOT invariant with 4-wide implementations.
+ */
+ASTCENC_SIMD_INLINE void haccumulate(vfloat8& accum, vfloat8 a, vmask8 m)
+{
+ a = select(vfloat8::zero(), a, m);
+ haccumulate(accum, a);
+}
+
+/**
+ * @brief Return the sqrt of the lanes in the vector.
+ */
+ASTCENC_SIMD_INLINE vfloat8 sqrt(vfloat8 a)
+{
+ return vfloat8(_mm256_sqrt_ps(a.m));
+}
+
+/**
+ * @brief Load a vector of gathered results from an array.
+ */
+ASTCENC_SIMD_INLINE vfloat8 gatherf(const float* base, vint8 indices)
+{
+ return vfloat8(_mm256_i32gather_ps(base, indices.m, 4));
+}
+
+/**
+ * @brief Store a vector to an unaligned memory address.
+ */
+ASTCENC_SIMD_INLINE void store(vfloat8 a, float* p)
+{
+ _mm256_storeu_ps(p, a.m);
+}
+
+/**
+ * @brief Store a vector to a 32B aligned memory address.
+ */
+ASTCENC_SIMD_INLINE void storea(vfloat8 a, float* p)
+{
+ _mm256_store_ps(p, a.m);
+}
+
+/**
+ * @brief Return an integer value for a float vector, using truncation.
+ */
+ASTCENC_SIMD_INLINE vint8 float_to_int(vfloat8 a)
+{
+ return vint8(_mm256_cvttps_epi32(a.m));
+}
+
+/**
+ * @brief Return an integer value for a float vector, using round-to-nearest.
+ */
+ASTCENC_SIMD_INLINE vint8 float_to_int_rtn(vfloat8 a)
+{
+ a = round(a);
+ return vint8(_mm256_cvttps_epi32(a.m));
+}
+
+
+/**
+ * @brief Return a float value for an integer vector.
+ */
+ASTCENC_SIMD_INLINE vfloat8 int_to_float(vint8 a)
+{
+ return vfloat8(_mm256_cvtepi32_ps(a.m));
+}
+
+/**
+ * @brief Return a float value as an integer bit pattern (i.e. no conversion).
+ *
+ * It is a common trick to convert floats into integer bit patterns, perform
+ * some bit hackery based on knowledge they are IEEE 754 layout, and then
+ * convert them back again. This is the first half of that flip.
+ */
+ASTCENC_SIMD_INLINE vint8 float_as_int(vfloat8 a)
+{
+ return vint8(_mm256_castps_si256(a.m));
+}
+
+/**
+ * @brief Return an integer value as a float bit pattern (i.e. no conversion).
+ *
+ * It is a common trick to convert floats into integer bit patterns, perform
+ * some bit hackery based on knowledge they are IEEE 754 layout, and then
+ * convert them back again. This is the second half of that flip.
+ */
+ASTCENC_SIMD_INLINE vfloat8 int_as_float(vint8 a)
+{
+ return vfloat8(_mm256_castsi256_ps(a.m));
+}
+
+/**
+ * @brief Prepare a vtable lookup table for use with the native SIMD size.
+ */
+ASTCENC_SIMD_INLINE void vtable_prepare(vint4 t0, vint8& t0p)
+{
+ // AVX2 duplicates the table within each 128-bit lane
+ __m128i t0n = t0.m;
+ t0p = vint8(astcenc_mm256_set_m128i(t0n, t0n));
+}
+
+/**
+ * @brief Prepare a vtable lookup table for use with the native SIMD size.
+ */
+ASTCENC_SIMD_INLINE void vtable_prepare(vint4 t0, vint4 t1, vint8& t0p, vint8& t1p)
+{
+ // AVX2 duplicates the table within each 128-bit lane
+ __m128i t0n = t0.m;
+ t0p = vint8(astcenc_mm256_set_m128i(t0n, t0n));
+
+ __m128i t1n = _mm_xor_si128(t0.m, t1.m);
+ t1p = vint8(astcenc_mm256_set_m128i(t1n, t1n));
+}
+
+/**
+ * @brief Prepare a vtable lookup table for use with the native SIMD size.
+ */
+ASTCENC_SIMD_INLINE void vtable_prepare(
+ vint4 t0, vint4 t1, vint4 t2, vint4 t3,
+ vint8& t0p, vint8& t1p, vint8& t2p, vint8& t3p)
+{
+ // AVX2 duplicates the table within each 128-bit lane
+ __m128i t0n = t0.m;
+ t0p = vint8(astcenc_mm256_set_m128i(t0n, t0n));
+
+ __m128i t1n = _mm_xor_si128(t0.m, t1.m);
+ t1p = vint8(astcenc_mm256_set_m128i(t1n, t1n));
+
+ __m128i t2n = _mm_xor_si128(t1.m, t2.m);
+ t2p = vint8(astcenc_mm256_set_m128i(t2n, t2n));
+
+ __m128i t3n = _mm_xor_si128(t2.m, t3.m);
+ t3p = vint8(astcenc_mm256_set_m128i(t3n, t3n));
+}
+
+/**
+ * @brief Perform an 8-bit 16-entry table lookup, with 32-bit indexes.
+ */
+ASTCENC_SIMD_INLINE vint8 vtable_8bt_32bi(vint8 t0, vint8 idx)
+{
+ // Set index byte MSB to 1 for unused bytes so shuffle returns zero
+ __m256i idxx = _mm256_or_si256(idx.m, _mm256_set1_epi32(static_cast<int>(0xFFFFFF00)));
+
+ __m256i result = _mm256_shuffle_epi8(t0.m, idxx);
+ return vint8(result);
+}
+
+/**
+ * @brief Perform an 8-bit 32-entry table lookup, with 32-bit indexes.
+ */
+ASTCENC_SIMD_INLINE vint8 vtable_8bt_32bi(vint8 t0, vint8 t1, vint8 idx)
+{
+ // Set index byte MSB to 1 for unused bytes so shuffle returns zero
+ __m256i idxx = _mm256_or_si256(idx.m, _mm256_set1_epi32(static_cast<int>(0xFFFFFF00)));
+
+ __m256i result = _mm256_shuffle_epi8(t0.m, idxx);
+ idxx = _mm256_sub_epi8(idxx, _mm256_set1_epi8(16));
+
+ __m256i result2 = _mm256_shuffle_epi8(t1.m, idxx);
+ result = _mm256_xor_si256(result, result2);
+ return vint8(result);
+}
+
+/**
+ * @brief Perform an 8-bit 64-entry table lookup, with 32-bit indexes.
+ */
+ASTCENC_SIMD_INLINE vint8 vtable_8bt_32bi(vint8 t0, vint8 t1, vint8 t2, vint8 t3, vint8 idx)
+{
+ // Set index byte MSB to 1 for unused bytes so shuffle returns zero
+ __m256i idxx = _mm256_or_si256(idx.m, _mm256_set1_epi32(static_cast<int>(0xFFFFFF00)));
+
+ __m256i result = _mm256_shuffle_epi8(t0.m, idxx);
+ idxx = _mm256_sub_epi8(idxx, _mm256_set1_epi8(16));
+
+ __m256i result2 = _mm256_shuffle_epi8(t1.m, idxx);
+ result = _mm256_xor_si256(result, result2);
+ idxx = _mm256_sub_epi8(idxx, _mm256_set1_epi8(16));
+
+ result2 = _mm256_shuffle_epi8(t2.m, idxx);
+ result = _mm256_xor_si256(result, result2);
+ idxx = _mm256_sub_epi8(idxx, _mm256_set1_epi8(16));
+
+ result2 = _mm256_shuffle_epi8(t3.m, idxx);
+ result = _mm256_xor_si256(result, result2);
+
+ return vint8(result);
+}
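+
+// Informal note on the XOR-delta encoding: vtable_prepare() stores t0, t0^t1,
+// t1^t2 and t2^t3. For an index in the Nth 16-byte range, the shuffles of the
+// first N+1 tables return their byte at the wrapped position and the later
+// shuffles return zero (index byte MSB set), so XOR-ing the partial results
+// cancels everything except the byte from the intended table. For example,
+// index 20 produces t0[4] ^ (t0 ^ t1)[4] == t1[4].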
+
+/**
+ * @brief Return a vector of interleaved RGBA data.
+ *
+ * Input vectors have the value stored in the bottom 8 bits of each lane,
+ * with high bits set to zero.
+ *
+ * Output vector stores a single RGBA texel packed in each lane.
+ */
+ASTCENC_SIMD_INLINE vint8 interleave_rgba8(vint8 r, vint8 g, vint8 b, vint8 a)
+{
+ return r + lsl<8>(g) + lsl<16>(b) + lsl<24>(a);
+}
+
+/**
+ * @brief Store a vector, skipping masked lanes.
+ *
+ * All masked lanes must be at the end of the vector, after all non-masked lanes.
+ */
+ASTCENC_SIMD_INLINE void store_lanes_masked(int* base, vint8 data, vmask8 mask)
+{
+ _mm256_maskstore_epi32(base, _mm256_castps_si256(mask.m), data.m);
+}
+
+/**
+ * @brief Debug function to print a vector of ints.
+ */
+ASTCENC_SIMD_INLINE void print(vint8 a)
+{
+ alignas(ASTCENC_VECALIGN) int v[8];
+ storea(a, v);
+ printf("v8_i32:\n %8d %8d %8d %8d %8d %8d %8d %8d\n",
+ v[0], v[1], v[2], v[3], v[4], v[5], v[6], v[7]);
+}
+
+/**
+ * @brief Debug function to print a vector of ints, in hexadecimal.
+ */
+ASTCENC_SIMD_INLINE void printx(vint8 a)
+{
+ alignas(ASTCENC_VECALIGN) int v[8];
+ storea(a, v);
+ printf("v8_i32:\n %08x %08x %08x %08x %08x %08x %08x %08x\n",
+ v[0], v[1], v[2], v[3], v[4], v[5], v[6], v[7]);
+}
+
+/**
+ * @brief Debug function to print a vector of floats.
+ */
+ASTCENC_SIMD_INLINE void print(vfloat8 a)
+{
+ alignas(ASTCENC_VECALIGN) float v[8];
+ storea(a, v);
+ printf("v8_f32:\n %0.4f %0.4f %0.4f %0.4f %0.4f %0.4f %0.4f %0.4f\n",
+ static_cast<double>(v[0]), static_cast<double>(v[1]),
+ static_cast<double>(v[2]), static_cast<double>(v[3]),
+ static_cast<double>(v[4]), static_cast<double>(v[5]),
+ static_cast<double>(v[6]), static_cast<double>(v[7]));
+}
+
+/**
+ * @brief Debug function to print a vector of masks.
+ */
+ASTCENC_SIMD_INLINE void print(vmask8 a)
+{
+ print(select(vint8(0), vint8(1), a));
+}
+
+#endif // #ifndef ASTC_VECMATHLIB_AVX2_8_H_INCLUDED
diff --git a/thirdparty/astcenc/astcenc_vecmathlib_common_4.h b/thirdparty/astcenc/astcenc_vecmathlib_common_4.h
new file mode 100644
index 0000000000..86ee4fd3e1
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_vecmathlib_common_4.h
@@ -0,0 +1,423 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2020-2021 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+/**
+ * @brief Generic 4x32-bit vector functions.
+ *
+ * This module implements generic 4-wide vector functions that are valid for
+ * all instruction sets, typically implemented using lower level 4-wide
+ * operations that are ISA-specific.
+ */
+
+#ifndef ASTC_VECMATHLIB_COMMON_4_H_INCLUDED
+#define ASTC_VECMATHLIB_COMMON_4_H_INCLUDED
+
+#ifndef ASTCENC_SIMD_INLINE
+ #error "Include astcenc_vecmathlib.h, do not include directly"
+#endif
+
+#include <cstdio>
+
+// ============================================================================
+// vmask4 operators and functions
+// ============================================================================
+
+/**
+ * @brief True if any lanes are enabled, false otherwise.
+ */
+ASTCENC_SIMD_INLINE bool any(vmask4 a)
+{
+ return mask(a) != 0;
+}
+
+/**
+ * @brief True if all lanes are enabled, false otherwise.
+ */
+ASTCENC_SIMD_INLINE bool all(vmask4 a)
+{
+ return mask(a) == 0xF;
+}
+
+// ============================================================================
+// vint4 operators and functions
+// ============================================================================
+
+/**
+ * @brief Overload: vector by scalar addition.
+ */
+ASTCENC_SIMD_INLINE vint4 operator+(vint4 a, int b)
+{
+ return a + vint4(b);
+}
+
+/**
+ * @brief Overload: vector by vector incremental addition.
+ */
+ASTCENC_SIMD_INLINE vint4& operator+=(vint4& a, const vint4& b)
+{
+ a = a + b;
+ return a;
+}
+
+/**
+ * @brief Overload: vector by scalar subtraction.
+ */
+ASTCENC_SIMD_INLINE vint4 operator-(vint4 a, int b)
+{
+ return a - vint4(b);
+}
+
+/**
+ * @brief Overload: vector by scalar multiplication.
+ */
+ASTCENC_SIMD_INLINE vint4 operator*(vint4 a, int b)
+{
+ return a * vint4(b);
+}
+
+/**
+ * @brief Overload: vector by scalar bitwise or.
+ */
+ASTCENC_SIMD_INLINE vint4 operator|(vint4 a, int b)
+{
+ return a | vint4(b);
+}
+
+/**
+ * @brief Overload: vector by scalar bitwise and.
+ */
+ASTCENC_SIMD_INLINE vint4 operator&(vint4 a, int b)
+{
+ return a & vint4(b);
+}
+
+/**
+ * @brief Overload: vector by scalar bitwise xor.
+ */
+ASTCENC_SIMD_INLINE vint4 operator^(vint4 a, int b)
+{
+ return a ^ vint4(b);
+}
+
+/**
+ * @brief Return the clamped value between min and max.
+ */
+ASTCENC_SIMD_INLINE vint4 clamp(int minv, int maxv, vint4 a)
+{
+ return min(max(a, vint4(minv)), vint4(maxv));
+}
+
+/**
+ * @brief Return the horizontal sum of RGB vector lanes as a scalar.
+ */
+ASTCENC_SIMD_INLINE int hadd_rgb_s(vint4 a)
+{
+ return a.lane<0>() + a.lane<1>() + a.lane<2>();
+}
+
+// ============================================================================
+// vfloat4 operators and functions
+// ============================================================================
+
+/**
+ * @brief Overload: vector by vector incremental addition.
+ */
+ASTCENC_SIMD_INLINE vfloat4& operator+=(vfloat4& a, const vfloat4& b)
+{
+ a = a + b;
+ return a;
+}
+
+/**
+ * @brief Overload: vector by scalar addition.
+ */
+ASTCENC_SIMD_INLINE vfloat4 operator+(vfloat4 a, float b)
+{
+ return a + vfloat4(b);
+}
+
+/**
+ * @brief Overload: vector by scalar subtraction.
+ */
+ASTCENC_SIMD_INLINE vfloat4 operator-(vfloat4 a, float b)
+{
+ return a - vfloat4(b);
+}
+
+/**
+ * @brief Overload: vector by scalar multiplication.
+ */
+ASTCENC_SIMD_INLINE vfloat4 operator*(vfloat4 a, float b)
+{
+ return a * vfloat4(b);
+}
+
+/**
+ * @brief Overload: scalar by vector multiplication.
+ */
+ASTCENC_SIMD_INLINE vfloat4 operator*(float a, vfloat4 b)
+{
+ return vfloat4(a) * b;
+}
+
+/**
+ * @brief Overload: vector by scalar division.
+ */
+ASTCENC_SIMD_INLINE vfloat4 operator/(vfloat4 a, float b)
+{
+ return a / vfloat4(b);
+}
+
+/**
+ * @brief Overload: scalar by vector division.
+ */
+ASTCENC_SIMD_INLINE vfloat4 operator/(float a, vfloat4 b)
+{
+ return vfloat4(a) / b;
+}
+
+/**
+ * @brief Return the min vector of a vector and a scalar.
+ *
+ * If either lane value is NaN, @c b will be returned for that lane.
+ */
+ASTCENC_SIMD_INLINE vfloat4 min(vfloat4 a, float b)
+{
+ return min(a, vfloat4(b));
+}
+
+/**
+ * @brief Return the max vector of a vector and a scalar.
+ *
+ * If either lane value is NaN, @c b will be returned for that lane.
+ */
+ASTCENC_SIMD_INLINE vfloat4 max(vfloat4 a, float b)
+{
+ return max(a, vfloat4(b));
+}
+
+/**
+ * @brief Return the clamped value between min and max.
+ *
+ * It is assumed that neither @c min nor @c max are NaN values. If @c a is NaN
+ * then @c min will be returned for that lane.
+ */
+ASTCENC_SIMD_INLINE vfloat4 clamp(float minv, float maxv, vfloat4 a)
+{
+ // Do not reorder - second operand will return if either is NaN
+ return min(max(a, minv), maxv);
+}
+
+/**
+ * @brief Return the clamped value between 0.0f and max.
+ *
+ * It is assumed that @c max is not a NaN value. If @c a is NaN then zero will
+ * be returned for that lane.
+ */
+ASTCENC_SIMD_INLINE vfloat4 clampz(float maxv, vfloat4 a)
+{
+ // Do not reorder - second operand will return if either is NaN
+ return min(max(a, vfloat4::zero()), maxv);
+}
+
+/**
+ * @brief Return the clamped value between 0.0f and 1.0f.
+ *
+ * If @c a is NaN then zero will be returned for that lane.
+ */
+ASTCENC_SIMD_INLINE vfloat4 clampzo(vfloat4 a)
+{
+ // Do not reorder - second operand will return if either is NaN
+ return min(max(a, vfloat4::zero()), 1.0f);
+}
+
+/**
+ * @brief Return the horizontal minimum of a vector.
+ */
+ASTCENC_SIMD_INLINE float hmin_s(vfloat4 a)
+{
+ return hmin(a).lane<0>();
+}
+
+/**
+ * @brief Return the horizontal min of RGB vector lanes as a scalar.
+ */
+ASTCENC_SIMD_INLINE float hmin_rgb_s(vfloat4 a)
+{
+ a.set_lane<3>(a.lane<0>());
+ return hmin_s(a);
+}
+
+/**
+ * @brief Return the horizontal maximum of a vector.
+ */
+ASTCENC_SIMD_INLINE float hmax_s(vfloat4 a)
+{
+ return hmax(a).lane<0>();
+}
+
+/**
+ * @brief Accumulate lane-wise sums for a vector.
+ */
+ASTCENC_SIMD_INLINE void haccumulate(vfloat4& accum, vfloat4 a)
+{
+ accum = accum + a;
+}
+
+/**
+ * @brief Accumulate lane-wise sums for a masked vector.
+ */
+ASTCENC_SIMD_INLINE void haccumulate(vfloat4& accum, vfloat4 a, vmask4 m)
+{
+ a = select(vfloat4::zero(), a, m);
+ haccumulate(accum, a);
+}
+
+/**
+ * @brief Return the horizontal sum of RGB vector lanes as a scalar.
+ */
+ASTCENC_SIMD_INLINE float hadd_rgb_s(vfloat4 a)
+{
+ return a.lane<0>() + a.lane<1>() + a.lane<2>();
+}
+
+#if !defined(ASTCENC_USE_NATIVE_DOT_PRODUCT)
+
+/**
+ * @brief Return the dot product for the full 4 lanes, returning scalar.
+ */
+ASTCENC_SIMD_INLINE float dot_s(vfloat4 a, vfloat4 b)
+{
+ vfloat4 m = a * b;
+ return hadd_s(m);
+}
+
+/**
+ * @brief Return the dot product for the full 4 lanes, returning vector.
+ */
+ASTCENC_SIMD_INLINE vfloat4 dot(vfloat4 a, vfloat4 b)
+{
+ vfloat4 m = a * b;
+ return vfloat4(hadd_s(m));
+}
+
+/**
+ * @brief Return the dot product for the bottom 3 lanes, returning scalar.
+ */
+ASTCENC_SIMD_INLINE float dot3_s(vfloat4 a, vfloat4 b)
+{
+ vfloat4 m = a * b;
+ return hadd_rgb_s(m);
+}
+
+/**
+ * @brief Return the dot product for the bottom 3 lanes, returning vector.
+ */
+ASTCENC_SIMD_INLINE vfloat4 dot3(vfloat4 a, vfloat4 b)
+{
+ vfloat4 m = a * b;
+ float d3 = hadd_rgb_s(m);
+ return vfloat4(d3, d3, d3, 0.0f);
+}
+
+#endif
+
+#if !defined(ASTCENC_USE_NATIVE_POPCOUNT)
+
+/**
+ * @brief Population bit count.
+ *
+ * @param v The value to population count.
+ *
+ * @return The number of 1 bits.
+ */
+static inline int popcount(uint64_t v)
+{
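+	// Classic SWAR popcount: fold bits into 2-bit, then 4-bit, then 8-bit
+	// partial sums, and use a multiply to gather all bytes into the top byte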
+ uint64_t mask1 = 0x5555555555555555ULL;
+ uint64_t mask2 = 0x3333333333333333ULL;
+ uint64_t mask3 = 0x0F0F0F0F0F0F0F0FULL;
+ v -= (v >> 1) & mask1;
+ v = (v & mask2) + ((v >> 2) & mask2);
+ v += v >> 4;
+ v &= mask3;
+ v *= 0x0101010101010101ULL;
+ v >>= 56;
+ return static_cast<int>(v);
+}
+
+#endif
+
+/**
+ * @brief Apply signed bit transfer.
+ *
+ * @param input0 The first encoded endpoint.
+ * @param input1 The second encoded endpoint.
+ */
+static ASTCENC_SIMD_INLINE void bit_transfer_signed(
+ vint4& input0,
+ vint4& input1
+) {
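+	// Move bit 7 of input0 into bit 7 of the down-shifted input1, then
+	// sign-extend the remaining 6-bit input0 field into the range [-32, 31]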
+ input1 = lsr<1>(input1) | (input0 & 0x80);
+ input0 = lsr<1>(input0) & 0x3F;
+
+ vmask4 mask = (input0 & 0x20) != vint4::zero();
+ input0 = select(input0, input0 - 0x40, mask);
+}
+
+/**
+ * @brief Debug function to print a vector of ints.
+ */
+ASTCENC_SIMD_INLINE void print(vint4 a)
+{
+ alignas(16) int v[4];
+ storea(a, v);
+ printf("v4_i32:\n %8d %8d %8d %8d\n",
+ v[0], v[1], v[2], v[3]);
+}
+
+/**
+ * @brief Debug function to print a vector of ints, in hexadecimal.
+ */
+ASTCENC_SIMD_INLINE void printx(vint4 a)
+{
+ alignas(16) int v[4];
+ storea(a, v);
+ printf("v4_i32:\n %08x %08x %08x %08x\n",
+ v[0], v[1], v[2], v[3]);
+}
+
+/**
+ * @brief Debug function to print a vector of floats.
+ */
+ASTCENC_SIMD_INLINE void print(vfloat4 a)
+{
+ alignas(16) float v[4];
+ storea(a, v);
+ printf("v4_f32:\n %0.4f %0.4f %0.4f %0.4f\n",
+ static_cast<double>(v[0]), static_cast<double>(v[1]),
+ static_cast<double>(v[2]), static_cast<double>(v[3]));
+}
+
+/**
+ * @brief Debug function to print a vector of masks.
+ */
+ASTCENC_SIMD_INLINE void print(vmask4 a)
+{
+ print(select(vint4(0), vint4(1), a));
+}
+
+#endif // #ifndef ASTC_VECMATHLIB_COMMON_4_H_INCLUDED
diff --git a/thirdparty/astcenc/astcenc_vecmathlib_neon_4.h b/thirdparty/astcenc/astcenc_vecmathlib_neon_4.h
new file mode 100644
index 0000000000..e742eae6cb
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_vecmathlib_neon_4.h
@@ -0,0 +1,1072 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2019-2022 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+/**
+ * @brief 4x32-bit vectors, implemented using Armv8-A NEON.
+ *
+ * This module implements 4-wide 32-bit float, int, and mask vectors for
+ * Armv8-A NEON.
+ *
+ * There is a baseline level of functionality provided by all vector widths and
+ * implementations. This is implemented using identical function signatures,
+ * modulo data type, so we can use them as substitutable implementations in VLA
+ * code.
+ *
+ * The 4-wide vectors are also used as a fixed-width type, and significantly
+ * extend the functionality beyond that which is available to VLA code.
+ */
+
+#ifndef ASTC_VECMATHLIB_NEON_4_H_INCLUDED
+#define ASTC_VECMATHLIB_NEON_4_H_INCLUDED
+
+#ifndef ASTCENC_SIMD_INLINE
+ #error "Include astcenc_vecmathlib.h, do not include directly"
+#endif
+
+#include <cstdio>
+
+// ============================================================================
+// vfloat4 data type
+// ============================================================================
+
+/**
+ * @brief Data type for 4-wide floats.
+ */
+struct vfloat4
+{
+ /**
+ * @brief Construct from zero-initialized value.
+ */
+ ASTCENC_SIMD_INLINE vfloat4() = default;
+
+ /**
+ * @brief Construct from 4 values loaded from an unaligned address.
+ *
+	 * Consider using loada() instead if the data is aligned to the vector
+	 * length.
+ */
+ ASTCENC_SIMD_INLINE explicit vfloat4(const float *p)
+ {
+ m = vld1q_f32(p);
+ }
+
+ /**
+ * @brief Construct from 1 scalar value replicated across all lanes.
+ *
+ * Consider using zero() for constexpr zeros.
+ */
+ ASTCENC_SIMD_INLINE explicit vfloat4(float a)
+ {
+ m = vdupq_n_f32(a);
+ }
+
+ /**
+ * @brief Construct from 4 scalar values.
+ *
+ * The value of @c a is stored to lane 0 (LSB) in the SIMD register.
+ */
+ ASTCENC_SIMD_INLINE explicit vfloat4(float a, float b, float c, float d)
+ {
+ float v[4] { a, b, c, d };
+ m = vld1q_f32(v);
+ }
+
+ /**
+ * @brief Construct from an existing SIMD register.
+ */
+ ASTCENC_SIMD_INLINE explicit vfloat4(float32x4_t a)
+ {
+ m = a;
+ }
+
+ /**
+ * @brief Get the scalar value of a single lane.
+ */
+ template <int l> ASTCENC_SIMD_INLINE float lane() const
+ {
+ return vgetq_lane_f32(m, l);
+ }
+
+ /**
+ * @brief Set the scalar value of a single lane.
+ */
+ template <int l> ASTCENC_SIMD_INLINE void set_lane(float a)
+ {
+ m = vsetq_lane_f32(a, m, l);
+ }
+
+ /**
+ * @brief Factory that returns a vector of zeros.
+ */
+ static ASTCENC_SIMD_INLINE vfloat4 zero()
+ {
+ return vfloat4(vdupq_n_f32(0.0f));
+ }
+
+ /**
+ * @brief Factory that returns a replicated scalar loaded from memory.
+ */
+ static ASTCENC_SIMD_INLINE vfloat4 load1(const float* p)
+ {
+ return vfloat4(vld1q_dup_f32(p));
+ }
+
+ /**
+ * @brief Factory that returns a vector loaded from 16B aligned memory.
+ */
+ static ASTCENC_SIMD_INLINE vfloat4 loada(const float* p)
+ {
+ return vfloat4(vld1q_f32(p));
+ }
+
+ /**
+ * @brief Factory that returns a vector containing the lane IDs.
+ */
+ static ASTCENC_SIMD_INLINE vfloat4 lane_id()
+ {
+ alignas(16) float data[4] { 0.0f, 1.0f, 2.0f, 3.0f };
+ return vfloat4(vld1q_f32(data));
+ }
+
+ /**
+ * @brief Return a swizzled float 2.
+ */
+ template <int l0, int l1> ASTCENC_SIMD_INLINE vfloat4 swz() const
+ {
+ return vfloat4(lane<l0>(), lane<l1>(), 0.0f, 0.0f);
+ }
+
+ /**
+ * @brief Return a swizzled float 3.
+ */
+ template <int l0, int l1, int l2> ASTCENC_SIMD_INLINE vfloat4 swz() const
+ {
+ return vfloat4(lane<l0>(), lane<l1>(), lane<l2>(), 0.0f);
+ }
+
+ /**
+ * @brief Return a swizzled float 4.
+ */
+ template <int l0, int l1, int l2, int l3> ASTCENC_SIMD_INLINE vfloat4 swz() const
+ {
+ return vfloat4(lane<l0>(), lane<l1>(), lane<l2>(), lane<l3>());
+ }
+
+ /**
+	 * @brief The vector data, stored as a native SIMD register.
+ */
+ float32x4_t m;
+};
+
+// ============================================================================
+// vint4 data type
+// ============================================================================
+
+/**
+ * @brief Data type for 4-wide ints.
+ */
+struct vint4
+{
+ /**
+ * @brief Construct from zero-initialized value.
+ */
+ ASTCENC_SIMD_INLINE vint4() = default;
+
+ /**
+ * @brief Construct from 4 values loaded from an unaligned address.
+ *
+	 * Consider using loada() instead if the data is aligned to the vector
+	 * length.
+ */
+ ASTCENC_SIMD_INLINE explicit vint4(const int *p)
+ {
+ m = vld1q_s32(p);
+ }
+
+ /**
+ * @brief Construct from 4 uint8_t loaded from an unaligned address.
+ */
+ ASTCENC_SIMD_INLINE explicit vint4(const uint8_t *p)
+ {
+ // Cast is safe - NEON loads are allowed to be unaligned
+ uint32x2_t t8 = vld1_dup_u32(reinterpret_cast<const uint32_t*>(p));
+ uint16x4_t t16 = vget_low_u16(vmovl_u8(vreinterpret_u8_u32(t8)));
+ m = vreinterpretq_s32_u32(vmovl_u16(t16));
+ }
+
+ /**
+ * @brief Construct from 1 scalar value replicated across all lanes.
+ *
+	 * Consider using vint4::zero() for constexpr zeros.
+ */
+ ASTCENC_SIMD_INLINE explicit vint4(int a)
+ {
+ m = vdupq_n_s32(a);
+ }
+
+ /**
+ * @brief Construct from 4 scalar values.
+ *
+ * The value of @c a is stored to lane 0 (LSB) in the SIMD register.
+ */
+ ASTCENC_SIMD_INLINE explicit vint4(int a, int b, int c, int d)
+ {
+ int v[4] { a, b, c, d };
+ m = vld1q_s32(v);
+ }
+
+ /**
+ * @brief Construct from an existing SIMD register.
+ */
+ ASTCENC_SIMD_INLINE explicit vint4(int32x4_t a)
+ {
+ m = a;
+ }
+
+ /**
+ * @brief Get the scalar from a single lane.
+ */
+ template <int l> ASTCENC_SIMD_INLINE int lane() const
+ {
+ return vgetq_lane_s32(m, l);
+ }
+
+ /**
+ * @brief Set the scalar value of a single lane.
+ */
+ template <int l> ASTCENC_SIMD_INLINE void set_lane(int a)
+ {
+ m = vsetq_lane_s32(a, m, l);
+ }
+
+ /**
+ * @brief Factory that returns a vector of zeros.
+ */
+ static ASTCENC_SIMD_INLINE vint4 zero()
+ {
+ return vint4(0);
+ }
+
+ /**
+ * @brief Factory that returns a replicated scalar loaded from memory.
+ */
+ static ASTCENC_SIMD_INLINE vint4 load1(const int* p)
+ {
+ return vint4(*p);
+ }
+
+ /**
+ * @brief Factory that returns a vector loaded from 16B aligned memory.
+ */
+ static ASTCENC_SIMD_INLINE vint4 loada(const int* p)
+ {
+ return vint4(p);
+ }
+
+ /**
+ * @brief Factory that returns a vector containing the lane IDs.
+ */
+ static ASTCENC_SIMD_INLINE vint4 lane_id()
+ {
+ alignas(16) static const int data[4] { 0, 1, 2, 3 };
+ return vint4(vld1q_s32(data));
+ }
+
+ /**
+	 * @brief The vector data, stored as a native SIMD register.
+ */
+ int32x4_t m;
+};
+
+// ============================================================================
+// vmask4 data type
+// ============================================================================
+
+/**
+ * @brief Data type for 4-wide control plane masks.
+ */
+struct vmask4
+{
+ /**
+ * @brief Construct from an existing SIMD register.
+ */
+ ASTCENC_SIMD_INLINE explicit vmask4(uint32x4_t a)
+ {
+ m = a;
+ }
+
+#if !defined(_MSC_VER)
+ /**
+ * @brief Construct from an existing SIMD register.
+ */
+ ASTCENC_SIMD_INLINE explicit vmask4(int32x4_t a)
+ {
+ m = vreinterpretq_u32_s32(a);
+ }
+#endif
+
+ /**
+ * @brief Construct from 1 scalar value.
+ */
+ ASTCENC_SIMD_INLINE explicit vmask4(bool a)
+ {
+ m = vreinterpretq_u32_s32(vdupq_n_s32(a == true ? -1 : 0));
+ }
+
+ /**
+ * @brief Construct from 4 scalar values.
+ *
+ * The value of @c a is stored to lane 0 (LSB) in the SIMD register.
+ */
+ ASTCENC_SIMD_INLINE explicit vmask4(bool a, bool b, bool c, bool d)
+ {
+ int v[4] {
+ a == true ? -1 : 0,
+ b == true ? -1 : 0,
+ c == true ? -1 : 0,
+ d == true ? -1 : 0
+ };
+
+ int32x4_t ms = vld1q_s32(v);
+ m = vreinterpretq_u32_s32(ms);
+ }
+
+ /**
+ * @brief Get the scalar from a single lane.
+ */
+ template <int32_t l> ASTCENC_SIMD_INLINE uint32_t lane() const
+ {
+ return vgetq_lane_u32(m, l);
+ }
+
+ /**
+	 * @brief The vector mask data, stored as a native SIMD register.
+ */
+ uint32x4_t m;
+};
+
+// ============================================================================
+// vmask4 operators and functions
+// ============================================================================
+
+/**
+ * @brief Overload: mask union (or).
+ */
+ASTCENC_SIMD_INLINE vmask4 operator|(vmask4 a, vmask4 b)
+{
+ return vmask4(vorrq_u32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: mask intersect (and).
+ */
+ASTCENC_SIMD_INLINE vmask4 operator&(vmask4 a, vmask4 b)
+{
+ return vmask4(vandq_u32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: mask difference (xor).
+ */
+ASTCENC_SIMD_INLINE vmask4 operator^(vmask4 a, vmask4 b)
+{
+ return vmask4(veorq_u32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: mask invert (not).
+ */
+ASTCENC_SIMD_INLINE vmask4 operator~(vmask4 a)
+{
+ return vmask4(vmvnq_u32(a.m));
+}
+
+/**
+ * @brief Return a 4-bit mask code indicating mask status.
+ *
+ * bit0 = lane 0
+ */
+ASTCENC_SIMD_INLINE unsigned int mask(vmask4 a)
+{
+ static const int shifta[4] { 0, 1, 2, 3 };
+ static const int32x4_t shift = vld1q_s32(shifta);
+
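+	// Extract the MSB of each lane, shift lane i left by i, and sum across
+	// lanes to pack the bits into a scalar mask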
+ uint32x4_t tmp = vshrq_n_u32(a.m, 31);
+ return vaddvq_u32(vshlq_u32(tmp, shift));
+}
+
+// ============================================================================
+// vint4 operators and functions
+// ============================================================================
+
+/**
+ * @brief Overload: vector by vector addition.
+ */
+ASTCENC_SIMD_INLINE vint4 operator+(vint4 a, vint4 b)
+{
+ return vint4(vaddq_s32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector subtraction.
+ */
+ASTCENC_SIMD_INLINE vint4 operator-(vint4 a, vint4 b)
+{
+ return vint4(vsubq_s32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector multiplication.
+ */
+ASTCENC_SIMD_INLINE vint4 operator*(vint4 a, vint4 b)
+{
+ return vint4(vmulq_s32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector bit invert.
+ */
+ASTCENC_SIMD_INLINE vint4 operator~(vint4 a)
+{
+ return vint4(vmvnq_s32(a.m));
+}
+
+/**
+ * @brief Overload: vector by vector bitwise or.
+ */
+ASTCENC_SIMD_INLINE vint4 operator|(vint4 a, vint4 b)
+{
+ return vint4(vorrq_s32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector bitwise and.
+ */
+ASTCENC_SIMD_INLINE vint4 operator&(vint4 a, vint4 b)
+{
+ return vint4(vandq_s32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector bitwise xor.
+ */
+ASTCENC_SIMD_INLINE vint4 operator^(vint4 a, vint4 b)
+{
+ return vint4(veorq_s32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector equality.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator==(vint4 a, vint4 b)
+{
+ return vmask4(vceqq_s32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector inequality.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator!=(vint4 a, vint4 b)
+{
+ return ~vmask4(vceqq_s32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector less than.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator<(vint4 a, vint4 b)
+{
+ return vmask4(vcltq_s32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector greater than.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator>(vint4 a, vint4 b)
+{
+ return vmask4(vcgtq_s32(a.m, b.m));
+}
+
+/**
+ * @brief Logical shift left.
+ */
+template <int s> ASTCENC_SIMD_INLINE vint4 lsl(vint4 a)
+{
+ return vint4(vshlq_s32(a.m, vdupq_n_s32(s)));
+}
+
+/**
+ * @brief Logical shift right.
+ */
+template <int s> ASTCENC_SIMD_INLINE vint4 lsr(vint4 a)
+{
+ uint32x4_t ua = vreinterpretq_u32_s32(a.m);
+ ua = vshlq_u32(ua, vdupq_n_s32(-s));
+ return vint4(vreinterpretq_s32_u32(ua));
+}
+
+/**
+ * @brief Arithmetic shift right.
+ */
+template <int s> ASTCENC_SIMD_INLINE vint4 asr(vint4 a)
+{
+ return vint4(vshlq_s32(a.m, vdupq_n_s32(-s)));
+}
+
+/**
+ * @brief Return the min vector of two vectors.
+ */
+ASTCENC_SIMD_INLINE vint4 min(vint4 a, vint4 b)
+{
+ return vint4(vminq_s32(a.m, b.m));
+}
+
+/**
+ * @brief Return the max vector of two vectors.
+ */
+ASTCENC_SIMD_INLINE vint4 max(vint4 a, vint4 b)
+{
+ return vint4(vmaxq_s32(a.m, b.m));
+}
+
+/**
+ * @brief Return the horizontal minimum of a vector.
+ */
+ASTCENC_SIMD_INLINE vint4 hmin(vint4 a)
+{
+ return vint4(vminvq_s32(a.m));
+}
+
+/**
+ * @brief Return the horizontal maximum of a vector.
+ */
+ASTCENC_SIMD_INLINE vint4 hmax(vint4 a)
+{
+ return vint4(vmaxvq_s32(a.m));
+}
+
+/**
+ * @brief Return the horizontal sum of a vector.
+ */
+ASTCENC_SIMD_INLINE int hadd_s(vint4 a)
+{
+ int32x2_t t = vadd_s32(vget_high_s32(a.m), vget_low_s32(a.m));
+ return vget_lane_s32(vpadd_s32(t, t), 0);
+}
+
+/**
+ * @brief Store a vector to a 16B aligned memory address.
+ */
+ASTCENC_SIMD_INLINE void storea(vint4 a, int* p)
+{
+ vst1q_s32(p, a.m);
+}
+
+/**
+ * @brief Store a vector to an unaligned memory address.
+ */
+ASTCENC_SIMD_INLINE void store(vint4 a, int* p)
+{
+ vst1q_s32(p, a.m);
+}
+
+/**
+ * @brief Store lowest N (vector width) bytes into an unaligned address.
+ */
+ASTCENC_SIMD_INLINE void store_nbytes(vint4 a, uint8_t* p)
+{
+ vst1q_lane_s32(reinterpret_cast<int32_t*>(p), a.m, 0);
+}
+
+/**
+ * @brief Gather N (vector width) values from the array at the given indices.
+ */
+ASTCENC_SIMD_INLINE vint4 gatheri(const int* base, vint4 indices)
+{
+ alignas(16) int idx[4];
+ storea(indices, idx);
+ alignas(16) int vals[4];
+ vals[0] = base[idx[0]];
+ vals[1] = base[idx[1]];
+ vals[2] = base[idx[2]];
+ vals[3] = base[idx[3]];
+ return vint4(vals);
+}
+
+/**
+ * @brief Pack low 8 bits of N (vector width) lanes into bottom of vector.
+ */
+ASTCENC_SIMD_INLINE vint4 pack_low_bytes(vint4 a)
+{
+ alignas(16) uint8_t shuf[16] {
+ 0, 4, 8, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ };
+ uint8x16_t idx = vld1q_u8(shuf);
+ int8x16_t av = vreinterpretq_s8_s32(a.m);
+ return vint4(vreinterpretq_s32_s8(vqtbl1q_s8(av, idx)));
+}
+
+/**
+ * @brief Return lanes from @c b if @c cond is set, else @c a.
+ */
+ASTCENC_SIMD_INLINE vint4 select(vint4 a, vint4 b, vmask4 cond)
+{
+ return vint4(vbslq_s32(cond.m, b.m, a.m));
+}
+
+// ============================================================================
+// vfloat4 operators and functions
+// ============================================================================
+
+/**
+ * @brief Overload: vector by vector addition.
+ */
+ASTCENC_SIMD_INLINE vfloat4 operator+(vfloat4 a, vfloat4 b)
+{
+ return vfloat4(vaddq_f32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector subtraction.
+ */
+ASTCENC_SIMD_INLINE vfloat4 operator-(vfloat4 a, vfloat4 b)
+{
+ return vfloat4(vsubq_f32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector multiplication.
+ */
+ASTCENC_SIMD_INLINE vfloat4 operator*(vfloat4 a, vfloat4 b)
+{
+ return vfloat4(vmulq_f32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector division.
+ */
+ASTCENC_SIMD_INLINE vfloat4 operator/(vfloat4 a, vfloat4 b)
+{
+ return vfloat4(vdivq_f32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector equality.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator==(vfloat4 a, vfloat4 b)
+{
+ return vmask4(vceqq_f32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector inequality.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator!=(vfloat4 a, vfloat4 b)
+{
+ return vmask4(vmvnq_u32(vceqq_f32(a.m, b.m)));
+}
+
+/**
+ * @brief Overload: vector by vector less than.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator<(vfloat4 a, vfloat4 b)
+{
+ return vmask4(vcltq_f32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector greater than.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator>(vfloat4 a, vfloat4 b)
+{
+ return vmask4(vcgtq_f32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector less than or equal.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator<=(vfloat4 a, vfloat4 b)
+{
+ return vmask4(vcleq_f32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector greater than or equal.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator>=(vfloat4 a, vfloat4 b)
+{
+ return vmask4(vcgeq_f32(a.m, b.m));
+}
+
+/**
+ * @brief Return the min vector of two vectors.
+ *
+ * If either lane value is NaN, @c b will be returned for that lane.
+ */
+ASTCENC_SIMD_INLINE vfloat4 min(vfloat4 a, vfloat4 b)
+{
+ // Do not reorder - second operand will return if either is NaN
+ return vfloat4(vminnmq_f32(a.m, b.m));
+}
+
+/**
+ * @brief Return the max vector of two vectors.
+ *
+ * If either lane value is NaN, @c b will be returned for that lane.
+ */
+ASTCENC_SIMD_INLINE vfloat4 max(vfloat4 a, vfloat4 b)
+{
+ // Do not reorder - second operand will return if either is NaN
+ return vfloat4(vmaxnmq_f32(a.m, b.m));
+}
+
+/**
+ * @brief Return the absolute value of the float vector.
+ */
+ASTCENC_SIMD_INLINE vfloat4 abs(vfloat4 a)
+{
+ float32x4_t zero = vdupq_n_f32(0.0f);
+ float32x4_t inv = vsubq_f32(zero, a.m);
+ return vfloat4(vmaxq_f32(a.m, inv));
+}
+
+/**
+ * @brief Return a float rounded to the nearest integer value.
+ */
+ASTCENC_SIMD_INLINE vfloat4 round(vfloat4 a)
+{
+ return vfloat4(vrndnq_f32(a.m));
+}
+
+/**
+ * @brief Return the horizontal minimum of a vector.
+ */
+ASTCENC_SIMD_INLINE vfloat4 hmin(vfloat4 a)
+{
+ return vfloat4(vminvq_f32(a.m));
+}
+
+/**
+ * @brief Return the horizontal maximum of a vector.
+ */
+ASTCENC_SIMD_INLINE vfloat4 hmax(vfloat4 a)
+{
+ return vfloat4(vmaxvq_f32(a.m));
+}
+
+/**
+ * @brief Return the horizontal sum of a vector.
+ */
+ASTCENC_SIMD_INLINE float hadd_s(vfloat4 a)
+{
+	// Perform halving add to ensure invariance; we cannot use vaddvq_f32 as this
+ // does (0 + 1 + 2 + 3) which is not invariant with x86 (0 + 2) + (1 + 3).
+ float32x2_t t = vadd_f32(vget_high_f32(a.m), vget_low_f32(a.m));
+ return vget_lane_f32(vpadd_f32(t, t), 0);
+}
+
+/**
+ * @brief Return the sqrt of the lanes in the vector.
+ */
+ASTCENC_SIMD_INLINE vfloat4 sqrt(vfloat4 a)
+{
+ return vfloat4(vsqrtq_f32(a.m));
+}
+
+/**
+ * @brief Return lanes from @c b if @c cond is set, else @c a.
+ */
+ASTCENC_SIMD_INLINE vfloat4 select(vfloat4 a, vfloat4 b, vmask4 cond)
+{
+ return vfloat4(vbslq_f32(cond.m, b.m, a.m));
+}
+
+/**
+ * @brief Return lanes from @c b if MSB of @c cond is set, else @c a.
+ */
+ASTCENC_SIMD_INLINE vfloat4 select_msb(vfloat4 a, vfloat4 b, vmask4 cond)
+{
+ static const uint32x4_t msb = vdupq_n_u32(0x80000000u);
+ uint32x4_t mask = vcgeq_u32(cond.m, msb);
+ return vfloat4(vbslq_f32(mask, b.m, a.m));
+}
+
+/**
+ * @brief Load a vector of gathered results from an array.
+ */
+ASTCENC_SIMD_INLINE vfloat4 gatherf(const float* base, vint4 indices)
+{
+ alignas(16) int idx[4];
+ storea(indices, idx);
+ alignas(16) float vals[4];
+ vals[0] = base[idx[0]];
+ vals[1] = base[idx[1]];
+ vals[2] = base[idx[2]];
+ vals[3] = base[idx[3]];
+ return vfloat4(vals);
+}
+
+/**
+ * @brief Store a vector to an unaligned memory address.
+ */
+ASTCENC_SIMD_INLINE void store(vfloat4 a, float* p)
+{
+ vst1q_f32(p, a.m);
+}
+
+/**
+ * @brief Store a vector to a 16B aligned memory address.
+ */
+ASTCENC_SIMD_INLINE void storea(vfloat4 a, float* p)
+{
+ vst1q_f32(p, a.m);
+}
+
+/**
+ * @brief Return an integer value for a float vector, using truncation.
+ */
+ASTCENC_SIMD_INLINE vint4 float_to_int(vfloat4 a)
+{
+ return vint4(vcvtq_s32_f32(a.m));
+}
+
+/**
+ * @brief Return an integer value for a float vector, using round-to-nearest.
+ */
+ASTCENC_SIMD_INLINE vint4 float_to_int_rtn(vfloat4 a)
+{
+ a = round(a);
+ return vint4(vcvtq_s32_f32(a.m));
+}
+
+/**
+ * @brief Return a float value for an integer vector.
+ */
+ASTCENC_SIMD_INLINE vfloat4 int_to_float(vint4 a)
+{
+ return vfloat4(vcvtq_f32_s32(a.m));
+}
+
+/**
+ * @brief Return a float16 value for a float vector, using round-to-nearest.
+ */
+ASTCENC_SIMD_INLINE vint4 float_to_float16(vfloat4 a)
+{
+ // Generate float16 value
+ float16x4_t f16 = vcvt_f16_f32(a.m);
+
+ // Convert each 16-bit float pattern to a 32-bit pattern
+ uint16x4_t u16 = vreinterpret_u16_f16(f16);
+ uint32x4_t u32 = vmovl_u16(u16);
+ return vint4(vreinterpretq_s32_u32(u32));
+}
+
+/**
+ * @brief Return a float16 value for a float scalar, using round-to-nearest.
+ */
+static inline uint16_t float_to_float16(float a)
+{
+ vfloat4 av(a);
+ return static_cast<uint16_t>(float_to_float16(av).lane<0>());
+}
+
+/**
+ * @brief Return a float value for a float16 vector.
+ */
+ASTCENC_SIMD_INLINE vfloat4 float16_to_float(vint4 a)
+{
+ // Convert each 32-bit float pattern to a 16-bit pattern
+ uint32x4_t u32 = vreinterpretq_u32_s32(a.m);
+ uint16x4_t u16 = vmovn_u32(u32);
+ float16x4_t f16 = vreinterpret_f16_u16(u16);
+
+ // Generate float16 value
+ return vfloat4(vcvt_f32_f16(f16));
+}
+
+/**
+ * @brief Return a float value for a float16 scalar.
+ */
+ASTCENC_SIMD_INLINE float float16_to_float(uint16_t a)
+{
+ vint4 av(a);
+ return float16_to_float(av).lane<0>();
+}
+
+/**
+ * @brief Return a float value as an integer bit pattern (i.e. no conversion).
+ *
+ * It is a common trick to convert floats into integer bit patterns, perform
+ * some bit hackery based on knowledge they are IEEE 754 layout, and then
+ * convert them back again. This is the first half of that flip.
+ */
+ASTCENC_SIMD_INLINE vint4 float_as_int(vfloat4 a)
+{
+ return vint4(vreinterpretq_s32_f32(a.m));
+}
+
+/**
+ * @brief Return an integer value as a float bit pattern (i.e. no conversion).
+ *
+ * It is a common trick to convert floats into integer bit patterns, perform
+ * some bit hackery based on knowledge they are IEEE 754 layout, and then
+ * convert them back again. This is the second half of that flip.
+ */
+ASTCENC_SIMD_INLINE vfloat4 int_as_float(vint4 v)
+{
+ return vfloat4(vreinterpretq_f32_s32(v.m));
+}
+
+/**
+ * @brief Prepare a vtable lookup table for use with the native SIMD size.
+ */
+ASTCENC_SIMD_INLINE void vtable_prepare(vint4 t0, vint4& t0p)
+{
+ t0p = t0;
+}
+
+/**
+ * @brief Prepare a vtable lookup table for use with the native SIMD size.
+ */
+ASTCENC_SIMD_INLINE void vtable_prepare(vint4 t0, vint4 t1, vint4& t0p, vint4& t1p)
+{
+ t0p = t0;
+ t1p = t1;
+}
+
+/**
+ * @brief Prepare a vtable lookup table for use with the native SIMD size.
+ */
+ASTCENC_SIMD_INLINE void vtable_prepare(
+ vint4 t0, vint4 t1, vint4 t2, vint4 t3,
+ vint4& t0p, vint4& t1p, vint4& t2p, vint4& t3p)
+{
+ t0p = t0;
+ t1p = t1;
+ t2p = t2;
+ t3p = t3;
+}
+
+/**
+ * @brief Perform an 8-bit 16-entry table lookup, with 32-bit indexes.
+ */
+ASTCENC_SIMD_INLINE vint4 vtable_8bt_32bi(vint4 t0, vint4 idx)
+{
+ int8x16_t table {
+ vreinterpretq_s8_s32(t0.m)
+ };
+
+ // Set index byte above max index for unused bytes so table lookup returns zero
+ int32x4_t idx_masked = vorrq_s32(idx.m, vdupq_n_s32(0xFFFFFF00));
+ uint8x16_t idx_bytes = vreinterpretq_u8_s32(idx_masked);
+
+ return vint4(vreinterpretq_s32_s8(vqtbl1q_s8(table, idx_bytes)));
+}
+
+/**
+ * @brief Perform an 8-bit 32-entry table lookup, with 32-bit indexes.
+ */
+ASTCENC_SIMD_INLINE vint4 vtable_8bt_32bi(vint4 t0, vint4 t1, vint4 idx)
+{
+ int8x16x2_t table {
+ vreinterpretq_s8_s32(t0.m),
+ vreinterpretq_s8_s32(t1.m)
+ };
+
+ // Set index byte above max index for unused bytes so table lookup returns zero
+ int32x4_t idx_masked = vorrq_s32(idx.m, vdupq_n_s32(0xFFFFFF00));
+ uint8x16_t idx_bytes = vreinterpretq_u8_s32(idx_masked);
+
+ return vint4(vreinterpretq_s32_s8(vqtbl2q_s8(table, idx_bytes)));
+}
+
+/**
+ * @brief Perform an 8-bit 64-entry table lookup, with 32-bit indexes.
+ */
+ASTCENC_SIMD_INLINE vint4 vtable_8bt_32bi(vint4 t0, vint4 t1, vint4 t2, vint4 t3, vint4 idx)
+{
+ int8x16x4_t table {
+ vreinterpretq_s8_s32(t0.m),
+ vreinterpretq_s8_s32(t1.m),
+ vreinterpretq_s8_s32(t2.m),
+ vreinterpretq_s8_s32(t3.m)
+ };
+
+ // Set index byte above max index for unused bytes so table lookup returns zero
+ int32x4_t idx_masked = vorrq_s32(idx.m, vdupq_n_s32(0xFFFFFF00));
+ uint8x16_t idx_bytes = vreinterpretq_u8_s32(idx_masked);
+
+ return vint4(vreinterpretq_s32_s8(vqtbl4q_s8(table, idx_bytes)));
+}
+
+/**
+ * @brief Return a vector of interleaved RGBA data.
+ *
+ * Input vectors have the value stored in the bottom 8 bits of each lane,
+ * with high bits set to zero.
+ *
+ * Output vector stores a single RGBA texel packed in each lane.
+ */
+ASTCENC_SIMD_INLINE vint4 interleave_rgba8(vint4 r, vint4 g, vint4 b, vint4 a)
+{
+ return r + lsl<8>(g) + lsl<16>(b) + lsl<24>(a);
+}
+
+/**
+ * @brief Store a vector, skipping masked lanes.
+ *
+ * All masked lanes must be at the end of the vector, after all non-masked lanes.
+ */
+ASTCENC_SIMD_INLINE void store_lanes_masked(int* base, vint4 data, vmask4 mask)
+{
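+	// Active lanes are contiguous from lane 0, so the highest set mask lane
+	// determines how many leading values need to be stored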
+ if (mask.lane<3>())
+ {
+ store(data, base);
+ }
+ else if (mask.lane<2>())
+ {
+ base[0] = data.lane<0>();
+ base[1] = data.lane<1>();
+ base[2] = data.lane<2>();
+ }
+ else if (mask.lane<1>())
+ {
+ base[0] = data.lane<0>();
+ base[1] = data.lane<1>();
+ }
+ else if (mask.lane<0>())
+ {
+ base[0] = data.lane<0>();
+ }
+}
+
+#define ASTCENC_USE_NATIVE_POPCOUNT 1
+
+/**
+ * @brief Population bit count.
+ *
+ * @param v The value to population count.
+ *
+ * @return The number of 1 bits.
+ */
+ASTCENC_SIMD_INLINE int popcount(uint64_t v)
+{
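+	// Count the set bits in each byte with vcnt, then sum the byte counts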
+ return static_cast<int>(vaddlv_u8(vcnt_u8(vcreate_u8(v))));
+}
+
+#endif // #ifndef ASTC_VECMATHLIB_NEON_4_H_INCLUDED
diff --git a/thirdparty/astcenc/astcenc_vecmathlib_none_4.h b/thirdparty/astcenc/astcenc_vecmathlib_none_4.h
new file mode 100644
index 0000000000..d9b52be3e4
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_vecmathlib_none_4.h
@@ -0,0 +1,1169 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2019-2022 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+/**
+ * @brief 4x32-bit vectors, implemented using plain C++.
+ *
+ * This module implements 4-wide 32-bit float, int, and mask vectors. This
+ * module provides a scalar fallback for VLA code, primarily useful for
+ * debugging VLA algorithms without the complexity of handling SIMD. Only the
+ * baseline level of functionality needed to support VLA is provided.
+ *
+ * Note that the vector conditional operators implemented by this module are
+ * designed to behave like SIMD conditional operators that generate lane masks.
+ * Rather than returning 0/1 booleans like normal C++ code they will return
+ * 0/-1 to give a full lane-width bitmask.
+ *
+ * Note that the documentation for this module still talks about "vectors" to
+ * help developers think about the implied VLA behavior when writing optimized
+ * paths.
+ */
+
+#ifndef ASTC_VECMATHLIB_NONE_4_H_INCLUDED
+#define ASTC_VECMATHLIB_NONE_4_H_INCLUDED
+
+#ifndef ASTCENC_SIMD_INLINE
+ #error "Include astcenc_vecmathlib.h, do not include directly"
+#endif
+
+#include <algorithm>
+#include <cassert>
+#include <cmath>
+#include <cstdio>
+#include <cstring>
+#include <cfenv>
+
+// ============================================================================
+// vfloat4 data type
+// ============================================================================
+
+/**
+ * @brief Data type for 4-wide floats.
+ */
+struct vfloat4
+{
+ /**
+ * @brief Construct from zero-initialized value.
+ */
+ ASTCENC_SIMD_INLINE vfloat4() = default;
+
+ /**
+ * @brief Construct from 4 values loaded from an unaligned address.
+ *
+ * Consider using loada() which is better with wider VLA vectors if data is
+ * aligned to vector length.
+ */
+ ASTCENC_SIMD_INLINE explicit vfloat4(const float* p)
+ {
+ m[0] = p[0];
+ m[1] = p[1];
+ m[2] = p[2];
+ m[3] = p[3];
+ }
+
+ /**
+	 * @brief Construct from 1 scalar value replicated across all lanes.
+ *
+ * Consider using zero() for constexpr zeros.
+ */
+ ASTCENC_SIMD_INLINE explicit vfloat4(float a)
+ {
+ m[0] = a;
+ m[1] = a;
+ m[2] = a;
+ m[3] = a;
+ }
+
+ /**
+ * @brief Construct from 4 scalar values.
+ *
+ * The value of @c a is stored to lane 0 (LSB) in the SIMD register.
+ */
+ ASTCENC_SIMD_INLINE explicit vfloat4(float a, float b, float c, float d)
+ {
+ m[0] = a;
+ m[1] = b;
+ m[2] = c;
+ m[3] = d;
+ }
+
+ /**
+ * @brief Get the scalar value of a single lane.
+ */
+ template <int l> ASTCENC_SIMD_INLINE float lane() const
+ {
+ return m[l];
+ }
+
+ /**
+ * @brief Set the scalar value of a single lane.
+ */
+ template <int l> ASTCENC_SIMD_INLINE void set_lane(float a)
+ {
+ m[l] = a;
+ }
+
+ /**
+ * @brief Factory that returns a vector of zeros.
+ */
+ static ASTCENC_SIMD_INLINE vfloat4 zero()
+ {
+ return vfloat4(0.0f);
+ }
+
+ /**
+ * @brief Factory that returns a replicated scalar loaded from memory.
+ */
+ static ASTCENC_SIMD_INLINE vfloat4 load1(const float* p)
+ {
+ return vfloat4(*p);
+ }
+
+ /**
+ * @brief Factory that returns a vector loaded from aligned memory.
+ */
+ static ASTCENC_SIMD_INLINE vfloat4 loada(const float* p)
+ {
+ return vfloat4(p);
+ }
+
+ /**
+ * @brief Factory that returns a vector containing the lane IDs.
+ */
+ static ASTCENC_SIMD_INLINE vfloat4 lane_id()
+ {
+ return vfloat4(0.0f, 1.0f, 2.0f, 3.0f);
+ }
+
+ /**
+ * @brief Return a swizzled float 2.
+ */
+ template <int l0, int l1> ASTCENC_SIMD_INLINE vfloat4 swz() const
+ {
+ return vfloat4(lane<l0>(), lane<l1>(), 0.0f, 0.0f);
+ }
+
+ /**
+ * @brief Return a swizzled float 3.
+ */
+ template <int l0, int l1, int l2> ASTCENC_SIMD_INLINE vfloat4 swz() const
+ {
+ return vfloat4(lane<l0>(), lane<l1>(), lane<l2>(), 0.0f);
+ }
+
+ /**
+ * @brief Return a swizzled float 4.
+ */
+ template <int l0, int l1, int l2, int l3> ASTCENC_SIMD_INLINE vfloat4 swz() const
+ {
+ return vfloat4(lane<l0>(), lane<l1>(), lane<l2>(), lane<l3>());
+ }
+
+ /**
+	 * @brief The vector data, stored as an array of scalar lanes.
+ */
+ float m[4];
+};
+
+// ============================================================================
+// vint4 data type
+// ============================================================================
+
+/**
+ * @brief Data type for 4-wide ints.
+ */
+struct vint4
+{
+ /**
+ * @brief Construct from zero-initialized value.
+ */
+ ASTCENC_SIMD_INLINE vint4() = default;
+
+ /**
+ * @brief Construct from 4 values loaded from an unaligned address.
+ *
+ * Consider using vint4::loada() which is better with wider VLA vectors
+ * if data is aligned.
+ */
+ ASTCENC_SIMD_INLINE explicit vint4(const int* p)
+ {
+ m[0] = p[0];
+ m[1] = p[1];
+ m[2] = p[2];
+ m[3] = p[3];
+ }
+
+ /**
+ * @brief Construct from 4 uint8_t loaded from an unaligned address.
+ */
+ ASTCENC_SIMD_INLINE explicit vint4(const uint8_t *p)
+ {
+ m[0] = p[0];
+ m[1] = p[1];
+ m[2] = p[2];
+ m[3] = p[3];
+ }
+
+ /**
+ * @brief Construct from 4 scalar values.
+ *
+ * The value of @c a is stored to lane 0 (LSB) in the SIMD register.
+ */
+ ASTCENC_SIMD_INLINE explicit vint4(int a, int b, int c, int d)
+ {
+ m[0] = a;
+ m[1] = b;
+ m[2] = c;
+ m[3] = d;
+ }
+
+ /**
+	 * @brief Construct from 1 scalar value replicated across all lanes.
+ *
+ * Consider using vint4::zero() for constexpr zeros.
+ */
+ ASTCENC_SIMD_INLINE explicit vint4(int a)
+ {
+ m[0] = a;
+ m[1] = a;
+ m[2] = a;
+ m[3] = a;
+ }
+
+ /**
+ * @brief Get the scalar value of a single lane.
+ */
+ template <int l> ASTCENC_SIMD_INLINE int lane() const
+ {
+ return m[l];
+ }
+
+ /**
+ * @brief Set the scalar value of a single lane.
+ */
+ template <int l> ASTCENC_SIMD_INLINE void set_lane(int a)
+ {
+ m[l] = a;
+ }
+
+ /**
+ * @brief Factory that returns a vector of zeros.
+ */
+ static ASTCENC_SIMD_INLINE vint4 zero()
+ {
+ return vint4(0);
+ }
+
+ /**
+ * @brief Factory that returns a replicated scalar loaded from memory.
+ */
+ static ASTCENC_SIMD_INLINE vint4 load1(const int* p)
+ {
+ return vint4(*p);
+ }
+
+ /**
+ * @brief Factory that returns a vector loaded from 16B aligned memory.
+ */
+ static ASTCENC_SIMD_INLINE vint4 loada(const int* p)
+ {
+ return vint4(p);
+ }
+
+ /**
+ * @brief Factory that returns a vector containing the lane IDs.
+ */
+ static ASTCENC_SIMD_INLINE vint4 lane_id()
+ {
+ return vint4(0, 1, 2, 3);
+ }
+
+ /**
+	 * @brief The vector data, stored as an array of scalar lanes.
+ */
+ int m[4];
+};
+
+// ============================================================================
+// vmask4 data type
+// ============================================================================
+
+/**
+ * @brief Data type for 4-wide control plane masks.
+ */
+struct vmask4
+{
+ /**
+ * @brief Construct from an existing mask value.
+ */
+ ASTCENC_SIMD_INLINE explicit vmask4(int* p)
+ {
+ m[0] = p[0];
+ m[1] = p[1];
+ m[2] = p[2];
+ m[3] = p[3];
+ }
+
+ /**
+ * @brief Construct from 1 scalar value.
+ */
+ ASTCENC_SIMD_INLINE explicit vmask4(bool a)
+ {
+ m[0] = a == false ? 0 : -1;
+ m[1] = a == false ? 0 : -1;
+ m[2] = a == false ? 0 : -1;
+ m[3] = a == false ? 0 : -1;
+ }
+
+ /**
+ * @brief Construct from 4 scalar values.
+ *
+ * The value of @c a is stored to lane 0 (LSB) in the SIMD register.
+ */
+ ASTCENC_SIMD_INLINE explicit vmask4(bool a, bool b, bool c, bool d)
+ {
+ m[0] = a == false ? 0 : -1;
+ m[1] = b == false ? 0 : -1;
+ m[2] = c == false ? 0 : -1;
+ m[3] = d == false ? 0 : -1;
+ }
+
+ /**
+	 * @brief The vector mask data, stored as an array of scalar lanes.
+ */
+ int m[4];
+};
+
+// ============================================================================
+// vmask4 operators and functions
+// ============================================================================
+
+/**
+ * @brief Overload: mask union (or).
+ */
+ASTCENC_SIMD_INLINE vmask4 operator|(vmask4 a, vmask4 b)
+{
+ return vmask4(a.m[0] | b.m[0],
+ a.m[1] | b.m[1],
+ a.m[2] | b.m[2],
+ a.m[3] | b.m[3]);
+}
+
+/**
+ * @brief Overload: mask intersect (and).
+ */
+ASTCENC_SIMD_INLINE vmask4 operator&(vmask4 a, vmask4 b)
+{
+ return vmask4(a.m[0] & b.m[0],
+ a.m[1] & b.m[1],
+ a.m[2] & b.m[2],
+ a.m[3] & b.m[3]);
+}
+
+/**
+ * @brief Overload: mask difference (xor).
+ */
+ASTCENC_SIMD_INLINE vmask4 operator^(vmask4 a, vmask4 b)
+{
+ return vmask4(a.m[0] ^ b.m[0],
+ a.m[1] ^ b.m[1],
+ a.m[2] ^ b.m[2],
+ a.m[3] ^ b.m[3]);
+}
+
+/**
+ * @brief Overload: mask invert (not).
+ */
+ASTCENC_SIMD_INLINE vmask4 operator~(vmask4 a)
+{
+ return vmask4(~a.m[0],
+ ~a.m[1],
+ ~a.m[2],
+ ~a.m[3]);
+}
+
+/**
+ * @brief Return a 4-bit mask code indicating mask status.
+ *
+ * bit0 = lane 0
+ */
+ASTCENC_SIMD_INLINE unsigned int mask(vmask4 a)
+{
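+	// Shift the sign bit of lane i down so that it lands in bit i of the result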
+ return ((a.m[0] >> 31) & 0x1) |
+ ((a.m[1] >> 30) & 0x2) |
+ ((a.m[2] >> 29) & 0x4) |
+ ((a.m[3] >> 28) & 0x8);
+}
+
+// ============================================================================
+// vint4 operators and functions
+// ============================================================================
+
+/**
+ * @brief Overload: vector by vector addition.
+ */
+ASTCENC_SIMD_INLINE vint4 operator+(vint4 a, vint4 b)
+{
+ return vint4(a.m[0] + b.m[0],
+ a.m[1] + b.m[1],
+ a.m[2] + b.m[2],
+ a.m[3] + b.m[3]);
+}
+
+/**
+ * @brief Overload: vector by vector subtraction.
+ */
+ASTCENC_SIMD_INLINE vint4 operator-(vint4 a, vint4 b)
+{
+ return vint4(a.m[0] - b.m[0],
+ a.m[1] - b.m[1],
+ a.m[2] - b.m[2],
+ a.m[3] - b.m[3]);
+}
+
+/**
+ * @brief Overload: vector by vector multiplication.
+ */
+ASTCENC_SIMD_INLINE vint4 operator*(vint4 a, vint4 b)
+{
+ return vint4(a.m[0] * b.m[0],
+ a.m[1] * b.m[1],
+ a.m[2] * b.m[2],
+ a.m[3] * b.m[3]);
+}
+
+/**
+ * @brief Overload: vector bit invert.
+ */
+ASTCENC_SIMD_INLINE vint4 operator~(vint4 a)
+{
+ return vint4(~a.m[0],
+ ~a.m[1],
+ ~a.m[2],
+ ~a.m[3]);
+}
+
+/**
+ * @brief Overload: vector by vector bitwise or.
+ */
+ASTCENC_SIMD_INLINE vint4 operator|(vint4 a, vint4 b)
+{
+ return vint4(a.m[0] | b.m[0],
+ a.m[1] | b.m[1],
+ a.m[2] | b.m[2],
+ a.m[3] | b.m[3]);
+}
+
+/**
+ * @brief Overload: vector by vector bitwise and.
+ */
+ASTCENC_SIMD_INLINE vint4 operator&(vint4 a, vint4 b)
+{
+ return vint4(a.m[0] & b.m[0],
+ a.m[1] & b.m[1],
+ a.m[2] & b.m[2],
+ a.m[3] & b.m[3]);
+}
+
+/**
+ * @brief Overload: vector by vector bitwise xor.
+ */
+ASTCENC_SIMD_INLINE vint4 operator^(vint4 a, vint4 b)
+{
+ return vint4(a.m[0] ^ b.m[0],
+ a.m[1] ^ b.m[1],
+ a.m[2] ^ b.m[2],
+ a.m[3] ^ b.m[3]);
+}
+
+/**
+ * @brief Overload: vector by vector equality.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator==(vint4 a, vint4 b)
+{
+ return vmask4(a.m[0] == b.m[0],
+ a.m[1] == b.m[1],
+ a.m[2] == b.m[2],
+ a.m[3] == b.m[3]);
+}
+
+/**
+ * @brief Overload: vector by vector inequality.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator!=(vint4 a, vint4 b)
+{
+ return vmask4(a.m[0] != b.m[0],
+ a.m[1] != b.m[1],
+ a.m[2] != b.m[2],
+ a.m[3] != b.m[3]);
+}
+
+/**
+ * @brief Overload: vector by vector less than.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator<(vint4 a, vint4 b)
+{
+ return vmask4(a.m[0] < b.m[0],
+ a.m[1] < b.m[1],
+ a.m[2] < b.m[2],
+ a.m[3] < b.m[3]);
+}
+
+/**
+ * @brief Overload: vector by vector greater than.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator>(vint4 a, vint4 b)
+{
+ return vmask4(a.m[0] > b.m[0],
+ a.m[1] > b.m[1],
+ a.m[2] > b.m[2],
+ a.m[3] > b.m[3]);
+}
+
+/**
+ * @brief Logical shift left.
+ */
+template <int s> ASTCENC_SIMD_INLINE vint4 lsl(vint4 a)
+{
+ return vint4(a.m[0] << s,
+ a.m[1] << s,
+ a.m[2] << s,
+ a.m[3] << s);
+}
+
+/**
+ * @brief Logical shift right.
+ */
+template <int s> ASTCENC_SIMD_INLINE vint4 lsr(vint4 a)
+{
+ unsigned int as0 = static_cast<unsigned int>(a.m[0]) >> s;
+ unsigned int as1 = static_cast<unsigned int>(a.m[1]) >> s;
+ unsigned int as2 = static_cast<unsigned int>(a.m[2]) >> s;
+ unsigned int as3 = static_cast<unsigned int>(a.m[3]) >> s;
+
+ return vint4(static_cast<int>(as0),
+ static_cast<int>(as1),
+ static_cast<int>(as2),
+ static_cast<int>(as3));
+}
+
+/**
+ * @brief Arithmetic shift right.
+ */
+template <int s> ASTCENC_SIMD_INLINE vint4 asr(vint4 a)
+{
+ return vint4(a.m[0] >> s,
+ a.m[1] >> s,
+ a.m[2] >> s,
+ a.m[3] >> s);
+}
+
+/**
+ * @brief Return the min vector of two vectors.
+ */
+ASTCENC_SIMD_INLINE vint4 min(vint4 a, vint4 b)
+{
+ return vint4(a.m[0] < b.m[0] ? a.m[0] : b.m[0],
+ a.m[1] < b.m[1] ? a.m[1] : b.m[1],
+ a.m[2] < b.m[2] ? a.m[2] : b.m[2],
+ a.m[3] < b.m[3] ? a.m[3] : b.m[3]);
+}
+
+/**
+ * @brief Return the max vector of two vectors.
+ */
+ASTCENC_SIMD_INLINE vint4 max(vint4 a, vint4 b)
+{
+ return vint4(a.m[0] > b.m[0] ? a.m[0] : b.m[0],
+ a.m[1] > b.m[1] ? a.m[1] : b.m[1],
+ a.m[2] > b.m[2] ? a.m[2] : b.m[2],
+ a.m[3] > b.m[3] ? a.m[3] : b.m[3]);
+}
+
+/**
+ * @brief Return the horizontal minimum of a single vector.
+ */
+ASTCENC_SIMD_INLINE vint4 hmin(vint4 a)
+{
+ int b = std::min(a.m[0], a.m[1]);
+ int c = std::min(a.m[2], a.m[3]);
+ return vint4(std::min(b, c));
+}
+
+/**
+ * @brief Return the horizontal maximum of a single vector.
+ */
+ASTCENC_SIMD_INLINE vint4 hmax(vint4 a)
+{
+ int b = std::max(a.m[0], a.m[1]);
+ int c = std::max(a.m[2], a.m[3]);
+ return vint4(std::max(b, c));
+}
+
+/**
+ * @brief Return the horizontal sum of vector lanes as a scalar.
+ */
+ASTCENC_SIMD_INLINE int hadd_s(vint4 a)
+{
+ return a.m[0] + a.m[1] + a.m[2] + a.m[3];
+}
+
+/**
+ * @brief Store a vector to an aligned memory address.
+ */
+ASTCENC_SIMD_INLINE void storea(vint4 a, int* p)
+{
+ p[0] = a.m[0];
+ p[1] = a.m[1];
+ p[2] = a.m[2];
+ p[3] = a.m[3];
+}
+
+/**
+ * @brief Store a vector to an unaligned memory address.
+ */
+ASTCENC_SIMD_INLINE void store(vint4 a, int* p)
+{
+ p[0] = a.m[0];
+ p[1] = a.m[1];
+ p[2] = a.m[2];
+ p[3] = a.m[3];
+}
+
+/**
+ * @brief Store lowest N (vector width) bytes into an unaligned address.
+ */
+ASTCENC_SIMD_INLINE void store_nbytes(vint4 a, uint8_t* p)
+{
+ int* pi = reinterpret_cast<int*>(p);
+ *pi = a.m[0];
+}
+
+/**
+ * @brief Gather N (vector width) values from the array at the given indices.
+ */
+ASTCENC_SIMD_INLINE vint4 gatheri(const int* base, vint4 indices)
+{
+ return vint4(base[indices.m[0]],
+ base[indices.m[1]],
+ base[indices.m[2]],
+ base[indices.m[3]]);
+}
+
+/**
+ * @brief Pack low 8 bits of N (vector width) lanes into bottom of vector.
+ */
+ASTCENC_SIMD_INLINE vint4 pack_low_bytes(vint4 a)
+{
+ int b0 = a.m[0] & 0xFF;
+ int b1 = a.m[1] & 0xFF;
+ int b2 = a.m[2] & 0xFF;
+ int b3 = a.m[3] & 0xFF;
+
+ int b = b0 | (b1 << 8) | (b2 << 16) | (b3 << 24);
+ return vint4(b, 0, 0, 0);
+}
+
+/**
+ * @brief Return lanes from @c b if MSB of @c cond is set, else @c a.
+ */
+ASTCENC_SIMD_INLINE vint4 select(vint4 a, vint4 b, vmask4 cond)
+{
+ return vint4((cond.m[0] & static_cast<int>(0x80000000)) ? b.m[0] : a.m[0],
+ (cond.m[1] & static_cast<int>(0x80000000)) ? b.m[1] : a.m[1],
+ (cond.m[2] & static_cast<int>(0x80000000)) ? b.m[2] : a.m[2],
+ (cond.m[3] & static_cast<int>(0x80000000)) ? b.m[3] : a.m[3]);
+}
+
+// ============================================================================
+// vfloat4 operators and functions
+// ============================================================================
+
+/**
+ * @brief Overload: vector by vector addition.
+ */
+ASTCENC_SIMD_INLINE vfloat4 operator+(vfloat4 a, vfloat4 b)
+{
+ return vfloat4(a.m[0] + b.m[0],
+ a.m[1] + b.m[1],
+ a.m[2] + b.m[2],
+ a.m[3] + b.m[3]);
+}
+
+/**
+ * @brief Overload: vector by vector subtraction.
+ */
+ASTCENC_SIMD_INLINE vfloat4 operator-(vfloat4 a, vfloat4 b)
+{
+ return vfloat4(a.m[0] - b.m[0],
+ a.m[1] - b.m[1],
+ a.m[2] - b.m[2],
+ a.m[3] - b.m[3]);
+}
+
+/**
+ * @brief Overload: vector by vector multiplication.
+ */
+ASTCENC_SIMD_INLINE vfloat4 operator*(vfloat4 a, vfloat4 b)
+{
+ return vfloat4(a.m[0] * b.m[0],
+ a.m[1] * b.m[1],
+ a.m[2] * b.m[2],
+ a.m[3] * b.m[3]);
+}
+
+/**
+ * @brief Overload: vector by vector division.
+ */
+ASTCENC_SIMD_INLINE vfloat4 operator/(vfloat4 a, vfloat4 b)
+{
+ return vfloat4(a.m[0] / b.m[0],
+ a.m[1] / b.m[1],
+ a.m[2] / b.m[2],
+ a.m[3] / b.m[3]);
+}
+
+/**
+ * @brief Overload: vector by vector equality.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator==(vfloat4 a, vfloat4 b)
+{
+ return vmask4(a.m[0] == b.m[0],
+ a.m[1] == b.m[1],
+ a.m[2] == b.m[2],
+ a.m[3] == b.m[3]);
+}
+
+/**
+ * @brief Overload: vector by vector inequality.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator!=(vfloat4 a, vfloat4 b)
+{
+ return vmask4(a.m[0] != b.m[0],
+ a.m[1] != b.m[1],
+ a.m[2] != b.m[2],
+ a.m[3] != b.m[3]);
+}
+
+/**
+ * @brief Overload: vector by vector less than.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator<(vfloat4 a, vfloat4 b)
+{
+ return vmask4(a.m[0] < b.m[0],
+ a.m[1] < b.m[1],
+ a.m[2] < b.m[2],
+ a.m[3] < b.m[3]);
+}
+
+/**
+ * @brief Overload: vector by vector greater than.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator>(vfloat4 a, vfloat4 b)
+{
+ return vmask4(a.m[0] > b.m[0],
+ a.m[1] > b.m[1],
+ a.m[2] > b.m[2],
+ a.m[3] > b.m[3]);
+}
+
+/**
+ * @brief Overload: vector by vector less than or equal.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator<=(vfloat4 a, vfloat4 b)
+{
+ return vmask4(a.m[0] <= b.m[0],
+ a.m[1] <= b.m[1],
+ a.m[2] <= b.m[2],
+ a.m[3] <= b.m[3]);
+}
+
+/**
+ * @brief Overload: vector by vector greater than or equal.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator>=(vfloat4 a, vfloat4 b)
+{
+ return vmask4(a.m[0] >= b.m[0],
+ a.m[1] >= b.m[1],
+ a.m[2] >= b.m[2],
+ a.m[3] >= b.m[3]);
+}
+
+/**
+ * @brief Return the min vector of two vectors.
+ *
+ * If either lane value is NaN, @c b will be returned for that lane.
+ */
+ASTCENC_SIMD_INLINE vfloat4 min(vfloat4 a, vfloat4 b)
+{
+ return vfloat4(a.m[0] < b.m[0] ? a.m[0] : b.m[0],
+ a.m[1] < b.m[1] ? a.m[1] : b.m[1],
+ a.m[2] < b.m[2] ? a.m[2] : b.m[2],
+ a.m[3] < b.m[3] ? a.m[3] : b.m[3]);
+}
+
+/**
+ * @brief Return the max vector of two vectors.
+ *
+ * If either lane value is NaN, @c b will be returned for that lane.
+ */
+ASTCENC_SIMD_INLINE vfloat4 max(vfloat4 a, vfloat4 b)
+{
+ return vfloat4(a.m[0] > b.m[0] ? a.m[0] : b.m[0],
+ a.m[1] > b.m[1] ? a.m[1] : b.m[1],
+ a.m[2] > b.m[2] ? a.m[2] : b.m[2],
+ a.m[3] > b.m[3] ? a.m[3] : b.m[3]);
+}
+
+/**
+ * @brief Return the absolute value of the float vector.
+ */
+ASTCENC_SIMD_INLINE vfloat4 abs(vfloat4 a)
+{
+ return vfloat4(std::abs(a.m[0]),
+ std::abs(a.m[1]),
+ std::abs(a.m[2]),
+ std::abs(a.m[3]));
+}
+
+/**
+ * @brief Return a float rounded to the nearest integer value.
+ */
+ASTCENC_SIMD_INLINE vfloat4 round(vfloat4 a)
+{
+ assert(std::fegetround() == FE_TONEAREST);
+ return vfloat4(std::nearbyint(a.m[0]),
+ std::nearbyint(a.m[1]),
+ std::nearbyint(a.m[2]),
+ std::nearbyint(a.m[3]));
+}
+
+/**
+ * @brief Return the horizontal minimum of a vector.
+ */
+ASTCENC_SIMD_INLINE vfloat4 hmin(vfloat4 a)
+{
+ float tmp1 = std::min(a.m[0], a.m[1]);
+ float tmp2 = std::min(a.m[2], a.m[3]);
+ return vfloat4(std::min(tmp1, tmp2));
+}
+
+/**
+ * @brief Return the horizontal maximum of a vector.
+ */
+ASTCENC_SIMD_INLINE vfloat4 hmax(vfloat4 a)
+{
+ float tmp1 = std::max(a.m[0], a.m[1]);
+ float tmp2 = std::max(a.m[2], a.m[3]);
+ return vfloat4(std::max(tmp1, tmp2));
+}
+
+/**
+ * @brief Return the horizontal sum of a vector.
+ */
+ASTCENC_SIMD_INLINE float hadd_s(vfloat4 a)
+{
+ // Use halving add, gives invariance with SIMD versions
+ return (a.m[0] + a.m[2]) + (a.m[1] + a.m[3]);
+}
+
+/**
+ * @brief Return the sqrt of the lanes in the vector.
+ */
+ASTCENC_SIMD_INLINE vfloat4 sqrt(vfloat4 a)
+{
+ return vfloat4(std::sqrt(a.m[0]),
+ std::sqrt(a.m[1]),
+ std::sqrt(a.m[2]),
+ std::sqrt(a.m[3]));
+}
+
+/**
+ * @brief Return lanes from @c b if @c cond is set, else @c a.
+ */
+ASTCENC_SIMD_INLINE vfloat4 select(vfloat4 a, vfloat4 b, vmask4 cond)
+{
+ return vfloat4((cond.m[0] & static_cast<int>(0x80000000)) ? b.m[0] : a.m[0],
+ (cond.m[1] & static_cast<int>(0x80000000)) ? b.m[1] : a.m[1],
+ (cond.m[2] & static_cast<int>(0x80000000)) ? b.m[2] : a.m[2],
+ (cond.m[3] & static_cast<int>(0x80000000)) ? b.m[3] : a.m[3]);
+}
+
+/**
+ * @brief Return lanes from @c b if MSB of @c cond is set, else @c a.
+ */
+ASTCENC_SIMD_INLINE vfloat4 select_msb(vfloat4 a, vfloat4 b, vmask4 cond)
+{
+ return vfloat4((cond.m[0] & static_cast<int>(0x80000000)) ? b.m[0] : a.m[0],
+ (cond.m[1] & static_cast<int>(0x80000000)) ? b.m[1] : a.m[1],
+ (cond.m[2] & static_cast<int>(0x80000000)) ? b.m[2] : a.m[2],
+ (cond.m[3] & static_cast<int>(0x80000000)) ? b.m[3] : a.m[3]);
+}
+
+/**
+ * @brief Load a vector of gathered results from an array.
+ */
+ASTCENC_SIMD_INLINE vfloat4 gatherf(const float* base, vint4 indices)
+{
+ return vfloat4(base[indices.m[0]],
+ base[indices.m[1]],
+ base[indices.m[2]],
+ base[indices.m[3]]);
+}
+
+/**
+ * @brief Store a vector to an unaligned memory address.
+ */
+ASTCENC_SIMD_INLINE void store(vfloat4 a, float* ptr)
+{
+ ptr[0] = a.m[0];
+ ptr[1] = a.m[1];
+ ptr[2] = a.m[2];
+ ptr[3] = a.m[3];
+}
+
+/**
+ * @brief Store a vector to an aligned memory address.
+ */
+ASTCENC_SIMD_INLINE void storea(vfloat4 a, float* ptr)
+{
+ ptr[0] = a.m[0];
+ ptr[1] = a.m[1];
+ ptr[2] = a.m[2];
+ ptr[3] = a.m[3];
+}
+
+/**
+ * @brief Return an integer value for a float vector, using truncation.
+ */
+ASTCENC_SIMD_INLINE vint4 float_to_int(vfloat4 a)
+{
+ return vint4(static_cast<int>(a.m[0]),
+ static_cast<int>(a.m[1]),
+ static_cast<int>(a.m[2]),
+ static_cast<int>(a.m[3]));
+}
+
+/**
+ * @brief Return an integer value for a float vector, using round-to-nearest.
+ */
+ASTCENC_SIMD_INLINE vint4 float_to_int_rtn(vfloat4 a)
+{
+ return vint4(static_cast<int>(a.m[0] + 0.5f),
+ static_cast<int>(a.m[1] + 0.5f),
+ static_cast<int>(a.m[2] + 0.5f),
+ static_cast<int>(a.m[3] + 0.5f));
+}
+
+/**
+ * @brief Return a float value for an integer vector.
+ */
+ASTCENC_SIMD_INLINE vfloat4 int_to_float(vint4 a)
+{
+ return vfloat4(static_cast<float>(a.m[0]),
+ static_cast<float>(a.m[1]),
+ static_cast<float>(a.m[2]),
+ static_cast<float>(a.m[3]));
+}
+
+/**
+ * @brief Return a float16 value for a float vector, using round-to-nearest.
+ */
+ASTCENC_SIMD_INLINE vint4 float_to_float16(vfloat4 a)
+{
+ return vint4(
+ float_to_sf16(a.lane<0>()),
+ float_to_sf16(a.lane<1>()),
+ float_to_sf16(a.lane<2>()),
+ float_to_sf16(a.lane<3>()));
+}
+
+/**
+ * @brief Return a float16 value for a float scalar, using round-to-nearest.
+ */
+static inline uint16_t float_to_float16(float a)
+{
+ return float_to_sf16(a);
+}
+
+/**
+ * @brief Return a float value for a float16 vector.
+ */
+ASTCENC_SIMD_INLINE vfloat4 float16_to_float(vint4 a)
+{
+ return vfloat4(
+ sf16_to_float(static_cast<uint16_t>(a.lane<0>())),
+ sf16_to_float(static_cast<uint16_t>(a.lane<1>())),
+ sf16_to_float(static_cast<uint16_t>(a.lane<2>())),
+ sf16_to_float(static_cast<uint16_t>(a.lane<3>())));
+}
+
+/**
+ * @brief Return a float value for a float16 scalar.
+ */
+ASTCENC_SIMD_INLINE float float16_to_float(uint16_t a)
+{
+ return sf16_to_float(a);
+}
+
+/**
+ * @brief Return a float value as an integer bit pattern (i.e. no conversion).
+ *
+ * It is a common trick to convert floats into integer bit patterns, perform
+ * some bit hackery based on knowledge they are IEEE 754 layout, and then
+ * convert them back again. This is the first half of that flip.
+ */
+ASTCENC_SIMD_INLINE vint4 float_as_int(vfloat4 a)
+{
+ vint4 r;
+ memcpy(r.m, a.m, 4 * 4);
+ return r;
+}
+
+/**
+ * @brief Return an integer value as a float bit pattern (i.e. no conversion).
+ *
+ * It is a common trick to convert floats into integer bit patterns, perform
+ * some bit hackery based on knowledge they are IEEE 754 layout, and then
+ * convert them back again. This is the second half of that flip.
+ */
+ASTCENC_SIMD_INLINE vfloat4 int_as_float(vint4 a)
+{
+ vfloat4 r;
+ memcpy(r.m, a.m, 4 * 4);
+ return r;
+}
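+
+// A minimal illustrative sketch (not part of the library API) of the bit
+// pattern round-trip described above: clearing the IEEE 754 sign bit via the
+// integer view yields an absolute value without a floating-point compare.
+//
+//     vfloat4 abs_via_bits(vfloat4 x)
+//     {
+//         vint4 bits = float_as_int(x) & vint4(0x7FFFFFFF);
+//         return int_as_float(bits);
+//     }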
+
+/**
+ * @brief Prepare a vtable lookup table for use with the native SIMD size.
+ */
+ASTCENC_SIMD_INLINE void vtable_prepare(vint4 t0, vint4& t0p)
+{
+ t0p = t0;
+}
+
+/**
+ * @brief Prepare a vtable lookup table for use with the native SIMD size.
+ */
+ASTCENC_SIMD_INLINE void vtable_prepare(vint4 t0, vint4 t1, vint4& t0p, vint4& t1p)
+{
+ t0p = t0;
+ t1p = t1;
+}
+
+/**
+ * @brief Prepare a vtable lookup table for use with the native SIMD size.
+ */
+ASTCENC_SIMD_INLINE void vtable_prepare(
+ vint4 t0, vint4 t1, vint4 t2, vint4 t3,
+ vint4& t0p, vint4& t1p, vint4& t2p, vint4& t3p)
+{
+ t0p = t0;
+ t1p = t1;
+ t2p = t2;
+ t3p = t3;
+}
+
+/**
+ * @brief Perform an 8-bit 16-entry table lookup, with 32-bit indexes.
+ */
+ASTCENC_SIMD_INLINE vint4 vtable_8bt_32bi(vint4 t0, vint4 idx)
+{
+ uint8_t table[16];
+ storea(t0, reinterpret_cast<int*>(table + 0));
+
+ return vint4(table[idx.lane<0>()],
+ table[idx.lane<1>()],
+ table[idx.lane<2>()],
+ table[idx.lane<3>()]);
+}
+
+/**
+ * @brief Perform an 8-bit 32-entry table lookup, with 32-bit indexes.
+ */
+ASTCENC_SIMD_INLINE vint4 vtable_8bt_32bi(vint4 t0, vint4 t1, vint4 idx)
+{
+ uint8_t table[32];
+ storea(t0, reinterpret_cast<int*>(table + 0));
+ storea(t1, reinterpret_cast<int*>(table + 16));
+
+ return vint4(table[idx.lane<0>()],
+ table[idx.lane<1>()],
+ table[idx.lane<2>()],
+ table[idx.lane<3>()]);
+}
+
+/**
+ * @brief Perform an 8-bit 64-entry table lookup, with 32-bit indexes.
+ */
+ASTCENC_SIMD_INLINE vint4 vtable_8bt_32bi(vint4 t0, vint4 t1, vint4 t2, vint4 t3, vint4 idx)
+{
+ uint8_t table[64];
+ storea(t0, reinterpret_cast<int*>(table + 0));
+ storea(t1, reinterpret_cast<int*>(table + 16));
+ storea(t2, reinterpret_cast<int*>(table + 32));
+ storea(t3, reinterpret_cast<int*>(table + 48));
+
+ return vint4(table[idx.lane<0>()],
+ table[idx.lane<1>()],
+ table[idx.lane<2>()],
+ table[idx.lane<3>()]);
+}
+
+/**
+ * @brief Return a vector of interleaved RGBA data.
+ *
+ * Input vectors have the value stored in the bottom 8 bits of each lane,
+ * with high bits set to zero.
+ *
+ * Output vector stores a single RGBA texel packed in each lane.
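+ *
+ * For example, r/g/b/a lane values of 0x12/0x34/0x56/0x78 pack to the lane
+ * value 0x78563412, i.e. bytes R, G, B, A in ascending memory order on a
+ * little-endian target.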
+ */
+ASTCENC_SIMD_INLINE vint4 interleave_rgba8(vint4 r, vint4 g, vint4 b, vint4 a)
+{
+ return r + lsl<8>(g) + lsl<16>(b) + lsl<24>(a);
+}
+
+/**
+ * @brief Store a vector, skipping masked lanes.
+ *
+ * All masked lanes must be at the end of vector, after all non-masked lanes.
+ */
+ASTCENC_SIMD_INLINE void store_lanes_masked(int* base, vint4 data, vmask4 mask)
+{
+ if (mask.m[3])
+ {
+ store(data, base);
+ }
+ else if (mask.m[2])
+ {
+ base[0] = data.lane<0>();
+ base[1] = data.lane<1>();
+ base[2] = data.lane<2>();
+ }
+ else if (mask.m[1])
+ {
+ base[0] = data.lane<0>();
+ base[1] = data.lane<1>();
+ }
+ else if (mask.m[0])
+ {
+ base[0] = data.lane<0>();
+ }
+}
+
+#endif // #ifndef ASTC_VECMATHLIB_NONE_4_H_INCLUDED
diff --git a/thirdparty/astcenc/astcenc_vecmathlib_sse_4.h b/thirdparty/astcenc/astcenc_vecmathlib_sse_4.h
new file mode 100644
index 0000000000..26dcc4a891
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_vecmathlib_sse_4.h
@@ -0,0 +1,1283 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2019-2022 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+/**
+ * @brief 4x32-bit vectors, implemented using SSE.
+ *
+ * This module implements 4-wide 32-bit float, int, and mask vectors for x86
+ * SSE. The implementation requires at least SSE2, but higher levels of SSE can
+ * be selected at compile time to improve performance.
+ *
+ * There is a baseline level of functionality provided by all vector widths and
+ * implementations. This is implemented using identical function signatures,
+ * modulo data type, so we can use them as substitutable implementations in VLA
+ * code.
+ *
+ * The 4-wide vectors are also used as a fixed-width type, and significantly
+ * extend the functionality above that available to VLA code.
+ */
+
+#ifndef ASTC_VECMATHLIB_SSE_4_H_INCLUDED
+#define ASTC_VECMATHLIB_SSE_4_H_INCLUDED
+
+#ifndef ASTCENC_SIMD_INLINE
+ #error "Include astcenc_vecmathlib.h, do not include directly"
+#endif
+
+#include <cstdio>
+
+// ============================================================================
+// vfloat4 data type
+// ============================================================================
+
+/**
+ * @brief Data type for 4-wide floats.
+ */
+struct vfloat4
+{
+ /**
+ * @brief Construct from zero-initialized value.
+ */
+ ASTCENC_SIMD_INLINE vfloat4() = default;
+
+ /**
+ * @brief Construct from 4 values loaded from an unaligned address.
+ *
+ * Consider using loada() which is better with vectors if data is aligned
+ * to vector length.
+ */
+ ASTCENC_SIMD_INLINE explicit vfloat4(const float *p)
+ {
+ m = _mm_loadu_ps(p);
+ }
+
+ /**
+ * @brief Construct from 1 scalar value replicated across all lanes.
+ *
+ * Consider using zero() for constexpr zeros.
+ */
+ ASTCENC_SIMD_INLINE explicit vfloat4(float a)
+ {
+ m = _mm_set1_ps(a);
+ }
+
+ /**
+ * @brief Construct from 4 scalar values.
+ *
+ * The value of @c a is stored to lane 0 (LSB) in the SIMD register.
+ */
+ ASTCENC_SIMD_INLINE explicit vfloat4(float a, float b, float c, float d)
+ {
+ m = _mm_set_ps(d, c, b, a);
+ }
+
+ /**
+ * @brief Construct from an existing SIMD register.
+ */
+ ASTCENC_SIMD_INLINE explicit vfloat4(__m128 a)
+ {
+ m = a;
+ }
+
+ /**
+ * @brief Get the scalar value of a single lane.
+ */
+ template <int l> ASTCENC_SIMD_INLINE float lane() const
+ {
+ return _mm_cvtss_f32(_mm_shuffle_ps(m, m, l));
+ }
+
+ /**
+ * @brief Set the scalar value of a single lane.
+ */
+ template <int l> ASTCENC_SIMD_INLINE void set_lane(float a)
+ {
+#if ASTCENC_SSE >= 41
+ __m128 v = _mm_set1_ps(a);
+ m = _mm_insert_ps(m, v, l << 6 | l << 4);
+#else
+ alignas(16) float idx[4];
+ _mm_store_ps(idx, m);
+ idx[l] = a;
+ m = _mm_load_ps(idx);
+#endif
+ }
+
+ /**
+ * @brief Factory that returns a vector of zeros.
+ */
+ static ASTCENC_SIMD_INLINE vfloat4 zero()
+ {
+ return vfloat4(_mm_setzero_ps());
+ }
+
+ /**
+ * @brief Factory that returns a replicated scalar loaded from memory.
+ */
+ static ASTCENC_SIMD_INLINE vfloat4 load1(const float* p)
+ {
+ return vfloat4(_mm_load_ps1(p));
+ }
+
+ /**
+ * @brief Factory that returns a vector loaded from 16B aligned memory.
+ */
+ static ASTCENC_SIMD_INLINE vfloat4 loada(const float* p)
+ {
+ return vfloat4(_mm_load_ps(p));
+ }
+
+ /**
+ * @brief Factory that returns a vector containing the lane IDs.
+ */
+ static ASTCENC_SIMD_INLINE vfloat4 lane_id()
+ {
+ return vfloat4(_mm_set_ps(3, 2, 1, 0));
+ }
+
+ /**
+ * @brief Return a swizzled float 2.
+ */
+ template <int l0, int l1> ASTCENC_SIMD_INLINE vfloat4 swz() const
+ {
+ vfloat4 result(_mm_shuffle_ps(m, m, l0 | l1 << 2));
+ result.set_lane<2>(0.0f);
+ result.set_lane<3>(0.0f);
+ return result;
+ }
+
+ /**
+ * @brief Return a swizzled float 3.
+ */
+ template <int l0, int l1, int l2> ASTCENC_SIMD_INLINE vfloat4 swz() const
+ {
+ vfloat4 result(_mm_shuffle_ps(m, m, l0 | l1 << 2 | l2 << 4));
+ result.set_lane<3>(0.0f);
+ return result;
+ }
+
+ /**
+ * @brief Return a swizzled float 4.
+ */
+ template <int l0, int l1, int l2, int l3> ASTCENC_SIMD_INLINE vfloat4 swz() const
+ {
+ return vfloat4(_mm_shuffle_ps(m, m, l0 | l1 << 2 | l2 << 4 | l3 << 6));
+ }
+
+ /**
+ * @brief The vector data.
+ */
+ __m128 m;
+};
+
+// ============================================================================
+// vint4 data type
+// ============================================================================
+
+/**
+ * @brief Data type for 4-wide ints.
+ */
+struct vint4
+{
+ /**
+ * @brief Construct from zero-initialized value.
+ */
+ ASTCENC_SIMD_INLINE vint4() = default;
+
+ /**
+ * @brief Construct from 4 values loaded from an unaligned address.
+ *
+ * Consider using loada() which is better with vectors if data is aligned
+ * to vector length.
+ */
+ ASTCENC_SIMD_INLINE explicit vint4(const int *p)
+ {
+ m = _mm_loadu_si128(reinterpret_cast<const __m128i*>(p));
+ }
+
+ /**
+ * @brief Construct from 4 uint8_t loaded from an unaligned address.
+ */
+ ASTCENC_SIMD_INLINE explicit vint4(const uint8_t *p)
+ {
+ // _mm_loadu_si32 would be nicer syntax, but missing on older GCC
+ __m128i t = _mm_cvtsi32_si128(*reinterpret_cast<const int*>(p));
+
+#if ASTCENC_SSE >= 41
+ m = _mm_cvtepu8_epi32(t);
+#else
+ t = _mm_unpacklo_epi8(t, _mm_setzero_si128());
+ m = _mm_unpacklo_epi16(t, _mm_setzero_si128());
+#endif
+ }
+
+ /**
+ * @brief Construct from 1 scalar value replicated across all lanes.
+ *
+ * Consider using vint4::zero() for constexpr zeros.
+ */
+ ASTCENC_SIMD_INLINE explicit vint4(int a)
+ {
+ m = _mm_set1_epi32(a);
+ }
+
+ /**
+ * @brief Construct from 4 scalar values.
+ *
+ * The value of @c a is stored to lane 0 (LSB) in the SIMD register.
+ */
+ ASTCENC_SIMD_INLINE explicit vint4(int a, int b, int c, int d)
+ {
+ m = _mm_set_epi32(d, c, b, a);
+ }
+
+ /**
+ * @brief Construct from an existing SIMD register.
+ */
+ ASTCENC_SIMD_INLINE explicit vint4(__m128i a)
+ {
+ m = a;
+ }
+
+ /**
+ * @brief Get the scalar from a single lane.
+ */
+ template <int l> ASTCENC_SIMD_INLINE int lane() const
+ {
+ return _mm_cvtsi128_si32(_mm_shuffle_epi32(m, l));
+ }
+
+ /**
+ * @brief Set the scalar value of a single lane.
+ */
+ template <int l> ASTCENC_SIMD_INLINE void set_lane(int a)
+ {
+#if ASTCENC_SSE >= 41
+ m = _mm_insert_epi32(m, a, l);
+#else
+ alignas(16) int idx[4];
+ _mm_store_si128(reinterpret_cast<__m128i*>(idx), m);
+ idx[l] = a;
+ m = _mm_load_si128(reinterpret_cast<const __m128i*>(idx));
+#endif
+ }
+
+ /**
+ * @brief Factory that returns a vector of zeros.
+ */
+ static ASTCENC_SIMD_INLINE vint4 zero()
+ {
+ return vint4(_mm_setzero_si128());
+ }
+
+ /**
+ * @brief Factory that returns a replicated scalar loaded from memory.
+ */
+ static ASTCENC_SIMD_INLINE vint4 load1(const int* p)
+ {
+ return vint4(*p);
+ }
+
+ /**
+ * @brief Factory that returns a vector loaded from 16B aligned memory.
+ */
+ static ASTCENC_SIMD_INLINE vint4 loada(const int* p)
+ {
+ return vint4(_mm_load_si128(reinterpret_cast<const __m128i*>(p)));
+ }
+
+ /**
+ * @brief Factory that returns a vector containing the lane IDs.
+ */
+ static ASTCENC_SIMD_INLINE vint4 lane_id()
+ {
+ return vint4(_mm_set_epi32(3, 2, 1, 0));
+ }
+
+ /**
+ * @brief The vector data.
+ */
+ __m128i m;
+};
+
+// ============================================================================
+// vmask4 data type
+// ============================================================================
+
+/**
+ * @brief Data type for 4-wide control plane masks.
+ */
+struct vmask4
+{
+ /**
+ * @brief Construct from an existing SIMD register.
+ */
+ ASTCENC_SIMD_INLINE explicit vmask4(__m128 a)
+ {
+ m = a;
+ }
+
+ /**
+ * @brief Construct from an existing SIMD register.
+ */
+ ASTCENC_SIMD_INLINE explicit vmask4(__m128i a)
+ {
+ m = _mm_castsi128_ps(a);
+ }
+
+ /**
+ * @brief Construct from 1 scalar value.
+ */
+ ASTCENC_SIMD_INLINE explicit vmask4(bool a)
+ {
+ vint4 mask(a == false ? 0 : -1);
+ m = _mm_castsi128_ps(mask.m);
+ }
+
+ /**
+ * @brief Construct from 4 scalar values.
+ *
+ * The value of @c a is stored to lane 0 (LSB) in the SIMD register.
+ */
+ ASTCENC_SIMD_INLINE explicit vmask4(bool a, bool b, bool c, bool d)
+ {
+ vint4 mask(a == false ? 0 : -1,
+ b == false ? 0 : -1,
+ c == false ? 0 : -1,
+ d == false ? 0 : -1);
+
+ m = _mm_castsi128_ps(mask.m);
+ }
+
+ /**
+ * @brief Get the scalar value of a single lane.
+ */
+ template <int l> ASTCENC_SIMD_INLINE float lane() const
+ {
+ return _mm_cvtss_f32(_mm_shuffle_ps(m, m, l));
+ }
+
+ /**
+ * @brief The vector data.
+ */
+ __m128 m;
+};
+
+// ============================================================================
+// vmask4 operators and functions
+// ============================================================================
+
+/**
+ * @brief Overload: mask union (or).
+ */
+ASTCENC_SIMD_INLINE vmask4 operator|(vmask4 a, vmask4 b)
+{
+ return vmask4(_mm_or_ps(a.m, b.m));
+}
+
+/**
+ * @brief Overload: mask intersect (and).
+ */
+ASTCENC_SIMD_INLINE vmask4 operator&(vmask4 a, vmask4 b)
+{
+ return vmask4(_mm_and_ps(a.m, b.m));
+}
+
+/**
+ * @brief Overload: mask difference (xor).
+ */
+ASTCENC_SIMD_INLINE vmask4 operator^(vmask4 a, vmask4 b)
+{
+ return vmask4(_mm_xor_ps(a.m, b.m));
+}
+
+/**
+ * @brief Overload: mask invert (not).
+ */
+ASTCENC_SIMD_INLINE vmask4 operator~(vmask4 a)
+{
+ return vmask4(_mm_xor_si128(_mm_castps_si128(a.m), _mm_set1_epi32(-1)));
+}
+
+/**
+ * @brief Return a 4-bit mask code indicating mask status.
+ *
+ * bit0 = lane 0
+ */
+ASTCENC_SIMD_INLINE unsigned int mask(vmask4 a)
+{
+ return static_cast<unsigned int>(_mm_movemask_ps(a.m));
+}
+
+// ============================================================================
+// vint4 operators and functions
+// ============================================================================
+
+/**
+ * @brief Overload: vector by vector addition.
+ */
+ASTCENC_SIMD_INLINE vint4 operator+(vint4 a, vint4 b)
+{
+ return vint4(_mm_add_epi32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector subtraction.
+ */
+ASTCENC_SIMD_INLINE vint4 operator-(vint4 a, vint4 b)
+{
+ return vint4(_mm_sub_epi32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector multiplication.
+ */
+ASTCENC_SIMD_INLINE vint4 operator*(vint4 a, vint4 b)
+{
+#if ASTCENC_SSE >= 41
+ return vint4(_mm_mullo_epi32 (a.m, b.m));
+#else
+ __m128i t1 = _mm_mul_epu32(a.m, b.m);
+ __m128i t2 = _mm_mul_epu32(
+ _mm_srli_si128(a.m, 4),
+ _mm_srli_si128(b.m, 4));
+ __m128i r = _mm_unpacklo_epi32(
+ _mm_shuffle_epi32(t1, _MM_SHUFFLE (0, 0, 2, 0)),
+ _mm_shuffle_epi32(t2, _MM_SHUFFLE (0, 0, 2, 0)));
+ return vint4(r);
+#endif
+}
+
+/**
+ * @brief Overload: vector bit invert.
+ */
+ASTCENC_SIMD_INLINE vint4 operator~(vint4 a)
+{
+ return vint4(_mm_xor_si128(a.m, _mm_set1_epi32(-1)));
+}
+
+/**
+ * @brief Overload: vector by vector bitwise or.
+ */
+ASTCENC_SIMD_INLINE vint4 operator|(vint4 a, vint4 b)
+{
+ return vint4(_mm_or_si128(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector bitwise and.
+ */
+ASTCENC_SIMD_INLINE vint4 operator&(vint4 a, vint4 b)
+{
+ return vint4(_mm_and_si128(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector bitwise xor.
+ */
+ASTCENC_SIMD_INLINE vint4 operator^(vint4 a, vint4 b)
+{
+ return vint4(_mm_xor_si128(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector equality.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator==(vint4 a, vint4 b)
+{
+ return vmask4(_mm_cmpeq_epi32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector inequality.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator!=(vint4 a, vint4 b)
+{
+ return ~vmask4(_mm_cmpeq_epi32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector less than.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator<(vint4 a, vint4 b)
+{
+ return vmask4(_mm_cmplt_epi32(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector greater than.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator>(vint4 a, vint4 b)
+{
+ return vmask4(_mm_cmpgt_epi32(a.m, b.m));
+}
+
+/**
+ * @brief Logical shift left.
+ */
+template <int s> ASTCENC_SIMD_INLINE vint4 lsl(vint4 a)
+{
+ return vint4(_mm_slli_epi32(a.m, s));
+}
+
+/**
+ * @brief Logical shift right.
+ */
+template <int s> ASTCENC_SIMD_INLINE vint4 lsr(vint4 a)
+{
+ return vint4(_mm_srli_epi32(a.m, s));
+}
+
+/**
+ * @brief Arithmetic shift right.
+ */
+template <int s> ASTCENC_SIMD_INLINE vint4 asr(vint4 a)
+{
+ return vint4(_mm_srai_epi32(a.m, s));
+}
+
+/**
+ * @brief Return the min vector of two vectors.
+ */
+ASTCENC_SIMD_INLINE vint4 min(vint4 a, vint4 b)
+{
+#if ASTCENC_SSE >= 41
+ return vint4(_mm_min_epi32(a.m, b.m));
+#else
+ vmask4 d = a < b;
+ __m128i ap = _mm_and_si128(_mm_castps_si128(d.m), a.m);
+ __m128i bp = _mm_andnot_si128(_mm_castps_si128(d.m), b.m);
+ return vint4(_mm_or_si128(ap,bp));
+#endif
+}
+
+/**
+ * @brief Return the max vector of two vectors.
+ */
+ASTCENC_SIMD_INLINE vint4 max(vint4 a, vint4 b)
+{
+#if ASTCENC_SSE >= 41
+ return vint4(_mm_max_epi32(a.m, b.m));
+#else
+ vmask4 d = a > b;
+ __m128i ap = _mm_and_si128(_mm_castps_si128(d.m), a.m);
+ __m128i bp = _mm_andnot_si128(_mm_castps_si128(d.m), b.m);
+ return vint4(_mm_or_si128(ap,bp));
+#endif
+}
+
+/**
+ * @brief Return the horizontal minimum of a vector.
+ */
+ASTCENC_SIMD_INLINE vint4 hmin(vint4 a)
+{
+ a = min(a, vint4(_mm_shuffle_epi32(a.m, _MM_SHUFFLE(0, 0, 3, 2))));
+ a = min(a, vint4(_mm_shuffle_epi32(a.m, _MM_SHUFFLE(0, 0, 0, 1))));
+ return vint4(_mm_shuffle_epi32(a.m, _MM_SHUFFLE(0, 0, 0, 0)));
+}
+
+/**
+ * @brief Return the horizontal maximum of a vector.
+ */
+ASTCENC_SIMD_INLINE vint4 hmax(vint4 a)
+{
+ a = max(a, vint4(_mm_shuffle_epi32(a.m, _MM_SHUFFLE(0, 0, 3, 2))));
+ a = max(a, vint4(_mm_shuffle_epi32(a.m, _MM_SHUFFLE(0, 0, 0, 1))));
+ return vint4(_mm_shuffle_epi32(a.m, _MM_SHUFFLE(0, 0, 0, 0)));
+}
+
+/**
+ * @brief Return the horizontal sum of a vector as a scalar.
+ */
+ASTCENC_SIMD_INLINE int hadd_s(vint4 a)
+{
+ // Add top and bottom halves, lane 1/0
+ __m128i fold = _mm_castps_si128(_mm_movehl_ps(_mm_castsi128_ps(a.m),
+ _mm_castsi128_ps(a.m)));
+ __m128i t = _mm_add_epi32(a.m, fold);
+
+ // Add top and bottom halves, lane 0 (_mm_hadd_ps exists but slow)
+ t = _mm_add_epi32(t, _mm_shuffle_epi32(t, 0x55));
+
+ return _mm_cvtsi128_si32(t);
+}
+
+/**
+ * @brief Store a vector to a 16B aligned memory address.
+ */
+ASTCENC_SIMD_INLINE void storea(vint4 a, int* p)
+{
+ _mm_store_si128(reinterpret_cast<__m128i*>(p), a.m);
+}
+
+/**
+ * @brief Store a vector to an unaligned memory address.
+ */
+ASTCENC_SIMD_INLINE void store(vint4 a, int* p)
+{
+ // Cast due to missing intrinsics
+ _mm_storeu_ps(reinterpret_cast<float*>(p), _mm_castsi128_ps(a.m));
+}
+
+/**
+ * @brief Store lowest N (vector width) bytes into an unaligned address.
+ */
+ASTCENC_SIMD_INLINE void store_nbytes(vint4 a, uint8_t* p)
+{
+ // Cast due to missing intrinsics
+ _mm_store_ss(reinterpret_cast<float*>(p), _mm_castsi128_ps(a.m));
+}
+
+/**
+ * @brief Gather N (vector width) indices from the array.
+ */
+ASTCENC_SIMD_INLINE vint4 gatheri(const int* base, vint4 indices)
+{
+#if ASTCENC_AVX >= 2
+ return vint4(_mm_i32gather_epi32(base, indices.m, 4));
+#else
+ alignas(16) int idx[4];
+ storea(indices, idx);
+ return vint4(base[idx[0]], base[idx[1]], base[idx[2]], base[idx[3]]);
+#endif
+}
+
+/**
+ * @brief Pack low 8 bits of N (vector width) lanes into bottom of vector.
+ */
+ASTCENC_SIMD_INLINE vint4 pack_low_bytes(vint4 a)
+{
+#if ASTCENC_SSE >= 41
+ __m128i shuf = _mm_set_epi8(0,0,0,0, 0,0,0,0, 0,0,0,0, 12,8,4,0);
+ return vint4(_mm_shuffle_epi8(a.m, shuf));
+#else
+ __m128i va = _mm_unpacklo_epi8(a.m, _mm_shuffle_epi32(a.m, _MM_SHUFFLE(1,1,1,1)));
+ __m128i vb = _mm_unpackhi_epi8(a.m, _mm_shuffle_epi32(a.m, _MM_SHUFFLE(3,3,3,3)));
+ return vint4(_mm_unpacklo_epi16(va, vb));
+#endif
+}
+
+/**
+ * @brief Return lanes from @c b if @c cond is set, else @c a.
+ */
+ASTCENC_SIMD_INLINE vint4 select(vint4 a, vint4 b, vmask4 cond)
+{
+ __m128i condi = _mm_castps_si128(cond.m);
+
+#if ASTCENC_SSE >= 41
+ return vint4(_mm_blendv_epi8(a.m, b.m, condi));
+#else
+ return vint4(_mm_or_si128(_mm_and_si128(condi, b.m), _mm_andnot_si128(condi, a.m)));
+#endif
+}
+
+// ============================================================================
+// vfloat4 operators and functions
+// ============================================================================
+
+/**
+ * @brief Overload: vector by vector addition.
+ */
+ASTCENC_SIMD_INLINE vfloat4 operator+(vfloat4 a, vfloat4 b)
+{
+ return vfloat4(_mm_add_ps(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector subtraction.
+ */
+ASTCENC_SIMD_INLINE vfloat4 operator-(vfloat4 a, vfloat4 b)
+{
+ return vfloat4(_mm_sub_ps(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector multiplication.
+ */
+ASTCENC_SIMD_INLINE vfloat4 operator*(vfloat4 a, vfloat4 b)
+{
+ return vfloat4(_mm_mul_ps(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector division.
+ */
+ASTCENC_SIMD_INLINE vfloat4 operator/(vfloat4 a, vfloat4 b)
+{
+ return vfloat4(_mm_div_ps(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector equality.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator==(vfloat4 a, vfloat4 b)
+{
+ return vmask4(_mm_cmpeq_ps(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector inequality.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator!=(vfloat4 a, vfloat4 b)
+{
+ return vmask4(_mm_cmpneq_ps(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector less than.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator<(vfloat4 a, vfloat4 b)
+{
+ return vmask4(_mm_cmplt_ps(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector greater than.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator>(vfloat4 a, vfloat4 b)
+{
+ return vmask4(_mm_cmpgt_ps(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector less than or equal.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator<=(vfloat4 a, vfloat4 b)
+{
+ return vmask4(_mm_cmple_ps(a.m, b.m));
+}
+
+/**
+ * @brief Overload: vector by vector greater than or equal.
+ */
+ASTCENC_SIMD_INLINE vmask4 operator>=(vfloat4 a, vfloat4 b)
+{
+ return vmask4(_mm_cmpge_ps(a.m, b.m));
+}
+
+/**
+ * @brief Return the min vector of two vectors.
+ *
+ * If either lane value is NaN, @c b will be returned for that lane.
+ */
+ASTCENC_SIMD_INLINE vfloat4 min(vfloat4 a, vfloat4 b)
+{
+ // Do not reorder - the second operand is returned if either input is NaN
+ return vfloat4(_mm_min_ps(a.m, b.m));
+}
+
+/**
+ * @brief Return the max vector of two vectors.
+ *
+ * If either lane value is NaN, @c b will be returned for that lane.
+ */
+ASTCENC_SIMD_INLINE vfloat4 max(vfloat4 a, vfloat4 b)
+{
+ // Do not reorder - the second operand is returned if either input is NaN
+ return vfloat4(_mm_max_ps(a.m, b.m));
+}
+
+/**
+ * @brief Return the absolute value of the float vector.
+ */
+ASTCENC_SIMD_INLINE vfloat4 abs(vfloat4 a)
+{
+ return vfloat4(_mm_max_ps(_mm_sub_ps(_mm_setzero_ps(), a.m), a.m));
+}
+
+/**
+ * @brief Return a float rounded to the nearest integer value.
+ */
+ASTCENC_SIMD_INLINE vfloat4 round(vfloat4 a)
+{
+#if ASTCENC_SSE >= 41
+ constexpr int flags = _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC;
+ return vfloat4(_mm_round_ps(a.m, flags));
+#else
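+ // SSE2 fallback: adding and then subtracting a sign-adjusted 2^23 (the
+ // first float magnitude with no fractional bits) rounds to nearest even;
+ // values with |a| > 2^23 are already integral and are passed through
+ // unchanged by the mask logic below.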
+ __m128 v = a.m;
+ __m128 neg_zero = _mm_castsi128_ps(_mm_set1_epi32(static_cast<int>(0x80000000)));
+ __m128 no_fraction = _mm_set1_ps(8388608.0f);
+ __m128 abs_mask = _mm_castsi128_ps(_mm_set1_epi32(0x7FFFFFFF));
+ __m128 sign = _mm_and_ps(v, neg_zero);
+ __m128 s_magic = _mm_or_ps(no_fraction, sign);
+ __m128 r1 = _mm_add_ps(v, s_magic);
+ r1 = _mm_sub_ps(r1, s_magic);
+ __m128 r2 = _mm_and_ps(v, abs_mask);
+ __m128 mask = _mm_cmple_ps(r2, no_fraction);
+ r2 = _mm_andnot_ps(mask, v);
+ r1 = _mm_and_ps(r1, mask);
+ return vfloat4(_mm_xor_ps(r1, r2));
+#endif
+}
+
+/**
+ * @brief Return the horizontal minimum of a vector.
+ */
+ASTCENC_SIMD_INLINE vfloat4 hmin(vfloat4 a)
+{
+ a = min(a, vfloat4(_mm_shuffle_ps(a.m, a.m, _MM_SHUFFLE(0, 0, 3, 2))));
+ a = min(a, vfloat4(_mm_shuffle_ps(a.m, a.m, _MM_SHUFFLE(0, 0, 0, 1))));
+ return vfloat4(_mm_shuffle_ps(a.m, a.m, _MM_SHUFFLE(0, 0, 0, 0)));
+}
+
+/**
+ * @brief Return the horizontal maximum of a vector.
+ */
+ASTCENC_SIMD_INLINE vfloat4 hmax(vfloat4 a)
+{
+ a = max(a, vfloat4(_mm_shuffle_ps(a.m, a.m, _MM_SHUFFLE(0, 0, 3, 2))));
+ a = max(a, vfloat4(_mm_shuffle_ps(a.m, a.m, _MM_SHUFFLE(0, 0, 0, 1))));
+ return vfloat4(_mm_shuffle_ps(a.m, a.m, _MM_SHUFFLE(0, 0, 0, 0)));
+}
+
+/**
+ * @brief Return the horizontal sum of a vector as a scalar.
+ */
+ASTCENC_SIMD_INLINE float hadd_s(vfloat4 a)
+{
+ // Add top and bottom halves, lane 1/0
+ __m128 t = _mm_add_ps(a.m, _mm_movehl_ps(a.m, a.m));
+
+ // Add top and bottom halves, lane 0 (_mm_hadd_ps exists but slow)
+ t = _mm_add_ss(t, _mm_shuffle_ps(t, t, 0x55));
+
+ return _mm_cvtss_f32(t);
+}
+
+/**
+ * @brief Return the sqrt of the lanes in the vector.
+ */
+ASTCENC_SIMD_INLINE vfloat4 sqrt(vfloat4 a)
+{
+ return vfloat4(_mm_sqrt_ps(a.m));
+}
+
+/**
+ * @brief Return lanes from @c b if @c cond is set, else @c a.
+ */
+ASTCENC_SIMD_INLINE vfloat4 select(vfloat4 a, vfloat4 b, vmask4 cond)
+{
+#if ASTCENC_SSE >= 41
+ return vfloat4(_mm_blendv_ps(a.m, b.m, cond.m));
+#else
+ return vfloat4(_mm_or_ps(_mm_and_ps(cond.m, b.m), _mm_andnot_ps(cond.m, a.m)));
+#endif
+}
+
+/**
+ * @brief Return lanes from @c b if MSB of @c cond is set, else @c a.
+ */
+ASTCENC_SIMD_INLINE vfloat4 select_msb(vfloat4 a, vfloat4 b, vmask4 cond)
+{
+#if ASTCENC_SSE >= 41
+ return vfloat4(_mm_blendv_ps(a.m, b.m, cond.m));
+#else
+ __m128 d = _mm_castsi128_ps(_mm_srai_epi32(_mm_castps_si128(cond.m), 31));
+ return vfloat4(_mm_or_ps(_mm_and_ps(d, b.m), _mm_andnot_ps(d, a.m)));
+#endif
+}
+
+/**
+ * @brief Load a vector of gathered results from an array.
+ */
+ASTCENC_SIMD_INLINE vfloat4 gatherf(const float* base, vint4 indices)
+{
+#if ASTCENC_AVX >= 2
+ return vfloat4(_mm_i32gather_ps(base, indices.m, 4));
+#else
+ alignas(16) int idx[4];
+ storea(indices, idx);
+ return vfloat4(base[idx[0]], base[idx[1]], base[idx[2]], base[idx[3]]);
+#endif
+}
+
+/**
+ * @brief Store a vector to an unaligned memory address.
+ */
+ASTCENC_SIMD_INLINE void store(vfloat4 a, float* p)
+{
+ _mm_storeu_ps(p, a.m);
+}
+
+/**
+ * @brief Store a vector to a 16B aligned memory address.
+ */
+ASTCENC_SIMD_INLINE void storea(vfloat4 a, float* p)
+{
+ _mm_store_ps(p, a.m);
+}
+
+/**
+ * @brief Return an integer value for a float vector, using truncation.
+ */
+ASTCENC_SIMD_INLINE vint4 float_to_int(vfloat4 a)
+{
+ return vint4(_mm_cvttps_epi32(a.m));
+}
+
+/**
+ * @brief Return an integer value for a float vector, using round-to-nearest.
+ */
+ASTCENC_SIMD_INLINE vint4 float_to_int_rtn(vfloat4 a)
+{
+ a = round(a);
+ return vint4(_mm_cvttps_epi32(a.m));
+}
+
+/**
+ * @brief Return a float value for an integer vector.
+ */
+ASTCENC_SIMD_INLINE vfloat4 int_to_float(vint4 a)
+{
+ return vfloat4(_mm_cvtepi32_ps(a.m));
+}
+
+/**
+ * @brief Return a float16 value for a float vector, using round-to-nearest.
+ */
+ASTCENC_SIMD_INLINE vint4 float_to_float16(vfloat4 a)
+{
+#if ASTCENC_F16C >= 1
+ __m128i packedf16 = _mm_cvtps_ph(a.m, 0);
+ __m128i f16 = _mm_cvtepu16_epi32(packedf16);
+ return vint4(f16);
+#else
+ return vint4(
+ float_to_sf16(a.lane<0>()),
+ float_to_sf16(a.lane<1>()),
+ float_to_sf16(a.lane<2>()),
+ float_to_sf16(a.lane<3>()));
+#endif
+}
+
+/**
+ * @brief Return a float16 value for a float scalar, using round-to-nearest.
+ */
+static inline uint16_t float_to_float16(float a)
+{
+#if ASTCENC_F16C >= 1
+ __m128i f16 = _mm_cvtps_ph(_mm_set1_ps(a), 0);
+ return static_cast<uint16_t>(_mm_cvtsi128_si32(f16));
+#else
+ return float_to_sf16(a);
+#endif
+}
+
+/**
+ * @brief Return a float value for a float16 vector.
+ */
+ASTCENC_SIMD_INLINE vfloat4 float16_to_float(vint4 a)
+{
+#if ASTCENC_F16C >= 1
+ __m128i packed = _mm_packs_epi32(a.m, a.m);
+ __m128 f32 = _mm_cvtph_ps(packed);
+ return vfloat4(f32);
+#else
+ return vfloat4(
+ sf16_to_float(static_cast<uint16_t>(a.lane<0>())),
+ sf16_to_float(static_cast<uint16_t>(a.lane<1>())),
+ sf16_to_float(static_cast<uint16_t>(a.lane<2>())),
+ sf16_to_float(static_cast<uint16_t>(a.lane<3>())));
+#endif
+}
+
+/**
+ * @brief Return a float value for a float16 scalar.
+ */
+ASTCENC_SIMD_INLINE float float16_to_float(uint16_t a)
+{
+#if ASTCENC_F16C >= 1
+ __m128i packed = _mm_set1_epi16(static_cast<short>(a));
+ __m128 f32 = _mm_cvtph_ps(packed);
+ return _mm_cvtss_f32(f32);
+#else
+ return sf16_to_float(a);
+#endif
+}
+
+/**
+ * @brief Return a float value as an integer bit pattern (i.e. no conversion).
+ *
+ * It is a common trick to convert floats into integer bit patterns, perform
+ * some bit hackery based on knowledge they are IEEE 754 layout, and then
+ * convert them back again. This is the first half of that flip.
+ */
+ASTCENC_SIMD_INLINE vint4 float_as_int(vfloat4 a)
+{
+ return vint4(_mm_castps_si128(a.m));
+}
+
+/**
+ * @brief Return an integer value as a float bit pattern (i.e. no conversion).
+ *
+ * It is a common trick to convert floats into integer bit patterns, perform
+ * some bit hackery based on knowledge they are IEEE 754 layout, and then
+ * convert them back again. This is the second half of that flip.
+ */
+ASTCENC_SIMD_INLINE vfloat4 int_as_float(vint4 v)
+{
+ return vfloat4(_mm_castsi128_ps(v.m));
+}
+
+/**
+ * @brief Prepare a vtable lookup table for use with the native SIMD size.
+ */
+ASTCENC_SIMD_INLINE void vtable_prepare(vint4 t0, vint4& t0p)
+{
+ t0p = t0;
+}
+
+/**
+ * @brief Prepare a vtable lookup table for use with the native SIMD size.
+ */
+ASTCENC_SIMD_INLINE void vtable_prepare(vint4 t0, vint4 t1, vint4& t0p, vint4& t1p)
+{
+#if ASTCENC_SSE >= 41
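+ // Pre-XOR the tables so the lookups can combine partial results with XOR
+ // rather than a blend: indices below 16 make the second PSHUFB return zero
+ // (negative index byte), while indices of 16..31 fetch t0[idx & 15] from
+ // the first table, which XOR-ing with (t0 ^ t1)[idx & 15] cancels, leaving
+ // t1[idx & 15].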
+ t0p = t0;
+ t1p = t0 ^ t1;
+#else
+ t0p = t0;
+ t1p = t1;
+#endif
+}
+
+/**
+ * @brief Prepare a vtable lookup table for use with the native SIMD size.
+ */
+ASTCENC_SIMD_INLINE void vtable_prepare(
+ vint4 t0, vint4 t1, vint4 t2, vint4 t3,
+ vint4& t0p, vint4& t1p, vint4& t2p, vint4& t3p)
+{
+#if ASTCENC_SSE >= 41
+ t0p = t0;
+ t1p = t0 ^ t1;
+ t2p = t1 ^ t2;
+ t3p = t2 ^ t3;
+#else
+ t0p = t0;
+ t1p = t1;
+ t2p = t2;
+ t3p = t3;
+#endif
+}
+
+/**
+ * @brief Perform an 8-bit 16-entry table lookup, with 32-bit indexes.
+ */
+ASTCENC_SIMD_INLINE vint4 vtable_8bt_32bi(vint4 t0, vint4 idx)
+{
+#if ASTCENC_SSE >= 41
+ // Set index byte MSB to 1 for unused bytes so shuffle returns zero
+ __m128i idxx = _mm_or_si128(idx.m, _mm_set1_epi32(static_cast<int>(0xFFFFFF00)));
+
+ __m128i result = _mm_shuffle_epi8(t0.m, idxx);
+ return vint4(result);
+#else
+ alignas(ASTCENC_VECALIGN) uint8_t table[16];
+ storea(t0, reinterpret_cast<int*>(table + 0));
+
+ return vint4(table[idx.lane<0>()],
+ table[idx.lane<1>()],
+ table[idx.lane<2>()],
+ table[idx.lane<3>()]);
+#endif
+}
+
+/**
+ * @brief Perform an 8-bit 32-entry table lookup, with 32-bit indexes.
+ */
+ASTCENC_SIMD_INLINE vint4 vtable_8bt_32bi(vint4 t0, vint4 t1, vint4 idx)
+{
+#if ASTCENC_SSE >= 41
+ // Set index byte MSB to 1 for unused bytes so shuffle returns zero
+ __m128i idxx = _mm_or_si128(idx.m, _mm_set1_epi32(static_cast<int>(0xFFFFFF00)));
+
+ __m128i result = _mm_shuffle_epi8(t0.m, idxx);
+ idxx = _mm_sub_epi8(idxx, _mm_set1_epi8(16));
+
+ __m128i result2 = _mm_shuffle_epi8(t1.m, idxx);
+ result = _mm_xor_si128(result, result2);
+
+ return vint4(result);
+#else
+ alignas(ASTCENC_VECALIGN) uint8_t table[32];
+ storea(t0, reinterpret_cast<int*>(table + 0));
+ storea(t1, reinterpret_cast<int*>(table + 16));
+
+ return vint4(table[idx.lane<0>()],
+ table[idx.lane<1>()],
+ table[idx.lane<2>()],
+ table[idx.lane<3>()]);
+#endif
+}
+
+/**
+ * @brief Perform an 8-bit 64-entry table lookup, with 32-bit indexes.
+ */
+ASTCENC_SIMD_INLINE vint4 vtable_8bt_32bi(vint4 t0, vint4 t1, vint4 t2, vint4 t3, vint4 idx)
+{
+#if ASTCENC_SSE >= 41
+ // Set index byte MSB to 1 for unused bytes so shuffle returns zero
+ __m128i idxx = _mm_or_si128(idx.m, _mm_set1_epi32(static_cast<int>(0xFFFFFF00)));
+
+ __m128i result = _mm_shuffle_epi8(t0.m, idxx);
+ idxx = _mm_sub_epi8(idxx, _mm_set1_epi8(16));
+
+ __m128i result2 = _mm_shuffle_epi8(t1.m, idxx);
+ result = _mm_xor_si128(result, result2);
+ idxx = _mm_sub_epi8(idxx, _mm_set1_epi8(16));
+
+ result2 = _mm_shuffle_epi8(t2.m, idxx);
+ result = _mm_xor_si128(result, result2);
+ idxx = _mm_sub_epi8(idxx, _mm_set1_epi8(16));
+
+ result2 = _mm_shuffle_epi8(t3.m, idxx);
+ result = _mm_xor_si128(result, result2);
+
+ return vint4(result);
+#else
+ alignas(ASTCENC_VECALIGN) uint8_t table[64];
+ storea(t0, reinterpret_cast<int*>(table + 0));
+ storea(t1, reinterpret_cast<int*>(table + 16));
+ storea(t2, reinterpret_cast<int*>(table + 32));
+ storea(t3, reinterpret_cast<int*>(table + 48));
+
+ return vint4(table[idx.lane<0>()],
+ table[idx.lane<1>()],
+ table[idx.lane<2>()],
+ table[idx.lane<3>()]);
+#endif
+}
+
+/**
+ * @brief Return a vector of interleaved RGBA data.
+ *
+ * Input vectors have the value stored in the bottom 8 bits of each lane,
+ * with high bits set to zero.
+ *
+ * Output vector stores a single RGBA texel packed in each lane.
+ */
+ASTCENC_SIMD_INLINE vint4 interleave_rgba8(vint4 r, vint4 g, vint4 b, vint4 a)
+{
+// Work around an Xcode compiler internal fault; note this is slower than
+// _mm_slli_epi32 so we should revert it when we get the opportunity
+#if defined(__APPLE__)
+ __m128i value = r.m;
+ value = _mm_add_epi32(value, _mm_bslli_si128(g.m, 1));
+ value = _mm_add_epi32(value, _mm_bslli_si128(b.m, 2));
+ value = _mm_add_epi32(value, _mm_bslli_si128(a.m, 3));
+ return vint4(value);
+#else
+ __m128i value = r.m;
+ value = _mm_add_epi32(value, _mm_slli_epi32(g.m, 8));
+ value = _mm_add_epi32(value, _mm_slli_epi32(b.m, 16));
+ value = _mm_add_epi32(value, _mm_slli_epi32(a.m, 24));
+ return vint4(value);
+#endif
+}
+
+/**
+ * @brief Store a vector, skipping masked lanes.
+ *
+ * All masked lanes must be at the end of vector, after all non-masked lanes.
+ */
+ASTCENC_SIMD_INLINE void store_lanes_masked(int* base, vint4 data, vmask4 mask)
+{
+#if ASTCENC_AVX >= 2
+ _mm_maskstore_epi32(base, _mm_castps_si128(mask.m), data.m);
+#else
+ // Note - we cannot use _mm_maskmoveu_si128 as the underlying hardware doesn't guarantee
+ // fault suppression on masked lanes so we can get page faults at the end of an image.
+ if (mask.lane<3>() != 0.0f)
+ {
+ store(data, base);
+ }
+ else if (mask.lane<2>() != 0.0f)
+ {
+ base[0] = data.lane<0>();
+ base[1] = data.lane<1>();
+ base[2] = data.lane<2>();
+ }
+ else if (mask.lane<1>() != 0.0f)
+ {
+ base[0] = data.lane<0>();
+ base[1] = data.lane<1>();
+ }
+ else if (mask.lane<0>() != 0.0f)
+ {
+ base[0] = data.lane<0>();
+ }
+#endif
+}
+
+#if defined(ASTCENC_NO_INVARIANCE) && (ASTCENC_SSE >= 41)
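+// These dot product helpers use the SSE4.1 DPPS instruction, which may
+// accumulate in a different order to the portable reference sequence, so they
+// are only exposed when cross-implementation invariance is explicitly disabled.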
+
+#define ASTCENC_USE_NATIVE_DOT_PRODUCT 1
+
+/**
+ * @brief Return the dot product for the full 4 lanes, returning scalar.
+ */
+ASTCENC_SIMD_INLINE float dot_s(vfloat4 a, vfloat4 b)
+{
+ return _mm_cvtss_f32(_mm_dp_ps(a.m, b.m, 0xFF));
+}
+
+/**
+ * @brief Return the dot product for the full 4 lanes, returning vector.
+ */
+ASTCENC_SIMD_INLINE vfloat4 dot(vfloat4 a, vfloat4 b)
+{
+ return vfloat4(_mm_dp_ps(a.m, b.m, 0xFF));
+}
+
+/**
+ * @brief Return the dot product for the bottom 3 lanes, returning scalar.
+ */
+ASTCENC_SIMD_INLINE float dot3_s(vfloat4 a, vfloat4 b)
+{
+ return _mm_cvtss_f32(_mm_dp_ps(a.m, b.m, 0x77));
+}
+
+/**
+ * @brief Return the dot product for the bottom 3 lanes, returning vector.
+ */
+ASTCENC_SIMD_INLINE vfloat4 dot3(vfloat4 a, vfloat4 b)
+{
+ return vfloat4(_mm_dp_ps(a.m, b.m, 0x77));
+}
+
+#endif // #if defined(ASTCENC_NO_INVARIANCE) && (ASTCENC_SSE >= 41)
+
+#if ASTCENC_POPCNT >= 1
+
+#define ASTCENC_USE_NATIVE_POPCOUNT 1
+
+/**
+ * @brief Population bit count.
+ *
+ * @param v The value to population count.
+ *
+ * @return The number of 1 bits.
+ */
+ASTCENC_SIMD_INLINE int popcount(uint64_t v)
+{
+ return static_cast<int>(_mm_popcnt_u64(v));
+}
+
+#endif // ASTCENC_POPCNT >= 1
+
+#endif // #ifndef ASTC_VECMATHLIB_SSE_4_H_INCLUDED
diff --git a/thirdparty/astcenc/astcenc_weight_align.cpp b/thirdparty/astcenc/astcenc_weight_align.cpp
new file mode 100644
index 0000000000..e40a318cf5
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_weight_align.cpp
@@ -0,0 +1,479 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2011-2022 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+#if !defined(ASTCENC_DECOMPRESS_ONLY)
+
+/**
+ * @brief Functions for angular-sum algorithm for weight alignment.
+ *
+ * This algorithm works as follows:
+ * - we compute a complex number P as (cos s*i, sin s*i) for each weight,
+ * where i is the input value and s is a scaling factor based on the spacing between the weights.
+ * - we then add together complex numbers for all the weights.
+ * - we then compute the length and angle of the resulting sum.
+ *
+ * This should produce the following results:
+ * - perfect alignment results in a vector whose length is equal to the sum of lengths of all inputs
+ * - even distribution results in a vector of length 0.
+ * - all samples identical results in perfect alignment for every scaling.
+ *
+ * For each scaling factor within a given set, we compute an alignment factor from 0 to 1. This
+ * should then result in some scalings standing out as having particularly good alignment factors;
+ * we can use this to produce a set of candidate scale/shift values for various quantization levels;
+ * we should then actually try them and see what happens.
+ */
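+//
+// As a minimal scalar sketch of the idea (illustrative only; the code below
+// is vectorized and table driven), the alignment for one candidate scaling
+// could be computed as:
+//
+//     float alignment(const float* weights, unsigned int count, float scale)
+//     {
+//         float x = 0.0f;
+//         float y = 0.0f;
+//         for (unsigned int i = 0; i < count; i++)
+//         {
+//             x += cosf(scale * weights[i]);
+//             y += sinf(scale * weights[i]);
+//         }
+//         return sqrtf(x * x + y * y);
+//     }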
+
+#include "astcenc_internal.h"
+#include "astcenc_vecmathlib.h"
+
+#include <stdio.h>
+#include <cassert>
+#include <cstring>
+
+static constexpr unsigned int ANGULAR_STEPS { 32 };
+
+static_assert((ANGULAR_STEPS % ASTCENC_SIMD_WIDTH) == 0,
+ "ANGULAR_STEPS must be multiple of ASTCENC_SIMD_WIDTH");
+
+static_assert(ANGULAR_STEPS >= 32,
+ "ANGULAR_STEPS must be at least max(steps_for_quant_level)");
+
+// Store a reduced sin/cos table for 64 possible weight values; this causes
+// slight quality loss compared to using sin() and cos() directly. Must be 2^N.
+static constexpr unsigned int SINCOS_STEPS { 64 };
+
+static const uint8_t steps_for_quant_level[12] {
+ 2, 3, 4, 5, 6, 8, 10, 12, 16, 20, 24, 32
+};
+
+alignas(ASTCENC_VECALIGN) static float sin_table[SINCOS_STEPS][ANGULAR_STEPS];
+alignas(ASTCENC_VECALIGN) static float cos_table[SINCOS_STEPS][ANGULAR_STEPS];
+
+#if defined(ASTCENC_DIAGNOSTICS)
+ static bool print_once { true };
+#endif
+
+/* See header for documentation. */
+void prepare_angular_tables()
+{
+ for (unsigned int i = 0; i < ANGULAR_STEPS; i++)
+ {
+ float angle_step = static_cast<float>(i + 1);
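+ // Angular step i corresponds to a weight grid with (i + 1) steps per
+ // unit interval, matching the rcp_stepsize values used in the search.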
+
+ for (unsigned int j = 0; j < SINCOS_STEPS; j++)
+ {
+ sin_table[j][i] = static_cast<float>(sinf((2.0f * astc::PI / (SINCOS_STEPS - 1.0f)) * angle_step * static_cast<float>(j)));
+ cos_table[j][i] = static_cast<float>(cosf((2.0f * astc::PI / (SINCOS_STEPS - 1.0f)) * angle_step * static_cast<float>(j)));
+ }
+ }
+}
+
+/**
+ * @brief Compute the angular alignment factors and offsets.
+ *
+ * @param weight_count The number of (decimated) weights.
+ * @param dec_weight_ideal_value The ideal decimated unquantized weight values.
+ * @param max_angular_steps The maximum number of steps to be tested.
+ * @param[out] offsets The output angular offsets array.
+ */
+static void compute_angular_offsets(
+ unsigned int weight_count,
+ const float* dec_weight_ideal_value,
+ unsigned int max_angular_steps,
+ float* offsets
+) {
+ promise(weight_count > 0);
+ promise(max_angular_steps > 0);
+
+ alignas(ASTCENC_VECALIGN) int isamplev[BLOCK_MAX_WEIGHTS];
+
+ // Precompute isample; arrays are always allocated 64 elements long
+ for (unsigned int i = 0; i < weight_count; i += ASTCENC_SIMD_WIDTH)
+ {
+ // Adding 1.5 * 2^23 and reinterpreting the bits extracts a round-to-nearest int
+ vfloat sample = loada(dec_weight_ideal_value + i) * (SINCOS_STEPS - 1.0f) + vfloat(12582912.0f);
+ vint isample = float_as_int(sample) & vint((SINCOS_STEPS - 1));
+ storea(isample, isamplev + i);
+ }
+
+ // Arrays are multiple of SIMD width (ANGULAR_STEPS), safe to overshoot max
+ vfloat mult = vfloat(1.0f / (2.0f * astc::PI));
+
+ for (unsigned int i = 0; i < max_angular_steps; i += ASTCENC_SIMD_WIDTH)
+ {
+ vfloat anglesum_x = vfloat::zero();
+ vfloat anglesum_y = vfloat::zero();
+
+ for (unsigned int j = 0; j < weight_count; j++)
+ {
+ int isample = isamplev[j];
+ anglesum_x += loada(cos_table[isample] + i);
+ anglesum_y += loada(sin_table[isample] + i);
+ }
+
+ vfloat angle = atan2(anglesum_y, anglesum_x);
+ vfloat ofs = angle * mult;
+ storea(ofs, offsets + i);
+ }
+}
+
+/**
+ * @brief For a given step size compute the lowest and highest weight.
+ *
+ * Compute the lowest and highest weight that results from quantizing using the given stepsize and
+ * offset, and then compute the resulting error. The cut errors indicate the error that results from
+ * forcing samples that should have had one weight value one step up or down.
+ *
+ * @param weight_count The number of (decimated) weights.
+ * @param dec_weight_ideal_value The ideal decimated unquantized weight values.
+ * @param max_angular_steps The maximum number of steps to be tested.
+ * @param max_quant_steps The maximum quantization level to be tested.
+ * @param offsets The angular offsets array.
+ * @param[out] lowest_weight Per angular step, the lowest weight.
+ * @param[out] weight_span Per angular step, the span between lowest and highest weight.
+ * @param[out] error Per angular step, the error.
+ * @param[out] cut_low_weight_error Per angular step, the low weight cut error.
+ * @param[out] cut_high_weight_error Per angular step, the high weight cut error.
+ */
+static void compute_lowest_and_highest_weight(
+ unsigned int weight_count,
+ const float* dec_weight_ideal_value,
+ unsigned int max_angular_steps,
+ unsigned int max_quant_steps,
+ const float* offsets,
+ float* lowest_weight,
+ int* weight_span,
+ float* error,
+ float* cut_low_weight_error,
+ float* cut_high_weight_error
+) {
+ promise(weight_count > 0);
+ promise(max_angular_steps > 0);
+
+ vfloat rcp_stepsize = vfloat::lane_id() + vfloat(1.0f);
+
+ // Arrays are ANGULAR_STEPS long, so always safe to run full vectors
+ for (unsigned int sp = 0; sp < max_angular_steps; sp += ASTCENC_SIMD_WIDTH)
+ {
+ vfloat minidx(128.0f);
+ vfloat maxidx(-128.0f);
+ vfloat errval = vfloat::zero();
+ vfloat cut_low_weight_err = vfloat::zero();
+ vfloat cut_high_weight_err = vfloat::zero();
+ vfloat offset = loada(offsets + sp);
+
+ for (unsigned int j = 0; j < weight_count; j++)
+ {
+ vfloat sval = load1(dec_weight_ideal_value + j) * rcp_stepsize - offset;
+ vfloat svalrte = round(sval);
+ vfloat diff = sval - svalrte;
+ errval += diff * diff;
+
+ // Reset tracker on min hit
+ vmask mask = svalrte < minidx;
+ minidx = select(minidx, svalrte, mask);
+ cut_low_weight_err = select(cut_low_weight_err, vfloat::zero(), mask);
+
+ // Accumulate on min hit
+ mask = svalrte == minidx;
+ vfloat accum = cut_low_weight_err + vfloat(1.0f) - vfloat(2.0f) * diff;
+ cut_low_weight_err = select(cut_low_weight_err, accum, mask);
+
+ // Reset tracker on max hit
+ mask = svalrte > maxidx;
+ maxidx = select(maxidx, svalrte, mask);
+ cut_high_weight_err = select(cut_high_weight_err, vfloat::zero(), mask);
+
+ // Accumulate on max hit
+ mask = svalrte == maxidx;
+ accum = cut_high_weight_err + vfloat(1.0f) + vfloat(2.0f) * diff;
+ cut_high_weight_err = select(cut_high_weight_err, accum, mask);
+ }
+
+ // Write out min weight and weight span; clamp span to a usable range
+ vint span = float_to_int(maxidx - minidx + vfloat(1));
+ span = min(span, vint(max_quant_steps + 3));
+ span = max(span, vint(2));
+ storea(minidx, lowest_weight + sp);
+ storea(span, weight_span + sp);
+
+ // The cut_(lowest/highest)_weight_error indicate the error that results from forcing
+ // samples that should have had the weight value one step (up/down).
+ vfloat ssize = 1.0f / rcp_stepsize;
+ vfloat errscale = ssize * ssize;
+ storea(errval * errscale, error + sp);
+ storea(cut_low_weight_err * errscale, cut_low_weight_error + sp);
+ storea(cut_high_weight_err * errscale, cut_high_weight_error + sp);
+
+ rcp_stepsize = rcp_stepsize + vfloat(ASTCENC_SIMD_WIDTH);
+ }
+}
+
+/**
+ * @brief The main function for the angular algorithm.
+ *
+ * @param weight_count The number of (decimated) weights.
+ * @param dec_weight_ideal_value The ideal decimated unquantized weight values.
+ * @param max_quant_level The maximum quantization level to be tested.
+ * @param[out] low_value Per angular step, the lowest weight value.
+ * @param[out] high_value Per angular step, the highest weight value.
+ */
+static void compute_angular_endpoints_for_quant_levels(
+ unsigned int weight_count,
+ const float* dec_weight_ideal_value,
+ unsigned int max_quant_level,
+ float low_value[TUNE_MAX_ANGULAR_QUANT + 1],
+ float high_value[TUNE_MAX_ANGULAR_QUANT + 1]
+) {
+ unsigned int max_quant_steps = steps_for_quant_level[max_quant_level];
+ unsigned int max_angular_steps = steps_for_quant_level[max_quant_level];
+
+ alignas(ASTCENC_VECALIGN) float angular_offsets[ANGULAR_STEPS];
+
+ compute_angular_offsets(weight_count, dec_weight_ideal_value,
+ max_angular_steps, angular_offsets);
+
+ alignas(ASTCENC_VECALIGN) float lowest_weight[ANGULAR_STEPS];
+ alignas(ASTCENC_VECALIGN) int32_t weight_span[ANGULAR_STEPS];
+ alignas(ASTCENC_VECALIGN) float error[ANGULAR_STEPS];
+ alignas(ASTCENC_VECALIGN) float cut_low_weight_error[ANGULAR_STEPS];
+ alignas(ASTCENC_VECALIGN) float cut_high_weight_error[ANGULAR_STEPS];
+
+ compute_lowest_and_highest_weight(weight_count, dec_weight_ideal_value,
+ max_angular_steps, max_quant_steps,
+ angular_offsets, lowest_weight, weight_span, error,
+ cut_low_weight_error, cut_high_weight_error);
+
+ // For each quantization level, find the best error terms. Use packed vectors so data-dependent
+ // branches can become selects. This involves some integer to float casts, but the values are
+ // small enough so they never round the wrong way.
+ vfloat4 best_results[36];
+
+ // Initialize the array to some safe defaults
+ promise(max_quant_steps > 0);
+ for (unsigned int i = 0; i < (max_quant_steps + 4); i++)
+ {
+ // Lane<0> = Best error
+ // Lane<1> = Best scale; -1 indicates no solution found
+ // Lane<2> = Cut low weight
+ best_results[i] = vfloat4(ERROR_CALC_DEFAULT, -1.0f, 0.0f, 0.0f);
+ }
+
+ promise(max_angular_steps > 0);
+ for (unsigned int i = 0; i < max_angular_steps; i++)
+ {
+ float i_flt = static_cast<float>(i);
+
+ int idx_span = weight_span[i];
+
+ float error_cut_low = error[i] + cut_low_weight_error[i];
+ float error_cut_high = error[i] + cut_high_weight_error[i];
+ float error_cut_low_high = error[i] + cut_low_weight_error[i] + cut_high_weight_error[i];
+
+ // Check best error against record N
+ vfloat4 best_result = best_results[idx_span];
+ vfloat4 new_result = vfloat4(error[i], i_flt, 0.0f, 0.0f);
+ vmask4 mask = vfloat4(best_result.lane<0>()) > vfloat4(error[i]);
+ best_results[idx_span] = select(best_result, new_result, mask);
+
+ // Check best error against record N-1 with either cut low or cut high
+ best_result = best_results[idx_span - 1];
+
+ new_result = vfloat4(error_cut_low, i_flt, 1.0f, 0.0f);
+ mask = vfloat4(best_result.lane<0>()) > vfloat4(error_cut_low);
+ best_result = select(best_result, new_result, mask);
+
+ new_result = vfloat4(error_cut_high, i_flt, 0.0f, 0.0f);
+ mask = vfloat4(best_result.lane<0>()) > vfloat4(error_cut_high);
+ best_results[idx_span - 1] = select(best_result, new_result, mask);
+
+ // Check best error against record N-2 with both cut low and high
+ best_result = best_results[idx_span - 2];
+ new_result = vfloat4(error_cut_low_high, i_flt, 1.0f, 0.0f);
+ mask = vfloat4(best_result.lane<0>()) > vfloat4(error_cut_low_high);
+ best_results[idx_span - 2] = select(best_result, new_result, mask);
+ }
+
+ for (unsigned int i = 0; i <= max_quant_level; i++)
+ {
+ unsigned int q = steps_for_quant_level[i];
+ int bsi = static_cast<int>(best_results[q].lane<1>());
+
+ // Did we find anything?
+#if defined(ASTCENC_DIAGNOSTICS)
+ if ((bsi < 0) && print_once)
+ {
+ print_once = false;
+ printf("INFO: Unable to find full encoding within search error limit.\n\n");
+ }
+#endif
+
+ bsi = astc::max(0, bsi);
+
+ float lwi = lowest_weight[bsi] + best_results[q].lane<2>();
+ float hwi = lwi + static_cast<float>(q) - 1.0f;
+
+ float stepsize = 1.0f / (1.0f + static_cast<float>(bsi));
+ low_value[i] = (angular_offsets[bsi] + lwi) * stepsize;
+ high_value[i] = (angular_offsets[bsi] + hwi) * stepsize;
+ }
+}
+
+/* See header for documentation. */
+void compute_angular_endpoints_1plane(
+ bool only_always,
+ const block_size_descriptor& bsd,
+ const float* dec_weight_ideal_value,
+ unsigned int max_weight_quant,
+ compression_working_buffers& tmpbuf
+) {
+ float (&low_value)[WEIGHTS_MAX_BLOCK_MODES] = tmpbuf.weight_low_value1;
+ float (&high_value)[WEIGHTS_MAX_BLOCK_MODES] = tmpbuf.weight_high_value1;
+
+ float (&low_values)[WEIGHTS_MAX_DECIMATION_MODES][TUNE_MAX_ANGULAR_QUANT + 1] = tmpbuf.weight_low_values1;
+ float (&high_values)[WEIGHTS_MAX_DECIMATION_MODES][TUNE_MAX_ANGULAR_QUANT + 1] = tmpbuf.weight_high_values1;
+
+ unsigned int max_decimation_modes = only_always ? bsd.decimation_mode_count_always
+ : bsd.decimation_mode_count_selected;
+ promise(max_decimation_modes > 0);
+ for (unsigned int i = 0; i < max_decimation_modes; i++)
+ {
+ const decimation_mode& dm = bsd.decimation_modes[i];
+ if (!dm.is_ref_1_plane(static_cast<quant_method>(max_weight_quant)))
+ {
+ continue;
+ }
+
+ unsigned int weight_count = bsd.get_decimation_info(i).weight_count;
+
+ unsigned int max_precision = dm.maxprec_1plane;
+ if (max_precision > TUNE_MAX_ANGULAR_QUANT)
+ {
+ max_precision = TUNE_MAX_ANGULAR_QUANT;
+ }
+
+ if (max_precision > max_weight_quant)
+ {
+ max_precision = max_weight_quant;
+ }
+
+ compute_angular_endpoints_for_quant_levels(
+ weight_count,
+ dec_weight_ideal_value + i * BLOCK_MAX_WEIGHTS,
+ max_precision, low_values[i], high_values[i]);
+ }
+
+ unsigned int max_block_modes = only_always ? bsd.block_mode_count_1plane_always
+ : bsd.block_mode_count_1plane_selected;
+ promise(max_block_modes > 0);
+ for (unsigned int i = 0; i < max_block_modes; i++)
+ {
+ const block_mode& bm = bsd.block_modes[i];
+ assert(!bm.is_dual_plane);
+
+ unsigned int quant_mode = bm.quant_mode;
+ unsigned int decim_mode = bm.decimation_mode;
+
+ if (quant_mode <= TUNE_MAX_ANGULAR_QUANT)
+ {
+ low_value[i] = low_values[decim_mode][quant_mode];
+ high_value[i] = high_values[decim_mode][quant_mode];
+ }
+ else
+ {
+ low_value[i] = 0.0f;
+ high_value[i] = 1.0f;
+ }
+ }
+}
+
+/* See header for documentation. */
+void compute_angular_endpoints_2planes(
+ const block_size_descriptor& bsd,
+ const float* dec_weight_ideal_value,
+ unsigned int max_weight_quant,
+ compression_working_buffers& tmpbuf
+) {
+ float (&low_value1)[WEIGHTS_MAX_BLOCK_MODES] = tmpbuf.weight_low_value1;
+ float (&high_value1)[WEIGHTS_MAX_BLOCK_MODES] = tmpbuf.weight_high_value1;
+ float (&low_value2)[WEIGHTS_MAX_BLOCK_MODES] = tmpbuf.weight_low_value2;
+ float (&high_value2)[WEIGHTS_MAX_BLOCK_MODES] = tmpbuf.weight_high_value2;
+
+ float (&low_values1)[WEIGHTS_MAX_DECIMATION_MODES][TUNE_MAX_ANGULAR_QUANT + 1] = tmpbuf.weight_low_values1;
+ float (&high_values1)[WEIGHTS_MAX_DECIMATION_MODES][TUNE_MAX_ANGULAR_QUANT + 1] = tmpbuf.weight_high_values1;
+ float (&low_values2)[WEIGHTS_MAX_DECIMATION_MODES][TUNE_MAX_ANGULAR_QUANT + 1] = tmpbuf.weight_low_values2;
+ float (&high_values2)[WEIGHTS_MAX_DECIMATION_MODES][TUNE_MAX_ANGULAR_QUANT + 1] = tmpbuf.weight_high_values2;
+
+ promise(bsd.decimation_mode_count_selected > 0);
+ for (unsigned int i = 0; i < bsd.decimation_mode_count_selected; i++)
+ {
+ const decimation_mode& dm = bsd.decimation_modes[i];
+ if (!dm.is_ref_2_plane(static_cast<quant_method>(max_weight_quant)))
+ {
+ continue;
+ }
+
+ unsigned int weight_count = bsd.get_decimation_info(i).weight_count;
+
+ unsigned int max_precision = dm.maxprec_2planes;
+ if (max_precision > TUNE_MAX_ANGULAR_QUANT)
+ {
+ max_precision = TUNE_MAX_ANGULAR_QUANT;
+ }
+
+ if (max_precision > max_weight_quant)
+ {
+ max_precision = max_weight_quant;
+ }
+
+ compute_angular_endpoints_for_quant_levels(
+ weight_count,
+ dec_weight_ideal_value + i * BLOCK_MAX_WEIGHTS,
+ max_precision, low_values1[i], high_values1[i]);
+
+ compute_angular_endpoints_for_quant_levels(
+ weight_count,
+ dec_weight_ideal_value + i * BLOCK_MAX_WEIGHTS + WEIGHTS_PLANE2_OFFSET,
+ max_precision, low_values2[i], high_values2[i]);
+ }
+
+ unsigned int start = bsd.block_mode_count_1plane_selected;
+ unsigned int end = bsd.block_mode_count_1plane_2plane_selected;
+ for (unsigned int i = start; i < end; i++)
+ {
+ const block_mode& bm = bsd.block_modes[i];
+ unsigned int quant_mode = bm.quant_mode;
+ unsigned int decim_mode = bm.decimation_mode;
+
+ if (quant_mode <= TUNE_MAX_ANGULAR_QUANT)
+ {
+ low_value1[i] = low_values1[decim_mode][quant_mode];
+ high_value1[i] = high_values1[decim_mode][quant_mode];
+ low_value2[i] = low_values2[decim_mode][quant_mode];
+ high_value2[i] = high_values2[decim_mode][quant_mode];
+ }
+ else
+ {
+ low_value1[i] = 0.0f;
+ high_value1[i] = 1.0f;
+ low_value2[i] = 0.0f;
+ high_value2[i] = 1.0f;
+ }
+ }
+}
+
+#endif
diff --git a/thirdparty/astcenc/astcenc_weight_quant_xfer_tables.cpp b/thirdparty/astcenc/astcenc_weight_quant_xfer_tables.cpp
new file mode 100644
index 0000000000..8fdf73adc2
--- /dev/null
+++ b/thirdparty/astcenc/astcenc_weight_quant_xfer_tables.cpp
@@ -0,0 +1,147 @@
+// SPDX-License-Identifier: Apache-2.0
+// ----------------------------------------------------------------------------
+// Copyright 2011-2021 Arm Limited
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// ----------------------------------------------------------------------------
+
+/**
+ * @brief Data tables for quantization transfer.
+ */
+
+#include "astcenc_internal.h"
+
+#define _ 0 // Using _ to indicate an entry that will not be used.
+
+const quant_and_transfer_table quant_and_xfer_tables[12] {
+	// QUANT_2, range 0..1
+ {
+ {0, 64},
+ {0, 1},
+ {0, 64},
+ {0x4000,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,
+ _,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,
+ 0x4000}
+ },
+ // QUANT_3, range 0..2
+ {
+ {0, 32, 64},
+ {0, 1, 2},
+ {0, 32, 64},
+ {0x2000,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,
+ _,_,0x4000,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,
+ _,_,_,_,0x4020}
+ },
+ // QUANT_4, range 0..3
+ {
+ {0, 21, 43, 64},
+ {0, 1, 2, 3},
+ {0, 21, 43, 64},
+ {0x1500,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,0x2b00,_,_,_,_,
+ _,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,0x4015,_,_,_,_,_,_,_,_,_,_,_,_,
+ _,_,_,_,_,_,_,_,0x402b}
+ },
+	// QUANT_5, range 0..4
+ {
+ {0, 16, 32, 48, 64},
+ {0, 1, 2, 3, 4},
+ {0, 16, 32, 48, 64},
+ {0x1000,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,0x2000,_,_,_,_,_,_,_,_,_,
+ _,_,_,_,_,_,0x3010,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,0x4020,_,_,_,
+ _,_,_,_,_,_,_,_,_,_,_,_,0x4030}
+ },
+ // QUANT_6, range 0..5
+ {
+ {0, 12, 25, 39, 52, 64},
+ {0, 2, 4, 5, 3, 1},
+ {0, 64, 12, 52, 25, 39},
+ {0x0c00,_,_,_,_,_,_,_,_,_,_,_,0x1900,_,_,_,_,_,_,_,_,_,_,_,_,
+ 0x270c,_,_,_,_,_,_,_,_,_,_,_,_,_,0x3419,_,_,_,_,_,_,_,_,_,_,
+ _,_,0x4027,_,_,_,_,_,_,_,_,_,_,_,0x4034}
+ },
+ // QUANT_8, range 0..7
+ {
+ {0, 9, 18, 27, 37, 46, 55, 64},
+ {0, 1, 2, 3, 4, 5, 6, 7},
+ {0, 9, 18, 27, 37, 46, 55, 64},
+ {0x0900,_,_,_,_,_,_,_,_,0x1200,_,_,_,_,_,_,_,_,0x1b09,_,_,
+ _,_,_,_,_,_,0x2512,_,_,_,_,_,_,_,_,_,0x2e1b,_,_,_,_,_,_,_,_,
+ 0x3725,_,_,_,_,_,_,_,_,0x402e,_,_,_,_,_,_,_,_,0x4037}
+ },
+ // QUANT_10, range 0..9
+ {
+ {0, 7, 14, 21, 28, 36, 43, 50, 57, 64},
+ {0, 2, 4, 6, 8, 9, 7, 5, 3, 1},
+ {0, 64, 7, 57, 14, 50, 21, 43, 28, 36},
+ {0x0700,_,_,_,_,_,_,0x0e00,_,_,_,_,_,_,0x1507,_,_,_,_,_,_,
+ 0x1c0e,_,_,_,_,_,_,0x2415,_,_,_,_,_,_,_,0x2b1c,_,_,_,_,_,
+ _,0x3224,_,_,_,_,_,_,0x392b,_,_,_,_,_,_,0x4032,_,_,_,_,_,
+ _,0x4039}
+ },
+ // QUANT_12, range 0..11
+ {
+ {0, 5, 11, 17, 23, 28, 36, 41, 47, 53, 59, 64},
+ {0, 4, 8, 2, 6, 10, 11, 7, 3, 9, 5, 1},
+ {0, 64, 17, 47, 5, 59, 23, 41, 11, 53, 28, 36},
+ {0x0500,_,_,_,_,0x0b00,_,_,_,_,_,0x1105,_,_,_,_,_,
+ 0x170b,_,_,_,_,_,0x1c11,_,_,_,_,0x2417,_,_,_,_,_,_,_,
+ 0x291c,_,_,_,_,0x2f24,_,_,_,_,_,0x3529,_,_,_,_,_,
+ 0x3b2f,_,_,_,_,_,0x4035,_,_,_,_,0x403b}
+ },
+ // QUANT_16, range 0..15
+ {
+ {0, 4, 8, 12, 17, 21, 25, 29, 35, 39, 43, 47, 52, 56, 60, 64},
+ {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15},
+ {0, 4, 8, 12, 17, 21, 25, 29, 35, 39, 43, 47, 52, 56, 60, 64},
+ {0x0400,_,_,_,0x0800,_,_,_,0x0c04,_,_,_,0x1108,_,_,_,_,
+ 0x150c,_,_,_,0x1911,_,_,_,0x1d15,_,_,_,0x2319,_,_,_,_,
+ _,0x271d,_,_,_,0x2b23,_,_,_,0x2f27,_,_,_,0x342b,_,_,_,
+ _,0x382f,_,_,_,0x3c34,_,_,_,0x4038,_,_,_,0x403c}
+ },
+ // QUANT_20, range 0..19
+ {
+ {0, 3, 6, 9, 13, 16, 19, 23, 26, 29, 35, 38, 41, 45, 48, 51, 55, 58, 61, 64},
+ {0, 4, 8, 12, 16, 2, 6, 10, 14, 18, 19, 15, 11, 7, 3, 17, 13, 9, 5, 1},
+ {0, 64, 16, 48, 3, 61, 19, 45, 6, 58, 23, 41, 9, 55, 26, 38, 13, 51, 29, 35},
+ {0x0300,_,_,0x0600,_,_,0x0903,_,_,0x0d06,_,_,_,
+ 0x1009,_,_,0x130d,_,_,0x1710,_,_,_,0x1a13,_,_,
+ 0x1d17,_,_,0x231a,_,_,_,_,_,0x261d,_,_,0x2923,_,_,
+ 0x2d26,_,_,_,0x3029,_,_,0x332d,_,_,0x3730,_,_,_,
+ 0x3a33,_,_,0x3d37,_,_,0x403a,_,_,0x403d}
+ },
+ // QUANT_24, range 0..23
+ {
+ {0, 2, 5, 8, 11, 13, 16, 19, 22, 24, 27, 30, 34, 37, 40, 42, 45, 48, 51, 53, 56, 59, 62, 64},
+ {0, 8, 16, 2, 10, 18, 4, 12, 20, 6, 14, 22, 23, 15, 7, 21, 13, 5, 19, 11, 3, 17, 9, 1},
+ {0, 64, 8, 56, 16, 48, 24, 40, 2, 62, 11, 53, 19, 45, 27, 37, 5, 59, 13, 51, 22, 42, 30, 34},
+ {0x0200,_,0x0500,_,_,0x0802,_,_,0x0b05,_,_,0x0d08,
+ _,0x100b,_,_,0x130d,_,_,0x1610,_,_,0x1813,_,
+ 0x1b16,_,_,0x1e18,_,_,0x221b,_,_,_,0x251e,_,_,
+ 0x2822,_,_,0x2a25,_,0x2d28,_,_,0x302a,_,_,0x332d,
+ _,_,0x3530,_,0x3833,_,_,0x3b35,_,_,0x3e38,_,_,
+ 0x403b,_,0x403e}
+ },
+ // QUANT_32, range 0..31
+ {
+ {0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64},
+ {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31},
+ {0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64},
+ {0x0200,_,0x0400,_,0x0602,_,0x0804,_,0x0a06,_,
+ 0x0c08,_,0x0e0a,_,0x100c,_,0x120e,_,0x1410,_,
+ 0x1612,_,0x1814,_,0x1a16,_,0x1c18,_,0x1e1a,_,
+ 0x221c,_,_,_,0x241e,_,0x2622,_,0x2824,_,0x2a26,_,
+ 0x2c28,_,0x2e2a,_,0x302c,_,0x322e,_,0x3430,_,
+ 0x3632,_,0x3834,_,0x3a36,_,0x3c38,_,0x3e3a,_,
+ 0x403c,_,0x403e}
+ }
+};
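In each table entry above, the final 65-entry array packs, for every representable unquantized weight value (0..64), its two neighbouring representable values into one 16-bit word; the `_` slots are never read. A minimal decoding sketch, assuming only the byte layout that can be read off the data itself (for example the QUANT_6 table stores 0x270c at index 25, i.e. previous = 12, next = 39); the helper name is hypothetical and not part of astcenc:

#include <cstdint>

// Hypothetical helper: the low byte holds the previous representable
// unquantized value, the high byte holds the next one; both clamp to the
// same value at the two ends of the 0..64 range.
static inline void decode_prev_next(uint16_t packed, uint8_t& prev, uint8_t& next)
{
	prev = static_cast<uint8_t>(packed & 0xFFu); // 0x270c -> 12
	next = static_cast<uint8_t>(packed >> 8);    // 0x270c -> 39
}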
diff --git a/thirdparty/astcenc/patches/fix-build-no-ssse3.patch b/thirdparty/astcenc/patches/fix-build-no-ssse3.patch
new file mode 100644
index 0000000000..9da4f3e1f3
--- /dev/null
+++ b/thirdparty/astcenc/patches/fix-build-no-ssse3.patch
@@ -0,0 +1,81 @@
+From 02c22d3df501dc284ba732fa82a6c408c57b3237 Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?R=C3=A9mi=20Verschelde?= <rverschelde@gmail.com>
+Date: Thu, 19 Jan 2023 23:30:13 +0100
+Subject: [PATCH] mathlib: Remove incomplete support for SSE3 which assumed
+ SSSE3
+
+`_mm_shuffle_epi8` requires SSSE3 so the check on `ASTCENC_SSE >= 30` is
+too lax and would fail if `__SSE3__` is supported, but not `__SSSE3__`.
+
+The only supported configurations are SSE2, SSE4.1, and AVX2, so as
+discussed in #393 we drop the SSE3 checks and require SSE4.1 instead.
+---
+ Source/astcenc_mathlib.h | 2 --
+ Source/astcenc_vecmathlib_sse_4.h | 10 +++++-----
+ 2 files changed, 5 insertions(+), 7 deletions(-)
+
+diff --git a/Source/astcenc_mathlib.h b/Source/astcenc_mathlib.h
+index 67e989e..0540c4f 100644
+--- a/Source/astcenc_mathlib.h
++++ b/Source/astcenc_mathlib.h
+@@ -48,8 +48,6 @@
+ #define ASTCENC_SSE 42
+ #elif defined(__SSE4_1__)
+ #define ASTCENC_SSE 41
+- #elif defined(__SSE3__)
+- #define ASTCENC_SSE 30
+ #elif defined(__SSE2__)
+ #define ASTCENC_SSE 20
+ #else
+diff --git a/Source/astcenc_vecmathlib_sse_4.h b/Source/astcenc_vecmathlib_sse_4.h
+index 76fe577..26dcc4a 100644
+--- a/Source/astcenc_vecmathlib_sse_4.h
++++ b/Source/astcenc_vecmathlib_sse_4.h
+@@ -1046,7 +1046,7 @@ ASTCENC_SIMD_INLINE void vtable_prepare(vint4 t0, vint4& t0p)
+ */
+ ASTCENC_SIMD_INLINE void vtable_prepare(vint4 t0, vint4 t1, vint4& t0p, vint4& t1p)
+ {
+-#if ASTCENC_SSE >= 30
++#if ASTCENC_SSE >= 41
+ t0p = t0;
+ t1p = t0 ^ t1;
+ #else
+@@ -1062,7 +1062,7 @@ ASTCENC_SIMD_INLINE void vtable_prepare(
+ vint4 t0, vint4 t1, vint4 t2, vint4 t3,
+ vint4& t0p, vint4& t1p, vint4& t2p, vint4& t3p)
+ {
+-#if ASTCENC_SSE >= 30
++#if ASTCENC_SSE >= 41
+ t0p = t0;
+ t1p = t0 ^ t1;
+ t2p = t1 ^ t2;
+@@ -1080,7 +1080,7 @@ ASTCENC_SIMD_INLINE void vtable_prepare(
+ */
+ ASTCENC_SIMD_INLINE vint4 vtable_8bt_32bi(vint4 t0, vint4 idx)
+ {
+-#if ASTCENC_SSE >= 30
++#if ASTCENC_SSE >= 41
+ // Set index byte MSB to 1 for unused bytes so shuffle returns zero
+ __m128i idxx = _mm_or_si128(idx.m, _mm_set1_epi32(static_cast<int>(0xFFFFFF00)));
+
+@@ -1102,7 +1102,7 @@ ASTCENC_SIMD_INLINE vint4 vtable_8bt_32bi(vint4 t0, vint4 idx)
+ */
+ ASTCENC_SIMD_INLINE vint4 vtable_8bt_32bi(vint4 t0, vint4 t1, vint4 idx)
+ {
+-#if ASTCENC_SSE >= 30
++#if ASTCENC_SSE >= 41
+ // Set index byte MSB to 1 for unused bytes so shuffle returns zero
+ __m128i idxx = _mm_or_si128(idx.m, _mm_set1_epi32(static_cast<int>(0xFFFFFF00)));
+
+@@ -1130,7 +1130,7 @@ ASTCENC_SIMD_INLINE vint4 vtable_8bt_32bi(vint4 t0, vint4 t1, vint4 idx)
+ */
+ ASTCENC_SIMD_INLINE vint4 vtable_8bt_32bi(vint4 t0, vint4 t1, vint4 t2, vint4 t3, vint4 idx)
+ {
+-#if ASTCENC_SSE >= 30
++#if ASTCENC_SSE >= 41
+ // Set index byte MSB to 1 for unused bytes so shuffle returns zero
+ __m128i idxx = _mm_or_si128(idx.m, _mm_set1_epi32(static_cast<int>(0xFFFFFF00)));
+
+--
+2.39.1
+
diff --git a/thirdparty/basis_universal/encoder/basisu_comp.cpp b/thirdparty/basis_universal/encoder/basisu_comp.cpp
index 166a1c4fe0..41eae2b78a 100644
--- a/thirdparty/basis_universal/encoder/basisu_comp.cpp
+++ b/thirdparty/basis_universal/encoder/basisu_comp.cpp
@@ -1501,7 +1501,8 @@ namespace basisu
if (m_params.m_compute_stats)
{
- printf("Slice: %u\n", slice_index);
+ if (m_params.m_print_stats)
+ printf("Slice: %u\n", slice_index);
image_stats& s = m_stats[slice_index];
@@ -1511,81 +1512,100 @@ namespace basisu
// ---- .basis stats
em.calc(m_slice_images[slice_index], m_decoded_output_textures_unpacked[slice_index], 0, 3);
- em.print(".basis RGB Avg: ");
+ if (m_params.m_print_stats)
+ em.print(".basis RGB Avg: ");
s.m_basis_rgb_avg_psnr = em.m_psnr;
em.calc(m_slice_images[slice_index], m_decoded_output_textures_unpacked[slice_index], 0, 4);
- em.print(".basis RGBA Avg: ");
+ if (m_params.m_print_stats)
+ em.print(".basis RGBA Avg: ");
s.m_basis_rgba_avg_psnr = em.m_psnr;
em.calc(m_slice_images[slice_index], m_decoded_output_textures_unpacked[slice_index], 0, 1);
- em.print(".basis R Avg: ");
+ if (m_params.m_print_stats)
+ em.print(".basis R Avg: ");
em.calc(m_slice_images[slice_index], m_decoded_output_textures_unpacked[slice_index], 1, 1);
- em.print(".basis G Avg: ");
+ if (m_params.m_print_stats)
+ em.print(".basis G Avg: ");
em.calc(m_slice_images[slice_index], m_decoded_output_textures_unpacked[slice_index], 2, 1);
- em.print(".basis B Avg: ");
+ if (m_params.m_print_stats)
+ em.print(".basis B Avg: ");
if (m_params.m_uastc)
{
em.calc(m_slice_images[slice_index], m_decoded_output_textures_unpacked[slice_index], 3, 1);
- em.print(".basis A Avg: ");
+ if (m_params.m_print_stats)
+ em.print(".basis A Avg: ");
s.m_basis_a_avg_psnr = em.m_psnr;
}
em.calc(m_slice_images[slice_index], m_decoded_output_textures_unpacked[slice_index], 0, 0);
- em.print(".basis 709 Luma: ");
+ if (m_params.m_print_stats)
+ em.print(".basis 709 Luma: ");
s.m_basis_luma_709_psnr = static_cast<float>(em.m_psnr);
s.m_basis_luma_709_ssim = static_cast<float>(em.m_ssim);
em.calc(m_slice_images[slice_index], m_decoded_output_textures_unpacked[slice_index], 0, 0, true, true);
- em.print(".basis 601 Luma: ");
+ if (m_params.m_print_stats)
+ em.print(".basis 601 Luma: ");
s.m_basis_luma_601_psnr = static_cast<float>(em.m_psnr);
if (m_slice_descs.size() == 1)
{
const uint32_t output_size = comp_size ? (uint32_t)comp_size : (uint32_t)comp_data.size();
- debug_printf(".basis RGB PSNR per bit/texel*10000: %3.3f\n", 10000.0f * s.m_basis_rgb_avg_psnr / ((output_size * 8.0f) / (slice_desc.m_orig_width * slice_desc.m_orig_height)));
- debug_printf(".basis Luma 709 PSNR per bit/texel*10000: %3.3f\n", 10000.0f * s.m_basis_luma_709_psnr / ((output_size * 8.0f) / (slice_desc.m_orig_width * slice_desc.m_orig_height)));
+ if (m_params.m_print_stats)
+ {
+ debug_printf(".basis RGB PSNR per bit/texel*10000: %3.3f\n", 10000.0f * s.m_basis_rgb_avg_psnr / ((output_size * 8.0f) / (slice_desc.m_orig_width * slice_desc.m_orig_height)));
+ debug_printf(".basis Luma 709 PSNR per bit/texel*10000: %3.3f\n", 10000.0f * s.m_basis_luma_709_psnr / ((output_size * 8.0f) / (slice_desc.m_orig_width * slice_desc.m_orig_height)));
+ }
}
if (m_decoded_output_textures_unpacked_bc7[slice_index].get_width())
{
// ---- BC7 stats
em.calc(m_slice_images[slice_index], m_decoded_output_textures_unpacked_bc7[slice_index], 0, 3);
- em.print("BC7 RGB Avg: ");
+ if (m_params.m_print_stats)
+ em.print("BC7 RGB Avg: ");
s.m_bc7_rgb_avg_psnr = em.m_psnr;
em.calc(m_slice_images[slice_index], m_decoded_output_textures_unpacked_bc7[slice_index], 0, 4);
- em.print("BC7 RGBA Avg: ");
+ if (m_params.m_print_stats)
+ em.print("BC7 RGBA Avg: ");
s.m_bc7_rgba_avg_psnr = em.m_psnr;
em.calc(m_slice_images[slice_index], m_decoded_output_textures_unpacked_bc7[slice_index], 0, 1);
- em.print("BC7 R Avg: ");
+ if (m_params.m_print_stats)
+ em.print("BC7 R Avg: ");
em.calc(m_slice_images[slice_index], m_decoded_output_textures_unpacked_bc7[slice_index], 1, 1);
- em.print("BC7 G Avg: ");
+ if (m_params.m_print_stats)
+ em.print("BC7 G Avg: ");
em.calc(m_slice_images[slice_index], m_decoded_output_textures_unpacked_bc7[slice_index], 2, 1);
- em.print("BC7 B Avg: ");
+ if (m_params.m_print_stats)
+ em.print("BC7 B Avg: ");
if (m_params.m_uastc)
{
em.calc(m_slice_images[slice_index], m_decoded_output_textures_unpacked_bc7[slice_index], 3, 1);
- em.print("BC7 A Avg: ");
+ if (m_params.m_print_stats)
+ em.print("BC7 A Avg: ");
s.m_bc7_a_avg_psnr = em.m_psnr;
}
em.calc(m_slice_images[slice_index], m_decoded_output_textures_unpacked_bc7[slice_index], 0, 0);
- em.print("BC7 709 Luma: ");
+ if (m_params.m_print_stats)
+ em.print("BC7 709 Luma: ");
s.m_bc7_luma_709_psnr = static_cast<float>(em.m_psnr);
s.m_bc7_luma_709_ssim = static_cast<float>(em.m_ssim);
em.calc(m_slice_images[slice_index], m_decoded_output_textures_unpacked_bc7[slice_index], 0, 0, true, true);
- em.print("BC7 601 Luma: ");
+ if (m_params.m_print_stats)
+ em.print("BC7 601 Luma: ");
s.m_bc7_luma_601_psnr = static_cast<float>(em.m_psnr);
}
@@ -1593,16 +1613,19 @@ namespace basisu
{
// ---- Nearly best possible ETC1S stats
em.calc(m_slice_images[slice_index], m_best_etc1s_images_unpacked[slice_index], 0, 3);
- em.print("Unquantized ETC1S RGB Avg: ");
+ if (m_params.m_print_stats)
+ em.print("Unquantized ETC1S RGB Avg: ");
s.m_best_etc1s_rgb_avg_psnr = static_cast<float>(em.m_psnr);
em.calc(m_slice_images[slice_index], m_best_etc1s_images_unpacked[slice_index], 0, 0);
- em.print("Unquantized ETC1S 709 Luma: ");
+ if (m_params.m_print_stats)
+ em.print("Unquantized ETC1S 709 Luma: ");
s.m_best_etc1s_luma_709_psnr = static_cast<float>(em.m_psnr);
s.m_best_etc1s_luma_709_ssim = static_cast<float>(em.m_ssim);
em.calc(m_slice_images[slice_index], m_best_etc1s_images_unpacked[slice_index], 0, 0, true, true);
- em.print("Unquantized ETC1S 601 Luma: ");
+ if (m_params.m_print_stats)
+ em.print("Unquantized ETC1S 601 Luma: ");
s.m_best_etc1s_luma_601_psnr = static_cast<float>(em.m_psnr);
}
}
@@ -2311,6 +2334,8 @@ namespace basisu
}
comp_params.m_compute_stats = (pStats != nullptr);
+ comp_params.m_print_stats = (flags_and_quality & cFlagPrintStats) != 0;
+ comp_params.m_status_output = (flags_and_quality & cFlagPrintStatus) != 0;
// Create the compressor, initialize it, and process the input
basis_compressor comp;
@@ -2328,6 +2353,11 @@ namespace basisu
return nullptr;
}
+ if ((pStats) && (comp.get_opencl_failed()))
+ {
+ pStats->m_opencl_failed = true;
+ }
+
// Get the output file data and return it to the caller
void* pFile_data = nullptr;
const uint8_vec* pFile_data_vec = comp_params.m_create_ktx2_file ? &comp.get_output_ktx2_file() : &comp.get_output_basis_file();
@@ -2388,4 +2418,108 @@ namespace basisu
free(p);
}
+ bool basis_benchmark_etc1s_opencl(bool* pOpenCL_failed)
+ {
+ if (pOpenCL_failed)
+ *pOpenCL_failed = false;
+
+ if (!opencl_is_available())
+ {
+ error_printf("basis_benchmark_etc1s_opencl: OpenCL support must be enabled first!\n");
+ return false;
+ }
+
+ const uint32_t W = 1024, H = 1024;
+ basisu::vector<image> images;
+ image& img = images.enlarge(1)->resize(W, H);
+
+ const uint32_t NUM_RAND_LETTERS = 6000;// 40000;
+
+ rand r;
+ r.seed(200);
+
+ for (uint32_t i = 0; i < NUM_RAND_LETTERS; i++)
+ {
+ uint32_t x = r.irand(0, W - 1), y = r.irand(0, H - 1);
+ uint32_t sx = r.irand(1, 4), sy = r.irand(1, 4);
+ color_rgba c(r.byte(), r.byte(), r.byte(), 255);
+
+ img.debug_text(x, y, sx, sy, c, nullptr, false, "%c", static_cast<char>(r.irand(32, 127)));
+ }
+
+ //save_png("test.png", img);
+
+ image_stats stats;
+
+ uint32_t flags_and_quality = cFlagSRGB | cFlagThreaded | 255;
+ size_t comp_size = 0;
+
+ double best_cpu_time = 1e+9f, best_gpu_time = 1e+9f;
+
+ const uint32_t TIMES_TO_ENCODE = 2;
+ interval_timer tm;
+
+ for (uint32_t i = 0; i < TIMES_TO_ENCODE; i++)
+ {
+ tm.start();
+ void* pComp_data = basis_compress(
+ images,
+ flags_and_quality, 1.0f,
+ &comp_size,
+ &stats);
+ double cpu_time = tm.get_elapsed_secs();
+ if (!pComp_data)
+ {
+ error_printf("basis_benchmark_etc1s_opencl: basis_compress() failed (CPU)!\n");
+ return false;
+ }
+
+ best_cpu_time = minimum(best_cpu_time, cpu_time);
+
+ basis_free_data(pComp_data);
+ }
+
+ printf("Best CPU time: %3.3f\n", best_cpu_time);
+
+ for (uint32_t i = 0; i < TIMES_TO_ENCODE; i++)
+ {
+ tm.start();
+ void* pComp_data = basis_compress(
+ images,
+ flags_and_quality | cFlagUseOpenCL, 1.0f,
+ &comp_size,
+ &stats);
+
+ if (stats.m_opencl_failed)
+ {
+ error_printf("basis_benchmark_etc1s_opencl: OpenCL failed!\n");
+
+ basis_free_data(pComp_data);
+
+ if (pOpenCL_failed)
+ *pOpenCL_failed = true;
+
+ return false;
+ }
+
+ double gpu_time = tm.get_elapsed_secs();
+ if (!pComp_data)
+ {
+ error_printf("basis_benchmark_etc1s_opencl: basis_compress() failed (GPU)!\n");
+ return false;
+ }
+
+ best_gpu_time = minimum(best_gpu_time, gpu_time);
+
+ basis_free_data(pComp_data);
+ }
+
+ printf("Best GPU time: %3.3f\n", best_gpu_time);
+
+ return best_gpu_time < best_cpu_time;
+ }
+
} // namespace basisu
+
+
+
diff --git a/thirdparty/basis_universal/encoder/basisu_comp.h b/thirdparty/basis_universal/encoder/basisu_comp.h
index aa5ea6fec3..b6c9fef9e2 100644
--- a/thirdparty/basis_universal/encoder/basisu_comp.h
+++ b/thirdparty/basis_universal/encoder/basisu_comp.h
@@ -92,6 +92,8 @@ namespace basisu
m_best_etc1s_luma_709_psnr = 0.0f;
m_best_etc1s_luma_601_psnr = 0.0f;
m_best_etc1s_luma_709_ssim = 0.0f;
+
+ m_opencl_failed = false;
}
std::string m_filename;
@@ -119,6 +121,8 @@ namespace basisu
float m_best_etc1s_luma_709_psnr;
float m_best_etc1s_luma_601_psnr;
float m_best_etc1s_luma_709_ssim;
+
+ bool m_opencl_failed;
};
template<bool def>
@@ -255,6 +259,7 @@ namespace basisu
m_write_output_basis_files.clear();
m_compression_level.clear();
m_compute_stats.clear();
+ m_print_stats.clear();
m_check_for_alpha.clear();
m_force_alpha.clear();
m_multithreading.clear();
@@ -373,6 +378,9 @@ namespace basisu
// Compute and display image metrics
bool_param<false> m_compute_stats;
+
+ // Print stats to stdout, if m_compute_stats is true.
+ bool_param<true> m_print_stats;
// Check to see if any input image has an alpha channel, if so then the output basis file will have alpha channels
bool_param<true> m_check_for_alpha;
@@ -583,11 +591,16 @@ namespace basisu
cFlagYFlip = 1 << 16, // flip source image on Y axis before compression
cFlagUASTC = 1 << 17, // use UASTC compression vs. ETC1S
- cFlagUASTCRDO = 1 << 18 // use RDO postprocessing when generating UASTC files (must set uastc_rdo_quality to the quality scalar)
+ cFlagUASTCRDO = 1 << 18, // use RDO postprocessing when generating UASTC files (must set uastc_rdo_quality to the quality scalar)
+
+ cFlagPrintStats = 1 << 19, // print image stats to stdout
+ cFlagPrintStatus = 1 << 20 // print status to stdout
};
// This function accepts an array of source images.
// If more than one image is provided, it's assumed the images form a mipmap pyramid and automatic mipmap generation is disabled.
+ // Returns a pointer to the compressed .basis or .ktx2 file data. *pSize is the size of the compressed data. The returned block must be freed using basis_free_data().
+ // basisu_encoder_init() MUST be called first!
void* basis_compress(
const basisu::vector<image> &source_images,
uint32_t flags_and_quality, float uastc_rdo_quality,
@@ -604,6 +617,12 @@ namespace basisu
// Frees the dynamically allocated file data returned by basis_compress().
void basis_free_data(void* p);
+ // Runs a short benchmark using synthetic image data to time OpenCL encoding vs. CPU encoding, with multithreading enabled.
+	// Returns true if OpenCL is worth using on this system, otherwise false.
+	// If pOpenCL_failed is not null, it will be set to true if OpenCL encoding failed *on this particular machine/driver/BasisU version* and the encoder fell back to CPU encoding.
+ // basisu_encoder_init() MUST be called first. If OpenCL support wasn't enabled this always returns false.
+ bool basis_benchmark_etc1s_opencl(bool *pOpenCL_failed = nullptr);
+
// Parallel compression API
struct parallel_results
{
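Putting the documented contract above together with the benchmark added in basisu_comp.cpp, a minimal one-shot ETC1S compression sketch looks roughly as follows; error handling and encoder-init options are elided, the 128 quality value and the flag combination are illustrative, and the assumption that the quality lives in the low bits of flags_and_quality comes from the benchmark's use of cFlagSRGB | cFlagThreaded | 255:

basisu::basisu_encoder_init();                       // must be called before compressing

basisu::vector<basisu::image> images;
basisu::image& img = images.enlarge(1)->resize(256, 256);
// ... fill img with pixel data ...

size_t comp_size = 0;
basisu::image_stats stats;
void* pData = basisu::basis_compress(images,
	basisu::cFlagSRGB | basisu::cFlagThreaded | 128, // flags plus ETC1S quality (assumed low bits)
	1.0f,                                            // uastc_rdo_quality, unused for ETC1S
	&comp_size, &stats);
if (pData)
	basisu::basis_free_data(pData);                  // returned block must be freed by the caller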
diff --git a/thirdparty/basis_universal/encoder/basisu_enc.cpp b/thirdparty/basis_universal/encoder/basisu_enc.cpp
index b427215ee3..c431ceaf12 100644
--- a/thirdparty/basis_universal/encoder/basisu_enc.cpp
+++ b/thirdparty/basis_universal/encoder/basisu_enc.cpp
@@ -187,6 +187,8 @@ namespace basisu
opencl_init(opencl_force_serialization);
}
+ interval_timer::init(); // make sure interval_timer globals are initialized from main thread to avoid TSAN reports
+
g_library_initialized = true;
}
@@ -227,7 +229,7 @@ namespace basisu
{
QueryPerformanceFrequency(reinterpret_cast<LARGE_INTEGER*>(pTicks));
}
-#elif defined(__APPLE__) || defined(__FreeBSD__) || defined(__OpenBSD__)
+#elif defined(__APPLE__) || defined(__FreeBSD__) || defined(__OpenBSD__) || defined(__EMSCRIPTEN__)
#include <sys/time.h>
inline void query_counter(timer_ticks* pTicks)
{
diff --git a/thirdparty/basis_universal/encoder/basisu_frontend.cpp b/thirdparty/basis_universal/encoder/basisu_frontend.cpp
index 00210e6679..1f30a33c70 100644
--- a/thirdparty/basis_universal/encoder/basisu_frontend.cpp
+++ b/thirdparty/basis_universal/encoder/basisu_frontend.cpp
@@ -2328,8 +2328,6 @@ namespace basisu
m_optimized_cluster_selectors.resize(total_selector_clusters);
- uint32_t total_clusters_processed = 0;
-
// For each selector codebook entry, and for each of the 4x4 selectors, determine which selector minimizes the error across all the blocks that use that quantized selector.
const uint32_t N = 256;
for (uint32_t cluster_index_iter = 0; cluster_index_iter < total_selector_clusters; cluster_index_iter += N)
@@ -2338,7 +2336,7 @@ namespace basisu
const uint32_t last_index = minimum<uint32_t>((uint32_t)total_selector_clusters, cluster_index_iter + N);
#ifndef __EMSCRIPTEN__
- m_params.m_pJob_pool->add_job([this, first_index, last_index, &total_clusters_processed, &total_selector_clusters] {
+ m_params.m_pJob_pool->add_job([this, first_index, last_index] {
#endif
for (uint32_t cluster_index = first_index; cluster_index < last_index; cluster_index++)
diff --git a/thirdparty/basis_universal/transcoder/basisu_transcoder.cpp b/thirdparty/basis_universal/transcoder/basisu_transcoder.cpp
index 630731900f..3aeba0ee7a 100644
--- a/thirdparty/basis_universal/transcoder/basisu_transcoder.cpp
+++ b/thirdparty/basis_universal/transcoder/basisu_transcoder.cpp
@@ -16867,7 +16867,7 @@ namespace basist
{
m_format = basist::basis_tex_format::cETC1S;
- // 3.10.2: "Whether the image has 1 or 2 slices can be determined from the DFD’s sample count."
+ // 3.10.2: "Whether the image has 1 or 2 slices can be determined from the DFD's sample count."
// If m_has_alpha is true it may be 2-channel RRRG or 4-channel RGBA, but we let the caller deal with that.
m_has_alpha = (m_header.m_dfd_byte_length == 60);
diff --git a/thirdparty/certs/ca-certificates.crt b/thirdparty/certs/ca-certificates.crt
index 036f630d5b..5ed6adf574 100644
--- a/thirdparty/certs/ca-certificates.crt
+++ b/thirdparty/certs/ca-certificates.crt
@@ -1,7 +1,7 @@
##
## Bundle of CA Root Certificates
##
-## Certificate data from Mozilla as of: Tue Jul 19 14:20:01 2022 GMT
+## Certificate data from Mozilla as of: Fri Oct 21 16:14:43 2022 GMT
##
## This is a bundle of X.509 certificates of public Certificate Authorities
## (CA). These were automatically extracted from Mozilla's root certificates
@@ -14,7 +14,7 @@
## Just configure this file as the SSLCACertificateFile.
##
## Conversion done with mk-ca-bundle.pl version 1.29.
-## SHA256: 9bf3799611fb58197f61d45e71ce3dc19f30e7dd73731915872ce5108a7bb066
+## SHA256: 3ff8bd209b5f2e739b9f2b96eacb694a774114685b02978257824f37ff528f71
##
@@ -3458,3 +3458,49 @@ zPUwHQYDVR0OBBYEFP+CMXI++cRmbK04ntGwUYilkMz1MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjO
PQQDAwNpADBmAjEA5gVYaWHlLcoNy/EZCL3W/VGSGn5jVASQkZo1kTmZ+gepZpO6yGjUij/67W4W
Aie3AjEA3VoXK3YdZUKWpqxdinlW2Iob35reX8dQj7FbcQwm32pAAOwzkSFxvmjkI6TZraE3
-----END CERTIFICATE-----
+
+Security Communication RootCA3
+==============================
+-----BEGIN CERTIFICATE-----
+MIIFfzCCA2egAwIBAgIJAOF8N0D9G/5nMA0GCSqGSIb3DQEBDAUAMF0xCzAJBgNVBAYTAkpQMSUw
+IwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMScwJQYDVQQDEx5TZWN1cml0eSBD
+b21tdW5pY2F0aW9uIFJvb3RDQTMwHhcNMTYwNjE2MDYxNzE2WhcNMzgwMTE4MDYxNzE2WjBdMQsw
+CQYDVQQGEwJKUDElMCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UE
+AxMeU2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EzMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A
+MIICCgKCAgEA48lySfcw3gl8qUCBWNO0Ot26YQ+TUG5pPDXC7ltzkBtnTCHsXzW7OT4rCmDvu20r
+hvtxosis5FaU+cmvsXLUIKx00rgVrVH+hXShuRD+BYD5UpOzQD11EKzAlrenfna84xtSGc4RHwsE
+NPXY9Wk8d/Nk9A2qhd7gCVAEF5aEt8iKvE1y/By7z/MGTfmfZPd+pmaGNXHIEYBMwXFAWB6+oHP2
+/D5Q4eAvJj1+XCO1eXDe+uDRpdYMQXF79+qMHIjH7Iv10S9VlkZ8WjtYO/u62C21Jdp6Ts9EriGm
+npjKIG58u4iFW/vAEGK78vknR+/RiTlDxN/e4UG/VHMgly1s2vPUB6PmudhvrvyMGS7TZ2crldtY
+XLVqAvO4g160a75BflcJdURQVc1aEWEhCmHCqYj9E7wtiS/NYeCVvsq1e+F7NGcLH7YMx3weGVPK
+p7FKFSBWFHA9K4IsD50VHUeAR/94mQ4xr28+j+2GaR57GIgUssL8gjMunEst+3A7caoreyYn8xrC
+3PsXuKHqy6C0rtOUfnrQq8PsOC0RLoi/1D+tEjtCrI8Cbn3M0V9hvqG8OmpI6iZVIhZdXw3/JzOf
+GAN0iltSIEdrRU0id4xVJ/CvHozJgyJUt5rQT9nO/NkuHJYosQLTA70lUhw0Zk8jq/R3gpYd0Vcw
+CBEF/VfR2ccCAwEAAaNCMEAwHQYDVR0OBBYEFGQUfPxYchamCik0FW8qy7z8r6irMA4GA1UdDwEB
+/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBDAUAA4ICAQDcAiMI4u8hOscNtybS
+YpOnpSNyByCCYN8Y11StaSWSntkUz5m5UoHPrmyKO1o5yGwBQ8IibQLwYs1OY0PAFNr0Y/Dq9HHu
+Tofjcan0yVflLl8cebsjqodEV+m9NU1Bu0soo5iyG9kLFwfl9+qd9XbXv8S2gVj/yP9kaWJ5rW4O
+H3/uHWnlt3Jxs/6lATWUVCvAUm2PVcTJ0rjLyjQIUYWg9by0F1jqClx6vWPGOi//lkkZhOpn2ASx
+YfQAW0q3nHE3GYV5v4GwxxMOdnE+OoAGrgYWp421wsTL/0ClXI2lyTrtcoHKXJg80jQDdwj98ClZ
+XSEIx2C/pHF7uNkegr4Jr2VvKKu/S7XuPghHJ6APbw+LP6yVGPO5DtxnVW5inkYO0QR4ynKudtml
++LLfiAlhi+8kTtFZP1rUPcmTPCtk9YENFpb3ksP+MW/oKjJ0DvRMmEoYDjBU1cXrvMUVnuiZIesn
+KwkK2/HmcBhWuwzkvvnoEKQTkrgc4NtnHVMDpCKn3F2SEDzq//wbEBrD2NCcnWXL0CsnMQMeNuE9
+dnUM/0Umud1RvCPHX9jYhxBAEg09ODfnRDwYwFMJZI//1ZqmfHAuc1Uh6N//g7kdPjIe1qZ9LPFm
+6Vwdp6POXiUyK+OVrCoHzrQoeIY8LaadTdJ0MN1kURXbg4NR16/9M51NZg==
+-----END CERTIFICATE-----
+
+Security Communication ECC RootCA1
+==================================
+-----BEGIN CERTIFICATE-----
+MIICODCCAb6gAwIBAgIJANZdm7N4gS7rMAoGCCqGSM49BAMDMGExCzAJBgNVBAYTAkpQMSUwIwYD
+VQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMSswKQYDVQQDEyJTZWN1cml0eSBDb21t
+dW5pY2F0aW9uIEVDQyBSb290Q0ExMB4XDTE2MDYxNjA1MTUyOFoXDTM4MDExODA1MTUyOFowYTEL
+MAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKzApBgNV
+BAMTIlNlY3VyaXR5IENvbW11bmljYXRpb24gRUNDIFJvb3RDQTEwdjAQBgcqhkjOPQIBBgUrgQQA
+IgNiAASkpW9gAwPDvTH00xecK4R1rOX9PVdu12O/5gSJko6BnOPpR27KkBLIE+CnnfdldB9sELLo
+5OnvbYUymUSxXv3MdhDYW72ixvnWQuRXdtyQwjWpS4g8EkdtXP9JTxpKULGjQjBAMB0GA1UdDgQW
+BBSGHOf+LaVKiwj+KBH6vqNm+GBZLzAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAK
+BggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3L
+snNdo4gIxwwCMQDAqy0Obe0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70e
+N9k=
+-----END CERTIFICATE-----
diff --git a/thirdparty/cvtt/ConvectionKernels_BC67.cpp b/thirdparty/cvtt/ConvectionKernels_BC67.cpp
index 791859b232..021d658c08 100644
--- a/thirdparty/cvtt/ConvectionKernels_BC67.cpp
+++ b/thirdparty/cvtt/ConvectionKernels_BC67.cpp
@@ -726,10 +726,10 @@ namespace cvtt
if (carry)
{
uint32_t bitMask = (1 << carry) - 1;
- for (int i = 0; i < 4; i++)
+ for (int i = 0; i < entriesRemaining; i++)
{
m_vector[i] >>= carry;
- if (i != 3)
+ if (i != entriesRemaining - 1)
m_vector[i] |= (m_vector[i + 1] & bitMask) << (32 - carry);
}
}
@@ -3058,14 +3058,11 @@ void cvtt::Internal::BC6HComputer::SignExtendSingle(int &v, int bits)
void cvtt::Internal::BC6HComputer::UnpackOne(PixelBlockF16 &output, const uint8_t *pBC, bool isSigned)
{
- UnpackingVector pv;
- pv.Init(pBC);
-
int numModeBits = 2;
- int modeBits = pv.Unpack(2);
+ int modeBits = pBC[0] & 0x3;
if (modeBits != 0 && modeBits != 1)
{
- modeBits |= pv.Unpack(3) << 2;
+ modeBits = pBC[0] & 0x1f;
numModeBits += 3;
}
@@ -3102,6 +3099,9 @@ void cvtt::Internal::BC6HComputer::UnpackOne(PixelBlockF16 &output, const uint8_
for (int ch = 0; ch < 3; ch++)
eps[subset][epi][ch] = 0;
+ UnpackingVector pv;
+ pv.Init(pBC);
+
{
uint32_t header[3];
uint16_t codedEPs[2][2][3];
diff --git a/thirdparty/embree/common/algorithms/parallel_for.h b/thirdparty/embree/common/algorithms/parallel_for.h
index 645681ac63..6d411e4852 100644
--- a/thirdparty/embree/common/algorithms/parallel_for.h
+++ b/thirdparty/embree/common/algorithms/parallel_for.h
@@ -26,7 +26,6 @@ namespace embree
abort();
// -- GODOT end --
}
-
#elif defined(TASKING_TBB)
#if TBB_INTERFACE_VERSION >= 12002
tbb::task_group_context context;
diff --git a/thirdparty/embree/common/algorithms/parallel_for_for.h b/thirdparty/embree/common/algorithms/parallel_for_for.h
index 92c37a4a38..7838ef11b3 100644
--- a/thirdparty/embree/common/algorithms/parallel_for_for.h
+++ b/thirdparty/embree/common/algorithms/parallel_for_for.h
@@ -30,15 +30,20 @@ namespace embree
template<typename ArrayArray>
__forceinline ParallelForForState (ArrayArray& array2, const size_t minStepSize) {
init(array2,minStepSize);
+ }
+
+ template<typename SizeFunc>
+ __forceinline ParallelForForState (const size_t numArrays, const SizeFunc& getSize, const size_t minStepSize) {
+ init(numArrays,getSize,minStepSize);
}
- template<typename ArrayArray>
- __forceinline void init ( ArrayArray& array2, const size_t minStepSize )
+ template<typename SizeFunc>
+ __forceinline void init ( const size_t numArrays, const SizeFunc& getSize, const size_t minStepSize )
{
/* first calculate total number of elements */
size_t N = 0;
- for (size_t i=0; i<array2.size(); i++) {
- N += array2[i] ? array2[i]->size() : 0;
+ for (size_t i=0; i<numArrays; i++) {
+ N += getSize(i);
}
this->N = N;
@@ -54,8 +59,8 @@ namespace embree
size_t k0 = (++taskIndex)*N/taskCount;
for (size_t i=0, k=0; taskIndex < taskCount; i++)
{
- assert(i<array2.size());
- size_t j=0, M = array2[i] ? array2[i]->size() : 0;
+ assert(i<numArrays);
+ size_t j=0, M = getSize(i);
while (j<M && k+M-j >= k0 && taskIndex < taskCount) {
assert(taskIndex<taskCount);
i0[taskIndex] = i;
@@ -67,6 +72,12 @@ namespace embree
}
}
+ template<typename ArrayArray>
+ __forceinline void init ( ArrayArray& array2, const size_t minStepSize )
+ {
+ init(array2.size(),[&](size_t i) { return array2[i] ? array2[i]->size() : 0; },minStepSize);
+ }
+
__forceinline size_t size() const {
return N;
}
diff --git a/thirdparty/embree/common/algorithms/parallel_for_for_prefix_sum.h b/thirdparty/embree/common/algorithms/parallel_for_for_prefix_sum.h
index b15b44a991..8c3f4aace7 100644
--- a/thirdparty/embree/common/algorithms/parallel_for_for_prefix_sum.h
+++ b/thirdparty/embree/common/algorithms/parallel_for_for_prefix_sum.h
@@ -17,15 +17,20 @@ namespace embree
__forceinline ParallelForForPrefixSumState (ArrayArray& array2, const size_t minStepSize)
: ParallelForForState(array2,minStepSize) {}
+ template<typename SizeFunc>
+ __forceinline ParallelForForPrefixSumState (size_t numArrays, const SizeFunc& getSize, const size_t minStepSize)
+ : ParallelForForState(numArrays,getSize,minStepSize) {}
+
ParallelPrefixSumState<Value> prefix_state;
};
- template<typename ArrayArray, typename Index, typename Value, typename Func, typename Reduction>
- __forceinline Value parallel_for_for_prefix_sum0( ParallelForForPrefixSumState<Value>& state, ArrayArray& array2, Index minStepSize,
- const Value& identity, const Func& func, const Reduction& reduction)
+ template<typename SizeFunc, typename Index, typename Value, typename Func, typename Reduction>
+ __forceinline Value parallel_for_for_prefix_sum0_( ParallelForForPrefixSumState<Value>& state, Index minStepSize,
+ const SizeFunc& getSize, const Value& identity, const Func& func, const Reduction& reduction)
{
/* calculate number of tasks to use */
const size_t taskCount = state.taskCount;
+
/* perform parallel prefix sum */
parallel_for(taskCount, [&](const size_t taskIndex)
{
@@ -38,9 +43,9 @@ namespace embree
size_t k=k0;
Value N=identity;
for (size_t i=i0; k<k1; i++) {
- const size_t size = array2[i] ? array2[i]->size() : 0;
+ const size_t size = getSize(i);
const size_t r0 = j0, r1 = min(size,r0+k1-k);
- if (r1 > r0) N = reduction(N, func(array2[i],range<Index>((Index)r0,(Index)r1),(Index)k,(Index)i));
+ if (r1 > r0) N = reduction(N, func((Index)i,range<Index>((Index)r0,(Index)r1),(Index)k));
k+=r1-r0; j0 = 0;
}
state.prefix_state.counts[taskIndex] = N;
@@ -58,9 +63,10 @@ namespace embree
return sum;
}
- template<typename ArrayArray, typename Index, typename Value, typename Func, typename Reduction>
- __forceinline Value parallel_for_for_prefix_sum1( ParallelForForPrefixSumState<Value>& state, ArrayArray& array2, Index minStepSize,
- const Value& identity, const Func& func, const Reduction& reduction)
+ template<typename SizeFunc, typename Index, typename Value, typename Func, typename Reduction>
+ __forceinline Value parallel_for_for_prefix_sum1_( ParallelForForPrefixSumState<Value>& state, Index minStepSize,
+ const SizeFunc& getSize,
+ const Value& identity, const Func& func, const Reduction& reduction)
{
/* calculate number of tasks to use */
const size_t taskCount = state.taskCount;
@@ -76,9 +82,9 @@ namespace embree
size_t k=k0;
Value N=identity;
for (size_t i=i0; k<k1; i++) {
- const size_t size = array2[i] ? array2[i]->size() : 0;
+ const size_t size = getSize(i);
const size_t r0 = j0, r1 = min(size,r0+k1-k);
- if (r1 > r0) N = reduction(N, func(array2[i],range<Index>((Index)r0,(Index)r1),(Index)k,(Index)i,reduction(state.prefix_state.sums[taskIndex],N)));
+ if (r1 > r0) N = reduction(N, func((Index)i,range<Index>((Index)r0,(Index)r1),(Index)k,reduction(state.prefix_state.sums[taskIndex],N)));
k+=r1-r0; j0 = 0;
}
state.prefix_state.counts[taskIndex] = N;
@@ -96,6 +102,30 @@ namespace embree
return sum;
}
+ template<typename ArrayArray, typename Index, typename Value, typename Func, typename Reduction>
+ __forceinline Value parallel_for_for_prefix_sum0( ParallelForForPrefixSumState<Value>& state,
+ ArrayArray& array2, Index minStepSize,
+ const Value& identity, const Func& func, const Reduction& reduction)
+ {
+ return parallel_for_for_prefix_sum0_(state,minStepSize,
+ [&](Index i) { return array2[i] ? array2[i]->size() : 0; },
+ identity,
+ [&](Index i, const range<Index>& r, Index k) { return func(array2[i], r, k, i); },
+ reduction);
+ }
+
+ template<typename ArrayArray, typename Index, typename Value, typename Func, typename Reduction>
+ __forceinline Value parallel_for_for_prefix_sum1( ParallelForForPrefixSumState<Value>& state,
+ ArrayArray& array2, Index minStepSize,
+ const Value& identity, const Func& func, const Reduction& reduction)
+ {
+ return parallel_for_for_prefix_sum1_(state,minStepSize,
+ [&](Index i) { return array2[i] ? array2[i]->size() : 0; },
+ identity,
+ [&](Index i, const range<Index>& r, Index k, const Value& base) { return func(array2[i], r, k, i, base); },
+ reduction);
+ }
+
template<typename ArrayArray, typename Value, typename Func, typename Reduction>
__forceinline Value parallel_for_for_prefix_sum0( ParallelForForPrefixSumState<Value>& state, ArrayArray& array2,
const Value& identity, const Func& func, const Reduction& reduction)
diff --git a/thirdparty/embree/common/algorithms/parallel_reduce.h b/thirdparty/embree/common/algorithms/parallel_reduce.h
index 8271372ea4..cd0078f2e6 100644
--- a/thirdparty/embree/common/algorithms/parallel_reduce.h
+++ b/thirdparty/embree/common/algorithms/parallel_reduce.h
@@ -26,7 +26,7 @@ namespace embree
const Index threadCount = (Index) TaskScheduler::threadCount();
taskCount = min(taskCount,threadCount,maxTasks);
- /* parallel invokation of all tasks */
+ /* parallel invocation of all tasks */
dynamic_large_stack_array(Value,values,taskCount,8192); // consumes at most 8192 bytes on the stack
parallel_for(taskCount, [&](const Index taskIndex) {
const Index k0 = first+(taskIndex+0)*(last-first)/taskCount;
diff --git a/thirdparty/embree/common/math/bbox.h b/thirdparty/embree/common/math/bbox.h
index bc43155358..e4eb3df9a4 100644
--- a/thirdparty/embree/common/math/bbox.h
+++ b/thirdparty/embree/common/math/bbox.h
@@ -77,7 +77,7 @@ namespace embree
return lower > upper;
}
-#if defined(__SSE__)
+#if defined(__SSE__) || defined(__ARM_NEON)
template<> __forceinline bool BBox<Vec3fa>::empty() const {
return !all(le_mask(lower,upper));
}
@@ -196,11 +196,11 @@ namespace embree
}
template<> __inline bool subset( const BBox<Vec3fa>& a, const BBox<Vec3fa>& b ) {
- return all(ge_mask(a.lower,b.lower)) & all(le_mask(a.upper,b.upper));
+ return all(ge_mask(a.lower,b.lower)) && all(le_mask(a.upper,b.upper));
}
template<> __inline bool subset( const BBox<Vec3fx>& a, const BBox<Vec3fx>& b ) {
- return all(ge_mask(a.lower,b.lower)) & all(le_mask(a.upper,b.upper));
+ return all(ge_mask(a.lower,b.lower)) && all(le_mask(a.upper,b.upper));
}
/*! blending */
@@ -228,11 +228,11 @@ namespace embree
/// SSE / AVX / MIC specializations
////////////////////////////////////////////////////////////////////////////////
-#if defined __SSE__
+#if defined (__SSE__) || defined(__ARM_NEON)
#include "../simd/sse.h"
#endif
-#if defined __AVX__
+#if defined (__AVX__)
#include "../simd/avx.h"
#endif
diff --git a/thirdparty/embree/common/math/color.h b/thirdparty/embree/common/math/color.h
index 529584ea16..e62e4ad2a4 100644
--- a/thirdparty/embree/common/math/color.h
+++ b/thirdparty/embree/common/math/color.h
@@ -152,21 +152,38 @@ namespace embree
}
__forceinline const Color rcp ( const Color& a )
{
+#if defined(__aarch64__)
+ __m128 reciprocal = _mm_rcp_ps(a.m128);
+ reciprocal = vmulq_f32(vrecpsq_f32(a.m128, reciprocal), reciprocal);
+ reciprocal = vmulq_f32(vrecpsq_f32(a.m128, reciprocal), reciprocal);
+ return (const Color)reciprocal;
+#else
#if defined(__AVX512VL__)
const Color r = _mm_rcp14_ps(a.m128);
#else
const Color r = _mm_rcp_ps(a.m128);
#endif
- return _mm_sub_ps(_mm_add_ps(r, r), _mm_mul_ps(_mm_mul_ps(r, r), a));
+ return _mm_add_ps(r,_mm_mul_ps(r, _mm_sub_ps(_mm_set1_ps(1.0f), _mm_mul_ps(a, r)))); // computes r + r * (1 - a * r)
+
+#endif //defined(__aarch64__)
}
__forceinline const Color rsqrt( const Color& a )
{
+#if defined(__aarch64__)
+ __m128 r = _mm_rsqrt_ps(a.m128);
+ r = vmulq_f32(r, vrsqrtsq_f32(vmulq_f32(a.m128, r), r));
+ r = vmulq_f32(r, vrsqrtsq_f32(vmulq_f32(a.m128, r), r));
+ return r;
+#else
+
#if defined(__AVX512VL__)
__m128 r = _mm_rsqrt14_ps(a.m128);
#else
__m128 r = _mm_rsqrt_ps(a.m128);
#endif
return _mm_add_ps(_mm_mul_ps(_mm_set1_ps(1.5f),r), _mm_mul_ps(_mm_mul_ps(_mm_mul_ps(a, _mm_set1_ps(-0.5f)), r), _mm_mul_ps(r, r)));
+
+#endif //defined(__aarch64__)
}
__forceinline const Color sqrt ( const Color& a ) { return _mm_sqrt_ps(a.m128); }
diff --git a/thirdparty/embree/common/math/constants.cpp b/thirdparty/embree/common/math/constants.cpp
index 03919ae20c..f51c642bfc 100644
--- a/thirdparty/embree/common/math/constants.cpp
+++ b/thirdparty/embree/common/math/constants.cpp
@@ -5,23 +5,4 @@
namespace embree
{
- TrueTy True;
- FalseTy False;
- ZeroTy zero;
- OneTy one;
- NegInfTy neg_inf;
- PosInfTy inf;
- PosInfTy pos_inf;
- NaNTy nan;
- UlpTy ulp;
- PiTy pi;
- OneOverPiTy one_over_pi;
- TwoPiTy two_pi;
- OneOverTwoPiTy one_over_two_pi;
- FourPiTy four_pi;
- OneOverFourPiTy one_over_four_pi;
- StepTy step;
- ReverseStepTy reverse_step;
- EmptyTy empty;
- UndefinedTy undefined;
}
diff --git a/thirdparty/embree/common/math/constants.h b/thirdparty/embree/common/math/constants.h
index 578473a8ab..07a1a868ba 100644
--- a/thirdparty/embree/common/math/constants.h
+++ b/thirdparty/embree/common/math/constants.h
@@ -24,13 +24,13 @@ namespace embree
__forceinline operator bool( ) const { return true; }
};
- extern MAYBE_UNUSED TrueTy True;
+ const constexpr TrueTy True = TrueTy();
struct FalseTy {
__forceinline operator bool( ) const { return false; }
};
- extern MAYBE_UNUSED FalseTy False;
+ const constexpr FalseTy False = FalseTy();
struct ZeroTy
{
@@ -48,7 +48,7 @@ namespace embree
__forceinline operator unsigned char ( ) const { return 0; }
};
- extern MAYBE_UNUSED ZeroTy zero;
+ const constexpr ZeroTy zero = ZeroTy();
struct OneTy
{
@@ -66,7 +66,7 @@ namespace embree
__forceinline operator unsigned char ( ) const { return 1; }
};
- extern MAYBE_UNUSED OneTy one;
+ const constexpr OneTy one = OneTy();
struct NegInfTy
{
@@ -85,7 +85,7 @@ namespace embree
};
- extern MAYBE_UNUSED NegInfTy neg_inf;
+ const constexpr NegInfTy neg_inf = NegInfTy();
struct PosInfTy
{
@@ -103,8 +103,8 @@ namespace embree
__forceinline operator unsigned char ( ) const { return std::numeric_limits<unsigned char>::max(); }
};
- extern MAYBE_UNUSED PosInfTy inf;
- extern MAYBE_UNUSED PosInfTy pos_inf;
+ const constexpr PosInfTy inf = PosInfTy();
+ const constexpr PosInfTy pos_inf = PosInfTy();
struct NaNTy
{
@@ -112,15 +112,15 @@ namespace embree
__forceinline operator float ( ) const { return std::numeric_limits<float>::quiet_NaN(); }
};
- extern MAYBE_UNUSED NaNTy nan;
+ const constexpr NaNTy nan = NaNTy();
struct UlpTy
{
__forceinline operator double( ) const { return std::numeric_limits<double>::epsilon(); }
__forceinline operator float ( ) const { return std::numeric_limits<float>::epsilon(); }
};
-
- extern MAYBE_UNUSED UlpTy ulp;
+
+ const constexpr UlpTy ulp = UlpTy();
struct PiTy
{
@@ -128,7 +128,7 @@ namespace embree
__forceinline operator float ( ) const { return float(M_PI); }
};
- extern MAYBE_UNUSED PiTy pi;
+ const constexpr PiTy pi = PiTy();
struct OneOverPiTy
{
@@ -136,7 +136,7 @@ namespace embree
__forceinline operator float ( ) const { return float(M_1_PI); }
};
- extern MAYBE_UNUSED OneOverPiTy one_over_pi;
+ const constexpr OneOverPiTy one_over_pi = OneOverPiTy();
struct TwoPiTy
{
@@ -144,7 +144,7 @@ namespace embree
__forceinline operator float ( ) const { return float(2.0*M_PI); }
};
- extern MAYBE_UNUSED TwoPiTy two_pi;
+ const constexpr TwoPiTy two_pi = TwoPiTy();
struct OneOverTwoPiTy
{
@@ -152,7 +152,7 @@ namespace embree
__forceinline operator float ( ) const { return float(0.5*M_1_PI); }
};
- extern MAYBE_UNUSED OneOverTwoPiTy one_over_two_pi;
+ const constexpr OneOverTwoPiTy one_over_two_pi = OneOverTwoPiTy();
struct FourPiTy
{
@@ -160,7 +160,7 @@ namespace embree
__forceinline operator float ( ) const { return float(4.0*M_PI); }
};
- extern MAYBE_UNUSED FourPiTy four_pi;
+ const constexpr FourPiTy four_pi = FourPiTy();
struct OneOverFourPiTy
{
@@ -168,30 +168,42 @@ namespace embree
__forceinline operator float ( ) const { return float(0.25*M_1_PI); }
};
- extern MAYBE_UNUSED OneOverFourPiTy one_over_four_pi;
+ const constexpr OneOverFourPiTy one_over_four_pi = OneOverFourPiTy();
struct StepTy {
+ __forceinline operator double ( ) const { return 0; }
+ __forceinline operator float ( ) const { return 0; }
+ __forceinline operator long long( ) const { return 0; }
+ __forceinline operator unsigned long long( ) const { return 0; }
+ __forceinline operator long ( ) const { return 0; }
+ __forceinline operator unsigned long ( ) const { return 0; }
+ __forceinline operator int ( ) const { return 0; }
+ __forceinline operator unsigned int ( ) const { return 0; }
+ __forceinline operator short ( ) const { return 0; }
+ __forceinline operator unsigned short ( ) const { return 0; }
+ __forceinline operator char ( ) const { return 0; }
+ __forceinline operator unsigned char ( ) const { return 0; }
};
- extern MAYBE_UNUSED StepTy step;
+ const constexpr StepTy step = StepTy();
struct ReverseStepTy {
};
- extern MAYBE_UNUSED ReverseStepTy reverse_step;
+ const constexpr ReverseStepTy reverse_step = ReverseStepTy();
struct EmptyTy {
};
- extern MAYBE_UNUSED EmptyTy empty;
+ const constexpr EmptyTy empty = EmptyTy();
struct FullTy {
};
- extern MAYBE_UNUSED FullTy full;
+ const constexpr FullTy full = FullTy();
struct UndefinedTy {
};
- extern MAYBE_UNUSED UndefinedTy undefined;
+ const constexpr UndefinedTy undefined = UndefinedTy();
}
diff --git a/thirdparty/embree/common/math/math.h b/thirdparty/embree/common/math/math.h
index 4bc54c1a6a..7930c17727 100644
--- a/thirdparty/embree/common/math/math.h
+++ b/thirdparty/embree/common/math/math.h
@@ -53,6 +53,16 @@ namespace embree
__forceinline float rcp ( const float x )
{
+#if defined(__aarch64__)
+ // Move scalar to vector register and do rcp.
+ __m128 a;
+ a[0] = x;
+ float32x4_t reciprocal = vrecpeq_f32(a);
+ reciprocal = vmulq_f32(vrecpsq_f32(a, reciprocal), reciprocal);
+ reciprocal = vmulq_f32(vrecpsq_f32(a, reciprocal), reciprocal);
+ return reciprocal[0];
+#else
+
const __m128 a = _mm_set_ss(x);
#if defined(__AVX512VL__)
@@ -66,30 +76,71 @@ namespace embree
#else
return _mm_cvtss_f32(_mm_mul_ss(r,_mm_sub_ss(_mm_set_ss(2.0f), _mm_mul_ss(r, a))));
#endif
+
+#endif //defined(__aarch64__)
}
__forceinline float signmsk ( const float x ) {
+#if defined(__aarch64__)
+ // FP and Neon shares same vector register in arm64
+ __m128 a;
+ __m128i b;
+ a[0] = x;
+ b[0] = 0x80000000;
+ a = _mm_and_ps(a, vreinterpretq_f32_s32(b));
+ return a[0];
+#else
return _mm_cvtss_f32(_mm_and_ps(_mm_set_ss(x),_mm_castsi128_ps(_mm_set1_epi32(0x80000000))));
+#endif
}
__forceinline float xorf( const float x, const float y ) {
+#if defined(__aarch64__)
+ // FP and Neon shares same vector register in arm64
+ __m128 a;
+ __m128 b;
+ a[0] = x;
+ b[0] = y;
+ a = _mm_xor_ps(a, b);
+ return a[0];
+#else
return _mm_cvtss_f32(_mm_xor_ps(_mm_set_ss(x),_mm_set_ss(y)));
+#endif
}
__forceinline float andf( const float x, const unsigned y ) {
+#if defined(__aarch64__)
+ // FP and Neon shares same vector register in arm64
+ __m128 a;
+ __m128i b;
+ a[0] = x;
+ b[0] = y;
+ a = _mm_and_ps(a, vreinterpretq_f32_s32(b));
+ return a[0];
+#else
return _mm_cvtss_f32(_mm_and_ps(_mm_set_ss(x),_mm_castsi128_ps(_mm_set1_epi32(y))));
+#endif
}
__forceinline float rsqrt( const float x )
{
+#if defined(__aarch64__)
+ // FP and Neon shares same vector register in arm64
+ __m128 a;
+ a[0] = x;
+ __m128 value = _mm_rsqrt_ps(a);
+ value = vmulq_f32(value, vrsqrtsq_f32(vmulq_f32(a, value), value));
+ value = vmulq_f32(value, vrsqrtsq_f32(vmulq_f32(a, value), value));
+ return value[0];
+#else
+
const __m128 a = _mm_set_ss(x);
#if defined(__AVX512VL__)
__m128 r = _mm_rsqrt14_ss(_mm_set_ss(0.0f),a);
#else
__m128 r = _mm_rsqrt_ss(a);
#endif
- r = _mm_add_ss(_mm_mul_ss(_mm_set_ss(1.5f), r), _mm_mul_ss(_mm_mul_ss(_mm_mul_ss(a, _mm_set_ss(-0.5f)), r), _mm_mul_ss(r, r)));
-#if defined(__ARM_NEON)
- r = _mm_add_ss(_mm_mul_ss(_mm_set_ss(1.5f), r), _mm_mul_ss(_mm_mul_ss(_mm_mul_ss(a, _mm_set_ss(-0.5f)), r), _mm_mul_ss(r, r)));
+ const __m128 c = _mm_add_ss(_mm_mul_ss(_mm_set_ss(1.5f), r),
+ _mm_mul_ss(_mm_mul_ss(_mm_mul_ss(a, _mm_set_ss(-0.5f)), r), _mm_mul_ss(r, r)));
+ return _mm_cvtss_f32(c);
#endif
- return _mm_cvtss_f32(r);
}
#if defined(__WIN32__) && defined(_MSC_VER) && (_MSC_VER <= 1700)
@@ -146,7 +197,17 @@ namespace embree
__forceinline double floor( const double x ) { return ::floor (x); }
__forceinline double ceil ( const double x ) { return ::ceil (x); }
-#if defined(__SSE4_1__)
+#if defined(__aarch64__)
+ __forceinline float mini(float a, float b) {
+ // FP and Neon shares same vector register in arm64
+ __m128 x;
+ __m128 y;
+ x[0] = a;
+ y[0] = b;
+ x = _mm_min_ps(x, y);
+ return x[0];
+ }
+#elif defined(__SSE4_1__)
__forceinline float mini(float a, float b) {
const __m128i ai = _mm_castps_si128(_mm_set_ss(a));
const __m128i bi = _mm_castps_si128(_mm_set_ss(b));
@@ -155,7 +216,17 @@ namespace embree
}
#endif
-#if defined(__SSE4_1__)
+#if defined(__aarch64__)
+ __forceinline float maxi(float a, float b) {
+ // FP and Neon shares same vector register in arm64
+ __m128 x;
+ __m128 y;
+ x[0] = a;
+ y[0] = b;
+ x = _mm_max_ps(x, y);
+ return x[0];
+ }
+#elif defined(__SSE4_1__)
__forceinline float maxi(float a, float b) {
const __m128i ai = _mm_castps_si128(_mm_set_ss(a));
const __m128i bi = _mm_castps_si128(_mm_set_ss(b));
@@ -172,9 +243,12 @@ namespace embree
__forceinline int64_t min(int64_t a, int64_t b) { return a<b ? a:b; }
__forceinline float min(float a, float b) { return a<b ? a:b; }
__forceinline double min(double a, double b) { return a<b ? a:b; }
-#if defined(__64BIT__)
+#if defined(__64BIT__) || defined(__EMSCRIPTEN__)
__forceinline size_t min(size_t a, size_t b) { return a<b ? a:b; }
#endif
+#if defined(__EMSCRIPTEN__)
+ __forceinline long min(long a, long b) { return a<b ? a:b; }
+#endif
template<typename T> __forceinline T min(const T& a, const T& b, const T& c) { return min(min(a,b),c); }
template<typename T> __forceinline T min(const T& a, const T& b, const T& c, const T& d) { return min(min(a,b),min(c,d)); }
@@ -189,9 +263,12 @@ namespace embree
__forceinline int64_t max(int64_t a, int64_t b) { return a<b ? b:a; }
__forceinline float max(float a, float b) { return a<b ? b:a; }
__forceinline double max(double a, double b) { return a<b ? b:a; }
-#if defined(__64BIT__)
+#if defined(__64BIT__) || defined(__EMSCRIPTEN__)
__forceinline size_t max(size_t a, size_t b) { return a<b ? b:a; }
#endif
+#if defined(__EMSCRIPTEN__)
+ __forceinline long max(long a, long b) { return a<b ? b:a; }
+#endif
template<typename T> __forceinline T max(const T& a, const T& b, const T& c) { return max(max(a,b),c); }
template<typename T> __forceinline T max(const T& a, const T& b, const T& c, const T& d) { return max(max(a,b),max(c,d)); }
@@ -231,6 +308,15 @@ namespace embree
__forceinline float msub ( const float a, const float b, const float c) { return _mm_cvtss_f32(_mm_fmsub_ss(_mm_set_ss(a),_mm_set_ss(b),_mm_set_ss(c))); }
__forceinline float nmadd ( const float a, const float b, const float c) { return _mm_cvtss_f32(_mm_fnmadd_ss(_mm_set_ss(a),_mm_set_ss(b),_mm_set_ss(c))); }
__forceinline float nmsub ( const float a, const float b, const float c) { return _mm_cvtss_f32(_mm_fnmsub_ss(_mm_set_ss(a),_mm_set_ss(b),_mm_set_ss(c))); }
+
+#elif defined (__aarch64__) && defined(__clang__)
+#pragma clang fp contract(fast)
+__forceinline float madd ( const float a, const float b, const float c) { return a*b + c; }
+__forceinline float msub ( const float a, const float b, const float c) { return a*b - c; }
+__forceinline float nmadd ( const float a, const float b, const float c) { return c - a*b; }
+__forceinline float nmsub ( const float a, const float b, const float c) { return -(c + a*b); }
+#pragma clang fp contract(on)
+
#else
__forceinline float madd ( const float a, const float b, const float c) { return a*b+c; }
__forceinline float msub ( const float a, const float b, const float c) { return a*b-c; }
@@ -326,7 +412,7 @@ namespace embree
return x | (y << 1) | (z << 2);
}
-#if defined(__AVX2__)
+#if defined(__AVX2__) && !defined(__aarch64__)
template<>
__forceinline unsigned int bitInterleave(const unsigned int &xi, const unsigned int& yi, const unsigned int& zi)
diff --git a/thirdparty/embree/common/math/quaternion.h b/thirdparty/embree/common/math/quaternion.h
index 080800efcd..78efccda72 100644
--- a/thirdparty/embree/common/math/quaternion.h
+++ b/thirdparty/embree/common/math/quaternion.h
@@ -242,13 +242,17 @@ namespace embree
T cosTheta = dot(q0, q1_);
QuaternionT<T> q1 = select(cosTheta < 0.f, -q1_, q1_);
cosTheta = select(cosTheta < 0.f, -cosTheta, cosTheta);
- if (unlikely(all(cosTheta > 0.9995f))) {
- return normalize(lerp(q0, q1, t));
- }
+
+ // spherical linear interpolation
const T phi = t * fastapprox::acos(cosTheta);
T sinPhi, cosPhi;
fastapprox::sincos(phi, sinPhi, cosPhi);
QuaternionT<T> qperp = sinPhi * normalize(msub(cosTheta, q0, q1));
- return msub(cosPhi, q0, qperp);
+ QuaternionT<T> qslerp = msub(cosPhi, q0, qperp);
+
+ // regular linear interpolation as fallback
+ QuaternionT<T> qlerp = normalize(lerp(q0, q1, t));
+
+ return select(cosTheta > 0.9995f, qlerp, qslerp);
}
}
diff --git a/thirdparty/embree/common/math/transcendental.h b/thirdparty/embree/common/math/transcendental.h
index fd16c26e81..daf9dd96d2 100644
--- a/thirdparty/embree/common/math/transcendental.h
+++ b/thirdparty/embree/common/math/transcendental.h
@@ -27,7 +27,7 @@ __forceinline T sin(const T &v)
// Reduced range version of x
auto x = v - kReal * piOverTwoVec;
auto kMod4 = k & 3;
- auto sinUseCos = (kMod4 == 1 | kMod4 == 3);
+ auto sinUseCos = (kMod4 == 1) | (kMod4 == 3);
auto flipSign = (kMod4 > 1);
// These coefficients are from sollya with fpminimax(sin(x)/x, [|0, 2,
@@ -76,8 +76,8 @@ __forceinline T cos(const T &v)
auto x = v - kReal * piOverTwoVec;
auto kMod4 = k & 3;
- auto cosUseCos = (kMod4 == 0 | kMod4 == 2);
- auto flipSign = (kMod4 == 1 | kMod4 == 2);
+ auto cosUseCos = (kMod4 == 0) | (kMod4 == 2);
+ auto flipSign = (kMod4 == 1) | (kMod4 == 2);
const float sinC2 = -0.16666667163372039794921875;
const float sinC4 = +8.333347737789154052734375e-3;
diff --git a/thirdparty/embree/common/math/vec2.h b/thirdparty/embree/common/math/vec2.h
index d62aef51f3..f6d98ffa0d 100644
--- a/thirdparty/embree/common/math/vec2.h
+++ b/thirdparty/embree/common/math/vec2.h
@@ -144,7 +144,7 @@ namespace embree
}
////////////////////////////////////////////////////////////////////////////////
- /// Euclidian Space Operators
+ /// Euclidean Space Operators
////////////////////////////////////////////////////////////////////////////////
template<typename T> __forceinline T dot ( const Vec2<T>& a, const Vec2<T>& b ) { return madd(a.x,b.x,a.y*b.y); }
@@ -205,11 +205,11 @@ namespace embree
#include "vec2fa.h"
-#if defined __SSE__
+#if defined(__SSE__) || defined(__ARM_NEON)
#include "../simd/sse.h"
#endif
-#if defined __AVX__
+#if defined(__AVX__)
#include "../simd/avx.h"
#endif
@@ -221,7 +221,7 @@ namespace embree
{
template<> __forceinline Vec2<float>::Vec2(const Vec2fa& a) : x(a.x), y(a.y) {}
-#if defined(__SSE__)
+#if defined(__SSE__) || defined(__ARM_NEON)
template<> __forceinline Vec2<vfloat4>::Vec2(const Vec2fa& a) : x(a.x), y(a.y) {}
#endif
diff --git a/thirdparty/embree/common/math/vec2fa.h b/thirdparty/embree/common/math/vec2fa.h
index a51fb68fd0..4f222894c2 100644
--- a/thirdparty/embree/common/math/vec2fa.h
+++ b/thirdparty/embree/common/math/vec2fa.h
@@ -97,6 +97,12 @@ namespace embree
__forceinline Vec2fa rcp ( const Vec2fa& a )
{
+#if defined(__aarch64__)
+ __m128 reciprocal = _mm_rcp_ps(a.m128);
+ reciprocal = vmulq_f32(vrecpsq_f32(a.m128, reciprocal), reciprocal);
+ reciprocal = vmulq_f32(vrecpsq_f32(a.m128, reciprocal), reciprocal);
+ return (const Vec2fa)reciprocal;
+#else
#if defined(__AVX512VL__)
const Vec2fa r = _mm_rcp14_ps(a.m128);
#else
@@ -104,13 +110,15 @@ namespace embree
#endif
#if defined(__AVX2__)
- const Vec2fa res = _mm_mul_ps(r,_mm_fnmadd_ps(r, a, vfloat4(2.0f)));
+ const Vec2fa h_n = _mm_fnmadd_ps(a, r, vfloat4(1.0)); // First, compute 1 - a * r (which will be very close to 0)
+ const Vec2fa res = _mm_fmadd_ps(r, h_n, r); // Then compute r + r * h_n
#else
- const Vec2fa res = _mm_mul_ps(r,_mm_sub_ps(vfloat4(2.0f), _mm_mul_ps(r, a)));
- //return _mm_sub_ps(_mm_add_ps(r, r), _mm_mul_ps(_mm_mul_ps(r, r), a));
+ const Vec2fa h_n = _mm_sub_ps(vfloat4(1.0f), _mm_mul_ps(a, r)); // First, compute 1 - a * r (which will be very close to 0)
+ const Vec2fa res = _mm_add_ps(r,_mm_mul_ps(r, h_n)); // Then compute r + r * h_n
#endif
return res;
+#endif //defined(__aarch64__)
}
__forceinline Vec2fa sqrt ( const Vec2fa& a ) { return _mm_sqrt_ps(a.m128); }
@@ -118,12 +126,21 @@ namespace embree
__forceinline Vec2fa rsqrt( const Vec2fa& a )
{
+#if defined(__aarch64__)
+ __m128 r = _mm_rsqrt_ps(a.m128);
+ r = vmulq_f32(r, vrsqrtsq_f32(vmulq_f32(a.m128, r), r));
+ r = vmulq_f32(r, vrsqrtsq_f32(vmulq_f32(a.m128, r), r));
+ return r;
+#else
+
#if defined(__AVX512VL__)
__m128 r = _mm_rsqrt14_ps(a.m128);
#else
__m128 r = _mm_rsqrt_ps(a.m128);
#endif
return _mm_add_ps(_mm_mul_ps(_mm_set1_ps(1.5f),r), _mm_mul_ps(_mm_mul_ps(_mm_mul_ps(a, _mm_set1_ps(-0.5f)), r), _mm_mul_ps(r, r)));
+
+#endif
}
__forceinline Vec2fa zero_fix(const Vec2fa& a) {
@@ -156,7 +173,7 @@ namespace embree
__forceinline Vec2fa min( const Vec2fa& a, const Vec2fa& b ) { return _mm_min_ps(a.m128,b.m128); }
__forceinline Vec2fa max( const Vec2fa& a, const Vec2fa& b ) { return _mm_max_ps(a.m128,b.m128); }
-#if defined(__SSE4_1__)
+#if defined(__aarch64__) || defined(__SSE4_1__)
__forceinline Vec2fa mini(const Vec2fa& a, const Vec2fa& b) {
const vint4 ai = _mm_castps_si128(a);
const vint4 bi = _mm_castps_si128(b);
@@ -165,7 +182,7 @@ namespace embree
}
#endif
-#if defined(__SSE4_1__)
+#if defined(__aarch64__) || defined(__SSE4_1__)
__forceinline Vec2fa maxi(const Vec2fa& a, const Vec2fa& b) {
const vint4 ai = _mm_castps_si128(a);
const vint4 bi = _mm_castps_si128(b);
@@ -227,7 +244,7 @@ namespace embree
__forceinline bool operator !=( const Vec2fa& a, const Vec2fa& b ) { return (_mm_movemask_ps(_mm_cmpneq_ps(a.m128, b.m128)) & 3) != 0; }
////////////////////////////////////////////////////////////////////////////////
- /// Euclidian Space Operators
+ /// Euclidean Space Operators
////////////////////////////////////////////////////////////////////////////////
#if defined(__SSE4_1__)
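The rcp()/rsqrt() hunks above replace the textbook r*(2 - a*r) refinement with the equivalent h = 1 - a*r; r + r*h form, which maps directly onto FMA and keeps the small residual h at full precision. A scalar sketch of both refinement steps as used here (standard C++; names are illustrative):

    // Sketch: one Newton-Raphson step for reciprocal and reciprocal square root,
    // written in the residual form used by the patch.
    #include <cmath>

    float refine_rcp(float a, float r /* rough approximation of 1/a */) {
        const float h = std::fma(-a, r, 1.0f);   // h = 1 - a*r, computed with a single rounding
        return std::fma(r, h, r);                // r + r*h, algebraically r*(2 - a*r)
    }

    float refine_rsqrt(float a, float r /* rough approximation of 1/sqrt(a) */) {
        return r * (1.5f - 0.5f * a * r * r);    // matches the non-FMA x86 path kept above
    }

Each step roughly doubles the number of correct bits, which is why the aarch64 path applies vrecpsq_f32/vrsqrtsq_f32 twice on top of the initial estimate.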
diff --git a/thirdparty/embree/common/math/vec3.h b/thirdparty/embree/common/math/vec3.h
index ce94eff327..254f6c4011 100644
--- a/thirdparty/embree/common/math/vec3.h
+++ b/thirdparty/embree/common/math/vec3.h
@@ -197,7 +197,7 @@ namespace embree
template<typename T> __forceinline Vec3<bool> ge_mask( const Vec3<T>& a, const Vec3<T>& b ) { return Vec3<bool>(a.x>=b.x,a.y>=b.y,a.z>=b.z); }
////////////////////////////////////////////////////////////////////////////////
- /// Euclidian Space Operators
+ /// Euclidean Space Operators
////////////////////////////////////////////////////////////////////////////////
template<typename T> __forceinline T sqr ( const Vec3<T>& a ) { return dot(a,a); }
@@ -207,7 +207,6 @@ namespace embree
template<typename T> __forceinline Vec3<T> normalize( const Vec3<T>& a ) { return a*rsqrt(sqr(a)); }
template<typename T> __forceinline T distance ( const Vec3<T>& a, const Vec3<T>& b ) { return length(a-b); }
template<typename T> __forceinline Vec3<T> cross ( const Vec3<T>& a, const Vec3<T>& b ) { return Vec3<T>(msub(a.y,b.z,a.z*b.y), msub(a.z,b.x,a.x*b.z), msub(a.x,b.y,a.y*b.x)); }
-
template<typename T> __forceinline Vec3<T> stable_triangle_normal( const Vec3<T>& a, const Vec3<T>& b, const Vec3<T>& c )
{
const T ab_x = a.z*b.y, ab_y = a.x*b.z, ab_z = a.y*b.x;
@@ -266,11 +265,11 @@ namespace embree
/// SSE / AVX / MIC specializations
////////////////////////////////////////////////////////////////////////////////
-#if defined __SSE__
+#if defined(__SSE__) || defined(__ARM_NEON)
#include "../simd/sse.h"
#endif
-#if defined __AVX__
+#if defined(__AVX__)
#include "../simd/avx.h"
#endif
@@ -291,14 +290,14 @@ namespace embree
template<> __forceinline Vec3<vfloat4>::Vec3(const Vec3fa& a) {
x = a.x; y = a.y; z = a.z;
}
-#elif defined(__SSE__)
+#elif defined(__SSE__) || defined(__ARM_NEON)
template<>
__forceinline Vec3<vfloat4>::Vec3(const Vec3fa& a) {
const vfloat4 v = vfloat4(a.m128); x = shuffle<0,0,0,0>(v); y = shuffle<1,1,1,1>(v); z = shuffle<2,2,2,2>(v);
}
#endif
-#if defined(__SSE__)
+#if defined(__SSE__) || defined(__ARM_NEON)
template<>
__forceinline Vec3<vfloat4> broadcast<vfloat4,vfloat4>(const Vec3<vfloat4>& a, const size_t k) {
return Vec3<vfloat4>(vfloat4::broadcast(&a.x[k]), vfloat4::broadcast(&a.y[k]), vfloat4::broadcast(&a.z[k]));
diff --git a/thirdparty/embree/common/math/vec3fa.h b/thirdparty/embree/common/math/vec3fa.h
index 586039741d..8564cf6d10 100644
--- a/thirdparty/embree/common/math/vec3fa.h
+++ b/thirdparty/embree/common/math/vec3fa.h
@@ -55,7 +55,13 @@ namespace embree
////////////////////////////////////////////////////////////////////////////////
static __forceinline Vec3fa load( const void* const a ) {
+#if defined(__aarch64__)
+ __m128 t = _mm_load_ps((float*)a);
+ t[3] = 0.0f;
+ return Vec3fa(t);
+#else
return Vec3fa(_mm_and_ps(_mm_load_ps((float*)a),_mm_castsi128_ps(_mm_set_epi32(0, -1, -1, -1))));
+#endif
}
static __forceinline Vec3fa loadu( const void* const a ) {
@@ -89,12 +95,20 @@ namespace embree
__forceinline Vec3fa operator +( const Vec3fa& a ) { return a; }
__forceinline Vec3fa operator -( const Vec3fa& a ) {
+#if defined(__aarch64__)
+ return vnegq_f32(a.m128);
+#else
const __m128 mask = _mm_castsi128_ps(_mm_set1_epi32(0x80000000));
return _mm_xor_ps(a.m128, mask);
+#endif
}
__forceinline Vec3fa abs ( const Vec3fa& a ) {
+#if defined(__aarch64__)
+ return _mm_abs_ps(a.m128);
+#else
const __m128 mask = _mm_castsi128_ps(_mm_set1_epi32(0x7fffffff));
return _mm_and_ps(a.m128, mask);
+#endif
}
__forceinline Vec3fa sign ( const Vec3fa& a ) {
return blendv_ps(Vec3fa(one).m128, (-Vec3fa(one)).m128, _mm_cmplt_ps (a.m128,Vec3fa(zero).m128));
@@ -102,6 +116,10 @@ namespace embree
__forceinline Vec3fa rcp ( const Vec3fa& a )
{
+#if defined(__aarch64__)
+ return vdivq_f32(vdupq_n_f32(1.0f),a.m128);
+#else
+
#if defined(__AVX512VL__)
const Vec3fa r = _mm_rcp14_ps(a.m128);
#else
@@ -109,13 +127,15 @@ namespace embree
#endif
#if defined(__AVX2__)
- const Vec3fa res = _mm_mul_ps(r.m128,_mm_fnmadd_ps(r.m128, a.m128, vfloat4(2.0f)));
+ const Vec3fa h_n = _mm_fnmadd_ps(a.m128, r.m128, vfloat4(1.0)); // First, compute 1 - a * r (which will be very close to 0)
+ const Vec3fa res = _mm_fmadd_ps(r.m128, h_n.m128, r.m128); // Then compute r + r * h_n
#else
- const Vec3fa res = _mm_mul_ps(r.m128,_mm_sub_ps(vfloat4(2.0f), _mm_mul_ps(r.m128, a.m128)));
- //return _mm_sub_ps(_mm_add_ps(r, r), _mm_mul_ps(_mm_mul_ps(r, r), a));
+ const Vec3fa h_n = _mm_sub_ps(vfloat4(1.0f), _mm_mul_ps(a.m128, r.m128)); // First, compute 1 - a * r (which will be very close to 0)
+ const Vec3fa res = _mm_add_ps(r.m128,_mm_mul_ps(r.m128, h_n.m128)); // Then compute r + r * h_n
#endif
return res;
+#endif //defined(__aarch64__)
}
__forceinline Vec3fa sqrt ( const Vec3fa& a ) { return _mm_sqrt_ps(a.m128); }
@@ -123,12 +143,20 @@ namespace embree
__forceinline Vec3fa rsqrt( const Vec3fa& a )
{
+#if defined(__aarch64__)
+ __m128 r = _mm_rsqrt_ps(a.m128);
+ r = vmulq_f32(r, vrsqrtsq_f32(vmulq_f32(a.m128, r), r));
+ r = vmulq_f32(r, vrsqrtsq_f32(vmulq_f32(a.m128, r), r));
+ return r;
+#else
+
#if defined(__AVX512VL__)
__m128 r = _mm_rsqrt14_ps(a.m128);
#else
__m128 r = _mm_rsqrt_ps(a.m128);
#endif
return _mm_add_ps(_mm_mul_ps(_mm_set1_ps(1.5f),r), _mm_mul_ps(_mm_mul_ps(_mm_mul_ps(a.m128, _mm_set1_ps(-0.5f)), r), _mm_mul_ps(r, r)));
+#endif
}
__forceinline Vec3fa zero_fix(const Vec3fa& a) {
@@ -161,7 +189,7 @@ namespace embree
__forceinline Vec3fa min( const Vec3fa& a, const Vec3fa& b ) { return _mm_min_ps(a.m128,b.m128); }
__forceinline Vec3fa max( const Vec3fa& a, const Vec3fa& b ) { return _mm_max_ps(a.m128,b.m128); }
-#if defined(__SSE4_1__)
+#if defined(__aarch64__) || defined(__SSE4_1__)
__forceinline Vec3fa mini(const Vec3fa& a, const Vec3fa& b) {
const vint4 ai = _mm_castps_si128(a.m128);
const vint4 bi = _mm_castps_si128(b.m128);
@@ -170,7 +198,7 @@ namespace embree
}
#endif
-#if defined(__SSE4_1__)
+#if defined(__aarch64__) || defined(__SSE4_1__)
__forceinline Vec3fa maxi(const Vec3fa& a, const Vec3fa& b) {
const vint4 ai = _mm_castps_si128(a.m128);
const vint4 bi = _mm_castps_si128(b.m128);
@@ -187,16 +215,16 @@ namespace embree
/// Ternary Operators
////////////////////////////////////////////////////////////////////////////////
-#if defined(__AVX2__)
+#if defined(__AVX2__) || defined(__ARM_NEON)
__forceinline Vec3fa madd ( const Vec3fa& a, const Vec3fa& b, const Vec3fa& c) { return _mm_fmadd_ps(a.m128,b.m128,c.m128); }
__forceinline Vec3fa msub ( const Vec3fa& a, const Vec3fa& b, const Vec3fa& c) { return _mm_fmsub_ps(a.m128,b.m128,c.m128); }
__forceinline Vec3fa nmadd ( const Vec3fa& a, const Vec3fa& b, const Vec3fa& c) { return _mm_fnmadd_ps(a.m128,b.m128,c.m128); }
__forceinline Vec3fa nmsub ( const Vec3fa& a, const Vec3fa& b, const Vec3fa& c) { return _mm_fnmsub_ps(a.m128,b.m128,c.m128); }
#else
__forceinline Vec3fa madd ( const Vec3fa& a, const Vec3fa& b, const Vec3fa& c) { return a*b+c; }
- __forceinline Vec3fa msub ( const Vec3fa& a, const Vec3fa& b, const Vec3fa& c) { return a*b-c; }
__forceinline Vec3fa nmadd ( const Vec3fa& a, const Vec3fa& b, const Vec3fa& c) { return -a*b+c;}
__forceinline Vec3fa nmsub ( const Vec3fa& a, const Vec3fa& b, const Vec3fa& c) { return -a*b-c; }
+ __forceinline Vec3fa msub ( const Vec3fa& a, const Vec3fa& b, const Vec3fa& c) { return a*b-c; }
#endif
__forceinline Vec3fa madd ( const float a, const Vec3fa& b, const Vec3fa& c) { return madd(Vec3fa(a),b,c); }
@@ -218,8 +246,26 @@ namespace embree
////////////////////////////////////////////////////////////////////////////////
/// Reductions
////////////////////////////////////////////////////////////////////////////////
+#if defined(__aarch64__)
+ __forceinline float reduce_add(const Vec3fa& v) {
+ float32x4_t t = v.m128;
+ t[3] = 0.0f;
+ return vaddvq_f32(t);
+ }
- __forceinline float reduce_add(const Vec3fa& v) {
+ __forceinline float reduce_mul(const Vec3fa& v) { return v.x*v.y*v.z; }
+ __forceinline float reduce_min(const Vec3fa& v) {
+ float32x4_t t = v.m128;
+ t[3] = t[2];
+ return vminvq_f32(t);
+ }
+ __forceinline float reduce_max(const Vec3fa& v) {
+ float32x4_t t = v.m128;
+ t[3] = t[2];
+ return vmaxvq_f32(t);
+ }
+#else
+ __forceinline float reduce_add(const Vec3fa& v) {
const vfloat4 a(v.m128);
const vfloat4 b = shuffle<1>(a);
const vfloat4 c = shuffle<2>(a);
@@ -229,6 +275,7 @@ namespace embree
__forceinline float reduce_mul(const Vec3fa& v) { return v.x*v.y*v.z; }
__forceinline float reduce_min(const Vec3fa& v) { return min(v.x,v.y,v.z); }
__forceinline float reduce_max(const Vec3fa& v) { return max(v.x,v.y,v.z); }
+#endif
////////////////////////////////////////////////////////////////////////////////
/// Comparison Operators
@@ -241,8 +288,13 @@ namespace embree
__forceinline Vec3ba neq_mask(const Vec3fa& a, const Vec3fa& b ) { return _mm_cmpneq_ps(a.m128, b.m128); }
__forceinline Vec3ba lt_mask( const Vec3fa& a, const Vec3fa& b ) { return _mm_cmplt_ps (a.m128, b.m128); }
__forceinline Vec3ba le_mask( const Vec3fa& a, const Vec3fa& b ) { return _mm_cmple_ps (a.m128, b.m128); }
- __forceinline Vec3ba gt_mask( const Vec3fa& a, const Vec3fa& b ) { return _mm_cmpnle_ps(a.m128, b.m128); }
- __forceinline Vec3ba ge_mask( const Vec3fa& a, const Vec3fa& b ) { return _mm_cmpnlt_ps(a.m128, b.m128); }
+ #if defined(__aarch64__)
+ __forceinline Vec3ba gt_mask( const Vec3fa& a, const Vec3fa& b ) { return _mm_cmpgt_ps (a.m128, b.m128); }
+ __forceinline Vec3ba ge_mask( const Vec3fa& a, const Vec3fa& b ) { return _mm_cmpge_ps (a.m128, b.m128); }
+#else
+ __forceinline Vec3ba gt_mask(const Vec3fa& a, const Vec3fa& b) { return _mm_cmpnle_ps(a.m128, b.m128); }
+ __forceinline Vec3ba ge_mask(const Vec3fa& a, const Vec3fa& b) { return _mm_cmpnlt_ps(a.m128, b.m128); }
+#endif
__forceinline bool isvalid ( const Vec3fa& v ) {
return all(gt_mask(v,Vec3fa(-FLT_LARGE)) & lt_mask(v,Vec3fa(+FLT_LARGE)));
@@ -261,7 +313,7 @@ namespace embree
}
////////////////////////////////////////////////////////////////////////////////
- /// Euclidian Space Operators
+ /// Euclidean Space Operators
////////////////////////////////////////////////////////////////////////////////
#if defined(__SSE4_1__)
@@ -335,7 +387,11 @@ namespace embree
/// Rounding Functions
////////////////////////////////////////////////////////////////////////////////
-#if defined (__SSE4_1__)
+#if defined(__aarch64__)
+ __forceinline Vec3fa floor(const Vec3fa& a) { return vrndmq_f32(a.m128); }
+ __forceinline Vec3fa ceil (const Vec3fa& a) { return vrndpq_f32(a.m128); }
+ __forceinline Vec3fa trunc(const Vec3fa& a) { return vrndq_f32(a.m128); }
+#elif defined (__SSE4_1__)
__forceinline Vec3fa trunc( const Vec3fa& a ) { return _mm_round_ps(a.m128, _MM_FROUND_TO_NEAREST_INT); }
__forceinline Vec3fa floor( const Vec3fa& a ) { return _mm_round_ps(a.m128, _MM_FROUND_TO_NEG_INF ); }
__forceinline Vec3fa ceil ( const Vec3fa& a ) { return _mm_round_ps(a.m128, _MM_FROUND_TO_POS_INF ); }
@@ -393,8 +449,10 @@ namespace embree
__forceinline Vec3fx( const Vec3fa& other, const int a1) { m128 = other.m128; a = a1; }
__forceinline Vec3fx( const Vec3fa& other, const unsigned a1) { m128 = other.m128; u = a1; }
- __forceinline Vec3fx( const Vec3fa& other, const float w1) {
-#if defined (__SSE4_1__)
+ __forceinline Vec3fx( const Vec3fa& other, const float w1) {
+#if defined (__aarch64__)
+ m128 = other.m128; m128[3] = w1;
+#elif defined (__SSE4_1__)
m128 = _mm_insert_ps(other.m128, _mm_set_ss(w1),3 << 4);
#else
const vint4 mask(-1,-1,-1,0);
@@ -526,7 +584,7 @@ namespace embree
__forceinline Vec3fx min( const Vec3fx& a, const Vec3fx& b ) { return _mm_min_ps(a.m128,b.m128); }
__forceinline Vec3fx max( const Vec3fx& a, const Vec3fx& b ) { return _mm_max_ps(a.m128,b.m128); }
-#if defined(__SSE4_1__)
+#if defined(__SSE4_1__) || defined(__aarch64__)
__forceinline Vec3fx mini(const Vec3fx& a, const Vec3fx& b) {
const vint4 ai = _mm_castps_si128(a.m128);
const vint4 bi = _mm_castps_si128(b.m128);
@@ -535,7 +593,7 @@ namespace embree
}
#endif
-#if defined(__SSE4_1__)
+#if defined(__SSE4_1__) || defined(__aarch64__)
__forceinline Vec3fx maxi(const Vec3fx& a, const Vec3fx& b) {
const vint4 ai = _mm_castps_si128(a.m128);
const vint4 bi = _mm_castps_si128(b.m128);
@@ -626,7 +684,7 @@ namespace embree
}
////////////////////////////////////////////////////////////////////////////////
- /// Euclidian Space Operators
+ /// Euclidean Space Operators
////////////////////////////////////////////////////////////////////////////////
#if defined(__SSE4_1__)
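In the reductions added above, Vec3fa lives in a 4-lane register, so the unused fourth lane has to be neutralized before the full-width horizontal instructions: zeroed for the sum, overwritten with lane 2 for min/max. A standalone sketch of the same trick (aarch64 only; vsetq_lane is the portable intrinsic spelling of the patch's direct t[3] = ... indexing):

    // Sketch (aarch64/NEON): 3-lane reductions over a float32x4_t, mirroring the
    // lane-3 fix-ups in the Vec3fa overloads above.
    #include <arm_neon.h>

    float reduce_add3(float32x4_t v) {
        v = vsetq_lane_f32(0.0f, v, 3);                    // lane 3 must not contribute
        return vaddvq_f32(v);                              // horizontal add of all 4 lanes
    }

    float reduce_min3(float32x4_t v) {
        v = vsetq_lane_f32(vgetq_lane_f32(v, 2), v, 3);    // duplicate z so lane 3 cannot win
        return vminvq_f32(v);
    }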
diff --git a/thirdparty/embree/common/math/vec3ia.h b/thirdparty/embree/common/math/vec3ia.h
index 694804c40d..d4cc3125cd 100644
--- a/thirdparty/embree/common/math/vec3ia.h
+++ b/thirdparty/embree/common/math/vec3ia.h
@@ -65,7 +65,9 @@ namespace embree
__forceinline Vec3ia operator +( const Vec3ia& a ) { return a; }
__forceinline Vec3ia operator -( const Vec3ia& a ) { return _mm_sub_epi32(_mm_setzero_si128(), a.m128); }
-#if defined(__SSSE3__)
+#if (defined(__aarch64__))
+ __forceinline Vec3ia abs ( const Vec3ia& a ) { return vabsq_s32(a.m128); }
+#elif defined(__SSSE3__)
__forceinline Vec3ia abs ( const Vec3ia& a ) { return _mm_abs_epi32(a.m128); }
#endif
@@ -81,7 +83,7 @@ namespace embree
__forceinline Vec3ia operator -( const Vec3ia& a, const int b ) { return a-Vec3ia(b); }
__forceinline Vec3ia operator -( const int a, const Vec3ia& b ) { return Vec3ia(a)-b; }
-#if defined(__SSE4_1__)
+#if defined(__aarch64__) || defined(__SSE4_1__)
__forceinline Vec3ia operator *( const Vec3ia& a, const Vec3ia& b ) { return _mm_mullo_epi32(a.m128, b.m128); }
__forceinline Vec3ia operator *( const Vec3ia& a, const int b ) { return a * Vec3ia(b); }
__forceinline Vec3ia operator *( const int a, const Vec3ia& b ) { return Vec3ia(a) * b; }
@@ -116,7 +118,7 @@ namespace embree
__forceinline Vec3ia& operator -=( Vec3ia& a, const Vec3ia& b ) { return a = a - b; }
__forceinline Vec3ia& operator -=( Vec3ia& a, const int& b ) { return a = a - b; }
-#if defined(__SSE4_1__)
+#if defined(__aarch64__) || defined(__SSE4_1__)
__forceinline Vec3ia& operator *=( Vec3ia& a, const Vec3ia& b ) { return a = a * b; }
__forceinline Vec3ia& operator *=( Vec3ia& a, const int& b ) { return a = a * b; }
#endif
@@ -127,18 +129,38 @@ namespace embree
__forceinline Vec3ia& operator |=( Vec3ia& a, const Vec3ia& b ) { return a = a | b; }
__forceinline Vec3ia& operator |=( Vec3ia& a, const int& b ) { return a = a | b; }
+#if !defined(__ARM_NEON)
__forceinline Vec3ia& operator <<=( Vec3ia& a, const int& b ) { return a = a << b; }
__forceinline Vec3ia& operator >>=( Vec3ia& a, const int& b ) { return a = a >> b; }
+#endif
////////////////////////////////////////////////////////////////////////////////
- /// Reductions
+ /// Select
////////////////////////////////////////////////////////////////////////////////
+ __forceinline Vec3ia select( const Vec3ba& m, const Vec3ia& t, const Vec3ia& f ) {
+#if defined(__aarch64__) || defined(__SSE4_1__)
+ return _mm_castps_si128(_mm_blendv_ps(_mm_castsi128_ps(f), _mm_castsi128_ps(t), m));
+#else
+ return _mm_or_si128(_mm_and_si128(_mm_castps_si128(m), t), _mm_andnot_si128(_mm_castps_si128(m), f));
+#endif
+ }
+
+ ////////////////////////////////////////////////////////////////////////////////
+ /// Reductions
+ ////////////////////////////////////////////////////////////////////////////////
+#if defined(__aarch64__)
+ __forceinline int reduce_add(const Vec3ia& v) { return vaddvq_s32(select(Vec3ba(1,1,1),v,Vec3ia(0))); }
+ __forceinline int reduce_mul(const Vec3ia& v) { return v.x*v.y*v.z; }
+ __forceinline int reduce_min(const Vec3ia& v) { return vminvq_s32(select(Vec3ba(1,1,1),v,Vec3ia(0x7FFFFFFF))); }
+ __forceinline int reduce_max(const Vec3ia& v) { return vmaxvq_s32(select(Vec3ba(1,1,1),v,Vec3ia(0x80000000))); }
+#else
__forceinline int reduce_add(const Vec3ia& v) { return v.x+v.y+v.z; }
__forceinline int reduce_mul(const Vec3ia& v) { return v.x*v.y*v.z; }
__forceinline int reduce_min(const Vec3ia& v) { return min(v.x,v.y,v.z); }
__forceinline int reduce_max(const Vec3ia& v) { return max(v.x,v.y,v.z); }
-
+#endif
+
////////////////////////////////////////////////////////////////////////////////
/// Comparison Operators
////////////////////////////////////////////////////////////////////////////////
@@ -156,19 +178,7 @@ namespace embree
__forceinline Vec3ba lt_mask( const Vec3ia& a, const Vec3ia& b ) { return _mm_castsi128_ps(_mm_cmplt_epi32 (a.m128, b.m128)); }
__forceinline Vec3ba gt_mask( const Vec3ia& a, const Vec3ia& b ) { return _mm_castsi128_ps(_mm_cmpgt_epi32 (a.m128, b.m128)); }
- ////////////////////////////////////////////////////////////////////////////////
- /// Select
- ////////////////////////////////////////////////////////////////////////////////
-
- __forceinline Vec3ia select( const Vec3ba& m, const Vec3ia& t, const Vec3ia& f ) {
-#if defined(__SSE4_1__)
- return _mm_castps_si128(_mm_blendv_ps(_mm_castsi128_ps(f), _mm_castsi128_ps(t), m));
-#else
- return _mm_or_si128(_mm_and_si128(_mm_castps_si128(m), t), _mm_andnot_si128(_mm_castps_si128(m), f));
-#endif
- }
-
-#if defined(__SSE4_1__)
+#if defined(__aarch64__) || defined(__SSE4_1__)
__forceinline Vec3ia min( const Vec3ia& a, const Vec3ia& b ) { return _mm_min_epi32(a.m128,b.m128); }
__forceinline Vec3ia max( const Vec3ia& a, const Vec3ia& b ) { return _mm_max_epi32(a.m128,b.m128); }
#else
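The reordering in vec3ia.h above is not cosmetic: the new aarch64 reductions call select() to park a neutral element in the unused lane (0 for the sum, INT_MAX for min, INT_MIN for max) before vaddvq/vminvq/vmaxvq, so select() has to be declared first. A small integer sketch of the neutral-element idea (aarch64 only; helper names are hypothetical):

    // Sketch (aarch64/NEON): reduce 3 of 4 int32 lanes by substituting a neutral
    // element into lane 3, as the reordered Vec3ia reductions do via select().
    #include <arm_neon.h>
    #include <climits>

    int reduce_min3(int32x4_t v) {
        v = vsetq_lane_s32(INT_MAX, v, 3);   // INT_MAX never beats the real minimum
        return vminvq_s32(v);
    }

    int reduce_add3(int32x4_t v) {
        v = vsetq_lane_s32(0, v, 3);         // 0 is the identity for the sum
        return vaddvq_s32(v);
    }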
diff --git a/thirdparty/embree/common/math/vec4.h b/thirdparty/embree/common/math/vec4.h
index 0ed107928a..10c53f47b4 100644
--- a/thirdparty/embree/common/math/vec4.h
+++ b/thirdparty/embree/common/math/vec4.h
@@ -149,7 +149,7 @@ namespace embree
}
////////////////////////////////////////////////////////////////////////////////
- /// Euclidian Space Operators
+ /// Euclidean Space Operators
////////////////////////////////////////////////////////////////////////////////
template<typename T> __forceinline T dot ( const Vec4<T>& a, const Vec4<T>& b ) { return madd(a.x,b.x,madd(a.y,b.y,madd(a.z,b.z,a.w*b.w))); }
@@ -205,7 +205,7 @@ namespace embree
/// SSE / AVX / MIC specializations
////////////////////////////////////////////////////////////////////////////////
-#if defined __SSE__
+#if defined(__SSE__) || defined(__ARM_NEON)
#include "../simd/sse.h"
#endif
@@ -225,7 +225,7 @@ namespace embree
template<> __forceinline Vec4<vfloat4>::Vec4( const Vec3fx& a ) {
x = a.x; y = a.y; z = a.z; w = a.w;
}
-#elif defined(__SSE__)
+#elif defined(__SSE__) || defined(__ARM_NEON)
template<> __forceinline Vec4<vfloat4>::Vec4( const Vec3fx& a ) {
const vfloat4 v = vfloat4(a.m128); x = shuffle<0,0,0,0>(v); y = shuffle<1,1,1,1>(v); z = shuffle<2,2,2,2>(v); w = shuffle<3,3,3,3>(v);
}
diff --git a/thirdparty/embree/common/simd/arm/avx2neon.h b/thirdparty/embree/common/simd/arm/avx2neon.h
new file mode 100644
index 0000000000..dd321d3d64
--- /dev/null
+++ b/thirdparty/embree/common/simd/arm/avx2neon.h
@@ -0,0 +1,1196 @@
+#pragma once
+
+#if !defined(__aarch64__)
+#error "avx2neon is only supported for AARCH64"
+#endif
+
+#include "sse2neon.h"
+
+#define AVX2NEON_ABI static inline __attribute__((always_inline))
+
+
+struct __m256 {
+ __m128 lo,hi;
+ __m256() {}
+};
+
+
+
+
+struct __m256i {
+ __m128i lo,hi;
+ explicit __m256i(const __m256 a) : lo(__m128i(a.lo)),hi(__m128i(a.hi)) {}
+ operator __m256() const {__m256 res; res.lo = __m128(lo);res.hi = __m128(hi); return res;}
+ __m256i() {}
+};
+
+
+
+
+struct __m256d {
+ float64x2_t lo,hi;
+ __m256d() {}
+ __m256d(const __m256& a) : lo(float64x2_t(a.lo)),hi(float64x2_t(a.hi)) {}
+ __m256d(const __m256i& a) : lo(float64x2_t(a.lo)),hi(float64x2_t(a.hi)) {}
+};
+
+#define UNARY_AVX_OP(type,func,basic_func) AVX2NEON_ABI type func(const type& a) {type res;res.lo=basic_func(a.lo);res.hi=basic_func(a.hi);return res;}
+
+
+#define BINARY_AVX_OP(type,func,basic_func) AVX2NEON_ABI type func(const type& a,const type& b) {type res;res.lo=basic_func(a.lo,b.lo);res.hi=basic_func(a.hi,b.hi);return res;}
+#define BINARY_AVX_OP_CAST(type,func,basic_func,bdst,bsrc) AVX2NEON_ABI type func(const type& a,const type& b) {type res;res.lo=bdst(basic_func(bsrc(a.lo),bsrc(b.lo)));res.hi=bdst(basic_func(bsrc(a.hi),bsrc(b.hi)));return res;}
+
+#define TERNARY_AVX_OP(type,func,basic_func) AVX2NEON_ABI type func(const type& a,const type& b,const type& c) {type res;res.lo=basic_func(a.lo,b.lo,c.lo);res.hi=basic_func(a.hi,b.hi,c.hi);return res;}
+
+
+#define CAST_SIMD_TYPE(to,name,from,basic_dst) AVX2NEON_ABI to name(const from& a) { to res; res.lo = basic_dst(a.lo); res.hi=basic_dst(a.hi); return res;}
+
+
+
+#define _mm_stream_load_si128 _mm_load_si128
+#define _mm256_stream_load_si256 _mm256_load_si256
+
+
+AVX2NEON_ABI
+__m128i _mm_blend_epi32 (__m128i a, __m128i b, const int imm8)
+{
+ __m128 af = _mm_castsi128_ps(a);
+ __m128 bf = _mm_castsi128_ps(b);
+ __m128 blendf = _mm_blend_ps(af, bf, imm8);
+ return _mm_castps_si128(blendf);
+}
+
+AVX2NEON_ABI
+int _mm_movemask_popcnt(__m128 a)
+{
+ return __builtin_popcount(_mm_movemask_ps(a));
+}
+
+AVX2NEON_ABI
+__m128 _mm_maskload_ps (float const * mem_addr, __m128i mask)
+{
+ float32x4_t res;
+ uint32x4_t mask_u32 = vreinterpretq_u32_m128i(mask);
+ for (int i=0;i<4;i++) {
+ if (mask_u32[i] & 0x80000000) res[i] = mem_addr[i]; else res[i] = 0;
+ }
+ return vreinterpretq_m128_f32(res);
+}
+
+AVX2NEON_ABI
+void _mm_maskstore_ps (float * mem_addr, __m128i mask, __m128 a)
+{
+ float32x4_t a_f32 = vreinterpretq_f32_m128(a);
+ uint32x4_t mask_u32 = vreinterpretq_u32_m128i(mask);
+ for (int i=0;i<4;i++) {
+ if (mask_u32[i] & 0x80000000) mem_addr[i] = a_f32[i];
+ }
+}
+
+AVX2NEON_ABI
+void _mm_maskstore_epi32 (int * mem_addr, __m128i mask, __m128i a)
+{
+ uint32x4_t mask_u32 = vreinterpretq_u32_m128i(mask);
+ int32x4_t a_s32 = vreinterpretq_s32_m128i(a);
+ for (int i=0;i<4;i++) {
+ if (mask_u32[i] & 0x80000000) mem_addr[i] = a_s32[i];
+ }
+}
+
+
+#define _mm_fmadd_ss _mm_fmadd_ps
+#define _mm_fmsub_ss _mm_fmsub_ps
+#define _mm_fnmsub_ss _mm_fnmsub_ps
+#define _mm_fnmadd_ss _mm_fnmadd_ps
+
+template<int code>
+AVX2NEON_ABI float32x4_t dpps_neon(const float32x4_t& a,const float32x4_t& b)
+{
+ float v;
+ v = 0;
+ v += (code & 0x10) ? a[0]*b[0] : 0;
+ v += (code & 0x20) ? a[1]*b[1] : 0;
+ v += (code & 0x40) ? a[2]*b[2] : 0;
+ v += (code & 0x80) ? a[3]*b[3] : 0;
+ float32x4_t res;
+ res[0] = (code & 0x1) ? v : 0;
+ res[1] = (code & 0x2) ? v : 0;
+ res[2] = (code & 0x4) ? v : 0;
+ res[3] = (code & 0x8) ? v : 0;
+ return res;
+}
+
+template<>
+inline float32x4_t dpps_neon<0x7f>(const float32x4_t& a,const float32x4_t& b)
+{
+ float v;
+ float32x4_t m = _mm_mul_ps(a,b);
+ m[3] = 0;
+ v = vaddvq_f32(m);
+ return _mm_set1_ps(v);
+}
+
+template<>
+inline float32x4_t dpps_neon<0xff>(const float32x4_t& a,const float32x4_t& b)
+{
+ float v;
+ float32x4_t m = _mm_mul_ps(a,b);
+ v = vaddvq_f32(m);
+ return _mm_set1_ps(v);
+}
+
+#define _mm_dp_ps(a,b,c) dpps_neon<c>((a),(b))
+
+
+AVX2NEON_ABI
+__m128 _mm_permutevar_ps (__m128 a, __m128i b)
+{
+ uint32x4_t b_u32 = vreinterpretq_u32_m128i(b);
+ float32x4_t x;
+ for (int i=0;i<4;i++)
+ {
+ x[i] = a[b_u32[i]];
+ }
+ return vreinterpretq_m128_f32(x);
+}
+
+AVX2NEON_ABI
+__m256i _mm256_setzero_si256()
+{
+ __m256i res;
+ res.lo = res.hi = vdupq_n_s32(0);
+ return res;
+}
+
+AVX2NEON_ABI
+__m256 _mm256_setzero_ps()
+{
+ __m256 res;
+ res.lo = res.hi = vdupq_n_f32(0.0f);
+ return res;
+}
+
+AVX2NEON_ABI
+__m256i _mm256_undefined_si256()
+{
+ return _mm256_setzero_si256();
+}
+
+AVX2NEON_ABI
+__m256 _mm256_undefined_ps()
+{
+ return _mm256_setzero_ps();
+}
+
+CAST_SIMD_TYPE(__m256d, _mm256_castps_pd, __m256, float64x2_t)
+CAST_SIMD_TYPE(__m256i, _mm256_castps_si256, __m256, __m128i)
+CAST_SIMD_TYPE(__m256, _mm256_castsi256_ps, __m256i, __m128)
+CAST_SIMD_TYPE(__m256, _mm256_castpd_ps , __m256d, __m128)
+CAST_SIMD_TYPE(__m256d, _mm256_castsi256_pd, __m256i, float64x2_t)
+CAST_SIMD_TYPE(__m256i, _mm256_castpd_si256, __m256d, __m128i)
+
+
+
+
+AVX2NEON_ABI
+__m128 _mm256_castps256_ps128 (__m256 a)
+{
+ return a.lo;
+}
+
+AVX2NEON_ABI
+__m256i _mm256_castsi128_si256 (__m128i a)
+{
+ __m256i res;
+ res.lo = a ;
+ res.hi = vdupq_n_s32(0);
+ return res;
+}
+
+AVX2NEON_ABI
+__m128i _mm256_castsi256_si128 (__m256i a)
+{
+ return a.lo;
+}
+
+AVX2NEON_ABI
+__m256 _mm256_castps128_ps256 (__m128 a)
+{
+ __m256 res;
+ res.lo = a;
+ res.hi = vdupq_n_f32(0);
+ return res;
+}
+
+
+AVX2NEON_ABI
+__m256 _mm256_broadcast_ss (float const * mem_addr)
+{
+ __m256 res;
+ res.lo = res.hi = vdupq_n_f32(*mem_addr);
+ return res;
+}
+
+
+AVX2NEON_ABI
+__m256i _mm256_set_epi32 (int e7, int e6, int e5, int e4, int e3, int e2, int e1, int e0)
+{
+ __m256i res;
+ res.lo = _mm_set_epi32(e3,e2,e1,e0);
+ res.hi = _mm_set_epi32(e7,e6,e5,e4);
+ return res;
+
+}
+
+AVX2NEON_ABI
+__m256i _mm256_set1_epi32 (int a)
+{
+ __m256i res;
+ res.lo = res.hi = vdupq_n_s32(a);
+ return res;
+}
+AVX2NEON_ABI
+__m256i _mm256_set1_epi8 (int a)
+{
+ __m256i res;
+ res.lo = res.hi = vdupq_n_s8(a);
+ return res;
+}
+AVX2NEON_ABI
+__m256i _mm256_set1_epi16 (int a)
+{
+ __m256i res;
+ res.lo = res.hi = vdupq_n_s16(a);
+ return res;
+}
+
+
+
+
+AVX2NEON_ABI
+int _mm256_movemask_ps(const __m256& v)
+{
+ return (_mm_movemask_ps(v.hi) << 4) | _mm_movemask_ps(v.lo);
+}
+
+template<int imm8>
+AVX2NEON_ABI
+__m256 __mm256_permute_ps (const __m256& a)
+{
+ __m256 res;
+ res.lo = _mm_shuffle_ps(a.lo,a.lo,imm8);
+ res.hi = _mm_shuffle_ps(a.hi,a.hi,imm8);
+ return res;
+
+}
+
+#define _mm256_permute_ps(a,c) __mm256_permute_ps<c>(a)
+
+
+template<int imm8>
+AVX2NEON_ABI
+__m256 __mm256_shuffle_ps (const __m256 a,const __m256& b)
+{
+ __m256 res;
+ res.lo = _mm_shuffle_ps(a.lo,b.lo,imm8);
+ res.hi = _mm_shuffle_ps(a.hi,b.hi,imm8);
+ return res;
+
+}
+
+template<int imm8>
+AVX2NEON_ABI
+__m256i __mm256_shuffle_epi32 (const __m256i a)
+{
+ __m256i res;
+ res.lo = _mm_shuffle_epi32(a.lo,imm8);
+ res.hi = _mm_shuffle_epi32(a.hi,imm8);
+ return res;
+
+}
+
+template<int imm8>
+AVX2NEON_ABI
+__m256i __mm256_srli_si256 (__m256i a)
+{
+ __m256i res;
+ res.lo = _mm_srli_si128(a.lo,imm8);
+ res.hi = _mm_srli_si128(a.hi,imm8);
+ return res;
+}
+
+template<int imm8>
+AVX2NEON_ABI
+__m256i __mm256_slli_si256 (__m256i a)
+{
+ __m256i res;
+ res.lo = _mm_slli_si128(a.lo,imm8);
+ res.hi = _mm_slli_si128(a.hi,imm8);
+ return res;
+}
+
+
+#define _mm256_srli_si256(a,b) __mm256_srli_si256<b>(a)
+#define _mm256_slli_si256(a,b) __mm256_slli_si256<b>(a)
+
+
+
+#define _mm256_shuffle_ps(a,b,c) __mm256_shuffle_ps<c>(a,b)
+#define _mm256_shuffle_epi32(a,c) __mm256_shuffle_epi32<c>(a)
+
+
+AVX2NEON_ABI
+__m256i _mm256_set1_epi64x (long long a)
+{
+ __m256i res;
+ int64x2_t t = vdupq_n_s64(a);
+ res.lo = res.hi = __m128i(t);
+ return res;
+}
+
+
+AVX2NEON_ABI
+__m256 _mm256_permute2f128_ps (__m256 a, __m256 b, int imm8)
+{
+ __m256 res;
+ __m128 tmp;
+ switch (imm8 & 0x7)
+ {
+ case 0: tmp = a.lo; break;
+ case 1: tmp = a.hi; break;
+ case 2: tmp = b.lo; break;
+ case 3: tmp = b.hi; break;
+ }
+ if (imm8 & 0x8)
+ tmp = _mm_setzero_ps();
+
+
+
+ res.lo = tmp;
+ imm8 >>= 4;
+
+ switch (imm8 & 0x7)
+ {
+ case 0: tmp = a.lo; break;
+ case 1: tmp = a.hi; break;
+ case 2: tmp = b.lo; break;
+ case 3: tmp = b.hi; break;
+ }
+ if (imm8 & 0x8)
+ tmp = _mm_setzero_ps();
+
+ res.hi = tmp;
+
+ return res;
+}
+
+AVX2NEON_ABI
+__m256 _mm256_moveldup_ps (__m256 a)
+{
+ __m256 res;
+ res.lo = _mm_moveldup_ps(a.lo);
+ res.hi = _mm_moveldup_ps(a.hi);
+ return res;
+}
+
+AVX2NEON_ABI
+__m256 _mm256_movehdup_ps (__m256 a)
+{
+ __m256 res;
+ res.lo = _mm_movehdup_ps(a.lo);
+ res.hi = _mm_movehdup_ps(a.hi);
+ return res;
+}
+
+AVX2NEON_ABI
+__m256 _mm256_insertf128_ps (__m256 a, __m128 b, int imm8)
+{
+ __m256 res = a;
+ if (imm8 & 1) res.hi = b;
+ else res.lo = b;
+ return res;
+}
+
+
+AVX2NEON_ABI
+__m128 _mm256_extractf128_ps (__m256 a, const int imm8)
+{
+ if (imm8 & 1) return a.hi;
+ return a.lo;
+}
+
+
+AVX2NEON_ABI
+__m256d _mm256_movedup_pd (__m256d a)
+{
+ __m256d res;
+ res.lo = _mm_movedup_pd(a.lo);
+ res.hi = _mm_movedup_pd(a.hi);
+ return res;
+}
+
+AVX2NEON_ABI
+__m256i _mm256_abs_epi32(__m256i a)
+{
+ __m256i res;
+ res.lo = vabsq_s32(a.lo);
+ res.hi = vabsq_s32(a.hi);
+ return res;
+}
+
+UNARY_AVX_OP(__m256,_mm256_sqrt_ps,_mm_sqrt_ps)
+UNARY_AVX_OP(__m256,_mm256_rsqrt_ps,_mm_rsqrt_ps)
+UNARY_AVX_OP(__m256,_mm256_rcp_ps,_mm_rcp_ps)
+UNARY_AVX_OP(__m256,_mm256_floor_ps,vrndmq_f32)
+UNARY_AVX_OP(__m256,_mm256_ceil_ps,vrndpq_f32)
+UNARY_AVX_OP(__m256i,_mm256_abs_epi16,_mm_abs_epi16)
+
+
+BINARY_AVX_OP(__m256i,_mm256_add_epi8,_mm_add_epi8)
+BINARY_AVX_OP(__m256i,_mm256_adds_epi8,_mm_adds_epi8)
+
+BINARY_AVX_OP(__m256i,_mm256_hadd_epi32,_mm_hadd_epi32)
+BINARY_AVX_OP(__m256i,_mm256_add_epi32,_mm_add_epi32)
+BINARY_AVX_OP(__m256i,_mm256_sub_epi32,_mm_sub_epi32)
+BINARY_AVX_OP(__m256i,_mm256_mullo_epi32,_mm_mullo_epi32)
+
+BINARY_AVX_OP(__m256i,_mm256_min_epi32,_mm_min_epi32)
+BINARY_AVX_OP(__m256i,_mm256_max_epi32,_mm_max_epi32)
+BINARY_AVX_OP(__m256i,_mm256_min_epi16,_mm_min_epi16)
+BINARY_AVX_OP(__m256i,_mm256_max_epi16,_mm_max_epi16)
+BINARY_AVX_OP(__m256i,_mm256_min_epi8,_mm_min_epi8)
+BINARY_AVX_OP(__m256i,_mm256_max_epi8,_mm_max_epi8)
+BINARY_AVX_OP(__m256i,_mm256_min_epu16,_mm_min_epu16)
+BINARY_AVX_OP(__m256i,_mm256_max_epu16,_mm_max_epu16)
+BINARY_AVX_OP(__m256i,_mm256_min_epu8,_mm_min_epu8)
+BINARY_AVX_OP(__m256i,_mm256_max_epu8,_mm_max_epu8)
+BINARY_AVX_OP(__m256i,_mm256_sign_epi16,_mm_sign_epi16)
+
+
+BINARY_AVX_OP_CAST(__m256i,_mm256_min_epu32,vminq_u32,__m128i,uint32x4_t)
+BINARY_AVX_OP_CAST(__m256i,_mm256_max_epu32,vmaxq_u32,__m128i,uint32x4_t)
+
+BINARY_AVX_OP(__m256,_mm256_min_ps,_mm_min_ps)
+BINARY_AVX_OP(__m256,_mm256_max_ps,_mm_max_ps)
+
+BINARY_AVX_OP(__m256,_mm256_add_ps,_mm_add_ps)
+BINARY_AVX_OP(__m256,_mm256_mul_ps,_mm_mul_ps)
+BINARY_AVX_OP(__m256,_mm256_sub_ps,_mm_sub_ps)
+BINARY_AVX_OP(__m256,_mm256_div_ps,_mm_div_ps)
+
+BINARY_AVX_OP(__m256,_mm256_and_ps,_mm_and_ps)
+BINARY_AVX_OP(__m256,_mm256_andnot_ps,_mm_andnot_ps)
+BINARY_AVX_OP(__m256,_mm256_or_ps,_mm_or_ps)
+BINARY_AVX_OP(__m256,_mm256_xor_ps,_mm_xor_ps)
+
+BINARY_AVX_OP_CAST(__m256d,_mm256_and_pd,vandq_s64,float64x2_t,int64x2_t)
+BINARY_AVX_OP_CAST(__m256d,_mm256_or_pd,vorrq_s64,float64x2_t,int64x2_t)
+BINARY_AVX_OP_CAST(__m256d,_mm256_xor_pd,veorq_s64,float64x2_t,int64x2_t)
+
+
+
+BINARY_AVX_OP(__m256i,_mm256_and_si256,_mm_and_si128)
+BINARY_AVX_OP(__m256i,_mm256_andnot_si256,_mm_andnot_si128)
+BINARY_AVX_OP(__m256i,_mm256_or_si256,_mm_or_si128)
+BINARY_AVX_OP(__m256i,_mm256_xor_si256,_mm_xor_si128)
+
+
+BINARY_AVX_OP(__m256,_mm256_unpackhi_ps,_mm_unpackhi_ps)
+BINARY_AVX_OP(__m256,_mm256_unpacklo_ps,_mm_unpacklo_ps)
+TERNARY_AVX_OP(__m256,_mm256_blendv_ps,_mm_blendv_ps)
+TERNARY_AVX_OP(__m256i,_mm256_blendv_epi8,_mm_blendv_epi8)
+
+
+TERNARY_AVX_OP(__m256,_mm256_fmadd_ps,_mm_fmadd_ps)
+TERNARY_AVX_OP(__m256,_mm256_fnmadd_ps,_mm_fnmadd_ps)
+TERNARY_AVX_OP(__m256,_mm256_fmsub_ps,_mm_fmsub_ps)
+TERNARY_AVX_OP(__m256,_mm256_fnmsub_ps,_mm_fnmsub_ps)
+
+
+
+BINARY_AVX_OP(__m256i,_mm256_packs_epi32,_mm_packs_epi32)
+BINARY_AVX_OP(__m256i,_mm256_packs_epi16,_mm_packs_epi16)
+BINARY_AVX_OP(__m256i,_mm256_packus_epi32,_mm_packus_epi32)
+BINARY_AVX_OP(__m256i,_mm256_packus_epi16,_mm_packus_epi16)
+
+
+BINARY_AVX_OP(__m256i,_mm256_unpackhi_epi64,_mm_unpackhi_epi64)
+BINARY_AVX_OP(__m256i,_mm256_unpackhi_epi32,_mm_unpackhi_epi32)
+BINARY_AVX_OP(__m256i,_mm256_unpackhi_epi16,_mm_unpackhi_epi16)
+BINARY_AVX_OP(__m256i,_mm256_unpackhi_epi8,_mm_unpackhi_epi8)
+
+BINARY_AVX_OP(__m256i,_mm256_unpacklo_epi64,_mm_unpacklo_epi64)
+BINARY_AVX_OP(__m256i,_mm256_unpacklo_epi32,_mm_unpacklo_epi32)
+BINARY_AVX_OP(__m256i,_mm256_unpacklo_epi16,_mm_unpacklo_epi16)
+BINARY_AVX_OP(__m256i,_mm256_unpacklo_epi8,_mm_unpacklo_epi8)
+
+BINARY_AVX_OP(__m256i,_mm256_mulhrs_epi16,_mm_mulhrs_epi16)
+BINARY_AVX_OP(__m256i,_mm256_mulhi_epu16,_mm_mulhi_epu16)
+BINARY_AVX_OP(__m256i,_mm256_mulhi_epi16,_mm_mulhi_epi16)
+//BINARY_AVX_OP(__m256i,_mm256_mullo_epu16,_mm_mullo_epu16)
+BINARY_AVX_OP(__m256i,_mm256_mullo_epi16,_mm_mullo_epi16)
+
+BINARY_AVX_OP(__m256i,_mm256_subs_epu16,_mm_subs_epu16)
+BINARY_AVX_OP(__m256i,_mm256_adds_epu16,_mm_adds_epu16)
+BINARY_AVX_OP(__m256i,_mm256_subs_epi16,_mm_subs_epi16)
+BINARY_AVX_OP(__m256i,_mm256_adds_epi16,_mm_adds_epi16)
+BINARY_AVX_OP(__m256i,_mm256_sub_epi16,_mm_sub_epi16)
+BINARY_AVX_OP(__m256i,_mm256_add_epi16,_mm_add_epi16)
+BINARY_AVX_OP(__m256i,_mm256_sub_epi8,_mm_sub_epi8)
+
+
+BINARY_AVX_OP(__m256i,_mm256_hadd_epi16,_mm_hadd_epi16)
+BINARY_AVX_OP(__m256i,_mm256_hadds_epi16,_mm_hadds_epi16)
+
+
+
+
+BINARY_AVX_OP(__m256i,_mm256_cmpeq_epi32,_mm_cmpeq_epi32)
+BINARY_AVX_OP(__m256i,_mm256_cmpgt_epi32,_mm_cmpgt_epi32)
+
+BINARY_AVX_OP(__m256i,_mm256_cmpeq_epi8,_mm_cmpeq_epi8)
+BINARY_AVX_OP(__m256i,_mm256_cmpgt_epi8,_mm_cmpgt_epi8)
+
+BINARY_AVX_OP(__m256i,_mm256_cmpeq_epi16,_mm_cmpeq_epi16)
+BINARY_AVX_OP(__m256i,_mm256_cmpgt_epi16,_mm_cmpgt_epi16)
+
+
+BINARY_AVX_OP(__m256i,_mm256_shuffle_epi8,_mm_shuffle_epi8)
+
+
+BINARY_AVX_OP(__m256,_mm256_cmpeq_ps,_mm_cmpeq_ps)
+BINARY_AVX_OP(__m256,_mm256_cmpneq_ps,_mm_cmpneq_ps)
+BINARY_AVX_OP(__m256,_mm256_cmpnlt_ps,_mm_cmpnlt_ps)
+BINARY_AVX_OP(__m256,_mm256_cmpngt_ps,_mm_cmpngt_ps)
+BINARY_AVX_OP(__m256,_mm256_cmpge_ps,_mm_cmpge_ps)
+BINARY_AVX_OP(__m256,_mm256_cmpnge_ps,_mm_cmpnge_ps)
+BINARY_AVX_OP(__m256,_mm256_cmplt_ps,_mm_cmplt_ps)
+BINARY_AVX_OP(__m256,_mm256_cmple_ps,_mm_cmple_ps)
+BINARY_AVX_OP(__m256,_mm256_cmpgt_ps,_mm_cmpgt_ps)
+BINARY_AVX_OP(__m256,_mm256_cmpnle_ps,_mm_cmpnle_ps)
+
+
+AVX2NEON_ABI
+__m256i _mm256_cvtps_epi32 (__m256 a)
+{
+ __m256i res;
+ res.lo = _mm_cvtps_epi32(a.lo);
+ res.hi = _mm_cvtps_epi32(a.hi);
+ return res;
+
+}
+
+AVX2NEON_ABI
+__m256i _mm256_cvttps_epi32 (__m256 a)
+{
+ __m256i res;
+ res.lo = _mm_cvttps_epi32(a.lo);
+ res.hi = _mm_cvttps_epi32(a.hi);
+ return res;
+
+}
+
+AVX2NEON_ABI
+__m256 _mm256_loadu_ps (float const * mem_addr)
+{
+ __m256 res;
+ res.lo = *(__m128 *)(mem_addr + 0);
+ res.hi = *(__m128 *)(mem_addr + 4);
+ return res;
+}
+#define _mm256_load_ps _mm256_loadu_ps
+
+
+AVX2NEON_ABI
+int _mm256_testz_ps (const __m256& a, const __m256& b)
+{
+ __m256 t = a;
+ if (&a != &b)
+ t = _mm256_and_ps(a,b);
+
+ int32x4_t l = vshrq_n_s32(vreinterpretq_s32_m128(t.lo),31);
+ int32x4_t h = vshrq_n_s32(vreinterpretq_s32_m128(t.hi),31);
+ return vaddvq_s32(vaddq_s32(l,h)) == 0;
+}
+
+
+AVX2NEON_ABI
+__m256i _mm256_set_epi64x (int64_t e3, int64_t e2, int64_t e1, int64_t e0)
+{
+ __m256i res;
+ int64x2_t t0 = {e0,e1};
+ int64x2_t t1 = {e2,e3};
+ res.lo = __m128i(t0);
+ res.hi = __m128i(t1);
+ return res;
+}
+AVX2NEON_ABI
+__m256i _mm256_setr_epi64x (int64_t e0, int64_t e1, int64_t e2, int64_t e3)
+{
+ __m256i res;
+ int64x2_t t0 = {e0,e1};
+ int64x2_t t1 = {e2,e3};
+ res.lo = __m128i(t0);
+ res.hi = __m128i(t1);
+ return res;
+}
+
+
+
+AVX2NEON_ABI
+__m256i _mm256_set_epi8 (char e31, char e30, char e29, char e28, char e27, char e26, char e25, char e24, char e23, char e22, char e21, char e20, char e19, char e18, char e17, char e16, char e15, char e14, char e13, char e12, char e11, char e10, char e9, char e8, char e7, char e6, char e5, char e4, char e3, char e2, char e1, char e0)
+{
+ int8x16_t lo = {e0,e1,e2,e3,e4,e5,e6,e7,e8,e9,e10,e11,e12,e13,e14,e15};
+ int8x16_t hi = {e16,e17,e18,e19,e20,e21,e22,e23,e24,e25,e26,e27,e28,e29,e30,e31};
+ __m256i res;
+ res.lo = lo; res.hi = hi;
+ return res;
+}
+
+AVX2NEON_ABI
+__m256i _mm256_setr_epi8 (char e0, char e1, char e2, char e3, char e4, char e5, char e6, char e7, char e8, char e9, char e10, char e11, char e12, char e13, char e14, char e15, char e16, char e17, char e18, char e19, char e20, char e21, char e22, char e23, char e24, char e25, char e26, char e27, char e28, char e29, char e30, char e31)
+{
+ int8x16_t lo = {e0,e1,e2,e3,e4,e5,e6,e7,e8,e9,e10,e11,e12,e13,e14,e15};
+ int8x16_t hi = {e16,e17,e18,e19,e20,e21,e22,e23,e24,e25,e26,e27,e28,e29,e30,e31};
+ __m256i res;
+ res.lo = lo; res.hi = hi;
+ return res;
+}
+
+
+AVX2NEON_ABI
+__m256i _mm256_set_epi16 (short e15, short e14, short e13, short e12, short e11, short e10, short e9, short e8, short e7, short e6, short e5, short e4, short e3, short e2, short e1, short e0)
+{
+ int16x8_t lo = {e0,e1,e2,e3,e4,e5,e6,e7};
+ int16x8_t hi = {e8,e9,e10,e11,e12,e13,e14,e15};
+ __m256i res;
+ res.lo = lo; res.hi = hi;
+ return res;
+}
+
+AVX2NEON_ABI
+__m256i _mm256_setr_epi16 (short e0, short e1, short e2, short e3, short e4, short e5, short e6, short e7, short e8, short e9, short e10, short e11, short e12, short e13, short e14, short e15)
+{
+ int16x8_t lo = {e0,e1,e2,e3,e4,e5,e6,e7};
+ int16x8_t hi = {e8,e9,e10,e11,e12,e13,e14,e15};
+ __m256i res;
+ res.lo = lo; res.hi = hi;
+ return res;
+}
+
+
+
+
+AVX2NEON_ABI
+int _mm256_movemask_epi8(const __m256i& a)
+{
+ return (_mm_movemask_epi8(a.hi) << 16) | _mm_movemask_epi8(a.lo);
+}
+
+
+AVX2NEON_ABI
+int _mm256_testz_si256(const __m256i& a,const __m256i& b)
+{
+ uint32x4_t lo = vandq_u32(a.lo,b.lo);
+ uint32x4_t hi = vandq_u32(a.hi,b.hi);
+
+ return (vaddvq_u32(lo) + vaddvq_u32(hi)) == 0;
+}
+
+AVX2NEON_ABI
+__m256d _mm256_setzero_pd ()
+{
+ __m256d res;
+ res.lo = res.hi = vdupq_n_f64(0);
+ return res;
+}
+
+AVX2NEON_ABI
+int _mm256_movemask_pd (__m256d a)
+{
+ return (_mm_movemask_pd(a.hi) << 2) | _mm_movemask_pd(a.lo);
+}
+
+AVX2NEON_ABI
+__m256i _mm256_cmpeq_epi64 (__m256i a, __m256i b)
+{
+ __m256i res;
+ res.lo = _mm_cmpeq_epi64(a.lo, b.lo);
+ res.hi = _mm_cmpeq_epi64(a.hi, b.hi);
+ return res;
+}
+
+AVX2NEON_ABI
+__m256d _mm256_cmpeq_pd (__m256d a, __m256d b)
+{
+ __m256d res;
+ res.lo = _mm_cmpeq_pd(a.lo, b.lo);
+ res.hi = _mm_cmpeq_pd(a.hi, b.hi);
+ return res;
+}
+
+
+AVX2NEON_ABI
+int _mm256_testz_pd (const __m256d& a, const __m256d& b)
+{
+ __m256d t = a;
+
+ if (&a != &b)
+ t = _mm256_and_pd(a,b);
+
+ return _mm256_movemask_pd(t) == 0;
+}
+
+AVX2NEON_ABI
+__m256d _mm256_blendv_pd (__m256d a, __m256d b, __m256d mask)
+{
+ __m256d res;
+ res.lo = _mm_blendv_pd(a.lo, b.lo, mask.lo);
+ res.hi = _mm_blendv_pd(a.hi, b.hi, mask.hi);
+ return res;
+}
+
+template<int imm8>
+AVX2NEON_ABI
+__m256 __mm256_dp_ps (__m256 a, __m256 b)
+{
+ __m256 res;
+ res.lo = _mm_dp_ps(a.lo, b.lo, imm8);
+ res.hi = _mm_dp_ps(a.hi, b.hi, imm8);
+ return res;
+}
+
+#define _mm256_dp_ps(a,b,c) __mm256_dp_ps<c>(a,b)
+
+AVX2NEON_ABI
+double _mm256_permute4x64_pd_select(__m256d a, const int imm8)
+{
+ switch (imm8 & 3) {
+ case 0:
+ return ((float64x2_t)a.lo)[0];
+ case 1:
+ return ((float64x2_t)a.lo)[1];
+ case 2:
+ return ((float64x2_t)a.hi)[0];
+ case 3:
+ return ((float64x2_t)a.hi)[1];
+ }
+ __builtin_unreachable();
+ return 0;
+}
+
+AVX2NEON_ABI
+__m256d _mm256_permute4x64_pd (__m256d a, const int imm8)
+{
+ float64x2_t lo,hi;
+ lo[0] = _mm256_permute4x64_pd_select(a,imm8 >> 0);
+ lo[1] = _mm256_permute4x64_pd_select(a,imm8 >> 2);
+ hi[0] = _mm256_permute4x64_pd_select(a,imm8 >> 4);
+ hi[1] = _mm256_permute4x64_pd_select(a,imm8 >> 6);
+
+ __m256d res;
+ res.lo = lo; res.hi = hi;
+ return res;
+}
+
+AVX2NEON_ABI
+__m256i _mm256_insertf128_si256 (__m256i a, __m128i b, int imm8)
+{
+ return __m256i(_mm256_insertf128_ps((__m256)a,(__m128)b,imm8));
+}
+
+
+AVX2NEON_ABI
+__m256i _mm256_loadu_si256 (__m256i const * mem_addr)
+{
+ __m256i res;
+ res.lo = *(__m128i *)((int32_t *)mem_addr + 0);
+ res.hi = *(__m128i *)((int32_t *)mem_addr + 4);
+ return res;
+}
+
+#define _mm256_load_si256 _mm256_loadu_si256
+
+AVX2NEON_ABI
+void _mm256_storeu_ps (float * mem_addr, __m256 a)
+{
+ *(__m128 *)(mem_addr + 0) = a.lo;
+ *(__m128 *)(mem_addr + 4) = a.hi;
+}
+
+#define _mm256_store_ps _mm256_storeu_ps
+#define _mm256_stream_ps _mm256_storeu_ps
+
+
+AVX2NEON_ABI
+void _mm256_storeu_si256 (__m256i * mem_addr, __m256i a)
+{
+ *(__m128i *)((int32_t *)mem_addr + 0) = a.lo;
+ *(__m128i *)((int32_t *)mem_addr + 4) = a.hi;
+}
+
+#define _mm256_store_si256 _mm256_storeu_si256
+
+
+
+AVX2NEON_ABI
+__m256i _mm256_permute4x64_epi64 (const __m256i a, const int imm8)
+{
+ uint8x16x2_t tbl = {a.lo, a.hi};
+
+ uint8_t sz = sizeof(uint64_t);
+ uint8_t u64[4] = {
+ (uint8_t)(((imm8 >> 0) & 0x3) * sz),
+ (uint8_t)(((imm8 >> 2) & 0x3) * sz),
+ (uint8_t)(((imm8 >> 4) & 0x3) * sz),
+ (uint8_t)(((imm8 >> 6) & 0x3) * sz),
+ };
+
+ uint8x16_t idx_lo = {
+ // lo[0] bytes
+ (uint8_t)(u64[0]+0), (uint8_t)(u64[0]+1), (uint8_t)(u64[0]+2), (uint8_t)(u64[0]+3),
+ (uint8_t)(u64[0]+4), (uint8_t)(u64[0]+5), (uint8_t)(u64[0]+6), (uint8_t)(u64[0]+7),
+
+ // lo[1] bytes
+ (uint8_t)(u64[1]+0), (uint8_t)(u64[1]+1), (uint8_t)(u64[1]+2), (uint8_t)(u64[1]+3),
+ (uint8_t)(u64[1]+4), (uint8_t)(u64[1]+5), (uint8_t)(u64[1]+6), (uint8_t)(u64[1]+7),
+ };
+ uint8x16_t idx_hi = {
+ // hi[0] bytes
+ (uint8_t)(u64[2]+0), (uint8_t)(u64[2]+1), (uint8_t)(u64[2]+2), (uint8_t)(u64[2]+3),
+ (uint8_t)(u64[2]+4), (uint8_t)(u64[2]+5), (uint8_t)(u64[2]+6), (uint8_t)(u64[2]+7),
+
+ // hi[1] bytes
+ (uint8_t)(u64[3]+0), (uint8_t)(u64[3]+1), (uint8_t)(u64[3]+2), (uint8_t)(u64[3]+3),
+ (uint8_t)(u64[3]+4), (uint8_t)(u64[3]+5), (uint8_t)(u64[3]+6), (uint8_t)(u64[3]+7),
+ };
+
+ uint8x16_t lo = vqtbl2q_u8(tbl, idx_lo);
+ uint8x16_t hi = vqtbl2q_u8(tbl, idx_hi);
+
+ __m256i res;
+ res.lo = lo; res.hi = hi;
+ return res;
+}
+
+
+AVX2NEON_ABI
+__m256i _mm256_permute2x128_si256(const __m256i a,const __m256i b, const int imm8)
+{
+ return __m256i(_mm256_permute2f128_ps(__m256(a),__m256(b),imm8));
+}
+
+
+
+AVX2NEON_ABI
+__m256 _mm256_maskload_ps (float const * mem_addr, __m256i mask)
+{
+ __m256 res;
+ res.lo = _mm_maskload_ps(mem_addr,mask.lo);
+ res.hi = _mm_maskload_ps(mem_addr + 4,mask.hi);
+ return res;
+}
+
+
+AVX2NEON_ABI
+__m256i _mm256_cvtepu8_epi32 (__m128i a)
+{
+ uint8x16_t a_u8 = vreinterpretq_u8_m128i(a); // xxxx xxxx xxxx xxxx HHGG FFEE DDCC BBAA
+ uint16x8_t u16x8 = vmovl_u8(vget_low_u8(a_u8)); // 00HH 00GG 00FF 00EE 00DD 00CC 00BB 00AA
+ uint32x4_t lo = vmovl_u16(vget_low_u16(u16x8)); // 0000 00DD 0000 00CC 0000 00BB 0000 00AA
+ uint32x4_t hi = vmovl_high_u16(u16x8); // 0000 00HH 0000 00GG 0000 00FF 0000 00EE
+
+ __m256i res;
+ res.lo = lo; res.hi = hi;
+ return res;
+}
+
+
+AVX2NEON_ABI
+__m256i _mm256_cvtepi8_epi32 (__m128i a)
+{
+ int8x16_t a_s8 = vreinterpretq_s8_m128i(a); // xxxx xxxx xxxx xxxx HHGG FFEE DDCC BBAA
+ int16x8_t s16x8 = vmovl_s8(vget_low_s8(a_s8)); // ssHH ssGG ssFF ssEE ssDD ssCC ssBB ssAA
+ int32x4_t lo = vmovl_s16(vget_low_s16(s16x8)); // ssss ssDD ssss ssCC ssss ssBB ssss ssAA
+ int32x4_t hi = vmovl_high_s16(s16x8); // ssss ssHH ssss ssGG ssss ssFF ssss ssEE
+
+ __m256i res;
+ res.lo = lo; res.hi = hi;
+ return res;
+}
+
+
+AVX2NEON_ABI
+__m256i _mm256_cvtepi16_epi32 (__m128i a)
+{
+ int16x8_t a_s16 = vreinterpretq_s16_m128i(a); // HHHH GGGG FFFF EEEE DDDD CCCC BBBB AAAA
+ int32x4_t lo = vmovl_s16(vget_low_s16(a_s16)); // ssss DDDD ssss CCCC ssss BBBB ssss AAAA
+ int32x4_t hi = vmovl_high_s16(a_s16); // ssss HHHH ssss GGGG ssss FFFF ssss EEEE
+
+ __m256i res;
+ res.lo = lo; res.hi = hi;
+ return res;
+}
+
+
+
+AVX2NEON_ABI
+void _mm256_maskstore_epi32 (int* mem_addr, __m256i mask, __m256i a)
+{
+ _mm_maskstore_epi32(mem_addr,mask.lo,a.lo);
+ _mm_maskstore_epi32(mem_addr + 4,mask.hi,a.hi);
+}
+
+AVX2NEON_ABI
+__m256i _mm256_slli_epi64 (__m256i a, int imm8)
+{
+ __m256i res;
+ res.lo = _mm_slli_epi64(a.lo,imm8);
+ res.hi = _mm_slli_epi64(a.hi,imm8);
+ return res;
+}
+
+AVX2NEON_ABI
+__m256i _mm256_slli_epi32 (__m256i a, int imm8)
+{
+ __m256i res;
+ res.lo = _mm_slli_epi32(a.lo,imm8);
+ res.hi = _mm_slli_epi32(a.hi,imm8);
+ return res;
+}
+
+
+AVX2NEON_ABI
+__m256i __mm256_slli_epi16 (__m256i a, int imm8)
+{
+ __m256i res;
+ res.lo = _mm_slli_epi16(a.lo,imm8);
+ res.hi = _mm_slli_epi16(a.hi,imm8);
+ return res;
+}
+
+
+AVX2NEON_ABI
+__m256i _mm256_srli_epi32 (__m256i a, int imm8)
+{
+ __m256i res;
+ res.lo = _mm_srli_epi32(a.lo,imm8);
+ res.hi = _mm_srli_epi32(a.hi,imm8);
+ return res;
+}
+
+AVX2NEON_ABI
+__m256i __mm256_srli_epi16 (__m256i a, int imm8)
+{
+ __m256i res;
+ res.lo = _mm_srli_epi16(a.lo,imm8);
+ res.hi = _mm_srli_epi16(a.hi,imm8);
+ return res;
+}
+
+AVX2NEON_ABI
+__m256i _mm256_cvtepu16_epi32(__m128i a)
+{
+ __m256i res;
+ res.lo = vmovl_u16(vget_low_u16(a));
+ res.hi = vmovl_high_u16(a);
+ return res;
+}
+
+AVX2NEON_ABI
+__m256i _mm256_cvtepu8_epi16(__m128i a)
+{
+ __m256i res;
+ res.lo = vmovl_u8(vget_low_u8(a));
+ res.hi = vmovl_high_u8(a);
+ return res;
+}
+
+
+AVX2NEON_ABI
+__m256i _mm256_srai_epi32 (__m256i a, int imm8)
+{
+ __m256i res;
+ res.lo = _mm_srai_epi32(a.lo,imm8);
+ res.hi = _mm_srai_epi32(a.hi,imm8);
+ return res;
+}
+
+AVX2NEON_ABI
+__m256i _mm256_srai_epi16 (__m256i a, int imm8)
+{
+ __m256i res;
+ res.lo = _mm_srai_epi16(a.lo,imm8);
+ res.hi = _mm_srai_epi16(a.hi,imm8);
+ return res;
+}
+
+
+AVX2NEON_ABI
+__m256i _mm256_sllv_epi32 (__m256i a, __m256i count)
+{
+ __m256i res;
+ res.lo = vshlq_s32(a.lo,count.lo);
+ res.hi = vshlq_s32(a.hi,count.hi);
+ return res;
+
+}
+
+
+AVX2NEON_ABI
+__m256i _mm256_srav_epi32 (__m256i a, __m256i count)
+{
+ __m256i res;
+ res.lo = vshlq_s32(a.lo,vnegq_s32(count.lo));
+ res.hi = vshlq_s32(a.hi,vnegq_s32(count.hi));
+ return res;
+
+}
+
+AVX2NEON_ABI
+__m256i _mm256_srlv_epi32 (__m256i a, __m256i count)
+{
+ __m256i res;
+ res.lo = __m128i(vshlq_u32(uint32x4_t(a.lo),vnegq_s32(count.lo)));
+ res.hi = __m128i(vshlq_u32(uint32x4_t(a.hi),vnegq_s32(count.hi)));
+ return res;
+
+}
+
+
+AVX2NEON_ABI
+__m256i _mm256_permute2f128_si256 (__m256i a, __m256i b, int imm8)
+{
+ return __m256i(_mm256_permute2f128_ps(__m256(a),__m256(b),imm8));
+}
+
+
+AVX2NEON_ABI
+__m128i _mm256_extractf128_si256 (__m256i a, const int imm8)
+{
+ if (imm8 & 1) return a.hi;
+ return a.lo;
+}
+
+AVX2NEON_ABI
+__m256 _mm256_set1_ps(float x)
+{
+ __m256 res;
+ res.lo = res.hi = vdupq_n_f32(x);
+ return res;
+}
+
+AVX2NEON_ABI
+__m256 _mm256_set_ps (float e7, float e6, float e5, float e4, float e3, float e2, float e1, float e0)
+{
+ __m256 res;
+ res.lo = _mm_set_ps(e3,e2,e1,e0);
+ res.hi = _mm_set_ps(e7,e6,e5,e4);
+ return res;
+}
+
+AVX2NEON_ABI
+__m256 _mm256_broadcast_ps (__m128 const * mem_addr)
+{
+ __m256 res;
+ res.lo = res.hi = *mem_addr;
+ return res;
+}
+
+AVX2NEON_ABI
+__m256 _mm256_cvtepi32_ps (__m256i a)
+{
+ __m256 res;
+ res.lo = _mm_cvtepi32_ps(a.lo);
+ res.hi = _mm_cvtepi32_ps(a.hi);
+ return res;
+}
+AVX2NEON_ABI
+void _mm256_maskstore_ps (float * mem_addr, __m256i mask, __m256 a)
+{
+ uint32x4_t mask_lo = mask.lo;
+ uint32x4_t mask_hi = mask.hi;
+ float32x4_t a_lo = a.lo;
+ float32x4_t a_hi = a.hi;
+
+ for (int i=0;i<4;i++) {
+ if (mask_lo[i] & 0x80000000) mem_addr[i] = a_lo[i];
+ if (mask_hi[i] & 0x80000000) mem_addr[i+4] = a_hi[i];
+ }
+}
+
+AVX2NEON_ABI
+__m256d _mm256_andnot_pd (__m256d a, __m256d b)
+{
+ __m256d res;
+ res.lo = float64x2_t(_mm_andnot_ps(__m128(a.lo),__m128(b.lo)));
+ res.hi = float64x2_t(_mm_andnot_ps(__m128(a.hi),__m128(b.hi)));
+ return res;
+}
+
+AVX2NEON_ABI
+__m256 _mm256_blend_ps (__m256 a, __m256 b, const int imm8)
+{
+ __m256 res;
+ res.lo = _mm_blend_ps(a.lo,b.lo,imm8 & 0xf);
+ res.hi = _mm_blend_ps(a.hi,b.hi,imm8 >> 4);
+ return res;
+
+}
+
+
+AVX2NEON_ABI
+__m256i _mm256_blend_epi32 (__m256i a, __m256i b, const int imm8)
+{
+ return __m256i(_mm256_blend_ps(__m256(a),__m256(b),imm8));
+
+}
+
+AVX2NEON_ABI
+__m256i _mm256_blend_epi16 (__m256i a, __m256i b, const int imm8)
+{
+ __m256i res;
+ res.lo = _mm_blend_epi16(a.lo,b.lo,imm8);
+ res.hi = _mm_blend_epi16(a.hi,b.hi,imm8);
+ return res;
+}
+
+
+
+AVX2NEON_ABI
+__m256i _mm256_i32gather_epi32 (int const* base_addr, __m256i vindex, const int scale)
+{
+ int32x4_t vindex_lo = vindex.lo;
+ int32x4_t vindex_hi = vindex.hi;
+ int32x4_t lo,hi;
+ for (int i=0;i<4;i++)
+ {
+ lo[i] = *(int32_t *)((char *) base_addr + (vindex_lo[i]*scale));
+ hi[i] = *(int32_t *)((char *) base_addr + (vindex_hi[i]*scale));
+ }
+
+ __m256i res;
+ res.lo = lo; res.hi = hi;
+ return res;
+}
+
+
+AVX2NEON_ABI
+__m256i _mm256_mask_i32gather_epi32 (__m256i src, int const* base_addr, __m256i vindex, __m256i mask, const int scale)
+{
+ uint32x4_t mask_lo = mask.lo;
+ uint32x4_t mask_hi = mask.hi;
+ int32x4_t vindex_lo = vindex.lo;
+ int32x4_t vindex_hi = vindex.hi;
+ int32x4_t lo,hi;
+ lo = hi = _mm_setzero_si128();
+ for (int i=0;i<4;i++)
+ {
+ if (mask_lo[i] >> 31) lo[i] = *(int32_t *)((char *) base_addr + (vindex_lo[i]*scale));
+ if (mask_hi[i] >> 31) hi[i] = *(int32_t *)((char *) base_addr + (vindex_hi[i]*scale));
+ }
+
+ __m256i res;
+ res.lo = lo; res.hi = hi;
+ return res;
+}
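avx2neon.h above models every 256-bit type as a pair of 128-bit halves and builds most AVX/AVX2 intrinsics by applying the matching SSE/NEON operation to .lo and .hi, as the UNARY/BINARY/TERNARY_AVX_OP macros make explicit. A minimal standalone rendering of that pattern (hypothetical names, so it does not collide with the header's own types):

    // Sketch of the "two 128-bit halves" emulation pattern used by avx2neon.h.
    #include <arm_neon.h>

    struct wide8f { float32x4_t lo, hi; };                  // stands in for __m256

    static inline wide8f wide8f_add(wide8f a, wide8f b) {
        return { vaddq_f32(a.lo, b.lo), vaddq_f32(a.hi, b.hi) };
    }

    static inline int wide8f_movemask(wide8f a) {
        // Gather the 8 sign bits: low half into bits 0..3, high half into bits 4..7,
        // matching how _mm256_movemask_ps above combines the two 4-bit masks.
        const uint32x4_t slo = vshrq_n_u32(vreinterpretq_u32_f32(a.lo), 31);
        const uint32x4_t shi = vshrq_n_u32(vreinterpretq_u32_f32(a.hi), 31);
        const int32x4_t  shifts = {0, 1, 2, 3};
        const int mlo = (int)vaddvq_u32(vshlq_u32(slo, shifts));
        const int mhi = (int)vaddvq_u32(vshlq_u32(shi, shifts));
        return (mhi << 4) | mlo;
    }

The cost is two NEON operations per emulated intrinsic plus the occasional cross-half shuffle, which is the trade-off for keeping AVX-shaped code paths compilable on aarch64.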
diff --git a/thirdparty/embree/common/simd/arm/emulation.h b/thirdparty/embree/common/simd/arm/emulation.h
index 1c3875fb27..4327298019 100644
--- a/thirdparty/embree/common/simd/arm/emulation.h
+++ b/thirdparty/embree/common/simd/arm/emulation.h
@@ -11,33 +11,28 @@
#include "sse2neon.h"
-__forceinline __m128 _mm_fmsub_ps(__m128 a, __m128 b, __m128 c) {
- __m128 neg_c = vreinterpretq_m128_f32(vnegq_f32(vreinterpretq_f32_m128(c)));
- return _mm_fmadd_ps(a, b, neg_c);
-}
-
-__forceinline __m128 _mm_fnmadd_ps(__m128 a, __m128 b, __m128 c) {
-#if defined(__aarch64__)
- return vreinterpretq_m128_f32(vfmsq_f32(vreinterpretq_f32_m128(c),
- vreinterpretq_f32_m128(b),
- vreinterpretq_f32_m128(a)));
-#else
- return _mm_sub_ps(c, _mm_mul_ps(a, b));
-#endif
-}
+__forceinline __m128 _mm_abs_ps(__m128 a) { return vabsq_f32(a); }
+
+__forceinline __m128 _mm_fmadd_ps (__m128 a, __m128 b, __m128 c) { return vfmaq_f32(c, a, b); }
+__forceinline __m128 _mm_fnmadd_ps(__m128 a, __m128 b, __m128 c) { return vfmsq_f32(c, a, b); }
+__forceinline __m128 _mm_fnmsub_ps(__m128 a, __m128 b, __m128 c) { return vnegq_f32(vfmaq_f32(c, a, b)); }
+__forceinline __m128 _mm_fmsub_ps (__m128 a, __m128 b, __m128 c) { return vnegq_f32(vfmsq_f32(c, a, b)); }
-__forceinline __m128 _mm_fnmsub_ps(__m128 a, __m128 b, __m128 c) {
- return vreinterpretq_m128_f32(vnegq_f32(vreinterpretq_f32_m128(_mm_fmadd_ps(a,b,c))));
+__forceinline __m128 _mm_broadcast_ss (float const * mem_addr)
+{
+ return vdupq_n_f32(*mem_addr);
}
+// AVX2 emulation leverages Intel FMA defs above. Include after them.
+#include "avx2neon.h"
/* Dummy defines for floating point control */
#define _MM_MASK_MASK 0x1f80
#define _MM_MASK_DIV_ZERO 0x200
-#define _MM_FLUSH_ZERO_ON 0x8000
+// #define _MM_FLUSH_ZERO_ON 0x8000
#define _MM_MASK_DENORM 0x100
#define _MM_SET_EXCEPTION_MASK(x)
-#define _MM_SET_FLUSH_ZERO_MODE(x)
+// #define _MM_SET_FLUSH_ZERO_MODE(x)
__forceinline int _mm_getcsr()
{
@@ -48,3 +43,43 @@ __forceinline void _mm_mfence()
{
__sync_synchronize();
}
+
+__forceinline __m128i _mm_load4epu8_epi32(__m128i *ptr)
+{
+ uint8x8_t t0 = vld1_u8((uint8_t*)ptr);
+ uint16x8_t t1 = vmovl_u8(t0);
+ uint32x4_t t2 = vmovl_u16(vget_low_u16(t1));
+ return vreinterpretq_s32_u32(t2);
+}
+
+__forceinline __m128i _mm_load4epu16_epi32(__m128i *ptr)
+{
+ uint16x8_t t0 = vld1q_u16((uint16_t*)ptr);
+ uint32x4_t t1 = vmovl_u16(vget_low_u16(t0));
+ return vreinterpretq_s32_u32(t1);
+}
+
+__forceinline __m128i _mm_load4epi8_f32(__m128i *ptr)
+{
+ int8x8_t t0 = vld1_s8((int8_t*)ptr);
+ int16x8_t t1 = vmovl_s8(t0);
+ int32x4_t t2 = vmovl_s16(vget_low_s16(t1));
+ float32x4_t t3 = vcvtq_f32_s32(t2);
+ return vreinterpretq_s32_f32(t3);
+}
+
+__forceinline __m128i _mm_load4epu8_f32(__m128i *ptr)
+{
+ uint8x8_t t0 = vld1_u8((uint8_t*)ptr);
+ uint16x8_t t1 = vmovl_u8(t0);
+ uint32x4_t t2 = vmovl_u16(vget_low_u16(t1));
+ return vreinterpretq_s32_u32(t2);
+}
+
+__forceinline __m128i _mm_load4epi16_f32(__m128i *ptr)
+{
+ int16x8_t t0 = vld1q_s16((int16_t*)ptr);
+ int32x4_t t1 = vmovl_s16(vget_low_s16(t0));
+ float32x4_t t2 = vcvtq_f32_s32(t1);
+ return vreinterpretq_s32_f32(t2);
+}
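The rewritten shims in emulation.h map the four x86 fused multiply-add flavors directly onto NEON's vfmaq_f32/vfmsq_f32, which take the accumulator as the first operand. A compact restatement of the sign conventions (aarch64 only; the free-function names are illustrative):

    // Sketch: sign conventions behind the _mm_fmadd/_mm_fmsub/_mm_fnmadd/_mm_fnmsub shims.
    // vfmaq_f32(c, a, b) = c + a*b;  vfmsq_f32(c, a, b) = c - a*b.
    #include <arm_neon.h>

    float32x4_t fmadd (float32x4_t a, float32x4_t b, float32x4_t c) { return vfmaq_f32(c, a, b); }             //  a*b + c
    float32x4_t fmsub (float32x4_t a, float32x4_t b, float32x4_t c) { return vnegq_f32(vfmsq_f32(c, a, b)); }  //  a*b - c
    float32x4_t fnmadd(float32x4_t a, float32x4_t b, float32x4_t c) { return vfmsq_f32(c, a, b); }             // -a*b + c
    float32x4_t fnmsub(float32x4_t a, float32x4_t b, float32x4_t c) { return vnegq_f32(vfmaq_f32(c, a, b)); }  // -a*b - c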
diff --git a/thirdparty/embree/common/simd/arm/sse2neon.h b/thirdparty/embree/common/simd/arm/sse2neon.h
index 7eb25cf2c5..43416662d7 100644
--- a/thirdparty/embree/common/simd/arm/sse2neon.h
+++ b/thirdparty/embree/common/simd/arm/sse2neon.h
@@ -52,7 +52,7 @@
/* Enable precise implementation of math operations
* This would slow down the computation a bit, but gives consistent result with
- * x86 SSE2. (e.g. would solve a hole or NaN pixel in the rendering result)
+ * x86 SSE. (e.g. would solve a hole or NaN pixel in the rendering result)
*/
/* _mm_min_ps and _mm_max_ps */
#ifndef SSE2NEON_PRECISE_MINMAX
@@ -66,36 +66,29 @@
#ifndef SSE2NEON_PRECISE_SQRT
#define SSE2NEON_PRECISE_SQRT (0)
#endif
-#ifndef SSE2NEON_PRECISE_RSQRT
-#define SSE2NEON_PRECISE_RSQRT (0)
+/* _mm_dp_pd */
+#ifndef SSE2NEON_PRECISE_DP
+#define SSE2NEON_PRECISE_DP (0)
#endif
+/* compiler specific definitions */
#if defined(__GNUC__) || defined(__clang__)
#pragma push_macro("FORCE_INLINE")
#pragma push_macro("ALIGN_STRUCT")
#define FORCE_INLINE static inline __attribute__((always_inline))
#define ALIGN_STRUCT(x) __attribute__((aligned(x)))
-#ifndef likely
-#define likely(x) __builtin_expect(!!(x), 1)
-#endif
-#ifndef unlikely
-#define unlikely(x) __builtin_expect(!!(x), 0)
-#endif
-#else
-#error "Macro name collisions may happen with unsupported compiler."
-#ifdef FORCE_INLINE
-#undef FORCE_INLINE
-#endif
+#define _sse2neon_likely(x) __builtin_expect(!!(x), 1)
+#define _sse2neon_unlikely(x) __builtin_expect(!!(x), 0)
+#else /* non-GNU / non-clang compilers */
+#warning "Macro name collisions may happen with unsupported compiler."
+#ifndef FORCE_INLINE
#define FORCE_INLINE static inline
+#endif
#ifndef ALIGN_STRUCT
#define ALIGN_STRUCT(x) __declspec(align(x))
#endif
-#endif
-#ifndef likely
-#define likely(x) (x)
-#endif
-#ifndef unlikely
-#define unlikely(x) (x)
+#define _sse2neon_likely(x) (x)
+#define _sse2neon_unlikely(x) (x)
#endif
#include <stdint.h>
@@ -155,6 +148,14 @@
 * argument "a" of mm_shuffle_ps that will be placed in fp1 of result.
* fp0 is the same for fp0 of result.
*/
+#if defined(__aarch64__)
+#define _MN_SHUFFLE(fp3,fp2,fp1,fp0) ( (uint8x16_t){ (((fp3)*4)+0), (((fp3)*4)+1), (((fp3)*4)+2), (((fp3)*4)+3), (((fp2)*4)+0), (((fp2)*4)+1), (((fp2)*4)+\
+2), (((fp2)*4)+3), (((fp1)*4)+0), (((fp1)*4)+1), (((fp1)*4)+2), (((fp1)*4)+3), (((fp0)*4)+0), (((fp0)*4)+1), (((fp0)*4)+2), (((fp0)*4)+3) } )
+#define _MF_SHUFFLE(fp3,fp2,fp1,fp0) ( (uint8x16_t){ (((fp3)*4)+0), (((fp3)*4)+1), (((fp3)*4)+2), (((fp3)*4)+3), (((fp2)*4)+0), (((fp2)*4)+1), (((fp2)*4)+\
+2), (((fp2)*4)+3), (((fp1)*4)+16+0), (((fp1)*4)+16+1), (((fp1)*4)+16+2), (((fp1)*4)+16+3), (((fp0)*4)+16+0), (((fp0)*4)+16+1), (((fp0)*4)+16+2), (((fp0)*\
+4)+16+3) } )
+#endif
+
#define _MM_SHUFFLE(fp3, fp2, fp1, fp0) \
(((fp3) << 6) | ((fp2) << 4) | ((fp1) << 2) | ((fp0)))
@@ -169,6 +170,14 @@
#define _MM_ROUND_DOWN 0x2000
#define _MM_ROUND_UP 0x4000
#define _MM_ROUND_TOWARD_ZERO 0x6000
+/* Flush zero mode macros. */
+#define _MM_FLUSH_ZERO_MASK 0x8000
+#define _MM_FLUSH_ZERO_ON 0x8000
+#define _MM_FLUSH_ZERO_OFF 0x0000
+/* Denormals are zeros mode macros. */
+#define _MM_DENORMALS_ZERO_MASK 0x0040
+#define _MM_DENORMALS_ZERO_ON 0x0040
+#define _MM_DENORMALS_ZERO_OFF 0x0000
/* indicate immediate constant argument in a given range */
#define __constrange(a, b) const
@@ -189,7 +198,10 @@ typedef float64x2_t __m128d; /* 128-bit vector containing 2 doubles */
#else
typedef float32x4_t __m128d;
#endif
-typedef int64x2_t __m128i; /* 128-bit vector containing integers */
+// Note: upstream sse2neon declares __m128i as int64x2_t. However, there's
+// many places within embree that assume __m128i can be indexed as a
+// 4 element u32.
+typedef int32x4_t __m128i; /* 128-bit vector containing integers */
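A minimal sketch of the lane access this typedef enables, assuming a translation unit that includes the full header (GCC and Clang allow subscripting NEON vector types):

static inline int demo_m128i_lane_access(void)
{
    __m128i v = _mm_set_epi32(3, 2, 1, 0);  // lanes, low to high: 0, 1, 2, 3
    return v[2];                            // 2 -- four 32-bit lanes; int64x2_t would expose only two
}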
/* type-safe casting between types */
@@ -221,28 +233,28 @@ typedef int64x2_t __m128i; /* 128-bit vector containing integers */
#define vreinterpretq_s32_m128(x) vreinterpretq_s32_f32(x)
#define vreinterpretq_s64_m128(x) vreinterpretq_s64_f32(x)
-#define vreinterpretq_m128i_s8(x) vreinterpretq_s64_s8(x)
-#define vreinterpretq_m128i_s16(x) vreinterpretq_s64_s16(x)
-#define vreinterpretq_m128i_s32(x) vreinterpretq_s64_s32(x)
-#define vreinterpretq_m128i_s64(x) (x)
+#define vreinterpretq_m128i_s8(x) vreinterpretq_s32_s8(x)
+#define vreinterpretq_m128i_s16(x) vreinterpretq_s32_s16(x)
+#define vreinterpretq_m128i_s32(x) (x)
+#define vreinterpretq_m128i_s64(x) vreinterpretq_s32_s64(x)
-#define vreinterpretq_m128i_u8(x) vreinterpretq_s64_u8(x)
-#define vreinterpretq_m128i_u16(x) vreinterpretq_s64_u16(x)
-#define vreinterpretq_m128i_u32(x) vreinterpretq_s64_u32(x)
-#define vreinterpretq_m128i_u64(x) vreinterpretq_s64_u64(x)
+#define vreinterpretq_m128i_u8(x) vreinterpretq_s32_u8(x)
+#define vreinterpretq_m128i_u16(x) vreinterpretq_s32_u16(x)
+#define vreinterpretq_m128i_u32(x) vreinterpretq_s32_u32(x)
+#define vreinterpretq_m128i_u64(x) vreinterpretq_s32_u64(x)
-#define vreinterpretq_f32_m128i(x) vreinterpretq_f32_s64(x)
-#define vreinterpretq_f64_m128i(x) vreinterpretq_f64_s64(x)
+#define vreinterpretq_f32_m128i(x) vreinterpretq_f32_s32(x)
+#define vreinterpretq_f64_m128i(x) vreinterpretq_f64_s32(x)
-#define vreinterpretq_s8_m128i(x) vreinterpretq_s8_s64(x)
-#define vreinterpretq_s16_m128i(x) vreinterpretq_s16_s64(x)
-#define vreinterpretq_s32_m128i(x) vreinterpretq_s32_s64(x)
-#define vreinterpretq_s64_m128i(x) (x)
+#define vreinterpretq_s8_m128i(x) vreinterpretq_s8_s32(x)
+#define vreinterpretq_s16_m128i(x) vreinterpretq_s16_s32(x)
+#define vreinterpretq_s32_m128i(x) (x)
+#define vreinterpretq_s64_m128i(x) vreinterpretq_s64_s32(x)
-#define vreinterpretq_u8_m128i(x) vreinterpretq_u8_s64(x)
-#define vreinterpretq_u16_m128i(x) vreinterpretq_u16_s64(x)
-#define vreinterpretq_u32_m128i(x) vreinterpretq_u32_s64(x)
-#define vreinterpretq_u64_m128i(x) vreinterpretq_u64_s64(x)
+#define vreinterpretq_u8_m128i(x) vreinterpretq_u8_s32(x)
+#define vreinterpretq_u16_m128i(x) vreinterpretq_u16_s32(x)
+#define vreinterpretq_u32_m128i(x) vreinterpretq_u32_s32(x)
+#define vreinterpretq_u64_m128i(x) vreinterpretq_u64_s32(x)
#define vreinterpret_m64_s8(x) vreinterpret_s64_s8(x)
#define vreinterpret_m64_s16(x) vreinterpret_s64_s16(x)
@@ -281,6 +293,7 @@ typedef int64x2_t __m128i; /* 128-bit vector containing integers */
#define vreinterpretq_s64_m128d(x) vreinterpretq_s64_f64(x)
+#define vreinterpretq_u32_m128d(x) vreinterpretq_u32_f64(x)
#define vreinterpretq_u64_m128d(x) vreinterpretq_u64_f64(x)
#define vreinterpretq_f64_m128d(x) (x)
@@ -303,10 +316,10 @@ typedef int64x2_t __m128i; /* 128-bit vector containing integers */
#endif
// A struct is defined in this header file called 'SIMDVec' which can be used
-// by applications which attempt to access the contents of an _m128 struct
+// by applications which attempt to access the contents of an __m128 struct
// directly. It is important to note that accessing the __m128 struct directly
// is bad coding practice by Microsoft: @see:
-// https://msdn.microsoft.com/en-us/library/ayeb3ayc.aspx
+// https://docs.microsoft.com/en-us/cpp/cpp/m128
//
// However, some legacy source code may try to access the contents of an __m128
// struct directly so the developer can use the SIMDVec as an alias for it. Any
@@ -342,13 +355,48 @@ typedef union ALIGN_STRUCT(16) SIMDVec {
#define vreinterpretq_nth_u32_m128i(x, n) (((SIMDVec *) &x)->m128_u32[n])
#define vreinterpretq_nth_u8_m128i(x, n) (((SIMDVec *) &x)->m128_u8[n])
+/* SSE macros */
+#define _MM_GET_FLUSH_ZERO_MODE _sse2neon_mm_get_flush_zero_mode
+#define _MM_SET_FLUSH_ZERO_MODE _sse2neon_mm_set_flush_zero_mode
+#define _MM_GET_DENORMALS_ZERO_MODE _sse2neon_mm_get_denormals_zero_mode
+#define _MM_SET_DENORMALS_ZERO_MODE _sse2neon_mm_set_denormals_zero_mode
+
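Usage sketch for the mode macros above, assuming the _sse2neon_* helpers defined further down in this header; the function name is illustrative:

// Flush denormal inputs and results to zero before a numerics-heavy loop.
static inline void demo_enable_ftz_daz(void)
{
    _MM_SET_FLUSH_ZERO_MODE(_MM_FLUSH_ZERO_ON);
    _MM_SET_DENORMALS_ZERO_MODE(_MM_DENORMALS_ZERO_ON);
}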
+// Function declaration
+// SSE
+FORCE_INLINE unsigned int _MM_GET_ROUNDING_MODE();
+FORCE_INLINE __m128 _mm_move_ss(__m128, __m128);
+FORCE_INLINE __m128 _mm_or_ps(__m128, __m128);
+FORCE_INLINE __m128 _mm_set_ps1(float);
+FORCE_INLINE __m128 _mm_setzero_ps(void);
+// SSE2
+FORCE_INLINE __m128i _mm_and_si128(__m128i, __m128i);
+FORCE_INLINE __m128i _mm_castps_si128(__m128);
+FORCE_INLINE __m128i _mm_cmpeq_epi32(__m128i, __m128i);
+FORCE_INLINE __m128i _mm_cvtps_epi32(__m128);
+FORCE_INLINE __m128d _mm_move_sd(__m128d, __m128d);
+FORCE_INLINE __m128i _mm_or_si128(__m128i, __m128i);
+FORCE_INLINE __m128i _mm_set_epi32(int, int, int, int);
+FORCE_INLINE __m128i _mm_set_epi64x(int64_t, int64_t);
+FORCE_INLINE __m128d _mm_set_pd(double, double);
+FORCE_INLINE __m128i _mm_set1_epi32(int);
+FORCE_INLINE __m128i _mm_setzero_si128();
+// SSE4.1
+FORCE_INLINE __m128d _mm_ceil_pd(__m128d);
+FORCE_INLINE __m128 _mm_ceil_ps(__m128);
+FORCE_INLINE __m128d _mm_floor_pd(__m128d);
+FORCE_INLINE __m128 _mm_floor_ps(__m128);
+FORCE_INLINE __m128d _mm_round_pd(__m128d, int);
+FORCE_INLINE __m128 _mm_round_ps(__m128, int);
+// SSE4.2
+FORCE_INLINE uint32_t _mm_crc32_u8(uint32_t, uint8_t);
+
/* Backwards compatibility for compilers with lack of specific type support */
// Older gcc does not define vld1q_u8_x4 type
-#if defined(__GNUC__) && !defined(__clang__) && \
- ((__GNUC__ == 10 && (__GNUC_MINOR__ <= 1)) || \
- (__GNUC__ == 9 && (__GNUC_MINOR__ <= 3)) || \
- (__GNUC__ == 8 && (__GNUC_MINOR__ <= 4)) || __GNUC__ <= 7)
+#if defined(__GNUC__) && !defined(__clang__) && \
+ ((__GNUC__ <= 10 && defined(__arm__)) || \
+ (__GNUC__ == 10 && __GNUC_MINOR__ < 3 && defined(__aarch64__)) || \
+ (__GNUC__ <= 9 && defined(__aarch64__)))
FORCE_INLINE uint8x16x4_t _sse2neon_vld1q_u8_x4(const uint8_t *p)
{
uint8x16x4_t ret;
@@ -443,8 +491,6 @@ FORCE_INLINE uint8x16x4_t _sse2neon_vld1q_u8_x4(const uint8_t *p)
+------+------+------+------+------+------+-------------+
*/
-/* Set/get methods */
-
/* Constants for use with _mm_prefetch. */
enum _mm_hint {
_MM_HINT_NTA = 0, /* load data to L1 and L2 cache, mark it as NTA */
@@ -457,1098 +503,1568 @@ enum _mm_hint {
_MM_HINT_ET2 = 7 /* exclusive version of _MM_HINT_T2 */
};
-// Loads one cache line of data from address p to a location closer to the
-// processor. https://msdn.microsoft.com/en-us/library/84szxsww(v=vs.100).aspx
-FORCE_INLINE void _mm_prefetch(const void *p, int i)
+// The bit field mapping to the FPCR(floating-point control register)
+typedef struct {
+ uint16_t res0;
+ uint8_t res1 : 6;
+ uint8_t bit22 : 1;
+ uint8_t bit23 : 1;
+ uint8_t bit24 : 1;
+ uint8_t res2 : 7;
+#if defined(__aarch64__)
+ uint32_t res3;
+#endif
+} fpcr_bitfield;
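A hedged sketch (AArch64 only) of how this bitfield is typically overlaid on FPCR; the real helpers appear later in the header, and the function name here is illustrative:

static inline void demo_set_flush_to_zero(int on)
{
    union {
        fpcr_bitfield field;
        uint64_t value;
    } r;
    __asm__ __volatile__("mrs %0, FPCR" : "=r"(r.value));   // read the control register
    r.field.bit24 = on ? 1 : 0;                              // FPCR.FZ lives in bit 24
    __asm__ __volatile__("msr FPCR, %0" : : "r"(r.value));  // write it back
}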
+
+// Takes the upper 64 bits of a and places it in the low end of the result
+// Takes the lower 64 bits of b and places it into the high end of the result.
+FORCE_INLINE __m128 _mm_shuffle_ps_1032(__m128 a, __m128 b)
{
- (void) i;
- __builtin_prefetch(p);
+ float32x2_t a32 = vget_high_f32(vreinterpretq_f32_m128(a));
+ float32x2_t b10 = vget_low_f32(vreinterpretq_f32_m128(b));
+ return vreinterpretq_m128_f32(vcombine_f32(a32, b10));
}
-// Pause the processor. This is typically used in spin-wait loops and depending
-// on the x86 processor typical values are in the 40-100 cycle range. The
-// 'yield' instruction isn't a good fit beacuse it's effectively a nop on most
-// Arm cores. Experience with several databases has shown has shown an 'isb' is
-// a reasonable approximation.
-FORCE_INLINE void _mm_pause()
+// takes the lower two 32-bit values from a and swaps them and places in high
+// end of result takes the higher two 32 bit values from b and swaps them and
+// places in low end of result.
+FORCE_INLINE __m128 _mm_shuffle_ps_2301(__m128 a, __m128 b)
{
- __asm__ __volatile__("isb\n");
+ float32x2_t a01 = vrev64_f32(vget_low_f32(vreinterpretq_f32_m128(a)));
+ float32x2_t b23 = vrev64_f32(vget_high_f32(vreinterpretq_f32_m128(b)));
+ return vreinterpretq_m128_f32(vcombine_f32(a01, b23));
}
-// Copy the lower single-precision (32-bit) floating-point element of a to dst.
-//
-// dst[31:0] := a[31:0]
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtss_f32
-FORCE_INLINE float _mm_cvtss_f32(__m128 a)
+FORCE_INLINE __m128 _mm_shuffle_ps_0321(__m128 a, __m128 b)
{
- return vgetq_lane_f32(vreinterpretq_f32_m128(a), 0);
+ float32x2_t a21 = vget_high_f32(
+ vextq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a), 3));
+ float32x2_t b03 = vget_low_f32(
+ vextq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b), 3));
+ return vreinterpretq_m128_f32(vcombine_f32(a21, b03));
}
-// Convert the lower single-precision (32-bit) floating-point element in b to a
-// double-precision (64-bit) floating-point element, store the result in the
-// lower element of dst, and copy the upper element from a to the upper element
-// of dst.
+FORCE_INLINE __m128 _mm_shuffle_ps_2103(__m128 a, __m128 b)
+{
+ float32x2_t a03 = vget_low_f32(
+ vextq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a), 3));
+ float32x2_t b21 = vget_high_f32(
+ vextq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b), 3));
+ return vreinterpretq_m128_f32(vcombine_f32(a03, b21));
+}
+
+FORCE_INLINE __m128 _mm_shuffle_ps_1010(__m128 a, __m128 b)
+{
+ float32x2_t a10 = vget_low_f32(vreinterpretq_f32_m128(a));
+ float32x2_t b10 = vget_low_f32(vreinterpretq_f32_m128(b));
+ return vreinterpretq_m128_f32(vcombine_f32(a10, b10));
+}
+
+FORCE_INLINE __m128 _mm_shuffle_ps_1001(__m128 a, __m128 b)
+{
+ float32x2_t a01 = vrev64_f32(vget_low_f32(vreinterpretq_f32_m128(a)));
+ float32x2_t b10 = vget_low_f32(vreinterpretq_f32_m128(b));
+ return vreinterpretq_m128_f32(vcombine_f32(a01, b10));
+}
+
+FORCE_INLINE __m128 _mm_shuffle_ps_0101(__m128 a, __m128 b)
+{
+ float32x2_t a01 = vrev64_f32(vget_low_f32(vreinterpretq_f32_m128(a)));
+ float32x2_t b01 = vrev64_f32(vget_low_f32(vreinterpretq_f32_m128(b)));
+ return vreinterpretq_m128_f32(vcombine_f32(a01, b01));
+}
+
+// keeps the low 64 bits of b in the low and puts the high 64 bits of a in the
+// high
+FORCE_INLINE __m128 _mm_shuffle_ps_3210(__m128 a, __m128 b)
+{
+ float32x2_t a10 = vget_low_f32(vreinterpretq_f32_m128(a));
+ float32x2_t b32 = vget_high_f32(vreinterpretq_f32_m128(b));
+ return vreinterpretq_m128_f32(vcombine_f32(a10, b32));
+}
+
+FORCE_INLINE __m128 _mm_shuffle_ps_0011(__m128 a, __m128 b)
+{
+ float32x2_t a11 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(a)), 1);
+ float32x2_t b00 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(b)), 0);
+ return vreinterpretq_m128_f32(vcombine_f32(a11, b00));
+}
+
+FORCE_INLINE __m128 _mm_shuffle_ps_0022(__m128 a, __m128 b)
+{
+ float32x2_t a22 =
+ vdup_lane_f32(vget_high_f32(vreinterpretq_f32_m128(a)), 0);
+ float32x2_t b00 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(b)), 0);
+ return vreinterpretq_m128_f32(vcombine_f32(a22, b00));
+}
+
+FORCE_INLINE __m128 _mm_shuffle_ps_2200(__m128 a, __m128 b)
+{
+ float32x2_t a00 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(a)), 0);
+ float32x2_t b22 =
+ vdup_lane_f32(vget_high_f32(vreinterpretq_f32_m128(b)), 0);
+ return vreinterpretq_m128_f32(vcombine_f32(a00, b22));
+}
+
+FORCE_INLINE __m128 _mm_shuffle_ps_3202(__m128 a, __m128 b)
+{
+ float32_t a0 = vgetq_lane_f32(vreinterpretq_f32_m128(a), 0);
+ float32x2_t a22 =
+ vdup_lane_f32(vget_high_f32(vreinterpretq_f32_m128(a)), 0);
+ float32x2_t a02 = vset_lane_f32(a0, a22, 1); /* TODO: use vzip ?*/
+ float32x2_t b32 = vget_high_f32(vreinterpretq_f32_m128(b));
+ return vreinterpretq_m128_f32(vcombine_f32(a02, b32));
+}
+
+FORCE_INLINE __m128 _mm_shuffle_ps_1133(__m128 a, __m128 b)
+{
+ float32x2_t a33 =
+ vdup_lane_f32(vget_high_f32(vreinterpretq_f32_m128(a)), 1);
+ float32x2_t b11 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(b)), 1);
+ return vreinterpretq_m128_f32(vcombine_f32(a33, b11));
+}
+
+FORCE_INLINE __m128 _mm_shuffle_ps_2010(__m128 a, __m128 b)
+{
+ float32x2_t a10 = vget_low_f32(vreinterpretq_f32_m128(a));
+ float32_t b2 = vgetq_lane_f32(vreinterpretq_f32_m128(b), 2);
+ float32x2_t b00 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(b)), 0);
+ float32x2_t b20 = vset_lane_f32(b2, b00, 1);
+ return vreinterpretq_m128_f32(vcombine_f32(a10, b20));
+}
+
+FORCE_INLINE __m128 _mm_shuffle_ps_2001(__m128 a, __m128 b)
+{
+ float32x2_t a01 = vrev64_f32(vget_low_f32(vreinterpretq_f32_m128(a)));
+ float32_t b2 = vgetq_lane_f32(b, 2);
+ float32x2_t b00 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(b)), 0);
+ float32x2_t b20 = vset_lane_f32(b2, b00, 1);
+ return vreinterpretq_m128_f32(vcombine_f32(a01, b20));
+}
+
+FORCE_INLINE __m128 _mm_shuffle_ps_2032(__m128 a, __m128 b)
+{
+ float32x2_t a32 = vget_high_f32(vreinterpretq_f32_m128(a));
+ float32_t b2 = vgetq_lane_f32(b, 2);
+ float32x2_t b00 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(b)), 0);
+ float32x2_t b20 = vset_lane_f32(b2, b00, 1);
+ return vreinterpretq_m128_f32(vcombine_f32(a32, b20));
+}
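These helpers mirror _mm_shuffle_ps(a, b, imm) for specific imm values; as an illustration of the naming convention:

// _mm_shuffle_ps_1032 corresponds to imm == _MM_SHUFFLE(1, 0, 3, 2):
// the result lanes are { a[2], a[3], b[0], b[1] }.
static inline __m128 demo_shuffle_1032(__m128 a, __m128 b)
{
    return _mm_shuffle_ps_1032(a, b);
}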
+
+// Kahan summation for accurate summation of floating-point numbers.
+// http://blog.zachbjornson.com/2019/08/11/fast-float-summation.html
+FORCE_INLINE void _sse2neon_kadd_f32(float *sum, float *c, float y)
+{
+ y -= *c;
+ float t = *sum + y;
+ *c = (t - *sum) - y;
+ *sum = t;
+}
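Usage sketch for the compensated-summation helper above (names are illustrative):

static inline float demo_kahan_sum(const float *x, int n)
{
    float sum = 0.0f, c = 0.0f;   // c accumulates the running rounding error
    for (int i = 0; i < n; i++)
        _sse2neon_kadd_f32(&sum, &c, x[i]);
    return sum;
}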
+
+#if defined(__ARM_FEATURE_CRYPTO)
+// Wraps vmull_p64
+FORCE_INLINE uint64x2_t _sse2neon_vmull_p64(uint64x1_t _a, uint64x1_t _b)
+{
+ poly64_t a = vget_lane_p64(vreinterpret_p64_u64(_a), 0);
+ poly64_t b = vget_lane_p64(vreinterpret_p64_u64(_b), 0);
+ return vreinterpretq_u64_p128(vmull_p64(a, b));
+}
+#else // ARMv7 polyfill
+// ARMv7/some A64 lacks vmull_p64, but it has vmull_p8.
//
-// dst[63:0] := Convert_FP32_To_FP64(b[31:0])
-// dst[127:64] := a[127:64]
+// vmull_p8 calculates 8 8-bit->16-bit polynomial multiplies, but we need a
+// 64-bit->128-bit polynomial multiply.
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtss_sd
-FORCE_INLINE __m128d _mm_cvtss_sd(__m128d a, __m128 b)
+// It needs some work and is somewhat slow, but it is still faster than all
+// known scalar methods.
+//
+// Algorithm adapted to C from
+// https://www.workofard.com/2017/07/ghash-for-low-end-cores/, which is adapted
+// from "Fast Software Polynomial Multiplication on ARM Processors Using the
+// NEON Engine" by Danilo Camara, Conrado Gouvea, Julio Lopez and Ricardo Dahab
+// (https://hal.inria.fr/hal-01506572)
+static uint64x2_t _sse2neon_vmull_p64(uint64x1_t _a, uint64x1_t _b)
{
- double d = (double) vgetq_lane_f32(vreinterpretq_f32_m128(b), 0);
+ poly8x8_t a = vreinterpret_p8_u64(_a);
+ poly8x8_t b = vreinterpret_p8_u64(_b);
+
+ // Masks
+ uint8x16_t k48_32 = vcombine_u8(vcreate_u8(0x0000ffffffffffff),
+ vcreate_u8(0x00000000ffffffff));
+ uint8x16_t k16_00 = vcombine_u8(vcreate_u8(0x000000000000ffff),
+ vcreate_u8(0x0000000000000000));
+
+ // Do the multiplies, rotating with vext to get all combinations
+ uint8x16_t d = vreinterpretq_u8_p16(vmull_p8(a, b)); // D = A0 * B0
+ uint8x16_t e =
+ vreinterpretq_u8_p16(vmull_p8(a, vext_p8(b, b, 1))); // E = A0 * B1
+ uint8x16_t f =
+ vreinterpretq_u8_p16(vmull_p8(vext_p8(a, a, 1), b)); // F = A1 * B0
+ uint8x16_t g =
+ vreinterpretq_u8_p16(vmull_p8(a, vext_p8(b, b, 2))); // G = A0 * B2
+ uint8x16_t h =
+ vreinterpretq_u8_p16(vmull_p8(vext_p8(a, a, 2), b)); // H = A2 * B0
+ uint8x16_t i =
+ vreinterpretq_u8_p16(vmull_p8(a, vext_p8(b, b, 3))); // I = A0 * B3
+ uint8x16_t j =
+ vreinterpretq_u8_p16(vmull_p8(vext_p8(a, a, 3), b)); // J = A3 * B0
+ uint8x16_t k =
+ vreinterpretq_u8_p16(vmull_p8(a, vext_p8(b, b, 4))); // L = A0 * B4
+
+ // Add cross products
+ uint8x16_t l = veorq_u8(e, f); // L = E + F
+ uint8x16_t m = veorq_u8(g, h); // M = G + H
+ uint8x16_t n = veorq_u8(i, j); // N = I + J
+
+ // Interleave. Using vzip1 and vzip2 prevents Clang from emitting TBL
+ // instructions.
#if defined(__aarch64__)
- return vreinterpretq_m128d_f64(
- vsetq_lane_f64(d, vreinterpretq_f64_m128d(a), 0));
+ uint8x16_t lm_p0 = vreinterpretq_u8_u64(
+ vzip1q_u64(vreinterpretq_u64_u8(l), vreinterpretq_u64_u8(m)));
+ uint8x16_t lm_p1 = vreinterpretq_u8_u64(
+ vzip2q_u64(vreinterpretq_u64_u8(l), vreinterpretq_u64_u8(m)));
+ uint8x16_t nk_p0 = vreinterpretq_u8_u64(
+ vzip1q_u64(vreinterpretq_u64_u8(n), vreinterpretq_u64_u8(k)));
+ uint8x16_t nk_p1 = vreinterpretq_u8_u64(
+ vzip2q_u64(vreinterpretq_u64_u8(n), vreinterpretq_u64_u8(k)));
#else
- return vreinterpretq_m128d_s64(
- vsetq_lane_s64(*(int64_t *) &d, vreinterpretq_s64_m128d(a), 0));
+ uint8x16_t lm_p0 = vcombine_u8(vget_low_u8(l), vget_low_u8(m));
+ uint8x16_t lm_p1 = vcombine_u8(vget_high_u8(l), vget_high_u8(m));
+ uint8x16_t nk_p0 = vcombine_u8(vget_low_u8(n), vget_low_u8(k));
+ uint8x16_t nk_p1 = vcombine_u8(vget_high_u8(n), vget_high_u8(k));
#endif
-}
+ // t0 = (L) (P0 + P1) << 8
+ // t1 = (M) (P2 + P3) << 16
+ uint8x16_t t0t1_tmp = veorq_u8(lm_p0, lm_p1);
+ uint8x16_t t0t1_h = vandq_u8(lm_p1, k48_32);
+ uint8x16_t t0t1_l = veorq_u8(t0t1_tmp, t0t1_h);
-// Convert the lower single-precision (32-bit) floating-point element in a to a
-// 32-bit integer, and store the result in dst.
-//
-// dst[31:0] := Convert_FP32_To_Int32(a[31:0])
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtss_si32
-#define _mm_cvtss_si32(a) _mm_cvt_ss2si(a)
+ // t2 = (N) (P4 + P5) << 24
+ // t3 = (K) (P6 + P7) << 32
+ uint8x16_t t2t3_tmp = veorq_u8(nk_p0, nk_p1);
+ uint8x16_t t2t3_h = vandq_u8(nk_p1, k16_00);
+ uint8x16_t t2t3_l = veorq_u8(t2t3_tmp, t2t3_h);
-// Convert the lower single-precision (32-bit) floating-point element in a to a
-// 64-bit integer, and store the result in dst.
-//
-// dst[63:0] := Convert_FP32_To_Int64(a[31:0])
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtss_si64
-FORCE_INLINE int _mm_cvtss_si64(__m128 a)
-{
+ // De-interleave
#if defined(__aarch64__)
- return vgetq_lane_s64(
- vreinterpretq_s64_s32(vcvtnq_s32_f32(vreinterpretq_f32_m128(a))), 0);
+ uint8x16_t t0 = vreinterpretq_u8_u64(
+ vuzp1q_u64(vreinterpretq_u64_u8(t0t1_l), vreinterpretq_u64_u8(t0t1_h)));
+ uint8x16_t t1 = vreinterpretq_u8_u64(
+ vuzp2q_u64(vreinterpretq_u64_u8(t0t1_l), vreinterpretq_u64_u8(t0t1_h)));
+ uint8x16_t t2 = vreinterpretq_u8_u64(
+ vuzp1q_u64(vreinterpretq_u64_u8(t2t3_l), vreinterpretq_u64_u8(t2t3_h)));
+ uint8x16_t t3 = vreinterpretq_u8_u64(
+ vuzp2q_u64(vreinterpretq_u64_u8(t2t3_l), vreinterpretq_u64_u8(t2t3_h)));
#else
- float32_t data = vgetq_lane_f32(vreinterpretq_f32_m128(a), 0);
- float32_t diff = data - floor(data);
- if (diff > 0.5)
- return (int64_t) ceil(data);
- if (unlikely(diff == 0.5)) {
- int64_t f = (int64_t) floor(data);
- int64_t c = (int64_t) ceil(data);
- return c & 1 ? f : c;
- }
- return (int64_t) floor(data);
+ uint8x16_t t1 = vcombine_u8(vget_high_u8(t0t1_l), vget_high_u8(t0t1_h));
+ uint8x16_t t0 = vcombine_u8(vget_low_u8(t0t1_l), vget_low_u8(t0t1_h));
+ uint8x16_t t3 = vcombine_u8(vget_high_u8(t2t3_l), vget_high_u8(t2t3_h));
+ uint8x16_t t2 = vcombine_u8(vget_low_u8(t2t3_l), vget_low_u8(t2t3_h));
#endif
+ // Shift the cross products
+ uint8x16_t t0_shift = vextq_u8(t0, t0, 15); // t0 << 8
+ uint8x16_t t1_shift = vextq_u8(t1, t1, 14); // t1 << 16
+ uint8x16_t t2_shift = vextq_u8(t2, t2, 13); // t2 << 24
+ uint8x16_t t3_shift = vextq_u8(t3, t3, 12); // t3 << 32
+
+ // Accumulate the products
+ uint8x16_t cross1 = veorq_u8(t0_shift, t1_shift);
+ uint8x16_t cross2 = veorq_u8(t2_shift, t3_shift);
+ uint8x16_t mix = veorq_u8(d, cross1);
+ uint8x16_t r = veorq_u8(mix, cross2);
+ return vreinterpretq_u64_u8(r);
}
+#endif // ARMv7 polyfill
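A small sanity check of the carry-less multiply that works against either path above (illustrative only):

// In GF(2), 0b11 * 0b11 = 0b101: partial products are XORed, never carried.
static inline int demo_clmul_check(void)
{
    uint64x2_t r = _sse2neon_vmull_p64(vcreate_u64(3), vcreate_u64(3));
    return vgetq_lane_u64(r, 0) == 5 && vgetq_lane_u64(r, 1) == 0;
}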
-// Convert packed single-precision (32-bit) floating-point elements in a to
-// packed 32-bit integers with truncation, and store the results in dst.
-//
-// FOR j := 0 to 1
-// i := 32*j
-// dst[i+31:i] := Convert_FP32_To_Int32_Truncate(a[i+31:i])
-// ENDFOR
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtt_ps2pi
-FORCE_INLINE __m64 _mm_cvtt_ps2pi(__m128 a)
+// C equivalent:
+// __m128i _mm_shuffle_epi32_default(__m128i a,
+// __constrange(0, 255) int imm) {
+// __m128i ret;
+// ret[0] = a[imm & 0x3]; ret[1] = a[(imm >> 2) & 0x3];
+// ret[2] = a[(imm >> 4) & 0x03]; ret[3] = a[(imm >> 6) & 0x03];
+// return ret;
+// }
+#define _mm_shuffle_epi32_default(a, imm) \
+ __extension__({ \
+ int32x4_t ret; \
+ ret = vmovq_n_s32( \
+ vgetq_lane_s32(vreinterpretq_s32_m128i(a), (imm) & (0x3))); \
+ ret = vsetq_lane_s32( \
+ vgetq_lane_s32(vreinterpretq_s32_m128i(a), ((imm) >> 2) & 0x3), \
+ ret, 1); \
+ ret = vsetq_lane_s32( \
+ vgetq_lane_s32(vreinterpretq_s32_m128i(a), ((imm) >> 4) & 0x3), \
+ ret, 2); \
+ ret = vsetq_lane_s32( \
+ vgetq_lane_s32(vreinterpretq_s32_m128i(a), ((imm) >> 6) & 0x3), \
+ ret, 3); \
+ vreinterpretq_m128i_s32(ret); \
+ })
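Usage sketch for the macro above; the shuffle selector must be a compile-time constant:

// Reverse the four 32-bit lanes of a vector.
static inline __m128i demo_reverse_epi32(__m128i a)
{
    return _mm_shuffle_epi32_default(a, _MM_SHUFFLE(0, 1, 2, 3));
}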
+
+// Takes the upper 64 bits of a and places it in the low end of the result
+// Takes the lower 64 bits of a and places it into the high end of the result.
+FORCE_INLINE __m128i _mm_shuffle_epi_1032(__m128i a)
{
- return vreinterpret_m64_s32(
- vget_low_s32(vcvtq_s32_f32(vreinterpretq_f32_m128(a))));
+ int32x2_t a32 = vget_high_s32(vreinterpretq_s32_m128i(a));
+ int32x2_t a10 = vget_low_s32(vreinterpretq_s32_m128i(a));
+ return vreinterpretq_m128i_s32(vcombine_s32(a32, a10));
}
-// Convert the lower single-precision (32-bit) floating-point element in a to a
-// 32-bit integer with truncation, and store the result in dst.
-//
-// dst[31:0] := Convert_FP32_To_Int32_Truncate(a[31:0])
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtt_ss2si
-FORCE_INLINE int _mm_cvtt_ss2si(__m128 a)
+// takes the lower two 32-bit values from a and swaps them and places in low end
+// of result takes the higher two 32 bit values from a and swaps them and places
+// in high end of result.
+FORCE_INLINE __m128i _mm_shuffle_epi_2301(__m128i a)
{
- return vgetq_lane_s32(vcvtq_s32_f32(vreinterpretq_f32_m128(a)), 0);
+ int32x2_t a01 = vrev64_s32(vget_low_s32(vreinterpretq_s32_m128i(a)));
+ int32x2_t a23 = vrev64_s32(vget_high_s32(vreinterpretq_s32_m128i(a)));
+ return vreinterpretq_m128i_s32(vcombine_s32(a01, a23));
}
-// Convert packed single-precision (32-bit) floating-point elements in a to
-// packed 32-bit integers with truncation, and store the results in dst.
-//
-// FOR j := 0 to 1
-// i := 32*j
-// dst[i+31:i] := Convert_FP32_To_Int32_Truncate(a[i+31:i])
-// ENDFOR
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvttps_pi32
-#define _mm_cvttps_pi32(a) _mm_cvtt_ps2pi(a)
+// rotates the least significant 32 bits into the most significant 32 bits, and
+// shifts the rest down
+FORCE_INLINE __m128i _mm_shuffle_epi_0321(__m128i a)
+{
+ return vreinterpretq_m128i_s32(
+ vextq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(a), 1));
+}
-// Convert the lower single-precision (32-bit) floating-point element in a to a
-// 32-bit integer with truncation, and store the result in dst.
-//
-// dst[31:0] := Convert_FP32_To_Int32_Truncate(a[31:0])
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvttss_si32
-#define _mm_cvttss_si32(a) _mm_cvtt_ss2si(a)
+// rotates the most significant 32 bits into the least significant 32 bits, and
+// shifts the rest up
+FORCE_INLINE __m128i _mm_shuffle_epi_2103(__m128i a)
+{
+ return vreinterpretq_m128i_s32(
+ vextq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(a), 3));
+}
-// Convert the lower single-precision (32-bit) floating-point element in a to a
-// 64-bit integer with truncation, and store the result in dst.
-//
-// dst[63:0] := Convert_FP32_To_Int64_Truncate(a[31:0])
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvttss_si64
-FORCE_INLINE int64_t _mm_cvttss_si64(__m128 a)
+// gets the lower 64 bits of a, and places it in the upper 64 bits
+// gets the lower 64 bits of a and places it in the lower 64 bits
+FORCE_INLINE __m128i _mm_shuffle_epi_1010(__m128i a)
{
- return vgetq_lane_s64(
- vmovl_s32(vget_low_s32(vcvtq_s32_f32(vreinterpretq_f32_m128(a)))), 0);
+ int32x2_t a10 = vget_low_s32(vreinterpretq_s32_m128i(a));
+ return vreinterpretq_m128i_s32(vcombine_s32(a10, a10));
}
-// Sets the 128-bit value to zero
-// https://msdn.microsoft.com/en-us/library/vstudio/ys7dw0kh(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_setzero_si128(void)
+// gets the lower 64 bits of a, swaps the 0 and 1 elements, and places it in the
+// lower 64 bits gets the lower 64 bits of a, and places it in the upper 64 bits
+FORCE_INLINE __m128i _mm_shuffle_epi_1001(__m128i a)
{
- return vreinterpretq_m128i_s32(vdupq_n_s32(0));
+ int32x2_t a01 = vrev64_s32(vget_low_s32(vreinterpretq_s32_m128i(a)));
+ int32x2_t a10 = vget_low_s32(vreinterpretq_s32_m128i(a));
+ return vreinterpretq_m128i_s32(vcombine_s32(a01, a10));
}
-// Clears the four single-precision, floating-point values.
-// https://msdn.microsoft.com/en-us/library/vstudio/tk1t2tbz(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_setzero_ps(void)
+// gets the lower 64 bits of a, swaps the 0 and 1 elements and places it in the
+// upper 64 bits gets the lower 64 bits of a, swaps the 0 and 1 elements, and
+// places it in the lower 64 bits
+FORCE_INLINE __m128i _mm_shuffle_epi_0101(__m128i a)
{
- return vreinterpretq_m128_f32(vdupq_n_f32(0));
+ int32x2_t a01 = vrev64_s32(vget_low_s32(vreinterpretq_s32_m128i(a)));
+ return vreinterpretq_m128i_s32(vcombine_s32(a01, a01));
}
-// Return vector of type __m128d with all elements set to zero.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_setzero_pd
-FORCE_INLINE __m128d _mm_setzero_pd(void)
+FORCE_INLINE __m128i _mm_shuffle_epi_2211(__m128i a)
{
+ int32x2_t a11 = vdup_lane_s32(vget_low_s32(vreinterpretq_s32_m128i(a)), 1);
+ int32x2_t a22 = vdup_lane_s32(vget_high_s32(vreinterpretq_s32_m128i(a)), 0);
+ return vreinterpretq_m128i_s32(vcombine_s32(a11, a22));
+}
+
+FORCE_INLINE __m128i _mm_shuffle_epi_0122(__m128i a)
+{
+ int32x2_t a22 = vdup_lane_s32(vget_high_s32(vreinterpretq_s32_m128i(a)), 0);
+ int32x2_t a01 = vrev64_s32(vget_low_s32(vreinterpretq_s32_m128i(a)));
+ return vreinterpretq_m128i_s32(vcombine_s32(a22, a01));
+}
+
+FORCE_INLINE __m128i _mm_shuffle_epi_3332(__m128i a)
+{
+ int32x2_t a32 = vget_high_s32(vreinterpretq_s32_m128i(a));
+ int32x2_t a33 = vdup_lane_s32(vget_high_s32(vreinterpretq_s32_m128i(a)), 1);
+ return vreinterpretq_m128i_s32(vcombine_s32(a32, a33));
+}
+
+// FORCE_INLINE __m128i _mm_shuffle_epi32_splat(__m128i a, __constrange(0,255)
+// int imm)
#if defined(__aarch64__)
- return vreinterpretq_m128d_f64(vdupq_n_f64(0));
+#define _mm_shuffle_epi32_splat(a, imm) \
+ __extension__({ \
+ vreinterpretq_m128i_s32( \
+ vdupq_laneq_s32(vreinterpretq_s32_m128i(a), (imm))); \
+ })
#else
- return vreinterpretq_m128d_f32(vdupq_n_f32(0));
+#define _mm_shuffle_epi32_splat(a, imm) \
+ __extension__({ \
+ vreinterpretq_m128i_s32( \
+ vdupq_n_s32(vgetq_lane_s32(vreinterpretq_s32_m128i(a), (imm)))); \
+ })
#endif
-}
-// Sets the four single-precision, floating-point values to w.
+// NEON does not support a general purpose permute intrinsic
+// Selects four specific single-precision, floating-point values from a and b,
+// based on the mask i.
//
-// r0 := r1 := r2 := r3 := w
+// C equivalent:
+// __m128 _mm_shuffle_ps_default(__m128 a, __m128 b,
+// __constrange(0, 255) int imm) {
+// __m128 ret;
+// ret[0] = a[imm & 0x3]; ret[1] = a[(imm >> 2) & 0x3];
+// ret[2] = b[(imm >> 4) & 0x03]; ret[3] = b[(imm >> 6) & 0x03];
+// return ret;
+// }
//
-// https://msdn.microsoft.com/en-us/library/vstudio/2x1se8ha(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_set1_ps(float _w)
+// https://msdn.microsoft.com/en-us/library/vstudio/5f0858x0(v=vs.100).aspx
+#define _mm_shuffle_ps_default(a, b, imm) \
+ __extension__({ \
+ float32x4_t ret; \
+ ret = vmovq_n_f32( \
+ vgetq_lane_f32(vreinterpretq_f32_m128(a), (imm) & (0x3))); \
+ ret = vsetq_lane_f32( \
+ vgetq_lane_f32(vreinterpretq_f32_m128(a), ((imm) >> 2) & 0x3), \
+ ret, 1); \
+ ret = vsetq_lane_f32( \
+ vgetq_lane_f32(vreinterpretq_f32_m128(b), ((imm) >> 4) & 0x3), \
+ ret, 2); \
+ ret = vsetq_lane_f32( \
+ vgetq_lane_f32(vreinterpretq_f32_m128(b), ((imm) >> 6) & 0x3), \
+ ret, 3); \
+ vreinterpretq_m128_f32(ret); \
+ })
+
+// Shuffles the lower 4 signed or unsigned 16-bit integers in a as specified
+// by imm.
+// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/y41dkk37(v=vs.100)
+// FORCE_INLINE __m128i _mm_shufflelo_epi16_function(__m128i a,
+// __constrange(0,255) int
+// imm)
+#define _mm_shufflelo_epi16_function(a, imm) \
+ __extension__({ \
+ int16x8_t ret = vreinterpretq_s16_m128i(a); \
+ int16x4_t lowBits = vget_low_s16(ret); \
+ ret = vsetq_lane_s16(vget_lane_s16(lowBits, (imm) & (0x3)), ret, 0); \
+ ret = vsetq_lane_s16(vget_lane_s16(lowBits, ((imm) >> 2) & 0x3), ret, \
+ 1); \
+ ret = vsetq_lane_s16(vget_lane_s16(lowBits, ((imm) >> 4) & 0x3), ret, \
+ 2); \
+ ret = vsetq_lane_s16(vget_lane_s16(lowBits, ((imm) >> 6) & 0x3), ret, \
+ 3); \
+ vreinterpretq_m128i_s16(ret); \
+ })
+
+// Shuffles the upper 4 signed or unsigned 16-bit integers in a as specified
+// by imm.
+// https://msdn.microsoft.com/en-us/library/13ywktbs(v=vs.100).aspx
+// FORCE_INLINE __m128i _mm_shufflehi_epi16_function(__m128i a,
+// __constrange(0,255) int
+// imm)
+#define _mm_shufflehi_epi16_function(a, imm) \
+ __extension__({ \
+ int16x8_t ret = vreinterpretq_s16_m128i(a); \
+ int16x4_t highBits = vget_high_s16(ret); \
+ ret = vsetq_lane_s16(vget_lane_s16(highBits, (imm) & (0x3)), ret, 4); \
+ ret = vsetq_lane_s16(vget_lane_s16(highBits, ((imm) >> 2) & 0x3), ret, \
+ 5); \
+ ret = vsetq_lane_s16(vget_lane_s16(highBits, ((imm) >> 4) & 0x3), ret, \
+ 6); \
+ ret = vsetq_lane_s16(vget_lane_s16(highBits, ((imm) >> 6) & 0x3), ret, \
+ 7); \
+ vreinterpretq_m128i_s16(ret); \
+ })
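Usage sketch for the 16-bit shuffles above (the helper name is illustrative):

// Swap the two lowest 16-bit lanes; lanes 2-7 pass through unchanged.
static inline __m128i demo_swap_low_words(__m128i a)
{
    return _mm_shufflelo_epi16_function(a, _MM_SHUFFLE(3, 2, 0, 1));
}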
+
+/* SSE */
+
+// Adds the four single-precision, floating-point values of a and b.
+//
+// r0 := a0 + b0
+// r1 := a1 + b1
+// r2 := a2 + b2
+// r3 := a3 + b3
+//
+// https://msdn.microsoft.com/en-us/library/vstudio/c9848chc(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_add_ps(__m128 a, __m128 b)
{
- return vreinterpretq_m128_f32(vdupq_n_f32(_w));
+ return vreinterpretq_m128_f32(
+ vaddq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
}
-// Sets the four single-precision, floating-point values to w.
-// https://msdn.microsoft.com/en-us/library/vstudio/2x1se8ha(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_set_ps1(float _w)
+// adds the scalar single-precision floating point values of a and b.
+// https://msdn.microsoft.com/en-us/library/be94x2y6(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_add_ss(__m128 a, __m128 b)
{
- return vreinterpretq_m128_f32(vdupq_n_f32(_w));
+ float32_t b0 = vgetq_lane_f32(vreinterpretq_f32_m128(b), 0);
+ float32x4_t value = vsetq_lane_f32(b0, vdupq_n_f32(0), 0);
+ // the upper values in the result must be the remnants of <a>.
+ return vreinterpretq_m128_f32(vaddq_f32(a, value));
}
-// Sets the four single-precision, floating-point values to the four inputs.
-// https://msdn.microsoft.com/en-us/library/vstudio/afh0zf75(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_set_ps(float w, float z, float y, float x)
+// Computes the bitwise AND of the four single-precision, floating-point values
+// of a and b.
+//
+// r0 := a0 & b0
+// r1 := a1 & b1
+// r2 := a2 & b2
+// r3 := a3 & b3
+//
+// https://msdn.microsoft.com/en-us/library/vstudio/73ck1xc5(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_and_ps(__m128 a, __m128 b)
{
- float ALIGN_STRUCT(16) data[4] = {x, y, z, w};
- return vreinterpretq_m128_f32(vld1q_f32(data));
+ return vreinterpretq_m128_s32(
+ vandq_s32(vreinterpretq_s32_m128(a), vreinterpretq_s32_m128(b)));
}
-// Copy single-precision (32-bit) floating-point element a to the lower element
-// of dst, and zero the upper 3 elements.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_set_ss
-FORCE_INLINE __m128 _mm_set_ss(float a)
+// Computes the bitwise AND-NOT of the four single-precision, floating-point
+// values of a and b.
+//
+// r0 := ~a0 & b0
+// r1 := ~a1 & b1
+// r2 := ~a2 & b2
+// r3 := ~a3 & b3
+//
+// https://msdn.microsoft.com/en-us/library/vstudio/68h7wd02(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_andnot_ps(__m128 a, __m128 b)
{
- float ALIGN_STRUCT(16) data[4] = {a, 0, 0, 0};
- return vreinterpretq_m128_f32(vld1q_f32(data));
+ return vreinterpretq_m128_s32(
+ vbicq_s32(vreinterpretq_s32_m128(b),
+ vreinterpretq_s32_m128(a))); // *NOTE* argument swap
}
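Because _mm_andnot_ps computes ~a & b, it combines with _mm_and_ps and _mm_or_ps into the usual branchless select; a sketch with illustrative names:

// Per lane: mask set -> take y, mask clear -> take x.
static inline __m128 demo_select_ps(__m128 mask, __m128 x, __m128 y)
{
    return _mm_or_ps(_mm_and_ps(mask, y), _mm_andnot_ps(mask, x));
}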
-// Sets the four single-precision, floating-point values to the four inputs in
-// reverse order.
-// https://msdn.microsoft.com/en-us/library/vstudio/d2172ct3(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_setr_ps(float w, float z, float y, float x)
+// Average packed unsigned 16-bit integers in a and b, and store the results in
+// dst.
+//
+// FOR j := 0 to 3
+// i := j*16
+// dst[i+15:i] := (a[i+15:i] + b[i+15:i] + 1) >> 1
+// ENDFOR
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_avg_pu16
+FORCE_INLINE __m64 _mm_avg_pu16(__m64 a, __m64 b)
{
- float ALIGN_STRUCT(16) data[4] = {w, z, y, x};
- return vreinterpretq_m128_f32(vld1q_f32(data));
+ return vreinterpret_m64_u16(
+ vrhadd_u16(vreinterpret_u16_m64(a), vreinterpret_u16_m64(b)));
}
-// Sets the 8 signed 16-bit integer values in reverse order.
+// Average packed unsigned 8-bit integers in a and b, and store the results in
+// dst.
//
-// Return Value
-// r0 := w0
-// r1 := w1
-// ...
-// r7 := w7
-FORCE_INLINE __m128i _mm_setr_epi16(short w0,
- short w1,
- short w2,
- short w3,
- short w4,
- short w5,
- short w6,
- short w7)
+// FOR j := 0 to 7
+// i := j*8
+// dst[i+7:i] := (a[i+7:i] + b[i+7:i] + 1) >> 1
+// ENDFOR
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_avg_pu8
+FORCE_INLINE __m64 _mm_avg_pu8(__m64 a, __m64 b)
{
- int16_t ALIGN_STRUCT(16) data[8] = {w0, w1, w2, w3, w4, w5, w6, w7};
- return vreinterpretq_m128i_s16(vld1q_s16((int16_t *) data));
+ return vreinterpret_m64_u8(
+ vrhadd_u8(vreinterpret_u8_m64(a), vreinterpret_u8_m64(b)));
}
-// Sets the 4 signed 32-bit integer values in reverse order
-// https://technet.microsoft.com/en-us/library/security/27yb3ee5(v=vs.90).aspx
-FORCE_INLINE __m128i _mm_setr_epi32(int i3, int i2, int i1, int i0)
+// Compares for equality.
+// https://msdn.microsoft.com/en-us/library/vstudio/36aectz5(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_cmpeq_ps(__m128 a, __m128 b)
{
- int32_t ALIGN_STRUCT(16) data[4] = {i3, i2, i1, i0};
- return vreinterpretq_m128i_s32(vld1q_s32(data));
+ return vreinterpretq_m128_u32(
+ vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
}
-// Set packed 64-bit integers in dst with the supplied values in reverse order.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_setr_epi64
-FORCE_INLINE __m128i _mm_setr_epi64(__m64 e1, __m64 e0)
+// Compares for equality.
+// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/k423z28e(v=vs.100)
+FORCE_INLINE __m128 _mm_cmpeq_ss(__m128 a, __m128 b)
{
- return vreinterpretq_m128i_s64(vcombine_s64(e1, e0));
+ return _mm_move_ss(a, _mm_cmpeq_ps(a, b));
}
-// Sets the 16 signed 8-bit integer values to b.
+// Compares for greater than or equal.
+// https://msdn.microsoft.com/en-us/library/vstudio/fs813y2t(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_cmpge_ps(__m128 a, __m128 b)
+{
+ return vreinterpretq_m128_u32(
+ vcgeq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
+}
+
+// Compares for greater than or equal.
+// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/kesh3ddc(v=vs.100)
+FORCE_INLINE __m128 _mm_cmpge_ss(__m128 a, __m128 b)
+{
+ return _mm_move_ss(a, _mm_cmpge_ps(a, b));
+}
+
+// Compares for greater than.
//
-// r0 := b
-// r1 := b
-// ...
-// r15 := b
+// r0 := (a0 > b0) ? 0xffffffff : 0x0
+// r1 := (a1 > b1) ? 0xffffffff : 0x0
+// r2 := (a2 > b2) ? 0xffffffff : 0x0
+// r3 := (a3 > b3) ? 0xffffffff : 0x0
//
-// https://msdn.microsoft.com/en-us/library/6e14xhyf(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_set1_epi8(signed char w)
+// https://msdn.microsoft.com/en-us/library/vstudio/11dy102s(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_cmpgt_ps(__m128 a, __m128 b)
{
- return vreinterpretq_m128i_s8(vdupq_n_s8(w));
+ return vreinterpretq_m128_u32(
+ vcgtq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
}
-// Broadcast double-precision (64-bit) floating-point value a to all elements of
-// dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_set1_pd
-FORCE_INLINE __m128d _mm_set1_pd(double d)
+// Compares for greater than.
+// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/1xyyyy9e(v=vs.100)
+FORCE_INLINE __m128 _mm_cmpgt_ss(__m128 a, __m128 b)
{
-#if defined(__aarch64__)
- return vreinterpretq_m128d_f64(vdupq_n_f64(d));
-#else
- return vreinterpretq_m128d_s64(vdupq_n_s64(*(int64_t *) &d));
-#endif
+ return _mm_move_ss(a, _mm_cmpgt_ps(a, b));
}
-// Sets the 8 signed 16-bit integer values to w.
+// Compares for less than or equal.
//
-// r0 := w
-// r1 := w
-// ...
-// r7 := w
+// r0 := (a0 <= b0) ? 0xffffffff : 0x0
+// r1 := (a1 <= b1) ? 0xffffffff : 0x0
+// r2 := (a2 <= b2) ? 0xffffffff : 0x0
+// r3 := (a3 <= b3) ? 0xffffffff : 0x0
//
-// https://msdn.microsoft.com/en-us/library/k0ya3x0e(v=vs.90).aspx
-FORCE_INLINE __m128i _mm_set1_epi16(short w)
+// https://msdn.microsoft.com/en-us/library/vstudio/1s75w83z(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_cmple_ps(__m128 a, __m128 b)
{
- return vreinterpretq_m128i_s16(vdupq_n_s16(w));
+ return vreinterpretq_m128_u32(
+ vcleq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
}
-// Sets the 16 signed 8-bit integer values.
-// https://msdn.microsoft.com/en-us/library/x0cx8zd3(v=vs.90).aspx
-FORCE_INLINE __m128i _mm_set_epi8(signed char b15,
- signed char b14,
- signed char b13,
- signed char b12,
- signed char b11,
- signed char b10,
- signed char b9,
- signed char b8,
- signed char b7,
- signed char b6,
- signed char b5,
- signed char b4,
- signed char b3,
- signed char b2,
- signed char b1,
- signed char b0)
+// Compares for less than or equal.
+// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/a7x0hbhw(v=vs.100)
+FORCE_INLINE __m128 _mm_cmple_ss(__m128 a, __m128 b)
{
- int8_t ALIGN_STRUCT(16)
- data[16] = {(int8_t) b0, (int8_t) b1, (int8_t) b2, (int8_t) b3,
- (int8_t) b4, (int8_t) b5, (int8_t) b6, (int8_t) b7,
- (int8_t) b8, (int8_t) b9, (int8_t) b10, (int8_t) b11,
- (int8_t) b12, (int8_t) b13, (int8_t) b14, (int8_t) b15};
- return (__m128i) vld1q_s8(data);
+ return _mm_move_ss(a, _mm_cmple_ps(a, b));
}
-// Sets the 8 signed 16-bit integer values.
-// https://msdn.microsoft.com/en-au/library/3e0fek84(v=vs.90).aspx
-FORCE_INLINE __m128i _mm_set_epi16(short i7,
- short i6,
- short i5,
- short i4,
- short i3,
- short i2,
- short i1,
- short i0)
+// Compares for less than
+// https://msdn.microsoft.com/en-us/library/vstudio/f330yhc8(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_cmplt_ps(__m128 a, __m128 b)
{
- int16_t ALIGN_STRUCT(16) data[8] = {i0, i1, i2, i3, i4, i5, i6, i7};
- return vreinterpretq_m128i_s16(vld1q_s16(data));
+ return vreinterpretq_m128_u32(
+ vcltq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
}
-// Sets the 16 signed 8-bit integer values in reverse order.
-// https://msdn.microsoft.com/en-us/library/2khb9c7k(v=vs.90).aspx
-FORCE_INLINE __m128i _mm_setr_epi8(signed char b0,
- signed char b1,
- signed char b2,
- signed char b3,
- signed char b4,
- signed char b5,
- signed char b6,
- signed char b7,
- signed char b8,
- signed char b9,
- signed char b10,
- signed char b11,
- signed char b12,
- signed char b13,
- signed char b14,
- signed char b15)
+// Compares for less than
+// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/fy94wye7(v=vs.100)
+FORCE_INLINE __m128 _mm_cmplt_ss(__m128 a, __m128 b)
{
- int8_t ALIGN_STRUCT(16)
- data[16] = {(int8_t) b0, (int8_t) b1, (int8_t) b2, (int8_t) b3,
- (int8_t) b4, (int8_t) b5, (int8_t) b6, (int8_t) b7,
- (int8_t) b8, (int8_t) b9, (int8_t) b10, (int8_t) b11,
- (int8_t) b12, (int8_t) b13, (int8_t) b14, (int8_t) b15};
- return (__m128i) vld1q_s8(data);
+ return _mm_move_ss(a, _mm_cmplt_ps(a, b));
}
-// Sets the 4 signed 32-bit integer values to i.
-//
-// r0 := i
-// r1 := i
-// r2 := i
-// r3 := I
-//
-// https://msdn.microsoft.com/en-us/library/vstudio/h4xscxat(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_set1_epi32(int _i)
+// Compares for inequality.
+// https://msdn.microsoft.com/en-us/library/sf44thbx(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_cmpneq_ps(__m128 a, __m128 b)
{
- return vreinterpretq_m128i_s32(vdupq_n_s32(_i));
+ return vreinterpretq_m128_u32(vmvnq_u32(
+ vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b))));
}
-// Sets the 2 signed 64-bit integer values to i.
-// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/whtfzhzk(v=vs.100)
-FORCE_INLINE __m128i _mm_set1_epi64(__m64 _i)
+// Compares for inequality.
+// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/ekya8fh4(v=vs.100)
+FORCE_INLINE __m128 _mm_cmpneq_ss(__m128 a, __m128 b)
{
- return vreinterpretq_m128i_s64(vdupq_n_s64((int64_t) _i));
+ return _mm_move_ss(a, _mm_cmpneq_ps(a, b));
}
-// Sets the 2 signed 64-bit integer values to i.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_set1_epi64x
-FORCE_INLINE __m128i _mm_set1_epi64x(int64_t _i)
+// Compares for not greater than or equal.
+// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/wsexys62(v=vs.100)
+FORCE_INLINE __m128 _mm_cmpnge_ps(__m128 a, __m128 b)
{
- return vreinterpretq_m128i_s64(vdupq_n_s64(_i));
+ return vreinterpretq_m128_u32(vmvnq_u32(
+ vcgeq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b))));
}
-// Sets the 4 signed 32-bit integer values.
-// https://msdn.microsoft.com/en-us/library/vstudio/019beekt(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_set_epi32(int i3, int i2, int i1, int i0)
+// Compares for not greater than or equal.
+// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/fk2y80s8(v=vs.100)
+FORCE_INLINE __m128 _mm_cmpnge_ss(__m128 a, __m128 b)
{
- int32_t ALIGN_STRUCT(16) data[4] = {i0, i1, i2, i3};
- return vreinterpretq_m128i_s32(vld1q_s32(data));
+ return _mm_move_ss(a, _mm_cmpnge_ps(a, b));
}
-// Returns the __m128i structure with its two 64-bit integer values
-// initialized to the values of the two 64-bit integers passed in.
-// https://msdn.microsoft.com/en-us/library/dk2sdw0h(v=vs.120).aspx
-FORCE_INLINE __m128i _mm_set_epi64x(int64_t i1, int64_t i2)
+// Compares for not greater than.
+// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/d0xh7w0s(v=vs.100)
+FORCE_INLINE __m128 _mm_cmpngt_ps(__m128 a, __m128 b)
{
- return vreinterpretq_m128i_s64(
- vcombine_s64(vcreate_s64(i2), vcreate_s64(i1)));
+ return vreinterpretq_m128_u32(vmvnq_u32(
+ vcgtq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b))));
}
-// Returns the __m128i structure with its two 64-bit integer values
-// initialized to the values of the two 64-bit integers passed in.
-// https://msdn.microsoft.com/en-us/library/dk2sdw0h(v=vs.120).aspx
-FORCE_INLINE __m128i _mm_set_epi64(__m64 i1, __m64 i2)
+// Compares for not greater than.
+// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/z7x9ydwh(v=vs.100)
+FORCE_INLINE __m128 _mm_cmpngt_ss(__m128 a, __m128 b)
{
- return _mm_set_epi64x((int64_t) i1, (int64_t) i2);
+ return _mm_move_ss(a, _mm_cmpngt_ps(a, b));
}
-// Set packed double-precision (64-bit) floating-point elements in dst with the
-// supplied values.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_set_pd
-FORCE_INLINE __m128d _mm_set_pd(double e1, double e0)
+// Compares for not less than or equal.
+// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/6a330kxw(v=vs.100)
+FORCE_INLINE __m128 _mm_cmpnle_ps(__m128 a, __m128 b)
{
- double ALIGN_STRUCT(16) data[2] = {e0, e1};
-#if defined(__aarch64__)
- return vreinterpretq_m128d_f64(vld1q_f64((float64_t *) data));
-#else
- return vreinterpretq_m128d_f32(vld1q_f32((float32_t *) data));
-#endif
+ return vreinterpretq_m128_u32(vmvnq_u32(
+ vcleq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b))));
}
-// Set packed double-precision (64-bit) floating-point elements in dst with the
-// supplied values in reverse order.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_setr_pd
-FORCE_INLINE __m128d _mm_setr_pd(double e1, double e0)
+// Compares for not less than or equal.
+// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/z7x9ydwh(v=vs.100)
+FORCE_INLINE __m128 _mm_cmpnle_ss(__m128 a, __m128 b)
{
- return _mm_set_pd(e0, e1);
+ return _mm_move_ss(a, _mm_cmpnle_ps(a, b));
}
-// Copy double-precision (64-bit) floating-point element a to the lower element
-// of dst, and zero the upper element.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_set_sd
-FORCE_INLINE __m128d _mm_set_sd(double a)
+// Compares for not less than.
+// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/4686bbdw(v=vs.100)
+FORCE_INLINE __m128 _mm_cmpnlt_ps(__m128 a, __m128 b)
{
- return _mm_set_pd(0, a);
+ return vreinterpretq_m128_u32(vmvnq_u32(
+ vcltq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b))));
}
-// Broadcast double-precision (64-bit) floating-point value a to all elements of
-// dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_set_pd1
-#define _mm_set_pd1 _mm_set1_pd
+// Compares for not less than.
+// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/56b9z2wf(v=vs.100)
+FORCE_INLINE __m128 _mm_cmpnlt_ss(__m128 a, __m128 b)
+{
+ return _mm_move_ss(a, _mm_cmpnlt_ps(a, b));
+}
-// Stores four single-precision, floating-point values.
-// https://msdn.microsoft.com/en-us/library/vstudio/s3h4ay6y(v=vs.100).aspx
-FORCE_INLINE void _mm_store_ps(float *p, __m128 a)
+// Compares the four 32-bit floats in a and b to check if any values are NaN.
+// Ordered compare between each value returns true for "orderable" and false for
+// "not orderable" (NaN).
+// https://msdn.microsoft.com/en-us/library/vstudio/0h9w00fx(v=vs.100).aspx see
+// also:
+// http://stackoverflow.com/questions/8627331/what-does-ordered-unordered-comparison-mean
+// http://stackoverflow.com/questions/29349621/neon-isnanval-intrinsics
+FORCE_INLINE __m128 _mm_cmpord_ps(__m128 a, __m128 b)
{
- vst1q_f32(p, vreinterpretq_f32_m128(a));
+ // Note: NEON does not have ordered compare builtin
+ // Need to compare a eq a and b eq b to check for NaN
+ // Do AND of results to get final
+ uint32x4_t ceqaa =
+ vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a));
+ uint32x4_t ceqbb =
+ vceqq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b));
+ return vreinterpretq_m128_u32(vandq_u32(ceqaa, ceqbb));
}
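Since an ordered-compare lane is all-ones only when neither input is NaN, the result doubles as a NaN mask; a sketch with an illustrative name:

// Zero out any NaN lanes of v.
static inline __m128 demo_zero_nans(__m128 v)
{
    return _mm_and_ps(v, _mm_cmpord_ps(v, v));
}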
-// Store the lower single-precision (32-bit) floating-point element from a into
-// 4 contiguous elements in memory. mem_addr must be aligned on a 16-byte
-// boundary or a general-protection exception may be generated.
-//
-// MEM[mem_addr+31:mem_addr] := a[31:0]
-// MEM[mem_addr+63:mem_addr+32] := a[31:0]
-// MEM[mem_addr+95:mem_addr+64] := a[31:0]
-// MEM[mem_addr+127:mem_addr+96] := a[31:0]
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_store_ps1
-FORCE_INLINE void _mm_store_ps1(float *p, __m128 a)
+// Compares for ordered.
+// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/343t62da(v=vs.100)
+FORCE_INLINE __m128 _mm_cmpord_ss(__m128 a, __m128 b)
{
- float32_t a0 = vgetq_lane_f32(vreinterpretq_f32_m128(a), 0);
- vst1q_f32(p, vdupq_n_f32(a0));
+ return _mm_move_ss(a, _mm_cmpord_ps(a, b));
}
-// Store the lower single-precision (32-bit) floating-point element from a into
-// 4 contiguous elements in memory. mem_addr must be aligned on a 16-byte
-// boundary or a general-protection exception may be generated.
-//
-// MEM[mem_addr+31:mem_addr] := a[31:0]
-// MEM[mem_addr+63:mem_addr+32] := a[31:0]
-// MEM[mem_addr+95:mem_addr+64] := a[31:0]
-// MEM[mem_addr+127:mem_addr+96] := a[31:0]
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_store1_ps
-#define _mm_store1_ps _mm_store_ps1
+// Compares for unordered.
+// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/khy6fk1t(v=vs.100)
+FORCE_INLINE __m128 _mm_cmpunord_ps(__m128 a, __m128 b)
+{
+ uint32x4_t f32a =
+ vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a));
+ uint32x4_t f32b =
+ vceqq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b));
+ return vreinterpretq_m128_u32(vmvnq_u32(vandq_u32(f32a, f32b)));
+}
-// Store 4 single-precision (32-bit) floating-point elements from a into memory
-// in reverse order. mem_addr must be aligned on a 16-byte boundary or a
-// general-protection exception may be generated.
-//
-// MEM[mem_addr+31:mem_addr] := a[127:96]
-// MEM[mem_addr+63:mem_addr+32] := a[95:64]
-// MEM[mem_addr+95:mem_addr+64] := a[63:32]
-// MEM[mem_addr+127:mem_addr+96] := a[31:0]
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_storer_ps
-FORCE_INLINE void _mm_storer_ps(float *p, __m128 a)
+// Compares for unordered.
+// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/2as2387b(v=vs.100)
+FORCE_INLINE __m128 _mm_cmpunord_ss(__m128 a, __m128 b)
{
- float32x4_t tmp = vrev64q_f32(vreinterpretq_f32_m128(a));
- float32x4_t rev = vextq_f32(tmp, tmp, 2);
- vst1q_f32(p, rev);
+ return _mm_move_ss(a, _mm_cmpunord_ps(a, b));
}
-// Stores four single-precision, floating-point values.
-// https://msdn.microsoft.com/en-us/library/44e30x22(v=vs.100).aspx
-FORCE_INLINE void _mm_storeu_ps(float *p, __m128 a)
+// Compares the lower single-precision floating point scalar values of a and b
+// using an equality operation. :
+// https://msdn.microsoft.com/en-us/library/93yx2h2b(v=vs.100).aspx
+FORCE_INLINE int _mm_comieq_ss(__m128 a, __m128 b)
{
- vst1q_f32(p, vreinterpretq_f32_m128(a));
+ uint32x4_t a_eq_b =
+ vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b));
+ return vgetq_lane_u32(a_eq_b, 0) & 0x1;
}
-// Stores four 32-bit integer values as (as a __m128i value) at the address p.
-// https://msdn.microsoft.com/en-us/library/vstudio/edk11s13(v=vs.100).aspx
-FORCE_INLINE void _mm_store_si128(__m128i *p, __m128i a)
+// Compares the lower single-precision floating point scalar values of a and b
+// using a greater than or equal operation. :
+// https://msdn.microsoft.com/en-us/library/8t80des6(v=vs.100).aspx
+FORCE_INLINE int _mm_comige_ss(__m128 a, __m128 b)
{
- vst1q_s32((int32_t *) p, vreinterpretq_s32_m128i(a));
+ uint32x4_t a_ge_b =
+ vcgeq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b));
+ return vgetq_lane_u32(a_ge_b, 0) & 0x1;
}
-// Stores four 32-bit integer values as (as a __m128i value) at the address p.
-// https://msdn.microsoft.com/en-us/library/vstudio/edk11s13(v=vs.100).aspx
-FORCE_INLINE void _mm_storeu_si128(__m128i *p, __m128i a)
+// Compares the lower single-precision floating point scalar values of a and b
+// using a greater than operation. :
+// https://msdn.microsoft.com/en-us/library/b0738e0t(v=vs.100).aspx
+FORCE_INLINE int _mm_comigt_ss(__m128 a, __m128 b)
{
- vst1q_s32((int32_t *) p, vreinterpretq_s32_m128i(a));
+ uint32x4_t a_gt_b =
+ vcgtq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b));
+ return vgetq_lane_u32(a_gt_b, 0) & 0x1;
}
-// Stores the lower single - precision, floating - point value.
-// https://msdn.microsoft.com/en-us/library/tzz10fbx(v=vs.100).aspx
-FORCE_INLINE void _mm_store_ss(float *p, __m128 a)
+// Compares the lower single-precision floating point scalar values of a and b
+// using a less than or equal operation. :
+// https://msdn.microsoft.com/en-us/library/1w4t7c57(v=vs.90).aspx
+FORCE_INLINE int _mm_comile_ss(__m128 a, __m128 b)
{
- vst1q_lane_f32(p, vreinterpretq_f32_m128(a), 0);
+ uint32x4_t a_le_b =
+ vcleq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b));
+ return vgetq_lane_u32(a_le_b, 0) & 0x1;
}
-// Store 128-bits (composed of 2 packed double-precision (64-bit) floating-point
-// elements) from a into memory. mem_addr must be aligned on a 16-byte boundary
-// or a general-protection exception may be generated.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_store_pd
-FORCE_INLINE void _mm_store_pd(double *mem_addr, __m128d a)
+// Compares the lower single-precision floating point scalar values of a and b
+// using a less than operation. :
+// https://msdn.microsoft.com/en-us/library/2kwe606b(v=vs.90).aspx Important
+// note!! The documentation on MSDN is incorrect! If either of the values is a
+// NAN the docs say you will get a one, but in fact, it will return a zero!!
+FORCE_INLINE int _mm_comilt_ss(__m128 a, __m128 b)
{
-#if defined(__aarch64__)
- vst1q_f64((float64_t *) mem_addr, vreinterpretq_f64_m128d(a));
-#else
- vst1q_f32((float32_t *) mem_addr, vreinterpretq_f32_m128d(a));
-#endif
+ uint32x4_t a_lt_b =
+ vcltq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b));
+ return vgetq_lane_u32(a_lt_b, 0) & 0x1;
}
-// Store the upper double-precision (64-bit) floating-point element from a into
-// memory.
+// Compares the lower single-precision floating point scalar values of a and b
+// using an inequality operation. :
+// https://msdn.microsoft.com/en-us/library/bafh5e0a(v=vs.90).aspx
+FORCE_INLINE int _mm_comineq_ss(__m128 a, __m128 b)
+{
+ return !_mm_comieq_ss(a, b);
+}
+
+// Convert packed signed 32-bit integers in b to packed single-precision
+// (32-bit) floating-point elements, store the results in the lower 2 elements
+// of dst, and copy the upper 2 packed elements from a to the upper elements of
+// dst.
//
-// MEM[mem_addr+63:mem_addr] := a[127:64]
+// dst[31:0] := Convert_Int32_To_FP32(b[31:0])
+// dst[63:32] := Convert_Int32_To_FP32(b[63:32])
+// dst[95:64] := a[95:64]
+// dst[127:96] := a[127:96]
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_storeh_pd
-FORCE_INLINE void _mm_storeh_pd(double *mem_addr, __m128d a)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvt_pi2ps
+FORCE_INLINE __m128 _mm_cvt_pi2ps(__m128 a, __m64 b)
{
-#if defined(__aarch64__)
- vst1_f64((float64_t *) mem_addr, vget_high_f64(vreinterpretq_f64_m128d(a)));
-#else
- vst1_f32((float32_t *) mem_addr, vget_high_f32(vreinterpretq_f32_m128d(a)));
-#endif
+ return vreinterpretq_m128_f32(
+ vcombine_f32(vcvt_f32_s32(vreinterpret_s32_m64(b)),
+ vget_high_f32(vreinterpretq_f32_m128(a))));
}
-// Store the lower double-precision (64-bit) floating-point element from a into
-// memory.
+// Convert packed single-precision (32-bit) floating-point elements in a to
+// packed 32-bit integers, and store the results in dst.
//
-// MEM[mem_addr+63:mem_addr] := a[63:0]
+// FOR j := 0 to 1
+// i := 32*j
+// dst[i+31:i] := Convert_FP32_To_Int32(a[i+31:i])
+// ENDFOR
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_storel_pd
-FORCE_INLINE void _mm_storel_pd(double *mem_addr, __m128d a)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvt_ps2pi
+FORCE_INLINE __m64 _mm_cvt_ps2pi(__m128 a)
{
#if defined(__aarch64__)
- vst1_f64((float64_t *) mem_addr, vget_low_f64(vreinterpretq_f64_m128d(a)));
+ return vreinterpret_m64_s32(
+ vget_low_s32(vcvtnq_s32_f32(vrndiq_f32(vreinterpretq_f32_m128(a)))));
#else
- vst1_f32((float32_t *) mem_addr, vget_low_f32(vreinterpretq_f32_m128d(a)));
+ return vreinterpret_m64_s32(vcvt_s32_f32(vget_low_f32(
+ vreinterpretq_f32_m128(_mm_round_ps(a, _MM_FROUND_CUR_DIRECTION)))));
#endif
}
-// Store 2 double-precision (64-bit) floating-point elements from a into memory
-// in reverse order. mem_addr must be aligned on a 16-byte boundary or a
-// general-protection exception may be generated.
+// Convert the signed 32-bit integer b to a single-precision (32-bit)
+// floating-point element, store the result in the lower element of dst, and
+// copy the upper 3 packed elements from a to the upper elements of dst.
//
-// MEM[mem_addr+63:mem_addr] := a[127:64]
-// MEM[mem_addr+127:mem_addr+64] := a[63:0]
+// dst[31:0] := Convert_Int32_To_FP32(b[31:0])
+// dst[127:32] := a[127:32]
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_storer_pd
-FORCE_INLINE void _mm_storer_pd(double *mem_addr, __m128d a)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvt_si2ss
+FORCE_INLINE __m128 _mm_cvt_si2ss(__m128 a, int b)
{
- float32x4_t f = vreinterpretq_f32_m128d(a);
- _mm_store_pd(mem_addr, vreinterpretq_m128d_f32(vextq_f32(f, f, 2)));
+ return vreinterpretq_m128_f32(
+ vsetq_lane_f32((float) b, vreinterpretq_f32_m128(a), 0));
}
-// Store the lower double-precision (64-bit) floating-point element from a into
-// 2 contiguous elements in memory. mem_addr must be aligned on a 16-byte
-// boundary or a general-protection exception may be generated.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_store_pd1
-FORCE_INLINE void _mm_store_pd1(double *mem_addr, __m128d a)
+// Convert the lower single-precision (32-bit) floating-point element in a to a
+// 32-bit integer, and store the result in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvt_ss2si
+FORCE_INLINE int _mm_cvt_ss2si(__m128 a)
{
#if defined(__aarch64__)
- float64x1_t a_low = vget_low_f64(vreinterpretq_f64_m128d(a));
- vst1q_f64((float64_t *) mem_addr,
- vreinterpretq_f64_m128d(vcombine_f64(a_low, a_low)));
+ return vgetq_lane_s32(vcvtnq_s32_f32(vrndiq_f32(vreinterpretq_f32_m128(a))),
+ 0);
#else
- float32x2_t a_low = vget_low_f32(vreinterpretq_f32_m128d(a));
- vst1q_f32((float32_t *) mem_addr,
- vreinterpretq_f32_m128d(vcombine_f32(a_low, a_low)));
+ float32_t data = vgetq_lane_f32(
+ vreinterpretq_f32_m128(_mm_round_ps(a, _MM_FROUND_CUR_DIRECTION)), 0);
+ return (int32_t) data;
#endif
}
-// Store the lower double-precision (64-bit) floating-point element from a into
-// 2 contiguous elements in memory. mem_addr must be aligned on a 16-byte
-// boundary or a general-protection exception may be generated.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#expand=9,526,5601&text=_mm_store1_pd
-#define _mm_store1_pd _mm_store_pd1
-
-// Store 128-bits (composed of 2 packed double-precision (64-bit) floating-point
-// elements) from a into memory. mem_addr does not need to be aligned on any
-// particular boundary.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_storeu_pd
-FORCE_INLINE void _mm_storeu_pd(double *mem_addr, __m128d a)
+// Convert packed 16-bit integers in a to packed single-precision (32-bit)
+// floating-point elements, and store the results in dst.
+//
+// FOR j := 0 to 3
+// i := j*16
+// m := j*32
+// dst[m+31:m] := Convert_Int16_To_FP32(a[i+15:i])
+// ENDFOR
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtpi16_ps
+FORCE_INLINE __m128 _mm_cvtpi16_ps(__m64 a)
{
- _mm_store_pd(mem_addr, a);
+ return vreinterpretq_m128_f32(
+ vcvtq_f32_s32(vmovl_s16(vreinterpret_s16_m64(a))));
}
-// Reads the lower 64 bits of b and stores them into the lower 64 bits of a.
-// https://msdn.microsoft.com/en-us/library/hhwf428f%28v=vs.90%29.aspx
-FORCE_INLINE void _mm_storel_epi64(__m128i *a, __m128i b)
+// Convert packed 32-bit integers in b to packed single-precision (32-bit)
+// floating-point elements, store the results in the lower 2 elements of dst,
+// and copy the upper 2 packed elements from a to the upper elements of dst.
+//
+// dst[31:0] := Convert_Int32_To_FP32(b[31:0])
+// dst[63:32] := Convert_Int32_To_FP32(b[63:32])
+// dst[95:64] := a[95:64]
+// dst[127:96] := a[127:96]
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtpi32_ps
+FORCE_INLINE __m128 _mm_cvtpi32_ps(__m128 a, __m64 b)
{
- uint64x1_t hi = vget_high_u64(vreinterpretq_u64_m128i(*a));
- uint64x1_t lo = vget_low_u64(vreinterpretq_u64_m128i(b));
- *a = vreinterpretq_m128i_u64(vcombine_u64(lo, hi));
+ return vreinterpretq_m128_f32(
+ vcombine_f32(vcvt_f32_s32(vreinterpret_s32_m64(b)),
+ vget_high_f32(vreinterpretq_f32_m128(a))));
}
-// Stores the lower two single-precision floating point values of a to the
-// address p.
+// Convert packed signed 32-bit integers in a to packed single-precision
+// (32-bit) floating-point elements, store the results in the lower 2 elements
+// of dst, then convert the packed signed 32-bit integers in b to
+// single-precision (32-bit) floating-point elements, and store the results in
+// the upper 2 elements of dst.
//
-// *p0 := a0
-// *p1 := a1
+// dst[31:0] := Convert_Int32_To_FP32(a[31:0])
+// dst[63:32] := Convert_Int32_To_FP32(a[63:32])
+// dst[95:64] := Convert_Int32_To_FP32(b[31:0])
+// dst[127:96] := Convert_Int32_To_FP32(b[63:32])
//
-// https://msdn.microsoft.com/en-us/library/h54t98ks(v=vs.90).aspx
-FORCE_INLINE void _mm_storel_pi(__m64 *p, __m128 a)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtpi32x2_ps
+FORCE_INLINE __m128 _mm_cvtpi32x2_ps(__m64 a, __m64 b)
{
- *p = vreinterpret_m64_f32(vget_low_f32(a));
+ return vreinterpretq_m128_f32(vcvtq_f32_s32(
+ vcombine_s32(vreinterpret_s32_m64(a), vreinterpret_s32_m64(b))));
}
-// Stores the upper two single-precision, floating-point values of a to the
-// address p.
+// Convert the lower packed 8-bit integers in a to packed single-precision
+// (32-bit) floating-point elements, and store the results in dst.
//
-// *p0 := a2
-// *p1 := a3
+// FOR j := 0 to 3
+// i := j*8
+// m := j*32
+// dst[m+31:m] := Convert_Int8_To_FP32(a[i+7:i])
+// ENDFOR
//
-// https://msdn.microsoft.com/en-us/library/a7525fs8(v%3dvs.90).aspx
-FORCE_INLINE void _mm_storeh_pi(__m64 *p, __m128 a)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtpi8_ps
+FORCE_INLINE __m128 _mm_cvtpi8_ps(__m64 a)
{
- *p = vreinterpret_m64_f32(vget_high_f32(a));
+ return vreinterpretq_m128_f32(vcvtq_f32_s32(
+ vmovl_s16(vget_low_s16(vmovl_s8(vreinterpret_s8_m64(a))))));
}
-// Loads a single single-precision, floating-point value, copying it into all
-// four words
-// https://msdn.microsoft.com/en-us/library/vstudio/5cdkf716(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_load1_ps(const float *p)
+// Convert packed single-precision (32-bit) floating-point elements in a to
+// packed 16-bit integers, and store the results in dst. Note: this intrinsic
+// will generate 0x7FFF, rather than 0x8000, for input values between 0x7FFF and
+// 0x7FFFFFFF.
+//
+// FOR j := 0 to 3
+// i := 16*j
+// k := 32*j
+// IF a[k+31:k] >= FP32(0x7FFF) && a[k+31:k] <= FP32(0x7FFFFFFF)
+// dst[i+15:i] := 0x7FFF
+// ELSE
+// dst[i+15:i] := Convert_FP32_To_Int16(a[k+31:k])
+// FI
+// ENDFOR
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtps_pi16
+FORCE_INLINE __m64 _mm_cvtps_pi16(__m128 a)
{
- return vreinterpretq_m128_f32(vld1q_dup_f32(p));
+ const __m128 i16Min = _mm_set_ps1((float) INT16_MIN);
+ const __m128 i16Max = _mm_set_ps1((float) INT16_MAX);
+ const __m128 i32Max = _mm_set_ps1((float) INT32_MAX);
+ const __m128i maxMask = _mm_castps_si128(
+ _mm_and_ps(_mm_cmpge_ps(a, i16Max), _mm_cmple_ps(a, i32Max)));
+ const __m128i betweenMask = _mm_castps_si128(
+ _mm_and_ps(_mm_cmpgt_ps(a, i16Min), _mm_cmplt_ps(a, i16Max)));
+ const __m128i minMask = _mm_cmpeq_epi32(_mm_or_si128(maxMask, betweenMask),
+ _mm_setzero_si128());
+ __m128i max = _mm_and_si128(maxMask, _mm_set1_epi32(INT16_MAX));
+ __m128i min = _mm_and_si128(minMask, _mm_set1_epi32(INT16_MIN));
+ __m128i cvt = _mm_and_si128(betweenMask, _mm_cvtps_epi32(a));
+ __m128i res32 = _mm_or_si128(_mm_or_si128(max, min), cvt);
+ return vreinterpret_m64_s16(vmovn_s32(vreinterpretq_s32_m128i(res32)));
}
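+// Illustrative example of the saturation behaviour above (assumes _mm_setr_ps
+// from this header):
+//
+//   __m128 v = _mm_setr_ps(1.5f, -40000.0f, 70000.0f, 1.0e9f);
+//   __m64 r = _mm_cvtps_pi16(v);
+//   // 16-bit lanes of r: 2, -32768, 32767, 32767 -- out-of-range inputs
+//   // clamp to INT16_MIN/INT16_MAX as in the pseudocode.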
-// Load a single-precision (32-bit) floating-point element from memory into all
-// elements of dst.
+// Convert packed single-precision (32-bit) floating-point elements in a to
+// packed 32-bit integers, and store the results in dst.
//
-// dst[31:0] := MEM[mem_addr+31:mem_addr]
-// dst[63:32] := MEM[mem_addr+31:mem_addr]
-// dst[95:64] := MEM[mem_addr+31:mem_addr]
-// dst[127:96] := MEM[mem_addr+31:mem_addr]
+// FOR j := 0 to 1
+// i := 32*j
+// dst[i+31:i] := Convert_FP32_To_Int32(a[i+31:i])
+// ENDFOR
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_load_ps1
-#define _mm_load_ps1 _mm_load1_ps
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtps_pi32
+#define _mm_cvtps_pi32(a) _mm_cvt_ps2pi(a)
-// Sets the lower two single-precision, floating-point values with 64
-// bits of data loaded from the address p; the upper two values are passed
-// through from a.
+// Convert packed single-precision (32-bit) floating-point elements in a to
+// packed 8-bit integers, and store the results in lower 4 elements of dst.
+// Note: this intrinsic will generate 0x7F, rather than 0x80, for input values
+// between 0x7F and 0x7FFFFFFF.
//
-// Return Value
-// r0 := *p0
-// r1 := *p1
-// r2 := a2
-// r3 := a3
+// FOR j := 0 to 3
+// i := 8*j
+// k := 32*j
+// IF a[k+31:k] >= FP32(0x7F) && a[k+31:k] <= FP32(0x7FFFFFFF)
+// dst[i+7:i] := 0x7F
+// ELSE
+// dst[i+7:i] := Convert_FP32_To_Int8(a[k+31:k])
+// FI
+// ENDFOR
//
-// https://msdn.microsoft.com/en-us/library/s57cyak2(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_loadl_pi(__m128 a, __m64 const *p)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtps_pi8
+FORCE_INLINE __m64 _mm_cvtps_pi8(__m128 a)
+{
+ const __m128 i8Min = _mm_set_ps1((float) INT8_MIN);
+ const __m128 i8Max = _mm_set_ps1((float) INT8_MAX);
+ const __m128 i32Max = _mm_set_ps1((float) INT32_MAX);
+ const __m128i maxMask = _mm_castps_si128(
+ _mm_and_ps(_mm_cmpge_ps(a, i8Max), _mm_cmple_ps(a, i32Max)));
+ const __m128i betweenMask = _mm_castps_si128(
+ _mm_and_ps(_mm_cmpgt_ps(a, i8Min), _mm_cmplt_ps(a, i8Max)));
+ const __m128i minMask = _mm_cmpeq_epi32(_mm_or_si128(maxMask, betweenMask),
+ _mm_setzero_si128());
+ __m128i max = _mm_and_si128(maxMask, _mm_set1_epi32(INT8_MAX));
+ __m128i min = _mm_and_si128(minMask, _mm_set1_epi32(INT8_MIN));
+ __m128i cvt = _mm_and_si128(betweenMask, _mm_cvtps_epi32(a));
+ __m128i res32 = _mm_or_si128(_mm_or_si128(max, min), cvt);
+ int16x4_t res16 = vmovn_s32(vreinterpretq_s32_m128i(res32));
+ int8x8_t res8 = vmovn_s16(vcombine_s16(res16, res16));
+ uint32_t bitMask[2] = {0xFFFFFFFF, 0};
+ int8x8_t mask = vreinterpret_s8_u32(vld1_u32(bitMask));
+
+ return vreinterpret_m64_s8(vorr_s8(vand_s8(mask, res8), vdup_n_s8(0)));
+}
+
+// Convert packed unsigned 16-bit integers in a to packed single-precision
+// (32-bit) floating-point elements, and store the results in dst.
+//
+// FOR j := 0 to 3
+// i := j*16
+// m := j*32
+// dst[m+31:m] := Convert_UInt16_To_FP32(a[i+15:i])
+// ENDFOR
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtpu16_ps
+FORCE_INLINE __m128 _mm_cvtpu16_ps(__m64 a)
{
return vreinterpretq_m128_f32(
- vcombine_f32(vld1_f32((const float32_t *) p), vget_high_f32(a)));
+ vcvtq_f32_u32(vmovl_u16(vreinterpret_u16_m64(a))));
}
-// Load 4 single-precision (32-bit) floating-point elements from memory into dst
-// in reverse order. mem_addr must be aligned on a 16-byte boundary or a
-// general-protection exception may be generated.
+// Convert the lower packed unsigned 8-bit integers in a to packed
+// single-precision (32-bit) floating-point elements, and store the results in
+// dst.
//
-// dst[31:0] := MEM[mem_addr+127:mem_addr+96]
-// dst[63:32] := MEM[mem_addr+95:mem_addr+64]
-// dst[95:64] := MEM[mem_addr+63:mem_addr+32]
-// dst[127:96] := MEM[mem_addr+31:mem_addr]
+// FOR j := 0 to 3
+// i := j*8
+// m := j*32
+// dst[m+31:m] := Convert_UInt8_To_FP32(a[i+7:i])
+// ENDFOR
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loadr_ps
-FORCE_INLINE __m128 _mm_loadr_ps(const float *p)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtpu8_ps
+FORCE_INLINE __m128 _mm_cvtpu8_ps(__m64 a)
{
- float32x4_t v = vrev64q_f32(vld1q_f32(p));
- return vreinterpretq_m128_f32(vextq_f32(v, v, 2));
+ return vreinterpretq_m128_f32(vcvtq_f32_u32(
+ vmovl_u16(vget_low_u16(vmovl_u8(vreinterpret_u8_m64(a))))));
}
-// Sets the upper two single-precision, floating-point values with 64
-// bits of data loaded from the address p; the lower two values are passed
-// through from a.
+// Convert the signed 32-bit integer b to a single-precision (32-bit)
+// floating-point element, store the result in the lower element of dst, and
+// copy the upper 3 packed elements from a to the upper elements of dst.
//
-// r0 := a0
-// r1 := a1
-// r2 := *p0
-// r3 := *p1
+// dst[31:0] := Convert_Int32_To_FP32(b[31:0])
+// dst[127:32] := a[127:32]
//
-// https://msdn.microsoft.com/en-us/library/w92wta0x(v%3dvs.100).aspx
-FORCE_INLINE __m128 _mm_loadh_pi(__m128 a, __m64 const *p)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtsi32_ss
+#define _mm_cvtsi32_ss(a, b) _mm_cvt_si2ss(a, b)
+
+// Convert the signed 64-bit integer b to a single-precision (32-bit)
+// floating-point element, store the result in the lower element of dst, and
+// copy the upper 3 packed elements from a to the upper elements of dst.
+//
+// dst[31:0] := Convert_Int64_To_FP32(b[63:0])
+// dst[127:32] := a[127:32]
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtsi64_ss
+FORCE_INLINE __m128 _mm_cvtsi64_ss(__m128 a, int64_t b)
{
return vreinterpretq_m128_f32(
- vcombine_f32(vget_low_f32(a), vld1_f32((const float32_t *) p)));
+ vsetq_lane_f32((float) b, vreinterpretq_f32_m128(a), 0));
}
-// Loads four single-precision, floating-point values.
-// https://msdn.microsoft.com/en-us/library/vstudio/zzd50xxt(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_load_ps(const float *p)
+// Copy the lower single-precision (32-bit) floating-point element of a to dst.
+//
+// dst[31:0] := a[31:0]
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtss_f32
+FORCE_INLINE float _mm_cvtss_f32(__m128 a)
{
- return vreinterpretq_m128_f32(vld1q_f32(p));
+ return vgetq_lane_f32(vreinterpretq_f32_m128(a), 0);
}
-// Loads four single-precision, floating-point values.
-// https://msdn.microsoft.com/en-us/library/x1b16s7z%28v=vs.90%29.aspx
-FORCE_INLINE __m128 _mm_loadu_ps(const float *p)
+// Convert the lower single-precision (32-bit) floating-point element in a to a
+// 32-bit integer, and store the result in dst.
+//
+// dst[31:0] := Convert_FP32_To_Int32(a[31:0])
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtss_si32
+#define _mm_cvtss_si32(a) _mm_cvt_ss2si(a)
+
+// Convert the lower single-precision (32-bit) floating-point element in a to a
+// 64-bit integer, and store the result in dst.
+//
+// dst[63:0] := Convert_FP32_To_Int64(a[31:0])
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtss_si64
+FORCE_INLINE int64_t _mm_cvtss_si64(__m128 a)
{
- // for neon, alignment doesn't matter, so _mm_load_ps and _mm_loadu_ps are
- // equivalent for neon
- return vreinterpretq_m128_f32(vld1q_f32(p));
+#if defined(__aarch64__)
+ return (int64_t) vgetq_lane_f32(vrndiq_f32(vreinterpretq_f32_m128(a)), 0);
+#else
+ float32_t data = vgetq_lane_f32(
+ vreinterpretq_f32_m128(_mm_round_ps(a, _MM_FROUND_CUR_DIRECTION)), 0);
+ return (int64_t) data;
+#endif
}
-// Load unaligned 16-bit integer from memory into the first element of dst.
+// Convert packed single-precision (32-bit) floating-point elements in a to
+// packed 32-bit integers with truncation, and store the results in dst.
//
-// dst[15:0] := MEM[mem_addr+15:mem_addr]
-// dst[MAX:16] := 0
+// FOR j := 0 to 1
+// i := 32*j
+// dst[i+31:i] := Convert_FP32_To_Int32_Truncate(a[i+31:i])
+// ENDFOR
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loadu_si16
-FORCE_INLINE __m128i _mm_loadu_si16(const void *p)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtt_ps2pi
+FORCE_INLINE __m64 _mm_cvtt_ps2pi(__m128 a)
{
- return vreinterpretq_m128i_s16(
- vsetq_lane_s16(*(const int16_t *) p, vdupq_n_s16(0), 0));
+ return vreinterpret_m64_s32(
+ vget_low_s32(vcvtq_s32_f32(vreinterpretq_f32_m128(a))));
}
-// Load unaligned 64-bit integer from memory into the first element of dst.
+// Convert the lower single-precision (32-bit) floating-point element in a to a
+// 32-bit integer with truncation, and store the result in dst.
//
-// dst[63:0] := MEM[mem_addr+63:mem_addr]
-// dst[MAX:64] := 0
+// dst[31:0] := Convert_FP32_To_Int32_Truncate(a[31:0])
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loadu_si64
-FORCE_INLINE __m128i _mm_loadu_si64(const void *p)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtt_ss2si
+FORCE_INLINE int _mm_cvtt_ss2si(__m128 a)
{
- return vreinterpretq_m128i_s64(
- vcombine_s64(vld1_s64((const int64_t *) p), vdup_n_s64(0)));
+ return vgetq_lane_s32(vcvtq_s32_f32(vreinterpretq_f32_m128(a)), 0);
}
-// Load a double-precision (64-bit) floating-point element from memory into the
-// lower of dst, and zero the upper element. mem_addr does not need to be
-// aligned on any particular boundary.
+// Convert packed single-precision (32-bit) floating-point elements in a to
+// packed 32-bit integers with truncation, and store the results in dst.
//
-// dst[63:0] := MEM[mem_addr+63:mem_addr]
-// dst[127:64] := 0
+// FOR j := 0 to 1
+// i := 32*j
+// dst[i+31:i] := Convert_FP32_To_Int32_Truncate(a[i+31:i])
+// ENDFOR
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_load_sd
-FORCE_INLINE __m128d _mm_load_sd(const double *p)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvttps_pi32
+#define _mm_cvttps_pi32(a) _mm_cvtt_ps2pi(a)
+
+// Convert the lower single-precision (32-bit) floating-point element in a to a
+// 32-bit integer with truncation, and store the result in dst.
+//
+// dst[31:0] := Convert_FP32_To_Int32_Truncate(a[31:0])
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvttss_si32
+#define _mm_cvttss_si32(a) _mm_cvtt_ss2si(a)
+
+// Convert the lower single-precision (32-bit) floating-point element in a to a
+// 64-bit integer with truncation, and store the result in dst.
+//
+// dst[63:0] := Convert_FP32_To_Int64_Truncate(a[31:0])
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvttss_si64
+FORCE_INLINE int64_t _mm_cvttss_si64(__m128 a)
{
-#if defined(__aarch64__)
- return vreinterpretq_m128d_f64(vsetq_lane_f64(*p, vdupq_n_f64(0), 0));
-#else
- const float *fp = (const float *) p;
- float ALIGN_STRUCT(16) data[4] = {fp[0], fp[1], 0, 0};
- return vreinterpretq_m128d_f32(vld1q_f32(data));
-#endif
+ return (int64_t) vgetq_lane_f32(vreinterpretq_f32_m128(a), 0);
}
-// Loads two double-precision from 16-byte aligned memory, floating-point
-// values.
+// Divides the four single-precision, floating-point values of a and b.
//
-// dst[127:0] := MEM[mem_addr+127:mem_addr]
+// r0 := a0 / b0
+// r1 := a1 / b1
+// r2 := a2 / b2
+// r3 := a3 / b3
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_load_pd
-FORCE_INLINE __m128d _mm_load_pd(const double *p)
+// https://msdn.microsoft.com/en-us/library/edaw8147(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_div_ps(__m128 a, __m128 b)
{
-#if defined(__aarch64__)
- return vreinterpretq_m128d_f64(vld1q_f64(p));
+#if defined(__aarch64__) && !SSE2NEON_PRECISE_DIV
+ return vreinterpretq_m128_f32(
+ vdivq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
#else
- const float *fp = (const float *) p;
- float ALIGN_STRUCT(16) data[4] = {fp[0], fp[1], fp[2], fp[3]};
- return vreinterpretq_m128d_f32(vld1q_f32(data));
+ float32x4_t recip = vrecpeq_f32(vreinterpretq_f32_m128(b));
+ recip = vmulq_f32(recip, vrecpsq_f32(recip, vreinterpretq_f32_m128(b)));
+#if SSE2NEON_PRECISE_DIV
+ // Additional Newton-Raphson iteration for accuracy
+ recip = vmulq_f32(recip, vrecpsq_f32(recip, vreinterpretq_f32_m128(b)));
+#endif
+ return vreinterpretq_m128_f32(vmulq_f32(vreinterpretq_f32_m128(a), recip));
#endif
}
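+// Note on the fallback path above: vrecpsq_f32(recip, b) evaluates
+// (2 - b * recip), so recip' = recip * (2 - b * recip) is one Newton-Raphson
+// refinement of the initial vrecpeq_f32 estimate; the quotient is then formed
+// as a * (1 / b).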
-// Loads two double-precision from unaligned memory, floating-point values.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loadu_pd
-FORCE_INLINE __m128d _mm_loadu_pd(const double *p)
+// Divides the scalar single-precision floating point value of a by b.
+// https://msdn.microsoft.com/en-us/library/4y73xa49(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_div_ss(__m128 a, __m128 b)
{
- return _mm_load_pd(p);
+ float32_t value =
+ vgetq_lane_f32(vreinterpretq_f32_m128(_mm_div_ps(a, b)), 0);
+ return vreinterpretq_m128_f32(
+ vsetq_lane_f32(value, vreinterpretq_f32_m128(a), 0));
}
-// Loads an single - precision, floating - point value into the low word and
-// clears the upper three words.
-// https://msdn.microsoft.com/en-us/library/548bb9h4%28v=vs.90%29.aspx
-FORCE_INLINE __m128 _mm_load_ss(const float *p)
-{
- return vreinterpretq_m128_f32(vsetq_lane_f32(*p, vdupq_n_f32(0), 0));
-}
+// Extract a 16-bit integer from a, selected with imm8, and store the result in
+// the lower element of dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_extract_pi16
+#define _mm_extract_pi16(a, imm) \
+ (int32_t) vget_lane_u16(vreinterpret_u16_m64(a), (imm))
-// Load 64-bit integer from memory into the first element of dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loadl_epi64
-FORCE_INLINE __m128i _mm_loadl_epi64(__m128i const *p)
+// Free aligned memory that was allocated with _mm_malloc.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_free
+FORCE_INLINE void _mm_free(void *addr)
{
- /* Load the lower 64 bits of the value pointed to by p into the
- * lower 64 bits of the result, zeroing the upper 64 bits of the result.
- */
- return vreinterpretq_m128i_s32(
- vcombine_s32(vld1_s32((int32_t const *) p), vcreate_s32(0)));
+ free(addr);
}
-// Load a double-precision (64-bit) floating-point element from memory into the
-// lower element of dst, and copy the upper element from a to dst. mem_addr does
-// not need to be aligned on any particular boundary.
-//
-// dst[63:0] := MEM[mem_addr+63:mem_addr]
-// dst[127:64] := a[127:64]
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loadl_pd
-FORCE_INLINE __m128d _mm_loadl_pd(__m128d a, const double *p)
+// Macro: Get the flush zero bits from the MXCSR control and status register.
+// The flush zero may contain any of the following flags: _MM_FLUSH_ZERO_ON or
+// _MM_FLUSH_ZERO_OFF
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_MM_GET_FLUSH_ZERO_MODE
+FORCE_INLINE unsigned int _sse2neon_mm_get_flush_zero_mode()
{
+ union {
+ fpcr_bitfield field;
#if defined(__aarch64__)
- return vreinterpretq_m128d_f64(
- vcombine_f64(vld1_f64(p), vget_high_f64(vreinterpretq_f64_m128d(a))));
+ uint64_t value;
#else
- return vreinterpretq_m128d_f32(
- vcombine_f32(vld1_f32((const float *) p),
- vget_high_f32(vreinterpretq_f32_m128d(a))));
+ uint32_t value;
#endif
-}
+ } r;
-// Load 2 double-precision (64-bit) floating-point elements from memory into dst
-// in reverse order. mem_addr must be aligned on a 16-byte boundary or a
-// general-protection exception may be generated.
-//
-// dst[63:0] := MEM[mem_addr+127:mem_addr+64]
-// dst[127:64] := MEM[mem_addr+63:mem_addr]
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loadr_pd
-FORCE_INLINE __m128d _mm_loadr_pd(const double *p)
-{
#if defined(__aarch64__)
- float64x2_t v = vld1q_f64(p);
- return vreinterpretq_m128d_f64(vextq_f64(v, v, 1));
+ asm volatile("mrs %0, FPCR" : "=r"(r.value)); /* read */
#else
- int64x2_t v = vld1q_s64((const int64_t *) p);
- return vreinterpretq_m128d_s64(vextq_s64(v, v, 1));
+ asm volatile("vmrs %0, FPSCR" : "=r"(r.value)); /* read */
#endif
+
+ return r.field.bit24 ? _MM_FLUSH_ZERO_ON : _MM_FLUSH_ZERO_OFF;
}
-// Sets the low word to the single-precision, floating-point value of b
-// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/35hdzazd(v=vs.100)
-FORCE_INLINE __m128 _mm_move_ss(__m128 a, __m128 b)
+// Macro: Get the rounding mode bits from the MXCSR control and status register.
+// The rounding mode may contain any of the following flags: _MM_ROUND_NEAREST,
+// _MM_ROUND_DOWN, _MM_ROUND_UP, _MM_ROUND_TOWARD_ZERO
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_MM_GET_ROUNDING_MODE
+FORCE_INLINE unsigned int _MM_GET_ROUNDING_MODE()
{
- return vreinterpretq_m128_f32(
- vsetq_lane_f32(vgetq_lane_f32(vreinterpretq_f32_m128(b), 0),
- vreinterpretq_f32_m128(a), 0));
+ union {
+ fpcr_bitfield field;
+#if defined(__aarch64__)
+ uint64_t value;
+#else
+ uint32_t value;
+#endif
+ } r;
+
+#if defined(__aarch64__)
+ asm volatile("mrs %0, FPCR" : "=r"(r.value)); /* read */
+#else
+ asm volatile("vmrs %0, FPSCR" : "=r"(r.value)); /* read */
+#endif
+
+ if (r.field.bit22) {
+ return r.field.bit23 ? _MM_ROUND_TOWARD_ZERO : _MM_ROUND_UP;
+ } else {
+ return r.field.bit23 ? _MM_ROUND_DOWN : _MM_ROUND_NEAREST;
+ }
}
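+// For reference, the FPCR/FPSCR RMode bits [23:22] decoded above map to the
+// SSE rounding modes as follows:
+//   0b00 -> _MM_ROUND_NEAREST
+//   0b01 -> _MM_ROUND_UP (towards +infinity)
+//   0b10 -> _MM_ROUND_DOWN (towards -infinity)
+//   0b11 -> _MM_ROUND_TOWARD_ZERO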
-// Move the lower double-precision (64-bit) floating-point element from b to the
-// lower element of dst, and copy the upper element from a to the upper element
-// of dst.
-//
-// dst[63:0] := b[63:0]
-// dst[127:64] := a[127:64]
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_move_sd
-FORCE_INLINE __m128d _mm_move_sd(__m128d a, __m128d b)
+// Copy a to dst, and insert the 16-bit integer i into dst at the location
+// specified by imm8.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_insert_pi16
+#define _mm_insert_pi16(a, b, imm) \
+ __extension__({ \
+ vreinterpret_m64_s16( \
+ vset_lane_s16((b), vreinterpret_s16_m64(a), (imm))); \
+ })
+
+// Loads four single-precision, floating-point values.
+// https://msdn.microsoft.com/en-us/library/vstudio/zzd50xxt(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_load_ps(const float *p)
{
- return vreinterpretq_m128d_f32(
- vcombine_f32(vget_low_f32(vreinterpretq_f32_m128d(b)),
- vget_high_f32(vreinterpretq_f32_m128d(a))));
+ return vreinterpretq_m128_f32(vld1q_f32(p));
}
-// Copy the lower 64-bit integer in a to the lower element of dst, and zero the
-// upper element.
+// Load a single-precision (32-bit) floating-point element from memory into all
+// elements of dst.
//
-// dst[63:0] := a[63:0]
-// dst[127:64] := 0
+// dst[31:0] := MEM[mem_addr+31:mem_addr]
+// dst[63:32] := MEM[mem_addr+31:mem_addr]
+// dst[95:64] := MEM[mem_addr+31:mem_addr]
+// dst[127:96] := MEM[mem_addr+31:mem_addr]
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_move_epi64
-FORCE_INLINE __m128i _mm_move_epi64(__m128i a)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_load_ps1
+#define _mm_load_ps1 _mm_load1_ps
+
+// Loads a single-precision, floating-point value into the low word and
+// clears the upper three words.
+// https://msdn.microsoft.com/en-us/library/548bb9h4%28v=vs.90%29.aspx
+FORCE_INLINE __m128 _mm_load_ss(const float *p)
{
- return vreinterpretq_m128i_s64(
- vsetq_lane_s64(0, vreinterpretq_s64_m128i(a), 1));
+ return vreinterpretq_m128_f32(vsetq_lane_f32(*p, vdupq_n_f32(0), 0));
}
-// Return vector of type __m128 with undefined elements.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_undefined_ps
-FORCE_INLINE __m128 _mm_undefined_ps(void)
+// Loads a single single-precision, floating-point value, copying it into all
+// four words
+// https://msdn.microsoft.com/en-us/library/vstudio/5cdkf716(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_load1_ps(const float *p)
{
-#if defined(__GNUC__) || defined(__clang__)
-#pragma GCC diagnostic push
-#pragma GCC diagnostic ignored "-Wuninitialized"
-#endif
- __m128 a;
- return a;
-#if defined(__GNUC__) || defined(__clang__)
-#pragma GCC diagnostic pop
-#endif
+ return vreinterpretq_m128_f32(vld1q_dup_f32(p));
}
-/* Logic/Binary operations */
-
-// Computes the bitwise AND-NOT of the four single-precision, floating-point
-// values of a and b.
+// Sets the upper two single-precision, floating-point values with 64
+// bits of data loaded from the address p; the lower two values are passed
+// through from a.
//
-// r0 := ~a0 & b0
-// r1 := ~a1 & b1
-// r2 := ~a2 & b2
-// r3 := ~a3 & b3
+// r0 := a0
+// r1 := a1
+// r2 := *p0
+// r3 := *p1
//
-// https://msdn.microsoft.com/en-us/library/vstudio/68h7wd02(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_andnot_ps(__m128 a, __m128 b)
+// https://msdn.microsoft.com/en-us/library/w92wta0x(v%3dvs.100).aspx
+FORCE_INLINE __m128 _mm_loadh_pi(__m128 a, __m64 const *p)
{
- return vreinterpretq_m128_s32(
- vbicq_s32(vreinterpretq_s32_m128(b),
- vreinterpretq_s32_m128(a))); // *NOTE* argument swap
+ return vreinterpretq_m128_f32(
+ vcombine_f32(vget_low_f32(a), vld1_f32((const float32_t *) p)));
}
-// Compute the bitwise NOT of packed double-precision (64-bit) floating-point
-// elements in a and then AND with b, and store the results in dst.
+// Sets the lower two single-precision, floating-point values with 64
+// bits of data loaded from the address p; the upper two values are passed
+// through from a.
//
-// FOR j := 0 to 1
-// i := j*64
-// dst[i+63:i] := ((NOT a[i+63:i]) AND b[i+63:i])
-// ENDFOR
+// Return Value
+// r0 := *p0
+// r1 := *p1
+// r2 := a2
+// r3 := a3
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_andnot_pd
-FORCE_INLINE __m128d _mm_andnot_pd(__m128d a, __m128d b)
+// https://msdn.microsoft.com/en-us/library/s57cyak2(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_loadl_pi(__m128 a, __m64 const *p)
{
- // *NOTE* argument swap
- return vreinterpretq_m128d_s64(
- vbicq_s64(vreinterpretq_s64_m128d(b), vreinterpretq_s64_m128d(a)));
+ return vreinterpretq_m128_f32(
+ vcombine_f32(vld1_f32((const float32_t *) p), vget_high_f32(a)));
}
-// Computes the bitwise AND of the 128-bit value in b and the bitwise NOT of the
-// 128-bit value in a.
+// Load 4 single-precision (32-bit) floating-point elements from memory into dst
+// in reverse order. mem_addr must be aligned on a 16-byte boundary or a
+// general-protection exception may be generated.
//
-// r := (~a) & b
+// dst[31:0] := MEM[mem_addr+127:mem_addr+96]
+// dst[63:32] := MEM[mem_addr+95:mem_addr+64]
+// dst[95:64] := MEM[mem_addr+63:mem_addr+32]
+// dst[127:96] := MEM[mem_addr+31:mem_addr]
//
-// https://msdn.microsoft.com/en-us/library/vstudio/1beaceh8(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_andnot_si128(__m128i a, __m128i b)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loadr_ps
+FORCE_INLINE __m128 _mm_loadr_ps(const float *p)
{
- return vreinterpretq_m128i_s32(
- vbicq_s32(vreinterpretq_s32_m128i(b),
- vreinterpretq_s32_m128i(a))); // *NOTE* argument swap
+ float32x4_t v = vrev64q_f32(vld1q_f32(p));
+ return vreinterpretq_m128_f32(vextq_f32(v, v, 2));
}
-// Computes the bitwise AND of the 128-bit value in a and the 128-bit value in
-// b.
-//
-// r := a & b
-//
-// https://msdn.microsoft.com/en-us/library/vstudio/6d1txsa8(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_and_si128(__m128i a, __m128i b)
+// Loads four single-precision, floating-point values.
+// https://msdn.microsoft.com/en-us/library/x1b16s7z%28v=vs.90%29.aspx
+FORCE_INLINE __m128 _mm_loadu_ps(const float *p)
{
- return vreinterpretq_m128i_s32(
- vandq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
+ // for neon, alignment doesn't matter, so _mm_load_ps and _mm_loadu_ps are
+ // equivalent for neon
+ return vreinterpretq_m128_f32(vld1q_f32(p));
}
-// Computes the bitwise AND of the four single-precision, floating-point values
-// of a and b.
+// Load unaligned 16-bit integer from memory into the first element of dst.
//
-// r0 := a0 & b0
-// r1 := a1 & b1
-// r2 := a2 & b2
-// r3 := a3 & b3
+// dst[15:0] := MEM[mem_addr+15:mem_addr]
+// dst[MAX:16] := 0
//
-// https://msdn.microsoft.com/en-us/library/vstudio/73ck1xc5(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_and_ps(__m128 a, __m128 b)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loadu_si16
+FORCE_INLINE __m128i _mm_loadu_si16(const void *p)
{
- return vreinterpretq_m128_s32(
- vandq_s32(vreinterpretq_s32_m128(a), vreinterpretq_s32_m128(b)));
+ return vreinterpretq_m128i_s16(
+ vsetq_lane_s16(*(const int16_t *) p, vdupq_n_s16(0), 0));
}
-// Compute the bitwise AND of packed double-precision (64-bit) floating-point
-// elements in a and b, and store the results in dst.
+// Load unaligned 64-bit integer from memory into the first element of dst.
//
-// FOR j := 0 to 1
-// i := j*64
-// dst[i+63:i] := a[i+63:i] AND b[i+63:i]
-// ENDFOR
+// dst[63:0] := MEM[mem_addr+63:mem_addr]
+// dst[MAX:64] := 0
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_and_pd
-FORCE_INLINE __m128d _mm_and_pd(__m128d a, __m128d b)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loadu_si64
+FORCE_INLINE __m128i _mm_loadu_si64(const void *p)
{
- return vreinterpretq_m128d_s64(
- vandq_s64(vreinterpretq_s64_m128d(a), vreinterpretq_s64_m128d(b)));
+ return vreinterpretq_m128i_s64(
+ vcombine_s64(vld1_s64((const int64_t *) p), vdup_n_s64(0)));
}
-// Computes the bitwise OR of the four single-precision, floating-point values
-// of a and b.
-// https://msdn.microsoft.com/en-us/library/vstudio/7ctdsyy0(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_or_ps(__m128 a, __m128 b)
+// Allocate aligned blocks of memory.
+// https://software.intel.com/en-us/
+// cpp-compiler-developer-guide-and-reference-allocating-and-freeing-aligned-memory-blocks
+FORCE_INLINE void *_mm_malloc(size_t size, size_t align)
{
- return vreinterpretq_m128_s32(
- vorrq_s32(vreinterpretq_s32_m128(a), vreinterpretq_s32_m128(b)));
+ void *ptr;
+ if (align == 1)
+ return malloc(size);
+ if (align == 2 || (sizeof(void *) == 8 && align == 4))
+ align = sizeof(void *);
+ if (!posix_memalign(&ptr, align, size))
+ return ptr;
+ return NULL;
}
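+// Illustrative usage, paired with _mm_free above:
+//
+//   float *buf = (float *) _mm_malloc(256 * sizeof(float), 16);
+//   if (buf) {
+//       /* use the 16-byte aligned buffer with _mm_load_ps/_mm_store_ps */
+//       _mm_free(buf);
+//   }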
-// Computes bitwise EXOR (exclusive-or) of the four single-precision,
-// floating-point values of a and b.
-// https://msdn.microsoft.com/en-us/library/ss6k3wk8(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_xor_ps(__m128 a, __m128 b)
+// Conditionally store 8-bit integer elements from a into memory using mask
+// (elements are not stored when the highest bit is not set in the corresponding
+// element) and a non-temporal memory hint.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_maskmove_si64
+FORCE_INLINE void _mm_maskmove_si64(__m64 a, __m64 mask, char *mem_addr)
{
- return vreinterpretq_m128_s32(
- veorq_s32(vreinterpretq_s32_m128(a), vreinterpretq_s32_m128(b)));
+ int8x8_t shr_mask = vshr_n_s8(vreinterpret_s8_m64(mask), 7);
+ __m128 b = _mm_load_ps((const float *) mem_addr);
+ int8x8_t masked =
+ vbsl_s8(vreinterpret_u8_s8(shr_mask), vreinterpret_s8_m64(a),
+ vreinterpret_s8_u64(vget_low_u64(vreinterpretq_u64_m128(b))));
+ vst1_s8((int8_t *) mem_addr, masked);
}
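+// Illustrative example (assumes _mm_set_pi8 from this header): only bytes
+// whose mask lane has its most significant bit set are written.
+//
+//   int8_t out[8] = {0};
+//   __m64 val = _mm_set_pi8(8, 7, 6, 5, 4, 3, 2, 1);
+//   __m64 mask = _mm_set_pi8(0, -1, 0, -1, 0, -1, 0, -1);
+//   _mm_maskmove_si64(val, mask, (char *) out);
+//   // out: {1, 0, 3, 0, 5, 0, 7, 0} -- unmasked bytes keep their old value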
-// Compute the bitwise XOR of packed double-precision (64-bit) floating-point
-// elements in a and b, and store the results in dst.
+// Conditionally store 8-bit integer elements from a into memory using mask
+// (elements are not stored when the highest bit is not set in the corresponding
+// element) and a non-temporal memory hint.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_m_maskmovq
+#define _m_maskmovq(a, mask, mem_addr) _mm_maskmove_si64(a, mask, mem_addr)
+
+// Compare packed signed 16-bit integers in a and b, and store packed maximum
+// values in dst.
//
-// FOR j := 0 to 1
-// i := j*64
-// dst[i+63:i] := a[i+63:i] XOR b[i+63:i]
+// FOR j := 0 to 3
+// i := j*16
+// dst[i+15:i] := MAX(a[i+15:i], b[i+15:i])
// ENDFOR
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_xor_pd
-FORCE_INLINE __m128d _mm_xor_pd(__m128d a, __m128d b)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_max_pi16
+FORCE_INLINE __m64 _mm_max_pi16(__m64 a, __m64 b)
{
- return vreinterpretq_m128d_s64(
- veorq_s64(vreinterpretq_s64_m128d(a), vreinterpretq_s64_m128d(b)));
+ return vreinterpret_m64_s16(
+ vmax_s16(vreinterpret_s16_m64(a), vreinterpret_s16_m64(b)));
}
-// Compute the bitwise OR of packed double-precision (64-bit) floating-point
-// elements in a and b, and store the results in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=mm_or_pd
-FORCE_INLINE __m128d _mm_or_pd(__m128d a, __m128d b)
+// Computes the maximums of the four single-precision, floating-point values of
+// a and b.
+// https://msdn.microsoft.com/en-us/library/vstudio/ff5d607a(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_max_ps(__m128 a, __m128 b)
{
- return vreinterpretq_m128d_s64(
- vorrq_s64(vreinterpretq_s64_m128d(a), vreinterpretq_s64_m128d(b)));
+#if SSE2NEON_PRECISE_MINMAX
+ float32x4_t _a = vreinterpretq_f32_m128(a);
+ float32x4_t _b = vreinterpretq_f32_m128(b);
+ return vbslq_f32(vcltq_f32(_b, _a), _a, _b);
+#else
+ return vreinterpretq_m128_f32(
+ vmaxq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
+#endif
}
-// Computes the bitwise OR of the 128-bit value in a and the 128-bit value in b.
+// Compare packed unsigned 8-bit integers in a and b, and store packed maximum
+// values in dst.
//
-// r := a | b
+// FOR j := 0 to 7
+// i := j*8
+// dst[i+7:i] := MAX(a[i+7:i], b[i+7:i])
+// ENDFOR
//
-// https://msdn.microsoft.com/en-us/library/vstudio/ew8ty0db(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_or_si128(__m128i a, __m128i b)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_max_pu8
+FORCE_INLINE __m64 _mm_max_pu8(__m64 a, __m64 b)
{
- return vreinterpretq_m128i_s32(
- vorrq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
+ return vreinterpret_m64_u8(
+ vmax_u8(vreinterpret_u8_m64(a), vreinterpret_u8_m64(b)));
}
-// Computes the bitwise XOR of the 128-bit value in a and the 128-bit value in
-// b. https://msdn.microsoft.com/en-us/library/fzt08www(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_xor_si128(__m128i a, __m128i b)
+// Computes the maximum of the two lower scalar single-precision floating point
+// values of a and b.
+// https://msdn.microsoft.com/en-us/library/s6db5esz(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_max_ss(__m128 a, __m128 b)
{
- return vreinterpretq_m128i_s32(
- veorq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
+ float32_t value = vgetq_lane_f32(_mm_max_ps(a, b), 0);
+ return vreinterpretq_m128_f32(
+ vsetq_lane_f32(value, vreinterpretq_f32_m128(a), 0));
}
-// Duplicate the low double-precision (64-bit) floating-point element from a,
-// and store the results in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_movedup_pd
-FORCE_INLINE __m128d _mm_movedup_pd(__m128d a)
+// Compare packed signed 16-bit integers in a and b, and store packed minimum
+// values in dst.
+//
+// FOR j := 0 to 3
+// i := j*16
+// dst[i+15:i] := MIN(a[i+15:i], b[i+15:i])
+// ENDFOR
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_min_pi16
+FORCE_INLINE __m64 _mm_min_pi16(__m64 a, __m64 b)
{
-#if (__aarch64__)
- return vreinterpretq_m128d_f64(
- vdupq_laneq_f64(vreinterpretq_f64_m128d(a), 0));
-#else
- return vreinterpretq_m128d_u64(
- vdupq_n_u64(vgetq_lane_u64(vreinterpretq_u64_m128d(a), 0)));
-#endif
+ return vreinterpret_m64_s16(
+ vmin_s16(vreinterpret_s16_m64(a), vreinterpret_s16_m64(b)));
}
-// Duplicate odd-indexed single-precision (32-bit) floating-point elements
-// from a, and store the results in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_movehdup_ps
-FORCE_INLINE __m128 _mm_movehdup_ps(__m128 a)
+// Computes the minima of the four single-precision, floating-point values of a
+// and b.
+// https://msdn.microsoft.com/en-us/library/vstudio/wh13kadz(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_min_ps(__m128 a, __m128 b)
{
-#if __has_builtin(__builtin_shufflevector)
- return vreinterpretq_m128_f32(__builtin_shufflevector(
- vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a), 1, 1, 3, 3));
+#if SSE2NEON_PRECISE_MINMAX
+ float32x4_t _a = vreinterpretq_f32_m128(a);
+ float32x4_t _b = vreinterpretq_f32_m128(b);
+ return vbslq_f32(vcltq_f32(_a, _b), _a, _b);
#else
- float32_t a1 = vgetq_lane_f32(vreinterpretq_f32_m128(a), 1);
- float32_t a3 = vgetq_lane_f32(vreinterpretq_f32_m128(a), 3);
- float ALIGN_STRUCT(16) data[4] = {a1, a1, a3, a3};
- return vreinterpretq_m128_f32(vld1q_f32(data));
+ return vreinterpretq_m128_f32(
+ vminq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
#endif
}
-// Duplicate even-indexed single-precision (32-bit) floating-point elements
-// from a, and store the results in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_moveldup_ps
-FORCE_INLINE __m128 _mm_moveldup_ps(__m128 a)
+// Compare packed unsigned 8-bit integers in a and b, and store packed minimum
+// values in dst.
+//
+// FOR j := 0 to 7
+// i := j*8
+// dst[i+7:i] := MIN(a[i+7:i], b[i+7:i])
+// ENDFOR
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_min_pu8
+FORCE_INLINE __m64 _mm_min_pu8(__m64 a, __m64 b)
{
-#if __has_builtin(__builtin_shufflevector)
- return vreinterpretq_m128_f32(__builtin_shufflevector(
- vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a), 0, 0, 2, 2));
-#else
- float32_t a0 = vgetq_lane_f32(vreinterpretq_f32_m128(a), 0);
- float32_t a2 = vgetq_lane_f32(vreinterpretq_f32_m128(a), 2);
- float ALIGN_STRUCT(16) data[4] = {a0, a0, a2, a2};
- return vreinterpretq_m128_f32(vld1q_f32(data));
-#endif
+ return vreinterpret_m64_u8(
+ vmin_u8(vreinterpret_u8_m64(a), vreinterpret_u8_m64(b)));
+}
+
+// Computes the minimum of the two lower scalar single-precision floating point
+// values of a and b.
+// https://msdn.microsoft.com/en-us/library/0a9y7xaa(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_min_ss(__m128 a, __m128 b)
+{
+ float32_t value = vgetq_lane_f32(_mm_min_ps(a, b), 0);
+ return vreinterpretq_m128_f32(
+ vsetq_lane_f32(value, vreinterpretq_f32_m128(a), 0));
+}
+
+// Sets the low word to the single-precision, floating-point value of b
+// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/35hdzazd(v=vs.100)
+FORCE_INLINE __m128 _mm_move_ss(__m128 a, __m128 b)
+{
+ return vreinterpretq_m128_f32(
+ vsetq_lane_f32(vgetq_lane_f32(vreinterpretq_f32_m128(b), 0),
+ vreinterpretq_f32_m128(a), 0));
}
// Moves the upper two values of B into the lower two values of A.
@@ -1577,315 +2093,419 @@ FORCE_INLINE __m128 _mm_movelh_ps(__m128 __A, __m128 __B)
return vreinterpretq_m128_f32(vcombine_f32(a10, b10));
}
-// Compute the absolute value of packed signed 32-bit integers in a, and store
-// the unsigned results in dst.
-//
-// FOR j := 0 to 3
-// i := j*32
-// dst[i+31:i] := ABS(a[i+31:i])
-// ENDFOR
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_abs_epi32
-FORCE_INLINE __m128i _mm_abs_epi32(__m128i a)
+// Create mask from the most significant bit of each 8-bit element in a, and
+// store the result in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_movemask_pi8
+FORCE_INLINE int _mm_movemask_pi8(__m64 a)
{
- return vreinterpretq_m128i_s32(vabsq_s32(vreinterpretq_s32_m128i(a)));
+ uint8x8_t input = vreinterpret_u8_m64(a);
+#if defined(__aarch64__)
+ static const int8x8_t shift = {0, 1, 2, 3, 4, 5, 6, 7};
+ uint8x8_t tmp = vshr_n_u8(input, 7);
+ return vaddv_u8(vshl_u8(tmp, shift));
+#else
+ // Refer the implementation of `_mm_movemask_epi8`
+ uint16x4_t high_bits = vreinterpret_u16_u8(vshr_n_u8(input, 7));
+ uint32x2_t paired16 =
+ vreinterpret_u32_u16(vsra_n_u16(high_bits, high_bits, 7));
+ uint8x8_t paired32 =
+ vreinterpret_u8_u32(vsra_n_u32(paired16, paired16, 14));
+ return vget_lane_u8(paired32, 0) | ((int) vget_lane_u8(paired32, 4) << 4);
+#endif
}
-// Compute the absolute value of packed signed 16-bit integers in a, and store
-// the unsigned results in dst.
-//
-// FOR j := 0 to 7
-// i := j*16
-// dst[i+15:i] := ABS(a[i+15:i])
-// ENDFOR
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_abs_epi16
-FORCE_INLINE __m128i _mm_abs_epi16(__m128i a)
+// NEON does not provide this method
+// Creates a 4-bit mask from the most significant bits of the four
+// single-precision, floating-point values.
+// https://msdn.microsoft.com/en-us/library/vstudio/4490ys29(v=vs.100).aspx
+FORCE_INLINE int _mm_movemask_ps(__m128 a)
{
- return vreinterpretq_m128i_s16(vabsq_s16(vreinterpretq_s16_m128i(a)));
+ uint32x4_t input = vreinterpretq_u32_m128(a);
+#if defined(__aarch64__)
+ static const int32x4_t shift = {0, 1, 2, 3};
+ uint32x4_t tmp = vshrq_n_u32(input, 31);
+ return vaddvq_u32(vshlq_u32(tmp, shift));
+#else
+ // Uses the exact same method as _mm_movemask_epi8, see that for details.
+ // Shift out everything but the sign bits with a 32-bit unsigned shift
+ // right.
+ uint64x2_t high_bits = vreinterpretq_u64_u32(vshrq_n_u32(input, 31));
+ // Merge the two pairs together with a 64-bit unsigned shift right + add.
+ uint8x16_t paired =
+ vreinterpretq_u8_u64(vsraq_n_u64(high_bits, high_bits, 31));
+ // Extract the result.
+ return vgetq_lane_u8(paired, 0) | (vgetq_lane_u8(paired, 8) << 2);
+#endif
}
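+// Illustrative example (assumes _mm_setr_ps from this header):
+//
+//   __m128 v = _mm_setr_ps(-1.0f, 2.0f, -0.0f, 4.0f);
+//   int m = _mm_movemask_ps(v);  // m == 0x5: lanes 0 and 2 have sign bit set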
-// Compute the absolute value of packed signed 8-bit integers in a, and store
-// the unsigned results in dst.
+// Multiplies the four single-precision, floating-point values of a and b.
//
-// FOR j := 0 to 15
-// i := j*8
-// dst[i+7:i] := ABS(a[i+7:i])
-// ENDFOR
+// r0 := a0 * b0
+// r1 := a1 * b1
+// r2 := a2 * b2
+// r3 := a3 * b3
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_abs_epi8
-FORCE_INLINE __m128i _mm_abs_epi8(__m128i a)
+// https://msdn.microsoft.com/en-us/library/vstudio/22kbk6t9(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_mul_ps(__m128 a, __m128 b)
{
- return vreinterpretq_m128i_s8(vabsq_s8(vreinterpretq_s8_m128i(a)));
+ return vreinterpretq_m128_f32(
+ vmulq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
}
-// Compute the absolute value of packed signed 32-bit integers in a, and store
-// the unsigned results in dst.
+// Multiply the lower single-precision (32-bit) floating-point element in a and
+// b, store the result in the lower element of dst, and copy the upper 3 packed
+// elements from a to the upper elements of dst.
//
-// FOR j := 0 to 1
-// i := j*32
-// dst[i+31:i] := ABS(a[i+31:i])
-// ENDFOR
+// dst[31:0] := a[31:0] * b[31:0]
+// dst[127:32] := a[127:32]
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_abs_pi32
-FORCE_INLINE __m64 _mm_abs_pi32(__m64 a)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_mul_ss
+FORCE_INLINE __m128 _mm_mul_ss(__m128 a, __m128 b)
{
- return vreinterpret_m64_s32(vabs_s32(vreinterpret_s32_m64(a)));
+ return _mm_move_ss(a, _mm_mul_ps(a, b));
}
-// Compute the absolute value of packed signed 16-bit integers in a, and store
-// the unsigned results in dst.
-//
-// FOR j := 0 to 3
-// i := j*16
-// dst[i+15:i] := ABS(a[i+15:i])
-// ENDFOR
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_abs_pi16
-FORCE_INLINE __m64 _mm_abs_pi16(__m64 a)
+// Multiply the packed unsigned 16-bit integers in a and b, producing
+// intermediate 32-bit integers, and store the high 16 bits of the intermediate
+// integers in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_mulhi_pu16
+FORCE_INLINE __m64 _mm_mulhi_pu16(__m64 a, __m64 b)
{
- return vreinterpret_m64_s16(vabs_s16(vreinterpret_s16_m64(a)));
+ return vreinterpret_m64_u16(vshrn_n_u32(
+ vmull_u16(vreinterpret_u16_m64(a), vreinterpret_u16_m64(b)), 16));
}
-// Compute the absolute value of packed signed 8-bit integers in a, and store
-// the unsigned results in dst.
+// Computes the bitwise OR of the four single-precision, floating-point values
+// of a and b.
+// https://msdn.microsoft.com/en-us/library/vstudio/7ctdsyy0(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_or_ps(__m128 a, __m128 b)
+{
+ return vreinterpretq_m128_s32(
+ vorrq_s32(vreinterpretq_s32_m128(a), vreinterpretq_s32_m128(b)));
+}
+
+// Average packed unsigned 8-bit integers in a and b, and store the results in
+// dst.
//
// FOR j := 0 to 7
// i := j*8
-// dst[i+7:i] := ABS(a[i+7:i])
+// dst[i+7:i] := (a[i+7:i] + b[i+7:i] + 1) >> 1
// ENDFOR
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_abs_pi8
-FORCE_INLINE __m64 _mm_abs_pi8(__m64 a)
-{
- return vreinterpret_m64_s8(vabs_s8(vreinterpret_s8_m64(a)));
-}
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_m_pavgb
+#define _m_pavgb(a, b) _mm_avg_pu8(a, b)
-// Concatenate 16-byte blocks in a and b into a 32-byte temporary result, shift
-// the result right by imm8 bytes, and store the low 16 bytes in dst.
+// Average packed unsigned 16-bit integers in a and b, and store the results in
+// dst.
//
-// tmp[255:0] := ((a[127:0] << 128)[255:0] OR b[127:0]) >> (imm8*8)
-// dst[127:0] := tmp[127:0]
+// FOR j := 0 to 3
+// i := j*16
+// dst[i+15:i] := (a[i+15:i] + b[i+15:i] + 1) >> 1
+// ENDFOR
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_alignr_epi8
-#define _mm_alignr_epi8(a, b, imm) \
- __extension__({ \
- __m128i ret; \
- if (unlikely((imm) >= 32)) { \
- ret = _mm_setzero_si128(); \
- } else { \
- uint8x16_t tmp_low, tmp_high; \
- if (imm >= 16) { \
- const int idx = imm - 16; \
- tmp_low = vreinterpretq_u8_m128i(a); \
- tmp_high = vdupq_n_u8(0); \
- ret = \
- vreinterpretq_m128i_u8(vextq_u8(tmp_low, tmp_high, idx)); \
- } else { \
- const int idx = imm; \
- tmp_low = vreinterpretq_u8_m128i(b); \
- tmp_high = vreinterpretq_u8_m128i(a); \
- ret = \
- vreinterpretq_m128i_u8(vextq_u8(tmp_low, tmp_high, idx)); \
- } \
- } \
- ret; \
- })
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_m_pavgw
+#define _m_pavgw(a, b) _mm_avg_pu16(a, b)
-// Concatenate 8-byte blocks in a and b into a 16-byte temporary result, shift
-// the result right by imm8 bytes, and store the low 8 bytes in dst.
-//
-// tmp[127:0] := ((a[63:0] << 64)[127:0] OR b[63:0]) >> (imm8*8)
-// dst[63:0] := tmp[63:0]
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_alignr_pi8
-#define _mm_alignr_pi8(a, b, imm) \
- __extension__({ \
- __m64 ret; \
- if (unlikely((imm) >= 16)) { \
- ret = vreinterpret_m64_s8(vdup_n_s8(0)); \
- } else { \
- uint8x8_t tmp_low, tmp_high; \
- if (imm >= 8) { \
- const int idx = imm - 8; \
- tmp_low = vreinterpret_u8_m64(a); \
- tmp_high = vdup_n_u8(0); \
- ret = vreinterpret_m64_u8(vext_u8(tmp_low, tmp_high, idx)); \
- } else { \
- const int idx = imm; \
- tmp_low = vreinterpret_u8_m64(b); \
- tmp_high = vreinterpret_u8_m64(a); \
- ret = vreinterpret_m64_u8(vext_u8(tmp_low, tmp_high, idx)); \
- } \
- } \
- ret; \
- })
+// Extract a 16-bit integer from a, selected with imm8, and store the result in
+// the lower element of dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_m_pextrw
+#define _m_pextrw(a, imm) _mm_extract_pi16(a, imm)
-// Takes the upper 64 bits of a and places it in the low end of the result
-// Takes the lower 64 bits of b and places it into the high end of the result.
-FORCE_INLINE __m128 _mm_shuffle_ps_1032(__m128 a, __m128 b)
-{
- float32x2_t a32 = vget_high_f32(vreinterpretq_f32_m128(a));
- float32x2_t b10 = vget_low_f32(vreinterpretq_f32_m128(b));
- return vreinterpretq_m128_f32(vcombine_f32(a32, b10));
-}
+// Copy a to dst, and insert the 16-bit integer i into dst at the location
+// specified by imm8.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=m_pinsrw
+#define _m_pinsrw(a, i, imm) _mm_insert_pi16(a, i, imm)
-// takes the lower two 32-bit values from a and swaps them and places in high
-// end of result takes the higher two 32 bit values from b and swaps them and
-// places in low end of result.
-FORCE_INLINE __m128 _mm_shuffle_ps_2301(__m128 a, __m128 b)
+// Compare packed signed 16-bit integers in a and b, and store packed maximum
+// values in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_m_pmaxsw
+#define _m_pmaxsw(a, b) _mm_max_pi16(a, b)
+
+// Compare packed unsigned 8-bit integers in a and b, and store packed maximum
+// values in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_m_pmaxub
+#define _m_pmaxub(a, b) _mm_max_pu8(a, b)
+
+// Compare packed signed 16-bit integers in a and b, and store packed minimum
+// values in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_m_pminsw
+#define _m_pminsw(a, b) _mm_min_pi16(a, b)
+
+// Compare packed unsigned 8-bit integers in a and b, and store packed minimum
+// values in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_m_pminub
+#define _m_pminub(a, b) _mm_min_pu8(a, b)
+
+// Create mask from the most significant bit of each 8-bit element in a, and
+// store the result in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_m_pmovmskb
+#define _m_pmovmskb(a) _mm_movemask_pi8(a)
+
+// Multiply the packed unsigned 16-bit integers in a and b, producing
+// intermediate 32-bit integers, and store the high 16 bits of the intermediate
+// integers in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_m_pmulhuw
+#define _m_pmulhuw(a, b) _mm_mulhi_pu16(a, b)
+
+// Loads one cache line of data from address p to a location closer to the
+// processor. https://msdn.microsoft.com/en-us/library/84szxsww(v=vs.100).aspx
+FORCE_INLINE void _mm_prefetch(const void *p, int i)
{
- float32x2_t a01 = vrev64_f32(vget_low_f32(vreinterpretq_f32_m128(a)));
- float32x2_t b23 = vrev64_f32(vget_high_f32(vreinterpretq_f32_m128(b)));
- return vreinterpretq_m128_f32(vcombine_f32(a01, b23));
+ (void) i;
+ __builtin_prefetch(p);
}
-FORCE_INLINE __m128 _mm_shuffle_ps_0321(__m128 a, __m128 b)
+// Compute the absolute differences of packed unsigned 8-bit integers in a and
+// b, then horizontally sum each consecutive 8 differences to produce four
+// unsigned 16-bit integers, and pack these unsigned 16-bit integers in the low
+// 16 bits of dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=m_psadbw
+#define _m_psadbw(a, b) _mm_sad_pu8(a, b)
+
+// Shuffle 16-bit integers in a using the control in imm8, and store the results
+// in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_m_pshufw
+#define _m_pshufw(a, imm) _mm_shuffle_pi16(a, imm)
+
+// Compute the approximate reciprocal of packed single-precision (32-bit)
+// floating-point elements in a, and store the results in dst. The maximum
+// relative error for this approximation is less than 1.5*2^-12.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_rcp_ps
+FORCE_INLINE __m128 _mm_rcp_ps(__m128 in)
{
- float32x2_t a21 = vget_high_f32(
- vextq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a), 3));
- float32x2_t b03 = vget_low_f32(
- vextq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b), 3));
- return vreinterpretq_m128_f32(vcombine_f32(a21, b03));
+ float32x4_t recip = vrecpeq_f32(vreinterpretq_f32_m128(in));
+ recip = vmulq_f32(recip, vrecpsq_f32(recip, vreinterpretq_f32_m128(in)));
+#if SSE2NEON_PRECISE_DIV
+ // Additional Newton-Raphson iteration for accuracy
+ recip = vmulq_f32(recip, vrecpsq_f32(recip, vreinterpretq_f32_m128(in)));
+#endif
+ return vreinterpretq_m128_f32(recip);
}
-FORCE_INLINE __m128 _mm_shuffle_ps_2103(__m128 a, __m128 b)
+// Compute the approximate reciprocal of the lower single-precision (32-bit)
+// floating-point element in a, store the result in the lower element of dst,
+// and copy the upper 3 packed elements from a to the upper elements of dst. The
+// maximum relative error for this approximation is less than 1.5*2^-12.
+//
+// dst[31:0] := (1.0 / a[31:0])
+// dst[127:32] := a[127:32]
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_rcp_ss
+FORCE_INLINE __m128 _mm_rcp_ss(__m128 a)
{
- float32x2_t a03 = vget_low_f32(
- vextq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a), 3));
- float32x2_t b21 = vget_high_f32(
- vextq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b), 3));
- return vreinterpretq_m128_f32(vcombine_f32(a03, b21));
+ return _mm_move_ss(a, _mm_rcp_ps(a));
}
-FORCE_INLINE __m128 _mm_shuffle_ps_1010(__m128 a, __m128 b)
+// Computes the approximations of the reciprocal square roots of the four
+// single-precision floating point values of in. The approximation has roughly
+// 1% relative error.
+// https://msdn.microsoft.com/en-us/library/22hfsh53(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_rsqrt_ps(__m128 in)
{
- float32x2_t a10 = vget_low_f32(vreinterpretq_f32_m128(a));
- float32x2_t b10 = vget_low_f32(vreinterpretq_f32_m128(b));
- return vreinterpretq_m128_f32(vcombine_f32(a10, b10));
+ float32x4_t out = vrsqrteq_f32(vreinterpretq_f32_m128(in));
+#if SSE2NEON_PRECISE_SQRT
+    // Additional Newton-Raphson iteration for accuracy
+ out = vmulq_f32(
+ out, vrsqrtsq_f32(vmulq_f32(vreinterpretq_f32_m128(in), out), out));
+ out = vmulq_f32(
+ out, vrsqrtsq_f32(vmulq_f32(vreinterpretq_f32_m128(in), out), out));
+#endif
+ return vreinterpretq_m128_f32(out);
}
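+
+// Usage sketch (illustrative only): vrsqrtsq_f32(x * e, e) computes
+// (3 - x * e * e) / 2, so each extra multiply above is one Newton-Raphson
+// refinement e' = e * (3 - x * e * e) / 2 of the vrsqrteq_f32 estimate.
+//
+//   __m128 v = _mm_set1_ps(4.0f);
+//   __m128 r = _mm_rsqrt_ps(v); // every lane ~ 0.5f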
-FORCE_INLINE __m128 _mm_shuffle_ps_1001(__m128 a, __m128 b)
+// Compute the approximate reciprocal square root of the lower single-precision
+// (32-bit) floating-point element in a, store the result in the lower element
+// of dst, and copy the upper 3 packed elements from a to the upper elements of
+// dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_rsqrt_ss
+FORCE_INLINE __m128 _mm_rsqrt_ss(__m128 in)
{
- float32x2_t a01 = vrev64_f32(vget_low_f32(vreinterpretq_f32_m128(a)));
- float32x2_t b10 = vget_low_f32(vreinterpretq_f32_m128(b));
- return vreinterpretq_m128_f32(vcombine_f32(a01, b10));
+ return vsetq_lane_f32(vgetq_lane_f32(_mm_rsqrt_ps(in), 0), in, 0);
}
-FORCE_INLINE __m128 _mm_shuffle_ps_0101(__m128 a, __m128 b)
+// Compute the absolute differences of packed unsigned 8-bit integers in a and
+// b, then horizontally sum each consecutive 8 differences to produce four
+// unsigned 16-bit integers, and pack these unsigned 16-bit integers in the low
+// 16 bits of dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sad_pu8
+FORCE_INLINE __m64 _mm_sad_pu8(__m64 a, __m64 b)
{
- float32x2_t a01 = vrev64_f32(vget_low_f32(vreinterpretq_f32_m128(a)));
- float32x2_t b01 = vrev64_f32(vget_low_f32(vreinterpretq_f32_m128(b)));
- return vreinterpretq_m128_f32(vcombine_f32(a01, b01));
+ uint64x1_t t = vpaddl_u32(vpaddl_u16(
+ vpaddl_u8(vabd_u8(vreinterpret_u8_m64(a), vreinterpret_u8_m64(b)))));
+ return vreinterpret_m64_u16(
+ vset_lane_u16(vget_lane_u64(t, 0), vdup_n_u16(0), 0));
}
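+
+// Worked example (illustrative; assumes the MMX-style _mm_set_pi8 setter from
+// this header): with lanes a = {1..8} and b = {8..1} the absolute differences
+// are {7, 5, 3, 1, 1, 3, 5, 7}, which sum to 32 in the low 16-bit lane.
+//
+//   __m64 a = _mm_set_pi8(8, 7, 6, 5, 4, 3, 2, 1); // lanes {1, 2, ..., 8}
+//   __m64 b = _mm_set_pi8(1, 2, 3, 4, 5, 6, 7, 8); // lanes {8, 7, ..., 1}
+//   __m64 s = _mm_sad_pu8(a, b);                   // low 16 bits == 32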
-// keeps the low 64 bits of b in the low and puts the high 64 bits of a in the
-// high
-FORCE_INLINE __m128 _mm_shuffle_ps_3210(__m128 a, __m128 b)
+// Macro: Set the flush zero bits of the MXCSR control and status register to
+// the value in unsigned 32-bit integer a. The flush-to-zero mode may be set to
+// either of the following flags: _MM_FLUSH_ZERO_ON or _MM_FLUSH_ZERO_OFF
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_MM_SET_FLUSH_ZERO_MODE
+FORCE_INLINE void _sse2neon_mm_set_flush_zero_mode(unsigned int flag)
{
- float32x2_t a10 = vget_low_f32(vreinterpretq_f32_m128(a));
- float32x2_t b32 = vget_high_f32(vreinterpretq_f32_m128(b));
- return vreinterpretq_m128_f32(vcombine_f32(a10, b32));
+ // AArch32 Advanced SIMD arithmetic always uses the Flush-to-zero setting,
+ // regardless of the value of the FZ bit.
+ union {
+ fpcr_bitfield field;
+#if defined(__aarch64__)
+ uint64_t value;
+#else
+ uint32_t value;
+#endif
+ } r;
+
+#if defined(__aarch64__)
+ asm volatile("mrs %0, FPCR" : "=r"(r.value)); /* read */
+#else
+ asm volatile("vmrs %0, FPSCR" : "=r"(r.value)); /* read */
+#endif
+
+ r.field.bit24 = (flag & _MM_FLUSH_ZERO_MASK) == _MM_FLUSH_ZERO_ON;
+
+#if defined(__aarch64__)
+ asm volatile("msr FPCR, %0" ::"r"(r)); /* write */
+#else
+ asm volatile("vmsr FPSCR, %0" ::"r"(r)); /* write */
+#endif
}
-FORCE_INLINE __m128 _mm_shuffle_ps_0011(__m128 a, __m128 b)
+// Sets the four single-precision, floating-point values to the four inputs.
+// https://msdn.microsoft.com/en-us/library/vstudio/afh0zf75(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_set_ps(float w, float z, float y, float x)
{
- float32x2_t a11 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(a)), 1);
- float32x2_t b00 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(b)), 0);
- return vreinterpretq_m128_f32(vcombine_f32(a11, b00));
+ float ALIGN_STRUCT(16) data[4] = {x, y, z, w};
+ return vreinterpretq_m128_f32(vld1q_f32(data));
}
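+
+// Note on argument order (illustrative): the first argument lands in the
+// highest lane and the last argument in the lowest, matching the x86
+// intrinsic.
+//
+//   __m128 v = _mm_set_ps(4.0f, 3.0f, 2.0f, 1.0f);
+//   float lo = vgetq_lane_f32(vreinterpretq_f32_m128(v), 0); // 1.0f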
-FORCE_INLINE __m128 _mm_shuffle_ps_0022(__m128 a, __m128 b)
+// Sets the four single-precision, floating-point values to w.
+// https://msdn.microsoft.com/en-us/library/vstudio/2x1se8ha(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_set_ps1(float _w)
{
- float32x2_t a22 =
- vdup_lane_f32(vget_high_f32(vreinterpretq_f32_m128(a)), 0);
- float32x2_t b00 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(b)), 0);
- return vreinterpretq_m128_f32(vcombine_f32(a22, b00));
+ return vreinterpretq_m128_f32(vdupq_n_f32(_w));
}
-FORCE_INLINE __m128 _mm_shuffle_ps_2200(__m128 a, __m128 b)
+// Macro: Set the rounding mode bits of the MXCSR control and status register to
+// the value in unsigned 32-bit integer a. The rounding mode may contain any of
+// the following flags: _MM_ROUND_NEAREST, _MM_ROUND_DOWN, _MM_ROUND_UP,
+// _MM_ROUND_TOWARD_ZERO
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_MM_SET_ROUNDING_MODE
+FORCE_INLINE void _MM_SET_ROUNDING_MODE(int rounding)
{
- float32x2_t a00 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(a)), 0);
- float32x2_t b22 =
- vdup_lane_f32(vget_high_f32(vreinterpretq_f32_m128(b)), 0);
- return vreinterpretq_m128_f32(vcombine_f32(a00, b22));
+ union {
+ fpcr_bitfield field;
+#if defined(__aarch64__)
+ uint64_t value;
+#else
+ uint32_t value;
+#endif
+ } r;
+
+#if defined(__aarch64__)
+ asm volatile("mrs %0, FPCR" : "=r"(r.value)); /* read */
+#else
+ asm volatile("vmrs %0, FPSCR" : "=r"(r.value)); /* read */
+#endif
+
+ switch (rounding) {
+ case _MM_ROUND_TOWARD_ZERO:
+ r.field.bit22 = 1;
+ r.field.bit23 = 1;
+ break;
+ case _MM_ROUND_DOWN:
+ r.field.bit22 = 0;
+ r.field.bit23 = 1;
+ break;
+ case _MM_ROUND_UP:
+ r.field.bit22 = 1;
+ r.field.bit23 = 0;
+ break;
+ default: //_MM_ROUND_NEAREST
+ r.field.bit22 = 0;
+ r.field.bit23 = 0;
+ }
+
+#if defined(__aarch64__)
+ asm volatile("msr FPCR, %0" ::"r"(r)); /* write */
+#else
+ asm volatile("vmsr FPSCR, %0" ::"r"(r)); /* write */
+#endif
}
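+
+// Usage sketch (illustrative only): FPCR/FPSCR bits [23:22] hold the rounding
+// mode, so the cases above select nearest (0b00), +infinity (0b01),
+// -infinity (0b10) and toward-zero (0b11).
+//
+//   _MM_SET_ROUNDING_MODE(_MM_ROUND_TOWARD_ZERO);
+//   // ... subsequent floating-point arithmetic rounds toward zero ...
+//   _MM_SET_ROUNDING_MODE(_MM_ROUND_NEAREST);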
-FORCE_INLINE __m128 _mm_shuffle_ps_3202(__m128 a, __m128 b)
+// Copy single-precision (32-bit) floating-point element a to the lower element
+// of dst, and zero the upper 3 elements.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_set_ss
+FORCE_INLINE __m128 _mm_set_ss(float a)
{
- float32_t a0 = vgetq_lane_f32(vreinterpretq_f32_m128(a), 0);
- float32x2_t a22 =
- vdup_lane_f32(vget_high_f32(vreinterpretq_f32_m128(a)), 0);
- float32x2_t a02 = vset_lane_f32(a0, a22, 1); /* TODO: use vzip ?*/
- float32x2_t b32 = vget_high_f32(vreinterpretq_f32_m128(b));
- return vreinterpretq_m128_f32(vcombine_f32(a02, b32));
+ float ALIGN_STRUCT(16) data[4] = {a, 0, 0, 0};
+ return vreinterpretq_m128_f32(vld1q_f32(data));
}
-FORCE_INLINE __m128 _mm_shuffle_ps_1133(__m128 a, __m128 b)
+// Sets the four single-precision, floating-point values to w.
+//
+// r0 := r1 := r2 := r3 := w
+//
+// https://msdn.microsoft.com/en-us/library/vstudio/2x1se8ha(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_set1_ps(float _w)
{
- float32x2_t a33 =
- vdup_lane_f32(vget_high_f32(vreinterpretq_f32_m128(a)), 1);
- float32x2_t b11 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(b)), 1);
- return vreinterpretq_m128_f32(vcombine_f32(a33, b11));
+ return vreinterpretq_m128_f32(vdupq_n_f32(_w));
}
-FORCE_INLINE __m128 _mm_shuffle_ps_2010(__m128 a, __m128 b)
+FORCE_INLINE void _mm_setcsr(unsigned int a)
{
- float32x2_t a10 = vget_low_f32(vreinterpretq_f32_m128(a));
- float32_t b2 = vgetq_lane_f32(vreinterpretq_f32_m128(b), 2);
- float32x2_t b00 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(b)), 0);
- float32x2_t b20 = vset_lane_f32(b2, b00, 1);
- return vreinterpretq_m128_f32(vcombine_f32(a10, b20));
+ _MM_SET_ROUNDING_MODE(a);
}
-FORCE_INLINE __m128 _mm_shuffle_ps_2001(__m128 a, __m128 b)
+// Sets the four single-precision, floating-point values to the four inputs in
+// reverse order.
+// https://msdn.microsoft.com/en-us/library/vstudio/d2172ct3(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_setr_ps(float w, float z, float y, float x)
{
- float32x2_t a01 = vrev64_f32(vget_low_f32(vreinterpretq_f32_m128(a)));
- float32_t b2 = vgetq_lane_f32(b, 2);
- float32x2_t b00 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(b)), 0);
- float32x2_t b20 = vset_lane_f32(b2, b00, 1);
- return vreinterpretq_m128_f32(vcombine_f32(a01, b20));
+ float ALIGN_STRUCT(16) data[4] = {w, z, y, x};
+ return vreinterpretq_m128_f32(vld1q_f32(data));
}
-FORCE_INLINE __m128 _mm_shuffle_ps_2032(__m128 a, __m128 b)
+// Clears the four single-precision, floating-point values.
+// https://msdn.microsoft.com/en-us/library/vstudio/tk1t2tbz(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_setzero_ps(void)
{
- float32x2_t a32 = vget_high_f32(vreinterpretq_f32_m128(a));
- float32_t b2 = vgetq_lane_f32(b, 2);
- float32x2_t b00 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(b)), 0);
- float32x2_t b20 = vset_lane_f32(b2, b00, 1);
- return vreinterpretq_m128_f32(vcombine_f32(a32, b20));
+ return vreinterpretq_m128_f32(vdupq_n_f32(0));
}
-// NEON does not support a general purpose permute intrinsic
-// Selects four specific single-precision, floating-point values from a and b,
-// based on the mask i.
-//
-// C equivalent:
-// __m128 _mm_shuffle_ps_default(__m128 a, __m128 b,
-// __constrange(0, 255) int imm) {
-// __m128 ret;
-// ret[0] = a[imm & 0x3]; ret[1] = a[(imm >> 2) & 0x3];
-// ret[2] = b[(imm >> 4) & 0x03]; ret[3] = b[(imm >> 6) & 0x03];
-// return ret;
-// }
-//
-// https://msdn.microsoft.com/en-us/library/vstudio/5f0858x0(v=vs.100).aspx
-#define _mm_shuffle_ps_default(a, b, imm) \
+// Shuffle 16-bit integers in a using the control in imm8, and store the results
+// in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_shuffle_pi16
+#if __has_builtin(__builtin_shufflevector)
+#define _mm_shuffle_pi16(a, imm) \
__extension__({ \
- float32x4_t ret; \
- ret = vmovq_n_f32( \
- vgetq_lane_f32(vreinterpretq_f32_m128(a), (imm) & (0x3))); \
- ret = vsetq_lane_f32( \
- vgetq_lane_f32(vreinterpretq_f32_m128(a), ((imm) >> 2) & 0x3), \
- ret, 1); \
- ret = vsetq_lane_f32( \
- vgetq_lane_f32(vreinterpretq_f32_m128(b), ((imm) >> 4) & 0x3), \
- ret, 2); \
- ret = vsetq_lane_f32( \
- vgetq_lane_f32(vreinterpretq_f32_m128(b), ((imm) >> 6) & 0x3), \
- ret, 3); \
- vreinterpretq_m128_f32(ret); \
+ vreinterpret_m64_s16(__builtin_shufflevector( \
+ vreinterpret_s16_m64(a), vreinterpret_s16_m64(a), (imm & 0x3), \
+ ((imm >> 2) & 0x3), ((imm >> 4) & 0x3), ((imm >> 6) & 0x3))); \
})
+#else
+#define _mm_shuffle_pi16(a, imm) \
+ __extension__({ \
+ int16x4_t ret; \
+ ret = \
+ vmov_n_s16(vget_lane_s16(vreinterpret_s16_m64(a), (imm) & (0x3))); \
+ ret = vset_lane_s16( \
+ vget_lane_s16(vreinterpret_s16_m64(a), ((imm) >> 2) & 0x3), ret, \
+ 1); \
+ ret = vset_lane_s16( \
+ vget_lane_s16(vreinterpret_s16_m64(a), ((imm) >> 4) & 0x3), ret, \
+ 2); \
+ ret = vset_lane_s16( \
+ vget_lane_s16(vreinterpret_s16_m64(a), ((imm) >> 6) & 0x3), ret, \
+ 3); \
+ vreinterpret_m64_s16(ret); \
+ })
+#endif
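+
+// Usage sketch (illustrative; assumes _MM_SHUFFLE and the MMX-style
+// _mm_set_pi16 setter from this header): each two-bit field of imm8 picks the
+// source lane for one destination lane, low to high, so _MM_SHUFFLE(3, 2, 1, 0)
+// is the identity and _MM_SHUFFLE(0, 1, 2, 3) reverses the lanes.
+//
+//   __m64 v = _mm_set_pi16(4, 3, 2, 1);                     // lanes {1, 2, 3, 4}
+//   __m64 r = _mm_shuffle_pi16(v, _MM_SHUFFLE(0, 1, 2, 3)); // lanes {4, 3, 2, 1}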
+
+// Guarantees that every preceding store is globally visible before any
+// subsequent store.
+// https://msdn.microsoft.com/en-us/library/5h2w73d1%28v=vs.90%29.aspx
+FORCE_INLINE void _mm_sfence(void)
+{
+ __sync_synchronize();
+}
// FORCE_INLINE __m128 _mm_shuffle_ps(__m128 a, __m128 b, __constrange(0,255)
// int imm)
@@ -1963,1876 +2583,1721 @@ FORCE_INLINE __m128 _mm_shuffle_ps_2032(__m128 a, __m128 b)
})
#endif
-// Takes the upper 64 bits of a and places it in the low end of the result
-// Takes the lower 64 bits of a and places it into the high end of the result.
-FORCE_INLINE __m128i _mm_shuffle_epi_1032(__m128i a)
+// Computes the approximations of square roots of the four single-precision,
+// floating-point values of a. First computes reciprocal square roots and then
+// reciprocals of the four values.
+//
+// r0 := sqrt(a0)
+// r1 := sqrt(a1)
+// r2 := sqrt(a2)
+// r3 := sqrt(a3)
+//
+// https://msdn.microsoft.com/en-us/library/vstudio/8z67bwwk(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_sqrt_ps(__m128 in)
{
- int32x2_t a32 = vget_high_s32(vreinterpretq_s32_m128i(a));
- int32x2_t a10 = vget_low_s32(vreinterpretq_s32_m128i(a));
- return vreinterpretq_m128i_s32(vcombine_s32(a32, a10));
+#if SSE2NEON_PRECISE_SQRT
+ float32x4_t recip = vrsqrteq_f32(vreinterpretq_f32_m128(in));
+
+ // Test for vrsqrteq_f32(0) -> positive infinity case.
+ // Change to zero, so that s * 1/sqrt(s) result is zero too.
+ const uint32x4_t pos_inf = vdupq_n_u32(0x7F800000);
+ const uint32x4_t div_by_zero =
+ vceqq_u32(pos_inf, vreinterpretq_u32_f32(recip));
+ recip = vreinterpretq_f32_u32(
+ vandq_u32(vmvnq_u32(div_by_zero), vreinterpretq_u32_f32(recip)));
+
+    // Additional Newton-Raphson iteration for accuracy
+ recip = vmulq_f32(
+ vrsqrtsq_f32(vmulq_f32(recip, recip), vreinterpretq_f32_m128(in)),
+ recip);
+ recip = vmulq_f32(
+ vrsqrtsq_f32(vmulq_f32(recip, recip), vreinterpretq_f32_m128(in)),
+ recip);
+
+ // sqrt(s) = s * 1/sqrt(s)
+ return vreinterpretq_m128_f32(vmulq_f32(vreinterpretq_f32_m128(in), recip));
+#elif defined(__aarch64__)
+ return vreinterpretq_m128_f32(vsqrtq_f32(vreinterpretq_f32_m128(in)));
+#else
+ float32x4_t recipsq = vrsqrteq_f32(vreinterpretq_f32_m128(in));
+ float32x4_t sq = vrecpeq_f32(recipsq);
+ return vreinterpretq_m128_f32(sq);
+#endif
}
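+
+// Note (illustrative): in the SSE2NEON_PRECISE_SQRT path the result is formed
+// as x * (1/sqrt(x)); the vceqq_u32 test catches lanes where x == 0, for which
+// vrsqrteq_f32 returns +infinity, and zeroes that estimate so the final
+// product is 0 rather than NaN.
+//
+//   __m128 r = _mm_sqrt_ps(_mm_set_ps(16.0f, 9.0f, 4.0f, 0.0f)); // ~ {0, 2, 3, 4}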
-// takes the lower two 32-bit values from a and swaps them and places in low end
-// of result takes the higher two 32 bit values from a and swaps them and places
-// in high end of result.
-FORCE_INLINE __m128i _mm_shuffle_epi_2301(__m128i a)
+// Computes the approximation of the square root of the scalar single-precision
+// floating point value of in.
+// https://msdn.microsoft.com/en-us/library/ahfsc22d(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_sqrt_ss(__m128 in)
{
- int32x2_t a01 = vrev64_s32(vget_low_s32(vreinterpretq_s32_m128i(a)));
- int32x2_t a23 = vrev64_s32(vget_high_s32(vreinterpretq_s32_m128i(a)));
- return vreinterpretq_m128i_s32(vcombine_s32(a01, a23));
+ float32_t value =
+ vgetq_lane_f32(vreinterpretq_f32_m128(_mm_sqrt_ps(in)), 0);
+ return vreinterpretq_m128_f32(
+ vsetq_lane_f32(value, vreinterpretq_f32_m128(in), 0));
}
-// rotates the least significant 32 bits into the most signficant 32 bits, and
-// shifts the rest down
-FORCE_INLINE __m128i _mm_shuffle_epi_0321(__m128i a)
+// Stores four single-precision, floating-point values.
+// https://msdn.microsoft.com/en-us/library/vstudio/s3h4ay6y(v=vs.100).aspx
+FORCE_INLINE void _mm_store_ps(float *p, __m128 a)
{
- return vreinterpretq_m128i_s32(
- vextq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(a), 1));
+ vst1q_f32(p, vreinterpretq_f32_m128(a));
}
-// rotates the most significant 32 bits into the least signficant 32 bits, and
-// shifts the rest up
-FORCE_INLINE __m128i _mm_shuffle_epi_2103(__m128i a)
+// Store the lower single-precision (32-bit) floating-point element from a into
+// 4 contiguous elements in memory. mem_addr must be aligned on a 16-byte
+// boundary or a general-protection exception may be generated.
+//
+// MEM[mem_addr+31:mem_addr] := a[31:0]
+// MEM[mem_addr+63:mem_addr+32] := a[31:0]
+// MEM[mem_addr+95:mem_addr+64] := a[31:0]
+// MEM[mem_addr+127:mem_addr+96] := a[31:0]
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_store_ps1
+FORCE_INLINE void _mm_store_ps1(float *p, __m128 a)
{
- return vreinterpretq_m128i_s32(
- vextq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(a), 3));
+ float32_t a0 = vgetq_lane_f32(vreinterpretq_f32_m128(a), 0);
+ vst1q_f32(p, vdupq_n_f32(a0));
}
-// gets the lower 64 bits of a, and places it in the upper 64 bits
-// gets the lower 64 bits of a and places it in the lower 64 bits
-FORCE_INLINE __m128i _mm_shuffle_epi_1010(__m128i a)
+// Stores the lower single-precision, floating-point value.
+// https://msdn.microsoft.com/en-us/library/tzz10fbx(v=vs.100).aspx
+FORCE_INLINE void _mm_store_ss(float *p, __m128 a)
{
- int32x2_t a10 = vget_low_s32(vreinterpretq_s32_m128i(a));
- return vreinterpretq_m128i_s32(vcombine_s32(a10, a10));
+ vst1q_lane_f32(p, vreinterpretq_f32_m128(a), 0);
}
-// gets the lower 64 bits of a, swaps the 0 and 1 elements, and places it in the
-// lower 64 bits gets the lower 64 bits of a, and places it in the upper 64 bits
-FORCE_INLINE __m128i _mm_shuffle_epi_1001(__m128i a)
+// Store the lower single-precision (32-bit) floating-point element from a into
+// 4 contiguous elements in memory. mem_addr must be aligned on a 16-byte
+// boundary or a general-protection exception may be generated.
+//
+// MEM[mem_addr+31:mem_addr] := a[31:0]
+// MEM[mem_addr+63:mem_addr+32] := a[31:0]
+// MEM[mem_addr+95:mem_addr+64] := a[31:0]
+// MEM[mem_addr+127:mem_addr+96] := a[31:0]
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_store1_ps
+#define _mm_store1_ps _mm_store_ps1
+
+// Stores the upper two single-precision, floating-point values of a to the
+// address p.
+//
+// *p0 := a2
+// *p1 := a3
+//
+// https://msdn.microsoft.com/en-us/library/a7525fs8(v%3dvs.90).aspx
+FORCE_INLINE void _mm_storeh_pi(__m64 *p, __m128 a)
{
- int32x2_t a01 = vrev64_s32(vget_low_s32(vreinterpretq_s32_m128i(a)));
- int32x2_t a10 = vget_low_s32(vreinterpretq_s32_m128i(a));
- return vreinterpretq_m128i_s32(vcombine_s32(a01, a10));
+ *p = vreinterpret_m64_f32(vget_high_f32(a));
}
-// gets the lower 64 bits of a, swaps the 0 and 1 elements and places it in the
-// upper 64 bits gets the lower 64 bits of a, swaps the 0 and 1 elements, and
-// places it in the lower 64 bits
-FORCE_INLINE __m128i _mm_shuffle_epi_0101(__m128i a)
+// Stores the lower two single-precision floating point values of a to the
+// address p.
+//
+// *p0 := a0
+// *p1 := a1
+//
+// https://msdn.microsoft.com/en-us/library/h54t98ks(v=vs.90).aspx
+FORCE_INLINE void _mm_storel_pi(__m64 *p, __m128 a)
{
- int32x2_t a01 = vrev64_s32(vget_low_s32(vreinterpretq_s32_m128i(a)));
- return vreinterpretq_m128i_s32(vcombine_s32(a01, a01));
+ *p = vreinterpret_m64_f32(vget_low_f32(a));
}
-FORCE_INLINE __m128i _mm_shuffle_epi_2211(__m128i a)
+// Store 4 single-precision (32-bit) floating-point elements from a into memory
+// in reverse order. mem_addr must be aligned on a 16-byte boundary or a
+// general-protection exception may be generated.
+//
+// MEM[mem_addr+31:mem_addr] := a[127:96]
+// MEM[mem_addr+63:mem_addr+32] := a[95:64]
+// MEM[mem_addr+95:mem_addr+64] := a[63:32]
+// MEM[mem_addr+127:mem_addr+96] := a[31:0]
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_storer_ps
+FORCE_INLINE void _mm_storer_ps(float *p, __m128 a)
{
- int32x2_t a11 = vdup_lane_s32(vget_low_s32(vreinterpretq_s32_m128i(a)), 1);
- int32x2_t a22 = vdup_lane_s32(vget_high_s32(vreinterpretq_s32_m128i(a)), 0);
- return vreinterpretq_m128i_s32(vcombine_s32(a11, a22));
+ float32x4_t tmp = vrev64q_f32(vreinterpretq_f32_m128(a));
+ float32x4_t rev = vextq_f32(tmp, tmp, 2);
+ vst1q_f32(p, rev);
}
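+
+// Usage sketch (illustrative only): vrev64q_f32 swaps lanes within each 64-bit
+// half and the vextq_f32 by two lanes swaps the halves, so together they
+// reverse all four lanes before the store.
+//
+//   float ALIGN_STRUCT(16) out[4];
+//   _mm_storer_ps(out, _mm_set_ps(4.0f, 3.0f, 2.0f, 1.0f));
+//   // out == {4.0f, 3.0f, 2.0f, 1.0f}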
-FORCE_INLINE __m128i _mm_shuffle_epi_0122(__m128i a)
+// Stores four single-precision, floating-point values.
+// https://msdn.microsoft.com/en-us/library/44e30x22(v=vs.100).aspx
+FORCE_INLINE void _mm_storeu_ps(float *p, __m128 a)
{
- int32x2_t a22 = vdup_lane_s32(vget_high_s32(vreinterpretq_s32_m128i(a)), 0);
- int32x2_t a01 = vrev64_s32(vget_low_s32(vreinterpretq_s32_m128i(a)));
- return vreinterpretq_m128i_s32(vcombine_s32(a22, a01));
+ vst1q_f32(p, vreinterpretq_f32_m128(a));
}
-FORCE_INLINE __m128i _mm_shuffle_epi_3332(__m128i a)
+// Stores 16-bits of integer data a at the address p.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_storeu_si16
+FORCE_INLINE void _mm_storeu_si16(void *p, __m128i a)
{
- int32x2_t a32 = vget_high_s32(vreinterpretq_s32_m128i(a));
- int32x2_t a33 = vdup_lane_s32(vget_high_s32(vreinterpretq_s32_m128i(a)), 1);
- return vreinterpretq_m128i_s32(vcombine_s32(a32, a33));
+ vst1q_lane_s16((int16_t *) p, vreinterpretq_s16_m128i(a), 0);
}
-// Shuffle packed 8-bit integers in a according to shuffle control mask in the
-// corresponding 8-bit element of b, and store the results in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_shuffle_epi8
-FORCE_INLINE __m128i _mm_shuffle_epi8(__m128i a, __m128i b)
+// Stores 64-bits of integer data a at the address p.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_storeu_si64
+FORCE_INLINE void _mm_storeu_si64(void *p, __m128i a)
{
- int8x16_t tbl = vreinterpretq_s8_m128i(a); // input a
- uint8x16_t idx = vreinterpretq_u8_m128i(b); // input b
- uint8x16_t idx_masked =
- vandq_u8(idx, vdupq_n_u8(0x8F)); // avoid using meaningless bits
-#if defined(__aarch64__)
- return vreinterpretq_m128i_s8(vqtbl1q_s8(tbl, idx_masked));
-#elif defined(__GNUC__)
- int8x16_t ret;
- // %e and %f represent the even and odd D registers
- // respectively.
- __asm__ __volatile__(
- "vtbl.8 %e[ret], {%e[tbl], %f[tbl]}, %e[idx]\n"
- "vtbl.8 %f[ret], {%e[tbl], %f[tbl]}, %f[idx]\n"
- : [ret] "=&w"(ret)
- : [tbl] "w"(tbl), [idx] "w"(idx_masked));
- return vreinterpretq_m128i_s8(ret);
-#else
- // use this line if testing on aarch64
- int8x8x2_t a_split = {vget_low_s8(tbl), vget_high_s8(tbl)};
- return vreinterpretq_m128i_s8(
- vcombine_s8(vtbl2_s8(a_split, vget_low_u8(idx_masked)),
- vtbl2_s8(a_split, vget_high_u8(idx_masked))));
-#endif
+ vst1q_lane_s64((int64_t *) p, vreinterpretq_s64_m128i(a), 0);
}
-// C equivalent:
-// __m128i _mm_shuffle_epi32_default(__m128i a,
-// __constrange(0, 255) int imm) {
-// __m128i ret;
-// ret[0] = a[imm & 0x3]; ret[1] = a[(imm >> 2) & 0x3];
-// ret[2] = a[(imm >> 4) & 0x03]; ret[3] = a[(imm >> 6) & 0x03];
-// return ret;
-// }
-#define _mm_shuffle_epi32_default(a, imm) \
- __extension__({ \
- int32x4_t ret; \
- ret = vmovq_n_s32( \
- vgetq_lane_s32(vreinterpretq_s32_m128i(a), (imm) & (0x3))); \
- ret = vsetq_lane_s32( \
- vgetq_lane_s32(vreinterpretq_s32_m128i(a), ((imm) >> 2) & 0x3), \
- ret, 1); \
- ret = vsetq_lane_s32( \
- vgetq_lane_s32(vreinterpretq_s32_m128i(a), ((imm) >> 4) & 0x3), \
- ret, 2); \
- ret = vsetq_lane_s32( \
- vgetq_lane_s32(vreinterpretq_s32_m128i(a), ((imm) >> 6) & 0x3), \
- ret, 3); \
- vreinterpretq_m128i_s32(ret); \
- })
+// Store 64-bits of integer data from a into memory using a non-temporal memory
+// hint.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_stream_pi
+FORCE_INLINE void _mm_stream_pi(__m64 *p, __m64 a)
+{
+ vst1_s64((int64_t *) p, vreinterpret_s64_m64(a));
+}
-// FORCE_INLINE __m128i _mm_shuffle_epi32_splat(__m128i a, __constrange(0,255)
-// int imm)
-#if defined(__aarch64__)
-#define _mm_shuffle_epi32_splat(a, imm) \
- __extension__({ \
- vreinterpretq_m128i_s32( \
- vdupq_laneq_s32(vreinterpretq_s32_m128i(a), (imm))); \
- })
+// Store 128-bits (composed of 4 packed single-precision (32-bit) floating-
+// point elements) from a into memory using a non-temporal memory hint.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_stream_ps
+FORCE_INLINE void _mm_stream_ps(float *p, __m128 a)
+{
+#if __has_builtin(__builtin_nontemporal_store)
+ __builtin_nontemporal_store(a, (float32x4_t *) p);
#else
-#define _mm_shuffle_epi32_splat(a, imm) \
- __extension__({ \
- vreinterpretq_m128i_s32( \
- vdupq_n_s32(vgetq_lane_s32(vreinterpretq_s32_m128i(a), (imm)))); \
- })
+ vst1q_f32(p, vreinterpretq_f32_m128(a));
#endif
+}
-// Shuffles the 4 signed or unsigned 32-bit integers in a as specified by imm.
-// https://msdn.microsoft.com/en-us/library/56f67xbk%28v=vs.90%29.aspx
-// FORCE_INLINE __m128i _mm_shuffle_epi32(__m128i a,
-// __constrange(0,255) int imm)
-#if __has_builtin(__builtin_shufflevector)
-#define _mm_shuffle_epi32(a, imm) \
- __extension__({ \
- int32x4_t _input = vreinterpretq_s32_m128i(a); \
- int32x4_t _shuf = __builtin_shufflevector( \
- _input, _input, (imm) & (0x3), ((imm) >> 2) & 0x3, \
- ((imm) >> 4) & 0x3, ((imm) >> 6) & 0x3); \
- vreinterpretq_m128i_s32(_shuf); \
- })
-#else // generic
-#define _mm_shuffle_epi32(a, imm) \
- __extension__({ \
- __m128i ret; \
- switch (imm) { \
- case _MM_SHUFFLE(1, 0, 3, 2): \
- ret = _mm_shuffle_epi_1032((a)); \
- break; \
- case _MM_SHUFFLE(2, 3, 0, 1): \
- ret = _mm_shuffle_epi_2301((a)); \
- break; \
- case _MM_SHUFFLE(0, 3, 2, 1): \
- ret = _mm_shuffle_epi_0321((a)); \
- break; \
- case _MM_SHUFFLE(2, 1, 0, 3): \
- ret = _mm_shuffle_epi_2103((a)); \
- break; \
- case _MM_SHUFFLE(1, 0, 1, 0): \
- ret = _mm_shuffle_epi_1010((a)); \
- break; \
- case _MM_SHUFFLE(1, 0, 0, 1): \
- ret = _mm_shuffle_epi_1001((a)); \
- break; \
- case _MM_SHUFFLE(0, 1, 0, 1): \
- ret = _mm_shuffle_epi_0101((a)); \
- break; \
- case _MM_SHUFFLE(2, 2, 1, 1): \
- ret = _mm_shuffle_epi_2211((a)); \
- break; \
- case _MM_SHUFFLE(0, 1, 2, 2): \
- ret = _mm_shuffle_epi_0122((a)); \
- break; \
- case _MM_SHUFFLE(3, 3, 3, 2): \
- ret = _mm_shuffle_epi_3332((a)); \
- break; \
- case _MM_SHUFFLE(0, 0, 0, 0): \
- ret = _mm_shuffle_epi32_splat((a), 0); \
- break; \
- case _MM_SHUFFLE(1, 1, 1, 1): \
- ret = _mm_shuffle_epi32_splat((a), 1); \
- break; \
- case _MM_SHUFFLE(2, 2, 2, 2): \
- ret = _mm_shuffle_epi32_splat((a), 2); \
- break; \
- case _MM_SHUFFLE(3, 3, 3, 3): \
- ret = _mm_shuffle_epi32_splat((a), 3); \
- break; \
- default: \
- ret = _mm_shuffle_epi32_default((a), (imm)); \
- break; \
- } \
- ret; \
- })
-#endif
+// Subtracts the four single-precision, floating-point values of a and b.
+//
+// r0 := a0 - b0
+// r1 := a1 - b1
+// r2 := a2 - b2
+// r3 := a3 - b3
+//
+// https://msdn.microsoft.com/en-us/library/vstudio/1zad2k61(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_sub_ps(__m128 a, __m128 b)
+{
+ return vreinterpretq_m128_f32(
+ vsubq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
+}
-// Shuffles the lower 4 signed or unsigned 16-bit integers in a as specified
-// by imm.
-// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/y41dkk37(v=vs.100)
-// FORCE_INLINE __m128i _mm_shufflelo_epi16_function(__m128i a,
-// __constrange(0,255) int
-// imm)
-#define _mm_shufflelo_epi16_function(a, imm) \
- __extension__({ \
- int16x8_t ret = vreinterpretq_s16_m128i(a); \
- int16x4_t lowBits = vget_low_s16(ret); \
- ret = vsetq_lane_s16(vget_lane_s16(lowBits, (imm) & (0x3)), ret, 0); \
- ret = vsetq_lane_s16(vget_lane_s16(lowBits, ((imm) >> 2) & 0x3), ret, \
- 1); \
- ret = vsetq_lane_s16(vget_lane_s16(lowBits, ((imm) >> 4) & 0x3), ret, \
- 2); \
- ret = vsetq_lane_s16(vget_lane_s16(lowBits, ((imm) >> 6) & 0x3), ret, \
- 3); \
- vreinterpretq_m128i_s16(ret); \
- })
+// Subtract the lower single-precision (32-bit) floating-point element in b from
+// the lower single-precision (32-bit) floating-point element in a, store the
+// result in the lower element of dst, and copy the upper 3 packed elements from
+// a to the upper elements of dst.
+//
+// dst[31:0] := a[31:0] - b[31:0]
+// dst[127:32] := a[127:32]
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sub_ss
+FORCE_INLINE __m128 _mm_sub_ss(__m128 a, __m128 b)
+{
+ return _mm_move_ss(a, _mm_sub_ps(a, b));
+}
-// FORCE_INLINE __m128i _mm_shufflelo_epi16(__m128i a,
-// __constrange(0,255) int imm)
-#if __has_builtin(__builtin_shufflevector)
-#define _mm_shufflelo_epi16(a, imm) \
- __extension__({ \
- int16x8_t _input = vreinterpretq_s16_m128i(a); \
- int16x8_t _shuf = __builtin_shufflevector( \
- _input, _input, ((imm) & (0x3)), (((imm) >> 2) & 0x3), \
- (((imm) >> 4) & 0x3), (((imm) >> 6) & 0x3), 4, 5, 6, 7); \
- vreinterpretq_m128i_s16(_shuf); \
- })
-#else // generic
-#define _mm_shufflelo_epi16(a, imm) _mm_shufflelo_epi16_function((a), (imm))
-#endif
+// Macro: Transpose the 4x4 matrix formed by the 4 rows of single-precision
+// (32-bit) floating-point elements in row0, row1, row2, and row3, and store the
+// transposed matrix in these vectors (row0 now contains column 0, etc.).
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=MM_TRANSPOSE4_PS
+#define _MM_TRANSPOSE4_PS(row0, row1, row2, row3) \
+ do { \
+ float32x4x2_t ROW01 = vtrnq_f32(row0, row1); \
+ float32x4x2_t ROW23 = vtrnq_f32(row2, row3); \
+ row0 = vcombine_f32(vget_low_f32(ROW01.val[0]), \
+ vget_low_f32(ROW23.val[0])); \
+ row1 = vcombine_f32(vget_low_f32(ROW01.val[1]), \
+ vget_low_f32(ROW23.val[1])); \
+ row2 = vcombine_f32(vget_high_f32(ROW01.val[0]), \
+ vget_high_f32(ROW23.val[0])); \
+ row3 = vcombine_f32(vget_high_f32(ROW01.val[1]), \
+ vget_high_f32(ROW23.val[1])); \
+ } while (0)
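+
+// Usage sketch (illustrative only): vtrnq_f32 interleaves each pair of rows
+// and the vcombine_f32 calls regroup the low/high halves, giving an in-place
+// transpose of the 4x4 matrix held in the four row registers.
+//
+//   __m128 r0 = _mm_setr_ps(1, 2, 3, 4);
+//   __m128 r1 = _mm_setr_ps(5, 6, 7, 8);
+//   __m128 r2 = _mm_setr_ps(9, 10, 11, 12);
+//   __m128 r3 = _mm_setr_ps(13, 14, 15, 16);
+//   _MM_TRANSPOSE4_PS(r0, r1, r2, r3); // r0 == {1, 5, 9, 13}, and so on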
-// Shuffles the upper 4 signed or unsigned 16-bit integers in a as specified
-// by imm.
-// https://msdn.microsoft.com/en-us/library/13ywktbs(v=vs.100).aspx
-// FORCE_INLINE __m128i _mm_shufflehi_epi16_function(__m128i a,
-// __constrange(0,255) int
-// imm)
-#define _mm_shufflehi_epi16_function(a, imm) \
- __extension__({ \
- int16x8_t ret = vreinterpretq_s16_m128i(a); \
- int16x4_t highBits = vget_high_s16(ret); \
- ret = vsetq_lane_s16(vget_lane_s16(highBits, (imm) & (0x3)), ret, 4); \
- ret = vsetq_lane_s16(vget_lane_s16(highBits, ((imm) >> 2) & 0x3), ret, \
- 5); \
- ret = vsetq_lane_s16(vget_lane_s16(highBits, ((imm) >> 4) & 0x3), ret, \
- 6); \
- ret = vsetq_lane_s16(vget_lane_s16(highBits, ((imm) >> 6) & 0x3), ret, \
- 7); \
- vreinterpretq_m128i_s16(ret); \
- })
+// According to the documentation, these intrinsics behave the same as the
+// non-'u' versions. We'll just alias them here.
+#define _mm_ucomieq_ss _mm_comieq_ss
+#define _mm_ucomige_ss _mm_comige_ss
+#define _mm_ucomigt_ss _mm_comigt_ss
+#define _mm_ucomile_ss _mm_comile_ss
+#define _mm_ucomilt_ss _mm_comilt_ss
+#define _mm_ucomineq_ss _mm_comineq_ss
-// FORCE_INLINE __m128i _mm_shufflehi_epi16(__m128i a,
-// __constrange(0,255) int imm)
-#if __has_builtin(__builtin_shufflevector)
-#define _mm_shufflehi_epi16(a, imm) \
- __extension__({ \
- int16x8_t _input = vreinterpretq_s16_m128i(a); \
- int16x8_t _shuf = __builtin_shufflevector( \
- _input, _input, 0, 1, 2, 3, ((imm) & (0x3)) + 4, \
- (((imm) >> 2) & 0x3) + 4, (((imm) >> 4) & 0x3) + 4, \
- (((imm) >> 6) & 0x3) + 4); \
- vreinterpretq_m128i_s16(_shuf); \
- })
-#else // generic
-#define _mm_shufflehi_epi16(a, imm) _mm_shufflehi_epi16_function((a), (imm))
+// Return vector of type __m128i with undefined elements.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=mm_undefined_si128
+FORCE_INLINE __m128i _mm_undefined_si128(void)
+{
+#if defined(__GNUC__) || defined(__clang__)
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wuninitialized"
#endif
+ __m128i a;
+ return a;
+#if defined(__GNUC__) || defined(__clang__)
+#pragma GCC diagnostic pop
+#endif
+}
-// Shuffle double-precision (64-bit) floating-point elements using the control
-// in imm8, and store the results in dst.
+// Return vector of type __m128 with undefined elements.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_undefined_ps
+FORCE_INLINE __m128 _mm_undefined_ps(void)
+{
+#if defined(__GNUC__) || defined(__clang__)
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wuninitialized"
+#endif
+ __m128 a;
+ return a;
+#if defined(__GNUC__) || defined(__clang__)
+#pragma GCC diagnostic pop
+#endif
+}
+
+// Selects and interleaves the upper two single-precision, floating-point values
+// from a and b.
//
-// dst[63:0] := (imm8[0] == 0) ? a[63:0] : a[127:64]
-// dst[127:64] := (imm8[1] == 0) ? b[63:0] : b[127:64]
+// r0 := a2
+// r1 := b2
+// r2 := a3
+// r3 := b3
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_shuffle_pd
-#if __has_builtin(__builtin_shufflevector)
-#define _mm_shuffle_pd(a, b, imm8) \
- vreinterpretq_m128d_s64(__builtin_shufflevector( \
- vreinterpretq_s64_m128d(a), vreinterpretq_s64_m128d(b), imm8 & 0x1, \
- ((imm8 & 0x2) >> 1) + 2))
+// https://msdn.microsoft.com/en-us/library/skccxx7d%28v=vs.90%29.aspx
+FORCE_INLINE __m128 _mm_unpackhi_ps(__m128 a, __m128 b)
+{
+#if defined(__aarch64__)
+ return vreinterpretq_m128_f32(
+ vzip2q_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
#else
-#define _mm_shuffle_pd(a, b, imm8) \
- _mm_castsi128_pd(_mm_set_epi64x( \
- vgetq_lane_s64(vreinterpretq_s64_m128d(b), (imm8 & 0x2) >> 1), \
- vgetq_lane_s64(vreinterpretq_s64_m128d(a), imm8 & 0x1)))
+ float32x2_t a1 = vget_high_f32(vreinterpretq_f32_m128(a));
+ float32x2_t b1 = vget_high_f32(vreinterpretq_f32_m128(b));
+ float32x2x2_t result = vzip_f32(a1, b1);
+ return vreinterpretq_m128_f32(vcombine_f32(result.val[0], result.val[1]));
#endif
+}
-// Blend packed 16-bit integers from a and b using control mask imm8, and store
-// the results in dst.
+// Selects and interleaves the lower two single-precision, floating-point values
+// from a and b.
//
-// FOR j := 0 to 7
-// i := j*16
-// IF imm8[j]
-// dst[i+15:i] := b[i+15:i]
-// ELSE
-// dst[i+15:i] := a[i+15:i]
-// FI
-// ENDFOR
-// FORCE_INLINE __m128i _mm_blend_epi16(__m128i a, __m128i b,
-// __constrange(0,255) int imm)
-#define _mm_blend_epi16(a, b, imm) \
- __extension__({ \
- const uint16_t _mask[8] = {((imm) & (1 << 0)) ? 0xFFFF : 0x0000, \
- ((imm) & (1 << 1)) ? 0xFFFF : 0x0000, \
- ((imm) & (1 << 2)) ? 0xFFFF : 0x0000, \
- ((imm) & (1 << 3)) ? 0xFFFF : 0x0000, \
- ((imm) & (1 << 4)) ? 0xFFFF : 0x0000, \
- ((imm) & (1 << 5)) ? 0xFFFF : 0x0000, \
- ((imm) & (1 << 6)) ? 0xFFFF : 0x0000, \
- ((imm) & (1 << 7)) ? 0xFFFF : 0x0000}; \
- uint16x8_t _mask_vec = vld1q_u16(_mask); \
- uint16x8_t _a = vreinterpretq_u16_m128i(a); \
- uint16x8_t _b = vreinterpretq_u16_m128i(b); \
- vreinterpretq_m128i_u16(vbslq_u16(_mask_vec, _b, _a)); \
- })
-
-// Blend packed 8-bit integers from a and b using mask, and store the results in
-// dst.
+// r0 := a0
+// r1 := b0
+// r2 := a1
+// r3 := b1
//
-// FOR j := 0 to 15
-// i := j*8
-// IF mask[i+7]
-// dst[i+7:i] := b[i+7:i]
-// ELSE
-// dst[i+7:i] := a[i+7:i]
-// FI
-// ENDFOR
-FORCE_INLINE __m128i _mm_blendv_epi8(__m128i _a, __m128i _b, __m128i _mask)
+// https://msdn.microsoft.com/en-us/library/25st103b%28v=vs.90%29.aspx
+FORCE_INLINE __m128 _mm_unpacklo_ps(__m128 a, __m128 b)
{
- // Use a signed shift right to create a mask with the sign bit
- uint8x16_t mask =
- vreinterpretq_u8_s8(vshrq_n_s8(vreinterpretq_s8_m128i(_mask), 7));
- uint8x16_t a = vreinterpretq_u8_m128i(_a);
- uint8x16_t b = vreinterpretq_u8_m128i(_b);
- return vreinterpretq_m128i_u8(vbslq_u8(mask, b, a));
+#if defined(__aarch64__)
+ return vreinterpretq_m128_f32(
+ vzip1q_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
+#else
+ float32x2_t a1 = vget_low_f32(vreinterpretq_f32_m128(a));
+ float32x2_t b1 = vget_low_f32(vreinterpretq_f32_m128(b));
+ float32x2x2_t result = vzip_f32(a1, b1);
+ return vreinterpretq_m128_f32(vcombine_f32(result.val[0], result.val[1]));
+#endif
}
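+
+// Usage sketch (illustrative only; a0..b3 are hypothetical float values):
+// unpacklo/unpackhi together interleave two registers lane by lane.
+//
+//   __m128 a = _mm_setr_ps(a0, a1, a2, a3);
+//   __m128 b = _mm_setr_ps(b0, b1, b2, b3);
+//   __m128 lo = _mm_unpacklo_ps(a, b); // {a0, b0, a1, b1}
+//   __m128 hi = _mm_unpackhi_ps(a, b); // {a2, b2, a3, b3}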
-/* Shifts */
+// Computes the bitwise XOR (exclusive OR) of the four single-precision,
+// floating-point values of a and b.
+// https://msdn.microsoft.com/en-us/library/ss6k3wk8(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_xor_ps(__m128 a, __m128 b)
+{
+ return vreinterpretq_m128_s32(
+ veorq_s32(vreinterpretq_s32_m128(a), vreinterpretq_s32_m128(b)));
+}
+/* SSE2 */
-// Shift packed 16-bit integers in a right by imm while shifting in sign
-// bits, and store the results in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_srai_epi16
-FORCE_INLINE __m128i _mm_srai_epi16(__m128i a, int imm)
+// Adds the 8 signed or unsigned 16-bit integers in a to the 8 signed or
+// unsigned 16-bit integers in b.
+// https://msdn.microsoft.com/en-us/library/fceha5k4(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_add_epi16(__m128i a, __m128i b)
{
- const int count = (imm & ~15) ? 15 : imm;
- return (__m128i) vshlq_s16((int16x8_t) a, vdupq_n_s16(-count));
+ return vreinterpretq_m128i_s16(
+ vaddq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
}
-// Shifts the 8 signed or unsigned 16-bit integers in a left by count bits while
-// shifting in zeros.
+// Adds the 4 signed or unsigned 32-bit integers in a to the 4 signed or
+// unsigned 32-bit integers in b.
//
-// r0 := a0 << count
-// r1 := a1 << count
-// ...
-// r7 := a7 << count
+// r0 := a0 + b0
+// r1 := a1 + b1
+// r2 := a2 + b2
+// r3 := a3 + b3
//
-// https://msdn.microsoft.com/en-us/library/es73bcsy(v=vs.90).aspx
-#define _mm_slli_epi16(a, imm) \
- __extension__({ \
- __m128i ret; \
- if (unlikely((imm)) <= 0) { \
- ret = a; \
- } \
- if (unlikely((imm) > 15)) { \
- ret = _mm_setzero_si128(); \
- } else { \
- ret = vreinterpretq_m128i_s16( \
- vshlq_n_s16(vreinterpretq_s16_m128i(a), (imm))); \
- } \
- ret; \
- })
-
-// Shifts the 4 signed or unsigned 32-bit integers in a left by count bits while
-// shifting in zeros. :
-// https://msdn.microsoft.com/en-us/library/z2k3bbtb%28v=vs.90%29.aspx
-// FORCE_INLINE __m128i _mm_slli_epi32(__m128i a, __constrange(0,255) int imm)
-FORCE_INLINE __m128i _mm_slli_epi32(__m128i a, int imm)
+// https://msdn.microsoft.com/en-us/library/vstudio/09xs4fkk(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_add_epi32(__m128i a, __m128i b)
{
- if (unlikely(imm <= 0)) /* TODO: add constant range macro: [0, 255] */
- return a;
- if (unlikely(imm > 31))
- return _mm_setzero_si128();
return vreinterpretq_m128i_s32(
- vshlq_s32(vreinterpretq_s32_m128i(a), vdupq_n_s32(imm)));
+ vaddq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
}
-// Shift packed 64-bit integers in a left by imm8 while shifting in zeros, and
-// store the results in dst.
-FORCE_INLINE __m128i _mm_slli_epi64(__m128i a, int imm)
+// Adds the 2 signed or unsigned 64-bit integers in a to the 2 signed or
+// unsigned 64-bit integers in b.
+// https://msdn.microsoft.com/en-us/library/vstudio/09xs4fkk(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_add_epi64(__m128i a, __m128i b)
{
- if (unlikely(imm <= 0)) /* TODO: add constant range macro: [0, 255] */
- return a;
- if (unlikely(imm > 63))
- return _mm_setzero_si128();
return vreinterpretq_m128i_s64(
- vshlq_s64(vreinterpretq_s64_m128i(a), vdupq_n_s64(imm)));
+ vaddq_s64(vreinterpretq_s64_m128i(a), vreinterpretq_s64_m128i(b)));
}
-// Shift packed 16-bit integers in a right by imm8 while shifting in zeros, and
+// Adds the 16 signed or unsigned 8-bit integers in a to the 16 signed or
+// unsigned 8-bit integers in b.
+// https://technet.microsoft.com/en-us/subscriptions/yc7tcyzs(v=vs.90)
+FORCE_INLINE __m128i _mm_add_epi8(__m128i a, __m128i b)
+{
+ return vreinterpretq_m128i_s8(
+ vaddq_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b)));
+}
+
+// Add packed double-precision (64-bit) floating-point elements in a and b, and
// store the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_add_pd
+FORCE_INLINE __m128d _mm_add_pd(__m128d a, __m128d b)
+{
+#if defined(__aarch64__)
+ return vreinterpretq_m128d_f64(
+ vaddq_f64(vreinterpretq_f64_m128d(a), vreinterpretq_f64_m128d(b)));
+#else
+ double *da = (double *) &a;
+ double *db = (double *) &b;
+ double c[2];
+ c[0] = da[0] + db[0];
+ c[1] = da[1] + db[1];
+ return vld1q_f32((float32_t *) c);
+#endif
+}
+
+// Add the lower double-precision (64-bit) floating-point element in a and b,
+// store the result in the lower element of dst, and copy the upper element from
+// a to the upper element of dst.
//
-// FOR j := 0 to 7
-// i := j*16
-// IF imm8[7:0] > 15
-// dst[i+15:i] := 0
-// ELSE
-// dst[i+15:i] := ZeroExtend16(a[i+15:i] >> imm8[7:0])
-// FI
-// ENDFOR
+// dst[63:0] := a[63:0] + b[63:0]
+// dst[127:64] := a[127:64]
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_srli_epi16
-#define _mm_srli_epi16(a, imm) \
- __extension__({ \
- __m128i ret; \
- if (unlikely(imm) == 0) { \
- ret = a; \
- } \
- if (likely(0 < (imm) && (imm) < 16)) { \
- ret = vreinterpretq_m128i_u16( \
- vshlq_u16(vreinterpretq_u16_m128i(a), vdupq_n_s16(-imm))); \
- } else { \
- ret = _mm_setzero_si128(); \
- } \
- ret; \
- })
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_add_sd
+FORCE_INLINE __m128d _mm_add_sd(__m128d a, __m128d b)
+{
+#if defined(__aarch64__)
+ return _mm_move_sd(a, _mm_add_pd(a, b));
+#else
+ double *da = (double *) &a;
+ double *db = (double *) &b;
+ double c[2];
+ c[0] = da[0] + db[0];
+ c[1] = da[1];
+ return vld1q_f32((float32_t *) c);
+#endif
+}
-// Shift packed 32-bit integers in a right by imm8 while shifting in zeros, and
-// store the results in dst.
+// Add 64-bit integers a and b, and store the result in dst.
//
-// FOR j := 0 to 3
-// i := j*32
-// IF imm8[7:0] > 31
-// dst[i+31:i] := 0
-// ELSE
-// dst[i+31:i] := ZeroExtend32(a[i+31:i] >> imm8[7:0])
-// FI
-// ENDFOR
+// dst[63:0] := a[63:0] + b[63:0]
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_srli_epi32
-// FORCE_INLINE __m128i _mm_srli_epi32(__m128i a, __constrange(0,255) int imm)
-#define _mm_srli_epi32(a, imm) \
- __extension__({ \
- __m128i ret; \
- if (unlikely((imm) == 0)) { \
- ret = a; \
- } \
- if (likely(0 < (imm) && (imm) < 32)) { \
- ret = vreinterpretq_m128i_u32( \
- vshlq_u32(vreinterpretq_u32_m128i(a), vdupq_n_s32(-imm))); \
- } else { \
- ret = _mm_setzero_si128(); \
- } \
- ret; \
- })
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_add_si64
+FORCE_INLINE __m64 _mm_add_si64(__m64 a, __m64 b)
+{
+ return vreinterpret_m64_s64(
+ vadd_s64(vreinterpret_s64_m64(a), vreinterpret_s64_m64(b)));
+}
-// Shift packed 64-bit integers in a right by imm8 while shifting in zeros, and
-// store the results in dst.
+// Adds the 8 signed 16-bit integers in a to the 8 signed 16-bit integers in b
+// and saturates.
//
-// FOR j := 0 to 1
-// i := j*64
-// IF imm8[7:0] > 63
-// dst[i+63:i] := 0
-// ELSE
-// dst[i+63:i] := ZeroExtend64(a[i+63:i] >> imm8[7:0])
-// FI
-// ENDFOR
+// r0 := SignedSaturate(a0 + b0)
+// r1 := SignedSaturate(a1 + b1)
+// ...
+// r7 := SignedSaturate(a7 + b7)
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_srli_epi64
-#define _mm_srli_epi64(a, imm) \
- __extension__({ \
- __m128i ret; \
- if (unlikely((imm) == 0)) { \
- ret = a; \
- } \
- if (likely(0 < (imm) && (imm) < 64)) { \
- ret = vreinterpretq_m128i_u64( \
- vshlq_u64(vreinterpretq_u64_m128i(a), vdupq_n_s64(-imm))); \
- } else { \
- ret = _mm_setzero_si128(); \
- } \
- ret; \
- })
+// https://msdn.microsoft.com/en-us/library/1a306ef8(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_adds_epi16(__m128i a, __m128i b)
+{
+ return vreinterpretq_m128i_s16(
+ vqaddq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
+}
-// Shift packed 32-bit integers in a right by imm8 while shifting in sign bits,
-// and store the results in dst.
+// Add packed signed 8-bit integers in a and b using saturation, and store the
+// results in dst.
//
-// FOR j := 0 to 3
-// i := j*32
-// IF imm8[7:0] > 31
-// dst[i+31:i] := (a[i+31] ? 0xFFFFFFFF : 0x0)
-// ELSE
-// dst[i+31:i] := SignExtend32(a[i+31:i] >> imm8[7:0])
-// FI
+// FOR j := 0 to 15
+// i := j*8
+// dst[i+7:i] := Saturate8( a[i+7:i] + b[i+7:i] )
// ENDFOR
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_srai_epi32
-// FORCE_INLINE __m128i _mm_srai_epi32(__m128i a, __constrange(0,255) int imm)
-#define _mm_srai_epi32(a, imm) \
- __extension__({ \
- __m128i ret; \
- if (unlikely((imm) == 0)) { \
- ret = a; \
- } \
- if (likely(0 < (imm) && (imm) < 32)) { \
- ret = vreinterpretq_m128i_s32( \
- vshlq_s32(vreinterpretq_s32_m128i(a), vdupq_n_s32(-imm))); \
- } else { \
- ret = vreinterpretq_m128i_s32( \
- vshrq_n_s32(vreinterpretq_s32_m128i(a), 31)); \
- } \
- ret; \
- })
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_adds_epi8
+FORCE_INLINE __m128i _mm_adds_epi8(__m128i a, __m128i b)
+{
+ return vreinterpretq_m128i_s8(
+ vqaddq_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b)));
+}
-// Shifts the 128 - bit value in a right by imm bytes while shifting in
-// zeros.imm must be an immediate.
-//
-// r := srl(a, imm*8)
-//
-// https://msdn.microsoft.com/en-us/library/305w28yz(v=vs.100).aspx
-// FORCE_INLINE _mm_srli_si128(__m128i a, __constrange(0,255) int imm)
-#define _mm_srli_si128(a, imm) \
- __extension__({ \
- __m128i ret; \
- if (unlikely((imm) <= 0)) { \
- ret = a; \
- } \
- if (unlikely((imm) > 15)) { \
- ret = _mm_setzero_si128(); \
- } else { \
- ret = vreinterpretq_m128i_s8( \
- vextq_s8(vreinterpretq_s8_m128i(a), vdupq_n_s8(0), (imm))); \
- } \
- ret; \
- })
+// Add packed unsigned 16-bit integers in a and b using saturation, and store
+// the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_adds_epu16
+FORCE_INLINE __m128i _mm_adds_epu16(__m128i a, __m128i b)
+{
+ return vreinterpretq_m128i_u16(
+ vqaddq_u16(vreinterpretq_u16_m128i(a), vreinterpretq_u16_m128i(b)));
+}
-// Shifts the 128-bit value in a left by imm bytes while shifting in zeros. imm
-// must be an immediate.
-//
-// r := a << (imm * 8)
-//
-// https://msdn.microsoft.com/en-us/library/34d3k2kt(v=vs.100).aspx
-// FORCE_INLINE __m128i _mm_slli_si128(__m128i a, __constrange(0,255) int imm)
-#define _mm_slli_si128(a, imm) \
- __extension__({ \
- __m128i ret; \
- if (unlikely((imm) <= 0)) { \
- ret = a; \
- } \
- if (unlikely((imm) > 15)) { \
- ret = _mm_setzero_si128(); \
- } else { \
- ret = vreinterpretq_m128i_s8(vextq_s8( \
- vdupq_n_s8(0), vreinterpretq_s8_m128i(a), 16 - (imm))); \
- } \
- ret; \
- })
+// Adds the 16 unsigned 8-bit integers in a to the 16 unsigned 8-bit integers in
+// b and saturates.
+// https://msdn.microsoft.com/en-us/library/9hahyddy(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_adds_epu8(__m128i a, __m128i b)
+{
+ return vreinterpretq_m128i_u8(
+ vqaddq_u8(vreinterpretq_u8_m128i(a), vreinterpretq_u8_m128i(b)));
+}
-// Shifts the 8 signed or unsigned 16-bit integers in a left by count bits while
-// shifting in zeros.
+// Compute the bitwise AND of packed double-precision (64-bit) floating-point
+// elements in a and b, and store the results in dst.
//
-// r0 := a0 << count
-// r1 := a1 << count
-// ...
-// r7 := a7 << count
+// FOR j := 0 to 1
+// i := j*64
+// dst[i+63:i] := a[i+63:i] AND b[i+63:i]
+// ENDFOR
//
-// https://msdn.microsoft.com/en-us/library/c79w388h(v%3dvs.90).aspx
-FORCE_INLINE __m128i _mm_sll_epi16(__m128i a, __m128i count)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_and_pd
+FORCE_INLINE __m128d _mm_and_pd(__m128d a, __m128d b)
{
- uint64_t c = vreinterpretq_nth_u64_m128i(count, 0);
- if (unlikely(c > 15))
- return _mm_setzero_si128();
-
- int16x8_t vc = vdupq_n_s16((int16_t) c);
- return vreinterpretq_m128i_s16(vshlq_s16(vreinterpretq_s16_m128i(a), vc));
+ return vreinterpretq_m128d_s64(
+ vandq_s64(vreinterpretq_s64_m128d(a), vreinterpretq_s64_m128d(b)));
}
-// Shifts the 4 signed or unsigned 32-bit integers in a left by count bits while
-// shifting in zeros.
+// Computes the bitwise AND of the 128-bit value in a and the 128-bit value in
+// b.
//
-// r0 := a0 << count
-// r1 := a1 << count
-// r2 := a2 << count
-// r3 := a3 << count
+// r := a & b
//
-// https://msdn.microsoft.com/en-us/library/6fe5a6s9(v%3dvs.90).aspx
-FORCE_INLINE __m128i _mm_sll_epi32(__m128i a, __m128i count)
+// https://msdn.microsoft.com/en-us/library/vstudio/6d1txsa8(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_and_si128(__m128i a, __m128i b)
{
- uint64_t c = vreinterpretq_nth_u64_m128i(count, 0);
- if (unlikely(c > 31))
- return _mm_setzero_si128();
-
- int32x4_t vc = vdupq_n_s32((int32_t) c);
- return vreinterpretq_m128i_s32(vshlq_s32(vreinterpretq_s32_m128i(a), vc));
+ return vreinterpretq_m128i_s32(
+ vandq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
}
-// Shifts the 2 signed or unsigned 64-bit integers in a left by count bits while
-// shifting in zeros.
+// Compute the bitwise NOT of packed double-precision (64-bit) floating-point
+// elements in a and then AND with b, and store the results in dst.
//
-// r0 := a0 << count
-// r1 := a1 << count
+// FOR j := 0 to 1
+// i := j*64
+// dst[i+63:i] := ((NOT a[i+63:i]) AND b[i+63:i])
+// ENDFOR
//
-// https://msdn.microsoft.com/en-us/library/6ta9dffd(v%3dvs.90).aspx
-FORCE_INLINE __m128i _mm_sll_epi64(__m128i a, __m128i count)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_andnot_pd
+FORCE_INLINE __m128d _mm_andnot_pd(__m128d a, __m128d b)
{
- uint64_t c = vreinterpretq_nth_u64_m128i(count, 0);
- if (unlikely(c > 63))
- return _mm_setzero_si128();
-
- int64x2_t vc = vdupq_n_s64((int64_t) c);
- return vreinterpretq_m128i_s64(vshlq_s64(vreinterpretq_s64_m128i(a), vc));
+ // *NOTE* argument swap
+ return vreinterpretq_m128d_s64(
+ vbicq_s64(vreinterpretq_s64_m128d(b), vreinterpretq_s64_m128d(a)));
}
-// Shifts the 8 signed or unsigned 16-bit integers in a right by count bits
-// while shifting in zeros.
+// Computes the bitwise AND of the 128-bit value in b and the bitwise NOT of the
+// 128-bit value in a.
//
-// r0 := srl(a0, count)
-// r1 := srl(a1, count)
-// ...
-// r7 := srl(a7, count)
+// r := (~a) & b
//
-// https://msdn.microsoft.com/en-us/library/wd5ax830(v%3dvs.90).aspx
-FORCE_INLINE __m128i _mm_srl_epi16(__m128i a, __m128i count)
+// https://msdn.microsoft.com/en-us/library/vstudio/1beaceh8(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_andnot_si128(__m128i a, __m128i b)
{
- uint64_t c = vreinterpretq_nth_u64_m128i(count, 0);
- if (unlikely(c > 15))
- return _mm_setzero_si128();
-
- int16x8_t vc = vdupq_n_s16(-(int16_t) c);
- return vreinterpretq_m128i_u16(vshlq_u16(vreinterpretq_u16_m128i(a), vc));
+ return vreinterpretq_m128i_s32(
+ vbicq_s32(vreinterpretq_s32_m128i(b),
+ vreinterpretq_s32_m128i(a))); // *NOTE* argument swap
}
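+
+// Usage note (illustrative; mask and value are hypothetical __m128i
+// variables): the operand order is dst = (~a) & b, so the first argument acts
+// as the set of bits to clear from the second.
+//
+//   __m128i kept = _mm_andnot_si128(mask, value); // clears mask's bits in value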
-// Shifts the 4 signed or unsigned 32-bit integers in a right by count bits
-// while shifting in zeros.
+// Computes the average of the 8 unsigned 16-bit integers in a and the 8
+// unsigned 16-bit integers in b and rounds.
//
-// r0 := srl(a0, count)
-// r1 := srl(a1, count)
-// r2 := srl(a2, count)
-// r3 := srl(a3, count)
+// r0 := (a0 + b0 + 1) >> 1
+// r1 := (a1 + b1 + 1) >> 1
+// ...
+// r7 := (a7 + b7 + 1) >> 1
//
-// https://msdn.microsoft.com/en-us/library/a9cbttf4(v%3dvs.90).aspx
-FORCE_INLINE __m128i _mm_srl_epi32(__m128i a, __m128i count)
+// https://msdn.microsoft.com/en-us/library/vstudio/y13ca3c8(v=vs.90).aspx
+FORCE_INLINE __m128i _mm_avg_epu16(__m128i a, __m128i b)
{
- uint64_t c = vreinterpretq_nth_u64_m128i(count, 0);
- if (unlikely(c > 31))
- return _mm_setzero_si128();
-
- int32x4_t vc = vdupq_n_s32(-(int32_t) c);
- return vreinterpretq_m128i_u32(vshlq_u32(vreinterpretq_u32_m128i(a), vc));
+ return (__m128i) vrhaddq_u16(vreinterpretq_u16_m128i(a),
+ vreinterpretq_u16_m128i(b));
}
-// Shifts the 2 signed or unsigned 64-bit integers in a right by count bits
-// while shifting in zeros.
+// Computes the average of the 16 unsigned 8-bit integers in a and the 16
+// unsigned 8-bit integers in b and rounds.
//
-// r0 := srl(a0, count)
-// r1 := srl(a1, count)
+// r0 := (a0 + b0 + 1) >> 1
+// r1 := (a1 + b1 + 1) >> 1
+// ...
+// r15 := (a15 + b15 + 1) >> 1
//
-// https://msdn.microsoft.com/en-us/library/yf6cf9k8(v%3dvs.90).aspx
-FORCE_INLINE __m128i _mm_srl_epi64(__m128i a, __m128i count)
+// https://msdn.microsoft.com/en-us/library/vstudio/8zwh554a(v%3dvs.90).aspx
+FORCE_INLINE __m128i _mm_avg_epu8(__m128i a, __m128i b)
{
- uint64_t c = vreinterpretq_nth_u64_m128i(count, 0);
- if (unlikely(c > 63))
- return _mm_setzero_si128();
-
- int64x2_t vc = vdupq_n_s64(-(int64_t) c);
- return vreinterpretq_m128i_u64(vshlq_u64(vreinterpretq_u64_m128i(a), vc));
+ return vreinterpretq_m128i_u8(
+ vrhaddq_u8(vreinterpretq_u8_m128i(a), vreinterpretq_u8_m128i(b)));
}
-// NEON does not provide a version of this function.
-// Creates a 16-bit mask from the most significant bits of the 16 signed or
-// unsigned 8-bit integers in a and zero extends the upper bits.
-// https://msdn.microsoft.com/en-us/library/vstudio/s090c8fk(v=vs.100).aspx
-FORCE_INLINE int _mm_movemask_epi8(__m128i a)
-{
- // Use increasingly wide shifts+adds to collect the sign bits
- // together.
- // Since the widening shifts would be rather confusing to follow in little
- // endian, everything will be illustrated in big endian order instead. This
- // has a different result - the bits would actually be reversed on a big
- // endian machine.
-
- // Starting input (only half the elements are shown):
- // 89 ff 1d c0 00 10 99 33
- uint8x16_t input = vreinterpretq_u8_m128i(a);
-
- // Shift out everything but the sign bits with an unsigned shift right.
- //
- // Bytes of the vector::
- // 89 ff 1d c0 00 10 99 33
- // \ \ \ \ \ \ \ \ high_bits = (uint16x4_t)(input >> 7)
- // | | | | | | | |
- // 01 01 00 01 00 00 01 00
- //
- // Bits of first important lane(s):
- // 10001001 (89)
- // \______
- // |
- // 00000001 (01)
- uint16x8_t high_bits = vreinterpretq_u16_u8(vshrq_n_u8(input, 7));
-
- // Merge the even lanes together with a 16-bit unsigned shift right + add.
- // 'xx' represents garbage data which will be ignored in the final result.
- // In the important bytes, the add functions like a binary OR.
- //
- // 01 01 00 01 00 00 01 00
- // \_ | \_ | \_ | \_ | paired16 = (uint32x4_t)(input + (input >> 7))
- // \| \| \| \|
- // xx 03 xx 01 xx 00 xx 02
- //
- // 00000001 00000001 (01 01)
- // \_______ |
- // \|
- // xxxxxxxx xxxxxx11 (xx 03)
- uint32x4_t paired16 =
- vreinterpretq_u32_u16(vsraq_n_u16(high_bits, high_bits, 7));
+// Shift a left by imm8 bytes while shifting in zeros, and store the results in
+// dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_bslli_si128
+#define _mm_bslli_si128(a, imm) _mm_slli_si128(a, imm)
- // Repeat with a wider 32-bit shift + add.
- // xx 03 xx 01 xx 00 xx 02
- // \____ | \____ | paired32 = (uint64x1_t)(paired16 + (paired16 >>
- // 14))
- // \| \|
- // xx xx xx 0d xx xx xx 02
- //
- // 00000011 00000001 (03 01)
- // \\_____ ||
- // '----.\||
- // xxxxxxxx xxxx1101 (xx 0d)
- uint64x2_t paired32 =
- vreinterpretq_u64_u32(vsraq_n_u32(paired16, paired16, 14));
+// Shift a right by imm8 bytes while shifting in zeros, and store the results in
+// dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_bsrli_si128
+#define _mm_bsrli_si128(a, imm) _mm_srli_si128(a, imm)
- // Last, an even wider 64-bit shift + add to get our result in the low 8 bit
- // lanes. xx xx xx 0d xx xx xx 02
- // \_________ | paired64 = (uint8x8_t)(paired32 + (paired32 >>
- // 28))
- // \|
- // xx xx xx xx xx xx xx d2
- //
- // 00001101 00000010 (0d 02)
- // \ \___ | |
- // '---. \| |
- // xxxxxxxx 11010010 (xx d2)
- uint8x16_t paired64 =
- vreinterpretq_u8_u64(vsraq_n_u64(paired32, paired32, 28));
+// Cast vector of type __m128d to type __m128. This intrinsic is only used for
+// compilation and does not generate any instructions, thus it has zero latency.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_castpd_ps
+FORCE_INLINE __m128 _mm_castpd_ps(__m128d a)
+{
+ return vreinterpretq_m128_s64(vreinterpretq_s64_m128d(a));
+}
- // Extract the low 8 bits from each 64-bit lane with 2 8-bit extracts.
- // xx xx xx xx xx xx xx d2
- // || return paired64[0]
- // d2
- // Note: Little endian would return the correct value 4b (01001011) instead.
- return vgetq_lane_u8(paired64, 0) | ((int) vgetq_lane_u8(paired64, 8) << 8);
+// Cast vector of type __m128d to type __m128i. This intrinsic is only used for
+// compilation and does not generate any instructions, thus it has zero latency.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_castpd_si128
+FORCE_INLINE __m128i _mm_castpd_si128(__m128d a)
+{
+ return vreinterpretq_m128i_s64(vreinterpretq_s64_m128d(a));
}
-// Copy the lower 64-bit integer in a to dst.
-//
-// dst[63:0] := a[63:0]
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_movepi64_pi64
-FORCE_INLINE __m64 _mm_movepi64_pi64(__m128i a)
+// Cast vector of type __m128 to type __m128d. This intrinsic is only used for
+// compilation and does not generate any instructions, thus it has zero latency.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_castps_pd
+FORCE_INLINE __m128d _mm_castps_pd(__m128 a)
{
- return vreinterpret_m64_s64(vget_low_s64(vreinterpretq_s64_m128i(a)));
+ return vreinterpretq_m128d_s32(vreinterpretq_s32_m128(a));
}
-// Copy the 64-bit integer a to the lower element of dst, and zero the upper
-// element.
-//
-// dst[63:0] := a[63:0]
-// dst[127:64] := 0
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_movpi64_epi64
-FORCE_INLINE __m128i _mm_movpi64_epi64(__m64 a)
+// Applies a type cast to reinterpret four 32-bit floating point values passed
+// in as a 128-bit parameter as packed 32-bit integers.
+// https://msdn.microsoft.com/en-us/library/bb514099.aspx
+FORCE_INLINE __m128i _mm_castps_si128(__m128 a)
{
- return vreinterpretq_m128i_s64(
- vcombine_s64(vreinterpret_s64_m64(a), vdup_n_s64(0)));
+ return vreinterpretq_m128i_s32(vreinterpretq_s32_m128(a));
}
-// NEON does not provide this method
-// Creates a 4-bit mask from the most significant bits of the four
-// single-precision, floating-point values.
-// https://msdn.microsoft.com/en-us/library/vstudio/4490ys29(v=vs.100).aspx
-FORCE_INLINE int _mm_movemask_ps(__m128 a)
+// Cast vector of type __m128i to type __m128d. This intrinsic is only used for
+// compilation and does not generate any instructions, thus it has zero latency.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_castsi128_pd
+FORCE_INLINE __m128d _mm_castsi128_pd(__m128i a)
{
- uint32x4_t input = vreinterpretq_u32_m128(a);
#if defined(__aarch64__)
- static const int32x4_t shift = {0, 1, 2, 3};
- uint32x4_t tmp = vshrq_n_u32(input, 31);
- return vaddvq_u32(vshlq_u32(tmp, shift));
+ return vreinterpretq_m128d_f64(vreinterpretq_f64_m128i(a));
#else
- // Uses the exact same method as _mm_movemask_epi8, see that for details.
- // Shift out everything but the sign bits with a 32-bit unsigned shift
- // right.
- uint64x2_t high_bits = vreinterpretq_u64_u32(vshrq_n_u32(input, 31));
- // Merge the two pairs together with a 64-bit unsigned shift right + add.
- uint8x16_t paired =
- vreinterpretq_u8_u64(vsraq_n_u64(high_bits, high_bits, 31));
- // Extract the result.
- return vgetq_lane_u8(paired, 0) | (vgetq_lane_u8(paired, 8) << 2);
+ return vreinterpretq_m128d_f32(vreinterpretq_f32_m128i(a));
#endif
}
-// Compute the bitwise NOT of a and then AND with a 128-bit vector containing
-// all 1's, and return 1 if the result is zero, otherwise return 0.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_test_all_ones
-FORCE_INLINE int _mm_test_all_ones(__m128i a)
+// Applies a type cast to reinterpret four 32-bit integers passed in as a
+// 128-bit parameter as packed 32-bit floating point values.
+// https://msdn.microsoft.com/en-us/library/bb514029.aspx
+FORCE_INLINE __m128 _mm_castsi128_ps(__m128i a)
{
- return (uint64_t)(vgetq_lane_s64(a, 0) & vgetq_lane_s64(a, 1)) ==
- ~(uint64_t) 0;
+ return vreinterpretq_m128_s32(vreinterpretq_s32_m128i(a));
}
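+
+// Illustrative usage sketch (not part of the emulated SSE API): because the
+// cast helpers above only reinterpret bits, integer logic can be applied to
+// floating-point data at no cost. The helper name below is hypothetical, and
+// it assumes _mm_set_epi32() and _mm_and_si128() are already in scope at this
+// point in the header.
+FORCE_INLINE __m128d sse2neon_example_fabs_pd(__m128d a)
+{
+    // Clear bit 63 of each 64-bit lane to drop the sign, i.e. fabs() per lane.
+    __m128i abs_mask = _mm_set_epi32(0x7FFFFFFF, -1, 0x7FFFFFFF, -1);
+    return _mm_castsi128_pd(_mm_and_si128(_mm_castpd_si128(a), abs_mask));
+}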
-// Compute the bitwise AND of 128 bits (representing integer data) in a and
-// mask, and return 1 if the result is zero, otherwise return 0.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_test_all_zeros
-FORCE_INLINE int _mm_test_all_zeros(__m128i a, __m128i mask)
+// Cache line containing p is flushed and invalidated from all caches in the
+// coherency domain.
+// https://msdn.microsoft.com/en-us/library/ba08y07y(v=vs.100).aspx
+FORCE_INLINE void _mm_clflush(void const *p)
{
- int64x2_t a_and_mask =
- vandq_s64(vreinterpretq_s64_m128i(a), vreinterpretq_s64_m128i(mask));
- return (vgetq_lane_s64(a_and_mask, 0) | vgetq_lane_s64(a_and_mask, 1)) ? 0
- : 1;
+ (void) p;
+    // No direct NEON counterpart; this is deliberately a no-op.
}
-/* Math operations */
+// Compares the 8 signed or unsigned 16-bit integers in a and the 8 signed or
+// unsigned 16-bit integers in b for equality.
+// https://msdn.microsoft.com/en-us/library/2ay060te(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_cmpeq_epi16(__m128i a, __m128i b)
+{
+ return vreinterpretq_m128i_u16(
+ vceqq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
+}
-// Subtracts the four single-precision, floating-point values of a and b.
-//
-// r0 := a0 - b0
-// r1 := a1 - b1
-// r2 := a2 - b2
-// r3 := a3 - b3
-//
-// https://msdn.microsoft.com/en-us/library/vstudio/1zad2k61(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_sub_ps(__m128 a, __m128 b)
+// Compare packed 32-bit integers in a and b for equality, and store the results
+// in dst
+FORCE_INLINE __m128i _mm_cmpeq_epi32(__m128i a, __m128i b)
{
- return vreinterpretq_m128_f32(
- vsubq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
+ return vreinterpretq_m128i_u32(
+ vceqq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
}
-// Subtract the lower single-precision (32-bit) floating-point element in b from
-// the lower single-precision (32-bit) floating-point element in a, store the
-// result in the lower element of dst, and copy the upper 3 packed elements from
-// a to the upper elements of dst.
-//
-// dst[31:0] := a[31:0] - b[31:0]
-// dst[127:32] := a[127:32]
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sub_ss
-FORCE_INLINE __m128 _mm_sub_ss(__m128 a, __m128 b)
+// Compares the 16 signed or unsigned 8-bit integers in a and the 16 signed or
+// unsigned 8-bit integers in b for equality.
+// https://msdn.microsoft.com/en-us/library/windows/desktop/bz5xk21a(v=vs.90).aspx
+FORCE_INLINE __m128i _mm_cmpeq_epi8(__m128i a, __m128i b)
{
- return _mm_move_ss(a, _mm_sub_ps(a, b));
+ return vreinterpretq_m128i_u8(
+ vceqq_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b)));
}
-// Subtract 2 packed 64-bit integers in b from 2 packed 64-bit integers in a,
-// and store the results in dst.
-// r0 := a0 - b0
-// r1 := a1 - b1
-FORCE_INLINE __m128i _mm_sub_epi64(__m128i a, __m128i b)
+// Compare packed double-precision (64-bit) floating-point elements in a and b
+// for equality, and store the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpeq_pd
+FORCE_INLINE __m128d _mm_cmpeq_pd(__m128d a, __m128d b)
{
- return vreinterpretq_m128i_s64(
- vsubq_s64(vreinterpretq_s64_m128i(a), vreinterpretq_s64_m128i(b)));
+#if defined(__aarch64__)
+ return vreinterpretq_m128d_u64(
+ vceqq_f64(vreinterpretq_f64_m128d(a), vreinterpretq_f64_m128d(b)));
+#else
+ // (a == b) -> (a_lo == b_lo) && (a_hi == b_hi)
+ uint32x4_t cmp =
+ vceqq_u32(vreinterpretq_u32_m128d(a), vreinterpretq_u32_m128d(b));
+ uint32x4_t swapped = vrev64q_u32(cmp);
+ return vreinterpretq_m128d_u32(vandq_u32(cmp, swapped));
+#endif
}
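+
+// Illustrative usage sketch (not part of the emulated SSE API): each lane of
+// the compare result is either all ones or all zeros, so it can drive a
+// branchless per-lane select. The helper name is hypothetical; the standard
+// SSE2 bitwise helpers _mm_and_pd(), _mm_andnot_pd() and _mm_or_pd() are
+// assumed to be in scope here.
+FORCE_INLINE __m128d sse2neon_example_select_eq_pd(__m128d a,
+                                                   __m128d b,
+                                                   __m128d if_eq,
+                                                   __m128d if_neq)
+{
+    __m128d mask = _mm_cmpeq_pd(a, b);
+    // (mask & if_eq) | (~mask & if_neq)
+    return _mm_or_pd(_mm_and_pd(mask, if_eq), _mm_andnot_pd(mask, if_neq));
+}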
-// Subtracts the 4 signed or unsigned 32-bit integers of b from the 4 signed or
-// unsigned 32-bit integers of a.
-//
-// r0 := a0 - b0
-// r1 := a1 - b1
-// r2 := a2 - b2
-// r3 := a3 - b3
-//
-// https://msdn.microsoft.com/en-us/library/vstudio/fhh866h0(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_sub_epi32(__m128i a, __m128i b)
+// Compare the lower double-precision (64-bit) floating-point elements in a and
+// b for equality, store the result in the lower element of dst, and copy the
+// upper element from a to the upper element of dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpeq_sd
+FORCE_INLINE __m128d _mm_cmpeq_sd(__m128d a, __m128d b)
{
- return vreinterpretq_m128i_s32(
- vsubq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
+ return _mm_move_sd(a, _mm_cmpeq_pd(a, b));
}
-// Subtract packed 16-bit integers in b from packed 16-bit integers in a, and
-// store the results in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sub_epi16
-FORCE_INLINE __m128i _mm_sub_epi16(__m128i a, __m128i b)
+// Compare packed double-precision (64-bit) floating-point elements in a and b
+// for greater-than-or-equal, and store the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpge_pd
+FORCE_INLINE __m128d _mm_cmpge_pd(__m128d a, __m128d b)
{
- return vreinterpretq_m128i_s16(
- vsubq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
+#if defined(__aarch64__)
+ return vreinterpretq_m128d_u64(
+ vcgeq_f64(vreinterpretq_f64_m128d(a), vreinterpretq_f64_m128d(b)));
+#else
+ uint64_t a0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(a));
+ uint64_t a1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(a));
+ uint64_t b0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(b));
+ uint64_t b1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(b));
+ uint64_t d[2];
+ d[0] = (*(double *) &a0) >= (*(double *) &b0) ? ~UINT64_C(0) : UINT64_C(0);
+ d[1] = (*(double *) &a1) >= (*(double *) &b1) ? ~UINT64_C(0) : UINT64_C(0);
+
+ return vreinterpretq_m128d_u64(vld1q_u64(d));
+#endif
}
-// Subtract packed 8-bit integers in b from packed 8-bit integers in a, and
-// store the results in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sub_epi8
-FORCE_INLINE __m128i _mm_sub_epi8(__m128i a, __m128i b)
+// Compare the lower double-precision (64-bit) floating-point elements in a and
+// b for greater-than-or-equal, store the result in the lower element of dst,
+// and copy the upper element from a to the upper element of dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpge_sd
+FORCE_INLINE __m128d _mm_cmpge_sd(__m128d a, __m128d b)
{
- return vreinterpretq_m128i_s8(
- vsubq_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b)));
+#if defined(__aarch64__)
+ return _mm_move_sd(a, _mm_cmpge_pd(a, b));
+#else
+    // Expand _mm_cmpge_pd() inline so the unused upper lane is not computed.
+ uint64_t a0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(a));
+ uint64_t a1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(a));
+ uint64_t b0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(b));
+ uint64_t d[2];
+ d[0] = (*(double *) &a0) >= (*(double *) &b0) ? ~UINT64_C(0) : UINT64_C(0);
+ d[1] = a1;
+
+ return vreinterpretq_m128d_u64(vld1q_u64(d));
+#endif
}
-// Subtract 64-bit integer b from 64-bit integer a, and store the result in dst.
+// Compares the 8 signed 16-bit integers in a and the 8 signed 16-bit integers
+// in b for greater than.
//
-// dst[63:0] := a[63:0] - b[63:0]
+// r0 := (a0 > b0) ? 0xffff : 0x0
+// r1 := (a1 > b1) ? 0xffff : 0x0
+// ...
+// r7 := (a7 > b7) ? 0xffff : 0x0
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sub_si64
-FORCE_INLINE __m64 _mm_sub_si64(__m64 a, __m64 b)
+// https://technet.microsoft.com/en-us/library/xd43yfsa(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_cmpgt_epi16(__m128i a, __m128i b)
{
- return vreinterpret_m64_s64(
- vsub_s64(vreinterpret_s64_m64(a), vreinterpret_s64_m64(b)));
+ return vreinterpretq_m128i_u16(
+ vcgtq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
}
-// Subtracts the 8 unsigned 16-bit integers of b from the 8 unsigned 16-bit
-// integers of a and saturates.
-// https://technet.microsoft.com/en-us/subscriptions/index/f44y0s19(v=vs.90).aspx
-FORCE_INLINE __m128i _mm_subs_epu16(__m128i a, __m128i b)
+// Compares the 4 signed 32-bit integers in a and the 4 signed 32-bit integers
+// in b for greater than.
+// https://msdn.microsoft.com/en-us/library/vstudio/1s9f2z0y(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_cmpgt_epi32(__m128i a, __m128i b)
{
- return vreinterpretq_m128i_u16(
- vqsubq_u16(vreinterpretq_u16_m128i(a), vreinterpretq_u16_m128i(b)));
+ return vreinterpretq_m128i_u32(
+ vcgtq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
}
-// Subtracts the 16 unsigned 8-bit integers of b from the 16 unsigned 8-bit
-// integers of a and saturates.
+// Compares the 16 signed 8-bit integers in a and the 16 signed 8-bit integers
+// in b for greater than.
//
-// r0 := UnsignedSaturate(a0 - b0)
-// r1 := UnsignedSaturate(a1 - b1)
+// r0 := (a0 > b0) ? 0xff : 0x0
+// r1 := (a1 > b1) ? 0xff : 0x0
// ...
-// r15 := UnsignedSaturate(a15 - b15)
+// r15 := (a15 > b15) ? 0xff : 0x0
//
-// https://technet.microsoft.com/en-us/subscriptions/yadkxc18(v=vs.90)
-FORCE_INLINE __m128i _mm_subs_epu8(__m128i a, __m128i b)
+// https://msdn.microsoft.com/zh-tw/library/wf45zt2b(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_cmpgt_epi8(__m128i a, __m128i b)
{
return vreinterpretq_m128i_u8(
- vqsubq_u8(vreinterpretq_u8_m128i(a), vreinterpretq_u8_m128i(b)));
+ vcgtq_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b)));
}
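+
+// Illustrative usage sketch (not part of the emulated SSE API): a byte-wise
+// greater-than mask can zero out lanes that fail a test. The helper name is
+// hypothetical; _mm_set1_epi8() and _mm_and_si128() are assumed to be in
+// scope at this point in the header.
+FORCE_INLINE __m128i sse2neon_example_keep_gt_epi8(__m128i v, int8_t threshold)
+{
+    // Lanes where v > threshold keep their value; every other lane becomes 0.
+    __m128i mask = _mm_cmpgt_epi8(v, _mm_set1_epi8(threshold));
+    return _mm_and_si128(v, mask);
+}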
-// Subtracts the 16 signed 8-bit integers of b from the 16 signed 8-bit integers
-// of a and saturates.
-//
-// r0 := SignedSaturate(a0 - b0)
-// r1 := SignedSaturate(a1 - b1)
-// ...
-// r15 := SignedSaturate(a15 - b15)
-//
-// https://technet.microsoft.com/en-us/subscriptions/by7kzks1(v=vs.90)
-FORCE_INLINE __m128i _mm_subs_epi8(__m128i a, __m128i b)
+// Compare packed double-precision (64-bit) floating-point elements in a and b
+// for greater-than, and store the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpgt_pd
+FORCE_INLINE __m128d _mm_cmpgt_pd(__m128d a, __m128d b)
{
- return vreinterpretq_m128i_s8(
- vqsubq_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b)));
+#if defined(__aarch64__)
+ return vreinterpretq_m128d_u64(
+ vcgtq_f64(vreinterpretq_f64_m128d(a), vreinterpretq_f64_m128d(b)));
+#else
+ uint64_t a0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(a));
+ uint64_t a1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(a));
+ uint64_t b0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(b));
+ uint64_t b1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(b));
+ uint64_t d[2];
+ d[0] = (*(double *) &a0) > (*(double *) &b0) ? ~UINT64_C(0) : UINT64_C(0);
+ d[1] = (*(double *) &a1) > (*(double *) &b1) ? ~UINT64_C(0) : UINT64_C(0);
+
+ return vreinterpretq_m128d_u64(vld1q_u64(d));
+#endif
}
-// Subtracts the 8 signed 16-bit integers of b from the 8 signed 16-bit integers
-// of a and saturates.
-//
-// r0 := SignedSaturate(a0 - b0)
-// r1 := SignedSaturate(a1 - b1)
-// ...
-// r7 := SignedSaturate(a7 - b7)
-//
-// https://technet.microsoft.com/en-us/subscriptions/3247z5b8(v=vs.90)
-FORCE_INLINE __m128i _mm_subs_epi16(__m128i a, __m128i b)
+// Compare the lower double-precision (64-bit) floating-point elements in a and
+// b for greater-than, store the result in the lower element of dst, and copy
+// the upper element from a to the upper element of dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpgt_sd
+FORCE_INLINE __m128d _mm_cmpgt_sd(__m128d a, __m128d b)
{
- return vreinterpretq_m128i_s16(
- vqsubq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
+#if defined(__aarch64__)
+ return _mm_move_sd(a, _mm_cmpgt_pd(a, b));
+#else
+    // Expand _mm_cmpgt_pd() inline so the unused upper lane is not computed.
+ uint64_t a0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(a));
+ uint64_t a1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(a));
+ uint64_t b0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(b));
+ uint64_t d[2];
+ d[0] = (*(double *) &a0) > (*(double *) &b0) ? ~UINT64_C(0) : UINT64_C(0);
+ d[1] = a1;
+
+ return vreinterpretq_m128d_u64(vld1q_u64(d));
+#endif
}
-// Subtract packed double-precision (64-bit) floating-point elements in b from
-// packed double-precision (64-bit) floating-point elements in a, and store the
-// results in dst.
-//
-// FOR j := 0 to 1
-// i := j*64
-// dst[i+63:i] := a[i+63:i] - b[i+63:i]
-// ENDFOR
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=mm_sub_pd
-FORCE_INLINE __m128d _mm_sub_pd(__m128d a, __m128d b)
+// Compare packed double-precision (64-bit) floating-point elements in a and b
+// for less-than-or-equal, and store the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmple_pd
+FORCE_INLINE __m128d _mm_cmple_pd(__m128d a, __m128d b)
{
#if defined(__aarch64__)
- return vreinterpretq_m128d_f64(
- vsubq_f64(vreinterpretq_f64_m128d(a), vreinterpretq_f64_m128d(b)));
+ return vreinterpretq_m128d_u64(
+ vcleq_f64(vreinterpretq_f64_m128d(a), vreinterpretq_f64_m128d(b)));
#else
- double *da = (double *) &a;
- double *db = (double *) &b;
- double c[2];
- c[0] = da[0] - db[0];
- c[1] = da[1] - db[1];
- return vld1q_f32((float32_t *) c);
+ uint64_t a0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(a));
+ uint64_t a1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(a));
+ uint64_t b0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(b));
+ uint64_t b1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(b));
+ uint64_t d[2];
+ d[0] = (*(double *) &a0) <= (*(double *) &b0) ? ~UINT64_C(0) : UINT64_C(0);
+ d[1] = (*(double *) &a1) <= (*(double *) &b1) ? ~UINT64_C(0) : UINT64_C(0);
+
+ return vreinterpretq_m128d_u64(vld1q_u64(d));
#endif
}
-// Subtract the lower double-precision (64-bit) floating-point element in b from
-// the lower double-precision (64-bit) floating-point element in a, store the
-// result in the lower element of dst, and copy the upper element from a to the
-// upper element of dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sub_sd
-FORCE_INLINE __m128d _mm_sub_sd(__m128d a, __m128d b)
+// Compare the lower double-precision (64-bit) floating-point elements in a and
+// b for less-than-or-equal, store the result in the lower element of dst, and
+// copy the upper element from a to the upper element of dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmple_sd
+FORCE_INLINE __m128d _mm_cmple_sd(__m128d a, __m128d b)
{
- return _mm_move_sd(a, _mm_sub_pd(a, b));
+#if defined(__aarch64__)
+ return _mm_move_sd(a, _mm_cmple_pd(a, b));
+#else
+    // Expand _mm_cmple_pd() inline so the unused upper lane is not computed.
+ uint64_t a0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(a));
+ uint64_t a1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(a));
+ uint64_t b0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(b));
+ uint64_t d[2];
+ d[0] = (*(double *) &a0) <= (*(double *) &b0) ? ~UINT64_C(0) : UINT64_C(0);
+ d[1] = a1;
+
+ return vreinterpretq_m128d_u64(vld1q_u64(d));
+#endif
}
-// Add packed unsigned 16-bit integers in a and b using saturation, and store
-// the results in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_adds_epu16
-FORCE_INLINE __m128i _mm_adds_epu16(__m128i a, __m128i b)
+// Compares the 8 signed 16-bit integers in a and the 8 signed 16-bit integers
+// in b for less than.
+//
+// r0 := (a0 < b0) ? 0xffff : 0x0
+// r1 := (a1 < b1) ? 0xffff : 0x0
+// ...
+// r7 := (a7 < b7) ? 0xffff : 0x0
+//
+// https://technet.microsoft.com/en-us/library/t863edb2(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_cmplt_epi16(__m128i a, __m128i b)
{
return vreinterpretq_m128i_u16(
- vqaddq_u16(vreinterpretq_u16_m128i(a), vreinterpretq_u16_m128i(b)));
+ vcltq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
}
-// Negate packed 8-bit integers in a when the corresponding signed
-// 8-bit integer in b is negative, and store the results in dst.
-// Element in dst are zeroed out when the corresponding element
-// in b is zero.
-//
-// for i in 0..15
-// if b[i] < 0
-// r[i] := -a[i]
-// else if b[i] == 0
-// r[i] := 0
-// else
-// r[i] := a[i]
-// fi
-// done
-FORCE_INLINE __m128i _mm_sign_epi8(__m128i _a, __m128i _b)
+
+// Compares the 4 signed 32-bit integers in a and the 4 signed 32-bit integers
+// in b for less than.
+// https://msdn.microsoft.com/en-us/library/vstudio/4ak0bf5d(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_cmplt_epi32(__m128i a, __m128i b)
{
- int8x16_t a = vreinterpretq_s8_m128i(_a);
- int8x16_t b = vreinterpretq_s8_m128i(_b);
+ return vreinterpretq_m128i_u32(
+ vcltq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
+}
- // signed shift right: faster than vclt
- // (b < 0) ? 0xFF : 0
- uint8x16_t ltMask = vreinterpretq_u8_s8(vshrq_n_s8(b, 7));
+// Compares the 16 signed 8-bit integers in a and the 16 signed 8-bit integers
+// in b for less than.
+// https://msdn.microsoft.com/en-us/library/windows/desktop/9s46csht(v=vs.90).aspx
+FORCE_INLINE __m128i _mm_cmplt_epi8(__m128i a, __m128i b)
+{
+ return vreinterpretq_m128i_u8(
+ vcltq_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b)));
+}
- // (b == 0) ? 0xFF : 0
+// Compare packed double-precision (64-bit) floating-point elements in a and b
+// for less-than, and store the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmplt_pd
+FORCE_INLINE __m128d _mm_cmplt_pd(__m128d a, __m128d b)
+{
#if defined(__aarch64__)
- int8x16_t zeroMask = vreinterpretq_s8_u8(vceqzq_s8(b));
+ return vreinterpretq_m128d_u64(
+ vcltq_f64(vreinterpretq_f64_m128d(a), vreinterpretq_f64_m128d(b)));
#else
- int8x16_t zeroMask = vreinterpretq_s8_u8(vceqq_s8(b, vdupq_n_s8(0)));
+ uint64_t a0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(a));
+ uint64_t a1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(a));
+ uint64_t b0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(b));
+ uint64_t b1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(b));
+ uint64_t d[2];
+ d[0] = (*(double *) &a0) < (*(double *) &b0) ? ~UINT64_C(0) : UINT64_C(0);
+ d[1] = (*(double *) &a1) < (*(double *) &b1) ? ~UINT64_C(0) : UINT64_C(0);
+
+ return vreinterpretq_m128d_u64(vld1q_u64(d));
#endif
-
-    // bitwise select either a or negative 'a' (vnegq_s8(a) returns negative 'a')
- // based on ltMask
- int8x16_t masked = vbslq_s8(ltMask, vnegq_s8(a), a);
- // res = masked & (~zeroMask)
- int8x16_t res = vbicq_s8(masked, zeroMask);
-
- return vreinterpretq_m128i_s8(res);
}
-// Negate packed 16-bit integers in a when the corresponding signed
-// 16-bit integer in b is negative, and store the results in dst.
-// Element in dst are zeroed out when the corresponding element
-// in b is zero.
-//
-// for i in 0..7
-// if b[i] < 0
-// r[i] := -a[i]
-// else if b[i] == 0
-// r[i] := 0
-// else
-// r[i] := a[i]
-// fi
-// done
-FORCE_INLINE __m128i _mm_sign_epi16(__m128i _a, __m128i _b)
+// Compare the lower double-precision (64-bit) floating-point elements in a and
+// b for less-than, store the result in the lower element of dst, and copy the
+// upper element from a to the upper element of dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmplt_sd
+FORCE_INLINE __m128d _mm_cmplt_sd(__m128d a, __m128d b)
{
- int16x8_t a = vreinterpretq_s16_m128i(_a);
- int16x8_t b = vreinterpretq_s16_m128i(_b);
-
- // signed shift right: faster than vclt
- // (b < 0) ? 0xFFFF : 0
- uint16x8_t ltMask = vreinterpretq_u16_s16(vshrq_n_s16(b, 15));
- // (b == 0) ? 0xFFFF : 0
#if defined(__aarch64__)
- int16x8_t zeroMask = vreinterpretq_s16_u16(vceqzq_s16(b));
+ return _mm_move_sd(a, _mm_cmplt_pd(a, b));
#else
- int16x8_t zeroMask = vreinterpretq_s16_u16(vceqq_s16(b, vdupq_n_s16(0)));
+ uint64_t a0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(a));
+ uint64_t a1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(a));
+ uint64_t b0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(b));
+ uint64_t d[2];
+ d[0] = (*(double *) &a0) < (*(double *) &b0) ? ~UINT64_C(0) : UINT64_C(0);
+ d[1] = a1;
+
+ return vreinterpretq_m128d_u64(vld1q_u64(d));
#endif
-
- // bitwise select either a or negative 'a' (vnegq_s16(a) equals to negative
- // 'a') based on ltMask
- int16x8_t masked = vbslq_s16(ltMask, vnegq_s16(a), a);
- // res = masked & (~zeroMask)
- int16x8_t res = vbicq_s16(masked, zeroMask);
- return vreinterpretq_m128i_s16(res);
}
-// Negate packed 32-bit integers in a when the corresponding signed
-// 32-bit integer in b is negative, and store the results in dst.
-// Element in dst are zeroed out when the corresponding element
-// in b is zero.
-//
-// for i in 0..3
-// if b[i] < 0
-// r[i] := -a[i]
-// else if b[i] == 0
-// r[i] := 0
-// else
-// r[i] := a[i]
-// fi
-// done
-FORCE_INLINE __m128i _mm_sign_epi32(__m128i _a, __m128i _b)
+// Compare packed double-precision (64-bit) floating-point elements in a and b
+// for not-equal, and store the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpneq_pd
+FORCE_INLINE __m128d _mm_cmpneq_pd(__m128d a, __m128d b)
{
- int32x4_t a = vreinterpretq_s32_m128i(_a);
- int32x4_t b = vreinterpretq_s32_m128i(_b);
-
- // signed shift right: faster than vclt
- // (b < 0) ? 0xFFFFFFFF : 0
- uint32x4_t ltMask = vreinterpretq_u32_s32(vshrq_n_s32(b, 31));
-
- // (b == 0) ? 0xFFFFFFFF : 0
#if defined(__aarch64__)
- int32x4_t zeroMask = vreinterpretq_s32_u32(vceqzq_s32(b));
+ return vreinterpretq_m128d_s32(vmvnq_s32(vreinterpretq_s32_u64(
+ vceqq_f64(vreinterpretq_f64_m128d(a), vreinterpretq_f64_m128d(b)))));
#else
- int32x4_t zeroMask = vreinterpretq_s32_u32(vceqq_s32(b, vdupq_n_s32(0)));
+ // (a == b) -> (a_lo == b_lo) && (a_hi == b_hi)
+ uint32x4_t cmp =
+ vceqq_u32(vreinterpretq_u32_m128d(a), vreinterpretq_u32_m128d(b));
+ uint32x4_t swapped = vrev64q_u32(cmp);
+ return vreinterpretq_m128d_u32(vmvnq_u32(vandq_u32(cmp, swapped)));
#endif
-
- // bitwise select either a or negative 'a' (vnegq_s32(a) equals to negative
- // 'a') based on ltMask
- int32x4_t masked = vbslq_s32(ltMask, vnegq_s32(a), a);
- // res = masked & (~zeroMask)
- int32x4_t res = vbicq_s32(masked, zeroMask);
- return vreinterpretq_m128i_s32(res);
}
-// Negate packed 16-bit integers in a when the corresponding signed 16-bit
-// integer in b is negative, and store the results in dst. Element in dst are
-// zeroed out when the corresponding element in b is zero.
-//
-// FOR j := 0 to 3
-// i := j*16
-// IF b[i+15:i] < 0
-// dst[i+15:i] := -(a[i+15:i])
-// ELSE IF b[i+15:i] == 0
-// dst[i+15:i] := 0
-// ELSE
-// dst[i+15:i] := a[i+15:i]
-// FI
-// ENDFOR
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sign_pi16
-FORCE_INLINE __m64 _mm_sign_pi16(__m64 _a, __m64 _b)
+// Compare the lower double-precision (64-bit) floating-point elements in a and
+// b for not-equal, store the result in the lower element of dst, and copy the
+// upper element from a to the upper element of dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpneq_sd
+FORCE_INLINE __m128d _mm_cmpneq_sd(__m128d a, __m128d b)
{
- int16x4_t a = vreinterpret_s16_m64(_a);
- int16x4_t b = vreinterpret_s16_m64(_b);
-
- // signed shift right: faster than vclt
- // (b < 0) ? 0xFFFF : 0
- uint16x4_t ltMask = vreinterpret_u16_s16(vshr_n_s16(b, 15));
+ return _mm_move_sd(a, _mm_cmpneq_pd(a, b));
+}
- // (b == 0) ? 0xFFFF : 0
+// Compare packed double-precision (64-bit) floating-point elements in a and b
+// for not-greater-than-or-equal, and store the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpnge_pd
+FORCE_INLINE __m128d _mm_cmpnge_pd(__m128d a, __m128d b)
+{
#if defined(__aarch64__)
- int16x4_t zeroMask = vreinterpret_s16_u16(vceqz_s16(b));
+ return vreinterpretq_m128d_u64(veorq_u64(
+ vcgeq_f64(vreinterpretq_f64_m128d(a), vreinterpretq_f64_m128d(b)),
+ vdupq_n_u64(UINT64_MAX)));
#else
- int16x4_t zeroMask = vreinterpret_s16_u16(vceq_s16(b, vdup_n_s16(0)));
+ uint64_t a0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(a));
+ uint64_t a1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(a));
+ uint64_t b0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(b));
+ uint64_t b1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(b));
+ uint64_t d[2];
+ d[0] =
+ !((*(double *) &a0) >= (*(double *) &b0)) ? ~UINT64_C(0) : UINT64_C(0);
+ d[1] =
+ !((*(double *) &a1) >= (*(double *) &b1)) ? ~UINT64_C(0) : UINT64_C(0);
+
+ return vreinterpretq_m128d_u64(vld1q_u64(d));
#endif
-
-    // bitwise select either a or negative 'a' (vneg_s16(a) returns negative 'a')
- // based on ltMask
- int16x4_t masked = vbsl_s16(ltMask, vneg_s16(a), a);
- // res = masked & (~zeroMask)
- int16x4_t res = vbic_s16(masked, zeroMask);
-
- return vreinterpret_m64_s16(res);
}
-// Negate packed 32-bit integers in a when the corresponding signed 32-bit
-// integer in b is negative, and store the results in dst. Element in dst are
-// zeroed out when the corresponding element in b is zero.
-//
-// FOR j := 0 to 1
-// i := j*32
-// IF b[i+31:i] < 0
-// dst[i+31:i] := -(a[i+31:i])
-// ELSE IF b[i+31:i] == 0
-// dst[i+31:i] := 0
-// ELSE
-// dst[i+31:i] := a[i+31:i]
-// FI
-// ENDFOR
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sign_pi32
-FORCE_INLINE __m64 _mm_sign_pi32(__m64 _a, __m64 _b)
+// Compare the lower double-precision (64-bit) floating-point elements in a and
+// b for not-greater-than-or-equal, store the result in the lower element of
+// dst, and copy the upper element from a to the upper element of dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpnge_sd
+FORCE_INLINE __m128d _mm_cmpnge_sd(__m128d a, __m128d b)
{
- int32x2_t a = vreinterpret_s32_m64(_a);
- int32x2_t b = vreinterpret_s32_m64(_b);
-
- // signed shift right: faster than vclt
- // (b < 0) ? 0xFFFFFFFF : 0
- uint32x2_t ltMask = vreinterpret_u32_s32(vshr_n_s32(b, 31));
+ return _mm_move_sd(a, _mm_cmpnge_pd(a, b));
+}
- // (b == 0) ? 0xFFFFFFFF : 0
+// Compare packed double-precision (64-bit) floating-point elements in a and b
+// for not-greater-than, and store the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpngt_pd
+FORCE_INLINE __m128d _mm_cmpngt_pd(__m128d a, __m128d b)
+{
#if defined(__aarch64__)
- int32x2_t zeroMask = vreinterpret_s32_u32(vceqz_s32(b));
+ return vreinterpretq_m128d_u64(veorq_u64(
+ vcgtq_f64(vreinterpretq_f64_m128d(a), vreinterpretq_f64_m128d(b)),
+ vdupq_n_u64(UINT64_MAX)));
#else
- int32x2_t zeroMask = vreinterpret_s32_u32(vceq_s32(b, vdup_n_s32(0)));
+ uint64_t a0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(a));
+ uint64_t a1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(a));
+ uint64_t b0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(b));
+ uint64_t b1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(b));
+ uint64_t d[2];
+ d[0] =
+ !((*(double *) &a0) > (*(double *) &b0)) ? ~UINT64_C(0) : UINT64_C(0);
+ d[1] =
+ !((*(double *) &a1) > (*(double *) &b1)) ? ~UINT64_C(0) : UINT64_C(0);
+
+ return vreinterpretq_m128d_u64(vld1q_u64(d));
#endif
-
-    // bitwise select either a or negative 'a' (vneg_s32(a) returns negative 'a')
- // based on ltMask
- int32x2_t masked = vbsl_s32(ltMask, vneg_s32(a), a);
- // res = masked & (~zeroMask)
- int32x2_t res = vbic_s32(masked, zeroMask);
-
- return vreinterpret_m64_s32(res);
}
-// Negate packed 8-bit integers in a when the corresponding signed 8-bit integer
-// in b is negative, and store the results in dst. Element in dst are zeroed out
-// when the corresponding element in b is zero.
-//
-// FOR j := 0 to 7
-// i := j*8
-// IF b[i+7:i] < 0
-// dst[i+7:i] := -(a[i+7:i])
-// ELSE IF b[i+7:i] == 0
-// dst[i+7:i] := 0
-// ELSE
-// dst[i+7:i] := a[i+7:i]
-// FI
-// ENDFOR
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sign_pi8
-FORCE_INLINE __m64 _mm_sign_pi8(__m64 _a, __m64 _b)
+// Compare the lower double-precision (64-bit) floating-point elements in a and
+// b for not-greater-than, store the result in the lower element of dst, and
+// copy the upper element from a to the upper element of dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpngt_sd
+FORCE_INLINE __m128d _mm_cmpngt_sd(__m128d a, __m128d b)
{
- int8x8_t a = vreinterpret_s8_m64(_a);
- int8x8_t b = vreinterpret_s8_m64(_b);
-
- // signed shift right: faster than vclt
- // (b < 0) ? 0xFF : 0
- uint8x8_t ltMask = vreinterpret_u8_s8(vshr_n_s8(b, 7));
+ return _mm_move_sd(a, _mm_cmpngt_pd(a, b));
+}
- // (b == 0) ? 0xFF : 0
+// Compare packed double-precision (64-bit) floating-point elements in a and b
+// for not-less-than-or-equal, and store the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpnle_pd
+FORCE_INLINE __m128d _mm_cmpnle_pd(__m128d a, __m128d b)
+{
#if defined(__aarch64__)
- int8x8_t zeroMask = vreinterpret_s8_u8(vceqz_s8(b));
+ return vreinterpretq_m128d_u64(veorq_u64(
+ vcleq_f64(vreinterpretq_f64_m128d(a), vreinterpretq_f64_m128d(b)),
+ vdupq_n_u64(UINT64_MAX)));
#else
- int8x8_t zeroMask = vreinterpret_s8_u8(vceq_s8(b, vdup_n_s8(0)));
+ uint64_t a0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(a));
+ uint64_t a1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(a));
+ uint64_t b0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(b));
+ uint64_t b1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(b));
+ uint64_t d[2];
+ d[0] =
+ !((*(double *) &a0) <= (*(double *) &b0)) ? ~UINT64_C(0) : UINT64_C(0);
+ d[1] =
+ !((*(double *) &a1) <= (*(double *) &b1)) ? ~UINT64_C(0) : UINT64_C(0);
+
+ return vreinterpretq_m128d_u64(vld1q_u64(d));
#endif
-
-    // bitwise select either a or negative 'a' (vneg_s8(a) returns negative 'a')
- // based on ltMask
- int8x8_t masked = vbsl_s8(ltMask, vneg_s8(a), a);
- // res = masked & (~zeroMask)
- int8x8_t res = vbic_s8(masked, zeroMask);
-
- return vreinterpret_m64_s8(res);
}
-// Average packed unsigned 16-bit integers in a and b, and store the results in
-// dst.
-//
-// FOR j := 0 to 3
-// i := j*16
-// dst[i+15:i] := (a[i+15:i] + b[i+15:i] + 1) >> 1
-// ENDFOR
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_avg_pu16
-FORCE_INLINE __m64 _mm_avg_pu16(__m64 a, __m64 b)
+// Compare the lower double-precision (64-bit) floating-point elements in a and
+// b for not-less-than-or-equal, store the result in the lower element of dst,
+// and copy the upper element from a to the upper element of dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpnle_sd
+FORCE_INLINE __m128d _mm_cmpnle_sd(__m128d a, __m128d b)
{
- return vreinterpret_m64_u16(
- vrhadd_u16(vreinterpret_u16_m64(a), vreinterpret_u16_m64(b)));
+ return _mm_move_sd(a, _mm_cmpnle_pd(a, b));
}
-// Average packed unsigned 8-bit integers in a and b, and store the results in
-// dst.
-//
-// FOR j := 0 to 7
-// i := j*8
-// dst[i+7:i] := (a[i+7:i] + b[i+7:i] + 1) >> 1
-// ENDFOR
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_avg_pu8
-FORCE_INLINE __m64 _mm_avg_pu8(__m64 a, __m64 b)
+// Compare packed double-precision (64-bit) floating-point elements in a and b
+// for not-less-than, and store the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpnlt_pd
+FORCE_INLINE __m128d _mm_cmpnlt_pd(__m128d a, __m128d b)
{
- return vreinterpret_m64_u8(
- vrhadd_u8(vreinterpret_u8_m64(a), vreinterpret_u8_m64(b)));
+#if defined(__aarch64__)
+ return vreinterpretq_m128d_u64(veorq_u64(
+ vcltq_f64(vreinterpretq_f64_m128d(a), vreinterpretq_f64_m128d(b)),
+ vdupq_n_u64(UINT64_MAX)));
+#else
+ uint64_t a0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(a));
+ uint64_t a1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(a));
+ uint64_t b0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(b));
+ uint64_t b1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(b));
+ uint64_t d[2];
+ d[0] =
+ !((*(double *) &a0) < (*(double *) &b0)) ? ~UINT64_C(0) : UINT64_C(0);
+ d[1] =
+ !((*(double *) &a1) < (*(double *) &b1)) ? ~UINT64_C(0) : UINT64_C(0);
+
+ return vreinterpretq_m128d_u64(vld1q_u64(d));
+#endif
}
-// Average packed unsigned 8-bit integers in a and b, and store the results in
-// dst.
-//
-// FOR j := 0 to 7
-// i := j*8
-// dst[i+7:i] := (a[i+7:i] + b[i+7:i] + 1) >> 1
-// ENDFOR
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_m_pavgb
-#define _m_pavgb(a, b) _mm_avg_pu8(a, b)
-
-// Average packed unsigned 16-bit integers in a and b, and store the results in
-// dst.
-//
-// FOR j := 0 to 3
-// i := j*16
-// dst[i+15:i] := (a[i+15:i] + b[i+15:i] + 1) >> 1
-// ENDFOR
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_m_pavgw
-#define _m_pavgw(a, b) _mm_avg_pu16(a, b)
-
-// Extract a 16-bit integer from a, selected with imm8, and store the result in
-// the lower element of dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_m_pextrw
-#define _m_pextrw(a, imm) _mm_extract_pi16(a, imm)
-
-// Copy a to dst, and insert the 16-bit integer i into dst at the location
-// specified by imm8.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=m_pinsrw
-#define _m_pinsrw(a, i, imm) _mm_insert_pi16(a, i, imm)
-
-// Compare packed signed 16-bit integers in a and b, and store packed maximum
-// values in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_m_pmaxsw
-#define _m_pmaxsw(a, b) _mm_max_pi16(a, b)
-
-// Compare packed unsigned 8-bit integers in a and b, and store packed maximum
-// values in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_m_pmaxub
-#define _m_pmaxub(a, b) _mm_max_pu8(a, b)
-
-// Compare packed signed 16-bit integers in a and b, and store packed minimum
-// values in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_m_pminsw
-#define _m_pminsw(a, b) _mm_min_pi16(a, b)
-
-// Compare packed unsigned 8-bit integers in a and b, and store packed minimum
-// values in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_m_pminub
-#define _m_pminub(a, b) _mm_min_pu8(a, b)
-
-// Computes the average of the 16 unsigned 8-bit integers in a and the 16
-// unsigned 8-bit integers in b and rounds.
-//
-// r0 := (a0 + b0) / 2
-// r1 := (a1 + b1) / 2
-// ...
-// r15 := (a15 + b15) / 2
-//
-// https://msdn.microsoft.com/en-us/library/vstudio/8zwh554a(v%3dvs.90).aspx
-FORCE_INLINE __m128i _mm_avg_epu8(__m128i a, __m128i b)
+// Compare the lower double-precision (64-bit) floating-point elements in a and
+// b for not-less-than, store the result in the lower element of dst, and copy
+// the upper element from a to the upper element of dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpnlt_sd
+FORCE_INLINE __m128d _mm_cmpnlt_sd(__m128d a, __m128d b)
{
- return vreinterpretq_m128i_u8(
- vrhaddq_u8(vreinterpretq_u8_m128i(a), vreinterpretq_u8_m128i(b)));
+ return _mm_move_sd(a, _mm_cmpnlt_pd(a, b));
}
-// Computes the average of the 8 unsigned 16-bit integers in a and the 8
-// unsigned 16-bit integers in b and rounds.
-//
-// r0 := (a0 + b0) / 2
-// r1 := (a1 + b1) / 2
-// ...
-// r7 := (a7 + b7) / 2
-//
-// https://msdn.microsoft.com/en-us/library/vstudio/y13ca3c8(v=vs.90).aspx
-FORCE_INLINE __m128i _mm_avg_epu16(__m128i a, __m128i b)
+// Compare packed double-precision (64-bit) floating-point elements in a and b
+// to see if neither is NaN, and store the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpord_pd
+FORCE_INLINE __m128d _mm_cmpord_pd(__m128d a, __m128d b)
{
- return (__m128i) vrhaddq_u16(vreinterpretq_u16_m128i(a),
- vreinterpretq_u16_m128i(b));
+#if defined(__aarch64__)
+    // A floating-point value compares equal to itself unless it is NaN, so
+    // AND-ing the two self-comparisons marks lanes where neither input is NaN.
+ uint64x2_t not_nan_a =
+ vceqq_f64(vreinterpretq_f64_m128d(a), vreinterpretq_f64_m128d(a));
+ uint64x2_t not_nan_b =
+ vceqq_f64(vreinterpretq_f64_m128d(b), vreinterpretq_f64_m128d(b));
+ return vreinterpretq_m128d_u64(vandq_u64(not_nan_a, not_nan_b));
+#else
+ uint64_t a0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(a));
+ uint64_t a1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(a));
+ uint64_t b0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(b));
+ uint64_t b1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(b));
+ uint64_t d[2];
+ d[0] = ((*(double *) &a0) == (*(double *) &a0) &&
+ (*(double *) &b0) == (*(double *) &b0))
+ ? ~UINT64_C(0)
+ : UINT64_C(0);
+ d[1] = ((*(double *) &a1) == (*(double *) &a1) &&
+ (*(double *) &b1) == (*(double *) &b1))
+ ? ~UINT64_C(0)
+ : UINT64_C(0);
+
+ return vreinterpretq_m128d_u64(vld1q_u64(d));
+#endif
}
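+
+// Illustrative usage sketch (not part of the emulated SSE API):
+// _mm_cmpord_pd() yields all ones only in lanes where neither input is NaN,
+// which makes it convenient for scrubbing NaNs out of a vector. The helper
+// name is hypothetical; _mm_and_pd() is assumed to be in scope here.
+FORCE_INLINE __m128d sse2neon_example_zero_nan_pd(__m128d a)
+{
+    // NaN lanes get an all-zero mask and are forced to +0.0; ordinary lanes
+    // pass through unchanged.
+    return _mm_and_pd(a, _mm_cmpord_pd(a, a));
+}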
-// Adds the four single-precision, floating-point values of a and b.
-//
-// r0 := a0 + b0
-// r1 := a1 + b1
-// r2 := a2 + b2
-// r3 := a3 + b3
-//
-// https://msdn.microsoft.com/en-us/library/vstudio/c9848chc(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_add_ps(__m128 a, __m128 b)
+// Compare the lower double-precision (64-bit) floating-point elements in a and
+// b to see if neither is NaN, store the result in the lower element of dst, and
+// copy the upper element from a to the upper element of dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpord_sd
+FORCE_INLINE __m128d _mm_cmpord_sd(__m128d a, __m128d b)
{
- return vreinterpretq_m128_f32(
- vaddq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
+#if defined(__aarch64__)
+ return _mm_move_sd(a, _mm_cmpord_pd(a, b));
+#else
+ uint64_t a0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(a));
+ uint64_t b0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(b));
+ uint64_t a1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(a));
+ uint64_t d[2];
+ d[0] = ((*(double *) &a0) == (*(double *) &a0) &&
+ (*(double *) &b0) == (*(double *) &b0))
+ ? ~UINT64_C(0)
+ : UINT64_C(0);
+ d[1] = a1;
+
+ return vreinterpretq_m128d_u64(vld1q_u64(d));
+#endif
}
-// Add packed double-precision (64-bit) floating-point elements in a and b, and
-// store the results in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_add_pd
-FORCE_INLINE __m128d _mm_add_pd(__m128d a, __m128d b)
+// Compare packed double-precision (64-bit) floating-point elements in a and b
+// to see if either is NaN, and store the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpunord_pd
+FORCE_INLINE __m128d _mm_cmpunord_pd(__m128d a, __m128d b)
{
#if defined(__aarch64__)
- return vreinterpretq_m128d_f64(
- vaddq_f64(vreinterpretq_f64_m128d(a), vreinterpretq_f64_m128d(b)));
+    // A NaN never compares equal to itself, so the self-comparisons below are
+    // zero exactly in the NaN lanes.
+ uint64x2_t not_nan_a =
+ vceqq_f64(vreinterpretq_f64_m128d(a), vreinterpretq_f64_m128d(a));
+ uint64x2_t not_nan_b =
+ vceqq_f64(vreinterpretq_f64_m128d(b), vreinterpretq_f64_m128d(b));
+ return vreinterpretq_m128d_s32(
+ vmvnq_s32(vreinterpretq_s32_u64(vandq_u64(not_nan_a, not_nan_b))));
#else
- double *da = (double *) &a;
- double *db = (double *) &b;
- double c[2];
- c[0] = da[0] + db[0];
- c[1] = da[1] + db[1];
- return vld1q_f32((float32_t *) c);
+ uint64_t a0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(a));
+ uint64_t a1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(a));
+ uint64_t b0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(b));
+ uint64_t b1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(b));
+ uint64_t d[2];
+ d[0] = ((*(double *) &a0) == (*(double *) &a0) &&
+ (*(double *) &b0) == (*(double *) &b0))
+ ? UINT64_C(0)
+ : ~UINT64_C(0);
+ d[1] = ((*(double *) &a1) == (*(double *) &a1) &&
+ (*(double *) &b1) == (*(double *) &b1))
+ ? UINT64_C(0)
+ : ~UINT64_C(0);
+
+ return vreinterpretq_m128d_u64(vld1q_u64(d));
#endif
}
-// Add the lower double-precision (64-bit) floating-point element in a and b,
-// store the result in the lower element of dst, and copy the upper element from
-// a to the upper element of dst.
-//
-// dst[63:0] := a[63:0] + b[63:0]
-// dst[127:64] := a[127:64]
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_add_sd
-FORCE_INLINE __m128d _mm_add_sd(__m128d a, __m128d b)
+// Compare the lower double-precision (64-bit) floating-point elements in a and
+// b to see if either is NaN, store the result in the lower element of dst, and
+// copy the upper element from a to the upper element of dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpunord_sd
+FORCE_INLINE __m128d _mm_cmpunord_sd(__m128d a, __m128d b)
{
#if defined(__aarch64__)
- return _mm_move_sd(a, _mm_add_pd(a, b));
+ return _mm_move_sd(a, _mm_cmpunord_pd(a, b));
#else
- double *da = (double *) &a;
- double *db = (double *) &b;
- double c[2];
- c[0] = da[0] + db[0];
- c[1] = da[1];
- return vld1q_f32((float32_t *) c);
+ uint64_t a0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(a));
+ uint64_t b0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(b));
+ uint64_t a1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(a));
+ uint64_t d[2];
+ d[0] = ((*(double *) &a0) == (*(double *) &a0) &&
+ (*(double *) &b0) == (*(double *) &b0))
+ ? UINT64_C(0)
+ : ~UINT64_C(0);
+ d[1] = a1;
+
+ return vreinterpretq_m128d_u64(vld1q_u64(d));
#endif
}
-// Add 64-bit integers a and b, and store the result in dst.
-//
-// dst[63:0] := a[63:0] + b[63:0]
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_add_si64
-FORCE_INLINE __m64 _mm_add_si64(__m64 a, __m64 b)
+// Compare the lower double-precision (64-bit) floating-point element in a and b
+// for greater-than-or-equal, and return the boolean result (0 or 1).
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_comige_sd
+FORCE_INLINE int _mm_comige_sd(__m128d a, __m128d b)
{
- return vreinterpret_m64_s64(
- vadd_s64(vreinterpret_s64_m64(a), vreinterpret_s64_m64(b)));
-}
+#if defined(__aarch64__)
+ return vgetq_lane_u64(vcgeq_f64(a, b), 0) & 0x1;
+#else
+ uint64_t a0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(a));
+ uint64_t b0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(b));
-// adds the scalar single-precision floating point values of a and b.
-// https://msdn.microsoft.com/en-us/library/be94x2y6(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_add_ss(__m128 a, __m128 b)
-{
- float32_t b0 = vgetq_lane_f32(vreinterpretq_f32_m128(b), 0);
- float32x4_t value = vsetq_lane_f32(b0, vdupq_n_f32(0), 0);
- // the upper values in the result must be the remnants of <a>.
- return vreinterpretq_m128_f32(vaddq_f32(a, value));
+ return (*(double *) &a0 >= *(double *) &b0);
+#endif
}
-// Adds the 2 signed or unsigned 64-bit integers in a to the 2 signed or
-// unsigned 64-bit integers in b.
-// https://msdn.microsoft.com/en-us/library/vstudio/09xs4fkk(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_add_epi64(__m128i a, __m128i b)
+// Compare the lower double-precision (64-bit) floating-point element in a and b
+// for greater-than, and return the boolean result (0 or 1).
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_comigt_sd
+FORCE_INLINE int _mm_comigt_sd(__m128d a, __m128d b)
{
- return vreinterpretq_m128i_s64(
- vaddq_s64(vreinterpretq_s64_m128i(a), vreinterpretq_s64_m128i(b)));
+#if defined(__aarch64__)
+ return vgetq_lane_u64(vcgtq_f64(a, b), 0) & 0x1;
+#else
+ uint64_t a0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(a));
+ uint64_t b0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(b));
+
+ return (*(double *) &a0 > *(double *) &b0);
+#endif
}
-// Adds the 4 signed or unsigned 32-bit integers in a to the 4 signed or
-// unsigned 32-bit integers in b.
-//
-// r0 := a0 + b0
-// r1 := a1 + b1
-// r2 := a2 + b2
-// r3 := a3 + b3
-//
-// https://msdn.microsoft.com/en-us/library/vstudio/09xs4fkk(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_add_epi32(__m128i a, __m128i b)
+// Compare the lower double-precision (64-bit) floating-point element in a and b
+// for less-than-or-equal, and return the boolean result (0 or 1).
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_comile_sd
+FORCE_INLINE int _mm_comile_sd(__m128d a, __m128d b)
{
- return vreinterpretq_m128i_s32(
- vaddq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
+#if defined(__aarch64__)
+ return vgetq_lane_u64(vcleq_f64(a, b), 0) & 0x1;
+#else
+ uint64_t a0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(a));
+ uint64_t b0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(b));
+
+ return (*(double *) &a0 <= *(double *) &b0);
+#endif
}
-// Adds the 8 signed or unsigned 16-bit integers in a to the 8 signed or
-// unsigned 16-bit integers in b.
-// https://msdn.microsoft.com/en-us/library/fceha5k4(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_add_epi16(__m128i a, __m128i b)
+// Compare the lower double-precision (64-bit) floating-point element in a and b
+// for less-than, and return the boolean result (0 or 1).
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_comilt_sd
+FORCE_INLINE int _mm_comilt_sd(__m128d a, __m128d b)
{
- return vreinterpretq_m128i_s16(
- vaddq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
+#if defined(__aarch64__)
+ return vgetq_lane_u64(vcltq_f64(a, b), 0) & 0x1;
+#else
+ uint64_t a0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(a));
+ uint64_t b0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(b));
+
+ return (*(double *) &a0 < *(double *) &b0);
+#endif
}
-// Adds the 16 signed or unsigned 8-bit integers in a to the 16 signed or
-// unsigned 8-bit integers in b.
-// https://technet.microsoft.com/en-us/subscriptions/yc7tcyzs(v=vs.90)
-FORCE_INLINE __m128i _mm_add_epi8(__m128i a, __m128i b)
+// Compare the lower double-precision (64-bit) floating-point element in a and b
+// for equality, and return the boolean result (0 or 1).
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_comieq_sd
+FORCE_INLINE int _mm_comieq_sd(__m128d a, __m128d b)
{
- return vreinterpretq_m128i_s8(
- vaddq_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b)));
+#if defined(__aarch64__)
+ return vgetq_lane_u64(vceqq_f64(a, b), 0) & 0x1;
+#else
+ uint32x4_t a_not_nan =
+ vceqq_u32(vreinterpretq_u32_m128d(a), vreinterpretq_u32_m128d(a));
+ uint32x4_t b_not_nan =
+ vceqq_u32(vreinterpretq_u32_m128d(b), vreinterpretq_u32_m128d(b));
+ uint32x4_t a_and_b_not_nan = vandq_u32(a_not_nan, b_not_nan);
+ uint32x4_t a_eq_b =
+ vceqq_u32(vreinterpretq_u32_m128d(a), vreinterpretq_u32_m128d(b));
+ uint64x2_t and_results = vandq_u64(vreinterpretq_u64_u32(a_and_b_not_nan),
+ vreinterpretq_u64_u32(a_eq_b));
+ return vgetq_lane_u64(and_results, 0) & 0x1;
+#endif
}
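+
+// Illustrative usage sketch (not part of the emulated SSE API): the
+// _mm_comi*_sd() helpers return plain ints, so they slot directly into scalar
+// control flow. The helper name is hypothetical and only uses comparisons
+// defined above.
+FORCE_INLINE int sse2neon_example_in_range_sd(__m128d x, __m128d lo, __m128d hi)
+{
+    // 1 when lo <= x <= hi (judged on the low lanes only), otherwise 0.
+    return _mm_comige_sd(x, lo) && _mm_comile_sd(x, hi);
+}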
-// Adds the 8 signed 16-bit integers in a to the 8 signed 16-bit integers in b
-// and saturates.
-//
-// r0 := SignedSaturate(a0 + b0)
-// r1 := SignedSaturate(a1 + b1)
-// ...
-// r7 := SignedSaturate(a7 + b7)
-//
-// https://msdn.microsoft.com/en-us/library/1a306ef8(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_adds_epi16(__m128i a, __m128i b)
+// Compare the lower double-precision (64-bit) floating-point element in a and b
+// for not-equal, and return the boolean result (0 or 1).
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_comineq_sd
+FORCE_INLINE int _mm_comineq_sd(__m128d a, __m128d b)
{
- return vreinterpretq_m128i_s16(
- vqaddq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
+ return !_mm_comieq_sd(a, b);
}
-// Add packed signed 8-bit integers in a and b using saturation, and store the
-// results in dst.
+// Convert packed signed 32-bit integers in a to packed double-precision
+// (64-bit) floating-point elements, and store the results in dst.
//
-// FOR j := 0 to 15
-// i := j*8
-// dst[i+7:i] := Saturate8( a[i+7:i] + b[i+7:i] )
+// FOR j := 0 to 1
+// i := j*32
+// m := j*64
+// dst[m+63:m] := Convert_Int32_To_FP64(a[i+31:i])
// ENDFOR
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_adds_epi8
-FORCE_INLINE __m128i _mm_adds_epi8(__m128i a, __m128i b)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtepi32_pd
+FORCE_INLINE __m128d _mm_cvtepi32_pd(__m128i a)
{
- return vreinterpretq_m128i_s8(
- vqaddq_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b)));
+#if defined(__aarch64__)
+ return vreinterpretq_m128d_f64(
+ vcvtq_f64_s64(vmovl_s32(vget_low_s32(vreinterpretq_s32_m128i(a)))));
+#else
+ double a0 = (double) vgetq_lane_s32(vreinterpretq_s32_m128i(a), 0);
+ double a1 = (double) vgetq_lane_s32(vreinterpretq_s32_m128i(a), 1);
+ return _mm_set_pd(a1, a0);
+#endif
}
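+
+// Illustrative usage sketch (not part of the emulated SSE API): only the two
+// low 32-bit lanes take part in the conversion; lanes 2 and 3 of the source
+// are ignored. The helper name is hypothetical; _mm_set_epi32() is defined
+// earlier in this header.
+FORCE_INLINE __m128d sse2neon_example_cvt_low_pair_pd(void)
+{
+    // Produces { -7.0, 42.0 } (low lane first); the 100 and 200 lanes are
+    // discarded by the conversion.
+    return _mm_cvtepi32_pd(_mm_set_epi32(200, 100, 42, -7));
+}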
-// Adds the 16 unsigned 8-bit integers in a to the 16 unsigned 8-bit integers in
-// b and saturates.
-// https://msdn.microsoft.com/en-us/library/9hahyddy(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_adds_epu8(__m128i a, __m128i b)
+// Converts the four signed 32-bit integer values of a to single-precision,
+// floating-point values
+// https://msdn.microsoft.com/en-us/library/vstudio/36bwxcx5(v=vs.100).aspx
+FORCE_INLINE __m128 _mm_cvtepi32_ps(__m128i a)
{
- return vreinterpretq_m128i_u8(
- vqaddq_u8(vreinterpretq_u8_m128i(a), vreinterpretq_u8_m128i(b)));
+ return vreinterpretq_m128_f32(vcvtq_f32_s32(vreinterpretq_s32_m128i(a)));
}
-// Multiplies the 8 signed or unsigned 16-bit integers from a by the 8 signed or
-// unsigned 16-bit integers from b.
+// Convert packed double-precision (64-bit) floating-point elements in a to
+// packed 32-bit integers, and store the results in dst.
//
-// r0 := (a0 * b0)[15:0]
-// r1 := (a1 * b1)[15:0]
-// ...
-// r7 := (a7 * b7)[15:0]
+// FOR j := 0 to 1
+// i := 32*j
+// k := 64*j
+// dst[i+31:i] := Convert_FP64_To_Int32(a[k+63:k])
+// ENDFOR
//
-// https://msdn.microsoft.com/en-us/library/vstudio/9ks1472s(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_mullo_epi16(__m128i a, __m128i b)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtpd_epi32
+FORCE_INLINE __m128i _mm_cvtpd_epi32(__m128d a)
{
- return vreinterpretq_m128i_s16(
- vmulq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
+ __m128d rnd = _mm_round_pd(a, _MM_FROUND_CUR_DIRECTION);
+ double d0 = ((double *) &rnd)[0];
+ double d1 = ((double *) &rnd)[1];
+ return _mm_set_epi32(0, 0, (int32_t) d1, (int32_t) d0);
}
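+
+// Illustrative usage sketch (not part of the emulated SSE API):
+// _mm_cvtepi32_pd() and _mm_cvtpd_epi32() round-trip the two low 32-bit lanes,
+// and the upper two lanes of the result come back as zero, matching SSE2. The
+// helper name is hypothetical.
+FORCE_INLINE __m128i sse2neon_example_roundtrip_epi32(__m128i a)
+{
+    // a0 and a1 survive the int -> double -> int round trip exactly; a2 and a3
+    // are dropped and replaced with zeros.
+    return _mm_cvtpd_epi32(_mm_cvtepi32_pd(a));
+}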
-// Multiplies the 4 signed or unsigned 32-bit integers from a by the 4 signed or
-// unsigned 32-bit integers from b.
-// https://msdn.microsoft.com/en-us/library/vstudio/bb531409(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_mullo_epi32(__m128i a, __m128i b)
+// Convert packed double-precision (64-bit) floating-point elements in a to
+// packed 32-bit integers, and store the results in dst.
+//
+// FOR j := 0 to 1
+// i := 32*j
+// k := 64*j
+// dst[i+31:i] := Convert_FP64_To_Int32(a[k+63:k])
+// ENDFOR
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtpd_pi32
+FORCE_INLINE __m64 _mm_cvtpd_pi32(__m128d a)
{
- return vreinterpretq_m128i_s32(
- vmulq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
+ __m128d rnd = _mm_round_pd(a, _MM_FROUND_CUR_DIRECTION);
+ double d0 = ((double *) &rnd)[0];
+ double d1 = ((double *) &rnd)[1];
+ int32_t ALIGN_STRUCT(16) data[2] = {(int32_t) d0, (int32_t) d1};
+ return vreinterpret_m64_s32(vld1_s32(data));
}
-// Multiply the packed unsigned 16-bit integers in a and b, producing
-// intermediate 32-bit integers, and store the high 16 bits of the intermediate
-// integers in dst.
+// Convert packed double-precision (64-bit) floating-point elements in a to
+// packed single-precision (32-bit) floating-point elements, and store the
+// results in dst.
//
-// FOR j := 0 to 3
-// i := j*16
-// tmp[31:0] := a[i+15:i] * b[i+15:i]
-// dst[i+15:i] := tmp[31:16]
+// FOR j := 0 to 1
+// i := 32*j
+// k := 64*j
+//   dst[i+31:i] := Convert_FP64_To_FP32(a[k+63:k])
// ENDFOR
+// dst[127:64] := 0
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_m_pmulhuw
-#define _m_pmulhuw(a, b) _mm_mulhi_pu16(a, b)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtpd_ps
+FORCE_INLINE __m128 _mm_cvtpd_ps(__m128d a)
+{
+#if defined(__aarch64__)
+ float32x2_t tmp = vcvt_f32_f64(vreinterpretq_f64_m128d(a));
+ return vreinterpretq_m128_f32(vcombine_f32(tmp, vdup_n_f32(0)));
+#else
+ float a0 = (float) ((double *) &a)[0];
+ float a1 = (float) ((double *) &a)[1];
+ return _mm_set_ps(0, 0, a1, a0);
+#endif
+}
-// Multiplies the four single-precision, floating-point values of a and b.
+// Convert packed signed 32-bit integers in a to packed double-precision
+// (64-bit) floating-point elements, and store the results in dst.
//
-// r0 := a0 * b0
-// r1 := a1 * b1
-// r2 := a2 * b2
-// r3 := a3 * b3
+// FOR j := 0 to 1
+// i := j*32
+// m := j*64
+// dst[m+63:m] := Convert_Int32_To_FP64(a[i+31:i])
+// ENDFOR
//
-// https://msdn.microsoft.com/en-us/library/vstudio/22kbk6t9(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_mul_ps(__m128 a, __m128 b)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtpi32_pd
+FORCE_INLINE __m128d _mm_cvtpi32_pd(__m64 a)
{
- return vreinterpretq_m128_f32(
- vmulq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
+#if defined(__aarch64__)
+ return vreinterpretq_m128d_f64(
+ vcvtq_f64_s64(vmovl_s32(vreinterpret_s32_m64(a))));
+#else
+ double a0 = (double) vget_lane_s32(vreinterpret_s32_m64(a), 0);
+ double a1 = (double) vget_lane_s32(vreinterpret_s32_m64(a), 1);
+ return _mm_set_pd(a1, a0);
+#endif
}
-// Multiply packed double-precision (64-bit) floating-point elements in a and b,
-// and store the results in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_mul_pd
-FORCE_INLINE __m128d _mm_mul_pd(__m128d a, __m128d b)
+// Converts the four single-precision, floating-point values of a to signed
+// 32-bit integer values.
+//
+// r0 := (int) a0
+// r1 := (int) a1
+// r2 := (int) a2
+// r3 := (int) a3
+//
+// https://msdn.microsoft.com/en-us/library/vstudio/xdc42k5e(v=vs.100).aspx
+// *NOTE*. The default rounding mode on SSE is 'round to even', which ARMv7-A
+// does not support! It is supported on ARMv8-A however.
+FORCE_INLINE __m128i _mm_cvtps_epi32(__m128 a)
{
#if defined(__aarch64__)
- return vreinterpretq_m128d_f64(
- vmulq_f64(vreinterpretq_f64_m128d(a), vreinterpretq_f64_m128d(b)));
+ switch (_MM_GET_ROUNDING_MODE()) {
+ case _MM_ROUND_NEAREST:
+ return vreinterpretq_m128i_s32(vcvtnq_s32_f32(a));
+ case _MM_ROUND_DOWN:
+ return vreinterpretq_m128i_s32(vcvtmq_s32_f32(a));
+ case _MM_ROUND_UP:
+ return vreinterpretq_m128i_s32(vcvtpq_s32_f32(a));
+ default: // _MM_ROUND_TOWARD_ZERO
+ return vreinterpretq_m128i_s32(vcvtq_s32_f32(a));
+ }
#else
- double *da = (double *) &a;
- double *db = (double *) &b;
- double c[2];
- c[0] = da[0] * db[0];
- c[1] = da[1] * db[1];
- return vld1q_f32((float32_t *) c);
+ float *f = (float *) &a;
+ switch (_MM_GET_ROUNDING_MODE()) {
+ case _MM_ROUND_NEAREST: {
+ uint32x4_t signmask = vdupq_n_u32(0x80000000);
+ float32x4_t half = vbslq_f32(signmask, vreinterpretq_f32_m128(a),
+ vdupq_n_f32(0.5f)); /* +/- 0.5 */
+ int32x4_t r_normal = vcvtq_s32_f32(vaddq_f32(
+ vreinterpretq_f32_m128(a), half)); /* round to integer: [a + 0.5]*/
+ int32x4_t r_trunc = vcvtq_s32_f32(
+ vreinterpretq_f32_m128(a)); /* truncate to integer: [a] */
+ int32x4_t plusone = vreinterpretq_s32_u32(vshrq_n_u32(
+ vreinterpretq_u32_s32(vnegq_s32(r_trunc)), 31)); /* 1 or 0 */
+ int32x4_t r_even = vbicq_s32(vaddq_s32(r_trunc, plusone),
+ vdupq_n_s32(1)); /* ([a] + {0,1}) & ~1 */
+ float32x4_t delta = vsubq_f32(
+ vreinterpretq_f32_m128(a),
+ vcvtq_f32_s32(r_trunc)); /* compute delta: delta = (a - [a]) */
+ uint32x4_t is_delta_half =
+ vceqq_f32(delta, half); /* delta == +/- 0.5 */
+ return vreinterpretq_m128i_s32(
+ vbslq_s32(is_delta_half, r_even, r_normal));
+ }
+ case _MM_ROUND_DOWN:
+ return _mm_set_epi32(floorf(f[3]), floorf(f[2]), floorf(f[1]),
+ floorf(f[0]));
+ case _MM_ROUND_UP:
+ return _mm_set_epi32(ceilf(f[3]), ceilf(f[2]), ceilf(f[1]),
+ ceilf(f[0]));
+ default: // _MM_ROUND_TOWARD_ZERO
+ return _mm_set_epi32((int32_t) f[3], (int32_t) f[2], (int32_t) f[1],
+ (int32_t) f[0]);
+ }
#endif
}
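+
+// Usage sketch (illustrative; the helper name is hypothetical): under the
+// default _MM_ROUND_NEAREST mode halfway cases round to even, so 2.5f -> 2
+// while 3.5f -> 4.
+FORCE_INLINE int32_t sse2neon_example_cvtps_epi32(void)
+{
+ __m128 v = _mm_set_ps(0.5f, 1.5f, 2.5f, 3.5f); // lanes {3.5, 2.5, 1.5, 0.5}
+ __m128i r = _mm_cvtps_epi32(v); // lanes {4, 2, 2, 0}
+ return vgetq_lane_s32(vreinterpretq_s32_m128i(r), 0); // 4
+}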
-// Multiply the lower double-precision (64-bit) floating-point element in a and
-// b, store the result in the lower element of dst, and copy the upper element
-// from a to the upper element of dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=mm_mul_sd
-FORCE_INLINE __m128d _mm_mul_sd(__m128d a, __m128d b)
+// Convert packed single-precision (32-bit) floating-point elements in a to
+// packed double-precision (64-bit) floating-point elements, and store the
+// results in dst.
+//
+// FOR j := 0 to 1
+// i := 64*j
+// k := 32*j
+// dst[i+63:i] := Convert_FP32_To_FP64(a[k+31:k])
+// ENDFOR
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtps_pd
+FORCE_INLINE __m128d _mm_cvtps_pd(__m128 a)
{
- return _mm_move_sd(a, _mm_mul_pd(a, b));
+#if defined(__aarch64__)
+ return vreinterpretq_m128d_f64(
+ vcvt_f64_f32(vget_low_f32(vreinterpretq_f32_m128(a))));
+#else
+ double a0 = (double) vgetq_lane_f32(vreinterpretq_f32_m128(a), 0);
+ double a1 = (double) vgetq_lane_f32(vreinterpretq_f32_m128(a), 1);
+ return _mm_set_pd(a1, a0);
+#endif
}
-// Multiply the lower single-precision (32-bit) floating-point element in a and
-// b, store the result in the lower element of dst, and copy the upper 3 packed
-// elements from a to the upper elements of dst.
+// Copy the lower double-precision (64-bit) floating-point element of a to dst.
//
-// dst[31:0] := a[31:0] * b[31:0]
-// dst[127:32] := a[127:32]
+// dst[63:0] := a[63:0]
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_mul_ss
-FORCE_INLINE __m128 _mm_mul_ss(__m128 a, __m128 b)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtsd_f64
+FORCE_INLINE double _mm_cvtsd_f64(__m128d a)
{
- return _mm_move_ss(a, _mm_mul_ps(a, b));
+#if defined(__aarch64__)
+ return (double) vgetq_lane_f64(vreinterpretq_f64_m128d(a), 0);
+#else
+ return ((double *) &a)[0];
+#endif
}
-// Multiply the low unsigned 32-bit integers from each packed 64-bit element in
-// a and b, and store the unsigned 64-bit results in dst.
+// Convert the lower double-precision (64-bit) floating-point element in a to a
+// 32-bit integer, and store the result in dst.
//
-// r0 := (a0 & 0xFFFFFFFF) * (b0 & 0xFFFFFFFF)
-// r1 := (a2 & 0xFFFFFFFF) * (b2 & 0xFFFFFFFF)
-FORCE_INLINE __m128i _mm_mul_epu32(__m128i a, __m128i b)
+// dst[31:0] := Convert_FP64_To_Int32(a[63:0])
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtsd_si32
+FORCE_INLINE int32_t _mm_cvtsd_si32(__m128d a)
{
- // vmull_u32 upcasts instead of masking, so we downcast.
- uint32x2_t a_lo = vmovn_u64(vreinterpretq_u64_m128i(a));
- uint32x2_t b_lo = vmovn_u64(vreinterpretq_u64_m128i(b));
- return vreinterpretq_m128i_u64(vmull_u32(a_lo, b_lo));
+#if defined(__aarch64__)
+ return (int32_t) vgetq_lane_f64(vrndiq_f64(vreinterpretq_f64_m128d(a)), 0);
+#else
+ __m128d rnd = _mm_round_pd(a, _MM_FROUND_CUR_DIRECTION);
+ double ret = ((double *) &rnd)[0];
+ return (int32_t) ret;
+#endif
}
-// Multiply the low unsigned 32-bit integers from a and b, and store the
-// unsigned 64-bit result in dst.
+// Convert the lower double-precision (64-bit) floating-point element in a to a
+// 64-bit integer, and store the result in dst.
//
-// dst[63:0] := a[31:0] * b[31:0]
+// dst[63:0] := Convert_FP64_To_Int64(a[63:0])
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_mul_su32
-FORCE_INLINE __m64 _mm_mul_su32(__m64 a, __m64 b)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtsd_si64
+FORCE_INLINE int64_t _mm_cvtsd_si64(__m128d a)
{
- return vreinterpret_m64_u64(vget_low_u64(
- vmull_u32(vreinterpret_u32_m64(a), vreinterpret_u32_m64(b))));
+#if defined(__aarch64__)
+ return (int64_t) vgetq_lane_f64(vrndiq_f64(vreinterpretq_f64_m128d(a)), 0);
+#else
+ __m128d rnd = _mm_round_pd(a, _MM_FROUND_CUR_DIRECTION);
+ double ret = ((double *) &rnd)[0];
+ return (int64_t) ret;
+#endif
}
-// Multiply the low signed 32-bit integers from each packed 64-bit element in
-// a and b, and store the signed 64-bit results in dst.
+// Convert the lower double-precision (64-bit) floating-point element in a to a
+// 64-bit integer, and store the result in dst.
//
-// r0 := (int64_t)(int32_t)a0 * (int64_t)(int32_t)b0
-// r1 := (int64_t)(int32_t)a2 * (int64_t)(int32_t)b2
-FORCE_INLINE __m128i _mm_mul_epi32(__m128i a, __m128i b)
+// dst[63:0] := Convert_FP64_To_Int64(a[63:0])
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtsd_si64x
+#define _mm_cvtsd_si64x _mm_cvtsd_si64
+
+// Convert the lower double-precision (64-bit) floating-point element in b to a
+// single-precision (32-bit) floating-point element, store the result in the
+// lower element of dst, and copy the upper 3 packed elements from a to the
+// upper elements of dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtsd_ss
+FORCE_INLINE __m128 _mm_cvtsd_ss(__m128 a, __m128d b)
{
- // vmull_s32 upcasts instead of masking, so we downcast.
- int32x2_t a_lo = vmovn_s64(vreinterpretq_s64_m128i(a));
- int32x2_t b_lo = vmovn_s64(vreinterpretq_s64_m128i(b));
- return vreinterpretq_m128i_s64(vmull_s32(a_lo, b_lo));
+#if defined(__aarch64__)
+ return vreinterpretq_m128_f32(vsetq_lane_f32(
+ vget_lane_f32(vcvt_f32_f64(vreinterpretq_f64_m128d(b)), 0),
+ vreinterpretq_f32_m128(a), 0));
+#else
+ return vreinterpretq_m128_f32(vsetq_lane_f32((float) ((double *) &b)[0],
+ vreinterpretq_f32_m128(a), 0));
+#endif
}
-// Multiplies the 8 signed 16-bit integers from a by the 8 signed 16-bit
-// integers from b.
+// Copy the lower 32-bit integer in a to dst.
//
-// r0 := (a0 * b0) + (a1 * b1)
-// r1 := (a2 * b2) + (a3 * b3)
-// r2 := (a4 * b4) + (a5 * b5)
-// r3 := (a6 * b6) + (a7 * b7)
-// https://msdn.microsoft.com/en-us/library/yht36sa6(v=vs.90).aspx
-FORCE_INLINE __m128i _mm_madd_epi16(__m128i a, __m128i b)
+// dst[31:0] := a[31:0]
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtsi128_si32
+FORCE_INLINE int _mm_cvtsi128_si32(__m128i a)
{
- int32x4_t low = vmull_s16(vget_low_s16(vreinterpretq_s16_m128i(a)),
- vget_low_s16(vreinterpretq_s16_m128i(b)));
- int32x4_t high = vmull_s16(vget_high_s16(vreinterpretq_s16_m128i(a)),
- vget_high_s16(vreinterpretq_s16_m128i(b)));
-
- int32x2_t low_sum = vpadd_s32(vget_low_s32(low), vget_high_s32(low));
- int32x2_t high_sum = vpadd_s32(vget_low_s32(high), vget_high_s32(high));
-
- return vreinterpretq_m128i_s32(vcombine_s32(low_sum, high_sum));
+ return vgetq_lane_s32(vreinterpretq_s32_m128i(a), 0);
}
-// Multiply packed signed 16-bit integers in a and b, producing intermediate
-// signed 32-bit integers. Shift right by 15 bits while rounding up, and store
-// the packed 16-bit integers in dst.
+// Copy the lower 64-bit integer in a to dst.
//
-// r0 := Round(((int32_t)a0 * (int32_t)b0) >> 15)
-// r1 := Round(((int32_t)a1 * (int32_t)b1) >> 15)
-// r2 := Round(((int32_t)a2 * (int32_t)b2) >> 15)
-// ...
-// r7 := Round(((int32_t)a7 * (int32_t)b7) >> 15)
-FORCE_INLINE __m128i _mm_mulhrs_epi16(__m128i a, __m128i b)
+// dst[63:0] := a[63:0]
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtsi128_si64
+FORCE_INLINE int64_t _mm_cvtsi128_si64(__m128i a)
{
- // Has issues due to saturation
- // return vreinterpretq_m128i_s16(vqrdmulhq_s16(a, b));
-
- // Multiply
- int32x4_t mul_lo = vmull_s16(vget_low_s16(vreinterpretq_s16_m128i(a)),
- vget_low_s16(vreinterpretq_s16_m128i(b)));
- int32x4_t mul_hi = vmull_s16(vget_high_s16(vreinterpretq_s16_m128i(a)),
- vget_high_s16(vreinterpretq_s16_m128i(b)));
-
- // Rounding narrowing shift right
- // narrow = (int16_t)((mul + 16384) >> 15);
- int16x4_t narrow_lo = vrshrn_n_s32(mul_lo, 15);
- int16x4_t narrow_hi = vrshrn_n_s32(mul_hi, 15);
-
- // Join together
- return vreinterpretq_m128i_s16(vcombine_s16(narrow_lo, narrow_hi));
+ return vgetq_lane_s64(vreinterpretq_s64_m128i(a), 0);
}
-// Vertically multiply each unsigned 8-bit integer from a with the corresponding
-// signed 8-bit integer from b, producing intermediate signed 16-bit integers.
-// Horizontally add adjacent pairs of intermediate signed 16-bit integers,
-// and pack the saturated results in dst.
-//
-// FOR j := 0 to 7
-// i := j*16
-// dst[i+15:i] := Saturate_To_Int16( a[i+15:i+8]*b[i+15:i+8] +
-// a[i+7:i]*b[i+7:i] )
-// ENDFOR
-FORCE_INLINE __m128i _mm_maddubs_epi16(__m128i _a, __m128i _b)
+// Copy the lower 64-bit integer in a to dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtsi128_si64x
+#define _mm_cvtsi128_si64x(a) _mm_cvtsi128_si64(a)
+
+// Convert the signed 32-bit integer b to a double-precision (64-bit)
+// floating-point element, store the result in the lower element of dst, and
+// copy the upper element from a to the upper element of dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtsi32_sd
+FORCE_INLINE __m128d _mm_cvtsi32_sd(__m128d a, int32_t b)
{
#if defined(__aarch64__)
- uint8x16_t a = vreinterpretq_u8_m128i(_a);
- int8x16_t b = vreinterpretq_s8_m128i(_b);
- int16x8_t tl = vmulq_s16(vreinterpretq_s16_u16(vmovl_u8(vget_low_u8(a))),
- vmovl_s8(vget_low_s8(b)));
- int16x8_t th = vmulq_s16(vreinterpretq_s16_u16(vmovl_u8(vget_high_u8(a))),
- vmovl_s8(vget_high_s8(b)));
- return vreinterpretq_m128i_s16(
- vqaddq_s16(vuzp1q_s16(tl, th), vuzp2q_s16(tl, th)));
+ return vreinterpretq_m128d_f64(
+ vsetq_lane_f64((double) b, vreinterpretq_f64_m128d(a), 0));
#else
- // This would be much simpler if x86 would choose to zero extend OR sign
- // extend, not both. This could probably be optimized better.
- uint16x8_t a = vreinterpretq_u16_m128i(_a);
- int16x8_t b = vreinterpretq_s16_m128i(_b);
-
- // Zero extend a
- int16x8_t a_odd = vreinterpretq_s16_u16(vshrq_n_u16(a, 8));
- int16x8_t a_even = vreinterpretq_s16_u16(vbicq_u16(a, vdupq_n_u16(0xff00)));
-
- // Sign extend by shifting left then shifting right.
- int16x8_t b_even = vshrq_n_s16(vshlq_n_s16(b, 8), 8);
- int16x8_t b_odd = vshrq_n_s16(b, 8);
-
- // multiply
- int16x8_t prod1 = vmulq_s16(a_even, b_even);
- int16x8_t prod2 = vmulq_s16(a_odd, b_odd);
-
- // saturated add
- return vreinterpretq_m128i_s16(vqaddq_s16(prod1, prod2));
+ double bf = (double) b;
+ return vreinterpretq_m128d_s64(
+ vsetq_lane_s64(*(int64_t *) &bf, vreinterpretq_s64_m128d(a), 0));
#endif
}
-// Computes the fused multiple add product of 32-bit floating point numbers.
+// Copy the lower 64-bit integer in a to dst.
//
-// Return Value
-// Multiplies A and B, and adds C to the temporary result before returning it.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_fmadd
-FORCE_INLINE __m128 _mm_fmadd_ps(__m128 a, __m128 b, __m128 c)
+// dst[63:0] := a[63:0]
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtsi128_si64x
+#define _mm_cvtsi128_si64x(a) _mm_cvtsi128_si64(a)
+
+// Moves 32-bit integer a to the least significant 32 bits of an __m128 object,
+// zero extending the upper bits.
+//
+// r0 := a
+// r1 := 0x0
+// r2 := 0x0
+// r3 := 0x0
+//
+// https://msdn.microsoft.com/en-us/library/ct3539ha%28v=vs.90%29.aspx
+FORCE_INLINE __m128i _mm_cvtsi32_si128(int a)
+{
+ return vreinterpretq_m128i_s32(vsetq_lane_s32(a, vdupq_n_s32(0), 0));
+}
+
+// Convert the signed 64-bit integer b to a double-precision (64-bit)
+// floating-point element, store the result in the lower element of dst, and
+// copy the upper element from a to the upper element of dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtsi64_sd
+FORCE_INLINE __m128d _mm_cvtsi64_sd(__m128d a, int64_t b)
{
#if defined(__aarch64__)
- return vreinterpretq_m128_f32(vfmaq_f32(vreinterpretq_f32_m128(c),
- vreinterpretq_f32_m128(b),
- vreinterpretq_f32_m128(a)));
+ return vreinterpretq_m128d_f64(
+ vsetq_lane_f64((double) b, vreinterpretq_f64_m128d(a), 0));
#else
- return _mm_add_ps(_mm_mul_ps(a, b), c);
+ double bf = (double) b;
+ return vreinterpretq_m128d_s64(
+ vsetq_lane_s64(*(int64_t *) &bf, vreinterpretq_s64_m128d(a), 0));
#endif
}
-// Alternatively add and subtract packed single-precision (32-bit)
-// floating-point elements in a to/from packed elements in b, and store the
-// results in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=addsub_ps
-FORCE_INLINE __m128 _mm_addsub_ps(__m128 a, __m128 b)
+// Moves 64-bit integer a to the least significant 64 bits of an __m128 object,
+// zero extending the upper bits.
+//
+// r0 := a
+// r1 := 0x0
+FORCE_INLINE __m128i _mm_cvtsi64_si128(int64_t a)
{
- __m128 mask = {-1.0f, 1.0f, -1.0f, 1.0f};
- return _mm_fmadd_ps(b, mask, a);
+ return vreinterpretq_m128i_s64(vsetq_lane_s64(a, vdupq_n_s64(0), 0));
}
-// Horizontally add adjacent pairs of double-precision (64-bit) floating-point
-// elements in a and b, and pack the results in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_hadd_pd
-FORCE_INLINE __m128d _mm_hadd_pd(__m128d a, __m128d b)
+// Copy 64-bit integer a to the lower element of dst, and zero the upper
+// element.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtsi64x_si128
+#define _mm_cvtsi64x_si128(a) _mm_cvtsi64_si128(a)
+
+// Convert the signed 64-bit integer b to a double-precision (64-bit)
+// floating-point element, store the result in the lower element of dst, and
+// copy the upper element from a to the upper element of dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtsi64x_sd
+#define _mm_cvtsi64x_sd(a, b) _mm_cvtsi64_sd(a, b)
+
+// Convert the lower single-precision (32-bit) floating-point element in b to a
+// double-precision (64-bit) floating-point element, store the result in the
+// lower element of dst, and copy the upper element from a to the upper element
+// of dst.
+//
+// dst[63:0] := Convert_FP32_To_FP64(b[31:0])
+// dst[127:64] := a[127:64]
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtss_sd
+FORCE_INLINE __m128d _mm_cvtss_sd(__m128d a, __m128 b)
{
+ double d = (double) vgetq_lane_f32(vreinterpretq_f32_m128(b), 0);
#if defined(__aarch64__)
return vreinterpretq_m128d_f64(
- vpaddq_f64(vreinterpretq_f64_m128d(a), vreinterpretq_f64_m128d(b)));
+ vsetq_lane_f64(d, vreinterpretq_f64_m128d(a), 0));
#else
- double *da = (double *) &a;
- double *db = (double *) &b;
- double c[] = {da[0] + da[1], db[0] + db[1]};
- return vreinterpretq_m128d_u64(vld1q_u64((uint64_t *) c));
+ return vreinterpretq_m128d_s64(
+ vsetq_lane_s64(*(int64_t *) &d, vreinterpretq_s64_m128d(a), 0));
#endif
}
-// Compute the absolute differences of packed unsigned 8-bit integers in a and
-// b, then horizontally sum each consecutive 8 differences to produce two
-// unsigned 16-bit integers, and pack these unsigned 16-bit integers in the low
-// 16 bits of 64-bit elements in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sad_epu8
-FORCE_INLINE __m128i _mm_sad_epu8(__m128i a, __m128i b)
+// Convert packed double-precision (64-bit) floating-point elements in a to
+// packed 32-bit integers with truncation, and store the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvttpd_epi32
+FORCE_INLINE __m128i _mm_cvttpd_epi32(__m128d a)
{
- uint16x8_t t = vpaddlq_u8(vabdq_u8((uint8x16_t) a, (uint8x16_t) b));
- uint16_t r0 = t[0] + t[1] + t[2] + t[3];
- uint16_t r4 = t[4] + t[5] + t[6] + t[7];
- uint16x8_t r = vsetq_lane_u16(r0, vdupq_n_u16(0), 0);
- return (__m128i) vsetq_lane_u16(r4, r, 4);
+ double a0 = ((double *) &a)[0];
+ double a1 = ((double *) &a)[1];
+ return _mm_set_epi32(0, 0, (int32_t) a1, (int32_t) a0);
}
-// Compute the absolute differences of packed unsigned 8-bit integers in a and
-// b, then horizontally sum each consecutive 8 differences to produce four
-// unsigned 16-bit integers, and pack these unsigned 16-bit integers in the low
-// 16 bits of dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sad_pu8
-FORCE_INLINE __m64 _mm_sad_pu8(__m64 a, __m64 b)
+// Convert packed double-precision (64-bit) floating-point elements in a to
+// packed 32-bit integers with truncation, and store the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvttpd_pi32
+FORCE_INLINE __m64 _mm_cvttpd_pi32(__m128d a)
{
- uint16x4_t t =
- vpaddl_u8(vabd_u8(vreinterpret_u8_m64(a), vreinterpret_u8_m64(b)));
- uint16_t r0 = t[0] + t[1] + t[2] + t[3];
- return vreinterpret_m64_u16(vset_lane_u16(r0, vdup_n_u16(0), 0));
+ double a0 = ((double *) &a)[0];
+ double a1 = ((double *) &a)[1];
+ int32_t ALIGN_STRUCT(16) data[2] = {(int32_t) a0, (int32_t) a1};
+ return vreinterpret_m64_s32(vld1_s32(data));
}
-// Compute the absolute differences of packed unsigned 8-bit integers in a and
-// b, then horizontally sum each consecutive 8 differences to produce four
-// unsigned 16-bit integers, and pack these unsigned 16-bit integers in the low
-// 16 bits of dst.
+// Converts the four single-precision, floating-point values of a to signed
+// 32-bit integer values using truncation.
+// https://msdn.microsoft.com/en-us/library/vstudio/1h005y6x(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_cvttps_epi32(__m128 a)
+{
+ return vreinterpretq_m128i_s32(vcvtq_s32_f32(vreinterpretq_f32_m128(a)));
+}
+
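+// Usage sketch (illustrative; the helper name is hypothetical): unlike
+// _mm_cvtps_epi32 above, the 'tt' variant always truncates toward zero,
+// independent of the current rounding mode.
+FORCE_INLINE int32_t sse2neon_example_cvttps_epi32(void)
+{
+ __m128i r = _mm_cvttps_epi32(_mm_set_ps(0.0f, 0.0f, 0.0f, 2.9f));
+ return vgetq_lane_s32(vreinterpretq_s32_m128i(r), 0); // 2, not 3
+}
+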
+// Convert the lower double-precision (64-bit) floating-point element in a to a
+// 32-bit integer with truncation, and store the result in dst.
//
-// FOR j := 0 to 7
-// i := j*8
-// tmp[i+7:i] := ABS(a[i+7:i] - b[i+7:i])
-// ENDFOR
-// dst[15:0] := tmp[7:0] + tmp[15:8] + tmp[23:16] + tmp[31:24] + tmp[39:32] +
-// tmp[47:40] + tmp[55:48] + tmp[63:56] dst[63:16] := 0
+// dst[31:0] := Convert_FP64_To_Int32_Truncate(a[63:0])
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_m_psadbw
-#define _m_psadbw(a, b) _mm_sad_pu8(a, b)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvttsd_si32
+FORCE_INLINE int32_t _mm_cvttsd_si32(__m128d a)
+{
+ double ret = *((double *) &a);
+ return (int32_t) ret;
+}
-// Divides the four single-precision, floating-point values of a and b.
+// Convert the lower double-precision (64-bit) floating-point element in a to a
+// 64-bit integer with truncation, and store the result in dst.
//
-// r0 := a0 / b0
-// r1 := a1 / b1
-// r2 := a2 / b2
-// r3 := a3 / b3
+// dst[63:0] := Convert_FP64_To_Int64_Truncate(a[63:0])
//
-// https://msdn.microsoft.com/en-us/library/edaw8147(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_div_ps(__m128 a, __m128 b)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvttsd_si64
+FORCE_INLINE int64_t _mm_cvttsd_si64(__m128d a)
{
-#if defined(__aarch64__) && !SSE2NEON_PRECISE_DIV
- return vreinterpretq_m128_f32(
- vdivq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
+#if defined(__aarch64__)
+ return vgetq_lane_s64(vcvtq_s64_f64(vreinterpretq_f64_m128d(a)), 0);
#else
- float32x4_t recip = vrecpeq_f32(vreinterpretq_f32_m128(b));
- recip = vmulq_f32(recip, vrecpsq_f32(recip, vreinterpretq_f32_m128(b)));
-#if SSE2NEON_PRECISE_DIV
- // Additional Netwon-Raphson iteration for accuracy
- recip = vmulq_f32(recip, vrecpsq_f32(recip, vreinterpretq_f32_m128(b)));
-#endif
- return vreinterpretq_m128_f32(vmulq_f32(vreinterpretq_f32_m128(a), recip));
+ double ret = *((double *) &a);
+ return (int64_t) ret;
#endif
}
-// Divides the scalar single-precision floating point value of a by b.
-// https://msdn.microsoft.com/en-us/library/4y73xa49(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_div_ss(__m128 a, __m128 b)
-{
- float32_t value =
- vgetq_lane_f32(vreinterpretq_f32_m128(_mm_div_ps(a, b)), 0);
- return vreinterpretq_m128_f32(
- vsetq_lane_f32(value, vreinterpretq_f32_m128(a), 0));
-}
+// Convert the lower double-precision (64-bit) floating-point element in a to a
+// 64-bit integer with truncation, and store the result in dst.
+//
+// dst[63:0] := Convert_FP64_To_Int64_Truncate(a[63:0])
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvttsd_si64x
+#define _mm_cvttsd_si64x(a) _mm_cvttsd_si64(a)
// Divide packed double-precision (64-bit) floating-point elements in a by
// packed elements in b, and store the results in dst.
@@ -3875,266 +4340,230 @@ FORCE_INLINE __m128d _mm_div_sd(__m128d a, __m128d b)
#endif
}
-// Compute the approximate reciprocal of packed single-precision (32-bit)
-// floating-point elements in a, and store the results in dst. The maximum
-// relative error for this approximation is less than 1.5*2^-12.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_rcp_ps
-FORCE_INLINE __m128 _mm_rcp_ps(__m128 in)
+// Extracts the selected signed or unsigned 16-bit integer from a and zero
+// extends.
+// https://msdn.microsoft.com/en-us/library/6dceta0c(v=vs.100).aspx
+// FORCE_INLINE int _mm_extract_epi16(__m128i a, __constrange(0,8) int imm)
+#define _mm_extract_epi16(a, imm) \
+ vgetq_lane_u16(vreinterpretq_u16_m128i(a), (imm))
+
+// Inserts the least significant 16 bits of b into the selected 16-bit integer
+// of a.
+// https://msdn.microsoft.com/en-us/library/kaze8hz1%28v=vs.100%29.aspx
+// FORCE_INLINE __m128i _mm_insert_epi16(__m128i a, int b,
+// __constrange(0,8) int imm)
+#define _mm_insert_epi16(a, b, imm) \
+ __extension__({ \
+ vreinterpretq_m128i_s16( \
+ vsetq_lane_s16((b), vreinterpretq_s16_m128i(a), (imm))); \
+ })
+
+// Loads two double-precision floating-point values from 16-byte aligned
+// memory.
+//
+// dst[127:0] := MEM[mem_addr+127:mem_addr]
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_load_pd
+FORCE_INLINE __m128d _mm_load_pd(const double *p)
{
- float32x4_t recip = vrecpeq_f32(vreinterpretq_f32_m128(in));
- recip = vmulq_f32(recip, vrecpsq_f32(recip, vreinterpretq_f32_m128(in)));
-#if SSE2NEON_PRECISE_DIV
- // Additional Netwon-Raphson iteration for accuracy
- recip = vmulq_f32(recip, vrecpsq_f32(recip, vreinterpretq_f32_m128(in)));
+#if defined(__aarch64__)
+ return vreinterpretq_m128d_f64(vld1q_f64(p));
+#else
+ const float *fp = (const float *) p;
+ float ALIGN_STRUCT(16) data[4] = {fp[0], fp[1], fp[2], fp[3]};
+ return vreinterpretq_m128d_f32(vld1q_f32(data));
#endif
- return vreinterpretq_m128_f32(recip);
}
-// Compute the approximate reciprocal of the lower single-precision (32-bit)
-// floating-point element in a, store the result in the lower element of dst,
-// and copy the upper 3 packed elements from a to the upper elements of dst. The
-// maximum relative error for this approximation is less than 1.5*2^-12.
+// Load a double-precision (64-bit) floating-point element from memory into both
+// elements of dst.
//
-// dst[31:0] := (1.0 / a[31:0])
-// dst[127:32] := a[127:32]
+// dst[63:0] := MEM[mem_addr+63:mem_addr]
+// dst[127:64] := MEM[mem_addr+63:mem_addr]
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_rcp_ss
-FORCE_INLINE __m128 _mm_rcp_ss(__m128 a)
-{
- return _mm_move_ss(a, _mm_rcp_ps(a));
-}
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_load_pd1
+#define _mm_load_pd1 _mm_load1_pd
-// Computes the approximations of square roots of the four single-precision,
-// floating-point values of a. First computes reciprocal square roots and then
-// reciprocals of the four values.
+// Load a double-precision (64-bit) floating-point element from memory into the
+// lower of dst, and zero the upper element. mem_addr does not need to be
+// aligned on any particular boundary.
//
-// r0 := sqrt(a0)
-// r1 := sqrt(a1)
-// r2 := sqrt(a2)
-// r3 := sqrt(a3)
+// dst[63:0] := MEM[mem_addr+63:mem_addr]
+// dst[127:64] := 0
//
-// https://msdn.microsoft.com/en-us/library/vstudio/8z67bwwk(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_sqrt_ps(__m128 in)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_load_sd
+FORCE_INLINE __m128d _mm_load_sd(const double *p)
{
-#if SSE2NEON_PRECISE_SQRT
- float32x4_t recip = vrsqrteq_f32(vreinterpretq_f32_m128(in));
-
- // Test for vrsqrteq_f32(0) -> positive infinity case.
- // Change to zero, so that s * 1/sqrt(s) result is zero too.
- const uint32x4_t pos_inf = vdupq_n_u32(0x7F800000);
- const uint32x4_t div_by_zero =
- vceqq_u32(pos_inf, vreinterpretq_u32_f32(recip));
- recip = vreinterpretq_f32_u32(
- vandq_u32(vmvnq_u32(div_by_zero), vreinterpretq_u32_f32(recip)));
-
- // Additional Netwon-Raphson iteration for accuracy
- recip = vmulq_f32(
- vrsqrtsq_f32(vmulq_f32(recip, recip), vreinterpretq_f32_m128(in)),
- recip);
- recip = vmulq_f32(
- vrsqrtsq_f32(vmulq_f32(recip, recip), vreinterpretq_f32_m128(in)),
- recip);
-
- // sqrt(s) = s * 1/sqrt(s)
- return vreinterpretq_m128_f32(vmulq_f32(vreinterpretq_f32_m128(in), recip));
-#elif defined(__aarch64__)
- return vreinterpretq_m128_f32(vsqrtq_f32(vreinterpretq_f32_m128(in)));
+#if defined(__aarch64__)
+ return vreinterpretq_m128d_f64(vsetq_lane_f64(*p, vdupq_n_f64(0), 0));
#else
- float32x4_t recipsq = vrsqrteq_f32(vreinterpretq_f32_m128(in));
- float32x4_t sq = vrecpeq_f32(recipsq);
- return vreinterpretq_m128_f32(sq);
+ const float *fp = (const float *) p;
+ float ALIGN_STRUCT(16) data[4] = {fp[0], fp[1], 0, 0};
+ return vreinterpretq_m128d_f32(vld1q_f32(data));
#endif
}
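+
+// Usage sketch (illustrative; names are hypothetical): a single double is
+// loaded into the low lane and the high lane is zeroed.
+FORCE_INLINE __m128d sse2neon_example_load_sd(void)
+{
+ double x = 3.25;
+ return _mm_load_sd(&x); // {3.25, 0.0}
+}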
-// Computes the approximation of the square root of the scalar single-precision
-// floating point value of in.
-// https://msdn.microsoft.com/en-us/library/ahfsc22d(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_sqrt_ss(__m128 in)
+// Loads a 128-bit value.
+// https://msdn.microsoft.com/en-us/library/atzzad1h(v=vs.80).aspx
+FORCE_INLINE __m128i _mm_load_si128(const __m128i *p)
{
- float32_t value =
- vgetq_lane_f32(vreinterpretq_f32_m128(_mm_sqrt_ps(in)), 0);
- return vreinterpretq_m128_f32(
- vsetq_lane_f32(value, vreinterpretq_f32_m128(in), 0));
+ return vreinterpretq_m128i_s32(vld1q_s32((const int32_t *) p));
}
-// Computes the approximations of the reciprocal square roots of the four
-// single-precision floating point values of in.
-// https://msdn.microsoft.com/en-us/library/22hfsh53(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_rsqrt_ps(__m128 in)
+// Load a double-precision (64-bit) floating-point element from memory into both
+// elements of dst.
+//
+// dst[63:0] := MEM[mem_addr+63:mem_addr]
+// dst[127:64] := MEM[mem_addr+63:mem_addr]
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_load1_pd
+FORCE_INLINE __m128d _mm_load1_pd(const double *p)
{
- float32x4_t out = vrsqrteq_f32(vreinterpretq_f32_m128(in));
-#if SSE2NEON_PRECISE_RSQRT
- // Additional Netwon-Raphson iteration for accuracy
- out = vmulq_f32(
- out, vrsqrtsq_f32(vmulq_f32(vreinterpretq_f32_m128(in), out), out));
- out = vmulq_f32(
- out, vrsqrtsq_f32(vmulq_f32(vreinterpretq_f32_m128(in), out), out));
+#if defined(__aarch64__)
+ return vreinterpretq_m128d_f64(vld1q_dup_f64(p));
+#else
+ return vreinterpretq_m128d_s64(vdupq_n_s64(*(const int64_t *) p));
#endif
- return vreinterpretq_m128_f32(out);
}
-// Compute the approximate reciprocal square root of the lower single-precision
-// (32-bit) floating-point element in a, store the result in the lower element
-// of dst, and copy the upper 3 packed elements from a to the upper elements of
-// dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_rsqrt_ss
-FORCE_INLINE __m128 _mm_rsqrt_ss(__m128 in)
+// Load a double-precision (64-bit) floating-point element from memory into the
+// upper element of dst, and copy the lower element from a to dst. mem_addr does
+// not need to be aligned on any particular boundary.
+//
+// dst[63:0] := a[63:0]
+// dst[127:64] := MEM[mem_addr+63:mem_addr]
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loadh_pd
+FORCE_INLINE __m128d _mm_loadh_pd(__m128d a, const double *p)
{
- return vsetq_lane_f32(vgetq_lane_f32(_mm_rsqrt_ps(in), 0), in, 0);
+#if defined(__aarch64__)
+ return vreinterpretq_m128d_f64(
+ vcombine_f64(vget_low_f64(vreinterpretq_f64_m128d(a)), vld1_f64(p)));
+#else
+ return vreinterpretq_m128d_f32(vcombine_f32(
+ vget_low_f32(vreinterpretq_f32_m128d(a)), vld1_f32((const float *) p)));
+#endif
}
-// Compare packed signed 16-bit integers in a and b, and store packed maximum
-// values in dst.
-//
-// FOR j := 0 to 3
-// i := j*16
-// dst[i+15:i] := MAX(a[i+15:i], b[i+15:i])
-// ENDFOR
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_max_pi16
-FORCE_INLINE __m64 _mm_max_pi16(__m64 a, __m64 b)
+// Load 64-bit integer from memory into the first element of dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loadl_epi64
+FORCE_INLINE __m128i _mm_loadl_epi64(__m128i const *p)
{
- return vreinterpret_m64_s16(
- vmax_s16(vreinterpret_s16_m64(a), vreinterpret_s16_m64(b)));
+ /* Load the lower 64 bits of the value pointed to by p into the
+ * lower 64 bits of the result, zeroing the upper 64 bits of the result.
+ */
+ return vreinterpretq_m128i_s32(
+ vcombine_s32(vld1_s32((int32_t const *) p), vcreate_s32(0)));
}
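+
+// Usage sketch (illustrative; names are hypothetical): only the low 64 bits
+// pointed to are read, so a plain int64_t can be used as the source.
+FORCE_INLINE __m128i sse2neon_example_loadl_epi64(void)
+{
+ int64_t x = 42;
+ return _mm_loadl_epi64((const __m128i *) &x); // {42, 0}
+}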
-// Compare packed signed 16-bit integers in a and b, and store packed maximum
-// values in dst.
+// Load a double-precision (64-bit) floating-point element from memory into the
+// lower element of dst, and copy the upper element from a to dst. mem_addr does
+// not need to be aligned on any particular boundary.
//
-// FOR j := 0 to 3
-// i := j*16
-// dst[i+15:i] := MAX(a[i+15:i], b[i+15:i])
-// ENDFOR
+// dst[63:0] := MEM[mem_addr+63:mem_addr]
+// dst[127:64] := a[127:64]
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_max_pi16
-#define _m_pmaxsw(a, b) _mm_max_pi16(a, b)
-
-// Computes the maximums of the four single-precision, floating-point values of
-// a and b.
-// https://msdn.microsoft.com/en-us/library/vstudio/ff5d607a(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_max_ps(__m128 a, __m128 b)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loadl_pd
+FORCE_INLINE __m128d _mm_loadl_pd(__m128d a, const double *p)
{
-#if SSE2NEON_PRECISE_MINMAX
- float32x4_t _a = vreinterpretq_f32_m128(a);
- float32x4_t _b = vreinterpretq_f32_m128(b);
- return vbslq_f32(vcltq_f32(_b, _a), _a, _b);
+#if defined(__aarch64__)
+ return vreinterpretq_m128d_f64(
+ vcombine_f64(vld1_f64(p), vget_high_f64(vreinterpretq_f64_m128d(a))));
#else
- return vreinterpretq_m128_f32(
- vmaxq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
+ return vreinterpretq_m128d_f32(
+ vcombine_f32(vld1_f32((const float *) p),
+ vget_high_f32(vreinterpretq_f32_m128d(a))));
#endif
}
-// Compare packed unsigned 8-bit integers in a and b, and store packed maximum
-// values in dst.
+// Load 2 double-precision (64-bit) floating-point elements from memory into dst
+// in reverse order. mem_addr must be aligned on a 16-byte boundary or a
+// general-protection exception may be generated.
//
-// FOR j := 0 to 7
-// i := j*8
-// dst[i+7:i] := MAX(a[i+7:i], b[i+7:i])
-// ENDFOR
+// dst[63:0] := MEM[mem_addr+127:mem_addr+64]
+// dst[127:64] := MEM[mem_addr+63:mem_addr]
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_max_pu8
-FORCE_INLINE __m64 _mm_max_pu8(__m64 a, __m64 b)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loadr_pd
+FORCE_INLINE __m128d _mm_loadr_pd(const double *p)
{
- return vreinterpret_m64_u8(
- vmax_u8(vreinterpret_u8_m64(a), vreinterpret_u8_m64(b)));
+#if defined(__aarch64__)
+ float64x2_t v = vld1q_f64(p);
+ return vreinterpretq_m128d_f64(vextq_f64(v, v, 1));
+#else
+ int64x2_t v = vld1q_s64((const int64_t *) p);
+ return vreinterpretq_m128d_s64(vextq_s64(v, v, 1));
+#endif
}
-// Compare packed unsigned 8-bit integers in a and b, and store packed maximum
-// values in dst.
-//
-// FOR j := 0 to 7
-// i := j*8
-// dst[i+7:i] := MAX(a[i+7:i], b[i+7:i])
-// ENDFOR
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_max_pu8
-#define _m_pmaxub(a, b) _mm_max_pu8(a, b)
-
-// Compare packed signed 16-bit integers in a and b, and store packed minimum
-// values in dst.
-//
-// FOR j := 0 to 3
-// i := j*16
-// dst[i+15:i] := MIN(a[i+15:i], b[i+15:i])
-// ENDFOR
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_min_pi16
-FORCE_INLINE __m64 _mm_min_pi16(__m64 a, __m64 b)
+// Loads two double-precision floating-point values from unaligned memory.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loadu_pd
+FORCE_INLINE __m128d _mm_loadu_pd(const double *p)
{
- return vreinterpret_m64_s16(
- vmin_s16(vreinterpret_s16_m64(a), vreinterpret_s16_m64(b)));
+ return _mm_load_pd(p);
}
-// Compare packed signed 16-bit integers in a and b, and store packed minimum
-// values in dst.
-//
-// FOR j := 0 to 3
-// i := j*16
-// dst[i+15:i] := MIN(a[i+15:i], b[i+15:i])
-// ENDFOR
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_min_pi16
-#define _m_pminsw(a, b) _mm_min_pi16(a, b)
-
-// Computes the minima of the four single-precision, floating-point values of a
-// and b.
-// https://msdn.microsoft.com/en-us/library/vstudio/wh13kadz(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_min_ps(__m128 a, __m128 b)
+// Loads a 128-bit value.
+// https://msdn.microsoft.com/zh-cn/library/f4k12ae8(v=vs.90).aspx
+FORCE_INLINE __m128i _mm_loadu_si128(const __m128i *p)
{
-#if SSE2NEON_PRECISE_MINMAX
- float32x4_t _a = vreinterpretq_f32_m128(a);
- float32x4_t _b = vreinterpretq_f32_m128(b);
- return vbslq_f32(vcltq_f32(_a, _b), _a, _b);
-#else
- return vreinterpretq_m128_f32(
- vminq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
-#endif
+ return vreinterpretq_m128i_s32(vld1q_s32((const int32_t *) p));
}
-// Compare packed unsigned 8-bit integers in a and b, and store packed minimum
-// values in dst.
+// Load unaligned 32-bit integer from memory into the first element of dst.
//
-// FOR j := 0 to 7
-// i := j*8
-// dst[i+7:i] := MIN(a[i+7:i], b[i+7:i])
-// ENDFOR
+// dst[31:0] := MEM[mem_addr+31:mem_addr]
+// dst[MAX:32] := 0
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_min_pu8
-FORCE_INLINE __m64 _mm_min_pu8(__m64 a, __m64 b)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loadu_si32
+FORCE_INLINE __m128i _mm_loadu_si32(const void *p)
{
- return vreinterpret_m64_u8(
- vmin_u8(vreinterpret_u8_m64(a), vreinterpret_u8_m64(b)));
+ return vreinterpretq_m128i_s32(
+ vsetq_lane_s32(*(const int32_t *) p, vdupq_n_s32(0), 0));
}
-// Compare packed unsigned 8-bit integers in a and b, and store packed minimum
-// values in dst.
-//
-// FOR j := 0 to 7
-// i := j*8
-// dst[i+7:i] := MIN(a[i+7:i], b[i+7:i])
-// ENDFOR
+// Multiplies the 8 signed 16-bit integers from a by the 8 signed 16-bit
+// integers from b.
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_min_pu8
-#define _m_pminub(a, b) _mm_min_pu8(a, b)
+// r0 := (a0 * b0) + (a1 * b1)
+// r1 := (a2 * b2) + (a3 * b3)
+// r2 := (a4 * b4) + (a5 * b5)
+// r3 := (a6 * b6) + (a7 * b7)
+// https://msdn.microsoft.com/en-us/library/yht36sa6(v=vs.90).aspx
+FORCE_INLINE __m128i _mm_madd_epi16(__m128i a, __m128i b)
+{
+ int32x4_t low = vmull_s16(vget_low_s16(vreinterpretq_s16_m128i(a)),
+ vget_low_s16(vreinterpretq_s16_m128i(b)));
+ int32x4_t high = vmull_s16(vget_high_s16(vreinterpretq_s16_m128i(a)),
+ vget_high_s16(vreinterpretq_s16_m128i(b)));
-// Computes the maximum of the two lower scalar single-precision floating point
-// values of a and b.
-// https://msdn.microsoft.com/en-us/library/s6db5esz(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_max_ss(__m128 a, __m128 b)
+ int32x2_t low_sum = vpadd_s32(vget_low_s32(low), vget_high_s32(low));
+ int32x2_t high_sum = vpadd_s32(vget_low_s32(high), vget_high_s32(high));
+
+ return vreinterpretq_m128i_s32(vcombine_s32(low_sum, high_sum));
+}
+
+// Conditionally store 8-bit integer elements from a into memory using mask
+// (elements are not stored when the highest bit is not set in the corresponding
+// element) and a non-temporal memory hint. mem_addr does not need to be aligned
+// on any particular boundary.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_maskmoveu_si128
+FORCE_INLINE void _mm_maskmoveu_si128(__m128i a, __m128i mask, char *mem_addr)
{
- float32_t value = vgetq_lane_f32(_mm_max_ps(a, b), 0);
- return vreinterpretq_m128_f32(
- vsetq_lane_f32(value, vreinterpretq_f32_m128(a), 0));
+ int8x16_t shr_mask = vshrq_n_s8(vreinterpretq_s8_m128i(mask), 7);
+ __m128 b = _mm_load_ps((const float *) mem_addr);
+ int8x16_t masked =
+ vbslq_s8(vreinterpretq_u8_s8(shr_mask), vreinterpretq_s8_m128i(a),
+ vreinterpretq_s8_m128(b));
+ vst1q_s8((int8_t *) mem_addr, masked);
}
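+
+// Usage sketch (illustrative; names are hypothetical): only the byte lanes
+// whose mask byte has its top bit set are written back to memory; here that
+// is the lowest four bytes of buf.
+FORCE_INLINE void sse2neon_example_maskmoveu(char buf[16])
+{
+ __m128i src = _mm_set_epi32(0, 0, 0, 0x7f7f7f7f);
+ __m128i mask = _mm_set_epi32(0, 0, 0, (int) 0x80808080);
+ _mm_maskmoveu_si128(src, mask, buf);
+}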
-// Computes the minimum of the two lower scalar single-precision floating point
-// values of a and b.
-// https://msdn.microsoft.com/en-us/library/0a9y7xaa(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_min_ss(__m128 a, __m128 b)
+// Computes the pairwise maxima of the 8 signed 16-bit integers from a and the 8
+// signed 16-bit integers from b.
+// https://msdn.microsoft.com/en-us/LIBRary/3x060h7c(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_max_epi16(__m128i a, __m128i b)
{
- float32_t value = vgetq_lane_f32(_mm_min_ps(a, b), 0);
- return vreinterpretq_m128_f32(
- vsetq_lane_f32(value, vreinterpretq_f32_m128(a), 0));
+ return vreinterpretq_m128i_s16(
+ vmaxq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
}
// Computes the pairwise maxima of the 16 unsigned 8-bit integers from a and the
@@ -4146,13 +4575,41 @@ FORCE_INLINE __m128i _mm_max_epu8(__m128i a, __m128i b)
vmaxq_u8(vreinterpretq_u8_m128i(a), vreinterpretq_u8_m128i(b)));
}
-// Computes the pairwise minima of the 16 unsigned 8-bit integers from a and the
-// 16 unsigned 8-bit integers from b.
-// https://msdn.microsoft.com/ko-kr/library/17k8cf58(v=vs.100).aspxx
-FORCE_INLINE __m128i _mm_min_epu8(__m128i a, __m128i b)
+// Compare packed double-precision (64-bit) floating-point elements in a and b,
+// and store packed maximum values in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_max_pd
+FORCE_INLINE __m128d _mm_max_pd(__m128d a, __m128d b)
{
- return vreinterpretq_m128i_u8(
- vminq_u8(vreinterpretq_u8_m128i(a), vreinterpretq_u8_m128i(b)));
+#if defined(__aarch64__)
+ return vreinterpretq_m128d_f64(
+ vmaxq_f64(vreinterpretq_f64_m128d(a), vreinterpretq_f64_m128d(b)));
+#else
+ uint64_t a0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(a));
+ uint64_t a1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(a));
+ uint64_t b0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(b));
+ uint64_t b1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(b));
+ uint64_t d[2];
+ d[0] = (*(double *) &a0) > (*(double *) &b0) ? a0 : b0;
+ d[1] = (*(double *) &a1) > (*(double *) &b1) ? a1 : b1;
+
+ return vreinterpretq_m128d_u64(vld1q_u64(d));
+#endif
+}
+
+// Compare the lower double-precision (64-bit) floating-point elements in a and
+// b, store the maximum value in the lower element of dst, and copy the upper
+// element from a to the upper element of dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_max_sd
+FORCE_INLINE __m128d _mm_max_sd(__m128d a, __m128d b)
+{
+#if defined(__aarch64__)
+ return _mm_move_sd(a, _mm_max_pd(a, b));
+#else
+ double *da = (double *) &a;
+ double *db = (double *) &b;
+ double c[2] = {fmax(da[0], db[0]), da[1]};
+ return vld1q_f32((float32_t *) c);
+#endif
}
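+
+// Usage sketch (illustrative; names are hypothetical): only the low lane
+// holds the maximum; the high lane is copied from the first argument.
+FORCE_INLINE __m128d sse2neon_example_max_sd(void)
+{
+ __m128d a = _mm_set_pd(10.0, 1.0); // lanes {1.0, 10.0}
+ __m128d b = _mm_set_pd(99.0, 2.0); // lanes {2.0, 99.0}
+ return _mm_max_sd(a, b); // lanes {2.0, 10.0}
+}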
// Computes the pairwise minima of the 8 signed 16-bit integers from a and the 8
@@ -4164,110 +4621,246 @@ FORCE_INLINE __m128i _mm_min_epi16(__m128i a, __m128i b)
vminq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
}
-// Compare packed signed 8-bit integers in a and b, and store packed maximum
-// values in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_max_epi8
-FORCE_INLINE __m128i _mm_max_epi8(__m128i a, __m128i b)
+// Computes the pairwise minima of the 16 unsigned 8-bit integers from a and the
+// 16 unsigned 8-bit integers from b.
+// https://msdn.microsoft.com/ko-kr/library/17k8cf58(v=vs.100).aspxx
+FORCE_INLINE __m128i _mm_min_epu8(__m128i a, __m128i b)
{
- return vreinterpretq_m128i_s8(
- vmaxq_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b)));
+ return vreinterpretq_m128i_u8(
+ vminq_u8(vreinterpretq_u8_m128i(a), vreinterpretq_u8_m128i(b)));
}
-// Compare packed unsigned 16-bit integers in a and b, and store packed maximum
-// values in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_max_epu16
-FORCE_INLINE __m128i _mm_max_epu16(__m128i a, __m128i b)
+// Compare packed double-precision (64-bit) floating-point elements in a and b,
+// and store packed minimum values in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_min_pd
+FORCE_INLINE __m128d _mm_min_pd(__m128d a, __m128d b)
{
- return vreinterpretq_m128i_u16(
- vmaxq_u16(vreinterpretq_u16_m128i(a), vreinterpretq_u16_m128i(b)));
+#if defined(__aarch64__)
+ return vreinterpretq_m128d_f64(
+ vminq_f64(vreinterpretq_f64_m128d(a), vreinterpretq_f64_m128d(b)));
+#else
+ uint64_t a0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(a));
+ uint64_t a1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(a));
+ uint64_t b0 = (uint64_t) vget_low_u64(vreinterpretq_u64_m128d(b));
+ uint64_t b1 = (uint64_t) vget_high_u64(vreinterpretq_u64_m128d(b));
+ uint64_t d[2];
+ d[0] = (*(double *) &a0) < (*(double *) &b0) ? a0 : b0;
+ d[1] = (*(double *) &a1) < (*(double *) &b1) ? a1 : b1;
+ return vreinterpretq_m128d_u64(vld1q_u64(d));
+#endif
}
-// Compare packed signed 8-bit integers in a and b, and store packed minimum
-// values in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_min_epi8
-FORCE_INLINE __m128i _mm_min_epi8(__m128i a, __m128i b)
+// Compare the lower double-precision (64-bit) floating-point elements in a and
+// b, store the minimum value in the lower element of dst, and copy the upper
+// element from a to the upper element of dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_min_sd
+FORCE_INLINE __m128d _mm_min_sd(__m128d a, __m128d b)
{
- return vreinterpretq_m128i_s8(
- vminq_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b)));
+#if defined(__aarch64__)
+ return _mm_move_sd(a, _mm_min_pd(a, b));
+#else
+ double *da = (double *) &a;
+ double *db = (double *) &b;
+ double c[2] = {fmin(da[0], db[0]), da[1]};
+ return vld1q_f32((float32_t *) c);
+#endif
}
-// Compare packed unsigned 16-bit integers in a and b, and store packed minimum
-// values in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_min_epu16
-FORCE_INLINE __m128i _mm_min_epu16(__m128i a, __m128i b)
+// Copy the lower 64-bit integer in a to the lower element of dst, and zero the
+// upper element.
+//
+// dst[63:0] := a[63:0]
+// dst[127:64] := 0
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_move_epi64
+FORCE_INLINE __m128i _mm_move_epi64(__m128i a)
{
- return vreinterpretq_m128i_u16(
- vminq_u16(vreinterpretq_u16_m128i(a), vreinterpretq_u16_m128i(b)));
+ return vreinterpretq_m128i_s64(
+ vsetq_lane_s64(0, vreinterpretq_s64_m128i(a), 1));
}
-// Computes the pairwise maxima of the 8 signed 16-bit integers from a and the 8
-// signed 16-bit integers from b.
-// https://msdn.microsoft.com/en-us/LIBRary/3x060h7c(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_max_epi16(__m128i a, __m128i b)
+// Move the lower double-precision (64-bit) floating-point element from b to the
+// lower element of dst, and copy the upper element from a to the upper element
+// of dst.
+//
+// dst[63:0] := b[63:0]
+// dst[127:64] := a[127:64]
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_move_sd
+FORCE_INLINE __m128d _mm_move_sd(__m128d a, __m128d b)
{
- return vreinterpretq_m128i_s16(
- vmaxq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
+ return vreinterpretq_m128d_f32(
+ vcombine_f32(vget_low_f32(vreinterpretq_f32_m128d(b)),
+ vget_high_f32(vreinterpretq_f32_m128d(a))));
}
-// epi versions of min/max
-// Computes the pariwise maximums of the four signed 32-bit integer values of a
-// and b.
+// NEON does not provide a version of this function.
+// Creates a 16-bit mask from the most significant bits of the 16 signed or
+// unsigned 8-bit integers in a and zero extends the upper bits.
+// https://msdn.microsoft.com/en-us/library/vstudio/s090c8fk(v=vs.100).aspx
+FORCE_INLINE int _mm_movemask_epi8(__m128i a)
+{
+ // Use increasingly wide shifts+adds to collect the sign bits
+ // together.
+ // Since the widening shifts would be rather confusing to follow in little
+ // endian, everything will be illustrated in big endian order instead. This
+ // has a different result - the bits would actually be reversed on a big
+ // endian machine.
+
+ // Starting input (only half the elements are shown):
+ // 89 ff 1d c0 00 10 99 33
+ uint8x16_t input = vreinterpretq_u8_m128i(a);
+
+ // Shift out everything but the sign bits with an unsigned shift right.
+ //
+ // Bytes of the vector:
+ // 89 ff 1d c0 00 10 99 33
+ // \ \ \ \ \ \ \ \ high_bits = (uint16x4_t)(input >> 7)
+ // | | | | | | | |
+ // 01 01 00 01 00 00 01 00
+ //
+ // Bits of first important lane(s):
+ // 10001001 (89)
+ // \______
+ // |
+ // 00000001 (01)
+ uint16x8_t high_bits = vreinterpretq_u16_u8(vshrq_n_u8(input, 7));
+
+ // Merge the even lanes together with a 16-bit unsigned shift right + add.
+ // 'xx' represents garbage data which will be ignored in the final result.
+ // In the important bytes, the add functions like a binary OR.
+ //
+ // 01 01 00 01 00 00 01 00
+ // \_ | \_ | \_ | \_ | paired16 = (uint32x4_t)(input + (input >> 7))
+ // \| \| \| \|
+ // xx 03 xx 01 xx 00 xx 02
+ //
+ // 00000001 00000001 (01 01)
+ // \_______ |
+ // \|
+ // xxxxxxxx xxxxxx11 (xx 03)
+ uint32x4_t paired16 =
+ vreinterpretq_u32_u16(vsraq_n_u16(high_bits, high_bits, 7));
+
+ // Repeat with a wider 32-bit shift + add.
+ // xx 03 xx 01 xx 00 xx 02
+ // \____ | \____ | paired32 = (uint64x1_t)(paired16 + (paired16 >>
+ // 14))
+ // \| \|
+ // xx xx xx 0d xx xx xx 02
+ //
+ // 00000011 00000001 (03 01)
+ // \\_____ ||
+ // '----.\||
+ // xxxxxxxx xxxx1101 (xx 0d)
+ uint64x2_t paired32 =
+ vreinterpretq_u64_u32(vsraq_n_u32(paired16, paired16, 14));
+
+ // Last, an even wider 64-bit shift + add to get our result in the low 8 bit
+ // lanes. xx xx xx 0d xx xx xx 02
+ // \_________ | paired64 = (uint8x8_t)(paired32 + (paired32 >>
+ // 28))
+ // \|
+ // xx xx xx xx xx xx xx d2
+ //
+ // 00001101 00000010 (0d 02)
+ // \ \___ | |
+ // '---. \| |
+ // xxxxxxxx 11010010 (xx d2)
+ uint8x16_t paired64 =
+ vreinterpretq_u8_u64(vsraq_n_u64(paired32, paired32, 28));
+
+ // Extract the low 8 bits from each 64-bit lane with 2 8-bit extracts.
+ // xx xx xx xx xx xx xx d2
+ // || return paired64[0]
+ // d2
+ // Note: Little endian would return the correct value 4b (01001011) instead.
+ return vgetq_lane_u8(paired64, 0) | ((int) vgetq_lane_u8(paired64, 8) << 8);
+}
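+
+// Usage sketch (illustrative; the helper name is hypothetical): combining a
+// byte-wise compare with _mm_movemask_epi8 tests whether any byte of v
+// equals c.
+FORCE_INLINE int sse2neon_example_contains_byte(__m128i v, int8_t c)
+{
+ __m128i eq = _mm_cmpeq_epi8(v, vreinterpretq_m128i_s8(vdupq_n_s8(c)));
+ return _mm_movemask_epi8(eq) != 0; // non-zero mask => at least one match
+}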
+
+// Set each bit of mask dst based on the most significant bit of the
+// corresponding packed double-precision (64-bit) floating-point element in a.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_movemask_pd
+FORCE_INLINE int _mm_movemask_pd(__m128d a)
+{
+ uint64x2_t input = vreinterpretq_u64_m128d(a);
+ uint64x2_t high_bits = vshrq_n_u64(input, 63);
+ return vgetq_lane_u64(high_bits, 0) | (vgetq_lane_u64(high_bits, 1) << 1);
+}
+
+// Copy the lower 64-bit integer in a to dst.
//
-// A 128-bit parameter that can be defined with the following equations:
-// r0 := (a0 > b0) ? a0 : b0
-// r1 := (a1 > b1) ? a1 : b1
-// r2 := (a2 > b2) ? a2 : b2
-// r3 := (a3 > b3) ? a3 : b3
+// dst[63:0] := a[63:0]
//
-// https://msdn.microsoft.com/en-us/library/vstudio/bb514055(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_max_epi32(__m128i a, __m128i b)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_movepi64_pi64
+FORCE_INLINE __m64 _mm_movepi64_pi64(__m128i a)
{
- return vreinterpretq_m128i_s32(
- vmaxq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
+ return vreinterpret_m64_s64(vget_low_s64(vreinterpretq_s64_m128i(a)));
}
-// Computes the pariwise minima of the four signed 32-bit integer values of a
-// and b.
+// Copy the 64-bit integer a to the lower element of dst, and zero the upper
+// element.
//
-// A 128-bit parameter that can be defined with the following equations:
-// r0 := (a0 < b0) ? a0 : b0
-// r1 := (a1 < b1) ? a1 : b1
-// r2 := (a2 < b2) ? a2 : b2
-// r3 := (a3 < b3) ? a3 : b3
+// dst[63:0] := a[63:0]
+// dst[127:64] := 0
//
-// https://msdn.microsoft.com/en-us/library/vstudio/bb531476(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_min_epi32(__m128i a, __m128i b)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_movpi64_epi64
+FORCE_INLINE __m128i _mm_movpi64_epi64(__m64 a)
{
- return vreinterpretq_m128i_s32(
- vminq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
+ return vreinterpretq_m128i_s64(
+ vcombine_s64(vreinterpret_s64_m64(a), vdup_n_s64(0)));
}
-// Compare packed unsigned 32-bit integers in a and b, and store packed maximum
-// values in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_max_epu32
-FORCE_INLINE __m128i _mm_max_epu32(__m128i a, __m128i b)
+// Multiply the low unsigned 32-bit integers from each packed 64-bit element in
+// a and b, and store the unsigned 64-bit results in dst.
+//
+// r0 := (a0 & 0xFFFFFFFF) * (b0 & 0xFFFFFFFF)
+// r1 := (a2 & 0xFFFFFFFF) * (b2 & 0xFFFFFFFF)
+FORCE_INLINE __m128i _mm_mul_epu32(__m128i a, __m128i b)
{
- return vreinterpretq_m128i_u32(
- vmaxq_u32(vreinterpretq_u32_m128i(a), vreinterpretq_u32_m128i(b)));
+ // vmull_u32 upcasts instead of masking, so we downcast.
+ uint32x2_t a_lo = vmovn_u64(vreinterpretq_u64_m128i(a));
+ uint32x2_t b_lo = vmovn_u64(vreinterpretq_u64_m128i(b));
+ return vreinterpretq_m128i_u64(vmull_u32(a_lo, b_lo));
}
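+
+// Usage sketch (illustrative; the helper name is hypothetical): the even
+// 32-bit lanes are widened into full 64-bit products, so the multiply cannot
+// overflow.
+FORCE_INLINE __m128i sse2neon_example_mul_epu32(void)
+{
+ __m128i a = _mm_set_epi32(0, -1, 0, -1); // even lanes hold 0xFFFFFFFF
+ return _mm_mul_epu32(a, a); // both 64-bit lanes = 0xFFFFFFFE00000001
+}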
-// Compare packed unsigned 32-bit integers in a and b, and store packed minimum
-// values in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_max_epu32
-FORCE_INLINE __m128i _mm_min_epu32(__m128i a, __m128i b)
+// Multiply packed double-precision (64-bit) floating-point elements in a and b,
+// and store the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_mul_pd
+FORCE_INLINE __m128d _mm_mul_pd(__m128d a, __m128d b)
{
- return vreinterpretq_m128i_u32(
- vminq_u32(vreinterpretq_u32_m128i(a), vreinterpretq_u32_m128i(b)));
+#if defined(__aarch64__)
+ return vreinterpretq_m128d_f64(
+ vmulq_f64(vreinterpretq_f64_m128d(a), vreinterpretq_f64_m128d(b)));
+#else
+ double *da = (double *) &a;
+ double *db = (double *) &b;
+ double c[2];
+ c[0] = da[0] * db[0];
+ c[1] = da[1] * db[1];
+ return vld1q_f32((float32_t *) c);
+#endif
}
-// Multiply the packed unsigned 16-bit integers in a and b, producing
-// intermediate 32-bit integers, and store the high 16 bits of the intermediate
-// integers in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_mulhi_pu16
-FORCE_INLINE __m64 _mm_mulhi_pu16(__m64 a, __m64 b)
+// Multiply the lower double-precision (64-bit) floating-point element in a and
+// b, store the result in the lower element of dst, and copy the upper element
+// from a to the upper element of dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=mm_mul_sd
+FORCE_INLINE __m128d _mm_mul_sd(__m128d a, __m128d b)
{
- return vreinterpret_m64_u16(vshrn_n_u32(
- vmull_u16(vreinterpret_u16_m64(a), vreinterpret_u16_m64(b)), 16));
+ return _mm_move_sd(a, _mm_mul_pd(a, b));
+}
+
+// Multiply the low unsigned 32-bit integers from a and b, and store the
+// unsigned 64-bit result in dst.
+//
+// dst[63:0] := a[31:0] * b[31:0]
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_mul_su32
+FORCE_INLINE __m64 _mm_mul_su32(__m64 a, __m64 b)
+{
+ return vreinterpret_m64_u64(vget_low_u64(
+ vmull_u32(vreinterpret_u32_m64(a), vreinterpret_u32_m64(b))));
}
// Multiplies the 8 signed 16-bit integers from a by the 8 signed 16-bit
@@ -4321,1341 +4914,2413 @@ FORCE_INLINE __m128i _mm_mulhi_epu16(__m128i a, __m128i b)
#endif
}
-// Computes pairwise add of each argument as single-precision, floating-point
-// values a and b.
-// https://msdn.microsoft.com/en-us/library/yd9wecaa.aspx
-FORCE_INLINE __m128 _mm_hadd_ps(__m128 a, __m128 b)
+// Multiplies the 8 signed or unsigned 16-bit integers from a by the 8 signed or
+// unsigned 16-bit integers from b.
+//
+// r0 := (a0 * b0)[15:0]
+// r1 := (a1 * b1)[15:0]
+// ...
+// r7 := (a7 * b7)[15:0]
+//
+// https://msdn.microsoft.com/en-us/library/vstudio/9ks1472s(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_mullo_epi16(__m128i a, __m128i b)
{
-#if defined(__aarch64__)
- return vreinterpretq_m128_f32(
- vpaddq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
-#else
- float32x2_t a10 = vget_low_f32(vreinterpretq_f32_m128(a));
- float32x2_t a32 = vget_high_f32(vreinterpretq_f32_m128(a));
- float32x2_t b10 = vget_low_f32(vreinterpretq_f32_m128(b));
- float32x2_t b32 = vget_high_f32(vreinterpretq_f32_m128(b));
- return vreinterpretq_m128_f32(
- vcombine_f32(vpadd_f32(a10, a32), vpadd_f32(b10, b32)));
-#endif
+ return vreinterpretq_m128i_s16(
+ vmulq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
}
-// Computes pairwise add of each argument as a 16-bit signed or unsigned integer
-// values a and b.
-FORCE_INLINE __m128i _mm_hadd_epi16(__m128i _a, __m128i _b)
+// Compute the bitwise OR of packed double-precision (64-bit) floating-point
+// elements in a and b, and store the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=mm_or_pd
+FORCE_INLINE __m128d _mm_or_pd(__m128d a, __m128d b)
{
- int16x8_t a = vreinterpretq_s16_m128i(_a);
- int16x8_t b = vreinterpretq_s16_m128i(_b);
-#if defined(__aarch64__)
- return vreinterpretq_m128i_s16(vpaddq_s16(a, b));
-#else
- return vreinterpretq_m128i_s16(
- vcombine_s16(vpadd_s16(vget_low_s16(a), vget_high_s16(a)),
- vpadd_s16(vget_low_s16(b), vget_high_s16(b))));
-#endif
+ return vreinterpretq_m128d_s64(
+ vorrq_s64(vreinterpretq_s64_m128d(a), vreinterpretq_s64_m128d(b)));
}
-// Horizontally substract adjacent pairs of single-precision (32-bit)
-// floating-point elements in a and b, and pack the results in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_hsub_ps
-FORCE_INLINE __m128 _mm_hsub_ps(__m128 _a, __m128 _b)
+// Computes the bitwise OR of the 128-bit value in a and the 128-bit value in b.
+//
+// r := a | b
+//
+// https://msdn.microsoft.com/en-us/library/vstudio/ew8ty0db(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_or_si128(__m128i a, __m128i b)
{
-#if defined(__aarch64__)
- return vreinterpretq_m128_f32(vsubq_f32(
- vuzp1q_f32(vreinterpretq_f32_m128(_a), vreinterpretq_f32_m128(_b)),
- vuzp2q_f32(vreinterpretq_f32_m128(_a), vreinterpretq_f32_m128(_b))));
-#else
- float32x4x2_t c =
- vuzpq_f32(vreinterpretq_f32_m128(_a), vreinterpretq_f32_m128(_b));
- return vreinterpretq_m128_f32(vsubq_f32(c.val[0], c.val[1]));
-#endif
+ return vreinterpretq_m128i_s32(
+ vorrq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
}
-// Horizontally add adjacent pairs of 16-bit integers in a and b, and pack the
-// signed 16-bit results in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_hadd_pi16
-FORCE_INLINE __m64 _mm_hadd_pi16(__m64 a, __m64 b)
+// Packs the 16 signed 16-bit integers from a and b into 8-bit integers and
+// saturates.
+// https://msdn.microsoft.com/en-us/library/k4y4f7w5%28v=vs.90%29.aspx
+FORCE_INLINE __m128i _mm_packs_epi16(__m128i a, __m128i b)
{
- return vreinterpret_m64_s16(
- vpadd_s16(vreinterpret_s16_m64(a), vreinterpret_s16_m64(b)));
+ return vreinterpretq_m128i_s8(
+ vcombine_s8(vqmovn_s16(vreinterpretq_s16_m128i(a)),
+ vqmovn_s16(vreinterpretq_s16_m128i(b))));
}
-// Horizontally add adjacent pairs of 32-bit integers in a and b, and pack the
-// signed 32-bit results in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_hadd_pi32
-FORCE_INLINE __m64 _mm_hadd_pi32(__m64 a, __m64 b)
+// Packs the 8 signed 32-bit integers from a and b into signed 16-bit integers
+// and saturates.
+//
+// r0 := SignedSaturate(a0)
+// r1 := SignedSaturate(a1)
+// r2 := SignedSaturate(a2)
+// r3 := SignedSaturate(a3)
+// r4 := SignedSaturate(b0)
+// r5 := SignedSaturate(b1)
+// r6 := SignedSaturate(b2)
+// r7 := SignedSaturate(b3)
+//
+// https://msdn.microsoft.com/en-us/library/393t56f9%28v=vs.90%29.aspx
+FORCE_INLINE __m128i _mm_packs_epi32(__m128i a, __m128i b)
{
- return vreinterpret_m64_s32(
- vpadd_s32(vreinterpret_s32_m64(a), vreinterpret_s32_m64(b)));
+ return vreinterpretq_m128i_s16(
+ vcombine_s16(vqmovn_s32(vreinterpretq_s32_m128i(a)),
+ vqmovn_s32(vreinterpretq_s32_m128i(b))));
}
-// Computes pairwise difference of each argument as a 16-bit signed or unsigned
-// integer values a and b.
-FORCE_INLINE __m128i _mm_hsub_epi16(__m128i _a, __m128i _b)
+// Packs the 16 signed 16-bit integers from a and b into 8-bit unsigned
+// integers and saturates.
+//
+// r0 := UnsignedSaturate(a0)
+// r1 := UnsignedSaturate(a1)
+// ...
+// r7 := UnsignedSaturate(a7)
+// r8 := UnsignedSaturate(b0)
+// r9 := UnsignedSaturate(b1)
+// ...
+// r15 := UnsignedSaturate(b7)
+//
+// https://msdn.microsoft.com/en-us/library/07ad1wx4(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_packus_epi16(const __m128i a, const __m128i b)
{
- int32x4_t a = vreinterpretq_s32_m128i(_a);
- int32x4_t b = vreinterpretq_s32_m128i(_b);
- // Interleave using vshrn/vmovn
- // [a0|a2|a4|a6|b0|b2|b4|b6]
- // [a1|a3|a5|a7|b1|b3|b5|b7]
- int16x8_t ab0246 = vcombine_s16(vmovn_s32(a), vmovn_s32(b));
- int16x8_t ab1357 = vcombine_s16(vshrn_n_s32(a, 16), vshrn_n_s32(b, 16));
- // Subtract
- return vreinterpretq_m128i_s16(vsubq_s16(ab0246, ab1357));
+ return vreinterpretq_m128i_u8(
+ vcombine_u8(vqmovun_s16(vreinterpretq_s16_m128i(a)),
+ vqmovun_s16(vreinterpretq_s16_m128i(b))));
}
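// Usage sketch (assuming _mm_set_epi16 defined elsewhere in this header):
// unsigned saturation clamps negative inputs to 0 and values above 255 to
// 255, which is the usual last step when narrowing 16-bit pixel math back
// to 8-bit channels:
//   __m128i w = _mm_set_epi16(300, 256, 255, 128, 1, 0, -1, -300);
//   __m128i p = _mm_packus_epi16(w, w);
//   // bytes 0..7 become 0, 0, 0, 1, 128, 255, 255, 255 (and likewise bytes 8..15)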
-// Computes saturated pairwise sub of each argument as a 16-bit signed
-// integer values a and b.
-FORCE_INLINE __m128i _mm_hadds_epi16(__m128i _a, __m128i _b)
+// Pause the processor. This is typically used in spin-wait loops and, depending
+// on the x86 processor, typical values are in the 40-100 cycle range. The
+// 'yield' instruction isn't a good fit because it's effectively a nop on most
+// Arm cores. Experience with several databases has shown an 'isb' is a
+// reasonable approximation.
+FORCE_INLINE void _mm_pause()
{
-#if defined(__aarch64__)
- int16x8_t a = vreinterpretq_s16_m128i(_a);
- int16x8_t b = vreinterpretq_s16_m128i(_b);
- return vreinterpretq_s64_s16(
- vqaddq_s16(vuzp1q_s16(a, b), vuzp2q_s16(a, b)));
-#else
- int32x4_t a = vreinterpretq_s32_m128i(_a);
- int32x4_t b = vreinterpretq_s32_m128i(_b);
- // Interleave using vshrn/vmovn
- // [a0|a2|a4|a6|b0|b2|b4|b6]
- // [a1|a3|a5|a7|b1|b3|b5|b7]
- int16x8_t ab0246 = vcombine_s16(vmovn_s32(a), vmovn_s32(b));
- int16x8_t ab1357 = vcombine_s16(vshrn_n_s32(a, 16), vshrn_n_s32(b, 16));
- // Saturated add
- return vreinterpretq_m128i_s16(vqaddq_s16(ab0246, ab1357));
-#endif
+ __asm__ __volatile__("isb\n");
}
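// Usage sketch: the typical caller is a spin-wait loop, where the isb keeps
// the core from hammering the contended line while it waits ('flag' here is
// a hypothetical variable name):
//   while (!*flag)
//       _mm_pause();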
-// Computes saturated pairwise difference of each argument as a 16-bit signed
-// integer values a and b.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_hsubs_epi16
-FORCE_INLINE __m128i _mm_hsubs_epi16(__m128i _a, __m128i _b)
+// Compute the absolute differences of packed unsigned 8-bit integers in a and
+// b, then horizontally sum each consecutive 8 differences to produce two
+// unsigned 16-bit integers, and pack these unsigned 16-bit integers in the low
+// 16 bits of 64-bit elements in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sad_epu8
+FORCE_INLINE __m128i _mm_sad_epu8(__m128i a, __m128i b)
{
-#if defined(__aarch64__)
- int16x8_t a = vreinterpretq_s16_m128i(_a);
- int16x8_t b = vreinterpretq_s16_m128i(_b);
- return vreinterpretq_s64_s16(
- vqsubq_s16(vuzp1q_s16(a, b), vuzp2q_s16(a, b)));
-#else
- int32x4_t a = vreinterpretq_s32_m128i(_a);
- int32x4_t b = vreinterpretq_s32_m128i(_b);
- // Interleave using vshrn/vmovn
- // [a0|a2|a4|a6|b0|b2|b4|b6]
- // [a1|a3|a5|a7|b1|b3|b5|b7]
- int16x8_t ab0246 = vcombine_s16(vmovn_s32(a), vmovn_s32(b));
- int16x8_t ab1357 = vcombine_s16(vshrn_n_s32(a, 16), vshrn_n_s32(b, 16));
- // Saturated subtract
- return vreinterpretq_m128i_s16(vqsubq_s16(ab0246, ab1357));
-#endif
+ uint16x8_t t = vpaddlq_u8(vabdq_u8((uint8x16_t) a, (uint8x16_t) b));
+ return vreinterpretq_m128i_u64(vpaddlq_u32(vpaddlq_u16(t)));
}
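// Usage sketch (assuming _mm_cvtsi128_si32 and _mm_extract_epi16 defined
// elsewhere in this header): each 64-bit half of the result carries one
// 16-bit partial sum, so a full 16-byte SAD is the sum of the two:
//   __m128i sad   = _mm_sad_epu8(a, b);
//   int     total = _mm_cvtsi128_si32(sad) + _mm_extract_epi16(sad, 4);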
-// Computes pairwise add of each argument as a 32-bit signed or unsigned integer
-// values a and b.
-FORCE_INLINE __m128i _mm_hadd_epi32(__m128i _a, __m128i _b)
+// Sets the 8 signed 16-bit integer values.
+// https://msdn.microsoft.com/en-au/library/3e0fek84(v=vs.90).aspx
+FORCE_INLINE __m128i _mm_set_epi16(short i7,
+ short i6,
+ short i5,
+ short i4,
+ short i3,
+ short i2,
+ short i1,
+ short i0)
{
- int32x4_t a = vreinterpretq_s32_m128i(_a);
- int32x4_t b = vreinterpretq_s32_m128i(_b);
- return vreinterpretq_m128i_s32(
- vcombine_s32(vpadd_s32(vget_low_s32(a), vget_high_s32(a)),
- vpadd_s32(vget_low_s32(b), vget_high_s32(b))));
+ int16_t ALIGN_STRUCT(16) data[8] = {i0, i1, i2, i3, i4, i5, i6, i7};
+ return vreinterpretq_m128i_s16(vld1q_s16(data));
}
-// Computes pairwise difference of each argument as a 32-bit signed or unsigned
-// integer values a and b.
-FORCE_INLINE __m128i _mm_hsub_epi32(__m128i _a, __m128i _b)
+// Sets the 4 signed 32-bit integer values.
+// https://msdn.microsoft.com/en-us/library/vstudio/019beekt(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_set_epi32(int i3, int i2, int i1, int i0)
{
- int64x2_t a = vreinterpretq_s64_m128i(_a);
- int64x2_t b = vreinterpretq_s64_m128i(_b);
- // Interleave using vshrn/vmovn
- // [a0|a2|b0|b2]
- // [a1|a2|b1|b3]
- int32x4_t ab02 = vcombine_s32(vmovn_s64(a), vmovn_s64(b));
- int32x4_t ab13 = vcombine_s32(vshrn_n_s64(a, 32), vshrn_n_s64(b, 32));
- // Subtract
- return vreinterpretq_m128i_s32(vsubq_s32(ab02, ab13));
+ int32_t ALIGN_STRUCT(16) data[4] = {i0, i1, i2, i3};
+ return vreinterpretq_m128i_s32(vld1q_s32(data));
}
-// Kahan summation for accurate summation of floating-point numbers.
-// http://blog.zachbjornson.com/2019/08/11/fast-float-summation.html
-FORCE_INLINE void _sse2neon_kadd_f32(float *sum, float *c, float y)
+// Returns the __m128i structure with its two 64-bit integer values
+// initialized to the values of the two 64-bit integers passed in.
+// https://msdn.microsoft.com/en-us/library/dk2sdw0h(v=vs.120).aspx
+FORCE_INLINE __m128i _mm_set_epi64(__m64 i1, __m64 i2)
{
- y -= *c;
- float t = *sum + y;
- *c = (t - *sum) - y;
- *sum = t;
+ return _mm_set_epi64x((int64_t) i1, (int64_t) i2);
}
-// Conditionally multiply the packed single-precision (32-bit) floating-point
-// elements in a and b using the high 4 bits in imm8, sum the four products,
-// and conditionally store the sum in dst using the low 4 bits of imm.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_dp_ps
-FORCE_INLINE __m128 _mm_dp_ps(__m128 a, __m128 b, const int imm)
+// Returns the __m128i structure with its two 64-bit integer values
+// initialized to the values of the two 64-bit integers passed in.
+// https://msdn.microsoft.com/en-us/library/dk2sdw0h(v=vs.120).aspx
+FORCE_INLINE __m128i _mm_set_epi64x(int64_t i1, int64_t i2)
{
-#if defined(__aarch64__)
- /* shortcuts */
- if (imm == 0xFF) {
- return _mm_set1_ps(vaddvq_f32(_mm_mul_ps(a, b)));
- }
- if (imm == 0x7F) {
- float32x4_t m = _mm_mul_ps(a, b);
- m[3] = 0;
- return _mm_set1_ps(vaddvq_f32(m));
- }
-#endif
-
- float s = 0, c = 0;
- float32x4_t f32a = vreinterpretq_f32_m128(a);
- float32x4_t f32b = vreinterpretq_f32_m128(b);
-
- /* To improve the accuracy of floating-point summation, Kahan algorithm
- * is used for each operation.
- */
- if (imm & (1 << 4))
- _sse2neon_kadd_f32(&s, &c, f32a[0] * f32b[0]);
- if (imm & (1 << 5))
- _sse2neon_kadd_f32(&s, &c, f32a[1] * f32b[1]);
- if (imm & (1 << 6))
- _sse2neon_kadd_f32(&s, &c, f32a[2] * f32b[2]);
- if (imm & (1 << 7))
- _sse2neon_kadd_f32(&s, &c, f32a[3] * f32b[3]);
- s += c;
-
- float32x4_t res = {
- (imm & 0x1) ? s : 0,
- (imm & 0x2) ? s : 0,
- (imm & 0x4) ? s : 0,
- (imm & 0x8) ? s : 0,
- };
- return vreinterpretq_m128_f32(res);
+ return vreinterpretq_m128i_s64(
+ vcombine_s64(vcreate_s64(i2), vcreate_s64(i1)));
}
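// Usage sketch: as with the other _mm_set_* helpers, the first argument is
// the most significant element, so the second argument lands in lane 0:
//   __m128i v = _mm_set_epi64x(1, 2);  // lane 0 == 2, lane 1 == 1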
-/* Compare operations */
+// Sets the 16 signed 8-bit integer values.
+// https://msdn.microsoft.com/en-us/library/x0cx8zd3(v=vs.90).aspx
+FORCE_INLINE __m128i _mm_set_epi8(signed char b15,
+ signed char b14,
+ signed char b13,
+ signed char b12,
+ signed char b11,
+ signed char b10,
+ signed char b9,
+ signed char b8,
+ signed char b7,
+ signed char b6,
+ signed char b5,
+ signed char b4,
+ signed char b3,
+ signed char b2,
+ signed char b1,
+ signed char b0)
+{
+ int8_t ALIGN_STRUCT(16)
+ data[16] = {(int8_t) b0, (int8_t) b1, (int8_t) b2, (int8_t) b3,
+ (int8_t) b4, (int8_t) b5, (int8_t) b6, (int8_t) b7,
+ (int8_t) b8, (int8_t) b9, (int8_t) b10, (int8_t) b11,
+ (int8_t) b12, (int8_t) b13, (int8_t) b14, (int8_t) b15};
+ return (__m128i) vld1q_s8(data);
+}
-// Compares for less than
-// https://msdn.microsoft.com/en-us/library/vstudio/f330yhc8(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_cmplt_ps(__m128 a, __m128 b)
+// Set packed double-precision (64-bit) floating-point elements in dst with the
+// supplied values.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_set_pd
+FORCE_INLINE __m128d _mm_set_pd(double e1, double e0)
{
- return vreinterpretq_m128_u32(
- vcltq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
+ double ALIGN_STRUCT(16) data[2] = {e0, e1};
+#if defined(__aarch64__)
+ return vreinterpretq_m128d_f64(vld1q_f64((float64_t *) data));
+#else
+ return vreinterpretq_m128d_f32(vld1q_f32((float32_t *) data));
+#endif
}
-// Compares for less than
-// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/fy94wye7(v=vs.100)
-FORCE_INLINE __m128 _mm_cmplt_ss(__m128 a, __m128 b)
+// Broadcast double-precision (64-bit) floating-point value a to all elements of
+// dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_set_pd1
+#define _mm_set_pd1 _mm_set1_pd
+
+// Copy double-precision (64-bit) floating-point element a to the lower element
+// of dst, and zero the upper element.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_set_sd
+FORCE_INLINE __m128d _mm_set_sd(double a)
{
- return _mm_move_ss(a, _mm_cmplt_ps(a, b));
+ return _mm_set_pd(0, a);
}
-// Compares for greater than.
+// Sets the 8 signed 16-bit integer values to w.
//
-// r0 := (a0 > b0) ? 0xffffffff : 0x0
-// r1 := (a1 > b1) ? 0xffffffff : 0x0
-// r2 := (a2 > b2) ? 0xffffffff : 0x0
-// r3 := (a3 > b3) ? 0xffffffff : 0x0
+// r0 := w
+// r1 := w
+// ...
+// r7 := w
//
-// https://msdn.microsoft.com/en-us/library/vstudio/11dy102s(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_cmpgt_ps(__m128 a, __m128 b)
+// https://msdn.microsoft.com/en-us/library/k0ya3x0e(v=vs.90).aspx
+FORCE_INLINE __m128i _mm_set1_epi16(short w)
{
- return vreinterpretq_m128_u32(
- vcgtq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
+ return vreinterpretq_m128i_s16(vdupq_n_s16(w));
}
-// Compares for greater than.
-// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/1xyyyy9e(v=vs.100)
-FORCE_INLINE __m128 _mm_cmpgt_ss(__m128 a, __m128 b)
+// Sets the 4 signed 32-bit integer values to i.
+//
+// r0 := i
+// r1 := i
+// r2 := i
+// r3 := i
+//
+// https://msdn.microsoft.com/en-us/library/vstudio/h4xscxat(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_set1_epi32(int _i)
{
- return _mm_move_ss(a, _mm_cmpgt_ps(a, b));
+ return vreinterpretq_m128i_s32(vdupq_n_s32(_i));
}
-// Compares for greater than or equal.
-// https://msdn.microsoft.com/en-us/library/vstudio/fs813y2t(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_cmpge_ps(__m128 a, __m128 b)
+// Sets the 2 signed 64-bit integer values to i.
+// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/whtfzhzk(v=vs.100)
+FORCE_INLINE __m128i _mm_set1_epi64(__m64 _i)
{
- return vreinterpretq_m128_u32(
- vcgeq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
+ return vreinterpretq_m128i_s64(vdupq_n_s64((int64_t) _i));
}
-// Compares for greater than or equal.
-// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/kesh3ddc(v=vs.100)
-FORCE_INLINE __m128 _mm_cmpge_ss(__m128 a, __m128 b)
+// Sets the 2 signed 64-bit integer values to i.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_set1_epi64x
+FORCE_INLINE __m128i _mm_set1_epi64x(int64_t _i)
{
- return _mm_move_ss(a, _mm_cmpge_ps(a, b));
+ return vreinterpretq_m128i_s64(vdupq_n_s64(_i));
}
-// Compares for less than or equal.
+// Sets the 16 signed 8-bit integer values to b.
//
-// r0 := (a0 <= b0) ? 0xffffffff : 0x0
-// r1 := (a1 <= b1) ? 0xffffffff : 0x0
-// r2 := (a2 <= b2) ? 0xffffffff : 0x0
-// r3 := (a3 <= b3) ? 0xffffffff : 0x0
+// r0 := b
+// r1 := b
+// ...
+// r15 := b
//
-// https://msdn.microsoft.com/en-us/library/vstudio/1s75w83z(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_cmple_ps(__m128 a, __m128 b)
+// https://msdn.microsoft.com/en-us/library/6e14xhyf(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_set1_epi8(signed char w)
{
- return vreinterpretq_m128_u32(
- vcleq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
+ return vreinterpretq_m128i_s8(vdupq_n_s8(w));
}
-// Compares for less than or equal.
-// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/a7x0hbhw(v=vs.100)
-FORCE_INLINE __m128 _mm_cmple_ss(__m128 a, __m128 b)
+// Broadcast double-precision (64-bit) floating-point value a to all elements of
+// dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_set1_pd
+FORCE_INLINE __m128d _mm_set1_pd(double d)
{
- return _mm_move_ss(a, _mm_cmple_ps(a, b));
+#if defined(__aarch64__)
+ return vreinterpretq_m128d_f64(vdupq_n_f64(d));
+#else
+ return vreinterpretq_m128d_s64(vdupq_n_s64(*(int64_t *) &d));
+#endif
}
-// Compares for equality.
-// https://msdn.microsoft.com/en-us/library/vstudio/36aectz5(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_cmpeq_ps(__m128 a, __m128 b)
+// Sets the 8 signed 16-bit integer values in reverse order.
+//
+// Return Value
+// r0 := w0
+// r1 := w1
+// ...
+// r7 := w7
+FORCE_INLINE __m128i _mm_setr_epi16(short w0,
+ short w1,
+ short w2,
+ short w3,
+ short w4,
+ short w5,
+ short w6,
+ short w7)
{
- return vreinterpretq_m128_u32(
- vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
+ int16_t ALIGN_STRUCT(16) data[8] = {w0, w1, w2, w3, w4, w5, w6, w7};
+ return vreinterpretq_m128i_s16(vld1q_s16((int16_t *) data));
}
-// Compares for equality.
-// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/k423z28e(v=vs.100)
-FORCE_INLINE __m128 _mm_cmpeq_ss(__m128 a, __m128 b)
+// Sets the 4 signed 32-bit integer values in reverse order
+// https://technet.microsoft.com/en-us/library/security/27yb3ee5(v=vs.90).aspx
+FORCE_INLINE __m128i _mm_setr_epi32(int i3, int i2, int i1, int i0)
{
- return _mm_move_ss(a, _mm_cmpeq_ps(a, b));
+ int32_t ALIGN_STRUCT(16) data[4] = {i3, i2, i1, i0};
+ return vreinterpretq_m128i_s32(vld1q_s32(data));
}
-// Compares for inequality.
-// https://msdn.microsoft.com/en-us/library/sf44thbx(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_cmpneq_ps(__m128 a, __m128 b)
+// Set packed 64-bit integers in dst with the supplied values in reverse order.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_setr_epi64
+FORCE_INLINE __m128i _mm_setr_epi64(__m64 e1, __m64 e0)
{
- return vreinterpretq_m128_u32(vmvnq_u32(
- vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b))));
+ return vreinterpretq_m128i_s64(vcombine_s64(e1, e0));
}
-// Compares for inequality.
-// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/ekya8fh4(v=vs.100)
-FORCE_INLINE __m128 _mm_cmpneq_ss(__m128 a, __m128 b)
+// Sets the 16 signed 8-bit integer values in reverse order.
+// https://msdn.microsoft.com/en-us/library/2khb9c7k(v=vs.90).aspx
+FORCE_INLINE __m128i _mm_setr_epi8(signed char b0,
+ signed char b1,
+ signed char b2,
+ signed char b3,
+ signed char b4,
+ signed char b5,
+ signed char b6,
+ signed char b7,
+ signed char b8,
+ signed char b9,
+ signed char b10,
+ signed char b11,
+ signed char b12,
+ signed char b13,
+ signed char b14,
+ signed char b15)
{
- return _mm_move_ss(a, _mm_cmpneq_ps(a, b));
+ int8_t ALIGN_STRUCT(16)
+ data[16] = {(int8_t) b0, (int8_t) b1, (int8_t) b2, (int8_t) b3,
+ (int8_t) b4, (int8_t) b5, (int8_t) b6, (int8_t) b7,
+ (int8_t) b8, (int8_t) b9, (int8_t) b10, (int8_t) b11,
+ (int8_t) b12, (int8_t) b13, (int8_t) b14, (int8_t) b15};
+ return (__m128i) vld1q_s8(data);
}
-// Compares for not greater than or equal.
-// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/wsexys62(v=vs.100)
-FORCE_INLINE __m128 _mm_cmpnge_ps(__m128 a, __m128 b)
+// Set packed double-precision (64-bit) floating-point elements in dst with the
+// supplied values in reverse order.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_setr_pd
+FORCE_INLINE __m128d _mm_setr_pd(double e1, double e0)
{
- return _mm_cmplt_ps(a, b);
+ return _mm_set_pd(e0, e1);
}
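// Usage sketch: _mm_setr_pd takes its operands in memory order, i.e. it is
// _mm_set_pd with the arguments swapped:
//   _mm_setr_pd(x, y) == _mm_set_pd(y, x)  // x ends up in the low element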
-// Compares for not greater than or equal.
-// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/fk2y80s8(v=vs.100)
-FORCE_INLINE __m128 _mm_cmpnge_ss(__m128 a, __m128 b)
+// Return vector of type __m128d with all elements set to zero.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_setzero_pd
+FORCE_INLINE __m128d _mm_setzero_pd(void)
{
- return _mm_cmplt_ss(a, b);
+#if defined(__aarch64__)
+ return vreinterpretq_m128d_f64(vdupq_n_f64(0));
+#else
+ return vreinterpretq_m128d_f32(vdupq_n_f32(0));
+#endif
}
-// Compares for not greater than.
-// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/d0xh7w0s(v=vs.100)
-FORCE_INLINE __m128 _mm_cmpngt_ps(__m128 a, __m128 b)
+// Sets the 128-bit value to zero
+// https://msdn.microsoft.com/en-us/library/vstudio/ys7dw0kh(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_setzero_si128(void)
{
- return _mm_cmple_ps(a, b);
+ return vreinterpretq_m128i_s32(vdupq_n_s32(0));
}
-// Compares for not greater than.
-// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/z7x9ydwh(v=vs.100)
-FORCE_INLINE __m128 _mm_cmpngt_ss(__m128 a, __m128 b)
+// Shuffles the 4 signed or unsigned 32-bit integers in a as specified by imm.
+// https://msdn.microsoft.com/en-us/library/56f67xbk%28v=vs.90%29.aspx
+// FORCE_INLINE __m128i _mm_shuffle_epi32(__m128i a,
+// __constrange(0,255) int imm)
+#if __has_builtin(__builtin_shufflevector)
+#define _mm_shuffle_epi32(a, imm) \
+ __extension__({ \
+ int32x4_t _input = vreinterpretq_s32_m128i(a); \
+ int32x4_t _shuf = __builtin_shufflevector( \
+ _input, _input, (imm) & (0x3), ((imm) >> 2) & 0x3, \
+ ((imm) >> 4) & 0x3, ((imm) >> 6) & 0x3); \
+ vreinterpretq_m128i_s32(_shuf); \
+ })
+#else // generic
+#define _mm_shuffle_epi32(a, imm) \
+ __extension__({ \
+ __m128i ret; \
+ switch (imm) { \
+ case _MM_SHUFFLE(1, 0, 3, 2): \
+ ret = _mm_shuffle_epi_1032((a)); \
+ break; \
+ case _MM_SHUFFLE(2, 3, 0, 1): \
+ ret = _mm_shuffle_epi_2301((a)); \
+ break; \
+ case _MM_SHUFFLE(0, 3, 2, 1): \
+ ret = _mm_shuffle_epi_0321((a)); \
+ break; \
+ case _MM_SHUFFLE(2, 1, 0, 3): \
+ ret = _mm_shuffle_epi_2103((a)); \
+ break; \
+ case _MM_SHUFFLE(1, 0, 1, 0): \
+ ret = _mm_shuffle_epi_1010((a)); \
+ break; \
+ case _MM_SHUFFLE(1, 0, 0, 1): \
+ ret = _mm_shuffle_epi_1001((a)); \
+ break; \
+ case _MM_SHUFFLE(0, 1, 0, 1): \
+ ret = _mm_shuffle_epi_0101((a)); \
+ break; \
+ case _MM_SHUFFLE(2, 2, 1, 1): \
+ ret = _mm_shuffle_epi_2211((a)); \
+ break; \
+ case _MM_SHUFFLE(0, 1, 2, 2): \
+ ret = _mm_shuffle_epi_0122((a)); \
+ break; \
+ case _MM_SHUFFLE(3, 3, 3, 2): \
+ ret = _mm_shuffle_epi_3332((a)); \
+ break; \
+ case _MM_SHUFFLE(0, 0, 0, 0): \
+ ret = _mm_shuffle_epi32_splat((a), 0); \
+ break; \
+ case _MM_SHUFFLE(1, 1, 1, 1): \
+ ret = _mm_shuffle_epi32_splat((a), 1); \
+ break; \
+ case _MM_SHUFFLE(2, 2, 2, 2): \
+ ret = _mm_shuffle_epi32_splat((a), 2); \
+ break; \
+ case _MM_SHUFFLE(3, 3, 3, 3): \
+ ret = _mm_shuffle_epi32_splat((a), 3); \
+ break; \
+ default: \
+ ret = _mm_shuffle_epi32_default((a), (imm)); \
+ break; \
+ } \
+ ret; \
+ })
+#endif
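// Usage sketch: the immediate is normally built with _MM_SHUFFLE(d3, d2, d1, d0),
// where each field selects the source lane for the corresponding destination lane:
//   __m128i rev   = _mm_shuffle_epi32(v, _MM_SHUFFLE(0, 1, 2, 3));  // reverse the four lanes
//   __m128i splat = _mm_shuffle_epi32(v, _MM_SHUFFLE(2, 2, 2, 2));  // broadcast lane 2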
+
+// Shuffle double-precision (64-bit) floating-point elements using the control
+// in imm8, and store the results in dst.
+//
+// dst[63:0] := (imm8[0] == 0) ? a[63:0] : a[127:64]
+// dst[127:64] := (imm8[1] == 0) ? b[63:0] : b[127:64]
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_shuffle_pd
+#if __has_builtin(__builtin_shufflevector)
+#define _mm_shuffle_pd(a, b, imm8) \
+ vreinterpretq_m128d_s64(__builtin_shufflevector( \
+ vreinterpretq_s64_m128d(a), vreinterpretq_s64_m128d(b), imm8 & 0x1, \
+ ((imm8 & 0x2) >> 1) + 2))
+#else
+#define _mm_shuffle_pd(a, b, imm8) \
+ _mm_castsi128_pd(_mm_set_epi64x( \
+ vgetq_lane_s64(vreinterpretq_s64_m128d(b), (imm8 & 0x2) >> 1), \
+ vgetq_lane_s64(vreinterpretq_s64_m128d(a), imm8 & 0x1)))
+#endif
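// Usage sketch: bit 0 of imm8 picks the element of a for the low lane and
// bit 1 picks the element of b for the high lane, so swapping the halves of
// a single vector is:
//   __m128d swapped = _mm_shuffle_pd(v, v, 1);  // low = v[1], high = v[0]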
+
+// FORCE_INLINE __m128i _mm_shufflehi_epi16(__m128i a,
+// __constrange(0,255) int imm)
+#if __has_builtin(__builtin_shufflevector)
+#define _mm_shufflehi_epi16(a, imm) \
+ __extension__({ \
+ int16x8_t _input = vreinterpretq_s16_m128i(a); \
+ int16x8_t _shuf = __builtin_shufflevector( \
+ _input, _input, 0, 1, 2, 3, ((imm) & (0x3)) + 4, \
+ (((imm) >> 2) & 0x3) + 4, (((imm) >> 4) & 0x3) + 4, \
+ (((imm) >> 6) & 0x3) + 4); \
+ vreinterpretq_m128i_s16(_shuf); \
+ })
+#else // generic
+#define _mm_shufflehi_epi16(a, imm) _mm_shufflehi_epi16_function((a), (imm))
+#endif
+
+// FORCE_INLINE __m128i _mm_shufflelo_epi16(__m128i a,
+// __constrange(0,255) int imm)
+#if __has_builtin(__builtin_shufflevector)
+#define _mm_shufflelo_epi16(a, imm) \
+ __extension__({ \
+ int16x8_t _input = vreinterpretq_s16_m128i(a); \
+ int16x8_t _shuf = __builtin_shufflevector( \
+ _input, _input, ((imm) & (0x3)), (((imm) >> 2) & 0x3), \
+ (((imm) >> 4) & 0x3), (((imm) >> 6) & 0x3), 4, 5, 6, 7); \
+ vreinterpretq_m128i_s16(_shuf); \
+ })
+#else // generic
+#define _mm_shufflelo_epi16(a, imm) _mm_shufflelo_epi16_function((a), (imm))
+#endif
+
+// Shift packed 16-bit integers in a left by count while shifting in zeros, and
+// store the results in dst.
+//
+// FOR j := 0 to 7
+// i := j*16
+// IF count[63:0] > 15
+// dst[i+15:i] := 0
+// ELSE
+// dst[i+15:i] := ZeroExtend16(a[i+15:i] << count[63:0])
+// FI
+// ENDFOR
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sll_epi16
+FORCE_INLINE __m128i _mm_sll_epi16(__m128i a, __m128i count)
{
- return _mm_cmple_ss(a, b);
+ uint64_t c = vreinterpretq_nth_u64_m128i(count, 0);
+ if (_sse2neon_unlikely(c & ~15))
+ return _mm_setzero_si128();
+
+ int16x8_t vc = vdupq_n_s16((int16_t) c);
+ return vreinterpretq_m128i_s16(vshlq_s16(vreinterpretq_s16_m128i(a), vc));
}
-// Compares for not less than or equal.
-// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/6a330kxw(v=vs.100)
-FORCE_INLINE __m128 _mm_cmpnle_ps(__m128 a, __m128 b)
+// Shift packed 32-bit integers in a left by count while shifting in zeros, and
+// store the results in dst.
+//
+// FOR j := 0 to 3
+// i := j*32
+// IF count[63:0] > 31
+// dst[i+31:i] := 0
+// ELSE
+// dst[i+31:i] := ZeroExtend32(a[i+31:i] << count[63:0])
+// FI
+// ENDFOR
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sll_epi32
+FORCE_INLINE __m128i _mm_sll_epi32(__m128i a, __m128i count)
{
- return _mm_cmpgt_ps(a, b);
+ uint64_t c = vreinterpretq_nth_u64_m128i(count, 0);
+ if (_sse2neon_unlikely(c & ~31))
+ return _mm_setzero_si128();
+
+ int32x4_t vc = vdupq_n_s32((int32_t) c);
+ return vreinterpretq_m128i_s32(vshlq_s32(vreinterpretq_s32_m128i(a), vc));
}
-// Compares for not less than or equal.
-// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/z7x9ydwh(v=vs.100)
-FORCE_INLINE __m128 _mm_cmpnle_ss(__m128 a, __m128 b)
+// Shift packed 64-bit integers in a left by count while shifting in zeros, and
+// store the results in dst.
+//
+// FOR j := 0 to 1
+// i := j*64
+// IF count[63:0] > 63
+// dst[i+63:i] := 0
+// ELSE
+// dst[i+63:i] := ZeroExtend64(a[i+63:i] << count[63:0])
+// FI
+// ENDFOR
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sll_epi64
+FORCE_INLINE __m128i _mm_sll_epi64(__m128i a, __m128i count)
{
- return _mm_cmpgt_ss(a, b);
+ uint64_t c = vreinterpretq_nth_u64_m128i(count, 0);
+ if (_sse2neon_unlikely(c & ~63))
+ return _mm_setzero_si128();
+
+ int64x2_t vc = vdupq_n_s64((int64_t) c);
+ return vreinterpretq_m128i_s64(vshlq_s64(vreinterpretq_s64_m128i(a), vc));
}
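// Usage sketch (assuming _mm_set1_epi32 and _mm_cvtsi32_si128 defined
// elsewhere in this header): the _mm_sll_* family reads the shift count from
// the low 64 bits of the count vector, and any count past the element width
// zeroes the result:
//   __m128i one = _mm_set1_epi32(1);
//   __m128i by5 = _mm_sll_epi32(one, _mm_cvtsi32_si128(5));   // every lane == 32
//   __m128i big = _mm_sll_epi32(one, _mm_cvtsi32_si128(40));  // all zeros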
-// Compares for not less than.
-// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/4686bbdw(v=vs.100)
-FORCE_INLINE __m128 _mm_cmpnlt_ps(__m128 a, __m128 b)
+// Shift packed 16-bit integers in a left by imm8 while shifting in zeros, and
+// store the results in dst.
+//
+// FOR j := 0 to 7
+// i := j*16
+// IF imm8[7:0] > 15
+// dst[i+15:i] := 0
+// ELSE
+// dst[i+15:i] := ZeroExtend16(a[i+15:i] << imm8[7:0])
+// FI
+// ENDFOR
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_slli_epi16
+FORCE_INLINE __m128i _mm_slli_epi16(__m128i a, int imm)
{
- return _mm_cmpge_ps(a, b);
+ if (_sse2neon_unlikely(imm & ~15))
+ return _mm_setzero_si128();
+ return vreinterpretq_m128i_s16(
+ vshlq_s16(vreinterpretq_s16_m128i(a), vdupq_n_s16(imm)));
}
-// Compares for not less than.
-// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/56b9z2wf(v=vs.100)
-FORCE_INLINE __m128 _mm_cmpnlt_ss(__m128 a, __m128 b)
+// Shift packed 32-bit integers in a left by imm8 while shifting in zeros, and
+// store the results in dst.
+//
+// FOR j := 0 to 3
+// i := j*32
+// IF imm8[7:0] > 31
+// dst[i+31:i] := 0
+// ELSE
+// dst[i+31:i] := ZeroExtend32(a[i+31:i] << imm8[7:0])
+// FI
+// ENDFOR
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_slli_epi32
+FORCE_INLINE __m128i _mm_slli_epi32(__m128i a, int imm)
{
- return _mm_cmpge_ss(a, b);
+ if (_sse2neon_unlikely(imm & ~31))
+ return _mm_setzero_si128();
+ return vreinterpretq_m128i_s32(
+ vshlq_s32(vreinterpretq_s32_m128i(a), vdupq_n_s32(imm)));
}
-// Compares the 16 signed or unsigned 8-bit integers in a and the 16 signed or
-// unsigned 8-bit integers in b for equality.
-// https://msdn.microsoft.com/en-us/library/windows/desktop/bz5xk21a(v=vs.90).aspx
-FORCE_INLINE __m128i _mm_cmpeq_epi8(__m128i a, __m128i b)
+// Shift packed 64-bit integers in a left by imm8 while shifting in zeros, and
+// store the results in dst.
+//
+// FOR j := 0 to 1
+// i := j*64
+// IF imm8[7:0] > 63
+// dst[i+63:i] := 0
+// ELSE
+// dst[i+63:i] := ZeroExtend64(a[i+63:i] << imm8[7:0])
+// FI
+// ENDFOR
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_slli_epi64
+FORCE_INLINE __m128i _mm_slli_epi64(__m128i a, int imm)
+{
+ if (_sse2neon_unlikely(imm & ~63))
+ return _mm_setzero_si128();
+ return vreinterpretq_m128i_s64(
+ vshlq_s64(vreinterpretq_s64_m128i(a), vdupq_n_s64(imm)));
+}
+
+// Shift a left by imm8 bytes while shifting in zeros, and store the results in
+// dst.
+//
+// tmp := imm8[7:0]
+// IF tmp > 15
+// tmp := 16
+// FI
+// dst[127:0] := a[127:0] << (tmp*8)
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_slli_si128
+FORCE_INLINE __m128i _mm_slli_si128(__m128i a, int imm)
{
+ if (_sse2neon_unlikely(imm & ~15))
+ return _mm_setzero_si128();
+ uint8x16_t tmp[2] = {vdupq_n_u8(0), vreinterpretq_u8_m128i(a)};
return vreinterpretq_m128i_u8(
- vceqq_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b)));
+ vld1q_u8(((uint8_t const *) tmp) + (16 - imm)));
}
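// Usage sketch: the two-entry array above lays {zeros, a} out as 32
// contiguous bytes, so reloading at offset 16 - imm slides a up by imm
// bytes, i.e. a whole-register byte shift left:
//   __m128i r = _mm_slli_si128(v, 4);  // bytes 0..3 become 0, old byte 0 moves to byte 4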
-// Compare packed double-precision (64-bit) floating-point elements in a and b
-// for equality, and store the results in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpeq_pd
-FORCE_INLINE __m128d _mm_cmpeq_pd(__m128d a, __m128d b)
+// Compute the square root of packed double-precision (64-bit) floating-point
+// elements in a, and store the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sqrt_pd
+FORCE_INLINE __m128d _mm_sqrt_pd(__m128d a)
{
#if defined(__aarch64__)
- return vreinterpretq_m128d_u64(
- vceqq_f64(vreinterpretq_f64_m128d(a), vreinterpretq_f64_m128d(b)));
+ return vreinterpretq_m128d_f64(vsqrtq_f64(vreinterpretq_f64_m128d(a)));
#else
- // (a == b) -> (a_lo == b_lo) && (a_hi == b_hi)
- uint32x4_t cmp =
- vceqq_u32(vreinterpretq_u32_m128d(a), vreinterpretq_u32_m128d(b));
- uint32x4_t swapped = vrev64q_u32(cmp);
- return vreinterpretq_m128d_u32(vandq_u32(cmp, swapped));
+ double a0 = sqrt(((double *) &a)[0]);
+ double a1 = sqrt(((double *) &a)[1]);
+ return _mm_set_pd(a1, a0);
#endif
}
-// Compares the 8 signed or unsigned 16-bit integers in a and the 8 signed or
-// unsigned 16-bit integers in b for equality.
-// https://msdn.microsoft.com/en-us/library/2ay060te(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_cmpeq_epi16(__m128i a, __m128i b)
+// Compute the square root of the lower double-precision (64-bit) floating-point
+// element in b, store the result in the lower element of dst, and copy the
+// upper element from a to the upper element of dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sqrt_sd
+FORCE_INLINE __m128d _mm_sqrt_sd(__m128d a, __m128d b)
{
- return vreinterpretq_m128i_u16(
- vceqq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
+#if defined(__aarch64__)
+ return _mm_move_sd(a, _mm_sqrt_pd(b));
+#else
+ return _mm_set_pd(((double *) &a)[1], sqrt(((double *) &b)[0]));
+#endif
}
-// Compare packed 32-bit integers in a and b for equality, and store the results
-// in dst
-FORCE_INLINE __m128i _mm_cmpeq_epi32(__m128i a, __m128i b)
+// Shift packed 16-bit integers in a right by count while shifting in sign bits,
+// and store the results in dst.
+//
+// FOR j := 0 to 7
+// i := j*16
+// IF count[63:0] > 15
+// dst[i+15:i] := (a[i+15] ? 0xFFFF : 0x0)
+// ELSE
+// dst[i+15:i] := SignExtend16(a[i+15:i] >> count[63:0])
+// FI
+// ENDFOR
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sra_epi16
+FORCE_INLINE __m128i _mm_sra_epi16(__m128i a, __m128i count)
{
- return vreinterpretq_m128i_u32(
- vceqq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
+ int64_t c = (int64_t) vget_low_s64((int64x2_t) count);
+ if (_sse2neon_unlikely(c & ~15))
+ return _mm_cmplt_epi16(a, _mm_setzero_si128());
+ return vreinterpretq_m128i_s16(vshlq_s16((int16x8_t) a, vdupq_n_s16(-c)));
}
-// Compare packed 64-bit integers in a and b for equality, and store the results
-// in dst
-FORCE_INLINE __m128i _mm_cmpeq_epi64(__m128i a, __m128i b)
+// Shift packed 32-bit integers in a right by count while shifting in sign bits,
+// and store the results in dst.
+//
+// FOR j := 0 to 3
+// i := j*32
+// IF count[63:0] > 31
+// dst[i+31:i] := (a[i+31] ? 0xFFFFFFFF : 0x0)
+// ELSE
+// dst[i+31:i] := SignExtend32(a[i+31:i] >> count[63:0])
+// FI
+// ENDFOR
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sra_epi32
+FORCE_INLINE __m128i _mm_sra_epi32(__m128i a, __m128i count)
{
-#if defined(__aarch64__)
- return vreinterpretq_m128i_u64(
- vceqq_u64(vreinterpretq_u64_m128i(a), vreinterpretq_u64_m128i(b)));
-#else
- // ARMv7 lacks vceqq_u64
- // (a == b) -> (a_lo == b_lo) && (a_hi == b_hi)
- uint32x4_t cmp =
- vceqq_u32(vreinterpretq_u32_m128i(a), vreinterpretq_u32_m128i(b));
- uint32x4_t swapped = vrev64q_u32(cmp);
- return vreinterpretq_m128i_u32(vandq_u32(cmp, swapped));
-#endif
+ int64_t c = (int64_t) vget_low_s64((int64x2_t) count);
+ if (_sse2neon_unlikely(c & ~31))
+ return _mm_cmplt_epi32(a, _mm_setzero_si128());
+ return vreinterpretq_m128i_s32(vshlq_s32((int32x4_t) a, vdupq_n_s32(-c)));
}
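// Usage sketch (assuming _mm_set1_epi32 and _mm_cvtsi32_si128 defined
// elsewhere in this header): the arithmetic shifts replicate the sign bit,
// unlike the logical _mm_srl_* forms, and an oversized count leaves only the
// sign:
//   __m128i v  = _mm_set1_epi32(-8);
//   __m128i r2 = _mm_sra_epi32(v, _mm_cvtsi32_si128(2));   // every lane == -2
//   __m128i rs = _mm_sra_epi32(v, _mm_cvtsi32_si128(99));  // every lane == -1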
-// Compares the 16 signed 8-bit integers in a and the 16 signed 8-bit integers
-// in b for lesser than.
-// https://msdn.microsoft.com/en-us/library/windows/desktop/9s46csht(v=vs.90).aspx
-FORCE_INLINE __m128i _mm_cmplt_epi8(__m128i a, __m128i b)
+// Shift packed 16-bit integers in a right by imm8 while shifting in sign
+// bits, and store the results in dst.
+//
+// FOR j := 0 to 7
+// i := j*16
+// IF imm8[7:0] > 15
+// dst[i+15:i] := (a[i+15] ? 0xFFFF : 0x0)
+// ELSE
+// dst[i+15:i] := SignExtend16(a[i+15:i] >> imm8[7:0])
+// FI
+// ENDFOR
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_srai_epi16
+FORCE_INLINE __m128i _mm_srai_epi16(__m128i a, int imm)
{
- return vreinterpretq_m128i_u8(
- vcltq_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b)));
+ const int count = (imm & ~15) ? 15 : imm;
+ return (__m128i) vshlq_s16((int16x8_t) a, vdupq_n_s16(-count));
}
-// Compares the 16 signed 8-bit integers in a and the 16 signed 8-bit integers
-// in b for greater than.
+// Shift packed 32-bit integers in a right by imm8 while shifting in sign bits,
+// and store the results in dst.
//
-// r0 := (a0 > b0) ? 0xff : 0x0
-// r1 := (a1 > b1) ? 0xff : 0x0
-// ...
-// r15 := (a15 > b15) ? 0xff : 0x0
+// FOR j := 0 to 3
+// i := j*32
+// IF imm8[7:0] > 31
+// dst[i+31:i] := (a[i+31] ? 0xFFFFFFFF : 0x0)
+// ELSE
+// dst[i+31:i] := SignExtend32(a[i+31:i] >> imm8[7:0])
+// FI
+// ENDFOR
//
-// https://msdn.microsoft.com/zh-tw/library/wf45zt2b(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_cmpgt_epi8(__m128i a, __m128i b)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_srai_epi32
+// FORCE_INLINE __m128i _mm_srai_epi32(__m128i a, __constrange(0,255) int imm)
+#define _mm_srai_epi32(a, imm) \
+ __extension__({ \
+ __m128i ret; \
+ if (_sse2neon_unlikely((imm) == 0)) { \
+ ret = a; \
+ } else if (_sse2neon_likely(0 < (imm) && (imm) < 32)) { \
+ ret = vreinterpretq_m128i_s32( \
+ vshlq_s32(vreinterpretq_s32_m128i(a), vdupq_n_s32(-imm))); \
+ } else { \
+ ret = vreinterpretq_m128i_s32( \
+ vshrq_n_s32(vreinterpretq_s32_m128i(a), 31)); \
+ } \
+ ret; \
+ })
+
+// Shift packed 16-bit integers in a right by count while shifting in zeros, and
+// store the results in dst.
+//
+// FOR j := 0 to 7
+// i := j*16
+// IF count[63:0] > 15
+// dst[i+15:i] := 0
+// ELSE
+// dst[i+15:i] := ZeroExtend16(a[i+15:i] >> count[63:0])
+// FI
+// ENDFOR
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_srl_epi16
+FORCE_INLINE __m128i _mm_srl_epi16(__m128i a, __m128i count)
{
- return vreinterpretq_m128i_u8(
- vcgtq_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b)));
+ uint64_t c = vreinterpretq_nth_u64_m128i(count, 0);
+ if (_sse2neon_unlikely(c & ~15))
+ return _mm_setzero_si128();
+
+ int16x8_t vc = vdupq_n_s16(-(int16_t) c);
+ return vreinterpretq_m128i_u16(vshlq_u16(vreinterpretq_u16_m128i(a), vc));
}
-// Compares the 8 signed 16-bit integers in a and the 8 signed 16-bit integers
-// in b for less than.
+// Shift packed 32-bit integers in a right by count while shifting in zeros, and
+// store the results in dst.
//
-// r0 := (a0 < b0) ? 0xffff : 0x0
-// r1 := (a1 < b1) ? 0xffff : 0x0
-// ...
-// r7 := (a7 < b7) ? 0xffff : 0x0
+// FOR j := 0 to 3
+// i := j*32
+// IF count[63:0] > 31
+// dst[i+31:i] := 0
+// ELSE
+// dst[i+31:i] := ZeroExtend32(a[i+31:i] >> count[63:0])
+// FI
+// ENDFOR
//
-// https://technet.microsoft.com/en-us/library/t863edb2(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_cmplt_epi16(__m128i a, __m128i b)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_srl_epi32
+FORCE_INLINE __m128i _mm_srl_epi32(__m128i a, __m128i count)
{
- return vreinterpretq_m128i_u16(
- vcltq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
+ uint64_t c = vreinterpretq_nth_u64_m128i(count, 0);
+ if (_sse2neon_unlikely(c & ~31))
+ return _mm_setzero_si128();
+
+ int32x4_t vc = vdupq_n_s32(-(int32_t) c);
+ return vreinterpretq_m128i_u32(vshlq_u32(vreinterpretq_u32_m128i(a), vc));
}
-// Compares the 8 signed 16-bit integers in a and the 8 signed 16-bit integers
-// in b for greater than.
+// Shift packed 64-bit integers in a right by count while shifting in zeros, and
+// store the results in dst.
//
-// r0 := (a0 > b0) ? 0xffff : 0x0
-// r1 := (a1 > b1) ? 0xffff : 0x0
-// ...
-// r7 := (a7 > b7) ? 0xffff : 0x0
+// FOR j := 0 to 1
+// i := j*64
+// IF count[63:0] > 63
+// dst[i+63:i] := 0
+// ELSE
+// dst[i+63:i] := ZeroExtend64(a[i+63:i] >> count[63:0])
+// FI
+// ENDFOR
//
-// https://technet.microsoft.com/en-us/library/xd43yfsa(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_cmpgt_epi16(__m128i a, __m128i b)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_srl_epi64
+FORCE_INLINE __m128i _mm_srl_epi64(__m128i a, __m128i count)
{
- return vreinterpretq_m128i_u16(
- vcgtq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
+ uint64_t c = vreinterpretq_nth_u64_m128i(count, 0);
+ if (_sse2neon_unlikely(c & ~63))
+ return _mm_setzero_si128();
+
+ int64x2_t vc = vdupq_n_s64(-(int64_t) c);
+ return vreinterpretq_m128i_u64(vshlq_u64(vreinterpretq_u64_m128i(a), vc));
}
+// Shift packed 16-bit integers in a right by imm8 while shifting in zeros, and
+// store the results in dst.
+//
+// FOR j := 0 to 7
+// i := j*16
+// IF imm8[7:0] > 15
+// dst[i+15:i] := 0
+// ELSE
+// dst[i+15:i] := ZeroExtend16(a[i+15:i] >> imm8[7:0])
+// FI
+// ENDFOR
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_srli_epi16
+#define _mm_srli_epi16(a, imm) \
+ __extension__({ \
+ __m128i ret; \
+ if (_sse2neon_unlikely(imm & ~15)) { \
+ ret = _mm_setzero_si128(); \
+ } else { \
+ ret = vreinterpretq_m128i_u16( \
+ vshlq_u16(vreinterpretq_u16_m128i(a), vdupq_n_s16(-imm))); \
+ } \
+ ret; \
+ })
+
+// Shift packed 32-bit integers in a right by imm8 while shifting in zeros, and
+// store the results in dst.
+//
+// FOR j := 0 to 3
+// i := j*32
+// IF imm8[7:0] > 31
+// dst[i+31:i] := 0
+// ELSE
+// dst[i+31:i] := ZeroExtend32(a[i+31:i] >> imm8[7:0])
+// FI
+// ENDFOR
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_srli_epi32
+// FORCE_INLINE __m128i _mm_srli_epi32(__m128i a, __constrange(0,255) int imm)
+#define _mm_srli_epi32(a, imm) \
+ __extension__({ \
+ __m128i ret; \
+ if (_sse2neon_unlikely(imm & ~31)) { \
+ ret = _mm_setzero_si128(); \
+ } else { \
+ ret = vreinterpretq_m128i_u32( \
+ vshlq_u32(vreinterpretq_u32_m128i(a), vdupq_n_s32(-imm))); \
+ } \
+ ret; \
+ })
-// Compares the 4 signed 32-bit integers in a and the 4 signed 32-bit integers
-// in b for less than.
-// https://msdn.microsoft.com/en-us/library/vstudio/4ak0bf5d(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_cmplt_epi32(__m128i a, __m128i b)
+// Shift packed 64-bit integers in a right by imm8 while shifting in zeros, and
+// store the results in dst.
+//
+// FOR j := 0 to 1
+// i := j*64
+// IF imm8[7:0] > 63
+// dst[i+63:i] := 0
+// ELSE
+// dst[i+63:i] := ZeroExtend64(a[i+63:i] >> imm8[7:0])
+// FI
+// ENDFOR
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_srli_epi64
+#define _mm_srli_epi64(a, imm) \
+ __extension__({ \
+ __m128i ret; \
+ if (_sse2neon_unlikely(imm & ~63)) { \
+ ret = _mm_setzero_si128(); \
+ } else { \
+ ret = vreinterpretq_m128i_u64( \
+ vshlq_u64(vreinterpretq_u64_m128i(a), vdupq_n_s64(-imm))); \
+ } \
+ ret; \
+ })
+
+// Shift a right by imm8 bytes while shifting in zeros, and store the results in
+// dst.
+//
+// tmp := imm8[7:0]
+// IF tmp > 15
+// tmp := 16
+// FI
+// dst[127:0] := a[127:0] >> (tmp*8)
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_srli_si128
+FORCE_INLINE __m128i _mm_srli_si128(__m128i a, int imm)
{
- return vreinterpretq_m128i_u32(
- vcltq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
+ if (_sse2neon_unlikely(imm & ~15))
+ return _mm_setzero_si128();
+ uint8x16_t tmp[2] = {vreinterpretq_u8_m128i(a), vdupq_n_u8(0)};
+ return vreinterpretq_m128i_u8(vld1q_u8(((uint8_t const *) tmp) + imm));
}
-// Compares the 4 signed 32-bit integers in a and the 4 signed 32-bit integers
-// in b for greater than.
-// https://msdn.microsoft.com/en-us/library/vstudio/1s9f2z0y(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_cmpgt_epi32(__m128i a, __m128i b)
+// Store 128-bits (composed of 2 packed double-precision (64-bit) floating-point
+// elements) from a into memory. mem_addr must be aligned on a 16-byte boundary
+// or a general-protection exception may be generated.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_store_pd
+FORCE_INLINE void _mm_store_pd(double *mem_addr, __m128d a)
{
- return vreinterpretq_m128i_u32(
- vcgtq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
+#if defined(__aarch64__)
+ vst1q_f64((float64_t *) mem_addr, vreinterpretq_f64_m128d(a));
+#else
+ vst1q_f32((float32_t *) mem_addr, vreinterpretq_f32_m128d(a));
+#endif
}
-// Compares the 2 signed 64-bit integers in a and the 2 signed 64-bit integers
-// in b for greater than.
-FORCE_INLINE __m128i _mm_cmpgt_epi64(__m128i a, __m128i b)
+// Store the lower double-precision (64-bit) floating-point element from a into
+// 2 contiguous elements in memory. mem_addr must be aligned on a 16-byte
+// boundary or a general-protection exception may be generated.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_store_pd1
+FORCE_INLINE void _mm_store_pd1(double *mem_addr, __m128d a)
{
#if defined(__aarch64__)
- return vreinterpretq_m128i_u64(
- vcgtq_s64(vreinterpretq_s64_m128i(a), vreinterpretq_s64_m128i(b)));
+ float64x1_t a_low = vget_low_f64(vreinterpretq_f64_m128d(a));
+ vst1q_f64((float64_t *) mem_addr,
+ vreinterpretq_f64_m128d(vcombine_f64(a_low, a_low)));
#else
- // ARMv7 lacks vcgtq_s64.
- // This is based off of Clang's SSE2 polyfill:
- // (a > b) -> ((a_hi > b_hi) || (a_lo > b_lo && a_hi == b_hi))
-
- // Mask the sign bit out since we need a signed AND an unsigned comparison
- // and it is ugly to try and split them.
- int32x4_t mask = vreinterpretq_s32_s64(vdupq_n_s64(0x80000000ull));
- int32x4_t a_mask = veorq_s32(vreinterpretq_s32_m128i(a), mask);
- int32x4_t b_mask = veorq_s32(vreinterpretq_s32_m128i(b), mask);
- // Check if a > b
- int64x2_t greater = vreinterpretq_s64_u32(vcgtq_s32(a_mask, b_mask));
- // Copy upper mask to lower mask
- // a_hi > b_hi
- int64x2_t gt_hi = vshrq_n_s64(greater, 63);
- // Copy lower mask to upper mask
- // a_lo > b_lo
- int64x2_t gt_lo = vsliq_n_s64(greater, greater, 32);
- // Compare for equality
- int64x2_t equal = vreinterpretq_s64_u32(vceqq_s32(a_mask, b_mask));
- // Copy upper mask to lower mask
- // a_hi == b_hi
- int64x2_t eq_hi = vshrq_n_s64(equal, 63);
- // a_hi > b_hi || (a_lo > b_lo && a_hi == b_hi)
- int64x2_t ret = vorrq_s64(gt_hi, vandq_s64(gt_lo, eq_hi));
- return vreinterpretq_m128i_s64(ret);
+ float32x2_t a_low = vget_low_f32(vreinterpretq_f32_m128d(a));
+ vst1q_f32((float32_t *) mem_addr,
+ vreinterpretq_f32_m128d(vcombine_f32(a_low, a_low)));
#endif
}
-// Compares the four 32-bit floats in a and b to check if any values are NaN.
-// Ordered compare between each value returns true for "orderable" and false for
-// "not orderable" (NaN).
-// https://msdn.microsoft.com/en-us/library/vstudio/0h9w00fx(v=vs.100).aspx see
-// also:
-// http://stackoverflow.com/questions/8627331/what-does-ordered-unordered-comparison-mean
-// http://stackoverflow.com/questions/29349621/neon-isnanval-intrinsics
-FORCE_INLINE __m128 _mm_cmpord_ps(__m128 a, __m128 b)
+// Store the lower double-precision (64-bit) floating-point element from a into
+// memory. mem_addr does not need to be aligned on any particular boundary.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=mm_store_sd
+FORCE_INLINE void _mm_store_sd(double *mem_addr, __m128d a)
{
- // Note: NEON does not have ordered compare builtin
- // Need to compare a eq a and b eq b to check for NaN
- // Do AND of results to get final
- uint32x4_t ceqaa =
- vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a));
- uint32x4_t ceqbb =
- vceqq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b));
- return vreinterpretq_m128_u32(vandq_u32(ceqaa, ceqbb));
+#if defined(__aarch64__)
+ vst1_f64((float64_t *) mem_addr, vget_low_f64(vreinterpretq_f64_m128d(a)));
+#else
+ vst1_u64((uint64_t *) mem_addr, vget_low_u64(vreinterpretq_u64_m128d(a)));
+#endif
}
-// Compares for ordered.
-// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/343t62da(v=vs.100)
-FORCE_INLINE __m128 _mm_cmpord_ss(__m128 a, __m128 b)
+// Stores four 32-bit integer values (as a __m128i value) at the address p.
+// https://msdn.microsoft.com/en-us/library/vstudio/edk11s13(v=vs.100).aspx
+FORCE_INLINE void _mm_store_si128(__m128i *p, __m128i a)
{
- return _mm_move_ss(a, _mm_cmpord_ps(a, b));
+ vst1q_s32((int32_t *) p, vreinterpretq_s32_m128i(a));
}
-// Compares for unordered.
-// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/khy6fk1t(v=vs.100)
-FORCE_INLINE __m128 _mm_cmpunord_ps(__m128 a, __m128 b)
+// Store the lower double-precision (64-bit) floating-point element from a into
+// 2 contiguous elements in memory. mem_addr must be aligned on a 16-byte
+// boundary or a general-protection exception may be generated.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#expand=9,526,5601&text=_mm_store1_pd
+#define _mm_store1_pd _mm_store_pd1
+
+// Store the upper double-precision (64-bit) floating-point element from a into
+// memory.
+//
+// MEM[mem_addr+63:mem_addr] := a[127:64]
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_storeh_pd
+FORCE_INLINE void _mm_storeh_pd(double *mem_addr, __m128d a)
{
- uint32x4_t f32a =
- vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a));
- uint32x4_t f32b =
- vceqq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b));
- return vreinterpretq_m128_u32(vmvnq_u32(vandq_u32(f32a, f32b)));
+#if defined(__aarch64__)
+ vst1_f64((float64_t *) mem_addr, vget_high_f64(vreinterpretq_f64_m128d(a)));
+#else
+ vst1_f32((float32_t *) mem_addr, vget_high_f32(vreinterpretq_f32_m128d(a)));
+#endif
}
-// Compares for unordered.
-// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/2as2387b(v=vs.100)
-FORCE_INLINE __m128 _mm_cmpunord_ss(__m128 a, __m128 b)
+// Reads the lower 64 bits of b and stores them into the lower 64 bits of a.
+// https://msdn.microsoft.com/en-us/library/hhwf428f%28v=vs.90%29.aspx
+FORCE_INLINE void _mm_storel_epi64(__m128i *a, __m128i b)
{
- return _mm_move_ss(a, _mm_cmpunord_ps(a, b));
+ uint64x1_t hi = vget_high_u64(vreinterpretq_u64_m128i(*a));
+ uint64x1_t lo = vget_low_u64(vreinterpretq_u64_m128i(b));
+ *a = vreinterpretq_m128i_u64(vcombine_u64(lo, hi));
}
-// Compares the lower single-precision floating point scalar values of a and b
-// using a less than operation. :
-// https://msdn.microsoft.com/en-us/library/2kwe606b(v=vs.90).aspx Important
-// note!! The documentation on MSDN is incorrect! If either of the values is a
-// NAN the docs say you will get a one, but in fact, it will return a zero!!
-FORCE_INLINE int _mm_comilt_ss(__m128 a, __m128 b)
+// Store the lower double-precision (64-bit) floating-point element from a into
+// memory.
+//
+// MEM[mem_addr+63:mem_addr] := a[63:0]
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_storel_pd
+FORCE_INLINE void _mm_storel_pd(double *mem_addr, __m128d a)
{
- uint32x4_t a_not_nan =
- vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a));
- uint32x4_t b_not_nan =
- vceqq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b));
- uint32x4_t a_and_b_not_nan = vandq_u32(a_not_nan, b_not_nan);
- uint32x4_t a_lt_b =
- vcltq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b));
- return (vgetq_lane_u32(vandq_u32(a_and_b_not_nan, a_lt_b), 0) != 0) ? 1 : 0;
+#if defined(__aarch64__)
+ vst1_f64((float64_t *) mem_addr, vget_low_f64(vreinterpretq_f64_m128d(a)));
+#else
+ vst1_f32((float32_t *) mem_addr, vget_low_f32(vreinterpretq_f32_m128d(a)));
+#endif
}
-// Compares the lower single-precision floating point scalar values of a and b
-// using a greater than operation. :
-// https://msdn.microsoft.com/en-us/library/b0738e0t(v=vs.100).aspx
-FORCE_INLINE int _mm_comigt_ss(__m128 a, __m128 b)
+// Store 2 double-precision (64-bit) floating-point elements from a into memory
+// in reverse order. mem_addr must be aligned on a 16-byte boundary or a
+// general-protection exception may be generated.
+//
+// MEM[mem_addr+63:mem_addr] := a[127:64]
+// MEM[mem_addr+127:mem_addr+64] := a[63:0]
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_storer_pd
+FORCE_INLINE void _mm_storer_pd(double *mem_addr, __m128d a)
{
- // return vgetq_lane_u32(vcgtq_f32(vreinterpretq_f32_m128(a),
- // vreinterpretq_f32_m128(b)), 0);
- uint32x4_t a_not_nan =
- vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a));
- uint32x4_t b_not_nan =
- vceqq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b));
- uint32x4_t a_and_b_not_nan = vandq_u32(a_not_nan, b_not_nan);
- uint32x4_t a_gt_b =
- vcgtq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b));
- return (vgetq_lane_u32(vandq_u32(a_and_b_not_nan, a_gt_b), 0) != 0) ? 1 : 0;
+ float32x4_t f = vreinterpretq_f32_m128d(a);
+ _mm_store_pd(mem_addr, vreinterpretq_m128d_f32(vextq_f32(f, f, 2)));
}
-// Compares the lower single-precision floating point scalar values of a and b
-// using a less than or equal operation. :
-// https://msdn.microsoft.com/en-us/library/1w4t7c57(v=vs.90).aspx
-FORCE_INLINE int _mm_comile_ss(__m128 a, __m128 b)
+// Store 128-bits (composed of 2 packed double-precision (64-bit) floating-point
+// elements) from a into memory. mem_addr does not need to be aligned on any
+// particular boundary.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_storeu_pd
+FORCE_INLINE void _mm_storeu_pd(double *mem_addr, __m128d a)
{
- // return vgetq_lane_u32(vcleq_f32(vreinterpretq_f32_m128(a),
- // vreinterpretq_f32_m128(b)), 0);
- uint32x4_t a_not_nan =
- vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a));
- uint32x4_t b_not_nan =
- vceqq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b));
- uint32x4_t a_and_b_not_nan = vandq_u32(a_not_nan, b_not_nan);
- uint32x4_t a_le_b =
- vcleq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b));
- return (vgetq_lane_u32(vandq_u32(a_and_b_not_nan, a_le_b), 0) != 0) ? 1 : 0;
+ _mm_store_pd(mem_addr, a);
}
-// Compares the lower single-precision floating point scalar values of a and b
-// using a greater than or equal operation. :
-// https://msdn.microsoft.com/en-us/library/8t80des6(v=vs.100).aspx
-FORCE_INLINE int _mm_comige_ss(__m128 a, __m128 b)
+// Stores 128-bits of integer data a at the address p.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_storeu_si128
+FORCE_INLINE void _mm_storeu_si128(__m128i *p, __m128i a)
{
- // return vgetq_lane_u32(vcgeq_f32(vreinterpretq_f32_m128(a),
- // vreinterpretq_f32_m128(b)), 0);
- uint32x4_t a_not_nan =
- vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a));
- uint32x4_t b_not_nan =
- vceqq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b));
- uint32x4_t a_and_b_not_nan = vandq_u32(a_not_nan, b_not_nan);
- uint32x4_t a_ge_b =
- vcgeq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b));
- return (vgetq_lane_u32(vandq_u32(a_and_b_not_nan, a_ge_b), 0) != 0) ? 1 : 0;
+ vst1q_s32((int32_t *) p, vreinterpretq_s32_m128i(a));
}
-// Compares the lower single-precision floating point scalar values of a and b
-// using an equality operation. :
-// https://msdn.microsoft.com/en-us/library/93yx2h2b(v=vs.100).aspx
-FORCE_INLINE int _mm_comieq_ss(__m128 a, __m128 b)
+// Stores 32-bits of integer data a at the address p.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_storeu_si32
+FORCE_INLINE void _mm_storeu_si32(void *p, __m128i a)
{
- // return vgetq_lane_u32(vceqq_f32(vreinterpretq_f32_m128(a),
- // vreinterpretq_f32_m128(b)), 0);
- uint32x4_t a_not_nan =
- vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a));
- uint32x4_t b_not_nan =
- vceqq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b));
- uint32x4_t a_and_b_not_nan = vandq_u32(a_not_nan, b_not_nan);
- uint32x4_t a_eq_b =
- vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b));
- return (vgetq_lane_u32(vandq_u32(a_and_b_not_nan, a_eq_b), 0) != 0) ? 1 : 0;
+ vst1q_lane_s32((int32_t *) p, vreinterpretq_s32_m128i(a), 0);
}
-// Compares the lower single-precision floating point scalar values of a and b
-// using an inequality operation. :
-// https://msdn.microsoft.com/en-us/library/bafh5e0a(v=vs.90).aspx
-FORCE_INLINE int _mm_comineq_ss(__m128 a, __m128 b)
+// Store 128-bits (composed of 2 packed double-precision (64-bit) floating-point
+// elements) from a into memory using a non-temporal memory hint. mem_addr must
+// be aligned on a 16-byte boundary or a general-protection exception may be
+// generated.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_stream_pd
+FORCE_INLINE void _mm_stream_pd(double *p, __m128d a)
{
- // return !vgetq_lane_u32(vceqq_f32(vreinterpretq_f32_m128(a),
- // vreinterpretq_f32_m128(b)), 0);
- uint32x4_t a_not_nan =
- vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a));
- uint32x4_t b_not_nan =
- vceqq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b));
- uint32x4_t a_or_b_nan = vmvnq_u32(vandq_u32(a_not_nan, b_not_nan));
- uint32x4_t a_neq_b = vmvnq_u32(
- vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
- return (vgetq_lane_u32(vorrq_u32(a_or_b_nan, a_neq_b), 0) != 0) ? 1 : 0;
+#if __has_builtin(__builtin_nontemporal_store)
+ __builtin_nontemporal_store(a, (float32x4_t *) p);
+#elif defined(__aarch64__)
+ vst1q_f64(p, vreinterpretq_f64_m128d(a));
+#else
+ vst1q_s64((int64_t *) p, vreinterpretq_s64_m128d(a));
+#endif
}
-// according to the documentation, these intrinsics behave the same as the
-// non-'u' versions. We'll just alias them here.
-#define _mm_ucomieq_ss _mm_comieq_ss
-#define _mm_ucomige_ss _mm_comige_ss
-#define _mm_ucomigt_ss _mm_comigt_ss
-#define _mm_ucomile_ss _mm_comile_ss
-#define _mm_ucomilt_ss _mm_comilt_ss
-#define _mm_ucomineq_ss _mm_comineq_ss
+// Stores the data in a to the address p without polluting the caches. If the
+// cache line containing address p is already in the cache, the cache will be
+// updated.
+// https://msdn.microsoft.com/en-us/library/ba08y07y%28v=vs.90%29.aspx
+FORCE_INLINE void _mm_stream_si128(__m128i *p, __m128i a)
+{
+#if __has_builtin(__builtin_nontemporal_store)
+ __builtin_nontemporal_store(a, p);
+#else
+ vst1q_s64((int64_t *) p, vreinterpretq_s64_m128i(a));
+#endif
+}
-/* Conversions */
+// Store 32-bit integer a into memory using a non-temporal hint to minimize
+// cache pollution. If the cache line containing address mem_addr is already in
+// the cache, the cache will be updated.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_stream_si32
+FORCE_INLINE void _mm_stream_si32(int *p, int a)
+{
+ vst1q_lane_s32((int32_t *) p, vdupq_n_s32(a), 0);
+}
-// Convert packed signed 32-bit integers in b to packed single-precision
-// (32-bit) floating-point elements, store the results in the lower 2 elements
-// of dst, and copy the upper 2 packed elements from a to the upper elements of
-// dst.
+// Subtract packed 16-bit integers in b from packed 16-bit integers in a, and
+// store the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sub_epi16
+FORCE_INLINE __m128i _mm_sub_epi16(__m128i a, __m128i b)
+{
+ return vreinterpretq_m128i_s16(
+ vsubq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
+}
+
+// Subtracts the 4 signed or unsigned 32-bit integers of b from the 4 signed or
+// unsigned 32-bit integers of a.
//
-// dst[31:0] := Convert_Int32_To_FP32(b[31:0])
-// dst[63:32] := Convert_Int32_To_FP32(b[63:32])
-// dst[95:64] := a[95:64]
-// dst[127:96] := a[127:96]
+// r0 := a0 - b0
+// r1 := a1 - b1
+// r2 := a2 - b2
+// r3 := a3 - b3
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvt_pi2ps
-FORCE_INLINE __m128 _mm_cvt_pi2ps(__m128 a, __m64 b)
+// https://msdn.microsoft.com/en-us/library/vstudio/fhh866h0(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_sub_epi32(__m128i a, __m128i b)
{
- return vreinterpretq_m128_f32(
- vcombine_f32(vcvt_f32_s32(vreinterpret_s32_m64(b)),
- vget_high_f32(vreinterpretq_f32_m128(a))));
+ return vreinterpretq_m128i_s32(
+ vsubq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
}
-// Convert the signed 32-bit integer b to a single-precision (32-bit)
-// floating-point element, store the result in the lower element of dst, and
-// copy the upper 3 packed elements from a to the upper elements of dst.
+// Subtract 2 packed 64-bit integers in b from 2 packed 64-bit integers in a,
+// and store the results in dst.
+// r0 := a0 - b0
+// r1 := a1 - b1
+FORCE_INLINE __m128i _mm_sub_epi64(__m128i a, __m128i b)
+{
+ return vreinterpretq_m128i_s64(
+ vsubq_s64(vreinterpretq_s64_m128i(a), vreinterpretq_s64_m128i(b)));
+}
+
+// Subtract packed 8-bit integers in b from packed 8-bit integers in a, and
+// store the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sub_epi8
+FORCE_INLINE __m128i _mm_sub_epi8(__m128i a, __m128i b)
+{
+ return vreinterpretq_m128i_s8(
+ vsubq_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b)));
+}
+
+// Subtract packed double-precision (64-bit) floating-point elements in b from
+// packed double-precision (64-bit) floating-point elements in a, and store the
+// results in dst.
//
-// dst[31:0] := Convert_Int32_To_FP32(b[31:0])
-// dst[127:32] := a[127:32]
+// FOR j := 0 to 1
+// i := j*64
+// dst[i+63:i] := a[i+63:i] - b[i+63:i]
+// ENDFOR
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvt_si2ss
-FORCE_INLINE __m128 _mm_cvt_si2ss(__m128 a, int b)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=mm_sub_pd
+FORCE_INLINE __m128d _mm_sub_pd(__m128d a, __m128d b)
{
- return vreinterpretq_m128_f32(
- vsetq_lane_f32((float) b, vreinterpretq_f32_m128(a), 0));
+#if defined(__aarch64__)
+ return vreinterpretq_m128d_f64(
+ vsubq_f64(vreinterpretq_f64_m128d(a), vreinterpretq_f64_m128d(b)));
+#else
+ double *da = (double *) &a;
+ double *db = (double *) &b;
+ double c[2];
+ c[0] = da[0] - db[0];
+ c[1] = da[1] - db[1];
+ return vld1q_f32((float32_t *) c);
+#endif
}
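+// Note: ARMv7-A NEON has no double-precision vector arithmetic, so the
+// fallback above performs the subtraction with scalar doubles and reloads the
+// result into the vector register.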
-// Convert the signed 32-bit integer b to a single-precision (32-bit)
-// floating-point element, store the result in the lower element of dst, and
-// copy the upper 3 packed elements from a to the upper elements of dst.
+// Subtract the lower double-precision (64-bit) floating-point element in b from
+// the lower double-precision (64-bit) floating-point element in a, store the
+// result in the lower element of dst, and copy the upper element from a to the
+// upper element of dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sub_sd
+FORCE_INLINE __m128d _mm_sub_sd(__m128d a, __m128d b)
+{
+ return _mm_move_sd(a, _mm_sub_pd(a, b));
+}
+
+// Subtract 64-bit integer b from 64-bit integer a, and store the result in dst.
//
-// dst[31:0] := Convert_Int32_To_FP32(b[31:0])
-// dst[127:32] := a[127:32]
+// dst[63:0] := a[63:0] - b[63:0]
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtsi32_ss
-#define _mm_cvtsi32_ss(a, b) _mm_cvt_si2ss(a, b)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sub_si64
+FORCE_INLINE __m64 _mm_sub_si64(__m64 a, __m64 b)
+{
+ return vreinterpret_m64_s64(
+ vsub_s64(vreinterpret_s64_m64(a), vreinterpret_s64_m64(b)));
+}
-// Convert the signed 64-bit integer b to a single-precision (32-bit)
-// floating-point element, store the result in the lower element of dst, and
-// copy the upper 3 packed elements from a to the upper elements of dst.
+// Subtracts the 8 signed 16-bit integers of b from the 8 signed 16-bit integers
+// of a and saturates.
//
-// dst[31:0] := Convert_Int64_To_FP32(b[63:0])
-// dst[127:32] := a[127:32]
+// r0 := SignedSaturate(a0 - b0)
+// r1 := SignedSaturate(a1 - b1)
+// ...
+// r7 := SignedSaturate(a7 - b7)
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtsi64_ss
-FORCE_INLINE __m128 _mm_cvtsi64_ss(__m128 a, int64_t b)
+// https://technet.microsoft.com/en-us/subscriptions/3247z5b8(v=vs.90)
+FORCE_INLINE __m128i _mm_subs_epi16(__m128i a, __m128i b)
{
- return vreinterpretq_m128_f32(
- vsetq_lane_f32((float) b, vreinterpretq_f32_m128(a), 0));
+ return vreinterpretq_m128i_s16(
+ vqsubq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
}
-// Convert the lower single-precision (32-bit) floating-point element in a to a
-// 32-bit integer, and store the result in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvt_ss2si
-FORCE_INLINE int _mm_cvt_ss2si(__m128 a)
+// Subtracts the 16 signed 8-bit integers of b from the 16 signed 8-bit integers
+// of a and saturates.
+//
+// r0 := SignedSaturate(a0 - b0)
+// r1 := SignedSaturate(a1 - b1)
+// ...
+// r15 := SignedSaturate(a15 - b15)
+//
+// https://technet.microsoft.com/en-us/subscriptions/by7kzks1(v=vs.90)
+FORCE_INLINE __m128i _mm_subs_epi8(__m128i a, __m128i b)
{
-#if defined(__aarch64__)
- return vgetq_lane_s32(vcvtnq_s32_f32(vreinterpretq_f32_m128(a)), 0);
-#else
- float32_t data = vgetq_lane_f32(vreinterpretq_f32_m128(a), 0);
- float32_t diff = data - floor(data);
- if (diff > 0.5)
- return (int32_t) ceil(data);
- if (unlikely(diff == 0.5)) {
- int32_t f = (int32_t) floor(data);
- int32_t c = (int32_t) ceil(data);
- return c & 1 ? f : c;
- }
- return (int32_t) floor(data);
-#endif
+ return vreinterpretq_m128i_s8(
+ vqsubq_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b)));
}
-// Convert packed 16-bit integers in a to packed single-precision (32-bit)
-// floating-point elements, and store the results in dst.
+// Subtracts the 8 unsigned 16-bit integers of b from the 8 unsigned 16-bit
+// integers of a and saturates.
+// https://technet.microsoft.com/en-us/subscriptions/index/f44y0s19(v=vs.90).aspx
+FORCE_INLINE __m128i _mm_subs_epu16(__m128i a, __m128i b)
+{
+ return vreinterpretq_m128i_u16(
+ vqsubq_u16(vreinterpretq_u16_m128i(a), vreinterpretq_u16_m128i(b)));
+}
+
+// Subtracts the 16 unsigned 8-bit integers of b from the 16 unsigned 8-bit
+// integers of a and saturates.
//
-// FOR j := 0 to 3
-// i := j*16
-// m := j*32
-// dst[m+31:m] := Convert_Int16_To_FP32(a[i+15:i])
-// ENDFOR
+// r0 := UnsignedSaturate(a0 - b0)
+// r1 := UnsignedSaturate(a1 - b1)
+// ...
+// r15 := UnsignedSaturate(a15 - b15)
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtpi16_ps
-FORCE_INLINE __m128 _mm_cvtpi16_ps(__m64 a)
+// https://technet.microsoft.com/en-us/subscriptions/yadkxc18(v=vs.90)
+FORCE_INLINE __m128i _mm_subs_epu8(__m128i a, __m128i b)
{
- return vreinterpretq_m128_f32(
- vcvtq_f32_s32(vmovl_s16(vreinterpret_s16_m64(a))));
+ return vreinterpretq_m128i_u8(
+ vqsubq_u8(vreinterpretq_u8_m128i(a), vreinterpretq_u8_m128i(b)));
}
-// Convert packed 32-bit integers in b to packed single-precision (32-bit)
-// floating-point elements, store the results in the lower 2 elements of dst,
-// and copy the upper 2 packed elements from a to the upper elements of dst.
+#define _mm_ucomieq_sd _mm_comieq_sd
+#define _mm_ucomige_sd _mm_comige_sd
+#define _mm_ucomigt_sd _mm_comigt_sd
+#define _mm_ucomile_sd _mm_comile_sd
+#define _mm_ucomilt_sd _mm_comilt_sd
+#define _mm_ucomineq_sd _mm_comineq_sd
+
+// Return vector of type __m128d with undefined elements.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_undefined_pd
+FORCE_INLINE __m128d _mm_undefined_pd(void)
+{
+#if defined(__GNUC__) || defined(__clang__)
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wuninitialized"
+#endif
+ __m128d a;
+ return a;
+#if defined(__GNUC__) || defined(__clang__)
+#pragma GCC diagnostic pop
+#endif
+}
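+// Note: the value is intentionally left uninitialized to match the x86
+// "undefined contents" semantics; the pragmas silence the resulting
+// -Wuninitialized warning.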
+
+// Interleaves the upper 4 signed or unsigned 16-bit integers in a with the
+// upper 4 signed or unsigned 16-bit integers in b.
//
-// dst[31:0] := Convert_Int32_To_FP32(b[31:0])
-// dst[63:32] := Convert_Int32_To_FP32(b[63:32])
-// dst[95:64] := a[95:64]
-// dst[127:96] := a[127:96]
+// r0 := a4
+// r1 := b4
+// r2 := a5
+// r3 := b5
+// r4 := a6
+// r5 := b6
+// r6 := a7
+// r7 := b7
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtpi32_ps
-FORCE_INLINE __m128 _mm_cvtpi32_ps(__m128 a, __m64 b)
+// https://msdn.microsoft.com/en-us/library/03196cz7(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_unpackhi_epi16(__m128i a, __m128i b)
{
- return vreinterpretq_m128_f32(
- vcombine_f32(vcvt_f32_s32(vreinterpret_s32_m64(b)),
- vget_high_f32(vreinterpretq_f32_m128(a))));
+#if defined(__aarch64__)
+ return vreinterpretq_m128i_s16(
+ vzip2q_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
+#else
+ int16x4_t a1 = vget_high_s16(vreinterpretq_s16_m128i(a));
+ int16x4_t b1 = vget_high_s16(vreinterpretq_s16_m128i(b));
+ int16x4x2_t result = vzip_s16(a1, b1);
+ return vreinterpretq_m128i_s16(vcombine_s16(result.val[0], result.val[1]));
+#endif
}
-// Convert packed signed 32-bit integers in a to packed single-precision
-// (32-bit) floating-point elements, store the results in the lower 2 elements
-// of dst, then covert the packed signed 32-bit integers in b to
-// single-precision (32-bit) floating-point element, and store the results in
-// the upper 2 elements of dst.
+// Interleaves the upper 2 signed or unsigned 32-bit integers in a with the
+// upper 2 signed or unsigned 32-bit integers in b.
+// https://msdn.microsoft.com/en-us/library/65sa7cbs(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_unpackhi_epi32(__m128i a, __m128i b)
+{
+#if defined(__aarch64__)
+ return vreinterpretq_m128i_s32(
+ vzip2q_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
+#else
+ int32x2_t a1 = vget_high_s32(vreinterpretq_s32_m128i(a));
+ int32x2_t b1 = vget_high_s32(vreinterpretq_s32_m128i(b));
+ int32x2x2_t result = vzip_s32(a1, b1);
+ return vreinterpretq_m128i_s32(vcombine_s32(result.val[0], result.val[1]));
+#endif
+}
+
+// Interleaves the upper signed or unsigned 64-bit integer in a with the
+// upper signed or unsigned 64-bit integer in b.
//
-// dst[31:0] := Convert_Int32_To_FP32(a[31:0])
-// dst[63:32] := Convert_Int32_To_FP32(a[63:32])
-// dst[95:64] := Convert_Int32_To_FP32(b[31:0])
-// dst[127:96] := Convert_Int32_To_FP32(b[63:32])
+// r0 := a1
+// r1 := b1
+FORCE_INLINE __m128i _mm_unpackhi_epi64(__m128i a, __m128i b)
+{
+ int64x1_t a_h = vget_high_s64(vreinterpretq_s64_m128i(a));
+ int64x1_t b_h = vget_high_s64(vreinterpretq_s64_m128i(b));
+ return vreinterpretq_m128i_s64(vcombine_s64(a_h, b_h));
+}
+
+// Interleaves the upper 8 signed or unsigned 8-bit integers in a with the upper
+// 8 signed or unsigned 8-bit integers in b.
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtpi32x2_ps
-FORCE_INLINE __m128 _mm_cvtpi32x2_ps(__m64 a, __m64 b)
+// r0 := a8
+// r1 := b8
+// r2 := a9
+// r3 := b9
+// ...
+// r14 := a15
+// r15 := b15
+//
+// https://msdn.microsoft.com/en-us/library/t5h7783k(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_unpackhi_epi8(__m128i a, __m128i b)
{
- return vreinterpretq_m128_f32(vcvtq_f32_s32(
- vcombine_s32(vreinterpret_s32_m64(a), vreinterpret_s32_m64(b))));
+#if defined(__aarch64__)
+ return vreinterpretq_m128i_s8(
+ vzip2q_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b)));
+#else
+ int8x8_t a1 =
+ vreinterpret_s8_s16(vget_high_s16(vreinterpretq_s16_m128i(a)));
+ int8x8_t b1 =
+ vreinterpret_s8_s16(vget_high_s16(vreinterpretq_s16_m128i(b)));
+ int8x8x2_t result = vzip_s8(a1, b1);
+ return vreinterpretq_m128i_s8(vcombine_s8(result.val[0], result.val[1]));
+#endif
}
-// Convert the lower packed 8-bit integers in a to packed single-precision
-// (32-bit) floating-point elements, and store the results in dst.
+// Unpack and interleave double-precision (64-bit) floating-point elements from
+// the high half of a and b, and store the results in dst.
//
-// FOR j := 0 to 3
-// i := j*8
-// m := j*32
-// dst[m+31:m] := Convert_Int8_To_FP32(a[i+7:i])
-// ENDFOR
+// DEFINE INTERLEAVE_HIGH_QWORDS(src1[127:0], src2[127:0]) {
+// dst[63:0] := src1[127:64]
+// dst[127:64] := src2[127:64]
+// RETURN dst[127:0]
+// }
+// dst[127:0] := INTERLEAVE_HIGH_QWORDS(a[127:0], b[127:0])
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtpi8_ps
-FORCE_INLINE __m128 _mm_cvtpi8_ps(__m64 a)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_unpackhi_pd
+FORCE_INLINE __m128d _mm_unpackhi_pd(__m128d a, __m128d b)
{
- return vreinterpretq_m128_f32(vcvtq_f32_s32(
- vmovl_s16(vget_low_s16(vmovl_s8(vreinterpret_s8_m64(a))))));
+#if defined(__aarch64__)
+ return vreinterpretq_m128d_f64(
+ vzip2q_f64(vreinterpretq_f64_m128d(a), vreinterpretq_f64_m128d(b)));
+#else
+ return vreinterpretq_m128d_s64(
+ vcombine_s64(vget_high_s64(vreinterpretq_s64_m128d(a)),
+ vget_high_s64(vreinterpretq_s64_m128d(b))));
+#endif
}
-// Convert packed unsigned 16-bit integers in a to packed single-precision
-// (32-bit) floating-point elements, and store the results in dst.
+// Interleaves the lower 4 signed or unsigned 16-bit integers in a with the
+// lower 4 signed or unsigned 16-bit integers in b.
//
-// FOR j := 0 to 3
-// i := j*16
-// m := j*32
-// dst[m+31:m] := Convert_UInt16_To_FP32(a[i+15:i])
-// ENDFOR
+// r0 := a0
+// r1 := b0
+// r2 := a1
+// r3 := b1
+// r4 := a2
+// r5 := b2
+// r6 := a3
+// r7 := b3
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtpu16_ps
-FORCE_INLINE __m128 _mm_cvtpu16_ps(__m64 a)
+// https://msdn.microsoft.com/en-us/library/btxb17bw%28v=vs.90%29.aspx
+FORCE_INLINE __m128i _mm_unpacklo_epi16(__m128i a, __m128i b)
{
- return vreinterpretq_m128_f32(
- vcvtq_f32_u32(vmovl_u16(vreinterpret_u16_m64(a))));
+#if defined(__aarch64__)
+ return vreinterpretq_m128i_s16(
+ vzip1q_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
+#else
+ int16x4_t a1 = vget_low_s16(vreinterpretq_s16_m128i(a));
+ int16x4_t b1 = vget_low_s16(vreinterpretq_s16_m128i(b));
+ int16x4x2_t result = vzip_s16(a1, b1);
+ return vreinterpretq_m128i_s16(vcombine_s16(result.val[0], result.val[1]));
+#endif
}
-// Convert the lower packed unsigned 8-bit integers in a to packed
-// single-precision (32-bit) floating-point elements, and store the results in
-// dst.
+// Interleaves the lower 2 signed or unsigned 32-bit integers in a with the
+// lower 2 signed or unsigned 32-bit integers in b.
//
-// FOR j := 0 to 3
-// i := j*8
-// m := j*32
-// dst[m+31:m] := Convert_UInt8_To_FP32(a[i+7:i])
-// ENDFOR
+// r0 := a0
+// r1 := b0
+// r2 := a1
+// r3 := b1
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtpu8_ps
-FORCE_INLINE __m128 _mm_cvtpu8_ps(__m64 a)
+// https://msdn.microsoft.com/en-us/library/x8atst9d(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_unpacklo_epi32(__m128i a, __m128i b)
{
- return vreinterpretq_m128_f32(vcvtq_f32_u32(
- vmovl_u16(vget_low_u16(vmovl_u8(vreinterpret_u8_m64(a))))));
+#if defined(__aarch64__)
+ return vreinterpretq_m128i_s32(
+ vzip1q_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
+#else
+ int32x2_t a1 = vget_low_s32(vreinterpretq_s32_m128i(a));
+ int32x2_t b1 = vget_low_s32(vreinterpretq_s32_m128i(b));
+ int32x2x2_t result = vzip_s32(a1, b1);
+ return vreinterpretq_m128i_s32(vcombine_s32(result.val[0], result.val[1]));
+#endif
}
-// Converts the four single-precision, floating-point values of a to signed
-// 32-bit integer values using truncate.
-// https://msdn.microsoft.com/en-us/library/vstudio/1h005y6x(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_cvttps_epi32(__m128 a)
+FORCE_INLINE __m128i _mm_unpacklo_epi64(__m128i a, __m128i b)
{
- return vreinterpretq_m128i_s32(vcvtq_s32_f32(vreinterpretq_f32_m128(a)));
+ int64x1_t a_l = vget_low_s64(vreinterpretq_s64_m128i(a));
+ int64x1_t b_l = vget_low_s64(vreinterpretq_s64_m128i(b));
+ return vreinterpretq_m128i_s64(vcombine_s64(a_l, b_l));
}
-// Convert the lower double-precision (64-bit) floating-point element in a to a
-// 64-bit integer with truncation, and store the result in dst.
+// Interleaves the lower 8 signed or unsigned 8-bit integers in a with the lower
+// 8 signed or unsigned 8-bit integers in b.
//
-// dst[63:0] := Convert_FP64_To_Int64_Truncate(a[63:0])
+// r0 := a0
+// r1 := b0
+// r2 := a1
+// r3 := b1
+// ...
+// r14 := a7
+// r15 := b7
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvttsd_si64
-FORCE_INLINE int64_t _mm_cvttsd_si64(__m128d a)
+// https://msdn.microsoft.com/en-us/library/xf7k860c%28v=vs.90%29.aspx
+FORCE_INLINE __m128i _mm_unpacklo_epi8(__m128i a, __m128i b)
{
#if defined(__aarch64__)
- return vgetq_lane_s64(vcvtq_s64_f64(vreinterpretq_f64_m128d(a)), 0);
+ return vreinterpretq_m128i_s8(
+ vzip1q_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b)));
#else
- double ret = *((double *) &a);
- return (int64_t) ret;
+ int8x8_t a1 = vreinterpret_s8_s16(vget_low_s16(vreinterpretq_s16_m128i(a)));
+ int8x8_t b1 = vreinterpret_s8_s16(vget_low_s16(vreinterpretq_s16_m128i(b)));
+ int8x8x2_t result = vzip_s8(a1, b1);
+ return vreinterpretq_m128i_s8(vcombine_s8(result.val[0], result.val[1]));
#endif
}
-// Convert the lower double-precision (64-bit) floating-point element in a to a
-// 64-bit integer with truncation, and store the result in dst.
+// Unpack and interleave double-precision (64-bit) floating-point elements from
+// the low half of a and b, and store the results in dst.
//
-// dst[63:0] := Convert_FP64_To_Int64_Truncate(a[63:0])
+// DEFINE INTERLEAVE_QWORDS(src1[127:0], src2[127:0]) {
+// dst[63:0] := src1[63:0]
+// dst[127:64] := src2[63:0]
+// RETURN dst[127:0]
+// }
+// dst[127:0] := INTERLEAVE_QWORDS(a[127:0], b[127:0])
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvttsd_si64x
-#define _mm_cvttsd_si64x(a) _mm_cvttsd_si64(a)
-
-// Converts the four signed 32-bit integer values of a to single-precision,
-// floating-point values
-// https://msdn.microsoft.com/en-us/library/vstudio/36bwxcx5(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_cvtepi32_ps(__m128i a)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_unpacklo_pd
+FORCE_INLINE __m128d _mm_unpacklo_pd(__m128d a, __m128d b)
{
- return vreinterpretq_m128_f32(vcvtq_f32_s32(vreinterpretq_s32_m128i(a)));
+#if defined(__aarch64__)
+ return vreinterpretq_m128d_f64(
+ vzip1q_f64(vreinterpretq_f64_m128d(a), vreinterpretq_f64_m128d(b)));
+#else
+ return vreinterpretq_m128d_s64(
+ vcombine_s64(vget_low_s64(vreinterpretq_s64_m128d(a)),
+ vget_low_s64(vreinterpretq_s64_m128d(b))));
+#endif
}
-// Converts the four unsigned 8-bit integers in the lower 16 bits to four
-// unsigned 32-bit integers.
-FORCE_INLINE __m128i _mm_cvtepu8_epi16(__m128i a)
+// Compute the bitwise XOR of packed double-precision (64-bit) floating-point
+// elements in a and b, and store the results in dst.
+//
+// FOR j := 0 to 1
+// i := j*64
+// dst[i+63:i] := a[i+63:i] XOR b[i+63:i]
+// ENDFOR
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_xor_pd
+FORCE_INLINE __m128d _mm_xor_pd(__m128d a, __m128d b)
{
- uint8x16_t u8x16 = vreinterpretq_u8_m128i(a); /* xxxx xxxx xxxx DCBA */
- uint16x8_t u16x8 = vmovl_u8(vget_low_u8(u8x16)); /* 0x0x 0x0x 0D0C 0B0A */
- return vreinterpretq_m128i_u16(u16x8);
+ return vreinterpretq_m128d_s64(
+ veorq_s64(vreinterpretq_s64_m128d(a), vreinterpretq_s64_m128d(b)));
}
-// Converts the four unsigned 8-bit integers in the lower 32 bits to four
-// unsigned 32-bit integers.
-// https://msdn.microsoft.com/en-us/library/bb531467%28v=vs.100%29.aspx
-FORCE_INLINE __m128i _mm_cvtepu8_epi32(__m128i a)
+// Computes the bitwise XOR of the 128-bit value in a and the 128-bit value in
+// b. https://msdn.microsoft.com/en-us/library/fzt08www(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_xor_si128(__m128i a, __m128i b)
{
- uint8x16_t u8x16 = vreinterpretq_u8_m128i(a); /* xxxx xxxx xxxx DCBA */
- uint16x8_t u16x8 = vmovl_u8(vget_low_u8(u8x16)); /* 0x0x 0x0x 0D0C 0B0A */
- uint32x4_t u32x4 = vmovl_u16(vget_low_u16(u16x8)); /* 000D 000C 000B 000A */
- return vreinterpretq_m128i_u32(u32x4);
+ return vreinterpretq_m128i_s32(
+ veorq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
}
-// Converts the two unsigned 8-bit integers in the lower 16 bits to two
-// unsigned 64-bit integers.
-FORCE_INLINE __m128i _mm_cvtepu8_epi64(__m128i a)
+/* SSE3 */
+
+// Alternatively add and subtract packed double-precision (64-bit)
+// floating-point elements in a to/from packed elements in b, and store the
+// results in dst.
+//
+// FOR j := 0 to 1
+// i := j*64
+// IF ((j & 1) == 0)
+// dst[i+63:i] := a[i+63:i] - b[i+63:i]
+// ELSE
+// dst[i+63:i] := a[i+63:i] + b[i+63:i]
+// FI
+// ENDFOR
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_addsub_pd
+FORCE_INLINE __m128d _mm_addsub_pd(__m128d a, __m128d b)
{
- uint8x16_t u8x16 = vreinterpretq_u8_m128i(a); /* xxxx xxxx xxxx xxBA */
- uint16x8_t u16x8 = vmovl_u8(vget_low_u8(u8x16)); /* 0x0x 0x0x 0x0x 0B0A */
- uint32x4_t u32x4 = vmovl_u16(vget_low_u16(u16x8)); /* 000x 000x 000B 000A */
- uint64x2_t u64x2 = vmovl_u32(vget_low_u32(u32x4)); /* 0000 000B 0000 000A */
- return vreinterpretq_m128i_u64(u64x2);
+ __m128d mask = _mm_set_pd(1.0f, -1.0f);
+#if defined(__aarch64__)
+ return vreinterpretq_m128d_f64(vfmaq_f64(vreinterpretq_f64_m128d(a),
+ vreinterpretq_f64_m128d(b),
+ vreinterpretq_f64_m128d(mask)));
+#else
+ return _mm_add_pd(_mm_mul_pd(b, mask), a);
+#endif
}
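+// Illustrative expansion of the mask above (lane 0 is the low element):
+//   dst[63:0]   := a[63:0]   + (-1.0) * b[63:0]   = a0 - b0
+//   dst[127:64] := a[127:64] + ( 1.0) * b[127:64] = a1 + b1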
-// Converts the four unsigned 8-bit integers in the lower 16 bits to four
-// unsigned 32-bit integers.
-FORCE_INLINE __m128i _mm_cvtepi8_epi16(__m128i a)
+// Alternatively add and subtract packed single-precision (32-bit)
+// floating-point elements in a to/from packed elements in b, and store the
+// results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=addsub_ps
+FORCE_INLINE __m128 _mm_addsub_ps(__m128 a, __m128 b)
{
- int8x16_t s8x16 = vreinterpretq_s8_m128i(a); /* xxxx xxxx xxxx DCBA */
- int16x8_t s16x8 = vmovl_s8(vget_low_s8(s8x16)); /* 0x0x 0x0x 0D0C 0B0A */
- return vreinterpretq_m128i_s16(s16x8);
+ __m128 mask = {-1.0f, 1.0f, -1.0f, 1.0f};
+#if defined(__aarch64__) || defined(__ARM_FEATURE_FMA) /* VFPv4+ */
+ return vreinterpretq_m128_f32(vfmaq_f32(vreinterpretq_f32_m128(a),
+ vreinterpretq_f32_m128(mask),
+ vreinterpretq_f32_m128(b)));
+#else
+ return _mm_add_ps(_mm_mul_ps(b, mask), a);
+#endif
}
-// Converts the four unsigned 8-bit integers in the lower 32 bits to four
-// unsigned 32-bit integers.
-FORCE_INLINE __m128i _mm_cvtepi8_epi32(__m128i a)
+// Horizontally add adjacent pairs of double-precision (64-bit) floating-point
+// elements in a and b, and pack the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_hadd_pd
+FORCE_INLINE __m128d _mm_hadd_pd(__m128d a, __m128d b)
{
- int8x16_t s8x16 = vreinterpretq_s8_m128i(a); /* xxxx xxxx xxxx DCBA */
- int16x8_t s16x8 = vmovl_s8(vget_low_s8(s8x16)); /* 0x0x 0x0x 0D0C 0B0A */
- int32x4_t s32x4 = vmovl_s16(vget_low_s16(s16x8)); /* 000D 000C 000B 000A */
- return vreinterpretq_m128i_s32(s32x4);
+#if defined(__aarch64__)
+ return vreinterpretq_m128d_f64(
+ vpaddq_f64(vreinterpretq_f64_m128d(a), vreinterpretq_f64_m128d(b)));
+#else
+ double *da = (double *) &a;
+ double *db = (double *) &b;
+ double c[] = {da[0] + da[1], db[0] + db[1]};
+ return vreinterpretq_m128d_u64(vld1q_u64((uint64_t *) c));
+#endif
}
-// Converts the two signed 8-bit integers in the lower 32 bits to four
-// signed 64-bit integers.
-FORCE_INLINE __m128i _mm_cvtepi8_epi64(__m128i a)
+// Computes pairwise add of each argument as single-precision, floating-point
+// values a and b.
+// https://msdn.microsoft.com/en-us/library/yd9wecaa.aspx
+FORCE_INLINE __m128 _mm_hadd_ps(__m128 a, __m128 b)
{
- int8x16_t s8x16 = vreinterpretq_s8_m128i(a); /* xxxx xxxx xxxx xxBA */
- int16x8_t s16x8 = vmovl_s8(vget_low_s8(s8x16)); /* 0x0x 0x0x 0x0x 0B0A */
- int32x4_t s32x4 = vmovl_s16(vget_low_s16(s16x8)); /* 000x 000x 000B 000A */
- int64x2_t s64x2 = vmovl_s32(vget_low_s32(s32x4)); /* 0000 000B 0000 000A */
- return vreinterpretq_m128i_s64(s64x2);
+#if defined(__aarch64__)
+ return vreinterpretq_m128_f32(
+ vpaddq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
+#else
+ float32x2_t a10 = vget_low_f32(vreinterpretq_f32_m128(a));
+ float32x2_t a32 = vget_high_f32(vreinterpretq_f32_m128(a));
+ float32x2_t b10 = vget_low_f32(vreinterpretq_f32_m128(b));
+ float32x2_t b32 = vget_high_f32(vreinterpretq_f32_m128(b));
+ return vreinterpretq_m128_f32(
+ vcombine_f32(vpadd_f32(a10, a32), vpadd_f32(b10, b32)));
+#endif
}
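+// Example: _mm_hadd_ps({a0,a1,a2,a3}, {b0,b1,b2,b3}) yields
+//   {a0+a1, a2+a3, b0+b1, b2+b3}.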
-// Converts the four signed 16-bit integers in the lower 64 bits to four signed
-// 32-bit integers.
-FORCE_INLINE __m128i _mm_cvtepi16_epi32(__m128i a)
+// Horizontally subtract adjacent pairs of double-precision (64-bit)
+// floating-point elements in a and b, and pack the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_hsub_pd
+FORCE_INLINE __m128d _mm_hsub_pd(__m128d _a, __m128d _b)
{
- return vreinterpretq_m128i_s32(
- vmovl_s16(vget_low_s16(vreinterpretq_s16_m128i(a))));
+#if defined(__aarch64__)
+ return vreinterpretq_m128d_f64(vsubq_f64(
+ vuzp1q_f64(vreinterpretq_f64_m128d(_a), vreinterpretq_f64_m128d(_b)),
+ vuzp2q_f64(vreinterpretq_f64_m128d(_a), vreinterpretq_f64_m128d(_b))));
+#else
+ double *da = (double *) &_a;
+ double *db = (double *) &_b;
+ double c[] = {da[0] - da[1], db[0] - db[1]};
+ return vreinterpretq_m128d_u64(vld1q_u64((uint64_t *) c));
+#endif
}
-// Converts the two signed 16-bit integers in the lower 32 bits two signed
-// 32-bit integers.
-FORCE_INLINE __m128i _mm_cvtepi16_epi64(__m128i a)
+// Horizontally subtract adjacent pairs of single-precision (32-bit)
+// floating-point elements in a and b, and pack the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_hsub_ps
+FORCE_INLINE __m128 _mm_hsub_ps(__m128 _a, __m128 _b)
{
- int16x8_t s16x8 = vreinterpretq_s16_m128i(a); /* xxxx xxxx xxxx 0B0A */
- int32x4_t s32x4 = vmovl_s16(vget_low_s16(s16x8)); /* 000x 000x 000B 000A */
- int64x2_t s64x2 = vmovl_s32(vget_low_s32(s32x4)); /* 0000 000B 0000 000A */
- return vreinterpretq_m128i_s64(s64x2);
+#if defined(__aarch64__)
+ return vreinterpretq_m128_f32(vsubq_f32(
+ vuzp1q_f32(vreinterpretq_f32_m128(_a), vreinterpretq_f32_m128(_b)),
+ vuzp2q_f32(vreinterpretq_f32_m128(_a), vreinterpretq_f32_m128(_b))));
+#else
+ float32x4x2_t c =
+ vuzpq_f32(vreinterpretq_f32_m128(_a), vreinterpretq_f32_m128(_b));
+ return vreinterpretq_m128_f32(vsubq_f32(c.val[0], c.val[1]));
+#endif
}
-// Converts the four unsigned 16-bit integers in the lower 64 bits to four
-// unsigned 32-bit integers.
-FORCE_INLINE __m128i _mm_cvtepu16_epi32(__m128i a)
+// Load 128-bits of integer data from unaligned memory into dst. This intrinsic
+// may perform better than _mm_loadu_si128 when the data crosses a cache line
+// boundary.
+//
+// dst[127:0] := MEM[mem_addr+127:mem_addr]
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_lddqu_si128
+#define _mm_lddqu_si128 _mm_loadu_si128
+
+// Load a double-precision (64-bit) floating-point element from memory into both
+// elements of dst.
+//
+// dst[63:0] := MEM[mem_addr+63:mem_addr]
+// dst[127:64] := MEM[mem_addr+63:mem_addr]
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loaddup_pd
+#define _mm_loaddup_pd _mm_load1_pd
+
+// Duplicate the low double-precision (64-bit) floating-point element from a,
+// and store the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_movedup_pd
+FORCE_INLINE __m128d _mm_movedup_pd(__m128d a)
{
- return vreinterpretq_m128i_u32(
- vmovl_u16(vget_low_u16(vreinterpretq_u16_m128i(a))));
+#if defined(__aarch64__)
+ return vreinterpretq_m128d_f64(
+ vdupq_laneq_f64(vreinterpretq_f64_m128d(a), 0));
+#else
+ return vreinterpretq_m128d_u64(
+ vdupq_n_u64(vgetq_lane_u64(vreinterpretq_u64_m128d(a), 0)));
+#endif
}
-// Converts the two unsigned 16-bit integers in the lower 32 bits to two
-// unsigned 64-bit integers.
-FORCE_INLINE __m128i _mm_cvtepu16_epi64(__m128i a)
+// Duplicate odd-indexed single-precision (32-bit) floating-point elements
+// from a, and store the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_movehdup_ps
+FORCE_INLINE __m128 _mm_movehdup_ps(__m128 a)
{
- uint16x8_t u16x8 = vreinterpretq_u16_m128i(a); /* xxxx xxxx xxxx 0B0A */
- uint32x4_t u32x4 = vmovl_u16(vget_low_u16(u16x8)); /* 000x 000x 000B 000A */
- uint64x2_t u64x2 = vmovl_u32(vget_low_u32(u32x4)); /* 0000 000B 0000 000A */
- return vreinterpretq_m128i_u64(u64x2);
+#if __has_builtin(__builtin_shufflevector)
+ return vreinterpretq_m128_f32(__builtin_shufflevector(
+ vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a), 1, 1, 3, 3));
+#else
+ float32_t a1 = vgetq_lane_f32(vreinterpretq_f32_m128(a), 1);
+ float32_t a3 = vgetq_lane_f32(vreinterpretq_f32_m128(a), 3);
+ float ALIGN_STRUCT(16) data[4] = {a1, a1, a3, a3};
+ return vreinterpretq_m128_f32(vld1q_f32(data));
+#endif
}
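+// Example: _mm_movehdup_ps({x0, x1, x2, x3}) yields {x1, x1, x3, x3}.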
-// Converts the two unsigned 32-bit integers in the lower 64 bits to two
-// unsigned 64-bit integers.
-FORCE_INLINE __m128i _mm_cvtepu32_epi64(__m128i a)
+// Duplicate even-indexed single-precision (32-bit) floating-point elements
+// from a, and store the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_moveldup_ps
+FORCE_INLINE __m128 _mm_moveldup_ps(__m128 a)
{
- return vreinterpretq_m128i_u64(
- vmovl_u32(vget_low_u32(vreinterpretq_u32_m128i(a))));
+#if __has_builtin(__builtin_shufflevector)
+ return vreinterpretq_m128_f32(__builtin_shufflevector(
+ vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a), 0, 0, 2, 2));
+#else
+ float32_t a0 = vgetq_lane_f32(vreinterpretq_f32_m128(a), 0);
+ float32_t a2 = vgetq_lane_f32(vreinterpretq_f32_m128(a), 2);
+ float ALIGN_STRUCT(16) data[4] = {a0, a0, a2, a2};
+ return vreinterpretq_m128_f32(vld1q_f32(data));
+#endif
}
-// Converts the two signed 32-bit integers in the lower 64 bits to two signed
-// 64-bit integers.
-FORCE_INLINE __m128i _mm_cvtepi32_epi64(__m128i a)
+/* SSSE3 */
+
+// Compute the absolute value of packed signed 16-bit integers in a, and store
+// the unsigned results in dst.
+//
+// FOR j := 0 to 7
+// i := j*16
+// dst[i+15:i] := ABS(a[i+15:i])
+// ENDFOR
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_abs_epi16
+FORCE_INLINE __m128i _mm_abs_epi16(__m128i a)
{
- return vreinterpretq_m128i_s64(
- vmovl_s32(vget_low_s32(vreinterpretq_s32_m128i(a))));
+ return vreinterpretq_m128i_s16(vabsq_s16(vreinterpretq_s16_m128i(a)));
}
-// Converts the four single-precision, floating-point values of a to signed
-// 32-bit integer values.
+// Compute the absolute value of packed signed 32-bit integers in a, and store
+// the unsigned results in dst.
//
-// r0 := (int) a0
-// r1 := (int) a1
-// r2 := (int) a2
-// r3 := (int) a3
+// FOR j := 0 to 3
+// i := j*32
+// dst[i+31:i] := ABS(a[i+31:i])
+// ENDFOR
//
-// https://msdn.microsoft.com/en-us/library/vstudio/xdc42k5e(v=vs.100).aspx
-// *NOTE*. The default rounding mode on SSE is 'round to even', which ARMv7-A
-// does not support! It is supported on ARMv8-A however.
-FORCE_INLINE __m128i _mm_cvtps_epi32(__m128 a)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_abs_epi32
+FORCE_INLINE __m128i _mm_abs_epi32(__m128i a)
{
-#if defined(__aarch64__)
- return vreinterpretq_m128i_s32(vcvtnq_s32_f32(a));
-#else
- uint32x4_t signmask = vdupq_n_u32(0x80000000);
- float32x4_t half = vbslq_f32(signmask, vreinterpretq_f32_m128(a),
- vdupq_n_f32(0.5f)); /* +/- 0.5 */
- int32x4_t r_normal = vcvtq_s32_f32(vaddq_f32(
- vreinterpretq_f32_m128(a), half)); /* round to integer: [a + 0.5]*/
- int32x4_t r_trunc =
- vcvtq_s32_f32(vreinterpretq_f32_m128(a)); /* truncate to integer: [a] */
- int32x4_t plusone = vreinterpretq_s32_u32(vshrq_n_u32(
- vreinterpretq_u32_s32(vnegq_s32(r_trunc)), 31)); /* 1 or 0 */
- int32x4_t r_even = vbicq_s32(vaddq_s32(r_trunc, plusone),
- vdupq_n_s32(1)); /* ([a] + {0,1}) & ~1 */
- float32x4_t delta = vsubq_f32(
- vreinterpretq_f32_m128(a),
- vcvtq_f32_s32(r_trunc)); /* compute delta: delta = (a - [a]) */
- uint32x4_t is_delta_half = vceqq_f32(delta, half); /* delta == +/- 0.5 */
- return vreinterpretq_m128i_s32(vbslq_s32(is_delta_half, r_even, r_normal));
-#endif
+ return vreinterpretq_m128i_s32(vabsq_s32(vreinterpretq_s32_m128i(a)));
}
-// Convert packed single-precision (32-bit) floating-point elements in a to
-// packed 16-bit integers, and store the results in dst. Note: this intrinsic
-// will generate 0x7FFF, rather than 0x8000, for input values between 0x7FFF and
-// 0x7FFFFFFF.
+// Compute the absolute value of packed signed 8-bit integers in a, and store
+// the unsigned results in dst.
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtps_pi16
-FORCE_INLINE __m64 _mm_cvtps_pi16(__m128 a)
+// FOR j := 0 to 15
+// i := j*8
+// dst[i+7:i] := ABS(a[i+7:i])
+// ENDFOR
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_abs_epi8
+FORCE_INLINE __m128i _mm_abs_epi8(__m128i a)
{
- return vreinterpret_m64_s16(
- vmovn_s32(vreinterpretq_s32_m128i(_mm_cvtps_epi32(a))));
+ return vreinterpretq_m128i_s8(vabsq_s8(vreinterpretq_s8_m128i(a)));
}
-// Copy the lower 32-bit integer in a to dst.
+// Compute the absolute value of packed signed 16-bit integers in a, and store
+// the unsigned results in dst.
//
-// dst[31:0] := a[31:0]
+// FOR j := 0 to 3
+// i := j*16
+// dst[i+15:i] := ABS(a[i+15:i])
+// ENDFOR
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtsi128_si32
-FORCE_INLINE int _mm_cvtsi128_si32(__m128i a)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_abs_pi16
+FORCE_INLINE __m64 _mm_abs_pi16(__m64 a)
{
- return vgetq_lane_s32(vreinterpretq_s32_m128i(a), 0);
+ return vreinterpret_m64_s16(vabs_s16(vreinterpret_s16_m64(a)));
}
-// Copy the lower 64-bit integer in a to dst.
+// Compute the absolute value of packed signed 32-bit integers in a, and store
+// the unsigned results in dst.
//
-// dst[63:0] := a[63:0]
+// FOR j := 0 to 1
+// i := j*32
+// dst[i+31:i] := ABS(a[i+31:i])
+// ENDFOR
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtsi128_si64
-FORCE_INLINE int64_t _mm_cvtsi128_si64(__m128i a)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_abs_pi32
+FORCE_INLINE __m64 _mm_abs_pi32(__m64 a)
{
- return vgetq_lane_s64(vreinterpretq_s64_m128i(a), 0);
+ return vreinterpret_m64_s32(vabs_s32(vreinterpret_s32_m64(a)));
}
-// Copy the lower 64-bit integer in a to dst.
+// Compute the absolute value of packed signed 8-bit integers in a, and store
+// the unsigned results in dst.
//
-// dst[63:0] := a[63:0]
+// FOR j := 0 to 7
+// i := j*8
+// dst[i+7:i] := ABS(a[i+7:i])
+// ENDFOR
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtsi128_si64x
-#define _mm_cvtsi128_si64x(a) _mm_cvtsi128_si64(a)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_abs_pi8
+FORCE_INLINE __m64 _mm_abs_pi8(__m64 a)
+{
+ return vreinterpret_m64_s8(vabs_s8(vreinterpret_s8_m64(a)));
+}
-// Moves 32-bit integer a to the least significant 32 bits of an __m128 object,
-// zero extending the upper bits.
+// Concatenate 16-byte blocks in a and b into a 32-byte temporary result, shift
+// the result right by imm8 bytes, and store the low 16 bytes in dst.
//
-// r0 := a
-// r1 := 0x0
-// r2 := 0x0
-// r3 := 0x0
+// tmp[255:0] := ((a[127:0] << 128)[255:0] OR b[127:0]) >> (imm8*8)
+// dst[127:0] := tmp[127:0]
//
-// https://msdn.microsoft.com/en-us/library/ct3539ha%28v=vs.90%29.aspx
-FORCE_INLINE __m128i _mm_cvtsi32_si128(int a)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_alignr_epi8
+FORCE_INLINE __m128i _mm_alignr_epi8(__m128i a, __m128i b, int imm)
{
- return vreinterpretq_m128i_s32(vsetq_lane_s32(a, vdupq_n_s32(0), 0));
+ if (_sse2neon_unlikely(imm & ~31))
+ return _mm_setzero_si128();
+ int idx;
+ uint8x16_t tmp[2];
+ if (imm >= 16) {
+ idx = imm - 16;
+ tmp[0] = vreinterpretq_u8_m128i(a);
+ tmp[1] = vdupq_n_u8(0);
+ } else {
+ idx = imm;
+ tmp[0] = vreinterpretq_u8_m128i(b);
+ tmp[1] = vreinterpretq_u8_m128i(a);
+ }
+ return vreinterpretq_m128i_u8(vld1q_u8(((uint8_t const *) tmp) + idx));
}
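+// Example (byte lanes, lane 0 first): imm == 4 gives dst = {b4..b15, a0..a3};
+// imm in [16,31] gives the bytes of a starting at imm-16 followed by zeroes;
+// any other imm gives all zeroes.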
-// Moves 64-bit integer a to the least significant 64 bits of an __m128 object,
-// zero extending the upper bits.
+// Concatenate 8-byte blocks in a and b into a 16-byte temporary result, shift
+// the result right by imm8 bytes, and store the low 8 bytes in dst.
//
-// r0 := a
-// r1 := 0x0
-FORCE_INLINE __m128i _mm_cvtsi64_si128(int64_t a)
+// tmp[127:0] := ((a[63:0] << 64)[127:0] OR b[63:0]) >> (imm8*8)
+// dst[63:0] := tmp[63:0]
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_alignr_pi8
+#define _mm_alignr_pi8(a, b, imm) \
+ __extension__({ \
+ __m64 ret; \
+ if (_sse2neon_unlikely((imm) >= 16)) { \
+ ret = vreinterpret_m64_s8(vdup_n_s8(0)); \
+ } else { \
+ uint8x8_t tmp_low, tmp_high; \
+ if (imm >= 8) { \
+ const int idx = imm - 8; \
+ tmp_low = vreinterpret_u8_m64(a); \
+ tmp_high = vdup_n_u8(0); \
+ ret = vreinterpret_m64_u8(vext_u8(tmp_low, tmp_high, idx)); \
+ } else { \
+ const int idx = imm; \
+ tmp_low = vreinterpret_u8_m64(b); \
+ tmp_high = vreinterpret_u8_m64(a); \
+ ret = vreinterpret_m64_u8(vext_u8(tmp_low, tmp_high, idx)); \
+ } \
+ } \
+ ret; \
+ })
+
+// Computes pairwise add of each argument as 16-bit signed or unsigned integer
+// values a and b.
+FORCE_INLINE __m128i _mm_hadd_epi16(__m128i _a, __m128i _b)
{
- return vreinterpretq_m128i_s64(vsetq_lane_s64(a, vdupq_n_s64(0), 0));
+ int16x8_t a = vreinterpretq_s16_m128i(_a);
+ int16x8_t b = vreinterpretq_s16_m128i(_b);
+#if defined(__aarch64__)
+ return vreinterpretq_m128i_s16(vpaddq_s16(a, b));
+#else
+ return vreinterpretq_m128i_s16(
+ vcombine_s16(vpadd_s16(vget_low_s16(a), vget_high_s16(a)),
+ vpadd_s16(vget_low_s16(b), vget_high_s16(b))));
+#endif
}
-// Cast vector of type __m128 to type __m128d. This intrinsic is only used for
-// compilation and does not generate any instructions, thus it has zero latency.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_castps_pd
-FORCE_INLINE __m128d _mm_castps_pd(__m128 a)
+// Computes pairwise add of each argument as 32-bit signed or unsigned integer
+// values a and b.
+FORCE_INLINE __m128i _mm_hadd_epi32(__m128i _a, __m128i _b)
{
- return vreinterpretq_m128d_s32(vreinterpretq_s32_m128(a));
+ int32x4_t a = vreinterpretq_s32_m128i(_a);
+ int32x4_t b = vreinterpretq_s32_m128i(_b);
+ return vreinterpretq_m128i_s32(
+ vcombine_s32(vpadd_s32(vget_low_s32(a), vget_high_s32(a)),
+ vpadd_s32(vget_low_s32(b), vget_high_s32(b))));
}
-// Applies a type cast to reinterpret four 32-bit floating point values passed
-// in as a 128-bit parameter as packed 32-bit integers.
-// https://msdn.microsoft.com/en-us/library/bb514099.aspx
-FORCE_INLINE __m128i _mm_castps_si128(__m128 a)
+// Horizontally add adjacent pairs of 16-bit integers in a and b, and pack the
+// signed 16-bit results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_hadd_pi16
+FORCE_INLINE __m64 _mm_hadd_pi16(__m64 a, __m64 b)
{
- return vreinterpretq_m128i_s32(vreinterpretq_s32_m128(a));
+ return vreinterpret_m64_s16(
+ vpadd_s16(vreinterpret_s16_m64(a), vreinterpret_s16_m64(b)));
}
-// Cast vector of type __m128i to type __m128d. This intrinsic is only used for
-// compilation and does not generate any instructions, thus it has zero latency.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_castsi128_pd
-FORCE_INLINE __m128d _mm_castsi128_pd(__m128i a)
+// Horizontally add adjacent pairs of 32-bit integers in a and b, and pack the
+// signed 32-bit results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_hadd_pi32
+FORCE_INLINE __m64 _mm_hadd_pi32(__m64 a, __m64 b)
+{
+ return vreinterpret_m64_s32(
+ vpadd_s32(vreinterpret_s32_m64(a), vreinterpret_s32_m64(b)));
+}
+
+// Computes saturated pairwise add of each argument as 16-bit signed
+// integer values a and b.
+FORCE_INLINE __m128i _mm_hadds_epi16(__m128i _a, __m128i _b)
{
#if defined(__aarch64__)
- return vreinterpretq_m128d_f64(vreinterpretq_f64_m128i(a));
+ int16x8_t a = vreinterpretq_s16_m128i(_a);
+ int16x8_t b = vreinterpretq_s16_m128i(_b);
+ return vreinterpretq_s64_s16(
+ vqaddq_s16(vuzp1q_s16(a, b), vuzp2q_s16(a, b)));
#else
- return vreinterpretq_m128d_f32(vreinterpretq_f32_m128i(a));
+ int32x4_t a = vreinterpretq_s32_m128i(_a);
+ int32x4_t b = vreinterpretq_s32_m128i(_b);
+ // Interleave using vshrn/vmovn
+ // [a0|a2|a4|a6|b0|b2|b4|b6]
+ // [a1|a3|a5|a7|b1|b3|b5|b7]
+ int16x8_t ab0246 = vcombine_s16(vmovn_s32(a), vmovn_s32(b));
+ int16x8_t ab1357 = vcombine_s16(vshrn_n_s32(a, 16), vshrn_n_s32(b, 16));
+ // Saturated add
+ return vreinterpretq_m128i_s16(vqaddq_s16(ab0246, ab1357));
#endif
}
-// Applies a type cast to reinterpret four 32-bit integers passed in as a
-// 128-bit parameter as packed 32-bit floating point values.
-// https://msdn.microsoft.com/en-us/library/bb514029.aspx
-FORCE_INLINE __m128 _mm_castsi128_ps(__m128i a)
+// Horizontally add adjacent pairs of signed 16-bit integers in a and b using
+// saturation, and pack the signed 16-bit results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_hadds_pi16
+FORCE_INLINE __m64 _mm_hadds_pi16(__m64 _a, __m64 _b)
{
- return vreinterpretq_m128_s32(vreinterpretq_s32_m128i(a));
+ int16x4_t a = vreinterpret_s16_m64(_a);
+ int16x4_t b = vreinterpret_s16_m64(_b);
+#if defined(__aarch64__)
+ return vreinterpret_s64_s16(vqadd_s16(vuzp1_s16(a, b), vuzp2_s16(a, b)));
+#else
+ int16x4x2_t res = vuzp_s16(a, b);
+ return vreinterpret_s64_s16(vqadd_s16(res.val[0], res.val[1]));
+#endif
}
-// Loads 128-bit value. :
-// https://msdn.microsoft.com/en-us/library/atzzad1h(v=vs.80).aspx
-FORCE_INLINE __m128i _mm_load_si128(const __m128i *p)
+// Computes pairwise difference of each argument as 16-bit signed or unsigned
+// integer values a and b.
+FORCE_INLINE __m128i _mm_hsub_epi16(__m128i _a, __m128i _b)
{
- return vreinterpretq_m128i_s32(vld1q_s32((const int32_t *) p));
+ int32x4_t a = vreinterpretq_s32_m128i(_a);
+ int32x4_t b = vreinterpretq_s32_m128i(_b);
+ // Interleave using vshrn/vmovn
+ // [a0|a2|a4|a6|b0|b2|b4|b6]
+ // [a1|a3|a5|a7|b1|b3|b5|b7]
+ int16x8_t ab0246 = vcombine_s16(vmovn_s32(a), vmovn_s32(b));
+ int16x8_t ab1357 = vcombine_s16(vshrn_n_s32(a, 16), vshrn_n_s32(b, 16));
+ // Subtract
+ return vreinterpretq_m128i_s16(vsubq_s16(ab0246, ab1357));
}
-// Load a double-precision (64-bit) floating-point element from memory into both
-// elements of dst.
-//
-// dst[63:0] := MEM[mem_addr+63:mem_addr]
-// dst[127:64] := MEM[mem_addr+63:mem_addr]
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_load1_pd
-FORCE_INLINE __m128d _mm_load1_pd(const double *p)
+// Computes pairwise difference of each argument as 32-bit signed or unsigned
+// integer values a and b.
+FORCE_INLINE __m128i _mm_hsub_epi32(__m128i _a, __m128i _b)
+{
+ int64x2_t a = vreinterpretq_s64_m128i(_a);
+ int64x2_t b = vreinterpretq_s64_m128i(_b);
+ // Interleave using vshrn/vmovn
+ // [a0|a2|b0|b2]
+    // [a1|a3|b1|b3]
+ int32x4_t ab02 = vcombine_s32(vmovn_s64(a), vmovn_s64(b));
+ int32x4_t ab13 = vcombine_s32(vshrn_n_s64(a, 32), vshrn_n_s64(b, 32));
+ // Subtract
+ return vreinterpretq_m128i_s32(vsubq_s32(ab02, ab13));
+}
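+// Example: _mm_hsub_epi32({a0,a1,a2,a3}, {b0,b1,b2,b3}) yields
+//   {a0-a1, a2-a3, b0-b1, b2-b3}.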
+
+// Horizontally subtract adjacent pairs of 16-bit integers in a and b, and pack
+// the signed 16-bit results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_hsub_pi16
+FORCE_INLINE __m64 _mm_hsub_pi16(__m64 _a, __m64 _b)
+{
+ int32x4_t ab =
+ vcombine_s32(vreinterpret_s32_m64(_a), vreinterpret_s32_m64(_b));
+
+ int16x4_t ab_low_bits = vmovn_s32(ab);
+ int16x4_t ab_high_bits = vshrn_n_s32(ab, 16);
+
+ return vreinterpret_m64_s16(vsub_s16(ab_low_bits, ab_high_bits));
+}
+
+// Horizontally subtract adjacent pairs of 32-bit integers in a and b, and pack
+// the signed 32-bit results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=mm_hsub_pi32
+FORCE_INLINE __m64 _mm_hsub_pi32(__m64 _a, __m64 _b)
{
#if defined(__aarch64__)
- return vreinterpretq_m128d_f64(vld1q_dup_f64(p));
+ int32x2_t a = vreinterpret_s32_m64(_a);
+ int32x2_t b = vreinterpret_s32_m64(_b);
+ return vreinterpret_m64_s32(vsub_s32(vtrn1_s32(a, b), vtrn2_s32(a, b)));
#else
- return vreinterpretq_m128d_s64(vdupq_n_s64(*(const int64_t *) p));
+ int32x2x2_t trn_ab =
+ vtrn_s32(vreinterpret_s32_m64(_a), vreinterpret_s32_m64(_b));
+ return vreinterpret_m64_s32(vsub_s32(trn_ab.val[0], trn_ab.val[1]));
#endif
}
-// Load a double-precision (64-bit) floating-point element from memory into both
-// elements of dst.
-//
-// dst[63:0] := MEM[mem_addr+63:mem_addr]
-// dst[127:64] := MEM[mem_addr+63:mem_addr]
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_load_pd1
-#define _mm_load_pd1 _mm_load1_pd
+// Computes saturated pairwise difference of each argument as 16-bit signed
+// integer values a and b.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_hsubs_epi16
+FORCE_INLINE __m128i _mm_hsubs_epi16(__m128i _a, __m128i _b)
+{
+#if defined(__aarch64__)
+ int16x8_t a = vreinterpretq_s16_m128i(_a);
+ int16x8_t b = vreinterpretq_s16_m128i(_b);
+ return vreinterpretq_s64_s16(
+ vqsubq_s16(vuzp1q_s16(a, b), vuzp2q_s16(a, b)));
+#else
+ int32x4_t a = vreinterpretq_s32_m128i(_a);
+ int32x4_t b = vreinterpretq_s32_m128i(_b);
+ // Interleave using vshrn/vmovn
+ // [a0|a2|a4|a6|b0|b2|b4|b6]
+ // [a1|a3|a5|a7|b1|b3|b5|b7]
+ int16x8_t ab0246 = vcombine_s16(vmovn_s32(a), vmovn_s32(b));
+ int16x8_t ab1357 = vcombine_s16(vshrn_n_s32(a, 16), vshrn_n_s32(b, 16));
+ // Saturated subtract
+ return vreinterpretq_m128i_s16(vqsubq_s16(ab0246, ab1357));
+#endif
+}
-// Load a double-precision (64-bit) floating-point element from memory into the
-// upper element of dst, and copy the lower element from a to dst. mem_addr does
-// not need to be aligned on any particular boundary.
-//
-// dst[63:0] := a[63:0]
-// dst[127:64] := MEM[mem_addr+63:mem_addr]
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loadh_pd
-FORCE_INLINE __m128d _mm_loadh_pd(__m128d a, const double *p)
+// Horizontally subtract adjacent pairs of signed 16-bit integers in a and b
+// using saturation, and pack the signed 16-bit results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_hsubs_pi16
+FORCE_INLINE __m64 _mm_hsubs_pi16(__m64 _a, __m64 _b)
{
+ int16x4_t a = vreinterpret_s16_m64(_a);
+ int16x4_t b = vreinterpret_s16_m64(_b);
#if defined(__aarch64__)
- return vreinterpretq_m128d_f64(
- vcombine_f64(vget_low_f64(vreinterpretq_f64_m128d(a)), vld1_f64(p)));
+ return vreinterpret_s64_s16(vqsub_s16(vuzp1_s16(a, b), vuzp2_s16(a, b)));
#else
- return vreinterpretq_m128d_f32(vcombine_f32(
- vget_low_f32(vreinterpretq_f32_m128d(a)), vld1_f32((const float *) p)));
+ int16x4x2_t res = vuzp_s16(a, b);
+ return vreinterpret_s64_s16(vqsub_s16(res.val[0], res.val[1]));
#endif
}
-// Load a double-precision (64-bit) floating-point element from memory into both
-// elements of dst.
-//
-// dst[63:0] := MEM[mem_addr+63:mem_addr]
-// dst[127:64] := MEM[mem_addr+63:mem_addr]
+// Vertically multiply each unsigned 8-bit integer from a with the corresponding
+// signed 8-bit integer from b, producing intermediate signed 16-bit integers.
+// Horizontally add adjacent pairs of intermediate signed 16-bit integers,
+// and pack the saturated results in dst.
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_load_pd1
-#define _mm_load_pd1 _mm_load1_pd
+// FOR j := 0 to 7
+// i := j*16
+// dst[i+15:i] := Saturate_To_Int16( a[i+15:i+8]*b[i+15:i+8] +
+// a[i+7:i]*b[i+7:i] )
+// ENDFOR
+FORCE_INLINE __m128i _mm_maddubs_epi16(__m128i _a, __m128i _b)
+{
+#if defined(__aarch64__)
+ uint8x16_t a = vreinterpretq_u8_m128i(_a);
+ int8x16_t b = vreinterpretq_s8_m128i(_b);
+ int16x8_t tl = vmulq_s16(vreinterpretq_s16_u16(vmovl_u8(vget_low_u8(a))),
+ vmovl_s8(vget_low_s8(b)));
+ int16x8_t th = vmulq_s16(vreinterpretq_s16_u16(vmovl_u8(vget_high_u8(a))),
+ vmovl_s8(vget_high_s8(b)));
+ return vreinterpretq_m128i_s16(
+ vqaddq_s16(vuzp1q_s16(tl, th), vuzp2q_s16(tl, th)));
+#else
+ // This would be much simpler if x86 would choose to zero extend OR sign
+ // extend, not both. This could probably be optimized better.
+ uint16x8_t a = vreinterpretq_u16_m128i(_a);
+ int16x8_t b = vreinterpretq_s16_m128i(_b);
-// Load a double-precision (64-bit) floating-point element from memory into both
-// elements of dst.
-//
-// dst[63:0] := MEM[mem_addr+63:mem_addr]
-// dst[127:64] := MEM[mem_addr+63:mem_addr]
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loaddup_pd
-#define _mm_loaddup_pd _mm_load1_pd
+ // Zero extend a
+ int16x8_t a_odd = vreinterpretq_s16_u16(vshrq_n_u16(a, 8));
+ int16x8_t a_even = vreinterpretq_s16_u16(vbicq_u16(a, vdupq_n_u16(0xff00)));
-// Loads 128-bit value. :
-// https://msdn.microsoft.com/zh-cn/library/f4k12ae8(v=vs.90).aspx
-FORCE_INLINE __m128i _mm_loadu_si128(const __m128i *p)
+ // Sign extend by shifting left then shifting right.
+ int16x8_t b_even = vshrq_n_s16(vshlq_n_s16(b, 8), 8);
+ int16x8_t b_odd = vshrq_n_s16(b, 8);
+
+ // multiply
+ int16x8_t prod1 = vmulq_s16(a_even, b_even);
+ int16x8_t prod2 = vmulq_s16(a_odd, b_odd);
+
+ // saturated add
+ return vreinterpretq_m128i_s16(vqaddq_s16(prod1, prod2));
+#endif
+}
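+// Note: every (uint8 * int8) product fits in an int16 (range -32640..32385),
+// so the widened 16-bit multiplies above are exact; only the final pairwise
+// addition needs to saturate, matching the SSSE3 definition.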
+
+// Vertically multiply each unsigned 8-bit integer from a with the corresponding
+// signed 8-bit integer from b, producing intermediate signed 16-bit integers.
+// Horizontally add adjacent pairs of intermediate signed 16-bit integers, and
+// pack the saturated results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_maddubs_pi16
+FORCE_INLINE __m64 _mm_maddubs_pi16(__m64 _a, __m64 _b)
{
- return vreinterpretq_m128i_s32(vld1q_s32((const int32_t *) p));
+ uint16x4_t a = vreinterpret_u16_m64(_a);
+ int16x4_t b = vreinterpret_s16_m64(_b);
+
+ // Zero extend a
+ int16x4_t a_odd = vreinterpret_s16_u16(vshr_n_u16(a, 8));
+ int16x4_t a_even = vreinterpret_s16_u16(vand_u16(a, vdup_n_u16(0xff)));
+
+ // Sign extend by shifting left then shifting right.
+ int16x4_t b_even = vshr_n_s16(vshl_n_s16(b, 8), 8);
+ int16x4_t b_odd = vshr_n_s16(b, 8);
+
+ // multiply
+ int16x4_t prod1 = vmul_s16(a_even, b_even);
+ int16x4_t prod2 = vmul_s16(a_odd, b_odd);
+
+ // saturated add
+ return vreinterpret_m64_s16(vqadd_s16(prod1, prod2));
}
-// Load unaligned 32-bit integer from memory into the first element of dst.
-//
-// dst[31:0] := MEM[mem_addr+31:mem_addr]
-// dst[MAX:32] := 0
+// Multiply packed signed 16-bit integers in a and b, producing intermediate
+// signed 32-bit integers. Shift right by 15 bits with rounding, and store
+// the packed 16-bit integers in dst.
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loadu_si32
-FORCE_INLINE __m128i _mm_loadu_si32(const void *p)
+// r0 := Round(((int32_t)a0 * (int32_t)b0) >> 15)
+// r1 := Round(((int32_t)a1 * (int32_t)b1) >> 15)
+// r2 := Round(((int32_t)a2 * (int32_t)b2) >> 15)
+// ...
+// r7 := Round(((int32_t)a7 * (int32_t)b7) >> 15)
+FORCE_INLINE __m128i _mm_mulhrs_epi16(__m128i a, __m128i b)
{
- return vreinterpretq_m128i_s32(
- vsetq_lane_s32(*(const int32_t *) p, vdupq_n_s32(0), 0));
+    // vqrdmulhq_s16 would do this in one instruction, but its saturation
+    // behaviour differs from SSSE3, so it is not used:
+    // return vreinterpretq_m128i_s16(vqrdmulhq_s16(a, b));
+
+ // Multiply
+ int32x4_t mul_lo = vmull_s16(vget_low_s16(vreinterpretq_s16_m128i(a)),
+ vget_low_s16(vreinterpretq_s16_m128i(b)));
+ int32x4_t mul_hi = vmull_s16(vget_high_s16(vreinterpretq_s16_m128i(a)),
+ vget_high_s16(vreinterpretq_s16_m128i(b)));
+
+ // Rounding narrowing shift right
+ // narrow = (int16_t)((mul + 16384) >> 15);
+ int16x4_t narrow_lo = vrshrn_n_s32(mul_lo, 15);
+ int16x4_t narrow_hi = vrshrn_n_s32(mul_hi, 15);
+
+ // Join together
+ return vreinterpretq_m128i_s16(vcombine_s16(narrow_lo, narrow_hi));
}
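+// Worked example for lane 0: a0 = 0x4000 (0.5 in Q15) and b0 = 0x2000 (0.25
+// in Q15) give (0x4000 * 0x2000 + 0x4000) >> 15 = 0x1000 (0.125 in Q15).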
-// Convert packed double-precision (64-bit) floating-point elements in a to
-// packed single-precision (32-bit) floating-point elements, and store the
-// results in dst.
+// Multiply packed signed 16-bit integers in a and b, producing intermediate
+// signed 32-bit integers. Truncate each intermediate integer to the 18 most
+// significant bits, round by adding 1, and store bits [16:1] to dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_mulhrs_pi16
+FORCE_INLINE __m64 _mm_mulhrs_pi16(__m64 a, __m64 b)
+{
+ int32x4_t mul_extend =
+ vmull_s16((vreinterpret_s16_m64(a)), (vreinterpret_s16_m64(b)));
+
+ // Rounding narrowing shift right
+ return vreinterpret_m64_s16(vrshrn_n_s32(mul_extend, 15));
+}
+
+// Shuffle packed 8-bit integers in a according to shuffle control mask in the
+// corresponding 8-bit element of b, and store the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_shuffle_epi8
+FORCE_INLINE __m128i _mm_shuffle_epi8(__m128i a, __m128i b)
+{
+ int8x16_t tbl = vreinterpretq_s8_m128i(a); // input a
+ uint8x16_t idx = vreinterpretq_u8_m128i(b); // input b
+ uint8x16_t idx_masked =
+ vandq_u8(idx, vdupq_n_u8(0x8F)); // avoid using meaningless bits
+#if defined(__aarch64__)
+ return vreinterpretq_m128i_s8(vqtbl1q_s8(tbl, idx_masked));
+#elif defined(__GNUC__)
+ int8x16_t ret;
+ // %e and %f represent the even and odd D registers
+ // respectively.
+ __asm__ __volatile__(
+ "vtbl.8 %e[ret], {%e[tbl], %f[tbl]}, %e[idx]\n"
+ "vtbl.8 %f[ret], {%e[tbl], %f[tbl]}, %f[idx]\n"
+ : [ret] "=&w"(ret)
+ : [tbl] "w"(tbl), [idx] "w"(idx_masked));
+ return vreinterpretq_m128i_s8(ret);
+#else
+    // ARMv7-A fallback: split the 16-byte table and use a pair of vtbl2 lookups
+ int8x8x2_t a_split = {vget_low_s8(tbl), vget_high_s8(tbl)};
+ return vreinterpretq_m128i_s8(
+ vcombine_s8(vtbl2_s8(a_split, vget_low_u8(idx_masked)),
+ vtbl2_s8(a_split, vget_high_u8(idx_masked))));
+#endif
+}
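+// Note: masking the control byte with 0x8F keeps bit 7 (the SSSE3 "zero this
+// lane" flag) and the low four index bits; out-of-range indices make
+// vqtbl1q_s8/vtbl2_s8 return zero, which reproduces the required zeroing.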
+
+// Shuffle packed 8-bit integers in a according to shuffle control mask in the
+// corresponding 8-bit element of b, and store the results in dst.
//
-// FOR j := 0 to 1
-// i := 32*j
-// k := 64*j
-// dst[i+31:i] := Convert_FP64_To_FP32(a[k+64:k])
+// FOR j := 0 to 7
+// i := j*8
+// IF b[i+7] == 1
+// dst[i+7:i] := 0
+// ELSE
+// index[2:0] := b[i+2:i]
+// dst[i+7:i] := a[index*8+7:index*8]
+// FI
// ENDFOR
-// dst[127:64] := 0
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtpd_ps
-FORCE_INLINE __m128 _mm_cvtpd_ps(__m128d a)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_shuffle_pi8
+FORCE_INLINE __m64 _mm_shuffle_pi8(__m64 a, __m64 b)
+{
+ const int8x8_t controlMask =
+ vand_s8(vreinterpret_s8_m64(b), vdup_n_s8((int8_t)(0x1 << 7 | 0x07)));
+ int8x8_t res = vtbl1_s8(vreinterpret_s8_m64(a), controlMask);
+ return vreinterpret_m64_s8(res);
+}
+
+// Negate packed 16-bit integers in a when the corresponding signed
+// 16-bit integer in b is negative, and store the results in dst.
+// Elements in dst are zeroed out when the corresponding element
+// in b is zero.
+//
+// for i in 0..7
+// if b[i] < 0
+// r[i] := -a[i]
+// else if b[i] == 0
+// r[i] := 0
+// else
+// r[i] := a[i]
+// fi
+// done
+FORCE_INLINE __m128i _mm_sign_epi16(__m128i _a, __m128i _b)
{
+ int16x8_t a = vreinterpretq_s16_m128i(_a);
+ int16x8_t b = vreinterpretq_s16_m128i(_b);
+
+ // signed shift right: faster than vclt
+ // (b < 0) ? 0xFFFF : 0
+ uint16x8_t ltMask = vreinterpretq_u16_s16(vshrq_n_s16(b, 15));
+ // (b == 0) ? 0xFFFF : 0
#if defined(__aarch64__)
- float32x2_t tmp = vcvt_f32_f64(vreinterpretq_f64_m128d(a));
- return vreinterpretq_m128_f32(vcombine_f32(tmp, vdup_n_f32(0)));
+ int16x8_t zeroMask = vreinterpretq_s16_u16(vceqzq_s16(b));
#else
- float a0 = (float) ((double *) &a)[0];
- float a1 = (float) ((double *) &a)[1];
- return _mm_set_ps(0, 0, a1, a0);
+ int16x8_t zeroMask = vreinterpretq_s16_u16(vceqq_s16(b, vdupq_n_s16(0)));
#endif
+
+    // bitwise select either a or negative 'a' (vnegq_s16(a) equals negative
+ // 'a') based on ltMask
+ int16x8_t masked = vbslq_s16(ltMask, vnegq_s16(a), a);
+ // res = masked & (~zeroMask)
+ int16x8_t res = vbicq_s16(masked, zeroMask);
+ return vreinterpretq_m128i_s16(res);
}
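+
+// Illustrative example of the behaviour above: with a = {1, 2, 3, ...} and
+// b = {-5, 0, 7, ...}, _mm_sign_epi16(a, b) yields {-1, 0, 3, ...}: lane 0 is
+// negated, lane 1 is zeroed, and lane 2 passes through unchanged.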
-// Copy the lower double-precision (64-bit) floating-point element of a to dst.
+// Negate packed 32-bit integers in a when the corresponding signed
+// 32-bit integer in b is negative, and store the results in dst.
+// Elements in dst are zeroed out when the corresponding element
+// in b is zero.
//
-// dst[63:0] := a[63:0]
+// for i in 0..3
+// if b[i] < 0
+// r[i] := -a[i]
+// else if b[i] == 0
+// r[i] := 0
+// else
+// r[i] := a[i]
+// fi
+// done
+FORCE_INLINE __m128i _mm_sign_epi32(__m128i _a, __m128i _b)
+{
+ int32x4_t a = vreinterpretq_s32_m128i(_a);
+ int32x4_t b = vreinterpretq_s32_m128i(_b);
+
+ // signed shift right: faster than vclt
+ // (b < 0) ? 0xFFFFFFFF : 0
+ uint32x4_t ltMask = vreinterpretq_u32_s32(vshrq_n_s32(b, 31));
+
+ // (b == 0) ? 0xFFFFFFFF : 0
+#if defined(__aarch64__)
+ int32x4_t zeroMask = vreinterpretq_s32_u32(vceqzq_s32(b));
+#else
+ int32x4_t zeroMask = vreinterpretq_s32_u32(vceqq_s32(b, vdupq_n_s32(0)));
+#endif
+
+ // bitwise select either a or negative 'a' (vnegq_s32(a) yields negative 'a')
+ // based on ltMask
+ int32x4_t masked = vbslq_s32(ltMask, vnegq_s32(a), a);
+ // res = masked & (~zeroMask)
+ int32x4_t res = vbicq_s32(masked, zeroMask);
+ return vreinterpretq_m128i_s32(res);
+}
+
+// Negate packed 8-bit integers in a when the corresponding signed
+// 8-bit integer in b is negative, and store the results in dst.
+// Elements in dst are zeroed out when the corresponding element
+// in b is zero.
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtsd_f64
-FORCE_INLINE double _mm_cvtsd_f64(__m128d a)
+// for i in 0..15
+// if b[i] < 0
+// r[i] := -a[i]
+// else if b[i] == 0
+// r[i] := 0
+// else
+// r[i] := a[i]
+// fi
+// done
+FORCE_INLINE __m128i _mm_sign_epi8(__m128i _a, __m128i _b)
{
+ int8x16_t a = vreinterpretq_s8_m128i(_a);
+ int8x16_t b = vreinterpretq_s8_m128i(_b);
+
+ // signed shift right: faster than vclt
+ // (b < 0) ? 0xFF : 0
+ uint8x16_t ltMask = vreinterpretq_u8_s8(vshrq_n_s8(b, 7));
+
+ // (b == 0) ? 0xFF : 0
#if defined(__aarch64__)
- return (double) vgetq_lane_f64(vreinterpretq_f64_m128d(a), 0);
+ int8x16_t zeroMask = vreinterpretq_s8_u8(vceqzq_s8(b));
#else
- return ((double *) &a)[0];
+ int8x16_t zeroMask = vreinterpretq_s8_u8(vceqq_s8(b, vdupq_n_s8(0)));
#endif
+
+ // bitwise select either a or negative 'a' (vnegq_s8(a) returns negative 'a')
+ // based on ltMask
+ int8x16_t masked = vbslq_s8(ltMask, vnegq_s8(a), a);
+ // res = masked & (~zeroMask)
+ int8x16_t res = vbicq_s8(masked, zeroMask);
+
+ return vreinterpretq_m128i_s8(res);
}
-// Convert packed single-precision (32-bit) floating-point elements in a to
-// packed double-precision (64-bit) floating-point elements, and store the
-// results in dst.
+// Negate packed 16-bit integers in a when the corresponding signed 16-bit
+// integer in b is negative, and store the results in dst. Elements in dst are
+// zeroed out when the corresponding element in b is zero.
//
-// FOR j := 0 to 1
-// i := 64*j
-// k := 32*j
-// dst[i+63:i] := Convert_FP32_To_FP64(a[k+31:k])
+// FOR j := 0 to 3
+// i := j*16
+// IF b[i+15:i] < 0
+// dst[i+15:i] := -(a[i+15:i])
+// ELSE IF b[i+15:i] == 0
+// dst[i+15:i] := 0
+// ELSE
+// dst[i+15:i] := a[i+15:i]
+// FI
// ENDFOR
//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtps_pd
-FORCE_INLINE __m128d _mm_cvtps_pd(__m128 a)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sign_pi16
+FORCE_INLINE __m64 _mm_sign_pi16(__m64 _a, __m64 _b)
{
+ int16x4_t a = vreinterpret_s16_m64(_a);
+ int16x4_t b = vreinterpret_s16_m64(_b);
+
+ // signed shift right: faster than vclt
+ // (b < 0) ? 0xFFFF : 0
+ uint16x4_t ltMask = vreinterpret_u16_s16(vshr_n_s16(b, 15));
+
+ // (b == 0) ? 0xFFFF : 0
#if defined(__aarch64__)
- return vreinterpretq_m128d_f64(
- vcvt_f64_f32(vget_low_f32(vreinterpretq_f32_m128(a))));
+ int16x4_t zeroMask = vreinterpret_s16_u16(vceqz_s16(b));
#else
- double a0 = (double) vgetq_lane_f32(vreinterpretq_f32_m128(a), 0);
- double a1 = (double) vgetq_lane_f32(vreinterpretq_f32_m128(a), 1);
- return _mm_set_pd(a1, a0);
+ int16x4_t zeroMask = vreinterpret_s16_u16(vceq_s16(b, vdup_n_s16(0)));
#endif
-}
-// Cast vector of type __m128d to type __m128i. This intrinsic is only used for
-// compilation and does not generate any instructions, thus it has zero latency.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_castpd_si128
-FORCE_INLINE __m128i _mm_castpd_si128(__m128d a)
-{
- return vreinterpretq_m128i_s64(vreinterpretq_s64_m128d(a));
+ // bitwise select either a or negative 'a' (vneg_s16(a) returns negative 'a')
+ // based on ltMask
+ int16x4_t masked = vbsl_s16(ltMask, vneg_s16(a), a);
+ // res = masked & (~zeroMask)
+ int16x4_t res = vbic_s16(masked, zeroMask);
+
+ return vreinterpret_m64_s16(res);
}
-// Cast vector of type __m128d to type __m128. This intrinsic is only used for
-// compilation and does not generate any instructions, thus it has zero latency.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_castpd_ps
-FORCE_INLINE __m128 _mm_castpd_ps(__m128d a)
+// Negate packed 32-bit integers in a when the corresponding signed 32-bit
+// integer in b is negative, and store the results in dst. Elements in dst are
+// zeroed out when the corresponding element in b is zero.
+//
+// FOR j := 0 to 1
+// i := j*32
+// IF b[i+31:i] < 0
+// dst[i+31:i] := -(a[i+31:i])
+// ELSE IF b[i+31:i] == 0
+// dst[i+31:i] := 0
+// ELSE
+// dst[i+31:i] := a[i+31:i]
+// FI
+// ENDFOR
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sign_pi32
+FORCE_INLINE __m64 _mm_sign_pi32(__m64 _a, __m64 _b)
{
- return vreinterpretq_m128_s64(vreinterpretq_s64_m128d(a));
+ int32x2_t a = vreinterpret_s32_m64(_a);
+ int32x2_t b = vreinterpret_s32_m64(_b);
+
+ // signed shift right: faster than vclt
+ // (b < 0) ? 0xFFFFFFFF : 0
+ uint32x2_t ltMask = vreinterpret_u32_s32(vshr_n_s32(b, 31));
+
+ // (b == 0) ? 0xFFFFFFFF : 0
+#if defined(__aarch64__)
+ int32x2_t zeroMask = vreinterpret_s32_u32(vceqz_s32(b));
+#else
+ int32x2_t zeroMask = vreinterpret_s32_u32(vceq_s32(b, vdup_n_s32(0)));
+#endif
+
+ // bitwise select either a or negative 'a' (vneg_s32(a) returns negative 'a')
+ // based on ltMask
+ int32x2_t masked = vbsl_s32(ltMask, vneg_s32(a), a);
+ // res = masked & (~zeroMask)
+ int32x2_t res = vbic_s32(masked, zeroMask);
+
+ return vreinterpret_m64_s32(res);
}
-// Blend packed single-precision (32-bit) floating-point elements from a and b
-// using mask, and store the results in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_blendv_ps
-FORCE_INLINE __m128 _mm_blendv_ps(__m128 _a, __m128 _b, __m128 _mask)
+// Negate packed 8-bit integers in a when the corresponding signed 8-bit integer
+// in b is negative, and store the results in dst. Elements in dst are zeroed
+// out when the corresponding element in b is zero.
+//
+// FOR j := 0 to 7
+// i := j*8
+// IF b[i+7:i] < 0
+// dst[i+7:i] := -(a[i+7:i])
+// ELSE IF b[i+7:i] == 0
+// dst[i+7:i] := 0
+// ELSE
+// dst[i+7:i] := a[i+7:i]
+// FI
+// ENDFOR
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sign_pi8
+FORCE_INLINE __m64 _mm_sign_pi8(__m64 _a, __m64 _b)
{
- // Use a signed shift right to create a mask with the sign bit
- uint32x4_t mask =
- vreinterpretq_u32_s32(vshrq_n_s32(vreinterpretq_s32_m128(_mask), 31));
- float32x4_t a = vreinterpretq_f32_m128(_a);
- float32x4_t b = vreinterpretq_f32_m128(_b);
- return vreinterpretq_m128_f32(vbslq_f32(mask, b, a));
+ int8x8_t a = vreinterpret_s8_m64(_a);
+ int8x8_t b = vreinterpret_s8_m64(_b);
+
+ // signed shift right: faster than vclt
+ // (b < 0) ? 0xFF : 0
+ uint8x8_t ltMask = vreinterpret_u8_s8(vshr_n_s8(b, 7));
+
+ // (b == 0) ? 0xFF : 0
+#if defined(__aarch64__)
+ int8x8_t zeroMask = vreinterpret_s8_u8(vceqz_s8(b));
+#else
+ int8x8_t zeroMask = vreinterpret_s8_u8(vceq_s8(b, vdup_n_s8(0)));
+#endif
+
+ // bitwise select either a or negative 'a' (vneg_s8(a) returns negative 'a')
+ // based on ltMask
+ int8x8_t masked = vbsl_s8(ltMask, vneg_s8(a), a);
+ // res = masked & (~zeroMask)
+ int8x8_t res = vbic_s8(masked, zeroMask);
+
+ return vreinterpret_m64_s8(res);
}
+/* SSE4.1 */
+
+// Blend packed 16-bit integers from a and b using control mask imm8, and store
+// the results in dst.
+//
+// FOR j := 0 to 7
+// i := j*16
+// IF imm8[j]
+// dst[i+15:i] := b[i+15:i]
+// ELSE
+// dst[i+15:i] := a[i+15:i]
+// FI
+// ENDFOR
+// FORCE_INLINE __m128i _mm_blend_epi16(__m128i a, __m128i b,
+// __constrange(0,255) int imm)
+#define _mm_blend_epi16(a, b, imm) \
+ __extension__({ \
+ const uint16_t ones = 0xffff; \
+ const uint16_t zeros = 0x0000; \
+ const uint16_t _mask[8] = {((imm) & (1 << 0)) ? ones : zeros, \
+ ((imm) & (1 << 1)) ? ones : zeros, \
+ ((imm) & (1 << 2)) ? ones : zeros, \
+ ((imm) & (1 << 3)) ? ones : zeros, \
+ ((imm) & (1 << 4)) ? ones : zeros, \
+ ((imm) & (1 << 5)) ? ones : zeros, \
+ ((imm) & (1 << 6)) ? ones : zeros, \
+ ((imm) & (1 << 7)) ? ones : zeros}; \
+ uint16x8_t _mask_vec = vld1q_u16(_mask); \
+ uint16x8_t _a = vreinterpretq_u16_m128i(a); \
+ uint16x8_t _b = vreinterpretq_u16_m128i(b); \
+ vreinterpretq_m128i_u16(vbslq_u16(_mask_vec, _b, _a)); \
+ })
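+
+// For example (illustrative), imm = 0x0F takes the low four 16-bit lanes from
+// b and the high four lanes from a:
+//   __m128i mixed = _mm_blend_epi16(a, b, 0x0F);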
+
+// Blend packed double-precision (64-bit) floating-point elements from a and b
+// using control mask imm8, and store the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_blend_pd
+#define _mm_blend_pd(a, b, imm) \
+ __extension__({ \
+ const uint64_t _mask[2] = { \
+ ((imm) & (1 << 0)) ? ~UINT64_C(0) : UINT64_C(0), \
+ ((imm) & (1 << 1)) ? ~UINT64_C(0) : UINT64_C(0)}; \
+ uint64x2_t _mask_vec = vld1q_u64(_mask); \
+ uint64x2_t _a = vreinterpretq_u64_m128d(a); \
+ uint64x2_t _b = vreinterpretq_u64_m128d(b); \
+ vreinterpretq_m128d_u64(vbslq_u64(_mask_vec, _b, _a)); \
+ })
+
// Blend packed single-precision (32-bit) floating-point elements from a and b
// using mask, and store the results in dst.
// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_blend_ps
@@ -5672,6 +7337,27 @@ FORCE_INLINE __m128 _mm_blend_ps(__m128 _a, __m128 _b, const char imm8)
return vreinterpretq_m128_f32(vbslq_f32(mask, b, a));
}
+// Blend packed 8-bit integers from a and b using mask, and store the results in
+// dst.
+//
+// FOR j := 0 to 15
+// i := j*8
+// IF mask[i+7]
+// dst[i+7:i] := b[i+7:i]
+// ELSE
+// dst[i+7:i] := a[i+7:i]
+// FI
+// ENDFOR
+FORCE_INLINE __m128i _mm_blendv_epi8(__m128i _a, __m128i _b, __m128i _mask)
+{
+ // Use a signed shift right to create a mask with the sign bit
+ uint8x16_t mask =
+ vreinterpretq_u8_s8(vshrq_n_s8(vreinterpretq_s8_m128i(_mask), 7));
+ uint8x16_t a = vreinterpretq_u8_m128i(_a);
+ uint8x16_t b = vreinterpretq_u8_m128i(_b);
+ return vreinterpretq_m128i_u8(vbslq_u8(mask, b, a));
+}
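+
+// Illustrative usage: because only the most significant bit of each mask byte
+// is consulted, a comparison result can drive a per-byte select, e.g. a signed
+// byte-wise max:
+//   __m128i gt = _mm_cmpgt_epi8(x, y);         // 0xFF where x > y, else 0x00
+//   __m128i max8 = _mm_blendv_epi8(y, x, gt);  // take x where the mask is set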
+
// Blend packed double-precision (64-bit) floating-point elements from a and b
// using mask, and store the results in dst.
// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_blendv_pd
@@ -5690,154 +7376,55 @@ FORCE_INLINE __m128d _mm_blendv_pd(__m128d _a, __m128d _b, __m128d _mask)
#endif
}
-typedef struct {
- uint16_t res0;
- uint8_t res1 : 6;
- uint8_t bit22 : 1;
- uint8_t bit23 : 1;
- uint8_t res2;
-#if defined(__aarch64__)
- uint32_t res3;
-#endif
-} fpcr_bitfield;
-
-// Macro: Set the rounding mode bits of the MXCSR control and status register to
-// the value in unsigned 32-bit integer a. The rounding mode may contain any of
-// the following flags: _MM_ROUND_NEAREST, _MM_ROUND_DOWN, _MM_ROUND_UP,
-// _MM_ROUND_TOWARD_ZERO
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_MM_SET_ROUNDING_MODE
-FORCE_INLINE void _MM_SET_ROUNDING_MODE(int rounding)
-{
- union {
- fpcr_bitfield field;
-#if defined(__aarch64__)
- uint64_t value;
-#else
- uint32_t value;
-#endif
- } r;
-
-#if defined(__aarch64__)
- asm volatile("mrs %0, FPCR" : "=r"(r.value)); /* read */
-#else
- asm volatile("vmrs %0, FPSCR" : "=r"(r.value)); /* read */
-#endif
-
- switch (rounding) {
- case _MM_ROUND_TOWARD_ZERO:
- r.field.bit22 = 1;
- r.field.bit23 = 1;
- break;
- case _MM_ROUND_DOWN:
- r.field.bit22 = 0;
- r.field.bit23 = 1;
- break;
- case _MM_ROUND_UP:
- r.field.bit22 = 1;
- r.field.bit23 = 0;
- break;
- default: //_MM_ROUND_NEAREST
- r.field.bit22 = 0;
- r.field.bit23 = 0;
- }
-
-#if defined(__aarch64__)
- asm volatile("msr FPCR, %0" ::"r"(r)); /* write */
-#else
- asm volatile("vmsr FPSCR, %0" ::"r"(r)); /* write */
-#endif
-}
-
-FORCE_INLINE void _mm_setcsr(unsigned int a)
+// Blend packed single-precision (32-bit) floating-point elements from a and b
+// using mask, and store the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_blendv_ps
+FORCE_INLINE __m128 _mm_blendv_ps(__m128 _a, __m128 _b, __m128 _mask)
{
- _MM_SET_ROUNDING_MODE(a);
+ // Use a signed shift right to create a mask with the sign bit
+ uint32x4_t mask =
+ vreinterpretq_u32_s32(vshrq_n_s32(vreinterpretq_s32_m128(_mask), 31));
+ float32x4_t a = vreinterpretq_f32_m128(_a);
+ float32x4_t b = vreinterpretq_f32_m128(_b);
+ return vreinterpretq_m128_f32(vbslq_f32(mask, b, a));
}
-// Round the packed single-precision (32-bit) floating-point elements in a using
-// the rounding parameter, and store the results as packed single-precision
+// Round the packed double-precision (64-bit) floating-point elements in a up
+// to an integer value, and store the results as packed double-precision
// floating-point elements in dst.
-// software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_round_ps
-FORCE_INLINE __m128 _mm_round_ps(__m128 a, int rounding)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_ceil_pd
+FORCE_INLINE __m128d _mm_ceil_pd(__m128d a)
{
#if defined(__aarch64__)
- switch (rounding) {
- case (_MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC):
- return vreinterpretq_m128_f32(vrndnq_f32(vreinterpretq_f32_m128(a)));
- case (_MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC):
- return vreinterpretq_m128_f32(vrndmq_f32(vreinterpretq_f32_m128(a)));
- case (_MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC):
- return vreinterpretq_m128_f32(vrndpq_f32(vreinterpretq_f32_m128(a)));
- case (_MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC):
- return vreinterpretq_m128_f32(vrndq_f32(vreinterpretq_f32_m128(a)));
- default: //_MM_FROUND_CUR_DIRECTION
- return vreinterpretq_m128_f32(vrndiq_f32(vreinterpretq_f32_m128(a)));
- }
+ return vreinterpretq_m128d_f64(vrndpq_f64(vreinterpretq_f64_m128d(a)));
#else
- float *v_float = (float *) &a;
- __m128 zero, neg_inf, pos_inf;
-
- switch (rounding) {
- case (_MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC):
- return _mm_cvtepi32_ps(_mm_cvtps_epi32(a));
- case (_MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC):
- return (__m128){floorf(v_float[0]), floorf(v_float[1]),
- floorf(v_float[2]), floorf(v_float[3])};
- case (_MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC):
- return (__m128){ceilf(v_float[0]), ceilf(v_float[1]), ceilf(v_float[2]),
- ceilf(v_float[3])};
- case (_MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC):
- zero = _mm_set_ps(0.0f, 0.0f, 0.0f, 0.0f);
- neg_inf = _mm_set_ps(floorf(v_float[0]), floorf(v_float[1]),
- floorf(v_float[2]), floorf(v_float[3]));
- pos_inf = _mm_set_ps(ceilf(v_float[0]), ceilf(v_float[1]),
- ceilf(v_float[2]), ceilf(v_float[3]));
- return _mm_blendv_ps(pos_inf, neg_inf, _mm_cmple_ps(a, zero));
- default: //_MM_FROUND_CUR_DIRECTION
- return (__m128){roundf(v_float[0]), roundf(v_float[1]),
- roundf(v_float[2]), roundf(v_float[3])};
- }
+ double *f = (double *) &a;
+ return _mm_set_pd(ceil(f[1]), ceil(f[0]));
#endif
}
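+
+// For example (illustrative values), _mm_ceil_pd(_mm_set_pd(-1.2, 3.4))
+// yields 4.0 in the low lane and -1.0 in the high lane.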
-// Convert packed single-precision (32-bit) floating-point elements in a to
-// packed 32-bit integers, and store the results in dst.
-//
-// FOR j := 0 to 1
-// i := 32*j
-// dst[i+31:i] := Convert_FP32_To_Int32(a[i+31:i])
-// ENDFOR
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvt_ps2pi
-FORCE_INLINE __m64 _mm_cvt_ps2pi(__m128 a)
+// Round the packed single-precision (32-bit) floating-point elements in a up to
+// an integer value, and store the results as packed single-precision
+// floating-point elements in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_ceil_ps
+FORCE_INLINE __m128 _mm_ceil_ps(__m128 a)
{
#if defined(__aarch64__)
- return vreinterpret_m64_s32(
- vget_low_s32(vcvtnq_s32_f32(vreinterpretq_f32_m128(a))));
+ return vreinterpretq_m128_f32(vrndpq_f32(vreinterpretq_f32_m128(a)));
#else
- return vreinterpret_m64_s32(
- vcvt_s32_f32(vget_low_f32(vreinterpretq_f32_m128(
- _mm_round_ps(a, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC)))));
+ float *f = (float *) &a;
+ return _mm_set_ps(ceilf(f[3]), ceilf(f[2]), ceilf(f[1]), ceilf(f[0]));
#endif
}
-// Convert packed single-precision (32-bit) floating-point elements in a to
-// packed 32-bit integers, and store the results in dst.
-//
-// FOR j := 0 to 1
-// i := 32*j
-// dst[i+31:i] := Convert_FP32_To_Int32(a[i+31:i])
-// ENDFOR
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtps_pi32
-#define _mm_cvtps_pi32(a) _mm_cvt_ps2pi(a)
-
-// Round the packed single-precision (32-bit) floating-point elements in a up to
-// an integer value, and store the results as packed single-precision
-// floating-point elements in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_ceil_ps
-FORCE_INLINE __m128 _mm_ceil_ps(__m128 a)
+// Round the lower double-precision (64-bit) floating-point element in b up to
+// an integer value, store the result as a double-precision floating-point
+// element in the lower element of dst, and copy the upper element from a to the
+// upper element of dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_ceil_sd
+FORCE_INLINE __m128d _mm_ceil_sd(__m128d a, __m128d b)
{
- return _mm_round_ps(a, _MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC);
+ return _mm_move_sd(a, _mm_ceil_pd(b));
}
// Round the lower single-precision (32-bit) floating-point element in b up to
@@ -5851,396 +7438,442 @@ FORCE_INLINE __m128 _mm_ceil_ps(__m128 a)
// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_ceil_ss
FORCE_INLINE __m128 _mm_ceil_ss(__m128 a, __m128 b)
{
- return _mm_move_ss(
- a, _mm_round_ps(b, _MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC));
+ return _mm_move_ss(a, _mm_ceil_ps(b));
}
-// Round the packed single-precision (32-bit) floating-point elements in a down
-// to an integer value, and store the results as packed single-precision
-// floating-point elements in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_floor_ps
-FORCE_INLINE __m128 _mm_floor_ps(__m128 a)
+// Compare packed 64-bit integers in a and b for equality, and store the results
+// in dst
+FORCE_INLINE __m128i _mm_cmpeq_epi64(__m128i a, __m128i b)
{
- return _mm_round_ps(a, _MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC);
+#if defined(__aarch64__)
+ return vreinterpretq_m128i_u64(
+ vceqq_u64(vreinterpretq_u64_m128i(a), vreinterpretq_u64_m128i(b)));
+#else
+ // ARMv7 lacks vceqq_u64
+ // (a == b) -> (a_lo == b_lo) && (a_hi == b_hi)
+ uint32x4_t cmp =
+ vceqq_u32(vreinterpretq_u32_m128i(a), vreinterpretq_u32_m128i(b));
+ uint32x4_t swapped = vrev64q_u32(cmp);
+ return vreinterpretq_m128i_u32(vandq_u32(cmp, swapped));
+#endif
}
-// Round the lower single-precision (32-bit) floating-point element in b down to
-// an integer value, store the result as a single-precision floating-point
-// element in the lower element of dst, and copy the upper 3 packed elements
-// from a to the upper elements of dst.
-//
-// dst[31:0] := FLOOR(b[31:0])
-// dst[127:32] := a[127:32]
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_floor_ss
-FORCE_INLINE __m128 _mm_floor_ss(__m128 a, __m128 b)
+// Converts the four signed 16-bit integers in the lower 64 bits to four signed
+// 32-bit integers.
+FORCE_INLINE __m128i _mm_cvtepi16_epi32(__m128i a)
{
- return _mm_move_ss(
- a, _mm_round_ps(b, _MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC));
+ return vreinterpretq_m128i_s32(
+ vmovl_s16(vget_low_s16(vreinterpretq_s16_m128i(a))));
}
-// Load 128-bits of integer data from unaligned memory into dst. This intrinsic
-// may perform better than _mm_loadu_si128 when the data crosses a cache line
-// boundary.
-//
-// dst[127:0] := MEM[mem_addr+127:mem_addr]
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_lddqu_si128
-#define _mm_lddqu_si128 _mm_loadu_si128
+// Converts the two signed 16-bit integers in the lower 32 bits to two signed
+// 64-bit integers.
+FORCE_INLINE __m128i _mm_cvtepi16_epi64(__m128i a)
+{
+ int16x8_t s16x8 = vreinterpretq_s16_m128i(a); /* xxxx xxxx xxxx 0B0A */
+ int32x4_t s32x4 = vmovl_s16(vget_low_s16(s16x8)); /* 000x 000x 000B 000A */
+ int64x2_t s64x2 = vmovl_s32(vget_low_s32(s32x4)); /* 0000 000B 0000 000A */
+ return vreinterpretq_m128i_s64(s64x2);
+}
-/* Miscellaneous Operations */
+// Converts the two signed 32-bit integers in the lower 64 bits to two signed
+// 64-bit integers.
+FORCE_INLINE __m128i _mm_cvtepi32_epi64(__m128i a)
+{
+ return vreinterpretq_m128i_s64(
+ vmovl_s32(vget_low_s32(vreinterpretq_s32_m128i(a))));
+}
-// Shifts the 8 signed 16-bit integers in a right by count bits while shifting
-// in the sign bit.
-//
-// r0 := a0 >> count
-// r1 := a1 >> count
-// ...
-// r7 := a7 >> count
-//
-// https://msdn.microsoft.com/en-us/library/3c9997dk(v%3dvs.90).aspx
-FORCE_INLINE __m128i _mm_sra_epi16(__m128i a, __m128i count)
+// Converts the eight signed 8-bit integers in the lower 64 bits to eight
+// signed 16-bit integers.
+FORCE_INLINE __m128i _mm_cvtepi8_epi16(__m128i a)
{
- int64_t c = (int64_t) vget_low_s64((int64x2_t) count);
- if (unlikely(c > 15))
- return _mm_cmplt_epi16(a, _mm_setzero_si128());
- return vreinterpretq_m128i_s16(vshlq_s16((int16x8_t) a, vdupq_n_s16(-c)));
+ int8x16_t s8x16 = vreinterpretq_s8_m128i(a); /* xxxx xxxx xxxx DCBA */
+ int16x8_t s16x8 = vmovl_s8(vget_low_s8(s8x16)); /* 0x0x 0x0x 0D0C 0B0A */
+ return vreinterpretq_m128i_s16(s16x8);
}
-// Shifts the 4 signed 32-bit integers in a right by count bits while shifting
-// in the sign bit.
-//
-// r0 := a0 >> count
-// r1 := a1 >> count
-// r2 := a2 >> count
-// r3 := a3 >> count
-//
-// https://msdn.microsoft.com/en-us/library/ce40009e(v%3dvs.100).aspx
-FORCE_INLINE __m128i _mm_sra_epi32(__m128i a, __m128i count)
+// Converts the four signed 8-bit integers in the lower 32 bits to four
+// signed 32-bit integers.
+FORCE_INLINE __m128i _mm_cvtepi8_epi32(__m128i a)
{
- int64_t c = (int64_t) vget_low_s64((int64x2_t) count);
- if (unlikely(c > 31))
- return _mm_cmplt_epi32(a, _mm_setzero_si128());
- return vreinterpretq_m128i_s32(vshlq_s32((int32x4_t) a, vdupq_n_s32(-c)));
+ int8x16_t s8x16 = vreinterpretq_s8_m128i(a); /* xxxx xxxx xxxx DCBA */
+ int16x8_t s16x8 = vmovl_s8(vget_low_s8(s8x16)); /* 0x0x 0x0x 0D0C 0B0A */
+ int32x4_t s32x4 = vmovl_s16(vget_low_s16(s16x8)); /* 000D 000C 000B 000A */
+ return vreinterpretq_m128i_s32(s32x4);
}
-// Packs the 16 signed 16-bit integers from a and b into 8-bit integers and
-// saturates.
-// https://msdn.microsoft.com/en-us/library/k4y4f7w5%28v=vs.90%29.aspx
-FORCE_INLINE __m128i _mm_packs_epi16(__m128i a, __m128i b)
+// Converts the two signed 8-bit integers in the lower 16 bits to two
+// signed 64-bit integers.
+FORCE_INLINE __m128i _mm_cvtepi8_epi64(__m128i a)
{
- return vreinterpretq_m128i_s8(
- vcombine_s8(vqmovn_s16(vreinterpretq_s16_m128i(a)),
- vqmovn_s16(vreinterpretq_s16_m128i(b))));
+ int8x16_t s8x16 = vreinterpretq_s8_m128i(a); /* xxxx xxxx xxxx xxBA */
+ int16x8_t s16x8 = vmovl_s8(vget_low_s8(s8x16)); /* 0x0x 0x0x 0x0x 0B0A */
+ int32x4_t s32x4 = vmovl_s16(vget_low_s16(s16x8)); /* 000x 000x 000B 000A */
+ int64x2_t s64x2 = vmovl_s32(vget_low_s32(s32x4)); /* 0000 000B 0000 000A */
+ return vreinterpretq_m128i_s64(s64x2);
}
-// Packs the 16 signed 16 - bit integers from a and b into 8 - bit unsigned
-// integers and saturates.
-//
-// r0 := UnsignedSaturate(a0)
-// r1 := UnsignedSaturate(a1)
-// ...
-// r7 := UnsignedSaturate(a7)
-// r8 := UnsignedSaturate(b0)
-// r9 := UnsignedSaturate(b1)
-// ...
-// r15 := UnsignedSaturate(b7)
-//
-// https://msdn.microsoft.com/en-us/library/07ad1wx4(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_packus_epi16(const __m128i a, const __m128i b)
+// Converts the four unsigned 16-bit integers in the lower 64 bits to four
+// unsigned 32-bit integers.
+FORCE_INLINE __m128i _mm_cvtepu16_epi32(__m128i a)
{
- return vreinterpretq_m128i_u8(
- vcombine_u8(vqmovun_s16(vreinterpretq_s16_m128i(a)),
- vqmovun_s16(vreinterpretq_s16_m128i(b))));
+ return vreinterpretq_m128i_u32(
+ vmovl_u16(vget_low_u16(vreinterpretq_u16_m128i(a))));
}
-// Packs the 8 signed 32-bit integers from a and b into signed 16-bit integers
-// and saturates.
-//
-// r0 := SignedSaturate(a0)
-// r1 := SignedSaturate(a1)
-// r2 := SignedSaturate(a2)
-// r3 := SignedSaturate(a3)
-// r4 := SignedSaturate(b0)
-// r5 := SignedSaturate(b1)
-// r6 := SignedSaturate(b2)
-// r7 := SignedSaturate(b3)
-//
-// https://msdn.microsoft.com/en-us/library/393t56f9%28v=vs.90%29.aspx
-FORCE_INLINE __m128i _mm_packs_epi32(__m128i a, __m128i b)
+// Converts the two unsigned 16-bit integers in the lower 32 bits to two
+// unsigned 64-bit integers.
+FORCE_INLINE __m128i _mm_cvtepu16_epi64(__m128i a)
{
- return vreinterpretq_m128i_s16(
- vcombine_s16(vqmovn_s32(vreinterpretq_s32_m128i(a)),
- vqmovn_s32(vreinterpretq_s32_m128i(b))));
+ uint16x8_t u16x8 = vreinterpretq_u16_m128i(a); /* xxxx xxxx xxxx 0B0A */
+ uint32x4_t u32x4 = vmovl_u16(vget_low_u16(u16x8)); /* 000x 000x 000B 000A */
+ uint64x2_t u64x2 = vmovl_u32(vget_low_u32(u32x4)); /* 0000 000B 0000 000A */
+ return vreinterpretq_m128i_u64(u64x2);
}
-// Packs the 8 unsigned 32-bit integers from a and b into unsigned 16-bit
-// integers and saturates.
-//
-// r0 := UnsignedSaturate(a0)
-// r1 := UnsignedSaturate(a1)
-// r2 := UnsignedSaturate(a2)
-// r3 := UnsignedSaturate(a3)
-// r4 := UnsignedSaturate(b0)
-// r5 := UnsignedSaturate(b1)
-// r6 := UnsignedSaturate(b2)
-// r7 := UnsignedSaturate(b3)
-FORCE_INLINE __m128i _mm_packus_epi32(__m128i a, __m128i b)
+// Converts the two unsigned 32-bit integers in the lower 64 bits to two
+// unsigned 64-bit integers.
+FORCE_INLINE __m128i _mm_cvtepu32_epi64(__m128i a)
{
- return vreinterpretq_m128i_u16(
- vcombine_u16(vqmovun_s32(vreinterpretq_s32_m128i(a)),
- vqmovun_s32(vreinterpretq_s32_m128i(b))));
+ return vreinterpretq_m128i_u64(
+ vmovl_u32(vget_low_u32(vreinterpretq_u32_m128i(a))));
}
-// Interleaves the lower 8 signed or unsigned 8-bit integers in a with the lower
-// 8 signed or unsigned 8-bit integers in b.
-//
-// r0 := a0
-// r1 := b0
-// r2 := a1
-// r3 := b1
-// ...
-// r14 := a7
-// r15 := b7
-//
-// https://msdn.microsoft.com/en-us/library/xf7k860c%28v=vs.90%29.aspx
-FORCE_INLINE __m128i _mm_unpacklo_epi8(__m128i a, __m128i b)
+// Zero extend packed unsigned 8-bit integers in a to packed 16-bit integers,
+// and store the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtepu8_epi16
+FORCE_INLINE __m128i _mm_cvtepu8_epi16(__m128i a)
{
-#if defined(__aarch64__)
- return vreinterpretq_m128i_s8(
- vzip1q_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b)));
-#else
- int8x8_t a1 = vreinterpret_s8_s16(vget_low_s16(vreinterpretq_s16_m128i(a)));
- int8x8_t b1 = vreinterpret_s8_s16(vget_low_s16(vreinterpretq_s16_m128i(b)));
- int8x8x2_t result = vzip_s8(a1, b1);
- return vreinterpretq_m128i_s8(vcombine_s8(result.val[0], result.val[1]));
-#endif
+ uint8x16_t u8x16 = vreinterpretq_u8_m128i(a); /* xxxx xxxx HGFE DCBA */
+ uint16x8_t u16x8 = vmovl_u8(vget_low_u8(u8x16)); /* 0H0G 0F0E 0D0C 0B0A */
+ return vreinterpretq_m128i_u16(u16x8);
}
-// Interleaves the lower 4 signed or unsigned 16-bit integers in a with the
-// lower 4 signed or unsigned 16-bit integers in b.
-//
-// r0 := a0
-// r1 := b0
-// r2 := a1
-// r3 := b1
-// r4 := a2
-// r5 := b2
-// r6 := a3
-// r7 := b3
-//
-// https://msdn.microsoft.com/en-us/library/btxb17bw%28v=vs.90%29.aspx
-FORCE_INLINE __m128i _mm_unpacklo_epi16(__m128i a, __m128i b)
+// Converts the four unsigned 8-bit integers in the lower 32 bits to four
+// unsigned 32-bit integers.
+// https://msdn.microsoft.com/en-us/library/bb531467%28v=vs.100%29.aspx
+FORCE_INLINE __m128i _mm_cvtepu8_epi32(__m128i a)
{
-#if defined(__aarch64__)
- return vreinterpretq_m128i_s16(
- vzip1q_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
-#else
- int16x4_t a1 = vget_low_s16(vreinterpretq_s16_m128i(a));
- int16x4_t b1 = vget_low_s16(vreinterpretq_s16_m128i(b));
- int16x4x2_t result = vzip_s16(a1, b1);
- return vreinterpretq_m128i_s16(vcombine_s16(result.val[0], result.val[1]));
-#endif
+ uint8x16_t u8x16 = vreinterpretq_u8_m128i(a); /* xxxx xxxx xxxx DCBA */
+ uint16x8_t u16x8 = vmovl_u8(vget_low_u8(u8x16)); /* 0x0x 0x0x 0D0C 0B0A */
+ uint32x4_t u32x4 = vmovl_u16(vget_low_u16(u16x8)); /* 000D 000C 000B 000A */
+ return vreinterpretq_m128i_u32(u32x4);
}
-// Interleaves the lower 2 signed or unsigned 32 - bit integers in a with the
-// lower 2 signed or unsigned 32 - bit integers in b.
-//
-// r0 := a0
-// r1 := b0
-// r2 := a1
-// r3 := b1
-//
-// https://msdn.microsoft.com/en-us/library/x8atst9d(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_unpacklo_epi32(__m128i a, __m128i b)
+// Converts the two unsigned 8-bit integers in the lower 16 bits to two
+// unsigned 64-bit integers.
+FORCE_INLINE __m128i _mm_cvtepu8_epi64(__m128i a)
+{
+ uint8x16_t u8x16 = vreinterpretq_u8_m128i(a); /* xxxx xxxx xxxx xxBA */
+ uint16x8_t u16x8 = vmovl_u8(vget_low_u8(u8x16)); /* 0x0x 0x0x 0x0x 0B0A */
+ uint32x4_t u32x4 = vmovl_u16(vget_low_u16(u16x8)); /* 000x 000x 000B 000A */
+ uint64x2_t u64x2 = vmovl_u32(vget_low_u32(u32x4)); /* 0000 000B 0000 000A */
+ return vreinterpretq_m128i_u64(u64x2);
+}
+
+// Conditionally multiply the packed double-precision (64-bit) floating-point
+// elements in a and b using the high 4 bits in imm8, sum the two products, and
+// conditionally store the sum in dst using the low 4 bits of imm8.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_dp_pd
+FORCE_INLINE __m128d _mm_dp_pd(__m128d a, __m128d b, const int imm)
{
+ // Generate mask value from constant immediate bit value
+ const int64_t bit0Mask = imm & 0x01 ? UINT64_MAX : 0;
+ const int64_t bit1Mask = imm & 0x02 ? UINT64_MAX : 0;
+#if !SSE2NEON_PRECISE_DP
+ const int64_t bit4Mask = imm & 0x10 ? UINT64_MAX : 0;
+ const int64_t bit5Mask = imm & 0x20 ? UINT64_MAX : 0;
+#endif
+ // Conditional multiplication
+#if !SSE2NEON_PRECISE_DP
+ __m128d mul = _mm_mul_pd(a, b);
+ const __m128d mulMask =
+ _mm_castsi128_pd(_mm_set_epi64x(bit5Mask, bit4Mask));
+ __m128d tmp = _mm_and_pd(mul, mulMask);
+#else
#if defined(__aarch64__)
- return vreinterpretq_m128i_s32(
- vzip1q_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
+ double d0 = (imm & 0x10) ? vgetq_lane_f64(vreinterpretq_f64_m128d(a), 0) *
+ vgetq_lane_f64(vreinterpretq_f64_m128d(b), 0)
+ : 0;
+ double d1 = (imm & 0x20) ? vgetq_lane_f64(vreinterpretq_f64_m128d(a), 1) *
+ vgetq_lane_f64(vreinterpretq_f64_m128d(b), 1)
+ : 0;
#else
- int32x2_t a1 = vget_low_s32(vreinterpretq_s32_m128i(a));
- int32x2_t b1 = vget_low_s32(vreinterpretq_s32_m128i(b));
- int32x2x2_t result = vzip_s32(a1, b1);
- return vreinterpretq_m128i_s32(vcombine_s32(result.val[0], result.val[1]));
+ double d0 = (imm & 0x10) ? ((double *) &a)[0] * ((double *) &b)[0] : 0;
+ double d1 = (imm & 0x20) ? ((double *) &a)[1] * ((double *) &b)[1] : 0;
+#endif
+ __m128d tmp = _mm_set_pd(d1, d0);
#endif
+ // Sum the products
+#if defined(__aarch64__)
+ double sum = vpaddd_f64(vreinterpretq_f64_m128d(tmp));
+#else
+ double sum = *((double *) &tmp) + *(((double *) &tmp) + 1);
+#endif
+ // Conditionally store the sum
+ const __m128d sumMask =
+ _mm_castsi128_pd(_mm_set_epi64x(bit1Mask, bit0Mask));
+ __m128d res = _mm_and_pd(_mm_set_pd1(sum), sumMask);
+ return res;
}
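+
+// For example (illustrative), imm = 0x31 multiplies both lanes, sums the two
+// products, and writes the sum only to the low lane of dst:
+//   __m128d dot = _mm_dp_pd(a, b, 0x31);  // { a0*b0 + a1*b1, 0.0 }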
-FORCE_INLINE __m128i _mm_unpacklo_epi64(__m128i a, __m128i b)
+// Conditionally multiply the packed single-precision (32-bit) floating-point
+// elements in a and b using the high 4 bits in imm8, sum the four products,
+// and conditionally store the sum in dst using the low 4 bits of imm.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_dp_ps
+FORCE_INLINE __m128 _mm_dp_ps(__m128 a, __m128 b, const int imm)
{
- int64x1_t a_l = vget_low_s64(vreinterpretq_s64_m128i(a));
- int64x1_t b_l = vget_low_s64(vreinterpretq_s64_m128i(b));
- return vreinterpretq_m128i_s64(vcombine_s64(a_l, b_l));
+#if defined(__aarch64__)
+ /* shortcuts */
+ if (imm == 0xFF) {
+ return _mm_set1_ps(vaddvq_f32(_mm_mul_ps(a, b)));
+ }
+ if (imm == 0x7F) {
+ float32x4_t m = _mm_mul_ps(a, b);
+ m[3] = 0;
+ return _mm_set1_ps(vaddvq_f32(m));
+ }
+#endif
+
+ float s = 0, c = 0;
+ float32x4_t f32a = vreinterpretq_f32_m128(a);
+ float32x4_t f32b = vreinterpretq_f32_m128(b);
+
+ /* To improve the accuracy of floating-point summation, the Kahan summation
+ * algorithm is used for each addition.
+ */
+ if (imm & (1 << 4))
+ _sse2neon_kadd_f32(&s, &c, f32a[0] * f32b[0]);
+ if (imm & (1 << 5))
+ _sse2neon_kadd_f32(&s, &c, f32a[1] * f32b[1]);
+ if (imm & (1 << 6))
+ _sse2neon_kadd_f32(&s, &c, f32a[2] * f32b[2]);
+ if (imm & (1 << 7))
+ _sse2neon_kadd_f32(&s, &c, f32a[3] * f32b[3]);
+ s += c;
+
+ float32x4_t res = {
+ (imm & 0x1) ? s : 0,
+ (imm & 0x2) ? s : 0,
+ (imm & 0x4) ? s : 0,
+ (imm & 0x8) ? s : 0,
+ };
+ return vreinterpretq_m128_f32(res);
}
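+
+// For example (illustrative), a three-component dot product broadcast to every
+// lane of the result:
+//   __m128 dot3 = _mm_dp_ps(a, b, 0x7F);  // a0*b0 + a1*b1 + a2*b2 in all lanes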
-// Selects and interleaves the lower two single-precision, floating-point values
-// from a and b.
-//
-// r0 := a0
-// r1 := b0
-// r2 := a1
-// r3 := b1
-//
-// https://msdn.microsoft.com/en-us/library/25st103b%28v=vs.90%29.aspx
-FORCE_INLINE __m128 _mm_unpacklo_ps(__m128 a, __m128 b)
+// Extracts the selected signed or unsigned 32-bit integer from a and zero
+// extends.
+// FORCE_INLINE int _mm_extract_epi32(__m128i a, __constrange(0,4) int imm)
+#define _mm_extract_epi32(a, imm) \
+ vgetq_lane_s32(vreinterpretq_s32_m128i(a), (imm))
+
+// Extracts the selected signed or unsigned 64-bit integer from a and zero
+// extends.
+// FORCE_INLINE __int64 _mm_extract_epi64(__m128i a, __constrange(0,2) int imm)
+#define _mm_extract_epi64(a, imm) \
+ vgetq_lane_s64(vreinterpretq_s64_m128i(a), (imm))
+
+// Extracts the selected signed or unsigned 8-bit integer from a and zero
+// extends.
+// FORCE_INLINE int _mm_extract_epi8(__m128i a, __constrange(0,16) int imm)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_extract_epi8
+#define _mm_extract_epi8(a, imm) vgetq_lane_u8(vreinterpretq_u8_m128i(a), (imm))
+
+// Extracts the selected single-precision (32-bit) floating-point from a.
+// FORCE_INLINE int _mm_extract_ps(__m128 a, __constrange(0,4) int imm)
+#define _mm_extract_ps(a, imm) vgetq_lane_s32(vreinterpretq_s32_m128(a), (imm))
+
+// Round the packed double-precision (64-bit) floating-point elements in a down
+// to an integer value, and store the results as packed double-precision
+// floating-point elements in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_floor_pd
+FORCE_INLINE __m128d _mm_floor_pd(__m128d a)
{
#if defined(__aarch64__)
- return vreinterpretq_m128_f32(
- vzip1q_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
+ return vreinterpretq_m128d_f64(vrndmq_f64(vreinterpretq_f64_m128d(a)));
#else
- float32x2_t a1 = vget_low_f32(vreinterpretq_f32_m128(a));
- float32x2_t b1 = vget_low_f32(vreinterpretq_f32_m128(b));
- float32x2x2_t result = vzip_f32(a1, b1);
- return vreinterpretq_m128_f32(vcombine_f32(result.val[0], result.val[1]));
+ double *f = (double *) &a;
+ return _mm_set_pd(floor(f[1]), floor(f[0]));
#endif
}
-// Unpack and interleave double-precision (64-bit) floating-point elements from
-// the low half of a and b, and store the results in dst.
-//
-// DEFINE INTERLEAVE_QWORDS(src1[127:0], src2[127:0]) {
-// dst[63:0] := src1[63:0]
-// dst[127:64] := src2[63:0]
-// RETURN dst[127:0]
-// }
-// dst[127:0] := INTERLEAVE_QWORDS(a[127:0], b[127:0])
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_unpacklo_pd
-FORCE_INLINE __m128d _mm_unpacklo_pd(__m128d a, __m128d b)
+// Round the packed single-precision (32-bit) floating-point elements in a down
+// to an integer value, and store the results as packed single-precision
+// floating-point elements in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_floor_ps
+FORCE_INLINE __m128 _mm_floor_ps(__m128 a)
{
#if defined(__aarch64__)
- return vreinterpretq_m128d_f64(
- vzip1q_f64(vreinterpretq_f64_m128d(a), vreinterpretq_f64_m128d(b)));
+ return vreinterpretq_m128_f32(vrndmq_f32(vreinterpretq_f32_m128(a)));
#else
- return vreinterpretq_m128d_s64(
- vcombine_s64(vget_low_s64(vreinterpretq_s64_m128d(a)),
- vget_low_s64(vreinterpretq_s64_m128d(b))));
+ float *f = (float *) &a;
+ return _mm_set_ps(floorf(f[3]), floorf(f[2]), floorf(f[1]), floorf(f[0]));
#endif
}
-// Unpack and interleave double-precision (64-bit) floating-point elements from
-// the high half of a and b, and store the results in dst.
-//
-// DEFINE INTERLEAVE_HIGH_QWORDS(src1[127:0], src2[127:0]) {
-// dst[63:0] := src1[127:64]
-// dst[127:64] := src2[127:64]
-// RETURN dst[127:0]
-// }
-// dst[127:0] := INTERLEAVE_HIGH_QWORDS(a[127:0], b[127:0])
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_unpackhi_pd
-FORCE_INLINE __m128d _mm_unpackhi_pd(__m128d a, __m128d b)
+// Round the lower double-precision (64-bit) floating-point element in b down to
+// an integer value, store the result as a double-precision floating-point
+// element in the lower element of dst, and copy the upper element from a to the
+// upper element of dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_floor_sd
+FORCE_INLINE __m128d _mm_floor_sd(__m128d a, __m128d b)
{
-#if defined(__aarch64__)
- return vreinterpretq_m128d_f64(
- vzip2q_f64(vreinterpretq_f64_m128d(a), vreinterpretq_f64_m128d(b)));
-#else
- return vreinterpretq_m128d_s64(
- vcombine_s64(vget_high_s64(vreinterpretq_s64_m128d(a)),
- vget_high_s64(vreinterpretq_s64_m128d(b))));
-#endif
+ return _mm_move_sd(a, _mm_floor_pd(b));
}
-// Selects and interleaves the upper two single-precision, floating-point values
-// from a and b.
+// Round the lower single-precision (32-bit) floating-point element in b down to
+// an integer value, store the result as a single-precision floating-point
+// element in the lower element of dst, and copy the upper 3 packed elements
+// from a to the upper elements of dst.
//
-// r0 := a2
-// r1 := b2
-// r2 := a3
-// r3 := b3
+// dst[31:0] := FLOOR(b[31:0])
+// dst[127:32] := a[127:32]
//
-// https://msdn.microsoft.com/en-us/library/skccxx7d%28v=vs.90%29.aspx
-FORCE_INLINE __m128 _mm_unpackhi_ps(__m128 a, __m128 b)
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_floor_ss
+FORCE_INLINE __m128 _mm_floor_ss(__m128 a, __m128 b)
{
-#if defined(__aarch64__)
- return vreinterpretq_m128_f32(
- vzip2q_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
-#else
- float32x2_t a1 = vget_high_f32(vreinterpretq_f32_m128(a));
- float32x2_t b1 = vget_high_f32(vreinterpretq_f32_m128(b));
- float32x2x2_t result = vzip_f32(a1, b1);
- return vreinterpretq_m128_f32(vcombine_f32(result.val[0], result.val[1]));
-#endif
+ return _mm_move_ss(a, _mm_floor_ps(b));
}
-// Interleaves the upper 8 signed or unsigned 8-bit integers in a with the upper
-// 8 signed or unsigned 8-bit integers in b.
+// Inserts the least significant 32 bits of b into the selected 32-bit integer
+// of a.
+// FORCE_INLINE __m128i _mm_insert_epi32(__m128i a, int b,
+// __constrange(0,4) int imm)
+#define _mm_insert_epi32(a, b, imm) \
+ __extension__({ \
+ vreinterpretq_m128i_s32( \
+ vsetq_lane_s32((b), vreinterpretq_s32_m128i(a), (imm))); \
+ })
+
+// Inserts the least significant 64 bits of b into the selected 64-bit integer
+// of a.
+// FORCE_INLINE __m128i _mm_insert_epi64(__m128i a, __int64 b,
+// __constrange(0,2) int imm)
+#define _mm_insert_epi64(a, b, imm) \
+ __extension__({ \
+ vreinterpretq_m128i_s64( \
+ vsetq_lane_s64((b), vreinterpretq_s64_m128i(a), (imm))); \
+ })
+
+// Inserts the least significant 8 bits of b into the selected 8-bit integer
+// of a.
+// FORCE_INLINE __m128i _mm_insert_epi8(__m128i a, int b,
+// __constrange(0,16) int imm)
+#define _mm_insert_epi8(a, b, imm) \
+ __extension__({ \
+ vreinterpretq_m128i_s8( \
+ vsetq_lane_s8((b), vreinterpretq_s8_m128i(a), (imm))); \
+ })
+
+// Copy a to tmp, then insert a single-precision (32-bit) floating-point
+// element from b into tmp using the control in imm8. Store tmp to dst using
+// the mask in imm8 (elements are zeroed out when the corresponding bit is set).
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=insert_ps
+#define _mm_insert_ps(a, b, imm8) \
+ __extension__({ \
+ float32x4_t tmp1 = \
+ vsetq_lane_f32(vgetq_lane_f32(b, (imm8 >> 6) & 0x3), \
+ vreinterpretq_f32_m128(a), 0); \
+ float32x4_t tmp2 = \
+ vsetq_lane_f32(vgetq_lane_f32(tmp1, 0), vreinterpretq_f32_m128(a), \
+ ((imm8 >> 4) & 0x3)); \
+ const uint32_t data[4] = {((imm8) & (1 << 0)) ? UINT32_MAX : 0, \
+ ((imm8) & (1 << 1)) ? UINT32_MAX : 0, \
+ ((imm8) & (1 << 2)) ? UINT32_MAX : 0, \
+ ((imm8) & (1 << 3)) ? UINT32_MAX : 0}; \
+ uint32x4_t mask = vld1q_u32(data); \
+ float32x4_t all_zeros = vdupq_n_f32(0); \
+ \
+ vreinterpretq_m128_f32( \
+ vbslq_f32(mask, all_zeros, vreinterpretq_f32_m128(tmp2))); \
+ })
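+
+// For example (illustrative), imm8 = 0x10 copies lane 0 of b into lane 1 of
+// dst: bits [7:6] of imm8 select the source lane of b, bits [5:4] select the
+// destination lane, and bits [3:0] zero the corresponding result lanes.
+//   __m128 r = _mm_insert_ps(a, b, 0x10);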
+
+// epi versions of min/max
+// Computes the pairwise maximums of the four signed 32-bit integer values of a
+// and b.
//
-// r0 := a8
-// r1 := b8
-// r2 := a9
-// r3 := b9
-// ...
-// r14 := a15
-// r15 := b15
+// A 128-bit parameter that can be defined with the following equations:
+// r0 := (a0 > b0) ? a0 : b0
+// r1 := (a1 > b1) ? a1 : b1
+// r2 := (a2 > b2) ? a2 : b2
+// r3 := (a3 > b3) ? a3 : b3
//
-// https://msdn.microsoft.com/en-us/library/t5h7783k(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_unpackhi_epi8(__m128i a, __m128i b)
+// https://msdn.microsoft.com/en-us/library/vstudio/bb514055(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_max_epi32(__m128i a, __m128i b)
+{
+ return vreinterpretq_m128i_s32(
+ vmaxq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
+}
+
+// Compare packed signed 8-bit integers in a and b, and store packed maximum
+// values in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_max_epi8
+FORCE_INLINE __m128i _mm_max_epi8(__m128i a, __m128i b)
{
-#if defined(__aarch64__)
return vreinterpretq_m128i_s8(
- vzip2q_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b)));
-#else
- int8x8_t a1 =
- vreinterpret_s8_s16(vget_high_s16(vreinterpretq_s16_m128i(a)));
- int8x8_t b1 =
- vreinterpret_s8_s16(vget_high_s16(vreinterpretq_s16_m128i(b)));
- int8x8x2_t result = vzip_s8(a1, b1);
- return vreinterpretq_m128i_s8(vcombine_s8(result.val[0], result.val[1]));
-#endif
+ vmaxq_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b)));
}
-// Interleaves the upper 4 signed or unsigned 16-bit integers in a with the
-// upper 4 signed or unsigned 16-bit integers in b.
+// Compare packed unsigned 16-bit integers in a and b, and store packed maximum
+// values in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_max_epu16
+FORCE_INLINE __m128i _mm_max_epu16(__m128i a, __m128i b)
+{
+ return vreinterpretq_m128i_u16(
+ vmaxq_u16(vreinterpretq_u16_m128i(a), vreinterpretq_u16_m128i(b)));
+}
+
+// Compare packed unsigned 32-bit integers in a and b, and store packed maximum
+// values in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_max_epu32
+FORCE_INLINE __m128i _mm_max_epu32(__m128i a, __m128i b)
+{
+ return vreinterpretq_m128i_u32(
+ vmaxq_u32(vreinterpretq_u32_m128i(a), vreinterpretq_u32_m128i(b)));
+}
+
+// Computes the pairwise minima of the four signed 32-bit integer values of a
+// and b.
//
-// r0 := a4
-// r1 := b4
-// r2 := a5
-// r3 := b5
-// r4 := a6
-// r5 := b6
-// r6 := a7
-// r7 := b7
+// A 128-bit parameter that can be defined with the following equations:
+// r0 := (a0 < b0) ? a0 : b0
+// r1 := (a1 < b1) ? a1 : b1
+// r2 := (a2 < b2) ? a2 : b2
+// r3 := (a3 < b3) ? a3 : b3
//
-// https://msdn.microsoft.com/en-us/library/03196cz7(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_unpackhi_epi16(__m128i a, __m128i b)
+// https://msdn.microsoft.com/en-us/library/vstudio/bb531476(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_min_epi32(__m128i a, __m128i b)
{
-#if defined(__aarch64__)
- return vreinterpretq_m128i_s16(
- vzip2q_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
-#else
- int16x4_t a1 = vget_high_s16(vreinterpretq_s16_m128i(a));
- int16x4_t b1 = vget_high_s16(vreinterpretq_s16_m128i(b));
- int16x4x2_t result = vzip_s16(a1, b1);
- return vreinterpretq_m128i_s16(vcombine_s16(result.val[0], result.val[1]));
-#endif
+ return vreinterpretq_m128i_s32(
+ vminq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
}
-// Interleaves the upper 2 signed or unsigned 32-bit integers in a with the
-// upper 2 signed or unsigned 32-bit integers in b.
-// https://msdn.microsoft.com/en-us/library/65sa7cbs(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_unpackhi_epi32(__m128i a, __m128i b)
+// Compare packed signed 8-bit integers in a and b, and store packed minimum
+// values in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_min_epi8
+FORCE_INLINE __m128i _mm_min_epi8(__m128i a, __m128i b)
{
-#if defined(__aarch64__)
- return vreinterpretq_m128i_s32(
- vzip2q_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
-#else
- int32x2_t a1 = vget_high_s32(vreinterpretq_s32_m128i(a));
- int32x2_t b1 = vget_high_s32(vreinterpretq_s32_m128i(b));
- int32x2x2_t result = vzip_s32(a1, b1);
- return vreinterpretq_m128i_s32(vcombine_s32(result.val[0], result.val[1]));
-#endif
+ return vreinterpretq_m128i_s8(
+ vminq_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b)));
}
-// Interleaves the upper signed or unsigned 64-bit integer in a with the
-// upper signed or unsigned 64-bit integer in b.
-//
-// r0 := a1
-// r1 := b1
-FORCE_INLINE __m128i _mm_unpackhi_epi64(__m128i a, __m128i b)
+// Compare packed unsigned 16-bit integers in a and b, and store packed minimum
+// values in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_min_epu16
+FORCE_INLINE __m128i _mm_min_epu16(__m128i a, __m128i b)
{
- int64x1_t a_h = vget_high_s64(vreinterpretq_s64_m128i(a));
- int64x1_t b_h = vget_high_s64(vreinterpretq_s64_m128i(b));
- return vreinterpretq_m128i_s64(vcombine_s64(a_h, b_h));
+ return vreinterpretq_m128i_u16(
+ vminq_u16(vreinterpretq_u16_m128i(a), vreinterpretq_u16_m128i(b)));
+}
+
+// Compare packed unsigned 32-bit integers in a and b, and store packed minimum
+// values in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_min_epu32
+FORCE_INLINE __m128i _mm_min_epu32(__m128i a, __m128i b)
+{
+ return vreinterpretq_m128i_u32(
+ vminq_u32(vreinterpretq_u32_m128i(a), vreinterpretq_u32_m128i(b)));
}
// Horizontally compute the minimum amongst the packed unsigned 16-bit integers
@@ -6296,6 +7929,339 @@ FORCE_INLINE __m128i _mm_minpos_epu16(__m128i a)
return dst;
}
+// Compute the sum of absolute differences (SADs) of quadruplets of unsigned
+// 8-bit integers in a compared to those in b, and store the 16-bit results in
+// dst. Eight SADs are performed using one quadruplet from b and eight
+// quadruplets from a. One quadruplet is selected from b starting at the
+// offset specified in imm8. Eight quadruplets are formed from sequential 8-bit
+// integers selected from a starting at the offset specified in imm8.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_mpsadbw_epu8
+FORCE_INLINE __m128i _mm_mpsadbw_epu8(__m128i a, __m128i b, const int imm)
+{
+ uint8x16_t _a, _b;
+
+ switch (imm & 0x4) {
+ case 0:
+ // do nothing
+ _a = vreinterpretq_u8_m128i(a);
+ break;
+ case 4:
+ _a = vreinterpretq_u8_u32(vextq_u32(vreinterpretq_u32_m128i(a),
+ vreinterpretq_u32_m128i(a), 1));
+ break;
+ default:
+#if defined(__GNUC__) || defined(__clang__)
+ __builtin_unreachable();
+#endif
+ break;
+ }
+
+ switch (imm & 0x3) {
+ case 0:
+ _b = vreinterpretq_u8_u32(
+ vdupq_n_u32(vgetq_lane_u32(vreinterpretq_u32_m128i(b), 0)));
+ break;
+ case 1:
+ _b = vreinterpretq_u8_u32(
+ vdupq_n_u32(vgetq_lane_u32(vreinterpretq_u32_m128i(b), 1)));
+ break;
+ case 2:
+ _b = vreinterpretq_u8_u32(
+ vdupq_n_u32(vgetq_lane_u32(vreinterpretq_u32_m128i(b), 2)));
+ break;
+ case 3:
+ _b = vreinterpretq_u8_u32(
+ vdupq_n_u32(vgetq_lane_u32(vreinterpretq_u32_m128i(b), 3)));
+ break;
+ default:
+#if defined(__GNUC__) || defined(__clang__)
+ __builtin_unreachable();
+#endif
+ break;
+ }
+
+ int16x8_t c04, c15, c26, c37;
+ uint8x8_t low_b = vget_low_u8(_b);
+ c04 = vabsq_s16(vreinterpretq_s16_u16(vsubl_u8(vget_low_u8(_a), low_b)));
+ _a = vextq_u8(_a, _a, 1);
+ c15 = vabsq_s16(vreinterpretq_s16_u16(vsubl_u8(vget_low_u8(_a), low_b)));
+ _a = vextq_u8(_a, _a, 1);
+ c26 = vabsq_s16(vreinterpretq_s16_u16(vsubl_u8(vget_low_u8(_a), low_b)));
+ _a = vextq_u8(_a, _a, 1);
+ c37 = vabsq_s16(vreinterpretq_s16_u16(vsubl_u8(vget_low_u8(_a), low_b)));
+#if defined(__aarch64__)
+ // |0|4|2|6|
+ c04 = vpaddq_s16(c04, c26);
+ // |1|5|3|7|
+ c15 = vpaddq_s16(c15, c37);
+
+ int32x4_t trn1_c =
+ vtrn1q_s32(vreinterpretq_s32_s16(c04), vreinterpretq_s32_s16(c15));
+ int32x4_t trn2_c =
+ vtrn2q_s32(vreinterpretq_s32_s16(c04), vreinterpretq_s32_s16(c15));
+ return vreinterpretq_m128i_s16(vpaddq_s16(vreinterpretq_s16_s32(trn1_c),
+ vreinterpretq_s16_s32(trn2_c)));
+#else
+ int16x4_t c01, c23, c45, c67;
+ c01 = vpadd_s16(vget_low_s16(c04), vget_low_s16(c15));
+ c23 = vpadd_s16(vget_low_s16(c26), vget_low_s16(c37));
+ c45 = vpadd_s16(vget_high_s16(c04), vget_high_s16(c15));
+ c67 = vpadd_s16(vget_high_s16(c26), vget_high_s16(c37));
+
+ return vreinterpretq_m128i_s16(
+ vcombine_s16(vpadd_s16(c01, c23), vpadd_s16(c45, c67)));
+#endif
+}
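+
+// For example (illustrative), imm = 0 compares the quadruplet b[3:0] against
+// the eight overlapping quadruplets a[3:0], a[4:1], ..., a[10:7]; imm = 5
+// (binary 101) instead starts a at byte offset 4 and takes b[7:4].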
+
+// Multiply the low signed 32-bit integers from each packed 64-bit element in
+// a and b, and store the signed 64-bit results in dst.
+//
+// r0 := (int64_t)(int32_t)a0 * (int64_t)(int32_t)b0
+// r1 := (int64_t)(int32_t)a2 * (int64_t)(int32_t)b2
+FORCE_INLINE __m128i _mm_mul_epi32(__m128i a, __m128i b)
+{
+ // vmull_s32 upcasts instead of masking, so we downcast.
+ int32x2_t a_lo = vmovn_s64(vreinterpretq_s64_m128i(a));
+ int32x2_t b_lo = vmovn_s64(vreinterpretq_s64_m128i(b));
+ return vreinterpretq_m128i_s64(vmull_s32(a_lo, b_lo));
+}
+
+// Multiplies the 4 signed or unsigned 32-bit integers from a by the 4 signed or
+// unsigned 32-bit integers from b.
+// https://msdn.microsoft.com/en-us/library/vstudio/bb531409(v=vs.100).aspx
+FORCE_INLINE __m128i _mm_mullo_epi32(__m128i a, __m128i b)
+{
+ return vreinterpretq_m128i_s32(
+ vmulq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
+}
+
+// Packs the 8 unsigned 32-bit integers from a and b into unsigned 16-bit
+// integers and saturates.
+//
+// r0 := UnsignedSaturate(a0)
+// r1 := UnsignedSaturate(a1)
+// r2 := UnsignedSaturate(a2)
+// r3 := UnsignedSaturate(a3)
+// r4 := UnsignedSaturate(b0)
+// r5 := UnsignedSaturate(b1)
+// r6 := UnsignedSaturate(b2)
+// r7 := UnsignedSaturate(b3)
+FORCE_INLINE __m128i _mm_packus_epi32(__m128i a, __m128i b)
+{
+ return vreinterpretq_m128i_u16(
+ vcombine_u16(vqmovun_s32(vreinterpretq_s32_m128i(a)),
+ vqmovun_s32(vreinterpretq_s32_m128i(b))));
+}
+
+// Round the packed double-precision (64-bit) floating-point elements in a using
+// the rounding parameter, and store the results as packed double-precision
+// floating-point elements in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_round_pd
+FORCE_INLINE __m128d _mm_round_pd(__m128d a, int rounding)
+{
+#if defined(__aarch64__)
+ switch (rounding) {
+ case (_MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC):
+ return vreinterpretq_m128d_f64(vrndnq_f64(vreinterpretq_f64_m128d(a)));
+ case (_MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC):
+ return _mm_floor_pd(a);
+ case (_MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC):
+ return _mm_ceil_pd(a);
+ case (_MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC):
+ return vreinterpretq_m128d_f64(vrndq_f64(vreinterpretq_f64_m128d(a)));
+ default: //_MM_FROUND_CUR_DIRECTION
+ return vreinterpretq_m128d_f64(vrndiq_f64(vreinterpretq_f64_m128d(a)));
+ }
+#else
+ double *v_double = (double *) &a;
+
+ if (rounding == (_MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC) ||
+ (rounding == _MM_FROUND_CUR_DIRECTION &&
+ _MM_GET_ROUNDING_MODE() == _MM_ROUND_NEAREST)) {
+ double res[2], tmp;
+ for (int i = 0; i < 2; i++) {
+ tmp = (v_double[i] < 0) ? -v_double[i] : v_double[i];
+ double roundDown = floor(tmp); // Round down value
+ double roundUp = ceil(tmp); // Round up value
+ double diffDown = tmp - roundDown;
+ double diffUp = roundUp - tmp;
+ if (diffDown < diffUp) {
+ /* If it's closer to the round down value, then use it */
+ res[i] = roundDown;
+ } else if (diffDown > diffUp) {
+ /* If it's closer to the round up value, then use it */
+ res[i] = roundUp;
+ } else {
+ /* If it's equidistant between round up and round down value,
+ * pick the one which is an even number */
+ double half = roundDown / 2;
+ if (half != floor(half)) {
+ /* If the round down value is odd, return the round up value
+ */
+ res[i] = roundUp;
+ } else {
+ /* If the round up value is odd, return the round down value
+ */
+ res[i] = roundDown;
+ }
+ }
+ res[i] = (v_double[i] < 0) ? -res[i] : res[i];
+ }
+ return _mm_set_pd(res[1], res[0]);
+ } else if (rounding == (_MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC) ||
+ (rounding == _MM_FROUND_CUR_DIRECTION &&
+ _MM_GET_ROUNDING_MODE() == _MM_ROUND_DOWN)) {
+ return _mm_floor_pd(a);
+ } else if (rounding == (_MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC) ||
+ (rounding == _MM_FROUND_CUR_DIRECTION &&
+ _MM_GET_ROUNDING_MODE() == _MM_ROUND_UP)) {
+ return _mm_ceil_pd(a);
+ }
+ return _mm_set_pd(v_double[1] > 0 ? floor(v_double[1]) : ceil(v_double[1]),
+ v_double[0] > 0 ? floor(v_double[0]) : ceil(v_double[0]));
+#endif
+}
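+
+// Note (illustrative): with _MM_FROUND_TO_NEAREST_INT ties round to even, so
+// 2.5 rounds to 2.0 while 3.5 rounds to 4.0, matching the scalar fallback's
+// half-to-even handling above.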
+
+// Round the packed single-precision (32-bit) floating-point elements in a using
+// the rounding parameter, and store the results as packed single-precision
+// floating-point elements in dst.
+// software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_round_ps
+FORCE_INLINE __m128 _mm_round_ps(__m128 a, int rounding)
+{
+#if defined(__aarch64__)
+ switch (rounding) {
+ case (_MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC):
+ return vreinterpretq_m128_f32(vrndnq_f32(vreinterpretq_f32_m128(a)));
+ case (_MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC):
+ return _mm_floor_ps(a);
+ case (_MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC):
+ return _mm_ceil_ps(a);
+ case (_MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC):
+ return vreinterpretq_m128_f32(vrndq_f32(vreinterpretq_f32_m128(a)));
+ default: //_MM_FROUND_CUR_DIRECTION
+ return vreinterpretq_m128_f32(vrndiq_f32(vreinterpretq_f32_m128(a)));
+ }
+#else
+ float *v_float = (float *) &a;
+
+ if (rounding == (_MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC) ||
+ (rounding == _MM_FROUND_CUR_DIRECTION &&
+ _MM_GET_ROUNDING_MODE() == _MM_ROUND_NEAREST)) {
+ uint32x4_t signmask = vdupq_n_u32(0x80000000);
+ float32x4_t half = vbslq_f32(signmask, vreinterpretq_f32_m128(a),
+ vdupq_n_f32(0.5f)); /* +/- 0.5 */
+ int32x4_t r_normal = vcvtq_s32_f32(vaddq_f32(
+ vreinterpretq_f32_m128(a), half)); /* round to integer: [a + 0.5]*/
+ int32x4_t r_trunc = vcvtq_s32_f32(
+ vreinterpretq_f32_m128(a)); /* truncate to integer: [a] */
+ int32x4_t plusone = vreinterpretq_s32_u32(vshrq_n_u32(
+ vreinterpretq_u32_s32(vnegq_s32(r_trunc)), 31)); /* 1 or 0 */
+ int32x4_t r_even = vbicq_s32(vaddq_s32(r_trunc, plusone),
+ vdupq_n_s32(1)); /* ([a] + {0,1}) & ~1 */
+ float32x4_t delta = vsubq_f32(
+ vreinterpretq_f32_m128(a),
+ vcvtq_f32_s32(r_trunc)); /* compute delta: delta = (a - [a]) */
+ uint32x4_t is_delta_half =
+ vceqq_f32(delta, half); /* delta == +/- 0.5 */
+ return vreinterpretq_m128_f32(
+ vcvtq_f32_s32(vbslq_s32(is_delta_half, r_even, r_normal)));
+ } else if (rounding == (_MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC) ||
+ (rounding == _MM_FROUND_CUR_DIRECTION &&
+ _MM_GET_ROUNDING_MODE() == _MM_ROUND_DOWN)) {
+ return _mm_floor_ps(a);
+ } else if (rounding == (_MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC) ||
+ (rounding == _MM_FROUND_CUR_DIRECTION &&
+ _MM_GET_ROUNDING_MODE() == _MM_ROUND_UP)) {
+ return _mm_ceil_ps(a);
+ }
+ return _mm_set_ps(v_float[3] > 0 ? floorf(v_float[3]) : ceilf(v_float[3]),
+ v_float[2] > 0 ? floorf(v_float[2]) : ceilf(v_float[2]),
+ v_float[1] > 0 ? floorf(v_float[1]) : ceilf(v_float[1]),
+ v_float[0] > 0 ? floorf(v_float[0]) : ceilf(v_float[0]));
+#endif
+}
+
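// Scalar sketch of the vector ties-to-even selection above (illustrative only,
// not part of the patch): round via trunc(a +/- 0.5) in the common case, but
// when the fractional part is exactly +/- 0.5 substitute the nearest even
// integer, mirroring the r_normal / r_even / is_delta_half lanes.
static inline float sse2neon_round_ties_even_scalar(float a)
{
    float half = (a < 0.0f) ? -0.5f : 0.5f;
    int r_normal = (int) (a + half);       /* trunc(a +/- 0.5) */
    int r_trunc = (int) a;                 /* trunc(a) */
    int plusone = (r_trunc > 0) ? 1 : 0;   /* matches (-r_trunc) >> 31 above */
    int r_even = (r_trunc + plusone) & ~1; /* nearest even integer */
    float delta = a - (float) r_trunc;
    return (float) ((delta == half) ? r_even : r_normal);
}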
+// Round the lower double-precision (64-bit) floating-point element in b using
+// the rounding parameter, store the result as a double-precision floating-point
+// element in the lower element of dst, and copy the upper element from a to the
+// upper element of dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_round_sd
+FORCE_INLINE __m128d _mm_round_sd(__m128d a, __m128d b, int rounding)
+{
+ return _mm_move_sd(a, _mm_round_pd(b, rounding));
+}
+
+// Round the lower single-precision (32-bit) floating-point element in b using
+// the rounding parameter, store the result as a single-precision floating-point
+// element in the lower element of dst, and copy the upper 3 packed elements
+// from a to the upper elements of dst. Rounding is done according to the
+// rounding[3:0] parameter, which can be one of:
+//     (_MM_FROUND_TO_NEAREST_INT |_MM_FROUND_NO_EXC) // round to nearest, and suppress exceptions
+//     (_MM_FROUND_TO_NEG_INF |_MM_FROUND_NO_EXC)     // round down, and suppress exceptions
+//     (_MM_FROUND_TO_POS_INF |_MM_FROUND_NO_EXC)     // round up, and suppress exceptions
+//     (_MM_FROUND_TO_ZERO |_MM_FROUND_NO_EXC)        // truncate, and suppress exceptions
+//     _MM_FROUND_CUR_DIRECTION                       // use MXCSR.RC; see _MM_SET_ROUNDING_MODE
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_round_ss
+FORCE_INLINE __m128 _mm_round_ss(__m128 a, __m128 b, int rounding)
+{
+ return _mm_move_ss(a, _mm_round_ps(b, rounding));
+}
+
+// Load 128-bits of integer data from memory into dst using a non-temporal
+// memory hint. mem_addr must be aligned on a 16-byte boundary or a
+// general-protection exception may be generated.
+//
+// dst[127:0] := MEM[mem_addr+127:mem_addr]
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_stream_load_si128
+FORCE_INLINE __m128i _mm_stream_load_si128(__m128i *p)
+{
+#if __has_builtin(__builtin_nontemporal_store)
+ return __builtin_nontemporal_load(p);
+#else
+ return vreinterpretq_m128i_s64(vld1q_s64((int64_t *) p));
+#endif
+}
+
+// Compute the bitwise NOT of a and then AND with a 128-bit vector containing
+// all 1's, and return 1 if the result is zero, otherwise return 0.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_test_all_ones
+FORCE_INLINE int _mm_test_all_ones(__m128i a)
+{
+ return (uint64_t)(vgetq_lane_s64(a, 0) & vgetq_lane_s64(a, 1)) ==
+ ~(uint64_t) 0;
+}
+
+// Compute the bitwise AND of 128 bits (representing integer data) in a and
+// mask, and return 1 if the result is zero, otherwise return 0.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_test_all_zeros
+FORCE_INLINE int _mm_test_all_zeros(__m128i a, __m128i mask)
+{
+ int64x2_t a_and_mask =
+ vandq_s64(vreinterpretq_s64_m128i(a), vreinterpretq_s64_m128i(mask));
+ return !(vgetq_lane_s64(a_and_mask, 0) | vgetq_lane_s64(a_and_mask, 1));
+}
+
+// Compute the bitwise AND of 128 bits (representing integer data) in a and
+// mask, and set ZF to 1 if the result is zero, otherwise set ZF to 0. Compute
+// the bitwise NOT of a and then AND with mask, and set CF to 1 if the result is
+// zero, otherwise set CF to 0. Return 1 if both the ZF and CF values are zero,
+// otherwise return 0.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_test_mix_ones_zeros
+FORCE_INLINE int _mm_test_mix_ones_zeros(__m128i a, __m128i mask)
+{
+ uint64x2_t zf =
+ vandq_u64(vreinterpretq_u64_m128i(mask), vreinterpretq_u64_m128i(a));
+ uint64x2_t cf =
+ vbicq_u64(vreinterpretq_u64_m128i(mask), vreinterpretq_u64_m128i(a));
+ uint64x2_t result = vandq_u64(zf, cf);
+ return !(vgetq_lane_u64(result, 0) | vgetq_lane_u64(result, 1));
+}
+
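// Illustrative sketch (not part of the patch): the three predicates above
// mirror the x86 PTEST flag semantics. With a = all ones and a non-zero mask:
static inline void sse2neon_ptest_example(void)
{
    __m128i ones = _mm_set1_epi32(-1);
    __m128i mask = _mm_set_epi32(0, 0, 0, 0xFF);
    int all_ones = _mm_test_all_ones(ones);           /* 1: every bit of a is set      */
    int all_zero = _mm_test_all_zeros(ones, mask);    /* 0: (ones & mask) is non-zero  */
    int mixed = _mm_test_mix_ones_zeros(ones, mask);  /* 0: (~ones & mask) == 0, CF=1  */
    (void) all_ones; (void) all_zero; (void) mixed;
}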
// Compute the bitwise AND of 128 bits (representing integer data) in a and b,
// and set ZF to 1 if the result is zero, otherwise set ZF to 0. Compute the
// bitwise NOT of a and then AND with b, and set CF to 1 if the result is zero,
@@ -6312,6 +8278,14 @@ FORCE_INLINE int _mm_testc_si128(__m128i a, __m128i b)
// Compute the bitwise AND of 128 bits (representing integer data) in a and b,
// and set ZF to 1 if the result is zero, otherwise set ZF to 0. Compute the
// bitwise NOT of a and then AND with b, and set CF to 1 if the result is zero,
+// otherwise set CF to 0. Return 1 if both the ZF and CF values are zero,
+// otherwise return 0.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_testnzc_si128
+#define _mm_testnzc_si128(a, b) _mm_test_mix_ones_zeros(a, b)
+
+// Compute the bitwise AND of 128 bits (representing integer data) in a and b,
+// and set ZF to 1 if the result is zero, otherwise set ZF to 0. Compute the
+// bitwise NOT of a and then AND with b, and set CF to 1 if the result is zero,
// otherwise set CF to 0. Return the ZF value.
// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_testz_si128
FORCE_INLINE int _mm_testz_si128(__m128i a, __m128i b)
@@ -6321,299 +8295,93 @@ FORCE_INLINE int _mm_testz_si128(__m128i a, __m128i b)
return !(vgetq_lane_s64(s64, 0) | vgetq_lane_s64(s64, 1));
}
-// Extracts the selected signed or unsigned 8-bit integer from a and zero
-// extends.
-// FORCE_INLINE int _mm_extract_epi8(__m128i a, __constrange(0,16) int imm)
-#define _mm_extract_epi8(a, imm) vgetq_lane_u8(vreinterpretq_u8_m128i(a), (imm))
-
-// Inserts the least significant 8 bits of b into the selected 8-bit integer
-// of a.
-// FORCE_INLINE __m128i _mm_insert_epi8(__m128i a, int b,
-// __constrange(0,16) int imm)
-#define _mm_insert_epi8(a, b, imm) \
- __extension__({ \
- vreinterpretq_m128i_s8( \
- vsetq_lane_s8((b), vreinterpretq_s8_m128i(a), (imm))); \
- })
-
-// Extracts the selected signed or unsigned 16-bit integer from a and zero
-// extends.
-// https://msdn.microsoft.com/en-us/library/6dceta0c(v=vs.100).aspx
-// FORCE_INLINE int _mm_extract_epi16(__m128i a, __constrange(0,8) int imm)
-#define _mm_extract_epi16(a, imm) \
- vgetq_lane_u16(vreinterpretq_u16_m128i(a), (imm))
-
-// Inserts the least significant 16 bits of b into the selected 16-bit integer
-// of a.
-// https://msdn.microsoft.com/en-us/library/kaze8hz1%28v=vs.100%29.aspx
-// FORCE_INLINE __m128i _mm_insert_epi16(__m128i a, int b,
-// __constrange(0,8) int imm)
-#define _mm_insert_epi16(a, b, imm) \
- __extension__({ \
- vreinterpretq_m128i_s16( \
- vsetq_lane_s16((b), vreinterpretq_s16_m128i(a), (imm))); \
- })
-
-// Copy a to dst, and insert the 16-bit integer i into dst at the location
-// specified by imm8.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_insert_pi16
-#define _mm_insert_pi16(a, b, imm) \
- __extension__({ \
- vreinterpret_m64_s16( \
- vset_lane_s16((b), vreinterpret_s16_m64(a), (imm))); \
- })
-
-// Extracts the selected signed or unsigned 32-bit integer from a and zero
-// extends.
-// FORCE_INLINE int _mm_extract_epi32(__m128i a, __constrange(0,4) int imm)
-#define _mm_extract_epi32(a, imm) \
- vgetq_lane_s32(vreinterpretq_s32_m128i(a), (imm))
-
-// Extracts the selected single-precision (32-bit) floating-point from a.
-// FORCE_INLINE int _mm_extract_ps(__m128 a, __constrange(0,4) int imm)
-#define _mm_extract_ps(a, imm) vgetq_lane_s32(vreinterpretq_s32_m128(a), (imm))
-
-// Inserts the least significant 32 bits of b into the selected 32-bit integer
-// of a.
-// FORCE_INLINE __m128i _mm_insert_epi32(__m128i a, int b,
-// __constrange(0,4) int imm)
-#define _mm_insert_epi32(a, b, imm) \
- __extension__({ \
- vreinterpretq_m128i_s32( \
- vsetq_lane_s32((b), vreinterpretq_s32_m128i(a), (imm))); \
- })
-
-// Extracts the selected signed or unsigned 64-bit integer from a and zero
-// extends.
-// FORCE_INLINE __int64 _mm_extract_epi64(__m128i a, __constrange(0,2) int imm)
-#define _mm_extract_epi64(a, imm) \
- vgetq_lane_s64(vreinterpretq_s64_m128i(a), (imm))
-
-// Inserts the least significant 64 bits of b into the selected 64-bit integer
-// of a.
-// FORCE_INLINE __m128i _mm_insert_epi64(__m128i a, __int64 b,
-// __constrange(0,2) int imm)
-#define _mm_insert_epi64(a, b, imm) \
- __extension__({ \
- vreinterpretq_m128i_s64( \
- vsetq_lane_s64((b), vreinterpretq_s64_m128i(a), (imm))); \
- })
+/* SSE4.2 */
-// Count the number of bits set to 1 in unsigned 32-bit integer a, and
-// return that count in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_popcnt_u32
-FORCE_INLINE int _mm_popcnt_u32(unsigned int a)
+// Compares the 2 signed 64-bit integers in a and the 2 signed 64-bit integers
+// in b for greater than.
+FORCE_INLINE __m128i _mm_cmpgt_epi64(__m128i a, __m128i b)
{
#if defined(__aarch64__)
-#if __has_builtin(__builtin_popcount)
- return __builtin_popcount(a);
+ return vreinterpretq_m128i_u64(
+ vcgtq_s64(vreinterpretq_s64_m128i(a), vreinterpretq_s64_m128i(b)));
#else
- return (int) vaddlv_u8(vcnt_u8(vcreate_u8((uint64_t) a)));
-#endif
-#else
- uint32_t count = 0;
- uint8x8_t input_val, count8x8_val;
- uint16x4_t count16x4_val;
- uint32x2_t count32x2_val;
-
- input_val = vld1_u8((uint8_t *) &a);
- count8x8_val = vcnt_u8(input_val);
- count16x4_val = vpaddl_u8(count8x8_val);
- count32x2_val = vpaddl_u16(count16x4_val);
-
- vst1_u32(&count, count32x2_val);
- return count;
+ return vreinterpretq_m128i_s64(vshrq_n_s64(
+ vqsubq_s64(vreinterpretq_s64_m128i(b), vreinterpretq_s64_m128i(a)),
+ 63));
#endif
}
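// Worked note on the pre-AArch64 fallback above (illustrative, not part of the
// patch): the saturating difference (b - a) is negative exactly when a > b, so
// the arithmetic shift by 63 broadcasts that sign bit across the 64-bit lane:
//     a >  b  =>  (b - a) <  0  =>  (b - a) >> 63 == all ones
//     a <= b  =>  (b - a) >= 0  =>  (b - a) >> 63 == 0
// The saturation (vqsubq_s64) only guards the subtraction against overflow, so
// the sign of the true difference is preserved.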
-// Count the number of bits set to 1 in unsigned 64-bit integer a, and
-// return that count in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_popcnt_u64
-FORCE_INLINE int64_t _mm_popcnt_u64(uint64_t a)
+// Starting with the initial value in crc, accumulates a CRC32 value for
+// unsigned 16-bit integer v.
+// https://msdn.microsoft.com/en-us/library/bb531411(v=vs.100)
+FORCE_INLINE uint32_t _mm_crc32_u16(uint32_t crc, uint16_t v)
{
-#if defined(__aarch64__)
-#if __has_builtin(__builtin_popcountll)
- return __builtin_popcountll(a);
-#else
- return (int64_t) vaddlv_u8(vcnt_u8(vcreate_u8(a)));
-#endif
+#if defined(__aarch64__) && defined(__ARM_FEATURE_CRC32)
+ __asm__ __volatile__("crc32ch %w[c], %w[c], %w[v]\n\t"
+ : [c] "+r"(crc)
+ : [v] "r"(v));
#else
- uint64_t count = 0;
- uint8x8_t input_val, count8x8_val;
- uint16x4_t count16x4_val;
- uint32x2_t count32x2_val;
- uint64x1_t count64x1_val;
-
- input_val = vld1_u8((uint8_t *) &a);
- count8x8_val = vcnt_u8(input_val);
- count16x4_val = vpaddl_u8(count8x8_val);
- count32x2_val = vpaddl_u16(count16x4_val);
- count64x1_val = vpaddl_u32(count32x2_val);
- vst1_u64(&count, count64x1_val);
- return count;
+ crc = _mm_crc32_u8(crc, v & 0xff);
+ crc = _mm_crc32_u8(crc, (v >> 8) & 0xff);
#endif
+ return crc;
}
-// Macro: Transpose the 4x4 matrix formed by the 4 rows of single-precision
-// (32-bit) floating-point elements in row0, row1, row2, and row3, and store the
-// transposed matrix in these vectors (row0 now contains column 0, etc.).
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=MM_TRANSPOSE4_PS
-#define _MM_TRANSPOSE4_PS(row0, row1, row2, row3) \
- do { \
- float32x4x2_t ROW01 = vtrnq_f32(row0, row1); \
- float32x4x2_t ROW23 = vtrnq_f32(row2, row3); \
- row0 = vcombine_f32(vget_low_f32(ROW01.val[0]), \
- vget_low_f32(ROW23.val[0])); \
- row1 = vcombine_f32(vget_low_f32(ROW01.val[1]), \
- vget_low_f32(ROW23.val[1])); \
- row2 = vcombine_f32(vget_high_f32(ROW01.val[0]), \
- vget_high_f32(ROW23.val[0])); \
- row3 = vcombine_f32(vget_high_f32(ROW01.val[1]), \
- vget_high_f32(ROW23.val[1])); \
- } while (0)
-
-/* Crypto Extensions */
-
-#if defined(__ARM_FEATURE_CRYPTO)
-// Wraps vmull_p64
-FORCE_INLINE uint64x2_t _sse2neon_vmull_p64(uint64x1_t _a, uint64x1_t _b)
-{
- poly64_t a = vget_lane_p64(vreinterpret_p64_u64(_a), 0);
- poly64_t b = vget_lane_p64(vreinterpret_p64_u64(_b), 0);
- return vreinterpretq_u64_p128(vmull_p64(a, b));
-}
-#else // ARMv7 polyfill
-// ARMv7/some A64 lacks vmull_p64, but it has vmull_p8.
-//
-// vmull_p8 calculates 8 8-bit->16-bit polynomial multiplies, but we need a
-// 64-bit->128-bit polynomial multiply.
-//
-// It needs some work and is somewhat slow, but it is still faster than all
-// known scalar methods.
-//
-// Algorithm adapted to C from
-// https://www.workofard.com/2017/07/ghash-for-low-end-cores/, which is adapted
-// from "Fast Software Polynomial Multiplication on ARM Processors Using the
-// NEON Engine" by Danilo Camara, Conrado Gouvea, Julio Lopez and Ricardo Dahab
-// (https://hal.inria.fr/hal-01506572)
-static uint64x2_t _sse2neon_vmull_p64(uint64x1_t _a, uint64x1_t _b)
+// Starting with the initial value in crc, accumulates a CRC32 value for
+// unsigned 32-bit integer v.
+// https://msdn.microsoft.com/en-us/library/bb531394(v=vs.100)
+FORCE_INLINE uint32_t _mm_crc32_u32(uint32_t crc, uint32_t v)
{
- poly8x8_t a = vreinterpret_p8_u64(_a);
- poly8x8_t b = vreinterpret_p8_u64(_b);
-
- // Masks
- uint8x16_t k48_32 = vcombine_u8(vcreate_u8(0x0000ffffffffffff),
- vcreate_u8(0x00000000ffffffff));
- uint8x16_t k16_00 = vcombine_u8(vcreate_u8(0x000000000000ffff),
- vcreate_u8(0x0000000000000000));
-
- // Do the multiplies, rotating with vext to get all combinations
- uint8x16_t d = vreinterpretq_u8_p16(vmull_p8(a, b)); // D = A0 * B0
- uint8x16_t e =
- vreinterpretq_u8_p16(vmull_p8(a, vext_p8(b, b, 1))); // E = A0 * B1
- uint8x16_t f =
- vreinterpretq_u8_p16(vmull_p8(vext_p8(a, a, 1), b)); // F = A1 * B0
- uint8x16_t g =
- vreinterpretq_u8_p16(vmull_p8(a, vext_p8(b, b, 2))); // G = A0 * B2
- uint8x16_t h =
- vreinterpretq_u8_p16(vmull_p8(vext_p8(a, a, 2), b)); // H = A2 * B0
- uint8x16_t i =
- vreinterpretq_u8_p16(vmull_p8(a, vext_p8(b, b, 3))); // I = A0 * B3
- uint8x16_t j =
- vreinterpretq_u8_p16(vmull_p8(vext_p8(a, a, 3), b)); // J = A3 * B0
- uint8x16_t k =
- vreinterpretq_u8_p16(vmull_p8(a, vext_p8(b, b, 4))); // L = A0 * B4
-
- // Add cross products
- uint8x16_t l = veorq_u8(e, f); // L = E + F
- uint8x16_t m = veorq_u8(g, h); // M = G + H
- uint8x16_t n = veorq_u8(i, j); // N = I + J
-
- // Interleave. Using vzip1 and vzip2 prevents Clang from emitting TBL
- // instructions.
-#if defined(__aarch64__)
- uint8x16_t lm_p0 = vreinterpretq_u8_u64(
- vzip1q_u64(vreinterpretq_u64_u8(l), vreinterpretq_u64_u8(m)));
- uint8x16_t lm_p1 = vreinterpretq_u8_u64(
- vzip2q_u64(vreinterpretq_u64_u8(l), vreinterpretq_u64_u8(m)));
- uint8x16_t nk_p0 = vreinterpretq_u8_u64(
- vzip1q_u64(vreinterpretq_u64_u8(n), vreinterpretq_u64_u8(k)));
- uint8x16_t nk_p1 = vreinterpretq_u8_u64(
- vzip2q_u64(vreinterpretq_u64_u8(n), vreinterpretq_u64_u8(k)));
+#if defined(__aarch64__) && defined(__ARM_FEATURE_CRC32)
+ __asm__ __volatile__("crc32cw %w[c], %w[c], %w[v]\n\t"
+ : [c] "+r"(crc)
+ : [v] "r"(v));
#else
- uint8x16_t lm_p0 = vcombine_u8(vget_low_u8(l), vget_low_u8(m));
- uint8x16_t lm_p1 = vcombine_u8(vget_high_u8(l), vget_high_u8(m));
- uint8x16_t nk_p0 = vcombine_u8(vget_low_u8(n), vget_low_u8(k));
- uint8x16_t nk_p1 = vcombine_u8(vget_high_u8(n), vget_high_u8(k));
+ crc = _mm_crc32_u16(crc, v & 0xffff);
+ crc = _mm_crc32_u16(crc, (v >> 16) & 0xffff);
#endif
- // t0 = (L) (P0 + P1) << 8
- // t1 = (M) (P2 + P3) << 16
- uint8x16_t t0t1_tmp = veorq_u8(lm_p0, lm_p1);
- uint8x16_t t0t1_h = vandq_u8(lm_p1, k48_32);
- uint8x16_t t0t1_l = veorq_u8(t0t1_tmp, t0t1_h);
-
- // t2 = (N) (P4 + P5) << 24
- // t3 = (K) (P6 + P7) << 32
- uint8x16_t t2t3_tmp = veorq_u8(nk_p0, nk_p1);
- uint8x16_t t2t3_h = vandq_u8(nk_p1, k16_00);
- uint8x16_t t2t3_l = veorq_u8(t2t3_tmp, t2t3_h);
+ return crc;
+}
- // De-interleave
-#if defined(__aarch64__)
- uint8x16_t t0 = vreinterpretq_u8_u64(
- vuzp1q_u64(vreinterpretq_u64_u8(t0t1_l), vreinterpretq_u64_u8(t0t1_h)));
- uint8x16_t t1 = vreinterpretq_u8_u64(
- vuzp2q_u64(vreinterpretq_u64_u8(t0t1_l), vreinterpretq_u64_u8(t0t1_h)));
- uint8x16_t t2 = vreinterpretq_u8_u64(
- vuzp1q_u64(vreinterpretq_u64_u8(t2t3_l), vreinterpretq_u64_u8(t2t3_h)));
- uint8x16_t t3 = vreinterpretq_u8_u64(
- vuzp2q_u64(vreinterpretq_u64_u8(t2t3_l), vreinterpretq_u64_u8(t2t3_h)));
+// Starting with the initial value in crc, accumulates a CRC32 value for
+// unsigned 64-bit integer v.
+// https://msdn.microsoft.com/en-us/library/bb514033(v=vs.100)
+FORCE_INLINE uint64_t _mm_crc32_u64(uint64_t crc, uint64_t v)
+{
+#if defined(__aarch64__) && defined(__ARM_FEATURE_CRC32)
+ __asm__ __volatile__("crc32cx %w[c], %w[c], %x[v]\n\t"
+ : [c] "+r"(crc)
+ : [v] "r"(v));
#else
- uint8x16_t t1 = vcombine_u8(vget_high_u8(t0t1_l), vget_high_u8(t0t1_h));
- uint8x16_t t0 = vcombine_u8(vget_low_u8(t0t1_l), vget_low_u8(t0t1_h));
- uint8x16_t t3 = vcombine_u8(vget_high_u8(t2t3_l), vget_high_u8(t2t3_h));
- uint8x16_t t2 = vcombine_u8(vget_low_u8(t2t3_l), vget_low_u8(t2t3_h));
+ crc = _mm_crc32_u32((uint32_t)(crc), v & 0xffffffff);
+ crc = _mm_crc32_u32((uint32_t)(crc), (v >> 32) & 0xffffffff);
#endif
- // Shift the cross products
- uint8x16_t t0_shift = vextq_u8(t0, t0, 15); // t0 << 8
- uint8x16_t t1_shift = vextq_u8(t1, t1, 14); // t1 << 16
- uint8x16_t t2_shift = vextq_u8(t2, t2, 13); // t2 << 24
- uint8x16_t t3_shift = vextq_u8(t3, t3, 12); // t3 << 32
-
- // Accumulate the products
- uint8x16_t cross1 = veorq_u8(t0_shift, t1_shift);
- uint8x16_t cross2 = veorq_u8(t2_shift, t3_shift);
- uint8x16_t mix = veorq_u8(d, cross1);
- uint8x16_t r = veorq_u8(mix, cross2);
- return vreinterpretq_u64_u8(r);
+ return crc;
}
-#endif // ARMv7 polyfill
-// Perform a carry-less multiplication of two 64-bit integers, selected from a
-// and b according to imm8, and store the results in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_clmulepi64_si128
-FORCE_INLINE __m128i _mm_clmulepi64_si128(__m128i _a, __m128i _b, const int imm)
+// Starting with the initial value in crc, accumulates a CRC32 value for
+// unsigned 8-bit integer v.
+// https://msdn.microsoft.com/en-us/library/bb514036(v=vs.100)
+FORCE_INLINE uint32_t _mm_crc32_u8(uint32_t crc, uint8_t v)
{
- uint64x2_t a = vreinterpretq_u64_m128i(_a);
- uint64x2_t b = vreinterpretq_u64_m128i(_b);
- switch (imm & 0x11) {
- case 0x00:
- return vreinterpretq_m128i_u64(
- _sse2neon_vmull_p64(vget_low_u64(a), vget_low_u64(b)));
- case 0x01:
- return vreinterpretq_m128i_u64(
- _sse2neon_vmull_p64(vget_high_u64(a), vget_low_u64(b)));
- case 0x10:
- return vreinterpretq_m128i_u64(
- _sse2neon_vmull_p64(vget_low_u64(a), vget_high_u64(b)));
- case 0x11:
- return vreinterpretq_m128i_u64(
- _sse2neon_vmull_p64(vget_high_u64(a), vget_high_u64(b)));
- default:
- abort();
+#if defined(__aarch64__) && defined(__ARM_FEATURE_CRC32)
+ __asm__ __volatile__("crc32cb %w[c], %w[c], %w[v]\n\t"
+ : [c] "+r"(crc)
+ : [v] "r"(v));
+#else
+ crc ^= v;
+ for (int bit = 0; bit < 8; bit++) {
+ if (crc & 1)
+ crc = (crc >> 1) ^ UINT32_C(0x82f63b78);
+ else
+ crc = (crc >> 1);
}
+#endif
+ return crc;
}
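// Illustrative usage sketch (assumed helper, not part of the patch): CRC32-C
// over a byte buffer with the conventional ~0 seed and final inversion, built
// on the byte-at-a-time primitive above.
static inline uint32_t sse2neon_crc32c_buffer_example(const uint8_t *buf,
                                                      size_t len)
{
    uint32_t crc = 0xFFFFFFFFu;
    for (size_t i = 0; i < len; i++)
        crc = _mm_crc32_u8(crc, buf[i]);
    return ~crc;
}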
+/* AES */
+
#if !defined(__ARM_FEATURE_CRYPTO)
/* clang-format off */
#define SSE2NEON_AES_DATA(w) \
@@ -6752,22 +8520,22 @@ FORCE_INLINE __m128i _mm_aesenclast_si128(__m128i a, __m128i RoundKey)
{
/* FIXME: optimized for NEON */
uint8_t v[4][4] = {
- [0] = {SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 0)],
- SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 5)],
- SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 10)],
- SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 15)]},
- [1] = {SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 4)],
- SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 9)],
- SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 14)],
- SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 3)]},
- [2] = {SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 8)],
- SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 13)],
- SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 2)],
- SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 7)]},
- [3] = {SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 12)],
- SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 1)],
- SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 6)],
- SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 11)]},
+ {SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 0)],
+ SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 5)],
+ SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 10)],
+ SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 15)]},
+ {SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 4)],
+ SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 9)],
+ SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 14)],
+ SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 3)]},
+ {SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 8)],
+ SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 13)],
+ SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 2)],
+ SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 7)]},
+ {SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 12)],
+ SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 1)],
+ SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 6)],
+ SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 11)]},
};
for (int i = 0; i < 16; i++)
vreinterpretq_nth_u8_m128i(a, i) =
@@ -6833,155 +8601,134 @@ FORCE_INLINE __m128i _mm_aeskeygenassist_si128(__m128i a, const int rcon)
}
#endif
-/* Streaming Extensions */
+/* Others */
-// Guarantees that every preceding store is globally visible before any
-// subsequent store.
-// https://msdn.microsoft.com/en-us/library/5h2w73d1%28v=vs.90%29.aspx
-FORCE_INLINE void _mm_sfence(void)
+// Perform a carry-less multiplication of two 64-bit integers, selected from a
+// and b according to imm8, and store the results in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_clmulepi64_si128
+FORCE_INLINE __m128i _mm_clmulepi64_si128(__m128i _a, __m128i _b, const int imm)
{
- __sync_synchronize();
+ uint64x2_t a = vreinterpretq_u64_m128i(_a);
+ uint64x2_t b = vreinterpretq_u64_m128i(_b);
+ switch (imm & 0x11) {
+ case 0x00:
+ return vreinterpretq_m128i_u64(
+ _sse2neon_vmull_p64(vget_low_u64(a), vget_low_u64(b)));
+ case 0x01:
+ return vreinterpretq_m128i_u64(
+ _sse2neon_vmull_p64(vget_high_u64(a), vget_low_u64(b)));
+ case 0x10:
+ return vreinterpretq_m128i_u64(
+ _sse2neon_vmull_p64(vget_low_u64(a), vget_high_u64(b)));
+ case 0x11:
+ return vreinterpretq_m128i_u64(
+ _sse2neon_vmull_p64(vget_high_u64(a), vget_high_u64(b)));
+ default:
+ abort();
+ }
}
-// Store 128-bits (composed of 4 packed single-precision (32-bit) floating-
-// point elements) from a into memory using a non-temporal memory hint.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_stream_ps
-FORCE_INLINE void _mm_stream_ps(float *p, __m128 a)
+FORCE_INLINE unsigned int _sse2neon_mm_get_denormals_zero_mode()
{
-#if __has_builtin(__builtin_nontemporal_store)
- __builtin_nontemporal_store(a, (float32x4_t *) p);
+ union {
+ fpcr_bitfield field;
+#if defined(__aarch64__)
+ uint64_t value;
#else
- vst1q_f32(p, vreinterpretq_f32_m128(a));
+ uint32_t value;
#endif
-}
+ } r;
-// Stores the data in a to the address p without polluting the caches. If the
-// cache line containing address p is already in the cache, the cache will be
-// updated.
-// https://msdn.microsoft.com/en-us/library/ba08y07y%28v=vs.90%29.aspx
-FORCE_INLINE void _mm_stream_si128(__m128i *p, __m128i a)
-{
-#if __has_builtin(__builtin_nontemporal_store)
- __builtin_nontemporal_store(a, p);
+#if defined(__aarch64__)
+ asm volatile("mrs %0, FPCR" : "=r"(r.value)); /* read */
#else
- vst1q_s64((int64_t *) p, vreinterpretq_s64_m128i(a));
+ asm volatile("vmrs %0, FPSCR" : "=r"(r.value)); /* read */
#endif
+
+ return r.field.bit24 ? _MM_DENORMALS_ZERO_ON : _MM_DENORMALS_ZERO_OFF;
}
-// Load 128-bits of integer data from memory into dst using a non-temporal
-// memory hint. mem_addr must be aligned on a 16-byte boundary or a
-// general-protection exception may be generated.
-//
-// dst[127:0] := MEM[mem_addr+127:mem_addr]
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_stream_load_si128
-FORCE_INLINE __m128i _mm_stream_load_si128(__m128i *p)
+// Count the number of bits set to 1 in unsigned 32-bit integer a, and
+// return that count in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_popcnt_u32
+FORCE_INLINE int _mm_popcnt_u32(unsigned int a)
{
-#if __has_builtin(__builtin_nontemporal_store)
- return __builtin_nontemporal_load(p);
+#if defined(__aarch64__)
+#if __has_builtin(__builtin_popcount)
+ return __builtin_popcount(a);
#else
- return vreinterpretq_m128i_s64(vld1q_s64((int64_t *) p));
+ return (int) vaddlv_u8(vcnt_u8(vcreate_u8((uint64_t) a)));
#endif
-}
-
-// Cache line containing p is flushed and invalidated from all caches in the
-// coherency domain. :
-// https://msdn.microsoft.com/en-us/library/ba08y07y(v=vs.100).aspx
-FORCE_INLINE void _mm_clflush(void const *p)
-{
- (void) p;
- // no corollary for Neon?
-}
+#else
+ uint32_t count = 0;
+ uint8x8_t input_val, count8x8_val;
+ uint16x4_t count16x4_val;
+ uint32x2_t count32x2_val;
-// Allocate aligned blocks of memory.
-// https://software.intel.com/en-us/
-// cpp-compiler-developer-guide-and-reference-allocating-and-freeing-aligned-memory-blocks
-FORCE_INLINE void *_mm_malloc(size_t size, size_t align)
-{
- void *ptr;
- if (align == 1)
- return malloc(size);
- if (align == 2 || (sizeof(void *) == 8 && align == 4))
- align = sizeof(void *);
- if (!posix_memalign(&ptr, align, size))
- return ptr;
- return NULL;
-}
+ input_val = vld1_u8((uint8_t *) &a);
+ count8x8_val = vcnt_u8(input_val);
+ count16x4_val = vpaddl_u8(count8x8_val);
+ count32x2_val = vpaddl_u16(count16x4_val);
-// Free aligned memory that was allocated with _mm_malloc.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_free
-FORCE_INLINE void _mm_free(void *addr)
-{
- free(addr);
+ vst1_u32(&count, count32x2_val);
+ return count;
+#endif
}
-// Starting with the initial value in crc, accumulates a CRC32 value for
-// unsigned 8-bit integer v.
-// https://msdn.microsoft.com/en-us/library/bb514036(v=vs.100)
-FORCE_INLINE uint32_t _mm_crc32_u8(uint32_t crc, uint8_t v)
+// Count the number of bits set to 1 in unsigned 64-bit integer a, and
+// return that count in dst.
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_popcnt_u64
+FORCE_INLINE int64_t _mm_popcnt_u64(uint64_t a)
{
-#if defined(__aarch64__) && defined(__ARM_FEATURE_CRC32)
- __asm__ __volatile__("crc32cb %w[c], %w[c], %w[v]\n\t"
- : [c] "+r"(crc)
- : [v] "r"(v));
+#if defined(__aarch64__)
+#if __has_builtin(__builtin_popcountll)
+ return __builtin_popcountll(a);
#else
- crc ^= v;
- for (int bit = 0; bit < 8; bit++) {
- if (crc & 1)
- crc = (crc >> 1) ^ UINT32_C(0x82f63b78);
- else
- crc = (crc >> 1);
- }
+ return (int64_t) vaddlv_u8(vcnt_u8(vcreate_u8(a)));
+#endif
+#else
+ uint64_t count = 0;
+ uint8x8_t input_val, count8x8_val;
+ uint16x4_t count16x4_val;
+ uint32x2_t count32x2_val;
+ uint64x1_t count64x1_val;
+
+ input_val = vld1_u8((uint8_t *) &a);
+ count8x8_val = vcnt_u8(input_val);
+ count16x4_val = vpaddl_u8(count8x8_val);
+ count32x2_val = vpaddl_u16(count16x4_val);
+ count64x1_val = vpaddl_u32(count32x2_val);
+ vst1_u64(&count, count64x1_val);
+ return count;
#endif
- return crc;
}
-// Starting with the initial value in crc, accumulates a CRC32 value for
-// unsigned 16-bit integer v.
-// https://msdn.microsoft.com/en-us/library/bb531411(v=vs.100)
-FORCE_INLINE uint32_t _mm_crc32_u16(uint32_t crc, uint16_t v)
+FORCE_INLINE void _sse2neon_mm_set_denormals_zero_mode(unsigned int flag)
{
-#if defined(__aarch64__) && defined(__ARM_FEATURE_CRC32)
- __asm__ __volatile__("crc32ch %w[c], %w[c], %w[v]\n\t"
- : [c] "+r"(crc)
- : [v] "r"(v));
+ // AArch32 Advanced SIMD arithmetic always uses the Flush-to-zero setting,
+ // regardless of the value of the FZ bit.
+ union {
+ fpcr_bitfield field;
+#if defined(__aarch64__)
+ uint64_t value;
#else
- crc = _mm_crc32_u8(crc, v & 0xff);
- crc = _mm_crc32_u8(crc, (v >> 8) & 0xff);
+ uint32_t value;
#endif
- return crc;
-}
+ } r;
-// Starting with the initial value in crc, accumulates a CRC32 value for
-// unsigned 32-bit integer v.
-// https://msdn.microsoft.com/en-us/library/bb531394(v=vs.100)
-FORCE_INLINE uint32_t _mm_crc32_u32(uint32_t crc, uint32_t v)
-{
-#if defined(__aarch64__) && defined(__ARM_FEATURE_CRC32)
- __asm__ __volatile__("crc32cw %w[c], %w[c], %w[v]\n\t"
- : [c] "+r"(crc)
- : [v] "r"(v));
+#if defined(__aarch64__)
+ asm volatile("mrs %0, FPCR" : "=r"(r.value)); /* read */
#else
- crc = _mm_crc32_u16(crc, v & 0xffff);
- crc = _mm_crc32_u16(crc, (v >> 16) & 0xffff);
+ asm volatile("vmrs %0, FPSCR" : "=r"(r.value)); /* read */
#endif
- return crc;
-}
-// Starting with the initial value in crc, accumulates a CRC32 value for
-// unsigned 64-bit integer v.
-// https://msdn.microsoft.com/en-us/library/bb514033(v=vs.100)
-FORCE_INLINE uint64_t _mm_crc32_u64(uint64_t crc, uint64_t v)
-{
-#if defined(__aarch64__) && defined(__ARM_FEATURE_CRC32)
- __asm__ __volatile__("crc32cx %w[c], %w[c], %x[v]\n\t"
- : [c] "+r"(crc)
- : [v] "r"(v));
+ r.field.bit24 = (flag & _MM_DENORMALS_ZERO_MASK) == _MM_DENORMALS_ZERO_ON;
+
+#if defined(__aarch64__)
+ asm volatile("msr FPCR, %0" ::"r"(r)); /* write */
#else
- crc = _mm_crc32_u32((uint32_t)(crc), v & 0xffffffff);
- crc = _mm_crc32_u32((uint32_t)(crc), (v >> 32) & 0xffffffff);
+ asm volatile("vmsr FPSCR, %0" ::"r"(r)); /* write */
#endif
- return crc;
}
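// Illustrative usage sketch (not part of the patch): save the current
// denormals-are-zero setting, force it on around a hot section, then restore
// the previous mode.
static inline void sse2neon_denormals_zero_example(void)
{
    unsigned int prev = _sse2neon_mm_get_denormals_zero_mode();
    _sse2neon_mm_set_denormals_zero_mode(_MM_DENORMALS_ZERO_ON);
    /* ... FP-heavy code that benefits from flushing denormal inputs ... */
    _sse2neon_mm_set_denormals_zero_mode(prev);
}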
#if defined(__GNUC__) || defined(__clang__)
@@ -6993,4 +8740,4 @@ FORCE_INLINE uint64_t _mm_crc32_u64(uint64_t crc, uint64_t v)
#pragma GCC pop_options
#endif
-#endif
+#endif
\ No newline at end of file
diff --git a/thirdparty/embree/common/simd/simd.h b/thirdparty/embree/common/simd/simd.h
index 195506b530..34e37b08b1 100644
--- a/thirdparty/embree/common/simd/simd.h
+++ b/thirdparty/embree/common/simd/simd.h
@@ -6,7 +6,7 @@
#include "../math/math.h"
/* include SSE wrapper classes */
-#if defined(__SSE__)
+#if defined(__SSE__) || defined(__ARM_NEON)
# include "sse.h"
#endif
diff --git a/thirdparty/embree/common/simd/sse.h b/thirdparty/embree/common/simd/sse.h
index 1465fb4fb0..04d90533dd 100644
--- a/thirdparty/embree/common/simd/sse.h
+++ b/thirdparty/embree/common/simd/sse.h
@@ -11,7 +11,7 @@
namespace embree
{
-#if defined(__SSE4_1__)
+#if defined(__aarch64__) || defined(__SSE4_1__)
__forceinline __m128 blendv_ps(__m128 f, __m128 t, __m128 mask) {
return _mm_blendv_ps(f,t,mask);
}
diff --git a/thirdparty/embree/common/simd/vboold4_avx.h b/thirdparty/embree/common/simd/vboold4_avx.h
index 7db0d1c5c1..450bd7a4eb 100644
--- a/thirdparty/embree/common/simd/vboold4_avx.h
+++ b/thirdparty/embree/common/simd/vboold4_avx.h
@@ -62,7 +62,11 @@ namespace embree
////////////////////////////////////////////////////////////////////////////////
__forceinline vboold(FalseTy) : v(_mm256_setzero_pd()) {}
+#if !defined(__aarch64__)
__forceinline vboold(TrueTy) : v(_mm256_cmp_pd(_mm256_setzero_pd(), _mm256_setzero_pd(), _CMP_EQ_OQ)) {}
+#else
+ __forceinline vboold(TrueTy) : v(_mm256_cmpeq_pd(_mm256_setzero_pd(), _mm256_setzero_pd())) {}
+#endif
////////////////////////////////////////////////////////////////////////////////
/// Array Access
@@ -107,9 +111,10 @@ namespace embree
/// Movement/Shifting/Shuffling Functions
////////////////////////////////////////////////////////////////////////////////
+#if !defined(__aarch64__)
__forceinline vboold4 unpacklo(const vboold4& a, const vboold4& b) { return _mm256_unpacklo_pd(a, b); }
__forceinline vboold4 unpackhi(const vboold4& a, const vboold4& b) { return _mm256_unpackhi_pd(a, b); }
-
+#endif
#if defined(__AVX2__)
template<int i0, int i1, int i2, int i3>
diff --git a/thirdparty/embree/common/simd/vboolf16_avx512.h b/thirdparty/embree/common/simd/vboolf16_avx512.h
index 19841dcea8..86b718f025 100644
--- a/thirdparty/embree/common/simd/vboolf16_avx512.h
+++ b/thirdparty/embree/common/simd/vboolf16_avx512.h
@@ -116,7 +116,7 @@ namespace embree
__forceinline size_t popcnt (const vboolf16& a) { return popcnt(a.v); }
////////////////////////////////////////////////////////////////////////////////
- /// Convertion Operations
+ /// Conversion Operations
////////////////////////////////////////////////////////////////////////////////
__forceinline unsigned int toInt (const vboolf16& a) { return mm512_mask2int(a); }
diff --git a/thirdparty/embree/common/simd/vboolf4_sse2.h b/thirdparty/embree/common/simd/vboolf4_sse2.h
index fa84b1b6ee..9e0fdf5c6f 100644
--- a/thirdparty/embree/common/simd/vboolf4_sse2.h
+++ b/thirdparty/embree/common/simd/vboolf4_sse2.h
@@ -36,9 +36,11 @@ namespace embree
__forceinline vboolf(__m128 input) : v(input) {}
__forceinline operator const __m128&() const { return v; }
+ #if !defined(__EMSCRIPTEN__)
__forceinline operator const __m128i() const { return _mm_castps_si128(v); }
__forceinline operator const __m128d() const { return _mm_castps_pd(v); }
-
+ #endif
+
__forceinline vboolf(bool a)
: v(mm_lookupmask_ps[(size_t(a) << 3) | (size_t(a) << 2) | (size_t(a) << 1) | size_t(a)]) {}
__forceinline vboolf(bool a, bool b)
@@ -100,7 +102,7 @@ namespace embree
__forceinline vboolf4 operator ==(const vboolf4& a, const vboolf4& b) { return _mm_castsi128_ps(_mm_cmpeq_epi32(a, b)); }
__forceinline vboolf4 select(const vboolf4& m, const vboolf4& t, const vboolf4& f) {
-#if defined(__SSE4_1__)
+#if defined(__aarch64__) || defined(__SSE4_1__)
return _mm_blendv_ps(f, t, m);
#else
return _mm_or_ps(_mm_and_ps(m, t), _mm_andnot_ps(m, f));
@@ -114,6 +116,17 @@ namespace embree
__forceinline vboolf4 unpacklo(const vboolf4& a, const vboolf4& b) { return _mm_unpacklo_ps(a, b); }
__forceinline vboolf4 unpackhi(const vboolf4& a, const vboolf4& b) { return _mm_unpackhi_ps(a, b); }
+#if defined(__aarch64__)
+ template<int i0, int i1, int i2, int i3>
+ __forceinline vboolf4 shuffle(const vboolf4& v) {
+ return vreinterpretq_f32_u8(vqtbl1q_u8( vreinterpretq_u8_s32(v), _MN_SHUFFLE(i0, i1, i2, i3)));
+ }
+
+ template<int i0, int i1, int i2, int i3>
+ __forceinline vboolf4 shuffle(const vboolf4& a, const vboolf4& b) {
+ return vreinterpretq_f32_u8(vqtbl2q_u8( (uint8x16x2_t){(uint8x16_t)a.v, (uint8x16_t)b.v}, _MF_SHUFFLE(i0, i1, i2, i3)));
+ }
+#else
template<int i0, int i1, int i2, int i3>
__forceinline vboolf4 shuffle(const vboolf4& v) {
return _mm_castsi128_ps(_mm_shuffle_epi32(v, _MM_SHUFFLE(i3, i2, i1, i0)));
@@ -123,6 +136,7 @@ namespace embree
__forceinline vboolf4 shuffle(const vboolf4& a, const vboolf4& b) {
return _mm_shuffle_ps(a, b, _MM_SHUFFLE(i3, i2, i1, i0));
}
+#endif
template<int i0>
__forceinline vboolf4 shuffle(const vboolf4& v) {
@@ -135,7 +149,7 @@ namespace embree
template<> __forceinline vboolf4 shuffle<0, 1, 0, 1>(const vboolf4& v) { return _mm_castpd_ps(_mm_movedup_pd(v)); }
#endif
-#if defined(__SSE4_1__)
+#if defined(__SSE4_1__) && !defined(__aarch64__)
template<int dst, int src, int clr> __forceinline vboolf4 insert(const vboolf4& a, const vboolf4& b) { return _mm_insert_ps(a, b, (dst << 4) | (src << 6) | clr); }
template<int dst, int src> __forceinline vboolf4 insert(const vboolf4& a, const vboolf4& b) { return insert<dst, src, 0>(a, b); }
template<int dst> __forceinline vboolf4 insert(const vboolf4& a, const bool b) { return insert<dst, 0>(a, vboolf4(b)); }
@@ -157,7 +171,9 @@ namespace embree
__forceinline bool none(const vboolf4& valid, const vboolf4& b) { return none(valid & b); }
__forceinline size_t movemask(const vboolf4& a) { return _mm_movemask_ps(a); }
-#if defined(__SSE4_2__)
+#if defined(__aarch64__)
+ __forceinline size_t popcnt(const vboolf4& a) { return vaddvq_s32(vandq_u32(vreinterpretq_u32_f32(a.v),_mm_set1_epi32(1))); }
+#elif defined(__SSE4_2__)
__forceinline size_t popcnt(const vboolf4& a) { return popcnt((size_t)_mm_movemask_ps(a)); }
#else
__forceinline size_t popcnt(const vboolf4& a) { return bool(a[0])+bool(a[1])+bool(a[2])+bool(a[3]); }
diff --git a/thirdparty/embree/common/simd/vboolf8_avx.h b/thirdparty/embree/common/simd/vboolf8_avx.h
index ba77cc3c5e..18cede19c6 100644
--- a/thirdparty/embree/common/simd/vboolf8_avx.h
+++ b/thirdparty/embree/common/simd/vboolf8_avx.h
@@ -76,7 +76,7 @@ namespace embree
////////////////////////////////////////////////////////////////////////////////
__forceinline vboolf(FalseTy) : v(_mm256_setzero_ps()) {}
- __forceinline vboolf(TrueTy) : v(_mm256_cmp_ps(_mm256_setzero_ps(), _mm256_setzero_ps(), _CMP_EQ_OQ)) {}
+ __forceinline vboolf(TrueTy) : v(_mm256_castsi256_ps(_mm256_set1_epi32(0xFFFFFFFF))) {}
////////////////////////////////////////////////////////////////////////////////
/// Array Access
diff --git a/thirdparty/embree/common/simd/vdouble4_avx.h b/thirdparty/embree/common/simd/vdouble4_avx.h
index 55326de7dd..208bb7ac99 100644
--- a/thirdparty/embree/common/simd/vdouble4_avx.h
+++ b/thirdparty/embree/common/simd/vdouble4_avx.h
@@ -189,13 +189,20 @@ namespace embree
__forceinline vboold4 operator >=(const vdouble4& a, const vdouble4& b) { return _mm256_cmp_pd_mask(a, b, _MM_CMPINT_GE); }
__forceinline vboold4 operator > (const vdouble4& a, const vdouble4& b) { return _mm256_cmp_pd_mask(a, b, _MM_CMPINT_GT); }
__forceinline vboold4 operator <=(const vdouble4& a, const vdouble4& b) { return _mm256_cmp_pd_mask(a, b, _MM_CMPINT_LE); }
-#else
+#elif !defined(__aarch64__)
__forceinline vboold4 operator ==(const vdouble4& a, const vdouble4& b) { return _mm256_cmp_pd(a, b, _CMP_EQ_OQ); }
__forceinline vboold4 operator !=(const vdouble4& a, const vdouble4& b) { return _mm256_cmp_pd(a, b, _CMP_NEQ_UQ); }
__forceinline vboold4 operator < (const vdouble4& a, const vdouble4& b) { return _mm256_cmp_pd(a, b, _CMP_LT_OS); }
__forceinline vboold4 operator >=(const vdouble4& a, const vdouble4& b) { return _mm256_cmp_pd(a, b, _CMP_NLT_US); }
__forceinline vboold4 operator > (const vdouble4& a, const vdouble4& b) { return _mm256_cmp_pd(a, b, _CMP_NLE_US); }
__forceinline vboold4 operator <=(const vdouble4& a, const vdouble4& b) { return _mm256_cmp_pd(a, b, _CMP_LE_OS); }
+#else
+ __forceinline vboold4 operator ==(const vdouble4& a, const vdouble4& b) { return _mm256_cmpeq_pd(a, b); }
+ __forceinline vboold4 operator !=(const vdouble4& a, const vdouble4& b) { return _mm256_cmpneq_pd(a, b); }
+ __forceinline vboold4 operator < (const vdouble4& a, const vdouble4& b) { return _mm256_cmplt_pd(a, b); }
+ __forceinline vboold4 operator >=(const vdouble4& a, const vdouble4& b) { return _mm256_cmpnlt_pd(a, b); }
+ __forceinline vboold4 operator > (const vdouble4& a, const vdouble4& b) { return _mm256_cmpnle_pd(a, b); }
+ __forceinline vboold4 operator <=(const vdouble4& a, const vdouble4& b) { return _mm256_cmple_pd(a, b); }
#endif
__forceinline vboold4 operator ==(const vdouble4& a, double b) { return a == vdouble4(b); }
diff --git a/thirdparty/embree/common/simd/vfloat16_avx512.h b/thirdparty/embree/common/simd/vfloat16_avx512.h
index 9f1e2459c4..75c471cc0c 100644
--- a/thirdparty/embree/common/simd/vfloat16_avx512.h
+++ b/thirdparty/embree/common/simd/vfloat16_avx512.h
@@ -177,9 +177,10 @@ namespace embree
__forceinline vfloat16 abs (const vfloat16& a) { return _mm512_castsi512_ps(_mm512_and_epi32(_mm512_castps_si512(a),_mm512_set1_epi32(0x7FFFFFFF))); }
__forceinline vfloat16 signmsk(const vfloat16& a) { return _mm512_castsi512_ps(_mm512_and_epi32(_mm512_castps_si512(a),_mm512_set1_epi32(0x80000000))); }
- __forceinline vfloat16 rcp(const vfloat16& a) {
+ __forceinline vfloat16 rcp(const vfloat16& a)
+ {
const vfloat16 r = _mm512_rcp14_ps(a);
- return _mm512_mul_ps(r, _mm512_fnmadd_ps(r, a, vfloat16(2.0f)));
+ return _mm512_fmadd_ps(r, _mm512_fnmadd_ps(a, r, vfloat16(1.0)), r); // computes r + r * (1 - a*r)
}
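  // Derivation of the step above (illustrative note, not part of the patch):
  // one Newton-Raphson iteration for y = 1/a refines r as r' = r*(2 - a*r)
  // = r + r*(1 - a*r). Evaluating the residual e = 1 - a*r with fnmadd and
  // folding the update with fmadd computes e with a single rounding, which is
  // why this form replaces the earlier r * (2 - a*r) expression.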
__forceinline vfloat16 sqr (const vfloat16& a) { return _mm512_mul_ps(a,a); }
diff --git a/thirdparty/embree/common/simd/vfloat4_sse2.h b/thirdparty/embree/common/simd/vfloat4_sse2.h
index 5215bf9730..6d7e11fe72 100644
--- a/thirdparty/embree/common/simd/vfloat4_sse2.h
+++ b/thirdparty/embree/common/simd/vfloat4_sse2.h
@@ -42,6 +42,11 @@ namespace embree
__forceinline vfloat(float a, float b, float c, float d) : v(_mm_set_ps(d, c, b, a)) {}
__forceinline explicit vfloat(const vint4& a) : v(_mm_cvtepi32_ps(a)) {}
+#if defined(__aarch64__)
+ __forceinline explicit vfloat(const vuint4& x) {
+ v = vcvtq_f32_u32(vreinterpretq_u32_s32(x.v));
+ }
+#else
__forceinline explicit vfloat(const vuint4& x) {
const __m128i a = _mm_and_si128(x,_mm_set1_epi32(0x7FFFFFFF));
const __m128i b = _mm_and_si128(_mm_srai_epi32(x,31),_mm_set1_epi32(0x4F000000)); //0x4F000000 = 2^31
@@ -49,7 +54,7 @@ namespace embree
const __m128 bf = _mm_castsi128_ps(b);
v = _mm_add_ps(af,bf);
}
-
+#endif
////////////////////////////////////////////////////////////////////////////////
/// Constants
////////////////////////////////////////////////////////////////////////////////
@@ -107,7 +112,11 @@ namespace embree
#endif
}
-#if defined(__SSE4_1__)
+#if defined(__aarch64__)
+ static __forceinline vfloat4 load(const char* ptr) {
+ return __m128(_mm_load4epi8_f32(((__m128i*)ptr)));
+ }
+#elif defined(__SSE4_1__)
static __forceinline vfloat4 load(const char* ptr) {
return _mm_cvtepi32_ps(_mm_cvtepi8_epi32(_mm_loadu_si128((__m128i*)ptr)));
}
@@ -117,7 +126,11 @@ namespace embree
}
#endif
-#if defined(__SSE4_1__)
+#if defined(__aarch64__)
+ static __forceinline vfloat4 load(const unsigned char* ptr) {
+ return __m128(_mm_load4epu8_f32(((__m128i*)ptr)));
+ }
+#elif defined(__SSE4_1__)
static __forceinline vfloat4 load(const unsigned char* ptr) {
return _mm_cvtepi32_ps(_mm_cvtepu8_epi32(_mm_loadu_si128((__m128i*)ptr)));
}
@@ -128,7 +141,11 @@ namespace embree
}
#endif
-#if defined(__SSE4_1__)
+#if defined(__aarch64__)
+ static __forceinline vfloat4 load(const short* ptr) {
+ return __m128(_mm_load4epi16_f32(((__m128i*)ptr)));
+ }
+#elif defined(__SSE4_1__)
static __forceinline vfloat4 load(const short* ptr) {
return _mm_cvtepi32_ps(_mm_cvtepi16_epi32(_mm_loadu_si128((__m128i*)ptr)));
}
@@ -145,15 +162,19 @@ namespace embree
static __forceinline void store_nt(void* ptr, const vfloat4& v)
{
#if defined (__SSE4_1__)
+#if defined(__aarch64__)
_mm_stream_ps((float*)ptr,v);
#else
+ _mm_stream_ps((float*)ptr,v);
+#endif
+#else
_mm_store_ps((float*)ptr,v);
#endif
}
template<int scale = 4>
static __forceinline vfloat4 gather(const float* ptr, const vint4& index) {
-#if defined(__AVX2__)
+#if defined(__AVX2__) && !defined(__aarch64__)
return _mm_i32gather_ps(ptr, index, scale);
#else
return vfloat4(
@@ -169,7 +190,7 @@ namespace embree
vfloat4 r = zero;
#if defined(__AVX512VL__)
return _mm_mmask_i32gather_ps(r, mask, index, ptr, scale);
-#elif defined(__AVX2__)
+#elif defined(__AVX2__) && !defined(__aarch64__)
return _mm_mask_i32gather_ps(r, ptr, index, mask, scale);
#else
if (likely(mask[0])) r[0] = *(float*)(((char*)ptr)+scale*index[0]);
@@ -223,8 +244,8 @@ namespace embree
friend __forceinline vfloat4 select(const vboolf4& m, const vfloat4& t, const vfloat4& f) {
#if defined(__AVX512VL__)
return _mm_mask_blend_ps(m, f, t);
-#elif defined(__SSE4_1__)
- return _mm_blendv_ps(f, t, m);
+#elif defined(__SSE4_1__) || (defined(__aarch64__))
+ return _mm_blendv_ps(f, t, m);
#else
return _mm_or_ps(_mm_and_ps(m, t), _mm_andnot_ps(m, f));
#endif
@@ -256,18 +277,34 @@ namespace embree
__forceinline vfloat4 toFloat(const vint4& a) { return vfloat4(a); }
__forceinline vfloat4 operator +(const vfloat4& a) { return a; }
+#if defined(__aarch64__)
+ __forceinline vfloat4 operator -(const vfloat4& a) {
+ return vnegq_f32(a);
+ }
+#else
__forceinline vfloat4 operator -(const vfloat4& a) { return _mm_xor_ps(a, _mm_castsi128_ps(_mm_set1_epi32(0x80000000))); }
+#endif
+#if defined(__aarch64__)
+ __forceinline vfloat4 abs(const vfloat4& a) { return _mm_abs_ps(a); }
+#else
__forceinline vfloat4 abs(const vfloat4& a) { return _mm_and_ps(a, _mm_castsi128_ps(_mm_set1_epi32(0x7fffffff))); }
+#endif
+
#if defined(__AVX512VL__)
__forceinline vfloat4 sign(const vfloat4& a) { return _mm_mask_blend_ps(_mm_cmp_ps_mask(a, vfloat4(zero), _CMP_LT_OQ), vfloat4(one), -vfloat4(one)); }
#else
__forceinline vfloat4 sign(const vfloat4& a) { return blendv_ps(vfloat4(one), -vfloat4(one), _mm_cmplt_ps(a, vfloat4(zero))); }
#endif
+
__forceinline vfloat4 signmsk(const vfloat4& a) { return _mm_and_ps(a,_mm_castsi128_ps(_mm_set1_epi32(0x80000000))); }
-
+
__forceinline vfloat4 rcp(const vfloat4& a)
{
+#if defined(__aarch64__)
+ return vfloat4(vdivq_f32(vdupq_n_f32(1.0f),a.v));
+#else
+
#if defined(__AVX512VL__)
const vfloat4 r = _mm_rcp14_ps(a);
#else
@@ -275,30 +312,39 @@ namespace embree
#endif
#if defined(__AVX2__)
- return _mm_mul_ps(r,_mm_fnmadd_ps(r, a, vfloat4(2.0f)));
+ return _mm_fmadd_ps(r, _mm_fnmadd_ps(a, r, vfloat4(1.0f)), r); // computes r + r * (1 - a * r)
#else
- return _mm_mul_ps(r,_mm_sub_ps(vfloat4(2.0f), _mm_mul_ps(r, a)));
+ return _mm_add_ps(r,_mm_mul_ps(r, _mm_sub_ps(vfloat4(1.0f), _mm_mul_ps(a, r)))); // computes r + r * (1 - a * r)
#endif
+
+#endif //defined(__aarch64__)
}
__forceinline vfloat4 sqr (const vfloat4& a) { return _mm_mul_ps(a,a); }
__forceinline vfloat4 sqrt(const vfloat4& a) { return _mm_sqrt_ps(a); }
__forceinline vfloat4 rsqrt(const vfloat4& a)
{
+#if defined(__aarch64__)
+ vfloat4 r = _mm_rsqrt_ps(a);
+ r = vmulq_f32(r, vrsqrtsq_f32(vmulq_f32(a, r), r));
+ r = vmulq_f32(r, vrsqrtsq_f32(vmulq_f32(a, r), r));
+ r = vmulq_f32(r, vrsqrtsq_f32(vmulq_f32(a, r), r));
+ return r;
+#else
+
#if defined(__AVX512VL__)
vfloat4 r = _mm_rsqrt14_ps(a);
#else
vfloat4 r = _mm_rsqrt_ps(a);
#endif
-#if defined(__ARM_NEON)
- r = _mm_fmadd_ps(_mm_set1_ps(1.5f), r, _mm_mul_ps(_mm_mul_ps(_mm_mul_ps(a, _mm_set1_ps(-0.5f)), r), _mm_mul_ps(r, r)));
- r = _mm_fmadd_ps(_mm_set1_ps(1.5f), r, _mm_mul_ps(_mm_mul_ps(_mm_mul_ps(a, _mm_set1_ps(-0.5f)), r), _mm_mul_ps(r, r)));
-#elif defined(__AVX2__)
+#if defined(__AVX2__)
r = _mm_fmadd_ps(_mm_set1_ps(1.5f), r, _mm_mul_ps(_mm_mul_ps(_mm_mul_ps(a, _mm_set1_ps(-0.5f)), r), _mm_mul_ps(r, r)));
#else
r = _mm_add_ps(_mm_mul_ps(_mm_set1_ps(1.5f), r), _mm_mul_ps(_mm_mul_ps(_mm_mul_ps(a, _mm_set1_ps(-0.5f)), r), _mm_mul_ps(r, r)));
#endif
+
+#endif
return r;
}
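  // Note on the AArch64 path above (illustrative, not part of the patch):
  // vrsqrtsq_f32(a*r, r) returns (3 - (a*r)*r) / 2, so each
  //     r = r * vrsqrtsq_f32(a*r, r)
  // line is one Newton-Raphson step r' = r*(3 - a*r*r)/2 toward 1/sqrt(a);
  // starting from the _mm_rsqrt_ps estimate, the three steps comfortably reach
  // single-precision accuracy.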
@@ -344,7 +390,8 @@ namespace embree
__forceinline vfloat4 max(const vfloat4& a, float b) { return _mm_max_ps(a,vfloat4(b)); }
__forceinline vfloat4 max(float a, const vfloat4& b) { return _mm_max_ps(vfloat4(a),b); }
-#if defined(__SSE4_1__)
+#if defined(__SSE4_1__) || defined(__aarch64__)
+
__forceinline vfloat4 mini(const vfloat4& a, const vfloat4& b) {
const vint4 ai = _mm_castps_si128(a);
const vint4 bi = _mm_castps_si128(b);
@@ -393,9 +440,10 @@ namespace embree
__forceinline vfloat4 nmsub(const vfloat4& a, const vfloat4& b, const vfloat4& c) { return _mm_fnmsub_ps(a,b,c); }
#else
__forceinline vfloat4 madd (const vfloat4& a, const vfloat4& b, const vfloat4& c) { return a*b+c; }
- __forceinline vfloat4 msub (const vfloat4& a, const vfloat4& b, const vfloat4& c) { return a*b-c; }
__forceinline vfloat4 nmadd(const vfloat4& a, const vfloat4& b, const vfloat4& c) { return -a*b+c;}
__forceinline vfloat4 nmsub(const vfloat4& a, const vfloat4& b, const vfloat4& c) { return -a*b-c; }
+ __forceinline vfloat4 msub (const vfloat4& a, const vfloat4& b, const vfloat4& c) { return a*b-c; }
+
#endif
////////////////////////////////////////////////////////////////////////////////
@@ -429,8 +477,13 @@ namespace embree
__forceinline vboolf4 operator ==(const vfloat4& a, const vfloat4& b) { return _mm_cmpeq_ps (a, b); }
__forceinline vboolf4 operator !=(const vfloat4& a, const vfloat4& b) { return _mm_cmpneq_ps(a, b); }
__forceinline vboolf4 operator < (const vfloat4& a, const vfloat4& b) { return _mm_cmplt_ps (a, b); }
+#if defined(__aarch64__)
+ __forceinline vboolf4 operator >=(const vfloat4& a, const vfloat4& b) { return _mm_cmpge_ps (a, b); }
+ __forceinline vboolf4 operator > (const vfloat4& a, const vfloat4& b) { return _mm_cmpgt_ps (a, b); }
+#else
__forceinline vboolf4 operator >=(const vfloat4& a, const vfloat4& b) { return _mm_cmpnlt_ps(a, b); }
__forceinline vboolf4 operator > (const vfloat4& a, const vfloat4& b) { return _mm_cmpnle_ps(a, b); }
+#endif
__forceinline vboolf4 operator <=(const vfloat4& a, const vfloat4& b) { return _mm_cmple_ps (a, b); }
#endif
@@ -484,7 +537,7 @@ namespace embree
return select(vboolf4(mask), t, f);
#endif
}
-
+
__forceinline vfloat4 lerp(const vfloat4& a, const vfloat4& b, const vfloat4& t) {
return madd(t,b-a,a);
}
@@ -506,10 +559,10 @@ namespace embree
////////////////////////////////////////////////////////////////////////////////
#if defined(__aarch64__)
- __forceinline vfloat4 floor(const vfloat4& a) { return vrndmq_f32(a.v); }
- __forceinline vfloat4 ceil (const vfloat4& a) { return vrndpq_f32(a.v); }
- __forceinline vfloat4 trunc(const vfloat4& a) { return vrndq_f32(a.v); }
- __forceinline vfloat4 round(const vfloat4& a) { return vrndnq_f32(a.v); }
+ __forceinline vfloat4 floor(const vfloat4& a) { return vrndmq_f32(a.v); } // towards -inf
+ __forceinline vfloat4 ceil (const vfloat4& a) { return vrndpq_f32(a.v); } // toward +inf
+ __forceinline vfloat4 trunc(const vfloat4& a) { return vrndq_f32(a.v); } // towards 0
+ __forceinline vfloat4 round(const vfloat4& a) { return vrndnq_f32(a.v); } // to nearest, ties to even. NOTE(LTE): arm clang uses vrndnq, old gcc uses vrndqn?
#elif defined (__SSE4_1__)
__forceinline vfloat4 floor(const vfloat4& a) { return _mm_round_ps(a, _MM_FROUND_TO_NEG_INF ); }
__forceinline vfloat4 ceil (const vfloat4& a) { return _mm_round_ps(a, _MM_FROUND_TO_POS_INF ); }
@@ -524,7 +577,9 @@ namespace embree
__forceinline vfloat4 frac(const vfloat4& a) { return a-floor(a); }
__forceinline vint4 floori(const vfloat4& a) {
-#if defined(__SSE4_1__)
+#if defined(__aarch64__)
+ return vcvtq_s32_f32(floor(a));
+#elif defined(__SSE4_1__)
return vint4(floor(a));
#else
return vint4(a-vfloat4(0.5f));
@@ -538,6 +593,16 @@ namespace embree
__forceinline vfloat4 unpacklo(const vfloat4& a, const vfloat4& b) { return _mm_unpacklo_ps(a, b); }
__forceinline vfloat4 unpackhi(const vfloat4& a, const vfloat4& b) { return _mm_unpackhi_ps(a, b); }
+#if defined(__aarch64__)
+ template<int i0, int i1, int i2, int i3>
+ __forceinline vfloat4 shuffle(const vfloat4& v) {
+ return vreinterpretq_f32_u8(vqtbl1q_u8( (uint8x16_t)v.v, _MN_SHUFFLE(i0, i1, i2, i3)));
+ }
+ template<int i0, int i1, int i2, int i3>
+ __forceinline vfloat4 shuffle(const vfloat4& a, const vfloat4& b) {
+ return vreinterpretq_f32_u8(vqtbl2q_u8( (uint8x16x2_t){(uint8x16_t)a.v, (uint8x16_t)b.v}, _MF_SHUFFLE(i0, i1, i2, i3)));
+ }
+#else
template<int i0, int i1, int i2, int i3>
__forceinline vfloat4 shuffle(const vfloat4& v) {
return _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(v), _MM_SHUFFLE(i3, i2, i1, i0)));
@@ -547,8 +612,9 @@ namespace embree
__forceinline vfloat4 shuffle(const vfloat4& a, const vfloat4& b) {
return _mm_shuffle_ps(a, b, _MM_SHUFFLE(i3, i2, i1, i0));
}
+#endif
-#if defined(__SSE3__)
+#if defined(__SSE3__) && !defined(__aarch64__)
template<> __forceinline vfloat4 shuffle<0, 0, 2, 2>(const vfloat4& v) { return _mm_moveldup_ps(v); }
template<> __forceinline vfloat4 shuffle<1, 1, 3, 3>(const vfloat4& v) { return _mm_movehdup_ps(v); }
template<> __forceinline vfloat4 shuffle<0, 1, 0, 1>(const vfloat4& v) { return _mm_castpd_ps(_mm_movedup_pd(_mm_castps_pd(v))); }
@@ -559,10 +625,14 @@ namespace embree
return shuffle<i,i,i,i>(v);
}
+#if defined(__aarch64__)
+ template<int i> __forceinline float extract(const vfloat4& a) { return a[i]; }
+#else
template<int i> __forceinline float extract (const vfloat4& a) { return _mm_cvtss_f32(shuffle<i>(a)); }
template<> __forceinline float extract<0>(const vfloat4& a) { return _mm_cvtss_f32(a); }
+#endif
-#if defined (__SSE4_1__)
+#if defined (__SSE4_1__) && !defined(__aarch64__)
template<int dst, int src, int clr> __forceinline vfloat4 insert(const vfloat4& a, const vfloat4& b) { return _mm_insert_ps(a, b, (dst << 4) | (src << 6) | clr); }
template<int dst, int src> __forceinline vfloat4 insert(const vfloat4& a, const vfloat4& b) { return insert<dst, src, 0>(a, b); }
template<int dst> __forceinline vfloat4 insert(const vfloat4& a, const float b) { return insert<dst, 0>(a, _mm_set_ss(b)); }
@@ -664,14 +734,25 @@ namespace embree
////////////////////////////////////////////////////////////////////////////////
/// Reductions
////////////////////////////////////////////////////////////////////////////////
-
+#if defined(__aarch64__)
+ __forceinline vfloat4 vreduce_min(const vfloat4& v) { float h = vminvq_f32(v); return vdupq_n_f32(h); }
+ __forceinline vfloat4 vreduce_max(const vfloat4& v) { float h = vmaxvq_f32(v); return vdupq_n_f32(h); }
+ __forceinline vfloat4 vreduce_add(const vfloat4& v) { float h = vaddvq_f32(v); return vdupq_n_f32(h); }
+#else
__forceinline vfloat4 vreduce_min(const vfloat4& v) { vfloat4 h = min(shuffle<1,0,3,2>(v),v); return min(shuffle<2,3,0,1>(h),h); }
__forceinline vfloat4 vreduce_max(const vfloat4& v) { vfloat4 h = max(shuffle<1,0,3,2>(v),v); return max(shuffle<2,3,0,1>(h),h); }
__forceinline vfloat4 vreduce_add(const vfloat4& v) { vfloat4 h = shuffle<1,0,3,2>(v) + v ; return shuffle<2,3,0,1>(h) + h ; }
+#endif
+#if defined(__aarch64__)
+ __forceinline float reduce_min(const vfloat4& v) { return vminvq_f32(v); }
+ __forceinline float reduce_max(const vfloat4& v) { return vmaxvq_f32(v); }
+ __forceinline float reduce_add(const vfloat4& v) { return vaddvq_f32(v); }
+#else
__forceinline float reduce_min(const vfloat4& v) { return _mm_cvtss_f32(vreduce_min(v)); }
__forceinline float reduce_max(const vfloat4& v) { return _mm_cvtss_f32(vreduce_max(v)); }
__forceinline float reduce_add(const vfloat4& v) { return _mm_cvtss_f32(vreduce_add(v)); }
+#endif
__forceinline size_t select_min(const vboolf4& valid, const vfloat4& v)
{
@@ -687,7 +768,7 @@ namespace embree
}
////////////////////////////////////////////////////////////////////////////////
- /// Euclidian Space Operators
+ /// Euclidean Space Operators
////////////////////////////////////////////////////////////////////////////////
__forceinline float dot(const vfloat4& a, const vfloat4& b) {
diff --git a/thirdparty/embree/common/simd/vfloat8_avx.h b/thirdparty/embree/common/simd/vfloat8_avx.h
index 13446454e8..b09d5e641d 100644
--- a/thirdparty/embree/common/simd/vfloat8_avx.h
+++ b/thirdparty/embree/common/simd/vfloat8_avx.h
@@ -107,11 +107,11 @@ namespace embree
static __forceinline void store (const vboolf8& mask, void* ptr, const vfloat8& v) { _mm256_mask_store_ps ((float*)ptr,mask,v); }
static __forceinline void storeu(const vboolf8& mask, void* ptr, const vfloat8& v) { _mm256_mask_storeu_ps((float*)ptr,mask,v); }
#else
- static __forceinline vfloat8 load (const vboolf8& mask, const void* ptr) { return _mm256_maskload_ps((float*)ptr,(__m256i)mask); }
- static __forceinline vfloat8 loadu(const vboolf8& mask, const void* ptr) { return _mm256_maskload_ps((float*)ptr,(__m256i)mask); }
+ static __forceinline vfloat8 load (const vboolf8& mask, const void* ptr) { return _mm256_maskload_ps((float*)ptr,_mm256_castps_si256(mask.v)); }
+ static __forceinline vfloat8 loadu(const vboolf8& mask, const void* ptr) { return _mm256_maskload_ps((float*)ptr,_mm256_castps_si256(mask.v)); }
- static __forceinline void store (const vboolf8& mask, void* ptr, const vfloat8& v) { _mm256_maskstore_ps((float*)ptr,(__m256i)mask,v); }
- static __forceinline void storeu(const vboolf8& mask, void* ptr, const vfloat8& v) { _mm256_maskstore_ps((float*)ptr,(__m256i)mask,v); }
+ static __forceinline void store (const vboolf8& mask, void* ptr, const vfloat8& v) { _mm256_maskstore_ps((float*)ptr,_mm256_castps_si256(mask.v),v); }
+ static __forceinline void storeu(const vboolf8& mask, void* ptr, const vfloat8& v) { _mm256_maskstore_ps((float*)ptr,_mm256_castps_si256(mask.v),v); }
#endif
#if defined(__AVX2__)
@@ -126,7 +126,7 @@ namespace embree
template<int scale = 4>
static __forceinline vfloat8 gather(const float* ptr, const vint8& index) {
-#if defined(__AVX2__)
+#if defined(__AVX2__) && !defined(__aarch64__)
return _mm256_i32gather_ps(ptr, index ,scale);
#else
return vfloat8(
@@ -146,7 +146,7 @@ namespace embree
vfloat8 r = zero;
#if defined(__AVX512VL__)
return _mm256_mmask_i32gather_ps(r, mask, index, ptr, scale);
-#elif defined(__AVX2__)
+#elif defined(__AVX2__) && !defined(__aarch64__)
return _mm256_mask_i32gather_ps(r, ptr, index, mask, scale);
#else
if (likely(mask[0])) r[0] = *(float*)(((char*)ptr)+scale*index[0]);
@@ -215,20 +215,52 @@ namespace embree
__forceinline vfloat8 toFloat(const vint8& a) { return vfloat8(a); }
__forceinline vfloat8 operator +(const vfloat8& a) { return a; }
+#if !defined(__aarch64__)
__forceinline vfloat8 operator -(const vfloat8& a) {
const __m256 mask = _mm256_castsi256_ps(_mm256_set1_epi32(0x80000000));
return _mm256_xor_ps(a, mask);
}
+#else
+ __forceinline vfloat8 operator -(const vfloat8& a) {
+ __m256 res;
+ res.lo = vnegq_f32(a.v.lo);
+ res.hi = vnegq_f32(a.v.hi);
+ return res;
+}
+#endif
+
+#if !defined(__aarch64__)
__forceinline vfloat8 abs(const vfloat8& a) {
const __m256 mask = _mm256_castsi256_ps(_mm256_set1_epi32(0x7fffffff));
return _mm256_and_ps(a, mask);
}
+#else
+__forceinline vfloat8 abs(const vfloat8& a) {
+ __m256 res;
+ res.lo = vabsq_f32(a.v.lo);
+ res.hi = vabsq_f32(a.v.hi);
+ return res;
+}
+#endif
+
+#if !defined(__aarch64__)
__forceinline vfloat8 sign (const vfloat8& a) { return _mm256_blendv_ps(vfloat8(one), -vfloat8(one), _mm256_cmp_ps(a, vfloat8(zero), _CMP_NGE_UQ)); }
+#else
+ __forceinline vfloat8 sign (const vfloat8& a) { return _mm256_blendv_ps(vfloat8(one), -vfloat8(one), _mm256_cmplt_ps(a, vfloat8(zero))); }
+#endif
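// Illustrative aside, not part of the patch: on aarch64 the 8-wide __m256 type is
// emulated with two 4-wide NEON registers, which is why the new code paths touch
// a.v.lo and a.v.hi separately. A minimal sketch of that layout, using a hypothetical
// stand-in type rather than embree's actual emulation header:
#include <arm_neon.h>
struct emulated_m256 { float32x4_t lo, hi; };            // hypothetical double-pumped register
inline emulated_m256 neg8(const emulated_m256& a) {
  return { vnegq_f32(a.lo), vnegq_f32(a.hi) };           // one 8-wide op = two NEON ops
}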
__forceinline vfloat8 signmsk(const vfloat8& a) { return _mm256_and_ps(a,_mm256_castsi256_ps(_mm256_set1_epi32(0x80000000))); }
static __forceinline vfloat8 rcp(const vfloat8& a)
{
+#if defined(__aarch64__)
+ vfloat8 ret;
+ const float32x4_t one = vdupq_n_f32(1.0f);
+ ret.v.lo = vdivq_f32(one, a.v.lo);
+ ret.v.hi = vdivq_f32(one, a.v.hi);
+ return ret;
+#endif
+
#if defined(__AVX512VL__)
const vfloat8 r = _mm256_rcp14_ps(a);
#else
@@ -236,9 +268,12 @@ namespace embree
#endif
#if defined(__AVX2__)
- return _mm256_mul_ps(r, _mm256_fnmadd_ps(r, a, vfloat8(2.0f)));
+ // First, compute 1 - a * r (which will be very close to 0)
+ const vfloat8 h_n = _mm256_fnmadd_ps(a, r, vfloat8(1.0f));
+ // Then compute r + r * h_n
+ return _mm256_fmadd_ps(r, h_n, r);
#else
- return _mm256_mul_ps(r, _mm256_sub_ps(vfloat8(2.0f), _mm256_mul_ps(r, a)));
+ return _mm256_add_ps(r,_mm256_mul_ps(r, _mm256_sub_ps(vfloat8(1.0f), _mm256_mul_ps(a, r)))); // computes r + r * (1 - a * r)
#endif
}
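// Illustrative aside, not part of the patch: both the old and new refinements are one
// Newton-Raphson step for 1/a. r*(2 - a*r) and r + r*(1 - a*r) are algebraically the
// same, but the second form maps directly onto fnmadd/fmadd and keeps the small
// residual h = 1 - a*r as a separate term. A scalar sketch of the iteration:
#include <cstdio>
int main() {
  float a = 3.0f, r = 0.3f;            // r is a rough estimate of 1/a
  for (int i = 0; i < 3; ++i) {
    float h = 1.0f - a * r;            // residual, very close to 0
    r = r + r * h;                     // refined estimate; error roughly squares each step
  }
  std::printf("%.9f\n", r);            // converges towards 0.333333333
  return 0;
}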
__forceinline vfloat8 sqr (const vfloat8& a) { return _mm256_mul_ps(a,a); }
@@ -384,7 +419,7 @@ namespace embree
static __forceinline vfloat8 select(const vboolf8& m, const vfloat8& t, const vfloat8& f) {
return _mm256_mask_blend_ps(m, f, t);
}
-#else
+#elif !defined(__aarch64__)
static __forceinline vboolf8 operator ==(const vfloat8& a, const vfloat8& b) { return _mm256_cmp_ps(a, b, _CMP_EQ_OQ); }
static __forceinline vboolf8 operator !=(const vfloat8& a, const vfloat8& b) { return _mm256_cmp_ps(a, b, _CMP_NEQ_UQ); }
static __forceinline vboolf8 operator < (const vfloat8& a, const vfloat8& b) { return _mm256_cmp_ps(a, b, _CMP_LT_OS); }
@@ -395,6 +430,18 @@ namespace embree
static __forceinline vfloat8 select(const vboolf8& m, const vfloat8& t, const vfloat8& f) {
return _mm256_blendv_ps(f, t, m);
}
+#else
+ static __forceinline vboolf8 operator ==(const vfloat8& a, const vfloat8& b) { return _mm256_cmpeq_ps(a, b); }
+ static __forceinline vboolf8 operator !=(const vfloat8& a, const vfloat8& b) { return _mm256_cmpneq_ps(a, b); }
+ static __forceinline vboolf8 operator < (const vfloat8& a, const vfloat8& b) { return _mm256_cmplt_ps(a, b); }
+ static __forceinline vboolf8 operator >=(const vfloat8& a, const vfloat8& b) { return _mm256_cmpge_ps(a, b); }
+ static __forceinline vboolf8 operator > (const vfloat8& a, const vfloat8& b) { return _mm256_cmpgt_ps(a, b); }
+ static __forceinline vboolf8 operator <=(const vfloat8& a, const vfloat8& b) { return _mm256_cmple_ps(a, b); }
+
+ static __forceinline vfloat8 select(const vboolf8& m, const vfloat8& t, const vfloat8& f) {
+ return _mm256_blendv_ps(f, t, m);
+ }
+
#endif
template<int mask>
@@ -463,10 +510,17 @@ namespace embree
/// Rounding Functions
////////////////////////////////////////////////////////////////////////////////
+#if !defined(__aarch64__)
__forceinline vfloat8 floor(const vfloat8& a) { return _mm256_round_ps(a, _MM_FROUND_TO_NEG_INF ); }
__forceinline vfloat8 ceil (const vfloat8& a) { return _mm256_round_ps(a, _MM_FROUND_TO_POS_INF ); }
__forceinline vfloat8 trunc(const vfloat8& a) { return _mm256_round_ps(a, _MM_FROUND_TO_ZERO ); }
__forceinline vfloat8 round(const vfloat8& a) { return _mm256_round_ps(a, _MM_FROUND_TO_NEAREST_INT); }
+#else
+ __forceinline vfloat8 floor(const vfloat8& a) { return _mm256_floor_ps(a); }
+ __forceinline vfloat8 ceil (const vfloat8& a) { return _mm256_ceil_ps(a); }
+#endif
+
+
__forceinline vfloat8 frac (const vfloat8& a) { return a-floor(a); }
////////////////////////////////////////////////////////////////////////////////
@@ -501,9 +555,11 @@ namespace embree
return _mm256_shuffle_ps(a, b, _MM_SHUFFLE(i3, i2, i1, i0));
}
+#if !defined(__aarch64__)
template<> __forceinline vfloat8 shuffle<0, 0, 2, 2>(const vfloat8& v) { return _mm256_moveldup_ps(v); }
template<> __forceinline vfloat8 shuffle<1, 1, 3, 3>(const vfloat8& v) { return _mm256_movehdup_ps(v); }
template<> __forceinline vfloat8 shuffle<0, 1, 0, 1>(const vfloat8& v) { return _mm256_castpd_ps(_mm256_movedup_pd(_mm256_castps_pd(v))); }
+#endif
__forceinline vfloat8 broadcast(const float* ptr) { return _mm256_broadcast_ss(ptr); }
template<size_t i> __forceinline vfloat8 insert4(const vfloat8& a, const vfloat4& b) { return _mm256_insertf128_ps(a, b, i); }
@@ -512,7 +568,7 @@ namespace embree
__forceinline float toScalar(const vfloat8& v) { return _mm_cvtss_f32(_mm256_castps256_ps128(v)); }
-#if defined (__AVX2__)
+#if defined (__AVX2__) && !defined(__aarch64__)
static __forceinline vfloat8 permute(const vfloat8& a, const __m256i& index) {
return _mm256_permutevar8x32_ps(a, index);
}
@@ -609,7 +665,7 @@ namespace embree
////////////////////////////////////////////////////////////////////////////////
/// Reductions
////////////////////////////////////////////////////////////////////////////////
-
+#if !defined(__aarch64__)
__forceinline vfloat8 vreduce_min2(const vfloat8& v) { return min(v,shuffle<1,0,3,2>(v)); }
__forceinline vfloat8 vreduce_min4(const vfloat8& v) { vfloat8 v1 = vreduce_min2(v); return min(v1,shuffle<2,3,0,1>(v1)); }
__forceinline vfloat8 vreduce_min (const vfloat8& v) { vfloat8 v1 = vreduce_min4(v); return min(v1,shuffle4<1,0>(v1)); }
@@ -625,7 +681,14 @@ namespace embree
__forceinline float reduce_min(const vfloat8& v) { return toScalar(vreduce_min(v)); }
__forceinline float reduce_max(const vfloat8& v) { return toScalar(vreduce_max(v)); }
__forceinline float reduce_add(const vfloat8& v) { return toScalar(vreduce_add(v)); }
+#else
+ __forceinline float reduce_min(const vfloat8& v) { return vminvq_f32(_mm_min_ps(v.v.lo,v.v.hi)); }
+ __forceinline float reduce_max(const vfloat8& v) { return vmaxvq_f32(_mm_max_ps(v.v.lo,v.v.hi)); }
+ __forceinline vfloat8 vreduce_min(const vfloat8& v) { return vfloat8(reduce_min(v)); }
+ __forceinline vfloat8 vreduce_max(const vfloat8& v) { return vfloat8(reduce_max(v)); }
+ __forceinline float reduce_add(const vfloat8& v) { return vaddvq_f32(_mm_add_ps(v.v.lo,v.v.hi)); }
+#endif
__forceinline size_t select_min(const vboolf8& valid, const vfloat8& v)
{
const vfloat8 a = select(valid,v,vfloat8(pos_inf));
@@ -642,7 +705,7 @@ namespace embree
////////////////////////////////////////////////////////////////////////////////
- /// Euclidian Space Operators (pairs of Vec3fa's)
+ /// Euclidean Space Operators (pairs of Vec3fa's)
////////////////////////////////////////////////////////////////////////////////
//__forceinline vfloat8 dot(const vfloat8& a, const vfloat8& b) {
diff --git a/thirdparty/embree/common/simd/vint4_sse2.h b/thirdparty/embree/common/simd/vint4_sse2.h
index 9814d5c71c..eea03a771e 100644
--- a/thirdparty/embree/common/simd/vint4_sse2.h
+++ b/thirdparty/embree/common/simd/vint4_sse2.h
@@ -106,7 +106,14 @@ namespace embree
#endif
-#if defined(__SSE4_1__)
+#if defined(__aarch64__)
+ static __forceinline vint4 load(const unsigned char* ptr) {
+ return _mm_load4epu8_epi32(((__m128i*)ptr));
+ }
+ static __forceinline vint4 loadu(const unsigned char* ptr) {
+ return _mm_load4epu8_epi32(((__m128i*)ptr));
+ }
+#elif defined(__SSE4_1__)
static __forceinline vint4 load(const unsigned char* ptr) {
return _mm_cvtepu8_epi32(_mm_loadl_epi64((__m128i*)ptr));
}
@@ -127,7 +134,9 @@ namespace embree
#endif
static __forceinline vint4 load(const unsigned short* ptr) {
-#if defined (__SSE4_1__)
+#if defined(__aarch64__)
+ return __m128i(vmovl_u16(vld1_u16(ptr)));
+#elif defined (__SSE4_1__)
return _mm_cvtepu16_epi32(_mm_loadu_si128((__m128i*)ptr));
#else
return vint4(ptr[0],ptr[1],ptr[2],ptr[3]);
@@ -135,7 +144,12 @@ namespace embree
}
static __forceinline void store(unsigned char* ptr, const vint4& v) {
-#if defined(__SSE4_1__)
+#if defined(__aarch64__)
+ int32x4_t x = v;
+ uint16x4_t y = vqmovn_u32(uint32x4_t(x));
+ uint8x8_t z = vqmovn_u16(vcombine_u16(y, y));
+ vst1_lane_u32((uint32_t *)ptr,uint32x2_t(z), 0);
+#elif defined(__SSE4_1__)
__m128i x = v;
x = _mm_packus_epi32(x, x);
x = _mm_packus_epi16(x, x);
@@ -147,20 +161,26 @@ namespace embree
}
static __forceinline void store(unsigned short* ptr, const vint4& v) {
+#if defined(__aarch64__)
+ uint32x4_t x = uint32x4_t(v.v);
+ uint16x4_t y = vqmovn_u32(x);
+ vst1_u16(ptr, y);
+#else
for (size_t i=0;i<4;i++)
ptr[i] = (unsigned short)v[i];
+#endif
}
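// Illustrative aside, not part of the patch: vqmovn_u32 is a saturating narrow, so the
// NEON store above clamps each 32-bit lane into the unsigned 16-bit range rather than
// truncating it. A scalar sketch of the same behaviour:
#include <cstdint>
inline uint16_t narrow_saturate_u16(uint32_t x) {
  return x > 0xFFFFu ? uint16_t(0xFFFFu) : uint16_t(x);  // clamp, do not wrap
}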
static __forceinline vint4 load_nt(void* ptr) {
-#if defined(__SSE4_1__)
- return _mm_stream_load_si128((__m128i*)ptr);
+#if defined(__aarch64__) || defined(__SSE4_1__)
+ return _mm_stream_load_si128((__m128i*)ptr);
#else
return _mm_load_si128((__m128i*)ptr);
#endif
}
static __forceinline void store_nt(void* ptr, const vint4& v) {
-#if defined(__SSE4_1__)
+#if !defined(__aarch64__) && defined(__SSE4_1__)
_mm_stream_ps((float*)ptr, _mm_castsi128_ps(v));
#else
_mm_store_si128((__m128i*)ptr,v);
@@ -169,7 +189,7 @@ namespace embree
template<int scale = 4>
static __forceinline vint4 gather(const int* ptr, const vint4& index) {
-#if defined(__AVX2__)
+#if defined(__AVX2__) && !defined(__aarch64__)
return _mm_i32gather_epi32(ptr, index, scale);
#else
return vint4(
@@ -185,7 +205,7 @@ namespace embree
vint4 r = zero;
#if defined(__AVX512VL__)
return _mm_mmask_i32gather_epi32(r, mask, index, ptr, scale);
-#elif defined(__AVX2__)
+#elif defined(__AVX2__) && !defined(__aarch64__)
return _mm_mask_i32gather_epi32(r, ptr, index, mask, scale);
#else
if (likely(mask[0])) r[0] = *(int*)(((char*)ptr)+scale*index[0]);
@@ -222,7 +242,7 @@ namespace embree
#endif
}
-#if defined(__x86_64__)
+#if defined(__x86_64__) || defined(__aarch64__)
static __forceinline vint4 broadcast64(long long a) { return _mm_set1_epi64x(a); }
#endif
@@ -236,6 +256,8 @@ namespace embree
friend __forceinline vint4 select(const vboolf4& m, const vint4& t, const vint4& f) {
#if defined(__AVX512VL__)
return _mm_mask_blend_epi32(m, (__m128i)f, (__m128i)t);
+#elif defined(__aarch64__)
+ return _mm_castps_si128(_mm_blendv_ps((__m128)f.v,(__m128) t.v, (__m128)m.v));
#elif defined(__SSE4_1__)
return _mm_castps_si128(_mm_blendv_ps(_mm_castsi128_ps(f), _mm_castsi128_ps(t), m));
#else
@@ -256,7 +278,9 @@ namespace embree
__forceinline vint4 operator +(const vint4& a) { return a; }
__forceinline vint4 operator -(const vint4& a) { return _mm_sub_epi32(_mm_setzero_si128(), a); }
-#if defined(__SSSE3__)
+#if defined(__aarch64__)
+ __forceinline vint4 abs(const vint4& a) { return vabsq_s32(a.v); }
+#elif defined(__SSSE3__)
__forceinline vint4 abs(const vint4& a) { return _mm_abs_epi32(a); }
#endif
@@ -272,7 +296,7 @@ namespace embree
__forceinline vint4 operator -(const vint4& a, int b) { return a - vint4(b); }
__forceinline vint4 operator -(int a, const vint4& b) { return vint4(a) - b; }
-#if defined(__SSE4_1__)
+#if (defined(__aarch64__)) || defined(__SSE4_1__)
__forceinline vint4 operator *(const vint4& a, const vint4& b) { return _mm_mullo_epi32(a, b); }
#else
__forceinline vint4 operator *(const vint4& a, const vint4& b) { return vint4(a[0]*b[0],a[1]*b[1],a[2]*b[2],a[3]*b[3]); }
@@ -292,8 +316,8 @@ namespace embree
__forceinline vint4 operator ^(const vint4& a, int b) { return a ^ vint4(b); }
__forceinline vint4 operator ^(int a, const vint4& b) { return vint4(a) ^ b; }
- __forceinline vint4 operator <<(const vint4& a, int n) { return _mm_slli_epi32(a, n); }
- __forceinline vint4 operator >>(const vint4& a, int n) { return _mm_srai_epi32(a, n); }
+ __forceinline vint4 operator <<(const vint4& a, const int n) { return _mm_slli_epi32(a, n); }
+ __forceinline vint4 operator >>(const vint4& a, const int n) { return _mm_srai_epi32(a, n); }
__forceinline vint4 sll (const vint4& a, int b) { return _mm_slli_epi32(a, b); }
__forceinline vint4 sra (const vint4& a, int b) { return _mm_srai_epi32(a, b); }
@@ -309,7 +333,7 @@ namespace embree
__forceinline vint4& operator -=(vint4& a, const vint4& b) { return a = a - b; }
__forceinline vint4& operator -=(vint4& a, int b) { return a = a - b; }
-#if defined(__SSE4_1__)
+#if (defined(__aarch64__)) || defined(__SSE4_1__)
__forceinline vint4& operator *=(vint4& a, const vint4& b) { return a = a * b; }
__forceinline vint4& operator *=(vint4& a, int b) { return a = a * b; }
#endif
@@ -393,7 +417,7 @@ namespace embree
#endif
}
-#if defined(__SSE4_1__)
+#if defined(__aarch64__) || defined(__SSE4_1__)
__forceinline vint4 min(const vint4& a, const vint4& b) { return _mm_min_epi32(a, b); }
__forceinline vint4 max(const vint4& a, const vint4& b) { return _mm_max_epi32(a, b); }
@@ -417,6 +441,16 @@ namespace embree
__forceinline vint4 unpacklo(const vint4& a, const vint4& b) { return _mm_castps_si128(_mm_unpacklo_ps(_mm_castsi128_ps(a), _mm_castsi128_ps(b))); }
__forceinline vint4 unpackhi(const vint4& a, const vint4& b) { return _mm_castps_si128(_mm_unpackhi_ps(_mm_castsi128_ps(a), _mm_castsi128_ps(b))); }
+#if defined(__aarch64__)
+ template<int i0, int i1, int i2, int i3>
+ __forceinline vint4 shuffle(const vint4& v) {
+ return vreinterpretq_s32_u8(vqtbl1q_u8( (uint8x16_t)v.v, _MN_SHUFFLE(i0, i1, i2, i3)));
+ }
+ template<int i0, int i1, int i2, int i3>
+ __forceinline vint4 shuffle(const vint4& a, const vint4& b) {
+ return vreinterpretq_s32_u8(vqtbl2q_u8( (uint8x16x2_t){(uint8x16_t)a.v, (uint8x16_t)b.v}, _MF_SHUFFLE(i0, i1, i2, i3)));
+ }
+#else
template<int i0, int i1, int i2, int i3>
__forceinline vint4 shuffle(const vint4& v) {
return _mm_shuffle_epi32(v, _MM_SHUFFLE(i3, i2, i1, i0));
@@ -426,7 +460,7 @@ namespace embree
__forceinline vint4 shuffle(const vint4& a, const vint4& b) {
return _mm_castps_si128(_mm_shuffle_ps(_mm_castsi128_ps(a), _mm_castsi128_ps(b), _MM_SHUFFLE(i3, i2, i1, i0)));
}
-
+#endif
#if defined(__SSE3__)
template<> __forceinline vint4 shuffle<0, 0, 2, 2>(const vint4& v) { return _mm_castps_si128(_mm_moveldup_ps(_mm_castsi128_ps(v))); }
template<> __forceinline vint4 shuffle<1, 1, 3, 3>(const vint4& v) { return _mm_castps_si128(_mm_movehdup_ps(_mm_castsi128_ps(v))); }
@@ -438,7 +472,7 @@ namespace embree
return shuffle<i,i,i,i>(v);
}
-#if defined(__SSE4_1__)
+#if defined(__SSE4_1__) && !defined(__aarch64__)
template<int src> __forceinline int extract(const vint4& b) { return _mm_extract_epi32(b, src); }
template<int dst> __forceinline vint4 insert(const vint4& a, const int b) { return _mm_insert_epi32(a, b, dst); }
#else
@@ -446,18 +480,27 @@ namespace embree
template<int dst> __forceinline vint4 insert(const vint4& a, int b) { vint4 c = a; c[dst&3] = b; return c; }
#endif
-
template<> __forceinline int extract<0>(const vint4& b) { return _mm_cvtsi128_si32(b); }
-
+
__forceinline int toScalar(const vint4& v) { return _mm_cvtsi128_si32(v); }
-
- __forceinline size_t toSizeT(const vint4& v) {
+
+#if defined(__aarch64__)
+ __forceinline size_t toSizeT(const vint4& v) {
+ uint64x2_t x = uint64x2_t(v.v);
+ return x[0];
+ }
+#else
+__forceinline size_t toSizeT(const vint4& v) {
#if defined(__WIN32__) && !defined(__X86_64__) // win32 workaround
return toScalar(v);
+#elif defined(__ARM_NEON)
+      // FIXME(LTE): Do we need a swap (i.e., use lane 1)?
+ return vgetq_lane_u64(*(reinterpret_cast<const uint64x2_t *>(&v)), 0);
#else
return _mm_cvtsi128_si64(v);
#endif
}
+#endif
#if defined(__AVX512VL__)
@@ -475,7 +518,17 @@ namespace embree
/// Reductions
////////////////////////////////////////////////////////////////////////////////
-#if defined(__SSE4_1__)
+#if defined(__aarch64__) || defined(__SSE4_1__)
+
+#if defined(__aarch64__)
+ __forceinline vint4 vreduce_min(const vint4& v) { int h = vminvq_s32(v); return vdupq_n_s32(h); }
+ __forceinline vint4 vreduce_max(const vint4& v) { int h = vmaxvq_s32(v); return vdupq_n_s32(h); }
+ __forceinline vint4 vreduce_add(const vint4& v) { int h = vaddvq_s32(v); return vdupq_n_s32(h); }
+
+ __forceinline int reduce_min(const vint4& v) { return vminvq_s32(v); }
+ __forceinline int reduce_max(const vint4& v) { return vmaxvq_s32(v); }
+ __forceinline int reduce_add(const vint4& v) { return vaddvq_s32(v); }
+#else
__forceinline vint4 vreduce_min(const vint4& v) { vint4 h = min(shuffle<1,0,3,2>(v),v); return min(shuffle<2,3,0,1>(h),h); }
__forceinline vint4 vreduce_max(const vint4& v) { vint4 h = max(shuffle<1,0,3,2>(v),v); return max(shuffle<2,3,0,1>(h),h); }
__forceinline vint4 vreduce_add(const vint4& v) { vint4 h = shuffle<1,0,3,2>(v) + v ; return shuffle<2,3,0,1>(h) + h ; }
@@ -483,6 +536,7 @@ namespace embree
__forceinline int reduce_min(const vint4& v) { return toScalar(vreduce_min(v)); }
__forceinline int reduce_max(const vint4& v) { return toScalar(vreduce_max(v)); }
__forceinline int reduce_add(const vint4& v) { return toScalar(vreduce_add(v)); }
+#endif
__forceinline size_t select_min(const vint4& v) { return bsf(movemask(v == vreduce_min(v))); }
__forceinline size_t select_max(const vint4& v) { return bsf(movemask(v == vreduce_max(v))); }
@@ -502,7 +556,7 @@ namespace embree
/// Sorting networks
////////////////////////////////////////////////////////////////////////////////
-#if defined(__SSE4_1__)
+#if (defined(__aarch64__)) || defined(__SSE4_1__)
__forceinline vint4 usort_ascending(const vint4& v)
{
diff --git a/thirdparty/embree/common/simd/vint8_avx.h b/thirdparty/embree/common/simd/vint8_avx.h
index f43e9a8c22..48f5a9b203 100644
--- a/thirdparty/embree/common/simd/vint8_avx.h
+++ b/thirdparty/embree/common/simd/vint8_avx.h
@@ -79,8 +79,8 @@ namespace embree
static __forceinline void store (void* ptr, const vint8& f) { _mm256_store_ps((float*)ptr,_mm256_castsi256_ps(f)); }
static __forceinline void storeu(void* ptr, const vint8& f) { _mm256_storeu_ps((float*)ptr,_mm256_castsi256_ps(f)); }
- static __forceinline void store (const vboolf8& mask, void* ptr, const vint8& f) { _mm256_maskstore_ps((float*)ptr,(__m256i)mask,_mm256_castsi256_ps(f)); }
- static __forceinline void storeu(const vboolf8& mask, void* ptr, const vint8& f) { _mm256_maskstore_ps((float*)ptr,(__m256i)mask,_mm256_castsi256_ps(f)); }
+ static __forceinline void store (const vboolf8& mask, void* ptr, const vint8& f) { _mm256_maskstore_ps((float*)ptr,_mm256_castps_si256(mask.v),_mm256_castsi256_ps(f)); }
+ static __forceinline void storeu(const vboolf8& mask, void* ptr, const vint8& f) { _mm256_maskstore_ps((float*)ptr,_mm256_castps_si256(mask.v),_mm256_castsi256_ps(f)); }
static __forceinline void store_nt(void* ptr, const vint8& v) {
_mm256_stream_ps((float*)ptr,_mm256_castsi256_ps(v));
diff --git a/thirdparty/embree/common/simd/vint8_avx2.h b/thirdparty/embree/common/simd/vint8_avx2.h
index e04737ffbe..d48efac3f4 100644
--- a/thirdparty/embree/common/simd/vint8_avx2.h
+++ b/thirdparty/embree/common/simd/vint8_avx2.h
@@ -393,6 +393,7 @@ namespace embree
__forceinline int toScalar(const vint8& v) { return _mm_cvtsi128_si32(_mm256_castsi256_si128(v)); }
+#if !defined(__aarch64__)
__forceinline vint8 permute(const vint8& v, const __m256i& index) {
return _mm256_permutevar8x32_epi32(v, index);
}
@@ -410,6 +411,9 @@ namespace embree
#endif
}
+#endif
+
+
////////////////////////////////////////////////////////////////////////////////
/// Reductions
////////////////////////////////////////////////////////////////////////////////
diff --git a/thirdparty/embree/common/simd/vuint4_sse2.h b/thirdparty/embree/common/simd/vuint4_sse2.h
index 0601b9ab80..f7817da6be 100644
--- a/thirdparty/embree/common/simd/vuint4_sse2.h
+++ b/thirdparty/embree/common/simd/vuint4_sse2.h
@@ -95,7 +95,14 @@ namespace embree
static __forceinline void storeu(const vboolf4& mask, void* ptr, const vuint4& i) { storeu(ptr,select(mask,i,loadu(ptr))); }
#endif
-#if defined(__SSE4_1__)
+#if defined(__aarch64__)
+ static __forceinline vuint4 load(const unsigned char* ptr) {
+ return _mm_load4epu8_epi32(((__m128i*)ptr));
+ }
+ static __forceinline vuint4 loadu(const unsigned char* ptr) {
+ return _mm_load4epu8_epi32(((__m128i*)ptr));
+ }
+#elif defined(__SSE4_1__)
static __forceinline vuint4 load(const unsigned char* ptr) {
return _mm_cvtepu8_epi32(_mm_loadl_epi64((__m128i*)ptr));
}
@@ -107,7 +114,9 @@ namespace embree
#endif
static __forceinline vuint4 load(const unsigned short* ptr) {
-#if defined (__SSE4_1__)
+#if defined(__aarch64__)
+ return _mm_load4epu16_epi32(((__m128i*)ptr));
+#elif defined (__SSE4_1__)
return _mm_cvtepu16_epi32(_mm_loadu_si128((__m128i*)ptr));
#else
return vuint4(ptr[0],ptr[1],ptr[2],ptr[3]);
@@ -115,7 +124,7 @@ namespace embree
}
static __forceinline vuint4 load_nt(void* ptr) {
-#if defined(__SSE4_1__)
+#if (defined(__aarch64__)) || defined(__SSE4_1__)
return _mm_stream_load_si128((__m128i*)ptr);
#else
return _mm_load_si128((__m128i*)ptr);
@@ -123,8 +132,8 @@ namespace embree
}
static __forceinline void store_nt(void* ptr, const vuint4& v) {
-#if defined(__SSE4_1__)
- _mm_stream_ps((float*)ptr,_mm_castsi128_ps(v));
+#if !defined(__aarch64__) && defined(__SSE4_1__)
+ _mm_stream_ps((float*)ptr, _mm_castsi128_ps(v));
#else
_mm_store_si128((__m128i*)ptr,v);
#endif
@@ -132,7 +141,7 @@ namespace embree
template<int scale = 4>
static __forceinline vuint4 gather(const unsigned int* ptr, const vint4& index) {
-#if defined(__AVX2__)
+#if defined(__AVX2__) && !defined(__aarch64__)
return _mm_i32gather_epi32((const int*)ptr, index, scale);
#else
return vuint4(
@@ -148,7 +157,7 @@ namespace embree
vuint4 r = zero;
#if defined(__AVX512VL__)
return _mm_mmask_i32gather_epi32(r, mask, index, ptr, scale);
-#elif defined(__AVX2__)
+#elif defined(__AVX2__) && !defined(__aarch64__)
return _mm_mask_i32gather_epi32(r, (const int*)ptr, index, mask, scale);
#else
if (likely(mask[0])) r[0] = *(unsigned int*)(((char*)ptr)+scale*index[0]);
@@ -344,6 +353,16 @@ namespace embree
__forceinline vuint4 unpacklo(const vuint4& a, const vuint4& b) { return _mm_castps_si128(_mm_unpacklo_ps(_mm_castsi128_ps(a), _mm_castsi128_ps(b))); }
__forceinline vuint4 unpackhi(const vuint4& a, const vuint4& b) { return _mm_castps_si128(_mm_unpackhi_ps(_mm_castsi128_ps(a), _mm_castsi128_ps(b))); }
+#if defined(__aarch64__)
+ template<int i0, int i1, int i2, int i3>
+ __forceinline vuint4 shuffle(const vuint4& v) {
+ return vreinterpretq_s32_u8(vqtbl1q_u8( (uint8x16_t)v.v, _MN_SHUFFLE(i0, i1, i2, i3)));
+ }
+ template<int i0, int i1, int i2, int i3>
+ __forceinline vuint4 shuffle(const vuint4& a, const vuint4& b) {
+ return vreinterpretq_s32_u8(vqtbl2q_u8( (uint8x16x2_t){(uint8x16_t)a.v, (uint8x16_t)b.v}, _MF_SHUFFLE(i0, i1, i2, i3)));
+ }
+#else
template<int i0, int i1, int i2, int i3>
__forceinline vuint4 shuffle(const vuint4& v) {
return _mm_shuffle_epi32(v, _MM_SHUFFLE(i3, i2, i1, i0));
@@ -353,7 +372,7 @@ namespace embree
__forceinline vuint4 shuffle(const vuint4& a, const vuint4& b) {
return _mm_castps_si128(_mm_shuffle_ps(_mm_castsi128_ps(a), _mm_castsi128_ps(b), _MM_SHUFFLE(i3, i2, i1, i0)));
}
-
+#endif
#if defined(__SSE3__)
template<> __forceinline vuint4 shuffle<0, 0, 2, 2>(const vuint4& v) { return _mm_castps_si128(_mm_moveldup_ps(_mm_castsi128_ps(v))); }
template<> __forceinline vuint4 shuffle<1, 1, 3, 3>(const vuint4& v) { return _mm_castps_si128(_mm_movehdup_ps(_mm_castsi128_ps(v))); }
@@ -365,7 +384,7 @@ namespace embree
return shuffle<i,i,i,i>(v);
}
-#if defined(__SSE4_1__)
+#if defined(__SSE4_1__) && !defined(__aarch64__)
template<int src> __forceinline unsigned int extract(const vuint4& b) { return _mm_extract_epi32(b, src); }
template<int dst> __forceinline vuint4 insert(const vuint4& a, const unsigned b) { return _mm_insert_epi32(a, b, dst); }
#else
@@ -373,7 +392,6 @@ namespace embree
template<int dst> __forceinline vuint4 insert(const vuint4& a, const unsigned b) { vuint4 c = a; c[dst&3] = b; return c; }
#endif
-
template<> __forceinline unsigned int extract<0>(const vuint4& b) { return _mm_cvtsi128_si32(b); }
__forceinline unsigned int toScalar(const vuint4& v) { return _mm_cvtsi128_si32(v); }
diff --git a/thirdparty/embree/common/simd/vuint8_avx.h b/thirdparty/embree/common/simd/vuint8_avx.h
index 589cd9d731..cb8b5158c1 100644
--- a/thirdparty/embree/common/simd/vuint8_avx.h
+++ b/thirdparty/embree/common/simd/vuint8_avx.h
@@ -77,8 +77,8 @@ namespace embree
static __forceinline void store (void* ptr, const vuint8& f) { _mm256_store_ps((float*)ptr,_mm256_castsi256_ps(f)); }
static __forceinline void storeu(void* ptr, const vuint8& f) { _mm256_storeu_ps((float*)ptr,_mm256_castsi256_ps(f)); }
- static __forceinline void store (const vboolf8& mask, void* ptr, const vuint8& f) { _mm256_maskstore_ps((float*)ptr,(__m256i)mask,_mm256_castsi256_ps(f)); }
- static __forceinline void storeu(const vboolf8& mask, void* ptr, const vuint8& f) { _mm256_maskstore_ps((float*)ptr,(__m256i)mask,_mm256_castsi256_ps(f)); }
+ static __forceinline void store (const vboolf8& mask, void* ptr, const vuint8& f) { _mm256_maskstore_ps((float*)ptr,_mm256_castps_si256(mask.v),_mm256_castsi256_ps(f)); }
+ static __forceinline void storeu(const vboolf8& mask, void* ptr, const vuint8& f) { _mm256_maskstore_ps((float*)ptr,_mm256_castps_si256(mask.v),_mm256_castsi256_ps(f)); }
static __forceinline void store_nt(void* ptr, const vuint8& v) {
_mm256_stream_ps((float*)ptr,_mm256_castsi256_ps(v));
diff --git a/thirdparty/embree/common/simd/vuint8_avx2.h b/thirdparty/embree/common/simd/vuint8_avx2.h
index 17b994522f..959143724b 100644
--- a/thirdparty/embree/common/simd/vuint8_avx2.h
+++ b/thirdparty/embree/common/simd/vuint8_avx2.h
@@ -385,6 +385,7 @@ namespace embree
__forceinline int toScalar(const vuint8& v) { return _mm_cvtsi128_si32(_mm256_castsi256_si128(v)); }
+#if !defined(__aarch64__)
__forceinline vuint8 permute(const vuint8& v, const __m256i& index) {
return _mm256_permutevar8x32_epi32(v, index);
}
@@ -401,6 +402,7 @@ namespace embree
return _mm256_alignr_epi8(a, b, 4*i);
#endif
}
+#endif // !defined(__aarch64__)
////////////////////////////////////////////////////////////////////////////////
/// Reductions
diff --git a/thirdparty/embree/common/simd/wasm/emulation.h b/thirdparty/embree/common/simd/wasm/emulation.h
new file mode 100644
index 0000000000..778ab4ae6a
--- /dev/null
+++ b/thirdparty/embree/common/simd/wasm/emulation.h
@@ -0,0 +1,13 @@
+// Copyright 2009-2020 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+
+#pragma once
+
+// According to https://emscripten.org/docs/porting/simd.html, _MM_SET_EXCEPTION_MASK and
+// _mm_setcsr are unavailable in WebAssembly.
+
+#define _MM_SET_EXCEPTION_MASK(x)
+
+__forceinline void _mm_setcsr(unsigned int)
+{
+}
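// Illustrative aside, not part of the patch: these stubs exist so that x86-style MXCSR
// setup code still compiles under Emscripten, where there is no SSE control/status
// register. A sketch of the kind of call site that becomes a no-op on wasm (using the
// standard <xmmintrin.h> macro on x86):
//
//   _MM_SET_EXCEPTION_MASK(_MM_MASK_MASK);   // mask all SSE floating-point exceptions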
diff --git a/thirdparty/embree/common/sys/array.h b/thirdparty/embree/common/sys/array.h
index dd9190c52a..e96939b63d 100644
--- a/thirdparty/embree/common/sys/array.h
+++ b/thirdparty/embree/common/sys/array.h
@@ -59,8 +59,8 @@ namespace embree
/********************** Iterators ****************************/
- __forceinline T* begin() const { return items; };
- __forceinline T* end () const { return items+M; };
+ __forceinline T* begin() const { return (T*)items; };
+ __forceinline T* end () const { return (T*)items+M; };
/********************** Capacity ****************************/
@@ -101,8 +101,8 @@ namespace embree
__forceinline T& at(size_t i) { assert(i < M); return items[i]; }
__forceinline const T& at(size_t i) const { assert(i < M); return items[i]; }
- __forceinline T& front() const { assert(M > 0); return items[0]; };
- __forceinline T& back () const { assert(M > 0); return items[M-1]; };
+ __forceinline T& front() { assert(M > 0); return items[0]; };
+ __forceinline T& back () { assert(M > 0); return items[M-1]; };
__forceinline T* data() { return items; };
__forceinline const T* data() const { return items; };
@@ -139,7 +139,7 @@ namespace embree
__forceinline Ty& operator[](const unsigned i) { assert(i<N); return data[i]; }
__forceinline const Ty& operator[](const unsigned i) const { assert(i<N); return data[i]; }
-#if defined(__64BIT__)
+#if defined(__64BIT__) || defined(__EMSCRIPTEN__)
__forceinline Ty& operator[](const size_t i) { assert(i<N); return data[i]; }
__forceinline const Ty& operator[](const size_t i) const { assert(i<N); return data[i]; }
#endif
@@ -196,7 +196,7 @@ namespace embree
__forceinline Ty& operator[](const int i) { assert(i>=0 && i<max_total_elements); resize(i+1); return data[i]; }
__forceinline Ty& operator[](const unsigned i) { assert(i<max_total_elements); resize(i+1); return data[i]; }
-#if defined(__64BIT__)
+#if defined(__64BIT__) || defined(__EMSCRIPTEN__)
__forceinline Ty& operator[](const size_t i) { assert(i<max_total_elements); resize(i+1); return data[i]; }
#endif
diff --git a/thirdparty/embree/common/sys/barrier.h b/thirdparty/embree/common/sys/barrier.h
index 37fc036291..c56513a2ed 100644
--- a/thirdparty/embree/common/sys/barrier.h
+++ b/thirdparty/embree/common/sys/barrier.h
@@ -24,7 +24,7 @@ namespace embree
BarrierSys& operator= (const BarrierSys& other) DELETED; // do not implement
public:
- /*! intializes the barrier with some number of threads */
+ /*! initializes the barrier with some number of threads */
void init(size_t count);
/*! lets calling thread wait in barrier */
@@ -94,7 +94,7 @@ namespace embree
LinearBarrierActive& operator= (const LinearBarrierActive& other) DELETED; // do not implement
public:
- /*! intializes the barrier with some number of threads */
+ /*! initializes the barrier with some number of threads */
void init(size_t threadCount);
/*! thread with threadIndex waits in the barrier */
diff --git a/thirdparty/embree/common/sys/intrinsics.h b/thirdparty/embree/common/sys/intrinsics.h
index ed8dd7d40a..2c2f6eccda 100644
--- a/thirdparty/embree/common/sys/intrinsics.h
+++ b/thirdparty/embree/common/sys/intrinsics.h
@@ -13,6 +13,9 @@
#include "../simd/arm/emulation.h"
#else
#include <immintrin.h>
+#if defined(__EMSCRIPTEN__)
+#include "../simd/wasm/emulation.h"
+#endif
#endif
#if defined(__BMI__) && defined(__GNUC__) && !defined(__INTEL_COMPILER)
@@ -24,24 +27,26 @@
#endif
#endif
-#if defined(__LZCNT__)
+#if defined(__aarch64__)
#if !defined(_lzcnt_u32)
- #define _lzcnt_u32 __lzcnt32
+ #define _lzcnt_u32 __builtin_clz
#endif
- #if !defined(_lzcnt_u64)
- #define _lzcnt_u64 __lzcnt64
+#else
+ #if defined(__LZCNT__)
+ #if !defined(_lzcnt_u32)
+ #define _lzcnt_u32 __lzcnt32
+ #endif
+ #if !defined(_lzcnt_u64)
+ #define _lzcnt_u64 __lzcnt64
+ #endif
#endif
#endif
#if defined(__WIN32__)
-// -- GODOT start --
-#if !defined(NOMINMAX)
-// -- GODOT end --
-#define NOMINMAX
-// -- GODOT start --
-#endif
-#include "windows.h"
-// -- GODOT end --
+# if !defined(NOMINMAX)
+# define NOMINMAX
+# endif
+# include <windows.h>
#endif
/* normally defined in pmmintrin.h, but we always need this */
@@ -69,7 +74,7 @@ namespace embree
}
__forceinline int bsf(int v) {
-#if defined(__AVX2__)
+#if defined(__AVX2__) && !defined(__aarch64__)
return _tzcnt_u32(v);
#else
unsigned long r = 0; _BitScanForward(&r,v); return r;
@@ -77,7 +82,7 @@ namespace embree
}
__forceinline unsigned bsf(unsigned v) {
-#if defined(__AVX2__)
+#if defined(__AVX2__) && !defined(__aarch64__)
return _tzcnt_u32(v);
#else
unsigned long r = 0; _BitScanForward(&r,v); return r;
@@ -118,7 +123,7 @@ namespace embree
#endif
__forceinline int bsr(int v) {
-#if defined(__AVX2__)
+#if defined(__AVX2__) && !defined(__aarch64__)
return 31 - _lzcnt_u32(v);
#else
unsigned long r = 0; _BitScanReverse(&r,v); return r;
@@ -126,7 +131,7 @@ namespace embree
}
__forceinline unsigned bsr(unsigned v) {
-#if defined(__AVX2__)
+#if defined(__AVX2__) && !defined(__aarch64__)
return 31 - _lzcnt_u32(v);
#else
unsigned long r = 0; _BitScanReverse(&r,v); return r;
@@ -145,7 +150,7 @@ namespace embree
__forceinline int lzcnt(const int x)
{
-#if defined(__AVX2__)
+#if defined(__AVX2__) && !defined(__aarch64__)
return _lzcnt_u32(x);
#else
if (unlikely(x == 0)) return 32;
@@ -214,15 +219,26 @@ namespace embree
#elif defined(__X86_ASM__)
__forceinline void __cpuid(int out[4], int op) {
- asm volatile ("cpuid" : "=a"(out[0]), "=b"(out[1]), "=c"(out[2]), "=d"(out[3]) : "a"(op));
+#if defined(__ARM_NEON)
+ if (op == 0) { // Get CPU name
+ out[0] = 0x41524d20;
+ out[1] = 0x41524d20;
+ out[2] = 0x41524d20;
+ out[3] = 0x41524d20;
+ }
+#else
+ asm volatile ("cpuid" : "=a"(out[0]), "=b"(out[1]), "=c"(out[2]), "=d"(out[3]) : "a"(op));
+#endif
}
-
+
+#if !defined(__ARM_NEON)
__forceinline void __cpuid_count(int out[4], int op1, int op2) {
asm volatile ("cpuid" : "=a"(out[0]), "=b"(out[1]), "=c"(out[2]), "=d"(out[3]) : "a"(op1), "c"(op2));
}
-
#endif
-
+
+#endif
+
__forceinline uint64_t read_tsc() {
#if defined(__X86_ASM__)
uint32_t high,low;
@@ -235,30 +251,38 @@ namespace embree
}
__forceinline int bsf(int v) {
-#if defined(__AVX2__)
+#if defined(__ARM_NEON)
+ return __builtin_ctz(v);
+#else
+#if defined(__AVX2__)
return _tzcnt_u32(v);
#elif defined(__X86_ASM__)
int r = 0; asm ("bsf %1,%0" : "=r"(r) : "r"(v)); return r;
#else
return __builtin_ctz(v);
#endif
+#endif
}
#if defined(__64BIT__)
__forceinline unsigned bsf(unsigned v)
{
-#if defined(__AVX2__)
+#if defined(__ARM_NEON)
+ return __builtin_ctz(v);
+#else
+#if defined(__AVX2__)
return _tzcnt_u32(v);
#elif defined(__X86_ASM__)
unsigned r = 0; asm ("bsf %1,%0" : "=r"(r) : "r"(v)); return r;
#else
return __builtin_ctz(v);
#endif
+#endif
}
#endif
__forceinline size_t bsf(size_t v) {
-#if defined(__AVX2__)
+#if defined(__AVX2__) && !defined(__aarch64__)
#if defined(__X86_64__)
return _tzcnt_u64(v);
#else
@@ -295,7 +319,7 @@ namespace embree
}
__forceinline int bsr(int v) {
-#if defined(__AVX2__)
+#if defined(__AVX2__) && !defined(__aarch64__)
return 31 - _lzcnt_u32(v);
#elif defined(__X86_ASM__)
int r = 0; asm ("bsr %1,%0" : "=r"(r) : "r"(v)); return r;
@@ -304,7 +328,7 @@ namespace embree
#endif
}
-#if defined(__64BIT__)
+#if defined(__64BIT__) || defined(__EMSCRIPTEN__)
__forceinline unsigned bsr(unsigned v) {
#if defined(__AVX2__)
return 31 - _lzcnt_u32(v);
@@ -317,7 +341,7 @@ namespace embree
#endif
__forceinline size_t bsr(size_t v) {
-#if defined(__AVX2__)
+#if defined(__AVX2__) && !defined(__aarch64__)
#if defined(__X86_64__)
return 63 - _lzcnt_u64(v);
#else
@@ -332,7 +356,7 @@ namespace embree
__forceinline int lzcnt(const int x)
{
-#if defined(__AVX2__)
+#if defined(__AVX2__) && !defined(__aarch64__)
return _lzcnt_u32(x);
#else
if (unlikely(x == 0)) return 32;
@@ -341,18 +365,18 @@ namespace embree
}
__forceinline size_t blsr(size_t v) {
-#if defined(__AVX2__)
-#if defined(__INTEL_COMPILER)
+#if defined(__AVX2__) && !defined(__aarch64__)
+ #if defined(__INTEL_COMPILER)
return _blsr_u64(v);
+ #else
+ #if defined(__X86_64__)
+ return __blsr_u64(v);
+ #else
+ return __blsr_u32(v);
+ #endif
+ #endif
#else
-#if defined(__X86_64__)
- return __blsr_u64(v);
-#else
- return __blsr_u32(v);
-#endif
-#endif
-#else
- return v & (v-1);
+ return v & (v-1);
#endif
}
@@ -368,7 +392,7 @@ namespace embree
#if defined(__X86_ASM__)
int r = 0; asm ("bts %1,%0" : "=r"(r) : "r"(i), "0"(v) : "flags"); return r;
#else
- return (v | (v << i));
+ return (v | (1 << i));
#endif
}
@@ -376,7 +400,7 @@ namespace embree
#if defined(__X86_ASM__)
int r = 0; asm ("btr %1,%0" : "=r"(r) : "r"(i), "0"(v) : "flags"); return r;
#else
- return (v & ~(v << i));
+ return (v & ~(1 << i));
#endif
}
@@ -392,7 +416,7 @@ namespace embree
#if defined(__X86_ASM__)
size_t r = 0; asm ("bts %1,%0" : "=r"(r) : "r"(i), "0"(v) : "flags"); return r;
#else
- return (v | (v << i));
+ return (v | (1 << i));
#endif
}
@@ -400,7 +424,7 @@ namespace embree
#if defined(__X86_ASM__)
size_t r = 0; asm ("btr %1,%0" : "=r"(r) : "r"(i), "0"(v) : "flags"); return r;
#else
- return (v & ~(v << i));
+ return (v & ~(1 << i));
#endif
}
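// Illustrative aside, not part of the patch: the fixed fallbacks now match the x86
// bts/btr semantics used in the asm paths above, i.e. return the value with bit i set
// or cleared; the old (v << i) form shifted the whole value instead of a single bit.
// A scalar sketch under that assumption:
#include <cassert>
int main() {
  int v = 0x4;
  assert((v | (1 << 1)) == 0x6);    // "bts": set bit 1
  assert((v & ~(1 << 2)) == 0x0);   // "btr": clear bit 2
  return 0;
}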
@@ -435,8 +459,8 @@ namespace embree
#endif
#endif
-#if defined(__SSE4_2__)
-
+#if defined(__SSE4_2__) || defined(__ARM_NEON)
+
__forceinline int popcnt(int in) {
return _mm_popcnt_u32(in);
}
@@ -483,14 +507,14 @@ namespace embree
#endif
}
- __forceinline void prefetchL1EX(const void* ptr) {
- prefetchEX(ptr);
+ __forceinline void prefetchL1EX(const void* ptr) {
+ prefetchEX(ptr);
}
-
- __forceinline void prefetchL2EX(const void* ptr) {
- prefetchEX(ptr);
+
+ __forceinline void prefetchL2EX(const void* ptr) {
+ prefetchEX(ptr);
}
-#if defined(__AVX2__)
+#if defined(__AVX2__) && !defined(__aarch64__)
__forceinline unsigned int pext(unsigned int a, unsigned int b) { return _pext_u32(a, b); }
__forceinline unsigned int pdep(unsigned int a, unsigned int b) { return _pdep_u32(a, b); }
#if defined(__X86_64__)
diff --git a/thirdparty/embree/common/sys/mutex.cpp b/thirdparty/embree/common/sys/mutex.cpp
index 789feaf2d8..8212deaa49 100644
--- a/thirdparty/embree/common/sys/mutex.cpp
+++ b/thirdparty/embree/common/sys/mutex.cpp
@@ -36,6 +36,7 @@ namespace embree
MAYBE_UNUSED bool ok = pthread_mutex_destroy((pthread_mutex_t*)mutex) == 0;
assert(ok);
delete (pthread_mutex_t*)mutex;
+ mutex = nullptr;
}
void MutexSys::lock()
diff --git a/thirdparty/embree/common/sys/mutex.h b/thirdparty/embree/common/sys/mutex.h
index 4cb3626d92..26af6c582c 100644
--- a/thirdparty/embree/common/sys/mutex.h
+++ b/thirdparty/embree/common/sys/mutex.h
@@ -7,6 +7,7 @@
#include "intrinsics.h"
#include "atomic.h"
+#define CPU_CACHELINE_SIZE 64
namespace embree
{
/*! system mutex */
@@ -83,6 +84,11 @@ namespace embree
atomic<bool> flag;
};
+ class PaddedSpinLock : public SpinLock
+ {
+ private:
+ char padding[CPU_CACHELINE_SIZE - sizeof(SpinLock)];
+ };
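// Illustrative aside, not part of the patch: padding each lock out to a cache line keeps
// two locks from sharing one 64-byte line, so threads spinning on different locks do not
// invalidate each other's cache lines (false sharing). A minimal sketch of the same idea
// using alignas instead of manual padding (hypothetical type, not embree's):
#include <atomic>
struct alignas(64) padded_flag {
  std::atomic<bool> locked{false};   // occupies its own cache line
};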
/*! safe mutex lock and unlock helper */
template<typename Mutex> class Lock {
public:
diff --git a/thirdparty/embree/common/sys/platform.h b/thirdparty/embree/common/sys/platform.h
index 3e386c4944..728bf6ed7d 100644
--- a/thirdparty/embree/common/sys/platform.h
+++ b/thirdparty/embree/common/sys/platform.h
@@ -92,16 +92,19 @@
////////////////////////////////////////////////////////////////////////////////
#ifdef __WIN32__
-#define dll_export __declspec(dllexport)
-#define dll_import __declspec(dllimport)
+# if defined(EMBREE_STATIC_LIB)
+# define dll_export
+# define dll_import
+# else
+# define dll_export __declspec(dllexport)
+# define dll_import __declspec(dllimport)
+# endif
#else
-#define dll_export __attribute__ ((visibility ("default")))
-#define dll_import
+# define dll_export __attribute__ ((visibility ("default")))
+# define dll_import
#endif
-// -- GODOT start --
#if defined(__WIN32__) && !defined(__MINGW32__)
-// -- GODOT end --
#if !defined(__noinline)
#define __noinline __declspec(noinline)
#endif
@@ -151,9 +154,7 @@
#define DELETED = delete
#endif
-// -- GODOT start --
#if !defined(likely)
-// -- GODOT end --
#if defined(_MSC_VER) && !defined(__INTEL_COMPILER)
#define likely(expr) (expr)
#define unlikely(expr) (expr)
@@ -161,9 +162,7 @@
#define likely(expr) __builtin_expect((bool)(expr),true )
#define unlikely(expr) __builtin_expect((bool)(expr),false)
#endif
-// -- GODOT start --
#endif
-// -- GODOT end --
////////////////////////////////////////////////////////////////////////////////
/// Error handling and debugging
@@ -252,6 +251,7 @@ __forceinline std::string toString(long long value) {
#pragma warning(disable:4800) // forcing value to bool 'true' or 'false' (performance warning)
//#pragma warning(disable:4267) // '=' : conversion from 'size_t' to 'unsigned long', possible loss of data
#pragma warning(disable:4244) // 'argument' : conversion from 'ssize_t' to 'unsigned int', possible loss of data
+#pragma warning(disable:4267) // conversion from 'size_t' to 'const int', possible loss of data
//#pragma warning(disable:4355) // 'this' : used in base member initializer list
//#pragma warning(disable:391 ) // '<=' : signed / unsigned mismatch
//#pragma warning(disable:4018) // '<' : signed / unsigned mismatch
diff --git a/thirdparty/embree/common/sys/sysinfo.cpp b/thirdparty/embree/common/sys/sysinfo.cpp
index f1a59e511e..7f7a009a1e 100644
--- a/thirdparty/embree/common/sys/sysinfo.cpp
+++ b/thirdparty/embree/common/sys/sysinfo.cpp
@@ -21,7 +21,11 @@ namespace embree
std::string getPlatformName()
{
-#if defined(__LINUX__) && !defined(__64BIT__)
+#if defined(__ANDROID__) && !defined(__64BIT__)
+ return "Android (32bit)";
+#elif defined(__ANDROID__) && defined(__64BIT__)
+ return "Android (64bit)";
+#elif defined(__LINUX__) && !defined(__64BIT__)
return "Linux (32bit)";
#elif defined(__LINUX__) && defined(__64BIT__)
return "Linux (64bit)";
@@ -248,9 +252,7 @@ namespace embree
#if defined(__X86_ASM__)
__noinline int64_t get_xcr0()
{
-// -- GODOT start --
-#if defined (__WIN32__) && !defined (__MINGW32__)
-// -- GODOT end --
+#if defined (__WIN32__) && !defined (__MINGW32__) && defined(_XCR_XFEATURE_ENABLED_MASK)
int64_t xcr0 = 0; // int64_t is workaround for compiler bug under VS2013, Win32
xcr0 = _xgetbv(0);
return xcr0;
@@ -337,9 +339,24 @@ namespace embree
if (cpuid_leaf_7[ECX] & CPU_FEATURE_BIT_AVX512VBMI) cpu_features |= CPU_FEATURE_AVX512VBMI;
return cpu_features;
-#elif defined(__ARM_NEON)
- /* emulated features with sse2neon */
- return CPU_FEATURE_SSE|CPU_FEATURE_SSE2|CPU_FEATURE_XMM_ENABLED;
+
+#elif defined(__ARM_NEON) || defined(__EMSCRIPTEN__)
+
+ int cpu_features = CPU_FEATURE_NEON|CPU_FEATURE_SSE|CPU_FEATURE_SSE2;
+ cpu_features |= CPU_FEATURE_SSE3|CPU_FEATURE_SSSE3|CPU_FEATURE_SSE42;
+ cpu_features |= CPU_FEATURE_XMM_ENABLED;
+ cpu_features |= CPU_FEATURE_YMM_ENABLED;
+ cpu_features |= CPU_FEATURE_SSE41 | CPU_FEATURE_RDRAND | CPU_FEATURE_F16C;
+ cpu_features |= CPU_FEATURE_POPCNT;
+ cpu_features |= CPU_FEATURE_AVX;
+ cpu_features |= CPU_FEATURE_AVX2;
+ cpu_features |= CPU_FEATURE_FMA3;
+ cpu_features |= CPU_FEATURE_LZCNT;
+ cpu_features |= CPU_FEATURE_BMI1;
+ cpu_features |= CPU_FEATURE_BMI2;
+ cpu_features |= CPU_FEATURE_NEON_2X;
+ return cpu_features;
+
#else
/* Unknown CPU. */
return 0;
@@ -376,6 +393,8 @@ namespace embree
if (features & CPU_FEATURE_AVX512VL) str += "AVX512VL ";
if (features & CPU_FEATURE_AVX512IFMA) str += "AVX512IFMA ";
if (features & CPU_FEATURE_AVX512VBMI) str += "AVX512VBMI ";
+ if (features & CPU_FEATURE_NEON) str += "NEON ";
+ if (features & CPU_FEATURE_NEON_2X) str += "2xNEON ";
return str;
}
@@ -390,6 +409,9 @@ namespace embree
if (isa == AVX) return "AVX";
if (isa == AVX2) return "AVX2";
if (isa == AVX512) return "AVX512";
+
+ if (isa == NEON) return "NEON";
+ if (isa == NEON_2X) return "2xNEON";
return "UNKNOWN";
}
@@ -410,6 +432,9 @@ namespace embree
if (hasISA(features,AVXI)) v += "AVXI ";
if (hasISA(features,AVX2)) v += "AVX2 ";
if (hasISA(features,AVX512)) v += "AVX512 ";
+
+ if (hasISA(features,NEON)) v += "NEON ";
+ if (hasISA(features,NEON_2X)) v += "2xNEON ";
return v;
}
}
@@ -613,6 +638,16 @@ namespace embree
#include <sys/time.h>
#include <pthread.h>
+#if defined(__EMSCRIPTEN__)
+#include <emscripten.h>
+
+// -- GODOT start --
+extern "C" {
+extern int godot_js_os_hw_concurrency_get();
+}
+// -- GODOT end --
+#endif
+
namespace embree
{
unsigned int getNumberOfLogicalThreads()
@@ -620,12 +655,13 @@ namespace embree
static int nThreads = -1;
if (nThreads != -1) return nThreads;
-// -- GODOT start --
-// #if defined(__MACOSX__)
#if defined(__MACOSX__) || defined(__ANDROID__)
-// -- GODOT end --
nThreads = sysconf(_SC_NPROCESSORS_ONLN); // does not work in Linux LXC container
assert(nThreads);
+#elif defined(__EMSCRIPTEN__)
+ // -- GODOT start --
+ nThreads = godot_js_os_hw_concurrency_get();
+ // -- GODOT end --
#else
cpu_set_t set;
if (pthread_getaffinity_np(pthread_self(), sizeof(set), &set) == 0)
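// Illustrative aside, not part of the patch: the web build cannot read an affinity mask
// or call sysconf, so the logical core count is supplied by the embedder through the
// godot_js_* bridge declared above. A plain Emscripten build could ask the runtime
// directly; a hedged sketch, assuming <emscripten/threading.h> is available:
//
//   int nThreads = emscripten_num_logical_cores();   // wraps navigator.hardwareConcurrency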
diff --git a/thirdparty/embree/common/sys/sysinfo.h b/thirdparty/embree/common/sys/sysinfo.h
index 72351d12e4..cefd39a0f6 100644
--- a/thirdparty/embree/common/sys/sysinfo.h
+++ b/thirdparty/embree/common/sys/sysinfo.h
@@ -55,7 +55,12 @@
# define isa sse
# define ISA SSE
# define ISA_STR "SSE"
-#else
+#elif defined(__ARM_NEON)
+// NOTE(LTE): Use sse2 for `isa` for compatibility at the moment.
+#define isa sse2
+#define ISA NEON
+#define ISA_STR "NEON"
+#else
#error Unknown ISA
#endif
@@ -133,7 +138,9 @@ namespace embree
static const int CPU_FEATURE_XMM_ENABLED = 1 << 25;
static const int CPU_FEATURE_YMM_ENABLED = 1 << 26;
static const int CPU_FEATURE_ZMM_ENABLED = 1 << 27;
-
+ static const int CPU_FEATURE_NEON = 1 << 28;
+ static const int CPU_FEATURE_NEON_2X = 1 << 29;
+
/*! get CPU features */
int getCPUFeatures();
@@ -154,6 +161,8 @@ namespace embree
static const int AVXI = AVX | CPU_FEATURE_F16C | CPU_FEATURE_RDRAND;
static const int AVX2 = AVXI | CPU_FEATURE_AVX2 | CPU_FEATURE_FMA3 | CPU_FEATURE_BMI1 | CPU_FEATURE_BMI2 | CPU_FEATURE_LZCNT;
static const int AVX512 = AVX2 | CPU_FEATURE_AVX512F | CPU_FEATURE_AVX512DQ | CPU_FEATURE_AVX512CD | CPU_FEATURE_AVX512BW | CPU_FEATURE_AVX512VL | CPU_FEATURE_ZMM_ENABLED;
+ static const int NEON = CPU_FEATURE_NEON | CPU_FEATURE_SSE | CPU_FEATURE_SSE2;
+ static const int NEON_2X = CPU_FEATURE_NEON_2X | AVX2;
/*! converts ISA bitvector into a string */
std::string stringOfISA(int features);
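// Illustrative aside, not part of the patch: NEON advertises the 4-wide SSE/SSE2 feature
// set, while NEON_2X also claims the AVX2-level bits, meaning the 8-wide kernels run as
// two NEON operations per step (the "2xNEON" string). A hedged usage sketch, assuming
// the getCPUFeatures()/hasISA helpers declared in this header:
//
//   const int features = embree::getCPUFeatures();
//   if      (embree::hasISA(features, embree::NEON_2X)) { /* pick 8-wide, double-pumped kernels */ }
//   else if (embree::hasISA(features, embree::NEON))    { /* pick 4-wide kernels */ }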
diff --git a/thirdparty/embree/common/sys/thread.cpp b/thirdparty/embree/common/sys/thread.cpp
index f4014be89b..530c3c7810 100644
--- a/thirdparty/embree/common/sys/thread.cpp
+++ b/thirdparty/embree/common/sys/thread.cpp
@@ -10,6 +10,9 @@
#include "../simd/arm/emulation.h"
#else
#include <xmmintrin.h>
+#if defined(__EMSCRIPTEN__)
+#include "../simd/wasm/emulation.h"
+#endif
#endif
#if defined(PTHREADS_WIN32)
@@ -158,9 +161,7 @@ namespace embree
/// Linux Platform
////////////////////////////////////////////////////////////////////////////////
-// -- GODOT start --
#if defined(__LINUX__) && !defined(__ANDROID__)
-// -- GODOT end --
#include <fstream>
#include <sstream>
@@ -219,6 +220,8 @@ namespace embree
/* find correct thread to affinitize to */
cpu_set_t set;
+ CPU_ZERO(&set);
+
if (pthread_getaffinity_np(pthread_self(), sizeof(set), &set) == 0)
{
for (int i=0, j=0; i<CPU_SETSIZE; i++)
@@ -241,7 +244,8 @@ namespace embree
{
cpu_set_t cset;
CPU_ZERO(&cset);
- size_t threadID = mapThreadID(affinity);
+ //size_t threadID = mapThreadID(affinity); // this is not working properly in LXC containers when some processors are disabled
+ size_t threadID = affinity;
CPU_SET(threadID, &cset);
pthread_setaffinity_np(pthread_self(), sizeof(cset), &cset);
@@ -249,7 +253,6 @@ namespace embree
}
#endif
-// -- GODOT start --
////////////////////////////////////////////////////////////////////////////////
/// Android Platform
////////////////////////////////////////////////////////////////////////////////
@@ -269,7 +272,6 @@ namespace embree
}
}
#endif
-// -- GODOT end --
////////////////////////////////////////////////////////////////////////////////
/// FreeBSD Platform
@@ -294,6 +296,21 @@ namespace embree
#endif
////////////////////////////////////////////////////////////////////////////////
+/// WebAssembly Platform
+////////////////////////////////////////////////////////////////////////////////
+
+#if defined(__EMSCRIPTEN__)
+namespace embree
+{
+ /*! set affinity of the calling thread */
+ void setAffinity(ssize_t affinity)
+ {
+ // Setting thread affinity is not supported in WASM.
+ }
+}
+#endif
+
+////////////////////////////////////////////////////////////////////////////////
/// MacOSX Platform
////////////////////////////////////////////////////////////////////////////////
@@ -379,9 +396,7 @@ namespace embree
pthread_attr_destroy(&attr);
/* set affinity */
-// -- GODOT start --
#if defined(__LINUX__) && !defined(__ANDROID__)
-// -- GODOT end --
if (threadID >= 0) {
cpu_set_t cset;
CPU_ZERO(&cset);
@@ -396,7 +411,6 @@ namespace embree
CPU_SET(threadID, &cset);
pthread_setaffinity_np(*tid, sizeof(cset), &cset);
}
-// -- GODOT start --
#elif defined(__ANDROID__)
if (threadID >= 0) {
cpu_set_t cset;
@@ -405,7 +419,6 @@ namespace embree
sched_setaffinity(pthread_gettid_np(*tid), sizeof(cset), &cset);
}
#endif
-// -- GODOT end --
return thread_t(tid);
}
@@ -424,14 +437,12 @@ namespace embree
/*! destroy a hardware thread by its handle */
void destroyThread(thread_t tid) {
-// -- GODOT start --
#if defined(__ANDROID__)
- FATAL("Can't destroy threads on Android.");
+ FATAL("Can't destroy threads on Android."); // pthread_cancel not implemented.
#else
pthread_cancel(*(pthread_t*)tid);
delete (pthread_t*)tid;
#endif
-// -- GODOT end --
}
/*! creates thread local storage */
diff --git a/thirdparty/embree/common/sys/vector.h b/thirdparty/embree/common/sys/vector.h
index f832626789..d05e1deb18 100644
--- a/thirdparty/embree/common/sys/vector.h
+++ b/thirdparty/embree/common/sys/vector.h
@@ -127,14 +127,15 @@ namespace embree
{
assert(!empty());
size_active--;
- alloc.destroy(&items[size_active]);
+ items[size_active].~T();
}
__forceinline void clear()
{
/* destroy elements */
- for (size_t i=0; i<size_active; i++)
- alloc.destroy(&items[i]);
+ for (size_t i=0; i<size_active; i++){
+ items[i].~T();
+ }
/* free memory */
alloc.deallocate(items,size_alloced);
@@ -178,8 +179,9 @@ namespace embree
/* destroy elements */
if (new_active < size_active)
{
- for (size_t i=new_active; i<size_active; i++)
- alloc.destroy(&items[i]);
+ for (size_t i=new_active; i<size_active; i++){
+ items[i].~T();
+ }
size_active = new_active;
}
@@ -195,7 +197,7 @@ namespace embree
items = alloc.allocate(new_alloced);
for (size_t i=0; i<size_active; i++) {
::new (&items[i]) T(std::move(old_items[i]));
- alloc.destroy(&old_items[i]);
+ old_items[i].~T();
}
for (size_t i=size_active; i<new_active; i++) {
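// Illustrative aside, not part of the patch: std::allocator<T>::destroy was deprecated in
// C++17 and removed in C++20, which is why the vector now invokes the destructor
// explicitly. The portable alternative is allocator_traits; a minimal sketch:
#include <memory>
template<typename T, typename Alloc>
void destroy_one(Alloc& alloc, T* p) {
  std::allocator_traits<Alloc>::destroy(alloc, p);   // dispatches to alloc, or calls p->~T()
}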
diff --git a/thirdparty/embree/common/tasking/taskschedulerinternal.h b/thirdparty/embree/common/tasking/taskschedulerinternal.h
index 8fa6bb12fa..6cc2495195 100644
--- a/thirdparty/embree/common/tasking/taskschedulerinternal.h
+++ b/thirdparty/embree/common/tasking/taskschedulerinternal.h
@@ -143,7 +143,7 @@ namespace embree
/* allocate new task on right side of stack */
size_t oldStackPtr = stackPtr;
TaskFunction* func = new (alloc(sizeof(ClosureTaskFunction<Closure>))) ClosureTaskFunction<Closure>(closure);
- new (&tasks[right]) Task(func,thread.task,oldStackPtr,size);
+ new (&(tasks[right.load()])) Task(func,thread.task,oldStackPtr,size);
right++;
/* also move left pointer */
diff --git a/thirdparty/embree/common/tasking/taskschedulertbb.h b/thirdparty/embree/common/tasking/taskschedulertbb.h
index 35bd49849f..042ba7bc4c 100644
--- a/thirdparty/embree/common/tasking/taskschedulertbb.h
+++ b/thirdparty/embree/common/tasking/taskschedulertbb.h
@@ -11,14 +11,8 @@
#include "../sys/condition.h"
#include "../sys/ref.h"
-#if defined(__WIN32__)
-// -- GODOT start --
-#if !defined(NOMINMAX)
-// -- GODOT end --
+#if defined(__WIN32__) && !defined(NOMINMAX)
# define NOMINMAX
-// -- GODOT start --
-#endif
-// -- GODOT end --
#endif
// We need to define these to avoid implicit linkage against
diff --git a/thirdparty/embree/include/embree3/rtcore_common.h b/thirdparty/embree/include/embree3/rtcore_common.h
index 4857e1e05e..894628e47c 100644
--- a/thirdparty/embree/include/embree3/rtcore_common.h
+++ b/thirdparty/embree/include/embree3/rtcore_common.h
@@ -19,9 +19,7 @@ typedef int ssize_t;
#endif
#endif
-// -- GODOT start --
-#if defined(_WIN32) && defined(_MSC_VER)
-// -- GODOT end --
+#if defined(_WIN32) && !defined(__MINGW32__)
# define RTC_ALIGN(...) __declspec(align(__VA_ARGS__))
#else
# define RTC_ALIGN(...) __attribute__((aligned(__VA_ARGS__)))
diff --git a/thirdparty/embree/include/embree3/rtcore_config.h b/thirdparty/embree/include/embree3/rtcore_config.h
index 62b7b6f4dc..0b399ef040 100644
--- a/thirdparty/embree/include/embree3/rtcore_config.h
+++ b/thirdparty/embree/include/embree3/rtcore_config.h
@@ -1,4 +1,3 @@
-
// Copyright 2009-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
@@ -6,23 +5,25 @@
#define RTC_VERSION_MAJOR 3
#define RTC_VERSION_MINOR 13
-#define RTC_VERSION_PATCH 1
-#define RTC_VERSION 31301
-#define RTC_VERSION_STRING "3.13.1"
+#define RTC_VERSION_PATCH 5
+#define RTC_VERSION 31305
+#define RTC_VERSION_STRING "3.13.5"
#define RTC_MAX_INSTANCE_LEVEL_COUNT 1
#define EMBREE_MIN_WIDTH 0
#define RTC_MIN_WIDTH EMBREE_MIN_WIDTH
-#define EMBREE_STATIC_LIB
-/* #undef EMBREE_API_NAMESPACE */
+#if !defined(EMBREE_STATIC_LIB)
+# define EMBREE_STATIC_LIB
+#endif
+/* #undef EMBREE_API_NAMESPACE*/
#if defined(EMBREE_API_NAMESPACE)
# define RTC_NAMESPACE
-# define RTC_NAMESPACE_BEGIN namespace {
+# define RTC_NAMESPACE_BEGIN namespace {
# define RTC_NAMESPACE_END }
-# define RTC_NAMESPACE_USE using namespace ;
+# define RTC_NAMESPACE_USE using namespace;
# define RTC_API_EXTERN_C
# undef EMBREE_API_NAMESPACE
#else
diff --git a/thirdparty/embree/include/embree3/rtcore_quaternion.h b/thirdparty/embree/include/embree3/rtcore_quaternion.h
index 6489fa3467..bd5fe1d89a 100644
--- a/thirdparty/embree/include/embree3/rtcore_quaternion.h
+++ b/thirdparty/embree/include/embree3/rtcore_quaternion.h
@@ -8,7 +8,7 @@
RTC_NAMESPACE_BEGIN
/*
- * Structure for transformation respresentation as a matrix decomposition using
+ * Structure for transformation representation as a matrix decomposition using
* a quaternion
*/
struct RTC_ALIGN(16) RTCQuaternionDecomposition
diff --git a/thirdparty/embree/include/embree3/rtcore_scene.h b/thirdparty/embree/include/embree3/rtcore_scene.h
index 5878a3d402..34d87a2ce4 100644
--- a/thirdparty/embree/include/embree3/rtcore_scene.h
+++ b/thirdparty/embree/include/embree3/rtcore_scene.h
@@ -47,9 +47,12 @@ RTC_API void rtcAttachGeometryByID(RTCScene scene, RTCGeometry geometry, unsigne
/* Detaches the geometry from the scene. */
RTC_API void rtcDetachGeometry(RTCScene scene, unsigned int geomID);
-/* Gets a geometry handle from the scene. */
+/* Gets a geometry handle from the scene. This function is not thread safe and should be used during rendering. */
RTC_API RTCGeometry rtcGetGeometry(RTCScene scene, unsigned int geomID);
+/* Gets a geometry handle from the scene. This function is thread safe and should NOT be used during rendering. */
+RTC_API RTCGeometry rtcGetGeometryThreadSafe(RTCScene scene, unsigned int geomID);
+
/* Commits the scene. */
RTC_API void rtcCommitScene(RTCScene scene);
diff --git a/thirdparty/embree/kernels/builders/bvh_builder_morton.h b/thirdparty/embree/kernels/builders/bvh_builder_morton.h
index 8f21e3254f..cba32ca73c 100644
--- a/thirdparty/embree/kernels/builders/bvh_builder_morton.h
+++ b/thirdparty/embree/kernels/builders/bvh_builder_morton.h
@@ -411,7 +411,7 @@ namespace embree
ReductionTy bounds[MAX_BRANCHING_FACTOR];
if (current.size() > singleThreadThreshold)
{
- /*! parallel_for is faster than spawing sub-tasks */
+ /*! parallel_for is faster than spawning sub-tasks */
parallel_for(size_t(0), numChildren, [&] (const range<size_t>& r) {
for (size_t i=r.begin(); i<r.end(); i++) {
bounds[i] = recurse(depth+1,children[i],nullptr,true);
diff --git a/thirdparty/embree/kernels/builders/bvh_builder_msmblur.h b/thirdparty/embree/kernels/builders/bvh_builder_msmblur.h
index f9a08d65cd..6e73c0d250 100644
--- a/thirdparty/embree/kernels/builders/bvh_builder_msmblur.h
+++ b/thirdparty/embree/kernels/builders/bvh_builder_msmblur.h
@@ -374,7 +374,7 @@ namespace embree
const size_t begin = set.begin();
const size_t end = set.end();
- const size_t center = (begin + end)/2;
+ const size_t center = (begin + end + 1) / 2;
PrimInfoMB linfo = empty;
for (size_t i=begin; i<center; i++)
@@ -594,7 +594,7 @@ namespace embree
/* spawn tasks */
if (unlikely(current.size() > cfg.singleThreadThreshold))
{
- /*! parallel_for is faster than spawing sub-tasks */
+ /*! parallel_for is faster than spawning sub-tasks */
parallel_for(size_t(0), children.size(), [&] (const range<size_t>& r) {
for (size_t i=r.begin(); i<r.end(); i++) {
values[i] = recurse(children[i],nullptr,true);
diff --git a/thirdparty/embree/kernels/builders/bvh_builder_sah.h b/thirdparty/embree/kernels/builders/bvh_builder_sah.h
index fff4bf2a35..24c5faf8be 100644
--- a/thirdparty/embree/kernels/builders/bvh_builder_sah.h
+++ b/thirdparty/embree/kernels/builders/bvh_builder_sah.h
@@ -298,7 +298,7 @@ namespace embree
/* spawn tasks */
if (current.size() > cfg.singleThreadThreshold)
{
- /*! parallel_for is faster than spawing sub-tasks */
+ /*! parallel_for is faster than spawning sub-tasks */
parallel_for(size_t(0), numChildren, [&] (const range<size_t>& r) { // FIXME: no range here
for (size_t i=r.begin(); i<r.end(); i++) {
values[i] = recurse(children[i],nullptr,true);
diff --git a/thirdparty/embree/kernels/builders/heuristic_binning.h b/thirdparty/embree/kernels/builders/heuristic_binning.h
index ee29d09ac9..41be6183b8 100644
--- a/thirdparty/embree/kernels/builders/heuristic_binning.h
+++ b/thirdparty/embree/kernels/builders/heuristic_binning.h
@@ -57,14 +57,12 @@ namespace embree
__forceinline Vec3ia bin(const Vec3fa& p) const
{
const vint4 i = floori((vfloat4(p)-ofs)*scale);
-#if 1
assert(i[0] >= 0 && (size_t)i[0] < num);
assert(i[1] >= 0 && (size_t)i[1] < num);
assert(i[2] >= 0 && (size_t)i[2] < num);
- return Vec3ia(i);
-#else
+
+      // we clamp to handle corner cases that could otherwise compute an out-of-bounds bin
return Vec3ia(clamp(i,vint4(0),vint4(num-1)));
-#endif
}
/*! faster but unsafe binning */
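The change above replaces the debug-only asserting path with an always-on clamp. A small standalone sketch of the same idea (plain scalar code, names are illustrative) shows why rounding at the upper bound needs it:

#include <algorithm>
#include <cmath>

// Sketch: map a position into one of `num` bins starting at `ofs`.
// Floating-point rounding can push a point sitting exactly on the upper
// bound to bin == num, so the index is clamped into [0, num-1] instead of
// only being checked by an assert in debug builds.
int binIndex(float p, float ofs, float scale, int num)
{
  int i = static_cast<int>(std::floor((p - ofs) * scale));
  return std::clamp(i, 0, num - 1);
}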
diff --git a/thirdparty/embree/kernels/builders/heuristic_openmerge_array.h b/thirdparty/embree/kernels/builders/heuristic_openmerge_array.h
index 4249d16ea1..354e283557 100644
--- a/thirdparty/embree/kernels/builders/heuristic_openmerge_array.h
+++ b/thirdparty/embree/kernels/builders/heuristic_openmerge_array.h
@@ -275,7 +275,7 @@ namespace embree
openNodesBasedOnExtend(set);
#endif
- /* disable opening when unsufficient space for opening a node available */
+ /* disable opening when insufficient space for opening a node available */
if (set.ext_range_size() < max_open_size-1)
set.set_ext_range(set.end()); /* disable opening */
}
diff --git a/thirdparty/embree/kernels/builders/heuristic_spatial.h b/thirdparty/embree/kernels/builders/heuristic_spatial.h
index a6939ba258..8b3499ac8d 100644
--- a/thirdparty/embree/kernels/builders/heuristic_spatial.h
+++ b/thirdparty/embree/kernels/builders/heuristic_spatial.h
@@ -159,27 +159,25 @@ namespace embree
assert(binID < BINS);
bounds [binID][dim].extend(b);
}
-
- /*! bins an array of triangles */
- template<typename SplitPrimitive>
- __forceinline void bin(const SplitPrimitive& splitPrimitive, const PrimRef* prims, size_t N, const SpatialBinMapping<BINS>& mapping)
+
+ /*! bins an array of primitives */
+ template<typename PrimitiveSplitterFactory>
+ __forceinline void bin2(const PrimitiveSplitterFactory& splitterFactory, const PrimRef* source, size_t begin, size_t end, const SpatialBinMapping<BINS>& mapping)
{
- for (size_t i=0; i<N; i++)
+ for (size_t i=begin; i<end; i++)
{
- const PrimRef prim = prims[i];
+ const PrimRef& prim = source[i];
unsigned splits = prim.geomID() >> (32-RESERVED_NUM_SPATIAL_SPLITS_GEOMID_BITS);
-
- if (unlikely(splits == 1))
+
+ if (unlikely(splits <= 1))
{
const vint4 bin = mapping.bin(center(prim.bounds()));
for (size_t dim=0; dim<3; dim++)
{
assert(bin[dim] >= (int)0 && bin[dim] < (int)BINS);
- numBegin[bin[dim]][dim]++;
- numEnd [bin[dim]][dim]++;
- bounds [bin[dim]][dim].extend(prim.bounds());
+ add(dim,bin[dim],bin[dim],bin[dim],prim.bounds());
}
- }
+ }
else
{
const vint4 bin0 = mapping.bin(prim.bounds().lower);
@@ -187,89 +185,44 @@ namespace embree
for (size_t dim=0; dim<3; dim++)
{
+ if (unlikely(mapping.invalid(dim)))
+ continue;
+
size_t bin;
- PrimRef rest = prim;
size_t l = bin0[dim];
size_t r = bin1[dim];
-
+
// same bin optimization
if (likely(l == r))
{
- numBegin[l][dim]++;
- numEnd [l][dim]++;
- bounds [l][dim].extend(prim.bounds());
+ add(dim,l,l,l,prim.bounds());
continue;
}
-
- for (bin=(size_t)bin0[dim]; bin<(size_t)bin1[dim]; bin++)
+ size_t bin_start = bin0[dim];
+ size_t bin_end = bin1[dim];
+ BBox3fa rest = prim.bounds();
+
+ /* ensure that the split position always overlaps the primitive bounds */
+ while (bin_start < bin_end && mapping.pos(bin_start+1,dim) <= rest.lower[dim]) bin_start++;
+ while (bin_start < bin_end && mapping.pos(bin_end ,dim) >= rest.upper[dim]) bin_end--;
+
+ const auto splitter = splitterFactory(prim);
+ for (bin=bin_start; bin<bin_end; bin++)
{
const float pos = mapping.pos(bin+1,dim);
+ BBox3fa left,right;
+ splitter(rest,dim,pos,left,right);
- PrimRef left,right;
- splitPrimitive(rest,(int)dim,pos,left,right);
- if (unlikely(left.bounds().empty())) l++;
- bounds[bin][dim].extend(left.bounds());
+ if (unlikely(left.empty())) l++;
+ extend(dim,bin,left);
rest = right;
}
- if (unlikely(rest.bounds().empty())) r--;
- numBegin[l][dim]++;
- numEnd [r][dim]++;
- bounds [bin][dim].extend(rest.bounds());
+ if (unlikely(rest.empty())) r--;
+ add(dim,l,r,bin,rest);
}
- }
+ }
}
}
-
- /*! bins a range of primitives inside an array */
- template<typename SplitPrimitive>
- void bin(const SplitPrimitive& splitPrimitive, const PrimRef* prims, size_t begin, size_t end, const SpatialBinMapping<BINS>& mapping) {
- bin(splitPrimitive,prims+begin,end-begin,mapping);
- }
-
- /*! bins an array of primitives */
- template<typename PrimitiveSplitterFactory>
- __forceinline void bin2(const PrimitiveSplitterFactory& splitterFactory, const PrimRef* source, size_t begin, size_t end, const SpatialBinMapping<BINS>& mapping)
- {
- for (size_t i=begin; i<end; i++)
- {
- const PrimRef &prim = source[i];
- const vint4 bin0 = mapping.bin(prim.bounds().lower);
- const vint4 bin1 = mapping.bin(prim.bounds().upper);
-
- for (size_t dim=0; dim<3; dim++)
- {
- if (unlikely(mapping.invalid(dim)))
- continue;
-
- size_t bin;
- size_t l = bin0[dim];
- size_t r = bin1[dim];
-
- // same bin optimization
- if (likely(l == r))
- {
- add(dim,l,l,l,prim.bounds());
- continue;
- }
- const size_t bin_start = bin0[dim];
- const size_t bin_end = bin1[dim];
- BBox3fa rest = prim.bounds();
- const auto splitter = splitterFactory(prim);
- for (bin=bin_start; bin<bin_end; bin++)
- {
- const float pos = mapping.pos(bin+1,dim);
- BBox3fa left,right;
- splitter(rest,dim,pos,left,right);
- if (unlikely(left.empty())) l++;
- extend(dim,bin,left);
- rest = right;
- }
- if (unlikely(rest.empty())) r--;
- add(dim,l,r,bin,rest);
- }
- }
- }
-
/*! bins an array of primitives */
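The rewritten bin2() above first shrinks [bin_start, bin_end] so that every split plane it passes to the splitter actually cuts through the primitive's bounds. A scalar, one-axis sketch of that pre-pass (illustrative stand-in types, not Embree's):

#include <cstddef>

// Stand-in 1D mapping: pos(bin) is the world-space position of a bin boundary.
struct Mapping1D {
  float ofs, inv_scale;
  float pos(size_t bin) const { return ofs + bin * inv_scale; }
};

// Tighten the bin range so every split position pos(bin+1) lies strictly
// inside the box [lower, upper] on this axis before splitting is attempted.
void tightenBinRange(const Mapping1D& m, float lower, float upper,
                     size_t& bin_start, size_t& bin_end)
{
  while (bin_start < bin_end && m.pos(bin_start + 1) <= lower) bin_start++;
  while (bin_start < bin_end && m.pos(bin_end)       >= upper) bin_end--;
}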
diff --git a/thirdparty/embree/kernels/builders/heuristic_spatial_array.h b/thirdparty/embree/kernels/builders/heuristic_spatial_array.h
index 60d235f48d..2584c19bda 100644
--- a/thirdparty/embree/kernels/builders/heuristic_spatial_array.h
+++ b/thirdparty/embree/kernels/builders/heuristic_spatial_array.h
@@ -241,7 +241,7 @@ namespace embree
SpatialBinner binner(empty);
const SpatialBinMapping<SPATIAL_BINS> mapping(set);
binner.bin2(splitterFactory,prims0,set.begin(),set.end(),mapping);
- /* todo: best spatial split not exeeding the extended range does not provide any benefit ?*/
+ /* todo: best spatial split not exceeding the extended range does not provide any benefit ?*/
return binner.best(mapping,logBlockSize); //,set.ext_size());
}
@@ -256,7 +256,7 @@ namespace embree
binner.bin2(splitterFactory,prims0,r.begin(),r.end(),_mapping);
return binner; },
[&] (const SpatialBinner& b0, const SpatialBinner& b1) -> SpatialBinner { return SpatialBinner::reduce(b0,b1); });
- /* todo: best spatial split not exeeding the extended range does not provide any benefit ?*/
+ /* todo: best spatial split not exceeding the extended range does not provide any benefit ?*/
return binner.best(mapping,logBlockSize); //,set.ext_size());
}
@@ -286,6 +286,7 @@ namespace embree
//int bin0 = split.mapping.bin(prims0[i].lower)[split.dim];
//int bin1 = split.mapping.bin(prims0[i].upper)[split.dim];
//if (unlikely(bin0 < split.pos && bin1 >= split.pos))
+
if (unlikely(prims0[i].lower[split.dim] < fpos && prims0[i].upper[split.dim] > fpos))
{
assert(splits > 1);
@@ -384,8 +385,8 @@ namespace embree
new (&lset) PrimInfoExtRange(begin,center,center,local_left);
new (&rset) PrimInfoExtRange(center,end,end,local_right);
- assert(area(lset.geomBounds) >= 0.0f);
- assert(area(rset.geomBounds) >= 0.0f);
+ assert(!lset.geomBounds.empty() && area(lset.geomBounds) >= 0.0f);
+ assert(!rset.geomBounds.empty() && area(rset.geomBounds) >= 0.0f);
return std::pair<size_t,size_t>(left_weight,right_weight);
}
@@ -410,7 +411,7 @@ namespace embree
begin,end,local_left,local_right,
[&] (const PrimRef& ref) {
const Vec3fa c = ref.bounds().center();
- return any(((vint4)mapping.bin(c) < vSplitPos) & vSplitMask);
+ return any(((vint4)mapping.bin(c) < vSplitPos) & vSplitMask);
},
[] (PrimInfo& pinfo,const PrimRef& ref) { pinfo.add_center2(ref,ref.lower.u >> (32-RESERVED_NUM_SPATIAL_SPLITS_GEOMID_BITS)); });
@@ -419,8 +420,8 @@ namespace embree
new (&lset) PrimInfoExtRange(begin,center,center,local_left);
new (&rset) PrimInfoExtRange(center,end,end,local_right);
- assert(area(lset.geomBounds) >= 0.0f);
- assert(area(rset.geomBounds) >= 0.0f);
+ assert(!lset.geomBounds.empty() && area(lset.geomBounds) >= 0.0f);
+ assert(!rset.geomBounds.empty() && area(rset.geomBounds) >= 0.0f);
return std::pair<size_t,size_t>(left_weight,right_weight);
}
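The strengthened asserts above check emptiness explicitly because area() of a default-empty box (lower = +inf, upper = -inf) is not a meaningful number. A minimal illustration of the intended invariant (the box type is a stand-in, not Embree's BBox3fa):

#include <cassert>
#include <limits>

// Stand-in 1D box for illustration only.
struct Box1D {
  float lo =  std::numeric_limits<float>::infinity();
  float hi = -std::numeric_limits<float>::infinity();
  bool  empty()  const { return lo > hi; }
  float extent() const { return hi - lo; }
};

void checkSplitResult(const Box1D& left, const Box1D& right)
{
  // An empty side would make extent()/area() meaningless, so test it first.
  assert(!left.empty()  && left.extent()  >= 0.0f);
  assert(!right.empty() && right.extent() >= 0.0f);
}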
diff --git a/thirdparty/embree/kernels/builders/primrefgen.cpp b/thirdparty/embree/kernels/builders/primrefgen.cpp
index d279dc4993..e2d7c27bd8 100644
--- a/thirdparty/embree/kernels/builders/primrefgen.cpp
+++ b/thirdparty/embree/kernels/builders/primrefgen.cpp
@@ -184,9 +184,7 @@ namespace embree
// special variants for grid meshes
-// -- GODOT start --
#if defined(EMBREE_GEOMETRY_GRID)
-// -- GODOT end --
PrimInfo createPrimRefArrayGrids(Scene* scene, mvector<PrimRef>& prims, mvector<SubGridBuildData>& sgrids)
{
PrimInfo pinfo(empty);
@@ -296,9 +294,7 @@ namespace embree
return pinfo;
}
-// -- GODOT start --
#endif
-// -- GODOT end --
// ====================================================================================================
// ====================================================================================================
diff --git a/thirdparty/embree/kernels/builders/primrefgen_presplit.h b/thirdparty/embree/kernels/builders/primrefgen_presplit.h
index 8cd251ddd2..aa2026a85e 100644
--- a/thirdparty/embree/kernels/builders/primrefgen_presplit.h
+++ b/thirdparty/embree/kernels/builders/primrefgen_presplit.h
@@ -266,7 +266,7 @@ namespace embree
/* anything to split ? */
if (center < numPrimitives)
{
- const size_t numPrimitivesToSplit = numPrimitives - center;
+ size_t numPrimitivesToSplit = numPrimitives - center;
assert(presplitItem[center].priority >= 1.0f);
/* sort presplit items in ascending order */
@@ -279,8 +279,8 @@ namespace embree
});
);
- unsigned int *const primOffset0 = (unsigned int*)tmp_presplitItem;
- unsigned int *const primOffset1 = (unsigned int*)tmp_presplitItem + numPrimitivesToSplit;
+ unsigned int* primOffset0 = (unsigned int*)tmp_presplitItem;
+ unsigned int* primOffset1 = (unsigned int*)tmp_presplitItem + numPrimitivesToSplit;
/* compute actual number of sub-primitives generated within the [center;numPrimitives-1] range */
const size_t totalNumSubPrims = parallel_reduce( size_t(center), numPrimitives, size_t(MIN_STEP_SIZE), size_t(0), [&](const range<size_t>& t) -> size_t {
@@ -317,11 +317,16 @@ namespace embree
sum += numSubPrims;
}
new_center++;
+
+ primOffset0 += new_center - center;
+ numPrimitivesToSplit -= new_center - center;
center = new_center;
+ assert(numPrimitivesToSplit == (numPrimitives - center));
}
/* parallel prefix sum to compute offsets for storing sub-primitives */
const unsigned int offset = parallel_prefix_sum(primOffset0,primOffset1,numPrimitivesToSplit,(unsigned int)0,std::plus<unsigned int>());
+ assert(numPrimitives+offset <= alloc_numPrimitives);
/* iterate over range, and split primitives into sub primitives and append them to prims array */
parallel_for( size_t(center), numPrimitives, size_t(MIN_STEP_SIZE), [&](const range<size_t>& rn) -> void {
@@ -338,7 +343,7 @@ namespace embree
unsigned int numSubPrims = 0;
splitPrimitive(Splitter,prims[primrefID],geomID,primID,split_levels,grid_base,grid_scale,grid_extend,subPrims,numSubPrims);
const size_t newID = numPrimitives + primOffset1[j-center];
- assert(newID+numSubPrims <= alloc_numPrimitives);
+ assert(newID+numSubPrims-1 <= alloc_numPrimitives);
prims[primrefID] = subPrims[0];
for (size_t i=1;i<numSubPrims;i++)
prims[newID+i-1] = subPrims[i];
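The presplit changes above advance primOffset0 and shrink numPrimitivesToSplit so the prefix sum only covers primitives that will actually be split. A serial sketch (stand-in for the parallel_prefix_sum call, names are illustrative) of how per-item sub-primitive counts become storage offsets:

#include <vector>

// Sketch: exclusive prefix sum over per-primitive sub-primitive counts,
// producing the offset at which each primitive writes its extra sub-prims.
unsigned prefixOffsets(const std::vector<unsigned>& counts,
                       std::vector<unsigned>& offsets)
{
  offsets.resize(counts.size());
  unsigned sum = 0;
  for (size_t i = 0; i < counts.size(); i++) {
    offsets[i] = sum;   // exclusive: offset before adding this item
    sum += counts[i];
  }
  return sum;           // total number of generated sub-primitives
}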
diff --git a/thirdparty/embree/kernels/builders/splitter.h b/thirdparty/embree/kernels/builders/splitter.h
index f7720bd284..da89d0b178 100644
--- a/thirdparty/embree/kernels/builders/splitter.h
+++ b/thirdparty/embree/kernels/builders/splitter.h
@@ -128,28 +128,30 @@ namespace embree
const unsigned int mask = 0xFFFFFFFF >> RESERVED_NUM_SPATIAL_SPLITS_GEOMID_BITS;
const QuadMesh* mesh = (const QuadMesh*) scene->get(prim.geomID() & mask );
QuadMesh::Quad quad = mesh->quad(prim.primID());
- v[0] = mesh->vertex(quad.v[0]);
- v[1] = mesh->vertex(quad.v[1]);
- v[2] = mesh->vertex(quad.v[2]);
- v[3] = mesh->vertex(quad.v[3]);
- v[4] = mesh->vertex(quad.v[0]);
- inv_length[0] = Vec3fa(1.0f) / (v[1]-v[0]);
- inv_length[1] = Vec3fa(1.0f) / (v[2]-v[1]);
- inv_length[2] = Vec3fa(1.0f) / (v[3]-v[2]);
- inv_length[3] = Vec3fa(1.0f) / (v[0]-v[3]);
+ v[0] = mesh->vertex(quad.v[1]);
+ v[1] = mesh->vertex(quad.v[2]);
+ v[2] = mesh->vertex(quad.v[3]);
+ v[3] = mesh->vertex(quad.v[0]);
+ v[4] = mesh->vertex(quad.v[1]);
+ v[5] = mesh->vertex(quad.v[3]);
+ inv_length[0] = Vec3fa(1.0f) / (v[1] - v[0]);
+ inv_length[1] = Vec3fa(1.0f) / (v[2] - v[1]);
+ inv_length[2] = Vec3fa(1.0f) / (v[3] - v[2]);
+ inv_length[3] = Vec3fa(1.0f) / (v[4] - v[3]);
+ inv_length[4] = Vec3fa(1.0f) / (v[5] - v[4]);
}
__forceinline void operator() (const PrimRef& prim, const size_t dim, const float pos, PrimRef& left_o, PrimRef& right_o) const {
- splitPolygon<4>(prim,dim,pos,v,left_o,right_o);
+ splitPolygon<5>(prim,dim,pos,v,left_o,right_o);
}
__forceinline void operator() (const BBox3fa& prim, const size_t dim, const float pos, BBox3fa& left_o, BBox3fa& right_o) const {
- splitPolygon<4>(prim,dim,pos,v,inv_length,left_o,right_o);
+ splitPolygon<5>(prim,dim,pos,v,inv_length,left_o,right_o);
}
private:
- Vec3fa v[5];
- Vec3fa inv_length[4];
+ Vec3fa v[6];
+ Vec3fa inv_length[5];
};
struct QuadSplitterFactory
diff --git a/thirdparty/embree/kernels/bvh/bvh.cpp b/thirdparty/embree/kernels/bvh/bvh.cpp
index a84295f0da..f6cf626465 100644
--- a/thirdparty/embree/kernels/bvh/bvh.cpp
+++ b/thirdparty/embree/kernels/bvh/bvh.cpp
@@ -183,7 +183,7 @@ namespace embree
template class BVHN<8>;
#endif
-#if !defined(__AVX__) || !defined(EMBREE_TARGET_SSE2) && !defined(EMBREE_TARGET_SSE42)
+#if !defined(__AVX__) || !defined(EMBREE_TARGET_SSE2) && !defined(EMBREE_TARGET_SSE42) || defined(__aarch64__)
template class BVHN<4>;
#endif
}
diff --git a/thirdparty/embree/kernels/bvh/bvh_intersector_hybrid.cpp b/thirdparty/embree/kernels/bvh/bvh_intersector_hybrid.cpp
index 6e9a5a538e..1d393fd06b 100644
--- a/thirdparty/embree/kernels/bvh/bvh_intersector_hybrid.cpp
+++ b/thirdparty/embree/kernels/bvh/bvh_intersector_hybrid.cpp
@@ -230,7 +230,7 @@ namespace embree
continue;
/* switch to single ray traversal */
-#if (!defined(__WIN32__) || defined(__X86_64__)) && defined(__SSE4_2__)
+#if (!defined(__WIN32__) || defined(__X86_64__)) && ((defined(__aarch64__)) || defined(__SSE4_2__))
#if FORCE_SINGLE_MODE == 0
if (single)
#endif
@@ -676,7 +676,7 @@ namespace embree
continue;
/* switch to single ray traversal */
-#if (!defined(__WIN32__) || defined(__X86_64__)) && defined(__SSE4_2__)
+#if (!defined(__WIN32__) || defined(__X86_64__)) && ((defined(__aarch64__)) || defined(__SSE4_2__))
#if FORCE_SINGLE_MODE == 0
if (single)
#endif
diff --git a/thirdparty/embree/kernels/bvh/bvh_intersector_stream.h b/thirdparty/embree/kernels/bvh/bvh_intersector_stream.h
index 717f559677..c7e040fadb 100644
--- a/thirdparty/embree/kernels/bvh/bvh_intersector_stream.h
+++ b/thirdparty/embree/kernels/bvh/bvh_intersector_stream.h
@@ -170,12 +170,23 @@ namespace embree
TravRayKStream<K,robust> &p = packets[rayID / K];
const size_t i = rayID % K;
const vint<N> bitmask(shiftTable[rayID]);
+
+#if defined (__aarch64__)
+ const vfloat<N> tNearX = madd(bminX, p.rdir.x[i], p.neg_org_rdir.x[i]);
+ const vfloat<N> tNearY = madd(bminY, p.rdir.y[i], p.neg_org_rdir.y[i]);
+ const vfloat<N> tNearZ = madd(bminZ, p.rdir.z[i], p.neg_org_rdir.z[i]);
+ const vfloat<N> tFarX = madd(bmaxX, p.rdir.x[i], p.neg_org_rdir.x[i]);
+ const vfloat<N> tFarY = madd(bmaxY, p.rdir.y[i], p.neg_org_rdir.y[i]);
+ const vfloat<N> tFarZ = madd(bmaxZ, p.rdir.z[i], p.neg_org_rdir.z[i]);
+#else
const vfloat<N> tNearX = msub(bminX, p.rdir.x[i], p.org_rdir.x[i]);
const vfloat<N> tNearY = msub(bminY, p.rdir.y[i], p.org_rdir.y[i]);
const vfloat<N> tNearZ = msub(bminZ, p.rdir.z[i], p.org_rdir.z[i]);
const vfloat<N> tFarX = msub(bmaxX, p.rdir.x[i], p.org_rdir.x[i]);
const vfloat<N> tFarY = msub(bmaxY, p.rdir.y[i], p.org_rdir.y[i]);
const vfloat<N> tFarZ = msub(bmaxZ, p.rdir.z[i], p.org_rdir.z[i]);
+#endif
+
const vfloat<N> tNear = maxi(tNearX, tNearY, tNearZ, vfloat<N>(p.tnear[i]));
const vfloat<N> tFar = mini(tFarX , tFarY , tFarZ, vfloat<N>(p.tfar[i]));
diff --git a/thirdparty/embree/kernels/bvh/bvh_node_aabb.h b/thirdparty/embree/kernels/bvh/bvh_node_aabb.h
index 57530692bc..3fd9fc7d18 100644
--- a/thirdparty/embree/kernels/bvh/bvh_node_aabb.h
+++ b/thirdparty/embree/kernels/bvh/bvh_node_aabb.h
@@ -46,6 +46,14 @@ namespace embree
template<typename BuildRecord>
__forceinline NodeRef operator() (const BuildRecord& precord, const BuildRecord* crecords, NodeRef ref, NodeRef* children, const size_t num) const
{
+#if defined(DEBUG)
+ // check that empty children are only at the end of the child list
+ bool emptyChild = false;
+ for (size_t i=0; i<num; i++) {
+ emptyChild |= (children[i] == NodeRef::emptyNode);
+ assert(emptyChild == (children[i] == NodeRef::emptyNode));
+ }
+#endif
AABBNode_t* node = ref.getAABBNode();
for (size_t i=0; i<num; i++) node->setRef(i,children[i]);
return ref;
@@ -60,6 +68,14 @@ namespace embree
template<typename BuildRecord>
__forceinline NodeRef operator() (const BuildRecord& precord, const BuildRecord* crecords, NodeRef ref, NodeRef* children, const size_t num) const
{
+#if defined(DEBUG)
+ // check that empty children are only at the end of the child list
+ bool emptyChild = false;
+ for (size_t i=0; i<num; i++) {
+ emptyChild |= (children[i] == NodeRef::emptyNode);
+ assert(emptyChild == (children[i] == NodeRef::emptyNode));
+ }
+#endif
AABBNode_t* node = ref.getAABBNode();
for (size_t i=0; i<num; i++) node->setRef(i,children[i]);
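The DEBUG blocks added in these node setters all verify the same invariant: real children are packed at the front of the child list, and once an empty slot appears, everything after it is empty too. A self-contained sketch of that check (template parameter and helper name are illustrative):

#include <cassert>
#include <cstddef>

// Sketch: after the first empty child, every following slot must also be empty.
template <typename NodeRefT>
void assertEmptyChildrenAtEnd(const NodeRefT* children, size_t num, NodeRefT emptyNode)
{
  bool emptyChild = false;
  for (size_t i = 0; i < num; i++) {
    emptyChild |= (children[i] == emptyNode);
    assert(emptyChild == (children[i] == emptyNode));
  }
}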
diff --git a/thirdparty/embree/kernels/bvh/bvh_node_aabb_mb.h b/thirdparty/embree/kernels/bvh/bvh_node_aabb_mb.h
index c4cea7d8ba..001f526c25 100644
--- a/thirdparty/embree/kernels/bvh/bvh_node_aabb_mb.h
+++ b/thirdparty/embree/kernels/bvh/bvh_node_aabb_mb.h
@@ -31,6 +31,14 @@ namespace embree
template<typename BuildRecord>
__forceinline NodeRecordMB operator() (const BuildRecord& precord, const BuildRecord* crecords, NodeRef ref, NodeRecordMB* children, const size_t num) const
{
+#if defined(DEBUG)
+ // check that empty children are only at the end of the child list
+ bool emptyChild = false;
+ for (size_t i=0; i<num; i++) {
+ emptyChild |= (children[i].ref == NodeRef::emptyNode);
+ assert(emptyChild == (children[i].ref == NodeRef::emptyNode));
+ }
+#endif
AABBNodeMB_t* node = ref.getAABBNodeMB();
LBBox3fa bounds = empty;
diff --git a/thirdparty/embree/kernels/bvh/bvh_node_aabb_mb4d.h b/thirdparty/embree/kernels/bvh/bvh_node_aabb_mb4d.h
index 46a81d7581..3b966fd054 100644
--- a/thirdparty/embree/kernels/bvh/bvh_node_aabb_mb4d.h
+++ b/thirdparty/embree/kernels/bvh/bvh_node_aabb_mb4d.h
@@ -41,6 +41,14 @@ namespace embree
template<typename BuildRecord>
__forceinline void operator() (const BuildRecord&, const BuildRecord*, NodeRef ref, NodeRecordMB4D* children, const size_t num) const
{
+#if defined(DEBUG)
+ // check that empty children are only at the end of the child list
+ bool emptyChild = false;
+ for (size_t i=0; i<num; i++) {
+ emptyChild |= (children[i].ref == NodeRef::emptyNode);
+ assert(emptyChild == (children[i].ref == NodeRef::emptyNode));
+ }
+#endif
if (likely(ref.isAABBNodeMB())) {
for (size_t i=0; i<num; i++)
ref.getAABBNodeMB()->set(i, children[i]);
diff --git a/thirdparty/embree/kernels/bvh/bvh_node_qaabb.h b/thirdparty/embree/kernels/bvh/bvh_node_qaabb.h
index 2afc8c98e7..99671ddc5a 100644
--- a/thirdparty/embree/kernels/bvh/bvh_node_qaabb.h
+++ b/thirdparty/embree/kernels/bvh/bvh_node_qaabb.h
@@ -190,6 +190,14 @@ namespace embree
template<typename BuildRecord>
__forceinline NodeRef operator() (const BuildRecord& precord, const BuildRecord* crecords, NodeRef ref, NodeRef* children, const size_t num) const
{
+#if defined(DEBUG)
+ // check that empty children are only at the end of the child list
+ bool emptyChild = false;
+ for (size_t i=0; i<num; i++) {
+ emptyChild |= (children[i] == NodeRef::emptyNode);
+ assert(emptyChild == (children[i] == NodeRef::emptyNode));
+ }
+#endif
QuantizedNode_t* node = ref.quantizedNode();
for (size_t i=0; i<num; i++) node->setRef(i,children[i]);
return ref;
diff --git a/thirdparty/embree/kernels/bvh/bvh_statistics.cpp b/thirdparty/embree/kernels/bvh/bvh_statistics.cpp
index d857ff7d95..57f75bfd7e 100644
--- a/thirdparty/embree/kernels/bvh/bvh_statistics.cpp
+++ b/thirdparty/embree/kernels/bvh/bvh_statistics.cpp
@@ -162,7 +162,7 @@ namespace embree
template class BVHNStatistics<8>;
#endif
-#if !defined(__AVX__) || !defined(EMBREE_TARGET_SSE2) && !defined(EMBREE_TARGET_SSE42)
+#if !defined(__AVX__) || (!defined(EMBREE_TARGET_SSE2) && !defined(EMBREE_TARGET_SSE42)) || defined(__aarch64__)
template class BVHNStatistics<4>;
#endif
}
diff --git a/thirdparty/embree/kernels/bvh/node_intersector1.h b/thirdparty/embree/kernels/bvh/node_intersector1.h
index 1ec4fc63fc..17641fa888 100644
--- a/thirdparty/embree/kernels/bvh/node_intersector1.h
+++ b/thirdparty/embree/kernels/bvh/node_intersector1.h
@@ -5,6 +5,15 @@
#include "node_intersector.h"
+#if defined(__AVX2__)
+#define __FMA_X4__
+#endif
+
+#if defined(__aarch64__)
+#define __FMA_X4__
+#endif
+
+
namespace embree
{
namespace isa
@@ -29,9 +38,15 @@ namespace embree
org = Vec3vf<N>(ray_org.x,ray_org.y,ray_org.z);
dir = Vec3vf<N>(ray_dir.x,ray_dir.y,ray_dir.z);
rdir = Vec3vf<N>(ray_rdir.x,ray_rdir.y,ray_rdir.z);
-#if defined(__AVX2__) || defined(__ARM_NEON)
+#if defined(__FMA_X4__)
const Vec3fa ray_org_rdir = ray_org*ray_rdir;
+#if !defined(__aarch64__)
org_rdir = Vec3vf<N>(ray_org_rdir.x,ray_org_rdir.y,ray_org_rdir.z);
+#else
+ //for aarch64 we do not have an msub-equivalent instruction, so we negate org and use madd
+ //x86 will use msub
+ neg_org_rdir = Vec3vf<N>(-ray_org_rdir.x,-ray_org_rdir.y,-ray_org_rdir.z);
+#endif
#endif
nearX = ray_rdir.x >= 0.0f ? 0*sizeof(vfloat<N>) : 1*sizeof(vfloat<N>);
nearY = ray_rdir.y >= 0.0f ? 2*sizeof(vfloat<N>) : 3*sizeof(vfloat<N>);
@@ -49,8 +64,12 @@ namespace embree
org = Vec3vf<N>(ray_org.x[k], ray_org.y[k], ray_org.z[k]);
dir = Vec3vf<N>(ray_dir.x[k], ray_dir.y[k], ray_dir.z[k]);
rdir = Vec3vf<N>(ray_rdir.x[k], ray_rdir.y[k], ray_rdir.z[k]);
-#if defined(__AVX2__) || defined(__ARM_NEON)
- org_rdir = org*rdir;
+#if defined(__FMA_X4__)
+#if !defined(__aarch64__)
+ org_rdir = org*rdir;
+#else
+ neg_org_rdir = -(org*rdir);
+#endif
#endif
nearX = nearXYZ.x[k];
nearY = nearXYZ.y[k];
@@ -62,8 +81,14 @@ namespace embree
Vec3fa org_xyz, dir_xyz;
Vec3vf<N> org, dir, rdir;
-#if defined(__AVX2__) || defined(__ARM_NEON)
+#if defined(__FMA_X4__)
+#if !defined(__aarch64__)
Vec3vf<N> org_rdir;
+#else
+ //the aarch64 version keeps the negation of org_rdir and uses madd
+ //x86 uses msub
+ Vec3vf<N> neg_org_rdir;
+#endif
#endif
size_t nearX, nearY, nearZ;
size_t farX, farY, farZ;
@@ -404,13 +429,22 @@ namespace embree
template<>
__forceinline size_t intersectNode<4>(const typename BVH4::AABBNode* node, const TravRay<4,false>& ray, vfloat4& dist)
{
-#if defined(__AVX2__) || defined(__ARM_NEON)
+#if defined(__FMA_X4__)
+#if defined(__aarch64__)
+ const vfloat4 tNearX = madd(vfloat4::load((float*)((const char*)&node->lower_x+ray.nearX)), ray.rdir.x, ray.neg_org_rdir.x);
+ const vfloat4 tNearY = madd(vfloat4::load((float*)((const char*)&node->lower_x+ray.nearY)), ray.rdir.y, ray.neg_org_rdir.y);
+ const vfloat4 tNearZ = madd(vfloat4::load((float*)((const char*)&node->lower_x+ray.nearZ)), ray.rdir.z, ray.neg_org_rdir.z);
+ const vfloat4 tFarX = madd(vfloat4::load((float*)((const char*)&node->lower_x+ray.farX )), ray.rdir.x, ray.neg_org_rdir.x);
+ const vfloat4 tFarY = madd(vfloat4::load((float*)((const char*)&node->lower_x+ray.farY )), ray.rdir.y, ray.neg_org_rdir.y);
+ const vfloat4 tFarZ = madd(vfloat4::load((float*)((const char*)&node->lower_x+ray.farZ )), ray.rdir.z, ray.neg_org_rdir.z);
+#else
const vfloat4 tNearX = msub(vfloat4::load((float*)((const char*)&node->lower_x+ray.nearX)), ray.rdir.x, ray.org_rdir.x);
const vfloat4 tNearY = msub(vfloat4::load((float*)((const char*)&node->lower_x+ray.nearY)), ray.rdir.y, ray.org_rdir.y);
const vfloat4 tNearZ = msub(vfloat4::load((float*)((const char*)&node->lower_x+ray.nearZ)), ray.rdir.z, ray.org_rdir.z);
const vfloat4 tFarX = msub(vfloat4::load((float*)((const char*)&node->lower_x+ray.farX )), ray.rdir.x, ray.org_rdir.x);
const vfloat4 tFarY = msub(vfloat4::load((float*)((const char*)&node->lower_x+ray.farY )), ray.rdir.y, ray.org_rdir.y);
const vfloat4 tFarZ = msub(vfloat4::load((float*)((const char*)&node->lower_x+ray.farZ )), ray.rdir.z, ray.org_rdir.z);
+#endif
#else
const vfloat4 tNearX = (vfloat4::load((float*)((const char*)&node->lower_x+ray.nearX)) - ray.org.x) * ray.rdir.x;
const vfloat4 tNearY = (vfloat4::load((float*)((const char*)&node->lower_x+ray.nearY)) - ray.org.y) * ray.rdir.y;
@@ -450,13 +484,23 @@ namespace embree
template<>
__forceinline size_t intersectNode<8>(const typename BVH8::AABBNode* node, const TravRay<8,false>& ray, vfloat8& dist)
{
-#if defined(__AVX2__) || defined(__ARM_NEON)
+#if defined(__AVX2__)
+#if defined(__aarch64__)
+ const vfloat8 tNearX = madd(vfloat8::load((float*)((const char*)&node->lower_x+ray.nearX)), ray.rdir.x, ray.neg_org_rdir.x);
+ const vfloat8 tNearY = madd(vfloat8::load((float*)((const char*)&node->lower_x+ray.nearY)), ray.rdir.y, ray.neg_org_rdir.y);
+ const vfloat8 tNearZ = madd(vfloat8::load((float*)((const char*)&node->lower_x+ray.nearZ)), ray.rdir.z, ray.neg_org_rdir.z);
+ const vfloat8 tFarX = madd(vfloat8::load((float*)((const char*)&node->lower_x+ray.farX )), ray.rdir.x, ray.neg_org_rdir.x);
+ const vfloat8 tFarY = madd(vfloat8::load((float*)((const char*)&node->lower_x+ray.farY )), ray.rdir.y, ray.neg_org_rdir.y);
+ const vfloat8 tFarZ = madd(vfloat8::load((float*)((const char*)&node->lower_x+ray.farZ )), ray.rdir.z, ray.neg_org_rdir.z);
+#else
const vfloat8 tNearX = msub(vfloat8::load((float*)((const char*)&node->lower_x+ray.nearX)), ray.rdir.x, ray.org_rdir.x);
const vfloat8 tNearY = msub(vfloat8::load((float*)((const char*)&node->lower_x+ray.nearY)), ray.rdir.y, ray.org_rdir.y);
const vfloat8 tNearZ = msub(vfloat8::load((float*)((const char*)&node->lower_x+ray.nearZ)), ray.rdir.z, ray.org_rdir.z);
const vfloat8 tFarX = msub(vfloat8::load((float*)((const char*)&node->lower_x+ray.farX )), ray.rdir.x, ray.org_rdir.x);
const vfloat8 tFarY = msub(vfloat8::load((float*)((const char*)&node->lower_x+ray.farY )), ray.rdir.y, ray.org_rdir.y);
const vfloat8 tFarZ = msub(vfloat8::load((float*)((const char*)&node->lower_x+ray.farZ )), ray.rdir.z, ray.org_rdir.z);
+#endif
+
#else
const vfloat8 tNearX = (vfloat8::load((float*)((const char*)&node->lower_x+ray.nearX)) - ray.org.x) * ray.rdir.x;
const vfloat8 tNearY = (vfloat8::load((float*)((const char*)&node->lower_x+ray.nearY)) - ray.org.y) * ray.rdir.y;
@@ -522,13 +566,22 @@ namespace embree
const vfloat<N>* pFarX = (const vfloat<N>*)((const char*)&node->lower_x+ray.farX);
const vfloat<N>* pFarY = (const vfloat<N>*)((const char*)&node->lower_x+ray.farY);
const vfloat<N>* pFarZ = (const vfloat<N>*)((const char*)&node->lower_x+ray.farZ);
-#if defined(__AVX2__) || defined(__ARM_NEON)
+#if defined(__FMA_X4__)
+#if defined(__aarch64__)
+ const vfloat<N> tNearX = madd(madd(time,pNearX[6],vfloat<N>(pNearX[0])), ray.rdir.x, ray.neg_org_rdir.x);
+ const vfloat<N> tNearY = madd(madd(time,pNearY[6],vfloat<N>(pNearY[0])), ray.rdir.y, ray.neg_org_rdir.y);
+ const vfloat<N> tNearZ = madd(madd(time,pNearZ[6],vfloat<N>(pNearZ[0])), ray.rdir.z, ray.neg_org_rdir.z);
+ const vfloat<N> tFarX = madd(madd(time,pFarX [6],vfloat<N>(pFarX [0])), ray.rdir.x, ray.neg_org_rdir.x);
+ const vfloat<N> tFarY = madd(madd(time,pFarY [6],vfloat<N>(pFarY [0])), ray.rdir.y, ray.neg_org_rdir.y);
+ const vfloat<N> tFarZ = madd(madd(time,pFarZ [6],vfloat<N>(pFarZ [0])), ray.rdir.z, ray.neg_org_rdir.z);
+#else
const vfloat<N> tNearX = msub(madd(time,pNearX[6],vfloat<N>(pNearX[0])), ray.rdir.x, ray.org_rdir.x);
const vfloat<N> tNearY = msub(madd(time,pNearY[6],vfloat<N>(pNearY[0])), ray.rdir.y, ray.org_rdir.y);
const vfloat<N> tNearZ = msub(madd(time,pNearZ[6],vfloat<N>(pNearZ[0])), ray.rdir.z, ray.org_rdir.z);
const vfloat<N> tFarX = msub(madd(time,pFarX [6],vfloat<N>(pFarX [0])), ray.rdir.x, ray.org_rdir.x);
const vfloat<N> tFarY = msub(madd(time,pFarY [6],vfloat<N>(pFarY [0])), ray.rdir.y, ray.org_rdir.y);
const vfloat<N> tFarZ = msub(madd(time,pFarZ [6],vfloat<N>(pFarZ [0])), ray.rdir.z, ray.org_rdir.z);
+#endif
#else
const vfloat<N> tNearX = (madd(time,pNearX[6],vfloat<N>(pNearX[0])) - ray.org.x) * ray.rdir.x;
const vfloat<N> tNearY = (madd(time,pNearY[6],vfloat<N>(pNearY[0])) - ray.org.y) * ray.rdir.y;
@@ -537,7 +590,7 @@ namespace embree
const vfloat<N> tFarY = (madd(time,pFarY [6],vfloat<N>(pFarY [0])) - ray.org.y) * ray.rdir.y;
const vfloat<N> tFarZ = (madd(time,pFarZ [6],vfloat<N>(pFarZ [0])) - ray.org.z) * ray.rdir.z;
#endif
-#if defined(__AVX2__) && !defined(__AVX512F__) // HSW
+#if defined(__FMA_X4__) && !defined(__AVX512F__) // HSW
const vfloat<N> tNear = maxi(tNearX,tNearY,tNearZ,ray.tnear);
const vfloat<N> tFar = mini(tFarX ,tFarY ,tFarZ ,ray.tfar);
const vbool<N> vmask = asInt(tNear) > asInt(tFar);
@@ -598,13 +651,22 @@ namespace embree
const vfloat<N>* pFarX = (const vfloat<N>*)((const char*)&node->lower_x+ray.farX);
const vfloat<N>* pFarY = (const vfloat<N>*)((const char*)&node->lower_x+ray.farY);
const vfloat<N>* pFarZ = (const vfloat<N>*)((const char*)&node->lower_x+ray.farZ);
-#if defined (__AVX2__) || defined(__ARM_NEON)
+#if defined (__FMA_X4__)
+#if defined(__aarch64__)
+ const vfloat<N> tNearX = madd(madd(time,pNearX[6],vfloat<N>(pNearX[0])), ray.rdir.x, ray.neg_org_rdir.x);
+ const vfloat<N> tNearY = madd(madd(time,pNearY[6],vfloat<N>(pNearY[0])), ray.rdir.y, ray.neg_org_rdir.y);
+ const vfloat<N> tNearZ = madd(madd(time,pNearZ[6],vfloat<N>(pNearZ[0])), ray.rdir.z, ray.neg_org_rdir.z);
+ const vfloat<N> tFarX = madd(madd(time,pFarX [6],vfloat<N>(pFarX [0])), ray.rdir.x, ray.neg_org_rdir.x);
+ const vfloat<N> tFarY = madd(madd(time,pFarY [6],vfloat<N>(pFarY [0])), ray.rdir.y, ray.neg_org_rdir.y);
+ const vfloat<N> tFarZ = madd(madd(time,pFarZ [6],vfloat<N>(pFarZ [0])), ray.rdir.z, ray.neg_org_rdir.z);
+#else
const vfloat<N> tNearX = msub(madd(time,pNearX[6],vfloat<N>(pNearX[0])), ray.rdir.x, ray.org_rdir.x);
const vfloat<N> tNearY = msub(madd(time,pNearY[6],vfloat<N>(pNearY[0])), ray.rdir.y, ray.org_rdir.y);
const vfloat<N> tNearZ = msub(madd(time,pNearZ[6],vfloat<N>(pNearZ[0])), ray.rdir.z, ray.org_rdir.z);
const vfloat<N> tFarX = msub(madd(time,pFarX [6],vfloat<N>(pFarX [0])), ray.rdir.x, ray.org_rdir.x);
const vfloat<N> tFarY = msub(madd(time,pFarY [6],vfloat<N>(pFarY [0])), ray.rdir.y, ray.org_rdir.y);
const vfloat<N> tFarZ = msub(madd(time,pFarZ [6],vfloat<N>(pFarZ [0])), ray.rdir.z, ray.org_rdir.z);
+#endif
#else
const vfloat<N> tNearX = (madd(time,pNearX[6],vfloat<N>(pNearX[0])) - ray.org.x) * ray.rdir.x;
const vfloat<N> tNearY = (madd(time,pNearY[6],vfloat<N>(pNearY[0])) - ray.org.y) * ray.rdir.y;
@@ -613,7 +675,7 @@ namespace embree
const vfloat<N> tFarY = (madd(time,pFarY [6],vfloat<N>(pFarY [0])) - ray.org.y) * ray.rdir.y;
const vfloat<N> tFarZ = (madd(time,pFarZ [6],vfloat<N>(pFarZ [0])) - ray.org.z) * ray.rdir.z;
#endif
-#if defined(__AVX2__) && !defined(__AVX512F__)
+#if defined(__FMA_X4__) && !defined(__AVX512F__)
const vfloat<N> tNear = maxi(maxi(tNearX,tNearY),maxi(tNearZ,ray.tnear));
const vfloat<N> tFar = mini(mini(tFarX ,tFarY ),mini(tFarZ ,ray.tfar ));
#else
@@ -687,13 +749,22 @@ namespace embree
const vfloat4 lower_z = madd(node->dequantize<4>(ray.nearZ >> 2),scale_z,start_z);
const vfloat4 upper_z = madd(node->dequantize<4>(ray.farZ >> 2),scale_z,start_z);
-#if defined(__AVX2__) || defined(__ARM_NEON)
+#if defined(__FMA_X4__)
+#if defined(__aarch64__)
+ const vfloat4 tNearX = madd(lower_x, ray.rdir.x, ray.neg_org_rdir.x);
+ const vfloat4 tNearY = madd(lower_y, ray.rdir.y, ray.neg_org_rdir.y);
+ const vfloat4 tNearZ = madd(lower_z, ray.rdir.z, ray.neg_org_rdir.z);
+ const vfloat4 tFarX = madd(upper_x, ray.rdir.x, ray.neg_org_rdir.x);
+ const vfloat4 tFarY = madd(upper_y, ray.rdir.y, ray.neg_org_rdir.y);
+ const vfloat4 tFarZ = madd(upper_z, ray.rdir.z, ray.neg_org_rdir.z);
+#else
const vfloat4 tNearX = msub(lower_x, ray.rdir.x, ray.org_rdir.x);
const vfloat4 tNearY = msub(lower_y, ray.rdir.y, ray.org_rdir.y);
const vfloat4 tNearZ = msub(lower_z, ray.rdir.z, ray.org_rdir.z);
const vfloat4 tFarX = msub(upper_x, ray.rdir.x, ray.org_rdir.x);
const vfloat4 tFarY = msub(upper_y, ray.rdir.y, ray.org_rdir.y);
const vfloat4 tFarZ = msub(upper_z, ray.rdir.z, ray.org_rdir.z);
+#endif
#else
const vfloat4 tNearX = (lower_x - ray.org.x) * ray.rdir.x;
const vfloat4 tNearY = (lower_y - ray.org.y) * ray.rdir.y;
@@ -703,7 +774,7 @@ namespace embree
const vfloat4 tFarZ = (upper_z - ray.org.z) * ray.rdir.z;
#endif
-#if defined(__SSE4_1__) && !defined(__AVX512F__) // up to HSW
+#if defined(__aarch64__) || defined(__SSE4_1__) && !defined(__AVX512F__) // up to HSW
const vfloat4 tNear = maxi(tNearX,tNearY,tNearZ,ray.tnear);
const vfloat4 tFar = mini(tFarX ,tFarY ,tFarZ ,ray.tfar);
const vbool4 vmask = asInt(tNear) > asInt(tFar);
@@ -775,13 +846,22 @@ namespace embree
const vfloat8 lower_z = madd(node->dequantize<8>(ray.nearZ >> 2),scale_z,start_z);
const vfloat8 upper_z = madd(node->dequantize<8>(ray.farZ >> 2),scale_z,start_z);
-#if defined(__AVX2__) || defined(__ARM_NEON)
+#if defined(__AVX2__)
+#if defined(__aarch64__)
+ const vfloat8 tNearX = madd(lower_x, ray.rdir.x, ray.neg_org_rdir.x);
+ const vfloat8 tNearY = madd(lower_y, ray.rdir.y, ray.neg_org_rdir.y);
+ const vfloat8 tNearZ = madd(lower_z, ray.rdir.z, ray.neg_org_rdir.z);
+ const vfloat8 tFarX = madd(upper_x, ray.rdir.x, ray.neg_org_rdir.x);
+ const vfloat8 tFarY = madd(upper_y, ray.rdir.y, ray.neg_org_rdir.y);
+ const vfloat8 tFarZ = madd(upper_z, ray.rdir.z, ray.neg_org_rdir.z);
+#else
const vfloat8 tNearX = msub(lower_x, ray.rdir.x, ray.org_rdir.x);
const vfloat8 tNearY = msub(lower_y, ray.rdir.y, ray.org_rdir.y);
const vfloat8 tNearZ = msub(lower_z, ray.rdir.z, ray.org_rdir.z);
const vfloat8 tFarX = msub(upper_x, ray.rdir.x, ray.org_rdir.x);
const vfloat8 tFarY = msub(upper_y, ray.rdir.y, ray.org_rdir.y);
const vfloat8 tFarZ = msub(upper_z, ray.rdir.z, ray.org_rdir.z);
+#endif
#else
const vfloat8 tNearX = (lower_x - ray.org.x) * ray.rdir.x;
const vfloat8 tNearY = (lower_y - ray.org.y) * ray.rdir.y;
@@ -857,13 +937,22 @@ namespace embree
const vfloat<N> upper_y = node->dequantizeUpperY(time);
const vfloat<N> lower_z = node->dequantizeLowerZ(time);
const vfloat<N> upper_z = node->dequantizeUpperZ(time);
-#if defined(__AVX2__) || defined(__ARM_NEON)
+#if defined(__FMA_X4__)
+#if defined(__aarch64__)
+ const vfloat<N> tNearX = madd(lower_x, ray.rdir.x, ray.neg_org_rdir.x);
+ const vfloat<N> tNearY = madd(lower_y, ray.rdir.y, ray.neg_org_rdir.y);
+ const vfloat<N> tNearZ = madd(lower_z, ray.rdir.z, ray.neg_org_rdir.z);
+ const vfloat<N> tFarX = madd(upper_x, ray.rdir.x, ray.neg_org_rdir.x);
+ const vfloat<N> tFarY = madd(upper_y, ray.rdir.y, ray.neg_org_rdir.y);
+ const vfloat<N> tFarZ = madd(upper_z, ray.rdir.z, ray.neg_org_rdir.z);
+#else
const vfloat<N> tNearX = msub(lower_x, ray.rdir.x, ray.org_rdir.x);
const vfloat<N> tNearY = msub(lower_y, ray.rdir.y, ray.org_rdir.y);
const vfloat<N> tNearZ = msub(lower_z, ray.rdir.z, ray.org_rdir.z);
const vfloat<N> tFarX = msub(upper_x, ray.rdir.x, ray.org_rdir.x);
const vfloat<N> tFarY = msub(upper_y, ray.rdir.y, ray.org_rdir.y);
const vfloat<N> tFarZ = msub(upper_z, ray.rdir.z, ray.org_rdir.z);
+#endif
#else
const vfloat<N> tNearX = (lower_x - ray.org.x) * ray.rdir.x;
const vfloat<N> tNearY = (lower_y - ray.org.y) * ray.rdir.y;
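These aarch64 branches all rely on the same identity: NEON has a fused multiply-add but no direct a*b - c form matching x86's msub, so the patch stores the negated org*rdir once per ray and computes madd(bound, rdir, -org*rdir) == bound*rdir - org*rdir. A scalar sketch of the equivalence (illustrative, not Embree's vector types):

#include <cassert>
#include <cmath>

// Scalar stand-ins for the vector helpers used in the patch.
inline float madd(float a, float b, float c) { return std::fma(a, b, c);  } // a*b + c
inline float msub(float a, float b, float c) { return std::fma(a, b, -c); } // a*b - c

int main()
{
  const float bound = 3.5f, rdir = 0.25f, org_rdir = 1.75f;
  const float neg_org_rdir = -org_rdir;   // precomputed once per ray
  // x86 path:     msub(bound, rdir, org_rdir)
  // aarch64 path: madd(bound, rdir, neg_org_rdir)
  assert(msub(bound, rdir, org_rdir) == madd(bound, rdir, neg_org_rdir));
  return 0;
}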
diff --git a/thirdparty/embree/kernels/bvh/node_intersector_frustum.h b/thirdparty/embree/kernels/bvh/node_intersector_frustum.h
index 1f7215e5df..cad4e6de2d 100644
--- a/thirdparty/embree/kernels/bvh/node_intersector_frustum.h
+++ b/thirdparty/embree/kernels/bvh/node_intersector_frustum.h
@@ -75,9 +75,13 @@ namespace embree
min_rdir = select(pos_rdir, reduced_min_rdir, reduced_max_rdir);
max_rdir = select(pos_rdir, reduced_max_rdir, reduced_min_rdir);
+#if defined (__aarch64__)
+ neg_min_org_rdir = -(min_rdir * select(pos_rdir, reduced_max_org, reduced_min_org));
+ neg_max_org_rdir = -(max_rdir * select(pos_rdir, reduced_min_org, reduced_max_org));
+#else
min_org_rdir = min_rdir * select(pos_rdir, reduced_max_org, reduced_min_org);
max_org_rdir = max_rdir * select(pos_rdir, reduced_min_org, reduced_max_org);
-
+#endif
min_dist = reduced_min_dist;
max_dist = reduced_max_dist;
@@ -95,9 +99,13 @@ namespace embree
Vec3fa min_rdir;
Vec3fa max_rdir;
+#if defined (__aarch64__)
+ Vec3fa neg_min_org_rdir;
+ Vec3fa neg_max_org_rdir;
+#else
Vec3fa min_org_rdir;
Vec3fa max_org_rdir;
-
+#endif
float min_dist;
float max_dist;
};
@@ -191,13 +199,21 @@ namespace embree
const vfloat<N> bmaxY = *(const vfloat<N>*)((const char*)&node->lower_x + frustum.nf.farY);
const vfloat<N> bmaxZ = *(const vfloat<N>*)((const char*)&node->lower_x + frustum.nf.farZ);
+#if defined (__aarch64__)
+ const vfloat<N> fminX = madd(bminX, vfloat<N>(frustum.min_rdir.x), vfloat<N>(frustum.neg_min_org_rdir.x));
+ const vfloat<N> fminY = madd(bminY, vfloat<N>(frustum.min_rdir.y), vfloat<N>(frustum.neg_min_org_rdir.y));
+ const vfloat<N> fminZ = madd(bminZ, vfloat<N>(frustum.min_rdir.z), vfloat<N>(frustum.neg_min_org_rdir.z));
+ const vfloat<N> fmaxX = madd(bmaxX, vfloat<N>(frustum.max_rdir.x), vfloat<N>(frustum.neg_max_org_rdir.x));
+ const vfloat<N> fmaxY = madd(bmaxY, vfloat<N>(frustum.max_rdir.y), vfloat<N>(frustum.neg_max_org_rdir.y));
+ const vfloat<N> fmaxZ = madd(bmaxZ, vfloat<N>(frustum.max_rdir.z), vfloat<N>(frustum.neg_max_org_rdir.z));
+#else
const vfloat<N> fminX = msub(bminX, vfloat<N>(frustum.min_rdir.x), vfloat<N>(frustum.min_org_rdir.x));
const vfloat<N> fminY = msub(bminY, vfloat<N>(frustum.min_rdir.y), vfloat<N>(frustum.min_org_rdir.y));
const vfloat<N> fminZ = msub(bminZ, vfloat<N>(frustum.min_rdir.z), vfloat<N>(frustum.min_org_rdir.z));
const vfloat<N> fmaxX = msub(bmaxX, vfloat<N>(frustum.max_rdir.x), vfloat<N>(frustum.max_org_rdir.x));
const vfloat<N> fmaxY = msub(bmaxY, vfloat<N>(frustum.max_rdir.y), vfloat<N>(frustum.max_org_rdir.y));
const vfloat<N> fmaxZ = msub(bmaxZ, vfloat<N>(frustum.max_rdir.z), vfloat<N>(frustum.max_org_rdir.z));
-
+#endif
const vfloat<N> fmin = maxi(fminX, fminY, fminZ, vfloat<N>(frustum.min_dist));
dist = fmin;
const vfloat<N> fmax = mini(fmaxX, fmaxY, fmaxZ, vfloat<N>(frustum.max_dist));
diff --git a/thirdparty/embree/kernels/bvh/node_intersector_packet.h b/thirdparty/embree/kernels/bvh/node_intersector_packet.h
index d5498fc5db..4deacd620d 100644
--- a/thirdparty/embree/kernels/bvh/node_intersector_packet.h
+++ b/thirdparty/embree/kernels/bvh/node_intersector_packet.h
@@ -39,7 +39,9 @@ namespace embree
org = ray_org;
dir = ray_dir;
rdir = rcp_safe(ray_dir);
-#if defined(__AVX2__) || defined(__ARM_NEON)
+#if defined(__aarch64__)
+ neg_org_rdir = -(org * rdir);
+#elif defined(__AVX2__)
org_rdir = org * rdir;
#endif
@@ -55,7 +57,9 @@ namespace embree
Vec3vf<K> org;
Vec3vf<K> dir;
Vec3vf<K> rdir;
-#if defined(__AVX2__) || defined(__ARM_NEON)
+#if defined(__aarch64__)
+ Vec3vf<K> neg_org_rdir;
+#elif defined(__AVX2__)
Vec3vf<K> org_rdir;
#endif
Vec3vi<K> nearXYZ;
@@ -119,7 +123,14 @@ namespace embree
const TravRayKFast<K>& ray, vfloat<K>& dist)
{
- #if defined(__AVX2__) || defined(__ARM_NEON)
+#if defined(__aarch64__)
+ const vfloat<K> lclipMinX = madd(node->lower_x[i], ray.rdir.x, ray.neg_org_rdir.x);
+ const vfloat<K> lclipMinY = madd(node->lower_y[i], ray.rdir.y, ray.neg_org_rdir.y);
+ const vfloat<K> lclipMinZ = madd(node->lower_z[i], ray.rdir.z, ray.neg_org_rdir.z);
+ const vfloat<K> lclipMaxX = madd(node->upper_x[i], ray.rdir.x, ray.neg_org_rdir.x);
+ const vfloat<K> lclipMaxY = madd(node->upper_y[i], ray.rdir.y, ray.neg_org_rdir.y);
+ const vfloat<K> lclipMaxZ = madd(node->upper_z[i], ray.rdir.z, ray.neg_org_rdir.z);
+#elif defined(__AVX2__)
const vfloat<K> lclipMinX = msub(node->lower_x[i], ray.rdir.x, ray.org_rdir.x);
const vfloat<K> lclipMinY = msub(node->lower_y[i], ray.rdir.y, ray.org_rdir.y);
const vfloat<K> lclipMinZ = msub(node->lower_z[i], ray.rdir.z, ray.org_rdir.z);
@@ -199,7 +210,14 @@ namespace embree
const vfloat<K> vupper_y = madd(time, vfloat<K>(node->upper_dy[i]), vfloat<K>(node->upper_y[i]));
const vfloat<K> vupper_z = madd(time, vfloat<K>(node->upper_dz[i]), vfloat<K>(node->upper_z[i]));
-#if defined(__AVX2__) || defined(__ARM_NEON)
+#if defined(__aarch64__)
+ const vfloat<K> lclipMinX = madd(vlower_x, ray.rdir.x, ray.neg_org_rdir.x);
+ const vfloat<K> lclipMinY = madd(vlower_y, ray.rdir.y, ray.neg_org_rdir.y);
+ const vfloat<K> lclipMinZ = madd(vlower_z, ray.rdir.z, ray.neg_org_rdir.z);
+ const vfloat<K> lclipMaxX = madd(vupper_x, ray.rdir.x, ray.neg_org_rdir.x);
+ const vfloat<K> lclipMaxY = madd(vupper_y, ray.rdir.y, ray.neg_org_rdir.y);
+ const vfloat<K> lclipMaxZ = madd(vupper_z, ray.rdir.z, ray.neg_org_rdir.z);
+#elif defined(__AVX2__)
const vfloat<K> lclipMinX = msub(vlower_x, ray.rdir.x, ray.org_rdir.x);
const vfloat<K> lclipMinY = msub(vlower_y, ray.rdir.y, ray.org_rdir.y);
const vfloat<K> lclipMinZ = msub(vlower_z, ray.rdir.z, ray.org_rdir.z);
@@ -302,7 +320,14 @@ namespace embree
const vfloat<K> vupper_y = madd(time, vfloat<K>(node->upper_dy[i]), vfloat<K>(node->upper_y[i]));
const vfloat<K> vupper_z = madd(time, vfloat<K>(node->upper_dz[i]), vfloat<K>(node->upper_z[i]));
-#if defined(__AVX2__) || defined(__ARM_NEON)
+#if defined(__aarch64__)
+ const vfloat<K> lclipMinX = madd(vlower_x, ray.rdir.x, ray.neg_org_rdir.x);
+ const vfloat<K> lclipMinY = madd(vlower_y, ray.rdir.y, ray.neg_org_rdir.y);
+ const vfloat<K> lclipMinZ = madd(vlower_z, ray.rdir.z, ray.neg_org_rdir.z);
+ const vfloat<K> lclipMaxX = madd(vupper_x, ray.rdir.x, ray.neg_org_rdir.x);
+ const vfloat<K> lclipMaxY = madd(vupper_y, ray.rdir.y, ray.neg_org_rdir.y);
+ const vfloat<K> lclipMaxZ = madd(vupper_z, ray.rdir.z, ray.neg_org_rdir.z);
+#elif defined(__AVX2__)
const vfloat<K> lclipMinX = msub(vlower_x, ray.rdir.x, ray.org_rdir.x);
const vfloat<K> lclipMinY = msub(vlower_y, ray.rdir.y, ray.org_rdir.y);
const vfloat<K> lclipMinZ = msub(vlower_z, ray.rdir.z, ray.org_rdir.z);
@@ -464,7 +489,14 @@ namespace embree
const vfloat<N> lower_z = node->dequantizeLowerZ();
const vfloat<N> upper_z = node->dequantizeUpperZ();
- #if defined(__AVX2__) || defined(__ARM_NEON)
+ #if defined(__aarch64__)
+ const vfloat<K> lclipMinX = madd(lower_x[i], ray.rdir.x, ray.neg_org_rdir.x);
+ const vfloat<K> lclipMinY = madd(lower_y[i], ray.rdir.y, ray.neg_org_rdir.y);
+ const vfloat<K> lclipMinZ = madd(lower_z[i], ray.rdir.z, ray.neg_org_rdir.z);
+ const vfloat<K> lclipMaxX = madd(upper_x[i], ray.rdir.x, ray.neg_org_rdir.x);
+ const vfloat<K> lclipMaxY = madd(upper_y[i], ray.rdir.y, ray.neg_org_rdir.y);
+ const vfloat<K> lclipMaxZ = madd(upper_z[i], ray.rdir.z, ray.neg_org_rdir.z);
+ #elif defined(__AVX2__)
const vfloat<K> lclipMinX = msub(lower_x[i], ray.rdir.x, ray.org_rdir.x);
const vfloat<K> lclipMinY = msub(lower_y[i], ray.rdir.y, ray.org_rdir.y);
const vfloat<K> lclipMinZ = msub(lower_z[i], ray.rdir.z, ray.org_rdir.z);
@@ -549,7 +581,14 @@ namespace embree
const vfloat<K> lower_z = node->template dequantizeLowerZ<K>(i,time);
const vfloat<K> upper_z = node->template dequantizeUpperZ<K>(i,time);
-#if defined(__AVX2__) || defined(__ARM_NEON)
+#if defined(__aarch64__)
+ const vfloat<K> lclipMinX = madd(lower_x, ray.rdir.x, ray.neg_org_rdir.x);
+ const vfloat<K> lclipMinY = madd(lower_y, ray.rdir.y, ray.neg_org_rdir.y);
+ const vfloat<K> lclipMinZ = madd(lower_z, ray.rdir.z, ray.neg_org_rdir.z);
+ const vfloat<K> lclipMaxX = madd(upper_x, ray.rdir.x, ray.neg_org_rdir.x);
+ const vfloat<K> lclipMaxY = madd(upper_y, ray.rdir.y, ray.neg_org_rdir.y);
+ const vfloat<K> lclipMaxZ = madd(upper_z, ray.rdir.z, ray.neg_org_rdir.z);
+#elif defined(__AVX2__)
const vfloat<K> lclipMinX = msub(lower_x, ray.rdir.x, ray.org_rdir.x);
const vfloat<K> lclipMinY = msub(lower_y, ray.rdir.y, ray.org_rdir.y);
const vfloat<K> lclipMinZ = msub(lower_z, ray.rdir.z, ray.org_rdir.z);
diff --git a/thirdparty/embree/kernels/bvh/node_intersector_packet_stream.h b/thirdparty/embree/kernels/bvh/node_intersector_packet_stream.h
index 55b2c27231..943fd7043f 100644
--- a/thirdparty/embree/kernels/bvh/node_intersector_packet_stream.h
+++ b/thirdparty/embree/kernels/bvh/node_intersector_packet_stream.h
@@ -32,11 +32,19 @@ namespace embree
__forceinline void init(const Vec3vf<K>& ray_org, const Vec3vf<K>& ray_dir)
{
rdir = rcp_safe(ray_dir);
+#if defined(__aarch64__)
+ neg_org_rdir = -(ray_org * rdir);
+#else
org_rdir = ray_org * rdir;
+#endif
}
Vec3vf<K> rdir;
+#if defined(__aarch64__)
+ Vec3vf<K> neg_org_rdir;
+#else
Vec3vf<K> org_rdir;
+#endif
vfloat<K> tnear;
vfloat<K> tfar;
};
@@ -87,12 +95,21 @@ namespace embree
const vfloat<N> bmaxY = vfloat<N>(*(const vfloat<N>*)((const char*)&node->lower_x + nf.farY));
const vfloat<N> bmaxZ = vfloat<N>(*(const vfloat<N>*)((const char*)&node->lower_x + nf.farZ));
+#if defined (__aarch64__)
+ const vfloat<N> rminX = madd(bminX, vfloat<N>(ray.rdir.x[k]), vfloat<N>(ray.neg_org_rdir.x[k]));
+ const vfloat<N> rminY = madd(bminY, vfloat<N>(ray.rdir.y[k]), vfloat<N>(ray.neg_org_rdir.y[k]));
+ const vfloat<N> rminZ = madd(bminZ, vfloat<N>(ray.rdir.z[k]), vfloat<N>(ray.neg_org_rdir.z[k]));
+ const vfloat<N> rmaxX = madd(bmaxX, vfloat<N>(ray.rdir.x[k]), vfloat<N>(ray.neg_org_rdir.x[k]));
+ const vfloat<N> rmaxY = madd(bmaxY, vfloat<N>(ray.rdir.y[k]), vfloat<N>(ray.neg_org_rdir.y[k]));
+ const vfloat<N> rmaxZ = madd(bmaxZ, vfloat<N>(ray.rdir.z[k]), vfloat<N>(ray.neg_org_rdir.z[k]));
+#else
const vfloat<N> rminX = msub(bminX, vfloat<N>(ray.rdir.x[k]), vfloat<N>(ray.org_rdir.x[k]));
const vfloat<N> rminY = msub(bminY, vfloat<N>(ray.rdir.y[k]), vfloat<N>(ray.org_rdir.y[k]));
const vfloat<N> rminZ = msub(bminZ, vfloat<N>(ray.rdir.z[k]), vfloat<N>(ray.org_rdir.z[k]));
const vfloat<N> rmaxX = msub(bmaxX, vfloat<N>(ray.rdir.x[k]), vfloat<N>(ray.org_rdir.x[k]));
const vfloat<N> rmaxY = msub(bmaxY, vfloat<N>(ray.rdir.y[k]), vfloat<N>(ray.org_rdir.y[k]));
const vfloat<N> rmaxZ = msub(bmaxZ, vfloat<N>(ray.rdir.z[k]), vfloat<N>(ray.org_rdir.z[k]));
+#endif
const vfloat<N> rmin = maxi(rminX, rminY, rminZ, vfloat<N>(ray.tnear[k]));
const vfloat<N> rmax = mini(rmaxX, rmaxY, rmaxZ, vfloat<N>(ray.tfar[k]));
@@ -113,12 +130,21 @@ namespace embree
const vfloat<K> bmaxY = *(const float*)(ptr + nf.farY);
const vfloat<K> bmaxZ = *(const float*)(ptr + nf.farZ);
+#if defined (__aarch64__)
+ const vfloat<K> rminX = madd(bminX, ray.rdir.x, ray.neg_org_rdir.x);
+ const vfloat<K> rminY = madd(bminY, ray.rdir.y, ray.neg_org_rdir.y);
+ const vfloat<K> rminZ = madd(bminZ, ray.rdir.z, ray.neg_org_rdir.z);
+ const vfloat<K> rmaxX = madd(bmaxX, ray.rdir.x, ray.neg_org_rdir.x);
+ const vfloat<K> rmaxY = madd(bmaxY, ray.rdir.y, ray.neg_org_rdir.y);
+ const vfloat<K> rmaxZ = madd(bmaxZ, ray.rdir.z, ray.neg_org_rdir.z);
+#else
const vfloat<K> rminX = msub(bminX, ray.rdir.x, ray.org_rdir.x);
const vfloat<K> rminY = msub(bminY, ray.rdir.y, ray.org_rdir.y);
const vfloat<K> rminZ = msub(bminZ, ray.rdir.z, ray.org_rdir.z);
const vfloat<K> rmaxX = msub(bmaxX, ray.rdir.x, ray.org_rdir.x);
const vfloat<K> rmaxY = msub(bmaxY, ray.rdir.y, ray.org_rdir.y);
const vfloat<K> rmaxZ = msub(bmaxZ, ray.rdir.z, ray.org_rdir.z);
+#endif
const vfloat<K> rmin = maxi(rminX, rminY, rminZ, ray.tnear);
const vfloat<K> rmax = mini(rmaxX, rmaxY, rmaxZ, ray.tfar);
diff --git a/thirdparty/embree/kernels/common/accel.h b/thirdparty/embree/kernels/common/accel.h
index cc4ea1805b..d24326ce92 100644
--- a/thirdparty/embree/kernels/common/accel.h
+++ b/thirdparty/embree/kernels/common/accel.h
@@ -332,7 +332,7 @@ namespace embree
intersectorN.intersect(this,rayN,N,context);
}
-#if defined(__SSE__)
+#if defined(__SSE__) || defined(__ARM_NEON)
__forceinline void intersect(const vbool4& valid, RayHitK<4>& ray, IntersectContext* context) {
const vint<4> mask = valid.mask32();
intersect4(&mask,(RTCRayHit4&)ray,context);
@@ -388,7 +388,7 @@ namespace embree
intersectorN.occluded(this,rayN,N,context);
}
-#if defined(__SSE__)
+#if defined(__SSE__) || defined(__ARM_NEON)
__forceinline void occluded(const vbool4& valid, RayK<4>& ray, IntersectContext* context) {
const vint<4> mask = valid.mask32();
occluded4(&mask,(RTCRay4&)ray,context);
diff --git a/thirdparty/embree/kernels/common/acceln.cpp b/thirdparty/embree/kernels/common/acceln.cpp
index 32a27c560a..111c62083d 100644
--- a/thirdparty/embree/kernels/common/acceln.cpp
+++ b/thirdparty/embree/kernels/common/acceln.cpp
@@ -97,7 +97,7 @@ namespace embree
for (size_t i=0; i<This->accels.size(); i++) {
if (This->accels[i]->isEmpty()) continue;
This->accels[i]->intersectors.occluded4(valid,ray,context);
-#if defined(__SSE2__)
+#if defined(__SSE2__) || defined(__ARM_NEON)
vbool4 valid0 = asBool(((vint4*)valid)[0]);
vbool4 hit0 = ((vfloat4*)ray.tfar)[0] >= vfloat4(zero);
if (unlikely(none(valid0 & hit0))) break;
@@ -111,7 +111,7 @@ namespace embree
for (size_t i=0; i<This->accels.size(); i++) {
if (This->accels[i]->isEmpty()) continue;
This->accels[i]->intersectors.occluded8(valid,ray,context);
-#if defined(__SSE2__) // FIXME: use higher ISA
+#if defined(__SSE2__) || defined(__ARM_NEON) // FIXME: use higher ISA
vbool4 valid0 = asBool(((vint4*)valid)[0]);
vbool4 hit0 = ((vfloat4*)ray.tfar)[0] >= vfloat4(zero);
vbool4 valid1 = asBool(((vint4*)valid)[1]);
@@ -127,7 +127,7 @@ namespace embree
for (size_t i=0; i<This->accels.size(); i++) {
if (This->accels[i]->isEmpty()) continue;
This->accels[i]->intersectors.occluded16(valid,ray,context);
-#if defined(__SSE2__) // FIXME: use higher ISA
+#if defined(__SSE2__) || defined(__ARM_NEON) // FIXME: use higher ISA
vbool4 valid0 = asBool(((vint4*)valid)[0]);
vbool4 hit0 = ((vfloat4*)ray.tfar)[0] >= vfloat4(zero);
vbool4 valid1 = asBool(((vint4*)valid)[1]);
diff --git a/thirdparty/embree/kernels/common/accelset.h b/thirdparty/embree/kernels/common/accelset.h
index 90b184a07b..1b67120c97 100644
--- a/thirdparty/embree/kernels/common/accelset.h
+++ b/thirdparty/embree/kernels/common/accelset.h
@@ -14,21 +14,14 @@ namespace embree
struct IntersectFunctionNArguments;
struct OccludedFunctionNArguments;
- typedef void (*ReportIntersectionFunc) (IntersectFunctionNArguments* args, const RTCFilterFunctionNArguments* filter_args);
- typedef void (*ReportOcclusionFunc) (OccludedFunctionNArguments* args, const RTCFilterFunctionNArguments* filter_args);
-
struct IntersectFunctionNArguments : public RTCIntersectFunctionNArguments
{
- IntersectContext* internal_context;
Geometry* geometry;
- ReportIntersectionFunc report;
};
struct OccludedFunctionNArguments : public RTCOccludedFunctionNArguments
{
- IntersectContext* internal_context;
Geometry* geometry;
- ReportOcclusionFunc report;
};
/*! Base class for set of acceleration structures. */
@@ -145,7 +138,7 @@ namespace embree
public:
/*! Intersects a single ray with the scene. */
- __forceinline void intersect (RayHit& ray, unsigned int geomID, unsigned int primID, IntersectContext* context, ReportIntersectionFunc report)
+ __forceinline void intersect (RayHit& ray, unsigned int geomID, unsigned int primID, IntersectContext* context)
{
assert(primID < size());
assert(intersectorN.intersect);
@@ -159,15 +152,13 @@ namespace embree
args.N = 1;
args.geomID = geomID;
args.primID = primID;
- args.internal_context = context;
args.geometry = this;
- args.report = report;
intersectorN.intersect(&args);
}
/*! Tests if single ray is occluded by the scene. */
- __forceinline void occluded (Ray& ray, unsigned int geomID, unsigned int primID, IntersectContext* context, ReportOcclusionFunc report)
+ __forceinline void occluded (Ray& ray, unsigned int geomID, unsigned int primID, IntersectContext* context)
{
assert(primID < size());
assert(intersectorN.occluded);
@@ -181,16 +172,14 @@ namespace embree
args.N = 1;
args.geomID = geomID;
args.primID = primID;
- args.internal_context = context;
args.geometry = this;
- args.report = report;
intersectorN.occluded(&args);
}
/*! Intersects a packet of K rays with the scene. */
template<int K>
- __forceinline void intersect (const vbool<K>& valid, RayHitK<K>& ray, unsigned int geomID, unsigned int primID, IntersectContext* context, ReportIntersectionFunc report)
+ __forceinline void intersect (const vbool<K>& valid, RayHitK<K>& ray, unsigned int geomID, unsigned int primID, IntersectContext* context)
{
assert(primID < size());
assert(intersectorN.intersect);
@@ -204,16 +193,14 @@ namespace embree
args.N = K;
args.geomID = geomID;
args.primID = primID;
- args.internal_context = context;
args.geometry = this;
- args.report = report;
intersectorN.intersect(&args);
}
/*! Tests if a packet of K rays is occluded by the scene. */
template<int K>
- __forceinline void occluded (const vbool<K>& valid, RayK<K>& ray, unsigned int geomID, unsigned int primID, IntersectContext* context, ReportOcclusionFunc report)
+ __forceinline void occluded (const vbool<K>& valid, RayK<K>& ray, unsigned int geomID, unsigned int primID, IntersectContext* context)
{
assert(primID < size());
assert(intersectorN.occluded);
@@ -227,9 +214,7 @@ namespace embree
args.N = K;
args.geomID = geomID;
args.primID = primID;
- args.internal_context = context;
args.geometry = this;
- args.report = report;
intersectorN.occluded(&args);
}
diff --git a/thirdparty/embree/kernels/common/alloc.cpp b/thirdparty/embree/kernels/common/alloc.cpp
index 1a0e1aeed3..38a76225f4 100644
--- a/thirdparty/embree/kernels/common/alloc.cpp
+++ b/thirdparty/embree/kernels/common/alloc.cpp
@@ -3,6 +3,9 @@
#include "alloc.h"
#include "../../common/sys/thread.h"
+#if defined(APPLE) && defined(__aarch64__)
+#include "../../common/sys/barrier.h"
+#endif
namespace embree
{
diff --git a/thirdparty/embree/kernels/common/alloc.h b/thirdparty/embree/kernels/common/alloc.h
index 4458e35c24..12769df2c8 100644
--- a/thirdparty/embree/kernels/common/alloc.h
+++ b/thirdparty/embree/kernels/common/alloc.h
@@ -8,6 +8,10 @@
#include "scene.h"
#include "primref.h"
+#if defined(APPLE) && defined(__aarch64__)
+#include <mutex>
+#endif
+
namespace embree
{
class FastAllocator
@@ -26,7 +30,7 @@ namespace embree
public:
struct ThreadLocal2;
- enum AllocationType { ALIGNED_MALLOC, OS_MALLOC, SHARED, ANY_TYPE };
+ enum AllocationType { ALIGNED_MALLOC, EMBREE_OS_MALLOC, SHARED, ANY_TYPE };
/*! Per thread structure holding the current memory block. */
struct __aligned(64) ThreadLocal
@@ -132,7 +136,11 @@ namespace embree
{
assert(alloc_i);
if (alloc.load() == alloc_i) return;
+#if defined(APPLE) && defined(__aarch64__)
+ std::scoped_lock lock(mutex);
+#else
Lock<SpinLock> lock(mutex);
+#endif
//if (alloc.load() == alloc_i) return; // not required as only one thread calls bind
if (alloc.load()) {
alloc.load()->bytesUsed += alloc0.getUsedBytes() + alloc1.getUsedBytes();
@@ -150,7 +158,11 @@ namespace embree
{
assert(alloc_i);
if (alloc.load() != alloc_i) return;
+#if defined(APPLE) && defined(__aarch64__)
+ std::scoped_lock lock(mutex);
+#else
Lock<SpinLock> lock(mutex);
+#endif
if (alloc.load() != alloc_i) return; // required as a different thread calls unbind
alloc.load()->bytesUsed += alloc0.getUsedBytes() + alloc1.getUsedBytes();
alloc.load()->bytesFree += alloc0.getFreeBytes() + alloc1.getFreeBytes();
@@ -161,7 +173,11 @@ namespace embree
}
public:
+#if defined(APPLE) && defined(__aarch64__)
+ std::mutex mutex;
+#else
SpinLock mutex; //!< required as unbind is called from other threads
+#endif
std::atomic<FastAllocator*> alloc; //!< parent allocator
ThreadLocal alloc0;
ThreadLocal alloc1;
@@ -169,7 +185,7 @@ namespace embree
FastAllocator (Device* device, bool osAllocation)
: device(device), slotMask(0), usedBlocks(nullptr), freeBlocks(nullptr), use_single_mode(false), defaultBlockSize(PAGE_SIZE), estimatedSize(0),
- growSize(PAGE_SIZE), maxGrowSize(maxAllocationSize), log2_grow_size_scale(0), bytesUsed(0), bytesFree(0), bytesWasted(0), atype(osAllocation ? OS_MALLOC : ALIGNED_MALLOC),
+ growSize(PAGE_SIZE), maxGrowSize(maxAllocationSize), log2_grow_size_scale(0), bytesUsed(0), bytesFree(0), bytesWasted(0), atype(osAllocation ? EMBREE_OS_MALLOC : ALIGNED_MALLOC),
primrefarray(device,0)
{
for (size_t i=0; i<MAX_THREAD_USED_BLOCK_SLOTS; i++)
@@ -206,7 +222,7 @@ namespace embree
void setOSallocation(bool flag)
{
- atype = flag ? OS_MALLOC : ALIGNED_MALLOC;
+ atype = flag ? EMBREE_OS_MALLOC : ALIGNED_MALLOC;
}
private:
@@ -217,7 +233,11 @@ namespace embree
ThreadLocal2* alloc = thread_local_allocator2;
if (alloc == nullptr) {
thread_local_allocator2 = alloc = new ThreadLocal2;
+#if defined(APPLE) && defined(__aarch64__)
+ std::scoped_lock lock(s_thread_local_allocators_lock);
+#else
Lock<SpinLock> lock(s_thread_local_allocators_lock);
+#endif
s_thread_local_allocators.push_back(make_unique(alloc));
}
return alloc;
@@ -227,7 +247,11 @@ namespace embree
__forceinline void join(ThreadLocal2* alloc)
{
+#if defined(APPLE) && defined(__aarch64__)
+ std::scoped_lock lock(s_thread_local_allocators_lock);
+#else
Lock<SpinLock> lock(thread_local_allocators_lock);
+#endif
thread_local_allocators.push_back(alloc);
}
@@ -492,7 +516,11 @@ namespace embree
/* parallel block creation in case of no freeBlocks, avoids single global mutex */
if (likely(freeBlocks.load() == nullptr))
{
+#if defined(APPLE) && defined(__aarch64__)
+ std::scoped_lock lock(slotMutex[slot]);
+#else
Lock<SpinLock> lock(slotMutex[slot]);
+#endif
if (myUsedBlocks == threadUsedBlocks[slot]) {
const size_t alignedBytes = (bytes+(align-1)) & ~(align-1);
const size_t allocSize = max(min(growSize,maxGrowSize),alignedBytes);
@@ -505,7 +533,11 @@ namespace embree
/* if this fails allocate new block */
{
- Lock<SpinLock> lock(mutex);
+#if defined(APPLE) && defined(__aarch64__)
+ std::scoped_lock lock(mutex);
+#else
+ Lock<SpinLock> lock(mutex);
+#endif
if (myUsedBlocks == threadUsedBlocks[slot])
{
if (freeBlocks.load() != nullptr) {
@@ -527,7 +559,11 @@ namespace embree
/*! add new block */
void addBlock(void* ptr, ssize_t bytes)
{
+#if defined(APPLE) && defined(__aarch64__)
+ std::scoped_lock lock(mutex);
+#else
Lock<SpinLock> lock(mutex);
+#endif
const size_t sizeof_Header = offsetof(Block,data[0]);
void* aptr = (void*) ((((size_t)ptr)+maxAlignment-1) & ~(maxAlignment-1));
size_t ofs = (size_t) aptr - (size_t) ptr;
@@ -613,8 +649,8 @@ namespace embree
bytesWasted(alloc->bytesWasted),
stat_all(alloc,ANY_TYPE),
stat_malloc(alloc,ALIGNED_MALLOC),
- stat_4K(alloc,OS_MALLOC,false),
- stat_2M(alloc,OS_MALLOC,true),
+ stat_4K(alloc,EMBREE_OS_MALLOC,false),
+ stat_2M(alloc,EMBREE_OS_MALLOC,true),
stat_shared(alloc,SHARED) {}
AllStatistics (size_t bytesUsed,
@@ -707,7 +743,7 @@ namespace embree
/* We avoid using os_malloc for small blocks as this could
* cause a risk of fragmenting the virtual address space and
* reach the limit of vm.max_map_count = 65k under Linux. */
- if (atype == OS_MALLOC && bytesAllocate < maxAllocationSize)
+ if (atype == EMBREE_OS_MALLOC && bytesAllocate < maxAllocationSize)
atype = ALIGNED_MALLOC;
/* we need to additionally allocate some header */
@@ -716,7 +752,7 @@ namespace embree
bytesReserve = sizeof_Header+bytesReserve;
/* consume full 4k pages with using os_malloc */
- if (atype == OS_MALLOC) {
+ if (atype == EMBREE_OS_MALLOC) {
bytesAllocate = ((bytesAllocate+PAGE_SIZE-1) & ~(PAGE_SIZE-1));
bytesReserve = ((bytesReserve +PAGE_SIZE-1) & ~(PAGE_SIZE-1));
}
@@ -748,11 +784,11 @@ namespace embree
return new (ptr) Block(ALIGNED_MALLOC,bytesAllocate-sizeof_Header,bytesAllocate-sizeof_Header,next,alignment);
}
}
- else if (atype == OS_MALLOC)
+ else if (atype == EMBREE_OS_MALLOC)
{
if (device) device->memoryMonitor(bytesAllocate,false);
bool huge_pages; ptr = os_malloc(bytesReserve,huge_pages);
- return new (ptr) Block(OS_MALLOC,bytesAllocate-sizeof_Header,bytesReserve-sizeof_Header,next,0,huge_pages);
+ return new (ptr) Block(EMBREE_OS_MALLOC,bytesAllocate-sizeof_Header,bytesReserve-sizeof_Header,next,0,huge_pages);
}
else
assert(false);
@@ -796,7 +832,7 @@ namespace embree
if (device) device->memoryMonitor(-sizeof_Alloced,true);
}
- else if (atype == OS_MALLOC) {
+ else if (atype == EMBREE_OS_MALLOC) {
size_t sizeof_This = sizeof_Header+reserveEnd;
os_free(this,sizeof_This,huge_pages);
if (device) device->memoryMonitor(-sizeof_Alloced,true);
@@ -857,7 +893,7 @@ namespace embree
bool hasType(AllocationType atype_i, bool huge_pages_i) const
{
if (atype_i == ANY_TYPE ) return true;
- else if (atype == OS_MALLOC) return atype_i == atype && huge_pages_i == huge_pages;
+ else if (atype == EMBREE_OS_MALLOC) return atype_i == atype && huge_pages_i == huge_pages;
else return atype_i == atype;
}
@@ -906,7 +942,7 @@ namespace embree
void print_block() const
{
if (atype == ALIGNED_MALLOC) std::cout << "A";
- else if (atype == OS_MALLOC) std::cout << "O";
+ else if (atype == EMBREE_OS_MALLOC) std::cout << "O";
else if (atype == SHARED) std::cout << "S";
if (huge_pages) std::cout << "H";
size_t bytesUsed = getBlockUsedBytes();
@@ -936,7 +972,11 @@ namespace embree
std::atomic<Block*> freeBlocks;
std::atomic<Block*> threadBlocks[MAX_THREAD_USED_BLOCK_SLOTS];
- SpinLock slotMutex[MAX_THREAD_USED_BLOCK_SLOTS];
+#if defined(APPLE) && defined(__aarch64__)
+ std::mutex slotMutex[MAX_THREAD_USED_BLOCK_SLOTS];
+#else
+ PaddedSpinLock slotMutex[MAX_THREAD_USED_BLOCK_SLOTS];
+#endif
bool use_single_mode;
size_t defaultBlockSize;
@@ -950,7 +990,11 @@ namespace embree
static __thread ThreadLocal2* thread_local_allocator2;
static SpinLock s_thread_local_allocators_lock;
static std::vector<std::unique_ptr<ThreadLocal2>> s_thread_local_allocators;
+#if defined(APPLE) && defined(__aarch64__)
+ std::mutex thread_local_allocators_lock;
+#else
SpinLock thread_local_allocators_lock;
+#endif
std::vector<ThreadLocal2*> thread_local_allocators;
AllocationType atype;
mvector<PrimRef> primrefarray; //!< primrefarray used to allocate nodes
diff --git a/thirdparty/embree/kernels/common/device.cpp b/thirdparty/embree/kernels/common/device.cpp
index 068e0c2983..833ec65139 100644
--- a/thirdparty/embree/kernels/common/device.cpp
+++ b/thirdparty/embree/kernels/common/device.cpp
@@ -66,7 +66,11 @@ namespace embree
case CPU::CORE1: frequency_level = FREQUENCY_SIMD128; break;
case CPU::XEON_PHI_KNIGHTS_MILL : frequency_level = FREQUENCY_SIMD512; break;
case CPU::XEON_PHI_KNIGHTS_LANDING: frequency_level = FREQUENCY_SIMD512; break;
+#if defined(__APPLE__)
+ case CPU::ARM: frequency_level = FREQUENCY_SIMD256; break; // Apple M1 supports high throughput for SIMD4
+#else
case CPU::ARM: frequency_level = FREQUENCY_SIMD128; break;
+#endif
}
/* initialize global state */
diff --git a/thirdparty/embree/kernels/common/geometry.h b/thirdparty/embree/kernels/common/geometry.h
index 2f9f2e7c94..593990f5b1 100644
--- a/thirdparty/embree/kernels/common/geometry.h
+++ b/thirdparty/embree/kernels/common/geometry.h
@@ -91,7 +91,7 @@ namespace embree
size_t numFilterFunctions; //!< number of geometries with filter functions enabled
size_t numTriangles; //!< number of enabled triangles
- size_t numMBTriangles; //!< number of enabled motion blured triangles
+ size_t numMBTriangles; //!< number of enabled motion blurred triangles
size_t numQuads; //!< number of enabled quads
size_t numMBQuads; //!< number of enabled motion blurred quads
size_t numBezierCurves; //!< number of enabled curves
@@ -99,7 +99,7 @@ namespace embree
size_t numLineSegments; //!< number of enabled line segments
size_t numMBLineSegments; //!< number of enabled line motion blurred segments
size_t numSubdivPatches; //!< number of enabled subdivision patches
- size_t numMBSubdivPatches; //!< number of enabled motion blured subdivision patches
+ size_t numMBSubdivPatches; //!< number of enabled motion blurred subdivision patches
size_t numUserGeometries; //!< number of enabled user geometries
size_t numMBUserGeometries; //!< number of enabled motion blurred user geometries
size_t numInstancesCheap; //!< number of enabled cheap instances
diff --git a/thirdparty/embree/kernels/common/isa.h b/thirdparty/embree/kernels/common/isa.h
index ae6556336c..9e1132e1a0 100644
--- a/thirdparty/embree/kernels/common/isa.h
+++ b/thirdparty/embree/kernels/common/isa.h
@@ -44,7 +44,7 @@ namespace embree
#define SELECT_SYMBOL_DEFAULT(features,intersector) \
intersector = isa::intersector;
-#if defined(__SSE__)
+#if defined(__SSE__) || defined(__ARM_NEON)
#if !defined(EMBREE_TARGET_SIMD4)
#define EMBREE_TARGET_SIMD4
#endif
diff --git a/thirdparty/embree/kernels/common/ray.h b/thirdparty/embree/kernels/common/ray.h
index 7b951cc1e8..3c8ee3989c 100644
--- a/thirdparty/embree/kernels/common/ray.h
+++ b/thirdparty/embree/kernels/common/ray.h
@@ -6,7 +6,7 @@
#include "default.h"
#include "instance_stack.h"
-// FIXME: if ray gets seperated into ray* and hit, uload4 needs to be adjusted
+// FIXME: if ray gets separated into ray* and hit, uload4 needs to be adjusted
namespace embree
{
diff --git a/thirdparty/embree/kernels/common/rtcore.cpp b/thirdparty/embree/kernels/common/rtcore.cpp
index 94b3819e42..a6ea55bfc4 100644
--- a/thirdparty/embree/kernels/common/rtcore.cpp
+++ b/thirdparty/embree/kernels/common/rtcore.cpp
@@ -7,6 +7,7 @@
#include "device.h"
#include "scene.h"
#include "context.h"
+#include "../geometry/filter.h"
#include "../../include/embree3/rtcore_ray.h"
using namespace embree;
@@ -482,7 +483,7 @@ RTC_NAMESPACE_BEGIN;
IntersectContext context(scene,user_context);
#if !defined(EMBREE_RAY_PACKETS)
- Ray4* ray4 = (Ray4*) rayhit;
+ RayHit4* ray4 = (RayHit4*) rayhit;
for (size_t i=0; i<4; i++) {
if (!valid[i]) continue;
RayHit ray1; ray4->get(i,ray1);
@@ -513,7 +514,7 @@ RTC_NAMESPACE_BEGIN;
IntersectContext context(scene,user_context);
#if !defined(EMBREE_RAY_PACKETS)
- Ray8* ray8 = (Ray8*) rayhit;
+ RayHit8* ray8 = (RayHit8*) rayhit;
for (size_t i=0; i<8; i++) {
if (!valid[i]) continue;
RayHit ray1; ray8->get(i,ray1);
@@ -546,7 +547,7 @@ RTC_NAMESPACE_BEGIN;
IntersectContext context(scene,user_context);
#if !defined(EMBREE_RAY_PACKETS)
- Ray16* ray16 = (Ray16*) rayhit;
+ RayHit16* ray16 = (RayHit16*) rayhit;
for (size_t i=0; i<16; i++) {
if (!valid[i]) continue;
RayHit ray1; ray16->get(i,ray1);
@@ -1097,13 +1098,13 @@ RTC_NAMESPACE_BEGIN;
RTC_API void rtcFilterIntersection(const struct RTCIntersectFunctionNArguments* const args_i, const struct RTCFilterFunctionNArguments* filter_args)
{
IntersectFunctionNArguments* args = (IntersectFunctionNArguments*) args_i;
- args->report(args,filter_args);
+ isa::reportIntersection1(args, filter_args);
}
RTC_API void rtcFilterOcclusion(const struct RTCOccludedFunctionNArguments* const args_i, const struct RTCFilterFunctionNArguments* filter_args)
{
OccludedFunctionNArguments* args = (OccludedFunctionNArguments*) args_i;
- args->report(args,filter_args);
+ isa::reportOcclusion1(args,filter_args);
}
RTC_API RTCGeometry rtcNewGeometry (RTCDevice hdevice, RTCGeometryType type)
@@ -1763,4 +1764,19 @@ RTC_NAMESPACE_BEGIN;
return nullptr;
}
+ RTC_API RTCGeometry rtcGetGeometryThreadSafe (RTCScene hscene, unsigned int geomID)
+ {
+ Scene* scene = (Scene*) hscene;
+ RTC_CATCH_BEGIN;
+ RTC_TRACE(rtcGetGeometryThreadSafe);
+#if defined(DEBUG)
+ RTC_VERIFY_HANDLE(hscene);
+ RTC_VERIFY_GEOMID(geomID);
+#endif
+ Ref<Geometry> geom = scene->get_locked(geomID);
+ return (RTCGeometry) geom.ptr;
+ RTC_CATCH_END2(scene);
+ return nullptr;
+ }
+
RTC_NAMESPACE_END
diff --git a/thirdparty/embree/kernels/common/rtcore.h b/thirdparty/embree/kernels/common/rtcore.h
index f8aad7c7cb..ac58a84d6f 100644
--- a/thirdparty/embree/kernels/common/rtcore.h
+++ b/thirdparty/embree/kernels/common/rtcore.h
@@ -26,56 +26,59 @@ namespace embree
/*! Macros used in the rtcore API implementation */
// -- GODOT start --
-// #define RTC_CATCH_BEGIN try {
#define RTC_CATCH_BEGIN
-
-// #define RTC_CATCH_END(device) \
-// } catch (std::bad_alloc&) { \
-// Device::process_error(device,RTC_ERROR_OUT_OF_MEMORY,"out of memory"); \
-// } catch (rtcore_error& e) { \
-// Device::process_error(device,e.error,e.what()); \
-// } catch (std::exception& e) { \
-// Device::process_error(device,RTC_ERROR_UNKNOWN,e.what()); \
-// } catch (...) { \
-// Device::process_error(device,RTC_ERROR_UNKNOWN,"unknown exception caught"); \
-// }
#define RTC_CATCH_END(device)
-
-// #define RTC_CATCH_END2(scene) \
-// } catch (std::bad_alloc&) { \
-// Device* device = scene ? scene->device : nullptr; \
-// Device::process_error(device,RTC_ERROR_OUT_OF_MEMORY,"out of memory"); \
-// } catch (rtcore_error& e) { \
-// Device* device = scene ? scene->device : nullptr; \
-// Device::process_error(device,e.error,e.what()); \
-// } catch (std::exception& e) { \
-// Device* device = scene ? scene->device : nullptr; \
-// Device::process_error(device,RTC_ERROR_UNKNOWN,e.what()); \
-// } catch (...) { \
-// Device* device = scene ? scene->device : nullptr; \
-// Device::process_error(device,RTC_ERROR_UNKNOWN,"unknown exception caught"); \
-// }
#define RTC_CATCH_END2(scene)
-
-// #define RTC_CATCH_END2_FALSE(scene) \
-// } catch (std::bad_alloc&) { \
-// Device* device = scene ? scene->device : nullptr; \
-// Device::process_error(device,RTC_ERROR_OUT_OF_MEMORY,"out of memory"); \
-// return false; \
-// } catch (rtcore_error& e) { \
-// Device* device = scene ? scene->device : nullptr; \
-// Device::process_error(device,e.error,e.what()); \
-// return false; \
-// } catch (std::exception& e) { \
-// Device* device = scene ? scene->device : nullptr; \
-// Device::process_error(device,RTC_ERROR_UNKNOWN,e.what()); \
-// return false; \
-// } catch (...) { \
-// Device* device = scene ? scene->device : nullptr; \
-// Device::process_error(device,RTC_ERROR_UNKNOWN,"unknown exception caught"); \
-// return false; \
-// }
#define RTC_CATCH_END2_FALSE(scene) return false;
+
+#if 0
+#define RTC_CATCH_BEGIN try {
+
+#define RTC_CATCH_END(device) \
+ } catch (std::bad_alloc&) { \
+ Device::process_error(device,RTC_ERROR_OUT_OF_MEMORY,"out of memory"); \
+ } catch (rtcore_error& e) { \
+ Device::process_error(device,e.error,e.what()); \
+ } catch (std::exception& e) { \
+ Device::process_error(device,RTC_ERROR_UNKNOWN,e.what()); \
+ } catch (...) { \
+ Device::process_error(device,RTC_ERROR_UNKNOWN,"unknown exception caught"); \
+ }
+
+#define RTC_CATCH_END2(scene) \
+ } catch (std::bad_alloc&) { \
+ Device* device = scene ? scene->device : nullptr; \
+ Device::process_error(device,RTC_ERROR_OUT_OF_MEMORY,"out of memory"); \
+ } catch (rtcore_error& e) { \
+ Device* device = scene ? scene->device : nullptr; \
+ Device::process_error(device,e.error,e.what()); \
+ } catch (std::exception& e) { \
+ Device* device = scene ? scene->device : nullptr; \
+ Device::process_error(device,RTC_ERROR_UNKNOWN,e.what()); \
+ } catch (...) { \
+ Device* device = scene ? scene->device : nullptr; \
+ Device::process_error(device,RTC_ERROR_UNKNOWN,"unknown exception caught"); \
+ }
+
+#define RTC_CATCH_END2_FALSE(scene) \
+ } catch (std::bad_alloc&) { \
+ Device* device = scene ? scene->device : nullptr; \
+ Device::process_error(device,RTC_ERROR_OUT_OF_MEMORY,"out of memory"); \
+ return false; \
+ } catch (rtcore_error& e) { \
+ Device* device = scene ? scene->device : nullptr; \
+ Device::process_error(device,e.error,e.what()); \
+ return false; \
+ } catch (std::exception& e) { \
+ Device* device = scene ? scene->device : nullptr; \
+ Device::process_error(device,RTC_ERROR_UNKNOWN,e.what()); \
+ return false; \
+ } catch (...) { \
+ Device* device = scene ? scene->device : nullptr; \
+ Device::process_error(device,RTC_ERROR_UNKNOWN,"unknown exception caught"); \
+ return false; \
+ }
+#endif
// -- GODOT end --
#define RTC_VERIFY_HANDLE(handle) \
@@ -103,39 +106,35 @@ namespace embree
#define RTC_TRACE(x)
#endif
-// -- GODOT begin --
-// /*! used to throw embree API errors */
-// struct rtcore_error : public std::exception
-// {
-// __forceinline rtcore_error(RTCError error, const std::string& str)
-// : error(error), str(str) {}
-//
-// ~rtcore_error() throw() {}
-//
-// const char* what () const throw () {
-// return str.c_str();
-// }
-//
-// RTCError error;
-// std::string str;
-// };
-// -- GODOT end --
+// -- GODOT start --
+#if 0
+ /*! used to throw embree API errors */
+ struct rtcore_error : public std::exception
+ {
+ __forceinline rtcore_error(RTCError error, const std::string& str)
+ : error(error), str(str) {}
+
+ ~rtcore_error() throw() {}
+
+ const char* what () const throw () {
+ return str.c_str();
+ }
+
+ RTCError error;
+ std::string str;
+ };
+#endif
#if defined(DEBUG) // only report file and line in debug mode
- // -- GODOT begin --
- // #define throw_RTCError(error,str) \
- // throw rtcore_error(error,std::string(__FILE__) + " (" + toString(__LINE__) + "): " + std::string(str));
#define throw_RTCError(error,str) \
printf("%s (%d): %s", __FILE__, __LINE__, std::string(str).c_str()), abort();
- // -- GODOT end --
+ // throw rtcore_error(error,std::string(__FILE__) + " (" + toString(__LINE__) + "): " + std::string(str));
#else
- // -- GODOT begin --
- // #define throw_RTCError(error,str) \
- // throw rtcore_error(error,str);
#define throw_RTCError(error,str) \
abort();
- // -- GODOT end --
+ // throw rtcore_error(error,str);
#endif
+// -- GODOT end --
#define RTC_BUILD_ARGUMENTS_HAS(settings,member) \
(settings.byteSize > (offsetof(RTCBuildArguments,member)+sizeof(settings.member)))
diff --git a/thirdparty/embree/kernels/common/rtcore_builder.cpp b/thirdparty/embree/kernels/common/rtcore_builder.cpp
index 1f1b6f6ddf..29e3bdca20 100644
--- a/thirdparty/embree/kernels/common/rtcore_builder.cpp
+++ b/thirdparty/embree/kernels/common/rtcore_builder.cpp
@@ -371,7 +371,7 @@ RTC_NAMESPACE_BEGIN
bvh->allocator.init_estimate(arguments->primitiveCount*sizeof(BBox3fa));
bvh->allocator.reset();
- /* switch between differnet builders based on quality level */
+ /* switch between different builders based on quality level */
if (arguments->buildQuality == RTC_BUILD_QUALITY_LOW)
return rtcBuildBVHMorton(arguments);
else if (arguments->buildQuality == RTC_BUILD_QUALITY_MEDIUM)
diff --git a/thirdparty/embree/kernels/common/scene.cpp b/thirdparty/embree/kernels/common/scene.cpp
index 408d7eae6f..65d31d0f81 100644
--- a/thirdparty/embree/kernels/common/scene.cpp
+++ b/thirdparty/embree/kernels/common/scene.cpp
@@ -629,9 +629,7 @@ namespace embree
if (geometry == null)
throw_RTCError(RTC_ERROR_INVALID_OPERATION,"invalid geometry");
- if (geometry->isEnabled()) {
- setModified ();
- }
+ setModified ();
accels_deleteGeometry(unsigned(geomID));
id_pool.deallocate((unsigned)geomID);
geometries[geomID] = null;
diff --git a/thirdparty/embree/kernels/common/scene_curves.h b/thirdparty/embree/kernels/common/scene_curves.h
index a5a39e42d4..a1ea45d3c7 100644
--- a/thirdparty/embree/kernels/common/scene_curves.h
+++ b/thirdparty/embree/kernels/common/scene_curves.h
@@ -452,6 +452,10 @@ namespace embree
const Vec3fa n1 = normal(index+1,itime);
if (!isvalid(n0) || !isvalid(n1))
return false;
+
+ const BBox3fa b = getOrientedCurveScaledRadius(i,itime).accurateBounds();
+ if (!isvalid(b))
+ return false;
}
}
@@ -612,6 +616,10 @@ namespace embree
const Vec3fa dn1 = dnormal(index+1,itime);
if (!isvalid(dn0) || !isvalid(dn1))
return false;
+
+ const BBox3fa b = getOrientedCurveScaledRadius(i,itime).accurateBounds();
+ if (!isvalid(b))
+ return false;
}
}
diff --git a/thirdparty/embree/kernels/common/state.cpp b/thirdparty/embree/kernels/common/state.cpp
index 01c862da0c..db6b803041 100644
--- a/thirdparty/embree/kernels/common/state.cpp
+++ b/thirdparty/embree/kernels/common/state.cpp
@@ -144,7 +144,20 @@ namespace embree
}
bool State::checkISASupport() {
+#if defined(__ARM_NEON)
+ /*
+ * NEON CPU type is a mixture of NEON and SSE2
+ */
+
+ bool hasSSE2 = (getCPUFeatures() & enabled_cpu_features) & CPU_FEATURE_SSE2;
+
+ /* this will be true when explicitly initialize Device with `isa=neon` config */
+ bool hasNEON = (getCPUFeatures() & enabled_cpu_features) & CPU_FEATURE_NEON;
+
+ return hasSSE2 || hasNEON;
+#else
return (getCPUFeatures() & enabled_cpu_features) == enabled_cpu_features;
+#endif
}
void State::verify()
@@ -157,8 +170,10 @@ namespace embree
* functions */
#if defined(DEBUG)
#if defined(EMBREE_TARGET_SSE2)
+#if !defined(__ARM_NEON)
assert(sse2::getISA() <= SSE2);
#endif
+#endif
#if defined(EMBREE_TARGET_SSE42)
assert(sse42::getISA() <= SSE42);
#endif
diff --git a/thirdparty/embree/kernels/config.h b/thirdparty/embree/kernels/config.h
index 2bf7e93587..84ac27d103 100644
--- a/thirdparty/embree/kernels/config.h
+++ b/thirdparty/embree/kernels/config.h
@@ -1,5 +1,4 @@
-
-// Copyright 2009-2020 Intel Corporation
+// Copyright 2009-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
/* #undef EMBREE_RAY_MASK */
@@ -20,6 +19,7 @@
/* #undef EMBREE_COMPACT_POLYS */
#define EMBREE_CURVE_SELF_INTERSECTION_AVOIDANCE_FACTOR 2.0
+#define EMBREE_DISC_POINT_SELF_INTERSECTION_AVOIDANCE
#if defined(EMBREE_GEOMETRY_TRIANGLE)
#define IF_ENABLED_TRIS(x) x
diff --git a/thirdparty/embree/kernels/geometry/curve_intersector_oriented.h b/thirdparty/embree/kernels/geometry/curve_intersector_oriented.h
index 3d8900c2aa..75532f5ae0 100644
--- a/thirdparty/embree/kernels/geometry/curve_intersector_oriented.h
+++ b/thirdparty/embree/kernels/geometry/curve_intersector_oriented.h
@@ -225,7 +225,7 @@ namespace embree
/* exit if convergence cannot get proven, but terminate if we are very small */
if (unlikely(!subset(K,x) && !very_small)) return false;
- /* solve using newton raphson iteration of convergence is guarenteed */
+ /* solve using newton raphson iteration of convergence is guaranteed */
solve_newton_raphson_loop(cu,cv,c1,dfdu,dfdv,rcp_J);
return true;
}
diff --git a/thirdparty/embree/kernels/geometry/curve_intersector_sweep.h b/thirdparty/embree/kernels/geometry/curve_intersector_sweep.h
index 2d4abd73ac..ed827d583f 100644
--- a/thirdparty/embree/kernels/geometry/curve_intersector_sweep.h
+++ b/thirdparty/embree/kernels/geometry/curve_intersector_sweep.h
@@ -60,7 +60,7 @@ namespace embree
const Vec3fa dir = ray.dir;
const float length_ray_dir = length(dir);
- /* error of curve evaluations is propertional to largest coordinate */
+ /* error of curve evaluations is proportional to largest coordinate */
const BBox3ff box = curve.bounds();
const float P_err = 16.0f*float(ulp)*reduce_max(max(abs(box.lower),abs(box.upper)));
diff --git a/thirdparty/embree/kernels/geometry/disc_intersector.h b/thirdparty/embree/kernels/geometry/disc_intersector.h
index 816c066899..ec6fa9c4f3 100644
--- a/thirdparty/embree/kernels/geometry/disc_intersector.h
+++ b/thirdparty/embree/kernels/geometry/disc_intersector.h
@@ -68,15 +68,15 @@ namespace embree
const Vec3vf<M> center = v0.xyz();
const vfloat<M> radius = v0.w;
+ /* compute ray distance projC0 to hit point with ray oriented plane */
const Vec3vf<M> c0 = center - ray_org;
const vfloat<M> projC0 = dot(c0, ray_dir) * rd2;
valid &= (vfloat<M>(ray.tnear()) <= projC0) & (projC0 <= vfloat<M>(ray.tfar));
- if (EMBREE_CURVE_SELF_INTERSECTION_AVOIDANCE_FACTOR != 0.0f)
- valid &= projC0 > float(EMBREE_CURVE_SELF_INTERSECTION_AVOIDANCE_FACTOR) * radius * pre.depth_scale; // ignore self intersections
if (unlikely(none(valid)))
return false;
-
+
+ /* check if hit point lies inside disc */
const Vec3vf<M> perp = c0 - projC0 * ray_dir;
const vfloat<M> l2 = dot(perp, perp);
const vfloat<M> r2 = radius * radius;
@@ -84,6 +84,15 @@ namespace embree
if (unlikely(none(valid)))
return false;
+ /* We reject hits where the ray origin lies inside the ray
+ * oriented disc to avoid self intersections. */
+#if defined(EMBREE_DISC_POINT_SELF_INTERSECTION_AVOIDANCE)
+ const vfloat<M> m2 = dot(c0, c0);
+ valid &= (m2 > r2);
+ if (unlikely(none(valid)))
+ return false;
+#endif
+
DiscIntersectorHitM<M> hit(zero, zero, projC0, -ray_dir);
return epilog(valid, hit);
}
@@ -152,15 +161,15 @@ namespace embree
const Vec3vf<M> center = v0.xyz();
const vfloat<M> radius = v0.w;
+ /* compute ray distance projC0 to hit point with ray oriented plane */
const Vec3vf<M> c0 = center - ray_org;
const vfloat<M> projC0 = dot(c0, ray_dir) * rd2;
valid &= (vfloat<M>(ray.tnear()[k]) <= projC0) & (projC0 <= vfloat<M>(ray.tfar[k]));
- if (EMBREE_CURVE_SELF_INTERSECTION_AVOIDANCE_FACTOR != 0.0f)
- valid &= projC0 > float(EMBREE_CURVE_SELF_INTERSECTION_AVOIDANCE_FACTOR) * radius * pre.depth_scale[k]; // ignore self intersections
if (unlikely(none(valid)))
return false;
+ /* check if hit point lies inside disc */
const Vec3vf<M> perp = c0 - projC0 * ray_dir;
const vfloat<M> l2 = dot(perp, perp);
const vfloat<M> r2 = radius * radius;
@@ -168,6 +177,15 @@ namespace embree
if (unlikely(none(valid)))
return false;
+ /* We reject hits where the ray origin lies inside the ray
+ * oriented disc to avoid self intersections. */
+#if defined(EMBREE_DISC_POINT_SELF_INTERSECTION_AVOIDANCE)
+ const vfloat<M> m2 = dot(c0, c0);
+ valid &= (m2 > r2);
+ if (unlikely(none(valid)))
+ return false;
+#endif
+
DiscIntersectorHitM<M> hit(zero, zero, projC0, -ray_dir);
return epilog(valid, hit);
}
diff --git a/thirdparty/embree/kernels/geometry/filter.h b/thirdparty/embree/kernels/geometry/filter.h
index 3b4d924ea7..d64320bf78 100644
--- a/thirdparty/embree/kernels/geometry/filter.h
+++ b/thirdparty/embree/kernels/geometry/filter.h
@@ -51,20 +51,11 @@ namespace embree
__forceinline void reportIntersection1(IntersectFunctionNArguments* args, const RTCFilterFunctionNArguments* filter_args)
{
#if defined(EMBREE_FILTER_FUNCTION)
- IntersectContext* MAYBE_UNUSED context = args->internal_context;
- const Geometry* const geometry = args->geometry;
- if (geometry->intersectionFilterN) {
- assert(context->scene->hasGeometryFilterFunction());
- geometry->intersectionFilterN(filter_args);
- }
+ if (args->geometry->intersectionFilterN)
+ args->geometry->intersectionFilterN(filter_args);
- //if (args->valid[0] == 0)
- // return;
-
- if (context->user->filter) {
- assert(context->scene->hasContextFilterFunction());
- context->user->filter(filter_args);
- }
+ if (args->context->filter)
+ args->context->filter(filter_args);
#endif
}
@@ -105,20 +96,11 @@ namespace embree
__forceinline void reportOcclusion1(OccludedFunctionNArguments* args, const RTCFilterFunctionNArguments* filter_args)
{
#if defined(EMBREE_FILTER_FUNCTION)
- IntersectContext* MAYBE_UNUSED context = args->internal_context;
- const Geometry* const geometry = args->geometry;
- if (geometry->occlusionFilterN) {
- assert(context->scene->hasGeometryFilterFunction());
- geometry->occlusionFilterN(filter_args);
- }
-
- //if (args->valid[0] == 0)
- // return false;
+ if (args->geometry->occlusionFilterN)
+ args->geometry->occlusionFilterN(filter_args);
- if (context->user->filter) {
- assert(context->scene->hasContextFilterFunction());
- context->user->filter(filter_args);
- }
+ if (args->context->filter)
+ args->context->filter(filter_args);
#endif
}
diff --git a/thirdparty/embree/kernels/geometry/object_intersector.h b/thirdparty/embree/kernels/geometry/object_intersector.h
index 11ceb2f7fe..e4ad01852f 100644
--- a/thirdparty/embree/kernels/geometry/object_intersector.h
+++ b/thirdparty/embree/kernels/geometry/object_intersector.h
@@ -32,7 +32,7 @@ namespace embree
return;
#endif
- accel->intersect(ray,prim.geomID(),prim.primID(),context,reportIntersection1);
+ accel->intersect(ray,prim.geomID(),prim.primID(),context);
}
static __forceinline bool occluded(const Precalculations& pre, Ray& ray, IntersectContext* context, const Primitive& prim)
@@ -44,7 +44,7 @@ namespace embree
return false;
#endif
- accel->occluded(ray,prim.geomID(),prim.primID(),context,&reportOcclusion1);
+ accel->occluded(ray,prim.geomID(),prim.primID(),context);
return ray.tfar < 0.0f;
}
@@ -89,7 +89,7 @@ namespace embree
valid &= (ray.mask & accel->mask) != 0;
if (none(valid)) return;
#endif
- accel->intersect(valid,ray,prim.geomID(),prim.primID(),context,&reportIntersection1);
+ accel->intersect(valid,ray,prim.geomID(),prim.primID(),context);
}
static __forceinline vbool<K> occluded(const vbool<K>& valid_i, const Precalculations& pre, RayK<K>& ray, IntersectContext* context, const Primitive& prim)
@@ -102,7 +102,7 @@ namespace embree
valid &= (ray.mask & accel->mask) != 0;
if (none(valid)) return false;
#endif
- accel->occluded(valid,ray,prim.geomID(),prim.primID(),context,&reportOcclusion1);
+ accel->occluded(valid,ray,prim.geomID(),prim.primID(),context);
return ray.tfar < 0.0f;
}
diff --git a/thirdparty/embree/kernels/geometry/quadv.h b/thirdparty/embree/kernels/geometry/quadv.h
index 2137356ff2..514e519b0c 100644
--- a/thirdparty/embree/kernels/geometry/quadv.h
+++ b/thirdparty/embree/kernels/geometry/quadv.h
@@ -152,7 +152,7 @@ namespace embree
Vec3vf<M> v0; // 1st vertex of the quads
Vec3vf<M> v1; // 2nd vertex of the quads
Vec3vf<M> v2; // 3rd vertex of the quads
- Vec3vf<M> v3; // 4rd vertex of the quads
+ Vec3vf<M> v3; // 4th vertex of the quads
private:
vuint<M> geomIDs; // geometry ID
vuint<M> primIDs; // primitive ID
diff --git a/thirdparty/embree/kernels/geometry/roundline_intersector.h b/thirdparty/embree/kernels/geometry/roundline_intersector.h
index 0e9393442b..764ff93fec 100644
--- a/thirdparty/embree/kernels/geometry/roundline_intersector.h
+++ b/thirdparty/embree/kernels/geometry/roundline_intersector.h
@@ -19,7 +19,7 @@
For multiple connected round linear curve segments this construction
yield a proper shape when viewed from the outside. Using the
- following CSG we can also handle the interiour in most common cases:
+ following CSG we can also handle the interior in most common cases:
round_linear_curve(pl,rl,p0,r0,p1,r1,pr,rr) =
cone_sphere(p0,r0,p1,r1) - cone(pl,rl,p0,r0) - cone(p1,r1,pr,rr)
@@ -431,7 +431,7 @@ namespace embree
Ng' = (h-u*dP) - (w0+u*dw)*dw/dP^2*dP
Inserting the definition of w0 and dw and refactoring
- yield a furhter scaled Ng'':
+ yield a further scaled Ng'':
Ng'' = (dP^2 - dr^2) (h-q) - (r0+u*dr)*dr*dP
diff --git a/thirdparty/embree/kernels/geometry/subgrid_intersector.h b/thirdparty/embree/kernels/geometry/subgrid_intersector.h
index ad5fee2e4e..e241073812 100644
--- a/thirdparty/embree/kernels/geometry/subgrid_intersector.h
+++ b/thirdparty/embree/kernels/geometry/subgrid_intersector.h
@@ -264,8 +264,8 @@ namespace embree
const Vec3vf<K> p2 = vtx[i*4+2];
const Vec3vf<K> p3 = vtx[i*4+3];
STAT3(shadow.trav_prims,1,popcnt(valid0),K);
- if (pre.intersectK(valid0,ray,p0,p1,p2,p3,g,subgrid,i,OccludedKEpilogM<4,K,filter>(valid0,ray,context,subgrid.geomID(),subgrid.primID(),i)))
- break;
+ pre.intersectK(valid0,ray,p0,p1,p2,p3,g,subgrid,i,OccludedKEpilogM<4,K,filter>(valid0,ray,context,subgrid.geomID(),subgrid.primID(),i));
+ if (none(valid0)) break;
}
return !valid0;
}
@@ -408,10 +408,8 @@ namespace embree
const Vec3vf<K> p2 = vtx[i*4+2];
const Vec3vf<K> p3 = vtx[i*4+3];
STAT3(shadow.trav_prims,1,popcnt(valid0),K);
- //if (pre.intersectK(valid0,ray,p0,p1,p2,p3,g,subgrid,i,OccludedKEpilogM<4,K,filter>(valid0,ray,context,subgrid.geomID(),subgrid.primID(),i)))
- if (pre.occludedK(valid0,ray,p0,p1,p2,p3,g,subgrid,i,OccludedKEpilogM<4,K,filter>(valid0,ray,context,subgrid.geomID(),subgrid.primID(),i)))
-
- break;
+ pre.occludedK(valid0,ray,p0,p1,p2,p3,g,subgrid,i,OccludedKEpilogM<4,K,filter>(valid0,ray,context,subgrid.geomID(),subgrid.primID(),i));
+ if (none(valid0)) break;
}
return !valid0;
}
diff --git a/thirdparty/embree/kernels/hash.h b/thirdparty/embree/kernels/hash.h
index 470e15f03e..39d50e2354 100644
--- a/thirdparty/embree/kernels/hash.h
+++ b/thirdparty/embree/kernels/hash.h
@@ -1,5 +1,4 @@
-
-// Copyright 2009-2020 Intel Corporation
+// Copyright 2009-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
-#define RTC_HASH "12b99393438a4cc9e478e33459eed78bec6233fd"
+#define RTC_HASH "698442324ccddd11725fb8875275dc1384f7fb40"
diff --git a/thirdparty/embree/kernels/subdiv/bezier_patch.h b/thirdparty/embree/kernels/subdiv/bezier_patch.h
index 2ff03902a7..0a2aef321f 100644
--- a/thirdparty/embree/kernels/subdiv/bezier_patch.h
+++ b/thirdparty/embree/kernels/subdiv/bezier_patch.h
@@ -94,7 +94,7 @@ namespace embree
matrix[0][1] = computeRightEdgeBezierControlPoint(source.v,1,1);
matrix[0][2] = computeLeftEdgeBezierControlPoint(source.v,1,2);
- /* compute buttom edge control points */
+ /* compute bottom edge control points */
matrix[3][1] = computeRightEdgeBezierControlPoint(source.v,2,1);
matrix[3][2] = computeLeftEdgeBezierControlPoint(source.v,2,2);
diff --git a/thirdparty/embree/kernels/subdiv/catmullclark_ring.h b/thirdparty/embree/kernels/subdiv/catmullclark_ring.h
index e5ad5dadfe..eab91d9ee6 100644
--- a/thirdparty/embree/kernels/subdiv/catmullclark_ring.h
+++ b/thirdparty/embree/kernels/subdiv/catmullclark_ring.h
@@ -388,7 +388,7 @@ namespace embree
return (Vertex_t)(n*n*vtx+4.0f*E+F) / ((n+5.0f)*n);
}
- /* gets limit tangent in the direction of egde vtx -> ring[0] */
+ /* gets limit tangent in the direction of edge vtx -> ring[0] */
__forceinline Vertex getLimitTangent() const
{
if (unlikely(std::isinf(vertex_crease_weight)))
@@ -429,7 +429,7 @@ namespace embree
return sigma * (alpha + beta);
}
- /* gets limit tangent in the direction of egde vtx -> ring[edge_valence-2] */
+ /* gets limit tangent in the direction of edge vtx -> ring[edge_valence-2] */
__forceinline Vertex getSecondLimitTangent() const
{
if (unlikely(std::isinf(vertex_crease_weight)))
@@ -763,7 +763,7 @@ namespace embree
}
- /* gets limit tangent in the direction of egde vtx -> ring[0] */
+ /* gets limit tangent in the direction of edge vtx -> ring[0] */
__forceinline Vertex getLimitTangent() const
{
CatmullClark1Ring cc_vtx;
@@ -779,7 +779,7 @@ namespace embree
return 2.0f * cc_vtx.getLimitTangent();
}
- /* gets limit tangent in the direction of egde vtx -> ring[edge_valence-2] */
+ /* gets limit tangent in the direction of edge vtx -> ring[edge_valence-2] */
__forceinline Vertex getSecondLimitTangent() const
{
CatmullClark1Ring cc_vtx;
diff --git a/thirdparty/embree/kernels/subdiv/catmullrom_curve.h b/thirdparty/embree/kernels/subdiv/catmullrom_curve.h
index 74fc4c1230..9532287d98 100644
--- a/thirdparty/embree/kernels/subdiv/catmullrom_curve.h
+++ b/thirdparty/embree/kernels/subdiv/catmullrom_curve.h
@@ -8,7 +8,7 @@
/*
- Implements Catmul Rom curves with control points p0, p1, p2, p3. At
+ Implements Catmull-Rom curves with control points p0, p1, p2, p3. At
t=0 the curve goes through p1, with tangent (p2-p0)/3, and for t=1
the curve goes through p2 with tangent (p3-p2)/2.
@@ -91,11 +91,11 @@ namespace embree
: v0(v0), v1(v1), v2(v2), v3(v3) {}
__forceinline Vertex begin() const {
- return madd(1.0f/6.0f,v0,madd(2.0f/3.0f,v1,1.0f/6.0f*v2));
+ return v1;
}
__forceinline Vertex end() const {
- return madd(1.0f/6.0f,v1,madd(2.0f/3.0f,v2,1.0f/6.0f*v3));
+ return v2;
}
__forceinline Vertex center() const {
diff --git a/thirdparty/embree/kernels/subdiv/linear_bezier_patch.h b/thirdparty/embree/kernels/subdiv/linear_bezier_patch.h
index f8e8a25f35..dcdb101d7c 100644
--- a/thirdparty/embree/kernels/subdiv/linear_bezier_patch.h
+++ b/thirdparty/embree/kernels/subdiv/linear_bezier_patch.h
@@ -81,29 +81,29 @@ namespace embree
{
SourceCurve<Vec3ff> vcurve = center;
SourceCurve<Vec3fa> ncurve = normal;
-
+
/* here we construct a patch which follows the curve l(t) =
* p(t) +/- r(t)*normalize(cross(n(t),dp(t))) */
const Vec3ff p0 = vcurve.eval(0.0f);
const Vec3ff dp0 = vcurve.eval_du(0.0f);
- const Vec3ff ddp0 = vcurve.eval_dudu(0.0f);
+ //const Vec3ff ddp0 = vcurve.eval_dudu(0.0f); // ddp0 is assumed to be 0
const Vec3fa n0 = ncurve.eval(0.0f);
const Vec3fa dn0 = ncurve.eval_du(0.0f);
const Vec3ff p1 = vcurve.eval(1.0f);
const Vec3ff dp1 = vcurve.eval_du(1.0f);
- const Vec3ff ddp1 = vcurve.eval_dudu(1.0f);
+ //const Vec3ff ddp1 = vcurve.eval_dudu(1.0f); // ddp1 is assumed to be 0
const Vec3fa n1 = ncurve.eval(1.0f);
const Vec3fa dn1 = ncurve.eval_du(1.0f);
const Vec3fa bt0 = cross(n0,dp0);
- const Vec3fa dbt0 = cross(dn0,dp0) + cross(n0,ddp0);
+ const Vec3fa dbt0 = cross(dn0,dp0);// + cross(n0,ddp0);
const Vec3fa bt1 = cross(n1,dp1);
- const Vec3fa dbt1 = cross(dn1,dp1) + cross(n1,ddp1);
+ const Vec3fa dbt1 = cross(dn1,dp1);// + cross(n1,ddp1);
const Vec3fa k0 = normalize(bt0);
const Vec3fa dk0 = dnormalize(bt0,dbt0);
diff --git a/thirdparty/embree/patches/emscripten-nthreads.patch b/thirdparty/embree/patches/emscripten-nthreads.patch
new file mode 100644
index 0000000000..e42f203475
--- /dev/null
+++ b/thirdparty/embree/patches/emscripten-nthreads.patch
@@ -0,0 +1,42 @@
+diff --git a/thirdparty/embree/common/sys/sysinfo.cpp b/thirdparty/embree/common/sys/sysinfo.cpp
+index c98f61fa53..7f7a009a1e 100644
+--- a/thirdparty/embree/common/sys/sysinfo.cpp
++++ b/thirdparty/embree/common/sys/sysinfo.cpp
+@@ -640,6 +640,12 @@ namespace embree
+
+ #if defined(__EMSCRIPTEN__)
+ #include <emscripten.h>
++
++// -- GODOT start --
++extern "C" {
++extern int godot_js_os_hw_concurrency_get();
++}
++// -- GODOT end --
+ #endif
+
+ namespace embree
+@@ -653,21 +659,9 @@ namespace embree
+ nThreads = sysconf(_SC_NPROCESSORS_ONLN); // does not work in Linux LXC container
+ assert(nThreads);
+ #elif defined(__EMSCRIPTEN__)
+- // WebAssembly supports pthreads, but not pthread_getaffinity_np. Get the number of logical
+- // threads from the browser or Node.js using JavaScript.
+- nThreads = MAIN_THREAD_EM_ASM_INT({
+- const isBrowser = typeof window !== 'undefined';
+- const isNode = typeof process !== 'undefined' && process.versions != null &&
+- process.versions.node != null;
+- if (isBrowser) {
+- // Return 1 if the browser does not expose hardwareConcurrency.
+- return window.navigator.hardwareConcurrency || 1;
+- } else if (isNode) {
+- return require('os').cpus().length;
+- } else {
+- return 1;
+- }
+- });
++ // -- GODOT start --
++ nThreads = godot_js_os_hw_concurrency_get();
++ // -- GODOT end --
+ #else
+ cpu_set_t set;
+ if (pthread_getaffinity_np(pthread_self(), sizeof(set), &set) == 0)
diff --git a/thirdparty/embree/patches/godot-changes-android.patch b/thirdparty/embree/patches/godot-changes-android.patch
deleted file mode 100644
index a27f924bde..0000000000
--- a/thirdparty/embree/patches/godot-changes-android.patch
+++ /dev/null
@@ -1,103 +0,0 @@
-diff --git a/thirdparty/embree/common/sys/sysinfo.cpp b/thirdparty/embree/common/sys/sysinfo.cpp
-index ba97dc227b..1679599608 100644
---- a/thirdparty/embree/common/sys/sysinfo.cpp
-+++ b/thirdparty/embree/common/sys/sysinfo.cpp
-@@ -618,7 +618,10 @@ namespace embree
- static int nThreads = -1;
- if (nThreads != -1) return nThreads;
-
--#if defined(__MACOSX__)
-+// -- GODOT start --
-+// #if defined(__MACOSX__)
-+#if defined(__MACOSX__) || defined(__ANDROID__)
-+// -- GODOT end --
- nThreads = sysconf(_SC_NPROCESSORS_ONLN); // does not work in Linux LXC container
- assert(nThreads);
- #else
-diff --git a/thirdparty/embree/common/sys/thread.cpp b/thirdparty/embree/common/sys/thread.cpp
-index a7827e18f7..f4014be89b 100644
---- a/thirdparty/embree/common/sys/thread.cpp
-+++ b/thirdparty/embree/common/sys/thread.cpp
-@@ -158,7 +158,9 @@ namespace embree
- /// Linux Platform
- ////////////////////////////////////////////////////////////////////////////////
-
--#if defined(__LINUX__)
-+// -- GODOT start --
-+#if defined(__LINUX__) && !defined(__ANDROID__)
-+// -- GODOT end --
-
- #include <fstream>
- #include <sstream>
-@@ -247,6 +249,28 @@ namespace embree
- }
- #endif
-
-+// -- GODOT start --
-+////////////////////////////////////////////////////////////////////////////////
-+/// Android Platform
-+////////////////////////////////////////////////////////////////////////////////
-+
-+#if defined(__ANDROID__)
-+
-+namespace embree
-+{
-+ /*! set affinity of the calling thread */
-+ void setAffinity(ssize_t affinity)
-+ {
-+ cpu_set_t cset;
-+ CPU_ZERO(&cset);
-+ CPU_SET(affinity, &cset);
-+
-+ sched_setaffinity(0, sizeof(cset), &cset);
-+ }
-+}
-+#endif
-+// -- GODOT end --
-+
- ////////////////////////////////////////////////////////////////////////////////
- /// FreeBSD Platform
- ////////////////////////////////////////////////////////////////////////////////
-@@ -355,7 +379,9 @@ namespace embree
- pthread_attr_destroy(&attr);
-
- /* set affinity */
--#if defined(__LINUX__)
-+// -- GODOT start --
-+#if defined(__LINUX__) && !defined(__ANDROID__)
-+// -- GODOT end --
- if (threadID >= 0) {
- cpu_set_t cset;
- CPU_ZERO(&cset);
-@@ -370,7 +396,16 @@ namespace embree
- CPU_SET(threadID, &cset);
- pthread_setaffinity_np(*tid, sizeof(cset), &cset);
- }
-+// -- GODOT start --
-+#elif defined(__ANDROID__)
-+ if (threadID >= 0) {
-+ cpu_set_t cset;
-+ CPU_ZERO(&cset);
-+ CPU_SET(threadID, &cset);
-+ sched_setaffinity(pthread_gettid_np(*tid), sizeof(cset), &cset);
-+ }
- #endif
-+// -- GODOT end --
-
- return thread_t(tid);
- }
-@@ -389,8 +424,14 @@ namespace embree
-
- /*! destroy a hardware thread by its handle */
- void destroyThread(thread_t tid) {
-+// -- GODOT start --
-+#if defined(__ANDROID__)
-+ FATAL("Can't destroy threads on Android.");
-+#else
- pthread_cancel(*(pthread_t*)tid);
- delete (pthread_t*)tid;
-+#endif
-+// -- GODOT end --
- }
-
- /*! creates thread local storage */
diff --git a/thirdparty/embree/patches/godot-changes-misc.patch b/thirdparty/embree/patches/godot-changes-misc.patch
deleted file mode 100644
index 8bf0d9fa97..0000000000
--- a/thirdparty/embree/patches/godot-changes-misc.patch
+++ /dev/null
@@ -1,105 +0,0 @@
-diff --git a/thirdparty/embree/common/sys/intrinsics.h b/thirdparty/embree/common/sys/intrinsics.h
-index 79729c87ab..ed8dd7d40a 100644
---- a/thirdparty/embree/common/sys/intrinsics.h
-+++ b/thirdparty/embree/common/sys/intrinsics.h
-@@ -34,8 +34,14 @@
- #endif
-
- #if defined(__WIN32__)
--# define NOMINMAX
--# include <windows.h>
-+// -- GODOT start --
-+#if !defined(NOMINMAX)
-+// -- GODOT end --
-+#define NOMINMAX
-+// -- GODOT start --
-+#endif
-+#include "windows.h"
-+// -- GODOT end --
- #endif
-
- /* normally defined in pmmintrin.h, but we always need this */
-diff --git a/thirdparty/embree/common/sys/platform.h b/thirdparty/embree/common/sys/platform.h
-index 3fc5e99b8d..697e07bb86 100644
---- a/thirdparty/embree/common/sys/platform.h
-+++ b/thirdparty/embree/common/sys/platform.h
-@@ -99,7 +99,9 @@
- #define dll_import
- #endif
-
--#ifdef __WIN32__
-+// -- GODOT start --
-+#if defined(__WIN32__) && !defined(__MINGW32__)
-+// -- GODOT end --
- #if !defined(__noinline)
- #define __noinline __declspec(noinline)
- #endif
-@@ -149,6 +151,9 @@
- #define DELETED = delete
- #endif
-
-+// -- GODOT start --
-+#if !defined(likely)
-+// -- GODOT end --
- #if defined(_MSC_VER) && !defined(__INTEL_COMPILER)
- #define likely(expr) (expr)
- #define unlikely(expr) (expr)
-@@ -156,6 +161,9 @@
- #define likely(expr) __builtin_expect((bool)(expr),true )
- #define unlikely(expr) __builtin_expect((bool)(expr),false)
- #endif
-+// -- GODOT start --
-+#endif
-+// -- GODOT end --
-
- ////////////////////////////////////////////////////////////////////////////////
- /// Error handling and debugging
-diff --git a/thirdparty/embree/common/sys/sysinfo.cpp b/thirdparty/embree/common/sys/sysinfo.cpp
-index ba97dc227b..f1a59e511e 100644
---- a/thirdparty/embree/common/sys/sysinfo.cpp
-+++ b/thirdparty/embree/common/sys/sysinfo.cpp
-@@ -248,7 +248,9 @@ namespace embree
- #if defined(__X86_ASM__)
- __noinline int64_t get_xcr0()
- {
--#if defined (__WIN32__)
-+// -- GODOT start --
-+#if defined (__WIN32__) && !defined (__MINGW32__)
-+// -- GODOT end --
- int64_t xcr0 = 0; // int64_t is workaround for compiler bug under VS2013, Win32
- xcr0 = _xgetbv(0);
- return xcr0;
-diff --git a/thirdparty/embree/include/embree3/rtcore_common.h b/thirdparty/embree/include/embree3/rtcore_common.h
-index 9c14b28745..4857e1e05e 100644
---- a/thirdparty/embree/include/embree3/rtcore_common.h
-+++ b/thirdparty/embree/include/embree3/rtcore_common.h
-@@ -19,7 +19,9 @@ typedef int ssize_t;
- #endif
- #endif
-
--#ifdef _WIN32
-+// -- GODOT start --
-+#if defined(_WIN32) && defined(_MSC_VER)
-+// -- GODOT end --
- # define RTC_ALIGN(...) __declspec(align(__VA_ARGS__))
- #else
- # define RTC_ALIGN(...) __attribute__((aligned(__VA_ARGS__)))
-diff --git a/thirdparty/embree/common/tasking/taskschedulertbb.h b/thirdparty/embree/common/tasking/taskschedulertbb.h
-index 3fd15816e9..35bd49849f 100644
---- a/thirdparty/embree/common/tasking/taskschedulertbb.h
-+++ b/thirdparty/embree/common/tasking/taskschedulertbb.h
-@@ -12,7 +12,13 @@
- #include "../sys/ref.h"
-
- #if defined(__WIN32__)
-+// -- GODOT start --
-+#if !defined(NOMINMAX)
-+// -- GODOT end --
- # define NOMINMAX
-+// -- GODOT start --
-+#endif
-+// -- GODOT end --
- #endif
-
- // We need to define these to avoid implicit linkage against
- \ No newline at end of file
diff --git a/thirdparty/embree/patches/godot-changes-noexcept.patch b/thirdparty/embree/patches/godot-changes-noexcept.patch
index 598a7f2ddc..84169c36e4 100644
--- a/thirdparty/embree/patches/godot-changes-noexcept.patch
+++ b/thirdparty/embree/patches/godot-changes-noexcept.patch
@@ -1,5 +1,5 @@
diff --git a/thirdparty/embree/common/algorithms/parallel_for.h b/thirdparty/embree/common/algorithms/parallel_for.h
-index f052d8b468..645681ac63 100644
+index f2969a88f1..6d411e4852 100644
--- a/thirdparty/embree/common/algorithms/parallel_for.h
+++ b/thirdparty/embree/common/algorithms/parallel_for.h
@@ -21,7 +21,10 @@ namespace embree
@@ -12,9 +12,9 @@ index f052d8b468..645681ac63 100644
+ abort();
+ // -- GODOT end --
}
-
#elif defined(TASKING_TBB)
-@@ -31,13 +34,19 @@ namespace embree
+ #if TBB_INTERFACE_VERSION >= 12002
+@@ -30,13 +33,19 @@ namespace embree
func(i);
},context);
if (context.is_group_execution_cancelled())
@@ -36,7 +36,7 @@ index f052d8b468..645681ac63 100644
#endif
#elif defined(TASKING_PPL)
-@@ -57,7 +66,10 @@ namespace embree
+@@ -56,7 +65,10 @@ namespace embree
#if defined(TASKING_INTERNAL)
TaskScheduler::spawn(first,last,minStepSize,func);
if (!TaskScheduler::wait())
@@ -48,7 +48,7 @@ index f052d8b468..645681ac63 100644
#elif defined(TASKING_TBB)
#if TBB_INTERFACE_VERSION >= 12002
-@@ -66,13 +78,19 @@ namespace embree
+@@ -65,13 +77,19 @@ namespace embree
func(range<Index>(r.begin(),r.end()));
},context);
if (context.is_group_execution_cancelled())
@@ -70,7 +70,7 @@ index f052d8b468..645681ac63 100644
#endif
#elif defined(TASKING_PPL)
-@@ -104,13 +122,19 @@ namespace embree
+@@ -103,13 +121,19 @@ namespace embree
func(i);
},tbb::simple_partitioner(),context);
if (context.is_group_execution_cancelled())
@@ -92,7 +92,7 @@ index f052d8b468..645681ac63 100644
#endif
}
-@@ -125,13 +149,19 @@ namespace embree
+@@ -124,13 +148,19 @@ namespace embree
func(i);
},ap,context);
if (context.is_group_execution_cancelled())
@@ -115,7 +115,7 @@ index f052d8b468..645681ac63 100644
}
diff --git a/thirdparty/embree/common/algorithms/parallel_reduce.h b/thirdparty/embree/common/algorithms/parallel_reduce.h
-index f42ae2ec50..8271372ea4 100644
+index 1a94aad8c4..cd0078f2e6 100644
--- a/thirdparty/embree/common/algorithms/parallel_reduce.h
+++ b/thirdparty/embree/common/algorithms/parallel_reduce.h
@@ -58,15 +58,19 @@ namespace embree
@@ -247,10 +247,10 @@ index 1bc30fe9a5..abdd269069 100644
/* hint for transparent huge pages (THP) */
diff --git a/thirdparty/embree/common/sys/platform.h b/thirdparty/embree/common/sys/platform.h
-index 8a6d9fa0a9..697e07bb86 100644
+index be3ec36436..728bf6ed7d 100644
--- a/thirdparty/embree/common/sys/platform.h
+++ b/thirdparty/embree/common/sys/platform.h
-@@ -179,11 +179,19 @@
+@@ -178,11 +178,19 @@
#define PRINT4(x,y,z,w) embree_cout << STRING(x) << " = " << (x) << ", " << STRING(y) << " = " << (y) << ", " << STRING(z) << " = " << (z) << ", " << STRING(w) << " = " << (w) << embree_endl
#if defined(DEBUG) // only report file and line in debug mode
@@ -351,7 +351,7 @@ index dca835a716..ad438588a3 100644
bool TaskScheduler::steal_from_other_threads(Thread& thread)
diff --git a/thirdparty/embree/common/tasking/taskschedulerinternal.h b/thirdparty/embree/common/tasking/taskschedulerinternal.h
-index c766a0bb6a..8fa6bb12fa 100644
+index 61a0e57c5b..6cc2495195 100644
--- a/thirdparty/embree/common/tasking/taskschedulerinternal.h
+++ b/thirdparty/embree/common/tasking/taskschedulerinternal.h
@@ -123,7 +123,10 @@ namespace embree
@@ -391,7 +391,7 @@ index c766a0bb6a..8fa6bb12fa 100644
/*! steals a task from a different thread */
bool steal_from_other_threads(Thread& thread);
diff --git a/thirdparty/embree/kernels/bvh/bvh_statistics.cpp b/thirdparty/embree/kernels/bvh/bvh_statistics.cpp
-index d8da78eed7..d857ff7d95 100644
+index 40f9043736..57f75bfd7e 100644
--- a/thirdparty/embree/kernels/bvh/bvh_statistics.cpp
+++ b/thirdparty/embree/kernels/bvh/bvh_statistics.cpp
@@ -150,7 +150,10 @@ namespace embree
@@ -407,10 +407,10 @@ index d8da78eed7..d857ff7d95 100644
return s;
}
diff --git a/thirdparty/embree/kernels/common/rtcore.cpp b/thirdparty/embree/kernels/common/rtcore.cpp
-index 74e9fb335c..94b3819e42 100644
+index 95a94319ec..a6ea55bfc4 100644
--- a/thirdparty/embree/kernels/common/rtcore.cpp
+++ b/thirdparty/embree/kernels/common/rtcore.cpp
-@@ -197,7 +197,10 @@ RTC_NAMESPACE_BEGIN;
+@@ -198,7 +198,10 @@ RTC_NAMESPACE_BEGIN;
if (quality != RTC_BUILD_QUALITY_LOW &&
quality != RTC_BUILD_QUALITY_MEDIUM &&
quality != RTC_BUILD_QUALITY_HIGH)
@@ -422,7 +422,7 @@ index 74e9fb335c..94b3819e42 100644
scene->setBuildQuality(quality);
RTC_CATCH_END2(scene);
}
-@@ -1350,7 +1353,10 @@ RTC_NAMESPACE_BEGIN;
+@@ -1351,7 +1354,10 @@ RTC_NAMESPACE_BEGIN;
quality != RTC_BUILD_QUALITY_MEDIUM &&
quality != RTC_BUILD_QUALITY_HIGH &&
quality != RTC_BUILD_QUALITY_REFIT)
@@ -435,172 +435,67 @@ index 74e9fb335c..94b3819e42 100644
RTC_CATCH_END2(geometry);
}
diff --git a/thirdparty/embree/kernels/common/rtcore.h b/thirdparty/embree/kernels/common/rtcore.h
-index 4e4b24e9c2..373e49a689 100644
+index 4e4b24e9c2..ac58a84d6f 100644
--- a/thirdparty/embree/kernels/common/rtcore.h
+++ b/thirdparty/embree/kernels/common/rtcore.h
-@@ -25,52 +25,58 @@ namespace embree
+@@ -25,6 +25,13 @@ namespace embree
#endif
/*! Macros used in the rtcore API implementation */
--#define RTC_CATCH_BEGIN try {
+// -- GODOT start --
-+// #define RTC_CATCH_BEGIN try {
+#define RTC_CATCH_BEGIN
-
--#define RTC_CATCH_END(device) \
-- } catch (std::bad_alloc&) { \
-- Device::process_error(device,RTC_ERROR_OUT_OF_MEMORY,"out of memory"); \
-- } catch (rtcore_error& e) { \
-- Device::process_error(device,e.error,e.what()); \
-- } catch (std::exception& e) { \
-- Device::process_error(device,RTC_ERROR_UNKNOWN,e.what()); \
-- } catch (...) { \
-- Device::process_error(device,RTC_ERROR_UNKNOWN,"unknown exception caught"); \
-- }
-+// #define RTC_CATCH_END(device) \
-+// } catch (std::bad_alloc&) { \
-+// Device::process_error(device,RTC_ERROR_OUT_OF_MEMORY,"out of memory"); \
-+// } catch (rtcore_error& e) { \
-+// Device::process_error(device,e.error,e.what()); \
-+// } catch (std::exception& e) { \
-+// Device::process_error(device,RTC_ERROR_UNKNOWN,e.what()); \
-+// } catch (...) { \
-+// Device::process_error(device,RTC_ERROR_UNKNOWN,"unknown exception caught"); \
-+// }
+#define RTC_CATCH_END(device)
-
--#define RTC_CATCH_END2(scene) \
-- } catch (std::bad_alloc&) { \
-- Device* device = scene ? scene->device : nullptr; \
-- Device::process_error(device,RTC_ERROR_OUT_OF_MEMORY,"out of memory"); \
-- } catch (rtcore_error& e) { \
-- Device* device = scene ? scene->device : nullptr; \
-- Device::process_error(device,e.error,e.what()); \
-- } catch (std::exception& e) { \
-- Device* device = scene ? scene->device : nullptr; \
-- Device::process_error(device,RTC_ERROR_UNKNOWN,e.what()); \
-- } catch (...) { \
-- Device* device = scene ? scene->device : nullptr; \
-- Device::process_error(device,RTC_ERROR_UNKNOWN,"unknown exception caught"); \
-- }
-+// #define RTC_CATCH_END2(scene) \
-+// } catch (std::bad_alloc&) { \
-+// Device* device = scene ? scene->device : nullptr; \
-+// Device::process_error(device,RTC_ERROR_OUT_OF_MEMORY,"out of memory"); \
-+// } catch (rtcore_error& e) { \
-+// Device* device = scene ? scene->device : nullptr; \
-+// Device::process_error(device,e.error,e.what()); \
-+// } catch (std::exception& e) { \
-+// Device* device = scene ? scene->device : nullptr; \
-+// Device::process_error(device,RTC_ERROR_UNKNOWN,e.what()); \
-+// } catch (...) { \
-+// Device* device = scene ? scene->device : nullptr; \
-+// Device::process_error(device,RTC_ERROR_UNKNOWN,"unknown exception caught"); \
-+// }
+#define RTC_CATCH_END2(scene)
-
--#define RTC_CATCH_END2_FALSE(scene) \
-- } catch (std::bad_alloc&) { \
-- Device* device = scene ? scene->device : nullptr; \
-- Device::process_error(device,RTC_ERROR_OUT_OF_MEMORY,"out of memory"); \
-- return false; \
-- } catch (rtcore_error& e) { \
-- Device* device = scene ? scene->device : nullptr; \
-- Device::process_error(device,e.error,e.what()); \
-- return false; \
-- } catch (std::exception& e) { \
-- Device* device = scene ? scene->device : nullptr; \
-- Device::process_error(device,RTC_ERROR_UNKNOWN,e.what()); \
-- return false; \
-- } catch (...) { \
-- Device* device = scene ? scene->device : nullptr; \
-- Device::process_error(device,RTC_ERROR_UNKNOWN,"unknown exception caught"); \
-- return false; \
-- }
-+// #define RTC_CATCH_END2_FALSE(scene) \
-+// } catch (std::bad_alloc&) { \
-+// Device* device = scene ? scene->device : nullptr; \
-+// Device::process_error(device,RTC_ERROR_OUT_OF_MEMORY,"out of memory"); \
-+// return false; \
-+// } catch (rtcore_error& e) { \
-+// Device* device = scene ? scene->device : nullptr; \
-+// Device::process_error(device,e.error,e.what()); \
-+// return false; \
-+// } catch (std::exception& e) { \
-+// Device* device = scene ? scene->device : nullptr; \
-+// Device::process_error(device,RTC_ERROR_UNKNOWN,e.what()); \
-+// return false; \
-+// } catch (...) { \
-+// Device* device = scene ? scene->device : nullptr; \
-+// Device::process_error(device,RTC_ERROR_UNKNOWN,"unknown exception caught"); \
-+// return false; \
-+// }
+#define RTC_CATCH_END2_FALSE(scene) return false;
++
++#if 0
+ #define RTC_CATCH_BEGIN try {
+
+ #define RTC_CATCH_END(device) \
+@@ -71,6 +78,8 @@ namespace embree
+ Device::process_error(device,RTC_ERROR_UNKNOWN,"unknown exception caught"); \
+ return false; \
+ }
++#endif
+// -- GODOT end --
#define RTC_VERIFY_HANDLE(handle) \
if (handle == nullptr) { \
-@@ -97,28 +103,38 @@ namespace embree
+@@ -97,6 +106,8 @@ namespace embree
#define RTC_TRACE(x)
#endif
-- /*! used to throw embree API errors */
-- struct rtcore_error : public std::exception
-- {
-- __forceinline rtcore_error(RTCError error, const std::string& str)
-- : error(error), str(str) {}
--
-- ~rtcore_error() throw() {}
--
-- const char* what () const throw () {
-- return str.c_str();
-- }
--
-- RTCError error;
-- std::string str;
-- };
-+// -- GODOT begin --
-+// /*! used to throw embree API errors */
-+// struct rtcore_error : public std::exception
-+// {
-+// __forceinline rtcore_error(RTCError error, const std::string& str)
-+// : error(error), str(str) {}
-+//
-+// ~rtcore_error() throw() {}
-+//
-+// const char* what () const throw () {
-+// return str.c_str();
-+// }
-+//
-+// RTCError error;
-+// std::string str;
-+// };
-+// -- GODOT end --
++// -- GODOT start --
++#if 0
+ /*! used to throw embree API errors */
+ struct rtcore_error : public std::exception
+ {
+@@ -112,14 +123,18 @@ namespace embree
+ RTCError error;
+ std::string str;
+ };
++#endif
#if defined(DEBUG) // only report file and line in debug mode
-+ // -- GODOT begin --
-+ // #define throw_RTCError(error,str) \
-+ // throw rtcore_error(error,std::string(__FILE__) + " (" + toString(__LINE__) + "): " + std::string(str));
#define throw_RTCError(error,str) \
- throw rtcore_error(error,std::string(__FILE__) + " (" + toString(__LINE__) + "): " + std::string(str));
+ printf("%s (%d): %s", __FILE__, __LINE__, std::string(str).c_str()), abort();
-+ // -- GODOT end --
++ // throw rtcore_error(error,std::string(__FILE__) + " (" + toString(__LINE__) + "): " + std::string(str));
#else
-+ // -- GODOT begin --
-+ // #define throw_RTCError(error,str) \
-+ // throw rtcore_error(error,str);
#define throw_RTCError(error,str) \
- throw rtcore_error(error,str);
+ abort();
-+ // -- GODOT end --
++ // throw rtcore_error(error,str);
#endif
++// -- GODOT end --
#define RTC_BUILD_ARGUMENTS_HAS(settings,member) \
+ (settings.byteSize > (offsetof(RTCBuildArguments,member)+sizeof(settings.member)))
diff --git a/thirdparty/embree/kernels/common/scene.cpp b/thirdparty/embree/kernels/common/scene.cpp
-index 0149055f2c..408d7eae6f 100644
+index ad1916c54e..65d31d0f81 100644
--- a/thirdparty/embree/kernels/common/scene.cpp
+++ b/thirdparty/embree/kernels/common/scene.cpp
-@@ -792,16 +792,18 @@ namespace embree
+@@ -790,16 +790,18 @@ namespace embree
}
/* initiate build */
diff --git a/thirdparty/embree/patches/godot-changes-ubsan.patch b/thirdparty/embree/patches/godot-changes-ubsan.patch
deleted file mode 100644
index 1336246f0d..0000000000
--- a/thirdparty/embree/patches/godot-changes-ubsan.patch
+++ /dev/null
@@ -1,24 +0,0 @@
-diff --git a/thirdparty/embree/kernels/builders/primrefgen.cpp b/thirdparty/embree/kernels/builders/primrefgen.cpp
-index bb4fc81dfe..d279dc4993 100644
---- a/thirdparty/embree/kernels/builders/primrefgen.cpp
-+++ b/thirdparty/embree/kernels/builders/primrefgen.cpp
-@@ -184,6 +184,9 @@ namespace embree
-
- // special variants for grid meshes
-
-+// -- GODOT start --
-+#if defined(EMBREE_GEOMETRY_GRID)
-+// -- GODOT end --
- PrimInfo createPrimRefArrayGrids(Scene* scene, mvector<PrimRef>& prims, mvector<SubGridBuildData>& sgrids)
- {
- PrimInfo pinfo(empty);
-@@ -293,6 +296,9 @@ namespace embree
-
- return pinfo;
- }
-+// -- GODOT start --
-+#endif
-+// -- GODOT end --
-
- // ====================================================================================================
- // ====================================================================================================
diff --git a/thirdparty/enet/enet/godot.h b/thirdparty/enet/enet/godot.h
index faca1f21a0..1c60fdbb1f 100644
--- a/thirdparty/enet/enet/godot.h
+++ b/thirdparty/enet/enet/godot.h
@@ -1,32 +1,33 @@
-/*************************************************************************/
-/* godot.h */
-/*************************************************************************/
-/* This file is part of: */
-/* GODOT ENGINE */
-/* https://godotengine.org */
-/*************************************************************************/
-/* Copyright (c) 2007-2022 Juan Linietsky, Ariel Manzur. */
-/* Copyright (c) 2014-2022 Godot Engine contributors (cf. AUTHORS.md). */
-/* */
-/* Permission is hereby granted, free of charge, to any person obtaining */
-/* a copy of this software and associated documentation files (the */
-/* "Software"), to deal in the Software without restriction, including */
-/* without limitation the rights to use, copy, modify, merge, publish, */
-/* distribute, sublicense, and/or sell copies of the Software, and to */
-/* permit persons to whom the Software is furnished to do so, subject to */
-/* the following conditions: */
-/* */
-/* The above copyright notice and this permission notice shall be */
-/* included in all copies or substantial portions of the Software. */
-/* */
-/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
-/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
-/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
-/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
-/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
-/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
-/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
-/*************************************************************************/
+/**************************************************************************/
+/* godot.h */
+/**************************************************************************/
+/* This file is part of: */
+/* GODOT ENGINE */
+/* https://godotengine.org */
+/**************************************************************************/
+/* Copyright (c) 2014-present Godot Engine contributors (see AUTHORS.md). */
+/* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur. */
+/* */
+/* Permission is hereby granted, free of charge, to any person obtaining */
+/* a copy of this software and associated documentation files (the */
+/* "Software"), to deal in the Software without restriction, including */
+/* without limitation the rights to use, copy, modify, merge, publish, */
+/* distribute, sublicense, and/or sell copies of the Software, and to */
+/* permit persons to whom the Software is furnished to do so, subject to */
+/* the following conditions: */
+/* */
+/* The above copyright notice and this permission notice shall be */
+/* included in all copies or substantial portions of the Software. */
+/* */
+/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
+/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
+/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */
+/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
+/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
+/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
+/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
+/**************************************************************************/
+
/**
@file godot.h
@brief ENet Godot header
diff --git a/thirdparty/enet/godot.cpp b/thirdparty/enet/godot.cpp
index d2fcc4642e..47298dcf6a 100644
--- a/thirdparty/enet/godot.cpp
+++ b/thirdparty/enet/godot.cpp
@@ -1,32 +1,33 @@
-/*************************************************************************/
-/* godot.cpp */
-/*************************************************************************/
-/* This file is part of: */
-/* GODOT ENGINE */
-/* https://godotengine.org */
-/*************************************************************************/
-/* Copyright (c) 2007-2022 Juan Linietsky, Ariel Manzur. */
-/* Copyright (c) 2014-2022 Godot Engine contributors (cf. AUTHORS.md). */
-/* */
-/* Permission is hereby granted, free of charge, to any person obtaining */
-/* a copy of this software and associated documentation files (the */
-/* "Software"), to deal in the Software without restriction, including */
-/* without limitation the rights to use, copy, modify, merge, publish, */
-/* distribute, sublicense, and/or sell copies of the Software, and to */
-/* permit persons to whom the Software is furnished to do so, subject to */
-/* the following conditions: */
-/* */
-/* The above copyright notice and this permission notice shall be */
-/* included in all copies or substantial portions of the Software. */
-/* */
-/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
-/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
-/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
-/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
-/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
-/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
-/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
-/*************************************************************************/
+/**************************************************************************/
+/* godot.cpp */
+/**************************************************************************/
+/* This file is part of: */
+/* GODOT ENGINE */
+/* https://godotengine.org */
+/**************************************************************************/
+/* Copyright (c) 2014-present Godot Engine contributors (see AUTHORS.md). */
+/* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur. */
+/* */
+/* Permission is hereby granted, free of charge, to any person obtaining */
+/* a copy of this software and associated documentation files (the */
+/* "Software"), to deal in the Software without restriction, including */
+/* without limitation the rights to use, copy, modify, merge, publish, */
+/* distribute, sublicense, and/or sell copies of the Software, and to */
+/* permit persons to whom the Software is furnished to do so, subject to */
+/* the following conditions: */
+/* */
+/* The above copyright notice and this permission notice shall be */
+/* included in all copies or substantial portions of the Software. */
+/* */
+/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
+/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
+/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */
+/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
+/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
+/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
+/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
+/**************************************************************************/
+
/**
@file godot.cpp
@brief ENet Godot specific functions
diff --git a/thirdparty/freetype/src/gzip/adler32.c b/thirdparty/freetype/src/gzip/adler32.c
deleted file mode 100644
index aa032e1ddf..0000000000
--- a/thirdparty/freetype/src/gzip/adler32.c
+++ /dev/null
@@ -1,192 +0,0 @@
-/* adler32.c -- compute the Adler-32 checksum of a data stream
- * Copyright (C) 1995-2011, 2016 Mark Adler
- * For conditions of distribution and use, see copyright notice in zlib.h
- */
-
-/* @(#) $Id$ */
-
-#include "zutil.h"
-
-#ifndef Z_FREETYPE
-local uLong adler32_combine_ OF((uLong adler1, uLong adler2, z_off64_t len2));
-#endif
-
-#define BASE 65521U /* largest prime smaller than 65536 */
-#define NMAX 5552
-/* NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1 */
-
-#define DO1(buf,i) {adler += (buf)[i]; sum2 += adler;}
-#define DO2(buf,i) DO1(buf,i); DO1(buf,i+1);
-#define DO4(buf,i) DO2(buf,i); DO2(buf,i+2);
-#define DO8(buf,i) DO4(buf,i); DO4(buf,i+4);
-#define DO16(buf) DO8(buf,0); DO8(buf,8);
-
-/* use NO_DIVIDE if your processor does not do division in hardware --
- try it both ways to see which is faster */
-#ifdef NO_DIVIDE
-/* note that this assumes BASE is 65521, where 65536 % 65521 == 15
- (thank you to John Reiser for pointing this out) */
-# define CHOP(a) \
- do { \
- unsigned long tmp = a >> 16; \
- a &= 0xffffUL; \
- a += (tmp << 4) - tmp; \
- } while (0)
-# define MOD28(a) \
- do { \
- CHOP(a); \
- if (a >= BASE) a -= BASE; \
- } while (0)
-# define MOD(a) \
- do { \
- CHOP(a); \
- MOD28(a); \
- } while (0)
-# define MOD63(a) \
- do { /* this assumes a is not negative */ \
- z_off64_t tmp = a >> 32; \
- a &= 0xffffffffL; \
- a += (tmp << 8) - (tmp << 5) + tmp; \
- tmp = a >> 16; \
- a &= 0xffffL; \
- a += (tmp << 4) - tmp; \
- tmp = a >> 16; \
- a &= 0xffffL; \
- a += (tmp << 4) - tmp; \
- if (a >= BASE) a -= BASE; \
- } while (0)
-#else
-# define MOD(a) a %= BASE
-# define MOD28(a) a %= BASE
-# define MOD63(a) a %= BASE
-#endif
-
-/* ========================================================================= */
-uLong ZEXPORT adler32_z(
- uLong adler,
- const Bytef *buf,
- z_size_t len)
-{
- unsigned long sum2;
- unsigned n;
-
- /* split Adler-32 into component sums */
- sum2 = (adler >> 16) & 0xffff;
- adler &= 0xffff;
-
- /* in case user likes doing a byte at a time, keep it fast */
- if (len == 1) {
- adler += buf[0];
- if (adler >= BASE)
- adler -= BASE;
- sum2 += adler;
- if (sum2 >= BASE)
- sum2 -= BASE;
- return adler | (sum2 << 16);
- }
-
- /* initial Adler-32 value (deferred check for len == 1 speed) */
- if (buf == Z_NULL)
- return 1L;
-
- /* in case short lengths are provided, keep it somewhat fast */
- if (len < 16) {
- while (len--) {
- adler += *buf++;
- sum2 += adler;
- }
- if (adler >= BASE)
- adler -= BASE;
- MOD28(sum2); /* only added so many BASE's */
- return adler | (sum2 << 16);
- }
-
- /* do length NMAX blocks -- requires just one modulo operation */
- while (len >= NMAX) {
- len -= NMAX;
- n = NMAX / 16; /* NMAX is divisible by 16 */
- do {
- DO16(buf); /* 16 sums unrolled */
- buf += 16;
- } while (--n);
- MOD(adler);
- MOD(sum2);
- }
-
- /* do remaining bytes (less than NMAX, still just one modulo) */
- if (len) { /* avoid modulos if none remaining */
- while (len >= 16) {
- len -= 16;
- DO16(buf);
- buf += 16;
- }
- while (len--) {
- adler += *buf++;
- sum2 += adler;
- }
- MOD(adler);
- MOD(sum2);
- }
-
- /* return recombined sums */
- return adler | (sum2 << 16);
-}
-
-/* ========================================================================= */
-uLong ZEXPORT adler32(
- uLong adler,
- const Bytef *buf,
- uInt len)
-{
- return adler32_z(adler, buf, len);
-}
-
-#ifndef Z_FREETYPE
-
-/* ========================================================================= */
-local uLong adler32_combine_(
- uLong adler1,
- uLong adler2,
- z_off64_t len2)
-{
- unsigned long sum1;
- unsigned long sum2;
- unsigned rem;
-
- /* for negative len, return invalid adler32 as a clue for debugging */
- if (len2 < 0)
- return 0xffffffffUL;
-
- /* the derivation of this formula is left as an exercise for the reader */
- MOD63(len2); /* assumes len2 >= 0 */
- rem = (unsigned)len2;
- sum1 = adler1 & 0xffff;
- sum2 = rem * sum1;
- MOD(sum2);
- sum1 += (adler2 & 0xffff) + BASE - 1;
- sum2 += ((adler1 >> 16) & 0xffff) + ((adler2 >> 16) & 0xffff) + BASE - rem;
- if (sum1 >= BASE) sum1 -= BASE;
- if (sum1 >= BASE) sum1 -= BASE;
- if (sum2 >= ((unsigned long)BASE << 1)) sum2 -= ((unsigned long)BASE << 1);
- if (sum2 >= BASE) sum2 -= BASE;
- return sum1 | (sum2 << 16);
-}
-
-/* ========================================================================= */
-uLong ZEXPORT adler32_combine(
- uLong adler1,
- uLong adler2,
- z_off_t len2)
-{
- return adler32_combine_(adler1, adler2, len2);
-}
-
-uLong ZEXPORT adler32_combine64(
- uLong adler1,
- uLong adler2,
- z_off64_t len2)
-{
- return adler32_combine_(adler1, adler2, len2);
-}
-
-#endif /* !Z_FREETYPE */
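For reference, the checksum the deleted adler32.c computes is just two running sums modulo 65521; the file above defers the modulo to once per NMAX-byte block and unrolls 16 additions at a time for speed. A naive sketch (illustrative name, not the deleted file's API) that takes the modulo on every byte:

#include <stddef.h>

#define ADLER_BASE 65521UL   /* largest prime smaller than 65536, as above */

static unsigned long adler32_naive(unsigned long adler,
                                   const unsigned char *buf, size_t len)
{
    unsigned long s1 = adler & 0xffff;          /* sum of bytes            */
    unsigned long s2 = (adler >> 16) & 0xffff;  /* sum of the running sums */

    while (len--) {
        s1 = (s1 + *buf++) % ADLER_BASE;        /* one modulo per byte: slow but clear */
        s2 = (s2 + s1) % ADLER_BASE;
    }
    return (s2 << 16) | s1;                     /* recombine: high word s2, low word s1 */
}

Called with an initial adler of 1 over the whole buffer, this should agree with adler32(1L, buf, len) from the deleted file.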
diff --git a/thirdparty/freetype/src/gzip/crc32.c b/thirdparty/freetype/src/gzip/crc32.c
deleted file mode 100644
index 2ddc32d1fb..0000000000
--- a/thirdparty/freetype/src/gzip/crc32.c
+++ /dev/null
@@ -1,1116 +0,0 @@
-/* crc32.c -- compute the CRC-32 of a data stream
- * Copyright (C) 1995-2022 Mark Adler
- * For conditions of distribution and use, see copyright notice in zlib.h
- *
- * This interleaved implementation of a CRC makes use of pipelined multiple
- * arithmetic-logic units, commonly found in modern CPU cores. It is due to
- * Kadatch and Jenkins (2010). See doc/crc-doc.1.0.pdf in this distribution.
- */
-
-/* @(#) $Id$ */
-
-/*
- Note on the use of DYNAMIC_CRC_TABLE: there is no mutex or semaphore
- protection on the static variables used to control the first-use generation
- of the crc tables. Therefore, if you #define DYNAMIC_CRC_TABLE, you should
- first call get_crc_table() to initialize the tables before allowing more than
- one thread to use crc32().
-
- MAKECRCH can be #defined to write out crc32.h. A main() routine is also
- produced, so that this one source file can be compiled to an executable.
- */
-
-#ifdef MAKECRCH
-# include <stdio.h>
-# ifndef DYNAMIC_CRC_TABLE
-# define DYNAMIC_CRC_TABLE
-# endif /* !DYNAMIC_CRC_TABLE */
-#endif /* MAKECRCH */
-
-#include "zutil.h" /* for Z_U4, Z_U8, z_crc_t, and FAR definitions */
-
- /*
- A CRC of a message is computed on N braids of words in the message, where
- each word consists of W bytes (4 or 8). If N is 3, for example, then three
- running sparse CRCs are calculated respectively on each braid, at these
- indices in the array of words: 0, 3, 6, ..., 1, 4, 7, ..., and 2, 5, 8, ...
- This is done starting at a word boundary, and continues until as many blocks
- of N * W bytes as are available have been processed. The results are combined
- into a single CRC at the end. For this code, N must be in the range 1..6 and
- W must be 4 or 8. The upper limit on N can be increased if desired by adding
- more #if blocks, extending the patterns apparent in the code. In addition,
- crc32.h would need to be regenerated, if the maximum N value is increased.
-
- N and W are chosen empirically by benchmarking the execution time on a given
- processor. The choices for N and W below were based on testing on Intel Kaby
- Lake i7, AMD Ryzen 7, ARM Cortex-A57, Sparc64-VII, PowerPC POWER9, and MIPS64
- Octeon II processors. The Intel, AMD, and ARM processors were all fastest
- with N=5, W=8. The Sparc, PowerPC, and MIPS64 were all fastest at N=5, W=4.
- They were all tested with either gcc or clang, all using the -O3 optimization
- level. Your mileage may vary.
- */
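A tiny, hedged illustration (not part of the deleted file) of how the braiding described above distributes the words of a block across N interleaved lanes, here with N = 3:

#include <stdio.h>

int main(void)
{
    enum { N = 3, WORDS = 12 };   /* one block here is N lanes x 4 words each */

    for (int lane = 0; lane < N; lane++) {
        printf("lane %d gets words:", lane);
        for (int i = lane; i < WORDS; i += N)   /* indices lane, lane+N, lane+2N, ... */
            printf(" %d", i);
        printf("\n");
    }
    return 0;
}

Each lane keeps its own sparse CRC, and the lanes are merged into a single CRC at the end of the block, which is what crc_word() and the crc_braid_table lookups further down implement.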
-
-/* Define N */
-#ifdef Z_TESTN
-# define N Z_TESTN
-#else
-# define N 5
-#endif
-#if N < 1 || N > 6
-# error N must be in 1..6
-#endif
-
-/*
- z_crc_t must be at least 32 bits. z_word_t must be at least as long as
- z_crc_t. It is assumed here that z_word_t is either 32 bits or 64 bits, and
- that bytes are eight bits.
- */
-
-/*
- Define W and the associated z_word_t type. If W is not defined, then a
- braided calculation is not used, and the associated tables and code are not
- compiled.
- */
-#ifdef Z_TESTW
-# if Z_TESTW-1 != -1
-# define W Z_TESTW
-# endif
-#else
-# ifdef MAKECRCH
-# define W 8 /* required for MAKECRCH */
-# else
-# if defined(__x86_64__) || defined(__aarch64__)
-# define W 8
-# else
-# define W 4
-# endif
-# endif
-#endif
-#ifdef W
-# if W == 8 && defined(Z_U8)
- typedef Z_U8 z_word_t;
-# elif defined(Z_U4)
-# undef W
-# define W 4
- typedef Z_U4 z_word_t;
-# else
-# undef W
-# endif
-#endif
-
-/* Local functions. */
-local z_crc_t multmodp OF((z_crc_t a, z_crc_t b));
-local z_crc_t x2nmodp OF((z_off64_t n, unsigned k));
-
-/* If available, use the ARM processor CRC32 instruction. */
-#if defined(__aarch64__) && defined(__ARM_FEATURE_CRC32) && W == 8
-# define ARMCRC32
-#endif
-
-#if defined(W) && (!defined(ARMCRC32) || defined(DYNAMIC_CRC_TABLE))
-/*
- Swap the bytes in a z_word_t to convert between little and big endian. Any
- self-respecting compiler will optimize this to a single machine byte-swap
- instruction, if one is available. This assumes that word_t is either 32 bits
- or 64 bits.
- */
-local z_word_t byte_swap(
- z_word_t word)
-{
-# if W == 8
- return
- (word & 0xff00000000000000) >> 56 |
- (word & 0xff000000000000) >> 40 |
- (word & 0xff0000000000) >> 24 |
- (word & 0xff00000000) >> 8 |
- (word & 0xff000000) << 8 |
- (word & 0xff0000) << 24 |
- (word & 0xff00) << 40 |
- (word & 0xff) << 56;
-# else /* W == 4 */
- return
- (word & 0xff000000) >> 24 |
- (word & 0xff0000) >> 8 |
- (word & 0xff00) << 8 |
- (word & 0xff) << 24;
-# endif
-}
-#endif
-
-/* CRC polynomial. */
-#define POLY 0xedb88320 /* p(x) reflected, with x^32 implied */
-
-#ifdef DYNAMIC_CRC_TABLE
-
-local z_crc_t FAR crc_table[256];
-local z_crc_t FAR x2n_table[32];
-local void make_crc_table OF((void));
-#ifdef W
- local z_word_t FAR crc_big_table[256];
- local z_crc_t FAR crc_braid_table[W][256];
- local z_word_t FAR crc_braid_big_table[W][256];
- local void braid OF((z_crc_t [][256], z_word_t [][256], int, int));
-#endif
-#ifdef MAKECRCH
- local void write_table OF((FILE *, const z_crc_t FAR *, int));
- local void write_table32hi OF((FILE *, const z_word_t FAR *, int));
- local void write_table64 OF((FILE *, const z_word_t FAR *, int));
-#endif /* MAKECRCH */
-
-/*
- Define a once() function depending on the availability of atomics. If this is
- compiled with DYNAMIC_CRC_TABLE defined, and if CRCs will be computed in
- multiple threads, and if atomics are not available, then get_crc_table() must
- be called to initialize the tables and must return before any threads are
- allowed to compute or combine CRCs.
- */
-
-/* Definition of once functionality. */
-typedef struct once_s once_t;
-local void once OF((once_t *, void (*)(void)));
-
-/* Check for the availability of atomics. */
-#if defined(__STDC__) && __STDC_VERSION__ >= 201112L && \
- !defined(__STDC_NO_ATOMICS__)
-
-#include <stdatomic.h>
-
-/* Structure for once(), which must be initialized with ONCE_INIT. */
-struct once_s {
- atomic_flag begun;
- atomic_int done;
-};
-#define ONCE_INIT {ATOMIC_FLAG_INIT, 0}
-
-/*
- Run the provided init() function exactly once, even if multiple threads
- invoke once() at the same time. The state must be a once_t initialized with
- ONCE_INIT.
- */
-local void once(state, init)
- once_t *state;
- void (*init)(void);
-{
- if (!atomic_load(&state->done)) {
- if (atomic_flag_test_and_set(&state->begun))
- while (!atomic_load(&state->done))
- ;
- else {
- init();
- atomic_store(&state->done, 1);
- }
- }
-}
-
-#else /* no atomics */
-
-/* Structure for once(), which must be initialized with ONCE_INIT. */
-struct once_s {
- volatile int begun;
- volatile int done;
-};
-#define ONCE_INIT {0, 0}
-
-/* Test and set. Alas, not atomic, but tries to minimize the period of
- vulnerability. */
-local int test_and_set OF((int volatile *));
-local int test_and_set(
- int volatile *flag)
-{
- int was;
-
- was = *flag;
- *flag = 1;
- return was;
-}
-
-/* Run the provided init() function once. This is not thread-safe. */
-local void once(state, init)
- once_t *state;
- void (*init)(void);
-{
- if (!state->done) {
- if (test_and_set(&state->begun))
- while (!state->done)
- ;
- else {
- init();
- state->done = 1;
- }
- }
-}
-
-#endif
-
-/* State for once(). */
-local once_t made = ONCE_INIT;
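A hedged sketch of the intended call pattern for once() above, reusing once_t, ONCE_INIT and once() from this file; the init function and caller names are illustrative:

static once_t table_state = ONCE_INIT;

static void init_tables(void)
{
    /* stand-in for make_crc_table(): build whatever lazily initialized state is needed */
}

void use_tables(void)
{
    /* Safe from any number of threads when atomics are available; otherwise,
       as the comment above notes, force initialization (e.g. via get_crc_table())
       once before any threads are started. */
    once(&table_state, init_tables);
    /* ... read the lazily built tables here ... */
}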
-
-/*
- Generate tables for a byte-wise 32-bit CRC calculation on the polynomial:
- x^32+x^26+x^23+x^22+x^16+x^12+x^11+x^10+x^8+x^7+x^5+x^4+x^2+x+1.
-
- Polynomials over GF(2) are represented in binary, one bit per coefficient,
- with the lowest powers in the most significant bit. Then adding polynomials
- is just exclusive-or, and multiplying a polynomial by x is a right shift by
- one. If we call the above polynomial p, and represent a byte as the
- polynomial q, also with the lowest power in the most significant bit (so the
- byte 0xb1 is the polynomial x^7+x^3+x^2+1), then the CRC is (q*x^32) mod p,
- where a mod b means the remainder after dividing a by b.
-
- This calculation is done using the shift-register method of multiplying and
- taking the remainder. The register is initialized to zero, and for each
- incoming bit, x^32 is added mod p to the register if the bit is a one (where
- x^32 mod p is p+x^32 = x^26+...+1), and the register is multiplied mod p by x
- (which is shifting right by one and adding x^32 mod p if the bit shifted out
- is a one). We start with the highest power (least significant bit) of q and
- repeat for all eight bits of q.
-
- The table is simply the CRC of all possible eight bit values. This is all the
- information needed to generate CRCs on data a byte at a time for all
- combinations of CRC register values and incoming bytes.
- */
-
-local void make_crc_table()
-{
- unsigned i, j, n;
- z_crc_t p;
-
- /* initialize the CRC of bytes tables */
- for (i = 0; i < 256; i++) {
- p = i;
- for (j = 0; j < 8; j++)
- p = p & 1 ? (p >> 1) ^ POLY : p >> 1;
- crc_table[i] = p;
-#ifdef W
- crc_big_table[i] = byte_swap(p);
-#endif
- }
-
- /* initialize the x^2^n mod p(x) table */
- p = (z_crc_t)1 << 30; /* x^1 */
- x2n_table[0] = p;
- for (n = 1; n < 32; n++)
- x2n_table[n] = p = multmodp(p, p);
-
-#ifdef W
- /* initialize the braiding tables -- needs x2n_table[] */
- braid(crc_braid_table, crc_braid_big_table, N, W);
-#endif
-
-#ifdef MAKECRCH
- {
- /*
- The crc32.h header file contains tables for both 32-bit and 64-bit
- z_word_t's, and so requires a 64-bit type be available. In that case,
- z_word_t must be defined to be 64-bits. This code then also generates
- and writes out the tables for the case that z_word_t is 32 bits.
- */
-#if !defined(W) || W != 8
-# error Need a 64-bit integer type in order to generate crc32.h.
-#endif
- FILE *out;
- int k, n;
- z_crc_t ltl[8][256];
- z_word_t big[8][256];
-
- out = fopen("crc32.h", "w");
- if (out == NULL) return;
-
- /* write out little-endian CRC table to crc32.h */
- fprintf(out,
- "/* crc32.h -- tables for rapid CRC calculation\n"
- " * Generated automatically by crc32.c\n */\n"
- "\n"
- "local const z_crc_t FAR crc_table[] = {\n"
- " ");
- write_table(out, crc_table, 256);
- fprintf(out,
- "};\n");
-
- /* write out big-endian CRC table for 64-bit z_word_t to crc32.h */
- fprintf(out,
- "\n"
- "#ifdef W\n"
- "\n"
- "#if W == 8\n"
- "\n"
- "local const z_word_t FAR crc_big_table[] = {\n"
- " ");
- write_table64(out, crc_big_table, 256);
- fprintf(out,
- "};\n");
-
- /* write out big-endian CRC table for 32-bit z_word_t to crc32.h */
- fprintf(out,
- "\n"
- "#else /* W == 4 */\n"
- "\n"
- "local const z_word_t FAR crc_big_table[] = {\n"
- " ");
- write_table32hi(out, crc_big_table, 256);
- fprintf(out,
- "};\n"
- "\n"
- "#endif\n");
-
- /* write out braid tables for each value of N */
- for (n = 1; n <= 6; n++) {
- fprintf(out,
- "\n"
- "#if N == %d\n", n);
-
- /* compute braid tables for this N and 64-bit word_t */
- braid(ltl, big, n, 8);
-
- /* write out braid tables for 64-bit z_word_t to crc32.h */
- fprintf(out,
- "\n"
- "#if W == 8\n"
- "\n"
- "local const z_crc_t FAR crc_braid_table[][256] = {\n");
- for (k = 0; k < 8; k++) {
- fprintf(out, " {");
- write_table(out, ltl[k], 256);
- fprintf(out, "}%s", k < 7 ? ",\n" : "");
- }
- fprintf(out,
- "};\n"
- "\n"
- "local const z_word_t FAR crc_braid_big_table[][256] = {\n");
- for (k = 0; k < 8; k++) {
- fprintf(out, " {");
- write_table64(out, big[k], 256);
- fprintf(out, "}%s", k < 7 ? ",\n" : "");
- }
- fprintf(out,
- "};\n");
-
- /* compute braid tables for this N and 32-bit word_t */
- braid(ltl, big, n, 4);
-
- /* write out braid tables for 32-bit z_word_t to crc32.h */
- fprintf(out,
- "\n"
- "#else /* W == 4 */\n"
- "\n"
- "local const z_crc_t FAR crc_braid_table[][256] = {\n");
- for (k = 0; k < 4; k++) {
- fprintf(out, " {");
- write_table(out, ltl[k], 256);
- fprintf(out, "}%s", k < 3 ? ",\n" : "");
- }
- fprintf(out,
- "};\n"
- "\n"
- "local const z_word_t FAR crc_braid_big_table[][256] = {\n");
- for (k = 0; k < 4; k++) {
- fprintf(out, " {");
- write_table32hi(out, big[k], 256);
- fprintf(out, "}%s", k < 3 ? ",\n" : "");
- }
- fprintf(out,
- "};\n"
- "\n"
- "#endif\n"
- "\n"
- "#endif\n");
- }
- fprintf(out,
- "\n"
- "#endif\n");
-
- /* write out zeros operator table to crc32.h */
- fprintf(out,
- "\n"
- "local const z_crc_t FAR x2n_table[] = {\n"
- " ");
- write_table(out, x2n_table, 32);
- fprintf(out,
- "};\n");
- fclose(out);
- }
-#endif /* MAKECRCH */
-}
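The shift-register construction above is easier to see in a stand-alone form. A self-contained sketch (names are illustrative, not the deleted file's) that builds the same 256-entry table and then consumes input one byte at a time with the crc = (crc >> 8) ^ table[(crc ^ byte) & 0xff] update used by the non-braided paths later in this file:

#include <stddef.h>
#include <stdint.h>

#define CRC32_POLY 0xedb88320u   /* p(x) reflected, with x^32 implied, as above */

static uint32_t table[256];

static void build_table(void)
{
    for (uint32_t i = 0; i < 256; i++) {
        uint32_t p = i;
        for (int j = 0; j < 8; j++)
            p = (p & 1) ? (p >> 1) ^ CRC32_POLY : p >> 1;   /* multiply by x mod p(x) */
        table[i] = p;
    }
}

static uint32_t crc32_bytewise(uint32_t crc, const unsigned char *buf, size_t len)
{
    crc ^= 0xffffffffu;                                      /* pre-condition */
    while (len--)
        crc = (crc >> 8) ^ table[(crc ^ *buf++) & 0xff];     /* one byte per step */
    return crc ^ 0xffffffffu;                                /* post-condition */
}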
-
-#ifdef MAKECRCH
-
-/*
- Write the 32-bit values in table[0..k-1] to out, five per line in
- hexadecimal separated by commas.
- */
-local void write_table(
- FILE *out,
- const z_crc_t FAR *table,
- int k)
-{
- int n;
-
- for (n = 0; n < k; n++)
- fprintf(out, "%s0x%08lx%s", n == 0 || n % 5 ? "" : " ",
- (unsigned long)(table[n]),
- n == k - 1 ? "" : (n % 5 == 4 ? ",\n" : ", "));
-}
-
-/*
- Write the high 32-bits of each value in table[0..k-1] to out, five per line
- in hexadecimal separated by commas.
- */
-local void write_table32hi(
- FILE *out,
- const z_word_t FAR *table,
- int k)
-{
- int n;
-
- for (n = 0; n < k; n++)
- fprintf(out, "%s0x%08lx%s", n == 0 || n % 5 ? "" : " ",
- (unsigned long)(table[n] >> 32),
- n == k - 1 ? "" : (n % 5 == 4 ? ",\n" : ", "));
-}
-
-/*
- Write the 64-bit values in table[0..k-1] to out, three per line in
- hexadecimal separated by commas. This assumes that if there is a 64-bit
- type, then there is also a long long integer type, and it is at least 64
- bits. If not, then the type cast and format string can be adjusted
- accordingly.
- */
-local void write_table64(
- FILE *out,
- const z_word_t FAR *table,
- int k)
-{
- int n;
-
- for (n = 0; n < k; n++)
- fprintf(out, "%s0x%016llx%s", n == 0 || n % 3 ? "" : " ",
- (unsigned long long)(table[n]),
- n == k - 1 ? "" : (n % 3 == 2 ? ",\n" : ", "));
-}
-
-/* Actually do the deed. */
-int main()
-{
- make_crc_table();
- return 0;
-}
-
-#endif /* MAKECRCH */
-
-#ifdef W
-/*
- Generate the little and big-endian braid tables for the given n and z_word_t
- size w. Each array must have room for w blocks of 256 elements.
- */
-local void braid(ltl, big, n, w)
- z_crc_t ltl[][256];
- z_word_t big[][256];
- int n;
- int w;
-{
- int k;
- z_crc_t i, p, q;
- for (k = 0; k < w; k++) {
- p = x2nmodp((n * w + 3 - k) << 3, 0);
- ltl[k][0] = 0;
- big[w - 1 - k][0] = 0;
- for (i = 1; i < 256; i++) {
- ltl[k][i] = q = multmodp(i << 24, p);
- big[w - 1 - k][i] = byte_swap(q);
- }
- }
-}
-#endif
-
-#else /* !DYNAMIC_CRC_TABLE */
-/* ========================================================================
- * Tables for byte-wise and braided CRC-32 calculations, and a table of powers
- * of x for combining CRC-32s, all made by make_crc_table().
- */
-#include "crc32.h"
-#endif /* DYNAMIC_CRC_TABLE */
-
-/* ========================================================================
- * Routines used for CRC calculation. Some are also required for the table
- * generation above.
- */
-
-/*
- Return a(x) multiplied by b(x) modulo p(x), where p(x) is the CRC polynomial,
- reflected. For speed, this requires that a not be zero.
- */
-local z_crc_t multmodp(
- z_crc_t a,
- z_crc_t b)
-{
- z_crc_t m, p;
-
- m = (z_crc_t)1 << 31;
- p = 0;
- for (;;) {
- if (a & m) {
- p ^= b;
- if ((a & (m - 1)) == 0)
- break;
- }
- m >>= 1;
- b = b & 1 ? (b >> 1) ^ POLY : b >> 1;
- }
- return p;
-}
-
-/*
- Return x^(n * 2^k) modulo p(x). Requires that x2n_table[] has been
- initialized.
- */
-local z_crc_t x2nmodp(
- z_off64_t n,
- unsigned k)
-{
- z_crc_t p;
-
- p = (z_crc_t)1 << 31; /* x^0 == 1 */
- while (n) {
- if (n & 1)
- p = multmodp(x2n_table[k & 31], p);
- n >>= 1;
- k++;
- }
- return p;
-}
-
-/* =========================================================================
- * This function can be used by asm versions of crc32(), and to force the
- * generation of the CRC tables in a threaded application.
- */
-const z_crc_t FAR * ZEXPORT get_crc_table()
-{
-#ifdef DYNAMIC_CRC_TABLE
- once(&made, make_crc_table);
-#endif /* DYNAMIC_CRC_TABLE */
- return (const z_crc_t FAR *)crc_table;
-}
-
-/* =========================================================================
- * Use ARM machine instructions if available. This will compute the CRC about
- * ten times faster than the braided calculation. This code does not check for
- * the presence of the CRC instruction at run time. __ARM_FEATURE_CRC32 will
- * only be defined if the compilation specifies an ARM processor architecture
- * that has the instructions. For example, compiling with -march=armv8.1-a or
- * -march=armv8-a+crc, or -march=native if the compile machine has the crc32
- * instructions.
- */
-#ifdef ARMCRC32
-
-/*
- Constants empirically determined to maximize speed. These values are from
- measurements on a Cortex-A57. Your mileage may vary.
- */
-#define Z_BATCH 3990 /* number of words in a batch */
-#define Z_BATCH_ZEROS 0xa10d3d0c /* computed from Z_BATCH = 3990 */
-#define Z_BATCH_MIN 800 /* fewest words in a final batch */
-
-unsigned long ZEXPORT crc32_z(
- unsigned long crc,
- const unsigned char FAR *buf,
- z_size_t len)
-{
- z_crc_t val;
- z_word_t crc1, crc2;
- const z_word_t *word;
- z_word_t val0, val1, val2;
- z_size_t last, last2, i;
- z_size_t num;
-
- /* Return initial CRC, if requested. */
- if (buf == Z_NULL) return 0;
-
-#ifdef DYNAMIC_CRC_TABLE
- once(&made, make_crc_table);
-#endif /* DYNAMIC_CRC_TABLE */
-
- /* Pre-condition the CRC */
- crc ^= 0xffffffff;
-
- /* Compute the CRC up to a word boundary. */
- while (len && ((z_size_t)buf & 7) != 0) {
- len--;
- val = *buf++;
- __asm__ volatile("crc32b %w0, %w0, %w1" : "+r"(crc) : "r"(val));
- }
-
- /* Prepare to compute the CRC on full 64-bit words word[0..num-1]. */
- word = (z_word_t const *)buf;
- num = len >> 3;
- len &= 7;
-
- /* Do three interleaved CRCs to realize the throughput of one crc32x
-       instruction per cycle. Each CRC is calculated on Z_BATCH words. The three
- CRCs are combined into a single CRC after each set of batches. */
- while (num >= 3 * Z_BATCH) {
- crc1 = 0;
- crc2 = 0;
- for (i = 0; i < Z_BATCH; i++) {
- val0 = word[i];
- val1 = word[i + Z_BATCH];
- val2 = word[i + 2 * Z_BATCH];
- __asm__ volatile("crc32x %w0, %w0, %x1" : "+r"(crc) : "r"(val0));
- __asm__ volatile("crc32x %w0, %w0, %x1" : "+r"(crc1) : "r"(val1));
- __asm__ volatile("crc32x %w0, %w0, %x1" : "+r"(crc2) : "r"(val2));
- }
- word += 3 * Z_BATCH;
- num -= 3 * Z_BATCH;
- crc = multmodp(Z_BATCH_ZEROS, crc) ^ crc1;
- crc = multmodp(Z_BATCH_ZEROS, crc) ^ crc2;
- }
-
- /* Do one last smaller batch with the remaining words, if there are enough
- to pay for the combination of CRCs. */
- last = num / 3;
- if (last >= Z_BATCH_MIN) {
- last2 = last << 1;
- crc1 = 0;
- crc2 = 0;
- for (i = 0; i < last; i++) {
- val0 = word[i];
- val1 = word[i + last];
- val2 = word[i + last2];
- __asm__ volatile("crc32x %w0, %w0, %x1" : "+r"(crc) : "r"(val0));
- __asm__ volatile("crc32x %w0, %w0, %x1" : "+r"(crc1) : "r"(val1));
- __asm__ volatile("crc32x %w0, %w0, %x1" : "+r"(crc2) : "r"(val2));
- }
- word += 3 * last;
- num -= 3 * last;
- val = x2nmodp(last, 6);
- crc = multmodp(val, crc) ^ crc1;
- crc = multmodp(val, crc) ^ crc2;
- }
-
- /* Compute the CRC on any remaining words. */
- for (i = 0; i < num; i++) {
- val0 = word[i];
- __asm__ volatile("crc32x %w0, %w0, %x1" : "+r"(crc) : "r"(val0));
- }
- word += num;
-
- /* Complete the CRC on any remaining bytes. */
- buf = (const unsigned char FAR *)word;
- while (len) {
- len--;
- val = *buf++;
- __asm__ volatile("crc32b %w0, %w0, %w1" : "+r"(crc) : "r"(val));
- }
-
- /* Return the CRC, post-conditioned. */
- return crc ^ 0xffffffff;
-}
-
-#else
-
-#ifdef W
-
-/*
- Return the CRC of the W bytes in the word_t data, taking the
- least-significant byte of the word as the first byte of data, without any pre
- or post conditioning. This is used to combine the CRCs of each braid.
- */
-local z_crc_t crc_word(
- z_word_t data)
-{
- int k;
- for (k = 0; k < W; k++)
- data = (data >> 8) ^ crc_table[data & 0xff];
- return (z_crc_t)data;
-}
-
-local z_word_t crc_word_big(
- z_word_t data)
-{
- int k;
- for (k = 0; k < W; k++)
- data = (data << 8) ^
- crc_big_table[(data >> ((W - 1) << 3)) & 0xff];
- return data;
-}
-
-#endif
-
-/* ========================================================================= */
-unsigned long ZEXPORT crc32_z(
- unsigned long crc,
- const unsigned char FAR *buf,
- z_size_t len)
-{
- /* Return initial CRC, if requested. */
- if (buf == Z_NULL) return 0;
-
-#ifdef DYNAMIC_CRC_TABLE
- once(&made, make_crc_table);
-#endif /* DYNAMIC_CRC_TABLE */
-
- /* Pre-condition the CRC */
- crc ^= 0xffffffff;
-
-#ifdef W
-
- /* If provided enough bytes, do a braided CRC calculation. */
- if (len >= N * W + W - 1) {
- z_size_t blks;
- z_word_t const *words;
- unsigned endian;
- int k;
-
- /* Compute the CRC up to a z_word_t boundary. */
- while (len && ((z_size_t)buf & (W - 1)) != 0) {
- len--;
- crc = (crc >> 8) ^ crc_table[(crc ^ *buf++) & 0xff];
- }
-
- /* Compute the CRC on as many N z_word_t blocks as are available. */
- blks = len / (N * W);
- len -= blks * N * W;
- words = (z_word_t const *)buf;
-
- /* Do endian check at execution time instead of compile time, since ARM
- processors can change the endianess at execution time. If the
- compiler knows what the endianess will be, it can optimize out the
- check and the unused branch. */
- endian = 1;
- if (*(unsigned char *)&endian) {
- /* Little endian. */
-
- z_crc_t crc0;
- z_word_t word0;
-#if N > 1
- z_crc_t crc1;
- z_word_t word1;
-#if N > 2
- z_crc_t crc2;
- z_word_t word2;
-#if N > 3
- z_crc_t crc3;
- z_word_t word3;
-#if N > 4
- z_crc_t crc4;
- z_word_t word4;
-#if N > 5
- z_crc_t crc5;
- z_word_t word5;
-#endif
-#endif
-#endif
-#endif
-#endif
-
- /* Initialize the CRC for each braid. */
- crc0 = crc;
-#if N > 1
- crc1 = 0;
-#if N > 2
- crc2 = 0;
-#if N > 3
- crc3 = 0;
-#if N > 4
- crc4 = 0;
-#if N > 5
- crc5 = 0;
-#endif
-#endif
-#endif
-#endif
-#endif
-
- /*
- Process the first blks-1 blocks, computing the CRCs on each braid
- independently.
- */
- while (--blks) {
- /* Load the word for each braid into registers. */
- word0 = crc0 ^ words[0];
-#if N > 1
- word1 = crc1 ^ words[1];
-#if N > 2
- word2 = crc2 ^ words[2];
-#if N > 3
- word3 = crc3 ^ words[3];
-#if N > 4
- word4 = crc4 ^ words[4];
-#if N > 5
- word5 = crc5 ^ words[5];
-#endif
-#endif
-#endif
-#endif
-#endif
- words += N;
-
- /* Compute and update the CRC for each word. The loop should
- get unrolled. */
- crc0 = crc_braid_table[0][word0 & 0xff];
-#if N > 1
- crc1 = crc_braid_table[0][word1 & 0xff];
-#if N > 2
- crc2 = crc_braid_table[0][word2 & 0xff];
-#if N > 3
- crc3 = crc_braid_table[0][word3 & 0xff];
-#if N > 4
- crc4 = crc_braid_table[0][word4 & 0xff];
-#if N > 5
- crc5 = crc_braid_table[0][word5 & 0xff];
-#endif
-#endif
-#endif
-#endif
-#endif
- for (k = 1; k < W; k++) {
- crc0 ^= crc_braid_table[k][(word0 >> (k << 3)) & 0xff];
-#if N > 1
- crc1 ^= crc_braid_table[k][(word1 >> (k << 3)) & 0xff];
-#if N > 2
- crc2 ^= crc_braid_table[k][(word2 >> (k << 3)) & 0xff];
-#if N > 3
- crc3 ^= crc_braid_table[k][(word3 >> (k << 3)) & 0xff];
-#if N > 4
- crc4 ^= crc_braid_table[k][(word4 >> (k << 3)) & 0xff];
-#if N > 5
- crc5 ^= crc_braid_table[k][(word5 >> (k << 3)) & 0xff];
-#endif
-#endif
-#endif
-#endif
-#endif
- }
- }
-
- /*
- Process the last block, combining the CRCs of the N braids at the
- same time.
- */
- crc = crc_word(crc0 ^ words[0]);
-#if N > 1
- crc = crc_word(crc1 ^ words[1] ^ crc);
-#if N > 2
- crc = crc_word(crc2 ^ words[2] ^ crc);
-#if N > 3
- crc = crc_word(crc3 ^ words[3] ^ crc);
-#if N > 4
- crc = crc_word(crc4 ^ words[4] ^ crc);
-#if N > 5
- crc = crc_word(crc5 ^ words[5] ^ crc);
-#endif
-#endif
-#endif
-#endif
-#endif
- words += N;
- }
- else {
- /* Big endian. */
-
- z_word_t crc0, word0, comb;
-#if N > 1
- z_word_t crc1, word1;
-#if N > 2
- z_word_t crc2, word2;
-#if N > 3
- z_word_t crc3, word3;
-#if N > 4
- z_word_t crc4, word4;
-#if N > 5
- z_word_t crc5, word5;
-#endif
-#endif
-#endif
-#endif
-#endif
-
- /* Initialize the CRC for each braid. */
- crc0 = byte_swap(crc);
-#if N > 1
- crc1 = 0;
-#if N > 2
- crc2 = 0;
-#if N > 3
- crc3 = 0;
-#if N > 4
- crc4 = 0;
-#if N > 5
- crc5 = 0;
-#endif
-#endif
-#endif
-#endif
-#endif
-
- /*
- Process the first blks-1 blocks, computing the CRCs on each braid
- independently.
- */
- while (--blks) {
- /* Load the word for each braid into registers. */
- word0 = crc0 ^ words[0];
-#if N > 1
- word1 = crc1 ^ words[1];
-#if N > 2
- word2 = crc2 ^ words[2];
-#if N > 3
- word3 = crc3 ^ words[3];
-#if N > 4
- word4 = crc4 ^ words[4];
-#if N > 5
- word5 = crc5 ^ words[5];
-#endif
-#endif
-#endif
-#endif
-#endif
- words += N;
-
- /* Compute and update the CRC for each word. The loop should
- get unrolled. */
- crc0 = crc_braid_big_table[0][word0 & 0xff];
-#if N > 1
- crc1 = crc_braid_big_table[0][word1 & 0xff];
-#if N > 2
- crc2 = crc_braid_big_table[0][word2 & 0xff];
-#if N > 3
- crc3 = crc_braid_big_table[0][word3 & 0xff];
-#if N > 4
- crc4 = crc_braid_big_table[0][word4 & 0xff];
-#if N > 5
- crc5 = crc_braid_big_table[0][word5 & 0xff];
-#endif
-#endif
-#endif
-#endif
-#endif
- for (k = 1; k < W; k++) {
- crc0 ^= crc_braid_big_table[k][(word0 >> (k << 3)) & 0xff];
-#if N > 1
- crc1 ^= crc_braid_big_table[k][(word1 >> (k << 3)) & 0xff];
-#if N > 2
- crc2 ^= crc_braid_big_table[k][(word2 >> (k << 3)) & 0xff];
-#if N > 3
- crc3 ^= crc_braid_big_table[k][(word3 >> (k << 3)) & 0xff];
-#if N > 4
- crc4 ^= crc_braid_big_table[k][(word4 >> (k << 3)) & 0xff];
-#if N > 5
- crc5 ^= crc_braid_big_table[k][(word5 >> (k << 3)) & 0xff];
-#endif
-#endif
-#endif
-#endif
-#endif
- }
- }
-
- /*
- Process the last block, combining the CRCs of the N braids at the
- same time.
- */
- comb = crc_word_big(crc0 ^ words[0]);
-#if N > 1
- comb = crc_word_big(crc1 ^ words[1] ^ comb);
-#if N > 2
- comb = crc_word_big(crc2 ^ words[2] ^ comb);
-#if N > 3
- comb = crc_word_big(crc3 ^ words[3] ^ comb);
-#if N > 4
- comb = crc_word_big(crc4 ^ words[4] ^ comb);
-#if N > 5
- comb = crc_word_big(crc5 ^ words[5] ^ comb);
-#endif
-#endif
-#endif
-#endif
-#endif
- words += N;
- crc = byte_swap(comb);
- }
-
- /*
- Update the pointer to the remaining bytes to process.
- */
- buf = (unsigned char const *)words;
- }
-
-#endif /* W */
-
- /* Complete the computation of the CRC on any remaining bytes. */
- while (len >= 8) {
- len -= 8;
- crc = (crc >> 8) ^ crc_table[(crc ^ *buf++) & 0xff];
- crc = (crc >> 8) ^ crc_table[(crc ^ *buf++) & 0xff];
- crc = (crc >> 8) ^ crc_table[(crc ^ *buf++) & 0xff];
- crc = (crc >> 8) ^ crc_table[(crc ^ *buf++) & 0xff];
- crc = (crc >> 8) ^ crc_table[(crc ^ *buf++) & 0xff];
- crc = (crc >> 8) ^ crc_table[(crc ^ *buf++) & 0xff];
- crc = (crc >> 8) ^ crc_table[(crc ^ *buf++) & 0xff];
- crc = (crc >> 8) ^ crc_table[(crc ^ *buf++) & 0xff];
- }
- while (len) {
- len--;
- crc = (crc >> 8) ^ crc_table[(crc ^ *buf++) & 0xff];
- }
-
- /* Return the CRC, post-conditioned. */
- return crc ^ 0xffffffff;
-}
-
-#endif
-
-/* ========================================================================= */
-unsigned long ZEXPORT crc32(
- unsigned long crc,
- const unsigned char FAR *buf,
- uInt len)
-{
- return crc32_z(crc, buf, len);
-}
-
-/* ========================================================================= */
-uLong ZEXPORT crc32_combine64(
- uLong crc1,
- uLong crc2,
- z_off64_t len2)
-{
-#ifdef DYNAMIC_CRC_TABLE
- once(&made, make_crc_table);
-#endif /* DYNAMIC_CRC_TABLE */
- return multmodp(x2nmodp(len2, 3), crc1) ^ crc2;
-}
-
-/* ========================================================================= */
-uLong ZEXPORT crc32_combine(
- uLong crc1,
- uLong crc2,
- z_off_t len2)
-{
- return crc32_combine64(crc1, crc2, len2);
-}
-
-/* ========================================================================= */
-uLong ZEXPORT crc32_combine_gen64(
- z_off64_t len2)
-{
-#ifdef DYNAMIC_CRC_TABLE
- once(&made, make_crc_table);
-#endif /* DYNAMIC_CRC_TABLE */
- return x2nmodp(len2, 3);
-}
-
-/* ========================================================================= */
-uLong ZEXPORT crc32_combine_gen(
- z_off_t len2)
-{
- return crc32_combine_gen64(len2);
-}
-
-/* ========================================================================= */
-uLong crc32_combine_op(
- uLong crc1,
- uLong crc2,
- uLong op)
-{
- return multmodp(op, crc1) ^ crc2;
-}
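crc32_combine() above advances crc1 over len2 bytes' worth of zeros via multmodp(x2nmodp(len2, 3), crc1) and XORs in crc2, so the CRC of a concatenation can be formed from the CRCs of its halves. A small usage sketch, assuming an ordinary system zlib that exports crc32() and crc32_combine() with the signatures shown in this file:

#include <stdio.h>
#include <string.h>
#include <zlib.h>

int main(void)
{
    const char *a = "hello ";
    const char *b = "world";

    uLong crc_a = crc32(0L, (const Bytef *)a, (uInt)strlen(a));
    uLong crc_b = crc32(0L, (const Bytef *)b, (uInt)strlen(b));

    /* CRC of "hello world" computed without rescanning any bytes. */
    uLong combined = crc32_combine(crc_a, crc_b, (z_off_t)strlen(b));

    char ab[16];
    snprintf(ab, sizeof ab, "%s%s", a, b);
    uLong direct = crc32(0L, (const Bytef *)ab, (uInt)strlen(ab));

    printf("combined=%08lx direct=%08lx\n", combined, direct);   /* should match */
    return 0;
}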
diff --git a/thirdparty/freetype/src/gzip/crc32.h b/thirdparty/freetype/src/gzip/crc32.h
deleted file mode 100644
index 137df68d61..0000000000
--- a/thirdparty/freetype/src/gzip/crc32.h
+++ /dev/null
@@ -1,9446 +0,0 @@
-/* crc32.h -- tables for rapid CRC calculation
- * Generated automatically by crc32.c
- */
-
-local const z_crc_t FAR crc_table[] = {
- 0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419,
- 0x706af48f, 0xe963a535, 0x9e6495a3, 0x0edb8832, 0x79dcb8a4,
- 0xe0d5e91e, 0x97d2d988, 0x09b64c2b, 0x7eb17cbd, 0xe7b82d07,
- 0x90bf1d91, 0x1db71064, 0x6ab020f2, 0xf3b97148, 0x84be41de,
- 0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7, 0x136c9856,
- 0x646ba8c0, 0xfd62f97a, 0x8a65c9ec, 0x14015c4f, 0x63066cd9,
- 0xfa0f3d63, 0x8d080df5, 0x3b6e20c8, 0x4c69105e, 0xd56041e4,
- 0xa2677172, 0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b,
- 0x35b5a8fa, 0x42b2986c, 0xdbbbc9d6, 0xacbcf940, 0x32d86ce3,
- 0x45df5c75, 0xdcd60dcf, 0xabd13d59, 0x26d930ac, 0x51de003a,
- 0xc8d75180, 0xbfd06116, 0x21b4f4b5, 0x56b3c423, 0xcfba9599,
- 0xb8bda50f, 0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924,
- 0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d, 0x76dc4190,
- 0x01db7106, 0x98d220bc, 0xefd5102a, 0x71b18589, 0x06b6b51f,
- 0x9fbfe4a5, 0xe8b8d433, 0x7807c9a2, 0x0f00f934, 0x9609a88e,
- 0xe10e9818, 0x7f6a0dbb, 0x086d3d2d, 0x91646c97, 0xe6635c01,
- 0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e, 0x6c0695ed,
- 0x1b01a57b, 0x8208f4c1, 0xf50fc457, 0x65b0d9c6, 0x12b7e950,
- 0x8bbeb8ea, 0xfcb9887c, 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3,
- 0xfbd44c65, 0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2,
- 0x4adfa541, 0x3dd895d7, 0xa4d1c46d, 0xd3d6f4fb, 0x4369e96a,
- 0x346ed9fc, 0xad678846, 0xda60b8d0, 0x44042d73, 0x33031de5,
- 0xaa0a4c5f, 0xdd0d7cc9, 0x5005713c, 0x270241aa, 0xbe0b1010,
- 0xc90c2086, 0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f,
- 0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4, 0x59b33d17,
- 0x2eb40d81, 0xb7bd5c3b, 0xc0ba6cad, 0xedb88320, 0x9abfb3b6,
- 0x03b6e20c, 0x74b1d29a, 0xead54739, 0x9dd277af, 0x04db2615,
- 0x73dc1683, 0xe3630b12, 0x94643b84, 0x0d6d6a3e, 0x7a6a5aa8,
- 0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1, 0xf00f9344,
- 0x8708a3d2, 0x1e01f268, 0x6906c2fe, 0xf762575d, 0x806567cb,
- 0x196c3671, 0x6e6b06e7, 0xfed41b76, 0x89d32be0, 0x10da7a5a,
- 0x67dd4acc, 0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5,
- 0xd6d6a3e8, 0xa1d1937e, 0x38d8c2c4, 0x4fdff252, 0xd1bb67f1,
- 0xa6bc5767, 0x3fb506dd, 0x48b2364b, 0xd80d2bda, 0xaf0a1b4c,
- 0x36034af6, 0x41047a60, 0xdf60efc3, 0xa867df55, 0x316e8eef,
- 0x4669be79, 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236,
- 0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f, 0xc5ba3bbe,
- 0xb2bd0b28, 0x2bb45a92, 0x5cb36a04, 0xc2d7ffa7, 0xb5d0cf31,
- 0x2cd99e8b, 0x5bdeae1d, 0x9b64c2b0, 0xec63f226, 0x756aa39c,
- 0x026d930a, 0x9c0906a9, 0xeb0e363f, 0x72076785, 0x05005713,
- 0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38, 0x92d28e9b,
- 0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21, 0x86d3d2d4, 0xf1d4e242,
- 0x68ddb3f8, 0x1fda836e, 0x81be16cd, 0xf6b9265b, 0x6fb077e1,
- 0x18b74777, 0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c,
- 0x8f659eff, 0xf862ae69, 0x616bffd3, 0x166ccf45, 0xa00ae278,
- 0xd70dd2ee, 0x4e048354, 0x3903b3c2, 0xa7672661, 0xd06016f7,
- 0x4969474d, 0x3e6e77db, 0xaed16a4a, 0xd9d65adc, 0x40df0b66,
- 0x37d83bf0, 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9,
- 0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, 0xbad03605,
- 0xcdd70693, 0x54de5729, 0x23d967bf, 0xb3667a2e, 0xc4614ab8,
- 0x5d681b02, 0x2a6f2b94, 0xb40bbe37, 0xc30c8ea1, 0x5a05df1b,
- 0x2d02ef8d};
-
-#ifdef W
-
-#if W == 8
-
-local const z_word_t FAR crc_big_table[] = {
- 0x0000000000000000, 0x9630077700000000, 0x2c610eee00000000,
- 0xba51099900000000, 0x19c46d0700000000, 0x8ff46a7000000000,
- 0x35a563e900000000, 0xa395649e00000000, 0x3288db0e00000000,
- 0xa4b8dc7900000000, 0x1ee9d5e000000000, 0x88d9d29700000000,
- 0x2b4cb60900000000, 0xbd7cb17e00000000, 0x072db8e700000000,
- 0x911dbf9000000000, 0x6410b71d00000000, 0xf220b06a00000000,
- 0x4871b9f300000000, 0xde41be8400000000, 0x7dd4da1a00000000,
- 0xebe4dd6d00000000, 0x51b5d4f400000000, 0xc785d38300000000,
- 0x56986c1300000000, 0xc0a86b6400000000, 0x7af962fd00000000,
- 0xecc9658a00000000, 0x4f5c011400000000, 0xd96c066300000000,
- 0x633d0ffa00000000, 0xf50d088d00000000, 0xc8206e3b00000000,
- 0x5e10694c00000000, 0xe44160d500000000, 0x727167a200000000,
- 0xd1e4033c00000000, 0x47d4044b00000000, 0xfd850dd200000000,
- 0x6bb50aa500000000, 0xfaa8b53500000000, 0x6c98b24200000000,
- 0xd6c9bbdb00000000, 0x40f9bcac00000000, 0xe36cd83200000000,
- 0x755cdf4500000000, 0xcf0dd6dc00000000, 0x593dd1ab00000000,
- 0xac30d92600000000, 0x3a00de5100000000, 0x8051d7c800000000,
- 0x1661d0bf00000000, 0xb5f4b42100000000, 0x23c4b35600000000,
- 0x9995bacf00000000, 0x0fa5bdb800000000, 0x9eb8022800000000,
- 0x0888055f00000000, 0xb2d90cc600000000, 0x24e90bb100000000,
- 0x877c6f2f00000000, 0x114c685800000000, 0xab1d61c100000000,
- 0x3d2d66b600000000, 0x9041dc7600000000, 0x0671db0100000000,
- 0xbc20d29800000000, 0x2a10d5ef00000000, 0x8985b17100000000,
- 0x1fb5b60600000000, 0xa5e4bf9f00000000, 0x33d4b8e800000000,
- 0xa2c9077800000000, 0x34f9000f00000000, 0x8ea8099600000000,
- 0x18980ee100000000, 0xbb0d6a7f00000000, 0x2d3d6d0800000000,
- 0x976c649100000000, 0x015c63e600000000, 0xf4516b6b00000000,
- 0x62616c1c00000000, 0xd830658500000000, 0x4e0062f200000000,
- 0xed95066c00000000, 0x7ba5011b00000000, 0xc1f4088200000000,
- 0x57c40ff500000000, 0xc6d9b06500000000, 0x50e9b71200000000,
- 0xeab8be8b00000000, 0x7c88b9fc00000000, 0xdf1ddd6200000000,
- 0x492dda1500000000, 0xf37cd38c00000000, 0x654cd4fb00000000,
- 0x5861b24d00000000, 0xce51b53a00000000, 0x7400bca300000000,
- 0xe230bbd400000000, 0x41a5df4a00000000, 0xd795d83d00000000,
- 0x6dc4d1a400000000, 0xfbf4d6d300000000, 0x6ae9694300000000,
- 0xfcd96e3400000000, 0x468867ad00000000, 0xd0b860da00000000,
- 0x732d044400000000, 0xe51d033300000000, 0x5f4c0aaa00000000,
- 0xc97c0ddd00000000, 0x3c71055000000000, 0xaa41022700000000,
- 0x10100bbe00000000, 0x86200cc900000000, 0x25b5685700000000,
- 0xb3856f2000000000, 0x09d466b900000000, 0x9fe461ce00000000,
- 0x0ef9de5e00000000, 0x98c9d92900000000, 0x2298d0b000000000,
- 0xb4a8d7c700000000, 0x173db35900000000, 0x810db42e00000000,
- 0x3b5cbdb700000000, 0xad6cbac000000000, 0x2083b8ed00000000,
- 0xb6b3bf9a00000000, 0x0ce2b60300000000, 0x9ad2b17400000000,
- 0x3947d5ea00000000, 0xaf77d29d00000000, 0x1526db0400000000,
- 0x8316dc7300000000, 0x120b63e300000000, 0x843b649400000000,
- 0x3e6a6d0d00000000, 0xa85a6a7a00000000, 0x0bcf0ee400000000,
- 0x9dff099300000000, 0x27ae000a00000000, 0xb19e077d00000000,
- 0x44930ff000000000, 0xd2a3088700000000, 0x68f2011e00000000,
- 0xfec2066900000000, 0x5d5762f700000000, 0xcb67658000000000,
- 0x71366c1900000000, 0xe7066b6e00000000, 0x761bd4fe00000000,
- 0xe02bd38900000000, 0x5a7ada1000000000, 0xcc4add6700000000,
- 0x6fdfb9f900000000, 0xf9efbe8e00000000, 0x43beb71700000000,
- 0xd58eb06000000000, 0xe8a3d6d600000000, 0x7e93d1a100000000,
- 0xc4c2d83800000000, 0x52f2df4f00000000, 0xf167bbd100000000,
- 0x6757bca600000000, 0xdd06b53f00000000, 0x4b36b24800000000,
- 0xda2b0dd800000000, 0x4c1b0aaf00000000, 0xf64a033600000000,
- 0x607a044100000000, 0xc3ef60df00000000, 0x55df67a800000000,
- 0xef8e6e3100000000, 0x79be694600000000, 0x8cb361cb00000000,
- 0x1a8366bc00000000, 0xa0d26f2500000000, 0x36e2685200000000,
- 0x95770ccc00000000, 0x03470bbb00000000, 0xb916022200000000,
- 0x2f26055500000000, 0xbe3bbac500000000, 0x280bbdb200000000,
- 0x925ab42b00000000, 0x046ab35c00000000, 0xa7ffd7c200000000,
- 0x31cfd0b500000000, 0x8b9ed92c00000000, 0x1daede5b00000000,
- 0xb0c2649b00000000, 0x26f263ec00000000, 0x9ca36a7500000000,
- 0x0a936d0200000000, 0xa906099c00000000, 0x3f360eeb00000000,
- 0x8567077200000000, 0x1357000500000000, 0x824abf9500000000,
- 0x147ab8e200000000, 0xae2bb17b00000000, 0x381bb60c00000000,
- 0x9b8ed29200000000, 0x0dbed5e500000000, 0xb7efdc7c00000000,
- 0x21dfdb0b00000000, 0xd4d2d38600000000, 0x42e2d4f100000000,
- 0xf8b3dd6800000000, 0x6e83da1f00000000, 0xcd16be8100000000,
- 0x5b26b9f600000000, 0xe177b06f00000000, 0x7747b71800000000,
- 0xe65a088800000000, 0x706a0fff00000000, 0xca3b066600000000,
- 0x5c0b011100000000, 0xff9e658f00000000, 0x69ae62f800000000,
- 0xd3ff6b6100000000, 0x45cf6c1600000000, 0x78e20aa000000000,
- 0xeed20dd700000000, 0x5483044e00000000, 0xc2b3033900000000,
- 0x612667a700000000, 0xf71660d000000000, 0x4d47694900000000,
- 0xdb776e3e00000000, 0x4a6ad1ae00000000, 0xdc5ad6d900000000,
- 0x660bdf4000000000, 0xf03bd83700000000, 0x53aebca900000000,
- 0xc59ebbde00000000, 0x7fcfb24700000000, 0xe9ffb53000000000,
- 0x1cf2bdbd00000000, 0x8ac2baca00000000, 0x3093b35300000000,
- 0xa6a3b42400000000, 0x0536d0ba00000000, 0x9306d7cd00000000,
- 0x2957de5400000000, 0xbf67d92300000000, 0x2e7a66b300000000,
- 0xb84a61c400000000, 0x021b685d00000000, 0x942b6f2a00000000,
- 0x37be0bb400000000, 0xa18e0cc300000000, 0x1bdf055a00000000,
- 0x8def022d00000000};
-
-#else /* W == 4 */
-
-local const z_word_t FAR crc_big_table[] = {
- 0x00000000, 0x96300777, 0x2c610eee, 0xba510999, 0x19c46d07,
- 0x8ff46a70, 0x35a563e9, 0xa395649e, 0x3288db0e, 0xa4b8dc79,
- 0x1ee9d5e0, 0x88d9d297, 0x2b4cb609, 0xbd7cb17e, 0x072db8e7,
- 0x911dbf90, 0x6410b71d, 0xf220b06a, 0x4871b9f3, 0xde41be84,
- 0x7dd4da1a, 0xebe4dd6d, 0x51b5d4f4, 0xc785d383, 0x56986c13,
- 0xc0a86b64, 0x7af962fd, 0xecc9658a, 0x4f5c0114, 0xd96c0663,
- 0x633d0ffa, 0xf50d088d, 0xc8206e3b, 0x5e10694c, 0xe44160d5,
- 0x727167a2, 0xd1e4033c, 0x47d4044b, 0xfd850dd2, 0x6bb50aa5,
- 0xfaa8b535, 0x6c98b242, 0xd6c9bbdb, 0x40f9bcac, 0xe36cd832,
- 0x755cdf45, 0xcf0dd6dc, 0x593dd1ab, 0xac30d926, 0x3a00de51,
- 0x8051d7c8, 0x1661d0bf, 0xb5f4b421, 0x23c4b356, 0x9995bacf,
- 0x0fa5bdb8, 0x9eb80228, 0x0888055f, 0xb2d90cc6, 0x24e90bb1,
- 0x877c6f2f, 0x114c6858, 0xab1d61c1, 0x3d2d66b6, 0x9041dc76,
- 0x0671db01, 0xbc20d298, 0x2a10d5ef, 0x8985b171, 0x1fb5b606,
- 0xa5e4bf9f, 0x33d4b8e8, 0xa2c90778, 0x34f9000f, 0x8ea80996,
- 0x18980ee1, 0xbb0d6a7f, 0x2d3d6d08, 0x976c6491, 0x015c63e6,
- 0xf4516b6b, 0x62616c1c, 0xd8306585, 0x4e0062f2, 0xed95066c,
- 0x7ba5011b, 0xc1f40882, 0x57c40ff5, 0xc6d9b065, 0x50e9b712,
- 0xeab8be8b, 0x7c88b9fc, 0xdf1ddd62, 0x492dda15, 0xf37cd38c,
- 0x654cd4fb, 0x5861b24d, 0xce51b53a, 0x7400bca3, 0xe230bbd4,
- 0x41a5df4a, 0xd795d83d, 0x6dc4d1a4, 0xfbf4d6d3, 0x6ae96943,
- 0xfcd96e34, 0x468867ad, 0xd0b860da, 0x732d0444, 0xe51d0333,
- 0x5f4c0aaa, 0xc97c0ddd, 0x3c710550, 0xaa410227, 0x10100bbe,
- 0x86200cc9, 0x25b56857, 0xb3856f20, 0x09d466b9, 0x9fe461ce,
- 0x0ef9de5e, 0x98c9d929, 0x2298d0b0, 0xb4a8d7c7, 0x173db359,
- 0x810db42e, 0x3b5cbdb7, 0xad6cbac0, 0x2083b8ed, 0xb6b3bf9a,
- 0x0ce2b603, 0x9ad2b174, 0x3947d5ea, 0xaf77d29d, 0x1526db04,
- 0x8316dc73, 0x120b63e3, 0x843b6494, 0x3e6a6d0d, 0xa85a6a7a,
- 0x0bcf0ee4, 0x9dff0993, 0x27ae000a, 0xb19e077d, 0x44930ff0,
- 0xd2a30887, 0x68f2011e, 0xfec20669, 0x5d5762f7, 0xcb676580,
- 0x71366c19, 0xe7066b6e, 0x761bd4fe, 0xe02bd389, 0x5a7ada10,
- 0xcc4add67, 0x6fdfb9f9, 0xf9efbe8e, 0x43beb717, 0xd58eb060,
- 0xe8a3d6d6, 0x7e93d1a1, 0xc4c2d838, 0x52f2df4f, 0xf167bbd1,
- 0x6757bca6, 0xdd06b53f, 0x4b36b248, 0xda2b0dd8, 0x4c1b0aaf,
- 0xf64a0336, 0x607a0441, 0xc3ef60df, 0x55df67a8, 0xef8e6e31,
- 0x79be6946, 0x8cb361cb, 0x1a8366bc, 0xa0d26f25, 0x36e26852,
- 0x95770ccc, 0x03470bbb, 0xb9160222, 0x2f260555, 0xbe3bbac5,
- 0x280bbdb2, 0x925ab42b, 0x046ab35c, 0xa7ffd7c2, 0x31cfd0b5,
- 0x8b9ed92c, 0x1daede5b, 0xb0c2649b, 0x26f263ec, 0x9ca36a75,
- 0x0a936d02, 0xa906099c, 0x3f360eeb, 0x85670772, 0x13570005,
- 0x824abf95, 0x147ab8e2, 0xae2bb17b, 0x381bb60c, 0x9b8ed292,
- 0x0dbed5e5, 0xb7efdc7c, 0x21dfdb0b, 0xd4d2d386, 0x42e2d4f1,
- 0xf8b3dd68, 0x6e83da1f, 0xcd16be81, 0x5b26b9f6, 0xe177b06f,
- 0x7747b718, 0xe65a0888, 0x706a0fff, 0xca3b0666, 0x5c0b0111,
- 0xff9e658f, 0x69ae62f8, 0xd3ff6b61, 0x45cf6c16, 0x78e20aa0,
- 0xeed20dd7, 0x5483044e, 0xc2b30339, 0x612667a7, 0xf71660d0,
- 0x4d476949, 0xdb776e3e, 0x4a6ad1ae, 0xdc5ad6d9, 0x660bdf40,
- 0xf03bd837, 0x53aebca9, 0xc59ebbde, 0x7fcfb247, 0xe9ffb530,
- 0x1cf2bdbd, 0x8ac2baca, 0x3093b353, 0xa6a3b424, 0x0536d0ba,
- 0x9306d7cd, 0x2957de54, 0xbf67d923, 0x2e7a66b3, 0xb84a61c4,
- 0x021b685d, 0x942b6f2a, 0x37be0bb4, 0xa18e0cc3, 0x1bdf055a,
- 0x8def022d};
-
-#endif
-
-#if N == 1
-
-#if W == 8
-
-local const z_crc_t FAR crc_braid_table[][256] = {
- {0x00000000, 0xccaa009e, 0x4225077d, 0x8e8f07e3, 0x844a0efa,
- 0x48e00e64, 0xc66f0987, 0x0ac50919, 0xd3e51bb5, 0x1f4f1b2b,
- 0x91c01cc8, 0x5d6a1c56, 0x57af154f, 0x9b0515d1, 0x158a1232,
- 0xd92012ac, 0x7cbb312b, 0xb01131b5, 0x3e9e3656, 0xf23436c8,
- 0xf8f13fd1, 0x345b3f4f, 0xbad438ac, 0x767e3832, 0xaf5e2a9e,
- 0x63f42a00, 0xed7b2de3, 0x21d12d7d, 0x2b142464, 0xe7be24fa,
- 0x69312319, 0xa59b2387, 0xf9766256, 0x35dc62c8, 0xbb53652b,
- 0x77f965b5, 0x7d3c6cac, 0xb1966c32, 0x3f196bd1, 0xf3b36b4f,
- 0x2a9379e3, 0xe639797d, 0x68b67e9e, 0xa41c7e00, 0xaed97719,
- 0x62737787, 0xecfc7064, 0x205670fa, 0x85cd537d, 0x496753e3,
- 0xc7e85400, 0x0b42549e, 0x01875d87, 0xcd2d5d19, 0x43a25afa,
- 0x8f085a64, 0x562848c8, 0x9a824856, 0x140d4fb5, 0xd8a74f2b,
- 0xd2624632, 0x1ec846ac, 0x9047414f, 0x5ced41d1, 0x299dc2ed,
- 0xe537c273, 0x6bb8c590, 0xa712c50e, 0xadd7cc17, 0x617dcc89,
- 0xeff2cb6a, 0x2358cbf4, 0xfa78d958, 0x36d2d9c6, 0xb85dde25,
- 0x74f7debb, 0x7e32d7a2, 0xb298d73c, 0x3c17d0df, 0xf0bdd041,
- 0x5526f3c6, 0x998cf358, 0x1703f4bb, 0xdba9f425, 0xd16cfd3c,
- 0x1dc6fda2, 0x9349fa41, 0x5fe3fadf, 0x86c3e873, 0x4a69e8ed,
- 0xc4e6ef0e, 0x084cef90, 0x0289e689, 0xce23e617, 0x40ace1f4,
- 0x8c06e16a, 0xd0eba0bb, 0x1c41a025, 0x92cea7c6, 0x5e64a758,
- 0x54a1ae41, 0x980baedf, 0x1684a93c, 0xda2ea9a2, 0x030ebb0e,
- 0xcfa4bb90, 0x412bbc73, 0x8d81bced, 0x8744b5f4, 0x4beeb56a,
- 0xc561b289, 0x09cbb217, 0xac509190, 0x60fa910e, 0xee7596ed,
- 0x22df9673, 0x281a9f6a, 0xe4b09ff4, 0x6a3f9817, 0xa6959889,
- 0x7fb58a25, 0xb31f8abb, 0x3d908d58, 0xf13a8dc6, 0xfbff84df,
- 0x37558441, 0xb9da83a2, 0x7570833c, 0x533b85da, 0x9f918544,
- 0x111e82a7, 0xddb48239, 0xd7718b20, 0x1bdb8bbe, 0x95548c5d,
- 0x59fe8cc3, 0x80de9e6f, 0x4c749ef1, 0xc2fb9912, 0x0e51998c,
- 0x04949095, 0xc83e900b, 0x46b197e8, 0x8a1b9776, 0x2f80b4f1,
- 0xe32ab46f, 0x6da5b38c, 0xa10fb312, 0xabcaba0b, 0x6760ba95,
- 0xe9efbd76, 0x2545bde8, 0xfc65af44, 0x30cfafda, 0xbe40a839,
- 0x72eaa8a7, 0x782fa1be, 0xb485a120, 0x3a0aa6c3, 0xf6a0a65d,
- 0xaa4de78c, 0x66e7e712, 0xe868e0f1, 0x24c2e06f, 0x2e07e976,
- 0xe2ade9e8, 0x6c22ee0b, 0xa088ee95, 0x79a8fc39, 0xb502fca7,
- 0x3b8dfb44, 0xf727fbda, 0xfde2f2c3, 0x3148f25d, 0xbfc7f5be,
- 0x736df520, 0xd6f6d6a7, 0x1a5cd639, 0x94d3d1da, 0x5879d144,
- 0x52bcd85d, 0x9e16d8c3, 0x1099df20, 0xdc33dfbe, 0x0513cd12,
- 0xc9b9cd8c, 0x4736ca6f, 0x8b9ccaf1, 0x8159c3e8, 0x4df3c376,
- 0xc37cc495, 0x0fd6c40b, 0x7aa64737, 0xb60c47a9, 0x3883404a,
- 0xf42940d4, 0xfeec49cd, 0x32464953, 0xbcc94eb0, 0x70634e2e,
- 0xa9435c82, 0x65e95c1c, 0xeb665bff, 0x27cc5b61, 0x2d095278,
- 0xe1a352e6, 0x6f2c5505, 0xa386559b, 0x061d761c, 0xcab77682,
- 0x44387161, 0x889271ff, 0x825778e6, 0x4efd7878, 0xc0727f9b,
- 0x0cd87f05, 0xd5f86da9, 0x19526d37, 0x97dd6ad4, 0x5b776a4a,
- 0x51b26353, 0x9d1863cd, 0x1397642e, 0xdf3d64b0, 0x83d02561,
- 0x4f7a25ff, 0xc1f5221c, 0x0d5f2282, 0x079a2b9b, 0xcb302b05,
- 0x45bf2ce6, 0x89152c78, 0x50353ed4, 0x9c9f3e4a, 0x121039a9,
- 0xdeba3937, 0xd47f302e, 0x18d530b0, 0x965a3753, 0x5af037cd,
- 0xff6b144a, 0x33c114d4, 0xbd4e1337, 0x71e413a9, 0x7b211ab0,
- 0xb78b1a2e, 0x39041dcd, 0xf5ae1d53, 0x2c8e0fff, 0xe0240f61,
- 0x6eab0882, 0xa201081c, 0xa8c40105, 0x646e019b, 0xeae10678,
- 0x264b06e6},
- {0x00000000, 0xa6770bb4, 0x979f1129, 0x31e81a9d, 0xf44f2413,
- 0x52382fa7, 0x63d0353a, 0xc5a73e8e, 0x33ef4e67, 0x959845d3,
- 0xa4705f4e, 0x020754fa, 0xc7a06a74, 0x61d761c0, 0x503f7b5d,
- 0xf64870e9, 0x67de9cce, 0xc1a9977a, 0xf0418de7, 0x56368653,
- 0x9391b8dd, 0x35e6b369, 0x040ea9f4, 0xa279a240, 0x5431d2a9,
- 0xf246d91d, 0xc3aec380, 0x65d9c834, 0xa07ef6ba, 0x0609fd0e,
- 0x37e1e793, 0x9196ec27, 0xcfbd399c, 0x69ca3228, 0x582228b5,
- 0xfe552301, 0x3bf21d8f, 0x9d85163b, 0xac6d0ca6, 0x0a1a0712,
- 0xfc5277fb, 0x5a257c4f, 0x6bcd66d2, 0xcdba6d66, 0x081d53e8,
- 0xae6a585c, 0x9f8242c1, 0x39f54975, 0xa863a552, 0x0e14aee6,
- 0x3ffcb47b, 0x998bbfcf, 0x5c2c8141, 0xfa5b8af5, 0xcbb39068,
- 0x6dc49bdc, 0x9b8ceb35, 0x3dfbe081, 0x0c13fa1c, 0xaa64f1a8,
- 0x6fc3cf26, 0xc9b4c492, 0xf85cde0f, 0x5e2bd5bb, 0x440b7579,
- 0xe27c7ecd, 0xd3946450, 0x75e36fe4, 0xb044516a, 0x16335ade,
- 0x27db4043, 0x81ac4bf7, 0x77e43b1e, 0xd19330aa, 0xe07b2a37,
- 0x460c2183, 0x83ab1f0d, 0x25dc14b9, 0x14340e24, 0xb2430590,
- 0x23d5e9b7, 0x85a2e203, 0xb44af89e, 0x123df32a, 0xd79acda4,
- 0x71edc610, 0x4005dc8d, 0xe672d739, 0x103aa7d0, 0xb64dac64,
- 0x87a5b6f9, 0x21d2bd4d, 0xe47583c3, 0x42028877, 0x73ea92ea,
- 0xd59d995e, 0x8bb64ce5, 0x2dc14751, 0x1c295dcc, 0xba5e5678,
- 0x7ff968f6, 0xd98e6342, 0xe86679df, 0x4e11726b, 0xb8590282,
- 0x1e2e0936, 0x2fc613ab, 0x89b1181f, 0x4c162691, 0xea612d25,
- 0xdb8937b8, 0x7dfe3c0c, 0xec68d02b, 0x4a1fdb9f, 0x7bf7c102,
- 0xdd80cab6, 0x1827f438, 0xbe50ff8c, 0x8fb8e511, 0x29cfeea5,
- 0xdf879e4c, 0x79f095f8, 0x48188f65, 0xee6f84d1, 0x2bc8ba5f,
- 0x8dbfb1eb, 0xbc57ab76, 0x1a20a0c2, 0x8816eaf2, 0x2e61e146,
- 0x1f89fbdb, 0xb9fef06f, 0x7c59cee1, 0xda2ec555, 0xebc6dfc8,
- 0x4db1d47c, 0xbbf9a495, 0x1d8eaf21, 0x2c66b5bc, 0x8a11be08,
- 0x4fb68086, 0xe9c18b32, 0xd82991af, 0x7e5e9a1b, 0xefc8763c,
- 0x49bf7d88, 0x78576715, 0xde206ca1, 0x1b87522f, 0xbdf0599b,
- 0x8c184306, 0x2a6f48b2, 0xdc27385b, 0x7a5033ef, 0x4bb82972,
- 0xedcf22c6, 0x28681c48, 0x8e1f17fc, 0xbff70d61, 0x198006d5,
- 0x47abd36e, 0xe1dcd8da, 0xd034c247, 0x7643c9f3, 0xb3e4f77d,
- 0x1593fcc9, 0x247be654, 0x820cede0, 0x74449d09, 0xd23396bd,
- 0xe3db8c20, 0x45ac8794, 0x800bb91a, 0x267cb2ae, 0x1794a833,
- 0xb1e3a387, 0x20754fa0, 0x86024414, 0xb7ea5e89, 0x119d553d,
- 0xd43a6bb3, 0x724d6007, 0x43a57a9a, 0xe5d2712e, 0x139a01c7,
- 0xb5ed0a73, 0x840510ee, 0x22721b5a, 0xe7d525d4, 0x41a22e60,
- 0x704a34fd, 0xd63d3f49, 0xcc1d9f8b, 0x6a6a943f, 0x5b828ea2,
- 0xfdf58516, 0x3852bb98, 0x9e25b02c, 0xafcdaab1, 0x09baa105,
- 0xfff2d1ec, 0x5985da58, 0x686dc0c5, 0xce1acb71, 0x0bbdf5ff,
- 0xadcafe4b, 0x9c22e4d6, 0x3a55ef62, 0xabc30345, 0x0db408f1,
- 0x3c5c126c, 0x9a2b19d8, 0x5f8c2756, 0xf9fb2ce2, 0xc813367f,
- 0x6e643dcb, 0x982c4d22, 0x3e5b4696, 0x0fb35c0b, 0xa9c457bf,
- 0x6c636931, 0xca146285, 0xfbfc7818, 0x5d8b73ac, 0x03a0a617,
- 0xa5d7ada3, 0x943fb73e, 0x3248bc8a, 0xf7ef8204, 0x519889b0,
- 0x6070932d, 0xc6079899, 0x304fe870, 0x9638e3c4, 0xa7d0f959,
- 0x01a7f2ed, 0xc400cc63, 0x6277c7d7, 0x539fdd4a, 0xf5e8d6fe,
- 0x647e3ad9, 0xc209316d, 0xf3e12bf0, 0x55962044, 0x90311eca,
- 0x3646157e, 0x07ae0fe3, 0xa1d90457, 0x579174be, 0xf1e67f0a,
- 0xc00e6597, 0x66796e23, 0xa3de50ad, 0x05a95b19, 0x34414184,
- 0x92364a30},
- {0x00000000, 0xcb5cd3a5, 0x4dc8a10b, 0x869472ae, 0x9b914216,
- 0x50cd91b3, 0xd659e31d, 0x1d0530b8, 0xec53826d, 0x270f51c8,
- 0xa19b2366, 0x6ac7f0c3, 0x77c2c07b, 0xbc9e13de, 0x3a0a6170,
- 0xf156b2d5, 0x03d6029b, 0xc88ad13e, 0x4e1ea390, 0x85427035,
- 0x9847408d, 0x531b9328, 0xd58fe186, 0x1ed33223, 0xef8580f6,
- 0x24d95353, 0xa24d21fd, 0x6911f258, 0x7414c2e0, 0xbf481145,
- 0x39dc63eb, 0xf280b04e, 0x07ac0536, 0xccf0d693, 0x4a64a43d,
- 0x81387798, 0x9c3d4720, 0x57619485, 0xd1f5e62b, 0x1aa9358e,
- 0xebff875b, 0x20a354fe, 0xa6372650, 0x6d6bf5f5, 0x706ec54d,
- 0xbb3216e8, 0x3da66446, 0xf6fab7e3, 0x047a07ad, 0xcf26d408,
- 0x49b2a6a6, 0x82ee7503, 0x9feb45bb, 0x54b7961e, 0xd223e4b0,
- 0x197f3715, 0xe82985c0, 0x23755665, 0xa5e124cb, 0x6ebdf76e,
- 0x73b8c7d6, 0xb8e41473, 0x3e7066dd, 0xf52cb578, 0x0f580a6c,
- 0xc404d9c9, 0x4290ab67, 0x89cc78c2, 0x94c9487a, 0x5f959bdf,
- 0xd901e971, 0x125d3ad4, 0xe30b8801, 0x28575ba4, 0xaec3290a,
- 0x659ffaaf, 0x789aca17, 0xb3c619b2, 0x35526b1c, 0xfe0eb8b9,
- 0x0c8e08f7, 0xc7d2db52, 0x4146a9fc, 0x8a1a7a59, 0x971f4ae1,
- 0x5c439944, 0xdad7ebea, 0x118b384f, 0xe0dd8a9a, 0x2b81593f,
- 0xad152b91, 0x6649f834, 0x7b4cc88c, 0xb0101b29, 0x36846987,
- 0xfdd8ba22, 0x08f40f5a, 0xc3a8dcff, 0x453cae51, 0x8e607df4,
- 0x93654d4c, 0x58399ee9, 0xdeadec47, 0x15f13fe2, 0xe4a78d37,
- 0x2ffb5e92, 0xa96f2c3c, 0x6233ff99, 0x7f36cf21, 0xb46a1c84,
- 0x32fe6e2a, 0xf9a2bd8f, 0x0b220dc1, 0xc07ede64, 0x46eaacca,
- 0x8db67f6f, 0x90b34fd7, 0x5bef9c72, 0xdd7beedc, 0x16273d79,
- 0xe7718fac, 0x2c2d5c09, 0xaab92ea7, 0x61e5fd02, 0x7ce0cdba,
- 0xb7bc1e1f, 0x31286cb1, 0xfa74bf14, 0x1eb014d8, 0xd5ecc77d,
- 0x5378b5d3, 0x98246676, 0x852156ce, 0x4e7d856b, 0xc8e9f7c5,
- 0x03b52460, 0xf2e396b5, 0x39bf4510, 0xbf2b37be, 0x7477e41b,
- 0x6972d4a3, 0xa22e0706, 0x24ba75a8, 0xefe6a60d, 0x1d661643,
- 0xd63ac5e6, 0x50aeb748, 0x9bf264ed, 0x86f75455, 0x4dab87f0,
- 0xcb3ff55e, 0x006326fb, 0xf135942e, 0x3a69478b, 0xbcfd3525,
- 0x77a1e680, 0x6aa4d638, 0xa1f8059d, 0x276c7733, 0xec30a496,
- 0x191c11ee, 0xd240c24b, 0x54d4b0e5, 0x9f886340, 0x828d53f8,
- 0x49d1805d, 0xcf45f2f3, 0x04192156, 0xf54f9383, 0x3e134026,
- 0xb8873288, 0x73dbe12d, 0x6eded195, 0xa5820230, 0x2316709e,
- 0xe84aa33b, 0x1aca1375, 0xd196c0d0, 0x5702b27e, 0x9c5e61db,
- 0x815b5163, 0x4a0782c6, 0xcc93f068, 0x07cf23cd, 0xf6999118,
- 0x3dc542bd, 0xbb513013, 0x700de3b6, 0x6d08d30e, 0xa65400ab,
- 0x20c07205, 0xeb9ca1a0, 0x11e81eb4, 0xdab4cd11, 0x5c20bfbf,
- 0x977c6c1a, 0x8a795ca2, 0x41258f07, 0xc7b1fda9, 0x0ced2e0c,
- 0xfdbb9cd9, 0x36e74f7c, 0xb0733dd2, 0x7b2fee77, 0x662adecf,
- 0xad760d6a, 0x2be27fc4, 0xe0beac61, 0x123e1c2f, 0xd962cf8a,
- 0x5ff6bd24, 0x94aa6e81, 0x89af5e39, 0x42f38d9c, 0xc467ff32,
- 0x0f3b2c97, 0xfe6d9e42, 0x35314de7, 0xb3a53f49, 0x78f9ecec,
- 0x65fcdc54, 0xaea00ff1, 0x28347d5f, 0xe368aefa, 0x16441b82,
- 0xdd18c827, 0x5b8cba89, 0x90d0692c, 0x8dd55994, 0x46898a31,
- 0xc01df89f, 0x0b412b3a, 0xfa1799ef, 0x314b4a4a, 0xb7df38e4,
- 0x7c83eb41, 0x6186dbf9, 0xaada085c, 0x2c4e7af2, 0xe712a957,
- 0x15921919, 0xdececabc, 0x585ab812, 0x93066bb7, 0x8e035b0f,
- 0x455f88aa, 0xc3cbfa04, 0x089729a1, 0xf9c19b74, 0x329d48d1,
- 0xb4093a7f, 0x7f55e9da, 0x6250d962, 0xa90c0ac7, 0x2f987869,
- 0xe4c4abcc},
- {0x00000000, 0x3d6029b0, 0x7ac05360, 0x47a07ad0, 0xf580a6c0,
- 0xc8e08f70, 0x8f40f5a0, 0xb220dc10, 0x30704bc1, 0x0d106271,
- 0x4ab018a1, 0x77d03111, 0xc5f0ed01, 0xf890c4b1, 0xbf30be61,
- 0x825097d1, 0x60e09782, 0x5d80be32, 0x1a20c4e2, 0x2740ed52,
- 0x95603142, 0xa80018f2, 0xefa06222, 0xd2c04b92, 0x5090dc43,
- 0x6df0f5f3, 0x2a508f23, 0x1730a693, 0xa5107a83, 0x98705333,
- 0xdfd029e3, 0xe2b00053, 0xc1c12f04, 0xfca106b4, 0xbb017c64,
- 0x866155d4, 0x344189c4, 0x0921a074, 0x4e81daa4, 0x73e1f314,
- 0xf1b164c5, 0xccd14d75, 0x8b7137a5, 0xb6111e15, 0x0431c205,
- 0x3951ebb5, 0x7ef19165, 0x4391b8d5, 0xa121b886, 0x9c419136,
- 0xdbe1ebe6, 0xe681c256, 0x54a11e46, 0x69c137f6, 0x2e614d26,
- 0x13016496, 0x9151f347, 0xac31daf7, 0xeb91a027, 0xd6f18997,
- 0x64d15587, 0x59b17c37, 0x1e1106e7, 0x23712f57, 0x58f35849,
- 0x659371f9, 0x22330b29, 0x1f532299, 0xad73fe89, 0x9013d739,
- 0xd7b3ade9, 0xead38459, 0x68831388, 0x55e33a38, 0x124340e8,
- 0x2f236958, 0x9d03b548, 0xa0639cf8, 0xe7c3e628, 0xdaa3cf98,
- 0x3813cfcb, 0x0573e67b, 0x42d39cab, 0x7fb3b51b, 0xcd93690b,
- 0xf0f340bb, 0xb7533a6b, 0x8a3313db, 0x0863840a, 0x3503adba,
- 0x72a3d76a, 0x4fc3feda, 0xfde322ca, 0xc0830b7a, 0x872371aa,
- 0xba43581a, 0x9932774d, 0xa4525efd, 0xe3f2242d, 0xde920d9d,
- 0x6cb2d18d, 0x51d2f83d, 0x167282ed, 0x2b12ab5d, 0xa9423c8c,
- 0x9422153c, 0xd3826fec, 0xeee2465c, 0x5cc29a4c, 0x61a2b3fc,
- 0x2602c92c, 0x1b62e09c, 0xf9d2e0cf, 0xc4b2c97f, 0x8312b3af,
- 0xbe729a1f, 0x0c52460f, 0x31326fbf, 0x7692156f, 0x4bf23cdf,
- 0xc9a2ab0e, 0xf4c282be, 0xb362f86e, 0x8e02d1de, 0x3c220dce,
- 0x0142247e, 0x46e25eae, 0x7b82771e, 0xb1e6b092, 0x8c869922,
- 0xcb26e3f2, 0xf646ca42, 0x44661652, 0x79063fe2, 0x3ea64532,
- 0x03c66c82, 0x8196fb53, 0xbcf6d2e3, 0xfb56a833, 0xc6368183,
- 0x74165d93, 0x49767423, 0x0ed60ef3, 0x33b62743, 0xd1062710,
- 0xec660ea0, 0xabc67470, 0x96a65dc0, 0x248681d0, 0x19e6a860,
- 0x5e46d2b0, 0x6326fb00, 0xe1766cd1, 0xdc164561, 0x9bb63fb1,
- 0xa6d61601, 0x14f6ca11, 0x2996e3a1, 0x6e369971, 0x5356b0c1,
- 0x70279f96, 0x4d47b626, 0x0ae7ccf6, 0x3787e546, 0x85a73956,
- 0xb8c710e6, 0xff676a36, 0xc2074386, 0x4057d457, 0x7d37fde7,
- 0x3a978737, 0x07f7ae87, 0xb5d77297, 0x88b75b27, 0xcf1721f7,
- 0xf2770847, 0x10c70814, 0x2da721a4, 0x6a075b74, 0x576772c4,
- 0xe547aed4, 0xd8278764, 0x9f87fdb4, 0xa2e7d404, 0x20b743d5,
- 0x1dd76a65, 0x5a7710b5, 0x67173905, 0xd537e515, 0xe857cca5,
- 0xaff7b675, 0x92979fc5, 0xe915e8db, 0xd475c16b, 0x93d5bbbb,
- 0xaeb5920b, 0x1c954e1b, 0x21f567ab, 0x66551d7b, 0x5b3534cb,
- 0xd965a31a, 0xe4058aaa, 0xa3a5f07a, 0x9ec5d9ca, 0x2ce505da,
- 0x11852c6a, 0x562556ba, 0x6b457f0a, 0x89f57f59, 0xb49556e9,
- 0xf3352c39, 0xce550589, 0x7c75d999, 0x4115f029, 0x06b58af9,
- 0x3bd5a349, 0xb9853498, 0x84e51d28, 0xc34567f8, 0xfe254e48,
- 0x4c059258, 0x7165bbe8, 0x36c5c138, 0x0ba5e888, 0x28d4c7df,
- 0x15b4ee6f, 0x521494bf, 0x6f74bd0f, 0xdd54611f, 0xe03448af,
- 0xa794327f, 0x9af41bcf, 0x18a48c1e, 0x25c4a5ae, 0x6264df7e,
- 0x5f04f6ce, 0xed242ade, 0xd044036e, 0x97e479be, 0xaa84500e,
- 0x4834505d, 0x755479ed, 0x32f4033d, 0x0f942a8d, 0xbdb4f69d,
- 0x80d4df2d, 0xc774a5fd, 0xfa148c4d, 0x78441b9c, 0x4524322c,
- 0x028448fc, 0x3fe4614c, 0x8dc4bd5c, 0xb0a494ec, 0xf704ee3c,
- 0xca64c78c},
- {0x00000000, 0xb8bc6765, 0xaa09c88b, 0x12b5afee, 0x8f629757,
- 0x37def032, 0x256b5fdc, 0x9dd738b9, 0xc5b428ef, 0x7d084f8a,
- 0x6fbde064, 0xd7018701, 0x4ad6bfb8, 0xf26ad8dd, 0xe0df7733,
- 0x58631056, 0x5019579f, 0xe8a530fa, 0xfa109f14, 0x42acf871,
- 0xdf7bc0c8, 0x67c7a7ad, 0x75720843, 0xcdce6f26, 0x95ad7f70,
- 0x2d111815, 0x3fa4b7fb, 0x8718d09e, 0x1acfe827, 0xa2738f42,
- 0xb0c620ac, 0x087a47c9, 0xa032af3e, 0x188ec85b, 0x0a3b67b5,
- 0xb28700d0, 0x2f503869, 0x97ec5f0c, 0x8559f0e2, 0x3de59787,
- 0x658687d1, 0xdd3ae0b4, 0xcf8f4f5a, 0x7733283f, 0xeae41086,
- 0x525877e3, 0x40edd80d, 0xf851bf68, 0xf02bf8a1, 0x48979fc4,
- 0x5a22302a, 0xe29e574f, 0x7f496ff6, 0xc7f50893, 0xd540a77d,
- 0x6dfcc018, 0x359fd04e, 0x8d23b72b, 0x9f9618c5, 0x272a7fa0,
- 0xbafd4719, 0x0241207c, 0x10f48f92, 0xa848e8f7, 0x9b14583d,
- 0x23a83f58, 0x311d90b6, 0x89a1f7d3, 0x1476cf6a, 0xaccaa80f,
- 0xbe7f07e1, 0x06c36084, 0x5ea070d2, 0xe61c17b7, 0xf4a9b859,
- 0x4c15df3c, 0xd1c2e785, 0x697e80e0, 0x7bcb2f0e, 0xc377486b,
- 0xcb0d0fa2, 0x73b168c7, 0x6104c729, 0xd9b8a04c, 0x446f98f5,
- 0xfcd3ff90, 0xee66507e, 0x56da371b, 0x0eb9274d, 0xb6054028,
- 0xa4b0efc6, 0x1c0c88a3, 0x81dbb01a, 0x3967d77f, 0x2bd27891,
- 0x936e1ff4, 0x3b26f703, 0x839a9066, 0x912f3f88, 0x299358ed,
- 0xb4446054, 0x0cf80731, 0x1e4da8df, 0xa6f1cfba, 0xfe92dfec,
- 0x462eb889, 0x549b1767, 0xec277002, 0x71f048bb, 0xc94c2fde,
- 0xdbf98030, 0x6345e755, 0x6b3fa09c, 0xd383c7f9, 0xc1366817,
- 0x798a0f72, 0xe45d37cb, 0x5ce150ae, 0x4e54ff40, 0xf6e89825,
- 0xae8b8873, 0x1637ef16, 0x048240f8, 0xbc3e279d, 0x21e91f24,
- 0x99557841, 0x8be0d7af, 0x335cb0ca, 0xed59b63b, 0x55e5d15e,
- 0x47507eb0, 0xffec19d5, 0x623b216c, 0xda874609, 0xc832e9e7,
- 0x708e8e82, 0x28ed9ed4, 0x9051f9b1, 0x82e4565f, 0x3a58313a,
- 0xa78f0983, 0x1f336ee6, 0x0d86c108, 0xb53aa66d, 0xbd40e1a4,
- 0x05fc86c1, 0x1749292f, 0xaff54e4a, 0x322276f3, 0x8a9e1196,
- 0x982bbe78, 0x2097d91d, 0x78f4c94b, 0xc048ae2e, 0xd2fd01c0,
- 0x6a4166a5, 0xf7965e1c, 0x4f2a3979, 0x5d9f9697, 0xe523f1f2,
- 0x4d6b1905, 0xf5d77e60, 0xe762d18e, 0x5fdeb6eb, 0xc2098e52,
- 0x7ab5e937, 0x680046d9, 0xd0bc21bc, 0x88df31ea, 0x3063568f,
- 0x22d6f961, 0x9a6a9e04, 0x07bda6bd, 0xbf01c1d8, 0xadb46e36,
- 0x15080953, 0x1d724e9a, 0xa5ce29ff, 0xb77b8611, 0x0fc7e174,
- 0x9210d9cd, 0x2aacbea8, 0x38191146, 0x80a57623, 0xd8c66675,
- 0x607a0110, 0x72cfaefe, 0xca73c99b, 0x57a4f122, 0xef189647,
- 0xfdad39a9, 0x45115ecc, 0x764dee06, 0xcef18963, 0xdc44268d,
- 0x64f841e8, 0xf92f7951, 0x41931e34, 0x5326b1da, 0xeb9ad6bf,
- 0xb3f9c6e9, 0x0b45a18c, 0x19f00e62, 0xa14c6907, 0x3c9b51be,
- 0x842736db, 0x96929935, 0x2e2efe50, 0x2654b999, 0x9ee8defc,
- 0x8c5d7112, 0x34e11677, 0xa9362ece, 0x118a49ab, 0x033fe645,
- 0xbb838120, 0xe3e09176, 0x5b5cf613, 0x49e959fd, 0xf1553e98,
- 0x6c820621, 0xd43e6144, 0xc68bceaa, 0x7e37a9cf, 0xd67f4138,
- 0x6ec3265d, 0x7c7689b3, 0xc4caeed6, 0x591dd66f, 0xe1a1b10a,
- 0xf3141ee4, 0x4ba87981, 0x13cb69d7, 0xab770eb2, 0xb9c2a15c,
- 0x017ec639, 0x9ca9fe80, 0x241599e5, 0x36a0360b, 0x8e1c516e,
- 0x866616a7, 0x3eda71c2, 0x2c6fde2c, 0x94d3b949, 0x090481f0,
- 0xb1b8e695, 0xa30d497b, 0x1bb12e1e, 0x43d23e48, 0xfb6e592d,
- 0xe9dbf6c3, 0x516791a6, 0xccb0a91f, 0x740cce7a, 0x66b96194,
- 0xde0506f1},
- {0x00000000, 0x01c26a37, 0x0384d46e, 0x0246be59, 0x0709a8dc,
- 0x06cbc2eb, 0x048d7cb2, 0x054f1685, 0x0e1351b8, 0x0fd13b8f,
- 0x0d9785d6, 0x0c55efe1, 0x091af964, 0x08d89353, 0x0a9e2d0a,
- 0x0b5c473d, 0x1c26a370, 0x1de4c947, 0x1fa2771e, 0x1e601d29,
- 0x1b2f0bac, 0x1aed619b, 0x18abdfc2, 0x1969b5f5, 0x1235f2c8,
- 0x13f798ff, 0x11b126a6, 0x10734c91, 0x153c5a14, 0x14fe3023,
- 0x16b88e7a, 0x177ae44d, 0x384d46e0, 0x398f2cd7, 0x3bc9928e,
- 0x3a0bf8b9, 0x3f44ee3c, 0x3e86840b, 0x3cc03a52, 0x3d025065,
- 0x365e1758, 0x379c7d6f, 0x35dac336, 0x3418a901, 0x3157bf84,
- 0x3095d5b3, 0x32d36bea, 0x331101dd, 0x246be590, 0x25a98fa7,
- 0x27ef31fe, 0x262d5bc9, 0x23624d4c, 0x22a0277b, 0x20e69922,
- 0x2124f315, 0x2a78b428, 0x2bbade1f, 0x29fc6046, 0x283e0a71,
- 0x2d711cf4, 0x2cb376c3, 0x2ef5c89a, 0x2f37a2ad, 0x709a8dc0,
- 0x7158e7f7, 0x731e59ae, 0x72dc3399, 0x7793251c, 0x76514f2b,
- 0x7417f172, 0x75d59b45, 0x7e89dc78, 0x7f4bb64f, 0x7d0d0816,
- 0x7ccf6221, 0x798074a4, 0x78421e93, 0x7a04a0ca, 0x7bc6cafd,
- 0x6cbc2eb0, 0x6d7e4487, 0x6f38fade, 0x6efa90e9, 0x6bb5866c,
- 0x6a77ec5b, 0x68315202, 0x69f33835, 0x62af7f08, 0x636d153f,
- 0x612bab66, 0x60e9c151, 0x65a6d7d4, 0x6464bde3, 0x662203ba,
- 0x67e0698d, 0x48d7cb20, 0x4915a117, 0x4b531f4e, 0x4a917579,
- 0x4fde63fc, 0x4e1c09cb, 0x4c5ab792, 0x4d98dda5, 0x46c49a98,
- 0x4706f0af, 0x45404ef6, 0x448224c1, 0x41cd3244, 0x400f5873,
- 0x4249e62a, 0x438b8c1d, 0x54f16850, 0x55330267, 0x5775bc3e,
- 0x56b7d609, 0x53f8c08c, 0x523aaabb, 0x507c14e2, 0x51be7ed5,
- 0x5ae239e8, 0x5b2053df, 0x5966ed86, 0x58a487b1, 0x5deb9134,
- 0x5c29fb03, 0x5e6f455a, 0x5fad2f6d, 0xe1351b80, 0xe0f771b7,
- 0xe2b1cfee, 0xe373a5d9, 0xe63cb35c, 0xe7fed96b, 0xe5b86732,
- 0xe47a0d05, 0xef264a38, 0xeee4200f, 0xeca29e56, 0xed60f461,
- 0xe82fe2e4, 0xe9ed88d3, 0xebab368a, 0xea695cbd, 0xfd13b8f0,
- 0xfcd1d2c7, 0xfe976c9e, 0xff5506a9, 0xfa1a102c, 0xfbd87a1b,
- 0xf99ec442, 0xf85cae75, 0xf300e948, 0xf2c2837f, 0xf0843d26,
- 0xf1465711, 0xf4094194, 0xf5cb2ba3, 0xf78d95fa, 0xf64fffcd,
- 0xd9785d60, 0xd8ba3757, 0xdafc890e, 0xdb3ee339, 0xde71f5bc,
- 0xdfb39f8b, 0xddf521d2, 0xdc374be5, 0xd76b0cd8, 0xd6a966ef,
- 0xd4efd8b6, 0xd52db281, 0xd062a404, 0xd1a0ce33, 0xd3e6706a,
- 0xd2241a5d, 0xc55efe10, 0xc49c9427, 0xc6da2a7e, 0xc7184049,
- 0xc25756cc, 0xc3953cfb, 0xc1d382a2, 0xc011e895, 0xcb4dafa8,
- 0xca8fc59f, 0xc8c97bc6, 0xc90b11f1, 0xcc440774, 0xcd866d43,
- 0xcfc0d31a, 0xce02b92d, 0x91af9640, 0x906dfc77, 0x922b422e,
- 0x93e92819, 0x96a63e9c, 0x976454ab, 0x9522eaf2, 0x94e080c5,
- 0x9fbcc7f8, 0x9e7eadcf, 0x9c381396, 0x9dfa79a1, 0x98b56f24,
- 0x99770513, 0x9b31bb4a, 0x9af3d17d, 0x8d893530, 0x8c4b5f07,
- 0x8e0de15e, 0x8fcf8b69, 0x8a809dec, 0x8b42f7db, 0x89044982,
- 0x88c623b5, 0x839a6488, 0x82580ebf, 0x801eb0e6, 0x81dcdad1,
- 0x8493cc54, 0x8551a663, 0x8717183a, 0x86d5720d, 0xa9e2d0a0,
- 0xa820ba97, 0xaa6604ce, 0xaba46ef9, 0xaeeb787c, 0xaf29124b,
- 0xad6fac12, 0xacadc625, 0xa7f18118, 0xa633eb2f, 0xa4755576,
- 0xa5b73f41, 0xa0f829c4, 0xa13a43f3, 0xa37cfdaa, 0xa2be979d,
- 0xb5c473d0, 0xb40619e7, 0xb640a7be, 0xb782cd89, 0xb2cddb0c,
- 0xb30fb13b, 0xb1490f62, 0xb08b6555, 0xbbd72268, 0xba15485f,
- 0xb853f606, 0xb9919c31, 0xbcde8ab4, 0xbd1ce083, 0xbf5a5eda,
- 0xbe9834ed},
- {0x00000000, 0x191b3141, 0x32366282, 0x2b2d53c3, 0x646cc504,
- 0x7d77f445, 0x565aa786, 0x4f4196c7, 0xc8d98a08, 0xd1c2bb49,
- 0xfaefe88a, 0xe3f4d9cb, 0xacb54f0c, 0xb5ae7e4d, 0x9e832d8e,
- 0x87981ccf, 0x4ac21251, 0x53d92310, 0x78f470d3, 0x61ef4192,
- 0x2eaed755, 0x37b5e614, 0x1c98b5d7, 0x05838496, 0x821b9859,
- 0x9b00a918, 0xb02dfadb, 0xa936cb9a, 0xe6775d5d, 0xff6c6c1c,
- 0xd4413fdf, 0xcd5a0e9e, 0x958424a2, 0x8c9f15e3, 0xa7b24620,
- 0xbea97761, 0xf1e8e1a6, 0xe8f3d0e7, 0xc3de8324, 0xdac5b265,
- 0x5d5daeaa, 0x44469feb, 0x6f6bcc28, 0x7670fd69, 0x39316bae,
- 0x202a5aef, 0x0b07092c, 0x121c386d, 0xdf4636f3, 0xc65d07b2,
- 0xed705471, 0xf46b6530, 0xbb2af3f7, 0xa231c2b6, 0x891c9175,
- 0x9007a034, 0x179fbcfb, 0x0e848dba, 0x25a9de79, 0x3cb2ef38,
- 0x73f379ff, 0x6ae848be, 0x41c51b7d, 0x58de2a3c, 0xf0794f05,
- 0xe9627e44, 0xc24f2d87, 0xdb541cc6, 0x94158a01, 0x8d0ebb40,
- 0xa623e883, 0xbf38d9c2, 0x38a0c50d, 0x21bbf44c, 0x0a96a78f,
- 0x138d96ce, 0x5ccc0009, 0x45d73148, 0x6efa628b, 0x77e153ca,
- 0xbabb5d54, 0xa3a06c15, 0x888d3fd6, 0x91960e97, 0xded79850,
- 0xc7cca911, 0xece1fad2, 0xf5facb93, 0x7262d75c, 0x6b79e61d,
- 0x4054b5de, 0x594f849f, 0x160e1258, 0x0f152319, 0x243870da,
- 0x3d23419b, 0x65fd6ba7, 0x7ce65ae6, 0x57cb0925, 0x4ed03864,
- 0x0191aea3, 0x188a9fe2, 0x33a7cc21, 0x2abcfd60, 0xad24e1af,
- 0xb43fd0ee, 0x9f12832d, 0x8609b26c, 0xc94824ab, 0xd05315ea,
- 0xfb7e4629, 0xe2657768, 0x2f3f79f6, 0x362448b7, 0x1d091b74,
- 0x04122a35, 0x4b53bcf2, 0x52488db3, 0x7965de70, 0x607eef31,
- 0xe7e6f3fe, 0xfefdc2bf, 0xd5d0917c, 0xcccba03d, 0x838a36fa,
- 0x9a9107bb, 0xb1bc5478, 0xa8a76539, 0x3b83984b, 0x2298a90a,
- 0x09b5fac9, 0x10aecb88, 0x5fef5d4f, 0x46f46c0e, 0x6dd93fcd,
- 0x74c20e8c, 0xf35a1243, 0xea412302, 0xc16c70c1, 0xd8774180,
- 0x9736d747, 0x8e2de606, 0xa500b5c5, 0xbc1b8484, 0x71418a1a,
- 0x685abb5b, 0x4377e898, 0x5a6cd9d9, 0x152d4f1e, 0x0c367e5f,
- 0x271b2d9c, 0x3e001cdd, 0xb9980012, 0xa0833153, 0x8bae6290,
- 0x92b553d1, 0xddf4c516, 0xc4eff457, 0xefc2a794, 0xf6d996d5,
- 0xae07bce9, 0xb71c8da8, 0x9c31de6b, 0x852aef2a, 0xca6b79ed,
- 0xd37048ac, 0xf85d1b6f, 0xe1462a2e, 0x66de36e1, 0x7fc507a0,
- 0x54e85463, 0x4df36522, 0x02b2f3e5, 0x1ba9c2a4, 0x30849167,
- 0x299fa026, 0xe4c5aeb8, 0xfdde9ff9, 0xd6f3cc3a, 0xcfe8fd7b,
- 0x80a96bbc, 0x99b25afd, 0xb29f093e, 0xab84387f, 0x2c1c24b0,
- 0x350715f1, 0x1e2a4632, 0x07317773, 0x4870e1b4, 0x516bd0f5,
- 0x7a468336, 0x635db277, 0xcbfad74e, 0xd2e1e60f, 0xf9ccb5cc,
- 0xe0d7848d, 0xaf96124a, 0xb68d230b, 0x9da070c8, 0x84bb4189,
- 0x03235d46, 0x1a386c07, 0x31153fc4, 0x280e0e85, 0x674f9842,
- 0x7e54a903, 0x5579fac0, 0x4c62cb81, 0x8138c51f, 0x9823f45e,
- 0xb30ea79d, 0xaa1596dc, 0xe554001b, 0xfc4f315a, 0xd7626299,
- 0xce7953d8, 0x49e14f17, 0x50fa7e56, 0x7bd72d95, 0x62cc1cd4,
- 0x2d8d8a13, 0x3496bb52, 0x1fbbe891, 0x06a0d9d0, 0x5e7ef3ec,
- 0x4765c2ad, 0x6c48916e, 0x7553a02f, 0x3a1236e8, 0x230907a9,
- 0x0824546a, 0x113f652b, 0x96a779e4, 0x8fbc48a5, 0xa4911b66,
- 0xbd8a2a27, 0xf2cbbce0, 0xebd08da1, 0xc0fdde62, 0xd9e6ef23,
- 0x14bce1bd, 0x0da7d0fc, 0x268a833f, 0x3f91b27e, 0x70d024b9,
- 0x69cb15f8, 0x42e6463b, 0x5bfd777a, 0xdc656bb5, 0xc57e5af4,
- 0xee530937, 0xf7483876, 0xb809aeb1, 0xa1129ff0, 0x8a3fcc33,
- 0x9324fd72},
- {0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419,
- 0x706af48f, 0xe963a535, 0x9e6495a3, 0x0edb8832, 0x79dcb8a4,
- 0xe0d5e91e, 0x97d2d988, 0x09b64c2b, 0x7eb17cbd, 0xe7b82d07,
- 0x90bf1d91, 0x1db71064, 0x6ab020f2, 0xf3b97148, 0x84be41de,
- 0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7, 0x136c9856,
- 0x646ba8c0, 0xfd62f97a, 0x8a65c9ec, 0x14015c4f, 0x63066cd9,
- 0xfa0f3d63, 0x8d080df5, 0x3b6e20c8, 0x4c69105e, 0xd56041e4,
- 0xa2677172, 0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b,
- 0x35b5a8fa, 0x42b2986c, 0xdbbbc9d6, 0xacbcf940, 0x32d86ce3,
- 0x45df5c75, 0xdcd60dcf, 0xabd13d59, 0x26d930ac, 0x51de003a,
- 0xc8d75180, 0xbfd06116, 0x21b4f4b5, 0x56b3c423, 0xcfba9599,
- 0xb8bda50f, 0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924,
- 0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d, 0x76dc4190,
- 0x01db7106, 0x98d220bc, 0xefd5102a, 0x71b18589, 0x06b6b51f,
- 0x9fbfe4a5, 0xe8b8d433, 0x7807c9a2, 0x0f00f934, 0x9609a88e,
- 0xe10e9818, 0x7f6a0dbb, 0x086d3d2d, 0x91646c97, 0xe6635c01,
- 0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e, 0x6c0695ed,
- 0x1b01a57b, 0x8208f4c1, 0xf50fc457, 0x65b0d9c6, 0x12b7e950,
- 0x8bbeb8ea, 0xfcb9887c, 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3,
- 0xfbd44c65, 0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2,
- 0x4adfa541, 0x3dd895d7, 0xa4d1c46d, 0xd3d6f4fb, 0x4369e96a,
- 0x346ed9fc, 0xad678846, 0xda60b8d0, 0x44042d73, 0x33031de5,
- 0xaa0a4c5f, 0xdd0d7cc9, 0x5005713c, 0x270241aa, 0xbe0b1010,
- 0xc90c2086, 0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f,
- 0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4, 0x59b33d17,
- 0x2eb40d81, 0xb7bd5c3b, 0xc0ba6cad, 0xedb88320, 0x9abfb3b6,
- 0x03b6e20c, 0x74b1d29a, 0xead54739, 0x9dd277af, 0x04db2615,
- 0x73dc1683, 0xe3630b12, 0x94643b84, 0x0d6d6a3e, 0x7a6a5aa8,
- 0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1, 0xf00f9344,
- 0x8708a3d2, 0x1e01f268, 0x6906c2fe, 0xf762575d, 0x806567cb,
- 0x196c3671, 0x6e6b06e7, 0xfed41b76, 0x89d32be0, 0x10da7a5a,
- 0x67dd4acc, 0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5,
- 0xd6d6a3e8, 0xa1d1937e, 0x38d8c2c4, 0x4fdff252, 0xd1bb67f1,
- 0xa6bc5767, 0x3fb506dd, 0x48b2364b, 0xd80d2bda, 0xaf0a1b4c,
- 0x36034af6, 0x41047a60, 0xdf60efc3, 0xa867df55, 0x316e8eef,
- 0x4669be79, 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236,
- 0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f, 0xc5ba3bbe,
- 0xb2bd0b28, 0x2bb45a92, 0x5cb36a04, 0xc2d7ffa7, 0xb5d0cf31,
- 0x2cd99e8b, 0x5bdeae1d, 0x9b64c2b0, 0xec63f226, 0x756aa39c,
- 0x026d930a, 0x9c0906a9, 0xeb0e363f, 0x72076785, 0x05005713,
- 0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38, 0x92d28e9b,
- 0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21, 0x86d3d2d4, 0xf1d4e242,
- 0x68ddb3f8, 0x1fda836e, 0x81be16cd, 0xf6b9265b, 0x6fb077e1,
- 0x18b74777, 0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c,
- 0x8f659eff, 0xf862ae69, 0x616bffd3, 0x166ccf45, 0xa00ae278,
- 0xd70dd2ee, 0x4e048354, 0x3903b3c2, 0xa7672661, 0xd06016f7,
- 0x4969474d, 0x3e6e77db, 0xaed16a4a, 0xd9d65adc, 0x40df0b66,
- 0x37d83bf0, 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9,
- 0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, 0xbad03605,
- 0xcdd70693, 0x54de5729, 0x23d967bf, 0xb3667a2e, 0xc4614ab8,
- 0x5d681b02, 0x2a6f2b94, 0xb40bbe37, 0xc30c8ea1, 0x5a05df1b,
- 0x2d02ef8d}};
-
-local const z_word_t FAR crc_braid_big_table[][256] = {
- {0x0000000000000000, 0x9630077700000000, 0x2c610eee00000000,
- 0xba51099900000000, 0x19c46d0700000000, 0x8ff46a7000000000,
- 0x35a563e900000000, 0xa395649e00000000, 0x3288db0e00000000,
- 0xa4b8dc7900000000, 0x1ee9d5e000000000, 0x88d9d29700000000,
- 0x2b4cb60900000000, 0xbd7cb17e00000000, 0x072db8e700000000,
- 0x911dbf9000000000, 0x6410b71d00000000, 0xf220b06a00000000,
- 0x4871b9f300000000, 0xde41be8400000000, 0x7dd4da1a00000000,
- 0xebe4dd6d00000000, 0x51b5d4f400000000, 0xc785d38300000000,
- 0x56986c1300000000, 0xc0a86b6400000000, 0x7af962fd00000000,
- 0xecc9658a00000000, 0x4f5c011400000000, 0xd96c066300000000,
- 0x633d0ffa00000000, 0xf50d088d00000000, 0xc8206e3b00000000,
- 0x5e10694c00000000, 0xe44160d500000000, 0x727167a200000000,
- 0xd1e4033c00000000, 0x47d4044b00000000, 0xfd850dd200000000,
- 0x6bb50aa500000000, 0xfaa8b53500000000, 0x6c98b24200000000,
- 0xd6c9bbdb00000000, 0x40f9bcac00000000, 0xe36cd83200000000,
- 0x755cdf4500000000, 0xcf0dd6dc00000000, 0x593dd1ab00000000,
- 0xac30d92600000000, 0x3a00de5100000000, 0x8051d7c800000000,
- 0x1661d0bf00000000, 0xb5f4b42100000000, 0x23c4b35600000000,
- 0x9995bacf00000000, 0x0fa5bdb800000000, 0x9eb8022800000000,
- 0x0888055f00000000, 0xb2d90cc600000000, 0x24e90bb100000000,
- 0x877c6f2f00000000, 0x114c685800000000, 0xab1d61c100000000,
- 0x3d2d66b600000000, 0x9041dc7600000000, 0x0671db0100000000,
- 0xbc20d29800000000, 0x2a10d5ef00000000, 0x8985b17100000000,
- 0x1fb5b60600000000, 0xa5e4bf9f00000000, 0x33d4b8e800000000,
- 0xa2c9077800000000, 0x34f9000f00000000, 0x8ea8099600000000,
- 0x18980ee100000000, 0xbb0d6a7f00000000, 0x2d3d6d0800000000,
- 0x976c649100000000, 0x015c63e600000000, 0xf4516b6b00000000,
- 0x62616c1c00000000, 0xd830658500000000, 0x4e0062f200000000,
- 0xed95066c00000000, 0x7ba5011b00000000, 0xc1f4088200000000,
- 0x57c40ff500000000, 0xc6d9b06500000000, 0x50e9b71200000000,
- 0xeab8be8b00000000, 0x7c88b9fc00000000, 0xdf1ddd6200000000,
- 0x492dda1500000000, 0xf37cd38c00000000, 0x654cd4fb00000000,
- 0x5861b24d00000000, 0xce51b53a00000000, 0x7400bca300000000,
- 0xe230bbd400000000, 0x41a5df4a00000000, 0xd795d83d00000000,
- 0x6dc4d1a400000000, 0xfbf4d6d300000000, 0x6ae9694300000000,
- 0xfcd96e3400000000, 0x468867ad00000000, 0xd0b860da00000000,
- 0x732d044400000000, 0xe51d033300000000, 0x5f4c0aaa00000000,
- 0xc97c0ddd00000000, 0x3c71055000000000, 0xaa41022700000000,
- 0x10100bbe00000000, 0x86200cc900000000, 0x25b5685700000000,
- 0xb3856f2000000000, 0x09d466b900000000, 0x9fe461ce00000000,
- 0x0ef9de5e00000000, 0x98c9d92900000000, 0x2298d0b000000000,
- 0xb4a8d7c700000000, 0x173db35900000000, 0x810db42e00000000,
- 0x3b5cbdb700000000, 0xad6cbac000000000, 0x2083b8ed00000000,
- 0xb6b3bf9a00000000, 0x0ce2b60300000000, 0x9ad2b17400000000,
- 0x3947d5ea00000000, 0xaf77d29d00000000, 0x1526db0400000000,
- 0x8316dc7300000000, 0x120b63e300000000, 0x843b649400000000,
- 0x3e6a6d0d00000000, 0xa85a6a7a00000000, 0x0bcf0ee400000000,
- 0x9dff099300000000, 0x27ae000a00000000, 0xb19e077d00000000,
- 0x44930ff000000000, 0xd2a3088700000000, 0x68f2011e00000000,
- 0xfec2066900000000, 0x5d5762f700000000, 0xcb67658000000000,
- 0x71366c1900000000, 0xe7066b6e00000000, 0x761bd4fe00000000,
- 0xe02bd38900000000, 0x5a7ada1000000000, 0xcc4add6700000000,
- 0x6fdfb9f900000000, 0xf9efbe8e00000000, 0x43beb71700000000,
- 0xd58eb06000000000, 0xe8a3d6d600000000, 0x7e93d1a100000000,
- 0xc4c2d83800000000, 0x52f2df4f00000000, 0xf167bbd100000000,
- 0x6757bca600000000, 0xdd06b53f00000000, 0x4b36b24800000000,
- 0xda2b0dd800000000, 0x4c1b0aaf00000000, 0xf64a033600000000,
- 0x607a044100000000, 0xc3ef60df00000000, 0x55df67a800000000,
- 0xef8e6e3100000000, 0x79be694600000000, 0x8cb361cb00000000,
- 0x1a8366bc00000000, 0xa0d26f2500000000, 0x36e2685200000000,
- 0x95770ccc00000000, 0x03470bbb00000000, 0xb916022200000000,
- 0x2f26055500000000, 0xbe3bbac500000000, 0x280bbdb200000000,
- 0x925ab42b00000000, 0x046ab35c00000000, 0xa7ffd7c200000000,
- 0x31cfd0b500000000, 0x8b9ed92c00000000, 0x1daede5b00000000,
- 0xb0c2649b00000000, 0x26f263ec00000000, 0x9ca36a7500000000,
- 0x0a936d0200000000, 0xa906099c00000000, 0x3f360eeb00000000,
- 0x8567077200000000, 0x1357000500000000, 0x824abf9500000000,
- 0x147ab8e200000000, 0xae2bb17b00000000, 0x381bb60c00000000,
- 0x9b8ed29200000000, 0x0dbed5e500000000, 0xb7efdc7c00000000,
- 0x21dfdb0b00000000, 0xd4d2d38600000000, 0x42e2d4f100000000,
- 0xf8b3dd6800000000, 0x6e83da1f00000000, 0xcd16be8100000000,
- 0x5b26b9f600000000, 0xe177b06f00000000, 0x7747b71800000000,
- 0xe65a088800000000, 0x706a0fff00000000, 0xca3b066600000000,
- 0x5c0b011100000000, 0xff9e658f00000000, 0x69ae62f800000000,
- 0xd3ff6b6100000000, 0x45cf6c1600000000, 0x78e20aa000000000,
- 0xeed20dd700000000, 0x5483044e00000000, 0xc2b3033900000000,
- 0x612667a700000000, 0xf71660d000000000, 0x4d47694900000000,
- 0xdb776e3e00000000, 0x4a6ad1ae00000000, 0xdc5ad6d900000000,
- 0x660bdf4000000000, 0xf03bd83700000000, 0x53aebca900000000,
- 0xc59ebbde00000000, 0x7fcfb24700000000, 0xe9ffb53000000000,
- 0x1cf2bdbd00000000, 0x8ac2baca00000000, 0x3093b35300000000,
- 0xa6a3b42400000000, 0x0536d0ba00000000, 0x9306d7cd00000000,
- 0x2957de5400000000, 0xbf67d92300000000, 0x2e7a66b300000000,
- 0xb84a61c400000000, 0x021b685d00000000, 0x942b6f2a00000000,
- 0x37be0bb400000000, 0xa18e0cc300000000, 0x1bdf055a00000000,
- 0x8def022d00000000},
- {0x0000000000000000, 0x41311b1900000000, 0x8262363200000000,
- 0xc3532d2b00000000, 0x04c56c6400000000, 0x45f4777d00000000,
- 0x86a75a5600000000, 0xc796414f00000000, 0x088ad9c800000000,
- 0x49bbc2d100000000, 0x8ae8effa00000000, 0xcbd9f4e300000000,
- 0x0c4fb5ac00000000, 0x4d7eaeb500000000, 0x8e2d839e00000000,
- 0xcf1c988700000000, 0x5112c24a00000000, 0x1023d95300000000,
- 0xd370f47800000000, 0x9241ef6100000000, 0x55d7ae2e00000000,
- 0x14e6b53700000000, 0xd7b5981c00000000, 0x9684830500000000,
- 0x59981b8200000000, 0x18a9009b00000000, 0xdbfa2db000000000,
- 0x9acb36a900000000, 0x5d5d77e600000000, 0x1c6c6cff00000000,
- 0xdf3f41d400000000, 0x9e0e5acd00000000, 0xa224849500000000,
- 0xe3159f8c00000000, 0x2046b2a700000000, 0x6177a9be00000000,
- 0xa6e1e8f100000000, 0xe7d0f3e800000000, 0x2483dec300000000,
- 0x65b2c5da00000000, 0xaaae5d5d00000000, 0xeb9f464400000000,
- 0x28cc6b6f00000000, 0x69fd707600000000, 0xae6b313900000000,
- 0xef5a2a2000000000, 0x2c09070b00000000, 0x6d381c1200000000,
- 0xf33646df00000000, 0xb2075dc600000000, 0x715470ed00000000,
- 0x30656bf400000000, 0xf7f32abb00000000, 0xb6c231a200000000,
- 0x75911c8900000000, 0x34a0079000000000, 0xfbbc9f1700000000,
- 0xba8d840e00000000, 0x79dea92500000000, 0x38efb23c00000000,
- 0xff79f37300000000, 0xbe48e86a00000000, 0x7d1bc54100000000,
- 0x3c2ade5800000000, 0x054f79f000000000, 0x447e62e900000000,
- 0x872d4fc200000000, 0xc61c54db00000000, 0x018a159400000000,
- 0x40bb0e8d00000000, 0x83e823a600000000, 0xc2d938bf00000000,
- 0x0dc5a03800000000, 0x4cf4bb2100000000, 0x8fa7960a00000000,
- 0xce968d1300000000, 0x0900cc5c00000000, 0x4831d74500000000,
- 0x8b62fa6e00000000, 0xca53e17700000000, 0x545dbbba00000000,
- 0x156ca0a300000000, 0xd63f8d8800000000, 0x970e969100000000,
- 0x5098d7de00000000, 0x11a9ccc700000000, 0xd2fae1ec00000000,
- 0x93cbfaf500000000, 0x5cd7627200000000, 0x1de6796b00000000,
- 0xdeb5544000000000, 0x9f844f5900000000, 0x58120e1600000000,
- 0x1923150f00000000, 0xda70382400000000, 0x9b41233d00000000,
- 0xa76bfd6500000000, 0xe65ae67c00000000, 0x2509cb5700000000,
- 0x6438d04e00000000, 0xa3ae910100000000, 0xe29f8a1800000000,
- 0x21cca73300000000, 0x60fdbc2a00000000, 0xafe124ad00000000,
- 0xeed03fb400000000, 0x2d83129f00000000, 0x6cb2098600000000,
- 0xab2448c900000000, 0xea1553d000000000, 0x29467efb00000000,
- 0x687765e200000000, 0xf6793f2f00000000, 0xb748243600000000,
- 0x741b091d00000000, 0x352a120400000000, 0xf2bc534b00000000,
- 0xb38d485200000000, 0x70de657900000000, 0x31ef7e6000000000,
- 0xfef3e6e700000000, 0xbfc2fdfe00000000, 0x7c91d0d500000000,
- 0x3da0cbcc00000000, 0xfa368a8300000000, 0xbb07919a00000000,
- 0x7854bcb100000000, 0x3965a7a800000000, 0x4b98833b00000000,
- 0x0aa9982200000000, 0xc9fab50900000000, 0x88cbae1000000000,
- 0x4f5def5f00000000, 0x0e6cf44600000000, 0xcd3fd96d00000000,
- 0x8c0ec27400000000, 0x43125af300000000, 0x022341ea00000000,
- 0xc1706cc100000000, 0x804177d800000000, 0x47d7369700000000,
- 0x06e62d8e00000000, 0xc5b500a500000000, 0x84841bbc00000000,
- 0x1a8a417100000000, 0x5bbb5a6800000000, 0x98e8774300000000,
- 0xd9d96c5a00000000, 0x1e4f2d1500000000, 0x5f7e360c00000000,
- 0x9c2d1b2700000000, 0xdd1c003e00000000, 0x120098b900000000,
- 0x533183a000000000, 0x9062ae8b00000000, 0xd153b59200000000,
- 0x16c5f4dd00000000, 0x57f4efc400000000, 0x94a7c2ef00000000,
- 0xd596d9f600000000, 0xe9bc07ae00000000, 0xa88d1cb700000000,
- 0x6bde319c00000000, 0x2aef2a8500000000, 0xed796bca00000000,
- 0xac4870d300000000, 0x6f1b5df800000000, 0x2e2a46e100000000,
- 0xe136de6600000000, 0xa007c57f00000000, 0x6354e85400000000,
- 0x2265f34d00000000, 0xe5f3b20200000000, 0xa4c2a91b00000000,
- 0x6791843000000000, 0x26a09f2900000000, 0xb8aec5e400000000,
- 0xf99fdefd00000000, 0x3accf3d600000000, 0x7bfde8cf00000000,
- 0xbc6ba98000000000, 0xfd5ab29900000000, 0x3e099fb200000000,
- 0x7f3884ab00000000, 0xb0241c2c00000000, 0xf115073500000000,
- 0x32462a1e00000000, 0x7377310700000000, 0xb4e1704800000000,
- 0xf5d06b5100000000, 0x3683467a00000000, 0x77b25d6300000000,
- 0x4ed7facb00000000, 0x0fe6e1d200000000, 0xccb5ccf900000000,
- 0x8d84d7e000000000, 0x4a1296af00000000, 0x0b238db600000000,
- 0xc870a09d00000000, 0x8941bb8400000000, 0x465d230300000000,
- 0x076c381a00000000, 0xc43f153100000000, 0x850e0e2800000000,
- 0x42984f6700000000, 0x03a9547e00000000, 0xc0fa795500000000,
- 0x81cb624c00000000, 0x1fc5388100000000, 0x5ef4239800000000,
- 0x9da70eb300000000, 0xdc9615aa00000000, 0x1b0054e500000000,
- 0x5a314ffc00000000, 0x996262d700000000, 0xd85379ce00000000,
- 0x174fe14900000000, 0x567efa5000000000, 0x952dd77b00000000,
- 0xd41ccc6200000000, 0x138a8d2d00000000, 0x52bb963400000000,
- 0x91e8bb1f00000000, 0xd0d9a00600000000, 0xecf37e5e00000000,
- 0xadc2654700000000, 0x6e91486c00000000, 0x2fa0537500000000,
- 0xe836123a00000000, 0xa907092300000000, 0x6a54240800000000,
- 0x2b653f1100000000, 0xe479a79600000000, 0xa548bc8f00000000,
- 0x661b91a400000000, 0x272a8abd00000000, 0xe0bccbf200000000,
- 0xa18dd0eb00000000, 0x62defdc000000000, 0x23efe6d900000000,
- 0xbde1bc1400000000, 0xfcd0a70d00000000, 0x3f838a2600000000,
- 0x7eb2913f00000000, 0xb924d07000000000, 0xf815cb6900000000,
- 0x3b46e64200000000, 0x7a77fd5b00000000, 0xb56b65dc00000000,
- 0xf45a7ec500000000, 0x370953ee00000000, 0x763848f700000000,
- 0xb1ae09b800000000, 0xf09f12a100000000, 0x33cc3f8a00000000,
- 0x72fd249300000000},
- {0x0000000000000000, 0x376ac20100000000, 0x6ed4840300000000,
- 0x59be460200000000, 0xdca8090700000000, 0xebc2cb0600000000,
- 0xb27c8d0400000000, 0x85164f0500000000, 0xb851130e00000000,
- 0x8f3bd10f00000000, 0xd685970d00000000, 0xe1ef550c00000000,
- 0x64f91a0900000000, 0x5393d80800000000, 0x0a2d9e0a00000000,
- 0x3d475c0b00000000, 0x70a3261c00000000, 0x47c9e41d00000000,
- 0x1e77a21f00000000, 0x291d601e00000000, 0xac0b2f1b00000000,
- 0x9b61ed1a00000000, 0xc2dfab1800000000, 0xf5b5691900000000,
- 0xc8f2351200000000, 0xff98f71300000000, 0xa626b11100000000,
- 0x914c731000000000, 0x145a3c1500000000, 0x2330fe1400000000,
- 0x7a8eb81600000000, 0x4de47a1700000000, 0xe0464d3800000000,
- 0xd72c8f3900000000, 0x8e92c93b00000000, 0xb9f80b3a00000000,
- 0x3cee443f00000000, 0x0b84863e00000000, 0x523ac03c00000000,
- 0x6550023d00000000, 0x58175e3600000000, 0x6f7d9c3700000000,
- 0x36c3da3500000000, 0x01a9183400000000, 0x84bf573100000000,
- 0xb3d5953000000000, 0xea6bd33200000000, 0xdd01113300000000,
- 0x90e56b2400000000, 0xa78fa92500000000, 0xfe31ef2700000000,
- 0xc95b2d2600000000, 0x4c4d622300000000, 0x7b27a02200000000,
- 0x2299e62000000000, 0x15f3242100000000, 0x28b4782a00000000,
- 0x1fdeba2b00000000, 0x4660fc2900000000, 0x710a3e2800000000,
- 0xf41c712d00000000, 0xc376b32c00000000, 0x9ac8f52e00000000,
- 0xada2372f00000000, 0xc08d9a7000000000, 0xf7e7587100000000,
- 0xae591e7300000000, 0x9933dc7200000000, 0x1c25937700000000,
- 0x2b4f517600000000, 0x72f1177400000000, 0x459bd57500000000,
- 0x78dc897e00000000, 0x4fb64b7f00000000, 0x16080d7d00000000,
- 0x2162cf7c00000000, 0xa474807900000000, 0x931e427800000000,
- 0xcaa0047a00000000, 0xfdcac67b00000000, 0xb02ebc6c00000000,
- 0x87447e6d00000000, 0xdefa386f00000000, 0xe990fa6e00000000,
- 0x6c86b56b00000000, 0x5bec776a00000000, 0x0252316800000000,
- 0x3538f36900000000, 0x087faf6200000000, 0x3f156d6300000000,
- 0x66ab2b6100000000, 0x51c1e96000000000, 0xd4d7a66500000000,
- 0xe3bd646400000000, 0xba03226600000000, 0x8d69e06700000000,
- 0x20cbd74800000000, 0x17a1154900000000, 0x4e1f534b00000000,
- 0x7975914a00000000, 0xfc63de4f00000000, 0xcb091c4e00000000,
- 0x92b75a4c00000000, 0xa5dd984d00000000, 0x989ac44600000000,
- 0xaff0064700000000, 0xf64e404500000000, 0xc124824400000000,
- 0x4432cd4100000000, 0x73580f4000000000, 0x2ae6494200000000,
- 0x1d8c8b4300000000, 0x5068f15400000000, 0x6702335500000000,
- 0x3ebc755700000000, 0x09d6b75600000000, 0x8cc0f85300000000,
- 0xbbaa3a5200000000, 0xe2147c5000000000, 0xd57ebe5100000000,
- 0xe839e25a00000000, 0xdf53205b00000000, 0x86ed665900000000,
- 0xb187a45800000000, 0x3491eb5d00000000, 0x03fb295c00000000,
- 0x5a456f5e00000000, 0x6d2fad5f00000000, 0x801b35e100000000,
- 0xb771f7e000000000, 0xeecfb1e200000000, 0xd9a573e300000000,
- 0x5cb33ce600000000, 0x6bd9fee700000000, 0x3267b8e500000000,
- 0x050d7ae400000000, 0x384a26ef00000000, 0x0f20e4ee00000000,
- 0x569ea2ec00000000, 0x61f460ed00000000, 0xe4e22fe800000000,
- 0xd388ede900000000, 0x8a36abeb00000000, 0xbd5c69ea00000000,
- 0xf0b813fd00000000, 0xc7d2d1fc00000000, 0x9e6c97fe00000000,
- 0xa90655ff00000000, 0x2c101afa00000000, 0x1b7ad8fb00000000,
- 0x42c49ef900000000, 0x75ae5cf800000000, 0x48e900f300000000,
- 0x7f83c2f200000000, 0x263d84f000000000, 0x115746f100000000,
- 0x944109f400000000, 0xa32bcbf500000000, 0xfa958df700000000,
- 0xcdff4ff600000000, 0x605d78d900000000, 0x5737bad800000000,
- 0x0e89fcda00000000, 0x39e33edb00000000, 0xbcf571de00000000,
- 0x8b9fb3df00000000, 0xd221f5dd00000000, 0xe54b37dc00000000,
- 0xd80c6bd700000000, 0xef66a9d600000000, 0xb6d8efd400000000,
- 0x81b22dd500000000, 0x04a462d000000000, 0x33cea0d100000000,
- 0x6a70e6d300000000, 0x5d1a24d200000000, 0x10fe5ec500000000,
- 0x27949cc400000000, 0x7e2adac600000000, 0x494018c700000000,
- 0xcc5657c200000000, 0xfb3c95c300000000, 0xa282d3c100000000,
- 0x95e811c000000000, 0xa8af4dcb00000000, 0x9fc58fca00000000,
- 0xc67bc9c800000000, 0xf1110bc900000000, 0x740744cc00000000,
- 0x436d86cd00000000, 0x1ad3c0cf00000000, 0x2db902ce00000000,
- 0x4096af9100000000, 0x77fc6d9000000000, 0x2e422b9200000000,
- 0x1928e99300000000, 0x9c3ea69600000000, 0xab54649700000000,
- 0xf2ea229500000000, 0xc580e09400000000, 0xf8c7bc9f00000000,
- 0xcfad7e9e00000000, 0x9613389c00000000, 0xa179fa9d00000000,
- 0x246fb59800000000, 0x1305779900000000, 0x4abb319b00000000,
- 0x7dd1f39a00000000, 0x3035898d00000000, 0x075f4b8c00000000,
- 0x5ee10d8e00000000, 0x698bcf8f00000000, 0xec9d808a00000000,
- 0xdbf7428b00000000, 0x8249048900000000, 0xb523c68800000000,
- 0x88649a8300000000, 0xbf0e588200000000, 0xe6b01e8000000000,
- 0xd1dadc8100000000, 0x54cc938400000000, 0x63a6518500000000,
- 0x3a18178700000000, 0x0d72d58600000000, 0xa0d0e2a900000000,
- 0x97ba20a800000000, 0xce0466aa00000000, 0xf96ea4ab00000000,
- 0x7c78ebae00000000, 0x4b1229af00000000, 0x12ac6fad00000000,
- 0x25c6adac00000000, 0x1881f1a700000000, 0x2feb33a600000000,
- 0x765575a400000000, 0x413fb7a500000000, 0xc429f8a000000000,
- 0xf3433aa100000000, 0xaafd7ca300000000, 0x9d97bea200000000,
- 0xd073c4b500000000, 0xe71906b400000000, 0xbea740b600000000,
- 0x89cd82b700000000, 0x0cdbcdb200000000, 0x3bb10fb300000000,
- 0x620f49b100000000, 0x55658bb000000000, 0x6822d7bb00000000,
- 0x5f4815ba00000000, 0x06f653b800000000, 0x319c91b900000000,
- 0xb48adebc00000000, 0x83e01cbd00000000, 0xda5e5abf00000000,
- 0xed3498be00000000},
- {0x0000000000000000, 0x6567bcb800000000, 0x8bc809aa00000000,
- 0xeeafb51200000000, 0x5797628f00000000, 0x32f0de3700000000,
- 0xdc5f6b2500000000, 0xb938d79d00000000, 0xef28b4c500000000,
- 0x8a4f087d00000000, 0x64e0bd6f00000000, 0x018701d700000000,
- 0xb8bfd64a00000000, 0xddd86af200000000, 0x3377dfe000000000,
- 0x5610635800000000, 0x9f57195000000000, 0xfa30a5e800000000,
- 0x149f10fa00000000, 0x71f8ac4200000000, 0xc8c07bdf00000000,
- 0xada7c76700000000, 0x4308727500000000, 0x266fcecd00000000,
- 0x707fad9500000000, 0x1518112d00000000, 0xfbb7a43f00000000,
- 0x9ed0188700000000, 0x27e8cf1a00000000, 0x428f73a200000000,
- 0xac20c6b000000000, 0xc9477a0800000000, 0x3eaf32a000000000,
- 0x5bc88e1800000000, 0xb5673b0a00000000, 0xd00087b200000000,
- 0x6938502f00000000, 0x0c5fec9700000000, 0xe2f0598500000000,
- 0x8797e53d00000000, 0xd187866500000000, 0xb4e03add00000000,
- 0x5a4f8fcf00000000, 0x3f28337700000000, 0x8610e4ea00000000,
- 0xe377585200000000, 0x0dd8ed4000000000, 0x68bf51f800000000,
- 0xa1f82bf000000000, 0xc49f974800000000, 0x2a30225a00000000,
- 0x4f579ee200000000, 0xf66f497f00000000, 0x9308f5c700000000,
- 0x7da740d500000000, 0x18c0fc6d00000000, 0x4ed09f3500000000,
- 0x2bb7238d00000000, 0xc518969f00000000, 0xa07f2a2700000000,
- 0x1947fdba00000000, 0x7c20410200000000, 0x928ff41000000000,
- 0xf7e848a800000000, 0x3d58149b00000000, 0x583fa82300000000,
- 0xb6901d3100000000, 0xd3f7a18900000000, 0x6acf761400000000,
- 0x0fa8caac00000000, 0xe1077fbe00000000, 0x8460c30600000000,
- 0xd270a05e00000000, 0xb7171ce600000000, 0x59b8a9f400000000,
- 0x3cdf154c00000000, 0x85e7c2d100000000, 0xe0807e6900000000,
- 0x0e2fcb7b00000000, 0x6b4877c300000000, 0xa20f0dcb00000000,
- 0xc768b17300000000, 0x29c7046100000000, 0x4ca0b8d900000000,
- 0xf5986f4400000000, 0x90ffd3fc00000000, 0x7e5066ee00000000,
- 0x1b37da5600000000, 0x4d27b90e00000000, 0x284005b600000000,
- 0xc6efb0a400000000, 0xa3880c1c00000000, 0x1ab0db8100000000,
- 0x7fd7673900000000, 0x9178d22b00000000, 0xf41f6e9300000000,
- 0x03f7263b00000000, 0x66909a8300000000, 0x883f2f9100000000,
- 0xed58932900000000, 0x546044b400000000, 0x3107f80c00000000,
- 0xdfa84d1e00000000, 0xbacff1a600000000, 0xecdf92fe00000000,
- 0x89b82e4600000000, 0x67179b5400000000, 0x027027ec00000000,
- 0xbb48f07100000000, 0xde2f4cc900000000, 0x3080f9db00000000,
- 0x55e7456300000000, 0x9ca03f6b00000000, 0xf9c783d300000000,
- 0x176836c100000000, 0x720f8a7900000000, 0xcb375de400000000,
- 0xae50e15c00000000, 0x40ff544e00000000, 0x2598e8f600000000,
- 0x73888bae00000000, 0x16ef371600000000, 0xf840820400000000,
- 0x9d273ebc00000000, 0x241fe92100000000, 0x4178559900000000,
- 0xafd7e08b00000000, 0xcab05c3300000000, 0x3bb659ed00000000,
- 0x5ed1e55500000000, 0xb07e504700000000, 0xd519ecff00000000,
- 0x6c213b6200000000, 0x094687da00000000, 0xe7e932c800000000,
- 0x828e8e7000000000, 0xd49eed2800000000, 0xb1f9519000000000,
- 0x5f56e48200000000, 0x3a31583a00000000, 0x83098fa700000000,
- 0xe66e331f00000000, 0x08c1860d00000000, 0x6da63ab500000000,
- 0xa4e140bd00000000, 0xc186fc0500000000, 0x2f29491700000000,
- 0x4a4ef5af00000000, 0xf376223200000000, 0x96119e8a00000000,
- 0x78be2b9800000000, 0x1dd9972000000000, 0x4bc9f47800000000,
- 0x2eae48c000000000, 0xc001fdd200000000, 0xa566416a00000000,
- 0x1c5e96f700000000, 0x79392a4f00000000, 0x97969f5d00000000,
- 0xf2f123e500000000, 0x05196b4d00000000, 0x607ed7f500000000,
- 0x8ed162e700000000, 0xebb6de5f00000000, 0x528e09c200000000,
- 0x37e9b57a00000000, 0xd946006800000000, 0xbc21bcd000000000,
- 0xea31df8800000000, 0x8f56633000000000, 0x61f9d62200000000,
- 0x049e6a9a00000000, 0xbda6bd0700000000, 0xd8c101bf00000000,
- 0x366eb4ad00000000, 0x5309081500000000, 0x9a4e721d00000000,
- 0xff29cea500000000, 0x11867bb700000000, 0x74e1c70f00000000,
- 0xcdd9109200000000, 0xa8beac2a00000000, 0x4611193800000000,
- 0x2376a58000000000, 0x7566c6d800000000, 0x10017a6000000000,
- 0xfeaecf7200000000, 0x9bc973ca00000000, 0x22f1a45700000000,
- 0x479618ef00000000, 0xa939adfd00000000, 0xcc5e114500000000,
- 0x06ee4d7600000000, 0x6389f1ce00000000, 0x8d2644dc00000000,
- 0xe841f86400000000, 0x51792ff900000000, 0x341e934100000000,
- 0xdab1265300000000, 0xbfd69aeb00000000, 0xe9c6f9b300000000,
- 0x8ca1450b00000000, 0x620ef01900000000, 0x07694ca100000000,
- 0xbe519b3c00000000, 0xdb36278400000000, 0x3599929600000000,
- 0x50fe2e2e00000000, 0x99b9542600000000, 0xfcdee89e00000000,
- 0x12715d8c00000000, 0x7716e13400000000, 0xce2e36a900000000,
- 0xab498a1100000000, 0x45e63f0300000000, 0x208183bb00000000,
- 0x7691e0e300000000, 0x13f65c5b00000000, 0xfd59e94900000000,
- 0x983e55f100000000, 0x2106826c00000000, 0x44613ed400000000,
- 0xaace8bc600000000, 0xcfa9377e00000000, 0x38417fd600000000,
- 0x5d26c36e00000000, 0xb389767c00000000, 0xd6eecac400000000,
- 0x6fd61d5900000000, 0x0ab1a1e100000000, 0xe41e14f300000000,
- 0x8179a84b00000000, 0xd769cb1300000000, 0xb20e77ab00000000,
- 0x5ca1c2b900000000, 0x39c67e0100000000, 0x80fea99c00000000,
- 0xe599152400000000, 0x0b36a03600000000, 0x6e511c8e00000000,
- 0xa716668600000000, 0xc271da3e00000000, 0x2cde6f2c00000000,
- 0x49b9d39400000000, 0xf081040900000000, 0x95e6b8b100000000,
- 0x7b490da300000000, 0x1e2eb11b00000000, 0x483ed24300000000,
- 0x2d596efb00000000, 0xc3f6dbe900000000, 0xa691675100000000,
- 0x1fa9b0cc00000000, 0x7ace0c7400000000, 0x9461b96600000000,
- 0xf10605de00000000},
- {0x0000000000000000, 0xb029603d00000000, 0x6053c07a00000000,
- 0xd07aa04700000000, 0xc0a680f500000000, 0x708fe0c800000000,
- 0xa0f5408f00000000, 0x10dc20b200000000, 0xc14b703000000000,
- 0x7162100d00000000, 0xa118b04a00000000, 0x1131d07700000000,
- 0x01edf0c500000000, 0xb1c490f800000000, 0x61be30bf00000000,
- 0xd197508200000000, 0x8297e06000000000, 0x32be805d00000000,
- 0xe2c4201a00000000, 0x52ed402700000000, 0x4231609500000000,
- 0xf21800a800000000, 0x2262a0ef00000000, 0x924bc0d200000000,
- 0x43dc905000000000, 0xf3f5f06d00000000, 0x238f502a00000000,
- 0x93a6301700000000, 0x837a10a500000000, 0x3353709800000000,
- 0xe329d0df00000000, 0x5300b0e200000000, 0x042fc1c100000000,
- 0xb406a1fc00000000, 0x647c01bb00000000, 0xd455618600000000,
- 0xc489413400000000, 0x74a0210900000000, 0xa4da814e00000000,
- 0x14f3e17300000000, 0xc564b1f100000000, 0x754dd1cc00000000,
- 0xa537718b00000000, 0x151e11b600000000, 0x05c2310400000000,
- 0xb5eb513900000000, 0x6591f17e00000000, 0xd5b8914300000000,
- 0x86b821a100000000, 0x3691419c00000000, 0xe6ebe1db00000000,
- 0x56c281e600000000, 0x461ea15400000000, 0xf637c16900000000,
- 0x264d612e00000000, 0x9664011300000000, 0x47f3519100000000,
- 0xf7da31ac00000000, 0x27a091eb00000000, 0x9789f1d600000000,
- 0x8755d16400000000, 0x377cb15900000000, 0xe706111e00000000,
- 0x572f712300000000, 0x4958f35800000000, 0xf971936500000000,
- 0x290b332200000000, 0x9922531f00000000, 0x89fe73ad00000000,
- 0x39d7139000000000, 0xe9adb3d700000000, 0x5984d3ea00000000,
- 0x8813836800000000, 0x383ae35500000000, 0xe840431200000000,
- 0x5869232f00000000, 0x48b5039d00000000, 0xf89c63a000000000,
- 0x28e6c3e700000000, 0x98cfa3da00000000, 0xcbcf133800000000,
- 0x7be6730500000000, 0xab9cd34200000000, 0x1bb5b37f00000000,
- 0x0b6993cd00000000, 0xbb40f3f000000000, 0x6b3a53b700000000,
- 0xdb13338a00000000, 0x0a84630800000000, 0xbaad033500000000,
- 0x6ad7a37200000000, 0xdafec34f00000000, 0xca22e3fd00000000,
- 0x7a0b83c000000000, 0xaa71238700000000, 0x1a5843ba00000000,
- 0x4d77329900000000, 0xfd5e52a400000000, 0x2d24f2e300000000,
- 0x9d0d92de00000000, 0x8dd1b26c00000000, 0x3df8d25100000000,
- 0xed82721600000000, 0x5dab122b00000000, 0x8c3c42a900000000,
- 0x3c15229400000000, 0xec6f82d300000000, 0x5c46e2ee00000000,
- 0x4c9ac25c00000000, 0xfcb3a26100000000, 0x2cc9022600000000,
- 0x9ce0621b00000000, 0xcfe0d2f900000000, 0x7fc9b2c400000000,
- 0xafb3128300000000, 0x1f9a72be00000000, 0x0f46520c00000000,
- 0xbf6f323100000000, 0x6f15927600000000, 0xdf3cf24b00000000,
- 0x0eaba2c900000000, 0xbe82c2f400000000, 0x6ef862b300000000,
- 0xded1028e00000000, 0xce0d223c00000000, 0x7e24420100000000,
- 0xae5ee24600000000, 0x1e77827b00000000, 0x92b0e6b100000000,
- 0x2299868c00000000, 0xf2e326cb00000000, 0x42ca46f600000000,
- 0x5216664400000000, 0xe23f067900000000, 0x3245a63e00000000,
- 0x826cc60300000000, 0x53fb968100000000, 0xe3d2f6bc00000000,
- 0x33a856fb00000000, 0x838136c600000000, 0x935d167400000000,
- 0x2374764900000000, 0xf30ed60e00000000, 0x4327b63300000000,
- 0x102706d100000000, 0xa00e66ec00000000, 0x7074c6ab00000000,
- 0xc05da69600000000, 0xd081862400000000, 0x60a8e61900000000,
- 0xb0d2465e00000000, 0x00fb266300000000, 0xd16c76e100000000,
- 0x614516dc00000000, 0xb13fb69b00000000, 0x0116d6a600000000,
- 0x11caf61400000000, 0xa1e3962900000000, 0x7199366e00000000,
- 0xc1b0565300000000, 0x969f277000000000, 0x26b6474d00000000,
- 0xf6cce70a00000000, 0x46e5873700000000, 0x5639a78500000000,
- 0xe610c7b800000000, 0x366a67ff00000000, 0x864307c200000000,
- 0x57d4574000000000, 0xe7fd377d00000000, 0x3787973a00000000,
- 0x87aef70700000000, 0x9772d7b500000000, 0x275bb78800000000,
- 0xf72117cf00000000, 0x470877f200000000, 0x1408c71000000000,
- 0xa421a72d00000000, 0x745b076a00000000, 0xc472675700000000,
- 0xd4ae47e500000000, 0x648727d800000000, 0xb4fd879f00000000,
- 0x04d4e7a200000000, 0xd543b72000000000, 0x656ad71d00000000,
- 0xb510775a00000000, 0x0539176700000000, 0x15e537d500000000,
- 0xa5cc57e800000000, 0x75b6f7af00000000, 0xc59f979200000000,
- 0xdbe815e900000000, 0x6bc175d400000000, 0xbbbbd59300000000,
- 0x0b92b5ae00000000, 0x1b4e951c00000000, 0xab67f52100000000,
- 0x7b1d556600000000, 0xcb34355b00000000, 0x1aa365d900000000,
- 0xaa8a05e400000000, 0x7af0a5a300000000, 0xcad9c59e00000000,
- 0xda05e52c00000000, 0x6a2c851100000000, 0xba56255600000000,
- 0x0a7f456b00000000, 0x597ff58900000000, 0xe95695b400000000,
- 0x392c35f300000000, 0x890555ce00000000, 0x99d9757c00000000,
- 0x29f0154100000000, 0xf98ab50600000000, 0x49a3d53b00000000,
- 0x983485b900000000, 0x281de58400000000, 0xf86745c300000000,
- 0x484e25fe00000000, 0x5892054c00000000, 0xe8bb657100000000,
- 0x38c1c53600000000, 0x88e8a50b00000000, 0xdfc7d42800000000,
- 0x6feeb41500000000, 0xbf94145200000000, 0x0fbd746f00000000,
- 0x1f6154dd00000000, 0xaf4834e000000000, 0x7f3294a700000000,
- 0xcf1bf49a00000000, 0x1e8ca41800000000, 0xaea5c42500000000,
- 0x7edf646200000000, 0xcef6045f00000000, 0xde2a24ed00000000,
- 0x6e0344d000000000, 0xbe79e49700000000, 0x0e5084aa00000000,
- 0x5d50344800000000, 0xed79547500000000, 0x3d03f43200000000,
- 0x8d2a940f00000000, 0x9df6b4bd00000000, 0x2ddfd48000000000,
- 0xfda574c700000000, 0x4d8c14fa00000000, 0x9c1b447800000000,
- 0x2c32244500000000, 0xfc48840200000000, 0x4c61e43f00000000,
- 0x5cbdc48d00000000, 0xec94a4b000000000, 0x3cee04f700000000,
- 0x8cc764ca00000000},
- {0x0000000000000000, 0xa5d35ccb00000000, 0x0ba1c84d00000000,
- 0xae72948600000000, 0x1642919b00000000, 0xb391cd5000000000,
- 0x1de359d600000000, 0xb830051d00000000, 0x6d8253ec00000000,
- 0xc8510f2700000000, 0x66239ba100000000, 0xc3f0c76a00000000,
- 0x7bc0c27700000000, 0xde139ebc00000000, 0x70610a3a00000000,
- 0xd5b256f100000000, 0x9b02d60300000000, 0x3ed18ac800000000,
- 0x90a31e4e00000000, 0x3570428500000000, 0x8d40479800000000,
- 0x28931b5300000000, 0x86e18fd500000000, 0x2332d31e00000000,
- 0xf68085ef00000000, 0x5353d92400000000, 0xfd214da200000000,
- 0x58f2116900000000, 0xe0c2147400000000, 0x451148bf00000000,
- 0xeb63dc3900000000, 0x4eb080f200000000, 0x3605ac0700000000,
- 0x93d6f0cc00000000, 0x3da4644a00000000, 0x9877388100000000,
- 0x20473d9c00000000, 0x8594615700000000, 0x2be6f5d100000000,
- 0x8e35a91a00000000, 0x5b87ffeb00000000, 0xfe54a32000000000,
- 0x502637a600000000, 0xf5f56b6d00000000, 0x4dc56e7000000000,
- 0xe81632bb00000000, 0x4664a63d00000000, 0xe3b7faf600000000,
- 0xad077a0400000000, 0x08d426cf00000000, 0xa6a6b24900000000,
- 0x0375ee8200000000, 0xbb45eb9f00000000, 0x1e96b75400000000,
- 0xb0e423d200000000, 0x15377f1900000000, 0xc08529e800000000,
- 0x6556752300000000, 0xcb24e1a500000000, 0x6ef7bd6e00000000,
- 0xd6c7b87300000000, 0x7314e4b800000000, 0xdd66703e00000000,
- 0x78b52cf500000000, 0x6c0a580f00000000, 0xc9d904c400000000,
- 0x67ab904200000000, 0xc278cc8900000000, 0x7a48c99400000000,
- 0xdf9b955f00000000, 0x71e901d900000000, 0xd43a5d1200000000,
- 0x01880be300000000, 0xa45b572800000000, 0x0a29c3ae00000000,
- 0xaffa9f6500000000, 0x17ca9a7800000000, 0xb219c6b300000000,
- 0x1c6b523500000000, 0xb9b80efe00000000, 0xf7088e0c00000000,
- 0x52dbd2c700000000, 0xfca9464100000000, 0x597a1a8a00000000,
- 0xe14a1f9700000000, 0x4499435c00000000, 0xeaebd7da00000000,
- 0x4f388b1100000000, 0x9a8adde000000000, 0x3f59812b00000000,
- 0x912b15ad00000000, 0x34f8496600000000, 0x8cc84c7b00000000,
- 0x291b10b000000000, 0x8769843600000000, 0x22bad8fd00000000,
- 0x5a0ff40800000000, 0xffdca8c300000000, 0x51ae3c4500000000,
- 0xf47d608e00000000, 0x4c4d659300000000, 0xe99e395800000000,
- 0x47ecadde00000000, 0xe23ff11500000000, 0x378da7e400000000,
- 0x925efb2f00000000, 0x3c2c6fa900000000, 0x99ff336200000000,
- 0x21cf367f00000000, 0x841c6ab400000000, 0x2a6efe3200000000,
- 0x8fbda2f900000000, 0xc10d220b00000000, 0x64de7ec000000000,
- 0xcaacea4600000000, 0x6f7fb68d00000000, 0xd74fb39000000000,
- 0x729cef5b00000000, 0xdcee7bdd00000000, 0x793d271600000000,
- 0xac8f71e700000000, 0x095c2d2c00000000, 0xa72eb9aa00000000,
- 0x02fde56100000000, 0xbacde07c00000000, 0x1f1ebcb700000000,
- 0xb16c283100000000, 0x14bf74fa00000000, 0xd814b01e00000000,
- 0x7dc7ecd500000000, 0xd3b5785300000000, 0x7666249800000000,
- 0xce56218500000000, 0x6b857d4e00000000, 0xc5f7e9c800000000,
- 0x6024b50300000000, 0xb596e3f200000000, 0x1045bf3900000000,
- 0xbe372bbf00000000, 0x1be4777400000000, 0xa3d4726900000000,
- 0x06072ea200000000, 0xa875ba2400000000, 0x0da6e6ef00000000,
- 0x4316661d00000000, 0xe6c53ad600000000, 0x48b7ae5000000000,
- 0xed64f29b00000000, 0x5554f78600000000, 0xf087ab4d00000000,
- 0x5ef53fcb00000000, 0xfb26630000000000, 0x2e9435f100000000,
- 0x8b47693a00000000, 0x2535fdbc00000000, 0x80e6a17700000000,
- 0x38d6a46a00000000, 0x9d05f8a100000000, 0x33776c2700000000,
- 0x96a430ec00000000, 0xee111c1900000000, 0x4bc240d200000000,
- 0xe5b0d45400000000, 0x4063889f00000000, 0xf8538d8200000000,
- 0x5d80d14900000000, 0xf3f245cf00000000, 0x5621190400000000,
- 0x83934ff500000000, 0x2640133e00000000, 0x883287b800000000,
- 0x2de1db7300000000, 0x95d1de6e00000000, 0x300282a500000000,
- 0x9e70162300000000, 0x3ba34ae800000000, 0x7513ca1a00000000,
- 0xd0c096d100000000, 0x7eb2025700000000, 0xdb615e9c00000000,
- 0x63515b8100000000, 0xc682074a00000000, 0x68f093cc00000000,
- 0xcd23cf0700000000, 0x189199f600000000, 0xbd42c53d00000000,
- 0x133051bb00000000, 0xb6e30d7000000000, 0x0ed3086d00000000,
- 0xab0054a600000000, 0x0572c02000000000, 0xa0a19ceb00000000,
- 0xb41ee81100000000, 0x11cdb4da00000000, 0xbfbf205c00000000,
- 0x1a6c7c9700000000, 0xa25c798a00000000, 0x078f254100000000,
- 0xa9fdb1c700000000, 0x0c2eed0c00000000, 0xd99cbbfd00000000,
- 0x7c4fe73600000000, 0xd23d73b000000000, 0x77ee2f7b00000000,
- 0xcfde2a6600000000, 0x6a0d76ad00000000, 0xc47fe22b00000000,
- 0x61acbee000000000, 0x2f1c3e1200000000, 0x8acf62d900000000,
- 0x24bdf65f00000000, 0x816eaa9400000000, 0x395eaf8900000000,
- 0x9c8df34200000000, 0x32ff67c400000000, 0x972c3b0f00000000,
- 0x429e6dfe00000000, 0xe74d313500000000, 0x493fa5b300000000,
- 0xececf97800000000, 0x54dcfc6500000000, 0xf10fa0ae00000000,
- 0x5f7d342800000000, 0xfaae68e300000000, 0x821b441600000000,
- 0x27c818dd00000000, 0x89ba8c5b00000000, 0x2c69d09000000000,
- 0x9459d58d00000000, 0x318a894600000000, 0x9ff81dc000000000,
- 0x3a2b410b00000000, 0xef9917fa00000000, 0x4a4a4b3100000000,
- 0xe438dfb700000000, 0x41eb837c00000000, 0xf9db866100000000,
- 0x5c08daaa00000000, 0xf27a4e2c00000000, 0x57a912e700000000,
- 0x1919921500000000, 0xbccacede00000000, 0x12b85a5800000000,
- 0xb76b069300000000, 0x0f5b038e00000000, 0xaa885f4500000000,
- 0x04facbc300000000, 0xa129970800000000, 0x749bc1f900000000,
- 0xd1489d3200000000, 0x7f3a09b400000000, 0xdae9557f00000000,
- 0x62d9506200000000, 0xc70a0ca900000000, 0x6978982f00000000,
- 0xccabc4e400000000},
- {0x0000000000000000, 0xb40b77a600000000, 0x29119f9700000000,
- 0x9d1ae83100000000, 0x13244ff400000000, 0xa72f385200000000,
- 0x3a35d06300000000, 0x8e3ea7c500000000, 0x674eef3300000000,
- 0xd345989500000000, 0x4e5f70a400000000, 0xfa54070200000000,
- 0x746aa0c700000000, 0xc061d76100000000, 0x5d7b3f5000000000,
- 0xe97048f600000000, 0xce9cde6700000000, 0x7a97a9c100000000,
- 0xe78d41f000000000, 0x5386365600000000, 0xddb8919300000000,
- 0x69b3e63500000000, 0xf4a90e0400000000, 0x40a279a200000000,
- 0xa9d2315400000000, 0x1dd946f200000000, 0x80c3aec300000000,
- 0x34c8d96500000000, 0xbaf67ea000000000, 0x0efd090600000000,
- 0x93e7e13700000000, 0x27ec969100000000, 0x9c39bdcf00000000,
- 0x2832ca6900000000, 0xb528225800000000, 0x012355fe00000000,
- 0x8f1df23b00000000, 0x3b16859d00000000, 0xa60c6dac00000000,
- 0x12071a0a00000000, 0xfb7752fc00000000, 0x4f7c255a00000000,
- 0xd266cd6b00000000, 0x666dbacd00000000, 0xe8531d0800000000,
- 0x5c586aae00000000, 0xc142829f00000000, 0x7549f53900000000,
- 0x52a563a800000000, 0xe6ae140e00000000, 0x7bb4fc3f00000000,
- 0xcfbf8b9900000000, 0x41812c5c00000000, 0xf58a5bfa00000000,
- 0x6890b3cb00000000, 0xdc9bc46d00000000, 0x35eb8c9b00000000,
- 0x81e0fb3d00000000, 0x1cfa130c00000000, 0xa8f164aa00000000,
- 0x26cfc36f00000000, 0x92c4b4c900000000, 0x0fde5cf800000000,
- 0xbbd52b5e00000000, 0x79750b4400000000, 0xcd7e7ce200000000,
- 0x506494d300000000, 0xe46fe37500000000, 0x6a5144b000000000,
- 0xde5a331600000000, 0x4340db2700000000, 0xf74bac8100000000,
- 0x1e3be47700000000, 0xaa3093d100000000, 0x372a7be000000000,
- 0x83210c4600000000, 0x0d1fab8300000000, 0xb914dc2500000000,
- 0x240e341400000000, 0x900543b200000000, 0xb7e9d52300000000,
- 0x03e2a28500000000, 0x9ef84ab400000000, 0x2af33d1200000000,
- 0xa4cd9ad700000000, 0x10c6ed7100000000, 0x8ddc054000000000,
- 0x39d772e600000000, 0xd0a73a1000000000, 0x64ac4db600000000,
- 0xf9b6a58700000000, 0x4dbdd22100000000, 0xc38375e400000000,
- 0x7788024200000000, 0xea92ea7300000000, 0x5e999dd500000000,
- 0xe54cb68b00000000, 0x5147c12d00000000, 0xcc5d291c00000000,
- 0x78565eba00000000, 0xf668f97f00000000, 0x42638ed900000000,
- 0xdf7966e800000000, 0x6b72114e00000000, 0x820259b800000000,
- 0x36092e1e00000000, 0xab13c62f00000000, 0x1f18b18900000000,
- 0x9126164c00000000, 0x252d61ea00000000, 0xb83789db00000000,
- 0x0c3cfe7d00000000, 0x2bd068ec00000000, 0x9fdb1f4a00000000,
- 0x02c1f77b00000000, 0xb6ca80dd00000000, 0x38f4271800000000,
- 0x8cff50be00000000, 0x11e5b88f00000000, 0xa5eecf2900000000,
- 0x4c9e87df00000000, 0xf895f07900000000, 0x658f184800000000,
- 0xd1846fee00000000, 0x5fbac82b00000000, 0xebb1bf8d00000000,
- 0x76ab57bc00000000, 0xc2a0201a00000000, 0xf2ea168800000000,
- 0x46e1612e00000000, 0xdbfb891f00000000, 0x6ff0feb900000000,
- 0xe1ce597c00000000, 0x55c52eda00000000, 0xc8dfc6eb00000000,
- 0x7cd4b14d00000000, 0x95a4f9bb00000000, 0x21af8e1d00000000,
- 0xbcb5662c00000000, 0x08be118a00000000, 0x8680b64f00000000,
- 0x328bc1e900000000, 0xaf9129d800000000, 0x1b9a5e7e00000000,
- 0x3c76c8ef00000000, 0x887dbf4900000000, 0x1567577800000000,
- 0xa16c20de00000000, 0x2f52871b00000000, 0x9b59f0bd00000000,
- 0x0643188c00000000, 0xb2486f2a00000000, 0x5b3827dc00000000,
- 0xef33507a00000000, 0x7229b84b00000000, 0xc622cfed00000000,
- 0x481c682800000000, 0xfc171f8e00000000, 0x610df7bf00000000,
- 0xd506801900000000, 0x6ed3ab4700000000, 0xdad8dce100000000,
- 0x47c234d000000000, 0xf3c9437600000000, 0x7df7e4b300000000,
- 0xc9fc931500000000, 0x54e67b2400000000, 0xe0ed0c8200000000,
- 0x099d447400000000, 0xbd9633d200000000, 0x208cdbe300000000,
- 0x9487ac4500000000, 0x1ab90b8000000000, 0xaeb27c2600000000,
- 0x33a8941700000000, 0x87a3e3b100000000, 0xa04f752000000000,
- 0x1444028600000000, 0x895eeab700000000, 0x3d559d1100000000,
- 0xb36b3ad400000000, 0x07604d7200000000, 0x9a7aa54300000000,
- 0x2e71d2e500000000, 0xc7019a1300000000, 0x730aedb500000000,
- 0xee10058400000000, 0x5a1b722200000000, 0xd425d5e700000000,
- 0x602ea24100000000, 0xfd344a7000000000, 0x493f3dd600000000,
- 0x8b9f1dcc00000000, 0x3f946a6a00000000, 0xa28e825b00000000,
- 0x1685f5fd00000000, 0x98bb523800000000, 0x2cb0259e00000000,
- 0xb1aacdaf00000000, 0x05a1ba0900000000, 0xecd1f2ff00000000,
- 0x58da855900000000, 0xc5c06d6800000000, 0x71cb1ace00000000,
- 0xfff5bd0b00000000, 0x4bfecaad00000000, 0xd6e4229c00000000,
- 0x62ef553a00000000, 0x4503c3ab00000000, 0xf108b40d00000000,
- 0x6c125c3c00000000, 0xd8192b9a00000000, 0x56278c5f00000000,
- 0xe22cfbf900000000, 0x7f3613c800000000, 0xcb3d646e00000000,
- 0x224d2c9800000000, 0x96465b3e00000000, 0x0b5cb30f00000000,
- 0xbf57c4a900000000, 0x3169636c00000000, 0x856214ca00000000,
- 0x1878fcfb00000000, 0xac738b5d00000000, 0x17a6a00300000000,
- 0xa3add7a500000000, 0x3eb73f9400000000, 0x8abc483200000000,
- 0x0482eff700000000, 0xb089985100000000, 0x2d93706000000000,
- 0x999807c600000000, 0x70e84f3000000000, 0xc4e3389600000000,
- 0x59f9d0a700000000, 0xedf2a70100000000, 0x63cc00c400000000,
- 0xd7c7776200000000, 0x4add9f5300000000, 0xfed6e8f500000000,
- 0xd93a7e6400000000, 0x6d3109c200000000, 0xf02be1f300000000,
- 0x4420965500000000, 0xca1e319000000000, 0x7e15463600000000,
- 0xe30fae0700000000, 0x5704d9a100000000, 0xbe74915700000000,
- 0x0a7fe6f100000000, 0x97650ec000000000, 0x236e796600000000,
- 0xad50dea300000000, 0x195ba90500000000, 0x8441413400000000,
- 0x304a369200000000},
- {0x0000000000000000, 0x9e00aacc00000000, 0x7d07254200000000,
- 0xe3078f8e00000000, 0xfa0e4a8400000000, 0x640ee04800000000,
- 0x87096fc600000000, 0x1909c50a00000000, 0xb51be5d300000000,
- 0x2b1b4f1f00000000, 0xc81cc09100000000, 0x561c6a5d00000000,
- 0x4f15af5700000000, 0xd115059b00000000, 0x32128a1500000000,
- 0xac1220d900000000, 0x2b31bb7c00000000, 0xb53111b000000000,
- 0x56369e3e00000000, 0xc83634f200000000, 0xd13ff1f800000000,
- 0x4f3f5b3400000000, 0xac38d4ba00000000, 0x32387e7600000000,
- 0x9e2a5eaf00000000, 0x002af46300000000, 0xe32d7bed00000000,
- 0x7d2dd12100000000, 0x6424142b00000000, 0xfa24bee700000000,
- 0x1923316900000000, 0x87239ba500000000, 0x566276f900000000,
- 0xc862dc3500000000, 0x2b6553bb00000000, 0xb565f97700000000,
- 0xac6c3c7d00000000, 0x326c96b100000000, 0xd16b193f00000000,
- 0x4f6bb3f300000000, 0xe379932a00000000, 0x7d7939e600000000,
- 0x9e7eb66800000000, 0x007e1ca400000000, 0x1977d9ae00000000,
- 0x8777736200000000, 0x6470fcec00000000, 0xfa70562000000000,
- 0x7d53cd8500000000, 0xe353674900000000, 0x0054e8c700000000,
- 0x9e54420b00000000, 0x875d870100000000, 0x195d2dcd00000000,
- 0xfa5aa24300000000, 0x645a088f00000000, 0xc848285600000000,
- 0x5648829a00000000, 0xb54f0d1400000000, 0x2b4fa7d800000000,
- 0x324662d200000000, 0xac46c81e00000000, 0x4f41479000000000,
- 0xd141ed5c00000000, 0xedc29d2900000000, 0x73c237e500000000,
- 0x90c5b86b00000000, 0x0ec512a700000000, 0x17ccd7ad00000000,
- 0x89cc7d6100000000, 0x6acbf2ef00000000, 0xf4cb582300000000,
- 0x58d978fa00000000, 0xc6d9d23600000000, 0x25de5db800000000,
- 0xbbdef77400000000, 0xa2d7327e00000000, 0x3cd798b200000000,
- 0xdfd0173c00000000, 0x41d0bdf000000000, 0xc6f3265500000000,
- 0x58f38c9900000000, 0xbbf4031700000000, 0x25f4a9db00000000,
- 0x3cfd6cd100000000, 0xa2fdc61d00000000, 0x41fa499300000000,
- 0xdffae35f00000000, 0x73e8c38600000000, 0xede8694a00000000,
- 0x0eefe6c400000000, 0x90ef4c0800000000, 0x89e6890200000000,
- 0x17e623ce00000000, 0xf4e1ac4000000000, 0x6ae1068c00000000,
- 0xbba0ebd000000000, 0x25a0411c00000000, 0xc6a7ce9200000000,
- 0x58a7645e00000000, 0x41aea15400000000, 0xdfae0b9800000000,
- 0x3ca9841600000000, 0xa2a92eda00000000, 0x0ebb0e0300000000,
- 0x90bba4cf00000000, 0x73bc2b4100000000, 0xedbc818d00000000,
- 0xf4b5448700000000, 0x6ab5ee4b00000000, 0x89b261c500000000,
- 0x17b2cb0900000000, 0x909150ac00000000, 0x0e91fa6000000000,
- 0xed9675ee00000000, 0x7396df2200000000, 0x6a9f1a2800000000,
- 0xf49fb0e400000000, 0x17983f6a00000000, 0x899895a600000000,
- 0x258ab57f00000000, 0xbb8a1fb300000000, 0x588d903d00000000,
- 0xc68d3af100000000, 0xdf84fffb00000000, 0x4184553700000000,
- 0xa283dab900000000, 0x3c83707500000000, 0xda853b5300000000,
- 0x4485919f00000000, 0xa7821e1100000000, 0x3982b4dd00000000,
- 0x208b71d700000000, 0xbe8bdb1b00000000, 0x5d8c549500000000,
- 0xc38cfe5900000000, 0x6f9ede8000000000, 0xf19e744c00000000,
- 0x1299fbc200000000, 0x8c99510e00000000, 0x9590940400000000,
- 0x0b903ec800000000, 0xe897b14600000000, 0x76971b8a00000000,
- 0xf1b4802f00000000, 0x6fb42ae300000000, 0x8cb3a56d00000000,
- 0x12b30fa100000000, 0x0bbacaab00000000, 0x95ba606700000000,
- 0x76bdefe900000000, 0xe8bd452500000000, 0x44af65fc00000000,
- 0xdaafcf3000000000, 0x39a840be00000000, 0xa7a8ea7200000000,
- 0xbea12f7800000000, 0x20a185b400000000, 0xc3a60a3a00000000,
- 0x5da6a0f600000000, 0x8ce74daa00000000, 0x12e7e76600000000,
- 0xf1e068e800000000, 0x6fe0c22400000000, 0x76e9072e00000000,
- 0xe8e9ade200000000, 0x0bee226c00000000, 0x95ee88a000000000,
- 0x39fca87900000000, 0xa7fc02b500000000, 0x44fb8d3b00000000,
- 0xdafb27f700000000, 0xc3f2e2fd00000000, 0x5df2483100000000,
- 0xbef5c7bf00000000, 0x20f56d7300000000, 0xa7d6f6d600000000,
- 0x39d65c1a00000000, 0xdad1d39400000000, 0x44d1795800000000,
- 0x5dd8bc5200000000, 0xc3d8169e00000000, 0x20df991000000000,
- 0xbedf33dc00000000, 0x12cd130500000000, 0x8ccdb9c900000000,
- 0x6fca364700000000, 0xf1ca9c8b00000000, 0xe8c3598100000000,
- 0x76c3f34d00000000, 0x95c47cc300000000, 0x0bc4d60f00000000,
- 0x3747a67a00000000, 0xa9470cb600000000, 0x4a40833800000000,
- 0xd44029f400000000, 0xcd49ecfe00000000, 0x5349463200000000,
- 0xb04ec9bc00000000, 0x2e4e637000000000, 0x825c43a900000000,
- 0x1c5ce96500000000, 0xff5b66eb00000000, 0x615bcc2700000000,
- 0x7852092d00000000, 0xe652a3e100000000, 0x05552c6f00000000,
- 0x9b5586a300000000, 0x1c761d0600000000, 0x8276b7ca00000000,
- 0x6171384400000000, 0xff71928800000000, 0xe678578200000000,
- 0x7878fd4e00000000, 0x9b7f72c000000000, 0x057fd80c00000000,
- 0xa96df8d500000000, 0x376d521900000000, 0xd46add9700000000,
- 0x4a6a775b00000000, 0x5363b25100000000, 0xcd63189d00000000,
- 0x2e64971300000000, 0xb0643ddf00000000, 0x6125d08300000000,
- 0xff257a4f00000000, 0x1c22f5c100000000, 0x82225f0d00000000,
- 0x9b2b9a0700000000, 0x052b30cb00000000, 0xe62cbf4500000000,
- 0x782c158900000000, 0xd43e355000000000, 0x4a3e9f9c00000000,
- 0xa939101200000000, 0x3739bade00000000, 0x2e307fd400000000,
- 0xb030d51800000000, 0x53375a9600000000, 0xcd37f05a00000000,
- 0x4a146bff00000000, 0xd414c13300000000, 0x37134ebd00000000,
- 0xa913e47100000000, 0xb01a217b00000000, 0x2e1a8bb700000000,
- 0xcd1d043900000000, 0x531daef500000000, 0xff0f8e2c00000000,
- 0x610f24e000000000, 0x8208ab6e00000000, 0x1c0801a200000000,
- 0x0501c4a800000000, 0x9b016e6400000000, 0x7806e1ea00000000,
- 0xe6064b2600000000}};
-
-#else /* W == 4 */
-
-local const z_crc_t FAR crc_braid_table[][256] = {
- {0x00000000, 0xb8bc6765, 0xaa09c88b, 0x12b5afee, 0x8f629757,
- 0x37def032, 0x256b5fdc, 0x9dd738b9, 0xc5b428ef, 0x7d084f8a,
- 0x6fbde064, 0xd7018701, 0x4ad6bfb8, 0xf26ad8dd, 0xe0df7733,
- 0x58631056, 0x5019579f, 0xe8a530fa, 0xfa109f14, 0x42acf871,
- 0xdf7bc0c8, 0x67c7a7ad, 0x75720843, 0xcdce6f26, 0x95ad7f70,
- 0x2d111815, 0x3fa4b7fb, 0x8718d09e, 0x1acfe827, 0xa2738f42,
- 0xb0c620ac, 0x087a47c9, 0xa032af3e, 0x188ec85b, 0x0a3b67b5,
- 0xb28700d0, 0x2f503869, 0x97ec5f0c, 0x8559f0e2, 0x3de59787,
- 0x658687d1, 0xdd3ae0b4, 0xcf8f4f5a, 0x7733283f, 0xeae41086,
- 0x525877e3, 0x40edd80d, 0xf851bf68, 0xf02bf8a1, 0x48979fc4,
- 0x5a22302a, 0xe29e574f, 0x7f496ff6, 0xc7f50893, 0xd540a77d,
- 0x6dfcc018, 0x359fd04e, 0x8d23b72b, 0x9f9618c5, 0x272a7fa0,
- 0xbafd4719, 0x0241207c, 0x10f48f92, 0xa848e8f7, 0x9b14583d,
- 0x23a83f58, 0x311d90b6, 0x89a1f7d3, 0x1476cf6a, 0xaccaa80f,
- 0xbe7f07e1, 0x06c36084, 0x5ea070d2, 0xe61c17b7, 0xf4a9b859,
- 0x4c15df3c, 0xd1c2e785, 0x697e80e0, 0x7bcb2f0e, 0xc377486b,
- 0xcb0d0fa2, 0x73b168c7, 0x6104c729, 0xd9b8a04c, 0x446f98f5,
- 0xfcd3ff90, 0xee66507e, 0x56da371b, 0x0eb9274d, 0xb6054028,
- 0xa4b0efc6, 0x1c0c88a3, 0x81dbb01a, 0x3967d77f, 0x2bd27891,
- 0x936e1ff4, 0x3b26f703, 0x839a9066, 0x912f3f88, 0x299358ed,
- 0xb4446054, 0x0cf80731, 0x1e4da8df, 0xa6f1cfba, 0xfe92dfec,
- 0x462eb889, 0x549b1767, 0xec277002, 0x71f048bb, 0xc94c2fde,
- 0xdbf98030, 0x6345e755, 0x6b3fa09c, 0xd383c7f9, 0xc1366817,
- 0x798a0f72, 0xe45d37cb, 0x5ce150ae, 0x4e54ff40, 0xf6e89825,
- 0xae8b8873, 0x1637ef16, 0x048240f8, 0xbc3e279d, 0x21e91f24,
- 0x99557841, 0x8be0d7af, 0x335cb0ca, 0xed59b63b, 0x55e5d15e,
- 0x47507eb0, 0xffec19d5, 0x623b216c, 0xda874609, 0xc832e9e7,
- 0x708e8e82, 0x28ed9ed4, 0x9051f9b1, 0x82e4565f, 0x3a58313a,
- 0xa78f0983, 0x1f336ee6, 0x0d86c108, 0xb53aa66d, 0xbd40e1a4,
- 0x05fc86c1, 0x1749292f, 0xaff54e4a, 0x322276f3, 0x8a9e1196,
- 0x982bbe78, 0x2097d91d, 0x78f4c94b, 0xc048ae2e, 0xd2fd01c0,
- 0x6a4166a5, 0xf7965e1c, 0x4f2a3979, 0x5d9f9697, 0xe523f1f2,
- 0x4d6b1905, 0xf5d77e60, 0xe762d18e, 0x5fdeb6eb, 0xc2098e52,
- 0x7ab5e937, 0x680046d9, 0xd0bc21bc, 0x88df31ea, 0x3063568f,
- 0x22d6f961, 0x9a6a9e04, 0x07bda6bd, 0xbf01c1d8, 0xadb46e36,
- 0x15080953, 0x1d724e9a, 0xa5ce29ff, 0xb77b8611, 0x0fc7e174,
- 0x9210d9cd, 0x2aacbea8, 0x38191146, 0x80a57623, 0xd8c66675,
- 0x607a0110, 0x72cfaefe, 0xca73c99b, 0x57a4f122, 0xef189647,
- 0xfdad39a9, 0x45115ecc, 0x764dee06, 0xcef18963, 0xdc44268d,
- 0x64f841e8, 0xf92f7951, 0x41931e34, 0x5326b1da, 0xeb9ad6bf,
- 0xb3f9c6e9, 0x0b45a18c, 0x19f00e62, 0xa14c6907, 0x3c9b51be,
- 0x842736db, 0x96929935, 0x2e2efe50, 0x2654b999, 0x9ee8defc,
- 0x8c5d7112, 0x34e11677, 0xa9362ece, 0x118a49ab, 0x033fe645,
- 0xbb838120, 0xe3e09176, 0x5b5cf613, 0x49e959fd, 0xf1553e98,
- 0x6c820621, 0xd43e6144, 0xc68bceaa, 0x7e37a9cf, 0xd67f4138,
- 0x6ec3265d, 0x7c7689b3, 0xc4caeed6, 0x591dd66f, 0xe1a1b10a,
- 0xf3141ee4, 0x4ba87981, 0x13cb69d7, 0xab770eb2, 0xb9c2a15c,
- 0x017ec639, 0x9ca9fe80, 0x241599e5, 0x36a0360b, 0x8e1c516e,
- 0x866616a7, 0x3eda71c2, 0x2c6fde2c, 0x94d3b949, 0x090481f0,
- 0xb1b8e695, 0xa30d497b, 0x1bb12e1e, 0x43d23e48, 0xfb6e592d,
- 0xe9dbf6c3, 0x516791a6, 0xccb0a91f, 0x740cce7a, 0x66b96194,
- 0xde0506f1},
- {0x00000000, 0x01c26a37, 0x0384d46e, 0x0246be59, 0x0709a8dc,
- 0x06cbc2eb, 0x048d7cb2, 0x054f1685, 0x0e1351b8, 0x0fd13b8f,
- 0x0d9785d6, 0x0c55efe1, 0x091af964, 0x08d89353, 0x0a9e2d0a,
- 0x0b5c473d, 0x1c26a370, 0x1de4c947, 0x1fa2771e, 0x1e601d29,
- 0x1b2f0bac, 0x1aed619b, 0x18abdfc2, 0x1969b5f5, 0x1235f2c8,
- 0x13f798ff, 0x11b126a6, 0x10734c91, 0x153c5a14, 0x14fe3023,
- 0x16b88e7a, 0x177ae44d, 0x384d46e0, 0x398f2cd7, 0x3bc9928e,
- 0x3a0bf8b9, 0x3f44ee3c, 0x3e86840b, 0x3cc03a52, 0x3d025065,
- 0x365e1758, 0x379c7d6f, 0x35dac336, 0x3418a901, 0x3157bf84,
- 0x3095d5b3, 0x32d36bea, 0x331101dd, 0x246be590, 0x25a98fa7,
- 0x27ef31fe, 0x262d5bc9, 0x23624d4c, 0x22a0277b, 0x20e69922,
- 0x2124f315, 0x2a78b428, 0x2bbade1f, 0x29fc6046, 0x283e0a71,
- 0x2d711cf4, 0x2cb376c3, 0x2ef5c89a, 0x2f37a2ad, 0x709a8dc0,
- 0x7158e7f7, 0x731e59ae, 0x72dc3399, 0x7793251c, 0x76514f2b,
- 0x7417f172, 0x75d59b45, 0x7e89dc78, 0x7f4bb64f, 0x7d0d0816,
- 0x7ccf6221, 0x798074a4, 0x78421e93, 0x7a04a0ca, 0x7bc6cafd,
- 0x6cbc2eb0, 0x6d7e4487, 0x6f38fade, 0x6efa90e9, 0x6bb5866c,
- 0x6a77ec5b, 0x68315202, 0x69f33835, 0x62af7f08, 0x636d153f,
- 0x612bab66, 0x60e9c151, 0x65a6d7d4, 0x6464bde3, 0x662203ba,
- 0x67e0698d, 0x48d7cb20, 0x4915a117, 0x4b531f4e, 0x4a917579,
- 0x4fde63fc, 0x4e1c09cb, 0x4c5ab792, 0x4d98dda5, 0x46c49a98,
- 0x4706f0af, 0x45404ef6, 0x448224c1, 0x41cd3244, 0x400f5873,
- 0x4249e62a, 0x438b8c1d, 0x54f16850, 0x55330267, 0x5775bc3e,
- 0x56b7d609, 0x53f8c08c, 0x523aaabb, 0x507c14e2, 0x51be7ed5,
- 0x5ae239e8, 0x5b2053df, 0x5966ed86, 0x58a487b1, 0x5deb9134,
- 0x5c29fb03, 0x5e6f455a, 0x5fad2f6d, 0xe1351b80, 0xe0f771b7,
- 0xe2b1cfee, 0xe373a5d9, 0xe63cb35c, 0xe7fed96b, 0xe5b86732,
- 0xe47a0d05, 0xef264a38, 0xeee4200f, 0xeca29e56, 0xed60f461,
- 0xe82fe2e4, 0xe9ed88d3, 0xebab368a, 0xea695cbd, 0xfd13b8f0,
- 0xfcd1d2c7, 0xfe976c9e, 0xff5506a9, 0xfa1a102c, 0xfbd87a1b,
- 0xf99ec442, 0xf85cae75, 0xf300e948, 0xf2c2837f, 0xf0843d26,
- 0xf1465711, 0xf4094194, 0xf5cb2ba3, 0xf78d95fa, 0xf64fffcd,
- 0xd9785d60, 0xd8ba3757, 0xdafc890e, 0xdb3ee339, 0xde71f5bc,
- 0xdfb39f8b, 0xddf521d2, 0xdc374be5, 0xd76b0cd8, 0xd6a966ef,
- 0xd4efd8b6, 0xd52db281, 0xd062a404, 0xd1a0ce33, 0xd3e6706a,
- 0xd2241a5d, 0xc55efe10, 0xc49c9427, 0xc6da2a7e, 0xc7184049,
- 0xc25756cc, 0xc3953cfb, 0xc1d382a2, 0xc011e895, 0xcb4dafa8,
- 0xca8fc59f, 0xc8c97bc6, 0xc90b11f1, 0xcc440774, 0xcd866d43,
- 0xcfc0d31a, 0xce02b92d, 0x91af9640, 0x906dfc77, 0x922b422e,
- 0x93e92819, 0x96a63e9c, 0x976454ab, 0x9522eaf2, 0x94e080c5,
- 0x9fbcc7f8, 0x9e7eadcf, 0x9c381396, 0x9dfa79a1, 0x98b56f24,
- 0x99770513, 0x9b31bb4a, 0x9af3d17d, 0x8d893530, 0x8c4b5f07,
- 0x8e0de15e, 0x8fcf8b69, 0x8a809dec, 0x8b42f7db, 0x89044982,
- 0x88c623b5, 0x839a6488, 0x82580ebf, 0x801eb0e6, 0x81dcdad1,
- 0x8493cc54, 0x8551a663, 0x8717183a, 0x86d5720d, 0xa9e2d0a0,
- 0xa820ba97, 0xaa6604ce, 0xaba46ef9, 0xaeeb787c, 0xaf29124b,
- 0xad6fac12, 0xacadc625, 0xa7f18118, 0xa633eb2f, 0xa4755576,
- 0xa5b73f41, 0xa0f829c4, 0xa13a43f3, 0xa37cfdaa, 0xa2be979d,
- 0xb5c473d0, 0xb40619e7, 0xb640a7be, 0xb782cd89, 0xb2cddb0c,
- 0xb30fb13b, 0xb1490f62, 0xb08b6555, 0xbbd72268, 0xba15485f,
- 0xb853f606, 0xb9919c31, 0xbcde8ab4, 0xbd1ce083, 0xbf5a5eda,
- 0xbe9834ed},
- {0x00000000, 0x191b3141, 0x32366282, 0x2b2d53c3, 0x646cc504,
- 0x7d77f445, 0x565aa786, 0x4f4196c7, 0xc8d98a08, 0xd1c2bb49,
- 0xfaefe88a, 0xe3f4d9cb, 0xacb54f0c, 0xb5ae7e4d, 0x9e832d8e,
- 0x87981ccf, 0x4ac21251, 0x53d92310, 0x78f470d3, 0x61ef4192,
- 0x2eaed755, 0x37b5e614, 0x1c98b5d7, 0x05838496, 0x821b9859,
- 0x9b00a918, 0xb02dfadb, 0xa936cb9a, 0xe6775d5d, 0xff6c6c1c,
- 0xd4413fdf, 0xcd5a0e9e, 0x958424a2, 0x8c9f15e3, 0xa7b24620,
- 0xbea97761, 0xf1e8e1a6, 0xe8f3d0e7, 0xc3de8324, 0xdac5b265,
- 0x5d5daeaa, 0x44469feb, 0x6f6bcc28, 0x7670fd69, 0x39316bae,
- 0x202a5aef, 0x0b07092c, 0x121c386d, 0xdf4636f3, 0xc65d07b2,
- 0xed705471, 0xf46b6530, 0xbb2af3f7, 0xa231c2b6, 0x891c9175,
- 0x9007a034, 0x179fbcfb, 0x0e848dba, 0x25a9de79, 0x3cb2ef38,
- 0x73f379ff, 0x6ae848be, 0x41c51b7d, 0x58de2a3c, 0xf0794f05,
- 0xe9627e44, 0xc24f2d87, 0xdb541cc6, 0x94158a01, 0x8d0ebb40,
- 0xa623e883, 0xbf38d9c2, 0x38a0c50d, 0x21bbf44c, 0x0a96a78f,
- 0x138d96ce, 0x5ccc0009, 0x45d73148, 0x6efa628b, 0x77e153ca,
- 0xbabb5d54, 0xa3a06c15, 0x888d3fd6, 0x91960e97, 0xded79850,
- 0xc7cca911, 0xece1fad2, 0xf5facb93, 0x7262d75c, 0x6b79e61d,
- 0x4054b5de, 0x594f849f, 0x160e1258, 0x0f152319, 0x243870da,
- 0x3d23419b, 0x65fd6ba7, 0x7ce65ae6, 0x57cb0925, 0x4ed03864,
- 0x0191aea3, 0x188a9fe2, 0x33a7cc21, 0x2abcfd60, 0xad24e1af,
- 0xb43fd0ee, 0x9f12832d, 0x8609b26c, 0xc94824ab, 0xd05315ea,
- 0xfb7e4629, 0xe2657768, 0x2f3f79f6, 0x362448b7, 0x1d091b74,
- 0x04122a35, 0x4b53bcf2, 0x52488db3, 0x7965de70, 0x607eef31,
- 0xe7e6f3fe, 0xfefdc2bf, 0xd5d0917c, 0xcccba03d, 0x838a36fa,
- 0x9a9107bb, 0xb1bc5478, 0xa8a76539, 0x3b83984b, 0x2298a90a,
- 0x09b5fac9, 0x10aecb88, 0x5fef5d4f, 0x46f46c0e, 0x6dd93fcd,
- 0x74c20e8c, 0xf35a1243, 0xea412302, 0xc16c70c1, 0xd8774180,
- 0x9736d747, 0x8e2de606, 0xa500b5c5, 0xbc1b8484, 0x71418a1a,
- 0x685abb5b, 0x4377e898, 0x5a6cd9d9, 0x152d4f1e, 0x0c367e5f,
- 0x271b2d9c, 0x3e001cdd, 0xb9980012, 0xa0833153, 0x8bae6290,
- 0x92b553d1, 0xddf4c516, 0xc4eff457, 0xefc2a794, 0xf6d996d5,
- 0xae07bce9, 0xb71c8da8, 0x9c31de6b, 0x852aef2a, 0xca6b79ed,
- 0xd37048ac, 0xf85d1b6f, 0xe1462a2e, 0x66de36e1, 0x7fc507a0,
- 0x54e85463, 0x4df36522, 0x02b2f3e5, 0x1ba9c2a4, 0x30849167,
- 0x299fa026, 0xe4c5aeb8, 0xfdde9ff9, 0xd6f3cc3a, 0xcfe8fd7b,
- 0x80a96bbc, 0x99b25afd, 0xb29f093e, 0xab84387f, 0x2c1c24b0,
- 0x350715f1, 0x1e2a4632, 0x07317773, 0x4870e1b4, 0x516bd0f5,
- 0x7a468336, 0x635db277, 0xcbfad74e, 0xd2e1e60f, 0xf9ccb5cc,
- 0xe0d7848d, 0xaf96124a, 0xb68d230b, 0x9da070c8, 0x84bb4189,
- 0x03235d46, 0x1a386c07, 0x31153fc4, 0x280e0e85, 0x674f9842,
- 0x7e54a903, 0x5579fac0, 0x4c62cb81, 0x8138c51f, 0x9823f45e,
- 0xb30ea79d, 0xaa1596dc, 0xe554001b, 0xfc4f315a, 0xd7626299,
- 0xce7953d8, 0x49e14f17, 0x50fa7e56, 0x7bd72d95, 0x62cc1cd4,
- 0x2d8d8a13, 0x3496bb52, 0x1fbbe891, 0x06a0d9d0, 0x5e7ef3ec,
- 0x4765c2ad, 0x6c48916e, 0x7553a02f, 0x3a1236e8, 0x230907a9,
- 0x0824546a, 0x113f652b, 0x96a779e4, 0x8fbc48a5, 0xa4911b66,
- 0xbd8a2a27, 0xf2cbbce0, 0xebd08da1, 0xc0fdde62, 0xd9e6ef23,
- 0x14bce1bd, 0x0da7d0fc, 0x268a833f, 0x3f91b27e, 0x70d024b9,
- 0x69cb15f8, 0x42e6463b, 0x5bfd777a, 0xdc656bb5, 0xc57e5af4,
- 0xee530937, 0xf7483876, 0xb809aeb1, 0xa1129ff0, 0x8a3fcc33,
- 0x9324fd72},
- {0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419,
- 0x706af48f, 0xe963a535, 0x9e6495a3, 0x0edb8832, 0x79dcb8a4,
- 0xe0d5e91e, 0x97d2d988, 0x09b64c2b, 0x7eb17cbd, 0xe7b82d07,
- 0x90bf1d91, 0x1db71064, 0x6ab020f2, 0xf3b97148, 0x84be41de,
- 0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7, 0x136c9856,
- 0x646ba8c0, 0xfd62f97a, 0x8a65c9ec, 0x14015c4f, 0x63066cd9,
- 0xfa0f3d63, 0x8d080df5, 0x3b6e20c8, 0x4c69105e, 0xd56041e4,
- 0xa2677172, 0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b,
- 0x35b5a8fa, 0x42b2986c, 0xdbbbc9d6, 0xacbcf940, 0x32d86ce3,
- 0x45df5c75, 0xdcd60dcf, 0xabd13d59, 0x26d930ac, 0x51de003a,
- 0xc8d75180, 0xbfd06116, 0x21b4f4b5, 0x56b3c423, 0xcfba9599,
- 0xb8bda50f, 0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924,
- 0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d, 0x76dc4190,
- 0x01db7106, 0x98d220bc, 0xefd5102a, 0x71b18589, 0x06b6b51f,
- 0x9fbfe4a5, 0xe8b8d433, 0x7807c9a2, 0x0f00f934, 0x9609a88e,
- 0xe10e9818, 0x7f6a0dbb, 0x086d3d2d, 0x91646c97, 0xe6635c01,
- 0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e, 0x6c0695ed,
- 0x1b01a57b, 0x8208f4c1, 0xf50fc457, 0x65b0d9c6, 0x12b7e950,
- 0x8bbeb8ea, 0xfcb9887c, 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3,
- 0xfbd44c65, 0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2,
- 0x4adfa541, 0x3dd895d7, 0xa4d1c46d, 0xd3d6f4fb, 0x4369e96a,
- 0x346ed9fc, 0xad678846, 0xda60b8d0, 0x44042d73, 0x33031de5,
- 0xaa0a4c5f, 0xdd0d7cc9, 0x5005713c, 0x270241aa, 0xbe0b1010,
- 0xc90c2086, 0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f,
- 0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4, 0x59b33d17,
- 0x2eb40d81, 0xb7bd5c3b, 0xc0ba6cad, 0xedb88320, 0x9abfb3b6,
- 0x03b6e20c, 0x74b1d29a, 0xead54739, 0x9dd277af, 0x04db2615,
- 0x73dc1683, 0xe3630b12, 0x94643b84, 0x0d6d6a3e, 0x7a6a5aa8,
- 0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1, 0xf00f9344,
- 0x8708a3d2, 0x1e01f268, 0x6906c2fe, 0xf762575d, 0x806567cb,
- 0x196c3671, 0x6e6b06e7, 0xfed41b76, 0x89d32be0, 0x10da7a5a,
- 0x67dd4acc, 0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5,
- 0xd6d6a3e8, 0xa1d1937e, 0x38d8c2c4, 0x4fdff252, 0xd1bb67f1,
- 0xa6bc5767, 0x3fb506dd, 0x48b2364b, 0xd80d2bda, 0xaf0a1b4c,
- 0x36034af6, 0x41047a60, 0xdf60efc3, 0xa867df55, 0x316e8eef,
- 0x4669be79, 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236,
- 0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f, 0xc5ba3bbe,
- 0xb2bd0b28, 0x2bb45a92, 0x5cb36a04, 0xc2d7ffa7, 0xb5d0cf31,
- 0x2cd99e8b, 0x5bdeae1d, 0x9b64c2b0, 0xec63f226, 0x756aa39c,
- 0x026d930a, 0x9c0906a9, 0xeb0e363f, 0x72076785, 0x05005713,
- 0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38, 0x92d28e9b,
- 0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21, 0x86d3d2d4, 0xf1d4e242,
- 0x68ddb3f8, 0x1fda836e, 0x81be16cd, 0xf6b9265b, 0x6fb077e1,
- 0x18b74777, 0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c,
- 0x8f659eff, 0xf862ae69, 0x616bffd3, 0x166ccf45, 0xa00ae278,
- 0xd70dd2ee, 0x4e048354, 0x3903b3c2, 0xa7672661, 0xd06016f7,
- 0x4969474d, 0x3e6e77db, 0xaed16a4a, 0xd9d65adc, 0x40df0b66,
- 0x37d83bf0, 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9,
- 0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, 0xbad03605,
- 0xcdd70693, 0x54de5729, 0x23d967bf, 0xb3667a2e, 0xc4614ab8,
- 0x5d681b02, 0x2a6f2b94, 0xb40bbe37, 0xc30c8ea1, 0x5a05df1b,
- 0x2d02ef8d}};
-
-local const z_word_t FAR crc_braid_big_table[][256] = {
- {0x00000000, 0x96300777, 0x2c610eee, 0xba510999, 0x19c46d07,
- 0x8ff46a70, 0x35a563e9, 0xa395649e, 0x3288db0e, 0xa4b8dc79,
- 0x1ee9d5e0, 0x88d9d297, 0x2b4cb609, 0xbd7cb17e, 0x072db8e7,
- 0x911dbf90, 0x6410b71d, 0xf220b06a, 0x4871b9f3, 0xde41be84,
- 0x7dd4da1a, 0xebe4dd6d, 0x51b5d4f4, 0xc785d383, 0x56986c13,
- 0xc0a86b64, 0x7af962fd, 0xecc9658a, 0x4f5c0114, 0xd96c0663,
- 0x633d0ffa, 0xf50d088d, 0xc8206e3b, 0x5e10694c, 0xe44160d5,
- 0x727167a2, 0xd1e4033c, 0x47d4044b, 0xfd850dd2, 0x6bb50aa5,
- 0xfaa8b535, 0x6c98b242, 0xd6c9bbdb, 0x40f9bcac, 0xe36cd832,
- 0x755cdf45, 0xcf0dd6dc, 0x593dd1ab, 0xac30d926, 0x3a00de51,
- 0x8051d7c8, 0x1661d0bf, 0xb5f4b421, 0x23c4b356, 0x9995bacf,
- 0x0fa5bdb8, 0x9eb80228, 0x0888055f, 0xb2d90cc6, 0x24e90bb1,
- 0x877c6f2f, 0x114c6858, 0xab1d61c1, 0x3d2d66b6, 0x9041dc76,
- 0x0671db01, 0xbc20d298, 0x2a10d5ef, 0x8985b171, 0x1fb5b606,
- 0xa5e4bf9f, 0x33d4b8e8, 0xa2c90778, 0x34f9000f, 0x8ea80996,
- 0x18980ee1, 0xbb0d6a7f, 0x2d3d6d08, 0x976c6491, 0x015c63e6,
- 0xf4516b6b, 0x62616c1c, 0xd8306585, 0x4e0062f2, 0xed95066c,
- 0x7ba5011b, 0xc1f40882, 0x57c40ff5, 0xc6d9b065, 0x50e9b712,
- 0xeab8be8b, 0x7c88b9fc, 0xdf1ddd62, 0x492dda15, 0xf37cd38c,
- 0x654cd4fb, 0x5861b24d, 0xce51b53a, 0x7400bca3, 0xe230bbd4,
- 0x41a5df4a, 0xd795d83d, 0x6dc4d1a4, 0xfbf4d6d3, 0x6ae96943,
- 0xfcd96e34, 0x468867ad, 0xd0b860da, 0x732d0444, 0xe51d0333,
- 0x5f4c0aaa, 0xc97c0ddd, 0x3c710550, 0xaa410227, 0x10100bbe,
- 0x86200cc9, 0x25b56857, 0xb3856f20, 0x09d466b9, 0x9fe461ce,
- 0x0ef9de5e, 0x98c9d929, 0x2298d0b0, 0xb4a8d7c7, 0x173db359,
- 0x810db42e, 0x3b5cbdb7, 0xad6cbac0, 0x2083b8ed, 0xb6b3bf9a,
- 0x0ce2b603, 0x9ad2b174, 0x3947d5ea, 0xaf77d29d, 0x1526db04,
- 0x8316dc73, 0x120b63e3, 0x843b6494, 0x3e6a6d0d, 0xa85a6a7a,
- 0x0bcf0ee4, 0x9dff0993, 0x27ae000a, 0xb19e077d, 0x44930ff0,
- 0xd2a30887, 0x68f2011e, 0xfec20669, 0x5d5762f7, 0xcb676580,
- 0x71366c19, 0xe7066b6e, 0x761bd4fe, 0xe02bd389, 0x5a7ada10,
- 0xcc4add67, 0x6fdfb9f9, 0xf9efbe8e, 0x43beb717, 0xd58eb060,
- 0xe8a3d6d6, 0x7e93d1a1, 0xc4c2d838, 0x52f2df4f, 0xf167bbd1,
- 0x6757bca6, 0xdd06b53f, 0x4b36b248, 0xda2b0dd8, 0x4c1b0aaf,
- 0xf64a0336, 0x607a0441, 0xc3ef60df, 0x55df67a8, 0xef8e6e31,
- 0x79be6946, 0x8cb361cb, 0x1a8366bc, 0xa0d26f25, 0x36e26852,
- 0x95770ccc, 0x03470bbb, 0xb9160222, 0x2f260555, 0xbe3bbac5,
- 0x280bbdb2, 0x925ab42b, 0x046ab35c, 0xa7ffd7c2, 0x31cfd0b5,
- 0x8b9ed92c, 0x1daede5b, 0xb0c2649b, 0x26f263ec, 0x9ca36a75,
- 0x0a936d02, 0xa906099c, 0x3f360eeb, 0x85670772, 0x13570005,
- 0x824abf95, 0x147ab8e2, 0xae2bb17b, 0x381bb60c, 0x9b8ed292,
- 0x0dbed5e5, 0xb7efdc7c, 0x21dfdb0b, 0xd4d2d386, 0x42e2d4f1,
- 0xf8b3dd68, 0x6e83da1f, 0xcd16be81, 0x5b26b9f6, 0xe177b06f,
- 0x7747b718, 0xe65a0888, 0x706a0fff, 0xca3b0666, 0x5c0b0111,
- 0xff9e658f, 0x69ae62f8, 0xd3ff6b61, 0x45cf6c16, 0x78e20aa0,
- 0xeed20dd7, 0x5483044e, 0xc2b30339, 0x612667a7, 0xf71660d0,
- 0x4d476949, 0xdb776e3e, 0x4a6ad1ae, 0xdc5ad6d9, 0x660bdf40,
- 0xf03bd837, 0x53aebca9, 0xc59ebbde, 0x7fcfb247, 0xe9ffb530,
- 0x1cf2bdbd, 0x8ac2baca, 0x3093b353, 0xa6a3b424, 0x0536d0ba,
- 0x9306d7cd, 0x2957de54, 0xbf67d923, 0x2e7a66b3, 0xb84a61c4,
- 0x021b685d, 0x942b6f2a, 0x37be0bb4, 0xa18e0cc3, 0x1bdf055a,
- 0x8def022d},
- {0x00000000, 0x41311b19, 0x82623632, 0xc3532d2b, 0x04c56c64,
- 0x45f4777d, 0x86a75a56, 0xc796414f, 0x088ad9c8, 0x49bbc2d1,
- 0x8ae8effa, 0xcbd9f4e3, 0x0c4fb5ac, 0x4d7eaeb5, 0x8e2d839e,
- 0xcf1c9887, 0x5112c24a, 0x1023d953, 0xd370f478, 0x9241ef61,
- 0x55d7ae2e, 0x14e6b537, 0xd7b5981c, 0x96848305, 0x59981b82,
- 0x18a9009b, 0xdbfa2db0, 0x9acb36a9, 0x5d5d77e6, 0x1c6c6cff,
- 0xdf3f41d4, 0x9e0e5acd, 0xa2248495, 0xe3159f8c, 0x2046b2a7,
- 0x6177a9be, 0xa6e1e8f1, 0xe7d0f3e8, 0x2483dec3, 0x65b2c5da,
- 0xaaae5d5d, 0xeb9f4644, 0x28cc6b6f, 0x69fd7076, 0xae6b3139,
- 0xef5a2a20, 0x2c09070b, 0x6d381c12, 0xf33646df, 0xb2075dc6,
- 0x715470ed, 0x30656bf4, 0xf7f32abb, 0xb6c231a2, 0x75911c89,
- 0x34a00790, 0xfbbc9f17, 0xba8d840e, 0x79dea925, 0x38efb23c,
- 0xff79f373, 0xbe48e86a, 0x7d1bc541, 0x3c2ade58, 0x054f79f0,
- 0x447e62e9, 0x872d4fc2, 0xc61c54db, 0x018a1594, 0x40bb0e8d,
- 0x83e823a6, 0xc2d938bf, 0x0dc5a038, 0x4cf4bb21, 0x8fa7960a,
- 0xce968d13, 0x0900cc5c, 0x4831d745, 0x8b62fa6e, 0xca53e177,
- 0x545dbbba, 0x156ca0a3, 0xd63f8d88, 0x970e9691, 0x5098d7de,
- 0x11a9ccc7, 0xd2fae1ec, 0x93cbfaf5, 0x5cd76272, 0x1de6796b,
- 0xdeb55440, 0x9f844f59, 0x58120e16, 0x1923150f, 0xda703824,
- 0x9b41233d, 0xa76bfd65, 0xe65ae67c, 0x2509cb57, 0x6438d04e,
- 0xa3ae9101, 0xe29f8a18, 0x21cca733, 0x60fdbc2a, 0xafe124ad,
- 0xeed03fb4, 0x2d83129f, 0x6cb20986, 0xab2448c9, 0xea1553d0,
- 0x29467efb, 0x687765e2, 0xf6793f2f, 0xb7482436, 0x741b091d,
- 0x352a1204, 0xf2bc534b, 0xb38d4852, 0x70de6579, 0x31ef7e60,
- 0xfef3e6e7, 0xbfc2fdfe, 0x7c91d0d5, 0x3da0cbcc, 0xfa368a83,
- 0xbb07919a, 0x7854bcb1, 0x3965a7a8, 0x4b98833b, 0x0aa99822,
- 0xc9fab509, 0x88cbae10, 0x4f5def5f, 0x0e6cf446, 0xcd3fd96d,
- 0x8c0ec274, 0x43125af3, 0x022341ea, 0xc1706cc1, 0x804177d8,
- 0x47d73697, 0x06e62d8e, 0xc5b500a5, 0x84841bbc, 0x1a8a4171,
- 0x5bbb5a68, 0x98e87743, 0xd9d96c5a, 0x1e4f2d15, 0x5f7e360c,
- 0x9c2d1b27, 0xdd1c003e, 0x120098b9, 0x533183a0, 0x9062ae8b,
- 0xd153b592, 0x16c5f4dd, 0x57f4efc4, 0x94a7c2ef, 0xd596d9f6,
- 0xe9bc07ae, 0xa88d1cb7, 0x6bde319c, 0x2aef2a85, 0xed796bca,
- 0xac4870d3, 0x6f1b5df8, 0x2e2a46e1, 0xe136de66, 0xa007c57f,
- 0x6354e854, 0x2265f34d, 0xe5f3b202, 0xa4c2a91b, 0x67918430,
- 0x26a09f29, 0xb8aec5e4, 0xf99fdefd, 0x3accf3d6, 0x7bfde8cf,
- 0xbc6ba980, 0xfd5ab299, 0x3e099fb2, 0x7f3884ab, 0xb0241c2c,
- 0xf1150735, 0x32462a1e, 0x73773107, 0xb4e17048, 0xf5d06b51,
- 0x3683467a, 0x77b25d63, 0x4ed7facb, 0x0fe6e1d2, 0xccb5ccf9,
- 0x8d84d7e0, 0x4a1296af, 0x0b238db6, 0xc870a09d, 0x8941bb84,
- 0x465d2303, 0x076c381a, 0xc43f1531, 0x850e0e28, 0x42984f67,
- 0x03a9547e, 0xc0fa7955, 0x81cb624c, 0x1fc53881, 0x5ef42398,
- 0x9da70eb3, 0xdc9615aa, 0x1b0054e5, 0x5a314ffc, 0x996262d7,
- 0xd85379ce, 0x174fe149, 0x567efa50, 0x952dd77b, 0xd41ccc62,
- 0x138a8d2d, 0x52bb9634, 0x91e8bb1f, 0xd0d9a006, 0xecf37e5e,
- 0xadc26547, 0x6e91486c, 0x2fa05375, 0xe836123a, 0xa9070923,
- 0x6a542408, 0x2b653f11, 0xe479a796, 0xa548bc8f, 0x661b91a4,
- 0x272a8abd, 0xe0bccbf2, 0xa18dd0eb, 0x62defdc0, 0x23efe6d9,
- 0xbde1bc14, 0xfcd0a70d, 0x3f838a26, 0x7eb2913f, 0xb924d070,
- 0xf815cb69, 0x3b46e642, 0x7a77fd5b, 0xb56b65dc, 0xf45a7ec5,
- 0x370953ee, 0x763848f7, 0xb1ae09b8, 0xf09f12a1, 0x33cc3f8a,
- 0x72fd2493},
- {0x00000000, 0x376ac201, 0x6ed48403, 0x59be4602, 0xdca80907,
- 0xebc2cb06, 0xb27c8d04, 0x85164f05, 0xb851130e, 0x8f3bd10f,
- 0xd685970d, 0xe1ef550c, 0x64f91a09, 0x5393d808, 0x0a2d9e0a,
- 0x3d475c0b, 0x70a3261c, 0x47c9e41d, 0x1e77a21f, 0x291d601e,
- 0xac0b2f1b, 0x9b61ed1a, 0xc2dfab18, 0xf5b56919, 0xc8f23512,
- 0xff98f713, 0xa626b111, 0x914c7310, 0x145a3c15, 0x2330fe14,
- 0x7a8eb816, 0x4de47a17, 0xe0464d38, 0xd72c8f39, 0x8e92c93b,
- 0xb9f80b3a, 0x3cee443f, 0x0b84863e, 0x523ac03c, 0x6550023d,
- 0x58175e36, 0x6f7d9c37, 0x36c3da35, 0x01a91834, 0x84bf5731,
- 0xb3d59530, 0xea6bd332, 0xdd011133, 0x90e56b24, 0xa78fa925,
- 0xfe31ef27, 0xc95b2d26, 0x4c4d6223, 0x7b27a022, 0x2299e620,
- 0x15f32421, 0x28b4782a, 0x1fdeba2b, 0x4660fc29, 0x710a3e28,
- 0xf41c712d, 0xc376b32c, 0x9ac8f52e, 0xada2372f, 0xc08d9a70,
- 0xf7e75871, 0xae591e73, 0x9933dc72, 0x1c259377, 0x2b4f5176,
- 0x72f11774, 0x459bd575, 0x78dc897e, 0x4fb64b7f, 0x16080d7d,
- 0x2162cf7c, 0xa4748079, 0x931e4278, 0xcaa0047a, 0xfdcac67b,
- 0xb02ebc6c, 0x87447e6d, 0xdefa386f, 0xe990fa6e, 0x6c86b56b,
- 0x5bec776a, 0x02523168, 0x3538f369, 0x087faf62, 0x3f156d63,
- 0x66ab2b61, 0x51c1e960, 0xd4d7a665, 0xe3bd6464, 0xba032266,
- 0x8d69e067, 0x20cbd748, 0x17a11549, 0x4e1f534b, 0x7975914a,
- 0xfc63de4f, 0xcb091c4e, 0x92b75a4c, 0xa5dd984d, 0x989ac446,
- 0xaff00647, 0xf64e4045, 0xc1248244, 0x4432cd41, 0x73580f40,
- 0x2ae64942, 0x1d8c8b43, 0x5068f154, 0x67023355, 0x3ebc7557,
- 0x09d6b756, 0x8cc0f853, 0xbbaa3a52, 0xe2147c50, 0xd57ebe51,
- 0xe839e25a, 0xdf53205b, 0x86ed6659, 0xb187a458, 0x3491eb5d,
- 0x03fb295c, 0x5a456f5e, 0x6d2fad5f, 0x801b35e1, 0xb771f7e0,
- 0xeecfb1e2, 0xd9a573e3, 0x5cb33ce6, 0x6bd9fee7, 0x3267b8e5,
- 0x050d7ae4, 0x384a26ef, 0x0f20e4ee, 0x569ea2ec, 0x61f460ed,
- 0xe4e22fe8, 0xd388ede9, 0x8a36abeb, 0xbd5c69ea, 0xf0b813fd,
- 0xc7d2d1fc, 0x9e6c97fe, 0xa90655ff, 0x2c101afa, 0x1b7ad8fb,
- 0x42c49ef9, 0x75ae5cf8, 0x48e900f3, 0x7f83c2f2, 0x263d84f0,
- 0x115746f1, 0x944109f4, 0xa32bcbf5, 0xfa958df7, 0xcdff4ff6,
- 0x605d78d9, 0x5737bad8, 0x0e89fcda, 0x39e33edb, 0xbcf571de,
- 0x8b9fb3df, 0xd221f5dd, 0xe54b37dc, 0xd80c6bd7, 0xef66a9d6,
- 0xb6d8efd4, 0x81b22dd5, 0x04a462d0, 0x33cea0d1, 0x6a70e6d3,
- 0x5d1a24d2, 0x10fe5ec5, 0x27949cc4, 0x7e2adac6, 0x494018c7,
- 0xcc5657c2, 0xfb3c95c3, 0xa282d3c1, 0x95e811c0, 0xa8af4dcb,
- 0x9fc58fca, 0xc67bc9c8, 0xf1110bc9, 0x740744cc, 0x436d86cd,
- 0x1ad3c0cf, 0x2db902ce, 0x4096af91, 0x77fc6d90, 0x2e422b92,
- 0x1928e993, 0x9c3ea696, 0xab546497, 0xf2ea2295, 0xc580e094,
- 0xf8c7bc9f, 0xcfad7e9e, 0x9613389c, 0xa179fa9d, 0x246fb598,
- 0x13057799, 0x4abb319b, 0x7dd1f39a, 0x3035898d, 0x075f4b8c,
- 0x5ee10d8e, 0x698bcf8f, 0xec9d808a, 0xdbf7428b, 0x82490489,
- 0xb523c688, 0x88649a83, 0xbf0e5882, 0xe6b01e80, 0xd1dadc81,
- 0x54cc9384, 0x63a65185, 0x3a181787, 0x0d72d586, 0xa0d0e2a9,
- 0x97ba20a8, 0xce0466aa, 0xf96ea4ab, 0x7c78ebae, 0x4b1229af,
- 0x12ac6fad, 0x25c6adac, 0x1881f1a7, 0x2feb33a6, 0x765575a4,
- 0x413fb7a5, 0xc429f8a0, 0xf3433aa1, 0xaafd7ca3, 0x9d97bea2,
- 0xd073c4b5, 0xe71906b4, 0xbea740b6, 0x89cd82b7, 0x0cdbcdb2,
- 0x3bb10fb3, 0x620f49b1, 0x55658bb0, 0x6822d7bb, 0x5f4815ba,
- 0x06f653b8, 0x319c91b9, 0xb48adebc, 0x83e01cbd, 0xda5e5abf,
- 0xed3498be},
- {0x00000000, 0x6567bcb8, 0x8bc809aa, 0xeeafb512, 0x5797628f,
- 0x32f0de37, 0xdc5f6b25, 0xb938d79d, 0xef28b4c5, 0x8a4f087d,
- 0x64e0bd6f, 0x018701d7, 0xb8bfd64a, 0xddd86af2, 0x3377dfe0,
- 0x56106358, 0x9f571950, 0xfa30a5e8, 0x149f10fa, 0x71f8ac42,
- 0xc8c07bdf, 0xada7c767, 0x43087275, 0x266fcecd, 0x707fad95,
- 0x1518112d, 0xfbb7a43f, 0x9ed01887, 0x27e8cf1a, 0x428f73a2,
- 0xac20c6b0, 0xc9477a08, 0x3eaf32a0, 0x5bc88e18, 0xb5673b0a,
- 0xd00087b2, 0x6938502f, 0x0c5fec97, 0xe2f05985, 0x8797e53d,
- 0xd1878665, 0xb4e03add, 0x5a4f8fcf, 0x3f283377, 0x8610e4ea,
- 0xe3775852, 0x0dd8ed40, 0x68bf51f8, 0xa1f82bf0, 0xc49f9748,
- 0x2a30225a, 0x4f579ee2, 0xf66f497f, 0x9308f5c7, 0x7da740d5,
- 0x18c0fc6d, 0x4ed09f35, 0x2bb7238d, 0xc518969f, 0xa07f2a27,
- 0x1947fdba, 0x7c204102, 0x928ff410, 0xf7e848a8, 0x3d58149b,
- 0x583fa823, 0xb6901d31, 0xd3f7a189, 0x6acf7614, 0x0fa8caac,
- 0xe1077fbe, 0x8460c306, 0xd270a05e, 0xb7171ce6, 0x59b8a9f4,
- 0x3cdf154c, 0x85e7c2d1, 0xe0807e69, 0x0e2fcb7b, 0x6b4877c3,
- 0xa20f0dcb, 0xc768b173, 0x29c70461, 0x4ca0b8d9, 0xf5986f44,
- 0x90ffd3fc, 0x7e5066ee, 0x1b37da56, 0x4d27b90e, 0x284005b6,
- 0xc6efb0a4, 0xa3880c1c, 0x1ab0db81, 0x7fd76739, 0x9178d22b,
- 0xf41f6e93, 0x03f7263b, 0x66909a83, 0x883f2f91, 0xed589329,
- 0x546044b4, 0x3107f80c, 0xdfa84d1e, 0xbacff1a6, 0xecdf92fe,
- 0x89b82e46, 0x67179b54, 0x027027ec, 0xbb48f071, 0xde2f4cc9,
- 0x3080f9db, 0x55e74563, 0x9ca03f6b, 0xf9c783d3, 0x176836c1,
- 0x720f8a79, 0xcb375de4, 0xae50e15c, 0x40ff544e, 0x2598e8f6,
- 0x73888bae, 0x16ef3716, 0xf8408204, 0x9d273ebc, 0x241fe921,
- 0x41785599, 0xafd7e08b, 0xcab05c33, 0x3bb659ed, 0x5ed1e555,
- 0xb07e5047, 0xd519ecff, 0x6c213b62, 0x094687da, 0xe7e932c8,
- 0x828e8e70, 0xd49eed28, 0xb1f95190, 0x5f56e482, 0x3a31583a,
- 0x83098fa7, 0xe66e331f, 0x08c1860d, 0x6da63ab5, 0xa4e140bd,
- 0xc186fc05, 0x2f294917, 0x4a4ef5af, 0xf3762232, 0x96119e8a,
- 0x78be2b98, 0x1dd99720, 0x4bc9f478, 0x2eae48c0, 0xc001fdd2,
- 0xa566416a, 0x1c5e96f7, 0x79392a4f, 0x97969f5d, 0xf2f123e5,
- 0x05196b4d, 0x607ed7f5, 0x8ed162e7, 0xebb6de5f, 0x528e09c2,
- 0x37e9b57a, 0xd9460068, 0xbc21bcd0, 0xea31df88, 0x8f566330,
- 0x61f9d622, 0x049e6a9a, 0xbda6bd07, 0xd8c101bf, 0x366eb4ad,
- 0x53090815, 0x9a4e721d, 0xff29cea5, 0x11867bb7, 0x74e1c70f,
- 0xcdd91092, 0xa8beac2a, 0x46111938, 0x2376a580, 0x7566c6d8,
- 0x10017a60, 0xfeaecf72, 0x9bc973ca, 0x22f1a457, 0x479618ef,
- 0xa939adfd, 0xcc5e1145, 0x06ee4d76, 0x6389f1ce, 0x8d2644dc,
- 0xe841f864, 0x51792ff9, 0x341e9341, 0xdab12653, 0xbfd69aeb,
- 0xe9c6f9b3, 0x8ca1450b, 0x620ef019, 0x07694ca1, 0xbe519b3c,
- 0xdb362784, 0x35999296, 0x50fe2e2e, 0x99b95426, 0xfcdee89e,
- 0x12715d8c, 0x7716e134, 0xce2e36a9, 0xab498a11, 0x45e63f03,
- 0x208183bb, 0x7691e0e3, 0x13f65c5b, 0xfd59e949, 0x983e55f1,
- 0x2106826c, 0x44613ed4, 0xaace8bc6, 0xcfa9377e, 0x38417fd6,
- 0x5d26c36e, 0xb389767c, 0xd6eecac4, 0x6fd61d59, 0x0ab1a1e1,
- 0xe41e14f3, 0x8179a84b, 0xd769cb13, 0xb20e77ab, 0x5ca1c2b9,
- 0x39c67e01, 0x80fea99c, 0xe5991524, 0x0b36a036, 0x6e511c8e,
- 0xa7166686, 0xc271da3e, 0x2cde6f2c, 0x49b9d394, 0xf0810409,
- 0x95e6b8b1, 0x7b490da3, 0x1e2eb11b, 0x483ed243, 0x2d596efb,
- 0xc3f6dbe9, 0xa6916751, 0x1fa9b0cc, 0x7ace0c74, 0x9461b966,
- 0xf10605de}};
-
-#endif
-
-#endif
-
-#if N == 2
-
-#if W == 8
-
-local const z_crc_t FAR crc_braid_table[][256] = {
- {0x00000000, 0xae689191, 0x87a02563, 0x29c8b4f2, 0xd4314c87,
- 0x7a59dd16, 0x539169e4, 0xfdf9f875, 0x73139f4f, 0xdd7b0ede,
- 0xf4b3ba2c, 0x5adb2bbd, 0xa722d3c8, 0x094a4259, 0x2082f6ab,
- 0x8eea673a, 0xe6273e9e, 0x484faf0f, 0x61871bfd, 0xcfef8a6c,
- 0x32167219, 0x9c7ee388, 0xb5b6577a, 0x1bdec6eb, 0x9534a1d1,
- 0x3b5c3040, 0x129484b2, 0xbcfc1523, 0x4105ed56, 0xef6d7cc7,
- 0xc6a5c835, 0x68cd59a4, 0x173f7b7d, 0xb957eaec, 0x909f5e1e,
- 0x3ef7cf8f, 0xc30e37fa, 0x6d66a66b, 0x44ae1299, 0xeac68308,
- 0x642ce432, 0xca4475a3, 0xe38cc151, 0x4de450c0, 0xb01da8b5,
- 0x1e753924, 0x37bd8dd6, 0x99d51c47, 0xf11845e3, 0x5f70d472,
- 0x76b86080, 0xd8d0f111, 0x25290964, 0x8b4198f5, 0xa2892c07,
- 0x0ce1bd96, 0x820bdaac, 0x2c634b3d, 0x05abffcf, 0xabc36e5e,
- 0x563a962b, 0xf85207ba, 0xd19ab348, 0x7ff222d9, 0x2e7ef6fa,
- 0x8016676b, 0xa9ded399, 0x07b64208, 0xfa4fba7d, 0x54272bec,
- 0x7def9f1e, 0xd3870e8f, 0x5d6d69b5, 0xf305f824, 0xdacd4cd6,
- 0x74a5dd47, 0x895c2532, 0x2734b4a3, 0x0efc0051, 0xa09491c0,
- 0xc859c864, 0x663159f5, 0x4ff9ed07, 0xe1917c96, 0x1c6884e3,
- 0xb2001572, 0x9bc8a180, 0x35a03011, 0xbb4a572b, 0x1522c6ba,
- 0x3cea7248, 0x9282e3d9, 0x6f7b1bac, 0xc1138a3d, 0xe8db3ecf,
- 0x46b3af5e, 0x39418d87, 0x97291c16, 0xbee1a8e4, 0x10893975,
- 0xed70c100, 0x43185091, 0x6ad0e463, 0xc4b875f2, 0x4a5212c8,
- 0xe43a8359, 0xcdf237ab, 0x639aa63a, 0x9e635e4f, 0x300bcfde,
- 0x19c37b2c, 0xb7abeabd, 0xdf66b319, 0x710e2288, 0x58c6967a,
- 0xf6ae07eb, 0x0b57ff9e, 0xa53f6e0f, 0x8cf7dafd, 0x229f4b6c,
- 0xac752c56, 0x021dbdc7, 0x2bd50935, 0x85bd98a4, 0x784460d1,
- 0xd62cf140, 0xffe445b2, 0x518cd423, 0x5cfdedf4, 0xf2957c65,
- 0xdb5dc897, 0x75355906, 0x88cca173, 0x26a430e2, 0x0f6c8410,
- 0xa1041581, 0x2fee72bb, 0x8186e32a, 0xa84e57d8, 0x0626c649,
- 0xfbdf3e3c, 0x55b7afad, 0x7c7f1b5f, 0xd2178ace, 0xbadad36a,
- 0x14b242fb, 0x3d7af609, 0x93126798, 0x6eeb9fed, 0xc0830e7c,
- 0xe94bba8e, 0x47232b1f, 0xc9c94c25, 0x67a1ddb4, 0x4e696946,
- 0xe001f8d7, 0x1df800a2, 0xb3909133, 0x9a5825c1, 0x3430b450,
- 0x4bc29689, 0xe5aa0718, 0xcc62b3ea, 0x620a227b, 0x9ff3da0e,
- 0x319b4b9f, 0x1853ff6d, 0xb63b6efc, 0x38d109c6, 0x96b99857,
- 0xbf712ca5, 0x1119bd34, 0xece04541, 0x4288d4d0, 0x6b406022,
- 0xc528f1b3, 0xade5a817, 0x038d3986, 0x2a458d74, 0x842d1ce5,
- 0x79d4e490, 0xd7bc7501, 0xfe74c1f3, 0x501c5062, 0xdef63758,
- 0x709ea6c9, 0x5956123b, 0xf73e83aa, 0x0ac77bdf, 0xa4afea4e,
- 0x8d675ebc, 0x230fcf2d, 0x72831b0e, 0xdceb8a9f, 0xf5233e6d,
- 0x5b4baffc, 0xa6b25789, 0x08dac618, 0x211272ea, 0x8f7ae37b,
- 0x01908441, 0xaff815d0, 0x8630a122, 0x285830b3, 0xd5a1c8c6,
- 0x7bc95957, 0x5201eda5, 0xfc697c34, 0x94a42590, 0x3accb401,
- 0x130400f3, 0xbd6c9162, 0x40956917, 0xeefdf886, 0xc7354c74,
- 0x695ddde5, 0xe7b7badf, 0x49df2b4e, 0x60179fbc, 0xce7f0e2d,
- 0x3386f658, 0x9dee67c9, 0xb426d33b, 0x1a4e42aa, 0x65bc6073,
- 0xcbd4f1e2, 0xe21c4510, 0x4c74d481, 0xb18d2cf4, 0x1fe5bd65,
- 0x362d0997, 0x98459806, 0x16afff3c, 0xb8c76ead, 0x910fda5f,
- 0x3f674bce, 0xc29eb3bb, 0x6cf6222a, 0x453e96d8, 0xeb560749,
- 0x839b5eed, 0x2df3cf7c, 0x043b7b8e, 0xaa53ea1f, 0x57aa126a,
- 0xf9c283fb, 0xd00a3709, 0x7e62a698, 0xf088c1a2, 0x5ee05033,
- 0x7728e4c1, 0xd9407550, 0x24b98d25, 0x8ad11cb4, 0xa319a846,
- 0x0d7139d7},
- {0x00000000, 0xb9fbdbe8, 0xa886b191, 0x117d6a79, 0x8a7c6563,
- 0x3387be8b, 0x22fad4f2, 0x9b010f1a, 0xcf89cc87, 0x7672176f,
- 0x670f7d16, 0xdef4a6fe, 0x45f5a9e4, 0xfc0e720c, 0xed731875,
- 0x5488c39d, 0x44629f4f, 0xfd9944a7, 0xece42ede, 0x551ff536,
- 0xce1efa2c, 0x77e521c4, 0x66984bbd, 0xdf639055, 0x8beb53c8,
- 0x32108820, 0x236de259, 0x9a9639b1, 0x019736ab, 0xb86ced43,
- 0xa911873a, 0x10ea5cd2, 0x88c53e9e, 0x313ee576, 0x20438f0f,
- 0x99b854e7, 0x02b95bfd, 0xbb428015, 0xaa3fea6c, 0x13c43184,
- 0x474cf219, 0xfeb729f1, 0xefca4388, 0x56319860, 0xcd30977a,
- 0x74cb4c92, 0x65b626eb, 0xdc4dfd03, 0xcca7a1d1, 0x755c7a39,
- 0x64211040, 0xdddacba8, 0x46dbc4b2, 0xff201f5a, 0xee5d7523,
- 0x57a6aecb, 0x032e6d56, 0xbad5b6be, 0xaba8dcc7, 0x1253072f,
- 0x89520835, 0x30a9d3dd, 0x21d4b9a4, 0x982f624c, 0xcafb7b7d,
- 0x7300a095, 0x627dcaec, 0xdb861104, 0x40871e1e, 0xf97cc5f6,
- 0xe801af8f, 0x51fa7467, 0x0572b7fa, 0xbc896c12, 0xadf4066b,
- 0x140fdd83, 0x8f0ed299, 0x36f50971, 0x27886308, 0x9e73b8e0,
- 0x8e99e432, 0x37623fda, 0x261f55a3, 0x9fe48e4b, 0x04e58151,
- 0xbd1e5ab9, 0xac6330c0, 0x1598eb28, 0x411028b5, 0xf8ebf35d,
- 0xe9969924, 0x506d42cc, 0xcb6c4dd6, 0x7297963e, 0x63eafc47,
- 0xda1127af, 0x423e45e3, 0xfbc59e0b, 0xeab8f472, 0x53432f9a,
- 0xc8422080, 0x71b9fb68, 0x60c49111, 0xd93f4af9, 0x8db78964,
- 0x344c528c, 0x253138f5, 0x9ccae31d, 0x07cbec07, 0xbe3037ef,
- 0xaf4d5d96, 0x16b6867e, 0x065cdaac, 0xbfa70144, 0xaeda6b3d,
- 0x1721b0d5, 0x8c20bfcf, 0x35db6427, 0x24a60e5e, 0x9d5dd5b6,
- 0xc9d5162b, 0x702ecdc3, 0x6153a7ba, 0xd8a87c52, 0x43a97348,
- 0xfa52a8a0, 0xeb2fc2d9, 0x52d41931, 0x4e87f0bb, 0xf77c2b53,
- 0xe601412a, 0x5ffa9ac2, 0xc4fb95d8, 0x7d004e30, 0x6c7d2449,
- 0xd586ffa1, 0x810e3c3c, 0x38f5e7d4, 0x29888dad, 0x90735645,
- 0x0b72595f, 0xb28982b7, 0xa3f4e8ce, 0x1a0f3326, 0x0ae56ff4,
- 0xb31eb41c, 0xa263de65, 0x1b98058d, 0x80990a97, 0x3962d17f,
- 0x281fbb06, 0x91e460ee, 0xc56ca373, 0x7c97789b, 0x6dea12e2,
- 0xd411c90a, 0x4f10c610, 0xf6eb1df8, 0xe7967781, 0x5e6dac69,
- 0xc642ce25, 0x7fb915cd, 0x6ec47fb4, 0xd73fa45c, 0x4c3eab46,
- 0xf5c570ae, 0xe4b81ad7, 0x5d43c13f, 0x09cb02a2, 0xb030d94a,
- 0xa14db333, 0x18b668db, 0x83b767c1, 0x3a4cbc29, 0x2b31d650,
- 0x92ca0db8, 0x8220516a, 0x3bdb8a82, 0x2aa6e0fb, 0x935d3b13,
- 0x085c3409, 0xb1a7efe1, 0xa0da8598, 0x19215e70, 0x4da99ded,
- 0xf4524605, 0xe52f2c7c, 0x5cd4f794, 0xc7d5f88e, 0x7e2e2366,
- 0x6f53491f, 0xd6a892f7, 0x847c8bc6, 0x3d87502e, 0x2cfa3a57,
- 0x9501e1bf, 0x0e00eea5, 0xb7fb354d, 0xa6865f34, 0x1f7d84dc,
- 0x4bf54741, 0xf20e9ca9, 0xe373f6d0, 0x5a882d38, 0xc1892222,
- 0x7872f9ca, 0x690f93b3, 0xd0f4485b, 0xc01e1489, 0x79e5cf61,
- 0x6898a518, 0xd1637ef0, 0x4a6271ea, 0xf399aa02, 0xe2e4c07b,
- 0x5b1f1b93, 0x0f97d80e, 0xb66c03e6, 0xa711699f, 0x1eeab277,
- 0x85ebbd6d, 0x3c106685, 0x2d6d0cfc, 0x9496d714, 0x0cb9b558,
- 0xb5426eb0, 0xa43f04c9, 0x1dc4df21, 0x86c5d03b, 0x3f3e0bd3,
- 0x2e4361aa, 0x97b8ba42, 0xc33079df, 0x7acba237, 0x6bb6c84e,
- 0xd24d13a6, 0x494c1cbc, 0xf0b7c754, 0xe1caad2d, 0x583176c5,
- 0x48db2a17, 0xf120f1ff, 0xe05d9b86, 0x59a6406e, 0xc2a74f74,
- 0x7b5c949c, 0x6a21fee5, 0xd3da250d, 0x8752e690, 0x3ea93d78,
- 0x2fd45701, 0x962f8ce9, 0x0d2e83f3, 0xb4d5581b, 0xa5a83262,
- 0x1c53e98a},
- {0x00000000, 0x9d0fe176, 0xe16ec4ad, 0x7c6125db, 0x19ac8f1b,
- 0x84a36e6d, 0xf8c24bb6, 0x65cdaac0, 0x33591e36, 0xae56ff40,
- 0xd237da9b, 0x4f383bed, 0x2af5912d, 0xb7fa705b, 0xcb9b5580,
- 0x5694b4f6, 0x66b23c6c, 0xfbbddd1a, 0x87dcf8c1, 0x1ad319b7,
- 0x7f1eb377, 0xe2115201, 0x9e7077da, 0x037f96ac, 0x55eb225a,
- 0xc8e4c32c, 0xb485e6f7, 0x298a0781, 0x4c47ad41, 0xd1484c37,
- 0xad2969ec, 0x3026889a, 0xcd6478d8, 0x506b99ae, 0x2c0abc75,
- 0xb1055d03, 0xd4c8f7c3, 0x49c716b5, 0x35a6336e, 0xa8a9d218,
- 0xfe3d66ee, 0x63328798, 0x1f53a243, 0x825c4335, 0xe791e9f5,
- 0x7a9e0883, 0x06ff2d58, 0x9bf0cc2e, 0xabd644b4, 0x36d9a5c2,
- 0x4ab88019, 0xd7b7616f, 0xb27acbaf, 0x2f752ad9, 0x53140f02,
- 0xce1bee74, 0x988f5a82, 0x0580bbf4, 0x79e19e2f, 0xe4ee7f59,
- 0x8123d599, 0x1c2c34ef, 0x604d1134, 0xfd42f042, 0x41b9f7f1,
- 0xdcb61687, 0xa0d7335c, 0x3dd8d22a, 0x581578ea, 0xc51a999c,
- 0xb97bbc47, 0x24745d31, 0x72e0e9c7, 0xefef08b1, 0x938e2d6a,
- 0x0e81cc1c, 0x6b4c66dc, 0xf64387aa, 0x8a22a271, 0x172d4307,
- 0x270bcb9d, 0xba042aeb, 0xc6650f30, 0x5b6aee46, 0x3ea74486,
- 0xa3a8a5f0, 0xdfc9802b, 0x42c6615d, 0x1452d5ab, 0x895d34dd,
- 0xf53c1106, 0x6833f070, 0x0dfe5ab0, 0x90f1bbc6, 0xec909e1d,
- 0x719f7f6b, 0x8cdd8f29, 0x11d26e5f, 0x6db34b84, 0xf0bcaaf2,
- 0x95710032, 0x087ee144, 0x741fc49f, 0xe91025e9, 0xbf84911f,
- 0x228b7069, 0x5eea55b2, 0xc3e5b4c4, 0xa6281e04, 0x3b27ff72,
- 0x4746daa9, 0xda493bdf, 0xea6fb345, 0x77605233, 0x0b0177e8,
- 0x960e969e, 0xf3c33c5e, 0x6eccdd28, 0x12adf8f3, 0x8fa21985,
- 0xd936ad73, 0x44394c05, 0x385869de, 0xa55788a8, 0xc09a2268,
- 0x5d95c31e, 0x21f4e6c5, 0xbcfb07b3, 0x8373efe2, 0x1e7c0e94,
- 0x621d2b4f, 0xff12ca39, 0x9adf60f9, 0x07d0818f, 0x7bb1a454,
- 0xe6be4522, 0xb02af1d4, 0x2d2510a2, 0x51443579, 0xcc4bd40f,
- 0xa9867ecf, 0x34899fb9, 0x48e8ba62, 0xd5e75b14, 0xe5c1d38e,
- 0x78ce32f8, 0x04af1723, 0x99a0f655, 0xfc6d5c95, 0x6162bde3,
- 0x1d039838, 0x800c794e, 0xd698cdb8, 0x4b972cce, 0x37f60915,
- 0xaaf9e863, 0xcf3442a3, 0x523ba3d5, 0x2e5a860e, 0xb3556778,
- 0x4e17973a, 0xd318764c, 0xaf795397, 0x3276b2e1, 0x57bb1821,
- 0xcab4f957, 0xb6d5dc8c, 0x2bda3dfa, 0x7d4e890c, 0xe041687a,
- 0x9c204da1, 0x012facd7, 0x64e20617, 0xf9ede761, 0x858cc2ba,
- 0x188323cc, 0x28a5ab56, 0xb5aa4a20, 0xc9cb6ffb, 0x54c48e8d,
- 0x3109244d, 0xac06c53b, 0xd067e0e0, 0x4d680196, 0x1bfcb560,
- 0x86f35416, 0xfa9271cd, 0x679d90bb, 0x02503a7b, 0x9f5fdb0d,
- 0xe33efed6, 0x7e311fa0, 0xc2ca1813, 0x5fc5f965, 0x23a4dcbe,
- 0xbeab3dc8, 0xdb669708, 0x4669767e, 0x3a0853a5, 0xa707b2d3,
- 0xf1930625, 0x6c9ce753, 0x10fdc288, 0x8df223fe, 0xe83f893e,
- 0x75306848, 0x09514d93, 0x945eace5, 0xa478247f, 0x3977c509,
- 0x4516e0d2, 0xd81901a4, 0xbdd4ab64, 0x20db4a12, 0x5cba6fc9,
- 0xc1b58ebf, 0x97213a49, 0x0a2edb3f, 0x764ffee4, 0xeb401f92,
- 0x8e8db552, 0x13825424, 0x6fe371ff, 0xf2ec9089, 0x0fae60cb,
- 0x92a181bd, 0xeec0a466, 0x73cf4510, 0x1602efd0, 0x8b0d0ea6,
- 0xf76c2b7d, 0x6a63ca0b, 0x3cf77efd, 0xa1f89f8b, 0xdd99ba50,
- 0x40965b26, 0x255bf1e6, 0xb8541090, 0xc435354b, 0x593ad43d,
- 0x691c5ca7, 0xf413bdd1, 0x8872980a, 0x157d797c, 0x70b0d3bc,
- 0xedbf32ca, 0x91de1711, 0x0cd1f667, 0x5a454291, 0xc74aa3e7,
- 0xbb2b863c, 0x2624674a, 0x43e9cd8a, 0xdee62cfc, 0xa2870927,
- 0x3f88e851},
- {0x00000000, 0xdd96d985, 0x605cb54b, 0xbdca6cce, 0xc0b96a96,
- 0x1d2fb313, 0xa0e5dfdd, 0x7d730658, 0x5a03d36d, 0x87950ae8,
- 0x3a5f6626, 0xe7c9bfa3, 0x9abab9fb, 0x472c607e, 0xfae60cb0,
- 0x2770d535, 0xb407a6da, 0x69917f5f, 0xd45b1391, 0x09cdca14,
- 0x74becc4c, 0xa92815c9, 0x14e27907, 0xc974a082, 0xee0475b7,
- 0x3392ac32, 0x8e58c0fc, 0x53ce1979, 0x2ebd1f21, 0xf32bc6a4,
- 0x4ee1aa6a, 0x937773ef, 0xb37e4bf5, 0x6ee89270, 0xd322febe,
- 0x0eb4273b, 0x73c72163, 0xae51f8e6, 0x139b9428, 0xce0d4dad,
- 0xe97d9898, 0x34eb411d, 0x89212dd3, 0x54b7f456, 0x29c4f20e,
- 0xf4522b8b, 0x49984745, 0x940e9ec0, 0x0779ed2f, 0xdaef34aa,
- 0x67255864, 0xbab381e1, 0xc7c087b9, 0x1a565e3c, 0xa79c32f2,
- 0x7a0aeb77, 0x5d7a3e42, 0x80ece7c7, 0x3d268b09, 0xe0b0528c,
- 0x9dc354d4, 0x40558d51, 0xfd9fe19f, 0x2009381a, 0xbd8d91ab,
- 0x601b482e, 0xddd124e0, 0x0047fd65, 0x7d34fb3d, 0xa0a222b8,
- 0x1d684e76, 0xc0fe97f3, 0xe78e42c6, 0x3a189b43, 0x87d2f78d,
- 0x5a442e08, 0x27372850, 0xfaa1f1d5, 0x476b9d1b, 0x9afd449e,
- 0x098a3771, 0xd41ceef4, 0x69d6823a, 0xb4405bbf, 0xc9335de7,
- 0x14a58462, 0xa96fe8ac, 0x74f93129, 0x5389e41c, 0x8e1f3d99,
- 0x33d55157, 0xee4388d2, 0x93308e8a, 0x4ea6570f, 0xf36c3bc1,
- 0x2efae244, 0x0ef3da5e, 0xd36503db, 0x6eaf6f15, 0xb339b690,
- 0xce4ab0c8, 0x13dc694d, 0xae160583, 0x7380dc06, 0x54f00933,
- 0x8966d0b6, 0x34acbc78, 0xe93a65fd, 0x944963a5, 0x49dfba20,
- 0xf415d6ee, 0x29830f6b, 0xbaf47c84, 0x6762a501, 0xdaa8c9cf,
- 0x073e104a, 0x7a4d1612, 0xa7dbcf97, 0x1a11a359, 0xc7877adc,
- 0xe0f7afe9, 0x3d61766c, 0x80ab1aa2, 0x5d3dc327, 0x204ec57f,
- 0xfdd81cfa, 0x40127034, 0x9d84a9b1, 0xa06a2517, 0x7dfcfc92,
- 0xc036905c, 0x1da049d9, 0x60d34f81, 0xbd459604, 0x008ffaca,
- 0xdd19234f, 0xfa69f67a, 0x27ff2fff, 0x9a354331, 0x47a39ab4,
- 0x3ad09cec, 0xe7464569, 0x5a8c29a7, 0x871af022, 0x146d83cd,
- 0xc9fb5a48, 0x74313686, 0xa9a7ef03, 0xd4d4e95b, 0x094230de,
- 0xb4885c10, 0x691e8595, 0x4e6e50a0, 0x93f88925, 0x2e32e5eb,
- 0xf3a43c6e, 0x8ed73a36, 0x5341e3b3, 0xee8b8f7d, 0x331d56f8,
- 0x13146ee2, 0xce82b767, 0x7348dba9, 0xaede022c, 0xd3ad0474,
- 0x0e3bddf1, 0xb3f1b13f, 0x6e6768ba, 0x4917bd8f, 0x9481640a,
- 0x294b08c4, 0xf4ddd141, 0x89aed719, 0x54380e9c, 0xe9f26252,
- 0x3464bbd7, 0xa713c838, 0x7a8511bd, 0xc74f7d73, 0x1ad9a4f6,
- 0x67aaa2ae, 0xba3c7b2b, 0x07f617e5, 0xda60ce60, 0xfd101b55,
- 0x2086c2d0, 0x9d4cae1e, 0x40da779b, 0x3da971c3, 0xe03fa846,
- 0x5df5c488, 0x80631d0d, 0x1de7b4bc, 0xc0716d39, 0x7dbb01f7,
- 0xa02dd872, 0xdd5ede2a, 0x00c807af, 0xbd026b61, 0x6094b2e4,
- 0x47e467d1, 0x9a72be54, 0x27b8d29a, 0xfa2e0b1f, 0x875d0d47,
- 0x5acbd4c2, 0xe701b80c, 0x3a976189, 0xa9e01266, 0x7476cbe3,
- 0xc9bca72d, 0x142a7ea8, 0x695978f0, 0xb4cfa175, 0x0905cdbb,
- 0xd493143e, 0xf3e3c10b, 0x2e75188e, 0x93bf7440, 0x4e29adc5,
- 0x335aab9d, 0xeecc7218, 0x53061ed6, 0x8e90c753, 0xae99ff49,
- 0x730f26cc, 0xcec54a02, 0x13539387, 0x6e2095df, 0xb3b64c5a,
- 0x0e7c2094, 0xd3eaf911, 0xf49a2c24, 0x290cf5a1, 0x94c6996f,
- 0x495040ea, 0x342346b2, 0xe9b59f37, 0x547ff3f9, 0x89e92a7c,
- 0x1a9e5993, 0xc7088016, 0x7ac2ecd8, 0xa754355d, 0xda273305,
- 0x07b1ea80, 0xba7b864e, 0x67ed5fcb, 0x409d8afe, 0x9d0b537b,
- 0x20c13fb5, 0xfd57e630, 0x8024e068, 0x5db239ed, 0xe0785523,
- 0x3dee8ca6},
- {0x00000000, 0x9ba54c6f, 0xec3b9e9f, 0x779ed2f0, 0x03063b7f,
- 0x98a37710, 0xef3da5e0, 0x7498e98f, 0x060c76fe, 0x9da93a91,
- 0xea37e861, 0x7192a40e, 0x050a4d81, 0x9eaf01ee, 0xe931d31e,
- 0x72949f71, 0x0c18edfc, 0x97bda193, 0xe0237363, 0x7b863f0c,
- 0x0f1ed683, 0x94bb9aec, 0xe325481c, 0x78800473, 0x0a149b02,
- 0x91b1d76d, 0xe62f059d, 0x7d8a49f2, 0x0912a07d, 0x92b7ec12,
- 0xe5293ee2, 0x7e8c728d, 0x1831dbf8, 0x83949797, 0xf40a4567,
- 0x6faf0908, 0x1b37e087, 0x8092ace8, 0xf70c7e18, 0x6ca93277,
- 0x1e3dad06, 0x8598e169, 0xf2063399, 0x69a37ff6, 0x1d3b9679,
- 0x869eda16, 0xf10008e6, 0x6aa54489, 0x14293604, 0x8f8c7a6b,
- 0xf812a89b, 0x63b7e4f4, 0x172f0d7b, 0x8c8a4114, 0xfb1493e4,
- 0x60b1df8b, 0x122540fa, 0x89800c95, 0xfe1ede65, 0x65bb920a,
- 0x11237b85, 0x8a8637ea, 0xfd18e51a, 0x66bda975, 0x3063b7f0,
- 0xabc6fb9f, 0xdc58296f, 0x47fd6500, 0x33658c8f, 0xa8c0c0e0,
- 0xdf5e1210, 0x44fb5e7f, 0x366fc10e, 0xadca8d61, 0xda545f91,
- 0x41f113fe, 0x3569fa71, 0xaeccb61e, 0xd95264ee, 0x42f72881,
- 0x3c7b5a0c, 0xa7de1663, 0xd040c493, 0x4be588fc, 0x3f7d6173,
- 0xa4d82d1c, 0xd346ffec, 0x48e3b383, 0x3a772cf2, 0xa1d2609d,
- 0xd64cb26d, 0x4de9fe02, 0x3971178d, 0xa2d45be2, 0xd54a8912,
- 0x4eefc57d, 0x28526c08, 0xb3f72067, 0xc469f297, 0x5fccbef8,
- 0x2b545777, 0xb0f11b18, 0xc76fc9e8, 0x5cca8587, 0x2e5e1af6,
- 0xb5fb5699, 0xc2658469, 0x59c0c806, 0x2d582189, 0xb6fd6de6,
- 0xc163bf16, 0x5ac6f379, 0x244a81f4, 0xbfefcd9b, 0xc8711f6b,
- 0x53d45304, 0x274cba8b, 0xbce9f6e4, 0xcb772414, 0x50d2687b,
- 0x2246f70a, 0xb9e3bb65, 0xce7d6995, 0x55d825fa, 0x2140cc75,
- 0xbae5801a, 0xcd7b52ea, 0x56de1e85, 0x60c76fe0, 0xfb62238f,
- 0x8cfcf17f, 0x1759bd10, 0x63c1549f, 0xf86418f0, 0x8ffaca00,
- 0x145f866f, 0x66cb191e, 0xfd6e5571, 0x8af08781, 0x1155cbee,
- 0x65cd2261, 0xfe686e0e, 0x89f6bcfe, 0x1253f091, 0x6cdf821c,
- 0xf77ace73, 0x80e41c83, 0x1b4150ec, 0x6fd9b963, 0xf47cf50c,
- 0x83e227fc, 0x18476b93, 0x6ad3f4e2, 0xf176b88d, 0x86e86a7d,
- 0x1d4d2612, 0x69d5cf9d, 0xf27083f2, 0x85ee5102, 0x1e4b1d6d,
- 0x78f6b418, 0xe353f877, 0x94cd2a87, 0x0f6866e8, 0x7bf08f67,
- 0xe055c308, 0x97cb11f8, 0x0c6e5d97, 0x7efac2e6, 0xe55f8e89,
- 0x92c15c79, 0x09641016, 0x7dfcf999, 0xe659b5f6, 0x91c76706,
- 0x0a622b69, 0x74ee59e4, 0xef4b158b, 0x98d5c77b, 0x03708b14,
- 0x77e8629b, 0xec4d2ef4, 0x9bd3fc04, 0x0076b06b, 0x72e22f1a,
- 0xe9476375, 0x9ed9b185, 0x057cfdea, 0x71e41465, 0xea41580a,
- 0x9ddf8afa, 0x067ac695, 0x50a4d810, 0xcb01947f, 0xbc9f468f,
- 0x273a0ae0, 0x53a2e36f, 0xc807af00, 0xbf997df0, 0x243c319f,
- 0x56a8aeee, 0xcd0de281, 0xba933071, 0x21367c1e, 0x55ae9591,
- 0xce0bd9fe, 0xb9950b0e, 0x22304761, 0x5cbc35ec, 0xc7197983,
- 0xb087ab73, 0x2b22e71c, 0x5fba0e93, 0xc41f42fc, 0xb381900c,
- 0x2824dc63, 0x5ab04312, 0xc1150f7d, 0xb68bdd8d, 0x2d2e91e2,
- 0x59b6786d, 0xc2133402, 0xb58de6f2, 0x2e28aa9d, 0x489503e8,
- 0xd3304f87, 0xa4ae9d77, 0x3f0bd118, 0x4b933897, 0xd03674f8,
- 0xa7a8a608, 0x3c0dea67, 0x4e997516, 0xd53c3979, 0xa2a2eb89,
- 0x3907a7e6, 0x4d9f4e69, 0xd63a0206, 0xa1a4d0f6, 0x3a019c99,
- 0x448dee14, 0xdf28a27b, 0xa8b6708b, 0x33133ce4, 0x478bd56b,
- 0xdc2e9904, 0xabb04bf4, 0x3015079b, 0x428198ea, 0xd924d485,
- 0xaeba0675, 0x351f4a1a, 0x4187a395, 0xda22effa, 0xadbc3d0a,
- 0x36197165},
- {0x00000000, 0xc18edfc0, 0x586cb9c1, 0x99e26601, 0xb0d97382,
- 0x7157ac42, 0xe8b5ca43, 0x293b1583, 0xbac3e145, 0x7b4d3e85,
- 0xe2af5884, 0x23218744, 0x0a1a92c7, 0xcb944d07, 0x52762b06,
- 0x93f8f4c6, 0xaef6c4cb, 0x6f781b0b, 0xf69a7d0a, 0x3714a2ca,
- 0x1e2fb749, 0xdfa16889, 0x46430e88, 0x87cdd148, 0x1435258e,
- 0xd5bbfa4e, 0x4c599c4f, 0x8dd7438f, 0xa4ec560c, 0x656289cc,
- 0xfc80efcd, 0x3d0e300d, 0x869c8fd7, 0x47125017, 0xdef03616,
- 0x1f7ee9d6, 0x3645fc55, 0xf7cb2395, 0x6e294594, 0xafa79a54,
- 0x3c5f6e92, 0xfdd1b152, 0x6433d753, 0xa5bd0893, 0x8c861d10,
- 0x4d08c2d0, 0xd4eaa4d1, 0x15647b11, 0x286a4b1c, 0xe9e494dc,
- 0x7006f2dd, 0xb1882d1d, 0x98b3389e, 0x593de75e, 0xc0df815f,
- 0x01515e9f, 0x92a9aa59, 0x53277599, 0xcac51398, 0x0b4bcc58,
- 0x2270d9db, 0xe3fe061b, 0x7a1c601a, 0xbb92bfda, 0xd64819ef,
- 0x17c6c62f, 0x8e24a02e, 0x4faa7fee, 0x66916a6d, 0xa71fb5ad,
- 0x3efdd3ac, 0xff730c6c, 0x6c8bf8aa, 0xad05276a, 0x34e7416b,
- 0xf5699eab, 0xdc528b28, 0x1ddc54e8, 0x843e32e9, 0x45b0ed29,
- 0x78bedd24, 0xb93002e4, 0x20d264e5, 0xe15cbb25, 0xc867aea6,
- 0x09e97166, 0x900b1767, 0x5185c8a7, 0xc27d3c61, 0x03f3e3a1,
- 0x9a1185a0, 0x5b9f5a60, 0x72a44fe3, 0xb32a9023, 0x2ac8f622,
- 0xeb4629e2, 0x50d49638, 0x915a49f8, 0x08b82ff9, 0xc936f039,
- 0xe00de5ba, 0x21833a7a, 0xb8615c7b, 0x79ef83bb, 0xea17777d,
- 0x2b99a8bd, 0xb27bcebc, 0x73f5117c, 0x5ace04ff, 0x9b40db3f,
- 0x02a2bd3e, 0xc32c62fe, 0xfe2252f3, 0x3fac8d33, 0xa64eeb32,
- 0x67c034f2, 0x4efb2171, 0x8f75feb1, 0x169798b0, 0xd7194770,
- 0x44e1b3b6, 0x856f6c76, 0x1c8d0a77, 0xdd03d5b7, 0xf438c034,
- 0x35b61ff4, 0xac5479f5, 0x6ddaa635, 0x77e1359f, 0xb66fea5f,
- 0x2f8d8c5e, 0xee03539e, 0xc738461d, 0x06b699dd, 0x9f54ffdc,
- 0x5eda201c, 0xcd22d4da, 0x0cac0b1a, 0x954e6d1b, 0x54c0b2db,
- 0x7dfba758, 0xbc757898, 0x25971e99, 0xe419c159, 0xd917f154,
- 0x18992e94, 0x817b4895, 0x40f59755, 0x69ce82d6, 0xa8405d16,
- 0x31a23b17, 0xf02ce4d7, 0x63d41011, 0xa25acfd1, 0x3bb8a9d0,
- 0xfa367610, 0xd30d6393, 0x1283bc53, 0x8b61da52, 0x4aef0592,
- 0xf17dba48, 0x30f36588, 0xa9110389, 0x689fdc49, 0x41a4c9ca,
- 0x802a160a, 0x19c8700b, 0xd846afcb, 0x4bbe5b0d, 0x8a3084cd,
- 0x13d2e2cc, 0xd25c3d0c, 0xfb67288f, 0x3ae9f74f, 0xa30b914e,
- 0x62854e8e, 0x5f8b7e83, 0x9e05a143, 0x07e7c742, 0xc6691882,
- 0xef520d01, 0x2edcd2c1, 0xb73eb4c0, 0x76b06b00, 0xe5489fc6,
- 0x24c64006, 0xbd242607, 0x7caaf9c7, 0x5591ec44, 0x941f3384,
- 0x0dfd5585, 0xcc738a45, 0xa1a92c70, 0x6027f3b0, 0xf9c595b1,
- 0x384b4a71, 0x11705ff2, 0xd0fe8032, 0x491ce633, 0x889239f3,
- 0x1b6acd35, 0xdae412f5, 0x430674f4, 0x8288ab34, 0xabb3beb7,
- 0x6a3d6177, 0xf3df0776, 0x3251d8b6, 0x0f5fe8bb, 0xced1377b,
- 0x5733517a, 0x96bd8eba, 0xbf869b39, 0x7e0844f9, 0xe7ea22f8,
- 0x2664fd38, 0xb59c09fe, 0x7412d63e, 0xedf0b03f, 0x2c7e6fff,
- 0x05457a7c, 0xc4cba5bc, 0x5d29c3bd, 0x9ca71c7d, 0x2735a3a7,
- 0xe6bb7c67, 0x7f591a66, 0xbed7c5a6, 0x97ecd025, 0x56620fe5,
- 0xcf8069e4, 0x0e0eb624, 0x9df642e2, 0x5c789d22, 0xc59afb23,
- 0x041424e3, 0x2d2f3160, 0xeca1eea0, 0x754388a1, 0xb4cd5761,
- 0x89c3676c, 0x484db8ac, 0xd1afdead, 0x1021016d, 0x391a14ee,
- 0xf894cb2e, 0x6176ad2f, 0xa0f872ef, 0x33008629, 0xf28e59e9,
- 0x6b6c3fe8, 0xaae2e028, 0x83d9f5ab, 0x42572a6b, 0xdbb54c6a,
- 0x1a3b93aa},
- {0x00000000, 0xefc26b3e, 0x04f5d03d, 0xeb37bb03, 0x09eba07a,
- 0xe629cb44, 0x0d1e7047, 0xe2dc1b79, 0x13d740f4, 0xfc152bca,
- 0x172290c9, 0xf8e0fbf7, 0x1a3ce08e, 0xf5fe8bb0, 0x1ec930b3,
- 0xf10b5b8d, 0x27ae81e8, 0xc86cead6, 0x235b51d5, 0xcc993aeb,
- 0x2e452192, 0xc1874aac, 0x2ab0f1af, 0xc5729a91, 0x3479c11c,
- 0xdbbbaa22, 0x308c1121, 0xdf4e7a1f, 0x3d926166, 0xd2500a58,
- 0x3967b15b, 0xd6a5da65, 0x4f5d03d0, 0xa09f68ee, 0x4ba8d3ed,
- 0xa46ab8d3, 0x46b6a3aa, 0xa974c894, 0x42437397, 0xad8118a9,
- 0x5c8a4324, 0xb348281a, 0x587f9319, 0xb7bdf827, 0x5561e35e,
- 0xbaa38860, 0x51943363, 0xbe56585d, 0x68f38238, 0x8731e906,
- 0x6c065205, 0x83c4393b, 0x61182242, 0x8eda497c, 0x65edf27f,
- 0x8a2f9941, 0x7b24c2cc, 0x94e6a9f2, 0x7fd112f1, 0x901379cf,
- 0x72cf62b6, 0x9d0d0988, 0x763ab28b, 0x99f8d9b5, 0x9eba07a0,
- 0x71786c9e, 0x9a4fd79d, 0x758dbca3, 0x9751a7da, 0x7893cce4,
- 0x93a477e7, 0x7c661cd9, 0x8d6d4754, 0x62af2c6a, 0x89989769,
- 0x665afc57, 0x8486e72e, 0x6b448c10, 0x80733713, 0x6fb15c2d,
- 0xb9148648, 0x56d6ed76, 0xbde15675, 0x52233d4b, 0xb0ff2632,
- 0x5f3d4d0c, 0xb40af60f, 0x5bc89d31, 0xaac3c6bc, 0x4501ad82,
- 0xae361681, 0x41f47dbf, 0xa32866c6, 0x4cea0df8, 0xa7ddb6fb,
- 0x481fddc5, 0xd1e70470, 0x3e256f4e, 0xd512d44d, 0x3ad0bf73,
- 0xd80ca40a, 0x37cecf34, 0xdcf97437, 0x333b1f09, 0xc2304484,
- 0x2df22fba, 0xc6c594b9, 0x2907ff87, 0xcbdbe4fe, 0x24198fc0,
- 0xcf2e34c3, 0x20ec5ffd, 0xf6498598, 0x198beea6, 0xf2bc55a5,
- 0x1d7e3e9b, 0xffa225e2, 0x10604edc, 0xfb57f5df, 0x14959ee1,
- 0xe59ec56c, 0x0a5cae52, 0xe16b1551, 0x0ea97e6f, 0xec756516,
- 0x03b70e28, 0xe880b52b, 0x0742de15, 0xe6050901, 0x09c7623f,
- 0xe2f0d93c, 0x0d32b202, 0xefeea97b, 0x002cc245, 0xeb1b7946,
- 0x04d91278, 0xf5d249f5, 0x1a1022cb, 0xf12799c8, 0x1ee5f2f6,
- 0xfc39e98f, 0x13fb82b1, 0xf8cc39b2, 0x170e528c, 0xc1ab88e9,
- 0x2e69e3d7, 0xc55e58d4, 0x2a9c33ea, 0xc8402893, 0x278243ad,
- 0xccb5f8ae, 0x23779390, 0xd27cc81d, 0x3dbea323, 0xd6891820,
- 0x394b731e, 0xdb976867, 0x34550359, 0xdf62b85a, 0x30a0d364,
- 0xa9580ad1, 0x469a61ef, 0xadaddaec, 0x426fb1d2, 0xa0b3aaab,
- 0x4f71c195, 0xa4467a96, 0x4b8411a8, 0xba8f4a25, 0x554d211b,
- 0xbe7a9a18, 0x51b8f126, 0xb364ea5f, 0x5ca68161, 0xb7913a62,
- 0x5853515c, 0x8ef68b39, 0x6134e007, 0x8a035b04, 0x65c1303a,
- 0x871d2b43, 0x68df407d, 0x83e8fb7e, 0x6c2a9040, 0x9d21cbcd,
- 0x72e3a0f3, 0x99d41bf0, 0x761670ce, 0x94ca6bb7, 0x7b080089,
- 0x903fbb8a, 0x7ffdd0b4, 0x78bf0ea1, 0x977d659f, 0x7c4ade9c,
- 0x9388b5a2, 0x7154aedb, 0x9e96c5e5, 0x75a17ee6, 0x9a6315d8,
- 0x6b684e55, 0x84aa256b, 0x6f9d9e68, 0x805ff556, 0x6283ee2f,
- 0x8d418511, 0x66763e12, 0x89b4552c, 0x5f118f49, 0xb0d3e477,
- 0x5be45f74, 0xb426344a, 0x56fa2f33, 0xb938440d, 0x520fff0e,
- 0xbdcd9430, 0x4cc6cfbd, 0xa304a483, 0x48331f80, 0xa7f174be,
- 0x452d6fc7, 0xaaef04f9, 0x41d8bffa, 0xae1ad4c4, 0x37e20d71,
- 0xd820664f, 0x3317dd4c, 0xdcd5b672, 0x3e09ad0b, 0xd1cbc635,
- 0x3afc7d36, 0xd53e1608, 0x24354d85, 0xcbf726bb, 0x20c09db8,
- 0xcf02f686, 0x2ddeedff, 0xc21c86c1, 0x292b3dc2, 0xc6e956fc,
- 0x104c8c99, 0xff8ee7a7, 0x14b95ca4, 0xfb7b379a, 0x19a72ce3,
- 0xf66547dd, 0x1d52fcde, 0xf29097e0, 0x039bcc6d, 0xec59a753,
- 0x076e1c50, 0xe8ac776e, 0x0a706c17, 0xe5b20729, 0x0e85bc2a,
- 0xe147d714},
- {0x00000000, 0x177b1443, 0x2ef62886, 0x398d3cc5, 0x5dec510c,
- 0x4a97454f, 0x731a798a, 0x64616dc9, 0xbbd8a218, 0xaca3b65b,
- 0x952e8a9e, 0x82559edd, 0xe634f314, 0xf14fe757, 0xc8c2db92,
- 0xdfb9cfd1, 0xacc04271, 0xbbbb5632, 0x82366af7, 0x954d7eb4,
- 0xf12c137d, 0xe657073e, 0xdfda3bfb, 0xc8a12fb8, 0x1718e069,
- 0x0063f42a, 0x39eec8ef, 0x2e95dcac, 0x4af4b165, 0x5d8fa526,
- 0x640299e3, 0x73798da0, 0x82f182a3, 0x958a96e0, 0xac07aa25,
- 0xbb7cbe66, 0xdf1dd3af, 0xc866c7ec, 0xf1ebfb29, 0xe690ef6a,
- 0x392920bb, 0x2e5234f8, 0x17df083d, 0x00a41c7e, 0x64c571b7,
- 0x73be65f4, 0x4a335931, 0x5d484d72, 0x2e31c0d2, 0x394ad491,
- 0x00c7e854, 0x17bcfc17, 0x73dd91de, 0x64a6859d, 0x5d2bb958,
- 0x4a50ad1b, 0x95e962ca, 0x82927689, 0xbb1f4a4c, 0xac645e0f,
- 0xc80533c6, 0xdf7e2785, 0xe6f31b40, 0xf1880f03, 0xde920307,
- 0xc9e91744, 0xf0642b81, 0xe71f3fc2, 0x837e520b, 0x94054648,
- 0xad887a8d, 0xbaf36ece, 0x654aa11f, 0x7231b55c, 0x4bbc8999,
- 0x5cc79dda, 0x38a6f013, 0x2fdde450, 0x1650d895, 0x012bccd6,
- 0x72524176, 0x65295535, 0x5ca469f0, 0x4bdf7db3, 0x2fbe107a,
- 0x38c50439, 0x014838fc, 0x16332cbf, 0xc98ae36e, 0xdef1f72d,
- 0xe77ccbe8, 0xf007dfab, 0x9466b262, 0x831da621, 0xba909ae4,
- 0xadeb8ea7, 0x5c6381a4, 0x4b1895e7, 0x7295a922, 0x65eebd61,
- 0x018fd0a8, 0x16f4c4eb, 0x2f79f82e, 0x3802ec6d, 0xe7bb23bc,
- 0xf0c037ff, 0xc94d0b3a, 0xde361f79, 0xba5772b0, 0xad2c66f3,
- 0x94a15a36, 0x83da4e75, 0xf0a3c3d5, 0xe7d8d796, 0xde55eb53,
- 0xc92eff10, 0xad4f92d9, 0xba34869a, 0x83b9ba5f, 0x94c2ae1c,
- 0x4b7b61cd, 0x5c00758e, 0x658d494b, 0x72f65d08, 0x169730c1,
- 0x01ec2482, 0x38611847, 0x2f1a0c04, 0x6655004f, 0x712e140c,
- 0x48a328c9, 0x5fd83c8a, 0x3bb95143, 0x2cc24500, 0x154f79c5,
- 0x02346d86, 0xdd8da257, 0xcaf6b614, 0xf37b8ad1, 0xe4009e92,
- 0x8061f35b, 0x971ae718, 0xae97dbdd, 0xb9eccf9e, 0xca95423e,
- 0xddee567d, 0xe4636ab8, 0xf3187efb, 0x97791332, 0x80020771,
- 0xb98f3bb4, 0xaef42ff7, 0x714de026, 0x6636f465, 0x5fbbc8a0,
- 0x48c0dce3, 0x2ca1b12a, 0x3bdaa569, 0x025799ac, 0x152c8def,
- 0xe4a482ec, 0xf3df96af, 0xca52aa6a, 0xdd29be29, 0xb948d3e0,
- 0xae33c7a3, 0x97befb66, 0x80c5ef25, 0x5f7c20f4, 0x480734b7,
- 0x718a0872, 0x66f11c31, 0x029071f8, 0x15eb65bb, 0x2c66597e,
- 0x3b1d4d3d, 0x4864c09d, 0x5f1fd4de, 0x6692e81b, 0x71e9fc58,
- 0x15889191, 0x02f385d2, 0x3b7eb917, 0x2c05ad54, 0xf3bc6285,
- 0xe4c776c6, 0xdd4a4a03, 0xca315e40, 0xae503389, 0xb92b27ca,
- 0x80a61b0f, 0x97dd0f4c, 0xb8c70348, 0xafbc170b, 0x96312bce,
- 0x814a3f8d, 0xe52b5244, 0xf2504607, 0xcbdd7ac2, 0xdca66e81,
- 0x031fa150, 0x1464b513, 0x2de989d6, 0x3a929d95, 0x5ef3f05c,
- 0x4988e41f, 0x7005d8da, 0x677ecc99, 0x14074139, 0x037c557a,
- 0x3af169bf, 0x2d8a7dfc, 0x49eb1035, 0x5e900476, 0x671d38b3,
- 0x70662cf0, 0xafdfe321, 0xb8a4f762, 0x8129cba7, 0x9652dfe4,
- 0xf233b22d, 0xe548a66e, 0xdcc59aab, 0xcbbe8ee8, 0x3a3681eb,
- 0x2d4d95a8, 0x14c0a96d, 0x03bbbd2e, 0x67dad0e7, 0x70a1c4a4,
- 0x492cf861, 0x5e57ec22, 0x81ee23f3, 0x969537b0, 0xaf180b75,
- 0xb8631f36, 0xdc0272ff, 0xcb7966bc, 0xf2f45a79, 0xe58f4e3a,
- 0x96f6c39a, 0x818dd7d9, 0xb800eb1c, 0xaf7bff5f, 0xcb1a9296,
- 0xdc6186d5, 0xe5ecba10, 0xf297ae53, 0x2d2e6182, 0x3a5575c1,
- 0x03d84904, 0x14a35d47, 0x70c2308e, 0x67b924cd, 0x5e341808,
- 0x494f0c4b}};
-
-local const z_word_t FAR crc_braid_big_table[][256] = {
- {0x0000000000000000, 0x43147b1700000000, 0x8628f62e00000000,
- 0xc53c8d3900000000, 0x0c51ec5d00000000, 0x4f45974a00000000,
- 0x8a791a7300000000, 0xc96d616400000000, 0x18a2d8bb00000000,
- 0x5bb6a3ac00000000, 0x9e8a2e9500000000, 0xdd9e558200000000,
- 0x14f334e600000000, 0x57e74ff100000000, 0x92dbc2c800000000,
- 0xd1cfb9df00000000, 0x7142c0ac00000000, 0x3256bbbb00000000,
- 0xf76a368200000000, 0xb47e4d9500000000, 0x7d132cf100000000,
- 0x3e0757e600000000, 0xfb3bdadf00000000, 0xb82fa1c800000000,
- 0x69e0181700000000, 0x2af4630000000000, 0xefc8ee3900000000,
- 0xacdc952e00000000, 0x65b1f44a00000000, 0x26a58f5d00000000,
- 0xe399026400000000, 0xa08d797300000000, 0xa382f18200000000,
- 0xe0968a9500000000, 0x25aa07ac00000000, 0x66be7cbb00000000,
- 0xafd31ddf00000000, 0xecc766c800000000, 0x29fbebf100000000,
- 0x6aef90e600000000, 0xbb20293900000000, 0xf834522e00000000,
- 0x3d08df1700000000, 0x7e1ca40000000000, 0xb771c56400000000,
- 0xf465be7300000000, 0x3159334a00000000, 0x724d485d00000000,
- 0xd2c0312e00000000, 0x91d44a3900000000, 0x54e8c70000000000,
- 0x17fcbc1700000000, 0xde91dd7300000000, 0x9d85a66400000000,
- 0x58b92b5d00000000, 0x1bad504a00000000, 0xca62e99500000000,
- 0x8976928200000000, 0x4c4a1fbb00000000, 0x0f5e64ac00000000,
- 0xc63305c800000000, 0x85277edf00000000, 0x401bf3e600000000,
- 0x030f88f100000000, 0x070392de00000000, 0x4417e9c900000000,
- 0x812b64f000000000, 0xc23f1fe700000000, 0x0b527e8300000000,
- 0x4846059400000000, 0x8d7a88ad00000000, 0xce6ef3ba00000000,
- 0x1fa14a6500000000, 0x5cb5317200000000, 0x9989bc4b00000000,
- 0xda9dc75c00000000, 0x13f0a63800000000, 0x50e4dd2f00000000,
- 0x95d8501600000000, 0xd6cc2b0100000000, 0x7641527200000000,
- 0x3555296500000000, 0xf069a45c00000000, 0xb37ddf4b00000000,
- 0x7a10be2f00000000, 0x3904c53800000000, 0xfc38480100000000,
- 0xbf2c331600000000, 0x6ee38ac900000000, 0x2df7f1de00000000,
- 0xe8cb7ce700000000, 0xabdf07f000000000, 0x62b2669400000000,
- 0x21a61d8300000000, 0xe49a90ba00000000, 0xa78eebad00000000,
- 0xa481635c00000000, 0xe795184b00000000, 0x22a9957200000000,
- 0x61bdee6500000000, 0xa8d08f0100000000, 0xebc4f41600000000,
- 0x2ef8792f00000000, 0x6dec023800000000, 0xbc23bbe700000000,
- 0xff37c0f000000000, 0x3a0b4dc900000000, 0x791f36de00000000,
- 0xb07257ba00000000, 0xf3662cad00000000, 0x365aa19400000000,
- 0x754eda8300000000, 0xd5c3a3f000000000, 0x96d7d8e700000000,
- 0x53eb55de00000000, 0x10ff2ec900000000, 0xd9924fad00000000,
- 0x9a8634ba00000000, 0x5fbab98300000000, 0x1caec29400000000,
- 0xcd617b4b00000000, 0x8e75005c00000000, 0x4b498d6500000000,
- 0x085df67200000000, 0xc130971600000000, 0x8224ec0100000000,
- 0x4718613800000000, 0x040c1a2f00000000, 0x4f00556600000000,
- 0x0c142e7100000000, 0xc928a34800000000, 0x8a3cd85f00000000,
- 0x4351b93b00000000, 0x0045c22c00000000, 0xc5794f1500000000,
- 0x866d340200000000, 0x57a28ddd00000000, 0x14b6f6ca00000000,
- 0xd18a7bf300000000, 0x929e00e400000000, 0x5bf3618000000000,
- 0x18e71a9700000000, 0xdddb97ae00000000, 0x9ecfecb900000000,
- 0x3e4295ca00000000, 0x7d56eedd00000000, 0xb86a63e400000000,
- 0xfb7e18f300000000, 0x3213799700000000, 0x7107028000000000,
- 0xb43b8fb900000000, 0xf72ff4ae00000000, 0x26e04d7100000000,
- 0x65f4366600000000, 0xa0c8bb5f00000000, 0xe3dcc04800000000,
- 0x2ab1a12c00000000, 0x69a5da3b00000000, 0xac99570200000000,
- 0xef8d2c1500000000, 0xec82a4e400000000, 0xaf96dff300000000,
- 0x6aaa52ca00000000, 0x29be29dd00000000, 0xe0d348b900000000,
- 0xa3c733ae00000000, 0x66fbbe9700000000, 0x25efc58000000000,
- 0xf4207c5f00000000, 0xb734074800000000, 0x72088a7100000000,
- 0x311cf16600000000, 0xf871900200000000, 0xbb65eb1500000000,
- 0x7e59662c00000000, 0x3d4d1d3b00000000, 0x9dc0644800000000,
- 0xded41f5f00000000, 0x1be8926600000000, 0x58fce97100000000,
- 0x9191881500000000, 0xd285f30200000000, 0x17b97e3b00000000,
- 0x54ad052c00000000, 0x8562bcf300000000, 0xc676c7e400000000,
- 0x034a4add00000000, 0x405e31ca00000000, 0x893350ae00000000,
- 0xca272bb900000000, 0x0f1ba68000000000, 0x4c0fdd9700000000,
- 0x4803c7b800000000, 0x0b17bcaf00000000, 0xce2b319600000000,
- 0x8d3f4a8100000000, 0x44522be500000000, 0x074650f200000000,
- 0xc27addcb00000000, 0x816ea6dc00000000, 0x50a11f0300000000,
- 0x13b5641400000000, 0xd689e92d00000000, 0x959d923a00000000,
- 0x5cf0f35e00000000, 0x1fe4884900000000, 0xdad8057000000000,
- 0x99cc7e6700000000, 0x3941071400000000, 0x7a557c0300000000,
- 0xbf69f13a00000000, 0xfc7d8a2d00000000, 0x3510eb4900000000,
- 0x7604905e00000000, 0xb3381d6700000000, 0xf02c667000000000,
- 0x21e3dfaf00000000, 0x62f7a4b800000000, 0xa7cb298100000000,
- 0xe4df529600000000, 0x2db233f200000000, 0x6ea648e500000000,
- 0xab9ac5dc00000000, 0xe88ebecb00000000, 0xeb81363a00000000,
- 0xa8954d2d00000000, 0x6da9c01400000000, 0x2ebdbb0300000000,
- 0xe7d0da6700000000, 0xa4c4a17000000000, 0x61f82c4900000000,
- 0x22ec575e00000000, 0xf323ee8100000000, 0xb037959600000000,
- 0x750b18af00000000, 0x361f63b800000000, 0xff7202dc00000000,
- 0xbc6679cb00000000, 0x795af4f200000000, 0x3a4e8fe500000000,
- 0x9ac3f69600000000, 0xd9d78d8100000000, 0x1ceb00b800000000,
- 0x5fff7baf00000000, 0x96921acb00000000, 0xd58661dc00000000,
- 0x10baece500000000, 0x53ae97f200000000, 0x82612e2d00000000,
- 0xc175553a00000000, 0x0449d80300000000, 0x475da31400000000,
- 0x8e30c27000000000, 0xcd24b96700000000, 0x0818345e00000000,
- 0x4b0c4f4900000000},
- {0x0000000000000000, 0x3e6bc2ef00000000, 0x3dd0f50400000000,
- 0x03bb37eb00000000, 0x7aa0eb0900000000, 0x44cb29e600000000,
- 0x47701e0d00000000, 0x791bdce200000000, 0xf440d71300000000,
- 0xca2b15fc00000000, 0xc990221700000000, 0xf7fbe0f800000000,
- 0x8ee03c1a00000000, 0xb08bfef500000000, 0xb330c91e00000000,
- 0x8d5b0bf100000000, 0xe881ae2700000000, 0xd6ea6cc800000000,
- 0xd5515b2300000000, 0xeb3a99cc00000000, 0x9221452e00000000,
- 0xac4a87c100000000, 0xaff1b02a00000000, 0x919a72c500000000,
- 0x1cc1793400000000, 0x22aabbdb00000000, 0x21118c3000000000,
- 0x1f7a4edf00000000, 0x6661923d00000000, 0x580a50d200000000,
- 0x5bb1673900000000, 0x65daa5d600000000, 0xd0035d4f00000000,
- 0xee689fa000000000, 0xedd3a84b00000000, 0xd3b86aa400000000,
- 0xaaa3b64600000000, 0x94c874a900000000, 0x9773434200000000,
- 0xa91881ad00000000, 0x24438a5c00000000, 0x1a2848b300000000,
- 0x19937f5800000000, 0x27f8bdb700000000, 0x5ee3615500000000,
- 0x6088a3ba00000000, 0x6333945100000000, 0x5d5856be00000000,
- 0x3882f36800000000, 0x06e9318700000000, 0x0552066c00000000,
- 0x3b39c48300000000, 0x4222186100000000, 0x7c49da8e00000000,
- 0x7ff2ed6500000000, 0x41992f8a00000000, 0xccc2247b00000000,
- 0xf2a9e69400000000, 0xf112d17f00000000, 0xcf79139000000000,
- 0xb662cf7200000000, 0x88090d9d00000000, 0x8bb23a7600000000,
- 0xb5d9f89900000000, 0xa007ba9e00000000, 0x9e6c787100000000,
- 0x9dd74f9a00000000, 0xa3bc8d7500000000, 0xdaa7519700000000,
- 0xe4cc937800000000, 0xe777a49300000000, 0xd91c667c00000000,
- 0x54476d8d00000000, 0x6a2caf6200000000, 0x6997988900000000,
- 0x57fc5a6600000000, 0x2ee7868400000000, 0x108c446b00000000,
- 0x1337738000000000, 0x2d5cb16f00000000, 0x488614b900000000,
- 0x76edd65600000000, 0x7556e1bd00000000, 0x4b3d235200000000,
- 0x3226ffb000000000, 0x0c4d3d5f00000000, 0x0ff60ab400000000,
- 0x319dc85b00000000, 0xbcc6c3aa00000000, 0x82ad014500000000,
- 0x811636ae00000000, 0xbf7df44100000000, 0xc66628a300000000,
- 0xf80dea4c00000000, 0xfbb6dda700000000, 0xc5dd1f4800000000,
- 0x7004e7d100000000, 0x4e6f253e00000000, 0x4dd412d500000000,
- 0x73bfd03a00000000, 0x0aa40cd800000000, 0x34cfce3700000000,
- 0x3774f9dc00000000, 0x091f3b3300000000, 0x844430c200000000,
- 0xba2ff22d00000000, 0xb994c5c600000000, 0x87ff072900000000,
- 0xfee4dbcb00000000, 0xc08f192400000000, 0xc3342ecf00000000,
- 0xfd5fec2000000000, 0x988549f600000000, 0xa6ee8b1900000000,
- 0xa555bcf200000000, 0x9b3e7e1d00000000, 0xe225a2ff00000000,
- 0xdc4e601000000000, 0xdff557fb00000000, 0xe19e951400000000,
- 0x6cc59ee500000000, 0x52ae5c0a00000000, 0x51156be100000000,
- 0x6f7ea90e00000000, 0x166575ec00000000, 0x280eb70300000000,
- 0x2bb580e800000000, 0x15de420700000000, 0x010905e600000000,
- 0x3f62c70900000000, 0x3cd9f0e200000000, 0x02b2320d00000000,
- 0x7ba9eeef00000000, 0x45c22c0000000000, 0x46791beb00000000,
- 0x7812d90400000000, 0xf549d2f500000000, 0xcb22101a00000000,
- 0xc89927f100000000, 0xf6f2e51e00000000, 0x8fe939fc00000000,
- 0xb182fb1300000000, 0xb239ccf800000000, 0x8c520e1700000000,
- 0xe988abc100000000, 0xd7e3692e00000000, 0xd4585ec500000000,
- 0xea339c2a00000000, 0x932840c800000000, 0xad43822700000000,
- 0xaef8b5cc00000000, 0x9093772300000000, 0x1dc87cd200000000,
- 0x23a3be3d00000000, 0x201889d600000000, 0x1e734b3900000000,
- 0x676897db00000000, 0x5903553400000000, 0x5ab862df00000000,
- 0x64d3a03000000000, 0xd10a58a900000000, 0xef619a4600000000,
- 0xecdaadad00000000, 0xd2b16f4200000000, 0xabaab3a000000000,
- 0x95c1714f00000000, 0x967a46a400000000, 0xa811844b00000000,
- 0x254a8fba00000000, 0x1b214d5500000000, 0x189a7abe00000000,
- 0x26f1b85100000000, 0x5fea64b300000000, 0x6181a65c00000000,
- 0x623a91b700000000, 0x5c51535800000000, 0x398bf68e00000000,
- 0x07e0346100000000, 0x045b038a00000000, 0x3a30c16500000000,
- 0x432b1d8700000000, 0x7d40df6800000000, 0x7efbe88300000000,
- 0x40902a6c00000000, 0xcdcb219d00000000, 0xf3a0e37200000000,
- 0xf01bd49900000000, 0xce70167600000000, 0xb76bca9400000000,
- 0x8900087b00000000, 0x8abb3f9000000000, 0xb4d0fd7f00000000,
- 0xa10ebf7800000000, 0x9f657d9700000000, 0x9cde4a7c00000000,
- 0xa2b5889300000000, 0xdbae547100000000, 0xe5c5969e00000000,
- 0xe67ea17500000000, 0xd815639a00000000, 0x554e686b00000000,
- 0x6b25aa8400000000, 0x689e9d6f00000000, 0x56f55f8000000000,
- 0x2fee836200000000, 0x1185418d00000000, 0x123e766600000000,
- 0x2c55b48900000000, 0x498f115f00000000, 0x77e4d3b000000000,
- 0x745fe45b00000000, 0x4a3426b400000000, 0x332ffa5600000000,
- 0x0d4438b900000000, 0x0eff0f5200000000, 0x3094cdbd00000000,
- 0xbdcfc64c00000000, 0x83a404a300000000, 0x801f334800000000,
- 0xbe74f1a700000000, 0xc76f2d4500000000, 0xf904efaa00000000,
- 0xfabfd84100000000, 0xc4d41aae00000000, 0x710de23700000000,
- 0x4f6620d800000000, 0x4cdd173300000000, 0x72b6d5dc00000000,
- 0x0bad093e00000000, 0x35c6cbd100000000, 0x367dfc3a00000000,
- 0x08163ed500000000, 0x854d352400000000, 0xbb26f7cb00000000,
- 0xb89dc02000000000, 0x86f602cf00000000, 0xffedde2d00000000,
- 0xc1861cc200000000, 0xc23d2b2900000000, 0xfc56e9c600000000,
- 0x998c4c1000000000, 0xa7e78eff00000000, 0xa45cb91400000000,
- 0x9a377bfb00000000, 0xe32ca71900000000, 0xdd4765f600000000,
- 0xdefc521d00000000, 0xe09790f200000000, 0x6dcc9b0300000000,
- 0x53a759ec00000000, 0x501c6e0700000000, 0x6e77ace800000000,
- 0x176c700a00000000, 0x2907b2e500000000, 0x2abc850e00000000,
- 0x14d747e100000000},
- {0x0000000000000000, 0xc0df8ec100000000, 0xc1b96c5800000000,
- 0x0166e29900000000, 0x8273d9b000000000, 0x42ac577100000000,
- 0x43cab5e800000000, 0x83153b2900000000, 0x45e1c3ba00000000,
- 0x853e4d7b00000000, 0x8458afe200000000, 0x4487212300000000,
- 0xc7921a0a00000000, 0x074d94cb00000000, 0x062b765200000000,
- 0xc6f4f89300000000, 0xcbc4f6ae00000000, 0x0b1b786f00000000,
- 0x0a7d9af600000000, 0xcaa2143700000000, 0x49b72f1e00000000,
- 0x8968a1df00000000, 0x880e434600000000, 0x48d1cd8700000000,
- 0x8e25351400000000, 0x4efabbd500000000, 0x4f9c594c00000000,
- 0x8f43d78d00000000, 0x0c56eca400000000, 0xcc89626500000000,
- 0xcdef80fc00000000, 0x0d300e3d00000000, 0xd78f9c8600000000,
- 0x1750124700000000, 0x1636f0de00000000, 0xd6e97e1f00000000,
- 0x55fc453600000000, 0x9523cbf700000000, 0x9445296e00000000,
- 0x549aa7af00000000, 0x926e5f3c00000000, 0x52b1d1fd00000000,
- 0x53d7336400000000, 0x9308bda500000000, 0x101d868c00000000,
- 0xd0c2084d00000000, 0xd1a4ead400000000, 0x117b641500000000,
- 0x1c4b6a2800000000, 0xdc94e4e900000000, 0xddf2067000000000,
- 0x1d2d88b100000000, 0x9e38b39800000000, 0x5ee73d5900000000,
- 0x5f81dfc000000000, 0x9f5e510100000000, 0x59aaa99200000000,
- 0x9975275300000000, 0x9813c5ca00000000, 0x58cc4b0b00000000,
- 0xdbd9702200000000, 0x1b06fee300000000, 0x1a601c7a00000000,
- 0xdabf92bb00000000, 0xef1948d600000000, 0x2fc6c61700000000,
- 0x2ea0248e00000000, 0xee7faa4f00000000, 0x6d6a916600000000,
- 0xadb51fa700000000, 0xacd3fd3e00000000, 0x6c0c73ff00000000,
- 0xaaf88b6c00000000, 0x6a2705ad00000000, 0x6b41e73400000000,
- 0xab9e69f500000000, 0x288b52dc00000000, 0xe854dc1d00000000,
- 0xe9323e8400000000, 0x29edb04500000000, 0x24ddbe7800000000,
- 0xe40230b900000000, 0xe564d22000000000, 0x25bb5ce100000000,
- 0xa6ae67c800000000, 0x6671e90900000000, 0x67170b9000000000,
- 0xa7c8855100000000, 0x613c7dc200000000, 0xa1e3f30300000000,
- 0xa085119a00000000, 0x605a9f5b00000000, 0xe34fa47200000000,
- 0x23902ab300000000, 0x22f6c82a00000000, 0xe22946eb00000000,
- 0x3896d45000000000, 0xf8495a9100000000, 0xf92fb80800000000,
- 0x39f036c900000000, 0xbae50de000000000, 0x7a3a832100000000,
- 0x7b5c61b800000000, 0xbb83ef7900000000, 0x7d7717ea00000000,
- 0xbda8992b00000000, 0xbcce7bb200000000, 0x7c11f57300000000,
- 0xff04ce5a00000000, 0x3fdb409b00000000, 0x3ebda20200000000,
- 0xfe622cc300000000, 0xf35222fe00000000, 0x338dac3f00000000,
- 0x32eb4ea600000000, 0xf234c06700000000, 0x7121fb4e00000000,
- 0xb1fe758f00000000, 0xb098971600000000, 0x704719d700000000,
- 0xb6b3e14400000000, 0x766c6f8500000000, 0x770a8d1c00000000,
- 0xb7d503dd00000000, 0x34c038f400000000, 0xf41fb63500000000,
- 0xf57954ac00000000, 0x35a6da6d00000000, 0x9f35e17700000000,
- 0x5fea6fb600000000, 0x5e8c8d2f00000000, 0x9e5303ee00000000,
- 0x1d4638c700000000, 0xdd99b60600000000, 0xdcff549f00000000,
- 0x1c20da5e00000000, 0xdad422cd00000000, 0x1a0bac0c00000000,
- 0x1b6d4e9500000000, 0xdbb2c05400000000, 0x58a7fb7d00000000,
- 0x987875bc00000000, 0x991e972500000000, 0x59c119e400000000,
- 0x54f117d900000000, 0x942e991800000000, 0x95487b8100000000,
- 0x5597f54000000000, 0xd682ce6900000000, 0x165d40a800000000,
- 0x173ba23100000000, 0xd7e42cf000000000, 0x1110d46300000000,
- 0xd1cf5aa200000000, 0xd0a9b83b00000000, 0x107636fa00000000,
- 0x93630dd300000000, 0x53bc831200000000, 0x52da618b00000000,
- 0x9205ef4a00000000, 0x48ba7df100000000, 0x8865f33000000000,
- 0x890311a900000000, 0x49dc9f6800000000, 0xcac9a44100000000,
- 0x0a162a8000000000, 0x0b70c81900000000, 0xcbaf46d800000000,
- 0x0d5bbe4b00000000, 0xcd84308a00000000, 0xcce2d21300000000,
- 0x0c3d5cd200000000, 0x8f2867fb00000000, 0x4ff7e93a00000000,
- 0x4e910ba300000000, 0x8e4e856200000000, 0x837e8b5f00000000,
- 0x43a1059e00000000, 0x42c7e70700000000, 0x821869c600000000,
- 0x010d52ef00000000, 0xc1d2dc2e00000000, 0xc0b43eb700000000,
- 0x006bb07600000000, 0xc69f48e500000000, 0x0640c62400000000,
- 0x072624bd00000000, 0xc7f9aa7c00000000, 0x44ec915500000000,
- 0x84331f9400000000, 0x8555fd0d00000000, 0x458a73cc00000000,
- 0x702ca9a100000000, 0xb0f3276000000000, 0xb195c5f900000000,
- 0x714a4b3800000000, 0xf25f701100000000, 0x3280fed000000000,
- 0x33e61c4900000000, 0xf339928800000000, 0x35cd6a1b00000000,
- 0xf512e4da00000000, 0xf474064300000000, 0x34ab888200000000,
- 0xb7beb3ab00000000, 0x77613d6a00000000, 0x7607dff300000000,
- 0xb6d8513200000000, 0xbbe85f0f00000000, 0x7b37d1ce00000000,
- 0x7a51335700000000, 0xba8ebd9600000000, 0x399b86bf00000000,
- 0xf944087e00000000, 0xf822eae700000000, 0x38fd642600000000,
- 0xfe099cb500000000, 0x3ed6127400000000, 0x3fb0f0ed00000000,
- 0xff6f7e2c00000000, 0x7c7a450500000000, 0xbca5cbc400000000,
- 0xbdc3295d00000000, 0x7d1ca79c00000000, 0xa7a3352700000000,
- 0x677cbbe600000000, 0x661a597f00000000, 0xa6c5d7be00000000,
- 0x25d0ec9700000000, 0xe50f625600000000, 0xe46980cf00000000,
- 0x24b60e0e00000000, 0xe242f69d00000000, 0x229d785c00000000,
- 0x23fb9ac500000000, 0xe324140400000000, 0x60312f2d00000000,
- 0xa0eea1ec00000000, 0xa188437500000000, 0x6157cdb400000000,
- 0x6c67c38900000000, 0xacb84d4800000000, 0xaddeafd100000000,
- 0x6d01211000000000, 0xee141a3900000000, 0x2ecb94f800000000,
- 0x2fad766100000000, 0xef72f8a000000000, 0x2986003300000000,
- 0xe9598ef200000000, 0xe83f6c6b00000000, 0x28e0e2aa00000000,
- 0xabf5d98300000000, 0x6b2a574200000000, 0x6a4cb5db00000000,
- 0xaa933b1a00000000},
- {0x0000000000000000, 0x6f4ca59b00000000, 0x9f9e3bec00000000,
- 0xf0d29e7700000000, 0x7f3b060300000000, 0x1077a39800000000,
- 0xe0a53def00000000, 0x8fe9987400000000, 0xfe760c0600000000,
- 0x913aa99d00000000, 0x61e837ea00000000, 0x0ea4927100000000,
- 0x814d0a0500000000, 0xee01af9e00000000, 0x1ed331e900000000,
- 0x719f947200000000, 0xfced180c00000000, 0x93a1bd9700000000,
- 0x637323e000000000, 0x0c3f867b00000000, 0x83d61e0f00000000,
- 0xec9abb9400000000, 0x1c4825e300000000, 0x7304807800000000,
- 0x029b140a00000000, 0x6dd7b19100000000, 0x9d052fe600000000,
- 0xf2498a7d00000000, 0x7da0120900000000, 0x12ecb79200000000,
- 0xe23e29e500000000, 0x8d728c7e00000000, 0xf8db311800000000,
- 0x9797948300000000, 0x67450af400000000, 0x0809af6f00000000,
- 0x87e0371b00000000, 0xe8ac928000000000, 0x187e0cf700000000,
- 0x7732a96c00000000, 0x06ad3d1e00000000, 0x69e1988500000000,
- 0x993306f200000000, 0xf67fa36900000000, 0x79963b1d00000000,
- 0x16da9e8600000000, 0xe60800f100000000, 0x8944a56a00000000,
- 0x0436291400000000, 0x6b7a8c8f00000000, 0x9ba812f800000000,
- 0xf4e4b76300000000, 0x7b0d2f1700000000, 0x14418a8c00000000,
- 0xe49314fb00000000, 0x8bdfb16000000000, 0xfa40251200000000,
- 0x950c808900000000, 0x65de1efe00000000, 0x0a92bb6500000000,
- 0x857b231100000000, 0xea37868a00000000, 0x1ae518fd00000000,
- 0x75a9bd6600000000, 0xf0b7633000000000, 0x9ffbc6ab00000000,
- 0x6f2958dc00000000, 0x0065fd4700000000, 0x8f8c653300000000,
- 0xe0c0c0a800000000, 0x10125edf00000000, 0x7f5efb4400000000,
- 0x0ec16f3600000000, 0x618dcaad00000000, 0x915f54da00000000,
- 0xfe13f14100000000, 0x71fa693500000000, 0x1eb6ccae00000000,
- 0xee6452d900000000, 0x8128f74200000000, 0x0c5a7b3c00000000,
- 0x6316dea700000000, 0x93c440d000000000, 0xfc88e54b00000000,
- 0x73617d3f00000000, 0x1c2dd8a400000000, 0xecff46d300000000,
- 0x83b3e34800000000, 0xf22c773a00000000, 0x9d60d2a100000000,
- 0x6db24cd600000000, 0x02fee94d00000000, 0x8d17713900000000,
- 0xe25bd4a200000000, 0x12894ad500000000, 0x7dc5ef4e00000000,
- 0x086c522800000000, 0x6720f7b300000000, 0x97f269c400000000,
- 0xf8becc5f00000000, 0x7757542b00000000, 0x181bf1b000000000,
- 0xe8c96fc700000000, 0x8785ca5c00000000, 0xf61a5e2e00000000,
- 0x9956fbb500000000, 0x698465c200000000, 0x06c8c05900000000,
- 0x8921582d00000000, 0xe66dfdb600000000, 0x16bf63c100000000,
- 0x79f3c65a00000000, 0xf4814a2400000000, 0x9bcdefbf00000000,
- 0x6b1f71c800000000, 0x0453d45300000000, 0x8bba4c2700000000,
- 0xe4f6e9bc00000000, 0x142477cb00000000, 0x7b68d25000000000,
- 0x0af7462200000000, 0x65bbe3b900000000, 0x95697dce00000000,
- 0xfa25d85500000000, 0x75cc402100000000, 0x1a80e5ba00000000,
- 0xea527bcd00000000, 0x851ede5600000000, 0xe06fc76000000000,
- 0x8f2362fb00000000, 0x7ff1fc8c00000000, 0x10bd591700000000,
- 0x9f54c16300000000, 0xf01864f800000000, 0x00cafa8f00000000,
- 0x6f865f1400000000, 0x1e19cb6600000000, 0x71556efd00000000,
- 0x8187f08a00000000, 0xeecb551100000000, 0x6122cd6500000000,
- 0x0e6e68fe00000000, 0xfebcf68900000000, 0x91f0531200000000,
- 0x1c82df6c00000000, 0x73ce7af700000000, 0x831ce48000000000,
- 0xec50411b00000000, 0x63b9d96f00000000, 0x0cf57cf400000000,
- 0xfc27e28300000000, 0x936b471800000000, 0xe2f4d36a00000000,
- 0x8db876f100000000, 0x7d6ae88600000000, 0x12264d1d00000000,
- 0x9dcfd56900000000, 0xf28370f200000000, 0x0251ee8500000000,
- 0x6d1d4b1e00000000, 0x18b4f67800000000, 0x77f853e300000000,
- 0x872acd9400000000, 0xe866680f00000000, 0x678ff07b00000000,
- 0x08c355e000000000, 0xf811cb9700000000, 0x975d6e0c00000000,
- 0xe6c2fa7e00000000, 0x898e5fe500000000, 0x795cc19200000000,
- 0x1610640900000000, 0x99f9fc7d00000000, 0xf6b559e600000000,
- 0x0667c79100000000, 0x692b620a00000000, 0xe459ee7400000000,
- 0x8b154bef00000000, 0x7bc7d59800000000, 0x148b700300000000,
- 0x9b62e87700000000, 0xf42e4dec00000000, 0x04fcd39b00000000,
- 0x6bb0760000000000, 0x1a2fe27200000000, 0x756347e900000000,
- 0x85b1d99e00000000, 0xeafd7c0500000000, 0x6514e47100000000,
- 0x0a5841ea00000000, 0xfa8adf9d00000000, 0x95c67a0600000000,
- 0x10d8a45000000000, 0x7f9401cb00000000, 0x8f469fbc00000000,
- 0xe00a3a2700000000, 0x6fe3a25300000000, 0x00af07c800000000,
- 0xf07d99bf00000000, 0x9f313c2400000000, 0xeeaea85600000000,
- 0x81e20dcd00000000, 0x713093ba00000000, 0x1e7c362100000000,
- 0x9195ae5500000000, 0xfed90bce00000000, 0x0e0b95b900000000,
- 0x6147302200000000, 0xec35bc5c00000000, 0x837919c700000000,
- 0x73ab87b000000000, 0x1ce7222b00000000, 0x930eba5f00000000,
- 0xfc421fc400000000, 0x0c9081b300000000, 0x63dc242800000000,
- 0x1243b05a00000000, 0x7d0f15c100000000, 0x8ddd8bb600000000,
- 0xe2912e2d00000000, 0x6d78b65900000000, 0x023413c200000000,
- 0xf2e68db500000000, 0x9daa282e00000000, 0xe803954800000000,
- 0x874f30d300000000, 0x779daea400000000, 0x18d10b3f00000000,
- 0x9738934b00000000, 0xf87436d000000000, 0x08a6a8a700000000,
- 0x67ea0d3c00000000, 0x1675994e00000000, 0x79393cd500000000,
- 0x89eba2a200000000, 0xe6a7073900000000, 0x694e9f4d00000000,
- 0x06023ad600000000, 0xf6d0a4a100000000, 0x999c013a00000000,
- 0x14ee8d4400000000, 0x7ba228df00000000, 0x8b70b6a800000000,
- 0xe43c133300000000, 0x6bd58b4700000000, 0x04992edc00000000,
- 0xf44bb0ab00000000, 0x9b07153000000000, 0xea98814200000000,
- 0x85d424d900000000, 0x7506baae00000000, 0x1a4a1f3500000000,
- 0x95a3874100000000, 0xfaef22da00000000, 0x0a3dbcad00000000,
- 0x6571193600000000},
- {0x0000000000000000, 0x85d996dd00000000, 0x4bb55c6000000000,
- 0xce6ccabd00000000, 0x966ab9c000000000, 0x13b32f1d00000000,
- 0xdddfe5a000000000, 0x5806737d00000000, 0x6dd3035a00000000,
- 0xe80a958700000000, 0x26665f3a00000000, 0xa3bfc9e700000000,
- 0xfbb9ba9a00000000, 0x7e602c4700000000, 0xb00ce6fa00000000,
- 0x35d5702700000000, 0xdaa607b400000000, 0x5f7f916900000000,
- 0x91135bd400000000, 0x14cacd0900000000, 0x4cccbe7400000000,
- 0xc91528a900000000, 0x0779e21400000000, 0x82a074c900000000,
- 0xb77504ee00000000, 0x32ac923300000000, 0xfcc0588e00000000,
- 0x7919ce5300000000, 0x211fbd2e00000000, 0xa4c62bf300000000,
- 0x6aaae14e00000000, 0xef73779300000000, 0xf54b7eb300000000,
- 0x7092e86e00000000, 0xbefe22d300000000, 0x3b27b40e00000000,
- 0x6321c77300000000, 0xe6f851ae00000000, 0x28949b1300000000,
- 0xad4d0dce00000000, 0x98987de900000000, 0x1d41eb3400000000,
- 0xd32d218900000000, 0x56f4b75400000000, 0x0ef2c42900000000,
- 0x8b2b52f400000000, 0x4547984900000000, 0xc09e0e9400000000,
- 0x2fed790700000000, 0xaa34efda00000000, 0x6458256700000000,
- 0xe181b3ba00000000, 0xb987c0c700000000, 0x3c5e561a00000000,
- 0xf2329ca700000000, 0x77eb0a7a00000000, 0x423e7a5d00000000,
- 0xc7e7ec8000000000, 0x098b263d00000000, 0x8c52b0e000000000,
- 0xd454c39d00000000, 0x518d554000000000, 0x9fe19ffd00000000,
- 0x1a38092000000000, 0xab918dbd00000000, 0x2e481b6000000000,
- 0xe024d1dd00000000, 0x65fd470000000000, 0x3dfb347d00000000,
- 0xb822a2a000000000, 0x764e681d00000000, 0xf397fec000000000,
- 0xc6428ee700000000, 0x439b183a00000000, 0x8df7d28700000000,
- 0x082e445a00000000, 0x5028372700000000, 0xd5f1a1fa00000000,
- 0x1b9d6b4700000000, 0x9e44fd9a00000000, 0x71378a0900000000,
- 0xf4ee1cd400000000, 0x3a82d66900000000, 0xbf5b40b400000000,
- 0xe75d33c900000000, 0x6284a51400000000, 0xace86fa900000000,
- 0x2931f97400000000, 0x1ce4895300000000, 0x993d1f8e00000000,
- 0x5751d53300000000, 0xd28843ee00000000, 0x8a8e309300000000,
- 0x0f57a64e00000000, 0xc13b6cf300000000, 0x44e2fa2e00000000,
- 0x5edaf30e00000000, 0xdb0365d300000000, 0x156faf6e00000000,
- 0x90b639b300000000, 0xc8b04ace00000000, 0x4d69dc1300000000,
- 0x830516ae00000000, 0x06dc807300000000, 0x3309f05400000000,
- 0xb6d0668900000000, 0x78bcac3400000000, 0xfd653ae900000000,
- 0xa563499400000000, 0x20badf4900000000, 0xeed615f400000000,
- 0x6b0f832900000000, 0x847cf4ba00000000, 0x01a5626700000000,
- 0xcfc9a8da00000000, 0x4a103e0700000000, 0x12164d7a00000000,
- 0x97cfdba700000000, 0x59a3111a00000000, 0xdc7a87c700000000,
- 0xe9aff7e000000000, 0x6c76613d00000000, 0xa21aab8000000000,
- 0x27c33d5d00000000, 0x7fc54e2000000000, 0xfa1cd8fd00000000,
- 0x3470124000000000, 0xb1a9849d00000000, 0x17256aa000000000,
- 0x92fcfc7d00000000, 0x5c9036c000000000, 0xd949a01d00000000,
- 0x814fd36000000000, 0x049645bd00000000, 0xcafa8f0000000000,
- 0x4f2319dd00000000, 0x7af669fa00000000, 0xff2fff2700000000,
- 0x3143359a00000000, 0xb49aa34700000000, 0xec9cd03a00000000,
- 0x694546e700000000, 0xa7298c5a00000000, 0x22f01a8700000000,
- 0xcd836d1400000000, 0x485afbc900000000, 0x8636317400000000,
- 0x03efa7a900000000, 0x5be9d4d400000000, 0xde30420900000000,
- 0x105c88b400000000, 0x95851e6900000000, 0xa0506e4e00000000,
- 0x2589f89300000000, 0xebe5322e00000000, 0x6e3ca4f300000000,
- 0x363ad78e00000000, 0xb3e3415300000000, 0x7d8f8bee00000000,
- 0xf8561d3300000000, 0xe26e141300000000, 0x67b782ce00000000,
- 0xa9db487300000000, 0x2c02deae00000000, 0x7404add300000000,
- 0xf1dd3b0e00000000, 0x3fb1f1b300000000, 0xba68676e00000000,
- 0x8fbd174900000000, 0x0a64819400000000, 0xc4084b2900000000,
- 0x41d1ddf400000000, 0x19d7ae8900000000, 0x9c0e385400000000,
- 0x5262f2e900000000, 0xd7bb643400000000, 0x38c813a700000000,
- 0xbd11857a00000000, 0x737d4fc700000000, 0xf6a4d91a00000000,
- 0xaea2aa6700000000, 0x2b7b3cba00000000, 0xe517f60700000000,
- 0x60ce60da00000000, 0x551b10fd00000000, 0xd0c2862000000000,
- 0x1eae4c9d00000000, 0x9b77da4000000000, 0xc371a93d00000000,
- 0x46a83fe000000000, 0x88c4f55d00000000, 0x0d1d638000000000,
- 0xbcb4e71d00000000, 0x396d71c000000000, 0xf701bb7d00000000,
- 0x72d82da000000000, 0x2ade5edd00000000, 0xaf07c80000000000,
- 0x616b02bd00000000, 0xe4b2946000000000, 0xd167e44700000000,
- 0x54be729a00000000, 0x9ad2b82700000000, 0x1f0b2efa00000000,
- 0x470d5d8700000000, 0xc2d4cb5a00000000, 0x0cb801e700000000,
- 0x8961973a00000000, 0x6612e0a900000000, 0xe3cb767400000000,
- 0x2da7bcc900000000, 0xa87e2a1400000000, 0xf078596900000000,
- 0x75a1cfb400000000, 0xbbcd050900000000, 0x3e1493d400000000,
- 0x0bc1e3f300000000, 0x8e18752e00000000, 0x4074bf9300000000,
- 0xc5ad294e00000000, 0x9dab5a3300000000, 0x1872ccee00000000,
- 0xd61e065300000000, 0x53c7908e00000000, 0x49ff99ae00000000,
- 0xcc260f7300000000, 0x024ac5ce00000000, 0x8793531300000000,
- 0xdf95206e00000000, 0x5a4cb6b300000000, 0x94207c0e00000000,
- 0x11f9ead300000000, 0x242c9af400000000, 0xa1f50c2900000000,
- 0x6f99c69400000000, 0xea40504900000000, 0xb246233400000000,
- 0x379fb5e900000000, 0xf9f37f5400000000, 0x7c2ae98900000000,
- 0x93599e1a00000000, 0x168008c700000000, 0xd8ecc27a00000000,
- 0x5d3554a700000000, 0x053327da00000000, 0x80eab10700000000,
- 0x4e867bba00000000, 0xcb5fed6700000000, 0xfe8a9d4000000000,
- 0x7b530b9d00000000, 0xb53fc12000000000, 0x30e657fd00000000,
- 0x68e0248000000000, 0xed39b25d00000000, 0x235578e000000000,
- 0xa68cee3d00000000},
- {0x0000000000000000, 0x76e10f9d00000000, 0xadc46ee100000000,
- 0xdb25617c00000000, 0x1b8fac1900000000, 0x6d6ea38400000000,
- 0xb64bc2f800000000, 0xc0aacd6500000000, 0x361e593300000000,
- 0x40ff56ae00000000, 0x9bda37d200000000, 0xed3b384f00000000,
- 0x2d91f52a00000000, 0x5b70fab700000000, 0x80559bcb00000000,
- 0xf6b4945600000000, 0x6c3cb26600000000, 0x1addbdfb00000000,
- 0xc1f8dc8700000000, 0xb719d31a00000000, 0x77b31e7f00000000,
- 0x015211e200000000, 0xda77709e00000000, 0xac967f0300000000,
- 0x5a22eb5500000000, 0x2cc3e4c800000000, 0xf7e685b400000000,
- 0x81078a2900000000, 0x41ad474c00000000, 0x374c48d100000000,
- 0xec6929ad00000000, 0x9a88263000000000, 0xd87864cd00000000,
- 0xae996b5000000000, 0x75bc0a2c00000000, 0x035d05b100000000,
- 0xc3f7c8d400000000, 0xb516c74900000000, 0x6e33a63500000000,
- 0x18d2a9a800000000, 0xee663dfe00000000, 0x9887326300000000,
- 0x43a2531f00000000, 0x35435c8200000000, 0xf5e991e700000000,
- 0x83089e7a00000000, 0x582dff0600000000, 0x2eccf09b00000000,
- 0xb444d6ab00000000, 0xc2a5d93600000000, 0x1980b84a00000000,
- 0x6f61b7d700000000, 0xafcb7ab200000000, 0xd92a752f00000000,
- 0x020f145300000000, 0x74ee1bce00000000, 0x825a8f9800000000,
- 0xf4bb800500000000, 0x2f9ee17900000000, 0x597feee400000000,
- 0x99d5238100000000, 0xef342c1c00000000, 0x34114d6000000000,
- 0x42f042fd00000000, 0xf1f7b94100000000, 0x8716b6dc00000000,
- 0x5c33d7a000000000, 0x2ad2d83d00000000, 0xea78155800000000,
- 0x9c991ac500000000, 0x47bc7bb900000000, 0x315d742400000000,
- 0xc7e9e07200000000, 0xb108efef00000000, 0x6a2d8e9300000000,
- 0x1ccc810e00000000, 0xdc664c6b00000000, 0xaa8743f600000000,
- 0x71a2228a00000000, 0x07432d1700000000, 0x9dcb0b2700000000,
- 0xeb2a04ba00000000, 0x300f65c600000000, 0x46ee6a5b00000000,
- 0x8644a73e00000000, 0xf0a5a8a300000000, 0x2b80c9df00000000,
- 0x5d61c64200000000, 0xabd5521400000000, 0xdd345d8900000000,
- 0x06113cf500000000, 0x70f0336800000000, 0xb05afe0d00000000,
- 0xc6bbf19000000000, 0x1d9e90ec00000000, 0x6b7f9f7100000000,
- 0x298fdd8c00000000, 0x5f6ed21100000000, 0x844bb36d00000000,
- 0xf2aabcf000000000, 0x3200719500000000, 0x44e17e0800000000,
- 0x9fc41f7400000000, 0xe92510e900000000, 0x1f9184bf00000000,
- 0x69708b2200000000, 0xb255ea5e00000000, 0xc4b4e5c300000000,
- 0x041e28a600000000, 0x72ff273b00000000, 0xa9da464700000000,
- 0xdf3b49da00000000, 0x45b36fea00000000, 0x3352607700000000,
- 0xe877010b00000000, 0x9e960e9600000000, 0x5e3cc3f300000000,
- 0x28ddcc6e00000000, 0xf3f8ad1200000000, 0x8519a28f00000000,
- 0x73ad36d900000000, 0x054c394400000000, 0xde69583800000000,
- 0xa88857a500000000, 0x68229ac000000000, 0x1ec3955d00000000,
- 0xc5e6f42100000000, 0xb307fbbc00000000, 0xe2ef738300000000,
- 0x940e7c1e00000000, 0x4f2b1d6200000000, 0x39ca12ff00000000,
- 0xf960df9a00000000, 0x8f81d00700000000, 0x54a4b17b00000000,
- 0x2245bee600000000, 0xd4f12ab000000000, 0xa210252d00000000,
- 0x7935445100000000, 0x0fd44bcc00000000, 0xcf7e86a900000000,
- 0xb99f893400000000, 0x62bae84800000000, 0x145be7d500000000,
- 0x8ed3c1e500000000, 0xf832ce7800000000, 0x2317af0400000000,
- 0x55f6a09900000000, 0x955c6dfc00000000, 0xe3bd626100000000,
- 0x3898031d00000000, 0x4e790c8000000000, 0xb8cd98d600000000,
- 0xce2c974b00000000, 0x1509f63700000000, 0x63e8f9aa00000000,
- 0xa34234cf00000000, 0xd5a33b5200000000, 0x0e865a2e00000000,
- 0x786755b300000000, 0x3a97174e00000000, 0x4c7618d300000000,
- 0x975379af00000000, 0xe1b2763200000000, 0x2118bb5700000000,
- 0x57f9b4ca00000000, 0x8cdcd5b600000000, 0xfa3dda2b00000000,
- 0x0c894e7d00000000, 0x7a6841e000000000, 0xa14d209c00000000,
- 0xd7ac2f0100000000, 0x1706e26400000000, 0x61e7edf900000000,
- 0xbac28c8500000000, 0xcc23831800000000, 0x56aba52800000000,
- 0x204aaab500000000, 0xfb6fcbc900000000, 0x8d8ec45400000000,
- 0x4d24093100000000, 0x3bc506ac00000000, 0xe0e067d000000000,
- 0x9601684d00000000, 0x60b5fc1b00000000, 0x1654f38600000000,
- 0xcd7192fa00000000, 0xbb909d6700000000, 0x7b3a500200000000,
- 0x0ddb5f9f00000000, 0xd6fe3ee300000000, 0xa01f317e00000000,
- 0x1318cac200000000, 0x65f9c55f00000000, 0xbedca42300000000,
- 0xc83dabbe00000000, 0x089766db00000000, 0x7e76694600000000,
- 0xa553083a00000000, 0xd3b207a700000000, 0x250693f100000000,
- 0x53e79c6c00000000, 0x88c2fd1000000000, 0xfe23f28d00000000,
- 0x3e893fe800000000, 0x4868307500000000, 0x934d510900000000,
- 0xe5ac5e9400000000, 0x7f2478a400000000, 0x09c5773900000000,
- 0xd2e0164500000000, 0xa40119d800000000, 0x64abd4bd00000000,
- 0x124adb2000000000, 0xc96fba5c00000000, 0xbf8eb5c100000000,
- 0x493a219700000000, 0x3fdb2e0a00000000, 0xe4fe4f7600000000,
- 0x921f40eb00000000, 0x52b58d8e00000000, 0x2454821300000000,
- 0xff71e36f00000000, 0x8990ecf200000000, 0xcb60ae0f00000000,
- 0xbd81a19200000000, 0x66a4c0ee00000000, 0x1045cf7300000000,
- 0xd0ef021600000000, 0xa60e0d8b00000000, 0x7d2b6cf700000000,
- 0x0bca636a00000000, 0xfd7ef73c00000000, 0x8b9ff8a100000000,
- 0x50ba99dd00000000, 0x265b964000000000, 0xe6f15b2500000000,
- 0x901054b800000000, 0x4b3535c400000000, 0x3dd43a5900000000,
- 0xa75c1c6900000000, 0xd1bd13f400000000, 0x0a98728800000000,
- 0x7c797d1500000000, 0xbcd3b07000000000, 0xca32bfed00000000,
- 0x1117de9100000000, 0x67f6d10c00000000, 0x9142455a00000000,
- 0xe7a34ac700000000, 0x3c862bbb00000000, 0x4a67242600000000,
- 0x8acde94300000000, 0xfc2ce6de00000000, 0x270987a200000000,
- 0x51e8883f00000000},
- {0x0000000000000000, 0xe8dbfbb900000000, 0x91b186a800000000,
- 0x796a7d1100000000, 0x63657c8a00000000, 0x8bbe873300000000,
- 0xf2d4fa2200000000, 0x1a0f019b00000000, 0x87cc89cf00000000,
- 0x6f17727600000000, 0x167d0f6700000000, 0xfea6f4de00000000,
- 0xe4a9f54500000000, 0x0c720efc00000000, 0x751873ed00000000,
- 0x9dc3885400000000, 0x4f9f624400000000, 0xa74499fd00000000,
- 0xde2ee4ec00000000, 0x36f51f5500000000, 0x2cfa1ece00000000,
- 0xc421e57700000000, 0xbd4b986600000000, 0x559063df00000000,
- 0xc853eb8b00000000, 0x2088103200000000, 0x59e26d2300000000,
- 0xb139969a00000000, 0xab36970100000000, 0x43ed6cb800000000,
- 0x3a8711a900000000, 0xd25cea1000000000, 0x9e3ec58800000000,
- 0x76e53e3100000000, 0x0f8f432000000000, 0xe754b89900000000,
- 0xfd5bb90200000000, 0x158042bb00000000, 0x6cea3faa00000000,
- 0x8431c41300000000, 0x19f24c4700000000, 0xf129b7fe00000000,
- 0x8843caef00000000, 0x6098315600000000, 0x7a9730cd00000000,
- 0x924ccb7400000000, 0xeb26b66500000000, 0x03fd4ddc00000000,
- 0xd1a1a7cc00000000, 0x397a5c7500000000, 0x4010216400000000,
- 0xa8cbdadd00000000, 0xb2c4db4600000000, 0x5a1f20ff00000000,
- 0x23755dee00000000, 0xcbaea65700000000, 0x566d2e0300000000,
- 0xbeb6d5ba00000000, 0xc7dca8ab00000000, 0x2f07531200000000,
- 0x3508528900000000, 0xddd3a93000000000, 0xa4b9d42100000000,
- 0x4c622f9800000000, 0x7d7bfbca00000000, 0x95a0007300000000,
- 0xecca7d6200000000, 0x041186db00000000, 0x1e1e874000000000,
- 0xf6c57cf900000000, 0x8faf01e800000000, 0x6774fa5100000000,
- 0xfab7720500000000, 0x126c89bc00000000, 0x6b06f4ad00000000,
- 0x83dd0f1400000000, 0x99d20e8f00000000, 0x7109f53600000000,
- 0x0863882700000000, 0xe0b8739e00000000, 0x32e4998e00000000,
- 0xda3f623700000000, 0xa3551f2600000000, 0x4b8ee49f00000000,
- 0x5181e50400000000, 0xb95a1ebd00000000, 0xc03063ac00000000,
- 0x28eb981500000000, 0xb528104100000000, 0x5df3ebf800000000,
- 0x249996e900000000, 0xcc426d5000000000, 0xd64d6ccb00000000,
- 0x3e96977200000000, 0x47fcea6300000000, 0xaf2711da00000000,
- 0xe3453e4200000000, 0x0b9ec5fb00000000, 0x72f4b8ea00000000,
- 0x9a2f435300000000, 0x802042c800000000, 0x68fbb97100000000,
- 0x1191c46000000000, 0xf94a3fd900000000, 0x6489b78d00000000,
- 0x8c524c3400000000, 0xf538312500000000, 0x1de3ca9c00000000,
- 0x07eccb0700000000, 0xef3730be00000000, 0x965d4daf00000000,
- 0x7e86b61600000000, 0xacda5c0600000000, 0x4401a7bf00000000,
- 0x3d6bdaae00000000, 0xd5b0211700000000, 0xcfbf208c00000000,
- 0x2764db3500000000, 0x5e0ea62400000000, 0xb6d55d9d00000000,
- 0x2b16d5c900000000, 0xc3cd2e7000000000, 0xbaa7536100000000,
- 0x527ca8d800000000, 0x4873a94300000000, 0xa0a852fa00000000,
- 0xd9c22feb00000000, 0x3119d45200000000, 0xbbf0874e00000000,
- 0x532b7cf700000000, 0x2a4101e600000000, 0xc29afa5f00000000,
- 0xd895fbc400000000, 0x304e007d00000000, 0x49247d6c00000000,
- 0xa1ff86d500000000, 0x3c3c0e8100000000, 0xd4e7f53800000000,
- 0xad8d882900000000, 0x4556739000000000, 0x5f59720b00000000,
- 0xb78289b200000000, 0xcee8f4a300000000, 0x26330f1a00000000,
- 0xf46fe50a00000000, 0x1cb41eb300000000, 0x65de63a200000000,
- 0x8d05981b00000000, 0x970a998000000000, 0x7fd1623900000000,
- 0x06bb1f2800000000, 0xee60e49100000000, 0x73a36cc500000000,
- 0x9b78977c00000000, 0xe212ea6d00000000, 0x0ac911d400000000,
- 0x10c6104f00000000, 0xf81debf600000000, 0x817796e700000000,
- 0x69ac6d5e00000000, 0x25ce42c600000000, 0xcd15b97f00000000,
- 0xb47fc46e00000000, 0x5ca43fd700000000, 0x46ab3e4c00000000,
- 0xae70c5f500000000, 0xd71ab8e400000000, 0x3fc1435d00000000,
- 0xa202cb0900000000, 0x4ad930b000000000, 0x33b34da100000000,
- 0xdb68b61800000000, 0xc167b78300000000, 0x29bc4c3a00000000,
- 0x50d6312b00000000, 0xb80dca9200000000, 0x6a51208200000000,
- 0x828adb3b00000000, 0xfbe0a62a00000000, 0x133b5d9300000000,
- 0x09345c0800000000, 0xe1efa7b100000000, 0x9885daa000000000,
- 0x705e211900000000, 0xed9da94d00000000, 0x054652f400000000,
- 0x7c2c2fe500000000, 0x94f7d45c00000000, 0x8ef8d5c700000000,
- 0x66232e7e00000000, 0x1f49536f00000000, 0xf792a8d600000000,
- 0xc68b7c8400000000, 0x2e50873d00000000, 0x573afa2c00000000,
- 0xbfe1019500000000, 0xa5ee000e00000000, 0x4d35fbb700000000,
- 0x345f86a600000000, 0xdc847d1f00000000, 0x4147f54b00000000,
- 0xa99c0ef200000000, 0xd0f673e300000000, 0x382d885a00000000,
- 0x222289c100000000, 0xcaf9727800000000, 0xb3930f6900000000,
- 0x5b48f4d000000000, 0x89141ec000000000, 0x61cfe57900000000,
- 0x18a5986800000000, 0xf07e63d100000000, 0xea71624a00000000,
- 0x02aa99f300000000, 0x7bc0e4e200000000, 0x931b1f5b00000000,
- 0x0ed8970f00000000, 0xe6036cb600000000, 0x9f6911a700000000,
- 0x77b2ea1e00000000, 0x6dbdeb8500000000, 0x8566103c00000000,
- 0xfc0c6d2d00000000, 0x14d7969400000000, 0x58b5b90c00000000,
- 0xb06e42b500000000, 0xc9043fa400000000, 0x21dfc41d00000000,
- 0x3bd0c58600000000, 0xd30b3e3f00000000, 0xaa61432e00000000,
- 0x42bab89700000000, 0xdf7930c300000000, 0x37a2cb7a00000000,
- 0x4ec8b66b00000000, 0xa6134dd200000000, 0xbc1c4c4900000000,
- 0x54c7b7f000000000, 0x2dadcae100000000, 0xc576315800000000,
- 0x172adb4800000000, 0xfff120f100000000, 0x869b5de000000000,
- 0x6e40a65900000000, 0x744fa7c200000000, 0x9c945c7b00000000,
- 0xe5fe216a00000000, 0x0d25dad300000000, 0x90e6528700000000,
- 0x783da93e00000000, 0x0157d42f00000000, 0xe98c2f9600000000,
- 0xf3832e0d00000000, 0x1b58d5b400000000, 0x6232a8a500000000,
- 0x8ae9531c00000000},
- {0x0000000000000000, 0x919168ae00000000, 0x6325a08700000000,
- 0xf2b4c82900000000, 0x874c31d400000000, 0x16dd597a00000000,
- 0xe469915300000000, 0x75f8f9fd00000000, 0x4f9f137300000000,
- 0xde0e7bdd00000000, 0x2cbab3f400000000, 0xbd2bdb5a00000000,
- 0xc8d322a700000000, 0x59424a0900000000, 0xabf6822000000000,
- 0x3a67ea8e00000000, 0x9e3e27e600000000, 0x0faf4f4800000000,
- 0xfd1b876100000000, 0x6c8aefcf00000000, 0x1972163200000000,
- 0x88e37e9c00000000, 0x7a57b6b500000000, 0xebc6de1b00000000,
- 0xd1a1349500000000, 0x40305c3b00000000, 0xb284941200000000,
- 0x2315fcbc00000000, 0x56ed054100000000, 0xc77c6def00000000,
- 0x35c8a5c600000000, 0xa459cd6800000000, 0x7d7b3f1700000000,
- 0xecea57b900000000, 0x1e5e9f9000000000, 0x8fcff73e00000000,
- 0xfa370ec300000000, 0x6ba6666d00000000, 0x9912ae4400000000,
- 0x0883c6ea00000000, 0x32e42c6400000000, 0xa37544ca00000000,
- 0x51c18ce300000000, 0xc050e44d00000000, 0xb5a81db000000000,
- 0x2439751e00000000, 0xd68dbd3700000000, 0x471cd59900000000,
- 0xe34518f100000000, 0x72d4705f00000000, 0x8060b87600000000,
- 0x11f1d0d800000000, 0x6409292500000000, 0xf598418b00000000,
- 0x072c89a200000000, 0x96bde10c00000000, 0xacda0b8200000000,
- 0x3d4b632c00000000, 0xcfffab0500000000, 0x5e6ec3ab00000000,
- 0x2b963a5600000000, 0xba0752f800000000, 0x48b39ad100000000,
- 0xd922f27f00000000, 0xfaf67e2e00000000, 0x6b67168000000000,
- 0x99d3dea900000000, 0x0842b60700000000, 0x7dba4ffa00000000,
- 0xec2b275400000000, 0x1e9fef7d00000000, 0x8f0e87d300000000,
- 0xb5696d5d00000000, 0x24f805f300000000, 0xd64ccdda00000000,
- 0x47dda57400000000, 0x32255c8900000000, 0xa3b4342700000000,
- 0x5100fc0e00000000, 0xc09194a000000000, 0x64c859c800000000,
- 0xf559316600000000, 0x07edf94f00000000, 0x967c91e100000000,
- 0xe384681c00000000, 0x721500b200000000, 0x80a1c89b00000000,
- 0x1130a03500000000, 0x2b574abb00000000, 0xbac6221500000000,
- 0x4872ea3c00000000, 0xd9e3829200000000, 0xac1b7b6f00000000,
- 0x3d8a13c100000000, 0xcf3edbe800000000, 0x5eafb34600000000,
- 0x878d413900000000, 0x161c299700000000, 0xe4a8e1be00000000,
- 0x7539891000000000, 0x00c170ed00000000, 0x9150184300000000,
- 0x63e4d06a00000000, 0xf275b8c400000000, 0xc812524a00000000,
- 0x59833ae400000000, 0xab37f2cd00000000, 0x3aa69a6300000000,
- 0x4f5e639e00000000, 0xdecf0b3000000000, 0x2c7bc31900000000,
- 0xbdeaabb700000000, 0x19b366df00000000, 0x88220e7100000000,
- 0x7a96c65800000000, 0xeb07aef600000000, 0x9eff570b00000000,
- 0x0f6e3fa500000000, 0xfddaf78c00000000, 0x6c4b9f2200000000,
- 0x562c75ac00000000, 0xc7bd1d0200000000, 0x3509d52b00000000,
- 0xa498bd8500000000, 0xd160447800000000, 0x40f12cd600000000,
- 0xb245e4ff00000000, 0x23d48c5100000000, 0xf4edfd5c00000000,
- 0x657c95f200000000, 0x97c85ddb00000000, 0x0659357500000000,
- 0x73a1cc8800000000, 0xe230a42600000000, 0x10846c0f00000000,
- 0x811504a100000000, 0xbb72ee2f00000000, 0x2ae3868100000000,
- 0xd8574ea800000000, 0x49c6260600000000, 0x3c3edffb00000000,
- 0xadafb75500000000, 0x5f1b7f7c00000000, 0xce8a17d200000000,
- 0x6ad3daba00000000, 0xfb42b21400000000, 0x09f67a3d00000000,
- 0x9867129300000000, 0xed9feb6e00000000, 0x7c0e83c000000000,
- 0x8eba4be900000000, 0x1f2b234700000000, 0x254cc9c900000000,
- 0xb4dda16700000000, 0x4669694e00000000, 0xd7f801e000000000,
- 0xa200f81d00000000, 0x339190b300000000, 0xc125589a00000000,
- 0x50b4303400000000, 0x8996c24b00000000, 0x1807aae500000000,
- 0xeab362cc00000000, 0x7b220a6200000000, 0x0edaf39f00000000,
- 0x9f4b9b3100000000, 0x6dff531800000000, 0xfc6e3bb600000000,
- 0xc609d13800000000, 0x5798b99600000000, 0xa52c71bf00000000,
- 0x34bd191100000000, 0x4145e0ec00000000, 0xd0d4884200000000,
- 0x2260406b00000000, 0xb3f128c500000000, 0x17a8e5ad00000000,
- 0x86398d0300000000, 0x748d452a00000000, 0xe51c2d8400000000,
- 0x90e4d47900000000, 0x0175bcd700000000, 0xf3c174fe00000000,
- 0x62501c5000000000, 0x5837f6de00000000, 0xc9a69e7000000000,
- 0x3b12565900000000, 0xaa833ef700000000, 0xdf7bc70a00000000,
- 0x4eeaafa400000000, 0xbc5e678d00000000, 0x2dcf0f2300000000,
- 0x0e1b837200000000, 0x9f8aebdc00000000, 0x6d3e23f500000000,
- 0xfcaf4b5b00000000, 0x8957b2a600000000, 0x18c6da0800000000,
- 0xea72122100000000, 0x7be37a8f00000000, 0x4184900100000000,
- 0xd015f8af00000000, 0x22a1308600000000, 0xb330582800000000,
- 0xc6c8a1d500000000, 0x5759c97b00000000, 0xa5ed015200000000,
- 0x347c69fc00000000, 0x9025a49400000000, 0x01b4cc3a00000000,
- 0xf300041300000000, 0x62916cbd00000000, 0x1769954000000000,
- 0x86f8fdee00000000, 0x744c35c700000000, 0xe5dd5d6900000000,
- 0xdfbab7e700000000, 0x4e2bdf4900000000, 0xbc9f176000000000,
- 0x2d0e7fce00000000, 0x58f6863300000000, 0xc967ee9d00000000,
- 0x3bd326b400000000, 0xaa424e1a00000000, 0x7360bc6500000000,
- 0xe2f1d4cb00000000, 0x10451ce200000000, 0x81d4744c00000000,
- 0xf42c8db100000000, 0x65bde51f00000000, 0x97092d3600000000,
- 0x0698459800000000, 0x3cffaf1600000000, 0xad6ec7b800000000,
- 0x5fda0f9100000000, 0xce4b673f00000000, 0xbbb39ec200000000,
- 0x2a22f66c00000000, 0xd8963e4500000000, 0x490756eb00000000,
- 0xed5e9b8300000000, 0x7ccff32d00000000, 0x8e7b3b0400000000,
- 0x1fea53aa00000000, 0x6a12aa5700000000, 0xfb83c2f900000000,
- 0x09370ad000000000, 0x98a6627e00000000, 0xa2c188f000000000,
- 0x3350e05e00000000, 0xc1e4287700000000, 0x507540d900000000,
- 0x258db92400000000, 0xb41cd18a00000000, 0x46a819a300000000,
- 0xd739710d00000000}};
-
-#else /* W == 4 */
-
-local const z_crc_t FAR crc_braid_table[][256] = {
- {0x00000000, 0xccaa009e, 0x4225077d, 0x8e8f07e3, 0x844a0efa,
- 0x48e00e64, 0xc66f0987, 0x0ac50919, 0xd3e51bb5, 0x1f4f1b2b,
- 0x91c01cc8, 0x5d6a1c56, 0x57af154f, 0x9b0515d1, 0x158a1232,
- 0xd92012ac, 0x7cbb312b, 0xb01131b5, 0x3e9e3656, 0xf23436c8,
- 0xf8f13fd1, 0x345b3f4f, 0xbad438ac, 0x767e3832, 0xaf5e2a9e,
- 0x63f42a00, 0xed7b2de3, 0x21d12d7d, 0x2b142464, 0xe7be24fa,
- 0x69312319, 0xa59b2387, 0xf9766256, 0x35dc62c8, 0xbb53652b,
- 0x77f965b5, 0x7d3c6cac, 0xb1966c32, 0x3f196bd1, 0xf3b36b4f,
- 0x2a9379e3, 0xe639797d, 0x68b67e9e, 0xa41c7e00, 0xaed97719,
- 0x62737787, 0xecfc7064, 0x205670fa, 0x85cd537d, 0x496753e3,
- 0xc7e85400, 0x0b42549e, 0x01875d87, 0xcd2d5d19, 0x43a25afa,
- 0x8f085a64, 0x562848c8, 0x9a824856, 0x140d4fb5, 0xd8a74f2b,
- 0xd2624632, 0x1ec846ac, 0x9047414f, 0x5ced41d1, 0x299dc2ed,
- 0xe537c273, 0x6bb8c590, 0xa712c50e, 0xadd7cc17, 0x617dcc89,
- 0xeff2cb6a, 0x2358cbf4, 0xfa78d958, 0x36d2d9c6, 0xb85dde25,
- 0x74f7debb, 0x7e32d7a2, 0xb298d73c, 0x3c17d0df, 0xf0bdd041,
- 0x5526f3c6, 0x998cf358, 0x1703f4bb, 0xdba9f425, 0xd16cfd3c,
- 0x1dc6fda2, 0x9349fa41, 0x5fe3fadf, 0x86c3e873, 0x4a69e8ed,
- 0xc4e6ef0e, 0x084cef90, 0x0289e689, 0xce23e617, 0x40ace1f4,
- 0x8c06e16a, 0xd0eba0bb, 0x1c41a025, 0x92cea7c6, 0x5e64a758,
- 0x54a1ae41, 0x980baedf, 0x1684a93c, 0xda2ea9a2, 0x030ebb0e,
- 0xcfa4bb90, 0x412bbc73, 0x8d81bced, 0x8744b5f4, 0x4beeb56a,
- 0xc561b289, 0x09cbb217, 0xac509190, 0x60fa910e, 0xee7596ed,
- 0x22df9673, 0x281a9f6a, 0xe4b09ff4, 0x6a3f9817, 0xa6959889,
- 0x7fb58a25, 0xb31f8abb, 0x3d908d58, 0xf13a8dc6, 0xfbff84df,
- 0x37558441, 0xb9da83a2, 0x7570833c, 0x533b85da, 0x9f918544,
- 0x111e82a7, 0xddb48239, 0xd7718b20, 0x1bdb8bbe, 0x95548c5d,
- 0x59fe8cc3, 0x80de9e6f, 0x4c749ef1, 0xc2fb9912, 0x0e51998c,
- 0x04949095, 0xc83e900b, 0x46b197e8, 0x8a1b9776, 0x2f80b4f1,
- 0xe32ab46f, 0x6da5b38c, 0xa10fb312, 0xabcaba0b, 0x6760ba95,
- 0xe9efbd76, 0x2545bde8, 0xfc65af44, 0x30cfafda, 0xbe40a839,
- 0x72eaa8a7, 0x782fa1be, 0xb485a120, 0x3a0aa6c3, 0xf6a0a65d,
- 0xaa4de78c, 0x66e7e712, 0xe868e0f1, 0x24c2e06f, 0x2e07e976,
- 0xe2ade9e8, 0x6c22ee0b, 0xa088ee95, 0x79a8fc39, 0xb502fca7,
- 0x3b8dfb44, 0xf727fbda, 0xfde2f2c3, 0x3148f25d, 0xbfc7f5be,
- 0x736df520, 0xd6f6d6a7, 0x1a5cd639, 0x94d3d1da, 0x5879d144,
- 0x52bcd85d, 0x9e16d8c3, 0x1099df20, 0xdc33dfbe, 0x0513cd12,
- 0xc9b9cd8c, 0x4736ca6f, 0x8b9ccaf1, 0x8159c3e8, 0x4df3c376,
- 0xc37cc495, 0x0fd6c40b, 0x7aa64737, 0xb60c47a9, 0x3883404a,
- 0xf42940d4, 0xfeec49cd, 0x32464953, 0xbcc94eb0, 0x70634e2e,
- 0xa9435c82, 0x65e95c1c, 0xeb665bff, 0x27cc5b61, 0x2d095278,
- 0xe1a352e6, 0x6f2c5505, 0xa386559b, 0x061d761c, 0xcab77682,
- 0x44387161, 0x889271ff, 0x825778e6, 0x4efd7878, 0xc0727f9b,
- 0x0cd87f05, 0xd5f86da9, 0x19526d37, 0x97dd6ad4, 0x5b776a4a,
- 0x51b26353, 0x9d1863cd, 0x1397642e, 0xdf3d64b0, 0x83d02561,
- 0x4f7a25ff, 0xc1f5221c, 0x0d5f2282, 0x079a2b9b, 0xcb302b05,
- 0x45bf2ce6, 0x89152c78, 0x50353ed4, 0x9c9f3e4a, 0x121039a9,
- 0xdeba3937, 0xd47f302e, 0x18d530b0, 0x965a3753, 0x5af037cd,
- 0xff6b144a, 0x33c114d4, 0xbd4e1337, 0x71e413a9, 0x7b211ab0,
- 0xb78b1a2e, 0x39041dcd, 0xf5ae1d53, 0x2c8e0fff, 0xe0240f61,
- 0x6eab0882, 0xa201081c, 0xa8c40105, 0x646e019b, 0xeae10678,
- 0x264b06e6},
- {0x00000000, 0xa6770bb4, 0x979f1129, 0x31e81a9d, 0xf44f2413,
- 0x52382fa7, 0x63d0353a, 0xc5a73e8e, 0x33ef4e67, 0x959845d3,
- 0xa4705f4e, 0x020754fa, 0xc7a06a74, 0x61d761c0, 0x503f7b5d,
- 0xf64870e9, 0x67de9cce, 0xc1a9977a, 0xf0418de7, 0x56368653,
- 0x9391b8dd, 0x35e6b369, 0x040ea9f4, 0xa279a240, 0x5431d2a9,
- 0xf246d91d, 0xc3aec380, 0x65d9c834, 0xa07ef6ba, 0x0609fd0e,
- 0x37e1e793, 0x9196ec27, 0xcfbd399c, 0x69ca3228, 0x582228b5,
- 0xfe552301, 0x3bf21d8f, 0x9d85163b, 0xac6d0ca6, 0x0a1a0712,
- 0xfc5277fb, 0x5a257c4f, 0x6bcd66d2, 0xcdba6d66, 0x081d53e8,
- 0xae6a585c, 0x9f8242c1, 0x39f54975, 0xa863a552, 0x0e14aee6,
- 0x3ffcb47b, 0x998bbfcf, 0x5c2c8141, 0xfa5b8af5, 0xcbb39068,
- 0x6dc49bdc, 0x9b8ceb35, 0x3dfbe081, 0x0c13fa1c, 0xaa64f1a8,
- 0x6fc3cf26, 0xc9b4c492, 0xf85cde0f, 0x5e2bd5bb, 0x440b7579,
- 0xe27c7ecd, 0xd3946450, 0x75e36fe4, 0xb044516a, 0x16335ade,
- 0x27db4043, 0x81ac4bf7, 0x77e43b1e, 0xd19330aa, 0xe07b2a37,
- 0x460c2183, 0x83ab1f0d, 0x25dc14b9, 0x14340e24, 0xb2430590,
- 0x23d5e9b7, 0x85a2e203, 0xb44af89e, 0x123df32a, 0xd79acda4,
- 0x71edc610, 0x4005dc8d, 0xe672d739, 0x103aa7d0, 0xb64dac64,
- 0x87a5b6f9, 0x21d2bd4d, 0xe47583c3, 0x42028877, 0x73ea92ea,
- 0xd59d995e, 0x8bb64ce5, 0x2dc14751, 0x1c295dcc, 0xba5e5678,
- 0x7ff968f6, 0xd98e6342, 0xe86679df, 0x4e11726b, 0xb8590282,
- 0x1e2e0936, 0x2fc613ab, 0x89b1181f, 0x4c162691, 0xea612d25,
- 0xdb8937b8, 0x7dfe3c0c, 0xec68d02b, 0x4a1fdb9f, 0x7bf7c102,
- 0xdd80cab6, 0x1827f438, 0xbe50ff8c, 0x8fb8e511, 0x29cfeea5,
- 0xdf879e4c, 0x79f095f8, 0x48188f65, 0xee6f84d1, 0x2bc8ba5f,
- 0x8dbfb1eb, 0xbc57ab76, 0x1a20a0c2, 0x8816eaf2, 0x2e61e146,
- 0x1f89fbdb, 0xb9fef06f, 0x7c59cee1, 0xda2ec555, 0xebc6dfc8,
- 0x4db1d47c, 0xbbf9a495, 0x1d8eaf21, 0x2c66b5bc, 0x8a11be08,
- 0x4fb68086, 0xe9c18b32, 0xd82991af, 0x7e5e9a1b, 0xefc8763c,
- 0x49bf7d88, 0x78576715, 0xde206ca1, 0x1b87522f, 0xbdf0599b,
- 0x8c184306, 0x2a6f48b2, 0xdc27385b, 0x7a5033ef, 0x4bb82972,
- 0xedcf22c6, 0x28681c48, 0x8e1f17fc, 0xbff70d61, 0x198006d5,
- 0x47abd36e, 0xe1dcd8da, 0xd034c247, 0x7643c9f3, 0xb3e4f77d,
- 0x1593fcc9, 0x247be654, 0x820cede0, 0x74449d09, 0xd23396bd,
- 0xe3db8c20, 0x45ac8794, 0x800bb91a, 0x267cb2ae, 0x1794a833,
- 0xb1e3a387, 0x20754fa0, 0x86024414, 0xb7ea5e89, 0x119d553d,
- 0xd43a6bb3, 0x724d6007, 0x43a57a9a, 0xe5d2712e, 0x139a01c7,
- 0xb5ed0a73, 0x840510ee, 0x22721b5a, 0xe7d525d4, 0x41a22e60,
- 0x704a34fd, 0xd63d3f49, 0xcc1d9f8b, 0x6a6a943f, 0x5b828ea2,
- 0xfdf58516, 0x3852bb98, 0x9e25b02c, 0xafcdaab1, 0x09baa105,
- 0xfff2d1ec, 0x5985da58, 0x686dc0c5, 0xce1acb71, 0x0bbdf5ff,
- 0xadcafe4b, 0x9c22e4d6, 0x3a55ef62, 0xabc30345, 0x0db408f1,
- 0x3c5c126c, 0x9a2b19d8, 0x5f8c2756, 0xf9fb2ce2, 0xc813367f,
- 0x6e643dcb, 0x982c4d22, 0x3e5b4696, 0x0fb35c0b, 0xa9c457bf,
- 0x6c636931, 0xca146285, 0xfbfc7818, 0x5d8b73ac, 0x03a0a617,
- 0xa5d7ada3, 0x943fb73e, 0x3248bc8a, 0xf7ef8204, 0x519889b0,
- 0x6070932d, 0xc6079899, 0x304fe870, 0x9638e3c4, 0xa7d0f959,
- 0x01a7f2ed, 0xc400cc63, 0x6277c7d7, 0x539fdd4a, 0xf5e8d6fe,
- 0x647e3ad9, 0xc209316d, 0xf3e12bf0, 0x55962044, 0x90311eca,
- 0x3646157e, 0x07ae0fe3, 0xa1d90457, 0x579174be, 0xf1e67f0a,
- 0xc00e6597, 0x66796e23, 0xa3de50ad, 0x05a95b19, 0x34414184,
- 0x92364a30},
- {0x00000000, 0xcb5cd3a5, 0x4dc8a10b, 0x869472ae, 0x9b914216,
- 0x50cd91b3, 0xd659e31d, 0x1d0530b8, 0xec53826d, 0x270f51c8,
- 0xa19b2366, 0x6ac7f0c3, 0x77c2c07b, 0xbc9e13de, 0x3a0a6170,
- 0xf156b2d5, 0x03d6029b, 0xc88ad13e, 0x4e1ea390, 0x85427035,
- 0x9847408d, 0x531b9328, 0xd58fe186, 0x1ed33223, 0xef8580f6,
- 0x24d95353, 0xa24d21fd, 0x6911f258, 0x7414c2e0, 0xbf481145,
- 0x39dc63eb, 0xf280b04e, 0x07ac0536, 0xccf0d693, 0x4a64a43d,
- 0x81387798, 0x9c3d4720, 0x57619485, 0xd1f5e62b, 0x1aa9358e,
- 0xebff875b, 0x20a354fe, 0xa6372650, 0x6d6bf5f5, 0x706ec54d,
- 0xbb3216e8, 0x3da66446, 0xf6fab7e3, 0x047a07ad, 0xcf26d408,
- 0x49b2a6a6, 0x82ee7503, 0x9feb45bb, 0x54b7961e, 0xd223e4b0,
- 0x197f3715, 0xe82985c0, 0x23755665, 0xa5e124cb, 0x6ebdf76e,
- 0x73b8c7d6, 0xb8e41473, 0x3e7066dd, 0xf52cb578, 0x0f580a6c,
- 0xc404d9c9, 0x4290ab67, 0x89cc78c2, 0x94c9487a, 0x5f959bdf,
- 0xd901e971, 0x125d3ad4, 0xe30b8801, 0x28575ba4, 0xaec3290a,
- 0x659ffaaf, 0x789aca17, 0xb3c619b2, 0x35526b1c, 0xfe0eb8b9,
- 0x0c8e08f7, 0xc7d2db52, 0x4146a9fc, 0x8a1a7a59, 0x971f4ae1,
- 0x5c439944, 0xdad7ebea, 0x118b384f, 0xe0dd8a9a, 0x2b81593f,
- 0xad152b91, 0x6649f834, 0x7b4cc88c, 0xb0101b29, 0x36846987,
- 0xfdd8ba22, 0x08f40f5a, 0xc3a8dcff, 0x453cae51, 0x8e607df4,
- 0x93654d4c, 0x58399ee9, 0xdeadec47, 0x15f13fe2, 0xe4a78d37,
- 0x2ffb5e92, 0xa96f2c3c, 0x6233ff99, 0x7f36cf21, 0xb46a1c84,
- 0x32fe6e2a, 0xf9a2bd8f, 0x0b220dc1, 0xc07ede64, 0x46eaacca,
- 0x8db67f6f, 0x90b34fd7, 0x5bef9c72, 0xdd7beedc, 0x16273d79,
- 0xe7718fac, 0x2c2d5c09, 0xaab92ea7, 0x61e5fd02, 0x7ce0cdba,
- 0xb7bc1e1f, 0x31286cb1, 0xfa74bf14, 0x1eb014d8, 0xd5ecc77d,
- 0x5378b5d3, 0x98246676, 0x852156ce, 0x4e7d856b, 0xc8e9f7c5,
- 0x03b52460, 0xf2e396b5, 0x39bf4510, 0xbf2b37be, 0x7477e41b,
- 0x6972d4a3, 0xa22e0706, 0x24ba75a8, 0xefe6a60d, 0x1d661643,
- 0xd63ac5e6, 0x50aeb748, 0x9bf264ed, 0x86f75455, 0x4dab87f0,
- 0xcb3ff55e, 0x006326fb, 0xf135942e, 0x3a69478b, 0xbcfd3525,
- 0x77a1e680, 0x6aa4d638, 0xa1f8059d, 0x276c7733, 0xec30a496,
- 0x191c11ee, 0xd240c24b, 0x54d4b0e5, 0x9f886340, 0x828d53f8,
- 0x49d1805d, 0xcf45f2f3, 0x04192156, 0xf54f9383, 0x3e134026,
- 0xb8873288, 0x73dbe12d, 0x6eded195, 0xa5820230, 0x2316709e,
- 0xe84aa33b, 0x1aca1375, 0xd196c0d0, 0x5702b27e, 0x9c5e61db,
- 0x815b5163, 0x4a0782c6, 0xcc93f068, 0x07cf23cd, 0xf6999118,
- 0x3dc542bd, 0xbb513013, 0x700de3b6, 0x6d08d30e, 0xa65400ab,
- 0x20c07205, 0xeb9ca1a0, 0x11e81eb4, 0xdab4cd11, 0x5c20bfbf,
- 0x977c6c1a, 0x8a795ca2, 0x41258f07, 0xc7b1fda9, 0x0ced2e0c,
- 0xfdbb9cd9, 0x36e74f7c, 0xb0733dd2, 0x7b2fee77, 0x662adecf,
- 0xad760d6a, 0x2be27fc4, 0xe0beac61, 0x123e1c2f, 0xd962cf8a,
- 0x5ff6bd24, 0x94aa6e81, 0x89af5e39, 0x42f38d9c, 0xc467ff32,
- 0x0f3b2c97, 0xfe6d9e42, 0x35314de7, 0xb3a53f49, 0x78f9ecec,
- 0x65fcdc54, 0xaea00ff1, 0x28347d5f, 0xe368aefa, 0x16441b82,
- 0xdd18c827, 0x5b8cba89, 0x90d0692c, 0x8dd55994, 0x46898a31,
- 0xc01df89f, 0x0b412b3a, 0xfa1799ef, 0x314b4a4a, 0xb7df38e4,
- 0x7c83eb41, 0x6186dbf9, 0xaada085c, 0x2c4e7af2, 0xe712a957,
- 0x15921919, 0xdececabc, 0x585ab812, 0x93066bb7, 0x8e035b0f,
- 0x455f88aa, 0xc3cbfa04, 0x089729a1, 0xf9c19b74, 0x329d48d1,
- 0xb4093a7f, 0x7f55e9da, 0x6250d962, 0xa90c0ac7, 0x2f987869,
- 0xe4c4abcc},
- {0x00000000, 0x3d6029b0, 0x7ac05360, 0x47a07ad0, 0xf580a6c0,
- 0xc8e08f70, 0x8f40f5a0, 0xb220dc10, 0x30704bc1, 0x0d106271,
- 0x4ab018a1, 0x77d03111, 0xc5f0ed01, 0xf890c4b1, 0xbf30be61,
- 0x825097d1, 0x60e09782, 0x5d80be32, 0x1a20c4e2, 0x2740ed52,
- 0x95603142, 0xa80018f2, 0xefa06222, 0xd2c04b92, 0x5090dc43,
- 0x6df0f5f3, 0x2a508f23, 0x1730a693, 0xa5107a83, 0x98705333,
- 0xdfd029e3, 0xe2b00053, 0xc1c12f04, 0xfca106b4, 0xbb017c64,
- 0x866155d4, 0x344189c4, 0x0921a074, 0x4e81daa4, 0x73e1f314,
- 0xf1b164c5, 0xccd14d75, 0x8b7137a5, 0xb6111e15, 0x0431c205,
- 0x3951ebb5, 0x7ef19165, 0x4391b8d5, 0xa121b886, 0x9c419136,
- 0xdbe1ebe6, 0xe681c256, 0x54a11e46, 0x69c137f6, 0x2e614d26,
- 0x13016496, 0x9151f347, 0xac31daf7, 0xeb91a027, 0xd6f18997,
- 0x64d15587, 0x59b17c37, 0x1e1106e7, 0x23712f57, 0x58f35849,
- 0x659371f9, 0x22330b29, 0x1f532299, 0xad73fe89, 0x9013d739,
- 0xd7b3ade9, 0xead38459, 0x68831388, 0x55e33a38, 0x124340e8,
- 0x2f236958, 0x9d03b548, 0xa0639cf8, 0xe7c3e628, 0xdaa3cf98,
- 0x3813cfcb, 0x0573e67b, 0x42d39cab, 0x7fb3b51b, 0xcd93690b,
- 0xf0f340bb, 0xb7533a6b, 0x8a3313db, 0x0863840a, 0x3503adba,
- 0x72a3d76a, 0x4fc3feda, 0xfde322ca, 0xc0830b7a, 0x872371aa,
- 0xba43581a, 0x9932774d, 0xa4525efd, 0xe3f2242d, 0xde920d9d,
- 0x6cb2d18d, 0x51d2f83d, 0x167282ed, 0x2b12ab5d, 0xa9423c8c,
- 0x9422153c, 0xd3826fec, 0xeee2465c, 0x5cc29a4c, 0x61a2b3fc,
- 0x2602c92c, 0x1b62e09c, 0xf9d2e0cf, 0xc4b2c97f, 0x8312b3af,
- 0xbe729a1f, 0x0c52460f, 0x31326fbf, 0x7692156f, 0x4bf23cdf,
- 0xc9a2ab0e, 0xf4c282be, 0xb362f86e, 0x8e02d1de, 0x3c220dce,
- 0x0142247e, 0x46e25eae, 0x7b82771e, 0xb1e6b092, 0x8c869922,
- 0xcb26e3f2, 0xf646ca42, 0x44661652, 0x79063fe2, 0x3ea64532,
- 0x03c66c82, 0x8196fb53, 0xbcf6d2e3, 0xfb56a833, 0xc6368183,
- 0x74165d93, 0x49767423, 0x0ed60ef3, 0x33b62743, 0xd1062710,
- 0xec660ea0, 0xabc67470, 0x96a65dc0, 0x248681d0, 0x19e6a860,
- 0x5e46d2b0, 0x6326fb00, 0xe1766cd1, 0xdc164561, 0x9bb63fb1,
- 0xa6d61601, 0x14f6ca11, 0x2996e3a1, 0x6e369971, 0x5356b0c1,
- 0x70279f96, 0x4d47b626, 0x0ae7ccf6, 0x3787e546, 0x85a73956,
- 0xb8c710e6, 0xff676a36, 0xc2074386, 0x4057d457, 0x7d37fde7,
- 0x3a978737, 0x07f7ae87, 0xb5d77297, 0x88b75b27, 0xcf1721f7,
- 0xf2770847, 0x10c70814, 0x2da721a4, 0x6a075b74, 0x576772c4,
- 0xe547aed4, 0xd8278764, 0x9f87fdb4, 0xa2e7d404, 0x20b743d5,
- 0x1dd76a65, 0x5a7710b5, 0x67173905, 0xd537e515, 0xe857cca5,
- 0xaff7b675, 0x92979fc5, 0xe915e8db, 0xd475c16b, 0x93d5bbbb,
- 0xaeb5920b, 0x1c954e1b, 0x21f567ab, 0x66551d7b, 0x5b3534cb,
- 0xd965a31a, 0xe4058aaa, 0xa3a5f07a, 0x9ec5d9ca, 0x2ce505da,
- 0x11852c6a, 0x562556ba, 0x6b457f0a, 0x89f57f59, 0xb49556e9,
- 0xf3352c39, 0xce550589, 0x7c75d999, 0x4115f029, 0x06b58af9,
- 0x3bd5a349, 0xb9853498, 0x84e51d28, 0xc34567f8, 0xfe254e48,
- 0x4c059258, 0x7165bbe8, 0x36c5c138, 0x0ba5e888, 0x28d4c7df,
- 0x15b4ee6f, 0x521494bf, 0x6f74bd0f, 0xdd54611f, 0xe03448af,
- 0xa794327f, 0x9af41bcf, 0x18a48c1e, 0x25c4a5ae, 0x6264df7e,
- 0x5f04f6ce, 0xed242ade, 0xd044036e, 0x97e479be, 0xaa84500e,
- 0x4834505d, 0x755479ed, 0x32f4033d, 0x0f942a8d, 0xbdb4f69d,
- 0x80d4df2d, 0xc774a5fd, 0xfa148c4d, 0x78441b9c, 0x4524322c,
- 0x028448fc, 0x3fe4614c, 0x8dc4bd5c, 0xb0a494ec, 0xf704ee3c,
- 0xca64c78c}};
-
-local const z_word_t FAR crc_braid_big_table[][256] = {
- {0x00000000, 0xb029603d, 0x6053c07a, 0xd07aa047, 0xc0a680f5,
- 0x708fe0c8, 0xa0f5408f, 0x10dc20b2, 0xc14b7030, 0x7162100d,
- 0xa118b04a, 0x1131d077, 0x01edf0c5, 0xb1c490f8, 0x61be30bf,
- 0xd1975082, 0x8297e060, 0x32be805d, 0xe2c4201a, 0x52ed4027,
- 0x42316095, 0xf21800a8, 0x2262a0ef, 0x924bc0d2, 0x43dc9050,
- 0xf3f5f06d, 0x238f502a, 0x93a63017, 0x837a10a5, 0x33537098,
- 0xe329d0df, 0x5300b0e2, 0x042fc1c1, 0xb406a1fc, 0x647c01bb,
- 0xd4556186, 0xc4894134, 0x74a02109, 0xa4da814e, 0x14f3e173,
- 0xc564b1f1, 0x754dd1cc, 0xa537718b, 0x151e11b6, 0x05c23104,
- 0xb5eb5139, 0x6591f17e, 0xd5b89143, 0x86b821a1, 0x3691419c,
- 0xe6ebe1db, 0x56c281e6, 0x461ea154, 0xf637c169, 0x264d612e,
- 0x96640113, 0x47f35191, 0xf7da31ac, 0x27a091eb, 0x9789f1d6,
- 0x8755d164, 0x377cb159, 0xe706111e, 0x572f7123, 0x4958f358,
- 0xf9719365, 0x290b3322, 0x9922531f, 0x89fe73ad, 0x39d71390,
- 0xe9adb3d7, 0x5984d3ea, 0x88138368, 0x383ae355, 0xe8404312,
- 0x5869232f, 0x48b5039d, 0xf89c63a0, 0x28e6c3e7, 0x98cfa3da,
- 0xcbcf1338, 0x7be67305, 0xab9cd342, 0x1bb5b37f, 0x0b6993cd,
- 0xbb40f3f0, 0x6b3a53b7, 0xdb13338a, 0x0a846308, 0xbaad0335,
- 0x6ad7a372, 0xdafec34f, 0xca22e3fd, 0x7a0b83c0, 0xaa712387,
- 0x1a5843ba, 0x4d773299, 0xfd5e52a4, 0x2d24f2e3, 0x9d0d92de,
- 0x8dd1b26c, 0x3df8d251, 0xed827216, 0x5dab122b, 0x8c3c42a9,
- 0x3c152294, 0xec6f82d3, 0x5c46e2ee, 0x4c9ac25c, 0xfcb3a261,
- 0x2cc90226, 0x9ce0621b, 0xcfe0d2f9, 0x7fc9b2c4, 0xafb31283,
- 0x1f9a72be, 0x0f46520c, 0xbf6f3231, 0x6f159276, 0xdf3cf24b,
- 0x0eaba2c9, 0xbe82c2f4, 0x6ef862b3, 0xded1028e, 0xce0d223c,
- 0x7e244201, 0xae5ee246, 0x1e77827b, 0x92b0e6b1, 0x2299868c,
- 0xf2e326cb, 0x42ca46f6, 0x52166644, 0xe23f0679, 0x3245a63e,
- 0x826cc603, 0x53fb9681, 0xe3d2f6bc, 0x33a856fb, 0x838136c6,
- 0x935d1674, 0x23747649, 0xf30ed60e, 0x4327b633, 0x102706d1,
- 0xa00e66ec, 0x7074c6ab, 0xc05da696, 0xd0818624, 0x60a8e619,
- 0xb0d2465e, 0x00fb2663, 0xd16c76e1, 0x614516dc, 0xb13fb69b,
- 0x0116d6a6, 0x11caf614, 0xa1e39629, 0x7199366e, 0xc1b05653,
- 0x969f2770, 0x26b6474d, 0xf6cce70a, 0x46e58737, 0x5639a785,
- 0xe610c7b8, 0x366a67ff, 0x864307c2, 0x57d45740, 0xe7fd377d,
- 0x3787973a, 0x87aef707, 0x9772d7b5, 0x275bb788, 0xf72117cf,
- 0x470877f2, 0x1408c710, 0xa421a72d, 0x745b076a, 0xc4726757,
- 0xd4ae47e5, 0x648727d8, 0xb4fd879f, 0x04d4e7a2, 0xd543b720,
- 0x656ad71d, 0xb510775a, 0x05391767, 0x15e537d5, 0xa5cc57e8,
- 0x75b6f7af, 0xc59f9792, 0xdbe815e9, 0x6bc175d4, 0xbbbbd593,
- 0x0b92b5ae, 0x1b4e951c, 0xab67f521, 0x7b1d5566, 0xcb34355b,
- 0x1aa365d9, 0xaa8a05e4, 0x7af0a5a3, 0xcad9c59e, 0xda05e52c,
- 0x6a2c8511, 0xba562556, 0x0a7f456b, 0x597ff589, 0xe95695b4,
- 0x392c35f3, 0x890555ce, 0x99d9757c, 0x29f01541, 0xf98ab506,
- 0x49a3d53b, 0x983485b9, 0x281de584, 0xf86745c3, 0x484e25fe,
- 0x5892054c, 0xe8bb6571, 0x38c1c536, 0x88e8a50b, 0xdfc7d428,
- 0x6feeb415, 0xbf941452, 0x0fbd746f, 0x1f6154dd, 0xaf4834e0,
- 0x7f3294a7, 0xcf1bf49a, 0x1e8ca418, 0xaea5c425, 0x7edf6462,
- 0xcef6045f, 0xde2a24ed, 0x6e0344d0, 0xbe79e497, 0x0e5084aa,
- 0x5d503448, 0xed795475, 0x3d03f432, 0x8d2a940f, 0x9df6b4bd,
- 0x2ddfd480, 0xfda574c7, 0x4d8c14fa, 0x9c1b4478, 0x2c322445,
- 0xfc488402, 0x4c61e43f, 0x5cbdc48d, 0xec94a4b0, 0x3cee04f7,
- 0x8cc764ca},
- {0x00000000, 0xa5d35ccb, 0x0ba1c84d, 0xae729486, 0x1642919b,
- 0xb391cd50, 0x1de359d6, 0xb830051d, 0x6d8253ec, 0xc8510f27,
- 0x66239ba1, 0xc3f0c76a, 0x7bc0c277, 0xde139ebc, 0x70610a3a,
- 0xd5b256f1, 0x9b02d603, 0x3ed18ac8, 0x90a31e4e, 0x35704285,
- 0x8d404798, 0x28931b53, 0x86e18fd5, 0x2332d31e, 0xf68085ef,
- 0x5353d924, 0xfd214da2, 0x58f21169, 0xe0c21474, 0x451148bf,
- 0xeb63dc39, 0x4eb080f2, 0x3605ac07, 0x93d6f0cc, 0x3da4644a,
- 0x98773881, 0x20473d9c, 0x85946157, 0x2be6f5d1, 0x8e35a91a,
- 0x5b87ffeb, 0xfe54a320, 0x502637a6, 0xf5f56b6d, 0x4dc56e70,
- 0xe81632bb, 0x4664a63d, 0xe3b7faf6, 0xad077a04, 0x08d426cf,
- 0xa6a6b249, 0x0375ee82, 0xbb45eb9f, 0x1e96b754, 0xb0e423d2,
- 0x15377f19, 0xc08529e8, 0x65567523, 0xcb24e1a5, 0x6ef7bd6e,
- 0xd6c7b873, 0x7314e4b8, 0xdd66703e, 0x78b52cf5, 0x6c0a580f,
- 0xc9d904c4, 0x67ab9042, 0xc278cc89, 0x7a48c994, 0xdf9b955f,
- 0x71e901d9, 0xd43a5d12, 0x01880be3, 0xa45b5728, 0x0a29c3ae,
- 0xaffa9f65, 0x17ca9a78, 0xb219c6b3, 0x1c6b5235, 0xb9b80efe,
- 0xf7088e0c, 0x52dbd2c7, 0xfca94641, 0x597a1a8a, 0xe14a1f97,
- 0x4499435c, 0xeaebd7da, 0x4f388b11, 0x9a8adde0, 0x3f59812b,
- 0x912b15ad, 0x34f84966, 0x8cc84c7b, 0x291b10b0, 0x87698436,
- 0x22bad8fd, 0x5a0ff408, 0xffdca8c3, 0x51ae3c45, 0xf47d608e,
- 0x4c4d6593, 0xe99e3958, 0x47ecadde, 0xe23ff115, 0x378da7e4,
- 0x925efb2f, 0x3c2c6fa9, 0x99ff3362, 0x21cf367f, 0x841c6ab4,
- 0x2a6efe32, 0x8fbda2f9, 0xc10d220b, 0x64de7ec0, 0xcaacea46,
- 0x6f7fb68d, 0xd74fb390, 0x729cef5b, 0xdcee7bdd, 0x793d2716,
- 0xac8f71e7, 0x095c2d2c, 0xa72eb9aa, 0x02fde561, 0xbacde07c,
- 0x1f1ebcb7, 0xb16c2831, 0x14bf74fa, 0xd814b01e, 0x7dc7ecd5,
- 0xd3b57853, 0x76662498, 0xce562185, 0x6b857d4e, 0xc5f7e9c8,
- 0x6024b503, 0xb596e3f2, 0x1045bf39, 0xbe372bbf, 0x1be47774,
- 0xa3d47269, 0x06072ea2, 0xa875ba24, 0x0da6e6ef, 0x4316661d,
- 0xe6c53ad6, 0x48b7ae50, 0xed64f29b, 0x5554f786, 0xf087ab4d,
- 0x5ef53fcb, 0xfb266300, 0x2e9435f1, 0x8b47693a, 0x2535fdbc,
- 0x80e6a177, 0x38d6a46a, 0x9d05f8a1, 0x33776c27, 0x96a430ec,
- 0xee111c19, 0x4bc240d2, 0xe5b0d454, 0x4063889f, 0xf8538d82,
- 0x5d80d149, 0xf3f245cf, 0x56211904, 0x83934ff5, 0x2640133e,
- 0x883287b8, 0x2de1db73, 0x95d1de6e, 0x300282a5, 0x9e701623,
- 0x3ba34ae8, 0x7513ca1a, 0xd0c096d1, 0x7eb20257, 0xdb615e9c,
- 0x63515b81, 0xc682074a, 0x68f093cc, 0xcd23cf07, 0x189199f6,
- 0xbd42c53d, 0x133051bb, 0xb6e30d70, 0x0ed3086d, 0xab0054a6,
- 0x0572c020, 0xa0a19ceb, 0xb41ee811, 0x11cdb4da, 0xbfbf205c,
- 0x1a6c7c97, 0xa25c798a, 0x078f2541, 0xa9fdb1c7, 0x0c2eed0c,
- 0xd99cbbfd, 0x7c4fe736, 0xd23d73b0, 0x77ee2f7b, 0xcfde2a66,
- 0x6a0d76ad, 0xc47fe22b, 0x61acbee0, 0x2f1c3e12, 0x8acf62d9,
- 0x24bdf65f, 0x816eaa94, 0x395eaf89, 0x9c8df342, 0x32ff67c4,
- 0x972c3b0f, 0x429e6dfe, 0xe74d3135, 0x493fa5b3, 0xececf978,
- 0x54dcfc65, 0xf10fa0ae, 0x5f7d3428, 0xfaae68e3, 0x821b4416,
- 0x27c818dd, 0x89ba8c5b, 0x2c69d090, 0x9459d58d, 0x318a8946,
- 0x9ff81dc0, 0x3a2b410b, 0xef9917fa, 0x4a4a4b31, 0xe438dfb7,
- 0x41eb837c, 0xf9db8661, 0x5c08daaa, 0xf27a4e2c, 0x57a912e7,
- 0x19199215, 0xbccacede, 0x12b85a58, 0xb76b0693, 0x0f5b038e,
- 0xaa885f45, 0x04facbc3, 0xa1299708, 0x749bc1f9, 0xd1489d32,
- 0x7f3a09b4, 0xdae9557f, 0x62d95062, 0xc70a0ca9, 0x6978982f,
- 0xccabc4e4},
- {0x00000000, 0xb40b77a6, 0x29119f97, 0x9d1ae831, 0x13244ff4,
- 0xa72f3852, 0x3a35d063, 0x8e3ea7c5, 0x674eef33, 0xd3459895,
- 0x4e5f70a4, 0xfa540702, 0x746aa0c7, 0xc061d761, 0x5d7b3f50,
- 0xe97048f6, 0xce9cde67, 0x7a97a9c1, 0xe78d41f0, 0x53863656,
- 0xddb89193, 0x69b3e635, 0xf4a90e04, 0x40a279a2, 0xa9d23154,
- 0x1dd946f2, 0x80c3aec3, 0x34c8d965, 0xbaf67ea0, 0x0efd0906,
- 0x93e7e137, 0x27ec9691, 0x9c39bdcf, 0x2832ca69, 0xb5282258,
- 0x012355fe, 0x8f1df23b, 0x3b16859d, 0xa60c6dac, 0x12071a0a,
- 0xfb7752fc, 0x4f7c255a, 0xd266cd6b, 0x666dbacd, 0xe8531d08,
- 0x5c586aae, 0xc142829f, 0x7549f539, 0x52a563a8, 0xe6ae140e,
- 0x7bb4fc3f, 0xcfbf8b99, 0x41812c5c, 0xf58a5bfa, 0x6890b3cb,
- 0xdc9bc46d, 0x35eb8c9b, 0x81e0fb3d, 0x1cfa130c, 0xa8f164aa,
- 0x26cfc36f, 0x92c4b4c9, 0x0fde5cf8, 0xbbd52b5e, 0x79750b44,
- 0xcd7e7ce2, 0x506494d3, 0xe46fe375, 0x6a5144b0, 0xde5a3316,
- 0x4340db27, 0xf74bac81, 0x1e3be477, 0xaa3093d1, 0x372a7be0,
- 0x83210c46, 0x0d1fab83, 0xb914dc25, 0x240e3414, 0x900543b2,
- 0xb7e9d523, 0x03e2a285, 0x9ef84ab4, 0x2af33d12, 0xa4cd9ad7,
- 0x10c6ed71, 0x8ddc0540, 0x39d772e6, 0xd0a73a10, 0x64ac4db6,
- 0xf9b6a587, 0x4dbdd221, 0xc38375e4, 0x77880242, 0xea92ea73,
- 0x5e999dd5, 0xe54cb68b, 0x5147c12d, 0xcc5d291c, 0x78565eba,
- 0xf668f97f, 0x42638ed9, 0xdf7966e8, 0x6b72114e, 0x820259b8,
- 0x36092e1e, 0xab13c62f, 0x1f18b189, 0x9126164c, 0x252d61ea,
- 0xb83789db, 0x0c3cfe7d, 0x2bd068ec, 0x9fdb1f4a, 0x02c1f77b,
- 0xb6ca80dd, 0x38f42718, 0x8cff50be, 0x11e5b88f, 0xa5eecf29,
- 0x4c9e87df, 0xf895f079, 0x658f1848, 0xd1846fee, 0x5fbac82b,
- 0xebb1bf8d, 0x76ab57bc, 0xc2a0201a, 0xf2ea1688, 0x46e1612e,
- 0xdbfb891f, 0x6ff0feb9, 0xe1ce597c, 0x55c52eda, 0xc8dfc6eb,
- 0x7cd4b14d, 0x95a4f9bb, 0x21af8e1d, 0xbcb5662c, 0x08be118a,
- 0x8680b64f, 0x328bc1e9, 0xaf9129d8, 0x1b9a5e7e, 0x3c76c8ef,
- 0x887dbf49, 0x15675778, 0xa16c20de, 0x2f52871b, 0x9b59f0bd,
- 0x0643188c, 0xb2486f2a, 0x5b3827dc, 0xef33507a, 0x7229b84b,
- 0xc622cfed, 0x481c6828, 0xfc171f8e, 0x610df7bf, 0xd5068019,
- 0x6ed3ab47, 0xdad8dce1, 0x47c234d0, 0xf3c94376, 0x7df7e4b3,
- 0xc9fc9315, 0x54e67b24, 0xe0ed0c82, 0x099d4474, 0xbd9633d2,
- 0x208cdbe3, 0x9487ac45, 0x1ab90b80, 0xaeb27c26, 0x33a89417,
- 0x87a3e3b1, 0xa04f7520, 0x14440286, 0x895eeab7, 0x3d559d11,
- 0xb36b3ad4, 0x07604d72, 0x9a7aa543, 0x2e71d2e5, 0xc7019a13,
- 0x730aedb5, 0xee100584, 0x5a1b7222, 0xd425d5e7, 0x602ea241,
- 0xfd344a70, 0x493f3dd6, 0x8b9f1dcc, 0x3f946a6a, 0xa28e825b,
- 0x1685f5fd, 0x98bb5238, 0x2cb0259e, 0xb1aacdaf, 0x05a1ba09,
- 0xecd1f2ff, 0x58da8559, 0xc5c06d68, 0x71cb1ace, 0xfff5bd0b,
- 0x4bfecaad, 0xd6e4229c, 0x62ef553a, 0x4503c3ab, 0xf108b40d,
- 0x6c125c3c, 0xd8192b9a, 0x56278c5f, 0xe22cfbf9, 0x7f3613c8,
- 0xcb3d646e, 0x224d2c98, 0x96465b3e, 0x0b5cb30f, 0xbf57c4a9,
- 0x3169636c, 0x856214ca, 0x1878fcfb, 0xac738b5d, 0x17a6a003,
- 0xa3add7a5, 0x3eb73f94, 0x8abc4832, 0x0482eff7, 0xb0899851,
- 0x2d937060, 0x999807c6, 0x70e84f30, 0xc4e33896, 0x59f9d0a7,
- 0xedf2a701, 0x63cc00c4, 0xd7c77762, 0x4add9f53, 0xfed6e8f5,
- 0xd93a7e64, 0x6d3109c2, 0xf02be1f3, 0x44209655, 0xca1e3190,
- 0x7e154636, 0xe30fae07, 0x5704d9a1, 0xbe749157, 0x0a7fe6f1,
- 0x97650ec0, 0x236e7966, 0xad50dea3, 0x195ba905, 0x84414134,
- 0x304a3692},
- {0x00000000, 0x9e00aacc, 0x7d072542, 0xe3078f8e, 0xfa0e4a84,
- 0x640ee048, 0x87096fc6, 0x1909c50a, 0xb51be5d3, 0x2b1b4f1f,
- 0xc81cc091, 0x561c6a5d, 0x4f15af57, 0xd115059b, 0x32128a15,
- 0xac1220d9, 0x2b31bb7c, 0xb53111b0, 0x56369e3e, 0xc83634f2,
- 0xd13ff1f8, 0x4f3f5b34, 0xac38d4ba, 0x32387e76, 0x9e2a5eaf,
- 0x002af463, 0xe32d7bed, 0x7d2dd121, 0x6424142b, 0xfa24bee7,
- 0x19233169, 0x87239ba5, 0x566276f9, 0xc862dc35, 0x2b6553bb,
- 0xb565f977, 0xac6c3c7d, 0x326c96b1, 0xd16b193f, 0x4f6bb3f3,
- 0xe379932a, 0x7d7939e6, 0x9e7eb668, 0x007e1ca4, 0x1977d9ae,
- 0x87777362, 0x6470fcec, 0xfa705620, 0x7d53cd85, 0xe3536749,
- 0x0054e8c7, 0x9e54420b, 0x875d8701, 0x195d2dcd, 0xfa5aa243,
- 0x645a088f, 0xc8482856, 0x5648829a, 0xb54f0d14, 0x2b4fa7d8,
- 0x324662d2, 0xac46c81e, 0x4f414790, 0xd141ed5c, 0xedc29d29,
- 0x73c237e5, 0x90c5b86b, 0x0ec512a7, 0x17ccd7ad, 0x89cc7d61,
- 0x6acbf2ef, 0xf4cb5823, 0x58d978fa, 0xc6d9d236, 0x25de5db8,
- 0xbbdef774, 0xa2d7327e, 0x3cd798b2, 0xdfd0173c, 0x41d0bdf0,
- 0xc6f32655, 0x58f38c99, 0xbbf40317, 0x25f4a9db, 0x3cfd6cd1,
- 0xa2fdc61d, 0x41fa4993, 0xdffae35f, 0x73e8c386, 0xede8694a,
- 0x0eefe6c4, 0x90ef4c08, 0x89e68902, 0x17e623ce, 0xf4e1ac40,
- 0x6ae1068c, 0xbba0ebd0, 0x25a0411c, 0xc6a7ce92, 0x58a7645e,
- 0x41aea154, 0xdfae0b98, 0x3ca98416, 0xa2a92eda, 0x0ebb0e03,
- 0x90bba4cf, 0x73bc2b41, 0xedbc818d, 0xf4b54487, 0x6ab5ee4b,
- 0x89b261c5, 0x17b2cb09, 0x909150ac, 0x0e91fa60, 0xed9675ee,
- 0x7396df22, 0x6a9f1a28, 0xf49fb0e4, 0x17983f6a, 0x899895a6,
- 0x258ab57f, 0xbb8a1fb3, 0x588d903d, 0xc68d3af1, 0xdf84fffb,
- 0x41845537, 0xa283dab9, 0x3c837075, 0xda853b53, 0x4485919f,
- 0xa7821e11, 0x3982b4dd, 0x208b71d7, 0xbe8bdb1b, 0x5d8c5495,
- 0xc38cfe59, 0x6f9ede80, 0xf19e744c, 0x1299fbc2, 0x8c99510e,
- 0x95909404, 0x0b903ec8, 0xe897b146, 0x76971b8a, 0xf1b4802f,
- 0x6fb42ae3, 0x8cb3a56d, 0x12b30fa1, 0x0bbacaab, 0x95ba6067,
- 0x76bdefe9, 0xe8bd4525, 0x44af65fc, 0xdaafcf30, 0x39a840be,
- 0xa7a8ea72, 0xbea12f78, 0x20a185b4, 0xc3a60a3a, 0x5da6a0f6,
- 0x8ce74daa, 0x12e7e766, 0xf1e068e8, 0x6fe0c224, 0x76e9072e,
- 0xe8e9ade2, 0x0bee226c, 0x95ee88a0, 0x39fca879, 0xa7fc02b5,
- 0x44fb8d3b, 0xdafb27f7, 0xc3f2e2fd, 0x5df24831, 0xbef5c7bf,
- 0x20f56d73, 0xa7d6f6d6, 0x39d65c1a, 0xdad1d394, 0x44d17958,
- 0x5dd8bc52, 0xc3d8169e, 0x20df9910, 0xbedf33dc, 0x12cd1305,
- 0x8ccdb9c9, 0x6fca3647, 0xf1ca9c8b, 0xe8c35981, 0x76c3f34d,
- 0x95c47cc3, 0x0bc4d60f, 0x3747a67a, 0xa9470cb6, 0x4a408338,
- 0xd44029f4, 0xcd49ecfe, 0x53494632, 0xb04ec9bc, 0x2e4e6370,
- 0x825c43a9, 0x1c5ce965, 0xff5b66eb, 0x615bcc27, 0x7852092d,
- 0xe652a3e1, 0x05552c6f, 0x9b5586a3, 0x1c761d06, 0x8276b7ca,
- 0x61713844, 0xff719288, 0xe6785782, 0x7878fd4e, 0x9b7f72c0,
- 0x057fd80c, 0xa96df8d5, 0x376d5219, 0xd46add97, 0x4a6a775b,
- 0x5363b251, 0xcd63189d, 0x2e649713, 0xb0643ddf, 0x6125d083,
- 0xff257a4f, 0x1c22f5c1, 0x82225f0d, 0x9b2b9a07, 0x052b30cb,
- 0xe62cbf45, 0x782c1589, 0xd43e3550, 0x4a3e9f9c, 0xa9391012,
- 0x3739bade, 0x2e307fd4, 0xb030d518, 0x53375a96, 0xcd37f05a,
- 0x4a146bff, 0xd414c133, 0x37134ebd, 0xa913e471, 0xb01a217b,
- 0x2e1a8bb7, 0xcd1d0439, 0x531daef5, 0xff0f8e2c, 0x610f24e0,
- 0x8208ab6e, 0x1c0801a2, 0x0501c4a8, 0x9b016e64, 0x7806e1ea,
- 0xe6064b26}};
-
-#endif
-
-#endif
-
-#if N == 3
-
-#if W == 8
-
-local const z_crc_t FAR crc_braid_table[][256] = {
- {0x00000000, 0x81256527, 0xd93bcc0f, 0x581ea928, 0x69069e5f,
- 0xe823fb78, 0xb03d5250, 0x31183777, 0xd20d3cbe, 0x53285999,
- 0x0b36f0b1, 0x8a139596, 0xbb0ba2e1, 0x3a2ec7c6, 0x62306eee,
- 0xe3150bc9, 0x7f6b7f3d, 0xfe4e1a1a, 0xa650b332, 0x2775d615,
- 0x166de162, 0x97488445, 0xcf562d6d, 0x4e73484a, 0xad664383,
- 0x2c4326a4, 0x745d8f8c, 0xf578eaab, 0xc460dddc, 0x4545b8fb,
- 0x1d5b11d3, 0x9c7e74f4, 0xfed6fe7a, 0x7ff39b5d, 0x27ed3275,
- 0xa6c85752, 0x97d06025, 0x16f50502, 0x4eebac2a, 0xcfcec90d,
- 0x2cdbc2c4, 0xadfea7e3, 0xf5e00ecb, 0x74c56bec, 0x45dd5c9b,
- 0xc4f839bc, 0x9ce69094, 0x1dc3f5b3, 0x81bd8147, 0x0098e460,
- 0x58864d48, 0xd9a3286f, 0xe8bb1f18, 0x699e7a3f, 0x3180d317,
- 0xb0a5b630, 0x53b0bdf9, 0xd295d8de, 0x8a8b71f6, 0x0bae14d1,
- 0x3ab623a6, 0xbb934681, 0xe38defa9, 0x62a88a8e, 0x26dcfab5,
- 0xa7f99f92, 0xffe736ba, 0x7ec2539d, 0x4fda64ea, 0xceff01cd,
- 0x96e1a8e5, 0x17c4cdc2, 0xf4d1c60b, 0x75f4a32c, 0x2dea0a04,
- 0xaccf6f23, 0x9dd75854, 0x1cf23d73, 0x44ec945b, 0xc5c9f17c,
- 0x59b78588, 0xd892e0af, 0x808c4987, 0x01a92ca0, 0x30b11bd7,
- 0xb1947ef0, 0xe98ad7d8, 0x68afb2ff, 0x8bbab936, 0x0a9fdc11,
- 0x52817539, 0xd3a4101e, 0xe2bc2769, 0x6399424e, 0x3b87eb66,
- 0xbaa28e41, 0xd80a04cf, 0x592f61e8, 0x0131c8c0, 0x8014ade7,
- 0xb10c9a90, 0x3029ffb7, 0x6837569f, 0xe91233b8, 0x0a073871,
- 0x8b225d56, 0xd33cf47e, 0x52199159, 0x6301a62e, 0xe224c309,
- 0xba3a6a21, 0x3b1f0f06, 0xa7617bf2, 0x26441ed5, 0x7e5ab7fd,
- 0xff7fd2da, 0xce67e5ad, 0x4f42808a, 0x175c29a2, 0x96794c85,
- 0x756c474c, 0xf449226b, 0xac578b43, 0x2d72ee64, 0x1c6ad913,
- 0x9d4fbc34, 0xc551151c, 0x4474703b, 0x4db9f56a, 0xcc9c904d,
- 0x94823965, 0x15a75c42, 0x24bf6b35, 0xa59a0e12, 0xfd84a73a,
- 0x7ca1c21d, 0x9fb4c9d4, 0x1e91acf3, 0x468f05db, 0xc7aa60fc,
- 0xf6b2578b, 0x779732ac, 0x2f899b84, 0xaeacfea3, 0x32d28a57,
- 0xb3f7ef70, 0xebe94658, 0x6acc237f, 0x5bd41408, 0xdaf1712f,
- 0x82efd807, 0x03cabd20, 0xe0dfb6e9, 0x61fad3ce, 0x39e47ae6,
- 0xb8c11fc1, 0x89d928b6, 0x08fc4d91, 0x50e2e4b9, 0xd1c7819e,
- 0xb36f0b10, 0x324a6e37, 0x6a54c71f, 0xeb71a238, 0xda69954f,
- 0x5b4cf068, 0x03525940, 0x82773c67, 0x616237ae, 0xe0475289,
- 0xb859fba1, 0x397c9e86, 0x0864a9f1, 0x8941ccd6, 0xd15f65fe,
- 0x507a00d9, 0xcc04742d, 0x4d21110a, 0x153fb822, 0x941add05,
- 0xa502ea72, 0x24278f55, 0x7c39267d, 0xfd1c435a, 0x1e094893,
- 0x9f2c2db4, 0xc732849c, 0x4617e1bb, 0x770fd6cc, 0xf62ab3eb,
- 0xae341ac3, 0x2f117fe4, 0x6b650fdf, 0xea406af8, 0xb25ec3d0,
- 0x337ba6f7, 0x02639180, 0x8346f4a7, 0xdb585d8f, 0x5a7d38a8,
- 0xb9683361, 0x384d5646, 0x6053ff6e, 0xe1769a49, 0xd06ead3e,
- 0x514bc819, 0x09556131, 0x88700416, 0x140e70e2, 0x952b15c5,
- 0xcd35bced, 0x4c10d9ca, 0x7d08eebd, 0xfc2d8b9a, 0xa43322b2,
- 0x25164795, 0xc6034c5c, 0x4726297b, 0x1f388053, 0x9e1de574,
- 0xaf05d203, 0x2e20b724, 0x763e1e0c, 0xf71b7b2b, 0x95b3f1a5,
- 0x14969482, 0x4c883daa, 0xcdad588d, 0xfcb56ffa, 0x7d900add,
- 0x258ea3f5, 0xa4abc6d2, 0x47becd1b, 0xc69ba83c, 0x9e850114,
- 0x1fa06433, 0x2eb85344, 0xaf9d3663, 0xf7839f4b, 0x76a6fa6c,
- 0xead88e98, 0x6bfdebbf, 0x33e34297, 0xb2c627b0, 0x83de10c7,
- 0x02fb75e0, 0x5ae5dcc8, 0xdbc0b9ef, 0x38d5b226, 0xb9f0d701,
- 0xe1ee7e29, 0x60cb1b0e, 0x51d32c79, 0xd0f6495e, 0x88e8e076,
- 0x09cd8551},
- {0x00000000, 0x9b73ead4, 0xed96d3e9, 0x76e5393d, 0x005ca193,
- 0x9b2f4b47, 0xedca727a, 0x76b998ae, 0x00b94326, 0x9bcaa9f2,
- 0xed2f90cf, 0x765c7a1b, 0x00e5e2b5, 0x9b960861, 0xed73315c,
- 0x7600db88, 0x0172864c, 0x9a016c98, 0xece455a5, 0x7797bf71,
- 0x012e27df, 0x9a5dcd0b, 0xecb8f436, 0x77cb1ee2, 0x01cbc56a,
- 0x9ab82fbe, 0xec5d1683, 0x772efc57, 0x019764f9, 0x9ae48e2d,
- 0xec01b710, 0x77725dc4, 0x02e50c98, 0x9996e64c, 0xef73df71,
- 0x740035a5, 0x02b9ad0b, 0x99ca47df, 0xef2f7ee2, 0x745c9436,
- 0x025c4fbe, 0x992fa56a, 0xefca9c57, 0x74b97683, 0x0200ee2d,
- 0x997304f9, 0xef963dc4, 0x74e5d710, 0x03978ad4, 0x98e46000,
- 0xee01593d, 0x7572b3e9, 0x03cb2b47, 0x98b8c193, 0xee5df8ae,
- 0x752e127a, 0x032ec9f2, 0x985d2326, 0xeeb81a1b, 0x75cbf0cf,
- 0x03726861, 0x980182b5, 0xeee4bb88, 0x7597515c, 0x05ca1930,
- 0x9eb9f3e4, 0xe85ccad9, 0x732f200d, 0x0596b8a3, 0x9ee55277,
- 0xe8006b4a, 0x7373819e, 0x05735a16, 0x9e00b0c2, 0xe8e589ff,
- 0x7396632b, 0x052ffb85, 0x9e5c1151, 0xe8b9286c, 0x73cac2b8,
- 0x04b89f7c, 0x9fcb75a8, 0xe92e4c95, 0x725da641, 0x04e43eef,
- 0x9f97d43b, 0xe972ed06, 0x720107d2, 0x0401dc5a, 0x9f72368e,
- 0xe9970fb3, 0x72e4e567, 0x045d7dc9, 0x9f2e971d, 0xe9cbae20,
- 0x72b844f4, 0x072f15a8, 0x9c5cff7c, 0xeab9c641, 0x71ca2c95,
- 0x0773b43b, 0x9c005eef, 0xeae567d2, 0x71968d06, 0x0796568e,
- 0x9ce5bc5a, 0xea008567, 0x71736fb3, 0x07caf71d, 0x9cb91dc9,
- 0xea5c24f4, 0x712fce20, 0x065d93e4, 0x9d2e7930, 0xebcb400d,
- 0x70b8aad9, 0x06013277, 0x9d72d8a3, 0xeb97e19e, 0x70e40b4a,
- 0x06e4d0c2, 0x9d973a16, 0xeb72032b, 0x7001e9ff, 0x06b87151,
- 0x9dcb9b85, 0xeb2ea2b8, 0x705d486c, 0x0b943260, 0x90e7d8b4,
- 0xe602e189, 0x7d710b5d, 0x0bc893f3, 0x90bb7927, 0xe65e401a,
- 0x7d2daace, 0x0b2d7146, 0x905e9b92, 0xe6bba2af, 0x7dc8487b,
- 0x0b71d0d5, 0x90023a01, 0xe6e7033c, 0x7d94e9e8, 0x0ae6b42c,
- 0x91955ef8, 0xe77067c5, 0x7c038d11, 0x0aba15bf, 0x91c9ff6b,
- 0xe72cc656, 0x7c5f2c82, 0x0a5ff70a, 0x912c1dde, 0xe7c924e3,
- 0x7cbace37, 0x0a035699, 0x9170bc4d, 0xe7958570, 0x7ce66fa4,
- 0x09713ef8, 0x9202d42c, 0xe4e7ed11, 0x7f9407c5, 0x092d9f6b,
- 0x925e75bf, 0xe4bb4c82, 0x7fc8a656, 0x09c87dde, 0x92bb970a,
- 0xe45eae37, 0x7f2d44e3, 0x0994dc4d, 0x92e73699, 0xe4020fa4,
- 0x7f71e570, 0x0803b8b4, 0x93705260, 0xe5956b5d, 0x7ee68189,
- 0x085f1927, 0x932cf3f3, 0xe5c9cace, 0x7eba201a, 0x08bafb92,
- 0x93c91146, 0xe52c287b, 0x7e5fc2af, 0x08e65a01, 0x9395b0d5,
- 0xe57089e8, 0x7e03633c, 0x0e5e2b50, 0x952dc184, 0xe3c8f8b9,
- 0x78bb126d, 0x0e028ac3, 0x95716017, 0xe394592a, 0x78e7b3fe,
- 0x0ee76876, 0x959482a2, 0xe371bb9f, 0x7802514b, 0x0ebbc9e5,
- 0x95c82331, 0xe32d1a0c, 0x785ef0d8, 0x0f2cad1c, 0x945f47c8,
- 0xe2ba7ef5, 0x79c99421, 0x0f700c8f, 0x9403e65b, 0xe2e6df66,
- 0x799535b2, 0x0f95ee3a, 0x94e604ee, 0xe2033dd3, 0x7970d707,
- 0x0fc94fa9, 0x94baa57d, 0xe25f9c40, 0x792c7694, 0x0cbb27c8,
- 0x97c8cd1c, 0xe12df421, 0x7a5e1ef5, 0x0ce7865b, 0x97946c8f,
- 0xe17155b2, 0x7a02bf66, 0x0c0264ee, 0x97718e3a, 0xe194b707,
- 0x7ae75dd3, 0x0c5ec57d, 0x972d2fa9, 0xe1c81694, 0x7abbfc40,
- 0x0dc9a184, 0x96ba4b50, 0xe05f726d, 0x7b2c98b9, 0x0d950017,
- 0x96e6eac3, 0xe003d3fe, 0x7b70392a, 0x0d70e2a2, 0x96030876,
- 0xe0e6314b, 0x7b95db9f, 0x0d2c4331, 0x965fa9e5, 0xe0ba90d8,
- 0x7bc97a0c},
- {0x00000000, 0x172864c0, 0x2e50c980, 0x3978ad40, 0x5ca19300,
- 0x4b89f7c0, 0x72f15a80, 0x65d93e40, 0xb9432600, 0xae6b42c0,
- 0x9713ef80, 0x803b8b40, 0xe5e2b500, 0xf2cad1c0, 0xcbb27c80,
- 0xdc9a1840, 0xa9f74a41, 0xbedf2e81, 0x87a783c1, 0x908fe701,
- 0xf556d941, 0xe27ebd81, 0xdb0610c1, 0xcc2e7401, 0x10b46c41,
- 0x079c0881, 0x3ee4a5c1, 0x29ccc101, 0x4c15ff41, 0x5b3d9b81,
- 0x624536c1, 0x756d5201, 0x889f92c3, 0x9fb7f603, 0xa6cf5b43,
- 0xb1e73f83, 0xd43e01c3, 0xc3166503, 0xfa6ec843, 0xed46ac83,
- 0x31dcb4c3, 0x26f4d003, 0x1f8c7d43, 0x08a41983, 0x6d7d27c3,
- 0x7a554303, 0x432dee43, 0x54058a83, 0x2168d882, 0x3640bc42,
- 0x0f381102, 0x181075c2, 0x7dc94b82, 0x6ae12f42, 0x53998202,
- 0x44b1e6c2, 0x982bfe82, 0x8f039a42, 0xb67b3702, 0xa15353c2,
- 0xc48a6d82, 0xd3a20942, 0xeadaa402, 0xfdf2c0c2, 0xca4e23c7,
- 0xdd664707, 0xe41eea47, 0xf3368e87, 0x96efb0c7, 0x81c7d407,
- 0xb8bf7947, 0xaf971d87, 0x730d05c7, 0x64256107, 0x5d5dcc47,
- 0x4a75a887, 0x2fac96c7, 0x3884f207, 0x01fc5f47, 0x16d43b87,
- 0x63b96986, 0x74910d46, 0x4de9a006, 0x5ac1c4c6, 0x3f18fa86,
- 0x28309e46, 0x11483306, 0x066057c6, 0xdafa4f86, 0xcdd22b46,
- 0xf4aa8606, 0xe382e2c6, 0x865bdc86, 0x9173b846, 0xa80b1506,
- 0xbf2371c6, 0x42d1b104, 0x55f9d5c4, 0x6c817884, 0x7ba91c44,
- 0x1e702204, 0x095846c4, 0x3020eb84, 0x27088f44, 0xfb929704,
- 0xecbaf3c4, 0xd5c25e84, 0xc2ea3a44, 0xa7330404, 0xb01b60c4,
- 0x8963cd84, 0x9e4ba944, 0xeb26fb45, 0xfc0e9f85, 0xc57632c5,
- 0xd25e5605, 0xb7876845, 0xa0af0c85, 0x99d7a1c5, 0x8effc505,
- 0x5265dd45, 0x454db985, 0x7c3514c5, 0x6b1d7005, 0x0ec44e45,
- 0x19ec2a85, 0x209487c5, 0x37bce305, 0x4fed41cf, 0x58c5250f,
- 0x61bd884f, 0x7695ec8f, 0x134cd2cf, 0x0464b60f, 0x3d1c1b4f,
- 0x2a347f8f, 0xf6ae67cf, 0xe186030f, 0xd8feae4f, 0xcfd6ca8f,
- 0xaa0ff4cf, 0xbd27900f, 0x845f3d4f, 0x9377598f, 0xe61a0b8e,
- 0xf1326f4e, 0xc84ac20e, 0xdf62a6ce, 0xbabb988e, 0xad93fc4e,
- 0x94eb510e, 0x83c335ce, 0x5f592d8e, 0x4871494e, 0x7109e40e,
- 0x662180ce, 0x03f8be8e, 0x14d0da4e, 0x2da8770e, 0x3a8013ce,
- 0xc772d30c, 0xd05ab7cc, 0xe9221a8c, 0xfe0a7e4c, 0x9bd3400c,
- 0x8cfb24cc, 0xb583898c, 0xa2abed4c, 0x7e31f50c, 0x691991cc,
- 0x50613c8c, 0x4749584c, 0x2290660c, 0x35b802cc, 0x0cc0af8c,
- 0x1be8cb4c, 0x6e85994d, 0x79adfd8d, 0x40d550cd, 0x57fd340d,
- 0x32240a4d, 0x250c6e8d, 0x1c74c3cd, 0x0b5ca70d, 0xd7c6bf4d,
- 0xc0eedb8d, 0xf99676cd, 0xeebe120d, 0x8b672c4d, 0x9c4f488d,
- 0xa537e5cd, 0xb21f810d, 0x85a36208, 0x928b06c8, 0xabf3ab88,
- 0xbcdbcf48, 0xd902f108, 0xce2a95c8, 0xf7523888, 0xe07a5c48,
- 0x3ce04408, 0x2bc820c8, 0x12b08d88, 0x0598e948, 0x6041d708,
- 0x7769b3c8, 0x4e111e88, 0x59397a48, 0x2c542849, 0x3b7c4c89,
- 0x0204e1c9, 0x152c8509, 0x70f5bb49, 0x67dddf89, 0x5ea572c9,
- 0x498d1609, 0x95170e49, 0x823f6a89, 0xbb47c7c9, 0xac6fa309,
- 0xc9b69d49, 0xde9ef989, 0xe7e654c9, 0xf0ce3009, 0x0d3cf0cb,
- 0x1a14940b, 0x236c394b, 0x34445d8b, 0x519d63cb, 0x46b5070b,
- 0x7fcdaa4b, 0x68e5ce8b, 0xb47fd6cb, 0xa357b20b, 0x9a2f1f4b,
- 0x8d077b8b, 0xe8de45cb, 0xfff6210b, 0xc68e8c4b, 0xd1a6e88b,
- 0xa4cbba8a, 0xb3e3de4a, 0x8a9b730a, 0x9db317ca, 0xf86a298a,
- 0xef424d4a, 0xd63ae00a, 0xc11284ca, 0x1d889c8a, 0x0aa0f84a,
- 0x33d8550a, 0x24f031ca, 0x41290f8a, 0x56016b4a, 0x6f79c60a,
- 0x7851a2ca},
- {0x00000000, 0x9fda839e, 0xe4c4017d, 0x7b1e82e3, 0x12f904bb,
- 0x8d238725, 0xf63d05c6, 0x69e78658, 0x25f20976, 0xba288ae8,
- 0xc136080b, 0x5eec8b95, 0x370b0dcd, 0xa8d18e53, 0xd3cf0cb0,
- 0x4c158f2e, 0x4be412ec, 0xd43e9172, 0xaf201391, 0x30fa900f,
- 0x591d1657, 0xc6c795c9, 0xbdd9172a, 0x220394b4, 0x6e161b9a,
- 0xf1cc9804, 0x8ad21ae7, 0x15089979, 0x7cef1f21, 0xe3359cbf,
- 0x982b1e5c, 0x07f19dc2, 0x97c825d8, 0x0812a646, 0x730c24a5,
- 0xecd6a73b, 0x85312163, 0x1aeba2fd, 0x61f5201e, 0xfe2fa380,
- 0xb23a2cae, 0x2de0af30, 0x56fe2dd3, 0xc924ae4d, 0xa0c32815,
- 0x3f19ab8b, 0x44072968, 0xdbddaaf6, 0xdc2c3734, 0x43f6b4aa,
- 0x38e83649, 0xa732b5d7, 0xced5338f, 0x510fb011, 0x2a1132f2,
- 0xb5cbb16c, 0xf9de3e42, 0x6604bddc, 0x1d1a3f3f, 0x82c0bca1,
- 0xeb273af9, 0x74fdb967, 0x0fe33b84, 0x9039b81a, 0xf4e14df1,
- 0x6b3bce6f, 0x10254c8c, 0x8fffcf12, 0xe618494a, 0x79c2cad4,
- 0x02dc4837, 0x9d06cba9, 0xd1134487, 0x4ec9c719, 0x35d745fa,
- 0xaa0dc664, 0xc3ea403c, 0x5c30c3a2, 0x272e4141, 0xb8f4c2df,
- 0xbf055f1d, 0x20dfdc83, 0x5bc15e60, 0xc41bddfe, 0xadfc5ba6,
- 0x3226d838, 0x49385adb, 0xd6e2d945, 0x9af7566b, 0x052dd5f5,
- 0x7e335716, 0xe1e9d488, 0x880e52d0, 0x17d4d14e, 0x6cca53ad,
- 0xf310d033, 0x63296829, 0xfcf3ebb7, 0x87ed6954, 0x1837eaca,
- 0x71d06c92, 0xee0aef0c, 0x95146def, 0x0aceee71, 0x46db615f,
- 0xd901e2c1, 0xa21f6022, 0x3dc5e3bc, 0x542265e4, 0xcbf8e67a,
- 0xb0e66499, 0x2f3ce707, 0x28cd7ac5, 0xb717f95b, 0xcc097bb8,
- 0x53d3f826, 0x3a347e7e, 0xa5eefde0, 0xdef07f03, 0x412afc9d,
- 0x0d3f73b3, 0x92e5f02d, 0xe9fb72ce, 0x7621f150, 0x1fc67708,
- 0x801cf496, 0xfb027675, 0x64d8f5eb, 0x32b39da3, 0xad691e3d,
- 0xd6779cde, 0x49ad1f40, 0x204a9918, 0xbf901a86, 0xc48e9865,
- 0x5b541bfb, 0x174194d5, 0x889b174b, 0xf38595a8, 0x6c5f1636,
- 0x05b8906e, 0x9a6213f0, 0xe17c9113, 0x7ea6128d, 0x79578f4f,
- 0xe68d0cd1, 0x9d938e32, 0x02490dac, 0x6bae8bf4, 0xf474086a,
- 0x8f6a8a89, 0x10b00917, 0x5ca58639, 0xc37f05a7, 0xb8618744,
- 0x27bb04da, 0x4e5c8282, 0xd186011c, 0xaa9883ff, 0x35420061,
- 0xa57bb87b, 0x3aa13be5, 0x41bfb906, 0xde653a98, 0xb782bcc0,
- 0x28583f5e, 0x5346bdbd, 0xcc9c3e23, 0x8089b10d, 0x1f533293,
- 0x644db070, 0xfb9733ee, 0x9270b5b6, 0x0daa3628, 0x76b4b4cb,
- 0xe96e3755, 0xee9faa97, 0x71452909, 0x0a5babea, 0x95812874,
- 0xfc66ae2c, 0x63bc2db2, 0x18a2af51, 0x87782ccf, 0xcb6da3e1,
- 0x54b7207f, 0x2fa9a29c, 0xb0732102, 0xd994a75a, 0x464e24c4,
- 0x3d50a627, 0xa28a25b9, 0xc652d052, 0x598853cc, 0x2296d12f,
- 0xbd4c52b1, 0xd4abd4e9, 0x4b715777, 0x306fd594, 0xafb5560a,
- 0xe3a0d924, 0x7c7a5aba, 0x0764d859, 0x98be5bc7, 0xf159dd9f,
- 0x6e835e01, 0x159ddce2, 0x8a475f7c, 0x8db6c2be, 0x126c4120,
- 0x6972c3c3, 0xf6a8405d, 0x9f4fc605, 0x0095459b, 0x7b8bc778,
- 0xe45144e6, 0xa844cbc8, 0x379e4856, 0x4c80cab5, 0xd35a492b,
- 0xbabdcf73, 0x25674ced, 0x5e79ce0e, 0xc1a34d90, 0x519af58a,
- 0xce407614, 0xb55ef4f7, 0x2a847769, 0x4363f131, 0xdcb972af,
- 0xa7a7f04c, 0x387d73d2, 0x7468fcfc, 0xebb27f62, 0x90acfd81,
- 0x0f767e1f, 0x6691f847, 0xf94b7bd9, 0x8255f93a, 0x1d8f7aa4,
- 0x1a7ee766, 0x85a464f8, 0xfebae61b, 0x61606585, 0x0887e3dd,
- 0x975d6043, 0xec43e2a0, 0x7399613e, 0x3f8cee10, 0xa0566d8e,
- 0xdb48ef6d, 0x44926cf3, 0x2d75eaab, 0xb2af6935, 0xc9b1ebd6,
- 0x566b6848},
- {0x00000000, 0x65673b46, 0xcace768c, 0xafa94dca, 0x4eedeb59,
- 0x2b8ad01f, 0x84239dd5, 0xe144a693, 0x9ddbd6b2, 0xf8bcedf4,
- 0x5715a03e, 0x32729b78, 0xd3363deb, 0xb65106ad, 0x19f84b67,
- 0x7c9f7021, 0xe0c6ab25, 0x85a19063, 0x2a08dda9, 0x4f6fe6ef,
- 0xae2b407c, 0xcb4c7b3a, 0x64e536f0, 0x01820db6, 0x7d1d7d97,
- 0x187a46d1, 0xb7d30b1b, 0xd2b4305d, 0x33f096ce, 0x5697ad88,
- 0xf93ee042, 0x9c59db04, 0x1afc500b, 0x7f9b6b4d, 0xd0322687,
- 0xb5551dc1, 0x5411bb52, 0x31768014, 0x9edfcdde, 0xfbb8f698,
- 0x872786b9, 0xe240bdff, 0x4de9f035, 0x288ecb73, 0xc9ca6de0,
- 0xacad56a6, 0x03041b6c, 0x6663202a, 0xfa3afb2e, 0x9f5dc068,
- 0x30f48da2, 0x5593b6e4, 0xb4d71077, 0xd1b02b31, 0x7e1966fb,
- 0x1b7e5dbd, 0x67e12d9c, 0x028616da, 0xad2f5b10, 0xc8486056,
- 0x290cc6c5, 0x4c6bfd83, 0xe3c2b049, 0x86a58b0f, 0x35f8a016,
- 0x509f9b50, 0xff36d69a, 0x9a51eddc, 0x7b154b4f, 0x1e727009,
- 0xb1db3dc3, 0xd4bc0685, 0xa82376a4, 0xcd444de2, 0x62ed0028,
- 0x078a3b6e, 0xe6ce9dfd, 0x83a9a6bb, 0x2c00eb71, 0x4967d037,
- 0xd53e0b33, 0xb0593075, 0x1ff07dbf, 0x7a9746f9, 0x9bd3e06a,
- 0xfeb4db2c, 0x511d96e6, 0x347aada0, 0x48e5dd81, 0x2d82e6c7,
- 0x822bab0d, 0xe74c904b, 0x060836d8, 0x636f0d9e, 0xccc64054,
- 0xa9a17b12, 0x2f04f01d, 0x4a63cb5b, 0xe5ca8691, 0x80adbdd7,
- 0x61e91b44, 0x048e2002, 0xab276dc8, 0xce40568e, 0xb2df26af,
- 0xd7b81de9, 0x78115023, 0x1d766b65, 0xfc32cdf6, 0x9955f6b0,
- 0x36fcbb7a, 0x539b803c, 0xcfc25b38, 0xaaa5607e, 0x050c2db4,
- 0x606b16f2, 0x812fb061, 0xe4488b27, 0x4be1c6ed, 0x2e86fdab,
- 0x52198d8a, 0x377eb6cc, 0x98d7fb06, 0xfdb0c040, 0x1cf466d3,
- 0x79935d95, 0xd63a105f, 0xb35d2b19, 0x6bf1402c, 0x0e967b6a,
- 0xa13f36a0, 0xc4580de6, 0x251cab75, 0x407b9033, 0xefd2ddf9,
- 0x8ab5e6bf, 0xf62a969e, 0x934dadd8, 0x3ce4e012, 0x5983db54,
- 0xb8c77dc7, 0xdda04681, 0x72090b4b, 0x176e300d, 0x8b37eb09,
- 0xee50d04f, 0x41f99d85, 0x249ea6c3, 0xc5da0050, 0xa0bd3b16,
- 0x0f1476dc, 0x6a734d9a, 0x16ec3dbb, 0x738b06fd, 0xdc224b37,
- 0xb9457071, 0x5801d6e2, 0x3d66eda4, 0x92cfa06e, 0xf7a89b28,
- 0x710d1027, 0x146a2b61, 0xbbc366ab, 0xdea45ded, 0x3fe0fb7e,
- 0x5a87c038, 0xf52e8df2, 0x9049b6b4, 0xecd6c695, 0x89b1fdd3,
- 0x2618b019, 0x437f8b5f, 0xa23b2dcc, 0xc75c168a, 0x68f55b40,
- 0x0d926006, 0x91cbbb02, 0xf4ac8044, 0x5b05cd8e, 0x3e62f6c8,
- 0xdf26505b, 0xba416b1d, 0x15e826d7, 0x708f1d91, 0x0c106db0,
- 0x697756f6, 0xc6de1b3c, 0xa3b9207a, 0x42fd86e9, 0x279abdaf,
- 0x8833f065, 0xed54cb23, 0x5e09e03a, 0x3b6edb7c, 0x94c796b6,
- 0xf1a0adf0, 0x10e40b63, 0x75833025, 0xda2a7def, 0xbf4d46a9,
- 0xc3d23688, 0xa6b50dce, 0x091c4004, 0x6c7b7b42, 0x8d3fddd1,
- 0xe858e697, 0x47f1ab5d, 0x2296901b, 0xbecf4b1f, 0xdba87059,
- 0x74013d93, 0x116606d5, 0xf022a046, 0x95459b00, 0x3aecd6ca,
- 0x5f8bed8c, 0x23149dad, 0x4673a6eb, 0xe9daeb21, 0x8cbdd067,
- 0x6df976f4, 0x089e4db2, 0xa7370078, 0xc2503b3e, 0x44f5b031,
- 0x21928b77, 0x8e3bc6bd, 0xeb5cfdfb, 0x0a185b68, 0x6f7f602e,
- 0xc0d62de4, 0xa5b116a2, 0xd92e6683, 0xbc495dc5, 0x13e0100f,
- 0x76872b49, 0x97c38dda, 0xf2a4b69c, 0x5d0dfb56, 0x386ac010,
- 0xa4331b14, 0xc1542052, 0x6efd6d98, 0x0b9a56de, 0xeadef04d,
- 0x8fb9cb0b, 0x201086c1, 0x4577bd87, 0x39e8cda6, 0x5c8ff6e0,
- 0xf326bb2a, 0x9641806c, 0x770526ff, 0x12621db9, 0xbdcb5073,
- 0xd8ac6b35},
- {0x00000000, 0xd7e28058, 0x74b406f1, 0xa35686a9, 0xe9680de2,
- 0x3e8a8dba, 0x9ddc0b13, 0x4a3e8b4b, 0x09a11d85, 0xde439ddd,
- 0x7d151b74, 0xaaf79b2c, 0xe0c91067, 0x372b903f, 0x947d1696,
- 0x439f96ce, 0x13423b0a, 0xc4a0bb52, 0x67f63dfb, 0xb014bda3,
- 0xfa2a36e8, 0x2dc8b6b0, 0x8e9e3019, 0x597cb041, 0x1ae3268f,
- 0xcd01a6d7, 0x6e57207e, 0xb9b5a026, 0xf38b2b6d, 0x2469ab35,
- 0x873f2d9c, 0x50ddadc4, 0x26847614, 0xf166f64c, 0x523070e5,
- 0x85d2f0bd, 0xcfec7bf6, 0x180efbae, 0xbb587d07, 0x6cbafd5f,
- 0x2f256b91, 0xf8c7ebc9, 0x5b916d60, 0x8c73ed38, 0xc64d6673,
- 0x11afe62b, 0xb2f96082, 0x651be0da, 0x35c64d1e, 0xe224cd46,
- 0x41724bef, 0x9690cbb7, 0xdcae40fc, 0x0b4cc0a4, 0xa81a460d,
- 0x7ff8c655, 0x3c67509b, 0xeb85d0c3, 0x48d3566a, 0x9f31d632,
- 0xd50f5d79, 0x02eddd21, 0xa1bb5b88, 0x7659dbd0, 0x4d08ec28,
- 0x9aea6c70, 0x39bcead9, 0xee5e6a81, 0xa460e1ca, 0x73826192,
- 0xd0d4e73b, 0x07366763, 0x44a9f1ad, 0x934b71f5, 0x301df75c,
- 0xe7ff7704, 0xadc1fc4f, 0x7a237c17, 0xd975fabe, 0x0e977ae6,
- 0x5e4ad722, 0x89a8577a, 0x2afed1d3, 0xfd1c518b, 0xb722dac0,
- 0x60c05a98, 0xc396dc31, 0x14745c69, 0x57ebcaa7, 0x80094aff,
- 0x235fcc56, 0xf4bd4c0e, 0xbe83c745, 0x6961471d, 0xca37c1b4,
- 0x1dd541ec, 0x6b8c9a3c, 0xbc6e1a64, 0x1f389ccd, 0xc8da1c95,
- 0x82e497de, 0x55061786, 0xf650912f, 0x21b21177, 0x622d87b9,
- 0xb5cf07e1, 0x16998148, 0xc17b0110, 0x8b458a5b, 0x5ca70a03,
- 0xfff18caa, 0x28130cf2, 0x78cea136, 0xaf2c216e, 0x0c7aa7c7,
- 0xdb98279f, 0x91a6acd4, 0x46442c8c, 0xe512aa25, 0x32f02a7d,
- 0x716fbcb3, 0xa68d3ceb, 0x05dbba42, 0xd2393a1a, 0x9807b151,
- 0x4fe53109, 0xecb3b7a0, 0x3b5137f8, 0x9a11d850, 0x4df35808,
- 0xeea5dea1, 0x39475ef9, 0x7379d5b2, 0xa49b55ea, 0x07cdd343,
- 0xd02f531b, 0x93b0c5d5, 0x4452458d, 0xe704c324, 0x30e6437c,
- 0x7ad8c837, 0xad3a486f, 0x0e6ccec6, 0xd98e4e9e, 0x8953e35a,
- 0x5eb16302, 0xfde7e5ab, 0x2a0565f3, 0x603beeb8, 0xb7d96ee0,
- 0x148fe849, 0xc36d6811, 0x80f2fedf, 0x57107e87, 0xf446f82e,
- 0x23a47876, 0x699af33d, 0xbe787365, 0x1d2ef5cc, 0xcacc7594,
- 0xbc95ae44, 0x6b772e1c, 0xc821a8b5, 0x1fc328ed, 0x55fda3a6,
- 0x821f23fe, 0x2149a557, 0xf6ab250f, 0xb534b3c1, 0x62d63399,
- 0xc180b530, 0x16623568, 0x5c5cbe23, 0x8bbe3e7b, 0x28e8b8d2,
- 0xff0a388a, 0xafd7954e, 0x78351516, 0xdb6393bf, 0x0c8113e7,
- 0x46bf98ac, 0x915d18f4, 0x320b9e5d, 0xe5e91e05, 0xa67688cb,
- 0x71940893, 0xd2c28e3a, 0x05200e62, 0x4f1e8529, 0x98fc0571,
- 0x3baa83d8, 0xec480380, 0xd7193478, 0x00fbb420, 0xa3ad3289,
- 0x744fb2d1, 0x3e71399a, 0xe993b9c2, 0x4ac53f6b, 0x9d27bf33,
- 0xdeb829fd, 0x095aa9a5, 0xaa0c2f0c, 0x7deeaf54, 0x37d0241f,
- 0xe032a447, 0x436422ee, 0x9486a2b6, 0xc45b0f72, 0x13b98f2a,
- 0xb0ef0983, 0x670d89db, 0x2d330290, 0xfad182c8, 0x59870461,
- 0x8e658439, 0xcdfa12f7, 0x1a1892af, 0xb94e1406, 0x6eac945e,
- 0x24921f15, 0xf3709f4d, 0x502619e4, 0x87c499bc, 0xf19d426c,
- 0x267fc234, 0x8529449d, 0x52cbc4c5, 0x18f54f8e, 0xcf17cfd6,
- 0x6c41497f, 0xbba3c927, 0xf83c5fe9, 0x2fdedfb1, 0x8c885918,
- 0x5b6ad940, 0x1154520b, 0xc6b6d253, 0x65e054fa, 0xb202d4a2,
- 0xe2df7966, 0x353df93e, 0x966b7f97, 0x4189ffcf, 0x0bb77484,
- 0xdc55f4dc, 0x7f037275, 0xa8e1f22d, 0xeb7e64e3, 0x3c9ce4bb,
- 0x9fca6212, 0x4828e24a, 0x02166901, 0xd5f4e959, 0x76a26ff0,
- 0xa140efa8},
- {0x00000000, 0xef52b6e1, 0x05d46b83, 0xea86dd62, 0x0ba8d706,
- 0xe4fa61e7, 0x0e7cbc85, 0xe12e0a64, 0x1751ae0c, 0xf80318ed,
- 0x1285c58f, 0xfdd7736e, 0x1cf9790a, 0xf3abcfeb, 0x192d1289,
- 0xf67fa468, 0x2ea35c18, 0xc1f1eaf9, 0x2b77379b, 0xc425817a,
- 0x250b8b1e, 0xca593dff, 0x20dfe09d, 0xcf8d567c, 0x39f2f214,
- 0xd6a044f5, 0x3c269997, 0xd3742f76, 0x325a2512, 0xdd0893f3,
- 0x378e4e91, 0xd8dcf870, 0x5d46b830, 0xb2140ed1, 0x5892d3b3,
- 0xb7c06552, 0x56ee6f36, 0xb9bcd9d7, 0x533a04b5, 0xbc68b254,
- 0x4a17163c, 0xa545a0dd, 0x4fc37dbf, 0xa091cb5e, 0x41bfc13a,
- 0xaeed77db, 0x446baab9, 0xab391c58, 0x73e5e428, 0x9cb752c9,
- 0x76318fab, 0x9963394a, 0x784d332e, 0x971f85cf, 0x7d9958ad,
- 0x92cbee4c, 0x64b44a24, 0x8be6fcc5, 0x616021a7, 0x8e329746,
- 0x6f1c9d22, 0x804e2bc3, 0x6ac8f6a1, 0x859a4040, 0xba8d7060,
- 0x55dfc681, 0xbf591be3, 0x500bad02, 0xb125a766, 0x5e771187,
- 0xb4f1cce5, 0x5ba37a04, 0xaddcde6c, 0x428e688d, 0xa808b5ef,
- 0x475a030e, 0xa674096a, 0x4926bf8b, 0xa3a062e9, 0x4cf2d408,
- 0x942e2c78, 0x7b7c9a99, 0x91fa47fb, 0x7ea8f11a, 0x9f86fb7e,
- 0x70d44d9f, 0x9a5290fd, 0x7500261c, 0x837f8274, 0x6c2d3495,
- 0x86abe9f7, 0x69f95f16, 0x88d75572, 0x6785e393, 0x8d033ef1,
- 0x62518810, 0xe7cbc850, 0x08997eb1, 0xe21fa3d3, 0x0d4d1532,
- 0xec631f56, 0x0331a9b7, 0xe9b774d5, 0x06e5c234, 0xf09a665c,
- 0x1fc8d0bd, 0xf54e0ddf, 0x1a1cbb3e, 0xfb32b15a, 0x146007bb,
- 0xfee6dad9, 0x11b46c38, 0xc9689448, 0x263a22a9, 0xccbcffcb,
- 0x23ee492a, 0xc2c0434e, 0x2d92f5af, 0xc71428cd, 0x28469e2c,
- 0xde393a44, 0x316b8ca5, 0xdbed51c7, 0x34bfe726, 0xd591ed42,
- 0x3ac35ba3, 0xd04586c1, 0x3f173020, 0xae6be681, 0x41395060,
- 0xabbf8d02, 0x44ed3be3, 0xa5c33187, 0x4a918766, 0xa0175a04,
- 0x4f45ece5, 0xb93a488d, 0x5668fe6c, 0xbcee230e, 0x53bc95ef,
- 0xb2929f8b, 0x5dc0296a, 0xb746f408, 0x581442e9, 0x80c8ba99,
- 0x6f9a0c78, 0x851cd11a, 0x6a4e67fb, 0x8b606d9f, 0x6432db7e,
- 0x8eb4061c, 0x61e6b0fd, 0x97991495, 0x78cba274, 0x924d7f16,
- 0x7d1fc9f7, 0x9c31c393, 0x73637572, 0x99e5a810, 0x76b71ef1,
- 0xf32d5eb1, 0x1c7fe850, 0xf6f93532, 0x19ab83d3, 0xf88589b7,
- 0x17d73f56, 0xfd51e234, 0x120354d5, 0xe47cf0bd, 0x0b2e465c,
- 0xe1a89b3e, 0x0efa2ddf, 0xefd427bb, 0x0086915a, 0xea004c38,
- 0x0552fad9, 0xdd8e02a9, 0x32dcb448, 0xd85a692a, 0x3708dfcb,
- 0xd626d5af, 0x3974634e, 0xd3f2be2c, 0x3ca008cd, 0xcadfaca5,
- 0x258d1a44, 0xcf0bc726, 0x205971c7, 0xc1777ba3, 0x2e25cd42,
- 0xc4a31020, 0x2bf1a6c1, 0x14e696e1, 0xfbb42000, 0x1132fd62,
- 0xfe604b83, 0x1f4e41e7, 0xf01cf706, 0x1a9a2a64, 0xf5c89c85,
- 0x03b738ed, 0xece58e0c, 0x0663536e, 0xe931e58f, 0x081fefeb,
- 0xe74d590a, 0x0dcb8468, 0xe2993289, 0x3a45caf9, 0xd5177c18,
- 0x3f91a17a, 0xd0c3179b, 0x31ed1dff, 0xdebfab1e, 0x3439767c,
- 0xdb6bc09d, 0x2d1464f5, 0xc246d214, 0x28c00f76, 0xc792b997,
- 0x26bcb3f3, 0xc9ee0512, 0x2368d870, 0xcc3a6e91, 0x49a02ed1,
- 0xa6f29830, 0x4c744552, 0xa326f3b3, 0x4208f9d7, 0xad5a4f36,
- 0x47dc9254, 0xa88e24b5, 0x5ef180dd, 0xb1a3363c, 0x5b25eb5e,
- 0xb4775dbf, 0x555957db, 0xba0be13a, 0x508d3c58, 0xbfdf8ab9,
- 0x670372c9, 0x8851c428, 0x62d7194a, 0x8d85afab, 0x6caba5cf,
- 0x83f9132e, 0x697fce4c, 0x862d78ad, 0x7052dcc5, 0x9f006a24,
- 0x7586b746, 0x9ad401a7, 0x7bfa0bc3, 0x94a8bd22, 0x7e2e6040,
- 0x917cd6a1},
- {0x00000000, 0x87a6cb43, 0xd43c90c7, 0x539a5b84, 0x730827cf,
- 0xf4aeec8c, 0xa734b708, 0x20927c4b, 0xe6104f9e, 0x61b684dd,
- 0x322cdf59, 0xb58a141a, 0x95186851, 0x12bea312, 0x4124f896,
- 0xc68233d5, 0x1751997d, 0x90f7523e, 0xc36d09ba, 0x44cbc2f9,
- 0x6459beb2, 0xe3ff75f1, 0xb0652e75, 0x37c3e536, 0xf141d6e3,
- 0x76e71da0, 0x257d4624, 0xa2db8d67, 0x8249f12c, 0x05ef3a6f,
- 0x567561eb, 0xd1d3aaa8, 0x2ea332fa, 0xa905f9b9, 0xfa9fa23d,
- 0x7d39697e, 0x5dab1535, 0xda0dde76, 0x899785f2, 0x0e314eb1,
- 0xc8b37d64, 0x4f15b627, 0x1c8feda3, 0x9b2926e0, 0xbbbb5aab,
- 0x3c1d91e8, 0x6f87ca6c, 0xe821012f, 0x39f2ab87, 0xbe5460c4,
- 0xedce3b40, 0x6a68f003, 0x4afa8c48, 0xcd5c470b, 0x9ec61c8f,
- 0x1960d7cc, 0xdfe2e419, 0x58442f5a, 0x0bde74de, 0x8c78bf9d,
- 0xaceac3d6, 0x2b4c0895, 0x78d65311, 0xff709852, 0x5d4665f4,
- 0xdae0aeb7, 0x897af533, 0x0edc3e70, 0x2e4e423b, 0xa9e88978,
- 0xfa72d2fc, 0x7dd419bf, 0xbb562a6a, 0x3cf0e129, 0x6f6abaad,
- 0xe8cc71ee, 0xc85e0da5, 0x4ff8c6e6, 0x1c629d62, 0x9bc45621,
- 0x4a17fc89, 0xcdb137ca, 0x9e2b6c4e, 0x198da70d, 0x391fdb46,
- 0xbeb91005, 0xed234b81, 0x6a8580c2, 0xac07b317, 0x2ba17854,
- 0x783b23d0, 0xff9de893, 0xdf0f94d8, 0x58a95f9b, 0x0b33041f,
- 0x8c95cf5c, 0x73e5570e, 0xf4439c4d, 0xa7d9c7c9, 0x207f0c8a,
- 0x00ed70c1, 0x874bbb82, 0xd4d1e006, 0x53772b45, 0x95f51890,
- 0x1253d3d3, 0x41c98857, 0xc66f4314, 0xe6fd3f5f, 0x615bf41c,
- 0x32c1af98, 0xb56764db, 0x64b4ce73, 0xe3120530, 0xb0885eb4,
- 0x372e95f7, 0x17bce9bc, 0x901a22ff, 0xc380797b, 0x4426b238,
- 0x82a481ed, 0x05024aae, 0x5698112a, 0xd13eda69, 0xf1aca622,
- 0x760a6d61, 0x259036e5, 0xa236fda6, 0xba8ccbe8, 0x3d2a00ab,
- 0x6eb05b2f, 0xe916906c, 0xc984ec27, 0x4e222764, 0x1db87ce0,
- 0x9a1eb7a3, 0x5c9c8476, 0xdb3a4f35, 0x88a014b1, 0x0f06dff2,
- 0x2f94a3b9, 0xa83268fa, 0xfba8337e, 0x7c0ef83d, 0xaddd5295,
- 0x2a7b99d6, 0x79e1c252, 0xfe470911, 0xded5755a, 0x5973be19,
- 0x0ae9e59d, 0x8d4f2ede, 0x4bcd1d0b, 0xcc6bd648, 0x9ff18dcc,
- 0x1857468f, 0x38c53ac4, 0xbf63f187, 0xecf9aa03, 0x6b5f6140,
- 0x942ff912, 0x13893251, 0x401369d5, 0xc7b5a296, 0xe727dedd,
- 0x6081159e, 0x331b4e1a, 0xb4bd8559, 0x723fb68c, 0xf5997dcf,
- 0xa603264b, 0x21a5ed08, 0x01379143, 0x86915a00, 0xd50b0184,
- 0x52adcac7, 0x837e606f, 0x04d8ab2c, 0x5742f0a8, 0xd0e43beb,
- 0xf07647a0, 0x77d08ce3, 0x244ad767, 0xa3ec1c24, 0x656e2ff1,
- 0xe2c8e4b2, 0xb152bf36, 0x36f47475, 0x1666083e, 0x91c0c37d,
- 0xc25a98f9, 0x45fc53ba, 0xe7caae1c, 0x606c655f, 0x33f63edb,
- 0xb450f598, 0x94c289d3, 0x13644290, 0x40fe1914, 0xc758d257,
- 0x01dae182, 0x867c2ac1, 0xd5e67145, 0x5240ba06, 0x72d2c64d,
- 0xf5740d0e, 0xa6ee568a, 0x21489dc9, 0xf09b3761, 0x773dfc22,
- 0x24a7a7a6, 0xa3016ce5, 0x839310ae, 0x0435dbed, 0x57af8069,
- 0xd0094b2a, 0x168b78ff, 0x912db3bc, 0xc2b7e838, 0x4511237b,
- 0x65835f30, 0xe2259473, 0xb1bfcff7, 0x361904b4, 0xc9699ce6,
- 0x4ecf57a5, 0x1d550c21, 0x9af3c762, 0xba61bb29, 0x3dc7706a,
- 0x6e5d2bee, 0xe9fbe0ad, 0x2f79d378, 0xa8df183b, 0xfb4543bf,
- 0x7ce388fc, 0x5c71f4b7, 0xdbd73ff4, 0x884d6470, 0x0febaf33,
- 0xde38059b, 0x599eced8, 0x0a04955c, 0x8da25e1f, 0xad302254,
- 0x2a96e917, 0x790cb293, 0xfeaa79d0, 0x38284a05, 0xbf8e8146,
- 0xec14dac2, 0x6bb21181, 0x4b206dca, 0xcc86a689, 0x9f1cfd0d,
- 0x18ba364e}};
-
-local const z_word_t FAR crc_braid_big_table[][256] = {
- {0x0000000000000000, 0x43cba68700000000, 0xc7903cd400000000,
- 0x845b9a5300000000, 0xcf27087300000000, 0x8cecaef400000000,
- 0x08b734a700000000, 0x4b7c922000000000, 0x9e4f10e600000000,
- 0xdd84b66100000000, 0x59df2c3200000000, 0x1a148ab500000000,
- 0x5168189500000000, 0x12a3be1200000000, 0x96f8244100000000,
- 0xd53382c600000000, 0x7d99511700000000, 0x3e52f79000000000,
- 0xba096dc300000000, 0xf9c2cb4400000000, 0xb2be596400000000,
- 0xf175ffe300000000, 0x752e65b000000000, 0x36e5c33700000000,
- 0xe3d641f100000000, 0xa01de77600000000, 0x24467d2500000000,
- 0x678ddba200000000, 0x2cf1498200000000, 0x6f3aef0500000000,
- 0xeb61755600000000, 0xa8aad3d100000000, 0xfa32a32e00000000,
- 0xb9f905a900000000, 0x3da29ffa00000000, 0x7e69397d00000000,
- 0x3515ab5d00000000, 0x76de0dda00000000, 0xf285978900000000,
- 0xb14e310e00000000, 0x647db3c800000000, 0x27b6154f00000000,
- 0xa3ed8f1c00000000, 0xe026299b00000000, 0xab5abbbb00000000,
- 0xe8911d3c00000000, 0x6cca876f00000000, 0x2f0121e800000000,
- 0x87abf23900000000, 0xc46054be00000000, 0x403bceed00000000,
- 0x03f0686a00000000, 0x488cfa4a00000000, 0x0b475ccd00000000,
- 0x8f1cc69e00000000, 0xccd7601900000000, 0x19e4e2df00000000,
- 0x5a2f445800000000, 0xde74de0b00000000, 0x9dbf788c00000000,
- 0xd6c3eaac00000000, 0x95084c2b00000000, 0x1153d67800000000,
- 0x529870ff00000000, 0xf465465d00000000, 0xb7aee0da00000000,
- 0x33f57a8900000000, 0x703edc0e00000000, 0x3b424e2e00000000,
- 0x7889e8a900000000, 0xfcd272fa00000000, 0xbf19d47d00000000,
- 0x6a2a56bb00000000, 0x29e1f03c00000000, 0xadba6a6f00000000,
- 0xee71cce800000000, 0xa50d5ec800000000, 0xe6c6f84f00000000,
- 0x629d621c00000000, 0x2156c49b00000000, 0x89fc174a00000000,
- 0xca37b1cd00000000, 0x4e6c2b9e00000000, 0x0da78d1900000000,
- 0x46db1f3900000000, 0x0510b9be00000000, 0x814b23ed00000000,
- 0xc280856a00000000, 0x17b307ac00000000, 0x5478a12b00000000,
- 0xd0233b7800000000, 0x93e89dff00000000, 0xd8940fdf00000000,
- 0x9b5fa95800000000, 0x1f04330b00000000, 0x5ccf958c00000000,
- 0x0e57e57300000000, 0x4d9c43f400000000, 0xc9c7d9a700000000,
- 0x8a0c7f2000000000, 0xc170ed0000000000, 0x82bb4b8700000000,
- 0x06e0d1d400000000, 0x452b775300000000, 0x9018f59500000000,
- 0xd3d3531200000000, 0x5788c94100000000, 0x14436fc600000000,
- 0x5f3ffde600000000, 0x1cf45b6100000000, 0x98afc13200000000,
- 0xdb6467b500000000, 0x73ceb46400000000, 0x300512e300000000,
- 0xb45e88b000000000, 0xf7952e3700000000, 0xbce9bc1700000000,
- 0xff221a9000000000, 0x7b7980c300000000, 0x38b2264400000000,
- 0xed81a48200000000, 0xae4a020500000000, 0x2a11985600000000,
- 0x69da3ed100000000, 0x22a6acf100000000, 0x616d0a7600000000,
- 0xe536902500000000, 0xa6fd36a200000000, 0xe8cb8cba00000000,
- 0xab002a3d00000000, 0x2f5bb06e00000000, 0x6c9016e900000000,
- 0x27ec84c900000000, 0x6427224e00000000, 0xe07cb81d00000000,
- 0xa3b71e9a00000000, 0x76849c5c00000000, 0x354f3adb00000000,
- 0xb114a08800000000, 0xf2df060f00000000, 0xb9a3942f00000000,
- 0xfa6832a800000000, 0x7e33a8fb00000000, 0x3df80e7c00000000,
- 0x9552ddad00000000, 0xd6997b2a00000000, 0x52c2e17900000000,
- 0x110947fe00000000, 0x5a75d5de00000000, 0x19be735900000000,
- 0x9de5e90a00000000, 0xde2e4f8d00000000, 0x0b1dcd4b00000000,
- 0x48d66bcc00000000, 0xcc8df19f00000000, 0x8f46571800000000,
- 0xc43ac53800000000, 0x87f163bf00000000, 0x03aaf9ec00000000,
- 0x40615f6b00000000, 0x12f92f9400000000, 0x5132891300000000,
- 0xd569134000000000, 0x96a2b5c700000000, 0xddde27e700000000,
- 0x9e15816000000000, 0x1a4e1b3300000000, 0x5985bdb400000000,
- 0x8cb63f7200000000, 0xcf7d99f500000000, 0x4b2603a600000000,
- 0x08eda52100000000, 0x4391370100000000, 0x005a918600000000,
- 0x84010bd500000000, 0xc7caad5200000000, 0x6f607e8300000000,
- 0x2cabd80400000000, 0xa8f0425700000000, 0xeb3be4d000000000,
- 0xa04776f000000000, 0xe38cd07700000000, 0x67d74a2400000000,
- 0x241ceca300000000, 0xf12f6e6500000000, 0xb2e4c8e200000000,
- 0x36bf52b100000000, 0x7574f43600000000, 0x3e08661600000000,
- 0x7dc3c09100000000, 0xf9985ac200000000, 0xba53fc4500000000,
- 0x1caecae700000000, 0x5f656c6000000000, 0xdb3ef63300000000,
- 0x98f550b400000000, 0xd389c29400000000, 0x9042641300000000,
- 0x1419fe4000000000, 0x57d258c700000000, 0x82e1da0100000000,
- 0xc12a7c8600000000, 0x4571e6d500000000, 0x06ba405200000000,
- 0x4dc6d27200000000, 0x0e0d74f500000000, 0x8a56eea600000000,
- 0xc99d482100000000, 0x61379bf000000000, 0x22fc3d7700000000,
- 0xa6a7a72400000000, 0xe56c01a300000000, 0xae10938300000000,
- 0xeddb350400000000, 0x6980af5700000000, 0x2a4b09d000000000,
- 0xff788b1600000000, 0xbcb32d9100000000, 0x38e8b7c200000000,
- 0x7b23114500000000, 0x305f836500000000, 0x739425e200000000,
- 0xf7cfbfb100000000, 0xb404193600000000, 0xe69c69c900000000,
- 0xa557cf4e00000000, 0x210c551d00000000, 0x62c7f39a00000000,
- 0x29bb61ba00000000, 0x6a70c73d00000000, 0xee2b5d6e00000000,
- 0xade0fbe900000000, 0x78d3792f00000000, 0x3b18dfa800000000,
- 0xbf4345fb00000000, 0xfc88e37c00000000, 0xb7f4715c00000000,
- 0xf43fd7db00000000, 0x70644d8800000000, 0x33afeb0f00000000,
- 0x9b0538de00000000, 0xd8ce9e5900000000, 0x5c95040a00000000,
- 0x1f5ea28d00000000, 0x542230ad00000000, 0x17e9962a00000000,
- 0x93b20c7900000000, 0xd079aafe00000000, 0x054a283800000000,
- 0x46818ebf00000000, 0xc2da14ec00000000, 0x8111b26b00000000,
- 0xca6d204b00000000, 0x89a686cc00000000, 0x0dfd1c9f00000000,
- 0x4e36ba1800000000},
- {0x0000000000000000, 0xe1b652ef00000000, 0x836bd40500000000,
- 0x62dd86ea00000000, 0x06d7a80b00000000, 0xe761fae400000000,
- 0x85bc7c0e00000000, 0x640a2ee100000000, 0x0cae511700000000,
- 0xed1803f800000000, 0x8fc5851200000000, 0x6e73d7fd00000000,
- 0x0a79f91c00000000, 0xebcfabf300000000, 0x89122d1900000000,
- 0x68a47ff600000000, 0x185ca32e00000000, 0xf9eaf1c100000000,
- 0x9b37772b00000000, 0x7a8125c400000000, 0x1e8b0b2500000000,
- 0xff3d59ca00000000, 0x9de0df2000000000, 0x7c568dcf00000000,
- 0x14f2f23900000000, 0xf544a0d600000000, 0x9799263c00000000,
- 0x762f74d300000000, 0x12255a3200000000, 0xf39308dd00000000,
- 0x914e8e3700000000, 0x70f8dcd800000000, 0x30b8465d00000000,
- 0xd10e14b200000000, 0xb3d3925800000000, 0x5265c0b700000000,
- 0x366fee5600000000, 0xd7d9bcb900000000, 0xb5043a5300000000,
- 0x54b268bc00000000, 0x3c16174a00000000, 0xdda045a500000000,
- 0xbf7dc34f00000000, 0x5ecb91a000000000, 0x3ac1bf4100000000,
- 0xdb77edae00000000, 0xb9aa6b4400000000, 0x581c39ab00000000,
- 0x28e4e57300000000, 0xc952b79c00000000, 0xab8f317600000000,
- 0x4a39639900000000, 0x2e334d7800000000, 0xcf851f9700000000,
- 0xad58997d00000000, 0x4ceecb9200000000, 0x244ab46400000000,
- 0xc5fce68b00000000, 0xa721606100000000, 0x4697328e00000000,
- 0x229d1c6f00000000, 0xc32b4e8000000000, 0xa1f6c86a00000000,
- 0x40409a8500000000, 0x60708dba00000000, 0x81c6df5500000000,
- 0xe31b59bf00000000, 0x02ad0b5000000000, 0x66a725b100000000,
- 0x8711775e00000000, 0xe5ccf1b400000000, 0x047aa35b00000000,
- 0x6cdedcad00000000, 0x8d688e4200000000, 0xefb508a800000000,
- 0x0e035a4700000000, 0x6a0974a600000000, 0x8bbf264900000000,
- 0xe962a0a300000000, 0x08d4f24c00000000, 0x782c2e9400000000,
- 0x999a7c7b00000000, 0xfb47fa9100000000, 0x1af1a87e00000000,
- 0x7efb869f00000000, 0x9f4dd47000000000, 0xfd90529a00000000,
- 0x1c26007500000000, 0x74827f8300000000, 0x95342d6c00000000,
- 0xf7e9ab8600000000, 0x165ff96900000000, 0x7255d78800000000,
- 0x93e3856700000000, 0xf13e038d00000000, 0x1088516200000000,
- 0x50c8cbe700000000, 0xb17e990800000000, 0xd3a31fe200000000,
- 0x32154d0d00000000, 0x561f63ec00000000, 0xb7a9310300000000,
- 0xd574b7e900000000, 0x34c2e50600000000, 0x5c669af000000000,
- 0xbdd0c81f00000000, 0xdf0d4ef500000000, 0x3ebb1c1a00000000,
- 0x5ab132fb00000000, 0xbb07601400000000, 0xd9dae6fe00000000,
- 0x386cb41100000000, 0x489468c900000000, 0xa9223a2600000000,
- 0xcbffbccc00000000, 0x2a49ee2300000000, 0x4e43c0c200000000,
- 0xaff5922d00000000, 0xcd2814c700000000, 0x2c9e462800000000,
- 0x443a39de00000000, 0xa58c6b3100000000, 0xc751eddb00000000,
- 0x26e7bf3400000000, 0x42ed91d500000000, 0xa35bc33a00000000,
- 0xc18645d000000000, 0x2030173f00000000, 0x81e66bae00000000,
- 0x6050394100000000, 0x028dbfab00000000, 0xe33bed4400000000,
- 0x8731c3a500000000, 0x6687914a00000000, 0x045a17a000000000,
- 0xe5ec454f00000000, 0x8d483ab900000000, 0x6cfe685600000000,
- 0x0e23eebc00000000, 0xef95bc5300000000, 0x8b9f92b200000000,
- 0x6a29c05d00000000, 0x08f446b700000000, 0xe942145800000000,
- 0x99bac88000000000, 0x780c9a6f00000000, 0x1ad11c8500000000,
- 0xfb674e6a00000000, 0x9f6d608b00000000, 0x7edb326400000000,
- 0x1c06b48e00000000, 0xfdb0e66100000000, 0x9514999700000000,
- 0x74a2cb7800000000, 0x167f4d9200000000, 0xf7c91f7d00000000,
- 0x93c3319c00000000, 0x7275637300000000, 0x10a8e59900000000,
- 0xf11eb77600000000, 0xb15e2df300000000, 0x50e87f1c00000000,
- 0x3235f9f600000000, 0xd383ab1900000000, 0xb78985f800000000,
- 0x563fd71700000000, 0x34e251fd00000000, 0xd554031200000000,
- 0xbdf07ce400000000, 0x5c462e0b00000000, 0x3e9ba8e100000000,
- 0xdf2dfa0e00000000, 0xbb27d4ef00000000, 0x5a91860000000000,
- 0x384c00ea00000000, 0xd9fa520500000000, 0xa9028edd00000000,
- 0x48b4dc3200000000, 0x2a695ad800000000, 0xcbdf083700000000,
- 0xafd526d600000000, 0x4e63743900000000, 0x2cbef2d300000000,
- 0xcd08a03c00000000, 0xa5acdfca00000000, 0x441a8d2500000000,
- 0x26c70bcf00000000, 0xc771592000000000, 0xa37b77c100000000,
- 0x42cd252e00000000, 0x2010a3c400000000, 0xc1a6f12b00000000,
- 0xe196e61400000000, 0x0020b4fb00000000, 0x62fd321100000000,
- 0x834b60fe00000000, 0xe7414e1f00000000, 0x06f71cf000000000,
- 0x642a9a1a00000000, 0x859cc8f500000000, 0xed38b70300000000,
- 0x0c8ee5ec00000000, 0x6e53630600000000, 0x8fe531e900000000,
- 0xebef1f0800000000, 0x0a594de700000000, 0x6884cb0d00000000,
- 0x893299e200000000, 0xf9ca453a00000000, 0x187c17d500000000,
- 0x7aa1913f00000000, 0x9b17c3d000000000, 0xff1ded3100000000,
- 0x1eabbfde00000000, 0x7c76393400000000, 0x9dc06bdb00000000,
- 0xf564142d00000000, 0x14d246c200000000, 0x760fc02800000000,
- 0x97b992c700000000, 0xf3b3bc2600000000, 0x1205eec900000000,
- 0x70d8682300000000, 0x916e3acc00000000, 0xd12ea04900000000,
- 0x3098f2a600000000, 0x5245744c00000000, 0xb3f326a300000000,
- 0xd7f9084200000000, 0x364f5aad00000000, 0x5492dc4700000000,
- 0xb5248ea800000000, 0xdd80f15e00000000, 0x3c36a3b100000000,
- 0x5eeb255b00000000, 0xbf5d77b400000000, 0xdb57595500000000,
- 0x3ae10bba00000000, 0x583c8d5000000000, 0xb98adfbf00000000,
- 0xc972036700000000, 0x28c4518800000000, 0x4a19d76200000000,
- 0xabaf858d00000000, 0xcfa5ab6c00000000, 0x2e13f98300000000,
- 0x4cce7f6900000000, 0xad782d8600000000, 0xc5dc527000000000,
- 0x246a009f00000000, 0x46b7867500000000, 0xa701d49a00000000,
- 0xc30bfa7b00000000, 0x22bda89400000000, 0x40602e7e00000000,
- 0xa1d67c9100000000},
- {0x0000000000000000, 0x5880e2d700000000, 0xf106b47400000000,
- 0xa98656a300000000, 0xe20d68e900000000, 0xba8d8a3e00000000,
- 0x130bdc9d00000000, 0x4b8b3e4a00000000, 0x851da10900000000,
- 0xdd9d43de00000000, 0x741b157d00000000, 0x2c9bf7aa00000000,
- 0x6710c9e000000000, 0x3f902b3700000000, 0x96167d9400000000,
- 0xce969f4300000000, 0x0a3b421300000000, 0x52bba0c400000000,
- 0xfb3df66700000000, 0xa3bd14b000000000, 0xe8362afa00000000,
- 0xb0b6c82d00000000, 0x19309e8e00000000, 0x41b07c5900000000,
- 0x8f26e31a00000000, 0xd7a601cd00000000, 0x7e20576e00000000,
- 0x26a0b5b900000000, 0x6d2b8bf300000000, 0x35ab692400000000,
- 0x9c2d3f8700000000, 0xc4addd5000000000, 0x1476842600000000,
- 0x4cf666f100000000, 0xe570305200000000, 0xbdf0d28500000000,
- 0xf67beccf00000000, 0xaefb0e1800000000, 0x077d58bb00000000,
- 0x5ffdba6c00000000, 0x916b252f00000000, 0xc9ebc7f800000000,
- 0x606d915b00000000, 0x38ed738c00000000, 0x73664dc600000000,
- 0x2be6af1100000000, 0x8260f9b200000000, 0xdae01b6500000000,
- 0x1e4dc63500000000, 0x46cd24e200000000, 0xef4b724100000000,
- 0xb7cb909600000000, 0xfc40aedc00000000, 0xa4c04c0b00000000,
- 0x0d461aa800000000, 0x55c6f87f00000000, 0x9b50673c00000000,
- 0xc3d085eb00000000, 0x6a56d34800000000, 0x32d6319f00000000,
- 0x795d0fd500000000, 0x21dded0200000000, 0x885bbba100000000,
- 0xd0db597600000000, 0x28ec084d00000000, 0x706cea9a00000000,
- 0xd9eabc3900000000, 0x816a5eee00000000, 0xcae160a400000000,
- 0x9261827300000000, 0x3be7d4d000000000, 0x6367360700000000,
- 0xadf1a94400000000, 0xf5714b9300000000, 0x5cf71d3000000000,
- 0x0477ffe700000000, 0x4ffcc1ad00000000, 0x177c237a00000000,
- 0xbefa75d900000000, 0xe67a970e00000000, 0x22d74a5e00000000,
- 0x7a57a88900000000, 0xd3d1fe2a00000000, 0x8b511cfd00000000,
- 0xc0da22b700000000, 0x985ac06000000000, 0x31dc96c300000000,
- 0x695c741400000000, 0xa7caeb5700000000, 0xff4a098000000000,
- 0x56cc5f2300000000, 0x0e4cbdf400000000, 0x45c783be00000000,
- 0x1d47616900000000, 0xb4c137ca00000000, 0xec41d51d00000000,
- 0x3c9a8c6b00000000, 0x641a6ebc00000000, 0xcd9c381f00000000,
- 0x951cdac800000000, 0xde97e48200000000, 0x8617065500000000,
- 0x2f9150f600000000, 0x7711b22100000000, 0xb9872d6200000000,
- 0xe107cfb500000000, 0x4881991600000000, 0x10017bc100000000,
- 0x5b8a458b00000000, 0x030aa75c00000000, 0xaa8cf1ff00000000,
- 0xf20c132800000000, 0x36a1ce7800000000, 0x6e212caf00000000,
- 0xc7a77a0c00000000, 0x9f2798db00000000, 0xd4aca69100000000,
- 0x8c2c444600000000, 0x25aa12e500000000, 0x7d2af03200000000,
- 0xb3bc6f7100000000, 0xeb3c8da600000000, 0x42badb0500000000,
- 0x1a3a39d200000000, 0x51b1079800000000, 0x0931e54f00000000,
- 0xa0b7b3ec00000000, 0xf837513b00000000, 0x50d8119a00000000,
- 0x0858f34d00000000, 0xa1dea5ee00000000, 0xf95e473900000000,
- 0xb2d5797300000000, 0xea559ba400000000, 0x43d3cd0700000000,
- 0x1b532fd000000000, 0xd5c5b09300000000, 0x8d45524400000000,
- 0x24c304e700000000, 0x7c43e63000000000, 0x37c8d87a00000000,
- 0x6f483aad00000000, 0xc6ce6c0e00000000, 0x9e4e8ed900000000,
- 0x5ae3538900000000, 0x0263b15e00000000, 0xabe5e7fd00000000,
- 0xf365052a00000000, 0xb8ee3b6000000000, 0xe06ed9b700000000,
- 0x49e88f1400000000, 0x11686dc300000000, 0xdffef28000000000,
- 0x877e105700000000, 0x2ef846f400000000, 0x7678a42300000000,
- 0x3df39a6900000000, 0x657378be00000000, 0xccf52e1d00000000,
- 0x9475ccca00000000, 0x44ae95bc00000000, 0x1c2e776b00000000,
- 0xb5a821c800000000, 0xed28c31f00000000, 0xa6a3fd5500000000,
- 0xfe231f8200000000, 0x57a5492100000000, 0x0f25abf600000000,
- 0xc1b334b500000000, 0x9933d66200000000, 0x30b580c100000000,
- 0x6835621600000000, 0x23be5c5c00000000, 0x7b3ebe8b00000000,
- 0xd2b8e82800000000, 0x8a380aff00000000, 0x4e95d7af00000000,
- 0x1615357800000000, 0xbf9363db00000000, 0xe713810c00000000,
- 0xac98bf4600000000, 0xf4185d9100000000, 0x5d9e0b3200000000,
- 0x051ee9e500000000, 0xcb8876a600000000, 0x9308947100000000,
- 0x3a8ec2d200000000, 0x620e200500000000, 0x29851e4f00000000,
- 0x7105fc9800000000, 0xd883aa3b00000000, 0x800348ec00000000,
- 0x783419d700000000, 0x20b4fb0000000000, 0x8932ada300000000,
- 0xd1b24f7400000000, 0x9a39713e00000000, 0xc2b993e900000000,
- 0x6b3fc54a00000000, 0x33bf279d00000000, 0xfd29b8de00000000,
- 0xa5a95a0900000000, 0x0c2f0caa00000000, 0x54afee7d00000000,
- 0x1f24d03700000000, 0x47a432e000000000, 0xee22644300000000,
- 0xb6a2869400000000, 0x720f5bc400000000, 0x2a8fb91300000000,
- 0x8309efb000000000, 0xdb890d6700000000, 0x9002332d00000000,
- 0xc882d1fa00000000, 0x6104875900000000, 0x3984658e00000000,
- 0xf712facd00000000, 0xaf92181a00000000, 0x06144eb900000000,
- 0x5e94ac6e00000000, 0x151f922400000000, 0x4d9f70f300000000,
- 0xe419265000000000, 0xbc99c48700000000, 0x6c429df100000000,
- 0x34c27f2600000000, 0x9d44298500000000, 0xc5c4cb5200000000,
- 0x8e4ff51800000000, 0xd6cf17cf00000000, 0x7f49416c00000000,
- 0x27c9a3bb00000000, 0xe95f3cf800000000, 0xb1dfde2f00000000,
- 0x1859888c00000000, 0x40d96a5b00000000, 0x0b52541100000000,
- 0x53d2b6c600000000, 0xfa54e06500000000, 0xa2d402b200000000,
- 0x6679dfe200000000, 0x3ef93d3500000000, 0x977f6b9600000000,
- 0xcfff894100000000, 0x8474b70b00000000, 0xdcf455dc00000000,
- 0x7572037f00000000, 0x2df2e1a800000000, 0xe3647eeb00000000,
- 0xbbe49c3c00000000, 0x1262ca9f00000000, 0x4ae2284800000000,
- 0x0169160200000000, 0x59e9f4d500000000, 0xf06fa27600000000,
- 0xa8ef40a100000000},
- {0x0000000000000000, 0x463b676500000000, 0x8c76ceca00000000,
- 0xca4da9af00000000, 0x59ebed4e00000000, 0x1fd08a2b00000000,
- 0xd59d238400000000, 0x93a644e100000000, 0xb2d6db9d00000000,
- 0xf4edbcf800000000, 0x3ea0155700000000, 0x789b723200000000,
- 0xeb3d36d300000000, 0xad0651b600000000, 0x674bf81900000000,
- 0x21709f7c00000000, 0x25abc6e000000000, 0x6390a18500000000,
- 0xa9dd082a00000000, 0xefe66f4f00000000, 0x7c402bae00000000,
- 0x3a7b4ccb00000000, 0xf036e56400000000, 0xb60d820100000000,
- 0x977d1d7d00000000, 0xd1467a1800000000, 0x1b0bd3b700000000,
- 0x5d30b4d200000000, 0xce96f03300000000, 0x88ad975600000000,
- 0x42e03ef900000000, 0x04db599c00000000, 0x0b50fc1a00000000,
- 0x4d6b9b7f00000000, 0x872632d000000000, 0xc11d55b500000000,
- 0x52bb115400000000, 0x1480763100000000, 0xdecddf9e00000000,
- 0x98f6b8fb00000000, 0xb986278700000000, 0xffbd40e200000000,
- 0x35f0e94d00000000, 0x73cb8e2800000000, 0xe06dcac900000000,
- 0xa656adac00000000, 0x6c1b040300000000, 0x2a20636600000000,
- 0x2efb3afa00000000, 0x68c05d9f00000000, 0xa28df43000000000,
- 0xe4b6935500000000, 0x7710d7b400000000, 0x312bb0d100000000,
- 0xfb66197e00000000, 0xbd5d7e1b00000000, 0x9c2de16700000000,
- 0xda16860200000000, 0x105b2fad00000000, 0x566048c800000000,
- 0xc5c60c2900000000, 0x83fd6b4c00000000, 0x49b0c2e300000000,
- 0x0f8ba58600000000, 0x16a0f83500000000, 0x509b9f5000000000,
- 0x9ad636ff00000000, 0xdced519a00000000, 0x4f4b157b00000000,
- 0x0970721e00000000, 0xc33ddbb100000000, 0x8506bcd400000000,
- 0xa47623a800000000, 0xe24d44cd00000000, 0x2800ed6200000000,
- 0x6e3b8a0700000000, 0xfd9dcee600000000, 0xbba6a98300000000,
- 0x71eb002c00000000, 0x37d0674900000000, 0x330b3ed500000000,
- 0x753059b000000000, 0xbf7df01f00000000, 0xf946977a00000000,
- 0x6ae0d39b00000000, 0x2cdbb4fe00000000, 0xe6961d5100000000,
- 0xa0ad7a3400000000, 0x81dde54800000000, 0xc7e6822d00000000,
- 0x0dab2b8200000000, 0x4b904ce700000000, 0xd836080600000000,
- 0x9e0d6f6300000000, 0x5440c6cc00000000, 0x127ba1a900000000,
- 0x1df0042f00000000, 0x5bcb634a00000000, 0x9186cae500000000,
- 0xd7bdad8000000000, 0x441be96100000000, 0x02208e0400000000,
- 0xc86d27ab00000000, 0x8e5640ce00000000, 0xaf26dfb200000000,
- 0xe91db8d700000000, 0x2350117800000000, 0x656b761d00000000,
- 0xf6cd32fc00000000, 0xb0f6559900000000, 0x7abbfc3600000000,
- 0x3c809b5300000000, 0x385bc2cf00000000, 0x7e60a5aa00000000,
- 0xb42d0c0500000000, 0xf2166b6000000000, 0x61b02f8100000000,
- 0x278b48e400000000, 0xedc6e14b00000000, 0xabfd862e00000000,
- 0x8a8d195200000000, 0xccb67e3700000000, 0x06fbd79800000000,
- 0x40c0b0fd00000000, 0xd366f41c00000000, 0x955d937900000000,
- 0x5f103ad600000000, 0x192b5db300000000, 0x2c40f16b00000000,
- 0x6a7b960e00000000, 0xa0363fa100000000, 0xe60d58c400000000,
- 0x75ab1c2500000000, 0x33907b4000000000, 0xf9ddd2ef00000000,
- 0xbfe6b58a00000000, 0x9e962af600000000, 0xd8ad4d9300000000,
- 0x12e0e43c00000000, 0x54db835900000000, 0xc77dc7b800000000,
- 0x8146a0dd00000000, 0x4b0b097200000000, 0x0d306e1700000000,
- 0x09eb378b00000000, 0x4fd050ee00000000, 0x859df94100000000,
- 0xc3a69e2400000000, 0x5000dac500000000, 0x163bbda000000000,
- 0xdc76140f00000000, 0x9a4d736a00000000, 0xbb3dec1600000000,
- 0xfd068b7300000000, 0x374b22dc00000000, 0x717045b900000000,
- 0xe2d6015800000000, 0xa4ed663d00000000, 0x6ea0cf9200000000,
- 0x289ba8f700000000, 0x27100d7100000000, 0x612b6a1400000000,
- 0xab66c3bb00000000, 0xed5da4de00000000, 0x7efbe03f00000000,
- 0x38c0875a00000000, 0xf28d2ef500000000, 0xb4b6499000000000,
- 0x95c6d6ec00000000, 0xd3fdb18900000000, 0x19b0182600000000,
- 0x5f8b7f4300000000, 0xcc2d3ba200000000, 0x8a165cc700000000,
- 0x405bf56800000000, 0x0660920d00000000, 0x02bbcb9100000000,
- 0x4480acf400000000, 0x8ecd055b00000000, 0xc8f6623e00000000,
- 0x5b5026df00000000, 0x1d6b41ba00000000, 0xd726e81500000000,
- 0x911d8f7000000000, 0xb06d100c00000000, 0xf656776900000000,
- 0x3c1bdec600000000, 0x7a20b9a300000000, 0xe986fd4200000000,
- 0xafbd9a2700000000, 0x65f0338800000000, 0x23cb54ed00000000,
- 0x3ae0095e00000000, 0x7cdb6e3b00000000, 0xb696c79400000000,
- 0xf0ada0f100000000, 0x630be41000000000, 0x2530837500000000,
- 0xef7d2ada00000000, 0xa9464dbf00000000, 0x8836d2c300000000,
- 0xce0db5a600000000, 0x04401c0900000000, 0x427b7b6c00000000,
- 0xd1dd3f8d00000000, 0x97e658e800000000, 0x5dabf14700000000,
- 0x1b90962200000000, 0x1f4bcfbe00000000, 0x5970a8db00000000,
- 0x933d017400000000, 0xd506661100000000, 0x46a022f000000000,
- 0x009b459500000000, 0xcad6ec3a00000000, 0x8ced8b5f00000000,
- 0xad9d142300000000, 0xeba6734600000000, 0x21ebdae900000000,
- 0x67d0bd8c00000000, 0xf476f96d00000000, 0xb24d9e0800000000,
- 0x780037a700000000, 0x3e3b50c200000000, 0x31b0f54400000000,
- 0x778b922100000000, 0xbdc63b8e00000000, 0xfbfd5ceb00000000,
- 0x685b180a00000000, 0x2e607f6f00000000, 0xe42dd6c000000000,
- 0xa216b1a500000000, 0x83662ed900000000, 0xc55d49bc00000000,
- 0x0f10e01300000000, 0x492b877600000000, 0xda8dc39700000000,
- 0x9cb6a4f200000000, 0x56fb0d5d00000000, 0x10c06a3800000000,
- 0x141b33a400000000, 0x522054c100000000, 0x986dfd6e00000000,
- 0xde569a0b00000000, 0x4df0deea00000000, 0x0bcbb98f00000000,
- 0xc186102000000000, 0x87bd774500000000, 0xa6cde83900000000,
- 0xe0f68f5c00000000, 0x2abb26f300000000, 0x6c80419600000000,
- 0xff26057700000000, 0xb91d621200000000, 0x7350cbbd00000000,
- 0x356bacd800000000},
- {0x0000000000000000, 0x9e83da9f00000000, 0x7d01c4e400000000,
- 0xe3821e7b00000000, 0xbb04f91200000000, 0x2587238d00000000,
- 0xc6053df600000000, 0x5886e76900000000, 0x7609f22500000000,
- 0xe88a28ba00000000, 0x0b0836c100000000, 0x958bec5e00000000,
- 0xcd0d0b3700000000, 0x538ed1a800000000, 0xb00ccfd300000000,
- 0x2e8f154c00000000, 0xec12e44b00000000, 0x72913ed400000000,
- 0x911320af00000000, 0x0f90fa3000000000, 0x57161d5900000000,
- 0xc995c7c600000000, 0x2a17d9bd00000000, 0xb494032200000000,
- 0x9a1b166e00000000, 0x0498ccf100000000, 0xe71ad28a00000000,
- 0x7999081500000000, 0x211fef7c00000000, 0xbf9c35e300000000,
- 0x5c1e2b9800000000, 0xc29df10700000000, 0xd825c89700000000,
- 0x46a6120800000000, 0xa5240c7300000000, 0x3ba7d6ec00000000,
- 0x6321318500000000, 0xfda2eb1a00000000, 0x1e20f56100000000,
- 0x80a32ffe00000000, 0xae2c3ab200000000, 0x30afe02d00000000,
- 0xd32dfe5600000000, 0x4dae24c900000000, 0x1528c3a000000000,
- 0x8bab193f00000000, 0x6829074400000000, 0xf6aadddb00000000,
- 0x34372cdc00000000, 0xaab4f64300000000, 0x4936e83800000000,
- 0xd7b532a700000000, 0x8f33d5ce00000000, 0x11b00f5100000000,
- 0xf232112a00000000, 0x6cb1cbb500000000, 0x423edef900000000,
- 0xdcbd046600000000, 0x3f3f1a1d00000000, 0xa1bcc08200000000,
- 0xf93a27eb00000000, 0x67b9fd7400000000, 0x843be30f00000000,
- 0x1ab8399000000000, 0xf14de1f400000000, 0x6fce3b6b00000000,
- 0x8c4c251000000000, 0x12cfff8f00000000, 0x4a4918e600000000,
- 0xd4cac27900000000, 0x3748dc0200000000, 0xa9cb069d00000000,
- 0x874413d100000000, 0x19c7c94e00000000, 0xfa45d73500000000,
- 0x64c60daa00000000, 0x3c40eac300000000, 0xa2c3305c00000000,
- 0x41412e2700000000, 0xdfc2f4b800000000, 0x1d5f05bf00000000,
- 0x83dcdf2000000000, 0x605ec15b00000000, 0xfedd1bc400000000,
- 0xa65bfcad00000000, 0x38d8263200000000, 0xdb5a384900000000,
- 0x45d9e2d600000000, 0x6b56f79a00000000, 0xf5d52d0500000000,
- 0x1657337e00000000, 0x88d4e9e100000000, 0xd0520e8800000000,
- 0x4ed1d41700000000, 0xad53ca6c00000000, 0x33d010f300000000,
- 0x2968296300000000, 0xb7ebf3fc00000000, 0x5469ed8700000000,
- 0xcaea371800000000, 0x926cd07100000000, 0x0cef0aee00000000,
- 0xef6d149500000000, 0x71eece0a00000000, 0x5f61db4600000000,
- 0xc1e201d900000000, 0x22601fa200000000, 0xbce3c53d00000000,
- 0xe465225400000000, 0x7ae6f8cb00000000, 0x9964e6b000000000,
- 0x07e73c2f00000000, 0xc57acd2800000000, 0x5bf917b700000000,
- 0xb87b09cc00000000, 0x26f8d35300000000, 0x7e7e343a00000000,
- 0xe0fdeea500000000, 0x037ff0de00000000, 0x9dfc2a4100000000,
- 0xb3733f0d00000000, 0x2df0e59200000000, 0xce72fbe900000000,
- 0x50f1217600000000, 0x0877c61f00000000, 0x96f41c8000000000,
- 0x757602fb00000000, 0xebf5d86400000000, 0xa39db33200000000,
- 0x3d1e69ad00000000, 0xde9c77d600000000, 0x401fad4900000000,
- 0x18994a2000000000, 0x861a90bf00000000, 0x65988ec400000000,
- 0xfb1b545b00000000, 0xd594411700000000, 0x4b179b8800000000,
- 0xa89585f300000000, 0x36165f6c00000000, 0x6e90b80500000000,
- 0xf013629a00000000, 0x13917ce100000000, 0x8d12a67e00000000,
- 0x4f8f577900000000, 0xd10c8de600000000, 0x328e939d00000000,
- 0xac0d490200000000, 0xf48bae6b00000000, 0x6a0874f400000000,
- 0x898a6a8f00000000, 0x1709b01000000000, 0x3986a55c00000000,
- 0xa7057fc300000000, 0x448761b800000000, 0xda04bb2700000000,
- 0x82825c4e00000000, 0x1c0186d100000000, 0xff8398aa00000000,
- 0x6100423500000000, 0x7bb87ba500000000, 0xe53ba13a00000000,
- 0x06b9bf4100000000, 0x983a65de00000000, 0xc0bc82b700000000,
- 0x5e3f582800000000, 0xbdbd465300000000, 0x233e9ccc00000000,
- 0x0db1898000000000, 0x9332531f00000000, 0x70b04d6400000000,
- 0xee3397fb00000000, 0xb6b5709200000000, 0x2836aa0d00000000,
- 0xcbb4b47600000000, 0x55376ee900000000, 0x97aa9fee00000000,
- 0x0929457100000000, 0xeaab5b0a00000000, 0x7428819500000000,
- 0x2cae66fc00000000, 0xb22dbc6300000000, 0x51afa21800000000,
- 0xcf2c788700000000, 0xe1a36dcb00000000, 0x7f20b75400000000,
- 0x9ca2a92f00000000, 0x022173b000000000, 0x5aa794d900000000,
- 0xc4244e4600000000, 0x27a6503d00000000, 0xb9258aa200000000,
- 0x52d052c600000000, 0xcc53885900000000, 0x2fd1962200000000,
- 0xb1524cbd00000000, 0xe9d4abd400000000, 0x7757714b00000000,
- 0x94d56f3000000000, 0x0a56b5af00000000, 0x24d9a0e300000000,
- 0xba5a7a7c00000000, 0x59d8640700000000, 0xc75bbe9800000000,
- 0x9fdd59f100000000, 0x015e836e00000000, 0xe2dc9d1500000000,
- 0x7c5f478a00000000, 0xbec2b68d00000000, 0x20416c1200000000,
- 0xc3c3726900000000, 0x5d40a8f600000000, 0x05c64f9f00000000,
- 0x9b45950000000000, 0x78c78b7b00000000, 0xe64451e400000000,
- 0xc8cb44a800000000, 0x56489e3700000000, 0xb5ca804c00000000,
- 0x2b495ad300000000, 0x73cfbdba00000000, 0xed4c672500000000,
- 0x0ece795e00000000, 0x904da3c100000000, 0x8af59a5100000000,
- 0x147640ce00000000, 0xf7f45eb500000000, 0x6977842a00000000,
- 0x31f1634300000000, 0xaf72b9dc00000000, 0x4cf0a7a700000000,
- 0xd2737d3800000000, 0xfcfc687400000000, 0x627fb2eb00000000,
- 0x81fdac9000000000, 0x1f7e760f00000000, 0x47f8916600000000,
- 0xd97b4bf900000000, 0x3af9558200000000, 0xa47a8f1d00000000,
- 0x66e77e1a00000000, 0xf864a48500000000, 0x1be6bafe00000000,
- 0x8565606100000000, 0xdde3870800000000, 0x43605d9700000000,
- 0xa0e243ec00000000, 0x3e61997300000000, 0x10ee8c3f00000000,
- 0x8e6d56a000000000, 0x6def48db00000000, 0xf36c924400000000,
- 0xabea752d00000000, 0x3569afb200000000, 0xd6ebb1c900000000,
- 0x48686b5600000000},
- {0x0000000000000000, 0xc064281700000000, 0x80c9502e00000000,
- 0x40ad783900000000, 0x0093a15c00000000, 0xc0f7894b00000000,
- 0x805af17200000000, 0x403ed96500000000, 0x002643b900000000,
- 0xc0426bae00000000, 0x80ef139700000000, 0x408b3b8000000000,
- 0x00b5e2e500000000, 0xc0d1caf200000000, 0x807cb2cb00000000,
- 0x40189adc00000000, 0x414af7a900000000, 0x812edfbe00000000,
- 0xc183a78700000000, 0x01e78f9000000000, 0x41d956f500000000,
- 0x81bd7ee200000000, 0xc11006db00000000, 0x01742ecc00000000,
- 0x416cb41000000000, 0x81089c0700000000, 0xc1a5e43e00000000,
- 0x01c1cc2900000000, 0x41ff154c00000000, 0x819b3d5b00000000,
- 0xc136456200000000, 0x01526d7500000000, 0xc3929f8800000000,
- 0x03f6b79f00000000, 0x435bcfa600000000, 0x833fe7b100000000,
- 0xc3013ed400000000, 0x036516c300000000, 0x43c86efa00000000,
- 0x83ac46ed00000000, 0xc3b4dc3100000000, 0x03d0f42600000000,
- 0x437d8c1f00000000, 0x8319a40800000000, 0xc3277d6d00000000,
- 0x0343557a00000000, 0x43ee2d4300000000, 0x838a055400000000,
- 0x82d8682100000000, 0x42bc403600000000, 0x0211380f00000000,
- 0xc275101800000000, 0x824bc97d00000000, 0x422fe16a00000000,
- 0x0282995300000000, 0xc2e6b14400000000, 0x82fe2b9800000000,
- 0x429a038f00000000, 0x02377bb600000000, 0xc25353a100000000,
- 0x826d8ac400000000, 0x4209a2d300000000, 0x02a4daea00000000,
- 0xc2c0f2fd00000000, 0xc7234eca00000000, 0x074766dd00000000,
- 0x47ea1ee400000000, 0x878e36f300000000, 0xc7b0ef9600000000,
- 0x07d4c78100000000, 0x4779bfb800000000, 0x871d97af00000000,
- 0xc7050d7300000000, 0x0761256400000000, 0x47cc5d5d00000000,
- 0x87a8754a00000000, 0xc796ac2f00000000, 0x07f2843800000000,
- 0x475ffc0100000000, 0x873bd41600000000, 0x8669b96300000000,
- 0x460d917400000000, 0x06a0e94d00000000, 0xc6c4c15a00000000,
- 0x86fa183f00000000, 0x469e302800000000, 0x0633481100000000,
- 0xc657600600000000, 0x864ffada00000000, 0x462bd2cd00000000,
- 0x0686aaf400000000, 0xc6e282e300000000, 0x86dc5b8600000000,
- 0x46b8739100000000, 0x06150ba800000000, 0xc67123bf00000000,
- 0x04b1d14200000000, 0xc4d5f95500000000, 0x8478816c00000000,
- 0x441ca97b00000000, 0x0422701e00000000, 0xc446580900000000,
- 0x84eb203000000000, 0x448f082700000000, 0x049792fb00000000,
- 0xc4f3baec00000000, 0x845ec2d500000000, 0x443aeac200000000,
- 0x040433a700000000, 0xc4601bb000000000, 0x84cd638900000000,
- 0x44a94b9e00000000, 0x45fb26eb00000000, 0x859f0efc00000000,
- 0xc53276c500000000, 0x05565ed200000000, 0x456887b700000000,
- 0x850cafa000000000, 0xc5a1d79900000000, 0x05c5ff8e00000000,
- 0x45dd655200000000, 0x85b94d4500000000, 0xc514357c00000000,
- 0x05701d6b00000000, 0x454ec40e00000000, 0x852aec1900000000,
- 0xc587942000000000, 0x05e3bc3700000000, 0xcf41ed4f00000000,
- 0x0f25c55800000000, 0x4f88bd6100000000, 0x8fec957600000000,
- 0xcfd24c1300000000, 0x0fb6640400000000, 0x4f1b1c3d00000000,
- 0x8f7f342a00000000, 0xcf67aef600000000, 0x0f0386e100000000,
- 0x4faefed800000000, 0x8fcad6cf00000000, 0xcff40faa00000000,
- 0x0f9027bd00000000, 0x4f3d5f8400000000, 0x8f59779300000000,
- 0x8e0b1ae600000000, 0x4e6f32f100000000, 0x0ec24ac800000000,
- 0xcea662df00000000, 0x8e98bbba00000000, 0x4efc93ad00000000,
- 0x0e51eb9400000000, 0xce35c38300000000, 0x8e2d595f00000000,
- 0x4e49714800000000, 0x0ee4097100000000, 0xce80216600000000,
- 0x8ebef80300000000, 0x4edad01400000000, 0x0e77a82d00000000,
- 0xce13803a00000000, 0x0cd372c700000000, 0xccb75ad000000000,
- 0x8c1a22e900000000, 0x4c7e0afe00000000, 0x0c40d39b00000000,
- 0xcc24fb8c00000000, 0x8c8983b500000000, 0x4cedaba200000000,
- 0x0cf5317e00000000, 0xcc91196900000000, 0x8c3c615000000000,
- 0x4c58494700000000, 0x0c66902200000000, 0xcc02b83500000000,
- 0x8cafc00c00000000, 0x4ccbe81b00000000, 0x4d99856e00000000,
- 0x8dfdad7900000000, 0xcd50d54000000000, 0x0d34fd5700000000,
- 0x4d0a243200000000, 0x8d6e0c2500000000, 0xcdc3741c00000000,
- 0x0da75c0b00000000, 0x4dbfc6d700000000, 0x8ddbeec000000000,
- 0xcd7696f900000000, 0x0d12beee00000000, 0x4d2c678b00000000,
- 0x8d484f9c00000000, 0xcde537a500000000, 0x0d811fb200000000,
- 0x0862a38500000000, 0xc8068b9200000000, 0x88abf3ab00000000,
- 0x48cfdbbc00000000, 0x08f102d900000000, 0xc8952ace00000000,
- 0x883852f700000000, 0x485c7ae000000000, 0x0844e03c00000000,
- 0xc820c82b00000000, 0x888db01200000000, 0x48e9980500000000,
- 0x08d7416000000000, 0xc8b3697700000000, 0x881e114e00000000,
- 0x487a395900000000, 0x4928542c00000000, 0x894c7c3b00000000,
- 0xc9e1040200000000, 0x09852c1500000000, 0x49bbf57000000000,
- 0x89dfdd6700000000, 0xc972a55e00000000, 0x09168d4900000000,
- 0x490e179500000000, 0x896a3f8200000000, 0xc9c747bb00000000,
- 0x09a36fac00000000, 0x499db6c900000000, 0x89f99ede00000000,
- 0xc954e6e700000000, 0x0930cef000000000, 0xcbf03c0d00000000,
- 0x0b94141a00000000, 0x4b396c2300000000, 0x8b5d443400000000,
- 0xcb639d5100000000, 0x0b07b54600000000, 0x4baacd7f00000000,
- 0x8bcee56800000000, 0xcbd67fb400000000, 0x0bb257a300000000,
- 0x4b1f2f9a00000000, 0x8b7b078d00000000, 0xcb45dee800000000,
- 0x0b21f6ff00000000, 0x4b8c8ec600000000, 0x8be8a6d100000000,
- 0x8abacba400000000, 0x4adee3b300000000, 0x0a739b8a00000000,
- 0xca17b39d00000000, 0x8a296af800000000, 0x4a4d42ef00000000,
- 0x0ae03ad600000000, 0xca8412c100000000, 0x8a9c881d00000000,
- 0x4af8a00a00000000, 0x0a55d83300000000, 0xca31f02400000000,
- 0x8a0f294100000000, 0x4a6b015600000000, 0x0ac6796f00000000,
- 0xcaa2517800000000},
- {0x0000000000000000, 0xd4ea739b00000000, 0xe9d396ed00000000,
- 0x3d39e57600000000, 0x93a15c0000000000, 0x474b2f9b00000000,
- 0x7a72caed00000000, 0xae98b97600000000, 0x2643b90000000000,
- 0xf2a9ca9b00000000, 0xcf902fed00000000, 0x1b7a5c7600000000,
- 0xb5e2e50000000000, 0x6108969b00000000, 0x5c3173ed00000000,
- 0x88db007600000000, 0x4c86720100000000, 0x986c019a00000000,
- 0xa555e4ec00000000, 0x71bf977700000000, 0xdf272e0100000000,
- 0x0bcd5d9a00000000, 0x36f4b8ec00000000, 0xe21ecb7700000000,
- 0x6ac5cb0100000000, 0xbe2fb89a00000000, 0x83165dec00000000,
- 0x57fc2e7700000000, 0xf964970100000000, 0x2d8ee49a00000000,
- 0x10b701ec00000000, 0xc45d727700000000, 0x980ce50200000000,
- 0x4ce6969900000000, 0x71df73ef00000000, 0xa535007400000000,
- 0x0badb90200000000, 0xdf47ca9900000000, 0xe27e2fef00000000,
- 0x36945c7400000000, 0xbe4f5c0200000000, 0x6aa52f9900000000,
- 0x579ccaef00000000, 0x8376b97400000000, 0x2dee000200000000,
- 0xf904739900000000, 0xc43d96ef00000000, 0x10d7e57400000000,
- 0xd48a970300000000, 0x0060e49800000000, 0x3d5901ee00000000,
- 0xe9b3727500000000, 0x472bcb0300000000, 0x93c1b89800000000,
- 0xaef85dee00000000, 0x7a122e7500000000, 0xf2c92e0300000000,
- 0x26235d9800000000, 0x1b1ab8ee00000000, 0xcff0cb7500000000,
- 0x6168720300000000, 0xb582019800000000, 0x88bbe4ee00000000,
- 0x5c51977500000000, 0x3019ca0500000000, 0xe4f3b99e00000000,
- 0xd9ca5ce800000000, 0x0d202f7300000000, 0xa3b8960500000000,
- 0x7752e59e00000000, 0x4a6b00e800000000, 0x9e81737300000000,
- 0x165a730500000000, 0xc2b0009e00000000, 0xff89e5e800000000,
- 0x2b63967300000000, 0x85fb2f0500000000, 0x51115c9e00000000,
- 0x6c28b9e800000000, 0xb8c2ca7300000000, 0x7c9fb80400000000,
- 0xa875cb9f00000000, 0x954c2ee900000000, 0x41a65d7200000000,
- 0xef3ee40400000000, 0x3bd4979f00000000, 0x06ed72e900000000,
- 0xd207017200000000, 0x5adc010400000000, 0x8e36729f00000000,
- 0xb30f97e900000000, 0x67e5e47200000000, 0xc97d5d0400000000,
- 0x1d972e9f00000000, 0x20aecbe900000000, 0xf444b87200000000,
- 0xa8152f0700000000, 0x7cff5c9c00000000, 0x41c6b9ea00000000,
- 0x952cca7100000000, 0x3bb4730700000000, 0xef5e009c00000000,
- 0xd267e5ea00000000, 0x068d967100000000, 0x8e56960700000000,
- 0x5abce59c00000000, 0x678500ea00000000, 0xb36f737100000000,
- 0x1df7ca0700000000, 0xc91db99c00000000, 0xf4245cea00000000,
- 0x20ce2f7100000000, 0xe4935d0600000000, 0x30792e9d00000000,
- 0x0d40cbeb00000000, 0xd9aab87000000000, 0x7732010600000000,
- 0xa3d8729d00000000, 0x9ee197eb00000000, 0x4a0be47000000000,
- 0xc2d0e40600000000, 0x163a979d00000000, 0x2b0372eb00000000,
- 0xffe9017000000000, 0x5171b80600000000, 0x859bcb9d00000000,
- 0xb8a22eeb00000000, 0x6c485d7000000000, 0x6032940b00000000,
- 0xb4d8e79000000000, 0x89e102e600000000, 0x5d0b717d00000000,
- 0xf393c80b00000000, 0x2779bb9000000000, 0x1a405ee600000000,
- 0xceaa2d7d00000000, 0x46712d0b00000000, 0x929b5e9000000000,
- 0xafa2bbe600000000, 0x7b48c87d00000000, 0xd5d0710b00000000,
- 0x013a029000000000, 0x3c03e7e600000000, 0xe8e9947d00000000,
- 0x2cb4e60a00000000, 0xf85e959100000000, 0xc56770e700000000,
- 0x118d037c00000000, 0xbf15ba0a00000000, 0x6bffc99100000000,
- 0x56c62ce700000000, 0x822c5f7c00000000, 0x0af75f0a00000000,
- 0xde1d2c9100000000, 0xe324c9e700000000, 0x37ceba7c00000000,
- 0x9956030a00000000, 0x4dbc709100000000, 0x708595e700000000,
- 0xa46fe67c00000000, 0xf83e710900000000, 0x2cd4029200000000,
- 0x11ede7e400000000, 0xc507947f00000000, 0x6b9f2d0900000000,
- 0xbf755e9200000000, 0x824cbbe400000000, 0x56a6c87f00000000,
- 0xde7dc80900000000, 0x0a97bb9200000000, 0x37ae5ee400000000,
- 0xe3442d7f00000000, 0x4ddc940900000000, 0x9936e79200000000,
- 0xa40f02e400000000, 0x70e5717f00000000, 0xb4b8030800000000,
- 0x6052709300000000, 0x5d6b95e500000000, 0x8981e67e00000000,
- 0x27195f0800000000, 0xf3f32c9300000000, 0xcecac9e500000000,
- 0x1a20ba7e00000000, 0x92fbba0800000000, 0x4611c99300000000,
- 0x7b282ce500000000, 0xafc25f7e00000000, 0x015ae60800000000,
- 0xd5b0959300000000, 0xe88970e500000000, 0x3c63037e00000000,
- 0x502b5e0e00000000, 0x84c12d9500000000, 0xb9f8c8e300000000,
- 0x6d12bb7800000000, 0xc38a020e00000000, 0x1760719500000000,
- 0x2a5994e300000000, 0xfeb3e77800000000, 0x7668e70e00000000,
- 0xa282949500000000, 0x9fbb71e300000000, 0x4b51027800000000,
- 0xe5c9bb0e00000000, 0x3123c89500000000, 0x0c1a2de300000000,
- 0xd8f05e7800000000, 0x1cad2c0f00000000, 0xc8475f9400000000,
- 0xf57ebae200000000, 0x2194c97900000000, 0x8f0c700f00000000,
- 0x5be6039400000000, 0x66dfe6e200000000, 0xb235957900000000,
- 0x3aee950f00000000, 0xee04e69400000000, 0xd33d03e200000000,
- 0x07d7707900000000, 0xa94fc90f00000000, 0x7da5ba9400000000,
- 0x409c5fe200000000, 0x94762c7900000000, 0xc827bb0c00000000,
- 0x1ccdc89700000000, 0x21f42de100000000, 0xf51e5e7a00000000,
- 0x5b86e70c00000000, 0x8f6c949700000000, 0xb25571e100000000,
- 0x66bf027a00000000, 0xee64020c00000000, 0x3a8e719700000000,
- 0x07b794e100000000, 0xd35de77a00000000, 0x7dc55e0c00000000,
- 0xa92f2d9700000000, 0x9416c8e100000000, 0x40fcbb7a00000000,
- 0x84a1c90d00000000, 0x504bba9600000000, 0x6d725fe000000000,
- 0xb9982c7b00000000, 0x1700950d00000000, 0xc3eae69600000000,
- 0xfed303e000000000, 0x2a39707b00000000, 0xa2e2700d00000000,
- 0x7608039600000000, 0x4b31e6e000000000, 0x9fdb957b00000000,
- 0x31432c0d00000000, 0xe5a95f9600000000, 0xd890bae000000000,
- 0x0c7ac97b00000000},
- {0x0000000000000000, 0x2765258100000000, 0x0fcc3bd900000000,
- 0x28a91e5800000000, 0x5f9e066900000000, 0x78fb23e800000000,
- 0x50523db000000000, 0x7737183100000000, 0xbe3c0dd200000000,
- 0x9959285300000000, 0xb1f0360b00000000, 0x9695138a00000000,
- 0xe1a20bbb00000000, 0xc6c72e3a00000000, 0xee6e306200000000,
- 0xc90b15e300000000, 0x3d7f6b7f00000000, 0x1a1a4efe00000000,
- 0x32b350a600000000, 0x15d6752700000000, 0x62e16d1600000000,
- 0x4584489700000000, 0x6d2d56cf00000000, 0x4a48734e00000000,
- 0x834366ad00000000, 0xa426432c00000000, 0x8c8f5d7400000000,
- 0xabea78f500000000, 0xdcdd60c400000000, 0xfbb8454500000000,
- 0xd3115b1d00000000, 0xf4747e9c00000000, 0x7afed6fe00000000,
- 0x5d9bf37f00000000, 0x7532ed2700000000, 0x5257c8a600000000,
- 0x2560d09700000000, 0x0205f51600000000, 0x2aaceb4e00000000,
- 0x0dc9cecf00000000, 0xc4c2db2c00000000, 0xe3a7fead00000000,
- 0xcb0ee0f500000000, 0xec6bc57400000000, 0x9b5cdd4500000000,
- 0xbc39f8c400000000, 0x9490e69c00000000, 0xb3f5c31d00000000,
- 0x4781bd8100000000, 0x60e4980000000000, 0x484d865800000000,
- 0x6f28a3d900000000, 0x181fbbe800000000, 0x3f7a9e6900000000,
- 0x17d3803100000000, 0x30b6a5b000000000, 0xf9bdb05300000000,
- 0xded895d200000000, 0xf6718b8a00000000, 0xd114ae0b00000000,
- 0xa623b63a00000000, 0x814693bb00000000, 0xa9ef8de300000000,
- 0x8e8aa86200000000, 0xb5fadc2600000000, 0x929ff9a700000000,
- 0xba36e7ff00000000, 0x9d53c27e00000000, 0xea64da4f00000000,
- 0xcd01ffce00000000, 0xe5a8e19600000000, 0xc2cdc41700000000,
- 0x0bc6d1f400000000, 0x2ca3f47500000000, 0x040aea2d00000000,
- 0x236fcfac00000000, 0x5458d79d00000000, 0x733df21c00000000,
- 0x5b94ec4400000000, 0x7cf1c9c500000000, 0x8885b75900000000,
- 0xafe092d800000000, 0x87498c8000000000, 0xa02ca90100000000,
- 0xd71bb13000000000, 0xf07e94b100000000, 0xd8d78ae900000000,
- 0xffb2af6800000000, 0x36b9ba8b00000000, 0x11dc9f0a00000000,
- 0x3975815200000000, 0x1e10a4d300000000, 0x6927bce200000000,
- 0x4e42996300000000, 0x66eb873b00000000, 0x418ea2ba00000000,
- 0xcf040ad800000000, 0xe8612f5900000000, 0xc0c8310100000000,
- 0xe7ad148000000000, 0x909a0cb100000000, 0xb7ff293000000000,
- 0x9f56376800000000, 0xb83312e900000000, 0x7138070a00000000,
- 0x565d228b00000000, 0x7ef43cd300000000, 0x5991195200000000,
- 0x2ea6016300000000, 0x09c324e200000000, 0x216a3aba00000000,
- 0x060f1f3b00000000, 0xf27b61a700000000, 0xd51e442600000000,
- 0xfdb75a7e00000000, 0xdad27fff00000000, 0xade567ce00000000,
- 0x8a80424f00000000, 0xa2295c1700000000, 0x854c799600000000,
- 0x4c476c7500000000, 0x6b2249f400000000, 0x438b57ac00000000,
- 0x64ee722d00000000, 0x13d96a1c00000000, 0x34bc4f9d00000000,
- 0x1c1551c500000000, 0x3b70744400000000, 0x6af5b94d00000000,
- 0x4d909ccc00000000, 0x6539829400000000, 0x425ca71500000000,
- 0x356bbf2400000000, 0x120e9aa500000000, 0x3aa784fd00000000,
- 0x1dc2a17c00000000, 0xd4c9b49f00000000, 0xf3ac911e00000000,
- 0xdb058f4600000000, 0xfc60aac700000000, 0x8b57b2f600000000,
- 0xac32977700000000, 0x849b892f00000000, 0xa3feacae00000000,
- 0x578ad23200000000, 0x70eff7b300000000, 0x5846e9eb00000000,
- 0x7f23cc6a00000000, 0x0814d45b00000000, 0x2f71f1da00000000,
- 0x07d8ef8200000000, 0x20bdca0300000000, 0xe9b6dfe000000000,
- 0xced3fa6100000000, 0xe67ae43900000000, 0xc11fc1b800000000,
- 0xb628d98900000000, 0x914dfc0800000000, 0xb9e4e25000000000,
- 0x9e81c7d100000000, 0x100b6fb300000000, 0x376e4a3200000000,
- 0x1fc7546a00000000, 0x38a271eb00000000, 0x4f9569da00000000,
- 0x68f04c5b00000000, 0x4059520300000000, 0x673c778200000000,
- 0xae37626100000000, 0x895247e000000000, 0xa1fb59b800000000,
- 0x869e7c3900000000, 0xf1a9640800000000, 0xd6cc418900000000,
- 0xfe655fd100000000, 0xd9007a5000000000, 0x2d7404cc00000000,
- 0x0a11214d00000000, 0x22b83f1500000000, 0x05dd1a9400000000,
- 0x72ea02a500000000, 0x558f272400000000, 0x7d26397c00000000,
- 0x5a431cfd00000000, 0x9348091e00000000, 0xb42d2c9f00000000,
- 0x9c8432c700000000, 0xbbe1174600000000, 0xccd60f7700000000,
- 0xebb32af600000000, 0xc31a34ae00000000, 0xe47f112f00000000,
- 0xdf0f656b00000000, 0xf86a40ea00000000, 0xd0c35eb200000000,
- 0xf7a67b3300000000, 0x8091630200000000, 0xa7f4468300000000,
- 0x8f5d58db00000000, 0xa8387d5a00000000, 0x613368b900000000,
- 0x46564d3800000000, 0x6eff536000000000, 0x499a76e100000000,
- 0x3ead6ed000000000, 0x19c84b5100000000, 0x3161550900000000,
- 0x1604708800000000, 0xe2700e1400000000, 0xc5152b9500000000,
- 0xedbc35cd00000000, 0xcad9104c00000000, 0xbdee087d00000000,
- 0x9a8b2dfc00000000, 0xb22233a400000000, 0x9547162500000000,
- 0x5c4c03c600000000, 0x7b29264700000000, 0x5380381f00000000,
- 0x74e51d9e00000000, 0x03d205af00000000, 0x24b7202e00000000,
- 0x0c1e3e7600000000, 0x2b7b1bf700000000, 0xa5f1b39500000000,
- 0x8294961400000000, 0xaa3d884c00000000, 0x8d58adcd00000000,
- 0xfa6fb5fc00000000, 0xdd0a907d00000000, 0xf5a38e2500000000,
- 0xd2c6aba400000000, 0x1bcdbe4700000000, 0x3ca89bc600000000,
- 0x1401859e00000000, 0x3364a01f00000000, 0x4453b82e00000000,
- 0x63369daf00000000, 0x4b9f83f700000000, 0x6cfaa67600000000,
- 0x988ed8ea00000000, 0xbfebfd6b00000000, 0x9742e33300000000,
- 0xb027c6b200000000, 0xc710de8300000000, 0xe075fb0200000000,
- 0xc8dce55a00000000, 0xefb9c0db00000000, 0x26b2d53800000000,
- 0x01d7f0b900000000, 0x297eeee100000000, 0x0e1bcb6000000000,
- 0x792cd35100000000, 0x5e49f6d000000000, 0x76e0e88800000000,
- 0x5185cd0900000000}};
-
-#else /* W == 4 */
-
-local const z_crc_t FAR crc_braid_table[][256] = {
- {0x00000000, 0x9ba54c6f, 0xec3b9e9f, 0x779ed2f0, 0x03063b7f,
- 0x98a37710, 0xef3da5e0, 0x7498e98f, 0x060c76fe, 0x9da93a91,
- 0xea37e861, 0x7192a40e, 0x050a4d81, 0x9eaf01ee, 0xe931d31e,
- 0x72949f71, 0x0c18edfc, 0x97bda193, 0xe0237363, 0x7b863f0c,
- 0x0f1ed683, 0x94bb9aec, 0xe325481c, 0x78800473, 0x0a149b02,
- 0x91b1d76d, 0xe62f059d, 0x7d8a49f2, 0x0912a07d, 0x92b7ec12,
- 0xe5293ee2, 0x7e8c728d, 0x1831dbf8, 0x83949797, 0xf40a4567,
- 0x6faf0908, 0x1b37e087, 0x8092ace8, 0xf70c7e18, 0x6ca93277,
- 0x1e3dad06, 0x8598e169, 0xf2063399, 0x69a37ff6, 0x1d3b9679,
- 0x869eda16, 0xf10008e6, 0x6aa54489, 0x14293604, 0x8f8c7a6b,
- 0xf812a89b, 0x63b7e4f4, 0x172f0d7b, 0x8c8a4114, 0xfb1493e4,
- 0x60b1df8b, 0x122540fa, 0x89800c95, 0xfe1ede65, 0x65bb920a,
- 0x11237b85, 0x8a8637ea, 0xfd18e51a, 0x66bda975, 0x3063b7f0,
- 0xabc6fb9f, 0xdc58296f, 0x47fd6500, 0x33658c8f, 0xa8c0c0e0,
- 0xdf5e1210, 0x44fb5e7f, 0x366fc10e, 0xadca8d61, 0xda545f91,
- 0x41f113fe, 0x3569fa71, 0xaeccb61e, 0xd95264ee, 0x42f72881,
- 0x3c7b5a0c, 0xa7de1663, 0xd040c493, 0x4be588fc, 0x3f7d6173,
- 0xa4d82d1c, 0xd346ffec, 0x48e3b383, 0x3a772cf2, 0xa1d2609d,
- 0xd64cb26d, 0x4de9fe02, 0x3971178d, 0xa2d45be2, 0xd54a8912,
- 0x4eefc57d, 0x28526c08, 0xb3f72067, 0xc469f297, 0x5fccbef8,
- 0x2b545777, 0xb0f11b18, 0xc76fc9e8, 0x5cca8587, 0x2e5e1af6,
- 0xb5fb5699, 0xc2658469, 0x59c0c806, 0x2d582189, 0xb6fd6de6,
- 0xc163bf16, 0x5ac6f379, 0x244a81f4, 0xbfefcd9b, 0xc8711f6b,
- 0x53d45304, 0x274cba8b, 0xbce9f6e4, 0xcb772414, 0x50d2687b,
- 0x2246f70a, 0xb9e3bb65, 0xce7d6995, 0x55d825fa, 0x2140cc75,
- 0xbae5801a, 0xcd7b52ea, 0x56de1e85, 0x60c76fe0, 0xfb62238f,
- 0x8cfcf17f, 0x1759bd10, 0x63c1549f, 0xf86418f0, 0x8ffaca00,
- 0x145f866f, 0x66cb191e, 0xfd6e5571, 0x8af08781, 0x1155cbee,
- 0x65cd2261, 0xfe686e0e, 0x89f6bcfe, 0x1253f091, 0x6cdf821c,
- 0xf77ace73, 0x80e41c83, 0x1b4150ec, 0x6fd9b963, 0xf47cf50c,
- 0x83e227fc, 0x18476b93, 0x6ad3f4e2, 0xf176b88d, 0x86e86a7d,
- 0x1d4d2612, 0x69d5cf9d, 0xf27083f2, 0x85ee5102, 0x1e4b1d6d,
- 0x78f6b418, 0xe353f877, 0x94cd2a87, 0x0f6866e8, 0x7bf08f67,
- 0xe055c308, 0x97cb11f8, 0x0c6e5d97, 0x7efac2e6, 0xe55f8e89,
- 0x92c15c79, 0x09641016, 0x7dfcf999, 0xe659b5f6, 0x91c76706,
- 0x0a622b69, 0x74ee59e4, 0xef4b158b, 0x98d5c77b, 0x03708b14,
- 0x77e8629b, 0xec4d2ef4, 0x9bd3fc04, 0x0076b06b, 0x72e22f1a,
- 0xe9476375, 0x9ed9b185, 0x057cfdea, 0x71e41465, 0xea41580a,
- 0x9ddf8afa, 0x067ac695, 0x50a4d810, 0xcb01947f, 0xbc9f468f,
- 0x273a0ae0, 0x53a2e36f, 0xc807af00, 0xbf997df0, 0x243c319f,
- 0x56a8aeee, 0xcd0de281, 0xba933071, 0x21367c1e, 0x55ae9591,
- 0xce0bd9fe, 0xb9950b0e, 0x22304761, 0x5cbc35ec, 0xc7197983,
- 0xb087ab73, 0x2b22e71c, 0x5fba0e93, 0xc41f42fc, 0xb381900c,
- 0x2824dc63, 0x5ab04312, 0xc1150f7d, 0xb68bdd8d, 0x2d2e91e2,
- 0x59b6786d, 0xc2133402, 0xb58de6f2, 0x2e28aa9d, 0x489503e8,
- 0xd3304f87, 0xa4ae9d77, 0x3f0bd118, 0x4b933897, 0xd03674f8,
- 0xa7a8a608, 0x3c0dea67, 0x4e997516, 0xd53c3979, 0xa2a2eb89,
- 0x3907a7e6, 0x4d9f4e69, 0xd63a0206, 0xa1a4d0f6, 0x3a019c99,
- 0x448dee14, 0xdf28a27b, 0xa8b6708b, 0x33133ce4, 0x478bd56b,
- 0xdc2e9904, 0xabb04bf4, 0x3015079b, 0x428198ea, 0xd924d485,
- 0xaeba0675, 0x351f4a1a, 0x4187a395, 0xda22effa, 0xadbc3d0a,
- 0x36197165},
- {0x00000000, 0xc18edfc0, 0x586cb9c1, 0x99e26601, 0xb0d97382,
- 0x7157ac42, 0xe8b5ca43, 0x293b1583, 0xbac3e145, 0x7b4d3e85,
- 0xe2af5884, 0x23218744, 0x0a1a92c7, 0xcb944d07, 0x52762b06,
- 0x93f8f4c6, 0xaef6c4cb, 0x6f781b0b, 0xf69a7d0a, 0x3714a2ca,
- 0x1e2fb749, 0xdfa16889, 0x46430e88, 0x87cdd148, 0x1435258e,
- 0xd5bbfa4e, 0x4c599c4f, 0x8dd7438f, 0xa4ec560c, 0x656289cc,
- 0xfc80efcd, 0x3d0e300d, 0x869c8fd7, 0x47125017, 0xdef03616,
- 0x1f7ee9d6, 0x3645fc55, 0xf7cb2395, 0x6e294594, 0xafa79a54,
- 0x3c5f6e92, 0xfdd1b152, 0x6433d753, 0xa5bd0893, 0x8c861d10,
- 0x4d08c2d0, 0xd4eaa4d1, 0x15647b11, 0x286a4b1c, 0xe9e494dc,
- 0x7006f2dd, 0xb1882d1d, 0x98b3389e, 0x593de75e, 0xc0df815f,
- 0x01515e9f, 0x92a9aa59, 0x53277599, 0xcac51398, 0x0b4bcc58,
- 0x2270d9db, 0xe3fe061b, 0x7a1c601a, 0xbb92bfda, 0xd64819ef,
- 0x17c6c62f, 0x8e24a02e, 0x4faa7fee, 0x66916a6d, 0xa71fb5ad,
- 0x3efdd3ac, 0xff730c6c, 0x6c8bf8aa, 0xad05276a, 0x34e7416b,
- 0xf5699eab, 0xdc528b28, 0x1ddc54e8, 0x843e32e9, 0x45b0ed29,
- 0x78bedd24, 0xb93002e4, 0x20d264e5, 0xe15cbb25, 0xc867aea6,
- 0x09e97166, 0x900b1767, 0x5185c8a7, 0xc27d3c61, 0x03f3e3a1,
- 0x9a1185a0, 0x5b9f5a60, 0x72a44fe3, 0xb32a9023, 0x2ac8f622,
- 0xeb4629e2, 0x50d49638, 0x915a49f8, 0x08b82ff9, 0xc936f039,
- 0xe00de5ba, 0x21833a7a, 0xb8615c7b, 0x79ef83bb, 0xea17777d,
- 0x2b99a8bd, 0xb27bcebc, 0x73f5117c, 0x5ace04ff, 0x9b40db3f,
- 0x02a2bd3e, 0xc32c62fe, 0xfe2252f3, 0x3fac8d33, 0xa64eeb32,
- 0x67c034f2, 0x4efb2171, 0x8f75feb1, 0x169798b0, 0xd7194770,
- 0x44e1b3b6, 0x856f6c76, 0x1c8d0a77, 0xdd03d5b7, 0xf438c034,
- 0x35b61ff4, 0xac5479f5, 0x6ddaa635, 0x77e1359f, 0xb66fea5f,
- 0x2f8d8c5e, 0xee03539e, 0xc738461d, 0x06b699dd, 0x9f54ffdc,
- 0x5eda201c, 0xcd22d4da, 0x0cac0b1a, 0x954e6d1b, 0x54c0b2db,
- 0x7dfba758, 0xbc757898, 0x25971e99, 0xe419c159, 0xd917f154,
- 0x18992e94, 0x817b4895, 0x40f59755, 0x69ce82d6, 0xa8405d16,
- 0x31a23b17, 0xf02ce4d7, 0x63d41011, 0xa25acfd1, 0x3bb8a9d0,
- 0xfa367610, 0xd30d6393, 0x1283bc53, 0x8b61da52, 0x4aef0592,
- 0xf17dba48, 0x30f36588, 0xa9110389, 0x689fdc49, 0x41a4c9ca,
- 0x802a160a, 0x19c8700b, 0xd846afcb, 0x4bbe5b0d, 0x8a3084cd,
- 0x13d2e2cc, 0xd25c3d0c, 0xfb67288f, 0x3ae9f74f, 0xa30b914e,
- 0x62854e8e, 0x5f8b7e83, 0x9e05a143, 0x07e7c742, 0xc6691882,
- 0xef520d01, 0x2edcd2c1, 0xb73eb4c0, 0x76b06b00, 0xe5489fc6,
- 0x24c64006, 0xbd242607, 0x7caaf9c7, 0x5591ec44, 0x941f3384,
- 0x0dfd5585, 0xcc738a45, 0xa1a92c70, 0x6027f3b0, 0xf9c595b1,
- 0x384b4a71, 0x11705ff2, 0xd0fe8032, 0x491ce633, 0x889239f3,
- 0x1b6acd35, 0xdae412f5, 0x430674f4, 0x8288ab34, 0xabb3beb7,
- 0x6a3d6177, 0xf3df0776, 0x3251d8b6, 0x0f5fe8bb, 0xced1377b,
- 0x5733517a, 0x96bd8eba, 0xbf869b39, 0x7e0844f9, 0xe7ea22f8,
- 0x2664fd38, 0xb59c09fe, 0x7412d63e, 0xedf0b03f, 0x2c7e6fff,
- 0x05457a7c, 0xc4cba5bc, 0x5d29c3bd, 0x9ca71c7d, 0x2735a3a7,
- 0xe6bb7c67, 0x7f591a66, 0xbed7c5a6, 0x97ecd025, 0x56620fe5,
- 0xcf8069e4, 0x0e0eb624, 0x9df642e2, 0x5c789d22, 0xc59afb23,
- 0x041424e3, 0x2d2f3160, 0xeca1eea0, 0x754388a1, 0xb4cd5761,
- 0x89c3676c, 0x484db8ac, 0xd1afdead, 0x1021016d, 0x391a14ee,
- 0xf894cb2e, 0x6176ad2f, 0xa0f872ef, 0x33008629, 0xf28e59e9,
- 0x6b6c3fe8, 0xaae2e028, 0x83d9f5ab, 0x42572a6b, 0xdbb54c6a,
- 0x1a3b93aa},
- {0x00000000, 0xefc26b3e, 0x04f5d03d, 0xeb37bb03, 0x09eba07a,
- 0xe629cb44, 0x0d1e7047, 0xe2dc1b79, 0x13d740f4, 0xfc152bca,
- 0x172290c9, 0xf8e0fbf7, 0x1a3ce08e, 0xf5fe8bb0, 0x1ec930b3,
- 0xf10b5b8d, 0x27ae81e8, 0xc86cead6, 0x235b51d5, 0xcc993aeb,
- 0x2e452192, 0xc1874aac, 0x2ab0f1af, 0xc5729a91, 0x3479c11c,
- 0xdbbbaa22, 0x308c1121, 0xdf4e7a1f, 0x3d926166, 0xd2500a58,
- 0x3967b15b, 0xd6a5da65, 0x4f5d03d0, 0xa09f68ee, 0x4ba8d3ed,
- 0xa46ab8d3, 0x46b6a3aa, 0xa974c894, 0x42437397, 0xad8118a9,
- 0x5c8a4324, 0xb348281a, 0x587f9319, 0xb7bdf827, 0x5561e35e,
- 0xbaa38860, 0x51943363, 0xbe56585d, 0x68f38238, 0x8731e906,
- 0x6c065205, 0x83c4393b, 0x61182242, 0x8eda497c, 0x65edf27f,
- 0x8a2f9941, 0x7b24c2cc, 0x94e6a9f2, 0x7fd112f1, 0x901379cf,
- 0x72cf62b6, 0x9d0d0988, 0x763ab28b, 0x99f8d9b5, 0x9eba07a0,
- 0x71786c9e, 0x9a4fd79d, 0x758dbca3, 0x9751a7da, 0x7893cce4,
- 0x93a477e7, 0x7c661cd9, 0x8d6d4754, 0x62af2c6a, 0x89989769,
- 0x665afc57, 0x8486e72e, 0x6b448c10, 0x80733713, 0x6fb15c2d,
- 0xb9148648, 0x56d6ed76, 0xbde15675, 0x52233d4b, 0xb0ff2632,
- 0x5f3d4d0c, 0xb40af60f, 0x5bc89d31, 0xaac3c6bc, 0x4501ad82,
- 0xae361681, 0x41f47dbf, 0xa32866c6, 0x4cea0df8, 0xa7ddb6fb,
- 0x481fddc5, 0xd1e70470, 0x3e256f4e, 0xd512d44d, 0x3ad0bf73,
- 0xd80ca40a, 0x37cecf34, 0xdcf97437, 0x333b1f09, 0xc2304484,
- 0x2df22fba, 0xc6c594b9, 0x2907ff87, 0xcbdbe4fe, 0x24198fc0,
- 0xcf2e34c3, 0x20ec5ffd, 0xf6498598, 0x198beea6, 0xf2bc55a5,
- 0x1d7e3e9b, 0xffa225e2, 0x10604edc, 0xfb57f5df, 0x14959ee1,
- 0xe59ec56c, 0x0a5cae52, 0xe16b1551, 0x0ea97e6f, 0xec756516,
- 0x03b70e28, 0xe880b52b, 0x0742de15, 0xe6050901, 0x09c7623f,
- 0xe2f0d93c, 0x0d32b202, 0xefeea97b, 0x002cc245, 0xeb1b7946,
- 0x04d91278, 0xf5d249f5, 0x1a1022cb, 0xf12799c8, 0x1ee5f2f6,
- 0xfc39e98f, 0x13fb82b1, 0xf8cc39b2, 0x170e528c, 0xc1ab88e9,
- 0x2e69e3d7, 0xc55e58d4, 0x2a9c33ea, 0xc8402893, 0x278243ad,
- 0xccb5f8ae, 0x23779390, 0xd27cc81d, 0x3dbea323, 0xd6891820,
- 0x394b731e, 0xdb976867, 0x34550359, 0xdf62b85a, 0x30a0d364,
- 0xa9580ad1, 0x469a61ef, 0xadaddaec, 0x426fb1d2, 0xa0b3aaab,
- 0x4f71c195, 0xa4467a96, 0x4b8411a8, 0xba8f4a25, 0x554d211b,
- 0xbe7a9a18, 0x51b8f126, 0xb364ea5f, 0x5ca68161, 0xb7913a62,
- 0x5853515c, 0x8ef68b39, 0x6134e007, 0x8a035b04, 0x65c1303a,
- 0x871d2b43, 0x68df407d, 0x83e8fb7e, 0x6c2a9040, 0x9d21cbcd,
- 0x72e3a0f3, 0x99d41bf0, 0x761670ce, 0x94ca6bb7, 0x7b080089,
- 0x903fbb8a, 0x7ffdd0b4, 0x78bf0ea1, 0x977d659f, 0x7c4ade9c,
- 0x9388b5a2, 0x7154aedb, 0x9e96c5e5, 0x75a17ee6, 0x9a6315d8,
- 0x6b684e55, 0x84aa256b, 0x6f9d9e68, 0x805ff556, 0x6283ee2f,
- 0x8d418511, 0x66763e12, 0x89b4552c, 0x5f118f49, 0xb0d3e477,
- 0x5be45f74, 0xb426344a, 0x56fa2f33, 0xb938440d, 0x520fff0e,
- 0xbdcd9430, 0x4cc6cfbd, 0xa304a483, 0x48331f80, 0xa7f174be,
- 0x452d6fc7, 0xaaef04f9, 0x41d8bffa, 0xae1ad4c4, 0x37e20d71,
- 0xd820664f, 0x3317dd4c, 0xdcd5b672, 0x3e09ad0b, 0xd1cbc635,
- 0x3afc7d36, 0xd53e1608, 0x24354d85, 0xcbf726bb, 0x20c09db8,
- 0xcf02f686, 0x2ddeedff, 0xc21c86c1, 0x292b3dc2, 0xc6e956fc,
- 0x104c8c99, 0xff8ee7a7, 0x14b95ca4, 0xfb7b379a, 0x19a72ce3,
- 0xf66547dd, 0x1d52fcde, 0xf29097e0, 0x039bcc6d, 0xec59a753,
- 0x076e1c50, 0xe8ac776e, 0x0a706c17, 0xe5b20729, 0x0e85bc2a,
- 0xe147d714},
- {0x00000000, 0x177b1443, 0x2ef62886, 0x398d3cc5, 0x5dec510c,
- 0x4a97454f, 0x731a798a, 0x64616dc9, 0xbbd8a218, 0xaca3b65b,
- 0x952e8a9e, 0x82559edd, 0xe634f314, 0xf14fe757, 0xc8c2db92,
- 0xdfb9cfd1, 0xacc04271, 0xbbbb5632, 0x82366af7, 0x954d7eb4,
- 0xf12c137d, 0xe657073e, 0xdfda3bfb, 0xc8a12fb8, 0x1718e069,
- 0x0063f42a, 0x39eec8ef, 0x2e95dcac, 0x4af4b165, 0x5d8fa526,
- 0x640299e3, 0x73798da0, 0x82f182a3, 0x958a96e0, 0xac07aa25,
- 0xbb7cbe66, 0xdf1dd3af, 0xc866c7ec, 0xf1ebfb29, 0xe690ef6a,
- 0x392920bb, 0x2e5234f8, 0x17df083d, 0x00a41c7e, 0x64c571b7,
- 0x73be65f4, 0x4a335931, 0x5d484d72, 0x2e31c0d2, 0x394ad491,
- 0x00c7e854, 0x17bcfc17, 0x73dd91de, 0x64a6859d, 0x5d2bb958,
- 0x4a50ad1b, 0x95e962ca, 0x82927689, 0xbb1f4a4c, 0xac645e0f,
- 0xc80533c6, 0xdf7e2785, 0xe6f31b40, 0xf1880f03, 0xde920307,
- 0xc9e91744, 0xf0642b81, 0xe71f3fc2, 0x837e520b, 0x94054648,
- 0xad887a8d, 0xbaf36ece, 0x654aa11f, 0x7231b55c, 0x4bbc8999,
- 0x5cc79dda, 0x38a6f013, 0x2fdde450, 0x1650d895, 0x012bccd6,
- 0x72524176, 0x65295535, 0x5ca469f0, 0x4bdf7db3, 0x2fbe107a,
- 0x38c50439, 0x014838fc, 0x16332cbf, 0xc98ae36e, 0xdef1f72d,
- 0xe77ccbe8, 0xf007dfab, 0x9466b262, 0x831da621, 0xba909ae4,
- 0xadeb8ea7, 0x5c6381a4, 0x4b1895e7, 0x7295a922, 0x65eebd61,
- 0x018fd0a8, 0x16f4c4eb, 0x2f79f82e, 0x3802ec6d, 0xe7bb23bc,
- 0xf0c037ff, 0xc94d0b3a, 0xde361f79, 0xba5772b0, 0xad2c66f3,
- 0x94a15a36, 0x83da4e75, 0xf0a3c3d5, 0xe7d8d796, 0xde55eb53,
- 0xc92eff10, 0xad4f92d9, 0xba34869a, 0x83b9ba5f, 0x94c2ae1c,
- 0x4b7b61cd, 0x5c00758e, 0x658d494b, 0x72f65d08, 0x169730c1,
- 0x01ec2482, 0x38611847, 0x2f1a0c04, 0x6655004f, 0x712e140c,
- 0x48a328c9, 0x5fd83c8a, 0x3bb95143, 0x2cc24500, 0x154f79c5,
- 0x02346d86, 0xdd8da257, 0xcaf6b614, 0xf37b8ad1, 0xe4009e92,
- 0x8061f35b, 0x971ae718, 0xae97dbdd, 0xb9eccf9e, 0xca95423e,
- 0xddee567d, 0xe4636ab8, 0xf3187efb, 0x97791332, 0x80020771,
- 0xb98f3bb4, 0xaef42ff7, 0x714de026, 0x6636f465, 0x5fbbc8a0,
- 0x48c0dce3, 0x2ca1b12a, 0x3bdaa569, 0x025799ac, 0x152c8def,
- 0xe4a482ec, 0xf3df96af, 0xca52aa6a, 0xdd29be29, 0xb948d3e0,
- 0xae33c7a3, 0x97befb66, 0x80c5ef25, 0x5f7c20f4, 0x480734b7,
- 0x718a0872, 0x66f11c31, 0x029071f8, 0x15eb65bb, 0x2c66597e,
- 0x3b1d4d3d, 0x4864c09d, 0x5f1fd4de, 0x6692e81b, 0x71e9fc58,
- 0x15889191, 0x02f385d2, 0x3b7eb917, 0x2c05ad54, 0xf3bc6285,
- 0xe4c776c6, 0xdd4a4a03, 0xca315e40, 0xae503389, 0xb92b27ca,
- 0x80a61b0f, 0x97dd0f4c, 0xb8c70348, 0xafbc170b, 0x96312bce,
- 0x814a3f8d, 0xe52b5244, 0xf2504607, 0xcbdd7ac2, 0xdca66e81,
- 0x031fa150, 0x1464b513, 0x2de989d6, 0x3a929d95, 0x5ef3f05c,
- 0x4988e41f, 0x7005d8da, 0x677ecc99, 0x14074139, 0x037c557a,
- 0x3af169bf, 0x2d8a7dfc, 0x49eb1035, 0x5e900476, 0x671d38b3,
- 0x70662cf0, 0xafdfe321, 0xb8a4f762, 0x8129cba7, 0x9652dfe4,
- 0xf233b22d, 0xe548a66e, 0xdcc59aab, 0xcbbe8ee8, 0x3a3681eb,
- 0x2d4d95a8, 0x14c0a96d, 0x03bbbd2e, 0x67dad0e7, 0x70a1c4a4,
- 0x492cf861, 0x5e57ec22, 0x81ee23f3, 0x969537b0, 0xaf180b75,
- 0xb8631f36, 0xdc0272ff, 0xcb7966bc, 0xf2f45a79, 0xe58f4e3a,
- 0x96f6c39a, 0x818dd7d9, 0xb800eb1c, 0xaf7bff5f, 0xcb1a9296,
- 0xdc6186d5, 0xe5ecba10, 0xf297ae53, 0x2d2e6182, 0x3a5575c1,
- 0x03d84904, 0x14a35d47, 0x70c2308e, 0x67b924cd, 0x5e341808,
- 0x494f0c4b}};
-
-local const z_word_t FAR crc_braid_big_table[][256] = {
- {0x00000000, 0x43147b17, 0x8628f62e, 0xc53c8d39, 0x0c51ec5d,
- 0x4f45974a, 0x8a791a73, 0xc96d6164, 0x18a2d8bb, 0x5bb6a3ac,
- 0x9e8a2e95, 0xdd9e5582, 0x14f334e6, 0x57e74ff1, 0x92dbc2c8,
- 0xd1cfb9df, 0x7142c0ac, 0x3256bbbb, 0xf76a3682, 0xb47e4d95,
- 0x7d132cf1, 0x3e0757e6, 0xfb3bdadf, 0xb82fa1c8, 0x69e01817,
- 0x2af46300, 0xefc8ee39, 0xacdc952e, 0x65b1f44a, 0x26a58f5d,
- 0xe3990264, 0xa08d7973, 0xa382f182, 0xe0968a95, 0x25aa07ac,
- 0x66be7cbb, 0xafd31ddf, 0xecc766c8, 0x29fbebf1, 0x6aef90e6,
- 0xbb202939, 0xf834522e, 0x3d08df17, 0x7e1ca400, 0xb771c564,
- 0xf465be73, 0x3159334a, 0x724d485d, 0xd2c0312e, 0x91d44a39,
- 0x54e8c700, 0x17fcbc17, 0xde91dd73, 0x9d85a664, 0x58b92b5d,
- 0x1bad504a, 0xca62e995, 0x89769282, 0x4c4a1fbb, 0x0f5e64ac,
- 0xc63305c8, 0x85277edf, 0x401bf3e6, 0x030f88f1, 0x070392de,
- 0x4417e9c9, 0x812b64f0, 0xc23f1fe7, 0x0b527e83, 0x48460594,
- 0x8d7a88ad, 0xce6ef3ba, 0x1fa14a65, 0x5cb53172, 0x9989bc4b,
- 0xda9dc75c, 0x13f0a638, 0x50e4dd2f, 0x95d85016, 0xd6cc2b01,
- 0x76415272, 0x35552965, 0xf069a45c, 0xb37ddf4b, 0x7a10be2f,
- 0x3904c538, 0xfc384801, 0xbf2c3316, 0x6ee38ac9, 0x2df7f1de,
- 0xe8cb7ce7, 0xabdf07f0, 0x62b26694, 0x21a61d83, 0xe49a90ba,
- 0xa78eebad, 0xa481635c, 0xe795184b, 0x22a99572, 0x61bdee65,
- 0xa8d08f01, 0xebc4f416, 0x2ef8792f, 0x6dec0238, 0xbc23bbe7,
- 0xff37c0f0, 0x3a0b4dc9, 0x791f36de, 0xb07257ba, 0xf3662cad,
- 0x365aa194, 0x754eda83, 0xd5c3a3f0, 0x96d7d8e7, 0x53eb55de,
- 0x10ff2ec9, 0xd9924fad, 0x9a8634ba, 0x5fbab983, 0x1caec294,
- 0xcd617b4b, 0x8e75005c, 0x4b498d65, 0x085df672, 0xc1309716,
- 0x8224ec01, 0x47186138, 0x040c1a2f, 0x4f005566, 0x0c142e71,
- 0xc928a348, 0x8a3cd85f, 0x4351b93b, 0x0045c22c, 0xc5794f15,
- 0x866d3402, 0x57a28ddd, 0x14b6f6ca, 0xd18a7bf3, 0x929e00e4,
- 0x5bf36180, 0x18e71a97, 0xdddb97ae, 0x9ecfecb9, 0x3e4295ca,
- 0x7d56eedd, 0xb86a63e4, 0xfb7e18f3, 0x32137997, 0x71070280,
- 0xb43b8fb9, 0xf72ff4ae, 0x26e04d71, 0x65f43666, 0xa0c8bb5f,
- 0xe3dcc048, 0x2ab1a12c, 0x69a5da3b, 0xac995702, 0xef8d2c15,
- 0xec82a4e4, 0xaf96dff3, 0x6aaa52ca, 0x29be29dd, 0xe0d348b9,
- 0xa3c733ae, 0x66fbbe97, 0x25efc580, 0xf4207c5f, 0xb7340748,
- 0x72088a71, 0x311cf166, 0xf8719002, 0xbb65eb15, 0x7e59662c,
- 0x3d4d1d3b, 0x9dc06448, 0xded41f5f, 0x1be89266, 0x58fce971,
- 0x91918815, 0xd285f302, 0x17b97e3b, 0x54ad052c, 0x8562bcf3,
- 0xc676c7e4, 0x034a4add, 0x405e31ca, 0x893350ae, 0xca272bb9,
- 0x0f1ba680, 0x4c0fdd97, 0x4803c7b8, 0x0b17bcaf, 0xce2b3196,
- 0x8d3f4a81, 0x44522be5, 0x074650f2, 0xc27addcb, 0x816ea6dc,
- 0x50a11f03, 0x13b56414, 0xd689e92d, 0x959d923a, 0x5cf0f35e,
- 0x1fe48849, 0xdad80570, 0x99cc7e67, 0x39410714, 0x7a557c03,
- 0xbf69f13a, 0xfc7d8a2d, 0x3510eb49, 0x7604905e, 0xb3381d67,
- 0xf02c6670, 0x21e3dfaf, 0x62f7a4b8, 0xa7cb2981, 0xe4df5296,
- 0x2db233f2, 0x6ea648e5, 0xab9ac5dc, 0xe88ebecb, 0xeb81363a,
- 0xa8954d2d, 0x6da9c014, 0x2ebdbb03, 0xe7d0da67, 0xa4c4a170,
- 0x61f82c49, 0x22ec575e, 0xf323ee81, 0xb0379596, 0x750b18af,
- 0x361f63b8, 0xff7202dc, 0xbc6679cb, 0x795af4f2, 0x3a4e8fe5,
- 0x9ac3f696, 0xd9d78d81, 0x1ceb00b8, 0x5fff7baf, 0x96921acb,
- 0xd58661dc, 0x10baece5, 0x53ae97f2, 0x82612e2d, 0xc175553a,
- 0x0449d803, 0x475da314, 0x8e30c270, 0xcd24b967, 0x0818345e,
- 0x4b0c4f49},
- {0x00000000, 0x3e6bc2ef, 0x3dd0f504, 0x03bb37eb, 0x7aa0eb09,
- 0x44cb29e6, 0x47701e0d, 0x791bdce2, 0xf440d713, 0xca2b15fc,
- 0xc9902217, 0xf7fbe0f8, 0x8ee03c1a, 0xb08bfef5, 0xb330c91e,
- 0x8d5b0bf1, 0xe881ae27, 0xd6ea6cc8, 0xd5515b23, 0xeb3a99cc,
- 0x9221452e, 0xac4a87c1, 0xaff1b02a, 0x919a72c5, 0x1cc17934,
- 0x22aabbdb, 0x21118c30, 0x1f7a4edf, 0x6661923d, 0x580a50d2,
- 0x5bb16739, 0x65daa5d6, 0xd0035d4f, 0xee689fa0, 0xedd3a84b,
- 0xd3b86aa4, 0xaaa3b646, 0x94c874a9, 0x97734342, 0xa91881ad,
- 0x24438a5c, 0x1a2848b3, 0x19937f58, 0x27f8bdb7, 0x5ee36155,
- 0x6088a3ba, 0x63339451, 0x5d5856be, 0x3882f368, 0x06e93187,
- 0x0552066c, 0x3b39c483, 0x42221861, 0x7c49da8e, 0x7ff2ed65,
- 0x41992f8a, 0xccc2247b, 0xf2a9e694, 0xf112d17f, 0xcf791390,
- 0xb662cf72, 0x88090d9d, 0x8bb23a76, 0xb5d9f899, 0xa007ba9e,
- 0x9e6c7871, 0x9dd74f9a, 0xa3bc8d75, 0xdaa75197, 0xe4cc9378,
- 0xe777a493, 0xd91c667c, 0x54476d8d, 0x6a2caf62, 0x69979889,
- 0x57fc5a66, 0x2ee78684, 0x108c446b, 0x13377380, 0x2d5cb16f,
- 0x488614b9, 0x76edd656, 0x7556e1bd, 0x4b3d2352, 0x3226ffb0,
- 0x0c4d3d5f, 0x0ff60ab4, 0x319dc85b, 0xbcc6c3aa, 0x82ad0145,
- 0x811636ae, 0xbf7df441, 0xc66628a3, 0xf80dea4c, 0xfbb6dda7,
- 0xc5dd1f48, 0x7004e7d1, 0x4e6f253e, 0x4dd412d5, 0x73bfd03a,
- 0x0aa40cd8, 0x34cfce37, 0x3774f9dc, 0x091f3b33, 0x844430c2,
- 0xba2ff22d, 0xb994c5c6, 0x87ff0729, 0xfee4dbcb, 0xc08f1924,
- 0xc3342ecf, 0xfd5fec20, 0x988549f6, 0xa6ee8b19, 0xa555bcf2,
- 0x9b3e7e1d, 0xe225a2ff, 0xdc4e6010, 0xdff557fb, 0xe19e9514,
- 0x6cc59ee5, 0x52ae5c0a, 0x51156be1, 0x6f7ea90e, 0x166575ec,
- 0x280eb703, 0x2bb580e8, 0x15de4207, 0x010905e6, 0x3f62c709,
- 0x3cd9f0e2, 0x02b2320d, 0x7ba9eeef, 0x45c22c00, 0x46791beb,
- 0x7812d904, 0xf549d2f5, 0xcb22101a, 0xc89927f1, 0xf6f2e51e,
- 0x8fe939fc, 0xb182fb13, 0xb239ccf8, 0x8c520e17, 0xe988abc1,
- 0xd7e3692e, 0xd4585ec5, 0xea339c2a, 0x932840c8, 0xad438227,
- 0xaef8b5cc, 0x90937723, 0x1dc87cd2, 0x23a3be3d, 0x201889d6,
- 0x1e734b39, 0x676897db, 0x59035534, 0x5ab862df, 0x64d3a030,
- 0xd10a58a9, 0xef619a46, 0xecdaadad, 0xd2b16f42, 0xabaab3a0,
- 0x95c1714f, 0x967a46a4, 0xa811844b, 0x254a8fba, 0x1b214d55,
- 0x189a7abe, 0x26f1b851, 0x5fea64b3, 0x6181a65c, 0x623a91b7,
- 0x5c515358, 0x398bf68e, 0x07e03461, 0x045b038a, 0x3a30c165,
- 0x432b1d87, 0x7d40df68, 0x7efbe883, 0x40902a6c, 0xcdcb219d,
- 0xf3a0e372, 0xf01bd499, 0xce701676, 0xb76bca94, 0x8900087b,
- 0x8abb3f90, 0xb4d0fd7f, 0xa10ebf78, 0x9f657d97, 0x9cde4a7c,
- 0xa2b58893, 0xdbae5471, 0xe5c5969e, 0xe67ea175, 0xd815639a,
- 0x554e686b, 0x6b25aa84, 0x689e9d6f, 0x56f55f80, 0x2fee8362,
- 0x1185418d, 0x123e7666, 0x2c55b489, 0x498f115f, 0x77e4d3b0,
- 0x745fe45b, 0x4a3426b4, 0x332ffa56, 0x0d4438b9, 0x0eff0f52,
- 0x3094cdbd, 0xbdcfc64c, 0x83a404a3, 0x801f3348, 0xbe74f1a7,
- 0xc76f2d45, 0xf904efaa, 0xfabfd841, 0xc4d41aae, 0x710de237,
- 0x4f6620d8, 0x4cdd1733, 0x72b6d5dc, 0x0bad093e, 0x35c6cbd1,
- 0x367dfc3a, 0x08163ed5, 0x854d3524, 0xbb26f7cb, 0xb89dc020,
- 0x86f602cf, 0xffedde2d, 0xc1861cc2, 0xc23d2b29, 0xfc56e9c6,
- 0x998c4c10, 0xa7e78eff, 0xa45cb914, 0x9a377bfb, 0xe32ca719,
- 0xdd4765f6, 0xdefc521d, 0xe09790f2, 0x6dcc9b03, 0x53a759ec,
- 0x501c6e07, 0x6e77ace8, 0x176c700a, 0x2907b2e5, 0x2abc850e,
- 0x14d747e1},
- {0x00000000, 0xc0df8ec1, 0xc1b96c58, 0x0166e299, 0x8273d9b0,
- 0x42ac5771, 0x43cab5e8, 0x83153b29, 0x45e1c3ba, 0x853e4d7b,
- 0x8458afe2, 0x44872123, 0xc7921a0a, 0x074d94cb, 0x062b7652,
- 0xc6f4f893, 0xcbc4f6ae, 0x0b1b786f, 0x0a7d9af6, 0xcaa21437,
- 0x49b72f1e, 0x8968a1df, 0x880e4346, 0x48d1cd87, 0x8e253514,
- 0x4efabbd5, 0x4f9c594c, 0x8f43d78d, 0x0c56eca4, 0xcc896265,
- 0xcdef80fc, 0x0d300e3d, 0xd78f9c86, 0x17501247, 0x1636f0de,
- 0xd6e97e1f, 0x55fc4536, 0x9523cbf7, 0x9445296e, 0x549aa7af,
- 0x926e5f3c, 0x52b1d1fd, 0x53d73364, 0x9308bda5, 0x101d868c,
- 0xd0c2084d, 0xd1a4ead4, 0x117b6415, 0x1c4b6a28, 0xdc94e4e9,
- 0xddf20670, 0x1d2d88b1, 0x9e38b398, 0x5ee73d59, 0x5f81dfc0,
- 0x9f5e5101, 0x59aaa992, 0x99752753, 0x9813c5ca, 0x58cc4b0b,
- 0xdbd97022, 0x1b06fee3, 0x1a601c7a, 0xdabf92bb, 0xef1948d6,
- 0x2fc6c617, 0x2ea0248e, 0xee7faa4f, 0x6d6a9166, 0xadb51fa7,
- 0xacd3fd3e, 0x6c0c73ff, 0xaaf88b6c, 0x6a2705ad, 0x6b41e734,
- 0xab9e69f5, 0x288b52dc, 0xe854dc1d, 0xe9323e84, 0x29edb045,
- 0x24ddbe78, 0xe40230b9, 0xe564d220, 0x25bb5ce1, 0xa6ae67c8,
- 0x6671e909, 0x67170b90, 0xa7c88551, 0x613c7dc2, 0xa1e3f303,
- 0xa085119a, 0x605a9f5b, 0xe34fa472, 0x23902ab3, 0x22f6c82a,
- 0xe22946eb, 0x3896d450, 0xf8495a91, 0xf92fb808, 0x39f036c9,
- 0xbae50de0, 0x7a3a8321, 0x7b5c61b8, 0xbb83ef79, 0x7d7717ea,
- 0xbda8992b, 0xbcce7bb2, 0x7c11f573, 0xff04ce5a, 0x3fdb409b,
- 0x3ebda202, 0xfe622cc3, 0xf35222fe, 0x338dac3f, 0x32eb4ea6,
- 0xf234c067, 0x7121fb4e, 0xb1fe758f, 0xb0989716, 0x704719d7,
- 0xb6b3e144, 0x766c6f85, 0x770a8d1c, 0xb7d503dd, 0x34c038f4,
- 0xf41fb635, 0xf57954ac, 0x35a6da6d, 0x9f35e177, 0x5fea6fb6,
- 0x5e8c8d2f, 0x9e5303ee, 0x1d4638c7, 0xdd99b606, 0xdcff549f,
- 0x1c20da5e, 0xdad422cd, 0x1a0bac0c, 0x1b6d4e95, 0xdbb2c054,
- 0x58a7fb7d, 0x987875bc, 0x991e9725, 0x59c119e4, 0x54f117d9,
- 0x942e9918, 0x95487b81, 0x5597f540, 0xd682ce69, 0x165d40a8,
- 0x173ba231, 0xd7e42cf0, 0x1110d463, 0xd1cf5aa2, 0xd0a9b83b,
- 0x107636fa, 0x93630dd3, 0x53bc8312, 0x52da618b, 0x9205ef4a,
- 0x48ba7df1, 0x8865f330, 0x890311a9, 0x49dc9f68, 0xcac9a441,
- 0x0a162a80, 0x0b70c819, 0xcbaf46d8, 0x0d5bbe4b, 0xcd84308a,
- 0xcce2d213, 0x0c3d5cd2, 0x8f2867fb, 0x4ff7e93a, 0x4e910ba3,
- 0x8e4e8562, 0x837e8b5f, 0x43a1059e, 0x42c7e707, 0x821869c6,
- 0x010d52ef, 0xc1d2dc2e, 0xc0b43eb7, 0x006bb076, 0xc69f48e5,
- 0x0640c624, 0x072624bd, 0xc7f9aa7c, 0x44ec9155, 0x84331f94,
- 0x8555fd0d, 0x458a73cc, 0x702ca9a1, 0xb0f32760, 0xb195c5f9,
- 0x714a4b38, 0xf25f7011, 0x3280fed0, 0x33e61c49, 0xf3399288,
- 0x35cd6a1b, 0xf512e4da, 0xf4740643, 0x34ab8882, 0xb7beb3ab,
- 0x77613d6a, 0x7607dff3, 0xb6d85132, 0xbbe85f0f, 0x7b37d1ce,
- 0x7a513357, 0xba8ebd96, 0x399b86bf, 0xf944087e, 0xf822eae7,
- 0x38fd6426, 0xfe099cb5, 0x3ed61274, 0x3fb0f0ed, 0xff6f7e2c,
- 0x7c7a4505, 0xbca5cbc4, 0xbdc3295d, 0x7d1ca79c, 0xa7a33527,
- 0x677cbbe6, 0x661a597f, 0xa6c5d7be, 0x25d0ec97, 0xe50f6256,
- 0xe46980cf, 0x24b60e0e, 0xe242f69d, 0x229d785c, 0x23fb9ac5,
- 0xe3241404, 0x60312f2d, 0xa0eea1ec, 0xa1884375, 0x6157cdb4,
- 0x6c67c389, 0xacb84d48, 0xaddeafd1, 0x6d012110, 0xee141a39,
- 0x2ecb94f8, 0x2fad7661, 0xef72f8a0, 0x29860033, 0xe9598ef2,
- 0xe83f6c6b, 0x28e0e2aa, 0xabf5d983, 0x6b2a5742, 0x6a4cb5db,
- 0xaa933b1a},
- {0x00000000, 0x6f4ca59b, 0x9f9e3bec, 0xf0d29e77, 0x7f3b0603,
- 0x1077a398, 0xe0a53def, 0x8fe99874, 0xfe760c06, 0x913aa99d,
- 0x61e837ea, 0x0ea49271, 0x814d0a05, 0xee01af9e, 0x1ed331e9,
- 0x719f9472, 0xfced180c, 0x93a1bd97, 0x637323e0, 0x0c3f867b,
- 0x83d61e0f, 0xec9abb94, 0x1c4825e3, 0x73048078, 0x029b140a,
- 0x6dd7b191, 0x9d052fe6, 0xf2498a7d, 0x7da01209, 0x12ecb792,
- 0xe23e29e5, 0x8d728c7e, 0xf8db3118, 0x97979483, 0x67450af4,
- 0x0809af6f, 0x87e0371b, 0xe8ac9280, 0x187e0cf7, 0x7732a96c,
- 0x06ad3d1e, 0x69e19885, 0x993306f2, 0xf67fa369, 0x79963b1d,
- 0x16da9e86, 0xe60800f1, 0x8944a56a, 0x04362914, 0x6b7a8c8f,
- 0x9ba812f8, 0xf4e4b763, 0x7b0d2f17, 0x14418a8c, 0xe49314fb,
- 0x8bdfb160, 0xfa402512, 0x950c8089, 0x65de1efe, 0x0a92bb65,
- 0x857b2311, 0xea37868a, 0x1ae518fd, 0x75a9bd66, 0xf0b76330,
- 0x9ffbc6ab, 0x6f2958dc, 0x0065fd47, 0x8f8c6533, 0xe0c0c0a8,
- 0x10125edf, 0x7f5efb44, 0x0ec16f36, 0x618dcaad, 0x915f54da,
- 0xfe13f141, 0x71fa6935, 0x1eb6ccae, 0xee6452d9, 0x8128f742,
- 0x0c5a7b3c, 0x6316dea7, 0x93c440d0, 0xfc88e54b, 0x73617d3f,
- 0x1c2dd8a4, 0xecff46d3, 0x83b3e348, 0xf22c773a, 0x9d60d2a1,
- 0x6db24cd6, 0x02fee94d, 0x8d177139, 0xe25bd4a2, 0x12894ad5,
- 0x7dc5ef4e, 0x086c5228, 0x6720f7b3, 0x97f269c4, 0xf8becc5f,
- 0x7757542b, 0x181bf1b0, 0xe8c96fc7, 0x8785ca5c, 0xf61a5e2e,
- 0x9956fbb5, 0x698465c2, 0x06c8c059, 0x8921582d, 0xe66dfdb6,
- 0x16bf63c1, 0x79f3c65a, 0xf4814a24, 0x9bcdefbf, 0x6b1f71c8,
- 0x0453d453, 0x8bba4c27, 0xe4f6e9bc, 0x142477cb, 0x7b68d250,
- 0x0af74622, 0x65bbe3b9, 0x95697dce, 0xfa25d855, 0x75cc4021,
- 0x1a80e5ba, 0xea527bcd, 0x851ede56, 0xe06fc760, 0x8f2362fb,
- 0x7ff1fc8c, 0x10bd5917, 0x9f54c163, 0xf01864f8, 0x00cafa8f,
- 0x6f865f14, 0x1e19cb66, 0x71556efd, 0x8187f08a, 0xeecb5511,
- 0x6122cd65, 0x0e6e68fe, 0xfebcf689, 0x91f05312, 0x1c82df6c,
- 0x73ce7af7, 0x831ce480, 0xec50411b, 0x63b9d96f, 0x0cf57cf4,
- 0xfc27e283, 0x936b4718, 0xe2f4d36a, 0x8db876f1, 0x7d6ae886,
- 0x12264d1d, 0x9dcfd569, 0xf28370f2, 0x0251ee85, 0x6d1d4b1e,
- 0x18b4f678, 0x77f853e3, 0x872acd94, 0xe866680f, 0x678ff07b,
- 0x08c355e0, 0xf811cb97, 0x975d6e0c, 0xe6c2fa7e, 0x898e5fe5,
- 0x795cc192, 0x16106409, 0x99f9fc7d, 0xf6b559e6, 0x0667c791,
- 0x692b620a, 0xe459ee74, 0x8b154bef, 0x7bc7d598, 0x148b7003,
- 0x9b62e877, 0xf42e4dec, 0x04fcd39b, 0x6bb07600, 0x1a2fe272,
- 0x756347e9, 0x85b1d99e, 0xeafd7c05, 0x6514e471, 0x0a5841ea,
- 0xfa8adf9d, 0x95c67a06, 0x10d8a450, 0x7f9401cb, 0x8f469fbc,
- 0xe00a3a27, 0x6fe3a253, 0x00af07c8, 0xf07d99bf, 0x9f313c24,
- 0xeeaea856, 0x81e20dcd, 0x713093ba, 0x1e7c3621, 0x9195ae55,
- 0xfed90bce, 0x0e0b95b9, 0x61473022, 0xec35bc5c, 0x837919c7,
- 0x73ab87b0, 0x1ce7222b, 0x930eba5f, 0xfc421fc4, 0x0c9081b3,
- 0x63dc2428, 0x1243b05a, 0x7d0f15c1, 0x8ddd8bb6, 0xe2912e2d,
- 0x6d78b659, 0x023413c2, 0xf2e68db5, 0x9daa282e, 0xe8039548,
- 0x874f30d3, 0x779daea4, 0x18d10b3f, 0x9738934b, 0xf87436d0,
- 0x08a6a8a7, 0x67ea0d3c, 0x1675994e, 0x79393cd5, 0x89eba2a2,
- 0xe6a70739, 0x694e9f4d, 0x06023ad6, 0xf6d0a4a1, 0x999c013a,
- 0x14ee8d44, 0x7ba228df, 0x8b70b6a8, 0xe43c1333, 0x6bd58b47,
- 0x04992edc, 0xf44bb0ab, 0x9b071530, 0xea988142, 0x85d424d9,
- 0x7506baae, 0x1a4a1f35, 0x95a38741, 0xfaef22da, 0x0a3dbcad,
- 0x65711936}};
-
-#endif
-
-#endif
-
-#if N == 4
-
-#if W == 8
-
-local const z_crc_t FAR crc_braid_table[][256] = {
- {0x00000000, 0xf1da05aa, 0x38c50d15, 0xc91f08bf, 0x718a1a2a,
- 0x80501f80, 0x494f173f, 0xb8951295, 0xe3143454, 0x12ce31fe,
- 0xdbd13941, 0x2a0b3ceb, 0x929e2e7e, 0x63442bd4, 0xaa5b236b,
- 0x5b8126c1, 0x1d596ee9, 0xec836b43, 0x259c63fc, 0xd4466656,
- 0x6cd374c3, 0x9d097169, 0x541679d6, 0xa5cc7c7c, 0xfe4d5abd,
- 0x0f975f17, 0xc68857a8, 0x37525202, 0x8fc74097, 0x7e1d453d,
- 0xb7024d82, 0x46d84828, 0x3ab2ddd2, 0xcb68d878, 0x0277d0c7,
- 0xf3add56d, 0x4b38c7f8, 0xbae2c252, 0x73fdcaed, 0x8227cf47,
- 0xd9a6e986, 0x287cec2c, 0xe163e493, 0x10b9e139, 0xa82cf3ac,
- 0x59f6f606, 0x90e9feb9, 0x6133fb13, 0x27ebb33b, 0xd631b691,
- 0x1f2ebe2e, 0xeef4bb84, 0x5661a911, 0xa7bbacbb, 0x6ea4a404,
- 0x9f7ea1ae, 0xc4ff876f, 0x352582c5, 0xfc3a8a7a, 0x0de08fd0,
- 0xb5759d45, 0x44af98ef, 0x8db09050, 0x7c6a95fa, 0x7565bba4,
- 0x84bfbe0e, 0x4da0b6b1, 0xbc7ab31b, 0x04efa18e, 0xf535a424,
- 0x3c2aac9b, 0xcdf0a931, 0x96718ff0, 0x67ab8a5a, 0xaeb482e5,
- 0x5f6e874f, 0xe7fb95da, 0x16219070, 0xdf3e98cf, 0x2ee49d65,
- 0x683cd54d, 0x99e6d0e7, 0x50f9d858, 0xa123ddf2, 0x19b6cf67,
- 0xe86ccacd, 0x2173c272, 0xd0a9c7d8, 0x8b28e119, 0x7af2e4b3,
- 0xb3edec0c, 0x4237e9a6, 0xfaa2fb33, 0x0b78fe99, 0xc267f626,
- 0x33bdf38c, 0x4fd76676, 0xbe0d63dc, 0x77126b63, 0x86c86ec9,
- 0x3e5d7c5c, 0xcf8779f6, 0x06987149, 0xf74274e3, 0xacc35222,
- 0x5d195788, 0x94065f37, 0x65dc5a9d, 0xdd494808, 0x2c934da2,
- 0xe58c451d, 0x145640b7, 0x528e089f, 0xa3540d35, 0x6a4b058a,
- 0x9b910020, 0x230412b5, 0xd2de171f, 0x1bc11fa0, 0xea1b1a0a,
- 0xb19a3ccb, 0x40403961, 0x895f31de, 0x78853474, 0xc01026e1,
- 0x31ca234b, 0xf8d52bf4, 0x090f2e5e, 0xeacb7748, 0x1b1172e2,
- 0xd20e7a5d, 0x23d47ff7, 0x9b416d62, 0x6a9b68c8, 0xa3846077,
- 0x525e65dd, 0x09df431c, 0xf80546b6, 0x311a4e09, 0xc0c04ba3,
- 0x78555936, 0x898f5c9c, 0x40905423, 0xb14a5189, 0xf79219a1,
- 0x06481c0b, 0xcf5714b4, 0x3e8d111e, 0x8618038b, 0x77c20621,
- 0xbedd0e9e, 0x4f070b34, 0x14862df5, 0xe55c285f, 0x2c4320e0,
- 0xdd99254a, 0x650c37df, 0x94d63275, 0x5dc93aca, 0xac133f60,
- 0xd079aa9a, 0x21a3af30, 0xe8bca78f, 0x1966a225, 0xa1f3b0b0,
- 0x5029b51a, 0x9936bda5, 0x68ecb80f, 0x336d9ece, 0xc2b79b64,
- 0x0ba893db, 0xfa729671, 0x42e784e4, 0xb33d814e, 0x7a2289f1,
- 0x8bf88c5b, 0xcd20c473, 0x3cfac1d9, 0xf5e5c966, 0x043fcccc,
- 0xbcaade59, 0x4d70dbf3, 0x846fd34c, 0x75b5d6e6, 0x2e34f027,
- 0xdfeef58d, 0x16f1fd32, 0xe72bf898, 0x5fbeea0d, 0xae64efa7,
- 0x677be718, 0x96a1e2b2, 0x9faeccec, 0x6e74c946, 0xa76bc1f9,
- 0x56b1c453, 0xee24d6c6, 0x1ffed36c, 0xd6e1dbd3, 0x273bde79,
- 0x7cbaf8b8, 0x8d60fd12, 0x447ff5ad, 0xb5a5f007, 0x0d30e292,
- 0xfceae738, 0x35f5ef87, 0xc42fea2d, 0x82f7a205, 0x732da7af,
- 0xba32af10, 0x4be8aaba, 0xf37db82f, 0x02a7bd85, 0xcbb8b53a,
- 0x3a62b090, 0x61e39651, 0x903993fb, 0x59269b44, 0xa8fc9eee,
- 0x10698c7b, 0xe1b389d1, 0x28ac816e, 0xd97684c4, 0xa51c113e,
- 0x54c61494, 0x9dd91c2b, 0x6c031981, 0xd4960b14, 0x254c0ebe,
- 0xec530601, 0x1d8903ab, 0x4608256a, 0xb7d220c0, 0x7ecd287f,
- 0x8f172dd5, 0x37823f40, 0xc6583aea, 0x0f473255, 0xfe9d37ff,
- 0xb8457fd7, 0x499f7a7d, 0x808072c2, 0x715a7768, 0xc9cf65fd,
- 0x38156057, 0xf10a68e8, 0x00d06d42, 0x5b514b83, 0xaa8b4e29,
- 0x63944696, 0x924e433c, 0x2adb51a9, 0xdb015403, 0x121e5cbc,
- 0xe3c45916},
- {0x00000000, 0x0ee7e8d1, 0x1dcfd1a2, 0x13283973, 0x3b9fa344,
- 0x35784b95, 0x265072e6, 0x28b79a37, 0x773f4688, 0x79d8ae59,
- 0x6af0972a, 0x64177ffb, 0x4ca0e5cc, 0x42470d1d, 0x516f346e,
- 0x5f88dcbf, 0xee7e8d10, 0xe09965c1, 0xf3b15cb2, 0xfd56b463,
- 0xd5e12e54, 0xdb06c685, 0xc82efff6, 0xc6c91727, 0x9941cb98,
- 0x97a62349, 0x848e1a3a, 0x8a69f2eb, 0xa2de68dc, 0xac39800d,
- 0xbf11b97e, 0xb1f651af, 0x078c1c61, 0x096bf4b0, 0x1a43cdc3,
- 0x14a42512, 0x3c13bf25, 0x32f457f4, 0x21dc6e87, 0x2f3b8656,
- 0x70b35ae9, 0x7e54b238, 0x6d7c8b4b, 0x639b639a, 0x4b2cf9ad,
- 0x45cb117c, 0x56e3280f, 0x5804c0de, 0xe9f29171, 0xe71579a0,
- 0xf43d40d3, 0xfadaa802, 0xd26d3235, 0xdc8adae4, 0xcfa2e397,
- 0xc1450b46, 0x9ecdd7f9, 0x902a3f28, 0x8302065b, 0x8de5ee8a,
- 0xa55274bd, 0xabb59c6c, 0xb89da51f, 0xb67a4dce, 0x0f1838c2,
- 0x01ffd013, 0x12d7e960, 0x1c3001b1, 0x34879b86, 0x3a607357,
- 0x29484a24, 0x27afa2f5, 0x78277e4a, 0x76c0969b, 0x65e8afe8,
- 0x6b0f4739, 0x43b8dd0e, 0x4d5f35df, 0x5e770cac, 0x5090e47d,
- 0xe166b5d2, 0xef815d03, 0xfca96470, 0xf24e8ca1, 0xdaf91696,
- 0xd41efe47, 0xc736c734, 0xc9d12fe5, 0x9659f35a, 0x98be1b8b,
- 0x8b9622f8, 0x8571ca29, 0xadc6501e, 0xa321b8cf, 0xb00981bc,
- 0xbeee696d, 0x089424a3, 0x0673cc72, 0x155bf501, 0x1bbc1dd0,
- 0x330b87e7, 0x3dec6f36, 0x2ec45645, 0x2023be94, 0x7fab622b,
- 0x714c8afa, 0x6264b389, 0x6c835b58, 0x4434c16f, 0x4ad329be,
- 0x59fb10cd, 0x571cf81c, 0xe6eaa9b3, 0xe80d4162, 0xfb257811,
- 0xf5c290c0, 0xdd750af7, 0xd392e226, 0xc0badb55, 0xce5d3384,
- 0x91d5ef3b, 0x9f3207ea, 0x8c1a3e99, 0x82fdd648, 0xaa4a4c7f,
- 0xa4ada4ae, 0xb7859ddd, 0xb962750c, 0x1e307184, 0x10d79955,
- 0x03ffa026, 0x0d1848f7, 0x25afd2c0, 0x2b483a11, 0x38600362,
- 0x3687ebb3, 0x690f370c, 0x67e8dfdd, 0x74c0e6ae, 0x7a270e7f,
- 0x52909448, 0x5c777c99, 0x4f5f45ea, 0x41b8ad3b, 0xf04efc94,
- 0xfea91445, 0xed812d36, 0xe366c5e7, 0xcbd15fd0, 0xc536b701,
- 0xd61e8e72, 0xd8f966a3, 0x8771ba1c, 0x899652cd, 0x9abe6bbe,
- 0x9459836f, 0xbcee1958, 0xb209f189, 0xa121c8fa, 0xafc6202b,
- 0x19bc6de5, 0x175b8534, 0x0473bc47, 0x0a945496, 0x2223cea1,
- 0x2cc42670, 0x3fec1f03, 0x310bf7d2, 0x6e832b6d, 0x6064c3bc,
- 0x734cfacf, 0x7dab121e, 0x551c8829, 0x5bfb60f8, 0x48d3598b,
- 0x4634b15a, 0xf7c2e0f5, 0xf9250824, 0xea0d3157, 0xe4ead986,
- 0xcc5d43b1, 0xc2baab60, 0xd1929213, 0xdf757ac2, 0x80fda67d,
- 0x8e1a4eac, 0x9d3277df, 0x93d59f0e, 0xbb620539, 0xb585ede8,
- 0xa6add49b, 0xa84a3c4a, 0x11284946, 0x1fcfa197, 0x0ce798e4,
- 0x02007035, 0x2ab7ea02, 0x245002d3, 0x37783ba0, 0x399fd371,
- 0x66170fce, 0x68f0e71f, 0x7bd8de6c, 0x753f36bd, 0x5d88ac8a,
- 0x536f445b, 0x40477d28, 0x4ea095f9, 0xff56c456, 0xf1b12c87,
- 0xe29915f4, 0xec7efd25, 0xc4c96712, 0xca2e8fc3, 0xd906b6b0,
- 0xd7e15e61, 0x886982de, 0x868e6a0f, 0x95a6537c, 0x9b41bbad,
- 0xb3f6219a, 0xbd11c94b, 0xae39f038, 0xa0de18e9, 0x16a45527,
- 0x1843bdf6, 0x0b6b8485, 0x058c6c54, 0x2d3bf663, 0x23dc1eb2,
- 0x30f427c1, 0x3e13cf10, 0x619b13af, 0x6f7cfb7e, 0x7c54c20d,
- 0x72b32adc, 0x5a04b0eb, 0x54e3583a, 0x47cb6149, 0x492c8998,
- 0xf8dad837, 0xf63d30e6, 0xe5150995, 0xebf2e144, 0xc3457b73,
- 0xcda293a2, 0xde8aaad1, 0xd06d4200, 0x8fe59ebf, 0x8102766e,
- 0x922a4f1d, 0x9ccda7cc, 0xb47a3dfb, 0xba9dd52a, 0xa9b5ec59,
- 0xa7520488},
- {0x00000000, 0x3c60e308, 0x78c1c610, 0x44a12518, 0xf1838c20,
- 0xcde36f28, 0x89424a30, 0xb522a938, 0x38761e01, 0x0416fd09,
- 0x40b7d811, 0x7cd73b19, 0xc9f59221, 0xf5957129, 0xb1345431,
- 0x8d54b739, 0x70ec3c02, 0x4c8cdf0a, 0x082dfa12, 0x344d191a,
- 0x816fb022, 0xbd0f532a, 0xf9ae7632, 0xc5ce953a, 0x489a2203,
- 0x74fac10b, 0x305be413, 0x0c3b071b, 0xb919ae23, 0x85794d2b,
- 0xc1d86833, 0xfdb88b3b, 0xe1d87804, 0xddb89b0c, 0x9919be14,
- 0xa5795d1c, 0x105bf424, 0x2c3b172c, 0x689a3234, 0x54fad13c,
- 0xd9ae6605, 0xe5ce850d, 0xa16fa015, 0x9d0f431d, 0x282dea25,
- 0x144d092d, 0x50ec2c35, 0x6c8ccf3d, 0x91344406, 0xad54a70e,
- 0xe9f58216, 0xd595611e, 0x60b7c826, 0x5cd72b2e, 0x18760e36,
- 0x2416ed3e, 0xa9425a07, 0x9522b90f, 0xd1839c17, 0xede37f1f,
- 0x58c1d627, 0x64a1352f, 0x20001037, 0x1c60f33f, 0x18c1f649,
- 0x24a11541, 0x60003059, 0x5c60d351, 0xe9427a69, 0xd5229961,
- 0x9183bc79, 0xade35f71, 0x20b7e848, 0x1cd70b40, 0x58762e58,
- 0x6416cd50, 0xd1346468, 0xed548760, 0xa9f5a278, 0x95954170,
- 0x682dca4b, 0x544d2943, 0x10ec0c5b, 0x2c8cef53, 0x99ae466b,
- 0xa5cea563, 0xe16f807b, 0xdd0f6373, 0x505bd44a, 0x6c3b3742,
- 0x289a125a, 0x14faf152, 0xa1d8586a, 0x9db8bb62, 0xd9199e7a,
- 0xe5797d72, 0xf9198e4d, 0xc5796d45, 0x81d8485d, 0xbdb8ab55,
- 0x089a026d, 0x34fae165, 0x705bc47d, 0x4c3b2775, 0xc16f904c,
- 0xfd0f7344, 0xb9ae565c, 0x85ceb554, 0x30ec1c6c, 0x0c8cff64,
- 0x482dda7c, 0x744d3974, 0x89f5b24f, 0xb5955147, 0xf134745f,
- 0xcd549757, 0x78763e6f, 0x4416dd67, 0x00b7f87f, 0x3cd71b77,
- 0xb183ac4e, 0x8de34f46, 0xc9426a5e, 0xf5228956, 0x4000206e,
- 0x7c60c366, 0x38c1e67e, 0x04a10576, 0x3183ec92, 0x0de30f9a,
- 0x49422a82, 0x7522c98a, 0xc00060b2, 0xfc6083ba, 0xb8c1a6a2,
- 0x84a145aa, 0x09f5f293, 0x3595119b, 0x71343483, 0x4d54d78b,
- 0xf8767eb3, 0xc4169dbb, 0x80b7b8a3, 0xbcd75bab, 0x416fd090,
- 0x7d0f3398, 0x39ae1680, 0x05cef588, 0xb0ec5cb0, 0x8c8cbfb8,
- 0xc82d9aa0, 0xf44d79a8, 0x7919ce91, 0x45792d99, 0x01d80881,
- 0x3db8eb89, 0x889a42b1, 0xb4faa1b9, 0xf05b84a1, 0xcc3b67a9,
- 0xd05b9496, 0xec3b779e, 0xa89a5286, 0x94fab18e, 0x21d818b6,
- 0x1db8fbbe, 0x5919dea6, 0x65793dae, 0xe82d8a97, 0xd44d699f,
- 0x90ec4c87, 0xac8caf8f, 0x19ae06b7, 0x25cee5bf, 0x616fc0a7,
- 0x5d0f23af, 0xa0b7a894, 0x9cd74b9c, 0xd8766e84, 0xe4168d8c,
- 0x513424b4, 0x6d54c7bc, 0x29f5e2a4, 0x159501ac, 0x98c1b695,
- 0xa4a1559d, 0xe0007085, 0xdc60938d, 0x69423ab5, 0x5522d9bd,
- 0x1183fca5, 0x2de31fad, 0x29421adb, 0x1522f9d3, 0x5183dccb,
- 0x6de33fc3, 0xd8c196fb, 0xe4a175f3, 0xa00050eb, 0x9c60b3e3,
- 0x113404da, 0x2d54e7d2, 0x69f5c2ca, 0x559521c2, 0xe0b788fa,
- 0xdcd76bf2, 0x98764eea, 0xa416ade2, 0x59ae26d9, 0x65cec5d1,
- 0x216fe0c9, 0x1d0f03c1, 0xa82daaf9, 0x944d49f1, 0xd0ec6ce9,
- 0xec8c8fe1, 0x61d838d8, 0x5db8dbd0, 0x1919fec8, 0x25791dc0,
- 0x905bb4f8, 0xac3b57f0, 0xe89a72e8, 0xd4fa91e0, 0xc89a62df,
- 0xf4fa81d7, 0xb05ba4cf, 0x8c3b47c7, 0x3919eeff, 0x05790df7,
- 0x41d828ef, 0x7db8cbe7, 0xf0ec7cde, 0xcc8c9fd6, 0x882dbace,
- 0xb44d59c6, 0x016ff0fe, 0x3d0f13f6, 0x79ae36ee, 0x45ced5e6,
- 0xb8765edd, 0x8416bdd5, 0xc0b798cd, 0xfcd77bc5, 0x49f5d2fd,
- 0x759531f5, 0x313414ed, 0x0d54f7e5, 0x800040dc, 0xbc60a3d4,
- 0xf8c186cc, 0xc4a165c4, 0x7183ccfc, 0x4de32ff4, 0x09420aec,
- 0x3522e9e4},
- {0x00000000, 0x6307d924, 0xc60fb248, 0xa5086b6c, 0x576e62d1,
- 0x3469bbf5, 0x9161d099, 0xf26609bd, 0xaedcc5a2, 0xcddb1c86,
- 0x68d377ea, 0x0bd4aece, 0xf9b2a773, 0x9ab57e57, 0x3fbd153b,
- 0x5cbacc1f, 0x86c88d05, 0xe5cf5421, 0x40c73f4d, 0x23c0e669,
- 0xd1a6efd4, 0xb2a136f0, 0x17a95d9c, 0x74ae84b8, 0x281448a7,
- 0x4b139183, 0xee1bfaef, 0x8d1c23cb, 0x7f7a2a76, 0x1c7df352,
- 0xb975983e, 0xda72411a, 0xd6e01c4b, 0xb5e7c56f, 0x10efae03,
- 0x73e87727, 0x818e7e9a, 0xe289a7be, 0x4781ccd2, 0x248615f6,
- 0x783cd9e9, 0x1b3b00cd, 0xbe336ba1, 0xdd34b285, 0x2f52bb38,
- 0x4c55621c, 0xe95d0970, 0x8a5ad054, 0x5028914e, 0x332f486a,
- 0x96272306, 0xf520fa22, 0x0746f39f, 0x64412abb, 0xc14941d7,
- 0xa24e98f3, 0xfef454ec, 0x9df38dc8, 0x38fbe6a4, 0x5bfc3f80,
- 0xa99a363d, 0xca9def19, 0x6f958475, 0x0c925d51, 0x76b13ed7,
- 0x15b6e7f3, 0xb0be8c9f, 0xd3b955bb, 0x21df5c06, 0x42d88522,
- 0xe7d0ee4e, 0x84d7376a, 0xd86dfb75, 0xbb6a2251, 0x1e62493d,
- 0x7d659019, 0x8f0399a4, 0xec044080, 0x490c2bec, 0x2a0bf2c8,
- 0xf079b3d2, 0x937e6af6, 0x3676019a, 0x5571d8be, 0xa717d103,
- 0xc4100827, 0x6118634b, 0x021fba6f, 0x5ea57670, 0x3da2af54,
- 0x98aac438, 0xfbad1d1c, 0x09cb14a1, 0x6acccd85, 0xcfc4a6e9,
- 0xacc37fcd, 0xa051229c, 0xc356fbb8, 0x665e90d4, 0x055949f0,
- 0xf73f404d, 0x94389969, 0x3130f205, 0x52372b21, 0x0e8de73e,
- 0x6d8a3e1a, 0xc8825576, 0xab858c52, 0x59e385ef, 0x3ae45ccb,
- 0x9fec37a7, 0xfcebee83, 0x2699af99, 0x459e76bd, 0xe0961dd1,
- 0x8391c4f5, 0x71f7cd48, 0x12f0146c, 0xb7f87f00, 0xd4ffa624,
- 0x88456a3b, 0xeb42b31f, 0x4e4ad873, 0x2d4d0157, 0xdf2b08ea,
- 0xbc2cd1ce, 0x1924baa2, 0x7a236386, 0xed627dae, 0x8e65a48a,
- 0x2b6dcfe6, 0x486a16c2, 0xba0c1f7f, 0xd90bc65b, 0x7c03ad37,
- 0x1f047413, 0x43beb80c, 0x20b96128, 0x85b10a44, 0xe6b6d360,
- 0x14d0dadd, 0x77d703f9, 0xd2df6895, 0xb1d8b1b1, 0x6baaf0ab,
- 0x08ad298f, 0xada542e3, 0xcea29bc7, 0x3cc4927a, 0x5fc34b5e,
- 0xfacb2032, 0x99ccf916, 0xc5763509, 0xa671ec2d, 0x03798741,
- 0x607e5e65, 0x921857d8, 0xf11f8efc, 0x5417e590, 0x37103cb4,
- 0x3b8261e5, 0x5885b8c1, 0xfd8dd3ad, 0x9e8a0a89, 0x6cec0334,
- 0x0febda10, 0xaae3b17c, 0xc9e46858, 0x955ea447, 0xf6597d63,
- 0x5351160f, 0x3056cf2b, 0xc230c696, 0xa1371fb2, 0x043f74de,
- 0x6738adfa, 0xbd4aece0, 0xde4d35c4, 0x7b455ea8, 0x1842878c,
- 0xea248e31, 0x89235715, 0x2c2b3c79, 0x4f2ce55d, 0x13962942,
- 0x7091f066, 0xd5999b0a, 0xb69e422e, 0x44f84b93, 0x27ff92b7,
- 0x82f7f9db, 0xe1f020ff, 0x9bd34379, 0xf8d49a5d, 0x5ddcf131,
- 0x3edb2815, 0xccbd21a8, 0xafbaf88c, 0x0ab293e0, 0x69b54ac4,
- 0x350f86db, 0x56085fff, 0xf3003493, 0x9007edb7, 0x6261e40a,
- 0x01663d2e, 0xa46e5642, 0xc7698f66, 0x1d1bce7c, 0x7e1c1758,
- 0xdb147c34, 0xb813a510, 0x4a75acad, 0x29727589, 0x8c7a1ee5,
- 0xef7dc7c1, 0xb3c70bde, 0xd0c0d2fa, 0x75c8b996, 0x16cf60b2,
- 0xe4a9690f, 0x87aeb02b, 0x22a6db47, 0x41a10263, 0x4d335f32,
- 0x2e348616, 0x8b3ced7a, 0xe83b345e, 0x1a5d3de3, 0x795ae4c7,
- 0xdc528fab, 0xbf55568f, 0xe3ef9a90, 0x80e843b4, 0x25e028d8,
- 0x46e7f1fc, 0xb481f841, 0xd7862165, 0x728e4a09, 0x1189932d,
- 0xcbfbd237, 0xa8fc0b13, 0x0df4607f, 0x6ef3b95b, 0x9c95b0e6,
- 0xff9269c2, 0x5a9a02ae, 0x399ddb8a, 0x65271795, 0x0620ceb1,
- 0xa328a5dd, 0xc02f7cf9, 0x32497544, 0x514eac60, 0xf446c70c,
- 0x97411e28},
- {0x00000000, 0x01b5fd1d, 0x036bfa3a, 0x02de0727, 0x06d7f474,
- 0x07620969, 0x05bc0e4e, 0x0409f353, 0x0dafe8e8, 0x0c1a15f5,
- 0x0ec412d2, 0x0f71efcf, 0x0b781c9c, 0x0acde181, 0x0813e6a6,
- 0x09a61bbb, 0x1b5fd1d0, 0x1aea2ccd, 0x18342bea, 0x1981d6f7,
- 0x1d8825a4, 0x1c3dd8b9, 0x1ee3df9e, 0x1f562283, 0x16f03938,
- 0x1745c425, 0x159bc302, 0x142e3e1f, 0x1027cd4c, 0x11923051,
- 0x134c3776, 0x12f9ca6b, 0x36bfa3a0, 0x370a5ebd, 0x35d4599a,
- 0x3461a487, 0x306857d4, 0x31ddaac9, 0x3303adee, 0x32b650f3,
- 0x3b104b48, 0x3aa5b655, 0x387bb172, 0x39ce4c6f, 0x3dc7bf3c,
- 0x3c724221, 0x3eac4506, 0x3f19b81b, 0x2de07270, 0x2c558f6d,
- 0x2e8b884a, 0x2f3e7557, 0x2b378604, 0x2a827b19, 0x285c7c3e,
- 0x29e98123, 0x204f9a98, 0x21fa6785, 0x232460a2, 0x22919dbf,
- 0x26986eec, 0x272d93f1, 0x25f394d6, 0x244669cb, 0x6d7f4740,
- 0x6ccaba5d, 0x6e14bd7a, 0x6fa14067, 0x6ba8b334, 0x6a1d4e29,
- 0x68c3490e, 0x6976b413, 0x60d0afa8, 0x616552b5, 0x63bb5592,
- 0x620ea88f, 0x66075bdc, 0x67b2a6c1, 0x656ca1e6, 0x64d95cfb,
- 0x76209690, 0x77956b8d, 0x754b6caa, 0x74fe91b7, 0x70f762e4,
- 0x71429ff9, 0x739c98de, 0x722965c3, 0x7b8f7e78, 0x7a3a8365,
- 0x78e48442, 0x7951795f, 0x7d588a0c, 0x7ced7711, 0x7e337036,
- 0x7f868d2b, 0x5bc0e4e0, 0x5a7519fd, 0x58ab1eda, 0x591ee3c7,
- 0x5d171094, 0x5ca2ed89, 0x5e7ceaae, 0x5fc917b3, 0x566f0c08,
- 0x57daf115, 0x5504f632, 0x54b10b2f, 0x50b8f87c, 0x510d0561,
- 0x53d30246, 0x5266ff5b, 0x409f3530, 0x412ac82d, 0x43f4cf0a,
- 0x42413217, 0x4648c144, 0x47fd3c59, 0x45233b7e, 0x4496c663,
- 0x4d30ddd8, 0x4c8520c5, 0x4e5b27e2, 0x4feedaff, 0x4be729ac,
- 0x4a52d4b1, 0x488cd396, 0x49392e8b, 0xdafe8e80, 0xdb4b739d,
- 0xd99574ba, 0xd82089a7, 0xdc297af4, 0xdd9c87e9, 0xdf4280ce,
- 0xdef77dd3, 0xd7516668, 0xd6e49b75, 0xd43a9c52, 0xd58f614f,
- 0xd186921c, 0xd0336f01, 0xd2ed6826, 0xd358953b, 0xc1a15f50,
- 0xc014a24d, 0xc2caa56a, 0xc37f5877, 0xc776ab24, 0xc6c35639,
- 0xc41d511e, 0xc5a8ac03, 0xcc0eb7b8, 0xcdbb4aa5, 0xcf654d82,
- 0xced0b09f, 0xcad943cc, 0xcb6cbed1, 0xc9b2b9f6, 0xc80744eb,
- 0xec412d20, 0xedf4d03d, 0xef2ad71a, 0xee9f2a07, 0xea96d954,
- 0xeb232449, 0xe9fd236e, 0xe848de73, 0xe1eec5c8, 0xe05b38d5,
- 0xe2853ff2, 0xe330c2ef, 0xe73931bc, 0xe68ccca1, 0xe452cb86,
- 0xe5e7369b, 0xf71efcf0, 0xf6ab01ed, 0xf47506ca, 0xf5c0fbd7,
- 0xf1c90884, 0xf07cf599, 0xf2a2f2be, 0xf3170fa3, 0xfab11418,
- 0xfb04e905, 0xf9daee22, 0xf86f133f, 0xfc66e06c, 0xfdd31d71,
- 0xff0d1a56, 0xfeb8e74b, 0xb781c9c0, 0xb63434dd, 0xb4ea33fa,
- 0xb55fcee7, 0xb1563db4, 0xb0e3c0a9, 0xb23dc78e, 0xb3883a93,
- 0xba2e2128, 0xbb9bdc35, 0xb945db12, 0xb8f0260f, 0xbcf9d55c,
- 0xbd4c2841, 0xbf922f66, 0xbe27d27b, 0xacde1810, 0xad6be50d,
- 0xafb5e22a, 0xae001f37, 0xaa09ec64, 0xabbc1179, 0xa962165e,
- 0xa8d7eb43, 0xa171f0f8, 0xa0c40de5, 0xa21a0ac2, 0xa3aff7df,
- 0xa7a6048c, 0xa613f991, 0xa4cdfeb6, 0xa57803ab, 0x813e6a60,
- 0x808b977d, 0x8255905a, 0x83e06d47, 0x87e99e14, 0x865c6309,
- 0x8482642e, 0x85379933, 0x8c918288, 0x8d247f95, 0x8ffa78b2,
- 0x8e4f85af, 0x8a4676fc, 0x8bf38be1, 0x892d8cc6, 0x889871db,
- 0x9a61bbb0, 0x9bd446ad, 0x990a418a, 0x98bfbc97, 0x9cb64fc4,
- 0x9d03b2d9, 0x9fddb5fe, 0x9e6848e3, 0x97ce5358, 0x967bae45,
- 0x94a5a962, 0x9510547f, 0x9119a72c, 0x90ac5a31, 0x92725d16,
- 0x93c7a00b},
- {0x00000000, 0x6e8c1b41, 0xdd183682, 0xb3942dc3, 0x61416b45,
- 0x0fcd7004, 0xbc595dc7, 0xd2d54686, 0xc282d68a, 0xac0ecdcb,
- 0x1f9ae008, 0x7116fb49, 0xa3c3bdcf, 0xcd4fa68e, 0x7edb8b4d,
- 0x1057900c, 0x5e74ab55, 0x30f8b014, 0x836c9dd7, 0xede08696,
- 0x3f35c010, 0x51b9db51, 0xe22df692, 0x8ca1edd3, 0x9cf67ddf,
- 0xf27a669e, 0x41ee4b5d, 0x2f62501c, 0xfdb7169a, 0x933b0ddb,
- 0x20af2018, 0x4e233b59, 0xbce956aa, 0xd2654deb, 0x61f16028,
- 0x0f7d7b69, 0xdda83def, 0xb32426ae, 0x00b00b6d, 0x6e3c102c,
- 0x7e6b8020, 0x10e79b61, 0xa373b6a2, 0xcdffade3, 0x1f2aeb65,
- 0x71a6f024, 0xc232dde7, 0xacbec6a6, 0xe29dfdff, 0x8c11e6be,
- 0x3f85cb7d, 0x5109d03c, 0x83dc96ba, 0xed508dfb, 0x5ec4a038,
- 0x3048bb79, 0x201f2b75, 0x4e933034, 0xfd071df7, 0x938b06b6,
- 0x415e4030, 0x2fd25b71, 0x9c4676b2, 0xf2ca6df3, 0xa2a3ab15,
- 0xcc2fb054, 0x7fbb9d97, 0x113786d6, 0xc3e2c050, 0xad6edb11,
- 0x1efaf6d2, 0x7076ed93, 0x60217d9f, 0x0ead66de, 0xbd394b1d,
- 0xd3b5505c, 0x016016da, 0x6fec0d9b, 0xdc782058, 0xb2f43b19,
- 0xfcd70040, 0x925b1b01, 0x21cf36c2, 0x4f432d83, 0x9d966b05,
- 0xf31a7044, 0x408e5d87, 0x2e0246c6, 0x3e55d6ca, 0x50d9cd8b,
- 0xe34de048, 0x8dc1fb09, 0x5f14bd8f, 0x3198a6ce, 0x820c8b0d,
- 0xec80904c, 0x1e4afdbf, 0x70c6e6fe, 0xc352cb3d, 0xadded07c,
- 0x7f0b96fa, 0x11878dbb, 0xa213a078, 0xcc9fbb39, 0xdcc82b35,
- 0xb2443074, 0x01d01db7, 0x6f5c06f6, 0xbd894070, 0xd3055b31,
- 0x609176f2, 0x0e1d6db3, 0x403e56ea, 0x2eb24dab, 0x9d266068,
- 0xf3aa7b29, 0x217f3daf, 0x4ff326ee, 0xfc670b2d, 0x92eb106c,
- 0x82bc8060, 0xec309b21, 0x5fa4b6e2, 0x3128ada3, 0xe3fdeb25,
- 0x8d71f064, 0x3ee5dda7, 0x5069c6e6, 0x9e36506b, 0xf0ba4b2a,
- 0x432e66e9, 0x2da27da8, 0xff773b2e, 0x91fb206f, 0x226f0dac,
- 0x4ce316ed, 0x5cb486e1, 0x32389da0, 0x81acb063, 0xef20ab22,
- 0x3df5eda4, 0x5379f6e5, 0xe0eddb26, 0x8e61c067, 0xc042fb3e,
- 0xaecee07f, 0x1d5acdbc, 0x73d6d6fd, 0xa103907b, 0xcf8f8b3a,
- 0x7c1ba6f9, 0x1297bdb8, 0x02c02db4, 0x6c4c36f5, 0xdfd81b36,
- 0xb1540077, 0x638146f1, 0x0d0d5db0, 0xbe997073, 0xd0156b32,
- 0x22df06c1, 0x4c531d80, 0xffc73043, 0x914b2b02, 0x439e6d84,
- 0x2d1276c5, 0x9e865b06, 0xf00a4047, 0xe05dd04b, 0x8ed1cb0a,
- 0x3d45e6c9, 0x53c9fd88, 0x811cbb0e, 0xef90a04f, 0x5c048d8c,
- 0x328896cd, 0x7cabad94, 0x1227b6d5, 0xa1b39b16, 0xcf3f8057,
- 0x1deac6d1, 0x7366dd90, 0xc0f2f053, 0xae7eeb12, 0xbe297b1e,
- 0xd0a5605f, 0x63314d9c, 0x0dbd56dd, 0xdf68105b, 0xb1e40b1a,
- 0x027026d9, 0x6cfc3d98, 0x3c95fb7e, 0x5219e03f, 0xe18dcdfc,
- 0x8f01d6bd, 0x5dd4903b, 0x33588b7a, 0x80cca6b9, 0xee40bdf8,
- 0xfe172df4, 0x909b36b5, 0x230f1b76, 0x4d830037, 0x9f5646b1,
- 0xf1da5df0, 0x424e7033, 0x2cc26b72, 0x62e1502b, 0x0c6d4b6a,
- 0xbff966a9, 0xd1757de8, 0x03a03b6e, 0x6d2c202f, 0xdeb80dec,
- 0xb03416ad, 0xa06386a1, 0xceef9de0, 0x7d7bb023, 0x13f7ab62,
- 0xc122ede4, 0xafaef6a5, 0x1c3adb66, 0x72b6c027, 0x807cadd4,
- 0xeef0b695, 0x5d649b56, 0x33e88017, 0xe13dc691, 0x8fb1ddd0,
- 0x3c25f013, 0x52a9eb52, 0x42fe7b5e, 0x2c72601f, 0x9fe64ddc,
- 0xf16a569d, 0x23bf101b, 0x4d330b5a, 0xfea72699, 0x902b3dd8,
- 0xde080681, 0xb0841dc0, 0x03103003, 0x6d9c2b42, 0xbf496dc4,
- 0xd1c57685, 0x62515b46, 0x0cdd4007, 0x1c8ad00b, 0x7206cb4a,
- 0xc192e689, 0xaf1efdc8, 0x7dcbbb4e, 0x1347a00f, 0xa0d38dcc,
- 0xce5f968d},
- {0x00000000, 0xe71da697, 0x154a4b6f, 0xf257edf8, 0x2a9496de,
- 0xcd893049, 0x3fdeddb1, 0xd8c37b26, 0x55292dbc, 0xb2348b2b,
- 0x406366d3, 0xa77ec044, 0x7fbdbb62, 0x98a01df5, 0x6af7f00d,
- 0x8dea569a, 0xaa525b78, 0x4d4ffdef, 0xbf181017, 0x5805b680,
- 0x80c6cda6, 0x67db6b31, 0x958c86c9, 0x7291205e, 0xff7b76c4,
- 0x1866d053, 0xea313dab, 0x0d2c9b3c, 0xd5efe01a, 0x32f2468d,
- 0xc0a5ab75, 0x27b80de2, 0x8fd5b0b1, 0x68c81626, 0x9a9ffbde,
- 0x7d825d49, 0xa541266f, 0x425c80f8, 0xb00b6d00, 0x5716cb97,
- 0xdafc9d0d, 0x3de13b9a, 0xcfb6d662, 0x28ab70f5, 0xf0680bd3,
- 0x1775ad44, 0xe52240bc, 0x023fe62b, 0x2587ebc9, 0xc29a4d5e,
- 0x30cda0a6, 0xd7d00631, 0x0f137d17, 0xe80edb80, 0x1a593678,
- 0xfd4490ef, 0x70aec675, 0x97b360e2, 0x65e48d1a, 0x82f92b8d,
- 0x5a3a50ab, 0xbd27f63c, 0x4f701bc4, 0xa86dbd53, 0xc4da6723,
- 0x23c7c1b4, 0xd1902c4c, 0x368d8adb, 0xee4ef1fd, 0x0953576a,
- 0xfb04ba92, 0x1c191c05, 0x91f34a9f, 0x76eeec08, 0x84b901f0,
- 0x63a4a767, 0xbb67dc41, 0x5c7a7ad6, 0xae2d972e, 0x493031b9,
- 0x6e883c5b, 0x89959acc, 0x7bc27734, 0x9cdfd1a3, 0x441caa85,
- 0xa3010c12, 0x5156e1ea, 0xb64b477d, 0x3ba111e7, 0xdcbcb770,
- 0x2eeb5a88, 0xc9f6fc1f, 0x11358739, 0xf62821ae, 0x047fcc56,
- 0xe3626ac1, 0x4b0fd792, 0xac127105, 0x5e459cfd, 0xb9583a6a,
- 0x619b414c, 0x8686e7db, 0x74d10a23, 0x93ccacb4, 0x1e26fa2e,
- 0xf93b5cb9, 0x0b6cb141, 0xec7117d6, 0x34b26cf0, 0xd3afca67,
- 0x21f8279f, 0xc6e58108, 0xe15d8cea, 0x06402a7d, 0xf417c785,
- 0x130a6112, 0xcbc91a34, 0x2cd4bca3, 0xde83515b, 0x399ef7cc,
- 0xb474a156, 0x536907c1, 0xa13eea39, 0x46234cae, 0x9ee03788,
- 0x79fd911f, 0x8baa7ce7, 0x6cb7da70, 0x52c5c807, 0xb5d86e90,
- 0x478f8368, 0xa09225ff, 0x78515ed9, 0x9f4cf84e, 0x6d1b15b6,
- 0x8a06b321, 0x07ece5bb, 0xe0f1432c, 0x12a6aed4, 0xf5bb0843,
- 0x2d787365, 0xca65d5f2, 0x3832380a, 0xdf2f9e9d, 0xf897937f,
- 0x1f8a35e8, 0xedddd810, 0x0ac07e87, 0xd20305a1, 0x351ea336,
- 0xc7494ece, 0x2054e859, 0xadbebec3, 0x4aa31854, 0xb8f4f5ac,
- 0x5fe9533b, 0x872a281d, 0x60378e8a, 0x92606372, 0x757dc5e5,
- 0xdd1078b6, 0x3a0dde21, 0xc85a33d9, 0x2f47954e, 0xf784ee68,
- 0x109948ff, 0xe2cea507, 0x05d30390, 0x8839550a, 0x6f24f39d,
- 0x9d731e65, 0x7a6eb8f2, 0xa2adc3d4, 0x45b06543, 0xb7e788bb,
- 0x50fa2e2c, 0x774223ce, 0x905f8559, 0x620868a1, 0x8515ce36,
- 0x5dd6b510, 0xbacb1387, 0x489cfe7f, 0xaf8158e8, 0x226b0e72,
- 0xc576a8e5, 0x3721451d, 0xd03ce38a, 0x08ff98ac, 0xefe23e3b,
- 0x1db5d3c3, 0xfaa87554, 0x961faf24, 0x710209b3, 0x8355e44b,
- 0x644842dc, 0xbc8b39fa, 0x5b969f6d, 0xa9c17295, 0x4edcd402,
- 0xc3368298, 0x242b240f, 0xd67cc9f7, 0x31616f60, 0xe9a21446,
- 0x0ebfb2d1, 0xfce85f29, 0x1bf5f9be, 0x3c4df45c, 0xdb5052cb,
- 0x2907bf33, 0xce1a19a4, 0x16d96282, 0xf1c4c415, 0x039329ed,
- 0xe48e8f7a, 0x6964d9e0, 0x8e797f77, 0x7c2e928f, 0x9b333418,
- 0x43f04f3e, 0xa4ede9a9, 0x56ba0451, 0xb1a7a2c6, 0x19ca1f95,
- 0xfed7b902, 0x0c8054fa, 0xeb9df26d, 0x335e894b, 0xd4432fdc,
- 0x2614c224, 0xc10964b3, 0x4ce33229, 0xabfe94be, 0x59a97946,
- 0xbeb4dfd1, 0x6677a4f7, 0x816a0260, 0x733def98, 0x9420490f,
- 0xb39844ed, 0x5485e27a, 0xa6d20f82, 0x41cfa915, 0x990cd233,
- 0x7e1174a4, 0x8c46995c, 0x6b5b3fcb, 0xe6b16951, 0x01accfc6,
- 0xf3fb223e, 0x14e684a9, 0xcc25ff8f, 0x2b385918, 0xd96fb4e0,
- 0x3e721277},
- {0x00000000, 0xa58b900e, 0x9066265d, 0x35edb653, 0xfbbd4afb,
- 0x5e36daf5, 0x6bdb6ca6, 0xce50fca8, 0x2c0b93b7, 0x898003b9,
- 0xbc6db5ea, 0x19e625e4, 0xd7b6d94c, 0x723d4942, 0x47d0ff11,
- 0xe25b6f1f, 0x5817276e, 0xfd9cb760, 0xc8710133, 0x6dfa913d,
- 0xa3aa6d95, 0x0621fd9b, 0x33cc4bc8, 0x9647dbc6, 0x741cb4d9,
- 0xd19724d7, 0xe47a9284, 0x41f1028a, 0x8fa1fe22, 0x2a2a6e2c,
- 0x1fc7d87f, 0xba4c4871, 0xb02e4edc, 0x15a5ded2, 0x20486881,
- 0x85c3f88f, 0x4b930427, 0xee189429, 0xdbf5227a, 0x7e7eb274,
- 0x9c25dd6b, 0x39ae4d65, 0x0c43fb36, 0xa9c86b38, 0x67989790,
- 0xc213079e, 0xf7feb1cd, 0x527521c3, 0xe83969b2, 0x4db2f9bc,
- 0x785f4fef, 0xddd4dfe1, 0x13842349, 0xb60fb347, 0x83e20514,
- 0x2669951a, 0xc432fa05, 0x61b96a0b, 0x5454dc58, 0xf1df4c56,
- 0x3f8fb0fe, 0x9a0420f0, 0xafe996a3, 0x0a6206ad, 0xbb2d9bf9,
- 0x1ea60bf7, 0x2b4bbda4, 0x8ec02daa, 0x4090d102, 0xe51b410c,
- 0xd0f6f75f, 0x757d6751, 0x9726084e, 0x32ad9840, 0x07402e13,
- 0xa2cbbe1d, 0x6c9b42b5, 0xc910d2bb, 0xfcfd64e8, 0x5976f4e6,
- 0xe33abc97, 0x46b12c99, 0x735c9aca, 0xd6d70ac4, 0x1887f66c,
- 0xbd0c6662, 0x88e1d031, 0x2d6a403f, 0xcf312f20, 0x6ababf2e,
- 0x5f57097d, 0xfadc9973, 0x348c65db, 0x9107f5d5, 0xa4ea4386,
- 0x0161d388, 0x0b03d525, 0xae88452b, 0x9b65f378, 0x3eee6376,
- 0xf0be9fde, 0x55350fd0, 0x60d8b983, 0xc553298d, 0x27084692,
- 0x8283d69c, 0xb76e60cf, 0x12e5f0c1, 0xdcb50c69, 0x793e9c67,
- 0x4cd32a34, 0xe958ba3a, 0x5314f24b, 0xf69f6245, 0xc372d416,
- 0x66f94418, 0xa8a9b8b0, 0x0d2228be, 0x38cf9eed, 0x9d440ee3,
- 0x7f1f61fc, 0xda94f1f2, 0xef7947a1, 0x4af2d7af, 0x84a22b07,
- 0x2129bb09, 0x14c40d5a, 0xb14f9d54, 0xad2a31b3, 0x08a1a1bd,
- 0x3d4c17ee, 0x98c787e0, 0x56977b48, 0xf31ceb46, 0xc6f15d15,
- 0x637acd1b, 0x8121a204, 0x24aa320a, 0x11478459, 0xb4cc1457,
- 0x7a9ce8ff, 0xdf1778f1, 0xeafacea2, 0x4f715eac, 0xf53d16dd,
- 0x50b686d3, 0x655b3080, 0xc0d0a08e, 0x0e805c26, 0xab0bcc28,
- 0x9ee67a7b, 0x3b6dea75, 0xd936856a, 0x7cbd1564, 0x4950a337,
- 0xecdb3339, 0x228bcf91, 0x87005f9f, 0xb2ede9cc, 0x176679c2,
- 0x1d047f6f, 0xb88fef61, 0x8d625932, 0x28e9c93c, 0xe6b93594,
- 0x4332a59a, 0x76df13c9, 0xd35483c7, 0x310fecd8, 0x94847cd6,
- 0xa169ca85, 0x04e25a8b, 0xcab2a623, 0x6f39362d, 0x5ad4807e,
- 0xff5f1070, 0x45135801, 0xe098c80f, 0xd5757e5c, 0x70feee52,
- 0xbeae12fa, 0x1b2582f4, 0x2ec834a7, 0x8b43a4a9, 0x6918cbb6,
- 0xcc935bb8, 0xf97eedeb, 0x5cf57de5, 0x92a5814d, 0x372e1143,
- 0x02c3a710, 0xa748371e, 0x1607aa4a, 0xb38c3a44, 0x86618c17,
- 0x23ea1c19, 0xedbae0b1, 0x483170bf, 0x7ddcc6ec, 0xd85756e2,
- 0x3a0c39fd, 0x9f87a9f3, 0xaa6a1fa0, 0x0fe18fae, 0xc1b17306,
- 0x643ae308, 0x51d7555b, 0xf45cc555, 0x4e108d24, 0xeb9b1d2a,
- 0xde76ab79, 0x7bfd3b77, 0xb5adc7df, 0x102657d1, 0x25cbe182,
- 0x8040718c, 0x621b1e93, 0xc7908e9d, 0xf27d38ce, 0x57f6a8c0,
- 0x99a65468, 0x3c2dc466, 0x09c07235, 0xac4be23b, 0xa629e496,
- 0x03a27498, 0x364fc2cb, 0x93c452c5, 0x5d94ae6d, 0xf81f3e63,
- 0xcdf28830, 0x6879183e, 0x8a227721, 0x2fa9e72f, 0x1a44517c,
- 0xbfcfc172, 0x719f3dda, 0xd414add4, 0xe1f91b87, 0x44728b89,
- 0xfe3ec3f8, 0x5bb553f6, 0x6e58e5a5, 0xcbd375ab, 0x05838903,
- 0xa008190d, 0x95e5af5e, 0x306e3f50, 0xd235504f, 0x77bec041,
- 0x42537612, 0xe7d8e61c, 0x29881ab4, 0x8c038aba, 0xb9ee3ce9,
- 0x1c65ace7}};
-
-local const z_word_t FAR crc_braid_big_table[][256] = {
- {0x0000000000000000, 0x0e908ba500000000, 0x5d26669000000000,
- 0x53b6ed3500000000, 0xfb4abdfb00000000, 0xf5da365e00000000,
- 0xa66cdb6b00000000, 0xa8fc50ce00000000, 0xb7930b2c00000000,
- 0xb903808900000000, 0xeab56dbc00000000, 0xe425e61900000000,
- 0x4cd9b6d700000000, 0x42493d7200000000, 0x11ffd04700000000,
- 0x1f6f5be200000000, 0x6e27175800000000, 0x60b79cfd00000000,
- 0x330171c800000000, 0x3d91fa6d00000000, 0x956daaa300000000,
- 0x9bfd210600000000, 0xc84bcc3300000000, 0xc6db479600000000,
- 0xd9b41c7400000000, 0xd72497d100000000, 0x84927ae400000000,
- 0x8a02f14100000000, 0x22fea18f00000000, 0x2c6e2a2a00000000,
- 0x7fd8c71f00000000, 0x71484cba00000000, 0xdc4e2eb000000000,
- 0xd2dea51500000000, 0x8168482000000000, 0x8ff8c38500000000,
- 0x2704934b00000000, 0x299418ee00000000, 0x7a22f5db00000000,
- 0x74b27e7e00000000, 0x6bdd259c00000000, 0x654dae3900000000,
- 0x36fb430c00000000, 0x386bc8a900000000, 0x9097986700000000,
- 0x9e0713c200000000, 0xcdb1fef700000000, 0xc321755200000000,
- 0xb26939e800000000, 0xbcf9b24d00000000, 0xef4f5f7800000000,
- 0xe1dfd4dd00000000, 0x4923841300000000, 0x47b30fb600000000,
- 0x1405e28300000000, 0x1a95692600000000, 0x05fa32c400000000,
- 0x0b6ab96100000000, 0x58dc545400000000, 0x564cdff100000000,
- 0xfeb08f3f00000000, 0xf020049a00000000, 0xa396e9af00000000,
- 0xad06620a00000000, 0xf99b2dbb00000000, 0xf70ba61e00000000,
- 0xa4bd4b2b00000000, 0xaa2dc08e00000000, 0x02d1904000000000,
- 0x0c411be500000000, 0x5ff7f6d000000000, 0x51677d7500000000,
- 0x4e08269700000000, 0x4098ad3200000000, 0x132e400700000000,
- 0x1dbecba200000000, 0xb5429b6c00000000, 0xbbd210c900000000,
- 0xe864fdfc00000000, 0xe6f4765900000000, 0x97bc3ae300000000,
- 0x992cb14600000000, 0xca9a5c7300000000, 0xc40ad7d600000000,
- 0x6cf6871800000000, 0x62660cbd00000000, 0x31d0e18800000000,
- 0x3f406a2d00000000, 0x202f31cf00000000, 0x2ebfba6a00000000,
- 0x7d09575f00000000, 0x7399dcfa00000000, 0xdb658c3400000000,
- 0xd5f5079100000000, 0x8643eaa400000000, 0x88d3610100000000,
- 0x25d5030b00000000, 0x2b4588ae00000000, 0x78f3659b00000000,
- 0x7663ee3e00000000, 0xde9fbef000000000, 0xd00f355500000000,
- 0x83b9d86000000000, 0x8d2953c500000000, 0x9246082700000000,
- 0x9cd6838200000000, 0xcf606eb700000000, 0xc1f0e51200000000,
- 0x690cb5dc00000000, 0x679c3e7900000000, 0x342ad34c00000000,
- 0x3aba58e900000000, 0x4bf2145300000000, 0x45629ff600000000,
- 0x16d472c300000000, 0x1844f96600000000, 0xb0b8a9a800000000,
- 0xbe28220d00000000, 0xed9ecf3800000000, 0xe30e449d00000000,
- 0xfc611f7f00000000, 0xf2f194da00000000, 0xa14779ef00000000,
- 0xafd7f24a00000000, 0x072ba28400000000, 0x09bb292100000000,
- 0x5a0dc41400000000, 0x549d4fb100000000, 0xb3312aad00000000,
- 0xbda1a10800000000, 0xee174c3d00000000, 0xe087c79800000000,
- 0x487b975600000000, 0x46eb1cf300000000, 0x155df1c600000000,
- 0x1bcd7a6300000000, 0x04a2218100000000, 0x0a32aa2400000000,
- 0x5984471100000000, 0x5714ccb400000000, 0xffe89c7a00000000,
- 0xf17817df00000000, 0xa2cefaea00000000, 0xac5e714f00000000,
- 0xdd163df500000000, 0xd386b65000000000, 0x80305b6500000000,
- 0x8ea0d0c000000000, 0x265c800e00000000, 0x28cc0bab00000000,
- 0x7b7ae69e00000000, 0x75ea6d3b00000000, 0x6a8536d900000000,
- 0x6415bd7c00000000, 0x37a3504900000000, 0x3933dbec00000000,
- 0x91cf8b2200000000, 0x9f5f008700000000, 0xcce9edb200000000,
- 0xc279661700000000, 0x6f7f041d00000000, 0x61ef8fb800000000,
- 0x3259628d00000000, 0x3cc9e92800000000, 0x9435b9e600000000,
- 0x9aa5324300000000, 0xc913df7600000000, 0xc78354d300000000,
- 0xd8ec0f3100000000, 0xd67c849400000000, 0x85ca69a100000000,
- 0x8b5ae20400000000, 0x23a6b2ca00000000, 0x2d36396f00000000,
- 0x7e80d45a00000000, 0x70105fff00000000, 0x0158134500000000,
- 0x0fc898e000000000, 0x5c7e75d500000000, 0x52eefe7000000000,
- 0xfa12aebe00000000, 0xf482251b00000000, 0xa734c82e00000000,
- 0xa9a4438b00000000, 0xb6cb186900000000, 0xb85b93cc00000000,
- 0xebed7ef900000000, 0xe57df55c00000000, 0x4d81a59200000000,
- 0x43112e3700000000, 0x10a7c30200000000, 0x1e3748a700000000,
- 0x4aaa071600000000, 0x443a8cb300000000, 0x178c618600000000,
- 0x191cea2300000000, 0xb1e0baed00000000, 0xbf70314800000000,
- 0xecc6dc7d00000000, 0xe25657d800000000, 0xfd390c3a00000000,
- 0xf3a9879f00000000, 0xa01f6aaa00000000, 0xae8fe10f00000000,
- 0x0673b1c100000000, 0x08e33a6400000000, 0x5b55d75100000000,
- 0x55c55cf400000000, 0x248d104e00000000, 0x2a1d9beb00000000,
- 0x79ab76de00000000, 0x773bfd7b00000000, 0xdfc7adb500000000,
- 0xd157261000000000, 0x82e1cb2500000000, 0x8c71408000000000,
- 0x931e1b6200000000, 0x9d8e90c700000000, 0xce387df200000000,
- 0xc0a8f65700000000, 0x6854a69900000000, 0x66c42d3c00000000,
- 0x3572c00900000000, 0x3be24bac00000000, 0x96e429a600000000,
- 0x9874a20300000000, 0xcbc24f3600000000, 0xc552c49300000000,
- 0x6dae945d00000000, 0x633e1ff800000000, 0x3088f2cd00000000,
- 0x3e18796800000000, 0x2177228a00000000, 0x2fe7a92f00000000,
- 0x7c51441a00000000, 0x72c1cfbf00000000, 0xda3d9f7100000000,
- 0xd4ad14d400000000, 0x871bf9e100000000, 0x898b724400000000,
- 0xf8c33efe00000000, 0xf653b55b00000000, 0xa5e5586e00000000,
- 0xab75d3cb00000000, 0x0389830500000000, 0x0d1908a000000000,
- 0x5eafe59500000000, 0x503f6e3000000000, 0x4f5035d200000000,
- 0x41c0be7700000000, 0x1276534200000000, 0x1ce6d8e700000000,
- 0xb41a882900000000, 0xba8a038c00000000, 0xe93ceeb900000000,
- 0xe7ac651c00000000},
- {0x0000000000000000, 0x97a61de700000000, 0x6f4b4a1500000000,
- 0xf8ed57f200000000, 0xde96942a00000000, 0x493089cd00000000,
- 0xb1ddde3f00000000, 0x267bc3d800000000, 0xbc2d295500000000,
- 0x2b8b34b200000000, 0xd366634000000000, 0x44c07ea700000000,
- 0x62bbbd7f00000000, 0xf51da09800000000, 0x0df0f76a00000000,
- 0x9a56ea8d00000000, 0x785b52aa00000000, 0xeffd4f4d00000000,
- 0x171018bf00000000, 0x80b6055800000000, 0xa6cdc68000000000,
- 0x316bdb6700000000, 0xc9868c9500000000, 0x5e20917200000000,
- 0xc4767bff00000000, 0x53d0661800000000, 0xab3d31ea00000000,
- 0x3c9b2c0d00000000, 0x1ae0efd500000000, 0x8d46f23200000000,
- 0x75aba5c000000000, 0xe20db82700000000, 0xb1b0d58f00000000,
- 0x2616c86800000000, 0xdefb9f9a00000000, 0x495d827d00000000,
- 0x6f2641a500000000, 0xf8805c4200000000, 0x006d0bb000000000,
- 0x97cb165700000000, 0x0d9dfcda00000000, 0x9a3be13d00000000,
- 0x62d6b6cf00000000, 0xf570ab2800000000, 0xd30b68f000000000,
- 0x44ad751700000000, 0xbc4022e500000000, 0x2be63f0200000000,
- 0xc9eb872500000000, 0x5e4d9ac200000000, 0xa6a0cd3000000000,
- 0x3106d0d700000000, 0x177d130f00000000, 0x80db0ee800000000,
- 0x7836591a00000000, 0xef9044fd00000000, 0x75c6ae7000000000,
- 0xe260b39700000000, 0x1a8de46500000000, 0x8d2bf98200000000,
- 0xab503a5a00000000, 0x3cf627bd00000000, 0xc41b704f00000000,
- 0x53bd6da800000000, 0x2367dac400000000, 0xb4c1c72300000000,
- 0x4c2c90d100000000, 0xdb8a8d3600000000, 0xfdf14eee00000000,
- 0x6a57530900000000, 0x92ba04fb00000000, 0x051c191c00000000,
- 0x9f4af39100000000, 0x08ecee7600000000, 0xf001b98400000000,
- 0x67a7a46300000000, 0x41dc67bb00000000, 0xd67a7a5c00000000,
- 0x2e972dae00000000, 0xb931304900000000, 0x5b3c886e00000000,
- 0xcc9a958900000000, 0x3477c27b00000000, 0xa3d1df9c00000000,
- 0x85aa1c4400000000, 0x120c01a300000000, 0xeae1565100000000,
- 0x7d474bb600000000, 0xe711a13b00000000, 0x70b7bcdc00000000,
- 0x885aeb2e00000000, 0x1ffcf6c900000000, 0x3987351100000000,
- 0xae2128f600000000, 0x56cc7f0400000000, 0xc16a62e300000000,
- 0x92d70f4b00000000, 0x057112ac00000000, 0xfd9c455e00000000,
- 0x6a3a58b900000000, 0x4c419b6100000000, 0xdbe7868600000000,
- 0x230ad17400000000, 0xb4accc9300000000, 0x2efa261e00000000,
- 0xb95c3bf900000000, 0x41b16c0b00000000, 0xd61771ec00000000,
- 0xf06cb23400000000, 0x67caafd300000000, 0x9f27f82100000000,
- 0x0881e5c600000000, 0xea8c5de100000000, 0x7d2a400600000000,
- 0x85c717f400000000, 0x12610a1300000000, 0x341ac9cb00000000,
- 0xa3bcd42c00000000, 0x5b5183de00000000, 0xccf79e3900000000,
- 0x56a174b400000000, 0xc107695300000000, 0x39ea3ea100000000,
- 0xae4c234600000000, 0x8837e09e00000000, 0x1f91fd7900000000,
- 0xe77caa8b00000000, 0x70dab76c00000000, 0x07c8c55200000000,
- 0x906ed8b500000000, 0x68838f4700000000, 0xff2592a000000000,
- 0xd95e517800000000, 0x4ef84c9f00000000, 0xb6151b6d00000000,
- 0x21b3068a00000000, 0xbbe5ec0700000000, 0x2c43f1e000000000,
- 0xd4aea61200000000, 0x4308bbf500000000, 0x6573782d00000000,
- 0xf2d565ca00000000, 0x0a38323800000000, 0x9d9e2fdf00000000,
- 0x7f9397f800000000, 0xe8358a1f00000000, 0x10d8dded00000000,
- 0x877ec00a00000000, 0xa10503d200000000, 0x36a31e3500000000,
- 0xce4e49c700000000, 0x59e8542000000000, 0xc3bebead00000000,
- 0x5418a34a00000000, 0xacf5f4b800000000, 0x3b53e95f00000000,
- 0x1d282a8700000000, 0x8a8e376000000000, 0x7263609200000000,
- 0xe5c57d7500000000, 0xb67810dd00000000, 0x21de0d3a00000000,
- 0xd9335ac800000000, 0x4e95472f00000000, 0x68ee84f700000000,
- 0xff48991000000000, 0x07a5cee200000000, 0x9003d30500000000,
- 0x0a55398800000000, 0x9df3246f00000000, 0x651e739d00000000,
- 0xf2b86e7a00000000, 0xd4c3ada200000000, 0x4365b04500000000,
- 0xbb88e7b700000000, 0x2c2efa5000000000, 0xce23427700000000,
- 0x59855f9000000000, 0xa168086200000000, 0x36ce158500000000,
- 0x10b5d65d00000000, 0x8713cbba00000000, 0x7ffe9c4800000000,
- 0xe85881af00000000, 0x720e6b2200000000, 0xe5a876c500000000,
- 0x1d45213700000000, 0x8ae33cd000000000, 0xac98ff0800000000,
- 0x3b3ee2ef00000000, 0xc3d3b51d00000000, 0x5475a8fa00000000,
- 0x24af1f9600000000, 0xb309027100000000, 0x4be4558300000000,
- 0xdc42486400000000, 0xfa398bbc00000000, 0x6d9f965b00000000,
- 0x9572c1a900000000, 0x02d4dc4e00000000, 0x988236c300000000,
- 0x0f242b2400000000, 0xf7c97cd600000000, 0x606f613100000000,
- 0x4614a2e900000000, 0xd1b2bf0e00000000, 0x295fe8fc00000000,
- 0xbef9f51b00000000, 0x5cf44d3c00000000, 0xcb5250db00000000,
- 0x33bf072900000000, 0xa4191ace00000000, 0x8262d91600000000,
- 0x15c4c4f100000000, 0xed29930300000000, 0x7a8f8ee400000000,
- 0xe0d9646900000000, 0x777f798e00000000, 0x8f922e7c00000000,
- 0x1834339b00000000, 0x3e4ff04300000000, 0xa9e9eda400000000,
- 0x5104ba5600000000, 0xc6a2a7b100000000, 0x951fca1900000000,
- 0x02b9d7fe00000000, 0xfa54800c00000000, 0x6df29deb00000000,
- 0x4b895e3300000000, 0xdc2f43d400000000, 0x24c2142600000000,
- 0xb36409c100000000, 0x2932e34c00000000, 0xbe94feab00000000,
- 0x4679a95900000000, 0xd1dfb4be00000000, 0xf7a4776600000000,
- 0x60026a8100000000, 0x98ef3d7300000000, 0x0f49209400000000,
- 0xed4498b300000000, 0x7ae2855400000000, 0x820fd2a600000000,
- 0x15a9cf4100000000, 0x33d20c9900000000, 0xa474117e00000000,
- 0x5c99468c00000000, 0xcb3f5b6b00000000, 0x5169b1e600000000,
- 0xc6cfac0100000000, 0x3e22fbf300000000, 0xa984e61400000000,
- 0x8fff25cc00000000, 0x1859382b00000000, 0xe0b46fd900000000,
- 0x7712723e00000000},
- {0x0000000000000000, 0x411b8c6e00000000, 0x823618dd00000000,
- 0xc32d94b300000000, 0x456b416100000000, 0x0470cd0f00000000,
- 0xc75d59bc00000000, 0x8646d5d200000000, 0x8ad682c200000000,
- 0xcbcd0eac00000000, 0x08e09a1f00000000, 0x49fb167100000000,
- 0xcfbdc3a300000000, 0x8ea64fcd00000000, 0x4d8bdb7e00000000,
- 0x0c90571000000000, 0x55ab745e00000000, 0x14b0f83000000000,
- 0xd79d6c8300000000, 0x9686e0ed00000000, 0x10c0353f00000000,
- 0x51dbb95100000000, 0x92f62de200000000, 0xd3eda18c00000000,
- 0xdf7df69c00000000, 0x9e667af200000000, 0x5d4bee4100000000,
- 0x1c50622f00000000, 0x9a16b7fd00000000, 0xdb0d3b9300000000,
- 0x1820af2000000000, 0x593b234e00000000, 0xaa56e9bc00000000,
- 0xeb4d65d200000000, 0x2860f16100000000, 0x697b7d0f00000000,
- 0xef3da8dd00000000, 0xae2624b300000000, 0x6d0bb00000000000,
- 0x2c103c6e00000000, 0x20806b7e00000000, 0x619be71000000000,
- 0xa2b673a300000000, 0xe3adffcd00000000, 0x65eb2a1f00000000,
- 0x24f0a67100000000, 0xe7dd32c200000000, 0xa6c6beac00000000,
- 0xfffd9de200000000, 0xbee6118c00000000, 0x7dcb853f00000000,
- 0x3cd0095100000000, 0xba96dc8300000000, 0xfb8d50ed00000000,
- 0x38a0c45e00000000, 0x79bb483000000000, 0x752b1f2000000000,
- 0x3430934e00000000, 0xf71d07fd00000000, 0xb6068b9300000000,
- 0x30405e4100000000, 0x715bd22f00000000, 0xb276469c00000000,
- 0xf36dcaf200000000, 0x15aba3a200000000, 0x54b02fcc00000000,
- 0x979dbb7f00000000, 0xd686371100000000, 0x50c0e2c300000000,
- 0x11db6ead00000000, 0xd2f6fa1e00000000, 0x93ed767000000000,
- 0x9f7d216000000000, 0xde66ad0e00000000, 0x1d4b39bd00000000,
- 0x5c50b5d300000000, 0xda16600100000000, 0x9b0dec6f00000000,
- 0x582078dc00000000, 0x193bf4b200000000, 0x4000d7fc00000000,
- 0x011b5b9200000000, 0xc236cf2100000000, 0x832d434f00000000,
- 0x056b969d00000000, 0x44701af300000000, 0x875d8e4000000000,
- 0xc646022e00000000, 0xcad6553e00000000, 0x8bcdd95000000000,
- 0x48e04de300000000, 0x09fbc18d00000000, 0x8fbd145f00000000,
- 0xcea6983100000000, 0x0d8b0c8200000000, 0x4c9080ec00000000,
- 0xbffd4a1e00000000, 0xfee6c67000000000, 0x3dcb52c300000000,
- 0x7cd0dead00000000, 0xfa960b7f00000000, 0xbb8d871100000000,
- 0x78a013a200000000, 0x39bb9fcc00000000, 0x352bc8dc00000000,
- 0x743044b200000000, 0xb71dd00100000000, 0xf6065c6f00000000,
- 0x704089bd00000000, 0x315b05d300000000, 0xf276916000000000,
- 0xb36d1d0e00000000, 0xea563e4000000000, 0xab4db22e00000000,
- 0x6860269d00000000, 0x297baaf300000000, 0xaf3d7f2100000000,
- 0xee26f34f00000000, 0x2d0b67fc00000000, 0x6c10eb9200000000,
- 0x6080bc8200000000, 0x219b30ec00000000, 0xe2b6a45f00000000,
- 0xa3ad283100000000, 0x25ebfde300000000, 0x64f0718d00000000,
- 0xa7dde53e00000000, 0xe6c6695000000000, 0x6b50369e00000000,
- 0x2a4bbaf000000000, 0xe9662e4300000000, 0xa87da22d00000000,
- 0x2e3b77ff00000000, 0x6f20fb9100000000, 0xac0d6f2200000000,
- 0xed16e34c00000000, 0xe186b45c00000000, 0xa09d383200000000,
- 0x63b0ac8100000000, 0x22ab20ef00000000, 0xa4edf53d00000000,
- 0xe5f6795300000000, 0x26dbede000000000, 0x67c0618e00000000,
- 0x3efb42c000000000, 0x7fe0ceae00000000, 0xbccd5a1d00000000,
- 0xfdd6d67300000000, 0x7b9003a100000000, 0x3a8b8fcf00000000,
- 0xf9a61b7c00000000, 0xb8bd971200000000, 0xb42dc00200000000,
- 0xf5364c6c00000000, 0x361bd8df00000000, 0x770054b100000000,
- 0xf146816300000000, 0xb05d0d0d00000000, 0x737099be00000000,
- 0x326b15d000000000, 0xc106df2200000000, 0x801d534c00000000,
- 0x4330c7ff00000000, 0x022b4b9100000000, 0x846d9e4300000000,
- 0xc576122d00000000, 0x065b869e00000000, 0x47400af000000000,
- 0x4bd05de000000000, 0x0acbd18e00000000, 0xc9e6453d00000000,
- 0x88fdc95300000000, 0x0ebb1c8100000000, 0x4fa090ef00000000,
- 0x8c8d045c00000000, 0xcd96883200000000, 0x94adab7c00000000,
- 0xd5b6271200000000, 0x169bb3a100000000, 0x57803fcf00000000,
- 0xd1c6ea1d00000000, 0x90dd667300000000, 0x53f0f2c000000000,
- 0x12eb7eae00000000, 0x1e7b29be00000000, 0x5f60a5d000000000,
- 0x9c4d316300000000, 0xdd56bd0d00000000, 0x5b1068df00000000,
- 0x1a0be4b100000000, 0xd926700200000000, 0x983dfc6c00000000,
- 0x7efb953c00000000, 0x3fe0195200000000, 0xfccd8de100000000,
- 0xbdd6018f00000000, 0x3b90d45d00000000, 0x7a8b583300000000,
- 0xb9a6cc8000000000, 0xf8bd40ee00000000, 0xf42d17fe00000000,
- 0xb5369b9000000000, 0x761b0f2300000000, 0x3700834d00000000,
- 0xb146569f00000000, 0xf05ddaf100000000, 0x33704e4200000000,
- 0x726bc22c00000000, 0x2b50e16200000000, 0x6a4b6d0c00000000,
- 0xa966f9bf00000000, 0xe87d75d100000000, 0x6e3ba00300000000,
- 0x2f202c6d00000000, 0xec0db8de00000000, 0xad1634b000000000,
- 0xa18663a000000000, 0xe09defce00000000, 0x23b07b7d00000000,
- 0x62abf71300000000, 0xe4ed22c100000000, 0xa5f6aeaf00000000,
- 0x66db3a1c00000000, 0x27c0b67200000000, 0xd4ad7c8000000000,
- 0x95b6f0ee00000000, 0x569b645d00000000, 0x1780e83300000000,
- 0x91c63de100000000, 0xd0ddb18f00000000, 0x13f0253c00000000,
- 0x52eba95200000000, 0x5e7bfe4200000000, 0x1f60722c00000000,
- 0xdc4de69f00000000, 0x9d566af100000000, 0x1b10bf2300000000,
- 0x5a0b334d00000000, 0x9926a7fe00000000, 0xd83d2b9000000000,
- 0x810608de00000000, 0xc01d84b000000000, 0x0330100300000000,
- 0x422b9c6d00000000, 0xc46d49bf00000000, 0x8576c5d100000000,
- 0x465b516200000000, 0x0740dd0c00000000, 0x0bd08a1c00000000,
- 0x4acb067200000000, 0x89e692c100000000, 0xc8fd1eaf00000000,
- 0x4ebbcb7d00000000, 0x0fa0471300000000, 0xcc8dd3a000000000,
- 0x8d965fce00000000},
- {0x0000000000000000, 0x1dfdb50100000000, 0x3afa6b0300000000,
- 0x2707de0200000000, 0x74f4d70600000000, 0x6909620700000000,
- 0x4e0ebc0500000000, 0x53f3090400000000, 0xe8e8af0d00000000,
- 0xf5151a0c00000000, 0xd212c40e00000000, 0xcfef710f00000000,
- 0x9c1c780b00000000, 0x81e1cd0a00000000, 0xa6e6130800000000,
- 0xbb1ba60900000000, 0xd0d15f1b00000000, 0xcd2cea1a00000000,
- 0xea2b341800000000, 0xf7d6811900000000, 0xa425881d00000000,
- 0xb9d83d1c00000000, 0x9edfe31e00000000, 0x8322561f00000000,
- 0x3839f01600000000, 0x25c4451700000000, 0x02c39b1500000000,
- 0x1f3e2e1400000000, 0x4ccd271000000000, 0x5130921100000000,
- 0x76374c1300000000, 0x6bcaf91200000000, 0xa0a3bf3600000000,
- 0xbd5e0a3700000000, 0x9a59d43500000000, 0x87a4613400000000,
- 0xd457683000000000, 0xc9aadd3100000000, 0xeead033300000000,
- 0xf350b63200000000, 0x484b103b00000000, 0x55b6a53a00000000,
- 0x72b17b3800000000, 0x6f4cce3900000000, 0x3cbfc73d00000000,
- 0x2142723c00000000, 0x0645ac3e00000000, 0x1bb8193f00000000,
- 0x7072e02d00000000, 0x6d8f552c00000000, 0x4a888b2e00000000,
- 0x57753e2f00000000, 0x0486372b00000000, 0x197b822a00000000,
- 0x3e7c5c2800000000, 0x2381e92900000000, 0x989a4f2000000000,
- 0x8567fa2100000000, 0xa260242300000000, 0xbf9d912200000000,
- 0xec6e982600000000, 0xf1932d2700000000, 0xd694f32500000000,
- 0xcb69462400000000, 0x40477f6d00000000, 0x5dbaca6c00000000,
- 0x7abd146e00000000, 0x6740a16f00000000, 0x34b3a86b00000000,
- 0x294e1d6a00000000, 0x0e49c36800000000, 0x13b4766900000000,
- 0xa8afd06000000000, 0xb552656100000000, 0x9255bb6300000000,
- 0x8fa80e6200000000, 0xdc5b076600000000, 0xc1a6b26700000000,
- 0xe6a16c6500000000, 0xfb5cd96400000000, 0x9096207600000000,
- 0x8d6b957700000000, 0xaa6c4b7500000000, 0xb791fe7400000000,
- 0xe462f77000000000, 0xf99f427100000000, 0xde989c7300000000,
- 0xc365297200000000, 0x787e8f7b00000000, 0x65833a7a00000000,
- 0x4284e47800000000, 0x5f79517900000000, 0x0c8a587d00000000,
- 0x1177ed7c00000000, 0x3670337e00000000, 0x2b8d867f00000000,
- 0xe0e4c05b00000000, 0xfd19755a00000000, 0xda1eab5800000000,
- 0xc7e31e5900000000, 0x9410175d00000000, 0x89eda25c00000000,
- 0xaeea7c5e00000000, 0xb317c95f00000000, 0x080c6f5600000000,
- 0x15f1da5700000000, 0x32f6045500000000, 0x2f0bb15400000000,
- 0x7cf8b85000000000, 0x61050d5100000000, 0x4602d35300000000,
- 0x5bff665200000000, 0x30359f4000000000, 0x2dc82a4100000000,
- 0x0acff44300000000, 0x1732414200000000, 0x44c1484600000000,
- 0x593cfd4700000000, 0x7e3b234500000000, 0x63c6964400000000,
- 0xd8dd304d00000000, 0xc520854c00000000, 0xe2275b4e00000000,
- 0xffdaee4f00000000, 0xac29e74b00000000, 0xb1d4524a00000000,
- 0x96d38c4800000000, 0x8b2e394900000000, 0x808efeda00000000,
- 0x9d734bdb00000000, 0xba7495d900000000, 0xa78920d800000000,
- 0xf47a29dc00000000, 0xe9879cdd00000000, 0xce8042df00000000,
- 0xd37df7de00000000, 0x686651d700000000, 0x759be4d600000000,
- 0x529c3ad400000000, 0x4f618fd500000000, 0x1c9286d100000000,
- 0x016f33d000000000, 0x2668edd200000000, 0x3b9558d300000000,
- 0x505fa1c100000000, 0x4da214c000000000, 0x6aa5cac200000000,
- 0x77587fc300000000, 0x24ab76c700000000, 0x3956c3c600000000,
- 0x1e511dc400000000, 0x03aca8c500000000, 0xb8b70ecc00000000,
- 0xa54abbcd00000000, 0x824d65cf00000000, 0x9fb0d0ce00000000,
- 0xcc43d9ca00000000, 0xd1be6ccb00000000, 0xf6b9b2c900000000,
- 0xeb4407c800000000, 0x202d41ec00000000, 0x3dd0f4ed00000000,
- 0x1ad72aef00000000, 0x072a9fee00000000, 0x54d996ea00000000,
- 0x492423eb00000000, 0x6e23fde900000000, 0x73de48e800000000,
- 0xc8c5eee100000000, 0xd5385be000000000, 0xf23f85e200000000,
- 0xefc230e300000000, 0xbc3139e700000000, 0xa1cc8ce600000000,
- 0x86cb52e400000000, 0x9b36e7e500000000, 0xf0fc1ef700000000,
- 0xed01abf600000000, 0xca0675f400000000, 0xd7fbc0f500000000,
- 0x8408c9f100000000, 0x99f57cf000000000, 0xbef2a2f200000000,
- 0xa30f17f300000000, 0x1814b1fa00000000, 0x05e904fb00000000,
- 0x22eedaf900000000, 0x3f136ff800000000, 0x6ce066fc00000000,
- 0x711dd3fd00000000, 0x561a0dff00000000, 0x4be7b8fe00000000,
- 0xc0c981b700000000, 0xdd3434b600000000, 0xfa33eab400000000,
- 0xe7ce5fb500000000, 0xb43d56b100000000, 0xa9c0e3b000000000,
- 0x8ec73db200000000, 0x933a88b300000000, 0x28212eba00000000,
- 0x35dc9bbb00000000, 0x12db45b900000000, 0x0f26f0b800000000,
- 0x5cd5f9bc00000000, 0x41284cbd00000000, 0x662f92bf00000000,
- 0x7bd227be00000000, 0x1018deac00000000, 0x0de56bad00000000,
- 0x2ae2b5af00000000, 0x371f00ae00000000, 0x64ec09aa00000000,
- 0x7911bcab00000000, 0x5e1662a900000000, 0x43ebd7a800000000,
- 0xf8f071a100000000, 0xe50dc4a000000000, 0xc20a1aa200000000,
- 0xdff7afa300000000, 0x8c04a6a700000000, 0x91f913a600000000,
- 0xb6fecda400000000, 0xab0378a500000000, 0x606a3e8100000000,
- 0x7d978b8000000000, 0x5a90558200000000, 0x476de08300000000,
- 0x149ee98700000000, 0x09635c8600000000, 0x2e64828400000000,
- 0x3399378500000000, 0x8882918c00000000, 0x957f248d00000000,
- 0xb278fa8f00000000, 0xaf854f8e00000000, 0xfc76468a00000000,
- 0xe18bf38b00000000, 0xc68c2d8900000000, 0xdb71988800000000,
- 0xb0bb619a00000000, 0xad46d49b00000000, 0x8a410a9900000000,
- 0x97bcbf9800000000, 0xc44fb69c00000000, 0xd9b2039d00000000,
- 0xfeb5dd9f00000000, 0xe348689e00000000, 0x5853ce9700000000,
- 0x45ae7b9600000000, 0x62a9a59400000000, 0x7f54109500000000,
- 0x2ca7199100000000, 0x315aac9000000000, 0x165d729200000000,
- 0x0ba0c79300000000},
- {0x0000000000000000, 0x24d9076300000000, 0x48b20fc600000000,
- 0x6c6b08a500000000, 0xd1626e5700000000, 0xf5bb693400000000,
- 0x99d0619100000000, 0xbd0966f200000000, 0xa2c5dcae00000000,
- 0x861cdbcd00000000, 0xea77d36800000000, 0xceaed40b00000000,
- 0x73a7b2f900000000, 0x577eb59a00000000, 0x3b15bd3f00000000,
- 0x1fccba5c00000000, 0x058dc88600000000, 0x2154cfe500000000,
- 0x4d3fc74000000000, 0x69e6c02300000000, 0xd4efa6d100000000,
- 0xf036a1b200000000, 0x9c5da91700000000, 0xb884ae7400000000,
- 0xa748142800000000, 0x8391134b00000000, 0xeffa1bee00000000,
- 0xcb231c8d00000000, 0x762a7a7f00000000, 0x52f37d1c00000000,
- 0x3e9875b900000000, 0x1a4172da00000000, 0x4b1ce0d600000000,
- 0x6fc5e7b500000000, 0x03aeef1000000000, 0x2777e87300000000,
- 0x9a7e8e8100000000, 0xbea789e200000000, 0xd2cc814700000000,
- 0xf615862400000000, 0xe9d93c7800000000, 0xcd003b1b00000000,
- 0xa16b33be00000000, 0x85b234dd00000000, 0x38bb522f00000000,
- 0x1c62554c00000000, 0x70095de900000000, 0x54d05a8a00000000,
- 0x4e91285000000000, 0x6a482f3300000000, 0x0623279600000000,
- 0x22fa20f500000000, 0x9ff3460700000000, 0xbb2a416400000000,
- 0xd74149c100000000, 0xf3984ea200000000, 0xec54f4fe00000000,
- 0xc88df39d00000000, 0xa4e6fb3800000000, 0x803ffc5b00000000,
- 0x3d369aa900000000, 0x19ef9dca00000000, 0x7584956f00000000,
- 0x515d920c00000000, 0xd73eb17600000000, 0xf3e7b61500000000,
- 0x9f8cbeb000000000, 0xbb55b9d300000000, 0x065cdf2100000000,
- 0x2285d84200000000, 0x4eeed0e700000000, 0x6a37d78400000000,
- 0x75fb6dd800000000, 0x51226abb00000000, 0x3d49621e00000000,
- 0x1990657d00000000, 0xa499038f00000000, 0x804004ec00000000,
- 0xec2b0c4900000000, 0xc8f20b2a00000000, 0xd2b379f000000000,
- 0xf66a7e9300000000, 0x9a01763600000000, 0xbed8715500000000,
- 0x03d117a700000000, 0x270810c400000000, 0x4b63186100000000,
- 0x6fba1f0200000000, 0x7076a55e00000000, 0x54afa23d00000000,
- 0x38c4aa9800000000, 0x1c1dadfb00000000, 0xa114cb0900000000,
- 0x85cdcc6a00000000, 0xe9a6c4cf00000000, 0xcd7fc3ac00000000,
- 0x9c2251a000000000, 0xb8fb56c300000000, 0xd4905e6600000000,
- 0xf049590500000000, 0x4d403ff700000000, 0x6999389400000000,
- 0x05f2303100000000, 0x212b375200000000, 0x3ee78d0e00000000,
- 0x1a3e8a6d00000000, 0x765582c800000000, 0x528c85ab00000000,
- 0xef85e35900000000, 0xcb5ce43a00000000, 0xa737ec9f00000000,
- 0x83eeebfc00000000, 0x99af992600000000, 0xbd769e4500000000,
- 0xd11d96e000000000, 0xf5c4918300000000, 0x48cdf77100000000,
- 0x6c14f01200000000, 0x007ff8b700000000, 0x24a6ffd400000000,
- 0x3b6a458800000000, 0x1fb342eb00000000, 0x73d84a4e00000000,
- 0x57014d2d00000000, 0xea082bdf00000000, 0xced12cbc00000000,
- 0xa2ba241900000000, 0x8663237a00000000, 0xae7d62ed00000000,
- 0x8aa4658e00000000, 0xe6cf6d2b00000000, 0xc2166a4800000000,
- 0x7f1f0cba00000000, 0x5bc60bd900000000, 0x37ad037c00000000,
- 0x1374041f00000000, 0x0cb8be4300000000, 0x2861b92000000000,
- 0x440ab18500000000, 0x60d3b6e600000000, 0xdddad01400000000,
- 0xf903d77700000000, 0x9568dfd200000000, 0xb1b1d8b100000000,
- 0xabf0aa6b00000000, 0x8f29ad0800000000, 0xe342a5ad00000000,
- 0xc79ba2ce00000000, 0x7a92c43c00000000, 0x5e4bc35f00000000,
- 0x3220cbfa00000000, 0x16f9cc9900000000, 0x093576c500000000,
- 0x2dec71a600000000, 0x4187790300000000, 0x655e7e6000000000,
- 0xd857189200000000, 0xfc8e1ff100000000, 0x90e5175400000000,
- 0xb43c103700000000, 0xe561823b00000000, 0xc1b8855800000000,
- 0xadd38dfd00000000, 0x890a8a9e00000000, 0x3403ec6c00000000,
- 0x10daeb0f00000000, 0x7cb1e3aa00000000, 0x5868e4c900000000,
- 0x47a45e9500000000, 0x637d59f600000000, 0x0f16515300000000,
- 0x2bcf563000000000, 0x96c630c200000000, 0xb21f37a100000000,
- 0xde743f0400000000, 0xfaad386700000000, 0xe0ec4abd00000000,
- 0xc4354dde00000000, 0xa85e457b00000000, 0x8c87421800000000,
- 0x318e24ea00000000, 0x1557238900000000, 0x793c2b2c00000000,
- 0x5de52c4f00000000, 0x4229961300000000, 0x66f0917000000000,
- 0x0a9b99d500000000, 0x2e429eb600000000, 0x934bf84400000000,
- 0xb792ff2700000000, 0xdbf9f78200000000, 0xff20f0e100000000,
- 0x7943d39b00000000, 0x5d9ad4f800000000, 0x31f1dc5d00000000,
- 0x1528db3e00000000, 0xa821bdcc00000000, 0x8cf8baaf00000000,
- 0xe093b20a00000000, 0xc44ab56900000000, 0xdb860f3500000000,
- 0xff5f085600000000, 0x933400f300000000, 0xb7ed079000000000,
- 0x0ae4616200000000, 0x2e3d660100000000, 0x42566ea400000000,
- 0x668f69c700000000, 0x7cce1b1d00000000, 0x58171c7e00000000,
- 0x347c14db00000000, 0x10a513b800000000, 0xadac754a00000000,
- 0x8975722900000000, 0xe51e7a8c00000000, 0xc1c77def00000000,
- 0xde0bc7b300000000, 0xfad2c0d000000000, 0x96b9c87500000000,
- 0xb260cf1600000000, 0x0f69a9e400000000, 0x2bb0ae8700000000,
- 0x47dba62200000000, 0x6302a14100000000, 0x325f334d00000000,
- 0x1686342e00000000, 0x7aed3c8b00000000, 0x5e343be800000000,
- 0xe33d5d1a00000000, 0xc7e45a7900000000, 0xab8f52dc00000000,
- 0x8f5655bf00000000, 0x909aefe300000000, 0xb443e88000000000,
- 0xd828e02500000000, 0xfcf1e74600000000, 0x41f881b400000000,
- 0x652186d700000000, 0x094a8e7200000000, 0x2d93891100000000,
- 0x37d2fbcb00000000, 0x130bfca800000000, 0x7f60f40d00000000,
- 0x5bb9f36e00000000, 0xe6b0959c00000000, 0xc26992ff00000000,
- 0xae029a5a00000000, 0x8adb9d3900000000, 0x9517276500000000,
- 0xb1ce200600000000, 0xdda528a300000000, 0xf97c2fc000000000,
- 0x4475493200000000, 0x60ac4e5100000000, 0x0cc746f400000000,
- 0x281e419700000000},
- {0x0000000000000000, 0x08e3603c00000000, 0x10c6c17800000000,
- 0x1825a14400000000, 0x208c83f100000000, 0x286fe3cd00000000,
- 0x304a428900000000, 0x38a922b500000000, 0x011e763800000000,
- 0x09fd160400000000, 0x11d8b74000000000, 0x193bd77c00000000,
- 0x2192f5c900000000, 0x297195f500000000, 0x315434b100000000,
- 0x39b7548d00000000, 0x023cec7000000000, 0x0adf8c4c00000000,
- 0x12fa2d0800000000, 0x1a194d3400000000, 0x22b06f8100000000,
- 0x2a530fbd00000000, 0x3276aef900000000, 0x3a95cec500000000,
- 0x03229a4800000000, 0x0bc1fa7400000000, 0x13e45b3000000000,
- 0x1b073b0c00000000, 0x23ae19b900000000, 0x2b4d798500000000,
- 0x3368d8c100000000, 0x3b8bb8fd00000000, 0x0478d8e100000000,
- 0x0c9bb8dd00000000, 0x14be199900000000, 0x1c5d79a500000000,
- 0x24f45b1000000000, 0x2c173b2c00000000, 0x34329a6800000000,
- 0x3cd1fa5400000000, 0x0566aed900000000, 0x0d85cee500000000,
- 0x15a06fa100000000, 0x1d430f9d00000000, 0x25ea2d2800000000,
- 0x2d094d1400000000, 0x352cec5000000000, 0x3dcf8c6c00000000,
- 0x0644349100000000, 0x0ea754ad00000000, 0x1682f5e900000000,
- 0x1e6195d500000000, 0x26c8b76000000000, 0x2e2bd75c00000000,
- 0x360e761800000000, 0x3eed162400000000, 0x075a42a900000000,
- 0x0fb9229500000000, 0x179c83d100000000, 0x1f7fe3ed00000000,
- 0x27d6c15800000000, 0x2f35a16400000000, 0x3710002000000000,
- 0x3ff3601c00000000, 0x49f6c11800000000, 0x4115a12400000000,
- 0x5930006000000000, 0x51d3605c00000000, 0x697a42e900000000,
- 0x619922d500000000, 0x79bc839100000000, 0x715fe3ad00000000,
- 0x48e8b72000000000, 0x400bd71c00000000, 0x582e765800000000,
- 0x50cd166400000000, 0x686434d100000000, 0x608754ed00000000,
- 0x78a2f5a900000000, 0x7041959500000000, 0x4bca2d6800000000,
- 0x43294d5400000000, 0x5b0cec1000000000, 0x53ef8c2c00000000,
- 0x6b46ae9900000000, 0x63a5cea500000000, 0x7b806fe100000000,
- 0x73630fdd00000000, 0x4ad45b5000000000, 0x42373b6c00000000,
- 0x5a129a2800000000, 0x52f1fa1400000000, 0x6a58d8a100000000,
- 0x62bbb89d00000000, 0x7a9e19d900000000, 0x727d79e500000000,
- 0x4d8e19f900000000, 0x456d79c500000000, 0x5d48d88100000000,
- 0x55abb8bd00000000, 0x6d029a0800000000, 0x65e1fa3400000000,
- 0x7dc45b7000000000, 0x75273b4c00000000, 0x4c906fc100000000,
- 0x44730ffd00000000, 0x5c56aeb900000000, 0x54b5ce8500000000,
- 0x6c1cec3000000000, 0x64ff8c0c00000000, 0x7cda2d4800000000,
- 0x74394d7400000000, 0x4fb2f58900000000, 0x475195b500000000,
- 0x5f7434f100000000, 0x579754cd00000000, 0x6f3e767800000000,
- 0x67dd164400000000, 0x7ff8b70000000000, 0x771bd73c00000000,
- 0x4eac83b100000000, 0x464fe38d00000000, 0x5e6a42c900000000,
- 0x568922f500000000, 0x6e20004000000000, 0x66c3607c00000000,
- 0x7ee6c13800000000, 0x7605a10400000000, 0x92ec833100000000,
- 0x9a0fe30d00000000, 0x822a424900000000, 0x8ac9227500000000,
- 0xb26000c000000000, 0xba8360fc00000000, 0xa2a6c1b800000000,
- 0xaa45a18400000000, 0x93f2f50900000000, 0x9b11953500000000,
- 0x8334347100000000, 0x8bd7544d00000000, 0xb37e76f800000000,
- 0xbb9d16c400000000, 0xa3b8b78000000000, 0xab5bd7bc00000000,
- 0x90d06f4100000000, 0x98330f7d00000000, 0x8016ae3900000000,
- 0x88f5ce0500000000, 0xb05cecb000000000, 0xb8bf8c8c00000000,
- 0xa09a2dc800000000, 0xa8794df400000000, 0x91ce197900000000,
- 0x992d794500000000, 0x8108d80100000000, 0x89ebb83d00000000,
- 0xb1429a8800000000, 0xb9a1fab400000000, 0xa1845bf000000000,
- 0xa9673bcc00000000, 0x96945bd000000000, 0x9e773bec00000000,
- 0x86529aa800000000, 0x8eb1fa9400000000, 0xb618d82100000000,
- 0xbefbb81d00000000, 0xa6de195900000000, 0xae3d796500000000,
- 0x978a2de800000000, 0x9f694dd400000000, 0x874cec9000000000,
- 0x8faf8cac00000000, 0xb706ae1900000000, 0xbfe5ce2500000000,
- 0xa7c06f6100000000, 0xaf230f5d00000000, 0x94a8b7a000000000,
- 0x9c4bd79c00000000, 0x846e76d800000000, 0x8c8d16e400000000,
- 0xb424345100000000, 0xbcc7546d00000000, 0xa4e2f52900000000,
- 0xac01951500000000, 0x95b6c19800000000, 0x9d55a1a400000000,
- 0x857000e000000000, 0x8d9360dc00000000, 0xb53a426900000000,
- 0xbdd9225500000000, 0xa5fc831100000000, 0xad1fe32d00000000,
- 0xdb1a422900000000, 0xd3f9221500000000, 0xcbdc835100000000,
- 0xc33fe36d00000000, 0xfb96c1d800000000, 0xf375a1e400000000,
- 0xeb5000a000000000, 0xe3b3609c00000000, 0xda04341100000000,
- 0xd2e7542d00000000, 0xcac2f56900000000, 0xc221955500000000,
- 0xfa88b7e000000000, 0xf26bd7dc00000000, 0xea4e769800000000,
- 0xe2ad16a400000000, 0xd926ae5900000000, 0xd1c5ce6500000000,
- 0xc9e06f2100000000, 0xc1030f1d00000000, 0xf9aa2da800000000,
- 0xf1494d9400000000, 0xe96cecd000000000, 0xe18f8cec00000000,
- 0xd838d86100000000, 0xd0dbb85d00000000, 0xc8fe191900000000,
- 0xc01d792500000000, 0xf8b45b9000000000, 0xf0573bac00000000,
- 0xe8729ae800000000, 0xe091fad400000000, 0xdf629ac800000000,
- 0xd781faf400000000, 0xcfa45bb000000000, 0xc7473b8c00000000,
- 0xffee193900000000, 0xf70d790500000000, 0xef28d84100000000,
- 0xe7cbb87d00000000, 0xde7cecf000000000, 0xd69f8ccc00000000,
- 0xceba2d8800000000, 0xc6594db400000000, 0xfef06f0100000000,
- 0xf6130f3d00000000, 0xee36ae7900000000, 0xe6d5ce4500000000,
- 0xdd5e76b800000000, 0xd5bd168400000000, 0xcd98b7c000000000,
- 0xc57bd7fc00000000, 0xfdd2f54900000000, 0xf531957500000000,
- 0xed14343100000000, 0xe5f7540d00000000, 0xdc40008000000000,
- 0xd4a360bc00000000, 0xcc86c1f800000000, 0xc465a1c400000000,
- 0xfccc837100000000, 0xf42fe34d00000000, 0xec0a420900000000,
- 0xe4e9223500000000},
- {0x0000000000000000, 0xd1e8e70e00000000, 0xa2d1cf1d00000000,
- 0x7339281300000000, 0x44a39f3b00000000, 0x954b783500000000,
- 0xe672502600000000, 0x379ab72800000000, 0x88463f7700000000,
- 0x59aed87900000000, 0x2a97f06a00000000, 0xfb7f176400000000,
- 0xcce5a04c00000000, 0x1d0d474200000000, 0x6e346f5100000000,
- 0xbfdc885f00000000, 0x108d7eee00000000, 0xc16599e000000000,
- 0xb25cb1f300000000, 0x63b456fd00000000, 0x542ee1d500000000,
- 0x85c606db00000000, 0xf6ff2ec800000000, 0x2717c9c600000000,
- 0x98cb419900000000, 0x4923a69700000000, 0x3a1a8e8400000000,
- 0xebf2698a00000000, 0xdc68dea200000000, 0x0d8039ac00000000,
- 0x7eb911bf00000000, 0xaf51f6b100000000, 0x611c8c0700000000,
- 0xb0f46b0900000000, 0xc3cd431a00000000, 0x1225a41400000000,
- 0x25bf133c00000000, 0xf457f43200000000, 0x876edc2100000000,
- 0x56863b2f00000000, 0xe95ab37000000000, 0x38b2547e00000000,
- 0x4b8b7c6d00000000, 0x9a639b6300000000, 0xadf92c4b00000000,
- 0x7c11cb4500000000, 0x0f28e35600000000, 0xdec0045800000000,
- 0x7191f2e900000000, 0xa07915e700000000, 0xd3403df400000000,
- 0x02a8dafa00000000, 0x35326dd200000000, 0xe4da8adc00000000,
- 0x97e3a2cf00000000, 0x460b45c100000000, 0xf9d7cd9e00000000,
- 0x283f2a9000000000, 0x5b06028300000000, 0x8aeee58d00000000,
- 0xbd7452a500000000, 0x6c9cb5ab00000000, 0x1fa59db800000000,
- 0xce4d7ab600000000, 0xc238180f00000000, 0x13d0ff0100000000,
- 0x60e9d71200000000, 0xb101301c00000000, 0x869b873400000000,
- 0x5773603a00000000, 0x244a482900000000, 0xf5a2af2700000000,
- 0x4a7e277800000000, 0x9b96c07600000000, 0xe8afe86500000000,
- 0x39470f6b00000000, 0x0eddb84300000000, 0xdf355f4d00000000,
- 0xac0c775e00000000, 0x7de4905000000000, 0xd2b566e100000000,
- 0x035d81ef00000000, 0x7064a9fc00000000, 0xa18c4ef200000000,
- 0x9616f9da00000000, 0x47fe1ed400000000, 0x34c736c700000000,
- 0xe52fd1c900000000, 0x5af3599600000000, 0x8b1bbe9800000000,
- 0xf822968b00000000, 0x29ca718500000000, 0x1e50c6ad00000000,
- 0xcfb821a300000000, 0xbc8109b000000000, 0x6d69eebe00000000,
- 0xa324940800000000, 0x72cc730600000000, 0x01f55b1500000000,
- 0xd01dbc1b00000000, 0xe7870b3300000000, 0x366fec3d00000000,
- 0x4556c42e00000000, 0x94be232000000000, 0x2b62ab7f00000000,
- 0xfa8a4c7100000000, 0x89b3646200000000, 0x585b836c00000000,
- 0x6fc1344400000000, 0xbe29d34a00000000, 0xcd10fb5900000000,
- 0x1cf81c5700000000, 0xb3a9eae600000000, 0x62410de800000000,
- 0x117825fb00000000, 0xc090c2f500000000, 0xf70a75dd00000000,
- 0x26e292d300000000, 0x55dbbac000000000, 0x84335dce00000000,
- 0x3befd59100000000, 0xea07329f00000000, 0x993e1a8c00000000,
- 0x48d6fd8200000000, 0x7f4c4aaa00000000, 0xaea4ada400000000,
- 0xdd9d85b700000000, 0x0c7562b900000000, 0x8471301e00000000,
- 0x5599d71000000000, 0x26a0ff0300000000, 0xf748180d00000000,
- 0xc0d2af2500000000, 0x113a482b00000000, 0x6203603800000000,
- 0xb3eb873600000000, 0x0c370f6900000000, 0xdddfe86700000000,
- 0xaee6c07400000000, 0x7f0e277a00000000, 0x4894905200000000,
- 0x997c775c00000000, 0xea455f4f00000000, 0x3badb84100000000,
- 0x94fc4ef000000000, 0x4514a9fe00000000, 0x362d81ed00000000,
- 0xe7c566e300000000, 0xd05fd1cb00000000, 0x01b736c500000000,
- 0x728e1ed600000000, 0xa366f9d800000000, 0x1cba718700000000,
- 0xcd52968900000000, 0xbe6bbe9a00000000, 0x6f83599400000000,
- 0x5819eebc00000000, 0x89f109b200000000, 0xfac821a100000000,
- 0x2b20c6af00000000, 0xe56dbc1900000000, 0x34855b1700000000,
- 0x47bc730400000000, 0x9654940a00000000, 0xa1ce232200000000,
- 0x7026c42c00000000, 0x031fec3f00000000, 0xd2f70b3100000000,
- 0x6d2b836e00000000, 0xbcc3646000000000, 0xcffa4c7300000000,
- 0x1e12ab7d00000000, 0x29881c5500000000, 0xf860fb5b00000000,
- 0x8b59d34800000000, 0x5ab1344600000000, 0xf5e0c2f700000000,
- 0x240825f900000000, 0x57310dea00000000, 0x86d9eae400000000,
- 0xb1435dcc00000000, 0x60abbac200000000, 0x139292d100000000,
- 0xc27a75df00000000, 0x7da6fd8000000000, 0xac4e1a8e00000000,
- 0xdf77329d00000000, 0x0e9fd59300000000, 0x390562bb00000000,
- 0xe8ed85b500000000, 0x9bd4ada600000000, 0x4a3c4aa800000000,
- 0x4649281100000000, 0x97a1cf1f00000000, 0xe498e70c00000000,
- 0x3570000200000000, 0x02eab72a00000000, 0xd302502400000000,
- 0xa03b783700000000, 0x71d39f3900000000, 0xce0f176600000000,
- 0x1fe7f06800000000, 0x6cded87b00000000, 0xbd363f7500000000,
- 0x8aac885d00000000, 0x5b446f5300000000, 0x287d474000000000,
- 0xf995a04e00000000, 0x56c456ff00000000, 0x872cb1f100000000,
- 0xf41599e200000000, 0x25fd7eec00000000, 0x1267c9c400000000,
- 0xc38f2eca00000000, 0xb0b606d900000000, 0x615ee1d700000000,
- 0xde82698800000000, 0x0f6a8e8600000000, 0x7c53a69500000000,
- 0xadbb419b00000000, 0x9a21f6b300000000, 0x4bc911bd00000000,
- 0x38f039ae00000000, 0xe918dea000000000, 0x2755a41600000000,
- 0xf6bd431800000000, 0x85846b0b00000000, 0x546c8c0500000000,
- 0x63f63b2d00000000, 0xb21edc2300000000, 0xc127f43000000000,
- 0x10cf133e00000000, 0xaf139b6100000000, 0x7efb7c6f00000000,
- 0x0dc2547c00000000, 0xdc2ab37200000000, 0xebb0045a00000000,
- 0x3a58e35400000000, 0x4961cb4700000000, 0x98892c4900000000,
- 0x37d8daf800000000, 0xe6303df600000000, 0x950915e500000000,
- 0x44e1f2eb00000000, 0x737b45c300000000, 0xa293a2cd00000000,
- 0xd1aa8ade00000000, 0x00426dd000000000, 0xbf9ee58f00000000,
- 0x6e76028100000000, 0x1d4f2a9200000000, 0xcca7cd9c00000000,
- 0xfb3d7ab400000000, 0x2ad59dba00000000, 0x59ecb5a900000000,
- 0x880452a700000000},
- {0x0000000000000000, 0xaa05daf100000000, 0x150dc53800000000,
- 0xbf081fc900000000, 0x2a1a8a7100000000, 0x801f508000000000,
- 0x3f174f4900000000, 0x951295b800000000, 0x543414e300000000,
- 0xfe31ce1200000000, 0x4139d1db00000000, 0xeb3c0b2a00000000,
- 0x7e2e9e9200000000, 0xd42b446300000000, 0x6b235baa00000000,
- 0xc126815b00000000, 0xe96e591d00000000, 0x436b83ec00000000,
- 0xfc639c2500000000, 0x566646d400000000, 0xc374d36c00000000,
- 0x6971099d00000000, 0xd679165400000000, 0x7c7ccca500000000,
- 0xbd5a4dfe00000000, 0x175f970f00000000, 0xa85788c600000000,
- 0x0252523700000000, 0x9740c78f00000000, 0x3d451d7e00000000,
- 0x824d02b700000000, 0x2848d84600000000, 0xd2ddb23a00000000,
- 0x78d868cb00000000, 0xc7d0770200000000, 0x6dd5adf300000000,
- 0xf8c7384b00000000, 0x52c2e2ba00000000, 0xedcafd7300000000,
- 0x47cf278200000000, 0x86e9a6d900000000, 0x2cec7c2800000000,
- 0x93e463e100000000, 0x39e1b91000000000, 0xacf32ca800000000,
- 0x06f6f65900000000, 0xb9fee99000000000, 0x13fb336100000000,
- 0x3bb3eb2700000000, 0x91b631d600000000, 0x2ebe2e1f00000000,
- 0x84bbf4ee00000000, 0x11a9615600000000, 0xbbacbba700000000,
- 0x04a4a46e00000000, 0xaea17e9f00000000, 0x6f87ffc400000000,
- 0xc582253500000000, 0x7a8a3afc00000000, 0xd08fe00d00000000,
- 0x459d75b500000000, 0xef98af4400000000, 0x5090b08d00000000,
- 0xfa956a7c00000000, 0xa4bb657500000000, 0x0ebebf8400000000,
- 0xb1b6a04d00000000, 0x1bb37abc00000000, 0x8ea1ef0400000000,
- 0x24a435f500000000, 0x9bac2a3c00000000, 0x31a9f0cd00000000,
- 0xf08f719600000000, 0x5a8aab6700000000, 0xe582b4ae00000000,
- 0x4f876e5f00000000, 0xda95fbe700000000, 0x7090211600000000,
- 0xcf983edf00000000, 0x659de42e00000000, 0x4dd53c6800000000,
- 0xe7d0e69900000000, 0x58d8f95000000000, 0xf2dd23a100000000,
- 0x67cfb61900000000, 0xcdca6ce800000000, 0x72c2732100000000,
- 0xd8c7a9d000000000, 0x19e1288b00000000, 0xb3e4f27a00000000,
- 0x0cecedb300000000, 0xa6e9374200000000, 0x33fba2fa00000000,
- 0x99fe780b00000000, 0x26f667c200000000, 0x8cf3bd3300000000,
- 0x7666d74f00000000, 0xdc630dbe00000000, 0x636b127700000000,
- 0xc96ec88600000000, 0x5c7c5d3e00000000, 0xf67987cf00000000,
- 0x4971980600000000, 0xe37442f700000000, 0x2252c3ac00000000,
- 0x8857195d00000000, 0x375f069400000000, 0x9d5adc6500000000,
- 0x084849dd00000000, 0xa24d932c00000000, 0x1d458ce500000000,
- 0xb740561400000000, 0x9f088e5200000000, 0x350d54a300000000,
- 0x8a054b6a00000000, 0x2000919b00000000, 0xb512042300000000,
- 0x1f17ded200000000, 0xa01fc11b00000000, 0x0a1a1bea00000000,
- 0xcb3c9ab100000000, 0x6139404000000000, 0xde315f8900000000,
- 0x7434857800000000, 0xe12610c000000000, 0x4b23ca3100000000,
- 0xf42bd5f800000000, 0x5e2e0f0900000000, 0x4877cbea00000000,
- 0xe272111b00000000, 0x5d7a0ed200000000, 0xf77fd42300000000,
- 0x626d419b00000000, 0xc8689b6a00000000, 0x776084a300000000,
- 0xdd655e5200000000, 0x1c43df0900000000, 0xb64605f800000000,
- 0x094e1a3100000000, 0xa34bc0c000000000, 0x3659557800000000,
- 0x9c5c8f8900000000, 0x2354904000000000, 0x89514ab100000000,
- 0xa11992f700000000, 0x0b1c480600000000, 0xb41457cf00000000,
- 0x1e118d3e00000000, 0x8b03188600000000, 0x2106c27700000000,
- 0x9e0eddbe00000000, 0x340b074f00000000, 0xf52d861400000000,
- 0x5f285ce500000000, 0xe020432c00000000, 0x4a2599dd00000000,
- 0xdf370c6500000000, 0x7532d69400000000, 0xca3ac95d00000000,
- 0x603f13ac00000000, 0x9aaa79d000000000, 0x30afa32100000000,
- 0x8fa7bce800000000, 0x25a2661900000000, 0xb0b0f3a100000000,
- 0x1ab5295000000000, 0xa5bd369900000000, 0x0fb8ec6800000000,
- 0xce9e6d3300000000, 0x649bb7c200000000, 0xdb93a80b00000000,
- 0x719672fa00000000, 0xe484e74200000000, 0x4e813db300000000,
- 0xf189227a00000000, 0x5b8cf88b00000000, 0x73c420cd00000000,
- 0xd9c1fa3c00000000, 0x66c9e5f500000000, 0xcccc3f0400000000,
- 0x59deaabc00000000, 0xf3db704d00000000, 0x4cd36f8400000000,
- 0xe6d6b57500000000, 0x27f0342e00000000, 0x8df5eedf00000000,
- 0x32fdf11600000000, 0x98f82be700000000, 0x0deabe5f00000000,
- 0xa7ef64ae00000000, 0x18e77b6700000000, 0xb2e2a19600000000,
- 0xecccae9f00000000, 0x46c9746e00000000, 0xf9c16ba700000000,
- 0x53c4b15600000000, 0xc6d624ee00000000, 0x6cd3fe1f00000000,
- 0xd3dbe1d600000000, 0x79de3b2700000000, 0xb8f8ba7c00000000,
- 0x12fd608d00000000, 0xadf57f4400000000, 0x07f0a5b500000000,
- 0x92e2300d00000000, 0x38e7eafc00000000, 0x87eff53500000000,
- 0x2dea2fc400000000, 0x05a2f78200000000, 0xafa72d7300000000,
- 0x10af32ba00000000, 0xbaaae84b00000000, 0x2fb87df300000000,
- 0x85bda70200000000, 0x3ab5b8cb00000000, 0x90b0623a00000000,
- 0x5196e36100000000, 0xfb93399000000000, 0x449b265900000000,
- 0xee9efca800000000, 0x7b8c691000000000, 0xd189b3e100000000,
- 0x6e81ac2800000000, 0xc48476d900000000, 0x3e111ca500000000,
- 0x9414c65400000000, 0x2b1cd99d00000000, 0x8119036c00000000,
- 0x140b96d400000000, 0xbe0e4c2500000000, 0x010653ec00000000,
- 0xab03891d00000000, 0x6a25084600000000, 0xc020d2b700000000,
- 0x7f28cd7e00000000, 0xd52d178f00000000, 0x403f823700000000,
- 0xea3a58c600000000, 0x5532470f00000000, 0xff379dfe00000000,
- 0xd77f45b800000000, 0x7d7a9f4900000000, 0xc272808000000000,
- 0x68775a7100000000, 0xfd65cfc900000000, 0x5760153800000000,
- 0xe8680af100000000, 0x426dd00000000000, 0x834b515b00000000,
- 0x294e8baa00000000, 0x9646946300000000, 0x3c434e9200000000,
- 0xa951db2a00000000, 0x035401db00000000, 0xbc5c1e1200000000,
- 0x1659c4e300000000}};
-
-#else /* W == 4 */
-
-local const z_crc_t FAR crc_braid_table[][256] = {
- {0x00000000, 0xae689191, 0x87a02563, 0x29c8b4f2, 0xd4314c87,
- 0x7a59dd16, 0x539169e4, 0xfdf9f875, 0x73139f4f, 0xdd7b0ede,
- 0xf4b3ba2c, 0x5adb2bbd, 0xa722d3c8, 0x094a4259, 0x2082f6ab,
- 0x8eea673a, 0xe6273e9e, 0x484faf0f, 0x61871bfd, 0xcfef8a6c,
- 0x32167219, 0x9c7ee388, 0xb5b6577a, 0x1bdec6eb, 0x9534a1d1,
- 0x3b5c3040, 0x129484b2, 0xbcfc1523, 0x4105ed56, 0xef6d7cc7,
- 0xc6a5c835, 0x68cd59a4, 0x173f7b7d, 0xb957eaec, 0x909f5e1e,
- 0x3ef7cf8f, 0xc30e37fa, 0x6d66a66b, 0x44ae1299, 0xeac68308,
- 0x642ce432, 0xca4475a3, 0xe38cc151, 0x4de450c0, 0xb01da8b5,
- 0x1e753924, 0x37bd8dd6, 0x99d51c47, 0xf11845e3, 0x5f70d472,
- 0x76b86080, 0xd8d0f111, 0x25290964, 0x8b4198f5, 0xa2892c07,
- 0x0ce1bd96, 0x820bdaac, 0x2c634b3d, 0x05abffcf, 0xabc36e5e,
- 0x563a962b, 0xf85207ba, 0xd19ab348, 0x7ff222d9, 0x2e7ef6fa,
- 0x8016676b, 0xa9ded399, 0x07b64208, 0xfa4fba7d, 0x54272bec,
- 0x7def9f1e, 0xd3870e8f, 0x5d6d69b5, 0xf305f824, 0xdacd4cd6,
- 0x74a5dd47, 0x895c2532, 0x2734b4a3, 0x0efc0051, 0xa09491c0,
- 0xc859c864, 0x663159f5, 0x4ff9ed07, 0xe1917c96, 0x1c6884e3,
- 0xb2001572, 0x9bc8a180, 0x35a03011, 0xbb4a572b, 0x1522c6ba,
- 0x3cea7248, 0x9282e3d9, 0x6f7b1bac, 0xc1138a3d, 0xe8db3ecf,
- 0x46b3af5e, 0x39418d87, 0x97291c16, 0xbee1a8e4, 0x10893975,
- 0xed70c100, 0x43185091, 0x6ad0e463, 0xc4b875f2, 0x4a5212c8,
- 0xe43a8359, 0xcdf237ab, 0x639aa63a, 0x9e635e4f, 0x300bcfde,
- 0x19c37b2c, 0xb7abeabd, 0xdf66b319, 0x710e2288, 0x58c6967a,
- 0xf6ae07eb, 0x0b57ff9e, 0xa53f6e0f, 0x8cf7dafd, 0x229f4b6c,
- 0xac752c56, 0x021dbdc7, 0x2bd50935, 0x85bd98a4, 0x784460d1,
- 0xd62cf140, 0xffe445b2, 0x518cd423, 0x5cfdedf4, 0xf2957c65,
- 0xdb5dc897, 0x75355906, 0x88cca173, 0x26a430e2, 0x0f6c8410,
- 0xa1041581, 0x2fee72bb, 0x8186e32a, 0xa84e57d8, 0x0626c649,
- 0xfbdf3e3c, 0x55b7afad, 0x7c7f1b5f, 0xd2178ace, 0xbadad36a,
- 0x14b242fb, 0x3d7af609, 0x93126798, 0x6eeb9fed, 0xc0830e7c,
- 0xe94bba8e, 0x47232b1f, 0xc9c94c25, 0x67a1ddb4, 0x4e696946,
- 0xe001f8d7, 0x1df800a2, 0xb3909133, 0x9a5825c1, 0x3430b450,
- 0x4bc29689, 0xe5aa0718, 0xcc62b3ea, 0x620a227b, 0x9ff3da0e,
- 0x319b4b9f, 0x1853ff6d, 0xb63b6efc, 0x38d109c6, 0x96b99857,
- 0xbf712ca5, 0x1119bd34, 0xece04541, 0x4288d4d0, 0x6b406022,
- 0xc528f1b3, 0xade5a817, 0x038d3986, 0x2a458d74, 0x842d1ce5,
- 0x79d4e490, 0xd7bc7501, 0xfe74c1f3, 0x501c5062, 0xdef63758,
- 0x709ea6c9, 0x5956123b, 0xf73e83aa, 0x0ac77bdf, 0xa4afea4e,
- 0x8d675ebc, 0x230fcf2d, 0x72831b0e, 0xdceb8a9f, 0xf5233e6d,
- 0x5b4baffc, 0xa6b25789, 0x08dac618, 0x211272ea, 0x8f7ae37b,
- 0x01908441, 0xaff815d0, 0x8630a122, 0x285830b3, 0xd5a1c8c6,
- 0x7bc95957, 0x5201eda5, 0xfc697c34, 0x94a42590, 0x3accb401,
- 0x130400f3, 0xbd6c9162, 0x40956917, 0xeefdf886, 0xc7354c74,
- 0x695ddde5, 0xe7b7badf, 0x49df2b4e, 0x60179fbc, 0xce7f0e2d,
- 0x3386f658, 0x9dee67c9, 0xb426d33b, 0x1a4e42aa, 0x65bc6073,
- 0xcbd4f1e2, 0xe21c4510, 0x4c74d481, 0xb18d2cf4, 0x1fe5bd65,
- 0x362d0997, 0x98459806, 0x16afff3c, 0xb8c76ead, 0x910fda5f,
- 0x3f674bce, 0xc29eb3bb, 0x6cf6222a, 0x453e96d8, 0xeb560749,
- 0x839b5eed, 0x2df3cf7c, 0x043b7b8e, 0xaa53ea1f, 0x57aa126a,
- 0xf9c283fb, 0xd00a3709, 0x7e62a698, 0xf088c1a2, 0x5ee05033,
- 0x7728e4c1, 0xd9407550, 0x24b98d25, 0x8ad11cb4, 0xa319a846,
- 0x0d7139d7},
- {0x00000000, 0xb9fbdbe8, 0xa886b191, 0x117d6a79, 0x8a7c6563,
- 0x3387be8b, 0x22fad4f2, 0x9b010f1a, 0xcf89cc87, 0x7672176f,
- 0x670f7d16, 0xdef4a6fe, 0x45f5a9e4, 0xfc0e720c, 0xed731875,
- 0x5488c39d, 0x44629f4f, 0xfd9944a7, 0xece42ede, 0x551ff536,
- 0xce1efa2c, 0x77e521c4, 0x66984bbd, 0xdf639055, 0x8beb53c8,
- 0x32108820, 0x236de259, 0x9a9639b1, 0x019736ab, 0xb86ced43,
- 0xa911873a, 0x10ea5cd2, 0x88c53e9e, 0x313ee576, 0x20438f0f,
- 0x99b854e7, 0x02b95bfd, 0xbb428015, 0xaa3fea6c, 0x13c43184,
- 0x474cf219, 0xfeb729f1, 0xefca4388, 0x56319860, 0xcd30977a,
- 0x74cb4c92, 0x65b626eb, 0xdc4dfd03, 0xcca7a1d1, 0x755c7a39,
- 0x64211040, 0xdddacba8, 0x46dbc4b2, 0xff201f5a, 0xee5d7523,
- 0x57a6aecb, 0x032e6d56, 0xbad5b6be, 0xaba8dcc7, 0x1253072f,
- 0x89520835, 0x30a9d3dd, 0x21d4b9a4, 0x982f624c, 0xcafb7b7d,
- 0x7300a095, 0x627dcaec, 0xdb861104, 0x40871e1e, 0xf97cc5f6,
- 0xe801af8f, 0x51fa7467, 0x0572b7fa, 0xbc896c12, 0xadf4066b,
- 0x140fdd83, 0x8f0ed299, 0x36f50971, 0x27886308, 0x9e73b8e0,
- 0x8e99e432, 0x37623fda, 0x261f55a3, 0x9fe48e4b, 0x04e58151,
- 0xbd1e5ab9, 0xac6330c0, 0x1598eb28, 0x411028b5, 0xf8ebf35d,
- 0xe9969924, 0x506d42cc, 0xcb6c4dd6, 0x7297963e, 0x63eafc47,
- 0xda1127af, 0x423e45e3, 0xfbc59e0b, 0xeab8f472, 0x53432f9a,
- 0xc8422080, 0x71b9fb68, 0x60c49111, 0xd93f4af9, 0x8db78964,
- 0x344c528c, 0x253138f5, 0x9ccae31d, 0x07cbec07, 0xbe3037ef,
- 0xaf4d5d96, 0x16b6867e, 0x065cdaac, 0xbfa70144, 0xaeda6b3d,
- 0x1721b0d5, 0x8c20bfcf, 0x35db6427, 0x24a60e5e, 0x9d5dd5b6,
- 0xc9d5162b, 0x702ecdc3, 0x6153a7ba, 0xd8a87c52, 0x43a97348,
- 0xfa52a8a0, 0xeb2fc2d9, 0x52d41931, 0x4e87f0bb, 0xf77c2b53,
- 0xe601412a, 0x5ffa9ac2, 0xc4fb95d8, 0x7d004e30, 0x6c7d2449,
- 0xd586ffa1, 0x810e3c3c, 0x38f5e7d4, 0x29888dad, 0x90735645,
- 0x0b72595f, 0xb28982b7, 0xa3f4e8ce, 0x1a0f3326, 0x0ae56ff4,
- 0xb31eb41c, 0xa263de65, 0x1b98058d, 0x80990a97, 0x3962d17f,
- 0x281fbb06, 0x91e460ee, 0xc56ca373, 0x7c97789b, 0x6dea12e2,
- 0xd411c90a, 0x4f10c610, 0xf6eb1df8, 0xe7967781, 0x5e6dac69,
- 0xc642ce25, 0x7fb915cd, 0x6ec47fb4, 0xd73fa45c, 0x4c3eab46,
- 0xf5c570ae, 0xe4b81ad7, 0x5d43c13f, 0x09cb02a2, 0xb030d94a,
- 0xa14db333, 0x18b668db, 0x83b767c1, 0x3a4cbc29, 0x2b31d650,
- 0x92ca0db8, 0x8220516a, 0x3bdb8a82, 0x2aa6e0fb, 0x935d3b13,
- 0x085c3409, 0xb1a7efe1, 0xa0da8598, 0x19215e70, 0x4da99ded,
- 0xf4524605, 0xe52f2c7c, 0x5cd4f794, 0xc7d5f88e, 0x7e2e2366,
- 0x6f53491f, 0xd6a892f7, 0x847c8bc6, 0x3d87502e, 0x2cfa3a57,
- 0x9501e1bf, 0x0e00eea5, 0xb7fb354d, 0xa6865f34, 0x1f7d84dc,
- 0x4bf54741, 0xf20e9ca9, 0xe373f6d0, 0x5a882d38, 0xc1892222,
- 0x7872f9ca, 0x690f93b3, 0xd0f4485b, 0xc01e1489, 0x79e5cf61,
- 0x6898a518, 0xd1637ef0, 0x4a6271ea, 0xf399aa02, 0xe2e4c07b,
- 0x5b1f1b93, 0x0f97d80e, 0xb66c03e6, 0xa711699f, 0x1eeab277,
- 0x85ebbd6d, 0x3c106685, 0x2d6d0cfc, 0x9496d714, 0x0cb9b558,
- 0xb5426eb0, 0xa43f04c9, 0x1dc4df21, 0x86c5d03b, 0x3f3e0bd3,
- 0x2e4361aa, 0x97b8ba42, 0xc33079df, 0x7acba237, 0x6bb6c84e,
- 0xd24d13a6, 0x494c1cbc, 0xf0b7c754, 0xe1caad2d, 0x583176c5,
- 0x48db2a17, 0xf120f1ff, 0xe05d9b86, 0x59a6406e, 0xc2a74f74,
- 0x7b5c949c, 0x6a21fee5, 0xd3da250d, 0x8752e690, 0x3ea93d78,
- 0x2fd45701, 0x962f8ce9, 0x0d2e83f3, 0xb4d5581b, 0xa5a83262,
- 0x1c53e98a},
- {0x00000000, 0x9d0fe176, 0xe16ec4ad, 0x7c6125db, 0x19ac8f1b,
- 0x84a36e6d, 0xf8c24bb6, 0x65cdaac0, 0x33591e36, 0xae56ff40,
- 0xd237da9b, 0x4f383bed, 0x2af5912d, 0xb7fa705b, 0xcb9b5580,
- 0x5694b4f6, 0x66b23c6c, 0xfbbddd1a, 0x87dcf8c1, 0x1ad319b7,
- 0x7f1eb377, 0xe2115201, 0x9e7077da, 0x037f96ac, 0x55eb225a,
- 0xc8e4c32c, 0xb485e6f7, 0x298a0781, 0x4c47ad41, 0xd1484c37,
- 0xad2969ec, 0x3026889a, 0xcd6478d8, 0x506b99ae, 0x2c0abc75,
- 0xb1055d03, 0xd4c8f7c3, 0x49c716b5, 0x35a6336e, 0xa8a9d218,
- 0xfe3d66ee, 0x63328798, 0x1f53a243, 0x825c4335, 0xe791e9f5,
- 0x7a9e0883, 0x06ff2d58, 0x9bf0cc2e, 0xabd644b4, 0x36d9a5c2,
- 0x4ab88019, 0xd7b7616f, 0xb27acbaf, 0x2f752ad9, 0x53140f02,
- 0xce1bee74, 0x988f5a82, 0x0580bbf4, 0x79e19e2f, 0xe4ee7f59,
- 0x8123d599, 0x1c2c34ef, 0x604d1134, 0xfd42f042, 0x41b9f7f1,
- 0xdcb61687, 0xa0d7335c, 0x3dd8d22a, 0x581578ea, 0xc51a999c,
- 0xb97bbc47, 0x24745d31, 0x72e0e9c7, 0xefef08b1, 0x938e2d6a,
- 0x0e81cc1c, 0x6b4c66dc, 0xf64387aa, 0x8a22a271, 0x172d4307,
- 0x270bcb9d, 0xba042aeb, 0xc6650f30, 0x5b6aee46, 0x3ea74486,
- 0xa3a8a5f0, 0xdfc9802b, 0x42c6615d, 0x1452d5ab, 0x895d34dd,
- 0xf53c1106, 0x6833f070, 0x0dfe5ab0, 0x90f1bbc6, 0xec909e1d,
- 0x719f7f6b, 0x8cdd8f29, 0x11d26e5f, 0x6db34b84, 0xf0bcaaf2,
- 0x95710032, 0x087ee144, 0x741fc49f, 0xe91025e9, 0xbf84911f,
- 0x228b7069, 0x5eea55b2, 0xc3e5b4c4, 0xa6281e04, 0x3b27ff72,
- 0x4746daa9, 0xda493bdf, 0xea6fb345, 0x77605233, 0x0b0177e8,
- 0x960e969e, 0xf3c33c5e, 0x6eccdd28, 0x12adf8f3, 0x8fa21985,
- 0xd936ad73, 0x44394c05, 0x385869de, 0xa55788a8, 0xc09a2268,
- 0x5d95c31e, 0x21f4e6c5, 0xbcfb07b3, 0x8373efe2, 0x1e7c0e94,
- 0x621d2b4f, 0xff12ca39, 0x9adf60f9, 0x07d0818f, 0x7bb1a454,
- 0xe6be4522, 0xb02af1d4, 0x2d2510a2, 0x51443579, 0xcc4bd40f,
- 0xa9867ecf, 0x34899fb9, 0x48e8ba62, 0xd5e75b14, 0xe5c1d38e,
- 0x78ce32f8, 0x04af1723, 0x99a0f655, 0xfc6d5c95, 0x6162bde3,
- 0x1d039838, 0x800c794e, 0xd698cdb8, 0x4b972cce, 0x37f60915,
- 0xaaf9e863, 0xcf3442a3, 0x523ba3d5, 0x2e5a860e, 0xb3556778,
- 0x4e17973a, 0xd318764c, 0xaf795397, 0x3276b2e1, 0x57bb1821,
- 0xcab4f957, 0xb6d5dc8c, 0x2bda3dfa, 0x7d4e890c, 0xe041687a,
- 0x9c204da1, 0x012facd7, 0x64e20617, 0xf9ede761, 0x858cc2ba,
- 0x188323cc, 0x28a5ab56, 0xb5aa4a20, 0xc9cb6ffb, 0x54c48e8d,
- 0x3109244d, 0xac06c53b, 0xd067e0e0, 0x4d680196, 0x1bfcb560,
- 0x86f35416, 0xfa9271cd, 0x679d90bb, 0x02503a7b, 0x9f5fdb0d,
- 0xe33efed6, 0x7e311fa0, 0xc2ca1813, 0x5fc5f965, 0x23a4dcbe,
- 0xbeab3dc8, 0xdb669708, 0x4669767e, 0x3a0853a5, 0xa707b2d3,
- 0xf1930625, 0x6c9ce753, 0x10fdc288, 0x8df223fe, 0xe83f893e,
- 0x75306848, 0x09514d93, 0x945eace5, 0xa478247f, 0x3977c509,
- 0x4516e0d2, 0xd81901a4, 0xbdd4ab64, 0x20db4a12, 0x5cba6fc9,
- 0xc1b58ebf, 0x97213a49, 0x0a2edb3f, 0x764ffee4, 0xeb401f92,
- 0x8e8db552, 0x13825424, 0x6fe371ff, 0xf2ec9089, 0x0fae60cb,
- 0x92a181bd, 0xeec0a466, 0x73cf4510, 0x1602efd0, 0x8b0d0ea6,
- 0xf76c2b7d, 0x6a63ca0b, 0x3cf77efd, 0xa1f89f8b, 0xdd99ba50,
- 0x40965b26, 0x255bf1e6, 0xb8541090, 0xc435354b, 0x593ad43d,
- 0x691c5ca7, 0xf413bdd1, 0x8872980a, 0x157d797c, 0x70b0d3bc,
- 0xedbf32ca, 0x91de1711, 0x0cd1f667, 0x5a454291, 0xc74aa3e7,
- 0xbb2b863c, 0x2624674a, 0x43e9cd8a, 0xdee62cfc, 0xa2870927,
- 0x3f88e851},
- {0x00000000, 0xdd96d985, 0x605cb54b, 0xbdca6cce, 0xc0b96a96,
- 0x1d2fb313, 0xa0e5dfdd, 0x7d730658, 0x5a03d36d, 0x87950ae8,
- 0x3a5f6626, 0xe7c9bfa3, 0x9abab9fb, 0x472c607e, 0xfae60cb0,
- 0x2770d535, 0xb407a6da, 0x69917f5f, 0xd45b1391, 0x09cdca14,
- 0x74becc4c, 0xa92815c9, 0x14e27907, 0xc974a082, 0xee0475b7,
- 0x3392ac32, 0x8e58c0fc, 0x53ce1979, 0x2ebd1f21, 0xf32bc6a4,
- 0x4ee1aa6a, 0x937773ef, 0xb37e4bf5, 0x6ee89270, 0xd322febe,
- 0x0eb4273b, 0x73c72163, 0xae51f8e6, 0x139b9428, 0xce0d4dad,
- 0xe97d9898, 0x34eb411d, 0x89212dd3, 0x54b7f456, 0x29c4f20e,
- 0xf4522b8b, 0x49984745, 0x940e9ec0, 0x0779ed2f, 0xdaef34aa,
- 0x67255864, 0xbab381e1, 0xc7c087b9, 0x1a565e3c, 0xa79c32f2,
- 0x7a0aeb77, 0x5d7a3e42, 0x80ece7c7, 0x3d268b09, 0xe0b0528c,
- 0x9dc354d4, 0x40558d51, 0xfd9fe19f, 0x2009381a, 0xbd8d91ab,
- 0x601b482e, 0xddd124e0, 0x0047fd65, 0x7d34fb3d, 0xa0a222b8,
- 0x1d684e76, 0xc0fe97f3, 0xe78e42c6, 0x3a189b43, 0x87d2f78d,
- 0x5a442e08, 0x27372850, 0xfaa1f1d5, 0x476b9d1b, 0x9afd449e,
- 0x098a3771, 0xd41ceef4, 0x69d6823a, 0xb4405bbf, 0xc9335de7,
- 0x14a58462, 0xa96fe8ac, 0x74f93129, 0x5389e41c, 0x8e1f3d99,
- 0x33d55157, 0xee4388d2, 0x93308e8a, 0x4ea6570f, 0xf36c3bc1,
- 0x2efae244, 0x0ef3da5e, 0xd36503db, 0x6eaf6f15, 0xb339b690,
- 0xce4ab0c8, 0x13dc694d, 0xae160583, 0x7380dc06, 0x54f00933,
- 0x8966d0b6, 0x34acbc78, 0xe93a65fd, 0x944963a5, 0x49dfba20,
- 0xf415d6ee, 0x29830f6b, 0xbaf47c84, 0x6762a501, 0xdaa8c9cf,
- 0x073e104a, 0x7a4d1612, 0xa7dbcf97, 0x1a11a359, 0xc7877adc,
- 0xe0f7afe9, 0x3d61766c, 0x80ab1aa2, 0x5d3dc327, 0x204ec57f,
- 0xfdd81cfa, 0x40127034, 0x9d84a9b1, 0xa06a2517, 0x7dfcfc92,
- 0xc036905c, 0x1da049d9, 0x60d34f81, 0xbd459604, 0x008ffaca,
- 0xdd19234f, 0xfa69f67a, 0x27ff2fff, 0x9a354331, 0x47a39ab4,
- 0x3ad09cec, 0xe7464569, 0x5a8c29a7, 0x871af022, 0x146d83cd,
- 0xc9fb5a48, 0x74313686, 0xa9a7ef03, 0xd4d4e95b, 0x094230de,
- 0xb4885c10, 0x691e8595, 0x4e6e50a0, 0x93f88925, 0x2e32e5eb,
- 0xf3a43c6e, 0x8ed73a36, 0x5341e3b3, 0xee8b8f7d, 0x331d56f8,
- 0x13146ee2, 0xce82b767, 0x7348dba9, 0xaede022c, 0xd3ad0474,
- 0x0e3bddf1, 0xb3f1b13f, 0x6e6768ba, 0x4917bd8f, 0x9481640a,
- 0x294b08c4, 0xf4ddd141, 0x89aed719, 0x54380e9c, 0xe9f26252,
- 0x3464bbd7, 0xa713c838, 0x7a8511bd, 0xc74f7d73, 0x1ad9a4f6,
- 0x67aaa2ae, 0xba3c7b2b, 0x07f617e5, 0xda60ce60, 0xfd101b55,
- 0x2086c2d0, 0x9d4cae1e, 0x40da779b, 0x3da971c3, 0xe03fa846,
- 0x5df5c488, 0x80631d0d, 0x1de7b4bc, 0xc0716d39, 0x7dbb01f7,
- 0xa02dd872, 0xdd5ede2a, 0x00c807af, 0xbd026b61, 0x6094b2e4,
- 0x47e467d1, 0x9a72be54, 0x27b8d29a, 0xfa2e0b1f, 0x875d0d47,
- 0x5acbd4c2, 0xe701b80c, 0x3a976189, 0xa9e01266, 0x7476cbe3,
- 0xc9bca72d, 0x142a7ea8, 0x695978f0, 0xb4cfa175, 0x0905cdbb,
- 0xd493143e, 0xf3e3c10b, 0x2e75188e, 0x93bf7440, 0x4e29adc5,
- 0x335aab9d, 0xeecc7218, 0x53061ed6, 0x8e90c753, 0xae99ff49,
- 0x730f26cc, 0xcec54a02, 0x13539387, 0x6e2095df, 0xb3b64c5a,
- 0x0e7c2094, 0xd3eaf911, 0xf49a2c24, 0x290cf5a1, 0x94c6996f,
- 0x495040ea, 0x342346b2, 0xe9b59f37, 0x547ff3f9, 0x89e92a7c,
- 0x1a9e5993, 0xc7088016, 0x7ac2ecd8, 0xa754355d, 0xda273305,
- 0x07b1ea80, 0xba7b864e, 0x67ed5fcb, 0x409d8afe, 0x9d0b537b,
- 0x20c13fb5, 0xfd57e630, 0x8024e068, 0x5db239ed, 0xe0785523,
- 0x3dee8ca6}};
-
-local const z_word_t FAR crc_braid_big_table[][256] = {
- {0x00000000, 0x85d996dd, 0x4bb55c60, 0xce6ccabd, 0x966ab9c0,
- 0x13b32f1d, 0xdddfe5a0, 0x5806737d, 0x6dd3035a, 0xe80a9587,
- 0x26665f3a, 0xa3bfc9e7, 0xfbb9ba9a, 0x7e602c47, 0xb00ce6fa,
- 0x35d57027, 0xdaa607b4, 0x5f7f9169, 0x91135bd4, 0x14cacd09,
- 0x4cccbe74, 0xc91528a9, 0x0779e214, 0x82a074c9, 0xb77504ee,
- 0x32ac9233, 0xfcc0588e, 0x7919ce53, 0x211fbd2e, 0xa4c62bf3,
- 0x6aaae14e, 0xef737793, 0xf54b7eb3, 0x7092e86e, 0xbefe22d3,
- 0x3b27b40e, 0x6321c773, 0xe6f851ae, 0x28949b13, 0xad4d0dce,
- 0x98987de9, 0x1d41eb34, 0xd32d2189, 0x56f4b754, 0x0ef2c429,
- 0x8b2b52f4, 0x45479849, 0xc09e0e94, 0x2fed7907, 0xaa34efda,
- 0x64582567, 0xe181b3ba, 0xb987c0c7, 0x3c5e561a, 0xf2329ca7,
- 0x77eb0a7a, 0x423e7a5d, 0xc7e7ec80, 0x098b263d, 0x8c52b0e0,
- 0xd454c39d, 0x518d5540, 0x9fe19ffd, 0x1a380920, 0xab918dbd,
- 0x2e481b60, 0xe024d1dd, 0x65fd4700, 0x3dfb347d, 0xb822a2a0,
- 0x764e681d, 0xf397fec0, 0xc6428ee7, 0x439b183a, 0x8df7d287,
- 0x082e445a, 0x50283727, 0xd5f1a1fa, 0x1b9d6b47, 0x9e44fd9a,
- 0x71378a09, 0xf4ee1cd4, 0x3a82d669, 0xbf5b40b4, 0xe75d33c9,
- 0x6284a514, 0xace86fa9, 0x2931f974, 0x1ce48953, 0x993d1f8e,
- 0x5751d533, 0xd28843ee, 0x8a8e3093, 0x0f57a64e, 0xc13b6cf3,
- 0x44e2fa2e, 0x5edaf30e, 0xdb0365d3, 0x156faf6e, 0x90b639b3,
- 0xc8b04ace, 0x4d69dc13, 0x830516ae, 0x06dc8073, 0x3309f054,
- 0xb6d06689, 0x78bcac34, 0xfd653ae9, 0xa5634994, 0x20badf49,
- 0xeed615f4, 0x6b0f8329, 0x847cf4ba, 0x01a56267, 0xcfc9a8da,
- 0x4a103e07, 0x12164d7a, 0x97cfdba7, 0x59a3111a, 0xdc7a87c7,
- 0xe9aff7e0, 0x6c76613d, 0xa21aab80, 0x27c33d5d, 0x7fc54e20,
- 0xfa1cd8fd, 0x34701240, 0xb1a9849d, 0x17256aa0, 0x92fcfc7d,
- 0x5c9036c0, 0xd949a01d, 0x814fd360, 0x049645bd, 0xcafa8f00,
- 0x4f2319dd, 0x7af669fa, 0xff2fff27, 0x3143359a, 0xb49aa347,
- 0xec9cd03a, 0x694546e7, 0xa7298c5a, 0x22f01a87, 0xcd836d14,
- 0x485afbc9, 0x86363174, 0x03efa7a9, 0x5be9d4d4, 0xde304209,
- 0x105c88b4, 0x95851e69, 0xa0506e4e, 0x2589f893, 0xebe5322e,
- 0x6e3ca4f3, 0x363ad78e, 0xb3e34153, 0x7d8f8bee, 0xf8561d33,
- 0xe26e1413, 0x67b782ce, 0xa9db4873, 0x2c02deae, 0x7404add3,
- 0xf1dd3b0e, 0x3fb1f1b3, 0xba68676e, 0x8fbd1749, 0x0a648194,
- 0xc4084b29, 0x41d1ddf4, 0x19d7ae89, 0x9c0e3854, 0x5262f2e9,
- 0xd7bb6434, 0x38c813a7, 0xbd11857a, 0x737d4fc7, 0xf6a4d91a,
- 0xaea2aa67, 0x2b7b3cba, 0xe517f607, 0x60ce60da, 0x551b10fd,
- 0xd0c28620, 0x1eae4c9d, 0x9b77da40, 0xc371a93d, 0x46a83fe0,
- 0x88c4f55d, 0x0d1d6380, 0xbcb4e71d, 0x396d71c0, 0xf701bb7d,
- 0x72d82da0, 0x2ade5edd, 0xaf07c800, 0x616b02bd, 0xe4b29460,
- 0xd167e447, 0x54be729a, 0x9ad2b827, 0x1f0b2efa, 0x470d5d87,
- 0xc2d4cb5a, 0x0cb801e7, 0x8961973a, 0x6612e0a9, 0xe3cb7674,
- 0x2da7bcc9, 0xa87e2a14, 0xf0785969, 0x75a1cfb4, 0xbbcd0509,
- 0x3e1493d4, 0x0bc1e3f3, 0x8e18752e, 0x4074bf93, 0xc5ad294e,
- 0x9dab5a33, 0x1872ccee, 0xd61e0653, 0x53c7908e, 0x49ff99ae,
- 0xcc260f73, 0x024ac5ce, 0x87935313, 0xdf95206e, 0x5a4cb6b3,
- 0x94207c0e, 0x11f9ead3, 0x242c9af4, 0xa1f50c29, 0x6f99c694,
- 0xea405049, 0xb2462334, 0x379fb5e9, 0xf9f37f54, 0x7c2ae989,
- 0x93599e1a, 0x168008c7, 0xd8ecc27a, 0x5d3554a7, 0x053327da,
- 0x80eab107, 0x4e867bba, 0xcb5fed67, 0xfe8a9d40, 0x7b530b9d,
- 0xb53fc120, 0x30e657fd, 0x68e02480, 0xed39b25d, 0x235578e0,
- 0xa68cee3d},
- {0x00000000, 0x76e10f9d, 0xadc46ee1, 0xdb25617c, 0x1b8fac19,
- 0x6d6ea384, 0xb64bc2f8, 0xc0aacd65, 0x361e5933, 0x40ff56ae,
- 0x9bda37d2, 0xed3b384f, 0x2d91f52a, 0x5b70fab7, 0x80559bcb,
- 0xf6b49456, 0x6c3cb266, 0x1addbdfb, 0xc1f8dc87, 0xb719d31a,
- 0x77b31e7f, 0x015211e2, 0xda77709e, 0xac967f03, 0x5a22eb55,
- 0x2cc3e4c8, 0xf7e685b4, 0x81078a29, 0x41ad474c, 0x374c48d1,
- 0xec6929ad, 0x9a882630, 0xd87864cd, 0xae996b50, 0x75bc0a2c,
- 0x035d05b1, 0xc3f7c8d4, 0xb516c749, 0x6e33a635, 0x18d2a9a8,
- 0xee663dfe, 0x98873263, 0x43a2531f, 0x35435c82, 0xf5e991e7,
- 0x83089e7a, 0x582dff06, 0x2eccf09b, 0xb444d6ab, 0xc2a5d936,
- 0x1980b84a, 0x6f61b7d7, 0xafcb7ab2, 0xd92a752f, 0x020f1453,
- 0x74ee1bce, 0x825a8f98, 0xf4bb8005, 0x2f9ee179, 0x597feee4,
- 0x99d52381, 0xef342c1c, 0x34114d60, 0x42f042fd, 0xf1f7b941,
- 0x8716b6dc, 0x5c33d7a0, 0x2ad2d83d, 0xea781558, 0x9c991ac5,
- 0x47bc7bb9, 0x315d7424, 0xc7e9e072, 0xb108efef, 0x6a2d8e93,
- 0x1ccc810e, 0xdc664c6b, 0xaa8743f6, 0x71a2228a, 0x07432d17,
- 0x9dcb0b27, 0xeb2a04ba, 0x300f65c6, 0x46ee6a5b, 0x8644a73e,
- 0xf0a5a8a3, 0x2b80c9df, 0x5d61c642, 0xabd55214, 0xdd345d89,
- 0x06113cf5, 0x70f03368, 0xb05afe0d, 0xc6bbf190, 0x1d9e90ec,
- 0x6b7f9f71, 0x298fdd8c, 0x5f6ed211, 0x844bb36d, 0xf2aabcf0,
- 0x32007195, 0x44e17e08, 0x9fc41f74, 0xe92510e9, 0x1f9184bf,
- 0x69708b22, 0xb255ea5e, 0xc4b4e5c3, 0x041e28a6, 0x72ff273b,
- 0xa9da4647, 0xdf3b49da, 0x45b36fea, 0x33526077, 0xe877010b,
- 0x9e960e96, 0x5e3cc3f3, 0x28ddcc6e, 0xf3f8ad12, 0x8519a28f,
- 0x73ad36d9, 0x054c3944, 0xde695838, 0xa88857a5, 0x68229ac0,
- 0x1ec3955d, 0xc5e6f421, 0xb307fbbc, 0xe2ef7383, 0x940e7c1e,
- 0x4f2b1d62, 0x39ca12ff, 0xf960df9a, 0x8f81d007, 0x54a4b17b,
- 0x2245bee6, 0xd4f12ab0, 0xa210252d, 0x79354451, 0x0fd44bcc,
- 0xcf7e86a9, 0xb99f8934, 0x62bae848, 0x145be7d5, 0x8ed3c1e5,
- 0xf832ce78, 0x2317af04, 0x55f6a099, 0x955c6dfc, 0xe3bd6261,
- 0x3898031d, 0x4e790c80, 0xb8cd98d6, 0xce2c974b, 0x1509f637,
- 0x63e8f9aa, 0xa34234cf, 0xd5a33b52, 0x0e865a2e, 0x786755b3,
- 0x3a97174e, 0x4c7618d3, 0x975379af, 0xe1b27632, 0x2118bb57,
- 0x57f9b4ca, 0x8cdcd5b6, 0xfa3dda2b, 0x0c894e7d, 0x7a6841e0,
- 0xa14d209c, 0xd7ac2f01, 0x1706e264, 0x61e7edf9, 0xbac28c85,
- 0xcc238318, 0x56aba528, 0x204aaab5, 0xfb6fcbc9, 0x8d8ec454,
- 0x4d240931, 0x3bc506ac, 0xe0e067d0, 0x9601684d, 0x60b5fc1b,
- 0x1654f386, 0xcd7192fa, 0xbb909d67, 0x7b3a5002, 0x0ddb5f9f,
- 0xd6fe3ee3, 0xa01f317e, 0x1318cac2, 0x65f9c55f, 0xbedca423,
- 0xc83dabbe, 0x089766db, 0x7e766946, 0xa553083a, 0xd3b207a7,
- 0x250693f1, 0x53e79c6c, 0x88c2fd10, 0xfe23f28d, 0x3e893fe8,
- 0x48683075, 0x934d5109, 0xe5ac5e94, 0x7f2478a4, 0x09c57739,
- 0xd2e01645, 0xa40119d8, 0x64abd4bd, 0x124adb20, 0xc96fba5c,
- 0xbf8eb5c1, 0x493a2197, 0x3fdb2e0a, 0xe4fe4f76, 0x921f40eb,
- 0x52b58d8e, 0x24548213, 0xff71e36f, 0x8990ecf2, 0xcb60ae0f,
- 0xbd81a192, 0x66a4c0ee, 0x1045cf73, 0xd0ef0216, 0xa60e0d8b,
- 0x7d2b6cf7, 0x0bca636a, 0xfd7ef73c, 0x8b9ff8a1, 0x50ba99dd,
- 0x265b9640, 0xe6f15b25, 0x901054b8, 0x4b3535c4, 0x3dd43a59,
- 0xa75c1c69, 0xd1bd13f4, 0x0a987288, 0x7c797d15, 0xbcd3b070,
- 0xca32bfed, 0x1117de91, 0x67f6d10c, 0x9142455a, 0xe7a34ac7,
- 0x3c862bbb, 0x4a672426, 0x8acde943, 0xfc2ce6de, 0x270987a2,
- 0x51e8883f},
- {0x00000000, 0xe8dbfbb9, 0x91b186a8, 0x796a7d11, 0x63657c8a,
- 0x8bbe8733, 0xf2d4fa22, 0x1a0f019b, 0x87cc89cf, 0x6f177276,
- 0x167d0f67, 0xfea6f4de, 0xe4a9f545, 0x0c720efc, 0x751873ed,
- 0x9dc38854, 0x4f9f6244, 0xa74499fd, 0xde2ee4ec, 0x36f51f55,
- 0x2cfa1ece, 0xc421e577, 0xbd4b9866, 0x559063df, 0xc853eb8b,
- 0x20881032, 0x59e26d23, 0xb139969a, 0xab369701, 0x43ed6cb8,
- 0x3a8711a9, 0xd25cea10, 0x9e3ec588, 0x76e53e31, 0x0f8f4320,
- 0xe754b899, 0xfd5bb902, 0x158042bb, 0x6cea3faa, 0x8431c413,
- 0x19f24c47, 0xf129b7fe, 0x8843caef, 0x60983156, 0x7a9730cd,
- 0x924ccb74, 0xeb26b665, 0x03fd4ddc, 0xd1a1a7cc, 0x397a5c75,
- 0x40102164, 0xa8cbdadd, 0xb2c4db46, 0x5a1f20ff, 0x23755dee,
- 0xcbaea657, 0x566d2e03, 0xbeb6d5ba, 0xc7dca8ab, 0x2f075312,
- 0x35085289, 0xddd3a930, 0xa4b9d421, 0x4c622f98, 0x7d7bfbca,
- 0x95a00073, 0xecca7d62, 0x041186db, 0x1e1e8740, 0xf6c57cf9,
- 0x8faf01e8, 0x6774fa51, 0xfab77205, 0x126c89bc, 0x6b06f4ad,
- 0x83dd0f14, 0x99d20e8f, 0x7109f536, 0x08638827, 0xe0b8739e,
- 0x32e4998e, 0xda3f6237, 0xa3551f26, 0x4b8ee49f, 0x5181e504,
- 0xb95a1ebd, 0xc03063ac, 0x28eb9815, 0xb5281041, 0x5df3ebf8,
- 0x249996e9, 0xcc426d50, 0xd64d6ccb, 0x3e969772, 0x47fcea63,
- 0xaf2711da, 0xe3453e42, 0x0b9ec5fb, 0x72f4b8ea, 0x9a2f4353,
- 0x802042c8, 0x68fbb971, 0x1191c460, 0xf94a3fd9, 0x6489b78d,
- 0x8c524c34, 0xf5383125, 0x1de3ca9c, 0x07eccb07, 0xef3730be,
- 0x965d4daf, 0x7e86b616, 0xacda5c06, 0x4401a7bf, 0x3d6bdaae,
- 0xd5b02117, 0xcfbf208c, 0x2764db35, 0x5e0ea624, 0xb6d55d9d,
- 0x2b16d5c9, 0xc3cd2e70, 0xbaa75361, 0x527ca8d8, 0x4873a943,
- 0xa0a852fa, 0xd9c22feb, 0x3119d452, 0xbbf0874e, 0x532b7cf7,
- 0x2a4101e6, 0xc29afa5f, 0xd895fbc4, 0x304e007d, 0x49247d6c,
- 0xa1ff86d5, 0x3c3c0e81, 0xd4e7f538, 0xad8d8829, 0x45567390,
- 0x5f59720b, 0xb78289b2, 0xcee8f4a3, 0x26330f1a, 0xf46fe50a,
- 0x1cb41eb3, 0x65de63a2, 0x8d05981b, 0x970a9980, 0x7fd16239,
- 0x06bb1f28, 0xee60e491, 0x73a36cc5, 0x9b78977c, 0xe212ea6d,
- 0x0ac911d4, 0x10c6104f, 0xf81debf6, 0x817796e7, 0x69ac6d5e,
- 0x25ce42c6, 0xcd15b97f, 0xb47fc46e, 0x5ca43fd7, 0x46ab3e4c,
- 0xae70c5f5, 0xd71ab8e4, 0x3fc1435d, 0xa202cb09, 0x4ad930b0,
- 0x33b34da1, 0xdb68b618, 0xc167b783, 0x29bc4c3a, 0x50d6312b,
- 0xb80dca92, 0x6a512082, 0x828adb3b, 0xfbe0a62a, 0x133b5d93,
- 0x09345c08, 0xe1efa7b1, 0x9885daa0, 0x705e2119, 0xed9da94d,
- 0x054652f4, 0x7c2c2fe5, 0x94f7d45c, 0x8ef8d5c7, 0x66232e7e,
- 0x1f49536f, 0xf792a8d6, 0xc68b7c84, 0x2e50873d, 0x573afa2c,
- 0xbfe10195, 0xa5ee000e, 0x4d35fbb7, 0x345f86a6, 0xdc847d1f,
- 0x4147f54b, 0xa99c0ef2, 0xd0f673e3, 0x382d885a, 0x222289c1,
- 0xcaf97278, 0xb3930f69, 0x5b48f4d0, 0x89141ec0, 0x61cfe579,
- 0x18a59868, 0xf07e63d1, 0xea71624a, 0x02aa99f3, 0x7bc0e4e2,
- 0x931b1f5b, 0x0ed8970f, 0xe6036cb6, 0x9f6911a7, 0x77b2ea1e,
- 0x6dbdeb85, 0x8566103c, 0xfc0c6d2d, 0x14d79694, 0x58b5b90c,
- 0xb06e42b5, 0xc9043fa4, 0x21dfc41d, 0x3bd0c586, 0xd30b3e3f,
- 0xaa61432e, 0x42bab897, 0xdf7930c3, 0x37a2cb7a, 0x4ec8b66b,
- 0xa6134dd2, 0xbc1c4c49, 0x54c7b7f0, 0x2dadcae1, 0xc5763158,
- 0x172adb48, 0xfff120f1, 0x869b5de0, 0x6e40a659, 0x744fa7c2,
- 0x9c945c7b, 0xe5fe216a, 0x0d25dad3, 0x90e65287, 0x783da93e,
- 0x0157d42f, 0xe98c2f96, 0xf3832e0d, 0x1b58d5b4, 0x6232a8a5,
- 0x8ae9531c},
- {0x00000000, 0x919168ae, 0x6325a087, 0xf2b4c829, 0x874c31d4,
- 0x16dd597a, 0xe4699153, 0x75f8f9fd, 0x4f9f1373, 0xde0e7bdd,
- 0x2cbab3f4, 0xbd2bdb5a, 0xc8d322a7, 0x59424a09, 0xabf68220,
- 0x3a67ea8e, 0x9e3e27e6, 0x0faf4f48, 0xfd1b8761, 0x6c8aefcf,
- 0x19721632, 0x88e37e9c, 0x7a57b6b5, 0xebc6de1b, 0xd1a13495,
- 0x40305c3b, 0xb2849412, 0x2315fcbc, 0x56ed0541, 0xc77c6def,
- 0x35c8a5c6, 0xa459cd68, 0x7d7b3f17, 0xecea57b9, 0x1e5e9f90,
- 0x8fcff73e, 0xfa370ec3, 0x6ba6666d, 0x9912ae44, 0x0883c6ea,
- 0x32e42c64, 0xa37544ca, 0x51c18ce3, 0xc050e44d, 0xb5a81db0,
- 0x2439751e, 0xd68dbd37, 0x471cd599, 0xe34518f1, 0x72d4705f,
- 0x8060b876, 0x11f1d0d8, 0x64092925, 0xf598418b, 0x072c89a2,
- 0x96bde10c, 0xacda0b82, 0x3d4b632c, 0xcfffab05, 0x5e6ec3ab,
- 0x2b963a56, 0xba0752f8, 0x48b39ad1, 0xd922f27f, 0xfaf67e2e,
- 0x6b671680, 0x99d3dea9, 0x0842b607, 0x7dba4ffa, 0xec2b2754,
- 0x1e9fef7d, 0x8f0e87d3, 0xb5696d5d, 0x24f805f3, 0xd64ccdda,
- 0x47dda574, 0x32255c89, 0xa3b43427, 0x5100fc0e, 0xc09194a0,
- 0x64c859c8, 0xf5593166, 0x07edf94f, 0x967c91e1, 0xe384681c,
- 0x721500b2, 0x80a1c89b, 0x1130a035, 0x2b574abb, 0xbac62215,
- 0x4872ea3c, 0xd9e38292, 0xac1b7b6f, 0x3d8a13c1, 0xcf3edbe8,
- 0x5eafb346, 0x878d4139, 0x161c2997, 0xe4a8e1be, 0x75398910,
- 0x00c170ed, 0x91501843, 0x63e4d06a, 0xf275b8c4, 0xc812524a,
- 0x59833ae4, 0xab37f2cd, 0x3aa69a63, 0x4f5e639e, 0xdecf0b30,
- 0x2c7bc319, 0xbdeaabb7, 0x19b366df, 0x88220e71, 0x7a96c658,
- 0xeb07aef6, 0x9eff570b, 0x0f6e3fa5, 0xfddaf78c, 0x6c4b9f22,
- 0x562c75ac, 0xc7bd1d02, 0x3509d52b, 0xa498bd85, 0xd1604478,
- 0x40f12cd6, 0xb245e4ff, 0x23d48c51, 0xf4edfd5c, 0x657c95f2,
- 0x97c85ddb, 0x06593575, 0x73a1cc88, 0xe230a426, 0x10846c0f,
- 0x811504a1, 0xbb72ee2f, 0x2ae38681, 0xd8574ea8, 0x49c62606,
- 0x3c3edffb, 0xadafb755, 0x5f1b7f7c, 0xce8a17d2, 0x6ad3daba,
- 0xfb42b214, 0x09f67a3d, 0x98671293, 0xed9feb6e, 0x7c0e83c0,
- 0x8eba4be9, 0x1f2b2347, 0x254cc9c9, 0xb4dda167, 0x4669694e,
- 0xd7f801e0, 0xa200f81d, 0x339190b3, 0xc125589a, 0x50b43034,
- 0x8996c24b, 0x1807aae5, 0xeab362cc, 0x7b220a62, 0x0edaf39f,
- 0x9f4b9b31, 0x6dff5318, 0xfc6e3bb6, 0xc609d138, 0x5798b996,
- 0xa52c71bf, 0x34bd1911, 0x4145e0ec, 0xd0d48842, 0x2260406b,
- 0xb3f128c5, 0x17a8e5ad, 0x86398d03, 0x748d452a, 0xe51c2d84,
- 0x90e4d479, 0x0175bcd7, 0xf3c174fe, 0x62501c50, 0x5837f6de,
- 0xc9a69e70, 0x3b125659, 0xaa833ef7, 0xdf7bc70a, 0x4eeaafa4,
- 0xbc5e678d, 0x2dcf0f23, 0x0e1b8372, 0x9f8aebdc, 0x6d3e23f5,
- 0xfcaf4b5b, 0x8957b2a6, 0x18c6da08, 0xea721221, 0x7be37a8f,
- 0x41849001, 0xd015f8af, 0x22a13086, 0xb3305828, 0xc6c8a1d5,
- 0x5759c97b, 0xa5ed0152, 0x347c69fc, 0x9025a494, 0x01b4cc3a,
- 0xf3000413, 0x62916cbd, 0x17699540, 0x86f8fdee, 0x744c35c7,
- 0xe5dd5d69, 0xdfbab7e7, 0x4e2bdf49, 0xbc9f1760, 0x2d0e7fce,
- 0x58f68633, 0xc967ee9d, 0x3bd326b4, 0xaa424e1a, 0x7360bc65,
- 0xe2f1d4cb, 0x10451ce2, 0x81d4744c, 0xf42c8db1, 0x65bde51f,
- 0x97092d36, 0x06984598, 0x3cffaf16, 0xad6ec7b8, 0x5fda0f91,
- 0xce4b673f, 0xbbb39ec2, 0x2a22f66c, 0xd8963e45, 0x490756eb,
- 0xed5e9b83, 0x7ccff32d, 0x8e7b3b04, 0x1fea53aa, 0x6a12aa57,
- 0xfb83c2f9, 0x09370ad0, 0x98a6627e, 0xa2c188f0, 0x3350e05e,
- 0xc1e42877, 0x507540d9, 0x258db924, 0xb41cd18a, 0x46a819a3,
- 0xd739710d}};
-
-#endif
-
-#endif
-
-#if N == 5
-
-#if W == 8
-
-local const z_crc_t FAR crc_braid_table[][256] = {
- {0x00000000, 0xaf449247, 0x85f822cf, 0x2abcb088, 0xd08143df,
- 0x7fc5d198, 0x55796110, 0xfa3df357, 0x7a7381ff, 0xd53713b8,
- 0xff8ba330, 0x50cf3177, 0xaaf2c220, 0x05b65067, 0x2f0ae0ef,
- 0x804e72a8, 0xf4e703fe, 0x5ba391b9, 0x711f2131, 0xde5bb376,
- 0x24664021, 0x8b22d266, 0xa19e62ee, 0x0edaf0a9, 0x8e948201,
- 0x21d01046, 0x0b6ca0ce, 0xa4283289, 0x5e15c1de, 0xf1515399,
- 0xdbede311, 0x74a97156, 0x32bf01bd, 0x9dfb93fa, 0xb7472372,
- 0x1803b135, 0xe23e4262, 0x4d7ad025, 0x67c660ad, 0xc882f2ea,
- 0x48cc8042, 0xe7881205, 0xcd34a28d, 0x627030ca, 0x984dc39d,
- 0x370951da, 0x1db5e152, 0xb2f17315, 0xc6580243, 0x691c9004,
- 0x43a0208c, 0xece4b2cb, 0x16d9419c, 0xb99dd3db, 0x93216353,
- 0x3c65f114, 0xbc2b83bc, 0x136f11fb, 0x39d3a173, 0x96973334,
- 0x6caac063, 0xc3ee5224, 0xe952e2ac, 0x461670eb, 0x657e037a,
- 0xca3a913d, 0xe08621b5, 0x4fc2b3f2, 0xb5ff40a5, 0x1abbd2e2,
- 0x3007626a, 0x9f43f02d, 0x1f0d8285, 0xb04910c2, 0x9af5a04a,
- 0x35b1320d, 0xcf8cc15a, 0x60c8531d, 0x4a74e395, 0xe53071d2,
- 0x91990084, 0x3edd92c3, 0x1461224b, 0xbb25b00c, 0x4118435b,
- 0xee5cd11c, 0xc4e06194, 0x6ba4f3d3, 0xebea817b, 0x44ae133c,
- 0x6e12a3b4, 0xc15631f3, 0x3b6bc2a4, 0x942f50e3, 0xbe93e06b,
- 0x11d7722c, 0x57c102c7, 0xf8859080, 0xd2392008, 0x7d7db24f,
- 0x87404118, 0x2804d35f, 0x02b863d7, 0xadfcf190, 0x2db28338,
- 0x82f6117f, 0xa84aa1f7, 0x070e33b0, 0xfd33c0e7, 0x527752a0,
- 0x78cbe228, 0xd78f706f, 0xa3260139, 0x0c62937e, 0x26de23f6,
- 0x899ab1b1, 0x73a742e6, 0xdce3d0a1, 0xf65f6029, 0x591bf26e,
- 0xd95580c6, 0x76111281, 0x5cada209, 0xf3e9304e, 0x09d4c319,
- 0xa690515e, 0x8c2ce1d6, 0x23687391, 0xcafc06f4, 0x65b894b3,
- 0x4f04243b, 0xe040b67c, 0x1a7d452b, 0xb539d76c, 0x9f8567e4,
- 0x30c1f5a3, 0xb08f870b, 0x1fcb154c, 0x3577a5c4, 0x9a333783,
- 0x600ec4d4, 0xcf4a5693, 0xe5f6e61b, 0x4ab2745c, 0x3e1b050a,
- 0x915f974d, 0xbbe327c5, 0x14a7b582, 0xee9a46d5, 0x41ded492,
- 0x6b62641a, 0xc426f65d, 0x446884f5, 0xeb2c16b2, 0xc190a63a,
- 0x6ed4347d, 0x94e9c72a, 0x3bad556d, 0x1111e5e5, 0xbe5577a2,
- 0xf8430749, 0x5707950e, 0x7dbb2586, 0xd2ffb7c1, 0x28c24496,
- 0x8786d6d1, 0xad3a6659, 0x027ef41e, 0x823086b6, 0x2d7414f1,
- 0x07c8a479, 0xa88c363e, 0x52b1c569, 0xfdf5572e, 0xd749e7a6,
- 0x780d75e1, 0x0ca404b7, 0xa3e096f0, 0x895c2678, 0x2618b43f,
- 0xdc254768, 0x7361d52f, 0x59dd65a7, 0xf699f7e0, 0x76d78548,
- 0xd993170f, 0xf32fa787, 0x5c6b35c0, 0xa656c697, 0x091254d0,
- 0x23aee458, 0x8cea761f, 0xaf82058e, 0x00c697c9, 0x2a7a2741,
- 0x853eb506, 0x7f034651, 0xd047d416, 0xfafb649e, 0x55bff6d9,
- 0xd5f18471, 0x7ab51636, 0x5009a6be, 0xff4d34f9, 0x0570c7ae,
- 0xaa3455e9, 0x8088e561, 0x2fcc7726, 0x5b650670, 0xf4219437,
- 0xde9d24bf, 0x71d9b6f8, 0x8be445af, 0x24a0d7e8, 0x0e1c6760,
- 0xa158f527, 0x2116878f, 0x8e5215c8, 0xa4eea540, 0x0baa3707,
- 0xf197c450, 0x5ed35617, 0x746fe69f, 0xdb2b74d8, 0x9d3d0433,
- 0x32799674, 0x18c526fc, 0xb781b4bb, 0x4dbc47ec, 0xe2f8d5ab,
- 0xc8446523, 0x6700f764, 0xe74e85cc, 0x480a178b, 0x62b6a703,
- 0xcdf23544, 0x37cfc613, 0x988b5454, 0xb237e4dc, 0x1d73769b,
- 0x69da07cd, 0xc69e958a, 0xec222502, 0x4366b745, 0xb95b4412,
- 0x161fd655, 0x3ca366dd, 0x93e7f49a, 0x13a98632, 0xbced1475,
- 0x9651a4fd, 0x391536ba, 0xc328c5ed, 0x6c6c57aa, 0x46d0e722,
- 0xe9947565},
- {0x00000000, 0x4e890ba9, 0x9d121752, 0xd39b1cfb, 0xe15528e5,
- 0xafdc234c, 0x7c473fb7, 0x32ce341e, 0x19db578b, 0x57525c22,
- 0x84c940d9, 0xca404b70, 0xf88e7f6e, 0xb60774c7, 0x659c683c,
- 0x2b156395, 0x33b6af16, 0x7d3fa4bf, 0xaea4b844, 0xe02db3ed,
- 0xd2e387f3, 0x9c6a8c5a, 0x4ff190a1, 0x01789b08, 0x2a6df89d,
- 0x64e4f334, 0xb77fefcf, 0xf9f6e466, 0xcb38d078, 0x85b1dbd1,
- 0x562ac72a, 0x18a3cc83, 0x676d5e2c, 0x29e45585, 0xfa7f497e,
- 0xb4f642d7, 0x863876c9, 0xc8b17d60, 0x1b2a619b, 0x55a36a32,
- 0x7eb609a7, 0x303f020e, 0xe3a41ef5, 0xad2d155c, 0x9fe32142,
- 0xd16a2aeb, 0x02f13610, 0x4c783db9, 0x54dbf13a, 0x1a52fa93,
- 0xc9c9e668, 0x8740edc1, 0xb58ed9df, 0xfb07d276, 0x289cce8d,
- 0x6615c524, 0x4d00a6b1, 0x0389ad18, 0xd012b1e3, 0x9e9bba4a,
- 0xac558e54, 0xe2dc85fd, 0x31479906, 0x7fce92af, 0xcedabc58,
- 0x8053b7f1, 0x53c8ab0a, 0x1d41a0a3, 0x2f8f94bd, 0x61069f14,
- 0xb29d83ef, 0xfc148846, 0xd701ebd3, 0x9988e07a, 0x4a13fc81,
- 0x049af728, 0x3654c336, 0x78ddc89f, 0xab46d464, 0xe5cfdfcd,
- 0xfd6c134e, 0xb3e518e7, 0x607e041c, 0x2ef70fb5, 0x1c393bab,
- 0x52b03002, 0x812b2cf9, 0xcfa22750, 0xe4b744c5, 0xaa3e4f6c,
- 0x79a55397, 0x372c583e, 0x05e26c20, 0x4b6b6789, 0x98f07b72,
- 0xd67970db, 0xa9b7e274, 0xe73ee9dd, 0x34a5f526, 0x7a2cfe8f,
- 0x48e2ca91, 0x066bc138, 0xd5f0ddc3, 0x9b79d66a, 0xb06cb5ff,
- 0xfee5be56, 0x2d7ea2ad, 0x63f7a904, 0x51399d1a, 0x1fb096b3,
- 0xcc2b8a48, 0x82a281e1, 0x9a014d62, 0xd48846cb, 0x07135a30,
- 0x499a5199, 0x7b546587, 0x35dd6e2e, 0xe64672d5, 0xa8cf797c,
- 0x83da1ae9, 0xcd531140, 0x1ec80dbb, 0x50410612, 0x628f320c,
- 0x2c0639a5, 0xff9d255e, 0xb1142ef7, 0x46c47ef1, 0x084d7558,
- 0xdbd669a3, 0x955f620a, 0xa7915614, 0xe9185dbd, 0x3a834146,
- 0x740a4aef, 0x5f1f297a, 0x119622d3, 0xc20d3e28, 0x8c843581,
- 0xbe4a019f, 0xf0c30a36, 0x235816cd, 0x6dd11d64, 0x7572d1e7,
- 0x3bfbda4e, 0xe860c6b5, 0xa6e9cd1c, 0x9427f902, 0xdaaef2ab,
- 0x0935ee50, 0x47bce5f9, 0x6ca9866c, 0x22208dc5, 0xf1bb913e,
- 0xbf329a97, 0x8dfcae89, 0xc375a520, 0x10eeb9db, 0x5e67b272,
- 0x21a920dd, 0x6f202b74, 0xbcbb378f, 0xf2323c26, 0xc0fc0838,
- 0x8e750391, 0x5dee1f6a, 0x136714c3, 0x38727756, 0x76fb7cff,
- 0xa5606004, 0xebe96bad, 0xd9275fb3, 0x97ae541a, 0x443548e1,
- 0x0abc4348, 0x121f8fcb, 0x5c968462, 0x8f0d9899, 0xc1849330,
- 0xf34aa72e, 0xbdc3ac87, 0x6e58b07c, 0x20d1bbd5, 0x0bc4d840,
- 0x454dd3e9, 0x96d6cf12, 0xd85fc4bb, 0xea91f0a5, 0xa418fb0c,
- 0x7783e7f7, 0x390aec5e, 0x881ec2a9, 0xc697c900, 0x150cd5fb,
- 0x5b85de52, 0x694bea4c, 0x27c2e1e5, 0xf459fd1e, 0xbad0f6b7,
- 0x91c59522, 0xdf4c9e8b, 0x0cd78270, 0x425e89d9, 0x7090bdc7,
- 0x3e19b66e, 0xed82aa95, 0xa30ba13c, 0xbba86dbf, 0xf5216616,
- 0x26ba7aed, 0x68337144, 0x5afd455a, 0x14744ef3, 0xc7ef5208,
- 0x896659a1, 0xa2733a34, 0xecfa319d, 0x3f612d66, 0x71e826cf,
- 0x432612d1, 0x0daf1978, 0xde340583, 0x90bd0e2a, 0xef739c85,
- 0xa1fa972c, 0x72618bd7, 0x3ce8807e, 0x0e26b460, 0x40afbfc9,
- 0x9334a332, 0xddbda89b, 0xf6a8cb0e, 0xb821c0a7, 0x6bbadc5c,
- 0x2533d7f5, 0x17fde3eb, 0x5974e842, 0x8aeff4b9, 0xc466ff10,
- 0xdcc53393, 0x924c383a, 0x41d724c1, 0x0f5e2f68, 0x3d901b76,
- 0x731910df, 0xa0820c24, 0xee0b078d, 0xc51e6418, 0x8b976fb1,
- 0x580c734a, 0x168578e3, 0x244b4cfd, 0x6ac24754, 0xb9595baf,
- 0xf7d05006},
- {0x00000000, 0x8d88fde2, 0xc060fd85, 0x4de80067, 0x5bb0fd4b,
- 0xd63800a9, 0x9bd000ce, 0x1658fd2c, 0xb761fa96, 0x3ae90774,
- 0x77010713, 0xfa89faf1, 0xecd107dd, 0x6159fa3f, 0x2cb1fa58,
- 0xa13907ba, 0xb5b2f36d, 0x383a0e8f, 0x75d20ee8, 0xf85af30a,
- 0xee020e26, 0x638af3c4, 0x2e62f3a3, 0xa3ea0e41, 0x02d309fb,
- 0x8f5bf419, 0xc2b3f47e, 0x4f3b099c, 0x5963f4b0, 0xd4eb0952,
- 0x99030935, 0x148bf4d7, 0xb014e09b, 0x3d9c1d79, 0x70741d1e,
- 0xfdfce0fc, 0xeba41dd0, 0x662ce032, 0x2bc4e055, 0xa64c1db7,
- 0x07751a0d, 0x8afde7ef, 0xc715e788, 0x4a9d1a6a, 0x5cc5e746,
- 0xd14d1aa4, 0x9ca51ac3, 0x112de721, 0x05a613f6, 0x882eee14,
- 0xc5c6ee73, 0x484e1391, 0x5e16eebd, 0xd39e135f, 0x9e761338,
- 0x13feeeda, 0xb2c7e960, 0x3f4f1482, 0x72a714e5, 0xff2fe907,
- 0xe977142b, 0x64ffe9c9, 0x2917e9ae, 0xa49f144c, 0xbb58c777,
- 0x36d03a95, 0x7b383af2, 0xf6b0c710, 0xe0e83a3c, 0x6d60c7de,
- 0x2088c7b9, 0xad003a5b, 0x0c393de1, 0x81b1c003, 0xcc59c064,
- 0x41d13d86, 0x5789c0aa, 0xda013d48, 0x97e93d2f, 0x1a61c0cd,
- 0x0eea341a, 0x8362c9f8, 0xce8ac99f, 0x4302347d, 0x555ac951,
- 0xd8d234b3, 0x953a34d4, 0x18b2c936, 0xb98bce8c, 0x3403336e,
- 0x79eb3309, 0xf463ceeb, 0xe23b33c7, 0x6fb3ce25, 0x225bce42,
- 0xafd333a0, 0x0b4c27ec, 0x86c4da0e, 0xcb2cda69, 0x46a4278b,
- 0x50fcdaa7, 0xdd742745, 0x909c2722, 0x1d14dac0, 0xbc2ddd7a,
- 0x31a52098, 0x7c4d20ff, 0xf1c5dd1d, 0xe79d2031, 0x6a15ddd3,
- 0x27fdddb4, 0xaa752056, 0xbefed481, 0x33762963, 0x7e9e2904,
- 0xf316d4e6, 0xe54e29ca, 0x68c6d428, 0x252ed44f, 0xa8a629ad,
- 0x099f2e17, 0x8417d3f5, 0xc9ffd392, 0x44772e70, 0x522fd35c,
- 0xdfa72ebe, 0x924f2ed9, 0x1fc7d33b, 0xadc088af, 0x2048754d,
- 0x6da0752a, 0xe02888c8, 0xf67075e4, 0x7bf88806, 0x36108861,
- 0xbb987583, 0x1aa17239, 0x97298fdb, 0xdac18fbc, 0x5749725e,
- 0x41118f72, 0xcc997290, 0x817172f7, 0x0cf98f15, 0x18727bc2,
- 0x95fa8620, 0xd8128647, 0x559a7ba5, 0x43c28689, 0xce4a7b6b,
- 0x83a27b0c, 0x0e2a86ee, 0xaf138154, 0x229b7cb6, 0x6f737cd1,
- 0xe2fb8133, 0xf4a37c1f, 0x792b81fd, 0x34c3819a, 0xb94b7c78,
- 0x1dd46834, 0x905c95d6, 0xddb495b1, 0x503c6853, 0x4664957f,
- 0xcbec689d, 0x860468fa, 0x0b8c9518, 0xaab592a2, 0x273d6f40,
- 0x6ad56f27, 0xe75d92c5, 0xf1056fe9, 0x7c8d920b, 0x3165926c,
- 0xbced6f8e, 0xa8669b59, 0x25ee66bb, 0x680666dc, 0xe58e9b3e,
- 0xf3d66612, 0x7e5e9bf0, 0x33b69b97, 0xbe3e6675, 0x1f0761cf,
- 0x928f9c2d, 0xdf679c4a, 0x52ef61a8, 0x44b79c84, 0xc93f6166,
- 0x84d76101, 0x095f9ce3, 0x16984fd8, 0x9b10b23a, 0xd6f8b25d,
- 0x5b704fbf, 0x4d28b293, 0xc0a04f71, 0x8d484f16, 0x00c0b2f4,
- 0xa1f9b54e, 0x2c7148ac, 0x619948cb, 0xec11b529, 0xfa494805,
- 0x77c1b5e7, 0x3a29b580, 0xb7a14862, 0xa32abcb5, 0x2ea24157,
- 0x634a4130, 0xeec2bcd2, 0xf89a41fe, 0x7512bc1c, 0x38fabc7b,
- 0xb5724199, 0x144b4623, 0x99c3bbc1, 0xd42bbba6, 0x59a34644,
- 0x4ffbbb68, 0xc273468a, 0x8f9b46ed, 0x0213bb0f, 0xa68caf43,
- 0x2b0452a1, 0x66ec52c6, 0xeb64af24, 0xfd3c5208, 0x70b4afea,
- 0x3d5caf8d, 0xb0d4526f, 0x11ed55d5, 0x9c65a837, 0xd18da850,
- 0x5c0555b2, 0x4a5da89e, 0xc7d5557c, 0x8a3d551b, 0x07b5a8f9,
- 0x133e5c2e, 0x9eb6a1cc, 0xd35ea1ab, 0x5ed65c49, 0x488ea165,
- 0xc5065c87, 0x88ee5ce0, 0x0566a102, 0xa45fa6b8, 0x29d75b5a,
- 0x643f5b3d, 0xe9b7a6df, 0xffef5bf3, 0x7267a611, 0x3f8fa676,
- 0xb2075b94},
- {0x00000000, 0x80f0171f, 0xda91287f, 0x5a613f60, 0x6e5356bf,
- 0xeea341a0, 0xb4c27ec0, 0x343269df, 0xdca6ad7e, 0x5c56ba61,
- 0x06378501, 0x86c7921e, 0xb2f5fbc1, 0x3205ecde, 0x6864d3be,
- 0xe894c4a1, 0x623c5cbd, 0xe2cc4ba2, 0xb8ad74c2, 0x385d63dd,
- 0x0c6f0a02, 0x8c9f1d1d, 0xd6fe227d, 0x560e3562, 0xbe9af1c3,
- 0x3e6ae6dc, 0x640bd9bc, 0xe4fbcea3, 0xd0c9a77c, 0x5039b063,
- 0x0a588f03, 0x8aa8981c, 0xc478b97a, 0x4488ae65, 0x1ee99105,
- 0x9e19861a, 0xaa2befc5, 0x2adbf8da, 0x70bac7ba, 0xf04ad0a5,
- 0x18de1404, 0x982e031b, 0xc24f3c7b, 0x42bf2b64, 0x768d42bb,
- 0xf67d55a4, 0xac1c6ac4, 0x2cec7ddb, 0xa644e5c7, 0x26b4f2d8,
- 0x7cd5cdb8, 0xfc25daa7, 0xc817b378, 0x48e7a467, 0x12869b07,
- 0x92768c18, 0x7ae248b9, 0xfa125fa6, 0xa07360c6, 0x208377d9,
- 0x14b11e06, 0x94410919, 0xce203679, 0x4ed02166, 0x538074b5,
- 0xd37063aa, 0x89115cca, 0x09e14bd5, 0x3dd3220a, 0xbd233515,
- 0xe7420a75, 0x67b21d6a, 0x8f26d9cb, 0x0fd6ced4, 0x55b7f1b4,
- 0xd547e6ab, 0xe1758f74, 0x6185986b, 0x3be4a70b, 0xbb14b014,
- 0x31bc2808, 0xb14c3f17, 0xeb2d0077, 0x6bdd1768, 0x5fef7eb7,
- 0xdf1f69a8, 0x857e56c8, 0x058e41d7, 0xed1a8576, 0x6dea9269,
- 0x378bad09, 0xb77bba16, 0x8349d3c9, 0x03b9c4d6, 0x59d8fbb6,
- 0xd928eca9, 0x97f8cdcf, 0x1708dad0, 0x4d69e5b0, 0xcd99f2af,
- 0xf9ab9b70, 0x795b8c6f, 0x233ab30f, 0xa3caa410, 0x4b5e60b1,
- 0xcbae77ae, 0x91cf48ce, 0x113f5fd1, 0x250d360e, 0xa5fd2111,
- 0xff9c1e71, 0x7f6c096e, 0xf5c49172, 0x7534866d, 0x2f55b90d,
- 0xafa5ae12, 0x9b97c7cd, 0x1b67d0d2, 0x4106efb2, 0xc1f6f8ad,
- 0x29623c0c, 0xa9922b13, 0xf3f31473, 0x7303036c, 0x47316ab3,
- 0xc7c17dac, 0x9da042cc, 0x1d5055d3, 0xa700e96a, 0x27f0fe75,
- 0x7d91c115, 0xfd61d60a, 0xc953bfd5, 0x49a3a8ca, 0x13c297aa,
- 0x933280b5, 0x7ba64414, 0xfb56530b, 0xa1376c6b, 0x21c77b74,
- 0x15f512ab, 0x950505b4, 0xcf643ad4, 0x4f942dcb, 0xc53cb5d7,
- 0x45cca2c8, 0x1fad9da8, 0x9f5d8ab7, 0xab6fe368, 0x2b9ff477,
- 0x71fecb17, 0xf10edc08, 0x199a18a9, 0x996a0fb6, 0xc30b30d6,
- 0x43fb27c9, 0x77c94e16, 0xf7395909, 0xad586669, 0x2da87176,
- 0x63785010, 0xe388470f, 0xb9e9786f, 0x39196f70, 0x0d2b06af,
- 0x8ddb11b0, 0xd7ba2ed0, 0x574a39cf, 0xbfdefd6e, 0x3f2eea71,
- 0x654fd511, 0xe5bfc20e, 0xd18dabd1, 0x517dbcce, 0x0b1c83ae,
- 0x8bec94b1, 0x01440cad, 0x81b41bb2, 0xdbd524d2, 0x5b2533cd,
- 0x6f175a12, 0xefe74d0d, 0xb586726d, 0x35766572, 0xdde2a1d3,
- 0x5d12b6cc, 0x077389ac, 0x87839eb3, 0xb3b1f76c, 0x3341e073,
- 0x6920df13, 0xe9d0c80c, 0xf4809ddf, 0x74708ac0, 0x2e11b5a0,
- 0xaee1a2bf, 0x9ad3cb60, 0x1a23dc7f, 0x4042e31f, 0xc0b2f400,
- 0x282630a1, 0xa8d627be, 0xf2b718de, 0x72470fc1, 0x4675661e,
- 0xc6857101, 0x9ce44e61, 0x1c14597e, 0x96bcc162, 0x164cd67d,
- 0x4c2de91d, 0xccddfe02, 0xf8ef97dd, 0x781f80c2, 0x227ebfa2,
- 0xa28ea8bd, 0x4a1a6c1c, 0xcaea7b03, 0x908b4463, 0x107b537c,
- 0x24493aa3, 0xa4b92dbc, 0xfed812dc, 0x7e2805c3, 0x30f824a5,
- 0xb00833ba, 0xea690cda, 0x6a991bc5, 0x5eab721a, 0xde5b6505,
- 0x843a5a65, 0x04ca4d7a, 0xec5e89db, 0x6cae9ec4, 0x36cfa1a4,
- 0xb63fb6bb, 0x820ddf64, 0x02fdc87b, 0x589cf71b, 0xd86ce004,
- 0x52c47818, 0xd2346f07, 0x88555067, 0x08a54778, 0x3c972ea7,
- 0xbc6739b8, 0xe60606d8, 0x66f611c7, 0x8e62d566, 0x0e92c279,
- 0x54f3fd19, 0xd403ea06, 0xe03183d9, 0x60c194c6, 0x3aa0aba6,
- 0xba50bcb9},
- {0x00000000, 0x9570d495, 0xf190af6b, 0x64e07bfe, 0x38505897,
- 0xad208c02, 0xc9c0f7fc, 0x5cb02369, 0x70a0b12e, 0xe5d065bb,
- 0x81301e45, 0x1440cad0, 0x48f0e9b9, 0xdd803d2c, 0xb96046d2,
- 0x2c109247, 0xe141625c, 0x7431b6c9, 0x10d1cd37, 0x85a119a2,
- 0xd9113acb, 0x4c61ee5e, 0x288195a0, 0xbdf14135, 0x91e1d372,
- 0x049107e7, 0x60717c19, 0xf501a88c, 0xa9b18be5, 0x3cc15f70,
- 0x5821248e, 0xcd51f01b, 0x19f3c2f9, 0x8c83166c, 0xe8636d92,
- 0x7d13b907, 0x21a39a6e, 0xb4d34efb, 0xd0333505, 0x4543e190,
- 0x695373d7, 0xfc23a742, 0x98c3dcbc, 0x0db30829, 0x51032b40,
- 0xc473ffd5, 0xa093842b, 0x35e350be, 0xf8b2a0a5, 0x6dc27430,
- 0x09220fce, 0x9c52db5b, 0xc0e2f832, 0x55922ca7, 0x31725759,
- 0xa40283cc, 0x8812118b, 0x1d62c51e, 0x7982bee0, 0xecf26a75,
- 0xb042491c, 0x25329d89, 0x41d2e677, 0xd4a232e2, 0x33e785f2,
- 0xa6975167, 0xc2772a99, 0x5707fe0c, 0x0bb7dd65, 0x9ec709f0,
- 0xfa27720e, 0x6f57a69b, 0x434734dc, 0xd637e049, 0xb2d79bb7,
- 0x27a74f22, 0x7b176c4b, 0xee67b8de, 0x8a87c320, 0x1ff717b5,
- 0xd2a6e7ae, 0x47d6333b, 0x233648c5, 0xb6469c50, 0xeaf6bf39,
- 0x7f866bac, 0x1b661052, 0x8e16c4c7, 0xa2065680, 0x37768215,
- 0x5396f9eb, 0xc6e62d7e, 0x9a560e17, 0x0f26da82, 0x6bc6a17c,
- 0xfeb675e9, 0x2a14470b, 0xbf64939e, 0xdb84e860, 0x4ef43cf5,
- 0x12441f9c, 0x8734cb09, 0xe3d4b0f7, 0x76a46462, 0x5ab4f625,
- 0xcfc422b0, 0xab24594e, 0x3e548ddb, 0x62e4aeb2, 0xf7947a27,
- 0x937401d9, 0x0604d54c, 0xcb552557, 0x5e25f1c2, 0x3ac58a3c,
- 0xafb55ea9, 0xf3057dc0, 0x6675a955, 0x0295d2ab, 0x97e5063e,
- 0xbbf59479, 0x2e8540ec, 0x4a653b12, 0xdf15ef87, 0x83a5ccee,
- 0x16d5187b, 0x72356385, 0xe745b710, 0x67cf0be4, 0xf2bfdf71,
- 0x965fa48f, 0x032f701a, 0x5f9f5373, 0xcaef87e6, 0xae0ffc18,
- 0x3b7f288d, 0x176fbaca, 0x821f6e5f, 0xe6ff15a1, 0x738fc134,
- 0x2f3fe25d, 0xba4f36c8, 0xdeaf4d36, 0x4bdf99a3, 0x868e69b8,
- 0x13febd2d, 0x771ec6d3, 0xe26e1246, 0xbede312f, 0x2baee5ba,
- 0x4f4e9e44, 0xda3e4ad1, 0xf62ed896, 0x635e0c03, 0x07be77fd,
- 0x92cea368, 0xce7e8001, 0x5b0e5494, 0x3fee2f6a, 0xaa9efbff,
- 0x7e3cc91d, 0xeb4c1d88, 0x8fac6676, 0x1adcb2e3, 0x466c918a,
- 0xd31c451f, 0xb7fc3ee1, 0x228cea74, 0x0e9c7833, 0x9becaca6,
- 0xff0cd758, 0x6a7c03cd, 0x36cc20a4, 0xa3bcf431, 0xc75c8fcf,
- 0x522c5b5a, 0x9f7dab41, 0x0a0d7fd4, 0x6eed042a, 0xfb9dd0bf,
- 0xa72df3d6, 0x325d2743, 0x56bd5cbd, 0xc3cd8828, 0xefdd1a6f,
- 0x7aadcefa, 0x1e4db504, 0x8b3d6191, 0xd78d42f8, 0x42fd966d,
- 0x261ded93, 0xb36d3906, 0x54288e16, 0xc1585a83, 0xa5b8217d,
- 0x30c8f5e8, 0x6c78d681, 0xf9080214, 0x9de879ea, 0x0898ad7f,
- 0x24883f38, 0xb1f8ebad, 0xd5189053, 0x406844c6, 0x1cd867af,
- 0x89a8b33a, 0xed48c8c4, 0x78381c51, 0xb569ec4a, 0x201938df,
- 0x44f94321, 0xd18997b4, 0x8d39b4dd, 0x18496048, 0x7ca91bb6,
- 0xe9d9cf23, 0xc5c95d64, 0x50b989f1, 0x3459f20f, 0xa129269a,
- 0xfd9905f3, 0x68e9d166, 0x0c09aa98, 0x99797e0d, 0x4ddb4cef,
- 0xd8ab987a, 0xbc4be384, 0x293b3711, 0x758b1478, 0xe0fbc0ed,
- 0x841bbb13, 0x116b6f86, 0x3d7bfdc1, 0xa80b2954, 0xcceb52aa,
- 0x599b863f, 0x052ba556, 0x905b71c3, 0xf4bb0a3d, 0x61cbdea8,
- 0xac9a2eb3, 0x39eafa26, 0x5d0a81d8, 0xc87a554d, 0x94ca7624,
- 0x01baa2b1, 0x655ad94f, 0xf02a0dda, 0xdc3a9f9d, 0x494a4b08,
- 0x2daa30f6, 0xb8dae463, 0xe46ac70a, 0x711a139f, 0x15fa6861,
- 0x808abcf4},
- {0x00000000, 0xcf9e17c8, 0x444d29d1, 0x8bd33e19, 0x889a53a2,
- 0x4704446a, 0xccd77a73, 0x03496dbb, 0xca45a105, 0x05dbb6cd,
- 0x8e0888d4, 0x41969f1c, 0x42dff2a7, 0x8d41e56f, 0x0692db76,
- 0xc90cccbe, 0x4ffa444b, 0x80645383, 0x0bb76d9a, 0xc4297a52,
- 0xc76017e9, 0x08fe0021, 0x832d3e38, 0x4cb329f0, 0x85bfe54e,
- 0x4a21f286, 0xc1f2cc9f, 0x0e6cdb57, 0x0d25b6ec, 0xc2bba124,
- 0x49689f3d, 0x86f688f5, 0x9ff48896, 0x506a9f5e, 0xdbb9a147,
- 0x1427b68f, 0x176edb34, 0xd8f0ccfc, 0x5323f2e5, 0x9cbde52d,
- 0x55b12993, 0x9a2f3e5b, 0x11fc0042, 0xde62178a, 0xdd2b7a31,
- 0x12b56df9, 0x996653e0, 0x56f84428, 0xd00eccdd, 0x1f90db15,
- 0x9443e50c, 0x5bddf2c4, 0x58949f7f, 0x970a88b7, 0x1cd9b6ae,
- 0xd347a166, 0x1a4b6dd8, 0xd5d57a10, 0x5e064409, 0x919853c1,
- 0x92d13e7a, 0x5d4f29b2, 0xd69c17ab, 0x19020063, 0xe498176d,
- 0x2b0600a5, 0xa0d53ebc, 0x6f4b2974, 0x6c0244cf, 0xa39c5307,
- 0x284f6d1e, 0xe7d17ad6, 0x2eddb668, 0xe143a1a0, 0x6a909fb9,
- 0xa50e8871, 0xa647e5ca, 0x69d9f202, 0xe20acc1b, 0x2d94dbd3,
- 0xab625326, 0x64fc44ee, 0xef2f7af7, 0x20b16d3f, 0x23f80084,
- 0xec66174c, 0x67b52955, 0xa82b3e9d, 0x6127f223, 0xaeb9e5eb,
- 0x256adbf2, 0xeaf4cc3a, 0xe9bda181, 0x2623b649, 0xadf08850,
- 0x626e9f98, 0x7b6c9ffb, 0xb4f28833, 0x3f21b62a, 0xf0bfa1e2,
- 0xf3f6cc59, 0x3c68db91, 0xb7bbe588, 0x7825f240, 0xb1293efe,
- 0x7eb72936, 0xf564172f, 0x3afa00e7, 0x39b36d5c, 0xf62d7a94,
- 0x7dfe448d, 0xb2605345, 0x3496dbb0, 0xfb08cc78, 0x70dbf261,
- 0xbf45e5a9, 0xbc0c8812, 0x73929fda, 0xf841a1c3, 0x37dfb60b,
- 0xfed37ab5, 0x314d6d7d, 0xba9e5364, 0x750044ac, 0x76492917,
- 0xb9d73edf, 0x320400c6, 0xfd9a170e, 0x1241289b, 0xdddf3f53,
- 0x560c014a, 0x99921682, 0x9adb7b39, 0x55456cf1, 0xde9652e8,
- 0x11084520, 0xd804899e, 0x179a9e56, 0x9c49a04f, 0x53d7b787,
- 0x509eda3c, 0x9f00cdf4, 0x14d3f3ed, 0xdb4de425, 0x5dbb6cd0,
- 0x92257b18, 0x19f64501, 0xd66852c9, 0xd5213f72, 0x1abf28ba,
- 0x916c16a3, 0x5ef2016b, 0x97fecdd5, 0x5860da1d, 0xd3b3e404,
- 0x1c2df3cc, 0x1f649e77, 0xd0fa89bf, 0x5b29b7a6, 0x94b7a06e,
- 0x8db5a00d, 0x422bb7c5, 0xc9f889dc, 0x06669e14, 0x052ff3af,
- 0xcab1e467, 0x4162da7e, 0x8efccdb6, 0x47f00108, 0x886e16c0,
- 0x03bd28d9, 0xcc233f11, 0xcf6a52aa, 0x00f44562, 0x8b277b7b,
- 0x44b96cb3, 0xc24fe446, 0x0dd1f38e, 0x8602cd97, 0x499cda5f,
- 0x4ad5b7e4, 0x854ba02c, 0x0e989e35, 0xc10689fd, 0x080a4543,
- 0xc794528b, 0x4c476c92, 0x83d97b5a, 0x809016e1, 0x4f0e0129,
- 0xc4dd3f30, 0x0b4328f8, 0xf6d93ff6, 0x3947283e, 0xb2941627,
- 0x7d0a01ef, 0x7e436c54, 0xb1dd7b9c, 0x3a0e4585, 0xf590524d,
- 0x3c9c9ef3, 0xf302893b, 0x78d1b722, 0xb74fa0ea, 0xb406cd51,
- 0x7b98da99, 0xf04be480, 0x3fd5f348, 0xb9237bbd, 0x76bd6c75,
- 0xfd6e526c, 0x32f045a4, 0x31b9281f, 0xfe273fd7, 0x75f401ce,
- 0xba6a1606, 0x7366dab8, 0xbcf8cd70, 0x372bf369, 0xf8b5e4a1,
- 0xfbfc891a, 0x34629ed2, 0xbfb1a0cb, 0x702fb703, 0x692db760,
- 0xa6b3a0a8, 0x2d609eb1, 0xe2fe8979, 0xe1b7e4c2, 0x2e29f30a,
- 0xa5facd13, 0x6a64dadb, 0xa3681665, 0x6cf601ad, 0xe7253fb4,
- 0x28bb287c, 0x2bf245c7, 0xe46c520f, 0x6fbf6c16, 0xa0217bde,
- 0x26d7f32b, 0xe949e4e3, 0x629adafa, 0xad04cd32, 0xae4da089,
- 0x61d3b741, 0xea008958, 0x259e9e90, 0xec92522e, 0x230c45e6,
- 0xa8df7bff, 0x67416c37, 0x6408018c, 0xab961644, 0x2045285d,
- 0xefdb3f95},
- {0x00000000, 0x24825136, 0x4904a26c, 0x6d86f35a, 0x920944d8,
- 0xb68b15ee, 0xdb0de6b4, 0xff8fb782, 0xff638ff1, 0xdbe1dec7,
- 0xb6672d9d, 0x92e57cab, 0x6d6acb29, 0x49e89a1f, 0x246e6945,
- 0x00ec3873, 0x25b619a3, 0x01344895, 0x6cb2bbcf, 0x4830eaf9,
- 0xb7bf5d7b, 0x933d0c4d, 0xfebbff17, 0xda39ae21, 0xdad59652,
- 0xfe57c764, 0x93d1343e, 0xb7536508, 0x48dcd28a, 0x6c5e83bc,
- 0x01d870e6, 0x255a21d0, 0x4b6c3346, 0x6fee6270, 0x0268912a,
- 0x26eac01c, 0xd965779e, 0xfde726a8, 0x9061d5f2, 0xb4e384c4,
- 0xb40fbcb7, 0x908ded81, 0xfd0b1edb, 0xd9894fed, 0x2606f86f,
- 0x0284a959, 0x6f025a03, 0x4b800b35, 0x6eda2ae5, 0x4a587bd3,
- 0x27de8889, 0x035cd9bf, 0xfcd36e3d, 0xd8513f0b, 0xb5d7cc51,
- 0x91559d67, 0x91b9a514, 0xb53bf422, 0xd8bd0778, 0xfc3f564e,
- 0x03b0e1cc, 0x2732b0fa, 0x4ab443a0, 0x6e361296, 0x96d8668c,
- 0xb25a37ba, 0xdfdcc4e0, 0xfb5e95d6, 0x04d12254, 0x20537362,
- 0x4dd58038, 0x6957d10e, 0x69bbe97d, 0x4d39b84b, 0x20bf4b11,
- 0x043d1a27, 0xfbb2ada5, 0xdf30fc93, 0xb2b60fc9, 0x96345eff,
- 0xb36e7f2f, 0x97ec2e19, 0xfa6add43, 0xdee88c75, 0x21673bf7,
- 0x05e56ac1, 0x6863999b, 0x4ce1c8ad, 0x4c0df0de, 0x688fa1e8,
- 0x050952b2, 0x218b0384, 0xde04b406, 0xfa86e530, 0x9700166a,
- 0xb382475c, 0xddb455ca, 0xf93604fc, 0x94b0f7a6, 0xb032a690,
- 0x4fbd1112, 0x6b3f4024, 0x06b9b37e, 0x223be248, 0x22d7da3b,
- 0x06558b0d, 0x6bd37857, 0x4f512961, 0xb0de9ee3, 0x945ccfd5,
- 0xf9da3c8f, 0xdd586db9, 0xf8024c69, 0xdc801d5f, 0xb106ee05,
- 0x9584bf33, 0x6a0b08b1, 0x4e895987, 0x230faadd, 0x078dfbeb,
- 0x0761c398, 0x23e392ae, 0x4e6561f4, 0x6ae730c2, 0x95688740,
- 0xb1ead676, 0xdc6c252c, 0xf8ee741a, 0xf6c1cb59, 0xd2439a6f,
- 0xbfc56935, 0x9b473803, 0x64c88f81, 0x404adeb7, 0x2dcc2ded,
- 0x094e7cdb, 0x09a244a8, 0x2d20159e, 0x40a6e6c4, 0x6424b7f2,
- 0x9bab0070, 0xbf295146, 0xd2afa21c, 0xf62df32a, 0xd377d2fa,
- 0xf7f583cc, 0x9a737096, 0xbef121a0, 0x417e9622, 0x65fcc714,
- 0x087a344e, 0x2cf86578, 0x2c145d0b, 0x08960c3d, 0x6510ff67,
- 0x4192ae51, 0xbe1d19d3, 0x9a9f48e5, 0xf719bbbf, 0xd39bea89,
- 0xbdadf81f, 0x992fa929, 0xf4a95a73, 0xd02b0b45, 0x2fa4bcc7,
- 0x0b26edf1, 0x66a01eab, 0x42224f9d, 0x42ce77ee, 0x664c26d8,
- 0x0bcad582, 0x2f4884b4, 0xd0c73336, 0xf4456200, 0x99c3915a,
- 0xbd41c06c, 0x981be1bc, 0xbc99b08a, 0xd11f43d0, 0xf59d12e6,
- 0x0a12a564, 0x2e90f452, 0x43160708, 0x6794563e, 0x67786e4d,
- 0x43fa3f7b, 0x2e7ccc21, 0x0afe9d17, 0xf5712a95, 0xd1f37ba3,
- 0xbc7588f9, 0x98f7d9cf, 0x6019add5, 0x449bfce3, 0x291d0fb9,
- 0x0d9f5e8f, 0xf210e90d, 0xd692b83b, 0xbb144b61, 0x9f961a57,
- 0x9f7a2224, 0xbbf87312, 0xd67e8048, 0xf2fcd17e, 0x0d7366fc,
- 0x29f137ca, 0x4477c490, 0x60f595a6, 0x45afb476, 0x612de540,
- 0x0cab161a, 0x2829472c, 0xd7a6f0ae, 0xf324a198, 0x9ea252c2,
- 0xba2003f4, 0xbacc3b87, 0x9e4e6ab1, 0xf3c899eb, 0xd74ac8dd,
- 0x28c57f5f, 0x0c472e69, 0x61c1dd33, 0x45438c05, 0x2b759e93,
- 0x0ff7cfa5, 0x62713cff, 0x46f36dc9, 0xb97cda4b, 0x9dfe8b7d,
- 0xf0787827, 0xd4fa2911, 0xd4161162, 0xf0944054, 0x9d12b30e,
- 0xb990e238, 0x461f55ba, 0x629d048c, 0x0f1bf7d6, 0x2b99a6e0,
- 0x0ec38730, 0x2a41d606, 0x47c7255c, 0x6345746a, 0x9ccac3e8,
- 0xb84892de, 0xd5ce6184, 0xf14c30b2, 0xf1a008c1, 0xd52259f7,
- 0xb8a4aaad, 0x9c26fb9b, 0x63a94c19, 0x472b1d2f, 0x2aadee75,
- 0x0e2fbf43},
- {0x00000000, 0x36f290f3, 0x6de521e6, 0x5b17b115, 0xdbca43cc,
- 0xed38d33f, 0xb62f622a, 0x80ddf2d9, 0x6ce581d9, 0x5a17112a,
- 0x0100a03f, 0x37f230cc, 0xb72fc215, 0x81dd52e6, 0xdacae3f3,
- 0xec387300, 0xd9cb03b2, 0xef399341, 0xb42e2254, 0x82dcb2a7,
- 0x0201407e, 0x34f3d08d, 0x6fe46198, 0x5916f16b, 0xb52e826b,
- 0x83dc1298, 0xd8cba38d, 0xee39337e, 0x6ee4c1a7, 0x58165154,
- 0x0301e041, 0x35f370b2, 0x68e70125, 0x5e1591d6, 0x050220c3,
- 0x33f0b030, 0xb32d42e9, 0x85dfd21a, 0xdec8630f, 0xe83af3fc,
- 0x040280fc, 0x32f0100f, 0x69e7a11a, 0x5f1531e9, 0xdfc8c330,
- 0xe93a53c3, 0xb22de2d6, 0x84df7225, 0xb12c0297, 0x87de9264,
- 0xdcc92371, 0xea3bb382, 0x6ae6415b, 0x5c14d1a8, 0x070360bd,
- 0x31f1f04e, 0xddc9834e, 0xeb3b13bd, 0xb02ca2a8, 0x86de325b,
- 0x0603c082, 0x30f15071, 0x6be6e164, 0x5d147197, 0xd1ce024a,
- 0xe73c92b9, 0xbc2b23ac, 0x8ad9b35f, 0x0a044186, 0x3cf6d175,
- 0x67e16060, 0x5113f093, 0xbd2b8393, 0x8bd91360, 0xd0cea275,
- 0xe63c3286, 0x66e1c05f, 0x501350ac, 0x0b04e1b9, 0x3df6714a,
- 0x080501f8, 0x3ef7910b, 0x65e0201e, 0x5312b0ed, 0xd3cf4234,
- 0xe53dd2c7, 0xbe2a63d2, 0x88d8f321, 0x64e08021, 0x521210d2,
- 0x0905a1c7, 0x3ff73134, 0xbf2ac3ed, 0x89d8531e, 0xd2cfe20b,
- 0xe43d72f8, 0xb929036f, 0x8fdb939c, 0xd4cc2289, 0xe23eb27a,
- 0x62e340a3, 0x5411d050, 0x0f066145, 0x39f4f1b6, 0xd5cc82b6,
- 0xe33e1245, 0xb829a350, 0x8edb33a3, 0x0e06c17a, 0x38f45189,
- 0x63e3e09c, 0x5511706f, 0x60e200dd, 0x5610902e, 0x0d07213b,
- 0x3bf5b1c8, 0xbb284311, 0x8ddad3e2, 0xd6cd62f7, 0xe03ff204,
- 0x0c078104, 0x3af511f7, 0x61e2a0e2, 0x57103011, 0xd7cdc2c8,
- 0xe13f523b, 0xba28e32e, 0x8cda73dd, 0x78ed02d5, 0x4e1f9226,
- 0x15082333, 0x23fab3c0, 0xa3274119, 0x95d5d1ea, 0xcec260ff,
- 0xf830f00c, 0x1408830c, 0x22fa13ff, 0x79eda2ea, 0x4f1f3219,
- 0xcfc2c0c0, 0xf9305033, 0xa227e126, 0x94d571d5, 0xa1260167,
- 0x97d49194, 0xccc32081, 0xfa31b072, 0x7aec42ab, 0x4c1ed258,
- 0x1709634d, 0x21fbf3be, 0xcdc380be, 0xfb31104d, 0xa026a158,
- 0x96d431ab, 0x1609c372, 0x20fb5381, 0x7bece294, 0x4d1e7267,
- 0x100a03f0, 0x26f89303, 0x7def2216, 0x4b1db2e5, 0xcbc0403c,
- 0xfd32d0cf, 0xa62561da, 0x90d7f129, 0x7cef8229, 0x4a1d12da,
- 0x110aa3cf, 0x27f8333c, 0xa725c1e5, 0x91d75116, 0xcac0e003,
- 0xfc3270f0, 0xc9c10042, 0xff3390b1, 0xa42421a4, 0x92d6b157,
- 0x120b438e, 0x24f9d37d, 0x7fee6268, 0x491cf29b, 0xa524819b,
- 0x93d61168, 0xc8c1a07d, 0xfe33308e, 0x7eeec257, 0x481c52a4,
- 0x130be3b1, 0x25f97342, 0xa923009f, 0x9fd1906c, 0xc4c62179,
- 0xf234b18a, 0x72e94353, 0x441bd3a0, 0x1f0c62b5, 0x29fef246,
- 0xc5c68146, 0xf33411b5, 0xa823a0a0, 0x9ed13053, 0x1e0cc28a,
- 0x28fe5279, 0x73e9e36c, 0x451b739f, 0x70e8032d, 0x461a93de,
- 0x1d0d22cb, 0x2bffb238, 0xab2240e1, 0x9dd0d012, 0xc6c76107,
- 0xf035f1f4, 0x1c0d82f4, 0x2aff1207, 0x71e8a312, 0x471a33e1,
- 0xc7c7c138, 0xf13551cb, 0xaa22e0de, 0x9cd0702d, 0xc1c401ba,
- 0xf7369149, 0xac21205c, 0x9ad3b0af, 0x1a0e4276, 0x2cfcd285,
- 0x77eb6390, 0x4119f363, 0xad218063, 0x9bd31090, 0xc0c4a185,
- 0xf6363176, 0x76ebc3af, 0x4019535c, 0x1b0ee249, 0x2dfc72ba,
- 0x180f0208, 0x2efd92fb, 0x75ea23ee, 0x4318b31d, 0xc3c541c4,
- 0xf537d137, 0xae206022, 0x98d2f0d1, 0x74ea83d1, 0x42181322,
- 0x190fa237, 0x2ffd32c4, 0xaf20c01d, 0x99d250ee, 0xc2c5e1fb,
- 0xf4377108}};
-
-local const z_word_t FAR crc_braid_big_table[][256] = {
- {0x0000000000000000, 0xf390f23600000000, 0xe621e56d00000000,
- 0x15b1175b00000000, 0xcc43cadb00000000, 0x3fd338ed00000000,
- 0x2a622fb600000000, 0xd9f2dd8000000000, 0xd981e56c00000000,
- 0x2a11175a00000000, 0x3fa0000100000000, 0xcc30f23700000000,
- 0x15c22fb700000000, 0xe652dd8100000000, 0xf3e3cada00000000,
- 0x007338ec00000000, 0xb203cbd900000000, 0x419339ef00000000,
- 0x54222eb400000000, 0xa7b2dc8200000000, 0x7e40010200000000,
- 0x8dd0f33400000000, 0x9861e46f00000000, 0x6bf1165900000000,
- 0x6b822eb500000000, 0x9812dc8300000000, 0x8da3cbd800000000,
- 0x7e3339ee00000000, 0xa7c1e46e00000000, 0x5451165800000000,
- 0x41e0010300000000, 0xb270f33500000000, 0x2501e76800000000,
- 0xd691155e00000000, 0xc320020500000000, 0x30b0f03300000000,
- 0xe9422db300000000, 0x1ad2df8500000000, 0x0f63c8de00000000,
- 0xfcf33ae800000000, 0xfc80020400000000, 0x0f10f03200000000,
- 0x1aa1e76900000000, 0xe931155f00000000, 0x30c3c8df00000000,
- 0xc3533ae900000000, 0xd6e22db200000000, 0x2572df8400000000,
- 0x97022cb100000000, 0x6492de8700000000, 0x7123c9dc00000000,
- 0x82b33bea00000000, 0x5b41e66a00000000, 0xa8d1145c00000000,
- 0xbd60030700000000, 0x4ef0f13100000000, 0x4e83c9dd00000000,
- 0xbd133beb00000000, 0xa8a22cb000000000, 0x5b32de8600000000,
- 0x82c0030600000000, 0x7150f13000000000, 0x64e1e66b00000000,
- 0x9771145d00000000, 0x4a02ced100000000, 0xb9923ce700000000,
- 0xac232bbc00000000, 0x5fb3d98a00000000, 0x8641040a00000000,
- 0x75d1f63c00000000, 0x6060e16700000000, 0x93f0135100000000,
- 0x93832bbd00000000, 0x6013d98b00000000, 0x75a2ced000000000,
- 0x86323ce600000000, 0x5fc0e16600000000, 0xac50135000000000,
- 0xb9e1040b00000000, 0x4a71f63d00000000, 0xf801050800000000,
- 0x0b91f73e00000000, 0x1e20e06500000000, 0xedb0125300000000,
- 0x3442cfd300000000, 0xc7d23de500000000, 0xd2632abe00000000,
- 0x21f3d88800000000, 0x2180e06400000000, 0xd210125200000000,
- 0xc7a1050900000000, 0x3431f73f00000000, 0xedc32abf00000000,
- 0x1e53d88900000000, 0x0be2cfd200000000, 0xf8723de400000000,
- 0x6f0329b900000000, 0x9c93db8f00000000, 0x8922ccd400000000,
- 0x7ab23ee200000000, 0xa340e36200000000, 0x50d0115400000000,
- 0x4561060f00000000, 0xb6f1f43900000000, 0xb682ccd500000000,
- 0x45123ee300000000, 0x50a329b800000000, 0xa333db8e00000000,
- 0x7ac1060e00000000, 0x8951f43800000000, 0x9ce0e36300000000,
- 0x6f70115500000000, 0xdd00e26000000000, 0x2e90105600000000,
- 0x3b21070d00000000, 0xc8b1f53b00000000, 0x114328bb00000000,
- 0xe2d3da8d00000000, 0xf762cdd600000000, 0x04f23fe000000000,
- 0x0481070c00000000, 0xf711f53a00000000, 0xe2a0e26100000000,
- 0x1130105700000000, 0xc8c2cdd700000000, 0x3b523fe100000000,
- 0x2ee328ba00000000, 0xdd73da8c00000000, 0xd502ed7800000000,
- 0x26921f4e00000000, 0x3323081500000000, 0xc0b3fa2300000000,
- 0x194127a300000000, 0xead1d59500000000, 0xff60c2ce00000000,
- 0x0cf030f800000000, 0x0c83081400000000, 0xff13fa2200000000,
- 0xeaa2ed7900000000, 0x19321f4f00000000, 0xc0c0c2cf00000000,
- 0x335030f900000000, 0x26e127a200000000, 0xd571d59400000000,
- 0x670126a100000000, 0x9491d49700000000, 0x8120c3cc00000000,
- 0x72b031fa00000000, 0xab42ec7a00000000, 0x58d21e4c00000000,
- 0x4d63091700000000, 0xbef3fb2100000000, 0xbe80c3cd00000000,
- 0x4d1031fb00000000, 0x58a126a000000000, 0xab31d49600000000,
- 0x72c3091600000000, 0x8153fb2000000000, 0x94e2ec7b00000000,
- 0x67721e4d00000000, 0xf0030a1000000000, 0x0393f82600000000,
- 0x1622ef7d00000000, 0xe5b21d4b00000000, 0x3c40c0cb00000000,
- 0xcfd032fd00000000, 0xda6125a600000000, 0x29f1d79000000000,
- 0x2982ef7c00000000, 0xda121d4a00000000, 0xcfa30a1100000000,
- 0x3c33f82700000000, 0xe5c125a700000000, 0x1651d79100000000,
- 0x03e0c0ca00000000, 0xf07032fc00000000, 0x4200c1c900000000,
- 0xb19033ff00000000, 0xa42124a400000000, 0x57b1d69200000000,
- 0x8e430b1200000000, 0x7dd3f92400000000, 0x6862ee7f00000000,
- 0x9bf21c4900000000, 0x9b8124a500000000, 0x6811d69300000000,
- 0x7da0c1c800000000, 0x8e3033fe00000000, 0x57c2ee7e00000000,
- 0xa4521c4800000000, 0xb1e30b1300000000, 0x4273f92500000000,
- 0x9f0023a900000000, 0x6c90d19f00000000, 0x7921c6c400000000,
- 0x8ab134f200000000, 0x5343e97200000000, 0xa0d31b4400000000,
- 0xb5620c1f00000000, 0x46f2fe2900000000, 0x4681c6c500000000,
- 0xb51134f300000000, 0xa0a023a800000000, 0x5330d19e00000000,
- 0x8ac20c1e00000000, 0x7952fe2800000000, 0x6ce3e97300000000,
- 0x9f731b4500000000, 0x2d03e87000000000, 0xde931a4600000000,
- 0xcb220d1d00000000, 0x38b2ff2b00000000, 0xe14022ab00000000,
- 0x12d0d09d00000000, 0x0761c7c600000000, 0xf4f135f000000000,
- 0xf4820d1c00000000, 0x0712ff2a00000000, 0x12a3e87100000000,
- 0xe1331a4700000000, 0x38c1c7c700000000, 0xcb5135f100000000,
- 0xdee022aa00000000, 0x2d70d09c00000000, 0xba01c4c100000000,
- 0x499136f700000000, 0x5c2021ac00000000, 0xafb0d39a00000000,
- 0x76420e1a00000000, 0x85d2fc2c00000000, 0x9063eb7700000000,
- 0x63f3194100000000, 0x638021ad00000000, 0x9010d39b00000000,
- 0x85a1c4c000000000, 0x763136f600000000, 0xafc3eb7600000000,
- 0x5c53194000000000, 0x49e20e1b00000000, 0xba72fc2d00000000,
- 0x08020f1800000000, 0xfb92fd2e00000000, 0xee23ea7500000000,
- 0x1db3184300000000, 0xc441c5c300000000, 0x37d137f500000000,
- 0x226020ae00000000, 0xd1f0d29800000000, 0xd183ea7400000000,
- 0x2213184200000000, 0x37a20f1900000000, 0xc432fd2f00000000,
- 0x1dc020af00000000, 0xee50d29900000000, 0xfbe1c5c200000000,
- 0x087137f400000000},
- {0x0000000000000000, 0x3651822400000000, 0x6ca2044900000000,
- 0x5af3866d00000000, 0xd844099200000000, 0xee158bb600000000,
- 0xb4e60ddb00000000, 0x82b78fff00000000, 0xf18f63ff00000000,
- 0xc7dee1db00000000, 0x9d2d67b600000000, 0xab7ce59200000000,
- 0x29cb6a6d00000000, 0x1f9ae84900000000, 0x45696e2400000000,
- 0x7338ec0000000000, 0xa319b62500000000, 0x9548340100000000,
- 0xcfbbb26c00000000, 0xf9ea304800000000, 0x7b5dbfb700000000,
- 0x4d0c3d9300000000, 0x17ffbbfe00000000, 0x21ae39da00000000,
- 0x5296d5da00000000, 0x64c757fe00000000, 0x3e34d19300000000,
- 0x086553b700000000, 0x8ad2dc4800000000, 0xbc835e6c00000000,
- 0xe670d80100000000, 0xd0215a2500000000, 0x46336c4b00000000,
- 0x7062ee6f00000000, 0x2a91680200000000, 0x1cc0ea2600000000,
- 0x9e7765d900000000, 0xa826e7fd00000000, 0xf2d5619000000000,
- 0xc484e3b400000000, 0xb7bc0fb400000000, 0x81ed8d9000000000,
- 0xdb1e0bfd00000000, 0xed4f89d900000000, 0x6ff8062600000000,
- 0x59a9840200000000, 0x035a026f00000000, 0x350b804b00000000,
- 0xe52ada6e00000000, 0xd37b584a00000000, 0x8988de2700000000,
- 0xbfd95c0300000000, 0x3d6ed3fc00000000, 0x0b3f51d800000000,
- 0x51ccd7b500000000, 0x679d559100000000, 0x14a5b99100000000,
- 0x22f43bb500000000, 0x7807bdd800000000, 0x4e563ffc00000000,
- 0xcce1b00300000000, 0xfab0322700000000, 0xa043b44a00000000,
- 0x9612366e00000000, 0x8c66d89600000000, 0xba375ab200000000,
- 0xe0c4dcdf00000000, 0xd6955efb00000000, 0x5422d10400000000,
- 0x6273532000000000, 0x3880d54d00000000, 0x0ed1576900000000,
- 0x7de9bb6900000000, 0x4bb8394d00000000, 0x114bbf2000000000,
- 0x271a3d0400000000, 0xa5adb2fb00000000, 0x93fc30df00000000,
- 0xc90fb6b200000000, 0xff5e349600000000, 0x2f7f6eb300000000,
- 0x192eec9700000000, 0x43dd6afa00000000, 0x758ce8de00000000,
- 0xf73b672100000000, 0xc16ae50500000000, 0x9b99636800000000,
- 0xadc8e14c00000000, 0xdef00d4c00000000, 0xe8a18f6800000000,
- 0xb252090500000000, 0x84038b2100000000, 0x06b404de00000000,
- 0x30e586fa00000000, 0x6a16009700000000, 0x5c4782b300000000,
- 0xca55b4dd00000000, 0xfc0436f900000000, 0xa6f7b09400000000,
- 0x90a632b000000000, 0x1211bd4f00000000, 0x24403f6b00000000,
- 0x7eb3b90600000000, 0x48e23b2200000000, 0x3bdad72200000000,
- 0x0d8b550600000000, 0x5778d36b00000000, 0x6129514f00000000,
- 0xe39edeb000000000, 0xd5cf5c9400000000, 0x8f3cdaf900000000,
- 0xb96d58dd00000000, 0x694c02f800000000, 0x5f1d80dc00000000,
- 0x05ee06b100000000, 0x33bf849500000000, 0xb1080b6a00000000,
- 0x8759894e00000000, 0xddaa0f2300000000, 0xebfb8d0700000000,
- 0x98c3610700000000, 0xae92e32300000000, 0xf461654e00000000,
- 0xc230e76a00000000, 0x4087689500000000, 0x76d6eab100000000,
- 0x2c256cdc00000000, 0x1a74eef800000000, 0x59cbc1f600000000,
- 0x6f9a43d200000000, 0x3569c5bf00000000, 0x0338479b00000000,
- 0x818fc86400000000, 0xb7de4a4000000000, 0xed2dcc2d00000000,
- 0xdb7c4e0900000000, 0xa844a20900000000, 0x9e15202d00000000,
- 0xc4e6a64000000000, 0xf2b7246400000000, 0x7000ab9b00000000,
- 0x465129bf00000000, 0x1ca2afd200000000, 0x2af32df600000000,
- 0xfad277d300000000, 0xcc83f5f700000000, 0x9670739a00000000,
- 0xa021f1be00000000, 0x22967e4100000000, 0x14c7fc6500000000,
- 0x4e347a0800000000, 0x7865f82c00000000, 0x0b5d142c00000000,
- 0x3d0c960800000000, 0x67ff106500000000, 0x51ae924100000000,
- 0xd3191dbe00000000, 0xe5489f9a00000000, 0xbfbb19f700000000,
- 0x89ea9bd300000000, 0x1ff8adbd00000000, 0x29a92f9900000000,
- 0x735aa9f400000000, 0x450b2bd000000000, 0xc7bca42f00000000,
- 0xf1ed260b00000000, 0xab1ea06600000000, 0x9d4f224200000000,
- 0xee77ce4200000000, 0xd8264c6600000000, 0x82d5ca0b00000000,
- 0xb484482f00000000, 0x3633c7d000000000, 0x006245f400000000,
- 0x5a91c39900000000, 0x6cc041bd00000000, 0xbce11b9800000000,
- 0x8ab099bc00000000, 0xd0431fd100000000, 0xe6129df500000000,
- 0x64a5120a00000000, 0x52f4902e00000000, 0x0807164300000000,
- 0x3e56946700000000, 0x4d6e786700000000, 0x7b3ffa4300000000,
- 0x21cc7c2e00000000, 0x179dfe0a00000000, 0x952a71f500000000,
- 0xa37bf3d100000000, 0xf98875bc00000000, 0xcfd9f79800000000,
- 0xd5ad196000000000, 0xe3fc9b4400000000, 0xb90f1d2900000000,
- 0x8f5e9f0d00000000, 0x0de910f200000000, 0x3bb892d600000000,
- 0x614b14bb00000000, 0x571a969f00000000, 0x24227a9f00000000,
- 0x1273f8bb00000000, 0x48807ed600000000, 0x7ed1fcf200000000,
- 0xfc66730d00000000, 0xca37f12900000000, 0x90c4774400000000,
- 0xa695f56000000000, 0x76b4af4500000000, 0x40e52d6100000000,
- 0x1a16ab0c00000000, 0x2c47292800000000, 0xaef0a6d700000000,
- 0x98a124f300000000, 0xc252a29e00000000, 0xf40320ba00000000,
- 0x873bccba00000000, 0xb16a4e9e00000000, 0xeb99c8f300000000,
- 0xddc84ad700000000, 0x5f7fc52800000000, 0x692e470c00000000,
- 0x33ddc16100000000, 0x058c434500000000, 0x939e752b00000000,
- 0xa5cff70f00000000, 0xff3c716200000000, 0xc96df34600000000,
- 0x4bda7cb900000000, 0x7d8bfe9d00000000, 0x277878f000000000,
- 0x1129fad400000000, 0x621116d400000000, 0x544094f000000000,
- 0x0eb3129d00000000, 0x38e290b900000000, 0xba551f4600000000,
- 0x8c049d6200000000, 0xd6f71b0f00000000, 0xe0a6992b00000000,
- 0x3087c30e00000000, 0x06d6412a00000000, 0x5c25c74700000000,
- 0x6a74456300000000, 0xe8c3ca9c00000000, 0xde9248b800000000,
- 0x8461ced500000000, 0xb2304cf100000000, 0xc108a0f100000000,
- 0xf75922d500000000, 0xadaaa4b800000000, 0x9bfb269c00000000,
- 0x194ca96300000000, 0x2f1d2b4700000000, 0x75eead2a00000000,
- 0x43bf2f0e00000000},
- {0x0000000000000000, 0xc8179ecf00000000, 0xd1294d4400000000,
- 0x193ed38b00000000, 0xa2539a8800000000, 0x6a44044700000000,
- 0x737ad7cc00000000, 0xbb6d490300000000, 0x05a145ca00000000,
- 0xcdb6db0500000000, 0xd488088e00000000, 0x1c9f964100000000,
- 0xa7f2df4200000000, 0x6fe5418d00000000, 0x76db920600000000,
- 0xbecc0cc900000000, 0x4b44fa4f00000000, 0x8353648000000000,
- 0x9a6db70b00000000, 0x527a29c400000000, 0xe91760c700000000,
- 0x2100fe0800000000, 0x383e2d8300000000, 0xf029b34c00000000,
- 0x4ee5bf8500000000, 0x86f2214a00000000, 0x9fccf2c100000000,
- 0x57db6c0e00000000, 0xecb6250d00000000, 0x24a1bbc200000000,
- 0x3d9f684900000000, 0xf588f68600000000, 0x9688f49f00000000,
- 0x5e9f6a5000000000, 0x47a1b9db00000000, 0x8fb6271400000000,
- 0x34db6e1700000000, 0xfcccf0d800000000, 0xe5f2235300000000,
- 0x2de5bd9c00000000, 0x9329b15500000000, 0x5b3e2f9a00000000,
- 0x4200fc1100000000, 0x8a1762de00000000, 0x317a2bdd00000000,
- 0xf96db51200000000, 0xe053669900000000, 0x2844f85600000000,
- 0xddcc0ed000000000, 0x15db901f00000000, 0x0ce5439400000000,
- 0xc4f2dd5b00000000, 0x7f9f945800000000, 0xb7880a9700000000,
- 0xaeb6d91c00000000, 0x66a147d300000000, 0xd86d4b1a00000000,
- 0x107ad5d500000000, 0x0944065e00000000, 0xc153989100000000,
- 0x7a3ed19200000000, 0xb2294f5d00000000, 0xab179cd600000000,
- 0x6300021900000000, 0x6d1798e400000000, 0xa500062b00000000,
- 0xbc3ed5a000000000, 0x74294b6f00000000, 0xcf44026c00000000,
- 0x07539ca300000000, 0x1e6d4f2800000000, 0xd67ad1e700000000,
- 0x68b6dd2e00000000, 0xa0a143e100000000, 0xb99f906a00000000,
- 0x71880ea500000000, 0xcae547a600000000, 0x02f2d96900000000,
- 0x1bcc0ae200000000, 0xd3db942d00000000, 0x265362ab00000000,
- 0xee44fc6400000000, 0xf77a2fef00000000, 0x3f6db12000000000,
- 0x8400f82300000000, 0x4c1766ec00000000, 0x5529b56700000000,
- 0x9d3e2ba800000000, 0x23f2276100000000, 0xebe5b9ae00000000,
- 0xf2db6a2500000000, 0x3accf4ea00000000, 0x81a1bde900000000,
- 0x49b6232600000000, 0x5088f0ad00000000, 0x989f6e6200000000,
- 0xfb9f6c7b00000000, 0x3388f2b400000000, 0x2ab6213f00000000,
- 0xe2a1bff000000000, 0x59ccf6f300000000, 0x91db683c00000000,
- 0x88e5bbb700000000, 0x40f2257800000000, 0xfe3e29b100000000,
- 0x3629b77e00000000, 0x2f1764f500000000, 0xe700fa3a00000000,
- 0x5c6db33900000000, 0x947a2df600000000, 0x8d44fe7d00000000,
- 0x455360b200000000, 0xb0db963400000000, 0x78cc08fb00000000,
- 0x61f2db7000000000, 0xa9e545bf00000000, 0x12880cbc00000000,
- 0xda9f927300000000, 0xc3a141f800000000, 0x0bb6df3700000000,
- 0xb57ad3fe00000000, 0x7d6d4d3100000000, 0x64539eba00000000,
- 0xac44007500000000, 0x1729497600000000, 0xdf3ed7b900000000,
- 0xc600043200000000, 0x0e179afd00000000, 0x9b28411200000000,
- 0x533fdfdd00000000, 0x4a010c5600000000, 0x8216929900000000,
- 0x397bdb9a00000000, 0xf16c455500000000, 0xe85296de00000000,
- 0x2045081100000000, 0x9e8904d800000000, 0x569e9a1700000000,
- 0x4fa0499c00000000, 0x87b7d75300000000, 0x3cda9e5000000000,
- 0xf4cd009f00000000, 0xedf3d31400000000, 0x25e44ddb00000000,
- 0xd06cbb5d00000000, 0x187b259200000000, 0x0145f61900000000,
- 0xc95268d600000000, 0x723f21d500000000, 0xba28bf1a00000000,
- 0xa3166c9100000000, 0x6b01f25e00000000, 0xd5cdfe9700000000,
- 0x1dda605800000000, 0x04e4b3d300000000, 0xccf32d1c00000000,
- 0x779e641f00000000, 0xbf89fad000000000, 0xa6b7295b00000000,
- 0x6ea0b79400000000, 0x0da0b58d00000000, 0xc5b72b4200000000,
- 0xdc89f8c900000000, 0x149e660600000000, 0xaff32f0500000000,
- 0x67e4b1ca00000000, 0x7eda624100000000, 0xb6cdfc8e00000000,
- 0x0801f04700000000, 0xc0166e8800000000, 0xd928bd0300000000,
- 0x113f23cc00000000, 0xaa526acf00000000, 0x6245f40000000000,
- 0x7b7b278b00000000, 0xb36cb94400000000, 0x46e44fc200000000,
- 0x8ef3d10d00000000, 0x97cd028600000000, 0x5fda9c4900000000,
- 0xe4b7d54a00000000, 0x2ca04b8500000000, 0x359e980e00000000,
- 0xfd8906c100000000, 0x43450a0800000000, 0x8b5294c700000000,
- 0x926c474c00000000, 0x5a7bd98300000000, 0xe116908000000000,
- 0x29010e4f00000000, 0x303fddc400000000, 0xf828430b00000000,
- 0xf63fd9f600000000, 0x3e28473900000000, 0x271694b200000000,
- 0xef010a7d00000000, 0x546c437e00000000, 0x9c7bddb100000000,
- 0x85450e3a00000000, 0x4d5290f500000000, 0xf39e9c3c00000000,
- 0x3b8902f300000000, 0x22b7d17800000000, 0xeaa04fb700000000,
- 0x51cd06b400000000, 0x99da987b00000000, 0x80e44bf000000000,
- 0x48f3d53f00000000, 0xbd7b23b900000000, 0x756cbd7600000000,
- 0x6c526efd00000000, 0xa445f03200000000, 0x1f28b93100000000,
- 0xd73f27fe00000000, 0xce01f47500000000, 0x06166aba00000000,
- 0xb8da667300000000, 0x70cdf8bc00000000, 0x69f32b3700000000,
- 0xa1e4b5f800000000, 0x1a89fcfb00000000, 0xd29e623400000000,
- 0xcba0b1bf00000000, 0x03b72f7000000000, 0x60b72d6900000000,
- 0xa8a0b3a600000000, 0xb19e602d00000000, 0x7989fee200000000,
- 0xc2e4b7e100000000, 0x0af3292e00000000, 0x13cdfaa500000000,
- 0xdbda646a00000000, 0x651668a300000000, 0xad01f66c00000000,
- 0xb43f25e700000000, 0x7c28bb2800000000, 0xc745f22b00000000,
- 0x0f526ce400000000, 0x166cbf6f00000000, 0xde7b21a000000000,
- 0x2bf3d72600000000, 0xe3e449e900000000, 0xfada9a6200000000,
- 0x32cd04ad00000000, 0x89a04dae00000000, 0x41b7d36100000000,
- 0x588900ea00000000, 0x909e9e2500000000, 0x2e5292ec00000000,
- 0xe6450c2300000000, 0xff7bdfa800000000, 0x376c416700000000,
- 0x8c01086400000000, 0x441696ab00000000, 0x5d28452000000000,
- 0x953fdbef00000000},
- {0x0000000000000000, 0x95d4709500000000, 0x6baf90f100000000,
- 0xfe7be06400000000, 0x9758503800000000, 0x028c20ad00000000,
- 0xfcf7c0c900000000, 0x6923b05c00000000, 0x2eb1a07000000000,
- 0xbb65d0e500000000, 0x451e308100000000, 0xd0ca401400000000,
- 0xb9e9f04800000000, 0x2c3d80dd00000000, 0xd24660b900000000,
- 0x4792102c00000000, 0x5c6241e100000000, 0xc9b6317400000000,
- 0x37cdd11000000000, 0xa219a18500000000, 0xcb3a11d900000000,
- 0x5eee614c00000000, 0xa095812800000000, 0x3541f1bd00000000,
- 0x72d3e19100000000, 0xe707910400000000, 0x197c716000000000,
- 0x8ca801f500000000, 0xe58bb1a900000000, 0x705fc13c00000000,
- 0x8e24215800000000, 0x1bf051cd00000000, 0xf9c2f31900000000,
- 0x6c16838c00000000, 0x926d63e800000000, 0x07b9137d00000000,
- 0x6e9aa32100000000, 0xfb4ed3b400000000, 0x053533d000000000,
- 0x90e1434500000000, 0xd773536900000000, 0x42a723fc00000000,
- 0xbcdcc39800000000, 0x2908b30d00000000, 0x402b035100000000,
- 0xd5ff73c400000000, 0x2b8493a000000000, 0xbe50e33500000000,
- 0xa5a0b2f800000000, 0x3074c26d00000000, 0xce0f220900000000,
- 0x5bdb529c00000000, 0x32f8e2c000000000, 0xa72c925500000000,
- 0x5957723100000000, 0xcc8302a400000000, 0x8b11128800000000,
- 0x1ec5621d00000000, 0xe0be827900000000, 0x756af2ec00000000,
- 0x1c4942b000000000, 0x899d322500000000, 0x77e6d24100000000,
- 0xe232a2d400000000, 0xf285e73300000000, 0x675197a600000000,
- 0x992a77c200000000, 0x0cfe075700000000, 0x65ddb70b00000000,
- 0xf009c79e00000000, 0x0e7227fa00000000, 0x9ba6576f00000000,
- 0xdc34474300000000, 0x49e037d600000000, 0xb79bd7b200000000,
- 0x224fa72700000000, 0x4b6c177b00000000, 0xdeb867ee00000000,
- 0x20c3878a00000000, 0xb517f71f00000000, 0xaee7a6d200000000,
- 0x3b33d64700000000, 0xc548362300000000, 0x509c46b600000000,
- 0x39bff6ea00000000, 0xac6b867f00000000, 0x5210661b00000000,
- 0xc7c4168e00000000, 0x805606a200000000, 0x1582763700000000,
- 0xebf9965300000000, 0x7e2de6c600000000, 0x170e569a00000000,
- 0x82da260f00000000, 0x7ca1c66b00000000, 0xe975b6fe00000000,
- 0x0b47142a00000000, 0x9e9364bf00000000, 0x60e884db00000000,
- 0xf53cf44e00000000, 0x9c1f441200000000, 0x09cb348700000000,
- 0xf7b0d4e300000000, 0x6264a47600000000, 0x25f6b45a00000000,
- 0xb022c4cf00000000, 0x4e5924ab00000000, 0xdb8d543e00000000,
- 0xb2aee46200000000, 0x277a94f700000000, 0xd901749300000000,
- 0x4cd5040600000000, 0x572555cb00000000, 0xc2f1255e00000000,
- 0x3c8ac53a00000000, 0xa95eb5af00000000, 0xc07d05f300000000,
- 0x55a9756600000000, 0xabd2950200000000, 0x3e06e59700000000,
- 0x7994f5bb00000000, 0xec40852e00000000, 0x123b654a00000000,
- 0x87ef15df00000000, 0xeecca58300000000, 0x7b18d51600000000,
- 0x8563357200000000, 0x10b745e700000000, 0xe40bcf6700000000,
- 0x71dfbff200000000, 0x8fa45f9600000000, 0x1a702f0300000000,
- 0x73539f5f00000000, 0xe687efca00000000, 0x18fc0fae00000000,
- 0x8d287f3b00000000, 0xcaba6f1700000000, 0x5f6e1f8200000000,
- 0xa115ffe600000000, 0x34c18f7300000000, 0x5de23f2f00000000,
- 0xc8364fba00000000, 0x364dafde00000000, 0xa399df4b00000000,
- 0xb8698e8600000000, 0x2dbdfe1300000000, 0xd3c61e7700000000,
- 0x46126ee200000000, 0x2f31debe00000000, 0xbae5ae2b00000000,
- 0x449e4e4f00000000, 0xd14a3eda00000000, 0x96d82ef600000000,
- 0x030c5e6300000000, 0xfd77be0700000000, 0x68a3ce9200000000,
- 0x01807ece00000000, 0x94540e5b00000000, 0x6a2fee3f00000000,
- 0xfffb9eaa00000000, 0x1dc93c7e00000000, 0x881d4ceb00000000,
- 0x7666ac8f00000000, 0xe3b2dc1a00000000, 0x8a916c4600000000,
- 0x1f451cd300000000, 0xe13efcb700000000, 0x74ea8c2200000000,
- 0x33789c0e00000000, 0xa6acec9b00000000, 0x58d70cff00000000,
- 0xcd037c6a00000000, 0xa420cc3600000000, 0x31f4bca300000000,
- 0xcf8f5cc700000000, 0x5a5b2c5200000000, 0x41ab7d9f00000000,
- 0xd47f0d0a00000000, 0x2a04ed6e00000000, 0xbfd09dfb00000000,
- 0xd6f32da700000000, 0x43275d3200000000, 0xbd5cbd5600000000,
- 0x2888cdc300000000, 0x6f1addef00000000, 0xfacead7a00000000,
- 0x04b54d1e00000000, 0x91613d8b00000000, 0xf8428dd700000000,
- 0x6d96fd4200000000, 0x93ed1d2600000000, 0x06396db300000000,
- 0x168e285400000000, 0x835a58c100000000, 0x7d21b8a500000000,
- 0xe8f5c83000000000, 0x81d6786c00000000, 0x140208f900000000,
- 0xea79e89d00000000, 0x7fad980800000000, 0x383f882400000000,
- 0xadebf8b100000000, 0x539018d500000000, 0xc644684000000000,
- 0xaf67d81c00000000, 0x3ab3a88900000000, 0xc4c848ed00000000,
- 0x511c387800000000, 0x4aec69b500000000, 0xdf38192000000000,
- 0x2143f94400000000, 0xb49789d100000000, 0xddb4398d00000000,
- 0x4860491800000000, 0xb61ba97c00000000, 0x23cfd9e900000000,
- 0x645dc9c500000000, 0xf189b95000000000, 0x0ff2593400000000,
- 0x9a2629a100000000, 0xf30599fd00000000, 0x66d1e96800000000,
- 0x98aa090c00000000, 0x0d7e799900000000, 0xef4cdb4d00000000,
- 0x7a98abd800000000, 0x84e34bbc00000000, 0x11373b2900000000,
- 0x78148b7500000000, 0xedc0fbe000000000, 0x13bb1b8400000000,
- 0x866f6b1100000000, 0xc1fd7b3d00000000, 0x54290ba800000000,
- 0xaa52ebcc00000000, 0x3f869b5900000000, 0x56a52b0500000000,
- 0xc3715b9000000000, 0x3d0abbf400000000, 0xa8decb6100000000,
- 0xb32e9aac00000000, 0x26faea3900000000, 0xd8810a5d00000000,
- 0x4d557ac800000000, 0x2476ca9400000000, 0xb1a2ba0100000000,
- 0x4fd95a6500000000, 0xda0d2af000000000, 0x9d9f3adc00000000,
- 0x084b4a4900000000, 0xf630aa2d00000000, 0x63e4dab800000000,
- 0x0ac76ae400000000, 0x9f131a7100000000, 0x6168fa1500000000,
- 0xf4bc8a8000000000},
- {0x0000000000000000, 0x1f17f08000000000, 0x7f2891da00000000,
- 0x603f615a00000000, 0xbf56536e00000000, 0xa041a3ee00000000,
- 0xc07ec2b400000000, 0xdf69323400000000, 0x7eada6dc00000000,
- 0x61ba565c00000000, 0x0185370600000000, 0x1e92c78600000000,
- 0xc1fbf5b200000000, 0xdeec053200000000, 0xbed3646800000000,
- 0xa1c494e800000000, 0xbd5c3c6200000000, 0xa24bcce200000000,
- 0xc274adb800000000, 0xdd635d3800000000, 0x020a6f0c00000000,
- 0x1d1d9f8c00000000, 0x7d22fed600000000, 0x62350e5600000000,
- 0xc3f19abe00000000, 0xdce66a3e00000000, 0xbcd90b6400000000,
- 0xa3cefbe400000000, 0x7ca7c9d000000000, 0x63b0395000000000,
- 0x038f580a00000000, 0x1c98a88a00000000, 0x7ab978c400000000,
- 0x65ae884400000000, 0x0591e91e00000000, 0x1a86199e00000000,
- 0xc5ef2baa00000000, 0xdaf8db2a00000000, 0xbac7ba7000000000,
- 0xa5d04af000000000, 0x0414de1800000000, 0x1b032e9800000000,
- 0x7b3c4fc200000000, 0x642bbf4200000000, 0xbb428d7600000000,
- 0xa4557df600000000, 0xc46a1cac00000000, 0xdb7dec2c00000000,
- 0xc7e544a600000000, 0xd8f2b42600000000, 0xb8cdd57c00000000,
- 0xa7da25fc00000000, 0x78b317c800000000, 0x67a4e74800000000,
- 0x079b861200000000, 0x188c769200000000, 0xb948e27a00000000,
- 0xa65f12fa00000000, 0xc66073a000000000, 0xd977832000000000,
- 0x061eb11400000000, 0x1909419400000000, 0x793620ce00000000,
- 0x6621d04e00000000, 0xb574805300000000, 0xaa6370d300000000,
- 0xca5c118900000000, 0xd54be10900000000, 0x0a22d33d00000000,
- 0x153523bd00000000, 0x750a42e700000000, 0x6a1db26700000000,
- 0xcbd9268f00000000, 0xd4ced60f00000000, 0xb4f1b75500000000,
- 0xabe647d500000000, 0x748f75e100000000, 0x6b98856100000000,
- 0x0ba7e43b00000000, 0x14b014bb00000000, 0x0828bc3100000000,
- 0x173f4cb100000000, 0x77002deb00000000, 0x6817dd6b00000000,
- 0xb77eef5f00000000, 0xa8691fdf00000000, 0xc8567e8500000000,
- 0xd7418e0500000000, 0x76851aed00000000, 0x6992ea6d00000000,
- 0x09ad8b3700000000, 0x16ba7bb700000000, 0xc9d3498300000000,
- 0xd6c4b90300000000, 0xb6fbd85900000000, 0xa9ec28d900000000,
- 0xcfcdf89700000000, 0xd0da081700000000, 0xb0e5694d00000000,
- 0xaff299cd00000000, 0x709babf900000000, 0x6f8c5b7900000000,
- 0x0fb33a2300000000, 0x10a4caa300000000, 0xb1605e4b00000000,
- 0xae77aecb00000000, 0xce48cf9100000000, 0xd15f3f1100000000,
- 0x0e360d2500000000, 0x1121fda500000000, 0x711e9cff00000000,
- 0x6e096c7f00000000, 0x7291c4f500000000, 0x6d86347500000000,
- 0x0db9552f00000000, 0x12aea5af00000000, 0xcdc7979b00000000,
- 0xd2d0671b00000000, 0xb2ef064100000000, 0xadf8f6c100000000,
- 0x0c3c622900000000, 0x132b92a900000000, 0x7314f3f300000000,
- 0x6c03037300000000, 0xb36a314700000000, 0xac7dc1c700000000,
- 0xcc42a09d00000000, 0xd355501d00000000, 0x6ae900a700000000,
- 0x75fef02700000000, 0x15c1917d00000000, 0x0ad661fd00000000,
- 0xd5bf53c900000000, 0xcaa8a34900000000, 0xaa97c21300000000,
- 0xb580329300000000, 0x1444a67b00000000, 0x0b5356fb00000000,
- 0x6b6c37a100000000, 0x747bc72100000000, 0xab12f51500000000,
- 0xb405059500000000, 0xd43a64cf00000000, 0xcb2d944f00000000,
- 0xd7b53cc500000000, 0xc8a2cc4500000000, 0xa89dad1f00000000,
- 0xb78a5d9f00000000, 0x68e36fab00000000, 0x77f49f2b00000000,
- 0x17cbfe7100000000, 0x08dc0ef100000000, 0xa9189a1900000000,
- 0xb60f6a9900000000, 0xd6300bc300000000, 0xc927fb4300000000,
- 0x164ec97700000000, 0x095939f700000000, 0x696658ad00000000,
- 0x7671a82d00000000, 0x1050786300000000, 0x0f4788e300000000,
- 0x6f78e9b900000000, 0x706f193900000000, 0xaf062b0d00000000,
- 0xb011db8d00000000, 0xd02ebad700000000, 0xcf394a5700000000,
- 0x6efddebf00000000, 0x71ea2e3f00000000, 0x11d54f6500000000,
- 0x0ec2bfe500000000, 0xd1ab8dd100000000, 0xcebc7d5100000000,
- 0xae831c0b00000000, 0xb194ec8b00000000, 0xad0c440100000000,
- 0xb21bb48100000000, 0xd224d5db00000000, 0xcd33255b00000000,
- 0x125a176f00000000, 0x0d4de7ef00000000, 0x6d7286b500000000,
- 0x7265763500000000, 0xd3a1e2dd00000000, 0xccb6125d00000000,
- 0xac89730700000000, 0xb39e838700000000, 0x6cf7b1b300000000,
- 0x73e0413300000000, 0x13df206900000000, 0x0cc8d0e900000000,
- 0xdf9d80f400000000, 0xc08a707400000000, 0xa0b5112e00000000,
- 0xbfa2e1ae00000000, 0x60cbd39a00000000, 0x7fdc231a00000000,
- 0x1fe3424000000000, 0x00f4b2c000000000, 0xa130262800000000,
- 0xbe27d6a800000000, 0xde18b7f200000000, 0xc10f477200000000,
- 0x1e66754600000000, 0x017185c600000000, 0x614ee49c00000000,
- 0x7e59141c00000000, 0x62c1bc9600000000, 0x7dd64c1600000000,
- 0x1de92d4c00000000, 0x02feddcc00000000, 0xdd97eff800000000,
- 0xc2801f7800000000, 0xa2bf7e2200000000, 0xbda88ea200000000,
- 0x1c6c1a4a00000000, 0x037beaca00000000, 0x63448b9000000000,
- 0x7c537b1000000000, 0xa33a492400000000, 0xbc2db9a400000000,
- 0xdc12d8fe00000000, 0xc305287e00000000, 0xa524f83000000000,
- 0xba3308b000000000, 0xda0c69ea00000000, 0xc51b996a00000000,
- 0x1a72ab5e00000000, 0x05655bde00000000, 0x655a3a8400000000,
- 0x7a4dca0400000000, 0xdb895eec00000000, 0xc49eae6c00000000,
- 0xa4a1cf3600000000, 0xbbb63fb600000000, 0x64df0d8200000000,
- 0x7bc8fd0200000000, 0x1bf79c5800000000, 0x04e06cd800000000,
- 0x1878c45200000000, 0x076f34d200000000, 0x6750558800000000,
- 0x7847a50800000000, 0xa72e973c00000000, 0xb83967bc00000000,
- 0xd80606e600000000, 0xc711f66600000000, 0x66d5628e00000000,
- 0x79c2920e00000000, 0x19fdf35400000000, 0x06ea03d400000000,
- 0xd98331e000000000, 0xc694c16000000000, 0xa6aba03a00000000,
- 0xb9bc50ba00000000},
- {0x0000000000000000, 0xe2fd888d00000000, 0x85fd60c000000000,
- 0x6700e84d00000000, 0x4bfdb05b00000000, 0xa90038d600000000,
- 0xce00d09b00000000, 0x2cfd581600000000, 0x96fa61b700000000,
- 0x7407e93a00000000, 0x1307017700000000, 0xf1fa89fa00000000,
- 0xdd07d1ec00000000, 0x3ffa596100000000, 0x58fab12c00000000,
- 0xba0739a100000000, 0x6df3b2b500000000, 0x8f0e3a3800000000,
- 0xe80ed27500000000, 0x0af35af800000000, 0x260e02ee00000000,
- 0xc4f38a6300000000, 0xa3f3622e00000000, 0x410eeaa300000000,
- 0xfb09d30200000000, 0x19f45b8f00000000, 0x7ef4b3c200000000,
- 0x9c093b4f00000000, 0xb0f4635900000000, 0x5209ebd400000000,
- 0x3509039900000000, 0xd7f48b1400000000, 0x9be014b000000000,
- 0x791d9c3d00000000, 0x1e1d747000000000, 0xfce0fcfd00000000,
- 0xd01da4eb00000000, 0x32e02c6600000000, 0x55e0c42b00000000,
- 0xb71d4ca600000000, 0x0d1a750700000000, 0xefe7fd8a00000000,
- 0x88e715c700000000, 0x6a1a9d4a00000000, 0x46e7c55c00000000,
- 0xa41a4dd100000000, 0xc31aa59c00000000, 0x21e72d1100000000,
- 0xf613a60500000000, 0x14ee2e8800000000, 0x73eec6c500000000,
- 0x91134e4800000000, 0xbdee165e00000000, 0x5f139ed300000000,
- 0x3813769e00000000, 0xdaeefe1300000000, 0x60e9c7b200000000,
- 0x82144f3f00000000, 0xe514a77200000000, 0x07e92fff00000000,
- 0x2b1477e900000000, 0xc9e9ff6400000000, 0xaee9172900000000,
- 0x4c149fa400000000, 0x77c758bb00000000, 0x953ad03600000000,
- 0xf23a387b00000000, 0x10c7b0f600000000, 0x3c3ae8e000000000,
- 0xdec7606d00000000, 0xb9c7882000000000, 0x5b3a00ad00000000,
- 0xe13d390c00000000, 0x03c0b18100000000, 0x64c059cc00000000,
- 0x863dd14100000000, 0xaac0895700000000, 0x483d01da00000000,
- 0x2f3de99700000000, 0xcdc0611a00000000, 0x1a34ea0e00000000,
- 0xf8c9628300000000, 0x9fc98ace00000000, 0x7d34024300000000,
- 0x51c95a5500000000, 0xb334d2d800000000, 0xd4343a9500000000,
- 0x36c9b21800000000, 0x8cce8bb900000000, 0x6e33033400000000,
- 0x0933eb7900000000, 0xebce63f400000000, 0xc7333be200000000,
- 0x25ceb36f00000000, 0x42ce5b2200000000, 0xa033d3af00000000,
- 0xec274c0b00000000, 0x0edac48600000000, 0x69da2ccb00000000,
- 0x8b27a44600000000, 0xa7dafc5000000000, 0x452774dd00000000,
- 0x22279c9000000000, 0xc0da141d00000000, 0x7add2dbc00000000,
- 0x9820a53100000000, 0xff204d7c00000000, 0x1dddc5f100000000,
- 0x31209de700000000, 0xd3dd156a00000000, 0xb4ddfd2700000000,
- 0x562075aa00000000, 0x81d4febe00000000, 0x6329763300000000,
- 0x04299e7e00000000, 0xe6d416f300000000, 0xca294ee500000000,
- 0x28d4c66800000000, 0x4fd42e2500000000, 0xad29a6a800000000,
- 0x172e9f0900000000, 0xf5d3178400000000, 0x92d3ffc900000000,
- 0x702e774400000000, 0x5cd32f5200000000, 0xbe2ea7df00000000,
- 0xd92e4f9200000000, 0x3bd3c71f00000000, 0xaf88c0ad00000000,
- 0x4d75482000000000, 0x2a75a06d00000000, 0xc88828e000000000,
- 0xe47570f600000000, 0x0688f87b00000000, 0x6188103600000000,
- 0x837598bb00000000, 0x3972a11a00000000, 0xdb8f299700000000,
- 0xbc8fc1da00000000, 0x5e72495700000000, 0x728f114100000000,
- 0x907299cc00000000, 0xf772718100000000, 0x158ff90c00000000,
- 0xc27b721800000000, 0x2086fa9500000000, 0x478612d800000000,
- 0xa57b9a5500000000, 0x8986c24300000000, 0x6b7b4ace00000000,
- 0x0c7ba28300000000, 0xee862a0e00000000, 0x548113af00000000,
- 0xb67c9b2200000000, 0xd17c736f00000000, 0x3381fbe200000000,
- 0x1f7ca3f400000000, 0xfd812b7900000000, 0x9a81c33400000000,
- 0x787c4bb900000000, 0x3468d41d00000000, 0xd6955c9000000000,
- 0xb195b4dd00000000, 0x53683c5000000000, 0x7f95644600000000,
- 0x9d68eccb00000000, 0xfa68048600000000, 0x18958c0b00000000,
- 0xa292b5aa00000000, 0x406f3d2700000000, 0x276fd56a00000000,
- 0xc5925de700000000, 0xe96f05f100000000, 0x0b928d7c00000000,
- 0x6c92653100000000, 0x8e6fedbc00000000, 0x599b66a800000000,
- 0xbb66ee2500000000, 0xdc66066800000000, 0x3e9b8ee500000000,
- 0x1266d6f300000000, 0xf09b5e7e00000000, 0x979bb63300000000,
- 0x75663ebe00000000, 0xcf61071f00000000, 0x2d9c8f9200000000,
- 0x4a9c67df00000000, 0xa861ef5200000000, 0x849cb74400000000,
- 0x66613fc900000000, 0x0161d78400000000, 0xe39c5f0900000000,
- 0xd84f981600000000, 0x3ab2109b00000000, 0x5db2f8d600000000,
- 0xbf4f705b00000000, 0x93b2284d00000000, 0x714fa0c000000000,
- 0x164f488d00000000, 0xf4b2c00000000000, 0x4eb5f9a100000000,
- 0xac48712c00000000, 0xcb48996100000000, 0x29b511ec00000000,
- 0x054849fa00000000, 0xe7b5c17700000000, 0x80b5293a00000000,
- 0x6248a1b700000000, 0xb5bc2aa300000000, 0x5741a22e00000000,
- 0x30414a6300000000, 0xd2bcc2ee00000000, 0xfe419af800000000,
- 0x1cbc127500000000, 0x7bbcfa3800000000, 0x994172b500000000,
- 0x23464b1400000000, 0xc1bbc39900000000, 0xa6bb2bd400000000,
- 0x4446a35900000000, 0x68bbfb4f00000000, 0x8a4673c200000000,
- 0xed469b8f00000000, 0x0fbb130200000000, 0x43af8ca600000000,
- 0xa152042b00000000, 0xc652ec6600000000, 0x24af64eb00000000,
- 0x08523cfd00000000, 0xeaafb47000000000, 0x8daf5c3d00000000,
- 0x6f52d4b000000000, 0xd555ed1100000000, 0x37a8659c00000000,
- 0x50a88dd100000000, 0xb255055c00000000, 0x9ea85d4a00000000,
- 0x7c55d5c700000000, 0x1b553d8a00000000, 0xf9a8b50700000000,
- 0x2e5c3e1300000000, 0xcca1b69e00000000, 0xaba15ed300000000,
- 0x495cd65e00000000, 0x65a18e4800000000, 0x875c06c500000000,
- 0xe05cee8800000000, 0x02a1660500000000, 0xb8a65fa400000000,
- 0x5a5bd72900000000, 0x3d5b3f6400000000, 0xdfa6b7e900000000,
- 0xf35befff00000000, 0x11a6677200000000, 0x76a68f3f00000000,
- 0x945b07b200000000},
- {0x0000000000000000, 0xa90b894e00000000, 0x5217129d00000000,
- 0xfb1c9bd300000000, 0xe52855e100000000, 0x4c23dcaf00000000,
- 0xb73f477c00000000, 0x1e34ce3200000000, 0x8b57db1900000000,
- 0x225c525700000000, 0xd940c98400000000, 0x704b40ca00000000,
- 0x6e7f8ef800000000, 0xc77407b600000000, 0x3c689c6500000000,
- 0x9563152b00000000, 0x16afb63300000000, 0xbfa43f7d00000000,
- 0x44b8a4ae00000000, 0xedb32de000000000, 0xf387e3d200000000,
- 0x5a8c6a9c00000000, 0xa190f14f00000000, 0x089b780100000000,
- 0x9df86d2a00000000, 0x34f3e46400000000, 0xcfef7fb700000000,
- 0x66e4f6f900000000, 0x78d038cb00000000, 0xd1dbb18500000000,
- 0x2ac72a5600000000, 0x83cca31800000000, 0x2c5e6d6700000000,
- 0x8555e42900000000, 0x7e497ffa00000000, 0xd742f6b400000000,
- 0xc976388600000000, 0x607db1c800000000, 0x9b612a1b00000000,
- 0x326aa35500000000, 0xa709b67e00000000, 0x0e023f3000000000,
- 0xf51ea4e300000000, 0x5c152dad00000000, 0x4221e39f00000000,
- 0xeb2a6ad100000000, 0x1036f10200000000, 0xb93d784c00000000,
- 0x3af1db5400000000, 0x93fa521a00000000, 0x68e6c9c900000000,
- 0xc1ed408700000000, 0xdfd98eb500000000, 0x76d207fb00000000,
- 0x8dce9c2800000000, 0x24c5156600000000, 0xb1a6004d00000000,
- 0x18ad890300000000, 0xe3b112d000000000, 0x4aba9b9e00000000,
- 0x548e55ac00000000, 0xfd85dce200000000, 0x0699473100000000,
- 0xaf92ce7f00000000, 0x58bcdace00000000, 0xf1b7538000000000,
- 0x0aabc85300000000, 0xa3a0411d00000000, 0xbd948f2f00000000,
- 0x149f066100000000, 0xef839db200000000, 0x468814fc00000000,
- 0xd3eb01d700000000, 0x7ae0889900000000, 0x81fc134a00000000,
- 0x28f79a0400000000, 0x36c3543600000000, 0x9fc8dd7800000000,
- 0x64d446ab00000000, 0xcddfcfe500000000, 0x4e136cfd00000000,
- 0xe718e5b300000000, 0x1c047e6000000000, 0xb50ff72e00000000,
- 0xab3b391c00000000, 0x0230b05200000000, 0xf92c2b8100000000,
- 0x5027a2cf00000000, 0xc544b7e400000000, 0x6c4f3eaa00000000,
- 0x9753a57900000000, 0x3e582c3700000000, 0x206ce20500000000,
- 0x89676b4b00000000, 0x727bf09800000000, 0xdb7079d600000000,
- 0x74e2b7a900000000, 0xdde93ee700000000, 0x26f5a53400000000,
- 0x8ffe2c7a00000000, 0x91cae24800000000, 0x38c16b0600000000,
- 0xc3ddf0d500000000, 0x6ad6799b00000000, 0xffb56cb000000000,
- 0x56bee5fe00000000, 0xada27e2d00000000, 0x04a9f76300000000,
- 0x1a9d395100000000, 0xb396b01f00000000, 0x488a2bcc00000000,
- 0xe181a28200000000, 0x624d019a00000000, 0xcb4688d400000000,
- 0x305a130700000000, 0x99519a4900000000, 0x8765547b00000000,
- 0x2e6edd3500000000, 0xd57246e600000000, 0x7c79cfa800000000,
- 0xe91ada8300000000, 0x401153cd00000000, 0xbb0dc81e00000000,
- 0x1206415000000000, 0x0c328f6200000000, 0xa539062c00000000,
- 0x5e259dff00000000, 0xf72e14b100000000, 0xf17ec44600000000,
- 0x58754d0800000000, 0xa369d6db00000000, 0x0a625f9500000000,
- 0x145691a700000000, 0xbd5d18e900000000, 0x4641833a00000000,
- 0xef4a0a7400000000, 0x7a291f5f00000000, 0xd322961100000000,
- 0x283e0dc200000000, 0x8135848c00000000, 0x9f014abe00000000,
- 0x360ac3f000000000, 0xcd16582300000000, 0x641dd16d00000000,
- 0xe7d1727500000000, 0x4edafb3b00000000, 0xb5c660e800000000,
- 0x1ccde9a600000000, 0x02f9279400000000, 0xabf2aeda00000000,
- 0x50ee350900000000, 0xf9e5bc4700000000, 0x6c86a96c00000000,
- 0xc58d202200000000, 0x3e91bbf100000000, 0x979a32bf00000000,
- 0x89aefc8d00000000, 0x20a575c300000000, 0xdbb9ee1000000000,
- 0x72b2675e00000000, 0xdd20a92100000000, 0x742b206f00000000,
- 0x8f37bbbc00000000, 0x263c32f200000000, 0x3808fcc000000000,
- 0x9103758e00000000, 0x6a1fee5d00000000, 0xc314671300000000,
- 0x5677723800000000, 0xff7cfb7600000000, 0x046060a500000000,
- 0xad6be9eb00000000, 0xb35f27d900000000, 0x1a54ae9700000000,
- 0xe148354400000000, 0x4843bc0a00000000, 0xcb8f1f1200000000,
- 0x6284965c00000000, 0x99980d8f00000000, 0x309384c100000000,
- 0x2ea74af300000000, 0x87acc3bd00000000, 0x7cb0586e00000000,
- 0xd5bbd12000000000, 0x40d8c40b00000000, 0xe9d34d4500000000,
- 0x12cfd69600000000, 0xbbc45fd800000000, 0xa5f091ea00000000,
- 0x0cfb18a400000000, 0xf7e7837700000000, 0x5eec0a3900000000,
- 0xa9c21e8800000000, 0x00c997c600000000, 0xfbd50c1500000000,
- 0x52de855b00000000, 0x4cea4b6900000000, 0xe5e1c22700000000,
- 0x1efd59f400000000, 0xb7f6d0ba00000000, 0x2295c59100000000,
- 0x8b9e4cdf00000000, 0x7082d70c00000000, 0xd9895e4200000000,
- 0xc7bd907000000000, 0x6eb6193e00000000, 0x95aa82ed00000000,
- 0x3ca10ba300000000, 0xbf6da8bb00000000, 0x166621f500000000,
- 0xed7aba2600000000, 0x4471336800000000, 0x5a45fd5a00000000,
- 0xf34e741400000000, 0x0852efc700000000, 0xa159668900000000,
- 0x343a73a200000000, 0x9d31faec00000000, 0x662d613f00000000,
- 0xcf26e87100000000, 0xd112264300000000, 0x7819af0d00000000,
- 0x830534de00000000, 0x2a0ebd9000000000, 0x859c73ef00000000,
- 0x2c97faa100000000, 0xd78b617200000000, 0x7e80e83c00000000,
- 0x60b4260e00000000, 0xc9bfaf4000000000, 0x32a3349300000000,
- 0x9ba8bddd00000000, 0x0ecba8f600000000, 0xa7c021b800000000,
- 0x5cdcba6b00000000, 0xf5d7332500000000, 0xebe3fd1700000000,
- 0x42e8745900000000, 0xb9f4ef8a00000000, 0x10ff66c400000000,
- 0x9333c5dc00000000, 0x3a384c9200000000, 0xc124d74100000000,
- 0x682f5e0f00000000, 0x761b903d00000000, 0xdf10197300000000,
- 0x240c82a000000000, 0x8d070bee00000000, 0x18641ec500000000,
- 0xb16f978b00000000, 0x4a730c5800000000, 0xe378851600000000,
- 0xfd4c4b2400000000, 0x5447c26a00000000, 0xaf5b59b900000000,
- 0x0650d0f700000000},
- {0x0000000000000000, 0x479244af00000000, 0xcf22f88500000000,
- 0x88b0bc2a00000000, 0xdf4381d000000000, 0x98d1c57f00000000,
- 0x1061795500000000, 0x57f33dfa00000000, 0xff81737a00000000,
- 0xb81337d500000000, 0x30a38bff00000000, 0x7731cf5000000000,
- 0x20c2f2aa00000000, 0x6750b60500000000, 0xefe00a2f00000000,
- 0xa8724e8000000000, 0xfe03e7f400000000, 0xb991a35b00000000,
- 0x31211f7100000000, 0x76b35bde00000000, 0x2140662400000000,
- 0x66d2228b00000000, 0xee629ea100000000, 0xa9f0da0e00000000,
- 0x0182948e00000000, 0x4610d02100000000, 0xcea06c0b00000000,
- 0x893228a400000000, 0xdec1155e00000000, 0x995351f100000000,
- 0x11e3eddb00000000, 0x5671a97400000000, 0xbd01bf3200000000,
- 0xfa93fb9d00000000, 0x722347b700000000, 0x35b1031800000000,
- 0x62423ee200000000, 0x25d07a4d00000000, 0xad60c66700000000,
- 0xeaf282c800000000, 0x4280cc4800000000, 0x051288e700000000,
- 0x8da234cd00000000, 0xca30706200000000, 0x9dc34d9800000000,
- 0xda51093700000000, 0x52e1b51d00000000, 0x1573f1b200000000,
- 0x430258c600000000, 0x04901c6900000000, 0x8c20a04300000000,
- 0xcbb2e4ec00000000, 0x9c41d91600000000, 0xdbd39db900000000,
- 0x5363219300000000, 0x14f1653c00000000, 0xbc832bbc00000000,
- 0xfb116f1300000000, 0x73a1d33900000000, 0x3433979600000000,
- 0x63c0aa6c00000000, 0x2452eec300000000, 0xace252e900000000,
- 0xeb70164600000000, 0x7a037e6500000000, 0x3d913aca00000000,
- 0xb52186e000000000, 0xf2b3c24f00000000, 0xa540ffb500000000,
- 0xe2d2bb1a00000000, 0x6a62073000000000, 0x2df0439f00000000,
- 0x85820d1f00000000, 0xc21049b000000000, 0x4aa0f59a00000000,
- 0x0d32b13500000000, 0x5ac18ccf00000000, 0x1d53c86000000000,
- 0x95e3744a00000000, 0xd27130e500000000, 0x8400999100000000,
- 0xc392dd3e00000000, 0x4b22611400000000, 0x0cb025bb00000000,
- 0x5b43184100000000, 0x1cd15cee00000000, 0x9461e0c400000000,
- 0xd3f3a46b00000000, 0x7b81eaeb00000000, 0x3c13ae4400000000,
- 0xb4a3126e00000000, 0xf33156c100000000, 0xa4c26b3b00000000,
- 0xe3502f9400000000, 0x6be093be00000000, 0x2c72d71100000000,
- 0xc702c15700000000, 0x809085f800000000, 0x082039d200000000,
- 0x4fb27d7d00000000, 0x1841408700000000, 0x5fd3042800000000,
- 0xd763b80200000000, 0x90f1fcad00000000, 0x3883b22d00000000,
- 0x7f11f68200000000, 0xf7a14aa800000000, 0xb0330e0700000000,
- 0xe7c033fd00000000, 0xa052775200000000, 0x28e2cb7800000000,
- 0x6f708fd700000000, 0x390126a300000000, 0x7e93620c00000000,
- 0xf623de2600000000, 0xb1b19a8900000000, 0xe642a77300000000,
- 0xa1d0e3dc00000000, 0x29605ff600000000, 0x6ef21b5900000000,
- 0xc68055d900000000, 0x8112117600000000, 0x09a2ad5c00000000,
- 0x4e30e9f300000000, 0x19c3d40900000000, 0x5e5190a600000000,
- 0xd6e12c8c00000000, 0x9173682300000000, 0xf406fcca00000000,
- 0xb394b86500000000, 0x3b24044f00000000, 0x7cb640e000000000,
- 0x2b457d1a00000000, 0x6cd739b500000000, 0xe467859f00000000,
- 0xa3f5c13000000000, 0x0b878fb000000000, 0x4c15cb1f00000000,
- 0xc4a5773500000000, 0x8337339a00000000, 0xd4c40e6000000000,
- 0x93564acf00000000, 0x1be6f6e500000000, 0x5c74b24a00000000,
- 0x0a051b3e00000000, 0x4d975f9100000000, 0xc527e3bb00000000,
- 0x82b5a71400000000, 0xd5469aee00000000, 0x92d4de4100000000,
- 0x1a64626b00000000, 0x5df626c400000000, 0xf584684400000000,
- 0xb2162ceb00000000, 0x3aa690c100000000, 0x7d34d46e00000000,
- 0x2ac7e99400000000, 0x6d55ad3b00000000, 0xe5e5111100000000,
- 0xa27755be00000000, 0x490743f800000000, 0x0e95075700000000,
- 0x8625bb7d00000000, 0xc1b7ffd200000000, 0x9644c22800000000,
- 0xd1d6868700000000, 0x59663aad00000000, 0x1ef47e0200000000,
- 0xb686308200000000, 0xf114742d00000000, 0x79a4c80700000000,
- 0x3e368ca800000000, 0x69c5b15200000000, 0x2e57f5fd00000000,
- 0xa6e749d700000000, 0xe1750d7800000000, 0xb704a40c00000000,
- 0xf096e0a300000000, 0x78265c8900000000, 0x3fb4182600000000,
- 0x684725dc00000000, 0x2fd5617300000000, 0xa765dd5900000000,
- 0xe0f799f600000000, 0x4885d77600000000, 0x0f1793d900000000,
- 0x87a72ff300000000, 0xc0356b5c00000000, 0x97c656a600000000,
- 0xd054120900000000, 0x58e4ae2300000000, 0x1f76ea8c00000000,
- 0x8e0582af00000000, 0xc997c60000000000, 0x41277a2a00000000,
- 0x06b53e8500000000, 0x5146037f00000000, 0x16d447d000000000,
- 0x9e64fbfa00000000, 0xd9f6bf5500000000, 0x7184f1d500000000,
- 0x3616b57a00000000, 0xbea6095000000000, 0xf9344dff00000000,
- 0xaec7700500000000, 0xe95534aa00000000, 0x61e5888000000000,
- 0x2677cc2f00000000, 0x7006655b00000000, 0x379421f400000000,
- 0xbf249dde00000000, 0xf8b6d97100000000, 0xaf45e48b00000000,
- 0xe8d7a02400000000, 0x60671c0e00000000, 0x27f558a100000000,
- 0x8f87162100000000, 0xc815528e00000000, 0x40a5eea400000000,
- 0x0737aa0b00000000, 0x50c497f100000000, 0x1756d35e00000000,
- 0x9fe66f7400000000, 0xd8742bdb00000000, 0x33043d9d00000000,
- 0x7496793200000000, 0xfc26c51800000000, 0xbbb481b700000000,
- 0xec47bc4d00000000, 0xabd5f8e200000000, 0x236544c800000000,
- 0x64f7006700000000, 0xcc854ee700000000, 0x8b170a4800000000,
- 0x03a7b66200000000, 0x4435f2cd00000000, 0x13c6cf3700000000,
- 0x54548b9800000000, 0xdce437b200000000, 0x9b76731d00000000,
- 0xcd07da6900000000, 0x8a959ec600000000, 0x022522ec00000000,
- 0x45b7664300000000, 0x12445bb900000000, 0x55d61f1600000000,
- 0xdd66a33c00000000, 0x9af4e79300000000, 0x3286a91300000000,
- 0x7514edbc00000000, 0xfda4519600000000, 0xba36153900000000,
- 0xedc528c300000000, 0xaa576c6c00000000, 0x22e7d04600000000,
- 0x657594e900000000}};
-
-#else /* W == 4 */
-
-local const z_crc_t FAR crc_braid_table[][256] = {
- {0x00000000, 0x65673b46, 0xcace768c, 0xafa94dca, 0x4eedeb59,
- 0x2b8ad01f, 0x84239dd5, 0xe144a693, 0x9ddbd6b2, 0xf8bcedf4,
- 0x5715a03e, 0x32729b78, 0xd3363deb, 0xb65106ad, 0x19f84b67,
- 0x7c9f7021, 0xe0c6ab25, 0x85a19063, 0x2a08dda9, 0x4f6fe6ef,
- 0xae2b407c, 0xcb4c7b3a, 0x64e536f0, 0x01820db6, 0x7d1d7d97,
- 0x187a46d1, 0xb7d30b1b, 0xd2b4305d, 0x33f096ce, 0x5697ad88,
- 0xf93ee042, 0x9c59db04, 0x1afc500b, 0x7f9b6b4d, 0xd0322687,
- 0xb5551dc1, 0x5411bb52, 0x31768014, 0x9edfcdde, 0xfbb8f698,
- 0x872786b9, 0xe240bdff, 0x4de9f035, 0x288ecb73, 0xc9ca6de0,
- 0xacad56a6, 0x03041b6c, 0x6663202a, 0xfa3afb2e, 0x9f5dc068,
- 0x30f48da2, 0x5593b6e4, 0xb4d71077, 0xd1b02b31, 0x7e1966fb,
- 0x1b7e5dbd, 0x67e12d9c, 0x028616da, 0xad2f5b10, 0xc8486056,
- 0x290cc6c5, 0x4c6bfd83, 0xe3c2b049, 0x86a58b0f, 0x35f8a016,
- 0x509f9b50, 0xff36d69a, 0x9a51eddc, 0x7b154b4f, 0x1e727009,
- 0xb1db3dc3, 0xd4bc0685, 0xa82376a4, 0xcd444de2, 0x62ed0028,
- 0x078a3b6e, 0xe6ce9dfd, 0x83a9a6bb, 0x2c00eb71, 0x4967d037,
- 0xd53e0b33, 0xb0593075, 0x1ff07dbf, 0x7a9746f9, 0x9bd3e06a,
- 0xfeb4db2c, 0x511d96e6, 0x347aada0, 0x48e5dd81, 0x2d82e6c7,
- 0x822bab0d, 0xe74c904b, 0x060836d8, 0x636f0d9e, 0xccc64054,
- 0xa9a17b12, 0x2f04f01d, 0x4a63cb5b, 0xe5ca8691, 0x80adbdd7,
- 0x61e91b44, 0x048e2002, 0xab276dc8, 0xce40568e, 0xb2df26af,
- 0xd7b81de9, 0x78115023, 0x1d766b65, 0xfc32cdf6, 0x9955f6b0,
- 0x36fcbb7a, 0x539b803c, 0xcfc25b38, 0xaaa5607e, 0x050c2db4,
- 0x606b16f2, 0x812fb061, 0xe4488b27, 0x4be1c6ed, 0x2e86fdab,
- 0x52198d8a, 0x377eb6cc, 0x98d7fb06, 0xfdb0c040, 0x1cf466d3,
- 0x79935d95, 0xd63a105f, 0xb35d2b19, 0x6bf1402c, 0x0e967b6a,
- 0xa13f36a0, 0xc4580de6, 0x251cab75, 0x407b9033, 0xefd2ddf9,
- 0x8ab5e6bf, 0xf62a969e, 0x934dadd8, 0x3ce4e012, 0x5983db54,
- 0xb8c77dc7, 0xdda04681, 0x72090b4b, 0x176e300d, 0x8b37eb09,
- 0xee50d04f, 0x41f99d85, 0x249ea6c3, 0xc5da0050, 0xa0bd3b16,
- 0x0f1476dc, 0x6a734d9a, 0x16ec3dbb, 0x738b06fd, 0xdc224b37,
- 0xb9457071, 0x5801d6e2, 0x3d66eda4, 0x92cfa06e, 0xf7a89b28,
- 0x710d1027, 0x146a2b61, 0xbbc366ab, 0xdea45ded, 0x3fe0fb7e,
- 0x5a87c038, 0xf52e8df2, 0x9049b6b4, 0xecd6c695, 0x89b1fdd3,
- 0x2618b019, 0x437f8b5f, 0xa23b2dcc, 0xc75c168a, 0x68f55b40,
- 0x0d926006, 0x91cbbb02, 0xf4ac8044, 0x5b05cd8e, 0x3e62f6c8,
- 0xdf26505b, 0xba416b1d, 0x15e826d7, 0x708f1d91, 0x0c106db0,
- 0x697756f6, 0xc6de1b3c, 0xa3b9207a, 0x42fd86e9, 0x279abdaf,
- 0x8833f065, 0xed54cb23, 0x5e09e03a, 0x3b6edb7c, 0x94c796b6,
- 0xf1a0adf0, 0x10e40b63, 0x75833025, 0xda2a7def, 0xbf4d46a9,
- 0xc3d23688, 0xa6b50dce, 0x091c4004, 0x6c7b7b42, 0x8d3fddd1,
- 0xe858e697, 0x47f1ab5d, 0x2296901b, 0xbecf4b1f, 0xdba87059,
- 0x74013d93, 0x116606d5, 0xf022a046, 0x95459b00, 0x3aecd6ca,
- 0x5f8bed8c, 0x23149dad, 0x4673a6eb, 0xe9daeb21, 0x8cbdd067,
- 0x6df976f4, 0x089e4db2, 0xa7370078, 0xc2503b3e, 0x44f5b031,
- 0x21928b77, 0x8e3bc6bd, 0xeb5cfdfb, 0x0a185b68, 0x6f7f602e,
- 0xc0d62de4, 0xa5b116a2, 0xd92e6683, 0xbc495dc5, 0x13e0100f,
- 0x76872b49, 0x97c38dda, 0xf2a4b69c, 0x5d0dfb56, 0x386ac010,
- 0xa4331b14, 0xc1542052, 0x6efd6d98, 0x0b9a56de, 0xeadef04d,
- 0x8fb9cb0b, 0x201086c1, 0x4577bd87, 0x39e8cda6, 0x5c8ff6e0,
- 0xf326bb2a, 0x9641806c, 0x770526ff, 0x12621db9, 0xbdcb5073,
- 0xd8ac6b35},
- {0x00000000, 0xd7e28058, 0x74b406f1, 0xa35686a9, 0xe9680de2,
- 0x3e8a8dba, 0x9ddc0b13, 0x4a3e8b4b, 0x09a11d85, 0xde439ddd,
- 0x7d151b74, 0xaaf79b2c, 0xe0c91067, 0x372b903f, 0x947d1696,
- 0x439f96ce, 0x13423b0a, 0xc4a0bb52, 0x67f63dfb, 0xb014bda3,
- 0xfa2a36e8, 0x2dc8b6b0, 0x8e9e3019, 0x597cb041, 0x1ae3268f,
- 0xcd01a6d7, 0x6e57207e, 0xb9b5a026, 0xf38b2b6d, 0x2469ab35,
- 0x873f2d9c, 0x50ddadc4, 0x26847614, 0xf166f64c, 0x523070e5,
- 0x85d2f0bd, 0xcfec7bf6, 0x180efbae, 0xbb587d07, 0x6cbafd5f,
- 0x2f256b91, 0xf8c7ebc9, 0x5b916d60, 0x8c73ed38, 0xc64d6673,
- 0x11afe62b, 0xb2f96082, 0x651be0da, 0x35c64d1e, 0xe224cd46,
- 0x41724bef, 0x9690cbb7, 0xdcae40fc, 0x0b4cc0a4, 0xa81a460d,
- 0x7ff8c655, 0x3c67509b, 0xeb85d0c3, 0x48d3566a, 0x9f31d632,
- 0xd50f5d79, 0x02eddd21, 0xa1bb5b88, 0x7659dbd0, 0x4d08ec28,
- 0x9aea6c70, 0x39bcead9, 0xee5e6a81, 0xa460e1ca, 0x73826192,
- 0xd0d4e73b, 0x07366763, 0x44a9f1ad, 0x934b71f5, 0x301df75c,
- 0xe7ff7704, 0xadc1fc4f, 0x7a237c17, 0xd975fabe, 0x0e977ae6,
- 0x5e4ad722, 0x89a8577a, 0x2afed1d3, 0xfd1c518b, 0xb722dac0,
- 0x60c05a98, 0xc396dc31, 0x14745c69, 0x57ebcaa7, 0x80094aff,
- 0x235fcc56, 0xf4bd4c0e, 0xbe83c745, 0x6961471d, 0xca37c1b4,
- 0x1dd541ec, 0x6b8c9a3c, 0xbc6e1a64, 0x1f389ccd, 0xc8da1c95,
- 0x82e497de, 0x55061786, 0xf650912f, 0x21b21177, 0x622d87b9,
- 0xb5cf07e1, 0x16998148, 0xc17b0110, 0x8b458a5b, 0x5ca70a03,
- 0xfff18caa, 0x28130cf2, 0x78cea136, 0xaf2c216e, 0x0c7aa7c7,
- 0xdb98279f, 0x91a6acd4, 0x46442c8c, 0xe512aa25, 0x32f02a7d,
- 0x716fbcb3, 0xa68d3ceb, 0x05dbba42, 0xd2393a1a, 0x9807b151,
- 0x4fe53109, 0xecb3b7a0, 0x3b5137f8, 0x9a11d850, 0x4df35808,
- 0xeea5dea1, 0x39475ef9, 0x7379d5b2, 0xa49b55ea, 0x07cdd343,
- 0xd02f531b, 0x93b0c5d5, 0x4452458d, 0xe704c324, 0x30e6437c,
- 0x7ad8c837, 0xad3a486f, 0x0e6ccec6, 0xd98e4e9e, 0x8953e35a,
- 0x5eb16302, 0xfde7e5ab, 0x2a0565f3, 0x603beeb8, 0xb7d96ee0,
- 0x148fe849, 0xc36d6811, 0x80f2fedf, 0x57107e87, 0xf446f82e,
- 0x23a47876, 0x699af33d, 0xbe787365, 0x1d2ef5cc, 0xcacc7594,
- 0xbc95ae44, 0x6b772e1c, 0xc821a8b5, 0x1fc328ed, 0x55fda3a6,
- 0x821f23fe, 0x2149a557, 0xf6ab250f, 0xb534b3c1, 0x62d63399,
- 0xc180b530, 0x16623568, 0x5c5cbe23, 0x8bbe3e7b, 0x28e8b8d2,
- 0xff0a388a, 0xafd7954e, 0x78351516, 0xdb6393bf, 0x0c8113e7,
- 0x46bf98ac, 0x915d18f4, 0x320b9e5d, 0xe5e91e05, 0xa67688cb,
- 0x71940893, 0xd2c28e3a, 0x05200e62, 0x4f1e8529, 0x98fc0571,
- 0x3baa83d8, 0xec480380, 0xd7193478, 0x00fbb420, 0xa3ad3289,
- 0x744fb2d1, 0x3e71399a, 0xe993b9c2, 0x4ac53f6b, 0x9d27bf33,
- 0xdeb829fd, 0x095aa9a5, 0xaa0c2f0c, 0x7deeaf54, 0x37d0241f,
- 0xe032a447, 0x436422ee, 0x9486a2b6, 0xc45b0f72, 0x13b98f2a,
- 0xb0ef0983, 0x670d89db, 0x2d330290, 0xfad182c8, 0x59870461,
- 0x8e658439, 0xcdfa12f7, 0x1a1892af, 0xb94e1406, 0x6eac945e,
- 0x24921f15, 0xf3709f4d, 0x502619e4, 0x87c499bc, 0xf19d426c,
- 0x267fc234, 0x8529449d, 0x52cbc4c5, 0x18f54f8e, 0xcf17cfd6,
- 0x6c41497f, 0xbba3c927, 0xf83c5fe9, 0x2fdedfb1, 0x8c885918,
- 0x5b6ad940, 0x1154520b, 0xc6b6d253, 0x65e054fa, 0xb202d4a2,
- 0xe2df7966, 0x353df93e, 0x966b7f97, 0x4189ffcf, 0x0bb77484,
- 0xdc55f4dc, 0x7f037275, 0xa8e1f22d, 0xeb7e64e3, 0x3c9ce4bb,
- 0x9fca6212, 0x4828e24a, 0x02166901, 0xd5f4e959, 0x76a26ff0,
- 0xa140efa8},
- {0x00000000, 0xef52b6e1, 0x05d46b83, 0xea86dd62, 0x0ba8d706,
- 0xe4fa61e7, 0x0e7cbc85, 0xe12e0a64, 0x1751ae0c, 0xf80318ed,
- 0x1285c58f, 0xfdd7736e, 0x1cf9790a, 0xf3abcfeb, 0x192d1289,
- 0xf67fa468, 0x2ea35c18, 0xc1f1eaf9, 0x2b77379b, 0xc425817a,
- 0x250b8b1e, 0xca593dff, 0x20dfe09d, 0xcf8d567c, 0x39f2f214,
- 0xd6a044f5, 0x3c269997, 0xd3742f76, 0x325a2512, 0xdd0893f3,
- 0x378e4e91, 0xd8dcf870, 0x5d46b830, 0xb2140ed1, 0x5892d3b3,
- 0xb7c06552, 0x56ee6f36, 0xb9bcd9d7, 0x533a04b5, 0xbc68b254,
- 0x4a17163c, 0xa545a0dd, 0x4fc37dbf, 0xa091cb5e, 0x41bfc13a,
- 0xaeed77db, 0x446baab9, 0xab391c58, 0x73e5e428, 0x9cb752c9,
- 0x76318fab, 0x9963394a, 0x784d332e, 0x971f85cf, 0x7d9958ad,
- 0x92cbee4c, 0x64b44a24, 0x8be6fcc5, 0x616021a7, 0x8e329746,
- 0x6f1c9d22, 0x804e2bc3, 0x6ac8f6a1, 0x859a4040, 0xba8d7060,
- 0x55dfc681, 0xbf591be3, 0x500bad02, 0xb125a766, 0x5e771187,
- 0xb4f1cce5, 0x5ba37a04, 0xaddcde6c, 0x428e688d, 0xa808b5ef,
- 0x475a030e, 0xa674096a, 0x4926bf8b, 0xa3a062e9, 0x4cf2d408,
- 0x942e2c78, 0x7b7c9a99, 0x91fa47fb, 0x7ea8f11a, 0x9f86fb7e,
- 0x70d44d9f, 0x9a5290fd, 0x7500261c, 0x837f8274, 0x6c2d3495,
- 0x86abe9f7, 0x69f95f16, 0x88d75572, 0x6785e393, 0x8d033ef1,
- 0x62518810, 0xe7cbc850, 0x08997eb1, 0xe21fa3d3, 0x0d4d1532,
- 0xec631f56, 0x0331a9b7, 0xe9b774d5, 0x06e5c234, 0xf09a665c,
- 0x1fc8d0bd, 0xf54e0ddf, 0x1a1cbb3e, 0xfb32b15a, 0x146007bb,
- 0xfee6dad9, 0x11b46c38, 0xc9689448, 0x263a22a9, 0xccbcffcb,
- 0x23ee492a, 0xc2c0434e, 0x2d92f5af, 0xc71428cd, 0x28469e2c,
- 0xde393a44, 0x316b8ca5, 0xdbed51c7, 0x34bfe726, 0xd591ed42,
- 0x3ac35ba3, 0xd04586c1, 0x3f173020, 0xae6be681, 0x41395060,
- 0xabbf8d02, 0x44ed3be3, 0xa5c33187, 0x4a918766, 0xa0175a04,
- 0x4f45ece5, 0xb93a488d, 0x5668fe6c, 0xbcee230e, 0x53bc95ef,
- 0xb2929f8b, 0x5dc0296a, 0xb746f408, 0x581442e9, 0x80c8ba99,
- 0x6f9a0c78, 0x851cd11a, 0x6a4e67fb, 0x8b606d9f, 0x6432db7e,
- 0x8eb4061c, 0x61e6b0fd, 0x97991495, 0x78cba274, 0x924d7f16,
- 0x7d1fc9f7, 0x9c31c393, 0x73637572, 0x99e5a810, 0x76b71ef1,
- 0xf32d5eb1, 0x1c7fe850, 0xf6f93532, 0x19ab83d3, 0xf88589b7,
- 0x17d73f56, 0xfd51e234, 0x120354d5, 0xe47cf0bd, 0x0b2e465c,
- 0xe1a89b3e, 0x0efa2ddf, 0xefd427bb, 0x0086915a, 0xea004c38,
- 0x0552fad9, 0xdd8e02a9, 0x32dcb448, 0xd85a692a, 0x3708dfcb,
- 0xd626d5af, 0x3974634e, 0xd3f2be2c, 0x3ca008cd, 0xcadfaca5,
- 0x258d1a44, 0xcf0bc726, 0x205971c7, 0xc1777ba3, 0x2e25cd42,
- 0xc4a31020, 0x2bf1a6c1, 0x14e696e1, 0xfbb42000, 0x1132fd62,
- 0xfe604b83, 0x1f4e41e7, 0xf01cf706, 0x1a9a2a64, 0xf5c89c85,
- 0x03b738ed, 0xece58e0c, 0x0663536e, 0xe931e58f, 0x081fefeb,
- 0xe74d590a, 0x0dcb8468, 0xe2993289, 0x3a45caf9, 0xd5177c18,
- 0x3f91a17a, 0xd0c3179b, 0x31ed1dff, 0xdebfab1e, 0x3439767c,
- 0xdb6bc09d, 0x2d1464f5, 0xc246d214, 0x28c00f76, 0xc792b997,
- 0x26bcb3f3, 0xc9ee0512, 0x2368d870, 0xcc3a6e91, 0x49a02ed1,
- 0xa6f29830, 0x4c744552, 0xa326f3b3, 0x4208f9d7, 0xad5a4f36,
- 0x47dc9254, 0xa88e24b5, 0x5ef180dd, 0xb1a3363c, 0x5b25eb5e,
- 0xb4775dbf, 0x555957db, 0xba0be13a, 0x508d3c58, 0xbfdf8ab9,
- 0x670372c9, 0x8851c428, 0x62d7194a, 0x8d85afab, 0x6caba5cf,
- 0x83f9132e, 0x697fce4c, 0x862d78ad, 0x7052dcc5, 0x9f006a24,
- 0x7586b746, 0x9ad401a7, 0x7bfa0bc3, 0x94a8bd22, 0x7e2e6040,
- 0x917cd6a1},
- {0x00000000, 0x87a6cb43, 0xd43c90c7, 0x539a5b84, 0x730827cf,
- 0xf4aeec8c, 0xa734b708, 0x20927c4b, 0xe6104f9e, 0x61b684dd,
- 0x322cdf59, 0xb58a141a, 0x95186851, 0x12bea312, 0x4124f896,
- 0xc68233d5, 0x1751997d, 0x90f7523e, 0xc36d09ba, 0x44cbc2f9,
- 0x6459beb2, 0xe3ff75f1, 0xb0652e75, 0x37c3e536, 0xf141d6e3,
- 0x76e71da0, 0x257d4624, 0xa2db8d67, 0x8249f12c, 0x05ef3a6f,
- 0x567561eb, 0xd1d3aaa8, 0x2ea332fa, 0xa905f9b9, 0xfa9fa23d,
- 0x7d39697e, 0x5dab1535, 0xda0dde76, 0x899785f2, 0x0e314eb1,
- 0xc8b37d64, 0x4f15b627, 0x1c8feda3, 0x9b2926e0, 0xbbbb5aab,
- 0x3c1d91e8, 0x6f87ca6c, 0xe821012f, 0x39f2ab87, 0xbe5460c4,
- 0xedce3b40, 0x6a68f003, 0x4afa8c48, 0xcd5c470b, 0x9ec61c8f,
- 0x1960d7cc, 0xdfe2e419, 0x58442f5a, 0x0bde74de, 0x8c78bf9d,
- 0xaceac3d6, 0x2b4c0895, 0x78d65311, 0xff709852, 0x5d4665f4,
- 0xdae0aeb7, 0x897af533, 0x0edc3e70, 0x2e4e423b, 0xa9e88978,
- 0xfa72d2fc, 0x7dd419bf, 0xbb562a6a, 0x3cf0e129, 0x6f6abaad,
- 0xe8cc71ee, 0xc85e0da5, 0x4ff8c6e6, 0x1c629d62, 0x9bc45621,
- 0x4a17fc89, 0xcdb137ca, 0x9e2b6c4e, 0x198da70d, 0x391fdb46,
- 0xbeb91005, 0xed234b81, 0x6a8580c2, 0xac07b317, 0x2ba17854,
- 0x783b23d0, 0xff9de893, 0xdf0f94d8, 0x58a95f9b, 0x0b33041f,
- 0x8c95cf5c, 0x73e5570e, 0xf4439c4d, 0xa7d9c7c9, 0x207f0c8a,
- 0x00ed70c1, 0x874bbb82, 0xd4d1e006, 0x53772b45, 0x95f51890,
- 0x1253d3d3, 0x41c98857, 0xc66f4314, 0xe6fd3f5f, 0x615bf41c,
- 0x32c1af98, 0xb56764db, 0x64b4ce73, 0xe3120530, 0xb0885eb4,
- 0x372e95f7, 0x17bce9bc, 0x901a22ff, 0xc380797b, 0x4426b238,
- 0x82a481ed, 0x05024aae, 0x5698112a, 0xd13eda69, 0xf1aca622,
- 0x760a6d61, 0x259036e5, 0xa236fda6, 0xba8ccbe8, 0x3d2a00ab,
- 0x6eb05b2f, 0xe916906c, 0xc984ec27, 0x4e222764, 0x1db87ce0,
- 0x9a1eb7a3, 0x5c9c8476, 0xdb3a4f35, 0x88a014b1, 0x0f06dff2,
- 0x2f94a3b9, 0xa83268fa, 0xfba8337e, 0x7c0ef83d, 0xaddd5295,
- 0x2a7b99d6, 0x79e1c252, 0xfe470911, 0xded5755a, 0x5973be19,
- 0x0ae9e59d, 0x8d4f2ede, 0x4bcd1d0b, 0xcc6bd648, 0x9ff18dcc,
- 0x1857468f, 0x38c53ac4, 0xbf63f187, 0xecf9aa03, 0x6b5f6140,
- 0x942ff912, 0x13893251, 0x401369d5, 0xc7b5a296, 0xe727dedd,
- 0x6081159e, 0x331b4e1a, 0xb4bd8559, 0x723fb68c, 0xf5997dcf,
- 0xa603264b, 0x21a5ed08, 0x01379143, 0x86915a00, 0xd50b0184,
- 0x52adcac7, 0x837e606f, 0x04d8ab2c, 0x5742f0a8, 0xd0e43beb,
- 0xf07647a0, 0x77d08ce3, 0x244ad767, 0xa3ec1c24, 0x656e2ff1,
- 0xe2c8e4b2, 0xb152bf36, 0x36f47475, 0x1666083e, 0x91c0c37d,
- 0xc25a98f9, 0x45fc53ba, 0xe7caae1c, 0x606c655f, 0x33f63edb,
- 0xb450f598, 0x94c289d3, 0x13644290, 0x40fe1914, 0xc758d257,
- 0x01dae182, 0x867c2ac1, 0xd5e67145, 0x5240ba06, 0x72d2c64d,
- 0xf5740d0e, 0xa6ee568a, 0x21489dc9, 0xf09b3761, 0x773dfc22,
- 0x24a7a7a6, 0xa3016ce5, 0x839310ae, 0x0435dbed, 0x57af8069,
- 0xd0094b2a, 0x168b78ff, 0x912db3bc, 0xc2b7e838, 0x4511237b,
- 0x65835f30, 0xe2259473, 0xb1bfcff7, 0x361904b4, 0xc9699ce6,
- 0x4ecf57a5, 0x1d550c21, 0x9af3c762, 0xba61bb29, 0x3dc7706a,
- 0x6e5d2bee, 0xe9fbe0ad, 0x2f79d378, 0xa8df183b, 0xfb4543bf,
- 0x7ce388fc, 0x5c71f4b7, 0xdbd73ff4, 0x884d6470, 0x0febaf33,
- 0xde38059b, 0x599eced8, 0x0a04955c, 0x8da25e1f, 0xad302254,
- 0x2a96e917, 0x790cb293, 0xfeaa79d0, 0x38284a05, 0xbf8e8146,
- 0xec14dac2, 0x6bb21181, 0x4b206dca, 0xcc86a689, 0x9f1cfd0d,
- 0x18ba364e}};
-
-local const z_word_t FAR crc_braid_big_table[][256] = {
- {0x00000000, 0x43cba687, 0xc7903cd4, 0x845b9a53, 0xcf270873,
- 0x8cecaef4, 0x08b734a7, 0x4b7c9220, 0x9e4f10e6, 0xdd84b661,
- 0x59df2c32, 0x1a148ab5, 0x51681895, 0x12a3be12, 0x96f82441,
- 0xd53382c6, 0x7d995117, 0x3e52f790, 0xba096dc3, 0xf9c2cb44,
- 0xb2be5964, 0xf175ffe3, 0x752e65b0, 0x36e5c337, 0xe3d641f1,
- 0xa01de776, 0x24467d25, 0x678ddba2, 0x2cf14982, 0x6f3aef05,
- 0xeb617556, 0xa8aad3d1, 0xfa32a32e, 0xb9f905a9, 0x3da29ffa,
- 0x7e69397d, 0x3515ab5d, 0x76de0dda, 0xf2859789, 0xb14e310e,
- 0x647db3c8, 0x27b6154f, 0xa3ed8f1c, 0xe026299b, 0xab5abbbb,
- 0xe8911d3c, 0x6cca876f, 0x2f0121e8, 0x87abf239, 0xc46054be,
- 0x403bceed, 0x03f0686a, 0x488cfa4a, 0x0b475ccd, 0x8f1cc69e,
- 0xccd76019, 0x19e4e2df, 0x5a2f4458, 0xde74de0b, 0x9dbf788c,
- 0xd6c3eaac, 0x95084c2b, 0x1153d678, 0x529870ff, 0xf465465d,
- 0xb7aee0da, 0x33f57a89, 0x703edc0e, 0x3b424e2e, 0x7889e8a9,
- 0xfcd272fa, 0xbf19d47d, 0x6a2a56bb, 0x29e1f03c, 0xadba6a6f,
- 0xee71cce8, 0xa50d5ec8, 0xe6c6f84f, 0x629d621c, 0x2156c49b,
- 0x89fc174a, 0xca37b1cd, 0x4e6c2b9e, 0x0da78d19, 0x46db1f39,
- 0x0510b9be, 0x814b23ed, 0xc280856a, 0x17b307ac, 0x5478a12b,
- 0xd0233b78, 0x93e89dff, 0xd8940fdf, 0x9b5fa958, 0x1f04330b,
- 0x5ccf958c, 0x0e57e573, 0x4d9c43f4, 0xc9c7d9a7, 0x8a0c7f20,
- 0xc170ed00, 0x82bb4b87, 0x06e0d1d4, 0x452b7753, 0x9018f595,
- 0xd3d35312, 0x5788c941, 0x14436fc6, 0x5f3ffde6, 0x1cf45b61,
- 0x98afc132, 0xdb6467b5, 0x73ceb464, 0x300512e3, 0xb45e88b0,
- 0xf7952e37, 0xbce9bc17, 0xff221a90, 0x7b7980c3, 0x38b22644,
- 0xed81a482, 0xae4a0205, 0x2a119856, 0x69da3ed1, 0x22a6acf1,
- 0x616d0a76, 0xe5369025, 0xa6fd36a2, 0xe8cb8cba, 0xab002a3d,
- 0x2f5bb06e, 0x6c9016e9, 0x27ec84c9, 0x6427224e, 0xe07cb81d,
- 0xa3b71e9a, 0x76849c5c, 0x354f3adb, 0xb114a088, 0xf2df060f,
- 0xb9a3942f, 0xfa6832a8, 0x7e33a8fb, 0x3df80e7c, 0x9552ddad,
- 0xd6997b2a, 0x52c2e179, 0x110947fe, 0x5a75d5de, 0x19be7359,
- 0x9de5e90a, 0xde2e4f8d, 0x0b1dcd4b, 0x48d66bcc, 0xcc8df19f,
- 0x8f465718, 0xc43ac538, 0x87f163bf, 0x03aaf9ec, 0x40615f6b,
- 0x12f92f94, 0x51328913, 0xd5691340, 0x96a2b5c7, 0xddde27e7,
- 0x9e158160, 0x1a4e1b33, 0x5985bdb4, 0x8cb63f72, 0xcf7d99f5,
- 0x4b2603a6, 0x08eda521, 0x43913701, 0x005a9186, 0x84010bd5,
- 0xc7caad52, 0x6f607e83, 0x2cabd804, 0xa8f04257, 0xeb3be4d0,
- 0xa04776f0, 0xe38cd077, 0x67d74a24, 0x241ceca3, 0xf12f6e65,
- 0xb2e4c8e2, 0x36bf52b1, 0x7574f436, 0x3e086616, 0x7dc3c091,
- 0xf9985ac2, 0xba53fc45, 0x1caecae7, 0x5f656c60, 0xdb3ef633,
- 0x98f550b4, 0xd389c294, 0x90426413, 0x1419fe40, 0x57d258c7,
- 0x82e1da01, 0xc12a7c86, 0x4571e6d5, 0x06ba4052, 0x4dc6d272,
- 0x0e0d74f5, 0x8a56eea6, 0xc99d4821, 0x61379bf0, 0x22fc3d77,
- 0xa6a7a724, 0xe56c01a3, 0xae109383, 0xeddb3504, 0x6980af57,
- 0x2a4b09d0, 0xff788b16, 0xbcb32d91, 0x38e8b7c2, 0x7b231145,
- 0x305f8365, 0x739425e2, 0xf7cfbfb1, 0xb4041936, 0xe69c69c9,
- 0xa557cf4e, 0x210c551d, 0x62c7f39a, 0x29bb61ba, 0x6a70c73d,
- 0xee2b5d6e, 0xade0fbe9, 0x78d3792f, 0x3b18dfa8, 0xbf4345fb,
- 0xfc88e37c, 0xb7f4715c, 0xf43fd7db, 0x70644d88, 0x33afeb0f,
- 0x9b0538de, 0xd8ce9e59, 0x5c95040a, 0x1f5ea28d, 0x542230ad,
- 0x17e9962a, 0x93b20c79, 0xd079aafe, 0x054a2838, 0x46818ebf,
- 0xc2da14ec, 0x8111b26b, 0xca6d204b, 0x89a686cc, 0x0dfd1c9f,
- 0x4e36ba18},
- {0x00000000, 0xe1b652ef, 0x836bd405, 0x62dd86ea, 0x06d7a80b,
- 0xe761fae4, 0x85bc7c0e, 0x640a2ee1, 0x0cae5117, 0xed1803f8,
- 0x8fc58512, 0x6e73d7fd, 0x0a79f91c, 0xebcfabf3, 0x89122d19,
- 0x68a47ff6, 0x185ca32e, 0xf9eaf1c1, 0x9b37772b, 0x7a8125c4,
- 0x1e8b0b25, 0xff3d59ca, 0x9de0df20, 0x7c568dcf, 0x14f2f239,
- 0xf544a0d6, 0x9799263c, 0x762f74d3, 0x12255a32, 0xf39308dd,
- 0x914e8e37, 0x70f8dcd8, 0x30b8465d, 0xd10e14b2, 0xb3d39258,
- 0x5265c0b7, 0x366fee56, 0xd7d9bcb9, 0xb5043a53, 0x54b268bc,
- 0x3c16174a, 0xdda045a5, 0xbf7dc34f, 0x5ecb91a0, 0x3ac1bf41,
- 0xdb77edae, 0xb9aa6b44, 0x581c39ab, 0x28e4e573, 0xc952b79c,
- 0xab8f3176, 0x4a396399, 0x2e334d78, 0xcf851f97, 0xad58997d,
- 0x4ceecb92, 0x244ab464, 0xc5fce68b, 0xa7216061, 0x4697328e,
- 0x229d1c6f, 0xc32b4e80, 0xa1f6c86a, 0x40409a85, 0x60708dba,
- 0x81c6df55, 0xe31b59bf, 0x02ad0b50, 0x66a725b1, 0x8711775e,
- 0xe5ccf1b4, 0x047aa35b, 0x6cdedcad, 0x8d688e42, 0xefb508a8,
- 0x0e035a47, 0x6a0974a6, 0x8bbf2649, 0xe962a0a3, 0x08d4f24c,
- 0x782c2e94, 0x999a7c7b, 0xfb47fa91, 0x1af1a87e, 0x7efb869f,
- 0x9f4dd470, 0xfd90529a, 0x1c260075, 0x74827f83, 0x95342d6c,
- 0xf7e9ab86, 0x165ff969, 0x7255d788, 0x93e38567, 0xf13e038d,
- 0x10885162, 0x50c8cbe7, 0xb17e9908, 0xd3a31fe2, 0x32154d0d,
- 0x561f63ec, 0xb7a93103, 0xd574b7e9, 0x34c2e506, 0x5c669af0,
- 0xbdd0c81f, 0xdf0d4ef5, 0x3ebb1c1a, 0x5ab132fb, 0xbb076014,
- 0xd9dae6fe, 0x386cb411, 0x489468c9, 0xa9223a26, 0xcbffbccc,
- 0x2a49ee23, 0x4e43c0c2, 0xaff5922d, 0xcd2814c7, 0x2c9e4628,
- 0x443a39de, 0xa58c6b31, 0xc751eddb, 0x26e7bf34, 0x42ed91d5,
- 0xa35bc33a, 0xc18645d0, 0x2030173f, 0x81e66bae, 0x60503941,
- 0x028dbfab, 0xe33bed44, 0x8731c3a5, 0x6687914a, 0x045a17a0,
- 0xe5ec454f, 0x8d483ab9, 0x6cfe6856, 0x0e23eebc, 0xef95bc53,
- 0x8b9f92b2, 0x6a29c05d, 0x08f446b7, 0xe9421458, 0x99bac880,
- 0x780c9a6f, 0x1ad11c85, 0xfb674e6a, 0x9f6d608b, 0x7edb3264,
- 0x1c06b48e, 0xfdb0e661, 0x95149997, 0x74a2cb78, 0x167f4d92,
- 0xf7c91f7d, 0x93c3319c, 0x72756373, 0x10a8e599, 0xf11eb776,
- 0xb15e2df3, 0x50e87f1c, 0x3235f9f6, 0xd383ab19, 0xb78985f8,
- 0x563fd717, 0x34e251fd, 0xd5540312, 0xbdf07ce4, 0x5c462e0b,
- 0x3e9ba8e1, 0xdf2dfa0e, 0xbb27d4ef, 0x5a918600, 0x384c00ea,
- 0xd9fa5205, 0xa9028edd, 0x48b4dc32, 0x2a695ad8, 0xcbdf0837,
- 0xafd526d6, 0x4e637439, 0x2cbef2d3, 0xcd08a03c, 0xa5acdfca,
- 0x441a8d25, 0x26c70bcf, 0xc7715920, 0xa37b77c1, 0x42cd252e,
- 0x2010a3c4, 0xc1a6f12b, 0xe196e614, 0x0020b4fb, 0x62fd3211,
- 0x834b60fe, 0xe7414e1f, 0x06f71cf0, 0x642a9a1a, 0x859cc8f5,
- 0xed38b703, 0x0c8ee5ec, 0x6e536306, 0x8fe531e9, 0xebef1f08,
- 0x0a594de7, 0x6884cb0d, 0x893299e2, 0xf9ca453a, 0x187c17d5,
- 0x7aa1913f, 0x9b17c3d0, 0xff1ded31, 0x1eabbfde, 0x7c763934,
- 0x9dc06bdb, 0xf564142d, 0x14d246c2, 0x760fc028, 0x97b992c7,
- 0xf3b3bc26, 0x1205eec9, 0x70d86823, 0x916e3acc, 0xd12ea049,
- 0x3098f2a6, 0x5245744c, 0xb3f326a3, 0xd7f90842, 0x364f5aad,
- 0x5492dc47, 0xb5248ea8, 0xdd80f15e, 0x3c36a3b1, 0x5eeb255b,
- 0xbf5d77b4, 0xdb575955, 0x3ae10bba, 0x583c8d50, 0xb98adfbf,
- 0xc9720367, 0x28c45188, 0x4a19d762, 0xabaf858d, 0xcfa5ab6c,
- 0x2e13f983, 0x4cce7f69, 0xad782d86, 0xc5dc5270, 0x246a009f,
- 0x46b78675, 0xa701d49a, 0xc30bfa7b, 0x22bda894, 0x40602e7e,
- 0xa1d67c91},
- {0x00000000, 0x5880e2d7, 0xf106b474, 0xa98656a3, 0xe20d68e9,
- 0xba8d8a3e, 0x130bdc9d, 0x4b8b3e4a, 0x851da109, 0xdd9d43de,
- 0x741b157d, 0x2c9bf7aa, 0x6710c9e0, 0x3f902b37, 0x96167d94,
- 0xce969f43, 0x0a3b4213, 0x52bba0c4, 0xfb3df667, 0xa3bd14b0,
- 0xe8362afa, 0xb0b6c82d, 0x19309e8e, 0x41b07c59, 0x8f26e31a,
- 0xd7a601cd, 0x7e20576e, 0x26a0b5b9, 0x6d2b8bf3, 0x35ab6924,
- 0x9c2d3f87, 0xc4addd50, 0x14768426, 0x4cf666f1, 0xe5703052,
- 0xbdf0d285, 0xf67beccf, 0xaefb0e18, 0x077d58bb, 0x5ffdba6c,
- 0x916b252f, 0xc9ebc7f8, 0x606d915b, 0x38ed738c, 0x73664dc6,
- 0x2be6af11, 0x8260f9b2, 0xdae01b65, 0x1e4dc635, 0x46cd24e2,
- 0xef4b7241, 0xb7cb9096, 0xfc40aedc, 0xa4c04c0b, 0x0d461aa8,
- 0x55c6f87f, 0x9b50673c, 0xc3d085eb, 0x6a56d348, 0x32d6319f,
- 0x795d0fd5, 0x21dded02, 0x885bbba1, 0xd0db5976, 0x28ec084d,
- 0x706cea9a, 0xd9eabc39, 0x816a5eee, 0xcae160a4, 0x92618273,
- 0x3be7d4d0, 0x63673607, 0xadf1a944, 0xf5714b93, 0x5cf71d30,
- 0x0477ffe7, 0x4ffcc1ad, 0x177c237a, 0xbefa75d9, 0xe67a970e,
- 0x22d74a5e, 0x7a57a889, 0xd3d1fe2a, 0x8b511cfd, 0xc0da22b7,
- 0x985ac060, 0x31dc96c3, 0x695c7414, 0xa7caeb57, 0xff4a0980,
- 0x56cc5f23, 0x0e4cbdf4, 0x45c783be, 0x1d476169, 0xb4c137ca,
- 0xec41d51d, 0x3c9a8c6b, 0x641a6ebc, 0xcd9c381f, 0x951cdac8,
- 0xde97e482, 0x86170655, 0x2f9150f6, 0x7711b221, 0xb9872d62,
- 0xe107cfb5, 0x48819916, 0x10017bc1, 0x5b8a458b, 0x030aa75c,
- 0xaa8cf1ff, 0xf20c1328, 0x36a1ce78, 0x6e212caf, 0xc7a77a0c,
- 0x9f2798db, 0xd4aca691, 0x8c2c4446, 0x25aa12e5, 0x7d2af032,
- 0xb3bc6f71, 0xeb3c8da6, 0x42badb05, 0x1a3a39d2, 0x51b10798,
- 0x0931e54f, 0xa0b7b3ec, 0xf837513b, 0x50d8119a, 0x0858f34d,
- 0xa1dea5ee, 0xf95e4739, 0xb2d57973, 0xea559ba4, 0x43d3cd07,
- 0x1b532fd0, 0xd5c5b093, 0x8d455244, 0x24c304e7, 0x7c43e630,
- 0x37c8d87a, 0x6f483aad, 0xc6ce6c0e, 0x9e4e8ed9, 0x5ae35389,
- 0x0263b15e, 0xabe5e7fd, 0xf365052a, 0xb8ee3b60, 0xe06ed9b7,
- 0x49e88f14, 0x11686dc3, 0xdffef280, 0x877e1057, 0x2ef846f4,
- 0x7678a423, 0x3df39a69, 0x657378be, 0xccf52e1d, 0x9475ccca,
- 0x44ae95bc, 0x1c2e776b, 0xb5a821c8, 0xed28c31f, 0xa6a3fd55,
- 0xfe231f82, 0x57a54921, 0x0f25abf6, 0xc1b334b5, 0x9933d662,
- 0x30b580c1, 0x68356216, 0x23be5c5c, 0x7b3ebe8b, 0xd2b8e828,
- 0x8a380aff, 0x4e95d7af, 0x16153578, 0xbf9363db, 0xe713810c,
- 0xac98bf46, 0xf4185d91, 0x5d9e0b32, 0x051ee9e5, 0xcb8876a6,
- 0x93089471, 0x3a8ec2d2, 0x620e2005, 0x29851e4f, 0x7105fc98,
- 0xd883aa3b, 0x800348ec, 0x783419d7, 0x20b4fb00, 0x8932ada3,
- 0xd1b24f74, 0x9a39713e, 0xc2b993e9, 0x6b3fc54a, 0x33bf279d,
- 0xfd29b8de, 0xa5a95a09, 0x0c2f0caa, 0x54afee7d, 0x1f24d037,
- 0x47a432e0, 0xee226443, 0xb6a28694, 0x720f5bc4, 0x2a8fb913,
- 0x8309efb0, 0xdb890d67, 0x9002332d, 0xc882d1fa, 0x61048759,
- 0x3984658e, 0xf712facd, 0xaf92181a, 0x06144eb9, 0x5e94ac6e,
- 0x151f9224, 0x4d9f70f3, 0xe4192650, 0xbc99c487, 0x6c429df1,
- 0x34c27f26, 0x9d442985, 0xc5c4cb52, 0x8e4ff518, 0xd6cf17cf,
- 0x7f49416c, 0x27c9a3bb, 0xe95f3cf8, 0xb1dfde2f, 0x1859888c,
- 0x40d96a5b, 0x0b525411, 0x53d2b6c6, 0xfa54e065, 0xa2d402b2,
- 0x6679dfe2, 0x3ef93d35, 0x977f6b96, 0xcfff8941, 0x8474b70b,
- 0xdcf455dc, 0x7572037f, 0x2df2e1a8, 0xe3647eeb, 0xbbe49c3c,
- 0x1262ca9f, 0x4ae22848, 0x01691602, 0x59e9f4d5, 0xf06fa276,
- 0xa8ef40a1},
- {0x00000000, 0x463b6765, 0x8c76ceca, 0xca4da9af, 0x59ebed4e,
- 0x1fd08a2b, 0xd59d2384, 0x93a644e1, 0xb2d6db9d, 0xf4edbcf8,
- 0x3ea01557, 0x789b7232, 0xeb3d36d3, 0xad0651b6, 0x674bf819,
- 0x21709f7c, 0x25abc6e0, 0x6390a185, 0xa9dd082a, 0xefe66f4f,
- 0x7c402bae, 0x3a7b4ccb, 0xf036e564, 0xb60d8201, 0x977d1d7d,
- 0xd1467a18, 0x1b0bd3b7, 0x5d30b4d2, 0xce96f033, 0x88ad9756,
- 0x42e03ef9, 0x04db599c, 0x0b50fc1a, 0x4d6b9b7f, 0x872632d0,
- 0xc11d55b5, 0x52bb1154, 0x14807631, 0xdecddf9e, 0x98f6b8fb,
- 0xb9862787, 0xffbd40e2, 0x35f0e94d, 0x73cb8e28, 0xe06dcac9,
- 0xa656adac, 0x6c1b0403, 0x2a206366, 0x2efb3afa, 0x68c05d9f,
- 0xa28df430, 0xe4b69355, 0x7710d7b4, 0x312bb0d1, 0xfb66197e,
- 0xbd5d7e1b, 0x9c2de167, 0xda168602, 0x105b2fad, 0x566048c8,
- 0xc5c60c29, 0x83fd6b4c, 0x49b0c2e3, 0x0f8ba586, 0x16a0f835,
- 0x509b9f50, 0x9ad636ff, 0xdced519a, 0x4f4b157b, 0x0970721e,
- 0xc33ddbb1, 0x8506bcd4, 0xa47623a8, 0xe24d44cd, 0x2800ed62,
- 0x6e3b8a07, 0xfd9dcee6, 0xbba6a983, 0x71eb002c, 0x37d06749,
- 0x330b3ed5, 0x753059b0, 0xbf7df01f, 0xf946977a, 0x6ae0d39b,
- 0x2cdbb4fe, 0xe6961d51, 0xa0ad7a34, 0x81dde548, 0xc7e6822d,
- 0x0dab2b82, 0x4b904ce7, 0xd8360806, 0x9e0d6f63, 0x5440c6cc,
- 0x127ba1a9, 0x1df0042f, 0x5bcb634a, 0x9186cae5, 0xd7bdad80,
- 0x441be961, 0x02208e04, 0xc86d27ab, 0x8e5640ce, 0xaf26dfb2,
- 0xe91db8d7, 0x23501178, 0x656b761d, 0xf6cd32fc, 0xb0f65599,
- 0x7abbfc36, 0x3c809b53, 0x385bc2cf, 0x7e60a5aa, 0xb42d0c05,
- 0xf2166b60, 0x61b02f81, 0x278b48e4, 0xedc6e14b, 0xabfd862e,
- 0x8a8d1952, 0xccb67e37, 0x06fbd798, 0x40c0b0fd, 0xd366f41c,
- 0x955d9379, 0x5f103ad6, 0x192b5db3, 0x2c40f16b, 0x6a7b960e,
- 0xa0363fa1, 0xe60d58c4, 0x75ab1c25, 0x33907b40, 0xf9ddd2ef,
- 0xbfe6b58a, 0x9e962af6, 0xd8ad4d93, 0x12e0e43c, 0x54db8359,
- 0xc77dc7b8, 0x8146a0dd, 0x4b0b0972, 0x0d306e17, 0x09eb378b,
- 0x4fd050ee, 0x859df941, 0xc3a69e24, 0x5000dac5, 0x163bbda0,
- 0xdc76140f, 0x9a4d736a, 0xbb3dec16, 0xfd068b73, 0x374b22dc,
- 0x717045b9, 0xe2d60158, 0xa4ed663d, 0x6ea0cf92, 0x289ba8f7,
- 0x27100d71, 0x612b6a14, 0xab66c3bb, 0xed5da4de, 0x7efbe03f,
- 0x38c0875a, 0xf28d2ef5, 0xb4b64990, 0x95c6d6ec, 0xd3fdb189,
- 0x19b01826, 0x5f8b7f43, 0xcc2d3ba2, 0x8a165cc7, 0x405bf568,
- 0x0660920d, 0x02bbcb91, 0x4480acf4, 0x8ecd055b, 0xc8f6623e,
- 0x5b5026df, 0x1d6b41ba, 0xd726e815, 0x911d8f70, 0xb06d100c,
- 0xf6567769, 0x3c1bdec6, 0x7a20b9a3, 0xe986fd42, 0xafbd9a27,
- 0x65f03388, 0x23cb54ed, 0x3ae0095e, 0x7cdb6e3b, 0xb696c794,
- 0xf0ada0f1, 0x630be410, 0x25308375, 0xef7d2ada, 0xa9464dbf,
- 0x8836d2c3, 0xce0db5a6, 0x04401c09, 0x427b7b6c, 0xd1dd3f8d,
- 0x97e658e8, 0x5dabf147, 0x1b909622, 0x1f4bcfbe, 0x5970a8db,
- 0x933d0174, 0xd5066611, 0x46a022f0, 0x009b4595, 0xcad6ec3a,
- 0x8ced8b5f, 0xad9d1423, 0xeba67346, 0x21ebdae9, 0x67d0bd8c,
- 0xf476f96d, 0xb24d9e08, 0x780037a7, 0x3e3b50c2, 0x31b0f544,
- 0x778b9221, 0xbdc63b8e, 0xfbfd5ceb, 0x685b180a, 0x2e607f6f,
- 0xe42dd6c0, 0xa216b1a5, 0x83662ed9, 0xc55d49bc, 0x0f10e013,
- 0x492b8776, 0xda8dc397, 0x9cb6a4f2, 0x56fb0d5d, 0x10c06a38,
- 0x141b33a4, 0x522054c1, 0x986dfd6e, 0xde569a0b, 0x4df0deea,
- 0x0bcbb98f, 0xc1861020, 0x87bd7745, 0xa6cde839, 0xe0f68f5c,
- 0x2abb26f3, 0x6c804196, 0xff260577, 0xb91d6212, 0x7350cbbd,
- 0x356bacd8}};
-
-#endif
-
-#endif
-
-#if N == 6
-
-#if W == 8
-
-local const z_crc_t FAR crc_braid_table[][256] = {
- {0x00000000, 0x3db1ecdc, 0x7b63d9b8, 0x46d23564, 0xf6c7b370,
- 0xcb765fac, 0x8da46ac8, 0xb0158614, 0x36fe60a1, 0x0b4f8c7d,
- 0x4d9db919, 0x702c55c5, 0xc039d3d1, 0xfd883f0d, 0xbb5a0a69,
- 0x86ebe6b5, 0x6dfcc142, 0x504d2d9e, 0x169f18fa, 0x2b2ef426,
- 0x9b3b7232, 0xa68a9eee, 0xe058ab8a, 0xdde94756, 0x5b02a1e3,
- 0x66b34d3f, 0x2061785b, 0x1dd09487, 0xadc51293, 0x9074fe4f,
- 0xd6a6cb2b, 0xeb1727f7, 0xdbf98284, 0xe6486e58, 0xa09a5b3c,
- 0x9d2bb7e0, 0x2d3e31f4, 0x108fdd28, 0x565de84c, 0x6bec0490,
- 0xed07e225, 0xd0b60ef9, 0x96643b9d, 0xabd5d741, 0x1bc05155,
- 0x2671bd89, 0x60a388ed, 0x5d126431, 0xb60543c6, 0x8bb4af1a,
- 0xcd669a7e, 0xf0d776a2, 0x40c2f0b6, 0x7d731c6a, 0x3ba1290e,
- 0x0610c5d2, 0x80fb2367, 0xbd4acfbb, 0xfb98fadf, 0xc6291603,
- 0x763c9017, 0x4b8d7ccb, 0x0d5f49af, 0x30eea573, 0x6c820349,
- 0x5133ef95, 0x17e1daf1, 0x2a50362d, 0x9a45b039, 0xa7f45ce5,
- 0xe1266981, 0xdc97855d, 0x5a7c63e8, 0x67cd8f34, 0x211fba50,
- 0x1cae568c, 0xacbbd098, 0x910a3c44, 0xd7d80920, 0xea69e5fc,
- 0x017ec20b, 0x3ccf2ed7, 0x7a1d1bb3, 0x47acf76f, 0xf7b9717b,
- 0xca089da7, 0x8cdaa8c3, 0xb16b441f, 0x3780a2aa, 0x0a314e76,
- 0x4ce37b12, 0x715297ce, 0xc14711da, 0xfcf6fd06, 0xba24c862,
- 0x879524be, 0xb77b81cd, 0x8aca6d11, 0xcc185875, 0xf1a9b4a9,
- 0x41bc32bd, 0x7c0dde61, 0x3adfeb05, 0x076e07d9, 0x8185e16c,
- 0xbc340db0, 0xfae638d4, 0xc757d408, 0x7742521c, 0x4af3bec0,
- 0x0c218ba4, 0x31906778, 0xda87408f, 0xe736ac53, 0xa1e49937,
- 0x9c5575eb, 0x2c40f3ff, 0x11f11f23, 0x57232a47, 0x6a92c69b,
- 0xec79202e, 0xd1c8ccf2, 0x971af996, 0xaaab154a, 0x1abe935e,
- 0x270f7f82, 0x61dd4ae6, 0x5c6ca63a, 0xd9040692, 0xe4b5ea4e,
- 0xa267df2a, 0x9fd633f6, 0x2fc3b5e2, 0x1272593e, 0x54a06c5a,
- 0x69118086, 0xeffa6633, 0xd24b8aef, 0x9499bf8b, 0xa9285357,
- 0x193dd543, 0x248c399f, 0x625e0cfb, 0x5fefe027, 0xb4f8c7d0,
- 0x89492b0c, 0xcf9b1e68, 0xf22af2b4, 0x423f74a0, 0x7f8e987c,
- 0x395cad18, 0x04ed41c4, 0x8206a771, 0xbfb74bad, 0xf9657ec9,
- 0xc4d49215, 0x74c11401, 0x4970f8dd, 0x0fa2cdb9, 0x32132165,
- 0x02fd8416, 0x3f4c68ca, 0x799e5dae, 0x442fb172, 0xf43a3766,
- 0xc98bdbba, 0x8f59eede, 0xb2e80202, 0x3403e4b7, 0x09b2086b,
- 0x4f603d0f, 0x72d1d1d3, 0xc2c457c7, 0xff75bb1b, 0xb9a78e7f,
- 0x841662a3, 0x6f014554, 0x52b0a988, 0x14629cec, 0x29d37030,
- 0x99c6f624, 0xa4771af8, 0xe2a52f9c, 0xdf14c340, 0x59ff25f5,
- 0x644ec929, 0x229cfc4d, 0x1f2d1091, 0xaf389685, 0x92897a59,
- 0xd45b4f3d, 0xe9eaa3e1, 0xb58605db, 0x8837e907, 0xcee5dc63,
- 0xf35430bf, 0x4341b6ab, 0x7ef05a77, 0x38226f13, 0x059383cf,
- 0x8378657a, 0xbec989a6, 0xf81bbcc2, 0xc5aa501e, 0x75bfd60a,
- 0x480e3ad6, 0x0edc0fb2, 0x336de36e, 0xd87ac499, 0xe5cb2845,
- 0xa3191d21, 0x9ea8f1fd, 0x2ebd77e9, 0x130c9b35, 0x55deae51,
- 0x686f428d, 0xee84a438, 0xd33548e4, 0x95e77d80, 0xa856915c,
- 0x18431748, 0x25f2fb94, 0x6320cef0, 0x5e91222c, 0x6e7f875f,
- 0x53ce6b83, 0x151c5ee7, 0x28adb23b, 0x98b8342f, 0xa509d8f3,
- 0xe3dbed97, 0xde6a014b, 0x5881e7fe, 0x65300b22, 0x23e23e46,
- 0x1e53d29a, 0xae46548e, 0x93f7b852, 0xd5258d36, 0xe89461ea,
- 0x0383461d, 0x3e32aac1, 0x78e09fa5, 0x45517379, 0xf544f56d,
- 0xc8f519b1, 0x8e272cd5, 0xb396c009, 0x357d26bc, 0x08ccca60,
- 0x4e1eff04, 0x73af13d8, 0xc3ba95cc, 0xfe0b7910, 0xb8d94c74,
- 0x8568a0a8},
- {0x00000000, 0x69790b65, 0xd2f216ca, 0xbb8b1daf, 0x7e952bd5,
- 0x17ec20b0, 0xac673d1f, 0xc51e367a, 0xfd2a57aa, 0x94535ccf,
- 0x2fd84160, 0x46a14a05, 0x83bf7c7f, 0xeac6771a, 0x514d6ab5,
- 0x383461d0, 0x2125a915, 0x485ca270, 0xf3d7bfdf, 0x9aaeb4ba,
- 0x5fb082c0, 0x36c989a5, 0x8d42940a, 0xe43b9f6f, 0xdc0ffebf,
- 0xb576f5da, 0x0efde875, 0x6784e310, 0xa29ad56a, 0xcbe3de0f,
- 0x7068c3a0, 0x1911c8c5, 0x424b522a, 0x2b32594f, 0x90b944e0,
- 0xf9c04f85, 0x3cde79ff, 0x55a7729a, 0xee2c6f35, 0x87556450,
- 0xbf610580, 0xd6180ee5, 0x6d93134a, 0x04ea182f, 0xc1f42e55,
- 0xa88d2530, 0x1306389f, 0x7a7f33fa, 0x636efb3f, 0x0a17f05a,
- 0xb19cedf5, 0xd8e5e690, 0x1dfbd0ea, 0x7482db8f, 0xcf09c620,
- 0xa670cd45, 0x9e44ac95, 0xf73da7f0, 0x4cb6ba5f, 0x25cfb13a,
- 0xe0d18740, 0x89a88c25, 0x3223918a, 0x5b5a9aef, 0x8496a454,
- 0xedefaf31, 0x5664b29e, 0x3f1db9fb, 0xfa038f81, 0x937a84e4,
- 0x28f1994b, 0x4188922e, 0x79bcf3fe, 0x10c5f89b, 0xab4ee534,
- 0xc237ee51, 0x0729d82b, 0x6e50d34e, 0xd5dbcee1, 0xbca2c584,
- 0xa5b30d41, 0xccca0624, 0x77411b8b, 0x1e3810ee, 0xdb262694,
- 0xb25f2df1, 0x09d4305e, 0x60ad3b3b, 0x58995aeb, 0x31e0518e,
- 0x8a6b4c21, 0xe3124744, 0x260c713e, 0x4f757a5b, 0xf4fe67f4,
- 0x9d876c91, 0xc6ddf67e, 0xafa4fd1b, 0x142fe0b4, 0x7d56ebd1,
- 0xb848ddab, 0xd131d6ce, 0x6abacb61, 0x03c3c004, 0x3bf7a1d4,
- 0x528eaab1, 0xe905b71e, 0x807cbc7b, 0x45628a01, 0x2c1b8164,
- 0x97909ccb, 0xfee997ae, 0xe7f85f6b, 0x8e81540e, 0x350a49a1,
- 0x5c7342c4, 0x996d74be, 0xf0147fdb, 0x4b9f6274, 0x22e66911,
- 0x1ad208c1, 0x73ab03a4, 0xc8201e0b, 0xa159156e, 0x64472314,
- 0x0d3e2871, 0xb6b535de, 0xdfcc3ebb, 0xd25c4ee9, 0xbb25458c,
- 0x00ae5823, 0x69d75346, 0xacc9653c, 0xc5b06e59, 0x7e3b73f6,
- 0x17427893, 0x2f761943, 0x460f1226, 0xfd840f89, 0x94fd04ec,
- 0x51e33296, 0x389a39f3, 0x8311245c, 0xea682f39, 0xf379e7fc,
- 0x9a00ec99, 0x218bf136, 0x48f2fa53, 0x8deccc29, 0xe495c74c,
- 0x5f1edae3, 0x3667d186, 0x0e53b056, 0x672abb33, 0xdca1a69c,
- 0xb5d8adf9, 0x70c69b83, 0x19bf90e6, 0xa2348d49, 0xcb4d862c,
- 0x90171cc3, 0xf96e17a6, 0x42e50a09, 0x2b9c016c, 0xee823716,
- 0x87fb3c73, 0x3c7021dc, 0x55092ab9, 0x6d3d4b69, 0x0444400c,
- 0xbfcf5da3, 0xd6b656c6, 0x13a860bc, 0x7ad16bd9, 0xc15a7676,
- 0xa8237d13, 0xb132b5d6, 0xd84bbeb3, 0x63c0a31c, 0x0ab9a879,
- 0xcfa79e03, 0xa6de9566, 0x1d5588c9, 0x742c83ac, 0x4c18e27c,
- 0x2561e919, 0x9eeaf4b6, 0xf793ffd3, 0x328dc9a9, 0x5bf4c2cc,
- 0xe07fdf63, 0x8906d406, 0x56caeabd, 0x3fb3e1d8, 0x8438fc77,
- 0xed41f712, 0x285fc168, 0x4126ca0d, 0xfaadd7a2, 0x93d4dcc7,
- 0xabe0bd17, 0xc299b672, 0x7912abdd, 0x106ba0b8, 0xd57596c2,
- 0xbc0c9da7, 0x07878008, 0x6efe8b6d, 0x77ef43a8, 0x1e9648cd,
- 0xa51d5562, 0xcc645e07, 0x097a687d, 0x60036318, 0xdb887eb7,
- 0xb2f175d2, 0x8ac51402, 0xe3bc1f67, 0x583702c8, 0x314e09ad,
- 0xf4503fd7, 0x9d2934b2, 0x26a2291d, 0x4fdb2278, 0x1481b897,
- 0x7df8b3f2, 0xc673ae5d, 0xaf0aa538, 0x6a149342, 0x036d9827,
- 0xb8e68588, 0xd19f8eed, 0xe9abef3d, 0x80d2e458, 0x3b59f9f7,
- 0x5220f292, 0x973ec4e8, 0xfe47cf8d, 0x45ccd222, 0x2cb5d947,
- 0x35a41182, 0x5cdd1ae7, 0xe7560748, 0x8e2f0c2d, 0x4b313a57,
- 0x22483132, 0x99c32c9d, 0xf0ba27f8, 0xc88e4628, 0xa1f74d4d,
- 0x1a7c50e2, 0x73055b87, 0xb61b6dfd, 0xdf626698, 0x64e97b37,
- 0x0d907052},
- {0x00000000, 0x7fc99b93, 0xff933726, 0x805aacb5, 0x2457680d,
- 0x5b9ef39e, 0xdbc45f2b, 0xa40dc4b8, 0x48aed01a, 0x37674b89,
- 0xb73de73c, 0xc8f47caf, 0x6cf9b817, 0x13302384, 0x936a8f31,
- 0xeca314a2, 0x915da034, 0xee943ba7, 0x6ece9712, 0x11070c81,
- 0xb50ac839, 0xcac353aa, 0x4a99ff1f, 0x3550648c, 0xd9f3702e,
- 0xa63aebbd, 0x26604708, 0x59a9dc9b, 0xfda41823, 0x826d83b0,
- 0x02372f05, 0x7dfeb496, 0xf9ca4629, 0x8603ddba, 0x0659710f,
- 0x7990ea9c, 0xdd9d2e24, 0xa254b5b7, 0x220e1902, 0x5dc78291,
- 0xb1649633, 0xcead0da0, 0x4ef7a115, 0x313e3a86, 0x9533fe3e,
- 0xeafa65ad, 0x6aa0c918, 0x1569528b, 0x6897e61d, 0x175e7d8e,
- 0x9704d13b, 0xe8cd4aa8, 0x4cc08e10, 0x33091583, 0xb353b936,
- 0xcc9a22a5, 0x20393607, 0x5ff0ad94, 0xdfaa0121, 0xa0639ab2,
- 0x046e5e0a, 0x7ba7c599, 0xfbfd692c, 0x8434f2bf, 0x28e58a13,
- 0x572c1180, 0xd776bd35, 0xa8bf26a6, 0x0cb2e21e, 0x737b798d,
- 0xf321d538, 0x8ce84eab, 0x604b5a09, 0x1f82c19a, 0x9fd86d2f,
- 0xe011f6bc, 0x441c3204, 0x3bd5a997, 0xbb8f0522, 0xc4469eb1,
- 0xb9b82a27, 0xc671b1b4, 0x462b1d01, 0x39e28692, 0x9def422a,
- 0xe226d9b9, 0x627c750c, 0x1db5ee9f, 0xf116fa3d, 0x8edf61ae,
- 0x0e85cd1b, 0x714c5688, 0xd5419230, 0xaa8809a3, 0x2ad2a516,
- 0x551b3e85, 0xd12fcc3a, 0xaee657a9, 0x2ebcfb1c, 0x5175608f,
- 0xf578a437, 0x8ab13fa4, 0x0aeb9311, 0x75220882, 0x99811c20,
- 0xe64887b3, 0x66122b06, 0x19dbb095, 0xbdd6742d, 0xc21fefbe,
- 0x4245430b, 0x3d8cd898, 0x40726c0e, 0x3fbbf79d, 0xbfe15b28,
- 0xc028c0bb, 0x64250403, 0x1bec9f90, 0x9bb63325, 0xe47fa8b6,
- 0x08dcbc14, 0x77152787, 0xf74f8b32, 0x888610a1, 0x2c8bd419,
- 0x53424f8a, 0xd318e33f, 0xacd178ac, 0x51cb1426, 0x2e028fb5,
- 0xae582300, 0xd191b893, 0x759c7c2b, 0x0a55e7b8, 0x8a0f4b0d,
- 0xf5c6d09e, 0x1965c43c, 0x66ac5faf, 0xe6f6f31a, 0x993f6889,
- 0x3d32ac31, 0x42fb37a2, 0xc2a19b17, 0xbd680084, 0xc096b412,
- 0xbf5f2f81, 0x3f058334, 0x40cc18a7, 0xe4c1dc1f, 0x9b08478c,
- 0x1b52eb39, 0x649b70aa, 0x88386408, 0xf7f1ff9b, 0x77ab532e,
- 0x0862c8bd, 0xac6f0c05, 0xd3a69796, 0x53fc3b23, 0x2c35a0b0,
- 0xa801520f, 0xd7c8c99c, 0x57926529, 0x285bfeba, 0x8c563a02,
- 0xf39fa191, 0x73c50d24, 0x0c0c96b7, 0xe0af8215, 0x9f661986,
- 0x1f3cb533, 0x60f52ea0, 0xc4f8ea18, 0xbb31718b, 0x3b6bdd3e,
- 0x44a246ad, 0x395cf23b, 0x469569a8, 0xc6cfc51d, 0xb9065e8e,
- 0x1d0b9a36, 0x62c201a5, 0xe298ad10, 0x9d513683, 0x71f22221,
- 0x0e3bb9b2, 0x8e611507, 0xf1a88e94, 0x55a54a2c, 0x2a6cd1bf,
- 0xaa367d0a, 0xd5ffe699, 0x792e9e35, 0x06e705a6, 0x86bda913,
- 0xf9743280, 0x5d79f638, 0x22b06dab, 0xa2eac11e, 0xdd235a8d,
- 0x31804e2f, 0x4e49d5bc, 0xce137909, 0xb1dae29a, 0x15d72622,
- 0x6a1ebdb1, 0xea441104, 0x958d8a97, 0xe8733e01, 0x97baa592,
- 0x17e00927, 0x682992b4, 0xcc24560c, 0xb3edcd9f, 0x33b7612a,
- 0x4c7efab9, 0xa0ddee1b, 0xdf147588, 0x5f4ed93d, 0x208742ae,
- 0x848a8616, 0xfb431d85, 0x7b19b130, 0x04d02aa3, 0x80e4d81c,
- 0xff2d438f, 0x7f77ef3a, 0x00be74a9, 0xa4b3b011, 0xdb7a2b82,
- 0x5b208737, 0x24e91ca4, 0xc84a0806, 0xb7839395, 0x37d93f20,
- 0x4810a4b3, 0xec1d600b, 0x93d4fb98, 0x138e572d, 0x6c47ccbe,
- 0x11b97828, 0x6e70e3bb, 0xee2a4f0e, 0x91e3d49d, 0x35ee1025,
- 0x4a278bb6, 0xca7d2703, 0xb5b4bc90, 0x5917a832, 0x26de33a1,
- 0xa6849f14, 0xd94d0487, 0x7d40c03f, 0x02895bac, 0x82d3f719,
- 0xfd1a6c8a},
- {0x00000000, 0xa396284c, 0x9c5d56d9, 0x3fcb7e95, 0xe3cbabf3,
- 0x405d83bf, 0x7f96fd2a, 0xdc00d566, 0x1ce651a7, 0xbf7079eb,
- 0x80bb077e, 0x232d2f32, 0xff2dfa54, 0x5cbbd218, 0x6370ac8d,
- 0xc0e684c1, 0x39cca34e, 0x9a5a8b02, 0xa591f597, 0x0607dddb,
- 0xda0708bd, 0x799120f1, 0x465a5e64, 0xe5cc7628, 0x252af2e9,
- 0x86bcdaa5, 0xb977a430, 0x1ae18c7c, 0xc6e1591a, 0x65777156,
- 0x5abc0fc3, 0xf92a278f, 0x7399469c, 0xd00f6ed0, 0xefc41045,
- 0x4c523809, 0x9052ed6f, 0x33c4c523, 0x0c0fbbb6, 0xaf9993fa,
- 0x6f7f173b, 0xcce93f77, 0xf32241e2, 0x50b469ae, 0x8cb4bcc8,
- 0x2f229484, 0x10e9ea11, 0xb37fc25d, 0x4a55e5d2, 0xe9c3cd9e,
- 0xd608b30b, 0x759e9b47, 0xa99e4e21, 0x0a08666d, 0x35c318f8,
- 0x965530b4, 0x56b3b475, 0xf5259c39, 0xcaeee2ac, 0x6978cae0,
- 0xb5781f86, 0x16ee37ca, 0x2925495f, 0x8ab36113, 0xe7328d38,
- 0x44a4a574, 0x7b6fdbe1, 0xd8f9f3ad, 0x04f926cb, 0xa76f0e87,
- 0x98a47012, 0x3b32585e, 0xfbd4dc9f, 0x5842f4d3, 0x67898a46,
- 0xc41fa20a, 0x181f776c, 0xbb895f20, 0x844221b5, 0x27d409f9,
- 0xdefe2e76, 0x7d68063a, 0x42a378af, 0xe13550e3, 0x3d358585,
- 0x9ea3adc9, 0xa168d35c, 0x02fefb10, 0xc2187fd1, 0x618e579d,
- 0x5e452908, 0xfdd30144, 0x21d3d422, 0x8245fc6e, 0xbd8e82fb,
- 0x1e18aab7, 0x94abcba4, 0x373de3e8, 0x08f69d7d, 0xab60b531,
- 0x77606057, 0xd4f6481b, 0xeb3d368e, 0x48ab1ec2, 0x884d9a03,
- 0x2bdbb24f, 0x1410ccda, 0xb786e496, 0x6b8631f0, 0xc81019bc,
- 0xf7db6729, 0x544d4f65, 0xad6768ea, 0x0ef140a6, 0x313a3e33,
- 0x92ac167f, 0x4eacc319, 0xed3aeb55, 0xd2f195c0, 0x7167bd8c,
- 0xb181394d, 0x12171101, 0x2ddc6f94, 0x8e4a47d8, 0x524a92be,
- 0xf1dcbaf2, 0xce17c467, 0x6d81ec2b, 0x15141c31, 0xb682347d,
- 0x89494ae8, 0x2adf62a4, 0xf6dfb7c2, 0x55499f8e, 0x6a82e11b,
- 0xc914c957, 0x09f24d96, 0xaa6465da, 0x95af1b4f, 0x36393303,
- 0xea39e665, 0x49afce29, 0x7664b0bc, 0xd5f298f0, 0x2cd8bf7f,
- 0x8f4e9733, 0xb085e9a6, 0x1313c1ea, 0xcf13148c, 0x6c853cc0,
- 0x534e4255, 0xf0d86a19, 0x303eeed8, 0x93a8c694, 0xac63b801,
- 0x0ff5904d, 0xd3f5452b, 0x70636d67, 0x4fa813f2, 0xec3e3bbe,
- 0x668d5aad, 0xc51b72e1, 0xfad00c74, 0x59462438, 0x8546f15e,
- 0x26d0d912, 0x191ba787, 0xba8d8fcb, 0x7a6b0b0a, 0xd9fd2346,
- 0xe6365dd3, 0x45a0759f, 0x99a0a0f9, 0x3a3688b5, 0x05fdf620,
- 0xa66bde6c, 0x5f41f9e3, 0xfcd7d1af, 0xc31caf3a, 0x608a8776,
- 0xbc8a5210, 0x1f1c7a5c, 0x20d704c9, 0x83412c85, 0x43a7a844,
- 0xe0318008, 0xdffafe9d, 0x7c6cd6d1, 0xa06c03b7, 0x03fa2bfb,
- 0x3c31556e, 0x9fa77d22, 0xf2269109, 0x51b0b945, 0x6e7bc7d0,
- 0xcdedef9c, 0x11ed3afa, 0xb27b12b6, 0x8db06c23, 0x2e26446f,
- 0xeec0c0ae, 0x4d56e8e2, 0x729d9677, 0xd10bbe3b, 0x0d0b6b5d,
- 0xae9d4311, 0x91563d84, 0x32c015c8, 0xcbea3247, 0x687c1a0b,
- 0x57b7649e, 0xf4214cd2, 0x282199b4, 0x8bb7b1f8, 0xb47ccf6d,
- 0x17eae721, 0xd70c63e0, 0x749a4bac, 0x4b513539, 0xe8c71d75,
- 0x34c7c813, 0x9751e05f, 0xa89a9eca, 0x0b0cb686, 0x81bfd795,
- 0x2229ffd9, 0x1de2814c, 0xbe74a900, 0x62747c66, 0xc1e2542a,
- 0xfe292abf, 0x5dbf02f3, 0x9d598632, 0x3ecfae7e, 0x0104d0eb,
- 0xa292f8a7, 0x7e922dc1, 0xdd04058d, 0xe2cf7b18, 0x41595354,
- 0xb87374db, 0x1be55c97, 0x242e2202, 0x87b80a4e, 0x5bb8df28,
- 0xf82ef764, 0xc7e589f1, 0x6473a1bd, 0xa495257c, 0x07030d30,
- 0x38c873a5, 0x9b5e5be9, 0x475e8e8f, 0xe4c8a6c3, 0xdb03d856,
- 0x7895f01a},
- {0x00000000, 0x2a283862, 0x545070c4, 0x7e7848a6, 0xa8a0e188,
- 0x8288d9ea, 0xfcf0914c, 0xd6d8a92e, 0x8a30c551, 0xa018fd33,
- 0xde60b595, 0xf4488df7, 0x229024d9, 0x08b81cbb, 0x76c0541d,
- 0x5ce86c7f, 0xcf108ce3, 0xe538b481, 0x9b40fc27, 0xb168c445,
- 0x67b06d6b, 0x4d985509, 0x33e01daf, 0x19c825cd, 0x452049b2,
- 0x6f0871d0, 0x11703976, 0x3b580114, 0xed80a83a, 0xc7a89058,
- 0xb9d0d8fe, 0x93f8e09c, 0x45501f87, 0x6f7827e5, 0x11006f43,
- 0x3b285721, 0xedf0fe0f, 0xc7d8c66d, 0xb9a08ecb, 0x9388b6a9,
- 0xcf60dad6, 0xe548e2b4, 0x9b30aa12, 0xb1189270, 0x67c03b5e,
- 0x4de8033c, 0x33904b9a, 0x19b873f8, 0x8a409364, 0xa068ab06,
- 0xde10e3a0, 0xf438dbc2, 0x22e072ec, 0x08c84a8e, 0x76b00228,
- 0x5c983a4a, 0x00705635, 0x2a586e57, 0x542026f1, 0x7e081e93,
- 0xa8d0b7bd, 0x82f88fdf, 0xfc80c779, 0xd6a8ff1b, 0x8aa03f0e,
- 0xa088076c, 0xdef04fca, 0xf4d877a8, 0x2200de86, 0x0828e6e4,
- 0x7650ae42, 0x5c789620, 0x0090fa5f, 0x2ab8c23d, 0x54c08a9b,
- 0x7ee8b2f9, 0xa8301bd7, 0x821823b5, 0xfc606b13, 0xd6485371,
- 0x45b0b3ed, 0x6f988b8f, 0x11e0c329, 0x3bc8fb4b, 0xed105265,
- 0xc7386a07, 0xb94022a1, 0x93681ac3, 0xcf8076bc, 0xe5a84ede,
- 0x9bd00678, 0xb1f83e1a, 0x67209734, 0x4d08af56, 0x3370e7f0,
- 0x1958df92, 0xcff02089, 0xe5d818eb, 0x9ba0504d, 0xb188682f,
- 0x6750c101, 0x4d78f963, 0x3300b1c5, 0x192889a7, 0x45c0e5d8,
- 0x6fe8ddba, 0x1190951c, 0x3bb8ad7e, 0xed600450, 0xc7483c32,
- 0xb9307494, 0x93184cf6, 0x00e0ac6a, 0x2ac89408, 0x54b0dcae,
- 0x7e98e4cc, 0xa8404de2, 0x82687580, 0xfc103d26, 0xd6380544,
- 0x8ad0693b, 0xa0f85159, 0xde8019ff, 0xf4a8219d, 0x227088b3,
- 0x0858b0d1, 0x7620f877, 0x5c08c015, 0xce31785d, 0xe419403f,
- 0x9a610899, 0xb04930fb, 0x669199d5, 0x4cb9a1b7, 0x32c1e911,
- 0x18e9d173, 0x4401bd0c, 0x6e29856e, 0x1051cdc8, 0x3a79f5aa,
- 0xeca15c84, 0xc68964e6, 0xb8f12c40, 0x92d91422, 0x0121f4be,
- 0x2b09ccdc, 0x5571847a, 0x7f59bc18, 0xa9811536, 0x83a92d54,
- 0xfdd165f2, 0xd7f95d90, 0x8b1131ef, 0xa139098d, 0xdf41412b,
- 0xf5697949, 0x23b1d067, 0x0999e805, 0x77e1a0a3, 0x5dc998c1,
- 0x8b6167da, 0xa1495fb8, 0xdf31171e, 0xf5192f7c, 0x23c18652,
- 0x09e9be30, 0x7791f696, 0x5db9cef4, 0x0151a28b, 0x2b799ae9,
- 0x5501d24f, 0x7f29ea2d, 0xa9f14303, 0x83d97b61, 0xfda133c7,
- 0xd7890ba5, 0x4471eb39, 0x6e59d35b, 0x10219bfd, 0x3a09a39f,
- 0xecd10ab1, 0xc6f932d3, 0xb8817a75, 0x92a94217, 0xce412e68,
- 0xe469160a, 0x9a115eac, 0xb03966ce, 0x66e1cfe0, 0x4cc9f782,
- 0x32b1bf24, 0x18998746, 0x44914753, 0x6eb97f31, 0x10c13797,
- 0x3ae90ff5, 0xec31a6db, 0xc6199eb9, 0xb861d61f, 0x9249ee7d,
- 0xcea18202, 0xe489ba60, 0x9af1f2c6, 0xb0d9caa4, 0x6601638a,
- 0x4c295be8, 0x3251134e, 0x18792b2c, 0x8b81cbb0, 0xa1a9f3d2,
- 0xdfd1bb74, 0xf5f98316, 0x23212a38, 0x0909125a, 0x77715afc,
- 0x5d59629e, 0x01b10ee1, 0x2b993683, 0x55e17e25, 0x7fc94647,
- 0xa911ef69, 0x8339d70b, 0xfd419fad, 0xd769a7cf, 0x01c158d4,
- 0x2be960b6, 0x55912810, 0x7fb91072, 0xa961b95c, 0x8349813e,
- 0xfd31c998, 0xd719f1fa, 0x8bf19d85, 0xa1d9a5e7, 0xdfa1ed41,
- 0xf589d523, 0x23517c0d, 0x0979446f, 0x77010cc9, 0x5d2934ab,
- 0xced1d437, 0xe4f9ec55, 0x9a81a4f3, 0xb0a99c91, 0x667135bf,
- 0x4c590ddd, 0x3221457b, 0x18097d19, 0x44e11166, 0x6ec92904,
- 0x10b161a2, 0x3a9959c0, 0xec41f0ee, 0xc669c88c, 0xb811802a,
- 0x9239b848},
- {0x00000000, 0x4713f6fb, 0x8e27edf6, 0xc9341b0d, 0xc73eddad,
- 0x802d2b56, 0x4919305b, 0x0e0ac6a0, 0x550cbd1b, 0x121f4be0,
- 0xdb2b50ed, 0x9c38a616, 0x923260b6, 0xd521964d, 0x1c158d40,
- 0x5b067bbb, 0xaa197a36, 0xed0a8ccd, 0x243e97c0, 0x632d613b,
- 0x6d27a79b, 0x2a345160, 0xe3004a6d, 0xa413bc96, 0xff15c72d,
- 0xb80631d6, 0x71322adb, 0x3621dc20, 0x382b1a80, 0x7f38ec7b,
- 0xb60cf776, 0xf11f018d, 0x8f43f22d, 0xc85004d6, 0x01641fdb,
- 0x4677e920, 0x487d2f80, 0x0f6ed97b, 0xc65ac276, 0x8149348d,
- 0xda4f4f36, 0x9d5cb9cd, 0x5468a2c0, 0x137b543b, 0x1d71929b,
- 0x5a626460, 0x93567f6d, 0xd4458996, 0x255a881b, 0x62497ee0,
- 0xab7d65ed, 0xec6e9316, 0xe26455b6, 0xa577a34d, 0x6c43b840,
- 0x2b504ebb, 0x70563500, 0x3745c3fb, 0xfe71d8f6, 0xb9622e0d,
- 0xb768e8ad, 0xf07b1e56, 0x394f055b, 0x7e5cf3a0, 0xc5f6e21b,
- 0x82e514e0, 0x4bd10fed, 0x0cc2f916, 0x02c83fb6, 0x45dbc94d,
- 0x8cefd240, 0xcbfc24bb, 0x90fa5f00, 0xd7e9a9fb, 0x1eddb2f6,
- 0x59ce440d, 0x57c482ad, 0x10d77456, 0xd9e36f5b, 0x9ef099a0,
- 0x6fef982d, 0x28fc6ed6, 0xe1c875db, 0xa6db8320, 0xa8d14580,
- 0xefc2b37b, 0x26f6a876, 0x61e55e8d, 0x3ae32536, 0x7df0d3cd,
- 0xb4c4c8c0, 0xf3d73e3b, 0xfdddf89b, 0xbace0e60, 0x73fa156d,
- 0x34e9e396, 0x4ab51036, 0x0da6e6cd, 0xc492fdc0, 0x83810b3b,
- 0x8d8bcd9b, 0xca983b60, 0x03ac206d, 0x44bfd696, 0x1fb9ad2d,
- 0x58aa5bd6, 0x919e40db, 0xd68db620, 0xd8877080, 0x9f94867b,
- 0x56a09d76, 0x11b36b8d, 0xe0ac6a00, 0xa7bf9cfb, 0x6e8b87f6,
- 0x2998710d, 0x2792b7ad, 0x60814156, 0xa9b55a5b, 0xeea6aca0,
- 0xb5a0d71b, 0xf2b321e0, 0x3b873aed, 0x7c94cc16, 0x729e0ab6,
- 0x358dfc4d, 0xfcb9e740, 0xbbaa11bb, 0x509cc277, 0x178f348c,
- 0xdebb2f81, 0x99a8d97a, 0x97a21fda, 0xd0b1e921, 0x1985f22c,
- 0x5e9604d7, 0x05907f6c, 0x42838997, 0x8bb7929a, 0xcca46461,
- 0xc2aea2c1, 0x85bd543a, 0x4c894f37, 0x0b9ab9cc, 0xfa85b841,
- 0xbd964eba, 0x74a255b7, 0x33b1a34c, 0x3dbb65ec, 0x7aa89317,
- 0xb39c881a, 0xf48f7ee1, 0xaf89055a, 0xe89af3a1, 0x21aee8ac,
- 0x66bd1e57, 0x68b7d8f7, 0x2fa42e0c, 0xe6903501, 0xa183c3fa,
- 0xdfdf305a, 0x98ccc6a1, 0x51f8ddac, 0x16eb2b57, 0x18e1edf7,
- 0x5ff21b0c, 0x96c60001, 0xd1d5f6fa, 0x8ad38d41, 0xcdc07bba,
- 0x04f460b7, 0x43e7964c, 0x4ded50ec, 0x0afea617, 0xc3cabd1a,
- 0x84d94be1, 0x75c64a6c, 0x32d5bc97, 0xfbe1a79a, 0xbcf25161,
- 0xb2f897c1, 0xf5eb613a, 0x3cdf7a37, 0x7bcc8ccc, 0x20caf777,
- 0x67d9018c, 0xaeed1a81, 0xe9feec7a, 0xe7f42ada, 0xa0e7dc21,
- 0x69d3c72c, 0x2ec031d7, 0x956a206c, 0xd279d697, 0x1b4dcd9a,
- 0x5c5e3b61, 0x5254fdc1, 0x15470b3a, 0xdc731037, 0x9b60e6cc,
- 0xc0669d77, 0x87756b8c, 0x4e417081, 0x0952867a, 0x075840da,
- 0x404bb621, 0x897fad2c, 0xce6c5bd7, 0x3f735a5a, 0x7860aca1,
- 0xb154b7ac, 0xf6474157, 0xf84d87f7, 0xbf5e710c, 0x766a6a01,
- 0x31799cfa, 0x6a7fe741, 0x2d6c11ba, 0xe4580ab7, 0xa34bfc4c,
- 0xad413aec, 0xea52cc17, 0x2366d71a, 0x647521e1, 0x1a29d241,
- 0x5d3a24ba, 0x940e3fb7, 0xd31dc94c, 0xdd170fec, 0x9a04f917,
- 0x5330e21a, 0x142314e1, 0x4f256f5a, 0x083699a1, 0xc10282ac,
- 0x86117457, 0x881bb2f7, 0xcf08440c, 0x063c5f01, 0x412fa9fa,
- 0xb030a877, 0xf7235e8c, 0x3e174581, 0x7904b37a, 0x770e75da,
- 0x301d8321, 0xf929982c, 0xbe3a6ed7, 0xe53c156c, 0xa22fe397,
- 0x6b1bf89a, 0x2c080e61, 0x2202c8c1, 0x65113e3a, 0xac252537,
- 0xeb36d3cc},
- {0x00000000, 0xa13984ee, 0x99020f9d, 0x383b8b73, 0xe975197b,
- 0x484c9d95, 0x707716e6, 0xd14e9208, 0x099b34b7, 0xa8a2b059,
- 0x90993b2a, 0x31a0bfc4, 0xe0ee2dcc, 0x41d7a922, 0x79ec2251,
- 0xd8d5a6bf, 0x1336696e, 0xb20fed80, 0x8a3466f3, 0x2b0de21d,
- 0xfa437015, 0x5b7af4fb, 0x63417f88, 0xc278fb66, 0x1aad5dd9,
- 0xbb94d937, 0x83af5244, 0x2296d6aa, 0xf3d844a2, 0x52e1c04c,
- 0x6ada4b3f, 0xcbe3cfd1, 0x266cd2dc, 0x87555632, 0xbf6edd41,
- 0x1e5759af, 0xcf19cba7, 0x6e204f49, 0x561bc43a, 0xf72240d4,
- 0x2ff7e66b, 0x8ece6285, 0xb6f5e9f6, 0x17cc6d18, 0xc682ff10,
- 0x67bb7bfe, 0x5f80f08d, 0xfeb97463, 0x355abbb2, 0x94633f5c,
- 0xac58b42f, 0x0d6130c1, 0xdc2fa2c9, 0x7d162627, 0x452dad54,
- 0xe41429ba, 0x3cc18f05, 0x9df80beb, 0xa5c38098, 0x04fa0476,
- 0xd5b4967e, 0x748d1290, 0x4cb699e3, 0xed8f1d0d, 0x4cd9a5b8,
- 0xede02156, 0xd5dbaa25, 0x74e22ecb, 0xa5acbcc3, 0x0495382d,
- 0x3caeb35e, 0x9d9737b0, 0x4542910f, 0xe47b15e1, 0xdc409e92,
- 0x7d791a7c, 0xac378874, 0x0d0e0c9a, 0x353587e9, 0x940c0307,
- 0x5fefccd6, 0xfed64838, 0xc6edc34b, 0x67d447a5, 0xb69ad5ad,
- 0x17a35143, 0x2f98da30, 0x8ea15ede, 0x5674f861, 0xf74d7c8f,
- 0xcf76f7fc, 0x6e4f7312, 0xbf01e11a, 0x1e3865f4, 0x2603ee87,
- 0x873a6a69, 0x6ab57764, 0xcb8cf38a, 0xf3b778f9, 0x528efc17,
- 0x83c06e1f, 0x22f9eaf1, 0x1ac26182, 0xbbfbe56c, 0x632e43d3,
- 0xc217c73d, 0xfa2c4c4e, 0x5b15c8a0, 0x8a5b5aa8, 0x2b62de46,
- 0x13595535, 0xb260d1db, 0x79831e0a, 0xd8ba9ae4, 0xe0811197,
- 0x41b89579, 0x90f60771, 0x31cf839f, 0x09f408ec, 0xa8cd8c02,
- 0x70182abd, 0xd121ae53, 0xe91a2520, 0x4823a1ce, 0x996d33c6,
- 0x3854b728, 0x006f3c5b, 0xa156b8b5, 0x99b34b70, 0x388acf9e,
- 0x00b144ed, 0xa188c003, 0x70c6520b, 0xd1ffd6e5, 0xe9c45d96,
- 0x48fdd978, 0x90287fc7, 0x3111fb29, 0x092a705a, 0xa813f4b4,
- 0x795d66bc, 0xd864e252, 0xe05f6921, 0x4166edcf, 0x8a85221e,
- 0x2bbca6f0, 0x13872d83, 0xb2bea96d, 0x63f03b65, 0xc2c9bf8b,
- 0xfaf234f8, 0x5bcbb016, 0x831e16a9, 0x22279247, 0x1a1c1934,
- 0xbb259dda, 0x6a6b0fd2, 0xcb528b3c, 0xf369004f, 0x525084a1,
- 0xbfdf99ac, 0x1ee61d42, 0x26dd9631, 0x87e412df, 0x56aa80d7,
- 0xf7930439, 0xcfa88f4a, 0x6e910ba4, 0xb644ad1b, 0x177d29f5,
- 0x2f46a286, 0x8e7f2668, 0x5f31b460, 0xfe08308e, 0xc633bbfd,
- 0x670a3f13, 0xace9f0c2, 0x0dd0742c, 0x35ebff5f, 0x94d27bb1,
- 0x459ce9b9, 0xe4a56d57, 0xdc9ee624, 0x7da762ca, 0xa572c475,
- 0x044b409b, 0x3c70cbe8, 0x9d494f06, 0x4c07dd0e, 0xed3e59e0,
- 0xd505d293, 0x743c567d, 0xd56aeec8, 0x74536a26, 0x4c68e155,
- 0xed5165bb, 0x3c1ff7b3, 0x9d26735d, 0xa51df82e, 0x04247cc0,
- 0xdcf1da7f, 0x7dc85e91, 0x45f3d5e2, 0xe4ca510c, 0x3584c304,
- 0x94bd47ea, 0xac86cc99, 0x0dbf4877, 0xc65c87a6, 0x67650348,
- 0x5f5e883b, 0xfe670cd5, 0x2f299edd, 0x8e101a33, 0xb62b9140,
- 0x171215ae, 0xcfc7b311, 0x6efe37ff, 0x56c5bc8c, 0xf7fc3862,
- 0x26b2aa6a, 0x878b2e84, 0xbfb0a5f7, 0x1e892119, 0xf3063c14,
- 0x523fb8fa, 0x6a043389, 0xcb3db767, 0x1a73256f, 0xbb4aa181,
- 0x83712af2, 0x2248ae1c, 0xfa9d08a3, 0x5ba48c4d, 0x639f073e,
- 0xc2a683d0, 0x13e811d8, 0xb2d19536, 0x8aea1e45, 0x2bd39aab,
- 0xe030557a, 0x4109d194, 0x79325ae7, 0xd80bde09, 0x09454c01,
- 0xa87cc8ef, 0x9047439c, 0x317ec772, 0xe9ab61cd, 0x4892e523,
- 0x70a96e50, 0xd190eabe, 0x00de78b6, 0xa1e7fc58, 0x99dc772b,
- 0x38e5f3c5},
- {0x00000000, 0xe81790a1, 0x0b5e2703, 0xe349b7a2, 0x16bc4e06,
- 0xfeabdea7, 0x1de26905, 0xf5f5f9a4, 0x2d789c0c, 0xc56f0cad,
- 0x2626bb0f, 0xce312bae, 0x3bc4d20a, 0xd3d342ab, 0x309af509,
- 0xd88d65a8, 0x5af13818, 0xb2e6a8b9, 0x51af1f1b, 0xb9b88fba,
- 0x4c4d761e, 0xa45ae6bf, 0x4713511d, 0xaf04c1bc, 0x7789a414,
- 0x9f9e34b5, 0x7cd78317, 0x94c013b6, 0x6135ea12, 0x89227ab3,
- 0x6a6bcd11, 0x827c5db0, 0xb5e27030, 0x5df5e091, 0xbebc5733,
- 0x56abc792, 0xa35e3e36, 0x4b49ae97, 0xa8001935, 0x40178994,
- 0x989aec3c, 0x708d7c9d, 0x93c4cb3f, 0x7bd35b9e, 0x8e26a23a,
- 0x6631329b, 0x85788539, 0x6d6f1598, 0xef134828, 0x0704d889,
- 0xe44d6f2b, 0x0c5aff8a, 0xf9af062e, 0x11b8968f, 0xf2f1212d,
- 0x1ae6b18c, 0xc26bd424, 0x2a7c4485, 0xc935f327, 0x21226386,
- 0xd4d79a22, 0x3cc00a83, 0xdf89bd21, 0x379e2d80, 0xb0b5e621,
- 0x58a27680, 0xbbebc122, 0x53fc5183, 0xa609a827, 0x4e1e3886,
- 0xad578f24, 0x45401f85, 0x9dcd7a2d, 0x75daea8c, 0x96935d2e,
- 0x7e84cd8f, 0x8b71342b, 0x6366a48a, 0x802f1328, 0x68388389,
- 0xea44de39, 0x02534e98, 0xe11af93a, 0x090d699b, 0xfcf8903f,
- 0x14ef009e, 0xf7a6b73c, 0x1fb1279d, 0xc73c4235, 0x2f2bd294,
- 0xcc626536, 0x2475f597, 0xd1800c33, 0x39979c92, 0xdade2b30,
- 0x32c9bb91, 0x05579611, 0xed4006b0, 0x0e09b112, 0xe61e21b3,
- 0x13ebd817, 0xfbfc48b6, 0x18b5ff14, 0xf0a26fb5, 0x282f0a1d,
- 0xc0389abc, 0x23712d1e, 0xcb66bdbf, 0x3e93441b, 0xd684d4ba,
- 0x35cd6318, 0xdddaf3b9, 0x5fa6ae09, 0xb7b13ea8, 0x54f8890a,
- 0xbcef19ab, 0x491ae00f, 0xa10d70ae, 0x4244c70c, 0xaa5357ad,
- 0x72de3205, 0x9ac9a2a4, 0x79801506, 0x919785a7, 0x64627c03,
- 0x8c75eca2, 0x6f3c5b00, 0x872bcba1, 0xba1aca03, 0x520d5aa2,
- 0xb144ed00, 0x59537da1, 0xaca68405, 0x44b114a4, 0xa7f8a306,
- 0x4fef33a7, 0x9762560f, 0x7f75c6ae, 0x9c3c710c, 0x742be1ad,
- 0x81de1809, 0x69c988a8, 0x8a803f0a, 0x6297afab, 0xe0ebf21b,
- 0x08fc62ba, 0xebb5d518, 0x03a245b9, 0xf657bc1d, 0x1e402cbc,
- 0xfd099b1e, 0x151e0bbf, 0xcd936e17, 0x2584feb6, 0xc6cd4914,
- 0x2edad9b5, 0xdb2f2011, 0x3338b0b0, 0xd0710712, 0x386697b3,
- 0x0ff8ba33, 0xe7ef2a92, 0x04a69d30, 0xecb10d91, 0x1944f435,
- 0xf1536494, 0x121ad336, 0xfa0d4397, 0x2280263f, 0xca97b69e,
- 0x29de013c, 0xc1c9919d, 0x343c6839, 0xdc2bf898, 0x3f624f3a,
- 0xd775df9b, 0x5509822b, 0xbd1e128a, 0x5e57a528, 0xb6403589,
- 0x43b5cc2d, 0xaba25c8c, 0x48ebeb2e, 0xa0fc7b8f, 0x78711e27,
- 0x90668e86, 0x732f3924, 0x9b38a985, 0x6ecd5021, 0x86dac080,
- 0x65937722, 0x8d84e783, 0x0aaf2c22, 0xe2b8bc83, 0x01f10b21,
- 0xe9e69b80, 0x1c136224, 0xf404f285, 0x174d4527, 0xff5ad586,
- 0x27d7b02e, 0xcfc0208f, 0x2c89972d, 0xc49e078c, 0x316bfe28,
- 0xd97c6e89, 0x3a35d92b, 0xd222498a, 0x505e143a, 0xb849849b,
- 0x5b003339, 0xb317a398, 0x46e25a3c, 0xaef5ca9d, 0x4dbc7d3f,
- 0xa5abed9e, 0x7d268836, 0x95311897, 0x7678af35, 0x9e6f3f94,
- 0x6b9ac630, 0x838d5691, 0x60c4e133, 0x88d37192, 0xbf4d5c12,
- 0x575accb3, 0xb4137b11, 0x5c04ebb0, 0xa9f11214, 0x41e682b5,
- 0xa2af3517, 0x4ab8a5b6, 0x9235c01e, 0x7a2250bf, 0x996be71d,
- 0x717c77bc, 0x84898e18, 0x6c9e1eb9, 0x8fd7a91b, 0x67c039ba,
- 0xe5bc640a, 0x0dabf4ab, 0xeee24309, 0x06f5d3a8, 0xf3002a0c,
- 0x1b17baad, 0xf85e0d0f, 0x10499dae, 0xc8c4f806, 0x20d368a7,
- 0xc39adf05, 0x2b8d4fa4, 0xde78b600, 0x366f26a1, 0xd5269103,
- 0x3d3101a2}};
-
-local const z_word_t FAR crc_braid_big_table[][256] = {
- {0x0000000000000000, 0xa19017e800000000, 0x03275e0b00000000,
- 0xa2b749e300000000, 0x064ebc1600000000, 0xa7deabfe00000000,
- 0x0569e21d00000000, 0xa4f9f5f500000000, 0x0c9c782d00000000,
- 0xad0c6fc500000000, 0x0fbb262600000000, 0xae2b31ce00000000,
- 0x0ad2c43b00000000, 0xab42d3d300000000, 0x09f59a3000000000,
- 0xa8658dd800000000, 0x1838f15a00000000, 0xb9a8e6b200000000,
- 0x1b1faf5100000000, 0xba8fb8b900000000, 0x1e764d4c00000000,
- 0xbfe65aa400000000, 0x1d51134700000000, 0xbcc104af00000000,
- 0x14a4897700000000, 0xb5349e9f00000000, 0x1783d77c00000000,
- 0xb613c09400000000, 0x12ea356100000000, 0xb37a228900000000,
- 0x11cd6b6a00000000, 0xb05d7c8200000000, 0x3070e2b500000000,
- 0x91e0f55d00000000, 0x3357bcbe00000000, 0x92c7ab5600000000,
- 0x363e5ea300000000, 0x97ae494b00000000, 0x351900a800000000,
- 0x9489174000000000, 0x3cec9a9800000000, 0x9d7c8d7000000000,
- 0x3fcbc49300000000, 0x9e5bd37b00000000, 0x3aa2268e00000000,
- 0x9b32316600000000, 0x3985788500000000, 0x98156f6d00000000,
- 0x284813ef00000000, 0x89d8040700000000, 0x2b6f4de400000000,
- 0x8aff5a0c00000000, 0x2e06aff900000000, 0x8f96b81100000000,
- 0x2d21f1f200000000, 0x8cb1e61a00000000, 0x24d46bc200000000,
- 0x85447c2a00000000, 0x27f335c900000000, 0x8663222100000000,
- 0x229ad7d400000000, 0x830ac03c00000000, 0x21bd89df00000000,
- 0x802d9e3700000000, 0x21e6b5b000000000, 0x8076a25800000000,
- 0x22c1ebbb00000000, 0x8351fc5300000000, 0x27a809a600000000,
- 0x86381e4e00000000, 0x248f57ad00000000, 0x851f404500000000,
- 0x2d7acd9d00000000, 0x8ceada7500000000, 0x2e5d939600000000,
- 0x8fcd847e00000000, 0x2b34718b00000000, 0x8aa4666300000000,
- 0x28132f8000000000, 0x8983386800000000, 0x39de44ea00000000,
- 0x984e530200000000, 0x3af91ae100000000, 0x9b690d0900000000,
- 0x3f90f8fc00000000, 0x9e00ef1400000000, 0x3cb7a6f700000000,
- 0x9d27b11f00000000, 0x35423cc700000000, 0x94d22b2f00000000,
- 0x366562cc00000000, 0x97f5752400000000, 0x330c80d100000000,
- 0x929c973900000000, 0x302bdeda00000000, 0x91bbc93200000000,
- 0x1196570500000000, 0xb00640ed00000000, 0x12b1090e00000000,
- 0xb3211ee600000000, 0x17d8eb1300000000, 0xb648fcfb00000000,
- 0x14ffb51800000000, 0xb56fa2f000000000, 0x1d0a2f2800000000,
- 0xbc9a38c000000000, 0x1e2d712300000000, 0xbfbd66cb00000000,
- 0x1b44933e00000000, 0xbad484d600000000, 0x1863cd3500000000,
- 0xb9f3dadd00000000, 0x09aea65f00000000, 0xa83eb1b700000000,
- 0x0a89f85400000000, 0xab19efbc00000000, 0x0fe01a4900000000,
- 0xae700da100000000, 0x0cc7444200000000, 0xad5753aa00000000,
- 0x0532de7200000000, 0xa4a2c99a00000000, 0x0615807900000000,
- 0xa785979100000000, 0x037c626400000000, 0xa2ec758c00000000,
- 0x005b3c6f00000000, 0xa1cb2b8700000000, 0x03ca1aba00000000,
- 0xa25a0d5200000000, 0x00ed44b100000000, 0xa17d535900000000,
- 0x0584a6ac00000000, 0xa414b14400000000, 0x06a3f8a700000000,
- 0xa733ef4f00000000, 0x0f56629700000000, 0xaec6757f00000000,
- 0x0c713c9c00000000, 0xade12b7400000000, 0x0918de8100000000,
- 0xa888c96900000000, 0x0a3f808a00000000, 0xabaf976200000000,
- 0x1bf2ebe000000000, 0xba62fc0800000000, 0x18d5b5eb00000000,
- 0xb945a20300000000, 0x1dbc57f600000000, 0xbc2c401e00000000,
- 0x1e9b09fd00000000, 0xbf0b1e1500000000, 0x176e93cd00000000,
- 0xb6fe842500000000, 0x1449cdc600000000, 0xb5d9da2e00000000,
- 0x11202fdb00000000, 0xb0b0383300000000, 0x120771d000000000,
- 0xb397663800000000, 0x33baf80f00000000, 0x922aefe700000000,
- 0x309da60400000000, 0x910db1ec00000000, 0x35f4441900000000,
- 0x946453f100000000, 0x36d31a1200000000, 0x97430dfa00000000,
- 0x3f26802200000000, 0x9eb697ca00000000, 0x3c01de2900000000,
- 0x9d91c9c100000000, 0x39683c3400000000, 0x98f82bdc00000000,
- 0x3a4f623f00000000, 0x9bdf75d700000000, 0x2b82095500000000,
- 0x8a121ebd00000000, 0x28a5575e00000000, 0x893540b600000000,
- 0x2dccb54300000000, 0x8c5ca2ab00000000, 0x2eebeb4800000000,
- 0x8f7bfca000000000, 0x271e717800000000, 0x868e669000000000,
- 0x24392f7300000000, 0x85a9389b00000000, 0x2150cd6e00000000,
- 0x80c0da8600000000, 0x2277936500000000, 0x83e7848d00000000,
- 0x222caf0a00000000, 0x83bcb8e200000000, 0x210bf10100000000,
- 0x809be6e900000000, 0x2462131c00000000, 0x85f204f400000000,
- 0x27454d1700000000, 0x86d55aff00000000, 0x2eb0d72700000000,
- 0x8f20c0cf00000000, 0x2d97892c00000000, 0x8c079ec400000000,
- 0x28fe6b3100000000, 0x896e7cd900000000, 0x2bd9353a00000000,
- 0x8a4922d200000000, 0x3a145e5000000000, 0x9b8449b800000000,
- 0x3933005b00000000, 0x98a317b300000000, 0x3c5ae24600000000,
- 0x9dcaf5ae00000000, 0x3f7dbc4d00000000, 0x9eedaba500000000,
- 0x3688267d00000000, 0x9718319500000000, 0x35af787600000000,
- 0x943f6f9e00000000, 0x30c69a6b00000000, 0x91568d8300000000,
- 0x33e1c46000000000, 0x9271d38800000000, 0x125c4dbf00000000,
- 0xb3cc5a5700000000, 0x117b13b400000000, 0xb0eb045c00000000,
- 0x1412f1a900000000, 0xb582e64100000000, 0x1735afa200000000,
- 0xb6a5b84a00000000, 0x1ec0359200000000, 0xbf50227a00000000,
- 0x1de76b9900000000, 0xbc777c7100000000, 0x188e898400000000,
- 0xb91e9e6c00000000, 0x1ba9d78f00000000, 0xba39c06700000000,
- 0x0a64bce500000000, 0xabf4ab0d00000000, 0x0943e2ee00000000,
- 0xa8d3f50600000000, 0x0c2a00f300000000, 0xadba171b00000000,
- 0x0f0d5ef800000000, 0xae9d491000000000, 0x06f8c4c800000000,
- 0xa768d32000000000, 0x05df9ac300000000, 0xa44f8d2b00000000,
- 0x00b678de00000000, 0xa1266f3600000000, 0x039126d500000000,
- 0xa201313d00000000},
- {0x0000000000000000, 0xee8439a100000000, 0x9d0f029900000000,
- 0x738b3b3800000000, 0x7b1975e900000000, 0x959d4c4800000000,
- 0xe616777000000000, 0x08924ed100000000, 0xb7349b0900000000,
- 0x59b0a2a800000000, 0x2a3b999000000000, 0xc4bfa03100000000,
- 0xcc2deee000000000, 0x22a9d74100000000, 0x5122ec7900000000,
- 0xbfa6d5d800000000, 0x6e69361300000000, 0x80ed0fb200000000,
- 0xf366348a00000000, 0x1de20d2b00000000, 0x157043fa00000000,
- 0xfbf47a5b00000000, 0x887f416300000000, 0x66fb78c200000000,
- 0xd95dad1a00000000, 0x37d994bb00000000, 0x4452af8300000000,
- 0xaad6962200000000, 0xa244d8f300000000, 0x4cc0e15200000000,
- 0x3f4bda6a00000000, 0xd1cfe3cb00000000, 0xdcd26c2600000000,
- 0x3256558700000000, 0x41dd6ebf00000000, 0xaf59571e00000000,
- 0xa7cb19cf00000000, 0x494f206e00000000, 0x3ac41b5600000000,
- 0xd44022f700000000, 0x6be6f72f00000000, 0x8562ce8e00000000,
- 0xf6e9f5b600000000, 0x186dcc1700000000, 0x10ff82c600000000,
- 0xfe7bbb6700000000, 0x8df0805f00000000, 0x6374b9fe00000000,
- 0xb2bb5a3500000000, 0x5c3f639400000000, 0x2fb458ac00000000,
- 0xc130610d00000000, 0xc9a22fdc00000000, 0x2726167d00000000,
- 0x54ad2d4500000000, 0xba2914e400000000, 0x058fc13c00000000,
- 0xeb0bf89d00000000, 0x9880c3a500000000, 0x7604fa0400000000,
- 0x7e96b4d500000000, 0x90128d7400000000, 0xe399b64c00000000,
- 0x0d1d8fed00000000, 0xb8a5d94c00000000, 0x5621e0ed00000000,
- 0x25aadbd500000000, 0xcb2ee27400000000, 0xc3bcaca500000000,
- 0x2d38950400000000, 0x5eb3ae3c00000000, 0xb037979d00000000,
- 0x0f91424500000000, 0xe1157be400000000, 0x929e40dc00000000,
- 0x7c1a797d00000000, 0x748837ac00000000, 0x9a0c0e0d00000000,
- 0xe987353500000000, 0x07030c9400000000, 0xd6ccef5f00000000,
- 0x3848d6fe00000000, 0x4bc3edc600000000, 0xa547d46700000000,
- 0xadd59ab600000000, 0x4351a31700000000, 0x30da982f00000000,
- 0xde5ea18e00000000, 0x61f8745600000000, 0x8f7c4df700000000,
- 0xfcf776cf00000000, 0x12734f6e00000000, 0x1ae101bf00000000,
- 0xf465381e00000000, 0x87ee032600000000, 0x696a3a8700000000,
- 0x6477b56a00000000, 0x8af38ccb00000000, 0xf978b7f300000000,
- 0x17fc8e5200000000, 0x1f6ec08300000000, 0xf1eaf92200000000,
- 0x8261c21a00000000, 0x6ce5fbbb00000000, 0xd3432e6300000000,
- 0x3dc717c200000000, 0x4e4c2cfa00000000, 0xa0c8155b00000000,
- 0xa85a5b8a00000000, 0x46de622b00000000, 0x3555591300000000,
- 0xdbd160b200000000, 0x0a1e837900000000, 0xe49abad800000000,
- 0x971181e000000000, 0x7995b84100000000, 0x7107f69000000000,
- 0x9f83cf3100000000, 0xec08f40900000000, 0x028ccda800000000,
- 0xbd2a187000000000, 0x53ae21d100000000, 0x20251ae900000000,
- 0xcea1234800000000, 0xc6336d9900000000, 0x28b7543800000000,
- 0x5b3c6f0000000000, 0xb5b856a100000000, 0x704bb39900000000,
- 0x9ecf8a3800000000, 0xed44b10000000000, 0x03c088a100000000,
- 0x0b52c67000000000, 0xe5d6ffd100000000, 0x965dc4e900000000,
- 0x78d9fd4800000000, 0xc77f289000000000, 0x29fb113100000000,
- 0x5a702a0900000000, 0xb4f413a800000000, 0xbc665d7900000000,
- 0x52e264d800000000, 0x21695fe000000000, 0xcfed664100000000,
- 0x1e22858a00000000, 0xf0a6bc2b00000000, 0x832d871300000000,
- 0x6da9beb200000000, 0x653bf06300000000, 0x8bbfc9c200000000,
- 0xf834f2fa00000000, 0x16b0cb5b00000000, 0xa9161e8300000000,
- 0x4792272200000000, 0x34191c1a00000000, 0xda9d25bb00000000,
- 0xd20f6b6a00000000, 0x3c8b52cb00000000, 0x4f0069f300000000,
- 0xa184505200000000, 0xac99dfbf00000000, 0x421de61e00000000,
- 0x3196dd2600000000, 0xdf12e48700000000, 0xd780aa5600000000,
- 0x390493f700000000, 0x4a8fa8cf00000000, 0xa40b916e00000000,
- 0x1bad44b600000000, 0xf5297d1700000000, 0x86a2462f00000000,
- 0x68267f8e00000000, 0x60b4315f00000000, 0x8e3008fe00000000,
- 0xfdbb33c600000000, 0x133f0a6700000000, 0xc2f0e9ac00000000,
- 0x2c74d00d00000000, 0x5fffeb3500000000, 0xb17bd29400000000,
- 0xb9e99c4500000000, 0x576da5e400000000, 0x24e69edc00000000,
- 0xca62a77d00000000, 0x75c472a500000000, 0x9b404b0400000000,
- 0xe8cb703c00000000, 0x064f499d00000000, 0x0edd074c00000000,
- 0xe0593eed00000000, 0x93d205d500000000, 0x7d563c7400000000,
- 0xc8ee6ad500000000, 0x266a537400000000, 0x55e1684c00000000,
- 0xbb6551ed00000000, 0xb3f71f3c00000000, 0x5d73269d00000000,
- 0x2ef81da500000000, 0xc07c240400000000, 0x7fdaf1dc00000000,
- 0x915ec87d00000000, 0xe2d5f34500000000, 0x0c51cae400000000,
- 0x04c3843500000000, 0xea47bd9400000000, 0x99cc86ac00000000,
- 0x7748bf0d00000000, 0xa6875cc600000000, 0x4803656700000000,
- 0x3b885e5f00000000, 0xd50c67fe00000000, 0xdd9e292f00000000,
- 0x331a108e00000000, 0x40912bb600000000, 0xae15121700000000,
- 0x11b3c7cf00000000, 0xff37fe6e00000000, 0x8cbcc55600000000,
- 0x6238fcf700000000, 0x6aaab22600000000, 0x842e8b8700000000,
- 0xf7a5b0bf00000000, 0x1921891e00000000, 0x143c06f300000000,
- 0xfab83f5200000000, 0x8933046a00000000, 0x67b73dcb00000000,
- 0x6f25731a00000000, 0x81a14abb00000000, 0xf22a718300000000,
- 0x1cae482200000000, 0xa3089dfa00000000, 0x4d8ca45b00000000,
- 0x3e079f6300000000, 0xd083a6c200000000, 0xd811e81300000000,
- 0x3695d1b200000000, 0x451eea8a00000000, 0xab9ad32b00000000,
- 0x7a5530e000000000, 0x94d1094100000000, 0xe75a327900000000,
- 0x09de0bd800000000, 0x014c450900000000, 0xefc87ca800000000,
- 0x9c43479000000000, 0x72c77e3100000000, 0xcd61abe900000000,
- 0x23e5924800000000, 0x506ea97000000000, 0xbeea90d100000000,
- 0xb678de0000000000, 0x58fce7a100000000, 0x2b77dc9900000000,
- 0xc5f3e53800000000},
- {0x0000000000000000, 0xfbf6134700000000, 0xf6ed278e00000000,
- 0x0d1b34c900000000, 0xaddd3ec700000000, 0x562b2d8000000000,
- 0x5b30194900000000, 0xa0c60a0e00000000, 0x1bbd0c5500000000,
- 0xe04b1f1200000000, 0xed502bdb00000000, 0x16a6389c00000000,
- 0xb660329200000000, 0x4d9621d500000000, 0x408d151c00000000,
- 0xbb7b065b00000000, 0x367a19aa00000000, 0xcd8c0aed00000000,
- 0xc0973e2400000000, 0x3b612d6300000000, 0x9ba7276d00000000,
- 0x6051342a00000000, 0x6d4a00e300000000, 0x96bc13a400000000,
- 0x2dc715ff00000000, 0xd63106b800000000, 0xdb2a327100000000,
- 0x20dc213600000000, 0x801a2b3800000000, 0x7bec387f00000000,
- 0x76f70cb600000000, 0x8d011ff100000000, 0x2df2438f00000000,
- 0xd60450c800000000, 0xdb1f640100000000, 0x20e9774600000000,
- 0x802f7d4800000000, 0x7bd96e0f00000000, 0x76c25ac600000000,
- 0x8d34498100000000, 0x364f4fda00000000, 0xcdb95c9d00000000,
- 0xc0a2685400000000, 0x3b547b1300000000, 0x9b92711d00000000,
- 0x6064625a00000000, 0x6d7f569300000000, 0x968945d400000000,
- 0x1b885a2500000000, 0xe07e496200000000, 0xed657dab00000000,
- 0x16936eec00000000, 0xb65564e200000000, 0x4da377a500000000,
- 0x40b8436c00000000, 0xbb4e502b00000000, 0x0035567000000000,
- 0xfbc3453700000000, 0xf6d871fe00000000, 0x0d2e62b900000000,
- 0xade868b700000000, 0x561e7bf000000000, 0x5b054f3900000000,
- 0xa0f35c7e00000000, 0x1be2f6c500000000, 0xe014e58200000000,
- 0xed0fd14b00000000, 0x16f9c20c00000000, 0xb63fc80200000000,
- 0x4dc9db4500000000, 0x40d2ef8c00000000, 0xbb24fccb00000000,
- 0x005ffa9000000000, 0xfba9e9d700000000, 0xf6b2dd1e00000000,
- 0x0d44ce5900000000, 0xad82c45700000000, 0x5674d71000000000,
- 0x5b6fe3d900000000, 0xa099f09e00000000, 0x2d98ef6f00000000,
- 0xd66efc2800000000, 0xdb75c8e100000000, 0x2083dba600000000,
- 0x8045d1a800000000, 0x7bb3c2ef00000000, 0x76a8f62600000000,
- 0x8d5ee56100000000, 0x3625e33a00000000, 0xcdd3f07d00000000,
- 0xc0c8c4b400000000, 0x3b3ed7f300000000, 0x9bf8ddfd00000000,
- 0x600eceba00000000, 0x6d15fa7300000000, 0x96e3e93400000000,
- 0x3610b54a00000000, 0xcde6a60d00000000, 0xc0fd92c400000000,
- 0x3b0b818300000000, 0x9bcd8b8d00000000, 0x603b98ca00000000,
- 0x6d20ac0300000000, 0x96d6bf4400000000, 0x2dadb91f00000000,
- 0xd65baa5800000000, 0xdb409e9100000000, 0x20b68dd600000000,
- 0x807087d800000000, 0x7b86949f00000000, 0x769da05600000000,
- 0x8d6bb31100000000, 0x006aace000000000, 0xfb9cbfa700000000,
- 0xf6878b6e00000000, 0x0d71982900000000, 0xadb7922700000000,
- 0x5641816000000000, 0x5b5ab5a900000000, 0xa0aca6ee00000000,
- 0x1bd7a0b500000000, 0xe021b3f200000000, 0xed3a873b00000000,
- 0x16cc947c00000000, 0xb60a9e7200000000, 0x4dfc8d3500000000,
- 0x40e7b9fc00000000, 0xbb11aabb00000000, 0x77c29c5000000000,
- 0x8c348f1700000000, 0x812fbbde00000000, 0x7ad9a89900000000,
- 0xda1fa29700000000, 0x21e9b1d000000000, 0x2cf2851900000000,
- 0xd704965e00000000, 0x6c7f900500000000, 0x9789834200000000,
- 0x9a92b78b00000000, 0x6164a4cc00000000, 0xc1a2aec200000000,
- 0x3a54bd8500000000, 0x374f894c00000000, 0xccb99a0b00000000,
- 0x41b885fa00000000, 0xba4e96bd00000000, 0xb755a27400000000,
- 0x4ca3b13300000000, 0xec65bb3d00000000, 0x1793a87a00000000,
- 0x1a889cb300000000, 0xe17e8ff400000000, 0x5a0589af00000000,
- 0xa1f39ae800000000, 0xace8ae2100000000, 0x571ebd6600000000,
- 0xf7d8b76800000000, 0x0c2ea42f00000000, 0x013590e600000000,
- 0xfac383a100000000, 0x5a30dfdf00000000, 0xa1c6cc9800000000,
- 0xacddf85100000000, 0x572beb1600000000, 0xf7ede11800000000,
- 0x0c1bf25f00000000, 0x0100c69600000000, 0xfaf6d5d100000000,
- 0x418dd38a00000000, 0xba7bc0cd00000000, 0xb760f40400000000,
- 0x4c96e74300000000, 0xec50ed4d00000000, 0x17a6fe0a00000000,
- 0x1abdcac300000000, 0xe14bd98400000000, 0x6c4ac67500000000,
- 0x97bcd53200000000, 0x9aa7e1fb00000000, 0x6151f2bc00000000,
- 0xc197f8b200000000, 0x3a61ebf500000000, 0x377adf3c00000000,
- 0xcc8ccc7b00000000, 0x77f7ca2000000000, 0x8c01d96700000000,
- 0x811aedae00000000, 0x7aecfee900000000, 0xda2af4e700000000,
- 0x21dce7a000000000, 0x2cc7d36900000000, 0xd731c02e00000000,
- 0x6c206a9500000000, 0x97d679d200000000, 0x9acd4d1b00000000,
- 0x613b5e5c00000000, 0xc1fd545200000000, 0x3a0b471500000000,
- 0x371073dc00000000, 0xcce6609b00000000, 0x779d66c000000000,
- 0x8c6b758700000000, 0x8170414e00000000, 0x7a86520900000000,
- 0xda40580700000000, 0x21b64b4000000000, 0x2cad7f8900000000,
- 0xd75b6cce00000000, 0x5a5a733f00000000, 0xa1ac607800000000,
- 0xacb754b100000000, 0x574147f600000000, 0xf7874df800000000,
- 0x0c715ebf00000000, 0x016a6a7600000000, 0xfa9c793100000000,
- 0x41e77f6a00000000, 0xba116c2d00000000, 0xb70a58e400000000,
- 0x4cfc4ba300000000, 0xec3a41ad00000000, 0x17cc52ea00000000,
- 0x1ad7662300000000, 0xe121756400000000, 0x41d2291a00000000,
- 0xba243a5d00000000, 0xb73f0e9400000000, 0x4cc91dd300000000,
- 0xec0f17dd00000000, 0x17f9049a00000000, 0x1ae2305300000000,
- 0xe114231400000000, 0x5a6f254f00000000, 0xa199360800000000,
- 0xac8202c100000000, 0x5774118600000000, 0xf7b21b8800000000,
- 0x0c4408cf00000000, 0x015f3c0600000000, 0xfaa92f4100000000,
- 0x77a830b000000000, 0x8c5e23f700000000, 0x8145173e00000000,
- 0x7ab3047900000000, 0xda750e7700000000, 0x21831d3000000000,
- 0x2c9829f900000000, 0xd76e3abe00000000, 0x6c153ce500000000,
- 0x97e32fa200000000, 0x9af81b6b00000000, 0x610e082c00000000,
- 0xc1c8022200000000, 0x3a3e116500000000, 0x372525ac00000000,
- 0xccd336eb00000000},
- {0x0000000000000000, 0x6238282a00000000, 0xc470505400000000,
- 0xa648787e00000000, 0x88e1a0a800000000, 0xead9888200000000,
- 0x4c91f0fc00000000, 0x2ea9d8d600000000, 0x51c5308a00000000,
- 0x33fd18a000000000, 0x95b560de00000000, 0xf78d48f400000000,
- 0xd924902200000000, 0xbb1cb80800000000, 0x1d54c07600000000,
- 0x7f6ce85c00000000, 0xe38c10cf00000000, 0x81b438e500000000,
- 0x27fc409b00000000, 0x45c468b100000000, 0x6b6db06700000000,
- 0x0955984d00000000, 0xaf1de03300000000, 0xcd25c81900000000,
- 0xb249204500000000, 0xd071086f00000000, 0x7639701100000000,
- 0x1401583b00000000, 0x3aa880ed00000000, 0x5890a8c700000000,
- 0xfed8d0b900000000, 0x9ce0f89300000000, 0x871f504500000000,
- 0xe527786f00000000, 0x436f001100000000, 0x2157283b00000000,
- 0x0ffef0ed00000000, 0x6dc6d8c700000000, 0xcb8ea0b900000000,
- 0xa9b6889300000000, 0xd6da60cf00000000, 0xb4e248e500000000,
- 0x12aa309b00000000, 0x709218b100000000, 0x5e3bc06700000000,
- 0x3c03e84d00000000, 0x9a4b903300000000, 0xf873b81900000000,
- 0x6493408a00000000, 0x06ab68a000000000, 0xa0e310de00000000,
- 0xc2db38f400000000, 0xec72e02200000000, 0x8e4ac80800000000,
- 0x2802b07600000000, 0x4a3a985c00000000, 0x3556700000000000,
- 0x576e582a00000000, 0xf126205400000000, 0x931e087e00000000,
- 0xbdb7d0a800000000, 0xdf8ff88200000000, 0x79c780fc00000000,
- 0x1bffa8d600000000, 0x0e3fa08a00000000, 0x6c0788a000000000,
- 0xca4ff0de00000000, 0xa877d8f400000000, 0x86de002200000000,
- 0xe4e6280800000000, 0x42ae507600000000, 0x2096785c00000000,
- 0x5ffa900000000000, 0x3dc2b82a00000000, 0x9b8ac05400000000,
- 0xf9b2e87e00000000, 0xd71b30a800000000, 0xb523188200000000,
- 0x136b60fc00000000, 0x715348d600000000, 0xedb3b04500000000,
- 0x8f8b986f00000000, 0x29c3e01100000000, 0x4bfbc83b00000000,
- 0x655210ed00000000, 0x076a38c700000000, 0xa12240b900000000,
- 0xc31a689300000000, 0xbc7680cf00000000, 0xde4ea8e500000000,
- 0x7806d09b00000000, 0x1a3ef8b100000000, 0x3497206700000000,
- 0x56af084d00000000, 0xf0e7703300000000, 0x92df581900000000,
- 0x8920f0cf00000000, 0xeb18d8e500000000, 0x4d50a09b00000000,
- 0x2f6888b100000000, 0x01c1506700000000, 0x63f9784d00000000,
- 0xc5b1003300000000, 0xa789281900000000, 0xd8e5c04500000000,
- 0xbadde86f00000000, 0x1c95901100000000, 0x7eadb83b00000000,
- 0x500460ed00000000, 0x323c48c700000000, 0x947430b900000000,
- 0xf64c189300000000, 0x6aace00000000000, 0x0894c82a00000000,
- 0xaedcb05400000000, 0xcce4987e00000000, 0xe24d40a800000000,
- 0x8075688200000000, 0x263d10fc00000000, 0x440538d600000000,
- 0x3b69d08a00000000, 0x5951f8a000000000, 0xff1980de00000000,
- 0x9d21a8f400000000, 0xb388702200000000, 0xd1b0580800000000,
- 0x77f8207600000000, 0x15c0085c00000000, 0x5d7831ce00000000,
- 0x3f4019e400000000, 0x9908619a00000000, 0xfb3049b000000000,
- 0xd599916600000000, 0xb7a1b94c00000000, 0x11e9c13200000000,
- 0x73d1e91800000000, 0x0cbd014400000000, 0x6e85296e00000000,
- 0xc8cd511000000000, 0xaaf5793a00000000, 0x845ca1ec00000000,
- 0xe66489c600000000, 0x402cf1b800000000, 0x2214d99200000000,
- 0xbef4210100000000, 0xdccc092b00000000, 0x7a84715500000000,
- 0x18bc597f00000000, 0x361581a900000000, 0x542da98300000000,
- 0xf265d1fd00000000, 0x905df9d700000000, 0xef31118b00000000,
- 0x8d0939a100000000, 0x2b4141df00000000, 0x497969f500000000,
- 0x67d0b12300000000, 0x05e8990900000000, 0xa3a0e17700000000,
- 0xc198c95d00000000, 0xda67618b00000000, 0xb85f49a100000000,
- 0x1e1731df00000000, 0x7c2f19f500000000, 0x5286c12300000000,
- 0x30bee90900000000, 0x96f6917700000000, 0xf4ceb95d00000000,
- 0x8ba2510100000000, 0xe99a792b00000000, 0x4fd2015500000000,
- 0x2dea297f00000000, 0x0343f1a900000000, 0x617bd98300000000,
- 0xc733a1fd00000000, 0xa50b89d700000000, 0x39eb714400000000,
- 0x5bd3596e00000000, 0xfd9b211000000000, 0x9fa3093a00000000,
- 0xb10ad1ec00000000, 0xd332f9c600000000, 0x757a81b800000000,
- 0x1742a99200000000, 0x682e41ce00000000, 0x0a1669e400000000,
- 0xac5e119a00000000, 0xce6639b000000000, 0xe0cfe16600000000,
- 0x82f7c94c00000000, 0x24bfb13200000000, 0x4687991800000000,
- 0x5347914400000000, 0x317fb96e00000000, 0x9737c11000000000,
- 0xf50fe93a00000000, 0xdba631ec00000000, 0xb99e19c600000000,
- 0x1fd661b800000000, 0x7dee499200000000, 0x0282a1ce00000000,
- 0x60ba89e400000000, 0xc6f2f19a00000000, 0xa4cad9b000000000,
- 0x8a63016600000000, 0xe85b294c00000000, 0x4e13513200000000,
- 0x2c2b791800000000, 0xb0cb818b00000000, 0xd2f3a9a100000000,
- 0x74bbd1df00000000, 0x1683f9f500000000, 0x382a212300000000,
- 0x5a12090900000000, 0xfc5a717700000000, 0x9e62595d00000000,
- 0xe10eb10100000000, 0x8336992b00000000, 0x257ee15500000000,
- 0x4746c97f00000000, 0x69ef11a900000000, 0x0bd7398300000000,
- 0xad9f41fd00000000, 0xcfa769d700000000, 0xd458c10100000000,
- 0xb660e92b00000000, 0x1028915500000000, 0x7210b97f00000000,
- 0x5cb961a900000000, 0x3e81498300000000, 0x98c931fd00000000,
- 0xfaf119d700000000, 0x859df18b00000000, 0xe7a5d9a100000000,
- 0x41eda1df00000000, 0x23d589f500000000, 0x0d7c512300000000,
- 0x6f44790900000000, 0xc90c017700000000, 0xab34295d00000000,
- 0x37d4d1ce00000000, 0x55ecf9e400000000, 0xf3a4819a00000000,
- 0x919ca9b000000000, 0xbf35716600000000, 0xdd0d594c00000000,
- 0x7b45213200000000, 0x197d091800000000, 0x6611e14400000000,
- 0x0429c96e00000000, 0xa261b11000000000, 0xc059993a00000000,
- 0xeef041ec00000000, 0x8cc869c600000000, 0x2a8011b800000000,
- 0x48b8399200000000},
- {0x0000000000000000, 0x4c2896a300000000, 0xd9565d9c00000000,
- 0x957ecb3f00000000, 0xf3abcbe300000000, 0xbf835d4000000000,
- 0x2afd967f00000000, 0x66d500dc00000000, 0xa751e61c00000000,
- 0xeb7970bf00000000, 0x7e07bb8000000000, 0x322f2d2300000000,
- 0x54fa2dff00000000, 0x18d2bb5c00000000, 0x8dac706300000000,
- 0xc184e6c000000000, 0x4ea3cc3900000000, 0x028b5a9a00000000,
- 0x97f591a500000000, 0xdbdd070600000000, 0xbd0807da00000000,
- 0xf120917900000000, 0x645e5a4600000000, 0x2876cce500000000,
- 0xe9f22a2500000000, 0xa5dabc8600000000, 0x30a477b900000000,
- 0x7c8ce11a00000000, 0x1a59e1c600000000, 0x5671776500000000,
- 0xc30fbc5a00000000, 0x8f272af900000000, 0x9c46997300000000,
- 0xd06e0fd000000000, 0x4510c4ef00000000, 0x0938524c00000000,
- 0x6fed529000000000, 0x23c5c43300000000, 0xb6bb0f0c00000000,
- 0xfa9399af00000000, 0x3b177f6f00000000, 0x773fe9cc00000000,
- 0xe24122f300000000, 0xae69b45000000000, 0xc8bcb48c00000000,
- 0x8494222f00000000, 0x11eae91000000000, 0x5dc27fb300000000,
- 0xd2e5554a00000000, 0x9ecdc3e900000000, 0x0bb308d600000000,
- 0x479b9e7500000000, 0x214e9ea900000000, 0x6d66080a00000000,
- 0xf818c33500000000, 0xb430559600000000, 0x75b4b35600000000,
- 0x399c25f500000000, 0xace2eeca00000000, 0xe0ca786900000000,
- 0x861f78b500000000, 0xca37ee1600000000, 0x5f49252900000000,
- 0x1361b38a00000000, 0x388d32e700000000, 0x74a5a44400000000,
- 0xe1db6f7b00000000, 0xadf3f9d800000000, 0xcb26f90400000000,
- 0x870e6fa700000000, 0x1270a49800000000, 0x5e58323b00000000,
- 0x9fdcd4fb00000000, 0xd3f4425800000000, 0x468a896700000000,
- 0x0aa21fc400000000, 0x6c771f1800000000, 0x205f89bb00000000,
- 0xb521428400000000, 0xf909d42700000000, 0x762efede00000000,
- 0x3a06687d00000000, 0xaf78a34200000000, 0xe35035e100000000,
- 0x8585353d00000000, 0xc9ada39e00000000, 0x5cd368a100000000,
- 0x10fbfe0200000000, 0xd17f18c200000000, 0x9d578e6100000000,
- 0x0829455e00000000, 0x4401d3fd00000000, 0x22d4d32100000000,
- 0x6efc458200000000, 0xfb828ebd00000000, 0xb7aa181e00000000,
- 0xa4cbab9400000000, 0xe8e33d3700000000, 0x7d9df60800000000,
- 0x31b560ab00000000, 0x5760607700000000, 0x1b48f6d400000000,
- 0x8e363deb00000000, 0xc21eab4800000000, 0x039a4d8800000000,
- 0x4fb2db2b00000000, 0xdacc101400000000, 0x96e486b700000000,
- 0xf031866b00000000, 0xbc1910c800000000, 0x2967dbf700000000,
- 0x654f4d5400000000, 0xea6867ad00000000, 0xa640f10e00000000,
- 0x333e3a3100000000, 0x7f16ac9200000000, 0x19c3ac4e00000000,
- 0x55eb3aed00000000, 0xc095f1d200000000, 0x8cbd677100000000,
- 0x4d3981b100000000, 0x0111171200000000, 0x946fdc2d00000000,
- 0xd8474a8e00000000, 0xbe924a5200000000, 0xf2badcf100000000,
- 0x67c417ce00000000, 0x2bec816d00000000, 0x311c141500000000,
- 0x7d3482b600000000, 0xe84a498900000000, 0xa462df2a00000000,
- 0xc2b7dff600000000, 0x8e9f495500000000, 0x1be1826a00000000,
- 0x57c914c900000000, 0x964df20900000000, 0xda6564aa00000000,
- 0x4f1baf9500000000, 0x0333393600000000, 0x65e639ea00000000,
- 0x29ceaf4900000000, 0xbcb0647600000000, 0xf098f2d500000000,
- 0x7fbfd82c00000000, 0x33974e8f00000000, 0xa6e985b000000000,
- 0xeac1131300000000, 0x8c1413cf00000000, 0xc03c856c00000000,
- 0x55424e5300000000, 0x196ad8f000000000, 0xd8ee3e3000000000,
- 0x94c6a89300000000, 0x01b863ac00000000, 0x4d90f50f00000000,
- 0x2b45f5d300000000, 0x676d637000000000, 0xf213a84f00000000,
- 0xbe3b3eec00000000, 0xad5a8d6600000000, 0xe1721bc500000000,
- 0x740cd0fa00000000, 0x3824465900000000, 0x5ef1468500000000,
- 0x12d9d02600000000, 0x87a71b1900000000, 0xcb8f8dba00000000,
- 0x0a0b6b7a00000000, 0x4623fdd900000000, 0xd35d36e600000000,
- 0x9f75a04500000000, 0xf9a0a09900000000, 0xb588363a00000000,
- 0x20f6fd0500000000, 0x6cde6ba600000000, 0xe3f9415f00000000,
- 0xafd1d7fc00000000, 0x3aaf1cc300000000, 0x76878a6000000000,
- 0x10528abc00000000, 0x5c7a1c1f00000000, 0xc904d72000000000,
- 0x852c418300000000, 0x44a8a74300000000, 0x088031e000000000,
- 0x9dfefadf00000000, 0xd1d66c7c00000000, 0xb7036ca000000000,
- 0xfb2bfa0300000000, 0x6e55313c00000000, 0x227da79f00000000,
- 0x099126f200000000, 0x45b9b05100000000, 0xd0c77b6e00000000,
- 0x9cefedcd00000000, 0xfa3aed1100000000, 0xb6127bb200000000,
- 0x236cb08d00000000, 0x6f44262e00000000, 0xaec0c0ee00000000,
- 0xe2e8564d00000000, 0x77969d7200000000, 0x3bbe0bd100000000,
- 0x5d6b0b0d00000000, 0x11439dae00000000, 0x843d569100000000,
- 0xc815c03200000000, 0x4732eacb00000000, 0x0b1a7c6800000000,
- 0x9e64b75700000000, 0xd24c21f400000000, 0xb499212800000000,
- 0xf8b1b78b00000000, 0x6dcf7cb400000000, 0x21e7ea1700000000,
- 0xe0630cd700000000, 0xac4b9a7400000000, 0x3935514b00000000,
- 0x751dc7e800000000, 0x13c8c73400000000, 0x5fe0519700000000,
- 0xca9e9aa800000000, 0x86b60c0b00000000, 0x95d7bf8100000000,
- 0xd9ff292200000000, 0x4c81e21d00000000, 0x00a974be00000000,
- 0x667c746200000000, 0x2a54e2c100000000, 0xbf2a29fe00000000,
- 0xf302bf5d00000000, 0x3286599d00000000, 0x7eaecf3e00000000,
- 0xebd0040100000000, 0xa7f892a200000000, 0xc12d927e00000000,
- 0x8d0504dd00000000, 0x187bcfe200000000, 0x5453594100000000,
- 0xdb7473b800000000, 0x975ce51b00000000, 0x02222e2400000000,
- 0x4e0ab88700000000, 0x28dfb85b00000000, 0x64f72ef800000000,
- 0xf189e5c700000000, 0xbda1736400000000, 0x7c2595a400000000,
- 0x300d030700000000, 0xa573c83800000000, 0xe95b5e9b00000000,
- 0x8f8e5e4700000000, 0xc3a6c8e400000000, 0x56d803db00000000,
- 0x1af0957800000000},
- {0x0000000000000000, 0x939bc97f00000000, 0x263793ff00000000,
- 0xb5ac5a8000000000, 0x0d68572400000000, 0x9ef39e5b00000000,
- 0x2b5fc4db00000000, 0xb8c40da400000000, 0x1ad0ae4800000000,
- 0x894b673700000000, 0x3ce73db700000000, 0xaf7cf4c800000000,
- 0x17b8f96c00000000, 0x8423301300000000, 0x318f6a9300000000,
- 0xa214a3ec00000000, 0x34a05d9100000000, 0xa73b94ee00000000,
- 0x1297ce6e00000000, 0x810c071100000000, 0x39c80ab500000000,
- 0xaa53c3ca00000000, 0x1fff994a00000000, 0x8c64503500000000,
- 0x2e70f3d900000000, 0xbdeb3aa600000000, 0x0847602600000000,
- 0x9bdca95900000000, 0x2318a4fd00000000, 0xb0836d8200000000,
- 0x052f370200000000, 0x96b4fe7d00000000, 0x2946caf900000000,
- 0xbadd038600000000, 0x0f71590600000000, 0x9cea907900000000,
- 0x242e9ddd00000000, 0xb7b554a200000000, 0x02190e2200000000,
- 0x9182c75d00000000, 0x339664b100000000, 0xa00dadce00000000,
- 0x15a1f74e00000000, 0x863a3e3100000000, 0x3efe339500000000,
- 0xad65faea00000000, 0x18c9a06a00000000, 0x8b52691500000000,
- 0x1de6976800000000, 0x8e7d5e1700000000, 0x3bd1049700000000,
- 0xa84acde800000000, 0x108ec04c00000000, 0x8315093300000000,
- 0x36b953b300000000, 0xa5229acc00000000, 0x0736392000000000,
- 0x94adf05f00000000, 0x2101aadf00000000, 0xb29a63a000000000,
- 0x0a5e6e0400000000, 0x99c5a77b00000000, 0x2c69fdfb00000000,
- 0xbff2348400000000, 0x138ae52800000000, 0x80112c5700000000,
- 0x35bd76d700000000, 0xa626bfa800000000, 0x1ee2b20c00000000,
- 0x8d797b7300000000, 0x38d521f300000000, 0xab4ee88c00000000,
- 0x095a4b6000000000, 0x9ac1821f00000000, 0x2f6dd89f00000000,
- 0xbcf611e000000000, 0x04321c4400000000, 0x97a9d53b00000000,
- 0x22058fbb00000000, 0xb19e46c400000000, 0x272ab8b900000000,
- 0xb4b171c600000000, 0x011d2b4600000000, 0x9286e23900000000,
- 0x2a42ef9d00000000, 0xb9d926e200000000, 0x0c757c6200000000,
- 0x9feeb51d00000000, 0x3dfa16f100000000, 0xae61df8e00000000,
- 0x1bcd850e00000000, 0x88564c7100000000, 0x309241d500000000,
- 0xa30988aa00000000, 0x16a5d22a00000000, 0x853e1b5500000000,
- 0x3acc2fd100000000, 0xa957e6ae00000000, 0x1cfbbc2e00000000,
- 0x8f60755100000000, 0x37a478f500000000, 0xa43fb18a00000000,
- 0x1193eb0a00000000, 0x8208227500000000, 0x201c819900000000,
- 0xb38748e600000000, 0x062b126600000000, 0x95b0db1900000000,
- 0x2d74d6bd00000000, 0xbeef1fc200000000, 0x0b43454200000000,
- 0x98d88c3d00000000, 0x0e6c724000000000, 0x9df7bb3f00000000,
- 0x285be1bf00000000, 0xbbc028c000000000, 0x0304256400000000,
- 0x909fec1b00000000, 0x2533b69b00000000, 0xb6a87fe400000000,
- 0x14bcdc0800000000, 0x8727157700000000, 0x328b4ff700000000,
- 0xa110868800000000, 0x19d48b2c00000000, 0x8a4f425300000000,
- 0x3fe318d300000000, 0xac78d1ac00000000, 0x2614cb5100000000,
- 0xb58f022e00000000, 0x002358ae00000000, 0x93b891d100000000,
- 0x2b7c9c7500000000, 0xb8e7550a00000000, 0x0d4b0f8a00000000,
- 0x9ed0c6f500000000, 0x3cc4651900000000, 0xaf5fac6600000000,
- 0x1af3f6e600000000, 0x89683f9900000000, 0x31ac323d00000000,
- 0xa237fb4200000000, 0x179ba1c200000000, 0x840068bd00000000,
- 0x12b496c000000000, 0x812f5fbf00000000, 0x3483053f00000000,
- 0xa718cc4000000000, 0x1fdcc1e400000000, 0x8c47089b00000000,
- 0x39eb521b00000000, 0xaa709b6400000000, 0x0864388800000000,
- 0x9bfff1f700000000, 0x2e53ab7700000000, 0xbdc8620800000000,
- 0x050c6fac00000000, 0x9697a6d300000000, 0x233bfc5300000000,
- 0xb0a0352c00000000, 0x0f5201a800000000, 0x9cc9c8d700000000,
- 0x2965925700000000, 0xbafe5b2800000000, 0x023a568c00000000,
- 0x91a19ff300000000, 0x240dc57300000000, 0xb7960c0c00000000,
- 0x1582afe000000000, 0x8619669f00000000, 0x33b53c1f00000000,
- 0xa02ef56000000000, 0x18eaf8c400000000, 0x8b7131bb00000000,
- 0x3edd6b3b00000000, 0xad46a24400000000, 0x3bf25c3900000000,
- 0xa869954600000000, 0x1dc5cfc600000000, 0x8e5e06b900000000,
- 0x369a0b1d00000000, 0xa501c26200000000, 0x10ad98e200000000,
- 0x8336519d00000000, 0x2122f27100000000, 0xb2b93b0e00000000,
- 0x0715618e00000000, 0x948ea8f100000000, 0x2c4aa55500000000,
- 0xbfd16c2a00000000, 0x0a7d36aa00000000, 0x99e6ffd500000000,
- 0x359e2e7900000000, 0xa605e70600000000, 0x13a9bd8600000000,
- 0x803274f900000000, 0x38f6795d00000000, 0xab6db02200000000,
- 0x1ec1eaa200000000, 0x8d5a23dd00000000, 0x2f4e803100000000,
- 0xbcd5494e00000000, 0x097913ce00000000, 0x9ae2dab100000000,
- 0x2226d71500000000, 0xb1bd1e6a00000000, 0x041144ea00000000,
- 0x978a8d9500000000, 0x013e73e800000000, 0x92a5ba9700000000,
- 0x2709e01700000000, 0xb492296800000000, 0x0c5624cc00000000,
- 0x9fcdedb300000000, 0x2a61b73300000000, 0xb9fa7e4c00000000,
- 0x1beedda000000000, 0x887514df00000000, 0x3dd94e5f00000000,
- 0xae42872000000000, 0x16868a8400000000, 0x851d43fb00000000,
- 0x30b1197b00000000, 0xa32ad00400000000, 0x1cd8e48000000000,
- 0x8f432dff00000000, 0x3aef777f00000000, 0xa974be0000000000,
- 0x11b0b3a400000000, 0x822b7adb00000000, 0x3787205b00000000,
- 0xa41ce92400000000, 0x06084ac800000000, 0x959383b700000000,
- 0x203fd93700000000, 0xb3a4104800000000, 0x0b601dec00000000,
- 0x98fbd49300000000, 0x2d578e1300000000, 0xbecc476c00000000,
- 0x2878b91100000000, 0xbbe3706e00000000, 0x0e4f2aee00000000,
- 0x9dd4e39100000000, 0x2510ee3500000000, 0xb68b274a00000000,
- 0x03277dca00000000, 0x90bcb4b500000000, 0x32a8175900000000,
- 0xa133de2600000000, 0x149f84a600000000, 0x87044dd900000000,
- 0x3fc0407d00000000, 0xac5b890200000000, 0x19f7d38200000000,
- 0x8a6c1afd00000000},
- {0x0000000000000000, 0x650b796900000000, 0xca16f2d200000000,
- 0xaf1d8bbb00000000, 0xd52b957e00000000, 0xb020ec1700000000,
- 0x1f3d67ac00000000, 0x7a361ec500000000, 0xaa572afd00000000,
- 0xcf5c539400000000, 0x6041d82f00000000, 0x054aa14600000000,
- 0x7f7cbf8300000000, 0x1a77c6ea00000000, 0xb56a4d5100000000,
- 0xd061343800000000, 0x15a9252100000000, 0x70a25c4800000000,
- 0xdfbfd7f300000000, 0xbab4ae9a00000000, 0xc082b05f00000000,
- 0xa589c93600000000, 0x0a94428d00000000, 0x6f9f3be400000000,
- 0xbffe0fdc00000000, 0xdaf576b500000000, 0x75e8fd0e00000000,
- 0x10e3846700000000, 0x6ad59aa200000000, 0x0fdee3cb00000000,
- 0xa0c3687000000000, 0xc5c8111900000000, 0x2a524b4200000000,
- 0x4f59322b00000000, 0xe044b99000000000, 0x854fc0f900000000,
- 0xff79de3c00000000, 0x9a72a75500000000, 0x356f2cee00000000,
- 0x5064558700000000, 0x800561bf00000000, 0xe50e18d600000000,
- 0x4a13936d00000000, 0x2f18ea0400000000, 0x552ef4c100000000,
- 0x30258da800000000, 0x9f38061300000000, 0xfa337f7a00000000,
- 0x3ffb6e6300000000, 0x5af0170a00000000, 0xf5ed9cb100000000,
- 0x90e6e5d800000000, 0xead0fb1d00000000, 0x8fdb827400000000,
- 0x20c609cf00000000, 0x45cd70a600000000, 0x95ac449e00000000,
- 0xf0a73df700000000, 0x5fbab64c00000000, 0x3ab1cf2500000000,
- 0x4087d1e000000000, 0x258ca88900000000, 0x8a91233200000000,
- 0xef9a5a5b00000000, 0x54a4968400000000, 0x31afefed00000000,
- 0x9eb2645600000000, 0xfbb91d3f00000000, 0x818f03fa00000000,
- 0xe4847a9300000000, 0x4b99f12800000000, 0x2e92884100000000,
- 0xfef3bc7900000000, 0x9bf8c51000000000, 0x34e54eab00000000,
- 0x51ee37c200000000, 0x2bd8290700000000, 0x4ed3506e00000000,
- 0xe1cedbd500000000, 0x84c5a2bc00000000, 0x410db3a500000000,
- 0x2406cacc00000000, 0x8b1b417700000000, 0xee10381e00000000,
- 0x942626db00000000, 0xf12d5fb200000000, 0x5e30d40900000000,
- 0x3b3bad6000000000, 0xeb5a995800000000, 0x8e51e03100000000,
- 0x214c6b8a00000000, 0x444712e300000000, 0x3e710c2600000000,
- 0x5b7a754f00000000, 0xf467fef400000000, 0x916c879d00000000,
- 0x7ef6ddc600000000, 0x1bfda4af00000000, 0xb4e02f1400000000,
- 0xd1eb567d00000000, 0xabdd48b800000000, 0xced631d100000000,
- 0x61cbba6a00000000, 0x04c0c30300000000, 0xd4a1f73b00000000,
- 0xb1aa8e5200000000, 0x1eb705e900000000, 0x7bbc7c8000000000,
- 0x018a624500000000, 0x64811b2c00000000, 0xcb9c909700000000,
- 0xae97e9fe00000000, 0x6b5ff8e700000000, 0x0e54818e00000000,
- 0xa1490a3500000000, 0xc442735c00000000, 0xbe746d9900000000,
- 0xdb7f14f000000000, 0x74629f4b00000000, 0x1169e62200000000,
- 0xc108d21a00000000, 0xa403ab7300000000, 0x0b1e20c800000000,
- 0x6e1559a100000000, 0x1423476400000000, 0x71283e0d00000000,
- 0xde35b5b600000000, 0xbb3eccdf00000000, 0xe94e5cd200000000,
- 0x8c4525bb00000000, 0x2358ae0000000000, 0x4653d76900000000,
- 0x3c65c9ac00000000, 0x596eb0c500000000, 0xf6733b7e00000000,
- 0x9378421700000000, 0x4319762f00000000, 0x26120f4600000000,
- 0x890f84fd00000000, 0xec04fd9400000000, 0x9632e35100000000,
- 0xf3399a3800000000, 0x5c24118300000000, 0x392f68ea00000000,
- 0xfce779f300000000, 0x99ec009a00000000, 0x36f18b2100000000,
- 0x53faf24800000000, 0x29ccec8d00000000, 0x4cc795e400000000,
- 0xe3da1e5f00000000, 0x86d1673600000000, 0x56b0530e00000000,
- 0x33bb2a6700000000, 0x9ca6a1dc00000000, 0xf9add8b500000000,
- 0x839bc67000000000, 0xe690bf1900000000, 0x498d34a200000000,
- 0x2c864dcb00000000, 0xc31c179000000000, 0xa6176ef900000000,
- 0x090ae54200000000, 0x6c019c2b00000000, 0x163782ee00000000,
- 0x733cfb8700000000, 0xdc21703c00000000, 0xb92a095500000000,
- 0x694b3d6d00000000, 0x0c40440400000000, 0xa35dcfbf00000000,
- 0xc656b6d600000000, 0xbc60a81300000000, 0xd96bd17a00000000,
- 0x76765ac100000000, 0x137d23a800000000, 0xd6b532b100000000,
- 0xb3be4bd800000000, 0x1ca3c06300000000, 0x79a8b90a00000000,
- 0x039ea7cf00000000, 0x6695dea600000000, 0xc988551d00000000,
- 0xac832c7400000000, 0x7ce2184c00000000, 0x19e9612500000000,
- 0xb6f4ea9e00000000, 0xd3ff93f700000000, 0xa9c98d3200000000,
- 0xccc2f45b00000000, 0x63df7fe000000000, 0x06d4068900000000,
- 0xbdeaca5600000000, 0xd8e1b33f00000000, 0x77fc388400000000,
- 0x12f741ed00000000, 0x68c15f2800000000, 0x0dca264100000000,
- 0xa2d7adfa00000000, 0xc7dcd49300000000, 0x17bde0ab00000000,
- 0x72b699c200000000, 0xddab127900000000, 0xb8a06b1000000000,
- 0xc29675d500000000, 0xa79d0cbc00000000, 0x0880870700000000,
- 0x6d8bfe6e00000000, 0xa843ef7700000000, 0xcd48961e00000000,
- 0x62551da500000000, 0x075e64cc00000000, 0x7d687a0900000000,
- 0x1863036000000000, 0xb77e88db00000000, 0xd275f1b200000000,
- 0x0214c58a00000000, 0x671fbce300000000, 0xc802375800000000,
- 0xad094e3100000000, 0xd73f50f400000000, 0xb234299d00000000,
- 0x1d29a22600000000, 0x7822db4f00000000, 0x97b8811400000000,
- 0xf2b3f87d00000000, 0x5dae73c600000000, 0x38a50aaf00000000,
- 0x4293146a00000000, 0x27986d0300000000, 0x8885e6b800000000,
- 0xed8e9fd100000000, 0x3defabe900000000, 0x58e4d28000000000,
- 0xf7f9593b00000000, 0x92f2205200000000, 0xe8c43e9700000000,
- 0x8dcf47fe00000000, 0x22d2cc4500000000, 0x47d9b52c00000000,
- 0x8211a43500000000, 0xe71add5c00000000, 0x480756e700000000,
- 0x2d0c2f8e00000000, 0x573a314b00000000, 0x3231482200000000,
- 0x9d2cc39900000000, 0xf827baf000000000, 0x28468ec800000000,
- 0x4d4df7a100000000, 0xe2507c1a00000000, 0x875b057300000000,
- 0xfd6d1bb600000000, 0x986662df00000000, 0x377be96400000000,
- 0x5270900d00000000},
- {0x0000000000000000, 0xdcecb13d00000000, 0xb8d9637b00000000,
- 0x6435d24600000000, 0x70b3c7f600000000, 0xac5f76cb00000000,
- 0xc86aa48d00000000, 0x148615b000000000, 0xa160fe3600000000,
- 0x7d8c4f0b00000000, 0x19b99d4d00000000, 0xc5552c7000000000,
- 0xd1d339c000000000, 0x0d3f88fd00000000, 0x690a5abb00000000,
- 0xb5e6eb8600000000, 0x42c1fc6d00000000, 0x9e2d4d5000000000,
- 0xfa189f1600000000, 0x26f42e2b00000000, 0x32723b9b00000000,
- 0xee9e8aa600000000, 0x8aab58e000000000, 0x5647e9dd00000000,
- 0xe3a1025b00000000, 0x3f4db36600000000, 0x5b78612000000000,
- 0x8794d01d00000000, 0x9312c5ad00000000, 0x4ffe749000000000,
- 0x2bcba6d600000000, 0xf72717eb00000000, 0x8482f9db00000000,
- 0x586e48e600000000, 0x3c5b9aa000000000, 0xe0b72b9d00000000,
- 0xf4313e2d00000000, 0x28dd8f1000000000, 0x4ce85d5600000000,
- 0x9004ec6b00000000, 0x25e207ed00000000, 0xf90eb6d000000000,
- 0x9d3b649600000000, 0x41d7d5ab00000000, 0x5551c01b00000000,
- 0x89bd712600000000, 0xed88a36000000000, 0x3164125d00000000,
- 0xc64305b600000000, 0x1aafb48b00000000, 0x7e9a66cd00000000,
- 0xa276d7f000000000, 0xb6f0c24000000000, 0x6a1c737d00000000,
- 0x0e29a13b00000000, 0xd2c5100600000000, 0x6723fb8000000000,
- 0xbbcf4abd00000000, 0xdffa98fb00000000, 0x031629c600000000,
- 0x17903c7600000000, 0xcb7c8d4b00000000, 0xaf495f0d00000000,
- 0x73a5ee3000000000, 0x4903826c00000000, 0x95ef335100000000,
- 0xf1dae11700000000, 0x2d36502a00000000, 0x39b0459a00000000,
- 0xe55cf4a700000000, 0x816926e100000000, 0x5d8597dc00000000,
- 0xe8637c5a00000000, 0x348fcd6700000000, 0x50ba1f2100000000,
- 0x8c56ae1c00000000, 0x98d0bbac00000000, 0x443c0a9100000000,
- 0x2009d8d700000000, 0xfce569ea00000000, 0x0bc27e0100000000,
- 0xd72ecf3c00000000, 0xb31b1d7a00000000, 0x6ff7ac4700000000,
- 0x7b71b9f700000000, 0xa79d08ca00000000, 0xc3a8da8c00000000,
- 0x1f446bb100000000, 0xaaa2803700000000, 0x764e310a00000000,
- 0x127be34c00000000, 0xce97527100000000, 0xda1147c100000000,
- 0x06fdf6fc00000000, 0x62c824ba00000000, 0xbe24958700000000,
- 0xcd817bb700000000, 0x116dca8a00000000, 0x755818cc00000000,
- 0xa9b4a9f100000000, 0xbd32bc4100000000, 0x61de0d7c00000000,
- 0x05ebdf3a00000000, 0xd9076e0700000000, 0x6ce1858100000000,
- 0xb00d34bc00000000, 0xd438e6fa00000000, 0x08d457c700000000,
- 0x1c52427700000000, 0xc0bef34a00000000, 0xa48b210c00000000,
- 0x7867903100000000, 0x8f4087da00000000, 0x53ac36e700000000,
- 0x3799e4a100000000, 0xeb75559c00000000, 0xfff3402c00000000,
- 0x231ff11100000000, 0x472a235700000000, 0x9bc6926a00000000,
- 0x2e2079ec00000000, 0xf2ccc8d100000000, 0x96f91a9700000000,
- 0x4a15abaa00000000, 0x5e93be1a00000000, 0x827f0f2700000000,
- 0xe64add6100000000, 0x3aa66c5c00000000, 0x920604d900000000,
- 0x4eeab5e400000000, 0x2adf67a200000000, 0xf633d69f00000000,
- 0xe2b5c32f00000000, 0x3e59721200000000, 0x5a6ca05400000000,
- 0x8680116900000000, 0x3366faef00000000, 0xef8a4bd200000000,
- 0x8bbf999400000000, 0x575328a900000000, 0x43d53d1900000000,
- 0x9f398c2400000000, 0xfb0c5e6200000000, 0x27e0ef5f00000000,
- 0xd0c7f8b400000000, 0x0c2b498900000000, 0x681e9bcf00000000,
- 0xb4f22af200000000, 0xa0743f4200000000, 0x7c988e7f00000000,
- 0x18ad5c3900000000, 0xc441ed0400000000, 0x71a7068200000000,
- 0xad4bb7bf00000000, 0xc97e65f900000000, 0x1592d4c400000000,
- 0x0114c17400000000, 0xddf8704900000000, 0xb9cda20f00000000,
- 0x6521133200000000, 0x1684fd0200000000, 0xca684c3f00000000,
- 0xae5d9e7900000000, 0x72b12f4400000000, 0x66373af400000000,
- 0xbadb8bc900000000, 0xdeee598f00000000, 0x0202e8b200000000,
- 0xb7e4033400000000, 0x6b08b20900000000, 0x0f3d604f00000000,
- 0xd3d1d17200000000, 0xc757c4c200000000, 0x1bbb75ff00000000,
- 0x7f8ea7b900000000, 0xa362168400000000, 0x5445016f00000000,
- 0x88a9b05200000000, 0xec9c621400000000, 0x3070d32900000000,
- 0x24f6c69900000000, 0xf81a77a400000000, 0x9c2fa5e200000000,
- 0x40c314df00000000, 0xf525ff5900000000, 0x29c94e6400000000,
- 0x4dfc9c2200000000, 0x91102d1f00000000, 0x859638af00000000,
- 0x597a899200000000, 0x3d4f5bd400000000, 0xe1a3eae900000000,
- 0xdb0586b500000000, 0x07e9378800000000, 0x63dce5ce00000000,
- 0xbf3054f300000000, 0xabb6414300000000, 0x775af07e00000000,
- 0x136f223800000000, 0xcf83930500000000, 0x7a65788300000000,
- 0xa689c9be00000000, 0xc2bc1bf800000000, 0x1e50aac500000000,
- 0x0ad6bf7500000000, 0xd63a0e4800000000, 0xb20fdc0e00000000,
- 0x6ee36d3300000000, 0x99c47ad800000000, 0x4528cbe500000000,
- 0x211d19a300000000, 0xfdf1a89e00000000, 0xe977bd2e00000000,
- 0x359b0c1300000000, 0x51aede5500000000, 0x8d426f6800000000,
- 0x38a484ee00000000, 0xe44835d300000000, 0x807de79500000000,
- 0x5c9156a800000000, 0x4817431800000000, 0x94fbf22500000000,
- 0xf0ce206300000000, 0x2c22915e00000000, 0x5f877f6e00000000,
- 0x836bce5300000000, 0xe75e1c1500000000, 0x3bb2ad2800000000,
- 0x2f34b89800000000, 0xf3d809a500000000, 0x97eddbe300000000,
- 0x4b016ade00000000, 0xfee7815800000000, 0x220b306500000000,
- 0x463ee22300000000, 0x9ad2531e00000000, 0x8e5446ae00000000,
- 0x52b8f79300000000, 0x368d25d500000000, 0xea6194e800000000,
- 0x1d46830300000000, 0xc1aa323e00000000, 0xa59fe07800000000,
- 0x7973514500000000, 0x6df544f500000000, 0xb119f5c800000000,
- 0xd52c278e00000000, 0x09c096b300000000, 0xbc267d3500000000,
- 0x60cacc0800000000, 0x04ff1e4e00000000, 0xd813af7300000000,
- 0xcc95bac300000000, 0x10790bfe00000000, 0x744cd9b800000000,
- 0xa8a0688500000000}};
-
-#else /* W == 4 */
-
-local const z_crc_t FAR crc_braid_table[][256] = {
- {0x00000000, 0x81256527, 0xd93bcc0f, 0x581ea928, 0x69069e5f,
- 0xe823fb78, 0xb03d5250, 0x31183777, 0xd20d3cbe, 0x53285999,
- 0x0b36f0b1, 0x8a139596, 0xbb0ba2e1, 0x3a2ec7c6, 0x62306eee,
- 0xe3150bc9, 0x7f6b7f3d, 0xfe4e1a1a, 0xa650b332, 0x2775d615,
- 0x166de162, 0x97488445, 0xcf562d6d, 0x4e73484a, 0xad664383,
- 0x2c4326a4, 0x745d8f8c, 0xf578eaab, 0xc460dddc, 0x4545b8fb,
- 0x1d5b11d3, 0x9c7e74f4, 0xfed6fe7a, 0x7ff39b5d, 0x27ed3275,
- 0xa6c85752, 0x97d06025, 0x16f50502, 0x4eebac2a, 0xcfcec90d,
- 0x2cdbc2c4, 0xadfea7e3, 0xf5e00ecb, 0x74c56bec, 0x45dd5c9b,
- 0xc4f839bc, 0x9ce69094, 0x1dc3f5b3, 0x81bd8147, 0x0098e460,
- 0x58864d48, 0xd9a3286f, 0xe8bb1f18, 0x699e7a3f, 0x3180d317,
- 0xb0a5b630, 0x53b0bdf9, 0xd295d8de, 0x8a8b71f6, 0x0bae14d1,
- 0x3ab623a6, 0xbb934681, 0xe38defa9, 0x62a88a8e, 0x26dcfab5,
- 0xa7f99f92, 0xffe736ba, 0x7ec2539d, 0x4fda64ea, 0xceff01cd,
- 0x96e1a8e5, 0x17c4cdc2, 0xf4d1c60b, 0x75f4a32c, 0x2dea0a04,
- 0xaccf6f23, 0x9dd75854, 0x1cf23d73, 0x44ec945b, 0xc5c9f17c,
- 0x59b78588, 0xd892e0af, 0x808c4987, 0x01a92ca0, 0x30b11bd7,
- 0xb1947ef0, 0xe98ad7d8, 0x68afb2ff, 0x8bbab936, 0x0a9fdc11,
- 0x52817539, 0xd3a4101e, 0xe2bc2769, 0x6399424e, 0x3b87eb66,
- 0xbaa28e41, 0xd80a04cf, 0x592f61e8, 0x0131c8c0, 0x8014ade7,
- 0xb10c9a90, 0x3029ffb7, 0x6837569f, 0xe91233b8, 0x0a073871,
- 0x8b225d56, 0xd33cf47e, 0x52199159, 0x6301a62e, 0xe224c309,
- 0xba3a6a21, 0x3b1f0f06, 0xa7617bf2, 0x26441ed5, 0x7e5ab7fd,
- 0xff7fd2da, 0xce67e5ad, 0x4f42808a, 0x175c29a2, 0x96794c85,
- 0x756c474c, 0xf449226b, 0xac578b43, 0x2d72ee64, 0x1c6ad913,
- 0x9d4fbc34, 0xc551151c, 0x4474703b, 0x4db9f56a, 0xcc9c904d,
- 0x94823965, 0x15a75c42, 0x24bf6b35, 0xa59a0e12, 0xfd84a73a,
- 0x7ca1c21d, 0x9fb4c9d4, 0x1e91acf3, 0x468f05db, 0xc7aa60fc,
- 0xf6b2578b, 0x779732ac, 0x2f899b84, 0xaeacfea3, 0x32d28a57,
- 0xb3f7ef70, 0xebe94658, 0x6acc237f, 0x5bd41408, 0xdaf1712f,
- 0x82efd807, 0x03cabd20, 0xe0dfb6e9, 0x61fad3ce, 0x39e47ae6,
- 0xb8c11fc1, 0x89d928b6, 0x08fc4d91, 0x50e2e4b9, 0xd1c7819e,
- 0xb36f0b10, 0x324a6e37, 0x6a54c71f, 0xeb71a238, 0xda69954f,
- 0x5b4cf068, 0x03525940, 0x82773c67, 0x616237ae, 0xe0475289,
- 0xb859fba1, 0x397c9e86, 0x0864a9f1, 0x8941ccd6, 0xd15f65fe,
- 0x507a00d9, 0xcc04742d, 0x4d21110a, 0x153fb822, 0x941add05,
- 0xa502ea72, 0x24278f55, 0x7c39267d, 0xfd1c435a, 0x1e094893,
- 0x9f2c2db4, 0xc732849c, 0x4617e1bb, 0x770fd6cc, 0xf62ab3eb,
- 0xae341ac3, 0x2f117fe4, 0x6b650fdf, 0xea406af8, 0xb25ec3d0,
- 0x337ba6f7, 0x02639180, 0x8346f4a7, 0xdb585d8f, 0x5a7d38a8,
- 0xb9683361, 0x384d5646, 0x6053ff6e, 0xe1769a49, 0xd06ead3e,
- 0x514bc819, 0x09556131, 0x88700416, 0x140e70e2, 0x952b15c5,
- 0xcd35bced, 0x4c10d9ca, 0x7d08eebd, 0xfc2d8b9a, 0xa43322b2,
- 0x25164795, 0xc6034c5c, 0x4726297b, 0x1f388053, 0x9e1de574,
- 0xaf05d203, 0x2e20b724, 0x763e1e0c, 0xf71b7b2b, 0x95b3f1a5,
- 0x14969482, 0x4c883daa, 0xcdad588d, 0xfcb56ffa, 0x7d900add,
- 0x258ea3f5, 0xa4abc6d2, 0x47becd1b, 0xc69ba83c, 0x9e850114,
- 0x1fa06433, 0x2eb85344, 0xaf9d3663, 0xf7839f4b, 0x76a6fa6c,
- 0xead88e98, 0x6bfdebbf, 0x33e34297, 0xb2c627b0, 0x83de10c7,
- 0x02fb75e0, 0x5ae5dcc8, 0xdbc0b9ef, 0x38d5b226, 0xb9f0d701,
- 0xe1ee7e29, 0x60cb1b0e, 0x51d32c79, 0xd0f6495e, 0x88e8e076,
- 0x09cd8551},
- {0x00000000, 0x9b73ead4, 0xed96d3e9, 0x76e5393d, 0x005ca193,
- 0x9b2f4b47, 0xedca727a, 0x76b998ae, 0x00b94326, 0x9bcaa9f2,
- 0xed2f90cf, 0x765c7a1b, 0x00e5e2b5, 0x9b960861, 0xed73315c,
- 0x7600db88, 0x0172864c, 0x9a016c98, 0xece455a5, 0x7797bf71,
- 0x012e27df, 0x9a5dcd0b, 0xecb8f436, 0x77cb1ee2, 0x01cbc56a,
- 0x9ab82fbe, 0xec5d1683, 0x772efc57, 0x019764f9, 0x9ae48e2d,
- 0xec01b710, 0x77725dc4, 0x02e50c98, 0x9996e64c, 0xef73df71,
- 0x740035a5, 0x02b9ad0b, 0x99ca47df, 0xef2f7ee2, 0x745c9436,
- 0x025c4fbe, 0x992fa56a, 0xefca9c57, 0x74b97683, 0x0200ee2d,
- 0x997304f9, 0xef963dc4, 0x74e5d710, 0x03978ad4, 0x98e46000,
- 0xee01593d, 0x7572b3e9, 0x03cb2b47, 0x98b8c193, 0xee5df8ae,
- 0x752e127a, 0x032ec9f2, 0x985d2326, 0xeeb81a1b, 0x75cbf0cf,
- 0x03726861, 0x980182b5, 0xeee4bb88, 0x7597515c, 0x05ca1930,
- 0x9eb9f3e4, 0xe85ccad9, 0x732f200d, 0x0596b8a3, 0x9ee55277,
- 0xe8006b4a, 0x7373819e, 0x05735a16, 0x9e00b0c2, 0xe8e589ff,
- 0x7396632b, 0x052ffb85, 0x9e5c1151, 0xe8b9286c, 0x73cac2b8,
- 0x04b89f7c, 0x9fcb75a8, 0xe92e4c95, 0x725da641, 0x04e43eef,
- 0x9f97d43b, 0xe972ed06, 0x720107d2, 0x0401dc5a, 0x9f72368e,
- 0xe9970fb3, 0x72e4e567, 0x045d7dc9, 0x9f2e971d, 0xe9cbae20,
- 0x72b844f4, 0x072f15a8, 0x9c5cff7c, 0xeab9c641, 0x71ca2c95,
- 0x0773b43b, 0x9c005eef, 0xeae567d2, 0x71968d06, 0x0796568e,
- 0x9ce5bc5a, 0xea008567, 0x71736fb3, 0x07caf71d, 0x9cb91dc9,
- 0xea5c24f4, 0x712fce20, 0x065d93e4, 0x9d2e7930, 0xebcb400d,
- 0x70b8aad9, 0x06013277, 0x9d72d8a3, 0xeb97e19e, 0x70e40b4a,
- 0x06e4d0c2, 0x9d973a16, 0xeb72032b, 0x7001e9ff, 0x06b87151,
- 0x9dcb9b85, 0xeb2ea2b8, 0x705d486c, 0x0b943260, 0x90e7d8b4,
- 0xe602e189, 0x7d710b5d, 0x0bc893f3, 0x90bb7927, 0xe65e401a,
- 0x7d2daace, 0x0b2d7146, 0x905e9b92, 0xe6bba2af, 0x7dc8487b,
- 0x0b71d0d5, 0x90023a01, 0xe6e7033c, 0x7d94e9e8, 0x0ae6b42c,
- 0x91955ef8, 0xe77067c5, 0x7c038d11, 0x0aba15bf, 0x91c9ff6b,
- 0xe72cc656, 0x7c5f2c82, 0x0a5ff70a, 0x912c1dde, 0xe7c924e3,
- 0x7cbace37, 0x0a035699, 0x9170bc4d, 0xe7958570, 0x7ce66fa4,
- 0x09713ef8, 0x9202d42c, 0xe4e7ed11, 0x7f9407c5, 0x092d9f6b,
- 0x925e75bf, 0xe4bb4c82, 0x7fc8a656, 0x09c87dde, 0x92bb970a,
- 0xe45eae37, 0x7f2d44e3, 0x0994dc4d, 0x92e73699, 0xe4020fa4,
- 0x7f71e570, 0x0803b8b4, 0x93705260, 0xe5956b5d, 0x7ee68189,
- 0x085f1927, 0x932cf3f3, 0xe5c9cace, 0x7eba201a, 0x08bafb92,
- 0x93c91146, 0xe52c287b, 0x7e5fc2af, 0x08e65a01, 0x9395b0d5,
- 0xe57089e8, 0x7e03633c, 0x0e5e2b50, 0x952dc184, 0xe3c8f8b9,
- 0x78bb126d, 0x0e028ac3, 0x95716017, 0xe394592a, 0x78e7b3fe,
- 0x0ee76876, 0x959482a2, 0xe371bb9f, 0x7802514b, 0x0ebbc9e5,
- 0x95c82331, 0xe32d1a0c, 0x785ef0d8, 0x0f2cad1c, 0x945f47c8,
- 0xe2ba7ef5, 0x79c99421, 0x0f700c8f, 0x9403e65b, 0xe2e6df66,
- 0x799535b2, 0x0f95ee3a, 0x94e604ee, 0xe2033dd3, 0x7970d707,
- 0x0fc94fa9, 0x94baa57d, 0xe25f9c40, 0x792c7694, 0x0cbb27c8,
- 0x97c8cd1c, 0xe12df421, 0x7a5e1ef5, 0x0ce7865b, 0x97946c8f,
- 0xe17155b2, 0x7a02bf66, 0x0c0264ee, 0x97718e3a, 0xe194b707,
- 0x7ae75dd3, 0x0c5ec57d, 0x972d2fa9, 0xe1c81694, 0x7abbfc40,
- 0x0dc9a184, 0x96ba4b50, 0xe05f726d, 0x7b2c98b9, 0x0d950017,
- 0x96e6eac3, 0xe003d3fe, 0x7b70392a, 0x0d70e2a2, 0x96030876,
- 0xe0e6314b, 0x7b95db9f, 0x0d2c4331, 0x965fa9e5, 0xe0ba90d8,
- 0x7bc97a0c},
- {0x00000000, 0x172864c0, 0x2e50c980, 0x3978ad40, 0x5ca19300,
- 0x4b89f7c0, 0x72f15a80, 0x65d93e40, 0xb9432600, 0xae6b42c0,
- 0x9713ef80, 0x803b8b40, 0xe5e2b500, 0xf2cad1c0, 0xcbb27c80,
- 0xdc9a1840, 0xa9f74a41, 0xbedf2e81, 0x87a783c1, 0x908fe701,
- 0xf556d941, 0xe27ebd81, 0xdb0610c1, 0xcc2e7401, 0x10b46c41,
- 0x079c0881, 0x3ee4a5c1, 0x29ccc101, 0x4c15ff41, 0x5b3d9b81,
- 0x624536c1, 0x756d5201, 0x889f92c3, 0x9fb7f603, 0xa6cf5b43,
- 0xb1e73f83, 0xd43e01c3, 0xc3166503, 0xfa6ec843, 0xed46ac83,
- 0x31dcb4c3, 0x26f4d003, 0x1f8c7d43, 0x08a41983, 0x6d7d27c3,
- 0x7a554303, 0x432dee43, 0x54058a83, 0x2168d882, 0x3640bc42,
- 0x0f381102, 0x181075c2, 0x7dc94b82, 0x6ae12f42, 0x53998202,
- 0x44b1e6c2, 0x982bfe82, 0x8f039a42, 0xb67b3702, 0xa15353c2,
- 0xc48a6d82, 0xd3a20942, 0xeadaa402, 0xfdf2c0c2, 0xca4e23c7,
- 0xdd664707, 0xe41eea47, 0xf3368e87, 0x96efb0c7, 0x81c7d407,
- 0xb8bf7947, 0xaf971d87, 0x730d05c7, 0x64256107, 0x5d5dcc47,
- 0x4a75a887, 0x2fac96c7, 0x3884f207, 0x01fc5f47, 0x16d43b87,
- 0x63b96986, 0x74910d46, 0x4de9a006, 0x5ac1c4c6, 0x3f18fa86,
- 0x28309e46, 0x11483306, 0x066057c6, 0xdafa4f86, 0xcdd22b46,
- 0xf4aa8606, 0xe382e2c6, 0x865bdc86, 0x9173b846, 0xa80b1506,
- 0xbf2371c6, 0x42d1b104, 0x55f9d5c4, 0x6c817884, 0x7ba91c44,
- 0x1e702204, 0x095846c4, 0x3020eb84, 0x27088f44, 0xfb929704,
- 0xecbaf3c4, 0xd5c25e84, 0xc2ea3a44, 0xa7330404, 0xb01b60c4,
- 0x8963cd84, 0x9e4ba944, 0xeb26fb45, 0xfc0e9f85, 0xc57632c5,
- 0xd25e5605, 0xb7876845, 0xa0af0c85, 0x99d7a1c5, 0x8effc505,
- 0x5265dd45, 0x454db985, 0x7c3514c5, 0x6b1d7005, 0x0ec44e45,
- 0x19ec2a85, 0x209487c5, 0x37bce305, 0x4fed41cf, 0x58c5250f,
- 0x61bd884f, 0x7695ec8f, 0x134cd2cf, 0x0464b60f, 0x3d1c1b4f,
- 0x2a347f8f, 0xf6ae67cf, 0xe186030f, 0xd8feae4f, 0xcfd6ca8f,
- 0xaa0ff4cf, 0xbd27900f, 0x845f3d4f, 0x9377598f, 0xe61a0b8e,
- 0xf1326f4e, 0xc84ac20e, 0xdf62a6ce, 0xbabb988e, 0xad93fc4e,
- 0x94eb510e, 0x83c335ce, 0x5f592d8e, 0x4871494e, 0x7109e40e,
- 0x662180ce, 0x03f8be8e, 0x14d0da4e, 0x2da8770e, 0x3a8013ce,
- 0xc772d30c, 0xd05ab7cc, 0xe9221a8c, 0xfe0a7e4c, 0x9bd3400c,
- 0x8cfb24cc, 0xb583898c, 0xa2abed4c, 0x7e31f50c, 0x691991cc,
- 0x50613c8c, 0x4749584c, 0x2290660c, 0x35b802cc, 0x0cc0af8c,
- 0x1be8cb4c, 0x6e85994d, 0x79adfd8d, 0x40d550cd, 0x57fd340d,
- 0x32240a4d, 0x250c6e8d, 0x1c74c3cd, 0x0b5ca70d, 0xd7c6bf4d,
- 0xc0eedb8d, 0xf99676cd, 0xeebe120d, 0x8b672c4d, 0x9c4f488d,
- 0xa537e5cd, 0xb21f810d, 0x85a36208, 0x928b06c8, 0xabf3ab88,
- 0xbcdbcf48, 0xd902f108, 0xce2a95c8, 0xf7523888, 0xe07a5c48,
- 0x3ce04408, 0x2bc820c8, 0x12b08d88, 0x0598e948, 0x6041d708,
- 0x7769b3c8, 0x4e111e88, 0x59397a48, 0x2c542849, 0x3b7c4c89,
- 0x0204e1c9, 0x152c8509, 0x70f5bb49, 0x67dddf89, 0x5ea572c9,
- 0x498d1609, 0x95170e49, 0x823f6a89, 0xbb47c7c9, 0xac6fa309,
- 0xc9b69d49, 0xde9ef989, 0xe7e654c9, 0xf0ce3009, 0x0d3cf0cb,
- 0x1a14940b, 0x236c394b, 0x34445d8b, 0x519d63cb, 0x46b5070b,
- 0x7fcdaa4b, 0x68e5ce8b, 0xb47fd6cb, 0xa357b20b, 0x9a2f1f4b,
- 0x8d077b8b, 0xe8de45cb, 0xfff6210b, 0xc68e8c4b, 0xd1a6e88b,
- 0xa4cbba8a, 0xb3e3de4a, 0x8a9b730a, 0x9db317ca, 0xf86a298a,
- 0xef424d4a, 0xd63ae00a, 0xc11284ca, 0x1d889c8a, 0x0aa0f84a,
- 0x33d8550a, 0x24f031ca, 0x41290f8a, 0x56016b4a, 0x6f79c60a,
- 0x7851a2ca},
- {0x00000000, 0x9fda839e, 0xe4c4017d, 0x7b1e82e3, 0x12f904bb,
- 0x8d238725, 0xf63d05c6, 0x69e78658, 0x25f20976, 0xba288ae8,
- 0xc136080b, 0x5eec8b95, 0x370b0dcd, 0xa8d18e53, 0xd3cf0cb0,
- 0x4c158f2e, 0x4be412ec, 0xd43e9172, 0xaf201391, 0x30fa900f,
- 0x591d1657, 0xc6c795c9, 0xbdd9172a, 0x220394b4, 0x6e161b9a,
- 0xf1cc9804, 0x8ad21ae7, 0x15089979, 0x7cef1f21, 0xe3359cbf,
- 0x982b1e5c, 0x07f19dc2, 0x97c825d8, 0x0812a646, 0x730c24a5,
- 0xecd6a73b, 0x85312163, 0x1aeba2fd, 0x61f5201e, 0xfe2fa380,
- 0xb23a2cae, 0x2de0af30, 0x56fe2dd3, 0xc924ae4d, 0xa0c32815,
- 0x3f19ab8b, 0x44072968, 0xdbddaaf6, 0xdc2c3734, 0x43f6b4aa,
- 0x38e83649, 0xa732b5d7, 0xced5338f, 0x510fb011, 0x2a1132f2,
- 0xb5cbb16c, 0xf9de3e42, 0x6604bddc, 0x1d1a3f3f, 0x82c0bca1,
- 0xeb273af9, 0x74fdb967, 0x0fe33b84, 0x9039b81a, 0xf4e14df1,
- 0x6b3bce6f, 0x10254c8c, 0x8fffcf12, 0xe618494a, 0x79c2cad4,
- 0x02dc4837, 0x9d06cba9, 0xd1134487, 0x4ec9c719, 0x35d745fa,
- 0xaa0dc664, 0xc3ea403c, 0x5c30c3a2, 0x272e4141, 0xb8f4c2df,
- 0xbf055f1d, 0x20dfdc83, 0x5bc15e60, 0xc41bddfe, 0xadfc5ba6,
- 0x3226d838, 0x49385adb, 0xd6e2d945, 0x9af7566b, 0x052dd5f5,
- 0x7e335716, 0xe1e9d488, 0x880e52d0, 0x17d4d14e, 0x6cca53ad,
- 0xf310d033, 0x63296829, 0xfcf3ebb7, 0x87ed6954, 0x1837eaca,
- 0x71d06c92, 0xee0aef0c, 0x95146def, 0x0aceee71, 0x46db615f,
- 0xd901e2c1, 0xa21f6022, 0x3dc5e3bc, 0x542265e4, 0xcbf8e67a,
- 0xb0e66499, 0x2f3ce707, 0x28cd7ac5, 0xb717f95b, 0xcc097bb8,
- 0x53d3f826, 0x3a347e7e, 0xa5eefde0, 0xdef07f03, 0x412afc9d,
- 0x0d3f73b3, 0x92e5f02d, 0xe9fb72ce, 0x7621f150, 0x1fc67708,
- 0x801cf496, 0xfb027675, 0x64d8f5eb, 0x32b39da3, 0xad691e3d,
- 0xd6779cde, 0x49ad1f40, 0x204a9918, 0xbf901a86, 0xc48e9865,
- 0x5b541bfb, 0x174194d5, 0x889b174b, 0xf38595a8, 0x6c5f1636,
- 0x05b8906e, 0x9a6213f0, 0xe17c9113, 0x7ea6128d, 0x79578f4f,
- 0xe68d0cd1, 0x9d938e32, 0x02490dac, 0x6bae8bf4, 0xf474086a,
- 0x8f6a8a89, 0x10b00917, 0x5ca58639, 0xc37f05a7, 0xb8618744,
- 0x27bb04da, 0x4e5c8282, 0xd186011c, 0xaa9883ff, 0x35420061,
- 0xa57bb87b, 0x3aa13be5, 0x41bfb906, 0xde653a98, 0xb782bcc0,
- 0x28583f5e, 0x5346bdbd, 0xcc9c3e23, 0x8089b10d, 0x1f533293,
- 0x644db070, 0xfb9733ee, 0x9270b5b6, 0x0daa3628, 0x76b4b4cb,
- 0xe96e3755, 0xee9faa97, 0x71452909, 0x0a5babea, 0x95812874,
- 0xfc66ae2c, 0x63bc2db2, 0x18a2af51, 0x87782ccf, 0xcb6da3e1,
- 0x54b7207f, 0x2fa9a29c, 0xb0732102, 0xd994a75a, 0x464e24c4,
- 0x3d50a627, 0xa28a25b9, 0xc652d052, 0x598853cc, 0x2296d12f,
- 0xbd4c52b1, 0xd4abd4e9, 0x4b715777, 0x306fd594, 0xafb5560a,
- 0xe3a0d924, 0x7c7a5aba, 0x0764d859, 0x98be5bc7, 0xf159dd9f,
- 0x6e835e01, 0x159ddce2, 0x8a475f7c, 0x8db6c2be, 0x126c4120,
- 0x6972c3c3, 0xf6a8405d, 0x9f4fc605, 0x0095459b, 0x7b8bc778,
- 0xe45144e6, 0xa844cbc8, 0x379e4856, 0x4c80cab5, 0xd35a492b,
- 0xbabdcf73, 0x25674ced, 0x5e79ce0e, 0xc1a34d90, 0x519af58a,
- 0xce407614, 0xb55ef4f7, 0x2a847769, 0x4363f131, 0xdcb972af,
- 0xa7a7f04c, 0x387d73d2, 0x7468fcfc, 0xebb27f62, 0x90acfd81,
- 0x0f767e1f, 0x6691f847, 0xf94b7bd9, 0x8255f93a, 0x1d8f7aa4,
- 0x1a7ee766, 0x85a464f8, 0xfebae61b, 0x61606585, 0x0887e3dd,
- 0x975d6043, 0xec43e2a0, 0x7399613e, 0x3f8cee10, 0xa0566d8e,
- 0xdb48ef6d, 0x44926cf3, 0x2d75eaab, 0xb2af6935, 0xc9b1ebd6,
- 0x566b6848}};
-
-local const z_word_t FAR crc_braid_big_table[][256] = {
- {0x00000000, 0x9e83da9f, 0x7d01c4e4, 0xe3821e7b, 0xbb04f912,
- 0x2587238d, 0xc6053df6, 0x5886e769, 0x7609f225, 0xe88a28ba,
- 0x0b0836c1, 0x958bec5e, 0xcd0d0b37, 0x538ed1a8, 0xb00ccfd3,
- 0x2e8f154c, 0xec12e44b, 0x72913ed4, 0x911320af, 0x0f90fa30,
- 0x57161d59, 0xc995c7c6, 0x2a17d9bd, 0xb4940322, 0x9a1b166e,
- 0x0498ccf1, 0xe71ad28a, 0x79990815, 0x211fef7c, 0xbf9c35e3,
- 0x5c1e2b98, 0xc29df107, 0xd825c897, 0x46a61208, 0xa5240c73,
- 0x3ba7d6ec, 0x63213185, 0xfda2eb1a, 0x1e20f561, 0x80a32ffe,
- 0xae2c3ab2, 0x30afe02d, 0xd32dfe56, 0x4dae24c9, 0x1528c3a0,
- 0x8bab193f, 0x68290744, 0xf6aadddb, 0x34372cdc, 0xaab4f643,
- 0x4936e838, 0xd7b532a7, 0x8f33d5ce, 0x11b00f51, 0xf232112a,
- 0x6cb1cbb5, 0x423edef9, 0xdcbd0466, 0x3f3f1a1d, 0xa1bcc082,
- 0xf93a27eb, 0x67b9fd74, 0x843be30f, 0x1ab83990, 0xf14de1f4,
- 0x6fce3b6b, 0x8c4c2510, 0x12cfff8f, 0x4a4918e6, 0xd4cac279,
- 0x3748dc02, 0xa9cb069d, 0x874413d1, 0x19c7c94e, 0xfa45d735,
- 0x64c60daa, 0x3c40eac3, 0xa2c3305c, 0x41412e27, 0xdfc2f4b8,
- 0x1d5f05bf, 0x83dcdf20, 0x605ec15b, 0xfedd1bc4, 0xa65bfcad,
- 0x38d82632, 0xdb5a3849, 0x45d9e2d6, 0x6b56f79a, 0xf5d52d05,
- 0x1657337e, 0x88d4e9e1, 0xd0520e88, 0x4ed1d417, 0xad53ca6c,
- 0x33d010f3, 0x29682963, 0xb7ebf3fc, 0x5469ed87, 0xcaea3718,
- 0x926cd071, 0x0cef0aee, 0xef6d1495, 0x71eece0a, 0x5f61db46,
- 0xc1e201d9, 0x22601fa2, 0xbce3c53d, 0xe4652254, 0x7ae6f8cb,
- 0x9964e6b0, 0x07e73c2f, 0xc57acd28, 0x5bf917b7, 0xb87b09cc,
- 0x26f8d353, 0x7e7e343a, 0xe0fdeea5, 0x037ff0de, 0x9dfc2a41,
- 0xb3733f0d, 0x2df0e592, 0xce72fbe9, 0x50f12176, 0x0877c61f,
- 0x96f41c80, 0x757602fb, 0xebf5d864, 0xa39db332, 0x3d1e69ad,
- 0xde9c77d6, 0x401fad49, 0x18994a20, 0x861a90bf, 0x65988ec4,
- 0xfb1b545b, 0xd5944117, 0x4b179b88, 0xa89585f3, 0x36165f6c,
- 0x6e90b805, 0xf013629a, 0x13917ce1, 0x8d12a67e, 0x4f8f5779,
- 0xd10c8de6, 0x328e939d, 0xac0d4902, 0xf48bae6b, 0x6a0874f4,
- 0x898a6a8f, 0x1709b010, 0x3986a55c, 0xa7057fc3, 0x448761b8,
- 0xda04bb27, 0x82825c4e, 0x1c0186d1, 0xff8398aa, 0x61004235,
- 0x7bb87ba5, 0xe53ba13a, 0x06b9bf41, 0x983a65de, 0xc0bc82b7,
- 0x5e3f5828, 0xbdbd4653, 0x233e9ccc, 0x0db18980, 0x9332531f,
- 0x70b04d64, 0xee3397fb, 0xb6b57092, 0x2836aa0d, 0xcbb4b476,
- 0x55376ee9, 0x97aa9fee, 0x09294571, 0xeaab5b0a, 0x74288195,
- 0x2cae66fc, 0xb22dbc63, 0x51afa218, 0xcf2c7887, 0xe1a36dcb,
- 0x7f20b754, 0x9ca2a92f, 0x022173b0, 0x5aa794d9, 0xc4244e46,
- 0x27a6503d, 0xb9258aa2, 0x52d052c6, 0xcc538859, 0x2fd19622,
- 0xb1524cbd, 0xe9d4abd4, 0x7757714b, 0x94d56f30, 0x0a56b5af,
- 0x24d9a0e3, 0xba5a7a7c, 0x59d86407, 0xc75bbe98, 0x9fdd59f1,
- 0x015e836e, 0xe2dc9d15, 0x7c5f478a, 0xbec2b68d, 0x20416c12,
- 0xc3c37269, 0x5d40a8f6, 0x05c64f9f, 0x9b459500, 0x78c78b7b,
- 0xe64451e4, 0xc8cb44a8, 0x56489e37, 0xb5ca804c, 0x2b495ad3,
- 0x73cfbdba, 0xed4c6725, 0x0ece795e, 0x904da3c1, 0x8af59a51,
- 0x147640ce, 0xf7f45eb5, 0x6977842a, 0x31f16343, 0xaf72b9dc,
- 0x4cf0a7a7, 0xd2737d38, 0xfcfc6874, 0x627fb2eb, 0x81fdac90,
- 0x1f7e760f, 0x47f89166, 0xd97b4bf9, 0x3af95582, 0xa47a8f1d,
- 0x66e77e1a, 0xf864a485, 0x1be6bafe, 0x85656061, 0xdde38708,
- 0x43605d97, 0xa0e243ec, 0x3e619973, 0x10ee8c3f, 0x8e6d56a0,
- 0x6def48db, 0xf36c9244, 0xabea752d, 0x3569afb2, 0xd6ebb1c9,
- 0x48686b56},
- {0x00000000, 0xc0642817, 0x80c9502e, 0x40ad7839, 0x0093a15c,
- 0xc0f7894b, 0x805af172, 0x403ed965, 0x002643b9, 0xc0426bae,
- 0x80ef1397, 0x408b3b80, 0x00b5e2e5, 0xc0d1caf2, 0x807cb2cb,
- 0x40189adc, 0x414af7a9, 0x812edfbe, 0xc183a787, 0x01e78f90,
- 0x41d956f5, 0x81bd7ee2, 0xc11006db, 0x01742ecc, 0x416cb410,
- 0x81089c07, 0xc1a5e43e, 0x01c1cc29, 0x41ff154c, 0x819b3d5b,
- 0xc1364562, 0x01526d75, 0xc3929f88, 0x03f6b79f, 0x435bcfa6,
- 0x833fe7b1, 0xc3013ed4, 0x036516c3, 0x43c86efa, 0x83ac46ed,
- 0xc3b4dc31, 0x03d0f426, 0x437d8c1f, 0x8319a408, 0xc3277d6d,
- 0x0343557a, 0x43ee2d43, 0x838a0554, 0x82d86821, 0x42bc4036,
- 0x0211380f, 0xc2751018, 0x824bc97d, 0x422fe16a, 0x02829953,
- 0xc2e6b144, 0x82fe2b98, 0x429a038f, 0x02377bb6, 0xc25353a1,
- 0x826d8ac4, 0x4209a2d3, 0x02a4daea, 0xc2c0f2fd, 0xc7234eca,
- 0x074766dd, 0x47ea1ee4, 0x878e36f3, 0xc7b0ef96, 0x07d4c781,
- 0x4779bfb8, 0x871d97af, 0xc7050d73, 0x07612564, 0x47cc5d5d,
- 0x87a8754a, 0xc796ac2f, 0x07f28438, 0x475ffc01, 0x873bd416,
- 0x8669b963, 0x460d9174, 0x06a0e94d, 0xc6c4c15a, 0x86fa183f,
- 0x469e3028, 0x06334811, 0xc6576006, 0x864ffada, 0x462bd2cd,
- 0x0686aaf4, 0xc6e282e3, 0x86dc5b86, 0x46b87391, 0x06150ba8,
- 0xc67123bf, 0x04b1d142, 0xc4d5f955, 0x8478816c, 0x441ca97b,
- 0x0422701e, 0xc4465809, 0x84eb2030, 0x448f0827, 0x049792fb,
- 0xc4f3baec, 0x845ec2d5, 0x443aeac2, 0x040433a7, 0xc4601bb0,
- 0x84cd6389, 0x44a94b9e, 0x45fb26eb, 0x859f0efc, 0xc53276c5,
- 0x05565ed2, 0x456887b7, 0x850cafa0, 0xc5a1d799, 0x05c5ff8e,
- 0x45dd6552, 0x85b94d45, 0xc514357c, 0x05701d6b, 0x454ec40e,
- 0x852aec19, 0xc5879420, 0x05e3bc37, 0xcf41ed4f, 0x0f25c558,
- 0x4f88bd61, 0x8fec9576, 0xcfd24c13, 0x0fb66404, 0x4f1b1c3d,
- 0x8f7f342a, 0xcf67aef6, 0x0f0386e1, 0x4faefed8, 0x8fcad6cf,
- 0xcff40faa, 0x0f9027bd, 0x4f3d5f84, 0x8f597793, 0x8e0b1ae6,
- 0x4e6f32f1, 0x0ec24ac8, 0xcea662df, 0x8e98bbba, 0x4efc93ad,
- 0x0e51eb94, 0xce35c383, 0x8e2d595f, 0x4e497148, 0x0ee40971,
- 0xce802166, 0x8ebef803, 0x4edad014, 0x0e77a82d, 0xce13803a,
- 0x0cd372c7, 0xccb75ad0, 0x8c1a22e9, 0x4c7e0afe, 0x0c40d39b,
- 0xcc24fb8c, 0x8c8983b5, 0x4cedaba2, 0x0cf5317e, 0xcc911969,
- 0x8c3c6150, 0x4c584947, 0x0c669022, 0xcc02b835, 0x8cafc00c,
- 0x4ccbe81b, 0x4d99856e, 0x8dfdad79, 0xcd50d540, 0x0d34fd57,
- 0x4d0a2432, 0x8d6e0c25, 0xcdc3741c, 0x0da75c0b, 0x4dbfc6d7,
- 0x8ddbeec0, 0xcd7696f9, 0x0d12beee, 0x4d2c678b, 0x8d484f9c,
- 0xcde537a5, 0x0d811fb2, 0x0862a385, 0xc8068b92, 0x88abf3ab,
- 0x48cfdbbc, 0x08f102d9, 0xc8952ace, 0x883852f7, 0x485c7ae0,
- 0x0844e03c, 0xc820c82b, 0x888db012, 0x48e99805, 0x08d74160,
- 0xc8b36977, 0x881e114e, 0x487a3959, 0x4928542c, 0x894c7c3b,
- 0xc9e10402, 0x09852c15, 0x49bbf570, 0x89dfdd67, 0xc972a55e,
- 0x09168d49, 0x490e1795, 0x896a3f82, 0xc9c747bb, 0x09a36fac,
- 0x499db6c9, 0x89f99ede, 0xc954e6e7, 0x0930cef0, 0xcbf03c0d,
- 0x0b94141a, 0x4b396c23, 0x8b5d4434, 0xcb639d51, 0x0b07b546,
- 0x4baacd7f, 0x8bcee568, 0xcbd67fb4, 0x0bb257a3, 0x4b1f2f9a,
- 0x8b7b078d, 0xcb45dee8, 0x0b21f6ff, 0x4b8c8ec6, 0x8be8a6d1,
- 0x8abacba4, 0x4adee3b3, 0x0a739b8a, 0xca17b39d, 0x8a296af8,
- 0x4a4d42ef, 0x0ae03ad6, 0xca8412c1, 0x8a9c881d, 0x4af8a00a,
- 0x0a55d833, 0xca31f024, 0x8a0f2941, 0x4a6b0156, 0x0ac6796f,
- 0xcaa25178},
- {0x00000000, 0xd4ea739b, 0xe9d396ed, 0x3d39e576, 0x93a15c00,
- 0x474b2f9b, 0x7a72caed, 0xae98b976, 0x2643b900, 0xf2a9ca9b,
- 0xcf902fed, 0x1b7a5c76, 0xb5e2e500, 0x6108969b, 0x5c3173ed,
- 0x88db0076, 0x4c867201, 0x986c019a, 0xa555e4ec, 0x71bf9777,
- 0xdf272e01, 0x0bcd5d9a, 0x36f4b8ec, 0xe21ecb77, 0x6ac5cb01,
- 0xbe2fb89a, 0x83165dec, 0x57fc2e77, 0xf9649701, 0x2d8ee49a,
- 0x10b701ec, 0xc45d7277, 0x980ce502, 0x4ce69699, 0x71df73ef,
- 0xa5350074, 0x0badb902, 0xdf47ca99, 0xe27e2fef, 0x36945c74,
- 0xbe4f5c02, 0x6aa52f99, 0x579ccaef, 0x8376b974, 0x2dee0002,
- 0xf9047399, 0xc43d96ef, 0x10d7e574, 0xd48a9703, 0x0060e498,
- 0x3d5901ee, 0xe9b37275, 0x472bcb03, 0x93c1b898, 0xaef85dee,
- 0x7a122e75, 0xf2c92e03, 0x26235d98, 0x1b1ab8ee, 0xcff0cb75,
- 0x61687203, 0xb5820198, 0x88bbe4ee, 0x5c519775, 0x3019ca05,
- 0xe4f3b99e, 0xd9ca5ce8, 0x0d202f73, 0xa3b89605, 0x7752e59e,
- 0x4a6b00e8, 0x9e817373, 0x165a7305, 0xc2b0009e, 0xff89e5e8,
- 0x2b639673, 0x85fb2f05, 0x51115c9e, 0x6c28b9e8, 0xb8c2ca73,
- 0x7c9fb804, 0xa875cb9f, 0x954c2ee9, 0x41a65d72, 0xef3ee404,
- 0x3bd4979f, 0x06ed72e9, 0xd2070172, 0x5adc0104, 0x8e36729f,
- 0xb30f97e9, 0x67e5e472, 0xc97d5d04, 0x1d972e9f, 0x20aecbe9,
- 0xf444b872, 0xa8152f07, 0x7cff5c9c, 0x41c6b9ea, 0x952cca71,
- 0x3bb47307, 0xef5e009c, 0xd267e5ea, 0x068d9671, 0x8e569607,
- 0x5abce59c, 0x678500ea, 0xb36f7371, 0x1df7ca07, 0xc91db99c,
- 0xf4245cea, 0x20ce2f71, 0xe4935d06, 0x30792e9d, 0x0d40cbeb,
- 0xd9aab870, 0x77320106, 0xa3d8729d, 0x9ee197eb, 0x4a0be470,
- 0xc2d0e406, 0x163a979d, 0x2b0372eb, 0xffe90170, 0x5171b806,
- 0x859bcb9d, 0xb8a22eeb, 0x6c485d70, 0x6032940b, 0xb4d8e790,
- 0x89e102e6, 0x5d0b717d, 0xf393c80b, 0x2779bb90, 0x1a405ee6,
- 0xceaa2d7d, 0x46712d0b, 0x929b5e90, 0xafa2bbe6, 0x7b48c87d,
- 0xd5d0710b, 0x013a0290, 0x3c03e7e6, 0xe8e9947d, 0x2cb4e60a,
- 0xf85e9591, 0xc56770e7, 0x118d037c, 0xbf15ba0a, 0x6bffc991,
- 0x56c62ce7, 0x822c5f7c, 0x0af75f0a, 0xde1d2c91, 0xe324c9e7,
- 0x37ceba7c, 0x9956030a, 0x4dbc7091, 0x708595e7, 0xa46fe67c,
- 0xf83e7109, 0x2cd40292, 0x11ede7e4, 0xc507947f, 0x6b9f2d09,
- 0xbf755e92, 0x824cbbe4, 0x56a6c87f, 0xde7dc809, 0x0a97bb92,
- 0x37ae5ee4, 0xe3442d7f, 0x4ddc9409, 0x9936e792, 0xa40f02e4,
- 0x70e5717f, 0xb4b80308, 0x60527093, 0x5d6b95e5, 0x8981e67e,
- 0x27195f08, 0xf3f32c93, 0xcecac9e5, 0x1a20ba7e, 0x92fbba08,
- 0x4611c993, 0x7b282ce5, 0xafc25f7e, 0x015ae608, 0xd5b09593,
- 0xe88970e5, 0x3c63037e, 0x502b5e0e, 0x84c12d95, 0xb9f8c8e3,
- 0x6d12bb78, 0xc38a020e, 0x17607195, 0x2a5994e3, 0xfeb3e778,
- 0x7668e70e, 0xa2829495, 0x9fbb71e3, 0x4b510278, 0xe5c9bb0e,
- 0x3123c895, 0x0c1a2de3, 0xd8f05e78, 0x1cad2c0f, 0xc8475f94,
- 0xf57ebae2, 0x2194c979, 0x8f0c700f, 0x5be60394, 0x66dfe6e2,
- 0xb2359579, 0x3aee950f, 0xee04e694, 0xd33d03e2, 0x07d77079,
- 0xa94fc90f, 0x7da5ba94, 0x409c5fe2, 0x94762c79, 0xc827bb0c,
- 0x1ccdc897, 0x21f42de1, 0xf51e5e7a, 0x5b86e70c, 0x8f6c9497,
- 0xb25571e1, 0x66bf027a, 0xee64020c, 0x3a8e7197, 0x07b794e1,
- 0xd35de77a, 0x7dc55e0c, 0xa92f2d97, 0x9416c8e1, 0x40fcbb7a,
- 0x84a1c90d, 0x504bba96, 0x6d725fe0, 0xb9982c7b, 0x1700950d,
- 0xc3eae696, 0xfed303e0, 0x2a39707b, 0xa2e2700d, 0x76080396,
- 0x4b31e6e0, 0x9fdb957b, 0x31432c0d, 0xe5a95f96, 0xd890bae0,
- 0x0c7ac97b},
- {0x00000000, 0x27652581, 0x0fcc3bd9, 0x28a91e58, 0x5f9e0669,
- 0x78fb23e8, 0x50523db0, 0x77371831, 0xbe3c0dd2, 0x99592853,
- 0xb1f0360b, 0x9695138a, 0xe1a20bbb, 0xc6c72e3a, 0xee6e3062,
- 0xc90b15e3, 0x3d7f6b7f, 0x1a1a4efe, 0x32b350a6, 0x15d67527,
- 0x62e16d16, 0x45844897, 0x6d2d56cf, 0x4a48734e, 0x834366ad,
- 0xa426432c, 0x8c8f5d74, 0xabea78f5, 0xdcdd60c4, 0xfbb84545,
- 0xd3115b1d, 0xf4747e9c, 0x7afed6fe, 0x5d9bf37f, 0x7532ed27,
- 0x5257c8a6, 0x2560d097, 0x0205f516, 0x2aaceb4e, 0x0dc9cecf,
- 0xc4c2db2c, 0xe3a7fead, 0xcb0ee0f5, 0xec6bc574, 0x9b5cdd45,
- 0xbc39f8c4, 0x9490e69c, 0xb3f5c31d, 0x4781bd81, 0x60e49800,
- 0x484d8658, 0x6f28a3d9, 0x181fbbe8, 0x3f7a9e69, 0x17d38031,
- 0x30b6a5b0, 0xf9bdb053, 0xded895d2, 0xf6718b8a, 0xd114ae0b,
- 0xa623b63a, 0x814693bb, 0xa9ef8de3, 0x8e8aa862, 0xb5fadc26,
- 0x929ff9a7, 0xba36e7ff, 0x9d53c27e, 0xea64da4f, 0xcd01ffce,
- 0xe5a8e196, 0xc2cdc417, 0x0bc6d1f4, 0x2ca3f475, 0x040aea2d,
- 0x236fcfac, 0x5458d79d, 0x733df21c, 0x5b94ec44, 0x7cf1c9c5,
- 0x8885b759, 0xafe092d8, 0x87498c80, 0xa02ca901, 0xd71bb130,
- 0xf07e94b1, 0xd8d78ae9, 0xffb2af68, 0x36b9ba8b, 0x11dc9f0a,
- 0x39758152, 0x1e10a4d3, 0x6927bce2, 0x4e429963, 0x66eb873b,
- 0x418ea2ba, 0xcf040ad8, 0xe8612f59, 0xc0c83101, 0xe7ad1480,
- 0x909a0cb1, 0xb7ff2930, 0x9f563768, 0xb83312e9, 0x7138070a,
- 0x565d228b, 0x7ef43cd3, 0x59911952, 0x2ea60163, 0x09c324e2,
- 0x216a3aba, 0x060f1f3b, 0xf27b61a7, 0xd51e4426, 0xfdb75a7e,
- 0xdad27fff, 0xade567ce, 0x8a80424f, 0xa2295c17, 0x854c7996,
- 0x4c476c75, 0x6b2249f4, 0x438b57ac, 0x64ee722d, 0x13d96a1c,
- 0x34bc4f9d, 0x1c1551c5, 0x3b707444, 0x6af5b94d, 0x4d909ccc,
- 0x65398294, 0x425ca715, 0x356bbf24, 0x120e9aa5, 0x3aa784fd,
- 0x1dc2a17c, 0xd4c9b49f, 0xf3ac911e, 0xdb058f46, 0xfc60aac7,
- 0x8b57b2f6, 0xac329777, 0x849b892f, 0xa3feacae, 0x578ad232,
- 0x70eff7b3, 0x5846e9eb, 0x7f23cc6a, 0x0814d45b, 0x2f71f1da,
- 0x07d8ef82, 0x20bdca03, 0xe9b6dfe0, 0xced3fa61, 0xe67ae439,
- 0xc11fc1b8, 0xb628d989, 0x914dfc08, 0xb9e4e250, 0x9e81c7d1,
- 0x100b6fb3, 0x376e4a32, 0x1fc7546a, 0x38a271eb, 0x4f9569da,
- 0x68f04c5b, 0x40595203, 0x673c7782, 0xae376261, 0x895247e0,
- 0xa1fb59b8, 0x869e7c39, 0xf1a96408, 0xd6cc4189, 0xfe655fd1,
- 0xd9007a50, 0x2d7404cc, 0x0a11214d, 0x22b83f15, 0x05dd1a94,
- 0x72ea02a5, 0x558f2724, 0x7d26397c, 0x5a431cfd, 0x9348091e,
- 0xb42d2c9f, 0x9c8432c7, 0xbbe11746, 0xccd60f77, 0xebb32af6,
- 0xc31a34ae, 0xe47f112f, 0xdf0f656b, 0xf86a40ea, 0xd0c35eb2,
- 0xf7a67b33, 0x80916302, 0xa7f44683, 0x8f5d58db, 0xa8387d5a,
- 0x613368b9, 0x46564d38, 0x6eff5360, 0x499a76e1, 0x3ead6ed0,
- 0x19c84b51, 0x31615509, 0x16047088, 0xe2700e14, 0xc5152b95,
- 0xedbc35cd, 0xcad9104c, 0xbdee087d, 0x9a8b2dfc, 0xb22233a4,
- 0x95471625, 0x5c4c03c6, 0x7b292647, 0x5380381f, 0x74e51d9e,
- 0x03d205af, 0x24b7202e, 0x0c1e3e76, 0x2b7b1bf7, 0xa5f1b395,
- 0x82949614, 0xaa3d884c, 0x8d58adcd, 0xfa6fb5fc, 0xdd0a907d,
- 0xf5a38e25, 0xd2c6aba4, 0x1bcdbe47, 0x3ca89bc6, 0x1401859e,
- 0x3364a01f, 0x4453b82e, 0x63369daf, 0x4b9f83f7, 0x6cfaa676,
- 0x988ed8ea, 0xbfebfd6b, 0x9742e333, 0xb027c6b2, 0xc710de83,
- 0xe075fb02, 0xc8dce55a, 0xefb9c0db, 0x26b2d538, 0x01d7f0b9,
- 0x297eeee1, 0x0e1bcb60, 0x792cd351, 0x5e49f6d0, 0x76e0e888,
- 0x5185cd09}};
-
-#endif
-
-#endif
-
-#endif
-
-local const z_crc_t FAR x2n_table[] = {
- 0x40000000, 0x20000000, 0x08000000, 0x00800000, 0x00008000,
- 0xedb88320, 0xb1e6b092, 0xa06a2517, 0xed627dae, 0x88d14467,
- 0xd7bbfe6a, 0xec447f11, 0x8e7ea170, 0x6427800e, 0x4d47bae0,
- 0x09fe548f, 0x83852d0f, 0x30362f1a, 0x7b5a9cc3, 0x31fec169,
- 0x9fec022a, 0x6c8dedc4, 0x15d6874d, 0x5fde7a4e, 0xbad90e37,
- 0x2e4e5eef, 0x4eaba214, 0xa8a472c0, 0x429a969e, 0x148d302a,
- 0xc40ba6d0, 0xc4e22c3c};
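
The x2n_table removed above holds x^(2^n) modulo the reflected CRC-32 polynomial (entry 5 is 0xedb88320, i.e. x^32 mod p(x)); upstream zlib uses such a table to combine the CRCs of two concatenated byte ranges without rescanning the data. Below is a minimal sketch of that idea with simplified stand-ins for zlib's internal helpers; the names multmodp_sketch and shift_by_bytes are illustrative and are not part of the deleted file.

    /* Sketch: carry-less multiplication of two polynomials modulo the
       reflected CRC-32 polynomial; bit 31 represents x^0. */
    static unsigned long multmodp_sketch(unsigned long a, unsigned long b) {
        unsigned long m = 0x80000000UL, p = 0;
        for (;;) {
            if (a & m) {
                p ^= b;
                if ((a & (m - 1)) == 0)
                    break;
            }
            m >>= 1;
            b = (b & 1) ? (b >> 1) ^ 0xedb88320UL : b >> 1;
        }
        return p;
    }

    /* x^(8*len2) mod p(x), built from the x^(2^n) table entries: shifting a
       CRC past len2 trailing bytes multiplies it by x^(8*len2). */
    static unsigned long shift_by_bytes(const unsigned long x2n[32],
                                        unsigned long len2) {
        unsigned long p = 0x80000000UL;      /* x^0 */
        unsigned k = 3;                      /* 8 bits per byte == 2^3 */
        while (len2) {
            if (len2 & 1)
                p = multmodp_sketch(x2n[k & 31], p);
            len2 >>= 1;
            k++;
        }
        return p;
    }

    /* crc(A followed by B) == multmodp_sketch(x^(8*len(B)), crc(A)) ^ crc(B) */
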
diff --git a/thirdparty/freetype/src/gzip/gzguts.h b/thirdparty/freetype/src/gzip/gzguts.h
deleted file mode 100644
index 4f09a52a7a..0000000000
--- a/thirdparty/freetype/src/gzip/gzguts.h
+++ /dev/null
@@ -1,219 +0,0 @@
-/* gzguts.h -- zlib internal header definitions for gz* operations
- * Copyright (C) 2004-2019 Mark Adler
- * For conditions of distribution and use, see copyright notice in zlib.h
- */
-
-#ifdef _LARGEFILE64_SOURCE
-# ifndef _LARGEFILE_SOURCE
-# define _LARGEFILE_SOURCE 1
-# endif
-# ifdef _FILE_OFFSET_BITS
-# undef _FILE_OFFSET_BITS
-# endif
-#endif
-
-#ifdef HAVE_HIDDEN
-# define ZLIB_INTERNAL __attribute__((visibility ("hidden")))
-#else
-# define ZLIB_INTERNAL
-#endif
-
-#include <stdio.h>
-#include "zlib.h"
-#ifdef STDC
-# include <string.h>
-# include <stdlib.h>
-# include <limits.h>
-#endif
-
-#ifndef _POSIX_SOURCE
-# define _POSIX_SOURCE
-#endif
-#include <fcntl.h>
-
-#ifdef _WIN32
-# include <stddef.h>
-#endif
-
-#if defined(__TURBOC__) || defined(_MSC_VER) || defined(_WIN32)
-# include <io.h>
-#endif
-
-#if defined(_WIN32)
-# define WIDECHAR
-#endif
-
-#ifdef WINAPI_FAMILY
-# define open _open
-# define read _read
-# define write _write
-# define close _close
-#endif
-
-#ifdef NO_DEFLATE /* for compatibility with old definition */
-# define NO_GZCOMPRESS
-#endif
-
-#if defined(STDC99) || (defined(__TURBOC__) && __TURBOC__ >= 0x550)
-# ifndef HAVE_VSNPRINTF
-# define HAVE_VSNPRINTF
-# endif
-#endif
-
-#if defined(__CYGWIN__)
-# ifndef HAVE_VSNPRINTF
-# define HAVE_VSNPRINTF
-# endif
-#endif
-
-#if defined(MSDOS) && defined(__BORLANDC__) && (BORLANDC > 0x410)
-# ifndef HAVE_VSNPRINTF
-# define HAVE_VSNPRINTF
-# endif
-#endif
-
-#ifndef HAVE_VSNPRINTF
-# ifdef MSDOS
-/* vsnprintf may exist on some MS-DOS compilers (DJGPP?),
- but for now we just assume it doesn't. */
-# define NO_vsnprintf
-# endif
-# ifdef __TURBOC__
-# define NO_vsnprintf
-# endif
-# ifdef WIN32
-/* In Win32, vsnprintf is available as the "non-ANSI" _vsnprintf. */
-# if !defined(vsnprintf) && !defined(NO_vsnprintf)
-# if !defined(_MSC_VER) || ( defined(_MSC_VER) && _MSC_VER < 1500 )
-# define vsnprintf _vsnprintf
-# endif
-# endif
-# endif
-# ifdef __SASC
-# define NO_vsnprintf
-# endif
-# ifdef VMS
-# define NO_vsnprintf
-# endif
-# ifdef __OS400__
-# define NO_vsnprintf
-# endif
-# ifdef __MVS__
-# define NO_vsnprintf
-# endif
-#endif
-
-/* unlike snprintf (which is required in C99), _snprintf does not guarantee
- null termination of the result -- however this is only used in gzlib.c where
- the result is assured to fit in the space provided */
-#if defined(_MSC_VER) && _MSC_VER < 1900
-# define snprintf _snprintf
-#endif
-
-#ifndef local
-# define local static
-#endif
-/* since "static" is used to mean two completely different things in C, we
- define "local" for the non-static meaning of "static", for readability
- (compile with -Dlocal if your debugger can't find static symbols) */
-
-/* gz* functions always use library allocation functions */
-#ifndef STDC
- extern voidp malloc OF((uInt size));
- extern void free OF((voidpf ptr));
-#endif
-
-/* get errno and strerror definition */
-#if defined UNDER_CE
-# include <windows.h>
-# define zstrerror() gz_strwinerror((DWORD)GetLastError())
-#else
-# ifndef NO_STRERROR
-# include <errno.h>
-# define zstrerror() strerror(errno)
-# else
-# define zstrerror() "stdio error (consult errno)"
-# endif
-#endif
-
-/* provide prototypes for these when building zlib without LFS */
-#if !defined(_LARGEFILE64_SOURCE) || _LFS64_LARGEFILE-0 == 0
- ZEXTERN gzFile ZEXPORT gzopen64 OF((const char *, const char *));
- ZEXTERN z_off64_t ZEXPORT gzseek64 OF((gzFile, z_off64_t, int));
- ZEXTERN z_off64_t ZEXPORT gztell64 OF((gzFile));
- ZEXTERN z_off64_t ZEXPORT gzoffset64 OF((gzFile));
-#endif
-
-/* default memLevel */
-#if MAX_MEM_LEVEL >= 8
-# define DEF_MEM_LEVEL 8
-#else
-# define DEF_MEM_LEVEL MAX_MEM_LEVEL
-#endif
-
-/* default i/o buffer size -- double this for output when reading (this and
- twice this must be able to fit in an unsigned type) */
-#define GZBUFSIZE 8192
-
-/* gzip modes, also provide a little integrity check on the passed structure */
-#define GZ_NONE 0
-#define GZ_READ 7247
-#define GZ_WRITE 31153
-#define GZ_APPEND 1 /* mode set to GZ_WRITE after the file is opened */
-
-/* values for gz_state how */
-#define LOOK 0 /* look for a gzip header */
-#define COPY__ 1 /* copy input directly */
-#define GZIP 2 /* decompress a gzip stream */
-
-/* internal gzip file state data structure */
-typedef struct {
- /* exposed contents for gzgetc() macro */
- struct gzFile_s x; /* "x" for exposed */
- /* x.have: number of bytes available at x.next */
- /* x.next: next output data to deliver or write */
- /* x.pos: current position in uncompressed data */
- /* used for both reading and writing */
- int mode; /* see gzip modes above */
- int fd; /* file descriptor */
- char *path; /* path or fd for error messages */
- unsigned size; /* buffer size, zero if not allocated yet */
- unsigned want; /* requested buffer size, default is GZBUFSIZE */
- unsigned char *in; /* input buffer (double-sized when writing) */
- unsigned char *out; /* output buffer (double-sized when reading) */
- int direct; /* 0 if processing gzip, 1 if transparent */
- /* just for reading */
- int how; /* 0: get header, 1: copy, 2: decompress */
- z_off64_t start; /* where the gzip data started, for rewinding */
- int eof; /* true if end of input file reached */
- int past; /* true if read requested past end */
- /* just for writing */
- int level; /* compression level */
- int strategy; /* compression strategy */
- int reset; /* true if a reset is pending after a Z_FINISH */
- /* seek request */
- z_off64_t skip; /* amount to skip (already rewound if backwards) */
- int seek; /* true if seek request pending */
- /* error information */
- int err; /* error code */
- char *msg; /* error message */
- /* zlib inflate or deflate stream */
- z_stream strm; /* stream structure in-place (not a pointer) */
-} gz_state;
-typedef gz_state FAR *gz_statep;
-
-/* shared functions */
-void ZLIB_INTERNAL gz_error OF((gz_statep, int, const char *));
-#if defined UNDER_CE
-char ZLIB_INTERNAL *gz_strwinerror OF((DWORD error));
-#endif
-
-/* GT_OFF(x), where x is an unsigned value, is true if x > maximum z_off64_t
- value -- needed when comparing unsigned to z_off64_t, which is signed
- (possible z_off64_t types off_t, off64_t, and long are all signed) */
-#ifdef INT_MAX
-# define GT_OFF(x) (sizeof(int) == sizeof(z_off64_t) && (x) > INT_MAX)
-#else
-unsigned ZLIB_INTERNAL gz_intmax OF((void));
-# define GT_OFF(x) (sizeof(int) == sizeof(z_off64_t) && (x) > gz_intmax())
-#endif
diff --git a/thirdparty/freetype/src/gzip/infback.c b/thirdparty/freetype/src/gzip/infback.c
deleted file mode 100644
index 5fb8c67941..0000000000
--- a/thirdparty/freetype/src/gzip/infback.c
+++ /dev/null
@@ -1,641 +0,0 @@
-/* infback.c -- inflate using a call-back interface
- * Copyright (C) 1995-2022 Mark Adler
- * For conditions of distribution and use, see copyright notice in zlib.h
- */
-
-/*
- This code is largely copied from inflate.c. Normally either infback.o or
- inflate.o would be linked into an application--not both. The interface
- with inffast.c is retained so that optimized assembler-coded versions of
- inflate_fast() can be used with either inflate.c or infback.c.
- */
-
-#include "zutil.h"
-#include "inftrees.h"
-#include "inflate.h"
-#include "inffast.h"
-
-/* function prototypes */
-local void fixedtables OF((struct inflate_state FAR *state));
-
-/*
- strm provides memory allocation functions in zalloc and zfree, or
- Z_NULL to use the library memory allocation functions.
-
- windowBits is in the range 8..15, and window is a user-supplied
- window and output buffer that is 2**windowBits bytes.
- */
-int ZEXPORT inflateBackInit_(
- z_streamp strm,
- int windowBits,
- unsigned char FAR *window,
- const char *version,
- int stream_size)
-{
- struct inflate_state FAR *state;
-
- if (version == Z_NULL || version[0] != ZLIB_VERSION[0] ||
- stream_size != (int)(sizeof(z_stream)))
- return Z_VERSION_ERROR;
- if (strm == Z_NULL || window == Z_NULL ||
- windowBits < 8 || windowBits > 15)
- return Z_STREAM_ERROR;
- strm->msg = Z_NULL; /* in case we return an error */
- if (strm->zalloc == (alloc_func)0) {
-#ifdef Z_SOLO
- return Z_STREAM_ERROR;
-#else
- strm->zalloc = zcalloc;
- strm->opaque = (voidpf)0;
-#endif
- }
- if (strm->zfree == (free_func)0)
-#ifdef Z_SOLO
- return Z_STREAM_ERROR;
-#else
- strm->zfree = zcfree;
-#endif
- state = (struct inflate_state FAR *)ZALLOC(strm, 1,
- sizeof(struct inflate_state));
- if (state == Z_NULL) return Z_MEM_ERROR;
- Tracev((stderr, "inflate: allocated\n"));
- strm->state = (struct internal_state FAR *)state;
- state->dmax = 32768U;
- state->wbits = (uInt)windowBits;
- state->wsize = 1U << windowBits;
- state->window = window;
- state->wnext = 0;
- state->whave = 0;
- return Z_OK;
-}
-
-/*
- Return state with length and distance decoding tables and index sizes set to
- fixed code decoding. Normally this returns fixed tables from inffixed.h.
- If BUILDFIXED is defined, then instead this routine builds the tables the
- first time it's called, and returns those tables the first time and
- thereafter. This reduces the size of the code by about 2K bytes, in
- exchange for a little execution time. However, BUILDFIXED should not be
-   used for threaded applications, since the rewriting of the tables and the
-   static virgin flag may not be thread-safe.
- */
-local void fixedtables(
- struct inflate_state FAR *state)
-{
-#ifdef BUILDFIXED
- static int virgin = 1;
- static code *lenfix, *distfix;
- static code fixed[544];
-
- /* build fixed huffman tables if first call (may not be thread safe) */
- if (virgin) {
- unsigned sym, bits;
- static code *next;
-
- /* literal/length table */
- sym = 0;
- while (sym < 144) state->lens[sym++] = 8;
- while (sym < 256) state->lens[sym++] = 9;
- while (sym < 280) state->lens[sym++] = 7;
- while (sym < 288) state->lens[sym++] = 8;
- next = fixed;
- lenfix = next;
- bits = 9;
- inflate_table(LENS, state->lens, 288, &(next), &(bits), state->work);
-
- /* distance table */
- sym = 0;
- while (sym < 32) state->lens[sym++] = 5;
- distfix = next;
- bits = 5;
- inflate_table(DISTS, state->lens, 32, &(next), &(bits), state->work);
-
- /* do this just once */
- virgin = 0;
- }
-#else /* !BUILDFIXED */
-# include "inffixed.h"
-#endif /* BUILDFIXED */
- state->lencode = lenfix;
- state->lenbits = 9;
- state->distcode = distfix;
- state->distbits = 5;
-}
-
-/* Macros for inflateBack(): */
-
-/* Load returned state from inflate_fast() */
-#define LOAD() \
- do { \
- put = strm->next_out; \
- left = strm->avail_out; \
- next = strm->next_in; \
- have = strm->avail_in; \
- hold = state->hold; \
- bits = state->bits; \
- } while (0)
-
-/* Set state from registers for inflate_fast() */
-#define RESTORE() \
- do { \
- strm->next_out = put; \
- strm->avail_out = left; \
- strm->next_in = next; \
- strm->avail_in = have; \
- state->hold = hold; \
- state->bits = bits; \
- } while (0)
-
-/* Clear the input bit accumulator */
-#define INITBITS() \
- do { \
- hold = 0; \
- bits = 0; \
- } while (0)
-
-/* Assure that some input is available. If input is requested, but denied,
- then return a Z_BUF_ERROR from inflateBack(). */
-#define PULL() \
- do { \
- if (have == 0) { \
- have = in(in_desc, &next); \
- if (have == 0) { \
- next = Z_NULL; \
- ret = Z_BUF_ERROR; \
- goto inf_leave; \
- } \
- } \
- } while (0)
-
-/* Get a byte of input into the bit accumulator, or return from inflateBack()
- with an error if there is no input available. */
-#define PULLBYTE() \
- do { \
- PULL(); \
- have--; \
- hold += (unsigned long)(*next++) << bits; \
- bits += 8; \
- } while (0)
-
-/* Assure that there are at least n bits in the bit accumulator. If there is
- not enough available input to do that, then return from inflateBack() with
- an error. */
-#define NEEDBITS(n) \
- do { \
- while (bits < (unsigned)(n)) \
- PULLBYTE(); \
- } while (0)
-
-/* Return the low n bits of the bit accumulator (n < 16) */
-#define BITS(n) \
- ((unsigned)hold & ((1U << (n)) - 1))
-
-/* Remove n bits from the bit accumulator */
-#define DROPBITS(n) \
- do { \
- hold >>= (n); \
- bits -= (unsigned)(n); \
- } while (0)
-
-/* Remove zero to seven bits as needed to go to a byte boundary */
-#define BYTEBITS() \
- do { \
- hold >>= bits & 7; \
- bits -= bits & 7; \
- } while (0)
-
-/* Assure that some output space is available, by writing out the window
- if it's full. If the write fails, return from inflateBack() with a
- Z_BUF_ERROR. */
-#define ROOM() \
- do { \
- if (left == 0) { \
- put = state->window; \
- left = state->wsize; \
- state->whave = left; \
- if (out(out_desc, put, left)) { \
- ret = Z_BUF_ERROR; \
- goto inf_leave; \
- } \
- } \
- } while (0)
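
An editorial aside: the PULL/PULLBYTE/NEEDBITS/BITS/DROPBITS macros above all share one bit-accumulator convention -- whole bytes are shifted into hold above the bits already present, fields are read from the low end, and DROPBITS shifts them out. A minimal standalone sketch of that pattern follows; the field widths and input bytes are made up for illustration and are not zlib code.

#include <stdio.h>

int main(void)
{
    /* Hypothetical input: a 3-bit field (value 5) followed by an 11-bit
       field (value 1234), packed least-significant-bit first. */
    const unsigned char buf[] = { 0x95, 0x26 };
    unsigned long hold = 0;   /* bit accumulator, like 'hold' above */
    unsigned bits = 0;        /* number of valid bits in hold       */
    unsigned pos = 0;
    unsigned field1, field2;

    /* NEEDBITS(3): pull whole bytes until at least 3 bits are present */
    while (bits < 3) { hold += (unsigned long)buf[pos++] << bits; bits += 8; }
    field1 = (unsigned)hold & ((1U << 3) - 1);      /* BITS(3)     */
    hold >>= 3; bits -= 3;                          /* DROPBITS(3) */

    /* same steps again for the 11-bit field */
    while (bits < 11) { hold += (unsigned long)buf[pos++] << bits; bits += 8; }
    field2 = (unsigned)hold & ((1U << 11) - 1);
    hold >>= 11; bits -= 11;

    printf("field1=%u field2=%u\n", field1, field2);  /* prints 5 and 1234 */
    return 0;
}

The PULL()/PULLBYTE() macros above wrap the same byte pull with the end-of-input handling that inflateBack() needs.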
-
-/*
- strm provides the memory allocation functions and window buffer on input,
- and provides information on the unused input on return. For Z_DATA_ERROR
- returns, strm will also provide an error message.
-
- in() and out() are the call-back input and output functions. When
- inflateBack() needs more input, it calls in(). When inflateBack() has
- filled the window with output, or when it completes with data in the
- window, it calls out() to write out the data. The application must not
- change the provided input until in() is called again or inflateBack()
- returns. The application must not change the window/output buffer until
- inflateBack() returns.
-
- in() and out() are called with a descriptor parameter provided in the
- inflateBack() call. This parameter can be a structure that provides the
- information required to do the read or write, as well as accumulated
- information on the input and output such as totals and check values.
-
- in() should return zero on failure. out() should return non-zero on
- failure. If either in() or out() fails, then inflateBack() returns a
- Z_BUF_ERROR. strm->next_in can be checked for Z_NULL to see whether it
- was in() or out() that caused the error. Otherwise, inflateBack()
- returns Z_STREAM_END on success, Z_DATA_ERROR for a deflate format
- error, or Z_MEM_ERROR if it could not allocate memory for the state.
- inflateBack() can also return Z_STREAM_ERROR if the input parameters
- are not correct, i.e. strm is Z_NULL or the state was not initialized.
- */
-int ZEXPORT inflateBack(
- z_streamp strm,
- in_func in,
- void FAR *in_desc,
- out_func out,
- void FAR *out_desc)
-{
- struct inflate_state FAR *state;
- z_const unsigned char FAR *next; /* next input */
- unsigned char FAR *put; /* next output */
- unsigned have, left; /* available input and output */
- unsigned long hold; /* bit buffer */
- unsigned bits; /* bits in bit buffer */
- unsigned copy; /* number of stored or match bytes to copy */
- unsigned char FAR *from; /* where to copy match bytes from */
- code here; /* current decoding table entry */
- code last; /* parent table entry */
- unsigned len; /* length to copy for repeats, bits to drop */
- int ret; /* return code */
- static const unsigned short order[19] = /* permutation of code lengths */
- {16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15};
-
- /* Check that the strm exists and that the state was initialized */
- if (strm == Z_NULL || strm->state == Z_NULL)
- return Z_STREAM_ERROR;
- state = (struct inflate_state FAR *)strm->state;
-
- /* Reset the state */
- strm->msg = Z_NULL;
- state->mode = TYPE;
- state->last = 0;
- state->whave = 0;
- next = strm->next_in;
- have = next != Z_NULL ? strm->avail_in : 0;
- hold = 0;
- bits = 0;
- put = state->window;
- left = state->wsize;
-
- /* Inflate until end of block marked as last */
- for (;;)
- switch (state->mode) {
- case TYPE:
- /* determine and dispatch block type */
- if (state->last) {
- BYTEBITS();
- state->mode = DONE;
- break;
- }
- NEEDBITS(3);
- state->last = BITS(1);
- DROPBITS(1);
- switch (BITS(2)) {
- case 0: /* stored block */
- Tracev((stderr, "inflate: stored block%s\n",
- state->last ? " (last)" : ""));
- state->mode = STORED;
- break;
- case 1: /* fixed block */
- fixedtables(state);
- Tracev((stderr, "inflate: fixed codes block%s\n",
- state->last ? " (last)" : ""));
- state->mode = LEN; /* decode codes */
- break;
- case 2: /* dynamic block */
- Tracev((stderr, "inflate: dynamic codes block%s\n",
- state->last ? " (last)" : ""));
- state->mode = TABLE;
- break;
- case 3:
- strm->msg = (char *)"invalid block type";
- state->mode = BAD;
- }
- DROPBITS(2);
- break;
-
- case STORED:
- /* get and verify stored block length */
- BYTEBITS(); /* go to byte boundary */
- NEEDBITS(32);
- if ((hold & 0xffff) != ((hold >> 16) ^ 0xffff)) {
- strm->msg = (char *)"invalid stored block lengths";
- state->mode = BAD;
- break;
- }
- state->length = (unsigned)hold & 0xffff;
- Tracev((stderr, "inflate: stored length %u\n",
- state->length));
- INITBITS();
-
- /* copy stored block from input to output */
- while (state->length != 0) {
- copy = state->length;
- PULL();
- ROOM();
- if (copy > have) copy = have;
- if (copy > left) copy = left;
- zmemcpy(put, next, copy);
- have -= copy;
- next += copy;
- left -= copy;
- put += copy;
- state->length -= copy;
- }
- Tracev((stderr, "inflate: stored end\n"));
- state->mode = TYPE;
- break;
-
- case TABLE:
- /* get dynamic table entries descriptor */
- NEEDBITS(14);
- state->nlen = BITS(5) + 257;
- DROPBITS(5);
- state->ndist = BITS(5) + 1;
- DROPBITS(5);
- state->ncode = BITS(4) + 4;
- DROPBITS(4);
-#ifndef PKZIP_BUG_WORKAROUND
- if (state->nlen > 286 || state->ndist > 30) {
- strm->msg = (char *)"too many length or distance symbols";
- state->mode = BAD;
- break;
- }
-#endif
- Tracev((stderr, "inflate: table sizes ok\n"));
-
- /* get code length code lengths (not a typo) */
- state->have = 0;
- while (state->have < state->ncode) {
- NEEDBITS(3);
- state->lens[order[state->have++]] = (unsigned short)BITS(3);
- DROPBITS(3);
- }
- while (state->have < 19)
- state->lens[order[state->have++]] = 0;
- state->next = state->codes;
- state->lencode = (code const FAR *)(state->next);
- state->lenbits = 7;
- ret = inflate_table(CODES, state->lens, 19, &(state->next),
- &(state->lenbits), state->work);
- if (ret) {
- strm->msg = (char *)"invalid code lengths set";
- state->mode = BAD;
- break;
- }
- Tracev((stderr, "inflate: code lengths ok\n"));
-
- /* get length and distance code code lengths */
- state->have = 0;
- while (state->have < state->nlen + state->ndist) {
- for (;;) {
- here = state->lencode[BITS(state->lenbits)];
- if ((unsigned)(here.bits) <= bits) break;
- PULLBYTE();
- }
- if (here.val < 16) {
- DROPBITS(here.bits);
- state->lens[state->have++] = here.val;
- }
- else {
- if (here.val == 16) {
- NEEDBITS(here.bits + 2);
- DROPBITS(here.bits);
- if (state->have == 0) {
- strm->msg = (char *)"invalid bit length repeat";
- state->mode = BAD;
- break;
- }
- len = (unsigned)(state->lens[state->have - 1]);
- copy = 3 + BITS(2);
- DROPBITS(2);
- }
- else if (here.val == 17) {
- NEEDBITS(here.bits + 3);
- DROPBITS(here.bits);
- len = 0;
- copy = 3 + BITS(3);
- DROPBITS(3);
- }
- else {
- NEEDBITS(here.bits + 7);
- DROPBITS(here.bits);
- len = 0;
- copy = 11 + BITS(7);
- DROPBITS(7);
- }
- if (state->have + copy > state->nlen + state->ndist) {
- strm->msg = (char *)"invalid bit length repeat";
- state->mode = BAD;
- break;
- }
- while (copy--)
- state->lens[state->have++] = (unsigned short)len;
- }
- }
-
- /* handle error breaks in while */
- if (state->mode == BAD) break;
-
- /* check for end-of-block code (better have one) */
- if (state->lens[256] == 0) {
- strm->msg = (char *)"invalid code -- missing end-of-block";
- state->mode = BAD;
- break;
- }
-
- /* build code tables -- note: do not change the lenbits or distbits
- values here (9 and 6) without reading the comments in inftrees.h
- concerning the ENOUGH constants, which depend on those values */
- state->next = state->codes;
- state->lencode = (code const FAR *)(state->next);
- state->lenbits = 9;
- ret = inflate_table(LENS, state->lens, state->nlen, &(state->next),
- &(state->lenbits), state->work);
- if (ret) {
- strm->msg = (char *)"invalid literal/lengths set";
- state->mode = BAD;
- break;
- }
- state->distcode = (code const FAR *)(state->next);
- state->distbits = 6;
- ret = inflate_table(DISTS, state->lens + state->nlen, state->ndist,
- &(state->next), &(state->distbits), state->work);
- if (ret) {
- strm->msg = (char *)"invalid distances set";
- state->mode = BAD;
- break;
- }
- Tracev((stderr, "inflate: codes ok\n"));
- state->mode = LEN;
- /* fallthrough */
-
- case LEN:
- /* use inflate_fast() if we have enough input and output */
- if (have >= 6 && left >= 258) {
- RESTORE();
- if (state->whave < state->wsize)
- state->whave = state->wsize - left;
- inflate_fast(strm, state->wsize);
- LOAD();
- break;
- }
-
- /* get a literal, length, or end-of-block code */
- for (;;) {
- here = state->lencode[BITS(state->lenbits)];
- if ((unsigned)(here.bits) <= bits) break;
- PULLBYTE();
- }
- if (here.op && (here.op & 0xf0) == 0) {
- last = here;
- for (;;) {
- here = state->lencode[last.val +
- (BITS(last.bits + last.op) >> last.bits)];
- if ((unsigned)(last.bits + here.bits) <= bits) break;
- PULLBYTE();
- }
- DROPBITS(last.bits);
- }
- DROPBITS(here.bits);
- state->length = (unsigned)here.val;
-
- /* process literal */
- if (here.op == 0) {
- Tracevv((stderr, here.val >= 0x20 && here.val < 0x7f ?
- "inflate: literal '%c'\n" :
- "inflate: literal 0x%02x\n", here.val));
- ROOM();
- *put++ = (unsigned char)(state->length);
- left--;
- state->mode = LEN;
- break;
- }
-
- /* process end of block */
- if (here.op & 32) {
- Tracevv((stderr, "inflate: end of block\n"));
- state->mode = TYPE;
- break;
- }
-
- /* invalid code */
- if (here.op & 64) {
- strm->msg = (char *)"invalid literal/length code";
- state->mode = BAD;
- break;
- }
-
- /* length code -- get extra bits, if any */
- state->extra = (unsigned)(here.op) & 15;
- if (state->extra != 0) {
- NEEDBITS(state->extra);
- state->length += BITS(state->extra);
- DROPBITS(state->extra);
- }
- Tracevv((stderr, "inflate: length %u\n", state->length));
-
- /* get distance code */
- for (;;) {
- here = state->distcode[BITS(state->distbits)];
- if ((unsigned)(here.bits) <= bits) break;
- PULLBYTE();
- }
- if ((here.op & 0xf0) == 0) {
- last = here;
- for (;;) {
- here = state->distcode[last.val +
- (BITS(last.bits + last.op) >> last.bits)];
- if ((unsigned)(last.bits + here.bits) <= bits) break;
- PULLBYTE();
- }
- DROPBITS(last.bits);
- }
- DROPBITS(here.bits);
- if (here.op & 64) {
- strm->msg = (char *)"invalid distance code";
- state->mode = BAD;
- break;
- }
- state->offset = (unsigned)here.val;
-
- /* get distance extra bits, if any */
- state->extra = (unsigned)(here.op) & 15;
- if (state->extra != 0) {
- NEEDBITS(state->extra);
- state->offset += BITS(state->extra);
- DROPBITS(state->extra);
- }
- if (state->offset > state->wsize - (state->whave < state->wsize ?
- left : 0)) {
- strm->msg = (char *)"invalid distance too far back";
- state->mode = BAD;
- break;
- }
- Tracevv((stderr, "inflate: distance %u\n", state->offset));
-
- /* copy match from window to output */
- do {
- ROOM();
- copy = state->wsize - state->offset;
- if (copy < left) {
- from = put + copy;
- copy = left - copy;
- }
- else {
- from = put - state->offset;
- copy = left;
- }
- if (copy > state->length) copy = state->length;
- state->length -= copy;
- left -= copy;
- do {
- *put++ = *from++;
- } while (--copy);
- } while (state->length != 0);
- break;
-
- case DONE:
- /* inflate stream terminated properly -- write leftover output */
- ret = Z_STREAM_END;
- if (left < state->wsize) {
- if (out(out_desc, state->window, state->wsize - left))
- ret = Z_BUF_ERROR;
- }
- goto inf_leave;
-
- case BAD:
- ret = Z_DATA_ERROR;
- goto inf_leave;
-
- default: /* can't happen, but makes compilers happy */
- ret = Z_STREAM_ERROR;
- goto inf_leave;
- }
-
- /* Return unused input */
- inf_leave:
- strm->next_in = next;
- strm->avail_in = have;
- return ret;
-}
-
-int ZEXPORT inflateBackEnd(
- z_streamp strm)
-{
- if (strm == Z_NULL || strm->state == Z_NULL || strm->zfree == (free_func)0)
- return Z_STREAM_ERROR;
- ZFREE(strm, strm->state);
- strm->state = Z_NULL;
- Tracev((stderr, "inflate: end\n"));
- return Z_OK;
-}
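
As context for the callback interface documented above, here is a hedged sketch of a caller. The pull/push names, the 16K chunk size, and the stdio plumbing are illustrative assumptions, not code from FreeType or zlib; only the inflateBackInit()/inflateBack()/inflateBackEnd() calls are the actual API.

#include <stdio.h>
#include <string.h>
#include <zlib.h>

/* in() callback: hand zlib the next chunk of raw deflate input.
   Returning 0 signals end of input (or a read error).  The buffer is
   static because the data must stay valid until the next in() call. */
static unsigned pull(void *desc, z_const unsigned char **buf)
{
    static unsigned char in_buf[16384];     /* hypothetical chunk size */
    *buf = in_buf;
    return (unsigned)fread(in_buf, 1, sizeof(in_buf), (FILE *)desc);
}

/* out() callback: consume decompressed bytes; non-zero aborts. */
static int push(void *desc, unsigned char *buf, unsigned len)
{
    return fwrite(buf, 1, len, (FILE *)desc) != len;
}

int main(void)
{
    unsigned char window[32768];            /* must be 1 << windowBits */
    z_stream strm;
    int ret;

    memset(&strm, 0, sizeof(strm));         /* null zalloc/zfree: defaults */
    if (inflateBackInit(&strm, 15, window) != Z_OK)
        return 1;
    /* inflateBack() decodes a raw deflate stream (no zlib/gzip header),
       using the window itself as the output buffer, per the comments above. */
    ret = inflateBack(&strm, pull, stdin, push, stdout);
    inflateBackEnd(&strm);
    return ret == Z_STREAM_END ? 0 : 1;
}

Leaving zalloc/zfree null works because inflateBackInit() installs the default allocators, as the allocation handling earlier in this file shows.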
diff --git a/thirdparty/freetype/src/gzip/inffast.c b/thirdparty/freetype/src/gzip/inffast.c
deleted file mode 100644
index 809737b13c..0000000000
--- a/thirdparty/freetype/src/gzip/inffast.c
+++ /dev/null
@@ -1,323 +0,0 @@
-/* inffast.c -- fast decoding
- * Copyright (C) 1995-2017 Mark Adler
- * For conditions of distribution and use, see copyright notice in zlib.h
- */
-
-#include "zutil.h"
-#include "inftrees.h"
-#include "inflate.h"
-#include "inffast.h"
-
-#ifdef ASMINF
-# pragma message("Assembler code may have bugs -- use at your own risk")
-#else
-
-/*
- Decode literal, length, and distance codes and write out the resulting
- literal and match bytes until either not enough input or output is
- available, an end-of-block is encountered, or a data error is encountered.
- When large enough input and output buffers are supplied to inflate(), for
- example, a 16K input buffer and a 64K output buffer, more than 95% of the
- inflate execution time is spent in this routine.
-
- Entry assumptions:
-
- state->mode == LEN
- strm->avail_in >= 6
- strm->avail_out >= 258
- start >= strm->avail_out
- state->bits < 8
-
- On return, state->mode is one of:
-
- LEN -- ran out of output space or available input
- TYPE -- reached end of block code, inflate() to interpret next block
- BAD -- error in block data
-
- Notes:
-
- - The maximum input bits used by a length/distance pair is 15 bits for the
- length code, 5 bits for the length extra, 15 bits for the distance code,
- and 13 bits for the distance extra. This totals 48 bits, or six bytes.
- Therefore if strm->avail_in >= 6, then there is enough input to avoid
- checking for available input while decoding.
-
- - The maximum bytes that a single length/distance pair can output is 258
- bytes, which is the maximum length that can be coded. inflate_fast()
- requires strm->avail_out >= 258 for each loop to avoid checking for
- output space.
- */
-void ZLIB_INTERNAL inflate_fast(
- z_streamp strm,
- unsigned start)
-{
- struct inflate_state FAR *state;
- z_const unsigned char FAR *in; /* local strm->next_in */
- z_const unsigned char FAR *last; /* have enough input while in < last */
- unsigned char FAR *out; /* local strm->next_out */
- unsigned char FAR *beg; /* inflate()'s initial strm->next_out */
- unsigned char FAR *end; /* while out < end, enough space available */
-#ifdef INFLATE_STRICT
- unsigned dmax; /* maximum distance from zlib header */
-#endif
- unsigned wsize; /* window size or zero if not using window */
- unsigned whave; /* valid bytes in the window */
- unsigned wnext; /* window write index */
- unsigned char FAR *window; /* allocated sliding window, if wsize != 0 */
- unsigned long hold; /* local strm->hold */
- unsigned bits; /* local strm->bits */
- code const FAR *lcode; /* local strm->lencode */
- code const FAR *dcode; /* local strm->distcode */
- unsigned lmask; /* mask for first level of length codes */
- unsigned dmask; /* mask for first level of distance codes */
- code const *here; /* retrieved table entry */
- unsigned op; /* code bits, operation, extra bits, or */
- /* window position, window bytes to copy */
- unsigned len; /* match length, unused bytes */
- unsigned dist; /* match distance */
- unsigned char FAR *from; /* where to copy match from */
-
- /* copy state to local variables */
- state = (struct inflate_state FAR *)strm->state;
- in = strm->next_in;
- last = in + (strm->avail_in - 5);
- out = strm->next_out;
- beg = out - (start - strm->avail_out);
- end = out + (strm->avail_out - 257);
-#ifdef INFLATE_STRICT
- dmax = state->dmax;
-#endif
- wsize = state->wsize;
- whave = state->whave;
- wnext = state->wnext;
- window = state->window;
- hold = state->hold;
- bits = state->bits;
- lcode = state->lencode;
- dcode = state->distcode;
- lmask = (1U << state->lenbits) - 1;
- dmask = (1U << state->distbits) - 1;
-
- /* decode literals and length/distances until end-of-block or not enough
- input data or output space */
- do {
- if (bits < 15) {
- hold += (unsigned long)(*in++) << bits;
- bits += 8;
- hold += (unsigned long)(*in++) << bits;
- bits += 8;
- }
- here = lcode + (hold & lmask);
- dolen:
- op = (unsigned)(here->bits);
- hold >>= op;
- bits -= op;
- op = (unsigned)(here->op);
- if (op == 0) { /* literal */
- Tracevv((stderr, here->val >= 0x20 && here->val < 0x7f ?
- "inflate: literal '%c'\n" :
- "inflate: literal 0x%02x\n", here->val));
- *out++ = (unsigned char)(here->val);
- }
- else if (op & 16) { /* length base */
- len = (unsigned)(here->val);
- op &= 15; /* number of extra bits */
- if (op) {
- if (bits < op) {
- hold += (unsigned long)(*in++) << bits;
- bits += 8;
- }
- len += (unsigned)hold & ((1U << op) - 1);
- hold >>= op;
- bits -= op;
- }
- Tracevv((stderr, "inflate: length %u\n", len));
- if (bits < 15) {
- hold += (unsigned long)(*in++) << bits;
- bits += 8;
- hold += (unsigned long)(*in++) << bits;
- bits += 8;
- }
- here = dcode + (hold & dmask);
- dodist:
- op = (unsigned)(here->bits);
- hold >>= op;
- bits -= op;
- op = (unsigned)(here->op);
- if (op & 16) { /* distance base */
- dist = (unsigned)(here->val);
- op &= 15; /* number of extra bits */
- if (bits < op) {
- hold += (unsigned long)(*in++) << bits;
- bits += 8;
- if (bits < op) {
- hold += (unsigned long)(*in++) << bits;
- bits += 8;
- }
- }
- dist += (unsigned)hold & ((1U << op) - 1);
-#ifdef INFLATE_STRICT
- if (dist > dmax) {
- strm->msg = (char *)"invalid distance too far back";
- state->mode = BAD;
- break;
- }
-#endif
- hold >>= op;
- bits -= op;
- Tracevv((stderr, "inflate: distance %u\n", dist));
- op = (unsigned)(out - beg); /* max distance in output */
- if (dist > op) { /* see if copy from window */
- op = dist - op; /* distance back in window */
- if (op > whave) {
- if (state->sane) {
- strm->msg =
- (char *)"invalid distance too far back";
- state->mode = BAD;
- break;
- }
-#ifdef INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR
- if (len <= op - whave) {
- do {
- *out++ = 0;
- } while (--len);
- continue;
- }
- len -= op - whave;
- do {
- *out++ = 0;
- } while (--op > whave);
- if (op == 0) {
- from = out - dist;
- do {
- *out++ = *from++;
- } while (--len);
- continue;
- }
-#endif
- }
- from = window;
- if (wnext == 0) { /* very common case */
- from += wsize - op;
- if (op < len) { /* some from window */
- len -= op;
- do {
- *out++ = *from++;
- } while (--op);
- from = out - dist; /* rest from output */
- }
- }
- else if (wnext < op) { /* wrap around window */
- from += wsize + wnext - op;
- op -= wnext;
- if (op < len) { /* some from end of window */
- len -= op;
- do {
- *out++ = *from++;
- } while (--op);
- from = window;
- if (wnext < len) { /* some from start of window */
- op = wnext;
- len -= op;
- do {
- *out++ = *from++;
- } while (--op);
- from = out - dist; /* rest from output */
- }
- }
- }
- else { /* contiguous in window */
- from += wnext - op;
- if (op < len) { /* some from window */
- len -= op;
- do {
- *out++ = *from++;
- } while (--op);
- from = out - dist; /* rest from output */
- }
- }
- while (len > 2) {
- *out++ = *from++;
- *out++ = *from++;
- *out++ = *from++;
- len -= 3;
- }
- if (len) {
- *out++ = *from++;
- if (len > 1)
- *out++ = *from++;
- }
- }
- else {
- from = out - dist; /* copy direct from output */
- do { /* minimum length is three */
- *out++ = *from++;
- *out++ = *from++;
- *out++ = *from++;
- len -= 3;
- } while (len > 2);
- if (len) {
- *out++ = *from++;
- if (len > 1)
- *out++ = *from++;
- }
- }
- }
- else if ((op & 64) == 0) { /* 2nd level distance code */
- here = dcode + here->val + (hold & ((1U << op) - 1));
- goto dodist;
- }
- else {
- strm->msg = (char *)"invalid distance code";
- state->mode = BAD;
- break;
- }
- }
- else if ((op & 64) == 0) { /* 2nd level length code */
- here = lcode + here->val + (hold & ((1U << op) - 1));
- goto dolen;
- }
- else if (op & 32) { /* end-of-block */
- Tracevv((stderr, "inflate: end of block\n"));
- state->mode = TYPE;
- break;
- }
- else {
- strm->msg = (char *)"invalid literal/length code";
- state->mode = BAD;
- break;
- }
- } while (in < last && out < end);
-
- /* return unused bytes (on entry, bits < 8, so in won't go too far back) */
- len = bits >> 3;
- in -= len;
- bits -= len << 3;
- hold &= (1U << bits) - 1;
-
- /* update state and return */
- strm->next_in = in;
- strm->next_out = out;
- strm->avail_in = (unsigned)(in < last ? 5 + (last - in) : 5 - (in - last));
- strm->avail_out = (unsigned)(out < end ?
- 257 + (end - out) : 257 - (out - end));
- state->hold = hold;
- state->bits = bits;
- return;
-}
-
-/*
- inflate_fast() speedups that turned out slower (on a PowerPC G3 750CXe):
- - Using bit fields for code structure
- - Different op definition to avoid & for extra bits (do & for table bits)
- - Three separate decoding do-loops for direct, window, and wnext == 0
- - Special case for distance > 1 copies to do overlapped load and store copy
- - Explicit branch predictions (based on measured branch probabilities)
- - Deferring match copy and interspersing it with decoding subsequent codes
- - Swapping literal/length else
- - Swapping window/direct else
- - Larger unrolled copy loops (three is about right)
- - Moving len -= 3 statement into middle of loop
- */
-
-#endif /* !ASMINF */
diff --git a/thirdparty/freetype/src/gzip/inffast.h b/thirdparty/freetype/src/gzip/inffast.h
deleted file mode 100644
index e5c1aa4ca8..0000000000
--- a/thirdparty/freetype/src/gzip/inffast.h
+++ /dev/null
@@ -1,11 +0,0 @@
-/* inffast.h -- header to use inffast.c
- * Copyright (C) 1995-2003, 2010 Mark Adler
- * For conditions of distribution and use, see copyright notice in zlib.h
- */
-
-/* WARNING: this file should *not* be used by applications. It is
- part of the implementation of the compression library and is
- subject to change. Applications should only use zlib.h.
- */
-
-void ZLIB_INTERNAL inflate_fast OF((z_streamp strm, unsigned start));
diff --git a/thirdparty/freetype/src/gzip/inffixed.h b/thirdparty/freetype/src/gzip/inffixed.h
deleted file mode 100644
index d628327769..0000000000
--- a/thirdparty/freetype/src/gzip/inffixed.h
+++ /dev/null
@@ -1,94 +0,0 @@
- /* inffixed.h -- table for decoding fixed codes
- * Generated automatically by makefixed().
- */
-
- /* WARNING: this file should *not* be used by applications.
- It is part of the implementation of this library and is
- subject to change. Applications should only use zlib.h.
- */
-
- static const code lenfix[512] = {
- {96,7,0},{0,8,80},{0,8,16},{20,8,115},{18,7,31},{0,8,112},{0,8,48},
- {0,9,192},{16,7,10},{0,8,96},{0,8,32},{0,9,160},{0,8,0},{0,8,128},
- {0,8,64},{0,9,224},{16,7,6},{0,8,88},{0,8,24},{0,9,144},{19,7,59},
- {0,8,120},{0,8,56},{0,9,208},{17,7,17},{0,8,104},{0,8,40},{0,9,176},
- {0,8,8},{0,8,136},{0,8,72},{0,9,240},{16,7,4},{0,8,84},{0,8,20},
- {21,8,227},{19,7,43},{0,8,116},{0,8,52},{0,9,200},{17,7,13},{0,8,100},
- {0,8,36},{0,9,168},{0,8,4},{0,8,132},{0,8,68},{0,9,232},{16,7,8},
- {0,8,92},{0,8,28},{0,9,152},{20,7,83},{0,8,124},{0,8,60},{0,9,216},
- {18,7,23},{0,8,108},{0,8,44},{0,9,184},{0,8,12},{0,8,140},{0,8,76},
- {0,9,248},{16,7,3},{0,8,82},{0,8,18},{21,8,163},{19,7,35},{0,8,114},
- {0,8,50},{0,9,196},{17,7,11},{0,8,98},{0,8,34},{0,9,164},{0,8,2},
- {0,8,130},{0,8,66},{0,9,228},{16,7,7},{0,8,90},{0,8,26},{0,9,148},
- {20,7,67},{0,8,122},{0,8,58},{0,9,212},{18,7,19},{0,8,106},{0,8,42},
- {0,9,180},{0,8,10},{0,8,138},{0,8,74},{0,9,244},{16,7,5},{0,8,86},
- {0,8,22},{64,8,0},{19,7,51},{0,8,118},{0,8,54},{0,9,204},{17,7,15},
- {0,8,102},{0,8,38},{0,9,172},{0,8,6},{0,8,134},{0,8,70},{0,9,236},
- {16,7,9},{0,8,94},{0,8,30},{0,9,156},{20,7,99},{0,8,126},{0,8,62},
- {0,9,220},{18,7,27},{0,8,110},{0,8,46},{0,9,188},{0,8,14},{0,8,142},
- {0,8,78},{0,9,252},{96,7,0},{0,8,81},{0,8,17},{21,8,131},{18,7,31},
- {0,8,113},{0,8,49},{0,9,194},{16,7,10},{0,8,97},{0,8,33},{0,9,162},
- {0,8,1},{0,8,129},{0,8,65},{0,9,226},{16,7,6},{0,8,89},{0,8,25},
- {0,9,146},{19,7,59},{0,8,121},{0,8,57},{0,9,210},{17,7,17},{0,8,105},
- {0,8,41},{0,9,178},{0,8,9},{0,8,137},{0,8,73},{0,9,242},{16,7,4},
- {0,8,85},{0,8,21},{16,8,258},{19,7,43},{0,8,117},{0,8,53},{0,9,202},
- {17,7,13},{0,8,101},{0,8,37},{0,9,170},{0,8,5},{0,8,133},{0,8,69},
- {0,9,234},{16,7,8},{0,8,93},{0,8,29},{0,9,154},{20,7,83},{0,8,125},
- {0,8,61},{0,9,218},{18,7,23},{0,8,109},{0,8,45},{0,9,186},{0,8,13},
- {0,8,141},{0,8,77},{0,9,250},{16,7,3},{0,8,83},{0,8,19},{21,8,195},
- {19,7,35},{0,8,115},{0,8,51},{0,9,198},{17,7,11},{0,8,99},{0,8,35},
- {0,9,166},{0,8,3},{0,8,131},{0,8,67},{0,9,230},{16,7,7},{0,8,91},
- {0,8,27},{0,9,150},{20,7,67},{0,8,123},{0,8,59},{0,9,214},{18,7,19},
- {0,8,107},{0,8,43},{0,9,182},{0,8,11},{0,8,139},{0,8,75},{0,9,246},
- {16,7,5},{0,8,87},{0,8,23},{64,8,0},{19,7,51},{0,8,119},{0,8,55},
- {0,9,206},{17,7,15},{0,8,103},{0,8,39},{0,9,174},{0,8,7},{0,8,135},
- {0,8,71},{0,9,238},{16,7,9},{0,8,95},{0,8,31},{0,9,158},{20,7,99},
- {0,8,127},{0,8,63},{0,9,222},{18,7,27},{0,8,111},{0,8,47},{0,9,190},
- {0,8,15},{0,8,143},{0,8,79},{0,9,254},{96,7,0},{0,8,80},{0,8,16},
- {20,8,115},{18,7,31},{0,8,112},{0,8,48},{0,9,193},{16,7,10},{0,8,96},
- {0,8,32},{0,9,161},{0,8,0},{0,8,128},{0,8,64},{0,9,225},{16,7,6},
- {0,8,88},{0,8,24},{0,9,145},{19,7,59},{0,8,120},{0,8,56},{0,9,209},
- {17,7,17},{0,8,104},{0,8,40},{0,9,177},{0,8,8},{0,8,136},{0,8,72},
- {0,9,241},{16,7,4},{0,8,84},{0,8,20},{21,8,227},{19,7,43},{0,8,116},
- {0,8,52},{0,9,201},{17,7,13},{0,8,100},{0,8,36},{0,9,169},{0,8,4},
- {0,8,132},{0,8,68},{0,9,233},{16,7,8},{0,8,92},{0,8,28},{0,9,153},
- {20,7,83},{0,8,124},{0,8,60},{0,9,217},{18,7,23},{0,8,108},{0,8,44},
- {0,9,185},{0,8,12},{0,8,140},{0,8,76},{0,9,249},{16,7,3},{0,8,82},
- {0,8,18},{21,8,163},{19,7,35},{0,8,114},{0,8,50},{0,9,197},{17,7,11},
- {0,8,98},{0,8,34},{0,9,165},{0,8,2},{0,8,130},{0,8,66},{0,9,229},
- {16,7,7},{0,8,90},{0,8,26},{0,9,149},{20,7,67},{0,8,122},{0,8,58},
- {0,9,213},{18,7,19},{0,8,106},{0,8,42},{0,9,181},{0,8,10},{0,8,138},
- {0,8,74},{0,9,245},{16,7,5},{0,8,86},{0,8,22},{64,8,0},{19,7,51},
- {0,8,118},{0,8,54},{0,9,205},{17,7,15},{0,8,102},{0,8,38},{0,9,173},
- {0,8,6},{0,8,134},{0,8,70},{0,9,237},{16,7,9},{0,8,94},{0,8,30},
- {0,9,157},{20,7,99},{0,8,126},{0,8,62},{0,9,221},{18,7,27},{0,8,110},
- {0,8,46},{0,9,189},{0,8,14},{0,8,142},{0,8,78},{0,9,253},{96,7,0},
- {0,8,81},{0,8,17},{21,8,131},{18,7,31},{0,8,113},{0,8,49},{0,9,195},
- {16,7,10},{0,8,97},{0,8,33},{0,9,163},{0,8,1},{0,8,129},{0,8,65},
- {0,9,227},{16,7,6},{0,8,89},{0,8,25},{0,9,147},{19,7,59},{0,8,121},
- {0,8,57},{0,9,211},{17,7,17},{0,8,105},{0,8,41},{0,9,179},{0,8,9},
- {0,8,137},{0,8,73},{0,9,243},{16,7,4},{0,8,85},{0,8,21},{16,8,258},
- {19,7,43},{0,8,117},{0,8,53},{0,9,203},{17,7,13},{0,8,101},{0,8,37},
- {0,9,171},{0,8,5},{0,8,133},{0,8,69},{0,9,235},{16,7,8},{0,8,93},
- {0,8,29},{0,9,155},{20,7,83},{0,8,125},{0,8,61},{0,9,219},{18,7,23},
- {0,8,109},{0,8,45},{0,9,187},{0,8,13},{0,8,141},{0,8,77},{0,9,251},
- {16,7,3},{0,8,83},{0,8,19},{21,8,195},{19,7,35},{0,8,115},{0,8,51},
- {0,9,199},{17,7,11},{0,8,99},{0,8,35},{0,9,167},{0,8,3},{0,8,131},
- {0,8,67},{0,9,231},{16,7,7},{0,8,91},{0,8,27},{0,9,151},{20,7,67},
- {0,8,123},{0,8,59},{0,9,215},{18,7,19},{0,8,107},{0,8,43},{0,9,183},
- {0,8,11},{0,8,139},{0,8,75},{0,9,247},{16,7,5},{0,8,87},{0,8,23},
- {64,8,0},{19,7,51},{0,8,119},{0,8,55},{0,9,207},{17,7,15},{0,8,103},
- {0,8,39},{0,9,175},{0,8,7},{0,8,135},{0,8,71},{0,9,239},{16,7,9},
- {0,8,95},{0,8,31},{0,9,159},{20,7,99},{0,8,127},{0,8,63},{0,9,223},
- {18,7,27},{0,8,111},{0,8,47},{0,9,191},{0,8,15},{0,8,143},{0,8,79},
- {0,9,255}
- };
-
- static const code distfix[32] = {
- {16,5,1},{23,5,257},{19,5,17},{27,5,4097},{17,5,5},{25,5,1025},
- {21,5,65},{29,5,16385},{16,5,3},{24,5,513},{20,5,33},{28,5,8193},
- {18,5,9},{26,5,2049},{22,5,129},{64,5,0},{16,5,2},{23,5,385},
- {19,5,25},{27,5,6145},{17,5,7},{25,5,1537},{21,5,97},{29,5,24577},
- {16,5,4},{24,5,769},{20,5,49},{28,5,12289},{18,5,13},{26,5,3073},
- {22,5,193},{64,5,0}
- };
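
For reference, the triples in these tables are entries of the code structure defined in inftrees.h, which is not part of this diff; the layout below is reproduced from upstream zlib as an assumption. A small sketch of how one entry is interpreted, mirroring the dispatch in inflate_fast() above (the second-level sub-table case is shown last, as it is in that code):

#include <stdio.h>

/* Assumed layout of a decoding table entry (per upstream zlib inftrees.h). */
typedef struct {
    unsigned char op;     /* operation: see the dispatch below            */
    unsigned char bits;   /* bits consumed by this entry                  */
    unsigned short val;   /* literal value, base value, or sub-table off. */
} code;

int main(void)
{
    /* {0,8,16}, an entry near the start of lenfix above: op 0 marks a
       literal, so decoding it consumes 8 bits and emits the byte 16. */
    code here = {0, 8, 16};

    if (here.op == 0)                          /* literal byte             */
        printf("literal 0x%02x after %u bits\n", here.val, here.bits);
    else if (here.op & 16)                     /* length/distance base     */
        printf("base %u with %u extra bits\n", here.val, here.op & 15);
    else if (here.op & 32)                     /* end-of-block marker      */
        printf("end of block\n");
    else if (here.op & 64)                     /* invalid code             */
        printf("invalid code\n");
    else                                       /* link to second-level table */
        printf("sub-table at offset %u, %u index bits\n", here.val, here.op);
    return 0;
}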
diff --git a/thirdparty/freetype/src/gzip/inflate.c b/thirdparty/freetype/src/gzip/inflate.c
deleted file mode 100644
index 5bf5b815e5..0000000000
--- a/thirdparty/freetype/src/gzip/inflate.c
+++ /dev/null
@@ -1,1610 +0,0 @@
-/* inflate.c -- zlib decompression
- * Copyright (C) 1995-2022 Mark Adler
- * For conditions of distribution and use, see copyright notice in zlib.h
- */
-
-/*
- * Change history:
- *
- * 1.2.beta0 24 Nov 2002
- * - First version -- complete rewrite of inflate to simplify code, avoid
- * creation of window when not needed, minimize use of window when it is
- * needed, make inffast.c even faster, implement gzip decoding, and to
- * improve code readability and style over the previous zlib inflate code
- *
- * 1.2.beta1 25 Nov 2002
- * - Use pointers for available input and output checking in inffast.c
- * - Remove input and output counters in inffast.c
- * - Change inffast.c entry and loop from avail_in >= 7 to >= 6
- * - Remove unnecessary second byte pull from length extra in inffast.c
- * - Unroll direct copy to three copies per loop in inffast.c
- *
- * 1.2.beta2 4 Dec 2002
- * - Change external routine names to reduce potential conflicts
- * - Correct filename to inffixed.h for fixed tables in inflate.c
- * - Make hbuf[] unsigned char to match parameter type in inflate.c
- * - Change strm->next_out[-state->offset] to *(strm->next_out - state->offset)
- * to avoid negation problem on Alphas (64 bit) in inflate.c
- *
- * 1.2.beta3 22 Dec 2002
- * - Add comments on state->bits assertion in inffast.c
- * - Add comments on op field in inftrees.h
- * - Fix bug in reuse of allocated window after inflateReset()
- * - Remove bit fields--back to byte structure for speed
- * - Remove distance extra == 0 check in inflate_fast()--only helps for lengths
- * - Change post-increments to pre-increments in inflate_fast(), PPC biased?
- * - Add compile time option, POSTINC, to use post-increments instead (Intel?)
- * - Make MATCH copy in inflate() much faster for when inflate_fast() not used
- * - Use local copies of stream next and avail values, as well as local bit
- * buffer and bit count in inflate()--for speed when inflate_fast() not used
- *
- * 1.2.beta4 1 Jan 2003
- * - Split ptr - 257 statements in inflate_table() to avoid compiler warnings
- * - Move a comment on output buffer sizes from inffast.c to inflate.c
- * - Add comments in inffast.c to introduce the inflate_fast() routine
- * - Rearrange window copies in inflate_fast() for speed and simplification
- * - Unroll last copy for window match in inflate_fast()
- * - Use local copies of window variables in inflate_fast() for speed
- * - Pull out common wnext == 0 case for speed in inflate_fast()
- * - Make op and len in inflate_fast() unsigned for consistency
- * - Add FAR to lcode and dcode declarations in inflate_fast()
- * - Simplified bad distance check in inflate_fast()
- * - Added inflateBackInit(), inflateBack(), and inflateBackEnd() in new
- * source file infback.c to provide a call-back interface to inflate for
- * programs like gzip and unzip -- uses window as output buffer to avoid
- * window copying
- *
- * 1.2.beta5 1 Jan 2003
- * - Improved inflateBack() interface to allow the caller to provide initial
- * input in strm.
- * - Fixed stored blocks bug in inflateBack()
- *
- * 1.2.beta6 4 Jan 2003
- * - Added comments in inffast.c on effectiveness of POSTINC
- * - Typecasting all around to reduce compiler warnings
- * - Changed loops from while (1) or do {} while (1) to for (;;), again to
- * make compilers happy
- * - Changed type of window in inflateBackInit() to unsigned char *
- *
- * 1.2.beta7 27 Jan 2003
- * - Changed many types to unsigned or unsigned short to avoid warnings
- * - Added inflateCopy() function
- *
- * 1.2.0 9 Mar 2003
- * - Changed inflateBack() interface to provide separate opaque descriptors
- * for the in() and out() functions
- * - Changed inflateBack() argument and in_func typedef to swap the length
- * and buffer address return values for the input function
- * - Check next_in and next_out for Z_NULL on entry to inflate()
- *
- * The history for versions after 1.2.0 is in the ChangeLog in the zlib distribution.
- */
-
-#include "zutil.h"
-#include "inftrees.h"
-#include "inflate.h"
-#include "inffast.h"
-
-#ifdef MAKEFIXED
-# ifndef BUILDFIXED
-# define BUILDFIXED
-# endif
-#endif
-
-/* function prototypes */
-local int inflateStateCheck OF((z_streamp strm));
-local void fixedtables OF((struct inflate_state FAR *state));
-local int updatewindow OF((z_streamp strm, const unsigned char FAR *end,
- unsigned copy));
-#ifdef BUILDFIXED
- void makefixed OF((void));
-#endif
-#ifndef Z_FREETYPE
-local unsigned syncsearch OF((unsigned FAR *have, const unsigned char FAR *buf,
- unsigned len));
-#endif
-
-local int inflateStateCheck(
- z_streamp strm)
-{
- struct inflate_state FAR *state;
- if (strm == Z_NULL ||
- strm->zalloc == (alloc_func)0 || strm->zfree == (free_func)0)
- return 1;
- state = (struct inflate_state FAR *)strm->state;
- if (state == Z_NULL || state->strm != strm ||
- state->mode < HEAD || state->mode > SYNC)
- return 1;
- return 0;
-}
-
-int ZEXPORT inflateResetKeep(
- z_streamp strm)
-{
- struct inflate_state FAR *state;
-
- if (inflateStateCheck(strm)) return Z_STREAM_ERROR;
- state = (struct inflate_state FAR *)strm->state;
- strm->total_in = strm->total_out = state->total = 0;
- strm->msg = Z_NULL;
- if (state->wrap) /* to support ill-conceived Java test suite */
- strm->adler = state->wrap & 1;
- state->mode = HEAD;
- state->last = 0;
- state->havedict = 0;
- state->flags = -1;
- state->dmax = 32768U;
- state->head = Z_NULL;
- state->hold = 0;
- state->bits = 0;
- state->lencode = state->distcode = state->next = state->codes;
- state->sane = 1;
- state->back = -1;
- Tracev((stderr, "inflate: reset\n"));
- return Z_OK;
-}
-
-int ZEXPORT inflateReset(
- z_streamp strm)
-{
- struct inflate_state FAR *state;
-
- if (inflateStateCheck(strm)) return Z_STREAM_ERROR;
- state = (struct inflate_state FAR *)strm->state;
- state->wsize = 0;
- state->whave = 0;
- state->wnext = 0;
- return inflateResetKeep(strm);
-}
-
-int ZEXPORT inflateReset2(
- z_streamp strm,
- int windowBits)
-{
- int wrap;
- struct inflate_state FAR *state;
-
- /* get the state */
- if (inflateStateCheck(strm)) return Z_STREAM_ERROR;
- state = (struct inflate_state FAR *)strm->state;
-
- /* extract wrap request from windowBits parameter */
- if (windowBits < 0) {
- wrap = 0;
- windowBits = -windowBits;
- }
- else {
- wrap = (windowBits >> 4) + 5;
-#ifdef GUNZIP
- if (windowBits < 48)
- windowBits &= 15;
-#endif
- }
-
- /* set number of window bits, free window if different */
- if (windowBits && (windowBits < 8 || windowBits > 15))
- return Z_STREAM_ERROR;
- if (state->window != Z_NULL && state->wbits != (unsigned)windowBits) {
- ZFREE(strm, state->window);
- state->window = Z_NULL;
- }
-
- /* update state and reset the rest of it */
- state->wrap = wrap;
- state->wbits = (unsigned)windowBits;
- return inflateReset(strm);
-}
-
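
The wrap-extraction logic above decodes the windowBits conventions documented in zlib.h: 8..15 selects a zlib-wrapped stream, a negative value selects raw deflate with no header, and adding 16 or 32 requests a gzip wrapper or automatic zlib/gzip detection (the GUNZIP paths). A hedged usage sketch, not FreeType code:

#include <string.h>
#include <zlib.h>

int main(void)
{
    z_stream strm;

    /* windowBits 15: zlib wrapper, 32K sliding window. */
    memset(&strm, 0, sizeof(strm));
    if (inflateInit2(&strm, 15) == Z_OK) inflateEnd(&strm);

    /* Negative windowBits: raw deflate, so wrap is set to 0 above. */
    memset(&strm, 0, sizeof(strm));
    if (inflateInit2(&strm, -15) == Z_OK) inflateEnd(&strm);

    /* 15 + 32: auto-detect zlib or gzip; 15 + 16 would force gzip. */
    memset(&strm, 0, sizeof(strm));
    if (inflateInit2(&strm, 15 + 32) == Z_OK) inflateEnd(&strm);

    return 0;
}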
-int ZEXPORT inflateInit2_(
- z_streamp strm,
- int windowBits,
- const char *version,
- int stream_size)
-{
- int ret;
- struct inflate_state FAR *state;
-
- if (version == Z_NULL || version[0] != ZLIB_VERSION[0] ||
- stream_size != (int)(sizeof(z_stream)))
- return Z_VERSION_ERROR;
- if (strm == Z_NULL) return Z_STREAM_ERROR;
- strm->msg = Z_NULL; /* in case we return an error */
- if (strm->zalloc == (alloc_func)0) {
-#ifdef Z_SOLO
- return Z_STREAM_ERROR;
-#else
- strm->zalloc = zcalloc;
- strm->opaque = (voidpf)0;
-#endif
- }
- if (strm->zfree == (free_func)0)
-#ifdef Z_SOLO
- return Z_STREAM_ERROR;
-#else
- strm->zfree = zcfree;
-#endif
- state = (struct inflate_state FAR *)
- ZALLOC(strm, 1, sizeof(struct inflate_state));
- if (state == Z_NULL) return Z_MEM_ERROR;
- Tracev((stderr, "inflate: allocated\n"));
- strm->state = (struct internal_state FAR *)state;
- state->strm = strm;
- state->window = Z_NULL;
- state->mode = HEAD; /* to pass state test in inflateReset2() */
- ret = inflateReset2(strm, windowBits);
- if (ret != Z_OK) {
- ZFREE(strm, state);
- strm->state = Z_NULL;
- }
- return ret;
-}
-
-int ZEXPORT inflateInit_(
- z_streamp strm,
- const char *version,
- int stream_size)
-{
- return inflateInit2_(strm, DEF_WBITS, version, stream_size);
-}
-
-#ifndef Z_FREETYPE
-
-int ZEXPORT inflatePrime(
- z_streamp strm,
- int bits,
- int value)
-{
- struct inflate_state FAR *state;
-
- if (inflateStateCheck(strm)) return Z_STREAM_ERROR;
- state = (struct inflate_state FAR *)strm->state;
- if (bits < 0) {
- state->hold = 0;
- state->bits = 0;
- return Z_OK;
- }
- if (bits > 16 || state->bits + (uInt)bits > 32) return Z_STREAM_ERROR;
- value &= (1L << bits) - 1;
- state->hold += (unsigned)value << state->bits;
- state->bits += (uInt)bits;
- return Z_OK;
-}
-
-#endif /* !Z_FREETYPE */
-
-/*
- Return state with length and distance decoding tables and index sizes set to
- fixed code decoding. Normally this returns fixed tables from inffixed.h.
- If BUILDFIXED is defined, then instead this routine builds the tables the
- first time it's called, and returns those tables the first time and
- thereafter. This reduces the size of the code by about 2K bytes, in
- exchange for a little execution time. However, BUILDFIXED should not be
- used for threaded applications, since the rewriting of the tables and the
- virgin flag may not be thread-safe.
- */
-local void fixedtables(
- struct inflate_state FAR *state)
-{
-#ifdef BUILDFIXED
- static int virgin = 1;
- static code *lenfix, *distfix;
- static code fixed[544];
-
- /* build fixed huffman tables if first call (may not be thread safe) */
- if (virgin) {
- unsigned sym, bits;
- static code *next;
-
- /* literal/length table */
- sym = 0;
- while (sym < 144) state->lens[sym++] = 8;
- while (sym < 256) state->lens[sym++] = 9;
- while (sym < 280) state->lens[sym++] = 7;
- while (sym < 288) state->lens[sym++] = 8;
- next = fixed;
- lenfix = next;
- bits = 9;
- inflate_table(LENS, state->lens, 288, &(next), &(bits), state->work);
-
- /* distance table */
- sym = 0;
- while (sym < 32) state->lens[sym++] = 5;
- distfix = next;
- bits = 5;
- inflate_table(DISTS, state->lens, 32, &(next), &(bits), state->work);
-
- /* do this just once */
- virgin = 0;
- }
-#else /* !BUILDFIXED */
-# include "inffixed.h"
-#endif /* BUILDFIXED */
- state->lencode = lenfix;
- state->lenbits = 9;
- state->distcode = distfix;
- state->distbits = 5;
-}
-
-#ifdef MAKEFIXED
-#include <stdio.h>
-
-/*
- Write out the inffixed.h that is #include'd above. Defining MAKEFIXED also
- defines BUILDFIXED, so the tables are built on the fly. makefixed() writes
- those tables to stdout, which would be piped to inffixed.h. A small program
- can simply call makefixed to do this:
-
- void makefixed(void);
-
- int main(void)
- {
- makefixed();
- return 0;
- }
-
- Then that can be linked with zlib built with MAKEFIXED defined and run:
-
- a.out > inffixed.h
- */
-void makefixed()
-{
- unsigned low, size;
- struct inflate_state state;
-
- fixedtables(&state);
- puts(" /* inffixed.h -- table for decoding fixed codes");
- puts(" * Generated automatically by makefixed().");
- puts(" */");
- puts("");
- puts(" /* WARNING: this file should *not* be used by applications.");
- puts(" It is part of the implementation of this library and is");
- puts(" subject to change. Applications should only use zlib.h.");
- puts(" */");
- puts("");
- size = 1U << 9;
- printf(" static const code lenfix[%u] = {", size);
- low = 0;
- for (;;) {
- if ((low % 7) == 0) printf("\n ");
- printf("{%u,%u,%d}", (low & 127) == 99 ? 64 : state.lencode[low].op,
- state.lencode[low].bits, state.lencode[low].val);
- if (++low == size) break;
- putchar(',');
- }
- puts("\n };");
- size = 1U << 5;
- printf("\n static const code distfix[%u] = {", size);
- low = 0;
- for (;;) {
- if ((low % 6) == 0) printf("\n ");
- printf("{%u,%u,%d}", state.distcode[low].op, state.distcode[low].bits,
- state.distcode[low].val);
- if (++low == size) break;
- putchar(',');
- }
- puts("\n };");
-}
-#endif /* MAKEFIXED */
-
-/*
- Update the window with the last wsize (normally 32K) bytes written before
- returning. If window does not exist yet, create it. This is only called
- when a window is already in use, or when output has been written during this
- inflate call, but the end of the deflate stream has not been reached yet.
- It is also called to create a window for dictionary data when a dictionary
- is loaded.
-
- Providing output buffers larger than 32K to inflate() should provide a speed
- advantage, since only the last 32K of output is copied to the sliding window
- upon return from inflate(), and since all distances after the first 32K of
- output will fall in the output data, making match copies simpler and faster.
- The advantage may be dependent on the size of the processor's data caches.
- */
-local int updatewindow(
- z_streamp strm,
- const Bytef *end,
- unsigned copy)
-{
- struct inflate_state FAR *state;
- unsigned dist;
-
- state = (struct inflate_state FAR *)strm->state;
-
- /* if it hasn't been done already, allocate space for the window */
- if (state->window == Z_NULL) {
- state->window = (unsigned char FAR *)
- ZALLOC(strm, 1U << state->wbits,
- sizeof(unsigned char));
- if (state->window == Z_NULL) return 1;
- }
-
- /* if window not in use yet, initialize */
- if (state->wsize == 0) {
- state->wsize = 1U << state->wbits;
- state->wnext = 0;
- state->whave = 0;
- }
-
- /* copy state->wsize or less output bytes into the circular window */
- if (copy >= state->wsize) {
- zmemcpy(state->window, end - state->wsize, state->wsize);
- state->wnext = 0;
- state->whave = state->wsize;
- }
- else {
- dist = state->wsize - state->wnext;
- if (dist > copy) dist = copy;
- zmemcpy(state->window + state->wnext, end - copy, dist);
- copy -= dist;
- if (copy) {
- zmemcpy(state->window, end - copy, copy);
- state->wnext = copy;
- state->whave = state->wsize;
- }
- else {
- state->wnext += dist;
- if (state->wnext == state->wsize) state->wnext = 0;
- if (state->whave < state->wsize) state->whave += dist;
- }
- }
- return 0;
-}
-
-/* Macros for inflate(): */
-
-/* check function to use adler32() for zlib or crc32() for gzip */
-#ifdef GUNZIP
-# define UPDATE_CHECK(check, buf, len) \
- (state->flags ? crc32(check, buf, len) : adler32(check, buf, len))
-#else
-# define UPDATE_CHECK(check, buf, len) adler32(check, buf, len)
-#endif
-
-/* check macros for header crc */
-#ifdef GUNZIP
-# define CRC2(check, word) \
- do { \
- hbuf[0] = (unsigned char)(word); \
- hbuf[1] = (unsigned char)((word) >> 8); \
- check = crc32(check, hbuf, 2); \
- } while (0)
-
-# define CRC4(check, word) \
- do { \
- hbuf[0] = (unsigned char)(word); \
- hbuf[1] = (unsigned char)((word) >> 8); \
- hbuf[2] = (unsigned char)((word) >> 16); \
- hbuf[3] = (unsigned char)((word) >> 24); \
- check = crc32(check, hbuf, 4); \
- } while (0)
-#endif
-
-/* Load registers with state in inflate() for speed */
-#define LOAD() \
- do { \
- put = strm->next_out; \
- left = strm->avail_out; \
- next = strm->next_in; \
- have = strm->avail_in; \
- hold = state->hold; \
- bits = state->bits; \
- } while (0)
-
-/* Restore state from registers in inflate() */
-#define RESTORE() \
- do { \
- strm->next_out = put; \
- strm->avail_out = left; \
- strm->next_in = next; \
- strm->avail_in = have; \
- state->hold = hold; \
- state->bits = bits; \
- } while (0)
-
-/* Clear the input bit accumulator */
-#define INITBITS() \
- do { \
- hold = 0; \
- bits = 0; \
- } while (0)
-
-/* Get a byte of input into the bit accumulator, or return from inflate()
- if there is no input available. */
-#define PULLBYTE() \
- do { \
- if (have == 0) goto inf_leave; \
- have--; \
- hold += (unsigned long)(*next++) << bits; \
- bits += 8; \
- } while (0)
-
-/* Assure that there are at least n bits in the bit accumulator. If there is
- not enough available input to do that, then return from inflate(). */
-#define NEEDBITS(n) \
- do { \
- while (bits < (unsigned)(n)) \
- PULLBYTE(); \
- } while (0)
-
-/* Return the low n bits of the bit accumulator (n < 16) */
-#define BITS(n) \
- ((unsigned)hold & ((1U << (n)) - 1))
-
-/* Remove n bits from the bit accumulator */
-#define DROPBITS(n) \
- do { \
- hold >>= (n); \
- bits -= (unsigned)(n); \
- } while (0)
-
-/* Remove zero to seven bits as needed to go to a byte boundary */
-#define BYTEBITS() \
- do { \
- hold >>= bits & 7; \
- bits -= bits & 7; \
- } while (0)
-
-/*
- inflate() uses a state machine to process as much input data and generate as
- much output data as possible before returning. The state machine is
- structured roughly as follows:
-
- for (;;) switch (state) {
- ...
- case STATEn:
- if (not enough input data or output space to make progress)
- return;
- ... make progress ...
- state = STATEm;
- break;
- ...
- }
-
- so when inflate() is called again, the same case is attempted again, and
- if the appropriate resources are provided, the machine proceeds to the
- next state. The NEEDBITS() macro is usually the way the state evaluates
- whether it can proceed or should return. NEEDBITS() does the return if
- the requested bits are not available. The typical use of the BITS macros
- is:
-
- NEEDBITS(n);
- ... do something with BITS(n) ...
- DROPBITS(n);
-
- where NEEDBITS(n) either returns from inflate() if there isn't enough
- input left to load n bits into the accumulator, or it continues. BITS(n)
- gives the low n bits in the accumulator. When done, DROPBITS(n) drops
- the low n bits off the accumulator. INITBITS() clears the accumulator
- and sets the number of available bits to zero. BYTEBITS() discards just
- enough bits to put the accumulator on a byte boundary. After BYTEBITS()
- and a NEEDBITS(8), then BITS(8) would return the next byte in the stream.
-
- NEEDBITS(n) uses PULLBYTE() to get an available byte of input, or to return
- if there is no input available. The decoding of variable length codes uses
- PULLBYTE() directly in order to pull just enough bytes to decode the next
- code, and no more.
-
- Some states loop until they get enough input, making sure that enough
- state information is maintained to continue the loop where it left off
- if NEEDBITS() returns in the loop. For example, want, need, and keep
- would all have to actually be part of the saved state in case NEEDBITS()
- returns:
-
- case STATEw:
- while (want < need) {
- NEEDBITS(n);
- keep[want++] = BITS(n);
- DROPBITS(n);
- }
- state = STATEx;
- case STATEx:
-
- As shown above, if the next state is also the next case, then the break
- is omitted.
-
- A state may also return if there is not enough output space available to
- complete that state. Those states are copying stored data, writing a
- literal byte, and copying a matching string.
-
- When returning, a "goto inf_leave" is used to update the total counters,
- update the check value, and determine whether any progress has been made
- during that inflate() call in order to return the proper return code.
- Progress is defined as a change in either strm->avail_in or strm->avail_out.
- When there is a window, goto inf_leave will update the window with the last
- output written. If a goto inf_leave occurs in the middle of decompression
- and there is no window currently, goto inf_leave will create one and copy
- output to the window for the next call of inflate().
-
- In this implementation, the flush parameter of inflate() only affects the
- return code (per zlib.h). inflate() always writes as much as possible to
- strm->next_out, given the space available and the provided input--the effect
- documented in zlib.h of Z_SYNC_FLUSH. Furthermore, inflate() always defers
- the allocation of and copying into a sliding window until necessary, which
- provides the effect documented in zlib.h for Z_FINISH when the entire input
- stream is available. So the only thing the flush parameter actually does is:
- when flush is set to Z_FINISH, inflate() cannot return Z_OK. Instead it
- will return Z_BUF_ERROR if it has not reached the end of the stream.
- */
-
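
To complement the state-machine description above, here is a hedged sketch of how a caller typically drives inflate(); the buffer sizes and stdio handling are illustrative (following the usual zlib "zpipe" pattern) and are not taken from FreeType.

#include <stdio.h>
#include <string.h>
#include <zlib.h>

/* Decompress a zlib-wrapped stream from src to dst; returns Z_OK or a
   zlib error code. */
static int inflate_stream(FILE *src, FILE *dst)
{
    unsigned char in[16384], out[16384];   /* hypothetical chunk sizes */
    z_stream strm;
    int ret;

    memset(&strm, 0, sizeof(strm));
    ret = inflateInit(&strm);
    if (ret != Z_OK)
        return ret;

    do {
        strm.avail_in = (uInt)fread(in, 1, sizeof(in), src);
        if (strm.avail_in == 0) {          /* ran out of input too early */
            ret = Z_DATA_ERROR;
            break;
        }
        strm.next_in = in;

        /* keep calling inflate() until this input chunk is consumed */
        do {
            strm.next_out = out;
            strm.avail_out = sizeof(out);
            ret = inflate(&strm, Z_NO_FLUSH);
            if (ret == Z_NEED_DICT || ret == Z_DATA_ERROR || ret == Z_MEM_ERROR)
                goto done;
            fwrite(out, 1, sizeof(out) - strm.avail_out, dst);
        } while (strm.avail_out == 0);
    } while (ret != Z_STREAM_END);

done:
    inflateEnd(&strm);
    return ret == Z_STREAM_END ? Z_OK : ret;
}

int main(void)
{
    return inflate_stream(stdin, stdout) == Z_OK ? 0 : 1;
}

Per the flush discussion above, passing Z_FINISH instead of Z_NO_FLUSH only changes the return code: inflate() then reports Z_BUF_ERROR rather than Z_OK until the end of the stream is reached, and with a single output buffer large enough for the whole stream the loop can collapse to one call.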
-int ZEXPORT inflate(
- z_streamp strm,
- int flush)
-{
- struct inflate_state FAR *state;
- z_const unsigned char FAR *next; /* next input */
- unsigned char FAR *put; /* next output */
- unsigned have, left; /* available input and output */
- unsigned long hold; /* bit buffer */
- unsigned bits; /* bits in bit buffer */
- unsigned in, out; /* save starting available input and output */
- unsigned copy; /* number of stored or match bytes to copy */
- unsigned char FAR *from; /* where to copy match bytes from */
- code here; /* current decoding table entry */
- code last; /* parent table entry */
- unsigned len; /* length to copy for repeats, bits to drop */
- int ret; /* return code */
-#ifdef GUNZIP
- unsigned char hbuf[4]; /* buffer for gzip header crc calculation */
-#endif
- static const unsigned short order[19] = /* permutation of code lengths */
- {16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15};
-
- if (inflateStateCheck(strm) || strm->next_out == Z_NULL ||
- (strm->next_in == Z_NULL && strm->avail_in != 0))
- return Z_STREAM_ERROR;
-
- state = (struct inflate_state FAR *)strm->state;
- if (state->mode == TYPE) state->mode = TYPEDO; /* skip check */
- LOAD();
- in = have;
- out = left;
- ret = Z_OK;
- for (;;)
- switch (state->mode) {
- case HEAD:
- if (state->wrap == 0) {
- state->mode = TYPEDO;
- break;
- }
- NEEDBITS(16);
-#ifdef GUNZIP
- if ((state->wrap & 2) && hold == 0x8b1f) { /* gzip header */
- if (state->wbits == 0)
- state->wbits = 15;
- state->check = crc32(0L, Z_NULL, 0);
- CRC2(state->check, hold);
- INITBITS();
- state->mode = FLAGS;
- break;
- }
- if (state->head != Z_NULL)
- state->head->done = -1;
- if (!(state->wrap & 1) || /* check if zlib header allowed */
-#else
- if (
-#endif
- ((BITS(8) << 8) + (hold >> 8)) % 31) {
- strm->msg = (char *)"incorrect header check";
- state->mode = BAD;
- break;
- }
- if (BITS(4) != Z_DEFLATED) {
- strm->msg = (char *)"unknown compression method";
- state->mode = BAD;
- break;
- }
- DROPBITS(4);
- len = BITS(4) + 8;
- if (state->wbits == 0)
- state->wbits = len;
- if (len > 15 || len > state->wbits) {
- strm->msg = (char *)"invalid window size";
- state->mode = BAD;
- break;
- }
- state->dmax = 1U << len;
- state->flags = 0; /* indicate zlib header */
- Tracev((stderr, "inflate: zlib header ok\n"));
- strm->adler = state->check = adler32(0L, Z_NULL, 0);
- state->mode = hold & 0x200 ? DICTID : TYPE;
- INITBITS();
- break;
-#ifdef GUNZIP
- case FLAGS:
- NEEDBITS(16);
- state->flags = (int)(hold);
- if ((state->flags & 0xff) != Z_DEFLATED) {
- strm->msg = (char *)"unknown compression method";
- state->mode = BAD;
- break;
- }
- if (state->flags & 0xe000) {
- strm->msg = (char *)"unknown header flags set";
- state->mode = BAD;
- break;
- }
- if (state->head != Z_NULL)
- state->head->text = (int)((hold >> 8) & 1);
- if ((state->flags & 0x0200) && (state->wrap & 4))
- CRC2(state->check, hold);
- INITBITS();
- state->mode = TIME;
- /* fallthrough */
- case TIME:
- NEEDBITS(32);
- if (state->head != Z_NULL)
- state->head->time = hold;
- if ((state->flags & 0x0200) && (state->wrap & 4))
- CRC4(state->check, hold);
- INITBITS();
- state->mode = OS;
- /* fallthrough */
- case OS:
- NEEDBITS(16);
- if (state->head != Z_NULL) {
- state->head->xflags = (int)(hold & 0xff);
- state->head->os = (int)(hold >> 8);
- }
- if ((state->flags & 0x0200) && (state->wrap & 4))
- CRC2(state->check, hold);
- INITBITS();
- state->mode = EXLEN;
- /* fallthrough */
- case EXLEN:
- if (state->flags & 0x0400) {
- NEEDBITS(16);
- state->length = (unsigned)(hold);
- if (state->head != Z_NULL)
- state->head->extra_len = (unsigned)hold;
- if ((state->flags & 0x0200) && (state->wrap & 4))
- CRC2(state->check, hold);
- INITBITS();
- }
- else if (state->head != Z_NULL)
- state->head->extra = Z_NULL;
- state->mode = EXTRA;
- /* fallthrough */
- case EXTRA:
- if (state->flags & 0x0400) {
- copy = state->length;
- if (copy > have) copy = have;
- if (copy) {
- if (state->head != Z_NULL &&
- state->head->extra != Z_NULL) {
- len = state->head->extra_len - state->length;
- zmemcpy(state->head->extra + len, next,
- len + copy > state->head->extra_max ?
- state->head->extra_max - len : copy);
- }
- if ((state->flags & 0x0200) && (state->wrap & 4))
- state->check = crc32(state->check, next, copy);
- have -= copy;
- next += copy;
- state->length -= copy;
- }
- if (state->length) goto inf_leave;
- }
- state->length = 0;
- state->mode = NAME;
- /* fallthrough */
- case NAME:
- if (state->flags & 0x0800) {
- if (have == 0) goto inf_leave;
- copy = 0;
- do {
- len = (unsigned)(next[copy++]);
- if (state->head != Z_NULL &&
- state->head->name != Z_NULL &&
- state->length < state->head->name_max)
- state->head->name[state->length++] = (Bytef)len;
- } while (len && copy < have);
- if ((state->flags & 0x0200) && (state->wrap & 4))
- state->check = crc32(state->check, next, copy);
- have -= copy;
- next += copy;
- if (len) goto inf_leave;
- }
- else if (state->head != Z_NULL)
- state->head->name = Z_NULL;
- state->length = 0;
- state->mode = COMMENT;
- /* fallthrough */
- case COMMENT:
- if (state->flags & 0x1000) {
- if (have == 0) goto inf_leave;
- copy = 0;
- do {
- len = (unsigned)(next[copy++]);
- if (state->head != Z_NULL &&
- state->head->comment != Z_NULL &&
- state->length < state->head->comm_max)
- state->head->comment[state->length++] = (Bytef)len;
- } while (len && copy < have);
- if ((state->flags & 0x0200) && (state->wrap & 4))
- state->check = crc32(state->check, next, copy);
- have -= copy;
- next += copy;
- if (len) goto inf_leave;
- }
- else if (state->head != Z_NULL)
- state->head->comment = Z_NULL;
- state->mode = HCRC;
- /* fallthrough */
- case HCRC:
- if (state->flags & 0x0200) {
- NEEDBITS(16);
- if ((state->wrap & 4) && hold != (state->check & 0xffff)) {
- strm->msg = (char *)"header crc mismatch";
- state->mode = BAD;
- break;
- }
- INITBITS();
- }
- if (state->head != Z_NULL) {
- state->head->hcrc = (int)((state->flags >> 9) & 1);
- state->head->done = 1;
- }
- strm->adler = state->check = crc32(0L, Z_NULL, 0);
- state->mode = TYPE;
- break;
-#endif
- case DICTID:
- NEEDBITS(32);
- strm->adler = state->check = ZSWAP32(hold);
- INITBITS();
- state->mode = DICT;
- /* fallthrough */
- case DICT:
- if (state->havedict == 0) {
- RESTORE();
- return Z_NEED_DICT;
- }
- strm->adler = state->check = adler32(0L, Z_NULL, 0);
- state->mode = TYPE;
- /* fallthrough */
- case TYPE:
- if (flush == Z_BLOCK || flush == Z_TREES) goto inf_leave;
- /* fallthrough */
- case TYPEDO:
- if (state->last) {
- BYTEBITS();
- state->mode = CHECK;
- break;
- }
- NEEDBITS(3);
- state->last = BITS(1);
- DROPBITS(1);
- switch (BITS(2)) {
- case 0: /* stored block */
- Tracev((stderr, "inflate: stored block%s\n",
- state->last ? " (last)" : ""));
- state->mode = STORED;
- break;
- case 1: /* fixed block */
- fixedtables(state);
- Tracev((stderr, "inflate: fixed codes block%s\n",
- state->last ? " (last)" : ""));
- state->mode = LEN_; /* decode codes */
- if (flush == Z_TREES) {
- DROPBITS(2);
- goto inf_leave;
- }
- break;
- case 2: /* dynamic block */
- Tracev((stderr, "inflate: dynamic codes block%s\n",
- state->last ? " (last)" : ""));
- state->mode = TABLE;
- break;
- case 3:
- strm->msg = (char *)"invalid block type";
- state->mode = BAD;
- }
- DROPBITS(2);
- break;
- case STORED:
- BYTEBITS(); /* go to byte boundary */
- NEEDBITS(32);
- if ((hold & 0xffff) != ((hold >> 16) ^ 0xffff)) {
- strm->msg = (char *)"invalid stored block lengths";
- state->mode = BAD;
- break;
- }
- state->length = (unsigned)hold & 0xffff;
- Tracev((stderr, "inflate: stored length %u\n",
- state->length));
- INITBITS();
- state->mode = COPY_;
- if (flush == Z_TREES) goto inf_leave;
- /* fallthrough */
- case COPY_:
- state->mode = COPY;
- /* fallthrough */
- case COPY:
- copy = state->length;
- if (copy) {
- if (copy > have) copy = have;
- if (copy > left) copy = left;
- if (copy == 0) goto inf_leave;
- zmemcpy(put, next, copy);
- have -= copy;
- next += copy;
- left -= copy;
- put += copy;
- state->length -= copy;
- break;
- }
- Tracev((stderr, "inflate: stored end\n"));
- state->mode = TYPE;
- break;
- case TABLE:
- NEEDBITS(14);
- state->nlen = BITS(5) + 257;
- DROPBITS(5);
- state->ndist = BITS(5) + 1;
- DROPBITS(5);
- state->ncode = BITS(4) + 4;
- DROPBITS(4);
-#ifndef PKZIP_BUG_WORKAROUND
- if (state->nlen > 286 || state->ndist > 30) {
- strm->msg = (char *)"too many length or distance symbols";
- state->mode = BAD;
- break;
- }
-#endif
- Tracev((stderr, "inflate: table sizes ok\n"));
- state->have = 0;
- state->mode = LENLENS;
- /* fallthrough */
- case LENLENS:
- while (state->have < state->ncode) {
- NEEDBITS(3);
- state->lens[order[state->have++]] = (unsigned short)BITS(3);
- DROPBITS(3);
- }
- while (state->have < 19)
- state->lens[order[state->have++]] = 0;
- state->next = state->codes;
- state->lencode = (const code FAR *)(state->next);
- state->lenbits = 7;
- ret = inflate_table(CODES, state->lens, 19, &(state->next),
- &(state->lenbits), state->work);
- if (ret) {
- strm->msg = (char *)"invalid code lengths set";
- state->mode = BAD;
- break;
- }
- Tracev((stderr, "inflate: code lengths ok\n"));
- state->have = 0;
- state->mode = CODELENS;
- /* fallthrough */
- case CODELENS:
- while (state->have < state->nlen + state->ndist) {
- for (;;) {
- here = state->lencode[BITS(state->lenbits)];
- if ((unsigned)(here.bits) <= bits) break;
- PULLBYTE();
- }
- if (here.val < 16) {
- DROPBITS(here.bits);
- state->lens[state->have++] = here.val;
- }
- else {
- if (here.val == 16) {
- NEEDBITS(here.bits + 2);
- DROPBITS(here.bits);
- if (state->have == 0) {
- strm->msg = (char *)"invalid bit length repeat";
- state->mode = BAD;
- break;
- }
- len = state->lens[state->have - 1];
- copy = 3 + BITS(2);
- DROPBITS(2);
- }
- else if (here.val == 17) {
- NEEDBITS(here.bits + 3);
- DROPBITS(here.bits);
- len = 0;
- copy = 3 + BITS(3);
- DROPBITS(3);
- }
- else {
- NEEDBITS(here.bits + 7);
- DROPBITS(here.bits);
- len = 0;
- copy = 11 + BITS(7);
- DROPBITS(7);
- }
- if (state->have + copy > state->nlen + state->ndist) {
- strm->msg = (char *)"invalid bit length repeat";
- state->mode = BAD;
- break;
- }
- while (copy--)
- state->lens[state->have++] = (unsigned short)len;
- }
- }
-
- /* handle error breaks in while */
- if (state->mode == BAD) break;
-
- /* check for end-of-block code (better have one) */
- if (state->lens[256] == 0) {
- strm->msg = (char *)"invalid code -- missing end-of-block";
- state->mode = BAD;
- break;
- }
-
- /* build code tables -- note: do not change the lenbits or distbits
- values here (9 and 6) without reading the comments in inftrees.h
- concerning the ENOUGH constants, which depend on those values */
- state->next = state->codes;
- state->lencode = (const code FAR *)(state->next);
- state->lenbits = 9;
- ret = inflate_table(LENS, state->lens, state->nlen, &(state->next),
- &(state->lenbits), state->work);
- if (ret) {
- strm->msg = (char *)"invalid literal/lengths set";
- state->mode = BAD;
- break;
- }
- state->distcode = (const code FAR *)(state->next);
- state->distbits = 6;
- ret = inflate_table(DISTS, state->lens + state->nlen, state->ndist,
- &(state->next), &(state->distbits), state->work);
- if (ret) {
- strm->msg = (char *)"invalid distances set";
- state->mode = BAD;
- break;
- }
- Tracev((stderr, "inflate: codes ok\n"));
- state->mode = LEN_;
- if (flush == Z_TREES) goto inf_leave;
- /* fallthrough */
- case LEN_:
- state->mode = LEN;
- /* fallthrough */
- case LEN:
- if (have >= 6 && left >= 258) {
- RESTORE();
- inflate_fast(strm, out);
- LOAD();
- if (state->mode == TYPE)
- state->back = -1;
- break;
- }
- state->back = 0;
- for (;;) {
- here = state->lencode[BITS(state->lenbits)];
- if ((unsigned)(here.bits) <= bits) break;
- PULLBYTE();
- }
- if (here.op && (here.op & 0xf0) == 0) {
- last = here;
- for (;;) {
- here = state->lencode[last.val +
- (BITS(last.bits + last.op) >> last.bits)];
- if ((unsigned)(last.bits + here.bits) <= bits) break;
- PULLBYTE();
- }
- DROPBITS(last.bits);
- state->back += last.bits;
- }
- DROPBITS(here.bits);
- state->back += here.bits;
- state->length = (unsigned)here.val;
- if ((int)(here.op) == 0) {
- Tracevv((stderr, here.val >= 0x20 && here.val < 0x7f ?
- "inflate: literal '%c'\n" :
- "inflate: literal 0x%02x\n", here.val));
- state->mode = LIT;
- break;
- }
- if (here.op & 32) {
- Tracevv((stderr, "inflate: end of block\n"));
- state->back = -1;
- state->mode = TYPE;
- break;
- }
- if (here.op & 64) {
- strm->msg = (char *)"invalid literal/length code";
- state->mode = BAD;
- break;
- }
- state->extra = (unsigned)(here.op) & 15;
- state->mode = LENEXT;
- /* fallthrough */
- case LENEXT:
- if (state->extra) {
- NEEDBITS(state->extra);
- state->length += BITS(state->extra);
- DROPBITS(state->extra);
- state->back += state->extra;
- }
- Tracevv((stderr, "inflate: length %u\n", state->length));
- state->was = state->length;
- state->mode = DIST;
- /* fallthrough */
- case DIST:
- for (;;) {
- here = state->distcode[BITS(state->distbits)];
- if ((unsigned)(here.bits) <= bits) break;
- PULLBYTE();
- }
- if ((here.op & 0xf0) == 0) {
- last = here;
- for (;;) {
- here = state->distcode[last.val +
- (BITS(last.bits + last.op) >> last.bits)];
- if ((unsigned)(last.bits + here.bits) <= bits) break;
- PULLBYTE();
- }
- DROPBITS(last.bits);
- state->back += last.bits;
- }
- DROPBITS(here.bits);
- state->back += here.bits;
- if (here.op & 64) {
- strm->msg = (char *)"invalid distance code";
- state->mode = BAD;
- break;
- }
- state->offset = (unsigned)here.val;
- state->extra = (unsigned)(here.op) & 15;
- state->mode = DISTEXT;
- /* fallthrough */
- case DISTEXT:
- if (state->extra) {
- NEEDBITS(state->extra);
- state->offset += BITS(state->extra);
- DROPBITS(state->extra);
- state->back += state->extra;
- }
-#ifdef INFLATE_STRICT
- if (state->offset > state->dmax) {
- strm->msg = (char *)"invalid distance too far back";
- state->mode = BAD;
- break;
- }
-#endif
- Tracevv((stderr, "inflate: distance %u\n", state->offset));
- state->mode = MATCH;
- /* fallthrough */
- case MATCH:
- if (left == 0) goto inf_leave;
- copy = out - left;
- if (state->offset > copy) { /* copy from window */
- copy = state->offset - copy;
- if (copy > state->whave) {
- if (state->sane) {
- strm->msg = (char *)"invalid distance too far back";
- state->mode = BAD;
- break;
- }
-#ifdef INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR
- Trace((stderr, "inflate.c too far\n"));
- copy -= state->whave;
- if (copy > state->length) copy = state->length;
- if (copy > left) copy = left;
- left -= copy;
- state->length -= copy;
- do {
- *put++ = 0;
- } while (--copy);
- if (state->length == 0) state->mode = LEN;
- break;
-#endif
- }
- if (copy > state->wnext) {
- copy -= state->wnext;
- from = state->window + (state->wsize - copy);
- }
- else
- from = state->window + (state->wnext - copy);
- if (copy > state->length) copy = state->length;
- }
- else { /* copy from output */
- from = put - state->offset;
- copy = state->length;
- }
- if (copy > left) copy = left;
- left -= copy;
- state->length -= copy;
- do {
- *put++ = *from++;
- } while (--copy);
- if (state->length == 0) state->mode = LEN;
- break;
- case LIT:
- if (left == 0) goto inf_leave;
- *put++ = (unsigned char)(state->length);
- left--;
- state->mode = LEN;
- break;
- case CHECK:
- if (state->wrap) {
- NEEDBITS(32);
- out -= left;
- strm->total_out += out;
- state->total += out;
- if ((state->wrap & 4) && out)
- strm->adler = state->check =
- UPDATE_CHECK(state->check, put - out, out);
- out = left;
- if ((state->wrap & 4) && (
-#ifdef GUNZIP
- state->flags ? hold :
-#endif
- ZSWAP32(hold)) != state->check) {
- strm->msg = (char *)"incorrect data check";
- state->mode = BAD;
- break;
- }
- INITBITS();
- Tracev((stderr, "inflate: check matches trailer\n"));
- }
-#ifdef GUNZIP
- state->mode = LENGTH;
- /* fallthrough */
- case LENGTH:
- if (state->wrap && state->flags) {
- NEEDBITS(32);
- if ((state->wrap & 4) && hold != (state->total & 0xffffffff)) {
- strm->msg = (char *)"incorrect length check";
- state->mode = BAD;
- break;
- }
- INITBITS();
- Tracev((stderr, "inflate: length matches trailer\n"));
- }
-#endif
- state->mode = DONE;
- /* fallthrough */
- case DONE:
- ret = Z_STREAM_END;
- goto inf_leave;
- case BAD:
- ret = Z_DATA_ERROR;
- goto inf_leave;
- case MEM:
- return Z_MEM_ERROR;
- case SYNC:
- /* fallthrough */
- default:
- return Z_STREAM_ERROR;
- }
-
- /*
- Return from inflate(), updating the total counts and the check value.
- If there was no progress during the inflate() call, return a buffer
- error. Call updatewindow() to create and/or update the window state.
- Note: a memory error from inflate() is non-recoverable.
- */
- inf_leave:
- RESTORE();
- if (state->wsize || (out != strm->avail_out && state->mode < BAD &&
- (state->mode < CHECK || flush != Z_FINISH)))
- if (updatewindow(strm, strm->next_out, out - strm->avail_out)) {
- state->mode = MEM;
- return Z_MEM_ERROR;
- }
- in -= strm->avail_in;
- out -= strm->avail_out;
- strm->total_in += in;
- strm->total_out += out;
- state->total += out;
- if ((state->wrap & 4) && out)
- strm->adler = state->check =
- UPDATE_CHECK(state->check, strm->next_out - out, out);
- strm->data_type = (int)state->bits + (state->last ? 64 : 0) +
- (state->mode == TYPE ? 128 : 0) +
- (state->mode == LEN_ || state->mode == COPY_ ? 256 : 0);
- if (((in == 0 && out == 0) || flush == Z_FINISH) && ret == Z_OK)
- ret = Z_BUF_ERROR;
- return ret;
-}
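
/*
 * Editorial sketch (not part of the deleted FreeType sources): decoding the
 * strm->data_type value packed together at inf_leave above.  Field layout
 * follows that expression: low bits = unused input bits, +64 = last block,
 * +128 = stopped on a block boundary (TYPE), +256 = stopped just after a
 * block header (LEN_/COPY_).  The helper name is hypothetical.
 */
static void describe_data_type(int dt)
{
    int unused_bits   = dt & 63;          /* input bits not yet consumed */
    int in_last_block = (dt & 64) != 0;   /* decoding the final deflate block */
    int at_block_edge = (dt & 128) != 0;  /* waiting at a deflate block boundary */
    int after_header  = (dt & 256) != 0;  /* just past a deflate block header */
    (void)unused_bits; (void)in_last_block;
    (void)at_block_edge; (void)after_header;
}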
-
-int ZEXPORT inflateEnd(
- z_streamp strm)
-{
- struct inflate_state FAR *state;
- if (inflateStateCheck(strm))
- return Z_STREAM_ERROR;
- state = (struct inflate_state FAR *)strm->state;
- if (state->window != Z_NULL) ZFREE(strm, state->window);
- ZFREE(strm, strm->state);
- strm->state = Z_NULL;
- Tracev((stderr, "inflate: end\n"));
- return Z_OK;
-}
-
-#ifndef Z_FREETYPE
-
-int ZEXPORT inflateGetDictionary(
- z_streamp strm,
- Bytef *dictionary,
- uInt *dictLength)
-{
- struct inflate_state FAR *state;
-
- /* check state */
- if (inflateStateCheck(strm)) return Z_STREAM_ERROR;
- state = (struct inflate_state FAR *)strm->state;
-
- /* copy dictionary */
- if (state->whave && dictionary != Z_NULL) {
- zmemcpy(dictionary, state->window + state->wnext,
- state->whave - state->wnext);
- zmemcpy(dictionary + state->whave - state->wnext,
- state->window, state->wnext);
- }
- if (dictLength != Z_NULL)
- *dictLength = state->whave;
- return Z_OK;
-}
-
-int ZEXPORT inflateSetDictionary(
- z_streamp strm,
- const Bytef *dictionary,
- uInt dictLength)
-{
- struct inflate_state FAR *state;
- unsigned long dictid;
- int ret;
-
- /* check state */
- if (inflateStateCheck(strm)) return Z_STREAM_ERROR;
- state = (struct inflate_state FAR *)strm->state;
- if (state->wrap != 0 && state->mode != DICT)
- return Z_STREAM_ERROR;
-
- /* check for correct dictionary identifier */
- if (state->mode == DICT) {
- dictid = adler32(0L, Z_NULL, 0);
- dictid = adler32(dictid, dictionary, dictLength);
- if (dictid != state->check)
- return Z_DATA_ERROR;
- }
-
- /* copy dictionary to window using updatewindow(), which will amend the
- existing dictionary if appropriate */
- ret = updatewindow(strm, dictionary + dictLength, dictLength);
- if (ret) {
- state->mode = MEM;
- return Z_MEM_ERROR;
- }
- state->havedict = 1;
- Tracev((stderr, "inflate: dictionary set\n"));
- return Z_OK;
-}
-
-int ZEXPORT inflateGetHeader(
- z_streamp strm,
- gz_headerp head)
-{
- struct inflate_state FAR *state;
-
- /* check state */
- if (inflateStateCheck(strm)) return Z_STREAM_ERROR;
- state = (struct inflate_state FAR *)strm->state;
- if ((state->wrap & 2) == 0) return Z_STREAM_ERROR;
-
- /* save header structure */
- state->head = head;
- head->done = 0;
- return Z_OK;
-}
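
/*
 * Editorial usage sketch, not code from the deleted file: wiring a gz_header
 * up so the NAME/COMMENT states earlier in this file have somewhere to copy
 * the header strings.  The function name is hypothetical; the gz_header must
 * outlive the subsequent inflate() calls, which is why it is passed in rather
 * than declared locally.
 */
static int request_gzip_name(z_streamp strm, gz_headerp head,
                             Bytef *namebuf, uInt namelen)
{
    head->extra    = Z_NULL;     /* not collecting the extra field */
    head->name     = namebuf;    /* NAME state copies header bytes here */
    head->name_max = namelen;    /* copy is bounded by name_max */
    head->comment  = Z_NULL;     /* skip the comment field */
    return inflateGetHeader(strm, head);  /* head->done set once HCRC runs */
}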
-
-/*
- Search buf[0..len-1] for the pattern: 0, 0, 0xff, 0xff. Return when found
- or when out of input. When called, *have is the number of pattern bytes
- found in order so far, in 0..3. On return *have is updated to the new
- state. If on return *have equals four, then the pattern was found and the
- return value is how many bytes were read including the last byte of the
- pattern. If *have is less than four, then the pattern has not been found
- yet and the return value is len. In the latter case, syncsearch() can be
- called again with more data and the *have state. *have is initialized to
- zero for the first call.
- */
-local unsigned syncsearch(
- unsigned FAR *have,
- const unsigned char FAR *buf,
- unsigned len)
-{
- unsigned got;
- unsigned next;
-
- got = *have;
- next = 0;
- while (next < len && got < 4) {
- if ((int)(buf[next]) == (got < 2 ? 0 : 0xff))
- got++;
- else if (buf[next])
- got = 0;
- else
- got = 4 - got;
- next++;
- }
- *have = got;
- return next;
-}
-
-int ZEXPORT inflateSync(
- z_streamp strm)
-{
- unsigned len; /* number of bytes to look at or looked at */
- int flags; /* temporary to save header status */
- unsigned long in, out; /* temporary to save total_in and total_out */
- unsigned char buf[4]; /* to restore bit buffer to byte string */
- struct inflate_state FAR *state;
-
- /* check parameters */
- if (inflateStateCheck(strm)) return Z_STREAM_ERROR;
- state = (struct inflate_state FAR *)strm->state;
- if (strm->avail_in == 0 && state->bits < 8) return Z_BUF_ERROR;
-
- /* if first time, start search in bit buffer */
- if (state->mode != SYNC) {
- state->mode = SYNC;
- state->hold <<= state->bits & 7;
- state->bits -= state->bits & 7;
- len = 0;
- while (state->bits >= 8) {
- buf[len++] = (unsigned char)(state->hold);
- state->hold >>= 8;
- state->bits -= 8;
- }
- state->have = 0;
- syncsearch(&(state->have), buf, len);
- }
-
- /* search available input */
- len = syncsearch(&(state->have), strm->next_in, strm->avail_in);
- strm->avail_in -= len;
- strm->next_in += len;
- strm->total_in += len;
-
- /* return no joy or set up to restart inflate() on a new block */
- if (state->have != 4) return Z_DATA_ERROR;
- if (state->flags == -1)
- state->wrap = 0; /* if no header yet, treat as raw */
- else
- state->wrap &= ~4; /* no point in computing a check value now */
- flags = state->flags;
- in = strm->total_in; out = strm->total_out;
- inflateReset(strm);
- strm->total_in = in; strm->total_out = out;
- state->flags = flags;
- state->mode = TYPE;
- return Z_OK;
-}
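
/*
 * Editorial recovery sketch (hypothetical helper, not from the deleted file):
 * after inflate() reports Z_DATA_ERROR, an application can use inflateSync()
 * to skip ahead to the next 00 00 FF FF flush point that syncsearch() above
 * looks for, then resume calling inflate() on fresh blocks.
 */
static int resync_after_error(z_streamp strm)
{
    int ret = inflateSync(strm);   /* consumes input while searching */
    if (ret == Z_OK)
        return Z_OK;               /* state->mode is TYPE again; resume inflate() */
    return ret;                    /* Z_BUF_ERROR or Z_DATA_ERROR: feed more input */
}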
-
-#endif /* !Z_FREETYPE */
-
-/*
- Returns true if inflate is currently at the end of a block generated by
- Z_SYNC_FLUSH or Z_FULL_FLUSH. This function is used by one PPP
- implementation to provide an additional safety check. PPP uses
- Z_SYNC_FLUSH but removes the length bytes of the resulting empty stored
- block. When decompressing, PPP checks that at the end of input packet,
- inflate is waiting for these length bytes.
- */
-int ZEXPORT inflateSyncPoint(
- z_streamp strm)
-{
- struct inflate_state FAR *state;
-
- if (inflateStateCheck(strm)) return Z_STREAM_ERROR;
- state = (struct inflate_state FAR *)strm->state;
- return state->mode == STORED && state->bits == 0;
-}
-
-#ifndef Z_FREETYPE
-
-int ZEXPORT inflateCopy(
- z_streamp dest,
- z_streamp source)
-{
- struct inflate_state FAR *state;
- struct inflate_state FAR *copy;
- unsigned char FAR *window;
- unsigned wsize;
-
- /* check input */
- if (inflateStateCheck(source) || dest == Z_NULL)
- return Z_STREAM_ERROR;
- state = (struct inflate_state FAR *)source->state;
-
- /* allocate space */
- copy = (struct inflate_state FAR *)
- ZALLOC(source, 1, sizeof(struct inflate_state));
- if (copy == Z_NULL) return Z_MEM_ERROR;
- window = Z_NULL;
- if (state->window != Z_NULL) {
- window = (unsigned char FAR *)
- ZALLOC(source, 1U << state->wbits, sizeof(unsigned char));
- if (window == Z_NULL) {
- ZFREE(source, copy);
- return Z_MEM_ERROR;
- }
- }
-
- /* copy state */
- zmemcpy((voidpf)dest, (voidpf)source, sizeof(z_stream));
- zmemcpy((voidpf)copy, (voidpf)state, sizeof(struct inflate_state));
- copy->strm = dest;
- if (state->lencode >= state->codes &&
- state->lencode <= state->codes + ENOUGH - 1) {
- copy->lencode = copy->codes + (state->lencode - state->codes);
- copy->distcode = copy->codes + (state->distcode - state->codes);
- }
- copy->next = copy->codes + (state->next - state->codes);
- if (window != Z_NULL) {
- wsize = 1U << state->wbits;
- zmemcpy(window, state->window, wsize);
- }
- copy->window = window;
- dest->state = (struct internal_state FAR *)copy;
- return Z_OK;
-}
-
-#endif /* !Z_FREETYPE */
-
-int ZEXPORT inflateUndermine(
- z_streamp strm,
- int subvert)
-{
- struct inflate_state FAR *state;
-
- if (inflateStateCheck(strm)) return Z_STREAM_ERROR;
- state = (struct inflate_state FAR *)strm->state;
-#ifdef INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR
- state->sane = !subvert;
- return Z_OK;
-#else
- (void)subvert;
- state->sane = 1;
- return Z_DATA_ERROR;
-#endif
-}
-
-int ZEXPORT inflateValidate(
- z_streamp strm,
- int check)
-{
- struct inflate_state FAR *state;
-
- if (inflateStateCheck(strm)) return Z_STREAM_ERROR;
- state = (struct inflate_state FAR *)strm->state;
- if (check && state->wrap)
- state->wrap |= 4;
- else
- state->wrap &= ~4;
- return Z_OK;
-}
-
-#ifndef Z_FREETYPE
-
-long ZEXPORT inflateMark(
- z_streamp strm)
-{
- struct inflate_state FAR *state;
-
- if (inflateStateCheck(strm))
- return -(1L << 16);
- state = (struct inflate_state FAR *)strm->state;
- return (long)(((unsigned long)((long)state->back)) << 16) +
- (state->mode == COPY ? state->length :
- (state->mode == MATCH ? state->was - state->length : 0));
-}
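
/*
 * Editorial helper (hypothetical, not from the deleted file): unpacking the
 * value returned by inflateMark() above, as I read the packing expression:
 * the upper bits are the distance in bits back to the code being processed
 * (or -1 between codes), the low 16 bits are bytes already produced for it.
 */
static void split_mark(long mark, long *bits_back, unsigned *bytes_done)
{
    *bits_back  = mark >> 16;                 /* -1 when not inside a code */
    *bytes_done = (unsigned)(mark & 0xffff);  /* output emitted so far */
}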
-
-unsigned long ZEXPORT inflateCodesUsed(
- z_streamp strm)
-{
- struct inflate_state FAR *state;
- if (inflateStateCheck(strm)) return (unsigned long)-1;
- state = (struct inflate_state FAR *)strm->state;
- return (unsigned long)(state->next - state->codes);
-}
-
-#endif /* !Z_FREETYPE */
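
/*
 * Editorial end-to-end sketch of the decompression loop the routines above
 * implement.  This is generic zlib usage, not code from the deleted FreeType
 * copy; inflateInit2() and the 15 + 32 "auto-detect zlib or gzip" windowBits
 * convention are assumed to be available from the same library build, and the
 * function name is hypothetical.
 */
#include <string.h>
#include "zlib.h"

static int inflate_buffer(const unsigned char *src, size_t srclen,
                          unsigned char *dst, size_t dstlen, size_t *outlen)
{
    z_stream strm;
    int ret;

    memset(&strm, 0, sizeof(strm));        /* zalloc/zfree/opaque become Z_NULL */
    ret = inflateInit2(&strm, 15 + 32);    /* accept zlib or gzip wrapper */
    if (ret != Z_OK)
        return ret;

    strm.next_in   = (Bytef *)src;         /* whole input provided up front */
    strm.avail_in  = (uInt)srclen;
    strm.next_out  = dst;
    strm.avail_out = (uInt)dstlen;

    ret = inflate(&strm, Z_FINISH);        /* single shot: buffers are complete */
    *outlen = strm.total_out;
    inflateEnd(&strm);

    if (ret == Z_STREAM_END)
        return Z_OK;
    return ret == Z_OK ? Z_BUF_ERROR : ret; /* Z_OK here means dst was too small */
}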
diff --git a/thirdparty/freetype/src/gzip/inflate.h b/thirdparty/freetype/src/gzip/inflate.h
deleted file mode 100644
index c6f5a52e16..0000000000
--- a/thirdparty/freetype/src/gzip/inflate.h
+++ /dev/null
@@ -1,131 +0,0 @@
-/* inflate.h -- internal inflate state definition
- * Copyright (C) 1995-2019 Mark Adler
- * For conditions of distribution and use, see copyright notice in zlib.h
- */
-
-#ifndef INFLATE_H
-#define INFLATE_H
-
-/* WARNING: this file should *not* be used by applications. It is
- part of the implementation of the compression library and is
- subject to change. Applications should only use zlib.h.
- */
-
-/* define NO_GZIP when compiling if you want to disable gzip header and
- trailer decoding by inflate(). NO_GZIP would be used to avoid linking in
- the crc code when it is not needed. For shared libraries, gzip decoding
- should be left enabled. */
-#ifndef NO_GZIP
-# define GUNZIP
-#endif
-
-/* Possible inflate modes between inflate() calls */
-typedef enum {
- HEAD = 16180, /* i: waiting for magic header */
- FLAGS, /* i: waiting for method and flags (gzip) */
- TIME, /* i: waiting for modification time (gzip) */
- OS, /* i: waiting for extra flags and operating system (gzip) */
- EXLEN, /* i: waiting for extra length (gzip) */
- EXTRA, /* i: waiting for extra bytes (gzip) */
- NAME, /* i: waiting for end of file name (gzip) */
- COMMENT, /* i: waiting for end of comment (gzip) */
- HCRC, /* i: waiting for header crc (gzip) */
- DICTID, /* i: waiting for dictionary check value */
- DICT, /* waiting for inflateSetDictionary() call */
- TYPE, /* i: waiting for type bits, including last-flag bit */
- TYPEDO, /* i: same, but skip check to exit inflate on new block */
- STORED, /* i: waiting for stored size (length and complement) */
- COPY_, /* i/o: same as COPY below, but only first time in */
- COPY, /* i/o: waiting for input or output to copy stored block */
- TABLE, /* i: waiting for dynamic block table lengths */
- LENLENS, /* i: waiting for code length code lengths */
- CODELENS, /* i: waiting for length/lit and distance code lengths */
- LEN_, /* i: same as LEN below, but only first time in */
- LEN, /* i: waiting for length/lit/eob code */
- LENEXT, /* i: waiting for length extra bits */
- DIST, /* i: waiting for distance code */
- DISTEXT, /* i: waiting for distance extra bits */
- MATCH, /* o: waiting for output space to copy string */
- LIT, /* o: waiting for output space to write literal */
- CHECK, /* i: waiting for 32-bit check value */
- LENGTH, /* i: waiting for 32-bit length (gzip) */
- DONE, /* finished check, done -- remain here until reset */
- BAD, /* got a data error -- remain here until reset */
- MEM, /* got an inflate() memory error -- remain here until reset */
- SYNC /* looking for synchronization bytes to restart inflate() */
-} inflate_mode;
-
-/*
- State transitions between above modes -
-
- (most modes can go to BAD or MEM on error -- not shown for clarity)
-
- Process header:
- HEAD -> (gzip) or (zlib) or (raw)
- (gzip) -> FLAGS -> TIME -> OS -> EXLEN -> EXTRA -> NAME -> COMMENT ->
- HCRC -> TYPE
- (zlib) -> DICTID or TYPE
- DICTID -> DICT -> TYPE
- (raw) -> TYPEDO
- Read deflate blocks:
- TYPE -> TYPEDO -> STORED or TABLE or LEN_ or CHECK
- STORED -> COPY_ -> COPY -> TYPE
- TABLE -> LENLENS -> CODELENS -> LEN_
- LEN_ -> LEN
- Read deflate codes in fixed or dynamic block:
- LEN -> LENEXT or LIT or TYPE
- LENEXT -> DIST -> DISTEXT -> MATCH -> LEN
- LIT -> LEN
- Process trailer:
- CHECK -> LENGTH -> DONE
- */
-
-/* State maintained between inflate() calls -- approximately 7K bytes, not
- including the allocated sliding window, which is up to 32K bytes. */
-struct inflate_state {
- z_streamp strm; /* pointer back to this zlib stream */
- inflate_mode mode; /* current inflate mode */
- int last; /* true if processing last block */
- int wrap; /* bit 0 true for zlib, bit 1 true for gzip,
- bit 2 true to validate check value */
- int havedict; /* true if dictionary provided */
- int flags; /* gzip header method and flags, 0 if zlib, or
- -1 if raw or no header yet */
- unsigned dmax; /* zlib header max distance (INFLATE_STRICT) */
- unsigned long check; /* protected copy of check value */
- unsigned long total; /* protected copy of output count */
- gz_headerp head; /* where to save gzip header information */
- /* sliding window */
- unsigned wbits; /* log base 2 of requested window size */
- unsigned wsize; /* window size or zero if not using window */
- unsigned whave; /* valid bytes in the window */
- unsigned wnext; /* window write index */
- unsigned char FAR *window; /* allocated sliding window, if needed */
- /* bit accumulator */
- unsigned long hold; /* input bit accumulator */
- unsigned bits; /* number of bits in "in" */
- /* for string and stored block copying */
- unsigned length; /* literal or length of data to copy */
- unsigned offset; /* distance back to copy string from */
- /* for table and code decoding */
- unsigned extra; /* extra bits needed */
- /* fixed and dynamic code tables */
- code const FAR *lencode; /* starting table for length/literal codes */
- code const FAR *distcode; /* starting table for distance codes */
- unsigned lenbits; /* index bits for lencode */
- unsigned distbits; /* index bits for distcode */
- /* dynamic table building */
- unsigned ncode; /* number of code length code lengths */
- unsigned nlen; /* number of length code lengths */
- unsigned ndist; /* number of distance code lengths */
- unsigned have; /* number of code lengths in lens[] */
- code FAR *next; /* next available space in codes[] */
- unsigned short lens[320]; /* temporary storage for code lengths */
- unsigned short work[288]; /* work area for code table building */
- code codes[ENOUGH]; /* space for code tables */
- int sane; /* if false, allow invalid distance too far */
- int back; /* bits back of last unprocessed length/lit */
- unsigned was; /* initial length of match */
-};
-
-#endif /* INFLATE_H */
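
/*
 * Editorial reading aid (hypothetical helper, not from the deleted header):
 * the wrap bit-field in struct inflate_state above, spelled out.  Bit 0 =
 * zlib wrapper, bit 1 = gzip wrapper, bit 2 = check-value validation, the
 * bit toggled by inflateValidate() earlier in this diff.
 */
static int wrap_validates_check(const struct inflate_state FAR *state)
{
    return (state->wrap & 4) != 0;   /* adler32/crc32 trailer will be verified */
}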
diff --git a/thirdparty/freetype/src/gzip/inftrees.c b/thirdparty/freetype/src/gzip/inftrees.c
deleted file mode 100644
index 0b58b29b1b..0000000000
--- a/thirdparty/freetype/src/gzip/inftrees.c
+++ /dev/null
@@ -1,304 +0,0 @@
-/* inftrees.c -- generate Huffman trees for efficient decoding
- * Copyright (C) 1995-2022 Mark Adler
- * For conditions of distribution and use, see copyright notice in zlib.h
- */
-
-#include "zutil.h"
-#include "inftrees.h"
-
-#define MAXBITS 15
-
-const char inflate_copyright[] =
- " inflate 1.2.12 Copyright 1995-2022 Mark Adler ";
-/*
- If you use the zlib library in a product, an acknowledgment is welcome
- in the documentation of your product. If for some reason you cannot
- include such an acknowledgment, I would appreciate that you keep this
- copyright string in the executable of your product.
- */
-
-/*
- Build a set of tables to decode the provided canonical Huffman code.
- The code lengths are lens[0..codes-1]. The result starts at *table,
- whose indices are 0..2^bits-1. work is a writable array of at least
- lens shorts, which is used as a work area. type is the type of code
- to be generated, CODES, LENS, or DISTS. On return, zero is success,
- -1 is an invalid code, and +1 means that ENOUGH isn't enough. table
- on return points to the next available entry's address. bits is the
- requested root table index bits, and on return it is the actual root
- table index bits. It will differ if the request is greater than the
- longest code or if it is less than the shortest code.
- */
-int ZLIB_INTERNAL inflate_table(
- codetype type,
- unsigned short FAR *lens,
- unsigned codes,
- code FAR * FAR *table,
- unsigned FAR *bits,
- unsigned short FAR *work)
-{
- unsigned len; /* a code's length in bits */
- unsigned sym; /* index of code symbols */
- unsigned min, max; /* minimum and maximum code lengths */
- unsigned root; /* number of index bits for root table */
- unsigned curr; /* number of index bits for current table */
- unsigned drop; /* code bits to drop for sub-table */
- int left; /* number of prefix codes available */
- unsigned used; /* code entries in table used */
- unsigned huff; /* Huffman code */
- unsigned incr; /* for incrementing code, index */
- unsigned fill; /* index for replicating entries */
- unsigned low; /* low bits for current root entry */
- unsigned mask; /* mask for low root bits */
- code here; /* table entry for duplication */
- code FAR *next; /* next available space in table */
- const unsigned short FAR *base; /* base value table to use */
- const unsigned short FAR *extra; /* extra bits table to use */
- unsigned match; /* use base and extra for symbol >= match */
- unsigned short count[MAXBITS+1]; /* number of codes of each length */
- unsigned short offs[MAXBITS+1]; /* offsets in table for each length */
- static const unsigned short lbase[31] = { /* Length codes 257..285 base */
- 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
- 35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0};
- static const unsigned short lext[31] = { /* Length codes 257..285 extra */
- 16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 18,
- 19, 19, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 16, 199, 202};
- static const unsigned short dbase[32] = { /* Distance codes 0..29 base */
- 1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,
- 257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,
- 8193, 12289, 16385, 24577, 0, 0};
- static const unsigned short dext[32] = { /* Distance codes 0..29 extra */
- 16, 16, 16, 16, 17, 17, 18, 18, 19, 19, 20, 20, 21, 21, 22, 22,
- 23, 23, 24, 24, 25, 25, 26, 26, 27, 27,
- 28, 28, 29, 29, 64, 64};
-
- /*
- Process a set of code lengths to create a canonical Huffman code. The
- code lengths are lens[0..codes-1]. Each length corresponds to the
- symbols 0..codes-1. The Huffman code is generated by first sorting the
- symbols by length from short to long, and retaining the symbol order
- for codes with equal lengths. Then the code starts with all zero bits
- for the first code of the shortest length, and the codes are integer
- increments for the same length, and zeros are appended as the length
- increases. For the deflate format, these bits are stored backwards
- from their more natural integer increment ordering, and so when the
- decoding tables are built in the large loop below, the integer codes
- are incremented backwards.
-
- This routine assumes, but does not check, that all of the entries in
- lens[] are in the range 0..MAXBITS. The caller must assure this.
- 1..MAXBITS is interpreted as that code length. zero means that that
- symbol does not occur in this code.
-
- The codes are sorted by computing a count of codes for each length,
- creating from that a table of starting indices for each length in the
- sorted table, and then entering the symbols in order in the sorted
- table. The sorted table is work[], with that space being provided by
- the caller.
-
- The length counts are used for other purposes as well, i.e. finding
- the minimum and maximum length codes, determining if there are any
- codes at all, checking for a valid set of lengths, and looking ahead
- at length counts to determine sub-table sizes when building the
- decoding tables.
- */
-
- /* accumulate lengths for codes (assumes lens[] all in 0..MAXBITS) */
- for (len = 0; len <= MAXBITS; len++)
- count[len] = 0;
- for (sym = 0; sym < codes; sym++)
- count[lens[sym]]++;
-
- /* bound code lengths, force root to be within code lengths */
- root = *bits;
- for (max = MAXBITS; max >= 1; max--)
- if (count[max] != 0) break;
- if (root > max) root = max;
- if (max == 0) { /* no symbols to code at all */
- here.op = (unsigned char)64; /* invalid code marker */
- here.bits = (unsigned char)1;
- here.val = (unsigned short)0;
- *(*table)++ = here; /* make a table to force an error */
- *(*table)++ = here;
- *bits = 1;
- return 0; /* no symbols, but wait for decoding to report error */
- }
- for (min = 1; min < max; min++)
- if (count[min] != 0) break;
- if (root < min) root = min;
-
- /* check for an over-subscribed or incomplete set of lengths */
- left = 1;
- for (len = 1; len <= MAXBITS; len++) {
- left <<= 1;
- left -= count[len];
- if (left < 0) return -1; /* over-subscribed */
- }
- if (left > 0 && (type == CODES || max != 1))
- return -1; /* incomplete set */
-
- /* generate offsets into symbol table for each length for sorting */
- offs[1] = 0;
- for (len = 1; len < MAXBITS; len++)
- offs[len + 1] = offs[len] + count[len];
-
- /* sort symbols by length, by symbol order within each length */
- for (sym = 0; sym < codes; sym++)
- if (lens[sym] != 0) work[offs[lens[sym]]++] = (unsigned short)sym;
-
- /*
- Create and fill in decoding tables. In this loop, the table being
- filled is at next and has curr index bits. The code being used is huff
- with length len. That code is converted to an index by dropping drop
- bits off of the bottom. For codes where len is less than drop + curr,
- those top drop + curr - len bits are incremented through all values to
- fill the table with replicated entries.
-
- root is the number of index bits for the root table. When len exceeds
- root, sub-tables are created pointed to by the root entry with an index
- of the low root bits of huff. This is saved in low to check for when a
- new sub-table should be started. drop is zero when the root table is
- being filled, and drop is root when sub-tables are being filled.
-
- When a new sub-table is needed, it is necessary to look ahead in the
- code lengths to determine what size sub-table is needed. The length
- counts are used for this, and so count[] is decremented as codes are
- entered in the tables.
-
- used keeps track of how many table entries have been allocated from the
- provided *table space. It is checked for LENS and DIST tables against
- the constants ENOUGH_LENS and ENOUGH_DISTS to guard against changes in
- the initial root table size constants. See the comments in inftrees.h
- for more information.
-
- sym increments through all symbols, and the loop terminates when
- all codes of length max, i.e. all codes, have been processed. This
- routine permits incomplete codes, so another loop after this one fills
- in the rest of the decoding tables with invalid code markers.
- */
-
- /* set up for code type */
- switch (type) {
- case CODES:
- base = extra = work; /* dummy value--not used */
- match = 20;
- break;
- case LENS:
- base = lbase;
- extra = lext;
- match = 257;
- break;
- default: /* DISTS */
- base = dbase;
- extra = dext;
- match = 0;
- }
-
- /* initialize state for loop */
- huff = 0; /* starting code */
- sym = 0; /* starting code symbol */
- len = min; /* starting code length */
- next = *table; /* current table to fill in */
- curr = root; /* current table index bits */
- drop = 0; /* current bits to drop from code for index */
- low = (unsigned)(-1); /* trigger new sub-table when len > root */
- used = 1U << root; /* use root table entries */
- mask = used - 1; /* mask for comparing low */
-
- /* check available table space */
- if ((type == LENS && used > ENOUGH_LENS) ||
- (type == DISTS && used > ENOUGH_DISTS))
- return 1;
-
- /* process all codes and make table entries */
- for (;;) {
- /* create table entry */
- here.bits = (unsigned char)(len - drop);
- if (work[sym] + 1U < match) {
- here.op = (unsigned char)0;
- here.val = work[sym];
- }
- else if (work[sym] >= match) {
- here.op = (unsigned char)(extra[work[sym] - match]);
- here.val = base[work[sym] - match];
- }
- else {
- here.op = (unsigned char)(32 + 64); /* end of block */
- here.val = 0;
- }
-
- /* replicate for those indices with low len bits equal to huff */
- incr = 1U << (len - drop);
- fill = 1U << curr;
- min = fill; /* save offset to next table */
- do {
- fill -= incr;
- next[(huff >> drop) + fill] = here;
- } while (fill != 0);
-
- /* backwards increment the len-bit code huff */
- incr = 1U << (len - 1);
- while (huff & incr)
- incr >>= 1;
- if (incr != 0) {
- huff &= incr - 1;
- huff += incr;
- }
- else
- huff = 0;
-
- /* go to next symbol, update count, len */
- sym++;
- if (--(count[len]) == 0) {
- if (len == max) break;
- len = lens[work[sym]];
- }
-
- /* create new sub-table if needed */
- if (len > root && (huff & mask) != low) {
- /* if first time, transition to sub-tables */
- if (drop == 0)
- drop = root;
-
- /* increment past last table */
- next += min; /* here min is 1 << curr */
-
- /* determine length of next table */
- curr = len - drop;
- left = (int)(1 << curr);
- while (curr + drop < max) {
- left -= count[curr + drop];
- if (left <= 0) break;
- curr++;
- left <<= 1;
- }
-
- /* check for enough space */
- used += 1U << curr;
- if ((type == LENS && used > ENOUGH_LENS) ||
- (type == DISTS && used > ENOUGH_DISTS))
- return 1;
-
- /* point entry in root table to sub-table */
- low = huff & mask;
- (*table)[low].op = (unsigned char)curr;
- (*table)[low].bits = (unsigned char)root;
- (*table)[low].val = (unsigned short)(next - *table);
- }
- }
-
- /* fill in remaining table entry if code is incomplete (guaranteed to have
- at most one remaining entry, since if the code is incomplete, the
- maximum code length that was allowed to get this far is one bit) */
- if (huff != 0) {
- here.op = (unsigned char)64; /* invalid code marker */
- here.bits = (unsigned char)(len - drop);
- here.val = (unsigned short)0;
- next[huff] = here;
- }
-
- /* set return parameters */
- *table += used;
- *bits = root;
- return 0;
-}
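
/*
 * Editorial worked example (not part of the deleted file) of the canonical
 * code construction described inside inflate_table() above.  Lengths
 * {2, 1, 3, 3} for symbols {A, B, C, D} sort to B, A, C, D and yield codes
 * B = 0, A = 10, C = 110, D = 111; deflate then stores those bits in reverse
 * order, which is why the build loop increments codes "backwards".  The
 * hypothetical helper below computes the smallest (first) code of each length
 * from the count[] histogram; symbols with length zero never get a code.
 */
static void canonical_first_codes(const unsigned short count[MAXBITS + 1],
                                  unsigned first[MAXBITS + 1])
{
    unsigned len, code = 0, prev = 0;

    for (len = 1; len <= MAXBITS; len++) {
        code = (code + prev) << 1;   /* append a zero bit as the length grows */
        first[len] = code;           /* smallest code of this length */
        prev = count[len];           /* carried into the next length */
    }
}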
diff --git a/thirdparty/freetype/src/gzip/inftrees.h b/thirdparty/freetype/src/gzip/inftrees.h
deleted file mode 100644
index c94eb78b5d..0000000000
--- a/thirdparty/freetype/src/gzip/inftrees.h
+++ /dev/null
@@ -1,67 +0,0 @@
-/* inftrees.h -- header to use inftrees.c
- * Copyright (C) 1995-2005, 2010 Mark Adler
- * For conditions of distribution and use, see copyright notice in zlib.h
- */
-
-#ifndef INFTREES_H
-#define INFTREES_H
-
-/* WARNING: this file should *not* be used by applications. It is
- part of the implementation of the compression library and is
- subject to change. Applications should only use zlib.h.
- */
-
-/* Structure for decoding tables. Each entry provides either the
- information needed to do the operation requested by the code that
- indexed that table entry, or it provides a pointer to another
- table that indexes more bits of the code. op indicates whether
- the entry is a pointer to another table, a literal, a length or
- distance, an end-of-block, or an invalid code. For a table
- pointer, the low four bits of op is the number of index bits of
- that table. For a length or distance, the low four bits of op
- is the number of extra bits to get after the code. bits is
- the number of bits in this code or part of the code to drop off
- of the bit buffer. val is the actual byte to output in the case
- of a literal, the base length or distance, or the offset from
- the current table to the next table. Each entry is four bytes. */
-typedef struct {
- unsigned char op; /* operation, extra bits, table bits */
- unsigned char bits; /* bits in this part of the code */
- unsigned short val; /* offset in table or code value */
-} code;
-
-/* op values as set by inflate_table():
- 00000000 - literal
- 0000tttt - table link, tttt != 0 is the number of table index bits
- 0001eeee - length or distance, eeee is the number of extra bits
- 01100000 - end of block
- 01000000 - invalid code
- */
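
/*
 * Editorial decode sketch (hypothetical helper, not from the deleted header):
 * how inflate.c interprets one table entry with the op encoding listed above.
 * The checks mirror the LEN/DIST state handling in inflate() earlier in this
 * diff.
 */
static int classify_code_entry(code here)
{
    if (here.op == 0)
        return 'L';   /* 00000000: literal byte in here.val */
    if ((here.op & 0xf0) == 0)
        return 'T';   /* 0000tttt: link to a sub-table with here.op index bits */
    if (here.op & 32)
        return 'E';   /* 01100000: end-of-block marker */
    if (here.op & 64)
        return 'X';   /* 01000000: invalid code */
    return 'D';       /* 0001eeee: base in here.val plus (here.op & 15) extra bits */
}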
-
-/* Maximum size of the dynamic table. The maximum number of code structures is
- 1444, which is the sum of 852 for literal/length codes and 592 for distance
- codes. These values were found by exhaustive searches using the program
-   examples/enough.c found in the zlib distribution.  The arguments to that
- program are the number of symbols, the initial root table size, and the
- maximum bit length of a code. "enough 286 9 15" for literal/length codes
-   returns 852, and "enough 30 6 15" for distance codes returns 592.
- The initial root table size (9 or 6) is found in the fifth argument of the
- inflate_table() calls in inflate.c and infback.c. If the root table size is
-   changed, then these maximum sizes would need to be recalculated and
- updated. */
-#define ENOUGH_LENS 852
-#define ENOUGH_DISTS 592
-#define ENOUGH (ENOUGH_LENS+ENOUGH_DISTS)
-
-/* Type of code to build for inflate_table() */
-typedef enum {
- CODES,
- LENS,
- DISTS
-} codetype;
-
-int ZLIB_INTERNAL inflate_table OF((codetype type, unsigned short FAR *lens,
- unsigned codes, code FAR * FAR *table,
- unsigned FAR *bits, unsigned short FAR *work));
-
-#endif /* INFTREES_H */
diff --git a/thirdparty/freetype/src/gzip/zlib.h b/thirdparty/freetype/src/gzip/zlib.h
deleted file mode 100644
index d760140c2e..0000000000
--- a/thirdparty/freetype/src/gzip/zlib.h
+++ /dev/null
@@ -1,1968 +0,0 @@
-/* zlib.h -- interface of the 'zlib' general purpose compression library
- version 1.2.12, March 11th, 2022
-
- Copyright (C) 1995-2022 Jean-loup Gailly and Mark Adler
-
- This software is provided 'as-is', without any express or implied
- warranty. In no event will the authors be held liable for any damages
- arising from the use of this software.
-
- Permission is granted to anyone to use this software for any purpose,
- including commercial applications, and to alter it and redistribute it
- freely, subject to the following restrictions:
-
- 1. The origin of this software must not be misrepresented; you must not
- claim that you wrote the original software. If you use this software
- in a product, an acknowledgment in the product documentation would be
- appreciated but is not required.
- 2. Altered source versions must be plainly marked as such, and must not be
- misrepresented as being the original software.
- 3. This notice may not be removed or altered from any source distribution.
-
- Jean-loup Gailly Mark Adler
- jloup@gzip.org madler@alumni.caltech.edu
-
-
- The data format used by the zlib library is described by RFCs (Request for
- Comments) 1950 to 1952 in the files http://tools.ietf.org/html/rfc1950
- (zlib format), rfc1951 (deflate format) and rfc1952 (gzip format).
-*/
-
-#ifndef ZLIB_H
-#define ZLIB_H
-
-#include "ftzconf.h"
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-#define ZLIB_VERSION "1.2.12"
-#define ZLIB_VERNUM 0x12c0
-#define ZLIB_VER_MAJOR 1
-#define ZLIB_VER_MINOR 2
-#define ZLIB_VER_REVISION 12
-#define ZLIB_VER_SUBREVISION 0
-
-/*
- The 'zlib' compression library provides in-memory compression and
- decompression functions, including integrity checks of the uncompressed data.
- This version of the library supports only one compression method (deflation)
- but other algorithms will be added later and will have the same stream
- interface.
-
- Compression can be done in a single step if the buffers are large enough,
- or can be done by repeated calls of the compression function. In the latter
- case, the application must provide more input and/or consume the output
- (providing more output space) before each call.
-
- The compressed data format used by default by the in-memory functions is
- the zlib format, which is a zlib wrapper documented in RFC 1950, wrapped
- around a deflate stream, which is itself documented in RFC 1951.
-
- The library also supports reading and writing files in gzip (.gz) format
- with an interface similar to that of stdio using the functions that start
- with "gz". The gzip format is different from the zlib format. gzip is a
- gzip wrapper, documented in RFC 1952, wrapped around a deflate stream.
-
- This library can optionally read and write gzip and raw deflate streams in
- memory as well.
-
- The zlib format was designed to be compact and fast for use in memory
- and on communications channels. The gzip format was designed for single-
- file compression on file systems, has a larger header than zlib to maintain
- directory information, and uses a different, slower check method than zlib.
-
- The library does not install any signal handler. The decoder checks
- the consistency of the compressed data, so the library should never crash
- even in the case of corrupted input.
-*/
-
-typedef voidpf (*alloc_func) OF((voidpf opaque, uInt items, uInt size));
-typedef void (*free_func) OF((voidpf opaque, voidpf address));
-
-struct internal_state;
-
-typedef struct z_stream_s {
- z_const Bytef *next_in; /* next input byte */
- uInt avail_in; /* number of bytes available at next_in */
- uLong total_in; /* total number of input bytes read so far */
-
- Bytef *next_out; /* next output byte will go here */
- uInt avail_out; /* remaining free space at next_out */
- uLong total_out; /* total number of bytes output so far */
-
- z_const char *msg; /* last error message, NULL if no error */
- struct internal_state FAR *state; /* not visible by applications */
-
- alloc_func zalloc; /* used to allocate the internal state */
- free_func zfree; /* used to free the internal state */
- voidpf opaque; /* private data object passed to zalloc and zfree */
-
- int data_type; /* best guess about the data type: binary or text
- for deflate, or the decoding state for inflate */
- uLong adler; /* Adler-32 or CRC-32 value of the uncompressed data */
- uLong reserved; /* reserved for future use */
-} z_stream;
-
-typedef z_stream FAR *z_streamp;
-
-/*
- gzip header information passed to and from zlib routines. See RFC 1952
- for more details on the meanings of these fields.
-*/
-typedef struct gz_header_s {
- int text; /* true if compressed data believed to be text */
- uLong time; /* modification time */
- int xflags; /* extra flags (not used when writing a gzip file) */
- int os; /* operating system */
- Bytef *extra; /* pointer to extra field or Z_NULL if none */
- uInt extra_len; /* extra field length (valid if extra != Z_NULL) */
- uInt extra_max; /* space at extra (only when reading header) */
- Bytef *name; /* pointer to zero-terminated file name or Z_NULL */
- uInt name_max; /* space at name (only when reading header) */
- Bytef *comment; /* pointer to zero-terminated comment or Z_NULL */
- uInt comm_max; /* space at comment (only when reading header) */
- int hcrc; /* true if there was or will be a header crc */
- int done; /* true when done reading gzip header (not used
- when writing a gzip file) */
-} gz_header;
-
-typedef gz_header FAR *gz_headerp;
-
-/*
- The application must update next_in and avail_in when avail_in has dropped
- to zero. It must update next_out and avail_out when avail_out has dropped
- to zero. The application must initialize zalloc, zfree and opaque before
- calling the init function. All other fields are set by the compression
- library and must not be updated by the application.
-
- The opaque value provided by the application will be passed as the first
- parameter for calls of zalloc and zfree. This can be useful for custom
- memory management. The compression library attaches no meaning to the
- opaque value.
-
- zalloc must return Z_NULL if there is not enough memory for the object.
- If zlib is used in a multi-threaded application, zalloc and zfree must be
- thread safe. In that case, zlib is thread-safe. When zalloc and zfree are
- Z_NULL on entry to the initialization function, they are set to internal
- routines that use the standard library functions malloc() and free().
-
- On 16-bit systems, the functions zalloc and zfree must be able to allocate
- exactly 65536 bytes, but will not be required to allocate more than this if
- the symbol MAXSEG_64K is defined (see zconf.h). WARNING: On MSDOS, pointers
- returned by zalloc for objects of exactly 65536 bytes *must* have their
- offset normalized to zero. The default allocation function provided by this
- library ensures this (see zutil.c). To reduce memory requirements and avoid
- any allocation of 64K objects, at the expense of compression ratio, compile
- the library with -DMAX_WBITS=14 (see zconf.h).
-
- The fields total_in and total_out can be used for statistics or progress
- reports. After compression, total_in holds the total size of the
- uncompressed data and may be saved for use by the decompressor (particularly
- if the decompressor wants to decompress everything in a single step).
-*/
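
/*
 * Editorial sketch of a custom allocator pair matching the alloc_func and
 * free_func typedefs above; not part of the deleted header, and the names are
 * hypothetical.  Requires <stdlib.h>.  zlib expects zalloc to return Z_NULL
 * on failure, which calloc's NULL satisfies.
 */
#include <stdlib.h>

static voidpf my_zalloc(voidpf opaque, uInt items, uInt size)
{
    (void)opaque;                          /* no per-stream context needed */
    return (voidpf)calloc(items, size);    /* zeroed, Z_NULL on failure */
}

static void my_zfree(voidpf opaque, voidpf address)
{
    (void)opaque;
    free(address);
}

/* Installed before the init call:
       strm.zalloc = my_zalloc;
       strm.zfree  = my_zfree;
       strm.opaque = Z_NULL;    (or a pointer to application state)
*/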
-
- /* constants */
-
-#define Z_NO_FLUSH 0
-#define Z_PARTIAL_FLUSH 1
-#define Z_SYNC_FLUSH 2
-#define Z_FULL_FLUSH 3
-#define Z_FINISH 4
-#define Z_BLOCK 5
-#define Z_TREES 6
-/* Allowed flush values; see deflate() and inflate() below for details */
-
-#define Z_OK 0
-#define Z_STREAM_END 1
-#define Z_NEED_DICT 2
-#define Z_ERRNO (-1)
-#define Z_STREAM_ERROR (-2)
-#define Z_DATA_ERROR (-3)
-#define Z_MEM_ERROR (-4)
-#define Z_BUF_ERROR (-5)
-#define Z_VERSION_ERROR (-6)
-/* Return codes for the compression/decompression functions. Negative values
- * are errors, positive values are used for special but normal events.
- */
-
-#define Z_NO_COMPRESSION 0
-#define Z_BEST_SPEED 1
-#define Z_BEST_COMPRESSION 9
-#define Z_DEFAULT_COMPRESSION (-1)
-/* compression levels */
-
-#define Z_FILTERED 1
-#define Z_HUFFMAN_ONLY 2
-#define Z_RLE 3
-#define Z_FIXED 4
-#define Z_DEFAULT_STRATEGY 0
-/* compression strategy; see deflateInit2() below for details */
-
-#define Z_BINARY 0
-#define Z_TEXT 1
-#define Z_ASCII Z_TEXT /* for compatibility with 1.2.2 and earlier */
-#define Z_UNKNOWN 2
-/* Possible values of the data_type field for deflate() */
-
-#define Z_DEFLATED 8
-/* The deflate compression method (the only one supported in this version) */
-
-#define Z_NULL 0 /* for initializing zalloc, zfree, opaque */
-
-#ifndef Z_FREETYPE
-
-#define zlib_version zlibVersion()
-/* for compatibility with versions < 1.0.2 */
-
-
- /* basic functions */
-
-ZEXTERN const char * ZEXPORT zlibVersion OF((void));
-/* The application can compare zlibVersion and ZLIB_VERSION for consistency.
- If the first character differs, the library code actually used is not
- compatible with the zlib.h header file used by the application. This check
- is automatically made by deflateInit and inflateInit.
- */
-
-/*
-ZEXTERN int ZEXPORT deflateInit OF((z_streamp strm, int level));
-
- Initializes the internal stream state for compression. The fields
- zalloc, zfree and opaque must be initialized before by the caller. If
- zalloc and zfree are set to Z_NULL, deflateInit updates them to use default
- allocation functions.
-
- The compression level must be Z_DEFAULT_COMPRESSION, or between 0 and 9:
- 1 gives best speed, 9 gives best compression, 0 gives no compression at all
- (the input data is simply copied a block at a time). Z_DEFAULT_COMPRESSION
- requests a default compromise between speed and compression (currently
- equivalent to level 6).
-
- deflateInit returns Z_OK if success, Z_MEM_ERROR if there was not enough
- memory, Z_STREAM_ERROR if level is not a valid compression level, or
- Z_VERSION_ERROR if the zlib library version (zlib_version) is incompatible
- with the version assumed by the caller (ZLIB_VERSION). msg is set to null
- if there is no error message. deflateInit does not perform any compression:
- this will be done by deflate().
-*/
-
-ZEXTERN int ZEXPORT deflate OF((z_streamp strm, int flush));
-/*
- deflate compresses as much data as possible, and stops when the input
- buffer becomes empty or the output buffer becomes full. It may introduce
- some output latency (reading input without producing any output) except when
- forced to flush.
-
- The detailed semantics are as follows. deflate performs one or both of the
- following actions:
-
- - Compress more input starting at next_in and update next_in and avail_in
- accordingly. If not all input can be processed (because there is not
- enough room in the output buffer), next_in and avail_in are updated and
- processing will resume at this point for the next call of deflate().
-
- - Generate more output starting at next_out and update next_out and avail_out
- accordingly. This action is forced if the parameter flush is non zero.
- Forcing flush frequently degrades the compression ratio, so this parameter
- should be set only when necessary. Some output may be provided even if
- flush is zero.
-
- Before the call of deflate(), the application should ensure that at least
- one of the actions is possible, by providing more input and/or consuming more
- output, and updating avail_in or avail_out accordingly; avail_out should
- never be zero before the call. The application can consume the compressed
- output when it wants, for example when the output buffer is full (avail_out
- == 0), or after each call of deflate(). If deflate returns Z_OK and with
- zero avail_out, it must be called again after making room in the output
- buffer because there might be more output pending. See deflatePending(),
-  which can be used if desired to determine whether or not there is more output
- in that case.
-
- Normally the parameter flush is set to Z_NO_FLUSH, which allows deflate to
- decide how much data to accumulate before producing output, in order to
- maximize compression.
-
- If the parameter flush is set to Z_SYNC_FLUSH, all pending output is
- flushed to the output buffer and the output is aligned on a byte boundary, so
- that the decompressor can get all input data available so far. (In
- particular avail_in is zero after the call if enough output space has been
- provided before the call.) Flushing may degrade compression for some
- compression algorithms and so it should be used only when necessary. This
- completes the current deflate block and follows it with an empty stored block
- that is three bits plus filler bits to the next byte, followed by four bytes
- (00 00 ff ff).
-
- If flush is set to Z_PARTIAL_FLUSH, all pending output is flushed to the
- output buffer, but the output is not aligned to a byte boundary. All of the
- input data so far will be available to the decompressor, as for Z_SYNC_FLUSH.
- This completes the current deflate block and follows it with an empty fixed
- codes block that is 10 bits long. This assures that enough bytes are output
- in order for the decompressor to finish the block before the empty fixed
- codes block.
-
- If flush is set to Z_BLOCK, a deflate block is completed and emitted, as
- for Z_SYNC_FLUSH, but the output is not aligned on a byte boundary, and up to
- seven bits of the current block are held to be written as the next byte after
- the next deflate block is completed. In this case, the decompressor may not
- be provided enough bits at this point in order to complete decompression of
- the data provided so far to the compressor. It may need to wait for the next
- block to be emitted. This is for advanced applications that need to control
- the emission of deflate blocks.
-
- If flush is set to Z_FULL_FLUSH, all output is flushed as with
- Z_SYNC_FLUSH, and the compression state is reset so that decompression can
- restart from this point if previous compressed data has been damaged or if
- random access is desired. Using Z_FULL_FLUSH too often can seriously degrade
- compression.
-
- If deflate returns with avail_out == 0, this function must be called again
- with the same value of the flush parameter and more output space (updated
- avail_out), until the flush is complete (deflate returns with non-zero
- avail_out). In the case of a Z_FULL_FLUSH or Z_SYNC_FLUSH, make sure that
- avail_out is greater than six to avoid repeated flush markers due to
- avail_out == 0 on return.
-
- If the parameter flush is set to Z_FINISH, pending input is processed,
- pending output is flushed and deflate returns with Z_STREAM_END if there was
- enough output space. If deflate returns with Z_OK or Z_BUF_ERROR, this
- function must be called again with Z_FINISH and more output space (updated
- avail_out) but no more input data, until it returns with Z_STREAM_END or an
- error. After deflate has returned Z_STREAM_END, the only possible operations
- on the stream are deflateReset or deflateEnd.
-
- Z_FINISH can be used in the first deflate call after deflateInit if all the
- compression is to be done in a single step. In order to complete in one
- call, avail_out must be at least the value returned by deflateBound (see
- below). Then deflate is guaranteed to return Z_STREAM_END. If not enough
- output space is provided, deflate will not return Z_STREAM_END, and it must
- be called again as described above.
-
- deflate() sets strm->adler to the Adler-32 checksum of all input read
- so far (that is, total_in bytes). If a gzip stream is being generated, then
- strm->adler will be the CRC-32 checksum of the input read so far. (See
- deflateInit2 below.)
-
- deflate() may update strm->data_type if it can make a good guess about
- the input data type (Z_BINARY or Z_TEXT). If in doubt, the data is
- considered binary. This field is only for information purposes and does not
- affect the compression algorithm in any manner.
-
- deflate() returns Z_OK if some progress has been made (more input
- processed or more output produced), Z_STREAM_END if all input has been
- consumed and all output has been produced (only when flush is set to
- Z_FINISH), Z_STREAM_ERROR if the stream state was inconsistent (for example
- if next_in or next_out was Z_NULL or the state was inadvertently written over
- by the application), or Z_BUF_ERROR if no progress is possible (for example
- avail_in or avail_out was zero). Note that Z_BUF_ERROR is not fatal, and
- deflate() can be called again with more input and more output space to
- continue compressing.
-*/
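
/*
 * Editorial one-shot compression sketch using the Z_FINISH path described
 * above.  Generic zlib usage, not code from the deleted header; deflateBound()
 * is assumed to be available from the same library build, and the function
 * name is hypothetical.
 */
static int deflate_buffer(const Bytef *src, uLong srclen,
                          Bytef *dst, uLong *dstlen)
{
    z_stream strm;
    int ret;

    strm.zalloc = Z_NULL;
    strm.zfree  = Z_NULL;
    strm.opaque = Z_NULL;
    ret = deflateInit(&strm, Z_DEFAULT_COMPRESSION);
    if (ret != Z_OK)
        return ret;

    /* With avail_out >= deflateBound(&strm, srclen), a single Z_FINISH call
       is guaranteed to return Z_STREAM_END, as the text above explains. */
    strm.next_in   = (Bytef *)src;
    strm.avail_in  = (uInt)srclen;
    strm.next_out  = dst;
    strm.avail_out = (uInt)*dstlen;

    ret = deflate(&strm, Z_FINISH);
    *dstlen = strm.total_out;
    deflateEnd(&strm);
    return ret == Z_STREAM_END ? Z_OK : Z_BUF_ERROR;
}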
-
-
-ZEXTERN int ZEXPORT deflateEnd OF((z_streamp strm));
-/*
- All dynamically allocated data structures for this stream are freed.
- This function discards any unprocessed input and does not flush any pending
- output.
-
- deflateEnd returns Z_OK if success, Z_STREAM_ERROR if the
- stream state was inconsistent, Z_DATA_ERROR if the stream was freed
- prematurely (some input or output was discarded). In the error case, msg
- may be set but then points to a static string (which must not be
- deallocated).
-*/
-
-#endif /* !Z_FREETYPE */
-
-/*
-ZEXTERN int ZEXPORT inflateInit OF((z_streamp strm));
-
- Initializes the internal stream state for decompression. The fields
- next_in, avail_in, zalloc, zfree and opaque must be initialized before by
- the caller. In the current version of inflate, the provided input is not
- read or consumed. The allocation of a sliding window will be deferred to
- the first call of inflate (if the decompression does not complete on the
- first call). If zalloc and zfree are set to Z_NULL, inflateInit updates
- them to use default allocation functions.
-
- inflateInit returns Z_OK if success, Z_MEM_ERROR if there was not enough
- memory, Z_VERSION_ERROR if the zlib library version is incompatible with the
- version assumed by the caller, or Z_STREAM_ERROR if the parameters are
- invalid, such as a null pointer to the structure. msg is set to null if
- there is no error message. inflateInit does not perform any decompression.
- Actual decompression will be done by inflate(). So next_in, and avail_in,
- next_out, and avail_out are unused and unchanged. The current
- implementation of inflateInit() does not process any header information --
- that is deferred until inflate() is called.
-*/
-
-
-ZEXTERN int ZEXPORT inflate OF((z_streamp strm, int flush));
-/*
- inflate decompresses as much data as possible, and stops when the input
- buffer becomes empty or the output buffer becomes full. It may introduce
- some output latency (reading input without producing any output) except when
- forced to flush.
-
- The detailed semantics are as follows. inflate performs one or both of the
- following actions:
-
- - Decompress more input starting at next_in and update next_in and avail_in
- accordingly. If not all input can be processed (because there is not
- enough room in the output buffer), then next_in and avail_in are updated
- accordingly, and processing will resume at this point for the next call of
- inflate().
-
- - Generate more output starting at next_out and update next_out and avail_out
- accordingly. inflate() provides as much output as possible, until there is
- no more input data or no more space in the output buffer (see below about
- the flush parameter).
-
- Before the call of inflate(), the application should ensure that at least
- one of the actions is possible, by providing more input and/or consuming more
- output, and updating the next_* and avail_* values accordingly. If the
- caller of inflate() does not provide both available input and available
- output space, it is possible that there will be no progress made. The
- application can consume the uncompressed output when it wants, for example
- when the output buffer is full (avail_out == 0), or after each call of
- inflate(). If inflate returns Z_OK and with zero avail_out, it must be
- called again after making room in the output buffer because there might be
- more output pending.
-
- The flush parameter of inflate() can be Z_NO_FLUSH, Z_SYNC_FLUSH, Z_FINISH,
- Z_BLOCK, or Z_TREES. Z_SYNC_FLUSH requests that inflate() flush as much
- output as possible to the output buffer. Z_BLOCK requests that inflate()
- stop if and when it gets to the next deflate block boundary. When decoding
- the zlib or gzip format, this will cause inflate() to return immediately
- after the header and before the first block. When doing a raw inflate,
- inflate() will go ahead and process the first block, and will return when it
- gets to the end of that block, or when it runs out of data.
-
- The Z_BLOCK option assists in appending to or combining deflate streams.
- To assist in this, on return inflate() always sets strm->data_type to the
- number of unused bits in the last byte taken from strm->next_in, plus 64 if
- inflate() is currently decoding the last block in the deflate stream, plus
- 128 if inflate() returned immediately after decoding an end-of-block code or
- decoding the complete header up to just before the first byte of the deflate
- stream. The end-of-block will not be indicated until all of the uncompressed
- data from that block has been written to strm->next_out. The number of
- unused bits may in general be greater than seven, except when bit 7 of
- data_type is set, in which case the number of unused bits will be less than
- eight. data_type is set as noted here every time inflate() returns for all
- flush options, and so can be used to determine the amount of currently
- consumed input in bits.
-
- The Z_TREES option behaves as Z_BLOCK does, but it also returns when the
- end of each deflate block header is reached, before any actual data in that
- block is decoded. This allows the caller to determine the length of the
- deflate block header for later use in random access within a deflate block.
- 256 is added to the value of strm->data_type when inflate() returns
- immediately after reaching the end of the deflate block header.
-
- inflate() should normally be called until it returns Z_STREAM_END or an
- error. However if all decompression is to be performed in a single step (a
- single call of inflate), the parameter flush should be set to Z_FINISH. In
- this case all pending input is processed and all pending output is flushed;
- avail_out must be large enough to hold all of the uncompressed data for the
- operation to complete. (The size of the uncompressed data may have been
- saved by the compressor for this purpose.) The use of Z_FINISH is not
- required to perform an inflation in one step. However it may be used to
- inform inflate that a faster approach can be used for the single inflate()
- call. Z_FINISH also informs inflate to not maintain a sliding window if the
- stream completes, which reduces inflate's memory footprint. If the stream
- does not complete, either because not all of the stream is provided or not
- enough output space is provided, then a sliding window will be allocated and
- inflate() can be called again to continue the operation as if Z_NO_FLUSH had
- been used.
-
- In this implementation, inflate() always flushes as much output as
- possible to the output buffer, and always uses the faster approach on the
- first call. So the effects of the flush parameter in this implementation are
- on the return value of inflate() as noted below, when inflate() returns early
- when Z_BLOCK or Z_TREES is used, and when inflate() avoids the allocation of
- memory for a sliding window when Z_FINISH is used.
-
- If a preset dictionary is needed after this call (see inflateSetDictionary
- below), inflate sets strm->adler to the Adler-32 checksum of the dictionary
- chosen by the compressor and returns Z_NEED_DICT; otherwise it sets
- strm->adler to the Adler-32 checksum of all output produced so far (that is,
- total_out bytes) and returns Z_OK, Z_STREAM_END or an error code as described
- below. At the end of the stream, inflate() checks that its computed Adler-32
- checksum is equal to that saved by the compressor and returns Z_STREAM_END
- only if the checksum is correct.
-
- inflate() can decompress and check either zlib-wrapped or gzip-wrapped
- deflate data. The header type is detected automatically, if requested when
- initializing with inflateInit2(). Any information contained in the gzip
- header is not retained unless inflateGetHeader() is used. When processing
- gzip-wrapped deflate data, strm->adler is set to the CRC-32 of the output
- produced so far. The CRC-32 is checked against the gzip trailer, as is the
- uncompressed length, modulo 2^32.
-
- inflate() returns Z_OK if some progress has been made (more input processed
- or more output produced), Z_STREAM_END if the end of the compressed data has
- been reached and all uncompressed output has been produced, Z_NEED_DICT if a
- preset dictionary is needed at this point, Z_DATA_ERROR if the input data was
- corrupted (input stream not conforming to the zlib format or incorrect check
- value, in which case strm->msg points to a string with a more specific
- error), Z_STREAM_ERROR if the stream structure was inconsistent (for example
- next_in or next_out was Z_NULL, or the state was inadvertently written over
- by the application), Z_MEM_ERROR if there was not enough memory, Z_BUF_ERROR
- if no progress was possible or if there was not enough room in the output
- buffer when Z_FINISH is used. Note that Z_BUF_ERROR is not fatal, and
- inflate() can be called again with more input and more output space to
- continue decompressing. If Z_DATA_ERROR is returned, the application may
- then call inflateSync() to look for a good compression block if a partial
- recovery of the data is to be attempted.
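-
-   As an illustrative sketch only (assumptions: <string.h> and zlib.h are
- included, src and src_len describe a complete zlib stream held in memory,
- and sink() is an application-supplied consumer of uncompressed bytes), a
- typical decompression loop looks roughly like:
-
-     int ret;
-     unsigned char out[16384];
-     z_stream strm;
-
-     memset(&strm, 0, sizeof(strm));    /* zalloc, zfree, opaque = Z_NULL */
-     if (inflateInit(&strm) != Z_OK)
-         return -1;
-     strm.next_in = src;
-     strm.avail_in = src_len;
-     do {
-         strm.next_out = out;           /* always offer fresh output space */
-         strm.avail_out = sizeof(out);
-         ret = inflate(&strm, Z_NO_FLUSH);
-         if (ret != Z_OK && ret != Z_STREAM_END) {
-             inflateEnd(&strm);         /* Z_DATA_ERROR, Z_MEM_ERROR, ... */
-             return -1;
-         }
-         sink(out, sizeof(out) - strm.avail_out);
-     } while (ret != Z_STREAM_END);
-     inflateEnd(&strm);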
-*/
-
-
-ZEXTERN int ZEXPORT inflateEnd OF((z_streamp strm));
-/*
- All dynamically allocated data structures for this stream are freed.
- This function discards any unprocessed input and does not flush any pending
- output.
-
- inflateEnd returns Z_OK if success, or Z_STREAM_ERROR if the stream state
- was inconsistent.
-*/
-
-
- /* Advanced functions */
-
-/*
- The following functions are needed only in some special applications.
-*/
-
-#ifndef Z_FREETYPE
-
-/*
-ZEXTERN int ZEXPORT deflateInit2 OF((z_streamp strm,
- int level,
- int method,
- int windowBits,
- int memLevel,
- int strategy));
-
- This is another version of deflateInit with more compression options. The
- fields zalloc, zfree and opaque must be initialized before by the caller.
-
- The method parameter is the compression method. It must be Z_DEFLATED in
- this version of the library.
-
- The windowBits parameter is the base two logarithm of the window size
- (the size of the history buffer). It should be in the range 8..15 for this
- version of the library. Larger values of this parameter result in better
- compression at the expense of memory usage. The default value is 15 if
- deflateInit is used instead.
-
- For the current implementation of deflate(), a windowBits value of 8 (a
- window size of 256 bytes) is not supported. As a result, a request for 8
- will result in 9 (a 512-byte window). In that case, providing 8 to
- inflateInit2() will result in an error when the zlib header with 9 is
- checked against the initialization of inflate(). The remedy is not to use 8
- with deflateInit2() in that case, or at least to use 9 with inflateInit2().
-
- windowBits can also be -8..-15 for raw deflate. In this case, -windowBits
- determines the window size. deflate() will then generate raw deflate data
- with no zlib header or trailer, and will not compute a check value.
-
- windowBits can also be greater than 15 for optional gzip encoding. Add
- 16 to windowBits to write a simple gzip header and trailer around the
- compressed data instead of a zlib wrapper. The gzip header will have no
- file name, no extra data, no comment, no modification time (set to zero), no
- header crc, and the operating system will be set to the appropriate value,
- if the operating system was determined at compile time. If a gzip stream is
- being written, strm->adler is a CRC-32 instead of an Adler-32.
-
- For raw deflate or gzip encoding, a request for a 256-byte window is
- rejected as invalid, since only the zlib header provides a means of
- transmitting the window size to the decompressor.
-
- The memLevel parameter specifies how much memory should be allocated
- for the internal compression state. memLevel=1 uses minimum memory but is
- slow and reduces compression ratio; memLevel=9 uses maximum memory for
- optimal speed. The default value is 8. See zconf.h for total memory usage
- as a function of windowBits and memLevel.
-
- The strategy parameter is used to tune the compression algorithm. Use the
- value Z_DEFAULT_STRATEGY for normal data, Z_FILTERED for data produced by a
- filter (or predictor), Z_HUFFMAN_ONLY to force Huffman encoding only (no
- string match), or Z_RLE to limit match distances to one (run-length
- encoding). Filtered data consists mostly of small values with a somewhat
- random distribution. In this case, the compression algorithm is tuned to
- compress them better. The effect of Z_FILTERED is to force more Huffman
- coding and less string matching; it is somewhat intermediate between
- Z_DEFAULT_STRATEGY and Z_HUFFMAN_ONLY. Z_RLE is designed to be almost as
- fast as Z_HUFFMAN_ONLY, but give better compression for PNG image data. The
- strategy parameter only affects the compression ratio but not the
- correctness of the compressed output even if it is not set appropriately.
- Z_FIXED prevents the use of dynamic Huffman codes, allowing for a simpler
- decoder for special applications.
-
- deflateInit2 returns Z_OK if success, Z_MEM_ERROR if there was not enough
- memory, Z_STREAM_ERROR if any parameter is invalid (such as an invalid
- method), or Z_VERSION_ERROR if the zlib library version (zlib_version) is
- incompatible with the version assumed by the caller (ZLIB_VERSION). msg is
- set to null if there is no error message. deflateInit2 does not perform any
- compression: this will be done by deflate().
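-
-   A minimal sketch (assuming <string.h> is included) of requesting a gzip
- wrapper by adding 16 to windowBits:
-
-     z_stream strm;
-     memset(&strm, 0, sizeof(strm));    /* zalloc, zfree, opaque = Z_NULL */
-     /* level 6, 32K window plus a gzip wrapper, default memLevel of 8 */
-     if (deflateInit2(&strm, 6, Z_DEFLATED, 15 + 16, 8,
-                      Z_DEFAULT_STRATEGY) != Z_OK)
-         return -1;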
-*/
-
-ZEXTERN int ZEXPORT deflateSetDictionary OF((z_streamp strm,
- const Bytef *dictionary,
- uInt dictLength));
-/*
- Initializes the compression dictionary from the given byte sequence
- without producing any compressed output. When using the zlib format, this
- function must be called immediately after deflateInit, deflateInit2 or
- deflateReset, and before any call of deflate. When doing raw deflate, this
- function must be called either before any call of deflate, or immediately
- after the completion of a deflate block, i.e. after all input has been
- consumed and all output has been delivered when using any of the flush
- options Z_BLOCK, Z_PARTIAL_FLUSH, Z_SYNC_FLUSH, or Z_FULL_FLUSH. The
- compressor and decompressor must use exactly the same dictionary (see
- inflateSetDictionary).
-
- The dictionary should consist of strings (byte sequences) that are likely
- to be encountered later in the data to be compressed, with the most commonly
- used strings preferably put towards the end of the dictionary. Using a
- dictionary is most useful when the data to be compressed is short and can be
- predicted with good accuracy; the data can then be compressed better than
- with the default empty dictionary.
-
- Depending on the size of the compression data structures selected by
- deflateInit or deflateInit2, a part of the dictionary may in effect be
- discarded, for example if the dictionary is larger than the window size
- provided in deflateInit or deflateInit2. Thus the strings most likely to be
- useful should be put at the end of the dictionary, not at the front. In
- addition, the current implementation of deflate will use at most the window
- size minus 262 bytes of the provided dictionary.
-
- Upon return of this function, strm->adler is set to the Adler-32 value
- of the dictionary; the decompressor may later use this value to determine
- which dictionary has been used by the compressor. (The Adler-32 value
- applies to the whole dictionary even if only a subset of the dictionary is
- actually used by the compressor.) If a raw deflate was requested, then the
- Adler-32 value is not computed and strm->adler is not set.
-
- deflateSetDictionary returns Z_OK if success, or Z_STREAM_ERROR if a
- parameter is invalid (e.g. dictionary being Z_NULL) or the stream state is
- inconsistent (for example if deflate has already been called for this stream
- or if not at a block boundary for raw deflate). deflateSetDictionary does
- not perform any compression: this will be done by deflate().
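-
-   A brief sketch (assuming strm was just initialized with deflateInit or
- deflateInit2, and using a made-up dictionary string):
-
-     static const Bytef dict[] = "strings likely to recur in the input";
-     if (deflateSetDictionary(&strm, dict, (uInt)(sizeof(dict) - 1)) != Z_OK)
-         return -1;
-     /* for the zlib format, strm.adler now holds the dictionary Adler-32 */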
-*/
-
-ZEXTERN int ZEXPORT deflateGetDictionary OF((z_streamp strm,
- Bytef *dictionary,
- uInt *dictLength));
-/*
- Returns the sliding dictionary being maintained by deflate. dictLength is
- set to the number of bytes in the dictionary, and that many bytes are copied
- to dictionary. dictionary must have enough space, where 32768 bytes is
- always enough. If deflateGetDictionary() is called with dictionary equal to
- Z_NULL, then only the dictionary length is returned, and nothing is copied.
- Similarly, if dictLength is Z_NULL, then it is not set.
-
- deflateGetDictionary() may return a length less than the window size, even
- when more than the window size in input has been provided. It may return up
- to 258 bytes less in that case, due to how zlib's implementation of deflate
- manages the sliding window and lookahead for matches, where matches can be
- up to 258 bytes long. If the application needs the last window-size bytes of
- input, then that would need to be saved by the application outside of zlib.
-
- deflateGetDictionary returns Z_OK on success, or Z_STREAM_ERROR if the
- stream state is inconsistent.
-*/
-
-ZEXTERN int ZEXPORT deflateCopy OF((z_streamp dest,
- z_streamp source));
-/*
- Sets the destination stream as a complete copy of the source stream.
-
- This function can be useful when several compression strategies will be
- tried, for example when there are several ways of pre-processing the input
- data with a filter. The streams that will be discarded should then be freed
- by calling deflateEnd. Note that deflateCopy duplicates the internal
- compression state which can be quite large, so this strategy is slow and can
- consume lots of memory.
-
- deflateCopy returns Z_OK if success, Z_MEM_ERROR if there was not
- enough memory, Z_STREAM_ERROR if the source stream state was inconsistent
- (such as zalloc being Z_NULL). msg is left unchanged in both source and
- destination.
-*/
-
-ZEXTERN int ZEXPORT deflateReset OF((z_streamp strm));
-/*
- This function is equivalent to deflateEnd followed by deflateInit, but
- does not free and reallocate the internal compression state. The stream
- will leave the compression level and any other attributes that may have been
- set unchanged.
-
- deflateReset returns Z_OK if success, or Z_STREAM_ERROR if the source
- stream state was inconsistent (such as zalloc or state being Z_NULL).
-*/
-
-ZEXTERN int ZEXPORT deflateParams OF((z_streamp strm,
- int level,
- int strategy));
-/*
- Dynamically update the compression level and compression strategy. The
- interpretation of level and strategy is as in deflateInit2(). This can be
- used to switch between compression and straight copy of the input data, or
- to switch to a different kind of input data requiring a different strategy.
- If the compression approach (which is a function of the level) or the
- strategy is changed, and if there have been any deflate() calls since the
- state was initialized or reset, then the input available so far is
- compressed with the old level and strategy using deflate(strm, Z_BLOCK).
- There are three approaches for the compression levels 0, 1..3, and 4..9
- respectively. The new level and strategy will take effect at the next call
- of deflate().
-
- If a deflate(strm, Z_BLOCK) is performed by deflateParams(), and it does
- not have enough output space to complete, then the parameter change will not
- take effect. In this case, deflateParams() can be called again with the
- same parameters and more output space to try again.
-
- In order to assure a change in the parameters on the first try, the
- deflate stream should be flushed using deflate() with Z_BLOCK or other flush
- request until strm.avail_out is not zero, before calling deflateParams().
- Then no more input data should be provided before the deflateParams() call.
- If this is done, the old level and strategy will be applied to the data
- compressed before deflateParams(), and the new level and strategy will be
- applied to the data compressed after deflateParams().
-
- deflateParams returns Z_OK on success, Z_STREAM_ERROR if the source stream
- state was inconsistent or if a parameter was invalid, or Z_BUF_ERROR if
- there was not enough output space to complete the compression of the
- available input data before a change in the strategy or approach. Note that
- in the case of a Z_BUF_ERROR, the parameters are not changed. A return
- value of Z_BUF_ERROR is not fatal, in which case deflateParams() can be
- retried with more output space.
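-
-   A small sketch of switching to a faster setting mid-stream (assuming the
- pending output has already been flushed as described above):
-
-     int ret = deflateParams(&strm, 1, Z_DEFAULT_STRATEGY);
-     if (ret == Z_BUF_ERROR) {
-         /* not fatal: the parameters were not changed; provide more output
-            space and call deflateParams() again with the same arguments */
-     }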
-*/
-
-ZEXTERN int ZEXPORT deflateTune OF((z_streamp strm,
- int good_length,
- int max_lazy,
- int nice_length,
- int max_chain));
-/*
- Fine tune deflate's internal compression parameters. This should only be
- used by someone who understands the algorithm used by zlib's deflate for
- searching for the best matching string, and even then only by the most
- fanatic optimizer trying to squeeze out the last compressed bit for their
- specific input data. Read the deflate.c source code for the meaning of the
- max_lazy, good_length, nice_length, and max_chain parameters.
-
- deflateTune() can be called after deflateInit() or deflateInit2(), and
- returns Z_OK on success, or Z_STREAM_ERROR for an invalid deflate stream.
- */
-
-ZEXTERN uLong ZEXPORT deflateBound OF((z_streamp strm,
- uLong sourceLen));
-/*
- deflateBound() returns an upper bound on the compressed size after
- deflation of sourceLen bytes. It must be called after deflateInit() or
- deflateInit2(), and after deflateSetHeader(), if used. This would be used
- to allocate an output buffer for deflation in a single pass, and so would be
- called before deflate(). If that first deflate() call is provided the
- sourceLen input bytes, an output buffer allocated to the size returned by
- deflateBound(), and the flush value Z_FINISH, then deflate() is guaranteed
- to return Z_STREAM_END. Note that it is possible for the compressed size to
- be larger than the value returned by deflateBound() if flush options other
- than Z_FINISH or Z_NO_FLUSH are used.
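-
-   An illustrative single-pass compression sketch (assumptions: <stdlib.h> is
- included, strm was set up with deflateInit(), src and src_len describe the
- input, and the bound fits in a uInt):
-
-     uLong bound = deflateBound(&strm, src_len);
-     Bytef *out = (Bytef *)malloc(bound);
-     if (out == Z_NULL)
-         return -1;
-     strm.next_in = src;
-     strm.avail_in = src_len;
-     strm.next_out = out;
-     strm.avail_out = (uInt)bound;
-     if (deflate(&strm, Z_FINISH) != Z_STREAM_END)
-         return -1;                     /* cannot happen with this buffer */
-     /* out[0..strm.total_out-1] holds the complete compressed stream */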
-*/
-
-ZEXTERN int ZEXPORT deflatePending OF((z_streamp strm,
- unsigned *pending,
- int *bits));
-/*
- deflatePending() returns the number of bytes and bits of output that have
- been generated, but not yet provided in the available output. The bytes not
- provided would be due to the available output space having been consumed.
- The number of bits of output not provided are between 0 and 7, where they
- await more bits to join them in order to fill out a full byte. If pending
- or bits are Z_NULL, then those values are not set.
-
- deflatePending returns Z_OK if success, or Z_STREAM_ERROR if the source
- stream state was inconsistent.
- */
-
-ZEXTERN int ZEXPORT deflatePrime OF((z_streamp strm,
- int bits,
- int value));
-/*
- deflatePrime() inserts bits in the deflate output stream. The intent
- is that this function is used to start off the deflate output with the bits
- leftover from a previous deflate stream when appending to it. As such, this
- function can only be used for raw deflate, and must be used before the first
- deflate() call after a deflateInit2() or deflateReset(). bits must be less
- than or equal to 16, and that many of the least significant bits of value
- will be inserted in the output.
-
- deflatePrime returns Z_OK if success, Z_BUF_ERROR if there was not enough
- room in the internal buffer to insert the bits, or Z_STREAM_ERROR if the
- source stream state was inconsistent.
-*/
-
-ZEXTERN int ZEXPORT deflateSetHeader OF((z_streamp strm,
- gz_headerp head));
-/*
- deflateSetHeader() provides gzip header information for when a gzip
- stream is requested by deflateInit2(). deflateSetHeader() may be called
- after deflateInit2() or deflateReset() and before the first call of
- deflate(). The text, time, os, extra field, name, and comment information
- in the provided gz_header structure are written to the gzip header (xflag is
- ignored -- the extra flags are set according to the compression level). The
- caller must assure that, if not Z_NULL, name and comment are terminated with
- a zero byte, and that if extra is not Z_NULL, that extra_len bytes are
- available there. If hcrc is true, a gzip header crc is included. Note that
- the current versions of the command-line version of gzip (up through version
- 1.3.x) do not support header crc's, and will report that it is a "multi-part
- gzip file" and give up.
-
- If deflateSetHeader is not used, the default gzip header has text false,
- the time set to zero, and os set to 255, with no extra, name, or comment
- fields. The gzip header is returned to the default state by deflateReset().
-
- deflateSetHeader returns Z_OK if success, or Z_STREAM_ERROR if the source
- stream state was inconsistent.
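-
-   A minimal sketch (assuming <time.h> and <string.h> are included and strm
- was initialized with deflateInit2() requesting gzip output):
-
-     gz_header head;
-     memset(&head, 0, sizeof(head));    /* extra, comment = Z_NULL; hcrc = 0 */
-     head.name = (Bytef *)"example.txt";   /* must be zero-terminated */
-     head.time = (uLong)time(NULL);
-     head.os = 3;                       /* Unix */
-     if (deflateSetHeader(&strm, &head) != Z_OK)
-         return -1;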
-*/
-
-/*
-ZEXTERN int ZEXPORT inflateInit2 OF((z_streamp strm,
- int windowBits));
-
- This is another version of inflateInit with an extra parameter. The
- fields next_in, avail_in, zalloc, zfree and opaque must be initialized
- before by the caller.
-
- The windowBits parameter is the base two logarithm of the maximum window
- size (the size of the history buffer). It should be in the range 8..15 for
- this version of the library. The default value is 15 if inflateInit is used
- instead. windowBits must be greater than or equal to the windowBits value
- provided to deflateInit2() while compressing, or it must be equal to 15 if
- deflateInit2() was not used. If a compressed stream with a larger window
- size is given as input, inflate() will return with the error code
- Z_DATA_ERROR instead of trying to allocate a larger window.
-
- windowBits can also be zero to request that inflate use the window size in
- the zlib header of the compressed stream.
-
- windowBits can also be -8..-15 for raw inflate. In this case, -windowBits
- determines the window size. inflate() will then process raw deflate data,
- not looking for a zlib or gzip header, not generating a check value, and not
- looking for any check values for comparison at the end of the stream. This
- is for use with other formats that use the deflate compressed data format
- such as zip. Those formats provide their own check values. If a custom
- format is developed using the raw deflate format for compressed data, it is
- recommended that a check value such as an Adler-32 or a CRC-32 be applied to
- the uncompressed data as is done in the zlib, gzip, and zip formats. For
- most applications, the zlib format should be used as is. Note that the
- comments above on the use of windowBits in deflateInit2() also apply to the
- magnitude of windowBits here.
-
- windowBits can also be greater than 15 for optional gzip decoding. Add
- 32 to windowBits to enable zlib and gzip decoding with automatic header
- detection, or add 16 to decode only the gzip format (the zlib format will
- return a Z_DATA_ERROR). If a gzip stream is being decoded, strm->adler is a
- CRC-32 instead of an Adler-32. Unlike the gunzip utility and gzread() (see
- below), inflate() will *not* automatically decode concatenated gzip members.
- inflate() will return Z_STREAM_END at the end of the gzip member. The state
- would need to be reset to continue decoding a subsequent gzip member. This
- *must* be done if there is more data after a gzip member, in order for the
- decompression to be compliant with the gzip standard (RFC 1952).
-
- inflateInit2 returns Z_OK if success, Z_MEM_ERROR if there was not enough
- memory, Z_VERSION_ERROR if the zlib library version is incompatible with the
- version assumed by the caller, or Z_STREAM_ERROR if the parameters are
- invalid, such as a null pointer to the structure. msg is set to null if
- there is no error message. inflateInit2 does not perform any decompression
- apart from possibly reading the zlib header if present: actual decompression
- will be done by inflate(). (So next_in and avail_in may be modified, but
- next_out and avail_out are unused and unchanged.) The current implementation
- of inflateInit2() does not process any header information -- that is
- deferred until inflate() is called.
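-
-   A short sketch of initializing for automatic zlib or gzip detection
- (assuming a zero-initialized z_stream named strm and in-memory input in
- src and src_len):
-
-     strm.next_in = src;
-     strm.avail_in = src_len;
-     if (inflateInit2(&strm, 15 + 32) != Z_OK)  /* 32: detect zlib or gzip */
-         return -1;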
-*/
-
-ZEXTERN int ZEXPORT inflateSetDictionary OF((z_streamp strm,
- const Bytef *dictionary,
- uInt dictLength));
-/*
- Initializes the decompression dictionary from the given uncompressed byte
- sequence. This function must be called immediately after a call of inflate,
- if that call returned Z_NEED_DICT. The dictionary chosen by the compressor
- can be determined from the Adler-32 value returned by that call of inflate.
- The compressor and decompressor must use exactly the same dictionary (see
- deflateSetDictionary). For raw inflate, this function can be called at any
- time to set the dictionary. If the provided dictionary is smaller than the
- window and there is already data in the window, then the provided dictionary
- will amend what's there. The application must ensure that the dictionary
- that was used for compression is provided.
-
- inflateSetDictionary returns Z_OK if success, Z_STREAM_ERROR if a
- parameter is invalid (e.g. dictionary being Z_NULL) or the stream state is
- inconsistent, Z_DATA_ERROR if the given dictionary doesn't match the
- expected one (incorrect Adler-32 value). inflateSetDictionary does not
- perform any decompression: this will be done by subsequent calls of
- inflate().
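-
-   A sketch of reacting to Z_NEED_DICT (assuming dict and dict_len hold the
- same dictionary that was given to deflateSetDictionary):
-
-     ret = inflate(&strm, Z_NO_FLUSH);
-     if (ret == Z_NEED_DICT) {
-         /* strm.adler identifies the dictionary chosen by the compressor */
-         if (inflateSetDictionary(&strm, dict, dict_len) != Z_OK)
-             return -1;
-         ret = inflate(&strm, Z_NO_FLUSH);      /* resume decompression */
-     }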
-*/
-
-ZEXTERN int ZEXPORT inflateGetDictionary OF((z_streamp strm,
- Bytef *dictionary,
- uInt *dictLength));
-/*
- Returns the sliding dictionary being maintained by inflate. dictLength is
- set to the number of bytes in the dictionary, and that many bytes are copied
- to dictionary. dictionary must have enough space, where 32768 bytes is
- always enough. If inflateGetDictionary() is called with dictionary equal to
- Z_NULL, then only the dictionary length is returned, and nothing is copied.
- Similarly, if dictLength is Z_NULL, then it is not set.
-
- inflateGetDictionary returns Z_OK on success, or Z_STREAM_ERROR if the
- stream state is inconsistent.
-*/
-
-ZEXTERN int ZEXPORT inflateSync OF((z_streamp strm));
-/*
- Skips invalid compressed data until a possible full flush point (see above
- for the description of deflate with Z_FULL_FLUSH) can be found, or until all
- available input is skipped. No output is provided.
-
- inflateSync searches for a 00 00 FF FF pattern in the compressed data.
- All full flush points have this pattern, but not all occurrences of this
- pattern are full flush points.
-
- inflateSync returns Z_OK if a possible full flush point has been found,
- Z_BUF_ERROR if no more input was provided, Z_DATA_ERROR if no flush point
- has been found, or Z_STREAM_ERROR if the stream structure was inconsistent.
- In the success case, the application may save the current value of
- total_in which indicates where valid compressed data was found. In the
- error case, the application may repeatedly call inflateSync, providing more
- input each time, until success or end of the input data.
-*/
-
-ZEXTERN int ZEXPORT inflateCopy OF((z_streamp dest,
- z_streamp source));
-/*
- Sets the destination stream as a complete copy of the source stream.
-
- This function can be useful when randomly accessing a large stream. The
- first pass through the stream can periodically record the inflate state,
- allowing restarting inflate at those points when randomly accessing the
- stream.
-
- inflateCopy returns Z_OK if success, Z_MEM_ERROR if there was not
- enough memory, Z_STREAM_ERROR if the source stream state was inconsistent
- (such as zalloc being Z_NULL). msg is left unchanged in both source and
- destination.
-*/
-
-#endif /* !Z_FREETYPE */
-
-ZEXTERN int ZEXPORT inflateReset OF((z_streamp strm));
-/*
- This function is equivalent to inflateEnd followed by inflateInit,
- but does not free and reallocate the internal decompression state. The
- stream will keep attributes that may have been set by inflateInit2.
-
- inflateReset returns Z_OK if success, or Z_STREAM_ERROR if the source
- stream state was inconsistent (such as zalloc or state being Z_NULL).
-*/
-
-ZEXTERN int ZEXPORT inflateReset2 OF((z_streamp strm,
- int windowBits));
-/*
- This function is the same as inflateReset, but it also permits changing
- the wrap and window size requests. The windowBits parameter is interpreted
- the same as it is for inflateInit2. If the window size is changed, then the
- memory allocated for the window is freed, and the window will be reallocated
- by inflate() if needed.
-
- inflateReset2 returns Z_OK if success, or Z_STREAM_ERROR if the source
- stream state was inconsistent (such as zalloc or state being Z_NULL), or if
- the windowBits parameter is invalid.
-*/
-
-#ifndef Z_FREETYPE
-
-ZEXTERN int ZEXPORT inflatePrime OF((z_streamp strm,
- int bits,
- int value));
-/*
- This function inserts bits in the inflate input stream. The intent is
- that this function is used to start inflating at a bit position in the
- middle of a byte. The provided bits will be used before any bytes are used
- from next_in. This function should only be used with raw inflate, and
- should be used before the first inflate() call after inflateInit2() or
- inflateReset(). bits must be less than or equal to 16, and that many of the
- least significant bits of value will be inserted in the input.
-
- If bits is negative, then the input stream bit buffer is emptied. Then
- inflatePrime() can be called again to put bits in the buffer. This is used
- to clear out bits leftover after feeding inflate a block description prior
- to feeding inflate codes.
-
- inflatePrime returns Z_OK if success, or Z_STREAM_ERROR if the source
- stream state was inconsistent.
-*/
-
-ZEXTERN long ZEXPORT inflateMark OF((z_streamp strm));
-/*
- This function returns two values, one in the lower 16 bits of the return
- value, and the other in the remaining upper bits, obtained by shifting the
- return value down 16 bits. If the upper value is -1 and the lower value is
- zero, then inflate() is currently decoding information outside of a block.
- If the upper value is -1 and the lower value is non-zero, then inflate is in
- the middle of a stored block, with the lower value equaling the number of
- bytes from the input remaining to copy. If the upper value is not -1, then
- it is the number of bits back from the current bit position in the input of
- the code (literal or length/distance pair) currently being processed. In
- that case the lower value is the number of bytes already emitted for that
- code.
-
- A code is being processed if inflate is waiting for more input to complete
- decoding of the code, or if it has completed decoding but is waiting for
- more output space to write the literal or match data.
-
- inflateMark() is used to mark locations in the input data for random
- access, which may be at bit positions, and to note those cases where the
- output of a code may span boundaries of random access blocks. The current
- location in the input stream can be determined from avail_in and data_type
- as noted in the description for the Z_BLOCK flush parameter for inflate.
-
- inflateMark returns the value noted above, or -65536 if the provided
- source stream state was inconsistent.
-*/
-
-ZEXTERN int ZEXPORT inflateGetHeader OF((z_streamp strm,
- gz_headerp head));
-/*
- inflateGetHeader() requests that gzip header information be stored in the
- provided gz_header structure. inflateGetHeader() may be called after
- inflateInit2() or inflateReset(), and before the first call of inflate().
- As inflate() processes the gzip stream, head->done is zero until the header
- is completed, at which time head->done is set to one. If a zlib stream is
- being decoded, then head->done is set to -1 to indicate that there will be
- no gzip header information forthcoming. Note that Z_BLOCK or Z_TREES can be
- used to force inflate() to return immediately after header processing is
- complete and before any actual data is decompressed.
-
- The text, time, xflags, and os fields are filled in with the gzip header
- contents. hcrc is set to true if there is a header CRC. (The header CRC
- was valid if done is set to one.) If extra is not Z_NULL, then extra_max
- contains the maximum number of bytes to write to extra. Once done is true,
- extra_len contains the actual extra field length, and extra contains the
- extra field, or that field truncated if extra_max is less than extra_len.
- If name is not Z_NULL, then up to name_max characters are written there,
- terminated with a zero unless the length is greater than name_max. If
- comment is not Z_NULL, then up to comm_max characters are written there,
- terminated with a zero unless the length is greater than comm_max. When any
- of extra, name, or comment are not Z_NULL and the respective field is not
- present in the header, then that field is set to Z_NULL to signal its
- absence. This allows the use of deflateSetHeader() with the returned
- structure to duplicate the header. However if those fields are set to
- allocated memory, then the application will need to save those pointers
- elsewhere so that they can be eventually freed.
-
- If inflateGetHeader is not used, then the header information is simply
- discarded. The header is always checked for validity, including the header
- CRC if present. inflateReset() will reset the process to discard the header
- information. The application would need to call inflateGetHeader() again to
- retrieve the header from the next gzip stream.
-
- inflateGetHeader returns Z_OK if success, or Z_STREAM_ERROR if the source
- stream state was inconsistent.
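-
-   A brief sketch (assuming strm was initialized with inflateInit2() and a
- windowBits value that enables gzip decoding):
-
-     gz_header head;
-     Bytef name[256];
-     memset(&head, 0, sizeof(head));
-     head.name = name;                  /* where to copy the file name */
-     head.name_max = sizeof(name);
-     if (inflateGetHeader(&strm, &head) != Z_OK)
-         return -1;
-     /* call inflate() as usual; once head.done is 1 the header is complete
-        and head.name holds the stored file name, or Z_NULL if absent */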
-*/
-
-#endif /* !Z_FREETYPE */
-
-/*
-ZEXTERN int ZEXPORT inflateBackInit OF((z_streamp strm, int windowBits,
- unsigned char FAR *window));
-
- Initialize the internal stream state for decompression using inflateBack()
- calls. The fields zalloc, zfree and opaque in strm must be initialized
- before the call. If zalloc and zfree are Z_NULL, then the default library-
- derived memory allocation routines are used. windowBits is the base two
- logarithm of the window size, in the range 8..15. window is a caller
- supplied buffer of that size. Except for special applications where it is
- assured that deflate was used with small window sizes, windowBits must be 15
- and a 32K byte window must be supplied to be able to decompress general
- deflate streams.
-
- See inflateBack() for the usage of these routines.
-
- inflateBackInit will return Z_OK on success, Z_STREAM_ERROR if any of
- the parameters are invalid, Z_MEM_ERROR if the internal state could not be
- allocated, or Z_VERSION_ERROR if the version of the library does not match
- the version of the header file.
-*/
-
-typedef unsigned (*in_func) OF((void FAR *,
- z_const unsigned char FAR * FAR *));
-typedef int (*out_func) OF((void FAR *, unsigned char FAR *, unsigned));
-
-#ifndef Z_FREETYPE
-
-ZEXTERN int ZEXPORT inflateBack OF((z_streamp strm,
- in_func in, void FAR *in_desc,
- out_func out, void FAR *out_desc));
-/*
- inflateBack() does a raw inflate with a single call using a call-back
- interface for input and output. This is potentially more efficient than
- inflate() for file i/o applications, in that it avoids copying between the
- output and the sliding window by simply making the window itself the output
- buffer. inflate() can be faster on modern CPUs when used with large
- buffers. inflateBack() trusts the application to not change the output
- buffer passed by the output function, at least until inflateBack() returns.
-
- inflateBackInit() must be called first to allocate the internal state
- and to initialize the state with the user-provided window buffer.
- inflateBack() may then be used multiple times to inflate a complete, raw
- deflate stream with each call. inflateBackEnd() is then called to free the
- allocated state.
-
- A raw deflate stream is one with no zlib or gzip header or trailer.
- This routine would normally be used in a utility that reads zip or gzip
- files and writes out uncompressed files. The utility would decode the
- header and process the trailer on its own, hence this routine expects only
- the raw deflate stream to decompress. This is different from the default
- behavior of inflate(), which expects a zlib header and trailer around the
- deflate stream.
-
- inflateBack() uses two subroutines supplied by the caller that are then
- called by inflateBack() for input and output. inflateBack() calls those
- routines until it reads a complete deflate stream and writes out all of the
- uncompressed data, or until it encounters an error. The function's
- parameters and return types are defined above in the in_func and out_func
- typedefs. inflateBack() will call in(in_desc, &buf) which should return the
- number of bytes of provided input, and a pointer to that input in buf. If
- there is no input available, in() must return zero -- buf is ignored in that
- case -- and inflateBack() will return a buffer error. inflateBack() will
- call out(out_desc, buf, len) to write the uncompressed data buf[0..len-1].
- out() should return zero on success, or non-zero on failure. If out()
- returns non-zero, inflateBack() will return with an error. Neither in() nor
- out() are permitted to change the contents of the window provided to
- inflateBackInit(), which is also the buffer that out() uses to write from.
- The length written by out() will be at most the window size. Any non-zero
- amount of input may be provided by in().
-
- For convenience, inflateBack() can be provided input on the first call by
- setting strm->next_in and strm->avail_in. If that input is exhausted, then
- in() will be called. Therefore strm->next_in must be initialized before
- calling inflateBack(). If strm->next_in is Z_NULL, then in() will be called
- immediately for input. If strm->next_in is not Z_NULL, then strm->avail_in
- must also be initialized, and then if strm->avail_in is not zero, input will
- initially be taken from strm->next_in[0 .. strm->avail_in - 1].
-
- The in_desc and out_desc parameters of inflateBack() are passed as the
- first parameter of in() and out() respectively when they are called. These
- descriptors can be optionally used to pass any information that the caller-
- supplied in() and out() functions need to do their job.
-
- On return, inflateBack() will set strm->next_in and strm->avail_in to
- pass back any unused input that was provided by the last in() call. The
- return values of inflateBack() can be Z_STREAM_END on success, Z_BUF_ERROR
- if in() or out() returned an error, Z_DATA_ERROR if there was a format error
- in the deflate stream (in which case strm->msg is set to indicate the nature
- of the error), or Z_STREAM_ERROR if the stream was not properly initialized.
- In the case of Z_BUF_ERROR, an input or output error can be distinguished
- using strm->next_in which will be Z_NULL only if in() returned an error. If
- strm->next_in is not Z_NULL, then the Z_BUF_ERROR was due to out() returning
- non-zero. (in() will always be called before out(), so strm->next_in is
- assured to be defined if out() returns non-zero.) Note that inflateBack()
- cannot return Z_OK.
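-
-   An illustrative pair of callbacks (assumptions: <stdio.h> is included, the
- whole raw deflate stream is already in memory, and the uncompressed output
- is written to a FILE * passed as out_desc):
-
-     struct mem { z_const unsigned char *data; unsigned len; };
-
-     static unsigned pull(void *desc, z_const unsigned char **buf) {
-         struct mem *in = (struct mem *)desc;
-         unsigned n = in->len;
-         *buf = in->data;
-         in->len = 0;                   /* everything is handed over at once */
-         return n;                      /* returning 0 means no more input */
-     }
-
-     static int push(void *desc, unsigned char *buf, unsigned len) {
-         return fwrite(buf, 1, len, (FILE *)desc) != len;  /* non-zero: error */
-     }
-
-     /* with struct mem input = { data, len } and a 32K window buffer,
-        after inflateBackInit(&strm, 15, window): */
-     ret = inflateBack(&strm, pull, &input, push, stdout);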
-*/
-
-ZEXTERN int ZEXPORT inflateBackEnd OF((z_streamp strm));
-/*
- All memory allocated by inflateBackInit() is freed.
-
- inflateBackEnd() returns Z_OK on success, or Z_STREAM_ERROR if the stream
- state was inconsistent.
-*/
-
-ZEXTERN uLong ZEXPORT zlibCompileFlags OF((void));
-/* Return flags indicating compile-time options.
-
- Type sizes, two bits each, 00 = 16 bits, 01 = 32, 10 = 64, 11 = other:
- 1.0: size of uInt
- 3.2: size of uLong
- 5.4: size of voidpf (pointer)
- 7.6: size of z_off_t
-
- Compiler, assembler, and debug options:
- 8: ZLIB_DEBUG
- 9: ASMV or ASMINF -- use ASM code
- 10: ZLIB_WINAPI -- exported functions use the WINAPI calling convention
- 11: 0 (reserved)
-
- One-time table building (smaller code, but not thread-safe if true):
- 12: BUILDFIXED -- build static block decoding tables when needed
- 13: DYNAMIC_CRC_TABLE -- build CRC calculation tables when needed
- 14,15: 0 (reserved)
-
- Library content (indicates missing functionality):
- 16: NO_GZCOMPRESS -- gz* functions cannot compress (to avoid linking
- deflate code when not needed)
- 17: NO_GZIP -- deflate can't write gzip streams, and inflate can't detect
- and decode gzip streams (to avoid linking crc code)
- 18-19: 0 (reserved)
-
- Operation variations (changes in library functionality):
- 20: PKZIP_BUG_WORKAROUND -- slightly more permissive inflate
- 21: FASTEST -- deflate algorithm with only one, lowest compression level
- 22,23: 0 (reserved)
-
- The sprintf variant used by gzprintf (zero is best):
- 24: 0 = vs*, 1 = s* -- 1 means limited to 20 arguments after the format
- 25: 0 = *nprintf, 1 = *printf -- 1 means gzprintf() not secure!
- 26: 0 = returns value, 1 = void -- 1 means inferred string length returned
-
- Remainder:
- 27-31: 0 (reserved)
- */
-
-#endif /* !Z_FREETYPE */
-
-#ifndef Z_SOLO
-
- /* utility functions */
-
-/*
- The following utility functions are implemented on top of the basic
- stream-oriented functions. To simplify the interface, some default options
- are assumed (compression level and memory usage, standard memory allocation
- functions). The source code of these utility functions can be modified if
- you need special options.
-*/
-
-ZEXTERN int ZEXPORT compress OF((Bytef *dest, uLongf *destLen,
- const Bytef *source, uLong sourceLen));
-/*
- Compresses the source buffer into the destination buffer. sourceLen is
- the byte length of the source buffer. Upon entry, destLen is the total size
- of the destination buffer, which must be at least the value returned by
- compressBound(sourceLen). Upon exit, destLen is the actual size of the
- compressed data. compress() is equivalent to compress2() with a level
- parameter of Z_DEFAULT_COMPRESSION.
-
- compress returns Z_OK if success, Z_MEM_ERROR if there was not
- enough memory, Z_BUF_ERROR if there was not enough room in the output
- buffer.
-*/
-
-ZEXTERN int ZEXPORT compress2 OF((Bytef *dest, uLongf *destLen,
- const Bytef *source, uLong sourceLen,
- int level));
-/*
- Compresses the source buffer into the destination buffer. The level
- parameter has the same meaning as in deflateInit. sourceLen is the byte
- length of the source buffer. Upon entry, destLen is the total size of the
- destination buffer, which must be at least the value returned by
- compressBound(sourceLen). Upon exit, destLen is the actual size of the
- compressed data.
-
- compress2 returns Z_OK if success, Z_MEM_ERROR if there was not enough
- memory, Z_BUF_ERROR if there was not enough room in the output buffer,
- Z_STREAM_ERROR if the level parameter is invalid.
-*/
-
-ZEXTERN uLong ZEXPORT compressBound OF((uLong sourceLen));
-/*
- compressBound() returns an upper bound on the compressed size after
- compress() or compress2() on sourceLen bytes. It would be used before a
- compress() or compress2() call to allocate the destination buffer.
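-
-   A minimal one-shot compression sketch using these utilities (assuming
- <stdlib.h> is included and src and src_len describe the input):
-
-     uLong bound = compressBound(src_len);
-     uLongf out_len = bound;
-     Bytef *out = (Bytef *)malloc(bound);
-     if (out == Z_NULL ||
-         compress2(out, &out_len, src, src_len, Z_BEST_COMPRESSION) != Z_OK)
-         return -1;
-     /* out[0..out_len-1] now holds the compressed data */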
-*/
-
-ZEXTERN int ZEXPORT uncompress OF((Bytef *dest, uLongf *destLen,
- const Bytef *source, uLong sourceLen));
-/*
- Decompresses the source buffer into the destination buffer. sourceLen is
- the byte length of the source buffer. Upon entry, destLen is the total size
- of the destination buffer, which must be large enough to hold the entire
- uncompressed data. (The size of the uncompressed data must have been saved
- previously by the compressor and transmitted to the decompressor by some
- mechanism outside the scope of this compression library.) Upon exit, destLen
- is the actual size of the uncompressed data.
-
- uncompress returns Z_OK if success, Z_MEM_ERROR if there was not
- enough memory, Z_BUF_ERROR if there was not enough room in the output
- buffer, or Z_DATA_ERROR if the input data was corrupted or incomplete. In
- the case where there is not enough room, uncompress() will fill the output
- buffer with the uncompressed data up to that point.
-*/
-
-ZEXTERN int ZEXPORT uncompress2 OF((Bytef *dest, uLongf *destLen,
- const Bytef *source, uLong *sourceLen));
-/*
- Same as uncompress, except that sourceLen is a pointer, where the
- length of the source is *sourceLen. On return, *sourceLen is the number of
- source bytes consumed.
-*/
-
- /* gzip file access functions */
-
-/*
- This library supports reading and writing files in gzip (.gz) format with
- an interface similar to that of stdio, using the functions that start with
- "gz". The gzip format is different from the zlib format. gzip is a gzip
- wrapper, documented in RFC 1952, wrapped around a deflate stream.
-*/
-
-typedef struct gzFile_s *gzFile; /* semi-opaque gzip file descriptor */
-
-/*
-ZEXTERN gzFile ZEXPORT gzopen OF((const char *path, const char *mode));
-
- Open the gzip (.gz) file at path for reading and decompressing, or
- compressing and writing. The mode parameter is as in fopen ("rb" or "wb")
- but can also include a compression level ("wb9") or a strategy: 'f' for
- filtered data as in "wb6f", 'h' for Huffman-only compression as in "wb1h",
- 'R' for run-length encoding as in "wb1R", or 'F' for fixed code compression
- as in "wb9F". (See the description of deflateInit2 for more information
- about the strategy parameter.) 'T' will request transparent writing or
- appending with no compression and not using the gzip format.
-
- "a" can be used instead of "w" to request that the gzip stream that will
- be written be appended to the file. "+" will result in an error, since
- reading and writing to the same gzip file is not supported. The addition of
- "x" when writing will create the file exclusively, which fails if the file
- already exists. On systems that support it, the addition of "e" when
- reading or writing will set the flag to close the file on an execve() call.
-
- These functions, as well as gzip, will read and decode a sequence of gzip
- streams in a file. The append function of gzopen() can be used to create
- such a file. (Also see gzflush() for another way to do this.) When
- appending, gzopen does not test whether the file begins with a gzip stream,
- nor does it look for the end of the gzip streams to begin appending. gzopen
- will simply append a gzip stream to the existing file.
-
- gzopen can be used to read a file which is not in gzip format; in this
- case gzread will directly read from the file without decompression. When
- reading, this will be detected automatically by looking for the magic two-
- byte gzip header.
-
- gzopen returns NULL if the file could not be opened, if there was
- insufficient memory to allocate the gzFile state, or if an invalid mode was
- specified (an 'r', 'w', or 'a' was not provided, or '+' was provided).
- errno can be checked to determine if the reason gzopen failed was that the
- file could not be opened.
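-
-   For example (a sketch; the file name and settings are arbitrary):
-
-     gzFile out = gzopen("log.txt.gz", "wb9");  /* write, compression level 9 */
-     if (out == Z_NULL)
-         return -1;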
-*/
-
-ZEXTERN gzFile ZEXPORT gzdopen OF((int fd, const char *mode));
-/*
- Associate a gzFile with the file descriptor fd. File descriptors are
- obtained from calls like open, dup, creat, pipe or fileno (if the file has
- been previously opened with fopen). The mode parameter is as in gzopen.
-
- The next call of gzclose on the returned gzFile will also close the file
- descriptor fd, just like fclose(fdopen(fd, mode)) closes the file descriptor
- fd. If you want to keep fd open, use fd = dup(fd_keep); gz = gzdopen(fd,
- mode);. The duplicated descriptor should be saved to avoid a leak, since
- gzdopen does not close fd if it fails. If you are using fileno() to get the
- file descriptor from a FILE *, then you will have to use dup() to avoid
- double-close()ing the file descriptor. Both gzclose() and fclose() will
- close the associated file descriptor, so they need to have different file
- descriptors.
-
- gzdopen returns NULL if there was insufficient memory to allocate the
- gzFile state, if an invalid mode was specified (an 'r', 'w', or 'a' was not
- provided, or '+' was provided), or if fd is -1. The file descriptor is not
- used until the next gz* read, write, seek, or close operation, so gzdopen
- will not detect if fd is invalid (unless fd is -1).
-*/
-
-ZEXTERN int ZEXPORT gzbuffer OF((gzFile file, unsigned size));
-/*
- Set the internal buffer size used by this library's functions for file to
- size. The default buffer size is 8192 bytes. This function must be called
- after gzopen() or gzdopen(), and before any other calls that read or write
- the file. The buffer memory allocation is always deferred to the first read
- or write. Three times that size in buffer space is allocated. A larger
- buffer size of, for example, 64K or 128K bytes will noticeably increase the
- speed of decompression (reading).
-
- The new buffer size also affects the maximum length for gzprintf().
-
- gzbuffer() returns 0 on success, or -1 on failure, such as being called
- too late.
-*/
-
-ZEXTERN int ZEXPORT gzsetparams OF((gzFile file, int level, int strategy));
-/*
- Dynamically update the compression level and strategy for file. See the
- description of deflateInit2 for the meaning of these parameters. Previously
- provided data is flushed before applying the parameter changes.
-
- gzsetparams returns Z_OK if success, Z_STREAM_ERROR if the file was not
- opened for writing, Z_ERRNO if there is an error writing the flushed data,
- or Z_MEM_ERROR if there is a memory allocation error.
-*/
-
-ZEXTERN int ZEXPORT gzread OF((gzFile file, voidp buf, unsigned len));
-/*
- Read and decompress up to len uncompressed bytes from file into buf. If
- the input file is not in gzip format, gzread copies the given number of
- bytes into the buffer directly from the file.
-
- After reaching the end of a gzip stream in the input, gzread will continue
- to read, looking for another gzip stream. Any number of gzip streams may be
- concatenated in the input file, and will all be decompressed by gzread().
- If something other than a gzip stream is encountered after a gzip stream,
- that remaining trailing garbage is ignored (and no error is returned).
-
- gzread can be used to read a gzip file that is being concurrently written.
- Upon reaching the end of the input, gzread will return with the available
- data. If the error code returned by gzerror is Z_OK or Z_BUF_ERROR, then
- gzclearerr can be used to clear the end of file indicator in order to permit
- gzread to be tried again. Z_OK indicates that a gzip stream was completed
- on the last gzread. Z_BUF_ERROR indicates that the input file ended in the
- middle of a gzip stream. Note that gzread does not return -1 in the event
- of an incomplete gzip stream. This error is deferred until gzclose(), which
- will return Z_BUF_ERROR if the last gzread ended in the middle of a gzip
- stream. Alternatively, gzerror can be used before gzclose to detect this
- case.
-
- gzread returns the number of uncompressed bytes actually read, less than
- len for end of file, or -1 for error. If len is too large to fit in an int,
- then nothing is read, -1 is returned, and the error state is set to
- Z_STREAM_ERROR.
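-
-   A typical read loop might look like this sketch (assuming <stdio.h> is
- included and path names an existing file):
-
-     gzFile in = gzopen(path, "rb");
-     char buf[8192];
-     int n;
-     if (in == Z_NULL)
-         return -1;
-     while ((n = gzread(in, buf, sizeof(buf))) > 0)
-         fwrite(buf, 1, n, stdout);
-     if (n < 0 || gzclose(in) != Z_OK)
-         return -1;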
-*/
-
-ZEXTERN z_size_t ZEXPORT gzfread OF((voidp buf, z_size_t size, z_size_t nitems,
- gzFile file));
-/*
- Read and decompress up to nitems items of size size from file into buf,
- otherwise operating as gzread() does. This duplicates the interface of
- stdio's fread(), with size_t request and return types. If the library
- defines size_t, then z_size_t is identical to size_t. If not, then z_size_t
- is an unsigned integer type that can contain a pointer.
-
- gzfread() returns the number of full items read of size size, or zero if
- the end of the file was reached and a full item could not be read, or if
- there was an error. gzerror() must be consulted if zero is returned in
- order to determine if there was an error. If the multiplication of size and
- nitems overflows, i.e. the product does not fit in a z_size_t, then nothing
- is read, zero is returned, and the error state is set to Z_STREAM_ERROR.
-
- In the event that the end of file is reached and only a partial item is
- available at the end, i.e. the remaining uncompressed data length is not a
- multiple of size, then the final partial item is nevertheless read into buf
- and the end-of-file flag is set. The length of the partial item read is not
- provided, but could be inferred from the result of gztell(). This behavior
- is the same as the behavior of fread() implementations in common libraries,
- but it prevents the direct use of gzfread() to read a concurrently written
- file, resetting and retrying on end-of-file, when size is not 1.
-*/
-
-ZEXTERN int ZEXPORT gzwrite OF((gzFile file, voidpc buf, unsigned len));
-/*
- Compress and write the len uncompressed bytes at buf to file. gzwrite
- returns the number of uncompressed bytes written or 0 in case of error.
-*/
-
-ZEXTERN z_size_t ZEXPORT gzfwrite OF((voidpc buf, z_size_t size,
- z_size_t nitems, gzFile file));
-/*
- Compress and write nitems items of size size from buf to file, duplicating
- the interface of stdio's fwrite(), with size_t request and return types. If
- the library defines size_t, then z_size_t is identical to size_t. If not,
- then z_size_t is an unsigned integer type that can contain a pointer.
-
- gzfwrite() returns the number of full items written of size size, or zero
- if there was an error. If the multiplication of size and nitems overflows,
- i.e. the product does not fit in a z_size_t, then nothing is written, zero
- is returned, and the error state is set to Z_STREAM_ERROR.
-*/
-
-ZEXTERN int ZEXPORTVA gzprintf Z_ARG((gzFile file, const char *format, ...));
-/*
- Convert, format, compress, and write the arguments (...) to file under
- control of the string format, as in fprintf. gzprintf returns the number of
- uncompressed bytes actually written, or a negative zlib error code in case
- of error. The number of uncompressed bytes written is limited to 8191, or
- one less than the buffer size given to gzbuffer(). The caller should assure
- that this limit is not exceeded. If it is exceeded, then gzprintf() will
- return an error (0) with nothing written. In this case, there may also be a
- buffer overflow with unpredictable consequences, which is possible only if
- zlib was compiled with the insecure functions sprintf() or vsprintf(),
- because the secure snprintf() or vsnprintf() functions were not available.
- This can be determined using zlibCompileFlags().
-*/
-
-ZEXTERN int ZEXPORT gzputs OF((gzFile file, const char *s));
-/*
- Compress and write the given null-terminated string s to file, excluding
- the terminating null character.
-
- gzputs returns the number of characters written, or -1 in case of error.
-*/
-
-ZEXTERN char * ZEXPORT gzgets OF((gzFile file, char *buf, int len));
-/*
- Read and decompress bytes from file into buf, until len-1 characters are
- read, or until a newline character is read and transferred to buf, or an
- end-of-file condition is encountered. If any characters are read or if len
- is one, the string is terminated with a null character. If no characters
- are read due to an end-of-file or len is less than one, then the buffer is
- left untouched.
-
- gzgets returns buf which is a null-terminated string, or it returns NULL
- for end-of-file or in case of error. If there was an error, the contents at
- buf are indeterminate.
-*/
-
-ZEXTERN int ZEXPORT gzputc OF((gzFile file, int c));
-/*
- Compress and write c, converted to an unsigned char, into file. gzputc
- returns the value that was written, or -1 in case of error.
-*/
-
-ZEXTERN int ZEXPORT gzgetc OF((gzFile file));
-/*
- Read and decompress one byte from file. gzgetc returns this byte or -1
- in case of end of file or error. This is implemented as a macro for speed.
- As such, it does not do all of the checking the other functions do. I.e.
- it does not check to see if file is NULL, nor whether the structure file
- points to has been clobbered or not.
-*/
-
-ZEXTERN int ZEXPORT gzungetc OF((int c, gzFile file));
-/*
- Push c back onto the stream for file to be read as the first character on
- the next read. At least one character of push-back is always allowed.
- gzungetc() returns the character pushed, or -1 on failure. gzungetc() will
- fail if c is -1, and may fail if a character has been pushed but not read
- yet. If gzungetc is used immediately after gzopen or gzdopen, at least the
- output buffer size of pushed characters is allowed. (See gzbuffer above.)
- The pushed character will be discarded if the stream is repositioned with
- gzseek() or gzrewind().
-*/
-
-ZEXTERN int ZEXPORT gzflush OF((gzFile file, int flush));
-/*
- Flush all pending output to file. The parameter flush is as in the
- deflate() function. The return value is the zlib error number (see function
- gzerror below). gzflush is only permitted when writing.
-
- If the flush parameter is Z_FINISH, the remaining data is written and the
- gzip stream is completed in the output. If gzwrite() is called again, a new
- gzip stream will be started in the output. gzread() is able to read such
- concatenated gzip streams.
-
- gzflush should be called only when strictly necessary because it will
- degrade compression if called too often.
-*/
-
-/*
-ZEXTERN z_off_t ZEXPORT gzseek OF((gzFile file,
- z_off_t offset, int whence));
-
- Set the starting position to offset relative to whence for the next gzread
- or gzwrite on file. The offset represents a number of bytes in the
- uncompressed data stream. The whence parameter is defined as in lseek(2);
- the value SEEK_END is not supported.
-
- If the file is opened for reading, this function is emulated but can be
- extremely slow. If the file is opened for writing, only forward seeks are
- supported; gzseek then compresses a sequence of zeroes up to the new
- starting position.
-
- gzseek returns the resulting offset location as measured in bytes from
- the beginning of the uncompressed stream, or -1 in case of error, in
- particular if the file is opened for writing and the new starting position
- would be before the current position.
-*/
-
-ZEXTERN int ZEXPORT gzrewind OF((gzFile file));
-/*
- Rewind file. This function is supported only for reading.
-
- gzrewind(file) is equivalent to (int)gzseek(file, 0L, SEEK_SET).
-*/
-
-/*
-ZEXTERN z_off_t ZEXPORT gztell OF((gzFile file));
-
- Return the starting position for the next gzread or gzwrite on file.
- This position represents a number of bytes in the uncompressed data stream,
- and is zero when starting, even if appending or reading a gzip stream from
- the middle of a file using gzdopen().
-
- gztell(file) is equivalent to gzseek(file, 0L, SEEK_CUR)
-*/
-
-/*
-ZEXTERN z_off_t ZEXPORT gzoffset OF((gzFile file));
-
- Return the current compressed (actual) read or write offset of file. This
- offset includes the count of bytes that precede the gzip stream, for example
- when appending or when using gzdopen() for reading. When reading, the
- offset does not include as yet unused buffered input. This information can
- be used for a progress indicator. On error, gzoffset() returns -1.
-*/
-
-ZEXTERN int ZEXPORT gzeof OF((gzFile file));
-/*
- Return true (1) if the end-of-file indicator for file has been set while
- reading, false (0) otherwise. Note that the end-of-file indicator is set
- only if the read tried to go past the end of the input, but came up short.
- Therefore, just like feof(), gzeof() may return false even if there is no
- more data to read, in the event that the last read request was for the exact
- number of bytes remaining in the input file. This will happen if the input
- file size is an exact multiple of the buffer size.
-
- If gzeof() returns true, then the read functions will return no more data,
- unless the end-of-file indicator is reset by gzclearerr() and the input file
- has grown since the previous end of file was detected.
-*/
-
-ZEXTERN int ZEXPORT gzdirect OF((gzFile file));
-/*
- Return true (1) if file is being copied directly while reading, or false
- (0) if file is a gzip stream being decompressed.
-
- If the input file is empty, gzdirect() will return true, since the input
- does not contain a gzip stream.
-
- If gzdirect() is used immediately after gzopen() or gzdopen() it will
- cause buffers to be allocated to allow reading the file to determine if it
- is a gzip file. Therefore if gzbuffer() is used, it should be called before
- gzdirect().
-
- When writing, gzdirect() returns true (1) if transparent writing was
- requested ("wT" for the gzopen() mode), or false (0) otherwise. (Note:
- gzdirect() is not needed when writing. Transparent writing must be
- explicitly requested, so the application already knows the answer. When
- linking statically, using gzdirect() will include all of the zlib code for
- gzip file reading and decompression, which may not be desired.)
-*/
-
-ZEXTERN int ZEXPORT gzclose OF((gzFile file));
-/*
- Flush all pending output for file, if necessary, close file and
- deallocate the (de)compression state. Note that once file is closed, you
- cannot call gzerror with file, since its structures have been deallocated.
- gzclose must not be called more than once on the same file, just as free
- must not be called more than once on the same allocation.
-
- gzclose will return Z_STREAM_ERROR if file is not valid, Z_ERRNO on a
- file operation error, Z_MEM_ERROR if out of memory, Z_BUF_ERROR if the
- last read ended in the middle of a gzip stream, or Z_OK on success.
-*/
-
-ZEXTERN int ZEXPORT gzclose_r OF((gzFile file));
-ZEXTERN int ZEXPORT gzclose_w OF((gzFile file));
-/*
- Same as gzclose(), but gzclose_r() is only for use when reading, and
- gzclose_w() is only for use when writing or appending. The advantage to
- using these instead of gzclose() is that they avoid linking in zlib
- compression or decompression code that is not used when only reading or only
- writing respectively. If gzclose() is used, then both compression and
- decompression code will be included in the application when linking to a static
- zlib library.
-*/
-
-ZEXTERN const char * ZEXPORT gzerror OF((gzFile file, int *errnum));
-/*
- Return the error message for the last error which occurred on file.
- errnum is set to the zlib error number. If an error occurred in the file system
- and not in the compression library, errnum is set to Z_ERRNO and the
- application may consult errno to get the exact error code.
-
- The application must not modify the returned string. Future calls to
- this function may invalidate the previously returned string. If file is
- closed, then the string previously returned by gzerror will no longer be
- available.
-
- gzerror() should be used to distinguish errors from end-of-file for those
- functions above that do not distinguish those cases in their return values.
-*/
-
-ZEXTERN void ZEXPORT gzclearerr OF((gzFile file));
-/*
- Clear the error and end-of-file flags for file. This is analogous to the
- clearerr() function in stdio. This is useful for continuing to read a gzip
- file that is being written concurrently.
-*/
-
-#endif /* !Z_SOLO */
-
- /* checksum functions */
-
-/*
- These functions are not related to compression but are exported
- anyway because they might be useful in applications using the compression
- library.
-*/
-
-ZEXTERN uLong ZEXPORT adler32 OF((uLong adler, const Bytef *buf, uInt len));
-/*
- Update a running Adler-32 checksum with the bytes buf[0..len-1] and
- return the updated checksum. An Adler-32 value is in the range of a 32-bit
- unsigned integer. If buf is Z_NULL, this function returns the required
- initial value for the checksum.
-
- An Adler-32 checksum is almost as reliable as a CRC-32 but can be computed
- much faster.
-
- Usage example:
-
- uLong adler = adler32(0L, Z_NULL, 0);
-
- while (read_buffer(buffer, length) != EOF) {
- adler = adler32(adler, buffer, length);
- }
- if (adler != original_adler) error();
-*/
-
-ZEXTERN uLong ZEXPORT adler32_z OF((uLong adler, const Bytef *buf,
- z_size_t len));
-/*
- Same as adler32(), but with a size_t length.
-*/
-
-/*
-ZEXTERN uLong ZEXPORT adler32_combine OF((uLong adler1, uLong adler2,
- z_off_t len2));
-
- Combine two Adler-32 checksums into one. For two sequences of bytes, seq1
- and seq2 with lengths len1 and len2, Adler-32 checksums were calculated for
- each, adler1 and adler2. adler32_combine() returns the Adler-32 checksum of
- seq1 and seq2 concatenated, requiring only adler1, adler2, and len2. Note
- that the z_off_t type (like off_t) is a signed integer. If len2 is
- negative, the result has no meaning or utility.
-*/
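A minimal usage sketch, assuming two buffers whose checksums were computed independently (buffer and length names are illustrative):

    /* Adler-32 of part1 followed by part2, computed from the per-part sums.
     * len2 is the length of the second buffer. */
    uLong a1 = adler32(adler32(0L, Z_NULL, 0), part1, len1);
    uLong a2 = adler32(adler32(0L, Z_NULL, 0), part2, len2);
    uLong combined = adler32_combine(a1, a2, (z_off_t)len2);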
-
-ZEXTERN uLong ZEXPORT crc32 OF((uLong crc, const Bytef *buf, uInt len));
-/*
- Update a running CRC-32 with the bytes buf[0..len-1] and return the
- updated CRC-32. A CRC-32 value is in the range of a 32-bit unsigned integer.
- If buf is Z_NULL, this function returns the required initial value for the
- crc. Pre- and post-conditioning (one's complement) is performed within this
- function so it shouldn't be done by the application.
-
- Usage example:
-
- uLong crc = crc32(0L, Z_NULL, 0);
-
- while (read_buffer(buffer, length) != EOF) {
- crc = crc32(crc, buffer, length);
- }
- if (crc != original_crc) error();
-*/
-
-#ifndef Z_FREETYPE
-
-ZEXTERN uLong ZEXPORT crc32_z OF((uLong crc, const Bytef *buf,
- z_size_t len));
-/*
- Same as crc32(), but with a size_t length.
-*/
-
-/*
-ZEXTERN uLong ZEXPORT crc32_combine OF((uLong crc1, uLong crc2, z_off_t len2));
-
- Combine two CRC-32 check values into one. For two sequences of bytes,
- seq1 and seq2 with lengths len1 and len2, CRC-32 check values were
- calculated for each, crc1 and crc2. crc32_combine() returns the CRC-32
- check value of seq1 and seq2 concatenated, requiring only crc1, crc2, and
- len2.
-*/
-
-/*
-ZEXTERN uLong ZEXPORT crc32_combine_gen OF((z_off_t len2));
-
- Return the operator corresponding to length len2, to be used with
- crc32_combine_op().
-*/
-
-ZEXTERN uLong ZEXPORT crc32_combine_op OF((uLong crc1, uLong crc2, uLong op));
-/*
-   Give the same result as crc32_combine(), using op in place of len2. op
- is generated from len2 by crc32_combine_gen(). This will be faster than
- crc32_combine() if the generated op is used more than once.
-*/
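Where many blocks of the same length are combined, the length-dependent operator can be generated once and reused; a sketch with illustrative names:

    /* Precompute the operator for a fixed block length, then combine cheaply. */
    uLong op = crc32_combine_gen((z_off_t)block_len);
    crc = crc32_combine_op(crc, crc_block1, op);
    crc = crc32_combine_op(crc, crc_block2, op);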
-
-
- /* various hacks, don't look :) */
-
-/* deflateInit and inflateInit are macros to allow checking the zlib version
- * and the compiler's view of z_stream:
- */
-ZEXTERN int ZEXPORT deflateInit_ OF((z_streamp strm, int level,
- const char *version, int stream_size));
-ZEXTERN int ZEXPORT inflateInit_ OF((z_streamp strm,
- const char *version, int stream_size));
-ZEXTERN int ZEXPORT deflateInit2_ OF((z_streamp strm, int level, int method,
- int windowBits, int memLevel,
- int strategy, const char *version,
- int stream_size));
-ZEXTERN int ZEXPORT inflateInit2_ OF((z_streamp strm, int windowBits,
- const char *version, int stream_size));
-ZEXTERN int ZEXPORT inflateBackInit_ OF((z_streamp strm, int windowBits,
- unsigned char FAR *window,
- const char *version,
- int stream_size));
-#ifdef Z_PREFIX_SET
-# define z_deflateInit(strm, level) \
- deflateInit_((strm), (level), ZLIB_VERSION, (int)sizeof(z_stream))
-# define z_inflateInit(strm) \
- inflateInit_((strm), ZLIB_VERSION, (int)sizeof(z_stream))
-# define z_deflateInit2(strm, level, method, windowBits, memLevel, strategy) \
- deflateInit2_((strm),(level),(method),(windowBits),(memLevel),\
- (strategy), ZLIB_VERSION, (int)sizeof(z_stream))
-# define z_inflateInit2(strm, windowBits) \
- inflateInit2_((strm), (windowBits), ZLIB_VERSION, \
- (int)sizeof(z_stream))
-# define z_inflateBackInit(strm, windowBits, window) \
- inflateBackInit_((strm), (windowBits), (window), \
- ZLIB_VERSION, (int)sizeof(z_stream))
-#else
-# define deflateInit(strm, level) \
- deflateInit_((strm), (level), ZLIB_VERSION, (int)sizeof(z_stream))
-# define inflateInit(strm) \
- inflateInit_((strm), ZLIB_VERSION, (int)sizeof(z_stream))
-# define deflateInit2(strm, level, method, windowBits, memLevel, strategy) \
- deflateInit2_((strm),(level),(method),(windowBits),(memLevel),\
- (strategy), ZLIB_VERSION, (int)sizeof(z_stream))
-# define inflateInit2(strm, windowBits) \
- inflateInit2_((strm), (windowBits), ZLIB_VERSION, \
- (int)sizeof(z_stream))
-# define inflateBackInit(strm, windowBits, window) \
- inflateBackInit_((strm), (windowBits), (window), \
- ZLIB_VERSION, (int)sizeof(z_stream))
-#endif
-
-#else /* Z_FREETYPE */
-
-
-ZEXTERN int ZEXPORT inflateInit2_ OF((z_streamp strm, int windowBits,
- const char *version, int stream_size));
-
-# define inflateInit2(strm, windowBits) \
- inflateInit2_((strm), (windowBits), ZLIB_VERSION, \
- (int)sizeof(z_stream))
-
-#endif /* Z_FREETYPE */
-
-
-#ifndef Z_SOLO
-
-/* gzgetc() macro and its supporting function and exposed data structure. Note
- * that the real internal state is much larger than the exposed structure.
- * This abbreviated structure exposes just enough for the gzgetc() macro. The
- * user should not mess with these exposed elements, since their names or
- * behavior could change in the future, perhaps even capriciously. They can
- * only be used by the gzgetc() macro. You have been warned.
- */
-struct gzFile_s {
- unsigned have;
- unsigned char *next;
- z_off64_t pos;
-};
-ZEXTERN int ZEXPORT gzgetc_ OF((gzFile file)); /* backward compatibility */
-#ifdef Z_PREFIX_SET
-# undef z_gzgetc
-# define z_gzgetc(g) \
- ((g)->have ? ((g)->have--, (g)->pos++, *((g)->next)++) : (gzgetc)(g))
-#else
-# define gzgetc(g) \
- ((g)->have ? ((g)->have--, (g)->pos++, *((g)->next)++) : (gzgetc)(g))
-#endif
-
-/* provide 64-bit offset functions if _LARGEFILE64_SOURCE defined, and/or
- * change the regular functions to 64 bits if _FILE_OFFSET_BITS is 64 (if
- * both are true, the application gets the *64 functions, and the regular
- * functions are changed to 64 bits) -- in case these are set on systems
- * without large file support, _LFS64_LARGEFILE must also be true
- */
-#ifdef Z_LARGE64
- ZEXTERN gzFile ZEXPORT gzopen64 OF((const char *, const char *));
- ZEXTERN z_off64_t ZEXPORT gzseek64 OF((gzFile, z_off64_t, int));
- ZEXTERN z_off64_t ZEXPORT gztell64 OF((gzFile));
- ZEXTERN z_off64_t ZEXPORT gzoffset64 OF((gzFile));
- ZEXTERN uLong ZEXPORT adler32_combine64 OF((uLong, uLong, z_off64_t));
- ZEXTERN uLong ZEXPORT crc32_combine64 OF((uLong, uLong, z_off64_t));
- ZEXTERN uLong ZEXPORT crc32_combine_gen64 OF((z_off64_t));
-#endif
-
-#if !defined(ZLIB_INTERNAL) && defined(Z_WANT64)
-# ifdef Z_PREFIX_SET
-# define z_gzopen z_gzopen64
-# define z_gzseek z_gzseek64
-# define z_gztell z_gztell64
-# define z_gzoffset z_gzoffset64
-# define z_adler32_combine z_adler32_combine64
-# define z_crc32_combine z_crc32_combine64
-# define z_crc32_combine_gen z_crc32_combine_gen64
-# else
-# define gzopen gzopen64
-# define gzseek gzseek64
-# define gztell gztell64
-# define gzoffset gzoffset64
-# define adler32_combine adler32_combine64
-# define crc32_combine crc32_combine64
-# define crc32_combine_gen crc32_combine_gen64
-# endif
-# ifndef Z_LARGE64
- ZEXTERN gzFile ZEXPORT gzopen64 OF((const char *, const char *));
- ZEXTERN z_off_t ZEXPORT gzseek64 OF((gzFile, z_off_t, int));
- ZEXTERN z_off_t ZEXPORT gztell64 OF((gzFile));
- ZEXTERN z_off_t ZEXPORT gzoffset64 OF((gzFile));
- ZEXTERN uLong ZEXPORT adler32_combine64 OF((uLong, uLong, z_off_t));
- ZEXTERN uLong ZEXPORT crc32_combine64 OF((uLong, uLong, z_off_t));
- ZEXTERN uLong ZEXPORT crc32_combine_gen64 OF((z_off_t));
-# endif
-#else
- ZEXTERN gzFile ZEXPORT gzopen OF((const char *, const char *));
- ZEXTERN z_off_t ZEXPORT gzseek OF((gzFile, z_off_t, int));
- ZEXTERN z_off_t ZEXPORT gztell OF((gzFile));
- ZEXTERN z_off_t ZEXPORT gzoffset OF((gzFile));
- ZEXTERN uLong ZEXPORT adler32_combine OF((uLong, uLong, z_off_t));
- ZEXTERN uLong ZEXPORT crc32_combine OF((uLong, uLong, z_off_t));
- ZEXTERN uLong ZEXPORT crc32_combine_gen OF((z_off_t));
-#endif
-
-#else /* Z_SOLO */
-
-#ifndef Z_FREETYPE
- ZEXTERN uLong ZEXPORT adler32_combine OF((uLong, uLong, z_off_t));
- ZEXTERN uLong ZEXPORT crc32_combine OF((uLong, uLong, z_off_t));
- ZEXTERN uLong ZEXPORT crc32_combine_gen OF((z_off_t));
-#endif
-
-#endif /* !Z_SOLO */
-
-/* undocumented functions */
-#ifndef Z_FREETYPE
-ZEXTERN const char * ZEXPORT zError OF((int));
-ZEXTERN int ZEXPORT inflateSyncPoint OF((z_streamp));
-ZEXTERN const z_crc_t FAR * ZEXPORT get_crc_table OF((void));
-ZEXTERN int ZEXPORT inflateUndermine OF((z_streamp, int));
-ZEXTERN int ZEXPORT inflateValidate OF((z_streamp, int));
-ZEXTERN unsigned long ZEXPORT inflateCodesUsed OF ((z_streamp));
-ZEXTERN int ZEXPORT inflateResetKeep OF((z_streamp));
-ZEXTERN int ZEXPORT deflateResetKeep OF((z_streamp));
-#if defined(_WIN32) && !defined(Z_SOLO)
-ZEXTERN gzFile ZEXPORT gzopen_w OF((const wchar_t *path,
- const char *mode));
-#endif
-#if defined(STDC) || defined(Z_HAVE_STDARG_H)
-# ifndef Z_SOLO
-ZEXTERN int ZEXPORTVA gzvprintf Z_ARG((gzFile file,
- const char *format,
- va_list va));
-# endif
-#endif
-#endif /* !Z_FREETYPE */
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* ZLIB_H */
diff --git a/thirdparty/freetype/src/gzip/zutil.c b/thirdparty/freetype/src/gzip/zutil.c
deleted file mode 100644
index a19ac2b96d..0000000000
--- a/thirdparty/freetype/src/gzip/zutil.c
+++ /dev/null
@@ -1,325 +0,0 @@
-/* zutil.c -- target dependent utility functions for the compression library
- * Copyright (C) 1995-2017 Jean-loup Gailly
- * For conditions of distribution and use, see copyright notice in zlib.h
- */
-
-/* @(#) $Id$ */
-
-#include "zutil.h"
-#ifndef Z_SOLO
-# include "gzguts.h"
-#endif
-
-z_const char * const z_errmsg[10] = {
- (z_const char *)"need dictionary", /* Z_NEED_DICT 2 */
- (z_const char *)"stream end", /* Z_STREAM_END 1 */
- (z_const char *)"", /* Z_OK 0 */
- (z_const char *)"file error", /* Z_ERRNO (-1) */
- (z_const char *)"stream error", /* Z_STREAM_ERROR (-2) */
- (z_const char *)"data error", /* Z_DATA_ERROR (-3) */
- (z_const char *)"insufficient memory", /* Z_MEM_ERROR (-4) */
- (z_const char *)"buffer error", /* Z_BUF_ERROR (-5) */
- (z_const char *)"incompatible version",/* Z_VERSION_ERROR (-6) */
- (z_const char *)""
-};
-
-
-const char * ZEXPORT zlibVersion()
-{
- return ZLIB_VERSION;
-}
-
-uLong ZEXPORT zlibCompileFlags()
-{
- uLong flags;
-
- flags = 0;
- switch ((int)(sizeof(uInt))) {
- case 2: break;
- case 4: flags += 1; break;
- case 8: flags += 2; break;
- default: flags += 3;
- }
- switch ((int)(sizeof(uLong))) {
- case 2: break;
- case 4: flags += 1 << 2; break;
- case 8: flags += 2 << 2; break;
- default: flags += 3 << 2;
- }
- switch ((int)(sizeof(voidpf))) {
- case 2: break;
- case 4: flags += 1 << 4; break;
- case 8: flags += 2 << 4; break;
- default: flags += 3 << 4;
- }
- switch ((int)(sizeof(z_off_t))) {
- case 2: break;
- case 4: flags += 1 << 6; break;
- case 8: flags += 2 << 6; break;
- default: flags += 3 << 6;
- }
-#ifdef ZLIB_DEBUG
- flags += 1 << 8;
-#endif
-#if defined(ASMV) || defined(ASMINF)
- flags += 1 << 9;
-#endif
-#ifdef ZLIB_WINAPI
- flags += 1 << 10;
-#endif
-#ifdef BUILDFIXED
- flags += 1 << 12;
-#endif
-#ifdef DYNAMIC_CRC_TABLE
- flags += 1 << 13;
-#endif
-#ifdef NO_GZCOMPRESS
- flags += 1L << 16;
-#endif
-#ifdef NO_GZIP
- flags += 1L << 17;
-#endif
-#ifdef PKZIP_BUG_WORKAROUND
- flags += 1L << 20;
-#endif
-#ifdef FASTEST
- flags += 1L << 21;
-#endif
-#if defined(STDC) || defined(Z_HAVE_STDARG_H)
-# ifdef NO_vsnprintf
- flags += 1L << 25;
-# ifdef HAS_vsprintf_void
- flags += 1L << 26;
-# endif
-# else
-# ifdef HAS_vsnprintf_void
- flags += 1L << 26;
-# endif
-# endif
-#else
- flags += 1L << 24;
-# ifdef NO_snprintf
- flags += 1L << 25;
-# ifdef HAS_sprintf_void
- flags += 1L << 26;
-# endif
-# else
-# ifdef HAS_snprintf_void
- flags += 1L << 26;
-# endif
-# endif
-#endif
- return flags;
-}
-
-#ifdef ZLIB_DEBUG
-#include <stdlib.h>
-# ifndef verbose
-# define verbose 0
-# endif
-int ZLIB_INTERNAL z_verbose = verbose;
-
-void ZLIB_INTERNAL z_error (
- char *m)
-{
- fprintf(stderr, "%s\n", m);
- exit(1);
-}
-#endif
-
-/* exported to allow conversion of error code to string for compress() and
- * uncompress()
- */
-const char * ZEXPORT zError(
- int err)
-{
- return ERR_MSG(err);
-}
-
-#if defined(_WIN32_WCE) && _WIN32_WCE < 0x800
- /* The older Microsoft C Run-Time Library for Windows CE doesn't have
- * errno. We define it as a global variable to simplify porting.
- * Its value is always 0 and should not be used.
- */
- int errno = 0;
-#endif
-
-#ifndef HAVE_MEMCPY
-
-void ZLIB_INTERNAL zmemcpy(
- Bytef* dest,
- const Bytef* source,
- uInt len)
-{
- if (len == 0) return;
- do {
- *dest++ = *source++; /* ??? to be unrolled */
- } while (--len != 0);
-}
-
-int ZLIB_INTERNAL zmemcmp(
- const Bytef* s1,
- const Bytef* s2,
- uInt len)
-{
- uInt j;
-
- for (j = 0; j < len; j++) {
- if (s1[j] != s2[j]) return 2*(s1[j] > s2[j])-1;
- }
- return 0;
-}
-
-void ZLIB_INTERNAL zmemzero(
- Bytef* dest,
- uInt len)
-{
- if (len == 0) return;
- do {
- *dest++ = 0; /* ??? to be unrolled */
- } while (--len != 0);
-}
-#endif
-
-#ifndef Z_SOLO
-
-#ifdef SYS16BIT
-
-#ifdef __TURBOC__
-/* Turbo C in 16-bit mode */
-
-# define MY_ZCALLOC
-
-/* Turbo C malloc() does not allow dynamic allocation of 64K bytes
- * and farmalloc(64K) returns a pointer with an offset of 8, so we
- * must fix the pointer. Warning: the pointer must be put back to its
- * original form in order to free it, use zcfree().
- */
-
-#define MAX_PTR 10
-/* 10*64K = 640K */
-
-local int next_ptr = 0;
-
-typedef struct ptr_table_s {
- voidpf org_ptr;
- voidpf new_ptr;
-} ptr_table;
-
-local ptr_table table[MAX_PTR];
-/* This table is used to remember the original form of pointers
- * to large buffers (64K). Such pointers are normalized with a zero offset.
- * Since MSDOS is not a preemptive multitasking OS, this table is not
- * protected from concurrent access. This hack doesn't work anyway on
- * a protected system like OS/2. Use Microsoft C instead.
- */
-
-voidpf ZLIB_INTERNAL zcalloc (voidpf opaque, unsigned items, unsigned size)
-{
- voidpf buf;
- ulg bsize = (ulg)items*size;
-
- (void)opaque;
-
- /* If we allocate less than 65520 bytes, we assume that farmalloc
- * will return a usable pointer which doesn't have to be normalized.
- */
- if (bsize < 65520L) {
- buf = farmalloc(bsize);
- if (*(ush*)&buf != 0) return buf;
- } else {
- buf = farmalloc(bsize + 16L);
- }
- if (buf == NULL || next_ptr >= MAX_PTR) return NULL;
- table[next_ptr].org_ptr = buf;
-
- /* Normalize the pointer to seg:0 */
- *((ush*)&buf+1) += ((ush)((uch*)buf-0) + 15) >> 4;
- *(ush*)&buf = 0;
- table[next_ptr++].new_ptr = buf;
- return buf;
-}
-
-void ZLIB_INTERNAL zcfree (voidpf opaque, voidpf ptr)
-{
- int n;
-
- (void)opaque;
-
- if (*(ush*)&ptr != 0) { /* object < 64K */
- farfree(ptr);
- return;
- }
- /* Find the original pointer */
- for (n = 0; n < next_ptr; n++) {
- if (ptr != table[n].new_ptr) continue;
-
- farfree(table[n].org_ptr);
- while (++n < next_ptr) {
- table[n-1] = table[n];
- }
- next_ptr--;
- return;
- }
- Assert(0, "zcfree: ptr not found");
-}
-
-#endif /* __TURBOC__ */
-
-
-#ifdef M_I86
-/* Microsoft C in 16-bit mode */
-
-# define MY_ZCALLOC
-
-#if (!defined(_MSC_VER) || (_MSC_VER <= 600))
-# define _halloc halloc
-# define _hfree hfree
-#endif
-
-voidpf ZLIB_INTERNAL zcalloc (voidpf opaque, uInt items, uInt size)
-{
- (void)opaque;
- return _halloc((long)items, size);
-}
-
-void ZLIB_INTERNAL zcfree (voidpf opaque, voidpf ptr)
-{
- (void)opaque;
- _hfree(ptr);
-}
-
-#endif /* M_I86 */
-
-#endif /* SYS16BIT */
-
-
-#ifndef MY_ZCALLOC /* Any system without a special alloc function */
-
-#ifndef STDC
-extern voidp malloc OF((uInt size));
-extern voidp calloc OF((uInt items, uInt size));
-extern void free OF((voidpf ptr));
-#endif
-
-voidpf ZLIB_INTERNAL zcalloc (
- voidpf opaque,
- unsigned items,
- unsigned size)
-{
- (void)opaque;
- return sizeof(uInt) > 2 ? (voidpf)malloc(items * size) :
- (voidpf)calloc(items, size);
-}
-
-void ZLIB_INTERNAL zcfree (
- voidpf opaque,
- voidpf ptr)
-{
- (void)opaque;
- free(ptr);
-}
-
-#endif /* MY_ZCALLOC */
-
-#endif /* !Z_SOLO */
diff --git a/thirdparty/freetype/src/gzip/zutil.h b/thirdparty/freetype/src/gzip/zutil.h
deleted file mode 100644
index 14f0f1a85e..0000000000
--- a/thirdparty/freetype/src/gzip/zutil.h
+++ /dev/null
@@ -1,278 +0,0 @@
-/* zutil.h -- internal interface and configuration of the compression library
- * Copyright (C) 1995-2022 Jean-loup Gailly, Mark Adler
- * For conditions of distribution and use, see copyright notice in zlib.h
- */
-
-/* WARNING: this file should *not* be used by applications. It is
- part of the implementation of the compression library and is
- subject to change. Applications should only use zlib.h.
- */
-
-/* @(#) $Id$ */
-
-#ifndef ZUTIL_H
-#define ZUTIL_H
-
-#ifdef HAVE_HIDDEN
-# define ZLIB_INTERNAL __attribute__((visibility ("hidden")))
-#else
-# define ZLIB_INTERNAL
-#endif
-
-#include "zlib.h"
-
-#if defined(STDC) && !defined(Z_SOLO)
-# if !(defined(_WIN32_WCE) && defined(_MSC_VER))
-# include <stddef.h>
-# endif
-# include <string.h>
-# include <stdlib.h>
-#endif
-
-#ifndef local
-# define local static
-#endif
-/* since "static" is used to mean two completely different things in C, we
- define "local" for the non-static meaning of "static", for readability
- (compile with -Dlocal if your debugger can't find static symbols) */
-
-typedef unsigned char uch;
-typedef uch FAR uchf;
-typedef unsigned short ush;
-typedef ush FAR ushf;
-typedef unsigned long ulg;
-
-#if !defined(Z_U8) && !defined(Z_SOLO) && defined(STDC)
-# include <limits.h>
-# if (ULONG_MAX == 0xffffffffffffffff)
-# define Z_U8 unsigned long
-# elif (ULLONG_MAX == 0xffffffffffffffff)
-# define Z_U8 unsigned long long
-# elif (UINT_MAX == 0xffffffffffffffff)
-# define Z_U8 unsigned
-# endif
-#endif
-
-extern z_const char * const z_errmsg[10]; /* indexed by 2-zlib_error */
-/* (size given to avoid silly warnings with Visual C++) */
-
-#define ERR_MSG(err) z_errmsg[Z_NEED_DICT-(err)]
-
-#define ERR_RETURN(strm,err) \
- return (strm->msg = ERR_MSG(err), (err))
-/* To be used only when the state is known to be valid */
-
- /* common constants */
-
-#ifndef DEF_WBITS
-# define DEF_WBITS MAX_WBITS
-#endif
-/* default windowBits for decompression. MAX_WBITS is for compression only */
-
-#if MAX_MEM_LEVEL >= 8
-# define DEF_MEM_LEVEL 8
-#else
-# define DEF_MEM_LEVEL MAX_MEM_LEVEL
-#endif
-/* default memLevel */
-
-#define STORED_BLOCK 0
-#define STATIC_TREES 1
-#define DYN_TREES 2
-/* The three kinds of block type */
-
-#define MIN_MATCH 3
-#define MAX_MATCH 258
-/* The minimum and maximum match lengths */
-
-#define PRESET_DICT 0x20 /* preset dictionary flag in zlib header */
-
- /* target dependencies */
-
-#if defined(MSDOS) || (defined(WINDOWS) && !defined(WIN32))
-# define OS_CODE 0x00
-# ifndef Z_SOLO
-# if defined(__TURBOC__) || defined(__BORLANDC__)
-# if (__STDC__ == 1) && (defined(__LARGE__) || defined(__COMPACT__))
- /* Allow compilation with ANSI keywords only enabled */
- void _Cdecl farfree( void *block );
- void *_Cdecl farmalloc( unsigned long nbytes );
-# else
-# include <alloc.h>
-# endif
-# else /* MSC or DJGPP */
-# include <malloc.h>
-# endif
-# endif
-#endif
-
-#ifdef AMIGA
-# define OS_CODE 1
-#endif
-
-#if defined(VAXC) || defined(VMS)
-# define OS_CODE 2
-# define F_OPEN(name, mode) \
- fopen((name), (mode), "mbc=60", "ctx=stm", "rfm=fix", "mrs=512")
-#endif
-
-#ifdef __370__
-# if __TARGET_LIB__ < 0x20000000
-# define OS_CODE 4
-# elif __TARGET_LIB__ < 0x40000000
-# define OS_CODE 11
-# else
-# define OS_CODE 8
-# endif
-#endif
-
-#if defined(ATARI) || defined(atarist)
-# define OS_CODE 5
-#endif
-
-#ifdef OS2
-# define OS_CODE 6
-# if defined(M_I86) && !defined(Z_SOLO)
-# include <malloc.h>
-# endif
-#endif
-
-#if defined(MACOS) || defined(TARGET_OS_MAC)
-# define OS_CODE 7
-# ifndef Z_SOLO
-# if defined(__MWERKS__) && __dest_os != __be_os && __dest_os != __win32_os
-# include <unix.h> /* for fdopen */
-# else
-# ifndef fdopen
-# define fdopen(fd,mode) NULL /* No fdopen() */
-# endif
-# endif
-# endif
-#endif
-
-#ifdef __acorn
-# define OS_CODE 13
-#endif
-
-#if defined(WIN32) && !defined(__CYGWIN__)
-# define OS_CODE 10
-#endif
-
-#ifdef _BEOS_
-# define OS_CODE 16
-#endif
-
-#ifdef __TOS_OS400__
-# define OS_CODE 18
-#endif
-
-#ifdef __APPLE__
-# define OS_CODE 19
-#endif
-
-#if defined(_BEOS_) || defined(RISCOS)
-# define fdopen(fd,mode) NULL /* No fdopen() */
-#endif
-
-#if (defined(_MSC_VER) && (_MSC_VER > 600)) && !defined __INTERIX
-# if defined(_WIN32_WCE)
-# define fdopen(fd,mode) NULL /* No fdopen() */
-# else
-# define fdopen(fd,type) _fdopen(fd,type)
-# endif
-#endif
-
-#if defined(__BORLANDC__) && !defined(MSDOS)
- #pragma warn -8004
- #pragma warn -8008
- #pragma warn -8066
-#endif
-
-#ifndef Z_FREETYPE
-
-/* provide prototypes for these when building zlib without LFS */
-#if !defined(_WIN32) && \
- (!defined(_LARGEFILE64_SOURCE) || _LFS64_LARGEFILE-0 == 0)
- ZEXTERN uLong ZEXPORT adler32_combine64 OF((uLong, uLong, z_off_t));
- ZEXTERN uLong ZEXPORT crc32_combine64 OF((uLong, uLong, z_off_t));
-#endif
-
-#endif /* !Z_FREETYPE */
-
- /* common defaults */
-
-#ifndef OS_CODE
-# define OS_CODE 3 /* assume Unix */
-#endif
-
-#ifndef F_OPEN
-# define F_OPEN(name, mode) fopen((name), (mode))
-#endif
-
- /* functions */
-
-#if defined(pyr) || defined(Z_SOLO)
-# define NO_MEMCPY
-#endif
-#if defined(SMALL_MEDIUM) && !defined(_MSC_VER) && !defined(__SC__)
- /* Use our own functions for small and medium model with MSC <= 5.0.
- * You may have to use the same strategy for Borland C (untested).
- * The __SC__ check is for Symantec.
- */
-# define NO_MEMCPY
-#endif
-#if defined(STDC) && !defined(HAVE_MEMCPY) && !defined(NO_MEMCPY)
-# define HAVE_MEMCPY
-#endif
-#ifdef HAVE_MEMCPY
-# ifdef SMALL_MEDIUM /* MSDOS small or medium model */
-# define zmemcpy _fmemcpy
-# define zmemcmp _fmemcmp
-# define zmemzero(dest, len) _fmemset(dest, 0, len)
-# else
-# define zmemcpy ft_memcpy
-# define zmemcmp ft_memcmp
-# define zmemzero(dest, len) ft_memset(dest, 0, len)
-# endif
-#else
- void ZLIB_INTERNAL zmemcpy OF((Bytef* dest, const Bytef* source, uInt len));
- int ZLIB_INTERNAL zmemcmp OF((const Bytef* s1, const Bytef* s2, uInt len));
- void ZLIB_INTERNAL zmemzero OF((Bytef* dest, uInt len));
-#endif
-
-/* Diagnostic functions */
-#ifdef ZLIB_DEBUG
-# include <stdio.h>
- extern int ZLIB_INTERNAL z_verbose;
- extern void ZLIB_INTERNAL z_error OF((char *m));
-# define Assert(cond,msg) {if(!(cond)) z_error(msg);}
-# define Trace(x) {if (z_verbose>=0) fprintf x ;}
-# define Tracev(x) {if (z_verbose>0) fprintf x ;}
-# define Tracevv(x) {if (z_verbose>1) fprintf x ;}
-# define Tracec(c,x) {if (z_verbose>0 && (c)) fprintf x ;}
-# define Tracecv(c,x) {if (z_verbose>1 && (c)) fprintf x ;}
-#else
-# define Assert(cond,msg)
-# define Trace(x)
-# define Tracev(x)
-# define Tracevv(x)
-# define Tracec(c,x)
-# define Tracecv(c,x)
-#endif
-
-#ifndef Z_SOLO
- voidpf ZLIB_INTERNAL zcalloc OF((voidpf opaque, unsigned items,
- unsigned size));
- void ZLIB_INTERNAL zcfree OF((voidpf opaque, voidpf ptr));
-#endif
-
-#define ZALLOC(strm, items, size) \
- (*((strm)->zalloc))((strm)->opaque, (items), (size))
-#define ZFREE(strm, addr) (*((strm)->zfree))((strm)->opaque, (voidpf)(addr))
-#define TRY_FREE(s, p) {if (p) ZFREE(s, p);}
-
-/* Reverse the bytes in a 32-bit value */
-#define ZSWAP32(q) ((((q) >> 24) & 0xff) + (((q) >> 8) & 0xff00) + \
- (((q) & 0xff00) << 8) + (((q) & 0xff) << 24))
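As a quick worked example (not part of the original header), ZSWAP32(0x11223344UL) evaluates to 0x44332211UL, i.e. a straight reversal of the four bytes.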
-
-#endif /* ZUTIL_H */
diff --git a/thirdparty/glad/KHR/khrplatform.h b/thirdparty/glad/KHR/khrplatform.h
index dd22d92701..01646449ca 100644
--- a/thirdparty/glad/KHR/khrplatform.h
+++ b/thirdparty/glad/KHR/khrplatform.h
@@ -153,6 +153,20 @@ typedef int64_t khronos_int64_t;
typedef uint64_t khronos_uint64_t;
#define KHRONOS_SUPPORT_INT64 1
#define KHRONOS_SUPPORT_FLOAT 1
+/*
+ * To support platforms where unsigned long cannot be used interchangeably with
+ * intptr_t (e.g. CHERI-extended ISAs), we can use the stdint.h intptr_t.
+ * Ideally, we could just use (u)intptr_t everywhere, but this could result in
+ * ABI breakage if khronos_uintptr_t is changed from unsigned long to
+ * unsigned long long or similar (this results in different C++ name mangling).
+ * To avoid changes for existing platforms, we restrict usage of intptr_t to
+ * platforms where the size of a pointer is larger than the size of long.
+ */
+#if defined(__SIZEOF_LONG__) && defined(__SIZEOF_POINTER__)
+#if __SIZEOF_POINTER__ > __SIZEOF_LONG__
+#define KHRONOS_USE_INTPTR_T
+#endif
+#endif
#elif defined(__VMS ) || defined(__sgi)
@@ -235,14 +249,21 @@ typedef unsigned short int khronos_uint16_t;
* pointers are 64 bits, but 'long' is still 32 bits. Win64 appears
* to be the only LLP64 architecture in current use.
*/
-#ifdef _WIN64
+#ifdef KHRONOS_USE_INTPTR_T
+typedef intptr_t khronos_intptr_t;
+typedef uintptr_t khronos_uintptr_t;
+#elif defined(_WIN64)
typedef signed long long int khronos_intptr_t;
typedef unsigned long long int khronos_uintptr_t;
-typedef signed long long int khronos_ssize_t;
-typedef unsigned long long int khronos_usize_t;
#else
typedef signed long int khronos_intptr_t;
typedef unsigned long int khronos_uintptr_t;
+#endif
+
+#if defined(_WIN64)
+typedef signed long long int khronos_ssize_t;
+typedef unsigned long long int khronos_usize_t;
+#else
typedef signed long int khronos_ssize_t;
typedef unsigned long int khronos_usize_t;
#endif
diff --git a/thirdparty/glad/LICENSE b/thirdparty/glad/LICENSE
index b6e2ca25b0..4965a6bffc 100644
--- a/thirdparty/glad/LICENSE
+++ b/thirdparty/glad/LICENSE
@@ -1,20 +1,63 @@
-The MIT License (MIT)
-
-Copyright (c) 2013-2018 David Herberth
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software is furnished to do so,
-subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+The glad source code:
+
+ The MIT License (MIT)
+
+ Copyright (c) 2013-2022 David Herberth
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy of
+ this software and associated documentation files (the "Software"), to deal in
+ the Software without restriction, including without limitation the rights to
+ use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+ the Software, and to permit persons to whom the Software is furnished to do so,
+ subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+ FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+ COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+ IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+ CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+The Khronos Specifications:
+
+ Copyright (c) 2013-2020 The Khronos Group Inc.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
+The EGL Specification and various headers:
+
+ Copyright (c) 2007-2016 The Khronos Group Inc.
+
+ Permission is hereby granted, free of charge, to any person obtaining a
+ copy of this software and/or associated documentation files (the
+ "Materials"), to deal in the Materials without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Materials, and to
+ permit persons to whom the Materials are furnished to do so, subject to
+ the following conditions:
+
+ The above copyright notice and this permission notice shall be included
+ in all copies or substantial portions of the Materials.
+
+ THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
diff --git a/thirdparty/glad/gl.c b/thirdparty/glad/gl.c
new file mode 100644
index 0000000000..6be716284c
--- /dev/null
+++ b/thirdparty/glad/gl.c
@@ -0,0 +1,1995 @@
+/**
+ * SPDX-License-Identifier: (WTFPL OR CC0-1.0) AND Apache-2.0
+ */
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <glad/gl.h>
+
+#ifndef GLAD_IMPL_UTIL_C_
+#define GLAD_IMPL_UTIL_C_
+
+#ifdef _MSC_VER
+#define GLAD_IMPL_UTIL_SSCANF sscanf_s
+#else
+#define GLAD_IMPL_UTIL_SSCANF sscanf
+#endif
+
+#endif /* GLAD_IMPL_UTIL_C_ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+int GLAD_GL_VERSION_1_0 = 0;
+int GLAD_GL_VERSION_1_1 = 0;
+int GLAD_GL_VERSION_1_2 = 0;
+int GLAD_GL_VERSION_1_3 = 0;
+int GLAD_GL_VERSION_1_4 = 0;
+int GLAD_GL_VERSION_1_5 = 0;
+int GLAD_GL_VERSION_2_0 = 0;
+int GLAD_GL_VERSION_2_1 = 0;
+int GLAD_GL_VERSION_3_0 = 0;
+int GLAD_GL_VERSION_3_1 = 0;
+int GLAD_GL_VERSION_3_2 = 0;
+int GLAD_GL_VERSION_3_3 = 0;
+int GLAD_GL_ARB_debug_output = 0;
+int GLAD_GL_ARB_framebuffer_object = 0;
+int GLAD_GL_EXT_framebuffer_blit = 0;
+int GLAD_GL_EXT_framebuffer_multisample = 0;
+int GLAD_GL_EXT_framebuffer_object = 0;
+int GLAD_GL_OVR_multiview = 0;
+int GLAD_GL_OVR_multiview2 = 0;
+
+
+
+PFNGLACCUMPROC glad_glAccum = NULL;
+PFNGLACTIVETEXTUREPROC glad_glActiveTexture = NULL;
+PFNGLALPHAFUNCPROC glad_glAlphaFunc = NULL;
+PFNGLARETEXTURESRESIDENTPROC glad_glAreTexturesResident = NULL;
+PFNGLARRAYELEMENTPROC glad_glArrayElement = NULL;
+PFNGLATTACHSHADERPROC glad_glAttachShader = NULL;
+PFNGLBEGINPROC glad_glBegin = NULL;
+PFNGLBEGINCONDITIONALRENDERPROC glad_glBeginConditionalRender = NULL;
+PFNGLBEGINQUERYPROC glad_glBeginQuery = NULL;
+PFNGLBEGINTRANSFORMFEEDBACKPROC glad_glBeginTransformFeedback = NULL;
+PFNGLBINDATTRIBLOCATIONPROC glad_glBindAttribLocation = NULL;
+PFNGLBINDBUFFERPROC glad_glBindBuffer = NULL;
+PFNGLBINDBUFFERBASEPROC glad_glBindBufferBase = NULL;
+PFNGLBINDBUFFERRANGEPROC glad_glBindBufferRange = NULL;
+PFNGLBINDFRAGDATALOCATIONPROC glad_glBindFragDataLocation = NULL;
+PFNGLBINDFRAGDATALOCATIONINDEXEDPROC glad_glBindFragDataLocationIndexed = NULL;
+PFNGLBINDFRAMEBUFFERPROC glad_glBindFramebuffer = NULL;
+PFNGLBINDFRAMEBUFFEREXTPROC glad_glBindFramebufferEXT = NULL;
+PFNGLBINDRENDERBUFFERPROC glad_glBindRenderbuffer = NULL;
+PFNGLBINDRENDERBUFFEREXTPROC glad_glBindRenderbufferEXT = NULL;
+PFNGLBINDSAMPLERPROC glad_glBindSampler = NULL;
+PFNGLBINDTEXTUREPROC glad_glBindTexture = NULL;
+PFNGLBINDVERTEXARRAYPROC glad_glBindVertexArray = NULL;
+PFNGLBITMAPPROC glad_glBitmap = NULL;
+PFNGLBLENDCOLORPROC glad_glBlendColor = NULL;
+PFNGLBLENDEQUATIONPROC glad_glBlendEquation = NULL;
+PFNGLBLENDEQUATIONSEPARATEPROC glad_glBlendEquationSeparate = NULL;
+PFNGLBLENDFUNCPROC glad_glBlendFunc = NULL;
+PFNGLBLENDFUNCSEPARATEPROC glad_glBlendFuncSeparate = NULL;
+PFNGLBLITFRAMEBUFFERPROC glad_glBlitFramebuffer = NULL;
+PFNGLBLITFRAMEBUFFEREXTPROC glad_glBlitFramebufferEXT = NULL;
+PFNGLBUFFERDATAPROC glad_glBufferData = NULL;
+PFNGLBUFFERSUBDATAPROC glad_glBufferSubData = NULL;
+PFNGLCALLLISTPROC glad_glCallList = NULL;
+PFNGLCALLLISTSPROC glad_glCallLists = NULL;
+PFNGLCHECKFRAMEBUFFERSTATUSPROC glad_glCheckFramebufferStatus = NULL;
+PFNGLCHECKFRAMEBUFFERSTATUSEXTPROC glad_glCheckFramebufferStatusEXT = NULL;
+PFNGLCLAMPCOLORPROC glad_glClampColor = NULL;
+PFNGLCLEARPROC glad_glClear = NULL;
+PFNGLCLEARACCUMPROC glad_glClearAccum = NULL;
+PFNGLCLEARBUFFERFIPROC glad_glClearBufferfi = NULL;
+PFNGLCLEARBUFFERFVPROC glad_glClearBufferfv = NULL;
+PFNGLCLEARBUFFERIVPROC glad_glClearBufferiv = NULL;
+PFNGLCLEARBUFFERUIVPROC glad_glClearBufferuiv = NULL;
+PFNGLCLEARCOLORPROC glad_glClearColor = NULL;
+PFNGLCLEARDEPTHPROC glad_glClearDepth = NULL;
+PFNGLCLEARINDEXPROC glad_glClearIndex = NULL;
+PFNGLCLEARSTENCILPROC glad_glClearStencil = NULL;
+PFNGLCLIENTACTIVETEXTUREPROC glad_glClientActiveTexture = NULL;
+PFNGLCLIENTWAITSYNCPROC glad_glClientWaitSync = NULL;
+PFNGLCLIPPLANEPROC glad_glClipPlane = NULL;
+PFNGLCOLOR3BPROC glad_glColor3b = NULL;
+PFNGLCOLOR3BVPROC glad_glColor3bv = NULL;
+PFNGLCOLOR3DPROC glad_glColor3d = NULL;
+PFNGLCOLOR3DVPROC glad_glColor3dv = NULL;
+PFNGLCOLOR3FPROC glad_glColor3f = NULL;
+PFNGLCOLOR3FVPROC glad_glColor3fv = NULL;
+PFNGLCOLOR3IPROC glad_glColor3i = NULL;
+PFNGLCOLOR3IVPROC glad_glColor3iv = NULL;
+PFNGLCOLOR3SPROC glad_glColor3s = NULL;
+PFNGLCOLOR3SVPROC glad_glColor3sv = NULL;
+PFNGLCOLOR3UBPROC glad_glColor3ub = NULL;
+PFNGLCOLOR3UBVPROC glad_glColor3ubv = NULL;
+PFNGLCOLOR3UIPROC glad_glColor3ui = NULL;
+PFNGLCOLOR3UIVPROC glad_glColor3uiv = NULL;
+PFNGLCOLOR3USPROC glad_glColor3us = NULL;
+PFNGLCOLOR3USVPROC glad_glColor3usv = NULL;
+PFNGLCOLOR4BPROC glad_glColor4b = NULL;
+PFNGLCOLOR4BVPROC glad_glColor4bv = NULL;
+PFNGLCOLOR4DPROC glad_glColor4d = NULL;
+PFNGLCOLOR4DVPROC glad_glColor4dv = NULL;
+PFNGLCOLOR4FPROC glad_glColor4f = NULL;
+PFNGLCOLOR4FVPROC glad_glColor4fv = NULL;
+PFNGLCOLOR4IPROC glad_glColor4i = NULL;
+PFNGLCOLOR4IVPROC glad_glColor4iv = NULL;
+PFNGLCOLOR4SPROC glad_glColor4s = NULL;
+PFNGLCOLOR4SVPROC glad_glColor4sv = NULL;
+PFNGLCOLOR4UBPROC glad_glColor4ub = NULL;
+PFNGLCOLOR4UBVPROC glad_glColor4ubv = NULL;
+PFNGLCOLOR4UIPROC glad_glColor4ui = NULL;
+PFNGLCOLOR4UIVPROC glad_glColor4uiv = NULL;
+PFNGLCOLOR4USPROC glad_glColor4us = NULL;
+PFNGLCOLOR4USVPROC glad_glColor4usv = NULL;
+PFNGLCOLORMASKPROC glad_glColorMask = NULL;
+PFNGLCOLORMASKIPROC glad_glColorMaski = NULL;
+PFNGLCOLORMATERIALPROC glad_glColorMaterial = NULL;
+PFNGLCOLORP3UIPROC glad_glColorP3ui = NULL;
+PFNGLCOLORP3UIVPROC glad_glColorP3uiv = NULL;
+PFNGLCOLORP4UIPROC glad_glColorP4ui = NULL;
+PFNGLCOLORP4UIVPROC glad_glColorP4uiv = NULL;
+PFNGLCOLORPOINTERPROC glad_glColorPointer = NULL;
+PFNGLCOMPILESHADERPROC glad_glCompileShader = NULL;
+PFNGLCOMPRESSEDTEXIMAGE1DPROC glad_glCompressedTexImage1D = NULL;
+PFNGLCOMPRESSEDTEXIMAGE2DPROC glad_glCompressedTexImage2D = NULL;
+PFNGLCOMPRESSEDTEXIMAGE3DPROC glad_glCompressedTexImage3D = NULL;
+PFNGLCOMPRESSEDTEXSUBIMAGE1DPROC glad_glCompressedTexSubImage1D = NULL;
+PFNGLCOMPRESSEDTEXSUBIMAGE2DPROC glad_glCompressedTexSubImage2D = NULL;
+PFNGLCOMPRESSEDTEXSUBIMAGE3DPROC glad_glCompressedTexSubImage3D = NULL;
+PFNGLCOPYBUFFERSUBDATAPROC glad_glCopyBufferSubData = NULL;
+PFNGLCOPYPIXELSPROC glad_glCopyPixels = NULL;
+PFNGLCOPYTEXIMAGE1DPROC glad_glCopyTexImage1D = NULL;
+PFNGLCOPYTEXIMAGE2DPROC glad_glCopyTexImage2D = NULL;
+PFNGLCOPYTEXSUBIMAGE1DPROC glad_glCopyTexSubImage1D = NULL;
+PFNGLCOPYTEXSUBIMAGE2DPROC glad_glCopyTexSubImage2D = NULL;
+PFNGLCOPYTEXSUBIMAGE3DPROC glad_glCopyTexSubImage3D = NULL;
+PFNGLCREATEPROGRAMPROC glad_glCreateProgram = NULL;
+PFNGLCREATESHADERPROC glad_glCreateShader = NULL;
+PFNGLCULLFACEPROC glad_glCullFace = NULL;
+PFNGLDEBUGMESSAGECALLBACKARBPROC glad_glDebugMessageCallbackARB = NULL;
+PFNGLDEBUGMESSAGECONTROLARBPROC glad_glDebugMessageControlARB = NULL;
+PFNGLDEBUGMESSAGEINSERTARBPROC glad_glDebugMessageInsertARB = NULL;
+PFNGLDELETEBUFFERSPROC glad_glDeleteBuffers = NULL;
+PFNGLDELETEFRAMEBUFFERSPROC glad_glDeleteFramebuffers = NULL;
+PFNGLDELETEFRAMEBUFFERSEXTPROC glad_glDeleteFramebuffersEXT = NULL;
+PFNGLDELETELISTSPROC glad_glDeleteLists = NULL;
+PFNGLDELETEPROGRAMPROC glad_glDeleteProgram = NULL;
+PFNGLDELETEQUERIESPROC glad_glDeleteQueries = NULL;
+PFNGLDELETERENDERBUFFERSPROC glad_glDeleteRenderbuffers = NULL;
+PFNGLDELETERENDERBUFFERSEXTPROC glad_glDeleteRenderbuffersEXT = NULL;
+PFNGLDELETESAMPLERSPROC glad_glDeleteSamplers = NULL;
+PFNGLDELETESHADERPROC glad_glDeleteShader = NULL;
+PFNGLDELETESYNCPROC glad_glDeleteSync = NULL;
+PFNGLDELETETEXTURESPROC glad_glDeleteTextures = NULL;
+PFNGLDELETEVERTEXARRAYSPROC glad_glDeleteVertexArrays = NULL;
+PFNGLDEPTHFUNCPROC glad_glDepthFunc = NULL;
+PFNGLDEPTHMASKPROC glad_glDepthMask = NULL;
+PFNGLDEPTHRANGEPROC glad_glDepthRange = NULL;
+PFNGLDETACHSHADERPROC glad_glDetachShader = NULL;
+PFNGLDISABLEPROC glad_glDisable = NULL;
+PFNGLDISABLECLIENTSTATEPROC glad_glDisableClientState = NULL;
+PFNGLDISABLEVERTEXATTRIBARRAYPROC glad_glDisableVertexAttribArray = NULL;
+PFNGLDISABLEIPROC glad_glDisablei = NULL;
+PFNGLDRAWARRAYSPROC glad_glDrawArrays = NULL;
+PFNGLDRAWARRAYSINSTANCEDPROC glad_glDrawArraysInstanced = NULL;
+PFNGLDRAWBUFFERPROC glad_glDrawBuffer = NULL;
+PFNGLDRAWBUFFERSPROC glad_glDrawBuffers = NULL;
+PFNGLDRAWELEMENTSPROC glad_glDrawElements = NULL;
+PFNGLDRAWELEMENTSBASEVERTEXPROC glad_glDrawElementsBaseVertex = NULL;
+PFNGLDRAWELEMENTSINSTANCEDPROC glad_glDrawElementsInstanced = NULL;
+PFNGLDRAWELEMENTSINSTANCEDBASEVERTEXPROC glad_glDrawElementsInstancedBaseVertex = NULL;
+PFNGLDRAWPIXELSPROC glad_glDrawPixels = NULL;
+PFNGLDRAWRANGEELEMENTSPROC glad_glDrawRangeElements = NULL;
+PFNGLDRAWRANGEELEMENTSBASEVERTEXPROC glad_glDrawRangeElementsBaseVertex = NULL;
+PFNGLEDGEFLAGPROC glad_glEdgeFlag = NULL;
+PFNGLEDGEFLAGPOINTERPROC glad_glEdgeFlagPointer = NULL;
+PFNGLEDGEFLAGVPROC glad_glEdgeFlagv = NULL;
+PFNGLENABLEPROC glad_glEnable = NULL;
+PFNGLENABLECLIENTSTATEPROC glad_glEnableClientState = NULL;
+PFNGLENABLEVERTEXATTRIBARRAYPROC glad_glEnableVertexAttribArray = NULL;
+PFNGLENABLEIPROC glad_glEnablei = NULL;
+PFNGLENDPROC glad_glEnd = NULL;
+PFNGLENDCONDITIONALRENDERPROC glad_glEndConditionalRender = NULL;
+PFNGLENDLISTPROC glad_glEndList = NULL;
+PFNGLENDQUERYPROC glad_glEndQuery = NULL;
+PFNGLENDTRANSFORMFEEDBACKPROC glad_glEndTransformFeedback = NULL;
+PFNGLEVALCOORD1DPROC glad_glEvalCoord1d = NULL;
+PFNGLEVALCOORD1DVPROC glad_glEvalCoord1dv = NULL;
+PFNGLEVALCOORD1FPROC glad_glEvalCoord1f = NULL;
+PFNGLEVALCOORD1FVPROC glad_glEvalCoord1fv = NULL;
+PFNGLEVALCOORD2DPROC glad_glEvalCoord2d = NULL;
+PFNGLEVALCOORD2DVPROC glad_glEvalCoord2dv = NULL;
+PFNGLEVALCOORD2FPROC glad_glEvalCoord2f = NULL;
+PFNGLEVALCOORD2FVPROC glad_glEvalCoord2fv = NULL;
+PFNGLEVALMESH1PROC glad_glEvalMesh1 = NULL;
+PFNGLEVALMESH2PROC glad_glEvalMesh2 = NULL;
+PFNGLEVALPOINT1PROC glad_glEvalPoint1 = NULL;
+PFNGLEVALPOINT2PROC glad_glEvalPoint2 = NULL;
+PFNGLFEEDBACKBUFFERPROC glad_glFeedbackBuffer = NULL;
+PFNGLFENCESYNCPROC glad_glFenceSync = NULL;
+PFNGLFINISHPROC glad_glFinish = NULL;
+PFNGLFLUSHPROC glad_glFlush = NULL;
+PFNGLFLUSHMAPPEDBUFFERRANGEPROC glad_glFlushMappedBufferRange = NULL;
+PFNGLFOGCOORDPOINTERPROC glad_glFogCoordPointer = NULL;
+PFNGLFOGCOORDDPROC glad_glFogCoordd = NULL;
+PFNGLFOGCOORDDVPROC glad_glFogCoorddv = NULL;
+PFNGLFOGCOORDFPROC glad_glFogCoordf = NULL;
+PFNGLFOGCOORDFVPROC glad_glFogCoordfv = NULL;
+PFNGLFOGFPROC glad_glFogf = NULL;
+PFNGLFOGFVPROC glad_glFogfv = NULL;
+PFNGLFOGIPROC glad_glFogi = NULL;
+PFNGLFOGIVPROC glad_glFogiv = NULL;
+PFNGLFRAMEBUFFERRENDERBUFFERPROC glad_glFramebufferRenderbuffer = NULL;
+PFNGLFRAMEBUFFERRENDERBUFFEREXTPROC glad_glFramebufferRenderbufferEXT = NULL;
+PFNGLFRAMEBUFFERTEXTUREPROC glad_glFramebufferTexture = NULL;
+PFNGLFRAMEBUFFERTEXTURE1DPROC glad_glFramebufferTexture1D = NULL;
+PFNGLFRAMEBUFFERTEXTURE1DEXTPROC glad_glFramebufferTexture1DEXT = NULL;
+PFNGLFRAMEBUFFERTEXTURE2DPROC glad_glFramebufferTexture2D = NULL;
+PFNGLFRAMEBUFFERTEXTURE2DEXTPROC glad_glFramebufferTexture2DEXT = NULL;
+PFNGLFRAMEBUFFERTEXTURE3DPROC glad_glFramebufferTexture3D = NULL;
+PFNGLFRAMEBUFFERTEXTURE3DEXTPROC glad_glFramebufferTexture3DEXT = NULL;
+PFNGLFRAMEBUFFERTEXTURELAYERPROC glad_glFramebufferTextureLayer = NULL;
+PFNGLFRAMEBUFFERTEXTUREMULTIVIEWOVRPROC glad_glFramebufferTextureMultiviewOVR = NULL;
+PFNGLFRONTFACEPROC glad_glFrontFace = NULL;
+PFNGLFRUSTUMPROC glad_glFrustum = NULL;
+PFNGLGENBUFFERSPROC glad_glGenBuffers = NULL;
+PFNGLGENFRAMEBUFFERSPROC glad_glGenFramebuffers = NULL;
+PFNGLGENFRAMEBUFFERSEXTPROC glad_glGenFramebuffersEXT = NULL;
+PFNGLGENLISTSPROC glad_glGenLists = NULL;
+PFNGLGENQUERIESPROC glad_glGenQueries = NULL;
+PFNGLGENRENDERBUFFERSPROC glad_glGenRenderbuffers = NULL;
+PFNGLGENRENDERBUFFERSEXTPROC glad_glGenRenderbuffersEXT = NULL;
+PFNGLGENSAMPLERSPROC glad_glGenSamplers = NULL;
+PFNGLGENTEXTURESPROC glad_glGenTextures = NULL;
+PFNGLGENVERTEXARRAYSPROC glad_glGenVertexArrays = NULL;
+PFNGLGENERATEMIPMAPPROC glad_glGenerateMipmap = NULL;
+PFNGLGENERATEMIPMAPEXTPROC glad_glGenerateMipmapEXT = NULL;
+PFNGLGETACTIVEATTRIBPROC glad_glGetActiveAttrib = NULL;
+PFNGLGETACTIVEUNIFORMPROC glad_glGetActiveUniform = NULL;
+PFNGLGETACTIVEUNIFORMBLOCKNAMEPROC glad_glGetActiveUniformBlockName = NULL;
+PFNGLGETACTIVEUNIFORMBLOCKIVPROC glad_glGetActiveUniformBlockiv = NULL;
+PFNGLGETACTIVEUNIFORMNAMEPROC glad_glGetActiveUniformName = NULL;
+PFNGLGETACTIVEUNIFORMSIVPROC glad_glGetActiveUniformsiv = NULL;
+PFNGLGETATTACHEDSHADERSPROC glad_glGetAttachedShaders = NULL;
+PFNGLGETATTRIBLOCATIONPROC glad_glGetAttribLocation = NULL;
+PFNGLGETBOOLEANI_VPROC glad_glGetBooleani_v = NULL;
+PFNGLGETBOOLEANVPROC glad_glGetBooleanv = NULL;
+PFNGLGETBUFFERPARAMETERI64VPROC glad_glGetBufferParameteri64v = NULL;
+PFNGLGETBUFFERPARAMETERIVPROC glad_glGetBufferParameteriv = NULL;
+PFNGLGETBUFFERPOINTERVPROC glad_glGetBufferPointerv = NULL;
+PFNGLGETBUFFERSUBDATAPROC glad_glGetBufferSubData = NULL;
+PFNGLGETCLIPPLANEPROC glad_glGetClipPlane = NULL;
+PFNGLGETCOMPRESSEDTEXIMAGEPROC glad_glGetCompressedTexImage = NULL;
+PFNGLGETDEBUGMESSAGELOGARBPROC glad_glGetDebugMessageLogARB = NULL;
+PFNGLGETDOUBLEVPROC glad_glGetDoublev = NULL;
+PFNGLGETERRORPROC glad_glGetError = NULL;
+PFNGLGETFLOATVPROC glad_glGetFloatv = NULL;
+PFNGLGETFRAGDATAINDEXPROC glad_glGetFragDataIndex = NULL;
+PFNGLGETFRAGDATALOCATIONPROC glad_glGetFragDataLocation = NULL;
+PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVPROC glad_glGetFramebufferAttachmentParameteriv = NULL;
+PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVEXTPROC glad_glGetFramebufferAttachmentParameterivEXT = NULL;
+PFNGLGETINTEGER64I_VPROC glad_glGetInteger64i_v = NULL;
+PFNGLGETINTEGER64VPROC glad_glGetInteger64v = NULL;
+PFNGLGETINTEGERI_VPROC glad_glGetIntegeri_v = NULL;
+PFNGLGETINTEGERVPROC glad_glGetIntegerv = NULL;
+PFNGLGETLIGHTFVPROC glad_glGetLightfv = NULL;
+PFNGLGETLIGHTIVPROC glad_glGetLightiv = NULL;
+PFNGLGETMAPDVPROC glad_glGetMapdv = NULL;
+PFNGLGETMAPFVPROC glad_glGetMapfv = NULL;
+PFNGLGETMAPIVPROC glad_glGetMapiv = NULL;
+PFNGLGETMATERIALFVPROC glad_glGetMaterialfv = NULL;
+PFNGLGETMATERIALIVPROC glad_glGetMaterialiv = NULL;
+PFNGLGETMULTISAMPLEFVPROC glad_glGetMultisamplefv = NULL;
+PFNGLGETPIXELMAPFVPROC glad_glGetPixelMapfv = NULL;
+PFNGLGETPIXELMAPUIVPROC glad_glGetPixelMapuiv = NULL;
+PFNGLGETPIXELMAPUSVPROC glad_glGetPixelMapusv = NULL;
+PFNGLGETPOINTERVPROC glad_glGetPointerv = NULL;
+PFNGLGETPOLYGONSTIPPLEPROC glad_glGetPolygonStipple = NULL;
+PFNGLGETPROGRAMINFOLOGPROC glad_glGetProgramInfoLog = NULL;
+PFNGLGETPROGRAMIVPROC glad_glGetProgramiv = NULL;
+PFNGLGETQUERYOBJECTI64VPROC glad_glGetQueryObjecti64v = NULL;
+PFNGLGETQUERYOBJECTIVPROC glad_glGetQueryObjectiv = NULL;
+PFNGLGETQUERYOBJECTUI64VPROC glad_glGetQueryObjectui64v = NULL;
+PFNGLGETQUERYOBJECTUIVPROC glad_glGetQueryObjectuiv = NULL;
+PFNGLGETQUERYIVPROC glad_glGetQueryiv = NULL;
+PFNGLGETRENDERBUFFERPARAMETERIVPROC glad_glGetRenderbufferParameteriv = NULL;
+PFNGLGETRENDERBUFFERPARAMETERIVEXTPROC glad_glGetRenderbufferParameterivEXT = NULL;
+PFNGLGETSAMPLERPARAMETERIIVPROC glad_glGetSamplerParameterIiv = NULL;
+PFNGLGETSAMPLERPARAMETERIUIVPROC glad_glGetSamplerParameterIuiv = NULL;
+PFNGLGETSAMPLERPARAMETERFVPROC glad_glGetSamplerParameterfv = NULL;
+PFNGLGETSAMPLERPARAMETERIVPROC glad_glGetSamplerParameteriv = NULL;
+PFNGLGETSHADERINFOLOGPROC glad_glGetShaderInfoLog = NULL;
+PFNGLGETSHADERSOURCEPROC glad_glGetShaderSource = NULL;
+PFNGLGETSHADERIVPROC glad_glGetShaderiv = NULL;
+PFNGLGETSTRINGPROC glad_glGetString = NULL;
+PFNGLGETSTRINGIPROC glad_glGetStringi = NULL;
+PFNGLGETSYNCIVPROC glad_glGetSynciv = NULL;
+PFNGLGETTEXENVFVPROC glad_glGetTexEnvfv = NULL;
+PFNGLGETTEXENVIVPROC glad_glGetTexEnviv = NULL;
+PFNGLGETTEXGENDVPROC glad_glGetTexGendv = NULL;
+PFNGLGETTEXGENFVPROC glad_glGetTexGenfv = NULL;
+PFNGLGETTEXGENIVPROC glad_glGetTexGeniv = NULL;
+PFNGLGETTEXIMAGEPROC glad_glGetTexImage = NULL;
+PFNGLGETTEXLEVELPARAMETERFVPROC glad_glGetTexLevelParameterfv = NULL;
+PFNGLGETTEXLEVELPARAMETERIVPROC glad_glGetTexLevelParameteriv = NULL;
+PFNGLGETTEXPARAMETERIIVPROC glad_glGetTexParameterIiv = NULL;
+PFNGLGETTEXPARAMETERIUIVPROC glad_glGetTexParameterIuiv = NULL;
+PFNGLGETTEXPARAMETERFVPROC glad_glGetTexParameterfv = NULL;
+PFNGLGETTEXPARAMETERIVPROC glad_glGetTexParameteriv = NULL;
+PFNGLGETTRANSFORMFEEDBACKVARYINGPROC glad_glGetTransformFeedbackVarying = NULL;
+PFNGLGETUNIFORMBLOCKINDEXPROC glad_glGetUniformBlockIndex = NULL;
+PFNGLGETUNIFORMINDICESPROC glad_glGetUniformIndices = NULL;
+PFNGLGETUNIFORMLOCATIONPROC glad_glGetUniformLocation = NULL;
+PFNGLGETUNIFORMFVPROC glad_glGetUniformfv = NULL;
+PFNGLGETUNIFORMIVPROC glad_glGetUniformiv = NULL;
+PFNGLGETUNIFORMUIVPROC glad_glGetUniformuiv = NULL;
+PFNGLGETVERTEXATTRIBIIVPROC glad_glGetVertexAttribIiv = NULL;
+PFNGLGETVERTEXATTRIBIUIVPROC glad_glGetVertexAttribIuiv = NULL;
+PFNGLGETVERTEXATTRIBPOINTERVPROC glad_glGetVertexAttribPointerv = NULL;
+PFNGLGETVERTEXATTRIBDVPROC glad_glGetVertexAttribdv = NULL;
+PFNGLGETVERTEXATTRIBFVPROC glad_glGetVertexAttribfv = NULL;
+PFNGLGETVERTEXATTRIBIVPROC glad_glGetVertexAttribiv = NULL;
+PFNGLHINTPROC glad_glHint = NULL;
+PFNGLINDEXMASKPROC glad_glIndexMask = NULL;
+PFNGLINDEXPOINTERPROC glad_glIndexPointer = NULL;
+PFNGLINDEXDPROC glad_glIndexd = NULL;
+PFNGLINDEXDVPROC glad_glIndexdv = NULL;
+PFNGLINDEXFPROC glad_glIndexf = NULL;
+PFNGLINDEXFVPROC glad_glIndexfv = NULL;
+PFNGLINDEXIPROC glad_glIndexi = NULL;
+PFNGLINDEXIVPROC glad_glIndexiv = NULL;
+PFNGLINDEXSPROC glad_glIndexs = NULL;
+PFNGLINDEXSVPROC glad_glIndexsv = NULL;
+PFNGLINDEXUBPROC glad_glIndexub = NULL;
+PFNGLINDEXUBVPROC glad_glIndexubv = NULL;
+PFNGLINITNAMESPROC glad_glInitNames = NULL;
+PFNGLINTERLEAVEDARRAYSPROC glad_glInterleavedArrays = NULL;
+PFNGLISBUFFERPROC glad_glIsBuffer = NULL;
+PFNGLISENABLEDPROC glad_glIsEnabled = NULL;
+PFNGLISENABLEDIPROC glad_glIsEnabledi = NULL;
+PFNGLISFRAMEBUFFERPROC glad_glIsFramebuffer = NULL;
+PFNGLISFRAMEBUFFEREXTPROC glad_glIsFramebufferEXT = NULL;
+PFNGLISLISTPROC glad_glIsList = NULL;
+PFNGLISPROGRAMPROC glad_glIsProgram = NULL;
+PFNGLISQUERYPROC glad_glIsQuery = NULL;
+PFNGLISRENDERBUFFERPROC glad_glIsRenderbuffer = NULL;
+PFNGLISRENDERBUFFEREXTPROC glad_glIsRenderbufferEXT = NULL;
+PFNGLISSAMPLERPROC glad_glIsSampler = NULL;
+PFNGLISSHADERPROC glad_glIsShader = NULL;
+PFNGLISSYNCPROC glad_glIsSync = NULL;
+PFNGLISTEXTUREPROC glad_glIsTexture = NULL;
+PFNGLISVERTEXARRAYPROC glad_glIsVertexArray = NULL;
+PFNGLLIGHTMODELFPROC glad_glLightModelf = NULL;
+PFNGLLIGHTMODELFVPROC glad_glLightModelfv = NULL;
+PFNGLLIGHTMODELIPROC glad_glLightModeli = NULL;
+PFNGLLIGHTMODELIVPROC glad_glLightModeliv = NULL;
+PFNGLLIGHTFPROC glad_glLightf = NULL;
+PFNGLLIGHTFVPROC glad_glLightfv = NULL;
+PFNGLLIGHTIPROC glad_glLighti = NULL;
+PFNGLLIGHTIVPROC glad_glLightiv = NULL;
+PFNGLLINESTIPPLEPROC glad_glLineStipple = NULL;
+PFNGLLINEWIDTHPROC glad_glLineWidth = NULL;
+PFNGLLINKPROGRAMPROC glad_glLinkProgram = NULL;
+PFNGLLISTBASEPROC glad_glListBase = NULL;
+PFNGLLOADIDENTITYPROC glad_glLoadIdentity = NULL;
+PFNGLLOADMATRIXDPROC glad_glLoadMatrixd = NULL;
+PFNGLLOADMATRIXFPROC glad_glLoadMatrixf = NULL;
+PFNGLLOADNAMEPROC glad_glLoadName = NULL;
+PFNGLLOADTRANSPOSEMATRIXDPROC glad_glLoadTransposeMatrixd = NULL;
+PFNGLLOADTRANSPOSEMATRIXFPROC glad_glLoadTransposeMatrixf = NULL;
+PFNGLLOGICOPPROC glad_glLogicOp = NULL;
+PFNGLMAP1DPROC glad_glMap1d = NULL;
+PFNGLMAP1FPROC glad_glMap1f = NULL;
+PFNGLMAP2DPROC glad_glMap2d = NULL;
+PFNGLMAP2FPROC glad_glMap2f = NULL;
+PFNGLMAPBUFFERPROC glad_glMapBuffer = NULL;
+PFNGLMAPBUFFERRANGEPROC glad_glMapBufferRange = NULL;
+PFNGLMAPGRID1DPROC glad_glMapGrid1d = NULL;
+PFNGLMAPGRID1FPROC glad_glMapGrid1f = NULL;
+PFNGLMAPGRID2DPROC glad_glMapGrid2d = NULL;
+PFNGLMAPGRID2FPROC glad_glMapGrid2f = NULL;
+PFNGLMATERIALFPROC glad_glMaterialf = NULL;
+PFNGLMATERIALFVPROC glad_glMaterialfv = NULL;
+PFNGLMATERIALIPROC glad_glMateriali = NULL;
+PFNGLMATERIALIVPROC glad_glMaterialiv = NULL;
+PFNGLMATRIXMODEPROC glad_glMatrixMode = NULL;
+PFNGLMULTMATRIXDPROC glad_glMultMatrixd = NULL;
+PFNGLMULTMATRIXFPROC glad_glMultMatrixf = NULL;
+PFNGLMULTTRANSPOSEMATRIXDPROC glad_glMultTransposeMatrixd = NULL;
+PFNGLMULTTRANSPOSEMATRIXFPROC glad_glMultTransposeMatrixf = NULL;
+PFNGLMULTIDRAWARRAYSPROC glad_glMultiDrawArrays = NULL;
+PFNGLMULTIDRAWELEMENTSPROC glad_glMultiDrawElements = NULL;
+PFNGLMULTIDRAWELEMENTSBASEVERTEXPROC glad_glMultiDrawElementsBaseVertex = NULL;
+PFNGLMULTITEXCOORD1DPROC glad_glMultiTexCoord1d = NULL;
+PFNGLMULTITEXCOORD1DVPROC glad_glMultiTexCoord1dv = NULL;
+PFNGLMULTITEXCOORD1FPROC glad_glMultiTexCoord1f = NULL;
+PFNGLMULTITEXCOORD1FVPROC glad_glMultiTexCoord1fv = NULL;
+PFNGLMULTITEXCOORD1IPROC glad_glMultiTexCoord1i = NULL;
+PFNGLMULTITEXCOORD1IVPROC glad_glMultiTexCoord1iv = NULL;
+PFNGLMULTITEXCOORD1SPROC glad_glMultiTexCoord1s = NULL;
+PFNGLMULTITEXCOORD1SVPROC glad_glMultiTexCoord1sv = NULL;
+PFNGLMULTITEXCOORD2DPROC glad_glMultiTexCoord2d = NULL;
+PFNGLMULTITEXCOORD2DVPROC glad_glMultiTexCoord2dv = NULL;
+PFNGLMULTITEXCOORD2FPROC glad_glMultiTexCoord2f = NULL;
+PFNGLMULTITEXCOORD2FVPROC glad_glMultiTexCoord2fv = NULL;
+PFNGLMULTITEXCOORD2IPROC glad_glMultiTexCoord2i = NULL;
+PFNGLMULTITEXCOORD2IVPROC glad_glMultiTexCoord2iv = NULL;
+PFNGLMULTITEXCOORD2SPROC glad_glMultiTexCoord2s = NULL;
+PFNGLMULTITEXCOORD2SVPROC glad_glMultiTexCoord2sv = NULL;
+PFNGLMULTITEXCOORD3DPROC glad_glMultiTexCoord3d = NULL;
+PFNGLMULTITEXCOORD3DVPROC glad_glMultiTexCoord3dv = NULL;
+PFNGLMULTITEXCOORD3FPROC glad_glMultiTexCoord3f = NULL;
+PFNGLMULTITEXCOORD3FVPROC glad_glMultiTexCoord3fv = NULL;
+PFNGLMULTITEXCOORD3IPROC glad_glMultiTexCoord3i = NULL;
+PFNGLMULTITEXCOORD3IVPROC glad_glMultiTexCoord3iv = NULL;
+PFNGLMULTITEXCOORD3SPROC glad_glMultiTexCoord3s = NULL;
+PFNGLMULTITEXCOORD3SVPROC glad_glMultiTexCoord3sv = NULL;
+PFNGLMULTITEXCOORD4DPROC glad_glMultiTexCoord4d = NULL;
+PFNGLMULTITEXCOORD4DVPROC glad_glMultiTexCoord4dv = NULL;
+PFNGLMULTITEXCOORD4FPROC glad_glMultiTexCoord4f = NULL;
+PFNGLMULTITEXCOORD4FVPROC glad_glMultiTexCoord4fv = NULL;
+PFNGLMULTITEXCOORD4IPROC glad_glMultiTexCoord4i = NULL;
+PFNGLMULTITEXCOORD4IVPROC glad_glMultiTexCoord4iv = NULL;
+PFNGLMULTITEXCOORD4SPROC glad_glMultiTexCoord4s = NULL;
+PFNGLMULTITEXCOORD4SVPROC glad_glMultiTexCoord4sv = NULL;
+PFNGLMULTITEXCOORDP1UIPROC glad_glMultiTexCoordP1ui = NULL;
+PFNGLMULTITEXCOORDP1UIVPROC glad_glMultiTexCoordP1uiv = NULL;
+PFNGLMULTITEXCOORDP2UIPROC glad_glMultiTexCoordP2ui = NULL;
+PFNGLMULTITEXCOORDP2UIVPROC glad_glMultiTexCoordP2uiv = NULL;
+PFNGLMULTITEXCOORDP3UIPROC glad_glMultiTexCoordP3ui = NULL;
+PFNGLMULTITEXCOORDP3UIVPROC glad_glMultiTexCoordP3uiv = NULL;
+PFNGLMULTITEXCOORDP4UIPROC glad_glMultiTexCoordP4ui = NULL;
+PFNGLMULTITEXCOORDP4UIVPROC glad_glMultiTexCoordP4uiv = NULL;
+PFNGLNEWLISTPROC glad_glNewList = NULL;
+PFNGLNORMAL3BPROC glad_glNormal3b = NULL;
+PFNGLNORMAL3BVPROC glad_glNormal3bv = NULL;
+PFNGLNORMAL3DPROC glad_glNormal3d = NULL;
+PFNGLNORMAL3DVPROC glad_glNormal3dv = NULL;
+PFNGLNORMAL3FPROC glad_glNormal3f = NULL;
+PFNGLNORMAL3FVPROC glad_glNormal3fv = NULL;
+PFNGLNORMAL3IPROC glad_glNormal3i = NULL;
+PFNGLNORMAL3IVPROC glad_glNormal3iv = NULL;
+PFNGLNORMAL3SPROC glad_glNormal3s = NULL;
+PFNGLNORMAL3SVPROC glad_glNormal3sv = NULL;
+PFNGLNORMALP3UIPROC glad_glNormalP3ui = NULL;
+PFNGLNORMALP3UIVPROC glad_glNormalP3uiv = NULL;
+PFNGLNORMALPOINTERPROC glad_glNormalPointer = NULL;
+PFNGLORTHOPROC glad_glOrtho = NULL;
+PFNGLPASSTHROUGHPROC glad_glPassThrough = NULL;
+PFNGLPIXELMAPFVPROC glad_glPixelMapfv = NULL;
+PFNGLPIXELMAPUIVPROC glad_glPixelMapuiv = NULL;
+PFNGLPIXELMAPUSVPROC glad_glPixelMapusv = NULL;
+PFNGLPIXELSTOREFPROC glad_glPixelStoref = NULL;
+PFNGLPIXELSTOREIPROC glad_glPixelStorei = NULL;
+PFNGLPIXELTRANSFERFPROC glad_glPixelTransferf = NULL;
+PFNGLPIXELTRANSFERIPROC glad_glPixelTransferi = NULL;
+PFNGLPIXELZOOMPROC glad_glPixelZoom = NULL;
+PFNGLPOINTPARAMETERFPROC glad_glPointParameterf = NULL;
+PFNGLPOINTPARAMETERFVPROC glad_glPointParameterfv = NULL;
+PFNGLPOINTPARAMETERIPROC glad_glPointParameteri = NULL;
+PFNGLPOINTPARAMETERIVPROC glad_glPointParameteriv = NULL;
+PFNGLPOINTSIZEPROC glad_glPointSize = NULL;
+PFNGLPOLYGONMODEPROC glad_glPolygonMode = NULL;
+PFNGLPOLYGONOFFSETPROC glad_glPolygonOffset = NULL;
+PFNGLPOLYGONSTIPPLEPROC glad_glPolygonStipple = NULL;
+PFNGLPOPATTRIBPROC glad_glPopAttrib = NULL;
+PFNGLPOPCLIENTATTRIBPROC glad_glPopClientAttrib = NULL;
+PFNGLPOPMATRIXPROC glad_glPopMatrix = NULL;
+PFNGLPOPNAMEPROC glad_glPopName = NULL;
+PFNGLPRIMITIVERESTARTINDEXPROC glad_glPrimitiveRestartIndex = NULL;
+PFNGLPRIORITIZETEXTURESPROC glad_glPrioritizeTextures = NULL;
+PFNGLPROVOKINGVERTEXPROC glad_glProvokingVertex = NULL;
+PFNGLPUSHATTRIBPROC glad_glPushAttrib = NULL;
+PFNGLPUSHCLIENTATTRIBPROC glad_glPushClientAttrib = NULL;
+PFNGLPUSHMATRIXPROC glad_glPushMatrix = NULL;
+PFNGLPUSHNAMEPROC glad_glPushName = NULL;
+PFNGLQUERYCOUNTERPROC glad_glQueryCounter = NULL;
+PFNGLRASTERPOS2DPROC glad_glRasterPos2d = NULL;
+PFNGLRASTERPOS2DVPROC glad_glRasterPos2dv = NULL;
+PFNGLRASTERPOS2FPROC glad_glRasterPos2f = NULL;
+PFNGLRASTERPOS2FVPROC glad_glRasterPos2fv = NULL;
+PFNGLRASTERPOS2IPROC glad_glRasterPos2i = NULL;
+PFNGLRASTERPOS2IVPROC glad_glRasterPos2iv = NULL;
+PFNGLRASTERPOS2SPROC glad_glRasterPos2s = NULL;
+PFNGLRASTERPOS2SVPROC glad_glRasterPos2sv = NULL;
+PFNGLRASTERPOS3DPROC glad_glRasterPos3d = NULL;
+PFNGLRASTERPOS3DVPROC glad_glRasterPos3dv = NULL;
+PFNGLRASTERPOS3FPROC glad_glRasterPos3f = NULL;
+PFNGLRASTERPOS3FVPROC glad_glRasterPos3fv = NULL;
+PFNGLRASTERPOS3IPROC glad_glRasterPos3i = NULL;
+PFNGLRASTERPOS3IVPROC glad_glRasterPos3iv = NULL;
+PFNGLRASTERPOS3SPROC glad_glRasterPos3s = NULL;
+PFNGLRASTERPOS3SVPROC glad_glRasterPos3sv = NULL;
+PFNGLRASTERPOS4DPROC glad_glRasterPos4d = NULL;
+PFNGLRASTERPOS4DVPROC glad_glRasterPos4dv = NULL;
+PFNGLRASTERPOS4FPROC glad_glRasterPos4f = NULL;
+PFNGLRASTERPOS4FVPROC glad_glRasterPos4fv = NULL;
+PFNGLRASTERPOS4IPROC glad_glRasterPos4i = NULL;
+PFNGLRASTERPOS4IVPROC glad_glRasterPos4iv = NULL;
+PFNGLRASTERPOS4SPROC glad_glRasterPos4s = NULL;
+PFNGLRASTERPOS4SVPROC glad_glRasterPos4sv = NULL;
+PFNGLREADBUFFERPROC glad_glReadBuffer = NULL;
+PFNGLREADPIXELSPROC glad_glReadPixels = NULL;
+PFNGLRECTDPROC glad_glRectd = NULL;
+PFNGLRECTDVPROC glad_glRectdv = NULL;
+PFNGLRECTFPROC glad_glRectf = NULL;
+PFNGLRECTFVPROC glad_glRectfv = NULL;
+PFNGLRECTIPROC glad_glRecti = NULL;
+PFNGLRECTIVPROC glad_glRectiv = NULL;
+PFNGLRECTSPROC glad_glRects = NULL;
+PFNGLRECTSVPROC glad_glRectsv = NULL;
+PFNGLRENDERMODEPROC glad_glRenderMode = NULL;
+PFNGLRENDERBUFFERSTORAGEPROC glad_glRenderbufferStorage = NULL;
+PFNGLRENDERBUFFERSTORAGEEXTPROC glad_glRenderbufferStorageEXT = NULL;
+PFNGLRENDERBUFFERSTORAGEMULTISAMPLEPROC glad_glRenderbufferStorageMultisample = NULL;
+PFNGLRENDERBUFFERSTORAGEMULTISAMPLEEXTPROC glad_glRenderbufferStorageMultisampleEXT = NULL;
+PFNGLROTATEDPROC glad_glRotated = NULL;
+PFNGLROTATEFPROC glad_glRotatef = NULL;
+PFNGLSAMPLECOVERAGEPROC glad_glSampleCoverage = NULL;
+PFNGLSAMPLEMASKIPROC glad_glSampleMaski = NULL;
+PFNGLSAMPLERPARAMETERIIVPROC glad_glSamplerParameterIiv = NULL;
+PFNGLSAMPLERPARAMETERIUIVPROC glad_glSamplerParameterIuiv = NULL;
+PFNGLSAMPLERPARAMETERFPROC glad_glSamplerParameterf = NULL;
+PFNGLSAMPLERPARAMETERFVPROC glad_glSamplerParameterfv = NULL;
+PFNGLSAMPLERPARAMETERIPROC glad_glSamplerParameteri = NULL;
+PFNGLSAMPLERPARAMETERIVPROC glad_glSamplerParameteriv = NULL;
+PFNGLSCALEDPROC glad_glScaled = NULL;
+PFNGLSCALEFPROC glad_glScalef = NULL;
+PFNGLSCISSORPROC glad_glScissor = NULL;
+PFNGLSECONDARYCOLOR3BPROC glad_glSecondaryColor3b = NULL;
+PFNGLSECONDARYCOLOR3BVPROC glad_glSecondaryColor3bv = NULL;
+PFNGLSECONDARYCOLOR3DPROC glad_glSecondaryColor3d = NULL;
+PFNGLSECONDARYCOLOR3DVPROC glad_glSecondaryColor3dv = NULL;
+PFNGLSECONDARYCOLOR3FPROC glad_glSecondaryColor3f = NULL;
+PFNGLSECONDARYCOLOR3FVPROC glad_glSecondaryColor3fv = NULL;
+PFNGLSECONDARYCOLOR3IPROC glad_glSecondaryColor3i = NULL;
+PFNGLSECONDARYCOLOR3IVPROC glad_glSecondaryColor3iv = NULL;
+PFNGLSECONDARYCOLOR3SPROC glad_glSecondaryColor3s = NULL;
+PFNGLSECONDARYCOLOR3SVPROC glad_glSecondaryColor3sv = NULL;
+PFNGLSECONDARYCOLOR3UBPROC glad_glSecondaryColor3ub = NULL;
+PFNGLSECONDARYCOLOR3UBVPROC glad_glSecondaryColor3ubv = NULL;
+PFNGLSECONDARYCOLOR3UIPROC glad_glSecondaryColor3ui = NULL;
+PFNGLSECONDARYCOLOR3UIVPROC glad_glSecondaryColor3uiv = NULL;
+PFNGLSECONDARYCOLOR3USPROC glad_glSecondaryColor3us = NULL;
+PFNGLSECONDARYCOLOR3USVPROC glad_glSecondaryColor3usv = NULL;
+PFNGLSECONDARYCOLORP3UIPROC glad_glSecondaryColorP3ui = NULL;
+PFNGLSECONDARYCOLORP3UIVPROC glad_glSecondaryColorP3uiv = NULL;
+PFNGLSECONDARYCOLORPOINTERPROC glad_glSecondaryColorPointer = NULL;
+PFNGLSELECTBUFFERPROC glad_glSelectBuffer = NULL;
+PFNGLSHADEMODELPROC glad_glShadeModel = NULL;
+PFNGLSHADERSOURCEPROC glad_glShaderSource = NULL;
+PFNGLSTENCILFUNCPROC glad_glStencilFunc = NULL;
+PFNGLSTENCILFUNCSEPARATEPROC glad_glStencilFuncSeparate = NULL;
+PFNGLSTENCILMASKPROC glad_glStencilMask = NULL;
+PFNGLSTENCILMASKSEPARATEPROC glad_glStencilMaskSeparate = NULL;
+PFNGLSTENCILOPPROC glad_glStencilOp = NULL;
+PFNGLSTENCILOPSEPARATEPROC glad_glStencilOpSeparate = NULL;
+PFNGLTEXBUFFERPROC glad_glTexBuffer = NULL;
+PFNGLTEXCOORD1DPROC glad_glTexCoord1d = NULL;
+PFNGLTEXCOORD1DVPROC glad_glTexCoord1dv = NULL;
+PFNGLTEXCOORD1FPROC glad_glTexCoord1f = NULL;
+PFNGLTEXCOORD1FVPROC glad_glTexCoord1fv = NULL;
+PFNGLTEXCOORD1IPROC glad_glTexCoord1i = NULL;
+PFNGLTEXCOORD1IVPROC glad_glTexCoord1iv = NULL;
+PFNGLTEXCOORD1SPROC glad_glTexCoord1s = NULL;
+PFNGLTEXCOORD1SVPROC glad_glTexCoord1sv = NULL;
+PFNGLTEXCOORD2DPROC glad_glTexCoord2d = NULL;
+PFNGLTEXCOORD2DVPROC glad_glTexCoord2dv = NULL;
+PFNGLTEXCOORD2FPROC glad_glTexCoord2f = NULL;
+PFNGLTEXCOORD2FVPROC glad_glTexCoord2fv = NULL;
+PFNGLTEXCOORD2IPROC glad_glTexCoord2i = NULL;
+PFNGLTEXCOORD2IVPROC glad_glTexCoord2iv = NULL;
+PFNGLTEXCOORD2SPROC glad_glTexCoord2s = NULL;
+PFNGLTEXCOORD2SVPROC glad_glTexCoord2sv = NULL;
+PFNGLTEXCOORD3DPROC glad_glTexCoord3d = NULL;
+PFNGLTEXCOORD3DVPROC glad_glTexCoord3dv = NULL;
+PFNGLTEXCOORD3FPROC glad_glTexCoord3f = NULL;
+PFNGLTEXCOORD3FVPROC glad_glTexCoord3fv = NULL;
+PFNGLTEXCOORD3IPROC glad_glTexCoord3i = NULL;
+PFNGLTEXCOORD3IVPROC glad_glTexCoord3iv = NULL;
+PFNGLTEXCOORD3SPROC glad_glTexCoord3s = NULL;
+PFNGLTEXCOORD3SVPROC glad_glTexCoord3sv = NULL;
+PFNGLTEXCOORD4DPROC glad_glTexCoord4d = NULL;
+PFNGLTEXCOORD4DVPROC glad_glTexCoord4dv = NULL;
+PFNGLTEXCOORD4FPROC glad_glTexCoord4f = NULL;
+PFNGLTEXCOORD4FVPROC glad_glTexCoord4fv = NULL;
+PFNGLTEXCOORD4IPROC glad_glTexCoord4i = NULL;
+PFNGLTEXCOORD4IVPROC glad_glTexCoord4iv = NULL;
+PFNGLTEXCOORD4SPROC glad_glTexCoord4s = NULL;
+PFNGLTEXCOORD4SVPROC glad_glTexCoord4sv = NULL;
+PFNGLTEXCOORDP1UIPROC glad_glTexCoordP1ui = NULL;
+PFNGLTEXCOORDP1UIVPROC glad_glTexCoordP1uiv = NULL;
+PFNGLTEXCOORDP2UIPROC glad_glTexCoordP2ui = NULL;
+PFNGLTEXCOORDP2UIVPROC glad_glTexCoordP2uiv = NULL;
+PFNGLTEXCOORDP3UIPROC glad_glTexCoordP3ui = NULL;
+PFNGLTEXCOORDP3UIVPROC glad_glTexCoordP3uiv = NULL;
+PFNGLTEXCOORDP4UIPROC glad_glTexCoordP4ui = NULL;
+PFNGLTEXCOORDP4UIVPROC glad_glTexCoordP4uiv = NULL;
+PFNGLTEXCOORDPOINTERPROC glad_glTexCoordPointer = NULL;
+PFNGLTEXENVFPROC glad_glTexEnvf = NULL;
+PFNGLTEXENVFVPROC glad_glTexEnvfv = NULL;
+PFNGLTEXENVIPROC glad_glTexEnvi = NULL;
+PFNGLTEXENVIVPROC glad_glTexEnviv = NULL;
+PFNGLTEXGENDPROC glad_glTexGend = NULL;
+PFNGLTEXGENDVPROC glad_glTexGendv = NULL;
+PFNGLTEXGENFPROC glad_glTexGenf = NULL;
+PFNGLTEXGENFVPROC glad_glTexGenfv = NULL;
+PFNGLTEXGENIPROC glad_glTexGeni = NULL;
+PFNGLTEXGENIVPROC glad_glTexGeniv = NULL;
+PFNGLTEXIMAGE1DPROC glad_glTexImage1D = NULL;
+PFNGLTEXIMAGE2DPROC glad_glTexImage2D = NULL;
+PFNGLTEXIMAGE2DMULTISAMPLEPROC glad_glTexImage2DMultisample = NULL;
+PFNGLTEXIMAGE3DPROC glad_glTexImage3D = NULL;
+PFNGLTEXIMAGE3DMULTISAMPLEPROC glad_glTexImage3DMultisample = NULL;
+PFNGLTEXPARAMETERIIVPROC glad_glTexParameterIiv = NULL;
+PFNGLTEXPARAMETERIUIVPROC glad_glTexParameterIuiv = NULL;
+PFNGLTEXPARAMETERFPROC glad_glTexParameterf = NULL;
+PFNGLTEXPARAMETERFVPROC glad_glTexParameterfv = NULL;
+PFNGLTEXPARAMETERIPROC glad_glTexParameteri = NULL;
+PFNGLTEXPARAMETERIVPROC glad_glTexParameteriv = NULL;
+PFNGLTEXSUBIMAGE1DPROC glad_glTexSubImage1D = NULL;
+PFNGLTEXSUBIMAGE2DPROC glad_glTexSubImage2D = NULL;
+PFNGLTEXSUBIMAGE3DPROC glad_glTexSubImage3D = NULL;
+PFNGLTRANSFORMFEEDBACKVARYINGSPROC glad_glTransformFeedbackVaryings = NULL;
+PFNGLTRANSLATEDPROC glad_glTranslated = NULL;
+PFNGLTRANSLATEFPROC glad_glTranslatef = NULL;
+PFNGLUNIFORM1FPROC glad_glUniform1f = NULL;
+PFNGLUNIFORM1FVPROC glad_glUniform1fv = NULL;
+PFNGLUNIFORM1IPROC glad_glUniform1i = NULL;
+PFNGLUNIFORM1IVPROC glad_glUniform1iv = NULL;
+PFNGLUNIFORM1UIPROC glad_glUniform1ui = NULL;
+PFNGLUNIFORM1UIVPROC glad_glUniform1uiv = NULL;
+PFNGLUNIFORM2FPROC glad_glUniform2f = NULL;
+PFNGLUNIFORM2FVPROC glad_glUniform2fv = NULL;
+PFNGLUNIFORM2IPROC glad_glUniform2i = NULL;
+PFNGLUNIFORM2IVPROC glad_glUniform2iv = NULL;
+PFNGLUNIFORM2UIPROC glad_glUniform2ui = NULL;
+PFNGLUNIFORM2UIVPROC glad_glUniform2uiv = NULL;
+PFNGLUNIFORM3FPROC glad_glUniform3f = NULL;
+PFNGLUNIFORM3FVPROC glad_glUniform3fv = NULL;
+PFNGLUNIFORM3IPROC glad_glUniform3i = NULL;
+PFNGLUNIFORM3IVPROC glad_glUniform3iv = NULL;
+PFNGLUNIFORM3UIPROC glad_glUniform3ui = NULL;
+PFNGLUNIFORM3UIVPROC glad_glUniform3uiv = NULL;
+PFNGLUNIFORM4FPROC glad_glUniform4f = NULL;
+PFNGLUNIFORM4FVPROC glad_glUniform4fv = NULL;
+PFNGLUNIFORM4IPROC glad_glUniform4i = NULL;
+PFNGLUNIFORM4IVPROC glad_glUniform4iv = NULL;
+PFNGLUNIFORM4UIPROC glad_glUniform4ui = NULL;
+PFNGLUNIFORM4UIVPROC glad_glUniform4uiv = NULL;
+PFNGLUNIFORMBLOCKBINDINGPROC glad_glUniformBlockBinding = NULL;
+PFNGLUNIFORMMATRIX2FVPROC glad_glUniformMatrix2fv = NULL;
+PFNGLUNIFORMMATRIX2X3FVPROC glad_glUniformMatrix2x3fv = NULL;
+PFNGLUNIFORMMATRIX2X4FVPROC glad_glUniformMatrix2x4fv = NULL;
+PFNGLUNIFORMMATRIX3FVPROC glad_glUniformMatrix3fv = NULL;
+PFNGLUNIFORMMATRIX3X2FVPROC glad_glUniformMatrix3x2fv = NULL;
+PFNGLUNIFORMMATRIX3X4FVPROC glad_glUniformMatrix3x4fv = NULL;
+PFNGLUNIFORMMATRIX4FVPROC glad_glUniformMatrix4fv = NULL;
+PFNGLUNIFORMMATRIX4X2FVPROC glad_glUniformMatrix4x2fv = NULL;
+PFNGLUNIFORMMATRIX4X3FVPROC glad_glUniformMatrix4x3fv = NULL;
+PFNGLUNMAPBUFFERPROC glad_glUnmapBuffer = NULL;
+PFNGLUSEPROGRAMPROC glad_glUseProgram = NULL;
+PFNGLVALIDATEPROGRAMPROC glad_glValidateProgram = NULL;
+PFNGLVERTEX2DPROC glad_glVertex2d = NULL;
+PFNGLVERTEX2DVPROC glad_glVertex2dv = NULL;
+PFNGLVERTEX2FPROC glad_glVertex2f = NULL;
+PFNGLVERTEX2FVPROC glad_glVertex2fv = NULL;
+PFNGLVERTEX2IPROC glad_glVertex2i = NULL;
+PFNGLVERTEX2IVPROC glad_glVertex2iv = NULL;
+PFNGLVERTEX2SPROC glad_glVertex2s = NULL;
+PFNGLVERTEX2SVPROC glad_glVertex2sv = NULL;
+PFNGLVERTEX3DPROC glad_glVertex3d = NULL;
+PFNGLVERTEX3DVPROC glad_glVertex3dv = NULL;
+PFNGLVERTEX3FPROC glad_glVertex3f = NULL;
+PFNGLVERTEX3FVPROC glad_glVertex3fv = NULL;
+PFNGLVERTEX3IPROC glad_glVertex3i = NULL;
+PFNGLVERTEX3IVPROC glad_glVertex3iv = NULL;
+PFNGLVERTEX3SPROC glad_glVertex3s = NULL;
+PFNGLVERTEX3SVPROC glad_glVertex3sv = NULL;
+PFNGLVERTEX4DPROC glad_glVertex4d = NULL;
+PFNGLVERTEX4DVPROC glad_glVertex4dv = NULL;
+PFNGLVERTEX4FPROC glad_glVertex4f = NULL;
+PFNGLVERTEX4FVPROC glad_glVertex4fv = NULL;
+PFNGLVERTEX4IPROC glad_glVertex4i = NULL;
+PFNGLVERTEX4IVPROC glad_glVertex4iv = NULL;
+PFNGLVERTEX4SPROC glad_glVertex4s = NULL;
+PFNGLVERTEX4SVPROC glad_glVertex4sv = NULL;
+PFNGLVERTEXATTRIB1DPROC glad_glVertexAttrib1d = NULL;
+PFNGLVERTEXATTRIB1DVPROC glad_glVertexAttrib1dv = NULL;
+PFNGLVERTEXATTRIB1FPROC glad_glVertexAttrib1f = NULL;
+PFNGLVERTEXATTRIB1FVPROC glad_glVertexAttrib1fv = NULL;
+PFNGLVERTEXATTRIB1SPROC glad_glVertexAttrib1s = NULL;
+PFNGLVERTEXATTRIB1SVPROC glad_glVertexAttrib1sv = NULL;
+PFNGLVERTEXATTRIB2DPROC glad_glVertexAttrib2d = NULL;
+PFNGLVERTEXATTRIB2DVPROC glad_glVertexAttrib2dv = NULL;
+PFNGLVERTEXATTRIB2FPROC glad_glVertexAttrib2f = NULL;
+PFNGLVERTEXATTRIB2FVPROC glad_glVertexAttrib2fv = NULL;
+PFNGLVERTEXATTRIB2SPROC glad_glVertexAttrib2s = NULL;
+PFNGLVERTEXATTRIB2SVPROC glad_glVertexAttrib2sv = NULL;
+PFNGLVERTEXATTRIB3DPROC glad_glVertexAttrib3d = NULL;
+PFNGLVERTEXATTRIB3DVPROC glad_glVertexAttrib3dv = NULL;
+PFNGLVERTEXATTRIB3FPROC glad_glVertexAttrib3f = NULL;
+PFNGLVERTEXATTRIB3FVPROC glad_glVertexAttrib3fv = NULL;
+PFNGLVERTEXATTRIB3SPROC glad_glVertexAttrib3s = NULL;
+PFNGLVERTEXATTRIB3SVPROC glad_glVertexAttrib3sv = NULL;
+PFNGLVERTEXATTRIB4NBVPROC glad_glVertexAttrib4Nbv = NULL;
+PFNGLVERTEXATTRIB4NIVPROC glad_glVertexAttrib4Niv = NULL;
+PFNGLVERTEXATTRIB4NSVPROC glad_glVertexAttrib4Nsv = NULL;
+PFNGLVERTEXATTRIB4NUBPROC glad_glVertexAttrib4Nub = NULL;
+PFNGLVERTEXATTRIB4NUBVPROC glad_glVertexAttrib4Nubv = NULL;
+PFNGLVERTEXATTRIB4NUIVPROC glad_glVertexAttrib4Nuiv = NULL;
+PFNGLVERTEXATTRIB4NUSVPROC glad_glVertexAttrib4Nusv = NULL;
+PFNGLVERTEXATTRIB4BVPROC glad_glVertexAttrib4bv = NULL;
+PFNGLVERTEXATTRIB4DPROC glad_glVertexAttrib4d = NULL;
+PFNGLVERTEXATTRIB4DVPROC glad_glVertexAttrib4dv = NULL;
+PFNGLVERTEXATTRIB4FPROC glad_glVertexAttrib4f = NULL;
+PFNGLVERTEXATTRIB4FVPROC glad_glVertexAttrib4fv = NULL;
+PFNGLVERTEXATTRIB4IVPROC glad_glVertexAttrib4iv = NULL;
+PFNGLVERTEXATTRIB4SPROC glad_glVertexAttrib4s = NULL;
+PFNGLVERTEXATTRIB4SVPROC glad_glVertexAttrib4sv = NULL;
+PFNGLVERTEXATTRIB4UBVPROC glad_glVertexAttrib4ubv = NULL;
+PFNGLVERTEXATTRIB4UIVPROC glad_glVertexAttrib4uiv = NULL;
+PFNGLVERTEXATTRIB4USVPROC glad_glVertexAttrib4usv = NULL;
+PFNGLVERTEXATTRIBDIVISORPROC glad_glVertexAttribDivisor = NULL;
+PFNGLVERTEXATTRIBI1IPROC glad_glVertexAttribI1i = NULL;
+PFNGLVERTEXATTRIBI1IVPROC glad_glVertexAttribI1iv = NULL;
+PFNGLVERTEXATTRIBI1UIPROC glad_glVertexAttribI1ui = NULL;
+PFNGLVERTEXATTRIBI1UIVPROC glad_glVertexAttribI1uiv = NULL;
+PFNGLVERTEXATTRIBI2IPROC glad_glVertexAttribI2i = NULL;
+PFNGLVERTEXATTRIBI2IVPROC glad_glVertexAttribI2iv = NULL;
+PFNGLVERTEXATTRIBI2UIPROC glad_glVertexAttribI2ui = NULL;
+PFNGLVERTEXATTRIBI2UIVPROC glad_glVertexAttribI2uiv = NULL;
+PFNGLVERTEXATTRIBI3IPROC glad_glVertexAttribI3i = NULL;
+PFNGLVERTEXATTRIBI3IVPROC glad_glVertexAttribI3iv = NULL;
+PFNGLVERTEXATTRIBI3UIPROC glad_glVertexAttribI3ui = NULL;
+PFNGLVERTEXATTRIBI3UIVPROC glad_glVertexAttribI3uiv = NULL;
+PFNGLVERTEXATTRIBI4BVPROC glad_glVertexAttribI4bv = NULL;
+PFNGLVERTEXATTRIBI4IPROC glad_glVertexAttribI4i = NULL;
+PFNGLVERTEXATTRIBI4IVPROC glad_glVertexAttribI4iv = NULL;
+PFNGLVERTEXATTRIBI4SVPROC glad_glVertexAttribI4sv = NULL;
+PFNGLVERTEXATTRIBI4UBVPROC glad_glVertexAttribI4ubv = NULL;
+PFNGLVERTEXATTRIBI4UIPROC glad_glVertexAttribI4ui = NULL;
+PFNGLVERTEXATTRIBI4UIVPROC glad_glVertexAttribI4uiv = NULL;
+PFNGLVERTEXATTRIBI4USVPROC glad_glVertexAttribI4usv = NULL;
+PFNGLVERTEXATTRIBIPOINTERPROC glad_glVertexAttribIPointer = NULL;
+PFNGLVERTEXATTRIBP1UIPROC glad_glVertexAttribP1ui = NULL;
+PFNGLVERTEXATTRIBP1UIVPROC glad_glVertexAttribP1uiv = NULL;
+PFNGLVERTEXATTRIBP2UIPROC glad_glVertexAttribP2ui = NULL;
+PFNGLVERTEXATTRIBP2UIVPROC glad_glVertexAttribP2uiv = NULL;
+PFNGLVERTEXATTRIBP3UIPROC glad_glVertexAttribP3ui = NULL;
+PFNGLVERTEXATTRIBP3UIVPROC glad_glVertexAttribP3uiv = NULL;
+PFNGLVERTEXATTRIBP4UIPROC glad_glVertexAttribP4ui = NULL;
+PFNGLVERTEXATTRIBP4UIVPROC glad_glVertexAttribP4uiv = NULL;
+PFNGLVERTEXATTRIBPOINTERPROC glad_glVertexAttribPointer = NULL;
+PFNGLVERTEXP2UIPROC glad_glVertexP2ui = NULL;
+PFNGLVERTEXP2UIVPROC glad_glVertexP2uiv = NULL;
+PFNGLVERTEXP3UIPROC glad_glVertexP3ui = NULL;
+PFNGLVERTEXP3UIVPROC glad_glVertexP3uiv = NULL;
+PFNGLVERTEXP4UIPROC glad_glVertexP4ui = NULL;
+PFNGLVERTEXP4UIVPROC glad_glVertexP4uiv = NULL;
+PFNGLVERTEXPOINTERPROC glad_glVertexPointer = NULL;
+PFNGLVIEWPORTPROC glad_glViewport = NULL;
+PFNGLWAITSYNCPROC glad_glWaitSync = NULL;
+PFNGLWINDOWPOS2DPROC glad_glWindowPos2d = NULL;
+PFNGLWINDOWPOS2DVPROC glad_glWindowPos2dv = NULL;
+PFNGLWINDOWPOS2FPROC glad_glWindowPos2f = NULL;
+PFNGLWINDOWPOS2FVPROC glad_glWindowPos2fv = NULL;
+PFNGLWINDOWPOS2IPROC glad_glWindowPos2i = NULL;
+PFNGLWINDOWPOS2IVPROC glad_glWindowPos2iv = NULL;
+PFNGLWINDOWPOS2SPROC glad_glWindowPos2s = NULL;
+PFNGLWINDOWPOS2SVPROC glad_glWindowPos2sv = NULL;
+PFNGLWINDOWPOS3DPROC glad_glWindowPos3d = NULL;
+PFNGLWINDOWPOS3DVPROC glad_glWindowPos3dv = NULL;
+PFNGLWINDOWPOS3FPROC glad_glWindowPos3f = NULL;
+PFNGLWINDOWPOS3FVPROC glad_glWindowPos3fv = NULL;
+PFNGLWINDOWPOS3IPROC glad_glWindowPos3i = NULL;
+PFNGLWINDOWPOS3IVPROC glad_glWindowPos3iv = NULL;
+PFNGLWINDOWPOS3SPROC glad_glWindowPos3s = NULL;
+PFNGLWINDOWPOS3SVPROC glad_glWindowPos3sv = NULL;
+
+
+static void glad_gl_load_GL_VERSION_1_0( GLADuserptrloadfunc load, void* userptr) {
+ if(!GLAD_GL_VERSION_1_0) return;
+ glad_glAccum = (PFNGLACCUMPROC) load(userptr, "glAccum");
+ glad_glAlphaFunc = (PFNGLALPHAFUNCPROC) load(userptr, "glAlphaFunc");
+ glad_glBegin = (PFNGLBEGINPROC) load(userptr, "glBegin");
+ glad_glBitmap = (PFNGLBITMAPPROC) load(userptr, "glBitmap");
+ glad_glBlendFunc = (PFNGLBLENDFUNCPROC) load(userptr, "glBlendFunc");
+ glad_glCallList = (PFNGLCALLLISTPROC) load(userptr, "glCallList");
+ glad_glCallLists = (PFNGLCALLLISTSPROC) load(userptr, "glCallLists");
+ glad_glClear = (PFNGLCLEARPROC) load(userptr, "glClear");
+ glad_glClearAccum = (PFNGLCLEARACCUMPROC) load(userptr, "glClearAccum");
+ glad_glClearColor = (PFNGLCLEARCOLORPROC) load(userptr, "glClearColor");
+ glad_glClearDepth = (PFNGLCLEARDEPTHPROC) load(userptr, "glClearDepth");
+ glad_glClearIndex = (PFNGLCLEARINDEXPROC) load(userptr, "glClearIndex");
+ glad_glClearStencil = (PFNGLCLEARSTENCILPROC) load(userptr, "glClearStencil");
+ glad_glClipPlane = (PFNGLCLIPPLANEPROC) load(userptr, "glClipPlane");
+ glad_glColor3b = (PFNGLCOLOR3BPROC) load(userptr, "glColor3b");
+ glad_glColor3bv = (PFNGLCOLOR3BVPROC) load(userptr, "glColor3bv");
+ glad_glColor3d = (PFNGLCOLOR3DPROC) load(userptr, "glColor3d");
+ glad_glColor3dv = (PFNGLCOLOR3DVPROC) load(userptr, "glColor3dv");
+ glad_glColor3f = (PFNGLCOLOR3FPROC) load(userptr, "glColor3f");
+ glad_glColor3fv = (PFNGLCOLOR3FVPROC) load(userptr, "glColor3fv");
+ glad_glColor3i = (PFNGLCOLOR3IPROC) load(userptr, "glColor3i");
+ glad_glColor3iv = (PFNGLCOLOR3IVPROC) load(userptr, "glColor3iv");
+ glad_glColor3s = (PFNGLCOLOR3SPROC) load(userptr, "glColor3s");
+ glad_glColor3sv = (PFNGLCOLOR3SVPROC) load(userptr, "glColor3sv");
+ glad_glColor3ub = (PFNGLCOLOR3UBPROC) load(userptr, "glColor3ub");
+ glad_glColor3ubv = (PFNGLCOLOR3UBVPROC) load(userptr, "glColor3ubv");
+ glad_glColor3ui = (PFNGLCOLOR3UIPROC) load(userptr, "glColor3ui");
+ glad_glColor3uiv = (PFNGLCOLOR3UIVPROC) load(userptr, "glColor3uiv");
+ glad_glColor3us = (PFNGLCOLOR3USPROC) load(userptr, "glColor3us");
+ glad_glColor3usv = (PFNGLCOLOR3USVPROC) load(userptr, "glColor3usv");
+ glad_glColor4b = (PFNGLCOLOR4BPROC) load(userptr, "glColor4b");
+ glad_glColor4bv = (PFNGLCOLOR4BVPROC) load(userptr, "glColor4bv");
+ glad_glColor4d = (PFNGLCOLOR4DPROC) load(userptr, "glColor4d");
+ glad_glColor4dv = (PFNGLCOLOR4DVPROC) load(userptr, "glColor4dv");
+ glad_glColor4f = (PFNGLCOLOR4FPROC) load(userptr, "glColor4f");
+ glad_glColor4fv = (PFNGLCOLOR4FVPROC) load(userptr, "glColor4fv");
+ glad_glColor4i = (PFNGLCOLOR4IPROC) load(userptr, "glColor4i");
+ glad_glColor4iv = (PFNGLCOLOR4IVPROC) load(userptr, "glColor4iv");
+ glad_glColor4s = (PFNGLCOLOR4SPROC) load(userptr, "glColor4s");
+ glad_glColor4sv = (PFNGLCOLOR4SVPROC) load(userptr, "glColor4sv");
+ glad_glColor4ub = (PFNGLCOLOR4UBPROC) load(userptr, "glColor4ub");
+ glad_glColor4ubv = (PFNGLCOLOR4UBVPROC) load(userptr, "glColor4ubv");
+ glad_glColor4ui = (PFNGLCOLOR4UIPROC) load(userptr, "glColor4ui");
+ glad_glColor4uiv = (PFNGLCOLOR4UIVPROC) load(userptr, "glColor4uiv");
+ glad_glColor4us = (PFNGLCOLOR4USPROC) load(userptr, "glColor4us");
+ glad_glColor4usv = (PFNGLCOLOR4USVPROC) load(userptr, "glColor4usv");
+ glad_glColorMask = (PFNGLCOLORMASKPROC) load(userptr, "glColorMask");
+ glad_glColorMaterial = (PFNGLCOLORMATERIALPROC) load(userptr, "glColorMaterial");
+ glad_glCopyPixels = (PFNGLCOPYPIXELSPROC) load(userptr, "glCopyPixels");
+ glad_glCullFace = (PFNGLCULLFACEPROC) load(userptr, "glCullFace");
+ glad_glDeleteLists = (PFNGLDELETELISTSPROC) load(userptr, "glDeleteLists");
+ glad_glDepthFunc = (PFNGLDEPTHFUNCPROC) load(userptr, "glDepthFunc");
+ glad_glDepthMask = (PFNGLDEPTHMASKPROC) load(userptr, "glDepthMask");
+ glad_glDepthRange = (PFNGLDEPTHRANGEPROC) load(userptr, "glDepthRange");
+ glad_glDisable = (PFNGLDISABLEPROC) load(userptr, "glDisable");
+ glad_glDrawBuffer = (PFNGLDRAWBUFFERPROC) load(userptr, "glDrawBuffer");
+ glad_glDrawPixels = (PFNGLDRAWPIXELSPROC) load(userptr, "glDrawPixels");
+ glad_glEdgeFlag = (PFNGLEDGEFLAGPROC) load(userptr, "glEdgeFlag");
+ glad_glEdgeFlagv = (PFNGLEDGEFLAGVPROC) load(userptr, "glEdgeFlagv");
+ glad_glEnable = (PFNGLENABLEPROC) load(userptr, "glEnable");
+ glad_glEnd = (PFNGLENDPROC) load(userptr, "glEnd");
+ glad_glEndList = (PFNGLENDLISTPROC) load(userptr, "glEndList");
+ glad_glEvalCoord1d = (PFNGLEVALCOORD1DPROC) load(userptr, "glEvalCoord1d");
+ glad_glEvalCoord1dv = (PFNGLEVALCOORD1DVPROC) load(userptr, "glEvalCoord1dv");
+ glad_glEvalCoord1f = (PFNGLEVALCOORD1FPROC) load(userptr, "glEvalCoord1f");
+ glad_glEvalCoord1fv = (PFNGLEVALCOORD1FVPROC) load(userptr, "glEvalCoord1fv");
+ glad_glEvalCoord2d = (PFNGLEVALCOORD2DPROC) load(userptr, "glEvalCoord2d");
+ glad_glEvalCoord2dv = (PFNGLEVALCOORD2DVPROC) load(userptr, "glEvalCoord2dv");
+ glad_glEvalCoord2f = (PFNGLEVALCOORD2FPROC) load(userptr, "glEvalCoord2f");
+ glad_glEvalCoord2fv = (PFNGLEVALCOORD2FVPROC) load(userptr, "glEvalCoord2fv");
+ glad_glEvalMesh1 = (PFNGLEVALMESH1PROC) load(userptr, "glEvalMesh1");
+ glad_glEvalMesh2 = (PFNGLEVALMESH2PROC) load(userptr, "glEvalMesh2");
+ glad_glEvalPoint1 = (PFNGLEVALPOINT1PROC) load(userptr, "glEvalPoint1");
+ glad_glEvalPoint2 = (PFNGLEVALPOINT2PROC) load(userptr, "glEvalPoint2");
+ glad_glFeedbackBuffer = (PFNGLFEEDBACKBUFFERPROC) load(userptr, "glFeedbackBuffer");
+ glad_glFinish = (PFNGLFINISHPROC) load(userptr, "glFinish");
+ glad_glFlush = (PFNGLFLUSHPROC) load(userptr, "glFlush");
+ glad_glFogf = (PFNGLFOGFPROC) load(userptr, "glFogf");
+ glad_glFogfv = (PFNGLFOGFVPROC) load(userptr, "glFogfv");
+ glad_glFogi = (PFNGLFOGIPROC) load(userptr, "glFogi");
+ glad_glFogiv = (PFNGLFOGIVPROC) load(userptr, "glFogiv");
+ glad_glFrontFace = (PFNGLFRONTFACEPROC) load(userptr, "glFrontFace");
+ glad_glFrustum = (PFNGLFRUSTUMPROC) load(userptr, "glFrustum");
+ glad_glGenLists = (PFNGLGENLISTSPROC) load(userptr, "glGenLists");
+ glad_glGetBooleanv = (PFNGLGETBOOLEANVPROC) load(userptr, "glGetBooleanv");
+ glad_glGetClipPlane = (PFNGLGETCLIPPLANEPROC) load(userptr, "glGetClipPlane");
+ glad_glGetDoublev = (PFNGLGETDOUBLEVPROC) load(userptr, "glGetDoublev");
+ glad_glGetError = (PFNGLGETERRORPROC) load(userptr, "glGetError");
+ glad_glGetFloatv = (PFNGLGETFLOATVPROC) load(userptr, "glGetFloatv");
+ glad_glGetIntegerv = (PFNGLGETINTEGERVPROC) load(userptr, "glGetIntegerv");
+ glad_glGetLightfv = (PFNGLGETLIGHTFVPROC) load(userptr, "glGetLightfv");
+ glad_glGetLightiv = (PFNGLGETLIGHTIVPROC) load(userptr, "glGetLightiv");
+ glad_glGetMapdv = (PFNGLGETMAPDVPROC) load(userptr, "glGetMapdv");
+ glad_glGetMapfv = (PFNGLGETMAPFVPROC) load(userptr, "glGetMapfv");
+ glad_glGetMapiv = (PFNGLGETMAPIVPROC) load(userptr, "glGetMapiv");
+ glad_glGetMaterialfv = (PFNGLGETMATERIALFVPROC) load(userptr, "glGetMaterialfv");
+ glad_glGetMaterialiv = (PFNGLGETMATERIALIVPROC) load(userptr, "glGetMaterialiv");
+ glad_glGetPixelMapfv = (PFNGLGETPIXELMAPFVPROC) load(userptr, "glGetPixelMapfv");
+ glad_glGetPixelMapuiv = (PFNGLGETPIXELMAPUIVPROC) load(userptr, "glGetPixelMapuiv");
+ glad_glGetPixelMapusv = (PFNGLGETPIXELMAPUSVPROC) load(userptr, "glGetPixelMapusv");
+ glad_glGetPolygonStipple = (PFNGLGETPOLYGONSTIPPLEPROC) load(userptr, "glGetPolygonStipple");
+ glad_glGetString = (PFNGLGETSTRINGPROC) load(userptr, "glGetString");
+ glad_glGetTexEnvfv = (PFNGLGETTEXENVFVPROC) load(userptr, "glGetTexEnvfv");
+ glad_glGetTexEnviv = (PFNGLGETTEXENVIVPROC) load(userptr, "glGetTexEnviv");
+ glad_glGetTexGendv = (PFNGLGETTEXGENDVPROC) load(userptr, "glGetTexGendv");
+ glad_glGetTexGenfv = (PFNGLGETTEXGENFVPROC) load(userptr, "glGetTexGenfv");
+ glad_glGetTexGeniv = (PFNGLGETTEXGENIVPROC) load(userptr, "glGetTexGeniv");
+ glad_glGetTexImage = (PFNGLGETTEXIMAGEPROC) load(userptr, "glGetTexImage");
+ glad_glGetTexLevelParameterfv = (PFNGLGETTEXLEVELPARAMETERFVPROC) load(userptr, "glGetTexLevelParameterfv");
+ glad_glGetTexLevelParameteriv = (PFNGLGETTEXLEVELPARAMETERIVPROC) load(userptr, "glGetTexLevelParameteriv");
+ glad_glGetTexParameterfv = (PFNGLGETTEXPARAMETERFVPROC) load(userptr, "glGetTexParameterfv");
+ glad_glGetTexParameteriv = (PFNGLGETTEXPARAMETERIVPROC) load(userptr, "glGetTexParameteriv");
+ glad_glHint = (PFNGLHINTPROC) load(userptr, "glHint");
+ glad_glIndexMask = (PFNGLINDEXMASKPROC) load(userptr, "glIndexMask");
+ glad_glIndexd = (PFNGLINDEXDPROC) load(userptr, "glIndexd");
+ glad_glIndexdv = (PFNGLINDEXDVPROC) load(userptr, "glIndexdv");
+ glad_glIndexf = (PFNGLINDEXFPROC) load(userptr, "glIndexf");
+ glad_glIndexfv = (PFNGLINDEXFVPROC) load(userptr, "glIndexfv");
+ glad_glIndexi = (PFNGLINDEXIPROC) load(userptr, "glIndexi");
+ glad_glIndexiv = (PFNGLINDEXIVPROC) load(userptr, "glIndexiv");
+ glad_glIndexs = (PFNGLINDEXSPROC) load(userptr, "glIndexs");
+ glad_glIndexsv = (PFNGLINDEXSVPROC) load(userptr, "glIndexsv");
+ glad_glInitNames = (PFNGLINITNAMESPROC) load(userptr, "glInitNames");
+ glad_glIsEnabled = (PFNGLISENABLEDPROC) load(userptr, "glIsEnabled");
+ glad_glIsList = (PFNGLISLISTPROC) load(userptr, "glIsList");
+ glad_glLightModelf = (PFNGLLIGHTMODELFPROC) load(userptr, "glLightModelf");
+ glad_glLightModelfv = (PFNGLLIGHTMODELFVPROC) load(userptr, "glLightModelfv");
+ glad_glLightModeli = (PFNGLLIGHTMODELIPROC) load(userptr, "glLightModeli");
+ glad_glLightModeliv = (PFNGLLIGHTMODELIVPROC) load(userptr, "glLightModeliv");
+ glad_glLightf = (PFNGLLIGHTFPROC) load(userptr, "glLightf");
+ glad_glLightfv = (PFNGLLIGHTFVPROC) load(userptr, "glLightfv");
+ glad_glLighti = (PFNGLLIGHTIPROC) load(userptr, "glLighti");
+ glad_glLightiv = (PFNGLLIGHTIVPROC) load(userptr, "glLightiv");
+ glad_glLineStipple = (PFNGLLINESTIPPLEPROC) load(userptr, "glLineStipple");
+ glad_glLineWidth = (PFNGLLINEWIDTHPROC) load(userptr, "glLineWidth");
+ glad_glListBase = (PFNGLLISTBASEPROC) load(userptr, "glListBase");
+ glad_glLoadIdentity = (PFNGLLOADIDENTITYPROC) load(userptr, "glLoadIdentity");
+ glad_glLoadMatrixd = (PFNGLLOADMATRIXDPROC) load(userptr, "glLoadMatrixd");
+ glad_glLoadMatrixf = (PFNGLLOADMATRIXFPROC) load(userptr, "glLoadMatrixf");
+ glad_glLoadName = (PFNGLLOADNAMEPROC) load(userptr, "glLoadName");
+ glad_glLogicOp = (PFNGLLOGICOPPROC) load(userptr, "glLogicOp");
+ glad_glMap1d = (PFNGLMAP1DPROC) load(userptr, "glMap1d");
+ glad_glMap1f = (PFNGLMAP1FPROC) load(userptr, "glMap1f");
+ glad_glMap2d = (PFNGLMAP2DPROC) load(userptr, "glMap2d");
+ glad_glMap2f = (PFNGLMAP2FPROC) load(userptr, "glMap2f");
+ glad_glMapGrid1d = (PFNGLMAPGRID1DPROC) load(userptr, "glMapGrid1d");
+ glad_glMapGrid1f = (PFNGLMAPGRID1FPROC) load(userptr, "glMapGrid1f");
+ glad_glMapGrid2d = (PFNGLMAPGRID2DPROC) load(userptr, "glMapGrid2d");
+ glad_glMapGrid2f = (PFNGLMAPGRID2FPROC) load(userptr, "glMapGrid2f");
+ glad_glMaterialf = (PFNGLMATERIALFPROC) load(userptr, "glMaterialf");
+ glad_glMaterialfv = (PFNGLMATERIALFVPROC) load(userptr, "glMaterialfv");
+ glad_glMateriali = (PFNGLMATERIALIPROC) load(userptr, "glMateriali");
+ glad_glMaterialiv = (PFNGLMATERIALIVPROC) load(userptr, "glMaterialiv");
+ glad_glMatrixMode = (PFNGLMATRIXMODEPROC) load(userptr, "glMatrixMode");
+ glad_glMultMatrixd = (PFNGLMULTMATRIXDPROC) load(userptr, "glMultMatrixd");
+ glad_glMultMatrixf = (PFNGLMULTMATRIXFPROC) load(userptr, "glMultMatrixf");
+ glad_glNewList = (PFNGLNEWLISTPROC) load(userptr, "glNewList");
+ glad_glNormal3b = (PFNGLNORMAL3BPROC) load(userptr, "glNormal3b");
+ glad_glNormal3bv = (PFNGLNORMAL3BVPROC) load(userptr, "glNormal3bv");
+ glad_glNormal3d = (PFNGLNORMAL3DPROC) load(userptr, "glNormal3d");
+ glad_glNormal3dv = (PFNGLNORMAL3DVPROC) load(userptr, "glNormal3dv");
+ glad_glNormal3f = (PFNGLNORMAL3FPROC) load(userptr, "glNormal3f");
+ glad_glNormal3fv = (PFNGLNORMAL3FVPROC) load(userptr, "glNormal3fv");
+ glad_glNormal3i = (PFNGLNORMAL3IPROC) load(userptr, "glNormal3i");
+ glad_glNormal3iv = (PFNGLNORMAL3IVPROC) load(userptr, "glNormal3iv");
+ glad_glNormal3s = (PFNGLNORMAL3SPROC) load(userptr, "glNormal3s");
+ glad_glNormal3sv = (PFNGLNORMAL3SVPROC) load(userptr, "glNormal3sv");
+ glad_glOrtho = (PFNGLORTHOPROC) load(userptr, "glOrtho");
+ glad_glPassThrough = (PFNGLPASSTHROUGHPROC) load(userptr, "glPassThrough");
+ glad_glPixelMapfv = (PFNGLPIXELMAPFVPROC) load(userptr, "glPixelMapfv");
+ glad_glPixelMapuiv = (PFNGLPIXELMAPUIVPROC) load(userptr, "glPixelMapuiv");
+ glad_glPixelMapusv = (PFNGLPIXELMAPUSVPROC) load(userptr, "glPixelMapusv");
+ glad_glPixelStoref = (PFNGLPIXELSTOREFPROC) load(userptr, "glPixelStoref");
+ glad_glPixelStorei = (PFNGLPIXELSTOREIPROC) load(userptr, "glPixelStorei");
+ glad_glPixelTransferf = (PFNGLPIXELTRANSFERFPROC) load(userptr, "glPixelTransferf");
+ glad_glPixelTransferi = (PFNGLPIXELTRANSFERIPROC) load(userptr, "glPixelTransferi");
+ glad_glPixelZoom = (PFNGLPIXELZOOMPROC) load(userptr, "glPixelZoom");
+ glad_glPointSize = (PFNGLPOINTSIZEPROC) load(userptr, "glPointSize");
+ glad_glPolygonMode = (PFNGLPOLYGONMODEPROC) load(userptr, "glPolygonMode");
+ glad_glPolygonStipple = (PFNGLPOLYGONSTIPPLEPROC) load(userptr, "glPolygonStipple");
+ glad_glPopAttrib = (PFNGLPOPATTRIBPROC) load(userptr, "glPopAttrib");
+ glad_glPopMatrix = (PFNGLPOPMATRIXPROC) load(userptr, "glPopMatrix");
+ glad_glPopName = (PFNGLPOPNAMEPROC) load(userptr, "glPopName");
+ glad_glPushAttrib = (PFNGLPUSHATTRIBPROC) load(userptr, "glPushAttrib");
+ glad_glPushMatrix = (PFNGLPUSHMATRIXPROC) load(userptr, "glPushMatrix");
+ glad_glPushName = (PFNGLPUSHNAMEPROC) load(userptr, "glPushName");
+ glad_glRasterPos2d = (PFNGLRASTERPOS2DPROC) load(userptr, "glRasterPos2d");
+ glad_glRasterPos2dv = (PFNGLRASTERPOS2DVPROC) load(userptr, "glRasterPos2dv");
+ glad_glRasterPos2f = (PFNGLRASTERPOS2FPROC) load(userptr, "glRasterPos2f");
+ glad_glRasterPos2fv = (PFNGLRASTERPOS2FVPROC) load(userptr, "glRasterPos2fv");
+ glad_glRasterPos2i = (PFNGLRASTERPOS2IPROC) load(userptr, "glRasterPos2i");
+ glad_glRasterPos2iv = (PFNGLRASTERPOS2IVPROC) load(userptr, "glRasterPos2iv");
+ glad_glRasterPos2s = (PFNGLRASTERPOS2SPROC) load(userptr, "glRasterPos2s");
+ glad_glRasterPos2sv = (PFNGLRASTERPOS2SVPROC) load(userptr, "glRasterPos2sv");
+ glad_glRasterPos3d = (PFNGLRASTERPOS3DPROC) load(userptr, "glRasterPos3d");
+ glad_glRasterPos3dv = (PFNGLRASTERPOS3DVPROC) load(userptr, "glRasterPos3dv");
+ glad_glRasterPos3f = (PFNGLRASTERPOS3FPROC) load(userptr, "glRasterPos3f");
+ glad_glRasterPos3fv = (PFNGLRASTERPOS3FVPROC) load(userptr, "glRasterPos3fv");
+ glad_glRasterPos3i = (PFNGLRASTERPOS3IPROC) load(userptr, "glRasterPos3i");
+ glad_glRasterPos3iv = (PFNGLRASTERPOS3IVPROC) load(userptr, "glRasterPos3iv");
+ glad_glRasterPos3s = (PFNGLRASTERPOS3SPROC) load(userptr, "glRasterPos3s");
+ glad_glRasterPos3sv = (PFNGLRASTERPOS3SVPROC) load(userptr, "glRasterPos3sv");
+ glad_glRasterPos4d = (PFNGLRASTERPOS4DPROC) load(userptr, "glRasterPos4d");
+ glad_glRasterPos4dv = (PFNGLRASTERPOS4DVPROC) load(userptr, "glRasterPos4dv");
+ glad_glRasterPos4f = (PFNGLRASTERPOS4FPROC) load(userptr, "glRasterPos4f");
+ glad_glRasterPos4fv = (PFNGLRASTERPOS4FVPROC) load(userptr, "glRasterPos4fv");
+ glad_glRasterPos4i = (PFNGLRASTERPOS4IPROC) load(userptr, "glRasterPos4i");
+ glad_glRasterPos4iv = (PFNGLRASTERPOS4IVPROC) load(userptr, "glRasterPos4iv");
+ glad_glRasterPos4s = (PFNGLRASTERPOS4SPROC) load(userptr, "glRasterPos4s");
+ glad_glRasterPos4sv = (PFNGLRASTERPOS4SVPROC) load(userptr, "glRasterPos4sv");
+ glad_glReadBuffer = (PFNGLREADBUFFERPROC) load(userptr, "glReadBuffer");
+ glad_glReadPixels = (PFNGLREADPIXELSPROC) load(userptr, "glReadPixels");
+ glad_glRectd = (PFNGLRECTDPROC) load(userptr, "glRectd");
+ glad_glRectdv = (PFNGLRECTDVPROC) load(userptr, "glRectdv");
+ glad_glRectf = (PFNGLRECTFPROC) load(userptr, "glRectf");
+ glad_glRectfv = (PFNGLRECTFVPROC) load(userptr, "glRectfv");
+ glad_glRecti = (PFNGLRECTIPROC) load(userptr, "glRecti");
+ glad_glRectiv = (PFNGLRECTIVPROC) load(userptr, "glRectiv");
+ glad_glRects = (PFNGLRECTSPROC) load(userptr, "glRects");
+ glad_glRectsv = (PFNGLRECTSVPROC) load(userptr, "glRectsv");
+ glad_glRenderMode = (PFNGLRENDERMODEPROC) load(userptr, "glRenderMode");
+ glad_glRotated = (PFNGLROTATEDPROC) load(userptr, "glRotated");
+ glad_glRotatef = (PFNGLROTATEFPROC) load(userptr, "glRotatef");
+ glad_glScaled = (PFNGLSCALEDPROC) load(userptr, "glScaled");
+ glad_glScalef = (PFNGLSCALEFPROC) load(userptr, "glScalef");
+ glad_glScissor = (PFNGLSCISSORPROC) load(userptr, "glScissor");
+ glad_glSelectBuffer = (PFNGLSELECTBUFFERPROC) load(userptr, "glSelectBuffer");
+ glad_glShadeModel = (PFNGLSHADEMODELPROC) load(userptr, "glShadeModel");
+ glad_glStencilFunc = (PFNGLSTENCILFUNCPROC) load(userptr, "glStencilFunc");
+ glad_glStencilMask = (PFNGLSTENCILMASKPROC) load(userptr, "glStencilMask");
+ glad_glStencilOp = (PFNGLSTENCILOPPROC) load(userptr, "glStencilOp");
+ glad_glTexCoord1d = (PFNGLTEXCOORD1DPROC) load(userptr, "glTexCoord1d");
+ glad_glTexCoord1dv = (PFNGLTEXCOORD1DVPROC) load(userptr, "glTexCoord1dv");
+ glad_glTexCoord1f = (PFNGLTEXCOORD1FPROC) load(userptr, "glTexCoord1f");
+ glad_glTexCoord1fv = (PFNGLTEXCOORD1FVPROC) load(userptr, "glTexCoord1fv");
+ glad_glTexCoord1i = (PFNGLTEXCOORD1IPROC) load(userptr, "glTexCoord1i");
+ glad_glTexCoord1iv = (PFNGLTEXCOORD1IVPROC) load(userptr, "glTexCoord1iv");
+ glad_glTexCoord1s = (PFNGLTEXCOORD1SPROC) load(userptr, "glTexCoord1s");
+ glad_glTexCoord1sv = (PFNGLTEXCOORD1SVPROC) load(userptr, "glTexCoord1sv");
+ glad_glTexCoord2d = (PFNGLTEXCOORD2DPROC) load(userptr, "glTexCoord2d");
+ glad_glTexCoord2dv = (PFNGLTEXCOORD2DVPROC) load(userptr, "glTexCoord2dv");
+ glad_glTexCoord2f = (PFNGLTEXCOORD2FPROC) load(userptr, "glTexCoord2f");
+ glad_glTexCoord2fv = (PFNGLTEXCOORD2FVPROC) load(userptr, "glTexCoord2fv");
+ glad_glTexCoord2i = (PFNGLTEXCOORD2IPROC) load(userptr, "glTexCoord2i");
+ glad_glTexCoord2iv = (PFNGLTEXCOORD2IVPROC) load(userptr, "glTexCoord2iv");
+ glad_glTexCoord2s = (PFNGLTEXCOORD2SPROC) load(userptr, "glTexCoord2s");
+ glad_glTexCoord2sv = (PFNGLTEXCOORD2SVPROC) load(userptr, "glTexCoord2sv");
+ glad_glTexCoord3d = (PFNGLTEXCOORD3DPROC) load(userptr, "glTexCoord3d");
+ glad_glTexCoord3dv = (PFNGLTEXCOORD3DVPROC) load(userptr, "glTexCoord3dv");
+ glad_glTexCoord3f = (PFNGLTEXCOORD3FPROC) load(userptr, "glTexCoord3f");
+ glad_glTexCoord3fv = (PFNGLTEXCOORD3FVPROC) load(userptr, "glTexCoord3fv");
+ glad_glTexCoord3i = (PFNGLTEXCOORD3IPROC) load(userptr, "glTexCoord3i");
+ glad_glTexCoord3iv = (PFNGLTEXCOORD3IVPROC) load(userptr, "glTexCoord3iv");
+ glad_glTexCoord3s = (PFNGLTEXCOORD3SPROC) load(userptr, "glTexCoord3s");
+ glad_glTexCoord3sv = (PFNGLTEXCOORD3SVPROC) load(userptr, "glTexCoord3sv");
+ glad_glTexCoord4d = (PFNGLTEXCOORD4DPROC) load(userptr, "glTexCoord4d");
+ glad_glTexCoord4dv = (PFNGLTEXCOORD4DVPROC) load(userptr, "glTexCoord4dv");
+ glad_glTexCoord4f = (PFNGLTEXCOORD4FPROC) load(userptr, "glTexCoord4f");
+ glad_glTexCoord4fv = (PFNGLTEXCOORD4FVPROC) load(userptr, "glTexCoord4fv");
+ glad_glTexCoord4i = (PFNGLTEXCOORD4IPROC) load(userptr, "glTexCoord4i");
+ glad_glTexCoord4iv = (PFNGLTEXCOORD4IVPROC) load(userptr, "glTexCoord4iv");
+ glad_glTexCoord4s = (PFNGLTEXCOORD4SPROC) load(userptr, "glTexCoord4s");
+ glad_glTexCoord4sv = (PFNGLTEXCOORD4SVPROC) load(userptr, "glTexCoord4sv");
+ glad_glTexEnvf = (PFNGLTEXENVFPROC) load(userptr, "glTexEnvf");
+ glad_glTexEnvfv = (PFNGLTEXENVFVPROC) load(userptr, "glTexEnvfv");
+ glad_glTexEnvi = (PFNGLTEXENVIPROC) load(userptr, "glTexEnvi");
+ glad_glTexEnviv = (PFNGLTEXENVIVPROC) load(userptr, "glTexEnviv");
+ glad_glTexGend = (PFNGLTEXGENDPROC) load(userptr, "glTexGend");
+ glad_glTexGendv = (PFNGLTEXGENDVPROC) load(userptr, "glTexGendv");
+ glad_glTexGenf = (PFNGLTEXGENFPROC) load(userptr, "glTexGenf");
+ glad_glTexGenfv = (PFNGLTEXGENFVPROC) load(userptr, "glTexGenfv");
+ glad_glTexGeni = (PFNGLTEXGENIPROC) load(userptr, "glTexGeni");
+ glad_glTexGeniv = (PFNGLTEXGENIVPROC) load(userptr, "glTexGeniv");
+ glad_glTexImage1D = (PFNGLTEXIMAGE1DPROC) load(userptr, "glTexImage1D");
+ glad_glTexImage2D = (PFNGLTEXIMAGE2DPROC) load(userptr, "glTexImage2D");
+ glad_glTexParameterf = (PFNGLTEXPARAMETERFPROC) load(userptr, "glTexParameterf");
+ glad_glTexParameterfv = (PFNGLTEXPARAMETERFVPROC) load(userptr, "glTexParameterfv");
+ glad_glTexParameteri = (PFNGLTEXPARAMETERIPROC) load(userptr, "glTexParameteri");
+ glad_glTexParameteriv = (PFNGLTEXPARAMETERIVPROC) load(userptr, "glTexParameteriv");
+ glad_glTranslated = (PFNGLTRANSLATEDPROC) load(userptr, "glTranslated");
+ glad_glTranslatef = (PFNGLTRANSLATEFPROC) load(userptr, "glTranslatef");
+ glad_glVertex2d = (PFNGLVERTEX2DPROC) load(userptr, "glVertex2d");
+ glad_glVertex2dv = (PFNGLVERTEX2DVPROC) load(userptr, "glVertex2dv");
+ glad_glVertex2f = (PFNGLVERTEX2FPROC) load(userptr, "glVertex2f");
+ glad_glVertex2fv = (PFNGLVERTEX2FVPROC) load(userptr, "glVertex2fv");
+ glad_glVertex2i = (PFNGLVERTEX2IPROC) load(userptr, "glVertex2i");
+ glad_glVertex2iv = (PFNGLVERTEX2IVPROC) load(userptr, "glVertex2iv");
+ glad_glVertex2s = (PFNGLVERTEX2SPROC) load(userptr, "glVertex2s");
+ glad_glVertex2sv = (PFNGLVERTEX2SVPROC) load(userptr, "glVertex2sv");
+ glad_glVertex3d = (PFNGLVERTEX3DPROC) load(userptr, "glVertex3d");
+ glad_glVertex3dv = (PFNGLVERTEX3DVPROC) load(userptr, "glVertex3dv");
+ glad_glVertex3f = (PFNGLVERTEX3FPROC) load(userptr, "glVertex3f");
+ glad_glVertex3fv = (PFNGLVERTEX3FVPROC) load(userptr, "glVertex3fv");
+ glad_glVertex3i = (PFNGLVERTEX3IPROC) load(userptr, "glVertex3i");
+ glad_glVertex3iv = (PFNGLVERTEX3IVPROC) load(userptr, "glVertex3iv");
+ glad_glVertex3s = (PFNGLVERTEX3SPROC) load(userptr, "glVertex3s");
+ glad_glVertex3sv = (PFNGLVERTEX3SVPROC) load(userptr, "glVertex3sv");
+ glad_glVertex4d = (PFNGLVERTEX4DPROC) load(userptr, "glVertex4d");
+ glad_glVertex4dv = (PFNGLVERTEX4DVPROC) load(userptr, "glVertex4dv");
+ glad_glVertex4f = (PFNGLVERTEX4FPROC) load(userptr, "glVertex4f");
+ glad_glVertex4fv = (PFNGLVERTEX4FVPROC) load(userptr, "glVertex4fv");
+ glad_glVertex4i = (PFNGLVERTEX4IPROC) load(userptr, "glVertex4i");
+ glad_glVertex4iv = (PFNGLVERTEX4IVPROC) load(userptr, "glVertex4iv");
+ glad_glVertex4s = (PFNGLVERTEX4SPROC) load(userptr, "glVertex4s");
+ glad_glVertex4sv = (PFNGLVERTEX4SVPROC) load(userptr, "glVertex4sv");
+ glad_glViewport = (PFNGLVIEWPORTPROC) load(userptr, "glViewport");
+}
+static void glad_gl_load_GL_VERSION_1_1( GLADuserptrloadfunc load, void* userptr) {
+ if(!GLAD_GL_VERSION_1_1) return;
+ glad_glAreTexturesResident = (PFNGLARETEXTURESRESIDENTPROC) load(userptr, "glAreTexturesResident");
+ glad_glArrayElement = (PFNGLARRAYELEMENTPROC) load(userptr, "glArrayElement");
+ glad_glBindTexture = (PFNGLBINDTEXTUREPROC) load(userptr, "glBindTexture");
+ glad_glColorPointer = (PFNGLCOLORPOINTERPROC) load(userptr, "glColorPointer");
+ glad_glCopyTexImage1D = (PFNGLCOPYTEXIMAGE1DPROC) load(userptr, "glCopyTexImage1D");
+ glad_glCopyTexImage2D = (PFNGLCOPYTEXIMAGE2DPROC) load(userptr, "glCopyTexImage2D");
+ glad_glCopyTexSubImage1D = (PFNGLCOPYTEXSUBIMAGE1DPROC) load(userptr, "glCopyTexSubImage1D");
+ glad_glCopyTexSubImage2D = (PFNGLCOPYTEXSUBIMAGE2DPROC) load(userptr, "glCopyTexSubImage2D");
+ glad_glDeleteTextures = (PFNGLDELETETEXTURESPROC) load(userptr, "glDeleteTextures");
+ glad_glDisableClientState = (PFNGLDISABLECLIENTSTATEPROC) load(userptr, "glDisableClientState");
+ glad_glDrawArrays = (PFNGLDRAWARRAYSPROC) load(userptr, "glDrawArrays");
+ glad_glDrawElements = (PFNGLDRAWELEMENTSPROC) load(userptr, "glDrawElements");
+ glad_glEdgeFlagPointer = (PFNGLEDGEFLAGPOINTERPROC) load(userptr, "glEdgeFlagPointer");
+ glad_glEnableClientState = (PFNGLENABLECLIENTSTATEPROC) load(userptr, "glEnableClientState");
+ glad_glGenTextures = (PFNGLGENTEXTURESPROC) load(userptr, "glGenTextures");
+ glad_glGetPointerv = (PFNGLGETPOINTERVPROC) load(userptr, "glGetPointerv");
+ glad_glIndexPointer = (PFNGLINDEXPOINTERPROC) load(userptr, "glIndexPointer");
+ glad_glIndexub = (PFNGLINDEXUBPROC) load(userptr, "glIndexub");
+ glad_glIndexubv = (PFNGLINDEXUBVPROC) load(userptr, "glIndexubv");
+ glad_glInterleavedArrays = (PFNGLINTERLEAVEDARRAYSPROC) load(userptr, "glInterleavedArrays");
+ glad_glIsTexture = (PFNGLISTEXTUREPROC) load(userptr, "glIsTexture");
+ glad_glNormalPointer = (PFNGLNORMALPOINTERPROC) load(userptr, "glNormalPointer");
+ glad_glPolygonOffset = (PFNGLPOLYGONOFFSETPROC) load(userptr, "glPolygonOffset");
+ glad_glPopClientAttrib = (PFNGLPOPCLIENTATTRIBPROC) load(userptr, "glPopClientAttrib");
+ glad_glPrioritizeTextures = (PFNGLPRIORITIZETEXTURESPROC) load(userptr, "glPrioritizeTextures");
+ glad_glPushClientAttrib = (PFNGLPUSHCLIENTATTRIBPROC) load(userptr, "glPushClientAttrib");
+ glad_glTexCoordPointer = (PFNGLTEXCOORDPOINTERPROC) load(userptr, "glTexCoordPointer");
+ glad_glTexSubImage1D = (PFNGLTEXSUBIMAGE1DPROC) load(userptr, "glTexSubImage1D");
+ glad_glTexSubImage2D = (PFNGLTEXSUBIMAGE2DPROC) load(userptr, "glTexSubImage2D");
+ glad_glVertexPointer = (PFNGLVERTEXPOINTERPROC) load(userptr, "glVertexPointer");
+}
+static void glad_gl_load_GL_VERSION_1_2( GLADuserptrloadfunc load, void* userptr) {
+ if(!GLAD_GL_VERSION_1_2) return;
+ glad_glCopyTexSubImage3D = (PFNGLCOPYTEXSUBIMAGE3DPROC) load(userptr, "glCopyTexSubImage3D");
+ glad_glDrawRangeElements = (PFNGLDRAWRANGEELEMENTSPROC) load(userptr, "glDrawRangeElements");
+ glad_glTexImage3D = (PFNGLTEXIMAGE3DPROC) load(userptr, "glTexImage3D");
+ glad_glTexSubImage3D = (PFNGLTEXSUBIMAGE3DPROC) load(userptr, "glTexSubImage3D");
+}
+static void glad_gl_load_GL_VERSION_1_3( GLADuserptrloadfunc load, void* userptr) {
+ if(!GLAD_GL_VERSION_1_3) return;
+ glad_glActiveTexture = (PFNGLACTIVETEXTUREPROC) load(userptr, "glActiveTexture");
+ glad_glClientActiveTexture = (PFNGLCLIENTACTIVETEXTUREPROC) load(userptr, "glClientActiveTexture");
+ glad_glCompressedTexImage1D = (PFNGLCOMPRESSEDTEXIMAGE1DPROC) load(userptr, "glCompressedTexImage1D");
+ glad_glCompressedTexImage2D = (PFNGLCOMPRESSEDTEXIMAGE2DPROC) load(userptr, "glCompressedTexImage2D");
+ glad_glCompressedTexImage3D = (PFNGLCOMPRESSEDTEXIMAGE3DPROC) load(userptr, "glCompressedTexImage3D");
+ glad_glCompressedTexSubImage1D = (PFNGLCOMPRESSEDTEXSUBIMAGE1DPROC) load(userptr, "glCompressedTexSubImage1D");
+ glad_glCompressedTexSubImage2D = (PFNGLCOMPRESSEDTEXSUBIMAGE2DPROC) load(userptr, "glCompressedTexSubImage2D");
+ glad_glCompressedTexSubImage3D = (PFNGLCOMPRESSEDTEXSUBIMAGE3DPROC) load(userptr, "glCompressedTexSubImage3D");
+ glad_glGetCompressedTexImage = (PFNGLGETCOMPRESSEDTEXIMAGEPROC) load(userptr, "glGetCompressedTexImage");
+ glad_glLoadTransposeMatrixd = (PFNGLLOADTRANSPOSEMATRIXDPROC) load(userptr, "glLoadTransposeMatrixd");
+ glad_glLoadTransposeMatrixf = (PFNGLLOADTRANSPOSEMATRIXFPROC) load(userptr, "glLoadTransposeMatrixf");
+ glad_glMultTransposeMatrixd = (PFNGLMULTTRANSPOSEMATRIXDPROC) load(userptr, "glMultTransposeMatrixd");
+ glad_glMultTransposeMatrixf = (PFNGLMULTTRANSPOSEMATRIXFPROC) load(userptr, "glMultTransposeMatrixf");
+ glad_glMultiTexCoord1d = (PFNGLMULTITEXCOORD1DPROC) load(userptr, "glMultiTexCoord1d");
+ glad_glMultiTexCoord1dv = (PFNGLMULTITEXCOORD1DVPROC) load(userptr, "glMultiTexCoord1dv");
+ glad_glMultiTexCoord1f = (PFNGLMULTITEXCOORD1FPROC) load(userptr, "glMultiTexCoord1f");
+ glad_glMultiTexCoord1fv = (PFNGLMULTITEXCOORD1FVPROC) load(userptr, "glMultiTexCoord1fv");
+ glad_glMultiTexCoord1i = (PFNGLMULTITEXCOORD1IPROC) load(userptr, "glMultiTexCoord1i");
+ glad_glMultiTexCoord1iv = (PFNGLMULTITEXCOORD1IVPROC) load(userptr, "glMultiTexCoord1iv");
+ glad_glMultiTexCoord1s = (PFNGLMULTITEXCOORD1SPROC) load(userptr, "glMultiTexCoord1s");
+ glad_glMultiTexCoord1sv = (PFNGLMULTITEXCOORD1SVPROC) load(userptr, "glMultiTexCoord1sv");
+ glad_glMultiTexCoord2d = (PFNGLMULTITEXCOORD2DPROC) load(userptr, "glMultiTexCoord2d");
+ glad_glMultiTexCoord2dv = (PFNGLMULTITEXCOORD2DVPROC) load(userptr, "glMultiTexCoord2dv");
+ glad_glMultiTexCoord2f = (PFNGLMULTITEXCOORD2FPROC) load(userptr, "glMultiTexCoord2f");
+ glad_glMultiTexCoord2fv = (PFNGLMULTITEXCOORD2FVPROC) load(userptr, "glMultiTexCoord2fv");
+ glad_glMultiTexCoord2i = (PFNGLMULTITEXCOORD2IPROC) load(userptr, "glMultiTexCoord2i");
+ glad_glMultiTexCoord2iv = (PFNGLMULTITEXCOORD2IVPROC) load(userptr, "glMultiTexCoord2iv");
+ glad_glMultiTexCoord2s = (PFNGLMULTITEXCOORD2SPROC) load(userptr, "glMultiTexCoord2s");
+ glad_glMultiTexCoord2sv = (PFNGLMULTITEXCOORD2SVPROC) load(userptr, "glMultiTexCoord2sv");
+ glad_glMultiTexCoord3d = (PFNGLMULTITEXCOORD3DPROC) load(userptr, "glMultiTexCoord3d");
+ glad_glMultiTexCoord3dv = (PFNGLMULTITEXCOORD3DVPROC) load(userptr, "glMultiTexCoord3dv");
+ glad_glMultiTexCoord3f = (PFNGLMULTITEXCOORD3FPROC) load(userptr, "glMultiTexCoord3f");
+ glad_glMultiTexCoord3fv = (PFNGLMULTITEXCOORD3FVPROC) load(userptr, "glMultiTexCoord3fv");
+ glad_glMultiTexCoord3i = (PFNGLMULTITEXCOORD3IPROC) load(userptr, "glMultiTexCoord3i");
+ glad_glMultiTexCoord3iv = (PFNGLMULTITEXCOORD3IVPROC) load(userptr, "glMultiTexCoord3iv");
+ glad_glMultiTexCoord3s = (PFNGLMULTITEXCOORD3SPROC) load(userptr, "glMultiTexCoord3s");
+ glad_glMultiTexCoord3sv = (PFNGLMULTITEXCOORD3SVPROC) load(userptr, "glMultiTexCoord3sv");
+ glad_glMultiTexCoord4d = (PFNGLMULTITEXCOORD4DPROC) load(userptr, "glMultiTexCoord4d");
+ glad_glMultiTexCoord4dv = (PFNGLMULTITEXCOORD4DVPROC) load(userptr, "glMultiTexCoord4dv");
+ glad_glMultiTexCoord4f = (PFNGLMULTITEXCOORD4FPROC) load(userptr, "glMultiTexCoord4f");
+ glad_glMultiTexCoord4fv = (PFNGLMULTITEXCOORD4FVPROC) load(userptr, "glMultiTexCoord4fv");
+ glad_glMultiTexCoord4i = (PFNGLMULTITEXCOORD4IPROC) load(userptr, "glMultiTexCoord4i");
+ glad_glMultiTexCoord4iv = (PFNGLMULTITEXCOORD4IVPROC) load(userptr, "glMultiTexCoord4iv");
+ glad_glMultiTexCoord4s = (PFNGLMULTITEXCOORD4SPROC) load(userptr, "glMultiTexCoord4s");
+ glad_glMultiTexCoord4sv = (PFNGLMULTITEXCOORD4SVPROC) load(userptr, "glMultiTexCoord4sv");
+ glad_glSampleCoverage = (PFNGLSAMPLECOVERAGEPROC) load(userptr, "glSampleCoverage");
+}
+static void glad_gl_load_GL_VERSION_1_4( GLADuserptrloadfunc load, void* userptr) {
+ if(!GLAD_GL_VERSION_1_4) return;
+ glad_glBlendColor = (PFNGLBLENDCOLORPROC) load(userptr, "glBlendColor");
+ glad_glBlendEquation = (PFNGLBLENDEQUATIONPROC) load(userptr, "glBlendEquation");
+ glad_glBlendFuncSeparate = (PFNGLBLENDFUNCSEPARATEPROC) load(userptr, "glBlendFuncSeparate");
+ glad_glFogCoordPointer = (PFNGLFOGCOORDPOINTERPROC) load(userptr, "glFogCoordPointer");
+ glad_glFogCoordd = (PFNGLFOGCOORDDPROC) load(userptr, "glFogCoordd");
+ glad_glFogCoorddv = (PFNGLFOGCOORDDVPROC) load(userptr, "glFogCoorddv");
+ glad_glFogCoordf = (PFNGLFOGCOORDFPROC) load(userptr, "glFogCoordf");
+ glad_glFogCoordfv = (PFNGLFOGCOORDFVPROC) load(userptr, "glFogCoordfv");
+ glad_glMultiDrawArrays = (PFNGLMULTIDRAWARRAYSPROC) load(userptr, "glMultiDrawArrays");
+ glad_glMultiDrawElements = (PFNGLMULTIDRAWELEMENTSPROC) load(userptr, "glMultiDrawElements");
+ glad_glPointParameterf = (PFNGLPOINTPARAMETERFPROC) load(userptr, "glPointParameterf");
+ glad_glPointParameterfv = (PFNGLPOINTPARAMETERFVPROC) load(userptr, "glPointParameterfv");
+ glad_glPointParameteri = (PFNGLPOINTPARAMETERIPROC) load(userptr, "glPointParameteri");
+ glad_glPointParameteriv = (PFNGLPOINTPARAMETERIVPROC) load(userptr, "glPointParameteriv");
+ glad_glSecondaryColor3b = (PFNGLSECONDARYCOLOR3BPROC) load(userptr, "glSecondaryColor3b");
+ glad_glSecondaryColor3bv = (PFNGLSECONDARYCOLOR3BVPROC) load(userptr, "glSecondaryColor3bv");
+ glad_glSecondaryColor3d = (PFNGLSECONDARYCOLOR3DPROC) load(userptr, "glSecondaryColor3d");
+ glad_glSecondaryColor3dv = (PFNGLSECONDARYCOLOR3DVPROC) load(userptr, "glSecondaryColor3dv");
+ glad_glSecondaryColor3f = (PFNGLSECONDARYCOLOR3FPROC) load(userptr, "glSecondaryColor3f");
+ glad_glSecondaryColor3fv = (PFNGLSECONDARYCOLOR3FVPROC) load(userptr, "glSecondaryColor3fv");
+ glad_glSecondaryColor3i = (PFNGLSECONDARYCOLOR3IPROC) load(userptr, "glSecondaryColor3i");
+ glad_glSecondaryColor3iv = (PFNGLSECONDARYCOLOR3IVPROC) load(userptr, "glSecondaryColor3iv");
+ glad_glSecondaryColor3s = (PFNGLSECONDARYCOLOR3SPROC) load(userptr, "glSecondaryColor3s");
+ glad_glSecondaryColor3sv = (PFNGLSECONDARYCOLOR3SVPROC) load(userptr, "glSecondaryColor3sv");
+ glad_glSecondaryColor3ub = (PFNGLSECONDARYCOLOR3UBPROC) load(userptr, "glSecondaryColor3ub");
+ glad_glSecondaryColor3ubv = (PFNGLSECONDARYCOLOR3UBVPROC) load(userptr, "glSecondaryColor3ubv");
+ glad_glSecondaryColor3ui = (PFNGLSECONDARYCOLOR3UIPROC) load(userptr, "glSecondaryColor3ui");
+ glad_glSecondaryColor3uiv = (PFNGLSECONDARYCOLOR3UIVPROC) load(userptr, "glSecondaryColor3uiv");
+ glad_glSecondaryColor3us = (PFNGLSECONDARYCOLOR3USPROC) load(userptr, "glSecondaryColor3us");
+ glad_glSecondaryColor3usv = (PFNGLSECONDARYCOLOR3USVPROC) load(userptr, "glSecondaryColor3usv");
+ glad_glSecondaryColorPointer = (PFNGLSECONDARYCOLORPOINTERPROC) load(userptr, "glSecondaryColorPointer");
+ glad_glWindowPos2d = (PFNGLWINDOWPOS2DPROC) load(userptr, "glWindowPos2d");
+ glad_glWindowPos2dv = (PFNGLWINDOWPOS2DVPROC) load(userptr, "glWindowPos2dv");
+ glad_glWindowPos2f = (PFNGLWINDOWPOS2FPROC) load(userptr, "glWindowPos2f");
+ glad_glWindowPos2fv = (PFNGLWINDOWPOS2FVPROC) load(userptr, "glWindowPos2fv");
+ glad_glWindowPos2i = (PFNGLWINDOWPOS2IPROC) load(userptr, "glWindowPos2i");
+ glad_glWindowPos2iv = (PFNGLWINDOWPOS2IVPROC) load(userptr, "glWindowPos2iv");
+ glad_glWindowPos2s = (PFNGLWINDOWPOS2SPROC) load(userptr, "glWindowPos2s");
+ glad_glWindowPos2sv = (PFNGLWINDOWPOS2SVPROC) load(userptr, "glWindowPos2sv");
+ glad_glWindowPos3d = (PFNGLWINDOWPOS3DPROC) load(userptr, "glWindowPos3d");
+ glad_glWindowPos3dv = (PFNGLWINDOWPOS3DVPROC) load(userptr, "glWindowPos3dv");
+ glad_glWindowPos3f = (PFNGLWINDOWPOS3FPROC) load(userptr, "glWindowPos3f");
+ glad_glWindowPos3fv = (PFNGLWINDOWPOS3FVPROC) load(userptr, "glWindowPos3fv");
+ glad_glWindowPos3i = (PFNGLWINDOWPOS3IPROC) load(userptr, "glWindowPos3i");
+ glad_glWindowPos3iv = (PFNGLWINDOWPOS3IVPROC) load(userptr, "glWindowPos3iv");
+ glad_glWindowPos3s = (PFNGLWINDOWPOS3SPROC) load(userptr, "glWindowPos3s");
+ glad_glWindowPos3sv = (PFNGLWINDOWPOS3SVPROC) load(userptr, "glWindowPos3sv");
+}
+static void glad_gl_load_GL_VERSION_1_5( GLADuserptrloadfunc load, void* userptr) {
+ if(!GLAD_GL_VERSION_1_5) return;
+ glad_glBeginQuery = (PFNGLBEGINQUERYPROC) load(userptr, "glBeginQuery");
+ glad_glBindBuffer = (PFNGLBINDBUFFERPROC) load(userptr, "glBindBuffer");
+ glad_glBufferData = (PFNGLBUFFERDATAPROC) load(userptr, "glBufferData");
+ glad_glBufferSubData = (PFNGLBUFFERSUBDATAPROC) load(userptr, "glBufferSubData");
+ glad_glDeleteBuffers = (PFNGLDELETEBUFFERSPROC) load(userptr, "glDeleteBuffers");
+ glad_glDeleteQueries = (PFNGLDELETEQUERIESPROC) load(userptr, "glDeleteQueries");
+ glad_glEndQuery = (PFNGLENDQUERYPROC) load(userptr, "glEndQuery");
+ glad_glGenBuffers = (PFNGLGENBUFFERSPROC) load(userptr, "glGenBuffers");
+ glad_glGenQueries = (PFNGLGENQUERIESPROC) load(userptr, "glGenQueries");
+ glad_glGetBufferParameteriv = (PFNGLGETBUFFERPARAMETERIVPROC) load(userptr, "glGetBufferParameteriv");
+ glad_glGetBufferPointerv = (PFNGLGETBUFFERPOINTERVPROC) load(userptr, "glGetBufferPointerv");
+ glad_glGetBufferSubData = (PFNGLGETBUFFERSUBDATAPROC) load(userptr, "glGetBufferSubData");
+ glad_glGetQueryObjectiv = (PFNGLGETQUERYOBJECTIVPROC) load(userptr, "glGetQueryObjectiv");
+ glad_glGetQueryObjectuiv = (PFNGLGETQUERYOBJECTUIVPROC) load(userptr, "glGetQueryObjectuiv");
+ glad_glGetQueryiv = (PFNGLGETQUERYIVPROC) load(userptr, "glGetQueryiv");
+ glad_glIsBuffer = (PFNGLISBUFFERPROC) load(userptr, "glIsBuffer");
+ glad_glIsQuery = (PFNGLISQUERYPROC) load(userptr, "glIsQuery");
+ glad_glMapBuffer = (PFNGLMAPBUFFERPROC) load(userptr, "glMapBuffer");
+ glad_glUnmapBuffer = (PFNGLUNMAPBUFFERPROC) load(userptr, "glUnmapBuffer");
+}
+static void glad_gl_load_GL_VERSION_2_0( GLADuserptrloadfunc load, void* userptr) {
+ if(!GLAD_GL_VERSION_2_0) return;
+ glad_glAttachShader = (PFNGLATTACHSHADERPROC) load(userptr, "glAttachShader");
+ glad_glBindAttribLocation = (PFNGLBINDATTRIBLOCATIONPROC) load(userptr, "glBindAttribLocation");
+ glad_glBlendEquationSeparate = (PFNGLBLENDEQUATIONSEPARATEPROC) load(userptr, "glBlendEquationSeparate");
+ glad_glCompileShader = (PFNGLCOMPILESHADERPROC) load(userptr, "glCompileShader");
+ glad_glCreateProgram = (PFNGLCREATEPROGRAMPROC) load(userptr, "glCreateProgram");
+ glad_glCreateShader = (PFNGLCREATESHADERPROC) load(userptr, "glCreateShader");
+ glad_glDeleteProgram = (PFNGLDELETEPROGRAMPROC) load(userptr, "glDeleteProgram");
+ glad_glDeleteShader = (PFNGLDELETESHADERPROC) load(userptr, "glDeleteShader");
+ glad_glDetachShader = (PFNGLDETACHSHADERPROC) load(userptr, "glDetachShader");
+ glad_glDisableVertexAttribArray = (PFNGLDISABLEVERTEXATTRIBARRAYPROC) load(userptr, "glDisableVertexAttribArray");
+ glad_glDrawBuffers = (PFNGLDRAWBUFFERSPROC) load(userptr, "glDrawBuffers");
+ glad_glEnableVertexAttribArray = (PFNGLENABLEVERTEXATTRIBARRAYPROC) load(userptr, "glEnableVertexAttribArray");
+ glad_glGetActiveAttrib = (PFNGLGETACTIVEATTRIBPROC) load(userptr, "glGetActiveAttrib");
+ glad_glGetActiveUniform = (PFNGLGETACTIVEUNIFORMPROC) load(userptr, "glGetActiveUniform");
+ glad_glGetAttachedShaders = (PFNGLGETATTACHEDSHADERSPROC) load(userptr, "glGetAttachedShaders");
+ glad_glGetAttribLocation = (PFNGLGETATTRIBLOCATIONPROC) load(userptr, "glGetAttribLocation");
+ glad_glGetProgramInfoLog = (PFNGLGETPROGRAMINFOLOGPROC) load(userptr, "glGetProgramInfoLog");
+ glad_glGetProgramiv = (PFNGLGETPROGRAMIVPROC) load(userptr, "glGetProgramiv");
+ glad_glGetShaderInfoLog = (PFNGLGETSHADERINFOLOGPROC) load(userptr, "glGetShaderInfoLog");
+ glad_glGetShaderSource = (PFNGLGETSHADERSOURCEPROC) load(userptr, "glGetShaderSource");
+ glad_glGetShaderiv = (PFNGLGETSHADERIVPROC) load(userptr, "glGetShaderiv");
+ glad_glGetUniformLocation = (PFNGLGETUNIFORMLOCATIONPROC) load(userptr, "glGetUniformLocation");
+ glad_glGetUniformfv = (PFNGLGETUNIFORMFVPROC) load(userptr, "glGetUniformfv");
+ glad_glGetUniformiv = (PFNGLGETUNIFORMIVPROC) load(userptr, "glGetUniformiv");
+ glad_glGetVertexAttribPointerv = (PFNGLGETVERTEXATTRIBPOINTERVPROC) load(userptr, "glGetVertexAttribPointerv");
+ glad_glGetVertexAttribdv = (PFNGLGETVERTEXATTRIBDVPROC) load(userptr, "glGetVertexAttribdv");
+ glad_glGetVertexAttribfv = (PFNGLGETVERTEXATTRIBFVPROC) load(userptr, "glGetVertexAttribfv");
+ glad_glGetVertexAttribiv = (PFNGLGETVERTEXATTRIBIVPROC) load(userptr, "glGetVertexAttribiv");
+ glad_glIsProgram = (PFNGLISPROGRAMPROC) load(userptr, "glIsProgram");
+ glad_glIsShader = (PFNGLISSHADERPROC) load(userptr, "glIsShader");
+ glad_glLinkProgram = (PFNGLLINKPROGRAMPROC) load(userptr, "glLinkProgram");
+ glad_glShaderSource = (PFNGLSHADERSOURCEPROC) load(userptr, "glShaderSource");
+ glad_glStencilFuncSeparate = (PFNGLSTENCILFUNCSEPARATEPROC) load(userptr, "glStencilFuncSeparate");
+ glad_glStencilMaskSeparate = (PFNGLSTENCILMASKSEPARATEPROC) load(userptr, "glStencilMaskSeparate");
+ glad_glStencilOpSeparate = (PFNGLSTENCILOPSEPARATEPROC) load(userptr, "glStencilOpSeparate");
+ glad_glUniform1f = (PFNGLUNIFORM1FPROC) load(userptr, "glUniform1f");
+ glad_glUniform1fv = (PFNGLUNIFORM1FVPROC) load(userptr, "glUniform1fv");
+ glad_glUniform1i = (PFNGLUNIFORM1IPROC) load(userptr, "glUniform1i");
+ glad_glUniform1iv = (PFNGLUNIFORM1IVPROC) load(userptr, "glUniform1iv");
+ glad_glUniform2f = (PFNGLUNIFORM2FPROC) load(userptr, "glUniform2f");
+ glad_glUniform2fv = (PFNGLUNIFORM2FVPROC) load(userptr, "glUniform2fv");
+ glad_glUniform2i = (PFNGLUNIFORM2IPROC) load(userptr, "glUniform2i");
+ glad_glUniform2iv = (PFNGLUNIFORM2IVPROC) load(userptr, "glUniform2iv");
+ glad_glUniform3f = (PFNGLUNIFORM3FPROC) load(userptr, "glUniform3f");
+ glad_glUniform3fv = (PFNGLUNIFORM3FVPROC) load(userptr, "glUniform3fv");
+ glad_glUniform3i = (PFNGLUNIFORM3IPROC) load(userptr, "glUniform3i");
+ glad_glUniform3iv = (PFNGLUNIFORM3IVPROC) load(userptr, "glUniform3iv");
+ glad_glUniform4f = (PFNGLUNIFORM4FPROC) load(userptr, "glUniform4f");
+ glad_glUniform4fv = (PFNGLUNIFORM4FVPROC) load(userptr, "glUniform4fv");
+ glad_glUniform4i = (PFNGLUNIFORM4IPROC) load(userptr, "glUniform4i");
+ glad_glUniform4iv = (PFNGLUNIFORM4IVPROC) load(userptr, "glUniform4iv");
+ glad_glUniformMatrix2fv = (PFNGLUNIFORMMATRIX2FVPROC) load(userptr, "glUniformMatrix2fv");
+ glad_glUniformMatrix3fv = (PFNGLUNIFORMMATRIX3FVPROC) load(userptr, "glUniformMatrix3fv");
+ glad_glUniformMatrix4fv = (PFNGLUNIFORMMATRIX4FVPROC) load(userptr, "glUniformMatrix4fv");
+ glad_glUseProgram = (PFNGLUSEPROGRAMPROC) load(userptr, "glUseProgram");
+ glad_glValidateProgram = (PFNGLVALIDATEPROGRAMPROC) load(userptr, "glValidateProgram");
+ glad_glVertexAttrib1d = (PFNGLVERTEXATTRIB1DPROC) load(userptr, "glVertexAttrib1d");
+ glad_glVertexAttrib1dv = (PFNGLVERTEXATTRIB1DVPROC) load(userptr, "glVertexAttrib1dv");
+ glad_glVertexAttrib1f = (PFNGLVERTEXATTRIB1FPROC) load(userptr, "glVertexAttrib1f");
+ glad_glVertexAttrib1fv = (PFNGLVERTEXATTRIB1FVPROC) load(userptr, "glVertexAttrib1fv");
+ glad_glVertexAttrib1s = (PFNGLVERTEXATTRIB1SPROC) load(userptr, "glVertexAttrib1s");
+ glad_glVertexAttrib1sv = (PFNGLVERTEXATTRIB1SVPROC) load(userptr, "glVertexAttrib1sv");
+ glad_glVertexAttrib2d = (PFNGLVERTEXATTRIB2DPROC) load(userptr, "glVertexAttrib2d");
+ glad_glVertexAttrib2dv = (PFNGLVERTEXATTRIB2DVPROC) load(userptr, "glVertexAttrib2dv");
+ glad_glVertexAttrib2f = (PFNGLVERTEXATTRIB2FPROC) load(userptr, "glVertexAttrib2f");
+ glad_glVertexAttrib2fv = (PFNGLVERTEXATTRIB2FVPROC) load(userptr, "glVertexAttrib2fv");
+ glad_glVertexAttrib2s = (PFNGLVERTEXATTRIB2SPROC) load(userptr, "glVertexAttrib2s");
+ glad_glVertexAttrib2sv = (PFNGLVERTEXATTRIB2SVPROC) load(userptr, "glVertexAttrib2sv");
+ glad_glVertexAttrib3d = (PFNGLVERTEXATTRIB3DPROC) load(userptr, "glVertexAttrib3d");
+ glad_glVertexAttrib3dv = (PFNGLVERTEXATTRIB3DVPROC) load(userptr, "glVertexAttrib3dv");
+ glad_glVertexAttrib3f = (PFNGLVERTEXATTRIB3FPROC) load(userptr, "glVertexAttrib3f");
+ glad_glVertexAttrib3fv = (PFNGLVERTEXATTRIB3FVPROC) load(userptr, "glVertexAttrib3fv");
+ glad_glVertexAttrib3s = (PFNGLVERTEXATTRIB3SPROC) load(userptr, "glVertexAttrib3s");
+ glad_glVertexAttrib3sv = (PFNGLVERTEXATTRIB3SVPROC) load(userptr, "glVertexAttrib3sv");
+ glad_glVertexAttrib4Nbv = (PFNGLVERTEXATTRIB4NBVPROC) load(userptr, "glVertexAttrib4Nbv");
+ glad_glVertexAttrib4Niv = (PFNGLVERTEXATTRIB4NIVPROC) load(userptr, "glVertexAttrib4Niv");
+ glad_glVertexAttrib4Nsv = (PFNGLVERTEXATTRIB4NSVPROC) load(userptr, "glVertexAttrib4Nsv");
+ glad_glVertexAttrib4Nub = (PFNGLVERTEXATTRIB4NUBPROC) load(userptr, "glVertexAttrib4Nub");
+ glad_glVertexAttrib4Nubv = (PFNGLVERTEXATTRIB4NUBVPROC) load(userptr, "glVertexAttrib4Nubv");
+ glad_glVertexAttrib4Nuiv = (PFNGLVERTEXATTRIB4NUIVPROC) load(userptr, "glVertexAttrib4Nuiv");
+ glad_glVertexAttrib4Nusv = (PFNGLVERTEXATTRIB4NUSVPROC) load(userptr, "glVertexAttrib4Nusv");
+ glad_glVertexAttrib4bv = (PFNGLVERTEXATTRIB4BVPROC) load(userptr, "glVertexAttrib4bv");
+ glad_glVertexAttrib4d = (PFNGLVERTEXATTRIB4DPROC) load(userptr, "glVertexAttrib4d");
+ glad_glVertexAttrib4dv = (PFNGLVERTEXATTRIB4DVPROC) load(userptr, "glVertexAttrib4dv");
+ glad_glVertexAttrib4f = (PFNGLVERTEXATTRIB4FPROC) load(userptr, "glVertexAttrib4f");
+ glad_glVertexAttrib4fv = (PFNGLVERTEXATTRIB4FVPROC) load(userptr, "glVertexAttrib4fv");
+ glad_glVertexAttrib4iv = (PFNGLVERTEXATTRIB4IVPROC) load(userptr, "glVertexAttrib4iv");
+ glad_glVertexAttrib4s = (PFNGLVERTEXATTRIB4SPROC) load(userptr, "glVertexAttrib4s");
+ glad_glVertexAttrib4sv = (PFNGLVERTEXATTRIB4SVPROC) load(userptr, "glVertexAttrib4sv");
+ glad_glVertexAttrib4ubv = (PFNGLVERTEXATTRIB4UBVPROC) load(userptr, "glVertexAttrib4ubv");
+ glad_glVertexAttrib4uiv = (PFNGLVERTEXATTRIB4UIVPROC) load(userptr, "glVertexAttrib4uiv");
+ glad_glVertexAttrib4usv = (PFNGLVERTEXATTRIB4USVPROC) load(userptr, "glVertexAttrib4usv");
+ glad_glVertexAttribPointer = (PFNGLVERTEXATTRIBPOINTERPROC) load(userptr, "glVertexAttribPointer");
+}
+static void glad_gl_load_GL_VERSION_2_1( GLADuserptrloadfunc load, void* userptr) {
+ if(!GLAD_GL_VERSION_2_1) return;
+ glad_glUniformMatrix2x3fv = (PFNGLUNIFORMMATRIX2X3FVPROC) load(userptr, "glUniformMatrix2x3fv");
+ glad_glUniformMatrix2x4fv = (PFNGLUNIFORMMATRIX2X4FVPROC) load(userptr, "glUniformMatrix2x4fv");
+ glad_glUniformMatrix3x2fv = (PFNGLUNIFORMMATRIX3X2FVPROC) load(userptr, "glUniformMatrix3x2fv");
+ glad_glUniformMatrix3x4fv = (PFNGLUNIFORMMATRIX3X4FVPROC) load(userptr, "glUniformMatrix3x4fv");
+ glad_glUniformMatrix4x2fv = (PFNGLUNIFORMMATRIX4X2FVPROC) load(userptr, "glUniformMatrix4x2fv");
+ glad_glUniformMatrix4x3fv = (PFNGLUNIFORMMATRIX4X3FVPROC) load(userptr, "glUniformMatrix4x3fv");
+}
+static void glad_gl_load_GL_VERSION_3_0( GLADuserptrloadfunc load, void* userptr) {
+ if(!GLAD_GL_VERSION_3_0) return;
+ glad_glBeginConditionalRender = (PFNGLBEGINCONDITIONALRENDERPROC) load(userptr, "glBeginConditionalRender");
+ glad_glBeginTransformFeedback = (PFNGLBEGINTRANSFORMFEEDBACKPROC) load(userptr, "glBeginTransformFeedback");
+ glad_glBindBufferBase = (PFNGLBINDBUFFERBASEPROC) load(userptr, "glBindBufferBase");
+ glad_glBindBufferRange = (PFNGLBINDBUFFERRANGEPROC) load(userptr, "glBindBufferRange");
+ glad_glBindFragDataLocation = (PFNGLBINDFRAGDATALOCATIONPROC) load(userptr, "glBindFragDataLocation");
+ glad_glBindFramebuffer = (PFNGLBINDFRAMEBUFFERPROC) load(userptr, "glBindFramebuffer");
+ glad_glBindRenderbuffer = (PFNGLBINDRENDERBUFFERPROC) load(userptr, "glBindRenderbuffer");
+ glad_glBindVertexArray = (PFNGLBINDVERTEXARRAYPROC) load(userptr, "glBindVertexArray");
+ glad_glBlitFramebuffer = (PFNGLBLITFRAMEBUFFERPROC) load(userptr, "glBlitFramebuffer");
+ glad_glCheckFramebufferStatus = (PFNGLCHECKFRAMEBUFFERSTATUSPROC) load(userptr, "glCheckFramebufferStatus");
+ glad_glClampColor = (PFNGLCLAMPCOLORPROC) load(userptr, "glClampColor");
+ glad_glClearBufferfi = (PFNGLCLEARBUFFERFIPROC) load(userptr, "glClearBufferfi");
+ glad_glClearBufferfv = (PFNGLCLEARBUFFERFVPROC) load(userptr, "glClearBufferfv");
+ glad_glClearBufferiv = (PFNGLCLEARBUFFERIVPROC) load(userptr, "glClearBufferiv");
+ glad_glClearBufferuiv = (PFNGLCLEARBUFFERUIVPROC) load(userptr, "glClearBufferuiv");
+ glad_glColorMaski = (PFNGLCOLORMASKIPROC) load(userptr, "glColorMaski");
+ glad_glDeleteFramebuffers = (PFNGLDELETEFRAMEBUFFERSPROC) load(userptr, "glDeleteFramebuffers");
+ glad_glDeleteRenderbuffers = (PFNGLDELETERENDERBUFFERSPROC) load(userptr, "glDeleteRenderbuffers");
+ glad_glDeleteVertexArrays = (PFNGLDELETEVERTEXARRAYSPROC) load(userptr, "glDeleteVertexArrays");
+ glad_glDisablei = (PFNGLDISABLEIPROC) load(userptr, "glDisablei");
+ glad_glEnablei = (PFNGLENABLEIPROC) load(userptr, "glEnablei");
+ glad_glEndConditionalRender = (PFNGLENDCONDITIONALRENDERPROC) load(userptr, "glEndConditionalRender");
+ glad_glEndTransformFeedback = (PFNGLENDTRANSFORMFEEDBACKPROC) load(userptr, "glEndTransformFeedback");
+ glad_glFlushMappedBufferRange = (PFNGLFLUSHMAPPEDBUFFERRANGEPROC) load(userptr, "glFlushMappedBufferRange");
+ glad_glFramebufferRenderbuffer = (PFNGLFRAMEBUFFERRENDERBUFFERPROC) load(userptr, "glFramebufferRenderbuffer");
+ glad_glFramebufferTexture1D = (PFNGLFRAMEBUFFERTEXTURE1DPROC) load(userptr, "glFramebufferTexture1D");
+ glad_glFramebufferTexture2D = (PFNGLFRAMEBUFFERTEXTURE2DPROC) load(userptr, "glFramebufferTexture2D");
+ glad_glFramebufferTexture3D = (PFNGLFRAMEBUFFERTEXTURE3DPROC) load(userptr, "glFramebufferTexture3D");
+ glad_glFramebufferTextureLayer = (PFNGLFRAMEBUFFERTEXTURELAYERPROC) load(userptr, "glFramebufferTextureLayer");
+ glad_glGenFramebuffers = (PFNGLGENFRAMEBUFFERSPROC) load(userptr, "glGenFramebuffers");
+ glad_glGenRenderbuffers = (PFNGLGENRENDERBUFFERSPROC) load(userptr, "glGenRenderbuffers");
+ glad_glGenVertexArrays = (PFNGLGENVERTEXARRAYSPROC) load(userptr, "glGenVertexArrays");
+ glad_glGenerateMipmap = (PFNGLGENERATEMIPMAPPROC) load(userptr, "glGenerateMipmap");
+ glad_glGetBooleani_v = (PFNGLGETBOOLEANI_VPROC) load(userptr, "glGetBooleani_v");
+ glad_glGetFragDataLocation = (PFNGLGETFRAGDATALOCATIONPROC) load(userptr, "glGetFragDataLocation");
+ glad_glGetFramebufferAttachmentParameteriv = (PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVPROC) load(userptr, "glGetFramebufferAttachmentParameteriv");
+ glad_glGetIntegeri_v = (PFNGLGETINTEGERI_VPROC) load(userptr, "glGetIntegeri_v");
+ glad_glGetRenderbufferParameteriv = (PFNGLGETRENDERBUFFERPARAMETERIVPROC) load(userptr, "glGetRenderbufferParameteriv");
+ glad_glGetStringi = (PFNGLGETSTRINGIPROC) load(userptr, "glGetStringi");
+ glad_glGetTexParameterIiv = (PFNGLGETTEXPARAMETERIIVPROC) load(userptr, "glGetTexParameterIiv");
+ glad_glGetTexParameterIuiv = (PFNGLGETTEXPARAMETERIUIVPROC) load(userptr, "glGetTexParameterIuiv");
+ glad_glGetTransformFeedbackVarying = (PFNGLGETTRANSFORMFEEDBACKVARYINGPROC) load(userptr, "glGetTransformFeedbackVarying");
+ glad_glGetUniformuiv = (PFNGLGETUNIFORMUIVPROC) load(userptr, "glGetUniformuiv");
+ glad_glGetVertexAttribIiv = (PFNGLGETVERTEXATTRIBIIVPROC) load(userptr, "glGetVertexAttribIiv");
+ glad_glGetVertexAttribIuiv = (PFNGLGETVERTEXATTRIBIUIVPROC) load(userptr, "glGetVertexAttribIuiv");
+ glad_glIsEnabledi = (PFNGLISENABLEDIPROC) load(userptr, "glIsEnabledi");
+ glad_glIsFramebuffer = (PFNGLISFRAMEBUFFERPROC) load(userptr, "glIsFramebuffer");
+ glad_glIsRenderbuffer = (PFNGLISRENDERBUFFERPROC) load(userptr, "glIsRenderbuffer");
+ glad_glIsVertexArray = (PFNGLISVERTEXARRAYPROC) load(userptr, "glIsVertexArray");
+ glad_glMapBufferRange = (PFNGLMAPBUFFERRANGEPROC) load(userptr, "glMapBufferRange");
+ glad_glRenderbufferStorage = (PFNGLRENDERBUFFERSTORAGEPROC) load(userptr, "glRenderbufferStorage");
+ glad_glRenderbufferStorageMultisample = (PFNGLRENDERBUFFERSTORAGEMULTISAMPLEPROC) load(userptr, "glRenderbufferStorageMultisample");
+ glad_glTexParameterIiv = (PFNGLTEXPARAMETERIIVPROC) load(userptr, "glTexParameterIiv");
+ glad_glTexParameterIuiv = (PFNGLTEXPARAMETERIUIVPROC) load(userptr, "glTexParameterIuiv");
+ glad_glTransformFeedbackVaryings = (PFNGLTRANSFORMFEEDBACKVARYINGSPROC) load(userptr, "glTransformFeedbackVaryings");
+ glad_glUniform1ui = (PFNGLUNIFORM1UIPROC) load(userptr, "glUniform1ui");
+ glad_glUniform1uiv = (PFNGLUNIFORM1UIVPROC) load(userptr, "glUniform1uiv");
+ glad_glUniform2ui = (PFNGLUNIFORM2UIPROC) load(userptr, "glUniform2ui");
+ glad_glUniform2uiv = (PFNGLUNIFORM2UIVPROC) load(userptr, "glUniform2uiv");
+ glad_glUniform3ui = (PFNGLUNIFORM3UIPROC) load(userptr, "glUniform3ui");
+ glad_glUniform3uiv = (PFNGLUNIFORM3UIVPROC) load(userptr, "glUniform3uiv");
+ glad_glUniform4ui = (PFNGLUNIFORM4UIPROC) load(userptr, "glUniform4ui");
+ glad_glUniform4uiv = (PFNGLUNIFORM4UIVPROC) load(userptr, "glUniform4uiv");
+ glad_glVertexAttribI1i = (PFNGLVERTEXATTRIBI1IPROC) load(userptr, "glVertexAttribI1i");
+ glad_glVertexAttribI1iv = (PFNGLVERTEXATTRIBI1IVPROC) load(userptr, "glVertexAttribI1iv");
+ glad_glVertexAttribI1ui = (PFNGLVERTEXATTRIBI1UIPROC) load(userptr, "glVertexAttribI1ui");
+ glad_glVertexAttribI1uiv = (PFNGLVERTEXATTRIBI1UIVPROC) load(userptr, "glVertexAttribI1uiv");
+ glad_glVertexAttribI2i = (PFNGLVERTEXATTRIBI2IPROC) load(userptr, "glVertexAttribI2i");
+ glad_glVertexAttribI2iv = (PFNGLVERTEXATTRIBI2IVPROC) load(userptr, "glVertexAttribI2iv");
+ glad_glVertexAttribI2ui = (PFNGLVERTEXATTRIBI2UIPROC) load(userptr, "glVertexAttribI2ui");
+ glad_glVertexAttribI2uiv = (PFNGLVERTEXATTRIBI2UIVPROC) load(userptr, "glVertexAttribI2uiv");
+ glad_glVertexAttribI3i = (PFNGLVERTEXATTRIBI3IPROC) load(userptr, "glVertexAttribI3i");
+ glad_glVertexAttribI3iv = (PFNGLVERTEXATTRIBI3IVPROC) load(userptr, "glVertexAttribI3iv");
+ glad_glVertexAttribI3ui = (PFNGLVERTEXATTRIBI3UIPROC) load(userptr, "glVertexAttribI3ui");
+ glad_glVertexAttribI3uiv = (PFNGLVERTEXATTRIBI3UIVPROC) load(userptr, "glVertexAttribI3uiv");
+ glad_glVertexAttribI4bv = (PFNGLVERTEXATTRIBI4BVPROC) load(userptr, "glVertexAttribI4bv");
+ glad_glVertexAttribI4i = (PFNGLVERTEXATTRIBI4IPROC) load(userptr, "glVertexAttribI4i");
+ glad_glVertexAttribI4iv = (PFNGLVERTEXATTRIBI4IVPROC) load(userptr, "glVertexAttribI4iv");
+ glad_glVertexAttribI4sv = (PFNGLVERTEXATTRIBI4SVPROC) load(userptr, "glVertexAttribI4sv");
+ glad_glVertexAttribI4ubv = (PFNGLVERTEXATTRIBI4UBVPROC) load(userptr, "glVertexAttribI4ubv");
+ glad_glVertexAttribI4ui = (PFNGLVERTEXATTRIBI4UIPROC) load(userptr, "glVertexAttribI4ui");
+ glad_glVertexAttribI4uiv = (PFNGLVERTEXATTRIBI4UIVPROC) load(userptr, "glVertexAttribI4uiv");
+ glad_glVertexAttribI4usv = (PFNGLVERTEXATTRIBI4USVPROC) load(userptr, "glVertexAttribI4usv");
+ glad_glVertexAttribIPointer = (PFNGLVERTEXATTRIBIPOINTERPROC) load(userptr, "glVertexAttribIPointer");
+}
+static void glad_gl_load_GL_VERSION_3_1( GLADuserptrloadfunc load, void* userptr) {
+ if(!GLAD_GL_VERSION_3_1) return;
+ glad_glBindBufferBase = (PFNGLBINDBUFFERBASEPROC) load(userptr, "glBindBufferBase");
+ glad_glBindBufferRange = (PFNGLBINDBUFFERRANGEPROC) load(userptr, "glBindBufferRange");
+ glad_glCopyBufferSubData = (PFNGLCOPYBUFFERSUBDATAPROC) load(userptr, "glCopyBufferSubData");
+ glad_glDrawArraysInstanced = (PFNGLDRAWARRAYSINSTANCEDPROC) load(userptr, "glDrawArraysInstanced");
+ glad_glDrawElementsInstanced = (PFNGLDRAWELEMENTSINSTANCEDPROC) load(userptr, "glDrawElementsInstanced");
+ glad_glGetActiveUniformBlockName = (PFNGLGETACTIVEUNIFORMBLOCKNAMEPROC) load(userptr, "glGetActiveUniformBlockName");
+ glad_glGetActiveUniformBlockiv = (PFNGLGETACTIVEUNIFORMBLOCKIVPROC) load(userptr, "glGetActiveUniformBlockiv");
+ glad_glGetActiveUniformName = (PFNGLGETACTIVEUNIFORMNAMEPROC) load(userptr, "glGetActiveUniformName");
+ glad_glGetActiveUniformsiv = (PFNGLGETACTIVEUNIFORMSIVPROC) load(userptr, "glGetActiveUniformsiv");
+ glad_glGetIntegeri_v = (PFNGLGETINTEGERI_VPROC) load(userptr, "glGetIntegeri_v");
+ glad_glGetUniformBlockIndex = (PFNGLGETUNIFORMBLOCKINDEXPROC) load(userptr, "glGetUniformBlockIndex");
+ glad_glGetUniformIndices = (PFNGLGETUNIFORMINDICESPROC) load(userptr, "glGetUniformIndices");
+ glad_glPrimitiveRestartIndex = (PFNGLPRIMITIVERESTARTINDEXPROC) load(userptr, "glPrimitiveRestartIndex");
+ glad_glTexBuffer = (PFNGLTEXBUFFERPROC) load(userptr, "glTexBuffer");
+ glad_glUniformBlockBinding = (PFNGLUNIFORMBLOCKBINDINGPROC) load(userptr, "glUniformBlockBinding");
+}
+static void glad_gl_load_GL_VERSION_3_2( GLADuserptrloadfunc load, void* userptr) {
+ if(!GLAD_GL_VERSION_3_2) return;
+ glad_glClientWaitSync = (PFNGLCLIENTWAITSYNCPROC) load(userptr, "glClientWaitSync");
+ glad_glDeleteSync = (PFNGLDELETESYNCPROC) load(userptr, "glDeleteSync");
+ glad_glDrawElementsBaseVertex = (PFNGLDRAWELEMENTSBASEVERTEXPROC) load(userptr, "glDrawElementsBaseVertex");
+ glad_glDrawElementsInstancedBaseVertex = (PFNGLDRAWELEMENTSINSTANCEDBASEVERTEXPROC) load(userptr, "glDrawElementsInstancedBaseVertex");
+ glad_glDrawRangeElementsBaseVertex = (PFNGLDRAWRANGEELEMENTSBASEVERTEXPROC) load(userptr, "glDrawRangeElementsBaseVertex");
+ glad_glFenceSync = (PFNGLFENCESYNCPROC) load(userptr, "glFenceSync");
+ glad_glFramebufferTexture = (PFNGLFRAMEBUFFERTEXTUREPROC) load(userptr, "glFramebufferTexture");
+ glad_glGetBufferParameteri64v = (PFNGLGETBUFFERPARAMETERI64VPROC) load(userptr, "glGetBufferParameteri64v");
+ glad_glGetInteger64i_v = (PFNGLGETINTEGER64I_VPROC) load(userptr, "glGetInteger64i_v");
+ glad_glGetInteger64v = (PFNGLGETINTEGER64VPROC) load(userptr, "glGetInteger64v");
+ glad_glGetMultisamplefv = (PFNGLGETMULTISAMPLEFVPROC) load(userptr, "glGetMultisamplefv");
+ glad_glGetSynciv = (PFNGLGETSYNCIVPROC) load(userptr, "glGetSynciv");
+ glad_glIsSync = (PFNGLISSYNCPROC) load(userptr, "glIsSync");
+ glad_glMultiDrawElementsBaseVertex = (PFNGLMULTIDRAWELEMENTSBASEVERTEXPROC) load(userptr, "glMultiDrawElementsBaseVertex");
+ glad_glProvokingVertex = (PFNGLPROVOKINGVERTEXPROC) load(userptr, "glProvokingVertex");
+ glad_glSampleMaski = (PFNGLSAMPLEMASKIPROC) load(userptr, "glSampleMaski");
+ glad_glTexImage2DMultisample = (PFNGLTEXIMAGE2DMULTISAMPLEPROC) load(userptr, "glTexImage2DMultisample");
+ glad_glTexImage3DMultisample = (PFNGLTEXIMAGE3DMULTISAMPLEPROC) load(userptr, "glTexImage3DMultisample");
+ glad_glWaitSync = (PFNGLWAITSYNCPROC) load(userptr, "glWaitSync");
+}
+static void glad_gl_load_GL_VERSION_3_3( GLADuserptrloadfunc load, void* userptr) {
+ if(!GLAD_GL_VERSION_3_3) return;
+ glad_glBindFragDataLocationIndexed = (PFNGLBINDFRAGDATALOCATIONINDEXEDPROC) load(userptr, "glBindFragDataLocationIndexed");
+ glad_glBindSampler = (PFNGLBINDSAMPLERPROC) load(userptr, "glBindSampler");
+ glad_glColorP3ui = (PFNGLCOLORP3UIPROC) load(userptr, "glColorP3ui");
+ glad_glColorP3uiv = (PFNGLCOLORP3UIVPROC) load(userptr, "glColorP3uiv");
+ glad_glColorP4ui = (PFNGLCOLORP4UIPROC) load(userptr, "glColorP4ui");
+ glad_glColorP4uiv = (PFNGLCOLORP4UIVPROC) load(userptr, "glColorP4uiv");
+ glad_glDeleteSamplers = (PFNGLDELETESAMPLERSPROC) load(userptr, "glDeleteSamplers");
+ glad_glGenSamplers = (PFNGLGENSAMPLERSPROC) load(userptr, "glGenSamplers");
+ glad_glGetFragDataIndex = (PFNGLGETFRAGDATAINDEXPROC) load(userptr, "glGetFragDataIndex");
+ glad_glGetQueryObjecti64v = (PFNGLGETQUERYOBJECTI64VPROC) load(userptr, "glGetQueryObjecti64v");
+ glad_glGetQueryObjectui64v = (PFNGLGETQUERYOBJECTUI64VPROC) load(userptr, "glGetQueryObjectui64v");
+ glad_glGetSamplerParameterIiv = (PFNGLGETSAMPLERPARAMETERIIVPROC) load(userptr, "glGetSamplerParameterIiv");
+ glad_glGetSamplerParameterIuiv = (PFNGLGETSAMPLERPARAMETERIUIVPROC) load(userptr, "glGetSamplerParameterIuiv");
+ glad_glGetSamplerParameterfv = (PFNGLGETSAMPLERPARAMETERFVPROC) load(userptr, "glGetSamplerParameterfv");
+ glad_glGetSamplerParameteriv = (PFNGLGETSAMPLERPARAMETERIVPROC) load(userptr, "glGetSamplerParameteriv");
+ glad_glIsSampler = (PFNGLISSAMPLERPROC) load(userptr, "glIsSampler");
+ glad_glMultiTexCoordP1ui = (PFNGLMULTITEXCOORDP1UIPROC) load(userptr, "glMultiTexCoordP1ui");
+ glad_glMultiTexCoordP1uiv = (PFNGLMULTITEXCOORDP1UIVPROC) load(userptr, "glMultiTexCoordP1uiv");
+ glad_glMultiTexCoordP2ui = (PFNGLMULTITEXCOORDP2UIPROC) load(userptr, "glMultiTexCoordP2ui");
+ glad_glMultiTexCoordP2uiv = (PFNGLMULTITEXCOORDP2UIVPROC) load(userptr, "glMultiTexCoordP2uiv");
+ glad_glMultiTexCoordP3ui = (PFNGLMULTITEXCOORDP3UIPROC) load(userptr, "glMultiTexCoordP3ui");
+ glad_glMultiTexCoordP3uiv = (PFNGLMULTITEXCOORDP3UIVPROC) load(userptr, "glMultiTexCoordP3uiv");
+ glad_glMultiTexCoordP4ui = (PFNGLMULTITEXCOORDP4UIPROC) load(userptr, "glMultiTexCoordP4ui");
+ glad_glMultiTexCoordP4uiv = (PFNGLMULTITEXCOORDP4UIVPROC) load(userptr, "glMultiTexCoordP4uiv");
+ glad_glNormalP3ui = (PFNGLNORMALP3UIPROC) load(userptr, "glNormalP3ui");
+ glad_glNormalP3uiv = (PFNGLNORMALP3UIVPROC) load(userptr, "glNormalP3uiv");
+ glad_glQueryCounter = (PFNGLQUERYCOUNTERPROC) load(userptr, "glQueryCounter");
+ glad_glSamplerParameterIiv = (PFNGLSAMPLERPARAMETERIIVPROC) load(userptr, "glSamplerParameterIiv");
+ glad_glSamplerParameterIuiv = (PFNGLSAMPLERPARAMETERIUIVPROC) load(userptr, "glSamplerParameterIuiv");
+ glad_glSamplerParameterf = (PFNGLSAMPLERPARAMETERFPROC) load(userptr, "glSamplerParameterf");
+ glad_glSamplerParameterfv = (PFNGLSAMPLERPARAMETERFVPROC) load(userptr, "glSamplerParameterfv");
+ glad_glSamplerParameteri = (PFNGLSAMPLERPARAMETERIPROC) load(userptr, "glSamplerParameteri");
+ glad_glSamplerParameteriv = (PFNGLSAMPLERPARAMETERIVPROC) load(userptr, "glSamplerParameteriv");
+ glad_glSecondaryColorP3ui = (PFNGLSECONDARYCOLORP3UIPROC) load(userptr, "glSecondaryColorP3ui");
+ glad_glSecondaryColorP3uiv = (PFNGLSECONDARYCOLORP3UIVPROC) load(userptr, "glSecondaryColorP3uiv");
+ glad_glTexCoordP1ui = (PFNGLTEXCOORDP1UIPROC) load(userptr, "glTexCoordP1ui");
+ glad_glTexCoordP1uiv = (PFNGLTEXCOORDP1UIVPROC) load(userptr, "glTexCoordP1uiv");
+ glad_glTexCoordP2ui = (PFNGLTEXCOORDP2UIPROC) load(userptr, "glTexCoordP2ui");
+ glad_glTexCoordP2uiv = (PFNGLTEXCOORDP2UIVPROC) load(userptr, "glTexCoordP2uiv");
+ glad_glTexCoordP3ui = (PFNGLTEXCOORDP3UIPROC) load(userptr, "glTexCoordP3ui");
+ glad_glTexCoordP3uiv = (PFNGLTEXCOORDP3UIVPROC) load(userptr, "glTexCoordP3uiv");
+ glad_glTexCoordP4ui = (PFNGLTEXCOORDP4UIPROC) load(userptr, "glTexCoordP4ui");
+ glad_glTexCoordP4uiv = (PFNGLTEXCOORDP4UIVPROC) load(userptr, "glTexCoordP4uiv");
+ glad_glVertexAttribDivisor = (PFNGLVERTEXATTRIBDIVISORPROC) load(userptr, "glVertexAttribDivisor");
+ glad_glVertexAttribP1ui = (PFNGLVERTEXATTRIBP1UIPROC) load(userptr, "glVertexAttribP1ui");
+ glad_glVertexAttribP1uiv = (PFNGLVERTEXATTRIBP1UIVPROC) load(userptr, "glVertexAttribP1uiv");
+ glad_glVertexAttribP2ui = (PFNGLVERTEXATTRIBP2UIPROC) load(userptr, "glVertexAttribP2ui");
+ glad_glVertexAttribP2uiv = (PFNGLVERTEXATTRIBP2UIVPROC) load(userptr, "glVertexAttribP2uiv");
+ glad_glVertexAttribP3ui = (PFNGLVERTEXATTRIBP3UIPROC) load(userptr, "glVertexAttribP3ui");
+ glad_glVertexAttribP3uiv = (PFNGLVERTEXATTRIBP3UIVPROC) load(userptr, "glVertexAttribP3uiv");
+ glad_glVertexAttribP4ui = (PFNGLVERTEXATTRIBP4UIPROC) load(userptr, "glVertexAttribP4ui");
+ glad_glVertexAttribP4uiv = (PFNGLVERTEXATTRIBP4UIVPROC) load(userptr, "glVertexAttribP4uiv");
+ glad_glVertexP2ui = (PFNGLVERTEXP2UIPROC) load(userptr, "glVertexP2ui");
+ glad_glVertexP2uiv = (PFNGLVERTEXP2UIVPROC) load(userptr, "glVertexP2uiv");
+ glad_glVertexP3ui = (PFNGLVERTEXP3UIPROC) load(userptr, "glVertexP3ui");
+ glad_glVertexP3uiv = (PFNGLVERTEXP3UIVPROC) load(userptr, "glVertexP3uiv");
+ glad_glVertexP4ui = (PFNGLVERTEXP4UIPROC) load(userptr, "glVertexP4ui");
+ glad_glVertexP4uiv = (PFNGLVERTEXP4UIVPROC) load(userptr, "glVertexP4uiv");
+}
+static void glad_gl_load_GL_ARB_debug_output( GLADuserptrloadfunc load, void* userptr) {
+ if(!GLAD_GL_ARB_debug_output) return;
+ glad_glDebugMessageCallbackARB = (PFNGLDEBUGMESSAGECALLBACKARBPROC) load(userptr, "glDebugMessageCallbackARB");
+ glad_glDebugMessageControlARB = (PFNGLDEBUGMESSAGECONTROLARBPROC) load(userptr, "glDebugMessageControlARB");
+ glad_glDebugMessageInsertARB = (PFNGLDEBUGMESSAGEINSERTARBPROC) load(userptr, "glDebugMessageInsertARB");
+ glad_glGetDebugMessageLogARB = (PFNGLGETDEBUGMESSAGELOGARBPROC) load(userptr, "glGetDebugMessageLogARB");
+}
+static void glad_gl_load_GL_ARB_framebuffer_object( GLADuserptrloadfunc load, void* userptr) {
+ if(!GLAD_GL_ARB_framebuffer_object) return;
+ glad_glBindFramebuffer = (PFNGLBINDFRAMEBUFFERPROC) load(userptr, "glBindFramebuffer");
+ glad_glBindRenderbuffer = (PFNGLBINDRENDERBUFFERPROC) load(userptr, "glBindRenderbuffer");
+ glad_glBlitFramebuffer = (PFNGLBLITFRAMEBUFFERPROC) load(userptr, "glBlitFramebuffer");
+ glad_glCheckFramebufferStatus = (PFNGLCHECKFRAMEBUFFERSTATUSPROC) load(userptr, "glCheckFramebufferStatus");
+ glad_glDeleteFramebuffers = (PFNGLDELETEFRAMEBUFFERSPROC) load(userptr, "glDeleteFramebuffers");
+ glad_glDeleteRenderbuffers = (PFNGLDELETERENDERBUFFERSPROC) load(userptr, "glDeleteRenderbuffers");
+ glad_glFramebufferRenderbuffer = (PFNGLFRAMEBUFFERRENDERBUFFERPROC) load(userptr, "glFramebufferRenderbuffer");
+ glad_glFramebufferTexture1D = (PFNGLFRAMEBUFFERTEXTURE1DPROC) load(userptr, "glFramebufferTexture1D");
+ glad_glFramebufferTexture2D = (PFNGLFRAMEBUFFERTEXTURE2DPROC) load(userptr, "glFramebufferTexture2D");
+ glad_glFramebufferTexture3D = (PFNGLFRAMEBUFFERTEXTURE3DPROC) load(userptr, "glFramebufferTexture3D");
+ glad_glFramebufferTextureLayer = (PFNGLFRAMEBUFFERTEXTURELAYERPROC) load(userptr, "glFramebufferTextureLayer");
+ glad_glGenFramebuffers = (PFNGLGENFRAMEBUFFERSPROC) load(userptr, "glGenFramebuffers");
+ glad_glGenRenderbuffers = (PFNGLGENRENDERBUFFERSPROC) load(userptr, "glGenRenderbuffers");
+ glad_glGenerateMipmap = (PFNGLGENERATEMIPMAPPROC) load(userptr, "glGenerateMipmap");
+ glad_glGetFramebufferAttachmentParameteriv = (PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVPROC) load(userptr, "glGetFramebufferAttachmentParameteriv");
+ glad_glGetRenderbufferParameteriv = (PFNGLGETRENDERBUFFERPARAMETERIVPROC) load(userptr, "glGetRenderbufferParameteriv");
+ glad_glIsFramebuffer = (PFNGLISFRAMEBUFFERPROC) load(userptr, "glIsFramebuffer");
+ glad_glIsRenderbuffer = (PFNGLISRENDERBUFFERPROC) load(userptr, "glIsRenderbuffer");
+ glad_glRenderbufferStorage = (PFNGLRENDERBUFFERSTORAGEPROC) load(userptr, "glRenderbufferStorage");
+ glad_glRenderbufferStorageMultisample = (PFNGLRENDERBUFFERSTORAGEMULTISAMPLEPROC) load(userptr, "glRenderbufferStorageMultisample");
+}
+static void glad_gl_load_GL_EXT_framebuffer_blit( GLADuserptrloadfunc load, void* userptr) {
+ if(!GLAD_GL_EXT_framebuffer_blit) return;
+ glad_glBlitFramebufferEXT = (PFNGLBLITFRAMEBUFFEREXTPROC) load(userptr, "glBlitFramebufferEXT");
+}
+static void glad_gl_load_GL_EXT_framebuffer_multisample( GLADuserptrloadfunc load, void* userptr) {
+ if(!GLAD_GL_EXT_framebuffer_multisample) return;
+ glad_glRenderbufferStorageMultisampleEXT = (PFNGLRENDERBUFFERSTORAGEMULTISAMPLEEXTPROC) load(userptr, "glRenderbufferStorageMultisampleEXT");
+}
+static void glad_gl_load_GL_EXT_framebuffer_object( GLADuserptrloadfunc load, void* userptr) {
+ if(!GLAD_GL_EXT_framebuffer_object) return;
+ glad_glBindFramebufferEXT = (PFNGLBINDFRAMEBUFFEREXTPROC) load(userptr, "glBindFramebufferEXT");
+ glad_glBindRenderbufferEXT = (PFNGLBINDRENDERBUFFEREXTPROC) load(userptr, "glBindRenderbufferEXT");
+ glad_glCheckFramebufferStatusEXT = (PFNGLCHECKFRAMEBUFFERSTATUSEXTPROC) load(userptr, "glCheckFramebufferStatusEXT");
+ glad_glDeleteFramebuffersEXT = (PFNGLDELETEFRAMEBUFFERSEXTPROC) load(userptr, "glDeleteFramebuffersEXT");
+ glad_glDeleteRenderbuffersEXT = (PFNGLDELETERENDERBUFFERSEXTPROC) load(userptr, "glDeleteRenderbuffersEXT");
+ glad_glFramebufferRenderbufferEXT = (PFNGLFRAMEBUFFERRENDERBUFFEREXTPROC) load(userptr, "glFramebufferRenderbufferEXT");
+ glad_glFramebufferTexture1DEXT = (PFNGLFRAMEBUFFERTEXTURE1DEXTPROC) load(userptr, "glFramebufferTexture1DEXT");
+ glad_glFramebufferTexture2DEXT = (PFNGLFRAMEBUFFERTEXTURE2DEXTPROC) load(userptr, "glFramebufferTexture2DEXT");
+ glad_glFramebufferTexture3DEXT = (PFNGLFRAMEBUFFERTEXTURE3DEXTPROC) load(userptr, "glFramebufferTexture3DEXT");
+ glad_glGenFramebuffersEXT = (PFNGLGENFRAMEBUFFERSEXTPROC) load(userptr, "glGenFramebuffersEXT");
+ glad_glGenRenderbuffersEXT = (PFNGLGENRENDERBUFFERSEXTPROC) load(userptr, "glGenRenderbuffersEXT");
+ glad_glGenerateMipmapEXT = (PFNGLGENERATEMIPMAPEXTPROC) load(userptr, "glGenerateMipmapEXT");
+ glad_glGetFramebufferAttachmentParameterivEXT = (PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVEXTPROC) load(userptr, "glGetFramebufferAttachmentParameterivEXT");
+ glad_glGetRenderbufferParameterivEXT = (PFNGLGETRENDERBUFFERPARAMETERIVEXTPROC) load(userptr, "glGetRenderbufferParameterivEXT");
+ glad_glIsFramebufferEXT = (PFNGLISFRAMEBUFFEREXTPROC) load(userptr, "glIsFramebufferEXT");
+ glad_glIsRenderbufferEXT = (PFNGLISRENDERBUFFEREXTPROC) load(userptr, "glIsRenderbufferEXT");
+ glad_glRenderbufferStorageEXT = (PFNGLRENDERBUFFERSTORAGEEXTPROC) load(userptr, "glRenderbufferStorageEXT");
+}
+static void glad_gl_load_GL_OVR_multiview( GLADuserptrloadfunc load, void* userptr) {
+ if(!GLAD_GL_OVR_multiview) return;
+ glad_glFramebufferTextureMultiviewOVR = (PFNGLFRAMEBUFFERTEXTUREMULTIVIEWOVRPROC) load(userptr, "glFramebufferTextureMultiviewOVR");
+}
+
+
+
+#if defined(GL_ES_VERSION_3_0) || defined(GL_VERSION_3_0)
+#define GLAD_GL_IS_SOME_NEW_VERSION 1
+#else
+#define GLAD_GL_IS_SOME_NEW_VERSION 0
+#endif
+
+static int glad_gl_get_extensions( int version, const char **out_exts, unsigned int *out_num_exts_i, char ***out_exts_i) {
+#if GLAD_GL_IS_SOME_NEW_VERSION
+ if(GLAD_VERSION_MAJOR(version) < 3) {
+#else
+ GLAD_UNUSED(version);
+ GLAD_UNUSED(out_num_exts_i);
+ GLAD_UNUSED(out_exts_i);
+#endif
+ if (glad_glGetString == NULL) {
+ return 0;
+ }
+ *out_exts = (const char *)glad_glGetString(GL_EXTENSIONS);
+#if GLAD_GL_IS_SOME_NEW_VERSION
+ } else {
+ unsigned int index = 0;
+ unsigned int num_exts_i = 0;
+ char **exts_i = NULL;
+ if (glad_glGetStringi == NULL || glad_glGetIntegerv == NULL) {
+ return 0;
+ }
+ glad_glGetIntegerv(GL_NUM_EXTENSIONS, (int*) &num_exts_i);
+ if (num_exts_i > 0) {
+ exts_i = (char **) malloc(num_exts_i * (sizeof *exts_i));
+ }
+ if (exts_i == NULL) {
+ return 0;
+ }
+ for(index = 0; index < num_exts_i; index++) {
+ const char *gl_str_tmp = (const char*) glad_glGetStringi(GL_EXTENSIONS, index);
+ size_t len = strlen(gl_str_tmp) + 1;
+
+ char *local_str = (char*) malloc(len * sizeof(char));
+ if(local_str != NULL) {
+ memcpy(local_str, gl_str_tmp, len * sizeof(char));
+ }
+
+ exts_i[index] = local_str;
+ }
+
+ *out_num_exts_i = num_exts_i;
+ *out_exts_i = exts_i;
+ }
+#endif
+ return 1;
+}
+static void glad_gl_free_extensions(char **exts_i, unsigned int num_exts_i) {
+ if (exts_i != NULL) {
+ unsigned int index;
+ for(index = 0; index < num_exts_i; index++) {
+ free((void *) (exts_i[index]));
+ }
+ free((void *)exts_i);
+ exts_i = NULL;
+ }
+}
+static int glad_gl_has_extension(int version, const char *exts, unsigned int num_exts_i, char **exts_i, const char *ext) {
+ if(GLAD_VERSION_MAJOR(version) < 3 || !GLAD_GL_IS_SOME_NEW_VERSION) {
+ const char *extensions;
+ const char *loc;
+ const char *terminator;
+ extensions = exts;
+ if(extensions == NULL || ext == NULL) {
+ return 0;
+ }
+ while(1) {
+ loc = strstr(extensions, ext);
+ if(loc == NULL) {
+ return 0;
+ }
+ terminator = loc + strlen(ext);
+ if((loc == extensions || *(loc - 1) == ' ') &&
+ (*terminator == ' ' || *terminator == '\0')) {
+ return 1;
+ }
+ extensions = terminator;
+ }
+ } else {
+ unsigned int index;
+ for(index = 0; index < num_exts_i; index++) {
+ const char *e = exts_i[index];
+ if(strcmp(e, ext) == 0) {
+ return 1;
+ }
+ }
+ }
+ return 0;
+}
+
+static GLADapiproc glad_gl_get_proc_from_userptr(void *userptr, const char* name) {
+ return (GLAD_GNUC_EXTENSION (GLADapiproc (*)(const char *name)) userptr)(name);
+}
+
+static int glad_gl_find_extensions_gl( int version) {
+ const char *exts = NULL;
+ unsigned int num_exts_i = 0;
+ char **exts_i = NULL;
+ if (!glad_gl_get_extensions(version, &exts, &num_exts_i, &exts_i)) return 0;
+
+ GLAD_GL_ARB_debug_output = glad_gl_has_extension(version, exts, num_exts_i, exts_i, "GL_ARB_debug_output");
+ GLAD_GL_ARB_framebuffer_object = glad_gl_has_extension(version, exts, num_exts_i, exts_i, "GL_ARB_framebuffer_object");
+ GLAD_GL_EXT_framebuffer_blit = glad_gl_has_extension(version, exts, num_exts_i, exts_i, "GL_EXT_framebuffer_blit");
+ GLAD_GL_EXT_framebuffer_multisample = glad_gl_has_extension(version, exts, num_exts_i, exts_i, "GL_EXT_framebuffer_multisample");
+ GLAD_GL_EXT_framebuffer_object = glad_gl_has_extension(version, exts, num_exts_i, exts_i, "GL_EXT_framebuffer_object");
+ GLAD_GL_OVR_multiview = glad_gl_has_extension(version, exts, num_exts_i, exts_i, "GL_OVR_multiview");
+ GLAD_GL_OVR_multiview2 = glad_gl_has_extension(version, exts, num_exts_i, exts_i, "GL_OVR_multiview2");
+
+ glad_gl_free_extensions(exts_i, num_exts_i);
+
+ return 1;
+}
+
+static int glad_gl_find_core_gl(void) {
+ int i;
+ const char* version;
+ const char* prefixes[] = {
+ "OpenGL ES-CM ",
+ "OpenGL ES-CL ",
+ "OpenGL ES ",
+ "OpenGL SC ",
+ NULL
+ };
+ int major = 0;
+ int minor = 0;
+ version = (const char*) glad_glGetString(GL_VERSION);
+ if (!version) return 0;
+ for (i = 0; prefixes[i]; i++) {
+ const size_t length = strlen(prefixes[i]);
+ if (strncmp(version, prefixes[i], length) == 0) {
+ version += length;
+ break;
+ }
+ }
+
+ GLAD_IMPL_UTIL_SSCANF(version, "%d.%d", &major, &minor);
+
+ GLAD_GL_VERSION_1_0 = (major == 1 && minor >= 0) || major > 1;
+ GLAD_GL_VERSION_1_1 = (major == 1 && minor >= 1) || major > 1;
+ GLAD_GL_VERSION_1_2 = (major == 1 && minor >= 2) || major > 1;
+ GLAD_GL_VERSION_1_3 = (major == 1 && minor >= 3) || major > 1;
+ GLAD_GL_VERSION_1_4 = (major == 1 && minor >= 4) || major > 1;
+ GLAD_GL_VERSION_1_5 = (major == 1 && minor >= 5) || major > 1;
+ GLAD_GL_VERSION_2_0 = (major == 2 && minor >= 0) || major > 2;
+ GLAD_GL_VERSION_2_1 = (major == 2 && minor >= 1) || major > 2;
+ GLAD_GL_VERSION_3_0 = (major == 3 && minor >= 0) || major > 3;
+ GLAD_GL_VERSION_3_1 = (major == 3 && minor >= 1) || major > 3;
+ GLAD_GL_VERSION_3_2 = (major == 3 && minor >= 2) || major > 3;
+ GLAD_GL_VERSION_3_3 = (major == 3 && minor >= 3) || major > 3;
+
+ return GLAD_MAKE_VERSION(major, minor);
+}
+
+int gladLoadGLUserPtr( GLADuserptrloadfunc load, void *userptr) {
+ int version;
+
+ glad_glGetString = (PFNGLGETSTRINGPROC) load(userptr, "glGetString");
+ if(glad_glGetString == NULL) return 0;
+ if(glad_glGetString(GL_VERSION) == NULL) return 0;
+ version = glad_gl_find_core_gl();
+
+ glad_gl_load_GL_VERSION_1_0(load, userptr);
+ glad_gl_load_GL_VERSION_1_1(load, userptr);
+ glad_gl_load_GL_VERSION_1_2(load, userptr);
+ glad_gl_load_GL_VERSION_1_3(load, userptr);
+ glad_gl_load_GL_VERSION_1_4(load, userptr);
+ glad_gl_load_GL_VERSION_1_5(load, userptr);
+ glad_gl_load_GL_VERSION_2_0(load, userptr);
+ glad_gl_load_GL_VERSION_2_1(load, userptr);
+ glad_gl_load_GL_VERSION_3_0(load, userptr);
+ glad_gl_load_GL_VERSION_3_1(load, userptr);
+ glad_gl_load_GL_VERSION_3_2(load, userptr);
+ glad_gl_load_GL_VERSION_3_3(load, userptr);
+
+ if (!glad_gl_find_extensions_gl(version)) return 0;
+ glad_gl_load_GL_ARB_debug_output(load, userptr);
+ glad_gl_load_GL_ARB_framebuffer_object(load, userptr);
+ glad_gl_load_GL_EXT_framebuffer_blit(load, userptr);
+ glad_gl_load_GL_EXT_framebuffer_multisample(load, userptr);
+ glad_gl_load_GL_EXT_framebuffer_object(load, userptr);
+ glad_gl_load_GL_OVR_multiview(load, userptr);
+
+
+
+ return version;
+}
+
+
+int gladLoadGL( GLADloadfunc load) {
+ return gladLoadGLUserPtr( glad_gl_get_proc_from_userptr, GLAD_GNUC_EXTENSION (void*) load);
+}
+
+
+
+
+
+#ifdef GLAD_GL
+
+#ifndef GLAD_LOADER_LIBRARY_C_
+#define GLAD_LOADER_LIBRARY_C_
+
+#include <stddef.h>
+#include <stdlib.h>
+
+#if GLAD_PLATFORM_WIN32
+#include <windows.h>
+#else
+#include <dlfcn.h>
+#endif
+
+
+static void* glad_get_dlopen_handle(const char *lib_names[], int length) {
+ void *handle = NULL;
+ int i;
+
+ for (i = 0; i < length; ++i) {
+#if GLAD_PLATFORM_WIN32
+ #if GLAD_PLATFORM_UWP
+ size_t buffer_size = (strlen(lib_names[i]) + 1) * sizeof(WCHAR);
+ LPWSTR buffer = (LPWSTR) malloc(buffer_size);
+ if (buffer != NULL) {
+ int ret = MultiByteToWideChar(CP_ACP, 0, lib_names[i], -1, buffer, buffer_size);
+ if (ret != 0) {
+ handle = (void*) LoadPackagedLibrary(buffer, 0);
+ }
+ free((void*) buffer);
+ }
+ #else
+ handle = (void*) LoadLibraryA(lib_names[i]);
+ #endif
+#else
+ handle = dlopen(lib_names[i], RTLD_LAZY | RTLD_LOCAL);
+#endif
+ if (handle != NULL) {
+ return handle;
+ }
+ }
+
+ return NULL;
+}
+
+static void glad_close_dlopen_handle(void* handle) {
+ if (handle != NULL) {
+#if GLAD_PLATFORM_WIN32
+ FreeLibrary((HMODULE) handle);
+#else
+ dlclose(handle);
+#endif
+ }
+}
+
+static GLADapiproc glad_dlsym_handle(void* handle, const char *name) {
+ if (handle == NULL) {
+ return NULL;
+ }
+
+#if GLAD_PLATFORM_WIN32
+ return (GLADapiproc) GetProcAddress((HMODULE) handle, name);
+#else
+ return GLAD_GNUC_EXTENSION (GLADapiproc) dlsym(handle, name);
+#endif
+}
+
+#endif /* GLAD_LOADER_LIBRARY_C_ */
+
+typedef void* (GLAD_API_PTR *GLADglprocaddrfunc)(const char*);
+struct _glad_gl_userptr {
+ void *handle;
+ GLADglprocaddrfunc gl_get_proc_address_ptr;
+};
+
+static GLADapiproc glad_gl_get_proc(void *vuserptr, const char *name) {
+ struct _glad_gl_userptr userptr = *(struct _glad_gl_userptr*) vuserptr;
+ GLADapiproc result = NULL;
+
+ if(userptr.gl_get_proc_address_ptr != NULL) {
+ result = GLAD_GNUC_EXTENSION (GLADapiproc) userptr.gl_get_proc_address_ptr(name);
+ }
+ if(result == NULL) {
+ result = glad_dlsym_handle(userptr.handle, name);
+ }
+
+ return result;
+}
+
+static void* _glad_GL_loader_handle = NULL;
+
+static void* glad_gl_dlopen_handle(void) {
+#if GLAD_PLATFORM_APPLE
+ static const char *NAMES[] = {
+ "../Frameworks/OpenGL.framework/OpenGL",
+ "/Library/Frameworks/OpenGL.framework/OpenGL",
+ "/System/Library/Frameworks/OpenGL.framework/OpenGL",
+ "/System/Library/Frameworks/OpenGL.framework/Versions/Current/OpenGL"
+ };
+#elif GLAD_PLATFORM_WIN32
+ static const char *NAMES[] = {"opengl32.dll"};
+#else
+ static const char *NAMES[] = {
+ #if defined(__CYGWIN__)
+ "libGL-1.so",
+ #endif
+ "libGL.so.1",
+ "libGL.so"
+ };
+#endif
+
+ if (_glad_GL_loader_handle == NULL) {
+ _glad_GL_loader_handle = glad_get_dlopen_handle(NAMES, sizeof(NAMES) / sizeof(NAMES[0]));
+ }
+
+ return _glad_GL_loader_handle;
+}
+
+static struct _glad_gl_userptr glad_gl_build_userptr(void *handle) {
+ struct _glad_gl_userptr userptr;
+
+ userptr.handle = handle;
+#if GLAD_PLATFORM_APPLE || defined(__HAIKU__)
+ userptr.gl_get_proc_address_ptr = NULL;
+#elif GLAD_PLATFORM_WIN32
+ userptr.gl_get_proc_address_ptr =
+ (GLADglprocaddrfunc) glad_dlsym_handle(handle, "wglGetProcAddress");
+#else
+ userptr.gl_get_proc_address_ptr =
+ (GLADglprocaddrfunc) glad_dlsym_handle(handle, "glXGetProcAddressARB");
+#endif
+
+ return userptr;
+}
+
+int gladLoaderLoadGL(void) {
+ int version = 0;
+ void *handle;
+ int did_load = 0;
+ struct _glad_gl_userptr userptr;
+
+ did_load = _glad_GL_loader_handle == NULL;
+ handle = glad_gl_dlopen_handle();
+ if (handle) {
+ userptr = glad_gl_build_userptr(handle);
+
+ version = gladLoadGLUserPtr(glad_gl_get_proc, &userptr);
+
+ if (did_load) {
+ gladLoaderUnloadGL();
+ }
+ }
+
+ return version;
+}
+
+
+
+void gladLoaderUnloadGL(void) {
+ if (_glad_GL_loader_handle != NULL) {
+ glad_close_dlopen_handle(_glad_GL_loader_handle);
+ _glad_GL_loader_handle = NULL;
+ }
+}
+
+#endif /* GLAD_GL */
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/thirdparty/glad/glad.c b/thirdparty/glad/glad.c
deleted file mode 100644
index dc1b8cb697..0000000000
--- a/thirdparty/glad/glad.c
+++ /dev/null
@@ -1,1939 +0,0 @@
-/*
-
- OpenGL loader generated by glad 0.1.34 on Tue Nov 17 16:41:02 2020.
-
- Language/Generator: C/C++
- Specification: gl
- APIs: gl=3.3
- Profile: compatibility
- Extensions:
- GL_ARB_debug_output,
- GL_ARB_framebuffer_object,
- GL_EXT_framebuffer_blit,
- GL_EXT_framebuffer_multisample,
- GL_EXT_framebuffer_object
- Loader: True
- Local files: False
- Omit khrplatform: False
- Reproducible: False
-
- Commandline:
- --profile="compatibility" --api="gl=3.3" --generator="c" --spec="gl" --extensions="GL_ARB_debug_output,GL_ARB_framebuffer_object,GL_EXT_framebuffer_blit,GL_EXT_framebuffer_multisample,GL_EXT_framebuffer_object"
- Online:
- https://glad.dav1d.de/#profile=compatibility&language=c&specification=gl&loader=on&api=gl%3D3.3&extensions=GL_ARB_debug_output&extensions=GL_ARB_framebuffer_object&extensions=GL_EXT_framebuffer_blit&extensions=GL_EXT_framebuffer_multisample&extensions=GL_EXT_framebuffer_object
-*/
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <string.h>
-#include <glad/glad.h>
-
-static void* get_proc(const char *namez);
-
-#if defined(_WIN32) || defined(__CYGWIN__)
-#ifndef _WINDOWS_
-#undef APIENTRY
-#endif
-#include <windows.h>
-static HMODULE libGL;
-
-typedef void* (APIENTRYP PFNWGLGETPROCADDRESSPROC_PRIVATE)(const char*);
-static PFNWGLGETPROCADDRESSPROC_PRIVATE gladGetProcAddressPtr;
-
-#ifdef _MSC_VER
-#ifdef __has_include
- #if __has_include(<winapifamily.h>)
- #define HAVE_WINAPIFAMILY 1
- #endif
-#elif _MSC_VER >= 1700 && !_USING_V110_SDK71_
- #define HAVE_WINAPIFAMILY 1
-#endif
-#endif
-
-#ifdef HAVE_WINAPIFAMILY
- #include <winapifamily.h>
- #if !WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP) && WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_APP)
- #define IS_UWP 1
- #endif
-#endif
-
-static
-int open_gl(void) {
-#ifndef IS_UWP
- libGL = LoadLibraryW(L"opengl32.dll");
- if(libGL != NULL) {
- void (* tmp)(void);
- tmp = (void(*)(void)) GetProcAddress(libGL, "wglGetProcAddress");
- gladGetProcAddressPtr = (PFNWGLGETPROCADDRESSPROC_PRIVATE) tmp;
- return gladGetProcAddressPtr != NULL;
- }
-#endif
-
- return 0;
-}
-
-static
-void close_gl(void) {
- if(libGL != NULL) {
- FreeLibrary((HMODULE) libGL);
- libGL = NULL;
- }
-}
-#else
-#include <dlfcn.h>
-static void* libGL;
-
-#if !defined(__APPLE__) && !defined(__HAIKU__)
-typedef void* (APIENTRYP PFNGLXGETPROCADDRESSPROC_PRIVATE)(const char*);
-static PFNGLXGETPROCADDRESSPROC_PRIVATE gladGetProcAddressPtr;
-#endif
-
-static
-int open_gl(void) {
-#ifdef __APPLE__
- static const char *NAMES[] = {
- "../Frameworks/OpenGL.framework/OpenGL",
- "/Library/Frameworks/OpenGL.framework/OpenGL",
- "/System/Library/Frameworks/OpenGL.framework/OpenGL",
- "/System/Library/Frameworks/OpenGL.framework/Versions/Current/OpenGL"
- };
-#else
- static const char *NAMES[] = {"libGL.so.1", "libGL.so"};
-#endif
-
- unsigned int index = 0;
- for(index = 0; index < (sizeof(NAMES) / sizeof(NAMES[0])); index++) {
- libGL = dlopen(NAMES[index], RTLD_NOW | RTLD_GLOBAL);
-
- if(libGL != NULL) {
-#if defined(__APPLE__) || defined(__HAIKU__)
- return 1;
-#else
- gladGetProcAddressPtr = (PFNGLXGETPROCADDRESSPROC_PRIVATE)dlsym(libGL,
- "glXGetProcAddressARB");
- return gladGetProcAddressPtr != NULL;
-#endif
- }
- }
-
- return 0;
-}
-
-static
-void close_gl(void) {
- if(libGL != NULL) {
- dlclose(libGL);
- libGL = NULL;
- }
-}
-#endif
-
-static
-void* get_proc(const char *namez) {
- void* result = NULL;
- if(libGL == NULL) return NULL;
-
-#if !defined(__APPLE__) && !defined(__HAIKU__)
- if(gladGetProcAddressPtr != NULL) {
- result = gladGetProcAddressPtr(namez);
- }
-#endif
- if(result == NULL) {
-#if defined(_WIN32) || defined(__CYGWIN__)
- result = (void*)GetProcAddress((HMODULE) libGL, namez);
-#else
- result = dlsym(libGL, namez);
-#endif
- }
-
- return result;
-}
-
-int gladLoadGL(void) {
- int status = 0;
-
- if(open_gl()) {
- status = gladLoadGLLoader(&get_proc);
- close_gl();
- }
-
- return status;
-}
-
-struct gladGLversionStruct GLVersion = { 0, 0 };
-
-#if defined(GL_ES_VERSION_3_0) || defined(GL_VERSION_3_0)
-#define _GLAD_IS_SOME_NEW_VERSION 1
-#endif
-
-static int max_loaded_major;
-static int max_loaded_minor;
-
-static const char *exts = NULL;
-static int num_exts_i = 0;
-static char **exts_i = NULL;
-
-static int get_exts(void) {
-#ifdef _GLAD_IS_SOME_NEW_VERSION
- if(max_loaded_major < 3) {
-#endif
- exts = (const char *)glGetString(GL_EXTENSIONS);
-#ifdef _GLAD_IS_SOME_NEW_VERSION
- } else {
- unsigned int index;
-
- num_exts_i = 0;
- glGetIntegerv(GL_NUM_EXTENSIONS, &num_exts_i);
- if (num_exts_i > 0) {
- exts_i = (char **)malloc((size_t)num_exts_i * (sizeof *exts_i));
- }
-
- if (exts_i == NULL) {
- return 0;
- }
-
- for(index = 0; index < (unsigned)num_exts_i; index++) {
- const char *gl_str_tmp = (const char*)glGetStringi(GL_EXTENSIONS, index);
- size_t len = strlen(gl_str_tmp);
-
- char *local_str = (char*)malloc((len+1) * sizeof(char));
- if(local_str != NULL) {
- memcpy(local_str, gl_str_tmp, (len+1) * sizeof(char));
- }
- exts_i[index] = local_str;
- }
- }
-#endif
- return 1;
-}
-
-static void free_exts(void) {
- if (exts_i != NULL) {
- int index;
- for(index = 0; index < num_exts_i; index++) {
- free((char *)exts_i[index]);
- }
- free((void *)exts_i);
- exts_i = NULL;
- }
-}
-
-static int has_ext(const char *ext) {
-#ifdef _GLAD_IS_SOME_NEW_VERSION
- if(max_loaded_major < 3) {
-#endif
- const char *extensions;
- const char *loc;
- const char *terminator;
- extensions = exts;
- if(extensions == NULL || ext == NULL) {
- return 0;
- }
-
- while(1) {
- loc = strstr(extensions, ext);
- if(loc == NULL) {
- return 0;
- }
-
- terminator = loc + strlen(ext);
- if((loc == extensions || *(loc - 1) == ' ') &&
- (*terminator == ' ' || *terminator == '\0')) {
- return 1;
- }
- extensions = terminator;
- }
-#ifdef _GLAD_IS_SOME_NEW_VERSION
- } else {
- int index;
- if(exts_i == NULL) return 0;
- for(index = 0; index < num_exts_i; index++) {
- const char *e = exts_i[index];
-
- if(exts_i[index] != NULL && strcmp(e, ext) == 0) {
- return 1;
- }
- }
- }
-#endif
-
- return 0;
-}
-int GLAD_GL_VERSION_1_0 = 0;
-int GLAD_GL_VERSION_1_1 = 0;
-int GLAD_GL_VERSION_1_2 = 0;
-int GLAD_GL_VERSION_1_3 = 0;
-int GLAD_GL_VERSION_1_4 = 0;
-int GLAD_GL_VERSION_1_5 = 0;
-int GLAD_GL_VERSION_2_0 = 0;
-int GLAD_GL_VERSION_2_1 = 0;
-int GLAD_GL_VERSION_3_0 = 0;
-int GLAD_GL_VERSION_3_1 = 0;
-int GLAD_GL_VERSION_3_2 = 0;
-int GLAD_GL_VERSION_3_3 = 0;
-PFNGLACCUMPROC glad_glAccum = NULL;
-PFNGLACTIVETEXTUREPROC glad_glActiveTexture = NULL;
-PFNGLALPHAFUNCPROC glad_glAlphaFunc = NULL;
-PFNGLARETEXTURESRESIDENTPROC glad_glAreTexturesResident = NULL;
-PFNGLARRAYELEMENTPROC glad_glArrayElement = NULL;
-PFNGLATTACHSHADERPROC glad_glAttachShader = NULL;
-PFNGLBEGINPROC glad_glBegin = NULL;
-PFNGLBEGINCONDITIONALRENDERPROC glad_glBeginConditionalRender = NULL;
-PFNGLBEGINQUERYPROC glad_glBeginQuery = NULL;
-PFNGLBEGINTRANSFORMFEEDBACKPROC glad_glBeginTransformFeedback = NULL;
-PFNGLBINDATTRIBLOCATIONPROC glad_glBindAttribLocation = NULL;
-PFNGLBINDBUFFERPROC glad_glBindBuffer = NULL;
-PFNGLBINDBUFFERBASEPROC glad_glBindBufferBase = NULL;
-PFNGLBINDBUFFERRANGEPROC glad_glBindBufferRange = NULL;
-PFNGLBINDFRAGDATALOCATIONPROC glad_glBindFragDataLocation = NULL;
-PFNGLBINDFRAGDATALOCATIONINDEXEDPROC glad_glBindFragDataLocationIndexed = NULL;
-PFNGLBINDFRAMEBUFFERPROC glad_glBindFramebuffer = NULL;
-PFNGLBINDRENDERBUFFERPROC glad_glBindRenderbuffer = NULL;
-PFNGLBINDSAMPLERPROC glad_glBindSampler = NULL;
-PFNGLBINDTEXTUREPROC glad_glBindTexture = NULL;
-PFNGLBINDVERTEXARRAYPROC glad_glBindVertexArray = NULL;
-PFNGLBITMAPPROC glad_glBitmap = NULL;
-PFNGLBLENDCOLORPROC glad_glBlendColor = NULL;
-PFNGLBLENDEQUATIONPROC glad_glBlendEquation = NULL;
-PFNGLBLENDEQUATIONSEPARATEPROC glad_glBlendEquationSeparate = NULL;
-PFNGLBLENDFUNCPROC glad_glBlendFunc = NULL;
-PFNGLBLENDFUNCSEPARATEPROC glad_glBlendFuncSeparate = NULL;
-PFNGLBLITFRAMEBUFFERPROC glad_glBlitFramebuffer = NULL;
-PFNGLBUFFERDATAPROC glad_glBufferData = NULL;
-PFNGLBUFFERSUBDATAPROC glad_glBufferSubData = NULL;
-PFNGLCALLLISTPROC glad_glCallList = NULL;
-PFNGLCALLLISTSPROC glad_glCallLists = NULL;
-PFNGLCHECKFRAMEBUFFERSTATUSPROC glad_glCheckFramebufferStatus = NULL;
-PFNGLCLAMPCOLORPROC glad_glClampColor = NULL;
-PFNGLCLEARPROC glad_glClear = NULL;
-PFNGLCLEARACCUMPROC glad_glClearAccum = NULL;
-PFNGLCLEARBUFFERFIPROC glad_glClearBufferfi = NULL;
-PFNGLCLEARBUFFERFVPROC glad_glClearBufferfv = NULL;
-PFNGLCLEARBUFFERIVPROC glad_glClearBufferiv = NULL;
-PFNGLCLEARBUFFERUIVPROC glad_glClearBufferuiv = NULL;
-PFNGLCLEARCOLORPROC glad_glClearColor = NULL;
-PFNGLCLEARDEPTHPROC glad_glClearDepth = NULL;
-PFNGLCLEARINDEXPROC glad_glClearIndex = NULL;
-PFNGLCLEARSTENCILPROC glad_glClearStencil = NULL;
-PFNGLCLIENTACTIVETEXTUREPROC glad_glClientActiveTexture = NULL;
-PFNGLCLIENTWAITSYNCPROC glad_glClientWaitSync = NULL;
-PFNGLCLIPPLANEPROC glad_glClipPlane = NULL;
-PFNGLCOLOR3BPROC glad_glColor3b = NULL;
-PFNGLCOLOR3BVPROC glad_glColor3bv = NULL;
-PFNGLCOLOR3DPROC glad_glColor3d = NULL;
-PFNGLCOLOR3DVPROC glad_glColor3dv = NULL;
-PFNGLCOLOR3FPROC glad_glColor3f = NULL;
-PFNGLCOLOR3FVPROC glad_glColor3fv = NULL;
-PFNGLCOLOR3IPROC glad_glColor3i = NULL;
-PFNGLCOLOR3IVPROC glad_glColor3iv = NULL;
-PFNGLCOLOR3SPROC glad_glColor3s = NULL;
-PFNGLCOLOR3SVPROC glad_glColor3sv = NULL;
-PFNGLCOLOR3UBPROC glad_glColor3ub = NULL;
-PFNGLCOLOR3UBVPROC glad_glColor3ubv = NULL;
-PFNGLCOLOR3UIPROC glad_glColor3ui = NULL;
-PFNGLCOLOR3UIVPROC glad_glColor3uiv = NULL;
-PFNGLCOLOR3USPROC glad_glColor3us = NULL;
-PFNGLCOLOR3USVPROC glad_glColor3usv = NULL;
-PFNGLCOLOR4BPROC glad_glColor4b = NULL;
-PFNGLCOLOR4BVPROC glad_glColor4bv = NULL;
-PFNGLCOLOR4DPROC glad_glColor4d = NULL;
-PFNGLCOLOR4DVPROC glad_glColor4dv = NULL;
-PFNGLCOLOR4FPROC glad_glColor4f = NULL;
-PFNGLCOLOR4FVPROC glad_glColor4fv = NULL;
-PFNGLCOLOR4IPROC glad_glColor4i = NULL;
-PFNGLCOLOR4IVPROC glad_glColor4iv = NULL;
-PFNGLCOLOR4SPROC glad_glColor4s = NULL;
-PFNGLCOLOR4SVPROC glad_glColor4sv = NULL;
-PFNGLCOLOR4UBPROC glad_glColor4ub = NULL;
-PFNGLCOLOR4UBVPROC glad_glColor4ubv = NULL;
-PFNGLCOLOR4UIPROC glad_glColor4ui = NULL;
-PFNGLCOLOR4UIVPROC glad_glColor4uiv = NULL;
-PFNGLCOLOR4USPROC glad_glColor4us = NULL;
-PFNGLCOLOR4USVPROC glad_glColor4usv = NULL;
-PFNGLCOLORMASKPROC glad_glColorMask = NULL;
-PFNGLCOLORMASKIPROC glad_glColorMaski = NULL;
-PFNGLCOLORMATERIALPROC glad_glColorMaterial = NULL;
-PFNGLCOLORP3UIPROC glad_glColorP3ui = NULL;
-PFNGLCOLORP3UIVPROC glad_glColorP3uiv = NULL;
-PFNGLCOLORP4UIPROC glad_glColorP4ui = NULL;
-PFNGLCOLORP4UIVPROC glad_glColorP4uiv = NULL;
-PFNGLCOLORPOINTERPROC glad_glColorPointer = NULL;
-PFNGLCOMPILESHADERPROC glad_glCompileShader = NULL;
-PFNGLCOMPRESSEDTEXIMAGE1DPROC glad_glCompressedTexImage1D = NULL;
-PFNGLCOMPRESSEDTEXIMAGE2DPROC glad_glCompressedTexImage2D = NULL;
-PFNGLCOMPRESSEDTEXIMAGE3DPROC glad_glCompressedTexImage3D = NULL;
-PFNGLCOMPRESSEDTEXSUBIMAGE1DPROC glad_glCompressedTexSubImage1D = NULL;
-PFNGLCOMPRESSEDTEXSUBIMAGE2DPROC glad_glCompressedTexSubImage2D = NULL;
-PFNGLCOMPRESSEDTEXSUBIMAGE3DPROC glad_glCompressedTexSubImage3D = NULL;
-PFNGLCOPYBUFFERSUBDATAPROC glad_glCopyBufferSubData = NULL;
-PFNGLCOPYPIXELSPROC glad_glCopyPixels = NULL;
-PFNGLCOPYTEXIMAGE1DPROC glad_glCopyTexImage1D = NULL;
-PFNGLCOPYTEXIMAGE2DPROC glad_glCopyTexImage2D = NULL;
-PFNGLCOPYTEXSUBIMAGE1DPROC glad_glCopyTexSubImage1D = NULL;
-PFNGLCOPYTEXSUBIMAGE2DPROC glad_glCopyTexSubImage2D = NULL;
-PFNGLCOPYTEXSUBIMAGE3DPROC glad_glCopyTexSubImage3D = NULL;
-PFNGLCREATEPROGRAMPROC glad_glCreateProgram = NULL;
-PFNGLCREATESHADERPROC glad_glCreateShader = NULL;
-PFNGLCULLFACEPROC glad_glCullFace = NULL;
-PFNGLDELETEBUFFERSPROC glad_glDeleteBuffers = NULL;
-PFNGLDELETEFRAMEBUFFERSPROC glad_glDeleteFramebuffers = NULL;
-PFNGLDELETELISTSPROC glad_glDeleteLists = NULL;
-PFNGLDELETEPROGRAMPROC glad_glDeleteProgram = NULL;
-PFNGLDELETEQUERIESPROC glad_glDeleteQueries = NULL;
-PFNGLDELETERENDERBUFFERSPROC glad_glDeleteRenderbuffers = NULL;
-PFNGLDELETESAMPLERSPROC glad_glDeleteSamplers = NULL;
-PFNGLDELETESHADERPROC glad_glDeleteShader = NULL;
-PFNGLDELETESYNCPROC glad_glDeleteSync = NULL;
-PFNGLDELETETEXTURESPROC glad_glDeleteTextures = NULL;
-PFNGLDELETEVERTEXARRAYSPROC glad_glDeleteVertexArrays = NULL;
-PFNGLDEPTHFUNCPROC glad_glDepthFunc = NULL;
-PFNGLDEPTHMASKPROC glad_glDepthMask = NULL;
-PFNGLDEPTHRANGEPROC glad_glDepthRange = NULL;
-PFNGLDETACHSHADERPROC glad_glDetachShader = NULL;
-PFNGLDISABLEPROC glad_glDisable = NULL;
-PFNGLDISABLECLIENTSTATEPROC glad_glDisableClientState = NULL;
-PFNGLDISABLEVERTEXATTRIBARRAYPROC glad_glDisableVertexAttribArray = NULL;
-PFNGLDISABLEIPROC glad_glDisablei = NULL;
-PFNGLDRAWARRAYSPROC glad_glDrawArrays = NULL;
-PFNGLDRAWARRAYSINSTANCEDPROC glad_glDrawArraysInstanced = NULL;
-PFNGLDRAWBUFFERPROC glad_glDrawBuffer = NULL;
-PFNGLDRAWBUFFERSPROC glad_glDrawBuffers = NULL;
-PFNGLDRAWELEMENTSPROC glad_glDrawElements = NULL;
-PFNGLDRAWELEMENTSBASEVERTEXPROC glad_glDrawElementsBaseVertex = NULL;
-PFNGLDRAWELEMENTSINSTANCEDPROC glad_glDrawElementsInstanced = NULL;
-PFNGLDRAWELEMENTSINSTANCEDBASEVERTEXPROC glad_glDrawElementsInstancedBaseVertex = NULL;
-PFNGLDRAWPIXELSPROC glad_glDrawPixels = NULL;
-PFNGLDRAWRANGEELEMENTSPROC glad_glDrawRangeElements = NULL;
-PFNGLDRAWRANGEELEMENTSBASEVERTEXPROC glad_glDrawRangeElementsBaseVertex = NULL;
-PFNGLEDGEFLAGPROC glad_glEdgeFlag = NULL;
-PFNGLEDGEFLAGPOINTERPROC glad_glEdgeFlagPointer = NULL;
-PFNGLEDGEFLAGVPROC glad_glEdgeFlagv = NULL;
-PFNGLENABLEPROC glad_glEnable = NULL;
-PFNGLENABLECLIENTSTATEPROC glad_glEnableClientState = NULL;
-PFNGLENABLEVERTEXATTRIBARRAYPROC glad_glEnableVertexAttribArray = NULL;
-PFNGLENABLEIPROC glad_glEnablei = NULL;
-PFNGLENDPROC glad_glEnd = NULL;
-PFNGLENDCONDITIONALRENDERPROC glad_glEndConditionalRender = NULL;
-PFNGLENDLISTPROC glad_glEndList = NULL;
-PFNGLENDQUERYPROC glad_glEndQuery = NULL;
-PFNGLENDTRANSFORMFEEDBACKPROC glad_glEndTransformFeedback = NULL;
-PFNGLEVALCOORD1DPROC glad_glEvalCoord1d = NULL;
-PFNGLEVALCOORD1DVPROC glad_glEvalCoord1dv = NULL;
-PFNGLEVALCOORD1FPROC glad_glEvalCoord1f = NULL;
-PFNGLEVALCOORD1FVPROC glad_glEvalCoord1fv = NULL;
-PFNGLEVALCOORD2DPROC glad_glEvalCoord2d = NULL;
-PFNGLEVALCOORD2DVPROC glad_glEvalCoord2dv = NULL;
-PFNGLEVALCOORD2FPROC glad_glEvalCoord2f = NULL;
-PFNGLEVALCOORD2FVPROC glad_glEvalCoord2fv = NULL;
-PFNGLEVALMESH1PROC glad_glEvalMesh1 = NULL;
-PFNGLEVALMESH2PROC glad_glEvalMesh2 = NULL;
-PFNGLEVALPOINT1PROC glad_glEvalPoint1 = NULL;
-PFNGLEVALPOINT2PROC glad_glEvalPoint2 = NULL;
-PFNGLFEEDBACKBUFFERPROC glad_glFeedbackBuffer = NULL;
-PFNGLFENCESYNCPROC glad_glFenceSync = NULL;
-PFNGLFINISHPROC glad_glFinish = NULL;
-PFNGLFLUSHPROC glad_glFlush = NULL;
-PFNGLFLUSHMAPPEDBUFFERRANGEPROC glad_glFlushMappedBufferRange = NULL;
-PFNGLFOGCOORDPOINTERPROC glad_glFogCoordPointer = NULL;
-PFNGLFOGCOORDDPROC glad_glFogCoordd = NULL;
-PFNGLFOGCOORDDVPROC glad_glFogCoorddv = NULL;
-PFNGLFOGCOORDFPROC glad_glFogCoordf = NULL;
-PFNGLFOGCOORDFVPROC glad_glFogCoordfv = NULL;
-PFNGLFOGFPROC glad_glFogf = NULL;
-PFNGLFOGFVPROC glad_glFogfv = NULL;
-PFNGLFOGIPROC glad_glFogi = NULL;
-PFNGLFOGIVPROC glad_glFogiv = NULL;
-PFNGLFRAMEBUFFERRENDERBUFFERPROC glad_glFramebufferRenderbuffer = NULL;
-PFNGLFRAMEBUFFERTEXTUREPROC glad_glFramebufferTexture = NULL;
-PFNGLFRAMEBUFFERTEXTURE1DPROC glad_glFramebufferTexture1D = NULL;
-PFNGLFRAMEBUFFERTEXTURE2DPROC glad_glFramebufferTexture2D = NULL;
-PFNGLFRAMEBUFFERTEXTURE3DPROC glad_glFramebufferTexture3D = NULL;
-PFNGLFRAMEBUFFERTEXTURELAYERPROC glad_glFramebufferTextureLayer = NULL;
-PFNGLFRONTFACEPROC glad_glFrontFace = NULL;
-PFNGLFRUSTUMPROC glad_glFrustum = NULL;
-PFNGLGENBUFFERSPROC glad_glGenBuffers = NULL;
-PFNGLGENFRAMEBUFFERSPROC glad_glGenFramebuffers = NULL;
-PFNGLGENLISTSPROC glad_glGenLists = NULL;
-PFNGLGENQUERIESPROC glad_glGenQueries = NULL;
-PFNGLGENRENDERBUFFERSPROC glad_glGenRenderbuffers = NULL;
-PFNGLGENSAMPLERSPROC glad_glGenSamplers = NULL;
-PFNGLGENTEXTURESPROC glad_glGenTextures = NULL;
-PFNGLGENVERTEXARRAYSPROC glad_glGenVertexArrays = NULL;
-PFNGLGENERATEMIPMAPPROC glad_glGenerateMipmap = NULL;
-PFNGLGETACTIVEATTRIBPROC glad_glGetActiveAttrib = NULL;
-PFNGLGETACTIVEUNIFORMPROC glad_glGetActiveUniform = NULL;
-PFNGLGETACTIVEUNIFORMBLOCKNAMEPROC glad_glGetActiveUniformBlockName = NULL;
-PFNGLGETACTIVEUNIFORMBLOCKIVPROC glad_glGetActiveUniformBlockiv = NULL;
-PFNGLGETACTIVEUNIFORMNAMEPROC glad_glGetActiveUniformName = NULL;
-PFNGLGETACTIVEUNIFORMSIVPROC glad_glGetActiveUniformsiv = NULL;
-PFNGLGETATTACHEDSHADERSPROC glad_glGetAttachedShaders = NULL;
-PFNGLGETATTRIBLOCATIONPROC glad_glGetAttribLocation = NULL;
-PFNGLGETBOOLEANI_VPROC glad_glGetBooleani_v = NULL;
-PFNGLGETBOOLEANVPROC glad_glGetBooleanv = NULL;
-PFNGLGETBUFFERPARAMETERI64VPROC glad_glGetBufferParameteri64v = NULL;
-PFNGLGETBUFFERPARAMETERIVPROC glad_glGetBufferParameteriv = NULL;
-PFNGLGETBUFFERPOINTERVPROC glad_glGetBufferPointerv = NULL;
-PFNGLGETBUFFERSUBDATAPROC glad_glGetBufferSubData = NULL;
-PFNGLGETCLIPPLANEPROC glad_glGetClipPlane = NULL;
-PFNGLGETCOMPRESSEDTEXIMAGEPROC glad_glGetCompressedTexImage = NULL;
-PFNGLGETDOUBLEVPROC glad_glGetDoublev = NULL;
-PFNGLGETERRORPROC glad_glGetError = NULL;
-PFNGLGETFLOATVPROC glad_glGetFloatv = NULL;
-PFNGLGETFRAGDATAINDEXPROC glad_glGetFragDataIndex = NULL;
-PFNGLGETFRAGDATALOCATIONPROC glad_glGetFragDataLocation = NULL;
-PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVPROC glad_glGetFramebufferAttachmentParameteriv = NULL;
-PFNGLGETINTEGER64I_VPROC glad_glGetInteger64i_v = NULL;
-PFNGLGETINTEGER64VPROC glad_glGetInteger64v = NULL;
-PFNGLGETINTEGERI_VPROC glad_glGetIntegeri_v = NULL;
-PFNGLGETINTEGERVPROC glad_glGetIntegerv = NULL;
-PFNGLGETLIGHTFVPROC glad_glGetLightfv = NULL;
-PFNGLGETLIGHTIVPROC glad_glGetLightiv = NULL;
-PFNGLGETMAPDVPROC glad_glGetMapdv = NULL;
-PFNGLGETMAPFVPROC glad_glGetMapfv = NULL;
-PFNGLGETMAPIVPROC glad_glGetMapiv = NULL;
-PFNGLGETMATERIALFVPROC glad_glGetMaterialfv = NULL;
-PFNGLGETMATERIALIVPROC glad_glGetMaterialiv = NULL;
-PFNGLGETMULTISAMPLEFVPROC glad_glGetMultisamplefv = NULL;
-PFNGLGETPIXELMAPFVPROC glad_glGetPixelMapfv = NULL;
-PFNGLGETPIXELMAPUIVPROC glad_glGetPixelMapuiv = NULL;
-PFNGLGETPIXELMAPUSVPROC glad_glGetPixelMapusv = NULL;
-PFNGLGETPOINTERVPROC glad_glGetPointerv = NULL;
-PFNGLGETPOLYGONSTIPPLEPROC glad_glGetPolygonStipple = NULL;
-PFNGLGETPROGRAMINFOLOGPROC glad_glGetProgramInfoLog = NULL;
-PFNGLGETPROGRAMIVPROC glad_glGetProgramiv = NULL;
-PFNGLGETQUERYOBJECTI64VPROC glad_glGetQueryObjecti64v = NULL;
-PFNGLGETQUERYOBJECTIVPROC glad_glGetQueryObjectiv = NULL;
-PFNGLGETQUERYOBJECTUI64VPROC glad_glGetQueryObjectui64v = NULL;
-PFNGLGETQUERYOBJECTUIVPROC glad_glGetQueryObjectuiv = NULL;
-PFNGLGETQUERYIVPROC glad_glGetQueryiv = NULL;
-PFNGLGETRENDERBUFFERPARAMETERIVPROC glad_glGetRenderbufferParameteriv = NULL;
-PFNGLGETSAMPLERPARAMETERIIVPROC glad_glGetSamplerParameterIiv = NULL;
-PFNGLGETSAMPLERPARAMETERIUIVPROC glad_glGetSamplerParameterIuiv = NULL;
-PFNGLGETSAMPLERPARAMETERFVPROC glad_glGetSamplerParameterfv = NULL;
-PFNGLGETSAMPLERPARAMETERIVPROC glad_glGetSamplerParameteriv = NULL;
-PFNGLGETSHADERINFOLOGPROC glad_glGetShaderInfoLog = NULL;
-PFNGLGETSHADERSOURCEPROC glad_glGetShaderSource = NULL;
-PFNGLGETSHADERIVPROC glad_glGetShaderiv = NULL;
-PFNGLGETSTRINGPROC glad_glGetString = NULL;
-PFNGLGETSTRINGIPROC glad_glGetStringi = NULL;
-PFNGLGETSYNCIVPROC glad_glGetSynciv = NULL;
-PFNGLGETTEXENVFVPROC glad_glGetTexEnvfv = NULL;
-PFNGLGETTEXENVIVPROC glad_glGetTexEnviv = NULL;
-PFNGLGETTEXGENDVPROC glad_glGetTexGendv = NULL;
-PFNGLGETTEXGENFVPROC glad_glGetTexGenfv = NULL;
-PFNGLGETTEXGENIVPROC glad_glGetTexGeniv = NULL;
-PFNGLGETTEXIMAGEPROC glad_glGetTexImage = NULL;
-PFNGLGETTEXLEVELPARAMETERFVPROC glad_glGetTexLevelParameterfv = NULL;
-PFNGLGETTEXLEVELPARAMETERIVPROC glad_glGetTexLevelParameteriv = NULL;
-PFNGLGETTEXPARAMETERIIVPROC glad_glGetTexParameterIiv = NULL;
-PFNGLGETTEXPARAMETERIUIVPROC glad_glGetTexParameterIuiv = NULL;
-PFNGLGETTEXPARAMETERFVPROC glad_glGetTexParameterfv = NULL;
-PFNGLGETTEXPARAMETERIVPROC glad_glGetTexParameteriv = NULL;
-PFNGLGETTRANSFORMFEEDBACKVARYINGPROC glad_glGetTransformFeedbackVarying = NULL;
-PFNGLGETUNIFORMBLOCKINDEXPROC glad_glGetUniformBlockIndex = NULL;
-PFNGLGETUNIFORMINDICESPROC glad_glGetUniformIndices = NULL;
-PFNGLGETUNIFORMLOCATIONPROC glad_glGetUniformLocation = NULL;
-PFNGLGETUNIFORMFVPROC glad_glGetUniformfv = NULL;
-PFNGLGETUNIFORMIVPROC glad_glGetUniformiv = NULL;
-PFNGLGETUNIFORMUIVPROC glad_glGetUniformuiv = NULL;
-PFNGLGETVERTEXATTRIBIIVPROC glad_glGetVertexAttribIiv = NULL;
-PFNGLGETVERTEXATTRIBIUIVPROC glad_glGetVertexAttribIuiv = NULL;
-PFNGLGETVERTEXATTRIBPOINTERVPROC glad_glGetVertexAttribPointerv = NULL;
-PFNGLGETVERTEXATTRIBDVPROC glad_glGetVertexAttribdv = NULL;
-PFNGLGETVERTEXATTRIBFVPROC glad_glGetVertexAttribfv = NULL;
-PFNGLGETVERTEXATTRIBIVPROC glad_glGetVertexAttribiv = NULL;
-PFNGLHINTPROC glad_glHint = NULL;
-PFNGLINDEXMASKPROC glad_glIndexMask = NULL;
-PFNGLINDEXPOINTERPROC glad_glIndexPointer = NULL;
-PFNGLINDEXDPROC glad_glIndexd = NULL;
-PFNGLINDEXDVPROC glad_glIndexdv = NULL;
-PFNGLINDEXFPROC glad_glIndexf = NULL;
-PFNGLINDEXFVPROC glad_glIndexfv = NULL;
-PFNGLINDEXIPROC glad_glIndexi = NULL;
-PFNGLINDEXIVPROC glad_glIndexiv = NULL;
-PFNGLINDEXSPROC glad_glIndexs = NULL;
-PFNGLINDEXSVPROC glad_glIndexsv = NULL;
-PFNGLINDEXUBPROC glad_glIndexub = NULL;
-PFNGLINDEXUBVPROC glad_glIndexubv = NULL;
-PFNGLINITNAMESPROC glad_glInitNames = NULL;
-PFNGLINTERLEAVEDARRAYSPROC glad_glInterleavedArrays = NULL;
-PFNGLISBUFFERPROC glad_glIsBuffer = NULL;
-PFNGLISENABLEDPROC glad_glIsEnabled = NULL;
-PFNGLISENABLEDIPROC glad_glIsEnabledi = NULL;
-PFNGLISFRAMEBUFFERPROC glad_glIsFramebuffer = NULL;
-PFNGLISLISTPROC glad_glIsList = NULL;
-PFNGLISPROGRAMPROC glad_glIsProgram = NULL;
-PFNGLISQUERYPROC glad_glIsQuery = NULL;
-PFNGLISRENDERBUFFERPROC glad_glIsRenderbuffer = NULL;
-PFNGLISSAMPLERPROC glad_glIsSampler = NULL;
-PFNGLISSHADERPROC glad_glIsShader = NULL;
-PFNGLISSYNCPROC glad_glIsSync = NULL;
-PFNGLISTEXTUREPROC glad_glIsTexture = NULL;
-PFNGLISVERTEXARRAYPROC glad_glIsVertexArray = NULL;
-PFNGLLIGHTMODELFPROC glad_glLightModelf = NULL;
-PFNGLLIGHTMODELFVPROC glad_glLightModelfv = NULL;
-PFNGLLIGHTMODELIPROC glad_glLightModeli = NULL;
-PFNGLLIGHTMODELIVPROC glad_glLightModeliv = NULL;
-PFNGLLIGHTFPROC glad_glLightf = NULL;
-PFNGLLIGHTFVPROC glad_glLightfv = NULL;
-PFNGLLIGHTIPROC glad_glLighti = NULL;
-PFNGLLIGHTIVPROC glad_glLightiv = NULL;
-PFNGLLINESTIPPLEPROC glad_glLineStipple = NULL;
-PFNGLLINEWIDTHPROC glad_glLineWidth = NULL;
-PFNGLLINKPROGRAMPROC glad_glLinkProgram = NULL;
-PFNGLLISTBASEPROC glad_glListBase = NULL;
-PFNGLLOADIDENTITYPROC glad_glLoadIdentity = NULL;
-PFNGLLOADMATRIXDPROC glad_glLoadMatrixd = NULL;
-PFNGLLOADMATRIXFPROC glad_glLoadMatrixf = NULL;
-PFNGLLOADNAMEPROC glad_glLoadName = NULL;
-PFNGLLOADTRANSPOSEMATRIXDPROC glad_glLoadTransposeMatrixd = NULL;
-PFNGLLOADTRANSPOSEMATRIXFPROC glad_glLoadTransposeMatrixf = NULL;
-PFNGLLOGICOPPROC glad_glLogicOp = NULL;
-PFNGLMAP1DPROC glad_glMap1d = NULL;
-PFNGLMAP1FPROC glad_glMap1f = NULL;
-PFNGLMAP2DPROC glad_glMap2d = NULL;
-PFNGLMAP2FPROC glad_glMap2f = NULL;
-PFNGLMAPBUFFERPROC glad_glMapBuffer = NULL;
-PFNGLMAPBUFFERRANGEPROC glad_glMapBufferRange = NULL;
-PFNGLMAPGRID1DPROC glad_glMapGrid1d = NULL;
-PFNGLMAPGRID1FPROC glad_glMapGrid1f = NULL;
-PFNGLMAPGRID2DPROC glad_glMapGrid2d = NULL;
-PFNGLMAPGRID2FPROC glad_glMapGrid2f = NULL;
-PFNGLMATERIALFPROC glad_glMaterialf = NULL;
-PFNGLMATERIALFVPROC glad_glMaterialfv = NULL;
-PFNGLMATERIALIPROC glad_glMateriali = NULL;
-PFNGLMATERIALIVPROC glad_glMaterialiv = NULL;
-PFNGLMATRIXMODEPROC glad_glMatrixMode = NULL;
-PFNGLMULTMATRIXDPROC glad_glMultMatrixd = NULL;
-PFNGLMULTMATRIXFPROC glad_glMultMatrixf = NULL;
-PFNGLMULTTRANSPOSEMATRIXDPROC glad_glMultTransposeMatrixd = NULL;
-PFNGLMULTTRANSPOSEMATRIXFPROC glad_glMultTransposeMatrixf = NULL;
-PFNGLMULTIDRAWARRAYSPROC glad_glMultiDrawArrays = NULL;
-PFNGLMULTIDRAWELEMENTSPROC glad_glMultiDrawElements = NULL;
-PFNGLMULTIDRAWELEMENTSBASEVERTEXPROC glad_glMultiDrawElementsBaseVertex = NULL;
-PFNGLMULTITEXCOORD1DPROC glad_glMultiTexCoord1d = NULL;
-PFNGLMULTITEXCOORD1DVPROC glad_glMultiTexCoord1dv = NULL;
-PFNGLMULTITEXCOORD1FPROC glad_glMultiTexCoord1f = NULL;
-PFNGLMULTITEXCOORD1FVPROC glad_glMultiTexCoord1fv = NULL;
-PFNGLMULTITEXCOORD1IPROC glad_glMultiTexCoord1i = NULL;
-PFNGLMULTITEXCOORD1IVPROC glad_glMultiTexCoord1iv = NULL;
-PFNGLMULTITEXCOORD1SPROC glad_glMultiTexCoord1s = NULL;
-PFNGLMULTITEXCOORD1SVPROC glad_glMultiTexCoord1sv = NULL;
-PFNGLMULTITEXCOORD2DPROC glad_glMultiTexCoord2d = NULL;
-PFNGLMULTITEXCOORD2DVPROC glad_glMultiTexCoord2dv = NULL;
-PFNGLMULTITEXCOORD2FPROC glad_glMultiTexCoord2f = NULL;
-PFNGLMULTITEXCOORD2FVPROC glad_glMultiTexCoord2fv = NULL;
-PFNGLMULTITEXCOORD2IPROC glad_glMultiTexCoord2i = NULL;
-PFNGLMULTITEXCOORD2IVPROC glad_glMultiTexCoord2iv = NULL;
-PFNGLMULTITEXCOORD2SPROC glad_glMultiTexCoord2s = NULL;
-PFNGLMULTITEXCOORD2SVPROC glad_glMultiTexCoord2sv = NULL;
-PFNGLMULTITEXCOORD3DPROC glad_glMultiTexCoord3d = NULL;
-PFNGLMULTITEXCOORD3DVPROC glad_glMultiTexCoord3dv = NULL;
-PFNGLMULTITEXCOORD3FPROC glad_glMultiTexCoord3f = NULL;
-PFNGLMULTITEXCOORD3FVPROC glad_glMultiTexCoord3fv = NULL;
-PFNGLMULTITEXCOORD3IPROC glad_glMultiTexCoord3i = NULL;
-PFNGLMULTITEXCOORD3IVPROC glad_glMultiTexCoord3iv = NULL;
-PFNGLMULTITEXCOORD3SPROC glad_glMultiTexCoord3s = NULL;
-PFNGLMULTITEXCOORD3SVPROC glad_glMultiTexCoord3sv = NULL;
-PFNGLMULTITEXCOORD4DPROC glad_glMultiTexCoord4d = NULL;
-PFNGLMULTITEXCOORD4DVPROC glad_glMultiTexCoord4dv = NULL;
-PFNGLMULTITEXCOORD4FPROC glad_glMultiTexCoord4f = NULL;
-PFNGLMULTITEXCOORD4FVPROC glad_glMultiTexCoord4fv = NULL;
-PFNGLMULTITEXCOORD4IPROC glad_glMultiTexCoord4i = NULL;
-PFNGLMULTITEXCOORD4IVPROC glad_glMultiTexCoord4iv = NULL;
-PFNGLMULTITEXCOORD4SPROC glad_glMultiTexCoord4s = NULL;
-PFNGLMULTITEXCOORD4SVPROC glad_glMultiTexCoord4sv = NULL;
-PFNGLMULTITEXCOORDP1UIPROC glad_glMultiTexCoordP1ui = NULL;
-PFNGLMULTITEXCOORDP1UIVPROC glad_glMultiTexCoordP1uiv = NULL;
-PFNGLMULTITEXCOORDP2UIPROC glad_glMultiTexCoordP2ui = NULL;
-PFNGLMULTITEXCOORDP2UIVPROC glad_glMultiTexCoordP2uiv = NULL;
-PFNGLMULTITEXCOORDP3UIPROC glad_glMultiTexCoordP3ui = NULL;
-PFNGLMULTITEXCOORDP3UIVPROC glad_glMultiTexCoordP3uiv = NULL;
-PFNGLMULTITEXCOORDP4UIPROC glad_glMultiTexCoordP4ui = NULL;
-PFNGLMULTITEXCOORDP4UIVPROC glad_glMultiTexCoordP4uiv = NULL;
-PFNGLNEWLISTPROC glad_glNewList = NULL;
-PFNGLNORMAL3BPROC glad_glNormal3b = NULL;
-PFNGLNORMAL3BVPROC glad_glNormal3bv = NULL;
-PFNGLNORMAL3DPROC glad_glNormal3d = NULL;
-PFNGLNORMAL3DVPROC glad_glNormal3dv = NULL;
-PFNGLNORMAL3FPROC glad_glNormal3f = NULL;
-PFNGLNORMAL3FVPROC glad_glNormal3fv = NULL;
-PFNGLNORMAL3IPROC glad_glNormal3i = NULL;
-PFNGLNORMAL3IVPROC glad_glNormal3iv = NULL;
-PFNGLNORMAL3SPROC glad_glNormal3s = NULL;
-PFNGLNORMAL3SVPROC glad_glNormal3sv = NULL;
-PFNGLNORMALP3UIPROC glad_glNormalP3ui = NULL;
-PFNGLNORMALP3UIVPROC glad_glNormalP3uiv = NULL;
-PFNGLNORMALPOINTERPROC glad_glNormalPointer = NULL;
-PFNGLORTHOPROC glad_glOrtho = NULL;
-PFNGLPASSTHROUGHPROC glad_glPassThrough = NULL;
-PFNGLPIXELMAPFVPROC glad_glPixelMapfv = NULL;
-PFNGLPIXELMAPUIVPROC glad_glPixelMapuiv = NULL;
-PFNGLPIXELMAPUSVPROC glad_glPixelMapusv = NULL;
-PFNGLPIXELSTOREFPROC glad_glPixelStoref = NULL;
-PFNGLPIXELSTOREIPROC glad_glPixelStorei = NULL;
-PFNGLPIXELTRANSFERFPROC glad_glPixelTransferf = NULL;
-PFNGLPIXELTRANSFERIPROC glad_glPixelTransferi = NULL;
-PFNGLPIXELZOOMPROC glad_glPixelZoom = NULL;
-PFNGLPOINTPARAMETERFPROC glad_glPointParameterf = NULL;
-PFNGLPOINTPARAMETERFVPROC glad_glPointParameterfv = NULL;
-PFNGLPOINTPARAMETERIPROC glad_glPointParameteri = NULL;
-PFNGLPOINTPARAMETERIVPROC glad_glPointParameteriv = NULL;
-PFNGLPOINTSIZEPROC glad_glPointSize = NULL;
-PFNGLPOLYGONMODEPROC glad_glPolygonMode = NULL;
-PFNGLPOLYGONOFFSETPROC glad_glPolygonOffset = NULL;
-PFNGLPOLYGONSTIPPLEPROC glad_glPolygonStipple = NULL;
-PFNGLPOPATTRIBPROC glad_glPopAttrib = NULL;
-PFNGLPOPCLIENTATTRIBPROC glad_glPopClientAttrib = NULL;
-PFNGLPOPMATRIXPROC glad_glPopMatrix = NULL;
-PFNGLPOPNAMEPROC glad_glPopName = NULL;
-PFNGLPRIMITIVERESTARTINDEXPROC glad_glPrimitiveRestartIndex = NULL;
-PFNGLPRIORITIZETEXTURESPROC glad_glPrioritizeTextures = NULL;
-PFNGLPROVOKINGVERTEXPROC glad_glProvokingVertex = NULL;
-PFNGLPUSHATTRIBPROC glad_glPushAttrib = NULL;
-PFNGLPUSHCLIENTATTRIBPROC glad_glPushClientAttrib = NULL;
-PFNGLPUSHMATRIXPROC glad_glPushMatrix = NULL;
-PFNGLPUSHNAMEPROC glad_glPushName = NULL;
-PFNGLQUERYCOUNTERPROC glad_glQueryCounter = NULL;
-PFNGLRASTERPOS2DPROC glad_glRasterPos2d = NULL;
-PFNGLRASTERPOS2DVPROC glad_glRasterPos2dv = NULL;
-PFNGLRASTERPOS2FPROC glad_glRasterPos2f = NULL;
-PFNGLRASTERPOS2FVPROC glad_glRasterPos2fv = NULL;
-PFNGLRASTERPOS2IPROC glad_glRasterPos2i = NULL;
-PFNGLRASTERPOS2IVPROC glad_glRasterPos2iv = NULL;
-PFNGLRASTERPOS2SPROC glad_glRasterPos2s = NULL;
-PFNGLRASTERPOS2SVPROC glad_glRasterPos2sv = NULL;
-PFNGLRASTERPOS3DPROC glad_glRasterPos3d = NULL;
-PFNGLRASTERPOS3DVPROC glad_glRasterPos3dv = NULL;
-PFNGLRASTERPOS3FPROC glad_glRasterPos3f = NULL;
-PFNGLRASTERPOS3FVPROC glad_glRasterPos3fv = NULL;
-PFNGLRASTERPOS3IPROC glad_glRasterPos3i = NULL;
-PFNGLRASTERPOS3IVPROC glad_glRasterPos3iv = NULL;
-PFNGLRASTERPOS3SPROC glad_glRasterPos3s = NULL;
-PFNGLRASTERPOS3SVPROC glad_glRasterPos3sv = NULL;
-PFNGLRASTERPOS4DPROC glad_glRasterPos4d = NULL;
-PFNGLRASTERPOS4DVPROC glad_glRasterPos4dv = NULL;
-PFNGLRASTERPOS4FPROC glad_glRasterPos4f = NULL;
-PFNGLRASTERPOS4FVPROC glad_glRasterPos4fv = NULL;
-PFNGLRASTERPOS4IPROC glad_glRasterPos4i = NULL;
-PFNGLRASTERPOS4IVPROC glad_glRasterPos4iv = NULL;
-PFNGLRASTERPOS4SPROC glad_glRasterPos4s = NULL;
-PFNGLRASTERPOS4SVPROC glad_glRasterPos4sv = NULL;
-PFNGLREADBUFFERPROC glad_glReadBuffer = NULL;
-PFNGLREADPIXELSPROC glad_glReadPixels = NULL;
-PFNGLRECTDPROC glad_glRectd = NULL;
-PFNGLRECTDVPROC glad_glRectdv = NULL;
-PFNGLRECTFPROC glad_glRectf = NULL;
-PFNGLRECTFVPROC glad_glRectfv = NULL;
-PFNGLRECTIPROC glad_glRecti = NULL;
-PFNGLRECTIVPROC glad_glRectiv = NULL;
-PFNGLRECTSPROC glad_glRects = NULL;
-PFNGLRECTSVPROC glad_glRectsv = NULL;
-PFNGLRENDERMODEPROC glad_glRenderMode = NULL;
-PFNGLRENDERBUFFERSTORAGEPROC glad_glRenderbufferStorage = NULL;
-PFNGLRENDERBUFFERSTORAGEMULTISAMPLEPROC glad_glRenderbufferStorageMultisample = NULL;
-PFNGLROTATEDPROC glad_glRotated = NULL;
-PFNGLROTATEFPROC glad_glRotatef = NULL;
-PFNGLSAMPLECOVERAGEPROC glad_glSampleCoverage = NULL;
-PFNGLSAMPLEMASKIPROC glad_glSampleMaski = NULL;
-PFNGLSAMPLERPARAMETERIIVPROC glad_glSamplerParameterIiv = NULL;
-PFNGLSAMPLERPARAMETERIUIVPROC glad_glSamplerParameterIuiv = NULL;
-PFNGLSAMPLERPARAMETERFPROC glad_glSamplerParameterf = NULL;
-PFNGLSAMPLERPARAMETERFVPROC glad_glSamplerParameterfv = NULL;
-PFNGLSAMPLERPARAMETERIPROC glad_glSamplerParameteri = NULL;
-PFNGLSAMPLERPARAMETERIVPROC glad_glSamplerParameteriv = NULL;
-PFNGLSCALEDPROC glad_glScaled = NULL;
-PFNGLSCALEFPROC glad_glScalef = NULL;
-PFNGLSCISSORPROC glad_glScissor = NULL;
-PFNGLSECONDARYCOLOR3BPROC glad_glSecondaryColor3b = NULL;
-PFNGLSECONDARYCOLOR3BVPROC glad_glSecondaryColor3bv = NULL;
-PFNGLSECONDARYCOLOR3DPROC glad_glSecondaryColor3d = NULL;
-PFNGLSECONDARYCOLOR3DVPROC glad_glSecondaryColor3dv = NULL;
-PFNGLSECONDARYCOLOR3FPROC glad_glSecondaryColor3f = NULL;
-PFNGLSECONDARYCOLOR3FVPROC glad_glSecondaryColor3fv = NULL;
-PFNGLSECONDARYCOLOR3IPROC glad_glSecondaryColor3i = NULL;
-PFNGLSECONDARYCOLOR3IVPROC glad_glSecondaryColor3iv = NULL;
-PFNGLSECONDARYCOLOR3SPROC glad_glSecondaryColor3s = NULL;
-PFNGLSECONDARYCOLOR3SVPROC glad_glSecondaryColor3sv = NULL;
-PFNGLSECONDARYCOLOR3UBPROC glad_glSecondaryColor3ub = NULL;
-PFNGLSECONDARYCOLOR3UBVPROC glad_glSecondaryColor3ubv = NULL;
-PFNGLSECONDARYCOLOR3UIPROC glad_glSecondaryColor3ui = NULL;
-PFNGLSECONDARYCOLOR3UIVPROC glad_glSecondaryColor3uiv = NULL;
-PFNGLSECONDARYCOLOR3USPROC glad_glSecondaryColor3us = NULL;
-PFNGLSECONDARYCOLOR3USVPROC glad_glSecondaryColor3usv = NULL;
-PFNGLSECONDARYCOLORP3UIPROC glad_glSecondaryColorP3ui = NULL;
-PFNGLSECONDARYCOLORP3UIVPROC glad_glSecondaryColorP3uiv = NULL;
-PFNGLSECONDARYCOLORPOINTERPROC glad_glSecondaryColorPointer = NULL;
-PFNGLSELECTBUFFERPROC glad_glSelectBuffer = NULL;
-PFNGLSHADEMODELPROC glad_glShadeModel = NULL;
-PFNGLSHADERSOURCEPROC glad_glShaderSource = NULL;
-PFNGLSTENCILFUNCPROC glad_glStencilFunc = NULL;
-PFNGLSTENCILFUNCSEPARATEPROC glad_glStencilFuncSeparate = NULL;
-PFNGLSTENCILMASKPROC glad_glStencilMask = NULL;
-PFNGLSTENCILMASKSEPARATEPROC glad_glStencilMaskSeparate = NULL;
-PFNGLSTENCILOPPROC glad_glStencilOp = NULL;
-PFNGLSTENCILOPSEPARATEPROC glad_glStencilOpSeparate = NULL;
-PFNGLTEXBUFFERPROC glad_glTexBuffer = NULL;
-PFNGLTEXCOORD1DPROC glad_glTexCoord1d = NULL;
-PFNGLTEXCOORD1DVPROC glad_glTexCoord1dv = NULL;
-PFNGLTEXCOORD1FPROC glad_glTexCoord1f = NULL;
-PFNGLTEXCOORD1FVPROC glad_glTexCoord1fv = NULL;
-PFNGLTEXCOORD1IPROC glad_glTexCoord1i = NULL;
-PFNGLTEXCOORD1IVPROC glad_glTexCoord1iv = NULL;
-PFNGLTEXCOORD1SPROC glad_glTexCoord1s = NULL;
-PFNGLTEXCOORD1SVPROC glad_glTexCoord1sv = NULL;
-PFNGLTEXCOORD2DPROC glad_glTexCoord2d = NULL;
-PFNGLTEXCOORD2DVPROC glad_glTexCoord2dv = NULL;
-PFNGLTEXCOORD2FPROC glad_glTexCoord2f = NULL;
-PFNGLTEXCOORD2FVPROC glad_glTexCoord2fv = NULL;
-PFNGLTEXCOORD2IPROC glad_glTexCoord2i = NULL;
-PFNGLTEXCOORD2IVPROC glad_glTexCoord2iv = NULL;
-PFNGLTEXCOORD2SPROC glad_glTexCoord2s = NULL;
-PFNGLTEXCOORD2SVPROC glad_glTexCoord2sv = NULL;
-PFNGLTEXCOORD3DPROC glad_glTexCoord3d = NULL;
-PFNGLTEXCOORD3DVPROC glad_glTexCoord3dv = NULL;
-PFNGLTEXCOORD3FPROC glad_glTexCoord3f = NULL;
-PFNGLTEXCOORD3FVPROC glad_glTexCoord3fv = NULL;
-PFNGLTEXCOORD3IPROC glad_glTexCoord3i = NULL;
-PFNGLTEXCOORD3IVPROC glad_glTexCoord3iv = NULL;
-PFNGLTEXCOORD3SPROC glad_glTexCoord3s = NULL;
-PFNGLTEXCOORD3SVPROC glad_glTexCoord3sv = NULL;
-PFNGLTEXCOORD4DPROC glad_glTexCoord4d = NULL;
-PFNGLTEXCOORD4DVPROC glad_glTexCoord4dv = NULL;
-PFNGLTEXCOORD4FPROC glad_glTexCoord4f = NULL;
-PFNGLTEXCOORD4FVPROC glad_glTexCoord4fv = NULL;
-PFNGLTEXCOORD4IPROC glad_glTexCoord4i = NULL;
-PFNGLTEXCOORD4IVPROC glad_glTexCoord4iv = NULL;
-PFNGLTEXCOORD4SPROC glad_glTexCoord4s = NULL;
-PFNGLTEXCOORD4SVPROC glad_glTexCoord4sv = NULL;
-PFNGLTEXCOORDP1UIPROC glad_glTexCoordP1ui = NULL;
-PFNGLTEXCOORDP1UIVPROC glad_glTexCoordP1uiv = NULL;
-PFNGLTEXCOORDP2UIPROC glad_glTexCoordP2ui = NULL;
-PFNGLTEXCOORDP2UIVPROC glad_glTexCoordP2uiv = NULL;
-PFNGLTEXCOORDP3UIPROC glad_glTexCoordP3ui = NULL;
-PFNGLTEXCOORDP3UIVPROC glad_glTexCoordP3uiv = NULL;
-PFNGLTEXCOORDP4UIPROC glad_glTexCoordP4ui = NULL;
-PFNGLTEXCOORDP4UIVPROC glad_glTexCoordP4uiv = NULL;
-PFNGLTEXCOORDPOINTERPROC glad_glTexCoordPointer = NULL;
-PFNGLTEXENVFPROC glad_glTexEnvf = NULL;
-PFNGLTEXENVFVPROC glad_glTexEnvfv = NULL;
-PFNGLTEXENVIPROC glad_glTexEnvi = NULL;
-PFNGLTEXENVIVPROC glad_glTexEnviv = NULL;
-PFNGLTEXGENDPROC glad_glTexGend = NULL;
-PFNGLTEXGENDVPROC glad_glTexGendv = NULL;
-PFNGLTEXGENFPROC glad_glTexGenf = NULL;
-PFNGLTEXGENFVPROC glad_glTexGenfv = NULL;
-PFNGLTEXGENIPROC glad_glTexGeni = NULL;
-PFNGLTEXGENIVPROC glad_glTexGeniv = NULL;
-PFNGLTEXIMAGE1DPROC glad_glTexImage1D = NULL;
-PFNGLTEXIMAGE2DPROC glad_glTexImage2D = NULL;
-PFNGLTEXIMAGE2DMULTISAMPLEPROC glad_glTexImage2DMultisample = NULL;
-PFNGLTEXIMAGE3DPROC glad_glTexImage3D = NULL;
-PFNGLTEXIMAGE3DMULTISAMPLEPROC glad_glTexImage3DMultisample = NULL;
-PFNGLTEXPARAMETERIIVPROC glad_glTexParameterIiv = NULL;
-PFNGLTEXPARAMETERIUIVPROC glad_glTexParameterIuiv = NULL;
-PFNGLTEXPARAMETERFPROC glad_glTexParameterf = NULL;
-PFNGLTEXPARAMETERFVPROC glad_glTexParameterfv = NULL;
-PFNGLTEXPARAMETERIPROC glad_glTexParameteri = NULL;
-PFNGLTEXPARAMETERIVPROC glad_glTexParameteriv = NULL;
-PFNGLTEXSUBIMAGE1DPROC glad_glTexSubImage1D = NULL;
-PFNGLTEXSUBIMAGE2DPROC glad_glTexSubImage2D = NULL;
-PFNGLTEXSUBIMAGE3DPROC glad_glTexSubImage3D = NULL;
-PFNGLTRANSFORMFEEDBACKVARYINGSPROC glad_glTransformFeedbackVaryings = NULL;
-PFNGLTRANSLATEDPROC glad_glTranslated = NULL;
-PFNGLTRANSLATEFPROC glad_glTranslatef = NULL;
-PFNGLUNIFORM1FPROC glad_glUniform1f = NULL;
-PFNGLUNIFORM1FVPROC glad_glUniform1fv = NULL;
-PFNGLUNIFORM1IPROC glad_glUniform1i = NULL;
-PFNGLUNIFORM1IVPROC glad_glUniform1iv = NULL;
-PFNGLUNIFORM1UIPROC glad_glUniform1ui = NULL;
-PFNGLUNIFORM1UIVPROC glad_glUniform1uiv = NULL;
-PFNGLUNIFORM2FPROC glad_glUniform2f = NULL;
-PFNGLUNIFORM2FVPROC glad_glUniform2fv = NULL;
-PFNGLUNIFORM2IPROC glad_glUniform2i = NULL;
-PFNGLUNIFORM2IVPROC glad_glUniform2iv = NULL;
-PFNGLUNIFORM2UIPROC glad_glUniform2ui = NULL;
-PFNGLUNIFORM2UIVPROC glad_glUniform2uiv = NULL;
-PFNGLUNIFORM3FPROC glad_glUniform3f = NULL;
-PFNGLUNIFORM3FVPROC glad_glUniform3fv = NULL;
-PFNGLUNIFORM3IPROC glad_glUniform3i = NULL;
-PFNGLUNIFORM3IVPROC glad_glUniform3iv = NULL;
-PFNGLUNIFORM3UIPROC glad_glUniform3ui = NULL;
-PFNGLUNIFORM3UIVPROC glad_glUniform3uiv = NULL;
-PFNGLUNIFORM4FPROC glad_glUniform4f = NULL;
-PFNGLUNIFORM4FVPROC glad_glUniform4fv = NULL;
-PFNGLUNIFORM4IPROC glad_glUniform4i = NULL;
-PFNGLUNIFORM4IVPROC glad_glUniform4iv = NULL;
-PFNGLUNIFORM4UIPROC glad_glUniform4ui = NULL;
-PFNGLUNIFORM4UIVPROC glad_glUniform4uiv = NULL;
-PFNGLUNIFORMBLOCKBINDINGPROC glad_glUniformBlockBinding = NULL;
-PFNGLUNIFORMMATRIX2FVPROC glad_glUniformMatrix2fv = NULL;
-PFNGLUNIFORMMATRIX2X3FVPROC glad_glUniformMatrix2x3fv = NULL;
-PFNGLUNIFORMMATRIX2X4FVPROC glad_glUniformMatrix2x4fv = NULL;
-PFNGLUNIFORMMATRIX3FVPROC glad_glUniformMatrix3fv = NULL;
-PFNGLUNIFORMMATRIX3X2FVPROC glad_glUniformMatrix3x2fv = NULL;
-PFNGLUNIFORMMATRIX3X4FVPROC glad_glUniformMatrix3x4fv = NULL;
-PFNGLUNIFORMMATRIX4FVPROC glad_glUniformMatrix4fv = NULL;
-PFNGLUNIFORMMATRIX4X2FVPROC glad_glUniformMatrix4x2fv = NULL;
-PFNGLUNIFORMMATRIX4X3FVPROC glad_glUniformMatrix4x3fv = NULL;
-PFNGLUNMAPBUFFERPROC glad_glUnmapBuffer = NULL;
-PFNGLUSEPROGRAMPROC glad_glUseProgram = NULL;
-PFNGLVALIDATEPROGRAMPROC glad_glValidateProgram = NULL;
-PFNGLVERTEX2DPROC glad_glVertex2d = NULL;
-PFNGLVERTEX2DVPROC glad_glVertex2dv = NULL;
-PFNGLVERTEX2FPROC glad_glVertex2f = NULL;
-PFNGLVERTEX2FVPROC glad_glVertex2fv = NULL;
-PFNGLVERTEX2IPROC glad_glVertex2i = NULL;
-PFNGLVERTEX2IVPROC glad_glVertex2iv = NULL;
-PFNGLVERTEX2SPROC glad_glVertex2s = NULL;
-PFNGLVERTEX2SVPROC glad_glVertex2sv = NULL;
-PFNGLVERTEX3DPROC glad_glVertex3d = NULL;
-PFNGLVERTEX3DVPROC glad_glVertex3dv = NULL;
-PFNGLVERTEX3FPROC glad_glVertex3f = NULL;
-PFNGLVERTEX3FVPROC glad_glVertex3fv = NULL;
-PFNGLVERTEX3IPROC glad_glVertex3i = NULL;
-PFNGLVERTEX3IVPROC glad_glVertex3iv = NULL;
-PFNGLVERTEX3SPROC glad_glVertex3s = NULL;
-PFNGLVERTEX3SVPROC glad_glVertex3sv = NULL;
-PFNGLVERTEX4DPROC glad_glVertex4d = NULL;
-PFNGLVERTEX4DVPROC glad_glVertex4dv = NULL;
-PFNGLVERTEX4FPROC glad_glVertex4f = NULL;
-PFNGLVERTEX4FVPROC glad_glVertex4fv = NULL;
-PFNGLVERTEX4IPROC glad_glVertex4i = NULL;
-PFNGLVERTEX4IVPROC glad_glVertex4iv = NULL;
-PFNGLVERTEX4SPROC glad_glVertex4s = NULL;
-PFNGLVERTEX4SVPROC glad_glVertex4sv = NULL;
-PFNGLVERTEXATTRIB1DPROC glad_glVertexAttrib1d = NULL;
-PFNGLVERTEXATTRIB1DVPROC glad_glVertexAttrib1dv = NULL;
-PFNGLVERTEXATTRIB1FPROC glad_glVertexAttrib1f = NULL;
-PFNGLVERTEXATTRIB1FVPROC glad_glVertexAttrib1fv = NULL;
-PFNGLVERTEXATTRIB1SPROC glad_glVertexAttrib1s = NULL;
-PFNGLVERTEXATTRIB1SVPROC glad_glVertexAttrib1sv = NULL;
-PFNGLVERTEXATTRIB2DPROC glad_glVertexAttrib2d = NULL;
-PFNGLVERTEXATTRIB2DVPROC glad_glVertexAttrib2dv = NULL;
-PFNGLVERTEXATTRIB2FPROC glad_glVertexAttrib2f = NULL;
-PFNGLVERTEXATTRIB2FVPROC glad_glVertexAttrib2fv = NULL;
-PFNGLVERTEXATTRIB2SPROC glad_glVertexAttrib2s = NULL;
-PFNGLVERTEXATTRIB2SVPROC glad_glVertexAttrib2sv = NULL;
-PFNGLVERTEXATTRIB3DPROC glad_glVertexAttrib3d = NULL;
-PFNGLVERTEXATTRIB3DVPROC glad_glVertexAttrib3dv = NULL;
-PFNGLVERTEXATTRIB3FPROC glad_glVertexAttrib3f = NULL;
-PFNGLVERTEXATTRIB3FVPROC glad_glVertexAttrib3fv = NULL;
-PFNGLVERTEXATTRIB3SPROC glad_glVertexAttrib3s = NULL;
-PFNGLVERTEXATTRIB3SVPROC glad_glVertexAttrib3sv = NULL;
-PFNGLVERTEXATTRIB4NBVPROC glad_glVertexAttrib4Nbv = NULL;
-PFNGLVERTEXATTRIB4NIVPROC glad_glVertexAttrib4Niv = NULL;
-PFNGLVERTEXATTRIB4NSVPROC glad_glVertexAttrib4Nsv = NULL;
-PFNGLVERTEXATTRIB4NUBPROC glad_glVertexAttrib4Nub = NULL;
-PFNGLVERTEXATTRIB4NUBVPROC glad_glVertexAttrib4Nubv = NULL;
-PFNGLVERTEXATTRIB4NUIVPROC glad_glVertexAttrib4Nuiv = NULL;
-PFNGLVERTEXATTRIB4NUSVPROC glad_glVertexAttrib4Nusv = NULL;
-PFNGLVERTEXATTRIB4BVPROC glad_glVertexAttrib4bv = NULL;
-PFNGLVERTEXATTRIB4DPROC glad_glVertexAttrib4d = NULL;
-PFNGLVERTEXATTRIB4DVPROC glad_glVertexAttrib4dv = NULL;
-PFNGLVERTEXATTRIB4FPROC glad_glVertexAttrib4f = NULL;
-PFNGLVERTEXATTRIB4FVPROC glad_glVertexAttrib4fv = NULL;
-PFNGLVERTEXATTRIB4IVPROC glad_glVertexAttrib4iv = NULL;
-PFNGLVERTEXATTRIB4SPROC glad_glVertexAttrib4s = NULL;
-PFNGLVERTEXATTRIB4SVPROC glad_glVertexAttrib4sv = NULL;
-PFNGLVERTEXATTRIB4UBVPROC glad_glVertexAttrib4ubv = NULL;
-PFNGLVERTEXATTRIB4UIVPROC glad_glVertexAttrib4uiv = NULL;
-PFNGLVERTEXATTRIB4USVPROC glad_glVertexAttrib4usv = NULL;
-PFNGLVERTEXATTRIBDIVISORPROC glad_glVertexAttribDivisor = NULL;
-PFNGLVERTEXATTRIBI1IPROC glad_glVertexAttribI1i = NULL;
-PFNGLVERTEXATTRIBI1IVPROC glad_glVertexAttribI1iv = NULL;
-PFNGLVERTEXATTRIBI1UIPROC glad_glVertexAttribI1ui = NULL;
-PFNGLVERTEXATTRIBI1UIVPROC glad_glVertexAttribI1uiv = NULL;
-PFNGLVERTEXATTRIBI2IPROC glad_glVertexAttribI2i = NULL;
-PFNGLVERTEXATTRIBI2IVPROC glad_glVertexAttribI2iv = NULL;
-PFNGLVERTEXATTRIBI2UIPROC glad_glVertexAttribI2ui = NULL;
-PFNGLVERTEXATTRIBI2UIVPROC glad_glVertexAttribI2uiv = NULL;
-PFNGLVERTEXATTRIBI3IPROC glad_glVertexAttribI3i = NULL;
-PFNGLVERTEXATTRIBI3IVPROC glad_glVertexAttribI3iv = NULL;
-PFNGLVERTEXATTRIBI3UIPROC glad_glVertexAttribI3ui = NULL;
-PFNGLVERTEXATTRIBI3UIVPROC glad_glVertexAttribI3uiv = NULL;
-PFNGLVERTEXATTRIBI4BVPROC glad_glVertexAttribI4bv = NULL;
-PFNGLVERTEXATTRIBI4IPROC glad_glVertexAttribI4i = NULL;
-PFNGLVERTEXATTRIBI4IVPROC glad_glVertexAttribI4iv = NULL;
-PFNGLVERTEXATTRIBI4SVPROC glad_glVertexAttribI4sv = NULL;
-PFNGLVERTEXATTRIBI4UBVPROC glad_glVertexAttribI4ubv = NULL;
-PFNGLVERTEXATTRIBI4UIPROC glad_glVertexAttribI4ui = NULL;
-PFNGLVERTEXATTRIBI4UIVPROC glad_glVertexAttribI4uiv = NULL;
-PFNGLVERTEXATTRIBI4USVPROC glad_glVertexAttribI4usv = NULL;
-PFNGLVERTEXATTRIBIPOINTERPROC glad_glVertexAttribIPointer = NULL;
-PFNGLVERTEXATTRIBP1UIPROC glad_glVertexAttribP1ui = NULL;
-PFNGLVERTEXATTRIBP1UIVPROC glad_glVertexAttribP1uiv = NULL;
-PFNGLVERTEXATTRIBP2UIPROC glad_glVertexAttribP2ui = NULL;
-PFNGLVERTEXATTRIBP2UIVPROC glad_glVertexAttribP2uiv = NULL;
-PFNGLVERTEXATTRIBP3UIPROC glad_glVertexAttribP3ui = NULL;
-PFNGLVERTEXATTRIBP3UIVPROC glad_glVertexAttribP3uiv = NULL;
-PFNGLVERTEXATTRIBP4UIPROC glad_glVertexAttribP4ui = NULL;
-PFNGLVERTEXATTRIBP4UIVPROC glad_glVertexAttribP4uiv = NULL;
-PFNGLVERTEXATTRIBPOINTERPROC glad_glVertexAttribPointer = NULL;
-PFNGLVERTEXP2UIPROC glad_glVertexP2ui = NULL;
-PFNGLVERTEXP2UIVPROC glad_glVertexP2uiv = NULL;
-PFNGLVERTEXP3UIPROC glad_glVertexP3ui = NULL;
-PFNGLVERTEXP3UIVPROC glad_glVertexP3uiv = NULL;
-PFNGLVERTEXP4UIPROC glad_glVertexP4ui = NULL;
-PFNGLVERTEXP4UIVPROC glad_glVertexP4uiv = NULL;
-PFNGLVERTEXPOINTERPROC glad_glVertexPointer = NULL;
-PFNGLVIEWPORTPROC glad_glViewport = NULL;
-PFNGLWAITSYNCPROC glad_glWaitSync = NULL;
-PFNGLWINDOWPOS2DPROC glad_glWindowPos2d = NULL;
-PFNGLWINDOWPOS2DVPROC glad_glWindowPos2dv = NULL;
-PFNGLWINDOWPOS2FPROC glad_glWindowPos2f = NULL;
-PFNGLWINDOWPOS2FVPROC glad_glWindowPos2fv = NULL;
-PFNGLWINDOWPOS2IPROC glad_glWindowPos2i = NULL;
-PFNGLWINDOWPOS2IVPROC glad_glWindowPos2iv = NULL;
-PFNGLWINDOWPOS2SPROC glad_glWindowPos2s = NULL;
-PFNGLWINDOWPOS2SVPROC glad_glWindowPos2sv = NULL;
-PFNGLWINDOWPOS3DPROC glad_glWindowPos3d = NULL;
-PFNGLWINDOWPOS3DVPROC glad_glWindowPos3dv = NULL;
-PFNGLWINDOWPOS3FPROC glad_glWindowPos3f = NULL;
-PFNGLWINDOWPOS3FVPROC glad_glWindowPos3fv = NULL;
-PFNGLWINDOWPOS3IPROC glad_glWindowPos3i = NULL;
-PFNGLWINDOWPOS3IVPROC glad_glWindowPos3iv = NULL;
-PFNGLWINDOWPOS3SPROC glad_glWindowPos3s = NULL;
-PFNGLWINDOWPOS3SVPROC glad_glWindowPos3sv = NULL;
-int GLAD_GL_ARB_debug_output = 0;
-int GLAD_GL_ARB_framebuffer_object = 0;
-int GLAD_GL_EXT_framebuffer_blit = 0;
-int GLAD_GL_EXT_framebuffer_multisample = 0;
-int GLAD_GL_EXT_framebuffer_object = 0;
-PFNGLDEBUGMESSAGECONTROLARBPROC glad_glDebugMessageControlARB = NULL;
-PFNGLDEBUGMESSAGEINSERTARBPROC glad_glDebugMessageInsertARB = NULL;
-PFNGLDEBUGMESSAGECALLBACKARBPROC glad_glDebugMessageCallbackARB = NULL;
-PFNGLGETDEBUGMESSAGELOGARBPROC glad_glGetDebugMessageLogARB = NULL;
-PFNGLBLITFRAMEBUFFEREXTPROC glad_glBlitFramebufferEXT = NULL;
-PFNGLRENDERBUFFERSTORAGEMULTISAMPLEEXTPROC glad_glRenderbufferStorageMultisampleEXT = NULL;
-PFNGLISRENDERBUFFEREXTPROC glad_glIsRenderbufferEXT = NULL;
-PFNGLBINDRENDERBUFFEREXTPROC glad_glBindRenderbufferEXT = NULL;
-PFNGLDELETERENDERBUFFERSEXTPROC glad_glDeleteRenderbuffersEXT = NULL;
-PFNGLGENRENDERBUFFERSEXTPROC glad_glGenRenderbuffersEXT = NULL;
-PFNGLRENDERBUFFERSTORAGEEXTPROC glad_glRenderbufferStorageEXT = NULL;
-PFNGLGETRENDERBUFFERPARAMETERIVEXTPROC glad_glGetRenderbufferParameterivEXT = NULL;
-PFNGLISFRAMEBUFFEREXTPROC glad_glIsFramebufferEXT = NULL;
-PFNGLBINDFRAMEBUFFEREXTPROC glad_glBindFramebufferEXT = NULL;
-PFNGLDELETEFRAMEBUFFERSEXTPROC glad_glDeleteFramebuffersEXT = NULL;
-PFNGLGENFRAMEBUFFERSEXTPROC glad_glGenFramebuffersEXT = NULL;
-PFNGLCHECKFRAMEBUFFERSTATUSEXTPROC glad_glCheckFramebufferStatusEXT = NULL;
-PFNGLFRAMEBUFFERTEXTURE1DEXTPROC glad_glFramebufferTexture1DEXT = NULL;
-PFNGLFRAMEBUFFERTEXTURE2DEXTPROC glad_glFramebufferTexture2DEXT = NULL;
-PFNGLFRAMEBUFFERTEXTURE3DEXTPROC glad_glFramebufferTexture3DEXT = NULL;
-PFNGLFRAMEBUFFERRENDERBUFFEREXTPROC glad_glFramebufferRenderbufferEXT = NULL;
-PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVEXTPROC glad_glGetFramebufferAttachmentParameterivEXT = NULL;
-PFNGLGENERATEMIPMAPEXTPROC glad_glGenerateMipmapEXT = NULL;
-static void load_GL_VERSION_1_0(GLADloadproc load) {
- if(!GLAD_GL_VERSION_1_0) return;
- glad_glCullFace = (PFNGLCULLFACEPROC)load("glCullFace");
- glad_glFrontFace = (PFNGLFRONTFACEPROC)load("glFrontFace");
- glad_glHint = (PFNGLHINTPROC)load("glHint");
- glad_glLineWidth = (PFNGLLINEWIDTHPROC)load("glLineWidth");
- glad_glPointSize = (PFNGLPOINTSIZEPROC)load("glPointSize");
- glad_glPolygonMode = (PFNGLPOLYGONMODEPROC)load("glPolygonMode");
- glad_glScissor = (PFNGLSCISSORPROC)load("glScissor");
- glad_glTexParameterf = (PFNGLTEXPARAMETERFPROC)load("glTexParameterf");
- glad_glTexParameterfv = (PFNGLTEXPARAMETERFVPROC)load("glTexParameterfv");
- glad_glTexParameteri = (PFNGLTEXPARAMETERIPROC)load("glTexParameteri");
- glad_glTexParameteriv = (PFNGLTEXPARAMETERIVPROC)load("glTexParameteriv");
- glad_glTexImage1D = (PFNGLTEXIMAGE1DPROC)load("glTexImage1D");
- glad_glTexImage2D = (PFNGLTEXIMAGE2DPROC)load("glTexImage2D");
- glad_glDrawBuffer = (PFNGLDRAWBUFFERPROC)load("glDrawBuffer");
- glad_glClear = (PFNGLCLEARPROC)load("glClear");
- glad_glClearColor = (PFNGLCLEARCOLORPROC)load("glClearColor");
- glad_glClearStencil = (PFNGLCLEARSTENCILPROC)load("glClearStencil");
- glad_glClearDepth = (PFNGLCLEARDEPTHPROC)load("glClearDepth");
- glad_glStencilMask = (PFNGLSTENCILMASKPROC)load("glStencilMask");
- glad_glColorMask = (PFNGLCOLORMASKPROC)load("glColorMask");
- glad_glDepthMask = (PFNGLDEPTHMASKPROC)load("glDepthMask");
- glad_glDisable = (PFNGLDISABLEPROC)load("glDisable");
- glad_glEnable = (PFNGLENABLEPROC)load("glEnable");
- glad_glFinish = (PFNGLFINISHPROC)load("glFinish");
- glad_glFlush = (PFNGLFLUSHPROC)load("glFlush");
- glad_glBlendFunc = (PFNGLBLENDFUNCPROC)load("glBlendFunc");
- glad_glLogicOp = (PFNGLLOGICOPPROC)load("glLogicOp");
- glad_glStencilFunc = (PFNGLSTENCILFUNCPROC)load("glStencilFunc");
- glad_glStencilOp = (PFNGLSTENCILOPPROC)load("glStencilOp");
- glad_glDepthFunc = (PFNGLDEPTHFUNCPROC)load("glDepthFunc");
- glad_glPixelStoref = (PFNGLPIXELSTOREFPROC)load("glPixelStoref");
- glad_glPixelStorei = (PFNGLPIXELSTOREIPROC)load("glPixelStorei");
- glad_glReadBuffer = (PFNGLREADBUFFERPROC)load("glReadBuffer");
- glad_glReadPixels = (PFNGLREADPIXELSPROC)load("glReadPixels");
- glad_glGetBooleanv = (PFNGLGETBOOLEANVPROC)load("glGetBooleanv");
- glad_glGetDoublev = (PFNGLGETDOUBLEVPROC)load("glGetDoublev");
- glad_glGetError = (PFNGLGETERRORPROC)load("glGetError");
- glad_glGetFloatv = (PFNGLGETFLOATVPROC)load("glGetFloatv");
- glad_glGetIntegerv = (PFNGLGETINTEGERVPROC)load("glGetIntegerv");
- glad_glGetString = (PFNGLGETSTRINGPROC)load("glGetString");
- glad_glGetTexImage = (PFNGLGETTEXIMAGEPROC)load("glGetTexImage");
- glad_glGetTexParameterfv = (PFNGLGETTEXPARAMETERFVPROC)load("glGetTexParameterfv");
- glad_glGetTexParameteriv = (PFNGLGETTEXPARAMETERIVPROC)load("glGetTexParameteriv");
- glad_glGetTexLevelParameterfv = (PFNGLGETTEXLEVELPARAMETERFVPROC)load("glGetTexLevelParameterfv");
- glad_glGetTexLevelParameteriv = (PFNGLGETTEXLEVELPARAMETERIVPROC)load("glGetTexLevelParameteriv");
- glad_glIsEnabled = (PFNGLISENABLEDPROC)load("glIsEnabled");
- glad_glDepthRange = (PFNGLDEPTHRANGEPROC)load("glDepthRange");
- glad_glViewport = (PFNGLVIEWPORTPROC)load("glViewport");
- glad_glNewList = (PFNGLNEWLISTPROC)load("glNewList");
- glad_glEndList = (PFNGLENDLISTPROC)load("glEndList");
- glad_glCallList = (PFNGLCALLLISTPROC)load("glCallList");
- glad_glCallLists = (PFNGLCALLLISTSPROC)load("glCallLists");
- glad_glDeleteLists = (PFNGLDELETELISTSPROC)load("glDeleteLists");
- glad_glGenLists = (PFNGLGENLISTSPROC)load("glGenLists");
- glad_glListBase = (PFNGLLISTBASEPROC)load("glListBase");
- glad_glBegin = (PFNGLBEGINPROC)load("glBegin");
- glad_glBitmap = (PFNGLBITMAPPROC)load("glBitmap");
- glad_glColor3b = (PFNGLCOLOR3BPROC)load("glColor3b");
- glad_glColor3bv = (PFNGLCOLOR3BVPROC)load("glColor3bv");
- glad_glColor3d = (PFNGLCOLOR3DPROC)load("glColor3d");
- glad_glColor3dv = (PFNGLCOLOR3DVPROC)load("glColor3dv");
- glad_glColor3f = (PFNGLCOLOR3FPROC)load("glColor3f");
- glad_glColor3fv = (PFNGLCOLOR3FVPROC)load("glColor3fv");
- glad_glColor3i = (PFNGLCOLOR3IPROC)load("glColor3i");
- glad_glColor3iv = (PFNGLCOLOR3IVPROC)load("glColor3iv");
- glad_glColor3s = (PFNGLCOLOR3SPROC)load("glColor3s");
- glad_glColor3sv = (PFNGLCOLOR3SVPROC)load("glColor3sv");
- glad_glColor3ub = (PFNGLCOLOR3UBPROC)load("glColor3ub");
- glad_glColor3ubv = (PFNGLCOLOR3UBVPROC)load("glColor3ubv");
- glad_glColor3ui = (PFNGLCOLOR3UIPROC)load("glColor3ui");
- glad_glColor3uiv = (PFNGLCOLOR3UIVPROC)load("glColor3uiv");
- glad_glColor3us = (PFNGLCOLOR3USPROC)load("glColor3us");
- glad_glColor3usv = (PFNGLCOLOR3USVPROC)load("glColor3usv");
- glad_glColor4b = (PFNGLCOLOR4BPROC)load("glColor4b");
- glad_glColor4bv = (PFNGLCOLOR4BVPROC)load("glColor4bv");
- glad_glColor4d = (PFNGLCOLOR4DPROC)load("glColor4d");
- glad_glColor4dv = (PFNGLCOLOR4DVPROC)load("glColor4dv");
- glad_glColor4f = (PFNGLCOLOR4FPROC)load("glColor4f");
- glad_glColor4fv = (PFNGLCOLOR4FVPROC)load("glColor4fv");
- glad_glColor4i = (PFNGLCOLOR4IPROC)load("glColor4i");
- glad_glColor4iv = (PFNGLCOLOR4IVPROC)load("glColor4iv");
- glad_glColor4s = (PFNGLCOLOR4SPROC)load("glColor4s");
- glad_glColor4sv = (PFNGLCOLOR4SVPROC)load("glColor4sv");
- glad_glColor4ub = (PFNGLCOLOR4UBPROC)load("glColor4ub");
- glad_glColor4ubv = (PFNGLCOLOR4UBVPROC)load("glColor4ubv");
- glad_glColor4ui = (PFNGLCOLOR4UIPROC)load("glColor4ui");
- glad_glColor4uiv = (PFNGLCOLOR4UIVPROC)load("glColor4uiv");
- glad_glColor4us = (PFNGLCOLOR4USPROC)load("glColor4us");
- glad_glColor4usv = (PFNGLCOLOR4USVPROC)load("glColor4usv");
- glad_glEdgeFlag = (PFNGLEDGEFLAGPROC)load("glEdgeFlag");
- glad_glEdgeFlagv = (PFNGLEDGEFLAGVPROC)load("glEdgeFlagv");
- glad_glEnd = (PFNGLENDPROC)load("glEnd");
- glad_glIndexd = (PFNGLINDEXDPROC)load("glIndexd");
- glad_glIndexdv = (PFNGLINDEXDVPROC)load("glIndexdv");
- glad_glIndexf = (PFNGLINDEXFPROC)load("glIndexf");
- glad_glIndexfv = (PFNGLINDEXFVPROC)load("glIndexfv");
- glad_glIndexi = (PFNGLINDEXIPROC)load("glIndexi");
- glad_glIndexiv = (PFNGLINDEXIVPROC)load("glIndexiv");
- glad_glIndexs = (PFNGLINDEXSPROC)load("glIndexs");
- glad_glIndexsv = (PFNGLINDEXSVPROC)load("glIndexsv");
- glad_glNormal3b = (PFNGLNORMAL3BPROC)load("glNormal3b");
- glad_glNormal3bv = (PFNGLNORMAL3BVPROC)load("glNormal3bv");
- glad_glNormal3d = (PFNGLNORMAL3DPROC)load("glNormal3d");
- glad_glNormal3dv = (PFNGLNORMAL3DVPROC)load("glNormal3dv");
- glad_glNormal3f = (PFNGLNORMAL3FPROC)load("glNormal3f");
- glad_glNormal3fv = (PFNGLNORMAL3FVPROC)load("glNormal3fv");
- glad_glNormal3i = (PFNGLNORMAL3IPROC)load("glNormal3i");
- glad_glNormal3iv = (PFNGLNORMAL3IVPROC)load("glNormal3iv");
- glad_glNormal3s = (PFNGLNORMAL3SPROC)load("glNormal3s");
- glad_glNormal3sv = (PFNGLNORMAL3SVPROC)load("glNormal3sv");
- glad_glRasterPos2d = (PFNGLRASTERPOS2DPROC)load("glRasterPos2d");
- glad_glRasterPos2dv = (PFNGLRASTERPOS2DVPROC)load("glRasterPos2dv");
- glad_glRasterPos2f = (PFNGLRASTERPOS2FPROC)load("glRasterPos2f");
- glad_glRasterPos2fv = (PFNGLRASTERPOS2FVPROC)load("glRasterPos2fv");
- glad_glRasterPos2i = (PFNGLRASTERPOS2IPROC)load("glRasterPos2i");
- glad_glRasterPos2iv = (PFNGLRASTERPOS2IVPROC)load("glRasterPos2iv");
- glad_glRasterPos2s = (PFNGLRASTERPOS2SPROC)load("glRasterPos2s");
- glad_glRasterPos2sv = (PFNGLRASTERPOS2SVPROC)load("glRasterPos2sv");
- glad_glRasterPos3d = (PFNGLRASTERPOS3DPROC)load("glRasterPos3d");
- glad_glRasterPos3dv = (PFNGLRASTERPOS3DVPROC)load("glRasterPos3dv");
- glad_glRasterPos3f = (PFNGLRASTERPOS3FPROC)load("glRasterPos3f");
- glad_glRasterPos3fv = (PFNGLRASTERPOS3FVPROC)load("glRasterPos3fv");
- glad_glRasterPos3i = (PFNGLRASTERPOS3IPROC)load("glRasterPos3i");
- glad_glRasterPos3iv = (PFNGLRASTERPOS3IVPROC)load("glRasterPos3iv");
- glad_glRasterPos3s = (PFNGLRASTERPOS3SPROC)load("glRasterPos3s");
- glad_glRasterPos3sv = (PFNGLRASTERPOS3SVPROC)load("glRasterPos3sv");
- glad_glRasterPos4d = (PFNGLRASTERPOS4DPROC)load("glRasterPos4d");
- glad_glRasterPos4dv = (PFNGLRASTERPOS4DVPROC)load("glRasterPos4dv");
- glad_glRasterPos4f = (PFNGLRASTERPOS4FPROC)load("glRasterPos4f");
- glad_glRasterPos4fv = (PFNGLRASTERPOS4FVPROC)load("glRasterPos4fv");
- glad_glRasterPos4i = (PFNGLRASTERPOS4IPROC)load("glRasterPos4i");
- glad_glRasterPos4iv = (PFNGLRASTERPOS4IVPROC)load("glRasterPos4iv");
- glad_glRasterPos4s = (PFNGLRASTERPOS4SPROC)load("glRasterPos4s");
- glad_glRasterPos4sv = (PFNGLRASTERPOS4SVPROC)load("glRasterPos4sv");
- glad_glRectd = (PFNGLRECTDPROC)load("glRectd");
- glad_glRectdv = (PFNGLRECTDVPROC)load("glRectdv");
- glad_glRectf = (PFNGLRECTFPROC)load("glRectf");
- glad_glRectfv = (PFNGLRECTFVPROC)load("glRectfv");
- glad_glRecti = (PFNGLRECTIPROC)load("glRecti");
- glad_glRectiv = (PFNGLRECTIVPROC)load("glRectiv");
- glad_glRects = (PFNGLRECTSPROC)load("glRects");
- glad_glRectsv = (PFNGLRECTSVPROC)load("glRectsv");
- glad_glTexCoord1d = (PFNGLTEXCOORD1DPROC)load("glTexCoord1d");
- glad_glTexCoord1dv = (PFNGLTEXCOORD1DVPROC)load("glTexCoord1dv");
- glad_glTexCoord1f = (PFNGLTEXCOORD1FPROC)load("glTexCoord1f");
- glad_glTexCoord1fv = (PFNGLTEXCOORD1FVPROC)load("glTexCoord1fv");
- glad_glTexCoord1i = (PFNGLTEXCOORD1IPROC)load("glTexCoord1i");
- glad_glTexCoord1iv = (PFNGLTEXCOORD1IVPROC)load("glTexCoord1iv");
- glad_glTexCoord1s = (PFNGLTEXCOORD1SPROC)load("glTexCoord1s");
- glad_glTexCoord1sv = (PFNGLTEXCOORD1SVPROC)load("glTexCoord1sv");
- glad_glTexCoord2d = (PFNGLTEXCOORD2DPROC)load("glTexCoord2d");
- glad_glTexCoord2dv = (PFNGLTEXCOORD2DVPROC)load("glTexCoord2dv");
- glad_glTexCoord2f = (PFNGLTEXCOORD2FPROC)load("glTexCoord2f");
- glad_glTexCoord2fv = (PFNGLTEXCOORD2FVPROC)load("glTexCoord2fv");
- glad_glTexCoord2i = (PFNGLTEXCOORD2IPROC)load("glTexCoord2i");
- glad_glTexCoord2iv = (PFNGLTEXCOORD2IVPROC)load("glTexCoord2iv");
- glad_glTexCoord2s = (PFNGLTEXCOORD2SPROC)load("glTexCoord2s");
- glad_glTexCoord2sv = (PFNGLTEXCOORD2SVPROC)load("glTexCoord2sv");
- glad_glTexCoord3d = (PFNGLTEXCOORD3DPROC)load("glTexCoord3d");
- glad_glTexCoord3dv = (PFNGLTEXCOORD3DVPROC)load("glTexCoord3dv");
- glad_glTexCoord3f = (PFNGLTEXCOORD3FPROC)load("glTexCoord3f");
- glad_glTexCoord3fv = (PFNGLTEXCOORD3FVPROC)load("glTexCoord3fv");
- glad_glTexCoord3i = (PFNGLTEXCOORD3IPROC)load("glTexCoord3i");
- glad_glTexCoord3iv = (PFNGLTEXCOORD3IVPROC)load("glTexCoord3iv");
- glad_glTexCoord3s = (PFNGLTEXCOORD3SPROC)load("glTexCoord3s");
- glad_glTexCoord3sv = (PFNGLTEXCOORD3SVPROC)load("glTexCoord3sv");
- glad_glTexCoord4d = (PFNGLTEXCOORD4DPROC)load("glTexCoord4d");
- glad_glTexCoord4dv = (PFNGLTEXCOORD4DVPROC)load("glTexCoord4dv");
- glad_glTexCoord4f = (PFNGLTEXCOORD4FPROC)load("glTexCoord4f");
- glad_glTexCoord4fv = (PFNGLTEXCOORD4FVPROC)load("glTexCoord4fv");
- glad_glTexCoord4i = (PFNGLTEXCOORD4IPROC)load("glTexCoord4i");
- glad_glTexCoord4iv = (PFNGLTEXCOORD4IVPROC)load("glTexCoord4iv");
- glad_glTexCoord4s = (PFNGLTEXCOORD4SPROC)load("glTexCoord4s");
- glad_glTexCoord4sv = (PFNGLTEXCOORD4SVPROC)load("glTexCoord4sv");
- glad_glVertex2d = (PFNGLVERTEX2DPROC)load("glVertex2d");
- glad_glVertex2dv = (PFNGLVERTEX2DVPROC)load("glVertex2dv");
- glad_glVertex2f = (PFNGLVERTEX2FPROC)load("glVertex2f");
- glad_glVertex2fv = (PFNGLVERTEX2FVPROC)load("glVertex2fv");
- glad_glVertex2i = (PFNGLVERTEX2IPROC)load("glVertex2i");
- glad_glVertex2iv = (PFNGLVERTEX2IVPROC)load("glVertex2iv");
- glad_glVertex2s = (PFNGLVERTEX2SPROC)load("glVertex2s");
- glad_glVertex2sv = (PFNGLVERTEX2SVPROC)load("glVertex2sv");
- glad_glVertex3d = (PFNGLVERTEX3DPROC)load("glVertex3d");
- glad_glVertex3dv = (PFNGLVERTEX3DVPROC)load("glVertex3dv");
- glad_glVertex3f = (PFNGLVERTEX3FPROC)load("glVertex3f");
- glad_glVertex3fv = (PFNGLVERTEX3FVPROC)load("glVertex3fv");
- glad_glVertex3i = (PFNGLVERTEX3IPROC)load("glVertex3i");
- glad_glVertex3iv = (PFNGLVERTEX3IVPROC)load("glVertex3iv");
- glad_glVertex3s = (PFNGLVERTEX3SPROC)load("glVertex3s");
- glad_glVertex3sv = (PFNGLVERTEX3SVPROC)load("glVertex3sv");
- glad_glVertex4d = (PFNGLVERTEX4DPROC)load("glVertex4d");
- glad_glVertex4dv = (PFNGLVERTEX4DVPROC)load("glVertex4dv");
- glad_glVertex4f = (PFNGLVERTEX4FPROC)load("glVertex4f");
- glad_glVertex4fv = (PFNGLVERTEX4FVPROC)load("glVertex4fv");
- glad_glVertex4i = (PFNGLVERTEX4IPROC)load("glVertex4i");
- glad_glVertex4iv = (PFNGLVERTEX4IVPROC)load("glVertex4iv");
- glad_glVertex4s = (PFNGLVERTEX4SPROC)load("glVertex4s");
- glad_glVertex4sv = (PFNGLVERTEX4SVPROC)load("glVertex4sv");
- glad_glClipPlane = (PFNGLCLIPPLANEPROC)load("glClipPlane");
- glad_glColorMaterial = (PFNGLCOLORMATERIALPROC)load("glColorMaterial");
- glad_glFogf = (PFNGLFOGFPROC)load("glFogf");
- glad_glFogfv = (PFNGLFOGFVPROC)load("glFogfv");
- glad_glFogi = (PFNGLFOGIPROC)load("glFogi");
- glad_glFogiv = (PFNGLFOGIVPROC)load("glFogiv");
- glad_glLightf = (PFNGLLIGHTFPROC)load("glLightf");
- glad_glLightfv = (PFNGLLIGHTFVPROC)load("glLightfv");
- glad_glLighti = (PFNGLLIGHTIPROC)load("glLighti");
- glad_glLightiv = (PFNGLLIGHTIVPROC)load("glLightiv");
- glad_glLightModelf = (PFNGLLIGHTMODELFPROC)load("glLightModelf");
- glad_glLightModelfv = (PFNGLLIGHTMODELFVPROC)load("glLightModelfv");
- glad_glLightModeli = (PFNGLLIGHTMODELIPROC)load("glLightModeli");
- glad_glLightModeliv = (PFNGLLIGHTMODELIVPROC)load("glLightModeliv");
- glad_glLineStipple = (PFNGLLINESTIPPLEPROC)load("glLineStipple");
- glad_glMaterialf = (PFNGLMATERIALFPROC)load("glMaterialf");
- glad_glMaterialfv = (PFNGLMATERIALFVPROC)load("glMaterialfv");
- glad_glMateriali = (PFNGLMATERIALIPROC)load("glMateriali");
- glad_glMaterialiv = (PFNGLMATERIALIVPROC)load("glMaterialiv");
- glad_glPolygonStipple = (PFNGLPOLYGONSTIPPLEPROC)load("glPolygonStipple");
- glad_glShadeModel = (PFNGLSHADEMODELPROC)load("glShadeModel");
- glad_glTexEnvf = (PFNGLTEXENVFPROC)load("glTexEnvf");
- glad_glTexEnvfv = (PFNGLTEXENVFVPROC)load("glTexEnvfv");
- glad_glTexEnvi = (PFNGLTEXENVIPROC)load("glTexEnvi");
- glad_glTexEnviv = (PFNGLTEXENVIVPROC)load("glTexEnviv");
- glad_glTexGend = (PFNGLTEXGENDPROC)load("glTexGend");
- glad_glTexGendv = (PFNGLTEXGENDVPROC)load("glTexGendv");
- glad_glTexGenf = (PFNGLTEXGENFPROC)load("glTexGenf");
- glad_glTexGenfv = (PFNGLTEXGENFVPROC)load("glTexGenfv");
- glad_glTexGeni = (PFNGLTEXGENIPROC)load("glTexGeni");
- glad_glTexGeniv = (PFNGLTEXGENIVPROC)load("glTexGeniv");
- glad_glFeedbackBuffer = (PFNGLFEEDBACKBUFFERPROC)load("glFeedbackBuffer");
- glad_glSelectBuffer = (PFNGLSELECTBUFFERPROC)load("glSelectBuffer");
- glad_glRenderMode = (PFNGLRENDERMODEPROC)load("glRenderMode");
- glad_glInitNames = (PFNGLINITNAMESPROC)load("glInitNames");
- glad_glLoadName = (PFNGLLOADNAMEPROC)load("glLoadName");
- glad_glPassThrough = (PFNGLPASSTHROUGHPROC)load("glPassThrough");
- glad_glPopName = (PFNGLPOPNAMEPROC)load("glPopName");
- glad_glPushName = (PFNGLPUSHNAMEPROC)load("glPushName");
- glad_glClearAccum = (PFNGLCLEARACCUMPROC)load("glClearAccum");
- glad_glClearIndex = (PFNGLCLEARINDEXPROC)load("glClearIndex");
- glad_glIndexMask = (PFNGLINDEXMASKPROC)load("glIndexMask");
- glad_glAccum = (PFNGLACCUMPROC)load("glAccum");
- glad_glPopAttrib = (PFNGLPOPATTRIBPROC)load("glPopAttrib");
- glad_glPushAttrib = (PFNGLPUSHATTRIBPROC)load("glPushAttrib");
- glad_glMap1d = (PFNGLMAP1DPROC)load("glMap1d");
- glad_glMap1f = (PFNGLMAP1FPROC)load("glMap1f");
- glad_glMap2d = (PFNGLMAP2DPROC)load("glMap2d");
- glad_glMap2f = (PFNGLMAP2FPROC)load("glMap2f");
- glad_glMapGrid1d = (PFNGLMAPGRID1DPROC)load("glMapGrid1d");
- glad_glMapGrid1f = (PFNGLMAPGRID1FPROC)load("glMapGrid1f");
- glad_glMapGrid2d = (PFNGLMAPGRID2DPROC)load("glMapGrid2d");
- glad_glMapGrid2f = (PFNGLMAPGRID2FPROC)load("glMapGrid2f");
- glad_glEvalCoord1d = (PFNGLEVALCOORD1DPROC)load("glEvalCoord1d");
- glad_glEvalCoord1dv = (PFNGLEVALCOORD1DVPROC)load("glEvalCoord1dv");
- glad_glEvalCoord1f = (PFNGLEVALCOORD1FPROC)load("glEvalCoord1f");
- glad_glEvalCoord1fv = (PFNGLEVALCOORD1FVPROC)load("glEvalCoord1fv");
- glad_glEvalCoord2d = (PFNGLEVALCOORD2DPROC)load("glEvalCoord2d");
- glad_glEvalCoord2dv = (PFNGLEVALCOORD2DVPROC)load("glEvalCoord2dv");
- glad_glEvalCoord2f = (PFNGLEVALCOORD2FPROC)load("glEvalCoord2f");
- glad_glEvalCoord2fv = (PFNGLEVALCOORD2FVPROC)load("glEvalCoord2fv");
- glad_glEvalMesh1 = (PFNGLEVALMESH1PROC)load("glEvalMesh1");
- glad_glEvalPoint1 = (PFNGLEVALPOINT1PROC)load("glEvalPoint1");
- glad_glEvalMesh2 = (PFNGLEVALMESH2PROC)load("glEvalMesh2");
- glad_glEvalPoint2 = (PFNGLEVALPOINT2PROC)load("glEvalPoint2");
- glad_glAlphaFunc = (PFNGLALPHAFUNCPROC)load("glAlphaFunc");
- glad_glPixelZoom = (PFNGLPIXELZOOMPROC)load("glPixelZoom");
- glad_glPixelTransferf = (PFNGLPIXELTRANSFERFPROC)load("glPixelTransferf");
- glad_glPixelTransferi = (PFNGLPIXELTRANSFERIPROC)load("glPixelTransferi");
- glad_glPixelMapfv = (PFNGLPIXELMAPFVPROC)load("glPixelMapfv");
- glad_glPixelMapuiv = (PFNGLPIXELMAPUIVPROC)load("glPixelMapuiv");
- glad_glPixelMapusv = (PFNGLPIXELMAPUSVPROC)load("glPixelMapusv");
- glad_glCopyPixels = (PFNGLCOPYPIXELSPROC)load("glCopyPixels");
- glad_glDrawPixels = (PFNGLDRAWPIXELSPROC)load("glDrawPixels");
- glad_glGetClipPlane = (PFNGLGETCLIPPLANEPROC)load("glGetClipPlane");
- glad_glGetLightfv = (PFNGLGETLIGHTFVPROC)load("glGetLightfv");
- glad_glGetLightiv = (PFNGLGETLIGHTIVPROC)load("glGetLightiv");
- glad_glGetMapdv = (PFNGLGETMAPDVPROC)load("glGetMapdv");
- glad_glGetMapfv = (PFNGLGETMAPFVPROC)load("glGetMapfv");
- glad_glGetMapiv = (PFNGLGETMAPIVPROC)load("glGetMapiv");
- glad_glGetMaterialfv = (PFNGLGETMATERIALFVPROC)load("glGetMaterialfv");
- glad_glGetMaterialiv = (PFNGLGETMATERIALIVPROC)load("glGetMaterialiv");
- glad_glGetPixelMapfv = (PFNGLGETPIXELMAPFVPROC)load("glGetPixelMapfv");
- glad_glGetPixelMapuiv = (PFNGLGETPIXELMAPUIVPROC)load("glGetPixelMapuiv");
- glad_glGetPixelMapusv = (PFNGLGETPIXELMAPUSVPROC)load("glGetPixelMapusv");
- glad_glGetPolygonStipple = (PFNGLGETPOLYGONSTIPPLEPROC)load("glGetPolygonStipple");
- glad_glGetTexEnvfv = (PFNGLGETTEXENVFVPROC)load("glGetTexEnvfv");
- glad_glGetTexEnviv = (PFNGLGETTEXENVIVPROC)load("glGetTexEnviv");
- glad_glGetTexGendv = (PFNGLGETTEXGENDVPROC)load("glGetTexGendv");
- glad_glGetTexGenfv = (PFNGLGETTEXGENFVPROC)load("glGetTexGenfv");
- glad_glGetTexGeniv = (PFNGLGETTEXGENIVPROC)load("glGetTexGeniv");
- glad_glIsList = (PFNGLISLISTPROC)load("glIsList");
- glad_glFrustum = (PFNGLFRUSTUMPROC)load("glFrustum");
- glad_glLoadIdentity = (PFNGLLOADIDENTITYPROC)load("glLoadIdentity");
- glad_glLoadMatrixf = (PFNGLLOADMATRIXFPROC)load("glLoadMatrixf");
- glad_glLoadMatrixd = (PFNGLLOADMATRIXDPROC)load("glLoadMatrixd");
- glad_glMatrixMode = (PFNGLMATRIXMODEPROC)load("glMatrixMode");
- glad_glMultMatrixf = (PFNGLMULTMATRIXFPROC)load("glMultMatrixf");
- glad_glMultMatrixd = (PFNGLMULTMATRIXDPROC)load("glMultMatrixd");
- glad_glOrtho = (PFNGLORTHOPROC)load("glOrtho");
- glad_glPopMatrix = (PFNGLPOPMATRIXPROC)load("glPopMatrix");
- glad_glPushMatrix = (PFNGLPUSHMATRIXPROC)load("glPushMatrix");
- glad_glRotated = (PFNGLROTATEDPROC)load("glRotated");
- glad_glRotatef = (PFNGLROTATEFPROC)load("glRotatef");
- glad_glScaled = (PFNGLSCALEDPROC)load("glScaled");
- glad_glScalef = (PFNGLSCALEFPROC)load("glScalef");
- glad_glTranslated = (PFNGLTRANSLATEDPROC)load("glTranslated");
- glad_glTranslatef = (PFNGLTRANSLATEFPROC)load("glTranslatef");
-}
-static void load_GL_VERSION_1_1(GLADloadproc load) {
- if(!GLAD_GL_VERSION_1_1) return;
- glad_glDrawArrays = (PFNGLDRAWARRAYSPROC)load("glDrawArrays");
- glad_glDrawElements = (PFNGLDRAWELEMENTSPROC)load("glDrawElements");
- glad_glGetPointerv = (PFNGLGETPOINTERVPROC)load("glGetPointerv");
- glad_glPolygonOffset = (PFNGLPOLYGONOFFSETPROC)load("glPolygonOffset");
- glad_glCopyTexImage1D = (PFNGLCOPYTEXIMAGE1DPROC)load("glCopyTexImage1D");
- glad_glCopyTexImage2D = (PFNGLCOPYTEXIMAGE2DPROC)load("glCopyTexImage2D");
- glad_glCopyTexSubImage1D = (PFNGLCOPYTEXSUBIMAGE1DPROC)load("glCopyTexSubImage1D");
- glad_glCopyTexSubImage2D = (PFNGLCOPYTEXSUBIMAGE2DPROC)load("glCopyTexSubImage2D");
- glad_glTexSubImage1D = (PFNGLTEXSUBIMAGE1DPROC)load("glTexSubImage1D");
- glad_glTexSubImage2D = (PFNGLTEXSUBIMAGE2DPROC)load("glTexSubImage2D");
- glad_glBindTexture = (PFNGLBINDTEXTUREPROC)load("glBindTexture");
- glad_glDeleteTextures = (PFNGLDELETETEXTURESPROC)load("glDeleteTextures");
- glad_glGenTextures = (PFNGLGENTEXTURESPROC)load("glGenTextures");
- glad_glIsTexture = (PFNGLISTEXTUREPROC)load("glIsTexture");
- glad_glArrayElement = (PFNGLARRAYELEMENTPROC)load("glArrayElement");
- glad_glColorPointer = (PFNGLCOLORPOINTERPROC)load("glColorPointer");
- glad_glDisableClientState = (PFNGLDISABLECLIENTSTATEPROC)load("glDisableClientState");
- glad_glEdgeFlagPointer = (PFNGLEDGEFLAGPOINTERPROC)load("glEdgeFlagPointer");
- glad_glEnableClientState = (PFNGLENABLECLIENTSTATEPROC)load("glEnableClientState");
- glad_glIndexPointer = (PFNGLINDEXPOINTERPROC)load("glIndexPointer");
- glad_glInterleavedArrays = (PFNGLINTERLEAVEDARRAYSPROC)load("glInterleavedArrays");
- glad_glNormalPointer = (PFNGLNORMALPOINTERPROC)load("glNormalPointer");
- glad_glTexCoordPointer = (PFNGLTEXCOORDPOINTERPROC)load("glTexCoordPointer");
- glad_glVertexPointer = (PFNGLVERTEXPOINTERPROC)load("glVertexPointer");
- glad_glAreTexturesResident = (PFNGLARETEXTURESRESIDENTPROC)load("glAreTexturesResident");
- glad_glPrioritizeTextures = (PFNGLPRIORITIZETEXTURESPROC)load("glPrioritizeTextures");
- glad_glIndexub = (PFNGLINDEXUBPROC)load("glIndexub");
- glad_glIndexubv = (PFNGLINDEXUBVPROC)load("glIndexubv");
- glad_glPopClientAttrib = (PFNGLPOPCLIENTATTRIBPROC)load("glPopClientAttrib");
- glad_glPushClientAttrib = (PFNGLPUSHCLIENTATTRIBPROC)load("glPushClientAttrib");
-}
-static void load_GL_VERSION_1_2(GLADloadproc load) {
- if(!GLAD_GL_VERSION_1_2) return;
- glad_glDrawRangeElements = (PFNGLDRAWRANGEELEMENTSPROC)load("glDrawRangeElements");
- glad_glTexImage3D = (PFNGLTEXIMAGE3DPROC)load("glTexImage3D");
- glad_glTexSubImage3D = (PFNGLTEXSUBIMAGE3DPROC)load("glTexSubImage3D");
- glad_glCopyTexSubImage3D = (PFNGLCOPYTEXSUBIMAGE3DPROC)load("glCopyTexSubImage3D");
-}
-static void load_GL_VERSION_1_3(GLADloadproc load) {
- if(!GLAD_GL_VERSION_1_3) return;
- glad_glActiveTexture = (PFNGLACTIVETEXTUREPROC)load("glActiveTexture");
- glad_glSampleCoverage = (PFNGLSAMPLECOVERAGEPROC)load("glSampleCoverage");
- glad_glCompressedTexImage3D = (PFNGLCOMPRESSEDTEXIMAGE3DPROC)load("glCompressedTexImage3D");
- glad_glCompressedTexImage2D = (PFNGLCOMPRESSEDTEXIMAGE2DPROC)load("glCompressedTexImage2D");
- glad_glCompressedTexImage1D = (PFNGLCOMPRESSEDTEXIMAGE1DPROC)load("glCompressedTexImage1D");
- glad_glCompressedTexSubImage3D = (PFNGLCOMPRESSEDTEXSUBIMAGE3DPROC)load("glCompressedTexSubImage3D");
- glad_glCompressedTexSubImage2D = (PFNGLCOMPRESSEDTEXSUBIMAGE2DPROC)load("glCompressedTexSubImage2D");
- glad_glCompressedTexSubImage1D = (PFNGLCOMPRESSEDTEXSUBIMAGE1DPROC)load("glCompressedTexSubImage1D");
- glad_glGetCompressedTexImage = (PFNGLGETCOMPRESSEDTEXIMAGEPROC)load("glGetCompressedTexImage");
- glad_glClientActiveTexture = (PFNGLCLIENTACTIVETEXTUREPROC)load("glClientActiveTexture");
- glad_glMultiTexCoord1d = (PFNGLMULTITEXCOORD1DPROC)load("glMultiTexCoord1d");
- glad_glMultiTexCoord1dv = (PFNGLMULTITEXCOORD1DVPROC)load("glMultiTexCoord1dv");
- glad_glMultiTexCoord1f = (PFNGLMULTITEXCOORD1FPROC)load("glMultiTexCoord1f");
- glad_glMultiTexCoord1fv = (PFNGLMULTITEXCOORD1FVPROC)load("glMultiTexCoord1fv");
- glad_glMultiTexCoord1i = (PFNGLMULTITEXCOORD1IPROC)load("glMultiTexCoord1i");
- glad_glMultiTexCoord1iv = (PFNGLMULTITEXCOORD1IVPROC)load("glMultiTexCoord1iv");
- glad_glMultiTexCoord1s = (PFNGLMULTITEXCOORD1SPROC)load("glMultiTexCoord1s");
- glad_glMultiTexCoord1sv = (PFNGLMULTITEXCOORD1SVPROC)load("glMultiTexCoord1sv");
- glad_glMultiTexCoord2d = (PFNGLMULTITEXCOORD2DPROC)load("glMultiTexCoord2d");
- glad_glMultiTexCoord2dv = (PFNGLMULTITEXCOORD2DVPROC)load("glMultiTexCoord2dv");
- glad_glMultiTexCoord2f = (PFNGLMULTITEXCOORD2FPROC)load("glMultiTexCoord2f");
- glad_glMultiTexCoord2fv = (PFNGLMULTITEXCOORD2FVPROC)load("glMultiTexCoord2fv");
- glad_glMultiTexCoord2i = (PFNGLMULTITEXCOORD2IPROC)load("glMultiTexCoord2i");
- glad_glMultiTexCoord2iv = (PFNGLMULTITEXCOORD2IVPROC)load("glMultiTexCoord2iv");
- glad_glMultiTexCoord2s = (PFNGLMULTITEXCOORD2SPROC)load("glMultiTexCoord2s");
- glad_glMultiTexCoord2sv = (PFNGLMULTITEXCOORD2SVPROC)load("glMultiTexCoord2sv");
- glad_glMultiTexCoord3d = (PFNGLMULTITEXCOORD3DPROC)load("glMultiTexCoord3d");
- glad_glMultiTexCoord3dv = (PFNGLMULTITEXCOORD3DVPROC)load("glMultiTexCoord3dv");
- glad_glMultiTexCoord3f = (PFNGLMULTITEXCOORD3FPROC)load("glMultiTexCoord3f");
- glad_glMultiTexCoord3fv = (PFNGLMULTITEXCOORD3FVPROC)load("glMultiTexCoord3fv");
- glad_glMultiTexCoord3i = (PFNGLMULTITEXCOORD3IPROC)load("glMultiTexCoord3i");
- glad_glMultiTexCoord3iv = (PFNGLMULTITEXCOORD3IVPROC)load("glMultiTexCoord3iv");
- glad_glMultiTexCoord3s = (PFNGLMULTITEXCOORD3SPROC)load("glMultiTexCoord3s");
- glad_glMultiTexCoord3sv = (PFNGLMULTITEXCOORD3SVPROC)load("glMultiTexCoord3sv");
- glad_glMultiTexCoord4d = (PFNGLMULTITEXCOORD4DPROC)load("glMultiTexCoord4d");
- glad_glMultiTexCoord4dv = (PFNGLMULTITEXCOORD4DVPROC)load("glMultiTexCoord4dv");
- glad_glMultiTexCoord4f = (PFNGLMULTITEXCOORD4FPROC)load("glMultiTexCoord4f");
- glad_glMultiTexCoord4fv = (PFNGLMULTITEXCOORD4FVPROC)load("glMultiTexCoord4fv");
- glad_glMultiTexCoord4i = (PFNGLMULTITEXCOORD4IPROC)load("glMultiTexCoord4i");
- glad_glMultiTexCoord4iv = (PFNGLMULTITEXCOORD4IVPROC)load("glMultiTexCoord4iv");
- glad_glMultiTexCoord4s = (PFNGLMULTITEXCOORD4SPROC)load("glMultiTexCoord4s");
- glad_glMultiTexCoord4sv = (PFNGLMULTITEXCOORD4SVPROC)load("glMultiTexCoord4sv");
- glad_glLoadTransposeMatrixf = (PFNGLLOADTRANSPOSEMATRIXFPROC)load("glLoadTransposeMatrixf");
- glad_glLoadTransposeMatrixd = (PFNGLLOADTRANSPOSEMATRIXDPROC)load("glLoadTransposeMatrixd");
- glad_glMultTransposeMatrixf = (PFNGLMULTTRANSPOSEMATRIXFPROC)load("glMultTransposeMatrixf");
- glad_glMultTransposeMatrixd = (PFNGLMULTTRANSPOSEMATRIXDPROC)load("glMultTransposeMatrixd");
-}
-static void load_GL_VERSION_1_4(GLADloadproc load) {
- if(!GLAD_GL_VERSION_1_4) return;
- glad_glBlendFuncSeparate = (PFNGLBLENDFUNCSEPARATEPROC)load("glBlendFuncSeparate");
- glad_glMultiDrawArrays = (PFNGLMULTIDRAWARRAYSPROC)load("glMultiDrawArrays");
- glad_glMultiDrawElements = (PFNGLMULTIDRAWELEMENTSPROC)load("glMultiDrawElements");
- glad_glPointParameterf = (PFNGLPOINTPARAMETERFPROC)load("glPointParameterf");
- glad_glPointParameterfv = (PFNGLPOINTPARAMETERFVPROC)load("glPointParameterfv");
- glad_glPointParameteri = (PFNGLPOINTPARAMETERIPROC)load("glPointParameteri");
- glad_glPointParameteriv = (PFNGLPOINTPARAMETERIVPROC)load("glPointParameteriv");
- glad_glFogCoordf = (PFNGLFOGCOORDFPROC)load("glFogCoordf");
- glad_glFogCoordfv = (PFNGLFOGCOORDFVPROC)load("glFogCoordfv");
- glad_glFogCoordd = (PFNGLFOGCOORDDPROC)load("glFogCoordd");
- glad_glFogCoorddv = (PFNGLFOGCOORDDVPROC)load("glFogCoorddv");
- glad_glFogCoordPointer = (PFNGLFOGCOORDPOINTERPROC)load("glFogCoordPointer");
- glad_glSecondaryColor3b = (PFNGLSECONDARYCOLOR3BPROC)load("glSecondaryColor3b");
- glad_glSecondaryColor3bv = (PFNGLSECONDARYCOLOR3BVPROC)load("glSecondaryColor3bv");
- glad_glSecondaryColor3d = (PFNGLSECONDARYCOLOR3DPROC)load("glSecondaryColor3d");
- glad_glSecondaryColor3dv = (PFNGLSECONDARYCOLOR3DVPROC)load("glSecondaryColor3dv");
- glad_glSecondaryColor3f = (PFNGLSECONDARYCOLOR3FPROC)load("glSecondaryColor3f");
- glad_glSecondaryColor3fv = (PFNGLSECONDARYCOLOR3FVPROC)load("glSecondaryColor3fv");
- glad_glSecondaryColor3i = (PFNGLSECONDARYCOLOR3IPROC)load("glSecondaryColor3i");
- glad_glSecondaryColor3iv = (PFNGLSECONDARYCOLOR3IVPROC)load("glSecondaryColor3iv");
- glad_glSecondaryColor3s = (PFNGLSECONDARYCOLOR3SPROC)load("glSecondaryColor3s");
- glad_glSecondaryColor3sv = (PFNGLSECONDARYCOLOR3SVPROC)load("glSecondaryColor3sv");
- glad_glSecondaryColor3ub = (PFNGLSECONDARYCOLOR3UBPROC)load("glSecondaryColor3ub");
- glad_glSecondaryColor3ubv = (PFNGLSECONDARYCOLOR3UBVPROC)load("glSecondaryColor3ubv");
- glad_glSecondaryColor3ui = (PFNGLSECONDARYCOLOR3UIPROC)load("glSecondaryColor3ui");
- glad_glSecondaryColor3uiv = (PFNGLSECONDARYCOLOR3UIVPROC)load("glSecondaryColor3uiv");
- glad_glSecondaryColor3us = (PFNGLSECONDARYCOLOR3USPROC)load("glSecondaryColor3us");
- glad_glSecondaryColor3usv = (PFNGLSECONDARYCOLOR3USVPROC)load("glSecondaryColor3usv");
- glad_glSecondaryColorPointer = (PFNGLSECONDARYCOLORPOINTERPROC)load("glSecondaryColorPointer");
- glad_glWindowPos2d = (PFNGLWINDOWPOS2DPROC)load("glWindowPos2d");
- glad_glWindowPos2dv = (PFNGLWINDOWPOS2DVPROC)load("glWindowPos2dv");
- glad_glWindowPos2f = (PFNGLWINDOWPOS2FPROC)load("glWindowPos2f");
- glad_glWindowPos2fv = (PFNGLWINDOWPOS2FVPROC)load("glWindowPos2fv");
- glad_glWindowPos2i = (PFNGLWINDOWPOS2IPROC)load("glWindowPos2i");
- glad_glWindowPos2iv = (PFNGLWINDOWPOS2IVPROC)load("glWindowPos2iv");
- glad_glWindowPos2s = (PFNGLWINDOWPOS2SPROC)load("glWindowPos2s");
- glad_glWindowPos2sv = (PFNGLWINDOWPOS2SVPROC)load("glWindowPos2sv");
- glad_glWindowPos3d = (PFNGLWINDOWPOS3DPROC)load("glWindowPos3d");
- glad_glWindowPos3dv = (PFNGLWINDOWPOS3DVPROC)load("glWindowPos3dv");
- glad_glWindowPos3f = (PFNGLWINDOWPOS3FPROC)load("glWindowPos3f");
- glad_glWindowPos3fv = (PFNGLWINDOWPOS3FVPROC)load("glWindowPos3fv");
- glad_glWindowPos3i = (PFNGLWINDOWPOS3IPROC)load("glWindowPos3i");
- glad_glWindowPos3iv = (PFNGLWINDOWPOS3IVPROC)load("glWindowPos3iv");
- glad_glWindowPos3s = (PFNGLWINDOWPOS3SPROC)load("glWindowPos3s");
- glad_glWindowPos3sv = (PFNGLWINDOWPOS3SVPROC)load("glWindowPos3sv");
- glad_glBlendColor = (PFNGLBLENDCOLORPROC)load("glBlendColor");
- glad_glBlendEquation = (PFNGLBLENDEQUATIONPROC)load("glBlendEquation");
-}
-static void load_GL_VERSION_1_5(GLADloadproc load) {
- if(!GLAD_GL_VERSION_1_5) return;
- glad_glGenQueries = (PFNGLGENQUERIESPROC)load("glGenQueries");
- glad_glDeleteQueries = (PFNGLDELETEQUERIESPROC)load("glDeleteQueries");
- glad_glIsQuery = (PFNGLISQUERYPROC)load("glIsQuery");
- glad_glBeginQuery = (PFNGLBEGINQUERYPROC)load("glBeginQuery");
- glad_glEndQuery = (PFNGLENDQUERYPROC)load("glEndQuery");
- glad_glGetQueryiv = (PFNGLGETQUERYIVPROC)load("glGetQueryiv");
- glad_glGetQueryObjectiv = (PFNGLGETQUERYOBJECTIVPROC)load("glGetQueryObjectiv");
- glad_glGetQueryObjectuiv = (PFNGLGETQUERYOBJECTUIVPROC)load("glGetQueryObjectuiv");
- glad_glBindBuffer = (PFNGLBINDBUFFERPROC)load("glBindBuffer");
- glad_glDeleteBuffers = (PFNGLDELETEBUFFERSPROC)load("glDeleteBuffers");
- glad_glGenBuffers = (PFNGLGENBUFFERSPROC)load("glGenBuffers");
- glad_glIsBuffer = (PFNGLISBUFFERPROC)load("glIsBuffer");
- glad_glBufferData = (PFNGLBUFFERDATAPROC)load("glBufferData");
- glad_glBufferSubData = (PFNGLBUFFERSUBDATAPROC)load("glBufferSubData");
- glad_glGetBufferSubData = (PFNGLGETBUFFERSUBDATAPROC)load("glGetBufferSubData");
- glad_glMapBuffer = (PFNGLMAPBUFFERPROC)load("glMapBuffer");
- glad_glUnmapBuffer = (PFNGLUNMAPBUFFERPROC)load("glUnmapBuffer");
- glad_glGetBufferParameteriv = (PFNGLGETBUFFERPARAMETERIVPROC)load("glGetBufferParameteriv");
- glad_glGetBufferPointerv = (PFNGLGETBUFFERPOINTERVPROC)load("glGetBufferPointerv");
-}
-static void load_GL_VERSION_2_0(GLADloadproc load) {
- if(!GLAD_GL_VERSION_2_0) return;
- glad_glBlendEquationSeparate = (PFNGLBLENDEQUATIONSEPARATEPROC)load("glBlendEquationSeparate");
- glad_glDrawBuffers = (PFNGLDRAWBUFFERSPROC)load("glDrawBuffers");
- glad_glStencilOpSeparate = (PFNGLSTENCILOPSEPARATEPROC)load("glStencilOpSeparate");
- glad_glStencilFuncSeparate = (PFNGLSTENCILFUNCSEPARATEPROC)load("glStencilFuncSeparate");
- glad_glStencilMaskSeparate = (PFNGLSTENCILMASKSEPARATEPROC)load("glStencilMaskSeparate");
- glad_glAttachShader = (PFNGLATTACHSHADERPROC)load("glAttachShader");
- glad_glBindAttribLocation = (PFNGLBINDATTRIBLOCATIONPROC)load("glBindAttribLocation");
- glad_glCompileShader = (PFNGLCOMPILESHADERPROC)load("glCompileShader");
- glad_glCreateProgram = (PFNGLCREATEPROGRAMPROC)load("glCreateProgram");
- glad_glCreateShader = (PFNGLCREATESHADERPROC)load("glCreateShader");
- glad_glDeleteProgram = (PFNGLDELETEPROGRAMPROC)load("glDeleteProgram");
- glad_glDeleteShader = (PFNGLDELETESHADERPROC)load("glDeleteShader");
- glad_glDetachShader = (PFNGLDETACHSHADERPROC)load("glDetachShader");
- glad_glDisableVertexAttribArray = (PFNGLDISABLEVERTEXATTRIBARRAYPROC)load("glDisableVertexAttribArray");
- glad_glEnableVertexAttribArray = (PFNGLENABLEVERTEXATTRIBARRAYPROC)load("glEnableVertexAttribArray");
- glad_glGetActiveAttrib = (PFNGLGETACTIVEATTRIBPROC)load("glGetActiveAttrib");
- glad_glGetActiveUniform = (PFNGLGETACTIVEUNIFORMPROC)load("glGetActiveUniform");
- glad_glGetAttachedShaders = (PFNGLGETATTACHEDSHADERSPROC)load("glGetAttachedShaders");
- glad_glGetAttribLocation = (PFNGLGETATTRIBLOCATIONPROC)load("glGetAttribLocation");
- glad_glGetProgramiv = (PFNGLGETPROGRAMIVPROC)load("glGetProgramiv");
- glad_glGetProgramInfoLog = (PFNGLGETPROGRAMINFOLOGPROC)load("glGetProgramInfoLog");
- glad_glGetShaderiv = (PFNGLGETSHADERIVPROC)load("glGetShaderiv");
- glad_glGetShaderInfoLog = (PFNGLGETSHADERINFOLOGPROC)load("glGetShaderInfoLog");
- glad_glGetShaderSource = (PFNGLGETSHADERSOURCEPROC)load("glGetShaderSource");
- glad_glGetUniformLocation = (PFNGLGETUNIFORMLOCATIONPROC)load("glGetUniformLocation");
- glad_glGetUniformfv = (PFNGLGETUNIFORMFVPROC)load("glGetUniformfv");
- glad_glGetUniformiv = (PFNGLGETUNIFORMIVPROC)load("glGetUniformiv");
- glad_glGetVertexAttribdv = (PFNGLGETVERTEXATTRIBDVPROC)load("glGetVertexAttribdv");
- glad_glGetVertexAttribfv = (PFNGLGETVERTEXATTRIBFVPROC)load("glGetVertexAttribfv");
- glad_glGetVertexAttribiv = (PFNGLGETVERTEXATTRIBIVPROC)load("glGetVertexAttribiv");
- glad_glGetVertexAttribPointerv = (PFNGLGETVERTEXATTRIBPOINTERVPROC)load("glGetVertexAttribPointerv");
- glad_glIsProgram = (PFNGLISPROGRAMPROC)load("glIsProgram");
- glad_glIsShader = (PFNGLISSHADERPROC)load("glIsShader");
- glad_glLinkProgram = (PFNGLLINKPROGRAMPROC)load("glLinkProgram");
- glad_glShaderSource = (PFNGLSHADERSOURCEPROC)load("glShaderSource");
- glad_glUseProgram = (PFNGLUSEPROGRAMPROC)load("glUseProgram");
- glad_glUniform1f = (PFNGLUNIFORM1FPROC)load("glUniform1f");
- glad_glUniform2f = (PFNGLUNIFORM2FPROC)load("glUniform2f");
- glad_glUniform3f = (PFNGLUNIFORM3FPROC)load("glUniform3f");
- glad_glUniform4f = (PFNGLUNIFORM4FPROC)load("glUniform4f");
- glad_glUniform1i = (PFNGLUNIFORM1IPROC)load("glUniform1i");
- glad_glUniform2i = (PFNGLUNIFORM2IPROC)load("glUniform2i");
- glad_glUniform3i = (PFNGLUNIFORM3IPROC)load("glUniform3i");
- glad_glUniform4i = (PFNGLUNIFORM4IPROC)load("glUniform4i");
- glad_glUniform1fv = (PFNGLUNIFORM1FVPROC)load("glUniform1fv");
- glad_glUniform2fv = (PFNGLUNIFORM2FVPROC)load("glUniform2fv");
- glad_glUniform3fv = (PFNGLUNIFORM3FVPROC)load("glUniform3fv");
- glad_glUniform4fv = (PFNGLUNIFORM4FVPROC)load("glUniform4fv");
- glad_glUniform1iv = (PFNGLUNIFORM1IVPROC)load("glUniform1iv");
- glad_glUniform2iv = (PFNGLUNIFORM2IVPROC)load("glUniform2iv");
- glad_glUniform3iv = (PFNGLUNIFORM3IVPROC)load("glUniform3iv");
- glad_glUniform4iv = (PFNGLUNIFORM4IVPROC)load("glUniform4iv");
- glad_glUniformMatrix2fv = (PFNGLUNIFORMMATRIX2FVPROC)load("glUniformMatrix2fv");
- glad_glUniformMatrix3fv = (PFNGLUNIFORMMATRIX3FVPROC)load("glUniformMatrix3fv");
- glad_glUniformMatrix4fv = (PFNGLUNIFORMMATRIX4FVPROC)load("glUniformMatrix4fv");
- glad_glValidateProgram = (PFNGLVALIDATEPROGRAMPROC)load("glValidateProgram");
- glad_glVertexAttrib1d = (PFNGLVERTEXATTRIB1DPROC)load("glVertexAttrib1d");
- glad_glVertexAttrib1dv = (PFNGLVERTEXATTRIB1DVPROC)load("glVertexAttrib1dv");
- glad_glVertexAttrib1f = (PFNGLVERTEXATTRIB1FPROC)load("glVertexAttrib1f");
- glad_glVertexAttrib1fv = (PFNGLVERTEXATTRIB1FVPROC)load("glVertexAttrib1fv");
- glad_glVertexAttrib1s = (PFNGLVERTEXATTRIB1SPROC)load("glVertexAttrib1s");
- glad_glVertexAttrib1sv = (PFNGLVERTEXATTRIB1SVPROC)load("glVertexAttrib1sv");
- glad_glVertexAttrib2d = (PFNGLVERTEXATTRIB2DPROC)load("glVertexAttrib2d");
- glad_glVertexAttrib2dv = (PFNGLVERTEXATTRIB2DVPROC)load("glVertexAttrib2dv");
- glad_glVertexAttrib2f = (PFNGLVERTEXATTRIB2FPROC)load("glVertexAttrib2f");
- glad_glVertexAttrib2fv = (PFNGLVERTEXATTRIB2FVPROC)load("glVertexAttrib2fv");
- glad_glVertexAttrib2s = (PFNGLVERTEXATTRIB2SPROC)load("glVertexAttrib2s");
- glad_glVertexAttrib2sv = (PFNGLVERTEXATTRIB2SVPROC)load("glVertexAttrib2sv");
- glad_glVertexAttrib3d = (PFNGLVERTEXATTRIB3DPROC)load("glVertexAttrib3d");
- glad_glVertexAttrib3dv = (PFNGLVERTEXATTRIB3DVPROC)load("glVertexAttrib3dv");
- glad_glVertexAttrib3f = (PFNGLVERTEXATTRIB3FPROC)load("glVertexAttrib3f");
- glad_glVertexAttrib3fv = (PFNGLVERTEXATTRIB3FVPROC)load("glVertexAttrib3fv");
- glad_glVertexAttrib3s = (PFNGLVERTEXATTRIB3SPROC)load("glVertexAttrib3s");
- glad_glVertexAttrib3sv = (PFNGLVERTEXATTRIB3SVPROC)load("glVertexAttrib3sv");
- glad_glVertexAttrib4Nbv = (PFNGLVERTEXATTRIB4NBVPROC)load("glVertexAttrib4Nbv");
- glad_glVertexAttrib4Niv = (PFNGLVERTEXATTRIB4NIVPROC)load("glVertexAttrib4Niv");
- glad_glVertexAttrib4Nsv = (PFNGLVERTEXATTRIB4NSVPROC)load("glVertexAttrib4Nsv");
- glad_glVertexAttrib4Nub = (PFNGLVERTEXATTRIB4NUBPROC)load("glVertexAttrib4Nub");
- glad_glVertexAttrib4Nubv = (PFNGLVERTEXATTRIB4NUBVPROC)load("glVertexAttrib4Nubv");
- glad_glVertexAttrib4Nuiv = (PFNGLVERTEXATTRIB4NUIVPROC)load("glVertexAttrib4Nuiv");
- glad_glVertexAttrib4Nusv = (PFNGLVERTEXATTRIB4NUSVPROC)load("glVertexAttrib4Nusv");
- glad_glVertexAttrib4bv = (PFNGLVERTEXATTRIB4BVPROC)load("glVertexAttrib4bv");
- glad_glVertexAttrib4d = (PFNGLVERTEXATTRIB4DPROC)load("glVertexAttrib4d");
- glad_glVertexAttrib4dv = (PFNGLVERTEXATTRIB4DVPROC)load("glVertexAttrib4dv");
- glad_glVertexAttrib4f = (PFNGLVERTEXATTRIB4FPROC)load("glVertexAttrib4f");
- glad_glVertexAttrib4fv = (PFNGLVERTEXATTRIB4FVPROC)load("glVertexAttrib4fv");
- glad_glVertexAttrib4iv = (PFNGLVERTEXATTRIB4IVPROC)load("glVertexAttrib4iv");
- glad_glVertexAttrib4s = (PFNGLVERTEXATTRIB4SPROC)load("glVertexAttrib4s");
- glad_glVertexAttrib4sv = (PFNGLVERTEXATTRIB4SVPROC)load("glVertexAttrib4sv");
- glad_glVertexAttrib4ubv = (PFNGLVERTEXATTRIB4UBVPROC)load("glVertexAttrib4ubv");
- glad_glVertexAttrib4uiv = (PFNGLVERTEXATTRIB4UIVPROC)load("glVertexAttrib4uiv");
- glad_glVertexAttrib4usv = (PFNGLVERTEXATTRIB4USVPROC)load("glVertexAttrib4usv");
- glad_glVertexAttribPointer = (PFNGLVERTEXATTRIBPOINTERPROC)load("glVertexAttribPointer");
-}
-static void load_GL_VERSION_2_1(GLADloadproc load) {
- if(!GLAD_GL_VERSION_2_1) return;
- glad_glUniformMatrix2x3fv = (PFNGLUNIFORMMATRIX2X3FVPROC)load("glUniformMatrix2x3fv");
- glad_glUniformMatrix3x2fv = (PFNGLUNIFORMMATRIX3X2FVPROC)load("glUniformMatrix3x2fv");
- glad_glUniformMatrix2x4fv = (PFNGLUNIFORMMATRIX2X4FVPROC)load("glUniformMatrix2x4fv");
- glad_glUniformMatrix4x2fv = (PFNGLUNIFORMMATRIX4X2FVPROC)load("glUniformMatrix4x2fv");
- glad_glUniformMatrix3x4fv = (PFNGLUNIFORMMATRIX3X4FVPROC)load("glUniformMatrix3x4fv");
- glad_glUniformMatrix4x3fv = (PFNGLUNIFORMMATRIX4X3FVPROC)load("glUniformMatrix4x3fv");
-}
-static void load_GL_VERSION_3_0(GLADloadproc load) {
- if(!GLAD_GL_VERSION_3_0) return;
- glad_glColorMaski = (PFNGLCOLORMASKIPROC)load("glColorMaski");
- glad_glGetBooleani_v = (PFNGLGETBOOLEANI_VPROC)load("glGetBooleani_v");
- glad_glGetIntegeri_v = (PFNGLGETINTEGERI_VPROC)load("glGetIntegeri_v");
- glad_glEnablei = (PFNGLENABLEIPROC)load("glEnablei");
- glad_glDisablei = (PFNGLDISABLEIPROC)load("glDisablei");
- glad_glIsEnabledi = (PFNGLISENABLEDIPROC)load("glIsEnabledi");
- glad_glBeginTransformFeedback = (PFNGLBEGINTRANSFORMFEEDBACKPROC)load("glBeginTransformFeedback");
- glad_glEndTransformFeedback = (PFNGLENDTRANSFORMFEEDBACKPROC)load("glEndTransformFeedback");
- glad_glBindBufferRange = (PFNGLBINDBUFFERRANGEPROC)load("glBindBufferRange");
- glad_glBindBufferBase = (PFNGLBINDBUFFERBASEPROC)load("glBindBufferBase");
- glad_glTransformFeedbackVaryings = (PFNGLTRANSFORMFEEDBACKVARYINGSPROC)load("glTransformFeedbackVaryings");
- glad_glGetTransformFeedbackVarying = (PFNGLGETTRANSFORMFEEDBACKVARYINGPROC)load("glGetTransformFeedbackVarying");
- glad_glClampColor = (PFNGLCLAMPCOLORPROC)load("glClampColor");
- glad_glBeginConditionalRender = (PFNGLBEGINCONDITIONALRENDERPROC)load("glBeginConditionalRender");
- glad_glEndConditionalRender = (PFNGLENDCONDITIONALRENDERPROC)load("glEndConditionalRender");
- glad_glVertexAttribIPointer = (PFNGLVERTEXATTRIBIPOINTERPROC)load("glVertexAttribIPointer");
- glad_glGetVertexAttribIiv = (PFNGLGETVERTEXATTRIBIIVPROC)load("glGetVertexAttribIiv");
- glad_glGetVertexAttribIuiv = (PFNGLGETVERTEXATTRIBIUIVPROC)load("glGetVertexAttribIuiv");
- glad_glVertexAttribI1i = (PFNGLVERTEXATTRIBI1IPROC)load("glVertexAttribI1i");
- glad_glVertexAttribI2i = (PFNGLVERTEXATTRIBI2IPROC)load("glVertexAttribI2i");
- glad_glVertexAttribI3i = (PFNGLVERTEXATTRIBI3IPROC)load("glVertexAttribI3i");
- glad_glVertexAttribI4i = (PFNGLVERTEXATTRIBI4IPROC)load("glVertexAttribI4i");
- glad_glVertexAttribI1ui = (PFNGLVERTEXATTRIBI1UIPROC)load("glVertexAttribI1ui");
- glad_glVertexAttribI2ui = (PFNGLVERTEXATTRIBI2UIPROC)load("glVertexAttribI2ui");
- glad_glVertexAttribI3ui = (PFNGLVERTEXATTRIBI3UIPROC)load("glVertexAttribI3ui");
- glad_glVertexAttribI4ui = (PFNGLVERTEXATTRIBI4UIPROC)load("glVertexAttribI4ui");
- glad_glVertexAttribI1iv = (PFNGLVERTEXATTRIBI1IVPROC)load("glVertexAttribI1iv");
- glad_glVertexAttribI2iv = (PFNGLVERTEXATTRIBI2IVPROC)load("glVertexAttribI2iv");
- glad_glVertexAttribI3iv = (PFNGLVERTEXATTRIBI3IVPROC)load("glVertexAttribI3iv");
- glad_glVertexAttribI4iv = (PFNGLVERTEXATTRIBI4IVPROC)load("glVertexAttribI4iv");
- glad_glVertexAttribI1uiv = (PFNGLVERTEXATTRIBI1UIVPROC)load("glVertexAttribI1uiv");
- glad_glVertexAttribI2uiv = (PFNGLVERTEXATTRIBI2UIVPROC)load("glVertexAttribI2uiv");
- glad_glVertexAttribI3uiv = (PFNGLVERTEXATTRIBI3UIVPROC)load("glVertexAttribI3uiv");
- glad_glVertexAttribI4uiv = (PFNGLVERTEXATTRIBI4UIVPROC)load("glVertexAttribI4uiv");
- glad_glVertexAttribI4bv = (PFNGLVERTEXATTRIBI4BVPROC)load("glVertexAttribI4bv");
- glad_glVertexAttribI4sv = (PFNGLVERTEXATTRIBI4SVPROC)load("glVertexAttribI4sv");
- glad_glVertexAttribI4ubv = (PFNGLVERTEXATTRIBI4UBVPROC)load("glVertexAttribI4ubv");
- glad_glVertexAttribI4usv = (PFNGLVERTEXATTRIBI4USVPROC)load("glVertexAttribI4usv");
- glad_glGetUniformuiv = (PFNGLGETUNIFORMUIVPROC)load("glGetUniformuiv");
- glad_glBindFragDataLocation = (PFNGLBINDFRAGDATALOCATIONPROC)load("glBindFragDataLocation");
- glad_glGetFragDataLocation = (PFNGLGETFRAGDATALOCATIONPROC)load("glGetFragDataLocation");
- glad_glUniform1ui = (PFNGLUNIFORM1UIPROC)load("glUniform1ui");
- glad_glUniform2ui = (PFNGLUNIFORM2UIPROC)load("glUniform2ui");
- glad_glUniform3ui = (PFNGLUNIFORM3UIPROC)load("glUniform3ui");
- glad_glUniform4ui = (PFNGLUNIFORM4UIPROC)load("glUniform4ui");
- glad_glUniform1uiv = (PFNGLUNIFORM1UIVPROC)load("glUniform1uiv");
- glad_glUniform2uiv = (PFNGLUNIFORM2UIVPROC)load("glUniform2uiv");
- glad_glUniform3uiv = (PFNGLUNIFORM3UIVPROC)load("glUniform3uiv");
- glad_glUniform4uiv = (PFNGLUNIFORM4UIVPROC)load("glUniform4uiv");
- glad_glTexParameterIiv = (PFNGLTEXPARAMETERIIVPROC)load("glTexParameterIiv");
- glad_glTexParameterIuiv = (PFNGLTEXPARAMETERIUIVPROC)load("glTexParameterIuiv");
- glad_glGetTexParameterIiv = (PFNGLGETTEXPARAMETERIIVPROC)load("glGetTexParameterIiv");
- glad_glGetTexParameterIuiv = (PFNGLGETTEXPARAMETERIUIVPROC)load("glGetTexParameterIuiv");
- glad_glClearBufferiv = (PFNGLCLEARBUFFERIVPROC)load("glClearBufferiv");
- glad_glClearBufferuiv = (PFNGLCLEARBUFFERUIVPROC)load("glClearBufferuiv");
- glad_glClearBufferfv = (PFNGLCLEARBUFFERFVPROC)load("glClearBufferfv");
- glad_glClearBufferfi = (PFNGLCLEARBUFFERFIPROC)load("glClearBufferfi");
- glad_glGetStringi = (PFNGLGETSTRINGIPROC)load("glGetStringi");
- glad_glIsRenderbuffer = (PFNGLISRENDERBUFFERPROC)load("glIsRenderbuffer");
- glad_glBindRenderbuffer = (PFNGLBINDRENDERBUFFERPROC)load("glBindRenderbuffer");
- glad_glDeleteRenderbuffers = (PFNGLDELETERENDERBUFFERSPROC)load("glDeleteRenderbuffers");
- glad_glGenRenderbuffers = (PFNGLGENRENDERBUFFERSPROC)load("glGenRenderbuffers");
- glad_glRenderbufferStorage = (PFNGLRENDERBUFFERSTORAGEPROC)load("glRenderbufferStorage");
- glad_glGetRenderbufferParameteriv = (PFNGLGETRENDERBUFFERPARAMETERIVPROC)load("glGetRenderbufferParameteriv");
- glad_glIsFramebuffer = (PFNGLISFRAMEBUFFERPROC)load("glIsFramebuffer");
- glad_glBindFramebuffer = (PFNGLBINDFRAMEBUFFERPROC)load("glBindFramebuffer");
- glad_glDeleteFramebuffers = (PFNGLDELETEFRAMEBUFFERSPROC)load("glDeleteFramebuffers");
- glad_glGenFramebuffers = (PFNGLGENFRAMEBUFFERSPROC)load("glGenFramebuffers");
- glad_glCheckFramebufferStatus = (PFNGLCHECKFRAMEBUFFERSTATUSPROC)load("glCheckFramebufferStatus");
- glad_glFramebufferTexture1D = (PFNGLFRAMEBUFFERTEXTURE1DPROC)load("glFramebufferTexture1D");
- glad_glFramebufferTexture2D = (PFNGLFRAMEBUFFERTEXTURE2DPROC)load("glFramebufferTexture2D");
- glad_glFramebufferTexture3D = (PFNGLFRAMEBUFFERTEXTURE3DPROC)load("glFramebufferTexture3D");
- glad_glFramebufferRenderbuffer = (PFNGLFRAMEBUFFERRENDERBUFFERPROC)load("glFramebufferRenderbuffer");
- glad_glGetFramebufferAttachmentParameteriv = (PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVPROC)load("glGetFramebufferAttachmentParameteriv");
- glad_glGenerateMipmap = (PFNGLGENERATEMIPMAPPROC)load("glGenerateMipmap");
- glad_glBlitFramebuffer = (PFNGLBLITFRAMEBUFFERPROC)load("glBlitFramebuffer");
- glad_glRenderbufferStorageMultisample = (PFNGLRENDERBUFFERSTORAGEMULTISAMPLEPROC)load("glRenderbufferStorageMultisample");
- glad_glFramebufferTextureLayer = (PFNGLFRAMEBUFFERTEXTURELAYERPROC)load("glFramebufferTextureLayer");
- glad_glMapBufferRange = (PFNGLMAPBUFFERRANGEPROC)load("glMapBufferRange");
- glad_glFlushMappedBufferRange = (PFNGLFLUSHMAPPEDBUFFERRANGEPROC)load("glFlushMappedBufferRange");
- glad_glBindVertexArray = (PFNGLBINDVERTEXARRAYPROC)load("glBindVertexArray");
- glad_glDeleteVertexArrays = (PFNGLDELETEVERTEXARRAYSPROC)load("glDeleteVertexArrays");
- glad_glGenVertexArrays = (PFNGLGENVERTEXARRAYSPROC)load("glGenVertexArrays");
- glad_glIsVertexArray = (PFNGLISVERTEXARRAYPROC)load("glIsVertexArray");
-}
-static void load_GL_VERSION_3_1(GLADloadproc load) {
- if(!GLAD_GL_VERSION_3_1) return;
- glad_glDrawArraysInstanced = (PFNGLDRAWARRAYSINSTANCEDPROC)load("glDrawArraysInstanced");
- glad_glDrawElementsInstanced = (PFNGLDRAWELEMENTSINSTANCEDPROC)load("glDrawElementsInstanced");
- glad_glTexBuffer = (PFNGLTEXBUFFERPROC)load("glTexBuffer");
- glad_glPrimitiveRestartIndex = (PFNGLPRIMITIVERESTARTINDEXPROC)load("glPrimitiveRestartIndex");
- glad_glCopyBufferSubData = (PFNGLCOPYBUFFERSUBDATAPROC)load("glCopyBufferSubData");
- glad_glGetUniformIndices = (PFNGLGETUNIFORMINDICESPROC)load("glGetUniformIndices");
- glad_glGetActiveUniformsiv = (PFNGLGETACTIVEUNIFORMSIVPROC)load("glGetActiveUniformsiv");
- glad_glGetActiveUniformName = (PFNGLGETACTIVEUNIFORMNAMEPROC)load("glGetActiveUniformName");
- glad_glGetUniformBlockIndex = (PFNGLGETUNIFORMBLOCKINDEXPROC)load("glGetUniformBlockIndex");
- glad_glGetActiveUniformBlockiv = (PFNGLGETACTIVEUNIFORMBLOCKIVPROC)load("glGetActiveUniformBlockiv");
- glad_glGetActiveUniformBlockName = (PFNGLGETACTIVEUNIFORMBLOCKNAMEPROC)load("glGetActiveUniformBlockName");
- glad_glUniformBlockBinding = (PFNGLUNIFORMBLOCKBINDINGPROC)load("glUniformBlockBinding");
- glad_glBindBufferRange = (PFNGLBINDBUFFERRANGEPROC)load("glBindBufferRange");
- glad_glBindBufferBase = (PFNGLBINDBUFFERBASEPROC)load("glBindBufferBase");
- glad_glGetIntegeri_v = (PFNGLGETINTEGERI_VPROC)load("glGetIntegeri_v");
-}
-static void load_GL_VERSION_3_2(GLADloadproc load) {
- if(!GLAD_GL_VERSION_3_2) return;
- glad_glDrawElementsBaseVertex = (PFNGLDRAWELEMENTSBASEVERTEXPROC)load("glDrawElementsBaseVertex");
- glad_glDrawRangeElementsBaseVertex = (PFNGLDRAWRANGEELEMENTSBASEVERTEXPROC)load("glDrawRangeElementsBaseVertex");
- glad_glDrawElementsInstancedBaseVertex = (PFNGLDRAWELEMENTSINSTANCEDBASEVERTEXPROC)load("glDrawElementsInstancedBaseVertex");
- glad_glMultiDrawElementsBaseVertex = (PFNGLMULTIDRAWELEMENTSBASEVERTEXPROC)load("glMultiDrawElementsBaseVertex");
- glad_glProvokingVertex = (PFNGLPROVOKINGVERTEXPROC)load("glProvokingVertex");
- glad_glFenceSync = (PFNGLFENCESYNCPROC)load("glFenceSync");
- glad_glIsSync = (PFNGLISSYNCPROC)load("glIsSync");
- glad_glDeleteSync = (PFNGLDELETESYNCPROC)load("glDeleteSync");
- glad_glClientWaitSync = (PFNGLCLIENTWAITSYNCPROC)load("glClientWaitSync");
- glad_glWaitSync = (PFNGLWAITSYNCPROC)load("glWaitSync");
- glad_glGetInteger64v = (PFNGLGETINTEGER64VPROC)load("glGetInteger64v");
- glad_glGetSynciv = (PFNGLGETSYNCIVPROC)load("glGetSynciv");
- glad_glGetInteger64i_v = (PFNGLGETINTEGER64I_VPROC)load("glGetInteger64i_v");
- glad_glGetBufferParameteri64v = (PFNGLGETBUFFERPARAMETERI64VPROC)load("glGetBufferParameteri64v");
- glad_glFramebufferTexture = (PFNGLFRAMEBUFFERTEXTUREPROC)load("glFramebufferTexture");
- glad_glTexImage2DMultisample = (PFNGLTEXIMAGE2DMULTISAMPLEPROC)load("glTexImage2DMultisample");
- glad_glTexImage3DMultisample = (PFNGLTEXIMAGE3DMULTISAMPLEPROC)load("glTexImage3DMultisample");
- glad_glGetMultisamplefv = (PFNGLGETMULTISAMPLEFVPROC)load("glGetMultisamplefv");
- glad_glSampleMaski = (PFNGLSAMPLEMASKIPROC)load("glSampleMaski");
-}
-static void load_GL_VERSION_3_3(GLADloadproc load) {
- if(!GLAD_GL_VERSION_3_3) return;
- glad_glBindFragDataLocationIndexed = (PFNGLBINDFRAGDATALOCATIONINDEXEDPROC)load("glBindFragDataLocationIndexed");
- glad_glGetFragDataIndex = (PFNGLGETFRAGDATAINDEXPROC)load("glGetFragDataIndex");
- glad_glGenSamplers = (PFNGLGENSAMPLERSPROC)load("glGenSamplers");
- glad_glDeleteSamplers = (PFNGLDELETESAMPLERSPROC)load("glDeleteSamplers");
- glad_glIsSampler = (PFNGLISSAMPLERPROC)load("glIsSampler");
- glad_glBindSampler = (PFNGLBINDSAMPLERPROC)load("glBindSampler");
- glad_glSamplerParameteri = (PFNGLSAMPLERPARAMETERIPROC)load("glSamplerParameteri");
- glad_glSamplerParameteriv = (PFNGLSAMPLERPARAMETERIVPROC)load("glSamplerParameteriv");
- glad_glSamplerParameterf = (PFNGLSAMPLERPARAMETERFPROC)load("glSamplerParameterf");
- glad_glSamplerParameterfv = (PFNGLSAMPLERPARAMETERFVPROC)load("glSamplerParameterfv");
- glad_glSamplerParameterIiv = (PFNGLSAMPLERPARAMETERIIVPROC)load("glSamplerParameterIiv");
- glad_glSamplerParameterIuiv = (PFNGLSAMPLERPARAMETERIUIVPROC)load("glSamplerParameterIuiv");
- glad_glGetSamplerParameteriv = (PFNGLGETSAMPLERPARAMETERIVPROC)load("glGetSamplerParameteriv");
- glad_glGetSamplerParameterIiv = (PFNGLGETSAMPLERPARAMETERIIVPROC)load("glGetSamplerParameterIiv");
- glad_glGetSamplerParameterfv = (PFNGLGETSAMPLERPARAMETERFVPROC)load("glGetSamplerParameterfv");
- glad_glGetSamplerParameterIuiv = (PFNGLGETSAMPLERPARAMETERIUIVPROC)load("glGetSamplerParameterIuiv");
- glad_glQueryCounter = (PFNGLQUERYCOUNTERPROC)load("glQueryCounter");
- glad_glGetQueryObjecti64v = (PFNGLGETQUERYOBJECTI64VPROC)load("glGetQueryObjecti64v");
- glad_glGetQueryObjectui64v = (PFNGLGETQUERYOBJECTUI64VPROC)load("glGetQueryObjectui64v");
- glad_glVertexAttribDivisor = (PFNGLVERTEXATTRIBDIVISORPROC)load("glVertexAttribDivisor");
- glad_glVertexAttribP1ui = (PFNGLVERTEXATTRIBP1UIPROC)load("glVertexAttribP1ui");
- glad_glVertexAttribP1uiv = (PFNGLVERTEXATTRIBP1UIVPROC)load("glVertexAttribP1uiv");
- glad_glVertexAttribP2ui = (PFNGLVERTEXATTRIBP2UIPROC)load("glVertexAttribP2ui");
- glad_glVertexAttribP2uiv = (PFNGLVERTEXATTRIBP2UIVPROC)load("glVertexAttribP2uiv");
- glad_glVertexAttribP3ui = (PFNGLVERTEXATTRIBP3UIPROC)load("glVertexAttribP3ui");
- glad_glVertexAttribP3uiv = (PFNGLVERTEXATTRIBP3UIVPROC)load("glVertexAttribP3uiv");
- glad_glVertexAttribP4ui = (PFNGLVERTEXATTRIBP4UIPROC)load("glVertexAttribP4ui");
- glad_glVertexAttribP4uiv = (PFNGLVERTEXATTRIBP4UIVPROC)load("glVertexAttribP4uiv");
- glad_glVertexP2ui = (PFNGLVERTEXP2UIPROC)load("glVertexP2ui");
- glad_glVertexP2uiv = (PFNGLVERTEXP2UIVPROC)load("glVertexP2uiv");
- glad_glVertexP3ui = (PFNGLVERTEXP3UIPROC)load("glVertexP3ui");
- glad_glVertexP3uiv = (PFNGLVERTEXP3UIVPROC)load("glVertexP3uiv");
- glad_glVertexP4ui = (PFNGLVERTEXP4UIPROC)load("glVertexP4ui");
- glad_glVertexP4uiv = (PFNGLVERTEXP4UIVPROC)load("glVertexP4uiv");
- glad_glTexCoordP1ui = (PFNGLTEXCOORDP1UIPROC)load("glTexCoordP1ui");
- glad_glTexCoordP1uiv = (PFNGLTEXCOORDP1UIVPROC)load("glTexCoordP1uiv");
- glad_glTexCoordP2ui = (PFNGLTEXCOORDP2UIPROC)load("glTexCoordP2ui");
- glad_glTexCoordP2uiv = (PFNGLTEXCOORDP2UIVPROC)load("glTexCoordP2uiv");
- glad_glTexCoordP3ui = (PFNGLTEXCOORDP3UIPROC)load("glTexCoordP3ui");
- glad_glTexCoordP3uiv = (PFNGLTEXCOORDP3UIVPROC)load("glTexCoordP3uiv");
- glad_glTexCoordP4ui = (PFNGLTEXCOORDP4UIPROC)load("glTexCoordP4ui");
- glad_glTexCoordP4uiv = (PFNGLTEXCOORDP4UIVPROC)load("glTexCoordP4uiv");
- glad_glMultiTexCoordP1ui = (PFNGLMULTITEXCOORDP1UIPROC)load("glMultiTexCoordP1ui");
- glad_glMultiTexCoordP1uiv = (PFNGLMULTITEXCOORDP1UIVPROC)load("glMultiTexCoordP1uiv");
- glad_glMultiTexCoordP2ui = (PFNGLMULTITEXCOORDP2UIPROC)load("glMultiTexCoordP2ui");
- glad_glMultiTexCoordP2uiv = (PFNGLMULTITEXCOORDP2UIVPROC)load("glMultiTexCoordP2uiv");
- glad_glMultiTexCoordP3ui = (PFNGLMULTITEXCOORDP3UIPROC)load("glMultiTexCoordP3ui");
- glad_glMultiTexCoordP3uiv = (PFNGLMULTITEXCOORDP3UIVPROC)load("glMultiTexCoordP3uiv");
- glad_glMultiTexCoordP4ui = (PFNGLMULTITEXCOORDP4UIPROC)load("glMultiTexCoordP4ui");
- glad_glMultiTexCoordP4uiv = (PFNGLMULTITEXCOORDP4UIVPROC)load("glMultiTexCoordP4uiv");
- glad_glNormalP3ui = (PFNGLNORMALP3UIPROC)load("glNormalP3ui");
- glad_glNormalP3uiv = (PFNGLNORMALP3UIVPROC)load("glNormalP3uiv");
- glad_glColorP3ui = (PFNGLCOLORP3UIPROC)load("glColorP3ui");
- glad_glColorP3uiv = (PFNGLCOLORP3UIVPROC)load("glColorP3uiv");
- glad_glColorP4ui = (PFNGLCOLORP4UIPROC)load("glColorP4ui");
- glad_glColorP4uiv = (PFNGLCOLORP4UIVPROC)load("glColorP4uiv");
- glad_glSecondaryColorP3ui = (PFNGLSECONDARYCOLORP3UIPROC)load("glSecondaryColorP3ui");
- glad_glSecondaryColorP3uiv = (PFNGLSECONDARYCOLORP3UIVPROC)load("glSecondaryColorP3uiv");
-}
-static void load_GL_ARB_debug_output(GLADloadproc load) {
- if(!GLAD_GL_ARB_debug_output) return;
- glad_glDebugMessageControlARB = (PFNGLDEBUGMESSAGECONTROLARBPROC)load("glDebugMessageControlARB");
- glad_glDebugMessageInsertARB = (PFNGLDEBUGMESSAGEINSERTARBPROC)load("glDebugMessageInsertARB");
- glad_glDebugMessageCallbackARB = (PFNGLDEBUGMESSAGECALLBACKARBPROC)load("glDebugMessageCallbackARB");
- glad_glGetDebugMessageLogARB = (PFNGLGETDEBUGMESSAGELOGARBPROC)load("glGetDebugMessageLogARB");
-}
-static void load_GL_ARB_framebuffer_object(GLADloadproc load) {
- if(!GLAD_GL_ARB_framebuffer_object) return;
- glad_glIsRenderbuffer = (PFNGLISRENDERBUFFERPROC)load("glIsRenderbuffer");
- glad_glBindRenderbuffer = (PFNGLBINDRENDERBUFFERPROC)load("glBindRenderbuffer");
- glad_glDeleteRenderbuffers = (PFNGLDELETERENDERBUFFERSPROC)load("glDeleteRenderbuffers");
- glad_glGenRenderbuffers = (PFNGLGENRENDERBUFFERSPROC)load("glGenRenderbuffers");
- glad_glRenderbufferStorage = (PFNGLRENDERBUFFERSTORAGEPROC)load("glRenderbufferStorage");
- glad_glGetRenderbufferParameteriv = (PFNGLGETRENDERBUFFERPARAMETERIVPROC)load("glGetRenderbufferParameteriv");
- glad_glIsFramebuffer = (PFNGLISFRAMEBUFFERPROC)load("glIsFramebuffer");
- glad_glBindFramebuffer = (PFNGLBINDFRAMEBUFFERPROC)load("glBindFramebuffer");
- glad_glDeleteFramebuffers = (PFNGLDELETEFRAMEBUFFERSPROC)load("glDeleteFramebuffers");
- glad_glGenFramebuffers = (PFNGLGENFRAMEBUFFERSPROC)load("glGenFramebuffers");
- glad_glCheckFramebufferStatus = (PFNGLCHECKFRAMEBUFFERSTATUSPROC)load("glCheckFramebufferStatus");
- glad_glFramebufferTexture1D = (PFNGLFRAMEBUFFERTEXTURE1DPROC)load("glFramebufferTexture1D");
- glad_glFramebufferTexture2D = (PFNGLFRAMEBUFFERTEXTURE2DPROC)load("glFramebufferTexture2D");
- glad_glFramebufferTexture3D = (PFNGLFRAMEBUFFERTEXTURE3DPROC)load("glFramebufferTexture3D");
- glad_glFramebufferRenderbuffer = (PFNGLFRAMEBUFFERRENDERBUFFERPROC)load("glFramebufferRenderbuffer");
- glad_glGetFramebufferAttachmentParameteriv = (PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVPROC)load("glGetFramebufferAttachmentParameteriv");
- glad_glGenerateMipmap = (PFNGLGENERATEMIPMAPPROC)load("glGenerateMipmap");
- glad_glBlitFramebuffer = (PFNGLBLITFRAMEBUFFERPROC)load("glBlitFramebuffer");
- glad_glRenderbufferStorageMultisample = (PFNGLRENDERBUFFERSTORAGEMULTISAMPLEPROC)load("glRenderbufferStorageMultisample");
- glad_glFramebufferTextureLayer = (PFNGLFRAMEBUFFERTEXTURELAYERPROC)load("glFramebufferTextureLayer");
-}
-static void load_GL_EXT_framebuffer_blit(GLADloadproc load) {
- if(!GLAD_GL_EXT_framebuffer_blit) return;
- glad_glBlitFramebufferEXT = (PFNGLBLITFRAMEBUFFEREXTPROC)load("glBlitFramebufferEXT");
-}
-static void load_GL_EXT_framebuffer_multisample(GLADloadproc load) {
- if(!GLAD_GL_EXT_framebuffer_multisample) return;
- glad_glRenderbufferStorageMultisampleEXT = (PFNGLRENDERBUFFERSTORAGEMULTISAMPLEEXTPROC)load("glRenderbufferStorageMultisampleEXT");
-}
-static void load_GL_EXT_framebuffer_object(GLADloadproc load) {
- if(!GLAD_GL_EXT_framebuffer_object) return;
- glad_glIsRenderbufferEXT = (PFNGLISRENDERBUFFEREXTPROC)load("glIsRenderbufferEXT");
- glad_glBindRenderbufferEXT = (PFNGLBINDRENDERBUFFEREXTPROC)load("glBindRenderbufferEXT");
- glad_glDeleteRenderbuffersEXT = (PFNGLDELETERENDERBUFFERSEXTPROC)load("glDeleteRenderbuffersEXT");
- glad_glGenRenderbuffersEXT = (PFNGLGENRENDERBUFFERSEXTPROC)load("glGenRenderbuffersEXT");
- glad_glRenderbufferStorageEXT = (PFNGLRENDERBUFFERSTORAGEEXTPROC)load("glRenderbufferStorageEXT");
- glad_glGetRenderbufferParameterivEXT = (PFNGLGETRENDERBUFFERPARAMETERIVEXTPROC)load("glGetRenderbufferParameterivEXT");
- glad_glIsFramebufferEXT = (PFNGLISFRAMEBUFFEREXTPROC)load("glIsFramebufferEXT");
- glad_glBindFramebufferEXT = (PFNGLBINDFRAMEBUFFEREXTPROC)load("glBindFramebufferEXT");
- glad_glDeleteFramebuffersEXT = (PFNGLDELETEFRAMEBUFFERSEXTPROC)load("glDeleteFramebuffersEXT");
- glad_glGenFramebuffersEXT = (PFNGLGENFRAMEBUFFERSEXTPROC)load("glGenFramebuffersEXT");
- glad_glCheckFramebufferStatusEXT = (PFNGLCHECKFRAMEBUFFERSTATUSEXTPROC)load("glCheckFramebufferStatusEXT");
- glad_glFramebufferTexture1DEXT = (PFNGLFRAMEBUFFERTEXTURE1DEXTPROC)load("glFramebufferTexture1DEXT");
- glad_glFramebufferTexture2DEXT = (PFNGLFRAMEBUFFERTEXTURE2DEXTPROC)load("glFramebufferTexture2DEXT");
- glad_glFramebufferTexture3DEXT = (PFNGLFRAMEBUFFERTEXTURE3DEXTPROC)load("glFramebufferTexture3DEXT");
- glad_glFramebufferRenderbufferEXT = (PFNGLFRAMEBUFFERRENDERBUFFEREXTPROC)load("glFramebufferRenderbufferEXT");
- glad_glGetFramebufferAttachmentParameterivEXT = (PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVEXTPROC)load("glGetFramebufferAttachmentParameterivEXT");
- glad_glGenerateMipmapEXT = (PFNGLGENERATEMIPMAPEXTPROC)load("glGenerateMipmapEXT");
-}
-static int find_extensionsGL(void) {
- if (!get_exts()) return 0;
- GLAD_GL_ARB_debug_output = has_ext("GL_ARB_debug_output");
- GLAD_GL_ARB_framebuffer_object = has_ext("GL_ARB_framebuffer_object");
- GLAD_GL_EXT_framebuffer_blit = has_ext("GL_EXT_framebuffer_blit");
- GLAD_GL_EXT_framebuffer_multisample = has_ext("GL_EXT_framebuffer_multisample");
- GLAD_GL_EXT_framebuffer_object = has_ext("GL_EXT_framebuffer_object");
- free_exts();
- return 1;
-}
-
-static void find_coreGL(void) {
-
- /* Thank you @elmindreda
- * https://github.com/elmindreda/greg/blob/master/templates/greg.c.in#L176
- * https://github.com/glfw/glfw/blob/master/src/context.c#L36
- */
- int i, major, minor;
-
- const char* version;
- const char* prefixes[] = {
- "OpenGL ES-CM ",
- "OpenGL ES-CL ",
- "OpenGL ES ",
- NULL
- };
-
- version = (const char*) glGetString(GL_VERSION);
- if (!version) return;
-
- for (i = 0; prefixes[i]; i++) {
- const size_t length = strlen(prefixes[i]);
- if (strncmp(version, prefixes[i], length) == 0) {
- version += length;
- break;
- }
- }
-
-/* PR #18 */
-#ifdef _MSC_VER
- sscanf_s(version, "%d.%d", &major, &minor);
-#else
- sscanf(version, "%d.%d", &major, &minor);
-#endif
-
- GLVersion.major = major; GLVersion.minor = minor;
- max_loaded_major = major; max_loaded_minor = minor;
- GLAD_GL_VERSION_1_0 = (major == 1 && minor >= 0) || major > 1;
- GLAD_GL_VERSION_1_1 = (major == 1 && minor >= 1) || major > 1;
- GLAD_GL_VERSION_1_2 = (major == 1 && minor >= 2) || major > 1;
- GLAD_GL_VERSION_1_3 = (major == 1 && minor >= 3) || major > 1;
- GLAD_GL_VERSION_1_4 = (major == 1 && minor >= 4) || major > 1;
- GLAD_GL_VERSION_1_5 = (major == 1 && minor >= 5) || major > 1;
- GLAD_GL_VERSION_2_0 = (major == 2 && minor >= 0) || major > 2;
- GLAD_GL_VERSION_2_1 = (major == 2 && minor >= 1) || major > 2;
- GLAD_GL_VERSION_3_0 = (major == 3 && minor >= 0) || major > 3;
- GLAD_GL_VERSION_3_1 = (major == 3 && minor >= 1) || major > 3;
- GLAD_GL_VERSION_3_2 = (major == 3 && minor >= 2) || major > 3;
- GLAD_GL_VERSION_3_3 = (major == 3 && minor >= 3) || major > 3;
- if (GLVersion.major > 3 || (GLVersion.major >= 3 && GLVersion.minor >= 3)) {
- max_loaded_major = 3;
- max_loaded_minor = 3;
- }
-}
-
-int gladLoadGLLoader(GLADloadproc load) {
- GLVersion.major = 0; GLVersion.minor = 0;
- glGetString = (PFNGLGETSTRINGPROC)load("glGetString");
- if(glGetString == NULL) return 0;
- if(glGetString(GL_VERSION) == NULL) return 0;
- find_coreGL();
- load_GL_VERSION_1_0(load);
- load_GL_VERSION_1_1(load);
- load_GL_VERSION_1_2(load);
- load_GL_VERSION_1_3(load);
- load_GL_VERSION_1_4(load);
- load_GL_VERSION_1_5(load);
- load_GL_VERSION_2_0(load);
- load_GL_VERSION_2_1(load);
- load_GL_VERSION_3_0(load);
- load_GL_VERSION_3_1(load);
- load_GL_VERSION_3_2(load);
- load_GL_VERSION_3_3(load);
-
- if (!find_extensionsGL()) return 0;
- load_GL_ARB_debug_output(load);
- load_GL_ARB_framebuffer_object(load);
- load_GL_EXT_framebuffer_blit(load);
- load_GL_EXT_framebuffer_multisample(load);
- load_GL_EXT_framebuffer_object(load);
- return GLVersion.major != 0 || GLVersion.minor != 0;
-}
-
diff --git a/thirdparty/glad/glad/gl.h b/thirdparty/glad/glad/gl.h
new file mode 100644
index 0000000000..9836296226
--- /dev/null
+++ b/thirdparty/glad/glad/gl.h
@@ -0,0 +1,3884 @@
+/**
+ * Loader generated by glad 2.0.2 on Mon Nov 7 12:17:15 2022
+ *
+ * SPDX-License-Identifier: (WTFPL OR CC0-1.0) AND Apache-2.0
+ *
+ * Generator: C/C++
+ * Specification: gl
+ * Extensions: 7
+ *
+ * APIs:
+ * - gl:compatibility=3.3
+ *
+ * Options:
+ * - ALIAS = False
+ * - DEBUG = False
+ * - HEADER_ONLY = False
+ * - LOADER = True
+ * - MX = False
+ * - ON_DEMAND = False
+ *
+ * Commandline:
+ * --api='gl:compatibility=3.3' --extensions='GL_ARB_debug_output,GL_ARB_framebuffer_object,GL_EXT_framebuffer_blit,GL_EXT_framebuffer_multisample,GL_EXT_framebuffer_object,GL_OVR_multiview,GL_OVR_multiview2' c --loader
+ *
+ * Online:
+ * http://glad.sh/#api=gl%3Acompatibility%3D3.3&extensions=GL_ARB_debug_output%2CGL_ARB_framebuffer_object%2CGL_EXT_framebuffer_blit%2CGL_EXT_framebuffer_multisample%2CGL_EXT_framebuffer_object%2CGL_OVR_multiview%2CGL_OVR_multiview2&generator=c&options=LOADER
+ *
+ */
+
+#ifndef GLAD_GL_H_
+#define GLAD_GL_H_
+
+#ifdef __clang__
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wreserved-id-macro"
+#endif
+#ifdef __gl_h_
+ #error OpenGL (gl.h) header already included (API: gl), remove previous include!
+#endif
+#define __gl_h_ 1
+#ifdef __gl3_h_
+ #error OpenGL (gl3.h) header already included (API: gl), remove previous include!
+#endif
+#define __gl3_h_ 1
+#ifdef __glext_h_
+ #error OpenGL (glext.h) header already included (API: gl), remove previous include!
+#endif
+#define __glext_h_ 1
+#ifdef __gl3ext_h_
+ #error OpenGL (gl3ext.h) header already included (API: gl), remove previous include!
+#endif
+#define __gl3ext_h_ 1
+#ifdef __clang__
+#pragma clang diagnostic pop
+#endif
+
+#define GLAD_GL
+#define GLAD_OPTION_GL_LOADER
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifndef GLAD_PLATFORM_H_
+#define GLAD_PLATFORM_H_
+
+#ifndef GLAD_PLATFORM_WIN32
+ #if defined(_WIN32) || defined(__WIN32__) || defined(WIN32) || defined(__MINGW32__)
+ #define GLAD_PLATFORM_WIN32 1
+ #else
+ #define GLAD_PLATFORM_WIN32 0
+ #endif
+#endif
+
+#ifndef GLAD_PLATFORM_APPLE
+ #ifdef __APPLE__
+ #define GLAD_PLATFORM_APPLE 1
+ #else
+ #define GLAD_PLATFORM_APPLE 0
+ #endif
+#endif
+
+#ifndef GLAD_PLATFORM_EMSCRIPTEN
+ #ifdef __EMSCRIPTEN__
+ #define GLAD_PLATFORM_EMSCRIPTEN 1
+ #else
+ #define GLAD_PLATFORM_EMSCRIPTEN 0
+ #endif
+#endif
+
+#ifndef GLAD_PLATFORM_UWP
+ #if defined(_MSC_VER) && !defined(GLAD_INTERNAL_HAVE_WINAPIFAMILY)
+ #ifdef __has_include
+ #if __has_include(<winapifamily.h>)
+ #define GLAD_INTERNAL_HAVE_WINAPIFAMILY 1
+ #endif
+ #elif _MSC_VER >= 1700 && !_USING_V110_SDK71_
+ #define GLAD_INTERNAL_HAVE_WINAPIFAMILY 1
+ #endif
+ #endif
+
+ #ifdef GLAD_INTERNAL_HAVE_WINAPIFAMILY
+ #include <winapifamily.h>
+ #if !WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP) && WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_APP)
+ #define GLAD_PLATFORM_UWP 1
+ #endif
+ #endif
+
+ #ifndef GLAD_PLATFORM_UWP
+ #define GLAD_PLATFORM_UWP 0
+ #endif
+#endif
+
+#ifdef __GNUC__
+ #define GLAD_GNUC_EXTENSION __extension__
+#else
+ #define GLAD_GNUC_EXTENSION
+#endif
+
+#define GLAD_UNUSED(x) (void)(x)
+
+#ifndef GLAD_API_CALL
+ #if defined(GLAD_API_CALL_EXPORT)
+ #if GLAD_PLATFORM_WIN32 || defined(__CYGWIN__)
+ #if defined(GLAD_API_CALL_EXPORT_BUILD)
+ #if defined(__GNUC__)
+ #define GLAD_API_CALL __attribute__ ((dllexport)) extern
+ #else
+ #define GLAD_API_CALL __declspec(dllexport) extern
+ #endif
+ #else
+ #if defined(__GNUC__)
+ #define GLAD_API_CALL __attribute__ ((dllimport)) extern
+ #else
+ #define GLAD_API_CALL __declspec(dllimport) extern
+ #endif
+ #endif
+ #elif defined(__GNUC__) && defined(GLAD_API_CALL_EXPORT_BUILD)
+ #define GLAD_API_CALL __attribute__ ((visibility ("default"))) extern
+ #else
+ #define GLAD_API_CALL extern
+ #endif
+ #else
+ #define GLAD_API_CALL extern
+ #endif
+#endif
+
+#ifdef APIENTRY
+ #define GLAD_API_PTR APIENTRY
+#elif GLAD_PLATFORM_WIN32
+ #define GLAD_API_PTR __stdcall
+#else
+ #define GLAD_API_PTR
+#endif
+
+#ifndef GLAPI
+#define GLAPI GLAD_API_CALL
+#endif
+
+#ifndef GLAPIENTRY
+#define GLAPIENTRY GLAD_API_PTR
+#endif
+
+#define GLAD_MAKE_VERSION(major, minor) (major * 10000 + minor)
+#define GLAD_VERSION_MAJOR(version) (version / 10000)
+#define GLAD_VERSION_MINOR(version) (version % 10000)
+
+#define GLAD_GENERATOR_VERSION "2.0.2"
+
+typedef void (*GLADapiproc)(void);
+
+typedef GLADapiproc (*GLADloadfunc)(const char *name);
+typedef GLADapiproc (*GLADuserptrloadfunc)(void *userptr, const char *name);
+
+typedef void (*GLADprecallback)(const char *name, GLADapiproc apiproc, int len_args, ...);
+typedef void (*GLADpostcallback)(void *ret, const char *name, GLADapiproc apiproc, int len_args, ...);
+
+#endif /* GLAD_PLATFORM_H_ */
+
+#define GL_2D 0x0600
+#define GL_2_BYTES 0x1407
+#define GL_3D 0x0601
+#define GL_3D_COLOR 0x0602
+#define GL_3D_COLOR_TEXTURE 0x0603
+#define GL_3_BYTES 0x1408
+#define GL_4D_COLOR_TEXTURE 0x0604
+#define GL_4_BYTES 0x1409
+#define GL_ACCUM 0x0100
+#define GL_ACCUM_ALPHA_BITS 0x0D5B
+#define GL_ACCUM_BLUE_BITS 0x0D5A
+#define GL_ACCUM_BUFFER_BIT 0x00000200
+#define GL_ACCUM_CLEAR_VALUE 0x0B80
+#define GL_ACCUM_GREEN_BITS 0x0D59
+#define GL_ACCUM_RED_BITS 0x0D58
+#define GL_ACTIVE_ATTRIBUTES 0x8B89
+#define GL_ACTIVE_ATTRIBUTE_MAX_LENGTH 0x8B8A
+#define GL_ACTIVE_TEXTURE 0x84E0
+#define GL_ACTIVE_UNIFORMS 0x8B86
+#define GL_ACTIVE_UNIFORM_BLOCKS 0x8A36
+#define GL_ACTIVE_UNIFORM_BLOCK_MAX_NAME_LENGTH 0x8A35
+#define GL_ACTIVE_UNIFORM_MAX_LENGTH 0x8B87
+#define GL_ADD 0x0104
+#define GL_ADD_SIGNED 0x8574
+#define GL_ALIASED_LINE_WIDTH_RANGE 0x846E
+#define GL_ALIASED_POINT_SIZE_RANGE 0x846D
+#define GL_ALL_ATTRIB_BITS 0xFFFFFFFF
+#define GL_ALPHA 0x1906
+#define GL_ALPHA12 0x803D
+#define GL_ALPHA16 0x803E
+#define GL_ALPHA4 0x803B
+#define GL_ALPHA8 0x803C
+#define GL_ALPHA_BIAS 0x0D1D
+#define GL_ALPHA_BITS 0x0D55
+#define GL_ALPHA_INTEGER 0x8D97
+#define GL_ALPHA_SCALE 0x0D1C
+#define GL_ALPHA_TEST 0x0BC0
+#define GL_ALPHA_TEST_FUNC 0x0BC1
+#define GL_ALPHA_TEST_REF 0x0BC2
+#define GL_ALREADY_SIGNALED 0x911A
+#define GL_ALWAYS 0x0207
+#define GL_AMBIENT 0x1200
+#define GL_AMBIENT_AND_DIFFUSE 0x1602
+#define GL_AND 0x1501
+#define GL_AND_INVERTED 0x1504
+#define GL_AND_REVERSE 0x1502
+#define GL_ANY_SAMPLES_PASSED 0x8C2F
+#define GL_ARRAY_BUFFER 0x8892
+#define GL_ARRAY_BUFFER_BINDING 0x8894
+#define GL_ATTACHED_SHADERS 0x8B85
+#define GL_ATTRIB_STACK_DEPTH 0x0BB0
+#define GL_AUTO_NORMAL 0x0D80
+#define GL_AUX0 0x0409
+#define GL_AUX1 0x040A
+#define GL_AUX2 0x040B
+#define GL_AUX3 0x040C
+#define GL_AUX_BUFFERS 0x0C00
+#define GL_BACK 0x0405
+#define GL_BACK_LEFT 0x0402
+#define GL_BACK_RIGHT 0x0403
+#define GL_BGR 0x80E0
+#define GL_BGRA 0x80E1
+#define GL_BGRA_INTEGER 0x8D9B
+#define GL_BGR_INTEGER 0x8D9A
+#define GL_BITMAP 0x1A00
+#define GL_BITMAP_TOKEN 0x0704
+#define GL_BLEND 0x0BE2
+#define GL_BLEND_COLOR 0x8005
+#define GL_BLEND_DST 0x0BE0
+#define GL_BLEND_DST_ALPHA 0x80CA
+#define GL_BLEND_DST_RGB 0x80C8
+#define GL_BLEND_EQUATION 0x8009
+#define GL_BLEND_EQUATION_ALPHA 0x883D
+#define GL_BLEND_EQUATION_RGB 0x8009
+#define GL_BLEND_SRC 0x0BE1
+#define GL_BLEND_SRC_ALPHA 0x80CB
+#define GL_BLEND_SRC_RGB 0x80C9
+#define GL_BLUE 0x1905
+#define GL_BLUE_BIAS 0x0D1B
+#define GL_BLUE_BITS 0x0D54
+#define GL_BLUE_INTEGER 0x8D96
+#define GL_BLUE_SCALE 0x0D1A
+#define GL_BOOL 0x8B56
+#define GL_BOOL_VEC2 0x8B57
+#define GL_BOOL_VEC3 0x8B58
+#define GL_BOOL_VEC4 0x8B59
+#define GL_BUFFER_ACCESS 0x88BB
+#define GL_BUFFER_ACCESS_FLAGS 0x911F
+#define GL_BUFFER_MAPPED 0x88BC
+#define GL_BUFFER_MAP_LENGTH 0x9120
+#define GL_BUFFER_MAP_OFFSET 0x9121
+#define GL_BUFFER_MAP_POINTER 0x88BD
+#define GL_BUFFER_SIZE 0x8764
+#define GL_BUFFER_USAGE 0x8765
+#define GL_BYTE 0x1400
+#define GL_C3F_V3F 0x2A24
+#define GL_C4F_N3F_V3F 0x2A26
+#define GL_C4UB_V2F 0x2A22
+#define GL_C4UB_V3F 0x2A23
+#define GL_CCW 0x0901
+#define GL_CLAMP 0x2900
+#define GL_CLAMP_FRAGMENT_COLOR 0x891B
+#define GL_CLAMP_READ_COLOR 0x891C
+#define GL_CLAMP_TO_BORDER 0x812D
+#define GL_CLAMP_TO_EDGE 0x812F
+#define GL_CLAMP_VERTEX_COLOR 0x891A
+#define GL_CLEAR 0x1500
+#define GL_CLIENT_ACTIVE_TEXTURE 0x84E1
+#define GL_CLIENT_ALL_ATTRIB_BITS 0xFFFFFFFF
+#define GL_CLIENT_ATTRIB_STACK_DEPTH 0x0BB1
+#define GL_CLIENT_PIXEL_STORE_BIT 0x00000001
+#define GL_CLIENT_VERTEX_ARRAY_BIT 0x00000002
+#define GL_CLIP_DISTANCE0 0x3000
+#define GL_CLIP_DISTANCE1 0x3001
+#define GL_CLIP_DISTANCE2 0x3002
+#define GL_CLIP_DISTANCE3 0x3003
+#define GL_CLIP_DISTANCE4 0x3004
+#define GL_CLIP_DISTANCE5 0x3005
+#define GL_CLIP_DISTANCE6 0x3006
+#define GL_CLIP_DISTANCE7 0x3007
+#define GL_CLIP_PLANE0 0x3000
+#define GL_CLIP_PLANE1 0x3001
+#define GL_CLIP_PLANE2 0x3002
+#define GL_CLIP_PLANE3 0x3003
+#define GL_CLIP_PLANE4 0x3004
+#define GL_CLIP_PLANE5 0x3005
+#define GL_COEFF 0x0A00
+#define GL_COLOR 0x1800
+#define GL_COLOR_ARRAY 0x8076
+#define GL_COLOR_ARRAY_BUFFER_BINDING 0x8898
+#define GL_COLOR_ARRAY_POINTER 0x8090
+#define GL_COLOR_ARRAY_SIZE 0x8081
+#define GL_COLOR_ARRAY_STRIDE 0x8083
+#define GL_COLOR_ARRAY_TYPE 0x8082
+#define GL_COLOR_ATTACHMENT0 0x8CE0
+#define GL_COLOR_ATTACHMENT0_EXT 0x8CE0
+#define GL_COLOR_ATTACHMENT1 0x8CE1
+#define GL_COLOR_ATTACHMENT10 0x8CEA
+#define GL_COLOR_ATTACHMENT10_EXT 0x8CEA
+#define GL_COLOR_ATTACHMENT11 0x8CEB
+#define GL_COLOR_ATTACHMENT11_EXT 0x8CEB
+#define GL_COLOR_ATTACHMENT12 0x8CEC
+#define GL_COLOR_ATTACHMENT12_EXT 0x8CEC
+#define GL_COLOR_ATTACHMENT13 0x8CED
+#define GL_COLOR_ATTACHMENT13_EXT 0x8CED
+#define GL_COLOR_ATTACHMENT14 0x8CEE
+#define GL_COLOR_ATTACHMENT14_EXT 0x8CEE
+#define GL_COLOR_ATTACHMENT15 0x8CEF
+#define GL_COLOR_ATTACHMENT15_EXT 0x8CEF
+#define GL_COLOR_ATTACHMENT16 0x8CF0
+#define GL_COLOR_ATTACHMENT17 0x8CF1
+#define GL_COLOR_ATTACHMENT18 0x8CF2
+#define GL_COLOR_ATTACHMENT19 0x8CF3
+#define GL_COLOR_ATTACHMENT1_EXT 0x8CE1
+#define GL_COLOR_ATTACHMENT2 0x8CE2
+#define GL_COLOR_ATTACHMENT20 0x8CF4
+#define GL_COLOR_ATTACHMENT21 0x8CF5
+#define GL_COLOR_ATTACHMENT22 0x8CF6
+#define GL_COLOR_ATTACHMENT23 0x8CF7
+#define GL_COLOR_ATTACHMENT24 0x8CF8
+#define GL_COLOR_ATTACHMENT25 0x8CF9
+#define GL_COLOR_ATTACHMENT26 0x8CFA
+#define GL_COLOR_ATTACHMENT27 0x8CFB
+#define GL_COLOR_ATTACHMENT28 0x8CFC
+#define GL_COLOR_ATTACHMENT29 0x8CFD
+#define GL_COLOR_ATTACHMENT2_EXT 0x8CE2
+#define GL_COLOR_ATTACHMENT3 0x8CE3
+#define GL_COLOR_ATTACHMENT30 0x8CFE
+#define GL_COLOR_ATTACHMENT31 0x8CFF
+#define GL_COLOR_ATTACHMENT3_EXT 0x8CE3
+#define GL_COLOR_ATTACHMENT4 0x8CE4
+#define GL_COLOR_ATTACHMENT4_EXT 0x8CE4
+#define GL_COLOR_ATTACHMENT5 0x8CE5
+#define GL_COLOR_ATTACHMENT5_EXT 0x8CE5
+#define GL_COLOR_ATTACHMENT6 0x8CE6
+#define GL_COLOR_ATTACHMENT6_EXT 0x8CE6
+#define GL_COLOR_ATTACHMENT7 0x8CE7
+#define GL_COLOR_ATTACHMENT7_EXT 0x8CE7
+#define GL_COLOR_ATTACHMENT8 0x8CE8
+#define GL_COLOR_ATTACHMENT8_EXT 0x8CE8
+#define GL_COLOR_ATTACHMENT9 0x8CE9
+#define GL_COLOR_ATTACHMENT9_EXT 0x8CE9
+#define GL_COLOR_BUFFER_BIT 0x00004000
+#define GL_COLOR_CLEAR_VALUE 0x0C22
+#define GL_COLOR_INDEX 0x1900
+#define GL_COLOR_INDEXES 0x1603
+#define GL_COLOR_LOGIC_OP 0x0BF2
+#define GL_COLOR_MATERIAL 0x0B57
+#define GL_COLOR_MATERIAL_FACE 0x0B55
+#define GL_COLOR_MATERIAL_PARAMETER 0x0B56
+#define GL_COLOR_SUM 0x8458
+#define GL_COLOR_WRITEMASK 0x0C23
+#define GL_COMBINE 0x8570
+#define GL_COMBINE_ALPHA 0x8572
+#define GL_COMBINE_RGB 0x8571
+#define GL_COMPARE_REF_TO_TEXTURE 0x884E
+#define GL_COMPARE_R_TO_TEXTURE 0x884E
+#define GL_COMPILE 0x1300
+#define GL_COMPILE_AND_EXECUTE 0x1301
+#define GL_COMPILE_STATUS 0x8B81
+#define GL_COMPRESSED_ALPHA 0x84E9
+#define GL_COMPRESSED_INTENSITY 0x84EC
+#define GL_COMPRESSED_LUMINANCE 0x84EA
+#define GL_COMPRESSED_LUMINANCE_ALPHA 0x84EB
+#define GL_COMPRESSED_RED 0x8225
+#define GL_COMPRESSED_RED_RGTC1 0x8DBB
+#define GL_COMPRESSED_RG 0x8226
+#define GL_COMPRESSED_RGB 0x84ED
+#define GL_COMPRESSED_RGBA 0x84EE
+#define GL_COMPRESSED_RG_RGTC2 0x8DBD
+#define GL_COMPRESSED_SIGNED_RED_RGTC1 0x8DBC
+#define GL_COMPRESSED_SIGNED_RG_RGTC2 0x8DBE
+#define GL_COMPRESSED_SLUMINANCE 0x8C4A
+#define GL_COMPRESSED_SLUMINANCE_ALPHA 0x8C4B
+#define GL_COMPRESSED_SRGB 0x8C48
+#define GL_COMPRESSED_SRGB_ALPHA 0x8C49
+#define GL_COMPRESSED_TEXTURE_FORMATS 0x86A3
+#define GL_CONDITION_SATISFIED 0x911C
+#define GL_CONSTANT 0x8576
+#define GL_CONSTANT_ALPHA 0x8003
+#define GL_CONSTANT_ATTENUATION 0x1207
+#define GL_CONSTANT_COLOR 0x8001
+#define GL_CONTEXT_COMPATIBILITY_PROFILE_BIT 0x00000002
+#define GL_CONTEXT_CORE_PROFILE_BIT 0x00000001
+#define GL_CONTEXT_FLAGS 0x821E
+#define GL_CONTEXT_FLAG_FORWARD_COMPATIBLE_BIT 0x00000001
+#define GL_CONTEXT_PROFILE_MASK 0x9126
+#define GL_COORD_REPLACE 0x8862
+#define GL_COPY 0x1503
+#define GL_COPY_INVERTED 0x150C
+#define GL_COPY_PIXEL_TOKEN 0x0706
+#define GL_COPY_READ_BUFFER 0x8F36
+#define GL_COPY_WRITE_BUFFER 0x8F37
+#define GL_CULL_FACE 0x0B44
+#define GL_CULL_FACE_MODE 0x0B45
+#define GL_CURRENT_BIT 0x00000001
+#define GL_CURRENT_COLOR 0x0B00
+#define GL_CURRENT_FOG_COORD 0x8453
+#define GL_CURRENT_FOG_COORDINATE 0x8453
+#define GL_CURRENT_INDEX 0x0B01
+#define GL_CURRENT_NORMAL 0x0B02
+#define GL_CURRENT_PROGRAM 0x8B8D
+#define GL_CURRENT_QUERY 0x8865
+#define GL_CURRENT_RASTER_COLOR 0x0B04
+#define GL_CURRENT_RASTER_DISTANCE 0x0B09
+#define GL_CURRENT_RASTER_INDEX 0x0B05
+#define GL_CURRENT_RASTER_POSITION 0x0B07
+#define GL_CURRENT_RASTER_POSITION_VALID 0x0B08
+#define GL_CURRENT_RASTER_SECONDARY_COLOR 0x845F
+#define GL_CURRENT_RASTER_TEXTURE_COORDS 0x0B06
+#define GL_CURRENT_SECONDARY_COLOR 0x8459
+#define GL_CURRENT_TEXTURE_COORDS 0x0B03
+#define GL_CURRENT_VERTEX_ATTRIB 0x8626
+#define GL_CW 0x0900
+#define GL_DEBUG_CALLBACK_FUNCTION_ARB 0x8244
+#define GL_DEBUG_CALLBACK_USER_PARAM_ARB 0x8245
+#define GL_DEBUG_LOGGED_MESSAGES_ARB 0x9145
+#define GL_DEBUG_NEXT_LOGGED_MESSAGE_LENGTH_ARB 0x8243
+#define GL_DEBUG_OUTPUT_SYNCHRONOUS_ARB 0x8242
+#define GL_DEBUG_SEVERITY_HIGH_ARB 0x9146
+#define GL_DEBUG_SEVERITY_LOW_ARB 0x9148
+#define GL_DEBUG_SEVERITY_MEDIUM_ARB 0x9147
+#define GL_DEBUG_SOURCE_API_ARB 0x8246
+#define GL_DEBUG_SOURCE_APPLICATION_ARB 0x824A
+#define GL_DEBUG_SOURCE_OTHER_ARB 0x824B
+#define GL_DEBUG_SOURCE_SHADER_COMPILER_ARB 0x8248
+#define GL_DEBUG_SOURCE_THIRD_PARTY_ARB 0x8249
+#define GL_DEBUG_SOURCE_WINDOW_SYSTEM_ARB 0x8247
+#define GL_DEBUG_TYPE_DEPRECATED_BEHAVIOR_ARB 0x824D
+#define GL_DEBUG_TYPE_ERROR_ARB 0x824C
+#define GL_DEBUG_TYPE_OTHER_ARB 0x8251
+#define GL_DEBUG_TYPE_PERFORMANCE_ARB 0x8250
+#define GL_DEBUG_TYPE_PORTABILITY_ARB 0x824F
+#define GL_DEBUG_TYPE_UNDEFINED_BEHAVIOR_ARB 0x824E
+#define GL_DECAL 0x2101
+#define GL_DECR 0x1E03
+#define GL_DECR_WRAP 0x8508
+#define GL_DELETE_STATUS 0x8B80
+#define GL_DEPTH 0x1801
+#define GL_DEPTH24_STENCIL8 0x88F0
+#define GL_DEPTH32F_STENCIL8 0x8CAD
+#define GL_DEPTH_ATTACHMENT 0x8D00
+#define GL_DEPTH_ATTACHMENT_EXT 0x8D00
+#define GL_DEPTH_BIAS 0x0D1F
+#define GL_DEPTH_BITS 0x0D56
+#define GL_DEPTH_BUFFER_BIT 0x00000100
+#define GL_DEPTH_CLAMP 0x864F
+#define GL_DEPTH_CLEAR_VALUE 0x0B73
+#define GL_DEPTH_COMPONENT 0x1902
+#define GL_DEPTH_COMPONENT16 0x81A5
+#define GL_DEPTH_COMPONENT24 0x81A6
+#define GL_DEPTH_COMPONENT32 0x81A7
+#define GL_DEPTH_COMPONENT32F 0x8CAC
+#define GL_DEPTH_FUNC 0x0B74
+#define GL_DEPTH_RANGE 0x0B70
+#define GL_DEPTH_SCALE 0x0D1E
+#define GL_DEPTH_STENCIL 0x84F9
+#define GL_DEPTH_STENCIL_ATTACHMENT 0x821A
+#define GL_DEPTH_TEST 0x0B71
+#define GL_DEPTH_TEXTURE_MODE 0x884B
+#define GL_DEPTH_WRITEMASK 0x0B72
+#define GL_DIFFUSE 0x1201
+#define GL_DITHER 0x0BD0
+#define GL_DOMAIN 0x0A02
+#define GL_DONT_CARE 0x1100
+#define GL_DOT3_RGB 0x86AE
+#define GL_DOT3_RGBA 0x86AF
+#define GL_DOUBLE 0x140A
+#define GL_DOUBLEBUFFER 0x0C32
+#define GL_DRAW_BUFFER 0x0C01
+#define GL_DRAW_BUFFER0 0x8825
+#define GL_DRAW_BUFFER1 0x8826
+#define GL_DRAW_BUFFER10 0x882F
+#define GL_DRAW_BUFFER11 0x8830
+#define GL_DRAW_BUFFER12 0x8831
+#define GL_DRAW_BUFFER13 0x8832
+#define GL_DRAW_BUFFER14 0x8833
+#define GL_DRAW_BUFFER15 0x8834
+#define GL_DRAW_BUFFER2 0x8827
+#define GL_DRAW_BUFFER3 0x8828
+#define GL_DRAW_BUFFER4 0x8829
+#define GL_DRAW_BUFFER5 0x882A
+#define GL_DRAW_BUFFER6 0x882B
+#define GL_DRAW_BUFFER7 0x882C
+#define GL_DRAW_BUFFER8 0x882D
+#define GL_DRAW_BUFFER9 0x882E
+#define GL_DRAW_FRAMEBUFFER 0x8CA9
+#define GL_DRAW_FRAMEBUFFER_BINDING 0x8CA6
+#define GL_DRAW_FRAMEBUFFER_BINDING_EXT 0x8CA6
+#define GL_DRAW_FRAMEBUFFER_EXT 0x8CA9
+#define GL_DRAW_PIXEL_TOKEN 0x0705
+#define GL_DST_ALPHA 0x0304
+#define GL_DST_COLOR 0x0306
+#define GL_DYNAMIC_COPY 0x88EA
+#define GL_DYNAMIC_DRAW 0x88E8
+#define GL_DYNAMIC_READ 0x88E9
+#define GL_EDGE_FLAG 0x0B43
+#define GL_EDGE_FLAG_ARRAY 0x8079
+#define GL_EDGE_FLAG_ARRAY_BUFFER_BINDING 0x889B
+#define GL_EDGE_FLAG_ARRAY_POINTER 0x8093
+#define GL_EDGE_FLAG_ARRAY_STRIDE 0x808C
+#define GL_ELEMENT_ARRAY_BUFFER 0x8893
+#define GL_ELEMENT_ARRAY_BUFFER_BINDING 0x8895
+#define GL_EMISSION 0x1600
+#define GL_ENABLE_BIT 0x00002000
+#define GL_EQUAL 0x0202
+#define GL_EQUIV 0x1509
+#define GL_EVAL_BIT 0x00010000
+#define GL_EXP 0x0800
+#define GL_EXP2 0x0801
+#define GL_EXTENSIONS 0x1F03
+#define GL_EYE_LINEAR 0x2400
+#define GL_EYE_PLANE 0x2502
+#define GL_FALSE 0
+#define GL_FASTEST 0x1101
+#define GL_FEEDBACK 0x1C01
+#define GL_FEEDBACK_BUFFER_POINTER 0x0DF0
+#define GL_FEEDBACK_BUFFER_SIZE 0x0DF1
+#define GL_FEEDBACK_BUFFER_TYPE 0x0DF2
+#define GL_FILL 0x1B02
+#define GL_FIRST_VERTEX_CONVENTION 0x8E4D
+#define GL_FIXED_ONLY 0x891D
+#define GL_FLAT 0x1D00
+#define GL_FLOAT 0x1406
+#define GL_FLOAT_32_UNSIGNED_INT_24_8_REV 0x8DAD
+#define GL_FLOAT_MAT2 0x8B5A
+#define GL_FLOAT_MAT2x3 0x8B65
+#define GL_FLOAT_MAT2x4 0x8B66
+#define GL_FLOAT_MAT3 0x8B5B
+#define GL_FLOAT_MAT3x2 0x8B67
+#define GL_FLOAT_MAT3x4 0x8B68
+#define GL_FLOAT_MAT4 0x8B5C
+#define GL_FLOAT_MAT4x2 0x8B69
+#define GL_FLOAT_MAT4x3 0x8B6A
+#define GL_FLOAT_VEC2 0x8B50
+#define GL_FLOAT_VEC3 0x8B51
+#define GL_FLOAT_VEC4 0x8B52
+#define GL_FOG 0x0B60
+#define GL_FOG_BIT 0x00000080
+#define GL_FOG_COLOR 0x0B66
+#define GL_FOG_COORD 0x8451
+#define GL_FOG_COORDINATE 0x8451
+#define GL_FOG_COORDINATE_ARRAY 0x8457
+#define GL_FOG_COORDINATE_ARRAY_BUFFER_BINDING 0x889D
+#define GL_FOG_COORDINATE_ARRAY_POINTER 0x8456
+#define GL_FOG_COORDINATE_ARRAY_STRIDE 0x8455
+#define GL_FOG_COORDINATE_ARRAY_TYPE 0x8454
+#define GL_FOG_COORDINATE_SOURCE 0x8450
+#define GL_FOG_COORD_ARRAY 0x8457
+#define GL_FOG_COORD_ARRAY_BUFFER_BINDING 0x889D
+#define GL_FOG_COORD_ARRAY_POINTER 0x8456
+#define GL_FOG_COORD_ARRAY_STRIDE 0x8455
+#define GL_FOG_COORD_ARRAY_TYPE 0x8454
+#define GL_FOG_COORD_SRC 0x8450
+#define GL_FOG_DENSITY 0x0B62
+#define GL_FOG_END 0x0B64
+#define GL_FOG_HINT 0x0C54
+#define GL_FOG_INDEX 0x0B61
+#define GL_FOG_MODE 0x0B65
+#define GL_FOG_START 0x0B63
+#define GL_FRAGMENT_DEPTH 0x8452
+#define GL_FRAGMENT_SHADER 0x8B30
+#define GL_FRAGMENT_SHADER_DERIVATIVE_HINT 0x8B8B
+#define GL_FRAMEBUFFER 0x8D40
+#define GL_FRAMEBUFFER_ATTACHMENT_ALPHA_SIZE 0x8215
+#define GL_FRAMEBUFFER_ATTACHMENT_BLUE_SIZE 0x8214
+#define GL_FRAMEBUFFER_ATTACHMENT_COLOR_ENCODING 0x8210
+#define GL_FRAMEBUFFER_ATTACHMENT_COMPONENT_TYPE 0x8211
+#define GL_FRAMEBUFFER_ATTACHMENT_DEPTH_SIZE 0x8216
+#define GL_FRAMEBUFFER_ATTACHMENT_GREEN_SIZE 0x8213
+#define GL_FRAMEBUFFER_ATTACHMENT_LAYERED 0x8DA7
+#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME 0x8CD1
+#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME_EXT 0x8CD1
+#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE 0x8CD0
+#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE_EXT 0x8CD0
+#define GL_FRAMEBUFFER_ATTACHMENT_RED_SIZE 0x8212
+#define GL_FRAMEBUFFER_ATTACHMENT_STENCIL_SIZE 0x8217
+#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_3D_ZOFFSET_EXT 0x8CD4
+#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_BASE_VIEW_INDEX_OVR 0x9632
+#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE 0x8CD3
+#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE_EXT 0x8CD3
+#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LAYER 0x8CD4
+#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL 0x8CD2
+#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL_EXT 0x8CD2
+#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_NUM_VIEWS_OVR 0x9630
+#define GL_FRAMEBUFFER_BINDING 0x8CA6
+#define GL_FRAMEBUFFER_BINDING_EXT 0x8CA6
+#define GL_FRAMEBUFFER_COMPLETE 0x8CD5
+#define GL_FRAMEBUFFER_COMPLETE_EXT 0x8CD5
+#define GL_FRAMEBUFFER_DEFAULT 0x8218
+#define GL_FRAMEBUFFER_EXT 0x8D40
+#define GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT 0x8CD6
+#define GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT_EXT 0x8CD6
+#define GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS_EXT 0x8CD9
+#define GL_FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER 0x8CDB
+#define GL_FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER_EXT 0x8CDB
+#define GL_FRAMEBUFFER_INCOMPLETE_FORMATS_EXT 0x8CDA
+#define GL_FRAMEBUFFER_INCOMPLETE_LAYER_TARGETS 0x8DA8
+#define GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT 0x8CD7
+#define GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT_EXT 0x8CD7
+#define GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE 0x8D56
+#define GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE_EXT 0x8D56
+#define GL_FRAMEBUFFER_INCOMPLETE_READ_BUFFER 0x8CDC
+#define GL_FRAMEBUFFER_INCOMPLETE_READ_BUFFER_EXT 0x8CDC
+#define GL_FRAMEBUFFER_INCOMPLETE_VIEW_TARGETS_OVR 0x9633
+#define GL_FRAMEBUFFER_SRGB 0x8DB9
+#define GL_FRAMEBUFFER_UNDEFINED 0x8219
+#define GL_FRAMEBUFFER_UNSUPPORTED 0x8CDD
+#define GL_FRAMEBUFFER_UNSUPPORTED_EXT 0x8CDD
+#define GL_FRONT 0x0404
+#define GL_FRONT_AND_BACK 0x0408
+#define GL_FRONT_FACE 0x0B46
+#define GL_FRONT_LEFT 0x0400
+#define GL_FRONT_RIGHT 0x0401
+#define GL_FUNC_ADD 0x8006
+#define GL_FUNC_REVERSE_SUBTRACT 0x800B
+#define GL_FUNC_SUBTRACT 0x800A
+#define GL_GENERATE_MIPMAP 0x8191
+#define GL_GENERATE_MIPMAP_HINT 0x8192
+#define GL_GEOMETRY_INPUT_TYPE 0x8917
+#define GL_GEOMETRY_OUTPUT_TYPE 0x8918
+#define GL_GEOMETRY_SHADER 0x8DD9
+#define GL_GEOMETRY_VERTICES_OUT 0x8916
+#define GL_GEQUAL 0x0206
+#define GL_GREATER 0x0204
+#define GL_GREEN 0x1904
+#define GL_GREEN_BIAS 0x0D19
+#define GL_GREEN_BITS 0x0D53
+#define GL_GREEN_INTEGER 0x8D95
+#define GL_GREEN_SCALE 0x0D18
+#define GL_HALF_FLOAT 0x140B
+#define GL_HINT_BIT 0x00008000
+#define GL_INCR 0x1E02
+#define GL_INCR_WRAP 0x8507
+#define GL_INDEX 0x8222
+#define GL_INDEX_ARRAY 0x8077
+#define GL_INDEX_ARRAY_BUFFER_BINDING 0x8899
+#define GL_INDEX_ARRAY_POINTER 0x8091
+#define GL_INDEX_ARRAY_STRIDE 0x8086
+#define GL_INDEX_ARRAY_TYPE 0x8085
+#define GL_INDEX_BITS 0x0D51
+#define GL_INDEX_CLEAR_VALUE 0x0C20
+#define GL_INDEX_LOGIC_OP 0x0BF1
+#define GL_INDEX_MODE 0x0C30
+#define GL_INDEX_OFFSET 0x0D13
+#define GL_INDEX_SHIFT 0x0D12
+#define GL_INDEX_WRITEMASK 0x0C21
+#define GL_INFO_LOG_LENGTH 0x8B84
+#define GL_INT 0x1404
+#define GL_INTENSITY 0x8049
+#define GL_INTENSITY12 0x804C
+#define GL_INTENSITY16 0x804D
+#define GL_INTENSITY4 0x804A
+#define GL_INTENSITY8 0x804B
+#define GL_INTERLEAVED_ATTRIBS 0x8C8C
+#define GL_INTERPOLATE 0x8575
+#define GL_INT_2_10_10_10_REV 0x8D9F
+#define GL_INT_SAMPLER_1D 0x8DC9
+#define GL_INT_SAMPLER_1D_ARRAY 0x8DCE
+#define GL_INT_SAMPLER_2D 0x8DCA
+#define GL_INT_SAMPLER_2D_ARRAY 0x8DCF
+#define GL_INT_SAMPLER_2D_MULTISAMPLE 0x9109
+#define GL_INT_SAMPLER_2D_MULTISAMPLE_ARRAY 0x910C
+#define GL_INT_SAMPLER_2D_RECT 0x8DCD
+#define GL_INT_SAMPLER_3D 0x8DCB
+#define GL_INT_SAMPLER_BUFFER 0x8DD0
+#define GL_INT_SAMPLER_CUBE 0x8DCC
+#define GL_INT_VEC2 0x8B53
+#define GL_INT_VEC3 0x8B54
+#define GL_INT_VEC4 0x8B55
+#define GL_INVALID_ENUM 0x0500
+#define GL_INVALID_FRAMEBUFFER_OPERATION 0x0506
+#define GL_INVALID_FRAMEBUFFER_OPERATION_EXT 0x0506
+#define GL_INVALID_INDEX 0xFFFFFFFFu
+#define GL_INVALID_OPERATION 0x0502
+#define GL_INVALID_VALUE 0x0501
+#define GL_INVERT 0x150A
+#define GL_KEEP 0x1E00
+#define GL_LAST_VERTEX_CONVENTION 0x8E4E
+#define GL_LEFT 0x0406
+#define GL_LEQUAL 0x0203
+#define GL_LESS 0x0201
+#define GL_LIGHT0 0x4000
+#define GL_LIGHT1 0x4001
+#define GL_LIGHT2 0x4002
+#define GL_LIGHT3 0x4003
+#define GL_LIGHT4 0x4004
+#define GL_LIGHT5 0x4005
+#define GL_LIGHT6 0x4006
+#define GL_LIGHT7 0x4007
+#define GL_LIGHTING 0x0B50
+#define GL_LIGHTING_BIT 0x00000040
+#define GL_LIGHT_MODEL_AMBIENT 0x0B53
+#define GL_LIGHT_MODEL_COLOR_CONTROL 0x81F8
+#define GL_LIGHT_MODEL_LOCAL_VIEWER 0x0B51
+#define GL_LIGHT_MODEL_TWO_SIDE 0x0B52
+#define GL_LINE 0x1B01
+#define GL_LINEAR 0x2601
+#define GL_LINEAR_ATTENUATION 0x1208
+#define GL_LINEAR_MIPMAP_LINEAR 0x2703
+#define GL_LINEAR_MIPMAP_NEAREST 0x2701
+#define GL_LINES 0x0001
+#define GL_LINES_ADJACENCY 0x000A
+#define GL_LINE_BIT 0x00000004
+#define GL_LINE_LOOP 0x0002
+#define GL_LINE_RESET_TOKEN 0x0707
+#define GL_LINE_SMOOTH 0x0B20
+#define GL_LINE_SMOOTH_HINT 0x0C52
+#define GL_LINE_STIPPLE 0x0B24
+#define GL_LINE_STIPPLE_PATTERN 0x0B25
+#define GL_LINE_STIPPLE_REPEAT 0x0B26
+#define GL_LINE_STRIP 0x0003
+#define GL_LINE_STRIP_ADJACENCY 0x000B
+#define GL_LINE_TOKEN 0x0702
+#define GL_LINE_WIDTH 0x0B21
+#define GL_LINE_WIDTH_GRANULARITY 0x0B23
+#define GL_LINE_WIDTH_RANGE 0x0B22
+#define GL_LINK_STATUS 0x8B82
+#define GL_LIST_BASE 0x0B32
+#define GL_LIST_BIT 0x00020000
+#define GL_LIST_INDEX 0x0B33
+#define GL_LIST_MODE 0x0B30
+#define GL_LOAD 0x0101
+#define GL_LOGIC_OP 0x0BF1
+#define GL_LOGIC_OP_MODE 0x0BF0
+#define GL_LOWER_LEFT 0x8CA1
+#define GL_LUMINANCE 0x1909
+#define GL_LUMINANCE12 0x8041
+#define GL_LUMINANCE12_ALPHA12 0x8047
+#define GL_LUMINANCE12_ALPHA4 0x8046
+#define GL_LUMINANCE16 0x8042
+#define GL_LUMINANCE16_ALPHA16 0x8048
+#define GL_LUMINANCE4 0x803F
+#define GL_LUMINANCE4_ALPHA4 0x8043
+#define GL_LUMINANCE6_ALPHA2 0x8044
+#define GL_LUMINANCE8 0x8040
+#define GL_LUMINANCE8_ALPHA8 0x8045
+#define GL_LUMINANCE_ALPHA 0x190A
+#define GL_MAJOR_VERSION 0x821B
+#define GL_MAP1_COLOR_4 0x0D90
+#define GL_MAP1_GRID_DOMAIN 0x0DD0
+#define GL_MAP1_GRID_SEGMENTS 0x0DD1
+#define GL_MAP1_INDEX 0x0D91
+#define GL_MAP1_NORMAL 0x0D92
+#define GL_MAP1_TEXTURE_COORD_1 0x0D93
+#define GL_MAP1_TEXTURE_COORD_2 0x0D94
+#define GL_MAP1_TEXTURE_COORD_3 0x0D95
+#define GL_MAP1_TEXTURE_COORD_4 0x0D96
+#define GL_MAP1_VERTEX_3 0x0D97
+#define GL_MAP1_VERTEX_4 0x0D98
+#define GL_MAP2_COLOR_4 0x0DB0
+#define GL_MAP2_GRID_DOMAIN 0x0DD2
+#define GL_MAP2_GRID_SEGMENTS 0x0DD3
+#define GL_MAP2_INDEX 0x0DB1
+#define GL_MAP2_NORMAL 0x0DB2
+#define GL_MAP2_TEXTURE_COORD_1 0x0DB3
+#define GL_MAP2_TEXTURE_COORD_2 0x0DB4
+#define GL_MAP2_TEXTURE_COORD_3 0x0DB5
+#define GL_MAP2_TEXTURE_COORD_4 0x0DB6
+#define GL_MAP2_VERTEX_3 0x0DB7
+#define GL_MAP2_VERTEX_4 0x0DB8
+#define GL_MAP_COLOR 0x0D10
+#define GL_MAP_FLUSH_EXPLICIT_BIT 0x0010
+#define GL_MAP_INVALIDATE_BUFFER_BIT 0x0008
+#define GL_MAP_INVALIDATE_RANGE_BIT 0x0004
+#define GL_MAP_READ_BIT 0x0001
+#define GL_MAP_STENCIL 0x0D11
+#define GL_MAP_UNSYNCHRONIZED_BIT 0x0020
+#define GL_MAP_WRITE_BIT 0x0002
+#define GL_MATRIX_MODE 0x0BA0
+#define GL_MAX 0x8008
+#define GL_MAX_3D_TEXTURE_SIZE 0x8073
+#define GL_MAX_ARRAY_TEXTURE_LAYERS 0x88FF
+#define GL_MAX_ATTRIB_STACK_DEPTH 0x0D35
+#define GL_MAX_CLIENT_ATTRIB_STACK_DEPTH 0x0D3B
+#define GL_MAX_CLIP_DISTANCES 0x0D32
+#define GL_MAX_CLIP_PLANES 0x0D32
+#define GL_MAX_COLOR_ATTACHMENTS 0x8CDF
+#define GL_MAX_COLOR_ATTACHMENTS_EXT 0x8CDF
+#define GL_MAX_COLOR_TEXTURE_SAMPLES 0x910E
+#define GL_MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS 0x8A33
+#define GL_MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS 0x8A32
+#define GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS 0x8B4D
+#define GL_MAX_COMBINED_UNIFORM_BLOCKS 0x8A2E
+#define GL_MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS 0x8A31
+#define GL_MAX_CUBE_MAP_TEXTURE_SIZE 0x851C
+#define GL_MAX_DEBUG_LOGGED_MESSAGES_ARB 0x9144
+#define GL_MAX_DEBUG_MESSAGE_LENGTH_ARB 0x9143
+#define GL_MAX_DEPTH_TEXTURE_SAMPLES 0x910F
+#define GL_MAX_DRAW_BUFFERS 0x8824
+#define GL_MAX_DUAL_SOURCE_DRAW_BUFFERS 0x88FC
+#define GL_MAX_ELEMENTS_INDICES 0x80E9
+#define GL_MAX_ELEMENTS_VERTICES 0x80E8
+#define GL_MAX_EVAL_ORDER 0x0D30
+#define GL_MAX_FRAGMENT_INPUT_COMPONENTS 0x9125
+#define GL_MAX_FRAGMENT_UNIFORM_BLOCKS 0x8A2D
+#define GL_MAX_FRAGMENT_UNIFORM_COMPONENTS 0x8B49
+#define GL_MAX_GEOMETRY_INPUT_COMPONENTS 0x9123
+#define GL_MAX_GEOMETRY_OUTPUT_COMPONENTS 0x9124
+#define GL_MAX_GEOMETRY_OUTPUT_VERTICES 0x8DE0
+#define GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS 0x8C29
+#define GL_MAX_GEOMETRY_TOTAL_OUTPUT_COMPONENTS 0x8DE1
+#define GL_MAX_GEOMETRY_UNIFORM_BLOCKS 0x8A2C
+#define GL_MAX_GEOMETRY_UNIFORM_COMPONENTS 0x8DDF
+#define GL_MAX_INTEGER_SAMPLES 0x9110
+#define GL_MAX_LIGHTS 0x0D31
+#define GL_MAX_LIST_NESTING 0x0B31
+#define GL_MAX_MODELVIEW_STACK_DEPTH 0x0D36
+#define GL_MAX_NAME_STACK_DEPTH 0x0D37
+#define GL_MAX_PIXEL_MAP_TABLE 0x0D34
+#define GL_MAX_PROGRAM_TEXEL_OFFSET 0x8905
+#define GL_MAX_PROJECTION_STACK_DEPTH 0x0D38
+#define GL_MAX_RECTANGLE_TEXTURE_SIZE 0x84F8
+#define GL_MAX_RENDERBUFFER_SIZE 0x84E8
+#define GL_MAX_RENDERBUFFER_SIZE_EXT 0x84E8
+#define GL_MAX_SAMPLES 0x8D57
+#define GL_MAX_SAMPLES_EXT 0x8D57
+#define GL_MAX_SAMPLE_MASK_WORDS 0x8E59
+#define GL_MAX_SERVER_WAIT_TIMEOUT 0x9111
+#define GL_MAX_TEXTURE_BUFFER_SIZE 0x8C2B
+#define GL_MAX_TEXTURE_COORDS 0x8871
+#define GL_MAX_TEXTURE_IMAGE_UNITS 0x8872
+#define GL_MAX_TEXTURE_LOD_BIAS 0x84FD
+#define GL_MAX_TEXTURE_SIZE 0x0D33
+#define GL_MAX_TEXTURE_STACK_DEPTH 0x0D39
+#define GL_MAX_TEXTURE_UNITS 0x84E2
+#define GL_MAX_TRANSFORM_FEEDBACK_INTERLEAVED_COMPONENTS 0x8C8A
+#define GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_ATTRIBS 0x8C8B
+#define GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_COMPONENTS 0x8C80
+#define GL_MAX_UNIFORM_BLOCK_SIZE 0x8A30
+#define GL_MAX_UNIFORM_BUFFER_BINDINGS 0x8A2F
+#define GL_MAX_VARYING_COMPONENTS 0x8B4B
+#define GL_MAX_VARYING_FLOATS 0x8B4B
+#define GL_MAX_VERTEX_ATTRIBS 0x8869
+#define GL_MAX_VERTEX_OUTPUT_COMPONENTS 0x9122
+#define GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS 0x8B4C
+#define GL_MAX_VERTEX_UNIFORM_BLOCKS 0x8A2B
+#define GL_MAX_VERTEX_UNIFORM_COMPONENTS 0x8B4A
+#define GL_MAX_VIEWPORT_DIMS 0x0D3A
+#define GL_MAX_VIEWS_OVR 0x9631
+#define GL_MIN 0x8007
+#define GL_MINOR_VERSION 0x821C
+#define GL_MIN_PROGRAM_TEXEL_OFFSET 0x8904
+#define GL_MIRRORED_REPEAT 0x8370
+#define GL_MODELVIEW 0x1700
+#define GL_MODELVIEW_MATRIX 0x0BA6
+#define GL_MODELVIEW_STACK_DEPTH 0x0BA3
+#define GL_MODULATE 0x2100
+#define GL_MULT 0x0103
+#define GL_MULTISAMPLE 0x809D
+#define GL_MULTISAMPLE_BIT 0x20000000
+#define GL_N3F_V3F 0x2A25
+#define GL_NAME_STACK_DEPTH 0x0D70
+#define GL_NAND 0x150E
+#define GL_NEAREST 0x2600
+#define GL_NEAREST_MIPMAP_LINEAR 0x2702
+#define GL_NEAREST_MIPMAP_NEAREST 0x2700
+#define GL_NEVER 0x0200
+#define GL_NICEST 0x1102
+#define GL_NONE 0
+#define GL_NOOP 0x1505
+#define GL_NOR 0x1508
+#define GL_NORMALIZE 0x0BA1
+#define GL_NORMAL_ARRAY 0x8075
+#define GL_NORMAL_ARRAY_BUFFER_BINDING 0x8897
+#define GL_NORMAL_ARRAY_POINTER 0x808F
+#define GL_NORMAL_ARRAY_STRIDE 0x807F
+#define GL_NORMAL_ARRAY_TYPE 0x807E
+#define GL_NORMAL_MAP 0x8511
+#define GL_NOTEQUAL 0x0205
+#define GL_NO_ERROR 0
+#define GL_NUM_COMPRESSED_TEXTURE_FORMATS 0x86A2
+#define GL_NUM_EXTENSIONS 0x821D
+#define GL_OBJECT_LINEAR 0x2401
+#define GL_OBJECT_PLANE 0x2501
+#define GL_OBJECT_TYPE 0x9112
+#define GL_ONE 1
+#define GL_ONE_MINUS_CONSTANT_ALPHA 0x8004
+#define GL_ONE_MINUS_CONSTANT_COLOR 0x8002
+#define GL_ONE_MINUS_DST_ALPHA 0x0305
+#define GL_ONE_MINUS_DST_COLOR 0x0307
+#define GL_ONE_MINUS_SRC1_ALPHA 0x88FB
+#define GL_ONE_MINUS_SRC1_COLOR 0x88FA
+#define GL_ONE_MINUS_SRC_ALPHA 0x0303
+#define GL_ONE_MINUS_SRC_COLOR 0x0301
+#define GL_OPERAND0_ALPHA 0x8598
+#define GL_OPERAND0_RGB 0x8590
+#define GL_OPERAND1_ALPHA 0x8599
+#define GL_OPERAND1_RGB 0x8591
+#define GL_OPERAND2_ALPHA 0x859A
+#define GL_OPERAND2_RGB 0x8592
+#define GL_OR 0x1507
+#define GL_ORDER 0x0A01
+#define GL_OR_INVERTED 0x150D
+#define GL_OR_REVERSE 0x150B
+#define GL_OUT_OF_MEMORY 0x0505
+#define GL_PACK_ALIGNMENT 0x0D05
+#define GL_PACK_IMAGE_HEIGHT 0x806C
+#define GL_PACK_LSB_FIRST 0x0D01
+#define GL_PACK_ROW_LENGTH 0x0D02
+#define GL_PACK_SKIP_IMAGES 0x806B
+#define GL_PACK_SKIP_PIXELS 0x0D04
+#define GL_PACK_SKIP_ROWS 0x0D03
+#define GL_PACK_SWAP_BYTES 0x0D00
+#define GL_PASS_THROUGH_TOKEN 0x0700
+#define GL_PERSPECTIVE_CORRECTION_HINT 0x0C50
+#define GL_PIXEL_MAP_A_TO_A 0x0C79
+#define GL_PIXEL_MAP_A_TO_A_SIZE 0x0CB9
+#define GL_PIXEL_MAP_B_TO_B 0x0C78
+#define GL_PIXEL_MAP_B_TO_B_SIZE 0x0CB8
+#define GL_PIXEL_MAP_G_TO_G 0x0C77
+#define GL_PIXEL_MAP_G_TO_G_SIZE 0x0CB7
+#define GL_PIXEL_MAP_I_TO_A 0x0C75
+#define GL_PIXEL_MAP_I_TO_A_SIZE 0x0CB5
+#define GL_PIXEL_MAP_I_TO_B 0x0C74
+#define GL_PIXEL_MAP_I_TO_B_SIZE 0x0CB4
+#define GL_PIXEL_MAP_I_TO_G 0x0C73
+#define GL_PIXEL_MAP_I_TO_G_SIZE 0x0CB3
+#define GL_PIXEL_MAP_I_TO_I 0x0C70
+#define GL_PIXEL_MAP_I_TO_I_SIZE 0x0CB0
+#define GL_PIXEL_MAP_I_TO_R 0x0C72
+#define GL_PIXEL_MAP_I_TO_R_SIZE 0x0CB2
+#define GL_PIXEL_MAP_R_TO_R 0x0C76
+#define GL_PIXEL_MAP_R_TO_R_SIZE 0x0CB6
+#define GL_PIXEL_MAP_S_TO_S 0x0C71
+#define GL_PIXEL_MAP_S_TO_S_SIZE 0x0CB1
+#define GL_PIXEL_MODE_BIT 0x00000020
+#define GL_PIXEL_PACK_BUFFER 0x88EB
+#define GL_PIXEL_PACK_BUFFER_BINDING 0x88ED
+#define GL_PIXEL_UNPACK_BUFFER 0x88EC
+#define GL_PIXEL_UNPACK_BUFFER_BINDING 0x88EF
+#define GL_POINT 0x1B00
+#define GL_POINTS 0x0000
+#define GL_POINT_BIT 0x00000002
+#define GL_POINT_DISTANCE_ATTENUATION 0x8129
+#define GL_POINT_FADE_THRESHOLD_SIZE 0x8128
+#define GL_POINT_SIZE 0x0B11
+#define GL_POINT_SIZE_GRANULARITY 0x0B13
+#define GL_POINT_SIZE_MAX 0x8127
+#define GL_POINT_SIZE_MIN 0x8126
+#define GL_POINT_SIZE_RANGE 0x0B12
+#define GL_POINT_SMOOTH 0x0B10
+#define GL_POINT_SMOOTH_HINT 0x0C51
+#define GL_POINT_SPRITE 0x8861
+#define GL_POINT_SPRITE_COORD_ORIGIN 0x8CA0
+#define GL_POINT_TOKEN 0x0701
+#define GL_POLYGON 0x0009
+#define GL_POLYGON_BIT 0x00000008
+#define GL_POLYGON_MODE 0x0B40
+#define GL_POLYGON_OFFSET_FACTOR 0x8038
+#define GL_POLYGON_OFFSET_FILL 0x8037
+#define GL_POLYGON_OFFSET_LINE 0x2A02
+#define GL_POLYGON_OFFSET_POINT 0x2A01
+#define GL_POLYGON_OFFSET_UNITS 0x2A00
+#define GL_POLYGON_SMOOTH 0x0B41
+#define GL_POLYGON_SMOOTH_HINT 0x0C53
+#define GL_POLYGON_STIPPLE 0x0B42
+#define GL_POLYGON_STIPPLE_BIT 0x00000010
+#define GL_POLYGON_TOKEN 0x0703
+#define GL_POSITION 0x1203
+#define GL_PREVIOUS 0x8578
+#define GL_PRIMARY_COLOR 0x8577
+#define GL_PRIMITIVES_GENERATED 0x8C87
+#define GL_PRIMITIVE_RESTART 0x8F9D
+#define GL_PRIMITIVE_RESTART_INDEX 0x8F9E
+#define GL_PROGRAM_POINT_SIZE 0x8642
+#define GL_PROJECTION 0x1701
+#define GL_PROJECTION_MATRIX 0x0BA7
+#define GL_PROJECTION_STACK_DEPTH 0x0BA4
+#define GL_PROVOKING_VERTEX 0x8E4F
+#define GL_PROXY_TEXTURE_1D 0x8063
+#define GL_PROXY_TEXTURE_1D_ARRAY 0x8C19
+#define GL_PROXY_TEXTURE_2D 0x8064
+#define GL_PROXY_TEXTURE_2D_ARRAY 0x8C1B
+#define GL_PROXY_TEXTURE_2D_MULTISAMPLE 0x9101
+#define GL_PROXY_TEXTURE_2D_MULTISAMPLE_ARRAY 0x9103
+#define GL_PROXY_TEXTURE_3D 0x8070
+#define GL_PROXY_TEXTURE_CUBE_MAP 0x851B
+#define GL_PROXY_TEXTURE_RECTANGLE 0x84F7
+#define GL_Q 0x2003
+#define GL_QUADRATIC_ATTENUATION 0x1209
+#define GL_QUADS 0x0007
+#define GL_QUADS_FOLLOW_PROVOKING_VERTEX_CONVENTION 0x8E4C
+#define GL_QUAD_STRIP 0x0008
+#define GL_QUERY_BY_REGION_NO_WAIT 0x8E16
+#define GL_QUERY_BY_REGION_WAIT 0x8E15
+#define GL_QUERY_COUNTER_BITS 0x8864
+#define GL_QUERY_NO_WAIT 0x8E14
+#define GL_QUERY_RESULT 0x8866
+#define GL_QUERY_RESULT_AVAILABLE 0x8867
+#define GL_QUERY_WAIT 0x8E13
+#define GL_R 0x2002
+#define GL_R11F_G11F_B10F 0x8C3A
+#define GL_R16 0x822A
+#define GL_R16F 0x822D
+#define GL_R16I 0x8233
+#define GL_R16UI 0x8234
+#define GL_R16_SNORM 0x8F98
+#define GL_R32F 0x822E
+#define GL_R32I 0x8235
+#define GL_R32UI 0x8236
+#define GL_R3_G3_B2 0x2A10
+#define GL_R8 0x8229
+#define GL_R8I 0x8231
+#define GL_R8UI 0x8232
+#define GL_R8_SNORM 0x8F94
+#define GL_RASTERIZER_DISCARD 0x8C89
+#define GL_READ_BUFFER 0x0C02
+#define GL_READ_FRAMEBUFFER 0x8CA8
+#define GL_READ_FRAMEBUFFER_BINDING 0x8CAA
+#define GL_READ_FRAMEBUFFER_BINDING_EXT 0x8CAA
+#define GL_READ_FRAMEBUFFER_EXT 0x8CA8
+#define GL_READ_ONLY 0x88B8
+#define GL_READ_WRITE 0x88BA
+#define GL_RED 0x1903
+#define GL_RED_BIAS 0x0D15
+#define GL_RED_BITS 0x0D52
+#define GL_RED_INTEGER 0x8D94
+#define GL_RED_SCALE 0x0D14
+#define GL_REFLECTION_MAP 0x8512
+#define GL_RENDER 0x1C00
+#define GL_RENDERBUFFER 0x8D41
+#define GL_RENDERBUFFER_ALPHA_SIZE 0x8D53
+#define GL_RENDERBUFFER_ALPHA_SIZE_EXT 0x8D53
+#define GL_RENDERBUFFER_BINDING 0x8CA7
+#define GL_RENDERBUFFER_BINDING_EXT 0x8CA7
+#define GL_RENDERBUFFER_BLUE_SIZE 0x8D52
+#define GL_RENDERBUFFER_BLUE_SIZE_EXT 0x8D52
+#define GL_RENDERBUFFER_DEPTH_SIZE 0x8D54
+#define GL_RENDERBUFFER_DEPTH_SIZE_EXT 0x8D54
+#define GL_RENDERBUFFER_EXT 0x8D41
+#define GL_RENDERBUFFER_GREEN_SIZE 0x8D51
+#define GL_RENDERBUFFER_GREEN_SIZE_EXT 0x8D51
+#define GL_RENDERBUFFER_HEIGHT 0x8D43
+#define GL_RENDERBUFFER_HEIGHT_EXT 0x8D43
+#define GL_RENDERBUFFER_INTERNAL_FORMAT 0x8D44
+#define GL_RENDERBUFFER_INTERNAL_FORMAT_EXT 0x8D44
+#define GL_RENDERBUFFER_RED_SIZE 0x8D50
+#define GL_RENDERBUFFER_RED_SIZE_EXT 0x8D50
+#define GL_RENDERBUFFER_SAMPLES 0x8CAB
+#define GL_RENDERBUFFER_SAMPLES_EXT 0x8CAB
+#define GL_RENDERBUFFER_STENCIL_SIZE 0x8D55
+#define GL_RENDERBUFFER_STENCIL_SIZE_EXT 0x8D55
+#define GL_RENDERBUFFER_WIDTH 0x8D42
+#define GL_RENDERBUFFER_WIDTH_EXT 0x8D42
+#define GL_RENDERER 0x1F01
+#define GL_RENDER_MODE 0x0C40
+#define GL_REPEAT 0x2901
+#define GL_REPLACE 0x1E01
+#define GL_RESCALE_NORMAL 0x803A
+#define GL_RETURN 0x0102
+#define GL_RG 0x8227
+#define GL_RG16 0x822C
+#define GL_RG16F 0x822F
+#define GL_RG16I 0x8239
+#define GL_RG16UI 0x823A
+#define GL_RG16_SNORM 0x8F99
+#define GL_RG32F 0x8230
+#define GL_RG32I 0x823B
+#define GL_RG32UI 0x823C
+#define GL_RG8 0x822B
+#define GL_RG8I 0x8237
+#define GL_RG8UI 0x8238
+#define GL_RG8_SNORM 0x8F95
+#define GL_RGB 0x1907
+#define GL_RGB10 0x8052
+#define GL_RGB10_A2 0x8059
+#define GL_RGB10_A2UI 0x906F
+#define GL_RGB12 0x8053
+#define GL_RGB16 0x8054
+#define GL_RGB16F 0x881B
+#define GL_RGB16I 0x8D89
+#define GL_RGB16UI 0x8D77
+#define GL_RGB16_SNORM 0x8F9A
+#define GL_RGB32F 0x8815
+#define GL_RGB32I 0x8D83
+#define GL_RGB32UI 0x8D71
+#define GL_RGB4 0x804F
+#define GL_RGB5 0x8050
+#define GL_RGB5_A1 0x8057
+#define GL_RGB8 0x8051
+#define GL_RGB8I 0x8D8F
+#define GL_RGB8UI 0x8D7D
+#define GL_RGB8_SNORM 0x8F96
+#define GL_RGB9_E5 0x8C3D
+#define GL_RGBA 0x1908
+#define GL_RGBA12 0x805A
+#define GL_RGBA16 0x805B
+#define GL_RGBA16F 0x881A
+#define GL_RGBA16I 0x8D88
+#define GL_RGBA16UI 0x8D76
+#define GL_RGBA16_SNORM 0x8F9B
+#define GL_RGBA2 0x8055
+#define GL_RGBA32F 0x8814
+#define GL_RGBA32I 0x8D82
+#define GL_RGBA32UI 0x8D70
+#define GL_RGBA4 0x8056
+#define GL_RGBA8 0x8058
+#define GL_RGBA8I 0x8D8E
+#define GL_RGBA8UI 0x8D7C
+#define GL_RGBA8_SNORM 0x8F97
+#define GL_RGBA_INTEGER 0x8D99
+#define GL_RGBA_MODE 0x0C31
+#define GL_RGB_INTEGER 0x8D98
+#define GL_RGB_SCALE 0x8573
+#define GL_RG_INTEGER 0x8228
+#define GL_RIGHT 0x0407
+#define GL_S 0x2000
+#define GL_SAMPLER_1D 0x8B5D
+#define GL_SAMPLER_1D_ARRAY 0x8DC0
+#define GL_SAMPLER_1D_ARRAY_SHADOW 0x8DC3
+#define GL_SAMPLER_1D_SHADOW 0x8B61
+#define GL_SAMPLER_2D 0x8B5E
+#define GL_SAMPLER_2D_ARRAY 0x8DC1
+#define GL_SAMPLER_2D_ARRAY_SHADOW 0x8DC4
+#define GL_SAMPLER_2D_MULTISAMPLE 0x9108
+#define GL_SAMPLER_2D_MULTISAMPLE_ARRAY 0x910B
+#define GL_SAMPLER_2D_RECT 0x8B63
+#define GL_SAMPLER_2D_RECT_SHADOW 0x8B64
+#define GL_SAMPLER_2D_SHADOW 0x8B62
+#define GL_SAMPLER_3D 0x8B5F
+#define GL_SAMPLER_BINDING 0x8919
+#define GL_SAMPLER_BUFFER 0x8DC2
+#define GL_SAMPLER_CUBE 0x8B60
+#define GL_SAMPLER_CUBE_SHADOW 0x8DC5
+#define GL_SAMPLES 0x80A9
+#define GL_SAMPLES_PASSED 0x8914
+#define GL_SAMPLE_ALPHA_TO_COVERAGE 0x809E
+#define GL_SAMPLE_ALPHA_TO_ONE 0x809F
+#define GL_SAMPLE_BUFFERS 0x80A8
+#define GL_SAMPLE_COVERAGE 0x80A0
+#define GL_SAMPLE_COVERAGE_INVERT 0x80AB
+#define GL_SAMPLE_COVERAGE_VALUE 0x80AA
+#define GL_SAMPLE_MASK 0x8E51
+#define GL_SAMPLE_MASK_VALUE 0x8E52
+#define GL_SAMPLE_POSITION 0x8E50
+#define GL_SCISSOR_BIT 0x00080000
+#define GL_SCISSOR_BOX 0x0C10
+#define GL_SCISSOR_TEST 0x0C11
+#define GL_SECONDARY_COLOR_ARRAY 0x845E
+#define GL_SECONDARY_COLOR_ARRAY_BUFFER_BINDING 0x889C
+#define GL_SECONDARY_COLOR_ARRAY_POINTER 0x845D
+#define GL_SECONDARY_COLOR_ARRAY_SIZE 0x845A
+#define GL_SECONDARY_COLOR_ARRAY_STRIDE 0x845C
+#define GL_SECONDARY_COLOR_ARRAY_TYPE 0x845B
+#define GL_SELECT 0x1C02
+#define GL_SELECTION_BUFFER_POINTER 0x0DF3
+#define GL_SELECTION_BUFFER_SIZE 0x0DF4
+#define GL_SEPARATE_ATTRIBS 0x8C8D
+#define GL_SEPARATE_SPECULAR_COLOR 0x81FA
+#define GL_SET 0x150F
+#define GL_SHADER_SOURCE_LENGTH 0x8B88
+#define GL_SHADER_TYPE 0x8B4F
+#define GL_SHADE_MODEL 0x0B54
+#define GL_SHADING_LANGUAGE_VERSION 0x8B8C
+#define GL_SHININESS 0x1601
+#define GL_SHORT 0x1402
+#define GL_SIGNALED 0x9119
+#define GL_SIGNED_NORMALIZED 0x8F9C
+#define GL_SINGLE_COLOR 0x81F9
+#define GL_SLUMINANCE 0x8C46
+#define GL_SLUMINANCE8 0x8C47
+#define GL_SLUMINANCE8_ALPHA8 0x8C45
+#define GL_SLUMINANCE_ALPHA 0x8C44
+#define GL_SMOOTH 0x1D01
+#define GL_SMOOTH_LINE_WIDTH_GRANULARITY 0x0B23
+#define GL_SMOOTH_LINE_WIDTH_RANGE 0x0B22
+#define GL_SMOOTH_POINT_SIZE_GRANULARITY 0x0B13
+#define GL_SMOOTH_POINT_SIZE_RANGE 0x0B12
+#define GL_SOURCE0_ALPHA 0x8588
+#define GL_SOURCE0_RGB 0x8580
+#define GL_SOURCE1_ALPHA 0x8589
+#define GL_SOURCE1_RGB 0x8581
+#define GL_SOURCE2_ALPHA 0x858A
+#define GL_SOURCE2_RGB 0x8582
+#define GL_SPECULAR 0x1202
+#define GL_SPHERE_MAP 0x2402
+#define GL_SPOT_CUTOFF 0x1206
+#define GL_SPOT_DIRECTION 0x1204
+#define GL_SPOT_EXPONENT 0x1205
+#define GL_SRC0_ALPHA 0x8588
+#define GL_SRC0_RGB 0x8580
+#define GL_SRC1_ALPHA 0x8589
+#define GL_SRC1_COLOR 0x88F9
+#define GL_SRC1_RGB 0x8581
+#define GL_SRC2_ALPHA 0x858A
+#define GL_SRC2_RGB 0x8582
+#define GL_SRC_ALPHA 0x0302
+#define GL_SRC_ALPHA_SATURATE 0x0308
+#define GL_SRC_COLOR 0x0300
+#define GL_SRGB 0x8C40
+#define GL_SRGB8 0x8C41
+#define GL_SRGB8_ALPHA8 0x8C43
+#define GL_SRGB_ALPHA 0x8C42
+#define GL_STACK_OVERFLOW 0x0503
+#define GL_STACK_UNDERFLOW 0x0504
+#define GL_STATIC_COPY 0x88E6
+#define GL_STATIC_DRAW 0x88E4
+#define GL_STATIC_READ 0x88E5
+#define GL_STENCIL 0x1802
+#define GL_STENCIL_ATTACHMENT 0x8D20
+#define GL_STENCIL_ATTACHMENT_EXT 0x8D20
+#define GL_STENCIL_BACK_FAIL 0x8801
+#define GL_STENCIL_BACK_FUNC 0x8800
+#define GL_STENCIL_BACK_PASS_DEPTH_FAIL 0x8802
+#define GL_STENCIL_BACK_PASS_DEPTH_PASS 0x8803
+#define GL_STENCIL_BACK_REF 0x8CA3
+#define GL_STENCIL_BACK_VALUE_MASK 0x8CA4
+#define GL_STENCIL_BACK_WRITEMASK 0x8CA5
+#define GL_STENCIL_BITS 0x0D57
+#define GL_STENCIL_BUFFER_BIT 0x00000400
+#define GL_STENCIL_CLEAR_VALUE 0x0B91
+#define GL_STENCIL_FAIL 0x0B94
+#define GL_STENCIL_FUNC 0x0B92
+#define GL_STENCIL_INDEX 0x1901
+#define GL_STENCIL_INDEX1 0x8D46
+#define GL_STENCIL_INDEX16 0x8D49
+#define GL_STENCIL_INDEX16_EXT 0x8D49
+#define GL_STENCIL_INDEX1_EXT 0x8D46
+#define GL_STENCIL_INDEX4 0x8D47
+#define GL_STENCIL_INDEX4_EXT 0x8D47
+#define GL_STENCIL_INDEX8 0x8D48
+#define GL_STENCIL_INDEX8_EXT 0x8D48
+#define GL_STENCIL_PASS_DEPTH_FAIL 0x0B95
+#define GL_STENCIL_PASS_DEPTH_PASS 0x0B96
+#define GL_STENCIL_REF 0x0B97
+#define GL_STENCIL_TEST 0x0B90
+#define GL_STENCIL_VALUE_MASK 0x0B93
+#define GL_STENCIL_WRITEMASK 0x0B98
+#define GL_STEREO 0x0C33
+#define GL_STREAM_COPY 0x88E2
+#define GL_STREAM_DRAW 0x88E0
+#define GL_STREAM_READ 0x88E1
+#define GL_SUBPIXEL_BITS 0x0D50
+#define GL_SUBTRACT 0x84E7
+#define GL_SYNC_CONDITION 0x9113
+#define GL_SYNC_FENCE 0x9116
+#define GL_SYNC_FLAGS 0x9115
+#define GL_SYNC_FLUSH_COMMANDS_BIT 0x00000001
+#define GL_SYNC_GPU_COMMANDS_COMPLETE 0x9117
+#define GL_SYNC_STATUS 0x9114
+#define GL_T 0x2001
+#define GL_T2F_C3F_V3F 0x2A2A
+#define GL_T2F_C4F_N3F_V3F 0x2A2C
+#define GL_T2F_C4UB_V3F 0x2A29
+#define GL_T2F_N3F_V3F 0x2A2B
+#define GL_T2F_V3F 0x2A27
+#define GL_T4F_C4F_N3F_V4F 0x2A2D
+#define GL_T4F_V4F 0x2A28
+#define GL_TEXTURE 0x1702
+#define GL_TEXTURE0 0x84C0
+#define GL_TEXTURE1 0x84C1
+#define GL_TEXTURE10 0x84CA
+#define GL_TEXTURE11 0x84CB
+#define GL_TEXTURE12 0x84CC
+#define GL_TEXTURE13 0x84CD
+#define GL_TEXTURE14 0x84CE
+#define GL_TEXTURE15 0x84CF
+#define GL_TEXTURE16 0x84D0
+#define GL_TEXTURE17 0x84D1
+#define GL_TEXTURE18 0x84D2
+#define GL_TEXTURE19 0x84D3
+#define GL_TEXTURE2 0x84C2
+#define GL_TEXTURE20 0x84D4
+#define GL_TEXTURE21 0x84D5
+#define GL_TEXTURE22 0x84D6
+#define GL_TEXTURE23 0x84D7
+#define GL_TEXTURE24 0x84D8
+#define GL_TEXTURE25 0x84D9
+#define GL_TEXTURE26 0x84DA
+#define GL_TEXTURE27 0x84DB
+#define GL_TEXTURE28 0x84DC
+#define GL_TEXTURE29 0x84DD
+#define GL_TEXTURE3 0x84C3
+#define GL_TEXTURE30 0x84DE
+#define GL_TEXTURE31 0x84DF
+#define GL_TEXTURE4 0x84C4
+#define GL_TEXTURE5 0x84C5
+#define GL_TEXTURE6 0x84C6
+#define GL_TEXTURE7 0x84C7
+#define GL_TEXTURE8 0x84C8
+#define GL_TEXTURE9 0x84C9
+#define GL_TEXTURE_1D 0x0DE0
+#define GL_TEXTURE_1D_ARRAY 0x8C18
+#define GL_TEXTURE_2D 0x0DE1
+#define GL_TEXTURE_2D_ARRAY 0x8C1A
+#define GL_TEXTURE_2D_MULTISAMPLE 0x9100
+#define GL_TEXTURE_2D_MULTISAMPLE_ARRAY 0x9102
+#define GL_TEXTURE_3D 0x806F
+#define GL_TEXTURE_ALPHA_SIZE 0x805F
+#define GL_TEXTURE_ALPHA_TYPE 0x8C13
+#define GL_TEXTURE_BASE_LEVEL 0x813C
+#define GL_TEXTURE_BINDING_1D 0x8068
+#define GL_TEXTURE_BINDING_1D_ARRAY 0x8C1C
+#define GL_TEXTURE_BINDING_2D 0x8069
+#define GL_TEXTURE_BINDING_2D_ARRAY 0x8C1D
+#define GL_TEXTURE_BINDING_2D_MULTISAMPLE 0x9104
+#define GL_TEXTURE_BINDING_2D_MULTISAMPLE_ARRAY 0x9105
+#define GL_TEXTURE_BINDING_3D 0x806A
+#define GL_TEXTURE_BINDING_BUFFER 0x8C2C
+#define GL_TEXTURE_BINDING_CUBE_MAP 0x8514
+#define GL_TEXTURE_BINDING_RECTANGLE 0x84F6
+#define GL_TEXTURE_BIT 0x00040000
+#define GL_TEXTURE_BLUE_SIZE 0x805E
+#define GL_TEXTURE_BLUE_TYPE 0x8C12
+#define GL_TEXTURE_BORDER 0x1005
+#define GL_TEXTURE_BORDER_COLOR 0x1004
+#define GL_TEXTURE_BUFFER 0x8C2A
+#define GL_TEXTURE_BUFFER_DATA_STORE_BINDING 0x8C2D
+#define GL_TEXTURE_COMPARE_FUNC 0x884D
+#define GL_TEXTURE_COMPARE_MODE 0x884C
+#define GL_TEXTURE_COMPONENTS 0x1003
+#define GL_TEXTURE_COMPRESSED 0x86A1
+#define GL_TEXTURE_COMPRESSED_IMAGE_SIZE 0x86A0
+#define GL_TEXTURE_COMPRESSION_HINT 0x84EF
+#define GL_TEXTURE_COORD_ARRAY 0x8078
+#define GL_TEXTURE_COORD_ARRAY_BUFFER_BINDING 0x889A
+#define GL_TEXTURE_COORD_ARRAY_POINTER 0x8092
+#define GL_TEXTURE_COORD_ARRAY_SIZE 0x8088
+#define GL_TEXTURE_COORD_ARRAY_STRIDE 0x808A
+#define GL_TEXTURE_COORD_ARRAY_TYPE 0x8089
+#define GL_TEXTURE_CUBE_MAP 0x8513
+#define GL_TEXTURE_CUBE_MAP_NEGATIVE_X 0x8516
+#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Y 0x8518
+#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Z 0x851A
+#define GL_TEXTURE_CUBE_MAP_POSITIVE_X 0x8515
+#define GL_TEXTURE_CUBE_MAP_POSITIVE_Y 0x8517
+#define GL_TEXTURE_CUBE_MAP_POSITIVE_Z 0x8519
+#define GL_TEXTURE_CUBE_MAP_SEAMLESS 0x884F
+#define GL_TEXTURE_DEPTH 0x8071
+#define GL_TEXTURE_DEPTH_SIZE 0x884A
+#define GL_TEXTURE_DEPTH_TYPE 0x8C16
+#define GL_TEXTURE_ENV 0x2300
+#define GL_TEXTURE_ENV_COLOR 0x2201
+#define GL_TEXTURE_ENV_MODE 0x2200
+#define GL_TEXTURE_FILTER_CONTROL 0x8500
+#define GL_TEXTURE_FIXED_SAMPLE_LOCATIONS 0x9107
+#define GL_TEXTURE_GEN_MODE 0x2500
+#define GL_TEXTURE_GEN_Q 0x0C63
+#define GL_TEXTURE_GEN_R 0x0C62
+#define GL_TEXTURE_GEN_S 0x0C60
+#define GL_TEXTURE_GEN_T 0x0C61
+#define GL_TEXTURE_GREEN_SIZE 0x805D
+#define GL_TEXTURE_GREEN_TYPE 0x8C11
+#define GL_TEXTURE_HEIGHT 0x1001
+#define GL_TEXTURE_INTENSITY_SIZE 0x8061
+#define GL_TEXTURE_INTENSITY_TYPE 0x8C15
+#define GL_TEXTURE_INTERNAL_FORMAT 0x1003
+#define GL_TEXTURE_LOD_BIAS 0x8501
+#define GL_TEXTURE_LUMINANCE_SIZE 0x8060
+#define GL_TEXTURE_LUMINANCE_TYPE 0x8C14
+#define GL_TEXTURE_MAG_FILTER 0x2800
+#define GL_TEXTURE_MATRIX 0x0BA8
+#define GL_TEXTURE_MAX_LEVEL 0x813D
+#define GL_TEXTURE_MAX_LOD 0x813B
+#define GL_TEXTURE_MIN_FILTER 0x2801
+#define GL_TEXTURE_MIN_LOD 0x813A
+#define GL_TEXTURE_PRIORITY 0x8066
+#define GL_TEXTURE_RECTANGLE 0x84F5
+#define GL_TEXTURE_RED_SIZE 0x805C
+#define GL_TEXTURE_RED_TYPE 0x8C10
+#define GL_TEXTURE_RESIDENT 0x8067
+#define GL_TEXTURE_SAMPLES 0x9106
+#define GL_TEXTURE_SHARED_SIZE 0x8C3F
+#define GL_TEXTURE_STACK_DEPTH 0x0BA5
+#define GL_TEXTURE_STENCIL_SIZE 0x88F1
+#define GL_TEXTURE_SWIZZLE_A 0x8E45
+#define GL_TEXTURE_SWIZZLE_B 0x8E44
+#define GL_TEXTURE_SWIZZLE_G 0x8E43
+#define GL_TEXTURE_SWIZZLE_R 0x8E42
+#define GL_TEXTURE_SWIZZLE_RGBA 0x8E46
+#define GL_TEXTURE_WIDTH 0x1000
+#define GL_TEXTURE_WRAP_R 0x8072
+#define GL_TEXTURE_WRAP_S 0x2802
+#define GL_TEXTURE_WRAP_T 0x2803
+#define GL_TIMEOUT_EXPIRED 0x911B
+#define GL_TIMEOUT_IGNORED 0xFFFFFFFFFFFFFFFFull
+#define GL_TIMESTAMP 0x8E28
+#define GL_TIME_ELAPSED 0x88BF
+#define GL_TRANSFORM_BIT 0x00001000
+#define GL_TRANSFORM_FEEDBACK_BUFFER 0x8C8E
+#define GL_TRANSFORM_FEEDBACK_BUFFER_BINDING 0x8C8F
+#define GL_TRANSFORM_FEEDBACK_BUFFER_MODE 0x8C7F
+#define GL_TRANSFORM_FEEDBACK_BUFFER_SIZE 0x8C85
+#define GL_TRANSFORM_FEEDBACK_BUFFER_START 0x8C84
+#define GL_TRANSFORM_FEEDBACK_PRIMITIVES_WRITTEN 0x8C88
+#define GL_TRANSFORM_FEEDBACK_VARYINGS 0x8C83
+#define GL_TRANSFORM_FEEDBACK_VARYING_MAX_LENGTH 0x8C76
+#define GL_TRANSPOSE_COLOR_MATRIX 0x84E6
+#define GL_TRANSPOSE_MODELVIEW_MATRIX 0x84E3
+#define GL_TRANSPOSE_PROJECTION_MATRIX 0x84E4
+#define GL_TRANSPOSE_TEXTURE_MATRIX 0x84E5
+#define GL_TRIANGLES 0x0004
+#define GL_TRIANGLES_ADJACENCY 0x000C
+#define GL_TRIANGLE_FAN 0x0006
+#define GL_TRIANGLE_STRIP 0x0005
+#define GL_TRIANGLE_STRIP_ADJACENCY 0x000D
+#define GL_TRUE 1
+#define GL_UNIFORM_ARRAY_STRIDE 0x8A3C
+#define GL_UNIFORM_BLOCK_ACTIVE_UNIFORMS 0x8A42
+#define GL_UNIFORM_BLOCK_ACTIVE_UNIFORM_INDICES 0x8A43
+#define GL_UNIFORM_BLOCK_BINDING 0x8A3F
+#define GL_UNIFORM_BLOCK_DATA_SIZE 0x8A40
+#define GL_UNIFORM_BLOCK_INDEX 0x8A3A
+#define GL_UNIFORM_BLOCK_NAME_LENGTH 0x8A41
+#define GL_UNIFORM_BLOCK_REFERENCED_BY_FRAGMENT_SHADER 0x8A46
+#define GL_UNIFORM_BLOCK_REFERENCED_BY_GEOMETRY_SHADER 0x8A45
+#define GL_UNIFORM_BLOCK_REFERENCED_BY_VERTEX_SHADER 0x8A44
+#define GL_UNIFORM_BUFFER 0x8A11
+#define GL_UNIFORM_BUFFER_BINDING 0x8A28
+#define GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT 0x8A34
+#define GL_UNIFORM_BUFFER_SIZE 0x8A2A
+#define GL_UNIFORM_BUFFER_START 0x8A29
+#define GL_UNIFORM_IS_ROW_MAJOR 0x8A3E
+#define GL_UNIFORM_MATRIX_STRIDE 0x8A3D
+#define GL_UNIFORM_NAME_LENGTH 0x8A39
+#define GL_UNIFORM_OFFSET 0x8A3B
+#define GL_UNIFORM_SIZE 0x8A38
+#define GL_UNIFORM_TYPE 0x8A37
+#define GL_UNPACK_ALIGNMENT 0x0CF5
+#define GL_UNPACK_IMAGE_HEIGHT 0x806E
+#define GL_UNPACK_LSB_FIRST 0x0CF1
+#define GL_UNPACK_ROW_LENGTH 0x0CF2
+#define GL_UNPACK_SKIP_IMAGES 0x806D
+#define GL_UNPACK_SKIP_PIXELS 0x0CF4
+#define GL_UNPACK_SKIP_ROWS 0x0CF3
+#define GL_UNPACK_SWAP_BYTES 0x0CF0
+#define GL_UNSIGNALED 0x9118
+#define GL_UNSIGNED_BYTE 0x1401
+#define GL_UNSIGNED_BYTE_2_3_3_REV 0x8362
+#define GL_UNSIGNED_BYTE_3_3_2 0x8032
+#define GL_UNSIGNED_INT 0x1405
+#define GL_UNSIGNED_INT_10F_11F_11F_REV 0x8C3B
+#define GL_UNSIGNED_INT_10_10_10_2 0x8036
+#define GL_UNSIGNED_INT_24_8 0x84FA
+#define GL_UNSIGNED_INT_2_10_10_10_REV 0x8368
+#define GL_UNSIGNED_INT_5_9_9_9_REV 0x8C3E
+#define GL_UNSIGNED_INT_8_8_8_8 0x8035
+#define GL_UNSIGNED_INT_8_8_8_8_REV 0x8367
+#define GL_UNSIGNED_INT_SAMPLER_1D 0x8DD1
+#define GL_UNSIGNED_INT_SAMPLER_1D_ARRAY 0x8DD6
+#define GL_UNSIGNED_INT_SAMPLER_2D 0x8DD2
+#define GL_UNSIGNED_INT_SAMPLER_2D_ARRAY 0x8DD7
+#define GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE 0x910A
+#define GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE_ARRAY 0x910D
+#define GL_UNSIGNED_INT_SAMPLER_2D_RECT 0x8DD5
+#define GL_UNSIGNED_INT_SAMPLER_3D 0x8DD3
+#define GL_UNSIGNED_INT_SAMPLER_BUFFER 0x8DD8
+#define GL_UNSIGNED_INT_SAMPLER_CUBE 0x8DD4
+#define GL_UNSIGNED_INT_VEC2 0x8DC6
+#define GL_UNSIGNED_INT_VEC3 0x8DC7
+#define GL_UNSIGNED_INT_VEC4 0x8DC8
+#define GL_UNSIGNED_NORMALIZED 0x8C17
+#define GL_UNSIGNED_SHORT 0x1403
+#define GL_UNSIGNED_SHORT_1_5_5_5_REV 0x8366
+#define GL_UNSIGNED_SHORT_4_4_4_4 0x8033
+#define GL_UNSIGNED_SHORT_4_4_4_4_REV 0x8365
+#define GL_UNSIGNED_SHORT_5_5_5_1 0x8034
+#define GL_UNSIGNED_SHORT_5_6_5 0x8363
+#define GL_UNSIGNED_SHORT_5_6_5_REV 0x8364
+#define GL_UPPER_LEFT 0x8CA2
+#define GL_V2F 0x2A20
+#define GL_V3F 0x2A21
+#define GL_VALIDATE_STATUS 0x8B83
+#define GL_VENDOR 0x1F00
+#define GL_VERSION 0x1F02
+#define GL_VERTEX_ARRAY 0x8074
+#define GL_VERTEX_ARRAY_BINDING 0x85B5
+#define GL_VERTEX_ARRAY_BUFFER_BINDING 0x8896
+#define GL_VERTEX_ARRAY_POINTER 0x808E
+#define GL_VERTEX_ARRAY_SIZE 0x807A
+#define GL_VERTEX_ARRAY_STRIDE 0x807C
+#define GL_VERTEX_ARRAY_TYPE 0x807B
+#define GL_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING 0x889F
+#define GL_VERTEX_ATTRIB_ARRAY_DIVISOR 0x88FE
+#define GL_VERTEX_ATTRIB_ARRAY_ENABLED 0x8622
+#define GL_VERTEX_ATTRIB_ARRAY_INTEGER 0x88FD
+#define GL_VERTEX_ATTRIB_ARRAY_NORMALIZED 0x886A
+#define GL_VERTEX_ATTRIB_ARRAY_POINTER 0x8645
+#define GL_VERTEX_ATTRIB_ARRAY_SIZE 0x8623
+#define GL_VERTEX_ATTRIB_ARRAY_STRIDE 0x8624
+#define GL_VERTEX_ATTRIB_ARRAY_TYPE 0x8625
+#define GL_VERTEX_PROGRAM_POINT_SIZE 0x8642
+#define GL_VERTEX_PROGRAM_TWO_SIDE 0x8643
+#define GL_VERTEX_SHADER 0x8B31
+#define GL_VIEWPORT 0x0BA2
+#define GL_VIEWPORT_BIT 0x00000800
+#define GL_WAIT_FAILED 0x911D
+#define GL_WEIGHT_ARRAY_BUFFER_BINDING 0x889E
+#define GL_WRITE_ONLY 0x88B9
+#define GL_XOR 0x1506
+#define GL_ZERO 0
+#define GL_ZOOM_X 0x0D16
+#define GL_ZOOM_Y 0x0D17
+
+
+#include <KHR/khrplatform.h>
+typedef unsigned int GLenum;
+typedef unsigned char GLboolean;
+typedef unsigned int GLbitfield;
+typedef void GLvoid;
+typedef khronos_int8_t GLbyte;
+typedef khronos_uint8_t GLubyte;
+typedef khronos_int16_t GLshort;
+typedef khronos_uint16_t GLushort;
+typedef int GLint;
+typedef unsigned int GLuint;
+typedef khronos_int32_t GLclampx;
+typedef int GLsizei;
+typedef khronos_float_t GLfloat;
+typedef khronos_float_t GLclampf;
+typedef double GLdouble;
+typedef double GLclampd;
+typedef void *GLeglClientBufferEXT;
+typedef void *GLeglImageOES;
+typedef char GLchar;
+typedef char GLcharARB;
+#ifdef __APPLE__
+typedef void *GLhandleARB;
+#else
+typedef unsigned int GLhandleARB;
+#endif
+typedef khronos_uint16_t GLhalf;
+typedef khronos_uint16_t GLhalfARB;
+typedef khronos_int32_t GLfixed;
+#if defined(__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__) && (__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__ > 1060)
+typedef khronos_intptr_t GLintptr;
+#else
+typedef khronos_intptr_t GLintptr;
+#endif
+#if defined(__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__) && (__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__ > 1060)
+typedef khronos_intptr_t GLintptrARB;
+#else
+typedef khronos_intptr_t GLintptrARB;
+#endif
+#if defined(__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__) && (__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__ > 1060)
+typedef khronos_ssize_t GLsizeiptr;
+#else
+typedef khronos_ssize_t GLsizeiptr;
+#endif
+#if defined(__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__) && (__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__ > 1060)
+typedef khronos_ssize_t GLsizeiptrARB;
+#else
+typedef khronos_ssize_t GLsizeiptrARB;
+#endif
+typedef khronos_int64_t GLint64;
+typedef khronos_int64_t GLint64EXT;
+typedef khronos_uint64_t GLuint64;
+typedef khronos_uint64_t GLuint64EXT;
+typedef struct __GLsync *GLsync;
+struct _cl_context;
+struct _cl_event;
+typedef void (GLAD_API_PTR *GLDEBUGPROC)(GLenum source,GLenum type,GLuint id,GLenum severity,GLsizei length,const GLchar *message,const void *userParam);
+typedef void (GLAD_API_PTR *GLDEBUGPROCARB)(GLenum source,GLenum type,GLuint id,GLenum severity,GLsizei length,const GLchar *message,const void *userParam);
+typedef void (GLAD_API_PTR *GLDEBUGPROCKHR)(GLenum source,GLenum type,GLuint id,GLenum severity,GLsizei length,const GLchar *message,const void *userParam);
+typedef void (GLAD_API_PTR *GLDEBUGPROCAMD)(GLuint id,GLenum category,GLenum severity,GLsizei length,const GLchar *message,void *userParam);
+typedef unsigned short GLhalfNV;
+typedef GLintptr GLvdpauSurfaceNV;
+typedef void (GLAD_API_PTR *GLVULKANPROCNV)(void);
+
+
+#define GL_VERSION_1_0 1
+GLAD_API_CALL int GLAD_GL_VERSION_1_0;
+#define GL_VERSION_1_1 1
+GLAD_API_CALL int GLAD_GL_VERSION_1_1;
+#define GL_VERSION_1_2 1
+GLAD_API_CALL int GLAD_GL_VERSION_1_2;
+#define GL_VERSION_1_3 1
+GLAD_API_CALL int GLAD_GL_VERSION_1_3;
+#define GL_VERSION_1_4 1
+GLAD_API_CALL int GLAD_GL_VERSION_1_4;
+#define GL_VERSION_1_5 1
+GLAD_API_CALL int GLAD_GL_VERSION_1_5;
+#define GL_VERSION_2_0 1
+GLAD_API_CALL int GLAD_GL_VERSION_2_0;
+#define GL_VERSION_2_1 1
+GLAD_API_CALL int GLAD_GL_VERSION_2_1;
+#define GL_VERSION_3_0 1
+GLAD_API_CALL int GLAD_GL_VERSION_3_0;
+#define GL_VERSION_3_1 1
+GLAD_API_CALL int GLAD_GL_VERSION_3_1;
+#define GL_VERSION_3_2 1
+GLAD_API_CALL int GLAD_GL_VERSION_3_2;
+#define GL_VERSION_3_3 1
+GLAD_API_CALL int GLAD_GL_VERSION_3_3;
+#define GL_ARB_debug_output 1
+GLAD_API_CALL int GLAD_GL_ARB_debug_output;
+#define GL_ARB_framebuffer_object 1
+GLAD_API_CALL int GLAD_GL_ARB_framebuffer_object;
+#define GL_EXT_framebuffer_blit 1
+GLAD_API_CALL int GLAD_GL_EXT_framebuffer_blit;
+#define GL_EXT_framebuffer_multisample 1
+GLAD_API_CALL int GLAD_GL_EXT_framebuffer_multisample;
+#define GL_EXT_framebuffer_object 1
+GLAD_API_CALL int GLAD_GL_EXT_framebuffer_object;
+#define GL_OVR_multiview 1
+GLAD_API_CALL int GLAD_GL_OVR_multiview;
+#define GL_OVR_multiview2 1
+GLAD_API_CALL int GLAD_GL_OVR_multiview2;
+
+
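[Editor's note: the GLAD_GL_* integers declared above are the loader's runtime feature flags. A minimal usage sketch follows, outside the generated header itself; the include path glad/gl.h and the report_gl_support helper are assumptions for illustration, and the exact loader entry point (e.g. glad2's gladLoadGL with a loader callback) depends on how glad was generated.]

    /* Sketch: after the glad loader has run with a current GL context,
     * each GLAD_GL_* flag is nonzero only if that version or extension
     * was resolved, so code guards optional paths on these ints. */
    #include <stdio.h>
    #include <glad/gl.h>   /* assumed include path for this generated header */

    static void report_gl_support(void)
    {
        if (GLAD_GL_VERSION_3_3) {
            printf("OpenGL 3.3 entry points are available\n");
        }
        if (GLAD_GL_ARB_debug_output) {
            /* Only touch ARB debug-output entry points when the flag is set. */
            printf("GL_ARB_debug_output is supported\n");
        }
        if (GLAD_GL_EXT_framebuffer_object && !GLAD_GL_ARB_framebuffer_object) {
            printf("Falling back to the EXT framebuffer object path\n");
        }
    }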
+typedef void (GLAD_API_PTR *PFNGLACCUMPROC)(GLenum op, GLfloat value);
+typedef void (GLAD_API_PTR *PFNGLACTIVETEXTUREPROC)(GLenum texture);
+typedef void (GLAD_API_PTR *PFNGLALPHAFUNCPROC)(GLenum func, GLfloat ref);
+typedef GLboolean (GLAD_API_PTR *PFNGLARETEXTURESRESIDENTPROC)(GLsizei n, const GLuint * textures, GLboolean * residences);
+typedef void (GLAD_API_PTR *PFNGLARRAYELEMENTPROC)(GLint i);
+typedef void (GLAD_API_PTR *PFNGLATTACHSHADERPROC)(GLuint program, GLuint shader);
+typedef void (GLAD_API_PTR *PFNGLBEGINPROC)(GLenum mode);
+typedef void (GLAD_API_PTR *PFNGLBEGINCONDITIONALRENDERPROC)(GLuint id, GLenum mode);
+typedef void (GLAD_API_PTR *PFNGLBEGINQUERYPROC)(GLenum target, GLuint id);
+typedef void (GLAD_API_PTR *PFNGLBEGINTRANSFORMFEEDBACKPROC)(GLenum primitiveMode);
+typedef void (GLAD_API_PTR *PFNGLBINDATTRIBLOCATIONPROC)(GLuint program, GLuint index, const GLchar * name);
+typedef void (GLAD_API_PTR *PFNGLBINDBUFFERPROC)(GLenum target, GLuint buffer);
+typedef void (GLAD_API_PTR *PFNGLBINDBUFFERBASEPROC)(GLenum target, GLuint index, GLuint buffer);
+typedef void (GLAD_API_PTR *PFNGLBINDBUFFERRANGEPROC)(GLenum target, GLuint index, GLuint buffer, GLintptr offset, GLsizeiptr size);
+typedef void (GLAD_API_PTR *PFNGLBINDFRAGDATALOCATIONPROC)(GLuint program, GLuint color, const GLchar * name);
+typedef void (GLAD_API_PTR *PFNGLBINDFRAGDATALOCATIONINDEXEDPROC)(GLuint program, GLuint colorNumber, GLuint index, const GLchar * name);
+typedef void (GLAD_API_PTR *PFNGLBINDFRAMEBUFFERPROC)(GLenum target, GLuint framebuffer);
+typedef void (GLAD_API_PTR *PFNGLBINDFRAMEBUFFEREXTPROC)(GLenum target, GLuint framebuffer);
+typedef void (GLAD_API_PTR *PFNGLBINDRENDERBUFFERPROC)(GLenum target, GLuint renderbuffer);
+typedef void (GLAD_API_PTR *PFNGLBINDRENDERBUFFEREXTPROC)(GLenum target, GLuint renderbuffer);
+typedef void (GLAD_API_PTR *PFNGLBINDSAMPLERPROC)(GLuint unit, GLuint sampler);
+typedef void (GLAD_API_PTR *PFNGLBINDTEXTUREPROC)(GLenum target, GLuint texture);
+typedef void (GLAD_API_PTR *PFNGLBINDVERTEXARRAYPROC)(GLuint array);
+typedef void (GLAD_API_PTR *PFNGLBITMAPPROC)(GLsizei width, GLsizei height, GLfloat xorig, GLfloat yorig, GLfloat xmove, GLfloat ymove, const GLubyte * bitmap);
+typedef void (GLAD_API_PTR *PFNGLBLENDCOLORPROC)(GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha);
+typedef void (GLAD_API_PTR *PFNGLBLENDEQUATIONPROC)(GLenum mode);
+typedef void (GLAD_API_PTR *PFNGLBLENDEQUATIONSEPARATEPROC)(GLenum modeRGB, GLenum modeAlpha);
+typedef void (GLAD_API_PTR *PFNGLBLENDFUNCPROC)(GLenum sfactor, GLenum dfactor);
+typedef void (GLAD_API_PTR *PFNGLBLENDFUNCSEPARATEPROC)(GLenum sfactorRGB, GLenum dfactorRGB, GLenum sfactorAlpha, GLenum dfactorAlpha);
+typedef void (GLAD_API_PTR *PFNGLBLITFRAMEBUFFERPROC)(GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter);
+typedef void (GLAD_API_PTR *PFNGLBLITFRAMEBUFFEREXTPROC)(GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter);
+typedef void (GLAD_API_PTR *PFNGLBUFFERDATAPROC)(GLenum target, GLsizeiptr size, const void * data, GLenum usage);
+typedef void (GLAD_API_PTR *PFNGLBUFFERSUBDATAPROC)(GLenum target, GLintptr offset, GLsizeiptr size, const void * data);
+typedef void (GLAD_API_PTR *PFNGLCALLLISTPROC)(GLuint list);
+typedef void (GLAD_API_PTR *PFNGLCALLLISTSPROC)(GLsizei n, GLenum type, const void * lists);
+typedef GLenum (GLAD_API_PTR *PFNGLCHECKFRAMEBUFFERSTATUSPROC)(GLenum target);
+typedef GLenum (GLAD_API_PTR *PFNGLCHECKFRAMEBUFFERSTATUSEXTPROC)(GLenum target);
+typedef void (GLAD_API_PTR *PFNGLCLAMPCOLORPROC)(GLenum target, GLenum clamp);
+typedef void (GLAD_API_PTR *PFNGLCLEARPROC)(GLbitfield mask);
+typedef void (GLAD_API_PTR *PFNGLCLEARACCUMPROC)(GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha);
+typedef void (GLAD_API_PTR *PFNGLCLEARBUFFERFIPROC)(GLenum buffer, GLint drawbuffer, GLfloat depth, GLint stencil);
+typedef void (GLAD_API_PTR *PFNGLCLEARBUFFERFVPROC)(GLenum buffer, GLint drawbuffer, const GLfloat * value);
+typedef void (GLAD_API_PTR *PFNGLCLEARBUFFERIVPROC)(GLenum buffer, GLint drawbuffer, const GLint * value);
+typedef void (GLAD_API_PTR *PFNGLCLEARBUFFERUIVPROC)(GLenum buffer, GLint drawbuffer, const GLuint * value);
+typedef void (GLAD_API_PTR *PFNGLCLEARCOLORPROC)(GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha);
+typedef void (GLAD_API_PTR *PFNGLCLEARDEPTHPROC)(GLdouble depth);
+typedef void (GLAD_API_PTR *PFNGLCLEARINDEXPROC)(GLfloat c);
+typedef void (GLAD_API_PTR *PFNGLCLEARSTENCILPROC)(GLint s);
+typedef void (GLAD_API_PTR *PFNGLCLIENTACTIVETEXTUREPROC)(GLenum texture);
+typedef GLenum (GLAD_API_PTR *PFNGLCLIENTWAITSYNCPROC)(GLsync sync, GLbitfield flags, GLuint64 timeout);
+typedef void (GLAD_API_PTR *PFNGLCLIPPLANEPROC)(GLenum plane, const GLdouble * equation);
+typedef void (GLAD_API_PTR *PFNGLCOLOR3BPROC)(GLbyte red, GLbyte green, GLbyte blue);
+typedef void (GLAD_API_PTR *PFNGLCOLOR3BVPROC)(const GLbyte * v);
+typedef void (GLAD_API_PTR *PFNGLCOLOR3DPROC)(GLdouble red, GLdouble green, GLdouble blue);
+typedef void (GLAD_API_PTR *PFNGLCOLOR3DVPROC)(const GLdouble * v);
+typedef void (GLAD_API_PTR *PFNGLCOLOR3FPROC)(GLfloat red, GLfloat green, GLfloat blue);
+typedef void (GLAD_API_PTR *PFNGLCOLOR3FVPROC)(const GLfloat * v);
+typedef void (GLAD_API_PTR *PFNGLCOLOR3IPROC)(GLint red, GLint green, GLint blue);
+typedef void (GLAD_API_PTR *PFNGLCOLOR3IVPROC)(const GLint * v);
+typedef void (GLAD_API_PTR *PFNGLCOLOR3SPROC)(GLshort red, GLshort green, GLshort blue);
+typedef void (GLAD_API_PTR *PFNGLCOLOR3SVPROC)(const GLshort * v);
+typedef void (GLAD_API_PTR *PFNGLCOLOR3UBPROC)(GLubyte red, GLubyte green, GLubyte blue);
+typedef void (GLAD_API_PTR *PFNGLCOLOR3UBVPROC)(const GLubyte * v);
+typedef void (GLAD_API_PTR *PFNGLCOLOR3UIPROC)(GLuint red, GLuint green, GLuint blue);
+typedef void (GLAD_API_PTR *PFNGLCOLOR3UIVPROC)(const GLuint * v);
+typedef void (GLAD_API_PTR *PFNGLCOLOR3USPROC)(GLushort red, GLushort green, GLushort blue);
+typedef void (GLAD_API_PTR *PFNGLCOLOR3USVPROC)(const GLushort * v);
+typedef void (GLAD_API_PTR *PFNGLCOLOR4BPROC)(GLbyte red, GLbyte green, GLbyte blue, GLbyte alpha);
+typedef void (GLAD_API_PTR *PFNGLCOLOR4BVPROC)(const GLbyte * v);
+typedef void (GLAD_API_PTR *PFNGLCOLOR4DPROC)(GLdouble red, GLdouble green, GLdouble blue, GLdouble alpha);
+typedef void (GLAD_API_PTR *PFNGLCOLOR4DVPROC)(const GLdouble * v);
+typedef void (GLAD_API_PTR *PFNGLCOLOR4FPROC)(GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha);
+typedef void (GLAD_API_PTR *PFNGLCOLOR4FVPROC)(const GLfloat * v);
+typedef void (GLAD_API_PTR *PFNGLCOLOR4IPROC)(GLint red, GLint green, GLint blue, GLint alpha);
+typedef void (GLAD_API_PTR *PFNGLCOLOR4IVPROC)(const GLint * v);
+typedef void (GLAD_API_PTR *PFNGLCOLOR4SPROC)(GLshort red, GLshort green, GLshort blue, GLshort alpha);
+typedef void (GLAD_API_PTR *PFNGLCOLOR4SVPROC)(const GLshort * v);
+typedef void (GLAD_API_PTR *PFNGLCOLOR4UBPROC)(GLubyte red, GLubyte green, GLubyte blue, GLubyte alpha);
+typedef void (GLAD_API_PTR *PFNGLCOLOR4UBVPROC)(const GLubyte * v);
+typedef void (GLAD_API_PTR *PFNGLCOLOR4UIPROC)(GLuint red, GLuint green, GLuint blue, GLuint alpha);
+typedef void (GLAD_API_PTR *PFNGLCOLOR4UIVPROC)(const GLuint * v);
+typedef void (GLAD_API_PTR *PFNGLCOLOR4USPROC)(GLushort red, GLushort green, GLushort blue, GLushort alpha);
+typedef void (GLAD_API_PTR *PFNGLCOLOR4USVPROC)(const GLushort * v);
+typedef void (GLAD_API_PTR *PFNGLCOLORMASKPROC)(GLboolean red, GLboolean green, GLboolean blue, GLboolean alpha);
+typedef void (GLAD_API_PTR *PFNGLCOLORMASKIPROC)(GLuint index, GLboolean r, GLboolean g, GLboolean b, GLboolean a);
+typedef void (GLAD_API_PTR *PFNGLCOLORMATERIALPROC)(GLenum face, GLenum mode);
+typedef void (GLAD_API_PTR *PFNGLCOLORP3UIPROC)(GLenum type, GLuint color);
+typedef void (GLAD_API_PTR *PFNGLCOLORP3UIVPROC)(GLenum type, const GLuint * color);
+typedef void (GLAD_API_PTR *PFNGLCOLORP4UIPROC)(GLenum type, GLuint color);
+typedef void (GLAD_API_PTR *PFNGLCOLORP4UIVPROC)(GLenum type, const GLuint * color);
+typedef void (GLAD_API_PTR *PFNGLCOLORPOINTERPROC)(GLint size, GLenum type, GLsizei stride, const void * pointer);
+typedef void (GLAD_API_PTR *PFNGLCOMPILESHADERPROC)(GLuint shader);
+typedef void (GLAD_API_PTR *PFNGLCOMPRESSEDTEXIMAGE1DPROC)(GLenum target, GLint level, GLenum internalformat, GLsizei width, GLint border, GLsizei imageSize, const void * data);
+typedef void (GLAD_API_PTR *PFNGLCOMPRESSEDTEXIMAGE2DPROC)(GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void * data);
+typedef void (GLAD_API_PTR *PFNGLCOMPRESSEDTEXIMAGE3DPROC)(GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void * data);
+typedef void (GLAD_API_PTR *PFNGLCOMPRESSEDTEXSUBIMAGE1DPROC)(GLenum target, GLint level, GLint xoffset, GLsizei width, GLenum format, GLsizei imageSize, const void * data);
+typedef void (GLAD_API_PTR *PFNGLCOMPRESSEDTEXSUBIMAGE2DPROC)(GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void * data);
+typedef void (GLAD_API_PTR *PFNGLCOMPRESSEDTEXSUBIMAGE3DPROC)(GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void * data);
+typedef void (GLAD_API_PTR *PFNGLCOPYBUFFERSUBDATAPROC)(GLenum readTarget, GLenum writeTarget, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size);
+typedef void (GLAD_API_PTR *PFNGLCOPYPIXELSPROC)(GLint x, GLint y, GLsizei width, GLsizei height, GLenum type);
+typedef void (GLAD_API_PTR *PFNGLCOPYTEXIMAGE1DPROC)(GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLint border);
+typedef void (GLAD_API_PTR *PFNGLCOPYTEXIMAGE2DPROC)(GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border);
+typedef void (GLAD_API_PTR *PFNGLCOPYTEXSUBIMAGE1DPROC)(GLenum target, GLint level, GLint xoffset, GLint x, GLint y, GLsizei width);
+typedef void (GLAD_API_PTR *PFNGLCOPYTEXSUBIMAGE2DPROC)(GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height);
+typedef void (GLAD_API_PTR *PFNGLCOPYTEXSUBIMAGE3DPROC)(GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height);
+typedef GLuint (GLAD_API_PTR *PFNGLCREATEPROGRAMPROC)(void);
+typedef GLuint (GLAD_API_PTR *PFNGLCREATESHADERPROC)(GLenum type);
+typedef void (GLAD_API_PTR *PFNGLCULLFACEPROC)(GLenum mode);
+typedef void (GLAD_API_PTR *PFNGLDEBUGMESSAGECALLBACKARBPROC)(GLDEBUGPROCARB callback, const void * userParam);
+typedef void (GLAD_API_PTR *PFNGLDEBUGMESSAGECONTROLARBPROC)(GLenum source, GLenum type, GLenum severity, GLsizei count, const GLuint * ids, GLboolean enabled);
+typedef void (GLAD_API_PTR *PFNGLDEBUGMESSAGEINSERTARBPROC)(GLenum source, GLenum type, GLuint id, GLenum severity, GLsizei length, const GLchar * buf);
+typedef void (GLAD_API_PTR *PFNGLDELETEBUFFERSPROC)(GLsizei n, const GLuint * buffers);
+typedef void (GLAD_API_PTR *PFNGLDELETEFRAMEBUFFERSPROC)(GLsizei n, const GLuint * framebuffers);
+typedef void (GLAD_API_PTR *PFNGLDELETEFRAMEBUFFERSEXTPROC)(GLsizei n, const GLuint * framebuffers);
+typedef void (GLAD_API_PTR *PFNGLDELETELISTSPROC)(GLuint list, GLsizei range);
+typedef void (GLAD_API_PTR *PFNGLDELETEPROGRAMPROC)(GLuint program);
+typedef void (GLAD_API_PTR *PFNGLDELETEQUERIESPROC)(GLsizei n, const GLuint * ids);
+typedef void (GLAD_API_PTR *PFNGLDELETERENDERBUFFERSPROC)(GLsizei n, const GLuint * renderbuffers);
+typedef void (GLAD_API_PTR *PFNGLDELETERENDERBUFFERSEXTPROC)(GLsizei n, const GLuint * renderbuffers);
+typedef void (GLAD_API_PTR *PFNGLDELETESAMPLERSPROC)(GLsizei count, const GLuint * samplers);
+typedef void (GLAD_API_PTR *PFNGLDELETESHADERPROC)(GLuint shader);
+typedef void (GLAD_API_PTR *PFNGLDELETESYNCPROC)(GLsync sync);
+typedef void (GLAD_API_PTR *PFNGLDELETETEXTURESPROC)(GLsizei n, const GLuint * textures);
+typedef void (GLAD_API_PTR *PFNGLDELETEVERTEXARRAYSPROC)(GLsizei n, const GLuint * arrays);
+typedef void (GLAD_API_PTR *PFNGLDEPTHFUNCPROC)(GLenum func);
+typedef void (GLAD_API_PTR *PFNGLDEPTHMASKPROC)(GLboolean flag);
+typedef void (GLAD_API_PTR *PFNGLDEPTHRANGEPROC)(GLdouble n, GLdouble f);
+typedef void (GLAD_API_PTR *PFNGLDETACHSHADERPROC)(GLuint program, GLuint shader);
+typedef void (GLAD_API_PTR *PFNGLDISABLEPROC)(GLenum cap);
+typedef void (GLAD_API_PTR *PFNGLDISABLECLIENTSTATEPROC)(GLenum array);
+typedef void (GLAD_API_PTR *PFNGLDISABLEVERTEXATTRIBARRAYPROC)(GLuint index);
+typedef void (GLAD_API_PTR *PFNGLDISABLEIPROC)(GLenum target, GLuint index);
+typedef void (GLAD_API_PTR *PFNGLDRAWARRAYSPROC)(GLenum mode, GLint first, GLsizei count);
+typedef void (GLAD_API_PTR *PFNGLDRAWARRAYSINSTANCEDPROC)(GLenum mode, GLint first, GLsizei count, GLsizei instancecount);
+typedef void (GLAD_API_PTR *PFNGLDRAWBUFFERPROC)(GLenum buf);
+typedef void (GLAD_API_PTR *PFNGLDRAWBUFFERSPROC)(GLsizei n, const GLenum * bufs);
+typedef void (GLAD_API_PTR *PFNGLDRAWELEMENTSPROC)(GLenum mode, GLsizei count, GLenum type, const void * indices);
+typedef void (GLAD_API_PTR *PFNGLDRAWELEMENTSBASEVERTEXPROC)(GLenum mode, GLsizei count, GLenum type, const void * indices, GLint basevertex);
+typedef void (GLAD_API_PTR *PFNGLDRAWELEMENTSINSTANCEDPROC)(GLenum mode, GLsizei count, GLenum type, const void * indices, GLsizei instancecount);
+typedef void (GLAD_API_PTR *PFNGLDRAWELEMENTSINSTANCEDBASEVERTEXPROC)(GLenum mode, GLsizei count, GLenum type, const void * indices, GLsizei instancecount, GLint basevertex);
+typedef void (GLAD_API_PTR *PFNGLDRAWPIXELSPROC)(GLsizei width, GLsizei height, GLenum format, GLenum type, const void * pixels);
+typedef void (GLAD_API_PTR *PFNGLDRAWRANGEELEMENTSPROC)(GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void * indices);
+typedef void (GLAD_API_PTR *PFNGLDRAWRANGEELEMENTSBASEVERTEXPROC)(GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void * indices, GLint basevertex);
+typedef void (GLAD_API_PTR *PFNGLEDGEFLAGPROC)(GLboolean flag);
+typedef void (GLAD_API_PTR *PFNGLEDGEFLAGPOINTERPROC)(GLsizei stride, const void * pointer);
+typedef void (GLAD_API_PTR *PFNGLEDGEFLAGVPROC)(const GLboolean * flag);
+typedef void (GLAD_API_PTR *PFNGLENABLEPROC)(GLenum cap);
+typedef void (GLAD_API_PTR *PFNGLENABLECLIENTSTATEPROC)(GLenum array);
+typedef void (GLAD_API_PTR *PFNGLENABLEVERTEXATTRIBARRAYPROC)(GLuint index);
+typedef void (GLAD_API_PTR *PFNGLENABLEIPROC)(GLenum target, GLuint index);
+typedef void (GLAD_API_PTR *PFNGLENDPROC)(void);
+typedef void (GLAD_API_PTR *PFNGLENDCONDITIONALRENDERPROC)(void);
+typedef void (GLAD_API_PTR *PFNGLENDLISTPROC)(void);
+typedef void (GLAD_API_PTR *PFNGLENDQUERYPROC)(GLenum target);
+typedef void (GLAD_API_PTR *PFNGLENDTRANSFORMFEEDBACKPROC)(void);
+typedef void (GLAD_API_PTR *PFNGLEVALCOORD1DPROC)(GLdouble u);
+typedef void (GLAD_API_PTR *PFNGLEVALCOORD1DVPROC)(const GLdouble * u);
+typedef void (GLAD_API_PTR *PFNGLEVALCOORD1FPROC)(GLfloat u);
+typedef void (GLAD_API_PTR *PFNGLEVALCOORD1FVPROC)(const GLfloat * u);
+typedef void (GLAD_API_PTR *PFNGLEVALCOORD2DPROC)(GLdouble u, GLdouble v);
+typedef void (GLAD_API_PTR *PFNGLEVALCOORD2DVPROC)(const GLdouble * u);
+typedef void (GLAD_API_PTR *PFNGLEVALCOORD2FPROC)(GLfloat u, GLfloat v);
+typedef void (GLAD_API_PTR *PFNGLEVALCOORD2FVPROC)(const GLfloat * u);
+typedef void (GLAD_API_PTR *PFNGLEVALMESH1PROC)(GLenum mode, GLint i1, GLint i2);
+typedef void (GLAD_API_PTR *PFNGLEVALMESH2PROC)(GLenum mode, GLint i1, GLint i2, GLint j1, GLint j2);
+typedef void (GLAD_API_PTR *PFNGLEVALPOINT1PROC)(GLint i);
+typedef void (GLAD_API_PTR *PFNGLEVALPOINT2PROC)(GLint i, GLint j);
+typedef void (GLAD_API_PTR *PFNGLFEEDBACKBUFFERPROC)(GLsizei size, GLenum type, GLfloat * buffer);
+typedef GLsync (GLAD_API_PTR *PFNGLFENCESYNCPROC)(GLenum condition, GLbitfield flags);
+typedef void (GLAD_API_PTR *PFNGLFINISHPROC)(void);
+typedef void (GLAD_API_PTR *PFNGLFLUSHPROC)(void);
+typedef void (GLAD_API_PTR *PFNGLFLUSHMAPPEDBUFFERRANGEPROC)(GLenum target, GLintptr offset, GLsizeiptr length);
+typedef void (GLAD_API_PTR *PFNGLFOGCOORDPOINTERPROC)(GLenum type, GLsizei stride, const void * pointer);
+typedef void (GLAD_API_PTR *PFNGLFOGCOORDDPROC)(GLdouble coord);
+typedef void (GLAD_API_PTR *PFNGLFOGCOORDDVPROC)(const GLdouble * coord);
+typedef void (GLAD_API_PTR *PFNGLFOGCOORDFPROC)(GLfloat coord);
+typedef void (GLAD_API_PTR *PFNGLFOGCOORDFVPROC)(const GLfloat * coord);
+typedef void (GLAD_API_PTR *PFNGLFOGFPROC)(GLenum pname, GLfloat param);
+typedef void (GLAD_API_PTR *PFNGLFOGFVPROC)(GLenum pname, const GLfloat * params);
+typedef void (GLAD_API_PTR *PFNGLFOGIPROC)(GLenum pname, GLint param);
+typedef void (GLAD_API_PTR *PFNGLFOGIVPROC)(GLenum pname, const GLint * params);
+typedef void (GLAD_API_PTR *PFNGLFRAMEBUFFERRENDERBUFFERPROC)(GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer);
+typedef void (GLAD_API_PTR *PFNGLFRAMEBUFFERRENDERBUFFEREXTPROC)(GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer);
+typedef void (GLAD_API_PTR *PFNGLFRAMEBUFFERTEXTUREPROC)(GLenum target, GLenum attachment, GLuint texture, GLint level);
+typedef void (GLAD_API_PTR *PFNGLFRAMEBUFFERTEXTURE1DPROC)(GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level);
+typedef void (GLAD_API_PTR *PFNGLFRAMEBUFFERTEXTURE1DEXTPROC)(GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level);
+typedef void (GLAD_API_PTR *PFNGLFRAMEBUFFERTEXTURE2DPROC)(GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level);
+typedef void (GLAD_API_PTR *PFNGLFRAMEBUFFERTEXTURE2DEXTPROC)(GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level);
+typedef void (GLAD_API_PTR *PFNGLFRAMEBUFFERTEXTURE3DPROC)(GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLint zoffset);
+typedef void (GLAD_API_PTR *PFNGLFRAMEBUFFERTEXTURE3DEXTPROC)(GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLint zoffset);
+typedef void (GLAD_API_PTR *PFNGLFRAMEBUFFERTEXTURELAYERPROC)(GLenum target, GLenum attachment, GLuint texture, GLint level, GLint layer);
+typedef void (GLAD_API_PTR *PFNGLFRAMEBUFFERTEXTUREMULTIVIEWOVRPROC)(GLenum target, GLenum attachment, GLuint texture, GLint level, GLint baseViewIndex, GLsizei numViews);
+typedef void (GLAD_API_PTR *PFNGLFRONTFACEPROC)(GLenum mode);
+typedef void (GLAD_API_PTR *PFNGLFRUSTUMPROC)(GLdouble left, GLdouble right, GLdouble bottom, GLdouble top, GLdouble zNear, GLdouble zFar);
+typedef void (GLAD_API_PTR *PFNGLGENBUFFERSPROC)(GLsizei n, GLuint * buffers);
+typedef void (GLAD_API_PTR *PFNGLGENFRAMEBUFFERSPROC)(GLsizei n, GLuint * framebuffers);
+typedef void (GLAD_API_PTR *PFNGLGENFRAMEBUFFERSEXTPROC)(GLsizei n, GLuint * framebuffers);
+typedef GLuint (GLAD_API_PTR *PFNGLGENLISTSPROC)(GLsizei range);
+typedef void (GLAD_API_PTR *PFNGLGENQUERIESPROC)(GLsizei n, GLuint * ids);
+typedef void (GLAD_API_PTR *PFNGLGENRENDERBUFFERSPROC)(GLsizei n, GLuint * renderbuffers);
+typedef void (GLAD_API_PTR *PFNGLGENRENDERBUFFERSEXTPROC)(GLsizei n, GLuint * renderbuffers);
+typedef void (GLAD_API_PTR *PFNGLGENSAMPLERSPROC)(GLsizei count, GLuint * samplers);
+typedef void (GLAD_API_PTR *PFNGLGENTEXTURESPROC)(GLsizei n, GLuint * textures);
+typedef void (GLAD_API_PTR *PFNGLGENVERTEXARRAYSPROC)(GLsizei n, GLuint * arrays);
+typedef void (GLAD_API_PTR *PFNGLGENERATEMIPMAPPROC)(GLenum target);
+typedef void (GLAD_API_PTR *PFNGLGENERATEMIPMAPEXTPROC)(GLenum target);
+typedef void (GLAD_API_PTR *PFNGLGETACTIVEATTRIBPROC)(GLuint program, GLuint index, GLsizei bufSize, GLsizei * length, GLint * size, GLenum * type, GLchar * name);
+typedef void (GLAD_API_PTR *PFNGLGETACTIVEUNIFORMPROC)(GLuint program, GLuint index, GLsizei bufSize, GLsizei * length, GLint * size, GLenum * type, GLchar * name);
+typedef void (GLAD_API_PTR *PFNGLGETACTIVEUNIFORMBLOCKNAMEPROC)(GLuint program, GLuint uniformBlockIndex, GLsizei bufSize, GLsizei * length, GLchar * uniformBlockName);
+typedef void (GLAD_API_PTR *PFNGLGETACTIVEUNIFORMBLOCKIVPROC)(GLuint program, GLuint uniformBlockIndex, GLenum pname, GLint * params);
+typedef void (GLAD_API_PTR *PFNGLGETACTIVEUNIFORMNAMEPROC)(GLuint program, GLuint uniformIndex, GLsizei bufSize, GLsizei * length, GLchar * uniformName);
+typedef void (GLAD_API_PTR *PFNGLGETACTIVEUNIFORMSIVPROC)(GLuint program, GLsizei uniformCount, const GLuint * uniformIndices, GLenum pname, GLint * params);
+typedef void (GLAD_API_PTR *PFNGLGETATTACHEDSHADERSPROC)(GLuint program, GLsizei maxCount, GLsizei * count, GLuint * shaders);
+typedef GLint (GLAD_API_PTR *PFNGLGETATTRIBLOCATIONPROC)(GLuint program, const GLchar * name);
+typedef void (GLAD_API_PTR *PFNGLGETBOOLEANI_VPROC)(GLenum target, GLuint index, GLboolean * data);
+typedef void (GLAD_API_PTR *PFNGLGETBOOLEANVPROC)(GLenum pname, GLboolean * data);
+typedef void (GLAD_API_PTR *PFNGLGETBUFFERPARAMETERI64VPROC)(GLenum target, GLenum pname, GLint64 * params);
+typedef void (GLAD_API_PTR *PFNGLGETBUFFERPARAMETERIVPROC)(GLenum target, GLenum pname, GLint * params);
+typedef void (GLAD_API_PTR *PFNGLGETBUFFERPOINTERVPROC)(GLenum target, GLenum pname, void ** params);
+typedef void (GLAD_API_PTR *PFNGLGETBUFFERSUBDATAPROC)(GLenum target, GLintptr offset, GLsizeiptr size, void * data);
+typedef void (GLAD_API_PTR *PFNGLGETCLIPPLANEPROC)(GLenum plane, GLdouble * equation);
+typedef void (GLAD_API_PTR *PFNGLGETCOMPRESSEDTEXIMAGEPROC)(GLenum target, GLint level, void * img);
+typedef GLuint (GLAD_API_PTR *PFNGLGETDEBUGMESSAGELOGARBPROC)(GLuint count, GLsizei bufSize, GLenum * sources, GLenum * types, GLuint * ids, GLenum * severities, GLsizei * lengths, GLchar * messageLog);
+typedef void (GLAD_API_PTR *PFNGLGETDOUBLEVPROC)(GLenum pname, GLdouble * data);
+typedef GLenum (GLAD_API_PTR *PFNGLGETERRORPROC)(void);
+typedef void (GLAD_API_PTR *PFNGLGETFLOATVPROC)(GLenum pname, GLfloat * data);
+typedef GLint (GLAD_API_PTR *PFNGLGETFRAGDATAINDEXPROC)(GLuint program, const GLchar * name);
+typedef GLint (GLAD_API_PTR *PFNGLGETFRAGDATALOCATIONPROC)(GLuint program, const GLchar * name);
+typedef void (GLAD_API_PTR *PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVPROC)(GLenum target, GLenum attachment, GLenum pname, GLint * params);
+typedef void (GLAD_API_PTR *PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVEXTPROC)(GLenum target, GLenum attachment, GLenum pname, GLint * params);
+typedef void (GLAD_API_PTR *PFNGLGETINTEGER64I_VPROC)(GLenum target, GLuint index, GLint64 * data);
+typedef void (GLAD_API_PTR *PFNGLGETINTEGER64VPROC)(GLenum pname, GLint64 * data);
+typedef void (GLAD_API_PTR *PFNGLGETINTEGERI_VPROC)(GLenum target, GLuint index, GLint * data);
+typedef void (GLAD_API_PTR *PFNGLGETINTEGERVPROC)(GLenum pname, GLint * data);
+typedef void (GLAD_API_PTR *PFNGLGETLIGHTFVPROC)(GLenum light, GLenum pname, GLfloat * params);
+typedef void (GLAD_API_PTR *PFNGLGETLIGHTIVPROC)(GLenum light, GLenum pname, GLint * params);
+typedef void (GLAD_API_PTR *PFNGLGETMAPDVPROC)(GLenum target, GLenum query, GLdouble * v);
+typedef void (GLAD_API_PTR *PFNGLGETMAPFVPROC)(GLenum target, GLenum query, GLfloat * v);
+typedef void (GLAD_API_PTR *PFNGLGETMAPIVPROC)(GLenum target, GLenum query, GLint * v);
+typedef void (GLAD_API_PTR *PFNGLGETMATERIALFVPROC)(GLenum face, GLenum pname, GLfloat * params);
+typedef void (GLAD_API_PTR *PFNGLGETMATERIALIVPROC)(GLenum face, GLenum pname, GLint * params);
+typedef void (GLAD_API_PTR *PFNGLGETMULTISAMPLEFVPROC)(GLenum pname, GLuint index, GLfloat * val);
+typedef void (GLAD_API_PTR *PFNGLGETPIXELMAPFVPROC)(GLenum map, GLfloat * values);
+typedef void (GLAD_API_PTR *PFNGLGETPIXELMAPUIVPROC)(GLenum map, GLuint * values);
+typedef void (GLAD_API_PTR *PFNGLGETPIXELMAPUSVPROC)(GLenum map, GLushort * values);
+typedef void (GLAD_API_PTR *PFNGLGETPOINTERVPROC)(GLenum pname, void ** params);
+typedef void (GLAD_API_PTR *PFNGLGETPOLYGONSTIPPLEPROC)(GLubyte * mask);
+typedef void (GLAD_API_PTR *PFNGLGETPROGRAMINFOLOGPROC)(GLuint program, GLsizei bufSize, GLsizei * length, GLchar * infoLog);
+typedef void (GLAD_API_PTR *PFNGLGETPROGRAMIVPROC)(GLuint program, GLenum pname, GLint * params);
+typedef void (GLAD_API_PTR *PFNGLGETQUERYOBJECTI64VPROC)(GLuint id, GLenum pname, GLint64 * params);
+typedef void (GLAD_API_PTR *PFNGLGETQUERYOBJECTIVPROC)(GLuint id, GLenum pname, GLint * params);
+typedef void (GLAD_API_PTR *PFNGLGETQUERYOBJECTUI64VPROC)(GLuint id, GLenum pname, GLuint64 * params);
+typedef void (GLAD_API_PTR *PFNGLGETQUERYOBJECTUIVPROC)(GLuint id, GLenum pname, GLuint * params);
+typedef void (GLAD_API_PTR *PFNGLGETQUERYIVPROC)(GLenum target, GLenum pname, GLint * params);
+typedef void (GLAD_API_PTR *PFNGLGETRENDERBUFFERPARAMETERIVPROC)(GLenum target, GLenum pname, GLint * params);
+typedef void (GLAD_API_PTR *PFNGLGETRENDERBUFFERPARAMETERIVEXTPROC)(GLenum target, GLenum pname, GLint * params);
+typedef void (GLAD_API_PTR *PFNGLGETSAMPLERPARAMETERIIVPROC)(GLuint sampler, GLenum pname, GLint * params);
+typedef void (GLAD_API_PTR *PFNGLGETSAMPLERPARAMETERIUIVPROC)(GLuint sampler, GLenum pname, GLuint * params);
+typedef void (GLAD_API_PTR *PFNGLGETSAMPLERPARAMETERFVPROC)(GLuint sampler, GLenum pname, GLfloat * params);
+typedef void (GLAD_API_PTR *PFNGLGETSAMPLERPARAMETERIVPROC)(GLuint sampler, GLenum pname, GLint * params);
+typedef void (GLAD_API_PTR *PFNGLGETSHADERINFOLOGPROC)(GLuint shader, GLsizei bufSize, GLsizei * length, GLchar * infoLog);
+typedef void (GLAD_API_PTR *PFNGLGETSHADERSOURCEPROC)(GLuint shader, GLsizei bufSize, GLsizei * length, GLchar * source);
+typedef void (GLAD_API_PTR *PFNGLGETSHADERIVPROC)(GLuint shader, GLenum pname, GLint * params);
+typedef const GLubyte * (GLAD_API_PTR *PFNGLGETSTRINGPROC)(GLenum name);
+typedef const GLubyte * (GLAD_API_PTR *PFNGLGETSTRINGIPROC)(GLenum name, GLuint index);
+typedef void (GLAD_API_PTR *PFNGLGETSYNCIVPROC)(GLsync sync, GLenum pname, GLsizei count, GLsizei * length, GLint * values);
+typedef void (GLAD_API_PTR *PFNGLGETTEXENVFVPROC)(GLenum target, GLenum pname, GLfloat * params);
+typedef void (GLAD_API_PTR *PFNGLGETTEXENVIVPROC)(GLenum target, GLenum pname, GLint * params);
+typedef void (GLAD_API_PTR *PFNGLGETTEXGENDVPROC)(GLenum coord, GLenum pname, GLdouble * params);
+typedef void (GLAD_API_PTR *PFNGLGETTEXGENFVPROC)(GLenum coord, GLenum pname, GLfloat * params);
+typedef void (GLAD_API_PTR *PFNGLGETTEXGENIVPROC)(GLenum coord, GLenum pname, GLint * params);
+typedef void (GLAD_API_PTR *PFNGLGETTEXIMAGEPROC)(GLenum target, GLint level, GLenum format, GLenum type, void * pixels);
+typedef void (GLAD_API_PTR *PFNGLGETTEXLEVELPARAMETERFVPROC)(GLenum target, GLint level, GLenum pname, GLfloat * params);
+typedef void (GLAD_API_PTR *PFNGLGETTEXLEVELPARAMETERIVPROC)(GLenum target, GLint level, GLenum pname, GLint * params);
+typedef void (GLAD_API_PTR *PFNGLGETTEXPARAMETERIIVPROC)(GLenum target, GLenum pname, GLint * params);
+typedef void (GLAD_API_PTR *PFNGLGETTEXPARAMETERIUIVPROC)(GLenum target, GLenum pname, GLuint * params);
+typedef void (GLAD_API_PTR *PFNGLGETTEXPARAMETERFVPROC)(GLenum target, GLenum pname, GLfloat * params);
+typedef void (GLAD_API_PTR *PFNGLGETTEXPARAMETERIVPROC)(GLenum target, GLenum pname, GLint * params);
+typedef void (GLAD_API_PTR *PFNGLGETTRANSFORMFEEDBACKVARYINGPROC)(GLuint program, GLuint index, GLsizei bufSize, GLsizei * length, GLsizei * size, GLenum * type, GLchar * name);
+typedef GLuint (GLAD_API_PTR *PFNGLGETUNIFORMBLOCKINDEXPROC)(GLuint program, const GLchar * uniformBlockName);
+typedef void (GLAD_API_PTR *PFNGLGETUNIFORMINDICESPROC)(GLuint program, GLsizei uniformCount, const GLchar *const* uniformNames, GLuint * uniformIndices);
+typedef GLint (GLAD_API_PTR *PFNGLGETUNIFORMLOCATIONPROC)(GLuint program, const GLchar * name);
+typedef void (GLAD_API_PTR *PFNGLGETUNIFORMFVPROC)(GLuint program, GLint location, GLfloat * params);
+typedef void (GLAD_API_PTR *PFNGLGETUNIFORMIVPROC)(GLuint program, GLint location, GLint * params);
+typedef void (GLAD_API_PTR *PFNGLGETUNIFORMUIVPROC)(GLuint program, GLint location, GLuint * params);
+typedef void (GLAD_API_PTR *PFNGLGETVERTEXATTRIBIIVPROC)(GLuint index, GLenum pname, GLint * params);
+typedef void (GLAD_API_PTR *PFNGLGETVERTEXATTRIBIUIVPROC)(GLuint index, GLenum pname, GLuint * params);
+typedef void (GLAD_API_PTR *PFNGLGETVERTEXATTRIBPOINTERVPROC)(GLuint index, GLenum pname, void ** pointer);
+typedef void (GLAD_API_PTR *PFNGLGETVERTEXATTRIBDVPROC)(GLuint index, GLenum pname, GLdouble * params);
+typedef void (GLAD_API_PTR *PFNGLGETVERTEXATTRIBFVPROC)(GLuint index, GLenum pname, GLfloat * params);
+typedef void (GLAD_API_PTR *PFNGLGETVERTEXATTRIBIVPROC)(GLuint index, GLenum pname, GLint * params);
+typedef void (GLAD_API_PTR *PFNGLHINTPROC)(GLenum target, GLenum mode);
+typedef void (GLAD_API_PTR *PFNGLINDEXMASKPROC)(GLuint mask);
+typedef void (GLAD_API_PTR *PFNGLINDEXPOINTERPROC)(GLenum type, GLsizei stride, const void * pointer);
+typedef void (GLAD_API_PTR *PFNGLINDEXDPROC)(GLdouble c);
+typedef void (GLAD_API_PTR *PFNGLINDEXDVPROC)(const GLdouble * c);
+typedef void (GLAD_API_PTR *PFNGLINDEXFPROC)(GLfloat c);
+typedef void (GLAD_API_PTR *PFNGLINDEXFVPROC)(const GLfloat * c);
+typedef void (GLAD_API_PTR *PFNGLINDEXIPROC)(GLint c);
+typedef void (GLAD_API_PTR *PFNGLINDEXIVPROC)(const GLint * c);
+typedef void (GLAD_API_PTR *PFNGLINDEXSPROC)(GLshort c);
+typedef void (GLAD_API_PTR *PFNGLINDEXSVPROC)(const GLshort * c);
+typedef void (GLAD_API_PTR *PFNGLINDEXUBPROC)(GLubyte c);
+typedef void (GLAD_API_PTR *PFNGLINDEXUBVPROC)(const GLubyte * c);
+typedef void (GLAD_API_PTR *PFNGLINITNAMESPROC)(void);
+typedef void (GLAD_API_PTR *PFNGLINTERLEAVEDARRAYSPROC)(GLenum format, GLsizei stride, const void * pointer);
+typedef GLboolean (GLAD_API_PTR *PFNGLISBUFFERPROC)(GLuint buffer);
+typedef GLboolean (GLAD_API_PTR *PFNGLISENABLEDPROC)(GLenum cap);
+typedef GLboolean (GLAD_API_PTR *PFNGLISENABLEDIPROC)(GLenum target, GLuint index);
+typedef GLboolean (GLAD_API_PTR *PFNGLISFRAMEBUFFERPROC)(GLuint framebuffer);
+typedef GLboolean (GLAD_API_PTR *PFNGLISFRAMEBUFFEREXTPROC)(GLuint framebuffer);
+typedef GLboolean (GLAD_API_PTR *PFNGLISLISTPROC)(GLuint list);
+typedef GLboolean (GLAD_API_PTR *PFNGLISPROGRAMPROC)(GLuint program);
+typedef GLboolean (GLAD_API_PTR *PFNGLISQUERYPROC)(GLuint id);
+typedef GLboolean (GLAD_API_PTR *PFNGLISRENDERBUFFERPROC)(GLuint renderbuffer);
+typedef GLboolean (GLAD_API_PTR *PFNGLISRENDERBUFFEREXTPROC)(GLuint renderbuffer);
+typedef GLboolean (GLAD_API_PTR *PFNGLISSAMPLERPROC)(GLuint sampler);
+typedef GLboolean (GLAD_API_PTR *PFNGLISSHADERPROC)(GLuint shader);
+typedef GLboolean (GLAD_API_PTR *PFNGLISSYNCPROC)(GLsync sync);
+typedef GLboolean (GLAD_API_PTR *PFNGLISTEXTUREPROC)(GLuint texture);
+typedef GLboolean (GLAD_API_PTR *PFNGLISVERTEXARRAYPROC)(GLuint array);
+typedef void (GLAD_API_PTR *PFNGLLIGHTMODELFPROC)(GLenum pname, GLfloat param);
+typedef void (GLAD_API_PTR *PFNGLLIGHTMODELFVPROC)(GLenum pname, const GLfloat * params);
+typedef void (GLAD_API_PTR *PFNGLLIGHTMODELIPROC)(GLenum pname, GLint param);
+typedef void (GLAD_API_PTR *PFNGLLIGHTMODELIVPROC)(GLenum pname, const GLint * params);
+typedef void (GLAD_API_PTR *PFNGLLIGHTFPROC)(GLenum light, GLenum pname, GLfloat param);
+typedef void (GLAD_API_PTR *PFNGLLIGHTFVPROC)(GLenum light, GLenum pname, const GLfloat * params);
+typedef void (GLAD_API_PTR *PFNGLLIGHTIPROC)(GLenum light, GLenum pname, GLint param);
+typedef void (GLAD_API_PTR *PFNGLLIGHTIVPROC)(GLenum light, GLenum pname, const GLint * params);
+typedef void (GLAD_API_PTR *PFNGLLINESTIPPLEPROC)(GLint factor, GLushort pattern);
+typedef void (GLAD_API_PTR *PFNGLLINEWIDTHPROC)(GLfloat width);
+typedef void (GLAD_API_PTR *PFNGLLINKPROGRAMPROC)(GLuint program);
+typedef void (GLAD_API_PTR *PFNGLLISTBASEPROC)(GLuint base);
+typedef void (GLAD_API_PTR *PFNGLLOADIDENTITYPROC)(void);
+typedef void (GLAD_API_PTR *PFNGLLOADMATRIXDPROC)(const GLdouble * m);
+typedef void (GLAD_API_PTR *PFNGLLOADMATRIXFPROC)(const GLfloat * m);
+typedef void (GLAD_API_PTR *PFNGLLOADNAMEPROC)(GLuint name);
+typedef void (GLAD_API_PTR *PFNGLLOADTRANSPOSEMATRIXDPROC)(const GLdouble * m);
+typedef void (GLAD_API_PTR *PFNGLLOADTRANSPOSEMATRIXFPROC)(const GLfloat * m);
+typedef void (GLAD_API_PTR *PFNGLLOGICOPPROC)(GLenum opcode);
+typedef void (GLAD_API_PTR *PFNGLMAP1DPROC)(GLenum target, GLdouble u1, GLdouble u2, GLint stride, GLint order, const GLdouble * points);
+typedef void (GLAD_API_PTR *PFNGLMAP1FPROC)(GLenum target, GLfloat u1, GLfloat u2, GLint stride, GLint order, const GLfloat * points);
+typedef void (GLAD_API_PTR *PFNGLMAP2DPROC)(GLenum target, GLdouble u1, GLdouble u2, GLint ustride, GLint uorder, GLdouble v1, GLdouble v2, GLint vstride, GLint vorder, const GLdouble * points);
+typedef void (GLAD_API_PTR *PFNGLMAP2FPROC)(GLenum target, GLfloat u1, GLfloat u2, GLint ustride, GLint uorder, GLfloat v1, GLfloat v2, GLint vstride, GLint vorder, const GLfloat * points);
+typedef void * (GLAD_API_PTR *PFNGLMAPBUFFERPROC)(GLenum target, GLenum access);
+typedef void * (GLAD_API_PTR *PFNGLMAPBUFFERRANGEPROC)(GLenum target, GLintptr offset, GLsizeiptr length, GLbitfield access);
+typedef void (GLAD_API_PTR *PFNGLMAPGRID1DPROC)(GLint un, GLdouble u1, GLdouble u2);
+typedef void (GLAD_API_PTR *PFNGLMAPGRID1FPROC)(GLint un, GLfloat u1, GLfloat u2);
+typedef void (GLAD_API_PTR *PFNGLMAPGRID2DPROC)(GLint un, GLdouble u1, GLdouble u2, GLint vn, GLdouble v1, GLdouble v2);
+typedef void (GLAD_API_PTR *PFNGLMAPGRID2FPROC)(GLint un, GLfloat u1, GLfloat u2, GLint vn, GLfloat v1, GLfloat v2);
+typedef void (GLAD_API_PTR *PFNGLMATERIALFPROC)(GLenum face, GLenum pname, GLfloat param);
+typedef void (GLAD_API_PTR *PFNGLMATERIALFVPROC)(GLenum face, GLenum pname, const GLfloat * params);
+typedef void (GLAD_API_PTR *PFNGLMATERIALIPROC)(GLenum face, GLenum pname, GLint param);
+typedef void (GLAD_API_PTR *PFNGLMATERIALIVPROC)(GLenum face, GLenum pname, const GLint * params);
+typedef void (GLAD_API_PTR *PFNGLMATRIXMODEPROC)(GLenum mode);
+typedef void (GLAD_API_PTR *PFNGLMULTMATRIXDPROC)(const GLdouble * m);
+typedef void (GLAD_API_PTR *PFNGLMULTMATRIXFPROC)(const GLfloat * m);
+typedef void (GLAD_API_PTR *PFNGLMULTTRANSPOSEMATRIXDPROC)(const GLdouble * m);
+typedef void (GLAD_API_PTR *PFNGLMULTTRANSPOSEMATRIXFPROC)(const GLfloat * m);
+typedef void (GLAD_API_PTR *PFNGLMULTIDRAWARRAYSPROC)(GLenum mode, const GLint * first, const GLsizei * count, GLsizei drawcount);
+typedef void (GLAD_API_PTR *PFNGLMULTIDRAWELEMENTSPROC)(GLenum mode, const GLsizei * count, GLenum type, const void *const* indices, GLsizei drawcount);
+typedef void (GLAD_API_PTR *PFNGLMULTIDRAWELEMENTSBASEVERTEXPROC)(GLenum mode, const GLsizei * count, GLenum type, const void *const* indices, GLsizei drawcount, const GLint * basevertex);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD1DPROC)(GLenum target, GLdouble s);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD1DVPROC)(GLenum target, const GLdouble * v);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD1FPROC)(GLenum target, GLfloat s);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD1FVPROC)(GLenum target, const GLfloat * v);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD1IPROC)(GLenum target, GLint s);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD1IVPROC)(GLenum target, const GLint * v);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD1SPROC)(GLenum target, GLshort s);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD1SVPROC)(GLenum target, const GLshort * v);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD2DPROC)(GLenum target, GLdouble s, GLdouble t);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD2DVPROC)(GLenum target, const GLdouble * v);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD2FPROC)(GLenum target, GLfloat s, GLfloat t);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD2FVPROC)(GLenum target, const GLfloat * v);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD2IPROC)(GLenum target, GLint s, GLint t);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD2IVPROC)(GLenum target, const GLint * v);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD2SPROC)(GLenum target, GLshort s, GLshort t);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD2SVPROC)(GLenum target, const GLshort * v);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD3DPROC)(GLenum target, GLdouble s, GLdouble t, GLdouble r);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD3DVPROC)(GLenum target, const GLdouble * v);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD3FPROC)(GLenum target, GLfloat s, GLfloat t, GLfloat r);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD3FVPROC)(GLenum target, const GLfloat * v);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD3IPROC)(GLenum target, GLint s, GLint t, GLint r);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD3IVPROC)(GLenum target, const GLint * v);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD3SPROC)(GLenum target, GLshort s, GLshort t, GLshort r);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD3SVPROC)(GLenum target, const GLshort * v);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD4DPROC)(GLenum target, GLdouble s, GLdouble t, GLdouble r, GLdouble q);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD4DVPROC)(GLenum target, const GLdouble * v);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD4FPROC)(GLenum target, GLfloat s, GLfloat t, GLfloat r, GLfloat q);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD4FVPROC)(GLenum target, const GLfloat * v);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD4IPROC)(GLenum target, GLint s, GLint t, GLint r, GLint q);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD4IVPROC)(GLenum target, const GLint * v);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD4SPROC)(GLenum target, GLshort s, GLshort t, GLshort r, GLshort q);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORD4SVPROC)(GLenum target, const GLshort * v);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORDP1UIPROC)(GLenum texture, GLenum type, GLuint coords);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORDP1UIVPROC)(GLenum texture, GLenum type, const GLuint * coords);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORDP2UIPROC)(GLenum texture, GLenum type, GLuint coords);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORDP2UIVPROC)(GLenum texture, GLenum type, const GLuint * coords);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORDP3UIPROC)(GLenum texture, GLenum type, GLuint coords);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORDP3UIVPROC)(GLenum texture, GLenum type, const GLuint * coords);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORDP4UIPROC)(GLenum texture, GLenum type, GLuint coords);
+typedef void (GLAD_API_PTR *PFNGLMULTITEXCOORDP4UIVPROC)(GLenum texture, GLenum type, const GLuint * coords);
+typedef void (GLAD_API_PTR *PFNGLNEWLISTPROC)(GLuint list, GLenum mode);
+typedef void (GLAD_API_PTR *PFNGLNORMAL3BPROC)(GLbyte nx, GLbyte ny, GLbyte nz);
+typedef void (GLAD_API_PTR *PFNGLNORMAL3BVPROC)(const GLbyte * v);
+typedef void (GLAD_API_PTR *PFNGLNORMAL3DPROC)(GLdouble nx, GLdouble ny, GLdouble nz);
+typedef void (GLAD_API_PTR *PFNGLNORMAL3DVPROC)(const GLdouble * v);
+typedef void (GLAD_API_PTR *PFNGLNORMAL3FPROC)(GLfloat nx, GLfloat ny, GLfloat nz);
+typedef void (GLAD_API_PTR *PFNGLNORMAL3FVPROC)(const GLfloat * v);
+typedef void (GLAD_API_PTR *PFNGLNORMAL3IPROC)(GLint nx, GLint ny, GLint nz);
+typedef void (GLAD_API_PTR *PFNGLNORMAL3IVPROC)(const GLint * v);
+typedef void (GLAD_API_PTR *PFNGLNORMAL3SPROC)(GLshort nx, GLshort ny, GLshort nz);
+typedef void (GLAD_API_PTR *PFNGLNORMAL3SVPROC)(const GLshort * v);
+typedef void (GLAD_API_PTR *PFNGLNORMALP3UIPROC)(GLenum type, GLuint coords);
+typedef void (GLAD_API_PTR *PFNGLNORMALP3UIVPROC)(GLenum type, const GLuint * coords);
+typedef void (GLAD_API_PTR *PFNGLNORMALPOINTERPROC)(GLenum type, GLsizei stride, const void * pointer);
+typedef void (GLAD_API_PTR *PFNGLORTHOPROC)(GLdouble left, GLdouble right, GLdouble bottom, GLdouble top, GLdouble zNear, GLdouble zFar);
+typedef void (GLAD_API_PTR *PFNGLPASSTHROUGHPROC)(GLfloat token);
+typedef void (GLAD_API_PTR *PFNGLPIXELMAPFVPROC)(GLenum map, GLsizei mapsize, const GLfloat * values);
+typedef void (GLAD_API_PTR *PFNGLPIXELMAPUIVPROC)(GLenum map, GLsizei mapsize, const GLuint * values);
+typedef void (GLAD_API_PTR *PFNGLPIXELMAPUSVPROC)(GLenum map, GLsizei mapsize, const GLushort * values);
+typedef void (GLAD_API_PTR *PFNGLPIXELSTOREFPROC)(GLenum pname, GLfloat param);
+typedef void (GLAD_API_PTR *PFNGLPIXELSTOREIPROC)(GLenum pname, GLint param);
+typedef void (GLAD_API_PTR *PFNGLPIXELTRANSFERFPROC)(GLenum pname, GLfloat param);
+typedef void (GLAD_API_PTR *PFNGLPIXELTRANSFERIPROC)(GLenum pname, GLint param);
+typedef void (GLAD_API_PTR *PFNGLPIXELZOOMPROC)(GLfloat xfactor, GLfloat yfactor);
+typedef void (GLAD_API_PTR *PFNGLPOINTPARAMETERFPROC)(GLenum pname, GLfloat param);
+typedef void (GLAD_API_PTR *PFNGLPOINTPARAMETERFVPROC)(GLenum pname, const GLfloat * params);
+typedef void (GLAD_API_PTR *PFNGLPOINTPARAMETERIPROC)(GLenum pname, GLint param);
+typedef void (GLAD_API_PTR *PFNGLPOINTPARAMETERIVPROC)(GLenum pname, const GLint * params);
+typedef void (GLAD_API_PTR *PFNGLPOINTSIZEPROC)(GLfloat size);
+typedef void (GLAD_API_PTR *PFNGLPOLYGONMODEPROC)(GLenum face, GLenum mode);
+typedef void (GLAD_API_PTR *PFNGLPOLYGONOFFSETPROC)(GLfloat factor, GLfloat units);
+typedef void (GLAD_API_PTR *PFNGLPOLYGONSTIPPLEPROC)(const GLubyte * mask);
+typedef void (GLAD_API_PTR *PFNGLPOPATTRIBPROC)(void);
+typedef void (GLAD_API_PTR *PFNGLPOPCLIENTATTRIBPROC)(void);
+typedef void (GLAD_API_PTR *PFNGLPOPMATRIXPROC)(void);
+typedef void (GLAD_API_PTR *PFNGLPOPNAMEPROC)(void);
+typedef void (GLAD_API_PTR *PFNGLPRIMITIVERESTARTINDEXPROC)(GLuint index);
+typedef void (GLAD_API_PTR *PFNGLPRIORITIZETEXTURESPROC)(GLsizei n, const GLuint * textures, const GLfloat * priorities);
+typedef void (GLAD_API_PTR *PFNGLPROVOKINGVERTEXPROC)(GLenum mode);
+typedef void (GLAD_API_PTR *PFNGLPUSHATTRIBPROC)(GLbitfield mask);
+typedef void (GLAD_API_PTR *PFNGLPUSHCLIENTATTRIBPROC)(GLbitfield mask);
+typedef void (GLAD_API_PTR *PFNGLPUSHMATRIXPROC)(void);
+typedef void (GLAD_API_PTR *PFNGLPUSHNAMEPROC)(GLuint name);
+typedef void (GLAD_API_PTR *PFNGLQUERYCOUNTERPROC)(GLuint id, GLenum target);
+typedef void (GLAD_API_PTR *PFNGLRASTERPOS2DPROC)(GLdouble x, GLdouble y);
+typedef void (GLAD_API_PTR *PFNGLRASTERPOS2DVPROC)(const GLdouble * v);
+typedef void (GLAD_API_PTR *PFNGLRASTERPOS2FPROC)(GLfloat x, GLfloat y);
+typedef void (GLAD_API_PTR *PFNGLRASTERPOS2FVPROC)(const GLfloat * v);
+typedef void (GLAD_API_PTR *PFNGLRASTERPOS2IPROC)(GLint x, GLint y);
+typedef void (GLAD_API_PTR *PFNGLRASTERPOS2IVPROC)(const GLint * v);
+typedef void (GLAD_API_PTR *PFNGLRASTERPOS2SPROC)(GLshort x, GLshort y);
+typedef void (GLAD_API_PTR *PFNGLRASTERPOS2SVPROC)(const GLshort * v);
+typedef void (GLAD_API_PTR *PFNGLRASTERPOS3DPROC)(GLdouble x, GLdouble y, GLdouble z);
+typedef void (GLAD_API_PTR *PFNGLRASTERPOS3DVPROC)(const GLdouble * v);
+typedef void (GLAD_API_PTR *PFNGLRASTERPOS3FPROC)(GLfloat x, GLfloat y, GLfloat z);
+typedef void (GLAD_API_PTR *PFNGLRASTERPOS3FVPROC)(const GLfloat * v);
+typedef void (GLAD_API_PTR *PFNGLRASTERPOS3IPROC)(GLint x, GLint y, GLint z);
+typedef void (GLAD_API_PTR *PFNGLRASTERPOS3IVPROC)(const GLint * v);
+typedef void (GLAD_API_PTR *PFNGLRASTERPOS3SPROC)(GLshort x, GLshort y, GLshort z);
+typedef void (GLAD_API_PTR *PFNGLRASTERPOS3SVPROC)(const GLshort * v);
+typedef void (GLAD_API_PTR *PFNGLRASTERPOS4DPROC)(GLdouble x, GLdouble y, GLdouble z, GLdouble w);
+typedef void (GLAD_API_PTR *PFNGLRASTERPOS4DVPROC)(const GLdouble * v);
+typedef void (GLAD_API_PTR *PFNGLRASTERPOS4FPROC)(GLfloat x, GLfloat y, GLfloat z, GLfloat w);
+typedef void (GLAD_API_PTR *PFNGLRASTERPOS4FVPROC)(const GLfloat * v);
+typedef void (GLAD_API_PTR *PFNGLRASTERPOS4IPROC)(GLint x, GLint y, GLint z, GLint w);
+typedef void (GLAD_API_PTR *PFNGLRASTERPOS4IVPROC)(const GLint * v);
+typedef void (GLAD_API_PTR *PFNGLRASTERPOS4SPROC)(GLshort x, GLshort y, GLshort z, GLshort w);
+typedef void (GLAD_API_PTR *PFNGLRASTERPOS4SVPROC)(const GLshort * v);
+typedef void (GLAD_API_PTR *PFNGLREADBUFFERPROC)(GLenum src);
+typedef void (GLAD_API_PTR *PFNGLREADPIXELSPROC)(GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, void * pixels);
+typedef void (GLAD_API_PTR *PFNGLRECTDPROC)(GLdouble x1, GLdouble y1, GLdouble x2, GLdouble y2);
+typedef void (GLAD_API_PTR *PFNGLRECTDVPROC)(const GLdouble * v1, const GLdouble * v2);
+typedef void (GLAD_API_PTR *PFNGLRECTFPROC)(GLfloat x1, GLfloat y1, GLfloat x2, GLfloat y2);
+typedef void (GLAD_API_PTR *PFNGLRECTFVPROC)(const GLfloat * v1, const GLfloat * v2);
+typedef void (GLAD_API_PTR *PFNGLRECTIPROC)(GLint x1, GLint y1, GLint x2, GLint y2);
+typedef void (GLAD_API_PTR *PFNGLRECTIVPROC)(const GLint * v1, const GLint * v2);
+typedef void (GLAD_API_PTR *PFNGLRECTSPROC)(GLshort x1, GLshort y1, GLshort x2, GLshort y2);
+typedef void (GLAD_API_PTR *PFNGLRECTSVPROC)(const GLshort * v1, const GLshort * v2);
+typedef GLint (GLAD_API_PTR *PFNGLRENDERMODEPROC)(GLenum mode);
+typedef void (GLAD_API_PTR *PFNGLRENDERBUFFERSTORAGEPROC)(GLenum target, GLenum internalformat, GLsizei width, GLsizei height);
+typedef void (GLAD_API_PTR *PFNGLRENDERBUFFERSTORAGEEXTPROC)(GLenum target, GLenum internalformat, GLsizei width, GLsizei height);
+typedef void (GLAD_API_PTR *PFNGLRENDERBUFFERSTORAGEMULTISAMPLEPROC)(GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height);
+typedef void (GLAD_API_PTR *PFNGLRENDERBUFFERSTORAGEMULTISAMPLEEXTPROC)(GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height);
+typedef void (GLAD_API_PTR *PFNGLROTATEDPROC)(GLdouble angle, GLdouble x, GLdouble y, GLdouble z);
+typedef void (GLAD_API_PTR *PFNGLROTATEFPROC)(GLfloat angle, GLfloat x, GLfloat y, GLfloat z);
+typedef void (GLAD_API_PTR *PFNGLSAMPLECOVERAGEPROC)(GLfloat value, GLboolean invert);
+typedef void (GLAD_API_PTR *PFNGLSAMPLEMASKIPROC)(GLuint maskNumber, GLbitfield mask);
+typedef void (GLAD_API_PTR *PFNGLSAMPLERPARAMETERIIVPROC)(GLuint sampler, GLenum pname, const GLint * param);
+typedef void (GLAD_API_PTR *PFNGLSAMPLERPARAMETERIUIVPROC)(GLuint sampler, GLenum pname, const GLuint * param);
+typedef void (GLAD_API_PTR *PFNGLSAMPLERPARAMETERFPROC)(GLuint sampler, GLenum pname, GLfloat param);
+typedef void (GLAD_API_PTR *PFNGLSAMPLERPARAMETERFVPROC)(GLuint sampler, GLenum pname, const GLfloat * param);
+typedef void (GLAD_API_PTR *PFNGLSAMPLERPARAMETERIPROC)(GLuint sampler, GLenum pname, GLint param);
+typedef void (GLAD_API_PTR *PFNGLSAMPLERPARAMETERIVPROC)(GLuint sampler, GLenum pname, const GLint * param);
+typedef void (GLAD_API_PTR *PFNGLSCALEDPROC)(GLdouble x, GLdouble y, GLdouble z);
+typedef void (GLAD_API_PTR *PFNGLSCALEFPROC)(GLfloat x, GLfloat y, GLfloat z);
+typedef void (GLAD_API_PTR *PFNGLSCISSORPROC)(GLint x, GLint y, GLsizei width, GLsizei height);
+typedef void (GLAD_API_PTR *PFNGLSECONDARYCOLOR3BPROC)(GLbyte red, GLbyte green, GLbyte blue);
+typedef void (GLAD_API_PTR *PFNGLSECONDARYCOLOR3BVPROC)(const GLbyte * v);
+typedef void (GLAD_API_PTR *PFNGLSECONDARYCOLOR3DPROC)(GLdouble red, GLdouble green, GLdouble blue);
+typedef void (GLAD_API_PTR *PFNGLSECONDARYCOLOR3DVPROC)(const GLdouble * v);
+typedef void (GLAD_API_PTR *PFNGLSECONDARYCOLOR3FPROC)(GLfloat red, GLfloat green, GLfloat blue);
+typedef void (GLAD_API_PTR *PFNGLSECONDARYCOLOR3FVPROC)(const GLfloat * v);
+typedef void (GLAD_API_PTR *PFNGLSECONDARYCOLOR3IPROC)(GLint red, GLint green, GLint blue);
+typedef void (GLAD_API_PTR *PFNGLSECONDARYCOLOR3IVPROC)(const GLint * v);
+typedef void (GLAD_API_PTR *PFNGLSECONDARYCOLOR3SPROC)(GLshort red, GLshort green, GLshort blue);
+typedef void (GLAD_API_PTR *PFNGLSECONDARYCOLOR3SVPROC)(const GLshort * v);
+typedef void (GLAD_API_PTR *PFNGLSECONDARYCOLOR3UBPROC)(GLubyte red, GLubyte green, GLubyte blue);
+typedef void (GLAD_API_PTR *PFNGLSECONDARYCOLOR3UBVPROC)(const GLubyte * v);
+typedef void (GLAD_API_PTR *PFNGLSECONDARYCOLOR3UIPROC)(GLuint red, GLuint green, GLuint blue);
+typedef void (GLAD_API_PTR *PFNGLSECONDARYCOLOR3UIVPROC)(const GLuint * v);
+typedef void (GLAD_API_PTR *PFNGLSECONDARYCOLOR3USPROC)(GLushort red, GLushort green, GLushort blue);
+typedef void (GLAD_API_PTR *PFNGLSECONDARYCOLOR3USVPROC)(const GLushort * v);
+typedef void (GLAD_API_PTR *PFNGLSECONDARYCOLORP3UIPROC)(GLenum type, GLuint color);
+typedef void (GLAD_API_PTR *PFNGLSECONDARYCOLORP3UIVPROC)(GLenum type, const GLuint * color);
+typedef void (GLAD_API_PTR *PFNGLSECONDARYCOLORPOINTERPROC)(GLint size, GLenum type, GLsizei stride, const void * pointer);
+typedef void (GLAD_API_PTR *PFNGLSELECTBUFFERPROC)(GLsizei size, GLuint * buffer);
+typedef void (GLAD_API_PTR *PFNGLSHADEMODELPROC)(GLenum mode);
+typedef void (GLAD_API_PTR *PFNGLSHADERSOURCEPROC)(GLuint shader, GLsizei count, const GLchar *const* string, const GLint * length);
+typedef void (GLAD_API_PTR *PFNGLSTENCILFUNCPROC)(GLenum func, GLint ref, GLuint mask);
+typedef void (GLAD_API_PTR *PFNGLSTENCILFUNCSEPARATEPROC)(GLenum face, GLenum func, GLint ref, GLuint mask);
+typedef void (GLAD_API_PTR *PFNGLSTENCILMASKPROC)(GLuint mask);
+typedef void (GLAD_API_PTR *PFNGLSTENCILMASKSEPARATEPROC)(GLenum face, GLuint mask);
+typedef void (GLAD_API_PTR *PFNGLSTENCILOPPROC)(GLenum fail, GLenum zfail, GLenum zpass);
+typedef void (GLAD_API_PTR *PFNGLSTENCILOPSEPARATEPROC)(GLenum face, GLenum sfail, GLenum dpfail, GLenum dppass);
+typedef void (GLAD_API_PTR *PFNGLTEXBUFFERPROC)(GLenum target, GLenum internalformat, GLuint buffer);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD1DPROC)(GLdouble s);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD1DVPROC)(const GLdouble * v);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD1FPROC)(GLfloat s);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD1FVPROC)(const GLfloat * v);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD1IPROC)(GLint s);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD1IVPROC)(const GLint * v);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD1SPROC)(GLshort s);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD1SVPROC)(const GLshort * v);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD2DPROC)(GLdouble s, GLdouble t);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD2DVPROC)(const GLdouble * v);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD2FPROC)(GLfloat s, GLfloat t);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD2FVPROC)(const GLfloat * v);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD2IPROC)(GLint s, GLint t);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD2IVPROC)(const GLint * v);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD2SPROC)(GLshort s, GLshort t);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD2SVPROC)(const GLshort * v);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD3DPROC)(GLdouble s, GLdouble t, GLdouble r);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD3DVPROC)(const GLdouble * v);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD3FPROC)(GLfloat s, GLfloat t, GLfloat r);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD3FVPROC)(const GLfloat * v);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD3IPROC)(GLint s, GLint t, GLint r);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD3IVPROC)(const GLint * v);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD3SPROC)(GLshort s, GLshort t, GLshort r);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD3SVPROC)(const GLshort * v);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD4DPROC)(GLdouble s, GLdouble t, GLdouble r, GLdouble q);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD4DVPROC)(const GLdouble * v);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD4FPROC)(GLfloat s, GLfloat t, GLfloat r, GLfloat q);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD4FVPROC)(const GLfloat * v);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD4IPROC)(GLint s, GLint t, GLint r, GLint q);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD4IVPROC)(const GLint * v);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD4SPROC)(GLshort s, GLshort t, GLshort r, GLshort q);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORD4SVPROC)(const GLshort * v);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORDP1UIPROC)(GLenum type, GLuint coords);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORDP1UIVPROC)(GLenum type, const GLuint * coords);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORDP2UIPROC)(GLenum type, GLuint coords);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORDP2UIVPROC)(GLenum type, const GLuint * coords);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORDP3UIPROC)(GLenum type, GLuint coords);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORDP3UIVPROC)(GLenum type, const GLuint * coords);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORDP4UIPROC)(GLenum type, GLuint coords);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORDP4UIVPROC)(GLenum type, const GLuint * coords);
+typedef void (GLAD_API_PTR *PFNGLTEXCOORDPOINTERPROC)(GLint size, GLenum type, GLsizei stride, const void * pointer);
+typedef void (GLAD_API_PTR *PFNGLTEXENVFPROC)(GLenum target, GLenum pname, GLfloat param);
+typedef void (GLAD_API_PTR *PFNGLTEXENVFVPROC)(GLenum target, GLenum pname, const GLfloat * params);
+typedef void (GLAD_API_PTR *PFNGLTEXENVIPROC)(GLenum target, GLenum pname, GLint param);
+typedef void (GLAD_API_PTR *PFNGLTEXENVIVPROC)(GLenum target, GLenum pname, const GLint * params);
+typedef void (GLAD_API_PTR *PFNGLTEXGENDPROC)(GLenum coord, GLenum pname, GLdouble param);
+typedef void (GLAD_API_PTR *PFNGLTEXGENDVPROC)(GLenum coord, GLenum pname, const GLdouble * params);
+typedef void (GLAD_API_PTR *PFNGLTEXGENFPROC)(GLenum coord, GLenum pname, GLfloat param);
+typedef void (GLAD_API_PTR *PFNGLTEXGENFVPROC)(GLenum coord, GLenum pname, const GLfloat * params);
+typedef void (GLAD_API_PTR *PFNGLTEXGENIPROC)(GLenum coord, GLenum pname, GLint param);
+typedef void (GLAD_API_PTR *PFNGLTEXGENIVPROC)(GLenum coord, GLenum pname, const GLint * params);
+typedef void (GLAD_API_PTR *PFNGLTEXIMAGE1DPROC)(GLenum target, GLint level, GLint internalformat, GLsizei width, GLint border, GLenum format, GLenum type, const void * pixels);
+typedef void (GLAD_API_PTR *PFNGLTEXIMAGE2DPROC)(GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void * pixels);
+typedef void (GLAD_API_PTR *PFNGLTEXIMAGE2DMULTISAMPLEPROC)(GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations);
+typedef void (GLAD_API_PTR *PFNGLTEXIMAGE3DPROC)(GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void * pixels);
+typedef void (GLAD_API_PTR *PFNGLTEXIMAGE3DMULTISAMPLEPROC)(GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations);
+typedef void (GLAD_API_PTR *PFNGLTEXPARAMETERIIVPROC)(GLenum target, GLenum pname, const GLint * params);
+typedef void (GLAD_API_PTR *PFNGLTEXPARAMETERIUIVPROC)(GLenum target, GLenum pname, const GLuint * params);
+typedef void (GLAD_API_PTR *PFNGLTEXPARAMETERFPROC)(GLenum target, GLenum pname, GLfloat param);
+typedef void (GLAD_API_PTR *PFNGLTEXPARAMETERFVPROC)(GLenum target, GLenum pname, const GLfloat * params);
+typedef void (GLAD_API_PTR *PFNGLTEXPARAMETERIPROC)(GLenum target, GLenum pname, GLint param);
+typedef void (GLAD_API_PTR *PFNGLTEXPARAMETERIVPROC)(GLenum target, GLenum pname, const GLint * params);
+typedef void (GLAD_API_PTR *PFNGLTEXSUBIMAGE1DPROC)(GLenum target, GLint level, GLint xoffset, GLsizei width, GLenum format, GLenum type, const void * pixels);
+typedef void (GLAD_API_PTR *PFNGLTEXSUBIMAGE2DPROC)(GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void * pixels);
+typedef void (GLAD_API_PTR *PFNGLTEXSUBIMAGE3DPROC)(GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void * pixels);
+typedef void (GLAD_API_PTR *PFNGLTRANSFORMFEEDBACKVARYINGSPROC)(GLuint program, GLsizei count, const GLchar *const* varyings, GLenum bufferMode);
+typedef void (GLAD_API_PTR *PFNGLTRANSLATEDPROC)(GLdouble x, GLdouble y, GLdouble z);
+typedef void (GLAD_API_PTR *PFNGLTRANSLATEFPROC)(GLfloat x, GLfloat y, GLfloat z);
+typedef void (GLAD_API_PTR *PFNGLUNIFORM1FPROC)(GLint location, GLfloat v0);
+typedef void (GLAD_API_PTR *PFNGLUNIFORM1FVPROC)(GLint location, GLsizei count, const GLfloat * value);
+typedef void (GLAD_API_PTR *PFNGLUNIFORM1IPROC)(GLint location, GLint v0);
+typedef void (GLAD_API_PTR *PFNGLUNIFORM1IVPROC)(GLint location, GLsizei count, const GLint * value);
+typedef void (GLAD_API_PTR *PFNGLUNIFORM1UIPROC)(GLint location, GLuint v0);
+typedef void (GLAD_API_PTR *PFNGLUNIFORM1UIVPROC)(GLint location, GLsizei count, const GLuint * value);
+typedef void (GLAD_API_PTR *PFNGLUNIFORM2FPROC)(GLint location, GLfloat v0, GLfloat v1);
+typedef void (GLAD_API_PTR *PFNGLUNIFORM2FVPROC)(GLint location, GLsizei count, const GLfloat * value);
+typedef void (GLAD_API_PTR *PFNGLUNIFORM2IPROC)(GLint location, GLint v0, GLint v1);
+typedef void (GLAD_API_PTR *PFNGLUNIFORM2IVPROC)(GLint location, GLsizei count, const GLint * value);
+typedef void (GLAD_API_PTR *PFNGLUNIFORM2UIPROC)(GLint location, GLuint v0, GLuint v1);
+typedef void (GLAD_API_PTR *PFNGLUNIFORM2UIVPROC)(GLint location, GLsizei count, const GLuint * value);
+typedef void (GLAD_API_PTR *PFNGLUNIFORM3FPROC)(GLint location, GLfloat v0, GLfloat v1, GLfloat v2);
+typedef void (GLAD_API_PTR *PFNGLUNIFORM3FVPROC)(GLint location, GLsizei count, const GLfloat * value);
+typedef void (GLAD_API_PTR *PFNGLUNIFORM3IPROC)(GLint location, GLint v0, GLint v1, GLint v2);
+typedef void (GLAD_API_PTR *PFNGLUNIFORM3IVPROC)(GLint location, GLsizei count, const GLint * value);
+typedef void (GLAD_API_PTR *PFNGLUNIFORM3UIPROC)(GLint location, GLuint v0, GLuint v1, GLuint v2);
+typedef void (GLAD_API_PTR *PFNGLUNIFORM3UIVPROC)(GLint location, GLsizei count, const GLuint * value);
+typedef void (GLAD_API_PTR *PFNGLUNIFORM4FPROC)(GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3);
+typedef void (GLAD_API_PTR *PFNGLUNIFORM4FVPROC)(GLint location, GLsizei count, const GLfloat * value);
+typedef void (GLAD_API_PTR *PFNGLUNIFORM4IPROC)(GLint location, GLint v0, GLint v1, GLint v2, GLint v3);
+typedef void (GLAD_API_PTR *PFNGLUNIFORM4IVPROC)(GLint location, GLsizei count, const GLint * value);
+typedef void (GLAD_API_PTR *PFNGLUNIFORM4UIPROC)(GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3);
+typedef void (GLAD_API_PTR *PFNGLUNIFORM4UIVPROC)(GLint location, GLsizei count, const GLuint * value);
+typedef void (GLAD_API_PTR *PFNGLUNIFORMBLOCKBINDINGPROC)(GLuint program, GLuint uniformBlockIndex, GLuint uniformBlockBinding);
+typedef void (GLAD_API_PTR *PFNGLUNIFORMMATRIX2FVPROC)(GLint location, GLsizei count, GLboolean transpose, const GLfloat * value);
+typedef void (GLAD_API_PTR *PFNGLUNIFORMMATRIX2X3FVPROC)(GLint location, GLsizei count, GLboolean transpose, const GLfloat * value);
+typedef void (GLAD_API_PTR *PFNGLUNIFORMMATRIX2X4FVPROC)(GLint location, GLsizei count, GLboolean transpose, const GLfloat * value);
+typedef void (GLAD_API_PTR *PFNGLUNIFORMMATRIX3FVPROC)(GLint location, GLsizei count, GLboolean transpose, const GLfloat * value);
+typedef void (GLAD_API_PTR *PFNGLUNIFORMMATRIX3X2FVPROC)(GLint location, GLsizei count, GLboolean transpose, const GLfloat * value);
+typedef void (GLAD_API_PTR *PFNGLUNIFORMMATRIX3X4FVPROC)(GLint location, GLsizei count, GLboolean transpose, const GLfloat * value);
+typedef void (GLAD_API_PTR *PFNGLUNIFORMMATRIX4FVPROC)(GLint location, GLsizei count, GLboolean transpose, const GLfloat * value);
+typedef void (GLAD_API_PTR *PFNGLUNIFORMMATRIX4X2FVPROC)(GLint location, GLsizei count, GLboolean transpose, const GLfloat * value);
+typedef void (GLAD_API_PTR *PFNGLUNIFORMMATRIX4X3FVPROC)(GLint location, GLsizei count, GLboolean transpose, const GLfloat * value);
+typedef GLboolean (GLAD_API_PTR *PFNGLUNMAPBUFFERPROC)(GLenum target);
+typedef void (GLAD_API_PTR *PFNGLUSEPROGRAMPROC)(GLuint program);
+typedef void (GLAD_API_PTR *PFNGLVALIDATEPROGRAMPROC)(GLuint program);
+typedef void (GLAD_API_PTR *PFNGLVERTEX2DPROC)(GLdouble x, GLdouble y);
+typedef void (GLAD_API_PTR *PFNGLVERTEX2DVPROC)(const GLdouble * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEX2FPROC)(GLfloat x, GLfloat y);
+typedef void (GLAD_API_PTR *PFNGLVERTEX2FVPROC)(const GLfloat * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEX2IPROC)(GLint x, GLint y);
+typedef void (GLAD_API_PTR *PFNGLVERTEX2IVPROC)(const GLint * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEX2SPROC)(GLshort x, GLshort y);
+typedef void (GLAD_API_PTR *PFNGLVERTEX2SVPROC)(const GLshort * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEX3DPROC)(GLdouble x, GLdouble y, GLdouble z);
+typedef void (GLAD_API_PTR *PFNGLVERTEX3DVPROC)(const GLdouble * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEX3FPROC)(GLfloat x, GLfloat y, GLfloat z);
+typedef void (GLAD_API_PTR *PFNGLVERTEX3FVPROC)(const GLfloat * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEX3IPROC)(GLint x, GLint y, GLint z);
+typedef void (GLAD_API_PTR *PFNGLVERTEX3IVPROC)(const GLint * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEX3SPROC)(GLshort x, GLshort y, GLshort z);
+typedef void (GLAD_API_PTR *PFNGLVERTEX3SVPROC)(const GLshort * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEX4DPROC)(GLdouble x, GLdouble y, GLdouble z, GLdouble w);
+typedef void (GLAD_API_PTR *PFNGLVERTEX4DVPROC)(const GLdouble * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEX4FPROC)(GLfloat x, GLfloat y, GLfloat z, GLfloat w);
+typedef void (GLAD_API_PTR *PFNGLVERTEX4FVPROC)(const GLfloat * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEX4IPROC)(GLint x, GLint y, GLint z, GLint w);
+typedef void (GLAD_API_PTR *PFNGLVERTEX4IVPROC)(const GLint * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEX4SPROC)(GLshort x, GLshort y, GLshort z, GLshort w);
+typedef void (GLAD_API_PTR *PFNGLVERTEX4SVPROC)(const GLshort * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB1DPROC)(GLuint index, GLdouble x);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB1DVPROC)(GLuint index, const GLdouble * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB1FPROC)(GLuint index, GLfloat x);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB1FVPROC)(GLuint index, const GLfloat * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB1SPROC)(GLuint index, GLshort x);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB1SVPROC)(GLuint index, const GLshort * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB2DPROC)(GLuint index, GLdouble x, GLdouble y);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB2DVPROC)(GLuint index, const GLdouble * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB2FPROC)(GLuint index, GLfloat x, GLfloat y);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB2FVPROC)(GLuint index, const GLfloat * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB2SPROC)(GLuint index, GLshort x, GLshort y);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB2SVPROC)(GLuint index, const GLshort * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB3DPROC)(GLuint index, GLdouble x, GLdouble y, GLdouble z);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB3DVPROC)(GLuint index, const GLdouble * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB3FPROC)(GLuint index, GLfloat x, GLfloat y, GLfloat z);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB3FVPROC)(GLuint index, const GLfloat * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB3SPROC)(GLuint index, GLshort x, GLshort y, GLshort z);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB3SVPROC)(GLuint index, const GLshort * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB4NBVPROC)(GLuint index, const GLbyte * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB4NIVPROC)(GLuint index, const GLint * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB4NSVPROC)(GLuint index, const GLshort * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB4NUBPROC)(GLuint index, GLubyte x, GLubyte y, GLubyte z, GLubyte w);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB4NUBVPROC)(GLuint index, const GLubyte * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB4NUIVPROC)(GLuint index, const GLuint * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB4NUSVPROC)(GLuint index, const GLushort * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB4BVPROC)(GLuint index, const GLbyte * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB4DPROC)(GLuint index, GLdouble x, GLdouble y, GLdouble z, GLdouble w);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB4DVPROC)(GLuint index, const GLdouble * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB4FPROC)(GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB4FVPROC)(GLuint index, const GLfloat * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB4IVPROC)(GLuint index, const GLint * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB4SPROC)(GLuint index, GLshort x, GLshort y, GLshort z, GLshort w);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB4SVPROC)(GLuint index, const GLshort * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB4UBVPROC)(GLuint index, const GLubyte * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB4UIVPROC)(GLuint index, const GLuint * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIB4USVPROC)(GLuint index, const GLushort * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBDIVISORPROC)(GLuint index, GLuint divisor);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBI1IPROC)(GLuint index, GLint x);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBI1IVPROC)(GLuint index, const GLint * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBI1UIPROC)(GLuint index, GLuint x);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBI1UIVPROC)(GLuint index, const GLuint * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBI2IPROC)(GLuint index, GLint x, GLint y);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBI2IVPROC)(GLuint index, const GLint * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBI2UIPROC)(GLuint index, GLuint x, GLuint y);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBI2UIVPROC)(GLuint index, const GLuint * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBI3IPROC)(GLuint index, GLint x, GLint y, GLint z);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBI3IVPROC)(GLuint index, const GLint * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBI3UIPROC)(GLuint index, GLuint x, GLuint y, GLuint z);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBI3UIVPROC)(GLuint index, const GLuint * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBI4BVPROC)(GLuint index, const GLbyte * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBI4IPROC)(GLuint index, GLint x, GLint y, GLint z, GLint w);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBI4IVPROC)(GLuint index, const GLint * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBI4SVPROC)(GLuint index, const GLshort * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBI4UBVPROC)(GLuint index, const GLubyte * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBI4UIPROC)(GLuint index, GLuint x, GLuint y, GLuint z, GLuint w);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBI4UIVPROC)(GLuint index, const GLuint * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBI4USVPROC)(GLuint index, const GLushort * v);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBIPOINTERPROC)(GLuint index, GLint size, GLenum type, GLsizei stride, const void * pointer);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBP1UIPROC)(GLuint index, GLenum type, GLboolean normalized, GLuint value);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBP1UIVPROC)(GLuint index, GLenum type, GLboolean normalized, const GLuint * value);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBP2UIPROC)(GLuint index, GLenum type, GLboolean normalized, GLuint value);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBP2UIVPROC)(GLuint index, GLenum type, GLboolean normalized, const GLuint * value);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBP3UIPROC)(GLuint index, GLenum type, GLboolean normalized, GLuint value);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBP3UIVPROC)(GLuint index, GLenum type, GLboolean normalized, const GLuint * value);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBP4UIPROC)(GLuint index, GLenum type, GLboolean normalized, GLuint value);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBP4UIVPROC)(GLuint index, GLenum type, GLboolean normalized, const GLuint * value);
+typedef void (GLAD_API_PTR *PFNGLVERTEXATTRIBPOINTERPROC)(GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, const void * pointer);
+typedef void (GLAD_API_PTR *PFNGLVERTEXP2UIPROC)(GLenum type, GLuint value);
+typedef void (GLAD_API_PTR *PFNGLVERTEXP2UIVPROC)(GLenum type, const GLuint * value);
+typedef void (GLAD_API_PTR *PFNGLVERTEXP3UIPROC)(GLenum type, GLuint value);
+typedef void (GLAD_API_PTR *PFNGLVERTEXP3UIVPROC)(GLenum type, const GLuint * value);
+typedef void (GLAD_API_PTR *PFNGLVERTEXP4UIPROC)(GLenum type, GLuint value);
+typedef void (GLAD_API_PTR *PFNGLVERTEXP4UIVPROC)(GLenum type, const GLuint * value);
+typedef void (GLAD_API_PTR *PFNGLVERTEXPOINTERPROC)(GLint size, GLenum type, GLsizei stride, const void * pointer);
+typedef void (GLAD_API_PTR *PFNGLVIEWPORTPROC)(GLint x, GLint y, GLsizei width, GLsizei height);
+typedef void (GLAD_API_PTR *PFNGLWAITSYNCPROC)(GLsync sync, GLbitfield flags, GLuint64 timeout);
+typedef void (GLAD_API_PTR *PFNGLWINDOWPOS2DPROC)(GLdouble x, GLdouble y);
+typedef void (GLAD_API_PTR *PFNGLWINDOWPOS2DVPROC)(const GLdouble * v);
+typedef void (GLAD_API_PTR *PFNGLWINDOWPOS2FPROC)(GLfloat x, GLfloat y);
+typedef void (GLAD_API_PTR *PFNGLWINDOWPOS2FVPROC)(const GLfloat * v);
+typedef void (GLAD_API_PTR *PFNGLWINDOWPOS2IPROC)(GLint x, GLint y);
+typedef void (GLAD_API_PTR *PFNGLWINDOWPOS2IVPROC)(const GLint * v);
+typedef void (GLAD_API_PTR *PFNGLWINDOWPOS2SPROC)(GLshort x, GLshort y);
+typedef void (GLAD_API_PTR *PFNGLWINDOWPOS2SVPROC)(const GLshort * v);
+typedef void (GLAD_API_PTR *PFNGLWINDOWPOS3DPROC)(GLdouble x, GLdouble y, GLdouble z);
+typedef void (GLAD_API_PTR *PFNGLWINDOWPOS3DVPROC)(const GLdouble * v);
+typedef void (GLAD_API_PTR *PFNGLWINDOWPOS3FPROC)(GLfloat x, GLfloat y, GLfloat z);
+typedef void (GLAD_API_PTR *PFNGLWINDOWPOS3FVPROC)(const GLfloat * v);
+typedef void (GLAD_API_PTR *PFNGLWINDOWPOS3IPROC)(GLint x, GLint y, GLint z);
+typedef void (GLAD_API_PTR *PFNGLWINDOWPOS3IVPROC)(const GLint * v);
+typedef void (GLAD_API_PTR *PFNGLWINDOWPOS3SPROC)(GLshort x, GLshort y, GLshort z);
+typedef void (GLAD_API_PTR *PFNGLWINDOWPOS3SVPROC)(const GLshort * v);
+
+GLAD_API_CALL PFNGLACCUMPROC glad_glAccum;
+#define glAccum glad_glAccum
+GLAD_API_CALL PFNGLACTIVETEXTUREPROC glad_glActiveTexture;
+#define glActiveTexture glad_glActiveTexture
+GLAD_API_CALL PFNGLALPHAFUNCPROC glad_glAlphaFunc;
+#define glAlphaFunc glad_glAlphaFunc
+GLAD_API_CALL PFNGLARETEXTURESRESIDENTPROC glad_glAreTexturesResident;
+#define glAreTexturesResident glad_glAreTexturesResident
+GLAD_API_CALL PFNGLARRAYELEMENTPROC glad_glArrayElement;
+#define glArrayElement glad_glArrayElement
+GLAD_API_CALL PFNGLATTACHSHADERPROC glad_glAttachShader;
+#define glAttachShader glad_glAttachShader
+GLAD_API_CALL PFNGLBEGINPROC glad_glBegin;
+#define glBegin glad_glBegin
+GLAD_API_CALL PFNGLBEGINCONDITIONALRENDERPROC glad_glBeginConditionalRender;
+#define glBeginConditionalRender glad_glBeginConditionalRender
+GLAD_API_CALL PFNGLBEGINQUERYPROC glad_glBeginQuery;
+#define glBeginQuery glad_glBeginQuery
+GLAD_API_CALL PFNGLBEGINTRANSFORMFEEDBACKPROC glad_glBeginTransformFeedback;
+#define glBeginTransformFeedback glad_glBeginTransformFeedback
+GLAD_API_CALL PFNGLBINDATTRIBLOCATIONPROC glad_glBindAttribLocation;
+#define glBindAttribLocation glad_glBindAttribLocation
+GLAD_API_CALL PFNGLBINDBUFFERPROC glad_glBindBuffer;
+#define glBindBuffer glad_glBindBuffer
+GLAD_API_CALL PFNGLBINDBUFFERBASEPROC glad_glBindBufferBase;
+#define glBindBufferBase glad_glBindBufferBase
+GLAD_API_CALL PFNGLBINDBUFFERRANGEPROC glad_glBindBufferRange;
+#define glBindBufferRange glad_glBindBufferRange
+GLAD_API_CALL PFNGLBINDFRAGDATALOCATIONPROC glad_glBindFragDataLocation;
+#define glBindFragDataLocation glad_glBindFragDataLocation
+GLAD_API_CALL PFNGLBINDFRAGDATALOCATIONINDEXEDPROC glad_glBindFragDataLocationIndexed;
+#define glBindFragDataLocationIndexed glad_glBindFragDataLocationIndexed
+GLAD_API_CALL PFNGLBINDFRAMEBUFFERPROC glad_glBindFramebuffer;
+#define glBindFramebuffer glad_glBindFramebuffer
+GLAD_API_CALL PFNGLBINDFRAMEBUFFEREXTPROC glad_glBindFramebufferEXT;
+#define glBindFramebufferEXT glad_glBindFramebufferEXT
+GLAD_API_CALL PFNGLBINDRENDERBUFFERPROC glad_glBindRenderbuffer;
+#define glBindRenderbuffer glad_glBindRenderbuffer
+GLAD_API_CALL PFNGLBINDRENDERBUFFEREXTPROC glad_glBindRenderbufferEXT;
+#define glBindRenderbufferEXT glad_glBindRenderbufferEXT
+GLAD_API_CALL PFNGLBINDSAMPLERPROC glad_glBindSampler;
+#define glBindSampler glad_glBindSampler
+GLAD_API_CALL PFNGLBINDTEXTUREPROC glad_glBindTexture;
+#define glBindTexture glad_glBindTexture
+GLAD_API_CALL PFNGLBINDVERTEXARRAYPROC glad_glBindVertexArray;
+#define glBindVertexArray glad_glBindVertexArray
+GLAD_API_CALL PFNGLBITMAPPROC glad_glBitmap;
+#define glBitmap glad_glBitmap
+GLAD_API_CALL PFNGLBLENDCOLORPROC glad_glBlendColor;
+#define glBlendColor glad_glBlendColor
+GLAD_API_CALL PFNGLBLENDEQUATIONPROC glad_glBlendEquation;
+#define glBlendEquation glad_glBlendEquation
+GLAD_API_CALL PFNGLBLENDEQUATIONSEPARATEPROC glad_glBlendEquationSeparate;
+#define glBlendEquationSeparate glad_glBlendEquationSeparate
+GLAD_API_CALL PFNGLBLENDFUNCPROC glad_glBlendFunc;
+#define glBlendFunc glad_glBlendFunc
+GLAD_API_CALL PFNGLBLENDFUNCSEPARATEPROC glad_glBlendFuncSeparate;
+#define glBlendFuncSeparate glad_glBlendFuncSeparate
+GLAD_API_CALL PFNGLBLITFRAMEBUFFERPROC glad_glBlitFramebuffer;
+#define glBlitFramebuffer glad_glBlitFramebuffer
+GLAD_API_CALL PFNGLBLITFRAMEBUFFEREXTPROC glad_glBlitFramebufferEXT;
+#define glBlitFramebufferEXT glad_glBlitFramebufferEXT
+GLAD_API_CALL PFNGLBUFFERDATAPROC glad_glBufferData;
+#define glBufferData glad_glBufferData
+GLAD_API_CALL PFNGLBUFFERSUBDATAPROC glad_glBufferSubData;
+#define glBufferSubData glad_glBufferSubData
+GLAD_API_CALL PFNGLCALLLISTPROC glad_glCallList;
+#define glCallList glad_glCallList
+GLAD_API_CALL PFNGLCALLLISTSPROC glad_glCallLists;
+#define glCallLists glad_glCallLists
+GLAD_API_CALL PFNGLCHECKFRAMEBUFFERSTATUSPROC glad_glCheckFramebufferStatus;
+#define glCheckFramebufferStatus glad_glCheckFramebufferStatus
+GLAD_API_CALL PFNGLCHECKFRAMEBUFFERSTATUSEXTPROC glad_glCheckFramebufferStatusEXT;
+#define glCheckFramebufferStatusEXT glad_glCheckFramebufferStatusEXT
+GLAD_API_CALL PFNGLCLAMPCOLORPROC glad_glClampColor;
+#define glClampColor glad_glClampColor
+GLAD_API_CALL PFNGLCLEARPROC glad_glClear;
+#define glClear glad_glClear
+GLAD_API_CALL PFNGLCLEARACCUMPROC glad_glClearAccum;
+#define glClearAccum glad_glClearAccum
+GLAD_API_CALL PFNGLCLEARBUFFERFIPROC glad_glClearBufferfi;
+#define glClearBufferfi glad_glClearBufferfi
+GLAD_API_CALL PFNGLCLEARBUFFERFVPROC glad_glClearBufferfv;
+#define glClearBufferfv glad_glClearBufferfv
+GLAD_API_CALL PFNGLCLEARBUFFERIVPROC glad_glClearBufferiv;
+#define glClearBufferiv glad_glClearBufferiv
+GLAD_API_CALL PFNGLCLEARBUFFERUIVPROC glad_glClearBufferuiv;
+#define glClearBufferuiv glad_glClearBufferuiv
+GLAD_API_CALL PFNGLCLEARCOLORPROC glad_glClearColor;
+#define glClearColor glad_glClearColor
+GLAD_API_CALL PFNGLCLEARDEPTHPROC glad_glClearDepth;
+#define glClearDepth glad_glClearDepth
+GLAD_API_CALL PFNGLCLEARINDEXPROC glad_glClearIndex;
+#define glClearIndex glad_glClearIndex
+GLAD_API_CALL PFNGLCLEARSTENCILPROC glad_glClearStencil;
+#define glClearStencil glad_glClearStencil
+GLAD_API_CALL PFNGLCLIENTACTIVETEXTUREPROC glad_glClientActiveTexture;
+#define glClientActiveTexture glad_glClientActiveTexture
+GLAD_API_CALL PFNGLCLIENTWAITSYNCPROC glad_glClientWaitSync;
+#define glClientWaitSync glad_glClientWaitSync
+GLAD_API_CALL PFNGLCLIPPLANEPROC glad_glClipPlane;
+#define glClipPlane glad_glClipPlane
+GLAD_API_CALL PFNGLCOLOR3BPROC glad_glColor3b;
+#define glColor3b glad_glColor3b
+GLAD_API_CALL PFNGLCOLOR3BVPROC glad_glColor3bv;
+#define glColor3bv glad_glColor3bv
+GLAD_API_CALL PFNGLCOLOR3DPROC glad_glColor3d;
+#define glColor3d glad_glColor3d
+GLAD_API_CALL PFNGLCOLOR3DVPROC glad_glColor3dv;
+#define glColor3dv glad_glColor3dv
+GLAD_API_CALL PFNGLCOLOR3FPROC glad_glColor3f;
+#define glColor3f glad_glColor3f
+GLAD_API_CALL PFNGLCOLOR3FVPROC glad_glColor3fv;
+#define glColor3fv glad_glColor3fv
+GLAD_API_CALL PFNGLCOLOR3IPROC glad_glColor3i;
+#define glColor3i glad_glColor3i
+GLAD_API_CALL PFNGLCOLOR3IVPROC glad_glColor3iv;
+#define glColor3iv glad_glColor3iv
+GLAD_API_CALL PFNGLCOLOR3SPROC glad_glColor3s;
+#define glColor3s glad_glColor3s
+GLAD_API_CALL PFNGLCOLOR3SVPROC glad_glColor3sv;
+#define glColor3sv glad_glColor3sv
+GLAD_API_CALL PFNGLCOLOR3UBPROC glad_glColor3ub;
+#define glColor3ub glad_glColor3ub
+GLAD_API_CALL PFNGLCOLOR3UBVPROC glad_glColor3ubv;
+#define glColor3ubv glad_glColor3ubv
+GLAD_API_CALL PFNGLCOLOR3UIPROC glad_glColor3ui;
+#define glColor3ui glad_glColor3ui
+GLAD_API_CALL PFNGLCOLOR3UIVPROC glad_glColor3uiv;
+#define glColor3uiv glad_glColor3uiv
+GLAD_API_CALL PFNGLCOLOR3USPROC glad_glColor3us;
+#define glColor3us glad_glColor3us
+GLAD_API_CALL PFNGLCOLOR3USVPROC glad_glColor3usv;
+#define glColor3usv glad_glColor3usv
+GLAD_API_CALL PFNGLCOLOR4BPROC glad_glColor4b;
+#define glColor4b glad_glColor4b
+GLAD_API_CALL PFNGLCOLOR4BVPROC glad_glColor4bv;
+#define glColor4bv glad_glColor4bv
+GLAD_API_CALL PFNGLCOLOR4DPROC glad_glColor4d;
+#define glColor4d glad_glColor4d
+GLAD_API_CALL PFNGLCOLOR4DVPROC glad_glColor4dv;
+#define glColor4dv glad_glColor4dv
+GLAD_API_CALL PFNGLCOLOR4FPROC glad_glColor4f;
+#define glColor4f glad_glColor4f
+GLAD_API_CALL PFNGLCOLOR4FVPROC glad_glColor4fv;
+#define glColor4fv glad_glColor4fv
+GLAD_API_CALL PFNGLCOLOR4IPROC glad_glColor4i;
+#define glColor4i glad_glColor4i
+GLAD_API_CALL PFNGLCOLOR4IVPROC glad_glColor4iv;
+#define glColor4iv glad_glColor4iv
+GLAD_API_CALL PFNGLCOLOR4SPROC glad_glColor4s;
+#define glColor4s glad_glColor4s
+GLAD_API_CALL PFNGLCOLOR4SVPROC glad_glColor4sv;
+#define glColor4sv glad_glColor4sv
+GLAD_API_CALL PFNGLCOLOR4UBPROC glad_glColor4ub;
+#define glColor4ub glad_glColor4ub
+GLAD_API_CALL PFNGLCOLOR4UBVPROC glad_glColor4ubv;
+#define glColor4ubv glad_glColor4ubv
+GLAD_API_CALL PFNGLCOLOR4UIPROC glad_glColor4ui;
+#define glColor4ui glad_glColor4ui
+GLAD_API_CALL PFNGLCOLOR4UIVPROC glad_glColor4uiv;
+#define glColor4uiv glad_glColor4uiv
+GLAD_API_CALL PFNGLCOLOR4USPROC glad_glColor4us;
+#define glColor4us glad_glColor4us
+GLAD_API_CALL PFNGLCOLOR4USVPROC glad_glColor4usv;
+#define glColor4usv glad_glColor4usv
+GLAD_API_CALL PFNGLCOLORMASKPROC glad_glColorMask;
+#define glColorMask glad_glColorMask
+GLAD_API_CALL PFNGLCOLORMASKIPROC glad_glColorMaski;
+#define glColorMaski glad_glColorMaski
+GLAD_API_CALL PFNGLCOLORMATERIALPROC glad_glColorMaterial;
+#define glColorMaterial glad_glColorMaterial
+GLAD_API_CALL PFNGLCOLORP3UIPROC glad_glColorP3ui;
+#define glColorP3ui glad_glColorP3ui
+GLAD_API_CALL PFNGLCOLORP3UIVPROC glad_glColorP3uiv;
+#define glColorP3uiv glad_glColorP3uiv
+GLAD_API_CALL PFNGLCOLORP4UIPROC glad_glColorP4ui;
+#define glColorP4ui glad_glColorP4ui
+GLAD_API_CALL PFNGLCOLORP4UIVPROC glad_glColorP4uiv;
+#define glColorP4uiv glad_glColorP4uiv
+GLAD_API_CALL PFNGLCOLORPOINTERPROC glad_glColorPointer;
+#define glColorPointer glad_glColorPointer
+GLAD_API_CALL PFNGLCOMPILESHADERPROC glad_glCompileShader;
+#define glCompileShader glad_glCompileShader
+GLAD_API_CALL PFNGLCOMPRESSEDTEXIMAGE1DPROC glad_glCompressedTexImage1D;
+#define glCompressedTexImage1D glad_glCompressedTexImage1D
+GLAD_API_CALL PFNGLCOMPRESSEDTEXIMAGE2DPROC glad_glCompressedTexImage2D;
+#define glCompressedTexImage2D glad_glCompressedTexImage2D
+GLAD_API_CALL PFNGLCOMPRESSEDTEXIMAGE3DPROC glad_glCompressedTexImage3D;
+#define glCompressedTexImage3D glad_glCompressedTexImage3D
+GLAD_API_CALL PFNGLCOMPRESSEDTEXSUBIMAGE1DPROC glad_glCompressedTexSubImage1D;
+#define glCompressedTexSubImage1D glad_glCompressedTexSubImage1D
+GLAD_API_CALL PFNGLCOMPRESSEDTEXSUBIMAGE2DPROC glad_glCompressedTexSubImage2D;
+#define glCompressedTexSubImage2D glad_glCompressedTexSubImage2D
+GLAD_API_CALL PFNGLCOMPRESSEDTEXSUBIMAGE3DPROC glad_glCompressedTexSubImage3D;
+#define glCompressedTexSubImage3D glad_glCompressedTexSubImage3D
+GLAD_API_CALL PFNGLCOPYBUFFERSUBDATAPROC glad_glCopyBufferSubData;
+#define glCopyBufferSubData glad_glCopyBufferSubData
+GLAD_API_CALL PFNGLCOPYPIXELSPROC glad_glCopyPixels;
+#define glCopyPixels glad_glCopyPixels
+GLAD_API_CALL PFNGLCOPYTEXIMAGE1DPROC glad_glCopyTexImage1D;
+#define glCopyTexImage1D glad_glCopyTexImage1D
+GLAD_API_CALL PFNGLCOPYTEXIMAGE2DPROC glad_glCopyTexImage2D;
+#define glCopyTexImage2D glad_glCopyTexImage2D
+GLAD_API_CALL PFNGLCOPYTEXSUBIMAGE1DPROC glad_glCopyTexSubImage1D;
+#define glCopyTexSubImage1D glad_glCopyTexSubImage1D
+GLAD_API_CALL PFNGLCOPYTEXSUBIMAGE2DPROC glad_glCopyTexSubImage2D;
+#define glCopyTexSubImage2D glad_glCopyTexSubImage2D
+GLAD_API_CALL PFNGLCOPYTEXSUBIMAGE3DPROC glad_glCopyTexSubImage3D;
+#define glCopyTexSubImage3D glad_glCopyTexSubImage3D
+GLAD_API_CALL PFNGLCREATEPROGRAMPROC glad_glCreateProgram;
+#define glCreateProgram glad_glCreateProgram
+GLAD_API_CALL PFNGLCREATESHADERPROC glad_glCreateShader;
+#define glCreateShader glad_glCreateShader
+GLAD_API_CALL PFNGLCULLFACEPROC glad_glCullFace;
+#define glCullFace glad_glCullFace
+GLAD_API_CALL PFNGLDEBUGMESSAGECALLBACKARBPROC glad_glDebugMessageCallbackARB;
+#define glDebugMessageCallbackARB glad_glDebugMessageCallbackARB
+GLAD_API_CALL PFNGLDEBUGMESSAGECONTROLARBPROC glad_glDebugMessageControlARB;
+#define glDebugMessageControlARB glad_glDebugMessageControlARB
+GLAD_API_CALL PFNGLDEBUGMESSAGEINSERTARBPROC glad_glDebugMessageInsertARB;
+#define glDebugMessageInsertARB glad_glDebugMessageInsertARB
+GLAD_API_CALL PFNGLDELETEBUFFERSPROC glad_glDeleteBuffers;
+#define glDeleteBuffers glad_glDeleteBuffers
+GLAD_API_CALL PFNGLDELETEFRAMEBUFFERSPROC glad_glDeleteFramebuffers;
+#define glDeleteFramebuffers glad_glDeleteFramebuffers
+GLAD_API_CALL PFNGLDELETEFRAMEBUFFERSEXTPROC glad_glDeleteFramebuffersEXT;
+#define glDeleteFramebuffersEXT glad_glDeleteFramebuffersEXT
+GLAD_API_CALL PFNGLDELETELISTSPROC glad_glDeleteLists;
+#define glDeleteLists glad_glDeleteLists
+GLAD_API_CALL PFNGLDELETEPROGRAMPROC glad_glDeleteProgram;
+#define glDeleteProgram glad_glDeleteProgram
+GLAD_API_CALL PFNGLDELETEQUERIESPROC glad_glDeleteQueries;
+#define glDeleteQueries glad_glDeleteQueries
+GLAD_API_CALL PFNGLDELETERENDERBUFFERSPROC glad_glDeleteRenderbuffers;
+#define glDeleteRenderbuffers glad_glDeleteRenderbuffers
+GLAD_API_CALL PFNGLDELETERENDERBUFFERSEXTPROC glad_glDeleteRenderbuffersEXT;
+#define glDeleteRenderbuffersEXT glad_glDeleteRenderbuffersEXT
+GLAD_API_CALL PFNGLDELETESAMPLERSPROC glad_glDeleteSamplers;
+#define glDeleteSamplers glad_glDeleteSamplers
+GLAD_API_CALL PFNGLDELETESHADERPROC glad_glDeleteShader;
+#define glDeleteShader glad_glDeleteShader
+GLAD_API_CALL PFNGLDELETESYNCPROC glad_glDeleteSync;
+#define glDeleteSync glad_glDeleteSync
+GLAD_API_CALL PFNGLDELETETEXTURESPROC glad_glDeleteTextures;
+#define glDeleteTextures glad_glDeleteTextures
+GLAD_API_CALL PFNGLDELETEVERTEXARRAYSPROC glad_glDeleteVertexArrays;
+#define glDeleteVertexArrays glad_glDeleteVertexArrays
+GLAD_API_CALL PFNGLDEPTHFUNCPROC glad_glDepthFunc;
+#define glDepthFunc glad_glDepthFunc
+GLAD_API_CALL PFNGLDEPTHMASKPROC glad_glDepthMask;
+#define glDepthMask glad_glDepthMask
+GLAD_API_CALL PFNGLDEPTHRANGEPROC glad_glDepthRange;
+#define glDepthRange glad_glDepthRange
+GLAD_API_CALL PFNGLDETACHSHADERPROC glad_glDetachShader;
+#define glDetachShader glad_glDetachShader
+GLAD_API_CALL PFNGLDISABLEPROC glad_glDisable;
+#define glDisable glad_glDisable
+GLAD_API_CALL PFNGLDISABLECLIENTSTATEPROC glad_glDisableClientState;
+#define glDisableClientState glad_glDisableClientState
+GLAD_API_CALL PFNGLDISABLEVERTEXATTRIBARRAYPROC glad_glDisableVertexAttribArray;
+#define glDisableVertexAttribArray glad_glDisableVertexAttribArray
+GLAD_API_CALL PFNGLDISABLEIPROC glad_glDisablei;
+#define glDisablei glad_glDisablei
+GLAD_API_CALL PFNGLDRAWARRAYSPROC glad_glDrawArrays;
+#define glDrawArrays glad_glDrawArrays
+GLAD_API_CALL PFNGLDRAWARRAYSINSTANCEDPROC glad_glDrawArraysInstanced;
+#define glDrawArraysInstanced glad_glDrawArraysInstanced
+GLAD_API_CALL PFNGLDRAWBUFFERPROC glad_glDrawBuffer;
+#define glDrawBuffer glad_glDrawBuffer
+GLAD_API_CALL PFNGLDRAWBUFFERSPROC glad_glDrawBuffers;
+#define glDrawBuffers glad_glDrawBuffers
+GLAD_API_CALL PFNGLDRAWELEMENTSPROC glad_glDrawElements;
+#define glDrawElements glad_glDrawElements
+GLAD_API_CALL PFNGLDRAWELEMENTSBASEVERTEXPROC glad_glDrawElementsBaseVertex;
+#define glDrawElementsBaseVertex glad_glDrawElementsBaseVertex
+GLAD_API_CALL PFNGLDRAWELEMENTSINSTANCEDPROC glad_glDrawElementsInstanced;
+#define glDrawElementsInstanced glad_glDrawElementsInstanced
+GLAD_API_CALL PFNGLDRAWELEMENTSINSTANCEDBASEVERTEXPROC glad_glDrawElementsInstancedBaseVertex;
+#define glDrawElementsInstancedBaseVertex glad_glDrawElementsInstancedBaseVertex
+GLAD_API_CALL PFNGLDRAWPIXELSPROC glad_glDrawPixels;
+#define glDrawPixels glad_glDrawPixels
+GLAD_API_CALL PFNGLDRAWRANGEELEMENTSPROC glad_glDrawRangeElements;
+#define glDrawRangeElements glad_glDrawRangeElements
+GLAD_API_CALL PFNGLDRAWRANGEELEMENTSBASEVERTEXPROC glad_glDrawRangeElementsBaseVertex;
+#define glDrawRangeElementsBaseVertex glad_glDrawRangeElementsBaseVertex
+GLAD_API_CALL PFNGLEDGEFLAGPROC glad_glEdgeFlag;
+#define glEdgeFlag glad_glEdgeFlag
+GLAD_API_CALL PFNGLEDGEFLAGPOINTERPROC glad_glEdgeFlagPointer;
+#define glEdgeFlagPointer glad_glEdgeFlagPointer
+GLAD_API_CALL PFNGLEDGEFLAGVPROC glad_glEdgeFlagv;
+#define glEdgeFlagv glad_glEdgeFlagv
+GLAD_API_CALL PFNGLENABLEPROC glad_glEnable;
+#define glEnable glad_glEnable
+GLAD_API_CALL PFNGLENABLECLIENTSTATEPROC glad_glEnableClientState;
+#define glEnableClientState glad_glEnableClientState
+GLAD_API_CALL PFNGLENABLEVERTEXATTRIBARRAYPROC glad_glEnableVertexAttribArray;
+#define glEnableVertexAttribArray glad_glEnableVertexAttribArray
+GLAD_API_CALL PFNGLENABLEIPROC glad_glEnablei;
+#define glEnablei glad_glEnablei
+GLAD_API_CALL PFNGLENDPROC glad_glEnd;
+#define glEnd glad_glEnd
+GLAD_API_CALL PFNGLENDCONDITIONALRENDERPROC glad_glEndConditionalRender;
+#define glEndConditionalRender glad_glEndConditionalRender
+GLAD_API_CALL PFNGLENDLISTPROC glad_glEndList;
+#define glEndList glad_glEndList
+GLAD_API_CALL PFNGLENDQUERYPROC glad_glEndQuery;
+#define glEndQuery glad_glEndQuery
+GLAD_API_CALL PFNGLENDTRANSFORMFEEDBACKPROC glad_glEndTransformFeedback;
+#define glEndTransformFeedback glad_glEndTransformFeedback
+GLAD_API_CALL PFNGLEVALCOORD1DPROC glad_glEvalCoord1d;
+#define glEvalCoord1d glad_glEvalCoord1d
+GLAD_API_CALL PFNGLEVALCOORD1DVPROC glad_glEvalCoord1dv;
+#define glEvalCoord1dv glad_glEvalCoord1dv
+GLAD_API_CALL PFNGLEVALCOORD1FPROC glad_glEvalCoord1f;
+#define glEvalCoord1f glad_glEvalCoord1f
+GLAD_API_CALL PFNGLEVALCOORD1FVPROC glad_glEvalCoord1fv;
+#define glEvalCoord1fv glad_glEvalCoord1fv
+GLAD_API_CALL PFNGLEVALCOORD2DPROC glad_glEvalCoord2d;
+#define glEvalCoord2d glad_glEvalCoord2d
+GLAD_API_CALL PFNGLEVALCOORD2DVPROC glad_glEvalCoord2dv;
+#define glEvalCoord2dv glad_glEvalCoord2dv
+GLAD_API_CALL PFNGLEVALCOORD2FPROC glad_glEvalCoord2f;
+#define glEvalCoord2f glad_glEvalCoord2f
+GLAD_API_CALL PFNGLEVALCOORD2FVPROC glad_glEvalCoord2fv;
+#define glEvalCoord2fv glad_glEvalCoord2fv
+GLAD_API_CALL PFNGLEVALMESH1PROC glad_glEvalMesh1;
+#define glEvalMesh1 glad_glEvalMesh1
+GLAD_API_CALL PFNGLEVALMESH2PROC glad_glEvalMesh2;
+#define glEvalMesh2 glad_glEvalMesh2
+GLAD_API_CALL PFNGLEVALPOINT1PROC glad_glEvalPoint1;
+#define glEvalPoint1 glad_glEvalPoint1
+GLAD_API_CALL PFNGLEVALPOINT2PROC glad_glEvalPoint2;
+#define glEvalPoint2 glad_glEvalPoint2
+GLAD_API_CALL PFNGLFEEDBACKBUFFERPROC glad_glFeedbackBuffer;
+#define glFeedbackBuffer glad_glFeedbackBuffer
+GLAD_API_CALL PFNGLFENCESYNCPROC glad_glFenceSync;
+#define glFenceSync glad_glFenceSync
+GLAD_API_CALL PFNGLFINISHPROC glad_glFinish;
+#define glFinish glad_glFinish
+GLAD_API_CALL PFNGLFLUSHPROC glad_glFlush;
+#define glFlush glad_glFlush
+GLAD_API_CALL PFNGLFLUSHMAPPEDBUFFERRANGEPROC glad_glFlushMappedBufferRange;
+#define glFlushMappedBufferRange glad_glFlushMappedBufferRange
+GLAD_API_CALL PFNGLFOGCOORDPOINTERPROC glad_glFogCoordPointer;
+#define glFogCoordPointer glad_glFogCoordPointer
+GLAD_API_CALL PFNGLFOGCOORDDPROC glad_glFogCoordd;
+#define glFogCoordd glad_glFogCoordd
+GLAD_API_CALL PFNGLFOGCOORDDVPROC glad_glFogCoorddv;
+#define glFogCoorddv glad_glFogCoorddv
+GLAD_API_CALL PFNGLFOGCOORDFPROC glad_glFogCoordf;
+#define glFogCoordf glad_glFogCoordf
+GLAD_API_CALL PFNGLFOGCOORDFVPROC glad_glFogCoordfv;
+#define glFogCoordfv glad_glFogCoordfv
+GLAD_API_CALL PFNGLFOGFPROC glad_glFogf;
+#define glFogf glad_glFogf
+GLAD_API_CALL PFNGLFOGFVPROC glad_glFogfv;
+#define glFogfv glad_glFogfv
+GLAD_API_CALL PFNGLFOGIPROC glad_glFogi;
+#define glFogi glad_glFogi
+GLAD_API_CALL PFNGLFOGIVPROC glad_glFogiv;
+#define glFogiv glad_glFogiv
+GLAD_API_CALL PFNGLFRAMEBUFFERRENDERBUFFERPROC glad_glFramebufferRenderbuffer;
+#define glFramebufferRenderbuffer glad_glFramebufferRenderbuffer
+GLAD_API_CALL PFNGLFRAMEBUFFERRENDERBUFFEREXTPROC glad_glFramebufferRenderbufferEXT;
+#define glFramebufferRenderbufferEXT glad_glFramebufferRenderbufferEXT
+GLAD_API_CALL PFNGLFRAMEBUFFERTEXTUREPROC glad_glFramebufferTexture;
+#define glFramebufferTexture glad_glFramebufferTexture
+GLAD_API_CALL PFNGLFRAMEBUFFERTEXTURE1DPROC glad_glFramebufferTexture1D;
+#define glFramebufferTexture1D glad_glFramebufferTexture1D
+GLAD_API_CALL PFNGLFRAMEBUFFERTEXTURE1DEXTPROC glad_glFramebufferTexture1DEXT;
+#define glFramebufferTexture1DEXT glad_glFramebufferTexture1DEXT
+GLAD_API_CALL PFNGLFRAMEBUFFERTEXTURE2DPROC glad_glFramebufferTexture2D;
+#define glFramebufferTexture2D glad_glFramebufferTexture2D
+GLAD_API_CALL PFNGLFRAMEBUFFERTEXTURE2DEXTPROC glad_glFramebufferTexture2DEXT;
+#define glFramebufferTexture2DEXT glad_glFramebufferTexture2DEXT
+GLAD_API_CALL PFNGLFRAMEBUFFERTEXTURE3DPROC glad_glFramebufferTexture3D;
+#define glFramebufferTexture3D glad_glFramebufferTexture3D
+GLAD_API_CALL PFNGLFRAMEBUFFERTEXTURE3DEXTPROC glad_glFramebufferTexture3DEXT;
+#define glFramebufferTexture3DEXT glad_glFramebufferTexture3DEXT
+GLAD_API_CALL PFNGLFRAMEBUFFERTEXTURELAYERPROC glad_glFramebufferTextureLayer;
+#define glFramebufferTextureLayer glad_glFramebufferTextureLayer
+GLAD_API_CALL PFNGLFRAMEBUFFERTEXTUREMULTIVIEWOVRPROC glad_glFramebufferTextureMultiviewOVR;
+#define glFramebufferTextureMultiviewOVR glad_glFramebufferTextureMultiviewOVR
+GLAD_API_CALL PFNGLFRONTFACEPROC glad_glFrontFace;
+#define glFrontFace glad_glFrontFace
+GLAD_API_CALL PFNGLFRUSTUMPROC glad_glFrustum;
+#define glFrustum glad_glFrustum
+GLAD_API_CALL PFNGLGENBUFFERSPROC glad_glGenBuffers;
+#define glGenBuffers glad_glGenBuffers
+GLAD_API_CALL PFNGLGENFRAMEBUFFERSPROC glad_glGenFramebuffers;
+#define glGenFramebuffers glad_glGenFramebuffers
+GLAD_API_CALL PFNGLGENFRAMEBUFFERSEXTPROC glad_glGenFramebuffersEXT;
+#define glGenFramebuffersEXT glad_glGenFramebuffersEXT
+GLAD_API_CALL PFNGLGENLISTSPROC glad_glGenLists;
+#define glGenLists glad_glGenLists
+GLAD_API_CALL PFNGLGENQUERIESPROC glad_glGenQueries;
+#define glGenQueries glad_glGenQueries
+GLAD_API_CALL PFNGLGENRENDERBUFFERSPROC glad_glGenRenderbuffers;
+#define glGenRenderbuffers glad_glGenRenderbuffers
+GLAD_API_CALL PFNGLGENRENDERBUFFERSEXTPROC glad_glGenRenderbuffersEXT;
+#define glGenRenderbuffersEXT glad_glGenRenderbuffersEXT
+GLAD_API_CALL PFNGLGENSAMPLERSPROC glad_glGenSamplers;
+#define glGenSamplers glad_glGenSamplers
+GLAD_API_CALL PFNGLGENTEXTURESPROC glad_glGenTextures;
+#define glGenTextures glad_glGenTextures
+GLAD_API_CALL PFNGLGENVERTEXARRAYSPROC glad_glGenVertexArrays;
+#define glGenVertexArrays glad_glGenVertexArrays
+GLAD_API_CALL PFNGLGENERATEMIPMAPPROC glad_glGenerateMipmap;
+#define glGenerateMipmap glad_glGenerateMipmap
+GLAD_API_CALL PFNGLGENERATEMIPMAPEXTPROC glad_glGenerateMipmapEXT;
+#define glGenerateMipmapEXT glad_glGenerateMipmapEXT
+GLAD_API_CALL PFNGLGETACTIVEATTRIBPROC glad_glGetActiveAttrib;
+#define glGetActiveAttrib glad_glGetActiveAttrib
+GLAD_API_CALL PFNGLGETACTIVEUNIFORMPROC glad_glGetActiveUniform;
+#define glGetActiveUniform glad_glGetActiveUniform
+GLAD_API_CALL PFNGLGETACTIVEUNIFORMBLOCKNAMEPROC glad_glGetActiveUniformBlockName;
+#define glGetActiveUniformBlockName glad_glGetActiveUniformBlockName
+GLAD_API_CALL PFNGLGETACTIVEUNIFORMBLOCKIVPROC glad_glGetActiveUniformBlockiv;
+#define glGetActiveUniformBlockiv glad_glGetActiveUniformBlockiv
+GLAD_API_CALL PFNGLGETACTIVEUNIFORMNAMEPROC glad_glGetActiveUniformName;
+#define glGetActiveUniformName glad_glGetActiveUniformName
+GLAD_API_CALL PFNGLGETACTIVEUNIFORMSIVPROC glad_glGetActiveUniformsiv;
+#define glGetActiveUniformsiv glad_glGetActiveUniformsiv
+GLAD_API_CALL PFNGLGETATTACHEDSHADERSPROC glad_glGetAttachedShaders;
+#define glGetAttachedShaders glad_glGetAttachedShaders
+GLAD_API_CALL PFNGLGETATTRIBLOCATIONPROC glad_glGetAttribLocation;
+#define glGetAttribLocation glad_glGetAttribLocation
+GLAD_API_CALL PFNGLGETBOOLEANI_VPROC glad_glGetBooleani_v;
+#define glGetBooleani_v glad_glGetBooleani_v
+GLAD_API_CALL PFNGLGETBOOLEANVPROC glad_glGetBooleanv;
+#define glGetBooleanv glad_glGetBooleanv
+GLAD_API_CALL PFNGLGETBUFFERPARAMETERI64VPROC glad_glGetBufferParameteri64v;
+#define glGetBufferParameteri64v glad_glGetBufferParameteri64v
+GLAD_API_CALL PFNGLGETBUFFERPARAMETERIVPROC glad_glGetBufferParameteriv;
+#define glGetBufferParameteriv glad_glGetBufferParameteriv
+GLAD_API_CALL PFNGLGETBUFFERPOINTERVPROC glad_glGetBufferPointerv;
+#define glGetBufferPointerv glad_glGetBufferPointerv
+GLAD_API_CALL PFNGLGETBUFFERSUBDATAPROC glad_glGetBufferSubData;
+#define glGetBufferSubData glad_glGetBufferSubData
+GLAD_API_CALL PFNGLGETCLIPPLANEPROC glad_glGetClipPlane;
+#define glGetClipPlane glad_glGetClipPlane
+GLAD_API_CALL PFNGLGETCOMPRESSEDTEXIMAGEPROC glad_glGetCompressedTexImage;
+#define glGetCompressedTexImage glad_glGetCompressedTexImage
+GLAD_API_CALL PFNGLGETDEBUGMESSAGELOGARBPROC glad_glGetDebugMessageLogARB;
+#define glGetDebugMessageLogARB glad_glGetDebugMessageLogARB
+GLAD_API_CALL PFNGLGETDOUBLEVPROC glad_glGetDoublev;
+#define glGetDoublev glad_glGetDoublev
+GLAD_API_CALL PFNGLGETERRORPROC glad_glGetError;
+#define glGetError glad_glGetError
+GLAD_API_CALL PFNGLGETFLOATVPROC glad_glGetFloatv;
+#define glGetFloatv glad_glGetFloatv
+GLAD_API_CALL PFNGLGETFRAGDATAINDEXPROC glad_glGetFragDataIndex;
+#define glGetFragDataIndex glad_glGetFragDataIndex
+GLAD_API_CALL PFNGLGETFRAGDATALOCATIONPROC glad_glGetFragDataLocation;
+#define glGetFragDataLocation glad_glGetFragDataLocation
+GLAD_API_CALL PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVPROC glad_glGetFramebufferAttachmentParameteriv;
+#define glGetFramebufferAttachmentParameteriv glad_glGetFramebufferAttachmentParameteriv
+GLAD_API_CALL PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVEXTPROC glad_glGetFramebufferAttachmentParameterivEXT;
+#define glGetFramebufferAttachmentParameterivEXT glad_glGetFramebufferAttachmentParameterivEXT
+GLAD_API_CALL PFNGLGETINTEGER64I_VPROC glad_glGetInteger64i_v;
+#define glGetInteger64i_v glad_glGetInteger64i_v
+GLAD_API_CALL PFNGLGETINTEGER64VPROC glad_glGetInteger64v;
+#define glGetInteger64v glad_glGetInteger64v
+GLAD_API_CALL PFNGLGETINTEGERI_VPROC glad_glGetIntegeri_v;
+#define glGetIntegeri_v glad_glGetIntegeri_v
+GLAD_API_CALL PFNGLGETINTEGERVPROC glad_glGetIntegerv;
+#define glGetIntegerv glad_glGetIntegerv
+GLAD_API_CALL PFNGLGETLIGHTFVPROC glad_glGetLightfv;
+#define glGetLightfv glad_glGetLightfv
+GLAD_API_CALL PFNGLGETLIGHTIVPROC glad_glGetLightiv;
+#define glGetLightiv glad_glGetLightiv
+GLAD_API_CALL PFNGLGETMAPDVPROC glad_glGetMapdv;
+#define glGetMapdv glad_glGetMapdv
+GLAD_API_CALL PFNGLGETMAPFVPROC glad_glGetMapfv;
+#define glGetMapfv glad_glGetMapfv
+GLAD_API_CALL PFNGLGETMAPIVPROC glad_glGetMapiv;
+#define glGetMapiv glad_glGetMapiv
+GLAD_API_CALL PFNGLGETMATERIALFVPROC glad_glGetMaterialfv;
+#define glGetMaterialfv glad_glGetMaterialfv
+GLAD_API_CALL PFNGLGETMATERIALIVPROC glad_glGetMaterialiv;
+#define glGetMaterialiv glad_glGetMaterialiv
+GLAD_API_CALL PFNGLGETMULTISAMPLEFVPROC glad_glGetMultisamplefv;
+#define glGetMultisamplefv glad_glGetMultisamplefv
+GLAD_API_CALL PFNGLGETPIXELMAPFVPROC glad_glGetPixelMapfv;
+#define glGetPixelMapfv glad_glGetPixelMapfv
+GLAD_API_CALL PFNGLGETPIXELMAPUIVPROC glad_glGetPixelMapuiv;
+#define glGetPixelMapuiv glad_glGetPixelMapuiv
+GLAD_API_CALL PFNGLGETPIXELMAPUSVPROC glad_glGetPixelMapusv;
+#define glGetPixelMapusv glad_glGetPixelMapusv
+GLAD_API_CALL PFNGLGETPOINTERVPROC glad_glGetPointerv;
+#define glGetPointerv glad_glGetPointerv
+GLAD_API_CALL PFNGLGETPOLYGONSTIPPLEPROC glad_glGetPolygonStipple;
+#define glGetPolygonStipple glad_glGetPolygonStipple
+GLAD_API_CALL PFNGLGETPROGRAMINFOLOGPROC glad_glGetProgramInfoLog;
+#define glGetProgramInfoLog glad_glGetProgramInfoLog
+GLAD_API_CALL PFNGLGETPROGRAMIVPROC glad_glGetProgramiv;
+#define glGetProgramiv glad_glGetProgramiv
+GLAD_API_CALL PFNGLGETQUERYOBJECTI64VPROC glad_glGetQueryObjecti64v;
+#define glGetQueryObjecti64v glad_glGetQueryObjecti64v
+GLAD_API_CALL PFNGLGETQUERYOBJECTIVPROC glad_glGetQueryObjectiv;
+#define glGetQueryObjectiv glad_glGetQueryObjectiv
+GLAD_API_CALL PFNGLGETQUERYOBJECTUI64VPROC glad_glGetQueryObjectui64v;
+#define glGetQueryObjectui64v glad_glGetQueryObjectui64v
+GLAD_API_CALL PFNGLGETQUERYOBJECTUIVPROC glad_glGetQueryObjectuiv;
+#define glGetQueryObjectuiv glad_glGetQueryObjectuiv
+GLAD_API_CALL PFNGLGETQUERYIVPROC glad_glGetQueryiv;
+#define glGetQueryiv glad_glGetQueryiv
+GLAD_API_CALL PFNGLGETRENDERBUFFERPARAMETERIVPROC glad_glGetRenderbufferParameteriv;
+#define glGetRenderbufferParameteriv glad_glGetRenderbufferParameteriv
+GLAD_API_CALL PFNGLGETRENDERBUFFERPARAMETERIVEXTPROC glad_glGetRenderbufferParameterivEXT;
+#define glGetRenderbufferParameterivEXT glad_glGetRenderbufferParameterivEXT
+GLAD_API_CALL PFNGLGETSAMPLERPARAMETERIIVPROC glad_glGetSamplerParameterIiv;
+#define glGetSamplerParameterIiv glad_glGetSamplerParameterIiv
+GLAD_API_CALL PFNGLGETSAMPLERPARAMETERIUIVPROC glad_glGetSamplerParameterIuiv;
+#define glGetSamplerParameterIuiv glad_glGetSamplerParameterIuiv
+GLAD_API_CALL PFNGLGETSAMPLERPARAMETERFVPROC glad_glGetSamplerParameterfv;
+#define glGetSamplerParameterfv glad_glGetSamplerParameterfv
+GLAD_API_CALL PFNGLGETSAMPLERPARAMETERIVPROC glad_glGetSamplerParameteriv;
+#define glGetSamplerParameteriv glad_glGetSamplerParameteriv
+GLAD_API_CALL PFNGLGETSHADERINFOLOGPROC glad_glGetShaderInfoLog;
+#define glGetShaderInfoLog glad_glGetShaderInfoLog
+GLAD_API_CALL PFNGLGETSHADERSOURCEPROC glad_glGetShaderSource;
+#define glGetShaderSource glad_glGetShaderSource
+GLAD_API_CALL PFNGLGETSHADERIVPROC glad_glGetShaderiv;
+#define glGetShaderiv glad_glGetShaderiv
+GLAD_API_CALL PFNGLGETSTRINGPROC glad_glGetString;
+#define glGetString glad_glGetString
+GLAD_API_CALL PFNGLGETSTRINGIPROC glad_glGetStringi;
+#define glGetStringi glad_glGetStringi
+GLAD_API_CALL PFNGLGETSYNCIVPROC glad_glGetSynciv;
+#define glGetSynciv glad_glGetSynciv
+GLAD_API_CALL PFNGLGETTEXENVFVPROC glad_glGetTexEnvfv;
+#define glGetTexEnvfv glad_glGetTexEnvfv
+GLAD_API_CALL PFNGLGETTEXENVIVPROC glad_glGetTexEnviv;
+#define glGetTexEnviv glad_glGetTexEnviv
+GLAD_API_CALL PFNGLGETTEXGENDVPROC glad_glGetTexGendv;
+#define glGetTexGendv glad_glGetTexGendv
+GLAD_API_CALL PFNGLGETTEXGENFVPROC glad_glGetTexGenfv;
+#define glGetTexGenfv glad_glGetTexGenfv
+GLAD_API_CALL PFNGLGETTEXGENIVPROC glad_glGetTexGeniv;
+#define glGetTexGeniv glad_glGetTexGeniv
+GLAD_API_CALL PFNGLGETTEXIMAGEPROC glad_glGetTexImage;
+#define glGetTexImage glad_glGetTexImage
+GLAD_API_CALL PFNGLGETTEXLEVELPARAMETERFVPROC glad_glGetTexLevelParameterfv;
+#define glGetTexLevelParameterfv glad_glGetTexLevelParameterfv
+GLAD_API_CALL PFNGLGETTEXLEVELPARAMETERIVPROC glad_glGetTexLevelParameteriv;
+#define glGetTexLevelParameteriv glad_glGetTexLevelParameteriv
+GLAD_API_CALL PFNGLGETTEXPARAMETERIIVPROC glad_glGetTexParameterIiv;
+#define glGetTexParameterIiv glad_glGetTexParameterIiv
+GLAD_API_CALL PFNGLGETTEXPARAMETERIUIVPROC glad_glGetTexParameterIuiv;
+#define glGetTexParameterIuiv glad_glGetTexParameterIuiv
+GLAD_API_CALL PFNGLGETTEXPARAMETERFVPROC glad_glGetTexParameterfv;
+#define glGetTexParameterfv glad_glGetTexParameterfv
+GLAD_API_CALL PFNGLGETTEXPARAMETERIVPROC glad_glGetTexParameteriv;
+#define glGetTexParameteriv glad_glGetTexParameteriv
+GLAD_API_CALL PFNGLGETTRANSFORMFEEDBACKVARYINGPROC glad_glGetTransformFeedbackVarying;
+#define glGetTransformFeedbackVarying glad_glGetTransformFeedbackVarying
+GLAD_API_CALL PFNGLGETUNIFORMBLOCKINDEXPROC glad_glGetUniformBlockIndex;
+#define glGetUniformBlockIndex glad_glGetUniformBlockIndex
+GLAD_API_CALL PFNGLGETUNIFORMINDICESPROC glad_glGetUniformIndices;
+#define glGetUniformIndices glad_glGetUniformIndices
+GLAD_API_CALL PFNGLGETUNIFORMLOCATIONPROC glad_glGetUniformLocation;
+#define glGetUniformLocation glad_glGetUniformLocation
+GLAD_API_CALL PFNGLGETUNIFORMFVPROC glad_glGetUniformfv;
+#define glGetUniformfv glad_glGetUniformfv
+GLAD_API_CALL PFNGLGETUNIFORMIVPROC glad_glGetUniformiv;
+#define glGetUniformiv glad_glGetUniformiv
+GLAD_API_CALL PFNGLGETUNIFORMUIVPROC glad_glGetUniformuiv;
+#define glGetUniformuiv glad_glGetUniformuiv
+GLAD_API_CALL PFNGLGETVERTEXATTRIBIIVPROC glad_glGetVertexAttribIiv;
+#define glGetVertexAttribIiv glad_glGetVertexAttribIiv
+GLAD_API_CALL PFNGLGETVERTEXATTRIBIUIVPROC glad_glGetVertexAttribIuiv;
+#define glGetVertexAttribIuiv glad_glGetVertexAttribIuiv
+GLAD_API_CALL PFNGLGETVERTEXATTRIBPOINTERVPROC glad_glGetVertexAttribPointerv;
+#define glGetVertexAttribPointerv glad_glGetVertexAttribPointerv
+GLAD_API_CALL PFNGLGETVERTEXATTRIBDVPROC glad_glGetVertexAttribdv;
+#define glGetVertexAttribdv glad_glGetVertexAttribdv
+GLAD_API_CALL PFNGLGETVERTEXATTRIBFVPROC glad_glGetVertexAttribfv;
+#define glGetVertexAttribfv glad_glGetVertexAttribfv
+GLAD_API_CALL PFNGLGETVERTEXATTRIBIVPROC glad_glGetVertexAttribiv;
+#define glGetVertexAttribiv glad_glGetVertexAttribiv
+GLAD_API_CALL PFNGLHINTPROC glad_glHint;
+#define glHint glad_glHint
+GLAD_API_CALL PFNGLINDEXMASKPROC glad_glIndexMask;
+#define glIndexMask glad_glIndexMask
+GLAD_API_CALL PFNGLINDEXPOINTERPROC glad_glIndexPointer;
+#define glIndexPointer glad_glIndexPointer
+GLAD_API_CALL PFNGLINDEXDPROC glad_glIndexd;
+#define glIndexd glad_glIndexd
+GLAD_API_CALL PFNGLINDEXDVPROC glad_glIndexdv;
+#define glIndexdv glad_glIndexdv
+GLAD_API_CALL PFNGLINDEXFPROC glad_glIndexf;
+#define glIndexf glad_glIndexf
+GLAD_API_CALL PFNGLINDEXFVPROC glad_glIndexfv;
+#define glIndexfv glad_glIndexfv
+GLAD_API_CALL PFNGLINDEXIPROC glad_glIndexi;
+#define glIndexi glad_glIndexi
+GLAD_API_CALL PFNGLINDEXIVPROC glad_glIndexiv;
+#define glIndexiv glad_glIndexiv
+GLAD_API_CALL PFNGLINDEXSPROC glad_glIndexs;
+#define glIndexs glad_glIndexs
+GLAD_API_CALL PFNGLINDEXSVPROC glad_glIndexsv;
+#define glIndexsv glad_glIndexsv
+GLAD_API_CALL PFNGLINDEXUBPROC glad_glIndexub;
+#define glIndexub glad_glIndexub
+GLAD_API_CALL PFNGLINDEXUBVPROC glad_glIndexubv;
+#define glIndexubv glad_glIndexubv
+GLAD_API_CALL PFNGLINITNAMESPROC glad_glInitNames;
+#define glInitNames glad_glInitNames
+GLAD_API_CALL PFNGLINTERLEAVEDARRAYSPROC glad_glInterleavedArrays;
+#define glInterleavedArrays glad_glInterleavedArrays
+GLAD_API_CALL PFNGLISBUFFERPROC glad_glIsBuffer;
+#define glIsBuffer glad_glIsBuffer
+GLAD_API_CALL PFNGLISENABLEDPROC glad_glIsEnabled;
+#define glIsEnabled glad_glIsEnabled
+GLAD_API_CALL PFNGLISENABLEDIPROC glad_glIsEnabledi;
+#define glIsEnabledi glad_glIsEnabledi
+GLAD_API_CALL PFNGLISFRAMEBUFFERPROC glad_glIsFramebuffer;
+#define glIsFramebuffer glad_glIsFramebuffer
+GLAD_API_CALL PFNGLISFRAMEBUFFEREXTPROC glad_glIsFramebufferEXT;
+#define glIsFramebufferEXT glad_glIsFramebufferEXT
+GLAD_API_CALL PFNGLISLISTPROC glad_glIsList;
+#define glIsList glad_glIsList
+GLAD_API_CALL PFNGLISPROGRAMPROC glad_glIsProgram;
+#define glIsProgram glad_glIsProgram
+GLAD_API_CALL PFNGLISQUERYPROC glad_glIsQuery;
+#define glIsQuery glad_glIsQuery
+GLAD_API_CALL PFNGLISRENDERBUFFERPROC glad_glIsRenderbuffer;
+#define glIsRenderbuffer glad_glIsRenderbuffer
+GLAD_API_CALL PFNGLISRENDERBUFFEREXTPROC glad_glIsRenderbufferEXT;
+#define glIsRenderbufferEXT glad_glIsRenderbufferEXT
+GLAD_API_CALL PFNGLISSAMPLERPROC glad_glIsSampler;
+#define glIsSampler glad_glIsSampler
+GLAD_API_CALL PFNGLISSHADERPROC glad_glIsShader;
+#define glIsShader glad_glIsShader
+GLAD_API_CALL PFNGLISSYNCPROC glad_glIsSync;
+#define glIsSync glad_glIsSync
+GLAD_API_CALL PFNGLISTEXTUREPROC glad_glIsTexture;
+#define glIsTexture glad_glIsTexture
+GLAD_API_CALL PFNGLISVERTEXARRAYPROC glad_glIsVertexArray;
+#define glIsVertexArray glad_glIsVertexArray
+GLAD_API_CALL PFNGLLIGHTMODELFPROC glad_glLightModelf;
+#define glLightModelf glad_glLightModelf
+GLAD_API_CALL PFNGLLIGHTMODELFVPROC glad_glLightModelfv;
+#define glLightModelfv glad_glLightModelfv
+GLAD_API_CALL PFNGLLIGHTMODELIPROC glad_glLightModeli;
+#define glLightModeli glad_glLightModeli
+GLAD_API_CALL PFNGLLIGHTMODELIVPROC glad_glLightModeliv;
+#define glLightModeliv glad_glLightModeliv
+GLAD_API_CALL PFNGLLIGHTFPROC glad_glLightf;
+#define glLightf glad_glLightf
+GLAD_API_CALL PFNGLLIGHTFVPROC glad_glLightfv;
+#define glLightfv glad_glLightfv
+GLAD_API_CALL PFNGLLIGHTIPROC glad_glLighti;
+#define glLighti glad_glLighti
+GLAD_API_CALL PFNGLLIGHTIVPROC glad_glLightiv;
+#define glLightiv glad_glLightiv
+GLAD_API_CALL PFNGLLINESTIPPLEPROC glad_glLineStipple;
+#define glLineStipple glad_glLineStipple
+GLAD_API_CALL PFNGLLINEWIDTHPROC glad_glLineWidth;
+#define glLineWidth glad_glLineWidth
+GLAD_API_CALL PFNGLLINKPROGRAMPROC glad_glLinkProgram;
+#define glLinkProgram glad_glLinkProgram
+GLAD_API_CALL PFNGLLISTBASEPROC glad_glListBase;
+#define glListBase glad_glListBase
+GLAD_API_CALL PFNGLLOADIDENTITYPROC glad_glLoadIdentity;
+#define glLoadIdentity glad_glLoadIdentity
+GLAD_API_CALL PFNGLLOADMATRIXDPROC glad_glLoadMatrixd;
+#define glLoadMatrixd glad_glLoadMatrixd
+GLAD_API_CALL PFNGLLOADMATRIXFPROC glad_glLoadMatrixf;
+#define glLoadMatrixf glad_glLoadMatrixf
+GLAD_API_CALL PFNGLLOADNAMEPROC glad_glLoadName;
+#define glLoadName glad_glLoadName
+GLAD_API_CALL PFNGLLOADTRANSPOSEMATRIXDPROC glad_glLoadTransposeMatrixd;
+#define glLoadTransposeMatrixd glad_glLoadTransposeMatrixd
+GLAD_API_CALL PFNGLLOADTRANSPOSEMATRIXFPROC glad_glLoadTransposeMatrixf;
+#define glLoadTransposeMatrixf glad_glLoadTransposeMatrixf
+GLAD_API_CALL PFNGLLOGICOPPROC glad_glLogicOp;
+#define glLogicOp glad_glLogicOp
+GLAD_API_CALL PFNGLMAP1DPROC glad_glMap1d;
+#define glMap1d glad_glMap1d
+GLAD_API_CALL PFNGLMAP1FPROC glad_glMap1f;
+#define glMap1f glad_glMap1f
+GLAD_API_CALL PFNGLMAP2DPROC glad_glMap2d;
+#define glMap2d glad_glMap2d
+GLAD_API_CALL PFNGLMAP2FPROC glad_glMap2f;
+#define glMap2f glad_glMap2f
+GLAD_API_CALL PFNGLMAPBUFFERPROC glad_glMapBuffer;
+#define glMapBuffer glad_glMapBuffer
+GLAD_API_CALL PFNGLMAPBUFFERRANGEPROC glad_glMapBufferRange;
+#define glMapBufferRange glad_glMapBufferRange
+GLAD_API_CALL PFNGLMAPGRID1DPROC glad_glMapGrid1d;
+#define glMapGrid1d glad_glMapGrid1d
+GLAD_API_CALL PFNGLMAPGRID1FPROC glad_glMapGrid1f;
+#define glMapGrid1f glad_glMapGrid1f
+GLAD_API_CALL PFNGLMAPGRID2DPROC glad_glMapGrid2d;
+#define glMapGrid2d glad_glMapGrid2d
+GLAD_API_CALL PFNGLMAPGRID2FPROC glad_glMapGrid2f;
+#define glMapGrid2f glad_glMapGrid2f
+GLAD_API_CALL PFNGLMATERIALFPROC glad_glMaterialf;
+#define glMaterialf glad_glMaterialf
+GLAD_API_CALL PFNGLMATERIALFVPROC glad_glMaterialfv;
+#define glMaterialfv glad_glMaterialfv
+GLAD_API_CALL PFNGLMATERIALIPROC glad_glMateriali;
+#define glMateriali glad_glMateriali
+GLAD_API_CALL PFNGLMATERIALIVPROC glad_glMaterialiv;
+#define glMaterialiv glad_glMaterialiv
+GLAD_API_CALL PFNGLMATRIXMODEPROC glad_glMatrixMode;
+#define glMatrixMode glad_glMatrixMode
+GLAD_API_CALL PFNGLMULTMATRIXDPROC glad_glMultMatrixd;
+#define glMultMatrixd glad_glMultMatrixd
+GLAD_API_CALL PFNGLMULTMATRIXFPROC glad_glMultMatrixf;
+#define glMultMatrixf glad_glMultMatrixf
+GLAD_API_CALL PFNGLMULTTRANSPOSEMATRIXDPROC glad_glMultTransposeMatrixd;
+#define glMultTransposeMatrixd glad_glMultTransposeMatrixd
+GLAD_API_CALL PFNGLMULTTRANSPOSEMATRIXFPROC glad_glMultTransposeMatrixf;
+#define glMultTransposeMatrixf glad_glMultTransposeMatrixf
+GLAD_API_CALL PFNGLMULTIDRAWARRAYSPROC glad_glMultiDrawArrays;
+#define glMultiDrawArrays glad_glMultiDrawArrays
+GLAD_API_CALL PFNGLMULTIDRAWELEMENTSPROC glad_glMultiDrawElements;
+#define glMultiDrawElements glad_glMultiDrawElements
+GLAD_API_CALL PFNGLMULTIDRAWELEMENTSBASEVERTEXPROC glad_glMultiDrawElementsBaseVertex;
+#define glMultiDrawElementsBaseVertex glad_glMultiDrawElementsBaseVertex
+GLAD_API_CALL PFNGLMULTITEXCOORD1DPROC glad_glMultiTexCoord1d;
+#define glMultiTexCoord1d glad_glMultiTexCoord1d
+GLAD_API_CALL PFNGLMULTITEXCOORD1DVPROC glad_glMultiTexCoord1dv;
+#define glMultiTexCoord1dv glad_glMultiTexCoord1dv
+GLAD_API_CALL PFNGLMULTITEXCOORD1FPROC glad_glMultiTexCoord1f;
+#define glMultiTexCoord1f glad_glMultiTexCoord1f
+GLAD_API_CALL PFNGLMULTITEXCOORD1FVPROC glad_glMultiTexCoord1fv;
+#define glMultiTexCoord1fv glad_glMultiTexCoord1fv
+GLAD_API_CALL PFNGLMULTITEXCOORD1IPROC glad_glMultiTexCoord1i;
+#define glMultiTexCoord1i glad_glMultiTexCoord1i
+GLAD_API_CALL PFNGLMULTITEXCOORD1IVPROC glad_glMultiTexCoord1iv;
+#define glMultiTexCoord1iv glad_glMultiTexCoord1iv
+GLAD_API_CALL PFNGLMULTITEXCOORD1SPROC glad_glMultiTexCoord1s;
+#define glMultiTexCoord1s glad_glMultiTexCoord1s
+GLAD_API_CALL PFNGLMULTITEXCOORD1SVPROC glad_glMultiTexCoord1sv;
+#define glMultiTexCoord1sv glad_glMultiTexCoord1sv
+GLAD_API_CALL PFNGLMULTITEXCOORD2DPROC glad_glMultiTexCoord2d;
+#define glMultiTexCoord2d glad_glMultiTexCoord2d
+GLAD_API_CALL PFNGLMULTITEXCOORD2DVPROC glad_glMultiTexCoord2dv;
+#define glMultiTexCoord2dv glad_glMultiTexCoord2dv
+GLAD_API_CALL PFNGLMULTITEXCOORD2FPROC glad_glMultiTexCoord2f;
+#define glMultiTexCoord2f glad_glMultiTexCoord2f
+GLAD_API_CALL PFNGLMULTITEXCOORD2FVPROC glad_glMultiTexCoord2fv;
+#define glMultiTexCoord2fv glad_glMultiTexCoord2fv
+GLAD_API_CALL PFNGLMULTITEXCOORD2IPROC glad_glMultiTexCoord2i;
+#define glMultiTexCoord2i glad_glMultiTexCoord2i
+GLAD_API_CALL PFNGLMULTITEXCOORD2IVPROC glad_glMultiTexCoord2iv;
+#define glMultiTexCoord2iv glad_glMultiTexCoord2iv
+GLAD_API_CALL PFNGLMULTITEXCOORD2SPROC glad_glMultiTexCoord2s;
+#define glMultiTexCoord2s glad_glMultiTexCoord2s
+GLAD_API_CALL PFNGLMULTITEXCOORD2SVPROC glad_glMultiTexCoord2sv;
+#define glMultiTexCoord2sv glad_glMultiTexCoord2sv
+GLAD_API_CALL PFNGLMULTITEXCOORD3DPROC glad_glMultiTexCoord3d;
+#define glMultiTexCoord3d glad_glMultiTexCoord3d
+GLAD_API_CALL PFNGLMULTITEXCOORD3DVPROC glad_glMultiTexCoord3dv;
+#define glMultiTexCoord3dv glad_glMultiTexCoord3dv
+GLAD_API_CALL PFNGLMULTITEXCOORD3FPROC glad_glMultiTexCoord3f;
+#define glMultiTexCoord3f glad_glMultiTexCoord3f
+GLAD_API_CALL PFNGLMULTITEXCOORD3FVPROC glad_glMultiTexCoord3fv;
+#define glMultiTexCoord3fv glad_glMultiTexCoord3fv
+GLAD_API_CALL PFNGLMULTITEXCOORD3IPROC glad_glMultiTexCoord3i;
+#define glMultiTexCoord3i glad_glMultiTexCoord3i
+GLAD_API_CALL PFNGLMULTITEXCOORD3IVPROC glad_glMultiTexCoord3iv;
+#define glMultiTexCoord3iv glad_glMultiTexCoord3iv
+GLAD_API_CALL PFNGLMULTITEXCOORD3SPROC glad_glMultiTexCoord3s;
+#define glMultiTexCoord3s glad_glMultiTexCoord3s
+GLAD_API_CALL PFNGLMULTITEXCOORD3SVPROC glad_glMultiTexCoord3sv;
+#define glMultiTexCoord3sv glad_glMultiTexCoord3sv
+GLAD_API_CALL PFNGLMULTITEXCOORD4DPROC glad_glMultiTexCoord4d;
+#define glMultiTexCoord4d glad_glMultiTexCoord4d
+GLAD_API_CALL PFNGLMULTITEXCOORD4DVPROC glad_glMultiTexCoord4dv;
+#define glMultiTexCoord4dv glad_glMultiTexCoord4dv
+GLAD_API_CALL PFNGLMULTITEXCOORD4FPROC glad_glMultiTexCoord4f;
+#define glMultiTexCoord4f glad_glMultiTexCoord4f
+GLAD_API_CALL PFNGLMULTITEXCOORD4FVPROC glad_glMultiTexCoord4fv;
+#define glMultiTexCoord4fv glad_glMultiTexCoord4fv
+GLAD_API_CALL PFNGLMULTITEXCOORD4IPROC glad_glMultiTexCoord4i;
+#define glMultiTexCoord4i glad_glMultiTexCoord4i
+GLAD_API_CALL PFNGLMULTITEXCOORD4IVPROC glad_glMultiTexCoord4iv;
+#define glMultiTexCoord4iv glad_glMultiTexCoord4iv
+GLAD_API_CALL PFNGLMULTITEXCOORD4SPROC glad_glMultiTexCoord4s;
+#define glMultiTexCoord4s glad_glMultiTexCoord4s
+GLAD_API_CALL PFNGLMULTITEXCOORD4SVPROC glad_glMultiTexCoord4sv;
+#define glMultiTexCoord4sv glad_glMultiTexCoord4sv
+GLAD_API_CALL PFNGLMULTITEXCOORDP1UIPROC glad_glMultiTexCoordP1ui;
+#define glMultiTexCoordP1ui glad_glMultiTexCoordP1ui
+GLAD_API_CALL PFNGLMULTITEXCOORDP1UIVPROC glad_glMultiTexCoordP1uiv;
+#define glMultiTexCoordP1uiv glad_glMultiTexCoordP1uiv
+GLAD_API_CALL PFNGLMULTITEXCOORDP2UIPROC glad_glMultiTexCoordP2ui;
+#define glMultiTexCoordP2ui glad_glMultiTexCoordP2ui
+GLAD_API_CALL PFNGLMULTITEXCOORDP2UIVPROC glad_glMultiTexCoordP2uiv;
+#define glMultiTexCoordP2uiv glad_glMultiTexCoordP2uiv
+GLAD_API_CALL PFNGLMULTITEXCOORDP3UIPROC glad_glMultiTexCoordP3ui;
+#define glMultiTexCoordP3ui glad_glMultiTexCoordP3ui
+GLAD_API_CALL PFNGLMULTITEXCOORDP3UIVPROC glad_glMultiTexCoordP3uiv;
+#define glMultiTexCoordP3uiv glad_glMultiTexCoordP3uiv
+GLAD_API_CALL PFNGLMULTITEXCOORDP4UIPROC glad_glMultiTexCoordP4ui;
+#define glMultiTexCoordP4ui glad_glMultiTexCoordP4ui
+GLAD_API_CALL PFNGLMULTITEXCOORDP4UIVPROC glad_glMultiTexCoordP4uiv;
+#define glMultiTexCoordP4uiv glad_glMultiTexCoordP4uiv
+GLAD_API_CALL PFNGLNEWLISTPROC glad_glNewList;
+#define glNewList glad_glNewList
+GLAD_API_CALL PFNGLNORMAL3BPROC glad_glNormal3b;
+#define glNormal3b glad_glNormal3b
+GLAD_API_CALL PFNGLNORMAL3BVPROC glad_glNormal3bv;
+#define glNormal3bv glad_glNormal3bv
+GLAD_API_CALL PFNGLNORMAL3DPROC glad_glNormal3d;
+#define glNormal3d glad_glNormal3d
+GLAD_API_CALL PFNGLNORMAL3DVPROC glad_glNormal3dv;
+#define glNormal3dv glad_glNormal3dv
+GLAD_API_CALL PFNGLNORMAL3FPROC glad_glNormal3f;
+#define glNormal3f glad_glNormal3f
+GLAD_API_CALL PFNGLNORMAL3FVPROC glad_glNormal3fv;
+#define glNormal3fv glad_glNormal3fv
+GLAD_API_CALL PFNGLNORMAL3IPROC glad_glNormal3i;
+#define glNormal3i glad_glNormal3i
+GLAD_API_CALL PFNGLNORMAL3IVPROC glad_glNormal3iv;
+#define glNormal3iv glad_glNormal3iv
+GLAD_API_CALL PFNGLNORMAL3SPROC glad_glNormal3s;
+#define glNormal3s glad_glNormal3s
+GLAD_API_CALL PFNGLNORMAL3SVPROC glad_glNormal3sv;
+#define glNormal3sv glad_glNormal3sv
+GLAD_API_CALL PFNGLNORMALP3UIPROC glad_glNormalP3ui;
+#define glNormalP3ui glad_glNormalP3ui
+GLAD_API_CALL PFNGLNORMALP3UIVPROC glad_glNormalP3uiv;
+#define glNormalP3uiv glad_glNormalP3uiv
+GLAD_API_CALL PFNGLNORMALPOINTERPROC glad_glNormalPointer;
+#define glNormalPointer glad_glNormalPointer
+GLAD_API_CALL PFNGLORTHOPROC glad_glOrtho;
+#define glOrtho glad_glOrtho
+GLAD_API_CALL PFNGLPASSTHROUGHPROC glad_glPassThrough;
+#define glPassThrough glad_glPassThrough
+GLAD_API_CALL PFNGLPIXELMAPFVPROC glad_glPixelMapfv;
+#define glPixelMapfv glad_glPixelMapfv
+GLAD_API_CALL PFNGLPIXELMAPUIVPROC glad_glPixelMapuiv;
+#define glPixelMapuiv glad_glPixelMapuiv
+GLAD_API_CALL PFNGLPIXELMAPUSVPROC glad_glPixelMapusv;
+#define glPixelMapusv glad_glPixelMapusv
+GLAD_API_CALL PFNGLPIXELSTOREFPROC glad_glPixelStoref;
+#define glPixelStoref glad_glPixelStoref
+GLAD_API_CALL PFNGLPIXELSTOREIPROC glad_glPixelStorei;
+#define glPixelStorei glad_glPixelStorei
+GLAD_API_CALL PFNGLPIXELTRANSFERFPROC glad_glPixelTransferf;
+#define glPixelTransferf glad_glPixelTransferf
+GLAD_API_CALL PFNGLPIXELTRANSFERIPROC glad_glPixelTransferi;
+#define glPixelTransferi glad_glPixelTransferi
+GLAD_API_CALL PFNGLPIXELZOOMPROC glad_glPixelZoom;
+#define glPixelZoom glad_glPixelZoom
+GLAD_API_CALL PFNGLPOINTPARAMETERFPROC glad_glPointParameterf;
+#define glPointParameterf glad_glPointParameterf
+GLAD_API_CALL PFNGLPOINTPARAMETERFVPROC glad_glPointParameterfv;
+#define glPointParameterfv glad_glPointParameterfv
+GLAD_API_CALL PFNGLPOINTPARAMETERIPROC glad_glPointParameteri;
+#define glPointParameteri glad_glPointParameteri
+GLAD_API_CALL PFNGLPOINTPARAMETERIVPROC glad_glPointParameteriv;
+#define glPointParameteriv glad_glPointParameteriv
+GLAD_API_CALL PFNGLPOINTSIZEPROC glad_glPointSize;
+#define glPointSize glad_glPointSize
+GLAD_API_CALL PFNGLPOLYGONMODEPROC glad_glPolygonMode;
+#define glPolygonMode glad_glPolygonMode
+GLAD_API_CALL PFNGLPOLYGONOFFSETPROC glad_glPolygonOffset;
+#define glPolygonOffset glad_glPolygonOffset
+GLAD_API_CALL PFNGLPOLYGONSTIPPLEPROC glad_glPolygonStipple;
+#define glPolygonStipple glad_glPolygonStipple
+GLAD_API_CALL PFNGLPOPATTRIBPROC glad_glPopAttrib;
+#define glPopAttrib glad_glPopAttrib
+GLAD_API_CALL PFNGLPOPCLIENTATTRIBPROC glad_glPopClientAttrib;
+#define glPopClientAttrib glad_glPopClientAttrib
+GLAD_API_CALL PFNGLPOPMATRIXPROC glad_glPopMatrix;
+#define glPopMatrix glad_glPopMatrix
+GLAD_API_CALL PFNGLPOPNAMEPROC glad_glPopName;
+#define glPopName glad_glPopName
+GLAD_API_CALL PFNGLPRIMITIVERESTARTINDEXPROC glad_glPrimitiveRestartIndex;
+#define glPrimitiveRestartIndex glad_glPrimitiveRestartIndex
+GLAD_API_CALL PFNGLPRIORITIZETEXTURESPROC glad_glPrioritizeTextures;
+#define glPrioritizeTextures glad_glPrioritizeTextures
+GLAD_API_CALL PFNGLPROVOKINGVERTEXPROC glad_glProvokingVertex;
+#define glProvokingVertex glad_glProvokingVertex
+GLAD_API_CALL PFNGLPUSHATTRIBPROC glad_glPushAttrib;
+#define glPushAttrib glad_glPushAttrib
+GLAD_API_CALL PFNGLPUSHCLIENTATTRIBPROC glad_glPushClientAttrib;
+#define glPushClientAttrib glad_glPushClientAttrib
+GLAD_API_CALL PFNGLPUSHMATRIXPROC glad_glPushMatrix;
+#define glPushMatrix glad_glPushMatrix
+GLAD_API_CALL PFNGLPUSHNAMEPROC glad_glPushName;
+#define glPushName glad_glPushName
+GLAD_API_CALL PFNGLQUERYCOUNTERPROC glad_glQueryCounter;
+#define glQueryCounter glad_glQueryCounter
+GLAD_API_CALL PFNGLRASTERPOS2DPROC glad_glRasterPos2d;
+#define glRasterPos2d glad_glRasterPos2d
+GLAD_API_CALL PFNGLRASTERPOS2DVPROC glad_glRasterPos2dv;
+#define glRasterPos2dv glad_glRasterPos2dv
+GLAD_API_CALL PFNGLRASTERPOS2FPROC glad_glRasterPos2f;
+#define glRasterPos2f glad_glRasterPos2f
+GLAD_API_CALL PFNGLRASTERPOS2FVPROC glad_glRasterPos2fv;
+#define glRasterPos2fv glad_glRasterPos2fv
+GLAD_API_CALL PFNGLRASTERPOS2IPROC glad_glRasterPos2i;
+#define glRasterPos2i glad_glRasterPos2i
+GLAD_API_CALL PFNGLRASTERPOS2IVPROC glad_glRasterPos2iv;
+#define glRasterPos2iv glad_glRasterPos2iv
+GLAD_API_CALL PFNGLRASTERPOS2SPROC glad_glRasterPos2s;
+#define glRasterPos2s glad_glRasterPos2s
+GLAD_API_CALL PFNGLRASTERPOS2SVPROC glad_glRasterPos2sv;
+#define glRasterPos2sv glad_glRasterPos2sv
+GLAD_API_CALL PFNGLRASTERPOS3DPROC glad_glRasterPos3d;
+#define glRasterPos3d glad_glRasterPos3d
+GLAD_API_CALL PFNGLRASTERPOS3DVPROC glad_glRasterPos3dv;
+#define glRasterPos3dv glad_glRasterPos3dv
+GLAD_API_CALL PFNGLRASTERPOS3FPROC glad_glRasterPos3f;
+#define glRasterPos3f glad_glRasterPos3f
+GLAD_API_CALL PFNGLRASTERPOS3FVPROC glad_glRasterPos3fv;
+#define glRasterPos3fv glad_glRasterPos3fv
+GLAD_API_CALL PFNGLRASTERPOS3IPROC glad_glRasterPos3i;
+#define glRasterPos3i glad_glRasterPos3i
+GLAD_API_CALL PFNGLRASTERPOS3IVPROC glad_glRasterPos3iv;
+#define glRasterPos3iv glad_glRasterPos3iv
+GLAD_API_CALL PFNGLRASTERPOS3SPROC glad_glRasterPos3s;
+#define glRasterPos3s glad_glRasterPos3s
+GLAD_API_CALL PFNGLRASTERPOS3SVPROC glad_glRasterPos3sv;
+#define glRasterPos3sv glad_glRasterPos3sv
+GLAD_API_CALL PFNGLRASTERPOS4DPROC glad_glRasterPos4d;
+#define glRasterPos4d glad_glRasterPos4d
+GLAD_API_CALL PFNGLRASTERPOS4DVPROC glad_glRasterPos4dv;
+#define glRasterPos4dv glad_glRasterPos4dv
+GLAD_API_CALL PFNGLRASTERPOS4FPROC glad_glRasterPos4f;
+#define glRasterPos4f glad_glRasterPos4f
+GLAD_API_CALL PFNGLRASTERPOS4FVPROC glad_glRasterPos4fv;
+#define glRasterPos4fv glad_glRasterPos4fv
+GLAD_API_CALL PFNGLRASTERPOS4IPROC glad_glRasterPos4i;
+#define glRasterPos4i glad_glRasterPos4i
+GLAD_API_CALL PFNGLRASTERPOS4IVPROC glad_glRasterPos4iv;
+#define glRasterPos4iv glad_glRasterPos4iv
+GLAD_API_CALL PFNGLRASTERPOS4SPROC glad_glRasterPos4s;
+#define glRasterPos4s glad_glRasterPos4s
+GLAD_API_CALL PFNGLRASTERPOS4SVPROC glad_glRasterPos4sv;
+#define glRasterPos4sv glad_glRasterPos4sv
+GLAD_API_CALL PFNGLREADBUFFERPROC glad_glReadBuffer;
+#define glReadBuffer glad_glReadBuffer
+GLAD_API_CALL PFNGLREADPIXELSPROC glad_glReadPixels;
+#define glReadPixels glad_glReadPixels
+GLAD_API_CALL PFNGLRECTDPROC glad_glRectd;
+#define glRectd glad_glRectd
+GLAD_API_CALL PFNGLRECTDVPROC glad_glRectdv;
+#define glRectdv glad_glRectdv
+GLAD_API_CALL PFNGLRECTFPROC glad_glRectf;
+#define glRectf glad_glRectf
+GLAD_API_CALL PFNGLRECTFVPROC glad_glRectfv;
+#define glRectfv glad_glRectfv
+GLAD_API_CALL PFNGLRECTIPROC glad_glRecti;
+#define glRecti glad_glRecti
+GLAD_API_CALL PFNGLRECTIVPROC glad_glRectiv;
+#define glRectiv glad_glRectiv
+GLAD_API_CALL PFNGLRECTSPROC glad_glRects;
+#define glRects glad_glRects
+GLAD_API_CALL PFNGLRECTSVPROC glad_glRectsv;
+#define glRectsv glad_glRectsv
+GLAD_API_CALL PFNGLRENDERMODEPROC glad_glRenderMode;
+#define glRenderMode glad_glRenderMode
+GLAD_API_CALL PFNGLRENDERBUFFERSTORAGEPROC glad_glRenderbufferStorage;
+#define glRenderbufferStorage glad_glRenderbufferStorage
+GLAD_API_CALL PFNGLRENDERBUFFERSTORAGEEXTPROC glad_glRenderbufferStorageEXT;
+#define glRenderbufferStorageEXT glad_glRenderbufferStorageEXT
+GLAD_API_CALL PFNGLRENDERBUFFERSTORAGEMULTISAMPLEPROC glad_glRenderbufferStorageMultisample;
+#define glRenderbufferStorageMultisample glad_glRenderbufferStorageMultisample
+GLAD_API_CALL PFNGLRENDERBUFFERSTORAGEMULTISAMPLEEXTPROC glad_glRenderbufferStorageMultisampleEXT;
+#define glRenderbufferStorageMultisampleEXT glad_glRenderbufferStorageMultisampleEXT
+GLAD_API_CALL PFNGLROTATEDPROC glad_glRotated;
+#define glRotated glad_glRotated
+GLAD_API_CALL PFNGLROTATEFPROC glad_glRotatef;
+#define glRotatef glad_glRotatef
+GLAD_API_CALL PFNGLSAMPLECOVERAGEPROC glad_glSampleCoverage;
+#define glSampleCoverage glad_glSampleCoverage
+GLAD_API_CALL PFNGLSAMPLEMASKIPROC glad_glSampleMaski;
+#define glSampleMaski glad_glSampleMaski
+GLAD_API_CALL PFNGLSAMPLERPARAMETERIIVPROC glad_glSamplerParameterIiv;
+#define glSamplerParameterIiv glad_glSamplerParameterIiv
+GLAD_API_CALL PFNGLSAMPLERPARAMETERIUIVPROC glad_glSamplerParameterIuiv;
+#define glSamplerParameterIuiv glad_glSamplerParameterIuiv
+GLAD_API_CALL PFNGLSAMPLERPARAMETERFPROC glad_glSamplerParameterf;
+#define glSamplerParameterf glad_glSamplerParameterf
+GLAD_API_CALL PFNGLSAMPLERPARAMETERFVPROC glad_glSamplerParameterfv;
+#define glSamplerParameterfv glad_glSamplerParameterfv
+GLAD_API_CALL PFNGLSAMPLERPARAMETERIPROC glad_glSamplerParameteri;
+#define glSamplerParameteri glad_glSamplerParameteri
+GLAD_API_CALL PFNGLSAMPLERPARAMETERIVPROC glad_glSamplerParameteriv;
+#define glSamplerParameteriv glad_glSamplerParameteriv
+GLAD_API_CALL PFNGLSCALEDPROC glad_glScaled;
+#define glScaled glad_glScaled
+GLAD_API_CALL PFNGLSCALEFPROC glad_glScalef;
+#define glScalef glad_glScalef
+GLAD_API_CALL PFNGLSCISSORPROC glad_glScissor;
+#define glScissor glad_glScissor
+GLAD_API_CALL PFNGLSECONDARYCOLOR3BPROC glad_glSecondaryColor3b;
+#define glSecondaryColor3b glad_glSecondaryColor3b
+GLAD_API_CALL PFNGLSECONDARYCOLOR3BVPROC glad_glSecondaryColor3bv;
+#define glSecondaryColor3bv glad_glSecondaryColor3bv
+GLAD_API_CALL PFNGLSECONDARYCOLOR3DPROC glad_glSecondaryColor3d;
+#define glSecondaryColor3d glad_glSecondaryColor3d
+GLAD_API_CALL PFNGLSECONDARYCOLOR3DVPROC glad_glSecondaryColor3dv;
+#define glSecondaryColor3dv glad_glSecondaryColor3dv
+GLAD_API_CALL PFNGLSECONDARYCOLOR3FPROC glad_glSecondaryColor3f;
+#define glSecondaryColor3f glad_glSecondaryColor3f
+GLAD_API_CALL PFNGLSECONDARYCOLOR3FVPROC glad_glSecondaryColor3fv;
+#define glSecondaryColor3fv glad_glSecondaryColor3fv
+GLAD_API_CALL PFNGLSECONDARYCOLOR3IPROC glad_glSecondaryColor3i;
+#define glSecondaryColor3i glad_glSecondaryColor3i
+GLAD_API_CALL PFNGLSECONDARYCOLOR3IVPROC glad_glSecondaryColor3iv;
+#define glSecondaryColor3iv glad_glSecondaryColor3iv
+GLAD_API_CALL PFNGLSECONDARYCOLOR3SPROC glad_glSecondaryColor3s;
+#define glSecondaryColor3s glad_glSecondaryColor3s
+GLAD_API_CALL PFNGLSECONDARYCOLOR3SVPROC glad_glSecondaryColor3sv;
+#define glSecondaryColor3sv glad_glSecondaryColor3sv
+GLAD_API_CALL PFNGLSECONDARYCOLOR3UBPROC glad_glSecondaryColor3ub;
+#define glSecondaryColor3ub glad_glSecondaryColor3ub
+GLAD_API_CALL PFNGLSECONDARYCOLOR3UBVPROC glad_glSecondaryColor3ubv;
+#define glSecondaryColor3ubv glad_glSecondaryColor3ubv
+GLAD_API_CALL PFNGLSECONDARYCOLOR3UIPROC glad_glSecondaryColor3ui;
+#define glSecondaryColor3ui glad_glSecondaryColor3ui
+GLAD_API_CALL PFNGLSECONDARYCOLOR3UIVPROC glad_glSecondaryColor3uiv;
+#define glSecondaryColor3uiv glad_glSecondaryColor3uiv
+GLAD_API_CALL PFNGLSECONDARYCOLOR3USPROC glad_glSecondaryColor3us;
+#define glSecondaryColor3us glad_glSecondaryColor3us
+GLAD_API_CALL PFNGLSECONDARYCOLOR3USVPROC glad_glSecondaryColor3usv;
+#define glSecondaryColor3usv glad_glSecondaryColor3usv
+GLAD_API_CALL PFNGLSECONDARYCOLORP3UIPROC glad_glSecondaryColorP3ui;
+#define glSecondaryColorP3ui glad_glSecondaryColorP3ui
+GLAD_API_CALL PFNGLSECONDARYCOLORP3UIVPROC glad_glSecondaryColorP3uiv;
+#define glSecondaryColorP3uiv glad_glSecondaryColorP3uiv
+GLAD_API_CALL PFNGLSECONDARYCOLORPOINTERPROC glad_glSecondaryColorPointer;
+#define glSecondaryColorPointer glad_glSecondaryColorPointer
+GLAD_API_CALL PFNGLSELECTBUFFERPROC glad_glSelectBuffer;
+#define glSelectBuffer glad_glSelectBuffer
+GLAD_API_CALL PFNGLSHADEMODELPROC glad_glShadeModel;
+#define glShadeModel glad_glShadeModel
+GLAD_API_CALL PFNGLSHADERSOURCEPROC glad_glShaderSource;
+#define glShaderSource glad_glShaderSource
+GLAD_API_CALL PFNGLSTENCILFUNCPROC glad_glStencilFunc;
+#define glStencilFunc glad_glStencilFunc
+GLAD_API_CALL PFNGLSTENCILFUNCSEPARATEPROC glad_glStencilFuncSeparate;
+#define glStencilFuncSeparate glad_glStencilFuncSeparate
+GLAD_API_CALL PFNGLSTENCILMASKPROC glad_glStencilMask;
+#define glStencilMask glad_glStencilMask
+GLAD_API_CALL PFNGLSTENCILMASKSEPARATEPROC glad_glStencilMaskSeparate;
+#define glStencilMaskSeparate glad_glStencilMaskSeparate
+GLAD_API_CALL PFNGLSTENCILOPPROC glad_glStencilOp;
+#define glStencilOp glad_glStencilOp
+GLAD_API_CALL PFNGLSTENCILOPSEPARATEPROC glad_glStencilOpSeparate;
+#define glStencilOpSeparate glad_glStencilOpSeparate
+GLAD_API_CALL PFNGLTEXBUFFERPROC glad_glTexBuffer;
+#define glTexBuffer glad_glTexBuffer
+GLAD_API_CALL PFNGLTEXCOORD1DPROC glad_glTexCoord1d;
+#define glTexCoord1d glad_glTexCoord1d
+GLAD_API_CALL PFNGLTEXCOORD1DVPROC glad_glTexCoord1dv;
+#define glTexCoord1dv glad_glTexCoord1dv
+GLAD_API_CALL PFNGLTEXCOORD1FPROC glad_glTexCoord1f;
+#define glTexCoord1f glad_glTexCoord1f
+GLAD_API_CALL PFNGLTEXCOORD1FVPROC glad_glTexCoord1fv;
+#define glTexCoord1fv glad_glTexCoord1fv
+GLAD_API_CALL PFNGLTEXCOORD1IPROC glad_glTexCoord1i;
+#define glTexCoord1i glad_glTexCoord1i
+GLAD_API_CALL PFNGLTEXCOORD1IVPROC glad_glTexCoord1iv;
+#define glTexCoord1iv glad_glTexCoord1iv
+GLAD_API_CALL PFNGLTEXCOORD1SPROC glad_glTexCoord1s;
+#define glTexCoord1s glad_glTexCoord1s
+GLAD_API_CALL PFNGLTEXCOORD1SVPROC glad_glTexCoord1sv;
+#define glTexCoord1sv glad_glTexCoord1sv
+GLAD_API_CALL PFNGLTEXCOORD2DPROC glad_glTexCoord2d;
+#define glTexCoord2d glad_glTexCoord2d
+GLAD_API_CALL PFNGLTEXCOORD2DVPROC glad_glTexCoord2dv;
+#define glTexCoord2dv glad_glTexCoord2dv
+GLAD_API_CALL PFNGLTEXCOORD2FPROC glad_glTexCoord2f;
+#define glTexCoord2f glad_glTexCoord2f
+GLAD_API_CALL PFNGLTEXCOORD2FVPROC glad_glTexCoord2fv;
+#define glTexCoord2fv glad_glTexCoord2fv
+GLAD_API_CALL PFNGLTEXCOORD2IPROC glad_glTexCoord2i;
+#define glTexCoord2i glad_glTexCoord2i
+GLAD_API_CALL PFNGLTEXCOORD2IVPROC glad_glTexCoord2iv;
+#define glTexCoord2iv glad_glTexCoord2iv
+GLAD_API_CALL PFNGLTEXCOORD2SPROC glad_glTexCoord2s;
+#define glTexCoord2s glad_glTexCoord2s
+GLAD_API_CALL PFNGLTEXCOORD2SVPROC glad_glTexCoord2sv;
+#define glTexCoord2sv glad_glTexCoord2sv
+GLAD_API_CALL PFNGLTEXCOORD3DPROC glad_glTexCoord3d;
+#define glTexCoord3d glad_glTexCoord3d
+GLAD_API_CALL PFNGLTEXCOORD3DVPROC glad_glTexCoord3dv;
+#define glTexCoord3dv glad_glTexCoord3dv
+GLAD_API_CALL PFNGLTEXCOORD3FPROC glad_glTexCoord3f;
+#define glTexCoord3f glad_glTexCoord3f
+GLAD_API_CALL PFNGLTEXCOORD3FVPROC glad_glTexCoord3fv;
+#define glTexCoord3fv glad_glTexCoord3fv
+GLAD_API_CALL PFNGLTEXCOORD3IPROC glad_glTexCoord3i;
+#define glTexCoord3i glad_glTexCoord3i
+GLAD_API_CALL PFNGLTEXCOORD3IVPROC glad_glTexCoord3iv;
+#define glTexCoord3iv glad_glTexCoord3iv
+GLAD_API_CALL PFNGLTEXCOORD3SPROC glad_glTexCoord3s;
+#define glTexCoord3s glad_glTexCoord3s
+GLAD_API_CALL PFNGLTEXCOORD3SVPROC glad_glTexCoord3sv;
+#define glTexCoord3sv glad_glTexCoord3sv
+GLAD_API_CALL PFNGLTEXCOORD4DPROC glad_glTexCoord4d;
+#define glTexCoord4d glad_glTexCoord4d
+GLAD_API_CALL PFNGLTEXCOORD4DVPROC glad_glTexCoord4dv;
+#define glTexCoord4dv glad_glTexCoord4dv
+GLAD_API_CALL PFNGLTEXCOORD4FPROC glad_glTexCoord4f;
+#define glTexCoord4f glad_glTexCoord4f
+GLAD_API_CALL PFNGLTEXCOORD4FVPROC glad_glTexCoord4fv;
+#define glTexCoord4fv glad_glTexCoord4fv
+GLAD_API_CALL PFNGLTEXCOORD4IPROC glad_glTexCoord4i;
+#define glTexCoord4i glad_glTexCoord4i
+GLAD_API_CALL PFNGLTEXCOORD4IVPROC glad_glTexCoord4iv;
+#define glTexCoord4iv glad_glTexCoord4iv
+GLAD_API_CALL PFNGLTEXCOORD4SPROC glad_glTexCoord4s;
+#define glTexCoord4s glad_glTexCoord4s
+GLAD_API_CALL PFNGLTEXCOORD4SVPROC glad_glTexCoord4sv;
+#define glTexCoord4sv glad_glTexCoord4sv
+GLAD_API_CALL PFNGLTEXCOORDP1UIPROC glad_glTexCoordP1ui;
+#define glTexCoordP1ui glad_glTexCoordP1ui
+GLAD_API_CALL PFNGLTEXCOORDP1UIVPROC glad_glTexCoordP1uiv;
+#define glTexCoordP1uiv glad_glTexCoordP1uiv
+GLAD_API_CALL PFNGLTEXCOORDP2UIPROC glad_glTexCoordP2ui;
+#define glTexCoordP2ui glad_glTexCoordP2ui
+GLAD_API_CALL PFNGLTEXCOORDP2UIVPROC glad_glTexCoordP2uiv;
+#define glTexCoordP2uiv glad_glTexCoordP2uiv
+GLAD_API_CALL PFNGLTEXCOORDP3UIPROC glad_glTexCoordP3ui;
+#define glTexCoordP3ui glad_glTexCoordP3ui
+GLAD_API_CALL PFNGLTEXCOORDP3UIVPROC glad_glTexCoordP3uiv;
+#define glTexCoordP3uiv glad_glTexCoordP3uiv
+GLAD_API_CALL PFNGLTEXCOORDP4UIPROC glad_glTexCoordP4ui;
+#define glTexCoordP4ui glad_glTexCoordP4ui
+GLAD_API_CALL PFNGLTEXCOORDP4UIVPROC glad_glTexCoordP4uiv;
+#define glTexCoordP4uiv glad_glTexCoordP4uiv
+GLAD_API_CALL PFNGLTEXCOORDPOINTERPROC glad_glTexCoordPointer;
+#define glTexCoordPointer glad_glTexCoordPointer
+GLAD_API_CALL PFNGLTEXENVFPROC glad_glTexEnvf;
+#define glTexEnvf glad_glTexEnvf
+GLAD_API_CALL PFNGLTEXENVFVPROC glad_glTexEnvfv;
+#define glTexEnvfv glad_glTexEnvfv
+GLAD_API_CALL PFNGLTEXENVIPROC glad_glTexEnvi;
+#define glTexEnvi glad_glTexEnvi
+GLAD_API_CALL PFNGLTEXENVIVPROC glad_glTexEnviv;
+#define glTexEnviv glad_glTexEnviv
+GLAD_API_CALL PFNGLTEXGENDPROC glad_glTexGend;
+#define glTexGend glad_glTexGend
+GLAD_API_CALL PFNGLTEXGENDVPROC glad_glTexGendv;
+#define glTexGendv glad_glTexGendv
+GLAD_API_CALL PFNGLTEXGENFPROC glad_glTexGenf;
+#define glTexGenf glad_glTexGenf
+GLAD_API_CALL PFNGLTEXGENFVPROC glad_glTexGenfv;
+#define glTexGenfv glad_glTexGenfv
+GLAD_API_CALL PFNGLTEXGENIPROC glad_glTexGeni;
+#define glTexGeni glad_glTexGeni
+GLAD_API_CALL PFNGLTEXGENIVPROC glad_glTexGeniv;
+#define glTexGeniv glad_glTexGeniv
+GLAD_API_CALL PFNGLTEXIMAGE1DPROC glad_glTexImage1D;
+#define glTexImage1D glad_glTexImage1D
+GLAD_API_CALL PFNGLTEXIMAGE2DPROC glad_glTexImage2D;
+#define glTexImage2D glad_glTexImage2D
+GLAD_API_CALL PFNGLTEXIMAGE2DMULTISAMPLEPROC glad_glTexImage2DMultisample;
+#define glTexImage2DMultisample glad_glTexImage2DMultisample
+GLAD_API_CALL PFNGLTEXIMAGE3DPROC glad_glTexImage3D;
+#define glTexImage3D glad_glTexImage3D
+GLAD_API_CALL PFNGLTEXIMAGE3DMULTISAMPLEPROC glad_glTexImage3DMultisample;
+#define glTexImage3DMultisample glad_glTexImage3DMultisample
+GLAD_API_CALL PFNGLTEXPARAMETERIIVPROC glad_glTexParameterIiv;
+#define glTexParameterIiv glad_glTexParameterIiv
+GLAD_API_CALL PFNGLTEXPARAMETERIUIVPROC glad_glTexParameterIuiv;
+#define glTexParameterIuiv glad_glTexParameterIuiv
+GLAD_API_CALL PFNGLTEXPARAMETERFPROC glad_glTexParameterf;
+#define glTexParameterf glad_glTexParameterf
+GLAD_API_CALL PFNGLTEXPARAMETERFVPROC glad_glTexParameterfv;
+#define glTexParameterfv glad_glTexParameterfv
+GLAD_API_CALL PFNGLTEXPARAMETERIPROC glad_glTexParameteri;
+#define glTexParameteri glad_glTexParameteri
+GLAD_API_CALL PFNGLTEXPARAMETERIVPROC glad_glTexParameteriv;
+#define glTexParameteriv glad_glTexParameteriv
+GLAD_API_CALL PFNGLTEXSUBIMAGE1DPROC glad_glTexSubImage1D;
+#define glTexSubImage1D glad_glTexSubImage1D
+GLAD_API_CALL PFNGLTEXSUBIMAGE2DPROC glad_glTexSubImage2D;
+#define glTexSubImage2D glad_glTexSubImage2D
+GLAD_API_CALL PFNGLTEXSUBIMAGE3DPROC glad_glTexSubImage3D;
+#define glTexSubImage3D glad_glTexSubImage3D
+GLAD_API_CALL PFNGLTRANSFORMFEEDBACKVARYINGSPROC glad_glTransformFeedbackVaryings;
+#define glTransformFeedbackVaryings glad_glTransformFeedbackVaryings
+GLAD_API_CALL PFNGLTRANSLATEDPROC glad_glTranslated;
+#define glTranslated glad_glTranslated
+GLAD_API_CALL PFNGLTRANSLATEFPROC glad_glTranslatef;
+#define glTranslatef glad_glTranslatef
+GLAD_API_CALL PFNGLUNIFORM1FPROC glad_glUniform1f;
+#define glUniform1f glad_glUniform1f
+GLAD_API_CALL PFNGLUNIFORM1FVPROC glad_glUniform1fv;
+#define glUniform1fv glad_glUniform1fv
+GLAD_API_CALL PFNGLUNIFORM1IPROC glad_glUniform1i;
+#define glUniform1i glad_glUniform1i
+GLAD_API_CALL PFNGLUNIFORM1IVPROC glad_glUniform1iv;
+#define glUniform1iv glad_glUniform1iv
+GLAD_API_CALL PFNGLUNIFORM1UIPROC glad_glUniform1ui;
+#define glUniform1ui glad_glUniform1ui
+GLAD_API_CALL PFNGLUNIFORM1UIVPROC glad_glUniform1uiv;
+#define glUniform1uiv glad_glUniform1uiv
+GLAD_API_CALL PFNGLUNIFORM2FPROC glad_glUniform2f;
+#define glUniform2f glad_glUniform2f
+GLAD_API_CALL PFNGLUNIFORM2FVPROC glad_glUniform2fv;
+#define glUniform2fv glad_glUniform2fv
+GLAD_API_CALL PFNGLUNIFORM2IPROC glad_glUniform2i;
+#define glUniform2i glad_glUniform2i
+GLAD_API_CALL PFNGLUNIFORM2IVPROC glad_glUniform2iv;
+#define glUniform2iv glad_glUniform2iv
+GLAD_API_CALL PFNGLUNIFORM2UIPROC glad_glUniform2ui;
+#define glUniform2ui glad_glUniform2ui
+GLAD_API_CALL PFNGLUNIFORM2UIVPROC glad_glUniform2uiv;
+#define glUniform2uiv glad_glUniform2uiv
+GLAD_API_CALL PFNGLUNIFORM3FPROC glad_glUniform3f;
+#define glUniform3f glad_glUniform3f
+GLAD_API_CALL PFNGLUNIFORM3FVPROC glad_glUniform3fv;
+#define glUniform3fv glad_glUniform3fv
+GLAD_API_CALL PFNGLUNIFORM3IPROC glad_glUniform3i;
+#define glUniform3i glad_glUniform3i
+GLAD_API_CALL PFNGLUNIFORM3IVPROC glad_glUniform3iv;
+#define glUniform3iv glad_glUniform3iv
+GLAD_API_CALL PFNGLUNIFORM3UIPROC glad_glUniform3ui;
+#define glUniform3ui glad_glUniform3ui
+GLAD_API_CALL PFNGLUNIFORM3UIVPROC glad_glUniform3uiv;
+#define glUniform3uiv glad_glUniform3uiv
+GLAD_API_CALL PFNGLUNIFORM4FPROC glad_glUniform4f;
+#define glUniform4f glad_glUniform4f
+GLAD_API_CALL PFNGLUNIFORM4FVPROC glad_glUniform4fv;
+#define glUniform4fv glad_glUniform4fv
+GLAD_API_CALL PFNGLUNIFORM4IPROC glad_glUniform4i;
+#define glUniform4i glad_glUniform4i
+GLAD_API_CALL PFNGLUNIFORM4IVPROC glad_glUniform4iv;
+#define glUniform4iv glad_glUniform4iv
+GLAD_API_CALL PFNGLUNIFORM4UIPROC glad_glUniform4ui;
+#define glUniform4ui glad_glUniform4ui
+GLAD_API_CALL PFNGLUNIFORM4UIVPROC glad_glUniform4uiv;
+#define glUniform4uiv glad_glUniform4uiv
+GLAD_API_CALL PFNGLUNIFORMBLOCKBINDINGPROC glad_glUniformBlockBinding;
+#define glUniformBlockBinding glad_glUniformBlockBinding
+GLAD_API_CALL PFNGLUNIFORMMATRIX2FVPROC glad_glUniformMatrix2fv;
+#define glUniformMatrix2fv glad_glUniformMatrix2fv
+GLAD_API_CALL PFNGLUNIFORMMATRIX2X3FVPROC glad_glUniformMatrix2x3fv;
+#define glUniformMatrix2x3fv glad_glUniformMatrix2x3fv
+GLAD_API_CALL PFNGLUNIFORMMATRIX2X4FVPROC glad_glUniformMatrix2x4fv;
+#define glUniformMatrix2x4fv glad_glUniformMatrix2x4fv
+GLAD_API_CALL PFNGLUNIFORMMATRIX3FVPROC glad_glUniformMatrix3fv;
+#define glUniformMatrix3fv glad_glUniformMatrix3fv
+GLAD_API_CALL PFNGLUNIFORMMATRIX3X2FVPROC glad_glUniformMatrix3x2fv;
+#define glUniformMatrix3x2fv glad_glUniformMatrix3x2fv
+GLAD_API_CALL PFNGLUNIFORMMATRIX3X4FVPROC glad_glUniformMatrix3x4fv;
+#define glUniformMatrix3x4fv glad_glUniformMatrix3x4fv
+GLAD_API_CALL PFNGLUNIFORMMATRIX4FVPROC glad_glUniformMatrix4fv;
+#define glUniformMatrix4fv glad_glUniformMatrix4fv
+GLAD_API_CALL PFNGLUNIFORMMATRIX4X2FVPROC glad_glUniformMatrix4x2fv;
+#define glUniformMatrix4x2fv glad_glUniformMatrix4x2fv
+GLAD_API_CALL PFNGLUNIFORMMATRIX4X3FVPROC glad_glUniformMatrix4x3fv;
+#define glUniformMatrix4x3fv glad_glUniformMatrix4x3fv
+GLAD_API_CALL PFNGLUNMAPBUFFERPROC glad_glUnmapBuffer;
+#define glUnmapBuffer glad_glUnmapBuffer
+GLAD_API_CALL PFNGLUSEPROGRAMPROC glad_glUseProgram;
+#define glUseProgram glad_glUseProgram
+GLAD_API_CALL PFNGLVALIDATEPROGRAMPROC glad_glValidateProgram;
+#define glValidateProgram glad_glValidateProgram
+GLAD_API_CALL PFNGLVERTEX2DPROC glad_glVertex2d;
+#define glVertex2d glad_glVertex2d
+GLAD_API_CALL PFNGLVERTEX2DVPROC glad_glVertex2dv;
+#define glVertex2dv glad_glVertex2dv
+GLAD_API_CALL PFNGLVERTEX2FPROC glad_glVertex2f;
+#define glVertex2f glad_glVertex2f
+GLAD_API_CALL PFNGLVERTEX2FVPROC glad_glVertex2fv;
+#define glVertex2fv glad_glVertex2fv
+GLAD_API_CALL PFNGLVERTEX2IPROC glad_glVertex2i;
+#define glVertex2i glad_glVertex2i
+GLAD_API_CALL PFNGLVERTEX2IVPROC glad_glVertex2iv;
+#define glVertex2iv glad_glVertex2iv
+GLAD_API_CALL PFNGLVERTEX2SPROC glad_glVertex2s;
+#define glVertex2s glad_glVertex2s
+GLAD_API_CALL PFNGLVERTEX2SVPROC glad_glVertex2sv;
+#define glVertex2sv glad_glVertex2sv
+GLAD_API_CALL PFNGLVERTEX3DPROC glad_glVertex3d;
+#define glVertex3d glad_glVertex3d
+GLAD_API_CALL PFNGLVERTEX3DVPROC glad_glVertex3dv;
+#define glVertex3dv glad_glVertex3dv
+GLAD_API_CALL PFNGLVERTEX3FPROC glad_glVertex3f;
+#define glVertex3f glad_glVertex3f
+GLAD_API_CALL PFNGLVERTEX3FVPROC glad_glVertex3fv;
+#define glVertex3fv glad_glVertex3fv
+GLAD_API_CALL PFNGLVERTEX3IPROC glad_glVertex3i;
+#define glVertex3i glad_glVertex3i
+GLAD_API_CALL PFNGLVERTEX3IVPROC glad_glVertex3iv;
+#define glVertex3iv glad_glVertex3iv
+GLAD_API_CALL PFNGLVERTEX3SPROC glad_glVertex3s;
+#define glVertex3s glad_glVertex3s
+GLAD_API_CALL PFNGLVERTEX3SVPROC glad_glVertex3sv;
+#define glVertex3sv glad_glVertex3sv
+GLAD_API_CALL PFNGLVERTEX4DPROC glad_glVertex4d;
+#define glVertex4d glad_glVertex4d
+GLAD_API_CALL PFNGLVERTEX4DVPROC glad_glVertex4dv;
+#define glVertex4dv glad_glVertex4dv
+GLAD_API_CALL PFNGLVERTEX4FPROC glad_glVertex4f;
+#define glVertex4f glad_glVertex4f
+GLAD_API_CALL PFNGLVERTEX4FVPROC glad_glVertex4fv;
+#define glVertex4fv glad_glVertex4fv
+GLAD_API_CALL PFNGLVERTEX4IPROC glad_glVertex4i;
+#define glVertex4i glad_glVertex4i
+GLAD_API_CALL PFNGLVERTEX4IVPROC glad_glVertex4iv;
+#define glVertex4iv glad_glVertex4iv
+GLAD_API_CALL PFNGLVERTEX4SPROC glad_glVertex4s;
+#define glVertex4s glad_glVertex4s
+GLAD_API_CALL PFNGLVERTEX4SVPROC glad_glVertex4sv;
+#define glVertex4sv glad_glVertex4sv
+GLAD_API_CALL PFNGLVERTEXATTRIB1DPROC glad_glVertexAttrib1d;
+#define glVertexAttrib1d glad_glVertexAttrib1d
+GLAD_API_CALL PFNGLVERTEXATTRIB1DVPROC glad_glVertexAttrib1dv;
+#define glVertexAttrib1dv glad_glVertexAttrib1dv
+GLAD_API_CALL PFNGLVERTEXATTRIB1FPROC glad_glVertexAttrib1f;
+#define glVertexAttrib1f glad_glVertexAttrib1f
+GLAD_API_CALL PFNGLVERTEXATTRIB1FVPROC glad_glVertexAttrib1fv;
+#define glVertexAttrib1fv glad_glVertexAttrib1fv
+GLAD_API_CALL PFNGLVERTEXATTRIB1SPROC glad_glVertexAttrib1s;
+#define glVertexAttrib1s glad_glVertexAttrib1s
+GLAD_API_CALL PFNGLVERTEXATTRIB1SVPROC glad_glVertexAttrib1sv;
+#define glVertexAttrib1sv glad_glVertexAttrib1sv
+GLAD_API_CALL PFNGLVERTEXATTRIB2DPROC glad_glVertexAttrib2d;
+#define glVertexAttrib2d glad_glVertexAttrib2d
+GLAD_API_CALL PFNGLVERTEXATTRIB2DVPROC glad_glVertexAttrib2dv;
+#define glVertexAttrib2dv glad_glVertexAttrib2dv
+GLAD_API_CALL PFNGLVERTEXATTRIB2FPROC glad_glVertexAttrib2f;
+#define glVertexAttrib2f glad_glVertexAttrib2f
+GLAD_API_CALL PFNGLVERTEXATTRIB2FVPROC glad_glVertexAttrib2fv;
+#define glVertexAttrib2fv glad_glVertexAttrib2fv
+GLAD_API_CALL PFNGLVERTEXATTRIB2SPROC glad_glVertexAttrib2s;
+#define glVertexAttrib2s glad_glVertexAttrib2s
+GLAD_API_CALL PFNGLVERTEXATTRIB2SVPROC glad_glVertexAttrib2sv;
+#define glVertexAttrib2sv glad_glVertexAttrib2sv
+GLAD_API_CALL PFNGLVERTEXATTRIB3DPROC glad_glVertexAttrib3d;
+#define glVertexAttrib3d glad_glVertexAttrib3d
+GLAD_API_CALL PFNGLVERTEXATTRIB3DVPROC glad_glVertexAttrib3dv;
+#define glVertexAttrib3dv glad_glVertexAttrib3dv
+GLAD_API_CALL PFNGLVERTEXATTRIB3FPROC glad_glVertexAttrib3f;
+#define glVertexAttrib3f glad_glVertexAttrib3f
+GLAD_API_CALL PFNGLVERTEXATTRIB3FVPROC glad_glVertexAttrib3fv;
+#define glVertexAttrib3fv glad_glVertexAttrib3fv
+GLAD_API_CALL PFNGLVERTEXATTRIB3SPROC glad_glVertexAttrib3s;
+#define glVertexAttrib3s glad_glVertexAttrib3s
+GLAD_API_CALL PFNGLVERTEXATTRIB3SVPROC glad_glVertexAttrib3sv;
+#define glVertexAttrib3sv glad_glVertexAttrib3sv
+GLAD_API_CALL PFNGLVERTEXATTRIB4NBVPROC glad_glVertexAttrib4Nbv;
+#define glVertexAttrib4Nbv glad_glVertexAttrib4Nbv
+GLAD_API_CALL PFNGLVERTEXATTRIB4NIVPROC glad_glVertexAttrib4Niv;
+#define glVertexAttrib4Niv glad_glVertexAttrib4Niv
+GLAD_API_CALL PFNGLVERTEXATTRIB4NSVPROC glad_glVertexAttrib4Nsv;
+#define glVertexAttrib4Nsv glad_glVertexAttrib4Nsv
+GLAD_API_CALL PFNGLVERTEXATTRIB4NUBPROC glad_glVertexAttrib4Nub;
+#define glVertexAttrib4Nub glad_glVertexAttrib4Nub
+GLAD_API_CALL PFNGLVERTEXATTRIB4NUBVPROC glad_glVertexAttrib4Nubv;
+#define glVertexAttrib4Nubv glad_glVertexAttrib4Nubv
+GLAD_API_CALL PFNGLVERTEXATTRIB4NUIVPROC glad_glVertexAttrib4Nuiv;
+#define glVertexAttrib4Nuiv glad_glVertexAttrib4Nuiv
+GLAD_API_CALL PFNGLVERTEXATTRIB4NUSVPROC glad_glVertexAttrib4Nusv;
+#define glVertexAttrib4Nusv glad_glVertexAttrib4Nusv
+GLAD_API_CALL PFNGLVERTEXATTRIB4BVPROC glad_glVertexAttrib4bv;
+#define glVertexAttrib4bv glad_glVertexAttrib4bv
+GLAD_API_CALL PFNGLVERTEXATTRIB4DPROC glad_glVertexAttrib4d;
+#define glVertexAttrib4d glad_glVertexAttrib4d
+GLAD_API_CALL PFNGLVERTEXATTRIB4DVPROC glad_glVertexAttrib4dv;
+#define glVertexAttrib4dv glad_glVertexAttrib4dv
+GLAD_API_CALL PFNGLVERTEXATTRIB4FPROC glad_glVertexAttrib4f;
+#define glVertexAttrib4f glad_glVertexAttrib4f
+GLAD_API_CALL PFNGLVERTEXATTRIB4FVPROC glad_glVertexAttrib4fv;
+#define glVertexAttrib4fv glad_glVertexAttrib4fv
+GLAD_API_CALL PFNGLVERTEXATTRIB4IVPROC glad_glVertexAttrib4iv;
+#define glVertexAttrib4iv glad_glVertexAttrib4iv
+GLAD_API_CALL PFNGLVERTEXATTRIB4SPROC glad_glVertexAttrib4s;
+#define glVertexAttrib4s glad_glVertexAttrib4s
+GLAD_API_CALL PFNGLVERTEXATTRIB4SVPROC glad_glVertexAttrib4sv;
+#define glVertexAttrib4sv glad_glVertexAttrib4sv
+GLAD_API_CALL PFNGLVERTEXATTRIB4UBVPROC glad_glVertexAttrib4ubv;
+#define glVertexAttrib4ubv glad_glVertexAttrib4ubv
+GLAD_API_CALL PFNGLVERTEXATTRIB4UIVPROC glad_glVertexAttrib4uiv;
+#define glVertexAttrib4uiv glad_glVertexAttrib4uiv
+GLAD_API_CALL PFNGLVERTEXATTRIB4USVPROC glad_glVertexAttrib4usv;
+#define glVertexAttrib4usv glad_glVertexAttrib4usv
+GLAD_API_CALL PFNGLVERTEXATTRIBDIVISORPROC glad_glVertexAttribDivisor;
+#define glVertexAttribDivisor glad_glVertexAttribDivisor
+GLAD_API_CALL PFNGLVERTEXATTRIBI1IPROC glad_glVertexAttribI1i;
+#define glVertexAttribI1i glad_glVertexAttribI1i
+GLAD_API_CALL PFNGLVERTEXATTRIBI1IVPROC glad_glVertexAttribI1iv;
+#define glVertexAttribI1iv glad_glVertexAttribI1iv
+GLAD_API_CALL PFNGLVERTEXATTRIBI1UIPROC glad_glVertexAttribI1ui;
+#define glVertexAttribI1ui glad_glVertexAttribI1ui
+GLAD_API_CALL PFNGLVERTEXATTRIBI1UIVPROC glad_glVertexAttribI1uiv;
+#define glVertexAttribI1uiv glad_glVertexAttribI1uiv
+GLAD_API_CALL PFNGLVERTEXATTRIBI2IPROC glad_glVertexAttribI2i;
+#define glVertexAttribI2i glad_glVertexAttribI2i
+GLAD_API_CALL PFNGLVERTEXATTRIBI2IVPROC glad_glVertexAttribI2iv;
+#define glVertexAttribI2iv glad_glVertexAttribI2iv
+GLAD_API_CALL PFNGLVERTEXATTRIBI2UIPROC glad_glVertexAttribI2ui;
+#define glVertexAttribI2ui glad_glVertexAttribI2ui
+GLAD_API_CALL PFNGLVERTEXATTRIBI2UIVPROC glad_glVertexAttribI2uiv;
+#define glVertexAttribI2uiv glad_glVertexAttribI2uiv
+GLAD_API_CALL PFNGLVERTEXATTRIBI3IPROC glad_glVertexAttribI3i;
+#define glVertexAttribI3i glad_glVertexAttribI3i
+GLAD_API_CALL PFNGLVERTEXATTRIBI3IVPROC glad_glVertexAttribI3iv;
+#define glVertexAttribI3iv glad_glVertexAttribI3iv
+GLAD_API_CALL PFNGLVERTEXATTRIBI3UIPROC glad_glVertexAttribI3ui;
+#define glVertexAttribI3ui glad_glVertexAttribI3ui
+GLAD_API_CALL PFNGLVERTEXATTRIBI3UIVPROC glad_glVertexAttribI3uiv;
+#define glVertexAttribI3uiv glad_glVertexAttribI3uiv
+GLAD_API_CALL PFNGLVERTEXATTRIBI4BVPROC glad_glVertexAttribI4bv;
+#define glVertexAttribI4bv glad_glVertexAttribI4bv
+GLAD_API_CALL PFNGLVERTEXATTRIBI4IPROC glad_glVertexAttribI4i;
+#define glVertexAttribI4i glad_glVertexAttribI4i
+GLAD_API_CALL PFNGLVERTEXATTRIBI4IVPROC glad_glVertexAttribI4iv;
+#define glVertexAttribI4iv glad_glVertexAttribI4iv
+GLAD_API_CALL PFNGLVERTEXATTRIBI4SVPROC glad_glVertexAttribI4sv;
+#define glVertexAttribI4sv glad_glVertexAttribI4sv
+GLAD_API_CALL PFNGLVERTEXATTRIBI4UBVPROC glad_glVertexAttribI4ubv;
+#define glVertexAttribI4ubv glad_glVertexAttribI4ubv
+GLAD_API_CALL PFNGLVERTEXATTRIBI4UIPROC glad_glVertexAttribI4ui;
+#define glVertexAttribI4ui glad_glVertexAttribI4ui
+GLAD_API_CALL PFNGLVERTEXATTRIBI4UIVPROC glad_glVertexAttribI4uiv;
+#define glVertexAttribI4uiv glad_glVertexAttribI4uiv
+GLAD_API_CALL PFNGLVERTEXATTRIBI4USVPROC glad_glVertexAttribI4usv;
+#define glVertexAttribI4usv glad_glVertexAttribI4usv
+GLAD_API_CALL PFNGLVERTEXATTRIBIPOINTERPROC glad_glVertexAttribIPointer;
+#define glVertexAttribIPointer glad_glVertexAttribIPointer
+GLAD_API_CALL PFNGLVERTEXATTRIBP1UIPROC glad_glVertexAttribP1ui;
+#define glVertexAttribP1ui glad_glVertexAttribP1ui
+GLAD_API_CALL PFNGLVERTEXATTRIBP1UIVPROC glad_glVertexAttribP1uiv;
+#define glVertexAttribP1uiv glad_glVertexAttribP1uiv
+GLAD_API_CALL PFNGLVERTEXATTRIBP2UIPROC glad_glVertexAttribP2ui;
+#define glVertexAttribP2ui glad_glVertexAttribP2ui
+GLAD_API_CALL PFNGLVERTEXATTRIBP2UIVPROC glad_glVertexAttribP2uiv;
+#define glVertexAttribP2uiv glad_glVertexAttribP2uiv
+GLAD_API_CALL PFNGLVERTEXATTRIBP3UIPROC glad_glVertexAttribP3ui;
+#define glVertexAttribP3ui glad_glVertexAttribP3ui
+GLAD_API_CALL PFNGLVERTEXATTRIBP3UIVPROC glad_glVertexAttribP3uiv;
+#define glVertexAttribP3uiv glad_glVertexAttribP3uiv
+GLAD_API_CALL PFNGLVERTEXATTRIBP4UIPROC glad_glVertexAttribP4ui;
+#define glVertexAttribP4ui glad_glVertexAttribP4ui
+GLAD_API_CALL PFNGLVERTEXATTRIBP4UIVPROC glad_glVertexAttribP4uiv;
+#define glVertexAttribP4uiv glad_glVertexAttribP4uiv
+GLAD_API_CALL PFNGLVERTEXATTRIBPOINTERPROC glad_glVertexAttribPointer;
+#define glVertexAttribPointer glad_glVertexAttribPointer
+GLAD_API_CALL PFNGLVERTEXP2UIPROC glad_glVertexP2ui;
+#define glVertexP2ui glad_glVertexP2ui
+GLAD_API_CALL PFNGLVERTEXP2UIVPROC glad_glVertexP2uiv;
+#define glVertexP2uiv glad_glVertexP2uiv
+GLAD_API_CALL PFNGLVERTEXP3UIPROC glad_glVertexP3ui;
+#define glVertexP3ui glad_glVertexP3ui
+GLAD_API_CALL PFNGLVERTEXP3UIVPROC glad_glVertexP3uiv;
+#define glVertexP3uiv glad_glVertexP3uiv
+GLAD_API_CALL PFNGLVERTEXP4UIPROC glad_glVertexP4ui;
+#define glVertexP4ui glad_glVertexP4ui
+GLAD_API_CALL PFNGLVERTEXP4UIVPROC glad_glVertexP4uiv;
+#define glVertexP4uiv glad_glVertexP4uiv
+GLAD_API_CALL PFNGLVERTEXPOINTERPROC glad_glVertexPointer;
+#define glVertexPointer glad_glVertexPointer
+GLAD_API_CALL PFNGLVIEWPORTPROC glad_glViewport;
+#define glViewport glad_glViewport
+GLAD_API_CALL PFNGLWAITSYNCPROC glad_glWaitSync;
+#define glWaitSync glad_glWaitSync
+GLAD_API_CALL PFNGLWINDOWPOS2DPROC glad_glWindowPos2d;
+#define glWindowPos2d glad_glWindowPos2d
+GLAD_API_CALL PFNGLWINDOWPOS2DVPROC glad_glWindowPos2dv;
+#define glWindowPos2dv glad_glWindowPos2dv
+GLAD_API_CALL PFNGLWINDOWPOS2FPROC glad_glWindowPos2f;
+#define glWindowPos2f glad_glWindowPos2f
+GLAD_API_CALL PFNGLWINDOWPOS2FVPROC glad_glWindowPos2fv;
+#define glWindowPos2fv glad_glWindowPos2fv
+GLAD_API_CALL PFNGLWINDOWPOS2IPROC glad_glWindowPos2i;
+#define glWindowPos2i glad_glWindowPos2i
+GLAD_API_CALL PFNGLWINDOWPOS2IVPROC glad_glWindowPos2iv;
+#define glWindowPos2iv glad_glWindowPos2iv
+GLAD_API_CALL PFNGLWINDOWPOS2SPROC glad_glWindowPos2s;
+#define glWindowPos2s glad_glWindowPos2s
+GLAD_API_CALL PFNGLWINDOWPOS2SVPROC glad_glWindowPos2sv;
+#define glWindowPos2sv glad_glWindowPos2sv
+GLAD_API_CALL PFNGLWINDOWPOS3DPROC glad_glWindowPos3d;
+#define glWindowPos3d glad_glWindowPos3d
+GLAD_API_CALL PFNGLWINDOWPOS3DVPROC glad_glWindowPos3dv;
+#define glWindowPos3dv glad_glWindowPos3dv
+GLAD_API_CALL PFNGLWINDOWPOS3FPROC glad_glWindowPos3f;
+#define glWindowPos3f glad_glWindowPos3f
+GLAD_API_CALL PFNGLWINDOWPOS3FVPROC glad_glWindowPos3fv;
+#define glWindowPos3fv glad_glWindowPos3fv
+GLAD_API_CALL PFNGLWINDOWPOS3IPROC glad_glWindowPos3i;
+#define glWindowPos3i glad_glWindowPos3i
+GLAD_API_CALL PFNGLWINDOWPOS3IVPROC glad_glWindowPos3iv;
+#define glWindowPos3iv glad_glWindowPos3iv
+GLAD_API_CALL PFNGLWINDOWPOS3SPROC glad_glWindowPos3s;
+#define glWindowPos3s glad_glWindowPos3s
+GLAD_API_CALL PFNGLWINDOWPOS3SVPROC glad_glWindowPos3sv;
+#define glWindowPos3sv glad_glWindowPos3sv
+
+
+
+
+
+GLAD_API_CALL int gladLoadGLUserPtr( GLADuserptrloadfunc load, void *userptr);
+GLAD_API_CALL int gladLoadGL( GLADloadfunc load);
+
+
+#ifdef GLAD_GL
+
+GLAD_API_CALL int gladLoaderLoadGL(void);
+GLAD_API_CALL void gladLoaderUnloadGL(void);
+
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/thirdparty/glad/glad/glad.h b/thirdparty/glad/glad/glad.h
deleted file mode 100644
index f211e6aa57..0000000000
--- a/thirdparty/glad/glad/glad.h
+++ /dev/null
@@ -1,3784 +0,0 @@
-/*
-
- OpenGL loader generated by glad 0.1.34 on Tue Nov 17 16:41:02 2020.
-
- Language/Generator: C/C++
- Specification: gl
- APIs: gl=3.3
- Profile: compatibility
- Extensions:
- GL_ARB_debug_output,
- GL_ARB_framebuffer_object,
- GL_EXT_framebuffer_blit,
- GL_EXT_framebuffer_multisample,
- GL_EXT_framebuffer_object
- Loader: True
- Local files: False
- Omit khrplatform: False
- Reproducible: False
-
- Commandline:
- --profile="compatibility" --api="gl=3.3" --generator="c" --spec="gl" --extensions="GL_ARB_debug_output,GL_ARB_framebuffer_object,GL_EXT_framebuffer_blit,GL_EXT_framebuffer_multisample,GL_EXT_framebuffer_object"
- Online:
- https://glad.dav1d.de/#profile=compatibility&language=c&specification=gl&loader=on&api=gl%3D3.3&extensions=GL_ARB_debug_output&extensions=GL_ARB_framebuffer_object&extensions=GL_EXT_framebuffer_blit&extensions=GL_EXT_framebuffer_multisample&extensions=GL_EXT_framebuffer_object
-*/
-
-
-#ifndef __glad_h_
-#define __glad_h_
-
-#ifdef __gl_h_
-#error OpenGL header already included, remove this include, glad already provides it
-#endif
-#define __gl_h_
-
-#if defined(_WIN32) && !defined(APIENTRY) && !defined(__CYGWIN__) && !defined(__SCITECH_SNAP__)
-#define APIENTRY __stdcall
-#endif
-
-#ifndef APIENTRY
-#define APIENTRY
-#endif
-#ifndef APIENTRYP
-#define APIENTRYP APIENTRY *
-#endif
-
-#ifndef GLAPIENTRY
-#define GLAPIENTRY APIENTRY
-#endif
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-struct gladGLversionStruct {
- int major;
- int minor;
-};
-
-typedef void* (* GLADloadproc)(const char *name);
-
-#ifndef GLAPI
-# if defined(GLAD_GLAPI_EXPORT)
-# if defined(_WIN32) || defined(__CYGWIN__)
-# if defined(GLAD_GLAPI_EXPORT_BUILD)
-# if defined(__GNUC__)
-# define GLAPI __attribute__ ((dllexport)) extern
-# else
-# define GLAPI __declspec(dllexport) extern
-# endif
-# else
-# if defined(__GNUC__)
-# define GLAPI __attribute__ ((dllimport)) extern
-# else
-# define GLAPI __declspec(dllimport) extern
-# endif
-# endif
-# elif defined(__GNUC__) && defined(GLAD_GLAPI_EXPORT_BUILD)
-# define GLAPI __attribute__ ((visibility ("default"))) extern
-# else
-# define GLAPI extern
-# endif
-# else
-# define GLAPI extern
-# endif
-#endif
-
-GLAPI struct gladGLversionStruct GLVersion;
-
-GLAPI int gladLoadGL(void);
-
-GLAPI int gladLoadGLLoader(GLADloadproc);
-
-#include <KHR/khrplatform.h>
-typedef unsigned int GLenum;
-typedef unsigned char GLboolean;
-typedef unsigned int GLbitfield;
-typedef void GLvoid;
-typedef khronos_int8_t GLbyte;
-typedef khronos_uint8_t GLubyte;
-typedef khronos_int16_t GLshort;
-typedef khronos_uint16_t GLushort;
-typedef int GLint;
-typedef unsigned int GLuint;
-typedef khronos_int32_t GLclampx;
-typedef int GLsizei;
-typedef khronos_float_t GLfloat;
-typedef khronos_float_t GLclampf;
-typedef double GLdouble;
-typedef double GLclampd;
-typedef void *GLeglClientBufferEXT;
-typedef void *GLeglImageOES;
-typedef char GLchar;
-typedef char GLcharARB;
-#ifdef __APPLE__
-typedef void *GLhandleARB;
-#else
-typedef unsigned int GLhandleARB;
-#endif
-typedef khronos_uint16_t GLhalf;
-typedef khronos_uint16_t GLhalfARB;
-typedef khronos_int32_t GLfixed;
-typedef khronos_intptr_t GLintptr;
-typedef khronos_intptr_t GLintptrARB;
-typedef khronos_ssize_t GLsizeiptr;
-typedef khronos_ssize_t GLsizeiptrARB;
-typedef khronos_int64_t GLint64;
-typedef khronos_int64_t GLint64EXT;
-typedef khronos_uint64_t GLuint64;
-typedef khronos_uint64_t GLuint64EXT;
-typedef struct __GLsync *GLsync;
-struct _cl_context;
-struct _cl_event;
-typedef void (APIENTRY *GLDEBUGPROC)(GLenum source,GLenum type,GLuint id,GLenum severity,GLsizei length,const GLchar *message,const void *userParam);
-typedef void (APIENTRY *GLDEBUGPROCARB)(GLenum source,GLenum type,GLuint id,GLenum severity,GLsizei length,const GLchar *message,const void *userParam);
-typedef void (APIENTRY *GLDEBUGPROCKHR)(GLenum source,GLenum type,GLuint id,GLenum severity,GLsizei length,const GLchar *message,const void *userParam);
-typedef void (APIENTRY *GLDEBUGPROCAMD)(GLuint id,GLenum category,GLenum severity,GLsizei length,const GLchar *message,void *userParam);
-typedef unsigned short GLhalfNV;
-typedef GLintptr GLvdpauSurfaceNV;
-typedef void (APIENTRY *GLVULKANPROCNV)(void);
-#define GL_DEPTH_BUFFER_BIT 0x00000100
-#define GL_STENCIL_BUFFER_BIT 0x00000400
-#define GL_COLOR_BUFFER_BIT 0x00004000
-#define GL_FALSE 0
-#define GL_TRUE 1
-#define GL_POINTS 0x0000
-#define GL_LINES 0x0001
-#define GL_LINE_LOOP 0x0002
-#define GL_LINE_STRIP 0x0003
-#define GL_TRIANGLES 0x0004
-#define GL_TRIANGLE_STRIP 0x0005
-#define GL_TRIANGLE_FAN 0x0006
-#define GL_QUADS 0x0007
-#define GL_NEVER 0x0200
-#define GL_LESS 0x0201
-#define GL_EQUAL 0x0202
-#define GL_LEQUAL 0x0203
-#define GL_GREATER 0x0204
-#define GL_NOTEQUAL 0x0205
-#define GL_GEQUAL 0x0206
-#define GL_ALWAYS 0x0207
-#define GL_ZERO 0
-#define GL_ONE 1
-#define GL_SRC_COLOR 0x0300
-#define GL_ONE_MINUS_SRC_COLOR 0x0301
-#define GL_SRC_ALPHA 0x0302
-#define GL_ONE_MINUS_SRC_ALPHA 0x0303
-#define GL_DST_ALPHA 0x0304
-#define GL_ONE_MINUS_DST_ALPHA 0x0305
-#define GL_DST_COLOR 0x0306
-#define GL_ONE_MINUS_DST_COLOR 0x0307
-#define GL_SRC_ALPHA_SATURATE 0x0308
-#define GL_NONE 0
-#define GL_FRONT_LEFT 0x0400
-#define GL_FRONT_RIGHT 0x0401
-#define GL_BACK_LEFT 0x0402
-#define GL_BACK_RIGHT 0x0403
-#define GL_FRONT 0x0404
-#define GL_BACK 0x0405
-#define GL_LEFT 0x0406
-#define GL_RIGHT 0x0407
-#define GL_FRONT_AND_BACK 0x0408
-#define GL_NO_ERROR 0
-#define GL_INVALID_ENUM 0x0500
-#define GL_INVALID_VALUE 0x0501
-#define GL_INVALID_OPERATION 0x0502
-#define GL_OUT_OF_MEMORY 0x0505
-#define GL_CW 0x0900
-#define GL_CCW 0x0901
-#define GL_POINT_SIZE 0x0B11
-#define GL_POINT_SIZE_RANGE 0x0B12
-#define GL_POINT_SIZE_GRANULARITY 0x0B13
-#define GL_LINE_SMOOTH 0x0B20
-#define GL_LINE_WIDTH 0x0B21
-#define GL_LINE_WIDTH_RANGE 0x0B22
-#define GL_LINE_WIDTH_GRANULARITY 0x0B23
-#define GL_POLYGON_MODE 0x0B40
-#define GL_POLYGON_SMOOTH 0x0B41
-#define GL_CULL_FACE 0x0B44
-#define GL_CULL_FACE_MODE 0x0B45
-#define GL_FRONT_FACE 0x0B46
-#define GL_DEPTH_RANGE 0x0B70
-#define GL_DEPTH_TEST 0x0B71
-#define GL_DEPTH_WRITEMASK 0x0B72
-#define GL_DEPTH_CLEAR_VALUE 0x0B73
-#define GL_DEPTH_FUNC 0x0B74
-#define GL_STENCIL_TEST 0x0B90
-#define GL_STENCIL_CLEAR_VALUE 0x0B91
-#define GL_STENCIL_FUNC 0x0B92
-#define GL_STENCIL_VALUE_MASK 0x0B93
-#define GL_STENCIL_FAIL 0x0B94
-#define GL_STENCIL_PASS_DEPTH_FAIL 0x0B95
-#define GL_STENCIL_PASS_DEPTH_PASS 0x0B96
-#define GL_STENCIL_REF 0x0B97
-#define GL_STENCIL_WRITEMASK 0x0B98
-#define GL_VIEWPORT 0x0BA2
-#define GL_DITHER 0x0BD0
-#define GL_BLEND_DST 0x0BE0
-#define GL_BLEND_SRC 0x0BE1
-#define GL_BLEND 0x0BE2
-#define GL_LOGIC_OP_MODE 0x0BF0
-#define GL_DRAW_BUFFER 0x0C01
-#define GL_READ_BUFFER 0x0C02
-#define GL_SCISSOR_BOX 0x0C10
-#define GL_SCISSOR_TEST 0x0C11
-#define GL_COLOR_CLEAR_VALUE 0x0C22
-#define GL_COLOR_WRITEMASK 0x0C23
-#define GL_DOUBLEBUFFER 0x0C32
-#define GL_STEREO 0x0C33
-#define GL_LINE_SMOOTH_HINT 0x0C52
-#define GL_POLYGON_SMOOTH_HINT 0x0C53
-#define GL_UNPACK_SWAP_BYTES 0x0CF0
-#define GL_UNPACK_LSB_FIRST 0x0CF1
-#define GL_UNPACK_ROW_LENGTH 0x0CF2
-#define GL_UNPACK_SKIP_ROWS 0x0CF3
-#define GL_UNPACK_SKIP_PIXELS 0x0CF4
-#define GL_UNPACK_ALIGNMENT 0x0CF5
-#define GL_PACK_SWAP_BYTES 0x0D00
-#define GL_PACK_LSB_FIRST 0x0D01
-#define GL_PACK_ROW_LENGTH 0x0D02
-#define GL_PACK_SKIP_ROWS 0x0D03
-#define GL_PACK_SKIP_PIXELS 0x0D04
-#define GL_PACK_ALIGNMENT 0x0D05
-#define GL_MAX_TEXTURE_SIZE 0x0D33
-#define GL_MAX_VIEWPORT_DIMS 0x0D3A
-#define GL_SUBPIXEL_BITS 0x0D50
-#define GL_TEXTURE_1D 0x0DE0
-#define GL_TEXTURE_2D 0x0DE1
-#define GL_TEXTURE_WIDTH 0x1000
-#define GL_TEXTURE_HEIGHT 0x1001
-#define GL_TEXTURE_BORDER_COLOR 0x1004
-#define GL_DONT_CARE 0x1100
-#define GL_FASTEST 0x1101
-#define GL_NICEST 0x1102
-#define GL_BYTE 0x1400
-#define GL_UNSIGNED_BYTE 0x1401
-#define GL_SHORT 0x1402
-#define GL_UNSIGNED_SHORT 0x1403
-#define GL_INT 0x1404
-#define GL_UNSIGNED_INT 0x1405
-#define GL_FLOAT 0x1406
-#define GL_STACK_OVERFLOW 0x0503
-#define GL_STACK_UNDERFLOW 0x0504
-#define GL_CLEAR 0x1500
-#define GL_AND 0x1501
-#define GL_AND_REVERSE 0x1502
-#define GL_COPY 0x1503
-#define GL_AND_INVERTED 0x1504
-#define GL_NOOP 0x1505
-#define GL_XOR 0x1506
-#define GL_OR 0x1507
-#define GL_NOR 0x1508
-#define GL_EQUIV 0x1509
-#define GL_INVERT 0x150A
-#define GL_OR_REVERSE 0x150B
-#define GL_COPY_INVERTED 0x150C
-#define GL_OR_INVERTED 0x150D
-#define GL_NAND 0x150E
-#define GL_SET 0x150F
-#define GL_TEXTURE 0x1702
-#define GL_COLOR 0x1800
-#define GL_DEPTH 0x1801
-#define GL_STENCIL 0x1802
-#define GL_STENCIL_INDEX 0x1901
-#define GL_DEPTH_COMPONENT 0x1902
-#define GL_RED 0x1903
-#define GL_GREEN 0x1904
-#define GL_BLUE 0x1905
-#define GL_ALPHA 0x1906
-#define GL_RGB 0x1907
-#define GL_RGBA 0x1908
-#define GL_POINT 0x1B00
-#define GL_LINE 0x1B01
-#define GL_FILL 0x1B02
-#define GL_KEEP 0x1E00
-#define GL_REPLACE 0x1E01
-#define GL_INCR 0x1E02
-#define GL_DECR 0x1E03
-#define GL_VENDOR 0x1F00
-#define GL_RENDERER 0x1F01
-#define GL_VERSION 0x1F02
-#define GL_EXTENSIONS 0x1F03
-#define GL_NEAREST 0x2600
-#define GL_LINEAR 0x2601
-#define GL_NEAREST_MIPMAP_NEAREST 0x2700
-#define GL_LINEAR_MIPMAP_NEAREST 0x2701
-#define GL_NEAREST_MIPMAP_LINEAR 0x2702
-#define GL_LINEAR_MIPMAP_LINEAR 0x2703
-#define GL_TEXTURE_MAG_FILTER 0x2800
-#define GL_TEXTURE_MIN_FILTER 0x2801
-#define GL_TEXTURE_WRAP_S 0x2802
-#define GL_TEXTURE_WRAP_T 0x2803
-#define GL_REPEAT 0x2901
-#define GL_CURRENT_BIT 0x00000001
-#define GL_POINT_BIT 0x00000002
-#define GL_LINE_BIT 0x00000004
-#define GL_POLYGON_BIT 0x00000008
-#define GL_POLYGON_STIPPLE_BIT 0x00000010
-#define GL_PIXEL_MODE_BIT 0x00000020
-#define GL_LIGHTING_BIT 0x00000040
-#define GL_FOG_BIT 0x00000080
-#define GL_ACCUM_BUFFER_BIT 0x00000200
-#define GL_VIEWPORT_BIT 0x00000800
-#define GL_TRANSFORM_BIT 0x00001000
-#define GL_ENABLE_BIT 0x00002000
-#define GL_HINT_BIT 0x00008000
-#define GL_EVAL_BIT 0x00010000
-#define GL_LIST_BIT 0x00020000
-#define GL_TEXTURE_BIT 0x00040000
-#define GL_SCISSOR_BIT 0x00080000
-#define GL_ALL_ATTRIB_BITS 0xFFFFFFFF
-#define GL_QUAD_STRIP 0x0008
-#define GL_POLYGON 0x0009
-#define GL_ACCUM 0x0100
-#define GL_LOAD 0x0101
-#define GL_RETURN 0x0102
-#define GL_MULT 0x0103
-#define GL_ADD 0x0104
-#define GL_AUX0 0x0409
-#define GL_AUX1 0x040A
-#define GL_AUX2 0x040B
-#define GL_AUX3 0x040C
-#define GL_2D 0x0600
-#define GL_3D 0x0601
-#define GL_3D_COLOR 0x0602
-#define GL_3D_COLOR_TEXTURE 0x0603
-#define GL_4D_COLOR_TEXTURE 0x0604
-#define GL_PASS_THROUGH_TOKEN 0x0700
-#define GL_POINT_TOKEN 0x0701
-#define GL_LINE_TOKEN 0x0702
-#define GL_POLYGON_TOKEN 0x0703
-#define GL_BITMAP_TOKEN 0x0704
-#define GL_DRAW_PIXEL_TOKEN 0x0705
-#define GL_COPY_PIXEL_TOKEN 0x0706
-#define GL_LINE_RESET_TOKEN 0x0707
-#define GL_EXP 0x0800
-#define GL_EXP2 0x0801
-#define GL_COEFF 0x0A00
-#define GL_ORDER 0x0A01
-#define GL_DOMAIN 0x0A02
-#define GL_PIXEL_MAP_I_TO_I 0x0C70
-#define GL_PIXEL_MAP_S_TO_S 0x0C71
-#define GL_PIXEL_MAP_I_TO_R 0x0C72
-#define GL_PIXEL_MAP_I_TO_G 0x0C73
-#define GL_PIXEL_MAP_I_TO_B 0x0C74
-#define GL_PIXEL_MAP_I_TO_A 0x0C75
-#define GL_PIXEL_MAP_R_TO_R 0x0C76
-#define GL_PIXEL_MAP_G_TO_G 0x0C77
-#define GL_PIXEL_MAP_B_TO_B 0x0C78
-#define GL_PIXEL_MAP_A_TO_A 0x0C79
-#define GL_CURRENT_COLOR 0x0B00
-#define GL_CURRENT_INDEX 0x0B01
-#define GL_CURRENT_NORMAL 0x0B02
-#define GL_CURRENT_TEXTURE_COORDS 0x0B03
-#define GL_CURRENT_RASTER_COLOR 0x0B04
-#define GL_CURRENT_RASTER_INDEX 0x0B05
-#define GL_CURRENT_RASTER_TEXTURE_COORDS 0x0B06
-#define GL_CURRENT_RASTER_POSITION 0x0B07
-#define GL_CURRENT_RASTER_POSITION_VALID 0x0B08
-#define GL_CURRENT_RASTER_DISTANCE 0x0B09
-#define GL_POINT_SMOOTH 0x0B10
-#define GL_LINE_STIPPLE 0x0B24
-#define GL_LINE_STIPPLE_PATTERN 0x0B25
-#define GL_LINE_STIPPLE_REPEAT 0x0B26
-#define GL_LIST_MODE 0x0B30
-#define GL_MAX_LIST_NESTING 0x0B31
-#define GL_LIST_BASE 0x0B32
-#define GL_LIST_INDEX 0x0B33
-#define GL_POLYGON_STIPPLE 0x0B42
-#define GL_EDGE_FLAG 0x0B43
-#define GL_LIGHTING 0x0B50
-#define GL_LIGHT_MODEL_LOCAL_VIEWER 0x0B51
-#define GL_LIGHT_MODEL_TWO_SIDE 0x0B52
-#define GL_LIGHT_MODEL_AMBIENT 0x0B53
-#define GL_SHADE_MODEL 0x0B54
-#define GL_COLOR_MATERIAL_FACE 0x0B55
-#define GL_COLOR_MATERIAL_PARAMETER 0x0B56
-#define GL_COLOR_MATERIAL 0x0B57
-#define GL_FOG 0x0B60
-#define GL_FOG_INDEX 0x0B61
-#define GL_FOG_DENSITY 0x0B62
-#define GL_FOG_START 0x0B63
-#define GL_FOG_END 0x0B64
-#define GL_FOG_MODE 0x0B65
-#define GL_FOG_COLOR 0x0B66
-#define GL_ACCUM_CLEAR_VALUE 0x0B80
-#define GL_MATRIX_MODE 0x0BA0
-#define GL_NORMALIZE 0x0BA1
-#define GL_MODELVIEW_STACK_DEPTH 0x0BA3
-#define GL_PROJECTION_STACK_DEPTH 0x0BA4
-#define GL_TEXTURE_STACK_DEPTH 0x0BA5
-#define GL_MODELVIEW_MATRIX 0x0BA6
-#define GL_PROJECTION_MATRIX 0x0BA7
-#define GL_TEXTURE_MATRIX 0x0BA8
-#define GL_ATTRIB_STACK_DEPTH 0x0BB0
-#define GL_ALPHA_TEST 0x0BC0
-#define GL_ALPHA_TEST_FUNC 0x0BC1
-#define GL_ALPHA_TEST_REF 0x0BC2
-#define GL_LOGIC_OP 0x0BF1
-#define GL_AUX_BUFFERS 0x0C00
-#define GL_INDEX_CLEAR_VALUE 0x0C20
-#define GL_INDEX_WRITEMASK 0x0C21
-#define GL_INDEX_MODE 0x0C30
-#define GL_RGBA_MODE 0x0C31
-#define GL_RENDER_MODE 0x0C40
-#define GL_PERSPECTIVE_CORRECTION_HINT 0x0C50
-#define GL_POINT_SMOOTH_HINT 0x0C51
-#define GL_FOG_HINT 0x0C54
-#define GL_TEXTURE_GEN_S 0x0C60
-#define GL_TEXTURE_GEN_T 0x0C61
-#define GL_TEXTURE_GEN_R 0x0C62
-#define GL_TEXTURE_GEN_Q 0x0C63
-#define GL_PIXEL_MAP_I_TO_I_SIZE 0x0CB0
-#define GL_PIXEL_MAP_S_TO_S_SIZE 0x0CB1
-#define GL_PIXEL_MAP_I_TO_R_SIZE 0x0CB2
-#define GL_PIXEL_MAP_I_TO_G_SIZE 0x0CB3
-#define GL_PIXEL_MAP_I_TO_B_SIZE 0x0CB4
-#define GL_PIXEL_MAP_I_TO_A_SIZE 0x0CB5
-#define GL_PIXEL_MAP_R_TO_R_SIZE 0x0CB6
-#define GL_PIXEL_MAP_G_TO_G_SIZE 0x0CB7
-#define GL_PIXEL_MAP_B_TO_B_SIZE 0x0CB8
-#define GL_PIXEL_MAP_A_TO_A_SIZE 0x0CB9
-#define GL_MAP_COLOR 0x0D10
-#define GL_MAP_STENCIL 0x0D11
-#define GL_INDEX_SHIFT 0x0D12
-#define GL_INDEX_OFFSET 0x0D13
-#define GL_RED_SCALE 0x0D14
-#define GL_RED_BIAS 0x0D15
-#define GL_ZOOM_X 0x0D16
-#define GL_ZOOM_Y 0x0D17
-#define GL_GREEN_SCALE 0x0D18
-#define GL_GREEN_BIAS 0x0D19
-#define GL_BLUE_SCALE 0x0D1A
-#define GL_BLUE_BIAS 0x0D1B
-#define GL_ALPHA_SCALE 0x0D1C
-#define GL_ALPHA_BIAS 0x0D1D
-#define GL_DEPTH_SCALE 0x0D1E
-#define GL_DEPTH_BIAS 0x0D1F
-#define GL_MAX_EVAL_ORDER 0x0D30
-#define GL_MAX_LIGHTS 0x0D31
-#define GL_MAX_CLIP_PLANES 0x0D32
-#define GL_MAX_PIXEL_MAP_TABLE 0x0D34
-#define GL_MAX_ATTRIB_STACK_DEPTH 0x0D35
-#define GL_MAX_MODELVIEW_STACK_DEPTH 0x0D36
-#define GL_MAX_NAME_STACK_DEPTH 0x0D37
-#define GL_MAX_PROJECTION_STACK_DEPTH 0x0D38
-#define GL_MAX_TEXTURE_STACK_DEPTH 0x0D39
-#define GL_INDEX_BITS 0x0D51
-#define GL_RED_BITS 0x0D52
-#define GL_GREEN_BITS 0x0D53
-#define GL_BLUE_BITS 0x0D54
-#define GL_ALPHA_BITS 0x0D55
-#define GL_DEPTH_BITS 0x0D56
-#define GL_STENCIL_BITS 0x0D57
-#define GL_ACCUM_RED_BITS 0x0D58
-#define GL_ACCUM_GREEN_BITS 0x0D59
-#define GL_ACCUM_BLUE_BITS 0x0D5A
-#define GL_ACCUM_ALPHA_BITS 0x0D5B
-#define GL_NAME_STACK_DEPTH 0x0D70
-#define GL_AUTO_NORMAL 0x0D80
-#define GL_MAP1_COLOR_4 0x0D90
-#define GL_MAP1_INDEX 0x0D91
-#define GL_MAP1_NORMAL 0x0D92
-#define GL_MAP1_TEXTURE_COORD_1 0x0D93
-#define GL_MAP1_TEXTURE_COORD_2 0x0D94
-#define GL_MAP1_TEXTURE_COORD_3 0x0D95
-#define GL_MAP1_TEXTURE_COORD_4 0x0D96
-#define GL_MAP1_VERTEX_3 0x0D97
-#define GL_MAP1_VERTEX_4 0x0D98
-#define GL_MAP2_COLOR_4 0x0DB0
-#define GL_MAP2_INDEX 0x0DB1
-#define GL_MAP2_NORMAL 0x0DB2
-#define GL_MAP2_TEXTURE_COORD_1 0x0DB3
-#define GL_MAP2_TEXTURE_COORD_2 0x0DB4
-#define GL_MAP2_TEXTURE_COORD_3 0x0DB5
-#define GL_MAP2_TEXTURE_COORD_4 0x0DB6
-#define GL_MAP2_VERTEX_3 0x0DB7
-#define GL_MAP2_VERTEX_4 0x0DB8
-#define GL_MAP1_GRID_DOMAIN 0x0DD0
-#define GL_MAP1_GRID_SEGMENTS 0x0DD1
-#define GL_MAP2_GRID_DOMAIN 0x0DD2
-#define GL_MAP2_GRID_SEGMENTS 0x0DD3
-#define GL_TEXTURE_COMPONENTS 0x1003
-#define GL_TEXTURE_BORDER 0x1005
-#define GL_AMBIENT 0x1200
-#define GL_DIFFUSE 0x1201
-#define GL_SPECULAR 0x1202
-#define GL_POSITION 0x1203
-#define GL_SPOT_DIRECTION 0x1204
-#define GL_SPOT_EXPONENT 0x1205
-#define GL_SPOT_CUTOFF 0x1206
-#define GL_CONSTANT_ATTENUATION 0x1207
-#define GL_LINEAR_ATTENUATION 0x1208
-#define GL_QUADRATIC_ATTENUATION 0x1209
-#define GL_COMPILE 0x1300
-#define GL_COMPILE_AND_EXECUTE 0x1301
-#define GL_2_BYTES 0x1407
-#define GL_3_BYTES 0x1408
-#define GL_4_BYTES 0x1409
-#define GL_EMISSION 0x1600
-#define GL_SHININESS 0x1601
-#define GL_AMBIENT_AND_DIFFUSE 0x1602
-#define GL_COLOR_INDEXES 0x1603
-#define GL_MODELVIEW 0x1700
-#define GL_PROJECTION 0x1701
-#define GL_COLOR_INDEX 0x1900
-#define GL_LUMINANCE 0x1909
-#define GL_LUMINANCE_ALPHA 0x190A
-#define GL_BITMAP 0x1A00
-#define GL_RENDER 0x1C00
-#define GL_FEEDBACK 0x1C01
-#define GL_SELECT 0x1C02
-#define GL_FLAT 0x1D00
-#define GL_SMOOTH 0x1D01
-#define GL_S 0x2000
-#define GL_T 0x2001
-#define GL_R 0x2002
-#define GL_Q 0x2003
-#define GL_MODULATE 0x2100
-#define GL_DECAL 0x2101
-#define GL_TEXTURE_ENV_MODE 0x2200
-#define GL_TEXTURE_ENV_COLOR 0x2201
-#define GL_TEXTURE_ENV 0x2300
-#define GL_EYE_LINEAR 0x2400
-#define GL_OBJECT_LINEAR 0x2401
-#define GL_SPHERE_MAP 0x2402
-#define GL_TEXTURE_GEN_MODE 0x2500
-#define GL_OBJECT_PLANE 0x2501
-#define GL_EYE_PLANE 0x2502
-#define GL_CLAMP 0x2900
-#define GL_CLIP_PLANE0 0x3000
-#define GL_CLIP_PLANE1 0x3001
-#define GL_CLIP_PLANE2 0x3002
-#define GL_CLIP_PLANE3 0x3003
-#define GL_CLIP_PLANE4 0x3004
-#define GL_CLIP_PLANE5 0x3005
-#define GL_LIGHT0 0x4000
-#define GL_LIGHT1 0x4001
-#define GL_LIGHT2 0x4002
-#define GL_LIGHT3 0x4003
-#define GL_LIGHT4 0x4004
-#define GL_LIGHT5 0x4005
-#define GL_LIGHT6 0x4006
-#define GL_LIGHT7 0x4007
-#define GL_COLOR_LOGIC_OP 0x0BF2
-#define GL_POLYGON_OFFSET_UNITS 0x2A00
-#define GL_POLYGON_OFFSET_POINT 0x2A01
-#define GL_POLYGON_OFFSET_LINE 0x2A02
-#define GL_POLYGON_OFFSET_FILL 0x8037
-#define GL_POLYGON_OFFSET_FACTOR 0x8038
-#define GL_TEXTURE_BINDING_1D 0x8068
-#define GL_TEXTURE_BINDING_2D 0x8069
-#define GL_TEXTURE_INTERNAL_FORMAT 0x1003
-#define GL_TEXTURE_RED_SIZE 0x805C
-#define GL_TEXTURE_GREEN_SIZE 0x805D
-#define GL_TEXTURE_BLUE_SIZE 0x805E
-#define GL_TEXTURE_ALPHA_SIZE 0x805F
-#define GL_DOUBLE 0x140A
-#define GL_PROXY_TEXTURE_1D 0x8063
-#define GL_PROXY_TEXTURE_2D 0x8064
-#define GL_R3_G3_B2 0x2A10
-#define GL_RGB4 0x804F
-#define GL_RGB5 0x8050
-#define GL_RGB8 0x8051
-#define GL_RGB10 0x8052
-#define GL_RGB12 0x8053
-#define GL_RGB16 0x8054
-#define GL_RGBA2 0x8055
-#define GL_RGBA4 0x8056
-#define GL_RGB5_A1 0x8057
-#define GL_RGBA8 0x8058
-#define GL_RGB10_A2 0x8059
-#define GL_RGBA12 0x805A
-#define GL_RGBA16 0x805B
-#define GL_CLIENT_PIXEL_STORE_BIT 0x00000001
-#define GL_CLIENT_VERTEX_ARRAY_BIT 0x00000002
-#define GL_CLIENT_ALL_ATTRIB_BITS 0xFFFFFFFF
-#define GL_VERTEX_ARRAY_POINTER 0x808E
-#define GL_NORMAL_ARRAY_POINTER 0x808F
-#define GL_COLOR_ARRAY_POINTER 0x8090
-#define GL_INDEX_ARRAY_POINTER 0x8091
-#define GL_TEXTURE_COORD_ARRAY_POINTER 0x8092
-#define GL_EDGE_FLAG_ARRAY_POINTER 0x8093
-#define GL_FEEDBACK_BUFFER_POINTER 0x0DF0
-#define GL_SELECTION_BUFFER_POINTER 0x0DF3
-#define GL_CLIENT_ATTRIB_STACK_DEPTH 0x0BB1
-#define GL_INDEX_LOGIC_OP 0x0BF1
-#define GL_MAX_CLIENT_ATTRIB_STACK_DEPTH 0x0D3B
-#define GL_FEEDBACK_BUFFER_SIZE 0x0DF1
-#define GL_FEEDBACK_BUFFER_TYPE 0x0DF2
-#define GL_SELECTION_BUFFER_SIZE 0x0DF4
-#define GL_VERTEX_ARRAY 0x8074
-#define GL_NORMAL_ARRAY 0x8075
-#define GL_COLOR_ARRAY 0x8076
-#define GL_INDEX_ARRAY 0x8077
-#define GL_TEXTURE_COORD_ARRAY 0x8078
-#define GL_EDGE_FLAG_ARRAY 0x8079
-#define GL_VERTEX_ARRAY_SIZE 0x807A
-#define GL_VERTEX_ARRAY_TYPE 0x807B
-#define GL_VERTEX_ARRAY_STRIDE 0x807C
-#define GL_NORMAL_ARRAY_TYPE 0x807E
-#define GL_NORMAL_ARRAY_STRIDE 0x807F
-#define GL_COLOR_ARRAY_SIZE 0x8081
-#define GL_COLOR_ARRAY_TYPE 0x8082
-#define GL_COLOR_ARRAY_STRIDE 0x8083
-#define GL_INDEX_ARRAY_TYPE 0x8085
-#define GL_INDEX_ARRAY_STRIDE 0x8086
-#define GL_TEXTURE_COORD_ARRAY_SIZE 0x8088
-#define GL_TEXTURE_COORD_ARRAY_TYPE 0x8089
-#define GL_TEXTURE_COORD_ARRAY_STRIDE 0x808A
-#define GL_EDGE_FLAG_ARRAY_STRIDE 0x808C
-#define GL_TEXTURE_LUMINANCE_SIZE 0x8060
-#define GL_TEXTURE_INTENSITY_SIZE 0x8061
-#define GL_TEXTURE_PRIORITY 0x8066
-#define GL_TEXTURE_RESIDENT 0x8067
-#define GL_ALPHA4 0x803B
-#define GL_ALPHA8 0x803C
-#define GL_ALPHA12 0x803D
-#define GL_ALPHA16 0x803E
-#define GL_LUMINANCE4 0x803F
-#define GL_LUMINANCE8 0x8040
-#define GL_LUMINANCE12 0x8041
-#define GL_LUMINANCE16 0x8042
-#define GL_LUMINANCE4_ALPHA4 0x8043
-#define GL_LUMINANCE6_ALPHA2 0x8044
-#define GL_LUMINANCE8_ALPHA8 0x8045
-#define GL_LUMINANCE12_ALPHA4 0x8046
-#define GL_LUMINANCE12_ALPHA12 0x8047
-#define GL_LUMINANCE16_ALPHA16 0x8048
-#define GL_INTENSITY 0x8049
-#define GL_INTENSITY4 0x804A
-#define GL_INTENSITY8 0x804B
-#define GL_INTENSITY12 0x804C
-#define GL_INTENSITY16 0x804D
-#define GL_V2F 0x2A20
-#define GL_V3F 0x2A21
-#define GL_C4UB_V2F 0x2A22
-#define GL_C4UB_V3F 0x2A23
-#define GL_C3F_V3F 0x2A24
-#define GL_N3F_V3F 0x2A25
-#define GL_C4F_N3F_V3F 0x2A26
-#define GL_T2F_V3F 0x2A27
-#define GL_T4F_V4F 0x2A28
-#define GL_T2F_C4UB_V3F 0x2A29
-#define GL_T2F_C3F_V3F 0x2A2A
-#define GL_T2F_N3F_V3F 0x2A2B
-#define GL_T2F_C4F_N3F_V3F 0x2A2C
-#define GL_T4F_C4F_N3F_V4F 0x2A2D
-#define GL_UNSIGNED_BYTE_3_3_2 0x8032
-#define GL_UNSIGNED_SHORT_4_4_4_4 0x8033
-#define GL_UNSIGNED_SHORT_5_5_5_1 0x8034
-#define GL_UNSIGNED_INT_8_8_8_8 0x8035
-#define GL_UNSIGNED_INT_10_10_10_2 0x8036
-#define GL_TEXTURE_BINDING_3D 0x806A
-#define GL_PACK_SKIP_IMAGES 0x806B
-#define GL_PACK_IMAGE_HEIGHT 0x806C
-#define GL_UNPACK_SKIP_IMAGES 0x806D
-#define GL_UNPACK_IMAGE_HEIGHT 0x806E
-#define GL_TEXTURE_3D 0x806F
-#define GL_PROXY_TEXTURE_3D 0x8070
-#define GL_TEXTURE_DEPTH 0x8071
-#define GL_TEXTURE_WRAP_R 0x8072
-#define GL_MAX_3D_TEXTURE_SIZE 0x8073
-#define GL_UNSIGNED_BYTE_2_3_3_REV 0x8362
-#define GL_UNSIGNED_SHORT_5_6_5 0x8363
-#define GL_UNSIGNED_SHORT_5_6_5_REV 0x8364
-#define GL_UNSIGNED_SHORT_4_4_4_4_REV 0x8365
-#define GL_UNSIGNED_SHORT_1_5_5_5_REV 0x8366
-#define GL_UNSIGNED_INT_8_8_8_8_REV 0x8367
-#define GL_UNSIGNED_INT_2_10_10_10_REV 0x8368
-#define GL_BGR 0x80E0
-#define GL_BGRA 0x80E1
-#define GL_MAX_ELEMENTS_VERTICES 0x80E8
-#define GL_MAX_ELEMENTS_INDICES 0x80E9
-#define GL_CLAMP_TO_EDGE 0x812F
-#define GL_TEXTURE_MIN_LOD 0x813A
-#define GL_TEXTURE_MAX_LOD 0x813B
-#define GL_TEXTURE_BASE_LEVEL 0x813C
-#define GL_TEXTURE_MAX_LEVEL 0x813D
-#define GL_SMOOTH_POINT_SIZE_RANGE 0x0B12
-#define GL_SMOOTH_POINT_SIZE_GRANULARITY 0x0B13
-#define GL_SMOOTH_LINE_WIDTH_RANGE 0x0B22
-#define GL_SMOOTH_LINE_WIDTH_GRANULARITY 0x0B23
-#define GL_ALIASED_LINE_WIDTH_RANGE 0x846E
-#define GL_RESCALE_NORMAL 0x803A
-#define GL_LIGHT_MODEL_COLOR_CONTROL 0x81F8
-#define GL_SINGLE_COLOR 0x81F9
-#define GL_SEPARATE_SPECULAR_COLOR 0x81FA
-#define GL_ALIASED_POINT_SIZE_RANGE 0x846D
-#define GL_TEXTURE0 0x84C0
-#define GL_TEXTURE1 0x84C1
-#define GL_TEXTURE2 0x84C2
-#define GL_TEXTURE3 0x84C3
-#define GL_TEXTURE4 0x84C4
-#define GL_TEXTURE5 0x84C5
-#define GL_TEXTURE6 0x84C6
-#define GL_TEXTURE7 0x84C7
-#define GL_TEXTURE8 0x84C8
-#define GL_TEXTURE9 0x84C9
-#define GL_TEXTURE10 0x84CA
-#define GL_TEXTURE11 0x84CB
-#define GL_TEXTURE12 0x84CC
-#define GL_TEXTURE13 0x84CD
-#define GL_TEXTURE14 0x84CE
-#define GL_TEXTURE15 0x84CF
-#define GL_TEXTURE16 0x84D0
-#define GL_TEXTURE17 0x84D1
-#define GL_TEXTURE18 0x84D2
-#define GL_TEXTURE19 0x84D3
-#define GL_TEXTURE20 0x84D4
-#define GL_TEXTURE21 0x84D5
-#define GL_TEXTURE22 0x84D6
-#define GL_TEXTURE23 0x84D7
-#define GL_TEXTURE24 0x84D8
-#define GL_TEXTURE25 0x84D9
-#define GL_TEXTURE26 0x84DA
-#define GL_TEXTURE27 0x84DB
-#define GL_TEXTURE28 0x84DC
-#define GL_TEXTURE29 0x84DD
-#define GL_TEXTURE30 0x84DE
-#define GL_TEXTURE31 0x84DF
-#define GL_ACTIVE_TEXTURE 0x84E0
-#define GL_MULTISAMPLE 0x809D
-#define GL_SAMPLE_ALPHA_TO_COVERAGE 0x809E
-#define GL_SAMPLE_ALPHA_TO_ONE 0x809F
-#define GL_SAMPLE_COVERAGE 0x80A0
-#define GL_SAMPLE_BUFFERS 0x80A8
-#define GL_SAMPLES 0x80A9
-#define GL_SAMPLE_COVERAGE_VALUE 0x80AA
-#define GL_SAMPLE_COVERAGE_INVERT 0x80AB
-#define GL_TEXTURE_CUBE_MAP 0x8513
-#define GL_TEXTURE_BINDING_CUBE_MAP 0x8514
-#define GL_TEXTURE_CUBE_MAP_POSITIVE_X 0x8515
-#define GL_TEXTURE_CUBE_MAP_NEGATIVE_X 0x8516
-#define GL_TEXTURE_CUBE_MAP_POSITIVE_Y 0x8517
-#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Y 0x8518
-#define GL_TEXTURE_CUBE_MAP_POSITIVE_Z 0x8519
-#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Z 0x851A
-#define GL_PROXY_TEXTURE_CUBE_MAP 0x851B
-#define GL_MAX_CUBE_MAP_TEXTURE_SIZE 0x851C
-#define GL_COMPRESSED_RGB 0x84ED
-#define GL_COMPRESSED_RGBA 0x84EE
-#define GL_TEXTURE_COMPRESSION_HINT 0x84EF
-#define GL_TEXTURE_COMPRESSED_IMAGE_SIZE 0x86A0
-#define GL_TEXTURE_COMPRESSED 0x86A1
-#define GL_NUM_COMPRESSED_TEXTURE_FORMATS 0x86A2
-#define GL_COMPRESSED_TEXTURE_FORMATS 0x86A3
-#define GL_CLAMP_TO_BORDER 0x812D
-#define GL_CLIENT_ACTIVE_TEXTURE 0x84E1
-#define GL_MAX_TEXTURE_UNITS 0x84E2
-#define GL_TRANSPOSE_MODELVIEW_MATRIX 0x84E3
-#define GL_TRANSPOSE_PROJECTION_MATRIX 0x84E4
-#define GL_TRANSPOSE_TEXTURE_MATRIX 0x84E5
-#define GL_TRANSPOSE_COLOR_MATRIX 0x84E6
-#define GL_MULTISAMPLE_BIT 0x20000000
-#define GL_NORMAL_MAP 0x8511
-#define GL_REFLECTION_MAP 0x8512
-#define GL_COMPRESSED_ALPHA 0x84E9
-#define GL_COMPRESSED_LUMINANCE 0x84EA
-#define GL_COMPRESSED_LUMINANCE_ALPHA 0x84EB
-#define GL_COMPRESSED_INTENSITY 0x84EC
-#define GL_COMBINE 0x8570
-#define GL_COMBINE_RGB 0x8571
-#define GL_COMBINE_ALPHA 0x8572
-#define GL_SOURCE0_RGB 0x8580
-#define GL_SOURCE1_RGB 0x8581
-#define GL_SOURCE2_RGB 0x8582
-#define GL_SOURCE0_ALPHA 0x8588
-#define GL_SOURCE1_ALPHA 0x8589
-#define GL_SOURCE2_ALPHA 0x858A
-#define GL_OPERAND0_RGB 0x8590
-#define GL_OPERAND1_RGB 0x8591
-#define GL_OPERAND2_RGB 0x8592
-#define GL_OPERAND0_ALPHA 0x8598
-#define GL_OPERAND1_ALPHA 0x8599
-#define GL_OPERAND2_ALPHA 0x859A
-#define GL_RGB_SCALE 0x8573
-#define GL_ADD_SIGNED 0x8574
-#define GL_INTERPOLATE 0x8575
-#define GL_SUBTRACT 0x84E7
-#define GL_CONSTANT 0x8576
-#define GL_PRIMARY_COLOR 0x8577
-#define GL_PREVIOUS 0x8578
-#define GL_DOT3_RGB 0x86AE
-#define GL_DOT3_RGBA 0x86AF
-#define GL_BLEND_DST_RGB 0x80C8
-#define GL_BLEND_SRC_RGB 0x80C9
-#define GL_BLEND_DST_ALPHA 0x80CA
-#define GL_BLEND_SRC_ALPHA 0x80CB
-#define GL_POINT_FADE_THRESHOLD_SIZE 0x8128
-#define GL_DEPTH_COMPONENT16 0x81A5
-#define GL_DEPTH_COMPONENT24 0x81A6
-#define GL_DEPTH_COMPONENT32 0x81A7
-#define GL_MIRRORED_REPEAT 0x8370
-#define GL_MAX_TEXTURE_LOD_BIAS 0x84FD
-#define GL_TEXTURE_LOD_BIAS 0x8501
-#define GL_INCR_WRAP 0x8507
-#define GL_DECR_WRAP 0x8508
-#define GL_TEXTURE_DEPTH_SIZE 0x884A
-#define GL_TEXTURE_COMPARE_MODE 0x884C
-#define GL_TEXTURE_COMPARE_FUNC 0x884D
-#define GL_POINT_SIZE_MIN 0x8126
-#define GL_POINT_SIZE_MAX 0x8127
-#define GL_POINT_DISTANCE_ATTENUATION 0x8129
-#define GL_GENERATE_MIPMAP 0x8191
-#define GL_GENERATE_MIPMAP_HINT 0x8192
-#define GL_FOG_COORDINATE_SOURCE 0x8450
-#define GL_FOG_COORDINATE 0x8451
-#define GL_FRAGMENT_DEPTH 0x8452
-#define GL_CURRENT_FOG_COORDINATE 0x8453
-#define GL_FOG_COORDINATE_ARRAY_TYPE 0x8454
-#define GL_FOG_COORDINATE_ARRAY_STRIDE 0x8455
-#define GL_FOG_COORDINATE_ARRAY_POINTER 0x8456
-#define GL_FOG_COORDINATE_ARRAY 0x8457
-#define GL_COLOR_SUM 0x8458
-#define GL_CURRENT_SECONDARY_COLOR 0x8459
-#define GL_SECONDARY_COLOR_ARRAY_SIZE 0x845A
-#define GL_SECONDARY_COLOR_ARRAY_TYPE 0x845B
-#define GL_SECONDARY_COLOR_ARRAY_STRIDE 0x845C
-#define GL_SECONDARY_COLOR_ARRAY_POINTER 0x845D
-#define GL_SECONDARY_COLOR_ARRAY 0x845E
-#define GL_TEXTURE_FILTER_CONTROL 0x8500
-#define GL_DEPTH_TEXTURE_MODE 0x884B
-#define GL_COMPARE_R_TO_TEXTURE 0x884E
-#define GL_BLEND_COLOR 0x8005
-#define GL_BLEND_EQUATION 0x8009
-#define GL_CONSTANT_COLOR 0x8001
-#define GL_ONE_MINUS_CONSTANT_COLOR 0x8002
-#define GL_CONSTANT_ALPHA 0x8003
-#define GL_ONE_MINUS_CONSTANT_ALPHA 0x8004
-#define GL_FUNC_ADD 0x8006
-#define GL_FUNC_REVERSE_SUBTRACT 0x800B
-#define GL_FUNC_SUBTRACT 0x800A
-#define GL_MIN 0x8007
-#define GL_MAX 0x8008
-#define GL_BUFFER_SIZE 0x8764
-#define GL_BUFFER_USAGE 0x8765
-#define GL_QUERY_COUNTER_BITS 0x8864
-#define GL_CURRENT_QUERY 0x8865
-#define GL_QUERY_RESULT 0x8866
-#define GL_QUERY_RESULT_AVAILABLE 0x8867
-#define GL_ARRAY_BUFFER 0x8892
-#define GL_ELEMENT_ARRAY_BUFFER 0x8893
-#define GL_ARRAY_BUFFER_BINDING 0x8894
-#define GL_ELEMENT_ARRAY_BUFFER_BINDING 0x8895
-#define GL_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING 0x889F
-#define GL_READ_ONLY 0x88B8
-#define GL_WRITE_ONLY 0x88B9
-#define GL_READ_WRITE 0x88BA
-#define GL_BUFFER_ACCESS 0x88BB
-#define GL_BUFFER_MAPPED 0x88BC
-#define GL_BUFFER_MAP_POINTER 0x88BD
-#define GL_STREAM_DRAW 0x88E0
-#define GL_STREAM_READ 0x88E1
-#define GL_STREAM_COPY 0x88E2
-#define GL_STATIC_DRAW 0x88E4
-#define GL_STATIC_READ 0x88E5
-#define GL_STATIC_COPY 0x88E6
-#define GL_DYNAMIC_DRAW 0x88E8
-#define GL_DYNAMIC_READ 0x88E9
-#define GL_DYNAMIC_COPY 0x88EA
-#define GL_SAMPLES_PASSED 0x8914
-#define GL_SRC1_ALPHA 0x8589
-#define GL_VERTEX_ARRAY_BUFFER_BINDING 0x8896
-#define GL_NORMAL_ARRAY_BUFFER_BINDING 0x8897
-#define GL_COLOR_ARRAY_BUFFER_BINDING 0x8898
-#define GL_INDEX_ARRAY_BUFFER_BINDING 0x8899
-#define GL_TEXTURE_COORD_ARRAY_BUFFER_BINDING 0x889A
-#define GL_EDGE_FLAG_ARRAY_BUFFER_BINDING 0x889B
-#define GL_SECONDARY_COLOR_ARRAY_BUFFER_BINDING 0x889C
-#define GL_FOG_COORDINATE_ARRAY_BUFFER_BINDING 0x889D
-#define GL_WEIGHT_ARRAY_BUFFER_BINDING 0x889E
-#define GL_FOG_COORD_SRC 0x8450
-#define GL_FOG_COORD 0x8451
-#define GL_CURRENT_FOG_COORD 0x8453
-#define GL_FOG_COORD_ARRAY_TYPE 0x8454
-#define GL_FOG_COORD_ARRAY_STRIDE 0x8455
-#define GL_FOG_COORD_ARRAY_POINTER 0x8456
-#define GL_FOG_COORD_ARRAY 0x8457
-#define GL_FOG_COORD_ARRAY_BUFFER_BINDING 0x889D
-#define GL_SRC0_RGB 0x8580
-#define GL_SRC1_RGB 0x8581
-#define GL_SRC2_RGB 0x8582
-#define GL_SRC0_ALPHA 0x8588
-#define GL_SRC2_ALPHA 0x858A
-#define GL_BLEND_EQUATION_RGB 0x8009
-#define GL_VERTEX_ATTRIB_ARRAY_ENABLED 0x8622
-#define GL_VERTEX_ATTRIB_ARRAY_SIZE 0x8623
-#define GL_VERTEX_ATTRIB_ARRAY_STRIDE 0x8624
-#define GL_VERTEX_ATTRIB_ARRAY_TYPE 0x8625
-#define GL_CURRENT_VERTEX_ATTRIB 0x8626
-#define GL_VERTEX_PROGRAM_POINT_SIZE 0x8642
-#define GL_VERTEX_ATTRIB_ARRAY_POINTER 0x8645
-#define GL_STENCIL_BACK_FUNC 0x8800
-#define GL_STENCIL_BACK_FAIL 0x8801
-#define GL_STENCIL_BACK_PASS_DEPTH_FAIL 0x8802
-#define GL_STENCIL_BACK_PASS_DEPTH_PASS 0x8803
-#define GL_MAX_DRAW_BUFFERS 0x8824
-#define GL_DRAW_BUFFER0 0x8825
-#define GL_DRAW_BUFFER1 0x8826
-#define GL_DRAW_BUFFER2 0x8827
-#define GL_DRAW_BUFFER3 0x8828
-#define GL_DRAW_BUFFER4 0x8829
-#define GL_DRAW_BUFFER5 0x882A
-#define GL_DRAW_BUFFER6 0x882B
-#define GL_DRAW_BUFFER7 0x882C
-#define GL_DRAW_BUFFER8 0x882D
-#define GL_DRAW_BUFFER9 0x882E
-#define GL_DRAW_BUFFER10 0x882F
-#define GL_DRAW_BUFFER11 0x8830
-#define GL_DRAW_BUFFER12 0x8831
-#define GL_DRAW_BUFFER13 0x8832
-#define GL_DRAW_BUFFER14 0x8833
-#define GL_DRAW_BUFFER15 0x8834
-#define GL_BLEND_EQUATION_ALPHA 0x883D
-#define GL_MAX_VERTEX_ATTRIBS 0x8869
-#define GL_VERTEX_ATTRIB_ARRAY_NORMALIZED 0x886A
-#define GL_MAX_TEXTURE_IMAGE_UNITS 0x8872
-#define GL_FRAGMENT_SHADER 0x8B30
-#define GL_VERTEX_SHADER 0x8B31
-#define GL_MAX_FRAGMENT_UNIFORM_COMPONENTS 0x8B49
-#define GL_MAX_VERTEX_UNIFORM_COMPONENTS 0x8B4A
-#define GL_MAX_VARYING_FLOATS 0x8B4B
-#define GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS 0x8B4C
-#define GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS 0x8B4D
-#define GL_SHADER_TYPE 0x8B4F
-#define GL_FLOAT_VEC2 0x8B50
-#define GL_FLOAT_VEC3 0x8B51
-#define GL_FLOAT_VEC4 0x8B52
-#define GL_INT_VEC2 0x8B53
-#define GL_INT_VEC3 0x8B54
-#define GL_INT_VEC4 0x8B55
-#define GL_BOOL 0x8B56
-#define GL_BOOL_VEC2 0x8B57
-#define GL_BOOL_VEC3 0x8B58
-#define GL_BOOL_VEC4 0x8B59
-#define GL_FLOAT_MAT2 0x8B5A
-#define GL_FLOAT_MAT3 0x8B5B
-#define GL_FLOAT_MAT4 0x8B5C
-#define GL_SAMPLER_1D 0x8B5D
-#define GL_SAMPLER_2D 0x8B5E
-#define GL_SAMPLER_3D 0x8B5F
-#define GL_SAMPLER_CUBE 0x8B60
-#define GL_SAMPLER_1D_SHADOW 0x8B61
-#define GL_SAMPLER_2D_SHADOW 0x8B62
-#define GL_DELETE_STATUS 0x8B80
-#define GL_COMPILE_STATUS 0x8B81
-#define GL_LINK_STATUS 0x8B82
-#define GL_VALIDATE_STATUS 0x8B83
-#define GL_INFO_LOG_LENGTH 0x8B84
-#define GL_ATTACHED_SHADERS 0x8B85
-#define GL_ACTIVE_UNIFORMS 0x8B86
-#define GL_ACTIVE_UNIFORM_MAX_LENGTH 0x8B87
-#define GL_SHADER_SOURCE_LENGTH 0x8B88
-#define GL_ACTIVE_ATTRIBUTES 0x8B89
-#define GL_ACTIVE_ATTRIBUTE_MAX_LENGTH 0x8B8A
-#define GL_FRAGMENT_SHADER_DERIVATIVE_HINT 0x8B8B
-#define GL_SHADING_LANGUAGE_VERSION 0x8B8C
-#define GL_CURRENT_PROGRAM 0x8B8D
-#define GL_POINT_SPRITE_COORD_ORIGIN 0x8CA0
-#define GL_LOWER_LEFT 0x8CA1
-#define GL_UPPER_LEFT 0x8CA2
-#define GL_STENCIL_BACK_REF 0x8CA3
-#define GL_STENCIL_BACK_VALUE_MASK 0x8CA4
-#define GL_STENCIL_BACK_WRITEMASK 0x8CA5
-#define GL_VERTEX_PROGRAM_TWO_SIDE 0x8643
-#define GL_POINT_SPRITE 0x8861
-#define GL_COORD_REPLACE 0x8862
-#define GL_MAX_TEXTURE_COORDS 0x8871
-#define GL_PIXEL_PACK_BUFFER 0x88EB
-#define GL_PIXEL_UNPACK_BUFFER 0x88EC
-#define GL_PIXEL_PACK_BUFFER_BINDING 0x88ED
-#define GL_PIXEL_UNPACK_BUFFER_BINDING 0x88EF
-#define GL_FLOAT_MAT2x3 0x8B65
-#define GL_FLOAT_MAT2x4 0x8B66
-#define GL_FLOAT_MAT3x2 0x8B67
-#define GL_FLOAT_MAT3x4 0x8B68
-#define GL_FLOAT_MAT4x2 0x8B69
-#define GL_FLOAT_MAT4x3 0x8B6A
-#define GL_SRGB 0x8C40
-#define GL_SRGB8 0x8C41
-#define GL_SRGB_ALPHA 0x8C42
-#define GL_SRGB8_ALPHA8 0x8C43
-#define GL_COMPRESSED_SRGB 0x8C48
-#define GL_COMPRESSED_SRGB_ALPHA 0x8C49
-#define GL_CURRENT_RASTER_SECONDARY_COLOR 0x845F
-#define GL_SLUMINANCE_ALPHA 0x8C44
-#define GL_SLUMINANCE8_ALPHA8 0x8C45
-#define GL_SLUMINANCE 0x8C46
-#define GL_SLUMINANCE8 0x8C47
-#define GL_COMPRESSED_SLUMINANCE 0x8C4A
-#define GL_COMPRESSED_SLUMINANCE_ALPHA 0x8C4B
-#define GL_COMPARE_REF_TO_TEXTURE 0x884E
-#define GL_CLIP_DISTANCE0 0x3000
-#define GL_CLIP_DISTANCE1 0x3001
-#define GL_CLIP_DISTANCE2 0x3002
-#define GL_CLIP_DISTANCE3 0x3003
-#define GL_CLIP_DISTANCE4 0x3004
-#define GL_CLIP_DISTANCE5 0x3005
-#define GL_CLIP_DISTANCE6 0x3006
-#define GL_CLIP_DISTANCE7 0x3007
-#define GL_MAX_CLIP_DISTANCES 0x0D32
-#define GL_MAJOR_VERSION 0x821B
-#define GL_MINOR_VERSION 0x821C
-#define GL_NUM_EXTENSIONS 0x821D
-#define GL_CONTEXT_FLAGS 0x821E
-#define GL_COMPRESSED_RED 0x8225
-#define GL_COMPRESSED_RG 0x8226
-#define GL_CONTEXT_FLAG_FORWARD_COMPATIBLE_BIT 0x00000001
-#define GL_RGBA32F 0x8814
-#define GL_RGB32F 0x8815
-#define GL_RGBA16F 0x881A
-#define GL_RGB16F 0x881B
-#define GL_VERTEX_ATTRIB_ARRAY_INTEGER 0x88FD
-#define GL_MAX_ARRAY_TEXTURE_LAYERS 0x88FF
-#define GL_MIN_PROGRAM_TEXEL_OFFSET 0x8904
-#define GL_MAX_PROGRAM_TEXEL_OFFSET 0x8905
-#define GL_CLAMP_READ_COLOR 0x891C
-#define GL_FIXED_ONLY 0x891D
-#define GL_MAX_VARYING_COMPONENTS 0x8B4B
-#define GL_TEXTURE_1D_ARRAY 0x8C18
-#define GL_PROXY_TEXTURE_1D_ARRAY 0x8C19
-#define GL_TEXTURE_2D_ARRAY 0x8C1A
-#define GL_PROXY_TEXTURE_2D_ARRAY 0x8C1B
-#define GL_TEXTURE_BINDING_1D_ARRAY 0x8C1C
-#define GL_TEXTURE_BINDING_2D_ARRAY 0x8C1D
-#define GL_R11F_G11F_B10F 0x8C3A
-#define GL_UNSIGNED_INT_10F_11F_11F_REV 0x8C3B
-#define GL_RGB9_E5 0x8C3D
-#define GL_UNSIGNED_INT_5_9_9_9_REV 0x8C3E
-#define GL_TEXTURE_SHARED_SIZE 0x8C3F
-#define GL_TRANSFORM_FEEDBACK_VARYING_MAX_LENGTH 0x8C76
-#define GL_TRANSFORM_FEEDBACK_BUFFER_MODE 0x8C7F
-#define GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_COMPONENTS 0x8C80
-#define GL_TRANSFORM_FEEDBACK_VARYINGS 0x8C83
-#define GL_TRANSFORM_FEEDBACK_BUFFER_START 0x8C84
-#define GL_TRANSFORM_FEEDBACK_BUFFER_SIZE 0x8C85
-#define GL_PRIMITIVES_GENERATED 0x8C87
-#define GL_TRANSFORM_FEEDBACK_PRIMITIVES_WRITTEN 0x8C88
-#define GL_RASTERIZER_DISCARD 0x8C89
-#define GL_MAX_TRANSFORM_FEEDBACK_INTERLEAVED_COMPONENTS 0x8C8A
-#define GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_ATTRIBS 0x8C8B
-#define GL_INTERLEAVED_ATTRIBS 0x8C8C
-#define GL_SEPARATE_ATTRIBS 0x8C8D
-#define GL_TRANSFORM_FEEDBACK_BUFFER 0x8C8E
-#define GL_TRANSFORM_FEEDBACK_BUFFER_BINDING 0x8C8F
-#define GL_RGBA32UI 0x8D70
-#define GL_RGB32UI 0x8D71
-#define GL_RGBA16UI 0x8D76
-#define GL_RGB16UI 0x8D77
-#define GL_RGBA8UI 0x8D7C
-#define GL_RGB8UI 0x8D7D
-#define GL_RGBA32I 0x8D82
-#define GL_RGB32I 0x8D83
-#define GL_RGBA16I 0x8D88
-#define GL_RGB16I 0x8D89
-#define GL_RGBA8I 0x8D8E
-#define GL_RGB8I 0x8D8F
-#define GL_RED_INTEGER 0x8D94
-#define GL_GREEN_INTEGER 0x8D95
-#define GL_BLUE_INTEGER 0x8D96
-#define GL_RGB_INTEGER 0x8D98
-#define GL_RGBA_INTEGER 0x8D99
-#define GL_BGR_INTEGER 0x8D9A
-#define GL_BGRA_INTEGER 0x8D9B
-#define GL_SAMPLER_1D_ARRAY 0x8DC0
-#define GL_SAMPLER_2D_ARRAY 0x8DC1
-#define GL_SAMPLER_1D_ARRAY_SHADOW 0x8DC3
-#define GL_SAMPLER_2D_ARRAY_SHADOW 0x8DC4
-#define GL_SAMPLER_CUBE_SHADOW 0x8DC5
-#define GL_UNSIGNED_INT_VEC2 0x8DC6
-#define GL_UNSIGNED_INT_VEC3 0x8DC7
-#define GL_UNSIGNED_INT_VEC4 0x8DC8
-#define GL_INT_SAMPLER_1D 0x8DC9
-#define GL_INT_SAMPLER_2D 0x8DCA
-#define GL_INT_SAMPLER_3D 0x8DCB
-#define GL_INT_SAMPLER_CUBE 0x8DCC
-#define GL_INT_SAMPLER_1D_ARRAY 0x8DCE
-#define GL_INT_SAMPLER_2D_ARRAY 0x8DCF
-#define GL_UNSIGNED_INT_SAMPLER_1D 0x8DD1
-#define GL_UNSIGNED_INT_SAMPLER_2D 0x8DD2
-#define GL_UNSIGNED_INT_SAMPLER_3D 0x8DD3
-#define GL_UNSIGNED_INT_SAMPLER_CUBE 0x8DD4
-#define GL_UNSIGNED_INT_SAMPLER_1D_ARRAY 0x8DD6
-#define GL_UNSIGNED_INT_SAMPLER_2D_ARRAY 0x8DD7
-#define GL_QUERY_WAIT 0x8E13
-#define GL_QUERY_NO_WAIT 0x8E14
-#define GL_QUERY_BY_REGION_WAIT 0x8E15
-#define GL_QUERY_BY_REGION_NO_WAIT 0x8E16
-#define GL_BUFFER_ACCESS_FLAGS 0x911F
-#define GL_BUFFER_MAP_LENGTH 0x9120
-#define GL_BUFFER_MAP_OFFSET 0x9121
-#define GL_DEPTH_COMPONENT32F 0x8CAC
-#define GL_DEPTH32F_STENCIL8 0x8CAD
-#define GL_FLOAT_32_UNSIGNED_INT_24_8_REV 0x8DAD
-#define GL_INVALID_FRAMEBUFFER_OPERATION 0x0506
-#define GL_FRAMEBUFFER_ATTACHMENT_COLOR_ENCODING 0x8210
-#define GL_FRAMEBUFFER_ATTACHMENT_COMPONENT_TYPE 0x8211
-#define GL_FRAMEBUFFER_ATTACHMENT_RED_SIZE 0x8212
-#define GL_FRAMEBUFFER_ATTACHMENT_GREEN_SIZE 0x8213
-#define GL_FRAMEBUFFER_ATTACHMENT_BLUE_SIZE 0x8214
-#define GL_FRAMEBUFFER_ATTACHMENT_ALPHA_SIZE 0x8215
-#define GL_FRAMEBUFFER_ATTACHMENT_DEPTH_SIZE 0x8216
-#define GL_FRAMEBUFFER_ATTACHMENT_STENCIL_SIZE 0x8217
-#define GL_FRAMEBUFFER_DEFAULT 0x8218
-#define GL_FRAMEBUFFER_UNDEFINED 0x8219
-#define GL_DEPTH_STENCIL_ATTACHMENT 0x821A
-#define GL_MAX_RENDERBUFFER_SIZE 0x84E8
-#define GL_DEPTH_STENCIL 0x84F9
-#define GL_UNSIGNED_INT_24_8 0x84FA
-#define GL_DEPTH24_STENCIL8 0x88F0
-#define GL_TEXTURE_STENCIL_SIZE 0x88F1
-#define GL_TEXTURE_RED_TYPE 0x8C10
-#define GL_TEXTURE_GREEN_TYPE 0x8C11
-#define GL_TEXTURE_BLUE_TYPE 0x8C12
-#define GL_TEXTURE_ALPHA_TYPE 0x8C13
-#define GL_TEXTURE_DEPTH_TYPE 0x8C16
-#define GL_UNSIGNED_NORMALIZED 0x8C17
-#define GL_FRAMEBUFFER_BINDING 0x8CA6
-#define GL_DRAW_FRAMEBUFFER_BINDING 0x8CA6
-#define GL_RENDERBUFFER_BINDING 0x8CA7
-#define GL_READ_FRAMEBUFFER 0x8CA8
-#define GL_DRAW_FRAMEBUFFER 0x8CA9
-#define GL_READ_FRAMEBUFFER_BINDING 0x8CAA
-#define GL_RENDERBUFFER_SAMPLES 0x8CAB
-#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE 0x8CD0
-#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME 0x8CD1
-#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL 0x8CD2
-#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE 0x8CD3
-#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LAYER 0x8CD4
-#define GL_FRAMEBUFFER_COMPLETE 0x8CD5
-#define GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT 0x8CD6
-#define GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT 0x8CD7
-#define GL_FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER 0x8CDB
-#define GL_FRAMEBUFFER_INCOMPLETE_READ_BUFFER 0x8CDC
-#define GL_FRAMEBUFFER_UNSUPPORTED 0x8CDD
-#define GL_MAX_COLOR_ATTACHMENTS 0x8CDF
-#define GL_COLOR_ATTACHMENT0 0x8CE0
-#define GL_COLOR_ATTACHMENT1 0x8CE1
-#define GL_COLOR_ATTACHMENT2 0x8CE2
-#define GL_COLOR_ATTACHMENT3 0x8CE3
-#define GL_COLOR_ATTACHMENT4 0x8CE4
-#define GL_COLOR_ATTACHMENT5 0x8CE5
-#define GL_COLOR_ATTACHMENT6 0x8CE6
-#define GL_COLOR_ATTACHMENT7 0x8CE7
-#define GL_COLOR_ATTACHMENT8 0x8CE8
-#define GL_COLOR_ATTACHMENT9 0x8CE9
-#define GL_COLOR_ATTACHMENT10 0x8CEA
-#define GL_COLOR_ATTACHMENT11 0x8CEB
-#define GL_COLOR_ATTACHMENT12 0x8CEC
-#define GL_COLOR_ATTACHMENT13 0x8CED
-#define GL_COLOR_ATTACHMENT14 0x8CEE
-#define GL_COLOR_ATTACHMENT15 0x8CEF
-#define GL_COLOR_ATTACHMENT16 0x8CF0
-#define GL_COLOR_ATTACHMENT17 0x8CF1
-#define GL_COLOR_ATTACHMENT18 0x8CF2
-#define GL_COLOR_ATTACHMENT19 0x8CF3
-#define GL_COLOR_ATTACHMENT20 0x8CF4
-#define GL_COLOR_ATTACHMENT21 0x8CF5
-#define GL_COLOR_ATTACHMENT22 0x8CF6
-#define GL_COLOR_ATTACHMENT23 0x8CF7
-#define GL_COLOR_ATTACHMENT24 0x8CF8
-#define GL_COLOR_ATTACHMENT25 0x8CF9
-#define GL_COLOR_ATTACHMENT26 0x8CFA
-#define GL_COLOR_ATTACHMENT27 0x8CFB
-#define GL_COLOR_ATTACHMENT28 0x8CFC
-#define GL_COLOR_ATTACHMENT29 0x8CFD
-#define GL_COLOR_ATTACHMENT30 0x8CFE
-#define GL_COLOR_ATTACHMENT31 0x8CFF
-#define GL_DEPTH_ATTACHMENT 0x8D00
-#define GL_STENCIL_ATTACHMENT 0x8D20
-#define GL_FRAMEBUFFER 0x8D40
-#define GL_RENDERBUFFER 0x8D41
-#define GL_RENDERBUFFER_WIDTH 0x8D42
-#define GL_RENDERBUFFER_HEIGHT 0x8D43
-#define GL_RENDERBUFFER_INTERNAL_FORMAT 0x8D44
-#define GL_STENCIL_INDEX1 0x8D46
-#define GL_STENCIL_INDEX4 0x8D47
-#define GL_STENCIL_INDEX8 0x8D48
-#define GL_STENCIL_INDEX16 0x8D49
-#define GL_RENDERBUFFER_RED_SIZE 0x8D50
-#define GL_RENDERBUFFER_GREEN_SIZE 0x8D51
-#define GL_RENDERBUFFER_BLUE_SIZE 0x8D52
-#define GL_RENDERBUFFER_ALPHA_SIZE 0x8D53
-#define GL_RENDERBUFFER_DEPTH_SIZE 0x8D54
-#define GL_RENDERBUFFER_STENCIL_SIZE 0x8D55
-#define GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE 0x8D56
-#define GL_MAX_SAMPLES 0x8D57
-#define GL_INDEX 0x8222
-#define GL_TEXTURE_LUMINANCE_TYPE 0x8C14
-#define GL_TEXTURE_INTENSITY_TYPE 0x8C15
-#define GL_FRAMEBUFFER_SRGB 0x8DB9
-#define GL_HALF_FLOAT 0x140B
-#define GL_MAP_READ_BIT 0x0001
-#define GL_MAP_WRITE_BIT 0x0002
-#define GL_MAP_INVALIDATE_RANGE_BIT 0x0004
-#define GL_MAP_INVALIDATE_BUFFER_BIT 0x0008
-#define GL_MAP_FLUSH_EXPLICIT_BIT 0x0010
-#define GL_MAP_UNSYNCHRONIZED_BIT 0x0020
-#define GL_COMPRESSED_RED_RGTC1 0x8DBB
-#define GL_COMPRESSED_SIGNED_RED_RGTC1 0x8DBC
-#define GL_COMPRESSED_RG_RGTC2 0x8DBD
-#define GL_COMPRESSED_SIGNED_RG_RGTC2 0x8DBE
-#define GL_RG 0x8227
-#define GL_RG_INTEGER 0x8228
-#define GL_R8 0x8229
-#define GL_R16 0x822A
-#define GL_RG8 0x822B
-#define GL_RG16 0x822C
-#define GL_R16F 0x822D
-#define GL_R32F 0x822E
-#define GL_RG16F 0x822F
-#define GL_RG32F 0x8230
-#define GL_R8I 0x8231
-#define GL_R8UI 0x8232
-#define GL_R16I 0x8233
-#define GL_R16UI 0x8234
-#define GL_R32I 0x8235
-#define GL_R32UI 0x8236
-#define GL_RG8I 0x8237
-#define GL_RG8UI 0x8238
-#define GL_RG16I 0x8239
-#define GL_RG16UI 0x823A
-#define GL_RG32I 0x823B
-#define GL_RG32UI 0x823C
-#define GL_VERTEX_ARRAY_BINDING 0x85B5
-#define GL_CLAMP_VERTEX_COLOR 0x891A
-#define GL_CLAMP_FRAGMENT_COLOR 0x891B
-#define GL_ALPHA_INTEGER 0x8D97
-#define GL_SAMPLER_2D_RECT 0x8B63
-#define GL_SAMPLER_2D_RECT_SHADOW 0x8B64
-#define GL_SAMPLER_BUFFER 0x8DC2
-#define GL_INT_SAMPLER_2D_RECT 0x8DCD
-#define GL_INT_SAMPLER_BUFFER 0x8DD0
-#define GL_UNSIGNED_INT_SAMPLER_2D_RECT 0x8DD5
-#define GL_UNSIGNED_INT_SAMPLER_BUFFER 0x8DD8
-#define GL_TEXTURE_BUFFER 0x8C2A
-#define GL_MAX_TEXTURE_BUFFER_SIZE 0x8C2B
-#define GL_TEXTURE_BINDING_BUFFER 0x8C2C
-#define GL_TEXTURE_BUFFER_DATA_STORE_BINDING 0x8C2D
-#define GL_TEXTURE_RECTANGLE 0x84F5
-#define GL_TEXTURE_BINDING_RECTANGLE 0x84F6
-#define GL_PROXY_TEXTURE_RECTANGLE 0x84F7
-#define GL_MAX_RECTANGLE_TEXTURE_SIZE 0x84F8
-#define GL_R8_SNORM 0x8F94
-#define GL_RG8_SNORM 0x8F95
-#define GL_RGB8_SNORM 0x8F96
-#define GL_RGBA8_SNORM 0x8F97
-#define GL_R16_SNORM 0x8F98
-#define GL_RG16_SNORM 0x8F99
-#define GL_RGB16_SNORM 0x8F9A
-#define GL_RGBA16_SNORM 0x8F9B
-#define GL_SIGNED_NORMALIZED 0x8F9C
-#define GL_PRIMITIVE_RESTART 0x8F9D
-#define GL_PRIMITIVE_RESTART_INDEX 0x8F9E
-#define GL_COPY_READ_BUFFER 0x8F36
-#define GL_COPY_WRITE_BUFFER 0x8F37
-#define GL_UNIFORM_BUFFER 0x8A11
-#define GL_UNIFORM_BUFFER_BINDING 0x8A28
-#define GL_UNIFORM_BUFFER_START 0x8A29
-#define GL_UNIFORM_BUFFER_SIZE 0x8A2A
-#define GL_MAX_VERTEX_UNIFORM_BLOCKS 0x8A2B
-#define GL_MAX_GEOMETRY_UNIFORM_BLOCKS 0x8A2C
-#define GL_MAX_FRAGMENT_UNIFORM_BLOCKS 0x8A2D
-#define GL_MAX_COMBINED_UNIFORM_BLOCKS 0x8A2E
-#define GL_MAX_UNIFORM_BUFFER_BINDINGS 0x8A2F
-#define GL_MAX_UNIFORM_BLOCK_SIZE 0x8A30
-#define GL_MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS 0x8A31
-#define GL_MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS 0x8A32
-#define GL_MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS 0x8A33
-#define GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT 0x8A34
-#define GL_ACTIVE_UNIFORM_BLOCK_MAX_NAME_LENGTH 0x8A35
-#define GL_ACTIVE_UNIFORM_BLOCKS 0x8A36
-#define GL_UNIFORM_TYPE 0x8A37
-#define GL_UNIFORM_SIZE 0x8A38
-#define GL_UNIFORM_NAME_LENGTH 0x8A39
-#define GL_UNIFORM_BLOCK_INDEX 0x8A3A
-#define GL_UNIFORM_OFFSET 0x8A3B
-#define GL_UNIFORM_ARRAY_STRIDE 0x8A3C
-#define GL_UNIFORM_MATRIX_STRIDE 0x8A3D
-#define GL_UNIFORM_IS_ROW_MAJOR 0x8A3E
-#define GL_UNIFORM_BLOCK_BINDING 0x8A3F
-#define GL_UNIFORM_BLOCK_DATA_SIZE 0x8A40
-#define GL_UNIFORM_BLOCK_NAME_LENGTH 0x8A41
-#define GL_UNIFORM_BLOCK_ACTIVE_UNIFORMS 0x8A42
-#define GL_UNIFORM_BLOCK_ACTIVE_UNIFORM_INDICES 0x8A43
-#define GL_UNIFORM_BLOCK_REFERENCED_BY_VERTEX_SHADER 0x8A44
-#define GL_UNIFORM_BLOCK_REFERENCED_BY_GEOMETRY_SHADER 0x8A45
-#define GL_UNIFORM_BLOCK_REFERENCED_BY_FRAGMENT_SHADER 0x8A46
-#define GL_INVALID_INDEX 0xFFFFFFFF
-#define GL_CONTEXT_CORE_PROFILE_BIT 0x00000001
-#define GL_CONTEXT_COMPATIBILITY_PROFILE_BIT 0x00000002
-#define GL_LINES_ADJACENCY 0x000A
-#define GL_LINE_STRIP_ADJACENCY 0x000B
-#define GL_TRIANGLES_ADJACENCY 0x000C
-#define GL_TRIANGLE_STRIP_ADJACENCY 0x000D
-#define GL_PROGRAM_POINT_SIZE 0x8642
-#define GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS 0x8C29
-#define GL_FRAMEBUFFER_ATTACHMENT_LAYERED 0x8DA7
-#define GL_FRAMEBUFFER_INCOMPLETE_LAYER_TARGETS 0x8DA8
-#define GL_GEOMETRY_SHADER 0x8DD9
-#define GL_GEOMETRY_VERTICES_OUT 0x8916
-#define GL_GEOMETRY_INPUT_TYPE 0x8917
-#define GL_GEOMETRY_OUTPUT_TYPE 0x8918
-#define GL_MAX_GEOMETRY_UNIFORM_COMPONENTS 0x8DDF
-#define GL_MAX_GEOMETRY_OUTPUT_VERTICES 0x8DE0
-#define GL_MAX_GEOMETRY_TOTAL_OUTPUT_COMPONENTS 0x8DE1
-#define GL_MAX_VERTEX_OUTPUT_COMPONENTS 0x9122
-#define GL_MAX_GEOMETRY_INPUT_COMPONENTS 0x9123
-#define GL_MAX_GEOMETRY_OUTPUT_COMPONENTS 0x9124
-#define GL_MAX_FRAGMENT_INPUT_COMPONENTS 0x9125
-#define GL_CONTEXT_PROFILE_MASK 0x9126
-#define GL_DEPTH_CLAMP 0x864F
-#define GL_QUADS_FOLLOW_PROVOKING_VERTEX_CONVENTION 0x8E4C
-#define GL_FIRST_VERTEX_CONVENTION 0x8E4D
-#define GL_LAST_VERTEX_CONVENTION 0x8E4E
-#define GL_PROVOKING_VERTEX 0x8E4F
-#define GL_TEXTURE_CUBE_MAP_SEAMLESS 0x884F
-#define GL_MAX_SERVER_WAIT_TIMEOUT 0x9111
-#define GL_OBJECT_TYPE 0x9112
-#define GL_SYNC_CONDITION 0x9113
-#define GL_SYNC_STATUS 0x9114
-#define GL_SYNC_FLAGS 0x9115
-#define GL_SYNC_FENCE 0x9116
-#define GL_SYNC_GPU_COMMANDS_COMPLETE 0x9117
-#define GL_UNSIGNALED 0x9118
-#define GL_SIGNALED 0x9119
-#define GL_ALREADY_SIGNALED 0x911A
-#define GL_TIMEOUT_EXPIRED 0x911B
-#define GL_CONDITION_SATISFIED 0x911C
-#define GL_WAIT_FAILED 0x911D
-#define GL_TIMEOUT_IGNORED 0xFFFFFFFFFFFFFFFF
-#define GL_SYNC_FLUSH_COMMANDS_BIT 0x00000001
-#define GL_SAMPLE_POSITION 0x8E50
-#define GL_SAMPLE_MASK 0x8E51
-#define GL_SAMPLE_MASK_VALUE 0x8E52
-#define GL_MAX_SAMPLE_MASK_WORDS 0x8E59
-#define GL_TEXTURE_2D_MULTISAMPLE 0x9100
-#define GL_PROXY_TEXTURE_2D_MULTISAMPLE 0x9101
-#define GL_TEXTURE_2D_MULTISAMPLE_ARRAY 0x9102
-#define GL_PROXY_TEXTURE_2D_MULTISAMPLE_ARRAY 0x9103
-#define GL_TEXTURE_BINDING_2D_MULTISAMPLE 0x9104
-#define GL_TEXTURE_BINDING_2D_MULTISAMPLE_ARRAY 0x9105
-#define GL_TEXTURE_SAMPLES 0x9106
-#define GL_TEXTURE_FIXED_SAMPLE_LOCATIONS 0x9107
-#define GL_SAMPLER_2D_MULTISAMPLE 0x9108
-#define GL_INT_SAMPLER_2D_MULTISAMPLE 0x9109
-#define GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE 0x910A
-#define GL_SAMPLER_2D_MULTISAMPLE_ARRAY 0x910B
-#define GL_INT_SAMPLER_2D_MULTISAMPLE_ARRAY 0x910C
-#define GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE_ARRAY 0x910D
-#define GL_MAX_COLOR_TEXTURE_SAMPLES 0x910E
-#define GL_MAX_DEPTH_TEXTURE_SAMPLES 0x910F
-#define GL_MAX_INTEGER_SAMPLES 0x9110
-#define GL_VERTEX_ATTRIB_ARRAY_DIVISOR 0x88FE
-#define GL_SRC1_COLOR 0x88F9
-#define GL_ONE_MINUS_SRC1_COLOR 0x88FA
-#define GL_ONE_MINUS_SRC1_ALPHA 0x88FB
-#define GL_MAX_DUAL_SOURCE_DRAW_BUFFERS 0x88FC
-#define GL_ANY_SAMPLES_PASSED 0x8C2F
-#define GL_SAMPLER_BINDING 0x8919
-#define GL_RGB10_A2UI 0x906F
-#define GL_TEXTURE_SWIZZLE_R 0x8E42
-#define GL_TEXTURE_SWIZZLE_G 0x8E43
-#define GL_TEXTURE_SWIZZLE_B 0x8E44
-#define GL_TEXTURE_SWIZZLE_A 0x8E45
-#define GL_TEXTURE_SWIZZLE_RGBA 0x8E46
-#define GL_TIME_ELAPSED 0x88BF
-#define GL_TIMESTAMP 0x8E28
-#define GL_INT_2_10_10_10_REV 0x8D9F
-#ifndef GL_VERSION_1_0
-#define GL_VERSION_1_0 1
-GLAPI int GLAD_GL_VERSION_1_0;
-typedef void (APIENTRYP PFNGLCULLFACEPROC)(GLenum mode);
-GLAPI PFNGLCULLFACEPROC glad_glCullFace;
-#define glCullFace glad_glCullFace
-typedef void (APIENTRYP PFNGLFRONTFACEPROC)(GLenum mode);
-GLAPI PFNGLFRONTFACEPROC glad_glFrontFace;
-#define glFrontFace glad_glFrontFace
-typedef void (APIENTRYP PFNGLHINTPROC)(GLenum target, GLenum mode);
-GLAPI PFNGLHINTPROC glad_glHint;
-#define glHint glad_glHint
-typedef void (APIENTRYP PFNGLLINEWIDTHPROC)(GLfloat width);
-GLAPI PFNGLLINEWIDTHPROC glad_glLineWidth;
-#define glLineWidth glad_glLineWidth
-typedef void (APIENTRYP PFNGLPOINTSIZEPROC)(GLfloat size);
-GLAPI PFNGLPOINTSIZEPROC glad_glPointSize;
-#define glPointSize glad_glPointSize
-typedef void (APIENTRYP PFNGLPOLYGONMODEPROC)(GLenum face, GLenum mode);
-GLAPI PFNGLPOLYGONMODEPROC glad_glPolygonMode;
-#define glPolygonMode glad_glPolygonMode
-typedef void (APIENTRYP PFNGLSCISSORPROC)(GLint x, GLint y, GLsizei width, GLsizei height);
-GLAPI PFNGLSCISSORPROC glad_glScissor;
-#define glScissor glad_glScissor
-typedef void (APIENTRYP PFNGLTEXPARAMETERFPROC)(GLenum target, GLenum pname, GLfloat param);
-GLAPI PFNGLTEXPARAMETERFPROC glad_glTexParameterf;
-#define glTexParameterf glad_glTexParameterf
-typedef void (APIENTRYP PFNGLTEXPARAMETERFVPROC)(GLenum target, GLenum pname, const GLfloat *params);
-GLAPI PFNGLTEXPARAMETERFVPROC glad_glTexParameterfv;
-#define glTexParameterfv glad_glTexParameterfv
-typedef void (APIENTRYP PFNGLTEXPARAMETERIPROC)(GLenum target, GLenum pname, GLint param);
-GLAPI PFNGLTEXPARAMETERIPROC glad_glTexParameteri;
-#define glTexParameteri glad_glTexParameteri
-typedef void (APIENTRYP PFNGLTEXPARAMETERIVPROC)(GLenum target, GLenum pname, const GLint *params);
-GLAPI PFNGLTEXPARAMETERIVPROC glad_glTexParameteriv;
-#define glTexParameteriv glad_glTexParameteriv
-typedef void (APIENTRYP PFNGLTEXIMAGE1DPROC)(GLenum target, GLint level, GLint internalformat, GLsizei width, GLint border, GLenum format, GLenum type, const void *pixels);
-GLAPI PFNGLTEXIMAGE1DPROC glad_glTexImage1D;
-#define glTexImage1D glad_glTexImage1D
-typedef void (APIENTRYP PFNGLTEXIMAGE2DPROC)(GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void *pixels);
-GLAPI PFNGLTEXIMAGE2DPROC glad_glTexImage2D;
-#define glTexImage2D glad_glTexImage2D
-typedef void (APIENTRYP PFNGLDRAWBUFFERPROC)(GLenum buf);
-GLAPI PFNGLDRAWBUFFERPROC glad_glDrawBuffer;
-#define glDrawBuffer glad_glDrawBuffer
-typedef void (APIENTRYP PFNGLCLEARPROC)(GLbitfield mask);
-GLAPI PFNGLCLEARPROC glad_glClear;
-#define glClear glad_glClear
-typedef void (APIENTRYP PFNGLCLEARCOLORPROC)(GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha);
-GLAPI PFNGLCLEARCOLORPROC glad_glClearColor;
-#define glClearColor glad_glClearColor
-typedef void (APIENTRYP PFNGLCLEARSTENCILPROC)(GLint s);
-GLAPI PFNGLCLEARSTENCILPROC glad_glClearStencil;
-#define glClearStencil glad_glClearStencil
-typedef void (APIENTRYP PFNGLCLEARDEPTHPROC)(GLdouble depth);
-GLAPI PFNGLCLEARDEPTHPROC glad_glClearDepth;
-#define glClearDepth glad_glClearDepth
-typedef void (APIENTRYP PFNGLSTENCILMASKPROC)(GLuint mask);
-GLAPI PFNGLSTENCILMASKPROC glad_glStencilMask;
-#define glStencilMask glad_glStencilMask
-typedef void (APIENTRYP PFNGLCOLORMASKPROC)(GLboolean red, GLboolean green, GLboolean blue, GLboolean alpha);
-GLAPI PFNGLCOLORMASKPROC glad_glColorMask;
-#define glColorMask glad_glColorMask
-typedef void (APIENTRYP PFNGLDEPTHMASKPROC)(GLboolean flag);
-GLAPI PFNGLDEPTHMASKPROC glad_glDepthMask;
-#define glDepthMask glad_glDepthMask
-typedef void (APIENTRYP PFNGLDISABLEPROC)(GLenum cap);
-GLAPI PFNGLDISABLEPROC glad_glDisable;
-#define glDisable glad_glDisable
-typedef void (APIENTRYP PFNGLENABLEPROC)(GLenum cap);
-GLAPI PFNGLENABLEPROC glad_glEnable;
-#define glEnable glad_glEnable
-typedef void (APIENTRYP PFNGLFINISHPROC)(void);
-GLAPI PFNGLFINISHPROC glad_glFinish;
-#define glFinish glad_glFinish
-typedef void (APIENTRYP PFNGLFLUSHPROC)(void);
-GLAPI PFNGLFLUSHPROC glad_glFlush;
-#define glFlush glad_glFlush
-typedef void (APIENTRYP PFNGLBLENDFUNCPROC)(GLenum sfactor, GLenum dfactor);
-GLAPI PFNGLBLENDFUNCPROC glad_glBlendFunc;
-#define glBlendFunc glad_glBlendFunc
-typedef void (APIENTRYP PFNGLLOGICOPPROC)(GLenum opcode);
-GLAPI PFNGLLOGICOPPROC glad_glLogicOp;
-#define glLogicOp glad_glLogicOp
-typedef void (APIENTRYP PFNGLSTENCILFUNCPROC)(GLenum func, GLint ref, GLuint mask);
-GLAPI PFNGLSTENCILFUNCPROC glad_glStencilFunc;
-#define glStencilFunc glad_glStencilFunc
-typedef void (APIENTRYP PFNGLSTENCILOPPROC)(GLenum fail, GLenum zfail, GLenum zpass);
-GLAPI PFNGLSTENCILOPPROC glad_glStencilOp;
-#define glStencilOp glad_glStencilOp
-typedef void (APIENTRYP PFNGLDEPTHFUNCPROC)(GLenum func);
-GLAPI PFNGLDEPTHFUNCPROC glad_glDepthFunc;
-#define glDepthFunc glad_glDepthFunc
-typedef void (APIENTRYP PFNGLPIXELSTOREFPROC)(GLenum pname, GLfloat param);
-GLAPI PFNGLPIXELSTOREFPROC glad_glPixelStoref;
-#define glPixelStoref glad_glPixelStoref
-typedef void (APIENTRYP PFNGLPIXELSTOREIPROC)(GLenum pname, GLint param);
-GLAPI PFNGLPIXELSTOREIPROC glad_glPixelStorei;
-#define glPixelStorei glad_glPixelStorei
-typedef void (APIENTRYP PFNGLREADBUFFERPROC)(GLenum src);
-GLAPI PFNGLREADBUFFERPROC glad_glReadBuffer;
-#define glReadBuffer glad_glReadBuffer
-typedef void (APIENTRYP PFNGLREADPIXELSPROC)(GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, void *pixels);
-GLAPI PFNGLREADPIXELSPROC glad_glReadPixels;
-#define glReadPixels glad_glReadPixels
-typedef void (APIENTRYP PFNGLGETBOOLEANVPROC)(GLenum pname, GLboolean *data);
-GLAPI PFNGLGETBOOLEANVPROC glad_glGetBooleanv;
-#define glGetBooleanv glad_glGetBooleanv
-typedef void (APIENTRYP PFNGLGETDOUBLEVPROC)(GLenum pname, GLdouble *data);
-GLAPI PFNGLGETDOUBLEVPROC glad_glGetDoublev;
-#define glGetDoublev glad_glGetDoublev
-typedef GLenum (APIENTRYP PFNGLGETERRORPROC)(void);
-GLAPI PFNGLGETERRORPROC glad_glGetError;
-#define glGetError glad_glGetError
-typedef void (APIENTRYP PFNGLGETFLOATVPROC)(GLenum pname, GLfloat *data);
-GLAPI PFNGLGETFLOATVPROC glad_glGetFloatv;
-#define glGetFloatv glad_glGetFloatv
-typedef void (APIENTRYP PFNGLGETINTEGERVPROC)(GLenum pname, GLint *data);
-GLAPI PFNGLGETINTEGERVPROC glad_glGetIntegerv;
-#define glGetIntegerv glad_glGetIntegerv
-typedef const GLubyte * (APIENTRYP PFNGLGETSTRINGPROC)(GLenum name);
-GLAPI PFNGLGETSTRINGPROC glad_glGetString;
-#define glGetString glad_glGetString
-typedef void (APIENTRYP PFNGLGETTEXIMAGEPROC)(GLenum target, GLint level, GLenum format, GLenum type, void *pixels);
-GLAPI PFNGLGETTEXIMAGEPROC glad_glGetTexImage;
-#define glGetTexImage glad_glGetTexImage
-typedef void (APIENTRYP PFNGLGETTEXPARAMETERFVPROC)(GLenum target, GLenum pname, GLfloat *params);
-GLAPI PFNGLGETTEXPARAMETERFVPROC glad_glGetTexParameterfv;
-#define glGetTexParameterfv glad_glGetTexParameterfv
-typedef void (APIENTRYP PFNGLGETTEXPARAMETERIVPROC)(GLenum target, GLenum pname, GLint *params);
-GLAPI PFNGLGETTEXPARAMETERIVPROC glad_glGetTexParameteriv;
-#define glGetTexParameteriv glad_glGetTexParameteriv
-typedef void (APIENTRYP PFNGLGETTEXLEVELPARAMETERFVPROC)(GLenum target, GLint level, GLenum pname, GLfloat *params);
-GLAPI PFNGLGETTEXLEVELPARAMETERFVPROC glad_glGetTexLevelParameterfv;
-#define glGetTexLevelParameterfv glad_glGetTexLevelParameterfv
-typedef void (APIENTRYP PFNGLGETTEXLEVELPARAMETERIVPROC)(GLenum target, GLint level, GLenum pname, GLint *params);
-GLAPI PFNGLGETTEXLEVELPARAMETERIVPROC glad_glGetTexLevelParameteriv;
-#define glGetTexLevelParameteriv glad_glGetTexLevelParameteriv
-typedef GLboolean (APIENTRYP PFNGLISENABLEDPROC)(GLenum cap);
-GLAPI PFNGLISENABLEDPROC glad_glIsEnabled;
-#define glIsEnabled glad_glIsEnabled
-typedef void (APIENTRYP PFNGLDEPTHRANGEPROC)(GLdouble n, GLdouble f);
-GLAPI PFNGLDEPTHRANGEPROC glad_glDepthRange;
-#define glDepthRange glad_glDepthRange
-typedef void (APIENTRYP PFNGLVIEWPORTPROC)(GLint x, GLint y, GLsizei width, GLsizei height);
-GLAPI PFNGLVIEWPORTPROC glad_glViewport;
-#define glViewport glad_glViewport
-typedef void (APIENTRYP PFNGLNEWLISTPROC)(GLuint list, GLenum mode);
-GLAPI PFNGLNEWLISTPROC glad_glNewList;
-#define glNewList glad_glNewList
-typedef void (APIENTRYP PFNGLENDLISTPROC)(void);
-GLAPI PFNGLENDLISTPROC glad_glEndList;
-#define glEndList glad_glEndList
-typedef void (APIENTRYP PFNGLCALLLISTPROC)(GLuint list);
-GLAPI PFNGLCALLLISTPROC glad_glCallList;
-#define glCallList glad_glCallList
-typedef void (APIENTRYP PFNGLCALLLISTSPROC)(GLsizei n, GLenum type, const void *lists);
-GLAPI PFNGLCALLLISTSPROC glad_glCallLists;
-#define glCallLists glad_glCallLists
-typedef void (APIENTRYP PFNGLDELETELISTSPROC)(GLuint list, GLsizei range);
-GLAPI PFNGLDELETELISTSPROC glad_glDeleteLists;
-#define glDeleteLists glad_glDeleteLists
-typedef GLuint (APIENTRYP PFNGLGENLISTSPROC)(GLsizei range);
-GLAPI PFNGLGENLISTSPROC glad_glGenLists;
-#define glGenLists glad_glGenLists
-typedef void (APIENTRYP PFNGLLISTBASEPROC)(GLuint base);
-GLAPI PFNGLLISTBASEPROC glad_glListBase;
-#define glListBase glad_glListBase
-typedef void (APIENTRYP PFNGLBEGINPROC)(GLenum mode);
-GLAPI PFNGLBEGINPROC glad_glBegin;
-#define glBegin glad_glBegin
-typedef void (APIENTRYP PFNGLBITMAPPROC)(GLsizei width, GLsizei height, GLfloat xorig, GLfloat yorig, GLfloat xmove, GLfloat ymove, const GLubyte *bitmap);
-GLAPI PFNGLBITMAPPROC glad_glBitmap;
-#define glBitmap glad_glBitmap
-typedef void (APIENTRYP PFNGLCOLOR3BPROC)(GLbyte red, GLbyte green, GLbyte blue);
-GLAPI PFNGLCOLOR3BPROC glad_glColor3b;
-#define glColor3b glad_glColor3b
-typedef void (APIENTRYP PFNGLCOLOR3BVPROC)(const GLbyte *v);
-GLAPI PFNGLCOLOR3BVPROC glad_glColor3bv;
-#define glColor3bv glad_glColor3bv
-typedef void (APIENTRYP PFNGLCOLOR3DPROC)(GLdouble red, GLdouble green, GLdouble blue);
-GLAPI PFNGLCOLOR3DPROC glad_glColor3d;
-#define glColor3d glad_glColor3d
-typedef void (APIENTRYP PFNGLCOLOR3DVPROC)(const GLdouble *v);
-GLAPI PFNGLCOLOR3DVPROC glad_glColor3dv;
-#define glColor3dv glad_glColor3dv
-typedef void (APIENTRYP PFNGLCOLOR3FPROC)(GLfloat red, GLfloat green, GLfloat blue);
-GLAPI PFNGLCOLOR3FPROC glad_glColor3f;
-#define glColor3f glad_glColor3f
-typedef void (APIENTRYP PFNGLCOLOR3FVPROC)(const GLfloat *v);
-GLAPI PFNGLCOLOR3FVPROC glad_glColor3fv;
-#define glColor3fv glad_glColor3fv
-typedef void (APIENTRYP PFNGLCOLOR3IPROC)(GLint red, GLint green, GLint blue);
-GLAPI PFNGLCOLOR3IPROC glad_glColor3i;
-#define glColor3i glad_glColor3i
-typedef void (APIENTRYP PFNGLCOLOR3IVPROC)(const GLint *v);
-GLAPI PFNGLCOLOR3IVPROC glad_glColor3iv;
-#define glColor3iv glad_glColor3iv
-typedef void (APIENTRYP PFNGLCOLOR3SPROC)(GLshort red, GLshort green, GLshort blue);
-GLAPI PFNGLCOLOR3SPROC glad_glColor3s;
-#define glColor3s glad_glColor3s
-typedef void (APIENTRYP PFNGLCOLOR3SVPROC)(const GLshort *v);
-GLAPI PFNGLCOLOR3SVPROC glad_glColor3sv;
-#define glColor3sv glad_glColor3sv
-typedef void (APIENTRYP PFNGLCOLOR3UBPROC)(GLubyte red, GLubyte green, GLubyte blue);
-GLAPI PFNGLCOLOR3UBPROC glad_glColor3ub;
-#define glColor3ub glad_glColor3ub
-typedef void (APIENTRYP PFNGLCOLOR3UBVPROC)(const GLubyte *v);
-GLAPI PFNGLCOLOR3UBVPROC glad_glColor3ubv;
-#define glColor3ubv glad_glColor3ubv
-typedef void (APIENTRYP PFNGLCOLOR3UIPROC)(GLuint red, GLuint green, GLuint blue);
-GLAPI PFNGLCOLOR3UIPROC glad_glColor3ui;
-#define glColor3ui glad_glColor3ui
-typedef void (APIENTRYP PFNGLCOLOR3UIVPROC)(const GLuint *v);
-GLAPI PFNGLCOLOR3UIVPROC glad_glColor3uiv;
-#define glColor3uiv glad_glColor3uiv
-typedef void (APIENTRYP PFNGLCOLOR3USPROC)(GLushort red, GLushort green, GLushort blue);
-GLAPI PFNGLCOLOR3USPROC glad_glColor3us;
-#define glColor3us glad_glColor3us
-typedef void (APIENTRYP PFNGLCOLOR3USVPROC)(const GLushort *v);
-GLAPI PFNGLCOLOR3USVPROC glad_glColor3usv;
-#define glColor3usv glad_glColor3usv
-typedef void (APIENTRYP PFNGLCOLOR4BPROC)(GLbyte red, GLbyte green, GLbyte blue, GLbyte alpha);
-GLAPI PFNGLCOLOR4BPROC glad_glColor4b;
-#define glColor4b glad_glColor4b
-typedef void (APIENTRYP PFNGLCOLOR4BVPROC)(const GLbyte *v);
-GLAPI PFNGLCOLOR4BVPROC glad_glColor4bv;
-#define glColor4bv glad_glColor4bv
-typedef void (APIENTRYP PFNGLCOLOR4DPROC)(GLdouble red, GLdouble green, GLdouble blue, GLdouble alpha);
-GLAPI PFNGLCOLOR4DPROC glad_glColor4d;
-#define glColor4d glad_glColor4d
-typedef void (APIENTRYP PFNGLCOLOR4DVPROC)(const GLdouble *v);
-GLAPI PFNGLCOLOR4DVPROC glad_glColor4dv;
-#define glColor4dv glad_glColor4dv
-typedef void (APIENTRYP PFNGLCOLOR4FPROC)(GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha);
-GLAPI PFNGLCOLOR4FPROC glad_glColor4f;
-#define glColor4f glad_glColor4f
-typedef void (APIENTRYP PFNGLCOLOR4FVPROC)(const GLfloat *v);
-GLAPI PFNGLCOLOR4FVPROC glad_glColor4fv;
-#define glColor4fv glad_glColor4fv
-typedef void (APIENTRYP PFNGLCOLOR4IPROC)(GLint red, GLint green, GLint blue, GLint alpha);
-GLAPI PFNGLCOLOR4IPROC glad_glColor4i;
-#define glColor4i glad_glColor4i
-typedef void (APIENTRYP PFNGLCOLOR4IVPROC)(const GLint *v);
-GLAPI PFNGLCOLOR4IVPROC glad_glColor4iv;
-#define glColor4iv glad_glColor4iv
-typedef void (APIENTRYP PFNGLCOLOR4SPROC)(GLshort red, GLshort green, GLshort blue, GLshort alpha);
-GLAPI PFNGLCOLOR4SPROC glad_glColor4s;
-#define glColor4s glad_glColor4s
-typedef void (APIENTRYP PFNGLCOLOR4SVPROC)(const GLshort *v);
-GLAPI PFNGLCOLOR4SVPROC glad_glColor4sv;
-#define glColor4sv glad_glColor4sv
-typedef void (APIENTRYP PFNGLCOLOR4UBPROC)(GLubyte red, GLubyte green, GLubyte blue, GLubyte alpha);
-GLAPI PFNGLCOLOR4UBPROC glad_glColor4ub;
-#define glColor4ub glad_glColor4ub
-typedef void (APIENTRYP PFNGLCOLOR4UBVPROC)(const GLubyte *v);
-GLAPI PFNGLCOLOR4UBVPROC glad_glColor4ubv;
-#define glColor4ubv glad_glColor4ubv
-typedef void (APIENTRYP PFNGLCOLOR4UIPROC)(GLuint red, GLuint green, GLuint blue, GLuint alpha);
-GLAPI PFNGLCOLOR4UIPROC glad_glColor4ui;
-#define glColor4ui glad_glColor4ui
-typedef void (APIENTRYP PFNGLCOLOR4UIVPROC)(const GLuint *v);
-GLAPI PFNGLCOLOR4UIVPROC glad_glColor4uiv;
-#define glColor4uiv glad_glColor4uiv
-typedef void (APIENTRYP PFNGLCOLOR4USPROC)(GLushort red, GLushort green, GLushort blue, GLushort alpha);
-GLAPI PFNGLCOLOR4USPROC glad_glColor4us;
-#define glColor4us glad_glColor4us
-typedef void (APIENTRYP PFNGLCOLOR4USVPROC)(const GLushort *v);
-GLAPI PFNGLCOLOR4USVPROC glad_glColor4usv;
-#define glColor4usv glad_glColor4usv
-typedef void (APIENTRYP PFNGLEDGEFLAGPROC)(GLboolean flag);
-GLAPI PFNGLEDGEFLAGPROC glad_glEdgeFlag;
-#define glEdgeFlag glad_glEdgeFlag
-typedef void (APIENTRYP PFNGLEDGEFLAGVPROC)(const GLboolean *flag);
-GLAPI PFNGLEDGEFLAGVPROC glad_glEdgeFlagv;
-#define glEdgeFlagv glad_glEdgeFlagv
-typedef void (APIENTRYP PFNGLENDPROC)(void);
-GLAPI PFNGLENDPROC glad_glEnd;
-#define glEnd glad_glEnd
-typedef void (APIENTRYP PFNGLINDEXDPROC)(GLdouble c);
-GLAPI PFNGLINDEXDPROC glad_glIndexd;
-#define glIndexd glad_glIndexd
-typedef void (APIENTRYP PFNGLINDEXDVPROC)(const GLdouble *c);
-GLAPI PFNGLINDEXDVPROC glad_glIndexdv;
-#define glIndexdv glad_glIndexdv
-typedef void (APIENTRYP PFNGLINDEXFPROC)(GLfloat c);
-GLAPI PFNGLINDEXFPROC glad_glIndexf;
-#define glIndexf glad_glIndexf
-typedef void (APIENTRYP PFNGLINDEXFVPROC)(const GLfloat *c);
-GLAPI PFNGLINDEXFVPROC glad_glIndexfv;
-#define glIndexfv glad_glIndexfv
-typedef void (APIENTRYP PFNGLINDEXIPROC)(GLint c);
-GLAPI PFNGLINDEXIPROC glad_glIndexi;
-#define glIndexi glad_glIndexi
-typedef void (APIENTRYP PFNGLINDEXIVPROC)(const GLint *c);
-GLAPI PFNGLINDEXIVPROC glad_glIndexiv;
-#define glIndexiv glad_glIndexiv
-typedef void (APIENTRYP PFNGLINDEXSPROC)(GLshort c);
-GLAPI PFNGLINDEXSPROC glad_glIndexs;
-#define glIndexs glad_glIndexs
-typedef void (APIENTRYP PFNGLINDEXSVPROC)(const GLshort *c);
-GLAPI PFNGLINDEXSVPROC glad_glIndexsv;
-#define glIndexsv glad_glIndexsv
-typedef void (APIENTRYP PFNGLNORMAL3BPROC)(GLbyte nx, GLbyte ny, GLbyte nz);
-GLAPI PFNGLNORMAL3BPROC glad_glNormal3b;
-#define glNormal3b glad_glNormal3b
-typedef void (APIENTRYP PFNGLNORMAL3BVPROC)(const GLbyte *v);
-GLAPI PFNGLNORMAL3BVPROC glad_glNormal3bv;
-#define glNormal3bv glad_glNormal3bv
-typedef void (APIENTRYP PFNGLNORMAL3DPROC)(GLdouble nx, GLdouble ny, GLdouble nz);
-GLAPI PFNGLNORMAL3DPROC glad_glNormal3d;
-#define glNormal3d glad_glNormal3d
-typedef void (APIENTRYP PFNGLNORMAL3DVPROC)(const GLdouble *v);
-GLAPI PFNGLNORMAL3DVPROC glad_glNormal3dv;
-#define glNormal3dv glad_glNormal3dv
-typedef void (APIENTRYP PFNGLNORMAL3FPROC)(GLfloat nx, GLfloat ny, GLfloat nz);
-GLAPI PFNGLNORMAL3FPROC glad_glNormal3f;
-#define glNormal3f glad_glNormal3f
-typedef void (APIENTRYP PFNGLNORMAL3FVPROC)(const GLfloat *v);
-GLAPI PFNGLNORMAL3FVPROC glad_glNormal3fv;
-#define glNormal3fv glad_glNormal3fv
-typedef void (APIENTRYP PFNGLNORMAL3IPROC)(GLint nx, GLint ny, GLint nz);
-GLAPI PFNGLNORMAL3IPROC glad_glNormal3i;
-#define glNormal3i glad_glNormal3i
-typedef void (APIENTRYP PFNGLNORMAL3IVPROC)(const GLint *v);
-GLAPI PFNGLNORMAL3IVPROC glad_glNormal3iv;
-#define glNormal3iv glad_glNormal3iv
-typedef void (APIENTRYP PFNGLNORMAL3SPROC)(GLshort nx, GLshort ny, GLshort nz);
-GLAPI PFNGLNORMAL3SPROC glad_glNormal3s;
-#define glNormal3s glad_glNormal3s
-typedef void (APIENTRYP PFNGLNORMAL3SVPROC)(const GLshort *v);
-GLAPI PFNGLNORMAL3SVPROC glad_glNormal3sv;
-#define glNormal3sv glad_glNormal3sv
-typedef void (APIENTRYP PFNGLRASTERPOS2DPROC)(GLdouble x, GLdouble y);
-GLAPI PFNGLRASTERPOS2DPROC glad_glRasterPos2d;
-#define glRasterPos2d glad_glRasterPos2d
-typedef void (APIENTRYP PFNGLRASTERPOS2DVPROC)(const GLdouble *v);
-GLAPI PFNGLRASTERPOS2DVPROC glad_glRasterPos2dv;
-#define glRasterPos2dv glad_glRasterPos2dv
-typedef void (APIENTRYP PFNGLRASTERPOS2FPROC)(GLfloat x, GLfloat y);
-GLAPI PFNGLRASTERPOS2FPROC glad_glRasterPos2f;
-#define glRasterPos2f glad_glRasterPos2f
-typedef void (APIENTRYP PFNGLRASTERPOS2FVPROC)(const GLfloat *v);
-GLAPI PFNGLRASTERPOS2FVPROC glad_glRasterPos2fv;
-#define glRasterPos2fv glad_glRasterPos2fv
-typedef void (APIENTRYP PFNGLRASTERPOS2IPROC)(GLint x, GLint y);
-GLAPI PFNGLRASTERPOS2IPROC glad_glRasterPos2i;
-#define glRasterPos2i glad_glRasterPos2i
-typedef void (APIENTRYP PFNGLRASTERPOS2IVPROC)(const GLint *v);
-GLAPI PFNGLRASTERPOS2IVPROC glad_glRasterPos2iv;
-#define glRasterPos2iv glad_glRasterPos2iv
-typedef void (APIENTRYP PFNGLRASTERPOS2SPROC)(GLshort x, GLshort y);
-GLAPI PFNGLRASTERPOS2SPROC glad_glRasterPos2s;
-#define glRasterPos2s glad_glRasterPos2s
-typedef void (APIENTRYP PFNGLRASTERPOS2SVPROC)(const GLshort *v);
-GLAPI PFNGLRASTERPOS2SVPROC glad_glRasterPos2sv;
-#define glRasterPos2sv glad_glRasterPos2sv
-typedef void (APIENTRYP PFNGLRASTERPOS3DPROC)(GLdouble x, GLdouble y, GLdouble z);
-GLAPI PFNGLRASTERPOS3DPROC glad_glRasterPos3d;
-#define glRasterPos3d glad_glRasterPos3d
-typedef void (APIENTRYP PFNGLRASTERPOS3DVPROC)(const GLdouble *v);
-GLAPI PFNGLRASTERPOS3DVPROC glad_glRasterPos3dv;
-#define glRasterPos3dv glad_glRasterPos3dv
-typedef void (APIENTRYP PFNGLRASTERPOS3FPROC)(GLfloat x, GLfloat y, GLfloat z);
-GLAPI PFNGLRASTERPOS3FPROC glad_glRasterPos3f;
-#define glRasterPos3f glad_glRasterPos3f
-typedef void (APIENTRYP PFNGLRASTERPOS3FVPROC)(const GLfloat *v);
-GLAPI PFNGLRASTERPOS3FVPROC glad_glRasterPos3fv;
-#define glRasterPos3fv glad_glRasterPos3fv
-typedef void (APIENTRYP PFNGLRASTERPOS3IPROC)(GLint x, GLint y, GLint z);
-GLAPI PFNGLRASTERPOS3IPROC glad_glRasterPos3i;
-#define glRasterPos3i glad_glRasterPos3i
-typedef void (APIENTRYP PFNGLRASTERPOS3IVPROC)(const GLint *v);
-GLAPI PFNGLRASTERPOS3IVPROC glad_glRasterPos3iv;
-#define glRasterPos3iv glad_glRasterPos3iv
-typedef void (APIENTRYP PFNGLRASTERPOS3SPROC)(GLshort x, GLshort y, GLshort z);
-GLAPI PFNGLRASTERPOS3SPROC glad_glRasterPos3s;
-#define glRasterPos3s glad_glRasterPos3s
-typedef void (APIENTRYP PFNGLRASTERPOS3SVPROC)(const GLshort *v);
-GLAPI PFNGLRASTERPOS3SVPROC glad_glRasterPos3sv;
-#define glRasterPos3sv glad_glRasterPos3sv
-typedef void (APIENTRYP PFNGLRASTERPOS4DPROC)(GLdouble x, GLdouble y, GLdouble z, GLdouble w);
-GLAPI PFNGLRASTERPOS4DPROC glad_glRasterPos4d;
-#define glRasterPos4d glad_glRasterPos4d
-typedef void (APIENTRYP PFNGLRASTERPOS4DVPROC)(const GLdouble *v);
-GLAPI PFNGLRASTERPOS4DVPROC glad_glRasterPos4dv;
-#define glRasterPos4dv glad_glRasterPos4dv
-typedef void (APIENTRYP PFNGLRASTERPOS4FPROC)(GLfloat x, GLfloat y, GLfloat z, GLfloat w);
-GLAPI PFNGLRASTERPOS4FPROC glad_glRasterPos4f;
-#define glRasterPos4f glad_glRasterPos4f
-typedef void (APIENTRYP PFNGLRASTERPOS4FVPROC)(const GLfloat *v);
-GLAPI PFNGLRASTERPOS4FVPROC glad_glRasterPos4fv;
-#define glRasterPos4fv glad_glRasterPos4fv
-typedef void (APIENTRYP PFNGLRASTERPOS4IPROC)(GLint x, GLint y, GLint z, GLint w);
-GLAPI PFNGLRASTERPOS4IPROC glad_glRasterPos4i;
-#define glRasterPos4i glad_glRasterPos4i
-typedef void (APIENTRYP PFNGLRASTERPOS4IVPROC)(const GLint *v);
-GLAPI PFNGLRASTERPOS4IVPROC glad_glRasterPos4iv;
-#define glRasterPos4iv glad_glRasterPos4iv
-typedef void (APIENTRYP PFNGLRASTERPOS4SPROC)(GLshort x, GLshort y, GLshort z, GLshort w);
-GLAPI PFNGLRASTERPOS4SPROC glad_glRasterPos4s;
-#define glRasterPos4s glad_glRasterPos4s
-typedef void (APIENTRYP PFNGLRASTERPOS4SVPROC)(const GLshort *v);
-GLAPI PFNGLRASTERPOS4SVPROC glad_glRasterPos4sv;
-#define glRasterPos4sv glad_glRasterPos4sv
-typedef void (APIENTRYP PFNGLRECTDPROC)(GLdouble x1, GLdouble y1, GLdouble x2, GLdouble y2);
-GLAPI PFNGLRECTDPROC glad_glRectd;
-#define glRectd glad_glRectd
-typedef void (APIENTRYP PFNGLRECTDVPROC)(const GLdouble *v1, const GLdouble *v2);
-GLAPI PFNGLRECTDVPROC glad_glRectdv;
-#define glRectdv glad_glRectdv
-typedef void (APIENTRYP PFNGLRECTFPROC)(GLfloat x1, GLfloat y1, GLfloat x2, GLfloat y2);
-GLAPI PFNGLRECTFPROC glad_glRectf;
-#define glRectf glad_glRectf
-typedef void (APIENTRYP PFNGLRECTFVPROC)(const GLfloat *v1, const GLfloat *v2);
-GLAPI PFNGLRECTFVPROC glad_glRectfv;
-#define glRectfv glad_glRectfv
-typedef void (APIENTRYP PFNGLRECTIPROC)(GLint x1, GLint y1, GLint x2, GLint y2);
-GLAPI PFNGLRECTIPROC glad_glRecti;
-#define glRecti glad_glRecti
-typedef void (APIENTRYP PFNGLRECTIVPROC)(const GLint *v1, const GLint *v2);
-GLAPI PFNGLRECTIVPROC glad_glRectiv;
-#define glRectiv glad_glRectiv
-typedef void (APIENTRYP PFNGLRECTSPROC)(GLshort x1, GLshort y1, GLshort x2, GLshort y2);
-GLAPI PFNGLRECTSPROC glad_glRects;
-#define glRects glad_glRects
-typedef void (APIENTRYP PFNGLRECTSVPROC)(const GLshort *v1, const GLshort *v2);
-GLAPI PFNGLRECTSVPROC glad_glRectsv;
-#define glRectsv glad_glRectsv
-typedef void (APIENTRYP PFNGLTEXCOORD1DPROC)(GLdouble s);
-GLAPI PFNGLTEXCOORD1DPROC glad_glTexCoord1d;
-#define glTexCoord1d glad_glTexCoord1d
-typedef void (APIENTRYP PFNGLTEXCOORD1DVPROC)(const GLdouble *v);
-GLAPI PFNGLTEXCOORD1DVPROC glad_glTexCoord1dv;
-#define glTexCoord1dv glad_glTexCoord1dv
-typedef void (APIENTRYP PFNGLTEXCOORD1FPROC)(GLfloat s);
-GLAPI PFNGLTEXCOORD1FPROC glad_glTexCoord1f;
-#define glTexCoord1f glad_glTexCoord1f
-typedef void (APIENTRYP PFNGLTEXCOORD1FVPROC)(const GLfloat *v);
-GLAPI PFNGLTEXCOORD1FVPROC glad_glTexCoord1fv;
-#define glTexCoord1fv glad_glTexCoord1fv
-typedef void (APIENTRYP PFNGLTEXCOORD1IPROC)(GLint s);
-GLAPI PFNGLTEXCOORD1IPROC glad_glTexCoord1i;
-#define glTexCoord1i glad_glTexCoord1i
-typedef void (APIENTRYP PFNGLTEXCOORD1IVPROC)(const GLint *v);
-GLAPI PFNGLTEXCOORD1IVPROC glad_glTexCoord1iv;
-#define glTexCoord1iv glad_glTexCoord1iv
-typedef void (APIENTRYP PFNGLTEXCOORD1SPROC)(GLshort s);
-GLAPI PFNGLTEXCOORD1SPROC glad_glTexCoord1s;
-#define glTexCoord1s glad_glTexCoord1s
-typedef void (APIENTRYP PFNGLTEXCOORD1SVPROC)(const GLshort *v);
-GLAPI PFNGLTEXCOORD1SVPROC glad_glTexCoord1sv;
-#define glTexCoord1sv glad_glTexCoord1sv
-typedef void (APIENTRYP PFNGLTEXCOORD2DPROC)(GLdouble s, GLdouble t);
-GLAPI PFNGLTEXCOORD2DPROC glad_glTexCoord2d;
-#define glTexCoord2d glad_glTexCoord2d
-typedef void (APIENTRYP PFNGLTEXCOORD2DVPROC)(const GLdouble *v);
-GLAPI PFNGLTEXCOORD2DVPROC glad_glTexCoord2dv;
-#define glTexCoord2dv glad_glTexCoord2dv
-typedef void (APIENTRYP PFNGLTEXCOORD2FPROC)(GLfloat s, GLfloat t);
-GLAPI PFNGLTEXCOORD2FPROC glad_glTexCoord2f;
-#define glTexCoord2f glad_glTexCoord2f
-typedef void (APIENTRYP PFNGLTEXCOORD2FVPROC)(const GLfloat *v);
-GLAPI PFNGLTEXCOORD2FVPROC glad_glTexCoord2fv;
-#define glTexCoord2fv glad_glTexCoord2fv
-typedef void (APIENTRYP PFNGLTEXCOORD2IPROC)(GLint s, GLint t);
-GLAPI PFNGLTEXCOORD2IPROC glad_glTexCoord2i;
-#define glTexCoord2i glad_glTexCoord2i
-typedef void (APIENTRYP PFNGLTEXCOORD2IVPROC)(const GLint *v);
-GLAPI PFNGLTEXCOORD2IVPROC glad_glTexCoord2iv;
-#define glTexCoord2iv glad_glTexCoord2iv
-typedef void (APIENTRYP PFNGLTEXCOORD2SPROC)(GLshort s, GLshort t);
-GLAPI PFNGLTEXCOORD2SPROC glad_glTexCoord2s;
-#define glTexCoord2s glad_glTexCoord2s
-typedef void (APIENTRYP PFNGLTEXCOORD2SVPROC)(const GLshort *v);
-GLAPI PFNGLTEXCOORD2SVPROC glad_glTexCoord2sv;
-#define glTexCoord2sv glad_glTexCoord2sv
-typedef void (APIENTRYP PFNGLTEXCOORD3DPROC)(GLdouble s, GLdouble t, GLdouble r);
-GLAPI PFNGLTEXCOORD3DPROC glad_glTexCoord3d;
-#define glTexCoord3d glad_glTexCoord3d
-typedef void (APIENTRYP PFNGLTEXCOORD3DVPROC)(const GLdouble *v);
-GLAPI PFNGLTEXCOORD3DVPROC glad_glTexCoord3dv;
-#define glTexCoord3dv glad_glTexCoord3dv
-typedef void (APIENTRYP PFNGLTEXCOORD3FPROC)(GLfloat s, GLfloat t, GLfloat r);
-GLAPI PFNGLTEXCOORD3FPROC glad_glTexCoord3f;
-#define glTexCoord3f glad_glTexCoord3f
-typedef void (APIENTRYP PFNGLTEXCOORD3FVPROC)(const GLfloat *v);
-GLAPI PFNGLTEXCOORD3FVPROC glad_glTexCoord3fv;
-#define glTexCoord3fv glad_glTexCoord3fv
-typedef void (APIENTRYP PFNGLTEXCOORD3IPROC)(GLint s, GLint t, GLint r);
-GLAPI PFNGLTEXCOORD3IPROC glad_glTexCoord3i;
-#define glTexCoord3i glad_glTexCoord3i
-typedef void (APIENTRYP PFNGLTEXCOORD3IVPROC)(const GLint *v);
-GLAPI PFNGLTEXCOORD3IVPROC glad_glTexCoord3iv;
-#define glTexCoord3iv glad_glTexCoord3iv
-typedef void (APIENTRYP PFNGLTEXCOORD3SPROC)(GLshort s, GLshort t, GLshort r);
-GLAPI PFNGLTEXCOORD3SPROC glad_glTexCoord3s;
-#define glTexCoord3s glad_glTexCoord3s
-typedef void (APIENTRYP PFNGLTEXCOORD3SVPROC)(const GLshort *v);
-GLAPI PFNGLTEXCOORD3SVPROC glad_glTexCoord3sv;
-#define glTexCoord3sv glad_glTexCoord3sv
-typedef void (APIENTRYP PFNGLTEXCOORD4DPROC)(GLdouble s, GLdouble t, GLdouble r, GLdouble q);
-GLAPI PFNGLTEXCOORD4DPROC glad_glTexCoord4d;
-#define glTexCoord4d glad_glTexCoord4d
-typedef void (APIENTRYP PFNGLTEXCOORD4DVPROC)(const GLdouble *v);
-GLAPI PFNGLTEXCOORD4DVPROC glad_glTexCoord4dv;
-#define glTexCoord4dv glad_glTexCoord4dv
-typedef void (APIENTRYP PFNGLTEXCOORD4FPROC)(GLfloat s, GLfloat t, GLfloat r, GLfloat q);
-GLAPI PFNGLTEXCOORD4FPROC glad_glTexCoord4f;
-#define glTexCoord4f glad_glTexCoord4f
-typedef void (APIENTRYP PFNGLTEXCOORD4FVPROC)(const GLfloat *v);
-GLAPI PFNGLTEXCOORD4FVPROC glad_glTexCoord4fv;
-#define glTexCoord4fv glad_glTexCoord4fv
-typedef void (APIENTRYP PFNGLTEXCOORD4IPROC)(GLint s, GLint t, GLint r, GLint q);
-GLAPI PFNGLTEXCOORD4IPROC glad_glTexCoord4i;
-#define glTexCoord4i glad_glTexCoord4i
-typedef void (APIENTRYP PFNGLTEXCOORD4IVPROC)(const GLint *v);
-GLAPI PFNGLTEXCOORD4IVPROC glad_glTexCoord4iv;
-#define glTexCoord4iv glad_glTexCoord4iv
-typedef void (APIENTRYP PFNGLTEXCOORD4SPROC)(GLshort s, GLshort t, GLshort r, GLshort q);
-GLAPI PFNGLTEXCOORD4SPROC glad_glTexCoord4s;
-#define glTexCoord4s glad_glTexCoord4s
-typedef void (APIENTRYP PFNGLTEXCOORD4SVPROC)(const GLshort *v);
-GLAPI PFNGLTEXCOORD4SVPROC glad_glTexCoord4sv;
-#define glTexCoord4sv glad_glTexCoord4sv
-typedef void (APIENTRYP PFNGLVERTEX2DPROC)(GLdouble x, GLdouble y);
-GLAPI PFNGLVERTEX2DPROC glad_glVertex2d;
-#define glVertex2d glad_glVertex2d
-typedef void (APIENTRYP PFNGLVERTEX2DVPROC)(const GLdouble *v);
-GLAPI PFNGLVERTEX2DVPROC glad_glVertex2dv;
-#define glVertex2dv glad_glVertex2dv
-typedef void (APIENTRYP PFNGLVERTEX2FPROC)(GLfloat x, GLfloat y);
-GLAPI PFNGLVERTEX2FPROC glad_glVertex2f;
-#define glVertex2f glad_glVertex2f
-typedef void (APIENTRYP PFNGLVERTEX2FVPROC)(const GLfloat *v);
-GLAPI PFNGLVERTEX2FVPROC glad_glVertex2fv;
-#define glVertex2fv glad_glVertex2fv
-typedef void (APIENTRYP PFNGLVERTEX2IPROC)(GLint x, GLint y);
-GLAPI PFNGLVERTEX2IPROC glad_glVertex2i;
-#define glVertex2i glad_glVertex2i
-typedef void (APIENTRYP PFNGLVERTEX2IVPROC)(const GLint *v);
-GLAPI PFNGLVERTEX2IVPROC glad_glVertex2iv;
-#define glVertex2iv glad_glVertex2iv
-typedef void (APIENTRYP PFNGLVERTEX2SPROC)(GLshort x, GLshort y);
-GLAPI PFNGLVERTEX2SPROC glad_glVertex2s;
-#define glVertex2s glad_glVertex2s
-typedef void (APIENTRYP PFNGLVERTEX2SVPROC)(const GLshort *v);
-GLAPI PFNGLVERTEX2SVPROC glad_glVertex2sv;
-#define glVertex2sv glad_glVertex2sv
-typedef void (APIENTRYP PFNGLVERTEX3DPROC)(GLdouble x, GLdouble y, GLdouble z);
-GLAPI PFNGLVERTEX3DPROC glad_glVertex3d;
-#define glVertex3d glad_glVertex3d
-typedef void (APIENTRYP PFNGLVERTEX3DVPROC)(const GLdouble *v);
-GLAPI PFNGLVERTEX3DVPROC glad_glVertex3dv;
-#define glVertex3dv glad_glVertex3dv
-typedef void (APIENTRYP PFNGLVERTEX3FPROC)(GLfloat x, GLfloat y, GLfloat z);
-GLAPI PFNGLVERTEX3FPROC glad_glVertex3f;
-#define glVertex3f glad_glVertex3f
-typedef void (APIENTRYP PFNGLVERTEX3FVPROC)(const GLfloat *v);
-GLAPI PFNGLVERTEX3FVPROC glad_glVertex3fv;
-#define glVertex3fv glad_glVertex3fv
-typedef void (APIENTRYP PFNGLVERTEX3IPROC)(GLint x, GLint y, GLint z);
-GLAPI PFNGLVERTEX3IPROC glad_glVertex3i;
-#define glVertex3i glad_glVertex3i
-typedef void (APIENTRYP PFNGLVERTEX3IVPROC)(const GLint *v);
-GLAPI PFNGLVERTEX3IVPROC glad_glVertex3iv;
-#define glVertex3iv glad_glVertex3iv
-typedef void (APIENTRYP PFNGLVERTEX3SPROC)(GLshort x, GLshort y, GLshort z);
-GLAPI PFNGLVERTEX3SPROC glad_glVertex3s;
-#define glVertex3s glad_glVertex3s
-typedef void (APIENTRYP PFNGLVERTEX3SVPROC)(const GLshort *v);
-GLAPI PFNGLVERTEX3SVPROC glad_glVertex3sv;
-#define glVertex3sv glad_glVertex3sv
-typedef void (APIENTRYP PFNGLVERTEX4DPROC)(GLdouble x, GLdouble y, GLdouble z, GLdouble w);
-GLAPI PFNGLVERTEX4DPROC glad_glVertex4d;
-#define glVertex4d glad_glVertex4d
-typedef void (APIENTRYP PFNGLVERTEX4DVPROC)(const GLdouble *v);
-GLAPI PFNGLVERTEX4DVPROC glad_glVertex4dv;
-#define glVertex4dv glad_glVertex4dv
-typedef void (APIENTRYP PFNGLVERTEX4FPROC)(GLfloat x, GLfloat y, GLfloat z, GLfloat w);
-GLAPI PFNGLVERTEX4FPROC glad_glVertex4f;
-#define glVertex4f glad_glVertex4f
-typedef void (APIENTRYP PFNGLVERTEX4FVPROC)(const GLfloat *v);
-GLAPI PFNGLVERTEX4FVPROC glad_glVertex4fv;
-#define glVertex4fv glad_glVertex4fv
-typedef void (APIENTRYP PFNGLVERTEX4IPROC)(GLint x, GLint y, GLint z, GLint w);
-GLAPI PFNGLVERTEX4IPROC glad_glVertex4i;
-#define glVertex4i glad_glVertex4i
-typedef void (APIENTRYP PFNGLVERTEX4IVPROC)(const GLint *v);
-GLAPI PFNGLVERTEX4IVPROC glad_glVertex4iv;
-#define glVertex4iv glad_glVertex4iv
-typedef void (APIENTRYP PFNGLVERTEX4SPROC)(GLshort x, GLshort y, GLshort z, GLshort w);
-GLAPI PFNGLVERTEX4SPROC glad_glVertex4s;
-#define glVertex4s glad_glVertex4s
-typedef void (APIENTRYP PFNGLVERTEX4SVPROC)(const GLshort *v);
-GLAPI PFNGLVERTEX4SVPROC glad_glVertex4sv;
-#define glVertex4sv glad_glVertex4sv
-typedef void (APIENTRYP PFNGLCLIPPLANEPROC)(GLenum plane, const GLdouble *equation);
-GLAPI PFNGLCLIPPLANEPROC glad_glClipPlane;
-#define glClipPlane glad_glClipPlane
-typedef void (APIENTRYP PFNGLCOLORMATERIALPROC)(GLenum face, GLenum mode);
-GLAPI PFNGLCOLORMATERIALPROC glad_glColorMaterial;
-#define glColorMaterial glad_glColorMaterial
-typedef void (APIENTRYP PFNGLFOGFPROC)(GLenum pname, GLfloat param);
-GLAPI PFNGLFOGFPROC glad_glFogf;
-#define glFogf glad_glFogf
-typedef void (APIENTRYP PFNGLFOGFVPROC)(GLenum pname, const GLfloat *params);
-GLAPI PFNGLFOGFVPROC glad_glFogfv;
-#define glFogfv glad_glFogfv
-typedef void (APIENTRYP PFNGLFOGIPROC)(GLenum pname, GLint param);
-GLAPI PFNGLFOGIPROC glad_glFogi;
-#define glFogi glad_glFogi
-typedef void (APIENTRYP PFNGLFOGIVPROC)(GLenum pname, const GLint *params);
-GLAPI PFNGLFOGIVPROC glad_glFogiv;
-#define glFogiv glad_glFogiv
-typedef void (APIENTRYP PFNGLLIGHTFPROC)(GLenum light, GLenum pname, GLfloat param);
-GLAPI PFNGLLIGHTFPROC glad_glLightf;
-#define glLightf glad_glLightf
-typedef void (APIENTRYP PFNGLLIGHTFVPROC)(GLenum light, GLenum pname, const GLfloat *params);
-GLAPI PFNGLLIGHTFVPROC glad_glLightfv;
-#define glLightfv glad_glLightfv
-typedef void (APIENTRYP PFNGLLIGHTIPROC)(GLenum light, GLenum pname, GLint param);
-GLAPI PFNGLLIGHTIPROC glad_glLighti;
-#define glLighti glad_glLighti
-typedef void (APIENTRYP PFNGLLIGHTIVPROC)(GLenum light, GLenum pname, const GLint *params);
-GLAPI PFNGLLIGHTIVPROC glad_glLightiv;
-#define glLightiv glad_glLightiv
-typedef void (APIENTRYP PFNGLLIGHTMODELFPROC)(GLenum pname, GLfloat param);
-GLAPI PFNGLLIGHTMODELFPROC glad_glLightModelf;
-#define glLightModelf glad_glLightModelf
-typedef void (APIENTRYP PFNGLLIGHTMODELFVPROC)(GLenum pname, const GLfloat *params);
-GLAPI PFNGLLIGHTMODELFVPROC glad_glLightModelfv;
-#define glLightModelfv glad_glLightModelfv
-typedef void (APIENTRYP PFNGLLIGHTMODELIPROC)(GLenum pname, GLint param);
-GLAPI PFNGLLIGHTMODELIPROC glad_glLightModeli;
-#define glLightModeli glad_glLightModeli
-typedef void (APIENTRYP PFNGLLIGHTMODELIVPROC)(GLenum pname, const GLint *params);
-GLAPI PFNGLLIGHTMODELIVPROC glad_glLightModeliv;
-#define glLightModeliv glad_glLightModeliv
-typedef void (APIENTRYP PFNGLLINESTIPPLEPROC)(GLint factor, GLushort pattern);
-GLAPI PFNGLLINESTIPPLEPROC glad_glLineStipple;
-#define glLineStipple glad_glLineStipple
-typedef void (APIENTRYP PFNGLMATERIALFPROC)(GLenum face, GLenum pname, GLfloat param);
-GLAPI PFNGLMATERIALFPROC glad_glMaterialf;
-#define glMaterialf glad_glMaterialf
-typedef void (APIENTRYP PFNGLMATERIALFVPROC)(GLenum face, GLenum pname, const GLfloat *params);
-GLAPI PFNGLMATERIALFVPROC glad_glMaterialfv;
-#define glMaterialfv glad_glMaterialfv
-typedef void (APIENTRYP PFNGLMATERIALIPROC)(GLenum face, GLenum pname, GLint param);
-GLAPI PFNGLMATERIALIPROC glad_glMateriali;
-#define glMateriali glad_glMateriali
-typedef void (APIENTRYP PFNGLMATERIALIVPROC)(GLenum face, GLenum pname, const GLint *params);
-GLAPI PFNGLMATERIALIVPROC glad_glMaterialiv;
-#define glMaterialiv glad_glMaterialiv
-typedef void (APIENTRYP PFNGLPOLYGONSTIPPLEPROC)(const GLubyte *mask);
-GLAPI PFNGLPOLYGONSTIPPLEPROC glad_glPolygonStipple;
-#define glPolygonStipple glad_glPolygonStipple
-typedef void (APIENTRYP PFNGLSHADEMODELPROC)(GLenum mode);
-GLAPI PFNGLSHADEMODELPROC glad_glShadeModel;
-#define glShadeModel glad_glShadeModel
-typedef void (APIENTRYP PFNGLTEXENVFPROC)(GLenum target, GLenum pname, GLfloat param);
-GLAPI PFNGLTEXENVFPROC glad_glTexEnvf;
-#define glTexEnvf glad_glTexEnvf
-typedef void (APIENTRYP PFNGLTEXENVFVPROC)(GLenum target, GLenum pname, const GLfloat *params);
-GLAPI PFNGLTEXENVFVPROC glad_glTexEnvfv;
-#define glTexEnvfv glad_glTexEnvfv
-typedef void (APIENTRYP PFNGLTEXENVIPROC)(GLenum target, GLenum pname, GLint param);
-GLAPI PFNGLTEXENVIPROC glad_glTexEnvi;
-#define glTexEnvi glad_glTexEnvi
-typedef void (APIENTRYP PFNGLTEXENVIVPROC)(GLenum target, GLenum pname, const GLint *params);
-GLAPI PFNGLTEXENVIVPROC glad_glTexEnviv;
-#define glTexEnviv glad_glTexEnviv
-typedef void (APIENTRYP PFNGLTEXGENDPROC)(GLenum coord, GLenum pname, GLdouble param);
-GLAPI PFNGLTEXGENDPROC glad_glTexGend;
-#define glTexGend glad_glTexGend
-typedef void (APIENTRYP PFNGLTEXGENDVPROC)(GLenum coord, GLenum pname, const GLdouble *params);
-GLAPI PFNGLTEXGENDVPROC glad_glTexGendv;
-#define glTexGendv glad_glTexGendv
-typedef void (APIENTRYP PFNGLTEXGENFPROC)(GLenum coord, GLenum pname, GLfloat param);
-GLAPI PFNGLTEXGENFPROC glad_glTexGenf;
-#define glTexGenf glad_glTexGenf
-typedef void (APIENTRYP PFNGLTEXGENFVPROC)(GLenum coord, GLenum pname, const GLfloat *params);
-GLAPI PFNGLTEXGENFVPROC glad_glTexGenfv;
-#define glTexGenfv glad_glTexGenfv
-typedef void (APIENTRYP PFNGLTEXGENIPROC)(GLenum coord, GLenum pname, GLint param);
-GLAPI PFNGLTEXGENIPROC glad_glTexGeni;
-#define glTexGeni glad_glTexGeni
-typedef void (APIENTRYP PFNGLTEXGENIVPROC)(GLenum coord, GLenum pname, const GLint *params);
-GLAPI PFNGLTEXGENIVPROC glad_glTexGeniv;
-#define glTexGeniv glad_glTexGeniv
-typedef void (APIENTRYP PFNGLFEEDBACKBUFFERPROC)(GLsizei size, GLenum type, GLfloat *buffer);
-GLAPI PFNGLFEEDBACKBUFFERPROC glad_glFeedbackBuffer;
-#define glFeedbackBuffer glad_glFeedbackBuffer
-typedef void (APIENTRYP PFNGLSELECTBUFFERPROC)(GLsizei size, GLuint *buffer);
-GLAPI PFNGLSELECTBUFFERPROC glad_glSelectBuffer;
-#define glSelectBuffer glad_glSelectBuffer
-typedef GLint (APIENTRYP PFNGLRENDERMODEPROC)(GLenum mode);
-GLAPI PFNGLRENDERMODEPROC glad_glRenderMode;
-#define glRenderMode glad_glRenderMode
-typedef void (APIENTRYP PFNGLINITNAMESPROC)(void);
-GLAPI PFNGLINITNAMESPROC glad_glInitNames;
-#define glInitNames glad_glInitNames
-typedef void (APIENTRYP PFNGLLOADNAMEPROC)(GLuint name);
-GLAPI PFNGLLOADNAMEPROC glad_glLoadName;
-#define glLoadName glad_glLoadName
-typedef void (APIENTRYP PFNGLPASSTHROUGHPROC)(GLfloat token);
-GLAPI PFNGLPASSTHROUGHPROC glad_glPassThrough;
-#define glPassThrough glad_glPassThrough
-typedef void (APIENTRYP PFNGLPOPNAMEPROC)(void);
-GLAPI PFNGLPOPNAMEPROC glad_glPopName;
-#define glPopName glad_glPopName
-typedef void (APIENTRYP PFNGLPUSHNAMEPROC)(GLuint name);
-GLAPI PFNGLPUSHNAMEPROC glad_glPushName;
-#define glPushName glad_glPushName
-typedef void (APIENTRYP PFNGLCLEARACCUMPROC)(GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha);
-GLAPI PFNGLCLEARACCUMPROC glad_glClearAccum;
-#define glClearAccum glad_glClearAccum
-typedef void (APIENTRYP PFNGLCLEARINDEXPROC)(GLfloat c);
-GLAPI PFNGLCLEARINDEXPROC glad_glClearIndex;
-#define glClearIndex glad_glClearIndex
-typedef void (APIENTRYP PFNGLINDEXMASKPROC)(GLuint mask);
-GLAPI PFNGLINDEXMASKPROC glad_glIndexMask;
-#define glIndexMask glad_glIndexMask
-typedef void (APIENTRYP PFNGLACCUMPROC)(GLenum op, GLfloat value);
-GLAPI PFNGLACCUMPROC glad_glAccum;
-#define glAccum glad_glAccum
-typedef void (APIENTRYP PFNGLPOPATTRIBPROC)(void);
-GLAPI PFNGLPOPATTRIBPROC glad_glPopAttrib;
-#define glPopAttrib glad_glPopAttrib
-typedef void (APIENTRYP PFNGLPUSHATTRIBPROC)(GLbitfield mask);
-GLAPI PFNGLPUSHATTRIBPROC glad_glPushAttrib;
-#define glPushAttrib glad_glPushAttrib
-typedef void (APIENTRYP PFNGLMAP1DPROC)(GLenum target, GLdouble u1, GLdouble u2, GLint stride, GLint order, const GLdouble *points);
-GLAPI PFNGLMAP1DPROC glad_glMap1d;
-#define glMap1d glad_glMap1d
-typedef void (APIENTRYP PFNGLMAP1FPROC)(GLenum target, GLfloat u1, GLfloat u2, GLint stride, GLint order, const GLfloat *points);
-GLAPI PFNGLMAP1FPROC glad_glMap1f;
-#define glMap1f glad_glMap1f
-typedef void (APIENTRYP PFNGLMAP2DPROC)(GLenum target, GLdouble u1, GLdouble u2, GLint ustride, GLint uorder, GLdouble v1, GLdouble v2, GLint vstride, GLint vorder, const GLdouble *points);
-GLAPI PFNGLMAP2DPROC glad_glMap2d;
-#define glMap2d glad_glMap2d
-typedef void (APIENTRYP PFNGLMAP2FPROC)(GLenum target, GLfloat u1, GLfloat u2, GLint ustride, GLint uorder, GLfloat v1, GLfloat v2, GLint vstride, GLint vorder, const GLfloat *points);
-GLAPI PFNGLMAP2FPROC glad_glMap2f;
-#define glMap2f glad_glMap2f
-typedef void (APIENTRYP PFNGLMAPGRID1DPROC)(GLint un, GLdouble u1, GLdouble u2);
-GLAPI PFNGLMAPGRID1DPROC glad_glMapGrid1d;
-#define glMapGrid1d glad_glMapGrid1d
-typedef void (APIENTRYP PFNGLMAPGRID1FPROC)(GLint un, GLfloat u1, GLfloat u2);
-GLAPI PFNGLMAPGRID1FPROC glad_glMapGrid1f;
-#define glMapGrid1f glad_glMapGrid1f
-typedef void (APIENTRYP PFNGLMAPGRID2DPROC)(GLint un, GLdouble u1, GLdouble u2, GLint vn, GLdouble v1, GLdouble v2);
-GLAPI PFNGLMAPGRID2DPROC glad_glMapGrid2d;
-#define glMapGrid2d glad_glMapGrid2d
-typedef void (APIENTRYP PFNGLMAPGRID2FPROC)(GLint un, GLfloat u1, GLfloat u2, GLint vn, GLfloat v1, GLfloat v2);
-GLAPI PFNGLMAPGRID2FPROC glad_glMapGrid2f;
-#define glMapGrid2f glad_glMapGrid2f
-typedef void (APIENTRYP PFNGLEVALCOORD1DPROC)(GLdouble u);
-GLAPI PFNGLEVALCOORD1DPROC glad_glEvalCoord1d;
-#define glEvalCoord1d glad_glEvalCoord1d
-typedef void (APIENTRYP PFNGLEVALCOORD1DVPROC)(const GLdouble *u);
-GLAPI PFNGLEVALCOORD1DVPROC glad_glEvalCoord1dv;
-#define glEvalCoord1dv glad_glEvalCoord1dv
-typedef void (APIENTRYP PFNGLEVALCOORD1FPROC)(GLfloat u);
-GLAPI PFNGLEVALCOORD1FPROC glad_glEvalCoord1f;
-#define glEvalCoord1f glad_glEvalCoord1f
-typedef void (APIENTRYP PFNGLEVALCOORD1FVPROC)(const GLfloat *u);
-GLAPI PFNGLEVALCOORD1FVPROC glad_glEvalCoord1fv;
-#define glEvalCoord1fv glad_glEvalCoord1fv
-typedef void (APIENTRYP PFNGLEVALCOORD2DPROC)(GLdouble u, GLdouble v);
-GLAPI PFNGLEVALCOORD2DPROC glad_glEvalCoord2d;
-#define glEvalCoord2d glad_glEvalCoord2d
-typedef void (APIENTRYP PFNGLEVALCOORD2DVPROC)(const GLdouble *u);
-GLAPI PFNGLEVALCOORD2DVPROC glad_glEvalCoord2dv;
-#define glEvalCoord2dv glad_glEvalCoord2dv
-typedef void (APIENTRYP PFNGLEVALCOORD2FPROC)(GLfloat u, GLfloat v);
-GLAPI PFNGLEVALCOORD2FPROC glad_glEvalCoord2f;
-#define glEvalCoord2f glad_glEvalCoord2f
-typedef void (APIENTRYP PFNGLEVALCOORD2FVPROC)(const GLfloat *u);
-GLAPI PFNGLEVALCOORD2FVPROC glad_glEvalCoord2fv;
-#define glEvalCoord2fv glad_glEvalCoord2fv
-typedef void (APIENTRYP PFNGLEVALMESH1PROC)(GLenum mode, GLint i1, GLint i2);
-GLAPI PFNGLEVALMESH1PROC glad_glEvalMesh1;
-#define glEvalMesh1 glad_glEvalMesh1
-typedef void (APIENTRYP PFNGLEVALPOINT1PROC)(GLint i);
-GLAPI PFNGLEVALPOINT1PROC glad_glEvalPoint1;
-#define glEvalPoint1 glad_glEvalPoint1
-typedef void (APIENTRYP PFNGLEVALMESH2PROC)(GLenum mode, GLint i1, GLint i2, GLint j1, GLint j2);
-GLAPI PFNGLEVALMESH2PROC glad_glEvalMesh2;
-#define glEvalMesh2 glad_glEvalMesh2
-typedef void (APIENTRYP PFNGLEVALPOINT2PROC)(GLint i, GLint j);
-GLAPI PFNGLEVALPOINT2PROC glad_glEvalPoint2;
-#define glEvalPoint2 glad_glEvalPoint2
-typedef void (APIENTRYP PFNGLALPHAFUNCPROC)(GLenum func, GLfloat ref);
-GLAPI PFNGLALPHAFUNCPROC glad_glAlphaFunc;
-#define glAlphaFunc glad_glAlphaFunc
-typedef void (APIENTRYP PFNGLPIXELZOOMPROC)(GLfloat xfactor, GLfloat yfactor);
-GLAPI PFNGLPIXELZOOMPROC glad_glPixelZoom;
-#define glPixelZoom glad_glPixelZoom
-typedef void (APIENTRYP PFNGLPIXELTRANSFERFPROC)(GLenum pname, GLfloat param);
-GLAPI PFNGLPIXELTRANSFERFPROC glad_glPixelTransferf;
-#define glPixelTransferf glad_glPixelTransferf
-typedef void (APIENTRYP PFNGLPIXELTRANSFERIPROC)(GLenum pname, GLint param);
-GLAPI PFNGLPIXELTRANSFERIPROC glad_glPixelTransferi;
-#define glPixelTransferi glad_glPixelTransferi
-typedef void (APIENTRYP PFNGLPIXELMAPFVPROC)(GLenum map, GLsizei mapsize, const GLfloat *values);
-GLAPI PFNGLPIXELMAPFVPROC glad_glPixelMapfv;
-#define glPixelMapfv glad_glPixelMapfv
-typedef void (APIENTRYP PFNGLPIXELMAPUIVPROC)(GLenum map, GLsizei mapsize, const GLuint *values);
-GLAPI PFNGLPIXELMAPUIVPROC glad_glPixelMapuiv;
-#define glPixelMapuiv glad_glPixelMapuiv
-typedef void (APIENTRYP PFNGLPIXELMAPUSVPROC)(GLenum map, GLsizei mapsize, const GLushort *values);
-GLAPI PFNGLPIXELMAPUSVPROC glad_glPixelMapusv;
-#define glPixelMapusv glad_glPixelMapusv
-typedef void (APIENTRYP PFNGLCOPYPIXELSPROC)(GLint x, GLint y, GLsizei width, GLsizei height, GLenum type);
-GLAPI PFNGLCOPYPIXELSPROC glad_glCopyPixels;
-#define glCopyPixels glad_glCopyPixels
-typedef void (APIENTRYP PFNGLDRAWPIXELSPROC)(GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels);
-GLAPI PFNGLDRAWPIXELSPROC glad_glDrawPixels;
-#define glDrawPixels glad_glDrawPixels
-typedef void (APIENTRYP PFNGLGETCLIPPLANEPROC)(GLenum plane, GLdouble *equation);
-GLAPI PFNGLGETCLIPPLANEPROC glad_glGetClipPlane;
-#define glGetClipPlane glad_glGetClipPlane
-typedef void (APIENTRYP PFNGLGETLIGHTFVPROC)(GLenum light, GLenum pname, GLfloat *params);
-GLAPI PFNGLGETLIGHTFVPROC glad_glGetLightfv;
-#define glGetLightfv glad_glGetLightfv
-typedef void (APIENTRYP PFNGLGETLIGHTIVPROC)(GLenum light, GLenum pname, GLint *params);
-GLAPI PFNGLGETLIGHTIVPROC glad_glGetLightiv;
-#define glGetLightiv glad_glGetLightiv
-typedef void (APIENTRYP PFNGLGETMAPDVPROC)(GLenum target, GLenum query, GLdouble *v);
-GLAPI PFNGLGETMAPDVPROC glad_glGetMapdv;
-#define glGetMapdv glad_glGetMapdv
-typedef void (APIENTRYP PFNGLGETMAPFVPROC)(GLenum target, GLenum query, GLfloat *v);
-GLAPI PFNGLGETMAPFVPROC glad_glGetMapfv;
-#define glGetMapfv glad_glGetMapfv
-typedef void (APIENTRYP PFNGLGETMAPIVPROC)(GLenum target, GLenum query, GLint *v);
-GLAPI PFNGLGETMAPIVPROC glad_glGetMapiv;
-#define glGetMapiv glad_glGetMapiv
-typedef void (APIENTRYP PFNGLGETMATERIALFVPROC)(GLenum face, GLenum pname, GLfloat *params);
-GLAPI PFNGLGETMATERIALFVPROC glad_glGetMaterialfv;
-#define glGetMaterialfv glad_glGetMaterialfv
-typedef void (APIENTRYP PFNGLGETMATERIALIVPROC)(GLenum face, GLenum pname, GLint *params);
-GLAPI PFNGLGETMATERIALIVPROC glad_glGetMaterialiv;
-#define glGetMaterialiv glad_glGetMaterialiv
-typedef void (APIENTRYP PFNGLGETPIXELMAPFVPROC)(GLenum map, GLfloat *values);
-GLAPI PFNGLGETPIXELMAPFVPROC glad_glGetPixelMapfv;
-#define glGetPixelMapfv glad_glGetPixelMapfv
-typedef void (APIENTRYP PFNGLGETPIXELMAPUIVPROC)(GLenum map, GLuint *values);
-GLAPI PFNGLGETPIXELMAPUIVPROC glad_glGetPixelMapuiv;
-#define glGetPixelMapuiv glad_glGetPixelMapuiv
-typedef void (APIENTRYP PFNGLGETPIXELMAPUSVPROC)(GLenum map, GLushort *values);
-GLAPI PFNGLGETPIXELMAPUSVPROC glad_glGetPixelMapusv;
-#define glGetPixelMapusv glad_glGetPixelMapusv
-typedef void (APIENTRYP PFNGLGETPOLYGONSTIPPLEPROC)(GLubyte *mask);
-GLAPI PFNGLGETPOLYGONSTIPPLEPROC glad_glGetPolygonStipple;
-#define glGetPolygonStipple glad_glGetPolygonStipple
-typedef void (APIENTRYP PFNGLGETTEXENVFVPROC)(GLenum target, GLenum pname, GLfloat *params);
-GLAPI PFNGLGETTEXENVFVPROC glad_glGetTexEnvfv;
-#define glGetTexEnvfv glad_glGetTexEnvfv
-typedef void (APIENTRYP PFNGLGETTEXENVIVPROC)(GLenum target, GLenum pname, GLint *params);
-GLAPI PFNGLGETTEXENVIVPROC glad_glGetTexEnviv;
-#define glGetTexEnviv glad_glGetTexEnviv
-typedef void (APIENTRYP PFNGLGETTEXGENDVPROC)(GLenum coord, GLenum pname, GLdouble *params);
-GLAPI PFNGLGETTEXGENDVPROC glad_glGetTexGendv;
-#define glGetTexGendv glad_glGetTexGendv
-typedef void (APIENTRYP PFNGLGETTEXGENFVPROC)(GLenum coord, GLenum pname, GLfloat *params);
-GLAPI PFNGLGETTEXGENFVPROC glad_glGetTexGenfv;
-#define glGetTexGenfv glad_glGetTexGenfv
-typedef void (APIENTRYP PFNGLGETTEXGENIVPROC)(GLenum coord, GLenum pname, GLint *params);
-GLAPI PFNGLGETTEXGENIVPROC glad_glGetTexGeniv;
-#define glGetTexGeniv glad_glGetTexGeniv
-typedef GLboolean (APIENTRYP PFNGLISLISTPROC)(GLuint list);
-GLAPI PFNGLISLISTPROC glad_glIsList;
-#define glIsList glad_glIsList
-typedef void (APIENTRYP PFNGLFRUSTUMPROC)(GLdouble left, GLdouble right, GLdouble bottom, GLdouble top, GLdouble zNear, GLdouble zFar);
-GLAPI PFNGLFRUSTUMPROC glad_glFrustum;
-#define glFrustum glad_glFrustum
-typedef void (APIENTRYP PFNGLLOADIDENTITYPROC)(void);
-GLAPI PFNGLLOADIDENTITYPROC glad_glLoadIdentity;
-#define glLoadIdentity glad_glLoadIdentity
-typedef void (APIENTRYP PFNGLLOADMATRIXFPROC)(const GLfloat *m);
-GLAPI PFNGLLOADMATRIXFPROC glad_glLoadMatrixf;
-#define glLoadMatrixf glad_glLoadMatrixf
-typedef void (APIENTRYP PFNGLLOADMATRIXDPROC)(const GLdouble *m);
-GLAPI PFNGLLOADMATRIXDPROC glad_glLoadMatrixd;
-#define glLoadMatrixd glad_glLoadMatrixd
-typedef void (APIENTRYP PFNGLMATRIXMODEPROC)(GLenum mode);
-GLAPI PFNGLMATRIXMODEPROC glad_glMatrixMode;
-#define glMatrixMode glad_glMatrixMode
-typedef void (APIENTRYP PFNGLMULTMATRIXFPROC)(const GLfloat *m);
-GLAPI PFNGLMULTMATRIXFPROC glad_glMultMatrixf;
-#define glMultMatrixf glad_glMultMatrixf
-typedef void (APIENTRYP PFNGLMULTMATRIXDPROC)(const GLdouble *m);
-GLAPI PFNGLMULTMATRIXDPROC glad_glMultMatrixd;
-#define glMultMatrixd glad_glMultMatrixd
-typedef void (APIENTRYP PFNGLORTHOPROC)(GLdouble left, GLdouble right, GLdouble bottom, GLdouble top, GLdouble zNear, GLdouble zFar);
-GLAPI PFNGLORTHOPROC glad_glOrtho;
-#define glOrtho glad_glOrtho
-typedef void (APIENTRYP PFNGLPOPMATRIXPROC)(void);
-GLAPI PFNGLPOPMATRIXPROC glad_glPopMatrix;
-#define glPopMatrix glad_glPopMatrix
-typedef void (APIENTRYP PFNGLPUSHMATRIXPROC)(void);
-GLAPI PFNGLPUSHMATRIXPROC glad_glPushMatrix;
-#define glPushMatrix glad_glPushMatrix
-typedef void (APIENTRYP PFNGLROTATEDPROC)(GLdouble angle, GLdouble x, GLdouble y, GLdouble z);
-GLAPI PFNGLROTATEDPROC glad_glRotated;
-#define glRotated glad_glRotated
-typedef void (APIENTRYP PFNGLROTATEFPROC)(GLfloat angle, GLfloat x, GLfloat y, GLfloat z);
-GLAPI PFNGLROTATEFPROC glad_glRotatef;
-#define glRotatef glad_glRotatef
-typedef void (APIENTRYP PFNGLSCALEDPROC)(GLdouble x, GLdouble y, GLdouble z);
-GLAPI PFNGLSCALEDPROC glad_glScaled;
-#define glScaled glad_glScaled
-typedef void (APIENTRYP PFNGLSCALEFPROC)(GLfloat x, GLfloat y, GLfloat z);
-GLAPI PFNGLSCALEFPROC glad_glScalef;
-#define glScalef glad_glScalef
-typedef void (APIENTRYP PFNGLTRANSLATEDPROC)(GLdouble x, GLdouble y, GLdouble z);
-GLAPI PFNGLTRANSLATEDPROC glad_glTranslated;
-#define glTranslated glad_glTranslated
-typedef void (APIENTRYP PFNGLTRANSLATEFPROC)(GLfloat x, GLfloat y, GLfloat z);
-GLAPI PFNGLTRANSLATEFPROC glad_glTranslatef;
-#define glTranslatef glad_glTranslatef
-#endif
-#ifndef GL_VERSION_1_1
-#define GL_VERSION_1_1 1
-GLAPI int GLAD_GL_VERSION_1_1;
-typedef void (APIENTRYP PFNGLDRAWARRAYSPROC)(GLenum mode, GLint first, GLsizei count);
-GLAPI PFNGLDRAWARRAYSPROC glad_glDrawArrays;
-#define glDrawArrays glad_glDrawArrays
-typedef void (APIENTRYP PFNGLDRAWELEMENTSPROC)(GLenum mode, GLsizei count, GLenum type, const void *indices);
-GLAPI PFNGLDRAWELEMENTSPROC glad_glDrawElements;
-#define glDrawElements glad_glDrawElements
-typedef void (APIENTRYP PFNGLGETPOINTERVPROC)(GLenum pname, void **params);
-GLAPI PFNGLGETPOINTERVPROC glad_glGetPointerv;
-#define glGetPointerv glad_glGetPointerv
-typedef void (APIENTRYP PFNGLPOLYGONOFFSETPROC)(GLfloat factor, GLfloat units);
-GLAPI PFNGLPOLYGONOFFSETPROC glad_glPolygonOffset;
-#define glPolygonOffset glad_glPolygonOffset
-typedef void (APIENTRYP PFNGLCOPYTEXIMAGE1DPROC)(GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLint border);
-GLAPI PFNGLCOPYTEXIMAGE1DPROC glad_glCopyTexImage1D;
-#define glCopyTexImage1D glad_glCopyTexImage1D
-typedef void (APIENTRYP PFNGLCOPYTEXIMAGE2DPROC)(GLenum target, GLint level, GLenum internalformat, GLint x, GLint y, GLsizei width, GLsizei height, GLint border);
-GLAPI PFNGLCOPYTEXIMAGE2DPROC glad_glCopyTexImage2D;
-#define glCopyTexImage2D glad_glCopyTexImage2D
-typedef void (APIENTRYP PFNGLCOPYTEXSUBIMAGE1DPROC)(GLenum target, GLint level, GLint xoffset, GLint x, GLint y, GLsizei width);
-GLAPI PFNGLCOPYTEXSUBIMAGE1DPROC glad_glCopyTexSubImage1D;
-#define glCopyTexSubImage1D glad_glCopyTexSubImage1D
-typedef void (APIENTRYP PFNGLCOPYTEXSUBIMAGE2DPROC)(GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint x, GLint y, GLsizei width, GLsizei height);
-GLAPI PFNGLCOPYTEXSUBIMAGE2DPROC glad_glCopyTexSubImage2D;
-#define glCopyTexSubImage2D glad_glCopyTexSubImage2D
-typedef void (APIENTRYP PFNGLTEXSUBIMAGE1DPROC)(GLenum target, GLint level, GLint xoffset, GLsizei width, GLenum format, GLenum type, const void *pixels);
-GLAPI PFNGLTEXSUBIMAGE1DPROC glad_glTexSubImage1D;
-#define glTexSubImage1D glad_glTexSubImage1D
-typedef void (APIENTRYP PFNGLTEXSUBIMAGE2DPROC)(GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void *pixels);
-GLAPI PFNGLTEXSUBIMAGE2DPROC glad_glTexSubImage2D;
-#define glTexSubImage2D glad_glTexSubImage2D
-typedef void (APIENTRYP PFNGLBINDTEXTUREPROC)(GLenum target, GLuint texture);
-GLAPI PFNGLBINDTEXTUREPROC glad_glBindTexture;
-#define glBindTexture glad_glBindTexture
-typedef void (APIENTRYP PFNGLDELETETEXTURESPROC)(GLsizei n, const GLuint *textures);
-GLAPI PFNGLDELETETEXTURESPROC glad_glDeleteTextures;
-#define glDeleteTextures glad_glDeleteTextures
-typedef void (APIENTRYP PFNGLGENTEXTURESPROC)(GLsizei n, GLuint *textures);
-GLAPI PFNGLGENTEXTURESPROC glad_glGenTextures;
-#define glGenTextures glad_glGenTextures
-typedef GLboolean (APIENTRYP PFNGLISTEXTUREPROC)(GLuint texture);
-GLAPI PFNGLISTEXTUREPROC glad_glIsTexture;
-#define glIsTexture glad_glIsTexture
-typedef void (APIENTRYP PFNGLARRAYELEMENTPROC)(GLint i);
-GLAPI PFNGLARRAYELEMENTPROC glad_glArrayElement;
-#define glArrayElement glad_glArrayElement
-typedef void (APIENTRYP PFNGLCOLORPOINTERPROC)(GLint size, GLenum type, GLsizei stride, const void *pointer);
-GLAPI PFNGLCOLORPOINTERPROC glad_glColorPointer;
-#define glColorPointer glad_glColorPointer
-typedef void (APIENTRYP PFNGLDISABLECLIENTSTATEPROC)(GLenum array);
-GLAPI PFNGLDISABLECLIENTSTATEPROC glad_glDisableClientState;
-#define glDisableClientState glad_glDisableClientState
-typedef void (APIENTRYP PFNGLEDGEFLAGPOINTERPROC)(GLsizei stride, const void *pointer);
-GLAPI PFNGLEDGEFLAGPOINTERPROC glad_glEdgeFlagPointer;
-#define glEdgeFlagPointer glad_glEdgeFlagPointer
-typedef void (APIENTRYP PFNGLENABLECLIENTSTATEPROC)(GLenum array);
-GLAPI PFNGLENABLECLIENTSTATEPROC glad_glEnableClientState;
-#define glEnableClientState glad_glEnableClientState
-typedef void (APIENTRYP PFNGLINDEXPOINTERPROC)(GLenum type, GLsizei stride, const void *pointer);
-GLAPI PFNGLINDEXPOINTERPROC glad_glIndexPointer;
-#define glIndexPointer glad_glIndexPointer
-typedef void (APIENTRYP PFNGLINTERLEAVEDARRAYSPROC)(GLenum format, GLsizei stride, const void *pointer);
-GLAPI PFNGLINTERLEAVEDARRAYSPROC glad_glInterleavedArrays;
-#define glInterleavedArrays glad_glInterleavedArrays
-typedef void (APIENTRYP PFNGLNORMALPOINTERPROC)(GLenum type, GLsizei stride, const void *pointer);
-GLAPI PFNGLNORMALPOINTERPROC glad_glNormalPointer;
-#define glNormalPointer glad_glNormalPointer
-typedef void (APIENTRYP PFNGLTEXCOORDPOINTERPROC)(GLint size, GLenum type, GLsizei stride, const void *pointer);
-GLAPI PFNGLTEXCOORDPOINTERPROC glad_glTexCoordPointer;
-#define glTexCoordPointer glad_glTexCoordPointer
-typedef void (APIENTRYP PFNGLVERTEXPOINTERPROC)(GLint size, GLenum type, GLsizei stride, const void *pointer);
-GLAPI PFNGLVERTEXPOINTERPROC glad_glVertexPointer;
-#define glVertexPointer glad_glVertexPointer
-typedef GLboolean (APIENTRYP PFNGLARETEXTURESRESIDENTPROC)(GLsizei n, const GLuint *textures, GLboolean *residences);
-GLAPI PFNGLARETEXTURESRESIDENTPROC glad_glAreTexturesResident;
-#define glAreTexturesResident glad_glAreTexturesResident
-typedef void (APIENTRYP PFNGLPRIORITIZETEXTURESPROC)(GLsizei n, const GLuint *textures, const GLfloat *priorities);
-GLAPI PFNGLPRIORITIZETEXTURESPROC glad_glPrioritizeTextures;
-#define glPrioritizeTextures glad_glPrioritizeTextures
-typedef void (APIENTRYP PFNGLINDEXUBPROC)(GLubyte c);
-GLAPI PFNGLINDEXUBPROC glad_glIndexub;
-#define glIndexub glad_glIndexub
-typedef void (APIENTRYP PFNGLINDEXUBVPROC)(const GLubyte *c);
-GLAPI PFNGLINDEXUBVPROC glad_glIndexubv;
-#define glIndexubv glad_glIndexubv
-typedef void (APIENTRYP PFNGLPOPCLIENTATTRIBPROC)(void);
-GLAPI PFNGLPOPCLIENTATTRIBPROC glad_glPopClientAttrib;
-#define glPopClientAttrib glad_glPopClientAttrib
-typedef void (APIENTRYP PFNGLPUSHCLIENTATTRIBPROC)(GLbitfield mask);
-GLAPI PFNGLPUSHCLIENTATTRIBPROC glad_glPushClientAttrib;
-#define glPushClientAttrib glad_glPushClientAttrib
-#endif
-#ifndef GL_VERSION_1_2
-#define GL_VERSION_1_2 1
-GLAPI int GLAD_GL_VERSION_1_2;
-typedef void (APIENTRYP PFNGLDRAWRANGEELEMENTSPROC)(GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices);
-GLAPI PFNGLDRAWRANGEELEMENTSPROC glad_glDrawRangeElements;
-#define glDrawRangeElements glad_glDrawRangeElements
-typedef void (APIENTRYP PFNGLTEXIMAGE3DPROC)(GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void *pixels);
-GLAPI PFNGLTEXIMAGE3DPROC glad_glTexImage3D;
-#define glTexImage3D glad_glTexImage3D
-typedef void (APIENTRYP PFNGLTEXSUBIMAGE3DPROC)(GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void *pixels);
-GLAPI PFNGLTEXSUBIMAGE3DPROC glad_glTexSubImage3D;
-#define glTexSubImage3D glad_glTexSubImage3D
-typedef void (APIENTRYP PFNGLCOPYTEXSUBIMAGE3DPROC)(GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLint x, GLint y, GLsizei width, GLsizei height);
-GLAPI PFNGLCOPYTEXSUBIMAGE3DPROC glad_glCopyTexSubImage3D;
-#define glCopyTexSubImage3D glad_glCopyTexSubImage3D
-#endif
-#ifndef GL_VERSION_1_3
-#define GL_VERSION_1_3 1
-GLAPI int GLAD_GL_VERSION_1_3;
-typedef void (APIENTRYP PFNGLACTIVETEXTUREPROC)(GLenum texture);
-GLAPI PFNGLACTIVETEXTUREPROC glad_glActiveTexture;
-#define glActiveTexture glad_glActiveTexture
-typedef void (APIENTRYP PFNGLSAMPLECOVERAGEPROC)(GLfloat value, GLboolean invert);
-GLAPI PFNGLSAMPLECOVERAGEPROC glad_glSampleCoverage;
-#define glSampleCoverage glad_glSampleCoverage
-typedef void (APIENTRYP PFNGLCOMPRESSEDTEXIMAGE3DPROC)(GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void *data);
-GLAPI PFNGLCOMPRESSEDTEXIMAGE3DPROC glad_glCompressedTexImage3D;
-#define glCompressedTexImage3D glad_glCompressedTexImage3D
-typedef void (APIENTRYP PFNGLCOMPRESSEDTEXIMAGE2DPROC)(GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void *data);
-GLAPI PFNGLCOMPRESSEDTEXIMAGE2DPROC glad_glCompressedTexImage2D;
-#define glCompressedTexImage2D glad_glCompressedTexImage2D
-typedef void (APIENTRYP PFNGLCOMPRESSEDTEXIMAGE1DPROC)(GLenum target, GLint level, GLenum internalformat, GLsizei width, GLint border, GLsizei imageSize, const void *data);
-GLAPI PFNGLCOMPRESSEDTEXIMAGE1DPROC glad_glCompressedTexImage1D;
-#define glCompressedTexImage1D glad_glCompressedTexImage1D
-typedef void (APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE3DPROC)(GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *data);
-GLAPI PFNGLCOMPRESSEDTEXSUBIMAGE3DPROC glad_glCompressedTexSubImage3D;
-#define glCompressedTexSubImage3D glad_glCompressedTexSubImage3D
-typedef void (APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE2DPROC)(GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *data);
-GLAPI PFNGLCOMPRESSEDTEXSUBIMAGE2DPROC glad_glCompressedTexSubImage2D;
-#define glCompressedTexSubImage2D glad_glCompressedTexSubImage2D
-typedef void (APIENTRYP PFNGLCOMPRESSEDTEXSUBIMAGE1DPROC)(GLenum target, GLint level, GLint xoffset, GLsizei width, GLenum format, GLsizei imageSize, const void *data);
-GLAPI PFNGLCOMPRESSEDTEXSUBIMAGE1DPROC glad_glCompressedTexSubImage1D;
-#define glCompressedTexSubImage1D glad_glCompressedTexSubImage1D
-typedef void (APIENTRYP PFNGLGETCOMPRESSEDTEXIMAGEPROC)(GLenum target, GLint level, void *img);
-GLAPI PFNGLGETCOMPRESSEDTEXIMAGEPROC glad_glGetCompressedTexImage;
-#define glGetCompressedTexImage glad_glGetCompressedTexImage
-typedef void (APIENTRYP PFNGLCLIENTACTIVETEXTUREPROC)(GLenum texture);
-GLAPI PFNGLCLIENTACTIVETEXTUREPROC glad_glClientActiveTexture;
-#define glClientActiveTexture glad_glClientActiveTexture
-typedef void (APIENTRYP PFNGLMULTITEXCOORD1DPROC)(GLenum target, GLdouble s);
-GLAPI PFNGLMULTITEXCOORD1DPROC glad_glMultiTexCoord1d;
-#define glMultiTexCoord1d glad_glMultiTexCoord1d
-typedef void (APIENTRYP PFNGLMULTITEXCOORD1DVPROC)(GLenum target, const GLdouble *v);
-GLAPI PFNGLMULTITEXCOORD1DVPROC glad_glMultiTexCoord1dv;
-#define glMultiTexCoord1dv glad_glMultiTexCoord1dv
-typedef void (APIENTRYP PFNGLMULTITEXCOORD1FPROC)(GLenum target, GLfloat s);
-GLAPI PFNGLMULTITEXCOORD1FPROC glad_glMultiTexCoord1f;
-#define glMultiTexCoord1f glad_glMultiTexCoord1f
-typedef void (APIENTRYP PFNGLMULTITEXCOORD1FVPROC)(GLenum target, const GLfloat *v);
-GLAPI PFNGLMULTITEXCOORD1FVPROC glad_glMultiTexCoord1fv;
-#define glMultiTexCoord1fv glad_glMultiTexCoord1fv
-typedef void (APIENTRYP PFNGLMULTITEXCOORD1IPROC)(GLenum target, GLint s);
-GLAPI PFNGLMULTITEXCOORD1IPROC glad_glMultiTexCoord1i;
-#define glMultiTexCoord1i glad_glMultiTexCoord1i
-typedef void (APIENTRYP PFNGLMULTITEXCOORD1IVPROC)(GLenum target, const GLint *v);
-GLAPI PFNGLMULTITEXCOORD1IVPROC glad_glMultiTexCoord1iv;
-#define glMultiTexCoord1iv glad_glMultiTexCoord1iv
-typedef void (APIENTRYP PFNGLMULTITEXCOORD1SPROC)(GLenum target, GLshort s);
-GLAPI PFNGLMULTITEXCOORD1SPROC glad_glMultiTexCoord1s;
-#define glMultiTexCoord1s glad_glMultiTexCoord1s
-typedef void (APIENTRYP PFNGLMULTITEXCOORD1SVPROC)(GLenum target, const GLshort *v);
-GLAPI PFNGLMULTITEXCOORD1SVPROC glad_glMultiTexCoord1sv;
-#define glMultiTexCoord1sv glad_glMultiTexCoord1sv
-typedef void (APIENTRYP PFNGLMULTITEXCOORD2DPROC)(GLenum target, GLdouble s, GLdouble t);
-GLAPI PFNGLMULTITEXCOORD2DPROC glad_glMultiTexCoord2d;
-#define glMultiTexCoord2d glad_glMultiTexCoord2d
-typedef void (APIENTRYP PFNGLMULTITEXCOORD2DVPROC)(GLenum target, const GLdouble *v);
-GLAPI PFNGLMULTITEXCOORD2DVPROC glad_glMultiTexCoord2dv;
-#define glMultiTexCoord2dv glad_glMultiTexCoord2dv
-typedef void (APIENTRYP PFNGLMULTITEXCOORD2FPROC)(GLenum target, GLfloat s, GLfloat t);
-GLAPI PFNGLMULTITEXCOORD2FPROC glad_glMultiTexCoord2f;
-#define glMultiTexCoord2f glad_glMultiTexCoord2f
-typedef void (APIENTRYP PFNGLMULTITEXCOORD2FVPROC)(GLenum target, const GLfloat *v);
-GLAPI PFNGLMULTITEXCOORD2FVPROC glad_glMultiTexCoord2fv;
-#define glMultiTexCoord2fv glad_glMultiTexCoord2fv
-typedef void (APIENTRYP PFNGLMULTITEXCOORD2IPROC)(GLenum target, GLint s, GLint t);
-GLAPI PFNGLMULTITEXCOORD2IPROC glad_glMultiTexCoord2i;
-#define glMultiTexCoord2i glad_glMultiTexCoord2i
-typedef void (APIENTRYP PFNGLMULTITEXCOORD2IVPROC)(GLenum target, const GLint *v);
-GLAPI PFNGLMULTITEXCOORD2IVPROC glad_glMultiTexCoord2iv;
-#define glMultiTexCoord2iv glad_glMultiTexCoord2iv
-typedef void (APIENTRYP PFNGLMULTITEXCOORD2SPROC)(GLenum target, GLshort s, GLshort t);
-GLAPI PFNGLMULTITEXCOORD2SPROC glad_glMultiTexCoord2s;
-#define glMultiTexCoord2s glad_glMultiTexCoord2s
-typedef void (APIENTRYP PFNGLMULTITEXCOORD2SVPROC)(GLenum target, const GLshort *v);
-GLAPI PFNGLMULTITEXCOORD2SVPROC glad_glMultiTexCoord2sv;
-#define glMultiTexCoord2sv glad_glMultiTexCoord2sv
-typedef void (APIENTRYP PFNGLMULTITEXCOORD3DPROC)(GLenum target, GLdouble s, GLdouble t, GLdouble r);
-GLAPI PFNGLMULTITEXCOORD3DPROC glad_glMultiTexCoord3d;
-#define glMultiTexCoord3d glad_glMultiTexCoord3d
-typedef void (APIENTRYP PFNGLMULTITEXCOORD3DVPROC)(GLenum target, const GLdouble *v);
-GLAPI PFNGLMULTITEXCOORD3DVPROC glad_glMultiTexCoord3dv;
-#define glMultiTexCoord3dv glad_glMultiTexCoord3dv
-typedef void (APIENTRYP PFNGLMULTITEXCOORD3FPROC)(GLenum target, GLfloat s, GLfloat t, GLfloat r);
-GLAPI PFNGLMULTITEXCOORD3FPROC glad_glMultiTexCoord3f;
-#define glMultiTexCoord3f glad_glMultiTexCoord3f
-typedef void (APIENTRYP PFNGLMULTITEXCOORD3FVPROC)(GLenum target, const GLfloat *v);
-GLAPI PFNGLMULTITEXCOORD3FVPROC glad_glMultiTexCoord3fv;
-#define glMultiTexCoord3fv glad_glMultiTexCoord3fv
-typedef void (APIENTRYP PFNGLMULTITEXCOORD3IPROC)(GLenum target, GLint s, GLint t, GLint r);
-GLAPI PFNGLMULTITEXCOORD3IPROC glad_glMultiTexCoord3i;
-#define glMultiTexCoord3i glad_glMultiTexCoord3i
-typedef void (APIENTRYP PFNGLMULTITEXCOORD3IVPROC)(GLenum target, const GLint *v);
-GLAPI PFNGLMULTITEXCOORD3IVPROC glad_glMultiTexCoord3iv;
-#define glMultiTexCoord3iv glad_glMultiTexCoord3iv
-typedef void (APIENTRYP PFNGLMULTITEXCOORD3SPROC)(GLenum target, GLshort s, GLshort t, GLshort r);
-GLAPI PFNGLMULTITEXCOORD3SPROC glad_glMultiTexCoord3s;
-#define glMultiTexCoord3s glad_glMultiTexCoord3s
-typedef void (APIENTRYP PFNGLMULTITEXCOORD3SVPROC)(GLenum target, const GLshort *v);
-GLAPI PFNGLMULTITEXCOORD3SVPROC glad_glMultiTexCoord3sv;
-#define glMultiTexCoord3sv glad_glMultiTexCoord3sv
-typedef void (APIENTRYP PFNGLMULTITEXCOORD4DPROC)(GLenum target, GLdouble s, GLdouble t, GLdouble r, GLdouble q);
-GLAPI PFNGLMULTITEXCOORD4DPROC glad_glMultiTexCoord4d;
-#define glMultiTexCoord4d glad_glMultiTexCoord4d
-typedef void (APIENTRYP PFNGLMULTITEXCOORD4DVPROC)(GLenum target, const GLdouble *v);
-GLAPI PFNGLMULTITEXCOORD4DVPROC glad_glMultiTexCoord4dv;
-#define glMultiTexCoord4dv glad_glMultiTexCoord4dv
-typedef void (APIENTRYP PFNGLMULTITEXCOORD4FPROC)(GLenum target, GLfloat s, GLfloat t, GLfloat r, GLfloat q);
-GLAPI PFNGLMULTITEXCOORD4FPROC glad_glMultiTexCoord4f;
-#define glMultiTexCoord4f glad_glMultiTexCoord4f
-typedef void (APIENTRYP PFNGLMULTITEXCOORD4FVPROC)(GLenum target, const GLfloat *v);
-GLAPI PFNGLMULTITEXCOORD4FVPROC glad_glMultiTexCoord4fv;
-#define glMultiTexCoord4fv glad_glMultiTexCoord4fv
-typedef void (APIENTRYP PFNGLMULTITEXCOORD4IPROC)(GLenum target, GLint s, GLint t, GLint r, GLint q);
-GLAPI PFNGLMULTITEXCOORD4IPROC glad_glMultiTexCoord4i;
-#define glMultiTexCoord4i glad_glMultiTexCoord4i
-typedef void (APIENTRYP PFNGLMULTITEXCOORD4IVPROC)(GLenum target, const GLint *v);
-GLAPI PFNGLMULTITEXCOORD4IVPROC glad_glMultiTexCoord4iv;
-#define glMultiTexCoord4iv glad_glMultiTexCoord4iv
-typedef void (APIENTRYP PFNGLMULTITEXCOORD4SPROC)(GLenum target, GLshort s, GLshort t, GLshort r, GLshort q);
-GLAPI PFNGLMULTITEXCOORD4SPROC glad_glMultiTexCoord4s;
-#define glMultiTexCoord4s glad_glMultiTexCoord4s
-typedef void (APIENTRYP PFNGLMULTITEXCOORD4SVPROC)(GLenum target, const GLshort *v);
-GLAPI PFNGLMULTITEXCOORD4SVPROC glad_glMultiTexCoord4sv;
-#define glMultiTexCoord4sv glad_glMultiTexCoord4sv
-typedef void (APIENTRYP PFNGLLOADTRANSPOSEMATRIXFPROC)(const GLfloat *m);
-GLAPI PFNGLLOADTRANSPOSEMATRIXFPROC glad_glLoadTransposeMatrixf;
-#define glLoadTransposeMatrixf glad_glLoadTransposeMatrixf
-typedef void (APIENTRYP PFNGLLOADTRANSPOSEMATRIXDPROC)(const GLdouble *m);
-GLAPI PFNGLLOADTRANSPOSEMATRIXDPROC glad_glLoadTransposeMatrixd;
-#define glLoadTransposeMatrixd glad_glLoadTransposeMatrixd
-typedef void (APIENTRYP PFNGLMULTTRANSPOSEMATRIXFPROC)(const GLfloat *m);
-GLAPI PFNGLMULTTRANSPOSEMATRIXFPROC glad_glMultTransposeMatrixf;
-#define glMultTransposeMatrixf glad_glMultTransposeMatrixf
-typedef void (APIENTRYP PFNGLMULTTRANSPOSEMATRIXDPROC)(const GLdouble *m);
-GLAPI PFNGLMULTTRANSPOSEMATRIXDPROC glad_glMultTransposeMatrixd;
-#define glMultTransposeMatrixd glad_glMultTransposeMatrixd
-#endif
-#ifndef GL_VERSION_1_4
-#define GL_VERSION_1_4 1
-GLAPI int GLAD_GL_VERSION_1_4;
-typedef void (APIENTRYP PFNGLBLENDFUNCSEPARATEPROC)(GLenum sfactorRGB, GLenum dfactorRGB, GLenum sfactorAlpha, GLenum dfactorAlpha);
-GLAPI PFNGLBLENDFUNCSEPARATEPROC glad_glBlendFuncSeparate;
-#define glBlendFuncSeparate glad_glBlendFuncSeparate
-typedef void (APIENTRYP PFNGLMULTIDRAWARRAYSPROC)(GLenum mode, const GLint *first, const GLsizei *count, GLsizei drawcount);
-GLAPI PFNGLMULTIDRAWARRAYSPROC glad_glMultiDrawArrays;
-#define glMultiDrawArrays glad_glMultiDrawArrays
-typedef void (APIENTRYP PFNGLMULTIDRAWELEMENTSPROC)(GLenum mode, const GLsizei *count, GLenum type, const void *const*indices, GLsizei drawcount);
-GLAPI PFNGLMULTIDRAWELEMENTSPROC glad_glMultiDrawElements;
-#define glMultiDrawElements glad_glMultiDrawElements
-typedef void (APIENTRYP PFNGLPOINTPARAMETERFPROC)(GLenum pname, GLfloat param);
-GLAPI PFNGLPOINTPARAMETERFPROC glad_glPointParameterf;
-#define glPointParameterf glad_glPointParameterf
-typedef void (APIENTRYP PFNGLPOINTPARAMETERFVPROC)(GLenum pname, const GLfloat *params);
-GLAPI PFNGLPOINTPARAMETERFVPROC glad_glPointParameterfv;
-#define glPointParameterfv glad_glPointParameterfv
-typedef void (APIENTRYP PFNGLPOINTPARAMETERIPROC)(GLenum pname, GLint param);
-GLAPI PFNGLPOINTPARAMETERIPROC glad_glPointParameteri;
-#define glPointParameteri glad_glPointParameteri
-typedef void (APIENTRYP PFNGLPOINTPARAMETERIVPROC)(GLenum pname, const GLint *params);
-GLAPI PFNGLPOINTPARAMETERIVPROC glad_glPointParameteriv;
-#define glPointParameteriv glad_glPointParameteriv
-typedef void (APIENTRYP PFNGLFOGCOORDFPROC)(GLfloat coord);
-GLAPI PFNGLFOGCOORDFPROC glad_glFogCoordf;
-#define glFogCoordf glad_glFogCoordf
-typedef void (APIENTRYP PFNGLFOGCOORDFVPROC)(const GLfloat *coord);
-GLAPI PFNGLFOGCOORDFVPROC glad_glFogCoordfv;
-#define glFogCoordfv glad_glFogCoordfv
-typedef void (APIENTRYP PFNGLFOGCOORDDPROC)(GLdouble coord);
-GLAPI PFNGLFOGCOORDDPROC glad_glFogCoordd;
-#define glFogCoordd glad_glFogCoordd
-typedef void (APIENTRYP PFNGLFOGCOORDDVPROC)(const GLdouble *coord);
-GLAPI PFNGLFOGCOORDDVPROC glad_glFogCoorddv;
-#define glFogCoorddv glad_glFogCoorddv
-typedef void (APIENTRYP PFNGLFOGCOORDPOINTERPROC)(GLenum type, GLsizei stride, const void *pointer);
-GLAPI PFNGLFOGCOORDPOINTERPROC glad_glFogCoordPointer;
-#define glFogCoordPointer glad_glFogCoordPointer
-typedef void (APIENTRYP PFNGLSECONDARYCOLOR3BPROC)(GLbyte red, GLbyte green, GLbyte blue);
-GLAPI PFNGLSECONDARYCOLOR3BPROC glad_glSecondaryColor3b;
-#define glSecondaryColor3b glad_glSecondaryColor3b
-typedef void (APIENTRYP PFNGLSECONDARYCOLOR3BVPROC)(const GLbyte *v);
-GLAPI PFNGLSECONDARYCOLOR3BVPROC glad_glSecondaryColor3bv;
-#define glSecondaryColor3bv glad_glSecondaryColor3bv
-typedef void (APIENTRYP PFNGLSECONDARYCOLOR3DPROC)(GLdouble red, GLdouble green, GLdouble blue);
-GLAPI PFNGLSECONDARYCOLOR3DPROC glad_glSecondaryColor3d;
-#define glSecondaryColor3d glad_glSecondaryColor3d
-typedef void (APIENTRYP PFNGLSECONDARYCOLOR3DVPROC)(const GLdouble *v);
-GLAPI PFNGLSECONDARYCOLOR3DVPROC glad_glSecondaryColor3dv;
-#define glSecondaryColor3dv glad_glSecondaryColor3dv
-typedef void (APIENTRYP PFNGLSECONDARYCOLOR3FPROC)(GLfloat red, GLfloat green, GLfloat blue);
-GLAPI PFNGLSECONDARYCOLOR3FPROC glad_glSecondaryColor3f;
-#define glSecondaryColor3f glad_glSecondaryColor3f
-typedef void (APIENTRYP PFNGLSECONDARYCOLOR3FVPROC)(const GLfloat *v);
-GLAPI PFNGLSECONDARYCOLOR3FVPROC glad_glSecondaryColor3fv;
-#define glSecondaryColor3fv glad_glSecondaryColor3fv
-typedef void (APIENTRYP PFNGLSECONDARYCOLOR3IPROC)(GLint red, GLint green, GLint blue);
-GLAPI PFNGLSECONDARYCOLOR3IPROC glad_glSecondaryColor3i;
-#define glSecondaryColor3i glad_glSecondaryColor3i
-typedef void (APIENTRYP PFNGLSECONDARYCOLOR3IVPROC)(const GLint *v);
-GLAPI PFNGLSECONDARYCOLOR3IVPROC glad_glSecondaryColor3iv;
-#define glSecondaryColor3iv glad_glSecondaryColor3iv
-typedef void (APIENTRYP PFNGLSECONDARYCOLOR3SPROC)(GLshort red, GLshort green, GLshort blue);
-GLAPI PFNGLSECONDARYCOLOR3SPROC glad_glSecondaryColor3s;
-#define glSecondaryColor3s glad_glSecondaryColor3s
-typedef void (APIENTRYP PFNGLSECONDARYCOLOR3SVPROC)(const GLshort *v);
-GLAPI PFNGLSECONDARYCOLOR3SVPROC glad_glSecondaryColor3sv;
-#define glSecondaryColor3sv glad_glSecondaryColor3sv
-typedef void (APIENTRYP PFNGLSECONDARYCOLOR3UBPROC)(GLubyte red, GLubyte green, GLubyte blue);
-GLAPI PFNGLSECONDARYCOLOR3UBPROC glad_glSecondaryColor3ub;
-#define glSecondaryColor3ub glad_glSecondaryColor3ub
-typedef void (APIENTRYP PFNGLSECONDARYCOLOR3UBVPROC)(const GLubyte *v);
-GLAPI PFNGLSECONDARYCOLOR3UBVPROC glad_glSecondaryColor3ubv;
-#define glSecondaryColor3ubv glad_glSecondaryColor3ubv
-typedef void (APIENTRYP PFNGLSECONDARYCOLOR3UIPROC)(GLuint red, GLuint green, GLuint blue);
-GLAPI PFNGLSECONDARYCOLOR3UIPROC glad_glSecondaryColor3ui;
-#define glSecondaryColor3ui glad_glSecondaryColor3ui
-typedef void (APIENTRYP PFNGLSECONDARYCOLOR3UIVPROC)(const GLuint *v);
-GLAPI PFNGLSECONDARYCOLOR3UIVPROC glad_glSecondaryColor3uiv;
-#define glSecondaryColor3uiv glad_glSecondaryColor3uiv
-typedef void (APIENTRYP PFNGLSECONDARYCOLOR3USPROC)(GLushort red, GLushort green, GLushort blue);
-GLAPI PFNGLSECONDARYCOLOR3USPROC glad_glSecondaryColor3us;
-#define glSecondaryColor3us glad_glSecondaryColor3us
-typedef void (APIENTRYP PFNGLSECONDARYCOLOR3USVPROC)(const GLushort *v);
-GLAPI PFNGLSECONDARYCOLOR3USVPROC glad_glSecondaryColor3usv;
-#define glSecondaryColor3usv glad_glSecondaryColor3usv
-typedef void (APIENTRYP PFNGLSECONDARYCOLORPOINTERPROC)(GLint size, GLenum type, GLsizei stride, const void *pointer);
-GLAPI PFNGLSECONDARYCOLORPOINTERPROC glad_glSecondaryColorPointer;
-#define glSecondaryColorPointer glad_glSecondaryColorPointer
-typedef void (APIENTRYP PFNGLWINDOWPOS2DPROC)(GLdouble x, GLdouble y);
-GLAPI PFNGLWINDOWPOS2DPROC glad_glWindowPos2d;
-#define glWindowPos2d glad_glWindowPos2d
-typedef void (APIENTRYP PFNGLWINDOWPOS2DVPROC)(const GLdouble *v);
-GLAPI PFNGLWINDOWPOS2DVPROC glad_glWindowPos2dv;
-#define glWindowPos2dv glad_glWindowPos2dv
-typedef void (APIENTRYP PFNGLWINDOWPOS2FPROC)(GLfloat x, GLfloat y);
-GLAPI PFNGLWINDOWPOS2FPROC glad_glWindowPos2f;
-#define glWindowPos2f glad_glWindowPos2f
-typedef void (APIENTRYP PFNGLWINDOWPOS2FVPROC)(const GLfloat *v);
-GLAPI PFNGLWINDOWPOS2FVPROC glad_glWindowPos2fv;
-#define glWindowPos2fv glad_glWindowPos2fv
-typedef void (APIENTRYP PFNGLWINDOWPOS2IPROC)(GLint x, GLint y);
-GLAPI PFNGLWINDOWPOS2IPROC glad_glWindowPos2i;
-#define glWindowPos2i glad_glWindowPos2i
-typedef void (APIENTRYP PFNGLWINDOWPOS2IVPROC)(const GLint *v);
-GLAPI PFNGLWINDOWPOS2IVPROC glad_glWindowPos2iv;
-#define glWindowPos2iv glad_glWindowPos2iv
-typedef void (APIENTRYP PFNGLWINDOWPOS2SPROC)(GLshort x, GLshort y);
-GLAPI PFNGLWINDOWPOS2SPROC glad_glWindowPos2s;
-#define glWindowPos2s glad_glWindowPos2s
-typedef void (APIENTRYP PFNGLWINDOWPOS2SVPROC)(const GLshort *v);
-GLAPI PFNGLWINDOWPOS2SVPROC glad_glWindowPos2sv;
-#define glWindowPos2sv glad_glWindowPos2sv
-typedef void (APIENTRYP PFNGLWINDOWPOS3DPROC)(GLdouble x, GLdouble y, GLdouble z);
-GLAPI PFNGLWINDOWPOS3DPROC glad_glWindowPos3d;
-#define glWindowPos3d glad_glWindowPos3d
-typedef void (APIENTRYP PFNGLWINDOWPOS3DVPROC)(const GLdouble *v);
-GLAPI PFNGLWINDOWPOS3DVPROC glad_glWindowPos3dv;
-#define glWindowPos3dv glad_glWindowPos3dv
-typedef void (APIENTRYP PFNGLWINDOWPOS3FPROC)(GLfloat x, GLfloat y, GLfloat z);
-GLAPI PFNGLWINDOWPOS3FPROC glad_glWindowPos3f;
-#define glWindowPos3f glad_glWindowPos3f
-typedef void (APIENTRYP PFNGLWINDOWPOS3FVPROC)(const GLfloat *v);
-GLAPI PFNGLWINDOWPOS3FVPROC glad_glWindowPos3fv;
-#define glWindowPos3fv glad_glWindowPos3fv
-typedef void (APIENTRYP PFNGLWINDOWPOS3IPROC)(GLint x, GLint y, GLint z);
-GLAPI PFNGLWINDOWPOS3IPROC glad_glWindowPos3i;
-#define glWindowPos3i glad_glWindowPos3i
-typedef void (APIENTRYP PFNGLWINDOWPOS3IVPROC)(const GLint *v);
-GLAPI PFNGLWINDOWPOS3IVPROC glad_glWindowPos3iv;
-#define glWindowPos3iv glad_glWindowPos3iv
-typedef void (APIENTRYP PFNGLWINDOWPOS3SPROC)(GLshort x, GLshort y, GLshort z);
-GLAPI PFNGLWINDOWPOS3SPROC glad_glWindowPos3s;
-#define glWindowPos3s glad_glWindowPos3s
-typedef void (APIENTRYP PFNGLWINDOWPOS3SVPROC)(const GLshort *v);
-GLAPI PFNGLWINDOWPOS3SVPROC glad_glWindowPos3sv;
-#define glWindowPos3sv glad_glWindowPos3sv
-typedef void (APIENTRYP PFNGLBLENDCOLORPROC)(GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha);
-GLAPI PFNGLBLENDCOLORPROC glad_glBlendColor;
-#define glBlendColor glad_glBlendColor
-typedef void (APIENTRYP PFNGLBLENDEQUATIONPROC)(GLenum mode);
-GLAPI PFNGLBLENDEQUATIONPROC glad_glBlendEquation;
-#define glBlendEquation glad_glBlendEquation
-#endif
-#ifndef GL_VERSION_1_5
-#define GL_VERSION_1_5 1
-GLAPI int GLAD_GL_VERSION_1_5;
-typedef void (APIENTRYP PFNGLGENQUERIESPROC)(GLsizei n, GLuint *ids);
-GLAPI PFNGLGENQUERIESPROC glad_glGenQueries;
-#define glGenQueries glad_glGenQueries
-typedef void (APIENTRYP PFNGLDELETEQUERIESPROC)(GLsizei n, const GLuint *ids);
-GLAPI PFNGLDELETEQUERIESPROC glad_glDeleteQueries;
-#define glDeleteQueries glad_glDeleteQueries
-typedef GLboolean (APIENTRYP PFNGLISQUERYPROC)(GLuint id);
-GLAPI PFNGLISQUERYPROC glad_glIsQuery;
-#define glIsQuery glad_glIsQuery
-typedef void (APIENTRYP PFNGLBEGINQUERYPROC)(GLenum target, GLuint id);
-GLAPI PFNGLBEGINQUERYPROC glad_glBeginQuery;
-#define glBeginQuery glad_glBeginQuery
-typedef void (APIENTRYP PFNGLENDQUERYPROC)(GLenum target);
-GLAPI PFNGLENDQUERYPROC glad_glEndQuery;
-#define glEndQuery glad_glEndQuery
-typedef void (APIENTRYP PFNGLGETQUERYIVPROC)(GLenum target, GLenum pname, GLint *params);
-GLAPI PFNGLGETQUERYIVPROC glad_glGetQueryiv;
-#define glGetQueryiv glad_glGetQueryiv
-typedef void (APIENTRYP PFNGLGETQUERYOBJECTIVPROC)(GLuint id, GLenum pname, GLint *params);
-GLAPI PFNGLGETQUERYOBJECTIVPROC glad_glGetQueryObjectiv;
-#define glGetQueryObjectiv glad_glGetQueryObjectiv
-typedef void (APIENTRYP PFNGLGETQUERYOBJECTUIVPROC)(GLuint id, GLenum pname, GLuint *params);
-GLAPI PFNGLGETQUERYOBJECTUIVPROC glad_glGetQueryObjectuiv;
-#define glGetQueryObjectuiv glad_glGetQueryObjectuiv
-typedef void (APIENTRYP PFNGLBINDBUFFERPROC)(GLenum target, GLuint buffer);
-GLAPI PFNGLBINDBUFFERPROC glad_glBindBuffer;
-#define glBindBuffer glad_glBindBuffer
-typedef void (APIENTRYP PFNGLDELETEBUFFERSPROC)(GLsizei n, const GLuint *buffers);
-GLAPI PFNGLDELETEBUFFERSPROC glad_glDeleteBuffers;
-#define glDeleteBuffers glad_glDeleteBuffers
-typedef void (APIENTRYP PFNGLGENBUFFERSPROC)(GLsizei n, GLuint *buffers);
-GLAPI PFNGLGENBUFFERSPROC glad_glGenBuffers;
-#define glGenBuffers glad_glGenBuffers
-typedef GLboolean (APIENTRYP PFNGLISBUFFERPROC)(GLuint buffer);
-GLAPI PFNGLISBUFFERPROC glad_glIsBuffer;
-#define glIsBuffer glad_glIsBuffer
-typedef void (APIENTRYP PFNGLBUFFERDATAPROC)(GLenum target, GLsizeiptr size, const void *data, GLenum usage);
-GLAPI PFNGLBUFFERDATAPROC glad_glBufferData;
-#define glBufferData glad_glBufferData
-typedef void (APIENTRYP PFNGLBUFFERSUBDATAPROC)(GLenum target, GLintptr offset, GLsizeiptr size, const void *data);
-GLAPI PFNGLBUFFERSUBDATAPROC glad_glBufferSubData;
-#define glBufferSubData glad_glBufferSubData
-typedef void (APIENTRYP PFNGLGETBUFFERSUBDATAPROC)(GLenum target, GLintptr offset, GLsizeiptr size, void *data);
-GLAPI PFNGLGETBUFFERSUBDATAPROC glad_glGetBufferSubData;
-#define glGetBufferSubData glad_glGetBufferSubData
-typedef void * (APIENTRYP PFNGLMAPBUFFERPROC)(GLenum target, GLenum access);
-GLAPI PFNGLMAPBUFFERPROC glad_glMapBuffer;
-#define glMapBuffer glad_glMapBuffer
-typedef GLboolean (APIENTRYP PFNGLUNMAPBUFFERPROC)(GLenum target);
-GLAPI PFNGLUNMAPBUFFERPROC glad_glUnmapBuffer;
-#define glUnmapBuffer glad_glUnmapBuffer
-typedef void (APIENTRYP PFNGLGETBUFFERPARAMETERIVPROC)(GLenum target, GLenum pname, GLint *params);
-GLAPI PFNGLGETBUFFERPARAMETERIVPROC glad_glGetBufferParameteriv;
-#define glGetBufferParameteriv glad_glGetBufferParameteriv
-typedef void (APIENTRYP PFNGLGETBUFFERPOINTERVPROC)(GLenum target, GLenum pname, void **params);
-GLAPI PFNGLGETBUFFERPOINTERVPROC glad_glGetBufferPointerv;
-#define glGetBufferPointerv glad_glGetBufferPointerv
-#endif
-#ifndef GL_VERSION_2_0
-#define GL_VERSION_2_0 1
-GLAPI int GLAD_GL_VERSION_2_0;
-typedef void (APIENTRYP PFNGLBLENDEQUATIONSEPARATEPROC)(GLenum modeRGB, GLenum modeAlpha);
-GLAPI PFNGLBLENDEQUATIONSEPARATEPROC glad_glBlendEquationSeparate;
-#define glBlendEquationSeparate glad_glBlendEquationSeparate
-typedef void (APIENTRYP PFNGLDRAWBUFFERSPROC)(GLsizei n, const GLenum *bufs);
-GLAPI PFNGLDRAWBUFFERSPROC glad_glDrawBuffers;
-#define glDrawBuffers glad_glDrawBuffers
-typedef void (APIENTRYP PFNGLSTENCILOPSEPARATEPROC)(GLenum face, GLenum sfail, GLenum dpfail, GLenum dppass);
-GLAPI PFNGLSTENCILOPSEPARATEPROC glad_glStencilOpSeparate;
-#define glStencilOpSeparate glad_glStencilOpSeparate
-typedef void (APIENTRYP PFNGLSTENCILFUNCSEPARATEPROC)(GLenum face, GLenum func, GLint ref, GLuint mask);
-GLAPI PFNGLSTENCILFUNCSEPARATEPROC glad_glStencilFuncSeparate;
-#define glStencilFuncSeparate glad_glStencilFuncSeparate
-typedef void (APIENTRYP PFNGLSTENCILMASKSEPARATEPROC)(GLenum face, GLuint mask);
-GLAPI PFNGLSTENCILMASKSEPARATEPROC glad_glStencilMaskSeparate;
-#define glStencilMaskSeparate glad_glStencilMaskSeparate
-typedef void (APIENTRYP PFNGLATTACHSHADERPROC)(GLuint program, GLuint shader);
-GLAPI PFNGLATTACHSHADERPROC glad_glAttachShader;
-#define glAttachShader glad_glAttachShader
-typedef void (APIENTRYP PFNGLBINDATTRIBLOCATIONPROC)(GLuint program, GLuint index, const GLchar *name);
-GLAPI PFNGLBINDATTRIBLOCATIONPROC glad_glBindAttribLocation;
-#define glBindAttribLocation glad_glBindAttribLocation
-typedef void (APIENTRYP PFNGLCOMPILESHADERPROC)(GLuint shader);
-GLAPI PFNGLCOMPILESHADERPROC glad_glCompileShader;
-#define glCompileShader glad_glCompileShader
-typedef GLuint (APIENTRYP PFNGLCREATEPROGRAMPROC)(void);
-GLAPI PFNGLCREATEPROGRAMPROC glad_glCreateProgram;
-#define glCreateProgram glad_glCreateProgram
-typedef GLuint (APIENTRYP PFNGLCREATESHADERPROC)(GLenum type);
-GLAPI PFNGLCREATESHADERPROC glad_glCreateShader;
-#define glCreateShader glad_glCreateShader
-typedef void (APIENTRYP PFNGLDELETEPROGRAMPROC)(GLuint program);
-GLAPI PFNGLDELETEPROGRAMPROC glad_glDeleteProgram;
-#define glDeleteProgram glad_glDeleteProgram
-typedef void (APIENTRYP PFNGLDELETESHADERPROC)(GLuint shader);
-GLAPI PFNGLDELETESHADERPROC glad_glDeleteShader;
-#define glDeleteShader glad_glDeleteShader
-typedef void (APIENTRYP PFNGLDETACHSHADERPROC)(GLuint program, GLuint shader);
-GLAPI PFNGLDETACHSHADERPROC glad_glDetachShader;
-#define glDetachShader glad_glDetachShader
-typedef void (APIENTRYP PFNGLDISABLEVERTEXATTRIBARRAYPROC)(GLuint index);
-GLAPI PFNGLDISABLEVERTEXATTRIBARRAYPROC glad_glDisableVertexAttribArray;
-#define glDisableVertexAttribArray glad_glDisableVertexAttribArray
-typedef void (APIENTRYP PFNGLENABLEVERTEXATTRIBARRAYPROC)(GLuint index);
-GLAPI PFNGLENABLEVERTEXATTRIBARRAYPROC glad_glEnableVertexAttribArray;
-#define glEnableVertexAttribArray glad_glEnableVertexAttribArray
-typedef void (APIENTRYP PFNGLGETACTIVEATTRIBPROC)(GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name);
-GLAPI PFNGLGETACTIVEATTRIBPROC glad_glGetActiveAttrib;
-#define glGetActiveAttrib glad_glGetActiveAttrib
-typedef void (APIENTRYP PFNGLGETACTIVEUNIFORMPROC)(GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLint *size, GLenum *type, GLchar *name);
-GLAPI PFNGLGETACTIVEUNIFORMPROC glad_glGetActiveUniform;
-#define glGetActiveUniform glad_glGetActiveUniform
-typedef void (APIENTRYP PFNGLGETATTACHEDSHADERSPROC)(GLuint program, GLsizei maxCount, GLsizei *count, GLuint *shaders);
-GLAPI PFNGLGETATTACHEDSHADERSPROC glad_glGetAttachedShaders;
-#define glGetAttachedShaders glad_glGetAttachedShaders
-typedef GLint (APIENTRYP PFNGLGETATTRIBLOCATIONPROC)(GLuint program, const GLchar *name);
-GLAPI PFNGLGETATTRIBLOCATIONPROC glad_glGetAttribLocation;
-#define glGetAttribLocation glad_glGetAttribLocation
-typedef void (APIENTRYP PFNGLGETPROGRAMIVPROC)(GLuint program, GLenum pname, GLint *params);
-GLAPI PFNGLGETPROGRAMIVPROC glad_glGetProgramiv;
-#define glGetProgramiv glad_glGetProgramiv
-typedef void (APIENTRYP PFNGLGETPROGRAMINFOLOGPROC)(GLuint program, GLsizei bufSize, GLsizei *length, GLchar *infoLog);
-GLAPI PFNGLGETPROGRAMINFOLOGPROC glad_glGetProgramInfoLog;
-#define glGetProgramInfoLog glad_glGetProgramInfoLog
-typedef void (APIENTRYP PFNGLGETSHADERIVPROC)(GLuint shader, GLenum pname, GLint *params);
-GLAPI PFNGLGETSHADERIVPROC glad_glGetShaderiv;
-#define glGetShaderiv glad_glGetShaderiv
-typedef void (APIENTRYP PFNGLGETSHADERINFOLOGPROC)(GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *infoLog);
-GLAPI PFNGLGETSHADERINFOLOGPROC glad_glGetShaderInfoLog;
-#define glGetShaderInfoLog glad_glGetShaderInfoLog
-typedef void (APIENTRYP PFNGLGETSHADERSOURCEPROC)(GLuint shader, GLsizei bufSize, GLsizei *length, GLchar *source);
-GLAPI PFNGLGETSHADERSOURCEPROC glad_glGetShaderSource;
-#define glGetShaderSource glad_glGetShaderSource
-typedef GLint (APIENTRYP PFNGLGETUNIFORMLOCATIONPROC)(GLuint program, const GLchar *name);
-GLAPI PFNGLGETUNIFORMLOCATIONPROC glad_glGetUniformLocation;
-#define glGetUniformLocation glad_glGetUniformLocation
-typedef void (APIENTRYP PFNGLGETUNIFORMFVPROC)(GLuint program, GLint location, GLfloat *params);
-GLAPI PFNGLGETUNIFORMFVPROC glad_glGetUniformfv;
-#define glGetUniformfv glad_glGetUniformfv
-typedef void (APIENTRYP PFNGLGETUNIFORMIVPROC)(GLuint program, GLint location, GLint *params);
-GLAPI PFNGLGETUNIFORMIVPROC glad_glGetUniformiv;
-#define glGetUniformiv glad_glGetUniformiv
-typedef void (APIENTRYP PFNGLGETVERTEXATTRIBDVPROC)(GLuint index, GLenum pname, GLdouble *params);
-GLAPI PFNGLGETVERTEXATTRIBDVPROC glad_glGetVertexAttribdv;
-#define glGetVertexAttribdv glad_glGetVertexAttribdv
-typedef void (APIENTRYP PFNGLGETVERTEXATTRIBFVPROC)(GLuint index, GLenum pname, GLfloat *params);
-GLAPI PFNGLGETVERTEXATTRIBFVPROC glad_glGetVertexAttribfv;
-#define glGetVertexAttribfv glad_glGetVertexAttribfv
-typedef void (APIENTRYP PFNGLGETVERTEXATTRIBIVPROC)(GLuint index, GLenum pname, GLint *params);
-GLAPI PFNGLGETVERTEXATTRIBIVPROC glad_glGetVertexAttribiv;
-#define glGetVertexAttribiv glad_glGetVertexAttribiv
-typedef void (APIENTRYP PFNGLGETVERTEXATTRIBPOINTERVPROC)(GLuint index, GLenum pname, void **pointer);
-GLAPI PFNGLGETVERTEXATTRIBPOINTERVPROC glad_glGetVertexAttribPointerv;
-#define glGetVertexAttribPointerv glad_glGetVertexAttribPointerv
-typedef GLboolean (APIENTRYP PFNGLISPROGRAMPROC)(GLuint program);
-GLAPI PFNGLISPROGRAMPROC glad_glIsProgram;
-#define glIsProgram glad_glIsProgram
-typedef GLboolean (APIENTRYP PFNGLISSHADERPROC)(GLuint shader);
-GLAPI PFNGLISSHADERPROC glad_glIsShader;
-#define glIsShader glad_glIsShader
-typedef void (APIENTRYP PFNGLLINKPROGRAMPROC)(GLuint program);
-GLAPI PFNGLLINKPROGRAMPROC glad_glLinkProgram;
-#define glLinkProgram glad_glLinkProgram
-typedef void (APIENTRYP PFNGLSHADERSOURCEPROC)(GLuint shader, GLsizei count, const GLchar *const*string, const GLint *length);
-GLAPI PFNGLSHADERSOURCEPROC glad_glShaderSource;
-#define glShaderSource glad_glShaderSource
-typedef void (APIENTRYP PFNGLUSEPROGRAMPROC)(GLuint program);
-GLAPI PFNGLUSEPROGRAMPROC glad_glUseProgram;
-#define glUseProgram glad_glUseProgram
-typedef void (APIENTRYP PFNGLUNIFORM1FPROC)(GLint location, GLfloat v0);
-GLAPI PFNGLUNIFORM1FPROC glad_glUniform1f;
-#define glUniform1f glad_glUniform1f
-typedef void (APIENTRYP PFNGLUNIFORM2FPROC)(GLint location, GLfloat v0, GLfloat v1);
-GLAPI PFNGLUNIFORM2FPROC glad_glUniform2f;
-#define glUniform2f glad_glUniform2f
-typedef void (APIENTRYP PFNGLUNIFORM3FPROC)(GLint location, GLfloat v0, GLfloat v1, GLfloat v2);
-GLAPI PFNGLUNIFORM3FPROC glad_glUniform3f;
-#define glUniform3f glad_glUniform3f
-typedef void (APIENTRYP PFNGLUNIFORM4FPROC)(GLint location, GLfloat v0, GLfloat v1, GLfloat v2, GLfloat v3);
-GLAPI PFNGLUNIFORM4FPROC glad_glUniform4f;
-#define glUniform4f glad_glUniform4f
-typedef void (APIENTRYP PFNGLUNIFORM1IPROC)(GLint location, GLint v0);
-GLAPI PFNGLUNIFORM1IPROC glad_glUniform1i;
-#define glUniform1i glad_glUniform1i
-typedef void (APIENTRYP PFNGLUNIFORM2IPROC)(GLint location, GLint v0, GLint v1);
-GLAPI PFNGLUNIFORM2IPROC glad_glUniform2i;
-#define glUniform2i glad_glUniform2i
-typedef void (APIENTRYP PFNGLUNIFORM3IPROC)(GLint location, GLint v0, GLint v1, GLint v2);
-GLAPI PFNGLUNIFORM3IPROC glad_glUniform3i;
-#define glUniform3i glad_glUniform3i
-typedef void (APIENTRYP PFNGLUNIFORM4IPROC)(GLint location, GLint v0, GLint v1, GLint v2, GLint v3);
-GLAPI PFNGLUNIFORM4IPROC glad_glUniform4i;
-#define glUniform4i glad_glUniform4i
-typedef void (APIENTRYP PFNGLUNIFORM1FVPROC)(GLint location, GLsizei count, const GLfloat *value);
-GLAPI PFNGLUNIFORM1FVPROC glad_glUniform1fv;
-#define glUniform1fv glad_glUniform1fv
-typedef void (APIENTRYP PFNGLUNIFORM2FVPROC)(GLint location, GLsizei count, const GLfloat *value);
-GLAPI PFNGLUNIFORM2FVPROC glad_glUniform2fv;
-#define glUniform2fv glad_glUniform2fv
-typedef void (APIENTRYP PFNGLUNIFORM3FVPROC)(GLint location, GLsizei count, const GLfloat *value);
-GLAPI PFNGLUNIFORM3FVPROC glad_glUniform3fv;
-#define glUniform3fv glad_glUniform3fv
-typedef void (APIENTRYP PFNGLUNIFORM4FVPROC)(GLint location, GLsizei count, const GLfloat *value);
-GLAPI PFNGLUNIFORM4FVPROC glad_glUniform4fv;
-#define glUniform4fv glad_glUniform4fv
-typedef void (APIENTRYP PFNGLUNIFORM1IVPROC)(GLint location, GLsizei count, const GLint *value);
-GLAPI PFNGLUNIFORM1IVPROC glad_glUniform1iv;
-#define glUniform1iv glad_glUniform1iv
-typedef void (APIENTRYP PFNGLUNIFORM2IVPROC)(GLint location, GLsizei count, const GLint *value);
-GLAPI PFNGLUNIFORM2IVPROC glad_glUniform2iv;
-#define glUniform2iv glad_glUniform2iv
-typedef void (APIENTRYP PFNGLUNIFORM3IVPROC)(GLint location, GLsizei count, const GLint *value);
-GLAPI PFNGLUNIFORM3IVPROC glad_glUniform3iv;
-#define glUniform3iv glad_glUniform3iv
-typedef void (APIENTRYP PFNGLUNIFORM4IVPROC)(GLint location, GLsizei count, const GLint *value);
-GLAPI PFNGLUNIFORM4IVPROC glad_glUniform4iv;
-#define glUniform4iv glad_glUniform4iv
-typedef void (APIENTRYP PFNGLUNIFORMMATRIX2FVPROC)(GLint location, GLsizei count, GLboolean transpose, const GLfloat *value);
-GLAPI PFNGLUNIFORMMATRIX2FVPROC glad_glUniformMatrix2fv;
-#define glUniformMatrix2fv glad_glUniformMatrix2fv
-typedef void (APIENTRYP PFNGLUNIFORMMATRIX3FVPROC)(GLint location, GLsizei count, GLboolean transpose, const GLfloat *value);
-GLAPI PFNGLUNIFORMMATRIX3FVPROC glad_glUniformMatrix3fv;
-#define glUniformMatrix3fv glad_glUniformMatrix3fv
-typedef void (APIENTRYP PFNGLUNIFORMMATRIX4FVPROC)(GLint location, GLsizei count, GLboolean transpose, const GLfloat *value);
-GLAPI PFNGLUNIFORMMATRIX4FVPROC glad_glUniformMatrix4fv;
-#define glUniformMatrix4fv glad_glUniformMatrix4fv
-typedef void (APIENTRYP PFNGLVALIDATEPROGRAMPROC)(GLuint program);
-GLAPI PFNGLVALIDATEPROGRAMPROC glad_glValidateProgram;
-#define glValidateProgram glad_glValidateProgram
-typedef void (APIENTRYP PFNGLVERTEXATTRIB1DPROC)(GLuint index, GLdouble x);
-GLAPI PFNGLVERTEXATTRIB1DPROC glad_glVertexAttrib1d;
-#define glVertexAttrib1d glad_glVertexAttrib1d
-typedef void (APIENTRYP PFNGLVERTEXATTRIB1DVPROC)(GLuint index, const GLdouble *v);
-GLAPI PFNGLVERTEXATTRIB1DVPROC glad_glVertexAttrib1dv;
-#define glVertexAttrib1dv glad_glVertexAttrib1dv
-typedef void (APIENTRYP PFNGLVERTEXATTRIB1FPROC)(GLuint index, GLfloat x);
-GLAPI PFNGLVERTEXATTRIB1FPROC glad_glVertexAttrib1f;
-#define glVertexAttrib1f glad_glVertexAttrib1f
-typedef void (APIENTRYP PFNGLVERTEXATTRIB1FVPROC)(GLuint index, const GLfloat *v);
-GLAPI PFNGLVERTEXATTRIB1FVPROC glad_glVertexAttrib1fv;
-#define glVertexAttrib1fv glad_glVertexAttrib1fv
-typedef void (APIENTRYP PFNGLVERTEXATTRIB1SPROC)(GLuint index, GLshort x);
-GLAPI PFNGLVERTEXATTRIB1SPROC glad_glVertexAttrib1s;
-#define glVertexAttrib1s glad_glVertexAttrib1s
-typedef void (APIENTRYP PFNGLVERTEXATTRIB1SVPROC)(GLuint index, const GLshort *v);
-GLAPI PFNGLVERTEXATTRIB1SVPROC glad_glVertexAttrib1sv;
-#define glVertexAttrib1sv glad_glVertexAttrib1sv
-typedef void (APIENTRYP PFNGLVERTEXATTRIB2DPROC)(GLuint index, GLdouble x, GLdouble y);
-GLAPI PFNGLVERTEXATTRIB2DPROC glad_glVertexAttrib2d;
-#define glVertexAttrib2d glad_glVertexAttrib2d
-typedef void (APIENTRYP PFNGLVERTEXATTRIB2DVPROC)(GLuint index, const GLdouble *v);
-GLAPI PFNGLVERTEXATTRIB2DVPROC glad_glVertexAttrib2dv;
-#define glVertexAttrib2dv glad_glVertexAttrib2dv
-typedef void (APIENTRYP PFNGLVERTEXATTRIB2FPROC)(GLuint index, GLfloat x, GLfloat y);
-GLAPI PFNGLVERTEXATTRIB2FPROC glad_glVertexAttrib2f;
-#define glVertexAttrib2f glad_glVertexAttrib2f
-typedef void (APIENTRYP PFNGLVERTEXATTRIB2FVPROC)(GLuint index, const GLfloat *v);
-GLAPI PFNGLVERTEXATTRIB2FVPROC glad_glVertexAttrib2fv;
-#define glVertexAttrib2fv glad_glVertexAttrib2fv
-typedef void (APIENTRYP PFNGLVERTEXATTRIB2SPROC)(GLuint index, GLshort x, GLshort y);
-GLAPI PFNGLVERTEXATTRIB2SPROC glad_glVertexAttrib2s;
-#define glVertexAttrib2s glad_glVertexAttrib2s
-typedef void (APIENTRYP PFNGLVERTEXATTRIB2SVPROC)(GLuint index, const GLshort *v);
-GLAPI PFNGLVERTEXATTRIB2SVPROC glad_glVertexAttrib2sv;
-#define glVertexAttrib2sv glad_glVertexAttrib2sv
-typedef void (APIENTRYP PFNGLVERTEXATTRIB3DPROC)(GLuint index, GLdouble x, GLdouble y, GLdouble z);
-GLAPI PFNGLVERTEXATTRIB3DPROC glad_glVertexAttrib3d;
-#define glVertexAttrib3d glad_glVertexAttrib3d
-typedef void (APIENTRYP PFNGLVERTEXATTRIB3DVPROC)(GLuint index, const GLdouble *v);
-GLAPI PFNGLVERTEXATTRIB3DVPROC glad_glVertexAttrib3dv;
-#define glVertexAttrib3dv glad_glVertexAttrib3dv
-typedef void (APIENTRYP PFNGLVERTEXATTRIB3FPROC)(GLuint index, GLfloat x, GLfloat y, GLfloat z);
-GLAPI PFNGLVERTEXATTRIB3FPROC glad_glVertexAttrib3f;
-#define glVertexAttrib3f glad_glVertexAttrib3f
-typedef void (APIENTRYP PFNGLVERTEXATTRIB3FVPROC)(GLuint index, const GLfloat *v);
-GLAPI PFNGLVERTEXATTRIB3FVPROC glad_glVertexAttrib3fv;
-#define glVertexAttrib3fv glad_glVertexAttrib3fv
-typedef void (APIENTRYP PFNGLVERTEXATTRIB3SPROC)(GLuint index, GLshort x, GLshort y, GLshort z);
-GLAPI PFNGLVERTEXATTRIB3SPROC glad_glVertexAttrib3s;
-#define glVertexAttrib3s glad_glVertexAttrib3s
-typedef void (APIENTRYP PFNGLVERTEXATTRIB3SVPROC)(GLuint index, const GLshort *v);
-GLAPI PFNGLVERTEXATTRIB3SVPROC glad_glVertexAttrib3sv;
-#define glVertexAttrib3sv glad_glVertexAttrib3sv
-typedef void (APIENTRYP PFNGLVERTEXATTRIB4NBVPROC)(GLuint index, const GLbyte *v);
-GLAPI PFNGLVERTEXATTRIB4NBVPROC glad_glVertexAttrib4Nbv;
-#define glVertexAttrib4Nbv glad_glVertexAttrib4Nbv
-typedef void (APIENTRYP PFNGLVERTEXATTRIB4NIVPROC)(GLuint index, const GLint *v);
-GLAPI PFNGLVERTEXATTRIB4NIVPROC glad_glVertexAttrib4Niv;
-#define glVertexAttrib4Niv glad_glVertexAttrib4Niv
-typedef void (APIENTRYP PFNGLVERTEXATTRIB4NSVPROC)(GLuint index, const GLshort *v);
-GLAPI PFNGLVERTEXATTRIB4NSVPROC glad_glVertexAttrib4Nsv;
-#define glVertexAttrib4Nsv glad_glVertexAttrib4Nsv
-typedef void (APIENTRYP PFNGLVERTEXATTRIB4NUBPROC)(GLuint index, GLubyte x, GLubyte y, GLubyte z, GLubyte w);
-GLAPI PFNGLVERTEXATTRIB4NUBPROC glad_glVertexAttrib4Nub;
-#define glVertexAttrib4Nub glad_glVertexAttrib4Nub
-typedef void (APIENTRYP PFNGLVERTEXATTRIB4NUBVPROC)(GLuint index, const GLubyte *v);
-GLAPI PFNGLVERTEXATTRIB4NUBVPROC glad_glVertexAttrib4Nubv;
-#define glVertexAttrib4Nubv glad_glVertexAttrib4Nubv
-typedef void (APIENTRYP PFNGLVERTEXATTRIB4NUIVPROC)(GLuint index, const GLuint *v);
-GLAPI PFNGLVERTEXATTRIB4NUIVPROC glad_glVertexAttrib4Nuiv;
-#define glVertexAttrib4Nuiv glad_glVertexAttrib4Nuiv
-typedef void (APIENTRYP PFNGLVERTEXATTRIB4NUSVPROC)(GLuint index, const GLushort *v);
-GLAPI PFNGLVERTEXATTRIB4NUSVPROC glad_glVertexAttrib4Nusv;
-#define glVertexAttrib4Nusv glad_glVertexAttrib4Nusv
-typedef void (APIENTRYP PFNGLVERTEXATTRIB4BVPROC)(GLuint index, const GLbyte *v);
-GLAPI PFNGLVERTEXATTRIB4BVPROC glad_glVertexAttrib4bv;
-#define glVertexAttrib4bv glad_glVertexAttrib4bv
-typedef void (APIENTRYP PFNGLVERTEXATTRIB4DPROC)(GLuint index, GLdouble x, GLdouble y, GLdouble z, GLdouble w);
-GLAPI PFNGLVERTEXATTRIB4DPROC glad_glVertexAttrib4d;
-#define glVertexAttrib4d glad_glVertexAttrib4d
-typedef void (APIENTRYP PFNGLVERTEXATTRIB4DVPROC)(GLuint index, const GLdouble *v);
-GLAPI PFNGLVERTEXATTRIB4DVPROC glad_glVertexAttrib4dv;
-#define glVertexAttrib4dv glad_glVertexAttrib4dv
-typedef void (APIENTRYP PFNGLVERTEXATTRIB4FPROC)(GLuint index, GLfloat x, GLfloat y, GLfloat z, GLfloat w);
-GLAPI PFNGLVERTEXATTRIB4FPROC glad_glVertexAttrib4f;
-#define glVertexAttrib4f glad_glVertexAttrib4f
-typedef void (APIENTRYP PFNGLVERTEXATTRIB4FVPROC)(GLuint index, const GLfloat *v);
-GLAPI PFNGLVERTEXATTRIB4FVPROC glad_glVertexAttrib4fv;
-#define glVertexAttrib4fv glad_glVertexAttrib4fv
-typedef void (APIENTRYP PFNGLVERTEXATTRIB4IVPROC)(GLuint index, const GLint *v);
-GLAPI PFNGLVERTEXATTRIB4IVPROC glad_glVertexAttrib4iv;
-#define glVertexAttrib4iv glad_glVertexAttrib4iv
-typedef void (APIENTRYP PFNGLVERTEXATTRIB4SPROC)(GLuint index, GLshort x, GLshort y, GLshort z, GLshort w);
-GLAPI PFNGLVERTEXATTRIB4SPROC glad_glVertexAttrib4s;
-#define glVertexAttrib4s glad_glVertexAttrib4s
-typedef void (APIENTRYP PFNGLVERTEXATTRIB4SVPROC)(GLuint index, const GLshort *v);
-GLAPI PFNGLVERTEXATTRIB4SVPROC glad_glVertexAttrib4sv;
-#define glVertexAttrib4sv glad_glVertexAttrib4sv
-typedef void (APIENTRYP PFNGLVERTEXATTRIB4UBVPROC)(GLuint index, const GLubyte *v);
-GLAPI PFNGLVERTEXATTRIB4UBVPROC glad_glVertexAttrib4ubv;
-#define glVertexAttrib4ubv glad_glVertexAttrib4ubv
-typedef void (APIENTRYP PFNGLVERTEXATTRIB4UIVPROC)(GLuint index, const GLuint *v);
-GLAPI PFNGLVERTEXATTRIB4UIVPROC glad_glVertexAttrib4uiv;
-#define glVertexAttrib4uiv glad_glVertexAttrib4uiv
-typedef void (APIENTRYP PFNGLVERTEXATTRIB4USVPROC)(GLuint index, const GLushort *v);
-GLAPI PFNGLVERTEXATTRIB4USVPROC glad_glVertexAttrib4usv;
-#define glVertexAttrib4usv glad_glVertexAttrib4usv
-typedef void (APIENTRYP PFNGLVERTEXATTRIBPOINTERPROC)(GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, const void *pointer);
-GLAPI PFNGLVERTEXATTRIBPOINTERPROC glad_glVertexAttribPointer;
-#define glVertexAttribPointer glad_glVertexAttribPointer
-#endif
-#ifndef GL_VERSION_2_1
-#define GL_VERSION_2_1 1
-GLAPI int GLAD_GL_VERSION_2_1;
-typedef void (APIENTRYP PFNGLUNIFORMMATRIX2X3FVPROC)(GLint location, GLsizei count, GLboolean transpose, const GLfloat *value);
-GLAPI PFNGLUNIFORMMATRIX2X3FVPROC glad_glUniformMatrix2x3fv;
-#define glUniformMatrix2x3fv glad_glUniformMatrix2x3fv
-typedef void (APIENTRYP PFNGLUNIFORMMATRIX3X2FVPROC)(GLint location, GLsizei count, GLboolean transpose, const GLfloat *value);
-GLAPI PFNGLUNIFORMMATRIX3X2FVPROC glad_glUniformMatrix3x2fv;
-#define glUniformMatrix3x2fv glad_glUniformMatrix3x2fv
-typedef void (APIENTRYP PFNGLUNIFORMMATRIX2X4FVPROC)(GLint location, GLsizei count, GLboolean transpose, const GLfloat *value);
-GLAPI PFNGLUNIFORMMATRIX2X4FVPROC glad_glUniformMatrix2x4fv;
-#define glUniformMatrix2x4fv glad_glUniformMatrix2x4fv
-typedef void (APIENTRYP PFNGLUNIFORMMATRIX4X2FVPROC)(GLint location, GLsizei count, GLboolean transpose, const GLfloat *value);
-GLAPI PFNGLUNIFORMMATRIX4X2FVPROC glad_glUniformMatrix4x2fv;
-#define glUniformMatrix4x2fv glad_glUniformMatrix4x2fv
-typedef void (APIENTRYP PFNGLUNIFORMMATRIX3X4FVPROC)(GLint location, GLsizei count, GLboolean transpose, const GLfloat *value);
-GLAPI PFNGLUNIFORMMATRIX3X4FVPROC glad_glUniformMatrix3x4fv;
-#define glUniformMatrix3x4fv glad_glUniformMatrix3x4fv
-typedef void (APIENTRYP PFNGLUNIFORMMATRIX4X3FVPROC)(GLint location, GLsizei count, GLboolean transpose, const GLfloat *value);
-GLAPI PFNGLUNIFORMMATRIX4X3FVPROC glad_glUniformMatrix4x3fv;
-#define glUniformMatrix4x3fv glad_glUniformMatrix4x3fv
-#endif
-#ifndef GL_VERSION_3_0
-#define GL_VERSION_3_0 1
-GLAPI int GLAD_GL_VERSION_3_0;
-typedef void (APIENTRYP PFNGLCOLORMASKIPROC)(GLuint index, GLboolean r, GLboolean g, GLboolean b, GLboolean a);
-GLAPI PFNGLCOLORMASKIPROC glad_glColorMaski;
-#define glColorMaski glad_glColorMaski
-typedef void (APIENTRYP PFNGLGETBOOLEANI_VPROC)(GLenum target, GLuint index, GLboolean *data);
-GLAPI PFNGLGETBOOLEANI_VPROC glad_glGetBooleani_v;
-#define glGetBooleani_v glad_glGetBooleani_v
-typedef void (APIENTRYP PFNGLGETINTEGERI_VPROC)(GLenum target, GLuint index, GLint *data);
-GLAPI PFNGLGETINTEGERI_VPROC glad_glGetIntegeri_v;
-#define glGetIntegeri_v glad_glGetIntegeri_v
-typedef void (APIENTRYP PFNGLENABLEIPROC)(GLenum target, GLuint index);
-GLAPI PFNGLENABLEIPROC glad_glEnablei;
-#define glEnablei glad_glEnablei
-typedef void (APIENTRYP PFNGLDISABLEIPROC)(GLenum target, GLuint index);
-GLAPI PFNGLDISABLEIPROC glad_glDisablei;
-#define glDisablei glad_glDisablei
-typedef GLboolean (APIENTRYP PFNGLISENABLEDIPROC)(GLenum target, GLuint index);
-GLAPI PFNGLISENABLEDIPROC glad_glIsEnabledi;
-#define glIsEnabledi glad_glIsEnabledi
-typedef void (APIENTRYP PFNGLBEGINTRANSFORMFEEDBACKPROC)(GLenum primitiveMode);
-GLAPI PFNGLBEGINTRANSFORMFEEDBACKPROC glad_glBeginTransformFeedback;
-#define glBeginTransformFeedback glad_glBeginTransformFeedback
-typedef void (APIENTRYP PFNGLENDTRANSFORMFEEDBACKPROC)(void);
-GLAPI PFNGLENDTRANSFORMFEEDBACKPROC glad_glEndTransformFeedback;
-#define glEndTransformFeedback glad_glEndTransformFeedback
-typedef void (APIENTRYP PFNGLBINDBUFFERRANGEPROC)(GLenum target, GLuint index, GLuint buffer, GLintptr offset, GLsizeiptr size);
-GLAPI PFNGLBINDBUFFERRANGEPROC glad_glBindBufferRange;
-#define glBindBufferRange glad_glBindBufferRange
-typedef void (APIENTRYP PFNGLBINDBUFFERBASEPROC)(GLenum target, GLuint index, GLuint buffer);
-GLAPI PFNGLBINDBUFFERBASEPROC glad_glBindBufferBase;
-#define glBindBufferBase glad_glBindBufferBase
-typedef void (APIENTRYP PFNGLTRANSFORMFEEDBACKVARYINGSPROC)(GLuint program, GLsizei count, const GLchar *const*varyings, GLenum bufferMode);
-GLAPI PFNGLTRANSFORMFEEDBACKVARYINGSPROC glad_glTransformFeedbackVaryings;
-#define glTransformFeedbackVaryings glad_glTransformFeedbackVaryings
-typedef void (APIENTRYP PFNGLGETTRANSFORMFEEDBACKVARYINGPROC)(GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLsizei *size, GLenum *type, GLchar *name);
-GLAPI PFNGLGETTRANSFORMFEEDBACKVARYINGPROC glad_glGetTransformFeedbackVarying;
-#define glGetTransformFeedbackVarying glad_glGetTransformFeedbackVarying
-typedef void (APIENTRYP PFNGLCLAMPCOLORPROC)(GLenum target, GLenum clamp);
-GLAPI PFNGLCLAMPCOLORPROC glad_glClampColor;
-#define glClampColor glad_glClampColor
-typedef void (APIENTRYP PFNGLBEGINCONDITIONALRENDERPROC)(GLuint id, GLenum mode);
-GLAPI PFNGLBEGINCONDITIONALRENDERPROC glad_glBeginConditionalRender;
-#define glBeginConditionalRender glad_glBeginConditionalRender
-typedef void (APIENTRYP PFNGLENDCONDITIONALRENDERPROC)(void);
-GLAPI PFNGLENDCONDITIONALRENDERPROC glad_glEndConditionalRender;
-#define glEndConditionalRender glad_glEndConditionalRender
-typedef void (APIENTRYP PFNGLVERTEXATTRIBIPOINTERPROC)(GLuint index, GLint size, GLenum type, GLsizei stride, const void *pointer);
-GLAPI PFNGLVERTEXATTRIBIPOINTERPROC glad_glVertexAttribIPointer;
-#define glVertexAttribIPointer glad_glVertexAttribIPointer
-typedef void (APIENTRYP PFNGLGETVERTEXATTRIBIIVPROC)(GLuint index, GLenum pname, GLint *params);
-GLAPI PFNGLGETVERTEXATTRIBIIVPROC glad_glGetVertexAttribIiv;
-#define glGetVertexAttribIiv glad_glGetVertexAttribIiv
-typedef void (APIENTRYP PFNGLGETVERTEXATTRIBIUIVPROC)(GLuint index, GLenum pname, GLuint *params);
-GLAPI PFNGLGETVERTEXATTRIBIUIVPROC glad_glGetVertexAttribIuiv;
-#define glGetVertexAttribIuiv glad_glGetVertexAttribIuiv
-typedef void (APIENTRYP PFNGLVERTEXATTRIBI1IPROC)(GLuint index, GLint x);
-GLAPI PFNGLVERTEXATTRIBI1IPROC glad_glVertexAttribI1i;
-#define glVertexAttribI1i glad_glVertexAttribI1i
-typedef void (APIENTRYP PFNGLVERTEXATTRIBI2IPROC)(GLuint index, GLint x, GLint y);
-GLAPI PFNGLVERTEXATTRIBI2IPROC glad_glVertexAttribI2i;
-#define glVertexAttribI2i glad_glVertexAttribI2i
-typedef void (APIENTRYP PFNGLVERTEXATTRIBI3IPROC)(GLuint index, GLint x, GLint y, GLint z);
-GLAPI PFNGLVERTEXATTRIBI3IPROC glad_glVertexAttribI3i;
-#define glVertexAttribI3i glad_glVertexAttribI3i
-typedef void (APIENTRYP PFNGLVERTEXATTRIBI4IPROC)(GLuint index, GLint x, GLint y, GLint z, GLint w);
-GLAPI PFNGLVERTEXATTRIBI4IPROC glad_glVertexAttribI4i;
-#define glVertexAttribI4i glad_glVertexAttribI4i
-typedef void (APIENTRYP PFNGLVERTEXATTRIBI1UIPROC)(GLuint index, GLuint x);
-GLAPI PFNGLVERTEXATTRIBI1UIPROC glad_glVertexAttribI1ui;
-#define glVertexAttribI1ui glad_glVertexAttribI1ui
-typedef void (APIENTRYP PFNGLVERTEXATTRIBI2UIPROC)(GLuint index, GLuint x, GLuint y);
-GLAPI PFNGLVERTEXATTRIBI2UIPROC glad_glVertexAttribI2ui;
-#define glVertexAttribI2ui glad_glVertexAttribI2ui
-typedef void (APIENTRYP PFNGLVERTEXATTRIBI3UIPROC)(GLuint index, GLuint x, GLuint y, GLuint z);
-GLAPI PFNGLVERTEXATTRIBI3UIPROC glad_glVertexAttribI3ui;
-#define glVertexAttribI3ui glad_glVertexAttribI3ui
-typedef void (APIENTRYP PFNGLVERTEXATTRIBI4UIPROC)(GLuint index, GLuint x, GLuint y, GLuint z, GLuint w);
-GLAPI PFNGLVERTEXATTRIBI4UIPROC glad_glVertexAttribI4ui;
-#define glVertexAttribI4ui glad_glVertexAttribI4ui
-typedef void (APIENTRYP PFNGLVERTEXATTRIBI1IVPROC)(GLuint index, const GLint *v);
-GLAPI PFNGLVERTEXATTRIBI1IVPROC glad_glVertexAttribI1iv;
-#define glVertexAttribI1iv glad_glVertexAttribI1iv
-typedef void (APIENTRYP PFNGLVERTEXATTRIBI2IVPROC)(GLuint index, const GLint *v);
-GLAPI PFNGLVERTEXATTRIBI2IVPROC glad_glVertexAttribI2iv;
-#define glVertexAttribI2iv glad_glVertexAttribI2iv
-typedef void (APIENTRYP PFNGLVERTEXATTRIBI3IVPROC)(GLuint index, const GLint *v);
-GLAPI PFNGLVERTEXATTRIBI3IVPROC glad_glVertexAttribI3iv;
-#define glVertexAttribI3iv glad_glVertexAttribI3iv
-typedef void (APIENTRYP PFNGLVERTEXATTRIBI4IVPROC)(GLuint index, const GLint *v);
-GLAPI PFNGLVERTEXATTRIBI4IVPROC glad_glVertexAttribI4iv;
-#define glVertexAttribI4iv glad_glVertexAttribI4iv
-typedef void (APIENTRYP PFNGLVERTEXATTRIBI1UIVPROC)(GLuint index, const GLuint *v);
-GLAPI PFNGLVERTEXATTRIBI1UIVPROC glad_glVertexAttribI1uiv;
-#define glVertexAttribI1uiv glad_glVertexAttribI1uiv
-typedef void (APIENTRYP PFNGLVERTEXATTRIBI2UIVPROC)(GLuint index, const GLuint *v);
-GLAPI PFNGLVERTEXATTRIBI2UIVPROC glad_glVertexAttribI2uiv;
-#define glVertexAttribI2uiv glad_glVertexAttribI2uiv
-typedef void (APIENTRYP PFNGLVERTEXATTRIBI3UIVPROC)(GLuint index, const GLuint *v);
-GLAPI PFNGLVERTEXATTRIBI3UIVPROC glad_glVertexAttribI3uiv;
-#define glVertexAttribI3uiv glad_glVertexAttribI3uiv
-typedef void (APIENTRYP PFNGLVERTEXATTRIBI4UIVPROC)(GLuint index, const GLuint *v);
-GLAPI PFNGLVERTEXATTRIBI4UIVPROC glad_glVertexAttribI4uiv;
-#define glVertexAttribI4uiv glad_glVertexAttribI4uiv
-typedef void (APIENTRYP PFNGLVERTEXATTRIBI4BVPROC)(GLuint index, const GLbyte *v);
-GLAPI PFNGLVERTEXATTRIBI4BVPROC glad_glVertexAttribI4bv;
-#define glVertexAttribI4bv glad_glVertexAttribI4bv
-typedef void (APIENTRYP PFNGLVERTEXATTRIBI4SVPROC)(GLuint index, const GLshort *v);
-GLAPI PFNGLVERTEXATTRIBI4SVPROC glad_glVertexAttribI4sv;
-#define glVertexAttribI4sv glad_glVertexAttribI4sv
-typedef void (APIENTRYP PFNGLVERTEXATTRIBI4UBVPROC)(GLuint index, const GLubyte *v);
-GLAPI PFNGLVERTEXATTRIBI4UBVPROC glad_glVertexAttribI4ubv;
-#define glVertexAttribI4ubv glad_glVertexAttribI4ubv
-typedef void (APIENTRYP PFNGLVERTEXATTRIBI4USVPROC)(GLuint index, const GLushort *v);
-GLAPI PFNGLVERTEXATTRIBI4USVPROC glad_glVertexAttribI4usv;
-#define glVertexAttribI4usv glad_glVertexAttribI4usv
-typedef void (APIENTRYP PFNGLGETUNIFORMUIVPROC)(GLuint program, GLint location, GLuint *params);
-GLAPI PFNGLGETUNIFORMUIVPROC glad_glGetUniformuiv;
-#define glGetUniformuiv glad_glGetUniformuiv
-typedef void (APIENTRYP PFNGLBINDFRAGDATALOCATIONPROC)(GLuint program, GLuint color, const GLchar *name);
-GLAPI PFNGLBINDFRAGDATALOCATIONPROC glad_glBindFragDataLocation;
-#define glBindFragDataLocation glad_glBindFragDataLocation
-typedef GLint (APIENTRYP PFNGLGETFRAGDATALOCATIONPROC)(GLuint program, const GLchar *name);
-GLAPI PFNGLGETFRAGDATALOCATIONPROC glad_glGetFragDataLocation;
-#define glGetFragDataLocation glad_glGetFragDataLocation
-typedef void (APIENTRYP PFNGLUNIFORM1UIPROC)(GLint location, GLuint v0);
-GLAPI PFNGLUNIFORM1UIPROC glad_glUniform1ui;
-#define glUniform1ui glad_glUniform1ui
-typedef void (APIENTRYP PFNGLUNIFORM2UIPROC)(GLint location, GLuint v0, GLuint v1);
-GLAPI PFNGLUNIFORM2UIPROC glad_glUniform2ui;
-#define glUniform2ui glad_glUniform2ui
-typedef void (APIENTRYP PFNGLUNIFORM3UIPROC)(GLint location, GLuint v0, GLuint v1, GLuint v2);
-GLAPI PFNGLUNIFORM3UIPROC glad_glUniform3ui;
-#define glUniform3ui glad_glUniform3ui
-typedef void (APIENTRYP PFNGLUNIFORM4UIPROC)(GLint location, GLuint v0, GLuint v1, GLuint v2, GLuint v3);
-GLAPI PFNGLUNIFORM4UIPROC glad_glUniform4ui;
-#define glUniform4ui glad_glUniform4ui
-typedef void (APIENTRYP PFNGLUNIFORM1UIVPROC)(GLint location, GLsizei count, const GLuint *value);
-GLAPI PFNGLUNIFORM1UIVPROC glad_glUniform1uiv;
-#define glUniform1uiv glad_glUniform1uiv
-typedef void (APIENTRYP PFNGLUNIFORM2UIVPROC)(GLint location, GLsizei count, const GLuint *value);
-GLAPI PFNGLUNIFORM2UIVPROC glad_glUniform2uiv;
-#define glUniform2uiv glad_glUniform2uiv
-typedef void (APIENTRYP PFNGLUNIFORM3UIVPROC)(GLint location, GLsizei count, const GLuint *value);
-GLAPI PFNGLUNIFORM3UIVPROC glad_glUniform3uiv;
-#define glUniform3uiv glad_glUniform3uiv
-typedef void (APIENTRYP PFNGLUNIFORM4UIVPROC)(GLint location, GLsizei count, const GLuint *value);
-GLAPI PFNGLUNIFORM4UIVPROC glad_glUniform4uiv;
-#define glUniform4uiv glad_glUniform4uiv
-typedef void (APIENTRYP PFNGLTEXPARAMETERIIVPROC)(GLenum target, GLenum pname, const GLint *params);
-GLAPI PFNGLTEXPARAMETERIIVPROC glad_glTexParameterIiv;
-#define glTexParameterIiv glad_glTexParameterIiv
-typedef void (APIENTRYP PFNGLTEXPARAMETERIUIVPROC)(GLenum target, GLenum pname, const GLuint *params);
-GLAPI PFNGLTEXPARAMETERIUIVPROC glad_glTexParameterIuiv;
-#define glTexParameterIuiv glad_glTexParameterIuiv
-typedef void (APIENTRYP PFNGLGETTEXPARAMETERIIVPROC)(GLenum target, GLenum pname, GLint *params);
-GLAPI PFNGLGETTEXPARAMETERIIVPROC glad_glGetTexParameterIiv;
-#define glGetTexParameterIiv glad_glGetTexParameterIiv
-typedef void (APIENTRYP PFNGLGETTEXPARAMETERIUIVPROC)(GLenum target, GLenum pname, GLuint *params);
-GLAPI PFNGLGETTEXPARAMETERIUIVPROC glad_glGetTexParameterIuiv;
-#define glGetTexParameterIuiv glad_glGetTexParameterIuiv
-typedef void (APIENTRYP PFNGLCLEARBUFFERIVPROC)(GLenum buffer, GLint drawbuffer, const GLint *value);
-GLAPI PFNGLCLEARBUFFERIVPROC glad_glClearBufferiv;
-#define glClearBufferiv glad_glClearBufferiv
-typedef void (APIENTRYP PFNGLCLEARBUFFERUIVPROC)(GLenum buffer, GLint drawbuffer, const GLuint *value);
-GLAPI PFNGLCLEARBUFFERUIVPROC glad_glClearBufferuiv;
-#define glClearBufferuiv glad_glClearBufferuiv
-typedef void (APIENTRYP PFNGLCLEARBUFFERFVPROC)(GLenum buffer, GLint drawbuffer, const GLfloat *value);
-GLAPI PFNGLCLEARBUFFERFVPROC glad_glClearBufferfv;
-#define glClearBufferfv glad_glClearBufferfv
-typedef void (APIENTRYP PFNGLCLEARBUFFERFIPROC)(GLenum buffer, GLint drawbuffer, GLfloat depth, GLint stencil);
-GLAPI PFNGLCLEARBUFFERFIPROC glad_glClearBufferfi;
-#define glClearBufferfi glad_glClearBufferfi
-typedef const GLubyte * (APIENTRYP PFNGLGETSTRINGIPROC)(GLenum name, GLuint index);
-GLAPI PFNGLGETSTRINGIPROC glad_glGetStringi;
-#define glGetStringi glad_glGetStringi
-typedef GLboolean (APIENTRYP PFNGLISRENDERBUFFERPROC)(GLuint renderbuffer);
-GLAPI PFNGLISRENDERBUFFERPROC glad_glIsRenderbuffer;
-#define glIsRenderbuffer glad_glIsRenderbuffer
-typedef void (APIENTRYP PFNGLBINDRENDERBUFFERPROC)(GLenum target, GLuint renderbuffer);
-GLAPI PFNGLBINDRENDERBUFFERPROC glad_glBindRenderbuffer;
-#define glBindRenderbuffer glad_glBindRenderbuffer
-typedef void (APIENTRYP PFNGLDELETERENDERBUFFERSPROC)(GLsizei n, const GLuint *renderbuffers);
-GLAPI PFNGLDELETERENDERBUFFERSPROC glad_glDeleteRenderbuffers;
-#define glDeleteRenderbuffers glad_glDeleteRenderbuffers
-typedef void (APIENTRYP PFNGLGENRENDERBUFFERSPROC)(GLsizei n, GLuint *renderbuffers);
-GLAPI PFNGLGENRENDERBUFFERSPROC glad_glGenRenderbuffers;
-#define glGenRenderbuffers glad_glGenRenderbuffers
-typedef void (APIENTRYP PFNGLRENDERBUFFERSTORAGEPROC)(GLenum target, GLenum internalformat, GLsizei width, GLsizei height);
-GLAPI PFNGLRENDERBUFFERSTORAGEPROC glad_glRenderbufferStorage;
-#define glRenderbufferStorage glad_glRenderbufferStorage
-typedef void (APIENTRYP PFNGLGETRENDERBUFFERPARAMETERIVPROC)(GLenum target, GLenum pname, GLint *params);
-GLAPI PFNGLGETRENDERBUFFERPARAMETERIVPROC glad_glGetRenderbufferParameteriv;
-#define glGetRenderbufferParameteriv glad_glGetRenderbufferParameteriv
-typedef GLboolean (APIENTRYP PFNGLISFRAMEBUFFERPROC)(GLuint framebuffer);
-GLAPI PFNGLISFRAMEBUFFERPROC glad_glIsFramebuffer;
-#define glIsFramebuffer glad_glIsFramebuffer
-typedef void (APIENTRYP PFNGLBINDFRAMEBUFFERPROC)(GLenum target, GLuint framebuffer);
-GLAPI PFNGLBINDFRAMEBUFFERPROC glad_glBindFramebuffer;
-#define glBindFramebuffer glad_glBindFramebuffer
-typedef void (APIENTRYP PFNGLDELETEFRAMEBUFFERSPROC)(GLsizei n, const GLuint *framebuffers);
-GLAPI PFNGLDELETEFRAMEBUFFERSPROC glad_glDeleteFramebuffers;
-#define glDeleteFramebuffers glad_glDeleteFramebuffers
-typedef void (APIENTRYP PFNGLGENFRAMEBUFFERSPROC)(GLsizei n, GLuint *framebuffers);
-GLAPI PFNGLGENFRAMEBUFFERSPROC glad_glGenFramebuffers;
-#define glGenFramebuffers glad_glGenFramebuffers
-typedef GLenum (APIENTRYP PFNGLCHECKFRAMEBUFFERSTATUSPROC)(GLenum target);
-GLAPI PFNGLCHECKFRAMEBUFFERSTATUSPROC glad_glCheckFramebufferStatus;
-#define glCheckFramebufferStatus glad_glCheckFramebufferStatus
-typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTURE1DPROC)(GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level);
-GLAPI PFNGLFRAMEBUFFERTEXTURE1DPROC glad_glFramebufferTexture1D;
-#define glFramebufferTexture1D glad_glFramebufferTexture1D
-typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTURE2DPROC)(GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level);
-GLAPI PFNGLFRAMEBUFFERTEXTURE2DPROC glad_glFramebufferTexture2D;
-#define glFramebufferTexture2D glad_glFramebufferTexture2D
-typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTURE3DPROC)(GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLint zoffset);
-GLAPI PFNGLFRAMEBUFFERTEXTURE3DPROC glad_glFramebufferTexture3D;
-#define glFramebufferTexture3D glad_glFramebufferTexture3D
-typedef void (APIENTRYP PFNGLFRAMEBUFFERRENDERBUFFERPROC)(GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer);
-GLAPI PFNGLFRAMEBUFFERRENDERBUFFERPROC glad_glFramebufferRenderbuffer;
-#define glFramebufferRenderbuffer glad_glFramebufferRenderbuffer
-typedef void (APIENTRYP PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVPROC)(GLenum target, GLenum attachment, GLenum pname, GLint *params);
-GLAPI PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVPROC glad_glGetFramebufferAttachmentParameteriv;
-#define glGetFramebufferAttachmentParameteriv glad_glGetFramebufferAttachmentParameteriv
-typedef void (APIENTRYP PFNGLGENERATEMIPMAPPROC)(GLenum target);
-GLAPI PFNGLGENERATEMIPMAPPROC glad_glGenerateMipmap;
-#define glGenerateMipmap glad_glGenerateMipmap
-typedef void (APIENTRYP PFNGLBLITFRAMEBUFFERPROC)(GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter);
-GLAPI PFNGLBLITFRAMEBUFFERPROC glad_glBlitFramebuffer;
-#define glBlitFramebuffer glad_glBlitFramebuffer
-typedef void (APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLEPROC)(GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height);
-GLAPI PFNGLRENDERBUFFERSTORAGEMULTISAMPLEPROC glad_glRenderbufferStorageMultisample;
-#define glRenderbufferStorageMultisample glad_glRenderbufferStorageMultisample
-typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTURELAYERPROC)(GLenum target, GLenum attachment, GLuint texture, GLint level, GLint layer);
-GLAPI PFNGLFRAMEBUFFERTEXTURELAYERPROC glad_glFramebufferTextureLayer;
-#define glFramebufferTextureLayer glad_glFramebufferTextureLayer
-typedef void * (APIENTRYP PFNGLMAPBUFFERRANGEPROC)(GLenum target, GLintptr offset, GLsizeiptr length, GLbitfield access);
-GLAPI PFNGLMAPBUFFERRANGEPROC glad_glMapBufferRange;
-#define glMapBufferRange glad_glMapBufferRange
-typedef void (APIENTRYP PFNGLFLUSHMAPPEDBUFFERRANGEPROC)(GLenum target, GLintptr offset, GLsizeiptr length);
-GLAPI PFNGLFLUSHMAPPEDBUFFERRANGEPROC glad_glFlushMappedBufferRange;
-#define glFlushMappedBufferRange glad_glFlushMappedBufferRange
-typedef void (APIENTRYP PFNGLBINDVERTEXARRAYPROC)(GLuint array);
-GLAPI PFNGLBINDVERTEXARRAYPROC glad_glBindVertexArray;
-#define glBindVertexArray glad_glBindVertexArray
-typedef void (APIENTRYP PFNGLDELETEVERTEXARRAYSPROC)(GLsizei n, const GLuint *arrays);
-GLAPI PFNGLDELETEVERTEXARRAYSPROC glad_glDeleteVertexArrays;
-#define glDeleteVertexArrays glad_glDeleteVertexArrays
-typedef void (APIENTRYP PFNGLGENVERTEXARRAYSPROC)(GLsizei n, GLuint *arrays);
-GLAPI PFNGLGENVERTEXARRAYSPROC glad_glGenVertexArrays;
-#define glGenVertexArrays glad_glGenVertexArrays
-typedef GLboolean (APIENTRYP PFNGLISVERTEXARRAYPROC)(GLuint array);
-GLAPI PFNGLISVERTEXARRAYPROC glad_glIsVertexArray;
-#define glIsVertexArray glad_glIsVertexArray
-#endif
-#ifndef GL_VERSION_3_1
-#define GL_VERSION_3_1 1
-GLAPI int GLAD_GL_VERSION_3_1;
-typedef void (APIENTRYP PFNGLDRAWARRAYSINSTANCEDPROC)(GLenum mode, GLint first, GLsizei count, GLsizei instancecount);
-GLAPI PFNGLDRAWARRAYSINSTANCEDPROC glad_glDrawArraysInstanced;
-#define glDrawArraysInstanced glad_glDrawArraysInstanced
-typedef void (APIENTRYP PFNGLDRAWELEMENTSINSTANCEDPROC)(GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount);
-GLAPI PFNGLDRAWELEMENTSINSTANCEDPROC glad_glDrawElementsInstanced;
-#define glDrawElementsInstanced glad_glDrawElementsInstanced
-typedef void (APIENTRYP PFNGLTEXBUFFERPROC)(GLenum target, GLenum internalformat, GLuint buffer);
-GLAPI PFNGLTEXBUFFERPROC glad_glTexBuffer;
-#define glTexBuffer glad_glTexBuffer
-typedef void (APIENTRYP PFNGLPRIMITIVERESTARTINDEXPROC)(GLuint index);
-GLAPI PFNGLPRIMITIVERESTARTINDEXPROC glad_glPrimitiveRestartIndex;
-#define glPrimitiveRestartIndex glad_glPrimitiveRestartIndex
-typedef void (APIENTRYP PFNGLCOPYBUFFERSUBDATAPROC)(GLenum readTarget, GLenum writeTarget, GLintptr readOffset, GLintptr writeOffset, GLsizeiptr size);
-GLAPI PFNGLCOPYBUFFERSUBDATAPROC glad_glCopyBufferSubData;
-#define glCopyBufferSubData glad_glCopyBufferSubData
-typedef void (APIENTRYP PFNGLGETUNIFORMINDICESPROC)(GLuint program, GLsizei uniformCount, const GLchar *const*uniformNames, GLuint *uniformIndices);
-GLAPI PFNGLGETUNIFORMINDICESPROC glad_glGetUniformIndices;
-#define glGetUniformIndices glad_glGetUniformIndices
-typedef void (APIENTRYP PFNGLGETACTIVEUNIFORMSIVPROC)(GLuint program, GLsizei uniformCount, const GLuint *uniformIndices, GLenum pname, GLint *params);
-GLAPI PFNGLGETACTIVEUNIFORMSIVPROC glad_glGetActiveUniformsiv;
-#define glGetActiveUniformsiv glad_glGetActiveUniformsiv
-typedef void (APIENTRYP PFNGLGETACTIVEUNIFORMNAMEPROC)(GLuint program, GLuint uniformIndex, GLsizei bufSize, GLsizei *length, GLchar *uniformName);
-GLAPI PFNGLGETACTIVEUNIFORMNAMEPROC glad_glGetActiveUniformName;
-#define glGetActiveUniformName glad_glGetActiveUniformName
-typedef GLuint (APIENTRYP PFNGLGETUNIFORMBLOCKINDEXPROC)(GLuint program, const GLchar *uniformBlockName);
-GLAPI PFNGLGETUNIFORMBLOCKINDEXPROC glad_glGetUniformBlockIndex;
-#define glGetUniformBlockIndex glad_glGetUniformBlockIndex
-typedef void (APIENTRYP PFNGLGETACTIVEUNIFORMBLOCKIVPROC)(GLuint program, GLuint uniformBlockIndex, GLenum pname, GLint *params);
-GLAPI PFNGLGETACTIVEUNIFORMBLOCKIVPROC glad_glGetActiveUniformBlockiv;
-#define glGetActiveUniformBlockiv glad_glGetActiveUniformBlockiv
-typedef void (APIENTRYP PFNGLGETACTIVEUNIFORMBLOCKNAMEPROC)(GLuint program, GLuint uniformBlockIndex, GLsizei bufSize, GLsizei *length, GLchar *uniformBlockName);
-GLAPI PFNGLGETACTIVEUNIFORMBLOCKNAMEPROC glad_glGetActiveUniformBlockName;
-#define glGetActiveUniformBlockName glad_glGetActiveUniformBlockName
-typedef void (APIENTRYP PFNGLUNIFORMBLOCKBINDINGPROC)(GLuint program, GLuint uniformBlockIndex, GLuint uniformBlockBinding);
-GLAPI PFNGLUNIFORMBLOCKBINDINGPROC glad_glUniformBlockBinding;
-#define glUniformBlockBinding glad_glUniformBlockBinding
-#endif
-#ifndef GL_VERSION_3_2
-#define GL_VERSION_3_2 1
-GLAPI int GLAD_GL_VERSION_3_2;
-typedef void (APIENTRYP PFNGLDRAWELEMENTSBASEVERTEXPROC)(GLenum mode, GLsizei count, GLenum type, const void *indices, GLint basevertex);
-GLAPI PFNGLDRAWELEMENTSBASEVERTEXPROC glad_glDrawElementsBaseVertex;
-#define glDrawElementsBaseVertex glad_glDrawElementsBaseVertex
-typedef void (APIENTRYP PFNGLDRAWRANGEELEMENTSBASEVERTEXPROC)(GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices, GLint basevertex);
-GLAPI PFNGLDRAWRANGEELEMENTSBASEVERTEXPROC glad_glDrawRangeElementsBaseVertex;
-#define glDrawRangeElementsBaseVertex glad_glDrawRangeElementsBaseVertex
-typedef void (APIENTRYP PFNGLDRAWELEMENTSINSTANCEDBASEVERTEXPROC)(GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei instancecount, GLint basevertex);
-GLAPI PFNGLDRAWELEMENTSINSTANCEDBASEVERTEXPROC glad_glDrawElementsInstancedBaseVertex;
-#define glDrawElementsInstancedBaseVertex glad_glDrawElementsInstancedBaseVertex
-typedef void (APIENTRYP PFNGLMULTIDRAWELEMENTSBASEVERTEXPROC)(GLenum mode, const GLsizei *count, GLenum type, const void *const*indices, GLsizei drawcount, const GLint *basevertex);
-GLAPI PFNGLMULTIDRAWELEMENTSBASEVERTEXPROC glad_glMultiDrawElementsBaseVertex;
-#define glMultiDrawElementsBaseVertex glad_glMultiDrawElementsBaseVertex
-typedef void (APIENTRYP PFNGLPROVOKINGVERTEXPROC)(GLenum mode);
-GLAPI PFNGLPROVOKINGVERTEXPROC glad_glProvokingVertex;
-#define glProvokingVertex glad_glProvokingVertex
-typedef GLsync (APIENTRYP PFNGLFENCESYNCPROC)(GLenum condition, GLbitfield flags);
-GLAPI PFNGLFENCESYNCPROC glad_glFenceSync;
-#define glFenceSync glad_glFenceSync
-typedef GLboolean (APIENTRYP PFNGLISSYNCPROC)(GLsync sync);
-GLAPI PFNGLISSYNCPROC glad_glIsSync;
-#define glIsSync glad_glIsSync
-typedef void (APIENTRYP PFNGLDELETESYNCPROC)(GLsync sync);
-GLAPI PFNGLDELETESYNCPROC glad_glDeleteSync;
-#define glDeleteSync glad_glDeleteSync
-typedef GLenum (APIENTRYP PFNGLCLIENTWAITSYNCPROC)(GLsync sync, GLbitfield flags, GLuint64 timeout);
-GLAPI PFNGLCLIENTWAITSYNCPROC glad_glClientWaitSync;
-#define glClientWaitSync glad_glClientWaitSync
-typedef void (APIENTRYP PFNGLWAITSYNCPROC)(GLsync sync, GLbitfield flags, GLuint64 timeout);
-GLAPI PFNGLWAITSYNCPROC glad_glWaitSync;
-#define glWaitSync glad_glWaitSync
-typedef void (APIENTRYP PFNGLGETINTEGER64VPROC)(GLenum pname, GLint64 *data);
-GLAPI PFNGLGETINTEGER64VPROC glad_glGetInteger64v;
-#define glGetInteger64v glad_glGetInteger64v
-typedef void (APIENTRYP PFNGLGETSYNCIVPROC)(GLsync sync, GLenum pname, GLsizei count, GLsizei *length, GLint *values);
-GLAPI PFNGLGETSYNCIVPROC glad_glGetSynciv;
-#define glGetSynciv glad_glGetSynciv
-typedef void (APIENTRYP PFNGLGETINTEGER64I_VPROC)(GLenum target, GLuint index, GLint64 *data);
-GLAPI PFNGLGETINTEGER64I_VPROC glad_glGetInteger64i_v;
-#define glGetInteger64i_v glad_glGetInteger64i_v
-typedef void (APIENTRYP PFNGLGETBUFFERPARAMETERI64VPROC)(GLenum target, GLenum pname, GLint64 *params);
-GLAPI PFNGLGETBUFFERPARAMETERI64VPROC glad_glGetBufferParameteri64v;
-#define glGetBufferParameteri64v glad_glGetBufferParameteri64v
-typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTUREPROC)(GLenum target, GLenum attachment, GLuint texture, GLint level);
-GLAPI PFNGLFRAMEBUFFERTEXTUREPROC glad_glFramebufferTexture;
-#define glFramebufferTexture glad_glFramebufferTexture
-typedef void (APIENTRYP PFNGLTEXIMAGE2DMULTISAMPLEPROC)(GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations);
-GLAPI PFNGLTEXIMAGE2DMULTISAMPLEPROC glad_glTexImage2DMultisample;
-#define glTexImage2DMultisample glad_glTexImage2DMultisample
-typedef void (APIENTRYP PFNGLTEXIMAGE3DMULTISAMPLEPROC)(GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations);
-GLAPI PFNGLTEXIMAGE3DMULTISAMPLEPROC glad_glTexImage3DMultisample;
-#define glTexImage3DMultisample glad_glTexImage3DMultisample
-typedef void (APIENTRYP PFNGLGETMULTISAMPLEFVPROC)(GLenum pname, GLuint index, GLfloat *val);
-GLAPI PFNGLGETMULTISAMPLEFVPROC glad_glGetMultisamplefv;
-#define glGetMultisamplefv glad_glGetMultisamplefv
-typedef void (APIENTRYP PFNGLSAMPLEMASKIPROC)(GLuint maskNumber, GLbitfield mask);
-GLAPI PFNGLSAMPLEMASKIPROC glad_glSampleMaski;
-#define glSampleMaski glad_glSampleMaski
-#endif
-#ifndef GL_VERSION_3_3
-#define GL_VERSION_3_3 1
-GLAPI int GLAD_GL_VERSION_3_3;
-typedef void (APIENTRYP PFNGLBINDFRAGDATALOCATIONINDEXEDPROC)(GLuint program, GLuint colorNumber, GLuint index, const GLchar *name);
-GLAPI PFNGLBINDFRAGDATALOCATIONINDEXEDPROC glad_glBindFragDataLocationIndexed;
-#define glBindFragDataLocationIndexed glad_glBindFragDataLocationIndexed
-typedef GLint (APIENTRYP PFNGLGETFRAGDATAINDEXPROC)(GLuint program, const GLchar *name);
-GLAPI PFNGLGETFRAGDATAINDEXPROC glad_glGetFragDataIndex;
-#define glGetFragDataIndex glad_glGetFragDataIndex
-typedef void (APIENTRYP PFNGLGENSAMPLERSPROC)(GLsizei count, GLuint *samplers);
-GLAPI PFNGLGENSAMPLERSPROC glad_glGenSamplers;
-#define glGenSamplers glad_glGenSamplers
-typedef void (APIENTRYP PFNGLDELETESAMPLERSPROC)(GLsizei count, const GLuint *samplers);
-GLAPI PFNGLDELETESAMPLERSPROC glad_glDeleteSamplers;
-#define glDeleteSamplers glad_glDeleteSamplers
-typedef GLboolean (APIENTRYP PFNGLISSAMPLERPROC)(GLuint sampler);
-GLAPI PFNGLISSAMPLERPROC glad_glIsSampler;
-#define glIsSampler glad_glIsSampler
-typedef void (APIENTRYP PFNGLBINDSAMPLERPROC)(GLuint unit, GLuint sampler);
-GLAPI PFNGLBINDSAMPLERPROC glad_glBindSampler;
-#define glBindSampler glad_glBindSampler
-typedef void (APIENTRYP PFNGLSAMPLERPARAMETERIPROC)(GLuint sampler, GLenum pname, GLint param);
-GLAPI PFNGLSAMPLERPARAMETERIPROC glad_glSamplerParameteri;
-#define glSamplerParameteri glad_glSamplerParameteri
-typedef void (APIENTRYP PFNGLSAMPLERPARAMETERIVPROC)(GLuint sampler, GLenum pname, const GLint *param);
-GLAPI PFNGLSAMPLERPARAMETERIVPROC glad_glSamplerParameteriv;
-#define glSamplerParameteriv glad_glSamplerParameteriv
-typedef void (APIENTRYP PFNGLSAMPLERPARAMETERFPROC)(GLuint sampler, GLenum pname, GLfloat param);
-GLAPI PFNGLSAMPLERPARAMETERFPROC glad_glSamplerParameterf;
-#define glSamplerParameterf glad_glSamplerParameterf
-typedef void (APIENTRYP PFNGLSAMPLERPARAMETERFVPROC)(GLuint sampler, GLenum pname, const GLfloat *param);
-GLAPI PFNGLSAMPLERPARAMETERFVPROC glad_glSamplerParameterfv;
-#define glSamplerParameterfv glad_glSamplerParameterfv
-typedef void (APIENTRYP PFNGLSAMPLERPARAMETERIIVPROC)(GLuint sampler, GLenum pname, const GLint *param);
-GLAPI PFNGLSAMPLERPARAMETERIIVPROC glad_glSamplerParameterIiv;
-#define glSamplerParameterIiv glad_glSamplerParameterIiv
-typedef void (APIENTRYP PFNGLSAMPLERPARAMETERIUIVPROC)(GLuint sampler, GLenum pname, const GLuint *param);
-GLAPI PFNGLSAMPLERPARAMETERIUIVPROC glad_glSamplerParameterIuiv;
-#define glSamplerParameterIuiv glad_glSamplerParameterIuiv
-typedef void (APIENTRYP PFNGLGETSAMPLERPARAMETERIVPROC)(GLuint sampler, GLenum pname, GLint *params);
-GLAPI PFNGLGETSAMPLERPARAMETERIVPROC glad_glGetSamplerParameteriv;
-#define glGetSamplerParameteriv glad_glGetSamplerParameteriv
-typedef void (APIENTRYP PFNGLGETSAMPLERPARAMETERIIVPROC)(GLuint sampler, GLenum pname, GLint *params);
-GLAPI PFNGLGETSAMPLERPARAMETERIIVPROC glad_glGetSamplerParameterIiv;
-#define glGetSamplerParameterIiv glad_glGetSamplerParameterIiv
-typedef void (APIENTRYP PFNGLGETSAMPLERPARAMETERFVPROC)(GLuint sampler, GLenum pname, GLfloat *params);
-GLAPI PFNGLGETSAMPLERPARAMETERFVPROC glad_glGetSamplerParameterfv;
-#define glGetSamplerParameterfv glad_glGetSamplerParameterfv
-typedef void (APIENTRYP PFNGLGETSAMPLERPARAMETERIUIVPROC)(GLuint sampler, GLenum pname, GLuint *params);
-GLAPI PFNGLGETSAMPLERPARAMETERIUIVPROC glad_glGetSamplerParameterIuiv;
-#define glGetSamplerParameterIuiv glad_glGetSamplerParameterIuiv
-typedef void (APIENTRYP PFNGLQUERYCOUNTERPROC)(GLuint id, GLenum target);
-GLAPI PFNGLQUERYCOUNTERPROC glad_glQueryCounter;
-#define glQueryCounter glad_glQueryCounter
-typedef void (APIENTRYP PFNGLGETQUERYOBJECTI64VPROC)(GLuint id, GLenum pname, GLint64 *params);
-GLAPI PFNGLGETQUERYOBJECTI64VPROC glad_glGetQueryObjecti64v;
-#define glGetQueryObjecti64v glad_glGetQueryObjecti64v
-typedef void (APIENTRYP PFNGLGETQUERYOBJECTUI64VPROC)(GLuint id, GLenum pname, GLuint64 *params);
-GLAPI PFNGLGETQUERYOBJECTUI64VPROC glad_glGetQueryObjectui64v;
-#define glGetQueryObjectui64v glad_glGetQueryObjectui64v
-typedef void (APIENTRYP PFNGLVERTEXATTRIBDIVISORPROC)(GLuint index, GLuint divisor);
-GLAPI PFNGLVERTEXATTRIBDIVISORPROC glad_glVertexAttribDivisor;
-#define glVertexAttribDivisor glad_glVertexAttribDivisor
-typedef void (APIENTRYP PFNGLVERTEXATTRIBP1UIPROC)(GLuint index, GLenum type, GLboolean normalized, GLuint value);
-GLAPI PFNGLVERTEXATTRIBP1UIPROC glad_glVertexAttribP1ui;
-#define glVertexAttribP1ui glad_glVertexAttribP1ui
-typedef void (APIENTRYP PFNGLVERTEXATTRIBP1UIVPROC)(GLuint index, GLenum type, GLboolean normalized, const GLuint *value);
-GLAPI PFNGLVERTEXATTRIBP1UIVPROC glad_glVertexAttribP1uiv;
-#define glVertexAttribP1uiv glad_glVertexAttribP1uiv
-typedef void (APIENTRYP PFNGLVERTEXATTRIBP2UIPROC)(GLuint index, GLenum type, GLboolean normalized, GLuint value);
-GLAPI PFNGLVERTEXATTRIBP2UIPROC glad_glVertexAttribP2ui;
-#define glVertexAttribP2ui glad_glVertexAttribP2ui
-typedef void (APIENTRYP PFNGLVERTEXATTRIBP2UIVPROC)(GLuint index, GLenum type, GLboolean normalized, const GLuint *value);
-GLAPI PFNGLVERTEXATTRIBP2UIVPROC glad_glVertexAttribP2uiv;
-#define glVertexAttribP2uiv glad_glVertexAttribP2uiv
-typedef void (APIENTRYP PFNGLVERTEXATTRIBP3UIPROC)(GLuint index, GLenum type, GLboolean normalized, GLuint value);
-GLAPI PFNGLVERTEXATTRIBP3UIPROC glad_glVertexAttribP3ui;
-#define glVertexAttribP3ui glad_glVertexAttribP3ui
-typedef void (APIENTRYP PFNGLVERTEXATTRIBP3UIVPROC)(GLuint index, GLenum type, GLboolean normalized, const GLuint *value);
-GLAPI PFNGLVERTEXATTRIBP3UIVPROC glad_glVertexAttribP3uiv;
-#define glVertexAttribP3uiv glad_glVertexAttribP3uiv
-typedef void (APIENTRYP PFNGLVERTEXATTRIBP4UIPROC)(GLuint index, GLenum type, GLboolean normalized, GLuint value);
-GLAPI PFNGLVERTEXATTRIBP4UIPROC glad_glVertexAttribP4ui;
-#define glVertexAttribP4ui glad_glVertexAttribP4ui
-typedef void (APIENTRYP PFNGLVERTEXATTRIBP4UIVPROC)(GLuint index, GLenum type, GLboolean normalized, const GLuint *value);
-GLAPI PFNGLVERTEXATTRIBP4UIVPROC glad_glVertexAttribP4uiv;
-#define glVertexAttribP4uiv glad_glVertexAttribP4uiv
-typedef void (APIENTRYP PFNGLVERTEXP2UIPROC)(GLenum type, GLuint value);
-GLAPI PFNGLVERTEXP2UIPROC glad_glVertexP2ui;
-#define glVertexP2ui glad_glVertexP2ui
-typedef void (APIENTRYP PFNGLVERTEXP2UIVPROC)(GLenum type, const GLuint *value);
-GLAPI PFNGLVERTEXP2UIVPROC glad_glVertexP2uiv;
-#define glVertexP2uiv glad_glVertexP2uiv
-typedef void (APIENTRYP PFNGLVERTEXP3UIPROC)(GLenum type, GLuint value);
-GLAPI PFNGLVERTEXP3UIPROC glad_glVertexP3ui;
-#define glVertexP3ui glad_glVertexP3ui
-typedef void (APIENTRYP PFNGLVERTEXP3UIVPROC)(GLenum type, const GLuint *value);
-GLAPI PFNGLVERTEXP3UIVPROC glad_glVertexP3uiv;
-#define glVertexP3uiv glad_glVertexP3uiv
-typedef void (APIENTRYP PFNGLVERTEXP4UIPROC)(GLenum type, GLuint value);
-GLAPI PFNGLVERTEXP4UIPROC glad_glVertexP4ui;
-#define glVertexP4ui glad_glVertexP4ui
-typedef void (APIENTRYP PFNGLVERTEXP4UIVPROC)(GLenum type, const GLuint *value);
-GLAPI PFNGLVERTEXP4UIVPROC glad_glVertexP4uiv;
-#define glVertexP4uiv glad_glVertexP4uiv
-typedef void (APIENTRYP PFNGLTEXCOORDP1UIPROC)(GLenum type, GLuint coords);
-GLAPI PFNGLTEXCOORDP1UIPROC glad_glTexCoordP1ui;
-#define glTexCoordP1ui glad_glTexCoordP1ui
-typedef void (APIENTRYP PFNGLTEXCOORDP1UIVPROC)(GLenum type, const GLuint *coords);
-GLAPI PFNGLTEXCOORDP1UIVPROC glad_glTexCoordP1uiv;
-#define glTexCoordP1uiv glad_glTexCoordP1uiv
-typedef void (APIENTRYP PFNGLTEXCOORDP2UIPROC)(GLenum type, GLuint coords);
-GLAPI PFNGLTEXCOORDP2UIPROC glad_glTexCoordP2ui;
-#define glTexCoordP2ui glad_glTexCoordP2ui
-typedef void (APIENTRYP PFNGLTEXCOORDP2UIVPROC)(GLenum type, const GLuint *coords);
-GLAPI PFNGLTEXCOORDP2UIVPROC glad_glTexCoordP2uiv;
-#define glTexCoordP2uiv glad_glTexCoordP2uiv
-typedef void (APIENTRYP PFNGLTEXCOORDP3UIPROC)(GLenum type, GLuint coords);
-GLAPI PFNGLTEXCOORDP3UIPROC glad_glTexCoordP3ui;
-#define glTexCoordP3ui glad_glTexCoordP3ui
-typedef void (APIENTRYP PFNGLTEXCOORDP3UIVPROC)(GLenum type, const GLuint *coords);
-GLAPI PFNGLTEXCOORDP3UIVPROC glad_glTexCoordP3uiv;
-#define glTexCoordP3uiv glad_glTexCoordP3uiv
-typedef void (APIENTRYP PFNGLTEXCOORDP4UIPROC)(GLenum type, GLuint coords);
-GLAPI PFNGLTEXCOORDP4UIPROC glad_glTexCoordP4ui;
-#define glTexCoordP4ui glad_glTexCoordP4ui
-typedef void (APIENTRYP PFNGLTEXCOORDP4UIVPROC)(GLenum type, const GLuint *coords);
-GLAPI PFNGLTEXCOORDP4UIVPROC glad_glTexCoordP4uiv;
-#define glTexCoordP4uiv glad_glTexCoordP4uiv
-typedef void (APIENTRYP PFNGLMULTITEXCOORDP1UIPROC)(GLenum texture, GLenum type, GLuint coords);
-GLAPI PFNGLMULTITEXCOORDP1UIPROC glad_glMultiTexCoordP1ui;
-#define glMultiTexCoordP1ui glad_glMultiTexCoordP1ui
-typedef void (APIENTRYP PFNGLMULTITEXCOORDP1UIVPROC)(GLenum texture, GLenum type, const GLuint *coords);
-GLAPI PFNGLMULTITEXCOORDP1UIVPROC glad_glMultiTexCoordP1uiv;
-#define glMultiTexCoordP1uiv glad_glMultiTexCoordP1uiv
-typedef void (APIENTRYP PFNGLMULTITEXCOORDP2UIPROC)(GLenum texture, GLenum type, GLuint coords);
-GLAPI PFNGLMULTITEXCOORDP2UIPROC glad_glMultiTexCoordP2ui;
-#define glMultiTexCoordP2ui glad_glMultiTexCoordP2ui
-typedef void (APIENTRYP PFNGLMULTITEXCOORDP2UIVPROC)(GLenum texture, GLenum type, const GLuint *coords);
-GLAPI PFNGLMULTITEXCOORDP2UIVPROC glad_glMultiTexCoordP2uiv;
-#define glMultiTexCoordP2uiv glad_glMultiTexCoordP2uiv
-typedef void (APIENTRYP PFNGLMULTITEXCOORDP3UIPROC)(GLenum texture, GLenum type, GLuint coords);
-GLAPI PFNGLMULTITEXCOORDP3UIPROC glad_glMultiTexCoordP3ui;
-#define glMultiTexCoordP3ui glad_glMultiTexCoordP3ui
-typedef void (APIENTRYP PFNGLMULTITEXCOORDP3UIVPROC)(GLenum texture, GLenum type, const GLuint *coords);
-GLAPI PFNGLMULTITEXCOORDP3UIVPROC glad_glMultiTexCoordP3uiv;
-#define glMultiTexCoordP3uiv glad_glMultiTexCoordP3uiv
-typedef void (APIENTRYP PFNGLMULTITEXCOORDP4UIPROC)(GLenum texture, GLenum type, GLuint coords);
-GLAPI PFNGLMULTITEXCOORDP4UIPROC glad_glMultiTexCoordP4ui;
-#define glMultiTexCoordP4ui glad_glMultiTexCoordP4ui
-typedef void (APIENTRYP PFNGLMULTITEXCOORDP4UIVPROC)(GLenum texture, GLenum type, const GLuint *coords);
-GLAPI PFNGLMULTITEXCOORDP4UIVPROC glad_glMultiTexCoordP4uiv;
-#define glMultiTexCoordP4uiv glad_glMultiTexCoordP4uiv
-typedef void (APIENTRYP PFNGLNORMALP3UIPROC)(GLenum type, GLuint coords);
-GLAPI PFNGLNORMALP3UIPROC glad_glNormalP3ui;
-#define glNormalP3ui glad_glNormalP3ui
-typedef void (APIENTRYP PFNGLNORMALP3UIVPROC)(GLenum type, const GLuint *coords);
-GLAPI PFNGLNORMALP3UIVPROC glad_glNormalP3uiv;
-#define glNormalP3uiv glad_glNormalP3uiv
-typedef void (APIENTRYP PFNGLCOLORP3UIPROC)(GLenum type, GLuint color);
-GLAPI PFNGLCOLORP3UIPROC glad_glColorP3ui;
-#define glColorP3ui glad_glColorP3ui
-typedef void (APIENTRYP PFNGLCOLORP3UIVPROC)(GLenum type, const GLuint *color);
-GLAPI PFNGLCOLORP3UIVPROC glad_glColorP3uiv;
-#define glColorP3uiv glad_glColorP3uiv
-typedef void (APIENTRYP PFNGLCOLORP4UIPROC)(GLenum type, GLuint color);
-GLAPI PFNGLCOLORP4UIPROC glad_glColorP4ui;
-#define glColorP4ui glad_glColorP4ui
-typedef void (APIENTRYP PFNGLCOLORP4UIVPROC)(GLenum type, const GLuint *color);
-GLAPI PFNGLCOLORP4UIVPROC glad_glColorP4uiv;
-#define glColorP4uiv glad_glColorP4uiv
-typedef void (APIENTRYP PFNGLSECONDARYCOLORP3UIPROC)(GLenum type, GLuint color);
-GLAPI PFNGLSECONDARYCOLORP3UIPROC glad_glSecondaryColorP3ui;
-#define glSecondaryColorP3ui glad_glSecondaryColorP3ui
-typedef void (APIENTRYP PFNGLSECONDARYCOLORP3UIVPROC)(GLenum type, const GLuint *color);
-GLAPI PFNGLSECONDARYCOLORP3UIVPROC glad_glSecondaryColorP3uiv;
-#define glSecondaryColorP3uiv glad_glSecondaryColorP3uiv
-#endif
-#define GL_DEBUG_OUTPUT_SYNCHRONOUS_ARB 0x8242
-#define GL_DEBUG_NEXT_LOGGED_MESSAGE_LENGTH_ARB 0x8243
-#define GL_DEBUG_CALLBACK_FUNCTION_ARB 0x8244
-#define GL_DEBUG_CALLBACK_USER_PARAM_ARB 0x8245
-#define GL_DEBUG_SOURCE_API_ARB 0x8246
-#define GL_DEBUG_SOURCE_WINDOW_SYSTEM_ARB 0x8247
-#define GL_DEBUG_SOURCE_SHADER_COMPILER_ARB 0x8248
-#define GL_DEBUG_SOURCE_THIRD_PARTY_ARB 0x8249
-#define GL_DEBUG_SOURCE_APPLICATION_ARB 0x824A
-#define GL_DEBUG_SOURCE_OTHER_ARB 0x824B
-#define GL_DEBUG_TYPE_ERROR_ARB 0x824C
-#define GL_DEBUG_TYPE_DEPRECATED_BEHAVIOR_ARB 0x824D
-#define GL_DEBUG_TYPE_UNDEFINED_BEHAVIOR_ARB 0x824E
-#define GL_DEBUG_TYPE_PORTABILITY_ARB 0x824F
-#define GL_DEBUG_TYPE_PERFORMANCE_ARB 0x8250
-#define GL_DEBUG_TYPE_OTHER_ARB 0x8251
-#define GL_MAX_DEBUG_MESSAGE_LENGTH_ARB 0x9143
-#define GL_MAX_DEBUG_LOGGED_MESSAGES_ARB 0x9144
-#define GL_DEBUG_LOGGED_MESSAGES_ARB 0x9145
-#define GL_DEBUG_SEVERITY_HIGH_ARB 0x9146
-#define GL_DEBUG_SEVERITY_MEDIUM_ARB 0x9147
-#define GL_DEBUG_SEVERITY_LOW_ARB 0x9148
-#define GL_READ_FRAMEBUFFER_EXT 0x8CA8
-#define GL_DRAW_FRAMEBUFFER_EXT 0x8CA9
-#define GL_DRAW_FRAMEBUFFER_BINDING_EXT 0x8CA6
-#define GL_READ_FRAMEBUFFER_BINDING_EXT 0x8CAA
-#define GL_RENDERBUFFER_SAMPLES_EXT 0x8CAB
-#define GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE_EXT 0x8D56
-#define GL_MAX_SAMPLES_EXT 0x8D57
-#define GL_INVALID_FRAMEBUFFER_OPERATION_EXT 0x0506
-#define GL_MAX_RENDERBUFFER_SIZE_EXT 0x84E8
-#define GL_FRAMEBUFFER_BINDING_EXT 0x8CA6
-#define GL_RENDERBUFFER_BINDING_EXT 0x8CA7
-#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE_EXT 0x8CD0
-#define GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME_EXT 0x8CD1
-#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL_EXT 0x8CD2
-#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE_EXT 0x8CD3
-#define GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_3D_ZOFFSET_EXT 0x8CD4
-#define GL_FRAMEBUFFER_COMPLETE_EXT 0x8CD5
-#define GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT_EXT 0x8CD6
-#define GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT_EXT 0x8CD7
-#define GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS_EXT 0x8CD9
-#define GL_FRAMEBUFFER_INCOMPLETE_FORMATS_EXT 0x8CDA
-#define GL_FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER_EXT 0x8CDB
-#define GL_FRAMEBUFFER_INCOMPLETE_READ_BUFFER_EXT 0x8CDC
-#define GL_FRAMEBUFFER_UNSUPPORTED_EXT 0x8CDD
-#define GL_MAX_COLOR_ATTACHMENTS_EXT 0x8CDF
-#define GL_COLOR_ATTACHMENT0_EXT 0x8CE0
-#define GL_COLOR_ATTACHMENT1_EXT 0x8CE1
-#define GL_COLOR_ATTACHMENT2_EXT 0x8CE2
-#define GL_COLOR_ATTACHMENT3_EXT 0x8CE3
-#define GL_COLOR_ATTACHMENT4_EXT 0x8CE4
-#define GL_COLOR_ATTACHMENT5_EXT 0x8CE5
-#define GL_COLOR_ATTACHMENT6_EXT 0x8CE6
-#define GL_COLOR_ATTACHMENT7_EXT 0x8CE7
-#define GL_COLOR_ATTACHMENT8_EXT 0x8CE8
-#define GL_COLOR_ATTACHMENT9_EXT 0x8CE9
-#define GL_COLOR_ATTACHMENT10_EXT 0x8CEA
-#define GL_COLOR_ATTACHMENT11_EXT 0x8CEB
-#define GL_COLOR_ATTACHMENT12_EXT 0x8CEC
-#define GL_COLOR_ATTACHMENT13_EXT 0x8CED
-#define GL_COLOR_ATTACHMENT14_EXT 0x8CEE
-#define GL_COLOR_ATTACHMENT15_EXT 0x8CEF
-#define GL_DEPTH_ATTACHMENT_EXT 0x8D00
-#define GL_STENCIL_ATTACHMENT_EXT 0x8D20
-#define GL_FRAMEBUFFER_EXT 0x8D40
-#define GL_RENDERBUFFER_EXT 0x8D41
-#define GL_RENDERBUFFER_WIDTH_EXT 0x8D42
-#define GL_RENDERBUFFER_HEIGHT_EXT 0x8D43
-#define GL_RENDERBUFFER_INTERNAL_FORMAT_EXT 0x8D44
-#define GL_STENCIL_INDEX1_EXT 0x8D46
-#define GL_STENCIL_INDEX4_EXT 0x8D47
-#define GL_STENCIL_INDEX8_EXT 0x8D48
-#define GL_STENCIL_INDEX16_EXT 0x8D49
-#define GL_RENDERBUFFER_RED_SIZE_EXT 0x8D50
-#define GL_RENDERBUFFER_GREEN_SIZE_EXT 0x8D51
-#define GL_RENDERBUFFER_BLUE_SIZE_EXT 0x8D52
-#define GL_RENDERBUFFER_ALPHA_SIZE_EXT 0x8D53
-#define GL_RENDERBUFFER_DEPTH_SIZE_EXT 0x8D54
-#define GL_RENDERBUFFER_STENCIL_SIZE_EXT 0x8D55
-#ifndef GL_ARB_debug_output
-#define GL_ARB_debug_output 1
-GLAPI int GLAD_GL_ARB_debug_output;
-typedef void (APIENTRYP PFNGLDEBUGMESSAGECONTROLARBPROC)(GLenum source, GLenum type, GLenum severity, GLsizei count, const GLuint *ids, GLboolean enabled);
-GLAPI PFNGLDEBUGMESSAGECONTROLARBPROC glad_glDebugMessageControlARB;
-#define glDebugMessageControlARB glad_glDebugMessageControlARB
-typedef void (APIENTRYP PFNGLDEBUGMESSAGEINSERTARBPROC)(GLenum source, GLenum type, GLuint id, GLenum severity, GLsizei length, const GLchar *buf);
-GLAPI PFNGLDEBUGMESSAGEINSERTARBPROC glad_glDebugMessageInsertARB;
-#define glDebugMessageInsertARB glad_glDebugMessageInsertARB
-typedef void (APIENTRYP PFNGLDEBUGMESSAGECALLBACKARBPROC)(GLDEBUGPROCARB callback, const void *userParam);
-GLAPI PFNGLDEBUGMESSAGECALLBACKARBPROC glad_glDebugMessageCallbackARB;
-#define glDebugMessageCallbackARB glad_glDebugMessageCallbackARB
-typedef GLuint (APIENTRYP PFNGLGETDEBUGMESSAGELOGARBPROC)(GLuint count, GLsizei bufSize, GLenum *sources, GLenum *types, GLuint *ids, GLenum *severities, GLsizei *lengths, GLchar *messageLog);
-GLAPI PFNGLGETDEBUGMESSAGELOGARBPROC glad_glGetDebugMessageLogARB;
-#define glGetDebugMessageLogARB glad_glGetDebugMessageLogARB
-#endif
-#ifndef GL_ARB_framebuffer_object
-#define GL_ARB_framebuffer_object 1
-GLAPI int GLAD_GL_ARB_framebuffer_object;
-#endif
-#ifndef GL_EXT_framebuffer_blit
-#define GL_EXT_framebuffer_blit 1
-GLAPI int GLAD_GL_EXT_framebuffer_blit;
-typedef void (APIENTRYP PFNGLBLITFRAMEBUFFEREXTPROC)(GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter);
-GLAPI PFNGLBLITFRAMEBUFFEREXTPROC glad_glBlitFramebufferEXT;
-#define glBlitFramebufferEXT glad_glBlitFramebufferEXT
-#endif
-#ifndef GL_EXT_framebuffer_multisample
-#define GL_EXT_framebuffer_multisample 1
-GLAPI int GLAD_GL_EXT_framebuffer_multisample;
-typedef void (APIENTRYP PFNGLRENDERBUFFERSTORAGEMULTISAMPLEEXTPROC)(GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height);
-GLAPI PFNGLRENDERBUFFERSTORAGEMULTISAMPLEEXTPROC glad_glRenderbufferStorageMultisampleEXT;
-#define glRenderbufferStorageMultisampleEXT glad_glRenderbufferStorageMultisampleEXT
-#endif
-#ifndef GL_EXT_framebuffer_object
-#define GL_EXT_framebuffer_object 1
-GLAPI int GLAD_GL_EXT_framebuffer_object;
-typedef GLboolean (APIENTRYP PFNGLISRENDERBUFFEREXTPROC)(GLuint renderbuffer);
-GLAPI PFNGLISRENDERBUFFEREXTPROC glad_glIsRenderbufferEXT;
-#define glIsRenderbufferEXT glad_glIsRenderbufferEXT
-typedef void (APIENTRYP PFNGLBINDRENDERBUFFEREXTPROC)(GLenum target, GLuint renderbuffer);
-GLAPI PFNGLBINDRENDERBUFFEREXTPROC glad_glBindRenderbufferEXT;
-#define glBindRenderbufferEXT glad_glBindRenderbufferEXT
-typedef void (APIENTRYP PFNGLDELETERENDERBUFFERSEXTPROC)(GLsizei n, const GLuint *renderbuffers);
-GLAPI PFNGLDELETERENDERBUFFERSEXTPROC glad_glDeleteRenderbuffersEXT;
-#define glDeleteRenderbuffersEXT glad_glDeleteRenderbuffersEXT
-typedef void (APIENTRYP PFNGLGENRENDERBUFFERSEXTPROC)(GLsizei n, GLuint *renderbuffers);
-GLAPI PFNGLGENRENDERBUFFERSEXTPROC glad_glGenRenderbuffersEXT;
-#define glGenRenderbuffersEXT glad_glGenRenderbuffersEXT
-typedef void (APIENTRYP PFNGLRENDERBUFFERSTORAGEEXTPROC)(GLenum target, GLenum internalformat, GLsizei width, GLsizei height);
-GLAPI PFNGLRENDERBUFFERSTORAGEEXTPROC glad_glRenderbufferStorageEXT;
-#define glRenderbufferStorageEXT glad_glRenderbufferStorageEXT
-typedef void (APIENTRYP PFNGLGETRENDERBUFFERPARAMETERIVEXTPROC)(GLenum target, GLenum pname, GLint *params);
-GLAPI PFNGLGETRENDERBUFFERPARAMETERIVEXTPROC glad_glGetRenderbufferParameterivEXT;
-#define glGetRenderbufferParameterivEXT glad_glGetRenderbufferParameterivEXT
-typedef GLboolean (APIENTRYP PFNGLISFRAMEBUFFEREXTPROC)(GLuint framebuffer);
-GLAPI PFNGLISFRAMEBUFFEREXTPROC glad_glIsFramebufferEXT;
-#define glIsFramebufferEXT glad_glIsFramebufferEXT
-typedef void (APIENTRYP PFNGLBINDFRAMEBUFFEREXTPROC)(GLenum target, GLuint framebuffer);
-GLAPI PFNGLBINDFRAMEBUFFEREXTPROC glad_glBindFramebufferEXT;
-#define glBindFramebufferEXT glad_glBindFramebufferEXT
-typedef void (APIENTRYP PFNGLDELETEFRAMEBUFFERSEXTPROC)(GLsizei n, const GLuint *framebuffers);
-GLAPI PFNGLDELETEFRAMEBUFFERSEXTPROC glad_glDeleteFramebuffersEXT;
-#define glDeleteFramebuffersEXT glad_glDeleteFramebuffersEXT
-typedef void (APIENTRYP PFNGLGENFRAMEBUFFERSEXTPROC)(GLsizei n, GLuint *framebuffers);
-GLAPI PFNGLGENFRAMEBUFFERSEXTPROC glad_glGenFramebuffersEXT;
-#define glGenFramebuffersEXT glad_glGenFramebuffersEXT
-typedef GLenum (APIENTRYP PFNGLCHECKFRAMEBUFFERSTATUSEXTPROC)(GLenum target);
-GLAPI PFNGLCHECKFRAMEBUFFERSTATUSEXTPROC glad_glCheckFramebufferStatusEXT;
-#define glCheckFramebufferStatusEXT glad_glCheckFramebufferStatusEXT
-typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTURE1DEXTPROC)(GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level);
-GLAPI PFNGLFRAMEBUFFERTEXTURE1DEXTPROC glad_glFramebufferTexture1DEXT;
-#define glFramebufferTexture1DEXT glad_glFramebufferTexture1DEXT
-typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTURE2DEXTPROC)(GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level);
-GLAPI PFNGLFRAMEBUFFERTEXTURE2DEXTPROC glad_glFramebufferTexture2DEXT;
-#define glFramebufferTexture2DEXT glad_glFramebufferTexture2DEXT
-typedef void (APIENTRYP PFNGLFRAMEBUFFERTEXTURE3DEXTPROC)(GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level, GLint zoffset);
-GLAPI PFNGLFRAMEBUFFERTEXTURE3DEXTPROC glad_glFramebufferTexture3DEXT;
-#define glFramebufferTexture3DEXT glad_glFramebufferTexture3DEXT
-typedef void (APIENTRYP PFNGLFRAMEBUFFERRENDERBUFFEREXTPROC)(GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer);
-GLAPI PFNGLFRAMEBUFFERRENDERBUFFEREXTPROC glad_glFramebufferRenderbufferEXT;
-#define glFramebufferRenderbufferEXT glad_glFramebufferRenderbufferEXT
-typedef void (APIENTRYP PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVEXTPROC)(GLenum target, GLenum attachment, GLenum pname, GLint *params);
-GLAPI PFNGLGETFRAMEBUFFERATTACHMENTPARAMETERIVEXTPROC glad_glGetFramebufferAttachmentParameterivEXT;
-#define glGetFramebufferAttachmentParameterivEXT glad_glGetFramebufferAttachmentParameterivEXT
-typedef void (APIENTRYP PFNGLGENERATEMIPMAPEXTPROC)(GLenum target);
-GLAPI PFNGLGENERATEMIPMAPEXTPROC glad_glGenerateMipmapEXT;
-#define glGenerateMipmapEXT glad_glGenerateMipmapEXT
-#endif
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif
diff --git a/thirdparty/glad/glad/glx.h b/thirdparty/glad/glad/glx.h
new file mode 100644
index 0000000000..ac115fa63b
--- /dev/null
+++ b/thirdparty/glad/glad/glx.h
@@ -0,0 +1,605 @@
+/**
+ * Loader generated by glad 2.0.2 on Tue Nov 15 09:49:49 2022
+ *
+ * SPDX-License-Identifier: (WTFPL OR CC0-1.0) AND Apache-2.0
+ *
+ * Generator: C/C++
+ * Specification: glx
+ * Extensions: 6
+ *
+ * APIs:
+ * - glx=1.4
+ *
+ * Options:
+ * - ALIAS = False
+ * - DEBUG = False
+ * - HEADER_ONLY = False
+ * - LOADER = True
+ * - MX = False
+ * - ON_DEMAND = False
+ *
+ * Commandline:
+ * --api='glx=1.4' --extensions='GLX_ARB_create_context,GLX_ARB_create_context_profile,GLX_ARB_get_proc_address,GLX_EXT_swap_control,GLX_MESA_swap_control,GLX_SGI_swap_control' c --loader
+ *
+ * Online:
+ * http://glad.sh/#api=glx%3D1.4&extensions=GLX_ARB_create_context%2CGLX_ARB_create_context_profile%2CGLX_ARB_get_proc_address%2CGLX_EXT_swap_control%2CGLX_MESA_swap_control%2CGLX_SGI_swap_control&generator=c&options=LOADER
+ *
+ */
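+/*
+ * Minimal usage sketch (not authoritative): it assumes the loader entry
+ * points gladLoaderLoadGLX()/gladLoaderUnloadGLX() that the LOADER option
+ * listed above makes glad emit further down in this header, plus an already
+ * open X11 Display named `dpy`:
+ *
+ *     // Resolve GLX 1.4 + requested extensions through the system libGL.
+ *     if (!gladLoaderLoadGLX(dpy, DefaultScreen(dpy))) {
+ *         // GLX is unavailable or too old; fall back or abort setup.
+ *     }
+ *     // ... pick an FBConfig with glXChooseFBConfig and create a context
+ *     //     with glXCreateContextAttribsARB (typedefs declared below) ...
+ *     gladLoaderUnloadGLX();  // optional teardown once GLX is no longer used
+ */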
+
+#ifndef GLAD_GLX_H_
+#define GLAD_GLX_H_
+
+#ifdef GLX_H
+ #error GLX header already included (API: glx), remove previous include!
+#endif
+#define GLX_H 1
+
+
+#include <X11/X.h>
+#include <X11/Xlib.h>
+#include <X11/Xutil.h>
+
+#include <glad/gl.h>
+
+#define GLAD_GLX
+#define GLAD_OPTION_GLX_LOADER
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifndef GLAD_PLATFORM_H_
+#define GLAD_PLATFORM_H_
+
+#ifndef GLAD_PLATFORM_WIN32
+ #if defined(_WIN32) || defined(__WIN32__) || defined(WIN32) || defined(__MINGW32__)
+ #define GLAD_PLATFORM_WIN32 1
+ #else
+ #define GLAD_PLATFORM_WIN32 0
+ #endif
+#endif
+
+#ifndef GLAD_PLATFORM_APPLE
+ #ifdef __APPLE__
+ #define GLAD_PLATFORM_APPLE 1
+ #else
+ #define GLAD_PLATFORM_APPLE 0
+ #endif
+#endif
+
+#ifndef GLAD_PLATFORM_EMSCRIPTEN
+ #ifdef __EMSCRIPTEN__
+ #define GLAD_PLATFORM_EMSCRIPTEN 1
+ #else
+ #define GLAD_PLATFORM_EMSCRIPTEN 0
+ #endif
+#endif
+
+#ifndef GLAD_PLATFORM_UWP
+ #if defined(_MSC_VER) && !defined(GLAD_INTERNAL_HAVE_WINAPIFAMILY)
+ #ifdef __has_include
+ #if __has_include(<winapifamily.h>)
+ #define GLAD_INTERNAL_HAVE_WINAPIFAMILY 1
+ #endif
+ #elif _MSC_VER >= 1700 && !_USING_V110_SDK71_
+ #define GLAD_INTERNAL_HAVE_WINAPIFAMILY 1
+ #endif
+ #endif
+
+ #ifdef GLAD_INTERNAL_HAVE_WINAPIFAMILY
+ #include <winapifamily.h>
+ #if !WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP) && WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_APP)
+ #define GLAD_PLATFORM_UWP 1
+ #endif
+ #endif
+
+ #ifndef GLAD_PLATFORM_UWP
+ #define GLAD_PLATFORM_UWP 0
+ #endif
+#endif
+
+#ifdef __GNUC__
+ #define GLAD_GNUC_EXTENSION __extension__
+#else
+ #define GLAD_GNUC_EXTENSION
+#endif
+
+#define GLAD_UNUSED(x) (void)(x)
+
+#ifndef GLAD_API_CALL
+ #if defined(GLAD_API_CALL_EXPORT)
+ #if GLAD_PLATFORM_WIN32 || defined(__CYGWIN__)
+ #if defined(GLAD_API_CALL_EXPORT_BUILD)
+ #if defined(__GNUC__)
+ #define GLAD_API_CALL __attribute__ ((dllexport)) extern
+ #else
+ #define GLAD_API_CALL __declspec(dllexport) extern
+ #endif
+ #else
+ #if defined(__GNUC__)
+ #define GLAD_API_CALL __attribute__ ((dllimport)) extern
+ #else
+ #define GLAD_API_CALL __declspec(dllimport) extern
+ #endif
+ #endif
+ #elif defined(__GNUC__) && defined(GLAD_API_CALL_EXPORT_BUILD)
+ #define GLAD_API_CALL __attribute__ ((visibility ("default"))) extern
+ #else
+ #define GLAD_API_CALL extern
+ #endif
+ #else
+ #define GLAD_API_CALL extern
+ #endif
+#endif
+
+#ifdef APIENTRY
+ #define GLAD_API_PTR APIENTRY
+#elif GLAD_PLATFORM_WIN32
+ #define GLAD_API_PTR __stdcall
+#else
+ #define GLAD_API_PTR
+#endif
+
+#ifndef GLAPI
+#define GLAPI GLAD_API_CALL
+#endif
+
+#ifndef GLAPIENTRY
+#define GLAPIENTRY GLAD_API_PTR
+#endif
+
+#define GLAD_MAKE_VERSION(major, minor) (major * 10000 + minor)
+#define GLAD_VERSION_MAJOR(version) (version / 10000)
+#define GLAD_VERSION_MINOR(version) (version % 10000)
+
+#define GLAD_GENERATOR_VERSION "2.0.2"
+
+typedef void (*GLADapiproc)(void);
+
+typedef GLADapiproc (*GLADloadfunc)(const char *name);
+typedef GLADapiproc (*GLADuserptrloadfunc)(void *userptr, const char *name);
+
+typedef void (*GLADprecallback)(const char *name, GLADapiproc apiproc, int len_args, ...);
+typedef void (*GLADpostcallback)(void *ret, const char *name, GLADapiproc apiproc, int len_args, ...);
+
+#endif /* GLAD_PLATFORM_H_ */
+
+#define GLX_ACCUM_ALPHA_SIZE 17
+#define GLX_ACCUM_BLUE_SIZE 16
+#define GLX_ACCUM_BUFFER_BIT 0x00000080
+#define GLX_ACCUM_GREEN_SIZE 15
+#define GLX_ACCUM_RED_SIZE 14
+#define GLX_ALPHA_SIZE 11
+#define GLX_AUX_BUFFERS 7
+#define GLX_AUX_BUFFERS_BIT 0x00000010
+#define GLX_BACK_LEFT_BUFFER_BIT 0x00000004
+#define GLX_BACK_RIGHT_BUFFER_BIT 0x00000008
+#define GLX_BAD_ATTRIBUTE 2
+#define GLX_BAD_CONTEXT 5
+#define GLX_BAD_ENUM 7
+#define GLX_BAD_SCREEN 1
+#define GLX_BAD_VALUE 6
+#define GLX_BAD_VISUAL 4
+#define GLX_BLUE_SIZE 10
+#define GLX_BUFFER_SIZE 2
+#define GLX_BufferSwapComplete 1
+#define GLX_COLOR_INDEX_BIT 0x00000002
+#define GLX_COLOR_INDEX_TYPE 0x8015
+#define GLX_CONFIG_CAVEAT 0x20
+#define GLX_CONTEXT_COMPATIBILITY_PROFILE_BIT_ARB 0x00000002
+#define GLX_CONTEXT_CORE_PROFILE_BIT_ARB 0x00000001
+#define GLX_CONTEXT_DEBUG_BIT_ARB 0x00000001
+#define GLX_CONTEXT_FLAGS_ARB 0x2094
+#define GLX_CONTEXT_FORWARD_COMPATIBLE_BIT_ARB 0x00000002
+#define GLX_CONTEXT_MAJOR_VERSION_ARB 0x2091
+#define GLX_CONTEXT_MINOR_VERSION_ARB 0x2092
+#define GLX_CONTEXT_PROFILE_MASK_ARB 0x9126
+#define GLX_DAMAGED 0x8020
+#define GLX_DEPTH_BUFFER_BIT 0x00000020
+#define GLX_DEPTH_SIZE 12
+#define GLX_DIRECT_COLOR 0x8003
+#define GLX_DONT_CARE 0xFFFFFFFF
+#define GLX_DOUBLEBUFFER 5
+#define GLX_DRAWABLE_TYPE 0x8010
+#define GLX_EVENT_MASK 0x801F
+#define GLX_EXTENSIONS 0x3
+#define GLX_EXTENSION_NAME "GLX"
+#define GLX_FBCONFIG_ID 0x8013
+#define GLX_FRONT_LEFT_BUFFER_BIT 0x00000001
+#define GLX_FRONT_RIGHT_BUFFER_BIT 0x00000002
+#define GLX_GRAY_SCALE 0x8006
+#define GLX_GREEN_SIZE 9
+#define GLX_HEIGHT 0x801E
+#define GLX_LARGEST_PBUFFER 0x801C
+#define GLX_LEVEL 3
+#define GLX_MAX_PBUFFER_HEIGHT 0x8017
+#define GLX_MAX_PBUFFER_PIXELS 0x8018
+#define GLX_MAX_PBUFFER_WIDTH 0x8016
+#define GLX_MAX_SWAP_INTERVAL_EXT 0x20F2
+#define GLX_NONE 0x8000
+#define GLX_NON_CONFORMANT_CONFIG 0x800D
+#define GLX_NO_EXTENSION 3
+#define GLX_PBUFFER 0x8023
+#define GLX_PBUFFER_BIT 0x00000004
+#define GLX_PBUFFER_CLOBBER_MASK 0x08000000
+#define GLX_PBUFFER_HEIGHT 0x8040
+#define GLX_PBUFFER_WIDTH 0x8041
+#define GLX_PIXMAP_BIT 0x00000002
+#define GLX_PRESERVED_CONTENTS 0x801B
+#define GLX_PSEUDO_COLOR 0x8004
+#define GLX_PbufferClobber 0
+#define GLX_RED_SIZE 8
+#define GLX_RENDER_TYPE 0x8011
+#define GLX_RGBA 4
+#define GLX_RGBA_BIT 0x00000001
+#define GLX_RGBA_TYPE 0x8014
+#define GLX_SAMPLES 100001
+#define GLX_SAMPLE_BUFFERS 100000
+#define GLX_SAVED 0x8021
+#define GLX_SCREEN 0x800C
+#define GLX_SLOW_CONFIG 0x8001
+#define GLX_STATIC_COLOR 0x8005
+#define GLX_STATIC_GRAY 0x8007
+#define GLX_STENCIL_BUFFER_BIT 0x00000040
+#define GLX_STENCIL_SIZE 13
+#define GLX_STEREO 6
+#define GLX_SWAP_INTERVAL_EXT 0x20F1
+#define GLX_TRANSPARENT_ALPHA_VALUE 0x28
+#define GLX_TRANSPARENT_BLUE_VALUE 0x27
+#define GLX_TRANSPARENT_GREEN_VALUE 0x26
+#define GLX_TRANSPARENT_INDEX 0x8009
+#define GLX_TRANSPARENT_INDEX_VALUE 0x24
+#define GLX_TRANSPARENT_RED_VALUE 0x25
+#define GLX_TRANSPARENT_RGB 0x8008
+#define GLX_TRANSPARENT_TYPE 0x23
+#define GLX_TRUE_COLOR 0x8002
+#define GLX_USE_GL 1
+#define GLX_VENDOR 0x1
+#define GLX_VERSION 0x2
+#define GLX_VISUAL_ID 0x800B
+#define GLX_WIDTH 0x801D
+#define GLX_WINDOW 0x8022
+#define GLX_WINDOW_BIT 0x00000001
+#define GLX_X_RENDERABLE 0x8012
+#define GLX_X_VISUAL_TYPE 0x22
+#define __GLX_NUMBER_EVENTS 17
+
+
+#ifndef GLEXT_64_TYPES_DEFINED
+/* This code block is duplicated in glext.h, so must be protected */
+#define GLEXT_64_TYPES_DEFINED
+/* Define int32_t, int64_t, and uint64_t types for UST/MSC */
+/* (as used in the GLX_OML_sync_control extension). */
+#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+#include <inttypes.h>
+#elif defined(__sun__) || defined(__digital__)
+#include <inttypes.h>
+#if defined(__STDC__)
+#if defined(__arch64__) || defined(_LP64)
+typedef long int int64_t;
+typedef unsigned long int uint64_t;
+#else
+typedef long long int int64_t;
+typedef unsigned long long int uint64_t;
+#endif /* __arch64__ */
+#endif /* __STDC__ */
+#elif defined( __VMS ) || defined(__sgi)
+#include <inttypes.h>
+#elif defined(__SCO__) || defined(__USLC__)
+#include <stdint.h>
+#elif defined(__UNIXOS2__) || defined(__SOL64__)
+typedef long int int32_t;
+typedef long long int int64_t;
+typedef unsigned long long int uint64_t;
+#elif defined(_WIN32) && defined(__GNUC__)
+#include <stdint.h>
+#elif defined(_WIN32)
+typedef __int32 int32_t;
+typedef __int64 int64_t;
+typedef unsigned __int64 uint64_t;
+#else
+/* Fallback if nothing above works */
+#include <inttypes.h>
+#endif
+#endif
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+#if defined(__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__) && (__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__ > 1060)
+
+#else
+
+#endif
+
+#if defined(__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__) && (__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__ > 1060)
+
+#else
+
+#endif
+
+
+
+
+
+
+
+typedef XID GLXFBConfigID;
+typedef struct __GLXFBConfigRec *GLXFBConfig;
+typedef XID GLXContextID;
+typedef struct __GLXcontextRec *GLXContext;
+typedef XID GLXPixmap;
+typedef XID GLXDrawable;
+typedef XID GLXWindow;
+typedef XID GLXPbuffer;
+typedef void (GLAD_API_PTR *__GLXextFuncPtr)(void);
+typedef XID GLXVideoCaptureDeviceNV;
+typedef unsigned int GLXVideoDeviceNV;
+typedef XID GLXVideoSourceSGIX;
+typedef XID GLXFBConfigIDSGIX;
+typedef struct __GLXFBConfigRec *GLXFBConfigSGIX;
+typedef XID GLXPbufferSGIX;
+typedef struct {
+ int event_type; /* GLX_DAMAGED or GLX_SAVED */
+ int draw_type; /* GLX_WINDOW or GLX_PBUFFER */
+ unsigned long serial; /* # of last request processed by server */
+    Bool send_event;    /* true if this came from a SendEvent request */
+ Display *display; /* display the event was read from */
+ GLXDrawable drawable; /* XID of Drawable */
+ unsigned int buffer_mask; /* mask indicating which buffers are affected */
+ unsigned int aux_buffer; /* which aux buffer was affected */
+ int x, y;
+ int width, height;
+ int count; /* if nonzero, at least this many more */
+} GLXPbufferClobberEvent;
+typedef struct {
+ int type;
+ unsigned long serial; /* # of last request processed by server */
+ Bool send_event; /* true if this came from a SendEvent request */
+ Display *display; /* Display the event was read from */
+ GLXDrawable drawable; /* drawable on which event was requested in event mask */
+ int event_type;
+ int64_t ust;
+ int64_t msc;
+ int64_t sbc;
+} GLXBufferSwapComplete;
+typedef union __GLXEvent {
+ GLXPbufferClobberEvent glxpbufferclobber;
+ GLXBufferSwapComplete glxbufferswapcomplete;
+ long pad[24];
+} GLXEvent;
+typedef struct {
+ int type;
+ unsigned long serial;
+ Bool send_event;
+ Display *display;
+ int extension;
+ int evtype;
+ GLXDrawable window;
+ Bool stereo_tree;
+} GLXStereoNotifyEventEXT;
+typedef struct {
+ int type;
+ unsigned long serial; /* # of last request processed by server */
+    Bool send_event;    /* true if this came from a SendEvent request */
+ Display *display; /* display the event was read from */
+ GLXDrawable drawable; /* i.d. of Drawable */
+ int event_type; /* GLX_DAMAGED_SGIX or GLX_SAVED_SGIX */
+ int draw_type; /* GLX_WINDOW_SGIX or GLX_PBUFFER_SGIX */
+ unsigned int mask; /* mask indicating which buffers are affected*/
+ int x, y;
+ int width, height;
+ int count; /* if nonzero, at least this many more */
+} GLXBufferClobberEventSGIX;
+typedef struct {
+ char pipeName[80]; /* Should be [GLX_HYPERPIPE_PIPE_NAME_LENGTH_SGIX] */
+ int networkId;
+} GLXHyperpipeNetworkSGIX;
+typedef struct {
+ char pipeName[80]; /* Should be [GLX_HYPERPIPE_PIPE_NAME_LENGTH_SGIX] */
+ int channel;
+ unsigned int participationType;
+ int timeSlice;
+} GLXHyperpipeConfigSGIX;
+typedef struct {
+ char pipeName[80]; /* Should be [GLX_HYPERPIPE_PIPE_NAME_LENGTH_SGIX] */
+ int srcXOrigin, srcYOrigin, srcWidth, srcHeight;
+ int destXOrigin, destYOrigin, destWidth, destHeight;
+} GLXPipeRect;
+typedef struct {
+ char pipeName[80]; /* Should be [GLX_HYPERPIPE_PIPE_NAME_LENGTH_SGIX] */
+ int XOrigin, YOrigin, maxHeight, maxWidth;
+} GLXPipeRectLimits;
+
+
+#define GLX_VERSION_1_0 1
+GLAD_API_CALL int GLAD_GLX_VERSION_1_0;
+#define GLX_VERSION_1_1 1
+GLAD_API_CALL int GLAD_GLX_VERSION_1_1;
+#define GLX_VERSION_1_2 1
+GLAD_API_CALL int GLAD_GLX_VERSION_1_2;
+#define GLX_VERSION_1_3 1
+GLAD_API_CALL int GLAD_GLX_VERSION_1_3;
+#define GLX_VERSION_1_4 1
+GLAD_API_CALL int GLAD_GLX_VERSION_1_4;
+#define GLX_ARB_create_context 1
+GLAD_API_CALL int GLAD_GLX_ARB_create_context;
+#define GLX_ARB_create_context_profile 1
+GLAD_API_CALL int GLAD_GLX_ARB_create_context_profile;
+#define GLX_ARB_get_proc_address 1
+GLAD_API_CALL int GLAD_GLX_ARB_get_proc_address;
+#define GLX_EXT_swap_control 1
+GLAD_API_CALL int GLAD_GLX_EXT_swap_control;
+#define GLX_MESA_swap_control 1
+GLAD_API_CALL int GLAD_GLX_MESA_swap_control;
+#define GLX_SGI_swap_control 1
+GLAD_API_CALL int GLAD_GLX_SGI_swap_control;
+
+
+typedef GLXFBConfig * (GLAD_API_PTR *PFNGLXCHOOSEFBCONFIGPROC)(Display * dpy, int screen, const int * attrib_list, int * nelements);
+typedef XVisualInfo * (GLAD_API_PTR *PFNGLXCHOOSEVISUALPROC)(Display * dpy, int screen, int * attribList);
+typedef void (GLAD_API_PTR *PFNGLXCOPYCONTEXTPROC)(Display * dpy, GLXContext src, GLXContext dst, unsigned long mask);
+typedef GLXContext (GLAD_API_PTR *PFNGLXCREATECONTEXTPROC)(Display * dpy, XVisualInfo * vis, GLXContext shareList, Bool direct);
+typedef GLXContext (GLAD_API_PTR *PFNGLXCREATECONTEXTATTRIBSARBPROC)(Display * dpy, GLXFBConfig config, GLXContext share_context, Bool direct, const int * attrib_list);
+typedef GLXPixmap (GLAD_API_PTR *PFNGLXCREATEGLXPIXMAPPROC)(Display * dpy, XVisualInfo * visual, Pixmap pixmap);
+typedef GLXContext (GLAD_API_PTR *PFNGLXCREATENEWCONTEXTPROC)(Display * dpy, GLXFBConfig config, int render_type, GLXContext share_list, Bool direct);
+typedef GLXPbuffer (GLAD_API_PTR *PFNGLXCREATEPBUFFERPROC)(Display * dpy, GLXFBConfig config, const int * attrib_list);
+typedef GLXPixmap (GLAD_API_PTR *PFNGLXCREATEPIXMAPPROC)(Display * dpy, GLXFBConfig config, Pixmap pixmap, const int * attrib_list);
+typedef GLXWindow (GLAD_API_PTR *PFNGLXCREATEWINDOWPROC)(Display * dpy, GLXFBConfig config, Window win, const int * attrib_list);
+typedef void (GLAD_API_PTR *PFNGLXDESTROYCONTEXTPROC)(Display * dpy, GLXContext ctx);
+typedef void (GLAD_API_PTR *PFNGLXDESTROYGLXPIXMAPPROC)(Display * dpy, GLXPixmap pixmap);
+typedef void (GLAD_API_PTR *PFNGLXDESTROYPBUFFERPROC)(Display * dpy, GLXPbuffer pbuf);
+typedef void (GLAD_API_PTR *PFNGLXDESTROYPIXMAPPROC)(Display * dpy, GLXPixmap pixmap);
+typedef void (GLAD_API_PTR *PFNGLXDESTROYWINDOWPROC)(Display * dpy, GLXWindow win);
+typedef const char * (GLAD_API_PTR *PFNGLXGETCLIENTSTRINGPROC)(Display * dpy, int name);
+typedef int (GLAD_API_PTR *PFNGLXGETCONFIGPROC)(Display * dpy, XVisualInfo * visual, int attrib, int * value);
+typedef GLXContext (GLAD_API_PTR *PFNGLXGETCURRENTCONTEXTPROC)(void);
+typedef Display * (GLAD_API_PTR *PFNGLXGETCURRENTDISPLAYPROC)(void);
+typedef GLXDrawable (GLAD_API_PTR *PFNGLXGETCURRENTDRAWABLEPROC)(void);
+typedef GLXDrawable (GLAD_API_PTR *PFNGLXGETCURRENTREADDRAWABLEPROC)(void);
+typedef int (GLAD_API_PTR *PFNGLXGETFBCONFIGATTRIBPROC)(Display * dpy, GLXFBConfig config, int attribute, int * value);
+typedef GLXFBConfig * (GLAD_API_PTR *PFNGLXGETFBCONFIGSPROC)(Display * dpy, int screen, int * nelements);
+typedef __GLXextFuncPtr (GLAD_API_PTR *PFNGLXGETPROCADDRESSPROC)(const GLubyte * procName);
+typedef __GLXextFuncPtr (GLAD_API_PTR *PFNGLXGETPROCADDRESSARBPROC)(const GLubyte * procName);
+typedef void (GLAD_API_PTR *PFNGLXGETSELECTEDEVENTPROC)(Display * dpy, GLXDrawable draw, unsigned long * event_mask);
+typedef int (GLAD_API_PTR *PFNGLXGETSWAPINTERVALMESAPROC)(void);
+typedef XVisualInfo * (GLAD_API_PTR *PFNGLXGETVISUALFROMFBCONFIGPROC)(Display * dpy, GLXFBConfig config);
+typedef Bool (GLAD_API_PTR *PFNGLXISDIRECTPROC)(Display * dpy, GLXContext ctx);
+typedef Bool (GLAD_API_PTR *PFNGLXMAKECONTEXTCURRENTPROC)(Display * dpy, GLXDrawable draw, GLXDrawable read, GLXContext ctx);
+typedef Bool (GLAD_API_PTR *PFNGLXMAKECURRENTPROC)(Display * dpy, GLXDrawable drawable, GLXContext ctx);
+typedef int (GLAD_API_PTR *PFNGLXQUERYCONTEXTPROC)(Display * dpy, GLXContext ctx, int attribute, int * value);
+typedef void (GLAD_API_PTR *PFNGLXQUERYDRAWABLEPROC)(Display * dpy, GLXDrawable draw, int attribute, unsigned int * value);
+typedef Bool (GLAD_API_PTR *PFNGLXQUERYEXTENSIONPROC)(Display * dpy, int * errorb, int * event);
+typedef const char * (GLAD_API_PTR *PFNGLXQUERYEXTENSIONSSTRINGPROC)(Display * dpy, int screen);
+typedef const char * (GLAD_API_PTR *PFNGLXQUERYSERVERSTRINGPROC)(Display * dpy, int screen, int name);
+typedef Bool (GLAD_API_PTR *PFNGLXQUERYVERSIONPROC)(Display * dpy, int * maj, int * min);
+typedef void (GLAD_API_PTR *PFNGLXSELECTEVENTPROC)(Display * dpy, GLXDrawable draw, unsigned long event_mask);
+typedef void (GLAD_API_PTR *PFNGLXSWAPBUFFERSPROC)(Display * dpy, GLXDrawable drawable);
+typedef void (GLAD_API_PTR *PFNGLXSWAPINTERVALEXTPROC)(Display * dpy, GLXDrawable drawable, int interval);
+typedef int (GLAD_API_PTR *PFNGLXSWAPINTERVALMESAPROC)(unsigned int interval);
+typedef int (GLAD_API_PTR *PFNGLXSWAPINTERVALSGIPROC)(int interval);
+typedef void (GLAD_API_PTR *PFNGLXUSEXFONTPROC)(Font font, int first, int count, int list);
+typedef void (GLAD_API_PTR *PFNGLXWAITGLPROC)(void);
+typedef void (GLAD_API_PTR *PFNGLXWAITXPROC)(void);
+
+GLAD_API_CALL PFNGLXCHOOSEFBCONFIGPROC glad_glXChooseFBConfig;
+#define glXChooseFBConfig glad_glXChooseFBConfig
+GLAD_API_CALL PFNGLXCHOOSEVISUALPROC glad_glXChooseVisual;
+#define glXChooseVisual glad_glXChooseVisual
+GLAD_API_CALL PFNGLXCOPYCONTEXTPROC glad_glXCopyContext;
+#define glXCopyContext glad_glXCopyContext
+GLAD_API_CALL PFNGLXCREATECONTEXTPROC glad_glXCreateContext;
+#define glXCreateContext glad_glXCreateContext
+GLAD_API_CALL PFNGLXCREATECONTEXTATTRIBSARBPROC glad_glXCreateContextAttribsARB;
+#define glXCreateContextAttribsARB glad_glXCreateContextAttribsARB
+GLAD_API_CALL PFNGLXCREATEGLXPIXMAPPROC glad_glXCreateGLXPixmap;
+#define glXCreateGLXPixmap glad_glXCreateGLXPixmap
+GLAD_API_CALL PFNGLXCREATENEWCONTEXTPROC glad_glXCreateNewContext;
+#define glXCreateNewContext glad_glXCreateNewContext
+GLAD_API_CALL PFNGLXCREATEPBUFFERPROC glad_glXCreatePbuffer;
+#define glXCreatePbuffer glad_glXCreatePbuffer
+GLAD_API_CALL PFNGLXCREATEPIXMAPPROC glad_glXCreatePixmap;
+#define glXCreatePixmap glad_glXCreatePixmap
+GLAD_API_CALL PFNGLXCREATEWINDOWPROC glad_glXCreateWindow;
+#define glXCreateWindow glad_glXCreateWindow
+GLAD_API_CALL PFNGLXDESTROYCONTEXTPROC glad_glXDestroyContext;
+#define glXDestroyContext glad_glXDestroyContext
+GLAD_API_CALL PFNGLXDESTROYGLXPIXMAPPROC glad_glXDestroyGLXPixmap;
+#define glXDestroyGLXPixmap glad_glXDestroyGLXPixmap
+GLAD_API_CALL PFNGLXDESTROYPBUFFERPROC glad_glXDestroyPbuffer;
+#define glXDestroyPbuffer glad_glXDestroyPbuffer
+GLAD_API_CALL PFNGLXDESTROYPIXMAPPROC glad_glXDestroyPixmap;
+#define glXDestroyPixmap glad_glXDestroyPixmap
+GLAD_API_CALL PFNGLXDESTROYWINDOWPROC glad_glXDestroyWindow;
+#define glXDestroyWindow glad_glXDestroyWindow
+GLAD_API_CALL PFNGLXGETCLIENTSTRINGPROC glad_glXGetClientString;
+#define glXGetClientString glad_glXGetClientString
+GLAD_API_CALL PFNGLXGETCONFIGPROC glad_glXGetConfig;
+#define glXGetConfig glad_glXGetConfig
+GLAD_API_CALL PFNGLXGETCURRENTCONTEXTPROC glad_glXGetCurrentContext;
+#define glXGetCurrentContext glad_glXGetCurrentContext
+GLAD_API_CALL PFNGLXGETCURRENTDISPLAYPROC glad_glXGetCurrentDisplay;
+#define glXGetCurrentDisplay glad_glXGetCurrentDisplay
+GLAD_API_CALL PFNGLXGETCURRENTDRAWABLEPROC glad_glXGetCurrentDrawable;
+#define glXGetCurrentDrawable glad_glXGetCurrentDrawable
+GLAD_API_CALL PFNGLXGETCURRENTREADDRAWABLEPROC glad_glXGetCurrentReadDrawable;
+#define glXGetCurrentReadDrawable glad_glXGetCurrentReadDrawable
+GLAD_API_CALL PFNGLXGETFBCONFIGATTRIBPROC glad_glXGetFBConfigAttrib;
+#define glXGetFBConfigAttrib glad_glXGetFBConfigAttrib
+GLAD_API_CALL PFNGLXGETFBCONFIGSPROC glad_glXGetFBConfigs;
+#define glXGetFBConfigs glad_glXGetFBConfigs
+GLAD_API_CALL PFNGLXGETPROCADDRESSPROC glad_glXGetProcAddress;
+#define glXGetProcAddress glad_glXGetProcAddress
+GLAD_API_CALL PFNGLXGETPROCADDRESSARBPROC glad_glXGetProcAddressARB;
+#define glXGetProcAddressARB glad_glXGetProcAddressARB
+GLAD_API_CALL PFNGLXGETSELECTEDEVENTPROC glad_glXGetSelectedEvent;
+#define glXGetSelectedEvent glad_glXGetSelectedEvent
+GLAD_API_CALL PFNGLXGETSWAPINTERVALMESAPROC glad_glXGetSwapIntervalMESA;
+#define glXGetSwapIntervalMESA glad_glXGetSwapIntervalMESA
+GLAD_API_CALL PFNGLXGETVISUALFROMFBCONFIGPROC glad_glXGetVisualFromFBConfig;
+#define glXGetVisualFromFBConfig glad_glXGetVisualFromFBConfig
+GLAD_API_CALL PFNGLXISDIRECTPROC glad_glXIsDirect;
+#define glXIsDirect glad_glXIsDirect
+GLAD_API_CALL PFNGLXMAKECONTEXTCURRENTPROC glad_glXMakeContextCurrent;
+#define glXMakeContextCurrent glad_glXMakeContextCurrent
+GLAD_API_CALL PFNGLXMAKECURRENTPROC glad_glXMakeCurrent;
+#define glXMakeCurrent glad_glXMakeCurrent
+GLAD_API_CALL PFNGLXQUERYCONTEXTPROC glad_glXQueryContext;
+#define glXQueryContext glad_glXQueryContext
+GLAD_API_CALL PFNGLXQUERYDRAWABLEPROC glad_glXQueryDrawable;
+#define glXQueryDrawable glad_glXQueryDrawable
+GLAD_API_CALL PFNGLXQUERYEXTENSIONPROC glad_glXQueryExtension;
+#define glXQueryExtension glad_glXQueryExtension
+GLAD_API_CALL PFNGLXQUERYEXTENSIONSSTRINGPROC glad_glXQueryExtensionsString;
+#define glXQueryExtensionsString glad_glXQueryExtensionsString
+GLAD_API_CALL PFNGLXQUERYSERVERSTRINGPROC glad_glXQueryServerString;
+#define glXQueryServerString glad_glXQueryServerString
+GLAD_API_CALL PFNGLXQUERYVERSIONPROC glad_glXQueryVersion;
+#define glXQueryVersion glad_glXQueryVersion
+GLAD_API_CALL PFNGLXSELECTEVENTPROC glad_glXSelectEvent;
+#define glXSelectEvent glad_glXSelectEvent
+GLAD_API_CALL PFNGLXSWAPBUFFERSPROC glad_glXSwapBuffers;
+#define glXSwapBuffers glad_glXSwapBuffers
+GLAD_API_CALL PFNGLXSWAPINTERVALEXTPROC glad_glXSwapIntervalEXT;
+#define glXSwapIntervalEXT glad_glXSwapIntervalEXT
+GLAD_API_CALL PFNGLXSWAPINTERVALMESAPROC glad_glXSwapIntervalMESA;
+#define glXSwapIntervalMESA glad_glXSwapIntervalMESA
+GLAD_API_CALL PFNGLXSWAPINTERVALSGIPROC glad_glXSwapIntervalSGI;
+#define glXSwapIntervalSGI glad_glXSwapIntervalSGI
+GLAD_API_CALL PFNGLXUSEXFONTPROC glad_glXUseXFont;
+#define glXUseXFont glad_glXUseXFont
+GLAD_API_CALL PFNGLXWAITGLPROC glad_glXWaitGL;
+#define glXWaitGL glad_glXWaitGL
+GLAD_API_CALL PFNGLXWAITXPROC glad_glXWaitX;
+#define glXWaitX glad_glXWaitX
+
+
+
+
+
+GLAD_API_CALL int gladLoadGLXUserPtr(Display *display, int screen, GLADuserptrloadfunc load, void *userptr);
+GLAD_API_CALL int gladLoadGLX(Display *display, int screen, GLADloadfunc load);
+
+#ifdef GLAD_GLX
+
+GLAD_API_CALL int gladLoaderLoadGLX(Display *display, int screen);
+
+GLAD_API_CALL void gladLoaderUnloadGLX(void);
+
+#endif
+#ifdef __cplusplus
+}
+#endif
+#endif
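
The header above only declares the loader entry points; the generated glx.c that follows provides them. A minimal usage sketch (not part of the patch; it assumes an X11 build where GLAD_GLX is defined and libGL is available at runtime):

#include <glad/glx.h>
#include <X11/Xlib.h>
#include <stdio.h>

int main(void) {
    Display *dpy = XOpenDisplay(NULL);              /* connect to the X server */
    if (dpy == NULL) return 1;

    int screen = DefaultScreen(dpy);
    int version = gladLoaderLoadGLX(dpy, screen);   /* dlopens libGL and resolves the glX* pointers */
    if (version == 0) { XCloseDisplay(dpy); return 1; }

    printf("GLX loaded, packed version token: %d\n", version);

    if (GLAD_GLX_EXT_swap_control) {
        /* glXSwapIntervalEXT(dpy, drawable, 1); -- usable once a drawable and context are current */
    }

    gladLoaderUnloadGLX();
    XCloseDisplay(dpy);
    return 0;
}
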
diff --git a/thirdparty/glad/glx.c b/thirdparty/glad/glx.c
new file mode 100644
index 0000000000..6391027db2
--- /dev/null
+++ b/thirdparty/glad/glx.c
@@ -0,0 +1,395 @@
+/**
+ * SPDX-License-Identifier: (WTFPL OR CC0-1.0) AND Apache-2.0
+ */
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <glad/glx.h>
+
+#ifndef GLAD_IMPL_UTIL_C_
+#define GLAD_IMPL_UTIL_C_
+
+#ifdef _MSC_VER
+#define GLAD_IMPL_UTIL_SSCANF sscanf_s
+#else
+#define GLAD_IMPL_UTIL_SSCANF sscanf
+#endif
+
+#endif /* GLAD_IMPL_UTIL_C_ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+int GLAD_GLX_VERSION_1_0 = 0;
+int GLAD_GLX_VERSION_1_1 = 0;
+int GLAD_GLX_VERSION_1_2 = 0;
+int GLAD_GLX_VERSION_1_3 = 0;
+int GLAD_GLX_VERSION_1_4 = 0;
+int GLAD_GLX_ARB_create_context = 0;
+int GLAD_GLX_ARB_create_context_profile = 0;
+int GLAD_GLX_ARB_get_proc_address = 0;
+int GLAD_GLX_EXT_swap_control = 0;
+int GLAD_GLX_MESA_swap_control = 0;
+int GLAD_GLX_SGI_swap_control = 0;
+
+
+
+PFNGLXCHOOSEFBCONFIGPROC glad_glXChooseFBConfig = NULL;
+PFNGLXCHOOSEVISUALPROC glad_glXChooseVisual = NULL;
+PFNGLXCOPYCONTEXTPROC glad_glXCopyContext = NULL;
+PFNGLXCREATECONTEXTPROC glad_glXCreateContext = NULL;
+PFNGLXCREATECONTEXTATTRIBSARBPROC glad_glXCreateContextAttribsARB = NULL;
+PFNGLXCREATEGLXPIXMAPPROC glad_glXCreateGLXPixmap = NULL;
+PFNGLXCREATENEWCONTEXTPROC glad_glXCreateNewContext = NULL;
+PFNGLXCREATEPBUFFERPROC glad_glXCreatePbuffer = NULL;
+PFNGLXCREATEPIXMAPPROC glad_glXCreatePixmap = NULL;
+PFNGLXCREATEWINDOWPROC glad_glXCreateWindow = NULL;
+PFNGLXDESTROYCONTEXTPROC glad_glXDestroyContext = NULL;
+PFNGLXDESTROYGLXPIXMAPPROC glad_glXDestroyGLXPixmap = NULL;
+PFNGLXDESTROYPBUFFERPROC glad_glXDestroyPbuffer = NULL;
+PFNGLXDESTROYPIXMAPPROC glad_glXDestroyPixmap = NULL;
+PFNGLXDESTROYWINDOWPROC glad_glXDestroyWindow = NULL;
+PFNGLXGETCLIENTSTRINGPROC glad_glXGetClientString = NULL;
+PFNGLXGETCONFIGPROC glad_glXGetConfig = NULL;
+PFNGLXGETCURRENTCONTEXTPROC glad_glXGetCurrentContext = NULL;
+PFNGLXGETCURRENTDISPLAYPROC glad_glXGetCurrentDisplay = NULL;
+PFNGLXGETCURRENTDRAWABLEPROC glad_glXGetCurrentDrawable = NULL;
+PFNGLXGETCURRENTREADDRAWABLEPROC glad_glXGetCurrentReadDrawable = NULL;
+PFNGLXGETFBCONFIGATTRIBPROC glad_glXGetFBConfigAttrib = NULL;
+PFNGLXGETFBCONFIGSPROC glad_glXGetFBConfigs = NULL;
+PFNGLXGETPROCADDRESSPROC glad_glXGetProcAddress = NULL;
+PFNGLXGETPROCADDRESSARBPROC glad_glXGetProcAddressARB = NULL;
+PFNGLXGETSELECTEDEVENTPROC glad_glXGetSelectedEvent = NULL;
+PFNGLXGETSWAPINTERVALMESAPROC glad_glXGetSwapIntervalMESA = NULL;
+PFNGLXGETVISUALFROMFBCONFIGPROC glad_glXGetVisualFromFBConfig = NULL;
+PFNGLXISDIRECTPROC glad_glXIsDirect = NULL;
+PFNGLXMAKECONTEXTCURRENTPROC glad_glXMakeContextCurrent = NULL;
+PFNGLXMAKECURRENTPROC glad_glXMakeCurrent = NULL;
+PFNGLXQUERYCONTEXTPROC glad_glXQueryContext = NULL;
+PFNGLXQUERYDRAWABLEPROC glad_glXQueryDrawable = NULL;
+PFNGLXQUERYEXTENSIONPROC glad_glXQueryExtension = NULL;
+PFNGLXQUERYEXTENSIONSSTRINGPROC glad_glXQueryExtensionsString = NULL;
+PFNGLXQUERYSERVERSTRINGPROC glad_glXQueryServerString = NULL;
+PFNGLXQUERYVERSIONPROC glad_glXQueryVersion = NULL;
+PFNGLXSELECTEVENTPROC glad_glXSelectEvent = NULL;
+PFNGLXSWAPBUFFERSPROC glad_glXSwapBuffers = NULL;
+PFNGLXSWAPINTERVALEXTPROC glad_glXSwapIntervalEXT = NULL;
+PFNGLXSWAPINTERVALMESAPROC glad_glXSwapIntervalMESA = NULL;
+PFNGLXSWAPINTERVALSGIPROC glad_glXSwapIntervalSGI = NULL;
+PFNGLXUSEXFONTPROC glad_glXUseXFont = NULL;
+PFNGLXWAITGLPROC glad_glXWaitGL = NULL;
+PFNGLXWAITXPROC glad_glXWaitX = NULL;
+
+
+static void glad_glx_load_GLX_VERSION_1_0( GLADuserptrloadfunc load, void* userptr) {
+ if(!GLAD_GLX_VERSION_1_0) return;
+ glad_glXChooseVisual = (PFNGLXCHOOSEVISUALPROC) load(userptr, "glXChooseVisual");
+ glad_glXCopyContext = (PFNGLXCOPYCONTEXTPROC) load(userptr, "glXCopyContext");
+ glad_glXCreateContext = (PFNGLXCREATECONTEXTPROC) load(userptr, "glXCreateContext");
+ glad_glXCreateGLXPixmap = (PFNGLXCREATEGLXPIXMAPPROC) load(userptr, "glXCreateGLXPixmap");
+ glad_glXDestroyContext = (PFNGLXDESTROYCONTEXTPROC) load(userptr, "glXDestroyContext");
+ glad_glXDestroyGLXPixmap = (PFNGLXDESTROYGLXPIXMAPPROC) load(userptr, "glXDestroyGLXPixmap");
+ glad_glXGetConfig = (PFNGLXGETCONFIGPROC) load(userptr, "glXGetConfig");
+ glad_glXGetCurrentContext = (PFNGLXGETCURRENTCONTEXTPROC) load(userptr, "glXGetCurrentContext");
+ glad_glXGetCurrentDrawable = (PFNGLXGETCURRENTDRAWABLEPROC) load(userptr, "glXGetCurrentDrawable");
+ glad_glXIsDirect = (PFNGLXISDIRECTPROC) load(userptr, "glXIsDirect");
+ glad_glXMakeCurrent = (PFNGLXMAKECURRENTPROC) load(userptr, "glXMakeCurrent");
+ glad_glXQueryExtension = (PFNGLXQUERYEXTENSIONPROC) load(userptr, "glXQueryExtension");
+ glad_glXQueryVersion = (PFNGLXQUERYVERSIONPROC) load(userptr, "glXQueryVersion");
+ glad_glXSwapBuffers = (PFNGLXSWAPBUFFERSPROC) load(userptr, "glXSwapBuffers");
+ glad_glXUseXFont = (PFNGLXUSEXFONTPROC) load(userptr, "glXUseXFont");
+ glad_glXWaitGL = (PFNGLXWAITGLPROC) load(userptr, "glXWaitGL");
+ glad_glXWaitX = (PFNGLXWAITXPROC) load(userptr, "glXWaitX");
+}
+static void glad_glx_load_GLX_VERSION_1_1( GLADuserptrloadfunc load, void* userptr) {
+ if(!GLAD_GLX_VERSION_1_1) return;
+ glad_glXGetClientString = (PFNGLXGETCLIENTSTRINGPROC) load(userptr, "glXGetClientString");
+ glad_glXQueryExtensionsString = (PFNGLXQUERYEXTENSIONSSTRINGPROC) load(userptr, "glXQueryExtensionsString");
+ glad_glXQueryServerString = (PFNGLXQUERYSERVERSTRINGPROC) load(userptr, "glXQueryServerString");
+}
+static void glad_glx_load_GLX_VERSION_1_2( GLADuserptrloadfunc load, void* userptr) {
+ if(!GLAD_GLX_VERSION_1_2) return;
+ glad_glXGetCurrentDisplay = (PFNGLXGETCURRENTDISPLAYPROC) load(userptr, "glXGetCurrentDisplay");
+}
+static void glad_glx_load_GLX_VERSION_1_3( GLADuserptrloadfunc load, void* userptr) {
+ if(!GLAD_GLX_VERSION_1_3) return;
+ glad_glXChooseFBConfig = (PFNGLXCHOOSEFBCONFIGPROC) load(userptr, "glXChooseFBConfig");
+ glad_glXCreateNewContext = (PFNGLXCREATENEWCONTEXTPROC) load(userptr, "glXCreateNewContext");
+ glad_glXCreatePbuffer = (PFNGLXCREATEPBUFFERPROC) load(userptr, "glXCreatePbuffer");
+ glad_glXCreatePixmap = (PFNGLXCREATEPIXMAPPROC) load(userptr, "glXCreatePixmap");
+ glad_glXCreateWindow = (PFNGLXCREATEWINDOWPROC) load(userptr, "glXCreateWindow");
+ glad_glXDestroyPbuffer = (PFNGLXDESTROYPBUFFERPROC) load(userptr, "glXDestroyPbuffer");
+ glad_glXDestroyPixmap = (PFNGLXDESTROYPIXMAPPROC) load(userptr, "glXDestroyPixmap");
+ glad_glXDestroyWindow = (PFNGLXDESTROYWINDOWPROC) load(userptr, "glXDestroyWindow");
+ glad_glXGetCurrentReadDrawable = (PFNGLXGETCURRENTREADDRAWABLEPROC) load(userptr, "glXGetCurrentReadDrawable");
+ glad_glXGetFBConfigAttrib = (PFNGLXGETFBCONFIGATTRIBPROC) load(userptr, "glXGetFBConfigAttrib");
+ glad_glXGetFBConfigs = (PFNGLXGETFBCONFIGSPROC) load(userptr, "glXGetFBConfigs");
+ glad_glXGetSelectedEvent = (PFNGLXGETSELECTEDEVENTPROC) load(userptr, "glXGetSelectedEvent");
+ glad_glXGetVisualFromFBConfig = (PFNGLXGETVISUALFROMFBCONFIGPROC) load(userptr, "glXGetVisualFromFBConfig");
+ glad_glXMakeContextCurrent = (PFNGLXMAKECONTEXTCURRENTPROC) load(userptr, "glXMakeContextCurrent");
+ glad_glXQueryContext = (PFNGLXQUERYCONTEXTPROC) load(userptr, "glXQueryContext");
+ glad_glXQueryDrawable = (PFNGLXQUERYDRAWABLEPROC) load(userptr, "glXQueryDrawable");
+ glad_glXSelectEvent = (PFNGLXSELECTEVENTPROC) load(userptr, "glXSelectEvent");
+}
+static void glad_glx_load_GLX_VERSION_1_4( GLADuserptrloadfunc load, void* userptr) {
+ if(!GLAD_GLX_VERSION_1_4) return;
+ glad_glXGetProcAddress = (PFNGLXGETPROCADDRESSPROC) load(userptr, "glXGetProcAddress");
+}
+static void glad_glx_load_GLX_ARB_create_context( GLADuserptrloadfunc load, void* userptr) {
+ if(!GLAD_GLX_ARB_create_context) return;
+ glad_glXCreateContextAttribsARB = (PFNGLXCREATECONTEXTATTRIBSARBPROC) load(userptr, "glXCreateContextAttribsARB");
+}
+static void glad_glx_load_GLX_ARB_get_proc_address( GLADuserptrloadfunc load, void* userptr) {
+ if(!GLAD_GLX_ARB_get_proc_address) return;
+ glad_glXGetProcAddressARB = (PFNGLXGETPROCADDRESSARBPROC) load(userptr, "glXGetProcAddressARB");
+}
+static void glad_glx_load_GLX_EXT_swap_control( GLADuserptrloadfunc load, void* userptr) {
+ if(!GLAD_GLX_EXT_swap_control) return;
+ glad_glXSwapIntervalEXT = (PFNGLXSWAPINTERVALEXTPROC) load(userptr, "glXSwapIntervalEXT");
+}
+static void glad_glx_load_GLX_MESA_swap_control( GLADuserptrloadfunc load, void* userptr) {
+ if(!GLAD_GLX_MESA_swap_control) return;
+ glad_glXGetSwapIntervalMESA = (PFNGLXGETSWAPINTERVALMESAPROC) load(userptr, "glXGetSwapIntervalMESA");
+ glad_glXSwapIntervalMESA = (PFNGLXSWAPINTERVALMESAPROC) load(userptr, "glXSwapIntervalMESA");
+}
+static void glad_glx_load_GLX_SGI_swap_control( GLADuserptrloadfunc load, void* userptr) {
+ if(!GLAD_GLX_SGI_swap_control) return;
+ glad_glXSwapIntervalSGI = (PFNGLXSWAPINTERVALSGIPROC) load(userptr, "glXSwapIntervalSGI");
+}
+
+
+
+static int glad_glx_has_extension(Display *display, int screen, const char *ext) {
+#ifndef GLX_VERSION_1_1
+ GLAD_UNUSED(display);
+ GLAD_UNUSED(screen);
+ GLAD_UNUSED(ext);
+#else
+ const char *terminator;
+ const char *loc;
+ const char *extensions;
+
+ if (glXQueryExtensionsString == NULL) {
+ return 0;
+ }
+
+ extensions = glXQueryExtensionsString(display, screen);
+
+ if(extensions == NULL || ext == NULL) {
+ return 0;
+ }
+
+ while(1) {
+ loc = strstr(extensions, ext);
+ if(loc == NULL)
+ break;
+
+ terminator = loc + strlen(ext);
+ if((loc == extensions || *(loc - 1) == ' ') &&
+ (*terminator == ' ' || *terminator == '\0')) {
+ return 1;
+ }
+ extensions = terminator;
+ }
+#endif
+
+ return 0;
+}
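
glad_glx_has_extension() deliberately does more than a plain strstr(): a match only counts when it is delimited by spaces or the end of the string, so an advertised "GLX_EXT_swap_control_tear" cannot masquerade as "GLX_EXT_swap_control". A standalone sketch of the same whole-token check (illustrative only, not part of the patch):

#include <stdio.h>
#include <string.h>

static int has_token(const char *list, const char *ext) {
    size_t n = strlen(ext);
    const char *loc = list;
    while ((loc = strstr(loc, ext)) != NULL) {
        const char *end = loc + n;
        if ((loc == list || loc[-1] == ' ') && (*end == ' ' || *end == '\0'))
            return 1;                 /* delimited on both sides: real match */
        loc = end;                    /* prefix of a longer name: keep searching */
    }
    return 0;
}

int main(void) {
    const char *exts = "GLX_EXT_swap_control_tear GLX_ARB_create_context";
    printf("whole-token: %d, naive strstr: %d\n",
           has_token(exts, "GLX_EXT_swap_control"),
           strstr(exts, "GLX_EXT_swap_control") != NULL);
    return 0;                         /* prints "whole-token: 0, naive strstr: 1" */
}
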
+
+static GLADapiproc glad_glx_get_proc_from_userptr(void *userptr, const char* name) {
+ return (GLAD_GNUC_EXTENSION (GLADapiproc (*)(const char *name)) userptr)(name);
+}
+
+static int glad_glx_find_extensions(Display *display, int screen) {
+ GLAD_GLX_ARB_create_context = glad_glx_has_extension(display, screen, "GLX_ARB_create_context");
+ GLAD_GLX_ARB_create_context_profile = glad_glx_has_extension(display, screen, "GLX_ARB_create_context_profile");
+ GLAD_GLX_ARB_get_proc_address = glad_glx_has_extension(display, screen, "GLX_ARB_get_proc_address");
+ GLAD_GLX_EXT_swap_control = glad_glx_has_extension(display, screen, "GLX_EXT_swap_control");
+ GLAD_GLX_MESA_swap_control = glad_glx_has_extension(display, screen, "GLX_MESA_swap_control");
+ GLAD_GLX_SGI_swap_control = glad_glx_has_extension(display, screen, "GLX_SGI_swap_control");
+ return 1;
+}
+
+static int glad_glx_find_core_glx(Display **display, int *screen) {
+ int major = 0, minor = 0;
+ if(*display == NULL) {
+#ifdef GLAD_GLX_NO_X11
+ GLAD_UNUSED(screen);
+ return 0;
+#else
+ *display = XOpenDisplay(0);
+ if (*display == NULL) {
+ return 0;
+ }
+ *screen = XScreenNumberOfScreen(XDefaultScreenOfDisplay(*display));
+#endif
+ }
+ glXQueryVersion(*display, &major, &minor);
+ GLAD_GLX_VERSION_1_0 = (major == 1 && minor >= 0) || major > 1;
+ GLAD_GLX_VERSION_1_1 = (major == 1 && minor >= 1) || major > 1;
+ GLAD_GLX_VERSION_1_2 = (major == 1 && minor >= 2) || major > 1;
+ GLAD_GLX_VERSION_1_3 = (major == 1 && minor >= 3) || major > 1;
+ GLAD_GLX_VERSION_1_4 = (major == 1 && minor >= 4) || major > 1;
+ return GLAD_MAKE_VERSION(major, minor);
+}
+
+int gladLoadGLXUserPtr(Display *display, int screen, GLADuserptrloadfunc load, void *userptr) {
+ int version;
+ glXQueryVersion = (PFNGLXQUERYVERSIONPROC) load(userptr, "glXQueryVersion");
+ if(glXQueryVersion == NULL) return 0;
+ version = glad_glx_find_core_glx(&display, &screen);
+
+ glad_glx_load_GLX_VERSION_1_0(load, userptr);
+ glad_glx_load_GLX_VERSION_1_1(load, userptr);
+ glad_glx_load_GLX_VERSION_1_2(load, userptr);
+ glad_glx_load_GLX_VERSION_1_3(load, userptr);
+ glad_glx_load_GLX_VERSION_1_4(load, userptr);
+
+ if (!glad_glx_find_extensions(display, screen)) return 0;
+ glad_glx_load_GLX_ARB_create_context(load, userptr);
+ glad_glx_load_GLX_ARB_get_proc_address(load, userptr);
+ glad_glx_load_GLX_EXT_swap_control(load, userptr);
+ glad_glx_load_GLX_MESA_swap_control(load, userptr);
+ glad_glx_load_GLX_SGI_swap_control(load, userptr);
+
+
+ return version;
+}
+
+int gladLoadGLX(Display *display, int screen, GLADloadfunc load) {
+ return gladLoadGLXUserPtr(display, screen, glad_glx_get_proc_from_userptr, GLAD_GNUC_EXTENSION (void*) load);
+}
+
+
+
+#ifdef GLAD_GLX
+
+#ifndef GLAD_LOADER_LIBRARY_C_
+#define GLAD_LOADER_LIBRARY_C_
+
+#include <stddef.h>
+#include <stdlib.h>
+
+#if GLAD_PLATFORM_WIN32
+#include <windows.h>
+#else
+#include <dlfcn.h>
+#endif
+
+
+static void* glad_get_dlopen_handle(const char *lib_names[], int length) {
+ void *handle = NULL;
+ int i;
+
+ for (i = 0; i < length; ++i) {
+#if GLAD_PLATFORM_WIN32
+ #if GLAD_PLATFORM_UWP
+ size_t buffer_size = (strlen(lib_names[i]) + 1) * sizeof(WCHAR);
+ LPWSTR buffer = (LPWSTR) malloc(buffer_size);
+ if (buffer != NULL) {
+ int ret = MultiByteToWideChar(CP_ACP, 0, lib_names[i], -1, buffer, buffer_size);
+ if (ret != 0) {
+ handle = (void*) LoadPackagedLibrary(buffer, 0);
+ }
+ free((void*) buffer);
+ }
+ #else
+ handle = (void*) LoadLibraryA(lib_names[i]);
+ #endif
+#else
+ handle = dlopen(lib_names[i], RTLD_LAZY | RTLD_LOCAL);
+#endif
+ if (handle != NULL) {
+ return handle;
+ }
+ }
+
+ return NULL;
+}
+
+static void glad_close_dlopen_handle(void* handle) {
+ if (handle != NULL) {
+#if GLAD_PLATFORM_WIN32
+ FreeLibrary((HMODULE) handle);
+#else
+ dlclose(handle);
+#endif
+ }
+}
+
+static GLADapiproc glad_dlsym_handle(void* handle, const char *name) {
+ if (handle == NULL) {
+ return NULL;
+ }
+
+#if GLAD_PLATFORM_WIN32
+ return (GLADapiproc) GetProcAddress((HMODULE) handle, name);
+#else
+ return GLAD_GNUC_EXTENSION (GLADapiproc) dlsym(handle, name);
+#endif
+}
+
+#endif /* GLAD_LOADER_LIBRARY_C_ */
+
+typedef void* (GLAD_API_PTR *GLADglxprocaddrfunc)(const char*);
+
+static GLADapiproc glad_glx_get_proc(void *userptr, const char *name) {
+ return GLAD_GNUC_EXTENSION ((GLADapiproc (*)(const char *name)) userptr)(name);
+}
+
+static void* _glx_handle;
+
+static void* glad_glx_dlopen_handle(void) {
+ static const char *NAMES[] = {
+#if defined __CYGWIN__
+ "libGL-1.so",
+#endif
+ "libGL.so.1",
+ "libGL.so"
+ };
+
+ if (_glx_handle == NULL) {
+ _glx_handle = glad_get_dlopen_handle(NAMES, sizeof(NAMES) / sizeof(NAMES[0]));
+ }
+
+ return _glx_handle;
+}
+
+int gladLoaderLoadGLX(Display *display, int screen) {
+ int version = 0;
+ void *handle = NULL;
+ int did_load = 0;
+ GLADglxprocaddrfunc loader;
+
+ did_load = _glx_handle == NULL;
+ handle = glad_glx_dlopen_handle();
+ if (handle != NULL) {
+ loader = (GLADglxprocaddrfunc) glad_dlsym_handle(handle, "glXGetProcAddressARB");
+ if (loader != NULL) {
+ version = gladLoadGLXUserPtr(display, screen, glad_glx_get_proc, GLAD_GNUC_EXTENSION (void*) loader);
+ }
+
+ if (!version && did_load) {
+ gladLoaderUnloadGLX();
+ }
+ }
+
+ return version;
+}
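
gladLoaderLoadGLX() chains the helpers above: glad_glx_dlopen_handle() opens libGL, glad_dlsym_handle() resolves glXGetProcAddressARB, and that pointer is handed to gladLoadGLXUserPtr(). For reference, a hand-rolled POSIX equivalent of the lookup step (a sketch only; the real loader also covers Win32/UWP and the Cygwin library name):

#include <dlfcn.h>
#include <stdio.h>

int main(void) {
    /* same search order as glad_glx_dlopen_handle() on a non-Cygwin POSIX system */
    void *libgl = dlopen("libGL.so.1", RTLD_LAZY | RTLD_LOCAL);
    if (libgl == NULL) libgl = dlopen("libGL.so", RTLD_LAZY | RTLD_LOCAL);
    if (libgl == NULL) return 1;

    void *getproc = dlsym(libgl, "glXGetProcAddressARB");
    printf("glXGetProcAddressARB %s\n", getproc != NULL ? "resolved" : "missing");

    dlclose(libgl);
    return 0;
}
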
+
+
+void gladLoaderUnloadGLX() {
+ if (_glx_handle != NULL) {
+ glad_close_dlopen_handle(_glx_handle);
+ _glx_handle = NULL;
+ }
+}
+
+#endif /* GLAD_GLX */
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/thirdparty/glslang/SPIRV/GLSL.ext.EXT.h b/thirdparty/glslang/SPIRV/GLSL.ext.EXT.h
index f48f1304d6..a247b4cd13 100644
--- a/thirdparty/glslang/SPIRV/GLSL.ext.EXT.h
+++ b/thirdparty/glslang/SPIRV/GLSL.ext.EXT.h
@@ -39,5 +39,6 @@ static const char* const E_SPV_EXT_shader_atomic_float_add = "SPV_EXT_shader_ato
static const char* const E_SPV_EXT_shader_atomic_float16_add = "SPV_EXT_shader_atomic_float16_add";
static const char* const E_SPV_EXT_shader_atomic_float_min_max = "SPV_EXT_shader_atomic_float_min_max";
static const char* const E_SPV_EXT_shader_image_int64 = "SPV_EXT_shader_image_int64";
+static const char* const E_SPV_EXT_mesh_shader = "SPV_EXT_mesh_shader";
#endif // #ifndef GLSLextEXT_H
diff --git a/thirdparty/glslang/SPIRV/GLSL.ext.KHR.h b/thirdparty/glslang/SPIRV/GLSL.ext.KHR.h
index 5eb3e94482..d5c670f0e1 100644
--- a/thirdparty/glslang/SPIRV/GLSL.ext.KHR.h
+++ b/thirdparty/glslang/SPIRV/GLSL.ext.KHR.h
@@ -29,7 +29,7 @@
#define GLSLextKHR_H
static const int GLSLextKHRVersion = 100;
-static const int GLSLextKHRRevision = 2;
+static const int GLSLextKHRRevision = 3;
static const char* const E_SPV_KHR_shader_ballot = "SPV_KHR_shader_ballot";
static const char* const E_SPV_KHR_subgroup_vote = "SPV_KHR_subgroup_vote";
@@ -52,5 +52,7 @@ static const char* const E_SPV_KHR_fragment_shading_rate = "SPV_KHR_fragm
static const char* const E_SPV_KHR_terminate_invocation = "SPV_KHR_terminate_invocation";
static const char* const E_SPV_KHR_workgroup_memory_explicit_layout = "SPV_KHR_workgroup_memory_explicit_layout";
static const char* const E_SPV_KHR_subgroup_uniform_control_flow = "SPV_KHR_subgroup_uniform_control_flow";
+static const char* const E_SPV_KHR_fragment_shader_barycentric = "SPV_KHR_fragment_shader_barycentric";
+static const char* const E_SPV_AMD_shader_early_and_late_fragment_tests = "SPV_AMD_shader_early_and_late_fragment_tests";
#endif // #ifndef GLSLextKHR_H
diff --git a/thirdparty/glslang/SPIRV/GlslangToSpv.cpp b/thirdparty/glslang/SPIRV/GlslangToSpv.cpp
index 39941d3752..ccb4602be2 100644
--- a/thirdparty/glslang/SPIRV/GlslangToSpv.cpp
+++ b/thirdparty/glslang/SPIRV/GlslangToSpv.cpp
@@ -260,6 +260,7 @@ protected:
std::unordered_map<std::string, spv::Id> extBuiltinMap;
std::unordered_map<long long, spv::Id> symbolValues;
+ std::unordered_map<uint32_t, spv::Id> builtInVariableIds;
std::unordered_set<long long> rValueParameters; // set of formal function parameters passed as rValues,
// rather than a pointer
std::unordered_map<std::string, spv::Function*> functionMap;
@@ -279,6 +280,9 @@ protected:
// Used later for generating OpTraceKHR/OpExecuteCallableKHR
std::unordered_map<unsigned int, glslang::TIntermSymbol *> locationToSymbol[2];
+
+ // Used by the task shader while generating operands for OpEmitMeshTasksEXT
+ spv::Id taskPayloadID;
};
//
@@ -314,7 +318,7 @@ spv::SourceLanguage TranslateSourceLanguage(glslang::EShSource source, EProfile
}
// Translate glslang language (stage) to SPIR-V execution model.
-spv::ExecutionModel TranslateExecutionModel(EShLanguage stage)
+spv::ExecutionModel TranslateExecutionModel(EShLanguage stage, bool isMeshShaderEXT = false)
{
switch (stage) {
case EShLangVertex: return spv::ExecutionModelVertex;
@@ -330,8 +334,8 @@ spv::ExecutionModel TranslateExecutionModel(EShLanguage stage)
case EShLangClosestHit: return spv::ExecutionModelClosestHitKHR;
case EShLangMiss: return spv::ExecutionModelMissKHR;
case EShLangCallable: return spv::ExecutionModelCallableKHR;
- case EShLangTaskNV: return spv::ExecutionModelTaskNV;
- case EShLangMeshNV: return spv::ExecutionModelMeshNV;
+ case EShLangTask: return (isMeshShaderEXT)? spv::ExecutionModelTaskEXT : spv::ExecutionModelTaskNV;
+ case EShLangMesh: return (isMeshShaderEXT)? spv::ExecutionModelMeshEXT: spv::ExecutionModelMeshNV;
#endif
default:
assert(0);
@@ -762,7 +766,7 @@ spv::BuiltIn TGlslangToSpvTraverser::TranslateBuiltInDecoration(glslang::TBuiltI
return spv::BuiltInSampleMask;
case glslang::EbvLayer:
- if (glslangIntermediate->getStage() == EShLangMeshNV) {
+ if (glslangIntermediate->getStage() == EShLangMesh) {
return spv::BuiltInLayer;
}
if (glslangIntermediate->getStage() == EShLangGeometry ||
@@ -1007,6 +1011,8 @@ spv::BuiltIn TGlslangToSpvTraverser::TranslateBuiltInDecoration(glslang::TBuiltI
return spv::BuiltInRayTminKHR;
case glslang::EbvRayTmax:
return spv::BuiltInRayTmaxKHR;
+ case glslang::EbvCullMask:
+ return spv::BuiltInCullMaskKHR;
case glslang::EbvInstanceCustomIndex:
return spv::BuiltInInstanceCustomIndexKHR;
case glslang::EbvHitT:
@@ -1048,6 +1054,15 @@ spv::BuiltIn TGlslangToSpvTraverser::TranslateBuiltInDecoration(glslang::TBuiltI
builder.addCapability(spv::CapabilityFragmentBarycentricNV);
return spv::BuiltInBaryCoordNoPerspNV;
+ case glslang::EbvBaryCoordEXT:
+ builder.addExtension(spv::E_SPV_KHR_fragment_shader_barycentric);
+ builder.addCapability(spv::CapabilityFragmentBarycentricKHR);
+ return spv::BuiltInBaryCoordKHR;
+ case glslang::EbvBaryCoordNoPerspEXT:
+ builder.addExtension(spv::E_SPV_KHR_fragment_shader_barycentric);
+ builder.addCapability(spv::CapabilityFragmentBarycentricKHR);
+ return spv::BuiltInBaryCoordNoPerspKHR;
+
// mesh shaders
case glslang::EbvTaskCountNV:
return spv::BuiltInTaskCountNV;
@@ -1066,6 +1081,16 @@ spv::BuiltIn TGlslangToSpvTraverser::TranslateBuiltInDecoration(glslang::TBuiltI
case glslang::EbvMeshViewIndicesNV:
return spv::BuiltInMeshViewIndicesNV;
+ // SPV_EXT_mesh_shader
+ case glslang::EbvPrimitivePointIndicesEXT:
+ return spv::BuiltInPrimitivePointIndicesEXT;
+ case glslang::EbvPrimitiveLineIndicesEXT:
+ return spv::BuiltInPrimitiveLineIndicesEXT;
+ case glslang::EbvPrimitiveTriangleIndicesEXT:
+ return spv::BuiltInPrimitiveTriangleIndicesEXT;
+ case glslang::EbvCullPrimitiveEXT:
+ return spv::BuiltInCullPrimitiveEXT;
+
// sm builtins
case glslang::EbvWarpsPerSM:
builder.addExtension(spv::E_SPV_NV_shader_sm_builtins);
@@ -1309,6 +1334,7 @@ spv::StorageClass TGlslangToSpvTraverser::TranslateStorageClass(const glslang::T
case glslang::EvqHitAttr: return spv::StorageClassHitAttributeKHR;
case glslang::EvqCallableData: return spv::StorageClassCallableDataKHR;
case glslang::EvqCallableDataIn: return spv::StorageClassIncomingCallableDataKHR;
+ case glslang::EvqtaskPayloadSharedEXT : return spv::StorageClassTaskPayloadWorkgroupEXT;
case glslang::EvqSpirvStorageClass: return static_cast<spv::StorageClass>(type.getQualifier().spirvStorageClass);
#endif
default:
@@ -1326,7 +1352,9 @@ void TGlslangToSpvTraverser::TranslateLiterals(const glslang::TVector<const glsl
for (auto constant : constants) {
if (constant->getBasicType() == glslang::EbtFloat) {
float floatValue = static_cast<float>(constant->getConstArray()[0].getDConst());
- unsigned literal = *reinterpret_cast<unsigned*>(&floatValue);
+ unsigned literal;
+ static_assert(sizeof(literal) == sizeof(floatValue), "sizeof(unsigned) != sizeof(float)");
+ memcpy(&literal, &floatValue, sizeof(literal));
literals.push_back(literal);
} else if (constant->getBasicType() == glslang::EbtInt) {
unsigned literal = constant->getConstArray()[0].getIConst();
@@ -1452,6 +1480,8 @@ void InheritQualifiers(glslang::TQualifier& child, const glslang::TQualifier& pa
child.perViewNV = true;
if (parent.perTaskNV)
child.perTaskNV = true;
+ if (parent.storage == glslang::EvqtaskPayloadSharedEXT)
+ child.storage = glslang::EvqtaskPayloadSharedEXT;
if (parent.patch)
child.patch = true;
if (parent.sample)
@@ -1511,9 +1541,12 @@ TGlslangToSpvTraverser::TGlslangToSpvTraverser(unsigned int spvVersion,
inEntryPoint(false), entryPointTerminated(false), linkageOnly(false),
glslangIntermediate(glslangIntermediate),
nanMinMaxClamp(glslangIntermediate->getNanMinMaxClamp()),
- nonSemanticDebugPrintf(0)
+ nonSemanticDebugPrintf(0),
+ taskPayloadID(0)
{
- spv::ExecutionModel executionModel = TranslateExecutionModel(glslangIntermediate->getStage());
+ bool isMeshShaderExt = (glslangIntermediate->getRequestedExtensions().find(glslang::E_GL_EXT_mesh_shader) !=
+ glslangIntermediate->getRequestedExtensions().end());
+ spv::ExecutionModel executionModel = TranslateExecutionModel(glslangIntermediate->getStage(), isMeshShaderExt);
builder.clearAccessChain();
builder.setSource(TranslateSourceLanguage(glslangIntermediate->getSource(), glslangIntermediate->getProfile()),
@@ -1545,6 +1578,10 @@ TGlslangToSpvTraverser::TGlslangToSpvTraverser(unsigned int spvVersion,
for (auto iItr = include_txt.begin(); iItr != include_txt.end(); ++iItr)
builder.addInclude(iItr->first, iItr->second);
}
+
+ builder.setEmitNonSemanticShaderDebugInfo(options.emitNonSemanticShaderDebugInfo);
+ builder.setEmitNonSemanticShaderDebugSource(options.emitNonSemanticShaderDebugSource);
+
stdBuiltins = builder.import("GLSL.std.450");
spv::AddressingModel addressingModel = spv::AddressingModelLogical;
@@ -1611,6 +1648,12 @@ TGlslangToSpvTraverser::TGlslangToSpvTraverser(unsigned int spvVersion,
if (glslangIntermediate->getEarlyFragmentTests())
builder.addExecutionMode(shaderEntry, spv::ExecutionModeEarlyFragmentTests);
+ if (glslangIntermediate->getEarlyAndLateFragmentTestsAMD())
+ {
+ builder.addExecutionMode(shaderEntry, spv::ExecutionModeEarlyAndLateFragmentTestsAMD);
+ builder.addExtension(spv::E_SPV_AMD_shader_early_and_late_fragment_tests);
+ }
+
if (glslangIntermediate->getPostDepthCoverage()) {
builder.addCapability(spv::CapabilitySampleMaskPostDepthCoverage);
builder.addExecutionMode(shaderEntry, spv::ExecutionModePostDepthCoverage);
@@ -1620,6 +1663,9 @@ TGlslangToSpvTraverser::TGlslangToSpvTraverser(unsigned int spvVersion,
if (glslangIntermediate->isDepthReplacing())
builder.addExecutionMode(shaderEntry, spv::ExecutionModeDepthReplacing);
+ if (glslangIntermediate->isStencilReplacing())
+ builder.addExecutionMode(shaderEntry, spv::ExecutionModeStencilRefReplacingEXT);
+
#ifndef GLSLANG_WEB
switch(glslangIntermediate->getDepth()) {
@@ -1628,6 +1674,20 @@ TGlslangToSpvTraverser::TGlslangToSpvTraverser(unsigned int spvVersion,
case glslang::EldUnchanged: mode = spv::ExecutionModeDepthUnchanged; break;
default: mode = spv::ExecutionModeMax; break;
}
+
+ if (mode != spv::ExecutionModeMax)
+ builder.addExecutionMode(shaderEntry, (spv::ExecutionMode)mode);
+
+ switch (glslangIntermediate->getStencil()) {
+ case glslang::ElsRefUnchangedFrontAMD: mode = spv::ExecutionModeStencilRefUnchangedFrontAMD; break;
+ case glslang::ElsRefGreaterFrontAMD: mode = spv::ExecutionModeStencilRefGreaterFrontAMD; break;
+ case glslang::ElsRefLessFrontAMD: mode = spv::ExecutionModeStencilRefLessFrontAMD; break;
+ case glslang::ElsRefUnchangedBackAMD: mode = spv::ExecutionModeStencilRefUnchangedBackAMD; break;
+ case glslang::ElsRefGreaterBackAMD: mode = spv::ExecutionModeStencilRefGreaterBackAMD; break;
+ case glslang::ElsRefLessBackAMD: mode = spv::ExecutionModeStencilRefLessBackAMD; break;
+ default: mode = spv::ExecutionModeMax; break;
+ }
+
if (mode != spv::ExecutionModeMax)
builder.addExecutionMode(shaderEntry, (spv::ExecutionMode)mode);
switch (glslangIntermediate->getInterlockOrdering()) {
@@ -1766,7 +1826,7 @@ TGlslangToSpvTraverser::TGlslangToSpvTraverser(unsigned int spvVersion,
case EShLangAnyHit:
case EShLangClosestHit:
case EShLangMiss:
- case EShLangCallable:
+ case EShLangCallable:
{
auto& extensions = glslangIntermediate->getRequestedExtensions();
if (extensions.find("GL_NV_ray_tracing") == extensions.end()) {
@@ -1777,12 +1837,24 @@ TGlslangToSpvTraverser::TGlslangToSpvTraverser(unsigned int spvVersion,
builder.addCapability(spv::CapabilityRayTracingNV);
builder.addExtension("SPV_NV_ray_tracing");
}
+ if (glslangIntermediate->getStage() != EShLangRayGen && glslangIntermediate->getStage() != EShLangCallable)
+ {
+ if (extensions.find("GL_EXT_ray_cull_mask") != extensions.end()) {
+ builder.addCapability(spv::CapabilityRayCullMaskKHR);
+ builder.addExtension("SPV_KHR_ray_cull_mask");
+ }
+ }
break;
}
- case EShLangTaskNV:
- case EShLangMeshNV:
- builder.addCapability(spv::CapabilityMeshShadingNV);
- builder.addExtension(spv::E_SPV_NV_mesh_shader);
+ case EShLangTask:
+ case EShLangMesh:
+ if(isMeshShaderExt) {
+ builder.addCapability(spv::CapabilityMeshShadingEXT);
+ builder.addExtension(spv::E_SPV_EXT_mesh_shader);
+ } else {
+ builder.addCapability(spv::CapabilityMeshShadingNV);
+ builder.addExtension(spv::E_SPV_NV_mesh_shader);
+ }
if (glslangIntermediate->getSpv().spv >= glslang::EShTargetSpv_1_6) {
std::vector<spv::Id> dimConstId;
for (int dim = 0; dim < 3; ++dim) {
@@ -1799,7 +1871,7 @@ TGlslangToSpvTraverser::TGlslangToSpvTraverser(unsigned int spvVersion,
glslangIntermediate->getLocalSize(1),
glslangIntermediate->getLocalSize(2));
}
- if (glslangIntermediate->getStage() == EShLangMeshNV) {
+ if (glslangIntermediate->getStage() == EShLangMesh) {
builder.addExecutionMode(shaderEntry, spv::ExecutionModeOutputVertices,
glslangIntermediate->getVertices());
builder.addExecutionMode(shaderEntry, spv::ExecutionModeOutputPrimitivesNV,
@@ -1918,7 +1990,6 @@ void TGlslangToSpvTraverser::visitSymbol(glslang::TIntermSymbol* symbol)
if (symbol->getType().getQualifier().isSpecConstant())
spec_constant_op_mode_setter.turnOnSpecConstantOpMode();
-
#ifdef ENABLE_HLSL
// Skip symbol handling if it is string-typed
if (symbol->getBasicType() == glslang::EbtString)
@@ -1929,6 +2000,9 @@ void TGlslangToSpvTraverser::visitSymbol(glslang::TIntermSymbol* symbol)
// Formal function parameters were mapped during makeFunctions().
spv::Id id = getSymbolId(symbol);
+ if (symbol->getType().getQualifier().isTaskPayload())
+ taskPayloadID = id; // cache the taskPayloadID so it can be used as an operand for OpEmitMeshTasksEXT
+
if (builder.isPointer(id)) {
if (!symbol->getType().getQualifier().isParamInput() &&
!symbol->getType().getQualifier().isParamOutput()) {
@@ -2456,6 +2530,14 @@ bool TGlslangToSpvTraverser::visitUnary(glslang::TVisit /* visit */, glslang::TI
return false;
}
+ // Force variable declaration - Debug Mode Only
+ if (node->getOp() == glslang::EOpDeclare) {
+ builder.clearAccessChain();
+ node->getOperand()->traverse(this);
+ builder.clearAccessChain();
+ return false;
+ }
+
// Start by evaluating the operand
// Does it need a swizzle inversion? If so, evaluation is inverted;
@@ -2472,7 +2554,7 @@ bool TGlslangToSpvTraverser::visitUnary(glslang::TVisit /* visit */, glslang::TI
operandNode = node->getOperand()->getAsBinaryNode()->getLeft();
else
operandNode = node->getOperand();
-
+
operandNode->traverse(this);
spv::Id operand = spv::NoResult;
@@ -2716,32 +2798,38 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt
spv::Decoration precision = TranslatePrecisionDecoration(node->getOperationPrecision());
switch (node->getOp()) {
+ case glslang::EOpScope:
case glslang::EOpSequence:
{
- if (preVisit)
+ if (visit == glslang::EvPreVisit) {
++sequenceDepth;
- else
- --sequenceDepth;
-
- if (sequenceDepth == 1) {
- // If this is the parent node of all the functions, we want to see them
- // early, so all call points have actual SPIR-V functions to reference.
- // In all cases, still let the traverser visit the children for us.
- makeFunctions(node->getAsAggregate()->getSequence());
+ if (sequenceDepth == 1) {
+ // If this is the parent node of all the functions, we want to see them
+ // early, so all call points have actual SPIR-V functions to reference.
+ // In all cases, still let the traverser visit the children for us.
+ makeFunctions(node->getAsAggregate()->getSequence());
- // Also, we want all globals initializers to go into the beginning of the entry point, before
- // anything else gets there, so visit out of order, doing them all now.
- makeGlobalInitializers(node->getAsAggregate()->getSequence());
+ // Also, we want all globals initializers to go into the beginning of the entry point, before
+ // anything else gets there, so visit out of order, doing them all now.
+ makeGlobalInitializers(node->getAsAggregate()->getSequence());
- //Pre process linker objects for ray tracing stages
- if (glslangIntermediate->isRayTracingStage())
- collectRayTracingLinkerObjects();
+ //Pre process linker objects for ray tracing stages
+ if (glslangIntermediate->isRayTracingStage())
+ collectRayTracingLinkerObjects();
- // Initializers are done, don't want to visit again, but functions and link objects need to be processed,
- // so do them manually.
- visitFunctions(node->getAsAggregate()->getSequence());
+ // Initializers are done, don't want to visit again, but functions and link objects need to be processed,
+ // so do them manually.
+ visitFunctions(node->getAsAggregate()->getSequence());
- return false;
+ return false;
+ } else {
+ if (node->getOp() == glslang::EOpScope)
+ builder.enterScope(0);
+ }
+ } else {
+ if (sequenceDepth > 1 && node->getOp() == glslang::EOpScope)
+ builder.leaveScope();
+ --sequenceDepth;
}
return true;
@@ -2770,10 +2858,17 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt
if (isShaderEntryPoint(node)) {
inEntryPoint = true;
builder.setBuildPoint(shaderEntry->getLastBlock());
+ builder.enterFunction(shaderEntry);
currentFunction = shaderEntry;
} else {
handleFunctionEntry(node);
}
+ if (options.generateDebugInfo) {
+ const auto& loc = node->getLoc();
+ const char* sourceFileName = loc.getFilename();
+ spv::Id sourceFileId = sourceFileName ? builder.getStringId(sourceFileName) : builder.getSourceFile();
+ currentFunction->setDebugLineInfo(sourceFileId, loc.line, loc.column);
+ }
} else {
if (inEntryPoint)
entryPointTerminated = true;
@@ -2913,9 +3008,17 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt
std::vector<spv::Id> arguments;
translateArguments(*node, arguments, lvalueCoherentFlags);
spv::Id constructed;
- if (node->getOp() == glslang::EOpConstructTextureSampler)
- constructed = builder.createOp(spv::OpSampledImage, resultType(), arguments);
- else if (node->getOp() == glslang::EOpConstructStruct ||
+ if (node->getOp() == glslang::EOpConstructTextureSampler) {
+ const glslang::TType& texType = node->getSequence()[0]->getAsTyped()->getType();
+ if (glslangIntermediate->getSpv().spv >= glslang::EShTargetSpv_1_6 &&
+ texType.getSampler().isBuffer()) {
+ // samplerBuffer is not supported in SPIR-V 1.6, so
+ // `samplerBuffer(textureBuffer, sampler)` is a no-op
+ // and the textureBuffer argument is the result going forward
+ constructed = arguments[0];
+ } else
+ constructed = builder.createOp(spv::OpSampledImage, resultType(), arguments);
+ } else if (node->getOp() == glslang::EOpConstructStruct ||
node->getOp() == glslang::EOpConstructCooperativeMatrix ||
node->getType().isArray()) {
std::vector<spv::Id> constituents;
@@ -3055,6 +3158,8 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt
case glslang::EOpExecuteCallableNV:
case glslang::EOpExecuteCallableKHR:
case glslang::EOpWritePackedPrimitiveIndices4x8NV:
+ case glslang::EOpEmitMeshTasksEXT:
+ case glslang::EOpSetMeshOutputsEXT:
noReturnValue = true;
break;
case glslang::EOpRayQueryInitialize:
@@ -3449,7 +3554,7 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt
break;
case 1:
{
- OpDecorations decorations = { precision,
+ OpDecorations decorations = { precision,
TranslateNoContractionDecoration(node->getType().getQualifier()),
TranslateNonUniformDecoration(node->getType().getQualifier()) };
result = createUnaryOperation(
@@ -3571,7 +3676,7 @@ bool TGlslangToSpvTraverser::visitSelection(glslang::TVisit /* visit */, glslang
// smear condition to vector, if necessary (AST is always scalar)
// Before 1.4, smear like for mix(), starting with 1.4, keep it scalar
if (glslangIntermediate->getSpv().spv < glslang::EShTargetSpv_1_4 && builder.isVector(trueValue)) {
- condition = builder.smearScalar(spv::NoPrecision, condition,
+ condition = builder.smearScalar(spv::NoPrecision, condition,
builder.makeVectorType(builder.makeBoolType(),
builder.getNumComponents(trueValue)));
}
@@ -3742,8 +3847,8 @@ bool TGlslangToSpvTraverser::visitLoop(glslang::TVisit /* visit */, glslang::TIn
// by a block-ending branch. But we don't want to put any other body/test
// instructions in it, since the body/test may have arbitrary instructions,
// including merges of its own.
- builder.setLine(node->getLoc().line, node->getLoc().getFilename());
builder.setBuildPoint(&blocks.head);
+ builder.setLine(node->getLoc().line, node->getLoc().getFilename());
builder.createLoopMerge(&blocks.merge, &blocks.continue_target, control, operands);
if (node->testFirst() && node->getTest()) {
spv::Block& test = builder.makeNewBlock();
@@ -3962,7 +4067,7 @@ spv::Id TGlslangToSpvTraverser::createSpvVariable(const glslang::TIntermSymbol*
initializer = builder.makeNullConstant(spvType);
}
- return builder.createVariable(spv::NoPrecision, storageClass, spvType, name, initializer);
+ return builder.createVariable(spv::NoPrecision, storageClass, spvType, name, initializer, false);
}
// Return type Id of the sampled type.
@@ -4050,7 +4155,7 @@ spv::Id TGlslangToSpvTraverser::convertGlslangToSpvType(const glslang::TType& ty
if (explicitLayout != glslang::ElpNone)
spvType = builder.makeUintType(32);
else
- spvType = builder.makeBoolType();
+ spvType = builder.makeBoolType(false);
break;
case glslang::EbtInt:
spvType = builder.makeIntType(32);
@@ -4150,8 +4255,10 @@ spv::Id TGlslangToSpvTraverser::convertGlslangToSpvType(const glslang::TType& ty
spvType = builder.makeImageType(getSampledType(sampler), TranslateDimensionality(sampler),
sampler.isShadow(), sampler.isArrayed(), sampler.isMultiSample(),
sampler.isImageClass() ? 2 : 1, TranslateImageFormat(type));
- if (sampler.isCombined()) {
- // already has both image and sampler, make the combined type
+ if (sampler.isCombined() &&
+ (!sampler.isBuffer() || glslangIntermediate->getSpv().spv < glslang::EShTargetSpv_1_6)) {
+ // Already has both image and sampler, make the combined type. Only combine sampler to
+ // buffer if before SPIR-V 1.6.
spvType = builder.makeSampledImageType(spvType);
}
}
@@ -4191,7 +4298,9 @@ spv::Id TGlslangToSpvTraverser::convertGlslangToSpvType(const glslang::TType& ty
if (typeParam.constant->isLiteral()) {
if (typeParam.constant->getBasicType() == glslang::EbtFloat) {
float floatValue = static_cast<float>(typeParam.constant->getConstArray()[0].getDConst());
- unsigned literal = *reinterpret_cast<unsigned*>(&floatValue);
+ unsigned literal;
+ static_assert(sizeof(literal) == sizeof(floatValue), "sizeof(unsigned) != sizeof(float)");
+ memcpy(&literal, &floatValue, sizeof(literal));
operands.push_back({false, literal});
} else if (typeParam.constant->getBasicType() == glslang::EbtInt) {
unsigned literal = typeParam.constant->getConstArray()[0].getIConst();
@@ -4336,7 +4445,7 @@ bool TGlslangToSpvTraverser::filterMember(const glslang::TType& member)
extensions.find("GL_NV_stereo_view_rendering") == extensions.end())
return true;
- if (glslangIntermediate->getStage() != EShLangMeshNV) {
+ if (glslangIntermediate->getStage() != EShLangMesh) {
if (member.getFieldName() == "gl_ViewportMask" &&
extensions.find("GL_NV_viewport_array2") == extensions.end())
return true;
@@ -4366,14 +4475,14 @@ spv::Id TGlslangToSpvTraverser::convertGlslangStructToSpvType(const glslang::TTy
// except sometimes for blocks
std::vector<std::pair<glslang::TType*, glslang::TQualifier> > deferredForwardPointers;
for (int i = 0; i < (int)glslangMembers->size(); i++) {
- glslang::TType& glslangMember = *(*glslangMembers)[i].type;
- if (glslangMember.hiddenMember()) {
+ auto& glslangMember = (*glslangMembers)[i];
+ if (glslangMember.type->hiddenMember()) {
++memberDelta;
if (type.getBasicType() == glslang::EbtBlock)
memberRemapper[glslangTypeToIdMap[glslangMembers]][i] = -1;
} else {
if (type.getBasicType() == glslang::EbtBlock) {
- if (filterMember(glslangMember)) {
+ if (filterMember(*glslangMember.type)) {
memberDelta++;
memberRemapper[glslangTypeToIdMap[glslangMembers]][i] = -1;
continue;
@@ -4381,7 +4490,7 @@ spv::Id TGlslangToSpvTraverser::convertGlslangStructToSpvType(const glslang::TTy
memberRemapper[glslangTypeToIdMap[glslangMembers]][i] = i - memberDelta;
}
// modify just this child's view of the qualifier
- glslang::TQualifier memberQualifier = glslangMember.getQualifier();
+ glslang::TQualifier memberQualifier = glslangMember.type->getQualifier();
InheritQualifiers(memberQualifier, qualifier);
// manually inherit location
@@ -4392,25 +4501,38 @@ spv::Id TGlslangToSpvTraverser::convertGlslangStructToSpvType(const glslang::TTy
bool lastBufferBlockMember = qualifier.storage == glslang::EvqBuffer &&
i == (int)glslangMembers->size() - 1;
- // Make forward pointers for any pointer members, and create a list of members to
- // convert to spirv types after creating the struct.
- if (glslangMember.isReference()) {
- if (forwardPointers.find(glslangMember.getReferentType()) == forwardPointers.end()) {
- deferredForwardPointers.push_back(std::make_pair(&glslangMember, memberQualifier));
- }
- spvMembers.push_back(
- convertGlslangToSpvType(glslangMember, explicitLayout, memberQualifier, lastBufferBlockMember,
- true));
- } else {
- spvMembers.push_back(
- convertGlslangToSpvType(glslangMember, explicitLayout, memberQualifier, lastBufferBlockMember,
- false));
+ // Make forward pointers for any pointer members.
+ if (glslangMember.type->isReference() &&
+ forwardPointers.find(glslangMember.type->getReferentType()) == forwardPointers.end()) {
+ deferredForwardPointers.push_back(std::make_pair(glslangMember.type, memberQualifier));
+ }
+
+ // Create the member type.
+ auto const spvMember = convertGlslangToSpvType(*glslangMember.type, explicitLayout, memberQualifier, lastBufferBlockMember,
+ glslangMember.type->isReference());
+ spvMembers.push_back(spvMember);
+
+ // Update the builder with the type's location so that we can create debug types for the structure members.
+ // There doesn't exist a "clean" entry point for this information to be passed along to the builder so, for now,
+ // it is stored in the builder and consumed during the construction of composite debug types.
+ // TODO: This probably warrants further investigation. This approach was decided to be the least ugly of the
+ // quick and dirty approaches that were tried.
+ // Advantages of this approach:
+ // + Relatively clean. No direct calls into debug type system.
+ // + Handles nested recursive structures.
+ // Disadvantages of this approach:
+ // + Not as clean as desired. Traverser queries/sets persistent state. This is fragile.
+ // + Table lookup during creation of composite debug types. This really shouldn't be necessary.
+ if(options.emitNonSemanticShaderDebugInfo) {
+ builder.debugTypeLocs[spvMember].name = glslangMember.type->getFieldName().c_str();
+ builder.debugTypeLocs[spvMember].line = glslangMember.loc.line;
+ builder.debugTypeLocs[spvMember].column = glslangMember.loc.column;
}
}
}
// Make the SPIR-V type
- spv::Id spvType = builder.makeStructType(spvMembers, type.getTypeName().c_str());
+ spv::Id spvType = builder.makeStructType(spvMembers, type.getTypeName().c_str(), false);
if (! HasNonLayoutQualifiers(type, qualifier))
structMap[explicitLayout][qualifier.layoutMatrix][glslangMembers] = spvType;
@@ -5004,6 +5126,7 @@ void TGlslangToSpvTraverser::makeFunctions(const glslang::TIntermSequence& glslF
// GLSL has copy-in/copy-out semantics. They can be handled though with a pointer to a copy.
std::vector<spv::Id> paramTypes;
+ std::vector<char const*> paramNames;
std::vector<std::vector<spv::Decoration>> paramDecorations; // list of decorations per parameter
glslang::TIntermSequence& parameters = glslFunction->getSequence()[0]->getAsAggregate()->getSequence();
@@ -5028,10 +5151,14 @@ void TGlslangToSpvTraverser::makeFunctions(const glslang::TIntermSequence& glslF
paramTypes.push_back(typeId);
}
+ for (auto const parameter:parameters) {
+ paramNames.push_back(parameter->getAsSymbolNode()->getName().c_str());
+ }
+
spv::Block* functionBlock;
spv::Function *function = builder.makeFunctionEntry(TranslatePrecisionDecoration(glslFunction->getType()),
convertGlslangToSpvType(glslFunction->getType()),
- glslFunction->getName().c_str(), paramTypes,
+ glslFunction->getName().c_str(), paramTypes, paramNames,
paramDecorations, &functionBlock);
if (implicitThis)
function->setImplicitThis();
@@ -5121,6 +5248,7 @@ void TGlslangToSpvTraverser::handleFunctionEntry(const glslang::TIntermAggregate
currentFunction = functionMap[node->getName().c_str()];
spv::Block* functionBlock = currentFunction->getEntryBlock();
builder.setBuildPoint(functionBlock);
+ builder.enterFunction(currentFunction);
}
void TGlslangToSpvTraverser::translateArguments(const glslang::TIntermAggregate& node, std::vector<spv::Id>& arguments,
@@ -5548,10 +5676,12 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO
operands.push_back(sample);
spv::Id resultTypeId;
+ glslang::TBasicType typeProxy = node->getBasicType();
// imageAtomicStore has a void return type so base the pointer type on
// the type of the value operand.
if (node->getOp() == glslang::EOpImageAtomicStore) {
resultTypeId = builder.makePointer(spv::StorageClassImage, builder.getTypeId(*opIt));
+ typeProxy = node->getAsAggregate()->getSequence()[0]->getAsTyped()->getType().getSampler().type;
} else {
resultTypeId = builder.makePointer(spv::StorageClassImage, resultType());
}
@@ -5565,7 +5695,7 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO
for (; opIt != arguments.end(); ++opIt)
operands.push_back(*opIt);
- return createAtomicOperation(node->getOp(), precision, resultType(), operands, node->getBasicType(),
+ return createAtomicOperation(node->getOp(), precision, resultType(), operands, typeProxy,
lvalueCoherentFlags);
}
}
@@ -5769,10 +5899,10 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO
assert(builder.isStructType(resultStructType));
//resType (SPIR-V type) contains 6 elements:
- //Member 0 must be a Boolean type scalar(LOD),
- //Member 1 must be a vector of integer type, whose Signedness operand is 0(anchor),
- //Member 2 must be a vector of integer type, whose Signedness operand is 0(offset),
- //Member 3 must be a vector of integer type, whose Signedness operand is 0(mask),
+ //Member 0 must be a Boolean type scalar(LOD),
+ //Member 1 must be a vector of integer type, whose Signedness operand is 0(anchor),
+ //Member 2 must be a vector of integer type, whose Signedness operand is 0(offset),
+ //Member 3 must be a vector of integer type, whose Signedness operand is 0(mask),
//Member 4 must be a scalar of integer type, whose Signedness operand is 0(lod),
//Member 5 must be a scalar of integer type, whose Signedness operand is 0(granularity).
std::vector<spv::Id> members;
@@ -5785,7 +5915,7 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO
//call ImageFootprintNV
spv::Id res = builder.createTextureCall(precision, resType, sparse, cracked.fetch, cracked.proj,
cracked.gather, noImplicitLod, params, signExtensionMask());
-
+
//copy resType (SPIR-V type) to resultStructType(OpenGL type)
for (int i = 0; i < 5; i++) {
builder.clearAccessChain();
@@ -5838,7 +5968,7 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO
}
#endif
- std::vector<spv::Id> result( 1,
+ std::vector<spv::Id> result( 1,
builder.createTextureCall(precision, resultType(), sparse, cracked.fetch, cracked.proj, cracked.gather,
noImplicitLod, params, signExtensionMask())
);
@@ -7361,7 +7491,7 @@ spv::Id TGlslangToSpvTraverser::createAtomicOperation(glslang::TOperator op, spv
} else {
scopeId = builder.makeUintConstant(spv::ScopeDevice);
}
- // semantics default to relaxed
+ // semantics default to relaxed
spv::Id semanticsId = builder.makeUintConstant(lvalueCoherentFlags.isVolatile() &&
glslangIntermediate->usingVulkanMemoryModel() ?
spv::MemorySemanticsVolatileMask :
@@ -8465,6 +8595,15 @@ spv::Id TGlslangToSpvTraverser::createMiscOperation(glslang::TOperator op, spv::
case glslang::EOpWritePackedPrimitiveIndices4x8NV:
builder.createNoResultOp(spv::OpWritePackedPrimitiveIndices4x8NV, operands);
return 0;
+ case glslang::EOpEmitMeshTasksEXT:
+ if (taskPayloadID)
+ operands.push_back(taskPayloadID);
+ // As per SPV_EXT_mesh_shader make it a terminating instruction in the current block
+ builder.makeStatementTerminator(spv::OpEmitMeshTasksEXT, operands, "post-OpEmitMeshTasksEXT");
+ return 0;
+ case glslang::EOpSetMeshOutputsEXT:
+ builder.createNoResultOp(spv::OpSetMeshOutputsEXT, operands);
+ return 0;
case glslang::EOpCooperativeMatrixMulAdd:
opCode = spv::OpCooperativeMatrixMulAddNV;
break;
@@ -8728,7 +8867,32 @@ spv::Id TGlslangToSpvTraverser::getSymbolId(const glslang::TIntermSymbol* symbol
// it was not found, create it
spv::BuiltIn builtIn = TranslateBuiltInDecoration(symbol->getQualifier().builtIn, false);
auto forcedType = getForcedType(symbol->getQualifier().builtIn, symbol->getType());
+
+ // There are pairs of symbols that map to the same SPIR-V built-in:
+ // gl_ObjectToWorldEXT and gl_ObjectToWorld3x4EXT, and gl_WorldToObjectEXT
+ // and gl_WorldToObject3x4EXT. SPIR-V forbids having two OpVariables
+ // with the same BuiltIn in the same storage class, so we must re-use one.
+ const bool mayNeedToReuseBuiltIn =
+ builtIn == spv::BuiltInObjectToWorldKHR ||
+ builtIn == spv::BuiltInWorldToObjectKHR;
+
+ if (mayNeedToReuseBuiltIn) {
+ auto iter = builtInVariableIds.find(uint32_t(builtIn));
+ if (builtInVariableIds.end() != iter) {
+ id = iter->second;
+ symbolValues[symbol->getId()] = id;
+ if (forcedType.second != spv::NoType)
+ forceType[id] = forcedType.second;
+ return id;
+ }
+ }
+
id = createSpvVariable(symbol, forcedType.first);
+
+ if (mayNeedToReuseBuiltIn) {
+ builtInVariableIds.insert({uint32_t(builtIn), id});
+ }
+
symbolValues[symbol->getId()] = id;
if (forcedType.second != spv::NoType)
forceType[id] = forcedType.second;
@@ -8857,6 +9021,12 @@ spv::Id TGlslangToSpvTraverser::getSymbolId(const glslang::TIntermSymbol* symbol
builder.addExtension(spv::E_SPV_NV_fragment_shader_barycentric);
}
+ if (symbol->getQualifier().pervertexEXT) {
+ builder.addDecoration(id, spv::DecorationPerVertexKHR);
+ builder.addCapability(spv::CapabilityFragmentBarycentricKHR);
+ builder.addExtension(spv::E_SPV_KHR_fragment_shader_barycentric);
+ }
+
if (glslangIntermediate->getHlslFunctionality1() && symbol->getType().getQualifier().semanticName != nullptr) {
builder.addExtension("SPV_GOOGLE_hlsl_functionality1");
builder.addDecoration(id, (spv::Decoration)spv::DecorationHlslSemanticGOOGLE,
@@ -8918,13 +9088,21 @@ spv::Id TGlslangToSpvTraverser::getSymbolId(const glslang::TIntermSymbol* symbol
// add per-primitive, per-view, per-task decorations to a struct member (member >= 0) or an object
void TGlslangToSpvTraverser::addMeshNVDecoration(spv::Id id, int member, const glslang::TQualifier& qualifier)
{
+ bool isMeshShaderExt = (glslangIntermediate->getRequestedExtensions().find(glslang::E_GL_EXT_mesh_shader) !=
+ glslangIntermediate->getRequestedExtensions().end());
+
if (member >= 0) {
if (qualifier.perPrimitiveNV) {
// Need to add capability/extension for fragment shader.
// Mesh shader already adds this by default.
if (glslangIntermediate->getStage() == EShLangFragment) {
- builder.addCapability(spv::CapabilityMeshShadingNV);
- builder.addExtension(spv::E_SPV_NV_mesh_shader);
+ if(isMeshShaderExt) {
+ builder.addCapability(spv::CapabilityMeshShadingEXT);
+ builder.addExtension(spv::E_SPV_EXT_mesh_shader);
+ } else {
+ builder.addCapability(spv::CapabilityMeshShadingNV);
+ builder.addExtension(spv::E_SPV_NV_mesh_shader);
+ }
}
builder.addMemberDecoration(id, (unsigned)member, spv::DecorationPerPrimitiveNV);
}
@@ -8937,8 +9115,13 @@ void TGlslangToSpvTraverser::addMeshNVDecoration(spv::Id id, int member, const g
// Need to add capability/extension for fragment shader.
// Mesh shader already adds this by default.
if (glslangIntermediate->getStage() == EShLangFragment) {
- builder.addCapability(spv::CapabilityMeshShadingNV);
- builder.addExtension(spv::E_SPV_NV_mesh_shader);
+ if(isMeshShaderExt) {
+ builder.addCapability(spv::CapabilityMeshShadingEXT);
+ builder.addExtension(spv::E_SPV_EXT_mesh_shader);
+ } else {
+ builder.addCapability(spv::CapabilityMeshShadingNV);
+ builder.addExtension(spv::E_SPV_NV_mesh_shader);
+ }
}
builder.addDecoration(id, spv::DecorationPerPrimitiveNV);
}
@@ -9078,15 +9261,19 @@ spv::Id TGlslangToSpvTraverser::createSpvConstantFromConstUnionArray(const glsla
break;
#ifndef GLSLANG_WEB
case glslang::EbtInt8:
+ builder.addCapability(spv::CapabilityInt8);
spvConsts.push_back(builder.makeInt8Constant(zero ? 0 : consts[nextConst].getI8Const()));
break;
case glslang::EbtUint8:
+ builder.addCapability(spv::CapabilityInt8);
spvConsts.push_back(builder.makeUint8Constant(zero ? 0 : consts[nextConst].getU8Const()));
break;
case glslang::EbtInt16:
+ builder.addCapability(spv::CapabilityInt16);
spvConsts.push_back(builder.makeInt16Constant(zero ? 0 : consts[nextConst].getI16Const()));
break;
case glslang::EbtUint16:
+ builder.addCapability(spv::CapabilityInt16);
spvConsts.push_back(builder.makeUint16Constant(zero ? 0 : consts[nextConst].getU16Const()));
break;
case glslang::EbtInt64:
@@ -9099,6 +9286,7 @@ spv::Id TGlslangToSpvTraverser::createSpvConstantFromConstUnionArray(const glsla
spvConsts.push_back(builder.makeDoubleConstant(zero ? 0.0 : consts[nextConst].getDConst()));
break;
case glslang::EbtFloat16:
+ builder.addCapability(spv::CapabilityFloat16);
spvConsts.push_back(builder.makeFloat16Constant(zero ? 0.0F : (float)consts[nextConst].getDConst()));
break;
#endif
@@ -9127,15 +9315,19 @@ spv::Id TGlslangToSpvTraverser::createSpvConstantFromConstUnionArray(const glsla
break;
#ifndef GLSLANG_WEB
case glslang::EbtInt8:
+ builder.addCapability(spv::CapabilityInt8);
scalar = builder.makeInt8Constant(zero ? 0 : consts[nextConst].getI8Const(), specConstant);
break;
case glslang::EbtUint8:
+ builder.addCapability(spv::CapabilityInt8);
scalar = builder.makeUint8Constant(zero ? 0 : consts[nextConst].getU8Const(), specConstant);
break;
case glslang::EbtInt16:
+ builder.addCapability(spv::CapabilityInt16);
scalar = builder.makeInt16Constant(zero ? 0 : consts[nextConst].getI16Const(), specConstant);
break;
case glslang::EbtUint16:
+ builder.addCapability(spv::CapabilityInt16);
scalar = builder.makeUint16Constant(zero ? 0 : consts[nextConst].getU16Const(), specConstant);
break;
case glslang::EbtInt64:
@@ -9148,6 +9340,7 @@ spv::Id TGlslangToSpvTraverser::createSpvConstantFromConstUnionArray(const glsla
scalar = builder.makeDoubleConstant(zero ? 0.0 : consts[nextConst].getDConst(), specConstant);
break;
case glslang::EbtFloat16:
+ builder.addCapability(spv::CapabilityFloat16);
scalar = builder.makeFloat16Constant(zero ? 0.0F : (float)consts[nextConst].getDConst(), specConstant);
break;
case glslang::EbtReference:
@@ -9345,7 +9538,8 @@ int GetSpirvGeneratorVersion()
// return 7; // GLSL volatile keyword maps to both SPIR-V decorations Volatile and Coherent
// return 8; // switch to new dead block eliminator; use OpUnreachable
// return 9; // don't include opaque function parameters in OpEntryPoint global's operand list
- return 10; // Generate OpFUnordNotEqual for != comparisons
+ // return 10; // Generate OpFUnordNotEqual for != comparisons
+ return 11; // Make OpEmitMeshTasksEXT a terminal instruction
}
// Write SPIR-V out to a binary file
diff --git a/thirdparty/glslang/SPIRV/NonSemanticShaderDebugInfo100.h b/thirdparty/glslang/SPIRV/NonSemanticShaderDebugInfo100.h
new file mode 100644
index 0000000000..c52f32f809
--- /dev/null
+++ b/thirdparty/glslang/SPIRV/NonSemanticShaderDebugInfo100.h
@@ -0,0 +1,171 @@
+// Copyright (c) 2018 The Khronos Group Inc.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and/or associated documentation files (the "Materials"),
+// to deal in the Materials without restriction, including without limitation
+// the rights to use, copy, modify, merge, publish, distribute, sublicense,
+// and/or sell copies of the Materials, and to permit persons to whom the
+// Materials are furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Materials.
+//
+// MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS
+// STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND
+// HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/
+//
+// THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM,OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS
+// IN THE MATERIALS.
+
+#ifndef SPIRV_UNIFIED1_NonSemanticShaderDebugInfo100_H_
+#define SPIRV_UNIFIED1_NonSemanticShaderDebugInfo100_H_
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+enum {
+ NonSemanticShaderDebugInfo100Version = 100,
+ NonSemanticShaderDebugInfo100Version_BitWidthPadding = 0x7fffffff
+};
+enum {
+ NonSemanticShaderDebugInfo100Revision = 6,
+ NonSemanticShaderDebugInfo100Revision_BitWidthPadding = 0x7fffffff
+};
+
+enum NonSemanticShaderDebugInfo100Instructions {
+ NonSemanticShaderDebugInfo100DebugInfoNone = 0,
+ NonSemanticShaderDebugInfo100DebugCompilationUnit = 1,
+ NonSemanticShaderDebugInfo100DebugTypeBasic = 2,
+ NonSemanticShaderDebugInfo100DebugTypePointer = 3,
+ NonSemanticShaderDebugInfo100DebugTypeQualifier = 4,
+ NonSemanticShaderDebugInfo100DebugTypeArray = 5,
+ NonSemanticShaderDebugInfo100DebugTypeVector = 6,
+ NonSemanticShaderDebugInfo100DebugTypedef = 7,
+ NonSemanticShaderDebugInfo100DebugTypeFunction = 8,
+ NonSemanticShaderDebugInfo100DebugTypeEnum = 9,
+ NonSemanticShaderDebugInfo100DebugTypeComposite = 10,
+ NonSemanticShaderDebugInfo100DebugTypeMember = 11,
+ NonSemanticShaderDebugInfo100DebugTypeInheritance = 12,
+ NonSemanticShaderDebugInfo100DebugTypePtrToMember = 13,
+ NonSemanticShaderDebugInfo100DebugTypeTemplate = 14,
+ NonSemanticShaderDebugInfo100DebugTypeTemplateParameter = 15,
+ NonSemanticShaderDebugInfo100DebugTypeTemplateTemplateParameter = 16,
+ NonSemanticShaderDebugInfo100DebugTypeTemplateParameterPack = 17,
+ NonSemanticShaderDebugInfo100DebugGlobalVariable = 18,
+ NonSemanticShaderDebugInfo100DebugFunctionDeclaration = 19,
+ NonSemanticShaderDebugInfo100DebugFunction = 20,
+ NonSemanticShaderDebugInfo100DebugLexicalBlock = 21,
+ NonSemanticShaderDebugInfo100DebugLexicalBlockDiscriminator = 22,
+ NonSemanticShaderDebugInfo100DebugScope = 23,
+ NonSemanticShaderDebugInfo100DebugNoScope = 24,
+ NonSemanticShaderDebugInfo100DebugInlinedAt = 25,
+ NonSemanticShaderDebugInfo100DebugLocalVariable = 26,
+ NonSemanticShaderDebugInfo100DebugInlinedVariable = 27,
+ NonSemanticShaderDebugInfo100DebugDeclare = 28,
+ NonSemanticShaderDebugInfo100DebugValue = 29,
+ NonSemanticShaderDebugInfo100DebugOperation = 30,
+ NonSemanticShaderDebugInfo100DebugExpression = 31,
+ NonSemanticShaderDebugInfo100DebugMacroDef = 32,
+ NonSemanticShaderDebugInfo100DebugMacroUndef = 33,
+ NonSemanticShaderDebugInfo100DebugImportedEntity = 34,
+ NonSemanticShaderDebugInfo100DebugSource = 35,
+ NonSemanticShaderDebugInfo100DebugFunctionDefinition = 101,
+ NonSemanticShaderDebugInfo100DebugSourceContinued = 102,
+ NonSemanticShaderDebugInfo100DebugLine = 103,
+ NonSemanticShaderDebugInfo100DebugNoLine = 104,
+ NonSemanticShaderDebugInfo100DebugBuildIdentifier = 105,
+ NonSemanticShaderDebugInfo100DebugStoragePath = 106,
+ NonSemanticShaderDebugInfo100DebugEntryPoint = 107,
+ NonSemanticShaderDebugInfo100DebugTypeMatrix = 108,
+ NonSemanticShaderDebugInfo100InstructionsMax = 0x7fffffff
+};
+
+
+enum NonSemanticShaderDebugInfo100DebugInfoFlags {
+ NonSemanticShaderDebugInfo100None = 0x0000,
+ NonSemanticShaderDebugInfo100FlagIsProtected = 0x01,
+ NonSemanticShaderDebugInfo100FlagIsPrivate = 0x02,
+ NonSemanticShaderDebugInfo100FlagIsPublic = 0x03,
+ NonSemanticShaderDebugInfo100FlagIsLocal = 0x04,
+ NonSemanticShaderDebugInfo100FlagIsDefinition = 0x08,
+ NonSemanticShaderDebugInfo100FlagFwdDecl = 0x10,
+ NonSemanticShaderDebugInfo100FlagArtificial = 0x20,
+ NonSemanticShaderDebugInfo100FlagExplicit = 0x40,
+ NonSemanticShaderDebugInfo100FlagPrototyped = 0x80,
+ NonSemanticShaderDebugInfo100FlagObjectPointer = 0x100,
+ NonSemanticShaderDebugInfo100FlagStaticMember = 0x200,
+ NonSemanticShaderDebugInfo100FlagIndirectVariable = 0x400,
+ NonSemanticShaderDebugInfo100FlagLValueReference = 0x800,
+ NonSemanticShaderDebugInfo100FlagRValueReference = 0x1000,
+ NonSemanticShaderDebugInfo100FlagIsOptimized = 0x2000,
+ NonSemanticShaderDebugInfo100FlagIsEnumClass = 0x4000,
+ NonSemanticShaderDebugInfo100FlagTypePassByValue = 0x8000,
+ NonSemanticShaderDebugInfo100FlagTypePassByReference = 0x10000,
+ NonSemanticShaderDebugInfo100FlagUnknownPhysicalLayout = 0x20000,
+ NonSemanticShaderDebugInfo100DebugInfoFlagsMax = 0x7fffffff
+};
+
+enum NonSemanticShaderDebugInfo100BuildIdentifierFlags {
+ NonSemanticShaderDebugInfo100IdentifierPossibleDuplicates = 0x01,
+ NonSemanticShaderDebugInfo100BuildIdentifierFlagsMax = 0x7fffffff
+};
+
+enum NonSemanticShaderDebugInfo100DebugBaseTypeAttributeEncoding {
+ NonSemanticShaderDebugInfo100Unspecified = 0,
+ NonSemanticShaderDebugInfo100Address = 1,
+ NonSemanticShaderDebugInfo100Boolean = 2,
+ NonSemanticShaderDebugInfo100Float = 3,
+ NonSemanticShaderDebugInfo100Signed = 4,
+ NonSemanticShaderDebugInfo100SignedChar = 5,
+ NonSemanticShaderDebugInfo100Unsigned = 6,
+ NonSemanticShaderDebugInfo100UnsignedChar = 7,
+ NonSemanticShaderDebugInfo100DebugBaseTypeAttributeEncodingMax = 0x7fffffff
+};
+
+enum NonSemanticShaderDebugInfo100DebugCompositeType {
+ NonSemanticShaderDebugInfo100Class = 0,
+ NonSemanticShaderDebugInfo100Structure = 1,
+ NonSemanticShaderDebugInfo100Union = 2,
+ NonSemanticShaderDebugInfo100DebugCompositeTypeMax = 0x7fffffff
+};
+
+enum NonSemanticShaderDebugInfo100DebugTypeQualifier {
+ NonSemanticShaderDebugInfo100ConstType = 0,
+ NonSemanticShaderDebugInfo100VolatileType = 1,
+ NonSemanticShaderDebugInfo100RestrictType = 2,
+ NonSemanticShaderDebugInfo100AtomicType = 3,
+ NonSemanticShaderDebugInfo100DebugTypeQualifierMax = 0x7fffffff
+};
+
+enum NonSemanticShaderDebugInfo100DebugOperation {
+ NonSemanticShaderDebugInfo100Deref = 0,
+ NonSemanticShaderDebugInfo100Plus = 1,
+ NonSemanticShaderDebugInfo100Minus = 2,
+ NonSemanticShaderDebugInfo100PlusUconst = 3,
+ NonSemanticShaderDebugInfo100BitPiece = 4,
+ NonSemanticShaderDebugInfo100Swap = 5,
+ NonSemanticShaderDebugInfo100Xderef = 6,
+ NonSemanticShaderDebugInfo100StackValue = 7,
+ NonSemanticShaderDebugInfo100Constu = 8,
+ NonSemanticShaderDebugInfo100Fragment = 9,
+ NonSemanticShaderDebugInfo100DebugOperationMax = 0x7fffffff
+};
+
+enum NonSemanticShaderDebugInfo100DebugImportedEntity {
+ NonSemanticShaderDebugInfo100ImportedModule = 0,
+ NonSemanticShaderDebugInfo100ImportedDeclaration = 1,
+ NonSemanticShaderDebugInfo100DebugImportedEntityMax = 0x7fffffff
+};
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif // SPIRV_UNIFIED1_NonSemanticShaderDebugInfo100_H_
diff --git a/thirdparty/glslang/SPIRV/SPVRemapper.cpp b/thirdparty/glslang/SPIRV/SPVRemapper.cpp
index fdfbeb90cd..6aca8cbcf0 100644
--- a/thirdparty/glslang/SPIRV/SPVRemapper.cpp
+++ b/thirdparty/glslang/SPIRV/SPVRemapper.cpp
@@ -160,15 +160,29 @@ namespace spv {
}
// Is this an opcode we should remove when using --strip?
- bool spirvbin_t::isStripOp(spv::Op opCode) const
+ bool spirvbin_t::isStripOp(spv::Op opCode, unsigned start) const
{
switch (opCode) {
case spv::OpSource:
case spv::OpSourceExtension:
case spv::OpName:
case spv::OpMemberName:
- case spv::OpLine: return true;
- default: return false;
+ case spv::OpLine :
+ {
+ const std::string name = literalString(start + 2);
+
+ std::vector<std::string>::const_iterator it;
+ for (it = stripWhiteList.begin(); it < stripWhiteList.end(); it++)
+ {
+ if (name.find(*it) != std::string::npos) {
+ return false;
+ }
+ }
+
+ return true;
+ }
+ default :
+ return false;
}
}
@@ -372,7 +386,7 @@ namespace spv {
process(
[&](spv::Op opCode, unsigned start) {
// remember opcodes we want to strip later
- if (isStripOp(opCode))
+ if (isStripOp(opCode, start))
stripInst(start);
return true;
},
@@ -1494,13 +1508,24 @@ namespace spv {
}
// remap from a memory image
- void spirvbin_t::remap(std::vector<std::uint32_t>& in_spv, std::uint32_t opts)
+ void spirvbin_t::remap(std::vector<std::uint32_t>& in_spv, const std::vector<std::string>& whiteListStrings,
+ std::uint32_t opts)
{
+ stripWhiteList = whiteListStrings;
spv.swap(in_spv);
remap(opts);
spv.swap(in_spv);
}
+ // remap from a memory image - legacy interface without white list
+ void spirvbin_t::remap(std::vector<std::uint32_t>& in_spv, std::uint32_t opts)
+ {
+ stripWhiteList.clear();
+ spv.swap(in_spv);
+ remap(opts);
+ spv.swap(in_spv);
+ }
+
} // namespace spv
#endif // defined (use_cpp11)
diff --git a/thirdparty/glslang/SPIRV/SPVRemapper.h b/thirdparty/glslang/SPIRV/SPVRemapper.h
index d6b9c346dd..d21694635a 100644
--- a/thirdparty/glslang/SPIRV/SPVRemapper.h
+++ b/thirdparty/glslang/SPIRV/SPVRemapper.h
@@ -118,6 +118,10 @@ public:
virtual ~spirvbin_t() { }
// remap on an existing binary in memory
+ void remap(std::vector<std::uint32_t>& spv, const std::vector<std::string>& whiteListStrings,
+ std::uint32_t opts = DO_EVERYTHING);
+
+ // remap on an existing binary in memory - legacy interface without white list
void remap(std::vector<std::uint32_t>& spv, std::uint32_t opts = DO_EVERYTHING);
// Type for error/log handler functions
@@ -180,6 +184,8 @@ private:
unsigned typeSizeInWords(spv::Id id) const;
unsigned idTypeSizeInWords(spv::Id id) const;
+ bool isStripOp(spv::Op opCode, unsigned start) const;
+
spv::Id& asId(unsigned word) { return spv[word]; }
const spv::Id& asId(unsigned word) const { return spv[word]; }
spv::Op asOpCode(unsigned word) const { return opOpCode(spv[word]); }
@@ -249,6 +255,8 @@ private:
std::vector<spirword_t> spv; // SPIR words
+ std::vector<std::string> stripWhiteList;
+
namemap_t nameMap; // ID names from OpName
// Since we want to also do binary ops, we can't use std::vector<bool>. we could use
diff --git a/thirdparty/glslang/SPIRV/SpvBuilder.cpp b/thirdparty/glslang/SPIRV/SpvBuilder.cpp
index 36a3f09744..7c5ea874ba 100644
--- a/thirdparty/glslang/SPIRV/SpvBuilder.cpp
+++ b/thirdparty/glslang/SPIRV/SpvBuilder.cpp
@@ -59,12 +59,15 @@ namespace spv {
Builder::Builder(unsigned int spvVersion, unsigned int magicNumber, SpvBuildLogger* buildLogger) :
spvVersion(spvVersion),
- source(SourceLanguageUnknown),
+ sourceLang(SourceLanguageUnknown),
sourceVersion(0),
sourceFileStringId(NoResult),
currentLine(0),
currentFile(nullptr),
+ currentFileId(NoResult),
+ lastDebugScopeId(NoResult),
emitOpLines(false),
+ emitNonSemanticShaderDebugInfo(false),
addressModel(AddressingModelLogical),
memoryModel(MemoryModelGLSL450),
builderNumber(magicNumber),
@@ -98,8 +101,12 @@ void Builder::setLine(int lineNum)
{
if (lineNum != 0 && lineNum != currentLine) {
currentLine = lineNum;
- if (emitOpLines)
- addLine(sourceFileStringId, currentLine, 0);
+ if (emitOpLines) {
+ if (emitNonSemanticShaderDebugInfo)
+ addDebugScopeAndLine(currentFileId, currentLine, 0);
+ else
+ addLine(sourceFileStringId, currentLine, 0);
+ }
}
}
@@ -118,7 +125,10 @@ void Builder::setLine(int lineNum, const char* filename)
currentFile = filename;
if (emitOpLines) {
spv::Id strId = getStringId(filename);
- addLine(strId, currentLine, 0);
+ if (emitNonSemanticShaderDebugInfo)
+ addDebugScopeAndLine(strId, currentLine, 0);
+ else
+ addLine(strId, currentLine, 0);
}
}
}
@@ -132,22 +142,49 @@ void Builder::addLine(Id fileName, int lineNum, int column)
buildPoint->addInstruction(std::unique_ptr<Instruction>(line));
}
+void Builder::addDebugScopeAndLine(Id fileName, int lineNum, int column)
+{
+ if (currentDebugScopeId.top() != lastDebugScopeId) {
+ spv::Id resultId = getUniqueId();
+ Instruction* scopeInst = new Instruction(resultId, makeVoidType(), OpExtInst);
+ scopeInst->addIdOperand(nonSemanticShaderDebugInfo);
+ scopeInst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugScope);
+ scopeInst->addIdOperand(currentDebugScopeId.top());
+ buildPoint->addInstruction(std::unique_ptr<Instruction>(scopeInst));
+ lastDebugScopeId = currentDebugScopeId.top();
+ }
+ spv::Id resultId = getUniqueId();
+ Instruction* lineInst = new Instruction(resultId, makeVoidType(), OpExtInst);
+ lineInst->addIdOperand(nonSemanticShaderDebugInfo);
+ lineInst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugLine);
+ lineInst->addIdOperand(makeDebugSource(fileName));
+ lineInst->addIdOperand(makeUintConstant(lineNum));
+ lineInst->addIdOperand(makeUintConstant(lineNum));
+ lineInst->addIdOperand(makeUintConstant(column));
+ lineInst->addIdOperand(makeUintConstant(column));
+ buildPoint->addInstruction(std::unique_ptr<Instruction>(lineInst));
+}
+
// For creating new groupedTypes (will return old type if the requested one was already made).
Id Builder::makeVoidType()
{
Instruction* type;
if (groupedTypes[OpTypeVoid].size() == 0) {
- type = new Instruction(getUniqueId(), NoType, OpTypeVoid);
+ Id typeId = getUniqueId();
+ type = new Instruction(typeId, NoType, OpTypeVoid);
groupedTypes[OpTypeVoid].push_back(type);
constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
module.mapInstruction(type);
+ // Core OpTypeVoid used for debug void type
+ if (emitNonSemanticShaderDebugInfo)
+ debugId[typeId] = typeId;
} else
type = groupedTypes[OpTypeVoid].back();
return type->getResultId();
}
-Id Builder::makeBoolType()
+Id Builder::makeBoolType(bool const compilerGenerated)
{
Instruction* type;
if (groupedTypes[OpTypeBool].size() == 0) {
@@ -158,6 +195,12 @@ Id Builder::makeBoolType()
} else
type = groupedTypes[OpTypeBool].back();
+ if (emitNonSemanticShaderDebugInfo && !compilerGenerated)
+ {
+ auto const debugResultId = makeBoolDebugType(32);
+ debugId[type->getResultId()] = debugResultId;
+ }
+
return type->getResultId();
}
@@ -172,6 +215,12 @@ Id Builder::makeSamplerType()
} else
type = groupedTypes[OpTypeSampler].back();
+ if (emitNonSemanticShaderDebugInfo)
+ {
+ auto const debugResultId = makeCompositeDebugType({}, "type.sampler", NonSemanticShaderDebugInfo100Structure, true);
+ debugId[type->getResultId()] = debugResultId;
+ }
+
return type->getResultId();
}
@@ -268,6 +317,12 @@ Id Builder::makeIntegerType(int width, bool hasSign)
break;
}
+ if (emitNonSemanticShaderDebugInfo)
+ {
+ auto const debugResultId = makeIntegerDebugType(width, hasSign);
+ debugId[type->getResultId()] = debugResultId;
+ }
+
return type->getResultId();
}
@@ -305,6 +360,12 @@ Id Builder::makeFloatType(int width)
break;
}
+ if (emitNonSemanticShaderDebugInfo)
+ {
+ auto const debugResultId = makeFloatDebugType(width);
+ debugId[type->getResultId()] = debugResultId;
+ }
+
return type->getResultId();
}
@@ -312,7 +373,7 @@ Id Builder::makeFloatType(int width)
// See makeStructResultType() for non-decorated structs
// needed as the result of some instructions, which does
// check for duplicates.
-Id Builder::makeStructType(const std::vector<Id>& members, const char* name)
+Id Builder::makeStructType(const std::vector<Id>& members, const char* name, bool const compilerGenerated)
{
// Don't look for previous one, because in the general case,
// structs can be duplicated except for decorations.
@@ -326,6 +387,12 @@ Id Builder::makeStructType(const std::vector<Id>& members, const char* name)
module.mapInstruction(type);
addName(type->getResultId(), name);
+ if (emitNonSemanticShaderDebugInfo && !compilerGenerated)
+ {
+ auto const debugResultId = makeCompositeDebugType(members, name, NonSemanticShaderDebugInfo100Structure);
+ debugId[type->getResultId()] = debugResultId;
+ }
+
return type->getResultId();
}
@@ -372,6 +439,12 @@ Id Builder::makeVectorType(Id component, int size)
constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
module.mapInstruction(type);
+ if (emitNonSemanticShaderDebugInfo)
+ {
+ auto const debugResultId = makeVectorDebugType(component, size);
+ debugId[type->getResultId()] = debugResultId;
+ }
+
return type->getResultId();
}
@@ -398,6 +471,12 @@ Id Builder::makeMatrixType(Id component, int cols, int rows)
constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
module.mapInstruction(type);
+ if (emitNonSemanticShaderDebugInfo)
+ {
+ auto const debugResultId = makeMatrixDebugType(column, cols);
+ debugId[type->getResultId()] = debugResultId;
+ }
+
return type->getResultId();
}
@@ -484,6 +563,12 @@ Id Builder::makeArrayType(Id element, Id sizeId, int stride)
constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
module.mapInstruction(type);
+ if (emitNonSemanticShaderDebugInfo)
+ {
+ auto const debugResultId = makeArrayDebugType(element, sizeId);
+ debugId[type->getResultId()] = debugResultId;
+ }
+
return type->getResultId();
}
@@ -494,6 +579,12 @@ Id Builder::makeRuntimeArray(Id element)
constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
module.mapInstruction(type);
+ if (emitNonSemanticShaderDebugInfo)
+ {
+ auto const debugResultId = makeArrayDebugType(element, makeUintConstant(0));
+ debugId[type->getResultId()] = debugResultId;
+ }
+
return type->getResultId();
}
@@ -513,11 +604,25 @@ Id Builder::makeFunctionType(Id returnType, const std::vector<Id>& paramTypes)
}
}
if (! mismatch)
+ {
+ // If compiling HLSL, glslang will create a wrapper function around the entrypoint. Accordingly, a void(void)
+ // function type is created for the wrapper function. However, nonsemantic shader debug information is disabled
+ // while creating the HLSL wrapper. Consequently, if we encounter another void(void) function, we need to create
+ // the associated debug function type if it hasn't been created yet.
+ if(emitNonSemanticShaderDebugInfo && debugId[type->getResultId()] == 0) {
+ assert(sourceLang == spv::SourceLanguageHLSL);
+ assert(getTypeClass(returnType) == OpTypeVoid && paramTypes.size() == 0);
+
+ Id debugTypeId = makeDebugFunctionType(returnType, {});
+ debugId[type->getResultId()] = debugTypeId;
+ }
return type->getResultId();
+ }
}
// not found, make it
- type = new Instruction(getUniqueId(), NoType, OpTypeFunction);
+ Id typeId = getUniqueId();
+ type = new Instruction(typeId, NoType, OpTypeFunction);
type->addIdOperand(returnType);
for (int p = 0; p < (int)paramTypes.size(); ++p)
type->addIdOperand(paramTypes[p]);
@@ -525,9 +630,34 @@ Id Builder::makeFunctionType(Id returnType, const std::vector<Id>& paramTypes)
constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
module.mapInstruction(type);
+ // make debug type and map it
+ if (emitNonSemanticShaderDebugInfo) {
+ Id debugTypeId = makeDebugFunctionType(returnType, paramTypes);
+ debugId[typeId] = debugTypeId;
+ }
+
return type->getResultId();
}
+Id Builder::makeDebugFunctionType(Id returnType, const std::vector<Id>& paramTypes)
+{
+ assert(debugId[returnType] != 0);
+
+ Id typeId = getUniqueId();
+ auto type = new Instruction(typeId, makeVoidType(), OpExtInst);
+ type->addIdOperand(nonSemanticShaderDebugInfo);
+ type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeFunction);
+ type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100FlagIsPublic));
+ type->addIdOperand(debugId[returnType]);
+ for (auto const paramType : paramTypes) {
+ assert(isPointerType(paramType) || isArrayType(paramType));
+ type->addIdOperand(debugId[getContainedTypeId(paramType)]);
+ }
+ constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
+ module.mapInstruction(type);
+ return typeId;
+}
+
Id Builder::makeImageType(Id sampledType, Dim dim, bool depth, bool arrayed, bool ms, unsigned sampled,
ImageFormat format)
{
@@ -609,6 +739,22 @@ Id Builder::makeImageType(Id sampledType, Dim dim, bool depth, bool arrayed, boo
}
#endif
+ if (emitNonSemanticShaderDebugInfo)
+ {
+ auto TypeName = [&dim]() -> char const* {
+ switch (dim) {
+ case Dim1D: return "type.1d.image";
+ case Dim2D: return "type.2d.image";
+ case Dim3D: return "type.3d.image";
+ case DimCube: return "type.cube.image";
+ default: return "type.image";
+ }
+ };
+
+ auto const debugResultId = makeCompositeDebugType({}, TypeName(), NonSemanticShaderDebugInfo100Class, true);
+ debugId[type->getResultId()] = debugResultId;
+ }
+
return type->getResultId();
}
@@ -630,9 +776,376 @@ Id Builder::makeSampledImageType(Id imageType)
constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
module.mapInstruction(type);
+ if (emitNonSemanticShaderDebugInfo)
+ {
+ auto const debugResultId = makeCompositeDebugType({}, "type.sampled.image", NonSemanticShaderDebugInfo100Class, true);
+ debugId[type->getResultId()] = debugResultId;
+ }
+
+ return type->getResultId();
+}
+
+Id Builder::makeDebugInfoNone()
+{
+ if (debugInfoNone != 0)
+ return debugInfoNone;
+
+ Instruction* inst = new Instruction(getUniqueId(), makeVoidType(), OpExtInst);
+ inst->addIdOperand(nonSemanticShaderDebugInfo);
+ inst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugInfoNone);
+
+ constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(inst));
+ module.mapInstruction(inst);
+
+ debugInfoNone = inst->getResultId();
+
+ return debugInfoNone;
+}
+
+Id Builder::makeBoolDebugType(int const size)
+{
+ // try to find it
+ Instruction* type;
+ for (int t = 0; t < (int)groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic].size(); ++t) {
+ type = groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic][t];
+ if (type->getIdOperand(0) == getStringId("bool") &&
+ type->getIdOperand(1) == static_cast<unsigned int>(size) &&
+ type->getIdOperand(2) == NonSemanticShaderDebugInfo100Boolean)
+ return type->getResultId();
+ }
+
+ type = new Instruction(getUniqueId(), makeVoidType(), OpExtInst);
+ type->addIdOperand(nonSemanticShaderDebugInfo);
+ type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeBasic);
+
+ type->addIdOperand(getStringId("bool")); // name id
+ type->addIdOperand(makeUintConstant(size)); // size id
+ type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100Boolean)); // encoding id
+ type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100None)); // flags id
+
+ groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic].push_back(type);
+ constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
+ module.mapInstruction(type);
+
return type->getResultId();
}
+Id Builder::makeIntegerDebugType(int const width, bool const hasSign)
+{
+ // try to find it
+ Instruction* type;
+ for (int t = 0; t < (int)groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic].size(); ++t) {
+ type = groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic][t];
+ if (type->getIdOperand(0) == (hasSign ? getStringId("int") : getStringId("uint")) &&
+ type->getIdOperand(1) == static_cast<unsigned int>(width) &&
+ type->getIdOperand(2) == (hasSign ? NonSemanticShaderDebugInfo100Signed : NonSemanticShaderDebugInfo100Unsigned))
+ return type->getResultId();
+ }
+
+ // not found, make it
+ type = new Instruction(getUniqueId(), makeVoidType(), OpExtInst);
+ type->addIdOperand(nonSemanticShaderDebugInfo);
+ type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeBasic);
+ if(hasSign == true) {
+ type->addIdOperand(getStringId("int")); // name id
+ } else {
+ type->addIdOperand(getStringId("uint")); // name id
+ }
+ type->addIdOperand(makeUintConstant(width)); // size id
+ if(hasSign == true) {
+ type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100Signed)); // encoding id
+ } else {
+ type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100Unsigned)); // encoding id
+ }
+ type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100None)); // flags id
+
+ groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic].push_back(type);
+ constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
+ module.mapInstruction(type);
+
+ return type->getResultId();
+}
+
+Id Builder::makeFloatDebugType(int const width)
+{
+ // try to find it
+ Instruction* type;
+ for (int t = 0; t < (int)groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic].size(); ++t) {
+ type = groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic][t];
+ if (type->getIdOperand(0) == getStringId("float") &&
+ type->getIdOperand(1) == static_cast<unsigned int>(width) &&
+ type->getIdOperand(2) == NonSemanticShaderDebugInfo100Float)
+ return type->getResultId();
+ }
+
+ // not found, make it
+ type = new Instruction(getUniqueId(), makeVoidType(), OpExtInst);
+ type->addIdOperand(nonSemanticShaderDebugInfo);
+ type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeBasic);
+ type->addIdOperand(getStringId("float")); // name id
+ type->addIdOperand(makeUintConstant(width)); // size id
+ type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100Float)); // encoding id
+ type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100None)); // flags id
+
+ groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic].push_back(type);
+ constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
+ module.mapInstruction(type);
+
+ return type->getResultId();
+}
+
+Id Builder::makeSequentialDebugType(Id const baseType, Id const componentCount, NonSemanticShaderDebugInfo100Instructions const sequenceType)
+{
+ assert(sequenceType == NonSemanticShaderDebugInfo100DebugTypeArray ||
+ sequenceType == NonSemanticShaderDebugInfo100DebugTypeVector);
+
+ // try to find it
+ Instruction* type;
+ for (int t = 0; t < (int)groupedDebugTypes[sequenceType].size(); ++t) {
+ type = groupedDebugTypes[sequenceType][t];
+ if (type->getIdOperand(0) == baseType &&
+ type->getIdOperand(1) == makeUintConstant(componentCount))
+ return type->getResultId();
+ }
+
+ // not found, make it
+ type = new Instruction(getUniqueId(), makeVoidType(), OpExtInst);
+ type->addIdOperand(nonSemanticShaderDebugInfo);
+ type->addImmediateOperand(sequenceType);
+ type->addIdOperand(debugId[baseType]); // base type
+ type->addIdOperand(componentCount); // component count
+
+ groupedDebugTypes[sequenceType].push_back(type);
+ constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
+ module.mapInstruction(type);
+
+ return type->getResultId();
+}
+
+Id Builder::makeArrayDebugType(Id const baseType, Id const componentCount)
+{
+ return makeSequentialDebugType(baseType, componentCount, NonSemanticShaderDebugInfo100DebugTypeArray);
+}
+
+Id Builder::makeVectorDebugType(Id const baseType, int const componentCount)
+{
+ return makeSequentialDebugType(baseType, makeUintConstant(componentCount), NonSemanticShaderDebugInfo100DebugTypeVector);
+}
+
+Id Builder::makeMatrixDebugType(Id const vectorType, int const vectorCount, bool columnMajor)
+{
+ // try to find it
+ Instruction* type;
+ for (int t = 0; t < (int)groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeMatrix].size(); ++t) {
+ type = groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeMatrix][t];
+ if (type->getIdOperand(0) == vectorType &&
+ type->getIdOperand(1) == makeUintConstant(vectorCount))
+ return type->getResultId();
+ }
+
+ // not found, make it
+ type = new Instruction(getUniqueId(), makeVoidType(), OpExtInst);
+ type->addIdOperand(nonSemanticShaderDebugInfo);
+ type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeMatrix);
+ type->addIdOperand(debugId[vectorType]); // vector type id
+ type->addIdOperand(makeUintConstant(vectorCount)); // component count id
+ type->addIdOperand(makeBoolConstant(columnMajor)); // column-major id
+
+ groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeMatrix].push_back(type);
+ constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
+ module.mapInstruction(type);
+
+ return type->getResultId();
+}
+
+Id Builder::makeMemberDebugType(Id const memberType, DebugTypeLoc const& debugTypeLoc)
+{
+ assert(debugId[memberType] != 0);
+
+ Instruction* type = new Instruction(getUniqueId(), makeVoidType(), OpExtInst);
+ type->addIdOperand(nonSemanticShaderDebugInfo);
+ type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeMember);
+ type->addIdOperand(getStringId(debugTypeLoc.name)); // name id
+ type->addIdOperand(debugId[memberType]); // type id
+ type->addIdOperand(makeDebugSource(sourceFileStringId)); // source id TODO: verify this works across include directives
+ type->addIdOperand(makeUintConstant(debugTypeLoc.line)); // line id TODO: currentLine is always zero
+ type->addIdOperand(makeUintConstant(debugTypeLoc.column)); // TODO: column id
+ type->addIdOperand(makeUintConstant(0)); // TODO: offset id
+ type->addIdOperand(makeUintConstant(0)); // TODO: size id
+ type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100FlagIsPublic)); // flags id
+
+ groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeMember].push_back(type);
+ constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
+ module.mapInstruction(type);
+
+ return type->getResultId();
+}
+
+// Note: To represent a source language opaque type, this instruction must have no Members operands, Size operand must be
+// DebugInfoNone, and Name must start with @ to avoid clashes with user defined names.
+Id Builder::makeCompositeDebugType(std::vector<Id> const& memberTypes, char const*const name,
+ NonSemanticShaderDebugInfo100DebugCompositeType const tag, bool const isOpaqueType)
+{
+ // Create the debug member types.
+ std::vector<Id> memberDebugTypes;
+ for(auto const memberType : memberTypes) {
+ assert(debugTypeLocs.find(memberType) != debugTypeLocs.end());
+
+ memberDebugTypes.emplace_back(makeMemberDebugType(memberType, debugTypeLocs[memberType]));
+
+ // TODO: Need to rethink this method of passing location information.
+ // debugTypeLocs.erase(memberType);
+ }
+
+ // Create The structure debug type.
+ Instruction* type = new Instruction(getUniqueId(), makeVoidType(), OpExtInst);
+ type->addIdOperand(nonSemanticShaderDebugInfo);
+ type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeComposite);
+ type->addIdOperand(getStringId(name)); // name id
+ type->addIdOperand(makeUintConstant(tag)); // tag id
+ type->addIdOperand(makeDebugSource(sourceFileStringId)); // source id TODO: verify this works across include directives
+ type->addIdOperand(makeUintConstant(currentLine)); // line id TODO: currentLine always zero?
+ type->addIdOperand(makeUintConstant(0)); // TODO: column id
+ type->addIdOperand(makeDebugCompilationUnit()); // scope id
+ if(isOpaqueType == true) {
+ // Prepend '@' to opaque types.
+ type->addIdOperand(getStringId('@' + std::string(name))); // linkage name id
+ type->addIdOperand(makeDebugInfoNone()); // size id
+ } else {
+ type->addIdOperand(getStringId(name)); // linkage name id
+ type->addIdOperand(makeUintConstant(0)); // TODO: size id
+ }
+ type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100FlagIsPublic)); // flags id
+ assert(isOpaqueType == false || (isOpaqueType == true && memberDebugTypes.empty()));
+ for(auto const memberDebugType : memberDebugTypes) {
+ type->addIdOperand(memberDebugType);
+ }
+
+ groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeComposite].push_back(type);
+ constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
+ module.mapInstruction(type);
+
+ return type->getResultId();
+}
+
+Id Builder::makeDebugSource(const Id fileName) {
+ if (debugSourceId.find(fileName) != debugSourceId.end())
+ return debugSourceId[fileName];
+ spv::Id resultId = getUniqueId();
+ Instruction* sourceInst = new Instruction(resultId, makeVoidType(), OpExtInst);
+ sourceInst->addIdOperand(nonSemanticShaderDebugInfo);
+ sourceInst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugSource);
+ sourceInst->addIdOperand(fileName);
+ if (emitNonSemanticShaderDebugSource) {
+ spv::Id sourceId = 0;
+ if (fileName == sourceFileStringId) {
+ sourceId = getStringId(sourceText);
+ } else {
+ auto incItr = includeFiles.find(fileName);
+ assert(incItr != includeFiles.end());
+ sourceId = getStringId(*incItr->second);
+ }
+ sourceInst->addIdOperand(sourceId);
+ }
+ constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(sourceInst));
+ module.mapInstruction(sourceInst);
+ debugSourceId[fileName] = resultId;
+ return resultId;
+}
+
+Id Builder::makeDebugCompilationUnit() {
+ if (nonSemanticShaderCompilationUnitId != 0)
+ return nonSemanticShaderCompilationUnitId;
+ spv::Id resultId = getUniqueId();
+ Instruction* sourceInst = new Instruction(resultId, makeVoidType(), OpExtInst);
+ sourceInst->addIdOperand(nonSemanticShaderDebugInfo);
+ sourceInst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugCompilationUnit);
+ sourceInst->addIdOperand(makeUintConstant(1)); // TODO(greg-lunarg): Get rid of magic number
+ sourceInst->addIdOperand(makeUintConstant(4)); // TODO(greg-lunarg): Get rid of magic number
+ sourceInst->addIdOperand(makeDebugSource(sourceFileStringId));
+ sourceInst->addIdOperand(makeUintConstant(sourceLang));
+ constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(sourceInst));
+ module.mapInstruction(sourceInst);
+ nonSemanticShaderCompilationUnitId = resultId;
+ return resultId;
+}
+
+Id Builder::createDebugGlobalVariable(Id const type, char const*const name, Id const variable)
+{
+ assert(type != 0);
+
+ Instruction* inst = new Instruction(getUniqueId(), makeVoidType(), OpExtInst);
+ inst->addIdOperand(nonSemanticShaderDebugInfo);
+ inst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugGlobalVariable);
+ inst->addIdOperand(getStringId(name)); // name id
+ inst->addIdOperand(type); // type id
+ inst->addIdOperand(makeDebugSource(sourceFileStringId)); // source id
+ inst->addIdOperand(makeUintConstant(currentLine)); // line id TODO: currentLine always zero?
+ inst->addIdOperand(makeUintConstant(0)); // TODO: column id
+ inst->addIdOperand(makeDebugCompilationUnit()); // scope id
+ inst->addIdOperand(getStringId(name)); // linkage name id
+ inst->addIdOperand(variable); // variable id
+ inst->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100FlagIsDefinition)); // flags id
+
+ constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(inst));
+ module.mapInstruction(inst);
+
+ return inst->getResultId();
+}
+
+Id Builder::createDebugLocalVariable(Id type, char const*const name, size_t const argNumber)
+{
+ assert(name != nullptr);
+ Instruction* inst = new Instruction(getUniqueId(), makeVoidType(), OpExtInst);
+ inst->addIdOperand(nonSemanticShaderDebugInfo);
+ inst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugLocalVariable);
+ inst->addIdOperand(getStringId(name)); // name id
+ inst->addIdOperand(type); // type id
+ inst->addIdOperand(makeDebugSource(sourceFileStringId)); // source id
+ inst->addIdOperand(makeUintConstant(currentLine)); // line id
+ inst->addIdOperand(makeUintConstant(0)); // TODO: column id
+ inst->addIdOperand(currentDebugScopeId.top()); // scope id
+ inst->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100FlagIsLocal)); // flags id
+ if(argNumber != 0) {
+ inst->addIdOperand(makeUintConstant(argNumber));
+ }
+
+ constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(inst));
+ module.mapInstruction(inst);
+
+ return inst->getResultId();
+}
+
+Id Builder::makeDebugExpression()
+{
+ if (debugExpression != 0)
+ return debugExpression;
+
+ Instruction* inst = new Instruction(getUniqueId(), makeVoidType(), OpExtInst);
+ inst->addIdOperand(nonSemanticShaderDebugInfo);
+ inst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugExpression);
+
+ constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(inst));
+ module.mapInstruction(inst);
+
+ debugExpression = inst->getResultId();
+
+ return debugExpression;
+}
+
+Id Builder::makeDebugDeclare(Id const debugLocalVariable, Id const localVariable)
+{
+ Instruction* inst = new Instruction(getUniqueId(), makeVoidType(), OpExtInst);
+ inst->addIdOperand(nonSemanticShaderDebugInfo);
+ inst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugDeclare);
+ inst->addIdOperand(debugLocalVariable); // debug local variable id
+ inst->addIdOperand(localVariable); // local variable id
+ inst->addIdOperand(makeDebugExpression()); // expression id
+ buildPoint->addInstruction(std::unique_ptr<Instruction>(inst));
+
+ return inst->getResultId();
+}
+
#ifndef GLSLANG_WEB
Id Builder::makeAccelerationStructureType()
{
@@ -920,6 +1433,17 @@ bool Builder::isSpecConstantOpCode(Op opcode) const
}
}
+bool Builder::isRayTracingOpCode(Op opcode) const
+{
+ switch (opcode) {
+ case OpTypeAccelerationStructureKHR:
+ case OpTypeRayQueryKHR:
+ return true;
+ default:
+ return false;
+ }
+}
+
Id Builder::makeNullConstant(Id typeId)
{
Instruction* constant;
@@ -1136,6 +1660,19 @@ Id Builder::makeFpConstant(Id type, double d, bool specConstant)
return NoResult;
}
+Id Builder::importNonSemanticShaderDebugInfoInstructions()
+{
+ assert(emitNonSemanticShaderDebugInfo == true);
+
+ if(nonSemanticShaderDebugInfo == 0)
+ {
+ this->addExtension(spv::E_SPV_KHR_non_semantic_info);
+ nonSemanticShaderDebugInfo = this->import("NonSemantic.Shader.DebugInfo.100");
+ }
+
+ return nonSemanticShaderDebugInfo;
+}
+
Id Builder::findCompositeConstant(Op typeClass, Id typeId, const std::vector<Id>& comps)
{
Instruction* constant = 0;
@@ -1447,23 +1984,34 @@ Function* Builder::makeEntryPoint(const char* entryPoint)
assert(! entryPointFunction);
Block* entry;
- std::vector<Id> params;
+ std::vector<Id> paramsTypes;
+ std::vector<char const*> paramNames;
std::vector<std::vector<Decoration>> decorations;
- entryPointFunction = makeFunctionEntry(NoPrecision, makeVoidType(), entryPoint, params, decorations, &entry);
+ auto const returnType = makeVoidType();
+
+ restoreNonSemanticShaderDebugInfo = emitNonSemanticShaderDebugInfo;
+ if(sourceLang == spv::SourceLanguageHLSL) {
+ emitNonSemanticShaderDebugInfo = false;
+ }
+
+ entryPointFunction = makeFunctionEntry(NoPrecision, returnType, entryPoint, paramsTypes, paramNames, decorations, &entry);
+
+ emitNonSemanticShaderDebugInfo = restoreNonSemanticShaderDebugInfo;
return entryPointFunction;
}
// Comments in header
Function* Builder::makeFunctionEntry(Decoration precision, Id returnType, const char* name,
- const std::vector<Id>& paramTypes,
+ const std::vector<Id>& paramTypes, const std::vector<char const*>& paramNames,
const std::vector<std::vector<Decoration>>& decorations, Block **entry)
{
// Make the function and initial instructions in it
Id typeId = makeFunctionType(returnType, paramTypes);
Id firstParamId = paramTypes.size() == 0 ? 0 : getUniqueIds((int)paramTypes.size());
- Function* function = new Function(getUniqueId(), returnType, typeId, firstParamId, module);
+ Id funcId = getUniqueId();
+ Function* function = new Function(funcId, returnType, typeId, firstParamId, module);
// Set up the precisions
setPrecision(function->getId(), precision);
@@ -1475,11 +2023,39 @@ Function* Builder::makeFunctionEntry(Decoration precision, Id returnType, const
}
}
+ // Make the debug function instruction
+ if (emitNonSemanticShaderDebugInfo) {
+ Id nameId = getStringId(unmangleFunctionName(name));
+ Id debugFuncId = makeDebugFunction(function, nameId, typeId);
+ debugId[funcId] = debugFuncId;
+ currentDebugScopeId.push(debugFuncId);
+ lastDebugScopeId = NoResult;
+ }
+
// CFG
- if (entry) {
- *entry = new Block(getUniqueId(), *function);
- function->addBlock(*entry);
- setBuildPoint(*entry);
+ assert(entry != nullptr);
+ *entry = new Block(getUniqueId(), *function);
+ function->addBlock(*entry);
+ setBuildPoint(*entry);
+
+ // DebugScope and DebugLine for parameter DebugDeclares
+ if (emitNonSemanticShaderDebugInfo && (int)paramTypes.size() > 0) {
+ addDebugScopeAndLine(currentFileId, currentLine, 0);
+ }
+
+ if (emitNonSemanticShaderDebugInfo) {
+ assert(paramTypes.size() == paramNames.size());
+ for(size_t p = 0; p < paramTypes.size(); ++p)
+ {
+ auto const& paramType = paramTypes[p];
+ assert(isPointerType(paramType) || isArrayType(paramType));
+ assert(debugId[getContainedTypeId(paramType)] != 0);
+ auto const& paramName = paramNames[p];
+ auto const debugLocalVariableId = createDebugLocalVariable(debugId[getContainedTypeId(paramType)], paramName, p+1);
+ debugId[firstParamId + p] = debugLocalVariableId;
+
+ makeDebugDeclare(debugLocalVariableId, firstParamId + p);
+ }
}
if (name)
@@ -1487,9 +2063,62 @@ Function* Builder::makeFunctionEntry(Decoration precision, Id returnType, const
functions.push_back(std::unique_ptr<Function>(function));
+ // Clear debug scope stack
+ if (emitNonSemanticShaderDebugInfo)
+ currentDebugScopeId.pop();
+
return function;
}
+Id Builder::makeDebugFunction(Function* function, Id nameId, Id funcTypeId) {
+ assert(function != nullptr);
+ assert(nameId != 0);
+ assert(funcTypeId != 0);
+ assert(debugId[funcTypeId] != 0);
+
+ Id funcId = getUniqueId();
+ auto type = new Instruction(funcId, makeVoidType(), OpExtInst);
+ type->addIdOperand(nonSemanticShaderDebugInfo);
+ type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugFunction);
+ type->addIdOperand(nameId);
+ type->addIdOperand(debugId[funcTypeId]);
+ type->addIdOperand(makeDebugSource(currentFileId)); // Will be fixed later when true filename available
+ type->addIdOperand(makeUintConstant(currentLine)); // Will be fixed later when true line available
+ type->addIdOperand(makeUintConstant(0)); // column
+ type->addIdOperand(makeDebugCompilationUnit()); // scope
+ type->addIdOperand(nameId); // linkage name
+ type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100FlagIsPublic));
+ type->addIdOperand(makeUintConstant(currentLine)); // TODO(greg-lunarg): correct scope line
+ constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
+ module.mapInstruction(type);
+ return funcId;
+}
+
+Id Builder::makeDebugLexicalBlock(uint32_t line) {
+ Id lexId = getUniqueId();
+ auto lex = new Instruction(lexId, makeVoidType(), OpExtInst);
+ lex->addIdOperand(nonSemanticShaderDebugInfo);
+ lex->addImmediateOperand(NonSemanticShaderDebugInfo100DebugLexicalBlock);
+ lex->addIdOperand(makeDebugSource(currentFileId));
+ lex->addIdOperand(makeUintConstant(line));
+ lex->addIdOperand(makeUintConstant(0)); // column
+ lex->addIdOperand(currentDebugScopeId.top()); // scope
+ constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(lex));
+ module.mapInstruction(lex);
+ return lexId;
+}
+
+std::string Builder::unmangleFunctionName(std::string const& name) const
+{
+ assert(name.length() > 0);
+
+ if(name.rfind('(') != std::string::npos) {
+ return name.substr(0, name.rfind('('));
+ } else {
+ return name;
+ }
+}
+
// Comments in header
void Builder::makeReturn(bool implicit, Id retVal)
{
@@ -1505,6 +2134,48 @@ void Builder::makeReturn(bool implicit, Id retVal)
}
// Comments in header
+void Builder::enterScope(uint32_t line)
+{
+ // Generate new lexical scope debug instruction
+ Id lexId = makeDebugLexicalBlock(line);
+ currentDebugScopeId.push(lexId);
+ lastDebugScopeId = NoResult;
+}
+
+// Comments in header
+void Builder::leaveScope()
+{
+ // Pop current scope from stack and clear current scope
+ currentDebugScopeId.pop();
+ lastDebugScopeId = NoResult;
+}
+
+// Comments in header
+void Builder::enterFunction(Function const* function)
+{
+ // Save and disable debugInfo for HLSL entry point function. It is a wrapper
+ // function with no user code in it.
+ restoreNonSemanticShaderDebugInfo = emitNonSemanticShaderDebugInfo;
+ if (sourceLang == spv::SourceLanguageHLSL && function == entryPointFunction) {
+ emitNonSemanticShaderDebugInfo = false;
+ }
+
+ if (emitNonSemanticShaderDebugInfo) {
+ // Initialize scope state
+ Id funcId = function->getFuncId();
+ currentDebugScopeId.push(debugId[funcId]);
+ // Create DebugFunctionDefinition
+ spv::Id resultId = getUniqueId();
+ Instruction* defInst = new Instruction(resultId, makeVoidType(), OpExtInst);
+ defInst->addIdOperand(nonSemanticShaderDebugInfo);
+ defInst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugFunctionDefinition);
+ defInst->addIdOperand(debugId[funcId]);
+ defInst->addIdOperand(funcId);
+ buildPoint->addInstruction(std::unique_ptr<Instruction>(defInst));
+ }
+}
+
+// Comments in header
void Builder::leaveFunction()
{
Block* block = buildPoint;
@@ -1519,6 +2190,12 @@ void Builder::leaveFunction()
makeReturn(true, createUndefined(function.getReturnType()));
}
}
+
+ // Clear function scope from debug scope stack
+ if (emitNonSemanticShaderDebugInfo)
+ currentDebugScopeId.pop();
+
+ emitNonSemanticShaderDebugInfo = restoreNonSemanticShaderDebugInfo;
}
// Comments in header
@@ -1529,7 +2206,18 @@ void Builder::makeStatementTerminator(spv::Op opcode, const char *name)
}
// Comments in header
-Id Builder::createVariable(Decoration precision, StorageClass storageClass, Id type, const char* name, Id initializer)
+void Builder::makeStatementTerminator(spv::Op opcode, const std::vector<Id>& operands, const char* name)
+{
+ // It's assumed that the terminator instruction is always of void return type
+ // However in future if there is a need for non void return type, new helper
+ // methods can be created.
+ createNoResultOp(opcode, operands);
+ createAndSetNoPredecessorBlock(name);
+}
+
+// Comments in header
+Id Builder::createVariable(Decoration precision, StorageClass storageClass, Id type, const char* name, Id initializer,
+ bool const compilerGenerated)
{
Id pointerType = makePointer(storageClass, type);
Instruction* inst = new Instruction(getUniqueId(), pointerType, OpVariable);
@@ -1541,11 +2229,26 @@ Id Builder::createVariable(Decoration precision, StorageClass storageClass, Id t
case StorageClassFunction:
// Validation rules require the declaration in the entry block
buildPoint->getParent().addLocalVariable(std::unique_ptr<Instruction>(inst));
+
+ if (emitNonSemanticShaderDebugInfo && !compilerGenerated)
+ {
+ auto const debugLocalVariableId = createDebugLocalVariable(debugId[type], name);
+ debugId[inst->getResultId()] = debugLocalVariableId;
+
+ makeDebugDeclare(debugLocalVariableId, inst->getResultId());
+ }
+
break;
default:
constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(inst));
module.mapInstruction(inst);
+
+ if (emitNonSemanticShaderDebugInfo && !isRayTracingOpCode(getOpCode(type)))
+ {
+ auto const debugResultId = createDebugGlobalVariable(debugId[type], name, inst->getResultId());
+ debugId[inst->getResultId()] = debugResultId;
+ }
break;
}
@@ -1575,7 +2278,7 @@ spv::MemoryAccessMask Builder::sanitizeMemoryAccessForStorageClass(spv::MemoryAc
case spv::StorageClassPhysicalStorageBufferEXT:
break;
default:
- memoryAccess = spv::MemoryAccessMask(memoryAccess &
+ memoryAccess = spv::MemoryAccessMask(memoryAccess &
~(spv::MemoryAccessMakePointerAvailableKHRMask |
spv::MemoryAccessMakePointerVisibleKHRMask |
spv::MemoryAccessNonPrivatePointerKHRMask));
@@ -2051,7 +2754,7 @@ Id Builder::createTextureCall(Decoration precision, Id resultType, bool sparse,
texArgs[numArgs++] = parameters.granularity;
if (parameters.coarse != NoResult)
texArgs[numArgs++] = parameters.coarse;
-#endif
+#endif
//
// Set up the optional arguments
@@ -3271,10 +3974,10 @@ void Builder::dumpSourceInstructions(const spv::Id fileId, const std::string& te
const int opSourceWordCount = 4;
const int nonNullBytesPerInstruction = 4 * (maxWordCount - opSourceWordCount) - 1;
- if (source != SourceLanguageUnknown) {
+ if (sourceLang != SourceLanguageUnknown) {
// OpSource Language Version File Source
Instruction sourceInst(NoResult, NoType, OpSource);
- sourceInst.addImmediateOperand(source);
+ sourceInst.addImmediateOperand(sourceLang);
sourceInst.addImmediateOperand(sourceVersion);
// File operand
if (fileId != NoResult) {
@@ -3307,6 +4010,7 @@ void Builder::dumpSourceInstructions(const spv::Id fileId, const std::string& te
// Dump an OpSource[Continued] sequence for the source and every include file
void Builder::dumpSourceInstructions(std::vector<unsigned int>& out) const
{
+ if (emitNonSemanticShaderDebugInfo) return;
dumpSourceInstructions(sourceFileStringId, sourceText, out);
for (auto iItr = includeFiles.begin(); iItr != includeFiles.end(); ++iItr)
dumpSourceInstructions(iItr->first, *iItr->second, out);
diff --git a/thirdparty/glslang/SPIRV/SpvBuilder.h b/thirdparty/glslang/SPIRV/SpvBuilder.h
index c72d9b287e..f7fdc6ad84 100644
--- a/thirdparty/glslang/SPIRV/SpvBuilder.h
+++ b/thirdparty/glslang/SPIRV/SpvBuilder.h
@@ -50,6 +50,10 @@
#include "Logger.h"
#include "spirv.hpp"
#include "spvIR.h"
+namespace spv {
+ #include "GLSL.ext.KHR.h"
+ #include "NonSemanticShaderDebugInfo100.h"
+}
#include <algorithm>
#include <map>
@@ -82,7 +86,7 @@ public:
void setSource(spv::SourceLanguage lang, int version)
{
- source = lang;
+ sourceLang = lang;
sourceVersion = version;
}
spv::Id getStringId(const std::string& str)
@@ -99,14 +103,32 @@ public:
stringIds[file_c_str] = strId;
return strId;
}
+ spv::Id getSourceFile() const
+ {
+ return sourceFileStringId;
+ }
void setSourceFile(const std::string& file)
{
sourceFileStringId = getStringId(file);
+ currentFileId = sourceFileStringId;
}
void setSourceText(const std::string& text) { sourceText = text; }
void addSourceExtension(const char* ext) { sourceExtensions.push_back(ext); }
void addModuleProcessed(const std::string& p) { moduleProcesses.push_back(p.c_str()); }
void setEmitOpLines() { emitOpLines = true; }
+ void setEmitNonSemanticShaderDebugInfo(bool const emit)
+ {
+ emitNonSemanticShaderDebugInfo = emit;
+
+ if(emit)
+ {
+ importNonSemanticShaderDebugInfoInstructions();
+ }
+ }
+ void setEmitNonSemanticShaderDebugSource(bool const src)
+ {
+ emitNonSemanticShaderDebugSource = src;
+ }
void addExtension(const char* ext) { extensions.insert(ext); }
void removeExtension(const char* ext)
{
@@ -159,10 +181,11 @@ public:
void setLine(int line, const char* filename);
// Low-level OpLine. See setLine() for a layered helper.
void addLine(Id fileName, int line, int column);
+ void addDebugScopeAndLine(Id fileName, int line, int column);
// For creating new types (will return old type if the requested one was already made).
Id makeVoidType();
- Id makeBoolType();
+ Id makeBoolType(bool const compilerGenerated = true);
Id makePointer(StorageClass, Id pointee);
Id makeForwardPointer(StorageClass);
Id makePointerFromForwardPointer(StorageClass, Id forwardPointerType, Id pointee);
@@ -170,7 +193,7 @@ public:
Id makeIntType(int width) { return makeIntegerType(width, true); }
Id makeUintType(int width) { return makeIntegerType(width, false); }
Id makeFloatType(int width);
- Id makeStructType(const std::vector<Id>& members, const char*);
+ Id makeStructType(const std::vector<Id>& members, const char* name, bool const compilerGenerated = true);
Id makeStructResultType(Id type0, Id type1);
Id makeVectorType(Id component, int size);
Id makeMatrixType(Id component, int cols, int rows);
@@ -183,6 +206,36 @@ public:
Id makeCooperativeMatrixType(Id component, Id scope, Id rows, Id cols);
Id makeGenericType(spv::Op opcode, std::vector<spv::IdImmediate>& operands);
+ // SPIR-V NonSemantic Shader DebugInfo Instructions
+ struct DebugTypeLoc {
+ std::string name {};
+ int line {0};
+ int column {0};
+ };
+ std::unordered_map<Id, DebugTypeLoc> debugTypeLocs;
+ Id makeDebugInfoNone();
+ Id makeBoolDebugType(int const size);
+ Id makeIntegerDebugType(int const width, bool const hasSign);
+ Id makeFloatDebugType(int const width);
+ Id makeSequentialDebugType(Id const baseType, Id const componentCount, NonSemanticShaderDebugInfo100Instructions const sequenceType);
+ Id makeArrayDebugType(Id const baseType, Id const componentCount);
+ Id makeVectorDebugType(Id const baseType, int const componentCount);
+ Id makeMatrixDebugType(Id const vectorType, int const vectorCount, bool columnMajor = true);
+ Id makeMemberDebugType(Id const memberType, DebugTypeLoc const& debugTypeLoc);
+ Id makeCompositeDebugType(std::vector<Id> const& memberTypes, char const*const name,
+ NonSemanticShaderDebugInfo100DebugCompositeType const tag, bool const isOpaqueType = false);
+ Id makeDebugSource(const Id fileName);
+ Id makeDebugCompilationUnit();
+ Id createDebugGlobalVariable(Id const type, char const*const name, Id const variable);
+ Id createDebugLocalVariable(Id type, char const*const name, size_t const argNumber = 0);
+ Id makeDebugExpression();
+ Id makeDebugDeclare(Id const debugLocalVariable, Id const localVariable);
+ Id makeDebugValue(Id const debugLocalVariable, Id const value);
+ Id makeDebugFunctionType(Id returnType, const std::vector<Id>& paramTypes);
+ Id makeDebugFunction(Function* function, Id nameId, Id funcTypeId);
+ Id makeDebugLexicalBlock(uint32_t line);
+ std::string unmangleFunctionName(std::string const& name) const;
+
// accelerationStructureNV type
Id makeAccelerationStructureType();
// rayQueryEXT type
@@ -257,6 +310,8 @@ public:
// See if a resultId is valid for use as an initializer.
bool isValidInitializer(Id resultId) const { return isConstant(resultId) || isGlobalVariable(resultId); }
+ bool isRayTracingOpCode(Op opcode) const;
+
int getScalarTypeWidth(Id typeId) const
{
Id scalarTypeId = getScalarTypeId(typeId);
@@ -318,6 +373,8 @@ public:
Id makeFloat16Constant(float f16, bool specConstant = false);
Id makeFpConstant(Id type, double d, bool specConstant = false);
+ Id importNonSemanticShaderDebugInfoInstructions();
+
// Turn the array of constants into a proper spv constant of the requested type.
Id makeCompositeConstant(Id type, const std::vector<Id>& comps, bool specConst = false);
@@ -340,7 +397,12 @@ public:
void addMemberDecoration(Id, unsigned int member, Decoration, const std::vector<const char*>& strings);
// At the end of what block do the next create*() instructions go?
- void setBuildPoint(Block* bp) { buildPoint = bp; }
+ // Also reset current last DebugScope and current source line to unknown
+ void setBuildPoint(Block* bp) {
+ buildPoint = bp;
+ lastDebugScopeId = NoResult;
+ currentLine = 0;
+ }
Block* getBuildPoint() const { return buildPoint; }
// Make the entry-point function. The returned pointer is only valid
@@ -351,12 +413,22 @@ public:
// Return the function, pass back the entry.
// The returned pointer is only valid for the lifetime of this builder.
Function* makeFunctionEntry(Decoration precision, Id returnType, const char* name,
- const std::vector<Id>& paramTypes, const std::vector<std::vector<Decoration>>& precisions, Block **entry = 0);
+ const std::vector<Id>& paramTypes, const std::vector<char const*>& paramNames,
+ const std::vector<std::vector<Decoration>>& precisions, Block **entry = 0);
// Create a return. An 'implicit' return is one not appearing in the source
// code. In the case of an implicit return, no post-return block is inserted.
void makeReturn(bool implicit, Id retVal = 0);
+ // Initialize state and generate instructions for new lexical scope
+ void enterScope(uint32_t line);
+
+ // Set state and generate instructions to exit current lexical scope
+ void leaveScope();
+
+ // Prepare builder for generation of instructions for a function.
+ void enterFunction(Function const* function);
+
// Generate all the code needed to finish up a function.
void leaveFunction();
@@ -364,9 +436,13 @@ public:
// discard, terminate-invocation, terminateRayEXT, or ignoreIntersectionEXT
void makeStatementTerminator(spv::Op opcode, const char *name);
+ // Create block terminator instruction for statements that have input operands
+ // such as OpEmitMeshTasksEXT
+ void makeStatementTerminator(spv::Op opcode, const std::vector<Id>& operands, const char* name);
+
// Create a global or function local or IO variable.
- Id createVariable(Decoration precision, StorageClass, Id type, const char* name = nullptr,
- Id initializer = NoResult);
+ Id createVariable(Decoration precision, StorageClass storageClass, Id type, const char* name = nullptr,
+ Id initializer = NoResult, bool const compilerGenerated = true);
// Create an intermediate with an undefined value.
Id createUndefined(Id type);
@@ -801,13 +877,23 @@ public:
const;
unsigned int spvVersion; // the version of SPIR-V to emit in the header
- SourceLanguage source;
+ SourceLanguage sourceLang;
int sourceVersion;
spv::Id sourceFileStringId;
+ spv::Id nonSemanticShaderCompilationUnitId {0};
+ spv::Id nonSemanticShaderDebugInfo {0};
+ spv::Id debugInfoNone {0};
+ spv::Id debugExpression {0}; // Debug expression with zero operations.
std::string sourceText;
int currentLine;
const char* currentFile;
+ spv::Id currentFileId;
+ std::stack<spv::Id> currentDebugScopeId;
+ spv::Id lastDebugScopeId;
bool emitOpLines;
+ bool emitNonSemanticShaderDebugInfo;
+ bool restoreNonSemanticShaderDebugInfo;
+ bool emitNonSemanticShaderDebugSource;
std::set<std::string> extensions;
std::vector<const char*> sourceExtensions;
std::vector<const char*> moduleProcesses;
@@ -841,6 +927,8 @@ public:
std::unordered_map<unsigned int, std::vector<Instruction*>> groupedStructConstants;
// map type opcodes to type instructions
std::unordered_map<unsigned int, std::vector<Instruction*>> groupedTypes;
+ // map type opcodes to debug type instructions
+ std::unordered_map<unsigned int, std::vector<Instruction*>> groupedDebugTypes;
// list of OpConstantNull instructions
std::vector<Instruction*> nullConstants;
@@ -856,6 +944,12 @@ public:
// map from include file name ids to their contents
std::map<spv::Id, const std::string*> includeFiles;
+ // map from core id to debug id
+ std::map <spv::Id, spv::Id> debugId;
+
+ // map from file name string id to DebugSource id
+ std::unordered_map<spv::Id, spv::Id> debugSourceId;
+
// The stream for outputting warnings and errors.
SpvBuildLogger* logger;
}; // end Builder class
diff --git a/thirdparty/glslang/SPIRV/SpvTools.cpp b/thirdparty/glslang/SPIRV/SpvTools.cpp
index e8f825119a..25299937a3 100644
--- a/thirdparty/glslang/SPIRV/SpvTools.cpp
+++ b/thirdparty/glslang/SPIRV/SpvTools.cpp
@@ -212,6 +212,8 @@ void SpirvToolsTransform(const glslang::TIntermediate& intermediate, std::vector
optimizer.RegisterPass(spvtools::CreateInterpolateFixupPass());
if (options->optimizeSize) {
optimizer.RegisterPass(spvtools::CreateRedundancyEliminationPass());
+ if (intermediate.getStage() == EShLanguage::EShLangVertex)
+ optimizer.RegisterPass(spvtools::CreateEliminateDeadInputComponentsPass());
}
optimizer.RegisterPass(spvtools::CreateAggressiveDCEPass());
optimizer.RegisterPass(spvtools::CreateCFGCleanupPass());
diff --git a/thirdparty/glslang/SPIRV/SpvTools.h b/thirdparty/glslang/SPIRV/SpvTools.h
index 3fb3cbacd3..5386048ab6 100644
--- a/thirdparty/glslang/SPIRV/SpvTools.h
+++ b/thirdparty/glslang/SPIRV/SpvTools.h
@@ -53,14 +53,14 @@
namespace glslang {
struct SpvOptions {
- SpvOptions() : generateDebugInfo(false), stripDebugInfo(false), disableOptimizer(true),
- optimizeSize(false), disassemble(false), validate(false) { }
- bool generateDebugInfo;
- bool stripDebugInfo;
- bool disableOptimizer;
- bool optimizeSize;
- bool disassemble;
- bool validate;
+ bool generateDebugInfo {false};
+ bool stripDebugInfo {false};
+ bool disableOptimizer {true};
+ bool optimizeSize {false};
+ bool disassemble {false};
+ bool validate {false};
+ bool emitNonSemanticShaderDebugInfo {false};
+ bool emitNonSemanticShaderDebugSource{ false };
};
#if ENABLE_OPT
diff --git a/thirdparty/glslang/SPIRV/doc.cpp b/thirdparty/glslang/SPIRV/doc.cpp
index 9a569e0d7f..b7fe3e7424 100644
--- a/thirdparty/glslang/SPIRV/doc.cpp
+++ b/thirdparty/glslang/SPIRV/doc.cpp
@@ -97,6 +97,8 @@ const char* ExecutionModelString(int model)
case 6: return "Kernel";
case ExecutionModelTaskNV: return "TaskNV";
case ExecutionModelMeshNV: return "MeshNV";
+ case ExecutionModelTaskEXT: return "TaskEXT";
+ case ExecutionModelMeshEXT: return "MeshEXT";
default: return "Bad";
@@ -173,28 +175,32 @@ const char* ExecutionModeString(int mode)
case 31: return "ContractionOff";
case 32: return "Bad";
- case ExecutionModeInitializer: return "Initializer";
- case ExecutionModeFinalizer: return "Finalizer";
- case ExecutionModeSubgroupSize: return "SubgroupSize";
- case ExecutionModeSubgroupsPerWorkgroup: return "SubgroupsPerWorkgroup";
- case ExecutionModeSubgroupsPerWorkgroupId: return "SubgroupsPerWorkgroupId";
- case ExecutionModeLocalSizeId: return "LocalSizeId";
- case ExecutionModeLocalSizeHintId: return "LocalSizeHintId";
-
- case ExecutionModePostDepthCoverage: return "PostDepthCoverage";
- case ExecutionModeDenormPreserve: return "DenormPreserve";
- case ExecutionModeDenormFlushToZero: return "DenormFlushToZero";
- case ExecutionModeSignedZeroInfNanPreserve: return "SignedZeroInfNanPreserve";
- case ExecutionModeRoundingModeRTE: return "RoundingModeRTE";
- case ExecutionModeRoundingModeRTZ: return "RoundingModeRTZ";
- case ExecutionModeStencilRefReplacingEXT: return "StencilRefReplacingEXT";
+ case ExecutionModeInitializer: return "Initializer";
+ case ExecutionModeFinalizer: return "Finalizer";
+ case ExecutionModeSubgroupSize: return "SubgroupSize";
+ case ExecutionModeSubgroupsPerWorkgroup: return "SubgroupsPerWorkgroup";
+ case ExecutionModeSubgroupsPerWorkgroupId: return "SubgroupsPerWorkgroupId";
+ case ExecutionModeLocalSizeId: return "LocalSizeId";
+ case ExecutionModeLocalSizeHintId: return "LocalSizeHintId";
+
+ case ExecutionModePostDepthCoverage: return "PostDepthCoverage";
+ case ExecutionModeDenormPreserve: return "DenormPreserve";
+ case ExecutionModeDenormFlushToZero: return "DenormFlushToZero";
+ case ExecutionModeSignedZeroInfNanPreserve: return "SignedZeroInfNanPreserve";
+ case ExecutionModeRoundingModeRTE: return "RoundingModeRTE";
+ case ExecutionModeRoundingModeRTZ: return "RoundingModeRTZ";
+ case ExecutionModeEarlyAndLateFragmentTestsAMD: return "EarlyAndLateFragmentTestsAMD";
+ case ExecutionModeStencilRefUnchangedFrontAMD: return "StencilRefUnchangedFrontAMD";
+ case ExecutionModeStencilRefLessFrontAMD: return "StencilRefLessFrontAMD";
+ case ExecutionModeStencilRefGreaterBackAMD: return "StencilRefGreaterBackAMD";
+ case ExecutionModeStencilRefReplacingEXT: return "StencilRefReplacingEXT";
case ExecutionModeSubgroupUniformControlFlowKHR: return "SubgroupUniformControlFlow";
- case ExecutionModeOutputLinesNV: return "OutputLinesNV";
- case ExecutionModeOutputPrimitivesNV: return "OutputPrimitivesNV";
- case ExecutionModeOutputTrianglesNV: return "OutputTrianglesNV";
- case ExecutionModeDerivativeGroupQuadsNV: return "DerivativeGroupQuadsNV";
- case ExecutionModeDerivativeGroupLinearNV: return "DerivativeGroupLinearNV";
+ case ExecutionModeOutputLinesNV: return "OutputLinesNV";
+ case ExecutionModeOutputPrimitivesNV: return "OutputPrimitivesNV";
+ case ExecutionModeOutputTrianglesNV: return "OutputTrianglesNV";
+ case ExecutionModeDerivativeGroupQuadsNV: return "DerivativeGroupQuadsNV";
+ case ExecutionModeDerivativeGroupLinearNV: return "DerivativeGroupLinearNV";
case ExecutionModePixelInterlockOrderedEXT: return "PixelInterlockOrderedEXT";
case ExecutionModePixelInterlockUnorderedEXT: return "PixelInterlockUnorderedEXT";
@@ -238,7 +244,7 @@ const char* StorageClassString(int StorageClass)
case StorageClassIncomingCallableDataKHR: return "IncomingCallableDataKHR";
case StorageClassPhysicalStorageBufferEXT: return "PhysicalStorageBufferEXT";
-
+ case StorageClassTaskPayloadWorkgroupEXT: return "TaskPayloadWorkgroupEXT";
default: return "Bad";
}
}
@@ -305,7 +311,8 @@ const char* DecorationString(int decoration)
case DecorationPerPrimitiveNV: return "PerPrimitiveNV";
case DecorationPerViewNV: return "PerViewNV";
case DecorationPerTaskNV: return "PerTaskNV";
- case DecorationPerVertexNV: return "PerVertexNV";
+
+ case DecorationPerVertexKHR: return "PerVertexKHR";
case DecorationNonUniformEXT: return "DecorationNonUniformEXT";
case DecorationHlslCounterBufferGOOGLE: return "DecorationHlslCounterBufferGOOGLE";
@@ -392,6 +399,7 @@ const char* BuiltInString(int builtIn)
case BuiltInObjectRayDirectionKHR: return "ObjectRayDirectionKHR";
case BuiltInRayTminKHR: return "RayTminKHR";
case BuiltInRayTmaxKHR: return "RayTmaxKHR";
+ case BuiltInCullMaskKHR: return "CullMaskKHR";
case BuiltInInstanceCustomIndexKHR: return "InstanceCustomIndexKHR";
case BuiltInRayGeometryIndexKHR: return "RayGeometryIndexKHR";
case BuiltInObjectToWorldKHR: return "ObjectToWorldKHR";
@@ -406,8 +414,8 @@ const char* BuiltInString(int builtIn)
case BuiltInViewportMaskPerViewNV: return "ViewportMaskPerViewNV";
// case BuiltInFragmentSizeNV: return "FragmentSizeNV"; // superseded by BuiltInFragSizeEXT
// case BuiltInInvocationsPerPixelNV: return "InvocationsPerPixelNV"; // superseded by BuiltInFragInvocationCountEXT
- case BuiltInBaryCoordNV: return "BaryCoordNV";
- case BuiltInBaryCoordNoPerspNV: return "BaryCoordNoPerspNV";
+ case BuiltInBaryCoordKHR: return "BaryCoordKHR";
+ case BuiltInBaryCoordNoPerspKHR: return "BaryCoordNoPerspKHR";
case BuiltInFragSizeEXT: return "FragSizeEXT";
case BuiltInFragInvocationCountEXT: return "FragInvocationCountEXT";
@@ -427,6 +435,10 @@ const char* BuiltInString(int builtIn)
case BuiltInWarpIDNV: return "WarpIDNV";
case BuiltInSMIDNV: return "SMIDNV";
case BuiltInCurrentRayTimeNV: return "CurrentRayTimeNV";
+ case BuiltInPrimitivePointIndicesEXT: return "PrimitivePointIndicesEXT";
+ case BuiltInPrimitiveLineIndicesEXT: return "PrimitiveLineIndicesEXT";
+ case BuiltInPrimitiveTriangleIndicesEXT: return "PrimitiveTriangleIndicesEXT";
+ case BuiltInCullPrimitiveEXT: return "CullPrimitiveEXT";
default: return "Bad";
}
@@ -925,14 +937,16 @@ const char* CapabilityString(int info)
case CapabilityRayTracingNV: return "RayTracingNV";
case CapabilityRayTracingMotionBlurNV: return "RayTracingMotionBlurNV";
case CapabilityRayTracingKHR: return "RayTracingKHR";
+ case CapabilityRayCullMaskKHR: return "RayCullMaskKHR";
case CapabilityRayQueryKHR: return "RayQueryKHR";
case CapabilityRayTracingProvisionalKHR: return "RayTracingProvisionalKHR";
case CapabilityRayTraversalPrimitiveCullingKHR: return "RayTraversalPrimitiveCullingKHR";
case CapabilityComputeDerivativeGroupQuadsNV: return "ComputeDerivativeGroupQuadsNV";
case CapabilityComputeDerivativeGroupLinearNV: return "ComputeDerivativeGroupLinearNV";
- case CapabilityFragmentBarycentricNV: return "FragmentBarycentricNV";
+ case CapabilityFragmentBarycentricKHR: return "FragmentBarycentricKHR";
case CapabilityMeshShadingNV: return "MeshShadingNV";
case CapabilityImageFootprintNV: return "ImageFootprintNV";
+ case CapabilityMeshShadingEXT: return "MeshShadingEXT";
// case CapabilityShadingRateNV: return "ShadingRateNV"; // superseded by FragmentDensityEXT
case CapabilitySampleMaskOverrideCoverageNV: return "SampleMaskOverrideCoverageNV";
case CapabilityFragmentDensityEXT: return "FragmentDensityEXT";
@@ -1400,6 +1414,8 @@ const char* OpcodeString(int op)
case OpGroupNonUniformPartitionNV: return "OpGroupNonUniformPartitionNV";
case OpImageSampleFootprintNV: return "OpImageSampleFootprintNV";
case OpWritePackedPrimitiveIndices4x8NV: return "OpWritePackedPrimitiveIndices4x8NV";
+ case OpEmitMeshTasksEXT: return "OpEmitMeshTasksEXT";
+ case OpSetMeshOutputsEXT: return "OpSetMeshOutputsEXT";
case OpTypeRayQueryKHR: return "OpTypeRayQueryKHR";
case OpRayQueryInitializeKHR: return "OpRayQueryInitializeKHR";
@@ -2974,6 +2990,17 @@ void Parameterize()
InstructionDesc[OpWritePackedPrimitiveIndices4x8NV].operands.push(OperandId, "'Index Offset'");
InstructionDesc[OpWritePackedPrimitiveIndices4x8NV].operands.push(OperandId, "'Packed Indices'");
+ InstructionDesc[OpEmitMeshTasksEXT].operands.push(OperandId, "'groupCountX'");
+ InstructionDesc[OpEmitMeshTasksEXT].operands.push(OperandId, "'groupCountY'");
+ InstructionDesc[OpEmitMeshTasksEXT].operands.push(OperandId, "'groupCountZ'");
+ InstructionDesc[OpEmitMeshTasksEXT].operands.push(OperandId, "'Payload'");
+ InstructionDesc[OpEmitMeshTasksEXT].setResultAndType(false, false);
+
+ InstructionDesc[OpSetMeshOutputsEXT].operands.push(OperandId, "'vertexCount'");
+ InstructionDesc[OpSetMeshOutputsEXT].operands.push(OperandId, "'primitiveCount'");
+ InstructionDesc[OpSetMeshOutputsEXT].setResultAndType(false, false);
+
+
InstructionDesc[OpTypeCooperativeMatrixNV].operands.push(OperandId, "'Component Type'");
InstructionDesc[OpTypeCooperativeMatrixNV].operands.push(OperandId, "'Scope'");
InstructionDesc[OpTypeCooperativeMatrixNV].operands.push(OperandId, "'Rows'");
diff --git a/thirdparty/glslang/SPIRV/spirv.hpp b/thirdparty/glslang/SPIRV/spirv.hpp
index 3d500ebbd9..f85469d441 100644
--- a/thirdparty/glslang/SPIRV/spirv.hpp
+++ b/thirdparty/glslang/SPIRV/spirv.hpp
@@ -1,19 +1,19 @@
// Copyright (c) 2014-2020 The Khronos Group Inc.
-//
+//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and/or associated documentation files (the "Materials"),
// to deal in the Materials without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Materials, and to permit persons to whom the
// Materials are furnished to do so, subject to the following conditions:
-//
+//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Materials.
-//
+//
// MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS
// STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND
-// HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/
-//
+// HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/
+//
// THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
@@ -27,7 +27,7 @@
// Enumeration tokens for SPIR-V, in various styles:
// C, C++, C++11, JSON, Lua, Python, C#, D
-//
+//
// - C will have tokens with a "Spv" prefix, e.g.: SpvSourceLanguageGLSL
// - C++ will have tokens in the "spv" name space, e.g.: spv::SourceLanguageGLSL
// - C++11 will use enum classes in the spv namespace, e.g.: spv::SourceLanguage::GLSL
@@ -36,7 +36,7 @@
// - C# will use enum classes in the Specification class located in the "Spv" namespace,
// e.g.: Spv.Specification.SourceLanguage.GLSL
// - D will have tokens under the "spv" module, e.g: spv.SourceLanguage.GLSL
-//
+//
// Some tokens act like mask values, which can be OR'd together,
// while others are mutually exclusive. The mask-like ones have
// "Mask" in their name, and a parallel enum that has the shift
@@ -91,6 +91,8 @@ enum ExecutionModel {
ExecutionModelMissNV = 5317,
ExecutionModelCallableKHR = 5318,
ExecutionModelCallableNV = 5318,
+ ExecutionModelTaskEXT = 5364,
+ ExecutionModelMeshEXT = 5365,
ExecutionModelMax = 0x7fffffff,
};
@@ -158,11 +160,21 @@ enum ExecutionMode {
ExecutionModeSignedZeroInfNanPreserve = 4461,
ExecutionModeRoundingModeRTE = 4462,
ExecutionModeRoundingModeRTZ = 4463,
+ ExecutionModeEarlyAndLateFragmentTestsAMD = 5017,
ExecutionModeStencilRefReplacingEXT = 5027,
+ ExecutionModeStencilRefUnchangedFrontAMD = 5079,
+ ExecutionModeStencilRefGreaterFrontAMD = 5080,
+ ExecutionModeStencilRefLessFrontAMD = 5081,
+ ExecutionModeStencilRefUnchangedBackAMD = 5082,
+ ExecutionModeStencilRefGreaterBackAMD = 5083,
+ ExecutionModeStencilRefLessBackAMD = 5084,
+ ExecutionModeOutputLinesEXT = 5269,
ExecutionModeOutputLinesNV = 5269,
+ ExecutionModeOutputPrimitivesEXT = 5270,
ExecutionModeOutputPrimitivesNV = 5270,
ExecutionModeDerivativeGroupQuadsNV = 5289,
ExecutionModeDerivativeGroupLinearNV = 5290,
+ ExecutionModeOutputTrianglesEXT = 5298,
ExecutionModeOutputTrianglesNV = 5298,
ExecutionModePixelInterlockOrderedEXT = 5366,
ExecutionModePixelInterlockUnorderedEXT = 5367,
@@ -211,6 +223,7 @@ enum StorageClass {
StorageClassShaderRecordBufferNV = 5343,
StorageClassPhysicalStorageBuffer = 5349,
StorageClassPhysicalStorageBufferEXT = 5349,
+ StorageClassTaskPayloadWorkgroupEXT = 5402,
StorageClassCodeSectionINTEL = 5605,
StorageClassDeviceOnlyINTEL = 5936,
StorageClassHostOnlyINTEL = 5937,
@@ -493,6 +506,7 @@ enum Decoration {
DecorationPassthroughNV = 5250,
DecorationViewportRelativeNV = 5252,
DecorationSecondaryViewportRelativeNV = 5256,
+ DecorationPerPrimitiveEXT = 5271,
DecorationPerPrimitiveNV = 5271,
DecorationPerViewNV = 5272,
DecorationPerTaskNV = 5273,
@@ -640,6 +654,10 @@ enum BuiltIn {
BuiltInFragmentSizeNV = 5292,
BuiltInFragInvocationCountEXT = 5293,
BuiltInInvocationsPerPixelNV = 5293,
+ BuiltInPrimitivePointIndicesEXT = 5294,
+ BuiltInPrimitiveLineIndicesEXT = 5295,
+ BuiltInPrimitiveTriangleIndicesEXT = 5296,
+ BuiltInCullPrimitiveEXT = 5299,
BuiltInLaunchIdKHR = 5319,
BuiltInLaunchIdNV = 5319,
BuiltInLaunchSizeKHR = 5320,
@@ -673,6 +691,7 @@ enum BuiltIn {
BuiltInSMCountNV = 5375,
BuiltInWarpIDNV = 5376,
BuiltInSMIDNV = 5377,
+ BuiltInCullMaskKHR = 6021,
BuiltInMax = 0x7fffffff,
};
@@ -975,7 +994,8 @@ enum Capability {
CapabilityFragmentFullyCoveredEXT = 5265,
CapabilityMeshShadingNV = 5266,
CapabilityImageFootprintNV = 5282,
- CapabilityFragmentBarycentricKHR = 5284,
+ CapabilityMeshShadingEXT = 5283,
+ CapabilityFragmentBarycentricKHR = 5284,
CapabilityFragmentBarycentricNV = 5284,
CapabilityComputeDerivativeGroupQuadsNV = 5288,
CapabilityFragmentDensityEXT = 5291,
@@ -1069,6 +1089,7 @@ enum Capability {
CapabilityDotProductInput4x8BitPackedKHR = 6018,
CapabilityDotProduct = 6019,
CapabilityDotProductKHR = 6019,
+ CapabilityRayCullMaskKHR = 6020,
CapabilityBitInstructions = 6025,
CapabilityAtomicFloat32AddEXT = 6033,
CapabilityAtomicFloat64AddEXT = 6034,
@@ -1568,6 +1589,8 @@ enum Op {
OpFragmentFetchAMD = 5012,
OpReadClockKHR = 5056,
OpImageSampleFootprintNV = 5283,
+ OpEmitMeshTasksEXT = 5294,
+ OpSetMeshOutputsEXT = 5295,
OpGroupNonUniformPartitionNV = 5296,
OpWritePackedPrimitiveIndices4x8NV = 5299,
OpReportIntersectionKHR = 5334,
@@ -2225,6 +2248,8 @@ inline void HasResultAndType(Op opcode, bool *hasResult, bool *hasResultType) {
case OpReadClockKHR: *hasResult = true; *hasResultType = true; break;
case OpImageSampleFootprintNV: *hasResult = true; *hasResultType = true; break;
case OpGroupNonUniformPartitionNV: *hasResult = true; *hasResultType = true; break;
+ case OpEmitMeshTasksEXT: *hasResult = false; *hasResultType = false; break;
+ case OpSetMeshOutputsEXT: *hasResult = false; *hasResultType = false; break;
case OpWritePackedPrimitiveIndices4x8NV: *hasResult = false; *hasResultType = false; break;
case OpReportIntersectionNV: *hasResult = true; *hasResultType = true; break;
case OpIgnoreIntersectionNV: *hasResult = false; *hasResultType = false; break;
@@ -2506,4 +2531,3 @@ inline FragmentShadingRateMask operator|(FragmentShadingRateMask a, FragmentShad
} // end namespace spv
#endif // #ifndef spirv_HPP
-
diff --git a/thirdparty/glslang/SPIRV/spvIR.h b/thirdparty/glslang/SPIRV/spvIR.h
index 5249a5ba73..09691273ab 100644
--- a/thirdparty/glslang/SPIRV/spvIR.h
+++ b/thirdparty/glslang/SPIRV/spvIR.h
@@ -349,6 +349,7 @@ public:
const std::vector<Block*>& getBlocks() const { return blocks; }
void addLocalVariable(std::unique_ptr<Instruction> inst);
Id getReturnType() const { return functionInstruction.getTypeId(); }
+ Id getFuncId() const { return functionInstruction.getResultId(); }
void setReturnPrecision(Decoration precision)
{
if (precision == DecorationRelaxedPrecision)
@@ -357,6 +358,14 @@ public:
Decoration getReturnPrecision() const
{ return reducedPrecisionReturn ? DecorationRelaxedPrecision : NoPrecision; }
+ void setDebugLineInfo(Id fileName, int line, int column) {
+ lineInstruction = std::unique_ptr<Instruction>{new Instruction(OpLine)};
+ lineInstruction->addIdOperand(fileName);
+ lineInstruction->addImmediateOperand(line);
+ lineInstruction->addImmediateOperand(column);
+ }
+ bool hasDebugLineInfo() const { return lineInstruction != nullptr; }
+
void setImplicitThis() { implicitThis = true; }
bool hasImplicitThis() const { return implicitThis; }
@@ -373,6 +382,11 @@ public:
void dump(std::vector<unsigned int>& out) const
{
+ // OpLine
+ if (lineInstruction != nullptr) {
+ lineInstruction->dump(out);
+ }
+
// OpFunction
functionInstruction.dump(out);
@@ -391,6 +405,7 @@ protected:
Function& operator=(Function&);
Module& parent;
+ std::unique_ptr<Instruction> lineInstruction;
Instruction functionInstruction;
std::vector<Instruction*> parameterInstructions;
std::vector<Block*> blocks;
@@ -457,7 +472,8 @@ protected:
// - the OpFunction instruction
// - all the OpFunctionParameter instructions
__inline Function::Function(Id id, Id resultType, Id functionType, Id firstParamId, Module& parent)
- : parent(parent), functionInstruction(id, resultType, OpFunction), implicitThis(false),
+ : parent(parent), lineInstruction(nullptr),
+ functionInstruction(id, resultType, OpFunction), implicitThis(false),
reducedPrecisionReturn(false)
{
// OpFunction
diff --git a/thirdparty/glslang/glslang/Include/BaseTypes.h b/thirdparty/glslang/glslang/Include/BaseTypes.h
index c8203c2232..156d98b9af 100644
--- a/thirdparty/glslang/glslang/Include/BaseTypes.h
+++ b/thirdparty/glslang/glslang/Include/BaseTypes.h
@@ -105,6 +105,8 @@ enum TStorageQualifier {
EvqCallableData,
EvqCallableDataIn,
+ EvqtaskPayloadSharedEXT,
+
// parameters
EvqIn, // also, for 'in' in the grammar before we know if it's a pipeline input or an 'in' parameter
EvqOut, // also, for 'out' in the grammar before we know if it's a pipeline output or an 'out' parameter
@@ -128,6 +130,7 @@ enum TStorageQualifier {
// built-ins written by fragment shader
EvqFragColor,
EvqFragDepth,
+ EvqFragStencil,
// end of list
EvqLast
@@ -263,6 +266,7 @@ enum TBuiltInVariable {
EbvObjectRayDirection,
EbvRayTmin,
EbvRayTmax,
+ EbvCullMask,
EbvHitT,
EbvHitKind,
EbvObjectToWorld,
@@ -274,6 +278,8 @@ enum TBuiltInVariable {
// barycentrics
EbvBaryCoordNV,
EbvBaryCoordNoPerspNV,
+ EbvBaryCoordEXT,
+ EbvBaryCoordNoPerspEXT,
// mesh shaders
EbvTaskCountNV,
EbvPrimitiveCountNV,
@@ -283,6 +289,11 @@ enum TBuiltInVariable {
EbvLayerPerViewNV,
EbvMeshViewCountNV,
EbvMeshViewIndicesNV,
+ //GL_EXT_mesh_shader
+ EbvPrimitivePointIndicesEXT,
+ EbvPrimitiveLineIndicesEXT,
+ EbvPrimitiveTriangleIndicesEXT,
+ EbvCullPrimitiveEXT,
// sm builtins
EbvWarpsPerSM,
@@ -350,11 +361,13 @@ __inline const char* GetStorageQualifierString(TStorageQualifier q)
case EvqPointCoord: return "gl_PointCoord"; break;
case EvqFragColor: return "fragColor"; break;
case EvqFragDepth: return "gl_FragDepth"; break;
+ case EvqFragStencil: return "gl_FragStencilRefARB"; break;
case EvqPayload: return "rayPayloadNV"; break;
case EvqPayloadIn: return "rayPayloadInNV"; break;
case EvqHitAttr: return "hitAttributeNV"; break;
case EvqCallableData: return "callableDataNV"; break;
case EvqCallableDataIn: return "callableDataInNV"; break;
+ case EvqtaskPayloadSharedEXT: return "taskPayloadSharedEXT"; break;
default: return "unknown qualifier";
}
}
@@ -478,8 +491,10 @@ __inline const char* GetBuiltInVariableString(TBuiltInVariable v)
case EbvWorldToObject: return "WorldToObjectNV";
case EbvCurrentRayTimeNV: return "CurrentRayTimeNV";
- case EbvBaryCoordNV: return "BaryCoordNV";
- case EbvBaryCoordNoPerspNV: return "BaryCoordNoPerspNV";
+ case EbvBaryCoordEXT:
+ case EbvBaryCoordNV: return "BaryCoordKHR";
+ case EbvBaryCoordNoPerspEXT:
+ case EbvBaryCoordNoPerspNV: return "BaryCoordNoPerspKHR";
case EbvTaskCountNV: return "TaskCountNV";
case EbvPrimitiveCountNV: return "PrimitiveCountNV";
@@ -489,6 +504,11 @@ __inline const char* GetBuiltInVariableString(TBuiltInVariable v)
case EbvLayerPerViewNV: return "LayerPerViewNV";
case EbvMeshViewCountNV: return "MeshViewCountNV";
case EbvMeshViewIndicesNV: return "MeshViewIndicesNV";
+ // GL_EXT_mesh_shader
+ case EbvPrimitivePointIndicesEXT: return "PrimitivePointIndicesEXT";
+ case EbvPrimitiveLineIndicesEXT: return "PrimitiveLineIndicesEXT";
+ case EbvPrimitiveTriangleIndicesEXT: return "PrimitiveTriangleIndicesEXT";
+ case EbvCullPrimitiveEXT: return "CullPrimitiveEXT";
case EbvWarpsPerSM: return "WarpsPerSMNV";
case EbvSMCount: return "SMCountNV";
diff --git a/thirdparty/glslang/glslang/Include/Common.h b/thirdparty/glslang/glslang/Include/Common.h
index 9042a1aa27..a5b41cb397 100644
--- a/thirdparty/glslang/glslang/Include/Common.h
+++ b/thirdparty/glslang/glslang/Include/Common.h
@@ -66,7 +66,7 @@ std::string to_string(const T& val) {
}
#endif
-#if (defined(_MSC_VER) && _MSC_VER < 1900 /*vs2015*/) || MINGW_HAS_SECURE_API
+#if (defined(_MSC_VER) && _MSC_VER < 1900 /*vs2015*/) || (defined(MINGW_HAS_SECURE_API) && MINGW_HAS_SECURE_API)
#include <basetsd.h>
#ifndef snprintf
#define snprintf sprintf_s
@@ -218,7 +218,7 @@ template <class T> T Max(const T a, const T b) { return a > b ? a : b; }
//
// Create a TString object from an integer.
//
-#if defined _MSC_VER || MINGW_HAS_SECURE_API
+#if defined(_MSC_VER) || (defined(MINGW_HAS_SECURE_API) && MINGW_HAS_SECURE_API)
inline const TString String(const int i, const int base = 10)
{
char text[16]; // 32 bit ints are at most 10 digits in base 10
diff --git a/thirdparty/glslang/glslang/Include/ResourceLimits.h b/thirdparty/glslang/glslang/Include/ResourceLimits.h
index b670cf163f..b36c8d6273 100644
--- a/thirdparty/glslang/glslang/Include/ResourceLimits.h
+++ b/thirdparty/glslang/glslang/Include/ResourceLimits.h
@@ -142,6 +142,15 @@ struct TBuiltInResource {
int maxTaskWorkGroupSizeY_NV;
int maxTaskWorkGroupSizeZ_NV;
int maxMeshViewCountNV;
+ int maxMeshOutputVerticesEXT;
+ int maxMeshOutputPrimitivesEXT;
+ int maxMeshWorkGroupSizeX_EXT;
+ int maxMeshWorkGroupSizeY_EXT;
+ int maxMeshWorkGroupSizeZ_EXT;
+ int maxTaskWorkGroupSizeX_EXT;
+ int maxTaskWorkGroupSizeY_EXT;
+ int maxTaskWorkGroupSizeZ_EXT;
+ int maxMeshViewCountEXT;
int maxDualSourceDrawBuffersEXT;
TLimits limits;
diff --git a/thirdparty/glslang/glslang/Include/Types.h b/thirdparty/glslang/glslang/Include/Types.h
index 6a7e61df3a..a6f47e8476 100644
--- a/thirdparty/glslang/glslang/Include/Types.h
+++ b/thirdparty/glslang/glslang/Include/Types.h
@@ -443,6 +443,18 @@ enum TLayoutDepth {
EldCount
};
+enum TLayoutStencil {
+ ElsNone,
+ ElsRefUnchangedFrontAMD,
+ ElsRefGreaterFrontAMD,
+ ElsRefLessFrontAMD,
+ ElsRefUnchangedBackAMD,
+ ElsRefGreaterBackAMD,
+ ElsRefLessBackAMD,
+
+ ElsCount
+};
+
enum TBlendEquationShift {
// No 'EBlendNone':
// These are used as bit-shift amounts. A mask of such shifts will have type 'int',
@@ -552,6 +564,7 @@ public:
perViewNV = false;
perTaskNV = false;
#endif
+ pervertexEXT = false;
}
void clearMemory()
@@ -604,7 +617,8 @@ public:
bool isNoContraction() const { return false; }
void setNoContraction() { }
bool isPervertexNV() const { return false; }
- void setNullInit() { }
+ bool isPervertexEXT() const { return pervertexEXT; }
+ void setNullInit() {}
bool isNullInit() const { return false; }
void setSpirvByReference() { }
bool isSpirvByReference() { return false; }
@@ -615,6 +629,7 @@ public:
bool nopersp : 1;
bool explicitInterp : 1;
bool pervertexNV : 1;
+ bool pervertexEXT : 1;
bool perPrimitiveNV : 1;
bool perViewNV : 1;
bool perTaskNV : 1;
@@ -663,12 +678,13 @@ public:
}
bool isAuxiliary() const
{
- return centroid || patch || sample || pervertexNV;
+ return centroid || patch || sample || pervertexNV || pervertexEXT;
}
bool isPatch() const { return patch; }
bool isNoContraction() const { return noContraction; }
void setNoContraction() { noContraction = true; }
bool isPervertexNV() const { return pervertexNV; }
+ bool isPervertexEXT() const { return pervertexEXT; }
void setNullInit() { nullInit = true; }
bool isNullInit() const { return nullInit; }
void setSpirvByReference() { spirvByReference = true; }
@@ -701,6 +717,7 @@ public:
case EvqVaryingOut:
case EvqFragColor:
case EvqFragDepth:
+ case EvqFragStencil:
return true;
default:
return false;
@@ -768,6 +785,7 @@ public:
case EvqVaryingOut:
case EvqFragColor:
case EvqFragDepth:
+ case EvqFragStencil:
return true;
default:
return false;
@@ -815,7 +833,7 @@ public:
}
storage = EvqUniform;
break;
- case EbsStorageBuffer :
+ case EbsStorageBuffer :
storage = EvqBuffer;
break;
#ifndef GLSLANG_WEB
@@ -838,6 +856,7 @@ public:
bool isPerPrimitive() const { return perPrimitiveNV; }
bool isPerView() const { return perViewNV; }
bool isTaskMemory() const { return perTaskNV; }
+ bool isTaskPayload() const { return storage == EvqtaskPayloadSharedEXT; }
bool isAnyPayload() const {
return storage == EvqPayload || storage == EvqPayloadIn;
}
@@ -856,8 +875,8 @@ public:
case EShLangTessEvaluation:
return ! patch && isPipeInput();
case EShLangFragment:
- return pervertexNV && isPipeInput();
- case EShLangMeshNV:
+ return (pervertexNV || pervertexEXT) && isPipeInput();
+ case EShLangMesh:
return ! perTaskNV && isPipeOutput();
default:
@@ -1235,6 +1254,18 @@ public:
default: return "none";
}
}
+ static const char* getLayoutStencilString(TLayoutStencil s)
+ {
+ switch (s) {
+ case ElsRefUnchangedFrontAMD: return "stencil_ref_unchanged_front_amd";
+ case ElsRefGreaterFrontAMD: return "stencil_ref_greater_front_amd";
+ case ElsRefLessFrontAMD: return "stencil_ref_less_front_amd";
+ case ElsRefUnchangedBackAMD: return "stencil_ref_unchanged_back_amd";
+ case ElsRefGreaterBackAMD: return "stencil_ref_greater_back_amd";
+ case ElsRefLessBackAMD: return "stencil_ref_less_back_amd";
+ default: return "none";
+ }
+ }
static const char* getBlendEquationString(TBlendEquationShift e)
{
switch (e) {
@@ -1332,7 +1363,9 @@ struct TShaderQualifiers {
#ifndef GLSLANG_WEB
bool earlyFragmentTests; // fragment input
bool postDepthCoverage; // fragment input
+ bool earlyAndLateFragmentTestsAMD; //fragment input
TLayoutDepth layoutDepth;
+ TLayoutStencil layoutStencil;
bool blendEquation; // true if any blend equation was specified
int numViews; // multiview extenstions
TInterlockOrdering interlockOrdering;
@@ -1342,6 +1375,7 @@ struct TShaderQualifiers {
int primitives; // mesh shader "max_primitives"DerivativeGroupLinear; // true if layout derivative_group_linearNV set
bool layoutPrimitiveCulling; // true if layout primitive_culling set
TLayoutDepth getDepth() const { return layoutDepth; }
+ TLayoutStencil getStencil() const { return layoutStencil; }
#else
TLayoutDepth getDepth() const { return EldNone; }
#endif
@@ -1367,8 +1401,10 @@ struct TShaderQualifiers {
localSizeSpecId[2] = TQualifier::layoutNotSet;
#ifndef GLSLANG_WEB
earlyFragmentTests = false;
+ earlyAndLateFragmentTestsAMD = false;
postDepthCoverage = false;
layoutDepth = EldNone;
+ layoutStencil = ElsNone;
blendEquation = false;
numViews = TQualifier::layoutNotSet;
layoutOverrideCoverage = false;
@@ -1420,10 +1456,14 @@ struct TShaderQualifiers {
#ifndef GLSLANG_WEB
if (src.earlyFragmentTests)
earlyFragmentTests = true;
+ if (src.earlyAndLateFragmentTestsAMD)
+ earlyAndLateFragmentTestsAMD = true;
if (src.postDepthCoverage)
postDepthCoverage = true;
if (src.layoutDepth)
layoutDepth = src.layoutDepth;
+ if (src.layoutStencil)
+ layoutStencil = src.layoutStencil;
if (src.blendEquation)
blendEquation = src.blendEquation;
if (src.numViews != TQualifier::layoutNotSet)
@@ -2142,7 +2182,8 @@ public:
const char* getPrecisionQualifierString() const { return ""; }
TString getBasicTypeString() const { return ""; }
#else
- TString getCompleteString() const
+ TString getCompleteString(bool syntactic = false, bool getQualifiers = true, bool getPrecision = true,
+ bool getType = true, TString name = "", TString structName = "") const
{
TString typeString;
@@ -2150,232 +2191,337 @@ public:
const auto appendUint = [&](unsigned int u) { typeString.append(std::to_string(u).c_str()); };
const auto appendInt = [&](int i) { typeString.append(std::to_string(i).c_str()); };
- if (qualifier.hasSprivDecorate())
+ if (getQualifiers) {
+ if (qualifier.hasSprivDecorate())
appendStr(qualifier.getSpirvDecorateQualifierString().c_str());
- if (qualifier.hasLayout()) {
+ if (qualifier.hasLayout()) {
// To reduce noise, skip this if the only layout is an xfb_buffer
// with no triggering xfb_offset.
TQualifier noXfbBuffer = qualifier;
noXfbBuffer.layoutXfbBuffer = TQualifier::layoutXfbBufferEnd;
if (noXfbBuffer.hasLayout()) {
- appendStr("layout(");
- if (qualifier.hasAnyLocation()) {
- appendStr(" location=");
- appendUint(qualifier.layoutLocation);
- if (qualifier.hasComponent()) {
- appendStr(" component=");
- appendUint(qualifier.layoutComponent);
- }
- if (qualifier.hasIndex()) {
- appendStr(" index=");
- appendUint(qualifier.layoutIndex);
- }
- }
- if (qualifier.hasSet()) {
- appendStr(" set=");
- appendUint(qualifier.layoutSet);
- }
- if (qualifier.hasBinding()) {
- appendStr(" binding=");
- appendUint(qualifier.layoutBinding);
- }
- if (qualifier.hasStream()) {
- appendStr(" stream=");
- appendUint(qualifier.layoutStream);
- }
- if (qualifier.hasMatrix()) {
- appendStr(" ");
- appendStr(TQualifier::getLayoutMatrixString(qualifier.layoutMatrix));
- }
- if (qualifier.hasPacking()) {
- appendStr(" ");
- appendStr(TQualifier::getLayoutPackingString(qualifier.layoutPacking));
- }
- if (qualifier.hasOffset()) {
- appendStr(" offset=");
- appendInt(qualifier.layoutOffset);
- }
- if (qualifier.hasAlign()) {
- appendStr(" align=");
- appendInt(qualifier.layoutAlign);
- }
- if (qualifier.hasFormat()) {
- appendStr(" ");
- appendStr(TQualifier::getLayoutFormatString(qualifier.layoutFormat));
- }
- if (qualifier.hasXfbBuffer() && qualifier.hasXfbOffset()) {
- appendStr(" xfb_buffer=");
- appendUint(qualifier.layoutXfbBuffer);
+ appendStr("layout(");
+ if (qualifier.hasAnyLocation()) {
+ appendStr(" location=");
+ appendUint(qualifier.layoutLocation);
+ if (qualifier.hasComponent()) {
+ appendStr(" component=");
+ appendUint(qualifier.layoutComponent);
}
- if (qualifier.hasXfbOffset()) {
- appendStr(" xfb_offset=");
- appendUint(qualifier.layoutXfbOffset);
+ if (qualifier.hasIndex()) {
+ appendStr(" index=");
+ appendUint(qualifier.layoutIndex);
}
- if (qualifier.hasXfbStride()) {
- appendStr(" xfb_stride=");
- appendUint(qualifier.layoutXfbStride);
- }
- if (qualifier.hasAttachment()) {
- appendStr(" input_attachment_index=");
- appendUint(qualifier.layoutAttachment);
- }
- if (qualifier.hasSpecConstantId()) {
- appendStr(" constant_id=");
- appendUint(qualifier.layoutSpecConstantId);
- }
- if (qualifier.layoutPushConstant)
- appendStr(" push_constant");
- if (qualifier.layoutBufferReference)
- appendStr(" buffer_reference");
- if (qualifier.hasBufferReferenceAlign()) {
- appendStr(" buffer_reference_align=");
- appendUint(1u << qualifier.layoutBufferReferenceAlign);
- }
-
- if (qualifier.layoutPassthrough)
- appendStr(" passthrough");
- if (qualifier.layoutViewportRelative)
- appendStr(" layoutViewportRelative");
- if (qualifier.layoutSecondaryViewportRelativeOffset != -2048) {
- appendStr(" layoutSecondaryViewportRelativeOffset=");
- appendInt(qualifier.layoutSecondaryViewportRelativeOffset);
- }
- if (qualifier.layoutShaderRecord)
- appendStr(" shaderRecordNV");
-
- appendStr(")");
+ }
+ if (qualifier.hasSet()) {
+ appendStr(" set=");
+ appendUint(qualifier.layoutSet);
+ }
+ if (qualifier.hasBinding()) {
+ appendStr(" binding=");
+ appendUint(qualifier.layoutBinding);
+ }
+ if (qualifier.hasStream()) {
+ appendStr(" stream=");
+ appendUint(qualifier.layoutStream);
+ }
+ if (qualifier.hasMatrix()) {
+ appendStr(" ");
+ appendStr(TQualifier::getLayoutMatrixString(qualifier.layoutMatrix));
+ }
+ if (qualifier.hasPacking()) {
+ appendStr(" ");
+ appendStr(TQualifier::getLayoutPackingString(qualifier.layoutPacking));
+ }
+ if (qualifier.hasOffset()) {
+ appendStr(" offset=");
+ appendInt(qualifier.layoutOffset);
+ }
+ if (qualifier.hasAlign()) {
+ appendStr(" align=");
+ appendInt(qualifier.layoutAlign);
+ }
+ if (qualifier.hasFormat()) {
+ appendStr(" ");
+ appendStr(TQualifier::getLayoutFormatString(qualifier.layoutFormat));
+ }
+ if (qualifier.hasXfbBuffer() && qualifier.hasXfbOffset()) {
+ appendStr(" xfb_buffer=");
+ appendUint(qualifier.layoutXfbBuffer);
+ }
+ if (qualifier.hasXfbOffset()) {
+ appendStr(" xfb_offset=");
+ appendUint(qualifier.layoutXfbOffset);
+ }
+ if (qualifier.hasXfbStride()) {
+ appendStr(" xfb_stride=");
+ appendUint(qualifier.layoutXfbStride);
+ }
+ if (qualifier.hasAttachment()) {
+ appendStr(" input_attachment_index=");
+ appendUint(qualifier.layoutAttachment);
+ }
+ if (qualifier.hasSpecConstantId()) {
+ appendStr(" constant_id=");
+ appendUint(qualifier.layoutSpecConstantId);
+ }
+ if (qualifier.layoutPushConstant)
+ appendStr(" push_constant");
+ if (qualifier.layoutBufferReference)
+ appendStr(" buffer_reference");
+ if (qualifier.hasBufferReferenceAlign()) {
+ appendStr(" buffer_reference_align=");
+ appendUint(1u << qualifier.layoutBufferReferenceAlign);
+ }
+
+ if (qualifier.layoutPassthrough)
+ appendStr(" passthrough");
+ if (qualifier.layoutViewportRelative)
+ appendStr(" layoutViewportRelative");
+ if (qualifier.layoutSecondaryViewportRelativeOffset != -2048) {
+ appendStr(" layoutSecondaryViewportRelativeOffset=");
+ appendInt(qualifier.layoutSecondaryViewportRelativeOffset);
+ }
+ if (qualifier.layoutShaderRecord)
+ appendStr(" shaderRecordNV");
+
+ appendStr(")");
}
- }
+ }
- if (qualifier.invariant)
+ if (qualifier.invariant)
appendStr(" invariant");
- if (qualifier.noContraction)
+ if (qualifier.noContraction)
appendStr(" noContraction");
- if (qualifier.centroid)
+ if (qualifier.centroid)
appendStr(" centroid");
- if (qualifier.smooth)
+ if (qualifier.smooth)
appendStr(" smooth");
- if (qualifier.flat)
+ if (qualifier.flat)
appendStr(" flat");
- if (qualifier.nopersp)
+ if (qualifier.nopersp)
appendStr(" noperspective");
- if (qualifier.explicitInterp)
+ if (qualifier.explicitInterp)
appendStr(" __explicitInterpAMD");
- if (qualifier.pervertexNV)
+ if (qualifier.pervertexNV)
appendStr(" pervertexNV");
- if (qualifier.perPrimitiveNV)
+ if (qualifier.pervertexEXT)
+ appendStr(" pervertexEXT");
+ if (qualifier.perPrimitiveNV)
appendStr(" perprimitiveNV");
- if (qualifier.perViewNV)
+ if (qualifier.perViewNV)
appendStr(" perviewNV");
- if (qualifier.perTaskNV)
+ if (qualifier.perTaskNV)
appendStr(" taskNV");
- if (qualifier.patch)
+ if (qualifier.patch)
appendStr(" patch");
- if (qualifier.sample)
+ if (qualifier.sample)
appendStr(" sample");
- if (qualifier.coherent)
+ if (qualifier.coherent)
appendStr(" coherent");
- if (qualifier.devicecoherent)
+ if (qualifier.devicecoherent)
appendStr(" devicecoherent");
- if (qualifier.queuefamilycoherent)
+ if (qualifier.queuefamilycoherent)
appendStr(" queuefamilycoherent");
- if (qualifier.workgroupcoherent)
+ if (qualifier.workgroupcoherent)
appendStr(" workgroupcoherent");
- if (qualifier.subgroupcoherent)
+ if (qualifier.subgroupcoherent)
appendStr(" subgroupcoherent");
- if (qualifier.shadercallcoherent)
+ if (qualifier.shadercallcoherent)
appendStr(" shadercallcoherent");
- if (qualifier.nonprivate)
+ if (qualifier.nonprivate)
appendStr(" nonprivate");
- if (qualifier.volatil)
+ if (qualifier.volatil)
appendStr(" volatile");
- if (qualifier.restrict)
+ if (qualifier.restrict)
appendStr(" restrict");
- if (qualifier.readonly)
+ if (qualifier.readonly)
appendStr(" readonly");
- if (qualifier.writeonly)
+ if (qualifier.writeonly)
appendStr(" writeonly");
- if (qualifier.specConstant)
+ if (qualifier.specConstant)
appendStr(" specialization-constant");
- if (qualifier.nonUniform)
+ if (qualifier.nonUniform)
appendStr(" nonuniform");
- if (qualifier.isNullInit())
+ if (qualifier.isNullInit())
appendStr(" null-init");
- if (qualifier.isSpirvByReference())
+ if (qualifier.isSpirvByReference())
appendStr(" spirv_by_reference");
- if (qualifier.isSpirvLiteral())
+ if (qualifier.isSpirvLiteral())
appendStr(" spirv_literal");
- appendStr(" ");
- appendStr(getStorageQualifierString());
- if (isArray()) {
- for(int i = 0; i < (int)arraySizes->getNumDims(); ++i) {
+ appendStr(" ");
+ appendStr(getStorageQualifierString());
+ }
+ if (getType) {
+ if (syntactic) {
+ if (getPrecision && qualifier.precision != EpqNone) {
+ appendStr(" ");
+ appendStr(getPrecisionQualifierString());
+ }
+ if (isVector() || isMatrix()) {
+ appendStr(" ");
+ switch (basicType) {
+ case EbtDouble:
+ appendStr("d");
+ break;
+ case EbtInt:
+ appendStr("i");
+ break;
+ case EbtUint:
+ appendStr("u");
+ break;
+ case EbtBool:
+ appendStr("b");
+ break;
+ case EbtFloat:
+ default:
+ break;
+ }
+ if (isVector()) {
+ appendStr("vec");
+ appendInt(vectorSize);
+ } else {
+ appendStr("mat");
+ appendInt(matrixCols);
+ appendStr("x");
+ appendInt(matrixRows);
+ }
+ } else if (isStruct() && structure) {
+ appendStr(" ");
+ appendStr(structName.c_str());
+ appendStr("{");
+ bool hasHiddenMember = true;
+ for (size_t i = 0; i < structure->size(); ++i) {
+ if (!(*structure)[i].type->hiddenMember()) {
+ if (!hasHiddenMember)
+ appendStr(", ");
+ typeString.append((*structure)[i].type->getCompleteString(syntactic, getQualifiers, getPrecision, getType, (*structure)[i].type->getFieldName()));
+ hasHiddenMember = false;
+ }
+ }
+ appendStr("}");
+ } else {
+ appendStr(" ");
+ switch (basicType) {
+ case EbtDouble:
+ appendStr("double");
+ break;
+ case EbtInt:
+ appendStr("int");
+ break;
+ case EbtUint:
+ appendStr("uint");
+ break;
+ case EbtBool:
+ appendStr("bool");
+ break;
+ case EbtFloat:
+ appendStr("float");
+ break;
+ default:
+ appendStr("unexpected");
+ break;
+ }
+ }
+ if (name.length() > 0) {
+ appendStr(" ");
+ appendStr(name.c_str());
+ }
+ if (isArray()) {
+ for (int i = 0; i < (int)arraySizes->getNumDims(); ++i) {
int size = arraySizes->getDimSize(i);
if (size == UnsizedArraySize && i == 0 && arraySizes->isVariablyIndexed())
- appendStr(" runtime-sized array of");
+ appendStr("[]");
else {
- if (size == UnsizedArraySize) {
- appendStr(" unsized");
- if (i == 0) {
- appendStr(" ");
- appendInt(arraySizes->getImplicitSize());
- }
- } else {
- appendStr(" ");
- appendInt(arraySizes->getDimSize(i));
+ if (size == UnsizedArraySize) {
+ appendStr("[");
+ if (i == 0)
+ appendInt(arraySizes->getImplicitSize());
+ appendStr("]");
+ }
+ else {
+ appendStr("[");
+ appendInt(arraySizes->getDimSize(i));
+ appendStr("]");
+ }
+ }
+ }
+ }
+ }
+ else {
+ if (isArray()) {
+ for (int i = 0; i < (int)arraySizes->getNumDims(); ++i) {
+ int size = arraySizes->getDimSize(i);
+ if (size == UnsizedArraySize && i == 0 && arraySizes->isVariablyIndexed())
+ appendStr(" runtime-sized array of");
+ else {
+ if (size == UnsizedArraySize) {
+ appendStr(" unsized");
+ if (i == 0) {
+ appendStr(" ");
+ appendInt(arraySizes->getImplicitSize());
}
- appendStr("-element array of");
+ }
+ else {
+ appendStr(" ");
+ appendInt(arraySizes->getDimSize(i));
+ }
+ appendStr("-element array of");
}
+ }
}
- }
- if (isParameterized()) {
- appendStr("<");
- for(int i = 0; i < (int)typeParameters->getNumDims(); ++i) {
+ if (isParameterized()) {
+ appendStr("<");
+ for (int i = 0; i < (int)typeParameters->getNumDims(); ++i) {
appendInt(typeParameters->getDimSize(i));
if (i != (int)typeParameters->getNumDims() - 1)
- appendStr(", ");
+ appendStr(", ");
+ }
+ appendStr(">");
+ }
+ if (getPrecision && qualifier.precision != EpqNone) {
+ appendStr(" ");
+ appendStr(getPrecisionQualifierString());
+ }
+ if (isMatrix()) {
+ appendStr(" ");
+ appendInt(matrixCols);
+ appendStr("X");
+ appendInt(matrixRows);
+ appendStr(" matrix of");
+ }
+ else if (isVector()) {
+ appendStr(" ");
+ appendInt(vectorSize);
+ appendStr("-component vector of");
}
- appendStr(">");
- }
- if (qualifier.precision != EpqNone) {
- appendStr(" ");
- appendStr(getPrecisionQualifierString());
- }
- if (isMatrix()) {
- appendStr(" ");
- appendInt(matrixCols);
- appendStr("X");
- appendInt(matrixRows);
- appendStr(" matrix of");
- } else if (isVector()) {
- appendStr(" ");
- appendInt(vectorSize);
- appendStr("-component vector of");
- }
-
- appendStr(" ");
- typeString.append(getBasicTypeString());
- if (qualifier.builtIn != EbvNone) {
appendStr(" ");
- appendStr(getBuiltInVariableString());
- }
+ typeString.append(getBasicTypeString());
- // Add struct/block members
- if (isStruct() && structure) {
- appendStr("{");
- bool hasHiddenMember = true;
- for (size_t i = 0; i < structure->size(); ++i) {
- if (! (*structure)[i].type->hiddenMember()) {
- if (!hasHiddenMember)
- appendStr(", ");
- typeString.append((*structure)[i].type->getCompleteString());
- typeString.append(" ");
- typeString.append((*structure)[i].type->getFieldName());
- hasHiddenMember = false;
+ if (qualifier.builtIn != EbvNone) {
+ appendStr(" ");
+ appendStr(getBuiltInVariableString());
+ }
+
+ // Add struct/block members
+ if (isStruct() && structure) {
+ appendStr("{");
+ bool hasHiddenMember = true;
+ for (size_t i = 0; i < structure->size(); ++i) {
+ if (!(*structure)[i].type->hiddenMember()) {
+ if (!hasHiddenMember)
+ appendStr(", ");
+ typeString.append((*structure)[i].type->getCompleteString());
+ typeString.append(" ");
+ typeString.append((*structure)[i].type->getFieldName());
+ hasHiddenMember = false;
}
+ }
+ appendStr("}");
}
- appendStr("}");
+ }
}
return typeString;
@@ -2398,7 +2544,7 @@ public:
void setStruct(TTypeList* s) { assert(isStruct()); structure = s; }
TTypeList* getWritableStruct() const { assert(isStruct()); return structure; } // This should only be used when known to not be sharing with other threads
void setBasicType(const TBasicType& t) { basicType = t; }
-
+
int computeNumComponents() const
{
int components = 0;
@@ -2444,10 +2590,20 @@ public:
// type definitions, and member names to be considered the same type.
// This rule applies recursively for nested or embedded types."
//
- bool sameStructType(const TType& right) const
+ // If type mismatch in structure, return member indices through lpidx and rpidx.
+ // If matching members for either block are exhausted, return -1 for exhausted
+ // block and the index of the unmatched member. Otherwise return {-1,-1}.
+ //
+ bool sameStructType(const TType& right, int* lpidx = nullptr, int* rpidx = nullptr) const
{
- // TODO: Why return true when neither types are structures?
+ // Initialize error to general type mismatch.
+ if (lpidx != nullptr) {
+ *lpidx = -1;
+ *rpidx = -1;
+ }
+
// Most commonly, they are both nullptr, or the same pointer to the same actual structure
+ // TODO: Why return true when neither types are structures?
if ((!isStruct() && !right.isStruct()) ||
(isStruct() && right.isStruct() && structure == right.structure))
return true;
@@ -2464,11 +2620,17 @@ public:
bool isGLPerVertex = *typeName == "gl_PerVertex";
// Both being nullptr was caught above, now they both have to be structures of the same number of elements
- if (structure->size() != right.structure->size() && !isGLPerVertex)
+ if (lpidx == nullptr &&
+ (structure->size() != right.structure->size() && !isGLPerVertex)) {
return false;
+ }
// Compare the names and types of all the members, which have to match
for (size_t li = 0, ri = 0; li < structure->size() || ri < right.structure->size(); ++li, ++ri) {
+ if (lpidx != nullptr) {
+ *lpidx = static_cast<int>(li);
+ *rpidx = static_cast<int>(ri);
+ }
if (li < structure->size() && ri < right.structure->size()) {
if ((*structure)[li].type->getFieldName() == (*right.structure)[ri].type->getFieldName()) {
if (*(*structure)[li].type != *(*right.structure)[ri].type)
@@ -2498,11 +2660,19 @@ public:
}
// If we get here, then there should only be inconsistently declared members left
} else if (li < structure->size()) {
- if (!(*structure)[li].type->hiddenMember() && !isInconsistentGLPerVertexMember((*structure)[li].type->getFieldName()))
+ if (!(*structure)[li].type->hiddenMember() && !isInconsistentGLPerVertexMember((*structure)[li].type->getFieldName())) {
+ if (lpidx != nullptr) {
+ *rpidx = -1;
+ }
return false;
+ }
} else {
- if (!(*right.structure)[ri].type->hiddenMember() && !isInconsistentGLPerVertexMember((*right.structure)[ri].type->getFieldName()))
+ if (!(*right.structure)[ri].type->hiddenMember() && !isInconsistentGLPerVertexMember((*right.structure)[ri].type->getFieldName())) {
+ if (lpidx != nullptr) {
+ *lpidx = -1;
+ }
return false;
+ }
}
}
@@ -2526,10 +2696,15 @@ public:
return *referentType == *right.referentType;
}
- // See if two types match, in all aspects except arrayness
- bool sameElementType(const TType& right) const
+ // See if two types match, in all aspects except arrayness
+ // If mismatch in structure members, return member indices in lpidx and rpidx.
+ bool sameElementType(const TType& right, int* lpidx = nullptr, int* rpidx = nullptr) const
{
- return basicType == right.basicType && sameElementShape(right);
+ if (lpidx != nullptr) {
+ *lpidx = -1;
+ *rpidx = -1;
+ }
+ return basicType == right.basicType && sameElementShape(right, lpidx, rpidx);
}
// See if two type's arrayness match
@@ -2563,15 +2738,20 @@ public:
#endif
// See if two type's elements match in all ways except basic type
- bool sameElementShape(const TType& right) const
+ // If mismatch in structure members, return member indices in lpidx and rpidx.
+ bool sameElementShape(const TType& right, int* lpidx = nullptr, int* rpidx = nullptr) const
{
- return sampler == right.sampler &&
+ if (lpidx != nullptr) {
+ *lpidx = -1;
+ *rpidx = -1;
+ }
+ return ((basicType != EbtSampler && right.basicType != EbtSampler) || sampler == right.sampler) &&
vectorSize == right.vectorSize &&
matrixCols == right.matrixCols &&
matrixRows == right.matrixRows &&
vector1 == right.vector1 &&
isCoopMat() == right.isCoopMat() &&
- sameStructType(right) &&
+ sameStructType(right, lpidx, rpidx) &&
sameReferenceType(right);
}
diff --git a/thirdparty/glslang/glslang/Include/glslang_c_interface.h b/thirdparty/glslang/glslang/Include/glslang_c_interface.h
index 14ab6acbc2..f540f26d6c 100644
--- a/thirdparty/glslang/glslang/Include/glslang_c_interface.h
+++ b/thirdparty/glslang/glslang/Include/glslang_c_interface.h
@@ -148,6 +148,15 @@ typedef struct glslang_resource_s {
int max_task_work_group_size_y_nv;
int max_task_work_group_size_z_nv;
int max_mesh_view_count_nv;
+ int max_mesh_output_vertices_ext;
+ int max_mesh_output_primitives_ext;
+ int max_mesh_work_group_size_x_ext;
+ int max_mesh_work_group_size_y_ext;
+ int max_mesh_work_group_size_z_ext;
+ int max_task_work_group_size_x_ext;
+ int max_task_work_group_size_y_ext;
+ int max_task_work_group_size_z_ext;
+ int max_mesh_view_count_ext;
int maxDualSourceDrawBuffersEXT;
glslang_limits_t limits;
@@ -199,6 +208,18 @@ typedef struct glsl_include_callbacks_s {
glsl_free_include_result_func free_include_result;
} glsl_include_callbacks_t;
+/* SpvOptions counterpart */
+typedef struct glslang_spv_options_s {
+ bool generate_debug_info;
+ bool strip_debug_info;
+ bool disable_optimizer;
+ bool optimize_size;
+ bool disassemble;
+ bool validate;
+ bool emit_nonsemantic_shader_debug_info;
+ bool emit_nonsemantic_shader_debug_source;
+} glslang_spv_options_t;
+
#ifdef __cplusplus
extern "C" {
#endif
@@ -224,9 +245,11 @@ GLSLANG_EXPORT void glslang_finalize_process();
GLSLANG_EXPORT glslang_shader_t* glslang_shader_create(const glslang_input_t* input);
GLSLANG_EXPORT void glslang_shader_delete(glslang_shader_t* shader);
+GLSLANG_EXPORT void glslang_shader_set_preamble(glslang_shader_t* shader, const char* s);
GLSLANG_EXPORT void glslang_shader_shift_binding(glslang_shader_t* shader, glslang_resource_type_t res, unsigned int base);
GLSLANG_EXPORT void glslang_shader_shift_binding_for_set(glslang_shader_t* shader, glslang_resource_type_t res, unsigned int base, unsigned int set);
GLSLANG_EXPORT void glslang_shader_set_options(glslang_shader_t* shader, int options); // glslang_shader_options_t
+GLSLANG_EXPORT void glslang_shader_set_glsl_version(glslang_shader_t* shader, int version);
GLSLANG_EXPORT int glslang_shader_preprocess(glslang_shader_t* shader, const glslang_input_t* input);
GLSLANG_EXPORT int glslang_shader_parse(glslang_shader_t* shader, const glslang_input_t* input);
GLSLANG_EXPORT const char* glslang_shader_get_preprocessed_code(glslang_shader_t* shader);
@@ -237,8 +260,11 @@ GLSLANG_EXPORT glslang_program_t* glslang_program_create();
GLSLANG_EXPORT void glslang_program_delete(glslang_program_t* program);
GLSLANG_EXPORT void glslang_program_add_shader(glslang_program_t* program, glslang_shader_t* shader);
GLSLANG_EXPORT int glslang_program_link(glslang_program_t* program, int messages); // glslang_messages_t
+GLSLANG_EXPORT void glslang_program_add_source_text(glslang_program_t* program, glslang_stage_t stage, const char* text, size_t len);
+GLSLANG_EXPORT void glslang_program_set_source_file(glslang_program_t* program, glslang_stage_t stage, const char* file);
GLSLANG_EXPORT int glslang_program_map_io(glslang_program_t* program);
GLSLANG_EXPORT void glslang_program_SPIRV_generate(glslang_program_t* program, glslang_stage_t stage);
+GLSLANG_EXPORT void glslang_program_SPIRV_generate_with_options(glslang_program_t* program, glslang_stage_t stage, glslang_spv_options_t* spv_options);
GLSLANG_EXPORT size_t glslang_program_SPIRV_get_size(glslang_program_t* program);
GLSLANG_EXPORT void glslang_program_SPIRV_get(glslang_program_t* program, unsigned int*);
GLSLANG_EXPORT unsigned int* glslang_program_SPIRV_get_ptr(glslang_program_t* program);
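The C interface additions above expose SpvOptions as glslang_spv_options_t together with glslang_program_SPIRV_generate_with_options, glslang_shader_set_preamble and glslang_shader_set_glsl_version. A hedged sketch, assuming the glslang C headers are on the include path and that the program has already been created, populated and linked elsewhere, of driving SPIR-V generation through the new entry point:

    #include <glslang/Include/glslang_c_interface.h>
    #include <vector>

    // Generate SPIR-V for one stage using the new options struct; the field values
    // chosen here are example settings, not recommendations.
    std::vector<unsigned int> generate_spirv(glslang_program_t* program, glslang_stage_t stage)
    {
        glslang_spv_options_t spv_options = {};      // zero-init: all options off
        spv_options.disable_optimizer = true;        // mirrors the GLSLANG_OPT_NONE default
        spv_options.validate = true;

        glslang_program_SPIRV_generate_with_options(program, stage, &spv_options);

        std::vector<unsigned int> words(glslang_program_SPIRV_get_size(program));
        glslang_program_SPIRV_get(program, words.data());
        return words;
    }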
diff --git a/thirdparty/glslang/glslang/Include/glslang_c_shader_types.h b/thirdparty/glslang/glslang/Include/glslang_c_shader_types.h
index df19777b0b..9bc211496c 100644
--- a/thirdparty/glslang/glslang/Include/glslang_c_shader_types.h
+++ b/thirdparty/glslang/glslang/Include/glslang_c_shader_types.h
@@ -43,14 +43,22 @@ typedef enum {
GLSLANG_STAGE_GEOMETRY,
GLSLANG_STAGE_FRAGMENT,
GLSLANG_STAGE_COMPUTE,
- GLSLANG_STAGE_RAYGEN_NV,
- GLSLANG_STAGE_INTERSECT_NV,
- GLSLANG_STAGE_ANYHIT_NV,
- GLSLANG_STAGE_CLOSESTHIT_NV,
- GLSLANG_STAGE_MISS_NV,
- GLSLANG_STAGE_CALLABLE_NV,
- GLSLANG_STAGE_TASK_NV,
- GLSLANG_STAGE_MESH_NV,
+ GLSLANG_STAGE_RAYGEN,
+ GLSLANG_STAGE_RAYGEN_NV = GLSLANG_STAGE_RAYGEN,
+ GLSLANG_STAGE_INTERSECT,
+ GLSLANG_STAGE_INTERSECT_NV = GLSLANG_STAGE_INTERSECT,
+ GLSLANG_STAGE_ANYHIT,
+ GLSLANG_STAGE_ANYHIT_NV = GLSLANG_STAGE_ANYHIT,
+ GLSLANG_STAGE_CLOSESTHIT,
+ GLSLANG_STAGE_CLOSESTHIT_NV = GLSLANG_STAGE_CLOSESTHIT,
+ GLSLANG_STAGE_MISS,
+ GLSLANG_STAGE_MISS_NV = GLSLANG_STAGE_MISS,
+ GLSLANG_STAGE_CALLABLE,
+ GLSLANG_STAGE_CALLABLE_NV = GLSLANG_STAGE_CALLABLE,
+ GLSLANG_STAGE_TASK,
+ GLSLANG_STAGE_TASK_NV = GLSLANG_STAGE_TASK,
+ GLSLANG_STAGE_MESH,
+ GLSLANG_STAGE_MESH_NV = GLSLANG_STAGE_MESH,
LAST_ELEMENT_MARKER(GLSLANG_STAGE_COUNT),
} glslang_stage_t; // would be better as stage, but this is ancient now
@@ -62,14 +70,22 @@ typedef enum {
GLSLANG_STAGE_GEOMETRY_MASK = (1 << GLSLANG_STAGE_GEOMETRY),
GLSLANG_STAGE_FRAGMENT_MASK = (1 << GLSLANG_STAGE_FRAGMENT),
GLSLANG_STAGE_COMPUTE_MASK = (1 << GLSLANG_STAGE_COMPUTE),
- GLSLANG_STAGE_RAYGEN_NV_MASK = (1 << GLSLANG_STAGE_RAYGEN_NV),
- GLSLANG_STAGE_INTERSECT_NV_MASK = (1 << GLSLANG_STAGE_INTERSECT_NV),
- GLSLANG_STAGE_ANYHIT_NV_MASK = (1 << GLSLANG_STAGE_ANYHIT_NV),
- GLSLANG_STAGE_CLOSESTHIT_NV_MASK = (1 << GLSLANG_STAGE_CLOSESTHIT_NV),
- GLSLANG_STAGE_MISS_NV_MASK = (1 << GLSLANG_STAGE_MISS_NV),
- GLSLANG_STAGE_CALLABLE_NV_MASK = (1 << GLSLANG_STAGE_CALLABLE_NV),
- GLSLANG_STAGE_TASK_NV_MASK = (1 << GLSLANG_STAGE_TASK_NV),
- GLSLANG_STAGE_MESH_NV_MASK = (1 << GLSLANG_STAGE_MESH_NV),
+ GLSLANG_STAGE_RAYGEN_MASK = (1 << GLSLANG_STAGE_RAYGEN),
+ GLSLANG_STAGE_RAYGEN_NV_MASK = GLSLANG_STAGE_RAYGEN_MASK,
+ GLSLANG_STAGE_INTERSECT_MASK = (1 << GLSLANG_STAGE_INTERSECT),
+ GLSLANG_STAGE_INTERSECT_NV_MASK = GLSLANG_STAGE_INTERSECT_MASK,
+ GLSLANG_STAGE_ANYHIT_MASK = (1 << GLSLANG_STAGE_ANYHIT),
+ GLSLANG_STAGE_ANYHIT_NV_MASK = GLSLANG_STAGE_ANYHIT_MASK,
+ GLSLANG_STAGE_CLOSESTHIT_MASK = (1 << GLSLANG_STAGE_CLOSESTHIT),
+ GLSLANG_STAGE_CLOSESTHIT_NV_MASK = GLSLANG_STAGE_CLOSESTHIT_MASK,
+ GLSLANG_STAGE_MISS_MASK = (1 << GLSLANG_STAGE_MISS),
+ GLSLANG_STAGE_MISS_NV_MASK = GLSLANG_STAGE_MISS_MASK,
+ GLSLANG_STAGE_CALLABLE_MASK = (1 << GLSLANG_STAGE_CALLABLE),
+ GLSLANG_STAGE_CALLABLE_NV_MASK = GLSLANG_STAGE_CALLABLE_MASK,
+ GLSLANG_STAGE_TASK_MASK = (1 << GLSLANG_STAGE_TASK),
+ GLSLANG_STAGE_TASK_NV_MASK = GLSLANG_STAGE_TASK_MASK,
+ GLSLANG_STAGE_MESH_MASK = (1 << GLSLANG_STAGE_MESH),
+ GLSLANG_STAGE_MESH_NV_MASK = GLSLANG_STAGE_MESH_MASK,
LAST_ELEMENT_MARKER(GLSLANG_STAGE_MASK_COUNT),
} glslang_stage_mask_t;
@@ -121,7 +137,9 @@ typedef enum {
/* EShExecutable counterpart */
typedef enum { GLSLANG_EX_VERTEX_FRAGMENT, GLSLANG_EX_FRAGMENT } glslang_executable_t;
-/* EShOptimizationLevel counterpart */
+// EShOptimizationLevel counterpart
+// This enum is not used in the current C interface, but could be added at a later date.
+// GLSLANG_OPT_NONE is the current default.
typedef enum {
GLSLANG_OPT_NO_GENERATION,
GLSLANG_OPT_NONE,
@@ -155,6 +173,7 @@ typedef enum {
GLSLANG_MSG_HLSL_LEGALIZATION_BIT = (1 << 12),
GLSLANG_MSG_HLSL_DX9_COMPATIBLE_BIT = (1 << 13),
GLSLANG_MSG_BUILTIN_SYMBOL_TABLE_BIT = (1 << 14),
+ GLSLANG_MSG_ENHANCED = (1 << 15),
LAST_ELEMENT_MARKER(GLSLANG_MSG_COUNT),
} glslang_messages_t;
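Because the stage and stage-mask enumerants above keep their old *_NV spellings as aliases of the renamed values, existing callers stay source- and value-compatible. A small compile-time check, assuming the header above is on the include path:

    #include <glslang/Include/glslang_c_shader_types.h>

    // The NV aliases must stay numerically identical to the renamed enumerants.
    static_assert(GLSLANG_STAGE_MESH_NV == GLSLANG_STAGE_MESH, "stage alias changed");
    static_assert(GLSLANG_STAGE_TASK_NV_MASK == (1 << GLSLANG_STAGE_TASK), "mask alias changed");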
diff --git a/thirdparty/glslang/glslang/Include/intermediate.h b/thirdparty/glslang/glslang/Include/intermediate.h
index 595bd623db..a0240028d1 100644
--- a/thirdparty/glslang/glslang/Include/intermediate.h
+++ b/thirdparty/glslang/glslang/Include/intermediate.h
@@ -67,6 +67,7 @@ class TIntermediate;
enum TOperator {
EOpNull, // if in a node, should only mean a node is still being built
EOpSequence, // denotes a list of statements, or parameters, etc.
+ EOpScope, // Used by debugging to denote a scoped list of statements
EOpLinkerObjects, // for aggregate node of objects the linker may need, if not reference by the rest of the AST
EOpFunctionCall,
EOpFunction, // For function definition
@@ -91,6 +92,8 @@ enum TOperator {
EOpCopyObject,
+ EOpDeclare, // Used by debugging to force declaration of variable in correct scope
+
// (u)int* -> bool
EOpConvInt8ToBool,
EOpConvUint8ToBool,
@@ -934,6 +937,8 @@ enum TOperator {
EOpExecuteCallableNV,
EOpExecuteCallableKHR,
EOpWritePackedPrimitiveIndices4x8NV,
+ EOpEmitMeshTasksEXT,
+ EOpSetMeshOutputsEXT,
//
// GL_EXT_ray_query operations
@@ -1155,7 +1160,7 @@ public:
virtual bool isIntegerDomain() const { return type.isIntegerDomain(); }
bool isAtomic() const { return type.isAtomic(); }
bool isReference() const { return type.isReference(); }
- TString getCompleteString() const { return type.getCompleteString(); }
+ TString getCompleteString(bool enhanced = false) const { return type.getCompleteString(enhanced); }
protected:
TIntermTyped& operator=(const TIntermTyped&);
diff --git a/thirdparty/glslang/glslang/MachineIndependent/Initialize.cpp b/thirdparty/glslang/glslang/MachineIndependent/Initialize.cpp
index 03fdce9f6b..0cbb9e78f4 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/Initialize.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/Initialize.cpp
@@ -2268,11 +2268,11 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"\n"
);
- stageBuiltins[EShLangMeshNV].append(
+ stageBuiltins[EShLangMesh].append(
"void subgroupMemoryBarrierShared();"
"\n"
);
- stageBuiltins[EShLangTaskNV].append(
+ stageBuiltins[EShLangTask].append(
"void subgroupMemoryBarrierShared();"
"\n"
);
@@ -4298,10 +4298,10 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"void barrier();"
);
if ((profile != EEsProfile && version >= 450) || (profile == EEsProfile && version >= 320)) {
- stageBuiltins[EShLangMeshNV].append(
+ stageBuiltins[EShLangMesh].append(
"void barrier();"
);
- stageBuiltins[EShLangTaskNV].append(
+ stageBuiltins[EShLangTask].append(
"void barrier();"
);
}
@@ -4326,11 +4326,11 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
commonBuiltins.append("void memoryBarrierImage();");
}
if ((profile != EEsProfile && version >= 450) || (profile == EEsProfile && version >= 320)) {
- stageBuiltins[EShLangMeshNV].append(
+ stageBuiltins[EShLangMesh].append(
"void memoryBarrierShared();"
"void groupMemoryBarrier();"
);
- stageBuiltins[EShLangTaskNV].append(
+ stageBuiltins[EShLangTask].append(
"void memoryBarrierShared();"
"void groupMemoryBarrier();"
);
@@ -4655,10 +4655,21 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
// Builtins for GL_NV_mesh_shader
if ((profile != EEsProfile && version >= 450) || (profile == EEsProfile && version >= 320)) {
- stageBuiltins[EShLangMeshNV].append(
+ stageBuiltins[EShLangMesh].append(
"void writePackedPrimitiveIndices4x8NV(uint, uint);"
"\n");
}
+ // Builtins for GL_EXT_mesh_shader
+ if ((profile != EEsProfile && version >= 450) || (profile == EEsProfile && version >= 320)) {
+ // Builtins for GL_EXT_mesh_shader
+ stageBuiltins[EShLangTask].append(
+ "void EmitMeshTasksEXT(uint, uint, uint);"
+ "\n");
+
+ stageBuiltins[EShLangMesh].append(
+ "void SetMeshOutputsEXT(uint, uint);"
+ "\n");
+ }
#endif // !GLSLANG_ANGLE
#endif // !GLSLANG_WEB
@@ -4855,7 +4866,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
if ((profile != EEsProfile && version >= 450) || (profile == EEsProfile && version >= 320)) {
// per-vertex attributes
- stageBuiltins[EShLangMeshNV].append(
+ stageBuiltins[EShLangMesh].append(
"out gl_MeshPerVertexNV {"
"vec4 gl_Position;"
"float gl_PointSize;"
@@ -4868,7 +4879,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
);
// per-primitive attributes
- stageBuiltins[EShLangMeshNV].append(
+ stageBuiltins[EShLangMesh].append(
"perprimitiveNV out gl_MeshPerPrimitiveNV {"
"int gl_PrimitiveID;"
"int gl_Layer;"
@@ -4879,7 +4890,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"} gl_MeshPrimitivesNV[];"
);
- stageBuiltins[EShLangMeshNV].append(
+ stageBuiltins[EShLangMesh].append(
"out uint gl_PrimitiveCountNV;"
"out uint gl_PrimitiveIndicesNV[];"
@@ -4893,10 +4904,38 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"in highp uvec3 gl_GlobalInvocationID;"
"in highp uint gl_LocalInvocationIndex;"
+ "\n");
+ // GL_EXT_mesh_shader
+ stageBuiltins[EShLangMesh].append(
+ "out uint gl_PrimitivePointIndicesEXT[];"
+ "out uvec2 gl_PrimitiveLineIndicesEXT[];"
+ "out uvec3 gl_PrimitiveTriangleIndicesEXT[];"
+ "in highp uvec3 gl_NumWorkGroups;"
"\n");
- stageBuiltins[EShLangTaskNV].append(
+ // per-vertex attributes
+ stageBuiltins[EShLangMesh].append(
+ "out gl_MeshPerVertexEXT {"
+ "vec4 gl_Position;"
+ "float gl_PointSize;"
+ "float gl_ClipDistance[];"
+ "float gl_CullDistance[];"
+ "} gl_MeshVerticesEXT[];"
+ );
+
+ // per-primitive attributes
+ stageBuiltins[EShLangMesh].append(
+ "perprimitiveEXT out gl_MeshPerPrimitiveEXT {"
+ "int gl_PrimitiveID;"
+ "int gl_Layer;"
+ "int gl_ViewportIndex;"
+ "bool gl_CullPrimitiveEXT;"
+ "int gl_PrimitiveShadingRateEXT;"
+ "} gl_MeshPrimitivesEXT[];"
+ );
+
+ stageBuiltins[EShLangTask].append(
"out uint gl_TaskCountNV;"
"const highp uvec3 gl_WorkGroupSize = uvec3(1,1,1);"
@@ -4909,27 +4948,28 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"in uint gl_MeshViewCountNV;"
"in uint gl_MeshViewIndicesNV[4];"
-
+ "in highp uvec3 gl_NumWorkGroups;"
"\n");
}
if (profile != EEsProfile && version >= 450) {
- stageBuiltins[EShLangMeshNV].append(
+ stageBuiltins[EShLangMesh].append(
"in highp int gl_DeviceIndex;" // GL_EXT_device_group
"in int gl_DrawIDARB;" // GL_ARB_shader_draw_parameters
+ "in int gl_ViewIndex;" // GL_EXT_multiview
"\n");
- stageBuiltins[EShLangTaskNV].append(
+ stageBuiltins[EShLangTask].append(
"in highp int gl_DeviceIndex;" // GL_EXT_device_group
"in int gl_DrawIDARB;" // GL_ARB_shader_draw_parameters
"\n");
if (version >= 460) {
- stageBuiltins[EShLangMeshNV].append(
+ stageBuiltins[EShLangMesh].append(
"in int gl_DrawID;"
"\n");
- stageBuiltins[EShLangTaskNV].append(
+ stageBuiltins[EShLangTask].append(
"in int gl_DrawID;"
"\n");
}
@@ -5571,6 +5611,8 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"flat in int gl_InvocationsPerPixelNV;"
"in vec3 gl_BaryCoordNV;" // GL_NV_fragment_shader_barycentric
"in vec3 gl_BaryCoordNoPerspNV;"
+ "in vec3 gl_BaryCoordEXT;" // GL_EXT_fragment_shader_barycentric
+ "in vec3 gl_BaryCoordNoPerspEXT;"
);
if (version >= 450)
@@ -5635,7 +5677,9 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
stageBuiltins[EShLangFragment].append(
"in vec3 gl_BaryCoordNV;"
"in vec3 gl_BaryCoordNoPerspNV;"
- );
+ "in vec3 gl_BaryCoordEXT;"
+ "in vec3 gl_BaryCoordNoPerspEXT;"
+ );
if (version >= 310)
stageBuiltins[EShLangFragment].append(
"flat in highp int gl_ShadingRateEXT;" // GL_EXT_fragment_shading_rate
@@ -5700,8 +5744,8 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
stageBuiltins[EShLangGeometry] .append(ballotDecls);
stageBuiltins[EShLangCompute] .append(ballotDecls);
stageBuiltins[EShLangFragment] .append(fragmentBallotDecls);
- stageBuiltins[EShLangMeshNV] .append(ballotDecls);
- stageBuiltins[EShLangTaskNV] .append(ballotDecls);
+ stageBuiltins[EShLangMesh] .append(ballotDecls);
+ stageBuiltins[EShLangTask] .append(ballotDecls);
stageBuiltins[EShLangRayGen] .append(rtBallotDecls);
stageBuiltins[EShLangIntersect] .append(rtBallotDecls);
// No volatile qualifier on these builtins in any-hit
@@ -5769,10 +5813,10 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
stageBuiltins[EShLangCompute] .append(subgroupDecls);
stageBuiltins[EShLangCompute] .append(computeSubgroupDecls);
stageBuiltins[EShLangFragment] .append(fragmentSubgroupDecls);
- stageBuiltins[EShLangMeshNV] .append(subgroupDecls);
- stageBuiltins[EShLangMeshNV] .append(computeSubgroupDecls);
- stageBuiltins[EShLangTaskNV] .append(subgroupDecls);
- stageBuiltins[EShLangTaskNV] .append(computeSubgroupDecls);
+ stageBuiltins[EShLangMesh] .append(subgroupDecls);
+ stageBuiltins[EShLangMesh] .append(computeSubgroupDecls);
+ stageBuiltins[EShLangTask] .append(subgroupDecls);
+ stageBuiltins[EShLangTask] .append(computeSubgroupDecls);
stageBuiltins[EShLangRayGen] .append(rtSubgroupDecls);
stageBuiltins[EShLangIntersect] .append(rtSubgroupDecls);
// No volatile qualifier on these builtins in any-hit
@@ -5806,6 +5850,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"const uint gl_RayFlagsCullNoOpaqueEXT = 128U;"
"const uint gl_RayFlagsSkipTrianglesEXT = 256U;"
"const uint gl_RayFlagsSkipAABBEXT = 512U;"
+ "const uint gl_RayFlagsForceOpacityMicromap2StateEXT = 1024U;"
"const uint gl_HitKindFrontFacingTriangleEXT = 254U;"
"const uint gl_HitKindBackFacingTriangleEXT = 255U;"
"\n";
@@ -5857,6 +5902,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"in uint gl_IncomingRayFlagsNV;"
"in uint gl_IncomingRayFlagsEXT;"
"in float gl_CurrentRayTimeNV;"
+ "in uint gl_CullMaskEXT;"
"\n";
const char *hitDecls =
"in uvec3 gl_LaunchIDNV;"
@@ -5893,6 +5939,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"in uint gl_IncomingRayFlagsNV;"
"in uint gl_IncomingRayFlagsEXT;"
"in float gl_CurrentRayTimeNV;"
+ "in uint gl_CullMaskEXT;"
"\n";
const char *missDecls =
"in uvec3 gl_LaunchIDNV;"
@@ -5912,6 +5959,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"in uint gl_IncomingRayFlagsNV;"
"in uint gl_IncomingRayFlagsEXT;"
"in float gl_CurrentRayTimeNV;"
+ "in uint gl_CullMaskEXT;"
"\n";
const char *callableDecls =
@@ -7607,6 +7655,23 @@ static void SpecialQualifier(const char* name, TStorageQualifier qualifier, TBui
}
//
+// Modify the symbol's flat decoration.
+//
+// Safe to call even if name is not present.
+//
+// Originally written to transform gl_SubGroupSizeARB from uniform to fragment input in Vulkan.
+//
+static void ModifyFlatDecoration(const char* name, bool flat, TSymbolTable& symbolTable)
+{
+ TSymbol* symbol = symbolTable.find(name);
+ if (symbol == nullptr)
+ return;
+
+ TQualifier& symQualifier = symbol->getWritableType().getQualifier();
+ symQualifier.flat = flat;
+}
+
+//
// To tag built-in variables with their TBuiltInVariable enum. Use this when the
// normal declaration text already gets the qualifier right, and all that's needed
// is setting the builtIn field. This should be the normal way for all new
@@ -7989,9 +8054,12 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
BuiltInVariable("gl_SubGroupLeMaskARB", EbvSubGroupLeMask, symbolTable);
BuiltInVariable("gl_SubGroupLtMaskARB", EbvSubGroupLtMask, symbolTable);
- if (spvVersion.vulkan > 0)
+ if (spvVersion.vulkan > 0) {
// Treat "gl_SubGroupSizeARB" as shader input instead of uniform for Vulkan
SpecialQualifier("gl_SubGroupSizeARB", EvqVaryingIn, EbvSubGroupSize, symbolTable);
+ if (language == EShLangFragment)
+ ModifyFlatDecoration("gl_SubGroupSizeARB", true, symbolTable);
+ }
else
BuiltInVariable("gl_SubGroupSizeARB", EbvSubGroupSize, symbolTable);
}
@@ -8058,6 +8126,7 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
SpecialQualifier("gl_FragDepth", EvqFragDepth, EbvFragDepth, symbolTable);
#ifndef GLSLANG_WEB
SpecialQualifier("gl_FragDepthEXT", EvqFragDepth, EbvFragDepth, symbolTable);
+ SpecialQualifier("gl_FragStencilRefARB", EvqFragStencil, EbvFragStencilRef, symbolTable);
SpecialQualifier("gl_HelperInvocation", EvqVaryingIn, EbvHelperInvocation, symbolTable);
BuiltInVariable("gl_ClipDistance", EbvClipDistance, symbolTable);
@@ -8099,6 +8168,7 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.setFunctionExtensions("rayQueryGetWorldRayDirectionEXT", 1, &E_GL_EXT_ray_query);
symbolTable.setVariableExtensions("gl_RayFlagsSkipAABBEXT", 1, &E_GL_EXT_ray_flags_primitive_culling);
symbolTable.setVariableExtensions("gl_RayFlagsSkipTrianglesEXT", 1, &E_GL_EXT_ray_flags_primitive_culling);
+ symbolTable.setVariableExtensions("gl_RayFlagsForceOpacityMicromap2StateEXT", 1, &E_GL_EXT_opacity_micromap);
}
if ((profile != EEsProfile && version >= 130) ||
@@ -8318,6 +8388,10 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.setVariableExtensions("gl_BaryCoordNoPerspNV", 1, &E_GL_NV_fragment_shader_barycentric);
BuiltInVariable("gl_BaryCoordNV", EbvBaryCoordNV, symbolTable);
BuiltInVariable("gl_BaryCoordNoPerspNV", EbvBaryCoordNoPerspNV, symbolTable);
+ symbolTable.setVariableExtensions("gl_BaryCoordEXT", 1, &E_GL_EXT_fragment_shader_barycentric);
+ symbolTable.setVariableExtensions("gl_BaryCoordNoPerspEXT", 1, &E_GL_EXT_fragment_shader_barycentric);
+ BuiltInVariable("gl_BaryCoordEXT", EbvBaryCoordEXT, symbolTable);
+ BuiltInVariable("gl_BaryCoordNoPerspEXT", EbvBaryCoordNoPerspEXT, symbolTable);
}
if ((profile != EEsProfile && version >= 450) ||
@@ -8418,9 +8492,12 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
BuiltInVariable("gl_SubGroupLeMaskARB", EbvSubGroupLeMask, symbolTable);
BuiltInVariable("gl_SubGroupLtMaskARB", EbvSubGroupLtMask, symbolTable);
- if (spvVersion.vulkan > 0)
+ if (spvVersion.vulkan > 0) {
// Treat "gl_SubGroupSizeARB" as shader input instead of uniform for Vulkan
SpecialQualifier("gl_SubGroupSizeARB", EvqVaryingIn, EbvSubGroupSize, symbolTable);
+ if (language == EShLangFragment)
+ ModifyFlatDecoration("gl_SubGroupSizeARB", true, symbolTable);
+ }
else
BuiltInVariable("gl_SubGroupSizeARB", EbvSubGroupSize, symbolTable);
}
@@ -8635,9 +8712,12 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
BuiltInVariable("gl_SubGroupLeMaskARB", EbvSubGroupLeMask, symbolTable);
BuiltInVariable("gl_SubGroupLtMaskARB", EbvSubGroupLtMask, symbolTable);
- if (spvVersion.vulkan > 0)
+ if (spvVersion.vulkan > 0) {
// Treat "gl_SubGroupSizeARB" as shader input instead of uniform for Vulkan
SpecialQualifier("gl_SubGroupSizeARB", EvqVaryingIn, EbvSubGroupSize, symbolTable);
+ if (language == EShLangFragment)
+ ModifyFlatDecoration("gl_SubGroupSizeARB", true, symbolTable);
+ }
else
BuiltInVariable("gl_SubGroupSizeARB", EbvSubGroupSize, symbolTable);
}
@@ -8743,6 +8823,7 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.setVariableExtensions("gl_RayTminEXT", 1, &E_GL_EXT_ray_tracing);
symbolTable.setVariableExtensions("gl_RayTmaxNV", 1, &E_GL_NV_ray_tracing);
symbolTable.setVariableExtensions("gl_RayTmaxEXT", 1, &E_GL_EXT_ray_tracing);
+ symbolTable.setVariableExtensions("gl_CullMaskEXT", 1, &E_GL_EXT_ray_cull_mask);
symbolTable.setVariableExtensions("gl_HitTNV", 1, &E_GL_NV_ray_tracing);
symbolTable.setVariableExtensions("gl_HitTEXT", 1, &E_GL_EXT_ray_tracing);
symbolTable.setVariableExtensions("gl_HitKindNV", 1, &E_GL_NV_ray_tracing);
@@ -8792,6 +8873,7 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
BuiltInVariable("gl_RayTminEXT", EbvRayTmin, symbolTable);
BuiltInVariable("gl_RayTmaxNV", EbvRayTmax, symbolTable);
BuiltInVariable("gl_RayTmaxEXT", EbvRayTmax, symbolTable);
+ BuiltInVariable("gl_CullMaskEXT", EbvCullMask, symbolTable);
BuiltInVariable("gl_HitTNV", EbvHitT, symbolTable);
BuiltInVariable("gl_HitTEXT", EbvHitT, symbolTable);
BuiltInVariable("gl_HitKindNV", EbvHitKind, symbolTable);
@@ -8823,9 +8905,12 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
BuiltInVariable("gl_SubGroupLeMaskARB", EbvSubGroupLeMask, symbolTable);
BuiltInVariable("gl_SubGroupLtMaskARB", EbvSubGroupLtMask, symbolTable);
- if (spvVersion.vulkan > 0)
+ if (spvVersion.vulkan > 0) {
// Treat "gl_SubGroupSizeARB" as shader input instead of uniform for Vulkan
SpecialQualifier("gl_SubGroupSizeARB", EvqVaryingIn, EbvSubGroupSize, symbolTable);
+ if (language == EShLangFragment)
+ ModifyFlatDecoration("gl_SubGroupSizeARB", true, symbolTable);
+ }
else
BuiltInVariable("gl_SubGroupSizeARB", EbvSubGroupSize, symbolTable);
@@ -8869,7 +8954,7 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
}
break;
- case EShLangMeshNV:
+ case EShLangMesh:
if ((profile != EEsProfile && version >= 450) || (profile == EEsProfile && version >= 320)) {
// per-vertex builtins
symbolTable.setVariableExtensions("gl_MeshVerticesNV", "gl_Position", 1, &E_GL_NV_mesh_shader);
@@ -8913,12 +8998,19 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.setVariableExtensions("gl_PrimitiveIndicesNV", 1, &E_GL_NV_mesh_shader);
symbolTable.setVariableExtensions("gl_MeshViewCountNV", 1, &E_GL_NV_mesh_shader);
symbolTable.setVariableExtensions("gl_MeshViewIndicesNV", 1, &E_GL_NV_mesh_shader);
- symbolTable.setVariableExtensions("gl_WorkGroupSize", 1, &E_GL_NV_mesh_shader);
- symbolTable.setVariableExtensions("gl_WorkGroupID", 1, &E_GL_NV_mesh_shader);
- symbolTable.setVariableExtensions("gl_LocalInvocationID", 1, &E_GL_NV_mesh_shader);
- symbolTable.setVariableExtensions("gl_GlobalInvocationID", 1, &E_GL_NV_mesh_shader);
- symbolTable.setVariableExtensions("gl_LocalInvocationIndex", 1, &E_GL_NV_mesh_shader);
-
+ if (profile != EEsProfile) {
+ symbolTable.setVariableExtensions("gl_WorkGroupSize", Num_AEP_mesh_shader, AEP_mesh_shader);
+ symbolTable.setVariableExtensions("gl_WorkGroupID", Num_AEP_mesh_shader, AEP_mesh_shader);
+ symbolTable.setVariableExtensions("gl_LocalInvocationID", Num_AEP_mesh_shader, AEP_mesh_shader);
+ symbolTable.setVariableExtensions("gl_GlobalInvocationID", Num_AEP_mesh_shader, AEP_mesh_shader);
+ symbolTable.setVariableExtensions("gl_LocalInvocationIndex", Num_AEP_mesh_shader, AEP_mesh_shader);
+ } else {
+ symbolTable.setVariableExtensions("gl_WorkGroupSize", 1, &E_GL_NV_mesh_shader);
+ symbolTable.setVariableExtensions("gl_WorkGroupID", 1, &E_GL_NV_mesh_shader);
+ symbolTable.setVariableExtensions("gl_LocalInvocationID", 1, &E_GL_NV_mesh_shader);
+ symbolTable.setVariableExtensions("gl_GlobalInvocationID", 1, &E_GL_NV_mesh_shader);
+ symbolTable.setVariableExtensions("gl_LocalInvocationIndex", 1, &E_GL_NV_mesh_shader);
+ }
BuiltInVariable("gl_PrimitiveCountNV", EbvPrimitiveCountNV, symbolTable);
BuiltInVariable("gl_PrimitiveIndicesNV", EbvPrimitiveIndicesNV, symbolTable);
BuiltInVariable("gl_MeshViewCountNV", EbvMeshViewCountNV, symbolTable);
@@ -8936,12 +9028,54 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.setVariableExtensions("gl_MaxMeshViewCountNV", 1, &E_GL_NV_mesh_shader);
// builtin functions
- symbolTable.setFunctionExtensions("barrier", 1, &E_GL_NV_mesh_shader);
- symbolTable.setFunctionExtensions("memoryBarrierShared", 1, &E_GL_NV_mesh_shader);
- symbolTable.setFunctionExtensions("groupMemoryBarrier", 1, &E_GL_NV_mesh_shader);
+ if (profile != EEsProfile) {
+ symbolTable.setFunctionExtensions("barrier", Num_AEP_mesh_shader, AEP_mesh_shader);
+ symbolTable.setFunctionExtensions("memoryBarrierShared", Num_AEP_mesh_shader, AEP_mesh_shader);
+ symbolTable.setFunctionExtensions("groupMemoryBarrier", Num_AEP_mesh_shader, AEP_mesh_shader);
+ } else {
+ symbolTable.setFunctionExtensions("barrier", 1, &E_GL_NV_mesh_shader);
+ symbolTable.setFunctionExtensions("memoryBarrierShared", 1, &E_GL_NV_mesh_shader);
+ symbolTable.setFunctionExtensions("groupMemoryBarrier", 1, &E_GL_NV_mesh_shader);
+ }
+ symbolTable.setFunctionExtensions("writePackedPrimitiveIndices4x8NV", 1, &E_GL_NV_mesh_shader);
}
if (profile != EEsProfile && version >= 450) {
+ // GL_EXT_mesh_shader
+ symbolTable.setVariableExtensions("gl_PrimitivePointIndicesEXT", 1, &E_GL_EXT_mesh_shader);
+ symbolTable.setVariableExtensions("gl_PrimitiveLineIndicesEXT", 1, &E_GL_EXT_mesh_shader);
+ symbolTable.setVariableExtensions("gl_PrimitiveTriangleIndicesEXT", 1, &E_GL_EXT_mesh_shader);
+ symbolTable.setVariableExtensions("gl_NumWorkGroups", 1, &E_GL_EXT_mesh_shader);
+
+ BuiltInVariable("gl_PrimitivePointIndicesEXT", EbvPrimitivePointIndicesEXT, symbolTable);
+ BuiltInVariable("gl_PrimitiveLineIndicesEXT", EbvPrimitiveLineIndicesEXT, symbolTable);
+ BuiltInVariable("gl_PrimitiveTriangleIndicesEXT", EbvPrimitiveTriangleIndicesEXT, symbolTable);
+ BuiltInVariable("gl_NumWorkGroups", EbvNumWorkGroups, symbolTable);
+
+ symbolTable.setVariableExtensions("gl_MeshVerticesEXT", "gl_Position", 1, &E_GL_EXT_mesh_shader);
+ symbolTable.setVariableExtensions("gl_MeshVerticesEXT", "gl_PointSize", 1, &E_GL_EXT_mesh_shader);
+ symbolTable.setVariableExtensions("gl_MeshVerticesEXT", "gl_ClipDistance", 1, &E_GL_EXT_mesh_shader);
+ symbolTable.setVariableExtensions("gl_MeshVerticesEXT", "gl_CullDistance", 1, &E_GL_EXT_mesh_shader);
+
+ BuiltInVariable("gl_MeshVerticesEXT", "gl_Position", EbvPosition, symbolTable);
+ BuiltInVariable("gl_MeshVerticesEXT", "gl_PointSize", EbvPointSize, symbolTable);
+ BuiltInVariable("gl_MeshVerticesEXT", "gl_ClipDistance", EbvClipDistance, symbolTable);
+ BuiltInVariable("gl_MeshVerticesEXT", "gl_CullDistance", EbvCullDistance, symbolTable);
+
+ symbolTable.setVariableExtensions("gl_MeshPrimitivesEXT", "gl_PrimitiveID", 1, &E_GL_EXT_mesh_shader);
+ symbolTable.setVariableExtensions("gl_MeshPrimitivesEXT", "gl_Layer", 1, &E_GL_EXT_mesh_shader);
+ symbolTable.setVariableExtensions("gl_MeshPrimitivesEXT", "gl_ViewportIndex", 1, &E_GL_EXT_mesh_shader);
+ symbolTable.setVariableExtensions("gl_MeshPrimitivesEXT", "gl_CullPrimitiveEXT", 1, &E_GL_EXT_mesh_shader);
+ symbolTable.setVariableExtensions("gl_MeshPrimitivesEXT", "gl_PrimitiveShadingRateEXT", 1, &E_GL_EXT_mesh_shader);
+
+ BuiltInVariable("gl_MeshPrimitivesEXT", "gl_PrimitiveID", EbvPrimitiveId, symbolTable);
+ BuiltInVariable("gl_MeshPrimitivesEXT", "gl_Layer", EbvLayer, symbolTable);
+ BuiltInVariable("gl_MeshPrimitivesEXT", "gl_ViewportIndex", EbvViewportIndex, symbolTable);
+ BuiltInVariable("gl_MeshPrimitivesEXT", "gl_CullPrimitiveEXT", EbvCullPrimitiveEXT, symbolTable);
+ BuiltInVariable("gl_MeshPrimitivesEXT", "gl_PrimitiveShadingRateEXT", EbvPrimitiveShadingRateKHR, symbolTable);
+
+ symbolTable.setFunctionExtensions("SetMeshOutputsEXT", 1, &E_GL_EXT_mesh_shader);
+
// GL_EXT_device_group
symbolTable.setVariableExtensions("gl_DeviceIndex", 1, &E_GL_EXT_device_group);
BuiltInVariable("gl_DeviceIndex", EbvDeviceIndex, symbolTable);
@@ -8952,6 +9086,9 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
if (version >= 460) {
BuiltInVariable("gl_DrawID", EbvDrawId, symbolTable);
}
+ // GL_EXT_multiview
+ BuiltInVariable("gl_ViewIndex", EbvViewIndex, symbolTable);
+ symbolTable.setVariableExtensions("gl_ViewIndex", 1, &E_GL_EXT_multiview);
// GL_ARB_shader_ballot
symbolTable.setVariableExtensions("gl_SubGroupSizeARB", 1, &E_GL_ARB_shader_ballot);
@@ -8969,9 +9106,12 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
BuiltInVariable("gl_SubGroupLeMaskARB", EbvSubGroupLeMask, symbolTable);
BuiltInVariable("gl_SubGroupLtMaskARB", EbvSubGroupLtMask, symbolTable);
- if (spvVersion.vulkan > 0)
+ if (spvVersion.vulkan > 0) {
// Treat "gl_SubGroupSizeARB" as shader input instead of uniform for Vulkan
SpecialQualifier("gl_SubGroupSizeARB", EvqVaryingIn, EbvSubGroupSize, symbolTable);
+ if (language == EShLangFragment)
+ ModifyFlatDecoration("gl_SubGroupSizeARB", true, symbolTable);
+ }
else
BuiltInVariable("gl_SubGroupSizeARB", EbvSubGroupSize, symbolTable);
}
@@ -9021,16 +9161,24 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
}
break;
- case EShLangTaskNV:
+ case EShLangTask:
if ((profile != EEsProfile && version >= 450) || (profile == EEsProfile && version >= 320)) {
symbolTable.setVariableExtensions("gl_TaskCountNV", 1, &E_GL_NV_mesh_shader);
- symbolTable.setVariableExtensions("gl_WorkGroupSize", 1, &E_GL_NV_mesh_shader);
- symbolTable.setVariableExtensions("gl_WorkGroupID", 1, &E_GL_NV_mesh_shader);
- symbolTable.setVariableExtensions("gl_LocalInvocationID", 1, &E_GL_NV_mesh_shader);
- symbolTable.setVariableExtensions("gl_GlobalInvocationID", 1, &E_GL_NV_mesh_shader);
- symbolTable.setVariableExtensions("gl_LocalInvocationIndex", 1, &E_GL_NV_mesh_shader);
symbolTable.setVariableExtensions("gl_MeshViewCountNV", 1, &E_GL_NV_mesh_shader);
symbolTable.setVariableExtensions("gl_MeshViewIndicesNV", 1, &E_GL_NV_mesh_shader);
+ if (profile != EEsProfile) {
+ symbolTable.setVariableExtensions("gl_WorkGroupSize", Num_AEP_mesh_shader, AEP_mesh_shader);
+ symbolTable.setVariableExtensions("gl_WorkGroupID", Num_AEP_mesh_shader, AEP_mesh_shader);
+ symbolTable.setVariableExtensions("gl_LocalInvocationID", Num_AEP_mesh_shader, AEP_mesh_shader);
+ symbolTable.setVariableExtensions("gl_GlobalInvocationID", Num_AEP_mesh_shader, AEP_mesh_shader);
+ symbolTable.setVariableExtensions("gl_LocalInvocationIndex", Num_AEP_mesh_shader, AEP_mesh_shader);
+ } else {
+ symbolTable.setVariableExtensions("gl_WorkGroupSize", 1, &E_GL_NV_mesh_shader);
+ symbolTable.setVariableExtensions("gl_WorkGroupID", 1, &E_GL_NV_mesh_shader);
+ symbolTable.setVariableExtensions("gl_LocalInvocationID", 1, &E_GL_NV_mesh_shader);
+ symbolTable.setVariableExtensions("gl_GlobalInvocationID", 1, &E_GL_NV_mesh_shader);
+ symbolTable.setVariableExtensions("gl_LocalInvocationIndex", 1, &E_GL_NV_mesh_shader);
+ }
BuiltInVariable("gl_TaskCountNV", EbvTaskCountNV, symbolTable);
BuiltInVariable("gl_WorkGroupSize", EbvWorkGroupSize, symbolTable);
@@ -9044,12 +9192,23 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.setVariableExtensions("gl_MaxTaskWorkGroupSizeNV", 1, &E_GL_NV_mesh_shader);
symbolTable.setVariableExtensions("gl_MaxMeshViewCountNV", 1, &E_GL_NV_mesh_shader);
- symbolTable.setFunctionExtensions("barrier", 1, &E_GL_NV_mesh_shader);
- symbolTable.setFunctionExtensions("memoryBarrierShared", 1, &E_GL_NV_mesh_shader);
- symbolTable.setFunctionExtensions("groupMemoryBarrier", 1, &E_GL_NV_mesh_shader);
+ if (profile != EEsProfile) {
+ symbolTable.setFunctionExtensions("barrier", Num_AEP_mesh_shader, AEP_mesh_shader);
+ symbolTable.setFunctionExtensions("memoryBarrierShared", Num_AEP_mesh_shader, AEP_mesh_shader);
+ symbolTable.setFunctionExtensions("groupMemoryBarrier", Num_AEP_mesh_shader, AEP_mesh_shader);
+ } else {
+ symbolTable.setFunctionExtensions("barrier", 1, &E_GL_NV_mesh_shader);
+ symbolTable.setFunctionExtensions("memoryBarrierShared", 1, &E_GL_NV_mesh_shader);
+ symbolTable.setFunctionExtensions("groupMemoryBarrier", 1, &E_GL_NV_mesh_shader);
+ }
}
if (profile != EEsProfile && version >= 450) {
+ // GL_EXT_mesh_shader
+ symbolTable.setFunctionExtensions("EmitMeshTasksEXT", 1, &E_GL_EXT_mesh_shader);
+ symbolTable.setVariableExtensions("gl_NumWorkGroups", 1, &E_GL_EXT_mesh_shader);
+ BuiltInVariable("gl_NumWorkGroups", EbvNumWorkGroups, symbolTable);
+
// GL_EXT_device_group
symbolTable.setVariableExtensions("gl_DeviceIndex", 1, &E_GL_EXT_device_group);
BuiltInVariable("gl_DeviceIndex", EbvDeviceIndex, symbolTable);
@@ -9077,9 +9236,12 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
BuiltInVariable("gl_SubGroupLeMaskARB", EbvSubGroupLeMask, symbolTable);
BuiltInVariable("gl_SubGroupLtMaskARB", EbvSubGroupLtMask, symbolTable);
- if (spvVersion.vulkan > 0)
+ if (spvVersion.vulkan > 0) {
// Treat "gl_SubGroupSizeARB" as shader input instead of uniform for Vulkan
SpecialQualifier("gl_SubGroupSizeARB", EvqVaryingIn, EbvSubGroupSize, symbolTable);
+ if (language == EShLangFragment)
+ ModifyFlatDecoration("gl_SubGroupSizeARB", true, symbolTable);
+ }
else
BuiltInVariable("gl_SubGroupSizeARB", EbvSubGroupSize, symbolTable);
}
@@ -9673,17 +9835,27 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.relateToOperator("executeCallableEXT", EOpExecuteCallableKHR);
}
break;
- case EShLangMeshNV:
+ case EShLangMesh:
if ((profile != EEsProfile && version >= 450) || (profile == EEsProfile && version >= 320)) {
symbolTable.relateToOperator("writePackedPrimitiveIndices4x8NV", EOpWritePackedPrimitiveIndices4x8NV);
+ symbolTable.relateToOperator("memoryBarrierShared", EOpMemoryBarrierShared);
+ symbolTable.relateToOperator("groupMemoryBarrier", EOpGroupMemoryBarrier);
+ symbolTable.relateToOperator("subgroupMemoryBarrierShared", EOpSubgroupMemoryBarrierShared);
}
- // fall through
- case EShLangTaskNV:
+
+ if (profile != EEsProfile && version >= 450) {
+ symbolTable.relateToOperator("SetMeshOutputsEXT", EOpSetMeshOutputsEXT);
+ }
+ break;
+ case EShLangTask:
if ((profile != EEsProfile && version >= 450) || (profile == EEsProfile && version >= 320)) {
symbolTable.relateToOperator("memoryBarrierShared", EOpMemoryBarrierShared);
symbolTable.relateToOperator("groupMemoryBarrier", EOpGroupMemoryBarrier);
symbolTable.relateToOperator("subgroupMemoryBarrierShared", EOpSubgroupMemoryBarrierShared);
}
+ if (profile != EEsProfile && version >= 450) {
+ symbolTable.relateToOperator("EmitMeshTasksEXT", EOpEmitMeshTasksEXT);
+ }
break;
default:
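The Initialize.cpp hunks above declare the GL_EXT_mesh_shader built-ins (EmitMeshTasksEXT, SetMeshOutputsEXT, gl_MeshVerticesEXT, gl_MeshPrimitivesEXT and the gl_Primitive*IndicesEXT arrays) and relate them to the new EOpEmitMeshTasksEXT/EOpSetMeshOutputsEXT operators. For illustration only, a minimal mesh stage of the kind those declarations are meant to accept, kept as a C++ raw string so it could be handed to the front end; the layout values and the variable name are arbitrary example choices, not part of the patch:

    // Hypothetical example shader, not from the patch.
    static const char* kMeshShaderExample = R"(
        #version 460
        #extension GL_EXT_mesh_shader : require
        layout(local_size_x = 1) in;
        layout(triangles, max_vertices = 3, max_primitives = 1) out;
        void main() {
            SetMeshOutputsEXT(3u, 1u);                 // vertex count, primitive count
            gl_MeshVerticesEXT[0].gl_Position = vec4(-1.0, -1.0, 0.0, 1.0);
            gl_MeshVerticesEXT[1].gl_Position = vec4( 3.0, -1.0, 0.0, 1.0);
            gl_MeshVerticesEXT[2].gl_Position = vec4(-1.0,  3.0, 0.0, 1.0);
            gl_PrimitiveTriangleIndicesEXT[0] = uvec3(0u, 1u, 2u);
        }
    )";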
diff --git a/thirdparty/glslang/glslang/MachineIndependent/Intermediate.cpp b/thirdparty/glslang/glslang/MachineIndependent/Intermediate.cpp
index 6aea5b3d7c..6a43ef3e84 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/Intermediate.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/Intermediate.cpp
@@ -2733,10 +2733,10 @@ TIntermAggregate* TIntermediate::addForLoop(TIntermNode* body, TIntermNode* init
TIntermAggregate* loopSequence = (initializer == nullptr ||
initializer->getAsAggregate() == nullptr) ? makeAggregate(initializer, loc)
: initializer->getAsAggregate();
- if (loopSequence != nullptr && loopSequence->getOp() == EOpSequence)
+ if (loopSequence != nullptr && (loopSequence->getOp() == EOpSequence || loopSequence->getOp() == EOpScope))
loopSequence->setOp(EOpNull);
loopSequence = growAggregate(loopSequence, node);
- loopSequence->setOperator(EOpSequence);
+ loopSequence->setOperator(getDebugInfo() ? EOpScope : EOpSequence);
return loopSequence;
}
@@ -2766,7 +2766,7 @@ void TIntermBranch::updatePrecision(TPrecisionQualifier parentPrecision)
return;
if (exp->getBasicType() == EbtInt || exp->getBasicType() == EbtUint ||
- exp->getBasicType() == EbtFloat || exp->getBasicType() == EbtFloat16) {
+ exp->getBasicType() == EbtFloat) {
if (parentPrecision != EpqNone && exp->getQualifier().precision == EpqNone) {
exp->propagatePrecision(parentPrecision);
}
@@ -3284,7 +3284,7 @@ bool TIntermediate::promoteUnary(TIntermUnary& node)
void TIntermUnary::updatePrecision()
{
if (getBasicType() == EbtInt || getBasicType() == EbtUint ||
- getBasicType() == EbtFloat || getBasicType() == EbtFloat16) {
+ getBasicType() == EbtFloat) {
if (operand->getQualifier().precision > getQualifier().precision)
getQualifier().precision = operand->getQualifier().precision;
}
@@ -3785,7 +3785,7 @@ bool TIntermediate::promoteAggregate(TIntermAggregate& node)
void TIntermAggregate::updatePrecision()
{
if (getBasicType() == EbtInt || getBasicType() == EbtUint ||
- getBasicType() == EbtFloat || getBasicType() == EbtFloat16) {
+ getBasicType() == EbtFloat) {
TPrecisionQualifier maxPrecision = EpqNone;
TIntermSequence operands = getSequence();
for (unsigned int i = 0; i < operands.size(); ++i) {
@@ -3807,7 +3807,7 @@ void TIntermAggregate::updatePrecision()
void TIntermBinary::updatePrecision()
{
if (getBasicType() == EbtInt || getBasicType() == EbtUint ||
- getBasicType() == EbtFloat || getBasicType() == EbtFloat16) {
+ getBasicType() == EbtFloat) {
if (op == EOpRightShift || op == EOpLeftShift) {
// For shifts get precision from left side only and thus no need to propagate
getQualifier().precision = left->getQualifier().precision;
diff --git a/thirdparty/glslang/glslang/MachineIndependent/ParseContextBase.cpp b/thirdparty/glslang/glslang/MachineIndependent/ParseContextBase.cpp
index 1da50d62f9..616580f993 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/ParseContextBase.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/ParseContextBase.cpp
@@ -74,6 +74,9 @@ void C_DECL TParseContextBase::error(const TSourceLoc& loc, const char* szReason
{
if (messages & EShMsgOnlyPreprocessor)
return;
+ // If enhanced message readability is enabled, only print the first error
+ if (messages & EShMsgEnhanced && numErrors > 0)
+ return;
va_list args;
va_start(args, szExtraInfoFormat);
outputMessage(loc, szReason, szToken, szExtraInfoFormat, EPrefixError, args);
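EShMsgEnhanced (exposed as GLSLANG_MSG_ENHANCED earlier in this patch) makes TParseContextBase::error stop after the first reported error. A hedged sketch of opting in at parse time through the C interface, assuming glslang_input_t carries the glslang_messages_t bit-mask in its messages member as in the upstream header, and that shader and input were prepared elsewhere:

    #include <glslang/Include/glslang_c_interface.h>

    // Request the single-error "enhanced" diagnostics before parsing.
    int parse_with_enhanced_messages(glslang_shader_t* shader, glslang_input_t input)
    {
        // OR in whatever other GLSLANG_MSG_* bits the application already uses.
        input.messages = (glslang_messages_t)(input.messages | GLSLANG_MSG_ENHANCED);
        return glslang_shader_parse(shader, &input);   // parse result as before
    }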
diff --git a/thirdparty/glslang/glslang/MachineIndependent/ParseHelper.cpp b/thirdparty/glslang/glslang/MachineIndependent/ParseHelper.cpp
index fa291160cb..e2ac43ca19 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/ParseHelper.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/ParseHelper.cpp
@@ -502,6 +502,16 @@ TIntermTyped* TParseContext::handleVariable(const TSourceLoc& loc, TSymbol* symb
error(loc, "cannot be used (maybe an instance name is needed)", string->c_str(), "");
variable = nullptr;
}
+
+ if (language == EShLangMesh && variable) {
+ TLayoutGeometry primitiveType = intermediate.getOutputPrimitive();
+ if ((variable->getMangledName() == "gl_PrimitiveTriangleIndicesEXT" && primitiveType != ElgTriangles) ||
+ (variable->getMangledName() == "gl_PrimitiveLineIndicesEXT" && primitiveType != ElgLines) ||
+ (variable->getMangledName() == "gl_PrimitivePointIndicesEXT" && primitiveType != ElgPoints)) {
+ error(loc, "cannot be used (ouput primitive type mismatch)", string->c_str(), "");
+ variable = nullptr;
+ }
+ }
} else {
if (symbol)
error(loc, "variable name expected", string->c_str(), "");
@@ -716,8 +726,8 @@ bool TParseContext::isIoResizeArray(const TType& type) const
(language == EShLangTessControl && type.getQualifier().storage == EvqVaryingOut &&
! type.getQualifier().patch) ||
(language == EShLangFragment && type.getQualifier().storage == EvqVaryingIn &&
- type.getQualifier().pervertexNV) ||
- (language == EShLangMeshNV && type.getQualifier().storage == EvqVaryingOut &&
+ (type.getQualifier().pervertexNV || type.getQualifier().pervertexEXT)) ||
+ (language == EShLangMesh && type.getQualifier().storage == EvqVaryingOut &&
!type.getQualifier().perTaskNV));
}
@@ -794,7 +804,7 @@ void TParseContext::checkIoArraysConsistency(const TSourceLoc &loc, bool tailOnl
// As I/O array sizes don't change, fetch requiredSize only once,
// except for mesh shaders which could have different I/O array sizes based on type qualifiers.
- if (firstIteration || (language == EShLangMeshNV)) {
+ if (firstIteration || (language == EShLangMesh)) {
requiredSize = getIoArrayImplicitSize(type.getQualifier(), &featureString);
if (requiredSize == 0)
break;
@@ -823,10 +833,11 @@ int TParseContext::getIoArrayImplicitSize(const TQualifier &qualifier, TString *
// Number of vertices for Fragment shader is always three.
expectedSize = 3;
str = "vertices";
- } else if (language == EShLangMeshNV) {
+ } else if (language == EShLangMesh) {
unsigned int maxPrimitives =
intermediate.getPrimitives() != TQualifier::layoutNotSet ? intermediate.getPrimitives() : 0;
- if (qualifier.builtIn == EbvPrimitiveIndicesNV) {
+ if (qualifier.builtIn == EbvPrimitiveIndicesNV || qualifier.builtIn == EbvPrimitiveTriangleIndicesEXT ||
+ qualifier.builtIn == EbvPrimitiveLineIndicesEXT || qualifier.builtIn == EbvPrimitivePointIndicesEXT) {
expectedSize = maxPrimitives * TQualifier::mapGeometryToSize(intermediate.getOutputPrimitive());
str = "max_primitives*";
str += TQualifier::getGeometryString(intermediate.getOutputPrimitive());
@@ -856,9 +867,9 @@ void TParseContext::checkIoArrayConsistency(const TSourceLoc& loc, int requiredS
error(loc, "inconsistent output number of vertices for array size of", feature, name.c_str());
else if (language == EShLangFragment) {
if (type.getOuterArraySize() > requiredSize)
- error(loc, " cannot be greater than 3 for pervertexNV", feature, name.c_str());
+ error(loc, " cannot be greater than 3 for pervertexEXT", feature, name.c_str());
}
- else if (language == EShLangMeshNV)
+ else if (language == EShLangMesh)
error(loc, "inconsistent output array size of", feature, name.c_str());
else
assert(0);
@@ -902,8 +913,10 @@ TIntermTyped* TParseContext::handleBinaryMath(const TSourceLoc& loc, const char*
result = intermediate.addBinaryMath(op, left, right, loc);
}
- if (result == nullptr)
- binaryOpError(loc, str, left->getCompleteString(), right->getCompleteString());
+ if (result == nullptr) {
+ bool enhanced = intermediate.getEnhancedMsgs();
+ binaryOpError(loc, str, left->getCompleteString(enhanced), right->getCompleteString(enhanced));
+ }
return result;
}
@@ -926,8 +939,10 @@ TIntermTyped* TParseContext::handleUnaryMath(const TSourceLoc& loc, const char*
if (result)
return result;
- else
- unaryOpError(loc, str, childNode->getCompleteString());
+ else {
+ bool enhanced = intermediate.getEnhancedMsgs();
+ unaryOpError(loc, str, childNode->getCompleteString(enhanced));
+ }
return childNode;
}
@@ -953,8 +968,8 @@ TIntermTyped* TParseContext::handleDotDereference(const TSourceLoc& loc, TInterm
requireProfile(loc, ~EEsProfile, feature);
profileRequires(loc, ~EEsProfile, 420, E_GL_ARB_shading_language_420pack, feature);
} else if (!base->getType().isCoopMat()) {
- error(loc, "does not operate on this type:", field.c_str(), base->getType().getCompleteString().c_str());
-
+ bool enhanced = intermediate.getEnhancedMsgs();
+ error(loc, "does not operate on this type:", field.c_str(), base->getType().getCompleteString(enhanced).c_str());
return base;
}
@@ -1005,10 +1020,16 @@ TIntermTyped* TParseContext::handleDotDereference(const TSourceLoc& loc, TInterm
intermediate.addIoAccessed(field);
}
inheritMemoryQualifiers(base->getQualifier(), result->getWritableType().getQualifier());
- } else
- error(loc, "no such field in structure", field.c_str(), "");
+ } else {
+ auto baseSymbol = base;
+ while (baseSymbol->getAsSymbolNode() == nullptr)
+ baseSymbol = baseSymbol->getAsBinaryNode()->getLeft();
+ TString structName;
+ structName.append("\'").append(baseSymbol->getAsSymbolNode()->getName().c_str()).append( "\'");
+ error(loc, "no such field in structure", field.c_str(), structName.c_str());
+ }
} else
- error(loc, "does not apply to this type:", field.c_str(), base->getType().getCompleteString().c_str());
+ error(loc, "does not apply to this type:", field.c_str(), base->getType().getCompleteString(intermediate.getEnhancedMsgs()).c_str());
// Propagate noContraction up the dereference chain
if (base->getQualifier().isNoContraction())
@@ -1314,7 +1335,7 @@ TIntermTyped* TParseContext::handleFunctionCall(const TSourceLoc& loc, TFunction
//
result = addConstructor(loc, arguments, type);
if (result == nullptr)
- error(loc, "cannot construct with these arguments", type.getCompleteString().c_str(), "");
+ error(loc, "cannot construct with these arguments", type.getCompleteString(intermediate.getEnhancedMsgs()).c_str(), "");
}
} else {
//
@@ -1494,7 +1515,7 @@ TIntermTyped* TParseContext::handleBuiltInFunctionCall(TSourceLoc loc, TIntermNo
else
error(arguments->getLoc(), " wrong operand type", "Internal Error",
"built in unary operator function. Type: %s",
- static_cast<TIntermTyped*>(arguments)->getCompleteString().c_str());
+ static_cast<TIntermTyped*>(arguments)->getCompleteString(intermediate.getEnhancedMsgs()).c_str());
} else if (result->getAsOperator())
builtInOpCheck(loc, function, *result->getAsOperator());
@@ -1990,18 +2011,18 @@ void TParseContext::memorySemanticsCheck(const TSourceLoc& loc, const TFunction&
break;
}
- if ((semantics & gl_SemanticsAcquire) &&
+ if ((semantics & gl_SemanticsAcquire) &&
(callNode.getOp() == EOpAtomicStore || callNode.getOp() == EOpImageAtomicStore)) {
error(loc, "gl_SemanticsAcquire must not be used with (image) atomic store",
fnCandidate.getName().c_str(), "");
}
- if ((semantics & gl_SemanticsRelease) &&
+ if ((semantics & gl_SemanticsRelease) &&
(callNode.getOp() == EOpAtomicLoad || callNode.getOp() == EOpImageAtomicLoad)) {
error(loc, "gl_SemanticsRelease must not be used with (image) atomic load",
fnCandidate.getName().c_str(), "");
}
- if ((semantics & gl_SemanticsAcquireRelease) &&
- (callNode.getOp() == EOpAtomicStore || callNode.getOp() == EOpImageAtomicStore ||
+ if ((semantics & gl_SemanticsAcquireRelease) &&
+ (callNode.getOp() == EOpAtomicStore || callNode.getOp() == EOpImageAtomicStore ||
callNode.getOp() == EOpAtomicLoad || callNode.getOp() == EOpImageAtomicLoad)) {
error(loc, "gl_SemanticsAcquireRelease must not be used with (image) atomic load/store",
fnCandidate.getName().c_str(), "");
@@ -2317,7 +2338,7 @@ void TParseContext::builtInOpCheck(const TSourceLoc& loc, const TFunction& fnCan
error(loc, "argument must be compile-time constant", "payload number", "a");
else {
unsigned int location = (*argp)[10]->getAsConstantUnion()->getAsConstantUnion()->getConstArray()[0].getUConst();
- if (intermediate.checkLocationRT(0, location) < 0)
+ if (!extensionTurnedOn(E_GL_EXT_spirv_intrinsics) && intermediate.checkLocationRT(0, location) < 0)
error(loc, "with layout(location =", "no rayPayloadEXT/rayPayloadInEXT declared", "%d)", location);
}
break;
@@ -2330,7 +2351,7 @@ void TParseContext::builtInOpCheck(const TSourceLoc& loc, const TFunction& fnCan
error(loc, "argument must be compile-time constant", "callable data number", "");
else {
unsigned int location = (*argp)[1]->getAsConstantUnion()->getAsConstantUnion()->getConstArray()[0].getUConst();
- if (intermediate.checkLocationRT(1, location) < 0)
+ if (!extensionTurnedOn(E_GL_EXT_spirv_intrinsics) && intermediate.checkLocationRT(1, location) < 0)
error(loc, "with layout(location =", "no callableDataEXT/callableDataInEXT declared", "%d)", location);
}
break;
@@ -2452,7 +2473,7 @@ void TParseContext::builtInOpCheck(const TSourceLoc& loc, const TFunction& fnCan
const TIntermTyped* base = TIntermediate::findLValueBase(arg0, true , true);
const TType* refType = (base->getType().isReference()) ? base->getType().getReferentType() : nullptr;
const TQualifier& qualifier = (refType != nullptr) ? refType->getQualifier() : base->getType().getQualifier();
- if (qualifier.storage != EvqShared && qualifier.storage != EvqBuffer)
+ if (qualifier.storage != EvqShared && qualifier.storage != EvqBuffer && qualifier.storage != EvqtaskPayloadSharedEXT)
error(loc,"Atomic memory function can only be used for shader storage block member or shared variable.",
fnCandidate.getName().c_str(), "");
@@ -2550,7 +2571,7 @@ void TParseContext::builtInOpCheck(const TSourceLoc& loc, const TFunction& fnCan
}
if (profile != EEsProfile && version < 450) {
- if ((*argp)[0]->getAsTyped()->getBasicType() != EbtFloat &&
+ if ((*argp)[0]->getAsTyped()->getBasicType() != EbtFloat &&
(*argp)[0]->getAsTyped()->getBasicType() != EbtDouble &&
(*argp)[1]->getAsTyped()->getBasicType() != EbtFloat &&
(*argp)[1]->getAsTyped()->getBasicType() != EbtDouble &&
@@ -2599,23 +2620,24 @@ void TParseContext::builtInOpCheck(const TSourceLoc& loc, const TFunction& fnCan
// Check that if extended types are being used that the correct extensions are enabled.
if (arg0 != nullptr) {
const TType& type = arg0->getType();
+ bool enhanced = intermediate.getEnhancedMsgs();
switch (type.getBasicType()) {
default:
break;
case EbtInt8:
case EbtUint8:
- requireExtensions(loc, 1, &E_GL_EXT_shader_subgroup_extended_types_int8, type.getCompleteString().c_str());
+ requireExtensions(loc, 1, &E_GL_EXT_shader_subgroup_extended_types_int8, type.getCompleteString(enhanced).c_str());
break;
case EbtInt16:
case EbtUint16:
- requireExtensions(loc, 1, &E_GL_EXT_shader_subgroup_extended_types_int16, type.getCompleteString().c_str());
+ requireExtensions(loc, 1, &E_GL_EXT_shader_subgroup_extended_types_int16, type.getCompleteString(enhanced).c_str());
break;
case EbtInt64:
case EbtUint64:
- requireExtensions(loc, 1, &E_GL_EXT_shader_subgroup_extended_types_int64, type.getCompleteString().c_str());
+ requireExtensions(loc, 1, &E_GL_EXT_shader_subgroup_extended_types_int64, type.getCompleteString(enhanced).c_str());
break;
case EbtFloat16:
- requireExtensions(loc, 1, &E_GL_EXT_shader_subgroup_extended_types_float16, type.getCompleteString().c_str());
+ requireExtensions(loc, 1, &E_GL_EXT_shader_subgroup_extended_types_float16, type.getCompleteString(enhanced).c_str());
break;
}
}
@@ -2788,7 +2810,10 @@ TFunction* TParseContext::handleConstructorCall(const TSourceLoc& loc, const TPu
TOperator op = intermediate.mapTypeToConstructorOp(type);
if (op == EOpNull) {
- error(loc, "cannot construct this type", type.getBasicString(), "");
+ if (intermediate.getEnhancedMsgs() && type.getBasicType() == EbtSampler)
+ error(loc, "function not supported in this version; use texture() instead", "texture*D*", "");
+ else
+ error(loc, "cannot construct this type", type.getBasicString(), "");
op = EOpConstructFloat;
TType errorType(EbtFloat);
type.shallowCopy(errorType);
@@ -2974,7 +2999,17 @@ bool TParseContext::lValueErrorCheck(const TSourceLoc& loc, const char* op, TInt
if (isEsProfile() && intermediate.getEarlyFragmentTests())
message = "can't modify gl_FragDepth if using early_fragment_tests";
break;
+ case EvqFragStencil:
+ intermediate.setStencilReplacing();
+ // "In addition, it is an error to statically write to gl_FragDepth in the fragment shader."
+ if (isEsProfile() && intermediate.getEarlyFragmentTests())
+ message = "can't modify EvqFragStencil if using early_fragment_tests";
+ break;
+ case EvqtaskPayloadSharedEXT:
+ if (language == EShLangMesh)
+ message = "can't modify variable with storage qualifier taskPayloadSharedEXT in mesh shaders";
+ break;
default:
break;
}
@@ -3013,7 +3048,7 @@ void TParseContext::rValueErrorCheck(const TSourceLoc& loc, const char* op, TInt
if (symNode && symNode->getQualifier().isExplicitInterpolation())
error(loc, "can't read from explicitly-interpolated object: ", op, symNode->getName().c_str());
- // local_size_{xyz} must be assigned or specialized before gl_WorkGroupSize can be assigned.
+ // local_size_{xyz} must be assigned or specialized before gl_WorkGroupSize can be assigned.
if(node->getQualifier().builtIn == EbvWorkGroupSize &&
!(intermediate.isLocalSizeSet() || intermediate.isLocalSizeSpecialized()))
error(loc, "can't read from gl_WorkGroupSize before a fixed workgroup size has been declared", op, "");
@@ -3198,6 +3233,12 @@ bool TParseContext::constructorError(const TSourceLoc& loc, TIntermNode* node, T
break;
}
+ TString constructorString;
+ if (intermediate.getEnhancedMsgs())
+ constructorString.append(type.getCompleteString(true, false, false, true)).append(" constructor");
+ else
+ constructorString.append("constructor");
+
// See if it's a matrix
bool constructingMatrix = false;
switch (op) {
@@ -3255,7 +3296,7 @@ bool TParseContext::constructorError(const TSourceLoc& loc, TIntermNode* node, T
if (function[arg].type->isArray()) {
if (function[arg].type->isUnsizedArray()) {
// Can't construct from an unsized array.
- error(loc, "array argument must be sized", "constructor", "");
+ error(loc, "array argument must be sized", constructorString.c_str(), "");
return true;
}
arrayArg = true;
@@ -3285,13 +3326,13 @@ bool TParseContext::constructorError(const TSourceLoc& loc, TIntermNode* node, T
intArgument = true;
if (type.isStruct()) {
if (function[arg].type->contains16BitFloat()) {
- requireFloat16Arithmetic(loc, "constructor", "can't construct structure containing 16-bit type");
+ requireFloat16Arithmetic(loc, constructorString.c_str(), "can't construct structure containing 16-bit type");
}
if (function[arg].type->contains16BitInt()) {
- requireInt16Arithmetic(loc, "constructor", "can't construct structure containing 16-bit type");
+ requireInt16Arithmetic(loc, constructorString.c_str(), "can't construct structure containing 16-bit type");
}
if (function[arg].type->contains8BitInt()) {
- requireInt8Arithmetic(loc, "constructor", "can't construct structure containing 8-bit type");
+ requireInt8Arithmetic(loc, constructorString.c_str(), "can't construct structure containing 8-bit type");
}
}
}
@@ -3305,9 +3346,9 @@ bool TParseContext::constructorError(const TSourceLoc& loc, TIntermNode* node, T
case EOpConstructF16Vec3:
case EOpConstructF16Vec4:
if (type.isArray())
- requireFloat16Arithmetic(loc, "constructor", "16-bit arrays not supported");
+ requireFloat16Arithmetic(loc, constructorString.c_str(), "16-bit arrays not supported");
if (type.isVector() && function.getParamCount() != 1)
- requireFloat16Arithmetic(loc, "constructor", "16-bit vectors only take vector types");
+ requireFloat16Arithmetic(loc, constructorString.c_str(), "16-bit vectors only take vector types");
break;
case EOpConstructUint16:
case EOpConstructU16Vec2:
@@ -3318,9 +3359,9 @@ bool TParseContext::constructorError(const TSourceLoc& loc, TIntermNode* node, T
case EOpConstructI16Vec3:
case EOpConstructI16Vec4:
if (type.isArray())
- requireInt16Arithmetic(loc, "constructor", "16-bit arrays not supported");
+ requireInt16Arithmetic(loc, constructorString.c_str(), "16-bit arrays not supported");
if (type.isVector() && function.getParamCount() != 1)
- requireInt16Arithmetic(loc, "constructor", "16-bit vectors only take vector types");
+ requireInt16Arithmetic(loc, constructorString.c_str(), "16-bit vectors only take vector types");
break;
case EOpConstructUint8:
case EOpConstructU8Vec2:
@@ -3331,9 +3372,9 @@ bool TParseContext::constructorError(const TSourceLoc& loc, TIntermNode* node, T
case EOpConstructI8Vec3:
case EOpConstructI8Vec4:
if (type.isArray())
- requireInt8Arithmetic(loc, "constructor", "8-bit arrays not supported");
+ requireInt8Arithmetic(loc, constructorString.c_str(), "8-bit arrays not supported");
if (type.isVector() && function.getParamCount() != 1)
- requireInt8Arithmetic(loc, "constructor", "8-bit vectors only take vector types");
+ requireInt8Arithmetic(loc, constructorString.c_str(), "8-bit vectors only take vector types");
break;
default:
break;
@@ -3415,7 +3456,7 @@ bool TParseContext::constructorError(const TSourceLoc& loc, TIntermNode* node, T
if (type.isArray()) {
if (function.getParamCount() == 0) {
- error(loc, "array constructor must have at least one argument", "constructor", "");
+ error(loc, "array constructor must have at least one argument", constructorString.c_str(), "");
return true;
}
@@ -3423,7 +3464,7 @@ bool TParseContext::constructorError(const TSourceLoc& loc, TIntermNode* node, T
// auto adapt the constructor type to the number of arguments
type.changeOuterArraySize(function.getParamCount());
} else if (type.getOuterArraySize() != function.getParamCount()) {
- error(loc, "array constructor needs one argument per array element", "constructor", "");
+ error(loc, "array constructor needs one argument per array element", constructorString.c_str(), "");
return true;
}
@@ -3436,7 +3477,7 @@ bool TParseContext::constructorError(const TSourceLoc& loc, TIntermNode* node, T
// At least the dimensionalities have to match.
if (! function[0].type->isArray() ||
arraySizes.getNumDims() != function[0].type->getArraySizes()->getNumDims() + 1) {
- error(loc, "array constructor argument not correct type to construct array element", "constructor", "");
+ error(loc, "array constructor argument not correct type to construct array element", constructorString.c_str(), "");
return true;
}
@@ -3453,7 +3494,7 @@ bool TParseContext::constructorError(const TSourceLoc& loc, TIntermNode* node, T
}
if (arrayArg && op != EOpConstructStruct && ! type.isArrayOfArrays()) {
- error(loc, "constructing non-array constituent from array argument", "constructor", "");
+ error(loc, "constructing non-array constituent from array argument", constructorString.c_str(), "");
return true;
}
@@ -3463,51 +3504,51 @@ bool TParseContext::constructorError(const TSourceLoc& loc, TIntermNode* node, T
// "If a matrix argument is given to a matrix constructor,
// it is a compile-time error to have any other arguments."
if (function.getParamCount() != 1)
- error(loc, "matrix constructed from matrix can only have one argument", "constructor", "");
+ error(loc, "matrix constructed from matrix can only have one argument", constructorString.c_str(), "");
return false;
}
if (overFull) {
- error(loc, "too many arguments", "constructor", "");
+ error(loc, "too many arguments", constructorString.c_str(), "");
return true;
}
if (op == EOpConstructStruct && ! type.isArray() && (int)type.getStruct()->size() != function.getParamCount()) {
- error(loc, "Number of constructor parameters does not match the number of structure fields", "constructor", "");
+ error(loc, "Number of constructor parameters does not match the number of structure fields", constructorString.c_str(), "");
return true;
}
if ((op != EOpConstructStruct && size != 1 && size < type.computeNumComponents()) ||
(op == EOpConstructStruct && size < type.computeNumComponents())) {
- error(loc, "not enough data provided for construction", "constructor", "");
+ error(loc, "not enough data provided for construction", constructorString.c_str(), "");
return true;
}
if (type.isCoopMat() && function.getParamCount() != 1) {
- error(loc, "wrong number of arguments", "constructor", "");
+ error(loc, "wrong number of arguments", constructorString.c_str(), "");
return true;
}
if (type.isCoopMat() &&
!(function[0].type->isScalar() || function[0].type->isCoopMat())) {
- error(loc, "Cooperative matrix constructor argument must be scalar or cooperative matrix", "constructor", "");
+ error(loc, "Cooperative matrix constructor argument must be scalar or cooperative matrix", constructorString.c_str(), "");
return true;
}
TIntermTyped* typed = node->getAsTyped();
if (typed == nullptr) {
- error(loc, "constructor argument does not have a type", "constructor", "");
+ error(loc, "constructor argument does not have a type", constructorString.c_str(), "");
return true;
}
if (op != EOpConstructStruct && op != EOpConstructNonuniform && typed->getBasicType() == EbtSampler) {
- error(loc, "cannot convert a sampler", "constructor", "");
+ error(loc, "cannot convert a sampler", constructorString.c_str(), "");
return true;
}
if (op != EOpConstructStruct && typed->isAtomic()) {
- error(loc, "cannot convert an atomic_uint", "constructor", "");
+ error(loc, "cannot convert an atomic_uint", constructorString.c_str(), "");
return true;
}
if (typed->getBasicType() == EbtVoid) {
- error(loc, "cannot convert a void", "constructor", "");
+ error(loc, "cannot convert a void", constructorString.c_str(), "");
return true;
}
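
The constructorString hunks above make constructor diagnostics carry the full type description when glslang's enhanced-messages mode is on, and fall back to the plain word "constructor" otherwise. A minimal standalone C++ sketch of that labelling pattern (std::string and the stand-in completeTypeString() are assumptions for illustration, not glslang code):

#include <iostream>
#include <string>

// Stand-in for TType::getCompleteString(); in glslang this describes the full type.
static std::string completeTypeString() { return "highp 4-component vector of float"; }

// Mirror of the label construction in constructorError(): verbose only when
// enhanced messages are enabled.
static std::string constructorLabel(bool enhancedMsgs) {
    std::string label;
    if (enhancedMsgs)
        label.append(completeTypeString()).append(" constructor");
    else
        label.append("constructor");
    return label;
}

int main() {
    std::cout << constructorLabel(false) << "\n"; // constructor
    std::cout << constructorLabel(true)  << "\n"; // highp 4-component vector of float constructor
}
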
@@ -3786,7 +3827,7 @@ void TParseContext::globalQualifierTypeCheck(const TSourceLoc& loc, const TQuali
if (isTypeInt(publicType.basicType) || publicType.basicType == EbtDouble)
profileRequires(loc, EEsProfile, 300, nullptr, "shader input/output");
- if (!qualifier.flat && !qualifier.isExplicitInterpolation() && !qualifier.isPervertexNV()) {
+ if (!qualifier.flat && !qualifier.isExplicitInterpolation() && !qualifier.isPervertexNV() && !qualifier.isPervertexEXT()) {
if (isTypeInt(publicType.basicType) ||
publicType.basicType == EbtDouble ||
(publicType.userDef && ( publicType.userDef->containsBasicType(EbtInt)
@@ -3805,6 +3846,9 @@ void TParseContext::globalQualifierTypeCheck(const TSourceLoc& loc, const TQuali
if (qualifier.isPatch() && qualifier.isInterpolation())
error(loc, "cannot use interpolation qualifiers with patch", "patch", "");
+ if (qualifier.isTaskPayload() && publicType.basicType == EbtBlock)
+ error(loc, "taskPayloadSharedEXT variables should not be declared as interface blocks", "taskPayloadSharedEXT", "");
+
if (qualifier.isTaskMemory() && publicType.basicType != EbtBlock)
error(loc, "taskNV variables can be declared only as blocks", "taskNV", "");
@@ -3962,7 +4006,7 @@ void TParseContext::mergeQualifiers(const TSourceLoc& loc, TQualifier& dst, cons
(src.workgroupcoherent && (dst.coherent || dst.devicecoherent || dst.queuefamilycoherent || dst.subgroupcoherent || dst.shadercallcoherent)) ||
(src.subgroupcoherent && (dst.coherent || dst.devicecoherent || dst.queuefamilycoherent || dst.workgroupcoherent || dst.shadercallcoherent)) ||
(src.shadercallcoherent && (dst.coherent || dst.devicecoherent || dst.queuefamilycoherent || dst.workgroupcoherent || dst.subgroupcoherent)))) {
- error(loc, "only one coherent/devicecoherent/queuefamilycoherent/workgroupcoherent/subgroupcoherent/shadercallcoherent qualifier allowed",
+ error(loc, "only one coherent/devicecoherent/queuefamilycoherent/workgroupcoherent/subgroupcoherent/shadercallcoherent qualifier allowed",
GetPrecisionQualifierString(src.precision), "");
}
#endif
@@ -4320,10 +4364,10 @@ void TParseContext::arraySizesCheck(const TSourceLoc& loc, const TQualifier& qua
extensionsTurnedOn(Num_AEP_tessellation_shader, AEP_tessellation_shader))
return;
break;
- case EShLangMeshNV:
+ case EShLangMesh:
if (qualifier.storage == EvqVaryingOut)
if ((isEsProfile() && version >= 320) ||
- extensionTurnedOn(E_GL_NV_mesh_shader))
+ extensionsTurnedOn(Num_AEP_mesh_shader, AEP_mesh_shader))
return;
break;
default:
@@ -4591,7 +4635,7 @@ TSymbol* TParseContext::redeclareBuiltinVariable(const TSourceLoc& loc, const TS
if (ssoPre150 ||
(identifier == "gl_FragDepth" && ((nonEsRedecls && version >= 420) || esRedecls)) ||
- (identifier == "gl_FragCoord" && ((nonEsRedecls && version >= 150) || esRedecls)) ||
+ (identifier == "gl_FragCoord" && ((nonEsRedecls && version >= 140) || esRedecls)) ||
identifier == "gl_ClipDistance" ||
identifier == "gl_CullDistance" ||
identifier == "gl_ShadingRateEXT" ||
@@ -4607,6 +4651,9 @@ TSymbol* TParseContext::redeclareBuiltinVariable(const TSourceLoc& loc, const TS
identifier == "gl_SampleMask" ||
identifier == "gl_Layer" ||
identifier == "gl_PrimitiveIndicesNV" ||
+ identifier == "gl_PrimitivePointIndicesEXT" ||
+ identifier == "gl_PrimitiveLineIndicesEXT" ||
+ identifier == "gl_PrimitiveTriangleIndicesEXT" ||
identifier == "gl_TexCoord") {
// Find the existing symbol, if any.
@@ -4660,7 +4707,7 @@ TSymbol* TParseContext::redeclareBuiltinVariable(const TSourceLoc& loc, const TS
symbolQualifier.storage != qualifier.storage)
error(loc, "cannot change qualification of", "redeclaration", symbol->getName().c_str());
} else if (identifier == "gl_FragCoord") {
- if (intermediate.inIoAccessed("gl_FragCoord"))
+ if (!intermediate.getTexCoordRedeclared() && intermediate.inIoAccessed("gl_FragCoord"))
error(loc, "cannot redeclare after use", "gl_FragCoord", "");
if (qualifier.nopersp != symbolQualifier.nopersp || qualifier.flat != symbolQualifier.flat ||
qualifier.isMemory() || qualifier.isAuxiliary())
@@ -4670,6 +4717,9 @@ TSymbol* TParseContext::redeclareBuiltinVariable(const TSourceLoc& loc, const TS
if (! builtIn && (publicType.pixelCenterInteger != intermediate.getPixelCenterInteger() ||
publicType.originUpperLeft != intermediate.getOriginUpperLeft()))
error(loc, "cannot redeclare with different qualification:", "redeclaration", symbol->getName().c_str());
+
+
+ intermediate.setTexCoordRedeclared();
if (publicType.pixelCenterInteger)
intermediate.setPixelCenterInteger();
if (publicType.originUpperLeft)
@@ -4686,10 +4736,22 @@ TSymbol* TParseContext::redeclareBuiltinVariable(const TSourceLoc& loc, const TS
if (! intermediate.setDepth(publicType.layoutDepth))
error(loc, "all redeclarations must use the same depth layout on", "redeclaration", symbol->getName().c_str());
}
+ } else if (identifier == "gl_FragStencilRefARB") {
+ if (qualifier.nopersp != symbolQualifier.nopersp || qualifier.flat != symbolQualifier.flat ||
+ qualifier.isMemory() || qualifier.isAuxiliary())
+ error(loc, "can only change layout qualification of", "redeclaration", symbol->getName().c_str());
+ if (qualifier.storage != EvqVaryingOut)
+ error(loc, "cannot change output storage qualification of", "redeclaration", symbol->getName().c_str());
+ if (publicType.layoutStencil != ElsNone) {
+ if (intermediate.inIoAccessed("gl_FragStencilRefARB"))
+ error(loc, "cannot redeclare after use", "gl_FragStencilRefARB", "");
+ if (!intermediate.setStencil(publicType.layoutStencil))
+ error(loc, "all redeclarations must use the same stencil layout on", "redeclaration",
+ symbol->getName().c_str());
+ }
}
else if (
- identifier == "gl_PrimitiveIndicesNV" ||
- identifier == "gl_FragStencilRefARB") {
+ identifier == "gl_PrimitiveIndicesNV") {
if (qualifier.hasLayout())
error(loc, "cannot apply layout qualifier to", "redeclaration", symbol->getName().c_str());
if (qualifier.storage != EvqVaryingOut)
@@ -4730,7 +4792,8 @@ void TParseContext::redeclareBuiltinBlock(const TSourceLoc& loc, TTypeList& newT
profileRequires(loc, ~EEsProfile, 410, E_GL_ARB_separate_shader_objects, feature);
if (blockName != "gl_PerVertex" && blockName != "gl_PerFragment" &&
- blockName != "gl_MeshPerVertexNV" && blockName != "gl_MeshPerPrimitiveNV") {
+ blockName != "gl_MeshPerVertexNV" && blockName != "gl_MeshPerPrimitiveNV" &&
+ blockName != "gl_MeshPerVertexEXT" && blockName != "gl_MeshPerPrimitiveEXT") {
error(loc, "cannot redeclare block: ", "block declaration", blockName.c_str());
return;
}
@@ -5299,11 +5362,11 @@ void TParseContext::finish()
if (!isEsProfile() && version < 430)
requireExtensions(getCurrentLoc(), 1, &E_GL_ARB_compute_shader, "compute shaders");
break;
- case EShLangTaskNV:
- requireExtensions(getCurrentLoc(), 1, &E_GL_NV_mesh_shader, "task shaders");
+ case EShLangTask:
+ requireExtensions(getCurrentLoc(), Num_AEP_mesh_shader, AEP_mesh_shader, "task shaders");
break;
- case EShLangMeshNV:
- requireExtensions(getCurrentLoc(), 1, &E_GL_NV_mesh_shader, "mesh shaders");
+ case EShLangMesh:
+ requireExtensions(getCurrentLoc(), Num_AEP_mesh_shader, AEP_mesh_shader, "mesh shaders");
break;
default:
break;
@@ -5413,12 +5476,12 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi
intermediate.setUsePhysicalStorageBuffer();
return;
}
- if (language == EShLangGeometry || language == EShLangTessEvaluation || language == EShLangMeshNV) {
+ if (language == EShLangGeometry || language == EShLangTessEvaluation || language == EShLangMesh) {
if (id == TQualifier::getGeometryString(ElgTriangles)) {
publicType.shaderQualifiers.geometry = ElgTriangles;
return;
}
- if (language == EShLangGeometry || language == EShLangMeshNV) {
+ if (language == EShLangGeometry || language == EShLangMesh) {
if (id == TQualifier::getGeometryString(ElgPoints)) {
publicType.shaderQualifiers.geometry = ElgPoints;
return;
@@ -5501,12 +5564,19 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi
}
if (language == EShLangFragment) {
if (id == "origin_upper_left") {
- requireProfile(loc, ECoreProfile | ECompatibilityProfile, "origin_upper_left");
+ requireProfile(loc, ECoreProfile | ECompatibilityProfile | ENoProfile, "origin_upper_left");
+ if (profile == ENoProfile) {
+ profileRequires(loc,ECoreProfile | ECompatibilityProfile, 140, E_GL_ARB_fragment_coord_conventions, "origin_upper_left");
+ }
+
publicType.shaderQualifiers.originUpperLeft = true;
return;
}
if (id == "pixel_center_integer") {
- requireProfile(loc, ECoreProfile | ECompatibilityProfile, "pixel_center_integer");
+ requireProfile(loc, ECoreProfile | ECompatibilityProfile | ENoProfile, "pixel_center_integer");
+ if (profile == ENoProfile) {
+ profileRequires(loc,ECoreProfile | ECompatibilityProfile, 140, E_GL_ARB_fragment_coord_conventions, "pixel_center_integer");
+ }
publicType.shaderQualifiers.pixelCenterInteger = true;
return;
}
@@ -5516,6 +5586,12 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi
publicType.shaderQualifiers.earlyFragmentTests = true;
return;
}
+ if (id == "early_and_late_fragment_tests_amd") {
+ profileRequires(loc, ENoProfile | ECoreProfile | ECompatibilityProfile, 420, E_GL_AMD_shader_early_and_late_fragment_tests, "early_and_late_fragment_tests_amd");
+ profileRequires(loc, EEsProfile, 310, nullptr, "early_and_late_fragment_tests_amd");
+ publicType.shaderQualifiers.earlyAndLateFragmentTestsAMD = true;
+ return;
+ }
if (id == "post_depth_coverage") {
requireExtensions(loc, Num_post_depth_coverageEXTs, post_depth_coverageEXTs, "post depth coverage");
if (extensionTurnedOn(E_GL_ARB_post_depth_coverage)) {
@@ -5532,6 +5608,14 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi
return;
}
}
+ for (TLayoutStencil stencil = (TLayoutStencil)(ElsNone + 1); stencil < ElsCount; stencil = (TLayoutStencil)(stencil+1)) {
+ if (id == TQualifier::getLayoutStencilString(stencil)) {
+ requireProfile(loc, ECoreProfile | ECompatibilityProfile, "stencil layout qualifier");
+ profileRequires(loc, ECoreProfile | ECompatibilityProfile, 420, nullptr, "stencil layout qualifier");
+ publicType.shaderQualifiers.layoutStencil = stencil;
+ return;
+ }
+ }
for (TInterlockOrdering order = (TInterlockOrdering)(EioNone + 1); order < EioCount; order = (TInterlockOrdering)(order+1)) {
if (id == TQualifier::getInterlockOrderingString(order)) {
requireProfile(loc, ECoreProfile | ECompatibilityProfile, "fragment shader interlock layout qualifier");
@@ -5675,7 +5759,7 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi
return;
} else if (id == "location") {
profileRequires(loc, EEsProfile, 300, nullptr, "location");
- const char* exts[2] = { E_GL_ARB_separate_shader_objects, E_GL_ARB_explicit_attrib_location };
+ const char* exts[2] = { E_GL_ARB_separate_shader_objects, E_GL_ARB_explicit_attrib_location };
// GL_ARB_explicit_uniform_location requires 330 or GL_ARB_explicit_attrib_location we do not need to add it here
profileRequires(loc, ~EEsProfile, 330, 2, exts, "location");
if ((unsigned int)value >= TQualifier::layoutLocationEnd)
@@ -5885,9 +5969,9 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi
}
break;
- case EShLangMeshNV:
+ case EShLangMesh:
if (id == "max_vertices") {
- requireExtensions(loc, 1, &E_GL_NV_mesh_shader, "max_vertices");
+ requireExtensions(loc, Num_AEP_mesh_shader, AEP_mesh_shader, "max_vertices");
publicType.shaderQualifiers.vertices = value;
if (value > resources.maxMeshOutputVerticesNV)
error(loc, "too large, must be less than gl_MaxMeshOutputVerticesNV", "max_vertices", "");
@@ -5896,7 +5980,7 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi
return;
}
if (id == "max_primitives") {
- requireExtensions(loc, 1, &E_GL_NV_mesh_shader, "max_primitives");
+ requireExtensions(loc, Num_AEP_mesh_shader, AEP_mesh_shader, "max_primitives");
publicType.shaderQualifiers.primitives = value;
if (value > resources.maxMeshOutputPrimitivesNV)
error(loc, "too large, must be less than gl_MaxMeshOutputPrimitivesNV", "max_primitives", "");
@@ -5906,14 +5990,14 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi
}
// Fall through
- case EShLangTaskNV:
+ case EShLangTask:
// Fall through
#endif
case EShLangCompute:
if (id.compare(0, 11, "local_size_") == 0) {
#ifndef GLSLANG_WEB
- if (language == EShLangMeshNV || language == EShLangTaskNV) {
- requireExtensions(loc, 1, &E_GL_NV_mesh_shader, "gl_WorkGroupSize");
+ if (language == EShLangMesh || language == EShLangTask) {
+ requireExtensions(loc, Num_AEP_mesh_shader, AEP_mesh_shader, "gl_WorkGroupSize");
} else {
profileRequires(loc, EEsProfile, 310, 0, "gl_WorkGroupSize");
profileRequires(loc, ~EEsProfile, 430, E_GL_ARB_compute_shader, "gl_WorkGroupSize");
@@ -6039,6 +6123,8 @@ void TParseContext::mergeObjectLayoutQualifiers(TQualifier& dst, const TQualifie
dst.layoutShaderRecord = true;
if (src.pervertexNV)
dst.pervertexNV = true;
+ if (src.pervertexEXT)
+ dst.pervertexEXT = true;
#endif
}
}
@@ -6187,6 +6273,9 @@ void TParseContext::layoutTypeCheck(const TSourceLoc& loc, const TType& type)
if (type.getBasicType() == EbtBlock)
error(loc, "cannot apply to uniform or buffer block", "location", "");
break;
+ case EvqtaskPayloadSharedEXT:
+ error(loc, "cannot apply to taskPayloadSharedEXT", "location", "");
+ break;
#ifndef GLSLANG_WEB
case EvqPayload:
case EvqPayloadIn:
@@ -6338,8 +6427,12 @@ void TParseContext::layoutTypeCheck(const TSourceLoc& loc, const TType& type)
profileRequires(loc, ECoreProfile | ECompatibilityProfile, 0, E_GL_EXT_shader_image_load_formatted, explanation);
}
- if (qualifier.isPushConstant() && type.getBasicType() != EbtBlock)
- error(loc, "can only be used with a block", "push_constant", "");
+ if (qualifier.isPushConstant()) {
+ if (type.getBasicType() != EbtBlock)
+ error(loc, "can only be used with a block", "push_constant", "");
+ if (type.isArray())
+ error(loc, "Push constants blocks can't be an array", "push_constant", "");
+ }
if (qualifier.hasBufferReference() && type.getBasicType() != EbtBlock)
error(loc, "can only be used with a block", "buffer_reference", "");
@@ -6544,7 +6637,7 @@ void TParseContext::checkNoShaderLayouts(const TSourceLoc& loc, const TShaderQua
error(loc, message, "local_size id", "");
}
if (shaderQualifiers.vertices != TQualifier::layoutNotSet) {
- if (language == EShLangGeometry || language == EShLangMeshNV)
+ if (language == EShLangGeometry || language == EShLangMesh)
error(loc, message, "max_vertices", "");
else if (language == EShLangTessControl)
error(loc, message, "vertices", "");
@@ -6556,7 +6649,7 @@ void TParseContext::checkNoShaderLayouts(const TSourceLoc& loc, const TShaderQua
if (shaderQualifiers.postDepthCoverage)
error(loc, message, "post_depth_coverage", "");
if (shaderQualifiers.primitives != TQualifier::layoutNotSet) {
- if (language == EShLangMeshNV)
+ if (language == EShLangMesh)
error(loc, message, "max_primitives", "");
else
assert(0);
@@ -6967,12 +7060,14 @@ TIntermTyped* TParseContext::vkRelaxedRemapFunctionCall(const TSourceLoc& loc, T
TFunction realFunc(&name, function->getType());
+ // Use copyParam to avoid shared ownership of the 'type' field
+ // of the parameter.
for (int i = 0; i < function->getParamCount(); ++i) {
- realFunc.addParameter((*function)[i]);
+ realFunc.addParameter(TParameter().copyParam((*function)[i]));
}
TParameter tmpP = { 0, &uintType };
- realFunc.addParameter(tmpP);
+ realFunc.addParameter(TParameter().copyParam(tmpP));
arguments = intermediate.growAggregate(arguments, intermediate.addConstantUnion(1, loc, true));
result = handleFunctionCall(loc, &realFunc, arguments);
@@ -6985,11 +7080,11 @@ TIntermTyped* TParseContext::vkRelaxedRemapFunctionCall(const TSourceLoc& loc, T
TFunction realFunc(&name, function->getType());
for (int i = 0; i < function->getParamCount(); ++i) {
- realFunc.addParameter((*function)[i]);
+ realFunc.addParameter(TParameter().copyParam((*function)[i]));
}
TParameter tmpP = { 0, &uintType };
- realFunc.addParameter(tmpP);
+ realFunc.addParameter(TParameter().copyParam(tmpP));
arguments = intermediate.growAggregate(arguments, intermediate.addConstantUnion(-1, loc, true));
result = handleFunctionCall(loc, &realFunc, arguments);
@@ -7200,6 +7295,8 @@ TIntermNode* TParseContext::declareVariable(const TSourceLoc& loc, TString& iden
requireInt8Arithmetic(loc, "qualifier", "(u)int8 types can only be in uniform block or buffer storage");
}
+ if (type.getQualifier().storage == EvqtaskPayloadSharedEXT)
+ intermediate.addTaskPayloadEXTCount();
if (type.getQualifier().storage == EvqShared && type.containsCoopMat())
error(loc, "qualifier", "Cooperative matrix types must not be used in shared memory", "");
@@ -7223,6 +7320,8 @@ TIntermNode* TParseContext::declareVariable(const TSourceLoc& loc, TString& iden
error(loc, "can only apply origin_upper_left and pixel_center_origin to gl_FragCoord", "layout qualifier", "");
if (identifier != "gl_FragDepth" && publicType.shaderQualifiers.getDepth() != EldNone)
error(loc, "can only apply depth layout to gl_FragDepth", "layout qualifier", "");
+ if (identifier != "gl_FragStencilRefARB" && publicType.shaderQualifiers.getStencil() != ElsNone)
+ error(loc, "can only apply depth layout to gl_FragStencilRefARB", "layout qualifier", "");
// Check for redeclaration of built-ins and/or attempting to declare a reserved name
TSymbol* symbol = redeclareBuiltinVariable(loc, identifier, type.getQualifier(), publicType.shaderQualifiers);
@@ -7430,14 +7529,14 @@ TIntermNode* TParseContext::executeInitializer(const TSourceLoc& loc, TIntermTyp
// Uniforms require a compile-time constant initializer
if (qualifier == EvqUniform && ! initializer->getType().getQualifier().isFrontEndConstant()) {
error(loc, "uniform initializers must be constant", "=", "'%s'",
- variable->getType().getCompleteString().c_str());
+ variable->getType().getCompleteString(intermediate.getEnhancedMsgs()).c_str());
variable->getWritableType().getQualifier().makeTemporary();
return nullptr;
}
// Global consts require a constant initializer (specialization constant is okay)
if (qualifier == EvqConst && symbolTable.atGlobalLevel() && ! initializer->getType().getQualifier().isConstant()) {
error(loc, "global const initializers must be constant", "=", "'%s'",
- variable->getType().getCompleteString().c_str());
+ variable->getType().getCompleteString(intermediate.getEnhancedMsgs()).c_str());
variable->getWritableType().getQualifier().makeTemporary();
return nullptr;
}
@@ -7500,7 +7599,7 @@ TIntermNode* TParseContext::executeInitializer(const TSourceLoc& loc, TIntermTyp
TIntermSymbol* intermSymbol = intermediate.addSymbol(*variable, loc);
TIntermTyped* initNode = intermediate.addAssign(EOpAssign, intermSymbol, initializer, loc);
if (! initNode)
- assignError(loc, "=", intermSymbol->getCompleteString(), initializer->getCompleteString());
+ assignError(loc, "=", intermSymbol->getCompleteString(intermediate.getEnhancedMsgs()), initializer->getCompleteString(intermediate.getEnhancedMsgs()));
return initNode;
}
@@ -7571,7 +7670,7 @@ TIntermTyped* TParseContext::convertInitializerList(const TSourceLoc& loc, const
}
} else if (type.isMatrix()) {
if (type.getMatrixCols() != (int)initList->getSequence().size()) {
- error(loc, "wrong number of matrix columns:", "initializer list", type.getCompleteString().c_str());
+ error(loc, "wrong number of matrix columns:", "initializer list", type.getCompleteString(intermediate.getEnhancedMsgs()).c_str());
return nullptr;
}
TType vectorType(type, 0); // dereferenced type
@@ -7582,20 +7681,20 @@ TIntermTyped* TParseContext::convertInitializerList(const TSourceLoc& loc, const
}
} else if (type.isVector()) {
if (type.getVectorSize() != (int)initList->getSequence().size()) {
- error(loc, "wrong vector size (or rows in a matrix column):", "initializer list", type.getCompleteString().c_str());
+ error(loc, "wrong vector size (or rows in a matrix column):", "initializer list", type.getCompleteString(intermediate.getEnhancedMsgs()).c_str());
return nullptr;
}
TBasicType destType = type.getBasicType();
for (int i = 0; i < type.getVectorSize(); ++i) {
TBasicType initType = initList->getSequence()[i]->getAsTyped()->getBasicType();
if (destType != initType && !intermediate.canImplicitlyPromote(initType, destType)) {
- error(loc, "type mismatch in initializer list", "initializer list", type.getCompleteString().c_str());
+ error(loc, "type mismatch in initializer list", "initializer list", type.getCompleteString(intermediate.getEnhancedMsgs()).c_str());
return nullptr;
}
}
} else {
- error(loc, "unexpected initializer-list type:", "initializer list", type.getCompleteString().c_str());
+ error(loc, "unexpected initializer-list type:", "initializer list", type.getCompleteString(intermediate.getEnhancedMsgs()).c_str());
return nullptr;
}
@@ -8059,12 +8158,12 @@ TIntermTyped* TParseContext::constructBuiltIn(const TType& type, TOperator op, T
case EOpConstructAccStruct:
if ((node->getType().isScalar() && node->getType().getBasicType() == EbtUint64)) {
// construct acceleration structure from uint64
- requireExtensions(loc, 1, &E_GL_EXT_ray_tracing, "uint64_t conversion to acclerationStructureEXT");
+ requireExtensions(loc, Num_ray_tracing_EXTs, ray_tracing_EXTs, "uint64_t conversion to acclerationStructureEXT");
return intermediate.addBuiltInFunctionCall(node->getLoc(), EOpConvUint64ToAccStruct, true, node,
type);
} else if (node->getType().isVector() && node->getType().getBasicType() == EbtUint && node->getVectorSize() == 2) {
// construct acceleration structure from uint64
- requireExtensions(loc, 1, &E_GL_EXT_ray_tracing, "uvec2 conversion to accelerationStructureEXT");
+ requireExtensions(loc, Num_ray_tracing_EXTs, ray_tracing_EXTs, "uvec2 conversion to accelerationStructureEXT");
return intermediate.addBuiltInFunctionCall(node->getLoc(), EOpConvUvec2ToAccStruct, true, node,
type);
} else
@@ -8103,8 +8202,9 @@ TIntermTyped* TParseContext::constructAggregate(TIntermNode* node, const TType&
{
TIntermTyped* converted = intermediate.addConversion(EOpConstructStruct, type, node->getAsTyped());
if (! converted || converted->getType() != type) {
+ bool enhanced = intermediate.getEnhancedMsgs();
error(loc, "", "constructor", "cannot convert parameter %d from '%s' to '%s'", paramCount,
- node->getAsTyped()->getType().getCompleteString().c_str(), type.getCompleteString().c_str());
+ node->getAsTyped()->getType().getCompleteString(enhanced).c_str(), type.getCompleteString(enhanced).c_str());
return nullptr;
}
@@ -8163,6 +8263,8 @@ void TParseContext::declareBlock(const TSourceLoc& loc, TTypeList& typeList, con
memberQualifier.perViewNV = currentBlockQualifier.perViewNV;
if (currentBlockQualifier.perTaskNV)
memberQualifier.perTaskNV = currentBlockQualifier.perTaskNV;
+ if (currentBlockQualifier.storage == EvqtaskPayloadSharedEXT)
+ memberQualifier.storage = EvqtaskPayloadSharedEXT;
if (memberQualifier.storage == EvqSpirvStorageClass)
error(memberLoc, "member cannot have a spirv_storage_class qualifier", memberType.getFieldName().c_str(), "");
if (memberQualifier.hasSprivDecorate() && !memberQualifier.getSpirvDecorate().decorateIds.empty())
@@ -8463,23 +8565,23 @@ void TParseContext::blockStageIoCheck(const TSourceLoc& loc, const TQualifier& q
// It is a compile-time error to have an input block in a vertex shader or an output block in a fragment shader
// "Compute shaders do not permit user-defined input variables..."
requireStage(loc, (EShLanguageMask)(EShLangTessControlMask|EShLangTessEvaluationMask|EShLangGeometryMask|
- EShLangFragmentMask|EShLangMeshNVMask), "input block");
+ EShLangFragmentMask|EShLangMeshMask), "input block");
if (language == EShLangFragment) {
profileRequires(loc, EEsProfile, 320, Num_AEP_shader_io_blocks, AEP_shader_io_blocks, "fragment input block");
- } else if (language == EShLangMeshNV && ! qualifier.isTaskMemory()) {
+ } else if (language == EShLangMesh && ! qualifier.isTaskMemory()) {
error(loc, "input blocks cannot be used in a mesh shader", "out", "");
}
break;
case EvqVaryingOut:
profileRequires(loc, ~EEsProfile, 150, E_GL_ARB_separate_shader_objects, "output block");
requireStage(loc, (EShLanguageMask)(EShLangVertexMask|EShLangTessControlMask|EShLangTessEvaluationMask|
- EShLangGeometryMask|EShLangMeshNVMask|EShLangTaskNVMask), "output block");
+ EShLangGeometryMask|EShLangMeshMask|EShLangTaskMask), "output block");
// ES 310 can have a block before shader_io is turned on, so skip this test for built-ins
if (language == EShLangVertex && ! parsingBuiltins) {
profileRequires(loc, EEsProfile, 320, Num_AEP_shader_io_blocks, AEP_shader_io_blocks, "vertex output block");
- } else if (language == EShLangMeshNV && qualifier.isTaskMemory()) {
+ } else if (language == EShLangMesh && qualifier.isTaskMemory()) {
error(loc, "can only use on input blocks in mesh shader", "taskNV", "");
- } else if (language == EShLangTaskNV && ! qualifier.isTaskMemory()) {
+ } else if (language == EShLangTask && ! qualifier.isTaskMemory()) {
error(loc, "output blocks cannot be used in a task shader", "out", "");
}
break;
@@ -8893,7 +8995,7 @@ void TParseContext::updateStandaloneQualifierDefaults(const TSourceLoc& loc, con
{
#ifndef GLSLANG_WEB
if (publicType.shaderQualifiers.vertices != TQualifier::layoutNotSet) {
- assert(language == EShLangTessControl || language == EShLangGeometry || language == EShLangMeshNV);
+ assert(language == EShLangTessControl || language == EShLangGeometry || language == EShLangMesh);
const char* id = (language == EShLangTessControl) ? "vertices" : "max_vertices";
if (publicType.qualifier.storage != EvqVaryingOut)
@@ -8905,7 +9007,7 @@ void TParseContext::updateStandaloneQualifierDefaults(const TSourceLoc& loc, con
checkIoArraysConsistency(loc);
}
if (publicType.shaderQualifiers.primitives != TQualifier::layoutNotSet) {
- assert(language == EShLangMeshNV);
+ assert(language == EShLangMesh);
const char* id = "max_primitives";
if (publicType.qualifier.storage != EvqVaryingOut)
@@ -8929,7 +9031,7 @@ void TParseContext::updateStandaloneQualifierDefaults(const TSourceLoc& loc, con
case ElgTrianglesAdjacency:
case ElgQuads:
case ElgIsolines:
- if (language == EShLangMeshNV) {
+ if (language == EShLangMesh) {
error(loc, "cannot apply to input", TQualifier::getGeometryString(publicType.shaderQualifiers.geometry), "");
break;
}
@@ -8946,7 +9048,7 @@ void TParseContext::updateStandaloneQualifierDefaults(const TSourceLoc& loc, con
switch (publicType.shaderQualifiers.geometry) {
case ElgLines:
case ElgTriangles:
- if (language != EShLangMeshNV) {
+ if (language != EShLangMesh) {
error(loc, "cannot apply to 'out'", TQualifier::getGeometryString(publicType.shaderQualifiers.geometry), "");
break;
}
@@ -9002,24 +9104,56 @@ void TParseContext::updateStandaloneQualifierDefaults(const TSourceLoc& loc, con
error(loc, "too large; see gl_MaxComputeWorkGroupSize", "local_size", "");
}
#ifndef GLSLANG_WEB
- else if (language == EShLangMeshNV) {
+ else if (language == EShLangMesh) {
switch (i) {
- case 0: max = resources.maxMeshWorkGroupSizeX_NV; break;
- case 1: max = resources.maxMeshWorkGroupSizeY_NV; break;
- case 2: max = resources.maxMeshWorkGroupSizeZ_NV; break;
+ case 0:
+ max = extensionTurnedOn(E_GL_EXT_mesh_shader) ?
+ resources.maxMeshWorkGroupSizeX_EXT :
+ resources.maxMeshWorkGroupSizeX_NV;
+ break;
+ case 1:
+ max = extensionTurnedOn(E_GL_EXT_mesh_shader) ?
+ resources.maxMeshWorkGroupSizeY_EXT :
+ resources.maxMeshWorkGroupSizeY_NV ;
+ break;
+ case 2:
+ max = extensionTurnedOn(E_GL_EXT_mesh_shader) ?
+ resources.maxMeshWorkGroupSizeZ_EXT :
+ resources.maxMeshWorkGroupSizeZ_NV ;
+ break;
default: break;
}
- if (intermediate.getLocalSize(i) > (unsigned int)max)
- error(loc, "too large; see gl_MaxMeshWorkGroupSizeNV", "local_size", "");
- } else if (language == EShLangTaskNV) {
+ if (intermediate.getLocalSize(i) > (unsigned int)max) {
+ TString maxsErrtring = "too large, see ";
+ maxsErrtring.append(extensionTurnedOn(E_GL_EXT_mesh_shader) ?
+ "gl_MaxMeshWorkGroupSizeEXT" : "gl_MaxMeshWorkGroupSizeNV");
+ error(loc, maxsErrtring.c_str(), "local_size", "");
+ }
+ } else if (language == EShLangTask) {
switch (i) {
- case 0: max = resources.maxTaskWorkGroupSizeX_NV; break;
- case 1: max = resources.maxTaskWorkGroupSizeY_NV; break;
- case 2: max = resources.maxTaskWorkGroupSizeZ_NV; break;
+ case 0:
+ max = extensionTurnedOn(E_GL_EXT_mesh_shader) ?
+ resources.maxTaskWorkGroupSizeX_EXT :
+ resources.maxTaskWorkGroupSizeX_NV;
+ break;
+ case 1:
+ max = extensionTurnedOn(E_GL_EXT_mesh_shader) ?
+ resources.maxTaskWorkGroupSizeY_EXT:
+ resources.maxTaskWorkGroupSizeY_NV;
+ break;
+ case 2:
+ max = extensionTurnedOn(E_GL_EXT_mesh_shader) ?
+ resources.maxTaskWorkGroupSizeZ_EXT:
+ resources.maxTaskWorkGroupSizeZ_NV;
+ break;
default: break;
}
- if (intermediate.getLocalSize(i) > (unsigned int)max)
- error(loc, "too large; see gl_MaxTaskWorkGroupSizeNV", "local_size", "");
+ if (intermediate.getLocalSize(i) > (unsigned int)max) {
+ TString maxsErrtring = "too large, see ";
+ maxsErrtring.append(extensionTurnedOn(E_GL_EXT_mesh_shader) ?
+ "gl_MaxTaskWorkGroupSizeEXT" : "gl_MaxTaskWorkGroupSizeNV");
+ error(loc, maxsErrtring.c_str(), "local_size", "");
+ }
}
#endif
else {
@@ -9054,6 +9188,12 @@ void TParseContext::updateStandaloneQualifierDefaults(const TSourceLoc& loc, con
else
error(loc, "can only apply to 'in'", "early_fragment_tests", "");
}
+ if (publicType.shaderQualifiers.earlyAndLateFragmentTestsAMD) {
+ if (publicType.qualifier.storage == EvqVaryingIn)
+ intermediate.setEarlyAndLateFragmentTestsAMD();
+ else
+ error(loc, "can only apply to 'in'", "early_and_late_fragment_tests_amd", "");
+ }
if (publicType.shaderQualifiers.postDepthCoverage) {
if (publicType.qualifier.storage == EvqVaryingIn)
intermediate.setPostDepthCoverage();
@@ -9102,7 +9242,7 @@ void TParseContext::updateStandaloneQualifierDefaults(const TSourceLoc& loc, con
error(loc, "can only apply to 'in'", "derivative_group_linearNV", "");
}
// Check mesh out array sizes, once all the necessary out qualifiers are defined.
- if ((language == EShLangMeshNV) &&
+ if ((language == EShLangMesh) &&
(intermediate.getVertices() != TQualifier::layoutNotSet) &&
(intermediate.getPrimitives() != TQualifier::layoutNotSet) &&
(intermediate.getOutputPrimitive() != ElgNone))
@@ -9119,7 +9259,7 @@ void TParseContext::updateStandaloneQualifierDefaults(const TSourceLoc& loc, con
// Exit early as further checks are not valid
return;
}
-#endif
+#endif
const TQualifier& qualifier = publicType.qualifier;
if (qualifier.isAuxiliary() ||
@@ -9317,4 +9457,3 @@ const TTypeList* TParseContext::recordStructCopy(TStructRecord& record, const TT
}
} // end namespace glslang
-
diff --git a/thirdparty/glslang/glslang/MachineIndependent/Scan.cpp b/thirdparty/glslang/glslang/MachineIndependent/Scan.cpp
index c387aede0e..7f51173ebc 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/Scan.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/Scan.cpp
@@ -739,6 +739,7 @@ void TScanContext::fillInKeywordMap()
(*KeywordMap)["f16subpassInputMS"] = F16SUBPASSINPUTMS;
(*KeywordMap)["__explicitInterpAMD"] = EXPLICITINTERPAMD;
(*KeywordMap)["pervertexNV"] = PERVERTEXNV;
+ (*KeywordMap)["pervertexEXT"] = PERVERTEXEXT;
(*KeywordMap)["precise"] = PRECISE;
(*KeywordMap)["rayPayloadNV"] = PAYLOADNV;
@@ -757,6 +758,8 @@ void TScanContext::fillInKeywordMap()
(*KeywordMap)["perprimitiveNV"] = PERPRIMITIVENV;
(*KeywordMap)["perviewNV"] = PERVIEWNV;
(*KeywordMap)["taskNV"] = PERTASKNV;
+ (*KeywordMap)["perprimitiveEXT"] = PERPRIMITIVEEXT;
+ (*KeywordMap)["taskPayloadSharedEXT"] = TASKPAYLOADWORKGROUPEXT;
(*KeywordMap)["fcoopmatNV"] = FCOOPMATNV;
(*KeywordMap)["icoopmatNV"] = ICOOPMATNV;
@@ -1719,6 +1722,12 @@ int TScanContext::tokenizeIdentifier()
return keyword;
return identifierOrType();
+ case PERVERTEXEXT:
+ if ((!parseContext.isEsProfile() && parseContext.version >= 450) ||
+ parseContext.extensionTurnedOn(E_GL_EXT_fragment_shader_barycentric))
+ return keyword;
+ return identifierOrType();
+
case PRECISE:
if ((parseContext.isEsProfile() &&
(parseContext.version >= 320 || parseContext.extensionsTurnedOn(Num_AEP_gpu_shader5, AEP_gpu_shader5))) ||
@@ -1733,12 +1742,18 @@ int TScanContext::tokenizeIdentifier()
case PERPRIMITIVENV:
case PERVIEWNV:
case PERTASKNV:
- if ((!parseContext.isEsProfile() && parseContext.version >= 450) ||
- (parseContext.isEsProfile() && parseContext.version >= 320) ||
+ if (parseContext.symbolTable.atBuiltInLevel() ||
parseContext.extensionTurnedOn(E_GL_NV_mesh_shader))
return keyword;
return identifierOrType();
+ case PERPRIMITIVEEXT:
+ case TASKPAYLOADWORKGROUPEXT:
+ if (parseContext.symbolTable.atBuiltInLevel() ||
+ parseContext.extensionTurnedOn(E_GL_EXT_mesh_shader))
+ return keyword;
+ return identifierOrType();
+
case FCOOPMATNV:
afterType = true;
if (parseContext.symbolTable.atBuiltInLevel() ||
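
The Scan.cpp hunks above gate the new pervertexEXT, perprimitiveEXT, and taskPayloadSharedEXT keywords the same way other extension keywords are gated: they scan as keywords only while building the built-in symbol table or when the matching extension is enabled, and otherwise fall back to ordinary identifiers. A tiny standalone sketch of that decision (illustrative names, not glslang's tokenizer):

#include <iostream>

// Simplified keyword gate: keyword at built-in level or with the extension on,
// plain identifier otherwise.
enum class TokenClass { Keyword, Identifier };

static TokenClass classify(bool atBuiltInLevel, bool extensionOn) {
    return (atBuiltInLevel || extensionOn) ? TokenClass::Keyword : TokenClass::Identifier;
}

int main() {
    std::cout << (classify(false, false) == TokenClass::Identifier) << "\n"; // 1: treated as identifier
    std::cout << (classify(false, true)  == TokenClass::Keyword)    << "\n"; // 1: keyword once GL_EXT_mesh_shader is on
}
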
diff --git a/thirdparty/glslang/glslang/MachineIndependent/ShaderLang.cpp b/thirdparty/glslang/glslang/MachineIndependent/ShaderLang.cpp
index bcf2c33ff7..57e3423a76 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/ShaderLang.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/ShaderLang.cpp
@@ -391,13 +391,13 @@ bool InitializeSymbolTables(TInfoSink& infoSink, TSymbolTable** commonTable, TS
// check for mesh
if ((profile != EEsProfile && version >= 450) ||
(profile == EEsProfile && version >= 320))
- InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangMeshNV, source,
+ InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangMesh, source,
infoSink, commonTable, symbolTables);
// check for task
if ((profile != EEsProfile && version >= 450) ||
(profile == EEsProfile && version >= 320))
- InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangTaskNV, source,
+ InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangTask, source,
infoSink, commonTable, symbolTables);
#endif // !GLSLANG_ANGLE
#endif // !GLSLANG_WEB
@@ -650,8 +650,8 @@ bool DeduceVersionProfile(TInfoSink& infoSink, EShLanguage stage, bool versionNo
version = 460;
}
break;
- case EShLangMeshNV:
- case EShLangTaskNV:
+ case EShLangMesh:
+ case EShLangTask:
if ((profile == EEsProfile && version < 320) ||
(profile != EEsProfile && version < 450)) {
correct = false;
@@ -813,6 +813,7 @@ bool ProcessDeferred(
// set version/profile to defaultVersion/defaultProfile regardless of the #version
// directive in the source code
bool forceDefaultVersionAndProfile,
+ int overrideVersion, // overrides version specified by #version or default version
bool forwardCompatible, // give errors for use of deprecated features
EShMessages messages, // warnings/errors/AST; things to print out
TIntermediate& intermediate, // returned tree, etc.
@@ -900,6 +901,9 @@ bool ProcessDeferred(
version = defaultVersion;
profile = defaultProfile;
}
+ if (source == EShSourceGlsl && overrideVersion != 0) {
+ version = overrideVersion;
+ }
bool goodVersion = DeduceVersionProfile(compiler->infoSink, stage,
versionNotFirst, defaultVersion, source, version, profile, spvVersion);
@@ -1275,6 +1279,7 @@ bool PreprocessDeferred(
int defaultVersion, // use 100 for ES environment, 110 for desktop
EProfile defaultProfile,
bool forceDefaultVersionAndProfile,
+ int overrideVersion, // use 0 if not overriding GLSL version
bool forwardCompatible, // give errors for use of deprecated features
EShMessages messages, // warnings/errors/AST; things to print out
TShader::Includer& includer,
@@ -1285,7 +1290,7 @@ bool PreprocessDeferred(
DoPreprocessing parser(outputString);
return ProcessDeferred(compiler, shaderStrings, numStrings, inputLengths, stringNames,
preamble, optLevel, resources, defaultVersion,
- defaultProfile, forceDefaultVersionAndProfile,
+ defaultProfile, forceDefaultVersionAndProfile, overrideVersion,
forwardCompatible, messages, intermediate, parser,
false, includer, "", environment);
}
@@ -1314,6 +1319,7 @@ bool CompileDeferred(
int defaultVersion, // use 100 for ES environment, 110 for desktop
EProfile defaultProfile,
bool forceDefaultVersionAndProfile,
+ int overrideVersion, // use 0 if not overriding GLSL version
bool forwardCompatible, // give errors for use of deprecated features
EShMessages messages, // warnings/errors/AST; things to print out
TIntermediate& intermediate,// returned tree, etc.
@@ -1324,7 +1330,7 @@ bool CompileDeferred(
DoFullParse parser;
return ProcessDeferred(compiler, shaderStrings, numStrings, inputLengths, stringNames,
preamble, optLevel, resources, defaultVersion,
- defaultProfile, forceDefaultVersionAndProfile,
+ defaultProfile, forceDefaultVersionAndProfile, overrideVersion,
forwardCompatible, messages, intermediate, parser,
true, includer, sourceEntryPointName, environment);
}
@@ -1498,7 +1504,7 @@ int ShCompile(
TIntermediate intermediate(compiler->getLanguage());
TShader::ForbidIncluder includer;
bool success = CompileDeferred(compiler, shaderStrings, numStrings, inputLengths, nullptr,
- "", optLevel, resources, defaultVersion, ENoProfile, false,
+ "", optLevel, resources, defaultVersion, ENoProfile, false, 0,
forwardCompatible, messages, intermediate, includer);
//
@@ -1759,7 +1765,7 @@ public:
};
TShader::TShader(EShLanguage s)
- : stage(s), lengths(nullptr), stringNames(nullptr), preamble("")
+ : stage(s), lengths(nullptr), stringNames(nullptr), preamble(""), overrideVersion(0)
{
pool = new TPoolAllocator;
infoSink = new TInfoSink;
@@ -1828,8 +1834,15 @@ void TShader::setUniqueId(unsigned long long id)
intermediate->setUniqueId(id);
}
+void TShader::setOverrideVersion(int version)
+{
+ overrideVersion = version;
+}
+
+void TShader::setDebugInfo(bool debugInfo) { intermediate->setDebugInfo(debugInfo); }
void TShader::setInvertY(bool invert) { intermediate->setInvertY(invert); }
void TShader::setDxPositionW(bool invert) { intermediate->setDxPositionW(invert); }
+void TShader::setEnhancedMsgs() { intermediate->setEnhancedMsgs(); }
void TShader::setNanMinMaxClamp(bool useNonNan) { intermediate->setNanMinMaxClamp(useNonNan); }
#ifndef GLSLANG_WEB
@@ -1909,7 +1922,7 @@ bool TShader::parse(const TBuiltInResource* builtInResources, int defaultVersion
return CompileDeferred(compiler, strings, numStrings, lengths, stringNames,
preamble, EShOptNone, builtInResources, defaultVersion,
- defaultProfile, forceDefaultVersionAndProfile,
+ defaultProfile, forceDefaultVersionAndProfile, overrideVersion,
forwardCompatible, messages, *intermediate, includer, sourceEntryPointName,
&environment);
}
@@ -1936,7 +1949,7 @@ bool TShader::preprocess(const TBuiltInResource* builtInResources,
return PreprocessDeferred(compiler, strings, numStrings, lengths, stringNames, preamble,
EShOptNone, builtInResources, defaultVersion,
- defaultProfile, forceDefaultVersionAndProfile,
+ defaultProfile, forceDefaultVersionAndProfile, overrideVersion,
forwardCompatible, message, includer, *intermediate, output_string,
&environment);
}
@@ -2049,6 +2062,8 @@ bool TProgram::linkStage(EShLanguage stage, EShMessages messages)
firstIntermediate->getVersion(),
firstIntermediate->getProfile());
intermediate[stage]->setLimits(firstIntermediate->getLimits());
+ if (firstIntermediate->getEnhancedMsgs())
+ intermediate[stage]->setEnhancedMsgs();
// The new TIntermediate must use the same origin as the original TIntermediates.
// Otherwise linking will fail due to different coordinate systems.
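
The ShaderLang.cpp hunks above plumb a new overrideVersion value from TShader::setOverrideVersion() through CompileDeferred/PreprocessDeferred into ProcessDeferred, where it replaces the #version-derived value only for GLSL sources and only when nonzero. A simplified standalone sketch of that precedence (it ignores forceDefaultVersionAndProfile and profile handling, which the real code also honors):

#include <iostream>

// Version selection order, simplified: #version directive (or the default when
// none was given), then a nonzero override wins for GLSL sources.
static int effectiveVersion(bool isGlsl, int versionFromDirective, int defaultVersion, int overrideVersion) {
    int version = (versionFromDirective != 0) ? versionFromDirective : defaultVersion;
    if (isGlsl && overrideVersion != 0)
        version = overrideVersion;
    return version;
}

int main() {
    std::cout << effectiveVersion(true, 450, 110, 0)   << "\n"; // 450: no override requested
    std::cout << effectiveVersion(true, 450, 110, 460) << "\n"; // 460: override replaces the directive
    std::cout << effectiveVersion(true, 0,   110, 0)   << "\n"; // 110: default used when no #version
}
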
diff --git a/thirdparty/glslang/glslang/MachineIndependent/SymbolTable.cpp b/thirdparty/glslang/glslang/MachineIndependent/SymbolTable.cpp
index a3ffa0c467..2f1b5ac3cb 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/SymbolTable.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/SymbolTable.cpp
@@ -383,7 +383,7 @@ TFunction::TFunction(const TFunction& copyOf) : TSymbol(copyOf)
for (unsigned int i = 0; i < copyOf.parameters.size(); ++i) {
TParameter param;
parameters.push_back(param);
- parameters.back().copyParam(copyOf.parameters[i]);
+ (void)parameters.back().copyParam(copyOf.parameters[i]);
}
extensions = nullptr;
diff --git a/thirdparty/glslang/glslang/MachineIndependent/SymbolTable.h b/thirdparty/glslang/glslang/MachineIndependent/SymbolTable.h
index 31312ecbaa..2e570bb3bc 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/SymbolTable.h
+++ b/thirdparty/glslang/glslang/MachineIndependent/SymbolTable.h
@@ -224,7 +224,7 @@ struct TParameter {
TString *name;
TType* type;
TIntermTyped* defaultValue;
- void copyParam(const TParameter& param)
+ TParameter& copyParam(const TParameter& param)
{
if (param.name)
name = NewPoolTString(param.name->c_str());
@@ -232,6 +232,7 @@ struct TParameter {
name = 0;
type = param.type->clone();
defaultValue = param.defaultValue;
+ return *this;
}
TBuiltInVariable getDeclaredBuiltIn() const { return type->getQualifier().declaredBuiltIn; }
};
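
Changing copyParam() to return TParameter& (with the matching call sites in ParseHelper.cpp and SymbolTable.cpp above) lets callers deep-copy a parameter into a freshly constructed temporary in one expression, so two parameter lists never alias the same type object. A standalone sketch of that pattern with illustrative stand-ins (not glslang's types):

#include <iostream>

struct Type {
    int components;
    Type* clone() const { return new Type(*this); } // deep copy, as TType::clone() does
};

struct Param {
    Type* type;
    Param() : type(nullptr) {}
    Param& copyFrom(const Param& other) {
        type = other.type->clone(); // copy the type instead of aliasing the pointer
        return *this;               // reference return enables the one-liner below
    }
};

int main() {
    Type vec4{4};
    Param original;
    original.type = &vec4;
    Param copy = Param().copyFrom(original); // analogous to TParameter().copyParam(p)
    std::cout << (copy.type != original.type) << "\n"; // 1: no shared ownership
    delete copy.type;
}
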
diff --git a/thirdparty/glslang/glslang/MachineIndependent/Versions.cpp b/thirdparty/glslang/glslang/MachineIndependent/Versions.cpp
index 8d96e0e104..a5fd107535 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/Versions.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/Versions.cpp
@@ -166,7 +166,8 @@ void TParseVersions::initializeExtensionBehavior()
} extensionData;
const extensionData exts[] = { {E_GL_EXT_ray_tracing, EShTargetSpv_1_4},
- {E_GL_NV_ray_tracing_motion_blur, EShTargetSpv_1_4}
+ {E_GL_NV_ray_tracing_motion_blur, EShTargetSpv_1_4},
+ {E_GL_EXT_mesh_shader, EShTargetSpv_1_4}
};
for (size_t ii = 0; ii < sizeof(exts) / sizeof(exts[0]); ii++) {
@@ -259,6 +260,8 @@ void TParseVersions::initializeExtensionBehavior()
extensionBehavior[E_GL_EXT_shader_8bit_storage] = EBhDisable;
extensionBehavior[E_GL_EXT_subgroup_uniform_control_flow] = EBhDisable;
+ extensionBehavior[E_GL_EXT_fragment_shader_barycentric] = EBhDisable;
+
// #line and #include
extensionBehavior[E_GL_GOOGLE_cpp_style_line_directive] = EBhDisable;
extensionBehavior[E_GL_GOOGLE_include_directive] = EBhDisable;
@@ -273,6 +276,7 @@ void TParseVersions::initializeExtensionBehavior()
extensionBehavior[E_GL_AMD_shader_image_load_store_lod] = EBhDisable;
extensionBehavior[E_GL_AMD_shader_fragment_mask] = EBhDisable;
extensionBehavior[E_GL_AMD_gpu_shader_half_float_fetch] = EBhDisable;
+ extensionBehavior[E_GL_AMD_shader_early_and_late_fragment_tests] = EBhDisable;
extensionBehavior[E_GL_INTEL_shader_integer_functions2] = EBhDisable;
@@ -334,13 +338,16 @@ void TParseVersions::initializeExtensionBehavior()
extensionBehavior[E_GL_EXT_ray_tracing] = EBhDisable;
extensionBehavior[E_GL_EXT_ray_query] = EBhDisable;
extensionBehavior[E_GL_EXT_ray_flags_primitive_culling] = EBhDisable;
+ extensionBehavior[E_GL_EXT_ray_cull_mask] = EBhDisable;
extensionBehavior[E_GL_EXT_blend_func_extended] = EBhDisable;
extensionBehavior[E_GL_EXT_shader_implicit_conversions] = EBhDisable;
extensionBehavior[E_GL_EXT_fragment_shading_rate] = EBhDisable;
- extensionBehavior[E_GL_EXT_shader_image_int64] = EBhDisable;
+ extensionBehavior[E_GL_EXT_shader_image_int64] = EBhDisable;
extensionBehavior[E_GL_EXT_terminate_invocation] = EBhDisable;
extensionBehavior[E_GL_EXT_shared_memory_block] = EBhDisable;
extensionBehavior[E_GL_EXT_spirv_intrinsics] = EBhDisable;
+ extensionBehavior[E_GL_EXT_mesh_shader] = EBhDisable;
+ extensionBehavior[E_GL_EXT_opacity_micromap] = EBhDisable;
// OVR extensions
extensionBehavior[E_GL_OVR_multiview] = EBhDisable;
@@ -505,7 +512,9 @@ void TParseVersions::getPreamble(std::string& preamble)
"#define GL_EXT_ray_tracing 1\n"
"#define GL_EXT_ray_query 1\n"
"#define GL_EXT_ray_flags_primitive_culling 1\n"
+ "#define GL_EXT_ray_cull_mask 1\n"
"#define GL_EXT_spirv_intrinsics 1\n"
+ "#define GL_EXT_mesh_shader 1\n"
"#define GL_AMD_shader_ballot 1\n"
"#define GL_AMD_shader_trinary_minmax 1\n"
@@ -552,6 +561,8 @@ void TParseVersions::getPreamble(std::string& preamble)
"#define GL_EXT_shader_atomic_float 1\n"
"#define GL_EXT_shader_atomic_float2 1\n"
+
+ "#define GL_EXT_fragment_shader_barycentric 1\n"
;
if (version >= 150) {
@@ -634,8 +645,8 @@ void TParseVersions::getPreamble(std::string& preamble)
case EShLangClosestHit: preamble += "#define GL_CLOSEST_HIT_SHADER_EXT 1 \n"; break;
case EShLangMiss: preamble += "#define GL_MISS_SHADER_EXT 1 \n"; break;
case EShLangCallable: preamble += "#define GL_CALLABLE_SHADER_EXT 1 \n"; break;
- case EShLangTaskNV: preamble += "#define GL_TASK_SHADER_NV 1 \n"; break;
- case EShLangMeshNV: preamble += "#define GL_MESH_SHADER_NV 1 \n"; break;
+ case EShLangTask: preamble += "#define GL_TASK_SHADER_NV 1 \n"; break;
+ case EShLangMesh: preamble += "#define GL_MESH_SHADER_NV 1 \n"; break;
default: break;
}
}
@@ -661,8 +672,8 @@ const char* StageName(EShLanguage stage)
case EShLangClosestHit: return "closest-hit";
case EShLangMiss: return "miss";
case EShLangCallable: return "callable";
- case EShLangMeshNV: return "mesh";
- case EShLangTaskNV: return "task";
+ case EShLangMesh: return "mesh";
+ case EShLangTask: return "task";
#endif
default: return "unknown stage";
}
@@ -1053,10 +1064,22 @@ void TParseVersions::checkExtensionStage(const TSourceLoc& loc, const char * con
{
// GL_NV_mesh_shader extension is only allowed in task/mesh shaders
if (strcmp(extension, "GL_NV_mesh_shader") == 0) {
- requireStage(loc, (EShLanguageMask)(EShLangTaskNVMask | EShLangMeshNVMask | EShLangFragmentMask),
+ requireStage(loc, (EShLanguageMask)(EShLangTaskMask | EShLangMeshMask | EShLangFragmentMask),
"#extension GL_NV_mesh_shader");
profileRequires(loc, ECoreProfile, 450, 0, "#extension GL_NV_mesh_shader");
profileRequires(loc, EEsProfile, 320, 0, "#extension GL_NV_mesh_shader");
+ if (extensionTurnedOn(E_GL_EXT_mesh_shader)) {
+ error(loc, "GL_EXT_mesh_shader is already turned on, and not allowed with", "#extension", extension);
+ }
+ }
+ else if (strcmp(extension, "GL_EXT_mesh_shader") == 0) {
+ requireStage(loc, (EShLanguageMask)(EShLangTaskMask | EShLangMeshMask | EShLangFragmentMask),
+ "#extension GL_EXT_mesh_shader");
+ profileRequires(loc, ECoreProfile, 450, 0, "#extension GL_EXT_mesh_shader");
+ profileRequires(loc, EEsProfile, 320, 0, "#extension GL_EXT_mesh_shader");
+ if (extensionTurnedOn(E_GL_NV_mesh_shader)) {
+ error(loc, "GL_NV_mesh_shader is already turned on, and not allowed with", "#extension", extension);
+ }
}
}
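
checkExtensionStage() above now treats GL_NV_mesh_shader and GL_EXT_mesh_shader as mutually exclusive: enabling one reports an error if the other is already turned on. A small standalone sketch of that rule against a simplified extension set (illustrative only, not glslang's extension-behavior table):

#include <iostream>
#include <set>
#include <string>

static bool enableMeshExtension(std::set<std::string>& enabled, const std::string& ext, std::string& err) {
    const std::string other =
        (ext == "GL_NV_mesh_shader") ? "GL_EXT_mesh_shader" : "GL_NV_mesh_shader";
    if (enabled.count(other)) {
        err = other + " is already turned on, and not allowed with " + ext;
        return false;
    }
    enabled.insert(ext);
    return true;
}

int main() {
    std::set<std::string> enabled;
    std::string err;
    enableMeshExtension(enabled, "GL_EXT_mesh_shader", err);
    if (!enableMeshExtension(enabled, "GL_NV_mesh_shader", err))
        std::cout << err << "\n"; // rejected: the EXT variant is already on
}
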
diff --git a/thirdparty/glslang/glslang/MachineIndependent/Versions.h b/thirdparty/glslang/glslang/MachineIndependent/Versions.h
index 96a6e1fc52..f06abdd6f0 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/Versions.h
+++ b/thirdparty/glslang/glslang/MachineIndependent/Versions.h
@@ -201,6 +201,7 @@ const char* const E_GL_EXT_debug_printf = "GL_EXT_debug_prin
const char* const E_GL_EXT_ray_tracing = "GL_EXT_ray_tracing";
const char* const E_GL_EXT_ray_query = "GL_EXT_ray_query";
const char* const E_GL_EXT_ray_flags_primitive_culling = "GL_EXT_ray_flags_primitive_culling";
+const char* const E_GL_EXT_ray_cull_mask = "GL_EXT_ray_cull_mask";
const char* const E_GL_EXT_blend_func_extended = "GL_EXT_blend_func_extended";
const char* const E_GL_EXT_shader_implicit_conversions = "GL_EXT_shader_implicit_conversions";
const char* const E_GL_EXT_fragment_shading_rate = "GL_EXT_fragment_shading_rate";
@@ -209,12 +210,19 @@ const char* const E_GL_EXT_null_initializer = "GL_EXT_null_initi
const char* const E_GL_EXT_shared_memory_block = "GL_EXT_shared_memory_block";
const char* const E_GL_EXT_subgroup_uniform_control_flow = "GL_EXT_subgroup_uniform_control_flow";
const char* const E_GL_EXT_spirv_intrinsics = "GL_EXT_spirv_intrinsics";
+const char* const E_GL_EXT_fragment_shader_barycentric = "GL_EXT_fragment_shader_barycentric";
+const char* const E_GL_EXT_mesh_shader = "GL_EXT_mesh_shader";
+const char* const E_GL_EXT_opacity_micromap = "GL_EXT_opacity_micromap";
// Arrays of extensions for the above viewportEXTs duplications
const char* const post_depth_coverageEXTs[] = { E_GL_ARB_post_depth_coverage, E_GL_EXT_post_depth_coverage };
const int Num_post_depth_coverageEXTs = sizeof(post_depth_coverageEXTs) / sizeof(post_depth_coverageEXTs[0]);
+// Array of extensions to cover both extensions providing ray tracing capabilities.
+const char* const ray_tracing_EXTs[] = { E_GL_EXT_ray_query, E_GL_EXT_ray_tracing };
+const int Num_ray_tracing_EXTs = sizeof(ray_tracing_EXTs) / sizeof(ray_tracing_EXTs[0]);
+
// OVR extensions
const char* const E_GL_OVR_multiview = "GL_OVR_multiview";
const char* const E_GL_OVR_multiview2 = "GL_OVR_multiview2";
@@ -236,6 +244,7 @@ const char* const E_GL_AMD_gpu_shader_int16 = "GL_AMD_gpu_sh
const char* const E_GL_AMD_shader_image_load_store_lod = "GL_AMD_shader_image_load_store_lod";
const char* const E_GL_AMD_shader_fragment_mask = "GL_AMD_shader_fragment_mask";
const char* const E_GL_AMD_gpu_shader_half_float_fetch = "GL_AMD_gpu_shader_half_float_fetch";
+const char* const E_GL_AMD_shader_early_and_late_fragment_tests = "GL_AMD_shader_early_and_late_fragment_tests";
const char* const E_GL_INTEL_shader_integer_functions2 = "GL_INTEL_shader_integer_functions2";
@@ -281,7 +290,7 @@ const char* const E_GL_EXT_tessellation_shader = "GL_EXT_tessel
const char* const E_GL_EXT_tessellation_point_size = "GL_EXT_tessellation_point_size";
const char* const E_GL_EXT_texture_buffer = "GL_EXT_texture_buffer";
const char* const E_GL_EXT_texture_cube_map_array = "GL_EXT_texture_cube_map_array";
-const char* const E_GL_EXT_shader_integer_mix = "GL_EXT_shader_integer_mix";
+const char* const E_GL_EXT_shader_integer_mix = "GL_EXT_shader_integer_mix";
// OES matching AEP
const char* const E_GL_OES_geometry_shader = "GL_OES_geometry_shader";
@@ -342,6 +351,9 @@ const int Num_AEP_texture_buffer = sizeof(AEP_texture_buffer)/sizeof(AEP_texture
const char* const AEP_texture_cube_map_array[] = { E_GL_EXT_texture_cube_map_array, E_GL_OES_texture_cube_map_array };
const int Num_AEP_texture_cube_map_array = sizeof(AEP_texture_cube_map_array)/sizeof(AEP_texture_cube_map_array[0]);
+const char* const AEP_mesh_shader[] = { E_GL_NV_mesh_shader, E_GL_EXT_mesh_shader };
+const int Num_AEP_mesh_shader = sizeof(AEP_mesh_shader)/sizeof(AEP_mesh_shader[0]);
+
} // end namespace glslang
#endif // _VERSIONS_INCLUDED_
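
The AEP_mesh_shader / Num_AEP_mesh_shader pair added above follows the existing "array of alternative extensions" convention (like post_depth_coverageEXTs), so callers such as requireExtensions() and extensionsTurnedOn() accept a feature when any listed extension is enabled. A standalone sketch of that lookup pattern (names are illustrative, not glslang's API):

#include <iostream>
#include <string>

static const char* const kMeshShaderExts[] = { "GL_NV_mesh_shader", "GL_EXT_mesh_shader" };
static const int kNumMeshShaderExts = sizeof(kMeshShaderExts) / sizeof(kMeshShaderExts[0]);

// True if any of the listed alternatives matches the extension that is turned on.
static bool anyExtensionOn(int num, const char* const exts[], const std::string& turnedOn) {
    for (int i = 0; i < num; ++i)
        if (turnedOn == exts[i])
            return true;
    return false;
}

int main() {
    std::cout << anyExtensionOn(kNumMeshShaderExts, kMeshShaderExts, "GL_EXT_mesh_shader") << "\n";    // 1
    std::cout << anyExtensionOn(kNumMeshShaderExts, kMeshShaderExts, "GL_ARB_compute_shader") << "\n"; // 0
}
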
diff --git a/thirdparty/glslang/glslang/MachineIndependent/glslang.y b/thirdparty/glslang/glslang/MachineIndependent/glslang.y
index df53eb5bd8..35242f2157 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/glslang.y
+++ b/thirdparty/glslang/glslang/MachineIndependent/glslang.y
@@ -151,7 +151,7 @@ extern int yylex(YYSTYPE*, TParseContext&);
%parse-param {glslang::TParseContext* pParseContext}
%lex-param {parseContext}
-%pure-parser // enable thread safety
+%define api.pure // enable thread safety
%expect 1 // One shift reduce conflict because of if | else
%token <lex> CONST BOOL INT UINT FLOAT
@@ -315,7 +315,7 @@ extern int yylex(YYSTYPE*, TParseContext&);
%token <lex> PATCH SAMPLE NONUNIFORM
%token <lex> COHERENT VOLATILE RESTRICT READONLY WRITEONLY DEVICECOHERENT QUEUEFAMILYCOHERENT WORKGROUPCOHERENT
%token <lex> SUBGROUPCOHERENT NONPRIVATE SHADERCALLCOHERENT
-%token <lex> NOPERSPECTIVE EXPLICITINTERPAMD PERVERTEXNV PERPRIMITIVENV PERVIEWNV PERTASKNV
+%token <lex> NOPERSPECTIVE EXPLICITINTERPAMD PERVERTEXEXT PERVERTEXNV PERPRIMITIVENV PERVIEWNV PERTASKNV PERPRIMITIVEEXT TASKPAYLOADWORKGROUPEXT
%token <lex> PRECISE
@@ -798,7 +798,7 @@ conditional_expression
parseContext.rValueErrorCheck($5.loc, ":", $6);
$$ = parseContext.intermediate.addSelection($1, $4, $6, $2.loc);
if ($$ == 0) {
- parseContext.binaryOpError($2.loc, ":", $4->getCompleteString(), $6->getCompleteString());
+ parseContext.binaryOpError($2.loc, ":", $4->getCompleteString(parseContext.intermediate.getEnhancedMsgs()), $6->getCompleteString(parseContext.intermediate.getEnhancedMsgs()));
$$ = $6;
}
}
@@ -815,7 +815,7 @@ assignment_expression
parseContext.rValueErrorCheck($2.loc, "assign", $3);
$$ = parseContext.addAssign($2.loc, $2.op, $1, $3);
if ($$ == 0) {
- parseContext.assignError($2.loc, "assign", $1->getCompleteString(), $3->getCompleteString());
+ parseContext.assignError($2.loc, "assign", $1->getCompleteString(parseContext.intermediate.getEnhancedMsgs()), $3->getCompleteString(parseContext.intermediate.getEnhancedMsgs()));
$$ = $1;
}
}
@@ -877,7 +877,7 @@ expression
parseContext.samplerConstructorLocationCheck($2.loc, ",", $3);
$$ = parseContext.intermediate.addComma($1, $3, $2.loc);
if ($$ == 0) {
- parseContext.binaryOpError($2.loc, ",", $1->getCompleteString(), $3->getCompleteString());
+ parseContext.binaryOpError($2.loc, ",", $1->getCompleteString(parseContext.intermediate.getEnhancedMsgs()), $3->getCompleteString(parseContext.intermediate.getEnhancedMsgs()));
$$ = $3;
}
}
@@ -1290,27 +1290,45 @@ interpolation_qualifier
$$.init($1.loc);
$$.qualifier.pervertexNV = true;
}
+ | PERVERTEXEXT {
+ parseContext.globalCheck($1.loc, "pervertexEXT");
+ parseContext.profileRequires($1.loc, ECoreProfile, 0, E_GL_EXT_fragment_shader_barycentric, "fragment shader barycentric");
+ parseContext.profileRequires($1.loc, ECompatibilityProfile, 0, E_GL_EXT_fragment_shader_barycentric, "fragment shader barycentric");
+ parseContext.profileRequires($1.loc, EEsProfile, 0, E_GL_EXT_fragment_shader_barycentric, "fragment shader barycentric");
+ $$.init($1.loc);
+ $$.qualifier.pervertexEXT = true;
+ }
| PERPRIMITIVENV {
// No need for profile version or extension check. Shader stage already checks both.
parseContext.globalCheck($1.loc, "perprimitiveNV");
- parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangFragmentMask | EShLangMeshNVMask), "perprimitiveNV");
+ parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangFragmentMask | EShLangMeshMask), "perprimitiveNV");
// Fragment shader stage doesn't check for extension. So we explicitly add below extension check.
if (parseContext.language == EShLangFragment)
parseContext.requireExtensions($1.loc, 1, &E_GL_NV_mesh_shader, "perprimitiveNV");
$$.init($1.loc);
$$.qualifier.perPrimitiveNV = true;
}
+ | PERPRIMITIVEEXT {
+ // No need for profile version or extension check. Shader stage already checks both.
+ parseContext.globalCheck($1.loc, "perprimitiveEXT");
+ parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangFragmentMask | EShLangMeshMask), "perprimitiveEXT");
+ // Fragment shader stage doesn't check for extension. So we explicitly add below extension check.
+ if (parseContext.language == EShLangFragment)
+ parseContext.requireExtensions($1.loc, 1, &E_GL_EXT_mesh_shader, "perprimitiveEXT");
+ $$.init($1.loc);
+ $$.qualifier.perPrimitiveNV = true;
+ }
| PERVIEWNV {
// No need for profile version or extension check. Shader stage already checks both.
parseContext.globalCheck($1.loc, "perviewNV");
- parseContext.requireStage($1.loc, EShLangMeshNV, "perviewNV");
+ parseContext.requireStage($1.loc, EShLangMesh, "perviewNV");
$$.init($1.loc);
$$.qualifier.perViewNV = true;
}
| PERTASKNV {
// No need for profile version or extension check. Shader stage already checks both.
parseContext.globalCheck($1.loc, "taskNV");
- parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangTaskNVMask | EShLangMeshNVMask), "taskNV");
+ parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangTaskMask | EShLangMeshMask), "taskNV");
$$.init($1.loc);
$$.qualifier.perTaskNV = true;
}
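
For context only (not part of this patch): a minimal GLSL fragment-shader sketch that exercises the pervertexEXT and perprimitiveEXT qualifiers the rules above accept, assuming GL_EXT_fragment_shader_barycentric and GL_EXT_mesh_shader are both enabled; names and locations are illustrative.

    #version 450
    #extension GL_EXT_fragment_shader_barycentric : require
    #extension GL_EXT_mesh_shader : require
    // per-vertex input, indexed per provoking vertex and blended with gl_BaryCoordEXT
    layout(location = 0) pervertexEXT in vec3 vertexColor[3];
    // per-primitive value written by the mesh shader (flat by definition)
    layout(location = 3) perprimitiveEXT in uint primitiveId;
    layout(location = 0) out vec4 fragColor;
    void main() {
        vec3 c = gl_BaryCoordEXT.x * vertexColor[0]
               + gl_BaryCoordEXT.y * vertexColor[1]
               + gl_BaryCoordEXT.z * vertexColor[2];
        fragColor = vec4(c, float(primitiveId & 1u));
    }
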
@@ -1461,7 +1479,7 @@ storage_qualifier
parseContext.globalCheck($1.loc, "shared");
parseContext.profileRequires($1.loc, ECoreProfile | ECompatibilityProfile, 430, E_GL_ARB_compute_shader, "shared");
parseContext.profileRequires($1.loc, EEsProfile, 310, 0, "shared");
- parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangComputeMask | EShLangMeshNVMask | EShLangTaskNVMask), "shared");
+ parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangComputeMask | EShLangMeshMask | EShLangTaskMask), "shared");
$$.init($1.loc);
$$.qualifier.storage = EvqShared;
}
@@ -1648,6 +1666,13 @@ storage_qualifier
parseContext.unimplemented($1.loc, "subroutine");
$$.init($1.loc);
}
+ | TASKPAYLOADWORKGROUPEXT {
+ // No need for profile version or extension check. Shader stage already checks both.
+ parseContext.globalCheck($1.loc, "taskPayloadSharedEXT");
+ parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangTaskMask | EShLangMeshMask), "taskPayloadSharedEXT ");
+ $$.init($1.loc);
+ $$.qualifier.storage = EvqtaskPayloadSharedEXT;
+ }
;
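
Likewise, a minimal task-shader sketch (illustrative only, under GL_EXT_mesh_shader) using the taskPayloadSharedEXT storage qualifier handled above; the corresponding mesh shader would read the same payload through an identically declared taskPayloadSharedEXT variable.

    #version 450
    #extension GL_EXT_mesh_shader : require
    layout(local_size_x = 1) in;
    struct Payload { uint meshletIndex; };
    // shared between the task workgroup and the mesh workgroups it launches
    taskPayloadSharedEXT Payload payload;
    void main() {
        payload.meshletIndex = gl_WorkGroupID.x;
        EmitMeshTasksEXT(1, 1, 1);
    }
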
@@ -3718,7 +3743,7 @@ compound_statement
}
RIGHT_BRACE {
if ($3 && $3->getAsAggregate())
- $3->getAsAggregate()->setOperator(EOpSequence);
+ $3->getAsAggregate()->setOperator(parseContext.intermediate.getDebugInfo() ? EOpScope : EOpSequence);
$$ = $3;
}
;
diff --git a/thirdparty/glslang/glslang/MachineIndependent/glslang_tab.cpp b/thirdparty/glslang/glslang/MachineIndependent/glslang_tab.cpp
index 5ba6a6d495..7ca3e711b2 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/glslang_tab.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/glslang_tab.cpp
@@ -571,142 +571,145 @@ enum yysymbol_kind_t
YYSYMBOL_SHADERCALLCOHERENT = 447, /* SHADERCALLCOHERENT */
YYSYMBOL_NOPERSPECTIVE = 448, /* NOPERSPECTIVE */
YYSYMBOL_EXPLICITINTERPAMD = 449, /* EXPLICITINTERPAMD */
- YYSYMBOL_PERVERTEXNV = 450, /* PERVERTEXNV */
- YYSYMBOL_PERPRIMITIVENV = 451, /* PERPRIMITIVENV */
- YYSYMBOL_PERVIEWNV = 452, /* PERVIEWNV */
- YYSYMBOL_PERTASKNV = 453, /* PERTASKNV */
- YYSYMBOL_PRECISE = 454, /* PRECISE */
- YYSYMBOL_YYACCEPT = 455, /* $accept */
- YYSYMBOL_variable_identifier = 456, /* variable_identifier */
- YYSYMBOL_primary_expression = 457, /* primary_expression */
- YYSYMBOL_postfix_expression = 458, /* postfix_expression */
- YYSYMBOL_integer_expression = 459, /* integer_expression */
- YYSYMBOL_function_call = 460, /* function_call */
- YYSYMBOL_function_call_or_method = 461, /* function_call_or_method */
- YYSYMBOL_function_call_generic = 462, /* function_call_generic */
- YYSYMBOL_function_call_header_no_parameters = 463, /* function_call_header_no_parameters */
- YYSYMBOL_function_call_header_with_parameters = 464, /* function_call_header_with_parameters */
- YYSYMBOL_function_call_header = 465, /* function_call_header */
- YYSYMBOL_function_identifier = 466, /* function_identifier */
- YYSYMBOL_unary_expression = 467, /* unary_expression */
- YYSYMBOL_unary_operator = 468, /* unary_operator */
- YYSYMBOL_multiplicative_expression = 469, /* multiplicative_expression */
- YYSYMBOL_additive_expression = 470, /* additive_expression */
- YYSYMBOL_shift_expression = 471, /* shift_expression */
- YYSYMBOL_relational_expression = 472, /* relational_expression */
- YYSYMBOL_equality_expression = 473, /* equality_expression */
- YYSYMBOL_and_expression = 474, /* and_expression */
- YYSYMBOL_exclusive_or_expression = 475, /* exclusive_or_expression */
- YYSYMBOL_inclusive_or_expression = 476, /* inclusive_or_expression */
- YYSYMBOL_logical_and_expression = 477, /* logical_and_expression */
- YYSYMBOL_logical_xor_expression = 478, /* logical_xor_expression */
- YYSYMBOL_logical_or_expression = 479, /* logical_or_expression */
- YYSYMBOL_conditional_expression = 480, /* conditional_expression */
- YYSYMBOL_481_1 = 481, /* $@1 */
- YYSYMBOL_assignment_expression = 482, /* assignment_expression */
- YYSYMBOL_assignment_operator = 483, /* assignment_operator */
- YYSYMBOL_expression = 484, /* expression */
- YYSYMBOL_constant_expression = 485, /* constant_expression */
- YYSYMBOL_declaration = 486, /* declaration */
- YYSYMBOL_block_structure = 487, /* block_structure */
- YYSYMBOL_488_2 = 488, /* $@2 */
- YYSYMBOL_identifier_list = 489, /* identifier_list */
- YYSYMBOL_function_prototype = 490, /* function_prototype */
- YYSYMBOL_function_declarator = 491, /* function_declarator */
- YYSYMBOL_function_header_with_parameters = 492, /* function_header_with_parameters */
- YYSYMBOL_function_header = 493, /* function_header */
- YYSYMBOL_parameter_declarator = 494, /* parameter_declarator */
- YYSYMBOL_parameter_declaration = 495, /* parameter_declaration */
- YYSYMBOL_parameter_type_specifier = 496, /* parameter_type_specifier */
- YYSYMBOL_init_declarator_list = 497, /* init_declarator_list */
- YYSYMBOL_single_declaration = 498, /* single_declaration */
- YYSYMBOL_fully_specified_type = 499, /* fully_specified_type */
- YYSYMBOL_invariant_qualifier = 500, /* invariant_qualifier */
- YYSYMBOL_interpolation_qualifier = 501, /* interpolation_qualifier */
- YYSYMBOL_layout_qualifier = 502, /* layout_qualifier */
- YYSYMBOL_layout_qualifier_id_list = 503, /* layout_qualifier_id_list */
- YYSYMBOL_layout_qualifier_id = 504, /* layout_qualifier_id */
- YYSYMBOL_precise_qualifier = 505, /* precise_qualifier */
- YYSYMBOL_type_qualifier = 506, /* type_qualifier */
- YYSYMBOL_single_type_qualifier = 507, /* single_type_qualifier */
- YYSYMBOL_storage_qualifier = 508, /* storage_qualifier */
- YYSYMBOL_non_uniform_qualifier = 509, /* non_uniform_qualifier */
- YYSYMBOL_type_name_list = 510, /* type_name_list */
- YYSYMBOL_type_specifier = 511, /* type_specifier */
- YYSYMBOL_array_specifier = 512, /* array_specifier */
- YYSYMBOL_type_parameter_specifier_opt = 513, /* type_parameter_specifier_opt */
- YYSYMBOL_type_parameter_specifier = 514, /* type_parameter_specifier */
- YYSYMBOL_type_parameter_specifier_list = 515, /* type_parameter_specifier_list */
- YYSYMBOL_type_specifier_nonarray = 516, /* type_specifier_nonarray */
- YYSYMBOL_precision_qualifier = 517, /* precision_qualifier */
- YYSYMBOL_struct_specifier = 518, /* struct_specifier */
- YYSYMBOL_519_3 = 519, /* $@3 */
- YYSYMBOL_520_4 = 520, /* $@4 */
- YYSYMBOL_struct_declaration_list = 521, /* struct_declaration_list */
- YYSYMBOL_struct_declaration = 522, /* struct_declaration */
- YYSYMBOL_struct_declarator_list = 523, /* struct_declarator_list */
- YYSYMBOL_struct_declarator = 524, /* struct_declarator */
- YYSYMBOL_initializer = 525, /* initializer */
- YYSYMBOL_initializer_list = 526, /* initializer_list */
- YYSYMBOL_declaration_statement = 527, /* declaration_statement */
- YYSYMBOL_statement = 528, /* statement */
- YYSYMBOL_simple_statement = 529, /* simple_statement */
- YYSYMBOL_demote_statement = 530, /* demote_statement */
- YYSYMBOL_compound_statement = 531, /* compound_statement */
- YYSYMBOL_532_5 = 532, /* $@5 */
- YYSYMBOL_533_6 = 533, /* $@6 */
- YYSYMBOL_statement_no_new_scope = 534, /* statement_no_new_scope */
- YYSYMBOL_statement_scoped = 535, /* statement_scoped */
- YYSYMBOL_536_7 = 536, /* $@7 */
- YYSYMBOL_537_8 = 537, /* $@8 */
- YYSYMBOL_compound_statement_no_new_scope = 538, /* compound_statement_no_new_scope */
- YYSYMBOL_statement_list = 539, /* statement_list */
- YYSYMBOL_expression_statement = 540, /* expression_statement */
- YYSYMBOL_selection_statement = 541, /* selection_statement */
- YYSYMBOL_selection_statement_nonattributed = 542, /* selection_statement_nonattributed */
- YYSYMBOL_selection_rest_statement = 543, /* selection_rest_statement */
- YYSYMBOL_condition = 544, /* condition */
- YYSYMBOL_switch_statement = 545, /* switch_statement */
- YYSYMBOL_switch_statement_nonattributed = 546, /* switch_statement_nonattributed */
- YYSYMBOL_547_9 = 547, /* $@9 */
- YYSYMBOL_switch_statement_list = 548, /* switch_statement_list */
- YYSYMBOL_case_label = 549, /* case_label */
- YYSYMBOL_iteration_statement = 550, /* iteration_statement */
- YYSYMBOL_iteration_statement_nonattributed = 551, /* iteration_statement_nonattributed */
- YYSYMBOL_552_10 = 552, /* $@10 */
- YYSYMBOL_553_11 = 553, /* $@11 */
- YYSYMBOL_554_12 = 554, /* $@12 */
- YYSYMBOL_for_init_statement = 555, /* for_init_statement */
- YYSYMBOL_conditionopt = 556, /* conditionopt */
- YYSYMBOL_for_rest_statement = 557, /* for_rest_statement */
- YYSYMBOL_jump_statement = 558, /* jump_statement */
- YYSYMBOL_translation_unit = 559, /* translation_unit */
- YYSYMBOL_external_declaration = 560, /* external_declaration */
- YYSYMBOL_function_definition = 561, /* function_definition */
- YYSYMBOL_562_13 = 562, /* $@13 */
- YYSYMBOL_attribute = 563, /* attribute */
- YYSYMBOL_attribute_list = 564, /* attribute_list */
- YYSYMBOL_single_attribute = 565, /* single_attribute */
- YYSYMBOL_spirv_requirements_list = 566, /* spirv_requirements_list */
- YYSYMBOL_spirv_requirements_parameter = 567, /* spirv_requirements_parameter */
- YYSYMBOL_spirv_extension_list = 568, /* spirv_extension_list */
- YYSYMBOL_spirv_capability_list = 569, /* spirv_capability_list */
- YYSYMBOL_spirv_execution_mode_qualifier = 570, /* spirv_execution_mode_qualifier */
- YYSYMBOL_spirv_execution_mode_parameter_list = 571, /* spirv_execution_mode_parameter_list */
- YYSYMBOL_spirv_execution_mode_parameter = 572, /* spirv_execution_mode_parameter */
- YYSYMBOL_spirv_execution_mode_id_parameter_list = 573, /* spirv_execution_mode_id_parameter_list */
- YYSYMBOL_spirv_storage_class_qualifier = 574, /* spirv_storage_class_qualifier */
- YYSYMBOL_spirv_decorate_qualifier = 575, /* spirv_decorate_qualifier */
- YYSYMBOL_spirv_decorate_parameter_list = 576, /* spirv_decorate_parameter_list */
- YYSYMBOL_spirv_decorate_parameter = 577, /* spirv_decorate_parameter */
- YYSYMBOL_spirv_decorate_id_parameter_list = 578, /* spirv_decorate_id_parameter_list */
- YYSYMBOL_spirv_decorate_string_parameter_list = 579, /* spirv_decorate_string_parameter_list */
- YYSYMBOL_spirv_type_specifier = 580, /* spirv_type_specifier */
- YYSYMBOL_spirv_type_parameter_list = 581, /* spirv_type_parameter_list */
- YYSYMBOL_spirv_type_parameter = 582, /* spirv_type_parameter */
- YYSYMBOL_spirv_instruction_qualifier = 583, /* spirv_instruction_qualifier */
- YYSYMBOL_spirv_instruction_qualifier_list = 584, /* spirv_instruction_qualifier_list */
- YYSYMBOL_spirv_instruction_qualifier_id = 585 /* spirv_instruction_qualifier_id */
+ YYSYMBOL_PERVERTEXEXT = 450, /* PERVERTEXEXT */
+ YYSYMBOL_PERVERTEXNV = 451, /* PERVERTEXNV */
+ YYSYMBOL_PERPRIMITIVENV = 452, /* PERPRIMITIVENV */
+ YYSYMBOL_PERVIEWNV = 453, /* PERVIEWNV */
+ YYSYMBOL_PERTASKNV = 454, /* PERTASKNV */
+ YYSYMBOL_PERPRIMITIVEEXT = 455, /* PERPRIMITIVEEXT */
+ YYSYMBOL_TASKPAYLOADWORKGROUPEXT = 456, /* TASKPAYLOADWORKGROUPEXT */
+ YYSYMBOL_PRECISE = 457, /* PRECISE */
+ YYSYMBOL_YYACCEPT = 458, /* $accept */
+ YYSYMBOL_variable_identifier = 459, /* variable_identifier */
+ YYSYMBOL_primary_expression = 460, /* primary_expression */
+ YYSYMBOL_postfix_expression = 461, /* postfix_expression */
+ YYSYMBOL_integer_expression = 462, /* integer_expression */
+ YYSYMBOL_function_call = 463, /* function_call */
+ YYSYMBOL_function_call_or_method = 464, /* function_call_or_method */
+ YYSYMBOL_function_call_generic = 465, /* function_call_generic */
+ YYSYMBOL_function_call_header_no_parameters = 466, /* function_call_header_no_parameters */
+ YYSYMBOL_function_call_header_with_parameters = 467, /* function_call_header_with_parameters */
+ YYSYMBOL_function_call_header = 468, /* function_call_header */
+ YYSYMBOL_function_identifier = 469, /* function_identifier */
+ YYSYMBOL_unary_expression = 470, /* unary_expression */
+ YYSYMBOL_unary_operator = 471, /* unary_operator */
+ YYSYMBOL_multiplicative_expression = 472, /* multiplicative_expression */
+ YYSYMBOL_additive_expression = 473, /* additive_expression */
+ YYSYMBOL_shift_expression = 474, /* shift_expression */
+ YYSYMBOL_relational_expression = 475, /* relational_expression */
+ YYSYMBOL_equality_expression = 476, /* equality_expression */
+ YYSYMBOL_and_expression = 477, /* and_expression */
+ YYSYMBOL_exclusive_or_expression = 478, /* exclusive_or_expression */
+ YYSYMBOL_inclusive_or_expression = 479, /* inclusive_or_expression */
+ YYSYMBOL_logical_and_expression = 480, /* logical_and_expression */
+ YYSYMBOL_logical_xor_expression = 481, /* logical_xor_expression */
+ YYSYMBOL_logical_or_expression = 482, /* logical_or_expression */
+ YYSYMBOL_conditional_expression = 483, /* conditional_expression */
+ YYSYMBOL_484_1 = 484, /* $@1 */
+ YYSYMBOL_assignment_expression = 485, /* assignment_expression */
+ YYSYMBOL_assignment_operator = 486, /* assignment_operator */
+ YYSYMBOL_expression = 487, /* expression */
+ YYSYMBOL_constant_expression = 488, /* constant_expression */
+ YYSYMBOL_declaration = 489, /* declaration */
+ YYSYMBOL_block_structure = 490, /* block_structure */
+ YYSYMBOL_491_2 = 491, /* $@2 */
+ YYSYMBOL_identifier_list = 492, /* identifier_list */
+ YYSYMBOL_function_prototype = 493, /* function_prototype */
+ YYSYMBOL_function_declarator = 494, /* function_declarator */
+ YYSYMBOL_function_header_with_parameters = 495, /* function_header_with_parameters */
+ YYSYMBOL_function_header = 496, /* function_header */
+ YYSYMBOL_parameter_declarator = 497, /* parameter_declarator */
+ YYSYMBOL_parameter_declaration = 498, /* parameter_declaration */
+ YYSYMBOL_parameter_type_specifier = 499, /* parameter_type_specifier */
+ YYSYMBOL_init_declarator_list = 500, /* init_declarator_list */
+ YYSYMBOL_single_declaration = 501, /* single_declaration */
+ YYSYMBOL_fully_specified_type = 502, /* fully_specified_type */
+ YYSYMBOL_invariant_qualifier = 503, /* invariant_qualifier */
+ YYSYMBOL_interpolation_qualifier = 504, /* interpolation_qualifier */
+ YYSYMBOL_layout_qualifier = 505, /* layout_qualifier */
+ YYSYMBOL_layout_qualifier_id_list = 506, /* layout_qualifier_id_list */
+ YYSYMBOL_layout_qualifier_id = 507, /* layout_qualifier_id */
+ YYSYMBOL_precise_qualifier = 508, /* precise_qualifier */
+ YYSYMBOL_type_qualifier = 509, /* type_qualifier */
+ YYSYMBOL_single_type_qualifier = 510, /* single_type_qualifier */
+ YYSYMBOL_storage_qualifier = 511, /* storage_qualifier */
+ YYSYMBOL_non_uniform_qualifier = 512, /* non_uniform_qualifier */
+ YYSYMBOL_type_name_list = 513, /* type_name_list */
+ YYSYMBOL_type_specifier = 514, /* type_specifier */
+ YYSYMBOL_array_specifier = 515, /* array_specifier */
+ YYSYMBOL_type_parameter_specifier_opt = 516, /* type_parameter_specifier_opt */
+ YYSYMBOL_type_parameter_specifier = 517, /* type_parameter_specifier */
+ YYSYMBOL_type_parameter_specifier_list = 518, /* type_parameter_specifier_list */
+ YYSYMBOL_type_specifier_nonarray = 519, /* type_specifier_nonarray */
+ YYSYMBOL_precision_qualifier = 520, /* precision_qualifier */
+ YYSYMBOL_struct_specifier = 521, /* struct_specifier */
+ YYSYMBOL_522_3 = 522, /* $@3 */
+ YYSYMBOL_523_4 = 523, /* $@4 */
+ YYSYMBOL_struct_declaration_list = 524, /* struct_declaration_list */
+ YYSYMBOL_struct_declaration = 525, /* struct_declaration */
+ YYSYMBOL_struct_declarator_list = 526, /* struct_declarator_list */
+ YYSYMBOL_struct_declarator = 527, /* struct_declarator */
+ YYSYMBOL_initializer = 528, /* initializer */
+ YYSYMBOL_initializer_list = 529, /* initializer_list */
+ YYSYMBOL_declaration_statement = 530, /* declaration_statement */
+ YYSYMBOL_statement = 531, /* statement */
+ YYSYMBOL_simple_statement = 532, /* simple_statement */
+ YYSYMBOL_demote_statement = 533, /* demote_statement */
+ YYSYMBOL_compound_statement = 534, /* compound_statement */
+ YYSYMBOL_535_5 = 535, /* $@5 */
+ YYSYMBOL_536_6 = 536, /* $@6 */
+ YYSYMBOL_statement_no_new_scope = 537, /* statement_no_new_scope */
+ YYSYMBOL_statement_scoped = 538, /* statement_scoped */
+ YYSYMBOL_539_7 = 539, /* $@7 */
+ YYSYMBOL_540_8 = 540, /* $@8 */
+ YYSYMBOL_compound_statement_no_new_scope = 541, /* compound_statement_no_new_scope */
+ YYSYMBOL_statement_list = 542, /* statement_list */
+ YYSYMBOL_expression_statement = 543, /* expression_statement */
+ YYSYMBOL_selection_statement = 544, /* selection_statement */
+ YYSYMBOL_selection_statement_nonattributed = 545, /* selection_statement_nonattributed */
+ YYSYMBOL_selection_rest_statement = 546, /* selection_rest_statement */
+ YYSYMBOL_condition = 547, /* condition */
+ YYSYMBOL_switch_statement = 548, /* switch_statement */
+ YYSYMBOL_switch_statement_nonattributed = 549, /* switch_statement_nonattributed */
+ YYSYMBOL_550_9 = 550, /* $@9 */
+ YYSYMBOL_switch_statement_list = 551, /* switch_statement_list */
+ YYSYMBOL_case_label = 552, /* case_label */
+ YYSYMBOL_iteration_statement = 553, /* iteration_statement */
+ YYSYMBOL_iteration_statement_nonattributed = 554, /* iteration_statement_nonattributed */
+ YYSYMBOL_555_10 = 555, /* $@10 */
+ YYSYMBOL_556_11 = 556, /* $@11 */
+ YYSYMBOL_557_12 = 557, /* $@12 */
+ YYSYMBOL_for_init_statement = 558, /* for_init_statement */
+ YYSYMBOL_conditionopt = 559, /* conditionopt */
+ YYSYMBOL_for_rest_statement = 560, /* for_rest_statement */
+ YYSYMBOL_jump_statement = 561, /* jump_statement */
+ YYSYMBOL_translation_unit = 562, /* translation_unit */
+ YYSYMBOL_external_declaration = 563, /* external_declaration */
+ YYSYMBOL_function_definition = 564, /* function_definition */
+ YYSYMBOL_565_13 = 565, /* $@13 */
+ YYSYMBOL_attribute = 566, /* attribute */
+ YYSYMBOL_attribute_list = 567, /* attribute_list */
+ YYSYMBOL_single_attribute = 568, /* single_attribute */
+ YYSYMBOL_spirv_requirements_list = 569, /* spirv_requirements_list */
+ YYSYMBOL_spirv_requirements_parameter = 570, /* spirv_requirements_parameter */
+ YYSYMBOL_spirv_extension_list = 571, /* spirv_extension_list */
+ YYSYMBOL_spirv_capability_list = 572, /* spirv_capability_list */
+ YYSYMBOL_spirv_execution_mode_qualifier = 573, /* spirv_execution_mode_qualifier */
+ YYSYMBOL_spirv_execution_mode_parameter_list = 574, /* spirv_execution_mode_parameter_list */
+ YYSYMBOL_spirv_execution_mode_parameter = 575, /* spirv_execution_mode_parameter */
+ YYSYMBOL_spirv_execution_mode_id_parameter_list = 576, /* spirv_execution_mode_id_parameter_list */
+ YYSYMBOL_spirv_storage_class_qualifier = 577, /* spirv_storage_class_qualifier */
+ YYSYMBOL_spirv_decorate_qualifier = 578, /* spirv_decorate_qualifier */
+ YYSYMBOL_spirv_decorate_parameter_list = 579, /* spirv_decorate_parameter_list */
+ YYSYMBOL_spirv_decorate_parameter = 580, /* spirv_decorate_parameter */
+ YYSYMBOL_spirv_decorate_id_parameter_list = 581, /* spirv_decorate_id_parameter_list */
+ YYSYMBOL_spirv_decorate_string_parameter_list = 582, /* spirv_decorate_string_parameter_list */
+ YYSYMBOL_spirv_type_specifier = 583, /* spirv_type_specifier */
+ YYSYMBOL_spirv_type_parameter_list = 584, /* spirv_type_parameter_list */
+ YYSYMBOL_spirv_type_parameter = 585, /* spirv_type_parameter */
+ YYSYMBOL_spirv_instruction_qualifier = 586, /* spirv_instruction_qualifier */
+ YYSYMBOL_spirv_instruction_qualifier_list = 587, /* spirv_instruction_qualifier_list */
+ YYSYMBOL_spirv_instruction_qualifier_id = 588 /* spirv_instruction_qualifier_id */
};
typedef enum yysymbol_kind_t yysymbol_kind_t;
@@ -728,7 +731,7 @@ typedef enum yysymbol_kind_t yysymbol_kind_t;
extern int yylex(YYSTYPE*, TParseContext&);
-#line 732 "MachineIndependent/glslang_tab.cpp"
+#line 735 "MachineIndependent/glslang_tab.cpp"
#ifdef short
@@ -1032,21 +1035,21 @@ union yyalloc
#endif /* !YYCOPY_NEEDED */
/* YYFINAL -- State number of the termination state. */
-#define YYFINAL 442
+#define YYFINAL 445
/* YYLAST -- Last index in YYTABLE. */
-#define YYLAST 12452
+#define YYLAST 12503
/* YYNTOKENS -- Number of terminals. */
-#define YYNTOKENS 455
+#define YYNTOKENS 458
/* YYNNTS -- Number of nonterminals. */
#define YYNNTS 131
/* YYNRULES -- Number of rules. */
-#define YYNRULES 683
+#define YYNRULES 686
/* YYNSTATES -- Number of states. */
-#define YYNSTATES 929
+#define YYNSTATES 932
/* YYMAXUTOK -- Last valid token kind. */
-#define YYMAXUTOK 709
+#define YYMAXUTOK 712
/* YYTRANSLATE(TOKEN-NUM) -- Symbol number corresponding to TOKEN-NUM
@@ -1130,7 +1133,8 @@ static const yytype_int16 yytranslate[] =
415, 416, 417, 418, 419, 420, 421, 422, 423, 424,
425, 426, 427, 428, 429, 430, 431, 432, 433, 434,
435, 436, 437, 438, 439, 440, 441, 442, 443, 444,
- 445, 446, 447, 448, 449, 450, 451, 452, 453, 454
+ 445, 446, 447, 448, 449, 450, 451, 452, 453, 454,
+ 455, 456, 457
};
#if YYDEBUG
@@ -1151,61 +1155,61 @@ static const yytype_int16 yyrline[] =
982, 988, 994, 1004, 1007, 1014, 1022, 1042, 1065, 1080,
1105, 1116, 1126, 1136, 1146, 1155, 1158, 1162, 1166, 1171,
1179, 1186, 1191, 1196, 1201, 1210, 1220, 1247, 1256, 1263,
- 1271, 1278, 1285, 1293, 1303, 1310, 1321, 1327, 1330, 1337,
- 1341, 1345, 1354, 1364, 1367, 1378, 1381, 1384, 1388, 1392,
- 1397, 1401, 1404, 1409, 1413, 1418, 1427, 1431, 1436, 1442,
- 1448, 1455, 1460, 1468, 1474, 1486, 1500, 1506, 1511, 1519,
- 1527, 1535, 1543, 1551, 1559, 1567, 1575, 1582, 1589, 1593,
- 1598, 1603, 1608, 1613, 1618, 1623, 1627, 1631, 1635, 1639,
- 1645, 1656, 1663, 1666, 1675, 1680, 1690, 1695, 1703, 1707,
- 1717, 1720, 1726, 1732, 1739, 1749, 1753, 1757, 1761, 1766,
- 1770, 1775, 1780, 1785, 1790, 1795, 1800, 1805, 1810, 1815,
- 1821, 1827, 1833, 1838, 1843, 1848, 1853, 1858, 1863, 1868,
- 1873, 1878, 1883, 1888, 1894, 1901, 1906, 1911, 1916, 1921,
- 1926, 1931, 1936, 1941, 1946, 1951, 1956, 1964, 1972, 1980,
- 1986, 1992, 1998, 2004, 2010, 2016, 2022, 2028, 2034, 2040,
- 2046, 2052, 2058, 2064, 2070, 2076, 2082, 2088, 2094, 2100,
- 2106, 2112, 2118, 2124, 2130, 2136, 2142, 2148, 2154, 2160,
- 2166, 2172, 2178, 2186, 2194, 2202, 2210, 2218, 2226, 2234,
- 2242, 2250, 2258, 2266, 2274, 2280, 2286, 2292, 2298, 2304,
- 2310, 2316, 2322, 2328, 2334, 2340, 2346, 2352, 2358, 2364,
- 2370, 2376, 2382, 2388, 2394, 2400, 2406, 2412, 2418, 2424,
- 2430, 2436, 2442, 2448, 2454, 2460, 2466, 2472, 2478, 2484,
- 2490, 2494, 2498, 2502, 2507, 2513, 2518, 2523, 2528, 2533,
- 2538, 2543, 2549, 2554, 2559, 2564, 2569, 2574, 2580, 2586,
- 2592, 2598, 2604, 2610, 2616, 2622, 2628, 2634, 2640, 2646,
- 2652, 2658, 2663, 2668, 2673, 2678, 2683, 2688, 2694, 2699,
- 2704, 2709, 2714, 2719, 2724, 2729, 2735, 2740, 2745, 2750,
- 2755, 2760, 2765, 2770, 2775, 2780, 2785, 2790, 2795, 2800,
- 2805, 2811, 2816, 2821, 2827, 2833, 2838, 2843, 2848, 2854,
- 2859, 2864, 2869, 2875, 2880, 2885, 2890, 2896, 2901, 2906,
- 2911, 2917, 2923, 2929, 2935, 2940, 2946, 2952, 2958, 2963,
- 2968, 2973, 2978, 2983, 2989, 2994, 2999, 3004, 3010, 3015,
- 3020, 3025, 3031, 3036, 3041, 3046, 3052, 3057, 3062, 3067,
- 3073, 3078, 3083, 3088, 3094, 3099, 3104, 3109, 3115, 3120,
- 3125, 3130, 3136, 3141, 3146, 3151, 3157, 3162, 3167, 3172,
- 3178, 3183, 3188, 3193, 3199, 3204, 3209, 3214, 3220, 3225,
- 3230, 3235, 3241, 3246, 3251, 3256, 3262, 3267, 3272, 3277,
- 3283, 3288, 3293, 3298, 3303, 3308, 3313, 3318, 3323, 3328,
- 3333, 3338, 3343, 3348, 3353, 3358, 3363, 3368, 3373, 3378,
- 3383, 3388, 3393, 3398, 3403, 3409, 3415, 3421, 3427, 3434,
- 3441, 3447, 3453, 3459, 3465, 3471, 3477, 3483, 3488, 3493,
- 3509, 3514, 3519, 3527, 3527, 3538, 3538, 3548, 3551, 3564,
- 3586, 3613, 3617, 3623, 3628, 3639, 3643, 3649, 3655, 3666,
- 3669, 3676, 3680, 3681, 3687, 3688, 3689, 3690, 3691, 3692,
- 3693, 3695, 3701, 3710, 3711, 3715, 3711, 3727, 3728, 3732,
- 3732, 3739, 3739, 3753, 3756, 3764, 3772, 3783, 3784, 3788,
- 3792, 3800, 3807, 3811, 3819, 3823, 3836, 3840, 3848, 3848,
- 3868, 3871, 3877, 3889, 3901, 3905, 3913, 3913, 3928, 3928,
- 3946, 3946, 3967, 3970, 3976, 3979, 3985, 3989, 3996, 4001,
- 4006, 4013, 4016, 4020, 4025, 4029, 4039, 4043, 4052, 4055,
- 4059, 4068, 4068, 4110, 4115, 4118, 4123, 4126, 4133, 4136,
- 4141, 4144, 4149, 4152, 4157, 4160, 4165, 4169, 4174, 4178,
- 4183, 4187, 4194, 4197, 4202, 4205, 4208, 4211, 4214, 4219,
- 4228, 4239, 4244, 4252, 4256, 4261, 4265, 4270, 4274, 4279,
- 4283, 4290, 4293, 4298, 4301, 4304, 4307, 4312, 4320, 4330,
- 4334, 4339, 4343, 4348, 4352, 4359, 4362, 4367, 4372, 4375,
- 4381, 4384, 4389, 4392
+ 1271, 1278, 1285, 1293, 1301, 1311, 1321, 1328, 1339, 1345,
+ 1348, 1355, 1359, 1363, 1372, 1382, 1385, 1396, 1399, 1402,
+ 1406, 1410, 1415, 1419, 1422, 1427, 1431, 1436, 1445, 1449,
+ 1454, 1460, 1466, 1473, 1478, 1486, 1492, 1504, 1518, 1524,
+ 1529, 1537, 1545, 1553, 1561, 1569, 1577, 1585, 1593, 1600,
+ 1607, 1611, 1616, 1621, 1626, 1631, 1636, 1641, 1645, 1649,
+ 1653, 1657, 1663, 1669, 1681, 1688, 1691, 1700, 1705, 1715,
+ 1720, 1728, 1732, 1742, 1745, 1751, 1757, 1764, 1774, 1778,
+ 1782, 1786, 1791, 1795, 1800, 1805, 1810, 1815, 1820, 1825,
+ 1830, 1835, 1840, 1846, 1852, 1858, 1863, 1868, 1873, 1878,
+ 1883, 1888, 1893, 1898, 1903, 1908, 1913, 1919, 1926, 1931,
+ 1936, 1941, 1946, 1951, 1956, 1961, 1966, 1971, 1976, 1981,
+ 1989, 1997, 2005, 2011, 2017, 2023, 2029, 2035, 2041, 2047,
+ 2053, 2059, 2065, 2071, 2077, 2083, 2089, 2095, 2101, 2107,
+ 2113, 2119, 2125, 2131, 2137, 2143, 2149, 2155, 2161, 2167,
+ 2173, 2179, 2185, 2191, 2197, 2203, 2211, 2219, 2227, 2235,
+ 2243, 2251, 2259, 2267, 2275, 2283, 2291, 2299, 2305, 2311,
+ 2317, 2323, 2329, 2335, 2341, 2347, 2353, 2359, 2365, 2371,
+ 2377, 2383, 2389, 2395, 2401, 2407, 2413, 2419, 2425, 2431,
+ 2437, 2443, 2449, 2455, 2461, 2467, 2473, 2479, 2485, 2491,
+ 2497, 2503, 2509, 2515, 2519, 2523, 2527, 2532, 2538, 2543,
+ 2548, 2553, 2558, 2563, 2568, 2574, 2579, 2584, 2589, 2594,
+ 2599, 2605, 2611, 2617, 2623, 2629, 2635, 2641, 2647, 2653,
+ 2659, 2665, 2671, 2677, 2683, 2688, 2693, 2698, 2703, 2708,
+ 2713, 2719, 2724, 2729, 2734, 2739, 2744, 2749, 2754, 2760,
+ 2765, 2770, 2775, 2780, 2785, 2790, 2795, 2800, 2805, 2810,
+ 2815, 2820, 2825, 2830, 2836, 2841, 2846, 2852, 2858, 2863,
+ 2868, 2873, 2879, 2884, 2889, 2894, 2900, 2905, 2910, 2915,
+ 2921, 2926, 2931, 2936, 2942, 2948, 2954, 2960, 2965, 2971,
+ 2977, 2983, 2988, 2993, 2998, 3003, 3008, 3014, 3019, 3024,
+ 3029, 3035, 3040, 3045, 3050, 3056, 3061, 3066, 3071, 3077,
+ 3082, 3087, 3092, 3098, 3103, 3108, 3113, 3119, 3124, 3129,
+ 3134, 3140, 3145, 3150, 3155, 3161, 3166, 3171, 3176, 3182,
+ 3187, 3192, 3197, 3203, 3208, 3213, 3218, 3224, 3229, 3234,
+ 3239, 3245, 3250, 3255, 3260, 3266, 3271, 3276, 3281, 3287,
+ 3292, 3297, 3302, 3308, 3313, 3318, 3323, 3328, 3333, 3338,
+ 3343, 3348, 3353, 3358, 3363, 3368, 3373, 3378, 3383, 3388,
+ 3393, 3398, 3403, 3408, 3413, 3418, 3423, 3428, 3434, 3440,
+ 3446, 3452, 3459, 3466, 3472, 3478, 3484, 3490, 3496, 3502,
+ 3508, 3513, 3518, 3534, 3539, 3544, 3552, 3552, 3563, 3563,
+ 3573, 3576, 3589, 3611, 3638, 3642, 3648, 3653, 3664, 3668,
+ 3674, 3680, 3691, 3694, 3701, 3705, 3706, 3712, 3713, 3714,
+ 3715, 3716, 3717, 3718, 3720, 3726, 3735, 3736, 3740, 3736,
+ 3752, 3753, 3757, 3757, 3764, 3764, 3778, 3781, 3789, 3797,
+ 3808, 3809, 3813, 3817, 3825, 3832, 3836, 3844, 3848, 3861,
+ 3865, 3873, 3873, 3893, 3896, 3902, 3914, 3926, 3930, 3938,
+ 3938, 3953, 3953, 3971, 3971, 3992, 3995, 4001, 4004, 4010,
+ 4014, 4021, 4026, 4031, 4038, 4041, 4045, 4050, 4054, 4064,
+ 4068, 4077, 4080, 4084, 4093, 4093, 4135, 4140, 4143, 4148,
+ 4151, 4158, 4161, 4166, 4169, 4174, 4177, 4182, 4185, 4190,
+ 4194, 4199, 4203, 4208, 4212, 4219, 4222, 4227, 4230, 4233,
+ 4236, 4239, 4244, 4253, 4264, 4269, 4277, 4281, 4286, 4290,
+ 4295, 4299, 4304, 4308, 4315, 4318, 4323, 4326, 4329, 4332,
+ 4337, 4345, 4355, 4359, 4364, 4368, 4373, 4377, 4384, 4387,
+ 4392, 4397, 4400, 4406, 4409, 4414, 4417
};
#endif
@@ -1319,7 +1323,8 @@ static const char *const yytname[] =
"WRITEONLY", "DEVICECOHERENT", "QUEUEFAMILYCOHERENT",
"WORKGROUPCOHERENT", "SUBGROUPCOHERENT", "NONPRIVATE",
"SHADERCALLCOHERENT", "NOPERSPECTIVE", "EXPLICITINTERPAMD",
- "PERVERTEXNV", "PERPRIMITIVENV", "PERVIEWNV", "PERTASKNV", "PRECISE",
+ "PERVERTEXEXT", "PERVERTEXNV", "PERPRIMITIVENV", "PERVIEWNV",
+ "PERTASKNV", "PERPRIMITIVEEXT", "TASKPAYLOADWORKGROUPEXT", "PRECISE",
"$accept", "variable_identifier", "primary_expression",
"postfix_expression", "integer_expression", "function_call",
"function_call_or_method", "function_call_generic",
@@ -1429,16 +1434,16 @@ static const yytype_int16 yytoknum[] =
675, 676, 677, 678, 679, 680, 681, 682, 683, 684,
685, 686, 687, 688, 689, 690, 691, 692, 693, 694,
695, 696, 697, 698, 699, 700, 701, 702, 703, 704,
- 705, 706, 707, 708, 709
+ 705, 706, 707, 708, 709, 710, 711, 712
};
#endif
-#define YYPACT_NINF (-859)
+#define YYPACT_NINF (-813)
#define yypact_value_is_default(Yyn) \
((Yyn) == YYPACT_NINF)
-#define YYTABLE_NINF (-570)
+#define YYTABLE_NINF (-573)
#define yytable_value_is_error(Yyn) \
0
@@ -1447,99 +1452,100 @@ static const yytype_int16 yytoknum[] =
STATE-NUM. */
static const yytype_int16 yypact[] =
{
- 4548, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -312, -274, -244, -212, -208,
- -201, -181, -169, -859, -859, -194, -859, -859, -859, -859,
- -859, -285, -859, -859, -859, -859, -859, -317, -859, -859,
- -859, -859, -859, -859, -132, -73, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -329, -70,
- -158, -145, 7712, -221, -859, -164, -859, -859, -859, -859,
- 5452, -859, -859, -859, -859, -68, -859, -859, 932, -859,
- -859, 7712, -55, -859, -859, -859, 5904, -80, -154, -150,
- -142, -135, -130, -80, -129, -79, 12060, -859, -45, -354,
- -76, -859, -308, -859, -43, -40, 7712, -859, -859, -859,
- 7712, -72, -71, -859, -265, -859, -257, -859, -859, 10761,
- -39, -859, -859, -859, -35, -69, 7712, -859, -42, -38,
- -37, -859, -302, -859, -235, -32, -33, -28, -27, -217,
- -26, -23, -22, -21, -20, -16, -216, -29, -15, -31,
- -303, -859, -13, 7712, -859, -14, -859, -214, -859, -859,
- -205, 9029, -859, -279, 1384, -859, -859, -859, -859, -859,
- -39, -299, -859, 9462, -275, -859, -34, -859, -137, 10761,
- 10761, -859, 10761, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -248, -859, -859, -859, -3, -203, 11194, -1,
- -859, 10761, -859, -859, -310, 3, -40, 1, -859, -309,
- -80, -859, -30, -859, -319, 5, -128, 10761, -124, -859,
- -157, -122, 10761, -120, 10, -118, -80, -859, 11627, -859,
- -116, 10761, 7, -79, -859, 7712, -25, 6356, -859, 7712,
- 10761, -859, -354, -859, -19, -859, -859, -78, -263, -94,
- -298, -52, -18, -8, -12, 29, 28, -301, 15, 9895,
- -859, 16, -859, -859, 19, 12, 17, -859, 20, 23,
- 18, 10328, 24, 10761, 21, 22, 25, 27, 30, -215,
- -859, -859, -117, -859, -70, 26, 34, -859, -859, -859,
- -859, -859, 1836, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, 5000, 3, 9462, -264, 8163, -859, -859, 9462,
- 7712, -859, -11, -859, -859, -859, -195, -859, -859, 10761,
- -6, -859, -859, 10761, 37, -859, -859, -859, 10761, -859,
- -859, -859, -322, -859, -859, -192, 35, -859, -859, -859,
- -859, -859, -859, -179, -859, -178, -859, -859, -177, 32,
- -859, -859, -859, -859, -175, -859, -174, -859, -167, 36,
- -859, -166, 38, -165, 35, -859, -163, -859, 45, 46,
- -859, -859, -25, -39, -115, -859, -859, -859, 6808, -859,
- -859, -859, 10761, 10761, 10761, 10761, 10761, 10761, 10761, 10761,
- 10761, 10761, 10761, 10761, 10761, 10761, 10761, 10761, 10761, 10761,
- 10761, -859, -859, -859, 51, -859, 2288, -859, -859, -859,
- 2288, -859, 10761, -859, -859, -88, 10761, -63, -859, -859,
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, 10761, 10761, -859, -859, -859, -859,
- -859, -859, -859, 9462, -859, -859, -108, -859, 7260, -859,
- -859, 52, 53, -859, -859, -859, -859, -859, -138, -136,
- -859, -304, -859, -319, -859, -319, -859, 10761, 10761, -859,
- -157, -859, -157, -859, 10761, 10761, -859, 65, 10, -859,
- 11627, -859, 10761, -859, -859, -86, 3, -25, -859, -859,
- -859, -859, -859, -78, -78, -263, -263, -94, -94, -94,
- -94, -298, -298, -52, -18, -8, -12, 29, 28, 10761,
- -859, 2288, 4096, 31, 3644, -156, -859, -155, -859, -859,
- -859, -859, -859, 8596, -859, -859, -859, 66, -859, 39,
- -859, -153, -859, -151, -859, -148, -859, -146, -859, -144,
- -143, -859, -859, -859, -61, 64, 53, 40, 72, 74,
- -859, -859, 4096, 75, -859, -859, -859, -859, -859, -859,
- -859, -859, -859, -859, -859, 10761, -859, 71, 2740, 10761,
- -859, 73, 81, 41, 80, 3192, -859, 83, -859, 9462,
- -859, -859, -859, -141, 10761, 2740, 75, -859, -859, 2288,
- -859, 78, 53, -859, -859, 2288, 86, -859, -859
+ 4575, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -300, -272, -219, -123, -120,
+ -118, -105, -87, -813, -813, -317, -813, -813, -813, -813,
+ -813, -62, -813, -813, -813, -813, -813, -324, -813, -813,
+ -813, -813, -813, -813, -76, -64, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -319, -260, -133, -174, 7760, -191, -813, -166, -813,
+ -813, -813, -813, 5485, -813, -813, -813, -813, -61, -813,
+ -813, 935, -813, -813, 7760, -39, -813, -813, -813, 5940,
+ -50, -335, -267, -162, -152, -139, -50, -137, -46, 12111,
+ -813, -29, -339, -43, -813, -268, -813, -27, -6, 7760,
+ -813, -813, -813, 7760, -37, -36, -813, -298, -813, -237,
+ -813, -813, 10812, -5, -813, -813, -813, 1, -33, 7760,
+ -813, -4, -2, -3, -813, -236, -813, -227, -1, 3,
+ 4, 5, -225, 6, 10, 12, 13, 14, 17, -223,
+ 8, 18, 16, -304, -813, 21, 7760, -813, 19, -813,
+ -222, -813, -813, -207, 9080, -813, -247, 1390, -813, -813,
+ -813, -813, -813, -5, -270, -813, 9513, -250, -813, -22,
+ -813, -132, 10812, 10812, -813, 10812, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -265, -813, -813, -813, 25,
+ -204, 11245, 27, -813, 10812, -813, -813, -314, 30, -6,
+ 33, -813, -315, -50, -813, 15, -813, -325, 32, -130,
+ 10812, -129, -813, -146, -125, 10812, -124, 39, -119, -50,
+ -813, 11678, -813, -115, 10812, 36, -46, -813, 7760, 20,
+ 6395, -813, 7760, 10812, -813, -339, -813, 29, -813, -813,
+ -47, -83, -59, -288, -18, -17, 22, 26, 54, 59,
+ -309, 46, 9946, -813, 37, -813, -813, 50, 56, 58,
+ -813, 72, 74, 65, 10379, 76, 10812, 69, 68, 73,
+ 75, 77, -168, -813, -813, -82, -813, -260, 79, 82,
+ -813, -813, -813, -813, -813, 1845, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, 5030, 30, 9513, -241, 8214,
+ -813, -813, 9513, 7760, -813, 52, -813, -813, -813, -202,
+ -813, -813, 10812, 55, -813, -813, 10812, 85, -813, -813,
+ -813, 10812, -813, -813, -813, -310, -813, -813, -197, 81,
+ -813, -813, -813, -813, -813, -813, -195, -813, -194, -813,
+ -813, -190, 87, -813, -813, -813, -813, -169, -813, -167,
+ -813, -165, 89, -813, -158, 90, -157, 81, -813, -156,
+ -813, 91, 97, -813, -813, 20, -5, -77, -813, -813,
+ -813, 6850, -813, -813, -813, 10812, 10812, 10812, 10812, 10812,
+ 10812, 10812, 10812, 10812, 10812, 10812, 10812, 10812, 10812, 10812,
+ 10812, 10812, 10812, 10812, -813, -813, -813, 96, -813, 2300,
+ -813, -813, -813, 2300, -813, 10812, -813, -813, -49, 10812,
+ -26, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, 10812, 10812, -813,
+ -813, -813, -813, -813, -813, -813, 9513, -813, -813, -31,
+ -813, 7305, -813, -813, 98, 95, -813, -813, -813, -813,
+ -813, -172, -134, -813, -307, -813, -325, -813, -325, -813,
+ 10812, 10812, -813, -146, -813, -146, -813, 10812, 10812, -813,
+ 104, 39, -813, 11678, -813, 10812, -813, -813, -48, 30,
+ 20, -813, -813, -813, -813, -813, -47, -47, -83, -83,
+ -59, -59, -59, -59, -288, -288, -18, -17, 22, 26,
+ 54, 59, 10812, -813, 2300, 4120, 60, 3665, -155, -813,
+ -154, -813, -813, -813, -813, -813, 8647, -813, -813, -813,
+ 106, -813, -15, -813, -147, -813, -145, -813, -144, -813,
+ -143, -813, -142, -140, -813, -813, -813, -24, 101, 95,
+ 71, 107, 110, -813, -813, 4120, 109, -813, -813, -813,
+ -813, -813, -813, -813, -813, -813, -813, -813, 10812, -813,
+ 102, 2755, 10812, -813, 105, 113, 70, 112, 3210, -813,
+ 115, -813, 9513, -813, -813, -813, -135, 10812, 2755, 109,
+ -813, -813, 2300, -813, 111, 95, -813, -813, 2300, 117,
+ -813, -813
};
/* YYDEFACT[STATE-NUM] -- Default reduction number in state STATE-NUM.
@@ -1547,137 +1553,138 @@ static const yytype_int16 yypact[] =
means the default is an error. */
static const yytype_int16 yydefact[] =
{
- 0, 166, 219, 217, 218, 216, 223, 224, 225, 226,
- 227, 228, 229, 230, 231, 220, 221, 222, 232, 233,
- 234, 235, 236, 237, 238, 239, 240, 241, 242, 243,
- 345, 346, 347, 348, 349, 350, 351, 371, 372, 373,
- 374, 375, 376, 377, 386, 399, 400, 387, 388, 390,
- 389, 391, 392, 393, 394, 395, 396, 397, 398, 174,
- 175, 245, 246, 244, 247, 254, 255, 252, 253, 250,
- 251, 248, 249, 277, 278, 279, 289, 290, 291, 274,
- 275, 276, 286, 287, 288, 271, 272, 273, 283, 284,
- 285, 268, 269, 270, 280, 281, 282, 256, 257, 258,
- 292, 293, 294, 259, 260, 261, 304, 305, 306, 262,
- 263, 264, 316, 317, 318, 265, 266, 267, 328, 329,
- 330, 295, 296, 297, 298, 299, 300, 301, 302, 303,
- 307, 308, 309, 310, 311, 312, 313, 314, 315, 319,
- 320, 321, 322, 323, 324, 325, 326, 327, 331, 332,
- 333, 334, 335, 336, 337, 338, 339, 343, 340, 341,
- 342, 524, 525, 526, 355, 356, 379, 382, 344, 353,
- 354, 370, 352, 401, 402, 405, 406, 407, 409, 410,
- 411, 413, 414, 415, 417, 418, 514, 515, 378, 380,
- 381, 357, 358, 359, 403, 360, 364, 365, 368, 408,
- 412, 416, 361, 362, 366, 367, 404, 363, 369, 448,
- 450, 451, 452, 454, 455, 456, 458, 459, 460, 462,
- 463, 464, 466, 467, 468, 470, 471, 472, 474, 475,
- 476, 478, 479, 480, 482, 483, 484, 486, 487, 488,
- 490, 491, 449, 453, 457, 461, 465, 473, 477, 481,
- 469, 485, 489, 492, 493, 494, 495, 496, 497, 498,
- 499, 500, 501, 502, 503, 504, 505, 506, 507, 508,
- 509, 510, 511, 512, 513, 383, 384, 385, 419, 428,
- 430, 424, 429, 431, 432, 434, 435, 436, 438, 439,
- 440, 442, 443, 444, 446, 447, 420, 421, 422, 433,
- 423, 425, 426, 427, 437, 441, 445, 516, 517, 520,
- 521, 522, 523, 518, 519, 0, 0, 0, 0, 0,
- 0, 0, 0, 164, 165, 0, 620, 137, 530, 531,
- 532, 0, 529, 170, 168, 169, 167, 0, 215, 171,
- 172, 173, 139, 138, 0, 199, 180, 182, 178, 184,
- 186, 181, 183, 179, 185, 187, 176, 177, 201, 188,
- 195, 196, 197, 198, 189, 190, 191, 192, 193, 194,
- 140, 141, 142, 143, 144, 145, 152, 619, 0, 621,
- 0, 114, 113, 0, 125, 130, 159, 158, 156, 160,
- 0, 153, 155, 161, 135, 211, 157, 528, 0, 616,
- 618, 0, 0, 162, 163, 527, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 535, 0, 0,
- 0, 99, 0, 94, 0, 109, 0, 121, 115, 123,
- 0, 124, 0, 97, 131, 102, 0, 154, 136, 0,
- 204, 210, 1, 617, 0, 0, 0, 96, 0, 0,
- 0, 628, 0, 680, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 626,
- 0, 624, 0, 0, 533, 149, 151, 0, 147, 202,
- 0, 0, 100, 0, 0, 622, 110, 116, 120, 122,
- 118, 126, 117, 0, 132, 105, 0, 103, 0, 0,
- 0, 9, 0, 43, 42, 44, 41, 5, 6, 7,
- 8, 2, 16, 14, 15, 17, 10, 11, 12, 13,
- 3, 18, 37, 20, 25, 26, 0, 0, 30, 0,
- 213, 0, 36, 34, 0, 205, 111, 0, 95, 0,
- 0, 678, 0, 636, 0, 0, 0, 0, 0, 653,
- 0, 0, 0, 0, 0, 0, 0, 673, 0, 651,
- 0, 0, 0, 0, 98, 0, 0, 0, 537, 0,
- 0, 146, 0, 200, 0, 206, 45, 49, 52, 55,
- 60, 63, 65, 67, 69, 71, 73, 75, 0, 0,
- 101, 564, 573, 577, 0, 0, 0, 598, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 45,
- 78, 91, 0, 551, 0, 161, 135, 554, 575, 553,
- 561, 552, 0, 555, 556, 579, 557, 586, 558, 559,
- 594, 560, 0, 119, 0, 127, 0, 545, 134, 0,
- 0, 107, 0, 104, 38, 39, 0, 22, 23, 0,
- 0, 28, 27, 0, 215, 31, 33, 40, 0, 212,
- 112, 682, 0, 683, 629, 0, 0, 681, 648, 644,
- 645, 646, 647, 0, 642, 0, 93, 649, 0, 0,
- 663, 664, 665, 666, 0, 661, 0, 667, 0, 0,
- 669, 0, 0, 0, 2, 677, 0, 675, 0, 0,
- 623, 625, 0, 543, 0, 541, 536, 538, 0, 150,
- 148, 203, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 168, 222, 220, 221, 219, 226, 227, 228, 229,
+ 230, 231, 232, 233, 234, 223, 224, 225, 235, 236,
+ 237, 238, 239, 240, 241, 242, 243, 244, 245, 246,
+ 348, 349, 350, 351, 352, 353, 354, 374, 375, 376,
+ 377, 378, 379, 380, 389, 402, 403, 390, 391, 393,
+ 392, 394, 395, 396, 397, 398, 399, 400, 401, 176,
+ 177, 248, 249, 247, 250, 257, 258, 255, 256, 253,
+ 254, 251, 252, 280, 281, 282, 292, 293, 294, 277,
+ 278, 279, 289, 290, 291, 274, 275, 276, 286, 287,
+ 288, 271, 272, 273, 283, 284, 285, 259, 260, 261,
+ 295, 296, 297, 262, 263, 264, 307, 308, 309, 265,
+ 266, 267, 319, 320, 321, 268, 269, 270, 331, 332,
+ 333, 298, 299, 300, 301, 302, 303, 304, 305, 306,
+ 310, 311, 312, 313, 314, 315, 316, 317, 318, 322,
+ 323, 324, 325, 326, 327, 328, 329, 330, 334, 335,
+ 336, 337, 338, 339, 340, 341, 342, 346, 343, 344,
+ 345, 527, 528, 529, 358, 359, 382, 385, 347, 356,
+ 357, 373, 355, 404, 405, 408, 409, 410, 412, 413,
+ 414, 416, 417, 418, 420, 421, 517, 518, 381, 383,
+ 384, 360, 361, 362, 406, 363, 367, 368, 371, 411,
+ 415, 419, 364, 365, 369, 370, 407, 366, 372, 451,
+ 453, 454, 455, 457, 458, 459, 461, 462, 463, 465,
+ 466, 467, 469, 470, 471, 473, 474, 475, 477, 478,
+ 479, 481, 482, 483, 485, 486, 487, 489, 490, 491,
+ 493, 494, 452, 456, 460, 464, 468, 476, 480, 484,
+ 472, 488, 492, 495, 496, 497, 498, 499, 500, 501,
+ 502, 503, 504, 505, 506, 507, 508, 509, 510, 511,
+ 512, 513, 514, 515, 516, 386, 387, 388, 422, 431,
+ 433, 427, 432, 434, 435, 437, 438, 439, 441, 442,
+ 443, 445, 446, 447, 449, 450, 423, 424, 425, 436,
+ 426, 428, 429, 430, 440, 444, 448, 519, 520, 523,
+ 524, 525, 526, 521, 522, 0, 0, 0, 0, 0,
+ 0, 0, 0, 166, 167, 0, 623, 137, 533, 534,
+ 535, 0, 532, 172, 170, 171, 169, 0, 218, 173,
+ 174, 175, 139, 138, 0, 201, 182, 184, 180, 186,
+ 188, 183, 185, 181, 187, 189, 178, 179, 204, 190,
+ 197, 198, 199, 200, 191, 192, 193, 194, 195, 196,
+ 140, 141, 143, 142, 144, 146, 147, 145, 203, 154,
+ 622, 0, 624, 0, 114, 113, 0, 125, 130, 161,
+ 160, 158, 162, 0, 155, 157, 163, 135, 214, 159,
+ 531, 0, 619, 621, 0, 0, 164, 165, 530, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 538, 0, 0, 0, 99, 0, 94, 0, 109, 0,
+ 121, 115, 123, 0, 124, 0, 97, 131, 102, 0,
+ 156, 136, 0, 207, 213, 1, 620, 0, 0, 0,
+ 96, 0, 0, 0, 631, 0, 683, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 629, 0, 627, 0, 0, 536, 151, 153,
+ 0, 149, 205, 0, 0, 100, 0, 0, 625, 110,
+ 116, 120, 122, 118, 126, 117, 0, 132, 105, 0,
+ 103, 0, 0, 0, 9, 0, 43, 42, 44, 41,
+ 5, 6, 7, 8, 2, 16, 14, 15, 17, 10,
+ 11, 12, 13, 3, 18, 37, 20, 25, 26, 0,
+ 0, 30, 0, 216, 0, 36, 34, 0, 208, 111,
+ 0, 95, 0, 0, 681, 0, 639, 0, 0, 0,
+ 0, 0, 656, 0, 0, 0, 0, 0, 0, 0,
+ 676, 0, 654, 0, 0, 0, 0, 98, 0, 0,
+ 0, 540, 0, 0, 148, 0, 202, 0, 209, 45,
+ 49, 52, 55, 60, 63, 65, 67, 69, 71, 73,
+ 75, 0, 0, 101, 567, 576, 580, 0, 0, 0,
+ 601, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 45, 78, 91, 0, 554, 0, 163, 135,
+ 557, 578, 556, 564, 555, 0, 558, 559, 582, 560,
+ 589, 561, 562, 597, 563, 0, 119, 0, 127, 0,
+ 548, 134, 0, 0, 107, 0, 104, 38, 39, 0,
+ 22, 23, 0, 0, 28, 27, 0, 218, 31, 33,
+ 40, 0, 215, 112, 685, 0, 686, 632, 0, 0,
+ 684, 651, 647, 648, 649, 650, 0, 645, 0, 93,
+ 652, 0, 0, 666, 667, 668, 669, 0, 664, 0,
+ 670, 0, 0, 672, 0, 0, 0, 2, 680, 0,
+ 678, 0, 0, 626, 628, 0, 546, 0, 544, 539,
+ 541, 0, 152, 150, 206, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 76, 207, 208, 0, 563, 0, 596, 609, 608,
- 0, 600, 0, 612, 610, 0, 0, 0, 593, 613,
- 614, 615, 562, 81, 82, 84, 83, 86, 87, 88,
- 89, 90, 85, 80, 0, 0, 578, 574, 576, 580,
- 587, 595, 129, 0, 548, 549, 0, 133, 0, 108,
- 4, 0, 24, 21, 32, 214, 632, 634, 0, 0,
- 679, 0, 638, 0, 637, 0, 640, 0, 0, 655,
- 0, 654, 0, 657, 0, 0, 659, 0, 0, 674,
- 0, 671, 0, 652, 627, 0, 544, 0, 539, 534,
- 46, 47, 48, 51, 50, 53, 54, 58, 59, 56,
- 57, 61, 62, 64, 66, 68, 70, 72, 74, 0,
- 209, 565, 0, 0, 0, 0, 611, 0, 592, 79,
- 92, 128, 546, 0, 106, 19, 630, 0, 631, 0,
- 643, 0, 650, 0, 662, 0, 668, 0, 670, 0,
- 0, 676, 540, 542, 0, 0, 584, 0, 0, 0,
- 603, 602, 605, 571, 588, 547, 550, 633, 635, 639,
- 641, 656, 658, 660, 672, 0, 566, 0, 0, 0,
- 604, 0, 0, 583, 0, 0, 581, 0, 77, 0,
- 568, 597, 567, 0, 606, 0, 571, 570, 572, 590,
- 585, 0, 607, 601, 582, 591, 0, 599, 589
+ 0, 0, 0, 0, 76, 210, 211, 0, 566, 0,
+ 599, 612, 611, 0, 603, 0, 615, 613, 0, 0,
+ 0, 596, 616, 617, 618, 565, 81, 82, 84, 83,
+ 86, 87, 88, 89, 90, 85, 80, 0, 0, 581,
+ 577, 579, 583, 590, 598, 129, 0, 551, 552, 0,
+ 133, 0, 108, 4, 0, 24, 21, 32, 217, 635,
+ 637, 0, 0, 682, 0, 641, 0, 640, 0, 643,
+ 0, 0, 658, 0, 657, 0, 660, 0, 0, 662,
+ 0, 0, 677, 0, 674, 0, 655, 630, 0, 547,
+ 0, 542, 537, 46, 47, 48, 51, 50, 53, 54,
+ 58, 59, 56, 57, 61, 62, 64, 66, 68, 70,
+ 72, 74, 0, 212, 568, 0, 0, 0, 0, 614,
+ 0, 595, 79, 92, 128, 549, 0, 106, 19, 633,
+ 0, 634, 0, 646, 0, 653, 0, 665, 0, 671,
+ 0, 673, 0, 0, 679, 543, 545, 0, 0, 587,
+ 0, 0, 0, 606, 605, 608, 574, 591, 550, 553,
+ 636, 638, 642, 644, 659, 661, 663, 675, 0, 569,
+ 0, 0, 0, 607, 0, 0, 586, 0, 0, 584,
+ 0, 77, 0, 571, 600, 570, 0, 609, 0, 574,
+ 573, 575, 593, 588, 0, 610, 604, 585, 594, 0,
+ 602, 592
};
/* YYPGOTO[NTERM-NUM]. */
static const yytype_int16 yypgoto[] =
{
- -859, -859, -859, -859, -859, -859, -859, -859, -859, -859,
- -859, -859, -209, -859, -418, -417, -602, -421, -291, -284,
- -286, -283, -281, -287, -859, -473, -859, -490, -859, -497,
- -520, 13, -859, -859, -859, 14, -394, -859, -859, 33,
- 42, 44, -859, -859, -395, -859, -859, -859, -859, -121,
- -859, -381, -369, -859, 9, -859, 0, -424, -859, -859,
- -859, -859, 113, -859, -859, -859, -545, -549, -252, -365,
- -617, -859, -391, -618, -858, -859, -450, -859, -859, -459,
- -458, -859, -859, 43, -721, -387, -859, -173, -859, -422,
- -859, -170, -859, -859, -859, -859, -168, -859, -859, -859,
- -859, -859, -859, -859, -859, 67, -859, -859, 2, -859,
- -97, -300, -386, -859, -859, -859, -326, -323, -327, -859,
- -859, -330, -325, -328, -332, -859, -331, -334, -859, -390,
+ -813, -813, -813, -813, -813, -813, -813, -813, -813, -813,
+ -813, -813, -429, -813, -381, -380, -483, -383, -262, -257,
+ -261, -258, -255, -259, -813, -479, -813, -492, -813, -495,
+ -536, 11, -813, -813, -813, 7, -388, -813, -813, 42,
+ 49, 47, -813, -813, -401, -813, -813, -813, -813, -96,
+ -813, -384, -371, -813, 9, -813, 0, -425, -813, -813,
+ -813, -813, 150, -813, -813, -813, -546, -553, -217, -338,
+ -607, -813, -364, -619, -812, -813, -421, -813, -813, -428,
+ -430, -813, -813, 64, -718, -355, -813, -141, -813, -390,
+ -813, -138, -813, -813, -813, -813, -136, -813, -813, -813,
+ -813, -813, -813, -813, -813, 92, -813, -813, 2, -813,
+ -68, -275, -456, -813, -813, -813, -296, -293, -301, -813,
+ -813, -299, -295, -303, -302, -813, -306, -311, -813, -392,
-530
};
/* YYDEFGOTO[NTERM-NUM]. */
static const yytype_int16 yydefgoto[] =
{
- -1, 520, 521, 522, 781, 523, 524, 525, 526, 527,
- 528, 529, 609, 531, 577, 578, 579, 580, 581, 582,
- 583, 584, 585, 586, 587, 610, 839, 611, 764, 612,
- 695, 613, 378, 640, 498, 614, 380, 381, 382, 427,
- 428, 429, 383, 384, 385, 386, 387, 388, 477, 478,
- 389, 390, 391, 392, 532, 480, 533, 483, 440, 441,
- 534, 395, 396, 397, 569, 473, 567, 568, 704, 705,
- 638, 776, 617, 618, 619, 620, 621, 736, 875, 911,
- 903, 904, 905, 912, 622, 623, 624, 625, 906, 878,
- 626, 627, 907, 926, 628, 629, 630, 842, 740, 844,
- 882, 901, 902, 631, 398, 399, 400, 424, 632, 470,
- 471, 450, 451, 788, 789, 402, 673, 674, 678, 403,
- 404, 684, 685, 688, 691, 405, 696, 697, 406, 452,
- 453
+ -1, 523, 524, 525, 784, 526, 527, 528, 529, 530,
+ 531, 532, 612, 534, 580, 581, 582, 583, 584, 585,
+ 586, 587, 588, 589, 590, 613, 842, 614, 767, 615,
+ 698, 616, 381, 643, 501, 617, 383, 384, 385, 430,
+ 431, 432, 386, 387, 388, 389, 390, 391, 480, 481,
+ 392, 393, 394, 395, 535, 483, 536, 486, 443, 444,
+ 537, 398, 399, 400, 572, 476, 570, 571, 707, 708,
+ 641, 779, 620, 621, 622, 623, 624, 739, 878, 914,
+ 906, 907, 908, 915, 625, 626, 627, 628, 909, 881,
+ 629, 630, 910, 929, 631, 632, 633, 845, 743, 847,
+ 885, 904, 905, 634, 401, 402, 403, 427, 635, 473,
+ 474, 453, 454, 791, 792, 405, 676, 677, 681, 406,
+ 407, 687, 688, 691, 694, 408, 699, 700, 409, 455,
+ 456
};
/* YYTABLE[YYPACT[STATE-NUM]] -- What to do in state STATE-NUM. If
@@ -1685,281 +1692,328 @@ static const yytype_int16 yydefgoto[] =
number is the opposite. If YYTABLE_NINF, syntax error. */
static const yytype_int16 yytable[] =
{
- 394, 430, 401, 637, 768, 646, 445, 444, 588, 393,
- 494, 445, 667, 377, 379, 841, 535, 772, 707, 775,
- 446, 437, 777, 466, 708, 446, 786, 677, 667, 668,
- 421, 475, 687, 719, 720, 730, 417, 407, 655, 661,
- 910, 699, 662, 481, 661, 430, 658, 918, 541, 562,
- 709, 482, 481, 563, 542, 476, 422, 910, 659, 634,
- 787, 437, 669, 670, 671, 672, 633, 635, 418, 721,
- 722, 731, 589, 663, 676, 408, 589, 437, 663, 676,
- 590, 647, 648, 639, 492, 676, 481, 589, 676, 328,
- 329, 330, 565, 493, 773, 778, 495, 676, 715, 496,
- 716, -35, 497, 649, 745, 409, 747, 650, 456, 458,
- 460, 462, 464, 465, 468, 543, 734, 827, 828, 829,
- 830, 544, 843, 753, 754, 755, 756, 757, 758, 759,
- 760, 761, 762, 549, 557, 432, 571, 410, 433, 550,
- 558, 411, 572, 763, 637, 573, 637, 652, 412, 637,
- 665, 574, 782, 653, 664, 780, 851, 415, 790, 707,
- 664, 765, 664, 784, 542, 664, 693, 664, 413, 664,
- 664, 792, 794, 796, 664, 799, 801, 793, 795, 797,
- 414, 800, 802, 803, 806, 809, 565, 811, 565, 804,
- 807, 810, 425, 812, 883, 884, 437, 889, 925, 890,
- 765, 765, 891, 793, 892, 797, 893, 894, 800, 921,
- 804, 426, 807, 812, 856, 765, 858, 419, 857, 642,
- 859, 434, 643, 768, 680, 681, 682, 683, 454, 707,
- 530, 455, 457, 717, 718, 455, 886, 445, 444, 765,
- 459, 817, 766, 455, 818, 845, 852, 461, 853, 847,
- 455, 446, 463, 467, 675, 455, 455, 455, 679, 565,
- 686, 455, 689, 455, 692, 455, 698, 455, 765, 455,
- 817, 846, 576, 872, 849, 850, 420, 862, 677, 816,
- 667, 723, 724, 637, 866, 687, 712, 713, 714, 423,
- 644, 645, 920, 765, 848, 765, 895, 823, 824, 439,
- 825, 826, 831, 832, 447, 449, 469, 768, 474, 479,
- 484, 325, 481, 490, 491, 536, 537, 538, 561, 540,
- 539, 559, 657, 546, 676, 676, 545, 565, 547, 548,
- 551, 676, 676, 552, 553, 554, 555, 676, 576, 676,
- 556, 560, 874, 576, 570, 876, 564, 651, 656, 576,
- 492, 641, 576, 725, 589, 666, 662, 727, 690, 700,
- 703, 576, 726, 637, 728, 729, 711, 732, 737, 741,
- 735, 738, 742, 746, 779, -36, 739, 743, 748, 783,
- 576, 749, 431, -34, 750, 876, 751, -29, 798, 752,
- 438, 393, 805, 791, 808, 813, 814, 565, 394, 393,
- 401, 394, 913, 840, 855, 908, 394, 393, 401, 765,
- 393, 377, 379, 868, 887, 393, 472, 922, 896, 637,
- 448, 888, 898, 899, 879, 897, 431, 486, -569, 909,
- 431, 915, 914, 591, 833, 393, 919, 927, 916, 393,
- 928, 835, 834, 838, 416, 836, 438, 877, 837, 785,
- 815, 710, 873, 880, 917, 393, 923, 881, 924, 769,
- 900, 446, 770, 488, 771, 443, 701, 485, 487, 861,
- 860, 863, 865, 566, 489, 864, 869, 867, 871, 870,
- 0, 0, 393, 0, 616, 0, 0, 877, 0, 0,
- 0, 0, 0, 615, 0, 0, 0, 0, 0, 0,
- 0, 446, 0, 820, 821, 822, 576, 576, 576, 576,
- 576, 576, 576, 576, 576, 576, 576, 576, 576, 576,
- 576, 576, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 660, 0,
+ 397, 433, 404, 448, 640, 591, 771, 382, 448, 396,
+ 649, 380, 497, 533, 680, 670, 447, 710, 538, 690,
+ 449, 844, 440, 671, 469, 449, 711, 733, 702, 420,
+ 775, 670, 778, 664, 418, 780, 665, 712, 789, 658,
+ 424, 664, 661, 722, 723, 433, 478, 457, 565, 410,
+ 458, 495, 566, 484, 662, 579, 672, 673, 674, 675,
+ 496, 421, 440, 734, 650, 651, 425, 666, 636, 638,
+ 479, 679, 790, 647, 648, 666, 679, 411, 440, 724,
+ 725, 484, 679, 484, -35, 679, 652, 667, 637, 913,
+ 653, 485, 568, 667, 679, 667, 921, 781, 667, 426,
+ 667, 592, 667, 667, 592, 660, 913, 667, 642, 748,
+ 592, 750, 593, 737, 544, 460, 498, 776, 458, 499,
+ 545, 579, 500, 546, 846, 552, 579, 560, 574, 547,
+ 412, 553, 579, 561, 575, 579, 459, 461, 463, 465,
+ 467, 468, 471, 576, 579, 640, 655, 640, 783, 577,
+ 640, 668, 656, 793, 768, 795, 797, 785, 710, 545,
+ 799, 796, 798, 579, 787, 435, 800, 696, 436, 854,
+ 756, 757, 758, 759, 760, 761, 762, 763, 764, 765,
+ 859, 802, 429, 804, 860, 806, 568, 803, 568, 805,
+ 766, 807, 809, 812, 814, 886, 887, 440, 810, 813,
+ 815, 768, 768, 892, 928, 893, 894, 895, 896, 796,
+ 897, 800, 803, 807, 810, 924, 815, 428, 861, 437,
+ 462, 768, 862, 458, 645, 771, 413, 646, 710, 414,
+ 464, 415, 788, 458, 448, 683, 684, 685, 686, 830,
+ 831, 832, 833, 466, 416, 470, 458, 447, 458, 889,
+ 848, 449, 678, 682, 850, 458, 458, 689, 692, 568,
+ 458, 458, 417, 695, 865, 680, 458, 701, 720, 721,
+ 458, 869, 690, 422, 768, 852, 853, 769, 718, 820,
+ 719, 819, 821, 670, 640, 423, 823, 824, 825, 579,
+ 579, 579, 579, 579, 579, 579, 579, 579, 579, 579,
+ 579, 579, 579, 579, 579, 923, 442, 768, 820, 771,
+ 849, 875, 328, 329, 330, 726, 727, 715, 716, 717,
+ 450, 679, 679, 855, 477, 856, 487, 568, 679, 679,
+ 768, 851, 768, 898, 679, 452, 679, 826, 827, 472,
+ 828, 829, 482, 834, 835, 325, 484, 877, 493, 494,
+ 879, 539, 540, 543, 728, 541, 542, 548, 562, 549,
+ 550, 551, 554, 644, 640, 564, 555, 891, 556, 557,
+ 558, 579, 579, 559, 563, 654, 659, 573, 579, 579,
+ 567, 592, 495, 665, 579, 434, 579, 693, 703, 731,
+ 879, 738, 729, 441, 396, 730, 732, 568, 735, 740,
+ 669, 397, 396, 404, 397, 706, 911, 916, 382, 397,
+ 396, 404, 380, 396, 714, 741, 451, 742, 396, 475,
+ 640, 744, 925, 745, 746, 749, 751, 752, -36, 434,
+ 489, -34, 753, 434, 754, -29, 755, 782, 396, 794,
+ 786, 816, 396, 801, 880, 808, 811, 817, 843, 441,
+ 858, 768, 871, 882, 890, 899, 900, 901, 396, 902,
+ 912, 449, -572, 918, 917, 594, 836, 919, 922, 838,
+ 930, 931, 837, 839, 841, 491, 569, 840, 490, 713,
+ 492, 419, 876, 883, 880, 396, 920, 619, 818, 927,
+ 926, 488, 884, 446, 772, 903, 618, 773, 704, 774,
+ 866, 449, 864, 863, 874, 870, 868, 873, 867, 872,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 702, 0, 566, 0, 566,
- 0, 0, 0, 0, 393, 0, 393, 0, 393, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 576, 576,
- 0, 0, 0, 0, 0, 576, 576, 0, 0, 0,
- 0, 576, 0, 576, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 616, 0, 0, 0, 0, 0, 0, 0,
- 0, 615, 394, 0, 0, 0, 0, 0, 0, 0,
- 566, 393, 0, 0, 0, 0, 0, 0, 0, 393,
+ 0, 663, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 705, 0,
+ 569, 0, 569, 0, 0, 0, 0, 396, 0, 396,
+ 0, 396, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 619, 0, 0, 0, 0,
+ 0, 0, 0, 0, 618, 397, 0, 0, 0, 0,
+ 0, 0, 0, 569, 396, 0, 0, 0, 0, 0,
+ 0, 0, 396, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 566, 0,
- 0, 0, 0, 0, 0, 0, 0, 393, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 616, 0, 0, 0,
- 616, 0, 0, 0, 0, 615, 0, 0, 0, 615,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 566, 0,
- 0, 0, 0, 0, 0, 0, 0, 393, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 569, 0, 0, 0, 0, 0, 0, 0, 0,
+ 396, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 619,
+ 0, 0, 0, 619, 0, 0, 0, 0, 618, 0,
+ 0, 0, 618, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 569, 0, 0, 0, 0, 0, 0, 0, 0,
+ 396, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 616, 616, 0, 616, 0, 401, 0, 0, 0,
- 615, 615, 0, 615, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 616, 0, 0, 0, 0, 0, 0, 0,
- 0, 615, 0, 0, 0, 0, 0, 0, 616, 0,
- 0, 0, 0, 0, 0, 616, 0, 615, 0, 0,
- 0, 0, 0, 0, 615, 616, 0, 0, 0, 616,
- 0, 0, 0, 0, 615, 616, 0, 0, 615, 0,
- 0, 0, 442, 0, 615, 1, 2, 3, 4, 5,
- 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
- 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
- 26, 27, 28, 29, 30, 31, 32, 33, 34, 35,
- 36, 37, 38, 39, 40, 41, 42, 43, 44, 45,
- 46, 47, 48, 49, 50, 51, 52, 53, 54, 55,
- 56, 57, 58, 59, 60, 61, 62, 63, 64, 65,
- 66, 67, 68, 69, 70, 71, 72, 73, 74, 75,
- 76, 77, 78, 79, 80, 81, 82, 83, 84, 85,
- 86, 87, 88, 89, 90, 91, 92, 93, 94, 95,
- 96, 97, 98, 99, 100, 101, 102, 103, 104, 105,
- 106, 107, 108, 109, 110, 111, 112, 113, 114, 115,
- 116, 117, 118, 119, 120, 121, 122, 123, 124, 125,
- 126, 127, 128, 129, 130, 131, 132, 133, 134, 135,
- 136, 137, 138, 139, 140, 141, 142, 143, 144, 145,
- 146, 147, 148, 149, 150, 151, 152, 153, 154, 155,
- 156, 157, 158, 159, 160, 161, 162, 163, 164, 165,
- 166, 167, 168, 169, 170, 171, 172, 173, 174, 175,
- 176, 177, 178, 179, 180, 181, 182, 183, 184, 185,
- 186, 187, 188, 189, 190, 191, 192, 193, 194, 195,
- 196, 197, 198, 199, 200, 201, 202, 203, 204, 205,
- 206, 207, 208, 209, 210, 211, 212, 213, 214, 215,
- 216, 217, 218, 219, 220, 221, 222, 223, 224, 225,
- 226, 227, 228, 229, 230, 231, 232, 233, 234, 235,
- 236, 237, 238, 239, 240, 241, 242, 243, 244, 245,
- 246, 247, 248, 249, 250, 251, 252, 253, 254, 255,
- 256, 257, 258, 259, 260, 261, 262, 263, 264, 265,
- 266, 267, 268, 269, 270, 271, 272, 273, 274, 275,
- 276, 277, 278, 279, 280, 281, 282, 283, 284, 285,
- 286, 287, 288, 289, 290, 291, 292, 293, 294, 295,
- 296, 297, 298, 299, 300, 301, 302, 303, 304, 305,
- 306, 307, 308, 309, 310, 311, 312, 313, 314, 315,
- 316, 317, 318, 319, 320, 321, 322, 323, 324, 0,
+ 0, 0, 0, 0, 619, 619, 0, 619, 0, 404,
+ 0, 0, 0, 618, 618, 0, 618, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 325, 0, 0, 0, 0, 0, 0,
- 0, 326, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 327, 328, 329, 330, 331,
- 0, 0, 0, 0, 0, 0, 0, 0, 332, 333,
- 334, 335, 336, 337, 338, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 619, 0, 0, 0, 0,
+ 0, 0, 0, 0, 618, 0, 0, 0, 0, 0,
+ 0, 619, 0, 0, 0, 0, 0, 0, 619, 0,
+ 618, 0, 0, 0, 0, 0, 0, 618, 619, 0,
+ 0, 0, 619, 0, 0, 0, 0, 618, 619, 0,
+ 0, 618, 0, 0, 0, 445, 0, 618, 1, 2,
+ 3, 4, 5, 6, 7, 8, 9, 10, 11, 12,
+ 13, 14, 15, 16, 17, 18, 19, 20, 21, 22,
+ 23, 24, 25, 26, 27, 28, 29, 30, 31, 32,
+ 33, 34, 35, 36, 37, 38, 39, 40, 41, 42,
+ 43, 44, 45, 46, 47, 48, 49, 50, 51, 52,
+ 53, 54, 55, 56, 57, 58, 59, 60, 61, 62,
+ 63, 64, 65, 66, 67, 68, 69, 70, 71, 72,
+ 73, 74, 75, 76, 77, 78, 79, 80, 81, 82,
+ 83, 84, 85, 86, 87, 88, 89, 90, 91, 92,
+ 93, 94, 95, 96, 97, 98, 99, 100, 101, 102,
+ 103, 104, 105, 106, 107, 108, 109, 110, 111, 112,
+ 113, 114, 115, 116, 117, 118, 119, 120, 121, 122,
+ 123, 124, 125, 126, 127, 128, 129, 130, 131, 132,
+ 133, 134, 135, 136, 137, 138, 139, 140, 141, 142,
+ 143, 144, 145, 146, 147, 148, 149, 150, 151, 152,
+ 153, 154, 155, 156, 157, 158, 159, 160, 161, 162,
+ 163, 164, 165, 166, 167, 168, 169, 170, 171, 172,
+ 173, 174, 175, 176, 177, 178, 179, 180, 181, 182,
+ 183, 184, 185, 186, 187, 188, 189, 190, 191, 192,
+ 193, 194, 195, 196, 197, 198, 199, 200, 201, 202,
+ 203, 204, 205, 206, 207, 208, 209, 210, 211, 212,
+ 213, 214, 215, 216, 217, 218, 219, 220, 221, 222,
+ 223, 224, 225, 226, 227, 228, 229, 230, 231, 232,
+ 233, 234, 235, 236, 237, 238, 239, 240, 241, 242,
+ 243, 244, 245, 246, 247, 248, 249, 250, 251, 252,
+ 253, 254, 255, 256, 257, 258, 259, 260, 261, 262,
+ 263, 264, 265, 266, 267, 268, 269, 270, 271, 272,
+ 273, 274, 275, 276, 277, 278, 279, 280, 281, 282,
+ 283, 284, 285, 286, 287, 288, 289, 290, 291, 292,
+ 293, 294, 295, 296, 297, 298, 299, 300, 301, 302,
+ 303, 304, 305, 306, 307, 308, 309, 310, 311, 312,
+ 313, 314, 315, 316, 317, 318, 319, 320, 321, 322,
+ 323, 324, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 339, 340, 341, 342, 343, 344, 0, 0, 0, 0,
- 0, 0, 0, 0, 345, 0, 346, 347, 348, 349,
- 350, 351, 352, 353, 354, 355, 356, 357, 358, 359,
- 360, 361, 362, 363, 364, 365, 366, 367, 368, 369,
- 370, 371, 372, 373, 374, 375, 376, 1, 2, 3,
- 4, 5, 6, 7, 8, 9, 10, 11, 12, 13,
- 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
- 24, 25, 26, 27, 28, 29, 30, 31, 32, 33,
- 34, 35, 36, 37, 38, 39, 40, 41, 42, 43,
- 44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
- 54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
- 64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
- 74, 75, 76, 77, 78, 79, 80, 81, 82, 83,
- 84, 85, 86, 87, 88, 89, 90, 91, 92, 93,
- 94, 95, 96, 97, 98, 99, 100, 101, 102, 103,
- 104, 105, 106, 107, 108, 109, 110, 111, 112, 113,
- 114, 115, 116, 117, 118, 119, 120, 121, 122, 123,
- 124, 125, 126, 127, 128, 129, 130, 131, 132, 133,
- 134, 135, 136, 137, 138, 139, 140, 141, 142, 143,
- 144, 145, 146, 147, 148, 149, 150, 151, 152, 153,
- 154, 155, 156, 157, 158, 159, 160, 161, 162, 163,
- 164, 165, 166, 167, 168, 169, 170, 171, 172, 173,
- 174, 175, 176, 177, 178, 179, 180, 181, 182, 183,
- 184, 185, 186, 187, 188, 189, 190, 191, 192, 193,
- 194, 195, 196, 197, 198, 199, 200, 201, 202, 203,
- 204, 205, 206, 207, 208, 209, 210, 211, 212, 213,
- 214, 215, 216, 217, 218, 219, 220, 221, 222, 223,
- 224, 225, 226, 227, 228, 229, 230, 231, 232, 233,
- 234, 235, 236, 237, 238, 239, 240, 241, 242, 243,
- 244, 245, 246, 247, 248, 249, 250, 251, 252, 253,
- 254, 255, 256, 257, 258, 259, 260, 261, 262, 263,
- 264, 265, 266, 267, 268, 269, 270, 271, 272, 273,
- 274, 275, 276, 277, 278, 279, 280, 281, 282, 283,
- 284, 285, 286, 287, 288, 289, 290, 291, 292, 293,
- 294, 295, 296, 297, 298, 299, 300, 301, 302, 303,
- 304, 305, 306, 307, 308, 309, 310, 311, 312, 313,
- 314, 315, 316, 317, 318, 319, 320, 321, 322, 323,
- 324, 0, 0, 499, 500, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 325, 0, 0, 0,
+ 0, 0, 0, 0, 326, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 327, 328,
+ 329, 330, 331, 0, 0, 0, 0, 0, 0, 0,
+ 0, 332, 333, 334, 335, 336, 337, 338, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 501, 502, 0, 325, 0, 591, 592, 0,
- 0, 0, 0, 593, 503, 504, 505, 506, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 327, 328, 329,
- 330, 331, 0, 0, 0, 507, 508, 509, 510, 511,
- 332, 333, 334, 335, 336, 337, 338, 594, 595, 596,
- 597, 0, 598, 599, 600, 601, 602, 603, 604, 605,
- 606, 607, 339, 340, 341, 342, 343, 344, 512, 513,
- 514, 515, 516, 517, 518, 519, 345, 608, 346, 347,
- 348, 349, 350, 351, 352, 353, 354, 355, 356, 357,
- 358, 359, 360, 361, 362, 363, 364, 365, 366, 367,
- 368, 369, 370, 371, 372, 373, 374, 375, 376, 1,
- 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
- 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 32, 33, 34, 35, 36, 37, 38, 39, 40, 41,
- 42, 43, 44, 45, 46, 47, 48, 49, 50, 51,
- 52, 53, 54, 55, 56, 57, 58, 59, 60, 61,
- 62, 63, 64, 65, 66, 67, 68, 69, 70, 71,
- 72, 73, 74, 75, 76, 77, 78, 79, 80, 81,
- 82, 83, 84, 85, 86, 87, 88, 89, 90, 91,
- 92, 93, 94, 95, 96, 97, 98, 99, 100, 101,
- 102, 103, 104, 105, 106, 107, 108, 109, 110, 111,
- 112, 113, 114, 115, 116, 117, 118, 119, 120, 121,
- 122, 123, 124, 125, 126, 127, 128, 129, 130, 131,
- 132, 133, 134, 135, 136, 137, 138, 139, 140, 141,
- 142, 143, 144, 145, 146, 147, 148, 149, 150, 151,
- 152, 153, 154, 155, 156, 157, 158, 159, 160, 161,
- 162, 163, 164, 165, 166, 167, 168, 169, 170, 171,
- 172, 173, 174, 175, 176, 177, 178, 179, 180, 181,
- 182, 183, 184, 185, 186, 187, 188, 189, 190, 191,
- 192, 193, 194, 195, 196, 197, 198, 199, 200, 201,
- 202, 203, 204, 205, 206, 207, 208, 209, 210, 211,
- 212, 213, 214, 215, 216, 217, 218, 219, 220, 221,
- 222, 223, 224, 225, 226, 227, 228, 229, 230, 231,
- 232, 233, 234, 235, 236, 237, 238, 239, 240, 241,
- 242, 243, 244, 245, 246, 247, 248, 249, 250, 251,
- 252, 253, 254, 255, 256, 257, 258, 259, 260, 261,
- 262, 263, 264, 265, 266, 267, 268, 269, 270, 271,
- 272, 273, 274, 275, 276, 277, 278, 279, 280, 281,
- 282, 283, 284, 285, 286, 287, 288, 289, 290, 291,
- 292, 293, 294, 295, 296, 297, 298, 299, 300, 301,
- 302, 303, 304, 305, 306, 307, 308, 309, 310, 311,
- 312, 313, 314, 315, 316, 317, 318, 319, 320, 321,
- 322, 323, 324, 0, 0, 499, 500, 0, 0, 0,
+ 0, 0, 0, 339, 340, 341, 342, 343, 344, 0,
+ 0, 0, 0, 0, 0, 0, 0, 345, 0, 346,
+ 347, 348, 349, 350, 351, 352, 353, 354, 355, 356,
+ 357, 358, 359, 360, 361, 362, 363, 364, 365, 366,
+ 367, 368, 369, 370, 371, 372, 373, 374, 375, 376,
+ 377, 378, 379, 1, 2, 3, 4, 5, 6, 7,
+ 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
+ 18, 19, 20, 21, 22, 23, 24, 25, 26, 27,
+ 28, 29, 30, 31, 32, 33, 34, 35, 36, 37,
+ 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+ 48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+ 58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+ 68, 69, 70, 71, 72, 73, 74, 75, 76, 77,
+ 78, 79, 80, 81, 82, 83, 84, 85, 86, 87,
+ 88, 89, 90, 91, 92, 93, 94, 95, 96, 97,
+ 98, 99, 100, 101, 102, 103, 104, 105, 106, 107,
+ 108, 109, 110, 111, 112, 113, 114, 115, 116, 117,
+ 118, 119, 120, 121, 122, 123, 124, 125, 126, 127,
+ 128, 129, 130, 131, 132, 133, 134, 135, 136, 137,
+ 138, 139, 140, 141, 142, 143, 144, 145, 146, 147,
+ 148, 149, 150, 151, 152, 153, 154, 155, 156, 157,
+ 158, 159, 160, 161, 162, 163, 164, 165, 166, 167,
+ 168, 169, 170, 171, 172, 173, 174, 175, 176, 177,
+ 178, 179, 180, 181, 182, 183, 184, 185, 186, 187,
+ 188, 189, 190, 191, 192, 193, 194, 195, 196, 197,
+ 198, 199, 200, 201, 202, 203, 204, 205, 206, 207,
+ 208, 209, 210, 211, 212, 213, 214, 215, 216, 217,
+ 218, 219, 220, 221, 222, 223, 224, 225, 226, 227,
+ 228, 229, 230, 231, 232, 233, 234, 235, 236, 237,
+ 238, 239, 240, 241, 242, 243, 244, 245, 246, 247,
+ 248, 249, 250, 251, 252, 253, 254, 255, 256, 257,
+ 258, 259, 260, 261, 262, 263, 264, 265, 266, 267,
+ 268, 269, 270, 271, 272, 273, 274, 275, 276, 277,
+ 278, 279, 280, 281, 282, 283, 284, 285, 286, 287,
+ 288, 289, 290, 291, 292, 293, 294, 295, 296, 297,
+ 298, 299, 300, 301, 302, 303, 304, 305, 306, 307,
+ 308, 309, 310, 311, 312, 313, 314, 315, 316, 317,
+ 318, 319, 320, 321, 322, 323, 324, 0, 0, 502,
+ 503, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 504, 505,
+ 0, 325, 0, 594, 595, 0, 0, 0, 0, 596,
+ 506, 507, 508, 509, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 327, 328, 329, 330, 331, 0, 0,
+ 0, 510, 511, 512, 513, 514, 332, 333, 334, 335,
+ 336, 337, 338, 597, 598, 599, 600, 0, 601, 602,
+ 603, 604, 605, 606, 607, 608, 609, 610, 339, 340,
+ 341, 342, 343, 344, 515, 516, 517, 518, 519, 520,
+ 521, 522, 345, 611, 346, 347, 348, 349, 350, 351,
+ 352, 353, 354, 355, 356, 357, 358, 359, 360, 361,
+ 362, 363, 364, 365, 366, 367, 368, 369, 370, 371,
+ 372, 373, 374, 375, 376, 377, 378, 379, 1, 2,
+ 3, 4, 5, 6, 7, 8, 9, 10, 11, 12,
+ 13, 14, 15, 16, 17, 18, 19, 20, 21, 22,
+ 23, 24, 25, 26, 27, 28, 29, 30, 31, 32,
+ 33, 34, 35, 36, 37, 38, 39, 40, 41, 42,
+ 43, 44, 45, 46, 47, 48, 49, 50, 51, 52,
+ 53, 54, 55, 56, 57, 58, 59, 60, 61, 62,
+ 63, 64, 65, 66, 67, 68, 69, 70, 71, 72,
+ 73, 74, 75, 76, 77, 78, 79, 80, 81, 82,
+ 83, 84, 85, 86, 87, 88, 89, 90, 91, 92,
+ 93, 94, 95, 96, 97, 98, 99, 100, 101, 102,
+ 103, 104, 105, 106, 107, 108, 109, 110, 111, 112,
+ 113, 114, 115, 116, 117, 118, 119, 120, 121, 122,
+ 123, 124, 125, 126, 127, 128, 129, 130, 131, 132,
+ 133, 134, 135, 136, 137, 138, 139, 140, 141, 142,
+ 143, 144, 145, 146, 147, 148, 149, 150, 151, 152,
+ 153, 154, 155, 156, 157, 158, 159, 160, 161, 162,
+ 163, 164, 165, 166, 167, 168, 169, 170, 171, 172,
+ 173, 174, 175, 176, 177, 178, 179, 180, 181, 182,
+ 183, 184, 185, 186, 187, 188, 189, 190, 191, 192,
+ 193, 194, 195, 196, 197, 198, 199, 200, 201, 202,
+ 203, 204, 205, 206, 207, 208, 209, 210, 211, 212,
+ 213, 214, 215, 216, 217, 218, 219, 220, 221, 222,
+ 223, 224, 225, 226, 227, 228, 229, 230, 231, 232,
+ 233, 234, 235, 236, 237, 238, 239, 240, 241, 242,
+ 243, 244, 245, 246, 247, 248, 249, 250, 251, 252,
+ 253, 254, 255, 256, 257, 258, 259, 260, 261, 262,
+ 263, 264, 265, 266, 267, 268, 269, 270, 271, 272,
+ 273, 274, 275, 276, 277, 278, 279, 280, 281, 282,
+ 283, 284, 285, 286, 287, 288, 289, 290, 291, 292,
+ 293, 294, 295, 296, 297, 298, 299, 300, 301, 302,
+ 303, 304, 305, 306, 307, 308, 309, 310, 311, 312,
+ 313, 314, 315, 316, 317, 318, 319, 320, 321, 322,
+ 323, 324, 0, 0, 502, 503, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 501, 502, 0, 325, 0, 591,
- 767, 0, 0, 0, 0, 593, 503, 504, 505, 506,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 327,
- 328, 329, 330, 331, 0, 0, 0, 507, 508, 509,
- 510, 511, 332, 333, 334, 335, 336, 337, 338, 594,
- 595, 596, 597, 0, 598, 599, 600, 601, 602, 603,
- 604, 605, 606, 607, 339, 340, 341, 342, 343, 344,
- 512, 513, 514, 515, 516, 517, 518, 519, 345, 608,
- 346, 347, 348, 349, 350, 351, 352, 353, 354, 355,
- 356, 357, 358, 359, 360, 361, 362, 363, 364, 365,
- 366, 367, 368, 369, 370, 371, 372, 373, 374, 375,
- 376, 1, 2, 3, 4, 5, 6, 7, 8, 9,
- 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
- 20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
- 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
- 40, 41, 42, 43, 44, 45, 46, 47, 48, 49,
- 50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
- 60, 61, 62, 63, 64, 65, 66, 67, 68, 69,
- 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
- 80, 81, 82, 83, 84, 85, 86, 87, 88, 89,
- 90, 91, 92, 93, 94, 95, 96, 97, 98, 99,
- 100, 101, 102, 103, 104, 105, 106, 107, 108, 109,
- 110, 111, 112, 113, 114, 115, 116, 117, 118, 119,
- 120, 121, 122, 123, 124, 125, 126, 127, 128, 129,
- 130, 131, 132, 133, 134, 135, 136, 137, 138, 139,
- 140, 141, 142, 143, 144, 145, 146, 147, 148, 149,
- 150, 151, 152, 153, 154, 155, 156, 157, 158, 159,
- 160, 161, 162, 163, 164, 165, 166, 167, 168, 169,
- 170, 171, 172, 173, 174, 175, 176, 177, 178, 179,
- 180, 181, 182, 183, 184, 185, 186, 187, 188, 189,
- 190, 191, 192, 193, 194, 195, 196, 197, 198, 199,
- 200, 201, 202, 203, 204, 205, 206, 207, 208, 209,
- 210, 211, 212, 213, 214, 215, 216, 217, 218, 219,
- 220, 221, 222, 223, 224, 225, 226, 227, 228, 229,
- 230, 231, 232, 233, 234, 235, 236, 237, 238, 239,
- 240, 241, 242, 243, 244, 245, 246, 247, 248, 249,
- 250, 251, 252, 253, 254, 255, 256, 257, 258, 259,
- 260, 261, 262, 263, 264, 265, 266, 267, 268, 269,
- 270, 271, 272, 273, 274, 275, 276, 277, 278, 279,
- 280, 281, 282, 283, 284, 285, 286, 287, 288, 289,
- 290, 291, 292, 293, 294, 295, 296, 297, 298, 299,
- 300, 301, 302, 303, 304, 305, 306, 307, 308, 309,
- 310, 311, 312, 313, 314, 315, 316, 317, 318, 319,
- 320, 321, 322, 323, 324, 0, 0, 499, 500, 0,
+ 0, 0, 0, 504, 505, 0, 325, 0, 594, 770,
+ 0, 0, 0, 0, 596, 506, 507, 508, 509, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 327, 328,
+ 329, 330, 331, 0, 0, 0, 510, 511, 512, 513,
+ 514, 332, 333, 334, 335, 336, 337, 338, 597, 598,
+ 599, 600, 0, 601, 602, 603, 604, 605, 606, 607,
+ 608, 609, 610, 339, 340, 341, 342, 343, 344, 515,
+ 516, 517, 518, 519, 520, 521, 522, 345, 611, 346,
+ 347, 348, 349, 350, 351, 352, 353, 354, 355, 356,
+ 357, 358, 359, 360, 361, 362, 363, 364, 365, 366,
+ 367, 368, 369, 370, 371, 372, 373, 374, 375, 376,
+ 377, 378, 379, 1, 2, 3, 4, 5, 6, 7,
+ 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
+ 18, 19, 20, 21, 22, 23, 24, 25, 26, 27,
+ 28, 29, 30, 31, 32, 33, 34, 35, 36, 37,
+ 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+ 48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+ 58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+ 68, 69, 70, 71, 72, 73, 74, 75, 76, 77,
+ 78, 79, 80, 81, 82, 83, 84, 85, 86, 87,
+ 88, 89, 90, 91, 92, 93, 94, 95, 96, 97,
+ 98, 99, 100, 101, 102, 103, 104, 105, 106, 107,
+ 108, 109, 110, 111, 112, 113, 114, 115, 116, 117,
+ 118, 119, 120, 121, 122, 123, 124, 125, 126, 127,
+ 128, 129, 130, 131, 132, 133, 134, 135, 136, 137,
+ 138, 139, 140, 141, 142, 143, 144, 145, 146, 147,
+ 148, 149, 150, 151, 152, 153, 154, 155, 156, 157,
+ 158, 159, 160, 161, 162, 163, 164, 165, 166, 167,
+ 168, 169, 170, 171, 172, 173, 174, 175, 176, 177,
+ 178, 179, 180, 181, 182, 183, 184, 185, 186, 187,
+ 188, 189, 190, 191, 192, 193, 194, 195, 196, 197,
+ 198, 199, 200, 201, 202, 203, 204, 205, 206, 207,
+ 208, 209, 210, 211, 212, 213, 214, 215, 216, 217,
+ 218, 219, 220, 221, 222, 223, 224, 225, 226, 227,
+ 228, 229, 230, 231, 232, 233, 234, 235, 236, 237,
+ 238, 239, 240, 241, 242, 243, 244, 245, 246, 247,
+ 248, 249, 250, 251, 252, 253, 254, 255, 256, 257,
+ 258, 259, 260, 261, 262, 263, 264, 265, 266, 267,
+ 268, 269, 270, 271, 272, 273, 274, 275, 276, 277,
+ 278, 279, 280, 281, 282, 283, 284, 285, 286, 287,
+ 288, 289, 290, 291, 292, 293, 294, 295, 296, 297,
+ 298, 299, 300, 301, 302, 303, 304, 305, 306, 307,
+ 308, 309, 310, 311, 312, 313, 314, 315, 316, 317,
+ 318, 319, 320, 321, 322, 323, 324, 0, 0, 502,
+ 503, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 504, 505,
+ 0, 325, 0, 594, 0, 0, 0, 0, 0, 596,
+ 506, 507, 508, 509, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 327, 328, 329, 330, 331, 0, 0,
+ 0, 510, 511, 512, 513, 514, 332, 333, 334, 335,
+ 336, 337, 338, 597, 598, 599, 600, 0, 601, 602,
+ 603, 604, 605, 606, 607, 608, 609, 610, 339, 340,
+ 341, 342, 343, 344, 515, 516, 517, 518, 519, 520,
+ 521, 522, 345, 611, 346, 347, 348, 349, 350, 351,
+ 352, 353, 354, 355, 356, 357, 358, 359, 360, 361,
+ 362, 363, 364, 365, 366, 367, 368, 369, 370, 371,
+ 372, 373, 374, 375, 376, 377, 378, 379, 1, 2,
+ 3, 4, 5, 6, 7, 8, 9, 10, 11, 12,
+ 13, 14, 15, 16, 17, 18, 19, 20, 21, 22,
+ 23, 24, 25, 26, 27, 28, 29, 30, 31, 32,
+ 33, 34, 35, 36, 37, 38, 39, 40, 41, 42,
+ 43, 44, 45, 46, 47, 48, 49, 50, 51, 52,
+ 53, 54, 55, 56, 57, 58, 59, 60, 61, 62,
+ 63, 64, 65, 66, 67, 68, 69, 70, 71, 72,
+ 73, 74, 75, 76, 77, 78, 79, 80, 81, 82,
+ 83, 84, 85, 86, 87, 88, 89, 90, 91, 92,
+ 93, 94, 95, 96, 97, 98, 99, 100, 101, 102,
+ 103, 104, 105, 106, 107, 108, 109, 110, 111, 112,
+ 113, 114, 115, 116, 117, 118, 119, 120, 121, 122,
+ 123, 124, 125, 126, 127, 128, 129, 130, 131, 132,
+ 133, 134, 135, 136, 137, 138, 139, 140, 141, 142,
+ 143, 144, 145, 146, 147, 148, 149, 150, 151, 152,
+ 153, 154, 155, 156, 157, 158, 159, 160, 161, 162,
+ 163, 164, 165, 166, 167, 168, 169, 170, 171, 172,
+ 173, 174, 175, 176, 177, 178, 179, 180, 181, 182,
+ 183, 184, 185, 186, 187, 188, 189, 190, 191, 192,
+ 193, 194, 195, 196, 197, 198, 199, 200, 201, 202,
+ 203, 204, 205, 206, 207, 208, 209, 210, 211, 212,
+ 213, 214, 215, 216, 217, 218, 219, 220, 221, 222,
+ 223, 224, 225, 226, 227, 228, 229, 230, 231, 232,
+ 233, 234, 235, 236, 237, 238, 239, 240, 241, 242,
+ 243, 244, 245, 246, 247, 248, 249, 250, 251, 252,
+ 253, 254, 255, 256, 257, 258, 259, 260, 261, 262,
+ 263, 264, 265, 266, 267, 268, 269, 270, 271, 272,
+ 273, 274, 275, 276, 277, 278, 279, 280, 281, 282,
+ 283, 284, 285, 286, 287, 288, 289, 290, 291, 292,
+ 293, 294, 295, 296, 297, 298, 299, 300, 301, 302,
+ 303, 304, 305, 306, 307, 308, 309, 310, 311, 312,
+ 313, 314, 315, 316, 317, 318, 319, 320, 321, 322,
+ 323, 324, 0, 0, 502, 503, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 501, 502, 0, 325,
- 0, 591, 0, 0, 0, 0, 0, 593, 503, 504,
- 505, 506, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 327, 328, 329, 330, 331, 0, 0, 0, 507,
- 508, 509, 510, 511, 332, 333, 334, 335, 336, 337,
- 338, 594, 595, 596, 597, 0, 598, 599, 600, 601,
- 602, 603, 604, 605, 606, 607, 339, 340, 341, 342,
- 343, 344, 512, 513, 514, 515, 516, 517, 518, 519,
- 345, 608, 346, 347, 348, 349, 350, 351, 352, 353,
- 354, 355, 356, 357, 358, 359, 360, 361, 362, 363,
- 364, 365, 366, 367, 368, 369, 370, 371, 372, 373,
- 374, 375, 376, 1, 2, 3, 4, 5, 6, 7,
+ 0, 0, 0, 504, 505, 0, 325, 0, 487, 0,
+ 0, 0, 0, 0, 596, 506, 507, 508, 509, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 327, 328,
+ 329, 330, 331, 0, 0, 0, 510, 511, 512, 513,
+ 514, 332, 333, 334, 335, 336, 337, 338, 597, 598,
+ 599, 600, 0, 601, 602, 603, 604, 605, 606, 607,
+ 608, 609, 610, 339, 340, 341, 342, 343, 344, 515,
+ 516, 517, 518, 519, 520, 521, 522, 345, 611, 346,
+ 347, 348, 349, 350, 351, 352, 353, 354, 355, 356,
+ 357, 358, 359, 360, 361, 362, 363, 364, 365, 366,
+ 367, 368, 369, 370, 371, 372, 373, 374, 375, 376,
+ 377, 378, 379, 1, 2, 3, 4, 5, 6, 7,
8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
18, 19, 20, 21, 22, 23, 24, 25, 26, 27,
28, 29, 30, 31, 32, 33, 34, 35, 36, 37,
@@ -1991,201 +2045,248 @@ static const yytype_int16 yytable[] =
288, 289, 290, 291, 292, 293, 294, 295, 296, 297,
298, 299, 300, 301, 302, 303, 304, 305, 306, 307,
308, 309, 310, 311, 312, 313, 314, 315, 316, 317,
- 318, 319, 320, 321, 322, 323, 324, 0, 0, 499,
- 500, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 501, 502,
- 0, 325, 0, 484, 0, 0, 0, 0, 0, 593,
- 503, 504, 505, 506, 0, 0, 0, 0, 0, 0,
+ 318, 319, 320, 321, 322, 323, 324, 0, 0, 502,
+ 503, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 504, 505,
+ 0, 325, 0, 0, 0, 0, 0, 0, 0, 596,
+ 506, 507, 508, 509, 0, 0, 0, 0, 0, 0,
0, 0, 0, 327, 328, 329, 330, 331, 0, 0,
- 0, 507, 508, 509, 510, 511, 332, 333, 334, 335,
- 336, 337, 338, 594, 595, 596, 597, 0, 598, 599,
- 600, 601, 602, 603, 604, 605, 606, 607, 339, 340,
- 341, 342, 343, 344, 512, 513, 514, 515, 516, 517,
- 518, 519, 345, 608, 346, 347, 348, 349, 350, 351,
+ 0, 510, 511, 512, 513, 514, 332, 333, 334, 335,
+ 336, 337, 338, 597, 598, 599, 600, 0, 601, 602,
+ 603, 604, 605, 606, 607, 608, 609, 610, 339, 340,
+ 341, 342, 343, 344, 515, 516, 517, 518, 519, 520,
+ 521, 522, 345, 611, 346, 347, 348, 349, 350, 351,
352, 353, 354, 355, 356, 357, 358, 359, 360, 361,
362, 363, 364, 365, 366, 367, 368, 369, 370, 371,
- 372, 373, 374, 375, 376, 1, 2, 3, 4, 5,
- 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
- 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
- 26, 27, 28, 29, 30, 31, 32, 33, 34, 35,
- 36, 37, 38, 39, 40, 41, 42, 43, 44, 45,
- 46, 47, 48, 49, 50, 51, 52, 53, 54, 55,
- 56, 57, 58, 59, 60, 61, 62, 63, 64, 65,
- 66, 67, 68, 69, 70, 71, 72, 73, 74, 75,
- 76, 77, 78, 79, 80, 81, 82, 83, 84, 85,
- 86, 87, 88, 89, 90, 91, 92, 93, 94, 95,
- 96, 97, 98, 99, 100, 101, 102, 103, 104, 105,
- 106, 107, 108, 109, 110, 111, 112, 113, 114, 115,
- 116, 117, 118, 119, 120, 121, 122, 123, 124, 125,
- 126, 127, 128, 129, 130, 131, 132, 133, 134, 135,
- 136, 137, 138, 139, 140, 141, 142, 143, 144, 145,
- 146, 147, 148, 149, 150, 151, 152, 153, 154, 155,
- 156, 157, 158, 159, 160, 161, 162, 163, 164, 165,
- 166, 167, 168, 169, 170, 171, 172, 173, 174, 175,
- 176, 177, 178, 179, 180, 181, 182, 183, 184, 185,
- 186, 187, 188, 189, 190, 191, 192, 193, 194, 195,
- 196, 197, 198, 199, 200, 201, 202, 203, 204, 205,
- 206, 207, 208, 209, 210, 211, 212, 213, 214, 215,
- 216, 217, 218, 219, 220, 221, 222, 223, 224, 225,
- 226, 227, 228, 229, 230, 231, 232, 233, 234, 235,
- 236, 237, 238, 239, 240, 241, 242, 243, 244, 245,
- 246, 247, 248, 249, 250, 251, 252, 253, 254, 255,
- 256, 257, 258, 259, 260, 261, 262, 263, 264, 265,
- 266, 267, 268, 269, 270, 271, 272, 273, 274, 275,
- 276, 277, 278, 279, 280, 281, 282, 283, 284, 285,
- 286, 287, 288, 289, 290, 291, 292, 293, 294, 295,
- 296, 297, 298, 299, 300, 301, 302, 303, 304, 305,
- 306, 307, 308, 309, 310, 311, 312, 313, 314, 315,
- 316, 317, 318, 319, 320, 321, 322, 323, 324, 0,
- 0, 499, 500, 0, 0, 0, 0, 0, 0, 0,
+ 372, 373, 374, 375, 376, 377, 378, 379, 1, 2,
+ 3, 4, 5, 6, 7, 8, 9, 10, 11, 12,
+ 13, 14, 15, 16, 17, 18, 19, 20, 21, 22,
+ 23, 24, 25, 26, 27, 28, 29, 30, 31, 32,
+ 33, 34, 35, 36, 37, 38, 39, 40, 41, 42,
+ 43, 44, 45, 46, 47, 48, 49, 50, 51, 52,
+ 53, 54, 55, 56, 57, 58, 59, 60, 61, 62,
+ 63, 64, 65, 66, 67, 68, 69, 70, 71, 72,
+ 73, 74, 75, 76, 77, 78, 79, 80, 81, 82,
+ 83, 84, 85, 86, 87, 88, 89, 90, 91, 92,
+ 93, 94, 95, 96, 97, 98, 99, 100, 101, 102,
+ 103, 104, 105, 106, 107, 108, 109, 110, 111, 112,
+ 113, 114, 115, 116, 117, 118, 119, 120, 121, 122,
+ 123, 124, 125, 126, 127, 128, 129, 130, 131, 132,
+ 133, 134, 135, 136, 137, 138, 139, 140, 141, 142,
+ 143, 144, 145, 146, 147, 148, 149, 150, 151, 152,
+ 153, 154, 155, 156, 157, 158, 159, 160, 161, 162,
+ 163, 164, 165, 166, 167, 168, 169, 170, 171, 172,
+ 173, 174, 175, 176, 177, 178, 179, 180, 181, 182,
+ 183, 184, 185, 186, 187, 188, 189, 190, 191, 192,
+ 193, 194, 195, 196, 197, 198, 199, 200, 201, 202,
+ 203, 204, 205, 206, 207, 208, 209, 210, 211, 212,
+ 213, 214, 215, 216, 217, 218, 219, 220, 221, 222,
+ 223, 224, 225, 226, 227, 228, 229, 230, 231, 232,
+ 233, 234, 235, 236, 237, 238, 239, 240, 241, 242,
+ 243, 244, 245, 246, 247, 248, 249, 250, 251, 252,
+ 253, 254, 255, 256, 257, 258, 259, 260, 261, 262,
+ 263, 264, 265, 266, 267, 268, 269, 270, 271, 272,
+ 273, 274, 275, 276, 277, 278, 279, 280, 281, 282,
+ 283, 284, 285, 286, 287, 288, 289, 290, 291, 292,
+ 293, 294, 295, 296, 297, 298, 299, 300, 301, 302,
+ 303, 304, 305, 306, 307, 308, 309, 310, 311, 312,
+ 313, 314, 315, 316, 317, 318, 319, 320, 321, 322,
+ 323, 324, 0, 0, 502, 503, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 501, 502, 0, 325, 0, 0, 0, 0, 0, 0,
- 0, 593, 503, 504, 505, 506, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 327, 328, 329, 330, 331,
- 0, 0, 0, 507, 508, 509, 510, 511, 332, 333,
- 334, 335, 336, 337, 338, 594, 595, 596, 597, 0,
- 598, 599, 600, 601, 602, 603, 604, 605, 606, 607,
- 339, 340, 341, 342, 343, 344, 512, 513, 514, 515,
- 516, 517, 518, 519, 345, 608, 346, 347, 348, 349,
- 350, 351, 352, 353, 354, 355, 356, 357, 358, 359,
- 360, 361, 362, 363, 364, 365, 366, 367, 368, 369,
- 370, 371, 372, 373, 374, 375, 376, 1, 2, 3,
- 4, 5, 6, 7, 8, 9, 10, 11, 12, 13,
- 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
- 24, 25, 26, 27, 28, 29, 30, 31, 32, 33,
- 34, 35, 36, 37, 38, 39, 40, 41, 42, 43,
- 44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
- 54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
- 64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
- 74, 75, 76, 77, 78, 79, 80, 81, 82, 83,
- 84, 85, 86, 87, 88, 89, 90, 91, 92, 93,
- 94, 95, 96, 97, 98, 99, 100, 101, 102, 103,
- 104, 105, 106, 107, 108, 109, 110, 111, 112, 113,
- 114, 115, 116, 117, 118, 119, 120, 121, 122, 123,
- 124, 125, 126, 127, 128, 129, 130, 131, 132, 133,
- 134, 135, 136, 137, 138, 139, 140, 141, 142, 143,
- 144, 145, 146, 147, 148, 149, 150, 151, 152, 153,
- 154, 155, 156, 157, 158, 159, 160, 161, 162, 163,
- 164, 165, 166, 167, 168, 169, 170, 171, 172, 173,
- 174, 175, 176, 177, 178, 179, 180, 181, 182, 183,
- 184, 185, 186, 187, 188, 189, 190, 191, 192, 193,
- 194, 195, 196, 197, 198, 199, 200, 201, 202, 203,
- 204, 205, 206, 207, 208, 209, 210, 211, 212, 213,
- 214, 215, 216, 217, 218, 219, 220, 221, 222, 223,
- 224, 225, 226, 227, 228, 229, 230, 231, 232, 233,
- 234, 235, 236, 237, 238, 239, 240, 241, 242, 243,
- 244, 245, 246, 247, 248, 249, 250, 251, 252, 253,
- 254, 255, 256, 257, 258, 259, 260, 261, 262, 263,
- 264, 265, 266, 267, 268, 269, 270, 271, 272, 273,
- 274, 275, 276, 277, 278, 279, 280, 281, 282, 283,
- 284, 285, 286, 287, 288, 289, 290, 291, 292, 293,
- 294, 295, 296, 297, 298, 299, 300, 301, 302, 303,
- 304, 305, 306, 307, 308, 309, 310, 311, 312, 313,
- 314, 315, 316, 317, 318, 319, 320, 321, 322, 323,
- 324, 0, 0, 499, 500, 0, 0, 0, 0, 0,
+ 0, 0, 0, 504, 505, 0, 325, 0, 0, 0,
+ 0, 0, 0, 0, 596, 506, 507, 508, 509, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 327, 328,
+ 329, 330, 331, 0, 0, 0, 510, 511, 512, 513,
+ 514, 332, 333, 334, 335, 336, 337, 338, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 501, 502, 0, 325, 0, 0, 0, 0,
- 0, 0, 0, 593, 503, 504, 505, 506, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 327, 328, 329,
- 330, 331, 0, 0, 0, 507, 508, 509, 510, 511,
- 332, 333, 334, 335, 336, 337, 338, 0, 0, 0,
+ 0, 0, 0, 339, 340, 341, 342, 343, 344, 515,
+ 516, 517, 518, 519, 520, 521, 522, 345, 0, 346,
+ 347, 348, 349, 350, 351, 352, 353, 354, 355, 356,
+ 357, 358, 359, 360, 361, 362, 363, 364, 365, 366,
+ 367, 368, 369, 370, 371, 372, 373, 374, 375, 376,
+ 377, 378, 379, 1, 2, 3, 4, 5, 6, 7,
+ 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
+ 18, 19, 20, 21, 22, 23, 24, 25, 26, 27,
+ 28, 29, 30, 31, 32, 33, 34, 35, 36, 37,
+ 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+ 48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+ 58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+ 68, 69, 70, 71, 72, 73, 74, 75, 76, 77,
+ 78, 79, 80, 81, 82, 83, 84, 85, 86, 87,
+ 88, 89, 90, 91, 92, 93, 94, 95, 96, 97,
+ 98, 99, 100, 101, 102, 103, 104, 105, 106, 107,
+ 108, 109, 110, 111, 112, 113, 114, 115, 116, 117,
+ 118, 119, 120, 121, 122, 123, 124, 125, 126, 127,
+ 128, 129, 130, 131, 132, 133, 134, 135, 136, 137,
+ 138, 139, 140, 141, 142, 143, 144, 145, 146, 147,
+ 148, 149, 150, 151, 152, 153, 154, 155, 156, 157,
+ 158, 159, 160, 161, 162, 163, 164, 165, 166, 167,
+ 168, 169, 170, 171, 172, 173, 174, 175, 176, 177,
+ 178, 179, 180, 181, 182, 183, 184, 185, 186, 187,
+ 188, 189, 190, 191, 192, 193, 194, 195, 196, 197,
+ 198, 199, 200, 201, 202, 203, 204, 205, 206, 207,
+ 208, 209, 210, 211, 212, 213, 214, 215, 216, 217,
+ 218, 219, 220, 221, 222, 223, 224, 225, 226, 227,
+ 228, 229, 230, 231, 232, 233, 234, 235, 236, 237,
+ 238, 239, 240, 241, 242, 243, 244, 245, 246, 247,
+ 248, 249, 250, 251, 252, 253, 254, 255, 256, 257,
+ 258, 259, 260, 261, 262, 263, 264, 265, 266, 267,
+ 268, 269, 270, 271, 272, 273, 274, 275, 276, 277,
+ 278, 279, 280, 281, 282, 283, 284, 285, 286, 287,
+ 288, 289, 290, 291, 292, 293, 294, 295, 296, 297,
+ 298, 299, 300, 301, 302, 303, 304, 305, 306, 307,
+ 308, 309, 310, 311, 312, 313, 314, 0, 0, 0,
+ 318, 319, 320, 321, 322, 323, 324, 0, 0, 502,
+ 503, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 504, 505,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 339, 340, 341, 342, 343, 344, 512, 513,
- 514, 515, 516, 517, 518, 519, 345, 0, 346, 347,
- 348, 349, 350, 351, 352, 353, 354, 355, 356, 357,
- 358, 359, 360, 361, 362, 363, 364, 365, 366, 367,
- 368, 369, 370, 371, 372, 373, 374, 375, 376, 1,
- 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
- 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 32, 33, 34, 35, 36, 37, 38, 39, 40, 41,
- 42, 43, 44, 45, 46, 47, 48, 49, 50, 51,
- 52, 53, 54, 55, 56, 57, 58, 59, 60, 61,
- 62, 63, 64, 65, 66, 67, 68, 69, 70, 71,
- 72, 73, 74, 75, 76, 77, 78, 79, 80, 81,
- 82, 83, 84, 85, 86, 87, 88, 89, 90, 91,
- 92, 93, 94, 95, 96, 97, 98, 99, 100, 101,
- 102, 103, 104, 105, 106, 107, 108, 109, 110, 111,
- 112, 113, 114, 115, 116, 117, 118, 119, 120, 121,
- 122, 123, 124, 125, 126, 127, 128, 129, 130, 131,
- 132, 133, 134, 135, 136, 137, 138, 139, 140, 141,
- 142, 143, 144, 145, 146, 147, 148, 149, 150, 151,
- 152, 153, 154, 155, 156, 157, 158, 159, 160, 161,
- 162, 163, 164, 165, 166, 167, 168, 169, 170, 171,
- 172, 173, 174, 175, 176, 177, 178, 179, 180, 181,
- 182, 183, 184, 185, 186, 187, 188, 189, 190, 191,
- 192, 193, 194, 195, 196, 197, 198, 199, 200, 201,
- 202, 203, 204, 205, 206, 207, 208, 209, 210, 211,
- 212, 213, 214, 215, 216, 217, 218, 219, 220, 221,
- 222, 223, 224, 225, 226, 227, 228, 229, 230, 231,
- 232, 233, 234, 235, 236, 237, 238, 239, 240, 241,
- 242, 243, 244, 245, 246, 247, 248, 249, 250, 251,
- 252, 253, 254, 255, 256, 257, 258, 259, 260, 261,
- 262, 263, 264, 265, 266, 267, 268, 269, 270, 271,
- 272, 273, 274, 275, 276, 277, 278, 279, 280, 281,
- 282, 283, 284, 285, 286, 287, 288, 289, 290, 291,
- 292, 293, 294, 295, 296, 297, 298, 299, 300, 301,
- 302, 303, 304, 305, 306, 307, 308, 309, 310, 311,
- 312, 313, 314, 0, 0, 0, 318, 319, 320, 321,
- 322, 323, 324, 0, 0, 499, 500, 0, 0, 0,
+ 506, 507, 508, 509, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 327, 328, 329, 330, 0, 0, 0,
+ 0, 510, 511, 512, 513, 514, 332, 333, 334, 335,
+ 336, 337, 338, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 339, 340,
+ 341, 342, 343, 344, 515, 516, 517, 518, 519, 520,
+ 521, 522, 345, 0, 346, 347, 348, 349, 350, 351,
+ 352, 353, 354, 355, 356, 357, 358, 359, 360, 361,
+ 362, 363, 364, 365, 366, 367, 368, 369, 370, 371,
+ 372, 373, 374, 375, 376, 377, 378, 379, 1, 2,
+ 3, 4, 5, 6, 7, 8, 9, 10, 11, 12,
+ 13, 14, 15, 16, 17, 18, 19, 20, 21, 22,
+ 23, 24, 25, 26, 27, 28, 29, 30, 31, 32,
+ 33, 34, 35, 36, 37, 38, 39, 40, 41, 42,
+ 43, 44, 45, 46, 47, 48, 49, 50, 51, 52,
+ 53, 54, 55, 56, 57, 58, 59, 60, 61, 62,
+ 63, 64, 65, 66, 67, 68, 69, 70, 71, 72,
+ 73, 74, 75, 76, 77, 78, 79, 80, 81, 82,
+ 83, 84, 85, 86, 87, 88, 89, 90, 91, 92,
+ 93, 94, 95, 96, 97, 98, 99, 100, 101, 102,
+ 103, 104, 105, 106, 107, 108, 109, 110, 111, 112,
+ 113, 114, 115, 116, 117, 118, 119, 120, 121, 122,
+ 123, 124, 125, 126, 127, 128, 129, 130, 131, 132,
+ 133, 134, 135, 136, 137, 138, 139, 140, 141, 142,
+ 143, 144, 145, 146, 147, 148, 149, 150, 151, 152,
+ 153, 154, 155, 156, 157, 158, 159, 160, 161, 162,
+ 163, 164, 165, 166, 167, 168, 169, 170, 171, 172,
+ 173, 174, 175, 176, 177, 178, 179, 180, 181, 182,
+ 183, 184, 185, 186, 187, 188, 189, 190, 191, 192,
+ 193, 194, 195, 196, 197, 198, 199, 200, 201, 202,
+ 203, 204, 205, 206, 207, 208, 209, 210, 211, 212,
+ 213, 214, 215, 216, 217, 218, 219, 220, 221, 222,
+ 223, 224, 225, 226, 227, 228, 229, 230, 231, 232,
+ 233, 234, 235, 236, 237, 238, 239, 240, 241, 242,
+ 243, 244, 245, 246, 247, 248, 249, 250, 251, 252,
+ 253, 254, 255, 256, 257, 258, 259, 260, 261, 262,
+ 263, 264, 265, 266, 267, 268, 269, 270, 271, 272,
+ 273, 274, 275, 276, 277, 278, 279, 280, 281, 282,
+ 283, 284, 285, 286, 287, 288, 289, 290, 291, 292,
+ 293, 294, 295, 296, 297, 298, 299, 300, 301, 302,
+ 303, 304, 305, 306, 307, 308, 309, 310, 311, 312,
+ 313, 314, 315, 316, 317, 318, 319, 320, 321, 322,
+ 323, 324, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 501, 502, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 503, 504, 505, 506,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 327,
- 328, 329, 330, 0, 0, 0, 0, 507, 508, 509,
- 510, 511, 332, 333, 334, 335, 336, 337, 338, 0,
+ 0, 0, 0, 0, 0, 0, 325, 0, 0, 0,
+ 0, 0, 0, 0, 326, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 327, 328,
+ 329, 330, 331, 0, 0, 0, 0, 0, 0, 0,
+ 0, 332, 333, 334, 335, 336, 337, 338, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 339, 340, 341, 342, 343, 344,
- 512, 513, 514, 515, 516, 517, 518, 519, 345, 0,
- 346, 347, 348, 349, 350, 351, 352, 353, 354, 355,
- 356, 357, 358, 359, 360, 361, 362, 363, 364, 365,
- 366, 367, 368, 369, 370, 371, 372, 373, 374, 375,
- 376, 1, 2, 3, 4, 5, 6, 7, 8, 9,
- 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
- 20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
- 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
- 40, 41, 42, 43, 44, 45, 46, 47, 48, 49,
- 50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
- 60, 61, 62, 63, 64, 65, 66, 67, 68, 69,
- 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
- 80, 81, 82, 83, 84, 85, 86, 87, 88, 89,
- 90, 91, 92, 93, 94, 95, 96, 97, 98, 99,
- 100, 101, 102, 103, 104, 105, 106, 107, 108, 109,
- 110, 111, 112, 113, 114, 115, 116, 117, 118, 119,
- 120, 121, 122, 123, 124, 125, 126, 127, 128, 129,
- 130, 131, 132, 133, 134, 135, 136, 137, 138, 139,
- 140, 141, 142, 143, 144, 145, 146, 147, 148, 149,
- 150, 151, 152, 153, 154, 155, 156, 157, 158, 159,
- 160, 161, 162, 163, 164, 165, 166, 167, 168, 169,
- 170, 171, 172, 173, 174, 175, 176, 177, 178, 179,
- 180, 181, 182, 183, 184, 185, 186, 187, 188, 189,
- 190, 191, 192, 193, 194, 195, 196, 197, 198, 199,
- 200, 201, 202, 203, 204, 205, 206, 207, 208, 209,
- 210, 211, 212, 213, 214, 215, 216, 217, 218, 219,
- 220, 221, 222, 223, 224, 225, 226, 227, 228, 229,
- 230, 231, 232, 233, 234, 235, 236, 237, 238, 239,
- 240, 241, 242, 243, 244, 245, 246, 247, 248, 249,
- 250, 251, 252, 253, 254, 255, 256, 257, 258, 259,
- 260, 261, 262, 263, 264, 265, 266, 267, 268, 269,
- 270, 271, 272, 273, 274, 275, 276, 277, 278, 279,
- 280, 281, 282, 283, 284, 285, 286, 287, 288, 289,
- 290, 291, 292, 293, 294, 295, 296, 297, 298, 299,
- 300, 301, 302, 303, 304, 305, 306, 307, 308, 309,
- 310, 311, 312, 313, 314, 315, 316, 317, 318, 319,
- 320, 321, 322, 323, 324, 0, 0, 0, 0, 0,
+ 0, 0, 0, 339, 340, 341, 342, 343, 344, 0,
+ 0, 0, 0, 0, 0, 0, 0, 345, 0, 346,
+ 347, 348, 349, 350, 351, 352, 353, 354, 355, 356,
+ 357, 358, 359, 360, 361, 362, 363, 364, 365, 366,
+ 367, 368, 369, 370, 371, 372, 373, 374, 375, 376,
+ 377, 378, 379, 1, 2, 3, 4, 5, 6, 7,
+ 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
+ 18, 19, 20, 21, 22, 23, 24, 25, 26, 27,
+ 28, 29, 30, 31, 32, 33, 34, 35, 36, 37,
+ 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+ 48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+ 58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+ 68, 69, 70, 71, 72, 73, 74, 75, 76, 77,
+ 78, 79, 80, 81, 82, 83, 84, 85, 86, 87,
+ 88, 89, 90, 91, 92, 93, 94, 95, 96, 97,
+ 98, 99, 100, 101, 102, 103, 104, 105, 106, 107,
+ 108, 109, 110, 111, 112, 113, 114, 115, 116, 117,
+ 118, 119, 120, 121, 122, 123, 124, 125, 126, 127,
+ 128, 129, 130, 131, 132, 133, 134, 135, 136, 137,
+ 138, 139, 140, 141, 142, 143, 144, 145, 146, 147,
+ 148, 149, 150, 151, 152, 153, 154, 155, 156, 157,
+ 158, 159, 160, 161, 162, 163, 164, 165, 166, 167,
+ 168, 169, 170, 171, 172, 173, 174, 175, 176, 177,
+ 178, 179, 180, 181, 182, 183, 184, 185, 186, 187,
+ 188, 189, 190, 191, 192, 193, 194, 195, 196, 197,
+ 198, 199, 200, 201, 202, 203, 204, 205, 206, 207,
+ 208, 209, 210, 211, 212, 213, 214, 215, 216, 217,
+ 218, 219, 220, 221, 222, 223, 224, 225, 226, 227,
+ 228, 229, 230, 231, 232, 233, 234, 235, 236, 237,
+ 238, 239, 240, 241, 242, 243, 244, 245, 246, 247,
+ 248, 249, 250, 251, 252, 253, 254, 255, 256, 257,
+ 258, 259, 260, 261, 262, 263, 264, 265, 266, 267,
+ 268, 269, 270, 271, 272, 273, 274, 275, 276, 277,
+ 278, 279, 280, 281, 282, 283, 284, 285, 286, 287,
+ 288, 289, 290, 291, 292, 293, 294, 295, 296, 297,
+ 298, 299, 300, 301, 302, 303, 304, 305, 306, 307,
+ 308, 309, 310, 311, 312, 313, 314, 0, 0, 0,
+ 318, 319, 320, 321, 322, 323, 324, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 325,
- 0, 0, 0, 0, 0, 0, 0, 326, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 327, 328, 329, 330, 331, 0, 0, 0, 0,
- 0, 0, 0, 0, 332, 333, 334, 335, 336, 337,
- 338, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 339, 340, 341, 342,
- 343, 344, 0, 0, 0, 0, 0, 0, 0, 0,
- 345, 0, 346, 347, 348, 349, 350, 351, 352, 353,
- 354, 355, 356, 357, 358, 359, 360, 361, 362, 363,
- 364, 365, 366, 367, 368, 369, 370, 371, 372, 373,
- 374, 375, 376, 1, 2, 3, 4, 5, 6, 7,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 327, 328, 329, 330, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 332, 333, 334, 335,
+ 336, 337, 338, 597, 0, 0, 600, 0, 601, 602,
+ 0, 0, 605, 0, 0, 0, 0, 0, 339, 340,
+ 341, 342, 343, 344, 0, 0, 0, 0, 0, 0,
+ 0, 0, 345, 0, 346, 347, 348, 349, 350, 351,
+ 352, 353, 354, 355, 356, 357, 358, 359, 360, 361,
+ 362, 363, 364, 365, 366, 367, 368, 369, 370, 371,
+ 372, 373, 374, 375, 376, 377, 378, 379, 1, 2,
+ 3, 4, 5, 6, 7, 8, 9, 10, 11, 12,
+ 13, 14, 15, 16, 17, 18, 19, 20, 21, 22,
+ 23, 24, 25, 26, 27, 28, 29, 30, 31, 32,
+ 33, 34, 35, 36, 37, 38, 39, 40, 41, 42,
+ 43, 44, 45, 46, 47, 48, 49, 50, 51, 52,
+ 53, 54, 55, 56, 57, 58, 59, 60, 61, 62,
+ 63, 64, 65, 66, 67, 68, 69, 70, 71, 72,
+ 73, 74, 75, 76, 77, 78, 79, 80, 81, 82,
+ 83, 84, 85, 86, 87, 88, 89, 90, 91, 92,
+ 93, 94, 95, 96, 97, 98, 99, 100, 101, 102,
+ 103, 104, 105, 106, 107, 108, 109, 110, 111, 112,
+ 113, 114, 115, 116, 117, 118, 119, 120, 121, 122,
+ 123, 124, 125, 126, 127, 128, 129, 130, 131, 132,
+ 133, 134, 135, 136, 137, 138, 139, 140, 141, 142,
+ 143, 144, 145, 146, 147, 148, 149, 150, 151, 152,
+ 153, 154, 155, 156, 157, 158, 159, 160, 161, 162,
+ 163, 164, 165, 166, 167, 168, 169, 170, 171, 172,
+ 173, 174, 175, 176, 177, 178, 179, 180, 181, 182,
+ 183, 184, 185, 186, 187, 188, 189, 190, 191, 192,
+ 193, 194, 195, 196, 197, 198, 199, 200, 201, 202,
+ 203, 204, 205, 206, 207, 208, 209, 210, 211, 212,
+ 213, 214, 215, 216, 217, 218, 219, 220, 221, 222,
+ 223, 224, 225, 226, 227, 228, 229, 230, 231, 232,
+ 233, 234, 235, 236, 237, 238, 239, 240, 241, 242,
+ 243, 244, 245, 246, 247, 248, 249, 250, 251, 252,
+ 253, 254, 255, 256, 257, 258, 259, 260, 261, 262,
+ 263, 264, 265, 266, 267, 268, 269, 270, 271, 272,
+ 273, 274, 275, 276, 277, 278, 279, 280, 281, 282,
+ 283, 284, 285, 286, 287, 288, 289, 290, 291, 292,
+ 293, 294, 295, 296, 297, 298, 299, 300, 301, 302,
+ 303, 304, 305, 306, 307, 308, 309, 310, 311, 312,
+ 313, 314, 0, 0, 0, 318, 319, 320, 321, 322,
+ 323, 324, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 438, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 327, 328,
+ 329, 330, 0, 0, 0, 0, 0, 0, 0, 0,
+ 439, 332, 333, 334, 335, 336, 337, 338, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 339, 340, 341, 342, 343, 344, 0,
+ 0, 0, 0, 0, 0, 0, 0, 345, 0, 346,
+ 347, 348, 349, 350, 351, 352, 353, 354, 355, 356,
+ 357, 358, 359, 360, 361, 362, 363, 364, 365, 366,
+ 367, 368, 369, 370, 371, 372, 373, 374, 375, 376,
+ 377, 378, 379, 1, 2, 3, 4, 5, 6, 7,
8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
18, 19, 20, 21, 22, 23, 24, 25, 26, 27,
28, 29, 30, 31, 32, 33, 34, 35, 36, 37,
@@ -2220,68 +2321,205 @@ static const yytype_int16 yytable[] =
318, 319, 320, 321, 322, 323, 324, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 325, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 327, 328, 329, 330, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 332, 333, 334, 335,
+ 336, 337, 338, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 339, 340,
+ 341, 342, 343, 344, 0, 0, 0, 0, 0, 0,
+ 0, 0, 345, 0, 346, 347, 348, 349, 350, 351,
+ 352, 353, 354, 355, 356, 357, 358, 359, 360, 361,
+ 362, 363, 364, 365, 366, 367, 368, 369, 370, 371,
+ 372, 373, 374, 375, 376, 377, 378, 379, 1, 2,
+ 3, 4, 5, 6, 7, 8, 9, 10, 11, 12,
+ 13, 14, 15, 16, 17, 18, 19, 20, 21, 22,
+ 23, 24, 25, 26, 27, 28, 29, 30, 31, 32,
+ 33, 34, 35, 36, 37, 38, 39, 40, 41, 42,
+ 43, 44, 45, 46, 47, 48, 49, 50, 51, 52,
+ 53, 54, 55, 56, 57, 58, 59, 60, 61, 62,
+ 63, 64, 65, 66, 67, 68, 69, 70, 71, 72,
+ 73, 74, 75, 76, 77, 78, 79, 80, 81, 82,
+ 83, 84, 85, 86, 87, 88, 89, 90, 91, 92,
+ 93, 94, 95, 96, 97, 98, 99, 100, 101, 102,
+ 103, 104, 105, 106, 107, 108, 109, 110, 111, 112,
+ 113, 114, 115, 116, 117, 118, 119, 120, 121, 122,
+ 123, 124, 125, 126, 127, 128, 129, 130, 131, 132,
+ 133, 134, 135, 136, 137, 138, 139, 140, 141, 142,
+ 143, 144, 145, 146, 147, 148, 149, 150, 151, 152,
+ 153, 154, 155, 156, 157, 158, 159, 160, 161, 162,
+ 163, 164, 165, 166, 167, 168, 169, 170, 171, 172,
+ 173, 174, 175, 176, 177, 178, 179, 180, 181, 182,
+ 183, 184, 185, 186, 187, 188, 189, 190, 191, 192,
+ 193, 194, 195, 196, 197, 198, 199, 200, 201, 202,
+ 203, 204, 205, 206, 207, 208, 209, 210, 211, 212,
+ 213, 214, 215, 216, 217, 218, 219, 220, 221, 222,
+ 223, 224, 225, 226, 227, 228, 229, 230, 231, 232,
+ 233, 234, 235, 236, 237, 238, 239, 240, 241, 242,
+ 243, 244, 245, 246, 247, 248, 249, 250, 251, 252,
+ 253, 254, 255, 256, 257, 258, 259, 260, 261, 262,
+ 263, 264, 265, 266, 267, 268, 269, 270, 271, 272,
+ 273, 274, 275, 276, 277, 278, 279, 280, 281, 282,
+ 283, 284, 285, 286, 287, 288, 289, 290, 291, 292,
+ 293, 294, 295, 296, 297, 298, 299, 300, 301, 302,
+ 303, 304, 305, 306, 307, 308, 309, 310, 311, 312,
+ 313, 314, 0, 0, 0, 318, 319, 320, 321, 322,
+ 323, 324, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 709,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 327, 328,
+ 329, 330, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 332, 333, 334, 335, 336, 337, 338, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 339, 340, 341, 342, 343, 344, 0,
+ 0, 0, 0, 0, 0, 0, 0, 345, 0, 346,
+ 347, 348, 349, 350, 351, 352, 353, 354, 355, 356,
+ 357, 358, 359, 360, 361, 362, 363, 364, 365, 366,
+ 367, 368, 369, 370, 371, 372, 373, 374, 375, 376,
+ 377, 378, 379, 1, 2, 3, 4, 5, 6, 7,
+ 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
+ 18, 19, 20, 21, 22, 23, 24, 25, 26, 27,
+ 28, 29, 30, 31, 32, 33, 34, 35, 36, 37,
+ 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+ 48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+ 58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+ 68, 69, 70, 71, 72, 73, 74, 75, 76, 77,
+ 78, 79, 80, 81, 82, 83, 84, 85, 86, 87,
+ 88, 89, 90, 91, 92, 93, 94, 95, 96, 97,
+ 98, 99, 100, 101, 102, 103, 104, 105, 106, 107,
+ 108, 109, 110, 111, 112, 113, 114, 115, 116, 117,
+ 118, 119, 120, 121, 122, 123, 124, 125, 126, 127,
+ 128, 129, 130, 131, 132, 133, 134, 135, 136, 137,
+ 138, 139, 140, 141, 142, 143, 144, 145, 146, 147,
+ 148, 149, 150, 151, 152, 153, 154, 155, 156, 157,
+ 158, 159, 160, 161, 162, 163, 164, 165, 166, 167,
+ 168, 169, 170, 171, 172, 173, 174, 175, 176, 177,
+ 178, 179, 180, 181, 182, 183, 184, 185, 186, 187,
+ 188, 189, 190, 191, 192, 193, 194, 195, 196, 197,
+ 198, 199, 200, 201, 202, 203, 204, 205, 206, 207,
+ 208, 209, 210, 211, 212, 213, 214, 215, 216, 217,
+ 218, 219, 220, 221, 222, 223, 224, 225, 226, 227,
+ 228, 229, 230, 231, 232, 233, 234, 235, 236, 237,
+ 238, 239, 240, 241, 242, 243, 244, 245, 246, 247,
+ 248, 249, 250, 251, 252, 253, 254, 255, 256, 257,
+ 258, 259, 260, 261, 262, 263, 264, 265, 266, 267,
+ 268, 269, 270, 271, 272, 273, 274, 275, 276, 277,
+ 278, 279, 280, 281, 282, 283, 284, 285, 286, 287,
+ 288, 289, 290, 291, 292, 293, 294, 295, 296, 297,
+ 298, 299, 300, 301, 302, 303, 304, 305, 306, 307,
+ 308, 309, 310, 311, 312, 313, 314, 0, 0, 0,
+ 318, 319, 320, 321, 322, 323, 324, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 822, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 327, 328, 329, 330, 0, 0, 0,
0, 0, 0, 0, 0, 0, 332, 333, 334, 335,
- 336, 337, 338, 594, 0, 0, 597, 0, 598, 599,
- 0, 0, 602, 0, 0, 0, 0, 0, 339, 340,
+ 336, 337, 338, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 339, 340,
341, 342, 343, 344, 0, 0, 0, 0, 0, 0,
0, 0, 345, 0, 346, 347, 348, 349, 350, 351,
352, 353, 354, 355, 356, 357, 358, 359, 360, 361,
362, 363, 364, 365, 366, 367, 368, 369, 370, 371,
- 372, 373, 374, 375, 376, 1, 2, 3, 4, 5,
- 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
- 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
- 26, 27, 28, 29, 30, 31, 32, 33, 34, 35,
- 36, 37, 38, 39, 40, 41, 42, 43, 44, 45,
- 46, 47, 48, 49, 50, 51, 52, 53, 54, 55,
- 56, 57, 58, 59, 60, 61, 62, 63, 64, 65,
- 66, 67, 68, 69, 70, 71, 72, 73, 74, 75,
- 76, 77, 78, 79, 80, 81, 82, 83, 84, 85,
- 86, 87, 88, 89, 90, 91, 92, 93, 94, 95,
- 96, 97, 98, 99, 100, 101, 102, 103, 104, 105,
- 106, 107, 108, 109, 110, 111, 112, 113, 114, 115,
- 116, 117, 118, 119, 120, 121, 122, 123, 124, 125,
- 126, 127, 128, 129, 130, 131, 132, 133, 134, 135,
- 136, 137, 138, 139, 140, 141, 142, 143, 144, 145,
- 146, 147, 148, 149, 150, 151, 152, 153, 154, 155,
- 156, 157, 158, 159, 160, 161, 162, 163, 164, 165,
- 166, 167, 168, 169, 170, 171, 172, 173, 174, 175,
- 176, 177, 178, 179, 180, 181, 182, 183, 184, 185,
- 186, 187, 188, 189, 190, 191, 192, 193, 194, 195,
- 196, 197, 198, 199, 200, 201, 202, 203, 204, 205,
- 206, 207, 208, 209, 210, 211, 212, 213, 214, 215,
- 216, 217, 218, 219, 220, 221, 222, 223, 224, 225,
- 226, 227, 228, 229, 230, 231, 232, 233, 234, 235,
- 236, 237, 238, 239, 240, 241, 242, 243, 244, 245,
- 246, 247, 248, 249, 250, 251, 252, 253, 254, 255,
- 256, 257, 258, 259, 260, 261, 262, 263, 264, 265,
- 266, 267, 268, 269, 270, 271, 272, 273, 274, 275,
- 276, 277, 278, 279, 280, 281, 282, 283, 284, 285,
- 286, 287, 288, 289, 290, 291, 292, 293, 294, 295,
- 296, 297, 298, 299, 300, 301, 302, 303, 304, 305,
- 306, 307, 308, 309, 310, 311, 312, 313, 314, 0,
- 0, 0, 318, 319, 320, 321, 322, 323, 324, 0,
+ 372, 373, 374, 375, 376, 377, 378, 379, 1, 2,
+ 3, 4, 5, 6, 7, 8, 9, 10, 11, 12,
+ 13, 14, 15, 16, 17, 18, 19, 20, 21, 22,
+ 23, 24, 25, 26, 27, 28, 29, 30, 31, 32,
+ 33, 34, 35, 36, 37, 38, 39, 40, 41, 42,
+ 43, 44, 45, 46, 47, 48, 49, 50, 51, 52,
+ 53, 54, 55, 56, 57, 58, 59, 60, 61, 62,
+ 63, 64, 65, 66, 67, 68, 69, 70, 71, 72,
+ 73, 74, 75, 76, 77, 78, 79, 80, 81, 82,
+ 83, 84, 85, 86, 87, 88, 89, 90, 91, 92,
+ 93, 94, 95, 96, 97, 98, 99, 100, 101, 102,
+ 103, 104, 105, 106, 107, 108, 109, 110, 111, 112,
+ 113, 114, 115, 116, 117, 118, 119, 120, 121, 122,
+ 123, 124, 125, 126, 127, 128, 129, 130, 131, 132,
+ 133, 134, 135, 136, 137, 138, 139, 140, 141, 142,
+ 143, 144, 145, 146, 147, 148, 149, 150, 151, 152,
+ 153, 154, 155, 156, 157, 158, 159, 160, 161, 162,
+ 163, 164, 165, 166, 167, 168, 169, 170, 171, 172,
+ 173, 174, 175, 176, 177, 178, 179, 180, 181, 182,
+ 183, 184, 185, 186, 187, 188, 189, 190, 191, 192,
+ 193, 194, 195, 196, 197, 198, 199, 200, 201, 202,
+ 203, 204, 205, 206, 207, 208, 209, 210, 211, 212,
+ 213, 214, 215, 216, 217, 218, 219, 220, 221, 222,
+ 223, 224, 225, 226, 227, 228, 229, 230, 231, 232,
+ 233, 234, 235, 236, 237, 238, 239, 240, 241, 242,
+ 243, 244, 245, 246, 247, 248, 249, 250, 251, 252,
+ 253, 254, 255, 256, 257, 258, 259, 260, 261, 262,
+ 263, 264, 265, 266, 267, 268, 269, 270, 271, 272,
+ 273, 274, 275, 276, 277, 278, 279, 280, 281, 282,
+ 283, 284, 285, 286, 287, 288, 289, 290, 291, 292,
+ 293, 294, 295, 296, 297, 298, 299, 300, 301, 302,
+ 303, 304, 305, 306, 307, 308, 309, 310, 311, 312,
+ 313, 314, 0, 0, 0, 318, 319, 320, 321, 322,
+ 323, 324, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 857,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 327, 328,
+ 329, 330, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 332, 333, 334, 335, 336, 337, 338, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 339, 340, 341, 342, 343, 344, 0,
+ 0, 0, 0, 0, 0, 0, 0, 345, 0, 346,
+ 347, 348, 349, 350, 351, 352, 353, 354, 355, 356,
+ 357, 358, 359, 360, 361, 362, 363, 364, 365, 366,
+ 367, 368, 369, 370, 371, 372, 373, 374, 375, 376,
+ 377, 378, 379, 1, 2, 3, 4, 5, 6, 7,
+ 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
+ 18, 19, 20, 21, 22, 23, 24, 25, 26, 27,
+ 28, 29, 30, 31, 32, 33, 34, 35, 36, 37,
+ 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+ 48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+ 58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+ 68, 69, 70, 71, 72, 73, 74, 75, 76, 77,
+ 78, 79, 80, 81, 82, 83, 84, 85, 86, 87,
+ 88, 89, 90, 91, 92, 93, 94, 95, 96, 97,
+ 98, 99, 100, 101, 102, 103, 104, 105, 106, 107,
+ 108, 109, 110, 111, 112, 113, 114, 115, 116, 117,
+ 118, 119, 120, 121, 122, 123, 124, 125, 126, 127,
+ 128, 129, 130, 131, 132, 133, 134, 135, 136, 137,
+ 138, 139, 140, 141, 142, 143, 144, 145, 146, 147,
+ 148, 149, 150, 151, 152, 153, 154, 155, 156, 157,
+ 158, 159, 160, 161, 162, 163, 164, 165, 166, 167,
+ 168, 169, 170, 171, 172, 173, 174, 175, 176, 177,
+ 178, 179, 180, 181, 182, 183, 184, 185, 186, 187,
+ 188, 189, 190, 191, 192, 193, 194, 195, 196, 197,
+ 198, 199, 200, 201, 202, 203, 204, 205, 206, 207,
+ 208, 209, 210, 211, 212, 213, 214, 215, 216, 217,
+ 218, 219, 220, 221, 222, 223, 224, 225, 226, 227,
+ 228, 229, 230, 231, 232, 233, 234, 235, 236, 237,
+ 238, 239, 240, 241, 242, 243, 244, 245, 246, 247,
+ 248, 249, 250, 251, 252, 253, 254, 255, 256, 257,
+ 258, 259, 260, 261, 262, 263, 264, 265, 266, 267,
+ 268, 269, 270, 271, 272, 273, 274, 275, 276, 277,
+ 278, 279, 280, 281, 282, 283, 284, 285, 286, 287,
+ 288, 289, 290, 291, 292, 293, 294, 295, 296, 297,
+ 298, 299, 300, 301, 302, 303, 304, 305, 306, 307,
+ 308, 309, 310, 311, 312, 313, 314, 0, 0, 0,
+ 318, 319, 320, 321, 322, 323, 324, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 435, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 327, 328, 329, 330, 0,
- 0, 0, 0, 0, 0, 0, 0, 436, 332, 333,
- 334, 335, 336, 337, 338, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 339, 340, 341, 342, 343, 344, 0, 0, 0, 0,
- 0, 0, 0, 0, 345, 0, 346, 347, 348, 349,
- 350, 351, 352, 353, 354, 355, 356, 357, 358, 359,
- 360, 361, 362, 363, 364, 365, 366, 367, 368, 369,
- 370, 371, 372, 373, 374, 375, 376, 1, 2, 3,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 327, 328, 329, 330, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 332, 333, 334, 335,
+ 336, 337, 338, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 339, 340,
+ 341, 342, 343, 344, 0, 0, 0, 0, 0, 0,
+ 0, 0, 345, 0, 346, 347, 348, 349, 350, 351,
+ 352, 353, 354, 355, 356, 357, 358, 359, 360, 361,
+ 362, 363, 364, 365, 366, 367, 368, 369, 370, 371,
+ 372, 373, 374, 375, 376, 377, 378, 379, 2, 3,
4, 5, 6, 7, 8, 9, 10, 11, 12, 13,
14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
24, 25, 26, 27, 28, 29, 30, 31, 32, 33,
34, 35, 36, 37, 38, 39, 40, 41, 42, 43,
44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
- 54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
+ 54, 55, 56, 57, 58, 0, 0, 61, 62, 63,
64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
74, 75, 76, 77, 78, 79, 80, 81, 82, 83,
84, 85, 86, 87, 88, 89, 90, 91, 92, 93,
@@ -2307,117 +2545,68 @@ static const yytype_int16 yytable[] =
284, 285, 286, 287, 288, 289, 290, 291, 292, 293,
294, 295, 296, 297, 298, 299, 300, 301, 302, 303,
304, 305, 306, 307, 308, 309, 310, 311, 312, 313,
- 314, 0, 0, 0, 318, 319, 320, 321, 322, 323,
- 324, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 314, 0, 0, 0, 0, 0, 0, 321, 0, 0,
+ 0, 0, 0, 502, 503, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 325, 0, 0, 0, 0,
+ 0, 0, 504, 505, 0, 0, 0, 639, 777, 0,
+ 0, 0, 0, 0, 506, 507, 508, 509, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 327, 328, 329,
- 330, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 332, 333, 334, 335, 336, 337, 338, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 339, 340, 341, 342, 343, 344, 0, 0,
- 0, 0, 0, 0, 0, 0, 345, 0, 346, 347,
- 348, 349, 350, 351, 352, 353, 354, 355, 356, 357,
- 358, 359, 360, 361, 362, 363, 364, 365, 366, 367,
- 368, 369, 370, 371, 372, 373, 374, 375, 376, 1,
- 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
- 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 32, 33, 34, 35, 36, 37, 38, 39, 40, 41,
- 42, 43, 44, 45, 46, 47, 48, 49, 50, 51,
- 52, 53, 54, 55, 56, 57, 58, 59, 60, 61,
- 62, 63, 64, 65, 66, 67, 68, 69, 70, 71,
- 72, 73, 74, 75, 76, 77, 78, 79, 80, 81,
- 82, 83, 84, 85, 86, 87, 88, 89, 90, 91,
- 92, 93, 94, 95, 96, 97, 98, 99, 100, 101,
- 102, 103, 104, 105, 106, 107, 108, 109, 110, 111,
- 112, 113, 114, 115, 116, 117, 118, 119, 120, 121,
- 122, 123, 124, 125, 126, 127, 128, 129, 130, 131,
- 132, 133, 134, 135, 136, 137, 138, 139, 140, 141,
- 142, 143, 144, 145, 146, 147, 148, 149, 150, 151,
- 152, 153, 154, 155, 156, 157, 158, 159, 160, 161,
- 162, 163, 164, 165, 166, 167, 168, 169, 170, 171,
- 172, 173, 174, 175, 176, 177, 178, 179, 180, 181,
- 182, 183, 184, 185, 186, 187, 188, 189, 190, 191,
- 192, 193, 194, 195, 196, 197, 198, 199, 200, 201,
- 202, 203, 204, 205, 206, 207, 208, 209, 210, 211,
- 212, 213, 214, 215, 216, 217, 218, 219, 220, 221,
- 222, 223, 224, 225, 226, 227, 228, 229, 230, 231,
- 232, 233, 234, 235, 236, 237, 238, 239, 240, 241,
- 242, 243, 244, 245, 246, 247, 248, 249, 250, 251,
- 252, 253, 254, 255, 256, 257, 258, 259, 260, 261,
- 262, 263, 264, 265, 266, 267, 268, 269, 270, 271,
- 272, 273, 274, 275, 276, 277, 278, 279, 280, 281,
- 282, 283, 284, 285, 286, 287, 288, 289, 290, 291,
- 292, 293, 294, 295, 296, 297, 298, 299, 300, 301,
- 302, 303, 304, 305, 306, 307, 308, 309, 310, 311,
- 312, 313, 314, 0, 0, 0, 318, 319, 320, 321,
- 322, 323, 324, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 510, 511, 512, 513, 514,
+ 332, 0, 0, 0, 0, 337, 338, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 515, 516,
+ 517, 518, 519, 520, 521, 522, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 706, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 327,
- 328, 329, 330, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 332, 333, 334, 335, 336, 337, 338, 0,
+ 358, 2, 3, 4, 5, 6, 7, 8, 9, 10,
+ 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
+ 21, 22, 23, 24, 25, 26, 27, 28, 29, 30,
+ 31, 32, 33, 34, 35, 36, 37, 38, 39, 40,
+ 41, 42, 43, 44, 45, 46, 47, 48, 49, 50,
+ 51, 52, 53, 54, 55, 56, 57, 58, 0, 0,
+ 61, 62, 63, 64, 65, 66, 67, 68, 69, 70,
+ 71, 72, 73, 74, 75, 76, 77, 78, 79, 80,
+ 81, 82, 83, 84, 85, 86, 87, 88, 89, 90,
+ 91, 92, 93, 94, 95, 96, 97, 98, 99, 100,
+ 101, 102, 103, 104, 105, 106, 107, 108, 109, 110,
+ 111, 112, 113, 114, 115, 116, 117, 118, 119, 120,
+ 121, 122, 123, 124, 125, 126, 127, 128, 129, 130,
+ 131, 132, 133, 134, 135, 136, 137, 138, 139, 140,
+ 141, 142, 143, 144, 145, 146, 147, 148, 149, 150,
+ 151, 152, 153, 154, 155, 156, 157, 158, 159, 160,
+ 161, 162, 163, 164, 165, 166, 167, 168, 169, 170,
+ 171, 172, 173, 174, 175, 176, 177, 178, 179, 180,
+ 181, 182, 183, 184, 185, 186, 187, 188, 189, 190,
+ 191, 192, 193, 194, 195, 196, 197, 198, 199, 200,
+ 201, 202, 203, 204, 205, 206, 207, 208, 209, 210,
+ 211, 212, 213, 214, 215, 216, 217, 218, 219, 220,
+ 221, 222, 223, 224, 225, 226, 227, 228, 229, 230,
+ 231, 232, 233, 234, 235, 236, 237, 238, 239, 240,
+ 241, 242, 243, 244, 245, 246, 247, 248, 249, 250,
+ 251, 252, 253, 254, 255, 256, 257, 258, 259, 260,
+ 261, 262, 263, 264, 265, 266, 267, 268, 269, 270,
+ 271, 272, 273, 274, 275, 276, 277, 278, 279, 280,
+ 281, 282, 283, 284, 285, 286, 287, 288, 289, 290,
+ 291, 292, 293, 294, 295, 296, 297, 298, 299, 300,
+ 301, 302, 303, 304, 305, 306, 307, 308, 309, 310,
+ 311, 312, 313, 314, 0, 0, 0, 0, 0, 0,
+ 321, 0, 0, 0, 0, 0, 502, 503, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 339, 340, 341, 342, 343, 344,
- 0, 0, 0, 0, 0, 0, 0, 0, 345, 0,
- 346, 347, 348, 349, 350, 351, 352, 353, 354, 355,
- 356, 357, 358, 359, 360, 361, 362, 363, 364, 365,
- 366, 367, 368, 369, 370, 371, 372, 373, 374, 375,
- 376, 1, 2, 3, 4, 5, 6, 7, 8, 9,
- 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
- 20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
- 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
- 40, 41, 42, 43, 44, 45, 46, 47, 48, 49,
- 50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
- 60, 61, 62, 63, 64, 65, 66, 67, 68, 69,
- 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
- 80, 81, 82, 83, 84, 85, 86, 87, 88, 89,
- 90, 91, 92, 93, 94, 95, 96, 97, 98, 99,
- 100, 101, 102, 103, 104, 105, 106, 107, 108, 109,
- 110, 111, 112, 113, 114, 115, 116, 117, 118, 119,
- 120, 121, 122, 123, 124, 125, 126, 127, 128, 129,
- 130, 131, 132, 133, 134, 135, 136, 137, 138, 139,
- 140, 141, 142, 143, 144, 145, 146, 147, 148, 149,
- 150, 151, 152, 153, 154, 155, 156, 157, 158, 159,
- 160, 161, 162, 163, 164, 165, 166, 167, 168, 169,
- 170, 171, 172, 173, 174, 175, 176, 177, 178, 179,
- 180, 181, 182, 183, 184, 185, 186, 187, 188, 189,
- 190, 191, 192, 193, 194, 195, 196, 197, 198, 199,
- 200, 201, 202, 203, 204, 205, 206, 207, 208, 209,
- 210, 211, 212, 213, 214, 215, 216, 217, 218, 219,
- 220, 221, 222, 223, 224, 225, 226, 227, 228, 229,
- 230, 231, 232, 233, 234, 235, 236, 237, 238, 239,
- 240, 241, 242, 243, 244, 245, 246, 247, 248, 249,
- 250, 251, 252, 253, 254, 255, 256, 257, 258, 259,
- 260, 261, 262, 263, 264, 265, 266, 267, 268, 269,
- 270, 271, 272, 273, 274, 275, 276, 277, 278, 279,
- 280, 281, 282, 283, 284, 285, 286, 287, 288, 289,
- 290, 291, 292, 293, 294, 295, 296, 297, 298, 299,
- 300, 301, 302, 303, 304, 305, 306, 307, 308, 309,
- 310, 311, 312, 313, 314, 0, 0, 0, 318, 319,
- 320, 321, 322, 323, 324, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 504, 505, 0, 0, 0,
+ 639, 888, 0, 0, 0, 0, 0, 506, 507, 508,
+ 509, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 510, 511,
+ 512, 513, 514, 332, 0, 0, 0, 0, 337, 338,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 819, 0, 0, 0, 0, 0, 0, 0,
+ 0, 515, 516, 517, 518, 519, 520, 521, 522, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 327, 328, 329, 330, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 332, 333, 334, 335, 336, 337,
- 338, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 339, 340, 341, 342,
- 343, 344, 0, 0, 0, 0, 0, 0, 0, 0,
- 345, 0, 346, 347, 348, 349, 350, 351, 352, 353,
- 354, 355, 356, 357, 358, 359, 360, 361, 362, 363,
- 364, 365, 366, 367, 368, 369, 370, 371, 372, 373,
- 374, 375, 376, 1, 2, 3, 4, 5, 6, 7,
+ 0, 0, 0, 358, 2, 3, 4, 5, 6, 7,
8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
18, 19, 20, 21, 22, 23, 24, 25, 26, 27,
28, 29, 30, 31, 32, 33, 34, 35, 36, 37,
38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
- 58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+ 58, 0, 0, 61, 62, 63, 64, 65, 66, 67,
68, 69, 70, 71, 72, 73, 74, 75, 76, 77,
78, 79, 80, 81, 82, 83, 84, 85, 86, 87,
88, 89, 90, 91, 92, 93, 94, 95, 96, 97,
@@ -2443,65 +2632,18 @@ static const yytype_int16 yytable[] =
288, 289, 290, 291, 292, 293, 294, 295, 296, 297,
298, 299, 300, 301, 302, 303, 304, 305, 306, 307,
308, 309, 310, 311, 312, 313, 314, 0, 0, 0,
- 318, 319, 320, 321, 322, 323, 324, 0, 0, 0,
+ 0, 0, 0, 321, 0, 0, 0, 0, 0, 502,
+ 503, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 504, 505,
+ 0, 0, 578, 0, 0, 0, 0, 0, 0, 0,
+ 506, 507, 508, 509, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 510, 511, 512, 513, 514, 332, 0, 0, 0,
+ 0, 337, 338, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 854, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 327, 328, 329, 330, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 332, 333, 334, 335,
- 336, 337, 338, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 339, 340,
- 341, 342, 343, 344, 0, 0, 0, 0, 0, 0,
- 0, 0, 345, 0, 346, 347, 348, 349, 350, 351,
- 352, 353, 354, 355, 356, 357, 358, 359, 360, 361,
- 362, 363, 364, 365, 366, 367, 368, 369, 370, 371,
- 372, 373, 374, 375, 376, 1, 2, 3, 4, 5,
- 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
- 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
- 26, 27, 28, 29, 30, 31, 32, 33, 34, 35,
- 36, 37, 38, 39, 40, 41, 42, 43, 44, 45,
- 46, 47, 48, 49, 50, 51, 52, 53, 54, 55,
- 56, 57, 58, 59, 60, 61, 62, 63, 64, 65,
- 66, 67, 68, 69, 70, 71, 72, 73, 74, 75,
- 76, 77, 78, 79, 80, 81, 82, 83, 84, 85,
- 86, 87, 88, 89, 90, 91, 92, 93, 94, 95,
- 96, 97, 98, 99, 100, 101, 102, 103, 104, 105,
- 106, 107, 108, 109, 110, 111, 112, 113, 114, 115,
- 116, 117, 118, 119, 120, 121, 122, 123, 124, 125,
- 126, 127, 128, 129, 130, 131, 132, 133, 134, 135,
- 136, 137, 138, 139, 140, 141, 142, 143, 144, 145,
- 146, 147, 148, 149, 150, 151, 152, 153, 154, 155,
- 156, 157, 158, 159, 160, 161, 162, 163, 164, 165,
- 166, 167, 168, 169, 170, 171, 172, 173, 174, 175,
- 176, 177, 178, 179, 180, 181, 182, 183, 184, 185,
- 186, 187, 188, 189, 190, 191, 192, 193, 194, 195,
- 196, 197, 198, 199, 200, 201, 202, 203, 204, 205,
- 206, 207, 208, 209, 210, 211, 212, 213, 214, 215,
- 216, 217, 218, 219, 220, 221, 222, 223, 224, 225,
- 226, 227, 228, 229, 230, 231, 232, 233, 234, 235,
- 236, 237, 238, 239, 240, 241, 242, 243, 244, 245,
- 246, 247, 248, 249, 250, 251, 252, 253, 254, 255,
- 256, 257, 258, 259, 260, 261, 262, 263, 264, 265,
- 266, 267, 268, 269, 270, 271, 272, 273, 274, 275,
- 276, 277, 278, 279, 280, 281, 282, 283, 284, 285,
- 286, 287, 288, 289, 290, 291, 292, 293, 294, 295,
- 296, 297, 298, 299, 300, 301, 302, 303, 304, 305,
- 306, 307, 308, 309, 310, 311, 312, 313, 314, 0,
- 0, 0, 318, 319, 320, 321, 322, 323, 324, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 327, 328, 329, 330, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 332, 333,
- 334, 335, 336, 337, 338, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 339, 340, 341, 342, 343, 344, 0, 0, 0, 0,
- 0, 0, 0, 0, 345, 0, 346, 347, 348, 349,
- 350, 351, 352, 353, 354, 355, 356, 357, 358, 359,
- 360, 361, 362, 363, 364, 365, 366, 367, 368, 369,
- 370, 371, 372, 373, 374, 375, 376, 2, 3, 4,
+ 0, 0, 0, 0, 515, 516, 517, 518, 519, 520,
+ 521, 522, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 358, 2, 3, 4,
5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
15, 16, 17, 18, 19, 20, 21, 22, 23, 24,
25, 26, 27, 28, 29, 30, 31, 32, 33, 34,
@@ -2534,16 +2676,16 @@ static const yytype_int16 yytable[] =
295, 296, 297, 298, 299, 300, 301, 302, 303, 304,
305, 306, 307, 308, 309, 310, 311, 312, 313, 314,
0, 0, 0, 0, 0, 0, 321, 0, 0, 0,
- 0, 0, 499, 500, 0, 0, 0, 0, 0, 0,
+ 0, 0, 502, 503, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 501, 502, 0, 0, 0, 636, 774, 0, 0,
- 0, 0, 0, 503, 504, 505, 506, 0, 0, 0,
+ 0, 504, 505, 0, 0, 0, 639, 0, 0, 0,
+ 0, 0, 0, 506, 507, 508, 509, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 507, 508, 509, 510, 511, 332,
+ 0, 0, 0, 0, 510, 511, 512, 513, 514, 332,
0, 0, 0, 0, 337, 338, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 512, 513, 514,
- 515, 516, 517, 518, 519, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 515, 516, 517,
+ 518, 519, 520, 521, 522, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 358,
2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
@@ -2577,16 +2719,16 @@ static const yytype_int16 yytable[] =
292, 293, 294, 295, 296, 297, 298, 299, 300, 301,
302, 303, 304, 305, 306, 307, 308, 309, 310, 311,
312, 313, 314, 0, 0, 0, 0, 0, 0, 321,
- 0, 0, 0, 0, 0, 499, 500, 0, 0, 0,
+ 0, 0, 0, 0, 0, 502, 503, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 501, 502, 0, 0, 0, 636,
- 885, 0, 0, 0, 0, 0, 503, 504, 505, 506,
+ 0, 0, 0, 0, 504, 505, 0, 0, 736, 0,
+ 0, 0, 0, 0, 0, 0, 506, 507, 508, 509,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 507, 508, 509,
- 510, 511, 332, 0, 0, 0, 0, 337, 338, 0,
+ 0, 0, 0, 0, 0, 0, 0, 510, 511, 512,
+ 513, 514, 332, 0, 0, 0, 0, 337, 338, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 512, 513, 514, 515, 516, 517, 518, 519, 0, 0,
+ 515, 516, 517, 518, 519, 520, 521, 522, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 358, 2, 3, 4, 5, 6, 7, 8,
9, 10, 11, 12, 13, 14, 15, 16, 17, 18,
@@ -2620,17 +2762,17 @@ static const yytype_int16 yytable[] =
289, 290, 291, 292, 293, 294, 295, 296, 297, 298,
299, 300, 301, 302, 303, 304, 305, 306, 307, 308,
309, 310, 311, 312, 313, 314, 0, 0, 0, 0,
- 0, 0, 321, 0, 0, 0, 0, 0, 499, 500,
+ 0, 0, 321, 0, 0, 0, 0, 0, 502, 503,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 501, 502, 0,
- 0, 575, 0, 0, 0, 0, 0, 0, 0, 503,
- 504, 505, 506, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 504, 505, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 747, 506,
+ 507, 508, 509, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 507, 508, 509, 510, 511, 332, 0, 0, 0, 0,
+ 510, 511, 512, 513, 514, 332, 0, 0, 0, 0,
337, 338, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 512, 513, 514, 515, 516, 517, 518,
- 519, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 515, 516, 517, 518, 519, 520, 521,
+ 522, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 358, 2, 3, 4, 5,
6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
@@ -2664,16 +2806,16 @@ static const yytype_int16 yytable[] =
296, 297, 298, 299, 300, 301, 302, 303, 304, 305,
306, 307, 308, 309, 310, 311, 312, 313, 314, 0,
0, 0, 0, 0, 0, 321, 0, 0, 0, 0,
- 0, 499, 500, 0, 0, 0, 0, 0, 0, 0,
+ 0, 502, 503, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 501, 502, 0, 0, 0, 636, 0, 0, 0, 0,
- 0, 0, 503, 504, 505, 506, 0, 0, 0, 0,
+ 504, 505, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 506, 507, 508, 509, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 507, 508, 509, 510, 511, 332, 0,
+ 0, 0, 0, 510, 511, 512, 513, 514, 332, 0,
0, 0, 0, 337, 338, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 512, 513, 514, 515,
- 516, 517, 518, 519, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 515, 516, 517, 518,
+ 519, 520, 521, 522, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 358, 2,
3, 4, 5, 6, 7, 8, 9, 10, 11, 12,
13, 14, 15, 16, 17, 18, 19, 20, 21, 22,
@@ -2707,16 +2849,16 @@ static const yytype_int16 yytable[] =
293, 294, 295, 296, 297, 298, 299, 300, 301, 302,
303, 304, 305, 306, 307, 308, 309, 310, 311, 312,
313, 314, 0, 0, 0, 0, 0, 0, 321, 0,
- 0, 0, 0, 0, 499, 500, 0, 0, 0, 0,
+ 0, 0, 0, 0, 502, 503, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 501, 502, 0, 0, 733, 0, 0,
- 0, 0, 0, 0, 0, 503, 504, 505, 506, 0,
+ 0, 0, 0, 504, 505, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 506, 507, 508, 509, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 507, 508, 509, 510,
- 511, 332, 0, 0, 0, 0, 337, 338, 0, 0,
+ 0, 0, 0, 0, 0, 0, 510, 511, 512, 513,
+ 514, 332, 0, 0, 0, 0, 337, 657, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 512,
- 513, 514, 515, 516, 517, 518, 519, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 515,
+ 516, 517, 518, 519, 520, 521, 522, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 358, 2, 3, 4, 5, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
@@ -2750,16 +2892,16 @@ static const yytype_int16 yytable[] =
290, 291, 292, 293, 294, 295, 296, 297, 298, 299,
300, 301, 302, 303, 304, 305, 306, 307, 308, 309,
310, 311, 312, 313, 314, 0, 0, 0, 0, 0,
- 0, 321, 0, 0, 0, 0, 0, 499, 500, 0,
+ 0, 321, 0, 0, 0, 0, 0, 502, 503, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 501, 502, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 744, 503, 504,
- 505, 506, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 507,
- 508, 509, 510, 511, 332, 0, 0, 0, 0, 337,
+ 0, 0, 0, 0, 0, 0, 504, 505, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 506, 507,
+ 508, 509, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 510,
+ 511, 512, 513, 697, 332, 0, 0, 0, 0, 337,
338, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 512, 513, 514, 515, 516, 517, 518, 519,
+ 0, 0, 515, 516, 517, 518, 519, 520, 521, 522,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 358, 2, 3, 4, 5, 6,
7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
@@ -2794,422 +2936,248 @@ static const yytype_int16 yytable[] =
297, 298, 299, 300, 301, 302, 303, 304, 305, 306,
307, 308, 309, 310, 311, 312, 313, 314, 0, 0,
0, 0, 0, 0, 321, 0, 0, 0, 0, 0,
- 499, 500, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 501,
- 502, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 503, 504, 505, 506, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 507, 508, 509, 510, 511, 332, 0, 0,
- 0, 0, 337, 338, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 512, 513, 514, 515, 516,
- 517, 518, 519, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 358, 2, 3,
- 4, 5, 6, 7, 8, 9, 10, 11, 12, 13,
- 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
- 24, 25, 26, 27, 28, 29, 30, 31, 32, 33,
- 34, 35, 36, 37, 38, 39, 40, 41, 42, 43,
- 44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
- 54, 55, 56, 57, 58, 0, 0, 61, 62, 63,
- 64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
- 74, 75, 76, 77, 78, 79, 80, 81, 82, 83,
- 84, 85, 86, 87, 88, 89, 90, 91, 92, 93,
- 94, 95, 96, 97, 98, 99, 100, 101, 102, 103,
- 104, 105, 106, 107, 108, 109, 110, 111, 112, 113,
- 114, 115, 116, 117, 118, 119, 120, 121, 122, 123,
- 124, 125, 126, 127, 128, 129, 130, 131, 132, 133,
- 134, 135, 136, 137, 138, 139, 140, 141, 142, 143,
- 144, 145, 146, 147, 148, 149, 150, 151, 152, 153,
- 154, 155, 156, 157, 158, 159, 160, 161, 162, 163,
- 164, 165, 166, 167, 168, 169, 170, 171, 172, 173,
- 174, 175, 176, 177, 178, 179, 180, 181, 182, 183,
- 184, 185, 186, 187, 188, 189, 190, 191, 192, 193,
- 194, 195, 196, 197, 198, 199, 200, 201, 202, 203,
- 204, 205, 206, 207, 208, 209, 210, 211, 212, 213,
- 214, 215, 216, 217, 218, 219, 220, 221, 222, 223,
- 224, 225, 226, 227, 228, 229, 230, 231, 232, 233,
- 234, 235, 236, 237, 238, 239, 240, 241, 242, 243,
- 244, 245, 246, 247, 248, 249, 250, 251, 252, 253,
- 254, 255, 256, 257, 258, 259, 260, 261, 262, 263,
- 264, 265, 266, 267, 268, 269, 270, 271, 272, 273,
- 274, 275, 276, 277, 278, 279, 280, 281, 282, 283,
- 284, 285, 286, 287, 288, 289, 290, 291, 292, 293,
- 294, 295, 296, 297, 298, 299, 300, 301, 302, 303,
- 304, 305, 306, 307, 308, 309, 310, 311, 312, 313,
- 314, 0, 0, 0, 0, 0, 0, 321, 0, 0,
- 0, 0, 0, 499, 500, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 501, 502, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 503, 504, 505, 506, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 507, 508, 509, 510, 511,
- 332, 0, 0, 0, 0, 337, 654, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 512, 513,
- 514, 515, 516, 517, 518, 519, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 358, 2, 3, 4, 5, 6, 7, 8, 9, 10,
- 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
- 21, 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 31, 32, 33, 34, 35, 36, 37, 38, 39, 40,
- 41, 42, 43, 44, 45, 46, 47, 48, 49, 50,
- 51, 52, 53, 54, 55, 56, 57, 58, 0, 0,
- 61, 62, 63, 64, 65, 66, 67, 68, 69, 70,
- 71, 72, 73, 74, 75, 76, 77, 78, 79, 80,
- 81, 82, 83, 84, 85, 86, 87, 88, 89, 90,
- 91, 92, 93, 94, 95, 96, 97, 98, 99, 100,
- 101, 102, 103, 104, 105, 106, 107, 108, 109, 110,
- 111, 112, 113, 114, 115, 116, 117, 118, 119, 120,
- 121, 122, 123, 124, 125, 126, 127, 128, 129, 130,
- 131, 132, 133, 134, 135, 136, 137, 138, 139, 140,
- 141, 142, 143, 144, 145, 146, 147, 148, 149, 150,
- 151, 152, 153, 154, 155, 156, 157, 158, 159, 160,
- 161, 162, 163, 164, 165, 166, 167, 168, 169, 170,
- 171, 172, 173, 174, 175, 176, 177, 178, 179, 180,
- 181, 182, 183, 184, 185, 186, 187, 188, 189, 190,
- 191, 192, 193, 194, 195, 196, 197, 198, 199, 200,
- 201, 202, 203, 204, 205, 206, 207, 208, 209, 210,
- 211, 212, 213, 214, 215, 216, 217, 218, 219, 220,
- 221, 222, 223, 224, 225, 226, 227, 228, 229, 230,
- 231, 232, 233, 234, 235, 236, 237, 238, 239, 240,
- 241, 242, 243, 244, 245, 246, 247, 248, 249, 250,
- 251, 252, 253, 254, 255, 256, 257, 258, 259, 260,
- 261, 262, 263, 264, 265, 266, 267, 268, 269, 270,
- 271, 272, 273, 274, 275, 276, 277, 278, 279, 280,
- 281, 282, 283, 284, 285, 286, 287, 288, 289, 290,
- 291, 292, 293, 294, 295, 296, 297, 298, 299, 300,
- 301, 302, 303, 304, 305, 306, 307, 308, 309, 310,
- 311, 312, 313, 314, 0, 0, 0, 0, 0, 0,
- 321, 0, 0, 0, 0, 0, 499, 500, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 501, 502, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 503, 504, 505,
- 506, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 507, 508,
- 509, 510, 694, 332, 0, 0, 0, 0, 337, 338,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 512, 513, 514, 515, 516, 517, 518, 519, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 358, 2, 3, 4, 5, 6, 7,
- 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
- 18, 19, 20, 21, 22, 23, 24, 25, 26, 27,
- 28, 29, 30, 31, 32, 33, 34, 35, 36, 37,
- 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
- 48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
- 58, 0, 0, 61, 62, 63, 64, 65, 66, 67,
- 68, 69, 70, 71, 72, 73, 74, 75, 76, 77,
- 78, 79, 80, 81, 82, 83, 84, 85, 86, 87,
- 88, 89, 90, 91, 92, 93, 94, 95, 96, 97,
- 98, 99, 100, 101, 102, 103, 104, 105, 106, 107,
- 108, 109, 110, 111, 112, 113, 114, 115, 116, 117,
- 118, 119, 120, 121, 122, 123, 124, 125, 126, 127,
- 128, 129, 130, 131, 132, 133, 134, 135, 136, 137,
- 138, 139, 140, 141, 142, 143, 144, 145, 146, 147,
- 148, 149, 150, 151, 152, 153, 154, 155, 156, 157,
- 158, 159, 160, 161, 162, 163, 164, 165, 166, 167,
- 168, 169, 170, 171, 172, 173, 174, 175, 176, 177,
- 178, 179, 180, 181, 182, 183, 184, 185, 186, 187,
- 188, 189, 190, 191, 192, 193, 194, 195, 196, 197,
- 198, 199, 200, 201, 202, 203, 204, 205, 206, 207,
- 208, 209, 210, 211, 212, 213, 214, 215, 216, 217,
- 218, 219, 220, 221, 222, 223, 224, 225, 226, 227,
- 228, 229, 230, 231, 232, 233, 234, 235, 236, 237,
- 238, 239, 240, 241, 242, 243, 244, 245, 246, 247,
- 248, 249, 250, 251, 252, 253, 254, 255, 256, 257,
- 258, 259, 260, 261, 262, 263, 264, 265, 266, 267,
- 268, 269, 270, 271, 272, 273, 274, 275, 276, 277,
- 278, 279, 280, 281, 282, 283, 284, 285, 286, 287,
- 288, 289, 290, 291, 292, 293, 294, 295, 296, 297,
- 298, 299, 300, 301, 302, 303, 304, 305, 306, 307,
- 308, 309, 310, 311, 312, 313, 314, 0, 0, 0,
- 0, 0, 0, 321, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 332, 0, 0, 0,
- 0, 337, 338
+ 0, 0, 0, 0, 0, 0, 0, 332, 0, 0,
+ 0, 0, 337, 338
};
static const yytype_int16 yycheck[] =
{
- 0, 382, 0, 493, 622, 502, 401, 401, 481, 0,
- 434, 406, 542, 0, 0, 736, 440, 634, 567, 636,
- 401, 390, 639, 413, 569, 406, 348, 547, 558, 348,
- 359, 385, 552, 331, 332, 336, 353, 349, 528, 348,
- 898, 561, 351, 351, 348, 426, 356, 905, 350, 352,
- 570, 359, 351, 356, 356, 409, 385, 915, 368, 358,
- 382, 430, 381, 382, 383, 384, 490, 491, 385, 367,
- 368, 372, 351, 382, 547, 349, 351, 446, 382, 552,
- 359, 329, 330, 358, 349, 558, 351, 351, 561, 374,
- 375, 376, 473, 358, 358, 640, 353, 570, 361, 356,
- 363, 349, 359, 351, 601, 349, 603, 355, 408, 409,
- 410, 411, 412, 413, 414, 350, 589, 719, 720, 721,
- 722, 356, 740, 338, 339, 340, 341, 342, 343, 344,
- 345, 346, 347, 350, 350, 356, 350, 349, 359, 356,
- 356, 349, 356, 358, 634, 350, 636, 350, 349, 639,
- 540, 356, 649, 356, 540, 350, 773, 351, 350, 708,
- 546, 356, 548, 653, 356, 551, 556, 553, 349, 555,
- 556, 350, 350, 350, 560, 350, 350, 356, 356, 356,
- 349, 356, 356, 350, 350, 350, 567, 350, 569, 356,
- 356, 356, 350, 356, 350, 350, 565, 350, 919, 350,
- 356, 356, 350, 356, 350, 356, 350, 350, 356, 350,
- 356, 356, 356, 356, 352, 356, 352, 349, 356, 356,
- 356, 385, 359, 841, 381, 382, 383, 384, 382, 778,
- 439, 385, 382, 327, 328, 385, 853, 632, 632, 356,
- 382, 356, 359, 385, 359, 742, 354, 382, 356, 746,
- 385, 632, 382, 382, 382, 385, 385, 385, 382, 640,
- 382, 385, 382, 385, 382, 385, 382, 385, 356, 385,
- 356, 359, 481, 359, 764, 765, 349, 797, 798, 703,
- 810, 333, 334, 773, 804, 805, 364, 365, 366, 359,
- 499, 500, 909, 356, 357, 356, 357, 715, 716, 367,
- 717, 718, 723, 724, 359, 385, 385, 925, 353, 385,
- 353, 351, 351, 385, 385, 350, 385, 359, 349, 356,
- 358, 350, 531, 356, 797, 798, 358, 708, 356, 356,
- 356, 804, 805, 356, 356, 356, 356, 810, 547, 812,
- 356, 356, 839, 552, 358, 842, 359, 350, 349, 558,
- 349, 385, 561, 371, 351, 385, 351, 369, 348, 352,
- 385, 570, 370, 853, 335, 337, 385, 352, 349, 349,
- 354, 359, 349, 349, 385, 349, 359, 359, 357, 385,
- 589, 359, 382, 349, 359, 882, 359, 350, 356, 359,
- 390, 382, 356, 358, 356, 350, 350, 778, 398, 390,
- 398, 401, 899, 352, 352, 895, 406, 398, 406, 356,
- 401, 398, 398, 348, 348, 406, 416, 914, 354, 909,
- 406, 382, 350, 349, 393, 385, 426, 425, 353, 358,
- 430, 350, 359, 353, 725, 426, 353, 359, 397, 430,
- 354, 727, 726, 730, 331, 728, 446, 842, 729, 658,
- 702, 572, 817, 844, 904, 446, 915, 844, 916, 632,
- 882, 842, 632, 430, 632, 398, 563, 424, 426, 795,
- 793, 798, 802, 473, 430, 800, 808, 805, 812, 810,
- -1, -1, 473, -1, 484, -1, -1, 882, -1, -1,
- -1, -1, -1, 484, -1, -1, -1, -1, -1, -1,
- -1, 882, -1, 712, 713, 714, 715, 716, 717, 718,
+ 0, 385, 0, 404, 496, 484, 625, 0, 409, 0,
+ 505, 0, 437, 442, 550, 545, 404, 570, 443, 555,
+ 404, 739, 393, 348, 416, 409, 572, 336, 564, 353,
+ 637, 561, 639, 348, 351, 642, 351, 573, 348, 531,
+ 359, 348, 356, 331, 332, 429, 385, 382, 352, 349,
+ 385, 349, 356, 351, 368, 484, 381, 382, 383, 384,
+ 358, 385, 433, 372, 329, 330, 385, 382, 493, 494,
+ 409, 550, 382, 502, 503, 382, 555, 349, 449, 367,
+ 368, 351, 561, 351, 349, 564, 351, 543, 358, 901,
+ 355, 359, 476, 549, 573, 551, 908, 643, 554, 359,
+ 556, 351, 558, 559, 351, 534, 918, 563, 358, 604,
+ 351, 606, 359, 592, 350, 382, 353, 358, 385, 356,
+ 356, 550, 359, 350, 743, 350, 555, 350, 350, 356,
+ 349, 356, 561, 356, 356, 564, 411, 412, 413, 414,
+ 415, 416, 417, 350, 573, 637, 350, 639, 350, 356,
+ 642, 543, 356, 350, 356, 350, 350, 652, 711, 356,
+ 350, 356, 356, 592, 656, 356, 356, 559, 359, 776,
+ 338, 339, 340, 341, 342, 343, 344, 345, 346, 347,
+ 352, 350, 356, 350, 356, 350, 570, 356, 572, 356,
+ 358, 356, 350, 350, 350, 350, 350, 568, 356, 356,
+ 356, 356, 356, 350, 922, 350, 350, 350, 350, 356,
+ 350, 356, 356, 356, 356, 350, 356, 350, 352, 385,
+ 382, 356, 356, 385, 356, 844, 349, 359, 781, 349,
+ 382, 349, 661, 385, 635, 381, 382, 383, 384, 722,
+ 723, 724, 725, 382, 349, 382, 385, 635, 385, 856,
+ 745, 635, 382, 382, 749, 385, 385, 382, 382, 643,
+ 385, 385, 349, 382, 800, 801, 385, 382, 327, 328,
+ 385, 807, 808, 349, 356, 767, 768, 359, 361, 356,
+ 363, 706, 359, 813, 776, 349, 715, 716, 717, 718,
719, 720, 721, 722, 723, 724, 725, 726, 727, 728,
- 729, 730, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, 536, -1,
+ 729, 730, 731, 732, 733, 912, 367, 356, 356, 928,
+ 359, 359, 374, 375, 376, 333, 334, 364, 365, 366,
+ 359, 800, 801, 354, 353, 356, 353, 711, 807, 808,
+ 356, 357, 356, 357, 813, 385, 815, 718, 719, 385,
+ 720, 721, 385, 726, 727, 351, 351, 842, 385, 385,
+ 845, 350, 385, 356, 371, 359, 358, 358, 350, 356,
+ 356, 356, 356, 385, 856, 349, 356, 382, 356, 356,
+ 356, 800, 801, 356, 356, 350, 349, 358, 807, 808,
+ 359, 351, 349, 351, 813, 385, 815, 348, 352, 335,
+ 885, 354, 370, 393, 385, 369, 337, 781, 352, 349,
+ 385, 401, 393, 401, 404, 385, 898, 902, 401, 409,
+ 401, 409, 401, 404, 385, 359, 409, 359, 409, 419,
+ 912, 349, 917, 349, 359, 349, 357, 359, 349, 429,
+ 428, 349, 359, 433, 359, 350, 359, 385, 429, 358,
+ 385, 350, 433, 356, 845, 356, 356, 350, 352, 449,
+ 352, 356, 348, 393, 348, 354, 385, 350, 449, 349,
+ 358, 845, 353, 350, 359, 353, 728, 397, 353, 730,
+ 359, 354, 729, 731, 733, 433, 476, 732, 429, 575,
+ 433, 331, 820, 847, 885, 476, 907, 487, 705, 919,
+ 918, 427, 847, 401, 635, 885, 487, 635, 566, 635,
+ 801, 885, 798, 796, 815, 808, 805, 813, 803, 811,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, 565, -1, 567, -1, 569,
- -1, -1, -1, -1, 565, -1, 567, -1, 569, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, 797, 798,
- -1, -1, -1, -1, -1, 804, 805, -1, -1, -1,
- -1, 810, -1, 812, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, 622, -1, -1, -1, -1, -1, -1, -1,
- -1, 622, 632, -1, -1, -1, -1, -1, -1, -1,
- 640, 632, -1, -1, -1, -1, -1, -1, -1, 640,
+ -1, 539, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, 568, -1,
+ 570, -1, 572, -1, -1, -1, -1, 568, -1, 570,
+ -1, 572, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, 625, -1, -1, -1, -1,
+ -1, -1, -1, -1, 625, 635, -1, -1, -1, -1,
+ -1, -1, -1, 643, 635, -1, -1, -1, -1, -1,
+ -1, -1, 643, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, 708, -1,
- -1, -1, -1, -1, -1, -1, -1, 708, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, 736, -1, -1, -1,
- 740, -1, -1, -1, -1, 736, -1, -1, -1, 740,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, 778, -1,
- -1, -1, -1, -1, -1, -1, -1, 778, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, 711, -1, -1, -1, -1, -1, -1, -1, -1,
+ 711, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, 739,
+ -1, -1, -1, 743, -1, -1, -1, -1, 739, -1,
+ -1, -1, 743, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, 781, -1, -1, -1, -1, -1, -1, -1, -1,
+ 781, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, 841, 842, -1, 844, -1, 844, -1, -1, -1,
- 841, 842, -1, 844, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, 882, -1, -1, -1, -1, -1, -1, -1,
- -1, 882, -1, -1, -1, -1, -1, -1, 898, -1,
- -1, -1, -1, -1, -1, 905, -1, 898, -1, -1,
- -1, -1, -1, -1, 905, 915, -1, -1, -1, 919,
- -1, -1, -1, -1, 915, 925, -1, -1, 919, -1,
- -1, -1, 0, -1, 925, 3, 4, 5, 6, 7,
- 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
- 18, 19, 20, 21, 22, 23, 24, 25, 26, 27,
- 28, 29, 30, 31, 32, 33, 34, 35, 36, 37,
- 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
- 48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
- 58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
- 68, 69, 70, 71, 72, 73, 74, 75, 76, 77,
- 78, 79, 80, 81, 82, 83, 84, 85, 86, 87,
- 88, 89, 90, 91, 92, 93, 94, 95, 96, 97,
- 98, 99, 100, 101, 102, 103, 104, 105, 106, 107,
- 108, 109, 110, 111, 112, 113, 114, 115, 116, 117,
- 118, 119, 120, 121, 122, 123, 124, 125, 126, 127,
- 128, 129, 130, 131, 132, 133, 134, 135, 136, 137,
- 138, 139, 140, 141, 142, 143, 144, 145, 146, 147,
- 148, 149, 150, 151, 152, 153, 154, 155, 156, 157,
- 158, 159, 160, 161, 162, 163, 164, 165, 166, 167,
- 168, 169, 170, 171, 172, 173, 174, 175, 176, 177,
- 178, 179, 180, 181, 182, 183, 184, 185, 186, 187,
- 188, 189, 190, 191, 192, 193, 194, 195, 196, 197,
- 198, 199, 200, 201, 202, 203, 204, 205, 206, 207,
- 208, 209, 210, 211, 212, 213, 214, 215, 216, 217,
- 218, 219, 220, 221, 222, 223, 224, 225, 226, 227,
- 228, 229, 230, 231, 232, 233, 234, 235, 236, 237,
- 238, 239, 240, 241, 242, 243, 244, 245, 246, 247,
- 248, 249, 250, 251, 252, 253, 254, 255, 256, 257,
- 258, 259, 260, 261, 262, 263, 264, 265, 266, 267,
- 268, 269, 270, 271, 272, 273, 274, 275, 276, 277,
- 278, 279, 280, 281, 282, 283, 284, 285, 286, 287,
- 288, 289, 290, 291, 292, 293, 294, 295, 296, 297,
- 298, 299, 300, 301, 302, 303, 304, 305, 306, 307,
- 308, 309, 310, 311, 312, 313, 314, 315, 316, 317,
- 318, 319, 320, 321, 322, 323, 324, 325, 326, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, 844, 845, -1, 847, -1, 847,
+ -1, -1, -1, 844, 845, -1, 847, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, 351, -1, -1, -1, -1, -1, -1,
- -1, 359, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, 373, 374, 375, 376, 377,
- -1, -1, -1, -1, -1, -1, -1, -1, 386, 387,
- 388, 389, 390, 391, 392, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- 408, 409, 410, 411, 412, 413, -1, -1, -1, -1,
- -1, -1, -1, -1, 422, -1, 424, 425, 426, 427,
- 428, 429, 430, 431, 432, 433, 434, 435, 436, 437,
- 438, 439, 440, 441, 442, 443, 444, 445, 446, 447,
- 448, 449, 450, 451, 452, 453, 454, 3, 4, 5,
- 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
- 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
- 26, 27, 28, 29, 30, 31, 32, 33, 34, 35,
- 36, 37, 38, 39, 40, 41, 42, 43, 44, 45,
- 46, 47, 48, 49, 50, 51, 52, 53, 54, 55,
- 56, 57, 58, 59, 60, 61, 62, 63, 64, 65,
- 66, 67, 68, 69, 70, 71, 72, 73, 74, 75,
- 76, 77, 78, 79, 80, 81, 82, 83, 84, 85,
- 86, 87, 88, 89, 90, 91, 92, 93, 94, 95,
- 96, 97, 98, 99, 100, 101, 102, 103, 104, 105,
- 106, 107, 108, 109, 110, 111, 112, 113, 114, 115,
- 116, 117, 118, 119, 120, 121, 122, 123, 124, 125,
- 126, 127, 128, 129, 130, 131, 132, 133, 134, 135,
- 136, 137, 138, 139, 140, 141, 142, 143, 144, 145,
- 146, 147, 148, 149, 150, 151, 152, 153, 154, 155,
- 156, 157, 158, 159, 160, 161, 162, 163, 164, 165,
- 166, 167, 168, 169, 170, 171, 172, 173, 174, 175,
- 176, 177, 178, 179, 180, 181, 182, 183, 184, 185,
- 186, 187, 188, 189, 190, 191, 192, 193, 194, 195,
- 196, 197, 198, 199, 200, 201, 202, 203, 204, 205,
- 206, 207, 208, 209, 210, 211, 212, 213, 214, 215,
- 216, 217, 218, 219, 220, 221, 222, 223, 224, 225,
- 226, 227, 228, 229, 230, 231, 232, 233, 234, 235,
- 236, 237, 238, 239, 240, 241, 242, 243, 244, 245,
- 246, 247, 248, 249, 250, 251, 252, 253, 254, 255,
- 256, 257, 258, 259, 260, 261, 262, 263, 264, 265,
- 266, 267, 268, 269, 270, 271, 272, 273, 274, 275,
- 276, 277, 278, 279, 280, 281, 282, 283, 284, 285,
- 286, 287, 288, 289, 290, 291, 292, 293, 294, 295,
- 296, 297, 298, 299, 300, 301, 302, 303, 304, 305,
- 306, 307, 308, 309, 310, 311, 312, 313, 314, 315,
- 316, 317, 318, 319, 320, 321, 322, 323, 324, 325,
- 326, -1, -1, 329, 330, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, 885, -1, -1, -1, -1,
+ -1, -1, -1, -1, 885, -1, -1, -1, -1, -1,
+ -1, 901, -1, -1, -1, -1, -1, -1, 908, -1,
+ 901, -1, -1, -1, -1, -1, -1, 908, 918, -1,
+ -1, -1, 922, -1, -1, -1, -1, 918, 928, -1,
+ -1, 922, -1, -1, -1, 0, -1, 928, 3, 4,
+ 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
+ 15, 16, 17, 18, 19, 20, 21, 22, 23, 24,
+ 25, 26, 27, 28, 29, 30, 31, 32, 33, 34,
+ 35, 36, 37, 38, 39, 40, 41, 42, 43, 44,
+ 45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
+ 55, 56, 57, 58, 59, 60, 61, 62, 63, 64,
+ 65, 66, 67, 68, 69, 70, 71, 72, 73, 74,
+ 75, 76, 77, 78, 79, 80, 81, 82, 83, 84,
+ 85, 86, 87, 88, 89, 90, 91, 92, 93, 94,
+ 95, 96, 97, 98, 99, 100, 101, 102, 103, 104,
+ 105, 106, 107, 108, 109, 110, 111, 112, 113, 114,
+ 115, 116, 117, 118, 119, 120, 121, 122, 123, 124,
+ 125, 126, 127, 128, 129, 130, 131, 132, 133, 134,
+ 135, 136, 137, 138, 139, 140, 141, 142, 143, 144,
+ 145, 146, 147, 148, 149, 150, 151, 152, 153, 154,
+ 155, 156, 157, 158, 159, 160, 161, 162, 163, 164,
+ 165, 166, 167, 168, 169, 170, 171, 172, 173, 174,
+ 175, 176, 177, 178, 179, 180, 181, 182, 183, 184,
+ 185, 186, 187, 188, 189, 190, 191, 192, 193, 194,
+ 195, 196, 197, 198, 199, 200, 201, 202, 203, 204,
+ 205, 206, 207, 208, 209, 210, 211, 212, 213, 214,
+ 215, 216, 217, 218, 219, 220, 221, 222, 223, 224,
+ 225, 226, 227, 228, 229, 230, 231, 232, 233, 234,
+ 235, 236, 237, 238, 239, 240, 241, 242, 243, 244,
+ 245, 246, 247, 248, 249, 250, 251, 252, 253, 254,
+ 255, 256, 257, 258, 259, 260, 261, 262, 263, 264,
+ 265, 266, 267, 268, 269, 270, 271, 272, 273, 274,
+ 275, 276, 277, 278, 279, 280, 281, 282, 283, 284,
+ 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,
+ 295, 296, 297, 298, 299, 300, 301, 302, 303, 304,
+ 305, 306, 307, 308, 309, 310, 311, 312, 313, 314,
+ 315, 316, 317, 318, 319, 320, 321, 322, 323, 324,
+ 325, 326, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, 348, 349, -1, 351, -1, 353, 354, -1,
- -1, -1, -1, 359, 360, 361, 362, 363, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, 373, 374, 375,
- 376, 377, -1, -1, -1, 381, 382, 383, 384, 385,
- 386, 387, 388, 389, 390, 391, 392, 393, 394, 395,
- 396, -1, 398, 399, 400, 401, 402, 403, 404, 405,
- 406, 407, 408, 409, 410, 411, 412, 413, 414, 415,
- 416, 417, 418, 419, 420, 421, 422, 423, 424, 425,
- 426, 427, 428, 429, 430, 431, 432, 433, 434, 435,
- 436, 437, 438, 439, 440, 441, 442, 443, 444, 445,
- 446, 447, 448, 449, 450, 451, 452, 453, 454, 3,
- 4, 5, 6, 7, 8, 9, 10, 11, 12, 13,
- 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
- 24, 25, 26, 27, 28, 29, 30, 31, 32, 33,
- 34, 35, 36, 37, 38, 39, 40, 41, 42, 43,
- 44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
- 54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
- 64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
- 74, 75, 76, 77, 78, 79, 80, 81, 82, 83,
- 84, 85, 86, 87, 88, 89, 90, 91, 92, 93,
- 94, 95, 96, 97, 98, 99, 100, 101, 102, 103,
- 104, 105, 106, 107, 108, 109, 110, 111, 112, 113,
- 114, 115, 116, 117, 118, 119, 120, 121, 122, 123,
- 124, 125, 126, 127, 128, 129, 130, 131, 132, 133,
- 134, 135, 136, 137, 138, 139, 140, 141, 142, 143,
- 144, 145, 146, 147, 148, 149, 150, 151, 152, 153,
- 154, 155, 156, 157, 158, 159, 160, 161, 162, 163,
- 164, 165, 166, 167, 168, 169, 170, 171, 172, 173,
- 174, 175, 176, 177, 178, 179, 180, 181, 182, 183,
- 184, 185, 186, 187, 188, 189, 190, 191, 192, 193,
- 194, 195, 196, 197, 198, 199, 200, 201, 202, 203,
- 204, 205, 206, 207, 208, 209, 210, 211, 212, 213,
- 214, 215, 216, 217, 218, 219, 220, 221, 222, 223,
- 224, 225, 226, 227, 228, 229, 230, 231, 232, 233,
- 234, 235, 236, 237, 238, 239, 240, 241, 242, 243,
- 244, 245, 246, 247, 248, 249, 250, 251, 252, 253,
- 254, 255, 256, 257, 258, 259, 260, 261, 262, 263,
- 264, 265, 266, 267, 268, 269, 270, 271, 272, 273,
- 274, 275, 276, 277, 278, 279, 280, 281, 282, 283,
- 284, 285, 286, 287, 288, 289, 290, 291, 292, 293,
- 294, 295, 296, 297, 298, 299, 300, 301, 302, 303,
- 304, 305, 306, 307, 308, 309, 310, 311, 312, 313,
- 314, 315, 316, 317, 318, 319, 320, 321, 322, 323,
- 324, 325, 326, -1, -1, 329, 330, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, 351, -1, -1, -1,
+ -1, -1, -1, -1, 359, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, 373, 374,
+ 375, 376, 377, -1, -1, -1, -1, -1, -1, -1,
+ -1, 386, 387, 388, 389, 390, 391, 392, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, 348, 349, -1, 351, -1, 353,
- 354, -1, -1, -1, -1, 359, 360, 361, 362, 363,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, 373,
- 374, 375, 376, 377, -1, -1, -1, 381, 382, 383,
- 384, 385, 386, 387, 388, 389, 390, 391, 392, 393,
- 394, 395, 396, -1, 398, 399, 400, 401, 402, 403,
- 404, 405, 406, 407, 408, 409, 410, 411, 412, 413,
- 414, 415, 416, 417, 418, 419, 420, 421, 422, 423,
- 424, 425, 426, 427, 428, 429, 430, 431, 432, 433,
- 434, 435, 436, 437, 438, 439, 440, 441, 442, 443,
- 444, 445, 446, 447, 448, 449, 450, 451, 452, 453,
- 454, 3, 4, 5, 6, 7, 8, 9, 10, 11,
- 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 32, 33, 34, 35, 36, 37, 38, 39, 40, 41,
- 42, 43, 44, 45, 46, 47, 48, 49, 50, 51,
- 52, 53, 54, 55, 56, 57, 58, 59, 60, 61,
- 62, 63, 64, 65, 66, 67, 68, 69, 70, 71,
- 72, 73, 74, 75, 76, 77, 78, 79, 80, 81,
- 82, 83, 84, 85, 86, 87, 88, 89, 90, 91,
- 92, 93, 94, 95, 96, 97, 98, 99, 100, 101,
- 102, 103, 104, 105, 106, 107, 108, 109, 110, 111,
- 112, 113, 114, 115, 116, 117, 118, 119, 120, 121,
- 122, 123, 124, 125, 126, 127, 128, 129, 130, 131,
- 132, 133, 134, 135, 136, 137, 138, 139, 140, 141,
- 142, 143, 144, 145, 146, 147, 148, 149, 150, 151,
- 152, 153, 154, 155, 156, 157, 158, 159, 160, 161,
- 162, 163, 164, 165, 166, 167, 168, 169, 170, 171,
- 172, 173, 174, 175, 176, 177, 178, 179, 180, 181,
- 182, 183, 184, 185, 186, 187, 188, 189, 190, 191,
- 192, 193, 194, 195, 196, 197, 198, 199, 200, 201,
- 202, 203, 204, 205, 206, 207, 208, 209, 210, 211,
- 212, 213, 214, 215, 216, 217, 218, 219, 220, 221,
- 222, 223, 224, 225, 226, 227, 228, 229, 230, 231,
- 232, 233, 234, 235, 236, 237, 238, 239, 240, 241,
- 242, 243, 244, 245, 246, 247, 248, 249, 250, 251,
- 252, 253, 254, 255, 256, 257, 258, 259, 260, 261,
- 262, 263, 264, 265, 266, 267, 268, 269, 270, 271,
- 272, 273, 274, 275, 276, 277, 278, 279, 280, 281,
- 282, 283, 284, 285, 286, 287, 288, 289, 290, 291,
- 292, 293, 294, 295, 296, 297, 298, 299, 300, 301,
- 302, 303, 304, 305, 306, 307, 308, 309, 310, 311,
- 312, 313, 314, 315, 316, 317, 318, 319, 320, 321,
- 322, 323, 324, 325, 326, -1, -1, 329, 330, -1,
+ -1, -1, -1, 408, 409, 410, 411, 412, 413, -1,
+ -1, -1, -1, -1, -1, -1, -1, 422, -1, 424,
+ 425, 426, 427, 428, 429, 430, 431, 432, 433, 434,
+ 435, 436, 437, 438, 439, 440, 441, 442, 443, 444,
+ 445, 446, 447, 448, 449, 450, 451, 452, 453, 454,
+ 455, 456, 457, 3, 4, 5, 6, 7, 8, 9,
+ 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
+ 20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
+ 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
+ 40, 41, 42, 43, 44, 45, 46, 47, 48, 49,
+ 50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
+ 60, 61, 62, 63, 64, 65, 66, 67, 68, 69,
+ 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
+ 80, 81, 82, 83, 84, 85, 86, 87, 88, 89,
+ 90, 91, 92, 93, 94, 95, 96, 97, 98, 99,
+ 100, 101, 102, 103, 104, 105, 106, 107, 108, 109,
+ 110, 111, 112, 113, 114, 115, 116, 117, 118, 119,
+ 120, 121, 122, 123, 124, 125, 126, 127, 128, 129,
+ 130, 131, 132, 133, 134, 135, 136, 137, 138, 139,
+ 140, 141, 142, 143, 144, 145, 146, 147, 148, 149,
+ 150, 151, 152, 153, 154, 155, 156, 157, 158, 159,
+ 160, 161, 162, 163, 164, 165, 166, 167, 168, 169,
+ 170, 171, 172, 173, 174, 175, 176, 177, 178, 179,
+ 180, 181, 182, 183, 184, 185, 186, 187, 188, 189,
+ 190, 191, 192, 193, 194, 195, 196, 197, 198, 199,
+ 200, 201, 202, 203, 204, 205, 206, 207, 208, 209,
+ 210, 211, 212, 213, 214, 215, 216, 217, 218, 219,
+ 220, 221, 222, 223, 224, 225, 226, 227, 228, 229,
+ 230, 231, 232, 233, 234, 235, 236, 237, 238, 239,
+ 240, 241, 242, 243, 244, 245, 246, 247, 248, 249,
+ 250, 251, 252, 253, 254, 255, 256, 257, 258, 259,
+ 260, 261, 262, 263, 264, 265, 266, 267, 268, 269,
+ 270, 271, 272, 273, 274, 275, 276, 277, 278, 279,
+ 280, 281, 282, 283, 284, 285, 286, 287, 288, 289,
+ 290, 291, 292, 293, 294, 295, 296, 297, 298, 299,
+ 300, 301, 302, 303, 304, 305, 306, 307, 308, 309,
+ 310, 311, 312, 313, 314, 315, 316, 317, 318, 319,
+ 320, 321, 322, 323, 324, 325, 326, -1, -1, 329,
+ 330, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, 348, 349,
+ -1, 351, -1, 353, 354, -1, -1, -1, -1, 359,
+ 360, 361, 362, 363, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, 373, 374, 375, 376, 377, -1, -1,
+ -1, 381, 382, 383, 384, 385, 386, 387, 388, 389,
+ 390, 391, 392, 393, 394, 395, 396, -1, 398, 399,
+ 400, 401, 402, 403, 404, 405, 406, 407, 408, 409,
+ 410, 411, 412, 413, 414, 415, 416, 417, 418, 419,
+ 420, 421, 422, 423, 424, 425, 426, 427, 428, 429,
+ 430, 431, 432, 433, 434, 435, 436, 437, 438, 439,
+ 440, 441, 442, 443, 444, 445, 446, 447, 448, 449,
+ 450, 451, 452, 453, 454, 455, 456, 457, 3, 4,
+ 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
+ 15, 16, 17, 18, 19, 20, 21, 22, 23, 24,
+ 25, 26, 27, 28, 29, 30, 31, 32, 33, 34,
+ 35, 36, 37, 38, 39, 40, 41, 42, 43, 44,
+ 45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
+ 55, 56, 57, 58, 59, 60, 61, 62, 63, 64,
+ 65, 66, 67, 68, 69, 70, 71, 72, 73, 74,
+ 75, 76, 77, 78, 79, 80, 81, 82, 83, 84,
+ 85, 86, 87, 88, 89, 90, 91, 92, 93, 94,
+ 95, 96, 97, 98, 99, 100, 101, 102, 103, 104,
+ 105, 106, 107, 108, 109, 110, 111, 112, 113, 114,
+ 115, 116, 117, 118, 119, 120, 121, 122, 123, 124,
+ 125, 126, 127, 128, 129, 130, 131, 132, 133, 134,
+ 135, 136, 137, 138, 139, 140, 141, 142, 143, 144,
+ 145, 146, 147, 148, 149, 150, 151, 152, 153, 154,
+ 155, 156, 157, 158, 159, 160, 161, 162, 163, 164,
+ 165, 166, 167, 168, 169, 170, 171, 172, 173, 174,
+ 175, 176, 177, 178, 179, 180, 181, 182, 183, 184,
+ 185, 186, 187, 188, 189, 190, 191, 192, 193, 194,
+ 195, 196, 197, 198, 199, 200, 201, 202, 203, 204,
+ 205, 206, 207, 208, 209, 210, 211, 212, 213, 214,
+ 215, 216, 217, 218, 219, 220, 221, 222, 223, 224,
+ 225, 226, 227, 228, 229, 230, 231, 232, 233, 234,
+ 235, 236, 237, 238, 239, 240, 241, 242, 243, 244,
+ 245, 246, 247, 248, 249, 250, 251, 252, 253, 254,
+ 255, 256, 257, 258, 259, 260, 261, 262, 263, 264,
+ 265, 266, 267, 268, 269, 270, 271, 272, 273, 274,
+ 275, 276, 277, 278, 279, 280, 281, 282, 283, 284,
+ 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,
+ 295, 296, 297, 298, 299, 300, 301, 302, 303, 304,
+ 305, 306, 307, 308, 309, 310, 311, 312, 313, 314,
+ 315, 316, 317, 318, 319, 320, 321, 322, 323, 324,
+ 325, 326, -1, -1, 329, 330, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, 348, 349, -1, 351,
- -1, 353, -1, -1, -1, -1, -1, 359, 360, 361,
- 362, 363, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, 373, 374, 375, 376, 377, -1, -1, -1, 381,
- 382, 383, 384, 385, 386, 387, 388, 389, 390, 391,
- 392, 393, 394, 395, 396, -1, 398, 399, 400, 401,
- 402, 403, 404, 405, 406, 407, 408, 409, 410, 411,
- 412, 413, 414, 415, 416, 417, 418, 419, 420, 421,
- 422, 423, 424, 425, 426, 427, 428, 429, 430, 431,
- 432, 433, 434, 435, 436, 437, 438, 439, 440, 441,
- 442, 443, 444, 445, 446, 447, 448, 449, 450, 451,
- 452, 453, 454, 3, 4, 5, 6, 7, 8, 9,
+ -1, -1, -1, 348, 349, -1, 351, -1, 353, 354,
+ -1, -1, -1, -1, 359, 360, 361, 362, 363, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, 373, 374,
+ 375, 376, 377, -1, -1, -1, 381, 382, 383, 384,
+ 385, 386, 387, 388, 389, 390, 391, 392, 393, 394,
+ 395, 396, -1, 398, 399, 400, 401, 402, 403, 404,
+ 405, 406, 407, 408, 409, 410, 411, 412, 413, 414,
+ 415, 416, 417, 418, 419, 420, 421, 422, 423, 424,
+ 425, 426, 427, 428, 429, 430, 431, 432, 433, 434,
+ 435, 436, 437, 438, 439, 440, 441, 442, 443, 444,
+ 445, 446, 447, 448, 449, 450, 451, 452, 453, 454,
+ 455, 456, 457, 3, 4, 5, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
@@ -3254,188 +3222,235 @@ static const yytype_int16 yycheck[] =
420, 421, 422, 423, 424, 425, 426, 427, 428, 429,
430, 431, 432, 433, 434, 435, 436, 437, 438, 439,
440, 441, 442, 443, 444, 445, 446, 447, 448, 449,
- 450, 451, 452, 453, 454, 3, 4, 5, 6, 7,
- 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
- 18, 19, 20, 21, 22, 23, 24, 25, 26, 27,
- 28, 29, 30, 31, 32, 33, 34, 35, 36, 37,
- 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
- 48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
- 58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
- 68, 69, 70, 71, 72, 73, 74, 75, 76, 77,
- 78, 79, 80, 81, 82, 83, 84, 85, 86, 87,
- 88, 89, 90, 91, 92, 93, 94, 95, 96, 97,
- 98, 99, 100, 101, 102, 103, 104, 105, 106, 107,
- 108, 109, 110, 111, 112, 113, 114, 115, 116, 117,
- 118, 119, 120, 121, 122, 123, 124, 125, 126, 127,
- 128, 129, 130, 131, 132, 133, 134, 135, 136, 137,
- 138, 139, 140, 141, 142, 143, 144, 145, 146, 147,
- 148, 149, 150, 151, 152, 153, 154, 155, 156, 157,
- 158, 159, 160, 161, 162, 163, 164, 165, 166, 167,
- 168, 169, 170, 171, 172, 173, 174, 175, 176, 177,
- 178, 179, 180, 181, 182, 183, 184, 185, 186, 187,
- 188, 189, 190, 191, 192, 193, 194, 195, 196, 197,
- 198, 199, 200, 201, 202, 203, 204, 205, 206, 207,
- 208, 209, 210, 211, 212, 213, 214, 215, 216, 217,
- 218, 219, 220, 221, 222, 223, 224, 225, 226, 227,
- 228, 229, 230, 231, 232, 233, 234, 235, 236, 237,
- 238, 239, 240, 241, 242, 243, 244, 245, 246, 247,
- 248, 249, 250, 251, 252, 253, 254, 255, 256, 257,
- 258, 259, 260, 261, 262, 263, 264, 265, 266, 267,
- 268, 269, 270, 271, 272, 273, 274, 275, 276, 277,
- 278, 279, 280, 281, 282, 283, 284, 285, 286, 287,
- 288, 289, 290, 291, 292, 293, 294, 295, 296, 297,
- 298, 299, 300, 301, 302, 303, 304, 305, 306, 307,
- 308, 309, 310, 311, 312, 313, 314, 315, 316, 317,
- 318, 319, 320, 321, 322, 323, 324, 325, 326, -1,
- -1, 329, 330, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- 348, 349, -1, 351, -1, -1, -1, -1, -1, -1,
- -1, 359, 360, 361, 362, 363, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, 373, 374, 375, 376, 377,
- -1, -1, -1, 381, 382, 383, 384, 385, 386, 387,
- 388, 389, 390, 391, 392, 393, 394, 395, 396, -1,
- 398, 399, 400, 401, 402, 403, 404, 405, 406, 407,
- 408, 409, 410, 411, 412, 413, 414, 415, 416, 417,
- 418, 419, 420, 421, 422, 423, 424, 425, 426, 427,
- 428, 429, 430, 431, 432, 433, 434, 435, 436, 437,
- 438, 439, 440, 441, 442, 443, 444, 445, 446, 447,
- 448, 449, 450, 451, 452, 453, 454, 3, 4, 5,
- 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
- 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
- 26, 27, 28, 29, 30, 31, 32, 33, 34, 35,
- 36, 37, 38, 39, 40, 41, 42, 43, 44, 45,
- 46, 47, 48, 49, 50, 51, 52, 53, 54, 55,
- 56, 57, 58, 59, 60, 61, 62, 63, 64, 65,
- 66, 67, 68, 69, 70, 71, 72, 73, 74, 75,
- 76, 77, 78, 79, 80, 81, 82, 83, 84, 85,
- 86, 87, 88, 89, 90, 91, 92, 93, 94, 95,
- 96, 97, 98, 99, 100, 101, 102, 103, 104, 105,
- 106, 107, 108, 109, 110, 111, 112, 113, 114, 115,
- 116, 117, 118, 119, 120, 121, 122, 123, 124, 125,
- 126, 127, 128, 129, 130, 131, 132, 133, 134, 135,
- 136, 137, 138, 139, 140, 141, 142, 143, 144, 145,
- 146, 147, 148, 149, 150, 151, 152, 153, 154, 155,
- 156, 157, 158, 159, 160, 161, 162, 163, 164, 165,
- 166, 167, 168, 169, 170, 171, 172, 173, 174, 175,
- 176, 177, 178, 179, 180, 181, 182, 183, 184, 185,
- 186, 187, 188, 189, 190, 191, 192, 193, 194, 195,
- 196, 197, 198, 199, 200, 201, 202, 203, 204, 205,
- 206, 207, 208, 209, 210, 211, 212, 213, 214, 215,
- 216, 217, 218, 219, 220, 221, 222, 223, 224, 225,
- 226, 227, 228, 229, 230, 231, 232, 233, 234, 235,
- 236, 237, 238, 239, 240, 241, 242, 243, 244, 245,
- 246, 247, 248, 249, 250, 251, 252, 253, 254, 255,
- 256, 257, 258, 259, 260, 261, 262, 263, 264, 265,
- 266, 267, 268, 269, 270, 271, 272, 273, 274, 275,
- 276, 277, 278, 279, 280, 281, 282, 283, 284, 285,
- 286, 287, 288, 289, 290, 291, 292, 293, 294, 295,
- 296, 297, 298, 299, 300, 301, 302, 303, 304, 305,
- 306, 307, 308, 309, 310, 311, 312, 313, 314, 315,
- 316, 317, 318, 319, 320, 321, 322, 323, 324, 325,
- 326, -1, -1, 329, 330, -1, -1, -1, -1, -1,
+ 450, 451, 452, 453, 454, 455, 456, 457, 3, 4,
+ 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
+ 15, 16, 17, 18, 19, 20, 21, 22, 23, 24,
+ 25, 26, 27, 28, 29, 30, 31, 32, 33, 34,
+ 35, 36, 37, 38, 39, 40, 41, 42, 43, 44,
+ 45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
+ 55, 56, 57, 58, 59, 60, 61, 62, 63, 64,
+ 65, 66, 67, 68, 69, 70, 71, 72, 73, 74,
+ 75, 76, 77, 78, 79, 80, 81, 82, 83, 84,
+ 85, 86, 87, 88, 89, 90, 91, 92, 93, 94,
+ 95, 96, 97, 98, 99, 100, 101, 102, 103, 104,
+ 105, 106, 107, 108, 109, 110, 111, 112, 113, 114,
+ 115, 116, 117, 118, 119, 120, 121, 122, 123, 124,
+ 125, 126, 127, 128, 129, 130, 131, 132, 133, 134,
+ 135, 136, 137, 138, 139, 140, 141, 142, 143, 144,
+ 145, 146, 147, 148, 149, 150, 151, 152, 153, 154,
+ 155, 156, 157, 158, 159, 160, 161, 162, 163, 164,
+ 165, 166, 167, 168, 169, 170, 171, 172, 173, 174,
+ 175, 176, 177, 178, 179, 180, 181, 182, 183, 184,
+ 185, 186, 187, 188, 189, 190, 191, 192, 193, 194,
+ 195, 196, 197, 198, 199, 200, 201, 202, 203, 204,
+ 205, 206, 207, 208, 209, 210, 211, 212, 213, 214,
+ 215, 216, 217, 218, 219, 220, 221, 222, 223, 224,
+ 225, 226, 227, 228, 229, 230, 231, 232, 233, 234,
+ 235, 236, 237, 238, 239, 240, 241, 242, 243, 244,
+ 245, 246, 247, 248, 249, 250, 251, 252, 253, 254,
+ 255, 256, 257, 258, 259, 260, 261, 262, 263, 264,
+ 265, 266, 267, 268, 269, 270, 271, 272, 273, 274,
+ 275, 276, 277, 278, 279, 280, 281, 282, 283, 284,
+ 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,
+ 295, 296, 297, 298, 299, 300, 301, 302, 303, 304,
+ 305, 306, 307, 308, 309, 310, 311, 312, 313, 314,
+ 315, 316, 317, 318, 319, 320, 321, 322, 323, 324,
+ 325, 326, -1, -1, 329, 330, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, 348, 349, -1, 351, -1, -1, -1, -1,
- -1, -1, -1, 359, 360, 361, 362, 363, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, 373, 374, 375,
- 376, 377, -1, -1, -1, 381, 382, 383, 384, 385,
- 386, 387, 388, 389, 390, 391, 392, -1, -1, -1,
+ -1, -1, -1, 348, 349, -1, 351, -1, 353, -1,
+ -1, -1, -1, -1, 359, 360, 361, 362, 363, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, 373, 374,
+ 375, 376, 377, -1, -1, -1, 381, 382, 383, 384,
+ 385, 386, 387, 388, 389, 390, 391, 392, 393, 394,
+ 395, 396, -1, 398, 399, 400, 401, 402, 403, 404,
+ 405, 406, 407, 408, 409, 410, 411, 412, 413, 414,
+ 415, 416, 417, 418, 419, 420, 421, 422, 423, 424,
+ 425, 426, 427, 428, 429, 430, 431, 432, 433, 434,
+ 435, 436, 437, 438, 439, 440, 441, 442, 443, 444,
+ 445, 446, 447, 448, 449, 450, 451, 452, 453, 454,
+ 455, 456, 457, 3, 4, 5, 6, 7, 8, 9,
+ 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
+ 20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
+ 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
+ 40, 41, 42, 43, 44, 45, 46, 47, 48, 49,
+ 50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
+ 60, 61, 62, 63, 64, 65, 66, 67, 68, 69,
+ 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
+ 80, 81, 82, 83, 84, 85, 86, 87, 88, 89,
+ 90, 91, 92, 93, 94, 95, 96, 97, 98, 99,
+ 100, 101, 102, 103, 104, 105, 106, 107, 108, 109,
+ 110, 111, 112, 113, 114, 115, 116, 117, 118, 119,
+ 120, 121, 122, 123, 124, 125, 126, 127, 128, 129,
+ 130, 131, 132, 133, 134, 135, 136, 137, 138, 139,
+ 140, 141, 142, 143, 144, 145, 146, 147, 148, 149,
+ 150, 151, 152, 153, 154, 155, 156, 157, 158, 159,
+ 160, 161, 162, 163, 164, 165, 166, 167, 168, 169,
+ 170, 171, 172, 173, 174, 175, 176, 177, 178, 179,
+ 180, 181, 182, 183, 184, 185, 186, 187, 188, 189,
+ 190, 191, 192, 193, 194, 195, 196, 197, 198, 199,
+ 200, 201, 202, 203, 204, 205, 206, 207, 208, 209,
+ 210, 211, 212, 213, 214, 215, 216, 217, 218, 219,
+ 220, 221, 222, 223, 224, 225, 226, 227, 228, 229,
+ 230, 231, 232, 233, 234, 235, 236, 237, 238, 239,
+ 240, 241, 242, 243, 244, 245, 246, 247, 248, 249,
+ 250, 251, 252, 253, 254, 255, 256, 257, 258, 259,
+ 260, 261, 262, 263, 264, 265, 266, 267, 268, 269,
+ 270, 271, 272, 273, 274, 275, 276, 277, 278, 279,
+ 280, 281, 282, 283, 284, 285, 286, 287, 288, 289,
+ 290, 291, 292, 293, 294, 295, 296, 297, 298, 299,
+ 300, 301, 302, 303, 304, 305, 306, 307, 308, 309,
+ 310, 311, 312, 313, 314, 315, 316, 317, 318, 319,
+ 320, 321, 322, 323, 324, 325, 326, -1, -1, 329,
+ 330, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, 348, 349,
+ -1, 351, -1, -1, -1, -1, -1, -1, -1, 359,
+ 360, 361, 362, 363, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, 373, 374, 375, 376, 377, -1, -1,
+ -1, 381, 382, 383, 384, 385, 386, 387, 388, 389,
+ 390, 391, 392, 393, 394, 395, 396, -1, 398, 399,
+ 400, 401, 402, 403, 404, 405, 406, 407, 408, 409,
+ 410, 411, 412, 413, 414, 415, 416, 417, 418, 419,
+ 420, 421, 422, 423, 424, 425, 426, 427, 428, 429,
+ 430, 431, 432, 433, 434, 435, 436, 437, 438, 439,
+ 440, 441, 442, 443, 444, 445, 446, 447, 448, 449,
+ 450, 451, 452, 453, 454, 455, 456, 457, 3, 4,
+ 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
+ 15, 16, 17, 18, 19, 20, 21, 22, 23, 24,
+ 25, 26, 27, 28, 29, 30, 31, 32, 33, 34,
+ 35, 36, 37, 38, 39, 40, 41, 42, 43, 44,
+ 45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
+ 55, 56, 57, 58, 59, 60, 61, 62, 63, 64,
+ 65, 66, 67, 68, 69, 70, 71, 72, 73, 74,
+ 75, 76, 77, 78, 79, 80, 81, 82, 83, 84,
+ 85, 86, 87, 88, 89, 90, 91, 92, 93, 94,
+ 95, 96, 97, 98, 99, 100, 101, 102, 103, 104,
+ 105, 106, 107, 108, 109, 110, 111, 112, 113, 114,
+ 115, 116, 117, 118, 119, 120, 121, 122, 123, 124,
+ 125, 126, 127, 128, 129, 130, 131, 132, 133, 134,
+ 135, 136, 137, 138, 139, 140, 141, 142, 143, 144,
+ 145, 146, 147, 148, 149, 150, 151, 152, 153, 154,
+ 155, 156, 157, 158, 159, 160, 161, 162, 163, 164,
+ 165, 166, 167, 168, 169, 170, 171, 172, 173, 174,
+ 175, 176, 177, 178, 179, 180, 181, 182, 183, 184,
+ 185, 186, 187, 188, 189, 190, 191, 192, 193, 194,
+ 195, 196, 197, 198, 199, 200, 201, 202, 203, 204,
+ 205, 206, 207, 208, 209, 210, 211, 212, 213, 214,
+ 215, 216, 217, 218, 219, 220, 221, 222, 223, 224,
+ 225, 226, 227, 228, 229, 230, 231, 232, 233, 234,
+ 235, 236, 237, 238, 239, 240, 241, 242, 243, 244,
+ 245, 246, 247, 248, 249, 250, 251, 252, 253, 254,
+ 255, 256, 257, 258, 259, 260, 261, 262, 263, 264,
+ 265, 266, 267, 268, 269, 270, 271, 272, 273, 274,
+ 275, 276, 277, 278, 279, 280, 281, 282, 283, 284,
+ 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,
+ 295, 296, 297, 298, 299, 300, 301, 302, 303, 304,
+ 305, 306, 307, 308, 309, 310, 311, 312, 313, 314,
+ 315, 316, 317, 318, 319, 320, 321, 322, 323, 324,
+ 325, 326, -1, -1, 329, 330, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, 408, 409, 410, 411, 412, 413, 414, 415,
- 416, 417, 418, 419, 420, 421, 422, -1, 424, 425,
- 426, 427, 428, 429, 430, 431, 432, 433, 434, 435,
- 436, 437, 438, 439, 440, 441, 442, 443, 444, 445,
- 446, 447, 448, 449, 450, 451, 452, 453, 454, 3,
- 4, 5, 6, 7, 8, 9, 10, 11, 12, 13,
- 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
- 24, 25, 26, 27, 28, 29, 30, 31, 32, 33,
- 34, 35, 36, 37, 38, 39, 40, 41, 42, 43,
- 44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
- 54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
- 64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
- 74, 75, 76, 77, 78, 79, 80, 81, 82, 83,
- 84, 85, 86, 87, 88, 89, 90, 91, 92, 93,
- 94, 95, 96, 97, 98, 99, 100, 101, 102, 103,
- 104, 105, 106, 107, 108, 109, 110, 111, 112, 113,
- 114, 115, 116, 117, 118, 119, 120, 121, 122, 123,
- 124, 125, 126, 127, 128, 129, 130, 131, 132, 133,
- 134, 135, 136, 137, 138, 139, 140, 141, 142, 143,
- 144, 145, 146, 147, 148, 149, 150, 151, 152, 153,
- 154, 155, 156, 157, 158, 159, 160, 161, 162, 163,
- 164, 165, 166, 167, 168, 169, 170, 171, 172, 173,
- 174, 175, 176, 177, 178, 179, 180, 181, 182, 183,
- 184, 185, 186, 187, 188, 189, 190, 191, 192, 193,
- 194, 195, 196, 197, 198, 199, 200, 201, 202, 203,
- 204, 205, 206, 207, 208, 209, 210, 211, 212, 213,
- 214, 215, 216, 217, 218, 219, 220, 221, 222, 223,
- 224, 225, 226, 227, 228, 229, 230, 231, 232, 233,
- 234, 235, 236, 237, 238, 239, 240, 241, 242, 243,
- 244, 245, 246, 247, 248, 249, 250, 251, 252, 253,
- 254, 255, 256, 257, 258, 259, 260, 261, 262, 263,
- 264, 265, 266, 267, 268, 269, 270, 271, 272, 273,
- 274, 275, 276, 277, 278, 279, 280, 281, 282, 283,
- 284, 285, 286, 287, 288, 289, 290, 291, 292, 293,
- 294, 295, 296, 297, 298, 299, 300, 301, 302, 303,
- 304, 305, 306, 307, 308, 309, 310, 311, 312, 313,
- 314, 315, 316, -1, -1, -1, 320, 321, 322, 323,
- 324, 325, 326, -1, -1, 329, 330, -1, -1, -1,
+ -1, -1, -1, 348, 349, -1, 351, -1, -1, -1,
+ -1, -1, -1, -1, 359, 360, 361, 362, 363, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, 373, 374,
+ 375, 376, 377, -1, -1, -1, 381, 382, 383, 384,
+ 385, 386, 387, 388, 389, 390, 391, 392, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, 348, 349, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, 360, 361, 362, 363,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, 373,
- 374, 375, 376, -1, -1, -1, -1, 381, 382, 383,
- 384, 385, 386, 387, 388, 389, 390, 391, 392, -1,
+ -1, -1, -1, 408, 409, 410, 411, 412, 413, 414,
+ 415, 416, 417, 418, 419, 420, 421, 422, -1, 424,
+ 425, 426, 427, 428, 429, 430, 431, 432, 433, 434,
+ 435, 436, 437, 438, 439, 440, 441, 442, 443, 444,
+ 445, 446, 447, 448, 449, 450, 451, 452, 453, 454,
+ 455, 456, 457, 3, 4, 5, 6, 7, 8, 9,
+ 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
+ 20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
+ 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
+ 40, 41, 42, 43, 44, 45, 46, 47, 48, 49,
+ 50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
+ 60, 61, 62, 63, 64, 65, 66, 67, 68, 69,
+ 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
+ 80, 81, 82, 83, 84, 85, 86, 87, 88, 89,
+ 90, 91, 92, 93, 94, 95, 96, 97, 98, 99,
+ 100, 101, 102, 103, 104, 105, 106, 107, 108, 109,
+ 110, 111, 112, 113, 114, 115, 116, 117, 118, 119,
+ 120, 121, 122, 123, 124, 125, 126, 127, 128, 129,
+ 130, 131, 132, 133, 134, 135, 136, 137, 138, 139,
+ 140, 141, 142, 143, 144, 145, 146, 147, 148, 149,
+ 150, 151, 152, 153, 154, 155, 156, 157, 158, 159,
+ 160, 161, 162, 163, 164, 165, 166, 167, 168, 169,
+ 170, 171, 172, 173, 174, 175, 176, 177, 178, 179,
+ 180, 181, 182, 183, 184, 185, 186, 187, 188, 189,
+ 190, 191, 192, 193, 194, 195, 196, 197, 198, 199,
+ 200, 201, 202, 203, 204, 205, 206, 207, 208, 209,
+ 210, 211, 212, 213, 214, 215, 216, 217, 218, 219,
+ 220, 221, 222, 223, 224, 225, 226, 227, 228, 229,
+ 230, 231, 232, 233, 234, 235, 236, 237, 238, 239,
+ 240, 241, 242, 243, 244, 245, 246, 247, 248, 249,
+ 250, 251, 252, 253, 254, 255, 256, 257, 258, 259,
+ 260, 261, 262, 263, 264, 265, 266, 267, 268, 269,
+ 270, 271, 272, 273, 274, 275, 276, 277, 278, 279,
+ 280, 281, 282, 283, 284, 285, 286, 287, 288, 289,
+ 290, 291, 292, 293, 294, 295, 296, 297, 298, 299,
+ 300, 301, 302, 303, 304, 305, 306, 307, 308, 309,
+ 310, 311, 312, 313, 314, 315, 316, -1, -1, -1,
+ 320, 321, 322, 323, 324, 325, 326, -1, -1, 329,
+ 330, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, 348, 349,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, 408, 409, 410, 411, 412, 413,
- 414, 415, 416, 417, 418, 419, 420, 421, 422, -1,
- 424, 425, 426, 427, 428, 429, 430, 431, 432, 433,
- 434, 435, 436, 437, 438, 439, 440, 441, 442, 443,
- 444, 445, 446, 447, 448, 449, 450, 451, 452, 453,
- 454, 3, 4, 5, 6, 7, 8, 9, 10, 11,
- 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 32, 33, 34, 35, 36, 37, 38, 39, 40, 41,
- 42, 43, 44, 45, 46, 47, 48, 49, 50, 51,
- 52, 53, 54, 55, 56, 57, 58, 59, 60, 61,
- 62, 63, 64, 65, 66, 67, 68, 69, 70, 71,
- 72, 73, 74, 75, 76, 77, 78, 79, 80, 81,
- 82, 83, 84, 85, 86, 87, 88, 89, 90, 91,
- 92, 93, 94, 95, 96, 97, 98, 99, 100, 101,
- 102, 103, 104, 105, 106, 107, 108, 109, 110, 111,
- 112, 113, 114, 115, 116, 117, 118, 119, 120, 121,
- 122, 123, 124, 125, 126, 127, 128, 129, 130, 131,
- 132, 133, 134, 135, 136, 137, 138, 139, 140, 141,
- 142, 143, 144, 145, 146, 147, 148, 149, 150, 151,
- 152, 153, 154, 155, 156, 157, 158, 159, 160, 161,
- 162, 163, 164, 165, 166, 167, 168, 169, 170, 171,
- 172, 173, 174, 175, 176, 177, 178, 179, 180, 181,
- 182, 183, 184, 185, 186, 187, 188, 189, 190, 191,
- 192, 193, 194, 195, 196, 197, 198, 199, 200, 201,
- 202, 203, 204, 205, 206, 207, 208, 209, 210, 211,
- 212, 213, 214, 215, 216, 217, 218, 219, 220, 221,
- 222, 223, 224, 225, 226, 227, 228, 229, 230, 231,
- 232, 233, 234, 235, 236, 237, 238, 239, 240, 241,
- 242, 243, 244, 245, 246, 247, 248, 249, 250, 251,
- 252, 253, 254, 255, 256, 257, 258, 259, 260, 261,
- 262, 263, 264, 265, 266, 267, 268, 269, 270, 271,
- 272, 273, 274, 275, 276, 277, 278, 279, 280, 281,
- 282, 283, 284, 285, 286, 287, 288, 289, 290, 291,
- 292, 293, 294, 295, 296, 297, 298, 299, 300, 301,
- 302, 303, 304, 305, 306, 307, 308, 309, 310, 311,
- 312, 313, 314, 315, 316, 317, 318, 319, 320, 321,
- 322, 323, 324, 325, 326, -1, -1, -1, -1, -1,
+ 360, 361, 362, 363, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, 373, 374, 375, 376, -1, -1, -1,
+ -1, 381, 382, 383, 384, 385, 386, 387, 388, 389,
+ 390, 391, 392, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, 408, 409,
+ 410, 411, 412, 413, 414, 415, 416, 417, 418, 419,
+ 420, 421, 422, -1, 424, 425, 426, 427, 428, 429,
+ 430, 431, 432, 433, 434, 435, 436, 437, 438, 439,
+ 440, 441, 442, 443, 444, 445, 446, 447, 448, 449,
+ 450, 451, 452, 453, 454, 455, 456, 457, 3, 4,
+ 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
+ 15, 16, 17, 18, 19, 20, 21, 22, 23, 24,
+ 25, 26, 27, 28, 29, 30, 31, 32, 33, 34,
+ 35, 36, 37, 38, 39, 40, 41, 42, 43, 44,
+ 45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
+ 55, 56, 57, 58, 59, 60, 61, 62, 63, 64,
+ 65, 66, 67, 68, 69, 70, 71, 72, 73, 74,
+ 75, 76, 77, 78, 79, 80, 81, 82, 83, 84,
+ 85, 86, 87, 88, 89, 90, 91, 92, 93, 94,
+ 95, 96, 97, 98, 99, 100, 101, 102, 103, 104,
+ 105, 106, 107, 108, 109, 110, 111, 112, 113, 114,
+ 115, 116, 117, 118, 119, 120, 121, 122, 123, 124,
+ 125, 126, 127, 128, 129, 130, 131, 132, 133, 134,
+ 135, 136, 137, 138, 139, 140, 141, 142, 143, 144,
+ 145, 146, 147, 148, 149, 150, 151, 152, 153, 154,
+ 155, 156, 157, 158, 159, 160, 161, 162, 163, 164,
+ 165, 166, 167, 168, 169, 170, 171, 172, 173, 174,
+ 175, 176, 177, 178, 179, 180, 181, 182, 183, 184,
+ 185, 186, 187, 188, 189, 190, 191, 192, 193, 194,
+ 195, 196, 197, 198, 199, 200, 201, 202, 203, 204,
+ 205, 206, 207, 208, 209, 210, 211, 212, 213, 214,
+ 215, 216, 217, 218, 219, 220, 221, 222, 223, 224,
+ 225, 226, 227, 228, 229, 230, 231, 232, 233, 234,
+ 235, 236, 237, 238, 239, 240, 241, 242, 243, 244,
+ 245, 246, 247, 248, 249, 250, 251, 252, 253, 254,
+ 255, 256, 257, 258, 259, 260, 261, 262, 263, 264,
+ 265, 266, 267, 268, 269, 270, 271, 272, 273, 274,
+ 275, 276, 277, 278, 279, 280, 281, 282, 283, 284,
+ 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,
+ 295, 296, 297, 298, 299, 300, 301, 302, 303, 304,
+ 305, 306, 307, 308, 309, 310, 311, 312, 313, 314,
+ 315, 316, 317, 318, 319, 320, 321, 322, 323, 324,
+ 325, 326, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, 351,
- -1, -1, -1, -1, -1, -1, -1, 359, -1, -1,
+ -1, -1, -1, -1, -1, -1, 351, -1, -1, -1,
+ -1, -1, -1, -1, 359, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, 373, 374,
+ 375, 376, 377, -1, -1, -1, -1, -1, -1, -1,
+ -1, 386, 387, 388, 389, 390, 391, 392, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, 373, 374, 375, 376, 377, -1, -1, -1, -1,
- -1, -1, -1, -1, 386, 387, 388, 389, 390, 391,
- 392, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, 408, 409, 410, 411,
- 412, 413, -1, -1, -1, -1, -1, -1, -1, -1,
- 422, -1, 424, 425, 426, 427, 428, 429, 430, 431,
- 432, 433, 434, 435, 436, 437, 438, 439, 440, 441,
- 442, 443, 444, 445, 446, 447, 448, 449, 450, 451,
- 452, 453, 454, 3, 4, 5, 6, 7, 8, 9,
+ -1, -1, -1, 408, 409, 410, 411, 412, 413, -1,
+ -1, -1, -1, -1, -1, -1, -1, 422, -1, 424,
+ 425, 426, 427, 428, 429, 430, 431, 432, 433, 434,
+ 435, 436, 437, 438, 439, 440, 441, 442, 443, 444,
+ 445, 446, 447, 448, 449, 450, 451, 452, 453, 454,
+ 455, 456, 457, 3, 4, 5, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
@@ -3480,58 +3495,286 @@ static const yytype_int16 yycheck[] =
-1, -1, 422, -1, 424, 425, 426, 427, 428, 429,
430, 431, 432, 433, 434, 435, 436, 437, 438, 439,
440, 441, 442, 443, 444, 445, 446, 447, 448, 449,
- 450, 451, 452, 453, 454, 3, 4, 5, 6, 7,
- 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
- 18, 19, 20, 21, 22, 23, 24, 25, 26, 27,
- 28, 29, 30, 31, 32, 33, 34, 35, 36, 37,
- 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
- 48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
- 58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
- 68, 69, 70, 71, 72, 73, 74, 75, 76, 77,
- 78, 79, 80, 81, 82, 83, 84, 85, 86, 87,
- 88, 89, 90, 91, 92, 93, 94, 95, 96, 97,
- 98, 99, 100, 101, 102, 103, 104, 105, 106, 107,
- 108, 109, 110, 111, 112, 113, 114, 115, 116, 117,
- 118, 119, 120, 121, 122, 123, 124, 125, 126, 127,
- 128, 129, 130, 131, 132, 133, 134, 135, 136, 137,
- 138, 139, 140, 141, 142, 143, 144, 145, 146, 147,
- 148, 149, 150, 151, 152, 153, 154, 155, 156, 157,
- 158, 159, 160, 161, 162, 163, 164, 165, 166, 167,
- 168, 169, 170, 171, 172, 173, 174, 175, 176, 177,
- 178, 179, 180, 181, 182, 183, 184, 185, 186, 187,
- 188, 189, 190, 191, 192, 193, 194, 195, 196, 197,
- 198, 199, 200, 201, 202, 203, 204, 205, 206, 207,
- 208, 209, 210, 211, 212, 213, 214, 215, 216, 217,
- 218, 219, 220, 221, 222, 223, 224, 225, 226, 227,
- 228, 229, 230, 231, 232, 233, 234, 235, 236, 237,
- 238, 239, 240, 241, 242, 243, 244, 245, 246, 247,
- 248, 249, 250, 251, 252, 253, 254, 255, 256, 257,
- 258, 259, 260, 261, 262, 263, 264, 265, 266, 267,
- 268, 269, 270, 271, 272, 273, 274, 275, 276, 277,
- 278, 279, 280, 281, 282, 283, 284, 285, 286, 287,
- 288, 289, 290, 291, 292, 293, 294, 295, 296, 297,
- 298, 299, 300, 301, 302, 303, 304, 305, 306, 307,
- 308, 309, 310, 311, 312, 313, 314, 315, 316, -1,
- -1, -1, 320, 321, 322, 323, 324, 325, 326, -1,
+ 450, 451, 452, 453, 454, 455, 456, 457, 3, 4,
+ 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
+ 15, 16, 17, 18, 19, 20, 21, 22, 23, 24,
+ 25, 26, 27, 28, 29, 30, 31, 32, 33, 34,
+ 35, 36, 37, 38, 39, 40, 41, 42, 43, 44,
+ 45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
+ 55, 56, 57, 58, 59, 60, 61, 62, 63, 64,
+ 65, 66, 67, 68, 69, 70, 71, 72, 73, 74,
+ 75, 76, 77, 78, 79, 80, 81, 82, 83, 84,
+ 85, 86, 87, 88, 89, 90, 91, 92, 93, 94,
+ 95, 96, 97, 98, 99, 100, 101, 102, 103, 104,
+ 105, 106, 107, 108, 109, 110, 111, 112, 113, 114,
+ 115, 116, 117, 118, 119, 120, 121, 122, 123, 124,
+ 125, 126, 127, 128, 129, 130, 131, 132, 133, 134,
+ 135, 136, 137, 138, 139, 140, 141, 142, 143, 144,
+ 145, 146, 147, 148, 149, 150, 151, 152, 153, 154,
+ 155, 156, 157, 158, 159, 160, 161, 162, 163, 164,
+ 165, 166, 167, 168, 169, 170, 171, 172, 173, 174,
+ 175, 176, 177, 178, 179, 180, 181, 182, 183, 184,
+ 185, 186, 187, 188, 189, 190, 191, 192, 193, 194,
+ 195, 196, 197, 198, 199, 200, 201, 202, 203, 204,
+ 205, 206, 207, 208, 209, 210, 211, 212, 213, 214,
+ 215, 216, 217, 218, 219, 220, 221, 222, 223, 224,
+ 225, 226, 227, 228, 229, 230, 231, 232, 233, 234,
+ 235, 236, 237, 238, 239, 240, 241, 242, 243, 244,
+ 245, 246, 247, 248, 249, 250, 251, 252, 253, 254,
+ 255, 256, 257, 258, 259, 260, 261, 262, 263, 264,
+ 265, 266, 267, 268, 269, 270, 271, 272, 273, 274,
+ 275, 276, 277, 278, 279, 280, 281, 282, 283, 284,
+ 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,
+ 295, 296, 297, 298, 299, 300, 301, 302, 303, 304,
+ 305, 306, 307, 308, 309, 310, 311, 312, 313, 314,
+ 315, 316, -1, -1, -1, 320, 321, 322, 323, 324,
+ 325, 326, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, 359, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, 373, 374,
+ 375, 376, -1, -1, -1, -1, -1, -1, -1, -1,
+ 385, 386, 387, 388, 389, 390, 391, 392, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, 359, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, 373, 374, 375, 376, -1,
- -1, -1, -1, -1, -1, -1, -1, 385, 386, 387,
- 388, 389, 390, 391, 392, -1, -1, -1, -1, -1,
+ -1, -1, -1, 408, 409, 410, 411, 412, 413, -1,
+ -1, -1, -1, -1, -1, -1, -1, 422, -1, 424,
+ 425, 426, 427, 428, 429, 430, 431, 432, 433, 434,
+ 435, 436, 437, 438, 439, 440, 441, 442, 443, 444,
+ 445, 446, 447, 448, 449, 450, 451, 452, 453, 454,
+ 455, 456, 457, 3, 4, 5, 6, 7, 8, 9,
+ 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
+ 20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
+ 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
+ 40, 41, 42, 43, 44, 45, 46, 47, 48, 49,
+ 50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
+ 60, 61, 62, 63, 64, 65, 66, 67, 68, 69,
+ 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
+ 80, 81, 82, 83, 84, 85, 86, 87, 88, 89,
+ 90, 91, 92, 93, 94, 95, 96, 97, 98, 99,
+ 100, 101, 102, 103, 104, 105, 106, 107, 108, 109,
+ 110, 111, 112, 113, 114, 115, 116, 117, 118, 119,
+ 120, 121, 122, 123, 124, 125, 126, 127, 128, 129,
+ 130, 131, 132, 133, 134, 135, 136, 137, 138, 139,
+ 140, 141, 142, 143, 144, 145, 146, 147, 148, 149,
+ 150, 151, 152, 153, 154, 155, 156, 157, 158, 159,
+ 160, 161, 162, 163, 164, 165, 166, 167, 168, 169,
+ 170, 171, 172, 173, 174, 175, 176, 177, 178, 179,
+ 180, 181, 182, 183, 184, 185, 186, 187, 188, 189,
+ 190, 191, 192, 193, 194, 195, 196, 197, 198, 199,
+ 200, 201, 202, 203, 204, 205, 206, 207, 208, 209,
+ 210, 211, 212, 213, 214, 215, 216, 217, 218, 219,
+ 220, 221, 222, 223, 224, 225, 226, 227, 228, 229,
+ 230, 231, 232, 233, 234, 235, 236, 237, 238, 239,
+ 240, 241, 242, 243, 244, 245, 246, 247, 248, 249,
+ 250, 251, 252, 253, 254, 255, 256, 257, 258, 259,
+ 260, 261, 262, 263, 264, 265, 266, 267, 268, 269,
+ 270, 271, 272, 273, 274, 275, 276, 277, 278, 279,
+ 280, 281, 282, 283, 284, 285, 286, 287, 288, 289,
+ 290, 291, 292, 293, 294, 295, 296, 297, 298, 299,
+ 300, 301, 302, 303, 304, 305, 306, 307, 308, 309,
+ 310, 311, 312, 313, 314, 315, 316, -1, -1, -1,
+ 320, 321, 322, 323, 324, 325, 326, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- 408, 409, 410, 411, 412, 413, -1, -1, -1, -1,
- -1, -1, -1, -1, 422, -1, 424, 425, 426, 427,
- 428, 429, 430, 431, 432, 433, 434, 435, 436, 437,
- 438, 439, 440, 441, 442, 443, 444, 445, 446, 447,
- 448, 449, 450, 451, 452, 453, 454, 3, 4, 5,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, 351, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, 373, 374, 375, 376, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, 386, 387, 388, 389,
+ 390, 391, 392, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, 408, 409,
+ 410, 411, 412, 413, -1, -1, -1, -1, -1, -1,
+ -1, -1, 422, -1, 424, 425, 426, 427, 428, 429,
+ 430, 431, 432, 433, 434, 435, 436, 437, 438, 439,
+ 440, 441, 442, 443, 444, 445, 446, 447, 448, 449,
+ 450, 451, 452, 453, 454, 455, 456, 457, 3, 4,
+ 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
+ 15, 16, 17, 18, 19, 20, 21, 22, 23, 24,
+ 25, 26, 27, 28, 29, 30, 31, 32, 33, 34,
+ 35, 36, 37, 38, 39, 40, 41, 42, 43, 44,
+ 45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
+ 55, 56, 57, 58, 59, 60, 61, 62, 63, 64,
+ 65, 66, 67, 68, 69, 70, 71, 72, 73, 74,
+ 75, 76, 77, 78, 79, 80, 81, 82, 83, 84,
+ 85, 86, 87, 88, 89, 90, 91, 92, 93, 94,
+ 95, 96, 97, 98, 99, 100, 101, 102, 103, 104,
+ 105, 106, 107, 108, 109, 110, 111, 112, 113, 114,
+ 115, 116, 117, 118, 119, 120, 121, 122, 123, 124,
+ 125, 126, 127, 128, 129, 130, 131, 132, 133, 134,
+ 135, 136, 137, 138, 139, 140, 141, 142, 143, 144,
+ 145, 146, 147, 148, 149, 150, 151, 152, 153, 154,
+ 155, 156, 157, 158, 159, 160, 161, 162, 163, 164,
+ 165, 166, 167, 168, 169, 170, 171, 172, 173, 174,
+ 175, 176, 177, 178, 179, 180, 181, 182, 183, 184,
+ 185, 186, 187, 188, 189, 190, 191, 192, 193, 194,
+ 195, 196, 197, 198, 199, 200, 201, 202, 203, 204,
+ 205, 206, 207, 208, 209, 210, 211, 212, 213, 214,
+ 215, 216, 217, 218, 219, 220, 221, 222, 223, 224,
+ 225, 226, 227, 228, 229, 230, 231, 232, 233, 234,
+ 235, 236, 237, 238, 239, 240, 241, 242, 243, 244,
+ 245, 246, 247, 248, 249, 250, 251, 252, 253, 254,
+ 255, 256, 257, 258, 259, 260, 261, 262, 263, 264,
+ 265, 266, 267, 268, 269, 270, 271, 272, 273, 274,
+ 275, 276, 277, 278, 279, 280, 281, 282, 283, 284,
+ 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,
+ 295, 296, 297, 298, 299, 300, 301, 302, 303, 304,
+ 305, 306, 307, 308, 309, 310, 311, 312, 313, 314,
+ 315, 316, -1, -1, -1, 320, 321, 322, 323, 324,
+ 325, 326, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, 354,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, 373, 374,
+ 375, 376, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, 386, 387, 388, 389, 390, 391, 392, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, 408, 409, 410, 411, 412, 413, -1,
+ -1, -1, -1, -1, -1, -1, -1, 422, -1, 424,
+ 425, 426, 427, 428, 429, 430, 431, 432, 433, 434,
+ 435, 436, 437, 438, 439, 440, 441, 442, 443, 444,
+ 445, 446, 447, 448, 449, 450, 451, 452, 453, 454,
+ 455, 456, 457, 3, 4, 5, 6, 7, 8, 9,
+ 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
+ 20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
+ 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
+ 40, 41, 42, 43, 44, 45, 46, 47, 48, 49,
+ 50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
+ 60, 61, 62, 63, 64, 65, 66, 67, 68, 69,
+ 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
+ 80, 81, 82, 83, 84, 85, 86, 87, 88, 89,
+ 90, 91, 92, 93, 94, 95, 96, 97, 98, 99,
+ 100, 101, 102, 103, 104, 105, 106, 107, 108, 109,
+ 110, 111, 112, 113, 114, 115, 116, 117, 118, 119,
+ 120, 121, 122, 123, 124, 125, 126, 127, 128, 129,
+ 130, 131, 132, 133, 134, 135, 136, 137, 138, 139,
+ 140, 141, 142, 143, 144, 145, 146, 147, 148, 149,
+ 150, 151, 152, 153, 154, 155, 156, 157, 158, 159,
+ 160, 161, 162, 163, 164, 165, 166, 167, 168, 169,
+ 170, 171, 172, 173, 174, 175, 176, 177, 178, 179,
+ 180, 181, 182, 183, 184, 185, 186, 187, 188, 189,
+ 190, 191, 192, 193, 194, 195, 196, 197, 198, 199,
+ 200, 201, 202, 203, 204, 205, 206, 207, 208, 209,
+ 210, 211, 212, 213, 214, 215, 216, 217, 218, 219,
+ 220, 221, 222, 223, 224, 225, 226, 227, 228, 229,
+ 230, 231, 232, 233, 234, 235, 236, 237, 238, 239,
+ 240, 241, 242, 243, 244, 245, 246, 247, 248, 249,
+ 250, 251, 252, 253, 254, 255, 256, 257, 258, 259,
+ 260, 261, 262, 263, 264, 265, 266, 267, 268, 269,
+ 270, 271, 272, 273, 274, 275, 276, 277, 278, 279,
+ 280, 281, 282, 283, 284, 285, 286, 287, 288, 289,
+ 290, 291, 292, 293, 294, 295, 296, 297, 298, 299,
+ 300, 301, 302, 303, 304, 305, 306, 307, 308, 309,
+ 310, 311, 312, 313, 314, 315, 316, -1, -1, -1,
+ 320, 321, 322, 323, 324, 325, 326, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, 354, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, 373, 374, 375, 376, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, 386, 387, 388, 389,
+ 390, 391, 392, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, 408, 409,
+ 410, 411, 412, 413, -1, -1, -1, -1, -1, -1,
+ -1, -1, 422, -1, 424, 425, 426, 427, 428, 429,
+ 430, 431, 432, 433, 434, 435, 436, 437, 438, 439,
+ 440, 441, 442, 443, 444, 445, 446, 447, 448, 449,
+ 450, 451, 452, 453, 454, 455, 456, 457, 3, 4,
+ 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
+ 15, 16, 17, 18, 19, 20, 21, 22, 23, 24,
+ 25, 26, 27, 28, 29, 30, 31, 32, 33, 34,
+ 35, 36, 37, 38, 39, 40, 41, 42, 43, 44,
+ 45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
+ 55, 56, 57, 58, 59, 60, 61, 62, 63, 64,
+ 65, 66, 67, 68, 69, 70, 71, 72, 73, 74,
+ 75, 76, 77, 78, 79, 80, 81, 82, 83, 84,
+ 85, 86, 87, 88, 89, 90, 91, 92, 93, 94,
+ 95, 96, 97, 98, 99, 100, 101, 102, 103, 104,
+ 105, 106, 107, 108, 109, 110, 111, 112, 113, 114,
+ 115, 116, 117, 118, 119, 120, 121, 122, 123, 124,
+ 125, 126, 127, 128, 129, 130, 131, 132, 133, 134,
+ 135, 136, 137, 138, 139, 140, 141, 142, 143, 144,
+ 145, 146, 147, 148, 149, 150, 151, 152, 153, 154,
+ 155, 156, 157, 158, 159, 160, 161, 162, 163, 164,
+ 165, 166, 167, 168, 169, 170, 171, 172, 173, 174,
+ 175, 176, 177, 178, 179, 180, 181, 182, 183, 184,
+ 185, 186, 187, 188, 189, 190, 191, 192, 193, 194,
+ 195, 196, 197, 198, 199, 200, 201, 202, 203, 204,
+ 205, 206, 207, 208, 209, 210, 211, 212, 213, 214,
+ 215, 216, 217, 218, 219, 220, 221, 222, 223, 224,
+ 225, 226, 227, 228, 229, 230, 231, 232, 233, 234,
+ 235, 236, 237, 238, 239, 240, 241, 242, 243, 244,
+ 245, 246, 247, 248, 249, 250, 251, 252, 253, 254,
+ 255, 256, 257, 258, 259, 260, 261, 262, 263, 264,
+ 265, 266, 267, 268, 269, 270, 271, 272, 273, 274,
+ 275, 276, 277, 278, 279, 280, 281, 282, 283, 284,
+ 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,
+ 295, 296, 297, 298, 299, 300, 301, 302, 303, 304,
+ 305, 306, 307, 308, 309, 310, 311, 312, 313, 314,
+ 315, 316, -1, -1, -1, 320, 321, 322, 323, 324,
+ 325, 326, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, 354,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, 373, 374,
+ 375, 376, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, 386, 387, 388, 389, 390, 391, 392, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, 408, 409, 410, 411, 412, 413, -1,
+ -1, -1, -1, -1, -1, -1, -1, 422, -1, 424,
+ 425, 426, 427, 428, 429, 430, 431, 432, 433, 434,
+ 435, 436, 437, 438, 439, 440, 441, 442, 443, 444,
+ 445, 446, 447, 448, 449, 450, 451, 452, 453, 454,
+ 455, 456, 457, 3, 4, 5, 6, 7, 8, 9,
+ 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
+ 20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
+ 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
+ 40, 41, 42, 43, 44, 45, 46, 47, 48, 49,
+ 50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
+ 60, 61, 62, 63, 64, 65, 66, 67, 68, 69,
+ 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
+ 80, 81, 82, 83, 84, 85, 86, 87, 88, 89,
+ 90, 91, 92, 93, 94, 95, 96, 97, 98, 99,
+ 100, 101, 102, 103, 104, 105, 106, 107, 108, 109,
+ 110, 111, 112, 113, 114, 115, 116, 117, 118, 119,
+ 120, 121, 122, 123, 124, 125, 126, 127, 128, 129,
+ 130, 131, 132, 133, 134, 135, 136, 137, 138, 139,
+ 140, 141, 142, 143, 144, 145, 146, 147, 148, 149,
+ 150, 151, 152, 153, 154, 155, 156, 157, 158, 159,
+ 160, 161, 162, 163, 164, 165, 166, 167, 168, 169,
+ 170, 171, 172, 173, 174, 175, 176, 177, 178, 179,
+ 180, 181, 182, 183, 184, 185, 186, 187, 188, 189,
+ 190, 191, 192, 193, 194, 195, 196, 197, 198, 199,
+ 200, 201, 202, 203, 204, 205, 206, 207, 208, 209,
+ 210, 211, 212, 213, 214, 215, 216, 217, 218, 219,
+ 220, 221, 222, 223, 224, 225, 226, 227, 228, 229,
+ 230, 231, 232, 233, 234, 235, 236, 237, 238, 239,
+ 240, 241, 242, 243, 244, 245, 246, 247, 248, 249,
+ 250, 251, 252, 253, 254, 255, 256, 257, 258, 259,
+ 260, 261, 262, 263, 264, 265, 266, 267, 268, 269,
+ 270, 271, 272, 273, 274, 275, 276, 277, 278, 279,
+ 280, 281, 282, 283, 284, 285, 286, 287, 288, 289,
+ 290, 291, 292, 293, 294, 295, 296, 297, 298, 299,
+ 300, 301, 302, 303, 304, 305, 306, 307, 308, 309,
+ 310, 311, 312, 313, 314, 315, 316, -1, -1, -1,
+ 320, 321, 322, 323, 324, 325, 326, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, 373, 374, 375, 376, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, 386, 387, 388, 389,
+ 390, 391, 392, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, 408, 409,
+ 410, 411, 412, 413, -1, -1, -1, -1, -1, -1,
+ -1, -1, 422, -1, 424, 425, 426, 427, 428, 429,
+ 430, 431, 432, 433, 434, 435, 436, 437, 438, 439,
+ 440, 441, 442, 443, 444, 445, 446, 447, 448, 449,
+ 450, 451, 452, 453, 454, 455, 456, 457, 4, 5,
6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
26, 27, 28, 29, 30, 31, 32, 33, 34, 35,
36, 37, 38, 39, 40, 41, 42, 43, 44, 45,
46, 47, 48, 49, 50, 51, 52, 53, 54, 55,
- 56, 57, 58, 59, 60, 61, 62, 63, 64, 65,
+ 56, 57, 58, 59, 60, -1, -1, 63, 64, 65,
66, 67, 68, 69, 70, 71, 72, 73, 74, 75,
76, 77, 78, 79, 80, 81, 82, 83, 84, 85,
86, 87, 88, 89, 90, 91, 92, 93, 94, 95,
@@ -3557,117 +3800,68 @@ static const yytype_int16 yycheck[] =
286, 287, 288, 289, 290, 291, 292, 293, 294, 295,
296, 297, 298, 299, 300, 301, 302, 303, 304, 305,
306, 307, 308, 309, 310, 311, 312, 313, 314, 315,
- 316, -1, -1, -1, 320, 321, 322, 323, 324, 325,
- 326, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, 351, -1, -1, -1, -1,
+ 316, -1, -1, -1, -1, -1, -1, 323, -1, -1,
+ -1, -1, -1, 329, 330, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, 373, 374, 375,
- 376, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- 386, 387, 388, 389, 390, 391, 392, -1, -1, -1,
+ -1, -1, 348, 349, -1, -1, -1, 353, 354, -1,
+ -1, -1, -1, -1, 360, 361, 362, 363, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, 408, 409, 410, 411, 412, 413, -1, -1,
- -1, -1, -1, -1, -1, -1, 422, -1, 424, 425,
- 426, 427, 428, 429, 430, 431, 432, 433, 434, 435,
- 436, 437, 438, 439, 440, 441, 442, 443, 444, 445,
- 446, 447, 448, 449, 450, 451, 452, 453, 454, 3,
- 4, 5, 6, 7, 8, 9, 10, 11, 12, 13,
- 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
- 24, 25, 26, 27, 28, 29, 30, 31, 32, 33,
- 34, 35, 36, 37, 38, 39, 40, 41, 42, 43,
- 44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
- 54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
- 64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
- 74, 75, 76, 77, 78, 79, 80, 81, 82, 83,
- 84, 85, 86, 87, 88, 89, 90, 91, 92, 93,
- 94, 95, 96, 97, 98, 99, 100, 101, 102, 103,
- 104, 105, 106, 107, 108, 109, 110, 111, 112, 113,
- 114, 115, 116, 117, 118, 119, 120, 121, 122, 123,
- 124, 125, 126, 127, 128, 129, 130, 131, 132, 133,
- 134, 135, 136, 137, 138, 139, 140, 141, 142, 143,
- 144, 145, 146, 147, 148, 149, 150, 151, 152, 153,
- 154, 155, 156, 157, 158, 159, 160, 161, 162, 163,
- 164, 165, 166, 167, 168, 169, 170, 171, 172, 173,
- 174, 175, 176, 177, 178, 179, 180, 181, 182, 183,
- 184, 185, 186, 187, 188, 189, 190, 191, 192, 193,
- 194, 195, 196, 197, 198, 199, 200, 201, 202, 203,
- 204, 205, 206, 207, 208, 209, 210, 211, 212, 213,
- 214, 215, 216, 217, 218, 219, 220, 221, 222, 223,
- 224, 225, 226, 227, 228, 229, 230, 231, 232, 233,
- 234, 235, 236, 237, 238, 239, 240, 241, 242, 243,
- 244, 245, 246, 247, 248, 249, 250, 251, 252, 253,
- 254, 255, 256, 257, 258, 259, 260, 261, 262, 263,
- 264, 265, 266, 267, 268, 269, 270, 271, 272, 273,
- 274, 275, 276, 277, 278, 279, 280, 281, 282, 283,
- 284, 285, 286, 287, 288, 289, 290, 291, 292, 293,
- 294, 295, 296, 297, 298, 299, 300, 301, 302, 303,
- 304, 305, 306, 307, 308, 309, 310, 311, 312, 313,
- 314, 315, 316, -1, -1, -1, 320, 321, 322, 323,
- 324, 325, 326, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, 381, 382, 383, 384, 385,
+ 386, -1, -1, -1, -1, 391, 392, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, 414, 415,
+ 416, 417, 418, 419, 420, 421, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- 354, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, 373,
- 374, 375, 376, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, 386, 387, 388, 389, 390, 391, 392, -1,
+ 436, 4, 5, 6, 7, 8, 9, 10, 11, 12,
+ 13, 14, 15, 16, 17, 18, 19, 20, 21, 22,
+ 23, 24, 25, 26, 27, 28, 29, 30, 31, 32,
+ 33, 34, 35, 36, 37, 38, 39, 40, 41, 42,
+ 43, 44, 45, 46, 47, 48, 49, 50, 51, 52,
+ 53, 54, 55, 56, 57, 58, 59, 60, -1, -1,
+ 63, 64, 65, 66, 67, 68, 69, 70, 71, 72,
+ 73, 74, 75, 76, 77, 78, 79, 80, 81, 82,
+ 83, 84, 85, 86, 87, 88, 89, 90, 91, 92,
+ 93, 94, 95, 96, 97, 98, 99, 100, 101, 102,
+ 103, 104, 105, 106, 107, 108, 109, 110, 111, 112,
+ 113, 114, 115, 116, 117, 118, 119, 120, 121, 122,
+ 123, 124, 125, 126, 127, 128, 129, 130, 131, 132,
+ 133, 134, 135, 136, 137, 138, 139, 140, 141, 142,
+ 143, 144, 145, 146, 147, 148, 149, 150, 151, 152,
+ 153, 154, 155, 156, 157, 158, 159, 160, 161, 162,
+ 163, 164, 165, 166, 167, 168, 169, 170, 171, 172,
+ 173, 174, 175, 176, 177, 178, 179, 180, 181, 182,
+ 183, 184, 185, 186, 187, 188, 189, 190, 191, 192,
+ 193, 194, 195, 196, 197, 198, 199, 200, 201, 202,
+ 203, 204, 205, 206, 207, 208, 209, 210, 211, 212,
+ 213, 214, 215, 216, 217, 218, 219, 220, 221, 222,
+ 223, 224, 225, 226, 227, 228, 229, 230, 231, 232,
+ 233, 234, 235, 236, 237, 238, 239, 240, 241, 242,
+ 243, 244, 245, 246, 247, 248, 249, 250, 251, 252,
+ 253, 254, 255, 256, 257, 258, 259, 260, 261, 262,
+ 263, 264, 265, 266, 267, 268, 269, 270, 271, 272,
+ 273, 274, 275, 276, 277, 278, 279, 280, 281, 282,
+ 283, 284, 285, 286, 287, 288, 289, 290, 291, 292,
+ 293, 294, 295, 296, 297, 298, 299, 300, 301, 302,
+ 303, 304, 305, 306, 307, 308, 309, 310, 311, 312,
+ 313, 314, 315, 316, -1, -1, -1, -1, -1, -1,
+ 323, -1, -1, -1, -1, -1, 329, 330, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, 408, 409, 410, 411, 412, 413,
- -1, -1, -1, -1, -1, -1, -1, -1, 422, -1,
- 424, 425, 426, 427, 428, 429, 430, 431, 432, 433,
- 434, 435, 436, 437, 438, 439, 440, 441, 442, 443,
- 444, 445, 446, 447, 448, 449, 450, 451, 452, 453,
- 454, 3, 4, 5, 6, 7, 8, 9, 10, 11,
- 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 32, 33, 34, 35, 36, 37, 38, 39, 40, 41,
- 42, 43, 44, 45, 46, 47, 48, 49, 50, 51,
- 52, 53, 54, 55, 56, 57, 58, 59, 60, 61,
- 62, 63, 64, 65, 66, 67, 68, 69, 70, 71,
- 72, 73, 74, 75, 76, 77, 78, 79, 80, 81,
- 82, 83, 84, 85, 86, 87, 88, 89, 90, 91,
- 92, 93, 94, 95, 96, 97, 98, 99, 100, 101,
- 102, 103, 104, 105, 106, 107, 108, 109, 110, 111,
- 112, 113, 114, 115, 116, 117, 118, 119, 120, 121,
- 122, 123, 124, 125, 126, 127, 128, 129, 130, 131,
- 132, 133, 134, 135, 136, 137, 138, 139, 140, 141,
- 142, 143, 144, 145, 146, 147, 148, 149, 150, 151,
- 152, 153, 154, 155, 156, 157, 158, 159, 160, 161,
- 162, 163, 164, 165, 166, 167, 168, 169, 170, 171,
- 172, 173, 174, 175, 176, 177, 178, 179, 180, 181,
- 182, 183, 184, 185, 186, 187, 188, 189, 190, 191,
- 192, 193, 194, 195, 196, 197, 198, 199, 200, 201,
- 202, 203, 204, 205, 206, 207, 208, 209, 210, 211,
- 212, 213, 214, 215, 216, 217, 218, 219, 220, 221,
- 222, 223, 224, 225, 226, 227, 228, 229, 230, 231,
- 232, 233, 234, 235, 236, 237, 238, 239, 240, 241,
- 242, 243, 244, 245, 246, 247, 248, 249, 250, 251,
- 252, 253, 254, 255, 256, 257, 258, 259, 260, 261,
- 262, 263, 264, 265, 266, 267, 268, 269, 270, 271,
- 272, 273, 274, 275, 276, 277, 278, 279, 280, 281,
- 282, 283, 284, 285, 286, 287, 288, 289, 290, 291,
- 292, 293, 294, 295, 296, 297, 298, 299, 300, 301,
- 302, 303, 304, 305, 306, 307, 308, 309, 310, 311,
- 312, 313, 314, 315, 316, -1, -1, -1, 320, 321,
- 322, 323, 324, 325, 326, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, 348, 349, -1, -1, -1,
+ 353, 354, -1, -1, -1, -1, -1, 360, 361, 362,
+ 363, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, 381, 382,
+ 383, 384, 385, 386, -1, -1, -1, -1, 391, 392,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, 354, -1, -1, -1, -1, -1, -1, -1,
+ -1, 414, 415, 416, 417, 418, 419, 420, 421, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, 373, 374, 375, 376, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, 386, 387, 388, 389, 390, 391,
- 392, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, 408, 409, 410, 411,
- 412, 413, -1, -1, -1, -1, -1, -1, -1, -1,
- 422, -1, 424, 425, 426, 427, 428, 429, 430, 431,
- 432, 433, 434, 435, 436, 437, 438, 439, 440, 441,
- 442, 443, 444, 445, 446, 447, 448, 449, 450, 451,
- 452, 453, 454, 3, 4, 5, 6, 7, 8, 9,
+ -1, -1, -1, 436, 4, 5, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47, 48, 49,
50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
- 60, 61, 62, 63, 64, 65, 66, 67, 68, 69,
+ 60, -1, -1, 63, 64, 65, 66, 67, 68, 69,
70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
80, 81, 82, 83, 84, 85, 86, 87, 88, 89,
90, 91, 92, 93, 94, 95, 96, 97, 98, 99,
@@ -3693,65 +3887,18 @@ static const yytype_int16 yycheck[] =
290, 291, 292, 293, 294, 295, 296, 297, 298, 299,
300, 301, 302, 303, 304, 305, 306, 307, 308, 309,
310, 311, 312, 313, 314, 315, 316, -1, -1, -1,
- 320, 321, 322, 323, 324, 325, 326, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, 354, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, 373, 374, 375, 376, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, 386, 387, 388, 389,
- 390, 391, 392, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, 408, 409,
- 410, 411, 412, 413, -1, -1, -1, -1, -1, -1,
- -1, -1, 422, -1, 424, 425, 426, 427, 428, 429,
- 430, 431, 432, 433, 434, 435, 436, 437, 438, 439,
- 440, 441, 442, 443, 444, 445, 446, 447, 448, 449,
- 450, 451, 452, 453, 454, 3, 4, 5, 6, 7,
- 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
- 18, 19, 20, 21, 22, 23, 24, 25, 26, 27,
- 28, 29, 30, 31, 32, 33, 34, 35, 36, 37,
- 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
- 48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
- 58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
- 68, 69, 70, 71, 72, 73, 74, 75, 76, 77,
- 78, 79, 80, 81, 82, 83, 84, 85, 86, 87,
- 88, 89, 90, 91, 92, 93, 94, 95, 96, 97,
- 98, 99, 100, 101, 102, 103, 104, 105, 106, 107,
- 108, 109, 110, 111, 112, 113, 114, 115, 116, 117,
- 118, 119, 120, 121, 122, 123, 124, 125, 126, 127,
- 128, 129, 130, 131, 132, 133, 134, 135, 136, 137,
- 138, 139, 140, 141, 142, 143, 144, 145, 146, 147,
- 148, 149, 150, 151, 152, 153, 154, 155, 156, 157,
- 158, 159, 160, 161, 162, 163, 164, 165, 166, 167,
- 168, 169, 170, 171, 172, 173, 174, 175, 176, 177,
- 178, 179, 180, 181, 182, 183, 184, 185, 186, 187,
- 188, 189, 190, 191, 192, 193, 194, 195, 196, 197,
- 198, 199, 200, 201, 202, 203, 204, 205, 206, 207,
- 208, 209, 210, 211, 212, 213, 214, 215, 216, 217,
- 218, 219, 220, 221, 222, 223, 224, 225, 226, 227,
- 228, 229, 230, 231, 232, 233, 234, 235, 236, 237,
- 238, 239, 240, 241, 242, 243, 244, 245, 246, 247,
- 248, 249, 250, 251, 252, 253, 254, 255, 256, 257,
- 258, 259, 260, 261, 262, 263, 264, 265, 266, 267,
- 268, 269, 270, 271, 272, 273, 274, 275, 276, 277,
- 278, 279, 280, 281, 282, 283, 284, 285, 286, 287,
- 288, 289, 290, 291, 292, 293, 294, 295, 296, 297,
- 298, 299, 300, 301, 302, 303, 304, 305, 306, 307,
- 308, 309, 310, 311, 312, 313, 314, 315, 316, -1,
- -1, -1, 320, 321, 322, 323, 324, 325, 326, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, 323, -1, -1, -1, -1, -1, 329,
+ 330, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, 348, 349,
+ -1, -1, 352, -1, -1, -1, -1, -1, -1, -1,
+ 360, 361, 362, 363, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, 373, 374, 375, 376, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, 386, 387,
- 388, 389, 390, 391, 392, -1, -1, -1, -1, -1,
+ -1, 381, 382, 383, 384, 385, 386, -1, -1, -1,
+ -1, 391, 392, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- 408, 409, 410, 411, 412, 413, -1, -1, -1, -1,
- -1, -1, -1, -1, 422, -1, 424, 425, 426, 427,
- 428, 429, 430, 431, 432, 433, 434, 435, 436, 437,
- 438, 439, 440, 441, 442, 443, 444, 445, 446, 447,
- 448, 449, 450, 451, 452, 453, 454, 4, 5, 6,
+ -1, -1, -1, -1, 414, 415, 416, 417, 418, 419,
+ 420, 421, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, 436, 4, 5, 6,
7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
17, 18, 19, 20, 21, 22, 23, 24, 25, 26,
27, 28, 29, 30, 31, 32, 33, 34, 35, 36,
@@ -3786,7 +3933,7 @@ static const yytype_int16 yycheck[] =
-1, -1, -1, -1, -1, -1, 323, -1, -1, -1,
-1, -1, 329, 330, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, 348, 349, -1, -1, -1, 353, 354, -1, -1,
+ -1, 348, 349, -1, -1, -1, 353, -1, -1, -1,
-1, -1, -1, 360, 361, 362, 363, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, 381, 382, 383, 384, 385, 386,
@@ -3829,8 +3976,8 @@ static const yytype_int16 yycheck[] =
314, 315, 316, -1, -1, -1, -1, -1, -1, 323,
-1, -1, -1, -1, -1, 329, 330, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, 348, 349, -1, -1, -1, 353,
- 354, -1, -1, -1, -1, -1, 360, 361, 362, 363,
+ -1, -1, -1, -1, 348, 349, -1, -1, 352, -1,
+ -1, -1, -1, -1, -1, -1, 360, 361, 362, 363,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, 381, 382, 383,
384, 385, 386, -1, -1, -1, -1, 391, 392, -1,
@@ -3873,7 +4020,7 @@ static const yytype_int16 yycheck[] =
-1, -1, 323, -1, -1, -1, -1, -1, 329, 330,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, 348, 349, -1,
- -1, 352, -1, -1, -1, -1, -1, -1, -1, 360,
+ -1, -1, -1, -1, -1, -1, -1, -1, 359, 360,
361, 362, 363, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
381, 382, 383, 384, 385, 386, -1, -1, -1, -1,
@@ -3916,7 +4063,7 @@ static const yytype_int16 yycheck[] =
-1, -1, -1, -1, -1, 323, -1, -1, -1, -1,
-1, 329, 330, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- 348, 349, -1, -1, -1, 353, -1, -1, -1, -1,
+ 348, 349, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, 360, 361, 362, 363, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, 381, 382, 383, 384, 385, 386, -1,
@@ -3959,7 +4106,7 @@ static const yytype_int16 yycheck[] =
315, 316, -1, -1, -1, -1, -1, -1, 323, -1,
-1, -1, -1, -1, 329, 330, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, 348, 349, -1, -1, 352, -1, -1,
+ -1, -1, -1, 348, 349, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, 360, 361, 362, 363, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, 381, 382, 383, 384,
@@ -4003,7 +4150,7 @@ static const yytype_int16 yycheck[] =
-1, 323, -1, -1, -1, -1, -1, 329, 330, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, 348, 349, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, 359, 360, 361,
+ -1, -1, -1, -1, -1, -1, -1, -1, 360, 361,
362, 363, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, 381,
382, 383, 384, 385, 386, -1, -1, -1, -1, 391,
@@ -4044,143 +4191,13 @@ static const yytype_int16 yycheck[] =
299, 300, 301, 302, 303, 304, 305, 306, 307, 308,
309, 310, 311, 312, 313, 314, 315, 316, -1, -1,
-1, -1, -1, -1, 323, -1, -1, -1, -1, -1,
- 329, 330, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, 348,
- 349, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, 360, 361, 362, 363, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, 381, 382, 383, 384, 385, 386, -1, -1,
- -1, -1, 391, 392, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, 414, 415, 416, 417, 418,
- 419, 420, 421, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, 436, 4, 5,
- 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
- 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
- 26, 27, 28, 29, 30, 31, 32, 33, 34, 35,
- 36, 37, 38, 39, 40, 41, 42, 43, 44, 45,
- 46, 47, 48, 49, 50, 51, 52, 53, 54, 55,
- 56, 57, 58, 59, 60, -1, -1, 63, 64, 65,
- 66, 67, 68, 69, 70, 71, 72, 73, 74, 75,
- 76, 77, 78, 79, 80, 81, 82, 83, 84, 85,
- 86, 87, 88, 89, 90, 91, 92, 93, 94, 95,
- 96, 97, 98, 99, 100, 101, 102, 103, 104, 105,
- 106, 107, 108, 109, 110, 111, 112, 113, 114, 115,
- 116, 117, 118, 119, 120, 121, 122, 123, 124, 125,
- 126, 127, 128, 129, 130, 131, 132, 133, 134, 135,
- 136, 137, 138, 139, 140, 141, 142, 143, 144, 145,
- 146, 147, 148, 149, 150, 151, 152, 153, 154, 155,
- 156, 157, 158, 159, 160, 161, 162, 163, 164, 165,
- 166, 167, 168, 169, 170, 171, 172, 173, 174, 175,
- 176, 177, 178, 179, 180, 181, 182, 183, 184, 185,
- 186, 187, 188, 189, 190, 191, 192, 193, 194, 195,
- 196, 197, 198, 199, 200, 201, 202, 203, 204, 205,
- 206, 207, 208, 209, 210, 211, 212, 213, 214, 215,
- 216, 217, 218, 219, 220, 221, 222, 223, 224, 225,
- 226, 227, 228, 229, 230, 231, 232, 233, 234, 235,
- 236, 237, 238, 239, 240, 241, 242, 243, 244, 245,
- 246, 247, 248, 249, 250, 251, 252, 253, 254, 255,
- 256, 257, 258, 259, 260, 261, 262, 263, 264, 265,
- 266, 267, 268, 269, 270, 271, 272, 273, 274, 275,
- 276, 277, 278, 279, 280, 281, 282, 283, 284, 285,
- 286, 287, 288, 289, 290, 291, 292, 293, 294, 295,
- 296, 297, 298, 299, 300, 301, 302, 303, 304, 305,
- 306, 307, 308, 309, 310, 311, 312, 313, 314, 315,
- 316, -1, -1, -1, -1, -1, -1, 323, -1, -1,
- -1, -1, -1, 329, 330, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, 348, 349, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, 360, 361, 362, 363, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, 381, 382, 383, 384, 385,
- 386, -1, -1, -1, -1, 391, 392, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, 414, 415,
- 416, 417, 418, 419, 420, 421, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- 436, 4, 5, 6, 7, 8, 9, 10, 11, 12,
- 13, 14, 15, 16, 17, 18, 19, 20, 21, 22,
- 23, 24, 25, 26, 27, 28, 29, 30, 31, 32,
- 33, 34, 35, 36, 37, 38, 39, 40, 41, 42,
- 43, 44, 45, 46, 47, 48, 49, 50, 51, 52,
- 53, 54, 55, 56, 57, 58, 59, 60, -1, -1,
- 63, 64, 65, 66, 67, 68, 69, 70, 71, 72,
- 73, 74, 75, 76, 77, 78, 79, 80, 81, 82,
- 83, 84, 85, 86, 87, 88, 89, 90, 91, 92,
- 93, 94, 95, 96, 97, 98, 99, 100, 101, 102,
- 103, 104, 105, 106, 107, 108, 109, 110, 111, 112,
- 113, 114, 115, 116, 117, 118, 119, 120, 121, 122,
- 123, 124, 125, 126, 127, 128, 129, 130, 131, 132,
- 133, 134, 135, 136, 137, 138, 139, 140, 141, 142,
- 143, 144, 145, 146, 147, 148, 149, 150, 151, 152,
- 153, 154, 155, 156, 157, 158, 159, 160, 161, 162,
- 163, 164, 165, 166, 167, 168, 169, 170, 171, 172,
- 173, 174, 175, 176, 177, 178, 179, 180, 181, 182,
- 183, 184, 185, 186, 187, 188, 189, 190, 191, 192,
- 193, 194, 195, 196, 197, 198, 199, 200, 201, 202,
- 203, 204, 205, 206, 207, 208, 209, 210, 211, 212,
- 213, 214, 215, 216, 217, 218, 219, 220, 221, 222,
- 223, 224, 225, 226, 227, 228, 229, 230, 231, 232,
- 233, 234, 235, 236, 237, 238, 239, 240, 241, 242,
- 243, 244, 245, 246, 247, 248, 249, 250, 251, 252,
- 253, 254, 255, 256, 257, 258, 259, 260, 261, 262,
- 263, 264, 265, 266, 267, 268, 269, 270, 271, 272,
- 273, 274, 275, 276, 277, 278, 279, 280, 281, 282,
- 283, 284, 285, 286, 287, 288, 289, 290, 291, 292,
- 293, 294, 295, 296, 297, 298, 299, 300, 301, 302,
- 303, 304, 305, 306, 307, 308, 309, 310, 311, 312,
- 313, 314, 315, 316, -1, -1, -1, -1, -1, -1,
- 323, -1, -1, -1, -1, -1, 329, 330, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, 348, 349, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, 360, 361, 362,
- 363, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, 381, 382,
- 383, 384, 385, 386, -1, -1, -1, -1, 391, 392,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, 414, 415, 416, 417, 418, 419, 420, 421, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, 436, 4, 5, 6, 7, 8, 9,
- 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
- 20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
- 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
- 40, 41, 42, 43, 44, 45, 46, 47, 48, 49,
- 50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
- 60, -1, -1, 63, 64, 65, 66, 67, 68, 69,
- 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
- 80, 81, 82, 83, 84, 85, 86, 87, 88, 89,
- 90, 91, 92, 93, 94, 95, 96, 97, 98, 99,
- 100, 101, 102, 103, 104, 105, 106, 107, 108, 109,
- 110, 111, 112, 113, 114, 115, 116, 117, 118, 119,
- 120, 121, 122, 123, 124, 125, 126, 127, 128, 129,
- 130, 131, 132, 133, 134, 135, 136, 137, 138, 139,
- 140, 141, 142, 143, 144, 145, 146, 147, 148, 149,
- 150, 151, 152, 153, 154, 155, 156, 157, 158, 159,
- 160, 161, 162, 163, 164, 165, 166, 167, 168, 169,
- 170, 171, 172, 173, 174, 175, 176, 177, 178, 179,
- 180, 181, 182, 183, 184, 185, 186, 187, 188, 189,
- 190, 191, 192, 193, 194, 195, 196, 197, 198, 199,
- 200, 201, 202, 203, 204, 205, 206, 207, 208, 209,
- 210, 211, 212, 213, 214, 215, 216, 217, 218, 219,
- 220, 221, 222, 223, 224, 225, 226, 227, 228, 229,
- 230, 231, 232, 233, 234, 235, 236, 237, 238, 239,
- 240, 241, 242, 243, 244, 245, 246, 247, 248, 249,
- 250, 251, 252, 253, 254, 255, 256, 257, 258, 259,
- 260, 261, 262, 263, 264, 265, 266, 267, 268, 269,
- 270, 271, 272, 273, 274, 275, 276, 277, 278, 279,
- 280, 281, 282, 283, 284, 285, 286, 287, 288, 289,
- 290, 291, 292, 293, 294, 295, 296, 297, 298, 299,
- 300, 301, 302, 303, 304, 305, 306, 307, 308, 309,
- 310, 311, 312, 313, 314, 315, 316, -1, -1, -1,
- -1, -1, -1, 323, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, 386, -1, -1, -1,
- -1, 391, 392
+ -1, -1, -1, -1, -1, -1, -1, 386, -1, -1,
+ -1, -1, 391, 392
};
/* YYSTOS[STATE-NUM] -- The (internal number of the) accessing
@@ -4224,136 +4241,137 @@ static const yytype_int16 yystos[] =
409, 410, 411, 412, 413, 422, 424, 425, 426, 427,
428, 429, 430, 431, 432, 433, 434, 435, 436, 437,
438, 439, 440, 441, 442, 443, 444, 445, 446, 447,
- 448, 449, 450, 451, 452, 453, 454, 486, 487, 490,
- 491, 492, 493, 497, 498, 499, 500, 501, 502, 505,
- 506, 507, 508, 509, 511, 516, 517, 518, 559, 560,
- 561, 563, 570, 574, 575, 580, 583, 349, 349, 349,
- 349, 349, 349, 349, 349, 351, 517, 353, 385, 349,
- 349, 359, 385, 359, 562, 350, 356, 494, 495, 496,
- 506, 511, 356, 359, 385, 359, 385, 507, 511, 367,
- 513, 514, 0, 560, 491, 499, 506, 359, 490, 385,
- 566, 567, 584, 585, 382, 385, 566, 382, 566, 382,
- 566, 382, 566, 382, 566, 566, 584, 382, 566, 385,
- 564, 565, 511, 520, 353, 385, 409, 503, 504, 385,
- 510, 351, 359, 512, 353, 538, 563, 495, 494, 496,
- 385, 385, 349, 358, 512, 353, 356, 359, 489, 329,
- 330, 348, 349, 360, 361, 362, 363, 381, 382, 383,
- 384, 385, 414, 415, 416, 417, 418, 419, 420, 421,
- 456, 457, 458, 460, 461, 462, 463, 464, 465, 466,
- 467, 468, 509, 511, 515, 512, 350, 385, 359, 358,
- 356, 350, 356, 350, 356, 358, 356, 356, 356, 350,
- 356, 356, 356, 356, 356, 356, 356, 350, 356, 350,
- 356, 349, 352, 356, 359, 506, 511, 521, 522, 519,
- 358, 350, 356, 350, 356, 352, 467, 469, 470, 471,
- 472, 473, 474, 475, 476, 477, 478, 479, 480, 351,
- 359, 353, 354, 359, 393, 394, 395, 396, 398, 399,
- 400, 401, 402, 403, 404, 405, 406, 407, 423, 467,
- 480, 482, 484, 486, 490, 509, 511, 527, 528, 529,
- 530, 531, 539, 540, 541, 542, 545, 546, 549, 550,
- 551, 558, 563, 512, 358, 512, 353, 482, 525, 358,
- 488, 385, 356, 359, 467, 467, 484, 329, 330, 351,
- 355, 350, 350, 356, 392, 482, 349, 467, 356, 368,
- 563, 348, 351, 382, 567, 584, 385, 585, 348, 381,
- 382, 383, 384, 571, 572, 382, 480, 485, 573, 382,
- 381, 382, 383, 384, 576, 577, 382, 485, 578, 382,
- 348, 579, 382, 584, 385, 485, 581, 582, 382, 485,
- 352, 565, 511, 385, 523, 524, 354, 522, 521, 485,
- 504, 385, 364, 365, 366, 361, 363, 327, 328, 331,
- 332, 367, 368, 333, 334, 371, 370, 369, 335, 337,
- 336, 372, 352, 352, 480, 354, 532, 349, 359, 359,
- 553, 349, 349, 359, 359, 484, 349, 484, 357, 359,
- 359, 359, 359, 338, 339, 340, 341, 342, 343, 344,
- 345, 346, 347, 358, 483, 356, 359, 354, 528, 542,
- 546, 551, 525, 358, 354, 525, 526, 525, 521, 385,
- 350, 459, 484, 385, 482, 467, 348, 382, 568, 569,
- 350, 358, 350, 356, 350, 356, 350, 356, 356, 350,
- 356, 350, 356, 350, 356, 356, 350, 356, 356, 350,
- 356, 350, 356, 350, 350, 523, 512, 356, 359, 354,
- 467, 467, 467, 469, 469, 470, 470, 471, 471, 471,
- 471, 472, 472, 473, 474, 475, 476, 477, 478, 481,
- 352, 539, 552, 528, 554, 484, 359, 484, 357, 482,
- 482, 525, 354, 356, 354, 352, 352, 356, 352, 356,
- 572, 571, 485, 573, 577, 576, 485, 578, 348, 579,
- 581, 582, 359, 524, 484, 533, 484, 499, 544, 393,
- 527, 540, 555, 350, 350, 354, 525, 348, 382, 350,
- 350, 350, 350, 350, 350, 357, 354, 385, 350, 349,
- 544, 556, 557, 535, 536, 537, 543, 547, 482, 358,
- 529, 534, 538, 484, 359, 350, 397, 531, 529, 353,
- 525, 350, 484, 534, 535, 539, 548, 359, 354
+ 448, 449, 450, 451, 452, 453, 454, 455, 456, 457,
+ 489, 490, 493, 494, 495, 496, 500, 501, 502, 503,
+ 504, 505, 508, 509, 510, 511, 512, 514, 519, 520,
+ 521, 562, 563, 564, 566, 573, 577, 578, 583, 586,
+ 349, 349, 349, 349, 349, 349, 349, 349, 351, 520,
+ 353, 385, 349, 349, 359, 385, 359, 565, 350, 356,
+ 497, 498, 499, 509, 514, 356, 359, 385, 359, 385,
+ 510, 514, 367, 516, 517, 0, 563, 494, 502, 509,
+ 359, 493, 385, 569, 570, 587, 588, 382, 385, 569,
+ 382, 569, 382, 569, 382, 569, 382, 569, 569, 587,
+ 382, 569, 385, 567, 568, 514, 523, 353, 385, 409,
+ 506, 507, 385, 513, 351, 359, 515, 353, 541, 566,
+ 498, 497, 499, 385, 385, 349, 358, 515, 353, 356,
+ 359, 492, 329, 330, 348, 349, 360, 361, 362, 363,
+ 381, 382, 383, 384, 385, 414, 415, 416, 417, 418,
+ 419, 420, 421, 459, 460, 461, 463, 464, 465, 466,
+ 467, 468, 469, 470, 471, 512, 514, 518, 515, 350,
+ 385, 359, 358, 356, 350, 356, 350, 356, 358, 356,
+ 356, 356, 350, 356, 356, 356, 356, 356, 356, 356,
+ 350, 356, 350, 356, 349, 352, 356, 359, 509, 514,
+ 524, 525, 522, 358, 350, 356, 350, 356, 352, 470,
+ 472, 473, 474, 475, 476, 477, 478, 479, 480, 481,
+ 482, 483, 351, 359, 353, 354, 359, 393, 394, 395,
+ 396, 398, 399, 400, 401, 402, 403, 404, 405, 406,
+ 407, 423, 470, 483, 485, 487, 489, 493, 512, 514,
+ 530, 531, 532, 533, 534, 542, 543, 544, 545, 548,
+ 549, 552, 553, 554, 561, 566, 515, 358, 515, 353,
+ 485, 528, 358, 491, 385, 356, 359, 470, 470, 487,
+ 329, 330, 351, 355, 350, 350, 356, 392, 485, 349,
+ 470, 356, 368, 566, 348, 351, 382, 570, 587, 385,
+ 588, 348, 381, 382, 383, 384, 574, 575, 382, 483,
+ 488, 576, 382, 381, 382, 383, 384, 579, 580, 382,
+ 488, 581, 382, 348, 582, 382, 587, 385, 488, 584,
+ 585, 382, 488, 352, 568, 514, 385, 526, 527, 354,
+ 525, 524, 488, 507, 385, 364, 365, 366, 361, 363,
+ 327, 328, 331, 332, 367, 368, 333, 334, 371, 370,
+ 369, 335, 337, 336, 372, 352, 352, 483, 354, 535,
+ 349, 359, 359, 556, 349, 349, 359, 359, 487, 349,
+ 487, 357, 359, 359, 359, 359, 338, 339, 340, 341,
+ 342, 343, 344, 345, 346, 347, 358, 486, 356, 359,
+ 354, 531, 545, 549, 554, 528, 358, 354, 528, 529,
+ 528, 524, 385, 350, 462, 487, 385, 485, 470, 348,
+ 382, 571, 572, 350, 358, 350, 356, 350, 356, 350,
+ 356, 356, 350, 356, 350, 356, 350, 356, 356, 350,
+ 356, 356, 350, 356, 350, 356, 350, 350, 526, 515,
+ 356, 359, 354, 470, 470, 470, 472, 472, 473, 473,
+ 474, 474, 474, 474, 475, 475, 476, 477, 478, 479,
+ 480, 481, 484, 352, 542, 555, 531, 557, 487, 359,
+ 487, 357, 485, 485, 528, 354, 356, 354, 352, 352,
+ 356, 352, 356, 575, 574, 488, 576, 580, 579, 488,
+ 581, 348, 582, 584, 585, 359, 527, 487, 536, 487,
+ 502, 547, 393, 530, 543, 558, 350, 350, 354, 528,
+ 348, 382, 350, 350, 350, 350, 350, 350, 357, 354,
+ 385, 350, 349, 547, 559, 560, 538, 539, 540, 546,
+ 550, 485, 358, 532, 537, 541, 487, 359, 350, 397,
+ 534, 532, 353, 528, 350, 487, 537, 538, 542, 551,
+ 359, 354
};
/* YYR1[YYN] -- Symbol number of symbol that rule YYN derives. */
static const yytype_int16 yyr1[] =
{
- 0, 455, 456, 457, 457, 457, 457, 457, 457, 457,
- 457, 457, 457, 457, 457, 457, 457, 457, 458, 458,
- 458, 458, 458, 458, 459, 460, 461, 462, 462, 463,
- 463, 464, 464, 465, 466, 466, 466, 467, 467, 467,
- 467, 468, 468, 468, 468, 469, 469, 469, 469, 470,
- 470, 470, 471, 471, 471, 472, 472, 472, 472, 472,
- 473, 473, 473, 474, 474, 475, 475, 476, 476, 477,
- 477, 478, 478, 479, 479, 480, 481, 480, 482, 482,
- 483, 483, 483, 483, 483, 483, 483, 483, 483, 483,
- 483, 484, 484, 485, 486, 486, 486, 486, 486, 486,
- 486, 486, 486, 486, 486, 488, 487, 489, 489, 490,
- 490, 490, 490, 491, 491, 492, 492, 493, 494, 494,
- 495, 495, 495, 495, 496, 497, 497, 497, 497, 497,
- 498, 498, 498, 498, 498, 499, 499, 500, 501, 501,
- 501, 501, 501, 501, 501, 501, 502, 503, 503, 504,
- 504, 504, 505, 506, 506, 507, 507, 507, 507, 507,
- 507, 507, 507, 507, 507, 507, 508, 508, 508, 508,
- 508, 508, 508, 508, 508, 508, 508, 508, 508, 508,
- 508, 508, 508, 508, 508, 508, 508, 508, 508, 508,
- 508, 508, 508, 508, 508, 508, 508, 508, 508, 508,
- 508, 509, 510, 510, 511, 511, 512, 512, 512, 512,
- 513, 513, 514, 515, 515, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 516, 516, 516, 516, 516, 516, 516, 516, 516, 516,
- 517, 517, 517, 519, 518, 520, 518, 521, 521, 522,
- 522, 523, 523, 524, 524, 525, 525, 525, 525, 526,
- 526, 527, 528, 528, 529, 529, 529, 529, 529, 529,
- 529, 529, 530, 531, 532, 533, 531, 534, 534, 536,
- 535, 537, 535, 538, 538, 539, 539, 540, 540, 541,
- 541, 542, 543, 543, 544, 544, 545, 545, 547, 546,
- 548, 548, 549, 549, 550, 550, 552, 551, 553, 551,
- 554, 551, 555, 555, 556, 556, 557, 557, 558, 558,
- 558, 558, 558, 558, 558, 558, 559, 559, 560, 560,
- 560, 562, 561, 563, 564, 564, 565, 565, 566, 566,
- 567, 567, 568, 568, 569, 569, 570, 570, 570, 570,
- 570, 570, 571, 571, 572, 572, 572, 572, 572, 573,
- 573, 574, 574, 575, 575, 575, 575, 575, 575, 575,
- 575, 576, 576, 577, 577, 577, 577, 578, 578, 579,
- 579, 580, 580, 580, 580, 581, 581, 582, 583, 583,
- 584, 584, 585, 585
+ 0, 458, 459, 460, 460, 460, 460, 460, 460, 460,
+ 460, 460, 460, 460, 460, 460, 460, 460, 461, 461,
+ 461, 461, 461, 461, 462, 463, 464, 465, 465, 466,
+ 466, 467, 467, 468, 469, 469, 469, 470, 470, 470,
+ 470, 471, 471, 471, 471, 472, 472, 472, 472, 473,
+ 473, 473, 474, 474, 474, 475, 475, 475, 475, 475,
+ 476, 476, 476, 477, 477, 478, 478, 479, 479, 480,
+ 480, 481, 481, 482, 482, 483, 484, 483, 485, 485,
+ 486, 486, 486, 486, 486, 486, 486, 486, 486, 486,
+ 486, 487, 487, 488, 489, 489, 489, 489, 489, 489,
+ 489, 489, 489, 489, 489, 491, 490, 492, 492, 493,
+ 493, 493, 493, 494, 494, 495, 495, 496, 497, 497,
+ 498, 498, 498, 498, 499, 500, 500, 500, 500, 500,
+ 501, 501, 501, 501, 501, 502, 502, 503, 504, 504,
+ 504, 504, 504, 504, 504, 504, 504, 504, 505, 506,
+ 506, 507, 507, 507, 508, 509, 509, 510, 510, 510,
+ 510, 510, 510, 510, 510, 510, 510, 510, 511, 511,
+ 511, 511, 511, 511, 511, 511, 511, 511, 511, 511,
+ 511, 511, 511, 511, 511, 511, 511, 511, 511, 511,
+ 511, 511, 511, 511, 511, 511, 511, 511, 511, 511,
+ 511, 511, 511, 511, 512, 513, 513, 514, 514, 515,
+ 515, 515, 515, 516, 516, 517, 518, 518, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 519, 519, 519, 519, 519, 519, 519,
+ 519, 519, 519, 520, 520, 520, 522, 521, 523, 521,
+ 524, 524, 525, 525, 526, 526, 527, 527, 528, 528,
+ 528, 528, 529, 529, 530, 531, 531, 532, 532, 532,
+ 532, 532, 532, 532, 532, 533, 534, 535, 536, 534,
+ 537, 537, 539, 538, 540, 538, 541, 541, 542, 542,
+ 543, 543, 544, 544, 545, 546, 546, 547, 547, 548,
+ 548, 550, 549, 551, 551, 552, 552, 553, 553, 555,
+ 554, 556, 554, 557, 554, 558, 558, 559, 559, 560,
+ 560, 561, 561, 561, 561, 561, 561, 561, 561, 562,
+ 562, 563, 563, 563, 565, 564, 566, 567, 567, 568,
+ 568, 569, 569, 570, 570, 571, 571, 572, 572, 573,
+ 573, 573, 573, 573, 573, 574, 574, 575, 575, 575,
+ 575, 575, 576, 576, 577, 577, 578, 578, 578, 578,
+ 578, 578, 578, 578, 579, 579, 580, 580, 580, 580,
+ 581, 581, 582, 582, 583, 583, 583, 583, 584, 584,
+ 585, 586, 586, 587, 587, 588, 588
};
/* YYR2[YYN] -- Number of symbols on the right hand side of rule YYN. */
@@ -4373,14 +4391,14 @@ static const yytype_int8 yyr2[] =
3, 3, 4, 1, 1, 2, 3, 3, 2, 3,
2, 1, 2, 1, 1, 1, 3, 4, 6, 5,
1, 2, 3, 5, 4, 1, 2, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 4, 1, 3, 1,
- 3, 1, 1, 1, 2, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 4, 1,
+ 3, 1, 3, 1, 1, 1, 2, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 4, 1, 1, 3, 2, 3, 2, 3, 3, 4,
- 1, 0, 3, 1, 3, 1, 1, 1, 1, 1,
+ 1, 1, 4, 1, 1, 1, 3, 2, 3, 2,
+ 3, 3, 4, 1, 0, 3, 1, 3, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
@@ -4412,22 +4430,22 @@ static const yytype_int8 yyr2[] =
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 0, 6, 0, 5, 1, 2, 3,
- 4, 1, 3, 1, 2, 1, 3, 4, 2, 1,
- 3, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 2, 2, 0, 0, 5, 1, 1, 0,
- 2, 0, 2, 2, 3, 1, 2, 1, 2, 1,
- 2, 5, 3, 1, 1, 4, 1, 2, 0, 8,
- 0, 1, 3, 2, 1, 2, 0, 6, 0, 8,
- 0, 7, 1, 1, 1, 0, 2, 3, 2, 2,
- 2, 3, 2, 2, 2, 2, 1, 2, 1, 1,
- 1, 0, 3, 5, 1, 3, 1, 4, 1, 3,
- 5, 5, 1, 3, 1, 3, 4, 6, 6, 8,
- 6, 8, 1, 3, 1, 1, 1, 1, 1, 1,
- 3, 4, 6, 4, 6, 6, 8, 6, 8, 6,
- 8, 1, 3, 1, 1, 1, 1, 1, 3, 1,
- 3, 6, 8, 4, 6, 1, 3, 1, 4, 6,
- 1, 3, 3, 3
+ 1, 1, 1, 1, 1, 1, 0, 6, 0, 5,
+ 1, 2, 3, 4, 1, 3, 1, 2, 1, 3,
+ 4, 2, 1, 3, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 2, 2, 0, 0, 5,
+ 1, 1, 0, 2, 0, 2, 2, 3, 1, 2,
+ 1, 2, 1, 2, 5, 3, 1, 1, 4, 1,
+ 2, 0, 8, 0, 1, 3, 2, 1, 2, 0,
+ 6, 0, 8, 0, 7, 1, 1, 1, 0, 2,
+ 3, 2, 2, 2, 3, 2, 2, 2, 2, 1,
+ 2, 1, 1, 1, 0, 3, 5, 1, 3, 1,
+ 4, 1, 3, 5, 5, 1, 3, 1, 3, 4,
+ 6, 6, 8, 6, 8, 1, 3, 1, 1, 1,
+ 1, 1, 1, 3, 4, 6, 4, 6, 6, 8,
+ 6, 8, 6, 8, 1, 3, 1, 1, 1, 1,
+ 1, 3, 1, 3, 6, 8, 4, 6, 1, 3,
+ 1, 4, 6, 1, 3, 3, 3
};
@@ -5177,7 +5195,7 @@ yyreduce:
{
(yyval.interm.intermTypedNode) = parseContext.handleVariable((yyvsp[0].lex).loc, (yyvsp[0].lex).symbol, (yyvsp[0].lex).string);
}
-#line 5181 "MachineIndependent/glslang_tab.cpp"
+#line 5199 "MachineIndependent/glslang_tab.cpp"
break;
case 3: /* primary_expression: variable_identifier */
@@ -5185,7 +5203,7 @@ yyreduce:
{
(yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode);
}
-#line 5189 "MachineIndependent/glslang_tab.cpp"
+#line 5207 "MachineIndependent/glslang_tab.cpp"
break;
case 4: /* primary_expression: LEFT_PAREN expression RIGHT_PAREN */
@@ -5195,7 +5213,7 @@ yyreduce:
if ((yyval.interm.intermTypedNode)->getAsConstantUnion())
(yyval.interm.intermTypedNode)->getAsConstantUnion()->setExpression();
}
-#line 5199 "MachineIndependent/glslang_tab.cpp"
+#line 5217 "MachineIndependent/glslang_tab.cpp"
break;
case 5: /* primary_expression: FLOATCONSTANT */
@@ -5203,7 +5221,7 @@ yyreduce:
{
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).d, EbtFloat, (yyvsp[0].lex).loc, true);
}
-#line 5207 "MachineIndependent/glslang_tab.cpp"
+#line 5225 "MachineIndependent/glslang_tab.cpp"
break;
case 6: /* primary_expression: INTCONSTANT */
@@ -5211,7 +5229,7 @@ yyreduce:
{
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).i, (yyvsp[0].lex).loc, true);
}
-#line 5215 "MachineIndependent/glslang_tab.cpp"
+#line 5233 "MachineIndependent/glslang_tab.cpp"
break;
case 7: /* primary_expression: UINTCONSTANT */
@@ -5220,7 +5238,7 @@ yyreduce:
parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "unsigned literal");
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).u, (yyvsp[0].lex).loc, true);
}
-#line 5224 "MachineIndependent/glslang_tab.cpp"
+#line 5242 "MachineIndependent/glslang_tab.cpp"
break;
case 8: /* primary_expression: BOOLCONSTANT */
@@ -5228,7 +5246,7 @@ yyreduce:
{
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).b, (yyvsp[0].lex).loc, true);
}
-#line 5232 "MachineIndependent/glslang_tab.cpp"
+#line 5250 "MachineIndependent/glslang_tab.cpp"
break;
case 9: /* primary_expression: STRING_LITERAL */
@@ -5236,7 +5254,7 @@ yyreduce:
{
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).string, (yyvsp[0].lex).loc, true);
}
-#line 5240 "MachineIndependent/glslang_tab.cpp"
+#line 5258 "MachineIndependent/glslang_tab.cpp"
break;
case 10: /* primary_expression: INT32CONSTANT */
@@ -5245,7 +5263,7 @@ yyreduce:
parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit signed literal");
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).i, (yyvsp[0].lex).loc, true);
}
-#line 5249 "MachineIndependent/glslang_tab.cpp"
+#line 5267 "MachineIndependent/glslang_tab.cpp"
break;
case 11: /* primary_expression: UINT32CONSTANT */
@@ -5254,7 +5272,7 @@ yyreduce:
parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit signed literal");
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).u, (yyvsp[0].lex).loc, true);
}
-#line 5258 "MachineIndependent/glslang_tab.cpp"
+#line 5276 "MachineIndependent/glslang_tab.cpp"
break;
case 12: /* primary_expression: INT64CONSTANT */
@@ -5263,7 +5281,7 @@ yyreduce:
parseContext.int64Check((yyvsp[0].lex).loc, "64-bit integer literal");
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).i64, (yyvsp[0].lex).loc, true);
}
-#line 5267 "MachineIndependent/glslang_tab.cpp"
+#line 5285 "MachineIndependent/glslang_tab.cpp"
break;
case 13: /* primary_expression: UINT64CONSTANT */
@@ -5272,7 +5290,7 @@ yyreduce:
parseContext.int64Check((yyvsp[0].lex).loc, "64-bit unsigned integer literal");
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).u64, (yyvsp[0].lex).loc, true);
}
-#line 5276 "MachineIndependent/glslang_tab.cpp"
+#line 5294 "MachineIndependent/glslang_tab.cpp"
break;
case 14: /* primary_expression: INT16CONSTANT */
@@ -5281,7 +5299,7 @@ yyreduce:
parseContext.explicitInt16Check((yyvsp[0].lex).loc, "16-bit integer literal");
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((short)(yyvsp[0].lex).i, (yyvsp[0].lex).loc, true);
}
-#line 5285 "MachineIndependent/glslang_tab.cpp"
+#line 5303 "MachineIndependent/glslang_tab.cpp"
break;
case 15: /* primary_expression: UINT16CONSTANT */
@@ -5290,7 +5308,7 @@ yyreduce:
parseContext.explicitInt16Check((yyvsp[0].lex).loc, "16-bit unsigned integer literal");
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((unsigned short)(yyvsp[0].lex).u, (yyvsp[0].lex).loc, true);
}
-#line 5294 "MachineIndependent/glslang_tab.cpp"
+#line 5312 "MachineIndependent/glslang_tab.cpp"
break;
case 16: /* primary_expression: DOUBLECONSTANT */
@@ -5301,7 +5319,7 @@ yyreduce:
parseContext.doubleCheck((yyvsp[0].lex).loc, "double literal");
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).d, EbtDouble, (yyvsp[0].lex).loc, true);
}
-#line 5305 "MachineIndependent/glslang_tab.cpp"
+#line 5323 "MachineIndependent/glslang_tab.cpp"
break;
case 17: /* primary_expression: FLOAT16CONSTANT */
@@ -5310,7 +5328,7 @@ yyreduce:
parseContext.float16Check((yyvsp[0].lex).loc, "half float literal");
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).d, EbtFloat16, (yyvsp[0].lex).loc, true);
}
-#line 5314 "MachineIndependent/glslang_tab.cpp"
+#line 5332 "MachineIndependent/glslang_tab.cpp"
break;
case 18: /* postfix_expression: primary_expression */
@@ -5318,7 +5336,7 @@ yyreduce:
{
(yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode);
}
-#line 5322 "MachineIndependent/glslang_tab.cpp"
+#line 5340 "MachineIndependent/glslang_tab.cpp"
break;
case 19: /* postfix_expression: postfix_expression LEFT_BRACKET integer_expression RIGHT_BRACKET */
@@ -5326,7 +5344,7 @@ yyreduce:
{
(yyval.interm.intermTypedNode) = parseContext.handleBracketDereference((yyvsp[-2].lex).loc, (yyvsp[-3].interm.intermTypedNode), (yyvsp[-1].interm.intermTypedNode));
}
-#line 5330 "MachineIndependent/glslang_tab.cpp"
+#line 5348 "MachineIndependent/glslang_tab.cpp"
break;
case 20: /* postfix_expression: function_call */
@@ -5334,7 +5352,7 @@ yyreduce:
{
(yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode);
}
-#line 5338 "MachineIndependent/glslang_tab.cpp"
+#line 5356 "MachineIndependent/glslang_tab.cpp"
break;
case 21: /* postfix_expression: postfix_expression DOT IDENTIFIER */
@@ -5342,7 +5360,7 @@ yyreduce:
{
(yyval.interm.intermTypedNode) = parseContext.handleDotDereference((yyvsp[0].lex).loc, (yyvsp[-2].interm.intermTypedNode), *(yyvsp[0].lex).string);
}
-#line 5346 "MachineIndependent/glslang_tab.cpp"
+#line 5364 "MachineIndependent/glslang_tab.cpp"
break;
case 22: /* postfix_expression: postfix_expression INC_OP */
@@ -5352,7 +5370,7 @@ yyreduce:
parseContext.lValueErrorCheck((yyvsp[0].lex).loc, "++", (yyvsp[-1].interm.intermTypedNode));
(yyval.interm.intermTypedNode) = parseContext.handleUnaryMath((yyvsp[0].lex).loc, "++", EOpPostIncrement, (yyvsp[-1].interm.intermTypedNode));
}
-#line 5356 "MachineIndependent/glslang_tab.cpp"
+#line 5374 "MachineIndependent/glslang_tab.cpp"
break;
case 23: /* postfix_expression: postfix_expression DEC_OP */
@@ -5362,7 +5380,7 @@ yyreduce:
parseContext.lValueErrorCheck((yyvsp[0].lex).loc, "--", (yyvsp[-1].interm.intermTypedNode));
(yyval.interm.intermTypedNode) = parseContext.handleUnaryMath((yyvsp[0].lex).loc, "--", EOpPostDecrement, (yyvsp[-1].interm.intermTypedNode));
}
-#line 5366 "MachineIndependent/glslang_tab.cpp"
+#line 5384 "MachineIndependent/glslang_tab.cpp"
break;
case 24: /* integer_expression: expression */
@@ -5371,7 +5389,7 @@ yyreduce:
parseContext.integerCheck((yyvsp[0].interm.intermTypedNode), "[]");
(yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode);
}
-#line 5375 "MachineIndependent/glslang_tab.cpp"
+#line 5393 "MachineIndependent/glslang_tab.cpp"
break;
case 25: /* function_call: function_call_or_method */
@@ -5380,7 +5398,7 @@ yyreduce:
(yyval.interm.intermTypedNode) = parseContext.handleFunctionCall((yyvsp[0].interm).loc, (yyvsp[0].interm).function, (yyvsp[0].interm).intermNode);
delete (yyvsp[0].interm).function;
}
-#line 5384 "MachineIndependent/glslang_tab.cpp"
+#line 5402 "MachineIndependent/glslang_tab.cpp"
break;
case 26: /* function_call_or_method: function_call_generic */
@@ -5388,7 +5406,7 @@ yyreduce:
{
(yyval.interm) = (yyvsp[0].interm);
}
-#line 5392 "MachineIndependent/glslang_tab.cpp"
+#line 5410 "MachineIndependent/glslang_tab.cpp"
break;
case 27: /* function_call_generic: function_call_header_with_parameters RIGHT_PAREN */
@@ -5397,7 +5415,7 @@ yyreduce:
(yyval.interm) = (yyvsp[-1].interm);
(yyval.interm).loc = (yyvsp[0].lex).loc;
}
-#line 5401 "MachineIndependent/glslang_tab.cpp"
+#line 5419 "MachineIndependent/glslang_tab.cpp"
break;
case 28: /* function_call_generic: function_call_header_no_parameters RIGHT_PAREN */
@@ -5406,7 +5424,7 @@ yyreduce:
(yyval.interm) = (yyvsp[-1].interm);
(yyval.interm).loc = (yyvsp[0].lex).loc;
}
-#line 5410 "MachineIndependent/glslang_tab.cpp"
+#line 5428 "MachineIndependent/glslang_tab.cpp"
break;
case 29: /* function_call_header_no_parameters: function_call_header VOID */
@@ -5414,7 +5432,7 @@ yyreduce:
{
(yyval.interm) = (yyvsp[-1].interm);
}
-#line 5418 "MachineIndependent/glslang_tab.cpp"
+#line 5436 "MachineIndependent/glslang_tab.cpp"
break;
case 30: /* function_call_header_no_parameters: function_call_header */
@@ -5422,7 +5440,7 @@ yyreduce:
{
(yyval.interm) = (yyvsp[0].interm);
}
-#line 5426 "MachineIndependent/glslang_tab.cpp"
+#line 5444 "MachineIndependent/glslang_tab.cpp"
break;
case 31: /* function_call_header_with_parameters: function_call_header assignment_expression */
@@ -5434,7 +5452,7 @@ yyreduce:
(yyval.interm).function = (yyvsp[-1].interm).function;
(yyval.interm).intermNode = (yyvsp[0].interm.intermTypedNode);
}
-#line 5438 "MachineIndependent/glslang_tab.cpp"
+#line 5456 "MachineIndependent/glslang_tab.cpp"
break;
case 32: /* function_call_header_with_parameters: function_call_header_with_parameters COMMA assignment_expression */
@@ -5446,7 +5464,7 @@ yyreduce:
(yyval.interm).function = (yyvsp[-2].interm).function;
(yyval.interm).intermNode = parseContext.intermediate.growAggregate((yyvsp[-2].interm).intermNode, (yyvsp[0].interm.intermTypedNode), (yyvsp[-1].lex).loc);
}
-#line 5450 "MachineIndependent/glslang_tab.cpp"
+#line 5468 "MachineIndependent/glslang_tab.cpp"
break;
case 33: /* function_call_header: function_identifier LEFT_PAREN */
@@ -5454,7 +5472,7 @@ yyreduce:
{
(yyval.interm) = (yyvsp[-1].interm);
}
-#line 5458 "MachineIndependent/glslang_tab.cpp"
+#line 5476 "MachineIndependent/glslang_tab.cpp"
break;
case 34: /* function_identifier: type_specifier */
@@ -5464,7 +5482,7 @@ yyreduce:
(yyval.interm).intermNode = 0;
(yyval.interm).function = parseContext.handleConstructorCall((yyvsp[0].interm.type).loc, (yyvsp[0].interm.type));
}
-#line 5468 "MachineIndependent/glslang_tab.cpp"
+#line 5486 "MachineIndependent/glslang_tab.cpp"
break;
case 35: /* function_identifier: postfix_expression */
@@ -5496,7 +5514,7 @@ yyreduce:
(yyval.interm).function = new TFunction(empty, TType(EbtVoid), EOpNull);
}
}
-#line 5500 "MachineIndependent/glslang_tab.cpp"
+#line 5518 "MachineIndependent/glslang_tab.cpp"
break;
case 36: /* function_identifier: non_uniform_qualifier */
@@ -5506,7 +5524,7 @@ yyreduce:
(yyval.interm).intermNode = 0;
(yyval.interm).function = parseContext.handleConstructorCall((yyvsp[0].interm.type).loc, (yyvsp[0].interm.type));
}
-#line 5510 "MachineIndependent/glslang_tab.cpp"
+#line 5528 "MachineIndependent/glslang_tab.cpp"
break;
case 37: /* unary_expression: postfix_expression */
@@ -5517,7 +5535,7 @@ yyreduce:
if (TIntermMethod* method = (yyvsp[0].interm.intermTypedNode)->getAsMethodNode())
parseContext.error((yyvsp[0].interm.intermTypedNode)->getLoc(), "incomplete method syntax", method->getMethodName().c_str(), "");
}
-#line 5521 "MachineIndependent/glslang_tab.cpp"
+#line 5539 "MachineIndependent/glslang_tab.cpp"
break;
case 38: /* unary_expression: INC_OP unary_expression */
@@ -5526,7 +5544,7 @@ yyreduce:
parseContext.lValueErrorCheck((yyvsp[-1].lex).loc, "++", (yyvsp[0].interm.intermTypedNode));
(yyval.interm.intermTypedNode) = parseContext.handleUnaryMath((yyvsp[-1].lex).loc, "++", EOpPreIncrement, (yyvsp[0].interm.intermTypedNode));
}
-#line 5530 "MachineIndependent/glslang_tab.cpp"
+#line 5548 "MachineIndependent/glslang_tab.cpp"
break;
case 39: /* unary_expression: DEC_OP unary_expression */
@@ -5535,7 +5553,7 @@ yyreduce:
parseContext.lValueErrorCheck((yyvsp[-1].lex).loc, "--", (yyvsp[0].interm.intermTypedNode));
(yyval.interm.intermTypedNode) = parseContext.handleUnaryMath((yyvsp[-1].lex).loc, "--", EOpPreDecrement, (yyvsp[0].interm.intermTypedNode));
}
-#line 5539 "MachineIndependent/glslang_tab.cpp"
+#line 5557 "MachineIndependent/glslang_tab.cpp"
break;
case 40: /* unary_expression: unary_operator unary_expression */
@@ -5556,38 +5574,38 @@ yyreduce:
(yyval.interm.intermTypedNode)->getAsConstantUnion()->setExpression();
}
}
-#line 5560 "MachineIndependent/glslang_tab.cpp"
+#line 5578 "MachineIndependent/glslang_tab.cpp"
break;
case 41: /* unary_operator: PLUS */
#line 627 "MachineIndependent/glslang.y"
{ (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpNull; }
-#line 5566 "MachineIndependent/glslang_tab.cpp"
+#line 5584 "MachineIndependent/glslang_tab.cpp"
break;
case 42: /* unary_operator: DASH */
#line 628 "MachineIndependent/glslang.y"
{ (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpNegative; }
-#line 5572 "MachineIndependent/glslang_tab.cpp"
+#line 5590 "MachineIndependent/glslang_tab.cpp"
break;
case 43: /* unary_operator: BANG */
#line 629 "MachineIndependent/glslang.y"
{ (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpLogicalNot; }
-#line 5578 "MachineIndependent/glslang_tab.cpp"
+#line 5596 "MachineIndependent/glslang_tab.cpp"
break;
case 44: /* unary_operator: TILDE */
#line 630 "MachineIndependent/glslang.y"
{ (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpBitwiseNot;
parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "bitwise not"); }
-#line 5585 "MachineIndependent/glslang_tab.cpp"
+#line 5603 "MachineIndependent/glslang_tab.cpp"
break;
case 45: /* multiplicative_expression: unary_expression */
#line 636 "MachineIndependent/glslang.y"
{ (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); }
-#line 5591 "MachineIndependent/glslang_tab.cpp"
+#line 5609 "MachineIndependent/glslang_tab.cpp"
break;
case 46: /* multiplicative_expression: multiplicative_expression STAR unary_expression */
@@ -5597,7 +5615,7 @@ yyreduce:
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode);
}
-#line 5601 "MachineIndependent/glslang_tab.cpp"
+#line 5619 "MachineIndependent/glslang_tab.cpp"
break;
case 47: /* multiplicative_expression: multiplicative_expression SLASH unary_expression */
@@ -5607,7 +5625,7 @@ yyreduce:
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode);
}
-#line 5611 "MachineIndependent/glslang_tab.cpp"
+#line 5629 "MachineIndependent/glslang_tab.cpp"
break;
case 48: /* multiplicative_expression: multiplicative_expression PERCENT unary_expression */
@@ -5618,13 +5636,13 @@ yyreduce:
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode);
}
-#line 5622 "MachineIndependent/glslang_tab.cpp"
+#line 5640 "MachineIndependent/glslang_tab.cpp"
break;
case 49: /* additive_expression: multiplicative_expression */
#line 656 "MachineIndependent/glslang.y"
{ (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); }
-#line 5628 "MachineIndependent/glslang_tab.cpp"
+#line 5646 "MachineIndependent/glslang_tab.cpp"
break;
case 50: /* additive_expression: additive_expression PLUS multiplicative_expression */
@@ -5634,7 +5652,7 @@ yyreduce:
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode);
}
-#line 5638 "MachineIndependent/glslang_tab.cpp"
+#line 5656 "MachineIndependent/glslang_tab.cpp"
break;
case 51: /* additive_expression: additive_expression DASH multiplicative_expression */
@@ -5644,13 +5662,13 @@ yyreduce:
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode);
}
-#line 5648 "MachineIndependent/glslang_tab.cpp"
+#line 5666 "MachineIndependent/glslang_tab.cpp"
break;
case 52: /* shift_expression: additive_expression */
#line 670 "MachineIndependent/glslang.y"
{ (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); }
-#line 5654 "MachineIndependent/glslang_tab.cpp"
+#line 5672 "MachineIndependent/glslang_tab.cpp"
break;
case 53: /* shift_expression: shift_expression LEFT_OP additive_expression */
@@ -5661,7 +5679,7 @@ yyreduce:
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode);
}
-#line 5665 "MachineIndependent/glslang_tab.cpp"
+#line 5683 "MachineIndependent/glslang_tab.cpp"
break;
case 54: /* shift_expression: shift_expression RIGHT_OP additive_expression */
@@ -5672,13 +5690,13 @@ yyreduce:
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode);
}
-#line 5676 "MachineIndependent/glslang_tab.cpp"
+#line 5694 "MachineIndependent/glslang_tab.cpp"
break;
case 55: /* relational_expression: shift_expression */
#line 686 "MachineIndependent/glslang.y"
{ (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); }
-#line 5682 "MachineIndependent/glslang_tab.cpp"
+#line 5700 "MachineIndependent/glslang_tab.cpp"
break;
case 56: /* relational_expression: relational_expression LEFT_ANGLE shift_expression */
@@ -5688,7 +5706,7 @@ yyreduce:
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc);
}
-#line 5692 "MachineIndependent/glslang_tab.cpp"
+#line 5710 "MachineIndependent/glslang_tab.cpp"
break;
case 57: /* relational_expression: relational_expression RIGHT_ANGLE shift_expression */
@@ -5698,7 +5716,7 @@ yyreduce:
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc);
}
-#line 5702 "MachineIndependent/glslang_tab.cpp"
+#line 5720 "MachineIndependent/glslang_tab.cpp"
break;
case 58: /* relational_expression: relational_expression LE_OP shift_expression */
@@ -5708,7 +5726,7 @@ yyreduce:
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc);
}
-#line 5712 "MachineIndependent/glslang_tab.cpp"
+#line 5730 "MachineIndependent/glslang_tab.cpp"
break;
case 59: /* relational_expression: relational_expression GE_OP shift_expression */
@@ -5718,13 +5736,13 @@ yyreduce:
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc);
}
-#line 5722 "MachineIndependent/glslang_tab.cpp"
+#line 5740 "MachineIndependent/glslang_tab.cpp"
break;
case 60: /* equality_expression: relational_expression */
#line 710 "MachineIndependent/glslang.y"
{ (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); }
-#line 5728 "MachineIndependent/glslang_tab.cpp"
+#line 5746 "MachineIndependent/glslang_tab.cpp"
break;
case 61: /* equality_expression: equality_expression EQ_OP relational_expression */
@@ -5738,7 +5756,7 @@ yyreduce:
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc);
}
-#line 5742 "MachineIndependent/glslang_tab.cpp"
+#line 5760 "MachineIndependent/glslang_tab.cpp"
break;
case 62: /* equality_expression: equality_expression NE_OP relational_expression */
@@ -5752,13 +5770,13 @@ yyreduce:
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc);
}
-#line 5756 "MachineIndependent/glslang_tab.cpp"
+#line 5774 "MachineIndependent/glslang_tab.cpp"
break;
case 63: /* and_expression: equality_expression */
#line 732 "MachineIndependent/glslang.y"
{ (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); }
-#line 5762 "MachineIndependent/glslang_tab.cpp"
+#line 5780 "MachineIndependent/glslang_tab.cpp"
break;
case 64: /* and_expression: and_expression AMPERSAND equality_expression */
@@ -5769,13 +5787,13 @@ yyreduce:
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode);
}
-#line 5773 "MachineIndependent/glslang_tab.cpp"
+#line 5791 "MachineIndependent/glslang_tab.cpp"
break;
case 65: /* exclusive_or_expression: and_expression */
#line 742 "MachineIndependent/glslang.y"
{ (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); }
-#line 5779 "MachineIndependent/glslang_tab.cpp"
+#line 5797 "MachineIndependent/glslang_tab.cpp"
break;
case 66: /* exclusive_or_expression: exclusive_or_expression CARET and_expression */
@@ -5786,13 +5804,13 @@ yyreduce:
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode);
}
-#line 5790 "MachineIndependent/glslang_tab.cpp"
+#line 5808 "MachineIndependent/glslang_tab.cpp"
break;
case 67: /* inclusive_or_expression: exclusive_or_expression */
#line 752 "MachineIndependent/glslang.y"
{ (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); }
-#line 5796 "MachineIndependent/glslang_tab.cpp"
+#line 5814 "MachineIndependent/glslang_tab.cpp"
break;
case 68: /* inclusive_or_expression: inclusive_or_expression VERTICAL_BAR exclusive_or_expression */
@@ -5803,13 +5821,13 @@ yyreduce:
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode);
}
-#line 5807 "MachineIndependent/glslang_tab.cpp"
+#line 5825 "MachineIndependent/glslang_tab.cpp"
break;
case 69: /* logical_and_expression: inclusive_or_expression */
#line 762 "MachineIndependent/glslang.y"
{ (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); }
-#line 5813 "MachineIndependent/glslang_tab.cpp"
+#line 5831 "MachineIndependent/glslang_tab.cpp"
break;
case 70: /* logical_and_expression: logical_and_expression AND_OP inclusive_or_expression */
@@ -5819,13 +5837,13 @@ yyreduce:
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc);
}
-#line 5823 "MachineIndependent/glslang_tab.cpp"
+#line 5841 "MachineIndependent/glslang_tab.cpp"
break;
case 71: /* logical_xor_expression: logical_and_expression */
#line 771 "MachineIndependent/glslang.y"
{ (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); }
-#line 5829 "MachineIndependent/glslang_tab.cpp"
+#line 5847 "MachineIndependent/glslang_tab.cpp"
break;
case 72: /* logical_xor_expression: logical_xor_expression XOR_OP logical_and_expression */
@@ -5835,13 +5853,13 @@ yyreduce:
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc);
}
-#line 5839 "MachineIndependent/glslang_tab.cpp"
+#line 5857 "MachineIndependent/glslang_tab.cpp"
break;
case 73: /* logical_or_expression: logical_xor_expression */
#line 780 "MachineIndependent/glslang.y"
{ (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); }
-#line 5845 "MachineIndependent/glslang_tab.cpp"
+#line 5863 "MachineIndependent/glslang_tab.cpp"
break;
case 74: /* logical_or_expression: logical_or_expression OR_OP logical_xor_expression */
@@ -5851,13 +5869,13 @@ yyreduce:
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc);
}
-#line 5855 "MachineIndependent/glslang_tab.cpp"
+#line 5873 "MachineIndependent/glslang_tab.cpp"
break;
case 75: /* conditional_expression: logical_or_expression */
#line 789 "MachineIndependent/glslang.y"
{ (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); }
-#line 5861 "MachineIndependent/glslang_tab.cpp"
+#line 5879 "MachineIndependent/glslang_tab.cpp"
break;
case 76: /* $@1: %empty */
@@ -5865,7 +5883,7 @@ yyreduce:
{
++parseContext.controlFlowNestingLevel;
}
-#line 5869 "MachineIndependent/glslang_tab.cpp"
+#line 5887 "MachineIndependent/glslang_tab.cpp"
break;
case 77: /* conditional_expression: logical_or_expression QUESTION $@1 expression COLON assignment_expression */
@@ -5878,17 +5896,17 @@ yyreduce:
parseContext.rValueErrorCheck((yyvsp[-1].lex).loc, ":", (yyvsp[0].interm.intermTypedNode));
(yyval.interm.intermTypedNode) = parseContext.intermediate.addSelection((yyvsp[-5].interm.intermTypedNode), (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode), (yyvsp[-4].lex).loc);
if ((yyval.interm.intermTypedNode) == 0) {
- parseContext.binaryOpError((yyvsp[-4].lex).loc, ":", (yyvsp[-2].interm.intermTypedNode)->getCompleteString(), (yyvsp[0].interm.intermTypedNode)->getCompleteString());
+ parseContext.binaryOpError((yyvsp[-4].lex).loc, ":", (yyvsp[-2].interm.intermTypedNode)->getCompleteString(parseContext.intermediate.getEnhancedMsgs()), (yyvsp[0].interm.intermTypedNode)->getCompleteString(parseContext.intermediate.getEnhancedMsgs()));
(yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode);
}
}
-#line 5886 "MachineIndependent/glslang_tab.cpp"
+#line 5904 "MachineIndependent/glslang_tab.cpp"
break;
case 78: /* assignment_expression: conditional_expression */
#line 808 "MachineIndependent/glslang.y"
{ (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); }
-#line 5892 "MachineIndependent/glslang_tab.cpp"
+#line 5910 "MachineIndependent/glslang_tab.cpp"
break;
case 79: /* assignment_expression: unary_expression assignment_operator assignment_expression */
@@ -5902,11 +5920,11 @@ yyreduce:
parseContext.rValueErrorCheck((yyvsp[-1].interm).loc, "assign", (yyvsp[0].interm.intermTypedNode));
(yyval.interm.intermTypedNode) = parseContext.addAssign((yyvsp[-1].interm).loc, (yyvsp[-1].interm).op, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode));
if ((yyval.interm.intermTypedNode) == 0) {
- parseContext.assignError((yyvsp[-1].interm).loc, "assign", (yyvsp[-2].interm.intermTypedNode)->getCompleteString(), (yyvsp[0].interm.intermTypedNode)->getCompleteString());
+ parseContext.assignError((yyvsp[-1].interm).loc, "assign", (yyvsp[-2].interm.intermTypedNode)->getCompleteString(parseContext.intermediate.getEnhancedMsgs()), (yyvsp[0].interm.intermTypedNode)->getCompleteString(parseContext.intermediate.getEnhancedMsgs()));
(yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode);
}
}
-#line 5910 "MachineIndependent/glslang_tab.cpp"
+#line 5928 "MachineIndependent/glslang_tab.cpp"
break;
case 80: /* assignment_operator: EQUAL */
@@ -5915,7 +5933,7 @@ yyreduce:
(yyval.interm).loc = (yyvsp[0].lex).loc;
(yyval.interm).op = EOpAssign;
}
-#line 5919 "MachineIndependent/glslang_tab.cpp"
+#line 5937 "MachineIndependent/glslang_tab.cpp"
break;
case 81: /* assignment_operator: MUL_ASSIGN */
@@ -5924,7 +5942,7 @@ yyreduce:
(yyval.interm).loc = (yyvsp[0].lex).loc;
(yyval.interm).op = EOpMulAssign;
}
-#line 5928 "MachineIndependent/glslang_tab.cpp"
+#line 5946 "MachineIndependent/glslang_tab.cpp"
break;
case 82: /* assignment_operator: DIV_ASSIGN */
@@ -5933,7 +5951,7 @@ yyreduce:
(yyval.interm).loc = (yyvsp[0].lex).loc;
(yyval.interm).op = EOpDivAssign;
}
-#line 5937 "MachineIndependent/glslang_tab.cpp"
+#line 5955 "MachineIndependent/glslang_tab.cpp"
break;
case 83: /* assignment_operator: MOD_ASSIGN */
@@ -5943,7 +5961,7 @@ yyreduce:
(yyval.interm).loc = (yyvsp[0].lex).loc;
(yyval.interm).op = EOpModAssign;
}
-#line 5947 "MachineIndependent/glslang_tab.cpp"
+#line 5965 "MachineIndependent/glslang_tab.cpp"
break;
case 84: /* assignment_operator: ADD_ASSIGN */
@@ -5952,7 +5970,7 @@ yyreduce:
(yyval.interm).loc = (yyvsp[0].lex).loc;
(yyval.interm).op = EOpAddAssign;
}
-#line 5956 "MachineIndependent/glslang_tab.cpp"
+#line 5974 "MachineIndependent/glslang_tab.cpp"
break;
case 85: /* assignment_operator: SUB_ASSIGN */
@@ -5961,7 +5979,7 @@ yyreduce:
(yyval.interm).loc = (yyvsp[0].lex).loc;
(yyval.interm).op = EOpSubAssign;
}
-#line 5965 "MachineIndependent/glslang_tab.cpp"
+#line 5983 "MachineIndependent/glslang_tab.cpp"
break;
case 86: /* assignment_operator: LEFT_ASSIGN */
@@ -5970,7 +5988,7 @@ yyreduce:
parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "bit-shift left assign");
(yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpLeftShiftAssign;
}
-#line 5974 "MachineIndependent/glslang_tab.cpp"
+#line 5992 "MachineIndependent/glslang_tab.cpp"
break;
case 87: /* assignment_operator: RIGHT_ASSIGN */
@@ -5979,7 +5997,7 @@ yyreduce:
parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "bit-shift right assign");
(yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpRightShiftAssign;
}
-#line 5983 "MachineIndependent/glslang_tab.cpp"
+#line 6001 "MachineIndependent/glslang_tab.cpp"
break;
case 88: /* assignment_operator: AND_ASSIGN */
@@ -5988,7 +6006,7 @@ yyreduce:
parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "bitwise-and assign");
(yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpAndAssign;
}
-#line 5992 "MachineIndependent/glslang_tab.cpp"
+#line 6010 "MachineIndependent/glslang_tab.cpp"
break;
case 89: /* assignment_operator: XOR_ASSIGN */
@@ -5997,7 +6015,7 @@ yyreduce:
parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "bitwise-xor assign");
(yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpExclusiveOrAssign;
}
-#line 6001 "MachineIndependent/glslang_tab.cpp"
+#line 6019 "MachineIndependent/glslang_tab.cpp"
break;
case 90: /* assignment_operator: OR_ASSIGN */
@@ -6006,7 +6024,7 @@ yyreduce:
parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "bitwise-or assign");
(yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpInclusiveOrAssign;
}
-#line 6010 "MachineIndependent/glslang_tab.cpp"
+#line 6028 "MachineIndependent/glslang_tab.cpp"
break;
case 91: /* expression: assignment_expression */
@@ -6014,7 +6032,7 @@ yyreduce:
{
(yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode);
}
-#line 6018 "MachineIndependent/glslang_tab.cpp"
+#line 6036 "MachineIndependent/glslang_tab.cpp"
break;
case 92: /* expression: expression COMMA assignment_expression */
@@ -6023,11 +6041,11 @@ yyreduce:
parseContext.samplerConstructorLocationCheck((yyvsp[-1].lex).loc, ",", (yyvsp[0].interm.intermTypedNode));
(yyval.interm.intermTypedNode) = parseContext.intermediate.addComma((yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode), (yyvsp[-1].lex).loc);
if ((yyval.interm.intermTypedNode) == 0) {
- parseContext.binaryOpError((yyvsp[-1].lex).loc, ",", (yyvsp[-2].interm.intermTypedNode)->getCompleteString(), (yyvsp[0].interm.intermTypedNode)->getCompleteString());
+ parseContext.binaryOpError((yyvsp[-1].lex).loc, ",", (yyvsp[-2].interm.intermTypedNode)->getCompleteString(parseContext.intermediate.getEnhancedMsgs()), (yyvsp[0].interm.intermTypedNode)->getCompleteString(parseContext.intermediate.getEnhancedMsgs()));
(yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode);
}
}
-#line 6031 "MachineIndependent/glslang_tab.cpp"
+#line 6049 "MachineIndependent/glslang_tab.cpp"
break;
case 93: /* constant_expression: conditional_expression */
@@ -6036,7 +6054,7 @@ yyreduce:
parseContext.constantValueCheck((yyvsp[0].interm.intermTypedNode), "");
(yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode);
}
-#line 6040 "MachineIndependent/glslang_tab.cpp"
+#line 6058 "MachineIndependent/glslang_tab.cpp"
break;
case 94: /* declaration: function_prototype SEMICOLON */
@@ -6046,7 +6064,7 @@ yyreduce:
(yyval.interm.intermNode) = 0;
// TODO: 4.0 functionality: subroutines: make the identifier a user type for this signature
}
-#line 6050 "MachineIndependent/glslang_tab.cpp"
+#line 6068 "MachineIndependent/glslang_tab.cpp"
break;
case 95: /* declaration: spirv_instruction_qualifier function_prototype SEMICOLON */
@@ -6058,7 +6076,7 @@ yyreduce:
(yyval.interm.intermNode) = 0;
// TODO: 4.0 functionality: subroutines: make the identifier a user type for this signature
}
-#line 6062 "MachineIndependent/glslang_tab.cpp"
+#line 6080 "MachineIndependent/glslang_tab.cpp"
break;
case 96: /* declaration: spirv_execution_mode_qualifier SEMICOLON */
@@ -6068,7 +6086,7 @@ yyreduce:
parseContext.requireExtensions((yyvsp[0].lex).loc, 1, &E_GL_EXT_spirv_intrinsics, "SPIR-V execution mode qualifier");
(yyval.interm.intermNode) = 0;
}
-#line 6072 "MachineIndependent/glslang_tab.cpp"
+#line 6090 "MachineIndependent/glslang_tab.cpp"
break;
case 97: /* declaration: init_declarator_list SEMICOLON */
@@ -6078,7 +6096,7 @@ yyreduce:
(yyvsp[-1].interm).intermNode->getAsAggregate()->setOperator(EOpSequence);
(yyval.interm.intermNode) = (yyvsp[-1].interm).intermNode;
}
-#line 6082 "MachineIndependent/glslang_tab.cpp"
+#line 6100 "MachineIndependent/glslang_tab.cpp"
break;
case 98: /* declaration: PRECISION precision_qualifier type_specifier SEMICOLON */
@@ -6090,7 +6108,7 @@ yyreduce:
parseContext.setDefaultPrecision((yyvsp[-3].lex).loc, (yyvsp[-1].interm.type), (yyvsp[-2].interm.type).qualifier.precision);
(yyval.interm.intermNode) = 0;
}
-#line 6094 "MachineIndependent/glslang_tab.cpp"
+#line 6112 "MachineIndependent/glslang_tab.cpp"
break;
case 99: /* declaration: block_structure SEMICOLON */
@@ -6099,7 +6117,7 @@ yyreduce:
parseContext.declareBlock((yyvsp[-1].interm).loc, *(yyvsp[-1].interm).typeList);
(yyval.interm.intermNode) = 0;
}
-#line 6103 "MachineIndependent/glslang_tab.cpp"
+#line 6121 "MachineIndependent/glslang_tab.cpp"
break;
case 100: /* declaration: block_structure IDENTIFIER SEMICOLON */
@@ -6108,7 +6126,7 @@ yyreduce:
parseContext.declareBlock((yyvsp[-2].interm).loc, *(yyvsp[-2].interm).typeList, (yyvsp[-1].lex).string);
(yyval.interm.intermNode) = 0;
}
-#line 6112 "MachineIndependent/glslang_tab.cpp"
+#line 6130 "MachineIndependent/glslang_tab.cpp"
break;
case 101: /* declaration: block_structure IDENTIFIER array_specifier SEMICOLON */
@@ -6117,7 +6135,7 @@ yyreduce:
parseContext.declareBlock((yyvsp[-3].interm).loc, *(yyvsp[-3].interm).typeList, (yyvsp[-2].lex).string, (yyvsp[-1].interm).arraySizes);
(yyval.interm.intermNode) = 0;
}
-#line 6121 "MachineIndependent/glslang_tab.cpp"
+#line 6139 "MachineIndependent/glslang_tab.cpp"
break;
case 102: /* declaration: type_qualifier SEMICOLON */
@@ -6127,7 +6145,7 @@ yyreduce:
parseContext.updateStandaloneQualifierDefaults((yyvsp[-1].interm.type).loc, (yyvsp[-1].interm.type));
(yyval.interm.intermNode) = 0;
}
-#line 6131 "MachineIndependent/glslang_tab.cpp"
+#line 6149 "MachineIndependent/glslang_tab.cpp"
break;
case 103: /* declaration: type_qualifier IDENTIFIER SEMICOLON */
@@ -6137,7 +6155,7 @@ yyreduce:
parseContext.addQualifierToExisting((yyvsp[-2].interm.type).loc, (yyvsp[-2].interm.type).qualifier, *(yyvsp[-1].lex).string);
(yyval.interm.intermNode) = 0;
}
-#line 6141 "MachineIndependent/glslang_tab.cpp"
+#line 6159 "MachineIndependent/glslang_tab.cpp"
break;
case 104: /* declaration: type_qualifier IDENTIFIER identifier_list SEMICOLON */
@@ -6148,13 +6166,13 @@ yyreduce:
parseContext.addQualifierToExisting((yyvsp[-3].interm.type).loc, (yyvsp[-3].interm.type).qualifier, *(yyvsp[-1].interm.identifierList));
(yyval.interm.intermNode) = 0;
}
-#line 6152 "MachineIndependent/glslang_tab.cpp"
+#line 6170 "MachineIndependent/glslang_tab.cpp"
break;
case 105: /* $@2: %empty */
#line 956 "MachineIndependent/glslang.y"
{ parseContext.nestedBlockCheck((yyvsp[-2].interm.type).loc); }
-#line 6158 "MachineIndependent/glslang_tab.cpp"
+#line 6176 "MachineIndependent/glslang_tab.cpp"
break;
case 106: /* block_structure: type_qualifier IDENTIFIER LEFT_BRACE $@2 struct_declaration_list RIGHT_BRACE */
@@ -6168,7 +6186,7 @@ yyreduce:
(yyval.interm).loc = (yyvsp[-5].interm.type).loc;
(yyval.interm).typeList = (yyvsp[-1].interm.typeList);
}
-#line 6172 "MachineIndependent/glslang_tab.cpp"
+#line 6190 "MachineIndependent/glslang_tab.cpp"
break;
case 107: /* identifier_list: COMMA IDENTIFIER */
@@ -6177,7 +6195,7 @@ yyreduce:
(yyval.interm.identifierList) = new TIdentifierList;
(yyval.interm.identifierList)->push_back((yyvsp[0].lex).string);
}
-#line 6181 "MachineIndependent/glslang_tab.cpp"
+#line 6199 "MachineIndependent/glslang_tab.cpp"
break;
case 108: /* identifier_list: identifier_list COMMA IDENTIFIER */
@@ -6186,7 +6204,7 @@ yyreduce:
(yyval.interm.identifierList) = (yyvsp[-2].interm.identifierList);
(yyval.interm.identifierList)->push_back((yyvsp[0].lex).string);
}
-#line 6190 "MachineIndependent/glslang_tab.cpp"
+#line 6208 "MachineIndependent/glslang_tab.cpp"
break;
case 109: /* function_prototype: function_declarator RIGHT_PAREN */
@@ -6195,7 +6213,7 @@ yyreduce:
(yyval.interm).function = (yyvsp[-1].interm.function);
(yyval.interm).loc = (yyvsp[0].lex).loc;
}
-#line 6199 "MachineIndependent/glslang_tab.cpp"
+#line 6217 "MachineIndependent/glslang_tab.cpp"
break;
case 110: /* function_prototype: function_declarator RIGHT_PAREN attribute */
@@ -6206,7 +6224,7 @@ yyreduce:
parseContext.requireExtensions((yyvsp[-1].lex).loc, 1, &E_GL_EXT_subgroup_uniform_control_flow, "attribute");
parseContext.handleFunctionAttributes((yyvsp[-1].lex).loc, *(yyvsp[0].interm.attributes));
}
-#line 6210 "MachineIndependent/glslang_tab.cpp"
+#line 6228 "MachineIndependent/glslang_tab.cpp"
break;
case 111: /* function_prototype: attribute function_declarator RIGHT_PAREN */
@@ -6217,7 +6235,7 @@ yyreduce:
parseContext.requireExtensions((yyvsp[0].lex).loc, 1, &E_GL_EXT_subgroup_uniform_control_flow, "attribute");
parseContext.handleFunctionAttributes((yyvsp[0].lex).loc, *(yyvsp[-2].interm.attributes));
}
-#line 6221 "MachineIndependent/glslang_tab.cpp"
+#line 6239 "MachineIndependent/glslang_tab.cpp"
break;
case 112: /* function_prototype: attribute function_declarator RIGHT_PAREN attribute */
@@ -6229,7 +6247,7 @@ yyreduce:
parseContext.handleFunctionAttributes((yyvsp[-1].lex).loc, *(yyvsp[-3].interm.attributes));
parseContext.handleFunctionAttributes((yyvsp[-1].lex).loc, *(yyvsp[0].interm.attributes));
}
-#line 6233 "MachineIndependent/glslang_tab.cpp"
+#line 6251 "MachineIndependent/glslang_tab.cpp"
break;
case 113: /* function_declarator: function_header */
@@ -6237,7 +6255,7 @@ yyreduce:
{
(yyval.interm.function) = (yyvsp[0].interm.function);
}
-#line 6241 "MachineIndependent/glslang_tab.cpp"
+#line 6259 "MachineIndependent/glslang_tab.cpp"
break;
case 114: /* function_declarator: function_header_with_parameters */
@@ -6245,7 +6263,7 @@ yyreduce:
{
(yyval.interm.function) = (yyvsp[0].interm.function);
}
-#line 6249 "MachineIndependent/glslang_tab.cpp"
+#line 6267 "MachineIndependent/glslang_tab.cpp"
break;
case 115: /* function_header_with_parameters: function_header parameter_declaration */
@@ -6258,7 +6276,7 @@ yyreduce:
else
delete (yyvsp[0].interm).param.type;
}
-#line 6262 "MachineIndependent/glslang_tab.cpp"
+#line 6280 "MachineIndependent/glslang_tab.cpp"
break;
case 116: /* function_header_with_parameters: function_header_with_parameters COMMA parameter_declaration */
@@ -6280,7 +6298,7 @@ yyreduce:
(yyvsp[-2].interm.function)->addParameter((yyvsp[0].interm).param);
}
}
-#line 6284 "MachineIndependent/glslang_tab.cpp"
+#line 6302 "MachineIndependent/glslang_tab.cpp"
break;
case 117: /* function_header: fully_specified_type IDENTIFIER LEFT_PAREN */
@@ -6304,7 +6322,7 @@ yyreduce:
function = new TFunction((yyvsp[-1].lex).string, type);
(yyval.interm.function) = function;
}
-#line 6308 "MachineIndependent/glslang_tab.cpp"
+#line 6326 "MachineIndependent/glslang_tab.cpp"
break;
case 118: /* parameter_declarator: type_specifier IDENTIFIER */
@@ -6324,7 +6342,7 @@ yyreduce:
(yyval.interm).loc = (yyvsp[0].lex).loc;
(yyval.interm).param = param;
}
-#line 6328 "MachineIndependent/glslang_tab.cpp"
+#line 6346 "MachineIndependent/glslang_tab.cpp"
break;
case 119: /* parameter_declarator: type_specifier IDENTIFIER array_specifier */
@@ -6348,7 +6366,7 @@ yyreduce:
(yyval.interm).loc = (yyvsp[-1].lex).loc;
(yyval.interm).param = param;
}
-#line 6352 "MachineIndependent/glslang_tab.cpp"
+#line 6370 "MachineIndependent/glslang_tab.cpp"
break;
case 120: /* parameter_declaration: type_qualifier parameter_declarator */
@@ -6364,7 +6382,7 @@ yyreduce:
parseContext.paramCheckFix((yyvsp[-1].interm.type).loc, (yyvsp[-1].interm.type).qualifier, *(yyval.interm).param.type);
}
-#line 6368 "MachineIndependent/glslang_tab.cpp"
+#line 6386 "MachineIndependent/glslang_tab.cpp"
break;
case 121: /* parameter_declaration: parameter_declarator */
@@ -6376,7 +6394,7 @@ yyreduce:
parseContext.paramCheckFixStorage((yyvsp[0].interm).loc, EvqTemporary, *(yyval.interm).param.type);
parseContext.precisionQualifierCheck((yyval.interm).loc, (yyval.interm).param.type->getBasicType(), (yyval.interm).param.type->getQualifier());
}
-#line 6380 "MachineIndependent/glslang_tab.cpp"
+#line 6398 "MachineIndependent/glslang_tab.cpp"
break;
case 122: /* parameter_declaration: type_qualifier parameter_type_specifier */
@@ -6391,7 +6409,7 @@ yyreduce:
parseContext.parameterTypeCheck((yyvsp[0].interm).loc, (yyvsp[-1].interm.type).qualifier.storage, *(yyval.interm).param.type);
parseContext.paramCheckFix((yyvsp[-1].interm.type).loc, (yyvsp[-1].interm.type).qualifier, *(yyval.interm).param.type);
}
-#line 6395 "MachineIndependent/glslang_tab.cpp"
+#line 6413 "MachineIndependent/glslang_tab.cpp"
break;
case 123: /* parameter_declaration: parameter_type_specifier */
@@ -6403,7 +6421,7 @@ yyreduce:
parseContext.paramCheckFixStorage((yyvsp[0].interm).loc, EvqTemporary, *(yyval.interm).param.type);
parseContext.precisionQualifierCheck((yyval.interm).loc, (yyval.interm).param.type->getBasicType(), (yyval.interm).param.type->getQualifier());
}
-#line 6407 "MachineIndependent/glslang_tab.cpp"
+#line 6425 "MachineIndependent/glslang_tab.cpp"
break;
case 124: /* parameter_type_specifier: type_specifier */
@@ -6414,7 +6432,7 @@ yyreduce:
if ((yyvsp[0].interm.type).arraySizes)
parseContext.arraySizeRequiredCheck((yyvsp[0].interm.type).loc, *(yyvsp[0].interm.type).arraySizes);
}
-#line 6418 "MachineIndependent/glslang_tab.cpp"
+#line 6436 "MachineIndependent/glslang_tab.cpp"
break;
case 125: /* init_declarator_list: single_declaration */
@@ -6422,7 +6440,7 @@ yyreduce:
{
(yyval.interm) = (yyvsp[0].interm);
}
-#line 6426 "MachineIndependent/glslang_tab.cpp"
+#line 6444 "MachineIndependent/glslang_tab.cpp"
break;
case 126: /* init_declarator_list: init_declarator_list COMMA IDENTIFIER */
@@ -6431,7 +6449,7 @@ yyreduce:
(yyval.interm) = (yyvsp[-2].interm);
parseContext.declareVariable((yyvsp[0].lex).loc, *(yyvsp[0].lex).string, (yyvsp[-2].interm).type);
}
-#line 6435 "MachineIndependent/glslang_tab.cpp"
+#line 6453 "MachineIndependent/glslang_tab.cpp"
break;
case 127: /* init_declarator_list: init_declarator_list COMMA IDENTIFIER array_specifier */
@@ -6440,7 +6458,7 @@ yyreduce:
(yyval.interm) = (yyvsp[-3].interm);
parseContext.declareVariable((yyvsp[-1].lex).loc, *(yyvsp[-1].lex).string, (yyvsp[-3].interm).type, (yyvsp[0].interm).arraySizes);
}
-#line 6444 "MachineIndependent/glslang_tab.cpp"
+#line 6462 "MachineIndependent/glslang_tab.cpp"
break;
case 128: /* init_declarator_list: init_declarator_list COMMA IDENTIFIER array_specifier EQUAL initializer */
@@ -6450,7 +6468,7 @@ yyreduce:
TIntermNode* initNode = parseContext.declareVariable((yyvsp[-3].lex).loc, *(yyvsp[-3].lex).string, (yyvsp[-5].interm).type, (yyvsp[-2].interm).arraySizes, (yyvsp[0].interm.intermTypedNode));
(yyval.interm).intermNode = parseContext.intermediate.growAggregate((yyvsp[-5].interm).intermNode, initNode, (yyvsp[-1].lex).loc);
}
-#line 6454 "MachineIndependent/glslang_tab.cpp"
+#line 6472 "MachineIndependent/glslang_tab.cpp"
break;
case 129: /* init_declarator_list: init_declarator_list COMMA IDENTIFIER EQUAL initializer */
@@ -6460,7 +6478,7 @@ yyreduce:
TIntermNode* initNode = parseContext.declareVariable((yyvsp[-2].lex).loc, *(yyvsp[-2].lex).string, (yyvsp[-4].interm).type, 0, (yyvsp[0].interm.intermTypedNode));
(yyval.interm).intermNode = parseContext.intermediate.growAggregate((yyvsp[-4].interm).intermNode, initNode, (yyvsp[-1].lex).loc);
}
-#line 6464 "MachineIndependent/glslang_tab.cpp"
+#line 6482 "MachineIndependent/glslang_tab.cpp"
break;
case 130: /* single_declaration: fully_specified_type */
@@ -6472,7 +6490,7 @@ yyreduce:
parseContext.declareTypeDefaults((yyval.interm).loc, (yyval.interm).type);
}
-#line 6476 "MachineIndependent/glslang_tab.cpp"
+#line 6494 "MachineIndependent/glslang_tab.cpp"
break;
case 131: /* single_declaration: fully_specified_type IDENTIFIER */
@@ -6482,7 +6500,7 @@ yyreduce:
(yyval.interm).intermNode = 0;
parseContext.declareVariable((yyvsp[0].lex).loc, *(yyvsp[0].lex).string, (yyvsp[-1].interm.type));
}
-#line 6486 "MachineIndependent/glslang_tab.cpp"
+#line 6504 "MachineIndependent/glslang_tab.cpp"
break;
case 132: /* single_declaration: fully_specified_type IDENTIFIER array_specifier */
@@ -6492,7 +6510,7 @@ yyreduce:
(yyval.interm).intermNode = 0;
parseContext.declareVariable((yyvsp[-1].lex).loc, *(yyvsp[-1].lex).string, (yyvsp[-2].interm.type), (yyvsp[0].interm).arraySizes);
}
-#line 6496 "MachineIndependent/glslang_tab.cpp"
+#line 6514 "MachineIndependent/glslang_tab.cpp"
break;
case 133: /* single_declaration: fully_specified_type IDENTIFIER array_specifier EQUAL initializer */
@@ -6502,7 +6520,7 @@ yyreduce:
TIntermNode* initNode = parseContext.declareVariable((yyvsp[-3].lex).loc, *(yyvsp[-3].lex).string, (yyvsp[-4].interm.type), (yyvsp[-2].interm).arraySizes, (yyvsp[0].interm.intermTypedNode));
(yyval.interm).intermNode = parseContext.intermediate.growAggregate(0, initNode, (yyvsp[-1].lex).loc);
}
-#line 6506 "MachineIndependent/glslang_tab.cpp"
+#line 6524 "MachineIndependent/glslang_tab.cpp"
break;
case 134: /* single_declaration: fully_specified_type IDENTIFIER EQUAL initializer */
@@ -6512,7 +6530,7 @@ yyreduce:
TIntermNode* initNode = parseContext.declareVariable((yyvsp[-2].lex).loc, *(yyvsp[-2].lex).string, (yyvsp[-3].interm.type), 0, (yyvsp[0].interm.intermTypedNode));
(yyval.interm).intermNode = parseContext.intermediate.growAggregate(0, initNode, (yyvsp[-1].lex).loc);
}
-#line 6516 "MachineIndependent/glslang_tab.cpp"
+#line 6534 "MachineIndependent/glslang_tab.cpp"
break;
case 135: /* fully_specified_type: type_specifier */
@@ -6527,7 +6545,7 @@ yyreduce:
}
parseContext.precisionQualifierCheck((yyval.interm.type).loc, (yyval.interm.type).basicType, (yyval.interm.type).qualifier);
}
-#line 6531 "MachineIndependent/glslang_tab.cpp"
+#line 6549 "MachineIndependent/glslang_tab.cpp"
break;
case 136: /* fully_specified_type: type_qualifier type_specifier */
@@ -6556,7 +6574,7 @@ yyreduce:
(parseContext.language == EShLangFragment && (yyval.interm.type).qualifier.storage == EvqVaryingIn)))
(yyval.interm.type).qualifier.smooth = true;
}
-#line 6560 "MachineIndependent/glslang_tab.cpp"
+#line 6578 "MachineIndependent/glslang_tab.cpp"
break;
case 137: /* invariant_qualifier: INVARIANT */
@@ -6567,7 +6585,7 @@ yyreduce:
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.invariant = true;
}
-#line 6571 "MachineIndependent/glslang_tab.cpp"
+#line 6589 "MachineIndependent/glslang_tab.cpp"
break;
case 138: /* interpolation_qualifier: SMOOTH */
@@ -6579,7 +6597,7 @@ yyreduce:
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.smooth = true;
}
-#line 6583 "MachineIndependent/glslang_tab.cpp"
+#line 6601 "MachineIndependent/glslang_tab.cpp"
break;
case 139: /* interpolation_qualifier: FLAT */
@@ -6591,7 +6609,7 @@ yyreduce:
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.flat = true;
}
-#line 6595 "MachineIndependent/glslang_tab.cpp"
+#line 6613 "MachineIndependent/glslang_tab.cpp"
break;
case 140: /* interpolation_qualifier: NOPERSPECTIVE */
@@ -6603,7 +6621,7 @@ yyreduce:
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.nopersp = true;
}
-#line 6607 "MachineIndependent/glslang_tab.cpp"
+#line 6625 "MachineIndependent/glslang_tab.cpp"
break;
case 141: /* interpolation_qualifier: EXPLICITINTERPAMD */
@@ -6615,7 +6633,7 @@ yyreduce:
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.explicitInterp = true;
}
-#line 6619 "MachineIndependent/glslang_tab.cpp"
+#line 6637 "MachineIndependent/glslang_tab.cpp"
break;
case 142: /* interpolation_qualifier: PERVERTEXNV */
@@ -6628,123 +6646,151 @@ yyreduce:
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.pervertexNV = true;
}
-#line 6632 "MachineIndependent/glslang_tab.cpp"
+#line 6650 "MachineIndependent/glslang_tab.cpp"
break;
- case 143: /* interpolation_qualifier: PERPRIMITIVENV */
+ case 143: /* interpolation_qualifier: PERVERTEXEXT */
#line 1293 "MachineIndependent/glslang.y"
+ {
+ parseContext.globalCheck((yyvsp[0].lex).loc, "pervertexEXT");
+ parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile, 0, E_GL_EXT_fragment_shader_barycentric, "fragment shader barycentric");
+ parseContext.profileRequires((yyvsp[0].lex).loc, ECompatibilityProfile, 0, E_GL_EXT_fragment_shader_barycentric, "fragment shader barycentric");
+ parseContext.profileRequires((yyvsp[0].lex).loc, EEsProfile, 0, E_GL_EXT_fragment_shader_barycentric, "fragment shader barycentric");
+ (yyval.interm.type).init((yyvsp[0].lex).loc);
+ (yyval.interm.type).qualifier.pervertexEXT = true;
+ }
+#line 6663 "MachineIndependent/glslang_tab.cpp"
+ break;
+
+ case 144: /* interpolation_qualifier: PERPRIMITIVENV */
+#line 1301 "MachineIndependent/glslang.y"
{
// No need for profile version or extension check. Shader stage already checks both.
parseContext.globalCheck((yyvsp[0].lex).loc, "perprimitiveNV");
- parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangFragmentMask | EShLangMeshNVMask), "perprimitiveNV");
+ parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangFragmentMask | EShLangMeshMask), "perprimitiveNV");
// Fragment shader stage doesn't check for extension. So we explicitly add below extension check.
if (parseContext.language == EShLangFragment)
parseContext.requireExtensions((yyvsp[0].lex).loc, 1, &E_GL_NV_mesh_shader, "perprimitiveNV");
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.perPrimitiveNV = true;
}
-#line 6647 "MachineIndependent/glslang_tab.cpp"
+#line 6678 "MachineIndependent/glslang_tab.cpp"
break;
- case 144: /* interpolation_qualifier: PERVIEWNV */
-#line 1303 "MachineIndependent/glslang.y"
+ case 145: /* interpolation_qualifier: PERPRIMITIVEEXT */
+#line 1311 "MachineIndependent/glslang.y"
+ {
+ // No need for profile version or extension check. Shader stage already checks both.
+ parseContext.globalCheck((yyvsp[0].lex).loc, "perprimitiveEXT");
+ parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangFragmentMask | EShLangMeshMask), "perprimitiveEXT");
+ // Fragment shader stage doesn't check for extension. So we explicitly add below extension check.
+ if (parseContext.language == EShLangFragment)
+ parseContext.requireExtensions((yyvsp[0].lex).loc, 1, &E_GL_EXT_mesh_shader, "perprimitiveEXT");
+ (yyval.interm.type).init((yyvsp[0].lex).loc);
+ (yyval.interm.type).qualifier.perPrimitiveNV = true;
+ }
+#line 6693 "MachineIndependent/glslang_tab.cpp"
+ break;
+
+ case 146: /* interpolation_qualifier: PERVIEWNV */
+#line 1321 "MachineIndependent/glslang.y"
{
// No need for profile version or extension check. Shader stage already checks both.
parseContext.globalCheck((yyvsp[0].lex).loc, "perviewNV");
- parseContext.requireStage((yyvsp[0].lex).loc, EShLangMeshNV, "perviewNV");
+ parseContext.requireStage((yyvsp[0].lex).loc, EShLangMesh, "perviewNV");
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.perViewNV = true;
}
-#line 6659 "MachineIndependent/glslang_tab.cpp"
+#line 6705 "MachineIndependent/glslang_tab.cpp"
break;
- case 145: /* interpolation_qualifier: PERTASKNV */
-#line 1310 "MachineIndependent/glslang.y"
+ case 147: /* interpolation_qualifier: PERTASKNV */
+#line 1328 "MachineIndependent/glslang.y"
{
// No need for profile version or extension check. Shader stage already checks both.
parseContext.globalCheck((yyvsp[0].lex).loc, "taskNV");
- parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangTaskNVMask | EShLangMeshNVMask), "taskNV");
+ parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangTaskMask | EShLangMeshMask), "taskNV");
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.perTaskNV = true;
}
-#line 6671 "MachineIndependent/glslang_tab.cpp"
+#line 6717 "MachineIndependent/glslang_tab.cpp"
break;
- case 146: /* layout_qualifier: LAYOUT LEFT_PAREN layout_qualifier_id_list RIGHT_PAREN */
-#line 1321 "MachineIndependent/glslang.y"
+ case 148: /* layout_qualifier: LAYOUT LEFT_PAREN layout_qualifier_id_list RIGHT_PAREN */
+#line 1339 "MachineIndependent/glslang.y"
{
(yyval.interm.type) = (yyvsp[-1].interm.type);
}
-#line 6679 "MachineIndependent/glslang_tab.cpp"
+#line 6725 "MachineIndependent/glslang_tab.cpp"
break;
- case 147: /* layout_qualifier_id_list: layout_qualifier_id */
-#line 1327 "MachineIndependent/glslang.y"
+ case 149: /* layout_qualifier_id_list: layout_qualifier_id */
+#line 1345 "MachineIndependent/glslang.y"
{
(yyval.interm.type) = (yyvsp[0].interm.type);
}
-#line 6687 "MachineIndependent/glslang_tab.cpp"
+#line 6733 "MachineIndependent/glslang_tab.cpp"
break;
- case 148: /* layout_qualifier_id_list: layout_qualifier_id_list COMMA layout_qualifier_id */
-#line 1330 "MachineIndependent/glslang.y"
+ case 150: /* layout_qualifier_id_list: layout_qualifier_id_list COMMA layout_qualifier_id */
+#line 1348 "MachineIndependent/glslang.y"
{
(yyval.interm.type) = (yyvsp[-2].interm.type);
(yyval.interm.type).shaderQualifiers.merge((yyvsp[0].interm.type).shaderQualifiers);
parseContext.mergeObjectLayoutQualifiers((yyval.interm.type).qualifier, (yyvsp[0].interm.type).qualifier, false);
}
-#line 6697 "MachineIndependent/glslang_tab.cpp"
+#line 6743 "MachineIndependent/glslang_tab.cpp"
break;
- case 149: /* layout_qualifier_id: IDENTIFIER */
-#line 1337 "MachineIndependent/glslang.y"
+ case 151: /* layout_qualifier_id: IDENTIFIER */
+#line 1355 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc);
parseContext.setLayoutQualifier((yyvsp[0].lex).loc, (yyval.interm.type), *(yyvsp[0].lex).string);
}
-#line 6706 "MachineIndependent/glslang_tab.cpp"
+#line 6752 "MachineIndependent/glslang_tab.cpp"
break;
- case 150: /* layout_qualifier_id: IDENTIFIER EQUAL constant_expression */
-#line 1341 "MachineIndependent/glslang.y"
+ case 152: /* layout_qualifier_id: IDENTIFIER EQUAL constant_expression */
+#line 1359 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[-2].lex).loc);
parseContext.setLayoutQualifier((yyvsp[-2].lex).loc, (yyval.interm.type), *(yyvsp[-2].lex).string, (yyvsp[0].interm.intermTypedNode));
}
-#line 6715 "MachineIndependent/glslang_tab.cpp"
+#line 6761 "MachineIndependent/glslang_tab.cpp"
break;
- case 151: /* layout_qualifier_id: SHARED */
-#line 1345 "MachineIndependent/glslang.y"
+ case 153: /* layout_qualifier_id: SHARED */
+#line 1363 "MachineIndependent/glslang.y"
{ // because "shared" is both an identifier and a keyword
(yyval.interm.type).init((yyvsp[0].lex).loc);
TString strShared("shared");
parseContext.setLayoutQualifier((yyvsp[0].lex).loc, (yyval.interm.type), strShared);
}
-#line 6725 "MachineIndependent/glslang_tab.cpp"
+#line 6771 "MachineIndependent/glslang_tab.cpp"
break;
- case 152: /* precise_qualifier: PRECISE */
-#line 1354 "MachineIndependent/glslang.y"
+ case 154: /* precise_qualifier: PRECISE */
+#line 1372 "MachineIndependent/glslang.y"
{
parseContext.profileRequires((yyval.interm.type).loc, ECoreProfile | ECompatibilityProfile, 400, E_GL_ARB_gpu_shader5, "precise");
parseContext.profileRequires((yyvsp[0].lex).loc, EEsProfile, 320, Num_AEP_gpu_shader5, AEP_gpu_shader5, "precise");
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.noContraction = true;
}
-#line 6736 "MachineIndependent/glslang_tab.cpp"
+#line 6782 "MachineIndependent/glslang_tab.cpp"
break;
- case 153: /* type_qualifier: single_type_qualifier */
-#line 1364 "MachineIndependent/glslang.y"
+ case 155: /* type_qualifier: single_type_qualifier */
+#line 1382 "MachineIndependent/glslang.y"
{
(yyval.interm.type) = (yyvsp[0].interm.type);
}
-#line 6744 "MachineIndependent/glslang_tab.cpp"
+#line 6790 "MachineIndependent/glslang_tab.cpp"
break;
- case 154: /* type_qualifier: type_qualifier single_type_qualifier */
-#line 1367 "MachineIndependent/glslang.y"
+ case 156: /* type_qualifier: type_qualifier single_type_qualifier */
+#line 1385 "MachineIndependent/glslang.y"
{
(yyval.interm.type) = (yyvsp[-1].interm.type);
if ((yyval.interm.type).basicType == EbtVoid)
@@ -6753,151 +6799,151 @@ yyreduce:
(yyval.interm.type).shaderQualifiers.merge((yyvsp[0].interm.type).shaderQualifiers);
parseContext.mergeQualifiers((yyval.interm.type).loc, (yyval.interm.type).qualifier, (yyvsp[0].interm.type).qualifier, false);
}
-#line 6757 "MachineIndependent/glslang_tab.cpp"
+#line 6803 "MachineIndependent/glslang_tab.cpp"
break;
- case 155: /* single_type_qualifier: storage_qualifier */
-#line 1378 "MachineIndependent/glslang.y"
+ case 157: /* single_type_qualifier: storage_qualifier */
+#line 1396 "MachineIndependent/glslang.y"
{
(yyval.interm.type) = (yyvsp[0].interm.type);
}
-#line 6765 "MachineIndependent/glslang_tab.cpp"
+#line 6811 "MachineIndependent/glslang_tab.cpp"
break;
- case 156: /* single_type_qualifier: layout_qualifier */
-#line 1381 "MachineIndependent/glslang.y"
+ case 158: /* single_type_qualifier: layout_qualifier */
+#line 1399 "MachineIndependent/glslang.y"
{
(yyval.interm.type) = (yyvsp[0].interm.type);
}
-#line 6773 "MachineIndependent/glslang_tab.cpp"
+#line 6819 "MachineIndependent/glslang_tab.cpp"
break;
- case 157: /* single_type_qualifier: precision_qualifier */
-#line 1384 "MachineIndependent/glslang.y"
+ case 159: /* single_type_qualifier: precision_qualifier */
+#line 1402 "MachineIndependent/glslang.y"
{
parseContext.checkPrecisionQualifier((yyvsp[0].interm.type).loc, (yyvsp[0].interm.type).qualifier.precision);
(yyval.interm.type) = (yyvsp[0].interm.type);
}
-#line 6782 "MachineIndependent/glslang_tab.cpp"
+#line 6828 "MachineIndependent/glslang_tab.cpp"
break;
- case 158: /* single_type_qualifier: interpolation_qualifier */
-#line 1388 "MachineIndependent/glslang.y"
+ case 160: /* single_type_qualifier: interpolation_qualifier */
+#line 1406 "MachineIndependent/glslang.y"
{
// allow inheritance of storage qualifier from block declaration
(yyval.interm.type) = (yyvsp[0].interm.type);
}
-#line 6791 "MachineIndependent/glslang_tab.cpp"
+#line 6837 "MachineIndependent/glslang_tab.cpp"
break;
- case 159: /* single_type_qualifier: invariant_qualifier */
-#line 1392 "MachineIndependent/glslang.y"
+ case 161: /* single_type_qualifier: invariant_qualifier */
+#line 1410 "MachineIndependent/glslang.y"
{
// allow inheritance of storage qualifier from block declaration
(yyval.interm.type) = (yyvsp[0].interm.type);
}
-#line 6800 "MachineIndependent/glslang_tab.cpp"
+#line 6846 "MachineIndependent/glslang_tab.cpp"
break;
- case 160: /* single_type_qualifier: precise_qualifier */
-#line 1397 "MachineIndependent/glslang.y"
+ case 162: /* single_type_qualifier: precise_qualifier */
+#line 1415 "MachineIndependent/glslang.y"
{
// allow inheritance of storage qualifier from block declaration
(yyval.interm.type) = (yyvsp[0].interm.type);
}
-#line 6809 "MachineIndependent/glslang_tab.cpp"
+#line 6855 "MachineIndependent/glslang_tab.cpp"
break;
- case 161: /* single_type_qualifier: non_uniform_qualifier */
-#line 1401 "MachineIndependent/glslang.y"
+ case 163: /* single_type_qualifier: non_uniform_qualifier */
+#line 1419 "MachineIndependent/glslang.y"
{
(yyval.interm.type) = (yyvsp[0].interm.type);
}
-#line 6817 "MachineIndependent/glslang_tab.cpp"
+#line 6863 "MachineIndependent/glslang_tab.cpp"
break;
- case 162: /* single_type_qualifier: spirv_storage_class_qualifier */
-#line 1404 "MachineIndependent/glslang.y"
+ case 164: /* single_type_qualifier: spirv_storage_class_qualifier */
+#line 1422 "MachineIndependent/glslang.y"
{
parseContext.globalCheck((yyvsp[0].interm.type).loc, "spirv_storage_class");
parseContext.requireExtensions((yyvsp[0].interm.type).loc, 1, &E_GL_EXT_spirv_intrinsics, "SPIR-V storage class qualifier");
(yyval.interm.type) = (yyvsp[0].interm.type);
}
-#line 6827 "MachineIndependent/glslang_tab.cpp"
+#line 6873 "MachineIndependent/glslang_tab.cpp"
break;
- case 163: /* single_type_qualifier: spirv_decorate_qualifier */
-#line 1409 "MachineIndependent/glslang.y"
+ case 165: /* single_type_qualifier: spirv_decorate_qualifier */
+#line 1427 "MachineIndependent/glslang.y"
{
parseContext.requireExtensions((yyvsp[0].interm.type).loc, 1, &E_GL_EXT_spirv_intrinsics, "SPIR-V decorate qualifier");
(yyval.interm.type) = (yyvsp[0].interm.type);
}
-#line 6836 "MachineIndependent/glslang_tab.cpp"
+#line 6882 "MachineIndependent/glslang_tab.cpp"
break;
- case 164: /* single_type_qualifier: SPIRV_BY_REFERENCE */
-#line 1413 "MachineIndependent/glslang.y"
+ case 166: /* single_type_qualifier: SPIRV_BY_REFERENCE */
+#line 1431 "MachineIndependent/glslang.y"
{
parseContext.requireExtensions((yyvsp[0].lex).loc, 1, &E_GL_EXT_spirv_intrinsics, "spirv_by_reference");
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.setSpirvByReference();
}
-#line 6846 "MachineIndependent/glslang_tab.cpp"
+#line 6892 "MachineIndependent/glslang_tab.cpp"
break;
- case 165: /* single_type_qualifier: SPIRV_LITERAL */
-#line 1418 "MachineIndependent/glslang.y"
+ case 167: /* single_type_qualifier: SPIRV_LITERAL */
+#line 1436 "MachineIndependent/glslang.y"
{
parseContext.requireExtensions((yyvsp[0].lex).loc, 1, &E_GL_EXT_spirv_intrinsics, "spirv_by_literal");
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.setSpirvLiteral();
}
-#line 6856 "MachineIndependent/glslang_tab.cpp"
+#line 6902 "MachineIndependent/glslang_tab.cpp"
break;
- case 166: /* storage_qualifier: CONST */
-#line 1427 "MachineIndependent/glslang.y"
+ case 168: /* storage_qualifier: CONST */
+#line 1445 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.storage = EvqConst; // will later turn into EvqConstReadOnly, if the initializer is not constant
}
-#line 6865 "MachineIndependent/glslang_tab.cpp"
+#line 6911 "MachineIndependent/glslang_tab.cpp"
break;
- case 167: /* storage_qualifier: INOUT */
-#line 1431 "MachineIndependent/glslang.y"
+ case 169: /* storage_qualifier: INOUT */
+#line 1449 "MachineIndependent/glslang.y"
{
parseContext.globalCheck((yyvsp[0].lex).loc, "inout");
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.storage = EvqInOut;
}
-#line 6875 "MachineIndependent/glslang_tab.cpp"
+#line 6921 "MachineIndependent/glslang_tab.cpp"
break;
- case 168: /* storage_qualifier: IN */
-#line 1436 "MachineIndependent/glslang.y"
+ case 170: /* storage_qualifier: IN */
+#line 1454 "MachineIndependent/glslang.y"
{
parseContext.globalCheck((yyvsp[0].lex).loc, "in");
(yyval.interm.type).init((yyvsp[0].lex).loc);
// whether this is a parameter "in" or a pipeline "in" will get sorted out a bit later
(yyval.interm.type).qualifier.storage = EvqIn;
}
-#line 6886 "MachineIndependent/glslang_tab.cpp"
+#line 6932 "MachineIndependent/glslang_tab.cpp"
break;
- case 169: /* storage_qualifier: OUT */
-#line 1442 "MachineIndependent/glslang.y"
+ case 171: /* storage_qualifier: OUT */
+#line 1460 "MachineIndependent/glslang.y"
{
parseContext.globalCheck((yyvsp[0].lex).loc, "out");
(yyval.interm.type).init((yyvsp[0].lex).loc);
// whether this is a parameter "out" or a pipeline "out" will get sorted out a bit later
(yyval.interm.type).qualifier.storage = EvqOut;
}
-#line 6897 "MachineIndependent/glslang_tab.cpp"
+#line 6943 "MachineIndependent/glslang_tab.cpp"
break;
- case 170: /* storage_qualifier: CENTROID */
-#line 1448 "MachineIndependent/glslang.y"
+ case 172: /* storage_qualifier: CENTROID */
+#line 1466 "MachineIndependent/glslang.y"
{
parseContext.profileRequires((yyvsp[0].lex).loc, ENoProfile, 120, 0, "centroid");
parseContext.profileRequires((yyvsp[0].lex).loc, EEsProfile, 300, 0, "centroid");
@@ -6905,44 +6951,44 @@ yyreduce:
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.centroid = true;
}
-#line 6909 "MachineIndependent/glslang_tab.cpp"
+#line 6955 "MachineIndependent/glslang_tab.cpp"
break;
- case 171: /* storage_qualifier: UNIFORM */
-#line 1455 "MachineIndependent/glslang.y"
+ case 173: /* storage_qualifier: UNIFORM */
+#line 1473 "MachineIndependent/glslang.y"
{
parseContext.globalCheck((yyvsp[0].lex).loc, "uniform");
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.storage = EvqUniform;
}
-#line 6919 "MachineIndependent/glslang_tab.cpp"
+#line 6965 "MachineIndependent/glslang_tab.cpp"
break;
- case 172: /* storage_qualifier: SHARED */
-#line 1460 "MachineIndependent/glslang.y"
+ case 174: /* storage_qualifier: SHARED */
+#line 1478 "MachineIndependent/glslang.y"
{
parseContext.globalCheck((yyvsp[0].lex).loc, "shared");
parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, 430, E_GL_ARB_compute_shader, "shared");
parseContext.profileRequires((yyvsp[0].lex).loc, EEsProfile, 310, 0, "shared");
- parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangComputeMask | EShLangMeshNVMask | EShLangTaskNVMask), "shared");
+ parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangComputeMask | EShLangMeshMask | EShLangTaskMask), "shared");
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.storage = EvqShared;
}
-#line 6932 "MachineIndependent/glslang_tab.cpp"
+#line 6978 "MachineIndependent/glslang_tab.cpp"
break;
- case 173: /* storage_qualifier: BUFFER */
-#line 1468 "MachineIndependent/glslang.y"
+ case 175: /* storage_qualifier: BUFFER */
+#line 1486 "MachineIndependent/glslang.y"
{
parseContext.globalCheck((yyvsp[0].lex).loc, "buffer");
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.storage = EvqBuffer;
}
-#line 6942 "MachineIndependent/glslang_tab.cpp"
+#line 6988 "MachineIndependent/glslang_tab.cpp"
break;
- case 174: /* storage_qualifier: ATTRIBUTE */
-#line 1474 "MachineIndependent/glslang.y"
+ case 176: /* storage_qualifier: ATTRIBUTE */
+#line 1492 "MachineIndependent/glslang.y"
{
parseContext.requireStage((yyvsp[0].lex).loc, EShLangVertex, "attribute");
parseContext.checkDeprecated((yyvsp[0].lex).loc, ECoreProfile, 130, "attribute");
@@ -6955,11 +7001,11 @@ yyreduce:
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.storage = EvqVaryingIn;
}
-#line 6959 "MachineIndependent/glslang_tab.cpp"
+#line 7005 "MachineIndependent/glslang_tab.cpp"
break;
- case 175: /* storage_qualifier: VARYING */
-#line 1486 "MachineIndependent/glslang.y"
+ case 177: /* storage_qualifier: VARYING */
+#line 1504 "MachineIndependent/glslang.y"
{
parseContext.checkDeprecated((yyvsp[0].lex).loc, ENoProfile, 130, "varying");
parseContext.checkDeprecated((yyvsp[0].lex).loc, ECoreProfile, 130, "varying");
@@ -6974,32 +7020,32 @@ yyreduce:
else
(yyval.interm.type).qualifier.storage = EvqVaryingIn;
}
-#line 6978 "MachineIndependent/glslang_tab.cpp"
+#line 7024 "MachineIndependent/glslang_tab.cpp"
break;
- case 176: /* storage_qualifier: PATCH */
-#line 1500 "MachineIndependent/glslang.y"
+ case 178: /* storage_qualifier: PATCH */
+#line 1518 "MachineIndependent/glslang.y"
{
parseContext.globalCheck((yyvsp[0].lex).loc, "patch");
parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangTessControlMask | EShLangTessEvaluationMask), "patch");
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.patch = true;
}
-#line 6989 "MachineIndependent/glslang_tab.cpp"
+#line 7035 "MachineIndependent/glslang_tab.cpp"
break;
- case 177: /* storage_qualifier: SAMPLE */
-#line 1506 "MachineIndependent/glslang.y"
+ case 179: /* storage_qualifier: SAMPLE */
+#line 1524 "MachineIndependent/glslang.y"
{
parseContext.globalCheck((yyvsp[0].lex).loc, "sample");
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.sample = true;
}
-#line 6999 "MachineIndependent/glslang_tab.cpp"
+#line 7045 "MachineIndependent/glslang_tab.cpp"
break;
- case 178: /* storage_qualifier: HITATTRNV */
-#line 1511 "MachineIndependent/glslang.y"
+ case 180: /* storage_qualifier: HITATTRNV */
+#line 1529 "MachineIndependent/glslang.y"
{
parseContext.globalCheck((yyvsp[0].lex).loc, "hitAttributeNV");
parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangIntersectMask | EShLangClosestHitMask
@@ -7008,11 +7054,11 @@ yyreduce:
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.storage = EvqHitAttr;
}
-#line 7012 "MachineIndependent/glslang_tab.cpp"
+#line 7058 "MachineIndependent/glslang_tab.cpp"
break;
- case 179: /* storage_qualifier: HITATTREXT */
-#line 1519 "MachineIndependent/glslang.y"
+ case 181: /* storage_qualifier: HITATTREXT */
+#line 1537 "MachineIndependent/glslang.y"
{
parseContext.globalCheck((yyvsp[0].lex).loc, "hitAttributeEXT");
parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangIntersectMask | EShLangClosestHitMask
@@ -7021,11 +7067,11 @@ yyreduce:
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.storage = EvqHitAttr;
}
-#line 7025 "MachineIndependent/glslang_tab.cpp"
+#line 7071 "MachineIndependent/glslang_tab.cpp"
break;
- case 180: /* storage_qualifier: PAYLOADNV */
-#line 1527 "MachineIndependent/glslang.y"
+ case 182: /* storage_qualifier: PAYLOADNV */
+#line 1545 "MachineIndependent/glslang.y"
{
parseContext.globalCheck((yyvsp[0].lex).loc, "rayPayloadNV");
parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangRayGenMask | EShLangClosestHitMask |
@@ -7034,11 +7080,11 @@ yyreduce:
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.storage = EvqPayload;
}
-#line 7038 "MachineIndependent/glslang_tab.cpp"
+#line 7084 "MachineIndependent/glslang_tab.cpp"
break;
- case 181: /* storage_qualifier: PAYLOADEXT */
-#line 1535 "MachineIndependent/glslang.y"
+ case 183: /* storage_qualifier: PAYLOADEXT */
+#line 1553 "MachineIndependent/glslang.y"
{
parseContext.globalCheck((yyvsp[0].lex).loc, "rayPayloadEXT");
parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangRayGenMask | EShLangClosestHitMask |
@@ -7047,11 +7093,11 @@ yyreduce:
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.storage = EvqPayload;
}
-#line 7051 "MachineIndependent/glslang_tab.cpp"
+#line 7097 "MachineIndependent/glslang_tab.cpp"
break;
- case 182: /* storage_qualifier: PAYLOADINNV */
-#line 1543 "MachineIndependent/glslang.y"
+ case 184: /* storage_qualifier: PAYLOADINNV */
+#line 1561 "MachineIndependent/glslang.y"
{
parseContext.globalCheck((yyvsp[0].lex).loc, "rayPayloadInNV");
parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangClosestHitMask |
@@ -7060,11 +7106,11 @@ yyreduce:
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.storage = EvqPayloadIn;
}
-#line 7064 "MachineIndependent/glslang_tab.cpp"
+#line 7110 "MachineIndependent/glslang_tab.cpp"
break;
- case 183: /* storage_qualifier: PAYLOADINEXT */
-#line 1551 "MachineIndependent/glslang.y"
+ case 185: /* storage_qualifier: PAYLOADINEXT */
+#line 1569 "MachineIndependent/glslang.y"
{
parseContext.globalCheck((yyvsp[0].lex).loc, "rayPayloadInEXT");
parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangClosestHitMask |
@@ -7073,11 +7119,11 @@ yyreduce:
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.storage = EvqPayloadIn;
}
-#line 7077 "MachineIndependent/glslang_tab.cpp"
+#line 7123 "MachineIndependent/glslang_tab.cpp"
break;
- case 184: /* storage_qualifier: CALLDATANV */
-#line 1559 "MachineIndependent/glslang.y"
+ case 186: /* storage_qualifier: CALLDATANV */
+#line 1577 "MachineIndependent/glslang.y"
{
parseContext.globalCheck((yyvsp[0].lex).loc, "callableDataNV");
parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangRayGenMask |
@@ -7086,11 +7132,11 @@ yyreduce:
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.storage = EvqCallableData;
}
-#line 7090 "MachineIndependent/glslang_tab.cpp"
+#line 7136 "MachineIndependent/glslang_tab.cpp"
break;
- case 185: /* storage_qualifier: CALLDATAEXT */
-#line 1567 "MachineIndependent/glslang.y"
+ case 187: /* storage_qualifier: CALLDATAEXT */
+#line 1585 "MachineIndependent/glslang.y"
{
parseContext.globalCheck((yyvsp[0].lex).loc, "callableDataEXT");
parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangRayGenMask |
@@ -7099,11 +7145,11 @@ yyreduce:
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.storage = EvqCallableData;
}
-#line 7103 "MachineIndependent/glslang_tab.cpp"
+#line 7149 "MachineIndependent/glslang_tab.cpp"
break;
- case 186: /* storage_qualifier: CALLDATAINNV */
-#line 1575 "MachineIndependent/glslang.y"
+ case 188: /* storage_qualifier: CALLDATAINNV */
+#line 1593 "MachineIndependent/glslang.y"
{
parseContext.globalCheck((yyvsp[0].lex).loc, "callableDataInNV");
parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangCallableMask), "callableDataInNV");
@@ -7111,11 +7157,11 @@ yyreduce:
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.storage = EvqCallableDataIn;
}
-#line 7115 "MachineIndependent/glslang_tab.cpp"
+#line 7161 "MachineIndependent/glslang_tab.cpp"
break;
- case 187: /* storage_qualifier: CALLDATAINEXT */
-#line 1582 "MachineIndependent/glslang.y"
+ case 189: /* storage_qualifier: CALLDATAINEXT */
+#line 1600 "MachineIndependent/glslang.y"
{
parseContext.globalCheck((yyvsp[0].lex).loc, "callableDataInEXT");
parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangCallableMask), "callableDataInEXT");
@@ -7123,175 +7169,187 @@ yyreduce:
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.storage = EvqCallableDataIn;
}
-#line 7127 "MachineIndependent/glslang_tab.cpp"
+#line 7173 "MachineIndependent/glslang_tab.cpp"
break;
- case 188: /* storage_qualifier: COHERENT */
-#line 1589 "MachineIndependent/glslang.y"
+ case 190: /* storage_qualifier: COHERENT */
+#line 1607 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.coherent = true;
}
-#line 7136 "MachineIndependent/glslang_tab.cpp"
+#line 7182 "MachineIndependent/glslang_tab.cpp"
break;
- case 189: /* storage_qualifier: DEVICECOHERENT */
-#line 1593 "MachineIndependent/glslang.y"
+ case 191: /* storage_qualifier: DEVICECOHERENT */
+#line 1611 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc);
parseContext.requireExtensions((yyvsp[0].lex).loc, 1, &E_GL_KHR_memory_scope_semantics, "devicecoherent");
(yyval.interm.type).qualifier.devicecoherent = true;
}
-#line 7146 "MachineIndependent/glslang_tab.cpp"
+#line 7192 "MachineIndependent/glslang_tab.cpp"
break;
- case 190: /* storage_qualifier: QUEUEFAMILYCOHERENT */
-#line 1598 "MachineIndependent/glslang.y"
+ case 192: /* storage_qualifier: QUEUEFAMILYCOHERENT */
+#line 1616 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc);
parseContext.requireExtensions((yyvsp[0].lex).loc, 1, &E_GL_KHR_memory_scope_semantics, "queuefamilycoherent");
(yyval.interm.type).qualifier.queuefamilycoherent = true;
}
-#line 7156 "MachineIndependent/glslang_tab.cpp"
+#line 7202 "MachineIndependent/glslang_tab.cpp"
break;
- case 191: /* storage_qualifier: WORKGROUPCOHERENT */
-#line 1603 "MachineIndependent/glslang.y"
+ case 193: /* storage_qualifier: WORKGROUPCOHERENT */
+#line 1621 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc);
parseContext.requireExtensions((yyvsp[0].lex).loc, 1, &E_GL_KHR_memory_scope_semantics, "workgroupcoherent");
(yyval.interm.type).qualifier.workgroupcoherent = true;
}
-#line 7166 "MachineIndependent/glslang_tab.cpp"
+#line 7212 "MachineIndependent/glslang_tab.cpp"
break;
- case 192: /* storage_qualifier: SUBGROUPCOHERENT */
-#line 1608 "MachineIndependent/glslang.y"
+ case 194: /* storage_qualifier: SUBGROUPCOHERENT */
+#line 1626 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc);
parseContext.requireExtensions((yyvsp[0].lex).loc, 1, &E_GL_KHR_memory_scope_semantics, "subgroupcoherent");
(yyval.interm.type).qualifier.subgroupcoherent = true;
}
-#line 7176 "MachineIndependent/glslang_tab.cpp"
+#line 7222 "MachineIndependent/glslang_tab.cpp"
break;
- case 193: /* storage_qualifier: NONPRIVATE */
-#line 1613 "MachineIndependent/glslang.y"
+ case 195: /* storage_qualifier: NONPRIVATE */
+#line 1631 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc);
parseContext.requireExtensions((yyvsp[0].lex).loc, 1, &E_GL_KHR_memory_scope_semantics, "nonprivate");
(yyval.interm.type).qualifier.nonprivate = true;
}
-#line 7186 "MachineIndependent/glslang_tab.cpp"
+#line 7232 "MachineIndependent/glslang_tab.cpp"
break;
- case 194: /* storage_qualifier: SHADERCALLCOHERENT */
-#line 1618 "MachineIndependent/glslang.y"
+ case 196: /* storage_qualifier: SHADERCALLCOHERENT */
+#line 1636 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc);
parseContext.requireExtensions((yyvsp[0].lex).loc, 1, &E_GL_EXT_ray_tracing, "shadercallcoherent");
(yyval.interm.type).qualifier.shadercallcoherent = true;
}
-#line 7196 "MachineIndependent/glslang_tab.cpp"
+#line 7242 "MachineIndependent/glslang_tab.cpp"
break;
- case 195: /* storage_qualifier: VOLATILE */
-#line 1623 "MachineIndependent/glslang.y"
+ case 197: /* storage_qualifier: VOLATILE */
+#line 1641 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.volatil = true;
}
-#line 7205 "MachineIndependent/glslang_tab.cpp"
+#line 7251 "MachineIndependent/glslang_tab.cpp"
break;
- case 196: /* storage_qualifier: RESTRICT */
-#line 1627 "MachineIndependent/glslang.y"
+ case 198: /* storage_qualifier: RESTRICT */
+#line 1645 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.restrict = true;
}
-#line 7214 "MachineIndependent/glslang_tab.cpp"
+#line 7260 "MachineIndependent/glslang_tab.cpp"
break;
- case 197: /* storage_qualifier: READONLY */
-#line 1631 "MachineIndependent/glslang.y"
+ case 199: /* storage_qualifier: READONLY */
+#line 1649 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.readonly = true;
}
-#line 7223 "MachineIndependent/glslang_tab.cpp"
+#line 7269 "MachineIndependent/glslang_tab.cpp"
break;
- case 198: /* storage_qualifier: WRITEONLY */
-#line 1635 "MachineIndependent/glslang.y"
+ case 200: /* storage_qualifier: WRITEONLY */
+#line 1653 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.writeonly = true;
}
-#line 7232 "MachineIndependent/glslang_tab.cpp"
+#line 7278 "MachineIndependent/glslang_tab.cpp"
break;
- case 199: /* storage_qualifier: SUBROUTINE */
-#line 1639 "MachineIndependent/glslang.y"
+ case 201: /* storage_qualifier: SUBROUTINE */
+#line 1657 "MachineIndependent/glslang.y"
{
parseContext.spvRemoved((yyvsp[0].lex).loc, "subroutine");
parseContext.globalCheck((yyvsp[0].lex).loc, "subroutine");
parseContext.unimplemented((yyvsp[0].lex).loc, "subroutine");
(yyval.interm.type).init((yyvsp[0].lex).loc);
}
-#line 7243 "MachineIndependent/glslang_tab.cpp"
+#line 7289 "MachineIndependent/glslang_tab.cpp"
break;
- case 200: /* storage_qualifier: SUBROUTINE LEFT_PAREN type_name_list RIGHT_PAREN */
-#line 1645 "MachineIndependent/glslang.y"
+ case 202: /* storage_qualifier: SUBROUTINE LEFT_PAREN type_name_list RIGHT_PAREN */
+#line 1663 "MachineIndependent/glslang.y"
{
parseContext.spvRemoved((yyvsp[-3].lex).loc, "subroutine");
parseContext.globalCheck((yyvsp[-3].lex).loc, "subroutine");
parseContext.unimplemented((yyvsp[-3].lex).loc, "subroutine");
(yyval.interm.type).init((yyvsp[-3].lex).loc);
}
-#line 7254 "MachineIndependent/glslang_tab.cpp"
+#line 7300 "MachineIndependent/glslang_tab.cpp"
break;
- case 201: /* non_uniform_qualifier: NONUNIFORM */
-#line 1656 "MachineIndependent/glslang.y"
+ case 203: /* storage_qualifier: TASKPAYLOADWORKGROUPEXT */
+#line 1669 "MachineIndependent/glslang.y"
+ {
+ // No need for profile version or extension check. Shader stage already checks both.
+ parseContext.globalCheck((yyvsp[0].lex).loc, "taskPayloadSharedEXT");
+ parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangTaskMask | EShLangMeshMask), "taskPayloadSharedEXT ");
+ (yyval.interm.type).init((yyvsp[0].lex).loc);
+ (yyval.interm.type).qualifier.storage = EvqtaskPayloadSharedEXT;
+ }
+#line 7312 "MachineIndependent/glslang_tab.cpp"
+ break;
+
+ case 204: /* non_uniform_qualifier: NONUNIFORM */
+#line 1681 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.nonUniform = true;
}
-#line 7263 "MachineIndependent/glslang_tab.cpp"
+#line 7321 "MachineIndependent/glslang_tab.cpp"
break;
- case 202: /* type_name_list: IDENTIFIER */
-#line 1663 "MachineIndependent/glslang.y"
+ case 205: /* type_name_list: IDENTIFIER */
+#line 1688 "MachineIndependent/glslang.y"
{
// TODO
}
-#line 7271 "MachineIndependent/glslang_tab.cpp"
+#line 7329 "MachineIndependent/glslang_tab.cpp"
break;
- case 203: /* type_name_list: type_name_list COMMA IDENTIFIER */
-#line 1666 "MachineIndependent/glslang.y"
+ case 206: /* type_name_list: type_name_list COMMA IDENTIFIER */
+#line 1691 "MachineIndependent/glslang.y"
{
// TODO: 4.0 semantics: subroutines
// 1) make sure each identifier is a type declared earlier with SUBROUTINE
// 2) save all of the identifiers for future comparison with the declared function
}
-#line 7281 "MachineIndependent/glslang_tab.cpp"
+#line 7339 "MachineIndependent/glslang_tab.cpp"
break;
- case 204: /* type_specifier: type_specifier_nonarray type_parameter_specifier_opt */
-#line 1675 "MachineIndependent/glslang.y"
+ case 207: /* type_specifier: type_specifier_nonarray type_parameter_specifier_opt */
+#line 1700 "MachineIndependent/glslang.y"
{
(yyval.interm.type) = (yyvsp[-1].interm.type);
(yyval.interm.type).qualifier.precision = parseContext.getDefaultPrecision((yyval.interm.type));
(yyval.interm.type).typeParameters = (yyvsp[0].interm.typeParameters);
}
-#line 7291 "MachineIndependent/glslang_tab.cpp"
+#line 7349 "MachineIndependent/glslang_tab.cpp"
break;
- case 205: /* type_specifier: type_specifier_nonarray type_parameter_specifier_opt array_specifier */
-#line 1680 "MachineIndependent/glslang.y"
+ case 208: /* type_specifier: type_specifier_nonarray type_parameter_specifier_opt array_specifier */
+#line 1705 "MachineIndependent/glslang.y"
{
parseContext.arrayOfArrayVersionCheck((yyvsp[0].interm).loc, (yyvsp[0].interm).arraySizes);
(yyval.interm.type) = (yyvsp[-2].interm.type);
@@ -7299,21 +7357,21 @@ yyreduce:
(yyval.interm.type).typeParameters = (yyvsp[-1].interm.typeParameters);
(yyval.interm.type).arraySizes = (yyvsp[0].interm).arraySizes;
}
-#line 7303 "MachineIndependent/glslang_tab.cpp"
+#line 7361 "MachineIndependent/glslang_tab.cpp"
break;
- case 206: /* array_specifier: LEFT_BRACKET RIGHT_BRACKET */
-#line 1690 "MachineIndependent/glslang.y"
+ case 209: /* array_specifier: LEFT_BRACKET RIGHT_BRACKET */
+#line 1715 "MachineIndependent/glslang.y"
{
(yyval.interm).loc = (yyvsp[-1].lex).loc;
(yyval.interm).arraySizes = new TArraySizes;
(yyval.interm).arraySizes->addInnerSize();
}
-#line 7313 "MachineIndependent/glslang_tab.cpp"
+#line 7371 "MachineIndependent/glslang_tab.cpp"
break;
- case 207: /* array_specifier: LEFT_BRACKET conditional_expression RIGHT_BRACKET */
-#line 1695 "MachineIndependent/glslang.y"
+ case 210: /* array_specifier: LEFT_BRACKET conditional_expression RIGHT_BRACKET */
+#line 1720 "MachineIndependent/glslang.y"
{
(yyval.interm).loc = (yyvsp[-2].lex).loc;
(yyval.interm).arraySizes = new TArraySizes;
@@ -7322,20 +7380,20 @@ yyreduce:
parseContext.arraySizeCheck((yyvsp[-1].interm.intermTypedNode)->getLoc(), (yyvsp[-1].interm.intermTypedNode), size, "array size");
(yyval.interm).arraySizes->addInnerSize(size);
}
-#line 7326 "MachineIndependent/glslang_tab.cpp"
+#line 7384 "MachineIndependent/glslang_tab.cpp"
break;
- case 208: /* array_specifier: array_specifier LEFT_BRACKET RIGHT_BRACKET */
-#line 1703 "MachineIndependent/glslang.y"
+ case 211: /* array_specifier: array_specifier LEFT_BRACKET RIGHT_BRACKET */
+#line 1728 "MachineIndependent/glslang.y"
{
(yyval.interm) = (yyvsp[-2].interm);
(yyval.interm).arraySizes->addInnerSize();
}
-#line 7335 "MachineIndependent/glslang_tab.cpp"
+#line 7393 "MachineIndependent/glslang_tab.cpp"
break;
- case 209: /* array_specifier: array_specifier LEFT_BRACKET conditional_expression RIGHT_BRACKET */
-#line 1707 "MachineIndependent/glslang.y"
+ case 212: /* array_specifier: array_specifier LEFT_BRACKET conditional_expression RIGHT_BRACKET */
+#line 1732 "MachineIndependent/glslang.y"
{
(yyval.interm) = (yyvsp[-3].interm);
@@ -7343,35 +7401,35 @@ yyreduce:
parseContext.arraySizeCheck((yyvsp[-1].interm.intermTypedNode)->getLoc(), (yyvsp[-1].interm.intermTypedNode), size, "array size");
(yyval.interm).arraySizes->addInnerSize(size);
}
-#line 7347 "MachineIndependent/glslang_tab.cpp"
+#line 7405 "MachineIndependent/glslang_tab.cpp"
break;
- case 210: /* type_parameter_specifier_opt: type_parameter_specifier */
-#line 1717 "MachineIndependent/glslang.y"
+ case 213: /* type_parameter_specifier_opt: type_parameter_specifier */
+#line 1742 "MachineIndependent/glslang.y"
{
(yyval.interm.typeParameters) = (yyvsp[0].interm.typeParameters);
}
-#line 7355 "MachineIndependent/glslang_tab.cpp"
+#line 7413 "MachineIndependent/glslang_tab.cpp"
break;
- case 211: /* type_parameter_specifier_opt: %empty */
-#line 1720 "MachineIndependent/glslang.y"
+ case 214: /* type_parameter_specifier_opt: %empty */
+#line 1745 "MachineIndependent/glslang.y"
{
(yyval.interm.typeParameters) = 0;
}
-#line 7363 "MachineIndependent/glslang_tab.cpp"
+#line 7421 "MachineIndependent/glslang_tab.cpp"
break;
- case 212: /* type_parameter_specifier: LEFT_ANGLE type_parameter_specifier_list RIGHT_ANGLE */
-#line 1726 "MachineIndependent/glslang.y"
+ case 215: /* type_parameter_specifier: LEFT_ANGLE type_parameter_specifier_list RIGHT_ANGLE */
+#line 1751 "MachineIndependent/glslang.y"
{
(yyval.interm.typeParameters) = (yyvsp[-1].interm.typeParameters);
}
-#line 7371 "MachineIndependent/glslang_tab.cpp"
+#line 7429 "MachineIndependent/glslang_tab.cpp"
break;
- case 213: /* type_parameter_specifier_list: unary_expression */
-#line 1732 "MachineIndependent/glslang.y"
+ case 216: /* type_parameter_specifier_list: unary_expression */
+#line 1757 "MachineIndependent/glslang.y"
{
(yyval.interm.typeParameters) = new TArraySizes;
@@ -7379,11 +7437,11 @@ yyreduce:
parseContext.arraySizeCheck((yyvsp[0].interm.intermTypedNode)->getLoc(), (yyvsp[0].interm.intermTypedNode), size, "type parameter");
(yyval.interm.typeParameters)->addInnerSize(size);
}
-#line 7383 "MachineIndependent/glslang_tab.cpp"
+#line 7441 "MachineIndependent/glslang_tab.cpp"
break;
- case 214: /* type_parameter_specifier_list: type_parameter_specifier_list COMMA unary_expression */
-#line 1739 "MachineIndependent/glslang.y"
+ case 217: /* type_parameter_specifier_list: type_parameter_specifier_list COMMA unary_expression */
+#line 1764 "MachineIndependent/glslang.y"
{
(yyval.interm.typeParameters) = (yyvsp[-2].interm.typeParameters);
@@ -7391,300 +7449,300 @@ yyreduce:
parseContext.arraySizeCheck((yyvsp[0].interm.intermTypedNode)->getLoc(), (yyvsp[0].interm.intermTypedNode), size, "type parameter");
(yyval.interm.typeParameters)->addInnerSize(size);
}
-#line 7395 "MachineIndependent/glslang_tab.cpp"
+#line 7453 "MachineIndependent/glslang_tab.cpp"
break;
- case 215: /* type_specifier_nonarray: VOID */
-#line 1749 "MachineIndependent/glslang.y"
+ case 218: /* type_specifier_nonarray: VOID */
+#line 1774 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtVoid;
}
-#line 7404 "MachineIndependent/glslang_tab.cpp"
+#line 7462 "MachineIndependent/glslang_tab.cpp"
break;
- case 216: /* type_specifier_nonarray: FLOAT */
-#line 1753 "MachineIndependent/glslang.y"
+ case 219: /* type_specifier_nonarray: FLOAT */
+#line 1778 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
}
-#line 7413 "MachineIndependent/glslang_tab.cpp"
+#line 7471 "MachineIndependent/glslang_tab.cpp"
break;
- case 217: /* type_specifier_nonarray: INT */
-#line 1757 "MachineIndependent/glslang.y"
+ case 220: /* type_specifier_nonarray: INT */
+#line 1782 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtInt;
}
-#line 7422 "MachineIndependent/glslang_tab.cpp"
+#line 7480 "MachineIndependent/glslang_tab.cpp"
break;
- case 218: /* type_specifier_nonarray: UINT */
-#line 1761 "MachineIndependent/glslang.y"
+ case 221: /* type_specifier_nonarray: UINT */
+#line 1786 "MachineIndependent/glslang.y"
{
parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "unsigned integer");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtUint;
}
-#line 7432 "MachineIndependent/glslang_tab.cpp"
+#line 7490 "MachineIndependent/glslang_tab.cpp"
break;
- case 219: /* type_specifier_nonarray: BOOL */
-#line 1766 "MachineIndependent/glslang.y"
+ case 222: /* type_specifier_nonarray: BOOL */
+#line 1791 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtBool;
}
-#line 7441 "MachineIndependent/glslang_tab.cpp"
+#line 7499 "MachineIndependent/glslang_tab.cpp"
break;
- case 220: /* type_specifier_nonarray: VEC2 */
-#line 1770 "MachineIndependent/glslang.y"
+ case 223: /* type_specifier_nonarray: VEC2 */
+#line 1795 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setVector(2);
}
-#line 7451 "MachineIndependent/glslang_tab.cpp"
+#line 7509 "MachineIndependent/glslang_tab.cpp"
break;
- case 221: /* type_specifier_nonarray: VEC3 */
-#line 1775 "MachineIndependent/glslang.y"
+ case 224: /* type_specifier_nonarray: VEC3 */
+#line 1800 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setVector(3);
}
-#line 7461 "MachineIndependent/glslang_tab.cpp"
+#line 7519 "MachineIndependent/glslang_tab.cpp"
break;
- case 222: /* type_specifier_nonarray: VEC4 */
-#line 1780 "MachineIndependent/glslang.y"
+ case 225: /* type_specifier_nonarray: VEC4 */
+#line 1805 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setVector(4);
}
-#line 7471 "MachineIndependent/glslang_tab.cpp"
+#line 7529 "MachineIndependent/glslang_tab.cpp"
break;
- case 223: /* type_specifier_nonarray: BVEC2 */
-#line 1785 "MachineIndependent/glslang.y"
+ case 226: /* type_specifier_nonarray: BVEC2 */
+#line 1810 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtBool;
(yyval.interm.type).setVector(2);
}
-#line 7481 "MachineIndependent/glslang_tab.cpp"
+#line 7539 "MachineIndependent/glslang_tab.cpp"
break;
- case 224: /* type_specifier_nonarray: BVEC3 */
-#line 1790 "MachineIndependent/glslang.y"
+ case 227: /* type_specifier_nonarray: BVEC3 */
+#line 1815 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtBool;
(yyval.interm.type).setVector(3);
}
-#line 7491 "MachineIndependent/glslang_tab.cpp"
+#line 7549 "MachineIndependent/glslang_tab.cpp"
break;
- case 225: /* type_specifier_nonarray: BVEC4 */
-#line 1795 "MachineIndependent/glslang.y"
+ case 228: /* type_specifier_nonarray: BVEC4 */
+#line 1820 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtBool;
(yyval.interm.type).setVector(4);
}
-#line 7501 "MachineIndependent/glslang_tab.cpp"
+#line 7559 "MachineIndependent/glslang_tab.cpp"
break;
- case 226: /* type_specifier_nonarray: IVEC2 */
-#line 1800 "MachineIndependent/glslang.y"
+ case 229: /* type_specifier_nonarray: IVEC2 */
+#line 1825 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtInt;
(yyval.interm.type).setVector(2);
}
-#line 7511 "MachineIndependent/glslang_tab.cpp"
+#line 7569 "MachineIndependent/glslang_tab.cpp"
break;
- case 227: /* type_specifier_nonarray: IVEC3 */
-#line 1805 "MachineIndependent/glslang.y"
+ case 230: /* type_specifier_nonarray: IVEC3 */
+#line 1830 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtInt;
(yyval.interm.type).setVector(3);
}
-#line 7521 "MachineIndependent/glslang_tab.cpp"
+#line 7579 "MachineIndependent/glslang_tab.cpp"
break;
- case 228: /* type_specifier_nonarray: IVEC4 */
-#line 1810 "MachineIndependent/glslang.y"
+ case 231: /* type_specifier_nonarray: IVEC4 */
+#line 1835 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtInt;
(yyval.interm.type).setVector(4);
}
-#line 7531 "MachineIndependent/glslang_tab.cpp"
+#line 7589 "MachineIndependent/glslang_tab.cpp"
break;
- case 229: /* type_specifier_nonarray: UVEC2 */
-#line 1815 "MachineIndependent/glslang.y"
+ case 232: /* type_specifier_nonarray: UVEC2 */
+#line 1840 "MachineIndependent/glslang.y"
{
parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "unsigned integer vector");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtUint;
(yyval.interm.type).setVector(2);
}
-#line 7542 "MachineIndependent/glslang_tab.cpp"
+#line 7600 "MachineIndependent/glslang_tab.cpp"
break;
- case 230: /* type_specifier_nonarray: UVEC3 */
-#line 1821 "MachineIndependent/glslang.y"
+ case 233: /* type_specifier_nonarray: UVEC3 */
+#line 1846 "MachineIndependent/glslang.y"
{
parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "unsigned integer vector");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtUint;
(yyval.interm.type).setVector(3);
}
-#line 7553 "MachineIndependent/glslang_tab.cpp"
+#line 7611 "MachineIndependent/glslang_tab.cpp"
break;
- case 231: /* type_specifier_nonarray: UVEC4 */
-#line 1827 "MachineIndependent/glslang.y"
+ case 234: /* type_specifier_nonarray: UVEC4 */
+#line 1852 "MachineIndependent/glslang.y"
{
parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "unsigned integer vector");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtUint;
(yyval.interm.type).setVector(4);
}
-#line 7564 "MachineIndependent/glslang_tab.cpp"
+#line 7622 "MachineIndependent/glslang_tab.cpp"
break;
- case 232: /* type_specifier_nonarray: MAT2 */
-#line 1833 "MachineIndependent/glslang.y"
+ case 235: /* type_specifier_nonarray: MAT2 */
+#line 1858 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(2, 2);
}
-#line 7574 "MachineIndependent/glslang_tab.cpp"
+#line 7632 "MachineIndependent/glslang_tab.cpp"
break;
- case 233: /* type_specifier_nonarray: MAT3 */
-#line 1838 "MachineIndependent/glslang.y"
+ case 236: /* type_specifier_nonarray: MAT3 */
+#line 1863 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(3, 3);
}
-#line 7584 "MachineIndependent/glslang_tab.cpp"
+#line 7642 "MachineIndependent/glslang_tab.cpp"
break;
- case 234: /* type_specifier_nonarray: MAT4 */
-#line 1843 "MachineIndependent/glslang.y"
+ case 237: /* type_specifier_nonarray: MAT4 */
+#line 1868 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(4, 4);
}
-#line 7594 "MachineIndependent/glslang_tab.cpp"
+#line 7652 "MachineIndependent/glslang_tab.cpp"
break;
- case 235: /* type_specifier_nonarray: MAT2X2 */
-#line 1848 "MachineIndependent/glslang.y"
+ case 238: /* type_specifier_nonarray: MAT2X2 */
+#line 1873 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(2, 2);
}
-#line 7604 "MachineIndependent/glslang_tab.cpp"
+#line 7662 "MachineIndependent/glslang_tab.cpp"
break;
- case 236: /* type_specifier_nonarray: MAT2X3 */
-#line 1853 "MachineIndependent/glslang.y"
+ case 239: /* type_specifier_nonarray: MAT2X3 */
+#line 1878 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(2, 3);
}
-#line 7614 "MachineIndependent/glslang_tab.cpp"
+#line 7672 "MachineIndependent/glslang_tab.cpp"
break;
- case 237: /* type_specifier_nonarray: MAT2X4 */
-#line 1858 "MachineIndependent/glslang.y"
+ case 240: /* type_specifier_nonarray: MAT2X4 */
+#line 1883 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(2, 4);
}
-#line 7624 "MachineIndependent/glslang_tab.cpp"
+#line 7682 "MachineIndependent/glslang_tab.cpp"
break;
- case 238: /* type_specifier_nonarray: MAT3X2 */
-#line 1863 "MachineIndependent/glslang.y"
+ case 241: /* type_specifier_nonarray: MAT3X2 */
+#line 1888 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(3, 2);
}
-#line 7634 "MachineIndependent/glslang_tab.cpp"
+#line 7692 "MachineIndependent/glslang_tab.cpp"
break;
- case 239: /* type_specifier_nonarray: MAT3X3 */
-#line 1868 "MachineIndependent/glslang.y"
+ case 242: /* type_specifier_nonarray: MAT3X3 */
+#line 1893 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(3, 3);
}
-#line 7644 "MachineIndependent/glslang_tab.cpp"
+#line 7702 "MachineIndependent/glslang_tab.cpp"
break;
- case 240: /* type_specifier_nonarray: MAT3X4 */
-#line 1873 "MachineIndependent/glslang.y"
+ case 243: /* type_specifier_nonarray: MAT3X4 */
+#line 1898 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(3, 4);
}
-#line 7654 "MachineIndependent/glslang_tab.cpp"
+#line 7712 "MachineIndependent/glslang_tab.cpp"
break;
- case 241: /* type_specifier_nonarray: MAT4X2 */
-#line 1878 "MachineIndependent/glslang.y"
+ case 244: /* type_specifier_nonarray: MAT4X2 */
+#line 1903 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(4, 2);
}
-#line 7664 "MachineIndependent/glslang_tab.cpp"
+#line 7722 "MachineIndependent/glslang_tab.cpp"
break;
- case 242: /* type_specifier_nonarray: MAT4X3 */
-#line 1883 "MachineIndependent/glslang.y"
+ case 245: /* type_specifier_nonarray: MAT4X3 */
+#line 1908 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(4, 3);
}
-#line 7674 "MachineIndependent/glslang_tab.cpp"
+#line 7732 "MachineIndependent/glslang_tab.cpp"
break;
- case 243: /* type_specifier_nonarray: MAT4X4 */
-#line 1888 "MachineIndependent/glslang.y"
+ case 246: /* type_specifier_nonarray: MAT4X4 */
+#line 1913 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(4, 4);
}
-#line 7684 "MachineIndependent/glslang_tab.cpp"
+#line 7742 "MachineIndependent/glslang_tab.cpp"
break;
- case 244: /* type_specifier_nonarray: DOUBLE */
-#line 1894 "MachineIndependent/glslang.y"
+ case 247: /* type_specifier_nonarray: DOUBLE */
+#line 1919 "MachineIndependent/glslang.y"
{
parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double");
if (! parseContext.symbolTable.atBuiltInLevel())
@@ -7692,121 +7750,121 @@ yyreduce:
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
}
-#line 7696 "MachineIndependent/glslang_tab.cpp"
+#line 7754 "MachineIndependent/glslang_tab.cpp"
break;
- case 245: /* type_specifier_nonarray: FLOAT16_T */
-#line 1901 "MachineIndependent/glslang.y"
+ case 248: /* type_specifier_nonarray: FLOAT16_T */
+#line 1926 "MachineIndependent/glslang.y"
{
parseContext.float16ScalarVectorCheck((yyvsp[0].lex).loc, "float16_t", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat16;
}
-#line 7706 "MachineIndependent/glslang_tab.cpp"
+#line 7764 "MachineIndependent/glslang_tab.cpp"
break;
- case 246: /* type_specifier_nonarray: FLOAT32_T */
-#line 1906 "MachineIndependent/glslang.y"
+ case 249: /* type_specifier_nonarray: FLOAT32_T */
+#line 1931 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
}
-#line 7716 "MachineIndependent/glslang_tab.cpp"
+#line 7774 "MachineIndependent/glslang_tab.cpp"
break;
- case 247: /* type_specifier_nonarray: FLOAT64_T */
-#line 1911 "MachineIndependent/glslang.y"
+ case 250: /* type_specifier_nonarray: FLOAT64_T */
+#line 1936 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
}
-#line 7726 "MachineIndependent/glslang_tab.cpp"
+#line 7784 "MachineIndependent/glslang_tab.cpp"
break;
- case 248: /* type_specifier_nonarray: INT8_T */
-#line 1916 "MachineIndependent/glslang.y"
+ case 251: /* type_specifier_nonarray: INT8_T */
+#line 1941 "MachineIndependent/glslang.y"
{
parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit signed integer", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtInt8;
}
-#line 7736 "MachineIndependent/glslang_tab.cpp"
+#line 7794 "MachineIndependent/glslang_tab.cpp"
break;
- case 249: /* type_specifier_nonarray: UINT8_T */
-#line 1921 "MachineIndependent/glslang.y"
+ case 252: /* type_specifier_nonarray: UINT8_T */
+#line 1946 "MachineIndependent/glslang.y"
{
parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit unsigned integer", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtUint8;
}
-#line 7746 "MachineIndependent/glslang_tab.cpp"
+#line 7804 "MachineIndependent/glslang_tab.cpp"
break;
- case 250: /* type_specifier_nonarray: INT16_T */
-#line 1926 "MachineIndependent/glslang.y"
+ case 253: /* type_specifier_nonarray: INT16_T */
+#line 1951 "MachineIndependent/glslang.y"
{
parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit signed integer", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtInt16;
}
-#line 7756 "MachineIndependent/glslang_tab.cpp"
+#line 7814 "MachineIndependent/glslang_tab.cpp"
break;
- case 251: /* type_specifier_nonarray: UINT16_T */
-#line 1931 "MachineIndependent/glslang.y"
+ case 254: /* type_specifier_nonarray: UINT16_T */
+#line 1956 "MachineIndependent/glslang.y"
{
parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit unsigned integer", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtUint16;
}
-#line 7766 "MachineIndependent/glslang_tab.cpp"
+#line 7824 "MachineIndependent/glslang_tab.cpp"
break;
- case 252: /* type_specifier_nonarray: INT32_T */
-#line 1936 "MachineIndependent/glslang.y"
+ case 255: /* type_specifier_nonarray: INT32_T */
+#line 1961 "MachineIndependent/glslang.y"
{
parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit signed integer", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtInt;
}
-#line 7776 "MachineIndependent/glslang_tab.cpp"
+#line 7834 "MachineIndependent/glslang_tab.cpp"
break;
- case 253: /* type_specifier_nonarray: UINT32_T */
-#line 1941 "MachineIndependent/glslang.y"
+ case 256: /* type_specifier_nonarray: UINT32_T */
+#line 1966 "MachineIndependent/glslang.y"
{
parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit unsigned integer", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtUint;
}
-#line 7786 "MachineIndependent/glslang_tab.cpp"
+#line 7844 "MachineIndependent/glslang_tab.cpp"
break;
- case 254: /* type_specifier_nonarray: INT64_T */
-#line 1946 "MachineIndependent/glslang.y"
+ case 257: /* type_specifier_nonarray: INT64_T */
+#line 1971 "MachineIndependent/glslang.y"
{
parseContext.int64Check((yyvsp[0].lex).loc, "64-bit integer", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtInt64;
}
-#line 7796 "MachineIndependent/glslang_tab.cpp"
+#line 7854 "MachineIndependent/glslang_tab.cpp"
break;
- case 255: /* type_specifier_nonarray: UINT64_T */
-#line 1951 "MachineIndependent/glslang.y"
+ case 258: /* type_specifier_nonarray: UINT64_T */
+#line 1976 "MachineIndependent/glslang.y"
{
parseContext.int64Check((yyvsp[0].lex).loc, "64-bit unsigned integer", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtUint64;
}
-#line 7806 "MachineIndependent/glslang_tab.cpp"
+#line 7864 "MachineIndependent/glslang_tab.cpp"
break;
- case 256: /* type_specifier_nonarray: DVEC2 */
-#line 1956 "MachineIndependent/glslang.y"
+ case 259: /* type_specifier_nonarray: DVEC2 */
+#line 1981 "MachineIndependent/glslang.y"
{
parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double vector");
if (! parseContext.symbolTable.atBuiltInLevel())
@@ -7815,11 +7873,11 @@ yyreduce:
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setVector(2);
}
-#line 7819 "MachineIndependent/glslang_tab.cpp"
+#line 7877 "MachineIndependent/glslang_tab.cpp"
break;
- case 257: /* type_specifier_nonarray: DVEC3 */
-#line 1964 "MachineIndependent/glslang.y"
+ case 260: /* type_specifier_nonarray: DVEC3 */
+#line 1989 "MachineIndependent/glslang.y"
{
parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double vector");
if (! parseContext.symbolTable.atBuiltInLevel())
@@ -7828,11 +7886,11 @@ yyreduce:
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setVector(3);
}
-#line 7832 "MachineIndependent/glslang_tab.cpp"
+#line 7890 "MachineIndependent/glslang_tab.cpp"
break;
- case 258: /* type_specifier_nonarray: DVEC4 */
-#line 1972 "MachineIndependent/glslang.y"
+ case 261: /* type_specifier_nonarray: DVEC4 */
+#line 1997 "MachineIndependent/glslang.y"
{
parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double vector");
if (! parseContext.symbolTable.atBuiltInLevel())
@@ -7841,374 +7899,374 @@ yyreduce:
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setVector(4);
}
-#line 7845 "MachineIndependent/glslang_tab.cpp"
+#line 7903 "MachineIndependent/glslang_tab.cpp"
break;
- case 259: /* type_specifier_nonarray: F16VEC2 */
-#line 1980 "MachineIndependent/glslang.y"
+ case 262: /* type_specifier_nonarray: F16VEC2 */
+#line 2005 "MachineIndependent/glslang.y"
{
parseContext.float16ScalarVectorCheck((yyvsp[0].lex).loc, "half float vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setVector(2);
}
-#line 7856 "MachineIndependent/glslang_tab.cpp"
+#line 7914 "MachineIndependent/glslang_tab.cpp"
break;
- case 260: /* type_specifier_nonarray: F16VEC3 */
-#line 1986 "MachineIndependent/glslang.y"
+ case 263: /* type_specifier_nonarray: F16VEC3 */
+#line 2011 "MachineIndependent/glslang.y"
{
parseContext.float16ScalarVectorCheck((yyvsp[0].lex).loc, "half float vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setVector(3);
}
-#line 7867 "MachineIndependent/glslang_tab.cpp"
+#line 7925 "MachineIndependent/glslang_tab.cpp"
break;
- case 261: /* type_specifier_nonarray: F16VEC4 */
-#line 1992 "MachineIndependent/glslang.y"
+ case 264: /* type_specifier_nonarray: F16VEC4 */
+#line 2017 "MachineIndependent/glslang.y"
{
parseContext.float16ScalarVectorCheck((yyvsp[0].lex).loc, "half float vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setVector(4);
}
-#line 7878 "MachineIndependent/glslang_tab.cpp"
+#line 7936 "MachineIndependent/glslang_tab.cpp"
break;
- case 262: /* type_specifier_nonarray: F32VEC2 */
-#line 1998 "MachineIndependent/glslang.y"
+ case 265: /* type_specifier_nonarray: F32VEC2 */
+#line 2023 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setVector(2);
}
-#line 7889 "MachineIndependent/glslang_tab.cpp"
+#line 7947 "MachineIndependent/glslang_tab.cpp"
break;
- case 263: /* type_specifier_nonarray: F32VEC3 */
-#line 2004 "MachineIndependent/glslang.y"
+ case 266: /* type_specifier_nonarray: F32VEC3 */
+#line 2029 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setVector(3);
}
-#line 7900 "MachineIndependent/glslang_tab.cpp"
+#line 7958 "MachineIndependent/glslang_tab.cpp"
break;
- case 264: /* type_specifier_nonarray: F32VEC4 */
-#line 2010 "MachineIndependent/glslang.y"
+ case 267: /* type_specifier_nonarray: F32VEC4 */
+#line 2035 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setVector(4);
}
-#line 7911 "MachineIndependent/glslang_tab.cpp"
+#line 7969 "MachineIndependent/glslang_tab.cpp"
break;
- case 265: /* type_specifier_nonarray: F64VEC2 */
-#line 2016 "MachineIndependent/glslang.y"
+ case 268: /* type_specifier_nonarray: F64VEC2 */
+#line 2041 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setVector(2);
}
-#line 7922 "MachineIndependent/glslang_tab.cpp"
+#line 7980 "MachineIndependent/glslang_tab.cpp"
break;
- case 266: /* type_specifier_nonarray: F64VEC3 */
-#line 2022 "MachineIndependent/glslang.y"
+ case 269: /* type_specifier_nonarray: F64VEC3 */
+#line 2047 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setVector(3);
}
-#line 7933 "MachineIndependent/glslang_tab.cpp"
+#line 7991 "MachineIndependent/glslang_tab.cpp"
break;
- case 267: /* type_specifier_nonarray: F64VEC4 */
-#line 2028 "MachineIndependent/glslang.y"
+ case 270: /* type_specifier_nonarray: F64VEC4 */
+#line 2053 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setVector(4);
}
-#line 7944 "MachineIndependent/glslang_tab.cpp"
+#line 8002 "MachineIndependent/glslang_tab.cpp"
break;
- case 268: /* type_specifier_nonarray: I8VEC2 */
-#line 2034 "MachineIndependent/glslang.y"
+ case 271: /* type_specifier_nonarray: I8VEC2 */
+#line 2059 "MachineIndependent/glslang.y"
{
parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtInt8;
(yyval.interm.type).setVector(2);
}
-#line 7955 "MachineIndependent/glslang_tab.cpp"
+#line 8013 "MachineIndependent/glslang_tab.cpp"
break;
- case 269: /* type_specifier_nonarray: I8VEC3 */
-#line 2040 "MachineIndependent/glslang.y"
+ case 272: /* type_specifier_nonarray: I8VEC3 */
+#line 2065 "MachineIndependent/glslang.y"
{
parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtInt8;
(yyval.interm.type).setVector(3);
}
-#line 7966 "MachineIndependent/glslang_tab.cpp"
+#line 8024 "MachineIndependent/glslang_tab.cpp"
break;
- case 270: /* type_specifier_nonarray: I8VEC4 */
-#line 2046 "MachineIndependent/glslang.y"
+ case 273: /* type_specifier_nonarray: I8VEC4 */
+#line 2071 "MachineIndependent/glslang.y"
{
parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtInt8;
(yyval.interm.type).setVector(4);
}
-#line 7977 "MachineIndependent/glslang_tab.cpp"
+#line 8035 "MachineIndependent/glslang_tab.cpp"
break;
- case 271: /* type_specifier_nonarray: I16VEC2 */
-#line 2052 "MachineIndependent/glslang.y"
+ case 274: /* type_specifier_nonarray: I16VEC2 */
+#line 2077 "MachineIndependent/glslang.y"
{
parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtInt16;
(yyval.interm.type).setVector(2);
}
-#line 7988 "MachineIndependent/glslang_tab.cpp"
+#line 8046 "MachineIndependent/glslang_tab.cpp"
break;
- case 272: /* type_specifier_nonarray: I16VEC3 */
-#line 2058 "MachineIndependent/glslang.y"
+ case 275: /* type_specifier_nonarray: I16VEC3 */
+#line 2083 "MachineIndependent/glslang.y"
{
parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtInt16;
(yyval.interm.type).setVector(3);
}
-#line 7999 "MachineIndependent/glslang_tab.cpp"
+#line 8057 "MachineIndependent/glslang_tab.cpp"
break;
- case 273: /* type_specifier_nonarray: I16VEC4 */
-#line 2064 "MachineIndependent/glslang.y"
+ case 276: /* type_specifier_nonarray: I16VEC4 */
+#line 2089 "MachineIndependent/glslang.y"
{
parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtInt16;
(yyval.interm.type).setVector(4);
}
-#line 8010 "MachineIndependent/glslang_tab.cpp"
+#line 8068 "MachineIndependent/glslang_tab.cpp"
break;
- case 274: /* type_specifier_nonarray: I32VEC2 */
-#line 2070 "MachineIndependent/glslang.y"
+ case 277: /* type_specifier_nonarray: I32VEC2 */
+#line 2095 "MachineIndependent/glslang.y"
{
parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtInt;
(yyval.interm.type).setVector(2);
}
-#line 8021 "MachineIndependent/glslang_tab.cpp"
+#line 8079 "MachineIndependent/glslang_tab.cpp"
break;
- case 275: /* type_specifier_nonarray: I32VEC3 */
-#line 2076 "MachineIndependent/glslang.y"
+ case 278: /* type_specifier_nonarray: I32VEC3 */
+#line 2101 "MachineIndependent/glslang.y"
{
parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtInt;
(yyval.interm.type).setVector(3);
}
-#line 8032 "MachineIndependent/glslang_tab.cpp"
+#line 8090 "MachineIndependent/glslang_tab.cpp"
break;
- case 276: /* type_specifier_nonarray: I32VEC4 */
-#line 2082 "MachineIndependent/glslang.y"
+ case 279: /* type_specifier_nonarray: I32VEC4 */
+#line 2107 "MachineIndependent/glslang.y"
{
parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtInt;
(yyval.interm.type).setVector(4);
}
-#line 8043 "MachineIndependent/glslang_tab.cpp"
+#line 8101 "MachineIndependent/glslang_tab.cpp"
break;
- case 277: /* type_specifier_nonarray: I64VEC2 */
-#line 2088 "MachineIndependent/glslang.y"
+ case 280: /* type_specifier_nonarray: I64VEC2 */
+#line 2113 "MachineIndependent/glslang.y"
{
parseContext.int64Check((yyvsp[0].lex).loc, "64-bit integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtInt64;
(yyval.interm.type).setVector(2);
}
-#line 8054 "MachineIndependent/glslang_tab.cpp"
+#line 8112 "MachineIndependent/glslang_tab.cpp"
break;
- case 278: /* type_specifier_nonarray: I64VEC3 */
-#line 2094 "MachineIndependent/glslang.y"
+ case 281: /* type_specifier_nonarray: I64VEC3 */
+#line 2119 "MachineIndependent/glslang.y"
{
parseContext.int64Check((yyvsp[0].lex).loc, "64-bit integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtInt64;
(yyval.interm.type).setVector(3);
}
-#line 8065 "MachineIndependent/glslang_tab.cpp"
+#line 8123 "MachineIndependent/glslang_tab.cpp"
break;
- case 279: /* type_specifier_nonarray: I64VEC4 */
-#line 2100 "MachineIndependent/glslang.y"
+ case 282: /* type_specifier_nonarray: I64VEC4 */
+#line 2125 "MachineIndependent/glslang.y"
{
parseContext.int64Check((yyvsp[0].lex).loc, "64-bit integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtInt64;
(yyval.interm.type).setVector(4);
}
-#line 8076 "MachineIndependent/glslang_tab.cpp"
+#line 8134 "MachineIndependent/glslang_tab.cpp"
break;
- case 280: /* type_specifier_nonarray: U8VEC2 */
-#line 2106 "MachineIndependent/glslang.y"
+ case 283: /* type_specifier_nonarray: U8VEC2 */
+#line 2131 "MachineIndependent/glslang.y"
{
parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtUint8;
(yyval.interm.type).setVector(2);
}
-#line 8087 "MachineIndependent/glslang_tab.cpp"
+#line 8145 "MachineIndependent/glslang_tab.cpp"
break;
- case 281: /* type_specifier_nonarray: U8VEC3 */
-#line 2112 "MachineIndependent/glslang.y"
+ case 284: /* type_specifier_nonarray: U8VEC3 */
+#line 2137 "MachineIndependent/glslang.y"
{
parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtUint8;
(yyval.interm.type).setVector(3);
}
-#line 8098 "MachineIndependent/glslang_tab.cpp"
+#line 8156 "MachineIndependent/glslang_tab.cpp"
break;
- case 282: /* type_specifier_nonarray: U8VEC4 */
-#line 2118 "MachineIndependent/glslang.y"
+ case 285: /* type_specifier_nonarray: U8VEC4 */
+#line 2143 "MachineIndependent/glslang.y"
{
parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtUint8;
(yyval.interm.type).setVector(4);
}
-#line 8109 "MachineIndependent/glslang_tab.cpp"
+#line 8167 "MachineIndependent/glslang_tab.cpp"
break;
- case 283: /* type_specifier_nonarray: U16VEC2 */
-#line 2124 "MachineIndependent/glslang.y"
+ case 286: /* type_specifier_nonarray: U16VEC2 */
+#line 2149 "MachineIndependent/glslang.y"
{
parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtUint16;
(yyval.interm.type).setVector(2);
}
-#line 8120 "MachineIndependent/glslang_tab.cpp"
+#line 8178 "MachineIndependent/glslang_tab.cpp"
break;
- case 284: /* type_specifier_nonarray: U16VEC3 */
-#line 2130 "MachineIndependent/glslang.y"
+ case 287: /* type_specifier_nonarray: U16VEC3 */
+#line 2155 "MachineIndependent/glslang.y"
{
parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtUint16;
(yyval.interm.type).setVector(3);
}
-#line 8131 "MachineIndependent/glslang_tab.cpp"
+#line 8189 "MachineIndependent/glslang_tab.cpp"
break;
- case 285: /* type_specifier_nonarray: U16VEC4 */
-#line 2136 "MachineIndependent/glslang.y"
+ case 288: /* type_specifier_nonarray: U16VEC4 */
+#line 2161 "MachineIndependent/glslang.y"
{
parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtUint16;
(yyval.interm.type).setVector(4);
}
-#line 8142 "MachineIndependent/glslang_tab.cpp"
+#line 8200 "MachineIndependent/glslang_tab.cpp"
break;
- case 286: /* type_specifier_nonarray: U32VEC2 */
-#line 2142 "MachineIndependent/glslang.y"
+ case 289: /* type_specifier_nonarray: U32VEC2 */
+#line 2167 "MachineIndependent/glslang.y"
{
parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtUint;
(yyval.interm.type).setVector(2);
}
-#line 8153 "MachineIndependent/glslang_tab.cpp"
+#line 8211 "MachineIndependent/glslang_tab.cpp"
break;
- case 287: /* type_specifier_nonarray: U32VEC3 */
-#line 2148 "MachineIndependent/glslang.y"
+ case 290: /* type_specifier_nonarray: U32VEC3 */
+#line 2173 "MachineIndependent/glslang.y"
{
parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtUint;
(yyval.interm.type).setVector(3);
}
-#line 8164 "MachineIndependent/glslang_tab.cpp"
+#line 8222 "MachineIndependent/glslang_tab.cpp"
break;
- case 288: /* type_specifier_nonarray: U32VEC4 */
-#line 2154 "MachineIndependent/glslang.y"
+ case 291: /* type_specifier_nonarray: U32VEC4 */
+#line 2179 "MachineIndependent/glslang.y"
{
parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtUint;
(yyval.interm.type).setVector(4);
}
-#line 8175 "MachineIndependent/glslang_tab.cpp"
+#line 8233 "MachineIndependent/glslang_tab.cpp"
break;
- case 289: /* type_specifier_nonarray: U64VEC2 */
-#line 2160 "MachineIndependent/glslang.y"
+ case 292: /* type_specifier_nonarray: U64VEC2 */
+#line 2185 "MachineIndependent/glslang.y"
{
parseContext.int64Check((yyvsp[0].lex).loc, "64-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtUint64;
(yyval.interm.type).setVector(2);
}
-#line 8186 "MachineIndependent/glslang_tab.cpp"
+#line 8244 "MachineIndependent/glslang_tab.cpp"
break;
- case 290: /* type_specifier_nonarray: U64VEC3 */
-#line 2166 "MachineIndependent/glslang.y"
+ case 293: /* type_specifier_nonarray: U64VEC3 */
+#line 2191 "MachineIndependent/glslang.y"
{
parseContext.int64Check((yyvsp[0].lex).loc, "64-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtUint64;
(yyval.interm.type).setVector(3);
}
-#line 8197 "MachineIndependent/glslang_tab.cpp"
+#line 8255 "MachineIndependent/glslang_tab.cpp"
break;
- case 291: /* type_specifier_nonarray: U64VEC4 */
-#line 2172 "MachineIndependent/glslang.y"
+ case 294: /* type_specifier_nonarray: U64VEC4 */
+#line 2197 "MachineIndependent/glslang.y"
{
parseContext.int64Check((yyvsp[0].lex).loc, "64-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtUint64;
(yyval.interm.type).setVector(4);
}
-#line 8208 "MachineIndependent/glslang_tab.cpp"
+#line 8266 "MachineIndependent/glslang_tab.cpp"
break;
- case 292: /* type_specifier_nonarray: DMAT2 */
-#line 2178 "MachineIndependent/glslang.y"
+ case 295: /* type_specifier_nonarray: DMAT2 */
+#line 2203 "MachineIndependent/glslang.y"
{
parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix");
if (! parseContext.symbolTable.atBuiltInLevel())
@@ -8217,11 +8275,11 @@ yyreduce:
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(2, 2);
}
-#line 8221 "MachineIndependent/glslang_tab.cpp"
+#line 8279 "MachineIndependent/glslang_tab.cpp"
break;
- case 293: /* type_specifier_nonarray: DMAT3 */
-#line 2186 "MachineIndependent/glslang.y"
+ case 296: /* type_specifier_nonarray: DMAT3 */
+#line 2211 "MachineIndependent/glslang.y"
{
parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix");
if (! parseContext.symbolTable.atBuiltInLevel())
@@ -8230,11 +8288,11 @@ yyreduce:
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(3, 3);
}
-#line 8234 "MachineIndependent/glslang_tab.cpp"
+#line 8292 "MachineIndependent/glslang_tab.cpp"
break;
- case 294: /* type_specifier_nonarray: DMAT4 */
-#line 2194 "MachineIndependent/glslang.y"
+ case 297: /* type_specifier_nonarray: DMAT4 */
+#line 2219 "MachineIndependent/glslang.y"
{
parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix");
if (! parseContext.symbolTable.atBuiltInLevel())
@@ -8243,11 +8301,11 @@ yyreduce:
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(4, 4);
}
-#line 8247 "MachineIndependent/glslang_tab.cpp"
+#line 8305 "MachineIndependent/glslang_tab.cpp"
break;
- case 295: /* type_specifier_nonarray: DMAT2X2 */
-#line 2202 "MachineIndependent/glslang.y"
+ case 298: /* type_specifier_nonarray: DMAT2X2 */
+#line 2227 "MachineIndependent/glslang.y"
{
parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix");
if (! parseContext.symbolTable.atBuiltInLevel())
@@ -8256,11 +8314,11 @@ yyreduce:
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(2, 2);
}
-#line 8260 "MachineIndependent/glslang_tab.cpp"
+#line 8318 "MachineIndependent/glslang_tab.cpp"
break;
- case 296: /* type_specifier_nonarray: DMAT2X3 */
-#line 2210 "MachineIndependent/glslang.y"
+ case 299: /* type_specifier_nonarray: DMAT2X3 */
+#line 2235 "MachineIndependent/glslang.y"
{
parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix");
if (! parseContext.symbolTable.atBuiltInLevel())
@@ -8269,11 +8327,11 @@ yyreduce:
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(2, 3);
}
-#line 8273 "MachineIndependent/glslang_tab.cpp"
+#line 8331 "MachineIndependent/glslang_tab.cpp"
break;
- case 297: /* type_specifier_nonarray: DMAT2X4 */
-#line 2218 "MachineIndependent/glslang.y"
+ case 300: /* type_specifier_nonarray: DMAT2X4 */
+#line 2243 "MachineIndependent/glslang.y"
{
parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix");
if (! parseContext.symbolTable.atBuiltInLevel())
@@ -8282,11 +8340,11 @@ yyreduce:
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(2, 4);
}
-#line 8286 "MachineIndependent/glslang_tab.cpp"
+#line 8344 "MachineIndependent/glslang_tab.cpp"
break;
- case 298: /* type_specifier_nonarray: DMAT3X2 */
-#line 2226 "MachineIndependent/glslang.y"
+ case 301: /* type_specifier_nonarray: DMAT3X2 */
+#line 2251 "MachineIndependent/glslang.y"
{
parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix");
if (! parseContext.symbolTable.atBuiltInLevel())
@@ -8295,11 +8353,11 @@ yyreduce:
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(3, 2);
}
-#line 8299 "MachineIndependent/glslang_tab.cpp"
+#line 8357 "MachineIndependent/glslang_tab.cpp"
break;
- case 299: /* type_specifier_nonarray: DMAT3X3 */
-#line 2234 "MachineIndependent/glslang.y"
+ case 302: /* type_specifier_nonarray: DMAT3X3 */
+#line 2259 "MachineIndependent/glslang.y"
{
parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix");
if (! parseContext.symbolTable.atBuiltInLevel())
@@ -8308,11 +8366,11 @@ yyreduce:
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(3, 3);
}
-#line 8312 "MachineIndependent/glslang_tab.cpp"
+#line 8370 "MachineIndependent/glslang_tab.cpp"
break;
- case 300: /* type_specifier_nonarray: DMAT3X4 */
-#line 2242 "MachineIndependent/glslang.y"
+ case 303: /* type_specifier_nonarray: DMAT3X4 */
+#line 2267 "MachineIndependent/glslang.y"
{
parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix");
if (! parseContext.symbolTable.atBuiltInLevel())
@@ -8321,11 +8379,11 @@ yyreduce:
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(3, 4);
}
-#line 8325 "MachineIndependent/glslang_tab.cpp"
+#line 8383 "MachineIndependent/glslang_tab.cpp"
break;
- case 301: /* type_specifier_nonarray: DMAT4X2 */
-#line 2250 "MachineIndependent/glslang.y"
+ case 304: /* type_specifier_nonarray: DMAT4X2 */
+#line 2275 "MachineIndependent/glslang.y"
{
parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix");
if (! parseContext.symbolTable.atBuiltInLevel())
@@ -8334,11 +8392,11 @@ yyreduce:
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(4, 2);
}
-#line 8338 "MachineIndependent/glslang_tab.cpp"
+#line 8396 "MachineIndependent/glslang_tab.cpp"
break;
- case 302: /* type_specifier_nonarray: DMAT4X3 */
-#line 2258 "MachineIndependent/glslang.y"
+ case 305: /* type_specifier_nonarray: DMAT4X3 */
+#line 2283 "MachineIndependent/glslang.y"
{
parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix");
if (! parseContext.symbolTable.atBuiltInLevel())
@@ -8347,11 +8405,11 @@ yyreduce:
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(4, 3);
}
-#line 8351 "MachineIndependent/glslang_tab.cpp"
+#line 8409 "MachineIndependent/glslang_tab.cpp"
break;
- case 303: /* type_specifier_nonarray: DMAT4X4 */
-#line 2266 "MachineIndependent/glslang.y"
+ case 306: /* type_specifier_nonarray: DMAT4X4 */
+#line 2291 "MachineIndependent/glslang.y"
{
parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix");
if (! parseContext.symbolTable.atBuiltInLevel())
@@ -8360,2228 +8418,2228 @@ yyreduce:
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(4, 4);
}
-#line 8364 "MachineIndependent/glslang_tab.cpp"
+#line 8422 "MachineIndependent/glslang_tab.cpp"
break;
- case 304: /* type_specifier_nonarray: F16MAT2 */
-#line 2274 "MachineIndependent/glslang.y"
+ case 307: /* type_specifier_nonarray: F16MAT2 */
+#line 2299 "MachineIndependent/glslang.y"
{
parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setMatrix(2, 2);
}
-#line 8375 "MachineIndependent/glslang_tab.cpp"
+#line 8433 "MachineIndependent/glslang_tab.cpp"
break;
- case 305: /* type_specifier_nonarray: F16MAT3 */
-#line 2280 "MachineIndependent/glslang.y"
+ case 308: /* type_specifier_nonarray: F16MAT3 */
+#line 2305 "MachineIndependent/glslang.y"
{
parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setMatrix(3, 3);
}
-#line 8386 "MachineIndependent/glslang_tab.cpp"
+#line 8444 "MachineIndependent/glslang_tab.cpp"
break;
- case 306: /* type_specifier_nonarray: F16MAT4 */
-#line 2286 "MachineIndependent/glslang.y"
+ case 309: /* type_specifier_nonarray: F16MAT4 */
+#line 2311 "MachineIndependent/glslang.y"
{
parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setMatrix(4, 4);
}
-#line 8397 "MachineIndependent/glslang_tab.cpp"
+#line 8455 "MachineIndependent/glslang_tab.cpp"
break;
- case 307: /* type_specifier_nonarray: F16MAT2X2 */
-#line 2292 "MachineIndependent/glslang.y"
+ case 310: /* type_specifier_nonarray: F16MAT2X2 */
+#line 2317 "MachineIndependent/glslang.y"
{
parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setMatrix(2, 2);
}
-#line 8408 "MachineIndependent/glslang_tab.cpp"
+#line 8466 "MachineIndependent/glslang_tab.cpp"
break;
- case 308: /* type_specifier_nonarray: F16MAT2X3 */
-#line 2298 "MachineIndependent/glslang.y"
+ case 311: /* type_specifier_nonarray: F16MAT2X3 */
+#line 2323 "MachineIndependent/glslang.y"
{
parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setMatrix(2, 3);
}
-#line 8419 "MachineIndependent/glslang_tab.cpp"
+#line 8477 "MachineIndependent/glslang_tab.cpp"
break;
- case 309: /* type_specifier_nonarray: F16MAT2X4 */
-#line 2304 "MachineIndependent/glslang.y"
+ case 312: /* type_specifier_nonarray: F16MAT2X4 */
+#line 2329 "MachineIndependent/glslang.y"
{
parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setMatrix(2, 4);
}
-#line 8430 "MachineIndependent/glslang_tab.cpp"
+#line 8488 "MachineIndependent/glslang_tab.cpp"
break;
- case 310: /* type_specifier_nonarray: F16MAT3X2 */
-#line 2310 "MachineIndependent/glslang.y"
+ case 313: /* type_specifier_nonarray: F16MAT3X2 */
+#line 2335 "MachineIndependent/glslang.y"
{
parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setMatrix(3, 2);
}
-#line 8441 "MachineIndependent/glslang_tab.cpp"
+#line 8499 "MachineIndependent/glslang_tab.cpp"
break;
- case 311: /* type_specifier_nonarray: F16MAT3X3 */
-#line 2316 "MachineIndependent/glslang.y"
+ case 314: /* type_specifier_nonarray: F16MAT3X3 */
+#line 2341 "MachineIndependent/glslang.y"
{
parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setMatrix(3, 3);
}
-#line 8452 "MachineIndependent/glslang_tab.cpp"
+#line 8510 "MachineIndependent/glslang_tab.cpp"
break;
- case 312: /* type_specifier_nonarray: F16MAT3X4 */
-#line 2322 "MachineIndependent/glslang.y"
+ case 315: /* type_specifier_nonarray: F16MAT3X4 */
+#line 2347 "MachineIndependent/glslang.y"
{
parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setMatrix(3, 4);
}
-#line 8463 "MachineIndependent/glslang_tab.cpp"
+#line 8521 "MachineIndependent/glslang_tab.cpp"
break;
- case 313: /* type_specifier_nonarray: F16MAT4X2 */
-#line 2328 "MachineIndependent/glslang.y"
+ case 316: /* type_specifier_nonarray: F16MAT4X2 */
+#line 2353 "MachineIndependent/glslang.y"
{
parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setMatrix(4, 2);
}
-#line 8474 "MachineIndependent/glslang_tab.cpp"
+#line 8532 "MachineIndependent/glslang_tab.cpp"
break;
- case 314: /* type_specifier_nonarray: F16MAT4X3 */
-#line 2334 "MachineIndependent/glslang.y"
+ case 317: /* type_specifier_nonarray: F16MAT4X3 */
+#line 2359 "MachineIndependent/glslang.y"
{
parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setMatrix(4, 3);
}
-#line 8485 "MachineIndependent/glslang_tab.cpp"
+#line 8543 "MachineIndependent/glslang_tab.cpp"
break;
- case 315: /* type_specifier_nonarray: F16MAT4X4 */
-#line 2340 "MachineIndependent/glslang.y"
+ case 318: /* type_specifier_nonarray: F16MAT4X4 */
+#line 2365 "MachineIndependent/glslang.y"
{
parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setMatrix(4, 4);
}
-#line 8496 "MachineIndependent/glslang_tab.cpp"
+#line 8554 "MachineIndependent/glslang_tab.cpp"
break;
- case 316: /* type_specifier_nonarray: F32MAT2 */
-#line 2346 "MachineIndependent/glslang.y"
+ case 319: /* type_specifier_nonarray: F32MAT2 */
+#line 2371 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(2, 2);
}
-#line 8507 "MachineIndependent/glslang_tab.cpp"
+#line 8565 "MachineIndependent/glslang_tab.cpp"
break;
- case 317: /* type_specifier_nonarray: F32MAT3 */
-#line 2352 "MachineIndependent/glslang.y"
+ case 320: /* type_specifier_nonarray: F32MAT3 */
+#line 2377 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(3, 3);
}
-#line 8518 "MachineIndependent/glslang_tab.cpp"
+#line 8576 "MachineIndependent/glslang_tab.cpp"
break;
- case 318: /* type_specifier_nonarray: F32MAT4 */
-#line 2358 "MachineIndependent/glslang.y"
+ case 321: /* type_specifier_nonarray: F32MAT4 */
+#line 2383 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(4, 4);
}
-#line 8529 "MachineIndependent/glslang_tab.cpp"
+#line 8587 "MachineIndependent/glslang_tab.cpp"
break;
- case 319: /* type_specifier_nonarray: F32MAT2X2 */
-#line 2364 "MachineIndependent/glslang.y"
+ case 322: /* type_specifier_nonarray: F32MAT2X2 */
+#line 2389 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(2, 2);
}
-#line 8540 "MachineIndependent/glslang_tab.cpp"
+#line 8598 "MachineIndependent/glslang_tab.cpp"
break;
- case 320: /* type_specifier_nonarray: F32MAT2X3 */
-#line 2370 "MachineIndependent/glslang.y"
+ case 323: /* type_specifier_nonarray: F32MAT2X3 */
+#line 2395 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(2, 3);
}
-#line 8551 "MachineIndependent/glslang_tab.cpp"
+#line 8609 "MachineIndependent/glslang_tab.cpp"
break;
- case 321: /* type_specifier_nonarray: F32MAT2X4 */
-#line 2376 "MachineIndependent/glslang.y"
+ case 324: /* type_specifier_nonarray: F32MAT2X4 */
+#line 2401 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(2, 4);
}
-#line 8562 "MachineIndependent/glslang_tab.cpp"
+#line 8620 "MachineIndependent/glslang_tab.cpp"
break;
- case 322: /* type_specifier_nonarray: F32MAT3X2 */
-#line 2382 "MachineIndependent/glslang.y"
+ case 325: /* type_specifier_nonarray: F32MAT3X2 */
+#line 2407 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(3, 2);
}
-#line 8573 "MachineIndependent/glslang_tab.cpp"
+#line 8631 "MachineIndependent/glslang_tab.cpp"
break;
- case 323: /* type_specifier_nonarray: F32MAT3X3 */
-#line 2388 "MachineIndependent/glslang.y"
+ case 326: /* type_specifier_nonarray: F32MAT3X3 */
+#line 2413 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(3, 3);
}
-#line 8584 "MachineIndependent/glslang_tab.cpp"
+#line 8642 "MachineIndependent/glslang_tab.cpp"
break;
- case 324: /* type_specifier_nonarray: F32MAT3X4 */
-#line 2394 "MachineIndependent/glslang.y"
+ case 327: /* type_specifier_nonarray: F32MAT3X4 */
+#line 2419 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(3, 4);
}
-#line 8595 "MachineIndependent/glslang_tab.cpp"
+#line 8653 "MachineIndependent/glslang_tab.cpp"
break;
- case 325: /* type_specifier_nonarray: F32MAT4X2 */
-#line 2400 "MachineIndependent/glslang.y"
+ case 328: /* type_specifier_nonarray: F32MAT4X2 */
+#line 2425 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(4, 2);
}
-#line 8606 "MachineIndependent/glslang_tab.cpp"
+#line 8664 "MachineIndependent/glslang_tab.cpp"
break;
- case 326: /* type_specifier_nonarray: F32MAT4X3 */
-#line 2406 "MachineIndependent/glslang.y"
+ case 329: /* type_specifier_nonarray: F32MAT4X3 */
+#line 2431 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(4, 3);
}
-#line 8617 "MachineIndependent/glslang_tab.cpp"
+#line 8675 "MachineIndependent/glslang_tab.cpp"
break;
- case 327: /* type_specifier_nonarray: F32MAT4X4 */
-#line 2412 "MachineIndependent/glslang.y"
+ case 330: /* type_specifier_nonarray: F32MAT4X4 */
+#line 2437 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(4, 4);
}
-#line 8628 "MachineIndependent/glslang_tab.cpp"
+#line 8686 "MachineIndependent/glslang_tab.cpp"
break;
- case 328: /* type_specifier_nonarray: F64MAT2 */
-#line 2418 "MachineIndependent/glslang.y"
+ case 331: /* type_specifier_nonarray: F64MAT2 */
+#line 2443 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(2, 2);
}
-#line 8639 "MachineIndependent/glslang_tab.cpp"
+#line 8697 "MachineIndependent/glslang_tab.cpp"
break;
- case 329: /* type_specifier_nonarray: F64MAT3 */
-#line 2424 "MachineIndependent/glslang.y"
+ case 332: /* type_specifier_nonarray: F64MAT3 */
+#line 2449 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(3, 3);
}
-#line 8650 "MachineIndependent/glslang_tab.cpp"
+#line 8708 "MachineIndependent/glslang_tab.cpp"
break;
- case 330: /* type_specifier_nonarray: F64MAT4 */
-#line 2430 "MachineIndependent/glslang.y"
+ case 333: /* type_specifier_nonarray: F64MAT4 */
+#line 2455 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(4, 4);
}
-#line 8661 "MachineIndependent/glslang_tab.cpp"
+#line 8719 "MachineIndependent/glslang_tab.cpp"
break;
- case 331: /* type_specifier_nonarray: F64MAT2X2 */
-#line 2436 "MachineIndependent/glslang.y"
+ case 334: /* type_specifier_nonarray: F64MAT2X2 */
+#line 2461 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(2, 2);
}
-#line 8672 "MachineIndependent/glslang_tab.cpp"
+#line 8730 "MachineIndependent/glslang_tab.cpp"
break;
- case 332: /* type_specifier_nonarray: F64MAT2X3 */
-#line 2442 "MachineIndependent/glslang.y"
+ case 335: /* type_specifier_nonarray: F64MAT2X3 */
+#line 2467 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(2, 3);
}
-#line 8683 "MachineIndependent/glslang_tab.cpp"
+#line 8741 "MachineIndependent/glslang_tab.cpp"
break;
- case 333: /* type_specifier_nonarray: F64MAT2X4 */
-#line 2448 "MachineIndependent/glslang.y"
+ case 336: /* type_specifier_nonarray: F64MAT2X4 */
+#line 2473 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(2, 4);
}
-#line 8694 "MachineIndependent/glslang_tab.cpp"
+#line 8752 "MachineIndependent/glslang_tab.cpp"
break;
- case 334: /* type_specifier_nonarray: F64MAT3X2 */
-#line 2454 "MachineIndependent/glslang.y"
+ case 337: /* type_specifier_nonarray: F64MAT3X2 */
+#line 2479 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(3, 2);
}
-#line 8705 "MachineIndependent/glslang_tab.cpp"
+#line 8763 "MachineIndependent/glslang_tab.cpp"
break;
- case 335: /* type_specifier_nonarray: F64MAT3X3 */
-#line 2460 "MachineIndependent/glslang.y"
+ case 338: /* type_specifier_nonarray: F64MAT3X3 */
+#line 2485 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(3, 3);
}
-#line 8716 "MachineIndependent/glslang_tab.cpp"
+#line 8774 "MachineIndependent/glslang_tab.cpp"
break;
- case 336: /* type_specifier_nonarray: F64MAT3X4 */
-#line 2466 "MachineIndependent/glslang.y"
+ case 339: /* type_specifier_nonarray: F64MAT3X4 */
+#line 2491 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(3, 4);
}
-#line 8727 "MachineIndependent/glslang_tab.cpp"
+#line 8785 "MachineIndependent/glslang_tab.cpp"
break;
- case 337: /* type_specifier_nonarray: F64MAT4X2 */
-#line 2472 "MachineIndependent/glslang.y"
+ case 340: /* type_specifier_nonarray: F64MAT4X2 */
+#line 2497 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(4, 2);
}
-#line 8738 "MachineIndependent/glslang_tab.cpp"
+#line 8796 "MachineIndependent/glslang_tab.cpp"
break;
- case 338: /* type_specifier_nonarray: F64MAT4X3 */
-#line 2478 "MachineIndependent/glslang.y"
+ case 341: /* type_specifier_nonarray: F64MAT4X3 */
+#line 2503 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(4, 3);
}
-#line 8749 "MachineIndependent/glslang_tab.cpp"
+#line 8807 "MachineIndependent/glslang_tab.cpp"
break;
- case 339: /* type_specifier_nonarray: F64MAT4X4 */
-#line 2484 "MachineIndependent/glslang.y"
+ case 342: /* type_specifier_nonarray: F64MAT4X4 */
+#line 2509 "MachineIndependent/glslang.y"
{
parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(4, 4);
}
-#line 8760 "MachineIndependent/glslang_tab.cpp"
+#line 8818 "MachineIndependent/glslang_tab.cpp"
break;
- case 340: /* type_specifier_nonarray: ACCSTRUCTNV */
-#line 2490 "MachineIndependent/glslang.y"
+ case 343: /* type_specifier_nonarray: ACCSTRUCTNV */
+#line 2515 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtAccStruct;
}
-#line 8769 "MachineIndependent/glslang_tab.cpp"
+#line 8827 "MachineIndependent/glslang_tab.cpp"
break;
- case 341: /* type_specifier_nonarray: ACCSTRUCTEXT */
-#line 2494 "MachineIndependent/glslang.y"
+ case 344: /* type_specifier_nonarray: ACCSTRUCTEXT */
+#line 2519 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtAccStruct;
}
-#line 8778 "MachineIndependent/glslang_tab.cpp"
+#line 8836 "MachineIndependent/glslang_tab.cpp"
break;
- case 342: /* type_specifier_nonarray: RAYQUERYEXT */
-#line 2498 "MachineIndependent/glslang.y"
+ case 345: /* type_specifier_nonarray: RAYQUERYEXT */
+#line 2523 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtRayQuery;
}
-#line 8787 "MachineIndependent/glslang_tab.cpp"
+#line 8845 "MachineIndependent/glslang_tab.cpp"
break;
- case 343: /* type_specifier_nonarray: ATOMIC_UINT */
-#line 2502 "MachineIndependent/glslang.y"
+ case 346: /* type_specifier_nonarray: ATOMIC_UINT */
+#line 2527 "MachineIndependent/glslang.y"
{
parseContext.vulkanRemoved((yyvsp[0].lex).loc, "atomic counter types");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtAtomicUint;
}
-#line 8797 "MachineIndependent/glslang_tab.cpp"
+#line 8855 "MachineIndependent/glslang_tab.cpp"
break;
- case 344: /* type_specifier_nonarray: SAMPLER1D */
-#line 2507 "MachineIndependent/glslang.y"
+ case 347: /* type_specifier_nonarray: SAMPLER1D */
+#line 2532 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat, Esd1D);
}
-#line 8807 "MachineIndependent/glslang_tab.cpp"
+#line 8865 "MachineIndependent/glslang_tab.cpp"
break;
- case 345: /* type_specifier_nonarray: SAMPLER2D */
-#line 2513 "MachineIndependent/glslang.y"
+ case 348: /* type_specifier_nonarray: SAMPLER2D */
+#line 2538 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat, Esd2D);
}
-#line 8817 "MachineIndependent/glslang_tab.cpp"
+#line 8875 "MachineIndependent/glslang_tab.cpp"
break;
- case 346: /* type_specifier_nonarray: SAMPLER3D */
-#line 2518 "MachineIndependent/glslang.y"
+ case 349: /* type_specifier_nonarray: SAMPLER3D */
+#line 2543 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat, Esd3D);
}
-#line 8827 "MachineIndependent/glslang_tab.cpp"
+#line 8885 "MachineIndependent/glslang_tab.cpp"
break;
- case 347: /* type_specifier_nonarray: SAMPLERCUBE */
-#line 2523 "MachineIndependent/glslang.y"
+ case 350: /* type_specifier_nonarray: SAMPLERCUBE */
+#line 2548 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat, EsdCube);
}
-#line 8837 "MachineIndependent/glslang_tab.cpp"
+#line 8895 "MachineIndependent/glslang_tab.cpp"
break;
- case 348: /* type_specifier_nonarray: SAMPLER2DSHADOW */
-#line 2528 "MachineIndependent/glslang.y"
+ case 351: /* type_specifier_nonarray: SAMPLER2DSHADOW */
+#line 2553 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat, Esd2D, false, true);
}
-#line 8847 "MachineIndependent/glslang_tab.cpp"
+#line 8905 "MachineIndependent/glslang_tab.cpp"
break;
- case 349: /* type_specifier_nonarray: SAMPLERCUBESHADOW */
-#line 2533 "MachineIndependent/glslang.y"
+ case 352: /* type_specifier_nonarray: SAMPLERCUBESHADOW */
+#line 2558 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat, EsdCube, false, true);
}
-#line 8857 "MachineIndependent/glslang_tab.cpp"
+#line 8915 "MachineIndependent/glslang_tab.cpp"
break;
- case 350: /* type_specifier_nonarray: SAMPLER2DARRAY */
-#line 2538 "MachineIndependent/glslang.y"
+ case 353: /* type_specifier_nonarray: SAMPLER2DARRAY */
+#line 2563 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat, Esd2D, true);
}
-#line 8867 "MachineIndependent/glslang_tab.cpp"
+#line 8925 "MachineIndependent/glslang_tab.cpp"
break;
- case 351: /* type_specifier_nonarray: SAMPLER2DARRAYSHADOW */
-#line 2543 "MachineIndependent/glslang.y"
+ case 354: /* type_specifier_nonarray: SAMPLER2DARRAYSHADOW */
+#line 2568 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat, Esd2D, true, true);
}
-#line 8877 "MachineIndependent/glslang_tab.cpp"
+#line 8935 "MachineIndependent/glslang_tab.cpp"
break;
- case 352: /* type_specifier_nonarray: SAMPLER1DSHADOW */
-#line 2549 "MachineIndependent/glslang.y"
+ case 355: /* type_specifier_nonarray: SAMPLER1DSHADOW */
+#line 2574 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat, Esd1D, false, true);
}
-#line 8887 "MachineIndependent/glslang_tab.cpp"
+#line 8945 "MachineIndependent/glslang_tab.cpp"
break;
- case 353: /* type_specifier_nonarray: SAMPLER1DARRAY */
-#line 2554 "MachineIndependent/glslang.y"
+ case 356: /* type_specifier_nonarray: SAMPLER1DARRAY */
+#line 2579 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat, Esd1D, true);
}
-#line 8897 "MachineIndependent/glslang_tab.cpp"
+#line 8955 "MachineIndependent/glslang_tab.cpp"
break;
- case 354: /* type_specifier_nonarray: SAMPLER1DARRAYSHADOW */
-#line 2559 "MachineIndependent/glslang.y"
+ case 357: /* type_specifier_nonarray: SAMPLER1DARRAYSHADOW */
+#line 2584 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat, Esd1D, true, true);
}
-#line 8907 "MachineIndependent/glslang_tab.cpp"
+#line 8965 "MachineIndependent/glslang_tab.cpp"
break;
- case 355: /* type_specifier_nonarray: SAMPLERCUBEARRAY */
-#line 2564 "MachineIndependent/glslang.y"
+ case 358: /* type_specifier_nonarray: SAMPLERCUBEARRAY */
+#line 2589 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat, EsdCube, true);
}
-#line 8917 "MachineIndependent/glslang_tab.cpp"
+#line 8975 "MachineIndependent/glslang_tab.cpp"
break;
- case 356: /* type_specifier_nonarray: SAMPLERCUBEARRAYSHADOW */
-#line 2569 "MachineIndependent/glslang.y"
+ case 359: /* type_specifier_nonarray: SAMPLERCUBEARRAYSHADOW */
+#line 2594 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat, EsdCube, true, true);
}
-#line 8927 "MachineIndependent/glslang_tab.cpp"
+#line 8985 "MachineIndependent/glslang_tab.cpp"
break;
- case 357: /* type_specifier_nonarray: F16SAMPLER1D */
-#line 2574 "MachineIndependent/glslang.y"
+ case 360: /* type_specifier_nonarray: F16SAMPLER1D */
+#line 2599 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, Esd1D);
}
-#line 8938 "MachineIndependent/glslang_tab.cpp"
+#line 8996 "MachineIndependent/glslang_tab.cpp"
break;
- case 358: /* type_specifier_nonarray: F16SAMPLER2D */
-#line 2580 "MachineIndependent/glslang.y"
+ case 361: /* type_specifier_nonarray: F16SAMPLER2D */
+#line 2605 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, Esd2D);
}
-#line 8949 "MachineIndependent/glslang_tab.cpp"
+#line 9007 "MachineIndependent/glslang_tab.cpp"
break;
- case 359: /* type_specifier_nonarray: F16SAMPLER3D */
-#line 2586 "MachineIndependent/glslang.y"
+ case 362: /* type_specifier_nonarray: F16SAMPLER3D */
+#line 2611 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, Esd3D);
}
-#line 8960 "MachineIndependent/glslang_tab.cpp"
+#line 9018 "MachineIndependent/glslang_tab.cpp"
break;
- case 360: /* type_specifier_nonarray: F16SAMPLERCUBE */
-#line 2592 "MachineIndependent/glslang.y"
+ case 363: /* type_specifier_nonarray: F16SAMPLERCUBE */
+#line 2617 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, EsdCube);
}
-#line 8971 "MachineIndependent/glslang_tab.cpp"
+#line 9029 "MachineIndependent/glslang_tab.cpp"
break;
- case 361: /* type_specifier_nonarray: F16SAMPLER1DSHADOW */
-#line 2598 "MachineIndependent/glslang.y"
+ case 364: /* type_specifier_nonarray: F16SAMPLER1DSHADOW */
+#line 2623 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, Esd1D, false, true);
}
-#line 8982 "MachineIndependent/glslang_tab.cpp"
+#line 9040 "MachineIndependent/glslang_tab.cpp"
break;
- case 362: /* type_specifier_nonarray: F16SAMPLER2DSHADOW */
-#line 2604 "MachineIndependent/glslang.y"
+ case 365: /* type_specifier_nonarray: F16SAMPLER2DSHADOW */
+#line 2629 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, Esd2D, false, true);
}
-#line 8993 "MachineIndependent/glslang_tab.cpp"
+#line 9051 "MachineIndependent/glslang_tab.cpp"
break;
- case 363: /* type_specifier_nonarray: F16SAMPLERCUBESHADOW */
-#line 2610 "MachineIndependent/glslang.y"
+ case 366: /* type_specifier_nonarray: F16SAMPLERCUBESHADOW */
+#line 2635 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, EsdCube, false, true);
}
-#line 9004 "MachineIndependent/glslang_tab.cpp"
+#line 9062 "MachineIndependent/glslang_tab.cpp"
break;
- case 364: /* type_specifier_nonarray: F16SAMPLER1DARRAY */
-#line 2616 "MachineIndependent/glslang.y"
+ case 367: /* type_specifier_nonarray: F16SAMPLER1DARRAY */
+#line 2641 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, Esd1D, true);
}
-#line 9015 "MachineIndependent/glslang_tab.cpp"
+#line 9073 "MachineIndependent/glslang_tab.cpp"
break;
- case 365: /* type_specifier_nonarray: F16SAMPLER2DARRAY */
-#line 2622 "MachineIndependent/glslang.y"
+ case 368: /* type_specifier_nonarray: F16SAMPLER2DARRAY */
+#line 2647 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, Esd2D, true);
}
-#line 9026 "MachineIndependent/glslang_tab.cpp"
+#line 9084 "MachineIndependent/glslang_tab.cpp"
break;
- case 366: /* type_specifier_nonarray: F16SAMPLER1DARRAYSHADOW */
-#line 2628 "MachineIndependent/glslang.y"
+ case 369: /* type_specifier_nonarray: F16SAMPLER1DARRAYSHADOW */
+#line 2653 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, Esd1D, true, true);
}
-#line 9037 "MachineIndependent/glslang_tab.cpp"
+#line 9095 "MachineIndependent/glslang_tab.cpp"
break;
- case 367: /* type_specifier_nonarray: F16SAMPLER2DARRAYSHADOW */
-#line 2634 "MachineIndependent/glslang.y"
+ case 370: /* type_specifier_nonarray: F16SAMPLER2DARRAYSHADOW */
+#line 2659 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, Esd2D, true, true);
}
-#line 9048 "MachineIndependent/glslang_tab.cpp"
+#line 9106 "MachineIndependent/glslang_tab.cpp"
break;
- case 368: /* type_specifier_nonarray: F16SAMPLERCUBEARRAY */
-#line 2640 "MachineIndependent/glslang.y"
+ case 371: /* type_specifier_nonarray: F16SAMPLERCUBEARRAY */
+#line 2665 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, EsdCube, true);
}
-#line 9059 "MachineIndependent/glslang_tab.cpp"
+#line 9117 "MachineIndependent/glslang_tab.cpp"
break;
- case 369: /* type_specifier_nonarray: F16SAMPLERCUBEARRAYSHADOW */
-#line 2646 "MachineIndependent/glslang.y"
+ case 372: /* type_specifier_nonarray: F16SAMPLERCUBEARRAYSHADOW */
+#line 2671 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, EsdCube, true, true);
}
-#line 9070 "MachineIndependent/glslang_tab.cpp"
+#line 9128 "MachineIndependent/glslang_tab.cpp"
break;
- case 370: /* type_specifier_nonarray: ISAMPLER1D */
-#line 2652 "MachineIndependent/glslang.y"
+ case 373: /* type_specifier_nonarray: ISAMPLER1D */
+#line 2677 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtInt, Esd1D);
}
-#line 9080 "MachineIndependent/glslang_tab.cpp"
+#line 9138 "MachineIndependent/glslang_tab.cpp"
break;
- case 371: /* type_specifier_nonarray: ISAMPLER2D */
-#line 2658 "MachineIndependent/glslang.y"
+ case 374: /* type_specifier_nonarray: ISAMPLER2D */
+#line 2683 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtInt, Esd2D);
}
-#line 9090 "MachineIndependent/glslang_tab.cpp"
+#line 9148 "MachineIndependent/glslang_tab.cpp"
break;
- case 372: /* type_specifier_nonarray: ISAMPLER3D */
-#line 2663 "MachineIndependent/glslang.y"
+ case 375: /* type_specifier_nonarray: ISAMPLER3D */
+#line 2688 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtInt, Esd3D);
}
-#line 9100 "MachineIndependent/glslang_tab.cpp"
+#line 9158 "MachineIndependent/glslang_tab.cpp"
break;
- case 373: /* type_specifier_nonarray: ISAMPLERCUBE */
-#line 2668 "MachineIndependent/glslang.y"
+ case 376: /* type_specifier_nonarray: ISAMPLERCUBE */
+#line 2693 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtInt, EsdCube);
}
-#line 9110 "MachineIndependent/glslang_tab.cpp"
+#line 9168 "MachineIndependent/glslang_tab.cpp"
break;
- case 374: /* type_specifier_nonarray: ISAMPLER2DARRAY */
-#line 2673 "MachineIndependent/glslang.y"
+ case 377: /* type_specifier_nonarray: ISAMPLER2DARRAY */
+#line 2698 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtInt, Esd2D, true);
}
-#line 9120 "MachineIndependent/glslang_tab.cpp"
+#line 9178 "MachineIndependent/glslang_tab.cpp"
break;
- case 375: /* type_specifier_nonarray: USAMPLER2D */
-#line 2678 "MachineIndependent/glslang.y"
+ case 378: /* type_specifier_nonarray: USAMPLER2D */
+#line 2703 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtUint, Esd2D);
}
-#line 9130 "MachineIndependent/glslang_tab.cpp"
+#line 9188 "MachineIndependent/glslang_tab.cpp"
break;
- case 376: /* type_specifier_nonarray: USAMPLER3D */
-#line 2683 "MachineIndependent/glslang.y"
+ case 379: /* type_specifier_nonarray: USAMPLER3D */
+#line 2708 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtUint, Esd3D);
}
-#line 9140 "MachineIndependent/glslang_tab.cpp"
+#line 9198 "MachineIndependent/glslang_tab.cpp"
break;
- case 377: /* type_specifier_nonarray: USAMPLERCUBE */
-#line 2688 "MachineIndependent/glslang.y"
+ case 380: /* type_specifier_nonarray: USAMPLERCUBE */
+#line 2713 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtUint, EsdCube);
}
-#line 9150 "MachineIndependent/glslang_tab.cpp"
+#line 9208 "MachineIndependent/glslang_tab.cpp"
break;
- case 378: /* type_specifier_nonarray: ISAMPLER1DARRAY */
-#line 2694 "MachineIndependent/glslang.y"
+ case 381: /* type_specifier_nonarray: ISAMPLER1DARRAY */
+#line 2719 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtInt, Esd1D, true);
}
-#line 9160 "MachineIndependent/glslang_tab.cpp"
+#line 9218 "MachineIndependent/glslang_tab.cpp"
break;
- case 379: /* type_specifier_nonarray: ISAMPLERCUBEARRAY */
-#line 2699 "MachineIndependent/glslang.y"
+ case 382: /* type_specifier_nonarray: ISAMPLERCUBEARRAY */
+#line 2724 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtInt, EsdCube, true);
}
-#line 9170 "MachineIndependent/glslang_tab.cpp"
+#line 9228 "MachineIndependent/glslang_tab.cpp"
break;
- case 380: /* type_specifier_nonarray: USAMPLER1D */
-#line 2704 "MachineIndependent/glslang.y"
+ case 383: /* type_specifier_nonarray: USAMPLER1D */
+#line 2729 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtUint, Esd1D);
}
-#line 9180 "MachineIndependent/glslang_tab.cpp"
+#line 9238 "MachineIndependent/glslang_tab.cpp"
break;
- case 381: /* type_specifier_nonarray: USAMPLER1DARRAY */
-#line 2709 "MachineIndependent/glslang.y"
+ case 384: /* type_specifier_nonarray: USAMPLER1DARRAY */
+#line 2734 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtUint, Esd1D, true);
}
-#line 9190 "MachineIndependent/glslang_tab.cpp"
+#line 9248 "MachineIndependent/glslang_tab.cpp"
break;
- case 382: /* type_specifier_nonarray: USAMPLERCUBEARRAY */
-#line 2714 "MachineIndependent/glslang.y"
+ case 385: /* type_specifier_nonarray: USAMPLERCUBEARRAY */
+#line 2739 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtUint, EsdCube, true);
}
-#line 9200 "MachineIndependent/glslang_tab.cpp"
+#line 9258 "MachineIndependent/glslang_tab.cpp"
break;
- case 383: /* type_specifier_nonarray: TEXTURECUBEARRAY */
-#line 2719 "MachineIndependent/glslang.y"
+ case 386: /* type_specifier_nonarray: TEXTURECUBEARRAY */
+#line 2744 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat, EsdCube, true);
}
-#line 9210 "MachineIndependent/glslang_tab.cpp"
+#line 9268 "MachineIndependent/glslang_tab.cpp"
break;
- case 384: /* type_specifier_nonarray: ITEXTURECUBEARRAY */
-#line 2724 "MachineIndependent/glslang.y"
+ case 387: /* type_specifier_nonarray: ITEXTURECUBEARRAY */
+#line 2749 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtInt, EsdCube, true);
}
-#line 9220 "MachineIndependent/glslang_tab.cpp"
+#line 9278 "MachineIndependent/glslang_tab.cpp"
break;
- case 385: /* type_specifier_nonarray: UTEXTURECUBEARRAY */
-#line 2729 "MachineIndependent/glslang.y"
+ case 388: /* type_specifier_nonarray: UTEXTURECUBEARRAY */
+#line 2754 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtUint, EsdCube, true);
}
-#line 9230 "MachineIndependent/glslang_tab.cpp"
+#line 9288 "MachineIndependent/glslang_tab.cpp"
break;
- case 386: /* type_specifier_nonarray: USAMPLER2DARRAY */
-#line 2735 "MachineIndependent/glslang.y"
+ case 389: /* type_specifier_nonarray: USAMPLER2DARRAY */
+#line 2760 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtUint, Esd2D, true);
}
-#line 9240 "MachineIndependent/glslang_tab.cpp"
+#line 9298 "MachineIndependent/glslang_tab.cpp"
break;
- case 387: /* type_specifier_nonarray: TEXTURE2D */
-#line 2740 "MachineIndependent/glslang.y"
+ case 390: /* type_specifier_nonarray: TEXTURE2D */
+#line 2765 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat, Esd2D);
}
-#line 9250 "MachineIndependent/glslang_tab.cpp"
+#line 9308 "MachineIndependent/glslang_tab.cpp"
break;
- case 388: /* type_specifier_nonarray: TEXTURE3D */
-#line 2745 "MachineIndependent/glslang.y"
+ case 391: /* type_specifier_nonarray: TEXTURE3D */
+#line 2770 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat, Esd3D);
}
-#line 9260 "MachineIndependent/glslang_tab.cpp"
+#line 9318 "MachineIndependent/glslang_tab.cpp"
break;
- case 389: /* type_specifier_nonarray: TEXTURE2DARRAY */
-#line 2750 "MachineIndependent/glslang.y"
+ case 392: /* type_specifier_nonarray: TEXTURE2DARRAY */
+#line 2775 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat, Esd2D, true);
}
-#line 9270 "MachineIndependent/glslang_tab.cpp"
+#line 9328 "MachineIndependent/glslang_tab.cpp"
break;
- case 390: /* type_specifier_nonarray: TEXTURECUBE */
-#line 2755 "MachineIndependent/glslang.y"
+ case 393: /* type_specifier_nonarray: TEXTURECUBE */
+#line 2780 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat, EsdCube);
}
-#line 9280 "MachineIndependent/glslang_tab.cpp"
+#line 9338 "MachineIndependent/glslang_tab.cpp"
break;
- case 391: /* type_specifier_nonarray: ITEXTURE2D */
-#line 2760 "MachineIndependent/glslang.y"
+ case 394: /* type_specifier_nonarray: ITEXTURE2D */
+#line 2785 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtInt, Esd2D);
}
-#line 9290 "MachineIndependent/glslang_tab.cpp"
+#line 9348 "MachineIndependent/glslang_tab.cpp"
break;
- case 392: /* type_specifier_nonarray: ITEXTURE3D */
-#line 2765 "MachineIndependent/glslang.y"
+ case 395: /* type_specifier_nonarray: ITEXTURE3D */
+#line 2790 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtInt, Esd3D);
}
-#line 9300 "MachineIndependent/glslang_tab.cpp"
+#line 9358 "MachineIndependent/glslang_tab.cpp"
break;
- case 393: /* type_specifier_nonarray: ITEXTURECUBE */
-#line 2770 "MachineIndependent/glslang.y"
+ case 396: /* type_specifier_nonarray: ITEXTURECUBE */
+#line 2795 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtInt, EsdCube);
}
-#line 9310 "MachineIndependent/glslang_tab.cpp"
+#line 9368 "MachineIndependent/glslang_tab.cpp"
break;
- case 394: /* type_specifier_nonarray: ITEXTURE2DARRAY */
-#line 2775 "MachineIndependent/glslang.y"
+ case 397: /* type_specifier_nonarray: ITEXTURE2DARRAY */
+#line 2800 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtInt, Esd2D, true);
}
-#line 9320 "MachineIndependent/glslang_tab.cpp"
+#line 9378 "MachineIndependent/glslang_tab.cpp"
break;
- case 395: /* type_specifier_nonarray: UTEXTURE2D */
-#line 2780 "MachineIndependent/glslang.y"
+ case 398: /* type_specifier_nonarray: UTEXTURE2D */
+#line 2805 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtUint, Esd2D);
}
-#line 9330 "MachineIndependent/glslang_tab.cpp"
+#line 9388 "MachineIndependent/glslang_tab.cpp"
break;
- case 396: /* type_specifier_nonarray: UTEXTURE3D */
-#line 2785 "MachineIndependent/glslang.y"
+ case 399: /* type_specifier_nonarray: UTEXTURE3D */
+#line 2810 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtUint, Esd3D);
}
-#line 9340 "MachineIndependent/glslang_tab.cpp"
+#line 9398 "MachineIndependent/glslang_tab.cpp"
break;
- case 397: /* type_specifier_nonarray: UTEXTURECUBE */
-#line 2790 "MachineIndependent/glslang.y"
+ case 400: /* type_specifier_nonarray: UTEXTURECUBE */
+#line 2815 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtUint, EsdCube);
}
-#line 9350 "MachineIndependent/glslang_tab.cpp"
+#line 9408 "MachineIndependent/glslang_tab.cpp"
break;
- case 398: /* type_specifier_nonarray: UTEXTURE2DARRAY */
-#line 2795 "MachineIndependent/glslang.y"
+ case 401: /* type_specifier_nonarray: UTEXTURE2DARRAY */
+#line 2820 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtUint, Esd2D, true);
}
-#line 9360 "MachineIndependent/glslang_tab.cpp"
+#line 9418 "MachineIndependent/glslang_tab.cpp"
break;
- case 399: /* type_specifier_nonarray: SAMPLER */
-#line 2800 "MachineIndependent/glslang.y"
+ case 402: /* type_specifier_nonarray: SAMPLER */
+#line 2825 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setPureSampler(false);
}
-#line 9370 "MachineIndependent/glslang_tab.cpp"
+#line 9428 "MachineIndependent/glslang_tab.cpp"
break;
- case 400: /* type_specifier_nonarray: SAMPLERSHADOW */
-#line 2805 "MachineIndependent/glslang.y"
+ case 403: /* type_specifier_nonarray: SAMPLERSHADOW */
+#line 2830 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setPureSampler(true);
}
-#line 9380 "MachineIndependent/glslang_tab.cpp"
+#line 9438 "MachineIndependent/glslang_tab.cpp"
break;
- case 401: /* type_specifier_nonarray: SAMPLER2DRECT */
-#line 2811 "MachineIndependent/glslang.y"
+ case 404: /* type_specifier_nonarray: SAMPLER2DRECT */
+#line 2836 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat, EsdRect);
}
-#line 9390 "MachineIndependent/glslang_tab.cpp"
+#line 9448 "MachineIndependent/glslang_tab.cpp"
break;
- case 402: /* type_specifier_nonarray: SAMPLER2DRECTSHADOW */
-#line 2816 "MachineIndependent/glslang.y"
+ case 405: /* type_specifier_nonarray: SAMPLER2DRECTSHADOW */
+#line 2841 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat, EsdRect, false, true);
}
-#line 9400 "MachineIndependent/glslang_tab.cpp"
+#line 9458 "MachineIndependent/glslang_tab.cpp"
break;
- case 403: /* type_specifier_nonarray: F16SAMPLER2DRECT */
-#line 2821 "MachineIndependent/glslang.y"
+ case 406: /* type_specifier_nonarray: F16SAMPLER2DRECT */
+#line 2846 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, EsdRect);
}
-#line 9411 "MachineIndependent/glslang_tab.cpp"
+#line 9469 "MachineIndependent/glslang_tab.cpp"
break;
- case 404: /* type_specifier_nonarray: F16SAMPLER2DRECTSHADOW */
-#line 2827 "MachineIndependent/glslang.y"
+ case 407: /* type_specifier_nonarray: F16SAMPLER2DRECTSHADOW */
+#line 2852 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, EsdRect, false, true);
}
-#line 9422 "MachineIndependent/glslang_tab.cpp"
+#line 9480 "MachineIndependent/glslang_tab.cpp"
break;
- case 405: /* type_specifier_nonarray: ISAMPLER2DRECT */
-#line 2833 "MachineIndependent/glslang.y"
+ case 408: /* type_specifier_nonarray: ISAMPLER2DRECT */
+#line 2858 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtInt, EsdRect);
}
-#line 9432 "MachineIndependent/glslang_tab.cpp"
+#line 9490 "MachineIndependent/glslang_tab.cpp"
break;
- case 406: /* type_specifier_nonarray: USAMPLER2DRECT */
-#line 2838 "MachineIndependent/glslang.y"
+ case 409: /* type_specifier_nonarray: USAMPLER2DRECT */
+#line 2863 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtUint, EsdRect);
}
-#line 9442 "MachineIndependent/glslang_tab.cpp"
+#line 9500 "MachineIndependent/glslang_tab.cpp"
break;
- case 407: /* type_specifier_nonarray: SAMPLERBUFFER */
-#line 2843 "MachineIndependent/glslang.y"
+ case 410: /* type_specifier_nonarray: SAMPLERBUFFER */
+#line 2868 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat, EsdBuffer);
}
-#line 9452 "MachineIndependent/glslang_tab.cpp"
+#line 9510 "MachineIndependent/glslang_tab.cpp"
break;
- case 408: /* type_specifier_nonarray: F16SAMPLERBUFFER */
-#line 2848 "MachineIndependent/glslang.y"
+ case 411: /* type_specifier_nonarray: F16SAMPLERBUFFER */
+#line 2873 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, EsdBuffer);
}
-#line 9463 "MachineIndependent/glslang_tab.cpp"
+#line 9521 "MachineIndependent/glslang_tab.cpp"
break;
- case 409: /* type_specifier_nonarray: ISAMPLERBUFFER */
-#line 2854 "MachineIndependent/glslang.y"
+ case 412: /* type_specifier_nonarray: ISAMPLERBUFFER */
+#line 2879 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtInt, EsdBuffer);
}
-#line 9473 "MachineIndependent/glslang_tab.cpp"
+#line 9531 "MachineIndependent/glslang_tab.cpp"
break;
- case 410: /* type_specifier_nonarray: USAMPLERBUFFER */
-#line 2859 "MachineIndependent/glslang.y"
+ case 413: /* type_specifier_nonarray: USAMPLERBUFFER */
+#line 2884 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtUint, EsdBuffer);
}
-#line 9483 "MachineIndependent/glslang_tab.cpp"
+#line 9541 "MachineIndependent/glslang_tab.cpp"
break;
- case 411: /* type_specifier_nonarray: SAMPLER2DMS */
-#line 2864 "MachineIndependent/glslang.y"
+ case 414: /* type_specifier_nonarray: SAMPLER2DMS */
+#line 2889 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat, Esd2D, false, false, true);
}
-#line 9493 "MachineIndependent/glslang_tab.cpp"
+#line 9551 "MachineIndependent/glslang_tab.cpp"
break;
- case 412: /* type_specifier_nonarray: F16SAMPLER2DMS */
-#line 2869 "MachineIndependent/glslang.y"
+ case 415: /* type_specifier_nonarray: F16SAMPLER2DMS */
+#line 2894 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, Esd2D, false, false, true);
}
-#line 9504 "MachineIndependent/glslang_tab.cpp"
+#line 9562 "MachineIndependent/glslang_tab.cpp"
break;
- case 413: /* type_specifier_nonarray: ISAMPLER2DMS */
-#line 2875 "MachineIndependent/glslang.y"
+ case 416: /* type_specifier_nonarray: ISAMPLER2DMS */
+#line 2900 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtInt, Esd2D, false, false, true);
}
-#line 9514 "MachineIndependent/glslang_tab.cpp"
+#line 9572 "MachineIndependent/glslang_tab.cpp"
break;
- case 414: /* type_specifier_nonarray: USAMPLER2DMS */
-#line 2880 "MachineIndependent/glslang.y"
+ case 417: /* type_specifier_nonarray: USAMPLER2DMS */
+#line 2905 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtUint, Esd2D, false, false, true);
}
-#line 9524 "MachineIndependent/glslang_tab.cpp"
+#line 9582 "MachineIndependent/glslang_tab.cpp"
break;
- case 415: /* type_specifier_nonarray: SAMPLER2DMSARRAY */
-#line 2885 "MachineIndependent/glslang.y"
+ case 418: /* type_specifier_nonarray: SAMPLER2DMSARRAY */
+#line 2910 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat, Esd2D, true, false, true);
}
-#line 9534 "MachineIndependent/glslang_tab.cpp"
+#line 9592 "MachineIndependent/glslang_tab.cpp"
break;
- case 416: /* type_specifier_nonarray: F16SAMPLER2DMSARRAY */
-#line 2890 "MachineIndependent/glslang.y"
+ case 419: /* type_specifier_nonarray: F16SAMPLER2DMSARRAY */
+#line 2915 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, Esd2D, true, false, true);
}
-#line 9545 "MachineIndependent/glslang_tab.cpp"
+#line 9603 "MachineIndependent/glslang_tab.cpp"
break;
- case 417: /* type_specifier_nonarray: ISAMPLER2DMSARRAY */
-#line 2896 "MachineIndependent/glslang.y"
+ case 420: /* type_specifier_nonarray: ISAMPLER2DMSARRAY */
+#line 2921 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtInt, Esd2D, true, false, true);
}
-#line 9555 "MachineIndependent/glslang_tab.cpp"
+#line 9613 "MachineIndependent/glslang_tab.cpp"
break;
- case 418: /* type_specifier_nonarray: USAMPLER2DMSARRAY */
-#line 2901 "MachineIndependent/glslang.y"
+ case 421: /* type_specifier_nonarray: USAMPLER2DMSARRAY */
+#line 2926 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtUint, Esd2D, true, false, true);
}
-#line 9565 "MachineIndependent/glslang_tab.cpp"
+#line 9623 "MachineIndependent/glslang_tab.cpp"
break;
- case 419: /* type_specifier_nonarray: TEXTURE1D */
-#line 2906 "MachineIndependent/glslang.y"
+ case 422: /* type_specifier_nonarray: TEXTURE1D */
+#line 2931 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat, Esd1D);
}
-#line 9575 "MachineIndependent/glslang_tab.cpp"
+#line 9633 "MachineIndependent/glslang_tab.cpp"
break;
- case 420: /* type_specifier_nonarray: F16TEXTURE1D */
-#line 2911 "MachineIndependent/glslang.y"
+ case 423: /* type_specifier_nonarray: F16TEXTURE1D */
+#line 2936 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat16, Esd1D);
}
-#line 9586 "MachineIndependent/glslang_tab.cpp"
+#line 9644 "MachineIndependent/glslang_tab.cpp"
break;
- case 421: /* type_specifier_nonarray: F16TEXTURE2D */
-#line 2917 "MachineIndependent/glslang.y"
+ case 424: /* type_specifier_nonarray: F16TEXTURE2D */
+#line 2942 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat16, Esd2D);
}
-#line 9597 "MachineIndependent/glslang_tab.cpp"
+#line 9655 "MachineIndependent/glslang_tab.cpp"
break;
- case 422: /* type_specifier_nonarray: F16TEXTURE3D */
-#line 2923 "MachineIndependent/glslang.y"
+ case 425: /* type_specifier_nonarray: F16TEXTURE3D */
+#line 2948 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat16, Esd3D);
}
-#line 9608 "MachineIndependent/glslang_tab.cpp"
+#line 9666 "MachineIndependent/glslang_tab.cpp"
break;
- case 423: /* type_specifier_nonarray: F16TEXTURECUBE */
-#line 2929 "MachineIndependent/glslang.y"
+ case 426: /* type_specifier_nonarray: F16TEXTURECUBE */
+#line 2954 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat16, EsdCube);
}
-#line 9619 "MachineIndependent/glslang_tab.cpp"
+#line 9677 "MachineIndependent/glslang_tab.cpp"
break;
- case 424: /* type_specifier_nonarray: TEXTURE1DARRAY */
-#line 2935 "MachineIndependent/glslang.y"
+ case 427: /* type_specifier_nonarray: TEXTURE1DARRAY */
+#line 2960 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat, Esd1D, true);
}
-#line 9629 "MachineIndependent/glslang_tab.cpp"
+#line 9687 "MachineIndependent/glslang_tab.cpp"
break;
- case 425: /* type_specifier_nonarray: F16TEXTURE1DARRAY */
-#line 2940 "MachineIndependent/glslang.y"
+ case 428: /* type_specifier_nonarray: F16TEXTURE1DARRAY */
+#line 2965 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat16, Esd1D, true);
}
-#line 9640 "MachineIndependent/glslang_tab.cpp"
+#line 9698 "MachineIndependent/glslang_tab.cpp"
break;
- case 426: /* type_specifier_nonarray: F16TEXTURE2DARRAY */
-#line 2946 "MachineIndependent/glslang.y"
+ case 429: /* type_specifier_nonarray: F16TEXTURE2DARRAY */
+#line 2971 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat16, Esd2D, true);
}
-#line 9651 "MachineIndependent/glslang_tab.cpp"
+#line 9709 "MachineIndependent/glslang_tab.cpp"
break;
- case 427: /* type_specifier_nonarray: F16TEXTURECUBEARRAY */
-#line 2952 "MachineIndependent/glslang.y"
+ case 430: /* type_specifier_nonarray: F16TEXTURECUBEARRAY */
+#line 2977 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat16, EsdCube, true);
}
-#line 9662 "MachineIndependent/glslang_tab.cpp"
+#line 9720 "MachineIndependent/glslang_tab.cpp"
break;
- case 428: /* type_specifier_nonarray: ITEXTURE1D */
-#line 2958 "MachineIndependent/glslang.y"
+ case 431: /* type_specifier_nonarray: ITEXTURE1D */
+#line 2983 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtInt, Esd1D);
}
-#line 9672 "MachineIndependent/glslang_tab.cpp"
+#line 9730 "MachineIndependent/glslang_tab.cpp"
break;
- case 429: /* type_specifier_nonarray: ITEXTURE1DARRAY */
-#line 2963 "MachineIndependent/glslang.y"
+ case 432: /* type_specifier_nonarray: ITEXTURE1DARRAY */
+#line 2988 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtInt, Esd1D, true);
}
-#line 9682 "MachineIndependent/glslang_tab.cpp"
+#line 9740 "MachineIndependent/glslang_tab.cpp"
break;
- case 430: /* type_specifier_nonarray: UTEXTURE1D */
-#line 2968 "MachineIndependent/glslang.y"
+ case 433: /* type_specifier_nonarray: UTEXTURE1D */
+#line 2993 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtUint, Esd1D);
}
-#line 9692 "MachineIndependent/glslang_tab.cpp"
+#line 9750 "MachineIndependent/glslang_tab.cpp"
break;
- case 431: /* type_specifier_nonarray: UTEXTURE1DARRAY */
-#line 2973 "MachineIndependent/glslang.y"
+ case 434: /* type_specifier_nonarray: UTEXTURE1DARRAY */
+#line 2998 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtUint, Esd1D, true);
}
-#line 9702 "MachineIndependent/glslang_tab.cpp"
+#line 9760 "MachineIndependent/glslang_tab.cpp"
break;
- case 432: /* type_specifier_nonarray: TEXTURE2DRECT */
-#line 2978 "MachineIndependent/glslang.y"
+ case 435: /* type_specifier_nonarray: TEXTURE2DRECT */
+#line 3003 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat, EsdRect);
}
-#line 9712 "MachineIndependent/glslang_tab.cpp"
+#line 9770 "MachineIndependent/glslang_tab.cpp"
break;
- case 433: /* type_specifier_nonarray: F16TEXTURE2DRECT */
-#line 2983 "MachineIndependent/glslang.y"
+ case 436: /* type_specifier_nonarray: F16TEXTURE2DRECT */
+#line 3008 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat16, EsdRect);
}
-#line 9723 "MachineIndependent/glslang_tab.cpp"
+#line 9781 "MachineIndependent/glslang_tab.cpp"
break;
- case 434: /* type_specifier_nonarray: ITEXTURE2DRECT */
-#line 2989 "MachineIndependent/glslang.y"
+ case 437: /* type_specifier_nonarray: ITEXTURE2DRECT */
+#line 3014 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtInt, EsdRect);
}
-#line 9733 "MachineIndependent/glslang_tab.cpp"
+#line 9791 "MachineIndependent/glslang_tab.cpp"
break;
- case 435: /* type_specifier_nonarray: UTEXTURE2DRECT */
-#line 2994 "MachineIndependent/glslang.y"
+ case 438: /* type_specifier_nonarray: UTEXTURE2DRECT */
+#line 3019 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtUint, EsdRect);
}
-#line 9743 "MachineIndependent/glslang_tab.cpp"
+#line 9801 "MachineIndependent/glslang_tab.cpp"
break;
- case 436: /* type_specifier_nonarray: TEXTUREBUFFER */
-#line 2999 "MachineIndependent/glslang.y"
+ case 439: /* type_specifier_nonarray: TEXTUREBUFFER */
+#line 3024 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat, EsdBuffer);
}
-#line 9753 "MachineIndependent/glslang_tab.cpp"
+#line 9811 "MachineIndependent/glslang_tab.cpp"
break;
- case 437: /* type_specifier_nonarray: F16TEXTUREBUFFER */
-#line 3004 "MachineIndependent/glslang.y"
+ case 440: /* type_specifier_nonarray: F16TEXTUREBUFFER */
+#line 3029 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat16, EsdBuffer);
}
-#line 9764 "MachineIndependent/glslang_tab.cpp"
+#line 9822 "MachineIndependent/glslang_tab.cpp"
break;
- case 438: /* type_specifier_nonarray: ITEXTUREBUFFER */
-#line 3010 "MachineIndependent/glslang.y"
+ case 441: /* type_specifier_nonarray: ITEXTUREBUFFER */
+#line 3035 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtInt, EsdBuffer);
}
-#line 9774 "MachineIndependent/glslang_tab.cpp"
+#line 9832 "MachineIndependent/glslang_tab.cpp"
break;
- case 439: /* type_specifier_nonarray: UTEXTUREBUFFER */
-#line 3015 "MachineIndependent/glslang.y"
+ case 442: /* type_specifier_nonarray: UTEXTUREBUFFER */
+#line 3040 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtUint, EsdBuffer);
}
-#line 9784 "MachineIndependent/glslang_tab.cpp"
+#line 9842 "MachineIndependent/glslang_tab.cpp"
break;
- case 440: /* type_specifier_nonarray: TEXTURE2DMS */
-#line 3020 "MachineIndependent/glslang.y"
+ case 443: /* type_specifier_nonarray: TEXTURE2DMS */
+#line 3045 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat, Esd2D, false, false, true);
}
-#line 9794 "MachineIndependent/glslang_tab.cpp"
+#line 9852 "MachineIndependent/glslang_tab.cpp"
break;
- case 441: /* type_specifier_nonarray: F16TEXTURE2DMS */
-#line 3025 "MachineIndependent/glslang.y"
+ case 444: /* type_specifier_nonarray: F16TEXTURE2DMS */
+#line 3050 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat16, Esd2D, false, false, true);
}
-#line 9805 "MachineIndependent/glslang_tab.cpp"
+#line 9863 "MachineIndependent/glslang_tab.cpp"
break;
- case 442: /* type_specifier_nonarray: ITEXTURE2DMS */
-#line 3031 "MachineIndependent/glslang.y"
+ case 445: /* type_specifier_nonarray: ITEXTURE2DMS */
+#line 3056 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtInt, Esd2D, false, false, true);
}
-#line 9815 "MachineIndependent/glslang_tab.cpp"
+#line 9873 "MachineIndependent/glslang_tab.cpp"
break;
- case 443: /* type_specifier_nonarray: UTEXTURE2DMS */
-#line 3036 "MachineIndependent/glslang.y"
+ case 446: /* type_specifier_nonarray: UTEXTURE2DMS */
+#line 3061 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtUint, Esd2D, false, false, true);
}
-#line 9825 "MachineIndependent/glslang_tab.cpp"
+#line 9883 "MachineIndependent/glslang_tab.cpp"
break;
- case 444: /* type_specifier_nonarray: TEXTURE2DMSARRAY */
-#line 3041 "MachineIndependent/glslang.y"
+ case 447: /* type_specifier_nonarray: TEXTURE2DMSARRAY */
+#line 3066 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat, Esd2D, true, false, true);
}
-#line 9835 "MachineIndependent/glslang_tab.cpp"
+#line 9893 "MachineIndependent/glslang_tab.cpp"
break;
- case 445: /* type_specifier_nonarray: F16TEXTURE2DMSARRAY */
-#line 3046 "MachineIndependent/glslang.y"
+ case 448: /* type_specifier_nonarray: F16TEXTURE2DMSARRAY */
+#line 3071 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat16, Esd2D, true, false, true);
}
-#line 9846 "MachineIndependent/glslang_tab.cpp"
+#line 9904 "MachineIndependent/glslang_tab.cpp"
break;
- case 446: /* type_specifier_nonarray: ITEXTURE2DMSARRAY */
-#line 3052 "MachineIndependent/glslang.y"
+ case 449: /* type_specifier_nonarray: ITEXTURE2DMSARRAY */
+#line 3077 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtInt, Esd2D, true, false, true);
}
-#line 9856 "MachineIndependent/glslang_tab.cpp"
+#line 9914 "MachineIndependent/glslang_tab.cpp"
break;
- case 447: /* type_specifier_nonarray: UTEXTURE2DMSARRAY */
-#line 3057 "MachineIndependent/glslang.y"
+ case 450: /* type_specifier_nonarray: UTEXTURE2DMSARRAY */
+#line 3082 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtUint, Esd2D, true, false, true);
}
-#line 9866 "MachineIndependent/glslang_tab.cpp"
+#line 9924 "MachineIndependent/glslang_tab.cpp"
break;
- case 448: /* type_specifier_nonarray: IMAGE1D */
-#line 3062 "MachineIndependent/glslang.y"
+ case 451: /* type_specifier_nonarray: IMAGE1D */
+#line 3087 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat, Esd1D);
}
-#line 9876 "MachineIndependent/glslang_tab.cpp"
+#line 9934 "MachineIndependent/glslang_tab.cpp"
break;
- case 449: /* type_specifier_nonarray: F16IMAGE1D */
-#line 3067 "MachineIndependent/glslang.y"
+ case 452: /* type_specifier_nonarray: F16IMAGE1D */
+#line 3092 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat16, Esd1D);
}
-#line 9887 "MachineIndependent/glslang_tab.cpp"
+#line 9945 "MachineIndependent/glslang_tab.cpp"
break;
- case 450: /* type_specifier_nonarray: IIMAGE1D */
-#line 3073 "MachineIndependent/glslang.y"
+ case 453: /* type_specifier_nonarray: IIMAGE1D */
+#line 3098 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt, Esd1D);
}
-#line 9897 "MachineIndependent/glslang_tab.cpp"
+#line 9955 "MachineIndependent/glslang_tab.cpp"
break;
- case 451: /* type_specifier_nonarray: UIMAGE1D */
-#line 3078 "MachineIndependent/glslang.y"
+ case 454: /* type_specifier_nonarray: UIMAGE1D */
+#line 3103 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint, Esd1D);
}
-#line 9907 "MachineIndependent/glslang_tab.cpp"
+#line 9965 "MachineIndependent/glslang_tab.cpp"
break;
- case 452: /* type_specifier_nonarray: IMAGE2D */
-#line 3083 "MachineIndependent/glslang.y"
+ case 455: /* type_specifier_nonarray: IMAGE2D */
+#line 3108 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat, Esd2D);
}
-#line 9917 "MachineIndependent/glslang_tab.cpp"
+#line 9975 "MachineIndependent/glslang_tab.cpp"
break;
- case 453: /* type_specifier_nonarray: F16IMAGE2D */
-#line 3088 "MachineIndependent/glslang.y"
+ case 456: /* type_specifier_nonarray: F16IMAGE2D */
+#line 3113 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat16, Esd2D);
}
-#line 9928 "MachineIndependent/glslang_tab.cpp"
+#line 9986 "MachineIndependent/glslang_tab.cpp"
break;
- case 454: /* type_specifier_nonarray: IIMAGE2D */
-#line 3094 "MachineIndependent/glslang.y"
+ case 457: /* type_specifier_nonarray: IIMAGE2D */
+#line 3119 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt, Esd2D);
}
-#line 9938 "MachineIndependent/glslang_tab.cpp"
+#line 9996 "MachineIndependent/glslang_tab.cpp"
break;
- case 455: /* type_specifier_nonarray: UIMAGE2D */
-#line 3099 "MachineIndependent/glslang.y"
+ case 458: /* type_specifier_nonarray: UIMAGE2D */
+#line 3124 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint, Esd2D);
}
-#line 9948 "MachineIndependent/glslang_tab.cpp"
+#line 10006 "MachineIndependent/glslang_tab.cpp"
break;
- case 456: /* type_specifier_nonarray: IMAGE3D */
-#line 3104 "MachineIndependent/glslang.y"
+ case 459: /* type_specifier_nonarray: IMAGE3D */
+#line 3129 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat, Esd3D);
}
-#line 9958 "MachineIndependent/glslang_tab.cpp"
+#line 10016 "MachineIndependent/glslang_tab.cpp"
break;
- case 457: /* type_specifier_nonarray: F16IMAGE3D */
-#line 3109 "MachineIndependent/glslang.y"
+ case 460: /* type_specifier_nonarray: F16IMAGE3D */
+#line 3134 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat16, Esd3D);
}
-#line 9969 "MachineIndependent/glslang_tab.cpp"
+#line 10027 "MachineIndependent/glslang_tab.cpp"
break;
- case 458: /* type_specifier_nonarray: IIMAGE3D */
-#line 3115 "MachineIndependent/glslang.y"
+ case 461: /* type_specifier_nonarray: IIMAGE3D */
+#line 3140 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt, Esd3D);
}
-#line 9979 "MachineIndependent/glslang_tab.cpp"
+#line 10037 "MachineIndependent/glslang_tab.cpp"
break;
- case 459: /* type_specifier_nonarray: UIMAGE3D */
-#line 3120 "MachineIndependent/glslang.y"
+ case 462: /* type_specifier_nonarray: UIMAGE3D */
+#line 3145 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint, Esd3D);
}
-#line 9989 "MachineIndependent/glslang_tab.cpp"
+#line 10047 "MachineIndependent/glslang_tab.cpp"
break;
- case 460: /* type_specifier_nonarray: IMAGE2DRECT */
-#line 3125 "MachineIndependent/glslang.y"
+ case 463: /* type_specifier_nonarray: IMAGE2DRECT */
+#line 3150 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat, EsdRect);
}
-#line 9999 "MachineIndependent/glslang_tab.cpp"
+#line 10057 "MachineIndependent/glslang_tab.cpp"
break;
- case 461: /* type_specifier_nonarray: F16IMAGE2DRECT */
-#line 3130 "MachineIndependent/glslang.y"
+ case 464: /* type_specifier_nonarray: F16IMAGE2DRECT */
+#line 3155 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat16, EsdRect);
}
-#line 10010 "MachineIndependent/glslang_tab.cpp"
+#line 10068 "MachineIndependent/glslang_tab.cpp"
break;
- case 462: /* type_specifier_nonarray: IIMAGE2DRECT */
-#line 3136 "MachineIndependent/glslang.y"
+ case 465: /* type_specifier_nonarray: IIMAGE2DRECT */
+#line 3161 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt, EsdRect);
}
-#line 10020 "MachineIndependent/glslang_tab.cpp"
+#line 10078 "MachineIndependent/glslang_tab.cpp"
break;
- case 463: /* type_specifier_nonarray: UIMAGE2DRECT */
-#line 3141 "MachineIndependent/glslang.y"
+ case 466: /* type_specifier_nonarray: UIMAGE2DRECT */
+#line 3166 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint, EsdRect);
}
-#line 10030 "MachineIndependent/glslang_tab.cpp"
+#line 10088 "MachineIndependent/glslang_tab.cpp"
break;
- case 464: /* type_specifier_nonarray: IMAGECUBE */
-#line 3146 "MachineIndependent/glslang.y"
+ case 467: /* type_specifier_nonarray: IMAGECUBE */
+#line 3171 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat, EsdCube);
}
-#line 10040 "MachineIndependent/glslang_tab.cpp"
+#line 10098 "MachineIndependent/glslang_tab.cpp"
break;
- case 465: /* type_specifier_nonarray: F16IMAGECUBE */
-#line 3151 "MachineIndependent/glslang.y"
+ case 468: /* type_specifier_nonarray: F16IMAGECUBE */
+#line 3176 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat16, EsdCube);
}
-#line 10051 "MachineIndependent/glslang_tab.cpp"
+#line 10109 "MachineIndependent/glslang_tab.cpp"
break;
- case 466: /* type_specifier_nonarray: IIMAGECUBE */
-#line 3157 "MachineIndependent/glslang.y"
+ case 469: /* type_specifier_nonarray: IIMAGECUBE */
+#line 3182 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt, EsdCube);
}
-#line 10061 "MachineIndependent/glslang_tab.cpp"
+#line 10119 "MachineIndependent/glslang_tab.cpp"
break;
- case 467: /* type_specifier_nonarray: UIMAGECUBE */
-#line 3162 "MachineIndependent/glslang.y"
+ case 470: /* type_specifier_nonarray: UIMAGECUBE */
+#line 3187 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint, EsdCube);
}
-#line 10071 "MachineIndependent/glslang_tab.cpp"
+#line 10129 "MachineIndependent/glslang_tab.cpp"
break;
- case 468: /* type_specifier_nonarray: IMAGEBUFFER */
-#line 3167 "MachineIndependent/glslang.y"
+ case 471: /* type_specifier_nonarray: IMAGEBUFFER */
+#line 3192 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat, EsdBuffer);
}
-#line 10081 "MachineIndependent/glslang_tab.cpp"
+#line 10139 "MachineIndependent/glslang_tab.cpp"
break;
- case 469: /* type_specifier_nonarray: F16IMAGEBUFFER */
-#line 3172 "MachineIndependent/glslang.y"
+ case 472: /* type_specifier_nonarray: F16IMAGEBUFFER */
+#line 3197 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat16, EsdBuffer);
}
-#line 10092 "MachineIndependent/glslang_tab.cpp"
+#line 10150 "MachineIndependent/glslang_tab.cpp"
break;
- case 470: /* type_specifier_nonarray: IIMAGEBUFFER */
-#line 3178 "MachineIndependent/glslang.y"
+ case 473: /* type_specifier_nonarray: IIMAGEBUFFER */
+#line 3203 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt, EsdBuffer);
}
-#line 10102 "MachineIndependent/glslang_tab.cpp"
+#line 10160 "MachineIndependent/glslang_tab.cpp"
break;
- case 471: /* type_specifier_nonarray: UIMAGEBUFFER */
-#line 3183 "MachineIndependent/glslang.y"
+ case 474: /* type_specifier_nonarray: UIMAGEBUFFER */
+#line 3208 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint, EsdBuffer);
}
-#line 10112 "MachineIndependent/glslang_tab.cpp"
+#line 10170 "MachineIndependent/glslang_tab.cpp"
break;
- case 472: /* type_specifier_nonarray: IMAGE1DARRAY */
-#line 3188 "MachineIndependent/glslang.y"
+ case 475: /* type_specifier_nonarray: IMAGE1DARRAY */
+#line 3213 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat, Esd1D, true);
}
-#line 10122 "MachineIndependent/glslang_tab.cpp"
+#line 10180 "MachineIndependent/glslang_tab.cpp"
break;
- case 473: /* type_specifier_nonarray: F16IMAGE1DARRAY */
-#line 3193 "MachineIndependent/glslang.y"
+ case 476: /* type_specifier_nonarray: F16IMAGE1DARRAY */
+#line 3218 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat16, Esd1D, true);
}
-#line 10133 "MachineIndependent/glslang_tab.cpp"
+#line 10191 "MachineIndependent/glslang_tab.cpp"
break;
- case 474: /* type_specifier_nonarray: IIMAGE1DARRAY */
-#line 3199 "MachineIndependent/glslang.y"
+ case 477: /* type_specifier_nonarray: IIMAGE1DARRAY */
+#line 3224 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt, Esd1D, true);
}
-#line 10143 "MachineIndependent/glslang_tab.cpp"
+#line 10201 "MachineIndependent/glslang_tab.cpp"
break;
- case 475: /* type_specifier_nonarray: UIMAGE1DARRAY */
-#line 3204 "MachineIndependent/glslang.y"
+ case 478: /* type_specifier_nonarray: UIMAGE1DARRAY */
+#line 3229 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint, Esd1D, true);
}
-#line 10153 "MachineIndependent/glslang_tab.cpp"
+#line 10211 "MachineIndependent/glslang_tab.cpp"
break;
- case 476: /* type_specifier_nonarray: IMAGE2DARRAY */
-#line 3209 "MachineIndependent/glslang.y"
+ case 479: /* type_specifier_nonarray: IMAGE2DARRAY */
+#line 3234 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat, Esd2D, true);
}
-#line 10163 "MachineIndependent/glslang_tab.cpp"
+#line 10221 "MachineIndependent/glslang_tab.cpp"
break;
- case 477: /* type_specifier_nonarray: F16IMAGE2DARRAY */
-#line 3214 "MachineIndependent/glslang.y"
+ case 480: /* type_specifier_nonarray: F16IMAGE2DARRAY */
+#line 3239 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat16, Esd2D, true);
}
-#line 10174 "MachineIndependent/glslang_tab.cpp"
+#line 10232 "MachineIndependent/glslang_tab.cpp"
break;
- case 478: /* type_specifier_nonarray: IIMAGE2DARRAY */
-#line 3220 "MachineIndependent/glslang.y"
+ case 481: /* type_specifier_nonarray: IIMAGE2DARRAY */
+#line 3245 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt, Esd2D, true);
}
-#line 10184 "MachineIndependent/glslang_tab.cpp"
+#line 10242 "MachineIndependent/glslang_tab.cpp"
break;
- case 479: /* type_specifier_nonarray: UIMAGE2DARRAY */
-#line 3225 "MachineIndependent/glslang.y"
+ case 482: /* type_specifier_nonarray: UIMAGE2DARRAY */
+#line 3250 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint, Esd2D, true);
}
-#line 10194 "MachineIndependent/glslang_tab.cpp"
+#line 10252 "MachineIndependent/glslang_tab.cpp"
break;
- case 480: /* type_specifier_nonarray: IMAGECUBEARRAY */
-#line 3230 "MachineIndependent/glslang.y"
+ case 483: /* type_specifier_nonarray: IMAGECUBEARRAY */
+#line 3255 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat, EsdCube, true);
}
-#line 10204 "MachineIndependent/glslang_tab.cpp"
+#line 10262 "MachineIndependent/glslang_tab.cpp"
break;
- case 481: /* type_specifier_nonarray: F16IMAGECUBEARRAY */
-#line 3235 "MachineIndependent/glslang.y"
+ case 484: /* type_specifier_nonarray: F16IMAGECUBEARRAY */
+#line 3260 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat16, EsdCube, true);
}
-#line 10215 "MachineIndependent/glslang_tab.cpp"
+#line 10273 "MachineIndependent/glslang_tab.cpp"
break;
- case 482: /* type_specifier_nonarray: IIMAGECUBEARRAY */
-#line 3241 "MachineIndependent/glslang.y"
+ case 485: /* type_specifier_nonarray: IIMAGECUBEARRAY */
+#line 3266 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt, EsdCube, true);
}
-#line 10225 "MachineIndependent/glslang_tab.cpp"
+#line 10283 "MachineIndependent/glslang_tab.cpp"
break;
- case 483: /* type_specifier_nonarray: UIMAGECUBEARRAY */
-#line 3246 "MachineIndependent/glslang.y"
+ case 486: /* type_specifier_nonarray: UIMAGECUBEARRAY */
+#line 3271 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint, EsdCube, true);
}
-#line 10235 "MachineIndependent/glslang_tab.cpp"
+#line 10293 "MachineIndependent/glslang_tab.cpp"
break;
- case 484: /* type_specifier_nonarray: IMAGE2DMS */
-#line 3251 "MachineIndependent/glslang.y"
+ case 487: /* type_specifier_nonarray: IMAGE2DMS */
+#line 3276 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat, Esd2D, false, false, true);
}
-#line 10245 "MachineIndependent/glslang_tab.cpp"
+#line 10303 "MachineIndependent/glslang_tab.cpp"
break;
- case 485: /* type_specifier_nonarray: F16IMAGE2DMS */
-#line 3256 "MachineIndependent/glslang.y"
+ case 488: /* type_specifier_nonarray: F16IMAGE2DMS */
+#line 3281 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat16, Esd2D, false, false, true);
}
-#line 10256 "MachineIndependent/glslang_tab.cpp"
+#line 10314 "MachineIndependent/glslang_tab.cpp"
break;
- case 486: /* type_specifier_nonarray: IIMAGE2DMS */
-#line 3262 "MachineIndependent/glslang.y"
+ case 489: /* type_specifier_nonarray: IIMAGE2DMS */
+#line 3287 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt, Esd2D, false, false, true);
}
-#line 10266 "MachineIndependent/glslang_tab.cpp"
+#line 10324 "MachineIndependent/glslang_tab.cpp"
break;
- case 487: /* type_specifier_nonarray: UIMAGE2DMS */
-#line 3267 "MachineIndependent/glslang.y"
+ case 490: /* type_specifier_nonarray: UIMAGE2DMS */
+#line 3292 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint, Esd2D, false, false, true);
}
-#line 10276 "MachineIndependent/glslang_tab.cpp"
+#line 10334 "MachineIndependent/glslang_tab.cpp"
break;
- case 488: /* type_specifier_nonarray: IMAGE2DMSARRAY */
-#line 3272 "MachineIndependent/glslang.y"
+ case 491: /* type_specifier_nonarray: IMAGE2DMSARRAY */
+#line 3297 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat, Esd2D, true, false, true);
}
-#line 10286 "MachineIndependent/glslang_tab.cpp"
+#line 10344 "MachineIndependent/glslang_tab.cpp"
break;
- case 489: /* type_specifier_nonarray: F16IMAGE2DMSARRAY */
-#line 3277 "MachineIndependent/glslang.y"
+ case 492: /* type_specifier_nonarray: F16IMAGE2DMSARRAY */
+#line 3302 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat16, Esd2D, true, false, true);
}
-#line 10297 "MachineIndependent/glslang_tab.cpp"
+#line 10355 "MachineIndependent/glslang_tab.cpp"
break;
- case 490: /* type_specifier_nonarray: IIMAGE2DMSARRAY */
-#line 3283 "MachineIndependent/glslang.y"
+ case 493: /* type_specifier_nonarray: IIMAGE2DMSARRAY */
+#line 3308 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt, Esd2D, true, false, true);
}
-#line 10307 "MachineIndependent/glslang_tab.cpp"
+#line 10365 "MachineIndependent/glslang_tab.cpp"
break;
- case 491: /* type_specifier_nonarray: UIMAGE2DMSARRAY */
-#line 3288 "MachineIndependent/glslang.y"
+ case 494: /* type_specifier_nonarray: UIMAGE2DMSARRAY */
+#line 3313 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint, Esd2D, true, false, true);
}
-#line 10317 "MachineIndependent/glslang_tab.cpp"
+#line 10375 "MachineIndependent/glslang_tab.cpp"
break;
- case 492: /* type_specifier_nonarray: I64IMAGE1D */
-#line 3293 "MachineIndependent/glslang.y"
+ case 495: /* type_specifier_nonarray: I64IMAGE1D */
+#line 3318 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt64, Esd1D);
}
-#line 10327 "MachineIndependent/glslang_tab.cpp"
+#line 10385 "MachineIndependent/glslang_tab.cpp"
break;
- case 493: /* type_specifier_nonarray: U64IMAGE1D */
-#line 3298 "MachineIndependent/glslang.y"
+ case 496: /* type_specifier_nonarray: U64IMAGE1D */
+#line 3323 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint64, Esd1D);
}
-#line 10337 "MachineIndependent/glslang_tab.cpp"
+#line 10395 "MachineIndependent/glslang_tab.cpp"
break;
- case 494: /* type_specifier_nonarray: I64IMAGE2D */
-#line 3303 "MachineIndependent/glslang.y"
+ case 497: /* type_specifier_nonarray: I64IMAGE2D */
+#line 3328 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt64, Esd2D);
}
-#line 10347 "MachineIndependent/glslang_tab.cpp"
+#line 10405 "MachineIndependent/glslang_tab.cpp"
break;
- case 495: /* type_specifier_nonarray: U64IMAGE2D */
-#line 3308 "MachineIndependent/glslang.y"
+ case 498: /* type_specifier_nonarray: U64IMAGE2D */
+#line 3333 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint64, Esd2D);
}
-#line 10357 "MachineIndependent/glslang_tab.cpp"
+#line 10415 "MachineIndependent/glslang_tab.cpp"
break;
- case 496: /* type_specifier_nonarray: I64IMAGE3D */
-#line 3313 "MachineIndependent/glslang.y"
+ case 499: /* type_specifier_nonarray: I64IMAGE3D */
+#line 3338 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt64, Esd3D);
}
-#line 10367 "MachineIndependent/glslang_tab.cpp"
+#line 10425 "MachineIndependent/glslang_tab.cpp"
break;
- case 497: /* type_specifier_nonarray: U64IMAGE3D */
-#line 3318 "MachineIndependent/glslang.y"
+ case 500: /* type_specifier_nonarray: U64IMAGE3D */
+#line 3343 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint64, Esd3D);
}
-#line 10377 "MachineIndependent/glslang_tab.cpp"
+#line 10435 "MachineIndependent/glslang_tab.cpp"
break;
- case 498: /* type_specifier_nonarray: I64IMAGE2DRECT */
-#line 3323 "MachineIndependent/glslang.y"
+ case 501: /* type_specifier_nonarray: I64IMAGE2DRECT */
+#line 3348 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt64, EsdRect);
}
-#line 10387 "MachineIndependent/glslang_tab.cpp"
+#line 10445 "MachineIndependent/glslang_tab.cpp"
break;
- case 499: /* type_specifier_nonarray: U64IMAGE2DRECT */
-#line 3328 "MachineIndependent/glslang.y"
+ case 502: /* type_specifier_nonarray: U64IMAGE2DRECT */
+#line 3353 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint64, EsdRect);
}
-#line 10397 "MachineIndependent/glslang_tab.cpp"
+#line 10455 "MachineIndependent/glslang_tab.cpp"
break;
- case 500: /* type_specifier_nonarray: I64IMAGECUBE */
-#line 3333 "MachineIndependent/glslang.y"
+ case 503: /* type_specifier_nonarray: I64IMAGECUBE */
+#line 3358 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt64, EsdCube);
}
-#line 10407 "MachineIndependent/glslang_tab.cpp"
+#line 10465 "MachineIndependent/glslang_tab.cpp"
break;
- case 501: /* type_specifier_nonarray: U64IMAGECUBE */
-#line 3338 "MachineIndependent/glslang.y"
+ case 504: /* type_specifier_nonarray: U64IMAGECUBE */
+#line 3363 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint64, EsdCube);
}
-#line 10417 "MachineIndependent/glslang_tab.cpp"
+#line 10475 "MachineIndependent/glslang_tab.cpp"
break;
- case 502: /* type_specifier_nonarray: I64IMAGEBUFFER */
-#line 3343 "MachineIndependent/glslang.y"
+ case 505: /* type_specifier_nonarray: I64IMAGEBUFFER */
+#line 3368 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt64, EsdBuffer);
}
-#line 10427 "MachineIndependent/glslang_tab.cpp"
+#line 10485 "MachineIndependent/glslang_tab.cpp"
break;
- case 503: /* type_specifier_nonarray: U64IMAGEBUFFER */
-#line 3348 "MachineIndependent/glslang.y"
+ case 506: /* type_specifier_nonarray: U64IMAGEBUFFER */
+#line 3373 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint64, EsdBuffer);
}
-#line 10437 "MachineIndependent/glslang_tab.cpp"
+#line 10495 "MachineIndependent/glslang_tab.cpp"
break;
- case 504: /* type_specifier_nonarray: I64IMAGE1DARRAY */
-#line 3353 "MachineIndependent/glslang.y"
+ case 507: /* type_specifier_nonarray: I64IMAGE1DARRAY */
+#line 3378 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt64, Esd1D, true);
}
-#line 10447 "MachineIndependent/glslang_tab.cpp"
+#line 10505 "MachineIndependent/glslang_tab.cpp"
break;
- case 505: /* type_specifier_nonarray: U64IMAGE1DARRAY */
-#line 3358 "MachineIndependent/glslang.y"
+ case 508: /* type_specifier_nonarray: U64IMAGE1DARRAY */
+#line 3383 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint64, Esd1D, true);
}
-#line 10457 "MachineIndependent/glslang_tab.cpp"
+#line 10515 "MachineIndependent/glslang_tab.cpp"
break;
- case 506: /* type_specifier_nonarray: I64IMAGE2DARRAY */
-#line 3363 "MachineIndependent/glslang.y"
+ case 509: /* type_specifier_nonarray: I64IMAGE2DARRAY */
+#line 3388 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt64, Esd2D, true);
}
-#line 10467 "MachineIndependent/glslang_tab.cpp"
+#line 10525 "MachineIndependent/glslang_tab.cpp"
break;
- case 507: /* type_specifier_nonarray: U64IMAGE2DARRAY */
-#line 3368 "MachineIndependent/glslang.y"
+ case 510: /* type_specifier_nonarray: U64IMAGE2DARRAY */
+#line 3393 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint64, Esd2D, true);
}
-#line 10477 "MachineIndependent/glslang_tab.cpp"
+#line 10535 "MachineIndependent/glslang_tab.cpp"
break;
- case 508: /* type_specifier_nonarray: I64IMAGECUBEARRAY */
-#line 3373 "MachineIndependent/glslang.y"
+ case 511: /* type_specifier_nonarray: I64IMAGECUBEARRAY */
+#line 3398 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt64, EsdCube, true);
}
-#line 10487 "MachineIndependent/glslang_tab.cpp"
+#line 10545 "MachineIndependent/glslang_tab.cpp"
break;
- case 509: /* type_specifier_nonarray: U64IMAGECUBEARRAY */
-#line 3378 "MachineIndependent/glslang.y"
+ case 512: /* type_specifier_nonarray: U64IMAGECUBEARRAY */
+#line 3403 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint64, EsdCube, true);
}
-#line 10497 "MachineIndependent/glslang_tab.cpp"
+#line 10555 "MachineIndependent/glslang_tab.cpp"
break;
- case 510: /* type_specifier_nonarray: I64IMAGE2DMS */
-#line 3383 "MachineIndependent/glslang.y"
+ case 513: /* type_specifier_nonarray: I64IMAGE2DMS */
+#line 3408 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt64, Esd2D, false, false, true);
}
-#line 10507 "MachineIndependent/glslang_tab.cpp"
+#line 10565 "MachineIndependent/glslang_tab.cpp"
break;
- case 511: /* type_specifier_nonarray: U64IMAGE2DMS */
-#line 3388 "MachineIndependent/glslang.y"
+ case 514: /* type_specifier_nonarray: U64IMAGE2DMS */
+#line 3413 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint64, Esd2D, false, false, true);
}
-#line 10517 "MachineIndependent/glslang_tab.cpp"
+#line 10575 "MachineIndependent/glslang_tab.cpp"
break;
- case 512: /* type_specifier_nonarray: I64IMAGE2DMSARRAY */
-#line 3393 "MachineIndependent/glslang.y"
+ case 515: /* type_specifier_nonarray: I64IMAGE2DMSARRAY */
+#line 3418 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt64, Esd2D, true, false, true);
}
-#line 10527 "MachineIndependent/glslang_tab.cpp"
+#line 10585 "MachineIndependent/glslang_tab.cpp"
break;
- case 513: /* type_specifier_nonarray: U64IMAGE2DMSARRAY */
-#line 3398 "MachineIndependent/glslang.y"
+ case 516: /* type_specifier_nonarray: U64IMAGE2DMSARRAY */
+#line 3423 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint64, Esd2D, true, false, true);
}
-#line 10537 "MachineIndependent/glslang_tab.cpp"
+#line 10595 "MachineIndependent/glslang_tab.cpp"
break;
- case 514: /* type_specifier_nonarray: SAMPLEREXTERNALOES */
-#line 3403 "MachineIndependent/glslang.y"
+ case 517: /* type_specifier_nonarray: SAMPLEREXTERNALOES */
+#line 3428 "MachineIndependent/glslang.y"
{ // GL_OES_EGL_image_external
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat, Esd2D);
(yyval.interm.type).sampler.external = true;
}
-#line 10548 "MachineIndependent/glslang_tab.cpp"
+#line 10606 "MachineIndependent/glslang_tab.cpp"
break;
- case 515: /* type_specifier_nonarray: SAMPLEREXTERNAL2DY2YEXT */
-#line 3409 "MachineIndependent/glslang.y"
+ case 518: /* type_specifier_nonarray: SAMPLEREXTERNAL2DY2YEXT */
+#line 3434 "MachineIndependent/glslang.y"
{ // GL_EXT_YUV_target
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat, Esd2D);
(yyval.interm.type).sampler.yuv = true;
}
-#line 10559 "MachineIndependent/glslang_tab.cpp"
+#line 10617 "MachineIndependent/glslang_tab.cpp"
break;
- case 516: /* type_specifier_nonarray: SUBPASSINPUT */
-#line 3415 "MachineIndependent/glslang.y"
+ case 519: /* type_specifier_nonarray: SUBPASSINPUT */
+#line 3440 "MachineIndependent/glslang.y"
{
parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setSubpass(EbtFloat);
}
-#line 10570 "MachineIndependent/glslang_tab.cpp"
+#line 10628 "MachineIndependent/glslang_tab.cpp"
break;
- case 517: /* type_specifier_nonarray: SUBPASSINPUTMS */
-#line 3421 "MachineIndependent/glslang.y"
+ case 520: /* type_specifier_nonarray: SUBPASSINPUTMS */
+#line 3446 "MachineIndependent/glslang.y"
{
parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setSubpass(EbtFloat, true);
}
-#line 10581 "MachineIndependent/glslang_tab.cpp"
+#line 10639 "MachineIndependent/glslang_tab.cpp"
break;
- case 518: /* type_specifier_nonarray: F16SUBPASSINPUT */
-#line 3427 "MachineIndependent/glslang.y"
+ case 521: /* type_specifier_nonarray: F16SUBPASSINPUT */
+#line 3452 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float subpass input", parseContext.symbolTable.atBuiltInLevel());
parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input");
@@ -10589,11 +10647,11 @@ yyreduce:
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setSubpass(EbtFloat16);
}
-#line 10593 "MachineIndependent/glslang_tab.cpp"
+#line 10651 "MachineIndependent/glslang_tab.cpp"
break;
- case 519: /* type_specifier_nonarray: F16SUBPASSINPUTMS */
-#line 3434 "MachineIndependent/glslang.y"
+ case 522: /* type_specifier_nonarray: F16SUBPASSINPUTMS */
+#line 3459 "MachineIndependent/glslang.y"
{
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float subpass input", parseContext.symbolTable.atBuiltInLevel());
parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input");
@@ -10601,107 +10659,107 @@ yyreduce:
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setSubpass(EbtFloat16, true);
}
-#line 10605 "MachineIndependent/glslang_tab.cpp"
+#line 10663 "MachineIndependent/glslang_tab.cpp"
break;
- case 520: /* type_specifier_nonarray: ISUBPASSINPUT */
-#line 3441 "MachineIndependent/glslang.y"
+ case 523: /* type_specifier_nonarray: ISUBPASSINPUT */
+#line 3466 "MachineIndependent/glslang.y"
{
parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setSubpass(EbtInt);
}
-#line 10616 "MachineIndependent/glslang_tab.cpp"
+#line 10674 "MachineIndependent/glslang_tab.cpp"
break;
- case 521: /* type_specifier_nonarray: ISUBPASSINPUTMS */
-#line 3447 "MachineIndependent/glslang.y"
+ case 524: /* type_specifier_nonarray: ISUBPASSINPUTMS */
+#line 3472 "MachineIndependent/glslang.y"
{
parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setSubpass(EbtInt, true);
}
-#line 10627 "MachineIndependent/glslang_tab.cpp"
+#line 10685 "MachineIndependent/glslang_tab.cpp"
break;
- case 522: /* type_specifier_nonarray: USUBPASSINPUT */
-#line 3453 "MachineIndependent/glslang.y"
+ case 525: /* type_specifier_nonarray: USUBPASSINPUT */
+#line 3478 "MachineIndependent/glslang.y"
{
parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setSubpass(EbtUint);
}
-#line 10638 "MachineIndependent/glslang_tab.cpp"
+#line 10696 "MachineIndependent/glslang_tab.cpp"
break;
- case 523: /* type_specifier_nonarray: USUBPASSINPUTMS */
-#line 3459 "MachineIndependent/glslang.y"
+ case 526: /* type_specifier_nonarray: USUBPASSINPUTMS */
+#line 3484 "MachineIndependent/glslang.y"
{
parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setSubpass(EbtUint, true);
}
-#line 10649 "MachineIndependent/glslang_tab.cpp"
+#line 10707 "MachineIndependent/glslang_tab.cpp"
break;
- case 524: /* type_specifier_nonarray: FCOOPMATNV */
-#line 3465 "MachineIndependent/glslang.y"
+ case 527: /* type_specifier_nonarray: FCOOPMATNV */
+#line 3490 "MachineIndependent/glslang.y"
{
parseContext.fcoopmatCheck((yyvsp[0].lex).loc, "fcoopmatNV", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).coopmat = true;
}
-#line 10660 "MachineIndependent/glslang_tab.cpp"
+#line 10718 "MachineIndependent/glslang_tab.cpp"
break;
- case 525: /* type_specifier_nonarray: ICOOPMATNV */
-#line 3471 "MachineIndependent/glslang.y"
+ case 528: /* type_specifier_nonarray: ICOOPMATNV */
+#line 3496 "MachineIndependent/glslang.y"
{
parseContext.intcoopmatCheck((yyvsp[0].lex).loc, "icoopmatNV", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtInt;
(yyval.interm.type).coopmat = true;
}
-#line 10671 "MachineIndependent/glslang_tab.cpp"
+#line 10729 "MachineIndependent/glslang_tab.cpp"
break;
- case 526: /* type_specifier_nonarray: UCOOPMATNV */
-#line 3477 "MachineIndependent/glslang.y"
+ case 529: /* type_specifier_nonarray: UCOOPMATNV */
+#line 3502 "MachineIndependent/glslang.y"
{
parseContext.intcoopmatCheck((yyvsp[0].lex).loc, "ucoopmatNV", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtUint;
(yyval.interm.type).coopmat = true;
}
-#line 10682 "MachineIndependent/glslang_tab.cpp"
+#line 10740 "MachineIndependent/glslang_tab.cpp"
break;
- case 527: /* type_specifier_nonarray: spirv_type_specifier */
-#line 3483 "MachineIndependent/glslang.y"
+ case 530: /* type_specifier_nonarray: spirv_type_specifier */
+#line 3508 "MachineIndependent/glslang.y"
{
parseContext.requireExtensions((yyvsp[0].interm.type).loc, 1, &E_GL_EXT_spirv_intrinsics, "SPIR-V type specifier");
(yyval.interm.type) = (yyvsp[0].interm.type);
}
-#line 10691 "MachineIndependent/glslang_tab.cpp"
+#line 10749 "MachineIndependent/glslang_tab.cpp"
break;
- case 528: /* type_specifier_nonarray: struct_specifier */
-#line 3488 "MachineIndependent/glslang.y"
+ case 531: /* type_specifier_nonarray: struct_specifier */
+#line 3513 "MachineIndependent/glslang.y"
{
(yyval.interm.type) = (yyvsp[0].interm.type);
(yyval.interm.type).qualifier.storage = parseContext.symbolTable.atGlobalLevel() ? EvqGlobal : EvqTemporary;
parseContext.structTypeCheck((yyval.interm.type).loc, (yyval.interm.type));
}
-#line 10701 "MachineIndependent/glslang_tab.cpp"
+#line 10759 "MachineIndependent/glslang_tab.cpp"
break;
- case 529: /* type_specifier_nonarray: TYPE_NAME */
-#line 3493 "MachineIndependent/glslang.y"
+ case 532: /* type_specifier_nonarray: TYPE_NAME */
+#line 3518 "MachineIndependent/glslang.y"
{
//
// This is for user defined type names. The lexical phase looked up the
@@ -10715,47 +10773,47 @@ yyreduce:
} else
parseContext.error((yyvsp[0].lex).loc, "expected type name", (yyvsp[0].lex).string->c_str(), "");
}
-#line 10719 "MachineIndependent/glslang_tab.cpp"
+#line 10777 "MachineIndependent/glslang_tab.cpp"
break;
- case 530: /* precision_qualifier: HIGH_PRECISION */
-#line 3509 "MachineIndependent/glslang.y"
+ case 533: /* precision_qualifier: HIGH_PRECISION */
+#line 3534 "MachineIndependent/glslang.y"
{
parseContext.profileRequires((yyvsp[0].lex).loc, ENoProfile, 130, 0, "highp precision qualifier");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
parseContext.handlePrecisionQualifier((yyvsp[0].lex).loc, (yyval.interm.type).qualifier, EpqHigh);
}
-#line 10729 "MachineIndependent/glslang_tab.cpp"
+#line 10787 "MachineIndependent/glslang_tab.cpp"
break;
- case 531: /* precision_qualifier: MEDIUM_PRECISION */
-#line 3514 "MachineIndependent/glslang.y"
+ case 534: /* precision_qualifier: MEDIUM_PRECISION */
+#line 3539 "MachineIndependent/glslang.y"
{
parseContext.profileRequires((yyvsp[0].lex).loc, ENoProfile, 130, 0, "mediump precision qualifier");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
parseContext.handlePrecisionQualifier((yyvsp[0].lex).loc, (yyval.interm.type).qualifier, EpqMedium);
}
-#line 10739 "MachineIndependent/glslang_tab.cpp"
+#line 10797 "MachineIndependent/glslang_tab.cpp"
break;
- case 532: /* precision_qualifier: LOW_PRECISION */
-#line 3519 "MachineIndependent/glslang.y"
+ case 535: /* precision_qualifier: LOW_PRECISION */
+#line 3544 "MachineIndependent/glslang.y"
{
parseContext.profileRequires((yyvsp[0].lex).loc, ENoProfile, 130, 0, "lowp precision qualifier");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
parseContext.handlePrecisionQualifier((yyvsp[0].lex).loc, (yyval.interm.type).qualifier, EpqLow);
}
-#line 10749 "MachineIndependent/glslang_tab.cpp"
+#line 10807 "MachineIndependent/glslang_tab.cpp"
break;
- case 533: /* $@3: %empty */
-#line 3527 "MachineIndependent/glslang.y"
+ case 536: /* $@3: %empty */
+#line 3552 "MachineIndependent/glslang.y"
{ parseContext.nestedStructCheck((yyvsp[-2].lex).loc); }
-#line 10755 "MachineIndependent/glslang_tab.cpp"
+#line 10813 "MachineIndependent/glslang_tab.cpp"
break;
- case 534: /* struct_specifier: STRUCT IDENTIFIER LEFT_BRACE $@3 struct_declaration_list RIGHT_BRACE */
-#line 3527 "MachineIndependent/glslang.y"
+ case 537: /* struct_specifier: STRUCT IDENTIFIER LEFT_BRACE $@3 struct_declaration_list RIGHT_BRACE */
+#line 3552 "MachineIndependent/glslang.y"
{
TType* structure = new TType((yyvsp[-1].interm.typeList), *(yyvsp[-4].lex).string);
parseContext.structArrayCheck((yyvsp[-4].lex).loc, *structure);
@@ -10767,17 +10825,17 @@ yyreduce:
(yyval.interm.type).userDef = structure;
--parseContext.structNestingLevel;
}
-#line 10771 "MachineIndependent/glslang_tab.cpp"
+#line 10829 "MachineIndependent/glslang_tab.cpp"
break;
- case 535: /* $@4: %empty */
-#line 3538 "MachineIndependent/glslang.y"
+ case 538: /* $@4: %empty */
+#line 3563 "MachineIndependent/glslang.y"
{ parseContext.nestedStructCheck((yyvsp[-1].lex).loc); }
-#line 10777 "MachineIndependent/glslang_tab.cpp"
+#line 10835 "MachineIndependent/glslang_tab.cpp"
break;
- case 536: /* struct_specifier: STRUCT LEFT_BRACE $@4 struct_declaration_list RIGHT_BRACE */
-#line 3538 "MachineIndependent/glslang.y"
+ case 539: /* struct_specifier: STRUCT LEFT_BRACE $@4 struct_declaration_list RIGHT_BRACE */
+#line 3563 "MachineIndependent/glslang.y"
{
TType* structure = new TType((yyvsp[-1].interm.typeList), TString(""));
(yyval.interm.type).init((yyvsp[-4].lex).loc);
@@ -10785,19 +10843,19 @@ yyreduce:
(yyval.interm.type).userDef = structure;
--parseContext.structNestingLevel;
}
-#line 10789 "MachineIndependent/glslang_tab.cpp"
+#line 10847 "MachineIndependent/glslang_tab.cpp"
break;
- case 537: /* struct_declaration_list: struct_declaration */
-#line 3548 "MachineIndependent/glslang.y"
+ case 540: /* struct_declaration_list: struct_declaration */
+#line 3573 "MachineIndependent/glslang.y"
{
(yyval.interm.typeList) = (yyvsp[0].interm.typeList);
}
-#line 10797 "MachineIndependent/glslang_tab.cpp"
+#line 10855 "MachineIndependent/glslang_tab.cpp"
break;
- case 538: /* struct_declaration_list: struct_declaration_list struct_declaration */
-#line 3551 "MachineIndependent/glslang.y"
+ case 541: /* struct_declaration_list: struct_declaration_list struct_declaration */
+#line 3576 "MachineIndependent/glslang.y"
{
(yyval.interm.typeList) = (yyvsp[-1].interm.typeList);
for (unsigned int i = 0; i < (yyvsp[0].interm.typeList)->size(); ++i) {
@@ -10808,11 +10866,11 @@ yyreduce:
(yyval.interm.typeList)->push_back((*(yyvsp[0].interm.typeList))[i]);
}
}
-#line 10812 "MachineIndependent/glslang_tab.cpp"
+#line 10870 "MachineIndependent/glslang_tab.cpp"
break;
- case 539: /* struct_declaration: type_specifier struct_declarator_list SEMICOLON */
-#line 3564 "MachineIndependent/glslang.y"
+ case 542: /* struct_declaration: type_specifier struct_declarator_list SEMICOLON */
+#line 3589 "MachineIndependent/glslang.y"
{
if ((yyvsp[-2].interm.type).arraySizes) {
parseContext.profileRequires((yyvsp[-2].interm.type).loc, ENoProfile, 120, E_GL_3DL_array_objects, "arrayed type");
@@ -10835,11 +10893,11 @@ yyreduce:
(*(yyval.interm.typeList))[i].type->shallowCopy(type);
}
}
-#line 10839 "MachineIndependent/glslang_tab.cpp"
+#line 10897 "MachineIndependent/glslang_tab.cpp"
break;
- case 540: /* struct_declaration: type_qualifier type_specifier struct_declarator_list SEMICOLON */
-#line 3586 "MachineIndependent/glslang.y"
+ case 543: /* struct_declaration: type_qualifier type_specifier struct_declarator_list SEMICOLON */
+#line 3611 "MachineIndependent/glslang.y"
{
if ((yyvsp[-2].interm.type).arraySizes) {
parseContext.profileRequires((yyvsp[-2].interm.type).loc, ENoProfile, 120, E_GL_3DL_array_objects, "arrayed type");
@@ -10864,38 +10922,38 @@ yyreduce:
(*(yyval.interm.typeList))[i].type->shallowCopy(type);
}
}
-#line 10868 "MachineIndependent/glslang_tab.cpp"
+#line 10926 "MachineIndependent/glslang_tab.cpp"
break;
- case 541: /* struct_declarator_list: struct_declarator */
-#line 3613 "MachineIndependent/glslang.y"
+ case 544: /* struct_declarator_list: struct_declarator */
+#line 3638 "MachineIndependent/glslang.y"
{
(yyval.interm.typeList) = new TTypeList;
(yyval.interm.typeList)->push_back((yyvsp[0].interm.typeLine));
}
-#line 10877 "MachineIndependent/glslang_tab.cpp"
+#line 10935 "MachineIndependent/glslang_tab.cpp"
break;
- case 542: /* struct_declarator_list: struct_declarator_list COMMA struct_declarator */
-#line 3617 "MachineIndependent/glslang.y"
+ case 545: /* struct_declarator_list: struct_declarator_list COMMA struct_declarator */
+#line 3642 "MachineIndependent/glslang.y"
{
(yyval.interm.typeList)->push_back((yyvsp[0].interm.typeLine));
}
-#line 10885 "MachineIndependent/glslang_tab.cpp"
+#line 10943 "MachineIndependent/glslang_tab.cpp"
break;
- case 543: /* struct_declarator: IDENTIFIER */
-#line 3623 "MachineIndependent/glslang.y"
+ case 546: /* struct_declarator: IDENTIFIER */
+#line 3648 "MachineIndependent/glslang.y"
{
(yyval.interm.typeLine).type = new TType(EbtVoid);
(yyval.interm.typeLine).loc = (yyvsp[0].lex).loc;
(yyval.interm.typeLine).type->setFieldName(*(yyvsp[0].lex).string);
}
-#line 10895 "MachineIndependent/glslang_tab.cpp"
+#line 10953 "MachineIndependent/glslang_tab.cpp"
break;
- case 544: /* struct_declarator: IDENTIFIER array_specifier */
-#line 3628 "MachineIndependent/glslang.y"
+ case 547: /* struct_declarator: IDENTIFIER array_specifier */
+#line 3653 "MachineIndependent/glslang.y"
{
parseContext.arrayOfArrayVersionCheck((yyvsp[-1].lex).loc, (yyvsp[0].interm).arraySizes);
@@ -10904,246 +10962,246 @@ yyreduce:
(yyval.interm.typeLine).type->setFieldName(*(yyvsp[-1].lex).string);
(yyval.interm.typeLine).type->transferArraySizes((yyvsp[0].interm).arraySizes);
}
-#line 10908 "MachineIndependent/glslang_tab.cpp"
+#line 10966 "MachineIndependent/glslang_tab.cpp"
break;
- case 545: /* initializer: assignment_expression */
-#line 3639 "MachineIndependent/glslang.y"
+ case 548: /* initializer: assignment_expression */
+#line 3664 "MachineIndependent/glslang.y"
{
(yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode);
}
-#line 10916 "MachineIndependent/glslang_tab.cpp"
+#line 10974 "MachineIndependent/glslang_tab.cpp"
break;
- case 546: /* initializer: LEFT_BRACE initializer_list RIGHT_BRACE */
-#line 3643 "MachineIndependent/glslang.y"
+ case 549: /* initializer: LEFT_BRACE initializer_list RIGHT_BRACE */
+#line 3668 "MachineIndependent/glslang.y"
{
const char* initFeature = "{ } style initializers";
parseContext.requireProfile((yyvsp[-2].lex).loc, ~EEsProfile, initFeature);
parseContext.profileRequires((yyvsp[-2].lex).loc, ~EEsProfile, 420, E_GL_ARB_shading_language_420pack, initFeature);
(yyval.interm.intermTypedNode) = (yyvsp[-1].interm.intermTypedNode);
}
-#line 10927 "MachineIndependent/glslang_tab.cpp"
+#line 10985 "MachineIndependent/glslang_tab.cpp"
break;
- case 547: /* initializer: LEFT_BRACE initializer_list COMMA RIGHT_BRACE */
-#line 3649 "MachineIndependent/glslang.y"
+ case 550: /* initializer: LEFT_BRACE initializer_list COMMA RIGHT_BRACE */
+#line 3674 "MachineIndependent/glslang.y"
{
const char* initFeature = "{ } style initializers";
parseContext.requireProfile((yyvsp[-3].lex).loc, ~EEsProfile, initFeature);
parseContext.profileRequires((yyvsp[-3].lex).loc, ~EEsProfile, 420, E_GL_ARB_shading_language_420pack, initFeature);
(yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode);
}
-#line 10938 "MachineIndependent/glslang_tab.cpp"
+#line 10996 "MachineIndependent/glslang_tab.cpp"
break;
- case 548: /* initializer: LEFT_BRACE RIGHT_BRACE */
-#line 3655 "MachineIndependent/glslang.y"
+ case 551: /* initializer: LEFT_BRACE RIGHT_BRACE */
+#line 3680 "MachineIndependent/glslang.y"
{
const char* initFeature = "empty { } initializer";
parseContext.profileRequires((yyvsp[-1].lex).loc, EEsProfile, 0, E_GL_EXT_null_initializer, initFeature);
parseContext.profileRequires((yyvsp[-1].lex).loc, ~EEsProfile, 0, E_GL_EXT_null_initializer, initFeature);
(yyval.interm.intermTypedNode) = parseContext.intermediate.makeAggregate((yyvsp[-1].lex).loc);
}
-#line 10949 "MachineIndependent/glslang_tab.cpp"
+#line 11007 "MachineIndependent/glslang_tab.cpp"
break;
- case 549: /* initializer_list: initializer */
-#line 3666 "MachineIndependent/glslang.y"
+ case 552: /* initializer_list: initializer */
+#line 3691 "MachineIndependent/glslang.y"
{
(yyval.interm.intermTypedNode) = parseContext.intermediate.growAggregate(0, (yyvsp[0].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)->getLoc());
}
-#line 10957 "MachineIndependent/glslang_tab.cpp"
+#line 11015 "MachineIndependent/glslang_tab.cpp"
break;
- case 550: /* initializer_list: initializer_list COMMA initializer */
-#line 3669 "MachineIndependent/glslang.y"
+ case 553: /* initializer_list: initializer_list COMMA initializer */
+#line 3694 "MachineIndependent/glslang.y"
{
(yyval.interm.intermTypedNode) = parseContext.intermediate.growAggregate((yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode));
}
-#line 10965 "MachineIndependent/glslang_tab.cpp"
+#line 11023 "MachineIndependent/glslang_tab.cpp"
break;
- case 551: /* declaration_statement: declaration */
-#line 3676 "MachineIndependent/glslang.y"
+ case 554: /* declaration_statement: declaration */
+#line 3701 "MachineIndependent/glslang.y"
{ (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); }
-#line 10971 "MachineIndependent/glslang_tab.cpp"
+#line 11029 "MachineIndependent/glslang_tab.cpp"
break;
- case 552: /* statement: compound_statement */
-#line 3680 "MachineIndependent/glslang.y"
+ case 555: /* statement: compound_statement */
+#line 3705 "MachineIndependent/glslang.y"
{ (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); }
-#line 10977 "MachineIndependent/glslang_tab.cpp"
+#line 11035 "MachineIndependent/glslang_tab.cpp"
break;
- case 553: /* statement: simple_statement */
-#line 3681 "MachineIndependent/glslang.y"
+ case 556: /* statement: simple_statement */
+#line 3706 "MachineIndependent/glslang.y"
{ (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); }
-#line 10983 "MachineIndependent/glslang_tab.cpp"
+#line 11041 "MachineIndependent/glslang_tab.cpp"
break;
- case 554: /* simple_statement: declaration_statement */
-#line 3687 "MachineIndependent/glslang.y"
+ case 557: /* simple_statement: declaration_statement */
+#line 3712 "MachineIndependent/glslang.y"
{ (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); }
-#line 10989 "MachineIndependent/glslang_tab.cpp"
+#line 11047 "MachineIndependent/glslang_tab.cpp"
break;
- case 555: /* simple_statement: expression_statement */
-#line 3688 "MachineIndependent/glslang.y"
+ case 558: /* simple_statement: expression_statement */
+#line 3713 "MachineIndependent/glslang.y"
{ (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); }
-#line 10995 "MachineIndependent/glslang_tab.cpp"
+#line 11053 "MachineIndependent/glslang_tab.cpp"
break;
- case 556: /* simple_statement: selection_statement */
-#line 3689 "MachineIndependent/glslang.y"
+ case 559: /* simple_statement: selection_statement */
+#line 3714 "MachineIndependent/glslang.y"
{ (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); }
-#line 11001 "MachineIndependent/glslang_tab.cpp"
+#line 11059 "MachineIndependent/glslang_tab.cpp"
break;
- case 557: /* simple_statement: switch_statement */
-#line 3690 "MachineIndependent/glslang.y"
+ case 560: /* simple_statement: switch_statement */
+#line 3715 "MachineIndependent/glslang.y"
{ (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); }
-#line 11007 "MachineIndependent/glslang_tab.cpp"
+#line 11065 "MachineIndependent/glslang_tab.cpp"
break;
- case 558: /* simple_statement: case_label */
-#line 3691 "MachineIndependent/glslang.y"
+ case 561: /* simple_statement: case_label */
+#line 3716 "MachineIndependent/glslang.y"
{ (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); }
-#line 11013 "MachineIndependent/glslang_tab.cpp"
+#line 11071 "MachineIndependent/glslang_tab.cpp"
break;
- case 559: /* simple_statement: iteration_statement */
-#line 3692 "MachineIndependent/glslang.y"
+ case 562: /* simple_statement: iteration_statement */
+#line 3717 "MachineIndependent/glslang.y"
{ (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); }
-#line 11019 "MachineIndependent/glslang_tab.cpp"
+#line 11077 "MachineIndependent/glslang_tab.cpp"
break;
- case 560: /* simple_statement: jump_statement */
-#line 3693 "MachineIndependent/glslang.y"
+ case 563: /* simple_statement: jump_statement */
+#line 3718 "MachineIndependent/glslang.y"
{ (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); }
-#line 11025 "MachineIndependent/glslang_tab.cpp"
+#line 11083 "MachineIndependent/glslang_tab.cpp"
break;
- case 561: /* simple_statement: demote_statement */
-#line 3695 "MachineIndependent/glslang.y"
+ case 564: /* simple_statement: demote_statement */
+#line 3720 "MachineIndependent/glslang.y"
{ (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); }
-#line 11031 "MachineIndependent/glslang_tab.cpp"
+#line 11089 "MachineIndependent/glslang_tab.cpp"
break;
- case 562: /* demote_statement: DEMOTE SEMICOLON */
-#line 3701 "MachineIndependent/glslang.y"
+ case 565: /* demote_statement: DEMOTE SEMICOLON */
+#line 3726 "MachineIndependent/glslang.y"
{
parseContext.requireStage((yyvsp[-1].lex).loc, EShLangFragment, "demote");
parseContext.requireExtensions((yyvsp[-1].lex).loc, 1, &E_GL_EXT_demote_to_helper_invocation, "demote");
(yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpDemote, (yyvsp[-1].lex).loc);
}
-#line 11041 "MachineIndependent/glslang_tab.cpp"
+#line 11099 "MachineIndependent/glslang_tab.cpp"
break;
- case 563: /* compound_statement: LEFT_BRACE RIGHT_BRACE */
-#line 3710 "MachineIndependent/glslang.y"
+ case 566: /* compound_statement: LEFT_BRACE RIGHT_BRACE */
+#line 3735 "MachineIndependent/glslang.y"
{ (yyval.interm.intermNode) = 0; }
-#line 11047 "MachineIndependent/glslang_tab.cpp"
+#line 11105 "MachineIndependent/glslang_tab.cpp"
break;
- case 564: /* $@5: %empty */
-#line 3711 "MachineIndependent/glslang.y"
+ case 567: /* $@5: %empty */
+#line 3736 "MachineIndependent/glslang.y"
{
parseContext.symbolTable.push();
++parseContext.statementNestingLevel;
}
-#line 11056 "MachineIndependent/glslang_tab.cpp"
+#line 11114 "MachineIndependent/glslang_tab.cpp"
break;
- case 565: /* $@6: %empty */
-#line 3715 "MachineIndependent/glslang.y"
+ case 568: /* $@6: %empty */
+#line 3740 "MachineIndependent/glslang.y"
{
parseContext.symbolTable.pop(&parseContext.defaultPrecision[0]);
--parseContext.statementNestingLevel;
}
-#line 11065 "MachineIndependent/glslang_tab.cpp"
+#line 11123 "MachineIndependent/glslang_tab.cpp"
break;
- case 566: /* compound_statement: LEFT_BRACE $@5 statement_list $@6 RIGHT_BRACE */
-#line 3719 "MachineIndependent/glslang.y"
+ case 569: /* compound_statement: LEFT_BRACE $@5 statement_list $@6 RIGHT_BRACE */
+#line 3744 "MachineIndependent/glslang.y"
{
if ((yyvsp[-2].interm.intermNode) && (yyvsp[-2].interm.intermNode)->getAsAggregate())
- (yyvsp[-2].interm.intermNode)->getAsAggregate()->setOperator(EOpSequence);
+ (yyvsp[-2].interm.intermNode)->getAsAggregate()->setOperator(parseContext.intermediate.getDebugInfo() ? EOpScope : EOpSequence);
(yyval.interm.intermNode) = (yyvsp[-2].interm.intermNode);
}
-#line 11075 "MachineIndependent/glslang_tab.cpp"
+#line 11133 "MachineIndependent/glslang_tab.cpp"
break;
- case 567: /* statement_no_new_scope: compound_statement_no_new_scope */
-#line 3727 "MachineIndependent/glslang.y"
+ case 570: /* statement_no_new_scope: compound_statement_no_new_scope */
+#line 3752 "MachineIndependent/glslang.y"
{ (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); }
-#line 11081 "MachineIndependent/glslang_tab.cpp"
+#line 11139 "MachineIndependent/glslang_tab.cpp"
break;
- case 568: /* statement_no_new_scope: simple_statement */
-#line 3728 "MachineIndependent/glslang.y"
+ case 571: /* statement_no_new_scope: simple_statement */
+#line 3753 "MachineIndependent/glslang.y"
{ (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); }
-#line 11087 "MachineIndependent/glslang_tab.cpp"
+#line 11145 "MachineIndependent/glslang_tab.cpp"
break;
- case 569: /* $@7: %empty */
-#line 3732 "MachineIndependent/glslang.y"
+ case 572: /* $@7: %empty */
+#line 3757 "MachineIndependent/glslang.y"
{
++parseContext.controlFlowNestingLevel;
}
-#line 11095 "MachineIndependent/glslang_tab.cpp"
+#line 11153 "MachineIndependent/glslang_tab.cpp"
break;
- case 570: /* statement_scoped: $@7 compound_statement */
-#line 3735 "MachineIndependent/glslang.y"
+ case 573: /* statement_scoped: $@7 compound_statement */
+#line 3760 "MachineIndependent/glslang.y"
{
--parseContext.controlFlowNestingLevel;
(yyval.interm.intermNode) = (yyvsp[0].interm.intermNode);
}
-#line 11104 "MachineIndependent/glslang_tab.cpp"
+#line 11162 "MachineIndependent/glslang_tab.cpp"
break;
- case 571: /* $@8: %empty */
-#line 3739 "MachineIndependent/glslang.y"
+ case 574: /* $@8: %empty */
+#line 3764 "MachineIndependent/glslang.y"
{
parseContext.symbolTable.push();
++parseContext.statementNestingLevel;
++parseContext.controlFlowNestingLevel;
}
-#line 11114 "MachineIndependent/glslang_tab.cpp"
+#line 11172 "MachineIndependent/glslang_tab.cpp"
break;
- case 572: /* statement_scoped: $@8 simple_statement */
-#line 3744 "MachineIndependent/glslang.y"
+ case 575: /* statement_scoped: $@8 simple_statement */
+#line 3769 "MachineIndependent/glslang.y"
{
parseContext.symbolTable.pop(&parseContext.defaultPrecision[0]);
--parseContext.statementNestingLevel;
--parseContext.controlFlowNestingLevel;
(yyval.interm.intermNode) = (yyvsp[0].interm.intermNode);
}
-#line 11125 "MachineIndependent/glslang_tab.cpp"
+#line 11183 "MachineIndependent/glslang_tab.cpp"
break;
- case 573: /* compound_statement_no_new_scope: LEFT_BRACE RIGHT_BRACE */
-#line 3753 "MachineIndependent/glslang.y"
+ case 576: /* compound_statement_no_new_scope: LEFT_BRACE RIGHT_BRACE */
+#line 3778 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = 0;
}
-#line 11133 "MachineIndependent/glslang_tab.cpp"
+#line 11191 "MachineIndependent/glslang_tab.cpp"
break;
- case 574: /* compound_statement_no_new_scope: LEFT_BRACE statement_list RIGHT_BRACE */
-#line 3756 "MachineIndependent/glslang.y"
+ case 577: /* compound_statement_no_new_scope: LEFT_BRACE statement_list RIGHT_BRACE */
+#line 3781 "MachineIndependent/glslang.y"
{
if ((yyvsp[-1].interm.intermNode) && (yyvsp[-1].interm.intermNode)->getAsAggregate())
(yyvsp[-1].interm.intermNode)->getAsAggregate()->setOperator(EOpSequence);
(yyval.interm.intermNode) = (yyvsp[-1].interm.intermNode);
}
-#line 11143 "MachineIndependent/glslang_tab.cpp"
+#line 11201 "MachineIndependent/glslang_tab.cpp"
break;
- case 575: /* statement_list: statement */
-#line 3764 "MachineIndependent/glslang.y"
+ case 578: /* statement_list: statement */
+#line 3789 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = parseContext.intermediate.makeAggregate((yyvsp[0].interm.intermNode));
if ((yyvsp[0].interm.intermNode) && (yyvsp[0].interm.intermNode)->getAsBranchNode() && ((yyvsp[0].interm.intermNode)->getAsBranchNode()->getFlowOp() == EOpCase ||
@@ -11152,11 +11210,11 @@ yyreduce:
(yyval.interm.intermNode) = 0; // start a fresh subsequence for what's after this case
}
}
-#line 11156 "MachineIndependent/glslang_tab.cpp"
+#line 11214 "MachineIndependent/glslang_tab.cpp"
break;
- case 576: /* statement_list: statement_list statement */
-#line 3772 "MachineIndependent/glslang.y"
+ case 579: /* statement_list: statement_list statement */
+#line 3797 "MachineIndependent/glslang.y"
{
if ((yyvsp[0].interm.intermNode) && (yyvsp[0].interm.intermNode)->getAsBranchNode() && ((yyvsp[0].interm.intermNode)->getAsBranchNode()->getFlowOp() == EOpCase ||
(yyvsp[0].interm.intermNode)->getAsBranchNode()->getFlowOp() == EOpDefault)) {
@@ -11165,77 +11223,77 @@ yyreduce:
} else
(yyval.interm.intermNode) = parseContext.intermediate.growAggregate((yyvsp[-1].interm.intermNode), (yyvsp[0].interm.intermNode));
}
-#line 11169 "MachineIndependent/glslang_tab.cpp"
+#line 11227 "MachineIndependent/glslang_tab.cpp"
break;
- case 577: /* expression_statement: SEMICOLON */
-#line 3783 "MachineIndependent/glslang.y"
+ case 580: /* expression_statement: SEMICOLON */
+#line 3808 "MachineIndependent/glslang.y"
{ (yyval.interm.intermNode) = 0; }
-#line 11175 "MachineIndependent/glslang_tab.cpp"
+#line 11233 "MachineIndependent/glslang_tab.cpp"
break;
- case 578: /* expression_statement: expression SEMICOLON */
-#line 3784 "MachineIndependent/glslang.y"
+ case 581: /* expression_statement: expression SEMICOLON */
+#line 3809 "MachineIndependent/glslang.y"
{ (yyval.interm.intermNode) = static_cast<TIntermNode*>((yyvsp[-1].interm.intermTypedNode)); }
-#line 11181 "MachineIndependent/glslang_tab.cpp"
+#line 11239 "MachineIndependent/glslang_tab.cpp"
break;
- case 579: /* selection_statement: selection_statement_nonattributed */
-#line 3788 "MachineIndependent/glslang.y"
+ case 582: /* selection_statement: selection_statement_nonattributed */
+#line 3813 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = (yyvsp[0].interm.intermNode);
}
-#line 11189 "MachineIndependent/glslang_tab.cpp"
+#line 11247 "MachineIndependent/glslang_tab.cpp"
break;
- case 580: /* selection_statement: attribute selection_statement_nonattributed */
-#line 3792 "MachineIndependent/glslang.y"
+ case 583: /* selection_statement: attribute selection_statement_nonattributed */
+#line 3817 "MachineIndependent/glslang.y"
{
parseContext.requireExtensions((yyvsp[0].interm.intermNode)->getLoc(), 1, &E_GL_EXT_control_flow_attributes, "attribute");
parseContext.handleSelectionAttributes(*(yyvsp[-1].interm.attributes), (yyvsp[0].interm.intermNode));
(yyval.interm.intermNode) = (yyvsp[0].interm.intermNode);
}
-#line 11199 "MachineIndependent/glslang_tab.cpp"
+#line 11257 "MachineIndependent/glslang_tab.cpp"
break;
- case 581: /* selection_statement_nonattributed: IF LEFT_PAREN expression RIGHT_PAREN selection_rest_statement */
-#line 3800 "MachineIndependent/glslang.y"
+ case 584: /* selection_statement_nonattributed: IF LEFT_PAREN expression RIGHT_PAREN selection_rest_statement */
+#line 3825 "MachineIndependent/glslang.y"
{
parseContext.boolCheck((yyvsp[-4].lex).loc, (yyvsp[-2].interm.intermTypedNode));
(yyval.interm.intermNode) = parseContext.intermediate.addSelection((yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.nodePair), (yyvsp[-4].lex).loc);
}
-#line 11208 "MachineIndependent/glslang_tab.cpp"
+#line 11266 "MachineIndependent/glslang_tab.cpp"
break;
- case 582: /* selection_rest_statement: statement_scoped ELSE statement_scoped */
-#line 3807 "MachineIndependent/glslang.y"
+ case 585: /* selection_rest_statement: statement_scoped ELSE statement_scoped */
+#line 3832 "MachineIndependent/glslang.y"
{
(yyval.interm.nodePair).node1 = (yyvsp[-2].interm.intermNode);
(yyval.interm.nodePair).node2 = (yyvsp[0].interm.intermNode);
}
-#line 11217 "MachineIndependent/glslang_tab.cpp"
+#line 11275 "MachineIndependent/glslang_tab.cpp"
break;
- case 583: /* selection_rest_statement: statement_scoped */
-#line 3811 "MachineIndependent/glslang.y"
+ case 586: /* selection_rest_statement: statement_scoped */
+#line 3836 "MachineIndependent/glslang.y"
{
(yyval.interm.nodePair).node1 = (yyvsp[0].interm.intermNode);
(yyval.interm.nodePair).node2 = 0;
}
-#line 11226 "MachineIndependent/glslang_tab.cpp"
+#line 11284 "MachineIndependent/glslang_tab.cpp"
break;
- case 584: /* condition: expression */
-#line 3819 "MachineIndependent/glslang.y"
+ case 587: /* condition: expression */
+#line 3844 "MachineIndependent/glslang.y"
{
(yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode);
parseContext.boolCheck((yyvsp[0].interm.intermTypedNode)->getLoc(), (yyvsp[0].interm.intermTypedNode));
}
-#line 11235 "MachineIndependent/glslang_tab.cpp"
+#line 11293 "MachineIndependent/glslang_tab.cpp"
break;
- case 585: /* condition: fully_specified_type IDENTIFIER EQUAL initializer */
-#line 3823 "MachineIndependent/glslang.y"
+ case 588: /* condition: fully_specified_type IDENTIFIER EQUAL initializer */
+#line 3848 "MachineIndependent/glslang.y"
{
parseContext.boolCheck((yyvsp[-2].lex).loc, (yyvsp[-3].interm.type));
@@ -11246,29 +11304,29 @@ yyreduce:
else
(yyval.interm.intermTypedNode) = 0;
}
-#line 11250 "MachineIndependent/glslang_tab.cpp"
+#line 11308 "MachineIndependent/glslang_tab.cpp"
break;
- case 586: /* switch_statement: switch_statement_nonattributed */
-#line 3836 "MachineIndependent/glslang.y"
+ case 589: /* switch_statement: switch_statement_nonattributed */
+#line 3861 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = (yyvsp[0].interm.intermNode);
}
-#line 11258 "MachineIndependent/glslang_tab.cpp"
+#line 11316 "MachineIndependent/glslang_tab.cpp"
break;
- case 587: /* switch_statement: attribute switch_statement_nonattributed */
-#line 3840 "MachineIndependent/glslang.y"
+ case 590: /* switch_statement: attribute switch_statement_nonattributed */
+#line 3865 "MachineIndependent/glslang.y"
{
parseContext.requireExtensions((yyvsp[0].interm.intermNode)->getLoc(), 1, &E_GL_EXT_control_flow_attributes, "attribute");
parseContext.handleSwitchAttributes(*(yyvsp[-1].interm.attributes), (yyvsp[0].interm.intermNode));
(yyval.interm.intermNode) = (yyvsp[0].interm.intermNode);
}
-#line 11268 "MachineIndependent/glslang_tab.cpp"
+#line 11326 "MachineIndependent/glslang_tab.cpp"
break;
- case 588: /* $@9: %empty */
-#line 3848 "MachineIndependent/glslang.y"
+ case 591: /* $@9: %empty */
+#line 3873 "MachineIndependent/glslang.y"
{
// start new switch sequence on the switch stack
++parseContext.controlFlowNestingLevel;
@@ -11277,11 +11335,11 @@ yyreduce:
parseContext.switchLevel.push_back(parseContext.statementNestingLevel);
parseContext.symbolTable.push();
}
-#line 11281 "MachineIndependent/glslang_tab.cpp"
+#line 11339 "MachineIndependent/glslang_tab.cpp"
break;
- case 589: /* switch_statement_nonattributed: SWITCH LEFT_PAREN expression RIGHT_PAREN $@9 LEFT_BRACE switch_statement_list RIGHT_BRACE */
-#line 3856 "MachineIndependent/glslang.y"
+ case 592: /* switch_statement_nonattributed: SWITCH LEFT_PAREN expression RIGHT_PAREN $@9 LEFT_BRACE switch_statement_list RIGHT_BRACE */
+#line 3881 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = parseContext.addSwitch((yyvsp[-7].lex).loc, (yyvsp[-5].interm.intermTypedNode), (yyvsp[-1].interm.intermNode) ? (yyvsp[-1].interm.intermNode)->getAsAggregate() : 0);
delete parseContext.switchSequenceStack.back();
@@ -11291,27 +11349,27 @@ yyreduce:
--parseContext.statementNestingLevel;
--parseContext.controlFlowNestingLevel;
}
-#line 11295 "MachineIndependent/glslang_tab.cpp"
+#line 11353 "MachineIndependent/glslang_tab.cpp"
break;
- case 590: /* switch_statement_list: %empty */
-#line 3868 "MachineIndependent/glslang.y"
+ case 593: /* switch_statement_list: %empty */
+#line 3893 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = 0;
}
-#line 11303 "MachineIndependent/glslang_tab.cpp"
+#line 11361 "MachineIndependent/glslang_tab.cpp"
break;
- case 591: /* switch_statement_list: statement_list */
-#line 3871 "MachineIndependent/glslang.y"
+ case 594: /* switch_statement_list: statement_list */
+#line 3896 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = (yyvsp[0].interm.intermNode);
}
-#line 11311 "MachineIndependent/glslang_tab.cpp"
+#line 11369 "MachineIndependent/glslang_tab.cpp"
break;
- case 592: /* case_label: CASE expression COLON */
-#line 3877 "MachineIndependent/glslang.y"
+ case 595: /* case_label: CASE expression COLON */
+#line 3902 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = 0;
if (parseContext.switchLevel.size() == 0)
@@ -11324,11 +11382,11 @@ yyreduce:
(yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpCase, (yyvsp[-1].interm.intermTypedNode), (yyvsp[-2].lex).loc);
}
}
-#line 11328 "MachineIndependent/glslang_tab.cpp"
+#line 11386 "MachineIndependent/glslang_tab.cpp"
break;
- case 593: /* case_label: DEFAULT COLON */
-#line 3889 "MachineIndependent/glslang.y"
+ case 596: /* case_label: DEFAULT COLON */
+#line 3914 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = 0;
if (parseContext.switchLevel.size() == 0)
@@ -11338,29 +11396,29 @@ yyreduce:
else
(yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpDefault, (yyvsp[-1].lex).loc);
}
-#line 11342 "MachineIndependent/glslang_tab.cpp"
+#line 11400 "MachineIndependent/glslang_tab.cpp"
break;
- case 594: /* iteration_statement: iteration_statement_nonattributed */
-#line 3901 "MachineIndependent/glslang.y"
+ case 597: /* iteration_statement: iteration_statement_nonattributed */
+#line 3926 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = (yyvsp[0].interm.intermNode);
}
-#line 11350 "MachineIndependent/glslang_tab.cpp"
+#line 11408 "MachineIndependent/glslang_tab.cpp"
break;
- case 595: /* iteration_statement: attribute iteration_statement_nonattributed */
-#line 3905 "MachineIndependent/glslang.y"
+ case 598: /* iteration_statement: attribute iteration_statement_nonattributed */
+#line 3930 "MachineIndependent/glslang.y"
{
parseContext.requireExtensions((yyvsp[0].interm.intermNode)->getLoc(), 1, &E_GL_EXT_control_flow_attributes, "attribute");
parseContext.handleLoopAttributes(*(yyvsp[-1].interm.attributes), (yyvsp[0].interm.intermNode));
(yyval.interm.intermNode) = (yyvsp[0].interm.intermNode);
}
-#line 11360 "MachineIndependent/glslang_tab.cpp"
+#line 11418 "MachineIndependent/glslang_tab.cpp"
break;
- case 596: /* $@10: %empty */
-#line 3913 "MachineIndependent/glslang.y"
+ case 599: /* $@10: %empty */
+#line 3938 "MachineIndependent/glslang.y"
{
if (! parseContext.limits.whileLoops)
parseContext.error((yyvsp[-1].lex).loc, "while loops not available", "limitation", "");
@@ -11369,11 +11427,11 @@ yyreduce:
++parseContext.statementNestingLevel;
++parseContext.controlFlowNestingLevel;
}
-#line 11373 "MachineIndependent/glslang_tab.cpp"
+#line 11431 "MachineIndependent/glslang_tab.cpp"
break;
- case 597: /* iteration_statement_nonattributed: WHILE LEFT_PAREN $@10 condition RIGHT_PAREN statement_no_new_scope */
-#line 3921 "MachineIndependent/glslang.y"
+ case 600: /* iteration_statement_nonattributed: WHILE LEFT_PAREN $@10 condition RIGHT_PAREN statement_no_new_scope */
+#line 3946 "MachineIndependent/glslang.y"
{
parseContext.symbolTable.pop(&parseContext.defaultPrecision[0]);
(yyval.interm.intermNode) = parseContext.intermediate.addLoop((yyvsp[0].interm.intermNode), (yyvsp[-2].interm.intermTypedNode), 0, true, (yyvsp[-5].lex).loc);
@@ -11381,22 +11439,22 @@ yyreduce:
--parseContext.statementNestingLevel;
--parseContext.controlFlowNestingLevel;
}
-#line 11385 "MachineIndependent/glslang_tab.cpp"
+#line 11443 "MachineIndependent/glslang_tab.cpp"
break;
- case 598: /* $@11: %empty */
-#line 3928 "MachineIndependent/glslang.y"
+ case 601: /* $@11: %empty */
+#line 3953 "MachineIndependent/glslang.y"
{
parseContext.symbolTable.push();
++parseContext.loopNestingLevel;
++parseContext.statementNestingLevel;
++parseContext.controlFlowNestingLevel;
}
-#line 11396 "MachineIndependent/glslang_tab.cpp"
+#line 11454 "MachineIndependent/glslang_tab.cpp"
break;
- case 599: /* iteration_statement_nonattributed: DO $@11 statement WHILE LEFT_PAREN expression RIGHT_PAREN SEMICOLON */
-#line 3934 "MachineIndependent/glslang.y"
+ case 602: /* iteration_statement_nonattributed: DO $@11 statement WHILE LEFT_PAREN expression RIGHT_PAREN SEMICOLON */
+#line 3959 "MachineIndependent/glslang.y"
{
if (! parseContext.limits.whileLoops)
parseContext.error((yyvsp[-7].lex).loc, "do-while loops not available", "limitation", "");
@@ -11409,22 +11467,22 @@ yyreduce:
--parseContext.statementNestingLevel;
--parseContext.controlFlowNestingLevel;
}
-#line 11413 "MachineIndependent/glslang_tab.cpp"
+#line 11471 "MachineIndependent/glslang_tab.cpp"
break;
- case 600: /* $@12: %empty */
-#line 3946 "MachineIndependent/glslang.y"
+ case 603: /* $@12: %empty */
+#line 3971 "MachineIndependent/glslang.y"
{
parseContext.symbolTable.push();
++parseContext.loopNestingLevel;
++parseContext.statementNestingLevel;
++parseContext.controlFlowNestingLevel;
}
-#line 11424 "MachineIndependent/glslang_tab.cpp"
+#line 11482 "MachineIndependent/glslang_tab.cpp"
break;
- case 601: /* iteration_statement_nonattributed: FOR LEFT_PAREN $@12 for_init_statement for_rest_statement RIGHT_PAREN statement_no_new_scope */
-#line 3952 "MachineIndependent/glslang.y"
+ case 604: /* iteration_statement_nonattributed: FOR LEFT_PAREN $@12 for_init_statement for_rest_statement RIGHT_PAREN statement_no_new_scope */
+#line 3977 "MachineIndependent/glslang.y"
{
parseContext.symbolTable.pop(&parseContext.defaultPrecision[0]);
(yyval.interm.intermNode) = parseContext.intermediate.makeAggregate((yyvsp[-3].interm.intermNode), (yyvsp[-5].lex).loc);
@@ -11437,81 +11495,81 @@ yyreduce:
--parseContext.statementNestingLevel;
--parseContext.controlFlowNestingLevel;
}
-#line 11441 "MachineIndependent/glslang_tab.cpp"
+#line 11499 "MachineIndependent/glslang_tab.cpp"
break;
- case 602: /* for_init_statement: expression_statement */
-#line 3967 "MachineIndependent/glslang.y"
+ case 605: /* for_init_statement: expression_statement */
+#line 3992 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = (yyvsp[0].interm.intermNode);
}
-#line 11449 "MachineIndependent/glslang_tab.cpp"
+#line 11507 "MachineIndependent/glslang_tab.cpp"
break;
- case 603: /* for_init_statement: declaration_statement */
-#line 3970 "MachineIndependent/glslang.y"
+ case 606: /* for_init_statement: declaration_statement */
+#line 3995 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = (yyvsp[0].interm.intermNode);
}
-#line 11457 "MachineIndependent/glslang_tab.cpp"
+#line 11515 "MachineIndependent/glslang_tab.cpp"
break;
- case 604: /* conditionopt: condition */
-#line 3976 "MachineIndependent/glslang.y"
+ case 607: /* conditionopt: condition */
+#line 4001 "MachineIndependent/glslang.y"
{
(yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode);
}
-#line 11465 "MachineIndependent/glslang_tab.cpp"
+#line 11523 "MachineIndependent/glslang_tab.cpp"
break;
- case 605: /* conditionopt: %empty */
-#line 3979 "MachineIndependent/glslang.y"
+ case 608: /* conditionopt: %empty */
+#line 4004 "MachineIndependent/glslang.y"
{
(yyval.interm.intermTypedNode) = 0;
}
-#line 11473 "MachineIndependent/glslang_tab.cpp"
+#line 11531 "MachineIndependent/glslang_tab.cpp"
break;
- case 606: /* for_rest_statement: conditionopt SEMICOLON */
-#line 3985 "MachineIndependent/glslang.y"
+ case 609: /* for_rest_statement: conditionopt SEMICOLON */
+#line 4010 "MachineIndependent/glslang.y"
{
(yyval.interm.nodePair).node1 = (yyvsp[-1].interm.intermTypedNode);
(yyval.interm.nodePair).node2 = 0;
}
-#line 11482 "MachineIndependent/glslang_tab.cpp"
+#line 11540 "MachineIndependent/glslang_tab.cpp"
break;
- case 607: /* for_rest_statement: conditionopt SEMICOLON expression */
-#line 3989 "MachineIndependent/glslang.y"
+ case 610: /* for_rest_statement: conditionopt SEMICOLON expression */
+#line 4014 "MachineIndependent/glslang.y"
{
(yyval.interm.nodePair).node1 = (yyvsp[-2].interm.intermTypedNode);
(yyval.interm.nodePair).node2 = (yyvsp[0].interm.intermTypedNode);
}
-#line 11491 "MachineIndependent/glslang_tab.cpp"
+#line 11549 "MachineIndependent/glslang_tab.cpp"
break;
- case 608: /* jump_statement: CONTINUE SEMICOLON */
-#line 3996 "MachineIndependent/glslang.y"
+ case 611: /* jump_statement: CONTINUE SEMICOLON */
+#line 4021 "MachineIndependent/glslang.y"
{
if (parseContext.loopNestingLevel <= 0)
parseContext.error((yyvsp[-1].lex).loc, "continue statement only allowed in loops", "", "");
(yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpContinue, (yyvsp[-1].lex).loc);
}
-#line 11501 "MachineIndependent/glslang_tab.cpp"
+#line 11559 "MachineIndependent/glslang_tab.cpp"
break;
- case 609: /* jump_statement: BREAK SEMICOLON */
-#line 4001 "MachineIndependent/glslang.y"
+ case 612: /* jump_statement: BREAK SEMICOLON */
+#line 4026 "MachineIndependent/glslang.y"
{
if (parseContext.loopNestingLevel + parseContext.switchSequenceStack.size() <= 0)
parseContext.error((yyvsp[-1].lex).loc, "break statement only allowed in switch and loops", "", "");
(yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpBreak, (yyvsp[-1].lex).loc);
}
-#line 11511 "MachineIndependent/glslang_tab.cpp"
+#line 11569 "MachineIndependent/glslang_tab.cpp"
break;
- case 610: /* jump_statement: RETURN SEMICOLON */
-#line 4006 "MachineIndependent/glslang.y"
+ case 613: /* jump_statement: RETURN SEMICOLON */
+#line 4031 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpReturn, (yyvsp[-1].lex).loc);
if (parseContext.currentFunctionType->getBasicType() != EbtVoid)
@@ -11519,101 +11577,101 @@ yyreduce:
if (parseContext.inMain)
parseContext.postEntryPointReturn = true;
}
-#line 11523 "MachineIndependent/glslang_tab.cpp"
+#line 11581 "MachineIndependent/glslang_tab.cpp"
break;
- case 611: /* jump_statement: RETURN expression SEMICOLON */
-#line 4013 "MachineIndependent/glslang.y"
+ case 614: /* jump_statement: RETURN expression SEMICOLON */
+#line 4038 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = parseContext.handleReturnValue((yyvsp[-2].lex).loc, (yyvsp[-1].interm.intermTypedNode));
}
-#line 11531 "MachineIndependent/glslang_tab.cpp"
+#line 11589 "MachineIndependent/glslang_tab.cpp"
break;
- case 612: /* jump_statement: DISCARD SEMICOLON */
-#line 4016 "MachineIndependent/glslang.y"
+ case 615: /* jump_statement: DISCARD SEMICOLON */
+#line 4041 "MachineIndependent/glslang.y"
{
parseContext.requireStage((yyvsp[-1].lex).loc, EShLangFragment, "discard");
(yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpKill, (yyvsp[-1].lex).loc);
}
-#line 11540 "MachineIndependent/glslang_tab.cpp"
+#line 11598 "MachineIndependent/glslang_tab.cpp"
break;
- case 613: /* jump_statement: TERMINATE_INVOCATION SEMICOLON */
-#line 4020 "MachineIndependent/glslang.y"
+ case 616: /* jump_statement: TERMINATE_INVOCATION SEMICOLON */
+#line 4045 "MachineIndependent/glslang.y"
{
parseContext.requireStage((yyvsp[-1].lex).loc, EShLangFragment, "terminateInvocation");
(yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpTerminateInvocation, (yyvsp[-1].lex).loc);
}
-#line 11549 "MachineIndependent/glslang_tab.cpp"
+#line 11607 "MachineIndependent/glslang_tab.cpp"
break;
- case 614: /* jump_statement: TERMINATE_RAY SEMICOLON */
-#line 4025 "MachineIndependent/glslang.y"
+ case 617: /* jump_statement: TERMINATE_RAY SEMICOLON */
+#line 4050 "MachineIndependent/glslang.y"
{
parseContext.requireStage((yyvsp[-1].lex).loc, EShLangAnyHit, "terminateRayEXT");
(yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpTerminateRayKHR, (yyvsp[-1].lex).loc);
}
-#line 11558 "MachineIndependent/glslang_tab.cpp"
+#line 11616 "MachineIndependent/glslang_tab.cpp"
break;
- case 615: /* jump_statement: IGNORE_INTERSECTION SEMICOLON */
-#line 4029 "MachineIndependent/glslang.y"
+ case 618: /* jump_statement: IGNORE_INTERSECTION SEMICOLON */
+#line 4054 "MachineIndependent/glslang.y"
{
parseContext.requireStage((yyvsp[-1].lex).loc, EShLangAnyHit, "ignoreIntersectionEXT");
(yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpIgnoreIntersectionKHR, (yyvsp[-1].lex).loc);
}
-#line 11567 "MachineIndependent/glslang_tab.cpp"
+#line 11625 "MachineIndependent/glslang_tab.cpp"
break;
- case 616: /* translation_unit: external_declaration */
-#line 4039 "MachineIndependent/glslang.y"
+ case 619: /* translation_unit: external_declaration */
+#line 4064 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = (yyvsp[0].interm.intermNode);
parseContext.intermediate.setTreeRoot((yyval.interm.intermNode));
}
-#line 11576 "MachineIndependent/glslang_tab.cpp"
+#line 11634 "MachineIndependent/glslang_tab.cpp"
break;
- case 617: /* translation_unit: translation_unit external_declaration */
-#line 4043 "MachineIndependent/glslang.y"
+ case 620: /* translation_unit: translation_unit external_declaration */
+#line 4068 "MachineIndependent/glslang.y"
{
if ((yyvsp[0].interm.intermNode) != nullptr) {
(yyval.interm.intermNode) = parseContext.intermediate.growAggregate((yyvsp[-1].interm.intermNode), (yyvsp[0].interm.intermNode));
parseContext.intermediate.setTreeRoot((yyval.interm.intermNode));
}
}
-#line 11587 "MachineIndependent/glslang_tab.cpp"
+#line 11645 "MachineIndependent/glslang_tab.cpp"
break;
- case 618: /* external_declaration: function_definition */
-#line 4052 "MachineIndependent/glslang.y"
+ case 621: /* external_declaration: function_definition */
+#line 4077 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = (yyvsp[0].interm.intermNode);
}
-#line 11595 "MachineIndependent/glslang_tab.cpp"
+#line 11653 "MachineIndependent/glslang_tab.cpp"
break;
- case 619: /* external_declaration: declaration */
-#line 4055 "MachineIndependent/glslang.y"
+ case 622: /* external_declaration: declaration */
+#line 4080 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = (yyvsp[0].interm.intermNode);
}
-#line 11603 "MachineIndependent/glslang_tab.cpp"
+#line 11661 "MachineIndependent/glslang_tab.cpp"
break;
- case 620: /* external_declaration: SEMICOLON */
-#line 4059 "MachineIndependent/glslang.y"
+ case 623: /* external_declaration: SEMICOLON */
+#line 4084 "MachineIndependent/glslang.y"
{
parseContext.requireProfile((yyvsp[0].lex).loc, ~EEsProfile, "extraneous semicolon");
parseContext.profileRequires((yyvsp[0].lex).loc, ~EEsProfile, 460, nullptr, "extraneous semicolon");
(yyval.interm.intermNode) = nullptr;
}
-#line 11613 "MachineIndependent/glslang_tab.cpp"
+#line 11671 "MachineIndependent/glslang_tab.cpp"
break;
- case 621: /* $@13: %empty */
-#line 4068 "MachineIndependent/glslang.y"
+ case 624: /* $@13: %empty */
+#line 4093 "MachineIndependent/glslang.y"
{
(yyvsp[0].interm).function = parseContext.handleFunctionDeclarator((yyvsp[0].interm).loc, *(yyvsp[0].interm).function, false /* not prototype */);
(yyvsp[0].interm).intermNode = parseContext.handleFunctionDefinition((yyvsp[0].interm).loc, *(yyvsp[0].interm).function);
@@ -11626,11 +11684,11 @@ yyreduce:
++parseContext.statementNestingLevel;
}
}
-#line 11630 "MachineIndependent/glslang_tab.cpp"
+#line 11688 "MachineIndependent/glslang_tab.cpp"
break;
- case 622: /* function_definition: function_prototype $@13 compound_statement_no_new_scope */
-#line 4080 "MachineIndependent/glslang.y"
+ case 625: /* function_definition: function_prototype $@13 compound_statement_no_new_scope */
+#line 4105 "MachineIndependent/glslang.y"
{
// May be best done as post process phase on intermediate code
if (parseContext.currentFunctionType->getBasicType() != EbtVoid && ! parseContext.functionReturnsValue)
@@ -11657,228 +11715,228 @@ yyreduce:
--parseContext.statementNestingLevel;
}
}
-#line 11661 "MachineIndependent/glslang_tab.cpp"
+#line 11719 "MachineIndependent/glslang_tab.cpp"
break;
- case 623: /* attribute: LEFT_BRACKET LEFT_BRACKET attribute_list RIGHT_BRACKET RIGHT_BRACKET */
-#line 4110 "MachineIndependent/glslang.y"
+ case 626: /* attribute: LEFT_BRACKET LEFT_BRACKET attribute_list RIGHT_BRACKET RIGHT_BRACKET */
+#line 4135 "MachineIndependent/glslang.y"
{
(yyval.interm.attributes) = (yyvsp[-2].interm.attributes);
}
-#line 11669 "MachineIndependent/glslang_tab.cpp"
+#line 11727 "MachineIndependent/glslang_tab.cpp"
break;
- case 624: /* attribute_list: single_attribute */
-#line 4115 "MachineIndependent/glslang.y"
+ case 627: /* attribute_list: single_attribute */
+#line 4140 "MachineIndependent/glslang.y"
{
(yyval.interm.attributes) = (yyvsp[0].interm.attributes);
}
-#line 11677 "MachineIndependent/glslang_tab.cpp"
+#line 11735 "MachineIndependent/glslang_tab.cpp"
break;
- case 625: /* attribute_list: attribute_list COMMA single_attribute */
-#line 4118 "MachineIndependent/glslang.y"
+ case 628: /* attribute_list: attribute_list COMMA single_attribute */
+#line 4143 "MachineIndependent/glslang.y"
{
(yyval.interm.attributes) = parseContext.mergeAttributes((yyvsp[-2].interm.attributes), (yyvsp[0].interm.attributes));
}
-#line 11685 "MachineIndependent/glslang_tab.cpp"
+#line 11743 "MachineIndependent/glslang_tab.cpp"
break;
- case 626: /* single_attribute: IDENTIFIER */
-#line 4123 "MachineIndependent/glslang.y"
+ case 629: /* single_attribute: IDENTIFIER */
+#line 4148 "MachineIndependent/glslang.y"
{
(yyval.interm.attributes) = parseContext.makeAttributes(*(yyvsp[0].lex).string);
}
-#line 11693 "MachineIndependent/glslang_tab.cpp"
+#line 11751 "MachineIndependent/glslang_tab.cpp"
break;
- case 627: /* single_attribute: IDENTIFIER LEFT_PAREN constant_expression RIGHT_PAREN */
-#line 4126 "MachineIndependent/glslang.y"
+ case 630: /* single_attribute: IDENTIFIER LEFT_PAREN constant_expression RIGHT_PAREN */
+#line 4151 "MachineIndependent/glslang.y"
{
(yyval.interm.attributes) = parseContext.makeAttributes(*(yyvsp[-3].lex).string, (yyvsp[-1].interm.intermTypedNode));
}
-#line 11701 "MachineIndependent/glslang_tab.cpp"
+#line 11759 "MachineIndependent/glslang_tab.cpp"
break;
- case 628: /* spirv_requirements_list: spirv_requirements_parameter */
-#line 4133 "MachineIndependent/glslang.y"
+ case 631: /* spirv_requirements_list: spirv_requirements_parameter */
+#line 4158 "MachineIndependent/glslang.y"
{
(yyval.interm.spirvReq) = (yyvsp[0].interm.spirvReq);
}
-#line 11709 "MachineIndependent/glslang_tab.cpp"
+#line 11767 "MachineIndependent/glslang_tab.cpp"
break;
- case 629: /* spirv_requirements_list: spirv_requirements_list COMMA spirv_requirements_parameter */
-#line 4136 "MachineIndependent/glslang.y"
+ case 632: /* spirv_requirements_list: spirv_requirements_list COMMA spirv_requirements_parameter */
+#line 4161 "MachineIndependent/glslang.y"
{
(yyval.interm.spirvReq) = parseContext.mergeSpirvRequirements((yyvsp[-1].lex).loc, (yyvsp[-2].interm.spirvReq), (yyvsp[0].interm.spirvReq));
}
-#line 11717 "MachineIndependent/glslang_tab.cpp"
+#line 11775 "MachineIndependent/glslang_tab.cpp"
break;
- case 630: /* spirv_requirements_parameter: IDENTIFIER EQUAL LEFT_BRACKET spirv_extension_list RIGHT_BRACKET */
-#line 4141 "MachineIndependent/glslang.y"
+ case 633: /* spirv_requirements_parameter: IDENTIFIER EQUAL LEFT_BRACKET spirv_extension_list RIGHT_BRACKET */
+#line 4166 "MachineIndependent/glslang.y"
{
(yyval.interm.spirvReq) = parseContext.makeSpirvRequirement((yyvsp[-3].lex).loc, *(yyvsp[-4].lex).string, (yyvsp[-1].interm.intermNode)->getAsAggregate(), nullptr);
}
-#line 11725 "MachineIndependent/glslang_tab.cpp"
+#line 11783 "MachineIndependent/glslang_tab.cpp"
break;
- case 631: /* spirv_requirements_parameter: IDENTIFIER EQUAL LEFT_BRACKET spirv_capability_list RIGHT_BRACKET */
-#line 4144 "MachineIndependent/glslang.y"
+ case 634: /* spirv_requirements_parameter: IDENTIFIER EQUAL LEFT_BRACKET spirv_capability_list RIGHT_BRACKET */
+#line 4169 "MachineIndependent/glslang.y"
{
(yyval.interm.spirvReq) = parseContext.makeSpirvRequirement((yyvsp[-3].lex).loc, *(yyvsp[-4].lex).string, nullptr, (yyvsp[-1].interm.intermNode)->getAsAggregate());
}
-#line 11733 "MachineIndependent/glslang_tab.cpp"
+#line 11791 "MachineIndependent/glslang_tab.cpp"
break;
- case 632: /* spirv_extension_list: STRING_LITERAL */
-#line 4149 "MachineIndependent/glslang.y"
+ case 635: /* spirv_extension_list: STRING_LITERAL */
+#line 4174 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = parseContext.intermediate.makeAggregate(parseContext.intermediate.addConstantUnion((yyvsp[0].lex).string, (yyvsp[0].lex).loc, true));
}
-#line 11741 "MachineIndependent/glslang_tab.cpp"
+#line 11799 "MachineIndependent/glslang_tab.cpp"
break;
- case 633: /* spirv_extension_list: spirv_extension_list COMMA STRING_LITERAL */
-#line 4152 "MachineIndependent/glslang.y"
+ case 636: /* spirv_extension_list: spirv_extension_list COMMA STRING_LITERAL */
+#line 4177 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = parseContext.intermediate.growAggregate((yyvsp[-2].interm.intermNode), parseContext.intermediate.addConstantUnion((yyvsp[0].lex).string, (yyvsp[0].lex).loc, true));
}
-#line 11749 "MachineIndependent/glslang_tab.cpp"
+#line 11807 "MachineIndependent/glslang_tab.cpp"
break;
- case 634: /* spirv_capability_list: INTCONSTANT */
-#line 4157 "MachineIndependent/glslang.y"
+ case 637: /* spirv_capability_list: INTCONSTANT */
+#line 4182 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = parseContext.intermediate.makeAggregate(parseContext.intermediate.addConstantUnion((yyvsp[0].lex).i, (yyvsp[0].lex).loc, true));
}
-#line 11757 "MachineIndependent/glslang_tab.cpp"
+#line 11815 "MachineIndependent/glslang_tab.cpp"
break;
- case 635: /* spirv_capability_list: spirv_capability_list COMMA INTCONSTANT */
-#line 4160 "MachineIndependent/glslang.y"
+ case 638: /* spirv_capability_list: spirv_capability_list COMMA INTCONSTANT */
+#line 4185 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = parseContext.intermediate.growAggregate((yyvsp[-2].interm.intermNode), parseContext.intermediate.addConstantUnion((yyvsp[0].lex).i, (yyvsp[0].lex).loc, true));
}
-#line 11765 "MachineIndependent/glslang_tab.cpp"
+#line 11823 "MachineIndependent/glslang_tab.cpp"
break;
- case 636: /* spirv_execution_mode_qualifier: SPIRV_EXECUTION_MODE LEFT_PAREN INTCONSTANT RIGHT_PAREN */
-#line 4165 "MachineIndependent/glslang.y"
+ case 639: /* spirv_execution_mode_qualifier: SPIRV_EXECUTION_MODE LEFT_PAREN INTCONSTANT RIGHT_PAREN */
+#line 4190 "MachineIndependent/glslang.y"
{
parseContext.intermediate.insertSpirvExecutionMode((yyvsp[-1].lex).i);
(yyval.interm.intermNode) = 0;
}
-#line 11774 "MachineIndependent/glslang_tab.cpp"
+#line 11832 "MachineIndependent/glslang_tab.cpp"
break;
- case 637: /* spirv_execution_mode_qualifier: SPIRV_EXECUTION_MODE LEFT_PAREN spirv_requirements_list COMMA INTCONSTANT RIGHT_PAREN */
-#line 4169 "MachineIndependent/glslang.y"
+ case 640: /* spirv_execution_mode_qualifier: SPIRV_EXECUTION_MODE LEFT_PAREN spirv_requirements_list COMMA INTCONSTANT RIGHT_PAREN */
+#line 4194 "MachineIndependent/glslang.y"
{
parseContext.intermediate.insertSpirvRequirement((yyvsp[-3].interm.spirvReq));
parseContext.intermediate.insertSpirvExecutionMode((yyvsp[-1].lex).i);
(yyval.interm.intermNode) = 0;
}
-#line 11784 "MachineIndependent/glslang_tab.cpp"
+#line 11842 "MachineIndependent/glslang_tab.cpp"
break;
- case 638: /* spirv_execution_mode_qualifier: SPIRV_EXECUTION_MODE LEFT_PAREN INTCONSTANT COMMA spirv_execution_mode_parameter_list RIGHT_PAREN */
-#line 4174 "MachineIndependent/glslang.y"
+ case 641: /* spirv_execution_mode_qualifier: SPIRV_EXECUTION_MODE LEFT_PAREN INTCONSTANT COMMA spirv_execution_mode_parameter_list RIGHT_PAREN */
+#line 4199 "MachineIndependent/glslang.y"
{
parseContext.intermediate.insertSpirvExecutionMode((yyvsp[-3].lex).i, (yyvsp[-1].interm.intermNode)->getAsAggregate());
(yyval.interm.intermNode) = 0;
}
-#line 11793 "MachineIndependent/glslang_tab.cpp"
+#line 11851 "MachineIndependent/glslang_tab.cpp"
break;
- case 639: /* spirv_execution_mode_qualifier: SPIRV_EXECUTION_MODE LEFT_PAREN spirv_requirements_list COMMA INTCONSTANT COMMA spirv_execution_mode_parameter_list RIGHT_PAREN */
-#line 4178 "MachineIndependent/glslang.y"
+ case 642: /* spirv_execution_mode_qualifier: SPIRV_EXECUTION_MODE LEFT_PAREN spirv_requirements_list COMMA INTCONSTANT COMMA spirv_execution_mode_parameter_list RIGHT_PAREN */
+#line 4203 "MachineIndependent/glslang.y"
{
parseContext.intermediate.insertSpirvRequirement((yyvsp[-5].interm.spirvReq));
parseContext.intermediate.insertSpirvExecutionMode((yyvsp[-3].lex).i, (yyvsp[-1].interm.intermNode)->getAsAggregate());
(yyval.interm.intermNode) = 0;
}
-#line 11803 "MachineIndependent/glslang_tab.cpp"
+#line 11861 "MachineIndependent/glslang_tab.cpp"
break;
- case 640: /* spirv_execution_mode_qualifier: SPIRV_EXECUTION_MODE_ID LEFT_PAREN INTCONSTANT COMMA spirv_execution_mode_id_parameter_list RIGHT_PAREN */
-#line 4183 "MachineIndependent/glslang.y"
+ case 643: /* spirv_execution_mode_qualifier: SPIRV_EXECUTION_MODE_ID LEFT_PAREN INTCONSTANT COMMA spirv_execution_mode_id_parameter_list RIGHT_PAREN */
+#line 4208 "MachineIndependent/glslang.y"
{
parseContext.intermediate.insertSpirvExecutionModeId((yyvsp[-3].lex).i, (yyvsp[-1].interm.intermNode)->getAsAggregate());
(yyval.interm.intermNode) = 0;
}
-#line 11812 "MachineIndependent/glslang_tab.cpp"
+#line 11870 "MachineIndependent/glslang_tab.cpp"
break;
- case 641: /* spirv_execution_mode_qualifier: SPIRV_EXECUTION_MODE_ID LEFT_PAREN spirv_requirements_list COMMA INTCONSTANT COMMA spirv_execution_mode_id_parameter_list RIGHT_PAREN */
-#line 4187 "MachineIndependent/glslang.y"
+ case 644: /* spirv_execution_mode_qualifier: SPIRV_EXECUTION_MODE_ID LEFT_PAREN spirv_requirements_list COMMA INTCONSTANT COMMA spirv_execution_mode_id_parameter_list RIGHT_PAREN */
+#line 4212 "MachineIndependent/glslang.y"
{
parseContext.intermediate.insertSpirvRequirement((yyvsp[-5].interm.spirvReq));
parseContext.intermediate.insertSpirvExecutionModeId((yyvsp[-3].lex).i, (yyvsp[-1].interm.intermNode)->getAsAggregate());
(yyval.interm.intermNode) = 0;
}
-#line 11822 "MachineIndependent/glslang_tab.cpp"
+#line 11880 "MachineIndependent/glslang_tab.cpp"
break;
- case 642: /* spirv_execution_mode_parameter_list: spirv_execution_mode_parameter */
-#line 4194 "MachineIndependent/glslang.y"
+ case 645: /* spirv_execution_mode_parameter_list: spirv_execution_mode_parameter */
+#line 4219 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = parseContext.intermediate.makeAggregate((yyvsp[0].interm.intermNode));
}
-#line 11830 "MachineIndependent/glslang_tab.cpp"
+#line 11888 "MachineIndependent/glslang_tab.cpp"
break;
- case 643: /* spirv_execution_mode_parameter_list: spirv_execution_mode_parameter_list COMMA spirv_execution_mode_parameter */
-#line 4197 "MachineIndependent/glslang.y"
+ case 646: /* spirv_execution_mode_parameter_list: spirv_execution_mode_parameter_list COMMA spirv_execution_mode_parameter */
+#line 4222 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = parseContext.intermediate.growAggregate((yyvsp[-2].interm.intermNode), (yyvsp[0].interm.intermNode));
}
-#line 11838 "MachineIndependent/glslang_tab.cpp"
+#line 11896 "MachineIndependent/glslang_tab.cpp"
break;
- case 644: /* spirv_execution_mode_parameter: FLOATCONSTANT */
-#line 4202 "MachineIndependent/glslang.y"
+ case 647: /* spirv_execution_mode_parameter: FLOATCONSTANT */
+#line 4227 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).d, EbtFloat, (yyvsp[0].lex).loc, true);
}
-#line 11846 "MachineIndependent/glslang_tab.cpp"
+#line 11904 "MachineIndependent/glslang_tab.cpp"
break;
- case 645: /* spirv_execution_mode_parameter: INTCONSTANT */
-#line 4205 "MachineIndependent/glslang.y"
+ case 648: /* spirv_execution_mode_parameter: INTCONSTANT */
+#line 4230 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).i, (yyvsp[0].lex).loc, true);
}
-#line 11854 "MachineIndependent/glslang_tab.cpp"
+#line 11912 "MachineIndependent/glslang_tab.cpp"
break;
- case 646: /* spirv_execution_mode_parameter: UINTCONSTANT */
-#line 4208 "MachineIndependent/glslang.y"
+ case 649: /* spirv_execution_mode_parameter: UINTCONSTANT */
+#line 4233 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).u, (yyvsp[0].lex).loc, true);
}
-#line 11862 "MachineIndependent/glslang_tab.cpp"
+#line 11920 "MachineIndependent/glslang_tab.cpp"
break;
- case 647: /* spirv_execution_mode_parameter: BOOLCONSTANT */
-#line 4211 "MachineIndependent/glslang.y"
+ case 650: /* spirv_execution_mode_parameter: BOOLCONSTANT */
+#line 4236 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).b, (yyvsp[0].lex).loc, true);
}
-#line 11870 "MachineIndependent/glslang_tab.cpp"
+#line 11928 "MachineIndependent/glslang_tab.cpp"
break;
- case 648: /* spirv_execution_mode_parameter: STRING_LITERAL */
-#line 4214 "MachineIndependent/glslang.y"
+ case 651: /* spirv_execution_mode_parameter: STRING_LITERAL */
+#line 4239 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).string, (yyvsp[0].lex).loc, true);
}
-#line 11878 "MachineIndependent/glslang_tab.cpp"
+#line 11936 "MachineIndependent/glslang_tab.cpp"
break;
- case 649: /* spirv_execution_mode_id_parameter_list: constant_expression */
-#line 4219 "MachineIndependent/glslang.y"
+ case 652: /* spirv_execution_mode_id_parameter_list: constant_expression */
+#line 4244 "MachineIndependent/glslang.y"
{
if ((yyvsp[0].interm.intermTypedNode)->getBasicType() != EbtFloat &&
(yyvsp[0].interm.intermTypedNode)->getBasicType() != EbtInt &&
@@ -11888,11 +11946,11 @@ yyreduce:
parseContext.error((yyvsp[0].interm.intermTypedNode)->getLoc(), "this type not allowed", (yyvsp[0].interm.intermTypedNode)->getType().getBasicString(), "");
(yyval.interm.intermNode) = parseContext.intermediate.makeAggregate((yyvsp[0].interm.intermTypedNode));
}
-#line 11892 "MachineIndependent/glslang_tab.cpp"
+#line 11950 "MachineIndependent/glslang_tab.cpp"
break;
- case 650: /* spirv_execution_mode_id_parameter_list: spirv_execution_mode_id_parameter_list COMMA constant_expression */
-#line 4228 "MachineIndependent/glslang.y"
+ case 653: /* spirv_execution_mode_id_parameter_list: spirv_execution_mode_id_parameter_list COMMA constant_expression */
+#line 4253 "MachineIndependent/glslang.y"
{
if ((yyvsp[0].interm.intermTypedNode)->getBasicType() != EbtFloat &&
(yyvsp[0].interm.intermTypedNode)->getBasicType() != EbtInt &&
@@ -11902,156 +11960,156 @@ yyreduce:
parseContext.error((yyvsp[0].interm.intermTypedNode)->getLoc(), "this type not allowed", (yyvsp[0].interm.intermTypedNode)->getType().getBasicString(), "");
(yyval.interm.intermNode) = parseContext.intermediate.growAggregate((yyvsp[-2].interm.intermNode), (yyvsp[0].interm.intermTypedNode));
}
-#line 11906 "MachineIndependent/glslang_tab.cpp"
+#line 11964 "MachineIndependent/glslang_tab.cpp"
break;
- case 651: /* spirv_storage_class_qualifier: SPIRV_STORAGE_CLASS LEFT_PAREN INTCONSTANT RIGHT_PAREN */
-#line 4239 "MachineIndependent/glslang.y"
+ case 654: /* spirv_storage_class_qualifier: SPIRV_STORAGE_CLASS LEFT_PAREN INTCONSTANT RIGHT_PAREN */
+#line 4264 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[-3].lex).loc);
(yyval.interm.type).qualifier.storage = EvqSpirvStorageClass;
(yyval.interm.type).qualifier.spirvStorageClass = (yyvsp[-1].lex).i;
}
-#line 11916 "MachineIndependent/glslang_tab.cpp"
+#line 11974 "MachineIndependent/glslang_tab.cpp"
break;
- case 652: /* spirv_storage_class_qualifier: SPIRV_STORAGE_CLASS LEFT_PAREN spirv_requirements_list COMMA INTCONSTANT RIGHT_PAREN */
-#line 4244 "MachineIndependent/glslang.y"
+ case 655: /* spirv_storage_class_qualifier: SPIRV_STORAGE_CLASS LEFT_PAREN spirv_requirements_list COMMA INTCONSTANT RIGHT_PAREN */
+#line 4269 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[-5].lex).loc);
parseContext.intermediate.insertSpirvRequirement((yyvsp[-3].interm.spirvReq));
(yyval.interm.type).qualifier.storage = EvqSpirvStorageClass;
(yyval.interm.type).qualifier.spirvStorageClass = (yyvsp[-1].lex).i;
}
-#line 11927 "MachineIndependent/glslang_tab.cpp"
+#line 11985 "MachineIndependent/glslang_tab.cpp"
break;
- case 653: /* spirv_decorate_qualifier: SPIRV_DECORATE LEFT_PAREN INTCONSTANT RIGHT_PAREN */
-#line 4252 "MachineIndependent/glslang.y"
+ case 656: /* spirv_decorate_qualifier: SPIRV_DECORATE LEFT_PAREN INTCONSTANT RIGHT_PAREN */
+#line 4277 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[-3].lex).loc);
(yyval.interm.type).qualifier.setSpirvDecorate((yyvsp[-1].lex).i);
}
-#line 11936 "MachineIndependent/glslang_tab.cpp"
+#line 11994 "MachineIndependent/glslang_tab.cpp"
break;
- case 654: /* spirv_decorate_qualifier: SPIRV_DECORATE LEFT_PAREN spirv_requirements_list COMMA INTCONSTANT RIGHT_PAREN */
-#line 4256 "MachineIndependent/glslang.y"
+ case 657: /* spirv_decorate_qualifier: SPIRV_DECORATE LEFT_PAREN spirv_requirements_list COMMA INTCONSTANT RIGHT_PAREN */
+#line 4281 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[-5].lex).loc);
parseContext.intermediate.insertSpirvRequirement((yyvsp[-3].interm.spirvReq));
(yyval.interm.type).qualifier.setSpirvDecorate((yyvsp[-1].lex).i);
}
-#line 11946 "MachineIndependent/glslang_tab.cpp"
+#line 12004 "MachineIndependent/glslang_tab.cpp"
break;
- case 655: /* spirv_decorate_qualifier: SPIRV_DECORATE LEFT_PAREN INTCONSTANT COMMA spirv_decorate_parameter_list RIGHT_PAREN */
-#line 4261 "MachineIndependent/glslang.y"
+ case 658: /* spirv_decorate_qualifier: SPIRV_DECORATE LEFT_PAREN INTCONSTANT COMMA spirv_decorate_parameter_list RIGHT_PAREN */
+#line 4286 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[-5].lex).loc);
(yyval.interm.type).qualifier.setSpirvDecorate((yyvsp[-3].lex).i, (yyvsp[-1].interm.intermNode)->getAsAggregate());
}
-#line 11955 "MachineIndependent/glslang_tab.cpp"
+#line 12013 "MachineIndependent/glslang_tab.cpp"
break;
- case 656: /* spirv_decorate_qualifier: SPIRV_DECORATE LEFT_PAREN spirv_requirements_list COMMA INTCONSTANT COMMA spirv_decorate_parameter_list RIGHT_PAREN */
-#line 4265 "MachineIndependent/glslang.y"
+ case 659: /* spirv_decorate_qualifier: SPIRV_DECORATE LEFT_PAREN spirv_requirements_list COMMA INTCONSTANT COMMA spirv_decorate_parameter_list RIGHT_PAREN */
+#line 4290 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[-7].lex).loc);
parseContext.intermediate.insertSpirvRequirement((yyvsp[-5].interm.spirvReq));
(yyval.interm.type).qualifier.setSpirvDecorate((yyvsp[-3].lex).i, (yyvsp[-1].interm.intermNode)->getAsAggregate());
}
-#line 11965 "MachineIndependent/glslang_tab.cpp"
+#line 12023 "MachineIndependent/glslang_tab.cpp"
break;
- case 657: /* spirv_decorate_qualifier: SPIRV_DECORATE_ID LEFT_PAREN INTCONSTANT COMMA spirv_decorate_id_parameter_list RIGHT_PAREN */
-#line 4270 "MachineIndependent/glslang.y"
+ case 660: /* spirv_decorate_qualifier: SPIRV_DECORATE_ID LEFT_PAREN INTCONSTANT COMMA spirv_decorate_id_parameter_list RIGHT_PAREN */
+#line 4295 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[-5].lex).loc);
(yyval.interm.type).qualifier.setSpirvDecorateId((yyvsp[-3].lex).i, (yyvsp[-1].interm.intermNode)->getAsAggregate());
}
-#line 11974 "MachineIndependent/glslang_tab.cpp"
+#line 12032 "MachineIndependent/glslang_tab.cpp"
break;
- case 658: /* spirv_decorate_qualifier: SPIRV_DECORATE_ID LEFT_PAREN spirv_requirements_list COMMA INTCONSTANT COMMA spirv_decorate_id_parameter_list RIGHT_PAREN */
-#line 4274 "MachineIndependent/glslang.y"
+ case 661: /* spirv_decorate_qualifier: SPIRV_DECORATE_ID LEFT_PAREN spirv_requirements_list COMMA INTCONSTANT COMMA spirv_decorate_id_parameter_list RIGHT_PAREN */
+#line 4299 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[-7].lex).loc);
parseContext.intermediate.insertSpirvRequirement((yyvsp[-5].interm.spirvReq));
(yyval.interm.type).qualifier.setSpirvDecorateId((yyvsp[-3].lex).i, (yyvsp[-1].interm.intermNode)->getAsAggregate());
}
-#line 11984 "MachineIndependent/glslang_tab.cpp"
+#line 12042 "MachineIndependent/glslang_tab.cpp"
break;
- case 659: /* spirv_decorate_qualifier: SPIRV_DECORATE_STRING LEFT_PAREN INTCONSTANT COMMA spirv_decorate_string_parameter_list RIGHT_PAREN */
-#line 4279 "MachineIndependent/glslang.y"
+ case 662: /* spirv_decorate_qualifier: SPIRV_DECORATE_STRING LEFT_PAREN INTCONSTANT COMMA spirv_decorate_string_parameter_list RIGHT_PAREN */
+#line 4304 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[-5].lex).loc);
(yyval.interm.type).qualifier.setSpirvDecorateString((yyvsp[-3].lex).i, (yyvsp[-1].interm.intermNode)->getAsAggregate());
}
-#line 11993 "MachineIndependent/glslang_tab.cpp"
+#line 12051 "MachineIndependent/glslang_tab.cpp"
break;
- case 660: /* spirv_decorate_qualifier: SPIRV_DECORATE_STRING LEFT_PAREN spirv_requirements_list COMMA INTCONSTANT COMMA spirv_decorate_string_parameter_list RIGHT_PAREN */
-#line 4283 "MachineIndependent/glslang.y"
+ case 663: /* spirv_decorate_qualifier: SPIRV_DECORATE_STRING LEFT_PAREN spirv_requirements_list COMMA INTCONSTANT COMMA spirv_decorate_string_parameter_list RIGHT_PAREN */
+#line 4308 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[-7].lex).loc);
parseContext.intermediate.insertSpirvRequirement((yyvsp[-5].interm.spirvReq));
(yyval.interm.type).qualifier.setSpirvDecorateString((yyvsp[-3].lex).i, (yyvsp[-1].interm.intermNode)->getAsAggregate());
}
-#line 12003 "MachineIndependent/glslang_tab.cpp"
+#line 12061 "MachineIndependent/glslang_tab.cpp"
break;
- case 661: /* spirv_decorate_parameter_list: spirv_decorate_parameter */
-#line 4290 "MachineIndependent/glslang.y"
+ case 664: /* spirv_decorate_parameter_list: spirv_decorate_parameter */
+#line 4315 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = parseContext.intermediate.makeAggregate((yyvsp[0].interm.intermNode));
}
-#line 12011 "MachineIndependent/glslang_tab.cpp"
+#line 12069 "MachineIndependent/glslang_tab.cpp"
break;
- case 662: /* spirv_decorate_parameter_list: spirv_decorate_parameter_list COMMA spirv_decorate_parameter */
-#line 4293 "MachineIndependent/glslang.y"
+ case 665: /* spirv_decorate_parameter_list: spirv_decorate_parameter_list COMMA spirv_decorate_parameter */
+#line 4318 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = parseContext.intermediate.growAggregate((yyvsp[-2].interm.intermNode), (yyvsp[0].interm.intermNode));
}
-#line 12019 "MachineIndependent/glslang_tab.cpp"
+#line 12077 "MachineIndependent/glslang_tab.cpp"
break;
- case 663: /* spirv_decorate_parameter: FLOATCONSTANT */
-#line 4298 "MachineIndependent/glslang.y"
+ case 666: /* spirv_decorate_parameter: FLOATCONSTANT */
+#line 4323 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).d, EbtFloat, (yyvsp[0].lex).loc, true);
}
-#line 12027 "MachineIndependent/glslang_tab.cpp"
+#line 12085 "MachineIndependent/glslang_tab.cpp"
break;
- case 664: /* spirv_decorate_parameter: INTCONSTANT */
-#line 4301 "MachineIndependent/glslang.y"
+ case 667: /* spirv_decorate_parameter: INTCONSTANT */
+#line 4326 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).i, (yyvsp[0].lex).loc, true);
}
-#line 12035 "MachineIndependent/glslang_tab.cpp"
+#line 12093 "MachineIndependent/glslang_tab.cpp"
break;
- case 665: /* spirv_decorate_parameter: UINTCONSTANT */
-#line 4304 "MachineIndependent/glslang.y"
+ case 668: /* spirv_decorate_parameter: UINTCONSTANT */
+#line 4329 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).u, (yyvsp[0].lex).loc, true);
}
-#line 12043 "MachineIndependent/glslang_tab.cpp"
+#line 12101 "MachineIndependent/glslang_tab.cpp"
break;
- case 666: /* spirv_decorate_parameter: BOOLCONSTANT */
-#line 4307 "MachineIndependent/glslang.y"
+ case 669: /* spirv_decorate_parameter: BOOLCONSTANT */
+#line 4332 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).b, (yyvsp[0].lex).loc, true);
}
-#line 12051 "MachineIndependent/glslang_tab.cpp"
+#line 12109 "MachineIndependent/glslang_tab.cpp"
break;
- case 667: /* spirv_decorate_id_parameter_list: constant_expression */
-#line 4312 "MachineIndependent/glslang.y"
+ case 670: /* spirv_decorate_id_parameter_list: constant_expression */
+#line 4337 "MachineIndependent/glslang.y"
{
if ((yyvsp[0].interm.intermTypedNode)->getBasicType() != EbtFloat &&
(yyvsp[0].interm.intermTypedNode)->getBasicType() != EbtInt &&
@@ -12060,11 +12118,11 @@ yyreduce:
parseContext.error((yyvsp[0].interm.intermTypedNode)->getLoc(), "this type not allowed", (yyvsp[0].interm.intermTypedNode)->getType().getBasicString(), "");
(yyval.interm.intermNode) = parseContext.intermediate.makeAggregate((yyvsp[0].interm.intermTypedNode));
}
-#line 12064 "MachineIndependent/glslang_tab.cpp"
+#line 12122 "MachineIndependent/glslang_tab.cpp"
break;
- case 668: /* spirv_decorate_id_parameter_list: spirv_decorate_id_parameter_list COMMA constant_expression */
-#line 4320 "MachineIndependent/glslang.y"
+ case 671: /* spirv_decorate_id_parameter_list: spirv_decorate_id_parameter_list COMMA constant_expression */
+#line 4345 "MachineIndependent/glslang.y"
{
if ((yyvsp[0].interm.intermTypedNode)->getBasicType() != EbtFloat &&
(yyvsp[0].interm.intermTypedNode)->getBasicType() != EbtInt &&
@@ -12073,139 +12131,139 @@ yyreduce:
parseContext.error((yyvsp[0].interm.intermTypedNode)->getLoc(), "this type not allowed", (yyvsp[0].interm.intermTypedNode)->getType().getBasicString(), "");
(yyval.interm.intermNode) = parseContext.intermediate.growAggregate((yyvsp[-2].interm.intermNode), (yyvsp[0].interm.intermTypedNode));
}
-#line 12077 "MachineIndependent/glslang_tab.cpp"
+#line 12135 "MachineIndependent/glslang_tab.cpp"
break;
- case 669: /* spirv_decorate_string_parameter_list: STRING_LITERAL */
-#line 4330 "MachineIndependent/glslang.y"
+ case 672: /* spirv_decorate_string_parameter_list: STRING_LITERAL */
+#line 4355 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = parseContext.intermediate.makeAggregate(
parseContext.intermediate.addConstantUnion((yyvsp[0].lex).string, (yyvsp[0].lex).loc, true));
}
-#line 12086 "MachineIndependent/glslang_tab.cpp"
+#line 12144 "MachineIndependent/glslang_tab.cpp"
break;
- case 670: /* spirv_decorate_string_parameter_list: spirv_decorate_string_parameter_list COMMA STRING_LITERAL */
-#line 4334 "MachineIndependent/glslang.y"
+ case 673: /* spirv_decorate_string_parameter_list: spirv_decorate_string_parameter_list COMMA STRING_LITERAL */
+#line 4359 "MachineIndependent/glslang.y"
{
(yyval.interm.intermNode) = parseContext.intermediate.growAggregate((yyvsp[-2].interm.intermNode), parseContext.intermediate.addConstantUnion((yyvsp[0].lex).string, (yyvsp[0].lex).loc, true));
}
-#line 12094 "MachineIndependent/glslang_tab.cpp"
+#line 12152 "MachineIndependent/glslang_tab.cpp"
break;
- case 671: /* spirv_type_specifier: SPIRV_TYPE LEFT_PAREN spirv_instruction_qualifier_list COMMA spirv_type_parameter_list RIGHT_PAREN */
-#line 4339 "MachineIndependent/glslang.y"
+ case 674: /* spirv_type_specifier: SPIRV_TYPE LEFT_PAREN spirv_instruction_qualifier_list COMMA spirv_type_parameter_list RIGHT_PAREN */
+#line 4364 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[-5].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).setSpirvType(*(yyvsp[-3].interm.spirvInst), (yyvsp[-1].interm.spirvTypeParams));
}
-#line 12103 "MachineIndependent/glslang_tab.cpp"
+#line 12161 "MachineIndependent/glslang_tab.cpp"
break;
- case 672: /* spirv_type_specifier: SPIRV_TYPE LEFT_PAREN spirv_requirements_list COMMA spirv_instruction_qualifier_list COMMA spirv_type_parameter_list RIGHT_PAREN */
-#line 4343 "MachineIndependent/glslang.y"
+ case 675: /* spirv_type_specifier: SPIRV_TYPE LEFT_PAREN spirv_requirements_list COMMA spirv_instruction_qualifier_list COMMA spirv_type_parameter_list RIGHT_PAREN */
+#line 4368 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[-7].lex).loc, parseContext.symbolTable.atGlobalLevel());
parseContext.intermediate.insertSpirvRequirement((yyvsp[-5].interm.spirvReq));
(yyval.interm.type).setSpirvType(*(yyvsp[-3].interm.spirvInst), (yyvsp[-1].interm.spirvTypeParams));
}
-#line 12113 "MachineIndependent/glslang_tab.cpp"
+#line 12171 "MachineIndependent/glslang_tab.cpp"
break;
- case 673: /* spirv_type_specifier: SPIRV_TYPE LEFT_PAREN spirv_instruction_qualifier_list RIGHT_PAREN */
-#line 4348 "MachineIndependent/glslang.y"
+ case 676: /* spirv_type_specifier: SPIRV_TYPE LEFT_PAREN spirv_instruction_qualifier_list RIGHT_PAREN */
+#line 4373 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[-3].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).setSpirvType(*(yyvsp[-1].interm.spirvInst));
}
-#line 12122 "MachineIndependent/glslang_tab.cpp"
+#line 12180 "MachineIndependent/glslang_tab.cpp"
break;
- case 674: /* spirv_type_specifier: SPIRV_TYPE LEFT_PAREN spirv_requirements_list COMMA spirv_instruction_qualifier_list RIGHT_PAREN */
-#line 4352 "MachineIndependent/glslang.y"
+ case 677: /* spirv_type_specifier: SPIRV_TYPE LEFT_PAREN spirv_requirements_list COMMA spirv_instruction_qualifier_list RIGHT_PAREN */
+#line 4377 "MachineIndependent/glslang.y"
{
(yyval.interm.type).init((yyvsp[-5].lex).loc, parseContext.symbolTable.atGlobalLevel());
parseContext.intermediate.insertSpirvRequirement((yyvsp[-3].interm.spirvReq));
(yyval.interm.type).setSpirvType(*(yyvsp[-1].interm.spirvInst));
}
-#line 12132 "MachineIndependent/glslang_tab.cpp"
+#line 12190 "MachineIndependent/glslang_tab.cpp"
break;
- case 675: /* spirv_type_parameter_list: spirv_type_parameter */
-#line 4359 "MachineIndependent/glslang.y"
+ case 678: /* spirv_type_parameter_list: spirv_type_parameter */
+#line 4384 "MachineIndependent/glslang.y"
{
(yyval.interm.spirvTypeParams) = (yyvsp[0].interm.spirvTypeParams);
}
-#line 12140 "MachineIndependent/glslang_tab.cpp"
+#line 12198 "MachineIndependent/glslang_tab.cpp"
break;
- case 676: /* spirv_type_parameter_list: spirv_type_parameter_list COMMA spirv_type_parameter */
-#line 4362 "MachineIndependent/glslang.y"
+ case 679: /* spirv_type_parameter_list: spirv_type_parameter_list COMMA spirv_type_parameter */
+#line 4387 "MachineIndependent/glslang.y"
{
(yyval.interm.spirvTypeParams) = parseContext.mergeSpirvTypeParameters((yyvsp[-2].interm.spirvTypeParams), (yyvsp[0].interm.spirvTypeParams));
}
-#line 12148 "MachineIndependent/glslang_tab.cpp"
+#line 12206 "MachineIndependent/glslang_tab.cpp"
break;
- case 677: /* spirv_type_parameter: constant_expression */
-#line 4367 "MachineIndependent/glslang.y"
+ case 680: /* spirv_type_parameter: constant_expression */
+#line 4392 "MachineIndependent/glslang.y"
{
(yyval.interm.spirvTypeParams) = parseContext.makeSpirvTypeParameters((yyvsp[0].interm.intermTypedNode)->getLoc(), (yyvsp[0].interm.intermTypedNode)->getAsConstantUnion());
}
-#line 12156 "MachineIndependent/glslang_tab.cpp"
+#line 12214 "MachineIndependent/glslang_tab.cpp"
break;
- case 678: /* spirv_instruction_qualifier: SPIRV_INSTRUCTION LEFT_PAREN spirv_instruction_qualifier_list RIGHT_PAREN */
-#line 4372 "MachineIndependent/glslang.y"
+ case 681: /* spirv_instruction_qualifier: SPIRV_INSTRUCTION LEFT_PAREN spirv_instruction_qualifier_list RIGHT_PAREN */
+#line 4397 "MachineIndependent/glslang.y"
{
(yyval.interm.spirvInst) = (yyvsp[-1].interm.spirvInst);
}
-#line 12164 "MachineIndependent/glslang_tab.cpp"
+#line 12222 "MachineIndependent/glslang_tab.cpp"
break;
- case 679: /* spirv_instruction_qualifier: SPIRV_INSTRUCTION LEFT_PAREN spirv_requirements_list COMMA spirv_instruction_qualifier_list RIGHT_PAREN */
-#line 4375 "MachineIndependent/glslang.y"
+ case 682: /* spirv_instruction_qualifier: SPIRV_INSTRUCTION LEFT_PAREN spirv_requirements_list COMMA spirv_instruction_qualifier_list RIGHT_PAREN */
+#line 4400 "MachineIndependent/glslang.y"
{
parseContext.intermediate.insertSpirvRequirement((yyvsp[-3].interm.spirvReq));
(yyval.interm.spirvInst) = (yyvsp[-1].interm.spirvInst);
}
-#line 12173 "MachineIndependent/glslang_tab.cpp"
+#line 12231 "MachineIndependent/glslang_tab.cpp"
break;
- case 680: /* spirv_instruction_qualifier_list: spirv_instruction_qualifier_id */
-#line 4381 "MachineIndependent/glslang.y"
+ case 683: /* spirv_instruction_qualifier_list: spirv_instruction_qualifier_id */
+#line 4406 "MachineIndependent/glslang.y"
{
(yyval.interm.spirvInst) = (yyvsp[0].interm.spirvInst);
}
-#line 12181 "MachineIndependent/glslang_tab.cpp"
+#line 12239 "MachineIndependent/glslang_tab.cpp"
break;
- case 681: /* spirv_instruction_qualifier_list: spirv_instruction_qualifier_list COMMA spirv_instruction_qualifier_id */
-#line 4384 "MachineIndependent/glslang.y"
+ case 684: /* spirv_instruction_qualifier_list: spirv_instruction_qualifier_list COMMA spirv_instruction_qualifier_id */
+#line 4409 "MachineIndependent/glslang.y"
{
(yyval.interm.spirvInst) = parseContext.mergeSpirvInstruction((yyvsp[-1].lex).loc, (yyvsp[-2].interm.spirvInst), (yyvsp[0].interm.spirvInst));
}
-#line 12189 "MachineIndependent/glslang_tab.cpp"
+#line 12247 "MachineIndependent/glslang_tab.cpp"
break;
- case 682: /* spirv_instruction_qualifier_id: IDENTIFIER EQUAL STRING_LITERAL */
-#line 4389 "MachineIndependent/glslang.y"
+ case 685: /* spirv_instruction_qualifier_id: IDENTIFIER EQUAL STRING_LITERAL */
+#line 4414 "MachineIndependent/glslang.y"
{
(yyval.interm.spirvInst) = parseContext.makeSpirvInstruction((yyvsp[-1].lex).loc, *(yyvsp[-2].lex).string, *(yyvsp[0].lex).string);
}
-#line 12197 "MachineIndependent/glslang_tab.cpp"
+#line 12255 "MachineIndependent/glslang_tab.cpp"
break;
- case 683: /* spirv_instruction_qualifier_id: IDENTIFIER EQUAL INTCONSTANT */
-#line 4392 "MachineIndependent/glslang.y"
+ case 686: /* spirv_instruction_qualifier_id: IDENTIFIER EQUAL INTCONSTANT */
+#line 4417 "MachineIndependent/glslang.y"
{
(yyval.interm.spirvInst) = parseContext.makeSpirvInstruction((yyvsp[-1].lex).loc, *(yyvsp[-2].lex).string, (yyvsp[0].lex).i);
}
-#line 12205 "MachineIndependent/glslang_tab.cpp"
+#line 12263 "MachineIndependent/glslang_tab.cpp"
break;
-#line 12209 "MachineIndependent/glslang_tab.cpp"
+#line 12267 "MachineIndependent/glslang_tab.cpp"
default: break;
}
@@ -12430,5 +12488,5 @@ yyreturn:
return yyresult;
}
-#line 4397 "MachineIndependent/glslang.y"
+#line 4422 "MachineIndependent/glslang.y"
diff --git a/thirdparty/glslang/glslang/MachineIndependent/glslang_tab.cpp.h b/thirdparty/glslang/glslang/MachineIndependent/glslang_tab.cpp.h
index 596a10e6d9..18cef4688e 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/glslang_tab.cpp.h
+++ b/thirdparty/glslang/glslang/MachineIndependent/glslang_tab.cpp.h
@@ -501,11 +501,14 @@ extern int yydebug;
SHADERCALLCOHERENT = 702, /* SHADERCALLCOHERENT */
NOPERSPECTIVE = 703, /* NOPERSPECTIVE */
EXPLICITINTERPAMD = 704, /* EXPLICITINTERPAMD */
- PERVERTEXNV = 705, /* PERVERTEXNV */
- PERPRIMITIVENV = 706, /* PERPRIMITIVENV */
- PERVIEWNV = 707, /* PERVIEWNV */
- PERTASKNV = 708, /* PERTASKNV */
- PRECISE = 709 /* PRECISE */
+ PERVERTEXEXT = 705, /* PERVERTEXEXT */
+ PERVERTEXNV = 706, /* PERVERTEXNV */
+ PERPRIMITIVENV = 707, /* PERPRIMITIVENV */
+ PERVIEWNV = 708, /* PERVIEWNV */
+ PERTASKNV = 709, /* PERTASKNV */
+ PERPRIMITIVEEXT = 710, /* PERPRIMITIVEEXT */
+ TASKPAYLOADWORKGROUPEXT = 711, /* TASKPAYLOADWORKGROUPEXT */
+ PRECISE = 712 /* PRECISE */
};
typedef enum yytokentype yytoken_kind_t;
#endif
@@ -553,7 +556,7 @@ union YYSTYPE
glslang::TArraySizes* typeParameters;
} interm;
-#line 557 "MachineIndependent/glslang_tab.cpp.h"
+#line 560 "MachineIndependent/glslang_tab.cpp.h"
};
typedef union YYSTYPE YYSTYPE;
diff --git a/thirdparty/glslang/glslang/MachineIndependent/intermOut.cpp b/thirdparty/glslang/glslang/MachineIndependent/intermOut.cpp
index d8a3aab5d0..987556128b 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/intermOut.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/intermOut.cpp
@@ -665,6 +665,8 @@ bool TOutputTraverser::visitUnary(TVisit /* visit */, TIntermUnary* node)
case EOpConstructReference: out.debug << "Construct reference type"; break;
+ case EOpDeclare: out.debug << "Declare"; break;
+
#ifndef GLSLANG_WEB
case EOpSpirvInst: out.debug << "spirv_instruction"; break;
#endif
@@ -692,6 +694,7 @@ bool TOutputTraverser::visitAggregate(TVisit /* visit */, TIntermAggregate* node
switch (node->getOp()) {
case EOpSequence: out.debug << "Sequence\n"; return true;
+ case EOpScope: out.debug << "Scope\n"; return true;
case EOpLinkerObjects: out.debug << "Linker Objects\n"; return true;
case EOpComma: out.debug << "Comma"; break;
case EOpFunction: out.debug << "Function Definition: " << node->getName(); break;
@@ -1068,6 +1071,8 @@ bool TOutputTraverser::visitAggregate(TVisit /* visit */, TIntermAggregate* node
case EOpExecuteCallableNV: out.debug << "executeCallableNV"; break;
case EOpExecuteCallableKHR: out.debug << "executeCallableKHR"; break;
case EOpWritePackedPrimitiveIndices4x8NV: out.debug << "writePackedPrimitiveIndices4x8NV"; break;
+ case EOpEmitMeshTasksEXT: out.debug << "EmitMeshTasksEXT"; break;
+ case EOpSetMeshOutputsEXT: out.debug << "SetMeshOutputsEXT"; break;
case EOpRayQueryInitialize: out.debug << "rayQueryInitializeEXT"; break;
case EOpRayQueryTerminate: out.debug << "rayQueryTerminateEXT"; break;
@@ -1107,7 +1112,7 @@ bool TOutputTraverser::visitAggregate(TVisit /* visit */, TIntermAggregate* node
default: out.debug.message(EPrefixError, "Bad aggregation op");
}
- if (node->getOp() != EOpSequence && node->getOp() != EOpParameters)
+ if (node->getOp() != EOpSequence && node->getOp() != EOpScope && node->getOp() != EOpParameters)
out.debug << " (" << node->getCompleteString() << ")";
out.debug << "\n";
@@ -1522,12 +1527,12 @@ void TIntermediate::output(TInfoSink& infoSink, bool tree)
infoSink.debug << "interlock ordering = " << TQualifier::getInterlockOrderingString(interlockOrdering) << "\n";
break;
- case EShLangMeshNV:
+ case EShLangMesh:
infoSink.debug << "max_vertices = " << vertices << "\n";
infoSink.debug << "max_primitives = " << primitives << "\n";
infoSink.debug << "output primitive = " << TQualifier::getGeometryString(outputPrimitive) << "\n";
// Fall through
- case EShLangTaskNV:
+ case EShLangTask:
// Fall through
case EShLangCompute:
infoSink.debug << "local_size = (" << localSize[0] << ", " << localSize[1] << ", " << localSize[2] << ")\n";
diff --git a/thirdparty/glslang/glslang/MachineIndependent/iomapper.cpp b/thirdparty/glslang/glslang/MachineIndependent/iomapper.cpp
index a3c53f505d..4250e92da6 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/iomapper.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/iomapper.cpp
@@ -203,11 +203,7 @@ struct TResolverUniformAdaptor {
inline void operator()(std::pair<const TString, TVarEntryInfo>& entKey) {
TVarEntryInfo& ent = entKey.second;
- ent.newLocation = -1;
- ent.newComponent = -1;
- ent.newBinding = -1;
- ent.newSet = -1;
- ent.newIndex = -1;
+ ent.clearNewAssignments();
const bool isValid = resolver.validateBinding(stage, ent);
if (isValid) {
resolver.resolveSet(ent.stage, ent);
@@ -281,11 +277,7 @@ struct TResolverInOutAdaptor {
inline void operator()(std::pair<const TString, TVarEntryInfo>& entKey)
{
TVarEntryInfo& ent = entKey.second;
- ent.newLocation = -1;
- ent.newComponent = -1;
- ent.newBinding = -1;
- ent.newSet = -1;
- ent.newIndex = -1;
+ ent.clearNewAssignments();
const bool isValid = resolver.validateInOut(ent.stage, ent);
if (isValid) {
resolver.resolveInOutLocation(stage, ent);
@@ -1670,6 +1662,10 @@ bool TGlslIoMapper::doMap(TIoMapResolver* resolver, TInfoSink& infoSink) {
if (size <= int(autoPushConstantMaxSize)) {
qualifier.setBlockStorage(EbsPushConstant);
qualifier.layoutPacking = autoPushConstantBlockPacking;
+ // Push constants don't have set/binding etc. decorations, remove those.
+ qualifier.layoutSet = TQualifier::layoutSetEnd;
+ at->second.clearNewAssignments();
+
upgraded = true;
}
}
@@ -1677,10 +1673,14 @@ bool TGlslIoMapper::doMap(TIoMapResolver* resolver, TInfoSink& infoSink) {
// If it's been upgraded to push_constant, then remove it from the uniformVector
// so it doesn't get a set/binding assigned to it.
if (upgraded) {
- auto at = std::find_if(uniformVector.begin(), uniformVector.end(),
- [this](const TVarLivePair& p) { return p.first == autoPushConstantBlockName; });
- if (at != uniformVector.end())
- uniformVector.erase(at);
+ while (1) {
+ auto at = std::find_if(uniformVector.begin(), uniformVector.end(),
+ [this](const TVarLivePair& p) { return p.first == autoPushConstantBlockName; });
+ if (at != uniformVector.end())
+ uniformVector.erase(at);
+ else
+ break;
+ }
}
}
for (size_t stage = 0; stage < EShLangCount; stage++) {
diff --git a/thirdparty/glslang/glslang/MachineIndependent/iomapper.h b/thirdparty/glslang/glslang/MachineIndependent/iomapper.h
index c43864e3aa..ba7bc3bbc7 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/iomapper.h
+++ b/thirdparty/glslang/glslang/MachineIndependent/iomapper.h
@@ -61,6 +61,15 @@ struct TVarEntryInfo {
int newComponent;
int newIndex;
EShLanguage stage;
+
+ void clearNewAssignments() {
+ newBinding = -1;
+ newSet = -1;
+ newLocation = -1;
+ newComponent = -1;
+ newIndex = -1;
+ }
+
struct TOrderById {
inline bool operator()(const TVarEntryInfo& l, const TVarEntryInfo& r) { return l.id < r.id; }
};
diff --git a/thirdparty/glslang/glslang/MachineIndependent/linkValidate.cpp b/thirdparty/glslang/glslang/MachineIndependent/linkValidate.cpp
index 620be97c97..acc512fd4f 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/linkValidate.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/linkValidate.cpp
@@ -55,22 +55,28 @@ namespace glslang {
//
// Link-time error emitter.
//
-void TIntermediate::error(TInfoSink& infoSink, const char* message)
+void TIntermediate::error(TInfoSink& infoSink, const char* message, EShLanguage unitStage)
{
#ifndef GLSLANG_WEB
infoSink.info.prefix(EPrefixError);
- infoSink.info << "Linking " << StageName(language) << " stage: " << message << "\n";
+ if (unitStage < EShLangCount)
+ infoSink.info << "Linking " << StageName(getStage()) << " and " << StageName(unitStage) << " stages: " << message << "\n";
+ else
+ infoSink.info << "Linking " << StageName(language) << " stage: " << message << "\n";
#endif
++numErrors;
}
// Link-time warning.
-void TIntermediate::warn(TInfoSink& infoSink, const char* message)
+void TIntermediate::warn(TInfoSink& infoSink, const char* message, EShLanguage unitStage)
{
#ifndef GLSLANG_WEB
infoSink.info.prefix(EPrefixWarning);
- infoSink.info << "Linking " << StageName(language) << " stage: " << message << "\n";
+ if (unitStage < EShLangCount)
+ infoSink.info << "Linking " << StageName(language) << " and " << StageName(unitStage) << " stages: " << message << "\n";
+ else
+ infoSink.info << "Linking " << StageName(language) << " stage: " << message << "\n";
#endif
}
@@ -114,7 +120,7 @@ void TIntermediate::mergeUniformObjects(TInfoSink& infoSink, TIntermediate& unit
}
//
-// do error checking on the shader boundary in / out vars
+// do error checking on the shader boundary in / out vars
//
void TIntermediate::checkStageIO(TInfoSink& infoSink, TIntermediate& unit) {
if (unit.treeRoot == nullptr || treeRoot == nullptr)
@@ -206,7 +212,7 @@ void TIntermediate::mergeModes(TInfoSink& infoSink, TIntermediate& unit)
if (vertices == TQualifier::layoutNotSet)
vertices = unit.vertices;
else if (unit.vertices != TQualifier::layoutNotSet && vertices != unit.vertices) {
- if (language == EShLangGeometry || language == EShLangMeshNV)
+ if (language == EShLangGeometry || language == EShLangMesh)
error(infoSink, "Contradictory layout max_vertices values");
else if (language == EShLangTessControl)
error(infoSink, "Contradictory layout vertices values");
@@ -216,7 +222,7 @@ void TIntermediate::mergeModes(TInfoSink& infoSink, TIntermediate& unit)
if (primitives == TQualifier::layoutNotSet)
primitives = unit.primitives;
else if (primitives != unit.primitives) {
- if (language == EShLangMeshNV)
+ if (language == EShLangMesh)
error(infoSink, "Contradictory layout max_primitives values");
else
assert(0);
@@ -313,6 +319,7 @@ void TIntermediate::mergeModes(TInfoSink& infoSink, TIntermediate& unit)
MERGE_TRUE(autoMapLocations);
MERGE_TRUE(invertY);
MERGE_TRUE(dxPositionW);
+ MERGE_TRUE(debugInfo);
MERGE_TRUE(flattenUniformArrays);
MERGE_TRUE(useUnknownFormat);
MERGE_TRUE(hlslOffsets);
@@ -580,9 +587,6 @@ void TIntermediate::mergeGlobalUniformBlocks(TInfoSink& infoSink, TIntermediate&
}
void TIntermediate::mergeBlockDefinitions(TInfoSink& infoSink, TIntermSymbol* block, TIntermSymbol* unitBlock, TIntermediate* unit) {
- if (block->getType() == unitBlock->getType()) {
- return;
- }
if (block->getType().getTypeName() != unitBlock->getType().getTypeName() ||
block->getType().getBasicType() != unitBlock->getType().getBasicType() ||
@@ -629,44 +633,42 @@ void TIntermediate::mergeBlockDefinitions(TInfoSink& infoSink, TIntermSymbol* bl
}
}
- TType unitType;
- unitType.shallowCopy(unitBlock->getType());
-
// update symbol node in unit tree,
// and other nodes that may reference it
class TMergeBlockTraverser : public TIntermTraverser {
public:
- TMergeBlockTraverser(const glslang::TType &type, const glslang::TType& unitType,
- glslang::TIntermediate& unit,
- const std::map<unsigned int, unsigned int>& memberIdxUpdates) :
- newType(type), unitType(unitType), unit(unit), memberIndexUpdates(memberIdxUpdates)
- { }
- virtual ~TMergeBlockTraverser() { }
-
- const glslang::TType& newType; // type with modifications
- const glslang::TType& unitType; // copy of original type
- glslang::TIntermediate& unit; // intermediate that is being updated
- const std::map<unsigned int, unsigned int>& memberIndexUpdates;
-
- virtual void visitSymbol(TIntermSymbol* symbol)
+ TMergeBlockTraverser(const TIntermSymbol* newSym)
+ : newSymbol(newSym), newType(nullptr), unit(nullptr), memberIndexUpdates(nullptr)
+ {
+ }
+ TMergeBlockTraverser(const TIntermSymbol* newSym, const glslang::TType* unitType, glslang::TIntermediate* unit,
+ const std::map<unsigned int, unsigned int>* memberIdxUpdates)
+ : TIntermTraverser(false, true), newSymbol(newSym), newType(unitType), unit(unit), memberIndexUpdates(memberIdxUpdates)
{
- glslang::TType& symType = symbol->getWritableType();
+ }
+ virtual ~TMergeBlockTraverser() {}
- if (symType == unitType) {
- // each symbol node has a local copy of the unitType
- // if merging involves changing properties that aren't shared objects
- // they should be updated in all instances
+ const TIntermSymbol* newSymbol;
+ const glslang::TType* newType; // shallow copy of the new type
+ glslang::TIntermediate* unit; // intermediate that is being updated
+ const std::map<unsigned int, unsigned int>* memberIndexUpdates;
- // e.g. the struct list is a ptr to an object, so it can be updated
- // once, outside the traverser
- //*symType.getWritableStruct() = *newType.getStruct();
+ virtual void visitSymbol(TIntermSymbol* symbol)
+ {
+ if (newSymbol->getAccessName() == symbol->getAccessName() &&
+ newSymbol->getQualifier().getBlockStorage() == symbol->getQualifier().getBlockStorage()) {
+ // Each symbol node may have a local copy of the block structure.
+ // Update those structures to match the new one post-merge
+ *(symbol->getWritableType().getWritableStruct()) = *(newSymbol->getType().getStruct());
}
-
}
virtual bool visitBinary(TVisit, glslang::TIntermBinary* node)
{
- if (node->getOp() == EOpIndexDirectStruct && node->getLeft()->getType() == unitType) {
+ if (!unit || !newType || !memberIndexUpdates || memberIndexUpdates->empty())
+ return true;
+
+ if (node->getOp() == EOpIndexDirectStruct && node->getLeft()->getType() == *newType) {
// this is a dereference to a member of the block since the
// member list changed, need to update this to point to the
// right index
@@ -674,8 +676,8 @@ void TIntermediate::mergeBlockDefinitions(TInfoSink& infoSink, TIntermSymbol* bl
glslang::TIntermConstantUnion* constNode = node->getRight()->getAsConstantUnion();
unsigned int memberIdx = constNode->getConstArray()[0].getUConst();
- unsigned int newIdx = memberIndexUpdates.at(memberIdx);
- TIntermTyped* newConstNode = unit.addConstantUnion(newIdx, node->getRight()->getLoc());
+ unsigned int newIdx = memberIndexUpdates->at(memberIdx);
+ TIntermTyped* newConstNode = unit->addConstantUnion(newIdx, node->getRight()->getLoc());
node->setRight(newConstNode);
delete constNode;
@@ -684,10 +686,20 @@ void TIntermediate::mergeBlockDefinitions(TInfoSink& infoSink, TIntermSymbol* bl
}
return true;
}
- } finalLinkTraverser(block->getType(), unitType, *unit, memberIndexUpdates);
+ };
+
+ // 'this' may have symbols that are using the old block structure, so traverse the tree to update those
+ // in 'visitSymbol'
+ TMergeBlockTraverser finalLinkTraverser(block);
+ getTreeRoot()->traverse(&finalLinkTraverser);
- // update the tree to use the new type
- unit->getTreeRoot()->traverse(&finalLinkTraverser);
+ // The 'unit' intermediate needs the block structures update, but also structure entry indices
+ // may have changed from the old block to the new one that it was merged into, so update those
+ // in 'visitBinary'
+ TType newType;
+ newType.shallowCopy(block->getType());
+ TMergeBlockTraverser unitFinalLinkTraverser(block, &newType, unit, &memberIndexUpdates);
+ unit->getTreeRoot()->traverse(&unitFinalLinkTraverser);
// update the member list
(*unitMemberList) = (*memberList);
@@ -819,6 +831,10 @@ void TIntermediate::mergeErrorCheck(TInfoSink& infoSink, const TIntermSymbol& sy
#if !defined(GLSLANG_WEB) && !defined(GLSLANG_ANGLE)
bool crossStage = getStage() != unitStage;
bool writeTypeComparison = false;
+ bool errorReported = false;
+ bool printQualifiers = false;
+ bool printPrecision = false;
+ bool printType = false;
// Types have to match
{
@@ -850,11 +866,48 @@ void TIntermediate::mergeErrorCheck(TInfoSink& infoSink, const TIntermSymbol& sy
(symbol.getType().isUnsizedArray() || unitSymbol.getType().isUnsizedArray()));
}
- if (!symbol.getType().sameElementType(unitSymbol.getType()) ||
- !symbol.getType().sameTypeParameters(unitSymbol.getType()) ||
- !arraysMatch ) {
+ int lpidx = -1;
+ int rpidx = -1;
+ if (!symbol.getType().sameElementType(unitSymbol.getType(), &lpidx, &rpidx)) {
+ if (lpidx >= 0 && rpidx >= 0) {
+ error(infoSink, "Member names and types must match:", unitStage);
+ infoSink.info << " Block: " << symbol.getType().getTypeName() << "\n";
+ infoSink.info << " " << StageName(getStage()) << " stage: \""
+ << (*symbol.getType().getStruct())[lpidx].type->getCompleteString(true, false, false, true,
+ (*symbol.getType().getStruct())[lpidx].type->getFieldName()) << "\"\n";
+ infoSink.info << " " << StageName(unitStage) << " stage: \""
+ << (*unitSymbol.getType().getStruct())[rpidx].type->getCompleteString(true, false, false, true,
+ (*unitSymbol.getType().getStruct())[rpidx].type->getFieldName()) << "\"\n";
+ errorReported = true;
+ } else if (lpidx >= 0 && rpidx == -1) {
+ TString errmsg = StageName(getStage());
+ errmsg.append(" block member has no corresponding member in ").append(StageName(unitStage)).append(" block:");
+ error(infoSink, errmsg.c_str(), unitStage);
+ infoSink.info << " " << StageName(getStage()) << " stage: Block: " << symbol.getType().getTypeName() << ", Member: "
+ << (*symbol.getType().getStruct())[lpidx].type->getFieldName() << "\n";
+ infoSink.info << " " << StageName(unitStage) << " stage: Block: " << unitSymbol.getType().getTypeName() << ", Member: n/a \n";
+ errorReported = true;
+ } else if (lpidx == -1 && rpidx >= 0) {
+ TString errmsg = StageName(unitStage);
+ errmsg.append(" block member has no corresponding member in ").append(StageName(getStage())).append(" block:");
+ error(infoSink, errmsg.c_str(), unitStage);
+ infoSink.info << " " << StageName(unitStage) << " stage: Block: " << unitSymbol.getType().getTypeName() << ", Member: "
+ << (*unitSymbol.getType().getStruct())[rpidx].type->getFieldName() << "\n";
+ infoSink.info << " " << StageName(getStage()) << " stage: Block: " << symbol.getType().getTypeName() << ", Member: n/a \n";
+ errorReported = true;
+ } else {
+ error(infoSink, "Types must match:", unitStage);
+ writeTypeComparison = true;
+ printType = true;
+ }
+ } else if (!arraysMatch) {
+ error(infoSink, "Array sizes must be compatible:", unitStage);
writeTypeComparison = true;
- error(infoSink, "Types must match:");
+ printType = true;
+ } else if (!symbol.getType().sameTypeParameters(unitSymbol.getType())) {
+ error(infoSink, "Type parameters must match:", unitStage);
+ writeTypeComparison = true;
+ printType = true;
}
}
@@ -875,13 +928,35 @@ void TIntermediate::mergeErrorCheck(TInfoSink& infoSink, const TIntermSymbol& sy
}
const TQualifier& qualifier = (*symbol.getType().getStruct())[li].type->getQualifier();
const TQualifier & unitQualifier = (*unitSymbol.getType().getStruct())[ri].type->getQualifier();
- if (qualifier.layoutMatrix != unitQualifier.layoutMatrix ||
- qualifier.layoutOffset != unitQualifier.layoutOffset ||
- qualifier.layoutAlign != unitQualifier.layoutAlign ||
- qualifier.layoutLocation != unitQualifier.layoutLocation ||
- qualifier.layoutComponent != unitQualifier.layoutComponent) {
- error(infoSink, "Interface block member layout qualifiers must match:");
- writeTypeComparison = true;
+ bool layoutQualifierError = false;
+ if (qualifier.layoutMatrix != unitQualifier.layoutMatrix) {
+ error(infoSink, "Interface block member layout matrix qualifier must match:", unitStage);
+ layoutQualifierError = true;
+ }
+ if (qualifier.layoutOffset != unitQualifier.layoutOffset) {
+ error(infoSink, "Interface block member layout offset qualifier must match:", unitStage);
+ layoutQualifierError = true;
+ }
+ if (qualifier.layoutAlign != unitQualifier.layoutAlign) {
+ error(infoSink, "Interface block member layout align qualifier must match:", unitStage);
+ layoutQualifierError = true;
+ }
+ if (qualifier.layoutLocation != unitQualifier.layoutLocation) {
+ error(infoSink, "Interface block member layout location qualifier must match:", unitStage);
+ layoutQualifierError = true;
+ }
+ if (qualifier.layoutComponent != unitQualifier.layoutComponent) {
+ error(infoSink, "Interface block member layout component qualifier must match:", unitStage);
+ layoutQualifierError = true;
+ }
+ if (layoutQualifierError) {
+ infoSink.info << " " << StageName(getStage()) << " stage: Block: " << symbol.getType().getTypeName() << ", Member: "
+ << (*symbol.getType().getStruct())[li].type->getFieldName() << " \""
+ << (*symbol.getType().getStruct())[li].type->getCompleteString(true, true, false, false) << "\"\n";
+ infoSink.info << " " << StageName(unitStage) << " stage: Block: " << unitSymbol.getType().getTypeName() << ", Member: "
+ << (*unitSymbol.getType().getStruct())[ri].type->getFieldName() << " \""
+ << (*unitSymbol.getType().getStruct())[ri].type->getCompleteString(true, true, false, false) << "\"\n";
+ errorReported = true;
}
++li;
++ri;
@@ -895,8 +970,9 @@ void TIntermediate::mergeErrorCheck(TInfoSink& infoSink, const TIntermSymbol& sy
// Qualifiers have to (almost) match
// Storage...
if (!isInOut && symbol.getQualifier().storage != unitSymbol.getQualifier().storage) {
- error(infoSink, "Storage qualifiers must match:");
+ error(infoSink, "Storage qualifiers must match:", unitStage);
writeTypeComparison = true;
+ printQualifiers = true;
}
// Uniform and buffer blocks must either both have an instance name, or
@@ -904,37 +980,40 @@ void TIntermediate::mergeErrorCheck(TInfoSink& infoSink, const TIntermSymbol& sy
if (symbol.getQualifier().isUniformOrBuffer() &&
(IsAnonymous(symbol.getName()) != IsAnonymous(unitSymbol.getName()))) {
error(infoSink, "Matched Uniform or Storage blocks must all be anonymous,"
- " or all be named:");
+ " or all be named:", unitStage);
writeTypeComparison = true;
}
if (symbol.getQualifier().storage == unitSymbol.getQualifier().storage &&
(IsAnonymous(symbol.getName()) != IsAnonymous(unitSymbol.getName()) ||
(!IsAnonymous(symbol.getName()) && symbol.getName() != unitSymbol.getName()))) {
- warn(infoSink, "Matched shader interfaces are using different instance names.");
+ warn(infoSink, "Matched shader interfaces are using different instance names.", unitStage);
writeTypeComparison = true;
}
// Precision...
if (!isInOut && symbol.getQualifier().precision != unitSymbol.getQualifier().precision) {
- error(infoSink, "Precision qualifiers must match:");
+ error(infoSink, "Precision qualifiers must match:", unitStage);
writeTypeComparison = true;
+ printPrecision = true;
}
// Invariance...
if (! crossStage && symbol.getQualifier().invariant != unitSymbol.getQualifier().invariant) {
- error(infoSink, "Presence of invariant qualifier must match:");
+ error(infoSink, "Presence of invariant qualifier must match:", unitStage);
writeTypeComparison = true;
+ printQualifiers = true;
}
// Precise...
if (! crossStage && symbol.getQualifier().isNoContraction() != unitSymbol.getQualifier().isNoContraction()) {
- error(infoSink, "Presence of precise qualifier must match:");
+ error(infoSink, "Presence of precise qualifier must match:", unitStage);
writeTypeComparison = true;
+ printPrecision = true;
}
// Auxiliary and interpolation...
- // "interpolation qualification (e.g., flat) and auxiliary qualification (e.g. centroid) may differ.
+ // "interpolation qualification (e.g., flat) and auxiliary qualification (e.g. centroid) may differ.
// These mismatches are allowed between any pair of stages ...
// those provided in the fragment shader supersede those provided in previous stages."
if (!crossStage &&
@@ -944,57 +1023,137 @@ void TIntermediate::mergeErrorCheck(TInfoSink& infoSink, const TIntermSymbol& sy
symbol.getQualifier().isSample()!= unitSymbol.getQualifier().isSample() ||
symbol.getQualifier().isPatch() != unitSymbol.getQualifier().isPatch() ||
symbol.getQualifier().isNonPerspective() != unitSymbol.getQualifier().isNonPerspective())) {
- error(infoSink, "Interpolation and auxiliary storage qualifiers must match:");
+ error(infoSink, "Interpolation and auxiliary storage qualifiers must match:", unitStage);
writeTypeComparison = true;
+ printQualifiers = true;
}
// Memory...
- if (symbol.getQualifier().coherent != unitSymbol.getQualifier().coherent ||
- symbol.getQualifier().devicecoherent != unitSymbol.getQualifier().devicecoherent ||
- symbol.getQualifier().queuefamilycoherent != unitSymbol.getQualifier().queuefamilycoherent ||
- symbol.getQualifier().workgroupcoherent != unitSymbol.getQualifier().workgroupcoherent ||
- symbol.getQualifier().subgroupcoherent != unitSymbol.getQualifier().subgroupcoherent ||
- symbol.getQualifier().shadercallcoherent!= unitSymbol.getQualifier().shadercallcoherent ||
- symbol.getQualifier().nonprivate != unitSymbol.getQualifier().nonprivate ||
- symbol.getQualifier().volatil != unitSymbol.getQualifier().volatil ||
- symbol.getQualifier().restrict != unitSymbol.getQualifier().restrict ||
- symbol.getQualifier().readonly != unitSymbol.getQualifier().readonly ||
- symbol.getQualifier().writeonly != unitSymbol.getQualifier().writeonly) {
- error(infoSink, "Memory qualifiers must match:");
- writeTypeComparison = true;
+ bool memoryQualifierError = false;
+ if (symbol.getQualifier().coherent != unitSymbol.getQualifier().coherent) {
+ error(infoSink, "Memory coherent qualifier must match:", unitStage);
+ memoryQualifierError = true;
+ }
+ if (symbol.getQualifier().devicecoherent != unitSymbol.getQualifier().devicecoherent) {
+ error(infoSink, "Memory devicecoherent qualifier must match:", unitStage);
+ memoryQualifierError = true;
+ }
+ if (symbol.getQualifier().queuefamilycoherent != unitSymbol.getQualifier().queuefamilycoherent) {
+ error(infoSink, "Memory queuefamilycoherent qualifier must match:", unitStage);
+ memoryQualifierError = true;
+ }
+ if (symbol.getQualifier().workgroupcoherent != unitSymbol.getQualifier().workgroupcoherent) {
+ error(infoSink, "Memory workgroupcoherent qualifier must match:", unitStage);
+ memoryQualifierError = true;
+ }
+ if (symbol.getQualifier().subgroupcoherent != unitSymbol.getQualifier().subgroupcoherent) {
+ error(infoSink, "Memory subgroupcoherent qualifier must match:", unitStage);
+ memoryQualifierError = true;
+ }
+ if (symbol.getQualifier().shadercallcoherent != unitSymbol.getQualifier().shadercallcoherent) {
+ error(infoSink, "Memory shadercallcoherent qualifier must match:", unitStage);
+ memoryQualifierError = true;
+ }
+ if (symbol.getQualifier().nonprivate != unitSymbol.getQualifier().nonprivate) {
+ error(infoSink, "Memory nonprivate qualifier must match:", unitStage);
+ memoryQualifierError = true;
+ }
+ if (symbol.getQualifier().volatil != unitSymbol.getQualifier().volatil) {
+ error(infoSink, "Memory volatil qualifier must match:", unitStage);
+ memoryQualifierError = true;
+ }
+ if (symbol.getQualifier().restrict != unitSymbol.getQualifier().restrict) {
+ error(infoSink, "Memory restrict qualifier must match:", unitStage);
+ memoryQualifierError = true;
+ }
+ if (symbol.getQualifier().readonly != unitSymbol.getQualifier().readonly) {
+ error(infoSink, "Memory readonly qualifier must match:", unitStage);
+ memoryQualifierError = true;
+ }
+ if (symbol.getQualifier().writeonly != unitSymbol.getQualifier().writeonly) {
+ error(infoSink, "Memory writeonly qualifier must match:", unitStage);
+ memoryQualifierError = true;
+ }
+ if (memoryQualifierError) {
+ writeTypeComparison = true;
+ printQualifiers = true;
}
// Layouts...
// TODO: 4.4 enhanced layouts: Generalize to include offset/align: current spec
// requires separate user-supplied offset from actual computed offset, but
// current implementation only has one offset.
- if (symbol.getQualifier().layoutMatrix != unitSymbol.getQualifier().layoutMatrix ||
- symbol.getQualifier().layoutPacking != unitSymbol.getQualifier().layoutPacking ||
- (symbol.getQualifier().hasLocation() && unitSymbol.getQualifier().hasLocation() && symbol.getQualifier().layoutLocation != unitSymbol.getQualifier().layoutLocation) ||
- symbol.getQualifier().layoutComponent != unitSymbol.getQualifier().layoutComponent ||
- symbol.getQualifier().layoutIndex != unitSymbol.getQualifier().layoutIndex ||
- (symbol.getQualifier().hasBinding() && unitSymbol.getQualifier().hasBinding() && symbol.getQualifier().layoutBinding != unitSymbol.getQualifier().layoutBinding) ||
- (symbol.getQualifier().hasBinding() && (symbol.getQualifier().layoutOffset != unitSymbol.getQualifier().layoutOffset))) {
- error(infoSink, "Layout qualification must match:");
+ bool layoutQualifierError = false;
+ if (symbol.getQualifier().layoutMatrix != unitSymbol.getQualifier().layoutMatrix) {
+ error(infoSink, "Layout matrix qualifier must match:", unitStage);
+ layoutQualifierError = true;
+ }
+ if (symbol.getQualifier().layoutPacking != unitSymbol.getQualifier().layoutPacking) {
+ error(infoSink, "Layout packing qualifier must match:", unitStage);
+ layoutQualifierError = true;
+ }
+ if (symbol.getQualifier().hasLocation() && unitSymbol.getQualifier().hasLocation() && symbol.getQualifier().layoutLocation != unitSymbol.getQualifier().layoutLocation) {
+ error(infoSink, "Layout location qualifier must match:", unitStage);
+ layoutQualifierError = true;
+ }
+ if (symbol.getQualifier().layoutComponent != unitSymbol.getQualifier().layoutComponent) {
+ error(infoSink, "Layout component qualifier must match:", unitStage);
+ layoutQualifierError = true;
+ }
+ if (symbol.getQualifier().layoutIndex != unitSymbol.getQualifier().layoutIndex) {
+ error(infoSink, "Layout index qualifier must match:", unitStage);
+ layoutQualifierError = true;
+ }
+ if (symbol.getQualifier().hasBinding() && unitSymbol.getQualifier().hasBinding() && symbol.getQualifier().layoutBinding != unitSymbol.getQualifier().layoutBinding) {
+ error(infoSink, "Layout binding qualifier must match:", unitStage);
+ layoutQualifierError = true;
+ }
+ if (symbol.getQualifier().hasBinding() && (symbol.getQualifier().layoutOffset != unitSymbol.getQualifier().layoutOffset)) {
+ error(infoSink, "Layout offset qualifier must match:", unitStage);
+ layoutQualifierError = true;
+ }
+ if (layoutQualifierError) {
writeTypeComparison = true;
+ printQualifiers = true;
}
// Initializers have to match, if both are present, and if we don't already know the types don't match
- if (! writeTypeComparison) {
+ if (! writeTypeComparison && ! errorReported) {
if (! symbol.getConstArray().empty() && ! unitSymbol.getConstArray().empty()) {
if (symbol.getConstArray() != unitSymbol.getConstArray()) {
- error(infoSink, "Initializers must match:");
+ error(infoSink, "Initializers must match:", unitStage);
infoSink.info << " " << symbol.getName() << "\n";
}
}
}
if (writeTypeComparison) {
- infoSink.info << " " << symbol.getName() << ": \"" << symbol.getType().getCompleteString() << "\" versus ";
- if (symbol.getName() != unitSymbol.getName())
- infoSink.info << unitSymbol.getName() << ": ";
-
- infoSink.info << "\"" << unitSymbol.getType().getCompleteString() << "\"\n";
+ if (symbol.getType().getBasicType() == EbtBlock && unitSymbol.getType().getBasicType() == EbtBlock &&
+ symbol.getType().getStruct() && unitSymbol.getType().getStruct()) {
+ if (printType) {
+ infoSink.info << " " << StageName(getStage()) << " stage: \"" << symbol.getType().getCompleteString(true, printQualifiers, printPrecision,
+ printType, symbol.getName(), symbol.getType().getTypeName()) << "\"\n";
+ infoSink.info << " " << StageName(unitStage) << " stage: \"" << unitSymbol.getType().getCompleteString(true, printQualifiers, printPrecision,
+ printType, unitSymbol.getName(), unitSymbol.getType().getTypeName()) << "\"\n";
+ } else {
+ infoSink.info << " " << StageName(getStage()) << " stage: Block: " << symbol.getType().getTypeName() << " Instance: " << symbol.getName()
+ << ": \"" << symbol.getType().getCompleteString(true, printQualifiers, printPrecision, printType) << "\"\n";
+ infoSink.info << " " << StageName(unitStage) << " stage: Block: " << unitSymbol.getType().getTypeName() << " Instance: " << unitSymbol.getName()
+ << ": \"" << unitSymbol.getType().getCompleteString(true, printQualifiers, printPrecision, printType) << "\"\n";
+ }
+ } else {
+ if (printType) {
+ infoSink.info << " " << StageName(getStage()) << " stage: \""
+ << symbol.getType().getCompleteString(true, printQualifiers, printPrecision, printType, symbol.getName()) << "\"\n";
+ infoSink.info << " " << StageName(unitStage) << " stage: \""
+ << unitSymbol.getType().getCompleteString(true, printQualifiers, printPrecision, printType, unitSymbol.getName()) << "\"\n";
+ } else {
+ infoSink.info << " " << StageName(getStage()) << " stage: " << symbol.getName() << " \""
+ << symbol.getType().getCompleteString(true, printQualifiers, printPrecision, printType) << "\"\n";
+ infoSink.info << " " << StageName(unitStage) << " stage: " << unitSymbol.getName() << " \""
+ << unitSymbol.getType().getCompleteString(true, printQualifiers, printPrecision, printType) << "\"\n";
+ }
+ }
}
#endif
}
@@ -1136,8 +1295,8 @@ void TIntermediate::finalCheck(TInfoSink& infoSink, bool keepUncalled)
error(infoSink, "At least one shader must specify a layout(max_vertices = value)");
break;
case EShLangFragment:
- // for GL_ARB_post_depth_coverage, EarlyFragmentTest is set automatically in
- // ParseHelper.cpp. So if we reach here, this must be GL_EXT_post_depth_coverage
+ // for GL_ARB_post_depth_coverage, EarlyFragmentTest is set automatically in
+ // ParseHelper.cpp. So if we reach here, this must be GL_EXT_post_depth_coverage
// requiring explicit early_fragment_tests
if (getPostDepthCoverage() && !getEarlyFragmentTests())
error(infoSink, "post_depth_coverage requires early_fragment_tests");
@@ -1154,7 +1313,7 @@ void TIntermediate::finalCheck(TInfoSink& infoSink, bool keepUncalled)
if (numShaderRecordBlocks > 1)
error(infoSink, "Only one shaderRecordNV buffer block is allowed per stage");
break;
- case EShLangMeshNV:
+ case EShLangMesh:
// NV_mesh_shader doesn't allow use of both single-view and per-view builtins.
if (inIoAccessed("gl_Position") && inIoAccessed("gl_PositionPerViewNV"))
error(infoSink, "Can only use one of gl_Position or gl_PositionPerViewNV");
@@ -1173,9 +1332,11 @@ void TIntermediate::finalCheck(TInfoSink& infoSink, bool keepUncalled)
if (primitives == TQualifier::layoutNotSet)
error(infoSink, "At least one shader must specify a layout(max_primitives = value)");
// fall through
- case EShLangTaskNV:
+ case EShLangTask:
if (numTaskNVBlocks > 1)
error(infoSink, "Only one taskNV interface block is allowed per shader");
+ if (numTaskEXTPayloads > 1)
+ error(infoSink, "Only single variable of type taskPayloadSharedEXT is allowed per shader");
sharedBlockCheck(infoSink);
break;
default:
@@ -2068,7 +2229,7 @@ int TIntermediate::getScalarAlignment(const TType& type, int& size, int& stride,
if (type.isVector()) {
int scalarAlign = getBaseAlignmentScalar(type, size);
-
+
size *= type.getVectorSize();
return scalarAlign;
}
@@ -2089,7 +2250,7 @@ int TIntermediate::getScalarAlignment(const TType& type, int& size, int& stride,
assert(0); // all cases should be covered above
size = 1;
- return 1;
+ return 1;
}
int TIntermediate::getMemberAlignment(const TType& type, int& size, int& stride, TLayoutPacking layoutPacking, bool rowMajor)
@@ -2179,8 +2340,8 @@ bool TIntermediate::isIoResizeArray(const TType& type, EShLanguage language) {
! type.getQualifier().patch) ||
(language == EShLangTessEvaluation && type.getQualifier().storage == EvqVaryingIn) ||
(language == EShLangFragment && type.getQualifier().storage == EvqVaryingIn &&
- type.getQualifier().pervertexNV) ||
- (language == EShLangMeshNV && type.getQualifier().storage == EvqVaryingOut &&
+ (type.getQualifier().pervertexNV || type.getQualifier().pervertexEXT)) ||
+ (language == EShLangMesh && type.getQualifier().storage == EvqVaryingOut &&
!type.getQualifier().perTaskNV));
}
#endif // not GLSLANG_WEB
diff --git a/thirdparty/glslang/glslang/MachineIndependent/localintermediate.h b/thirdparty/glslang/glslang/MachineIndependent/localintermediate.h
index a658c09c6c..e7a171cde1 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/localintermediate.h
+++ b/thirdparty/glslang/glslang/MachineIndependent/localintermediate.h
@@ -291,10 +291,13 @@ public:
numEntryPoints(0), numErrors(0), numPushConstants(0), recursive(false),
invertY(false),
dxPositionW(false),
+ enhancedMsgs(false),
+ debugInfo(false),
useStorageBuffer(false),
invariantAll(false),
nanMinMaxClamp(false),
depthReplacing(false),
+ stencilReplacing(false),
uniqueId(0),
globalUniformBlockName(""),
atomicCounterBlockName(""),
@@ -308,9 +311,9 @@ public:
useVulkanMemoryModel(false),
invocations(TQualifier::layoutNotSet), vertices(TQualifier::layoutNotSet),
inputPrimitive(ElgNone), outputPrimitive(ElgNone),
- pixelCenterInteger(false), originUpperLeft(false),
+ pixelCenterInteger(false), originUpperLeft(false),texCoordBuiltinRedeclared(false),
vertexSpacing(EvsNone), vertexOrder(EvoNone), interlockOrdering(EioNone), pointMode(false), earlyFragmentTests(false),
- postDepthCoverage(false), depthLayout(EldNone),
+ postDepthCoverage(false), earlyAndLateFragmentTestsAMD(false), depthLayout(EldNone), stencilLayout(ElsNone),
hlslFunctionality1(false),
blendEquations(0), xfbMode(false), multiStream(false),
layoutOverrideCoverage(false),
@@ -320,6 +323,7 @@ public:
primitives(TQualifier::layoutNotSet),
numTaskNVBlocks(0),
layoutPrimitiveCulling(false),
+ numTaskEXTPayloads(0),
autoMapBindings(false),
autoMapLocations(false),
flattenUniformArrays(false),
@@ -459,6 +463,12 @@ public:
const std::string& getEntryPointName() const { return entryPointName; }
const std::string& getEntryPointMangledName() const { return entryPointMangledName; }
+ void setDebugInfo(bool debuginfo)
+ {
+ debugInfo = debuginfo;
+ }
+ bool getDebugInfo() const { return debugInfo; }
+
void setInvertY(bool invert)
{
invertY = invert;
@@ -469,12 +479,18 @@ public:
void setDxPositionW(bool dxPosW)
{
- dxPositionW = dxPosW;
- if (dxPositionW)
- processes.addProcess("dx-position-w");
+ dxPositionW = dxPosW;
+ if (dxPositionW)
+ processes.addProcess("dx-position-w");
}
bool getDxPositionW() const { return dxPositionW; }
+ void setEnhancedMsgs()
+ {
+ enhancedMsgs = true;
+ }
+ bool getEnhancedMsgs() const { return enhancedMsgs && getSource() == EShSourceGlsl; }
+
#ifdef ENABLE_HLSL
void setSource(EShSource s) { source = s; }
EShSource getSource() const { return source; }
@@ -580,6 +596,8 @@ public:
bool isInvariantAll() const { return invariantAll; }
void setDepthReplacing() { depthReplacing = true; }
bool isDepthReplacing() const { return depthReplacing; }
+ void setStencilReplacing() { stencilReplacing = true; }
+ bool isStencilReplacing() const { return stencilReplacing; }
bool setLocalSize(int dim, int size)
{
if (localSizeNotDefault[dim])
@@ -629,6 +647,7 @@ public:
int getNumPushConstants() const { return 0; }
void addShaderRecordCount() { }
void addTaskNVCount() { }
+ void addTaskPayloadEXTCount() { }
void setUseVulkanMemoryModel() { }
bool usingVulkanMemoryModel() const { return false; }
bool usingPhysicalStorageBuffer() const { return false; }
@@ -746,6 +765,7 @@ public:
int getNumPushConstants() const { return numPushConstants; }
void addShaderRecordCount() { ++numShaderRecordBlocks; }
void addTaskNVCount() { ++numTaskNVBlocks; }
+ void addTaskPayloadEXTCount() { ++numTaskEXTPayloads; }
bool setInvocations(int i)
{
@@ -814,7 +834,9 @@ public:
void setPostDepthCoverage() { postDepthCoverage = true; }
bool getPostDepthCoverage() const { return postDepthCoverage; }
void setEarlyFragmentTests() { earlyFragmentTests = true; }
+ void setEarlyAndLateFragmentTestsAMD() { earlyAndLateFragmentTestsAMD = true; }
bool getEarlyFragmentTests() const { return earlyFragmentTests; }
+ bool getEarlyAndLateFragmentTestsAMD() const { return earlyAndLateFragmentTestsAMD; }
bool setDepth(TLayoutDepth d)
{
if (depthLayout != EldNone)
@@ -822,11 +844,21 @@ public:
depthLayout = d;
return true;
}
+ bool setStencil(TLayoutStencil s)
+ {
+ if (stencilLayout != ElsNone)
+ return stencilLayout == s;
+ stencilLayout = s;
+ return true;
+ }
TLayoutDepth getDepth() const { return depthLayout; }
+ TLayoutStencil getStencil() const { return stencilLayout; }
void setOriginUpperLeft() { originUpperLeft = true; }
bool getOriginUpperLeft() const { return originUpperLeft; }
void setPixelCenterInteger() { pixelCenterInteger = true; }
bool getPixelCenterInteger() const { return pixelCenterInteger; }
+ void setTexCoordRedeclared() { texCoordBuiltinRedeclared = true; }
+ bool getTexCoordRedeclared() const { return texCoordBuiltinRedeclared; }
void addBlendEquation(TBlendEquationShift b) { blendEquations |= (1 << b); }
unsigned int getBlendEquations() const { return blendEquations; }
bool setXfbBufferStride(int buffer, unsigned stride)
@@ -1031,8 +1063,8 @@ public:
protected:
TIntermSymbol* addSymbol(long long Id, const TString&, const TType&, const TConstUnionArray&, TIntermTyped* subtree, const TSourceLoc&);
- void error(TInfoSink& infoSink, const char*);
- void warn(TInfoSink& infoSink, const char*);
+ void error(TInfoSink& infoSink, const char*, EShLanguage unitStage = EShLangCount);
+ void warn(TInfoSink& infoSink, const char*, EShLanguage unitStage = EShLangCount);
void mergeCallGraphs(TInfoSink&, TIntermediate&);
void mergeModes(TInfoSink&, TIntermediate&);
void mergeTrees(TInfoSink&, TIntermediate&);
@@ -1086,10 +1118,13 @@ protected:
bool recursive;
bool invertY;
bool dxPositionW;
+ bool enhancedMsgs;
+ bool debugInfo;
bool useStorageBuffer;
bool invariantAll;
bool nanMinMaxClamp; // true if desiring min/max/clamp to favor non-NaN over NaN
bool depthReplacing;
+ bool stencilReplacing;
int localSize[3];
bool localSizeNotDefault[3];
int localSizeSpecId[3];
@@ -1114,13 +1149,16 @@ protected:
TLayoutGeometry outputPrimitive;
bool pixelCenterInteger;
bool originUpperLeft;
+ bool texCoordBuiltinRedeclared;
TVertexSpacing vertexSpacing;
TVertexOrder vertexOrder;
TInterlockOrdering interlockOrdering;
bool pointMode;
bool earlyFragmentTests;
bool postDepthCoverage;
+ bool earlyAndLateFragmentTestsAMD;
TLayoutDepth depthLayout;
+ TLayoutStencil stencilLayout;
bool hlslFunctionality1;
int blendEquations; // an 'or'ing of masks of shifts of TBlendEquationShift
bool xfbMode;
@@ -1133,6 +1171,7 @@ protected:
int primitives;
int numTaskNVBlocks;
bool layoutPrimitiveCulling;
+ int numTaskEXTPayloads;
// Base shift values
std::array<unsigned int, EResCount> shiftBinding;
@@ -1174,6 +1213,7 @@ protected:
// for callableData/callableDataIn
// set of names of statically read/written I/O that might need extra checking
std::set<TString> ioAccessed;
+
// source code of shader, useful as part of debug information
std::string sourceFile;
std::string sourceText;
diff --git a/thirdparty/glslang/glslang/OSDependent/Unix/ossource.cpp b/thirdparty/glslang/glslang/OSDependent/Unix/ossource.cpp
index 1cbd616e98..b98df9348d 100644
--- a/thirdparty/glslang/glslang/OSDependent/Unix/ossource.cpp
+++ b/thirdparty/glslang/glslang/OSDependent/Unix/ossource.cpp
@@ -57,15 +57,6 @@ namespace glslang {
//
//
-// Wrapper for Linux call to DetachThread. This is required as pthread_cleanup_push() expects
-// the cleanup routine to return void.
-//
-static void DetachThreadLinux(void *)
-{
- DetachThread();
-}
-
-//
// Thread Local Storage Operations
//
inline OS_TLSIndex PthreadKeyToTLSIndex(pthread_key_t key)
diff --git a/thirdparty/glslang/glslang/Public/ShaderLang.h b/thirdparty/glslang/glslang/Public/ShaderLang.h
index 3c0e2910d6..78dd323a2e 100644
--- a/thirdparty/glslang/glslang/Public/ShaderLang.h
+++ b/thirdparty/glslang/glslang/Public/ShaderLang.h
@@ -108,8 +108,10 @@ typedef enum {
EShLangMissNV = EShLangMiss,
EShLangCallable,
EShLangCallableNV = EShLangCallable,
- EShLangTaskNV,
- EShLangMeshNV,
+ EShLangTask,
+ EShLangTaskNV = EShLangTask,
+ EShLangMesh,
+ EShLangMeshNV = EShLangMesh,
LAST_ELEMENT_MARKER(EShLangCount),
} EShLanguage; // would be better as stage, but this is ancient now
@@ -132,8 +134,10 @@ typedef enum : unsigned {
EShLangMissNVMask = EShLangMissMask,
EShLangCallableMask = (1 << EShLangCallable),
EShLangCallableNVMask = EShLangCallableMask,
- EShLangTaskNVMask = (1 << EShLangTaskNV),
- EShLangMeshNVMask = (1 << EShLangMeshNV),
+ EShLangTaskMask = (1 << EShLangTask),
+ EShLangTaskNVMask = EShLangTaskMask,
+ EShLangMeshMask = (1 << EShLangMesh),
+ EShLangMeshNVMask = EShLangMeshMask,
LAST_ELEMENT_MARKER(EShLanguageMaskCount),
} EShLanguageMask;
@@ -264,6 +268,7 @@ enum EShMessages : unsigned {
EShMsgHlslLegalization = (1 << 12), // enable HLSL Legalization messages
EShMsgHlslDX9Compatible = (1 << 13), // enable HLSL DX9 compatible mode (for samplers and semantics)
EShMsgBuiltinSymbolTable = (1 << 14), // print the builtin symbol table
+ EShMsgEnhanced = (1 << 15), // enhanced message readability
LAST_ELEMENT_MARKER(EShMsgCount),
};
@@ -300,7 +305,7 @@ typedef struct {
//
// ShHandle held by but opaque to the driver. It is allocated,
-// managed, and de-allocated by the compiler/linker. It's contents
+// managed, and de-allocated by the compiler/linker. Its contents
// are defined by and used by the compiler and linker. For example,
// symbol table information and object code passed from the compiler
// to the linker can be stored where ShHandle points.
@@ -470,6 +475,8 @@ public:
GLSLANG_EXPORT void setSourceEntryPoint(const char* sourceEntryPointName);
GLSLANG_EXPORT void addProcesses(const std::vector<std::string>&);
GLSLANG_EXPORT void setUniqueId(unsigned long long id);
+ GLSLANG_EXPORT void setOverrideVersion(int version);
+ GLSLANG_EXPORT void setDebugInfo(bool debugInfo);
// IO resolver binding data: see comments in ShaderLang.cpp
GLSLANG_EXPORT void setShiftBinding(TResourceType res, unsigned int base);
@@ -488,6 +495,7 @@ public:
GLSLANG_EXPORT void setUniformLocationBase(int base);
GLSLANG_EXPORT void setInvertY(bool invert);
GLSLANG_EXPORT void setDxPositionW(bool dxPosW);
+ GLSLANG_EXPORT void setEnhancedMsgs();
#ifdef ENABLE_HLSL
GLSLANG_EXPORT void setHlslIoMapping(bool hlslIoMap);
GLSLANG_EXPORT void setFlattenUniformArrays(bool flatten);
@@ -522,7 +530,7 @@ public:
// See the definitions of TEnvironment, EShSource, EShLanguage,
// and EShClient for choices and more detail.
//
- // setEnvClient: The client that will be hosting the execution, and it's version.
+ // setEnvClient: The client that will be hosting the execution, and its version.
// Note 'version' is not the version of the languages involved, but
// the version of the client environment.
// Use EShClientNone and version of 0 if there is no client, e.g.
@@ -709,6 +717,9 @@ protected:
// a function in the source string can be renamed FROM this TO the name given in setEntryPoint.
std::string sourceEntryPointName;
+ // overrides #version in shader source or default version if #version isn't present
+ int overrideVersion;
+
TEnvironment environment;
friend class TProgram;
diff --git a/thirdparty/glslang/glslang/build_info.h b/thirdparty/glslang/glslang/build_info.h
index 3a445c9cb0..e711b20957 100644
--- a/thirdparty/glslang/glslang/build_info.h
+++ b/thirdparty/glslang/glslang/build_info.h
@@ -35,28 +35,28 @@
#define GLSLANG_BUILD_INFO
#define GLSLANG_VERSION_MAJOR 11
-#define GLSLANG_VERSION_MINOR 8
+#define GLSLANG_VERSION_MINOR 12
#define GLSLANG_VERSION_PATCH 0
#define GLSLANG_VERSION_FLAVOR ""
#define GLSLANG_VERSION_GREATER_THAN(major, minor, patch) \
- (((major) > GLSLANG_VERSION_MAJOR) || ((major) == GLSLANG_VERSION_MAJOR && \
- (((minor) > GLSLANG_VERSION_MINOR) || ((minor) == GLSLANG_VERSION_MINOR && \
- ((patch) > GLSLANG_VERSION_PATCH)))))
+ ((GLSLANG_VERSION_MAJOR) > (major) || ((major) == GLSLANG_VERSION_MAJOR && \
+ ((GLSLANG_VERSION_MINOR) > (minor) || ((minor) == GLSLANG_VERSION_MINOR && \
+ (GLSLANG_VERSION_PATCH) > (patch)))))
#define GLSLANG_VERSION_GREATER_OR_EQUAL_TO(major, minor, patch) \
- (((major) > GLSLANG_VERSION_MAJOR) || ((major) == GLSLANG_VERSION_MAJOR && \
- (((minor) > GLSLANG_VERSION_MINOR) || ((minor) == GLSLANG_VERSION_MINOR && \
- ((patch) >= GLSLANG_VERSION_PATCH)))))
+ ((GLSLANG_VERSION_MAJOR) > (major) || ((major) == GLSLANG_VERSION_MAJOR && \
+ ((GLSLANG_VERSION_MINOR) > (minor) || ((minor) == GLSLANG_VERSION_MINOR && \
+ (GLSLANG_VERSION_PATCH >= (patch))))))
#define GLSLANG_VERSION_LESS_THAN(major, minor, patch) \
- (((major) < GLSLANG_VERSION_MAJOR) || ((major) == GLSLANG_VERSION_MAJOR && \
- (((minor) < GLSLANG_VERSION_MINOR) || ((minor) == GLSLANG_VERSION_MINOR && \
- ((patch) < GLSLANG_VERSION_PATCH)))))
+ ((GLSLANG_VERSION_MAJOR) < (major) || ((major) == GLSLANG_VERSION_MAJOR && \
+ ((GLSLANG_VERSION_MINOR) < (minor) || ((minor) == GLSLANG_VERSION_MINOR && \
+ (GLSLANG_VERSION_PATCH) < (patch)))))
#define GLSLANG_VERSION_LESS_OR_EQUAL_TO(major, minor, patch) \
- (((major) < GLSLANG_VERSION_MAJOR) || ((major) == GLSLANG_VERSION_MAJOR && \
- (((minor) < GLSLANG_VERSION_MINOR) || ((minor) == GLSLANG_VERSION_MINOR && \
- ((patch) <= GLSLANG_VERSION_PATCH)))))
+ ((GLSLANG_VERSION_MAJOR) < (major) || ((major) == GLSLANG_VERSION_MAJOR && \
+ ((GLSLANG_VERSION_MINOR) < (minor) || ((minor) == GLSLANG_VERSION_MINOR && \
+ (GLSLANG_VERSION_PATCH <= (patch))))))
#endif // GLSLANG_BUILD_INFO
diff --git a/thirdparty/glslang/patches/unused_cleanup.diff b/thirdparty/glslang/patches/unused_cleanup.diff
deleted file mode 100644
index 3e9a9c23f9..0000000000
--- a/thirdparty/glslang/patches/unused_cleanup.diff
+++ /dev/null
@@ -1,61 +0,0 @@
-diff --git a/thirdparty/glslang/glslang/OSDependent/Unix/ossource.cpp b/thirdparty/glslang/glslang/OSDependent/Unix/ossource.cpp
-index 81da99c2c4..1cbd616e98 100644
---- a/thirdparty/glslang/glslang/OSDependent/Unix/ossource.cpp
-+++ b/thirdparty/glslang/glslang/OSDependent/Unix/ossource.cpp
-@@ -65,43 +65,6 @@ static void DetachThreadLinux(void *)
- DetachThread();
- }
-
--//
--// Registers cleanup handler, sets cancel type and state, and executes the thread specific
--// cleanup handler. This function will be called in the Standalone.cpp for regression
--// testing. When OpenGL applications are run with the driver code, Linux OS does the
--// thread cleanup.
--//
--void OS_CleanupThreadData(void)
--{
--#if defined(__ANDROID__) || defined(__Fuchsia__)
-- DetachThreadLinux(NULL);
--#else
-- int old_cancel_state, old_cancel_type;
-- void *cleanupArg = NULL;
--
-- //
-- // Set thread cancel state and push cleanup handler.
-- //
-- pthread_setcancelstate(PTHREAD_CANCEL_ENABLE, &old_cancel_state);
-- pthread_cleanup_push(DetachThreadLinux, (void *) cleanupArg);
--
-- //
-- // Put the thread in deferred cancellation mode.
-- //
-- pthread_setcanceltype(PTHREAD_CANCEL_DEFERRED, &old_cancel_type);
--
-- //
-- // Pop cleanup handler and execute it prior to unregistering the cleanup handler.
-- //
-- pthread_cleanup_pop(1);
--
-- //
-- // Restore the thread's previous cancellation mode.
-- //
-- pthread_setcanceltype(old_cancel_state, NULL);
--#endif
--}
--
- //
- // Thread Local Storage Operations
- //
-diff --git a/thirdparty/glslang/glslang/OSDependent/osinclude.h b/thirdparty/glslang/glslang/OSDependent/osinclude.h
-index 218abe4f23..fcfeff2cc4 100644
---- a/thirdparty/glslang/glslang/OSDependent/osinclude.h
-+++ b/thirdparty/glslang/glslang/OSDependent/osinclude.h
-@@ -54,8 +54,6 @@ void ReleaseGlobalLock();
-
- typedef unsigned int (*TThreadEntrypoint)(void*);
-
--void OS_CleanupThreadData(void);
--
- void OS_DumpMemoryCounters();
-
- } // end namespace glslang
diff --git a/thirdparty/graphite/include/graphite2/Font.h b/thirdparty/graphite/include/graphite2/Font.h
index fe569295a5..b0a1e36370 100644
--- a/thirdparty/graphite/include/graphite2/Font.h
+++ b/thirdparty/graphite/include/graphite2/Font.h
@@ -1,29 +1,5 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
- Alternatively, the contents of this file may be used under the terms
- of the Mozilla Public License (http://mozilla.org/MPL) or the GNU
- General Public License, as published by the Free Software Foundation,
- either version 2 of the License or (at your option) any later version.
-*/
+/* SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later */
+/* Copyright 2010, SIL International, All rights reserved. */
#pragma once
#include "graphite2/Types.h"
diff --git a/thirdparty/graphite/include/graphite2/Log.h b/thirdparty/graphite/include/graphite2/Log.h
index a5a6947fab..d83b1b90e7 100644
--- a/thirdparty/graphite/include/graphite2/Log.h
+++ b/thirdparty/graphite/include/graphite2/Log.h
@@ -1,29 +1,5 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
- Alternatively, the contents of this file may be used under the terms
- of the Mozilla Public License (http://mozilla.org/MPL) or the GNU
- General Public License, as published by the Free Software Foundation,
- either version 2 of the License or (at your option) any later version.
-*/
+/* SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later */
+/* Copyright 2010, SIL International, All rights reserved. */
#pragma once
#include <graphite2/Types.h>
diff --git a/thirdparty/graphite/include/graphite2/Segment.h b/thirdparty/graphite/include/graphite2/Segment.h
index 0e24f5d795..9969b4eb45 100644
--- a/thirdparty/graphite/include/graphite2/Segment.h
+++ b/thirdparty/graphite/include/graphite2/Segment.h
@@ -1,29 +1,5 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
- Alternatively, the contents of this file may be used under the terms
- of the Mozilla Public License (http://mozilla.org/MPL) or the GNU
- General Public License, as published by the Free Software Foundation,
- either version 2 of the License or (at your option) any later version.
-*/
+/* SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later */
+/* Copyright 2010, SIL International, All rights reserved. */
#pragma once
#include "graphite2/Types.h"
diff --git a/thirdparty/graphite/include/graphite2/Types.h b/thirdparty/graphite/include/graphite2/Types.h
index 916c91191b..32236a6195 100644
--- a/thirdparty/graphite/include/graphite2/Types.h
+++ b/thirdparty/graphite/include/graphite2/Types.h
@@ -1,29 +1,5 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
- Alternatively, the contents of this file may be used under the terms
- of the Mozilla Public License (http://mozilla.org/MPL) or the GNU
- General Public License, as published by the Free Software Foundation,
- either version 2 of the License or (at your option) any later version.
-*/
+/* SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later */
+/* Copyright 2010, SIL International, All rights reserved. */
#pragma once
#include <stddef.h>
diff --git a/thirdparty/graphite/src/CmapCache.cpp b/thirdparty/graphite/src/CmapCache.cpp
index d070019a34..ac860e9ee2 100644
--- a/thirdparty/graphite/src/CmapCache.cpp
+++ b/thirdparty/graphite/src/CmapCache.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
#include "inc/Main.h"
#include "inc/CmapCache.h"
diff --git a/thirdparty/graphite/src/Code.cpp b/thirdparty/graphite/src/Code.cpp
index ec5ab298ca..4a53ae871b 100644
--- a/thirdparty/graphite/src/Code.cpp
+++ b/thirdparty/graphite/src/Code.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
// This class represents loaded graphite stack machine code. It performs
// basic sanity checks, on the incoming code to prevent more obvious problems
// from crashing graphite.
diff --git a/thirdparty/graphite/src/Collider.cpp b/thirdparty/graphite/src/Collider.cpp
index 1929b39a58..ca36d4550a 100644
--- a/thirdparty/graphite/src/Collider.cpp
+++ b/thirdparty/graphite/src/Collider.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
#include <algorithm>
#include <limits>
#include <cmath>
diff --git a/thirdparty/graphite/src/Decompressor.cpp b/thirdparty/graphite/src/Decompressor.cpp
index 42dc9113e5..f9f1e28e5b 100644
--- a/thirdparty/graphite/src/Decompressor.cpp
+++ b/thirdparty/graphite/src/Decompressor.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2015, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2015, SIL International, All rights reserved.
+
#include <cassert>
#include "inc/Decompressor.h"
diff --git a/thirdparty/graphite/src/Face.cpp b/thirdparty/graphite/src/Face.cpp
index 3e106050d7..4dd5a780cf 100644
--- a/thirdparty/graphite/src/Face.cpp
+++ b/thirdparty/graphite/src/Face.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
#include <cstring>
#include "graphite2/Segment.h"
#include "inc/CmapCache.h"
diff --git a/thirdparty/graphite/src/FeatureMap.cpp b/thirdparty/graphite/src/FeatureMap.cpp
index 014a88fd08..992be05540 100644
--- a/thirdparty/graphite/src/FeatureMap.cpp
+++ b/thirdparty/graphite/src/FeatureMap.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
#include <cstring>
#include "inc/Main.h"
diff --git a/thirdparty/graphite/src/FileFace.cpp b/thirdparty/graphite/src/FileFace.cpp
index 7e663876a7..2e41044688 100644
--- a/thirdparty/graphite/src/FileFace.cpp
+++ b/thirdparty/graphite/src/FileFace.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2012, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2012, SIL International, All rights reserved.
+
#include <cstring>
#include "inc/FileFace.h"
diff --git a/thirdparty/graphite/src/Font.cpp b/thirdparty/graphite/src/Font.cpp
index faf3715f9d..bffb4223d5 100644
--- a/thirdparty/graphite/src/Font.cpp
+++ b/thirdparty/graphite/src/Font.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.

- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
#include "inc/Face.h"
#include "inc/Font.h"
#include "inc/GlyphCache.h"
diff --git a/thirdparty/graphite/src/GlyphCache.cpp b/thirdparty/graphite/src/GlyphCache.cpp
index 282bdc18fd..30479bdc4d 100644
--- a/thirdparty/graphite/src/GlyphCache.cpp
+++ b/thirdparty/graphite/src/GlyphCache.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2012, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2012, SIL International, All rights reserved.
+
#include "graphite2/Font.h"
#include "inc/Main.h"
@@ -44,12 +21,18 @@ namespace
// variable length structures.
template<typename W>
- class _glat_iterator : public std::iterator<std::input_iterator_tag, std::pair<sparse::key_type, sparse::mapped_type> >
+ class _glat_iterator
{
unsigned short key() const { return uint16(be::peek<W>(_e) + _n); }
unsigned int run() const { return be::peek<W>(_e+sizeof(W)); }
void advance_entry() { _n = 0; _e = _v; be::skip<W>(_v,2); }
public:
+ using iterator_category = std::input_iterator_tag;
+ using value_type = std::pair<sparse::key_type, sparse::mapped_type>;
+ using difference_type = ptrdiff_t;
+ using pointer = value_type *;
+ using reference = value_type &;
+
_glat_iterator(const void * glat=0) : _e(reinterpret_cast<const byte *>(glat)), _v(_e+2*sizeof(W)), _n(0) {}
_glat_iterator<W> & operator ++ () {
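The GlyphCache.cpp hunk above swaps the deprecated std::iterator base class for explicitly declared member type aliases, the usual C++17 migration. A minimal sketch of that idiom follows (a hypothetical counting_iterator, not graphite code), showing the five aliases a conforming input iterator is expected to expose:

#include <cstddef>
#include <iterator>

class counting_iterator    // hypothetical example type, for illustration only
{
public:
    // The aliases std::iterator used to supply are now spelled out directly.
    using iterator_category = std::input_iterator_tag;
    using value_type        = int;
    using difference_type   = std::ptrdiff_t;
    using pointer           = value_type *;
    using reference         = value_type &;

    explicit counting_iterator(int n = 0) : _n(n) {}
    value_type operator * () const { return _n; }
    counting_iterator & operator ++ () { ++_n; return *this; }
    bool operator != (const counting_iterator & o) const { return _n != o._n; }

private:
    int _n;
};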
diff --git a/thirdparty/graphite/src/GlyphFace.cpp b/thirdparty/graphite/src/GlyphFace.cpp
index bc5e63a9f0..d9408f556b 100644
--- a/thirdparty/graphite/src/GlyphFace.cpp
+++ b/thirdparty/graphite/src/GlyphFace.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.

- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
#include "inc/GlyphFace.h"
diff --git a/thirdparty/graphite/src/Intervals.cpp b/thirdparty/graphite/src/Intervals.cpp
index 0fe99a127a..b223a67f56 100644
--- a/thirdparty/graphite/src/Intervals.cpp
+++ b/thirdparty/graphite/src/Intervals.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
#include <algorithm>
#include <cmath>
#include <limits>
diff --git a/thirdparty/graphite/src/Justifier.cpp b/thirdparty/graphite/src/Justifier.cpp
index 78c11e6a51..0bb18225b9 100644
--- a/thirdparty/graphite/src/Justifier.cpp
+++ b/thirdparty/graphite/src/Justifier.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2012, SIL International, All rights reserved.

- Copyright 2012, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
#include "inc/Segment.h"
#include "graphite2/Font.h"
diff --git a/thirdparty/graphite/src/NameTable.cpp b/thirdparty/graphite/src/NameTable.cpp
index d42b7f95bd..85892f179c 100644
--- a/thirdparty/graphite/src/NameTable.cpp
+++ b/thirdparty/graphite/src/NameTable.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.

- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
#include "inc/Main.h"
#include "inc/Endian.h"
diff --git a/thirdparty/graphite/src/Pass.cpp b/thirdparty/graphite/src/Pass.cpp
index 47ae2064f7..986fed83b1 100644
--- a/thirdparty/graphite/src/Pass.cpp
+++ b/thirdparty/graphite/src/Pass.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
#include "inc/Main.h"
#include "inc/debug.h"
#include "inc/Endian.h"
@@ -194,7 +171,7 @@ bool Pass::readPass(const byte * const pass_start, size_t pass_length, size_t su
m_cPConstraint = vm::Machine::Code(true, pcCode, pcCode + pass_constraint_len,
precontext[0], be::peek<uint16>(sort_keys), *m_silf, face, PASS_TYPE_UNKNOWN);
if (e.test(!m_cPConstraint, E_OUTOFMEM)
- || e.test(m_cPConstraint.status() != Code::loaded, m_cPConstraint.status() + E_CODEFAILURE))
+ || e.test(m_cPConstraint.status() != Code::loaded, int(m_cPConstraint.status()) + E_CODEFAILURE))
return face.error(e);
face.error_context(face.error_context() - 1);
}
@@ -266,8 +243,8 @@ bool Pass::readRules(const byte * rule_map, const size_t num_entries,
r->constraint = new (m_codes+n*2-1) vm::Machine::Code(true, rc_begin, rc_end, r->preContext, r->sort, *m_silf, face, pt, &prog_pool_free);
if (e.test(!r->action || !r->constraint, E_OUTOFMEM)
- || e.test(r->action->status() != Code::loaded, r->action->status() + E_CODEFAILURE)
- || e.test(r->constraint->status() != Code::loaded, r->constraint->status() + E_CODEFAILURE)
+ || e.test(r->action->status() != Code::loaded, int(r->action->status()) + E_CODEFAILURE)
+ || e.test(r->constraint->status() != Code::loaded, int(r->constraint->status()) + E_CODEFAILURE)
|| e.test(!r->constraint->immutable(), E_MUTABLECCODE))
return face.error(e);
}
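The two Pass.cpp hunks above wrap the Code status value in int(...) before adding E_CODEFAILURE; arithmetic between two different enumeration types is deprecated in C++20, so the cast keeps the expression on plain integers. A small sketch of the pattern (hypothetical enums, not the graphite definitions):

enum load_status { loaded = 0, alloc_failed, invalid_opcode };
enum error_code  { E_NONE = 0, E_CODEFAILURE = 10 };

int combined_error(load_status s)
{
    // s + E_CODEFAILURE would mix two enumeration types (deprecated in C++20);
    // converting one operand explicitly keeps the arithmetic on plain ints.
    return int(s) + E_CODEFAILURE;
}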
diff --git a/thirdparty/graphite/src/Position.cpp b/thirdparty/graphite/src/Position.cpp
index d2fdbd4e7c..a727f61664 100644
--- a/thirdparty/graphite/src/Position.cpp
+++ b/thirdparty/graphite/src/Position.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.

- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
#include "inc/Position.h"
#include <cmath>
diff --git a/thirdparty/graphite/src/Segment.cpp b/thirdparty/graphite/src/Segment.cpp
index 62edd4250f..7bb3d132fa 100644
--- a/thirdparty/graphite/src/Segment.cpp
+++ b/thirdparty/graphite/src/Segment.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
#include "inc/UtfCodec.h"
#include <cstring>
#include <cstdlib>
diff --git a/thirdparty/graphite/src/Silf.cpp b/thirdparty/graphite/src/Silf.cpp
index 44d3c96171..51a7473627 100644
--- a/thirdparty/graphite/src/Silf.cpp
+++ b/thirdparty/graphite/src/Silf.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
#include <cstdlib>
#include "graphite2/Segment.h"
#include "inc/debug.h"
diff --git a/thirdparty/graphite/src/Slot.cpp b/thirdparty/graphite/src/Slot.cpp
index 0fdb098952..040e90d109 100644
--- a/thirdparty/graphite/src/Slot.cpp
+++ b/thirdparty/graphite/src/Slot.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
#include "inc/Segment.h"
#include "inc/Slot.h"
#include "inc/Silf.h"
diff --git a/thirdparty/graphite/src/Sparse.cpp b/thirdparty/graphite/src/Sparse.cpp
index aa43113669..746dfc9522 100644
--- a/thirdparty/graphite/src/Sparse.cpp
+++ b/thirdparty/graphite/src/Sparse.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2011, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2011, SIL International, All rights reserved.
+
#include <cassert>
#include "inc/Sparse.h"
#include "inc/bits.h"
diff --git a/thirdparty/graphite/src/TtfUtil.cpp b/thirdparty/graphite/src/TtfUtil.cpp
index 2eb46a11fb..47940112ee 100644
--- a/thirdparty/graphite/src/TtfUtil.cpp
+++ b/thirdparty/graphite/src/TtfUtil.cpp
@@ -1,39 +1,14 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
-/*--------------------------------------------------------------------*//*:Ignore this sentence.
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.

-File: TtfUtil.cpp
+/*
Responsibility: Alan Ward
Last reviewed: Not yet.
Description
Implements the methods for TtfUtil class. This file should remain portable to any C++
environment by only using standard C++ and the TTF structurs defined in Tt.h.
--------------------------------------------------------------------------------*//*:End Ignore*/
+*/
/***********************************************************************************************
diff --git a/thirdparty/graphite/src/UtfCodec.cpp b/thirdparty/graphite/src/UtfCodec.cpp
index a944bbf9d0..9156bad00f 100644
--- a/thirdparty/graphite/src/UtfCodec.cpp
+++ b/thirdparty/graphite/src/UtfCodec.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.

- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
#include "inc/UtfCodec.h"
//using namespace graphite2;
diff --git a/thirdparty/graphite/src/call_machine.cpp b/thirdparty/graphite/src/call_machine.cpp
index fcd8a0c2c1..7d9d1ef315 100644
--- a/thirdparty/graphite/src/call_machine.cpp
+++ b/thirdparty/graphite/src/call_machine.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
// This call threaded interpreter implmentation for machine.h
// Author: Tim Eves
diff --git a/thirdparty/graphite/src/direct_machine.cpp b/thirdparty/graphite/src/direct_machine.cpp
index 86206cfe37..e3a28fa536 100644
--- a/thirdparty/graphite/src/direct_machine.cpp
+++ b/thirdparty/graphite/src/direct_machine.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
// This direct threaded interpreter implmentation for machine.h
// Author: Tim Eves
diff --git a/thirdparty/graphite/src/gr_char_info.cpp b/thirdparty/graphite/src/gr_char_info.cpp
index 612f9ba694..1345fae847 100644
--- a/thirdparty/graphite/src/gr_char_info.cpp
+++ b/thirdparty/graphite/src/gr_char_info.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.

- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
#include <cassert>
#include "graphite2/Segment.h"
#include "inc/CharInfo.h"
diff --git a/thirdparty/graphite/src/gr_face.cpp b/thirdparty/graphite/src/gr_face.cpp
index baa469727b..6e80036113 100644
--- a/thirdparty/graphite/src/gr_face.cpp
+++ b/thirdparty/graphite/src/gr_face.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
#include "graphite2/Font.h"
#include "inc/Face.h"
#include "inc/FileFace.h"
diff --git a/thirdparty/graphite/src/gr_features.cpp b/thirdparty/graphite/src/gr_features.cpp
index a560e053f2..67208f14e6 100644
--- a/thirdparty/graphite/src/gr_features.cpp
+++ b/thirdparty/graphite/src/gr_features.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
#include "graphite2/Font.h"
#include "inc/Face.h"
#include "inc/FeatureMap.h"
diff --git a/thirdparty/graphite/src/gr_font.cpp b/thirdparty/graphite/src/gr_font.cpp
index 724cc83c13..fc85112e24 100644
--- a/thirdparty/graphite/src/gr_font.cpp
+++ b/thirdparty/graphite/src/gr_font.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.

- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
#include "graphite2/Font.h"
#include "inc/Font.h"
diff --git a/thirdparty/graphite/src/gr_logging.cpp b/thirdparty/graphite/src/gr_logging.cpp
index 8f1e675e62..c6f03fd616 100644
--- a/thirdparty/graphite/src/gr_logging.cpp
+++ b/thirdparty/graphite/src/gr_logging.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.

- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
#include <cstdio>
#include "graphite2/Log.h"
diff --git a/thirdparty/graphite/src/gr_segment.cpp b/thirdparty/graphite/src/gr_segment.cpp
index 7a27e9c562..7592296c04 100644
--- a/thirdparty/graphite/src/gr_segment.cpp
+++ b/thirdparty/graphite/src/gr_segment.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
#include "graphite2/Segment.h"
#include "inc/UtfCodec.h"
#include "inc/Segment.h"
diff --git a/thirdparty/graphite/src/gr_slot.cpp b/thirdparty/graphite/src/gr_slot.cpp
index a3c6b46a7f..a6adefec61 100644
--- a/thirdparty/graphite/src/gr_slot.cpp
+++ b/thirdparty/graphite/src/gr_slot.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
#include "graphite2/Segment.h"
#include "inc/Segment.h"
#include "inc/Slot.h"
diff --git a/thirdparty/graphite/src/inc/CharInfo.h b/thirdparty/graphite/src/inc/CharInfo.h
index 01e7e31ac9..0fbfefda4d 100644
--- a/thirdparty/graphite/src/inc/CharInfo.h
+++ b/thirdparty/graphite/src/inc/CharInfo.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.

- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
#pragma once
#include "inc/Main.h"
diff --git a/thirdparty/graphite/src/inc/CmapCache.h b/thirdparty/graphite/src/inc/CmapCache.h
index 7820c958b0..b18ce2929b 100644
--- a/thirdparty/graphite/src/inc/CmapCache.h
+++ b/thirdparty/graphite/src/inc/CmapCache.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.

- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
#pragma once
#include "inc/Main.h"
diff --git a/thirdparty/graphite/src/inc/Code.h b/thirdparty/graphite/src/inc/Code.h
index 3cee67c81d..24e92e429d 100644
--- a/thirdparty/graphite/src/inc/Code.h
+++ b/thirdparty/graphite/src/inc/Code.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
// This class represents loaded graphite stack machine code. It performs
// basic sanity checks, on the incoming code to prevent more obvious problems
// from crashing graphite.
diff --git a/thirdparty/graphite/src/inc/Collider.h b/thirdparty/graphite/src/inc/Collider.h
index 71e8400501..3a922660d4 100644
--- a/thirdparty/graphite/src/inc/Collider.h
+++ b/thirdparty/graphite/src/inc/Collider.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
#pragma once
#include "inc/List.h"
diff --git a/thirdparty/graphite/src/inc/Compression.h b/thirdparty/graphite/src/inc/Compression.h
index 9fe10e025d..4f5193b931 100644
--- a/thirdparty/graphite/src/inc/Compression.h
+++ b/thirdparty/graphite/src/inc/Compression.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2015, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2015, SIL International, All rights reserved.
+
#pragma once
diff --git a/thirdparty/graphite/src/inc/Decompressor.h b/thirdparty/graphite/src/inc/Decompressor.h
index 10f21b7af1..1113e004c1 100644
--- a/thirdparty/graphite/src/inc/Decompressor.h
+++ b/thirdparty/graphite/src/inc/Decompressor.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2015, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2015, SIL International, All rights reserved.
+
#pragma once
diff --git a/thirdparty/graphite/src/inc/Endian.h b/thirdparty/graphite/src/inc/Endian.h
index 56ecfd8667..7bd201f92b 100644
--- a/thirdparty/graphite/src/inc/Endian.h
+++ b/thirdparty/graphite/src/inc/Endian.h
@@ -1,30 +1,5 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2011, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
-
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2011, SIL International, All rights reserved.
/*
Description:
A set of fast template based decoders for decoding values of any C integer
diff --git a/thirdparty/graphite/src/inc/Error.h b/thirdparty/graphite/src/inc/Error.h
index 2b7ab763a2..7e14c8e754 100644
--- a/thirdparty/graphite/src/inc/Error.h
+++ b/thirdparty/graphite/src/inc/Error.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2013, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2013, SIL International, All rights reserved.
+
#pragma once
// numbers are explicitly assigned for future proofing
@@ -38,7 +15,7 @@ public:
operator bool() { return (_e != 0); }
int error() { return _e; }
void error(int e) { _e = e; }
- bool test(bool pr, int err) { return (_e = int(pr) * err); }
+ bool test(bool pr, int err) { return (_e = pr ? err : 0); }
private:
int _e;
@@ -56,7 +33,7 @@ enum errcontext {
EC_ARULEMAP = 9 // in Silf %d, pass %d, state %d
};
-enum errors {
+enum error {
E_OUTOFMEM = 1, // Out of memory
E_NOGLYPHS = 2, // There are no glyphs in the font
E_BADUPEM = 3, // The units per em for the font is bad (0)
@@ -125,7 +102,7 @@ enum errors {
E_BADJUMPCODE = 65, // Code jumps past end of op codes
E_CODEBADARGS = 66, // Code arguments exhausted
E_CODENORETURN = 67, // Missing return type op code at end of code
- E_CODENESTEDCTXT = 68, // Nested context encountered in code
+ E_CODENESTEDCTXT = 68, // Nested context encountered in code
// Compression errors
E_BADSCHEME = 69,
E_SHRINKERFAILED = 70,
diff --git a/thirdparty/graphite/src/inc/Face.h b/thirdparty/graphite/src/inc/Face.h
index 355c5aa0d3..fac7fd4b8e 100644
--- a/thirdparty/graphite/src/inc/Face.h
+++ b/thirdparty/graphite/src/inc/Face.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
#pragma once
#include <cstdio>
diff --git a/thirdparty/graphite/src/inc/FeatureMap.h b/thirdparty/graphite/src/inc/FeatureMap.h
index 0f05e941a2..a98008f738 100644
--- a/thirdparty/graphite/src/inc/FeatureMap.h
+++ b/thirdparty/graphite/src/inc/FeatureMap.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
#pragma once
#include "inc/Main.h"
#include "inc/FeatureVal.h"
diff --git a/thirdparty/graphite/src/inc/FeatureVal.h b/thirdparty/graphite/src/inc/FeatureVal.h
index cd3f93b2b5..35397f0f3b 100644
--- a/thirdparty/graphite/src/inc/FeatureVal.h
+++ b/thirdparty/graphite/src/inc/FeatureVal.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
#pragma once
#include <cstring>
#include <cassert>
diff --git a/thirdparty/graphite/src/inc/FileFace.h b/thirdparty/graphite/src/inc/FileFace.h
index 35927847f8..521942ee85 100644
--- a/thirdparty/graphite/src/inc/FileFace.h
+++ b/thirdparty/graphite/src/inc/FileFace.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2012, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2012, SIL International, All rights reserved.
+
#pragma once
//#include "inc/FeatureMap.h"
diff --git a/thirdparty/graphite/src/inc/Font.h b/thirdparty/graphite/src/inc/Font.h
index 9bc9ffb510..537ad9de0e 100644
--- a/thirdparty/graphite/src/inc/Font.h
+++ b/thirdparty/graphite/src/inc/Font.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
#pragma once
#include <cassert>
#include "graphite2/Font.h"
diff --git a/thirdparty/graphite/src/inc/GlyphCache.h b/thirdparty/graphite/src/inc/GlyphCache.h
index 7d5324e522..5f6d3eea18 100644
--- a/thirdparty/graphite/src/inc/GlyphCache.h
+++ b/thirdparty/graphite/src/inc/GlyphCache.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2012, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2012, SIL International, All rights reserved.
+
#pragma once
diff --git a/thirdparty/graphite/src/inc/GlyphFace.h b/thirdparty/graphite/src/inc/GlyphFace.h
index fc29056146..f964c0581a 100644
--- a/thirdparty/graphite/src/inc/GlyphFace.h
+++ b/thirdparty/graphite/src/inc/GlyphFace.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
#pragma once
#include "inc/Main.h"
diff --git a/thirdparty/graphite/src/inc/Intervals.h b/thirdparty/graphite/src/inc/Intervals.h
index 81d23187b6..15427d428a 100644
--- a/thirdparty/graphite/src/inc/Intervals.h
+++ b/thirdparty/graphite/src/inc/Intervals.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
#pragma once
#include <utility>
diff --git a/thirdparty/graphite/src/inc/List.h b/thirdparty/graphite/src/inc/List.h
index a3b7a77961..3a36c17a7a 100644
--- a/thirdparty/graphite/src/inc/List.h
+++ b/thirdparty/graphite/src/inc/List.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
// designed to have a limited subset of the std::vector api
#pragma once
diff --git a/thirdparty/graphite/src/inc/Machine.h b/thirdparty/graphite/src/inc/Machine.h
index b23819fb98..5ceedc296d 100644
--- a/thirdparty/graphite/src/inc/Machine.h
+++ b/thirdparty/graphite/src/inc/Machine.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
// This general interpreter interface.
// Author: Tim Eves
diff --git a/thirdparty/graphite/src/inc/Main.h b/thirdparty/graphite/src/inc/Main.h
index ebf02dd553..6a412efeca 100644
--- a/thirdparty/graphite/src/inc/Main.h
+++ b/thirdparty/graphite/src/inc/Main.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
#pragma once
#include <cstdlib>
diff --git a/thirdparty/graphite/src/inc/NameTable.h b/thirdparty/graphite/src/inc/NameTable.h
index 0fdbeb4d85..5a7913ddc5 100644
--- a/thirdparty/graphite/src/inc/NameTable.h
+++ b/thirdparty/graphite/src/inc/NameTable.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
#pragma once
#include <graphite2/Segment.h>
diff --git a/thirdparty/graphite/src/inc/Pass.h b/thirdparty/graphite/src/inc/Pass.h
index e687a87d8c..fb507e6a04 100644
--- a/thirdparty/graphite/src/inc/Pass.h
+++ b/thirdparty/graphite/src/inc/Pass.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
#pragma once
#include <cstdlib>
diff --git a/thirdparty/graphite/src/inc/Position.h b/thirdparty/graphite/src/inc/Position.h
index 510e4f4c41..554acc411e 100644
--- a/thirdparty/graphite/src/inc/Position.h
+++ b/thirdparty/graphite/src/inc/Position.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
#pragma once
namespace graphite2 {
diff --git a/thirdparty/graphite/src/inc/Rule.h b/thirdparty/graphite/src/inc/Rule.h
index 5964e003a6..09025dc799 100644
--- a/thirdparty/graphite/src/inc/Rule.h
+++ b/thirdparty/graphite/src/inc/Rule.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2011, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2011, SIL International, All rights reserved.
+
#pragma once
diff --git a/thirdparty/graphite/src/inc/Segment.h b/thirdparty/graphite/src/inc/Segment.h
index 6cf83408d4..a3a8eef648 100644
--- a/thirdparty/graphite/src/inc/Segment.h
+++ b/thirdparty/graphite/src/inc/Segment.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
#pragma once
#include "inc/Main.h"
diff --git a/thirdparty/graphite/src/inc/Silf.h b/thirdparty/graphite/src/inc/Silf.h
index edc0c3a16d..6fe2aaef1f 100644
--- a/thirdparty/graphite/src/inc/Silf.h
+++ b/thirdparty/graphite/src/inc/Silf.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
#pragma once
#include "graphite2/Font.h"
diff --git a/thirdparty/graphite/src/inc/Slot.h b/thirdparty/graphite/src/inc/Slot.h
index df39d9a3bb..0d8571d336 100644
--- a/thirdparty/graphite/src/inc/Slot.h
+++ b/thirdparty/graphite/src/inc/Slot.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
#pragma once
#include "graphite2/Types.h"
diff --git a/thirdparty/graphite/src/inc/Sparse.h b/thirdparty/graphite/src/inc/Sparse.h
index fcda890171..3c5d33ae5d 100644
--- a/thirdparty/graphite/src/inc/Sparse.h
+++ b/thirdparty/graphite/src/inc/Sparse.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2011, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2011, SIL International, All rights reserved.
+
#pragma once
#include <iterator>
#include <utility>
diff --git a/thirdparty/graphite/src/inc/TtfTypes.h b/thirdparty/graphite/src/inc/TtfTypes.h
index ae67915304..e4fd49f943 100644
--- a/thirdparty/graphite/src/inc/TtfTypes.h
+++ b/thirdparty/graphite/src/inc/TtfTypes.h
@@ -1,39 +1,14 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
-#pragma once
-/*--------------------------------------------------------------------*//*:Ignore this sentence.
-File: TtfTypes.h
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+#pragma once
+/*
Responsibility: Tim Eves
Last reviewed: Not yet.
Description:
Provides types required to represent the TTF basic types.
--------------------------------------------------------------------------------*//*:End Ignore*/
+*/
//**********************************************************************************************
diff --git a/thirdparty/graphite/src/inc/TtfUtil.h b/thirdparty/graphite/src/inc/TtfUtil.h
index 3952bc06fb..1ccab42fb8 100644
--- a/thirdparty/graphite/src/inc/TtfUtil.h
+++ b/thirdparty/graphite/src/inc/TtfUtil.h
@@ -1,39 +1,13 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
-#pragma once
-/*--------------------------------------------------------------------*//*:Ignore this sentence.
-
-File: TtfUtil.h
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+/*
Responsibility: Alan Ward
Last reviewed: Not yet.
Description:
Utility class for handling TrueType font files.
-----------------------------------------------------------------------------------------------*/
+*/
+#pragma once
#include <cstddef>
diff --git a/thirdparty/graphite/src/inc/UtfCodec.h b/thirdparty/graphite/src/inc/UtfCodec.h
index 24a343d8d9..bab652c119 100644
--- a/thirdparty/graphite/src/inc/UtfCodec.h
+++ b/thirdparty/graphite/src/inc/UtfCodec.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2011, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2011, SIL International, All rights reserved.
+
#pragma once
#include <cstdlib>
diff --git a/thirdparty/graphite/src/inc/bits.h b/thirdparty/graphite/src/inc/bits.h
index 9365986a10..168ff256d3 100644
--- a/thirdparty/graphite/src/inc/bits.h
+++ b/thirdparty/graphite/src/inc/bits.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2012, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2012, SIL International, All rights reserved.
+
#pragma once
namespace graphite2
diff --git a/thirdparty/graphite/src/inc/debug.h b/thirdparty/graphite/src/inc/debug.h
index 97175eb2cc..5b9c63872b 100644
--- a/thirdparty/graphite/src/inc/debug.h
+++ b/thirdparty/graphite/src/inc/debug.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2011, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2011, SIL International, All rights reserved.
+
// debug.h
//
// Created on: 22 Dec 2011
diff --git a/thirdparty/graphite/src/inc/json.h b/thirdparty/graphite/src/inc/json.h
index 554cd9a3d1..72011ef198 100644
--- a/thirdparty/graphite/src/inc/json.h
+++ b/thirdparty/graphite/src/inc/json.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2011, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2011, SIL International, All rights reserved.
+
// JSON pretty printer for graphite font debug output logging.
// Created on: 15 Dec 2011
// Author: Tim Eves
diff --git a/thirdparty/graphite/src/inc/locale2lcid.h b/thirdparty/graphite/src/inc/locale2lcid.h
index 25d5c0a3c8..e591c42b28 100644
--- a/thirdparty/graphite/src/inc/locale2lcid.h
+++ b/thirdparty/graphite/src/inc/locale2lcid.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
#pragma once
#include <cstring>
#include <cassert>
diff --git a/thirdparty/graphite/src/inc/opcode_table.h b/thirdparty/graphite/src/inc/opcode_table.h
index cb5acde9a4..01a84831bc 100644
--- a/thirdparty/graphite/src/inc/opcode_table.h
+++ b/thirdparty/graphite/src/inc/opcode_table.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
// This file will be pulled into and integrated into a machine implmentation
// DO NOT build directly
#pragma once
diff --git a/thirdparty/graphite/src/inc/opcodes.h b/thirdparty/graphite/src/inc/opcodes.h
index ff2f1741e2..10ba177229 100644
--- a/thirdparty/graphite/src/inc/opcodes.h
+++ b/thirdparty/graphite/src/inc/opcodes.h
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2010, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2010, SIL International, All rights reserved.
+
#pragma once
// This file will be pulled into and integrated into a machine implmentation
// DO NOT build directly and under no circumstances ever #include headers in
diff --git a/thirdparty/graphite/src/json.cpp b/thirdparty/graphite/src/json.cpp
index 25f2190f71..7990654eb8 100644
--- a/thirdparty/graphite/src/json.cpp
+++ b/thirdparty/graphite/src/json.cpp
@@ -1,29 +1,6 @@
-/* GRAPHITE2 LICENSING
-
- Copyright 2011, SIL International
- All rights reserved.
-
- This library is free software; you can redistribute it and/or modify
- it under the terms of the GNU Lesser General Public License as published
- by the Free Software Foundation; either version 2.1 of License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should also have received a copy of the GNU Lesser General Public
- License along with this library in the file named "LICENSE".
- If not, write to the Free Software Foundation, 51 Franklin Street,
- Suite 500, Boston, MA 02110-1335, USA or visit their web page on the
- internet at http://www.fsf.org/licenses/lgpl.html.
-
-Alternatively, the contents of this file may be used under the terms of the
-Mozilla Public License (http://mozilla.org/MPL) or the GNU General Public
-License, as published by the Free Software Foundation, either version 2
-of the License or (at your option) any later version.
-*/
+// SPDX-License-Identifier: MIT OR MPL-2.0 OR LGPL-2.1-or-later OR GPL-2.0-or-later
+// Copyright 2011, SIL International, All rights reserved.
+
// JSON debug logging
// Author: Tim Eves
diff --git a/thirdparty/harfbuzz/src/OT/Layout/Common/Coverage.hh b/thirdparty/harfbuzz/src/OT/Layout/Common/Coverage.hh
index eef89a2879..fbd7c642ab 100644
--- a/thirdparty/harfbuzz/src/OT/Layout/Common/Coverage.hh
+++ b/thirdparty/harfbuzz/src/OT/Layout/Common/Coverage.hh
@@ -49,7 +49,7 @@ struct Coverage
HBUINT16 format; /* Format identifier */
CoverageFormat1_3<SmallTypes> format1;
CoverageFormat2_4<SmallTypes> format2;
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
CoverageFormat1_3<MediumTypes>format3;
CoverageFormat2_4<MediumTypes>format4;
#endif
@@ -65,7 +65,7 @@ struct Coverage
{
case 1: return_trace (u.format1.sanitize (c));
case 2: return_trace (u.format2.sanitize (c));
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
case 3: return_trace (u.format3.sanitize (c));
case 4: return_trace (u.format4.sanitize (c));
#endif
@@ -74,10 +74,8 @@ struct Coverage
}
/* Has interface. */
- static constexpr unsigned SENTINEL = NOT_COVERED;
- typedef unsigned int value_t;
- value_t operator [] (hb_codepoint_t k) const { return get (k); }
- bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; }
+ unsigned operator [] (hb_codepoint_t k) const { return get (k); }
+ bool has (hb_codepoint_t k) const { return (*this)[k] != NOT_COVERED; }
/* Predicate. */
bool operator () (hb_codepoint_t k) const { return has (k); }
@@ -87,7 +85,7 @@ struct Coverage
switch (u.format) {
case 1: return u.format1.get_coverage (glyph_id);
case 2: return u.format2.get_coverage (glyph_id);
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
case 3: return u.format3.get_coverage (glyph_id);
case 4: return u.format4.get_coverage (glyph_id);
#endif
@@ -100,7 +98,7 @@ struct Coverage
switch (u.format) {
case 1: return u.format1.get_population ();
case 2: return u.format2.get_population ();
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
case 3: return u.format3.get_population ();
case 4: return u.format4.get_population ();
#endif
@@ -127,7 +125,7 @@ struct Coverage
}
u.format = count <= num_ranges * 3 ? 1 : 2;
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
if (count && last > 0xFFFFu)
u.format += 2;
#endif
@@ -136,7 +134,7 @@ struct Coverage
{
case 1: return_trace (u.format1.serialize (c, glyphs));
case 2: return_trace (u.format2.serialize (c, glyphs));
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
case 3: return_trace (u.format3.serialize (c, glyphs));
case 4: return_trace (u.format4.serialize (c, glyphs));
#endif
@@ -166,7 +164,7 @@ struct Coverage
{
case 1: return u.format1.intersects (glyphs);
case 2: return u.format2.intersects (glyphs);
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
case 3: return u.format3.intersects (glyphs);
case 4: return u.format4.intersects (glyphs);
#endif
@@ -179,7 +177,7 @@ struct Coverage
{
case 1: return u.format1.intersects_coverage (glyphs, index);
case 2: return u.format2.intersects_coverage (glyphs, index);
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
case 3: return u.format3.intersects_coverage (glyphs, index);
case 4: return u.format4.intersects_coverage (glyphs, index);
#endif
@@ -196,7 +194,7 @@ struct Coverage
{
case 1: return u.format1.collect_coverage (glyphs);
case 2: return u.format2.collect_coverage (glyphs);
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
case 3: return u.format3.collect_coverage (glyphs);
case 4: return u.format4.collect_coverage (glyphs);
#endif
@@ -212,7 +210,7 @@ struct Coverage
{
case 1: return u.format1.intersect_set (glyphs, intersect_glyphs);
case 2: return u.format2.intersect_set (glyphs, intersect_glyphs);
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
case 3: return u.format3.intersect_set (glyphs, intersect_glyphs);
case 4: return u.format4.intersect_set (glyphs, intersect_glyphs);
#endif
@@ -225,13 +223,13 @@ struct Coverage
static constexpr bool is_sorted_iterator = true;
iter_t (const Coverage &c_ = Null (Coverage))
{
- memset (this, 0, sizeof (*this));
+ hb_memset (this, 0, sizeof (*this));
format = c_.u.format;
switch (format)
{
case 1: u.format1.init (c_.u.format1); return;
case 2: u.format2.init (c_.u.format2); return;
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
case 3: u.format3.init (c_.u.format3); return;
case 4: u.format4.init (c_.u.format4); return;
#endif
@@ -244,7 +242,7 @@ struct Coverage
{
case 1: return u.format1.__more__ ();
case 2: return u.format2.__more__ ();
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
case 3: return u.format3.__more__ ();
case 4: return u.format4.__more__ ();
#endif
@@ -257,7 +255,7 @@ struct Coverage
{
case 1: u.format1.__next__ (); break;
case 2: u.format2.__next__ (); break;
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
case 3: u.format3.__next__ (); break;
case 4: u.format4.__next__ (); break;
#endif
@@ -273,7 +271,7 @@ struct Coverage
{
case 1: return u.format1.get_glyph ();
case 2: return u.format2.get_glyph ();
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
case 3: return u.format3.get_glyph ();
case 4: return u.format4.get_glyph ();
#endif
@@ -287,7 +285,7 @@ struct Coverage
{
case 1: return u.format1 != o.u.format1;
case 2: return u.format2 != o.u.format2;
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
case 3: return u.format3 != o.u.format3;
case 4: return u.format4 != o.u.format4;
#endif
@@ -302,7 +300,7 @@ struct Coverage
{
case 1: it.u.format1 = u.format1.__end__ (); break;
case 2: it.u.format2 = u.format2.__end__ (); break;
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
case 3: it.u.format3 = u.format3.__end__ (); break;
case 4: it.u.format4 = u.format4.__end__ (); break;
#endif
@@ -314,7 +312,7 @@ struct Coverage
private:
unsigned int format;
union {
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
CoverageFormat2_4<MediumTypes>::iter_t format4; /* Put this one first since it's larger; helps shut up compiler. */
CoverageFormat1_3<MediumTypes>::iter_t format3;
#endif
diff --git a/thirdparty/harfbuzz/src/OT/Layout/Common/CoverageFormat1.hh b/thirdparty/harfbuzz/src/OT/Layout/Common/CoverageFormat1.hh
index 82fd48dc50..5d68e3d15e 100644
--- a/thirdparty/harfbuzz/src/OT/Layout/Common/CoverageFormat1.hh
+++ b/thirdparty/harfbuzz/src/OT/Layout/Common/CoverageFormat1.hh
@@ -77,7 +77,14 @@ struct CoverageFormat1_3
bool intersects (const hb_set_t *glyphs) const
{
- /* TODO Speed up, using hb_set_next() and bsearch()? */
+ if (glyphArray.len > glyphs->get_population () * hb_bit_storage ((unsigned) glyphArray.len) / 2)
+ {
+ for (hb_codepoint_t g = HB_SET_VALUE_INVALID; glyphs->next (&g);)
+ if (get_coverage (g) != NOT_COVERED)
+ return true;
+ return false;
+ }
+
for (const auto& g : glyphArray.as_array ())
if (glyphs->has (g))
return true;
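The new intersects () path above picks between scanning the coverage array and iterating the glyph set, depending on which side is expected to be cheaper. A minimal standalone sketch of that cost comparison, in plain C++ with illustrative names (bit_storage, intersects) rather than HarfBuzz internals:

// Editor's sketch, not HarfBuzz API: with a sorted array of n glyphs and a set
// of m glyphs, walking the set costs about m binary searches of ~log2(n) steps
// each, while walking the array costs n set probes; pick the cheaper side.
#include <algorithm>
#include <cstdint>
#include <set>
#include <vector>

static unsigned bit_storage (unsigned v)   // number of bits needed, ~log2(v)+1
{
  unsigned n = 0;
  while (v) { n++; v >>= 1; }
  return n;
}

static bool intersects (const std::vector<uint32_t> &sorted_glyphs,
                        const std::set<uint32_t> &glyph_set)
{
  size_t n = sorted_glyphs.size ();
  if (n > glyph_set.size () * bit_storage ((unsigned) n) / 2)
  {
    for (uint32_t g : glyph_set)         // small set: binary-search each member
      if (std::binary_search (sorted_glyphs.begin (), sorted_glyphs.end (), g))
        return true;
    return false;
  }
  for (uint32_t g : sorted_glyphs)       // small array: probe the set instead
    if (glyph_set.count (g))
      return true;
  return false;
}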
diff --git a/thirdparty/harfbuzz/src/OT/Layout/Common/CoverageFormat2.hh b/thirdparty/harfbuzz/src/OT/Layout/Common/CoverageFormat2.hh
index 974d094633..d7fcc35202 100644
--- a/thirdparty/harfbuzz/src/OT/Layout/Common/CoverageFormat2.hh
+++ b/thirdparty/harfbuzz/src/OT/Layout/Common/CoverageFormat2.hh
@@ -80,8 +80,6 @@ struct CoverageFormat2_4
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (this))) return_trace (false);
- /* TODO(iter) Write more efficiently? */
-
unsigned num_ranges = 0;
hb_codepoint_t last = (hb_codepoint_t) -2;
for (auto g: glyphs)
@@ -115,26 +113,22 @@ struct CoverageFormat2_4
bool intersects (const hb_set_t *glyphs) const
{
+ if (rangeRecord.len > glyphs->get_population () * hb_bit_storage ((unsigned) rangeRecord.len) / 2)
+ {
+ for (hb_codepoint_t g = HB_SET_VALUE_INVALID; glyphs->next (&g);)
+ if (get_coverage (g) != NOT_COVERED)
+ return true;
+ return false;
+ }
+
return hb_any (+ hb_iter (rangeRecord)
| hb_map ([glyphs] (const RangeRecord<Types> &range) { return range.intersects (*glyphs); }));
}
bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
{
- auto cmp = [] (const void *pk, const void *pr) -> int
- {
- unsigned index = * (const unsigned *) pk;
- const RangeRecord<Types> &range = * (const RangeRecord<Types> *) pr;
- if (index < range.value) return -1;
- if (index > (unsigned int) range.value + (range.last - range.first)) return +1;
- return 0;
- };
-
- auto arr = rangeRecord.as_array ();
- unsigned idx;
- if (hb_bsearch_impl (&idx, index,
- arr.arrayZ, arr.length, sizeof (arr[0]),
- (int (*)(const void *_key, const void *_item)) cmp))
- return arr.arrayZ[idx].intersects (*glyphs);
+ auto *range = rangeRecord.as_array ().bsearch (index);
+ if (range)
+ return range->intersects (*glyphs);
return false;
}
@@ -142,9 +136,14 @@ struct CoverageFormat2_4
hb_requires (hb_is_sink_of (IterableOut, hb_codepoint_t))>
void intersect_set (const hb_set_t &glyphs, IterableOut&& intersect_glyphs) const
{
+ /* Break out of loop for overlapping, broken, tables,
+ * to avoid fuzzer timouts. */
+ hb_codepoint_t last = 0;
for (const auto& range : rangeRecord)
{
- hb_codepoint_t last = range.last;
+ if (unlikely (range.first < last))
+ break;
+ last = range.last;
for (hb_codepoint_t g = range.first - 1;
glyphs.next (&g) && g <= last;)
intersect_glyphs << g;
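The intersect_set () hunk above adds a monotonicity check so that malformed, overlapping range records stop the scan instead of re-emitting glyphs until the fuzzer times out. A small sketch of the same guard, with illustrative types rather than the HarfBuzz ones:

// Editor's sketch: ranges in a well-formed table are sorted and disjoint; if a
// broken font violates that, bail out rather than revisit glyphs indefinitely.
#include <cstdint>
#include <vector>

struct Range { uint32_t first, last; };

static void collect (const std::vector<Range> &ranges,
                     std::vector<uint32_t> &out)
{
  uint32_t last = 0;
  for (const Range &r : ranges)
  {
    if (r.first < last) break;          // overlapping/unsorted range: stop
    last = r.last;
    for (uint32_t g = r.first; g <= r.last; g++)
      out.push_back (g);
  }
}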
diff --git a/thirdparty/harfbuzz/src/OT/Layout/GPOS/GPOS.hh b/thirdparty/harfbuzz/src/OT/Layout/GPOS/GPOS.hh
index 72829377a6..9493ec987e 100644
--- a/thirdparty/harfbuzz/src/OT/Layout/GPOS/GPOS.hh
+++ b/thirdparty/harfbuzz/src/OT/Layout/GPOS/GPOS.hh
@@ -39,7 +39,7 @@ struct GPOS : GSUBGPOS
bool subset (hb_subset_context_t *c) const
{
- hb_subset_layout_context_t l (c, tableTag, c->plan->gpos_lookups, c->plan->gpos_langsys, c->plan->gpos_features);
+ hb_subset_layout_context_t l (c, tableTag);
return GSUBGPOS::subset<PosLookup> (&l);
}
diff --git a/thirdparty/harfbuzz/src/OT/Layout/GPOS/MarkBasePos.hh b/thirdparty/harfbuzz/src/OT/Layout/GPOS/MarkBasePos.hh
index c99b6b2e4b..edf7099c07 100644
--- a/thirdparty/harfbuzz/src/OT/Layout/GPOS/MarkBasePos.hh
+++ b/thirdparty/harfbuzz/src/OT/Layout/GPOS/MarkBasePos.hh
@@ -13,7 +13,7 @@ struct MarkBasePos
union {
HBUINT16 format; /* Format identifier */
MarkBasePosFormat1_2<SmallTypes> format1;
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
MarkBasePosFormat1_2<MediumTypes> format2;
#endif
} u;
@@ -26,7 +26,7 @@ struct MarkBasePos
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
switch (u.format) {
case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
#endif
default:return_trace (c->default_return_value ());
diff --git a/thirdparty/harfbuzz/src/OT/Layout/GPOS/MarkLigPos.hh b/thirdparty/harfbuzz/src/OT/Layout/GPOS/MarkLigPos.hh
index 8a4de9ffaa..09152fd876 100644
--- a/thirdparty/harfbuzz/src/OT/Layout/GPOS/MarkLigPos.hh
+++ b/thirdparty/harfbuzz/src/OT/Layout/GPOS/MarkLigPos.hh
@@ -13,7 +13,7 @@ struct MarkLigPos
union {
HBUINT16 format; /* Format identifier */
MarkLigPosFormat1_2<SmallTypes> format1;
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
MarkLigPosFormat1_2<MediumTypes> format2;
#endif
} u;
@@ -26,7 +26,7 @@ struct MarkLigPos
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
switch (u.format) {
case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
#endif
default:return_trace (c->default_return_value ());
diff --git a/thirdparty/harfbuzz/src/OT/Layout/GPOS/MarkMarkPos.hh b/thirdparty/harfbuzz/src/OT/Layout/GPOS/MarkMarkPos.hh
index 74b5105c42..4118fc304f 100644
--- a/thirdparty/harfbuzz/src/OT/Layout/GPOS/MarkMarkPos.hh
+++ b/thirdparty/harfbuzz/src/OT/Layout/GPOS/MarkMarkPos.hh
@@ -13,7 +13,7 @@ struct MarkMarkPos
union {
HBUINT16 format; /* Format identifier */
MarkMarkPosFormat1_2<SmallTypes> format1;
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
MarkMarkPosFormat1_2<MediumTypes> format2;
#endif
} u;
@@ -26,7 +26,7 @@ struct MarkMarkPos
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
switch (u.format) {
case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
#endif
default:return_trace (c->default_return_value ());
diff --git a/thirdparty/harfbuzz/src/OT/Layout/GPOS/PairPos.hh b/thirdparty/harfbuzz/src/OT/Layout/GPOS/PairPos.hh
index 72bfc43dc4..9823768cbc 100644
--- a/thirdparty/harfbuzz/src/OT/Layout/GPOS/PairPos.hh
+++ b/thirdparty/harfbuzz/src/OT/Layout/GPOS/PairPos.hh
@@ -15,7 +15,7 @@ struct PairPos
HBUINT16 format; /* Format identifier */
PairPosFormat1_3<SmallTypes> format1;
PairPosFormat2_4<SmallTypes> format2;
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
PairPosFormat1_3<MediumTypes> format3;
PairPosFormat2_4<MediumTypes> format4;
#endif
@@ -30,7 +30,7 @@ struct PairPos
switch (u.format) {
case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
case 3: return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
case 4: return_trace (c->dispatch (u.format4, std::forward<Ts> (ds)...));
#endif
diff --git a/thirdparty/harfbuzz/src/OT/Layout/GPOS/PairPosFormat1.hh b/thirdparty/harfbuzz/src/OT/Layout/GPOS/PairPosFormat1.hh
index ddf7313f94..9c9b268889 100644
--- a/thirdparty/harfbuzz/src/OT/Layout/GPOS/PairPosFormat1.hh
+++ b/thirdparty/harfbuzz/src/OT/Layout/GPOS/PairPosFormat1.hh
@@ -51,8 +51,21 @@ struct PairPosFormat1_3
bool intersects (const hb_set_t *glyphs) const
{
+ auto &cov = this+coverage;
+
+ if (pairSet.len > glyphs->get_population () * hb_bit_storage ((unsigned) pairSet.len) / 4)
+ {
+ for (hb_codepoint_t g = HB_SET_VALUE_INVALID; glyphs->next (&g);)
+ {
+ unsigned i = cov.get_coverage (g);
+ if ((this+pairSet[i]).intersects (glyphs, valueFormat))
+ return true;
+ }
+ return false;
+ }
+
return
- + hb_zip (this+coverage, pairSet)
+ + hb_zip (cov, pairSet)
| hb_filter (*glyphs, hb_first)
| hb_map (hb_second)
| hb_map ([glyphs, this] (const typename Types::template OffsetTo<PairSet> &_)
@@ -171,12 +184,16 @@ struct PairPosFormat1_3
unsigned format1 = 0;
unsigned format2 = 0;
for (const auto & _ :
- + hb_zip (this+coverage, pairSet) | hb_filter (glyphset, hb_first) | hb_map (hb_second))
+ + hb_zip (this+coverage, pairSet)
+ | hb_filter (glyphset, hb_first)
+ | hb_map (hb_second)
+ )
{
const PairSet& set = (this + _);
const PairValueRecord *record = &set.firstPairValueRecord;
- for (unsigned i = 0; i < set.len; i++)
+ unsigned count = set.len;
+ for (unsigned i = 0; i < count; i++)
{
if (record->intersects (glyphset))
{
@@ -185,6 +202,9 @@ struct PairPosFormat1_3
}
record = &StructAtOffset<const PairValueRecord> (record, record_size);
}
+
+ if (format1 == valueFormat[0] && format2 == valueFormat[1])
+ break;
}
return hb_pair (format1, format2);
diff --git a/thirdparty/harfbuzz/src/OT/Layout/GPOS/PairPosFormat2.hh b/thirdparty/harfbuzz/src/OT/Layout/GPOS/PairPosFormat2.hh
index 83b093b988..9c87ac2b03 100644
--- a/thirdparty/harfbuzz/src/OT/Layout/GPOS/PairPosFormat2.hh
+++ b/thirdparty/harfbuzz/src/OT/Layout/GPOS/PairPosFormat2.hh
@@ -220,17 +220,25 @@ struct PairPosFormat2_4
if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
{
c->buffer->message (c->font,
- "kerning glyphs at %d,%d",
+ "try kerning glyphs at %d,%d",
c->buffer->idx, skippy_iter.idx);
}
applied_first = valueFormat1.apply_value (c, this, v, buffer->cur_pos());
applied_second = valueFormat2.apply_value (c, this, v + len1, buffer->pos[skippy_iter.idx]);
+ if (applied_first || applied_second)
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->message (c->font,
+ "kerned glyphs at %d,%d",
+ c->buffer->idx, skippy_iter.idx);
+ }
+
if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
{
c->buffer->message (c->font,
- "kerned glyphs at %d,%d",
+ "tried kerning glyphs at %d,%d",
c->buffer->idx, skippy_iter.idx);
}
@@ -241,10 +249,15 @@ struct PairPosFormat2_4
boring:
buffer->unsafe_to_concat (buffer->idx, skippy_iter.idx + 1);
+ if (len2)
+ {
+ skippy_iter.idx++;
+ // https://github.com/harfbuzz/harfbuzz/issues/3824
+ // https://github.com/harfbuzz/harfbuzz/issues/3888#issuecomment-1326781116
+ buffer->unsafe_to_break (buffer->idx, skippy_iter.idx + 1);
+ }
buffer->idx = skippy_iter.idx;
- if (len2)
- buffer->idx++;
return_trace (true);
}
@@ -309,6 +322,7 @@ struct PairPosFormat2_4
{
unsigned len1 = valueFormat1.get_len ();
unsigned len2 = valueFormat2.get_len ();
+ unsigned record_size = len1 + len2;
unsigned format1 = 0;
unsigned format2 = 0;
@@ -317,10 +331,13 @@ struct PairPosFormat2_4
{
for (unsigned class2_idx : + hb_range ((unsigned) class2Count) | hb_filter (klass2_map))
{
- unsigned idx = (class1_idx * (unsigned) class2Count + class2_idx) * (len1 + len2);
+ unsigned idx = (class1_idx * (unsigned) class2Count + class2_idx) * record_size;
format1 = format1 | valueFormat1.get_effective_format (&values[idx]);
format2 = format2 | valueFormat2.get_effective_format (&values[idx + len1]);
}
+
+ if (format1 == valueFormat1 && format2 == valueFormat2)
+ break;
}
return hb_pair (format1, format2);
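The value-format computation above now stops once the accumulated formats already equal the lookup's full value formats, since later records cannot set any new bits. A small self-contained sketch of that early exit (illustrative names, not the HarfBuzz API):

// Editor's sketch: OR together per-record format bits, masked by the full
// format, and quit as soon as the accumulator saturates.
#include <cstdint>
#include <vector>

static uint16_t effective_format (const std::vector<uint16_t> &record_formats,
                                  uint16_t full_format)
{
  uint16_t acc = 0;
  for (uint16_t f : record_formats)
  {
    acc |= (uint16_t) (f & full_format);
    if (acc == full_format) break;      // saturated: no more bits can appear
  }
  return acc;
}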
diff --git a/thirdparty/harfbuzz/src/OT/Layout/GPOS/PairSet.hh b/thirdparty/harfbuzz/src/OT/Layout/GPOS/PairSet.hh
index aa48d933c3..a318f39913 100644
--- a/thirdparty/harfbuzz/src/OT/Layout/GPOS/PairSet.hh
+++ b/thirdparty/harfbuzz/src/OT/Layout/GPOS/PairSet.hh
@@ -112,24 +112,38 @@ struct PairSet
if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
{
c->buffer->message (c->font,
- "kerning glyphs at %d,%d",
+ "try kerning glyphs at %d,%d",
c->buffer->idx, pos);
}
bool applied_first = valueFormats[0].apply_value (c, this, &record->values[0], buffer->cur_pos());
bool applied_second = valueFormats[1].apply_value (c, this, &record->values[len1], buffer->pos[pos]);
+ if (applied_first || applied_second)
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->message (c->font,
+ "kerned glyphs at %d,%d",
+ c->buffer->idx, pos);
+ }
+
if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
{
c->buffer->message (c->font,
- "kerned glyphs at %d,%d",
+ "tried kerning glyphs at %d,%d",
c->buffer->idx, pos);
}
if (applied_first || applied_second)
buffer->unsafe_to_break (buffer->idx, pos + 1);
+
if (len2)
- pos++;
+ {
+ pos++;
+ // https://github.com/harfbuzz/harfbuzz/issues/3824
+ // https://github.com/harfbuzz/harfbuzz/issues/3888#issuecomment-1326781116
+ buffer->unsafe_to_break (buffer->idx, pos + 1);
+ }
buffer->idx = pos;
return_trace (true);
diff --git a/thirdparty/harfbuzz/src/OT/Layout/GPOS/SinglePosFormat1.hh b/thirdparty/harfbuzz/src/OT/Layout/GPOS/SinglePosFormat1.hh
index 5a9dd58a63..b4c9fc3db0 100644
--- a/thirdparty/harfbuzz/src/OT/Layout/GPOS/SinglePosFormat1.hh
+++ b/thirdparty/harfbuzz/src/OT/Layout/GPOS/SinglePosFormat1.hh
@@ -80,6 +80,24 @@ struct SinglePosFormat1
return_trace (true);
}
+ bool
+ position_single (hb_font_t *font,
+ hb_direction_t direction,
+ hb_codepoint_t gid,
+ hb_glyph_position_t &pos) const
+ {
+ unsigned int index = (this+coverage).get_coverage (gid);
+ if (likely (index == NOT_COVERED)) return false;
+
+ /* This is ugly... */
+ hb_buffer_t buffer;
+ buffer.props.direction = direction;
+ OT::hb_ot_apply_context_t c (1, font, &buffer);
+
+ valueFormat.apply_value (&c, this, values, pos);
+ return true;
+ }
+
template<typename Iterator,
typename SrcLookup,
hb_requires (hb_is_iterator (Iterator))>
diff --git a/thirdparty/harfbuzz/src/OT/Layout/GPOS/SinglePosFormat2.hh b/thirdparty/harfbuzz/src/OT/Layout/GPOS/SinglePosFormat2.hh
index 8a6e8a42a6..c77951156b 100644
--- a/thirdparty/harfbuzz/src/OT/Layout/GPOS/SinglePosFormat2.hh
+++ b/thirdparty/harfbuzz/src/OT/Layout/GPOS/SinglePosFormat2.hh
@@ -68,7 +68,7 @@ struct SinglePosFormat2
unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
if (likely (index == NOT_COVERED)) return_trace (false);
- if (likely (index >= valueCount)) return_trace (false);
+ if (unlikely (index >= valueCount)) return_trace (false);
if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
{
@@ -92,6 +92,28 @@ struct SinglePosFormat2
return_trace (true);
}
+ bool
+ position_single (hb_font_t *font,
+ hb_direction_t direction,
+ hb_codepoint_t gid,
+ hb_glyph_position_t &pos) const
+ {
+ unsigned int index = (this+coverage).get_coverage (gid);
+ if (likely (index == NOT_COVERED)) return false;
+ if (unlikely (index >= valueCount)) return false;
+
+ /* This is ugly... */
+ hb_buffer_t buffer;
+ buffer.props.direction = direction;
+ OT::hb_ot_apply_context_t c (1, font, &buffer);
+
+ valueFormat.apply_value (&c, this,
+ &values[index * valueFormat.get_len ()],
+ pos);
+ return true;
+ }
+
+
template<typename Iterator,
typename SrcLookup,
hb_requires (hb_is_iterator (Iterator))>
diff --git a/thirdparty/harfbuzz/src/OT/Layout/GSUB/AlternateSet.hh b/thirdparty/harfbuzz/src/OT/Layout/GSUB/AlternateSet.hh
index 4a9e9672eb..6c50c9717a 100644
--- a/thirdparty/harfbuzz/src/OT/Layout/GSUB/AlternateSet.hh
+++ b/thirdparty/harfbuzz/src/OT/Layout/GSUB/AlternateSet.hh
@@ -84,7 +84,7 @@ struct AlternateSet
{
if (alternates.len && alternate_count)
{
- + alternates.sub_array (start_offset, alternate_count)
+ + alternates.as_array ().sub_array (start_offset, alternate_count)
| hb_sink (hb_array (alternate_glyphs, *alternate_count))
;
}
diff --git a/thirdparty/harfbuzz/src/OT/Layout/GSUB/AlternateSubst.hh b/thirdparty/harfbuzz/src/OT/Layout/GSUB/AlternateSubst.hh
index 37406179a2..9d7cd6fddf 100644
--- a/thirdparty/harfbuzz/src/OT/Layout/GSUB/AlternateSubst.hh
+++ b/thirdparty/harfbuzz/src/OT/Layout/GSUB/AlternateSubst.hh
@@ -14,7 +14,7 @@ struct AlternateSubst
union {
HBUINT16 format; /* Format identifier */
AlternateSubstFormat1_2<SmallTypes> format1;
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
AlternateSubstFormat1_2<MediumTypes> format2;
#endif
} u;
@@ -27,7 +27,7 @@ struct AlternateSubst
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
switch (u.format) {
case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
#endif
default:return_trace (c->default_return_value ());
diff --git a/thirdparty/harfbuzz/src/OT/Layout/GSUB/GSUB.hh b/thirdparty/harfbuzz/src/OT/Layout/GSUB/GSUB.hh
index c0ff098f49..900cf603e4 100644
--- a/thirdparty/harfbuzz/src/OT/Layout/GSUB/GSUB.hh
+++ b/thirdparty/harfbuzz/src/OT/Layout/GSUB/GSUB.hh
@@ -27,7 +27,7 @@ struct GSUB : GSUBGPOS
bool subset (hb_subset_context_t *c) const
{
- hb_subset_layout_context_t l (c, tableTag, c->plan->gsub_lookups, c->plan->gsub_langsys, c->plan->gsub_features);
+ hb_subset_layout_context_t l (c, tableTag);
return GSUBGPOS::subset<SubstLookup> (&l);
}
diff --git a/thirdparty/harfbuzz/src/OT/Layout/GSUB/Ligature.hh b/thirdparty/harfbuzz/src/OT/Layout/GSUB/Ligature.hh
index f373d921b5..cdb35f525b 100644
--- a/thirdparty/harfbuzz/src/OT/Layout/GSUB/Ligature.hh
+++ b/thirdparty/harfbuzz/src/OT/Layout/GSUB/Ligature.hh
@@ -118,7 +118,7 @@ struct Ligature
match_positions[i] += delta;
if (i)
*p++ = ',';
- sprintf (p, "%u", match_positions[i]);
+ snprintf (p, sizeof(buf) - (p - buf), "%u", match_positions[i]);
p += strlen(p);
}
diff --git a/thirdparty/harfbuzz/src/OT/Layout/GSUB/LigatureSubst.hh b/thirdparty/harfbuzz/src/OT/Layout/GSUB/LigatureSubst.hh
index 63707972a8..7ba19e844c 100644
--- a/thirdparty/harfbuzz/src/OT/Layout/GSUB/LigatureSubst.hh
+++ b/thirdparty/harfbuzz/src/OT/Layout/GSUB/LigatureSubst.hh
@@ -14,7 +14,7 @@ struct LigatureSubst
union {
HBUINT16 format; /* Format identifier */
LigatureSubstFormat1_2<SmallTypes> format1;
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
LigatureSubstFormat1_2<MediumTypes> format2;
#endif
} u;
@@ -27,7 +27,7 @@ struct LigatureSubst
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
switch (u.format) {
case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
#endif
default:return_trace (c->default_return_value ());
diff --git a/thirdparty/harfbuzz/src/OT/Layout/GSUB/MultipleSubst.hh b/thirdparty/harfbuzz/src/OT/Layout/GSUB/MultipleSubst.hh
index 852ca3eac5..95710ed2be 100644
--- a/thirdparty/harfbuzz/src/OT/Layout/GSUB/MultipleSubst.hh
+++ b/thirdparty/harfbuzz/src/OT/Layout/GSUB/MultipleSubst.hh
@@ -14,7 +14,7 @@ struct MultipleSubst
union {
HBUINT16 format; /* Format identifier */
MultipleSubstFormat1_2<SmallTypes> format1;
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
MultipleSubstFormat1_2<MediumTypes> format2;
#endif
} u;
@@ -28,7 +28,7 @@ struct MultipleSubst
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
switch (u.format) {
case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
#endif
default:return_trace (c->default_return_value ());
diff --git a/thirdparty/harfbuzz/src/OT/Layout/GSUB/Sequence.hh b/thirdparty/harfbuzz/src/OT/Layout/GSUB/Sequence.hh
index 3d84a5e6ea..e2190078b8 100644
--- a/thirdparty/harfbuzz/src/OT/Layout/GSUB/Sequence.hh
+++ b/thirdparty/harfbuzz/src/OT/Layout/GSUB/Sequence.hh
@@ -117,7 +117,7 @@ struct Sequence
{
if (buf < p)
*p++ = ',';
- sprintf (p, "%u", i);
+ snprintf (p, sizeof(buf) - (p - buf), "%u", i);
p += strlen(p);
}
diff --git a/thirdparty/harfbuzz/src/OT/Layout/GSUB/SingleSubst.hh b/thirdparty/harfbuzz/src/OT/Layout/GSUB/SingleSubst.hh
index 6942e6997f..7da8103168 100644
--- a/thirdparty/harfbuzz/src/OT/Layout/GSUB/SingleSubst.hh
+++ b/thirdparty/harfbuzz/src/OT/Layout/GSUB/SingleSubst.hh
@@ -16,7 +16,7 @@ struct SingleSubst
HBUINT16 format; /* Format identifier */
SingleSubstFormat1_3<SmallTypes> format1;
SingleSubstFormat2_4<SmallTypes> format2;
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
SingleSubstFormat1_3<MediumTypes> format3;
SingleSubstFormat2_4<MediumTypes> format4;
#endif
@@ -32,7 +32,7 @@ struct SingleSubst
switch (u.format) {
case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
case 3: return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
case 4: return_trace (c->dispatch (u.format4, std::forward<Ts> (ds)...));
#endif
@@ -55,7 +55,7 @@ struct SingleSubst
format = 1;
hb_codepoint_t mask = 0xFFFFu;
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
if (+ glyphs
| hb_map_retains_sorting (hb_first)
| hb_filter ([] (hb_codepoint_t gid) { return gid > 0xFFFFu; }))
@@ -78,7 +78,7 @@ struct SingleSubst
| hb_map_retains_sorting (hb_first),
delta));
case 2: return_trace (u.format2.serialize (c, glyphs));
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
case 3: return_trace (u.format3.serialize (c,
+ glyphs
| hb_map_retains_sorting (hb_first),
diff --git a/thirdparty/harfbuzz/src/OT/Layout/GSUB/SingleSubstFormat1.hh b/thirdparty/harfbuzz/src/OT/Layout/GSUB/SingleSubstFormat1.hh
index 13665d7ba1..1be21b98bc 100644
--- a/thirdparty/harfbuzz/src/OT/Layout/GSUB/SingleSubstFormat1.hh
+++ b/thirdparty/harfbuzz/src/OT/Layout/GSUB/SingleSubstFormat1.hh
@@ -57,7 +57,7 @@ struct SingleSubstFormat1_3
hb_codepoint_t max_before = intersection.get_max ();
hb_codepoint_t min_after = (min_before + d) & mask;
hb_codepoint_t max_after = (max_before + d) & mask;
- if (pop >= max_before - min_before &&
+ if (intersection.get_population () == max_before - min_before + 1 &&
((min_before <= min_after && min_after <= max_before) ||
(min_before <= max_after && max_after <= max_before)))
return;
diff --git a/thirdparty/harfbuzz/src/OT/Layout/GSUB/SingleSubstFormat2.hh b/thirdparty/harfbuzz/src/OT/Layout/GSUB/SingleSubstFormat2.hh
index 5416299754..01df714525 100644
--- a/thirdparty/harfbuzz/src/OT/Layout/GSUB/SingleSubstFormat2.hh
+++ b/thirdparty/harfbuzz/src/OT/Layout/GSUB/SingleSubstFormat2.hh
@@ -36,8 +36,24 @@ struct SingleSubstFormat2_4
void closure (hb_closure_context_t *c) const
{
- + hb_zip (this+coverage, substitute)
- | hb_filter (c->parent_active_glyphs (), hb_first)
+ auto &cov = this+coverage;
+ auto &glyph_set = c->parent_active_glyphs ();
+
+ if (substitute.len > glyph_set.get_population () * 4)
+ {
+ for (auto g : glyph_set)
+ {
+ unsigned i = cov.get_coverage (g);
+ if (i == NOT_COVERED || i >= substitute.len)
+ continue;
+ c->output->add (substitute.arrayZ[i]);
+ }
+
+ return;
+ }
+
+ + hb_zip (cov, substitute)
+ | hb_filter (glyph_set, hb_first)
| hb_map (hb_second)
| hb_sink (c->output)
;
diff --git a/thirdparty/harfbuzz/src/OT/glyf/CompositeGlyph.hh b/thirdparty/harfbuzz/src/OT/glyf/CompositeGlyph.hh
index fc8e309bc9..edf8cd8797 100644
--- a/thirdparty/harfbuzz/src/OT/glyf/CompositeGlyph.hh
+++ b/thirdparty/harfbuzz/src/OT/glyf/CompositeGlyph.hh
@@ -3,6 +3,7 @@
#include "../../hb-open-type.hh"
+#include "composite-iter.hh"
namespace OT {
@@ -121,7 +122,7 @@ struct CompositeGlyphRecord
if (flags & ARG_1_AND_2_ARE_WORDS)
{
// no overflow, copy and update value with deltas
- memcpy (out, this, len);
+ hb_memcpy (out, this, len);
const HBINT16 *px = reinterpret_cast<const HBINT16 *> (p);
HBINT16 *o = reinterpret_cast<HBINT16 *> (out + len_before_val);
@@ -135,7 +136,7 @@ struct CompositeGlyphRecord
if (new_x <= 127 && new_x >= -128 &&
new_y <= 127 && new_y >= -128)
{
- memcpy (out, this, len);
+ hb_memcpy (out, this, len);
HBINT8 *o = reinterpret_cast<HBINT8 *> (out + len_before_val);
o[0] = new_x;
o[1] = new_y;
@@ -143,7 +144,7 @@ struct CompositeGlyphRecord
else
{
// int8 overflows after deltas applied
- memcpy (out, this, len_before_val);
+ hb_memcpy (out, this, len_before_val);
//update flags
CompositeGlyphRecord *o = reinterpret_cast<CompositeGlyphRecord *> (out);
@@ -152,14 +153,14 @@ struct CompositeGlyphRecord
HBINT16 new_value;
new_value = new_x;
- memcpy (out, &new_value, HBINT16::static_size);
+ hb_memcpy (out, &new_value, HBINT16::static_size);
out += HBINT16::static_size;
new_value = new_y;
- memcpy (out, &new_value, HBINT16::static_size);
+ hb_memcpy (out, &new_value, HBINT16::static_size);
out += HBINT16::static_size;
- memcpy (out, p+2, len - len_before_val - 2);
+ hb_memcpy (out, p+2, len - len_before_val - 2);
len += 2;
}
}
@@ -252,55 +253,7 @@ struct CompositeGlyphRecord
DEFINE_SIZE_MIN (4);
};
-struct composite_iter_t : hb_iter_with_fallback_t<composite_iter_t, const CompositeGlyphRecord &>
-{
- typedef const CompositeGlyphRecord *__item_t__;
- composite_iter_t (hb_bytes_t glyph_, __item_t__ current_) :
- glyph (glyph_), current (nullptr), current_size (0)
- {
- set_current (current_);
- }
-
- composite_iter_t () : glyph (hb_bytes_t ()), current (nullptr), current_size (0) {}
-
- item_t __item__ () const { return *current; }
- bool __more__ () const { return current; }
- void __next__ ()
- {
- if (!current->has_more ()) { current = nullptr; return; }
-
- set_current (&StructAtOffset<CompositeGlyphRecord> (current, current_size));
- }
- composite_iter_t __end__ () const { return composite_iter_t (); }
- bool operator != (const composite_iter_t& o) const
- { return current != o.current; }
-
-
- void set_current (__item_t__ current_)
- {
- if (!glyph.check_range (current_, CompositeGlyphRecord::min_size))
- {
- current = nullptr;
- current_size = 0;
- return;
- }
- unsigned size = current_->get_size ();
- if (!glyph.check_range (current_, size))
- {
- current = nullptr;
- current_size = 0;
- return;
- }
-
- current = current_;
- current_size = size;
- }
-
- private:
- hb_bytes_t glyph;
- __item_t__ current;
- unsigned current_size;
-};
+using composite_iter_t = composite_iter_tmpl<CompositeGlyphRecord>;
struct CompositeGlyph
{
@@ -382,7 +335,7 @@ struct CompositeGlyph
unsigned comp_len = component.get_size ();
if (component.is_anchored ())
{
- memcpy (p, &component, comp_len);
+ hb_memcpy (p, &component, comp_len);
p += comp_len;
}
else
@@ -398,7 +351,7 @@ struct CompositeGlyph
if (source_len > source_comp_len)
{
unsigned instr_len = source_len - source_comp_len;
- memcpy (p, (const char *)c + source_comp_len, instr_len);
+ hb_memcpy (p, (const char *)c + source_comp_len, instr_len);
p += instr_len;
}
diff --git a/thirdparty/harfbuzz/src/OT/glyf/Glyph.hh b/thirdparty/harfbuzz/src/OT/glyf/Glyph.hh
index afcb5dc834..b7215b0170 100644
--- a/thirdparty/harfbuzz/src/OT/glyf/Glyph.hh
+++ b/thirdparty/harfbuzz/src/OT/glyf/Glyph.hh
@@ -7,6 +7,8 @@
#include "GlyphHeader.hh"
#include "SimpleGlyph.hh"
#include "CompositeGlyph.hh"
+#include "VarCompositeGlyph.hh"
+#include "coord-setter.hh"
namespace OT {
@@ -16,6 +18,11 @@ struct glyf_accelerator_t;
namespace glyf_impl {
+#ifndef HB_GLYF_MAX_POINTS
+#define HB_GLYF_MAX_POINTS 10000
+#endif
+
+
enum phantom_point_index_t
{
PHANTOM_LEFT = 0,
@@ -27,7 +34,7 @@ enum phantom_point_index_t
struct Glyph
{
- enum glyph_type_t { EMPTY, SIMPLE, COMPOSITE };
+ enum glyph_type_t { EMPTY, SIMPLE, COMPOSITE, VAR_COMPOSITE };
public:
composite_iter_t get_composite_iterator () const
@@ -35,6 +42,11 @@ struct Glyph
if (type != COMPOSITE) return composite_iter_t ();
return CompositeGlyph (*header, bytes).iter ();
}
+ var_composite_iter_t get_var_composite_iterator () const
+ {
+ if (type != VAR_COMPOSITE) return var_composite_iter_t ();
+ return VarCompositeGlyph (*header, bytes).iter ();
+ }
const hb_bytes_t trim_padding () const
{
@@ -102,40 +114,42 @@ struct Glyph
hb_bytes_t &dest_bytes /* OUT */) const
{
GlyphHeader *glyph_header = nullptr;
- if (all_points.length > 4)
+ if (type != EMPTY && all_points.length > 4)
{
glyph_header = (GlyphHeader *) hb_calloc (1, GlyphHeader::static_size);
if (unlikely (!glyph_header)) return false;
}
- int xMin, xMax;
- xMin = xMax = roundf (all_points[0].x);
-
- int yMin, yMax;
- yMin = yMax = roundf (all_points[0].y);
+ float xMin = 0, xMax = 0;
+ float yMin = 0, yMax = 0;
+ if (all_points.length > 4)
+ {
+ xMin = xMax = all_points[0].x;
+ yMin = yMax = all_points[0].y;
+ }
for (unsigned i = 1; i < all_points.length - 4; i++)
{
- float rounded_x = roundf (all_points[i].x);
- float rounded_y = roundf (all_points[i].y);
- xMin = hb_min (xMin, rounded_x);
- xMax = hb_max (xMax, rounded_x);
- yMin = hb_min (yMin, rounded_y);
- yMax = hb_max (yMax, rounded_y);
+ float x = all_points[i].x;
+ float y = all_points[i].y;
+ xMin = hb_min (xMin, x);
+ xMax = hb_max (xMax, x);
+ yMin = hb_min (yMin, y);
+ yMax = hb_max (yMax, y);
}
- update_mtx (plan, xMin, yMax, all_points);
+ update_mtx (plan, roundf (xMin), roundf (yMax), all_points);
/*for empty glyphs: all_points only include phantom points.
*just update metrics and then return */
- if (all_points.length == 4)
+ if (!glyph_header)
return true;
glyph_header->numberOfContours = header->numberOfContours;
- glyph_header->xMin = xMin;
- glyph_header->yMin = yMin;
- glyph_header->xMax = xMax;
- glyph_header->yMax = yMax;
+ glyph_header->xMin = roundf (xMin);
+ glyph_header->yMin = roundf (yMin);
+ glyph_header->xMax = roundf (xMax);
+ glyph_header->yMax = roundf (yMax);
dest_bytes = hb_bytes_t ((const char *)glyph_header, GlyphHeader::static_size);
return true;
@@ -145,10 +159,17 @@ struct Glyph
hb_font_t *font,
const glyf_accelerator_t &glyf,
hb_bytes_t &dest_start, /* IN/OUT */
- hb_bytes_t &dest_end /* OUT */) const
+ hb_bytes_t &dest_end /* OUT */)
{
contour_point_vector_t all_points, deltas;
- get_points (font, glyf, all_points, &deltas, false);
+ if (!get_points (font, glyf, all_points, &deltas, false, false))
+ return false;
+
+ // .notdef, set type to empty so we only update metrics and don't compile bytes for
+ // it
+ if (gid == 0 &&
+ !(plan->flags & HB_SUBSET_FLAGS_NOTDEF_OUTLINE))
+ type = EMPTY;
switch (type) {
case COMPOSITE:
@@ -171,7 +192,12 @@ struct Glyph
break;
}
- return compile_header_bytes (plan, all_points, dest_start);
+ if (!compile_header_bytes (plan, all_points, dest_start))
+ {
+ dest_end.fini ();
+ return false;
+ }
+ return true;
}
@@ -182,17 +208,27 @@ struct Glyph
bool get_points (hb_font_t *font, const accelerator_t &glyf_accelerator,
contour_point_vector_t &all_points /* OUT */,
contour_point_vector_t *deltas = nullptr, /* OUT */
+ bool shift_points_hori = true,
bool use_my_metrics = true,
bool phantom_only = false,
+ hb_array_t<int> coords = hb_array_t<int> (),
unsigned int depth = 0) const
{
if (unlikely (depth > HB_MAX_NESTING_LEVEL)) return false;
+
+ if (!coords)
+ coords = hb_array (font->coords, font->num_coords);
+
contour_point_vector_t stack_points;
bool inplace = type == SIMPLE && all_points.length == 0;
/* Load into all_points if it's empty, as an optimization. */
contour_point_vector_t &points = inplace ? all_points : stack_points;
switch (type) {
+ case SIMPLE:
+ if (unlikely (!SimpleGlyph (*header, bytes).get_contour_points (points, phantom_only)))
+ return false;
+ break;
case COMPOSITE:
{
/* pseudo component points for each component in composite glyph */
@@ -200,20 +236,25 @@ struct Glyph
if (unlikely (!points.resize (num_points))) return false;
break;
}
- case SIMPLE:
- if (unlikely (!SimpleGlyph (*header, bytes).get_contour_points (points, phantom_only)))
- return false;
+#ifndef HB_NO_VAR_COMPOSITES
+ case VAR_COMPOSITE:
+ {
+ for (auto &item : get_var_composite_iterator ())
+ if (unlikely (!item.get_points (points))) return false;
+ }
+#endif
+ default:
break;
}
/* Init phantom points */
if (unlikely (!points.resize (points.length + PHANTOM_COUNT))) return false;
- hb_array_t<contour_point_t> phantoms = points.sub_array (points.length - PHANTOM_COUNT, PHANTOM_COUNT);
+ hb_array_t<contour_point_t> phantoms = points.as_array ().sub_array (points.length - PHANTOM_COUNT, PHANTOM_COUNT);
{
int lsb = 0;
int h_delta = glyf_accelerator.hmtx->get_leading_bearing_without_var_unscaled (gid, &lsb) ?
(int) header->xMin - lsb : 0;
- int tsb = 0;
+ HB_UNUSED int tsb = 0;
int v_orig = (int) header->yMax +
#ifndef HB_NO_VERTICAL
((void) glyf_accelerator.vmtx->get_leading_bearing_without_var_unscaled (gid, &tsb), tsb)
@@ -238,12 +279,13 @@ struct Glyph
if (deltas != nullptr && depth == 0 && type == COMPOSITE)
{
if (unlikely (!deltas->resize (points.length))) return false;
- for (unsigned i = 0 ; i < points.length; i++)
- deltas->arrayZ[i] = points.arrayZ[i];
+ deltas->copy_vector (points);
}
#ifndef HB_NO_VAR
- glyf_accelerator.gvar->apply_deltas_to_points (gid, font, points.as_array ());
+ glyf_accelerator.gvar->apply_deltas_to_points (gid,
+ coords,
+ points.as_array ());
#endif
// mainly used by CompositeGlyph calculating new X/Y offset value so no need to extend it
@@ -269,9 +311,17 @@ struct Glyph
for (auto &item : get_composite_iterator ())
{
comp_points.reset ();
+
if (unlikely (!glyf_accelerator.glyph_for_gid (item.get_gid ())
- .get_points (font, glyf_accelerator, comp_points,
- deltas, use_my_metrics, phantom_only, depth + 1)))
+ .get_points (font,
+ glyf_accelerator,
+ comp_points,
+ deltas,
+ shift_points_hori,
+ use_my_metrics,
+ phantom_only,
+ coords,
+ depth + 1)))
return false;
/* Copy phantom points from component if USE_MY_METRICS flag set */
@@ -299,18 +349,66 @@ struct Glyph
}
}
- all_points.extend (comp_points.sub_array (0, comp_points.length - PHANTOM_COUNT));
+ all_points.extend (comp_points.as_array ().sub_array (0, comp_points.length - PHANTOM_COUNT));
+
+ if (all_points.length > HB_GLYF_MAX_POINTS)
+ return false;
comp_index++;
}
all_points.extend (phantoms);
} break;
+#ifndef HB_NO_VAR_COMPOSITES
+ case VAR_COMPOSITE:
+ {
+ contour_point_vector_t comp_points;
+ hb_array_t<contour_point_t> points_left = points.as_array ();
+ for (auto &item : get_var_composite_iterator ())
+ {
+ hb_array_t<contour_point_t> record_points = points_left.sub_array (0, item.get_num_points ());
+
+ comp_points.reset ();
+
+ coord_setter_t coord_setter (coords);
+ item.set_variations (coord_setter, record_points);
+
+ if (unlikely (!glyf_accelerator.glyph_for_gid (item.get_gid ())
+ .get_points (font,
+ glyf_accelerator,
+ comp_points,
+ deltas,
+ shift_points_hori,
+ use_my_metrics,
+ phantom_only,
+ coord_setter.get_coords (),
+ depth + 1)))
+ return false;
+
+ /* Apply component transformation */
+ item.transform_points (record_points, comp_points);
+
+ /* Copy phantom points from component if USE_MY_METRICS flag set */
+ if (use_my_metrics && item.is_use_my_metrics ())
+ for (unsigned int i = 0; i < PHANTOM_COUNT; i++)
+ phantoms[i] = comp_points[comp_points.length - PHANTOM_COUNT + i];
+
+ all_points.extend (comp_points.as_array ().sub_array (0, comp_points.length - PHANTOM_COUNT));
+
+ if (all_points.length > HB_GLYF_MAX_POINTS)
+ return false;
+
+ points_left += item.get_num_points ();
+ }
+ all_points.extend (phantoms);
+ } break;
+#endif
default:
all_points.extend (phantoms);
+ break;
}
- if (depth == 0) /* Apply at top level */
+ if (depth == 0 && shift_points_hori) /* Apply at top level */
{
/* Undocumented rasterizer behavior:
* Shift points horizontally by the updated left side bearing
@@ -332,14 +430,21 @@ struct Glyph
hb_bytes_t get_bytes () const { return bytes; }
- Glyph (hb_bytes_t bytes_ = hb_bytes_t (),
- hb_codepoint_t gid_ = (hb_codepoint_t) -1) : bytes (bytes_),
- header (bytes.as<GlyphHeader> ()),
- gid (gid_)
+ Glyph () : bytes (),
+ header (bytes.as<GlyphHeader> ()),
+ gid (-1),
+ type(EMPTY)
+ {}
+
+ Glyph (hb_bytes_t bytes_,
+ hb_codepoint_t gid_ = (unsigned) -1) : bytes (bytes_),
+ header (bytes.as<GlyphHeader> ()),
+ gid (gid_)
{
int num_contours = header->numberOfContours;
if (unlikely (num_contours == 0)) type = EMPTY;
else if (num_contours > 0) type = SIMPLE;
+ else if (num_contours == -2) type = VAR_COMPOSITE;
else type = COMPOSITE; /* negative numbers */
}
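The constructor change above classifies a glyph from its numberOfContours field: positive means simple, zero empty, -2 the new variable-composite form, and any other negative value a regular composite. A one-function sketch of that mapping (illustrative, not the HarfBuzz type):

// Editor's sketch of the numberOfContours -> glyph type mapping.
enum glyph_type_t { EMPTY, SIMPLE, COMPOSITE, VAR_COMPOSITE };

static glyph_type_t classify (int num_contours)
{
  if (num_contours == 0)  return EMPTY;
  if (num_contours > 0)   return SIMPLE;
  if (num_contours == -2) return VAR_COMPOSITE;
  return COMPOSITE;                     // other negative values
}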
diff --git a/thirdparty/harfbuzz/src/OT/glyf/SimpleGlyph.hh b/thirdparty/harfbuzz/src/OT/glyf/SimpleGlyph.hh
index b99665d6a0..2b4aa99d25 100644
--- a/thirdparty/harfbuzz/src/OT/glyf/SimpleGlyph.hh
+++ b/thirdparty/harfbuzz/src/OT/glyf/SimpleGlyph.hh
@@ -132,8 +132,8 @@ struct SimpleGlyph
if (unlikely (p + 1 > end)) return false;
unsigned int repeat_count = *p++;
unsigned stop = hb_min (i + repeat_count, count);
- for (; i < stop;)
- points_.arrayZ[i++].flag = flag;
+ for (; i < stop; i++)
+ points_.arrayZ[i].flag = flag;
}
}
return true;
@@ -223,33 +223,34 @@ struct SimpleGlyph
if (value > 0) flag |= same_flag;
else value = -value;
- coords.push ((uint8_t)value);
+ coords.arrayZ[coords.length++] = (uint8_t) value;
}
else
{
int16_t val = value;
- coords.push (val >> 8);
- coords.push (val & 0xff);
+ coords.arrayZ[coords.length++] = val >> 8;
+ coords.arrayZ[coords.length++] = val & 0xff;
}
}
static void encode_flag (uint8_t &flag,
uint8_t &repeat,
- uint8_t &lastflag,
+ uint8_t lastflag,
hb_vector_t<uint8_t> &flags /* OUT */)
{
if (flag == lastflag && repeat != 255)
{
- repeat = repeat + 1;
+ repeat++;
if (repeat == 1)
{
- flags.push(flag);
+ /* We know there's room. */
+ flags.arrayZ[flags.length++] = flag;
}
else
{
unsigned len = flags.length;
- flags[len-2] = flag | FLAG_REPEAT;
- flags[len-1] = repeat;
+ flags.arrayZ[len-2] = flag | FLAG_REPEAT;
+ flags.arrayZ[len-1] = repeat;
}
}
else
@@ -257,7 +258,6 @@ struct SimpleGlyph
repeat = 0;
flags.push (flag);
}
- lastflag = flag;
}
bool compile_bytes_with_deltas (const contour_point_vector_t &all_points,
@@ -269,33 +269,30 @@ struct SimpleGlyph
dest_bytes = hb_bytes_t ();
return true;
}
- //convert absolute values to relative values
unsigned num_points = all_points.length - 4;
- hb_vector_t<hb_pair_t<int, int>> deltas;
- deltas.resize (num_points);
-
- for (unsigned i = 0; i < num_points; i++)
- {
- deltas[i].first = i == 0 ? roundf (all_points[i].x) : roundf (all_points[i].x) - roundf (all_points[i-1].x);
- deltas[i].second = i == 0 ? roundf (all_points[i].y) : roundf (all_points[i].y) - roundf (all_points[i-1].y);
- }
hb_vector_t<uint8_t> flags, x_coords, y_coords;
- flags.alloc (num_points);
- x_coords.alloc (2*num_points);
- y_coords.alloc (2*num_points);
+ if (unlikely (!flags.alloc (num_points))) return false;
+ if (unlikely (!x_coords.alloc (2*num_points))) return false;
+ if (unlikely (!y_coords.alloc (2*num_points))) return false;
- uint8_t lastflag = 0, repeat = 0;
+ uint8_t lastflag = 255, repeat = 0;
+ int prev_x = 0, prev_y = 0;
for (unsigned i = 0; i < num_points; i++)
{
- uint8_t flag = all_points[i].flag;
+ uint8_t flag = all_points.arrayZ[i].flag;
flag &= FLAG_ON_CURVE + FLAG_OVERLAP_SIMPLE;
- encode_coord (deltas[i].first, flag, FLAG_X_SHORT, FLAG_X_SAME, x_coords);
- encode_coord (deltas[i].second, flag, FLAG_Y_SHORT, FLAG_Y_SAME, y_coords);
- if (i == 0) lastflag = flag + 1; //make lastflag != flag for the first point
+ int cur_x = roundf (all_points.arrayZ[i].x);
+ int cur_y = roundf (all_points.arrayZ[i].y);
+ encode_coord (cur_x - prev_x, flag, FLAG_X_SHORT, FLAG_X_SAME, x_coords);
+ encode_coord (cur_y - prev_y, flag, FLAG_Y_SHORT, FLAG_Y_SAME, y_coords);
encode_flag (flag, repeat, lastflag, flags);
+
+ prev_x = cur_x;
+ prev_y = cur_y;
+ lastflag = flag;
}
unsigned len_before_instrs = 2 * header.numberOfContours + 2;
@@ -305,29 +302,29 @@ struct SimpleGlyph
if (!no_hinting)
total_len += len_instrs;
- char *p = (char *) hb_calloc (total_len, sizeof (char));
+ char *p = (char *) hb_malloc (total_len);
if (unlikely (!p)) return false;
const char *src = bytes.arrayZ + GlyphHeader::static_size;
char *cur = p;
- memcpy (p, src, len_before_instrs);
+ hb_memcpy (p, src, len_before_instrs);
cur += len_before_instrs;
src += len_before_instrs;
if (!no_hinting)
{
- memcpy (cur, src, len_instrs);
+ hb_memcpy (cur, src, len_instrs);
cur += len_instrs;
}
- memcpy (cur, flags.arrayZ, flags.length);
+ hb_memcpy (cur, flags.arrayZ, flags.length);
cur += flags.length;
- memcpy (cur, x_coords.arrayZ, x_coords.length);
+ hb_memcpy (cur, x_coords.arrayZ, x_coords.length);
cur += x_coords.length;
- memcpy (cur, y_coords.arrayZ, y_coords.length);
+ hb_memcpy (cur, y_coords.arrayZ, y_coords.length);
dest_bytes = hb_bytes_t (p, total_len);
return true;
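compile_bytes_with_deltas () above now derives each point's x/y delta from the previous rounded point on the fly instead of materializing a separate deltas vector first. A minimal sketch of that delta pass (plain C++, illustrative names):

// Editor's sketch: round each point, emit the difference from the previous
// rounded point, and carry the rounded values forward.
#include <cmath>
#include <vector>

struct Pt { float x, y; };

static void encode_deltas (const std::vector<Pt> &points,
                           std::vector<int> &dx, std::vector<int> &dy)
{
  int prev_x = 0, prev_y = 0;
  for (const Pt &p : points)
  {
    int cur_x = (int) roundf (p.x);
    int cur_y = (int) roundf (p.y);
    dx.push_back (cur_x - prev_x);
    dy.push_back (cur_y - prev_y);
    prev_x = cur_x;
    prev_y = cur_y;
  }
}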
diff --git a/thirdparty/harfbuzz/src/OT/glyf/SubsetGlyph.hh b/thirdparty/harfbuzz/src/OT/glyf/SubsetGlyph.hh
index 7ddefc5a91..1a0370c757 100644
--- a/thirdparty/harfbuzz/src/OT/glyf/SubsetGlyph.hh
+++ b/thirdparty/harfbuzz/src/OT/glyf/SubsetGlyph.hh
@@ -14,7 +14,6 @@ namespace glyf_impl {
struct SubsetGlyph
{
- hb_codepoint_t new_gid;
hb_codepoint_t old_gid;
Glyph source_glyph;
hb_bytes_t dest_start; /* region of source_glyph to copy first */
@@ -22,10 +21,18 @@ struct SubsetGlyph
bool serialize (hb_serialize_context_t *c,
bool use_short_loca,
- const hb_subset_plan_t *plan) const
+ const hb_subset_plan_t *plan,
+ hb_font_t *font)
{
TRACE_SERIALIZE (this);
+ if (font)
+ {
+ const OT::glyf_accelerator_t &glyf = *font->face->table.glyf;
+ if (!this->compile_bytes_with_deltas (plan, font, glyf))
+ return_trace (false);
+ }
+
hb_bytes_t dest_glyph = dest_start.copy (c);
dest_glyph = hb_bytes_t (&dest_glyph, dest_glyph.length + dest_end.copy (c).length);
unsigned int pad_length = use_short_loca ? padding () : 0;
diff --git a/thirdparty/harfbuzz/src/OT/glyf/VarCompositeGlyph.hh b/thirdparty/harfbuzz/src/OT/glyf/VarCompositeGlyph.hh
new file mode 100644
index 0000000000..0f4c71c83d
--- /dev/null
+++ b/thirdparty/harfbuzz/src/OT/glyf/VarCompositeGlyph.hh
@@ -0,0 +1,353 @@
+#ifndef OT_GLYF_VARCOMPOSITEGLYPH_HH
+#define OT_GLYF_VARCOMPOSITEGLYPH_HH
+
+
+#include "../../hb-open-type.hh"
+#include "coord-setter.hh"
+
+
+namespace OT {
+namespace glyf_impl {
+
+
+struct VarCompositeGlyphRecord
+{
+ protected:
+ enum var_composite_glyph_flag_t
+ {
+ USE_MY_METRICS = 0x0001,
+ AXIS_INDICES_ARE_SHORT = 0x0002,
+ UNIFORM_SCALE = 0x0004,
+ HAVE_TRANSLATE_X = 0x0008,
+ HAVE_TRANSLATE_Y = 0x0010,
+ HAVE_ROTATION = 0x0020,
+ HAVE_SCALE_X = 0x0040,
+ HAVE_SCALE_Y = 0x0080,
+ HAVE_SKEW_X = 0x0100,
+ HAVE_SKEW_Y = 0x0200,
+ HAVE_TCENTER_X = 0x0400,
+ HAVE_TCENTER_Y = 0x0800,
+ GID_IS_24 = 0x1000,
+ AXES_HAVE_VARIATION = 0x2000,
+ };
+
+ public:
+
+ unsigned int get_size () const
+ {
+ unsigned int size = min_size;
+
+ unsigned axis_width = (flags & AXIS_INDICES_ARE_SHORT) ? 4 : 3;
+ size += numAxes * axis_width;
+
+ // gid
+ size += 2;
+ if (flags & GID_IS_24) size += 1;
+
+ if (flags & HAVE_TRANSLATE_X) size += 2;
+ if (flags & HAVE_TRANSLATE_Y) size += 2;
+ if (flags & HAVE_ROTATION) size += 2;
+ if (flags & HAVE_SCALE_X) size += 2;
+ if (flags & HAVE_SCALE_Y) size += 2;
+ if (flags & HAVE_SKEW_X) size += 2;
+ if (flags & HAVE_SKEW_Y) size += 2;
+ if (flags & HAVE_TCENTER_X) size += 2;
+ if (flags & HAVE_TCENTER_Y) size += 2;
+
+ return size;
+ }
+
+ bool has_more () const { return true; }
+
+ bool is_use_my_metrics () const { return flags & USE_MY_METRICS; }
+
+ hb_codepoint_t get_gid () const
+ {
+ if (flags & GID_IS_24)
+ return StructAfter<const HBGlyphID24> (numAxes);
+ else
+ return StructAfter<const HBGlyphID16> (numAxes);
+ }
+
+ unsigned get_numAxes () const
+ {
+ return numAxes;
+ }
+
+ unsigned get_num_points () const
+ {
+ unsigned num = 0;
+ if (flags & AXES_HAVE_VARIATION) num += numAxes;
+ if (flags & (HAVE_TRANSLATE_X | HAVE_TRANSLATE_Y)) num++;
+ if (flags & HAVE_ROTATION) num++;
+ if (flags & (HAVE_SCALE_X | HAVE_SCALE_Y)) num++;
+ if (flags & (HAVE_SKEW_X | HAVE_SKEW_Y)) num++;
+ if (flags & (HAVE_TCENTER_X | HAVE_TCENTER_Y)) num++;
+ return num;
+ }
+
+ void transform_points (hb_array_t<contour_point_t> record_points,
+ contour_point_vector_t &points) const
+ {
+ float matrix[4];
+ contour_point_t trans;
+
+ get_transformation_from_points (record_points, matrix, trans);
+
+ points.transform (matrix);
+ points.translate (trans);
+ }
+
+ static inline void transform (float (&matrix)[4], contour_point_t &trans,
+ float (other)[6])
+ {
+ // https://github.com/fonttools/fonttools/blob/f66ee05f71c8b57b5f519ee975e95edcd1466e14/Lib/fontTools/misc/transform.py#L268
+ float xx1 = other[0];
+ float xy1 = other[1];
+ float yx1 = other[2];
+ float yy1 = other[3];
+ float dx1 = other[4];
+ float dy1 = other[5];
+ float xx2 = matrix[0];
+ float xy2 = matrix[1];
+ float yx2 = matrix[2];
+ float yy2 = matrix[3];
+ float dx2 = trans.x;
+ float dy2 = trans.y;
+
+ matrix[0] = xx1*xx2 + xy1*yx2;
+ matrix[1] = xx1*xy2 + xy1*yy2;
+ matrix[2] = yx1*xx2 + yy1*yx2;
+ matrix[3] = yx1*xy2 + yy1*yy2;
+ trans.x = xx2*dx1 + yx2*dy1 + dx2;
+ trans.y = xy2*dx1 + yy2*dy1 + dy2;
+ }
+
+ static void translate (float (&matrix)[4], contour_point_t &trans,
+ float translateX, float translateY)
+ {
+ // https://github.com/fonttools/fonttools/blob/f66ee05f71c8b57b5f519ee975e95edcd1466e14/Lib/fontTools/misc/transform.py#L213
+ float other[6] = {1.f, 0.f, 0.f, 1.f, translateX, translateY};
+ transform (matrix, trans, other);
+ }
+
+ static void scale (float (&matrix)[4], contour_point_t &trans,
+ float scaleX, float scaleY)
+ {
+ // https://github.com/fonttools/fonttools/blob/f66ee05f71c8b57b5f519ee975e95edcd1466e14/Lib/fontTools/misc/transform.py#L224
+ float other[6] = {scaleX, 0.f, 0.f, scaleY, 0.f, 0.f};
+ transform (matrix, trans, other);
+ }
+
+ static void rotate (float (&matrix)[4], contour_point_t &trans,
+ float rotation)
+ {
+ // https://github.com/fonttools/fonttools/blob/f66ee05f71c8b57b5f519ee975e95edcd1466e14/Lib/fontTools/misc/transform.py#L240
+ rotation = rotation * float (M_PI);
+ float c = cosf (rotation);
+ float s = sinf (rotation);
+ float other[6] = {c, s, -s, c, 0.f, 0.f};
+ transform (matrix, trans, other);
+ }
+
+ static void skew (float (&matrix)[4], contour_point_t &trans,
+ float skewX, float skewY)
+ {
+ // https://github.com/fonttools/fonttools/blob/f66ee05f71c8b57b5f519ee975e95edcd1466e14/Lib/fontTools/misc/transform.py#L255
+ skewX = skewX * float (M_PI);
+ skewY = skewY * float (M_PI);
+ float other[6] = {1.f, tanf (skewY), tanf (skewX), 1.f, 0.f, 0.f};
+ transform (matrix, trans, other);
+ }
+
+ bool get_points (contour_point_vector_t &points) const
+ {
+ float translateX = 0.f;
+ float translateY = 0.f;
+ float rotation = 0.f;
+ float scaleX = 1.f * (1 << 12);
+ float scaleY = 1.f * (1 << 12);
+ float skewX = 0.f;
+ float skewY = 0.f;
+ float tCenterX = 0.f;
+ float tCenterY = 0.f;
+
+ if (unlikely (!points.resize (points.length + get_num_points ()))) return false;
+
+ unsigned axis_width = (flags & AXIS_INDICES_ARE_SHORT) ? 2 : 1;
+ unsigned axes_size = numAxes * axis_width;
+
+ const F2DOT14 *q = (const F2DOT14 *) (axes_size +
+ (flags & GID_IS_24 ? 3 : 2) +
+ &StructAfter<const HBUINT8> (numAxes));
+
+ hb_array_t<contour_point_t> rec_points = points.as_array ().sub_array (points.length - get_num_points ());
+
+ unsigned count = numAxes;
+ if (flags & AXES_HAVE_VARIATION)
+ {
+ for (unsigned i = 0; i < count; i++)
+ rec_points[i].x = *q++;
+ rec_points += count;
+ }
+ else
+ q += count;
+
+ const HBUINT16 *p = (const HBUINT16 *) q;
+
+ if (flags & HAVE_TRANSLATE_X) translateX = * (const FWORD *) p++;
+ if (flags & HAVE_TRANSLATE_Y) translateY = * (const FWORD *) p++;
+ if (flags & HAVE_ROTATION) rotation = * (const F2DOT14 *) p++;
+ if (flags & HAVE_SCALE_X) scaleX = * (const F4DOT12 *) p++;
+ if (flags & HAVE_SCALE_Y) scaleY = * (const F4DOT12 *) p++;
+ if (flags & HAVE_SKEW_X) skewX = * (const F2DOT14 *) p++;
+ if (flags & HAVE_SKEW_Y) skewY = * (const F2DOT14 *) p++;
+ if (flags & HAVE_TCENTER_X) tCenterX = * (const FWORD *) p++;
+ if (flags & HAVE_TCENTER_Y) tCenterY = * (const FWORD *) p++;
+
+ if ((flags & UNIFORM_SCALE) && !(flags & HAVE_SCALE_Y))
+ scaleY = scaleX;
+
+ if (flags & (HAVE_TRANSLATE_X | HAVE_TRANSLATE_Y))
+ {
+ rec_points[0].x = translateX;
+ rec_points[0].y = translateY;
+ rec_points++;
+ }
+ if (flags & HAVE_ROTATION)
+ {
+ rec_points[0].x = rotation;
+ rec_points++;
+ }
+ if (flags & (HAVE_SCALE_X | HAVE_SCALE_Y))
+ {
+ rec_points[0].x = scaleX;
+ rec_points[0].y = scaleY;
+ rec_points++;
+ }
+ if (flags & (HAVE_SKEW_X | HAVE_SKEW_Y))
+ {
+ rec_points[0].x = skewX;
+ rec_points[0].y = skewY;
+ rec_points++;
+ }
+ if (flags & (HAVE_TCENTER_X | HAVE_TCENTER_Y))
+ {
+ rec_points[0].x = tCenterX;
+ rec_points[0].y = tCenterY;
+ rec_points++;
+ }
+ assert (!rec_points);
+
+ return true;
+ }
+
+ void get_transformation_from_points (hb_array_t<contour_point_t> rec_points,
+ float (&matrix)[4], contour_point_t &trans) const
+ {
+ if (flags & AXES_HAVE_VARIATION)
+ rec_points += numAxes;
+
+ matrix[0] = matrix[3] = 1.f;
+ matrix[1] = matrix[2] = 0.f;
+ trans.init (0.f, 0.f);
+
+ float translateX = 0.f;
+ float translateY = 0.f;
+ float rotation = 0.f;
+ float scaleX = 1.f;
+ float scaleY = 1.f;
+ float skewX = 0.f;
+ float skewY = 0.f;
+ float tCenterX = 0.f;
+ float tCenterY = 0.f;
+
+ if (flags & (HAVE_TRANSLATE_X | HAVE_TRANSLATE_Y))
+ {
+ translateX = rec_points[0].x;
+ translateY = rec_points[0].y;
+ rec_points++;
+ }
+ if (flags & HAVE_ROTATION)
+ {
+ rotation = rec_points[0].x / (1 << 14);
+ rec_points++;
+ }
+ if (flags & (HAVE_SCALE_X | HAVE_SCALE_Y))
+ {
+ scaleX = rec_points[0].x / (1 << 12);
+ scaleY = rec_points[0].y / (1 << 12);
+ rec_points++;
+ }
+ if (flags & (HAVE_SKEW_X | HAVE_SKEW_Y))
+ {
+ skewX = rec_points[0].x / (1 << 14);
+ skewY = rec_points[0].y / (1 << 14);
+ rec_points++;
+ }
+ if (flags & (HAVE_TCENTER_X | HAVE_TCENTER_Y))
+ {
+ tCenterX = rec_points[0].x;
+ tCenterY = rec_points[0].y;
+ rec_points++;
+ }
+ assert (!rec_points);
+
+ translate (matrix, trans, translateX + tCenterX, translateY + tCenterY);
+ rotate (matrix, trans, rotation);
+ scale (matrix, trans, scaleX, scaleY);
+ skew (matrix, trans, -skewX, skewY);
+ translate (matrix, trans, -tCenterX, -tCenterY);
+ }
+
+ void set_variations (coord_setter_t &setter,
+ hb_array_t<contour_point_t> rec_points) const
+ {
+ bool have_variations = flags & AXES_HAVE_VARIATION;
+ unsigned axis_width = (flags & AXIS_INDICES_ARE_SHORT) ? 2 : 1;
+
+ const HBUINT8 *p = (const HBUINT8 *) (((HBUINT8 *) &numAxes) + numAxes.static_size + (flags & GID_IS_24 ? 3 : 2));
+ const HBUINT16 *q = (const HBUINT16 *) (((HBUINT8 *) &numAxes) + numAxes.static_size + (flags & GID_IS_24 ? 3 : 2));
+
+ const F2DOT14 *a = (const F2DOT14 *) ((HBUINT8 *) (axis_width == 1 ? (p + numAxes) : (HBUINT8 *) (q + numAxes)));
+
+ unsigned count = numAxes;
+ for (unsigned i = 0; i < count; i++)
+ {
+ unsigned axis_index = axis_width == 1 ? (unsigned) *p++ : (unsigned) *q++;
+
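+      // When AXES_HAVE_VARIATION is set the (possibly varied) value comes from the
+      // points array; otherwise read the stored F2DOT14 value directly.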
+ signed v = have_variations ? rec_points[i].x : *a++;
+
+ v += setter[axis_index];
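+      // Clamp to the normalized F2DOT14 coordinate range [-1.0, 1.0], i.e. ±(1 << 14).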
+ v = hb_clamp (v, -(1<<14), (1<<14));
+ setter[axis_index] = v;
+ }
+ }
+
+ protected:
+ HBUINT16 flags;
+ HBUINT8 numAxes;
+ public:
+ DEFINE_SIZE_MIN (3);
+};
+
+using var_composite_iter_t = composite_iter_tmpl<VarCompositeGlyphRecord>;
+
+struct VarCompositeGlyph
+{
+ const GlyphHeader &header;
+ hb_bytes_t bytes;
+ VarCompositeGlyph (const GlyphHeader &header_, hb_bytes_t bytes_) :
+ header (header_), bytes (bytes_) {}
+
+ var_composite_iter_t iter () const
+ { return var_composite_iter_t (bytes, &StructAfter<VarCompositeGlyphRecord, GlyphHeader> (header)); }
+
+};
+
+
+} /* namespace glyf_impl */
+} /* namespace OT */
+
+
+#endif /* OT_GLYF_VARCOMPOSITEGLYPH_HH */
diff --git a/thirdparty/harfbuzz/src/OT/glyf/composite-iter.hh b/thirdparty/harfbuzz/src/OT/glyf/composite-iter.hh
new file mode 100644
index 0000000000..d05701f3d1
--- /dev/null
+++ b/thirdparty/harfbuzz/src/OT/glyf/composite-iter.hh
@@ -0,0 +1,68 @@
+#ifndef OT_GLYF_COMPOSITE_ITER_HH
+#define OT_GLYF_COMPOSITE_ITER_HH
+
+
+#include "../../hb.hh"
+
+
+namespace OT {
+namespace glyf_impl {
+
+
+template <typename CompositeGlyphRecord>
+struct composite_iter_tmpl : hb_iter_with_fallback_t<composite_iter_tmpl<CompositeGlyphRecord>,
+ const CompositeGlyphRecord &>
+{
+ typedef const CompositeGlyphRecord *__item_t__;
+ composite_iter_tmpl (hb_bytes_t glyph_, __item_t__ current_) :
+ glyph (glyph_), current (nullptr), current_size (0)
+ {
+ set_current (current_);
+ }
+
+ composite_iter_tmpl () : glyph (hb_bytes_t ()), current (nullptr), current_size (0) {}
+
+ const CompositeGlyphRecord & __item__ () const { return *current; }
+ bool __more__ () const { return current; }
+ void __next__ ()
+ {
+ if (!current->has_more ()) { current = nullptr; return; }
+
+ set_current (&StructAtOffset<CompositeGlyphRecord> (current, current_size));
+ }
+ composite_iter_tmpl __end__ () const { return composite_iter_tmpl (); }
+ bool operator != (const composite_iter_tmpl& o) const
+ { return current != o.current; }
+
+
+ void set_current (__item_t__ current_)
+ {
+ if (!glyph.check_range (current_, CompositeGlyphRecord::min_size))
+ {
+ current = nullptr;
+ current_size = 0;
+ return;
+ }
+ unsigned size = current_->get_size ();
+ if (!glyph.check_range (current_, size))
+ {
+ current = nullptr;
+ current_size = 0;
+ return;
+ }
+
+ current = current_;
+ current_size = size;
+ }
+
+ private:
+ hb_bytes_t glyph;
+ __item_t__ current;
+ unsigned current_size;
+};
+
+
+} /* namespace glyf_impl */
+} /* namespace OT */
+
+#endif /* OT_GLYF_COMPOSITE_ITER_HH */
diff --git a/thirdparty/harfbuzz/src/OT/glyf/coord-setter.hh b/thirdparty/harfbuzz/src/OT/glyf/coord-setter.hh
new file mode 100644
index 0000000000..df64ed5af7
--- /dev/null
+++ b/thirdparty/harfbuzz/src/OT/glyf/coord-setter.hh
@@ -0,0 +1,34 @@
+#ifndef OT_GLYF_COORD_SETTER_HH
+#define OT_GLYF_COORD_SETTER_HH
+
+
+#include "../../hb.hh"
+
+
+namespace OT {
+namespace glyf_impl {
+
+
+struct coord_setter_t
+{
+ coord_setter_t (hb_array_t<int> coords) :
+ coords (coords) {}
+
+ int& operator [] (unsigned idx)
+ {
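+    // Indexing past the current length grows the coord array so axes not yet seen can be set.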
+ if (coords.length < idx + 1)
+ coords.resize (idx + 1);
+ return coords[idx];
+ }
+
+ hb_array_t<int> get_coords ()
+ { return coords.as_array (); }
+
+ hb_vector_t<int> coords;
+};
+
+
+} /* namespace glyf_impl */
+} /* namespace OT */
+
+#endif /* OT_GLYF_COORD_SETTER_HH */
diff --git a/thirdparty/harfbuzz/src/OT/glyf/glyf.hh b/thirdparty/harfbuzz/src/OT/glyf/glyf.hh
index be2cb1d0dc..e6e985c38c 100644
--- a/thirdparty/harfbuzz/src/OT/glyf/glyf.hh
+++ b/thirdparty/harfbuzz/src/OT/glyf/glyf.hh
@@ -42,11 +42,15 @@ struct glyf
bool serialize (hb_serialize_context_t *c,
Iterator it,
bool use_short_loca,
- const hb_subset_plan_t *plan)
+ const hb_subset_plan_t *plan,
+ hb_font_t *font)
{
TRACE_SERIALIZE (this);
+
unsigned init_len = c->length ();
- for (const auto &_ : it) _.serialize (c, use_short_loca, plan);
+ for (auto &_ : it)
+ if (unlikely (!_.serialize (c, use_short_loca, plan, font)))
+ return false;
  /* As a special case when all glyphs in the font are empty, add a zero byte
* to the table, so that OTS doesn’t reject it, and to make the table work
@@ -72,21 +76,28 @@ struct glyf
if (unlikely (!c->serializer->check_success (glyf_prime))) return_trace (false);
hb_vector_t<glyf_impl::SubsetGlyph> glyphs;
- _populate_subset_glyphs (c->plan, &glyphs);
+ _populate_subset_glyphs (c->plan, glyphs);
+ hb_font_t *font = nullptr;
if (!c->plan->pinned_at_default)
- _compile_subset_glyphs_with_deltas (c->plan, &glyphs);
+ {
+ font = _create_font_for_instancing (c->plan);
+ if (unlikely (!font)) return false;
+ }
auto padded_offsets =
+ hb_iter (glyphs)
| hb_map (&glyf_impl::SubsetGlyph::padded_size)
;
- unsigned max_offset = + padded_offsets | hb_reduce (hb_add, 0);
- bool use_short_loca = max_offset < 0x1FFFF;
-
+ bool use_short_loca = false;
+ if (likely (!c->plan->force_long_loca))
+ {
+ unsigned max_offset = + padded_offsets | hb_reduce (hb_add, 0);
+ use_short_loca = max_offset < 0x1FFFF;
+ }
- glyf_prime->serialize (c->serializer, hb_iter (glyphs), use_short_loca, c->plan);
+ glyf_prime->serialize (c->serializer, glyphs.writer (), use_short_loca, c->plan, font);
if (!use_short_loca) {
padded_offsets =
+ hb_iter (glyphs)
@@ -94,9 +105,12 @@ struct glyf
;
}
-
- if (!c->plan->pinned_at_default)
+ if (font)
+ {
_free_compiled_subset_glyphs (&glyphs);
+ hb_font_destroy (font);
+ }
+
if (unlikely (c->serializer->in_error ())) return_trace (false);
return_trace (c->serializer->check_success (glyf_impl::_add_loca_and_head (c->plan,
padded_offsets,
@@ -105,11 +119,10 @@ struct glyf
void
_populate_subset_glyphs (const hb_subset_plan_t *plan,
- hb_vector_t<glyf_impl::SubsetGlyph> *glyphs /* OUT */) const;
-
- void
- _compile_subset_glyphs_with_deltas (const hb_subset_plan_t *plan,
- hb_vector_t<glyf_impl::SubsetGlyph> *glyphs /* OUT */) const;
+ hb_vector_t<glyf_impl::SubsetGlyph> &glyphs /* OUT */) const;
+
+ hb_font_t *
+ _create_font_for_instancing (const hb_subset_plan_t *plan) const;
void _free_compiled_subset_glyphs (hb_vector_t<glyf_impl::SubsetGlyph> *glyphs) const
{
@@ -180,7 +193,7 @@ struct glyf_accelerator_t
contour_point_vector_t all_points;
bool phantom_only = !consumer.is_consuming_contour_points ();
- if (unlikely (!glyph_for_gid (gid).get_points (font, *this, all_points, nullptr, true, phantom_only)))
+ if (unlikely (!glyph_for_gid (gid).get_points (font, *this, all_points, nullptr, true, true, phantom_only)))
return false;
if (consumer.is_consuming_contour_points ())
@@ -374,44 +387,45 @@ struct glyf_accelerator_t
inline void
glyf::_populate_subset_glyphs (const hb_subset_plan_t *plan,
- hb_vector_t<glyf_impl::SubsetGlyph> *glyphs /* OUT */) const
+ hb_vector_t<glyf_impl::SubsetGlyph>& glyphs /* OUT */) const
{
OT::glyf_accelerator_t glyf (plan->source);
+ unsigned num_glyphs = plan->num_output_glyphs ();
+ if (!glyphs.resize (num_glyphs)) return;
- + hb_range (plan->num_output_glyphs ())
- | hb_map ([&] (hb_codepoint_t new_gid)
- {
- glyf_impl::SubsetGlyph subset_glyph = {0};
- subset_glyph.new_gid = new_gid;
-
- /* should never fail: all old gids should be mapped */
- if (!plan->old_gid_for_new_gid (new_gid, &subset_glyph.old_gid))
- return subset_glyph;
-
- if (new_gid == 0 &&
- !(plan->flags & HB_SUBSET_FLAGS_NOTDEF_OUTLINE))
- subset_glyph.source_glyph = glyf_impl::Glyph ();
- else
- subset_glyph.source_glyph = glyf.glyph_for_gid (subset_glyph.old_gid, true);
- if (plan->flags & HB_SUBSET_FLAGS_NO_HINTING)
- subset_glyph.drop_hints_bytes ();
- else
- subset_glyph.dest_start = subset_glyph.source_glyph.get_bytes ();
- return subset_glyph;
- })
- | hb_sink (glyphs)
- ;
+ for (auto p : plan->glyph_map->iter ())
+ {
+ unsigned new_gid = p.second;
+ glyf_impl::SubsetGlyph& subset_glyph = glyphs.arrayZ[new_gid];
+ subset_glyph.old_gid = p.first;
+
+ if (unlikely (new_gid == 0 &&
+ !(plan->flags & HB_SUBSET_FLAGS_NOTDEF_OUTLINE)) &&
+ plan->pinned_at_default)
+ subset_glyph.source_glyph = glyf_impl::Glyph ();
+ else
+ {
+ /* If plan has an accelerator, the preprocessing step already trimmed glyphs.
+ * Don't trim them again! */
+ subset_glyph.source_glyph = glyf.glyph_for_gid (subset_glyph.old_gid, !plan->accelerator);
+ }
+
+ if (plan->flags & HB_SUBSET_FLAGS_NO_HINTING)
+ subset_glyph.drop_hints_bytes ();
+ else
+ subset_glyph.dest_start = subset_glyph.source_glyph.get_bytes ();
+ }
}
-inline void
-glyf::_compile_subset_glyphs_with_deltas (const hb_subset_plan_t *plan,
- hb_vector_t<glyf_impl::SubsetGlyph> *glyphs /* OUT */) const
+inline hb_font_t *
+glyf::_create_font_for_instancing (const hb_subset_plan_t *plan) const
{
- OT::glyf_accelerator_t glyf (plan->source);
hb_font_t *font = hb_font_create (plan->source);
+ if (unlikely (font == hb_font_get_empty ())) return nullptr;
hb_vector_t<hb_variation_t> vars;
- vars.alloc (plan->user_axes_location->get_population ());
+ if (unlikely (!vars.alloc (plan->user_axes_location->get_population ())))
+ return nullptr;
for (auto _ : *plan->user_axes_location)
{
@@ -421,11 +435,10 @@ glyf::_compile_subset_glyphs_with_deltas (const hb_subset_plan_t *plan,
vars.push (var);
}
+#ifndef HB_NO_VAR
hb_font_set_variations (font, vars.arrayZ, plan->user_axes_location->get_population ());
- for (auto& subset_glyph : *glyphs)
- const_cast<glyf_impl::SubsetGlyph &> (subset_glyph).compile_bytes_with_deltas (plan, font, glyf);
-
- hb_font_destroy (font);
+#endif
+ return font;
}
diff --git a/thirdparty/harfbuzz/src/graph/classdef-graph.hh b/thirdparty/harfbuzz/src/graph/classdef-graph.hh
index 0bda76ac2f..c2e24a7067 100644
--- a/thirdparty/harfbuzz/src/graph/classdef-graph.hh
+++ b/thirdparty/harfbuzz/src/graph/classdef-graph.hh
@@ -112,7 +112,7 @@ struct ClassDef : public OT::ClassDef
{
case 1: return ((ClassDefFormat1*)this)->sanitize (vertex);
case 2: return ((ClassDefFormat2*)this)->sanitize (vertex);
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
// Not currently supported
case 3:
case 4:
diff --git a/thirdparty/harfbuzz/src/graph/coverage-graph.hh b/thirdparty/harfbuzz/src/graph/coverage-graph.hh
index 3c1022f090..49d0936315 100644
--- a/thirdparty/harfbuzz/src/graph/coverage-graph.hh
+++ b/thirdparty/harfbuzz/src/graph/coverage-graph.hh
@@ -136,7 +136,7 @@ struct Coverage : public OT::Layout::Common::Coverage
{
case 1: return ((CoverageFormat1*)this)->sanitize (vertex);
case 2: return ((CoverageFormat2*)this)->sanitize (vertex);
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
// Not currently supported
case 3:
case 4:
diff --git a/thirdparty/harfbuzz/src/graph/graph.hh b/thirdparty/harfbuzz/src/graph/graph.hh
index 64878a84a4..dc5b6a36fe 100644
--- a/thirdparty/harfbuzz/src/graph/graph.hh
+++ b/thirdparty/harfbuzz/src/graph/graph.hh
@@ -49,6 +49,50 @@ struct graph_t
unsigned end = 0;
unsigned priority = 0;
+
+ bool link_positions_valid (unsigned num_objects, bool removed_nil)
+ {
+ hb_set_t assigned_bytes;
+ for (const auto& l : obj.real_links)
+ {
+ if (l.objidx >= num_objects
+ || (removed_nil && !l.objidx))
+ {
+ DEBUG_MSG (SUBSET_REPACK, nullptr,
+ "Invalid graph. Invalid object index.");
+ return false;
+ }
+
+ unsigned start = l.position;
+ unsigned end = start + l.width - 1;
+
+ if (unlikely (l.width < 2 || l.width > 4))
+ {
+ DEBUG_MSG (SUBSET_REPACK, nullptr,
+ "Invalid graph. Invalid link width.");
+ return false;
+ }
+
+ if (unlikely (end >= table_size ()))
+ {
+ DEBUG_MSG (SUBSET_REPACK, nullptr,
+ "Invalid graph. Link position is out of bounds.");
+ return false;
+ }
+
+ if (unlikely (assigned_bytes.intersects (start, end)))
+ {
+ DEBUG_MSG (SUBSET_REPACK, nullptr,
+ "Invalid graph. Found offsets whose positions overlap.");
+ return false;
+ }
+
+ assigned_bytes.add_range (start, end);
+ }
+
+ return !assigned_bytes.in_error ();
+ }
+
void normalize ()
{
obj.real_links.qsort ();
@@ -70,8 +114,8 @@ struct graph_t
{
DEBUG_MSG (SUBSET_REPACK, nullptr,
"vertex [%lu] bytes != [%lu] bytes, depth = %u",
- table_size (),
- other.table_size (),
+ (unsigned long) table_size (),
+ (unsigned long) other.table_size (),
depth);
auto a = as_bytes ();
@@ -132,7 +176,7 @@ struct graph_t
for (unsigned i = 0; i < parents.length; i++)
{
if (parents[i] != parent_index) continue;
- parents.remove (i);
+ parents.remove_unordered (i);
break;
}
}
@@ -148,7 +192,7 @@ struct graph_t
if ((obj.head + link.position) != offset)
continue;
- obj.real_links.remove (i);
+ obj.real_links.remove_unordered (i);
return;
}
}
@@ -286,8 +330,6 @@ struct graph_t
vertices_scratch_.alloc (objects.length);
for (unsigned i = 0; i < objects.length; i++)
{
- // TODO(grieger): check all links point to valid objects.
-
// If this graph came from a serialization buffer object 0 is the
// nil object. We don't need it for our purposes here so drop it.
if (i == 0 && !objects[i])
@@ -299,6 +341,9 @@ struct graph_t
vertex_t* v = vertices_.push ();
if (check_success (!vertices_.in_error ()))
v->obj = *objects[i];
+
+ check_success (v->link_positions_valid (objects.length, removed_nil));
+
if (!removed_nil) continue;
// Fix indices to account for removed nil object.
for (auto& l : v->obj.all_links_writer ()) {
@@ -418,6 +463,13 @@ struct graph_t
hb_swap (sorted_graph[new_id], vertices_[next_id]);
const vertex_t& next = sorted_graph[new_id];
+ if (unlikely (!check_success(new_id >= 0))) {
+      // We are out of ids, which means we've visited a node more than once.
+      // This graph contains a cycle, which is not allowed.
+ DEBUG_MSG (SUBSET_REPACK, nullptr, "Invalid graph. Contains cycle.");
+ return;
+ }
+
id_map[next_id] = new_id--;
for (const auto& link : next.obj.all_links ()) {
@@ -496,6 +548,12 @@ struct graph_t
}
template <typename T, typename ...Ts>
+ vertex_and_table_t<T> as_mutable_table (unsigned parent, const void* offset, Ts... ds)
+ {
+ return as_table_from_index<T> (mutable_index_for_offset (parent, offset), std::forward<Ts>(ds)...);
+ }
+
+ template <typename T, typename ...Ts>
vertex_and_table_t<T> as_table_from_index (unsigned index, Ts... ds)
{
if (index >= vertices_.length)
@@ -574,7 +632,7 @@ struct graph_t
while (roots)
{
- unsigned next = HB_SET_VALUE_INVALID;
+ uint32_t next = HB_SET_VALUE_INVALID;
if (unlikely (!check_success (!roots.in_error ()))) break;
if (!roots.next (&next)) break;
@@ -655,8 +713,8 @@ struct graph_t
auto new_subgraph =
+ subgraph.keys ()
- | hb_map([&] (unsigned node_idx) {
- const unsigned *v;
+ | hb_map([&] (uint32_t node_idx) {
+ const uint32_t *v;
if (index_map.has (node_idx, &v)) return *v;
return node_idx;
})
@@ -666,10 +724,10 @@ struct graph_t
remap_obj_indices (index_map, parents.iter (), true);
// Update roots set with new indices as needed.
- unsigned next = HB_SET_VALUE_INVALID;
+ uint32_t next = HB_SET_VALUE_INVALID;
while (roots.next (&next))
{
- const unsigned *v;
+ const uint32_t *v;
if (index_map.has (next, &v))
{
roots.del (next);
@@ -684,7 +742,7 @@ struct graph_t
{
for (const auto& link : vertices_[node_idx].obj.all_links ())
{
- const unsigned *v;
+ const uint32_t *v;
if (subgraph.has (link.objidx, &v))
{
subgraph.set (link.objidx, *v + 1);
@@ -833,7 +891,20 @@ struct graph_t
* parent to the clone. The copy is a shallow copy, objects
* linked from child are not duplicated.
*/
- bool duplicate (unsigned parent_idx, unsigned child_idx)
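+  /* Clones child only when it is shared with parents other than parent_idx;
+   * returns the index to link against: the clone, or child_idx when no copy was needed. */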
+ unsigned duplicate_if_shared (unsigned parent_idx, unsigned child_idx)
+ {
+ unsigned new_idx = duplicate (parent_idx, child_idx);
+ if (new_idx == (unsigned) -1) return child_idx;
+ return new_idx;
+ }
+
+
+ /*
+ * Creates a copy of child and re-assigns the link from
+ * parent to the clone. The copy is a shallow copy, objects
+ * linked from child are not duplicated.
+ */
+ unsigned duplicate (unsigned parent_idx, unsigned child_idx)
{
update_parents ();
@@ -849,7 +920,7 @@ struct graph_t
// to child are from parent.
DEBUG_MSG (SUBSET_REPACK, nullptr, " Not duplicating %d => %d",
parent_idx, child_idx);
- return false;
+ return -1;
}
DEBUG_MSG (SUBSET_REPACK, nullptr, " Duplicating %d => %d",
@@ -869,7 +940,7 @@ struct graph_t
reassign_link (l, parent_idx, clone_idx);
}
- return true;
+ return clone_idx;
}
@@ -922,6 +993,72 @@ struct graph_t
return made_change;
}
+ bool is_fully_connected ()
+ {
+ update_parents();
+
+ if (root().parents)
+ // Root cannot have parents.
+ return false;
+
+ for (unsigned i = 0; i < root_idx (); i++)
+ {
+ if (!vertices_[i].parents)
+ return false;
+ }
+ return true;
+ }
+
+#if 0
+ /*
+ * Saves the current graph to a packed binary format which the repacker fuzzer takes
+ * as a seed.
+ */
+ void save_fuzzer_seed (hb_tag_t tag) const
+ {
+ FILE* f = fopen ("./repacker_fuzzer_seed", "w");
+ fwrite ((void*) &tag, sizeof (tag), 1, f);
+
+ uint16_t num_objects = vertices_.length;
+ fwrite ((void*) &num_objects, sizeof (num_objects), 1, f);
+
+ for (const auto& v : vertices_)
+ {
+ uint16_t blob_size = v.table_size ();
+ fwrite ((void*) &blob_size, sizeof (blob_size), 1, f);
+ fwrite ((const void*) v.obj.head, blob_size, 1, f);
+ }
+
+ uint16_t link_count = 0;
+ for (const auto& v : vertices_)
+ link_count += v.obj.real_links.length;
+
+ fwrite ((void*) &link_count, sizeof (link_count), 1, f);
+
+ typedef struct
+ {
+ uint16_t parent;
+ uint16_t child;
+ uint16_t position;
+ uint8_t width;
+ } link_t;
+
+ for (unsigned i = 0; i < vertices_.length; i++)
+ {
+ for (const auto& l : vertices_[i].obj.real_links)
+ {
+ link_t link {
+ (uint16_t) i, (uint16_t) l.objidx,
+ (uint16_t) l.position, (uint8_t) l.width
+ };
+ fwrite ((void*) &link, sizeof (link), 1, f);
+ }
+ }
+
+ fclose (f);
+ }
+#endif
+
void print_orphaned_nodes ()
{
if (!DEBUG_ENABLED(SUBSET_REPACK)) return;
@@ -930,6 +1067,10 @@ struct graph_t
parents_invalid = true;
update_parents();
+ if (root().parents) {
+ DEBUG_MSG (SUBSET_REPACK, nullptr, "Root node has incoming edges.");
+ }
+
for (unsigned i = 0; i < root_idx (); i++)
{
const auto& v = vertices_[i];
@@ -1046,6 +1187,11 @@ struct graph_t
}
}
+ for (unsigned i = 0; i < vertices_.length; i++)
+ // parents arrays must be accurate or downstream operations like cycle detection
+ // and sorting won't work correctly.
+ check_success (!vertices_[i].parents.in_error ());
+
parents_invalid = false;
}
@@ -1164,7 +1310,7 @@ struct graph_t
{
for (auto& link : vertices_[i].obj.all_links_writer ())
{
- const unsigned *v;
+ const uint32_t *v;
if (!id_map.has (link.objidx, &v)) continue;
if (only_wide && !(link.width == 4 && !link.is_signed)) continue;
diff --git a/thirdparty/harfbuzz/src/graph/gsubgpos-graph.hh b/thirdparty/harfbuzz/src/graph/gsubgpos-graph.hh
index a93e7d1c73..c170638409 100644
--- a/thirdparty/harfbuzz/src/graph/gsubgpos-graph.hh
+++ b/thirdparty/harfbuzz/src/graph/gsubgpos-graph.hh
@@ -131,8 +131,10 @@ struct Lookup : public OT::Lookup
for (unsigned i = 0; i < subTable.len; i++)
{
unsigned subtable_index = c.graph.index_for_offset (this_index, &subTable[i]);
+ unsigned parent_index = this_index;
if (is_ext) {
unsigned ext_subtable_index = subtable_index;
+ parent_index = ext_subtable_index;
ExtensionFormat1<OT::Layout::GSUB_impl::ExtensionSubst>* extension =
(ExtensionFormat1<OT::Layout::GSUB_impl::ExtensionSubst>*)
c.graph.object (ext_subtable_index).head;
@@ -150,9 +152,9 @@ struct Lookup : public OT::Lookup
switch (type)
{
case 2:
- new_sub_tables = split_subtable<PairPos> (c, subtable_index); break;
+ new_sub_tables = split_subtable<PairPos> (c, parent_index, subtable_index); break;
case 4:
- new_sub_tables = split_subtable<MarkBasePos> (c, subtable_index); break;
+ new_sub_tables = split_subtable<MarkBasePos> (c, parent_index, subtable_index); break;
default:
break;
}
@@ -172,13 +174,14 @@ struct Lookup : public OT::Lookup
template<typename T>
hb_vector_t<unsigned> split_subtable (gsubgpos_graph_context_t& c,
+ unsigned parent_idx,
unsigned objidx)
{
T* sub_table = (T*) c.graph.object (objidx).head;
if (!sub_table || !sub_table->sanitize (c.graph.vertices_[objidx]))
return hb_vector_t<unsigned> ();
- return sub_table->split_subtables (c, objidx);
+ return sub_table->split_subtables (c, parent_idx, objidx);
}
void add_sub_tables (gsubgpos_graph_context_t& c,
@@ -198,7 +201,7 @@ struct Lookup : public OT::Lookup
+ new_subtable_count * OT::Offset16::static_size;
char* buffer = (char*) hb_calloc (1, new_size);
c.add_buffer (buffer);
- memcpy (buffer, v.obj.head, v.table_size());
+ hb_memcpy (buffer, v.obj.head, v.table_size());
v.obj.head = buffer;
v.obj.tail = buffer + new_size;
@@ -352,7 +355,7 @@ struct GSTAR : public OT::GSUBGPOS
{
switch (u.version.major) {
case 1: return u.version1.get_lookup_list_offset ();
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
case 2: return u.version2.get_lookup_list_offset ();
#endif
default: return 0;
@@ -371,7 +374,7 @@ struct GSTAR : public OT::GSUBGPOS
{
switch (u.version.major) {
case 1: find_lookups<SmallTypes> (graph, lookups); break;
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
case 2: find_lookups<MediumTypes> (graph, lookups); break;
#endif
}
diff --git a/thirdparty/harfbuzz/src/graph/markbasepos-graph.hh b/thirdparty/harfbuzz/src/graph/markbasepos-graph.hh
index 56fa812406..84ef5f71b9 100644
--- a/thirdparty/harfbuzz/src/graph/markbasepos-graph.hh
+++ b/thirdparty/harfbuzz/src/graph/markbasepos-graph.hh
@@ -112,7 +112,7 @@ struct AnchorMatrix : public OT::Layout::GPOS_impl::AnchorMatrix
auto& child = c.graph.vertices_[child_idx];
child.remove_parent (this_index);
- o.real_links.remove (i);
+ o.real_links.remove_unordered (i);
num_links--;
i--;
}
@@ -209,7 +209,9 @@ struct MarkBasePosFormat1 : public OT::Layout::GPOS_impl::MarkBasePosFormat1_2<S
return vertex_len >= MarkBasePosFormat1::static_size;
}
- hb_vector_t<unsigned> split_subtables (gsubgpos_graph_context_t& c, unsigned this_index)
+ hb_vector_t<unsigned> split_subtables (gsubgpos_graph_context_t& c,
+ unsigned parent_index,
+ unsigned this_index)
{
hb_set_t visited;
@@ -261,7 +263,7 @@ struct MarkBasePosFormat1 : public OT::Layout::GPOS_impl::MarkBasePosFormat1_2<S
split_context_t split_context {
c,
this,
- this_index,
+ c.graph.duplicate_if_shared (parent_index, this_index),
std::move (class_to_info),
c.graph.vertices_[mark_array_id].position_to_index_map (),
};
@@ -365,12 +367,12 @@ struct MarkBasePosFormat1 : public OT::Layout::GPOS_impl::MarkBasePosFormat1_2<S
classCount = count;
- auto mark_coverage = sc.c.graph.as_table<Coverage> (this_index,
- &markCoverage);
+ auto mark_coverage = sc.c.graph.as_mutable_table<Coverage> (this_index,
+ &markCoverage);
if (!mark_coverage) return false;
hb_set_t marks = sc.marks_for (0, count);
auto new_coverage =
- + hb_zip (hb_range (), mark_coverage.table->iter ())
+ + hb_enumerate (mark_coverage.table->iter ())
| hb_filter (marks, hb_first)
| hb_map_retains_sorting (hb_second)
;
@@ -380,17 +382,17 @@ struct MarkBasePosFormat1 : public OT::Layout::GPOS_impl::MarkBasePosFormat1_2<S
return false;
- auto base_array = sc.c.graph.as_table<AnchorMatrix> (this_index,
- &baseArray,
- old_count);
+ auto base_array = sc.c.graph.as_mutable_table<AnchorMatrix> (this_index,
+ &baseArray,
+ old_count);
if (!base_array || !base_array.table->shrink (sc.c,
base_array.index,
old_count,
count))
return false;
- auto mark_array = sc.c.graph.as_table<MarkArray> (this_index,
- &markArray);
+ auto mark_array = sc.c.graph.as_mutable_table<MarkArray> (this_index,
+ &markArray);
if (!mark_array || !mark_array.table->shrink (sc.c,
sc.mark_array_links,
mark_array.index,
@@ -429,7 +431,7 @@ struct MarkBasePosFormat1 : public OT::Layout::GPOS_impl::MarkBasePosFormat1_2<S
if (!mark_coverage) return false;
hb_set_t marks = sc.marks_for (start, end);
auto new_coverage =
- + hb_zip (hb_range (), mark_coverage.table->iter ())
+ + hb_enumerate (mark_coverage.table->iter ())
| hb_filter (marks, hb_first)
| hb_map_retains_sorting (hb_second)
;
@@ -469,12 +471,13 @@ struct MarkBasePosFormat1 : public OT::Layout::GPOS_impl::MarkBasePosFormat1_2<S
struct MarkBasePos : public OT::Layout::GPOS_impl::MarkBasePos
{
hb_vector_t<unsigned> split_subtables (gsubgpos_graph_context_t& c,
+ unsigned parent_index,
unsigned this_index)
{
switch (u.format) {
case 1:
- return ((MarkBasePosFormat1*)(&u.format1))->split_subtables (c, this_index);
-#ifndef HB_NO_BORING_EXPANSION
+ return ((MarkBasePosFormat1*)(&u.format1))->split_subtables (c, parent_index, this_index);
+#ifndef HB_NO_BEYOND_64K
case 2: HB_FALLTHROUGH;
// Don't split 24bit PairPos's.
#endif
@@ -491,7 +494,7 @@ struct MarkBasePos : public OT::Layout::GPOS_impl::MarkBasePos
switch (u.format) {
case 1:
return ((MarkBasePosFormat1*)(&u.format1))->sanitize (vertex);
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
case 2: HB_FALLTHROUGH;
#endif
default:
diff --git a/thirdparty/harfbuzz/src/graph/pairpos-graph.hh b/thirdparty/harfbuzz/src/graph/pairpos-graph.hh
index 976b872329..1c13eb24f9 100644
--- a/thirdparty/harfbuzz/src/graph/pairpos-graph.hh
+++ b/thirdparty/harfbuzz/src/graph/pairpos-graph.hh
@@ -47,7 +47,9 @@ struct PairPosFormat1 : public OT::Layout::GPOS_impl::PairPosFormat1_3<SmallType
min_size + pairSet.get_size () - pairSet.len.get_size();
}
- hb_vector_t<unsigned> split_subtables (gsubgpos_graph_context_t& c, unsigned this_index)
+ hb_vector_t<unsigned> split_subtables (gsubgpos_graph_context_t& c,
+ unsigned parent_index,
+ unsigned this_index)
{
hb_set_t visited;
@@ -81,7 +83,7 @@ struct PairPosFormat1 : public OT::Layout::GPOS_impl::PairPosFormat1_3<SmallType
split_context_t split_context {
c,
this,
- this_index,
+ c.graph.duplicate_if_shared (parent_index, this_index),
};
return actuate_subtable_split<split_context_t> (split_context, split_points);
@@ -125,23 +127,19 @@ struct PairPosFormat1 : public OT::Layout::GPOS_impl::PairPosFormat1_3<SmallType
pairSet.len = count;
c.graph.vertices_[this_index].obj.tail -= (old_count - count) * SmallTypes::size;
- unsigned coverage_id = c.graph.mutable_index_for_offset (this_index, &coverage);
- unsigned coverage_size = c.graph.vertices_[coverage_id].table_size ();
- auto& coverage_v = c.graph.vertices_[coverage_id];
-
- Coverage* coverage_table = (Coverage*) coverage_v.obj.head;
- if (!coverage_table || !coverage_table->sanitize (coverage_v))
- return false;
+ auto coverage = c.graph.as_mutable_table<Coverage> (this_index, &this->coverage);
+ if (!coverage) return false;
+ unsigned coverage_size = coverage.vertex->table_size ();
auto new_coverage =
- + hb_zip (coverage_table->iter (), hb_range ())
+ + hb_zip (coverage.table->iter (), hb_range ())
| hb_filter ([&] (hb_pair_t<unsigned, unsigned> p) {
return p.second < count;
})
| hb_map_retains_sorting (hb_first)
;
- return Coverage::make_coverage (c, new_coverage, coverage_id, coverage_size);
+ return Coverage::make_coverage (c, new_coverage, coverage.index, coverage_size);
}
// Create a new PairPos including PairSet's from start (inclusive) to end (exclusive).
@@ -206,7 +204,9 @@ struct PairPosFormat2 : public OT::Layout::GPOS_impl::PairPosFormat2_4<SmallType
min_size + class1_count * get_class1_record_size ();
}
- hb_vector_t<unsigned> split_subtables (gsubgpos_graph_context_t& c, unsigned this_index)
+ hb_vector_t<unsigned> split_subtables (gsubgpos_graph_context_t& c,
+ unsigned parent_index,
+ unsigned this_index)
{
const unsigned base_size = OT::Layout::GPOS_impl::PairPosFormat2_4<SmallTypes>::min_size;
const unsigned class_def_2_size = size_of (c, this_index, &classDef2);
@@ -287,7 +287,7 @@ struct PairPosFormat2 : public OT::Layout::GPOS_impl::PairPosFormat2_4<SmallType
split_context_t split_context {
c,
this,
- this_index,
+ c.graph.duplicate_if_shared (parent_index, this_index),
class1_record_size,
total_value_len,
value_1_len,
@@ -434,7 +434,7 @@ struct PairPosFormat2 : public OT::Layout::GPOS_impl::PairPosFormat2_4<SmallType
char* start_addr = ((char*)&values[0]) + start * split_context.class1_record_size;
unsigned num_records = end - start;
- memcpy (&pair_pos_prime->values[0],
+ hb_memcpy (&pair_pos_prime->values[0],
start_addr,
num_records * split_context.class1_record_size);
@@ -508,40 +508,37 @@ struct PairPosFormat2 : public OT::Layout::GPOS_impl::PairPosFormat2_4<SmallType
graph.vertices_[split_context.this_index].obj.tail -=
(old_count - count) * split_context.class1_record_size;
- unsigned coverage_id =
- graph.mutable_index_for_offset (split_context.this_index, &coverage);
- unsigned class_def_1_id =
- graph.mutable_index_for_offset (split_context.this_index, &classDef1);
- auto& coverage_v = graph.vertices_[coverage_id];
- auto& class_def_1_v = graph.vertices_[class_def_1_id];
- Coverage* coverage_table = (Coverage*) coverage_v.obj.head;
- ClassDef* class_def_1_table = (ClassDef*) class_def_1_v.obj.head;
- if (!coverage_table
- || !coverage_table->sanitize (coverage_v)
- || !class_def_1_table
- || !class_def_1_table->sanitize (class_def_1_v))
- return false;
+ auto coverage =
+ graph.as_mutable_table<Coverage> (split_context.this_index, &this->coverage);
+ if (!coverage) return false;
+
+ auto class_def_1 =
+ graph.as_mutable_table<ClassDef> (split_context.this_index, &classDef1);
+ if (!class_def_1) return false;
auto klass_map =
- + coverage_table->iter ()
+ + coverage.table->iter ()
| hb_map_retains_sorting ([&] (hb_codepoint_t gid) {
- return hb_pair_t<hb_codepoint_t, hb_codepoint_t> (gid, class_def_1_table->get_class (gid));
+ return hb_pair_t<hb_codepoint_t, hb_codepoint_t> (gid, class_def_1.table->get_class (gid));
})
| hb_filter ([&] (hb_codepoint_t klass) {
return klass < count;
}, hb_second)
;
+ auto new_coverage = + klass_map | hb_map_retains_sorting (hb_first);
if (!Coverage::make_coverage (split_context.c,
- + klass_map | hb_map_retains_sorting (hb_first),
- coverage_id,
- coverage_v.table_size ()))
+ + new_coverage,
+ coverage.index,
+                                  // existing ranges may not be kept, worst case size is a format 1
+ // coverage table.
+ 4 + new_coverage.len() * 2))
return false;
return ClassDef::make_class_def (split_context.c,
+ klass_map,
- class_def_1_id,
- class_def_1_v.table_size ());
+ class_def_1.index,
+ class_def_1.vertex->table_size ());
}
hb_hashmap_t<unsigned, unsigned>
@@ -605,14 +602,16 @@ struct PairPosFormat2 : public OT::Layout::GPOS_impl::PairPosFormat2_4<SmallType
struct PairPos : public OT::Layout::GPOS_impl::PairPos
{
- hb_vector_t<unsigned> split_subtables (gsubgpos_graph_context_t& c, unsigned this_index)
+ hb_vector_t<unsigned> split_subtables (gsubgpos_graph_context_t& c,
+ unsigned parent_index,
+ unsigned this_index)
{
switch (u.format) {
case 1:
- return ((PairPosFormat1*)(&u.format1))->split_subtables (c, this_index);
+ return ((PairPosFormat1*)(&u.format1))->split_subtables (c, parent_index, this_index);
case 2:
- return ((PairPosFormat2*)(&u.format2))->split_subtables (c, this_index);
-#ifndef HB_NO_BORING_EXPANSION
+ return ((PairPosFormat2*)(&u.format2))->split_subtables (c, parent_index, this_index);
+#ifndef HB_NO_BEYOND_64K
case 3: HB_FALLTHROUGH;
case 4: HB_FALLTHROUGH;
// Don't split 24bit PairPos's.
@@ -632,7 +631,7 @@ struct PairPos : public OT::Layout::GPOS_impl::PairPos
return ((PairPosFormat1*)(&u.format1))->sanitize (vertex);
case 2:
return ((PairPosFormat2*)(&u.format2))->sanitize (vertex);
-#ifndef HB_NO_BORING_EXPANSION
+#ifndef HB_NO_BEYOND_64K
case 3: HB_FALLTHROUGH;
case 4: HB_FALLTHROUGH;
#endif
diff --git a/thirdparty/harfbuzz/src/graph/serialize.hh b/thirdparty/harfbuzz/src/graph/serialize.hh
index ecc6cc5aea..d03a61bd19 100644
--- a/thirdparty/harfbuzz/src/graph/serialize.hh
+++ b/thirdparty/harfbuzz/src/graph/serialize.hh
@@ -33,6 +33,23 @@ struct overflow_record_t
{
unsigned parent;
unsigned child;
+
+ bool operator != (const overflow_record_t o) const
+ { return !(*this == o); }
+
+ inline bool operator == (const overflow_record_t& o) const
+ {
+ return parent == o.parent &&
+ child == o.child;
+ }
+
+ inline uint32_t hash () const
+ {
+ uint32_t current = 0;
+ current = current * 31 + hb_hash (parent);
+ current = current * 31 + hb_hash (child);
+ return current;
+ }
};
inline
@@ -94,6 +111,7 @@ will_overflow (graph_t& graph,
if (overflows) overflows->resize (0);
graph.update_positions ();
+ hb_hashmap_t<overflow_record_t*, bool> record_set;
const auto& vertices = graph.vertices_;
for (int parent_idx = vertices.length - 1; parent_idx >= 0; parent_idx--)
{
@@ -109,7 +127,10 @@ will_overflow (graph_t& graph,
overflow_record_t r;
r.parent = parent_idx;
r.child = link.objidx;
+ if (record_set.has(&r)) continue; // don't keep duplicate overflows.
+
overflows->push (r);
+ record_set.set(&r, true);
}
}
@@ -223,7 +244,7 @@ inline hb_blob_t* serialize (const graph_t& graph)
return nullptr;
}
- memcpy (start, vertices[i].obj.head, size);
+ hb_memcpy (start, vertices[i].obj.head, size);
// Only real links needs to be serialized.
for (const auto& link : vertices[i].obj.real_links)
diff --git a/thirdparty/harfbuzz/src/hb-aat-layout-just-table.hh b/thirdparty/harfbuzz/src/hb-aat-layout-just-table.hh
index 57c105967d..8fd3990f88 100644
--- a/thirdparty/harfbuzz/src/hb-aat-layout-just-table.hh
+++ b/thirdparty/harfbuzz/src/hb-aat-layout-just-table.hh
@@ -70,9 +70,9 @@ struct DecompositionAction
ActionSubrecordHeader
header;
- HBFixed lowerLimit; /* If the distance factor is less than this value,
+ F16DOT16 lowerLimit; /* If the distance factor is less than this value,
* then the ligature is decomposed. */
- HBFixed upperLimit; /* If the distance factor is greater than this value,
+ F16DOT16 upperLimit; /* If the distance factor is greater than this value,
* then the ligature is decomposed. */
HBUINT16 order; /* Numerical order in which this ligature will
* be decomposed; you may want infrequent ligatures
@@ -118,7 +118,7 @@ struct ConditionalAddGlyphAction
protected:
ActionSubrecordHeader
header;
- HBFixed substThreshold; /* Distance growth factor (in ems) at which
+ F16DOT16 substThreshold; /* Distance growth factor (in ems) at which
* this glyph is replaced and the growth factor
* recalculated. */
HBGlyphID16 addGlyph; /* Glyph to be added as kashida. If this value is
@@ -146,13 +146,13 @@ struct DuctileGlyphAction
HBUINT32 variationAxis; /* The 4-byte tag identifying the ductile axis.
* This would normally be 0x64756374 ('duct'),
* but you may use any axis the font contains. */
- HBFixed minimumLimit; /* The lowest value for the ductility axis that
+ F16DOT16 minimumLimit; /* The lowest value for the ductility axis that
* still yields an acceptable appearance. Normally
* this will be 1.0. */
- HBFixed noStretchValue; /* This is the default value that corresponds to
+ F16DOT16 noStretchValue; /* This is the default value that corresponds to
* no change in appearance. Normally, this will
* be 1.0. */
- HBFixed maximumLimit; /* The highest value for the ductility axis that
+ F16DOT16 maximumLimit; /* The highest value for the ductility axis that
* still yields an acceptable appearance. */
public:
DEFINE_SIZE_STATIC (22);
@@ -271,14 +271,14 @@ struct JustWidthDeltaEntry
};
protected:
- HBFixed beforeGrowLimit;/* The ratio by which the advance width of the
+ F16DOT16 beforeGrowLimit;/* The ratio by which the advance width of the
* glyph is permitted to grow on the left or top side. */
- HBFixed beforeShrinkLimit;
+ F16DOT16 beforeShrinkLimit;
/* The ratio by which the advance width of the
* glyph is permitted to shrink on the left or top side. */
- HBFixed afterGrowLimit; /* The ratio by which the advance width of the glyph
+ F16DOT16 afterGrowLimit; /* The ratio by which the advance width of the glyph
* is permitted to shrink on the left or top side. */
- HBFixed afterShrinkLimit;
+ F16DOT16 afterShrinkLimit;
/* The ratio by which the advance width of the glyph
* is at most permitted to shrink on the right or
* bottom side. */
diff --git a/thirdparty/harfbuzz/src/hb-aat-layout-morx-table.hh b/thirdparty/harfbuzz/src/hb-aat-layout-morx-table.hh
index aa4ad4cf3c..8b9190d0be 100644
--- a/thirdparty/harfbuzz/src/hb-aat-layout-morx-table.hh
+++ b/thirdparty/harfbuzz/src/hb-aat-layout-morx-table.hh
@@ -131,14 +131,14 @@ struct RearrangementSubtable
hb_glyph_info_t *info = buffer->info;
hb_glyph_info_t buf[4];
- memcpy (buf, info + start, l * sizeof (buf[0]));
- memcpy (buf + 2, info + end - r, r * sizeof (buf[0]));
+ hb_memcpy (buf, info + start, l * sizeof (buf[0]));
+ hb_memcpy (buf + 2, info + end - r, r * sizeof (buf[0]));
if (l != r)
memmove (info + start + r, info + start + l, (end - start - l - r) * sizeof (buf[0]));
- memcpy (info + start, buf + 2, r * sizeof (buf[0]));
- memcpy (info + end - l, buf, l * sizeof (buf[0]));
+ hb_memcpy (info + start, buf + 2, r * sizeof (buf[0]));
+ hb_memcpy (info + end - l, buf, l * sizeof (buf[0]));
if (reverse_l)
{
buf[0] = info[end - 1];
diff --git a/thirdparty/harfbuzz/src/hb-aat-layout-trak-table.hh b/thirdparty/harfbuzz/src/hb-aat-layout-trak-table.hh
index 68bcb2396f..2ba9355b06 100644
--- a/thirdparty/harfbuzz/src/hb-aat-layout-trak-table.hh
+++ b/thirdparty/harfbuzz/src/hb-aat-layout-trak-table.hh
@@ -62,7 +62,7 @@ struct TrackTableEntry
}
protected:
- HBFixed track; /* Track value for this record. */
+ F16DOT16 track; /* Track value for this record. */
NameID trackNameID; /* The 'name' table index for this track.
* (a short word or phrase like "loose"
* or "very tight") */
@@ -82,7 +82,7 @@ struct TrackData
const void *base) const
{
unsigned int sizes = nSizes;
- hb_array_t<const HBFixed> size_table ((base+sizeTable).arrayZ, sizes);
+ hb_array_t<const F16DOT16> size_table ((base+sizeTable).arrayZ, sizes);
float s0 = size_table[idx].to_float ();
float s1 = size_table[idx + 1].to_float ();
@@ -120,7 +120,7 @@ struct TrackData
if (!sizes) return 0.;
if (sizes == 1) return trackTableEntry->get_value (base, 0, sizes);
- hb_array_t<const HBFixed> size_table ((base+sizeTable).arrayZ, sizes);
+ hb_array_t<const F16DOT16> size_table ((base+sizeTable).arrayZ, sizes);
unsigned int size_index;
for (size_index = 0; size_index < sizes - 1; size_index++)
if (size_table[size_index].to_float () >= ptem)
@@ -141,7 +141,7 @@ struct TrackData
protected:
HBUINT16 nTracks; /* Number of separate tracks included in this table. */
HBUINT16 nSizes; /* Number of point sizes included in this table. */
- NNOffset32To<UnsizedArrayOf<HBFixed>>
+ NNOffset32To<UnsizedArrayOf<F16DOT16>>
sizeTable; /* Offset from start of the tracking table to
* Array[nSizes] of size values.. */
UnsizedArrayOf<TrackTableEntry>
diff --git a/thirdparty/harfbuzz/src/hb-aat-layout.cc b/thirdparty/harfbuzz/src/hb-aat-layout.cc
index e06d286ff0..78427b0d5a 100644
--- a/thirdparty/harfbuzz/src/hb-aat-layout.cc
+++ b/thirdparty/harfbuzz/src/hb-aat-layout.cc
@@ -131,6 +131,7 @@ static const hb_aat_feature_mapping_t feature_mappings[] =
{HB_TAG ('p','n','u','m'), HB_AAT_LAYOUT_FEATURE_TYPE_NUMBER_SPACING, HB_AAT_LAYOUT_FEATURE_SELECTOR_PROPORTIONAL_NUMBERS, (hb_aat_layout_feature_selector_t) 4},
{HB_TAG ('p','w','i','d'), HB_AAT_LAYOUT_FEATURE_TYPE_TEXT_SPACING, HB_AAT_LAYOUT_FEATURE_SELECTOR_PROPORTIONAL_TEXT, (hb_aat_layout_feature_selector_t) 7},
{HB_TAG ('q','w','i','d'), HB_AAT_LAYOUT_FEATURE_TYPE_TEXT_SPACING, HB_AAT_LAYOUT_FEATURE_SELECTOR_QUARTER_WIDTH_TEXT, (hb_aat_layout_feature_selector_t) 7},
+ {HB_TAG ('r','l','i','g'), HB_AAT_LAYOUT_FEATURE_TYPE_LIGATURES, HB_AAT_LAYOUT_FEATURE_SELECTOR_REQUIRED_LIGATURES_ON, HB_AAT_LAYOUT_FEATURE_SELECTOR_REQUIRED_LIGATURES_OFF},
{HB_TAG ('r','u','b','y'), HB_AAT_LAYOUT_FEATURE_TYPE_RUBY_KANA, HB_AAT_LAYOUT_FEATURE_SELECTOR_RUBY_KANA_ON, HB_AAT_LAYOUT_FEATURE_SELECTOR_RUBY_KANA_OFF},
{HB_TAG ('s','i','n','f'), HB_AAT_LAYOUT_FEATURE_TYPE_VERTICAL_POSITION, HB_AAT_LAYOUT_FEATURE_SELECTOR_SCIENTIFIC_INFERIORS, HB_AAT_LAYOUT_FEATURE_SELECTOR_NORMAL_POSITION},
{HB_TAG ('s','m','c','p'), HB_AAT_LAYOUT_FEATURE_TYPE_LOWER_CASE, HB_AAT_LAYOUT_FEATURE_SELECTOR_LOWER_CASE_SMALL_CAPS, HB_AAT_LAYOUT_FEATURE_SELECTOR_DEFAULT_LOWER_CASE},
@@ -288,7 +289,7 @@ is_deleted_glyph (const hb_glyph_info_t *info)
void
hb_aat_layout_remove_deleted_glyphs (hb_buffer_t *buffer)
{
- hb_ot_layout_delete_glyphs_inplace (buffer, is_deleted_glyph);
+ buffer->delete_glyphs_inplace (is_deleted_glyph);
}
/**
diff --git a/thirdparty/harfbuzz/src/hb-aat-map.hh b/thirdparty/harfbuzz/src/hb-aat-map.hh
index c914f58d70..d0ee7d672c 100644
--- a/thirdparty/harfbuzz/src/hb-aat-map.hh
+++ b/thirdparty/harfbuzz/src/hb-aat-map.hh
@@ -38,7 +38,7 @@ struct hb_aat_map_t
void init ()
{
- memset (this, 0, sizeof (*this));
+ hb_memset (this, 0, sizeof (*this));
chain_flags.init ();
}
void fini () { chain_flags.fini (); }
diff --git a/thirdparty/harfbuzz/src/hb-algs.hh b/thirdparty/harfbuzz/src/hb-algs.hh
index cc37c073da..d85a4afe10 100644
--- a/thirdparty/harfbuzz/src/hb-algs.hh
+++ b/thirdparty/harfbuzz/src/hb-algs.hh
@@ -236,17 +236,6 @@ struct
template <typename T> constexpr auto
impl (const T& v, hb_priority<1>) const HB_RETURN (uint32_t, hb_deref (v).hash ())
- template <typename T> constexpr uint32_t
- impl (const hb::shared_ptr<T>& v, hb_priority<1>) const
- {
- return v.get () ? v.get ()->hash () : 0;
- }
- template <typename T> constexpr uint32_t
- impl (const hb::unique_ptr<T>& v, hb_priority<1>) const
- {
- return v.get () ? v.get ()->hash () : 0;
- }
-
template <typename T> constexpr auto
impl (const T& v, hb_priority<0>) const HB_RETURN (uint32_t, std::hash<hb_decay<decltype (hb_deref (v))>>{} (hb_deref (v)))
@@ -495,6 +484,17 @@ struct
}
HB_FUNCOBJ (hb_equal);
+struct
+{
+ template <typename T> void
+ operator () (T& a, T& b) const
+ {
+ using std::swap; // allow ADL
+ swap (a, b);
+ }
+}
+HB_FUNCOBJ (hb_swap);
+
template <typename T1, typename T2>
struct hb_pair_t
@@ -507,7 +507,7 @@ struct hb_pair_t
hb_enable_if (std::is_default_constructible<U1>::value &&
std::is_default_constructible<U2>::value)>
hb_pair_t () : first (), second () {}
- hb_pair_t (T1 a, T2 b) : first (a), second (b) {}
+ hb_pair_t (T1 a, T2 b) : first (std::forward<T1> (a)), second (std::forward<T2> (b)) {}
template <typename Q1, typename Q2,
hb_enable_if (hb_is_convertible (T1, Q1) &&
@@ -524,6 +524,25 @@ struct hb_pair_t
bool operator > (const pair_t& o) const { return first > o.first || (first == o.first && second > o.second); }
bool operator <= (const pair_t& o) const { return !(*this > o); }
+ static int cmp (const void *pa, const void *pb)
+ {
+ pair_t *a = (pair_t *) pa;
+ pair_t *b = (pair_t *) pb;
+
+ if (a->first < b->first) return -1;
+ if (a->first > b->first) return +1;
+ if (a->second < b->second) return -1;
+ if (a->second > b->second) return +1;
+ return 0;
+ }
+
+ friend void swap (hb_pair_t& a, hb_pair_t& b)
+ {
+ hb_swap (a.first, b.first);
+ hb_swap (a.second, b.second);
+ }
+
+
T1 first;
T2 second;
};
@@ -570,17 +589,6 @@ struct
}
HB_FUNCOBJ (hb_clamp);
-struct
-{
- template <typename T> void
- operator () (T& a, T& b) const
- {
- using std::swap; // allow ADL
- swap (a, b);
- }
-}
-HB_FUNCOBJ (hb_swap);
-
/*
* Bithacks.
*/
@@ -849,19 +857,14 @@ hb_in_range (T u, T lo, T hi)
return (T)(u - lo) <= (T)(hi - lo);
}
template <typename T> static inline bool
-hb_in_ranges (T u, T lo1, T hi1, T lo2, T hi2)
+hb_in_ranges (T u, T lo1, T hi1)
{
- return hb_in_range (u, lo1, hi1) || hb_in_range (u, lo2, hi2);
+ return hb_in_range (u, lo1, hi1);
}
-template <typename T> static inline bool
-hb_in_ranges (T u, T lo1, T hi1, T lo2, T hi2, T lo3, T hi3)
+template <typename T, typename ...Ts> static inline bool
+hb_in_ranges (T u, T lo1, T hi1, Ts... ds)
{
- return hb_in_range (u, lo1, hi1) || hb_in_range (u, lo2, hi2) || hb_in_range (u, lo3, hi3);
-}
-template <typename T> static inline bool
-hb_in_ranges (T u, T lo1, T hi1, T lo2, T hi2, T lo3, T hi3, T lo4, T hi4)
-{
- return hb_in_range (u, lo1, hi1) || hb_in_range (u, lo2, hi2) || hb_in_range (u, lo3, hi3) || hb_in_range (u, lo4, hi4);
+ return hb_in_range<T> (u, lo1, hi1) || hb_in_ranges<T> (u, ds...);
}
@@ -869,10 +872,18 @@ hb_in_ranges (T u, T lo1, T hi1, T lo2, T hi2, T lo3, T hi3, T lo4, T hi4)
* Overflow checking.
*/
-/* Consider __builtin_mul_overflow use here also */
static inline bool
-hb_unsigned_mul_overflows (unsigned int count, unsigned int size)
+hb_unsigned_mul_overflows (unsigned int count, unsigned int size, unsigned *result = nullptr)
{
+#if (defined(__GNUC__) && (__GNUC__ >= 4)) || defined(__clang__)
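+  /* Prefer the compiler builtin: it detects overflow exactly and writes the (possibly wrapped) product to *result. */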
+ unsigned stack_result;
+ if (!result)
+ result = &stack_result;
+ return __builtin_mul_overflow (count, size, result);
+#endif
+
+ if (result)
+ *result = count * size;
return (size > 0) && (count >= ((unsigned int) -1) / size);
}
@@ -1164,9 +1175,12 @@ hb_qsort (void *base, size_t nel, size_t width,
}
-template <typename T, typename T2, typename T3> static inline void
-hb_stable_sort (T *array, unsigned int len, int(*compar)(const T2 *, const T2 *), T3 *array2)
+template <typename T, typename T2, typename T3 = int> static inline void
+hb_stable_sort (T *array, unsigned int len, int(*compar)(const T2 *, const T2 *), T3 *array2 = nullptr)
{
+ static_assert (hb_is_trivially_copy_assignable (T), "");
+ static_assert (hb_is_trivially_copy_assignable (T3), "");
+
for (unsigned int i = 1; i < len; i++)
{
unsigned int j = i;
@@ -1189,12 +1203,6 @@ hb_stable_sort (T *array, unsigned int len, int(*compar)(const T2 *, const T2 *)
}
}
-template <typename T> static inline void
-hb_stable_sort (T *array, unsigned int len, int(*compar)(const T *, const T *))
-{
- hb_stable_sort (array, len, compar, (int *) nullptr);
-}
-
static inline hb_bool_t
hb_codepoint_parse (const char *s, unsigned int len, int base, hb_codepoint_t *out)
{
@@ -1322,47 +1330,4 @@ struct
HB_FUNCOBJ (hb_dec);
-/* Compiler-assisted vectorization. */
-
-/* Type behaving similar to vectorized vars defined using __attribute__((vector_size(...))),
- * basically a fixed-size bitset. */
-template <typename elt_t, unsigned int byte_size>
-struct hb_vector_size_t
-{
- elt_t& operator [] (unsigned int i) { return v[i]; }
- const elt_t& operator [] (unsigned int i) const { return v[i]; }
-
- void clear (unsigned char v = 0) { memset (this, v, sizeof (*this)); }
-
- template <typename Op>
- hb_vector_size_t process (const Op& op) const
- {
- hb_vector_size_t r;
- for (unsigned int i = 0; i < ARRAY_LENGTH (v); i++)
- r.v[i] = op (v[i]);
- return r;
- }
- template <typename Op>
- hb_vector_size_t process (const Op& op, const hb_vector_size_t &o) const
- {
- hb_vector_size_t r;
- for (unsigned int i = 0; i < ARRAY_LENGTH (v); i++)
- r.v[i] = op (v[i], o.v[i]);
- return r;
- }
- hb_vector_size_t operator | (const hb_vector_size_t &o) const
- { return process (hb_bitwise_or, o); }
- hb_vector_size_t operator & (const hb_vector_size_t &o) const
- { return process (hb_bitwise_and, o); }
- hb_vector_size_t operator ^ (const hb_vector_size_t &o) const
- { return process (hb_bitwise_xor, o); }
- hb_vector_size_t operator ~ () const
- { return process (hb_bitwise_neg); }
-
- private:
- static_assert (0 == byte_size % sizeof (elt_t), "");
- elt_t v[byte_size / sizeof (elt_t)];
-};
-
-
#endif /* HB_ALGS_HH */
diff --git a/thirdparty/harfbuzz/src/hb-array.hh b/thirdparty/harfbuzz/src/hb-array.hh
index 5884007c19..17562bc336 100644
--- a/thirdparty/harfbuzz/src/hb-array.hh
+++ b/thirdparty/harfbuzz/src/hb-array.hh
@@ -100,10 +100,18 @@ struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
/* Ouch. The operator== compares the contents of the array. For range-based for loops,
* it's best if we can just compare arrayZ, though comparing contents is still fast,
* but also would require that Type has operator==. As such, we optimize this operator
- * for range-based for loop and just compare arrayZ and length. */
+ * for range-based for loop and just compare arrayZ and length.
+ *
+ * The above comment is outdated now because we implemented separate begin/end to
+   * The above comment is outdated now that we have added separate begin ()/end ()
+   * to the objects that previously used hb_array_t for range-based loops. */
{ return this->arrayZ != o.arrayZ || this->length != o.length; }
+ /* Faster range-based for loop without bounds-check. */
+ Type *begin () const { return arrayZ; }
+ Type *end () const { return arrayZ + length; }
+
+
/* Extra operators.
*/
Type * operator & () const { return arrayZ; }
@@ -112,11 +120,11 @@ struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
HB_INTERNAL bool operator == (const hb_array_t &o) const;
- uint32_t hash () const {
+ uint32_t hash () const
+ {
uint32_t current = 0;
- for (unsigned int i = 0; i < this->length; i++) {
- current = current * 31 + hb_hash (this->arrayZ[i]);
- }
+ for (auto &v : *this)
+ current = current * 31 + hb_hash (v);
return current;
}
@@ -184,23 +192,18 @@ struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
hb_sorted_array_t<Type> qsort (int (*cmp_)(const void*, const void*))
{
+ //static_assert (hb_enable_if (hb_is_trivially_copy_assignable(Type)), "");
if (likely (length))
hb_qsort (arrayZ, length, this->get_item_size (), cmp_);
return hb_sorted_array_t<Type> (*this);
}
hb_sorted_array_t<Type> qsort ()
{
+ //static_assert (hb_enable_if (hb_is_trivially_copy_assignable(Type)), "");
if (likely (length))
hb_qsort (arrayZ, length, this->get_item_size (), Type::cmp);
return hb_sorted_array_t<Type> (*this);
}
- void qsort (unsigned int start, unsigned int end)
- {
- end = hb_min (end, length);
- assert (start <= end);
- if (likely (start < end))
- hb_qsort (arrayZ + start, end - start, this->get_item_size (), Type::cmp);
- }
/*
* Other methods.
@@ -262,17 +265,31 @@ struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
void fini ()
{ hb_free ((void *) arrayZ); arrayZ = nullptr; length = 0; }
- template <typename hb_serialize_context_t>
+ template <typename hb_serialize_context_t,
+ typename U = Type,
+ hb_enable_if (!(sizeof (U) < sizeof (long long) && hb_is_trivially_copy_assignable(hb_decay<Type>)))>
hb_array_t copy (hb_serialize_context_t *c) const
{
TRACE_SERIALIZE (this);
auto* out = c->start_embed (arrayZ);
- if (unlikely (!c->extend_size (out, get_size ()))) return_trace (hb_array_t ());
+ if (unlikely (!c->extend_size (out, get_size (), false))) return_trace (hb_array_t ());
for (unsigned i = 0; i < length; i++)
out[i] = arrayZ[i]; /* TODO: add version that calls c->copy() */
return_trace (hb_array_t (out, length));
}
+ template <typename hb_serialize_context_t,
+ typename U = Type,
+ hb_enable_if (sizeof (U) < sizeof (long long) && hb_is_trivially_copy_assignable(hb_decay<Type>))>
+ hb_array_t copy (hb_serialize_context_t *c) const
+ {
+ TRACE_SERIALIZE (this);
+ auto* out = c->start_embed (arrayZ);
+ if (unlikely (!c->extend_size (out, get_size (), false))) return_trace (hb_array_t ());
+ hb_memcpy (out, arrayZ, get_size ());
+ return_trace (hb_array_t (out, length));
+ }
+
template <typename hb_sanitize_context_t>
bool sanitize (hb_sanitize_context_t *c) const
{ return c->check_array (arrayZ, length); }
@@ -295,8 +312,8 @@ hb_array (T (&array_)[length_])
template <typename Type>
struct hb_sorted_array_t :
- hb_iter_t<hb_sorted_array_t<Type>, Type&>,
- hb_array_t<Type>
+ hb_array_t<Type>,
+ hb_iter_t<hb_sorted_array_t<Type>, Type&>
{
typedef hb_iter_t<hb_sorted_array_t, Type&> iter_base_t;
HB_ITER_USING (iter_base_t);
@@ -316,8 +333,8 @@ struct hb_sorted_array_t :
template <typename U,
hb_enable_if (hb_is_cr_convertible(U, Type))>
constexpr hb_sorted_array_t (const hb_array_t<U> &o) :
- hb_iter_t<hb_sorted_array_t, Type&> (),
- hb_array_t<Type> (o) {}
+ hb_array_t<Type> (o),
+ hb_iter_t<hb_sorted_array_t, Type&> () {}
template <typename U,
hb_enable_if (hb_is_cr_convertible(U, Type))>
hb_sorted_array_t& operator = (const hb_array_t<U> &o)
@@ -329,6 +346,11 @@ struct hb_sorted_array_t :
bool operator != (const hb_sorted_array_t& o) const
{ return this->arrayZ != o.arrayZ || this->length != o.length; }
+ /* Faster range-based for loop without bounds-check. */
+ Type *begin () const { return this->arrayZ; }
+ Type *end () const { return this->arrayZ + this->length; }
+
+
hb_sorted_array_t sub_array (unsigned int start_offset, unsigned int *seg_count /* IN/OUT */) const
{ return hb_sorted_array_t (((const hb_array_t<Type> *) (this))->sub_array (start_offset, seg_count)); }
hb_sorted_array_t sub_array (unsigned int start_offset, unsigned int seg_count) const
@@ -421,18 +443,42 @@ inline bool hb_array_t<const unsigned char>::operator == (const hb_array_t<const
return 0 == hb_memcmp (arrayZ, o.arrayZ, length);
}
+
+/* Specialize hash() for byte arrays. */
+
template <>
-inline uint32_t hb_array_t<const char>::hash () const {
+inline uint32_t hb_array_t<const char>::hash () const
+{
uint32_t current = 0;
- for (unsigned int i = 0; i < this->length; i++)
- current = current * 31 + (uint32_t) (this->arrayZ[i] * 2654435761u);
+ unsigned i = 0;
+
+#if defined(__OPTIMIZE__) && !defined(HB_NO_PACKED) && \
+ ((defined(__GNUC__) && __GNUC__ >= 5) || defined(__clang__))
+ struct __attribute__((packed)) packed_uint32_t { uint32_t v; };
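+  /* Hash four bytes at a time using an unaligned (packed) 32-bit load. */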
+ for (; i + 4 <= this->length; i += 4)
+ current = current * 31 + hb_hash ((uint32_t) ((packed_uint32_t *) &this->arrayZ[i])->v);
+#endif
+
+ for (; i < this->length; i++)
+ current = current * 31 + hb_hash (this->arrayZ[i]);
return current;
}
+
template <>
-inline uint32_t hb_array_t<const unsigned char>::hash () const {
+inline uint32_t hb_array_t<const unsigned char>::hash () const
+{
uint32_t current = 0;
- for (unsigned int i = 0; i < this->length; i++)
- current = current * 31 + (uint32_t) (this->arrayZ[i] * 2654435761u);
+ unsigned i = 0;
+
+#if defined(__OPTIMIZE__) && !defined(HB_NO_PACKED) && \
+ ((defined(__GNUC__) && __GNUC__ >= 5) || defined(__clang__))
+ struct __attribute__((packed)) packed_uint32_t { uint32_t v; };
+ for (; i + 4 <= this->length; i += 4)
+ current = current * 31 + hb_hash ((uint32_t) ((packed_uint32_t *) &this->arrayZ[i])->v);
+#endif
+
+ for (; i < this->length; i++)
+ current = current * 31 + hb_hash (this->arrayZ[i]);
return current;
}
diff --git a/thirdparty/harfbuzz/src/hb-bit-page.hh b/thirdparty/harfbuzz/src/hb-bit-page.hh
index 95ae1b7bf9..11987054f8 100644
--- a/thirdparty/harfbuzz/src/hb-bit-page.hh
+++ b/thirdparty/harfbuzz/src/hb-bit-page.hh
@@ -30,6 +30,53 @@
#include "hb.hh"
+
+/* Compiler-assisted vectorization. */
+
+/* Type behaving similar to vectorized vars defined using __attribute__((vector_size(...))),
+ * basically a fixed-size bitset. */
+template <typename elt_t, unsigned int byte_size>
+struct hb_vector_size_t
+{
+ elt_t& operator [] (unsigned int i) { return v[i]; }
+ const elt_t& operator [] (unsigned int i) const { return v[i]; }
+
+ void clear (unsigned char v = 0) { hb_memset (this, v, sizeof (*this)); }
+
+ template <typename Op>
+ hb_vector_size_t process (const Op& op) const
+ {
+ hb_vector_size_t r;
+ for (unsigned int i = 0; i < ARRAY_LENGTH (v); i++)
+ r.v[i] = op (v[i]);
+ return r;
+ }
+ template <typename Op>
+ hb_vector_size_t process (const Op& op, const hb_vector_size_t &o) const
+ {
+ hb_vector_size_t r;
+ for (unsigned int i = 0; i < ARRAY_LENGTH (v); i++)
+ r.v[i] = op (v[i], o.v[i]);
+ return r;
+ }
+ hb_vector_size_t operator | (const hb_vector_size_t &o) const
+ { return process (hb_bitwise_or, o); }
+ hb_vector_size_t operator & (const hb_vector_size_t &o) const
+ { return process (hb_bitwise_and, o); }
+ hb_vector_size_t operator ^ (const hb_vector_size_t &o) const
+ { return process (hb_bitwise_xor, o); }
+ hb_vector_size_t operator ~ () const
+ { return process (hb_bitwise_neg); }
+
+ hb_array_t<const elt_t> iter () const
+ { return hb_array (v); }
+
+ private:
+ static_assert (0 == byte_size % sizeof (elt_t), "");
+ elt_t v[byte_size / sizeof (elt_t)];
+};
+
+
struct hb_bit_page_t
{
void init0 () { v.clear (); }
@@ -40,17 +87,17 @@ struct hb_bit_page_t
bool is_empty () const
{
- for (unsigned i = 0; i < len (); i++)
- if (v[i])
- return false;
- return true;
+ return
+ + hb_iter (v)
+ | hb_none
+ ;
}
uint32_t hash () const
{
- uint32_t h = 0;
- for (unsigned i = 0; i < len (); i++)
- h = h * 31 + hb_hash (v[i]);
- return h;
+ return
+ + hb_iter (v)
+ | hb_reduce ([] (uint32_t h, const elt_t &_) { return h * 31 + hb_hash (_); }, (uint32_t) 0u)
+ ;
}
void add (hb_codepoint_t g) { elt (g) |= mask (g); }
@@ -69,7 +116,7 @@ struct hb_bit_page_t
*la |= ~(mask (a) - 1);
la++;
- memset (la, 0xff, (char *) lb - (char *) la);
+ hb_memset (la, 0xff, (char *) lb - (char *) la);
*lb |= ((mask (b) << 1) - 1);
}
@@ -85,7 +132,7 @@ struct hb_bit_page_t
*la &= mask (a) - 1;
la++;
- memset (la, 0, (char *) lb - (char *) la);
+ hb_memset (la, 0, (char *) lb - (char *) la);
*lb &= ~((mask (b) << 1) - 1);
}
@@ -101,13 +148,13 @@ struct hb_bit_page_t
hb_codepoint_t *p,
unsigned int size) const
{
- unsigned int start_v = start_value >> ELT_BITS_LOG_2;
+ unsigned int start_v = start_value / ELT_BITS;
unsigned int start_bit = start_value & ELT_MASK;
unsigned int count = 0;
for (unsigned i = start_v; i < len () && count < size; i++)
{
elt_t bits = v[i];
- uint32_t v_base = base | (i << ELT_BITS_LOG_2);
+ uint32_t v_base = base | (i * ELT_BITS);
for (unsigned int j = start_bit; j < ELT_BITS && count < size; j++)
{
if ((elt_t(1) << j) & bits) {
@@ -132,13 +179,13 @@ struct hb_bit_page_t
unsigned int size,
hb_codepoint_t *next_value) const
{
- unsigned int start_v = start_value >> ELT_BITS_LOG_2;
+ unsigned int start_v = start_value / ELT_BITS;
unsigned int start_bit = start_value & ELT_MASK;
unsigned int count = 0;
for (unsigned i = start_v; i < len () && count < size; i++)
{
elt_t bits = v[i];
- uint32_t v_offset = i << ELT_BITS_LOG_2;
+ uint32_t v_offset = i * ELT_BITS;
for (unsigned int j = start_bit; j < ELT_BITS && count < size; j++)
{
if ((elt_t(1) << j) & bits)
@@ -161,7 +208,10 @@ struct hb_bit_page_t
bool is_equal (const hb_bit_page_t &other) const
{
- return 0 == hb_memcmp (&v, &other.v, sizeof (v));
+ for (unsigned i = 0; i < len (); i++)
+ if (v[i] != other.v[i])
+ return false;
+ return true;
}
bool is_subset (const hb_bit_page_t &larger_page) const
{
@@ -173,10 +223,10 @@ struct hb_bit_page_t
unsigned int get_population () const
{
- unsigned int pop = 0;
- for (unsigned int i = 0; i < len (); i++)
- pop += hb_popcount (v[i]);
- return pop;
+ return
+ + hb_iter (v)
+ | hb_reduce ([] (unsigned pop, const elt_t &_) { return pop + hb_popcount (_); }, 0u)
+ ;
}
bool next (hb_codepoint_t *codepoint) const
@@ -262,8 +312,6 @@ struct hb_bit_page_t
typedef hb_vector_size_t<elt_t, PAGE_BITS / 8> vector_t;
static constexpr unsigned ELT_BITS = sizeof (elt_t) * 8;
- static constexpr unsigned ELT_BITS_LOG_2 = 6;
- static_assert (1 << ELT_BITS_LOG_2 == ELT_BITS, "");
static constexpr unsigned ELT_MASK = ELT_BITS - 1;
static constexpr unsigned BITS = sizeof (vector_t) * 8;
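
hb_vector_size_t, added above, gives hb_bit_page_t a fixed-size bitset made of machine words whose element-wise operators are plain loops the compiler can auto-vectorize. A reduced stand-alone sketch of that shape (names and the 512-bit size are for illustration only):

    #include <cstdint>

    // Word-array bitset with element-wise operators written as simple loops.
    template <typename elt_t, unsigned byte_size>
    struct fixed_bitset_t
    {
      static_assert (byte_size % sizeof (elt_t) == 0, "");
      static constexpr unsigned N = byte_size / sizeof (elt_t);
      static constexpr unsigned ELT_BITS = 8 * sizeof (elt_t);

      elt_t v[N] = {};

      void add (unsigned bit)       { v[bit / ELT_BITS] |= elt_t (1) << (bit % ELT_BITS); }
      bool get (unsigned bit) const { return (v[bit / ELT_BITS] >> (bit % ELT_BITS)) & 1; }

      // Loops like this are what the compiler is expected to vectorize.
      fixed_bitset_t operator | (const fixed_bitset_t &o) const
      {
        fixed_bitset_t r;
        for (unsigned i = 0; i < N; i++) r.v[i] = v[i] | o.v[i];
        return r;
      }
    };

    using page_bits_t = fixed_bitset_t<uint64_t, 64>;  // 64 bytes = 512 bits, for illustration
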
diff --git a/thirdparty/harfbuzz/src/hb-bit-set-invertible.hh b/thirdparty/harfbuzz/src/hb-bit-set-invertible.hh
index 27fb0732ea..ff8aecc60c 100644
--- a/thirdparty/harfbuzz/src/hb-bit-set-invertible.hh
+++ b/thirdparty/harfbuzz/src/hb-bit-set-invertible.hh
@@ -123,10 +123,8 @@ struct hb_bit_set_invertible_t
bool get (hb_codepoint_t g) const { return s.get (g) ^ inverted; }
/* Has interface. */
- static constexpr bool SENTINEL = false;
- typedef bool value_t;
- value_t operator [] (hb_codepoint_t k) const { return get (k); }
- bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; }
+ bool operator [] (hb_codepoint_t k) const { return get (k); }
+ bool has (hb_codepoint_t k) const { return (*this)[k]; }
/* Predicate. */
bool operator () (hb_codepoint_t k) const { return has (k); }
diff --git a/thirdparty/harfbuzz/src/hb-bit-set.hh b/thirdparty/harfbuzz/src/hb-bit-set.hh
index a63887efda..8de6e037fb 100644
--- a/thirdparty/harfbuzz/src/hb-bit-set.hh
+++ b/thirdparty/harfbuzz/src/hb-bit-set.hh
@@ -85,10 +85,10 @@ struct hb_bit_set_t
void err () { if (successful) successful = false; } /* TODO Remove */
bool in_error () const { return !successful; }
- bool resize (unsigned int count)
+ bool resize (unsigned int count, bool clear = true)
{
if (unlikely (!successful)) return false;
- if (unlikely (!pages.resize (count) || !page_map.resize (count)))
+ if (unlikely (!pages.resize (count, clear) || !page_map.resize (count, clear)))
{
pages.resize (page_map.length);
successful = false;
@@ -330,10 +330,8 @@ struct hb_bit_set_t
}
/* Has interface. */
- static constexpr bool SENTINEL = false;
- typedef bool value_t;
- value_t operator [] (hb_codepoint_t k) const { return get (k); }
- bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; }
+ bool operator [] (hb_codepoint_t k) const { return get (k); }
+ bool has (hb_codepoint_t k) const { return (*this)[k]; }
/* Predicate. */
bool operator () (hb_codepoint_t k) const { return has (k); }
@@ -352,7 +350,7 @@ struct hb_bit_set_t
{
if (unlikely (!successful)) return;
unsigned int count = other.pages.length;
- if (unlikely (!resize (count)))
+ if (unlikely (!resize (count, false)))
return;
population = other.population;
@@ -391,7 +389,7 @@ struct hb_bit_set_t
bool is_subset (const hb_bit_set_t &larger_set) const
{
if (has_population () && larger_set.has_population () &&
- population != larger_set.population)
+ population > larger_set.population)
return false;
uint32_t spi = 0;
@@ -540,21 +538,21 @@ struct hb_bit_set_t
b = nb;
for (; a && b; )
{
- if (page_map[a - 1].major == other.page_map[b - 1].major)
+ if (page_map.arrayZ[a - 1].major == other.page_map.arrayZ[b - 1].major)
{
a--;
b--;
count--;
- page_map[count] = page_map[a];
+ page_map.arrayZ[count] = page_map.arrayZ[a];
page_at (count).v = op (page_at (a).v, other.page_at (b).v);
}
- else if (page_map[a - 1].major > other.page_map[b - 1].major)
+ else if (page_map.arrayZ[a - 1].major > other.page_map.arrayZ[b - 1].major)
{
a--;
if (passthru_left)
{
count--;
- page_map[count] = page_map[a];
+ page_map.arrayZ[count] = page_map.arrayZ[a];
}
}
else
@@ -563,8 +561,8 @@ struct hb_bit_set_t
if (passthru_right)
{
count--;
- page_map[count].major = other.page_map[b].major;
- page_map[count].index = next_page++;
+ page_map.arrayZ[count].major = other.page_map.arrayZ[b].major;
+ page_map.arrayZ[count].index = next_page++;
page_at (count).v = other.page_at (b).v;
}
}
@@ -574,15 +572,15 @@ struct hb_bit_set_t
{
a--;
count--;
- page_map[count] = page_map [a];
+ page_map.arrayZ[count] = page_map.arrayZ[a];
}
if (passthru_right)
while (b)
{
b--;
count--;
- page_map[count].major = other.page_map[b].major;
- page_map[count].index = next_page++;
+ page_map.arrayZ[count].major = other.page_map.arrayZ[b].major;
+ page_map.arrayZ[count].index = next_page++;
page_at (count).v = other.page_at (b).v;
}
assert (!count);
@@ -605,8 +603,6 @@ struct hb_bit_set_t
bool next (hb_codepoint_t *codepoint) const
{
- // TODO: this should be merged with prev() as both implementations
- // are very similar.
if (unlikely (*codepoint == INVALID)) {
*codepoint = get_min ();
return *codepoint != INVALID;
@@ -640,7 +636,7 @@ struct hb_bit_set_t
for (; i < page_map.length; i++)
{
- const page_map_t &current = page_map.arrayZ[i];
+ const page_map_t &current = page_map_array[i];
hb_codepoint_t m = pages_array[current.index].get_min ();
if (m != INVALID)
{
@@ -663,21 +659,21 @@ struct hb_bit_set_t
page_map_t map = {get_major (*codepoint), 0};
unsigned int i;
page_map.bfind (map, &i, HB_NOT_FOUND_STORE_CLOSEST);
- if (i < page_map.length && page_map[i].major == map.major)
+ if (i < page_map.length && page_map.arrayZ[i].major == map.major)
{
- if (pages[page_map[i].index].previous (codepoint))
+ if (pages[page_map.arrayZ[i].index].previous (codepoint))
{
- *codepoint += page_map[i].major * page_t::PAGE_BITS;
+ *codepoint += page_map.arrayZ[i].major * page_t::PAGE_BITS;
return true;
}
}
i--;
for (; (int) i >= 0; i--)
{
- hb_codepoint_t m = pages[page_map[i].index].get_max ();
+ hb_codepoint_t m = pages.arrayZ[page_map.arrayZ[i].index].get_max ();
if (m != INVALID)
{
- *codepoint = page_map[i].major * page_t::PAGE_BITS + m;
+ *codepoint = page_map.arrayZ[i].major * page_t::PAGE_BITS + m;
return true;
}
}
@@ -905,7 +901,7 @@ struct hb_bit_set_t
{
auto &cached_page = page_map.arrayZ[i];
if (cached_page.major == major)
- return &pages[cached_page.index];
+ return &pages.arrayZ[cached_page.index];
}
page_map_t map = {major, pages.length};
@@ -917,15 +913,15 @@ struct hb_bit_set_t
if (unlikely (!resize (pages.length + 1)))
return nullptr;
- pages[map.index].init0 ();
- memmove (page_map + i + 1,
- page_map + i,
+ pages.arrayZ[map.index].init0 ();
+ memmove (page_map.arrayZ + i + 1,
+ page_map.arrayZ + i,
(page_map.length - 1 - i) * page_map.item_size);
page_map[i] = map;
}
last_page_lookup = i;
- return &pages[page_map[i].index];
+ return &pages.arrayZ[page_map.arrayZ[i].index];
}
const page_t *page_for (hb_codepoint_t g) const
{
@@ -939,7 +935,7 @@ struct hb_bit_set_t
{
auto &cached_page = page_map.arrayZ[i];
if (cached_page.major == major)
- return &pages[cached_page.index];
+ return &pages.arrayZ[cached_page.index];
}
page_map_t key = {major};
@@ -947,10 +943,18 @@ struct hb_bit_set_t
return nullptr;
last_page_lookup = i;
- return &pages[page_map[i].index];
+ return &pages.arrayZ[page_map[i].index];
+ }
+ page_t &page_at (unsigned int i)
+ {
+ assert (i < page_map.length);
+ return pages.arrayZ[page_map.arrayZ[i].index];
+ }
+ const page_t &page_at (unsigned int i) const
+ {
+ assert (i < page_map.length);
+ return pages.arrayZ[page_map.arrayZ[i].index];
}
- page_t &page_at (unsigned int i) { return pages[page_map[i].index]; }
- const page_t &page_at (unsigned int i) const { return pages[page_map[i].index]; }
unsigned int get_major (hb_codepoint_t g) const { return g >> page_t::PAGE_BITS_LOG_2; }
unsigned int page_remainder (hb_codepoint_t g) const { return g & page_t::PAGE_BITMASK; }
hb_codepoint_t major_start (unsigned int major) const { return major << page_t::PAGE_BITS_LOG_2; }
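
Most of the hunks above replace pages[...] / page_map[...] with pages.arrayZ[...] / page_map.arrayZ[...]: operator[] on the HarfBuzz containers goes through a checked path, while the raw arrayZ pointer does not, so hot loops index arrayZ directly once the bound is already known. A schematic of that split (simplified; an assert stands in for whatever the real containers do with an out-of-range index):

    #include <cassert>

    // Checked accessor vs. the raw pointer used in hot paths.
    template <typename T>
    struct tiny_vector_t
    {
      T *arrayZ = nullptr;   // backing storage, no bounds check when used directly
      unsigned length = 0;

      T &operator [] (unsigned i)
      {
        assert (i < length);          // stand-in for the library's range handling
        return arrayZ[i];
      }
    };

    // Once the loop bound is established, indexing arrayZ skips the per-access check:
    //   for (unsigned i = 0; i < v.length; i++)
    //     sum += v.arrayZ[i];
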
diff --git a/thirdparty/harfbuzz/src/hb-blob.cc b/thirdparty/harfbuzz/src/hb-blob.cc
index 9bc12ea3fa..f0fda1fa4d 100644
--- a/thirdparty/harfbuzz/src/hb-blob.cc
+++ b/thirdparty/harfbuzz/src/hb-blob.cc
@@ -495,7 +495,7 @@ hb_blob_t::try_make_writable ()
DEBUG_MSG_FUNC (BLOB, this, "dupped successfully -> %p\n", this->data);
- memcpy (new_data, this->data, this->length);
+ hb_memcpy (new_data, this->data, this->length);
this->destroy_user_data ();
this->mode = HB_MEMORY_MODE_WRITABLE;
this->data = new_data;
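
The memcpy to hb_memcpy swap here (and in several files below) routes copies through HarfBuzz's own wrapper. A likely motivation is that calling memcpy with a null pointer is undefined behaviour even when the length is zero; a thin wrapper along these lines sidesteps that case. The body below is a guess at the shape, not a quote of hb-algs.hh:

    #include <cstring>
    #include <cstddef>

    // Bail out before touching memcpy when there is nothing to copy, so a
    // null source/destination with len == 0 is harmless.
    static inline void *guarded_memcpy (void *dst, const void *src, size_t len)
    {
      if (!len) return dst;
      return std::memcpy (dst, src, len);
    }
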
diff --git a/thirdparty/harfbuzz/src/hb-buffer-deserialize-json.hh b/thirdparty/harfbuzz/src/hb-buffer-deserialize-json.hh
index 87095855d6..993bb1f698 100644
--- a/thirdparty/harfbuzz/src/hb-buffer-deserialize-json.hh
+++ b/thirdparty/harfbuzz/src/hb-buffer-deserialize-json.hh
@@ -32,7 +32,7 @@
#include "hb.hh"
-#line 36 "hb-buffer-deserialize-json.hh"
+#line 33 "hb-buffer-deserialize-json.hh"
static const unsigned char _deserialize_json_trans_keys[] = {
0u, 0u, 9u, 123u, 9u, 34u, 97u, 117u, 120u, 121u, 34u, 34u, 9u, 58u, 9u, 57u,
48u, 57u, 9u, 125u, 9u, 125u, 9u, 125u, 34u, 34u, 9u, 58u, 9u, 57u, 48u, 57u,
@@ -557,12 +557,12 @@ _hb_buffer_deserialize_json (hb_buffer_t *buffer,
hb_glyph_info_t info = {0};
hb_glyph_position_t pos = {0};
-#line 561 "hb-buffer-deserialize-json.hh"
+#line 554 "hb-buffer-deserialize-json.hh"
{
cs = deserialize_json_start;
}
-#line 566 "hb-buffer-deserialize-json.hh"
+#line 557 "hb-buffer-deserialize-json.hh"
{
int _slen;
int _trans;
@@ -590,8 +590,8 @@ _resume:
case 1:
#line 38 "hb-buffer-deserialize-json.rl"
{
- memset (&info, 0, sizeof (info));
- memset (&pos , 0, sizeof (pos ));
+ hb_memset (&info, 0, sizeof (info));
+ hb_memset (&pos , 0, sizeof (pos ));
}
break;
case 5:
@@ -774,7 +774,7 @@ _resume:
*end_ptr = p;
}
break;
-#line 778 "hb-buffer-deserialize-json.hh"
+#line 735 "hb-buffer-deserialize-json.hh"
}
_again:
diff --git a/thirdparty/harfbuzz/src/hb-buffer-deserialize-text.hh b/thirdparty/harfbuzz/src/hb-buffer-deserialize-text.hh
index 9062610de2..6b9b4282fc 100644
--- a/thirdparty/harfbuzz/src/hb-buffer-deserialize-text.hh
+++ b/thirdparty/harfbuzz/src/hb-buffer-deserialize-text.hh
@@ -32,32 +32,30 @@
#include "hb.hh"
-#line 36 "hb-buffer-deserialize-text.hh"
+#line 33 "hb-buffer-deserialize-text.hh"
static const unsigned char _deserialize_text_trans_keys[] = {
- 0u, 0u, 9u, 91u, 85u, 85u, 43u, 43u, 48u, 102u, 9u, 85u, 48u, 57u, 45u, 57u,
- 48u, 57u, 48u, 57u, 48u, 57u, 45u, 57u, 48u, 57u, 44u, 44u, 45u, 57u, 48u, 57u,
- 44u, 57u, 43u, 124u, 45u, 57u, 48u, 57u, 9u, 124u, 9u, 124u, 0u, 0u, 9u, 85u,
+ 0u, 0u, 9u, 91u, 85u, 85u, 43u, 43u, 48u, 102u, 9u, 85u, 48u, 57u, 48u, 57u,
+ 45u, 57u, 48u, 57u, 45u, 57u, 48u, 57u, 48u, 57u, 45u, 57u, 48u, 57u, 44u, 44u,
+ 45u, 57u, 48u, 57u, 44u, 57u, 43u, 124u, 9u, 124u, 9u, 124u, 0u, 0u, 9u, 85u,
9u, 124u, 9u, 124u, 9u, 124u, 9u, 124u, 9u, 124u, 9u, 124u, 9u, 124u, 9u, 124u,
9u, 124u, 9u, 124u, 9u, 124u, 9u, 124u, 9u, 124u, 9u, 124u, 9u, 124u, 9u, 124u,
- 9u, 124u, 9u, 124u, 9u, 124u, 0
+ 0
};
static const char _deserialize_text_key_spans[] = {
- 0, 83, 1, 1, 55, 77, 10, 13,
- 10, 10, 10, 13, 10, 1, 13, 10,
- 14, 82, 13, 10, 116, 116, 0, 77,
- 116, 116, 116, 116, 116, 116, 116, 116,
+ 0, 83, 1, 1, 55, 77, 10, 10,
+ 13, 10, 13, 10, 10, 13, 10, 1,
+ 13, 10, 14, 82, 116, 116, 0, 77,
116, 116, 116, 116, 116, 116, 116, 116,
- 116, 116, 116
+ 116, 116, 116, 116, 116, 116, 116, 116
};
static const short _deserialize_text_index_offsets[] = {
0, 0, 84, 86, 88, 144, 222, 233,
- 247, 258, 269, 280, 294, 305, 307, 321,
- 332, 347, 430, 444, 455, 572, 689, 690,
+ 244, 258, 269, 283, 294, 305, 319, 330,
+ 332, 346, 357, 372, 455, 572, 689, 690,
768, 885, 1002, 1119, 1236, 1353, 1470, 1587,
- 1704, 1821, 1938, 2055, 2172, 2289, 2406, 2523,
- 2640, 2757, 2874
+ 1704, 1821, 1938, 2055, 2172, 2289, 2406, 2523
};
static const char _deserialize_text_indicies[] = {
@@ -90,34 +88,34 @@ static const char _deserialize_text_indicies[] = {
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 4, 1, 8,
9, 9, 9, 9, 9, 9, 9, 9,
- 9, 1, 10, 1, 1, 11, 12, 12,
- 12, 12, 12, 12, 12, 12, 12, 1,
+ 9, 1, 10, 11, 11, 11, 11, 11,
+ 11, 11, 11, 11, 1, 12, 1, 1,
13, 14, 14, 14, 14, 14, 14, 14,
14, 14, 1, 15, 16, 16, 16, 16,
- 16, 16, 16, 16, 16, 1, 17, 18,
- 18, 18, 18, 18, 18, 18, 18, 18,
- 1, 19, 1, 1, 20, 21, 21, 21,
+ 16, 16, 16, 16, 16, 1, 17, 1,
+ 1, 18, 19, 19, 19, 19, 19, 19,
+ 19, 19, 19, 1, 20, 21, 21, 21,
21, 21, 21, 21, 21, 21, 1, 22,
23, 23, 23, 23, 23, 23, 23, 23,
- 23, 1, 24, 1, 25, 1, 1, 26,
- 27, 27, 27, 27, 27, 27, 27, 27,
- 27, 1, 28, 29, 29, 29, 29, 29,
- 29, 29, 29, 29, 1, 24, 1, 1,
- 1, 23, 23, 23, 23, 23, 23, 23,
- 23, 23, 23, 1, 30, 30, 1, 1,
+ 23, 1, 24, 1, 1, 25, 26, 26,
+ 26, 26, 26, 26, 26, 26, 26, 1,
+ 27, 28, 28, 28, 28, 28, 28, 28,
+ 28, 28, 1, 29, 1, 30, 1, 1,
+ 31, 32, 32, 32, 32, 32, 32, 32,
+ 32, 32, 1, 33, 34, 34, 34, 34,
+ 34, 34, 34, 34, 34, 1, 29, 1,
+ 1, 1, 28, 28, 28, 28, 28, 28,
+ 28, 28, 28, 28, 1, 35, 35, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 30, 1,
- 1, 30, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 35,
+ 1, 1, 35, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 30, 30, 1,
+ 1, 1, 1, 1, 1, 1, 35, 35,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 30, 1, 31,
- 1, 1, 32, 33, 33, 33, 33, 33,
- 33, 33, 33, 33, 1, 34, 35, 35,
- 35, 35, 35, 35, 35, 35, 35, 1,
+ 1, 1, 1, 1, 1, 1, 35, 1,
36, 36, 36, 36, 36, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 36,
@@ -201,282 +199,235 @@ static const char _deserialize_text_indicies[] = {
48, 48, 48, 48, 48, 48, 48, 48,
48, 48, 48, 48, 48, 48, 48, 48,
48, 48, 48, 48, 48, 48, 56, 48,
- 57, 57, 57, 57, 57, 30, 30, 30,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 30, 30, 30, 30, 30, 30, 30, 57,
- 30, 30, 58, 30, 30, 30, 30, 30,
- 30, 30, 59, 1, 30, 30, 30, 30,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 30, 30, 30, 30, 60, 30, 30, 61,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 30, 30, 30, 62, 63, 30, 30, 30,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 30, 30, 30, 64, 30, 57, 57, 57,
- 57, 57, 30, 30, 30, 30, 30, 30,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 30, 30, 30, 30, 57, 30, 30, 58,
- 30, 30, 30, 30, 30, 30, 30, 59,
- 1, 30, 30, 30, 65, 66, 66, 66,
- 66, 66, 66, 66, 66, 66, 30, 30,
- 30, 60, 30, 30, 61, 30, 30, 30,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 62, 63, 30, 30, 30, 30, 30, 30,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 64, 30, 67, 67, 67, 67, 67, 1,
+ 57, 57, 57, 57, 57, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 57,
+ 35, 35, 58, 35, 35, 35, 35, 35,
+ 35, 35, 59, 1, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 60, 35, 35, 61,
+ 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 62, 63, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35,
+ 35, 35, 35, 64, 35, 65, 65, 65,
+ 65, 65, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 65, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 67, 1, 1, 68, 1, 1, 1,
- 1, 1, 1, 1, 1, 69, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 70, 1,
+ 1, 66, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 71, 1, 72,
- 72, 72, 72, 72, 1, 1, 1, 1,
+ 67, 1, 68, 68, 68, 68, 68, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 72, 1,
1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 68, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 42, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 42, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 73, 1, 74, 74, 74, 74,
- 74, 48, 48, 48, 48, 48, 48, 48,
+ 1, 1, 1, 1, 1, 69, 1, 70,
+ 70, 70, 70, 70, 48, 48, 48, 48,
48, 48, 48, 48, 48, 48, 48, 48,
- 48, 48, 48, 74, 48, 48, 50, 48,
- 48, 48, 48, 48, 48, 48, 51, 1,
+ 48, 48, 48, 48, 48, 48, 70, 48,
+ 48, 50, 48, 48, 48, 48, 48, 48,
+ 48, 51, 1, 48, 48, 48, 48, 48,
48, 48, 48, 48, 48, 48, 48, 48,
+ 48, 48, 48, 52, 48, 48, 53, 48,
48, 48, 48, 48, 48, 48, 48, 48,
- 52, 48, 48, 53, 48, 48, 48, 48,
48, 48, 48, 48, 48, 48, 48, 48,
48, 48, 48, 48, 48, 48, 48, 48,
- 48, 48, 48, 48, 48, 48, 48, 54,
- 55, 48, 48, 48, 48, 48, 48, 48,
+ 48, 48, 54, 55, 48, 48, 48, 48,
48, 48, 48, 48, 48, 48, 48, 48,
48, 48, 48, 48, 48, 48, 48, 48,
- 48, 48, 48, 48, 48, 48, 48, 56,
- 48, 75, 75, 75, 75, 75, 1, 1,
+ 48, 48, 48, 48, 48, 48, 48, 48,
+ 48, 48, 56, 48, 71, 71, 71, 71,
+ 71, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 71, 1, 1, 72, 1,
+ 1, 1, 1, 1, 1, 1, 1, 73,
1, 1, 1, 1, 1, 1, 1, 1,
- 75, 1, 1, 76, 1, 1, 1, 1,
- 1, 1, 1, 77, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 78, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 45, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
+ 74, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 79, 1, 80, 80,
- 80, 80, 80, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 75,
+ 1, 76, 76, 76, 76, 76, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 80, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
+ 76, 1, 1, 77, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 81, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 78, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 82, 1, 80, 80, 80, 80, 80,
+ 1, 1, 1, 1, 79, 1, 76, 76,
+ 76, 76, 76, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 76, 1, 1,
+ 77, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 21, 21, 21,
+ 21, 21, 21, 21, 21, 21, 21, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 80, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 83, 83, 83, 83, 83, 83,
- 83, 83, 83, 83, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 78, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 81,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 79, 1, 71, 71, 71, 71, 71,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 82, 1,
- 84, 84, 84, 84, 84, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 84,
- 1, 1, 85, 1, 1, 1, 1, 1,
- 1, 1, 86, 1, 1, 1, 1, 1,
+ 1, 1, 71, 1, 1, 72, 1, 1,
+ 1, 1, 1, 1, 1, 1, 73, 1,
+ 1, 1, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 74,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 87, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 75, 1,
+ 80, 80, 80, 80, 80, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 88, 1, 84, 84, 84,
- 84, 84, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 80,
+ 1, 1, 81, 1, 1, 1, 1, 1,
+ 1, 1, 82, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 84, 1, 1, 85,
- 1, 1, 1, 1, 1, 1, 1, 86,
- 1, 1, 1, 1, 29, 29, 29, 29,
- 29, 29, 29, 29, 29, 29, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 83,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 45, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 87, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 84, 1, 85, 85, 85,
+ 85, 85, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 88, 1, 75, 75, 75, 75, 75, 1,
+ 1, 1, 1, 1, 85, 1, 1, 86,
+ 1, 1, 1, 1, 1, 1, 1, 87,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 75, 1, 1, 76, 1, 1, 1,
- 1, 1, 1, 1, 77, 1, 1, 1,
- 1, 89, 89, 89, 89, 89, 89, 89,
- 89, 89, 89, 1, 1, 1, 1, 1,
- 1, 78, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 45, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 88, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 79, 1, 90,
- 90, 90, 90, 90, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 90, 1,
- 1, 91, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
+ 89, 1, 85, 85, 85, 85, 85, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 85, 1, 1, 86, 1, 1, 1,
+ 1, 1, 1, 1, 87, 1, 1, 1,
+ 1, 34, 34, 34, 34, 34, 34, 34,
+ 34, 34, 34, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 92, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 88, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 93, 1, 90, 90, 90, 90,
- 90, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 90, 1, 1, 91, 1,
+ 1, 1, 1, 1, 1, 89, 1, 80,
+ 80, 80, 80, 80, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 35, 35, 35, 35, 35,
- 35, 35, 35, 35, 35, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 80, 1,
+ 1, 81, 1, 1, 1, 1, 1, 1,
+ 1, 82, 1, 1, 1, 1, 90, 90,
+ 90, 90, 90, 90, 90, 90, 90, 90,
+ 1, 1, 1, 1, 1, 1, 83, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 45, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 92, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 93,
- 1, 67, 67, 67, 67, 67, 1, 1,
+ 1, 1, 84, 1, 65, 65, 65, 65,
+ 65, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 65, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 67, 1, 1, 68, 1, 1, 1, 1,
- 1, 1, 1, 1, 69, 1, 1, 1,
- 14, 14, 14, 14, 14, 14, 14, 14,
- 14, 14, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 91, 91, 91, 91, 91,
+ 91, 91, 91, 91, 91, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 70, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
+ 66, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 71, 1, 94, 94,
- 94, 94, 94, 30, 30, 30, 30, 30,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 30, 30, 30, 30, 30, 94, 30, 30,
- 58, 30, 30, 30, 30, 30, 30, 30,
- 59, 1, 30, 30, 30, 30, 30, 30,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 30, 30, 60, 30, 30, 61, 30, 30,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 30, 62, 95, 30, 30, 30, 30, 30,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 30, 96, 30, 94, 94, 94, 94, 94,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 30, 30, 94, 30, 30, 58, 30, 30,
- 30, 30, 30, 30, 30, 59, 1, 30,
- 30, 30, 97, 97, 97, 97, 97, 97,
- 97, 97, 97, 97, 30, 30, 30, 60,
- 30, 30, 61, 30, 30, 30, 30, 30,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 30, 30, 30, 30, 30, 30, 62, 95,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 30, 30, 30, 30, 30, 30, 30, 30,
- 30, 30, 30, 30, 30, 30, 96, 30,
- 0
+ 1, 1, 1, 1, 1, 1, 1, 67,
+ 1, 0
};
static const char _deserialize_text_trans_targs[] = {
1, 0, 2, 26, 3, 4, 20, 5,
- 24, 25, 8, 29, 40, 29, 40, 32,
- 37, 33, 34, 12, 13, 16, 13, 16,
- 14, 15, 35, 36, 35, 36, 27, 19,
- 38, 39, 38, 39, 21, 20, 6, 22,
+ 24, 25, 28, 39, 9, 31, 34, 31,
+ 34, 11, 32, 33, 32, 33, 35, 38,
+ 14, 15, 18, 15, 18, 16, 17, 36,
+ 37, 36, 37, 27, 21, 20, 6, 22,
23, 21, 22, 23, 21, 22, 23, 25,
- 27, 27, 28, 7, 9, 11, 17, 22,
- 31, 27, 28, 7, 9, 11, 17, 22,
- 31, 41, 42, 30, 10, 18, 22, 31,
- 30, 31, 31, 30, 10, 7, 11, 31,
- 30, 22, 31, 34, 30, 10, 7, 22,
- 31, 37, 30, 10, 22, 31, 27, 22,
- 31, 42
+ 27, 27, 7, 8, 12, 13, 19, 22,
+ 30, 27, 7, 8, 12, 13, 19, 22,
+ 30, 29, 22, 30, 29, 30, 30, 29,
+ 7, 10, 22, 30, 29, 7, 22, 30,
+ 29, 7, 8, 13, 30, 29, 7, 8,
+ 22, 30, 38, 39
};
static const char _deserialize_text_trans_actions[] = {
0, 0, 0, 0, 1, 0, 2, 0,
- 2, 2, 3, 4, 4, 5, 5, 4,
- 4, 4, 4, 3, 3, 3, 0, 0,
- 6, 3, 4, 4, 5, 5, 5, 3,
- 4, 4, 5, 5, 7, 8, 9, 7,
+ 2, 2, 3, 3, 4, 3, 3, 5,
+ 5, 4, 3, 3, 5, 5, 3, 3,
+ 4, 4, 4, 0, 0, 6, 4, 3,
+ 3, 5, 5, 5, 7, 8, 9, 7,
7, 0, 0, 0, 10, 10, 10, 8,
- 12, 13, 14, 15, 15, 15, 16, 11,
- 11, 18, 19, 20, 20, 20, 0, 17,
- 17, 4, 4, 21, 22, 22, 21, 21,
- 0, 0, 13, 10, 23, 23, 23, 10,
- 24, 24, 24, 5, 25, 26, 26, 25,
- 25, 5, 27, 28, 27, 27, 30, 29,
- 29, 5
+ 12, 13, 14, 14, 14, 14, 15, 11,
+ 11, 17, 18, 18, 18, 18, 0, 16,
+ 16, 19, 19, 19, 0, 0, 13, 20,
+ 21, 21, 20, 20, 22, 23, 22, 22,
+ 10, 24, 24, 24, 10, 25, 26, 26,
+ 25, 25, 5, 5
};
static const char _deserialize_text_eof_actions[] = {
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 7, 0, 0, 0,
- 10, 10, 11, 17, 17, 21, 0, 11,
- 10, 24, 24, 25, 25, 10, 27, 27,
- 21, 29, 29
+ 10, 10, 11, 16, 19, 0, 11, 20,
+ 22, 22, 20, 10, 25, 25, 10, 19
};
static const int deserialize_text_start = 1;
@@ -509,12 +460,12 @@ _hb_buffer_deserialize_text (hb_buffer_t *buffer,
hb_glyph_info_t info = {0};
hb_glyph_position_t pos = {0};
-#line 513 "hb-buffer-deserialize-text.hh"
+#line 457 "hb-buffer-deserialize-text.hh"
{
cs = deserialize_text_start;
}
-#line 518 "hb-buffer-deserialize-text.hh"
+#line 460 "hb-buffer-deserialize-text.hh"
{
int _slen;
int _trans;
@@ -542,11 +493,11 @@ _resume:
case 1:
#line 38 "hb-buffer-deserialize-text.rl"
{
- memset (&info, 0, sizeof (info));
- memset (&pos , 0, sizeof (pos ));
+ hb_memset (&info, 0, sizeof (info));
+ hb_memset (&pos , 0, sizeof (pos ));
}
break;
- case 3:
+ case 4:
#line 51 "hb-buffer-deserialize-text.rl"
{
tok = p;
@@ -560,7 +511,7 @@ _resume:
#line 56 "hb-buffer-deserialize-text.rl"
{ if (unlikely (!buffer->ensure_unicode ())) return false; }
break;
- case 20:
+ case 18:
#line 58 "hb-buffer-deserialize-text.rl"
{
/* TODO Unescape delimiters. */
@@ -574,7 +525,7 @@ _resume:
#line 66 "hb-buffer-deserialize-text.rl"
{if (!parse_hex (tok, p, &info.codepoint )) return false; }
break;
- case 23:
+ case 24:
#line 68 "hb-buffer-deserialize-text.rl"
{ if (!parse_uint (tok, p, &info.cluster )) return false; }
break;
@@ -586,26 +537,26 @@ _resume:
#line 70 "hb-buffer-deserialize-text.rl"
{ if (!parse_int (tok, p, &pos.y_offset )) return false; }
break;
- case 22:
+ case 21:
#line 71 "hb-buffer-deserialize-text.rl"
{ if (!parse_int (tok, p, &pos.x_advance)) return false; }
break;
- case 28:
+ case 23:
#line 72 "hb-buffer-deserialize-text.rl"
{ if (!parse_int (tok, p, &pos.y_advance)) return false; }
break;
- case 16:
+ case 15:
#line 38 "hb-buffer-deserialize-text.rl"
{
- memset (&info, 0, sizeof (info));
- memset (&pos , 0, sizeof (pos ));
+ hb_memset (&info, 0, sizeof (info));
+ hb_memset (&pos , 0, sizeof (pos ));
}
#line 51 "hb-buffer-deserialize-text.rl"
{
tok = p;
}
break;
- case 4:
+ case 3:
#line 51 "hb-buffer-deserialize-text.rl"
{
tok = p;
@@ -621,7 +572,7 @@ _resume:
#line 56 "hb-buffer-deserialize-text.rl"
{ if (unlikely (!buffer->ensure_unicode ())) return false; }
break;
- case 17:
+ case 16:
#line 58 "hb-buffer-deserialize-text.rl"
{
/* TODO Unescape delimiters. */
@@ -639,18 +590,6 @@ _resume:
*end_ptr = p;
}
break;
- case 19:
-#line 58 "hb-buffer-deserialize-text.rl"
- {
- /* TODO Unescape delimiters. */
- if (!hb_font_glyph_from_string (font,
- tok, p - tok,
- &info.codepoint))
- return false;
-}
-#line 55 "hb-buffer-deserialize-text.rl"
- { if (unlikely (!buffer->ensure_glyphs ())) return false; }
- break;
case 7:
#line 66 "hb-buffer-deserialize-text.rl"
{if (!parse_hex (tok, p, &info.codepoint )) return false; }
@@ -687,7 +626,7 @@ _resume:
*end_ptr = p;
}
break;
- case 21:
+ case 20:
#line 71 "hb-buffer-deserialize-text.rl"
{ if (!parse_int (tok, p, &pos.x_advance)) return false; }
#line 43 "hb-buffer-deserialize-text.rl"
@@ -699,7 +638,7 @@ _resume:
*end_ptr = p;
}
break;
- case 27:
+ case 22:
#line 72 "hb-buffer-deserialize-text.rl"
{ if (!parse_int (tok, p, &pos.y_advance)) return false; }
#line 43 "hb-buffer-deserialize-text.rl"
@@ -711,7 +650,7 @@ _resume:
*end_ptr = p;
}
break;
- case 24:
+ case 19:
#line 73 "hb-buffer-deserialize-text.rl"
{ if (!parse_uint (tok, p, &info.mask )) return false; }
#line 43 "hb-buffer-deserialize-text.rl"
@@ -726,8 +665,8 @@ _resume:
case 12:
#line 38 "hb-buffer-deserialize-text.rl"
{
- memset (&info, 0, sizeof (info));
- memset (&pos , 0, sizeof (pos ));
+ hb_memset (&info, 0, sizeof (info));
+ hb_memset (&pos , 0, sizeof (pos ));
}
#line 51 "hb-buffer-deserialize-text.rl"
{
@@ -736,11 +675,11 @@ _resume:
#line 55 "hb-buffer-deserialize-text.rl"
{ if (unlikely (!buffer->ensure_glyphs ())) return false; }
break;
- case 15:
+ case 14:
#line 38 "hb-buffer-deserialize-text.rl"
{
- memset (&info, 0, sizeof (info));
- memset (&pos , 0, sizeof (pos ));
+ hb_memset (&info, 0, sizeof (info));
+ hb_memset (&pos , 0, sizeof (pos ));
}
#line 51 "hb-buffer-deserialize-text.rl"
{
@@ -755,7 +694,7 @@ _resume:
return false;
}
break;
- case 18:
+ case 17:
#line 58 "hb-buffer-deserialize-text.rl"
{
/* TODO Unescape delimiters. */
@@ -775,31 +714,11 @@ _resume:
*end_ptr = p;
}
break;
- case 29:
-#line 58 "hb-buffer-deserialize-text.rl"
- {
- /* TODO Unescape delimiters. */
- if (!hb_font_glyph_from_string (font,
- tok, p - tok,
- &info.codepoint))
- return false;
-}
-#line 73 "hb-buffer-deserialize-text.rl"
- { if (!parse_uint (tok, p, &info.mask )) return false; }
-#line 43 "hb-buffer-deserialize-text.rl"
- {
- buffer->add_info (info);
- if (unlikely (!buffer->successful))
- return false;
- buffer->pos[buffer->len - 1] = pos;
- *end_ptr = p;
-}
- break;
case 11:
#line 38 "hb-buffer-deserialize-text.rl"
{
- memset (&info, 0, sizeof (info));
- memset (&pos , 0, sizeof (pos ));
+ hb_memset (&info, 0, sizeof (info));
+ hb_memset (&pos , 0, sizeof (pos ));
}
#line 51 "hb-buffer-deserialize-text.rl"
{
@@ -822,54 +741,11 @@ _resume:
*end_ptr = p;
}
break;
- case 14:
-#line 38 "hb-buffer-deserialize-text.rl"
- {
- memset (&info, 0, sizeof (info));
- memset (&pos , 0, sizeof (pos ));
-}
-#line 51 "hb-buffer-deserialize-text.rl"
- {
- tok = p;
-}
-#line 58 "hb-buffer-deserialize-text.rl"
- {
- /* TODO Unescape delimiters. */
- if (!hb_font_glyph_from_string (font,
- tok, p - tok,
- &info.codepoint))
- return false;
-}
-#line 55 "hb-buffer-deserialize-text.rl"
- { if (unlikely (!buffer->ensure_glyphs ())) return false; }
- break;
- case 30:
-#line 58 "hb-buffer-deserialize-text.rl"
- {
- /* TODO Unescape delimiters. */
- if (!hb_font_glyph_from_string (font,
- tok, p - tok,
- &info.codepoint))
- return false;
-}
-#line 73 "hb-buffer-deserialize-text.rl"
- { if (!parse_uint (tok, p, &info.mask )) return false; }
-#line 55 "hb-buffer-deserialize-text.rl"
- { if (unlikely (!buffer->ensure_glyphs ())) return false; }
-#line 43 "hb-buffer-deserialize-text.rl"
- {
- buffer->add_info (info);
- if (unlikely (!buffer->successful))
- return false;
- buffer->pos[buffer->len - 1] = pos;
- *end_ptr = p;
-}
- break;
case 13:
#line 38 "hb-buffer-deserialize-text.rl"
{
- memset (&info, 0, sizeof (info));
- memset (&pos , 0, sizeof (pos ));
+ hb_memset (&info, 0, sizeof (info));
+ hb_memset (&pos , 0, sizeof (pos ));
}
#line 51 "hb-buffer-deserialize-text.rl"
{
@@ -894,7 +770,7 @@ _resume:
*end_ptr = p;
}
break;
-#line 898 "hb-buffer-deserialize-text.hh"
+#line 715 "hb-buffer-deserialize-text.hh"
}
_again:
@@ -906,7 +782,7 @@ _again:
if ( p == eof )
{
switch ( _deserialize_text_eof_actions[cs] ) {
- case 17:
+ case 16:
#line 58 "hb-buffer-deserialize-text.rl"
{
/* TODO Unescape delimiters. */
@@ -960,7 +836,7 @@ _again:
*end_ptr = p;
}
break;
- case 21:
+ case 20:
#line 71 "hb-buffer-deserialize-text.rl"
{ if (!parse_int (tok, p, &pos.x_advance)) return false; }
#line 43 "hb-buffer-deserialize-text.rl"
@@ -972,7 +848,7 @@ _again:
*end_ptr = p;
}
break;
- case 27:
+ case 22:
#line 72 "hb-buffer-deserialize-text.rl"
{ if (!parse_int (tok, p, &pos.y_advance)) return false; }
#line 43 "hb-buffer-deserialize-text.rl"
@@ -984,27 +860,7 @@ _again:
*end_ptr = p;
}
break;
- case 24:
-#line 73 "hb-buffer-deserialize-text.rl"
- { if (!parse_uint (tok, p, &info.mask )) return false; }
-#line 43 "hb-buffer-deserialize-text.rl"
- {
- buffer->add_info (info);
- if (unlikely (!buffer->successful))
- return false;
- buffer->pos[buffer->len - 1] = pos;
- *end_ptr = p;
-}
- break;
- case 29:
-#line 58 "hb-buffer-deserialize-text.rl"
- {
- /* TODO Unescape delimiters. */
- if (!hb_font_glyph_from_string (font,
- tok, p - tok,
- &info.codepoint))
- return false;
-}
+ case 19:
#line 73 "hb-buffer-deserialize-text.rl"
{ if (!parse_uint (tok, p, &info.mask )) return false; }
#line 43 "hb-buffer-deserialize-text.rl"
@@ -1019,8 +875,8 @@ _again:
case 11:
#line 38 "hb-buffer-deserialize-text.rl"
{
- memset (&info, 0, sizeof (info));
- memset (&pos , 0, sizeof (pos ));
+ hb_memset (&info, 0, sizeof (info));
+ hb_memset (&pos , 0, sizeof (pos ));
}
#line 51 "hb-buffer-deserialize-text.rl"
{
@@ -1043,7 +899,7 @@ _again:
*end_ptr = p;
}
break;
-#line 1047 "hb-buffer-deserialize-text.hh"
+#line 825 "hb-buffer-deserialize-text.hh"
}
}
diff --git a/thirdparty/harfbuzz/src/hb-buffer-serialize.cc b/thirdparty/harfbuzz/src/hb-buffer-serialize.cc
index d1e1775430..a458f2318f 100644
--- a/thirdparty/harfbuzz/src/hb-buffer-serialize.cc
+++ b/thirdparty/harfbuzz/src/hb-buffer-serialize.cc
@@ -183,7 +183,7 @@ _hb_buffer_serialize_glyphs_json (hb_buffer_t *buffer,
unsigned int l = p - b;
if (buf_size > l)
{
- memcpy (buf, b, l);
+ hb_memcpy (buf, b, l);
buf += l;
buf_size -= l;
*buf_consumed += l;
@@ -241,7 +241,7 @@ _hb_buffer_serialize_unicode_json (hb_buffer_t *buffer,
unsigned int l = p - b;
if (buf_size > l)
{
- memcpy (buf, b, l);
+ hb_memcpy (buf, b, l);
buf += l;
buf_size -= l;
*buf_consumed += l;
@@ -329,7 +329,7 @@ _hb_buffer_serialize_glyphs_text (hb_buffer_t *buffer,
unsigned int l = p - b;
if (buf_size > l)
{
- memcpy (buf, b, l);
+ hb_memcpy (buf, b, l);
buf += l;
buf_size -= l;
*buf_consumed += l;
@@ -381,7 +381,7 @@ _hb_buffer_serialize_unicode_text (hb_buffer_t *buffer,
unsigned int l = p - b;
if (buf_size > l)
{
- memcpy (buf, b, l);
+ hb_memcpy (buf, b, l);
buf += l;
buf_size -= l;
*buf_consumed += l;
diff --git a/thirdparty/harfbuzz/src/hb-buffer-verify.cc b/thirdparty/harfbuzz/src/hb-buffer-verify.cc
index 5453e1ca94..1cd52b39b1 100644
--- a/thirdparty/harfbuzz/src/hb-buffer-verify.cc
+++ b/thirdparty/harfbuzz/src/hb-buffer-verify.cc
@@ -186,7 +186,7 @@ buffer_verify_unsafe_to_break (hb_buffer_t *buffer,
bool ret = true;
hb_buffer_diff_flags_t diff = hb_buffer_diff (reconstruction, buffer, (hb_codepoint_t) -1, 0);
- if (diff)
+ if (diff & ~HB_BUFFER_DIFF_FLAG_GLYPH_FLAGS_MISMATCH)
{
buffer_verify_error (buffer, font, BUFFER_VERIFY_ERROR "unsafe-to-break test failed.");
ret = false;
@@ -313,7 +313,6 @@ buffer_verify_unsafe_to_concat (hb_buffer_t *buffer,
bool ret = true;
hb_buffer_diff_flags_t diff;
-
/*
* Shape the two fragment streams.
*/
@@ -382,7 +381,7 @@ buffer_verify_unsafe_to_concat (hb_buffer_t *buffer,
* Diff results.
*/
diff = hb_buffer_diff (reconstruction, buffer, (hb_codepoint_t) -1, 0);
- if (diff)
+ if (diff & ~HB_BUFFER_DIFF_FLAG_GLYPH_FLAGS_MISMATCH)
{
buffer_verify_error (buffer, font, BUFFER_VERIFY_ERROR "unsafe-to-concat test failed.");
ret = false;
diff --git a/thirdparty/harfbuzz/src/hb-buffer.cc b/thirdparty/harfbuzz/src/hb-buffer.cc
index 57a5ae03ed..4b6c2d9eaa 100644
--- a/thirdparty/harfbuzz/src/hb-buffer.cc
+++ b/thirdparty/harfbuzz/src/hb-buffer.cc
@@ -172,12 +172,13 @@ hb_buffer_t::enlarge (unsigned int size)
while (size >= new_allocated)
new_allocated += (new_allocated >> 1) + 32;
- static_assert ((sizeof (info[0]) == sizeof (pos[0])), "");
- if (unlikely (hb_unsigned_mul_overflows (new_allocated, sizeof (info[0]))))
+ unsigned new_bytes;
+ if (unlikely (hb_unsigned_mul_overflows (new_allocated, sizeof (info[0]), &new_bytes)))
goto done;
- new_pos = (hb_glyph_position_t *) hb_realloc (pos, new_allocated * sizeof (pos[0]));
- new_info = (hb_glyph_info_t *) hb_realloc (info, new_allocated * sizeof (info[0]));
+ static_assert (sizeof (info[0]) == sizeof (pos[0]), "");
+ new_pos = (hb_glyph_position_t *) hb_realloc (pos, new_bytes);
+ new_info = (hb_glyph_info_t *) hb_realloc (info, new_bytes);
done:
if (unlikely (!new_pos || !new_info))
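
enlarge() now asks hb_unsigned_mul_overflows() for the product as well, so the byte count is computed exactly once, under an overflow check, instead of being recomputed after the check. One way such a helper can be sketched (the builtin-based body is an assumption about the implementation, not a quote):

    #include <climits>

    // Returns true on overflow; otherwise stores a * b into *result.
    static inline bool unsigned_mul_overflows (unsigned a, unsigned b, unsigned *result)
    {
    #if defined(__GNUC__) || defined(__clang__)
      return __builtin_mul_overflow (a, b, result);
    #else
      unsigned long long r = (unsigned long long) a * b;
      *result = (unsigned) r;
      return r > UINT_MAX;
    #endif
    }

    // Usage mirroring the hunk above:
    //   unsigned new_bytes;
    //   if (unsigned_mul_overflows (new_allocated, sizeof (info[0]), &new_bytes))
    //     goto done;                               // refuse to grow
    //   new_info = (hb_glyph_info_t *) hb_realloc (info, new_bytes);
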
@@ -208,7 +209,7 @@ hb_buffer_t::make_room_for (unsigned int num_in,
assert (have_output);
out_info = (hb_glyph_info_t *) pos;
- memcpy (out_info, info, out_len * sizeof (out_info[0]));
+ hb_memcpy (out_info, info, out_len * sizeof (out_info[0]));
}
return true;
@@ -229,7 +230,7 @@ hb_buffer_t::shift_forward (unsigned int count)
* Ideally, we should at least set Default_Ignorable bits on
* these, as well as consistent cluster values. But the former
* is layering violation... */
- memset (info + len, 0, (idx + count - len) * sizeof (info[0]));
+ hb_memset (info + len, 0, (idx + count - len) * sizeof (info[0]));
}
len += count;
idx += count;
@@ -298,8 +299,8 @@ hb_buffer_t::clear ()
out_len = 0;
out_info = info;
- memset (context, 0, sizeof context);
- memset (context_len, 0, sizeof context_len);
+ hb_memset (context, 0, sizeof context);
+ hb_memset (context_len, 0, sizeof context_len);
deallocate_var_all ();
serial = 0;
@@ -313,15 +314,14 @@ hb_buffer_t::enter ()
serial = 0;
shaping_failed = false;
scratch_flags = HB_BUFFER_SCRATCH_FLAG_DEFAULT;
- if (likely (!hb_unsigned_mul_overflows (len, HB_BUFFER_MAX_LEN_FACTOR)))
+ unsigned mul;
+ if (likely (!hb_unsigned_mul_overflows (len, HB_BUFFER_MAX_LEN_FACTOR, &mul)))
{
- max_len = hb_max (len * HB_BUFFER_MAX_LEN_FACTOR,
- (unsigned) HB_BUFFER_MAX_LEN_MIN);
+ max_len = hb_max (mul, (unsigned) HB_BUFFER_MAX_LEN_MIN);
}
- if (likely (!hb_unsigned_mul_overflows (len, HB_BUFFER_MAX_OPS_FACTOR)))
+ if (likely (!hb_unsigned_mul_overflows (len, HB_BUFFER_MAX_OPS_FACTOR, &mul)))
{
- max_ops = hb_max (len * HB_BUFFER_MAX_OPS_FACTOR,
- (unsigned) HB_BUFFER_MAX_OPS_MIN);
+ max_ops = hb_max (mul, (unsigned) HB_BUFFER_MAX_OPS_MIN);
}
}
void
@@ -345,7 +345,7 @@ hb_buffer_t::add (hb_codepoint_t codepoint,
glyph = &info[len];
- memset (glyph, 0, sizeof (*glyph));
+ hb_memset (glyph, 0, sizeof (*glyph));
glyph->codepoint = codepoint;
glyph->mask = 0;
glyph->cluster = cluster;
@@ -606,6 +606,53 @@ done:
}
void
+hb_buffer_t::delete_glyphs_inplace (bool (*filter) (const hb_glyph_info_t *info))
+{
+ /* Merge clusters and delete filtered glyphs.
+ * NOTE! We can't use out-buffer as we have positioning data. */
+ unsigned int j = 0;
+ unsigned int count = len;
+ for (unsigned int i = 0; i < count; i++)
+ {
+ if (filter (&info[i]))
+ {
+ /* Merge clusters.
+ * Same logic as delete_glyph(), but for in-place removal. */
+
+ unsigned int cluster = info[i].cluster;
+ if (i + 1 < count && cluster == info[i + 1].cluster)
+ continue; /* Cluster survives; do nothing. */
+
+ if (j)
+ {
+ /* Merge cluster backward. */
+ if (cluster < info[j - 1].cluster)
+ {
+ unsigned int mask = info[i].mask;
+ unsigned int old_cluster = info[j - 1].cluster;
+ for (unsigned k = j; k && info[k - 1].cluster == old_cluster; k--)
+ set_cluster (info[k - 1], cluster, mask);
+ }
+ continue;
+ }
+
+ if (i + 1 < count)
+ merge_clusters (i, i + 2); /* Merge cluster forward. */
+
+ continue;
+ }
+
+ if (j != i)
+ {
+ info[j] = info[i];
+ pos[j] = pos[i];
+ }
+ j++;
+ }
+ len = j;
+}
+
+void
hb_buffer_t::guess_segment_properties ()
{
assert_unicode ();
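
delete_glyphs_inplace(), added above, walks info[]/pos[] once with a read index i and a write index j, copying only the glyphs the filter keeps; the extra branches merge a deleted glyph's cluster into a neighbour first. Stripped of that cluster bookkeeping, the compaction is the familiar remove_if pattern over two parallel arrays:

    // Minimal sketch of the compaction only; the real function also merges
    // clusters backward/forward before dropping a glyph.
    template <typename Info, typename Pos, typename Filter>
    unsigned compact_in_place (Info *info, Pos *pos, unsigned len, Filter drop)
    {
      unsigned j = 0;
      for (unsigned i = 0; i < len; i++)
      {
        if (drop (info[i]))
          continue;                     // filtered out, nothing written
        if (j != i)
        {
          info[j] = info[i];            // keep the two arrays in step
          pos[j]  = pos[i];
        }
        j++;
      }
      return j;                         // new length
    }
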
@@ -933,7 +980,6 @@ hb_buffer_get_unicode_funcs (const hb_buffer_t *buffer)
void
hb_buffer_set_direction (hb_buffer_t *buffer,
hb_direction_t direction)
-
{
if (unlikely (hb_object_is_immutable (buffer)))
return;
@@ -1385,9 +1431,9 @@ hb_buffer_set_length (hb_buffer_t *buffer,
/* Wipe the new space */
if (length > buffer->len) {
- memset (buffer->info + buffer->len, 0, sizeof (buffer->info[0]) * (length - buffer->len));
+ hb_memset (buffer->info + buffer->len, 0, sizeof (buffer->info[0]) * (length - buffer->len));
if (buffer->have_positions)
- memset (buffer->pos + buffer->len, 0, sizeof (buffer->pos[0]) * (length - buffer->len));
+ hb_memset (buffer->pos + buffer->len, 0, sizeof (buffer->pos[0]) * (length - buffer->len));
}
buffer->len = length;
@@ -1795,7 +1841,9 @@ hb_buffer_add_latin1 (hb_buffer_t *buffer,
* marks at stat of run.
*
* This function does not check the validity of @text, it is up to the caller
- * to ensure it contains a valid Unicode code points.
+ * to ensure it contains valid Unicode scalar values. In contrast,
+ * hb_buffer_add_utf32() takes similar input but performs a sanity check
+ * on it.
*
* Since: 0.9.31
**/
@@ -1858,9 +1906,9 @@ hb_buffer_append (hb_buffer_t *buffer,
hb_segment_properties_overlay (&buffer->props, &source->props);
- memcpy (buffer->info + orig_len, source->info + start, (end - start) * sizeof (buffer->info[0]));
+ hb_memcpy (buffer->info + orig_len, source->info + start, (end - start) * sizeof (buffer->info[0]));
if (buffer->have_positions)
- memcpy (buffer->pos + orig_len, source->pos + start, (end - start) * sizeof (buffer->pos[0]));
+ hb_memcpy (buffer->pos + orig_len, source->pos + start, (end - start) * sizeof (buffer->pos[0]));
if (source->content_type == HB_BUFFER_CONTENT_TYPE_UNICODE)
{
@@ -2048,7 +2096,7 @@ hb_buffer_diff (hb_buffer_t *buffer,
result |= HB_BUFFER_DIFF_FLAG_CODEPOINT_MISMATCH;
if (buf_info->cluster != ref_info->cluster)
result |= HB_BUFFER_DIFF_FLAG_CLUSTER_MISMATCH;
- if ((buf_info->mask & ~ref_info->mask & HB_GLYPH_FLAG_DEFINED))
+ if ((buf_info->mask ^ ref_info->mask) & HB_GLYPH_FLAG_DEFINED)
result |= HB_BUFFER_DIFF_FLAG_GLYPH_FLAGS_MISMATCH;
if (contains && ref_info->codepoint == dottedcircle_glyph)
result |= HB_BUFFER_DIFF_FLAG_DOTTED_CIRCLE_PRESENT;
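
The mask comparison in hb_buffer_diff() changes from buf & ~ref to (buf ^ ref) under HB_GLYPH_FLAG_DEFINED, so a flag present only on the reference glyph now also counts as a mismatch. For example, with defined-flag bits buf = 0b01 and ref = 0b11, the old test 0b01 & ~0b11 is 0 (no mismatch reported) while the new test (0b01 ^ 0b11) & defined is 0b10. A compact check of both expressions:

    #include <cstdio>
    #include <cstdint>

    int main ()
    {
      const uint32_t defined = 0b11;      // stand-in for HB_GLYPH_FLAG_DEFINED
      uint32_t buf = 0b01, ref = 0b11;    // ref carries a flag buf lacks

      bool old_mismatch = (buf & ~ref & defined) != 0;   // misses this case
      bool new_mismatch = ((buf ^ ref) & defined) != 0;  // catches it

      std::printf ("old: %d, new: %d\n", old_mismatch, new_mismatch);  // old: 0, new: 1
      return 0;
    }
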
@@ -2103,6 +2151,13 @@ hb_buffer_set_message_func (hb_buffer_t *buffer,
hb_buffer_message_func_t func,
void *user_data, hb_destroy_func_t destroy)
{
+ if (unlikely (hb_object_is_immutable (buffer)))
+ {
+ if (destroy)
+ destroy (user_data);
+ return;
+ }
+
if (buffer->message_destroy)
buffer->message_destroy (buffer->message_data);
diff --git a/thirdparty/harfbuzz/src/hb-buffer.hh b/thirdparty/harfbuzz/src/hb-buffer.hh
index 26c3f0fac8..bb1efe9dd3 100644
--- a/thirdparty/harfbuzz/src/hb-buffer.hh
+++ b/thirdparty/harfbuzz/src/hb-buffer.hh
@@ -32,6 +32,7 @@
#include "hb.hh"
#include "hb-unicode.hh"
+#include "hb-set-digest.hh"
#ifndef HB_BUFFER_MAX_LEN_FACTOR
@@ -207,6 +208,14 @@ struct hb_buffer_t
hb_glyph_info_t &prev () { return out_info[out_len ? out_len - 1 : 0]; }
hb_glyph_info_t prev () const { return out_info[out_len ? out_len - 1 : 0]; }
+ hb_set_digest_t digest () const
+ {
+ hb_set_digest_t d;
+ d.init ();
+ d.add_array (&info[0].codepoint, len, sizeof (info[0]));
+ return d;
+ }
+
HB_INTERNAL void similar (const hb_buffer_t &src);
HB_INTERNAL void reset ();
HB_INTERNAL void clear ();
@@ -402,6 +411,8 @@ struct hb_buffer_t
HB_INTERNAL void merge_out_clusters (unsigned int start, unsigned int end);
/* Merge clusters for deleting current glyph, and skip it. */
HB_INTERNAL void delete_glyph ();
+ HB_INTERNAL void delete_glyphs_inplace (bool (*filter) (const hb_glyph_info_t *info));
+
/* Adds glyph flags in mask to infos with clusters between start and end.
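
The new hb_buffer_t::digest() builds an hb_set_digest_t over the buffer's codepoints so later stages can cheaply rule out work that cannot apply to any glyph in the buffer. A set digest behaves like a tiny Bloom-filter-style mask: membership tests may report false positives but never false negatives. A single-mask sketch of the idea (HarfBuzz combines several such filters with different shifts; names here are illustrative):

    #include <cstdint>

    // One "lowest bits" filter: each value sets one bit of a fixed-width mask,
    // keyed by (value >> shift). may_have() can over-report, never under-report.
    template <typename mask_t, unsigned shift>
    struct lowest_bits_digest_t
    {
      static constexpr unsigned mask_bits = 8 * sizeof (mask_t);
      mask_t mask = 0;

      void add (uint32_t g)            { mask |= mask_t (1) << ((g >> shift) % mask_bits); }
      bool may_have (uint32_t g) const { return mask & (mask_t (1) << ((g >> shift) % mask_bits)); }

      // add_array-style bulk insertion over a strided array of codepoints,
      // mirroring how digest() feeds &info[0].codepoint with stride sizeof(info[0]).
      void add_array (const uint32_t *array, unsigned count, unsigned stride = sizeof (uint32_t))
      {
        for (unsigned i = 0; i < count; i++)
          add (*(const uint32_t *) ((const char *) array + i * stride));
      }
    };
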
diff --git a/thirdparty/harfbuzz/src/hb-cache.hh b/thirdparty/harfbuzz/src/hb-cache.hh
index 897f313fbd..f8c8108f1f 100644
--- a/thirdparty/harfbuzz/src/hb-cache.hh
+++ b/thirdparty/harfbuzz/src/hb-cache.hh
@@ -46,7 +46,7 @@ struct hb_cache_t
>::type;
static_assert ((key_bits >= cache_bits), "");
- static_assert ((key_bits + value_bits - cache_bits <= 8 * sizeof (item_t)), "");
+ static_assert ((key_bits + value_bits <= cache_bits + 8 * sizeof (item_t)), "");
void init () { clear (); }
void fini () {}
diff --git a/thirdparty/harfbuzz/src/hb-cff-interp-common.hh b/thirdparty/harfbuzz/src/hb-cff-interp-common.hh
index 5c2cb060a4..49805a89c5 100644
--- a/thirdparty/harfbuzz/src/hb-cff-interp-common.hh
+++ b/thirdparty/harfbuzz/src/hb-cff-interp-common.hh
@@ -284,65 +284,56 @@ struct UnsizedByteStr : UnsizedArrayOf <HBUINT8>
/* A byte string associated with the current offset and an error condition */
struct byte_str_ref_t
{
- byte_str_ref_t () { init (); }
-
- void init ()
- {
- str = hb_ubytes_t ();
- offset = 0;
- error = false;
- }
-
- void fini () {}
+ byte_str_ref_t ()
+ : str () {}
byte_str_ref_t (const hb_ubytes_t &str_, unsigned int offset_ = 0)
- : str (str_), offset (offset_), error (false) {}
+ : str (str_) { set_offset (offset_); }
void reset (const hb_ubytes_t &str_, unsigned int offset_ = 0)
{
str = str_;
- offset = offset_;
- error = false;
+ set_offset (offset_);
}
const unsigned char& operator [] (int i) {
- if (unlikely ((unsigned int) (offset + i) >= str.length))
+ if (unlikely ((unsigned int) (get_offset () + i) >= str.length))
{
set_error ();
return Null (unsigned char);
}
- return str[offset + i];
+ return str.arrayZ[get_offset () + i];
}
+ unsigned char head_unchecked () const { return str.arrayZ[get_offset ()]; }
+
/* Conversion to hb_ubytes_t */
- operator hb_ubytes_t () const { return str.sub_array (offset, str.length - offset); }
+ operator hb_ubytes_t () const { return str.sub_array (get_offset ()); }
hb_ubytes_t sub_array (unsigned int offset_, unsigned int len_) const
{ return str.sub_array (offset_, len_); }
bool avail (unsigned int count=1) const
- { return (!in_error () && offset + count <= str.length); }
+ { return get_offset () + count <= str.length; }
void inc (unsigned int count=1)
{
- if (likely (!in_error () && (offset <= str.length) && (offset + count <= str.length)))
- {
- offset += count;
- }
- else
- {
- offset = str.length;
- set_error ();
- }
+ /* Automatically puts us in error if count is out-of-range. */
+ set_offset (get_offset () + count);
}
- void set_error () { error = true; }
- bool in_error () const { return error; }
+ /* We (ab)use ubytes backwards_length as a cursor (called offset),
+ * as well as to store error condition. */
- hb_ubytes_t str;
- unsigned int offset; /* beginning of the sub-string within str */
+ unsigned get_offset () const { return str.backwards_length; }
+ void set_offset (unsigned offset) { str.backwards_length = offset; }
+
+ void set_error () { str.backwards_length = str.length + 1; }
+ bool in_error () const { return str.backwards_length > str.length; }
+
+ unsigned total_size () const { return str.length; }
protected:
- bool error;
+ hb_ubytes_t str;
};
using byte_str_array_t = hb_vector_t<hb_ubytes_t>;
@@ -491,8 +482,15 @@ struct arg_stack_t : cff_stack_t<ARG, 513>
/* an operator prefixed by its operands in a byte string */
struct op_str_t
{
- hb_ubytes_t str;
+ /* This used to have a hb_ubytes_t. Using a pointer and length
+ * in a particular order, saves 8 bytes in this struct and more
+ * in our parsed_cs_op_t subclass. */
+
+ const unsigned char *ptr = nullptr;
+
op_code_t op;
+
+ uint8_t length = 0;
};
/* base of OP_SERIALIZER */
@@ -503,9 +501,11 @@ struct op_serializer_t
{
TRACE_SERIALIZE (this);
- HBUINT8 *d = c->allocate_size<HBUINT8> (opstr.str.length);
+ unsigned char *d = c->allocate_size<unsigned char> (opstr.length);
if (unlikely (!d)) return_trace (false);
- memcpy (d, &opstr.str[0], opstr.str.length);
+ /* Faster than hb_memcpy for small strings. */
+ for (unsigned i = 0; i < opstr.length; i++)
+ d[i] = opstr.ptr[i];
return_trace (true);
}
};
@@ -529,16 +529,20 @@ struct parsed_values_t
{
VAL *val = values.push ();
val->op = op;
- val->str = str_ref.str.sub_array (opStart, str_ref.offset - opStart);
- opStart = str_ref.offset;
+ auto arr = str_ref.sub_array (opStart, str_ref.get_offset () - opStart);
+ val->ptr = arr.arrayZ;
+ val->length = arr.length;
+ opStart = str_ref.get_offset ();
}
void add_op (op_code_t op, const byte_str_ref_t& str_ref, const VAL &v)
{
VAL *val = values.push (v);
val->op = op;
- val->str = str_ref.sub_array ( opStart, str_ref.offset - opStart);
- opStart = str_ref.offset;
+ auto arr = str_ref.sub_array (opStart, str_ref.get_offset () - opStart);
+ val->ptr = arr.arrayZ;
+ val->length = arr.length;
+ opStart = str_ref.get_offset ();
}
bool has_op (op_code_t op) const
@@ -549,8 +553,7 @@ struct parsed_values_t
}
unsigned get_count () const { return values.length; }
- const VAL &get_value (unsigned int i) const { return values[i]; }
- const VAL &operator [] (unsigned int i) const { return get_value (i); }
+ const VAL &operator [] (unsigned int i) const { return values[i]; }
unsigned int opStart;
hb_vector_t<VAL> values;
@@ -565,23 +568,23 @@ struct interp_env_t
str_ref.reset (str_);
}
bool in_error () const
- { return error || str_ref.in_error () || argStack.in_error (); }
+ { return str_ref.in_error () || argStack.in_error (); }
- void set_error () { error = true; }
+ void set_error () { str_ref.set_error (); }
op_code_t fetch_op ()
{
op_code_t op = OpCode_Invalid;
if (unlikely (!str_ref.avail ()))
return OpCode_Invalid;
- op = (op_code_t)(unsigned char)str_ref[0];
+ op = (op_code_t) str_ref.head_unchecked ();
+ str_ref.inc ();
if (op == OpCode_escape) {
if (unlikely (!str_ref.avail ()))
return OpCode_Invalid;
- op = Make_OpCode_ESC(str_ref[1]);
+ op = Make_OpCode_ESC (str_ref.head_unchecked ());
str_ref.inc ();
}
- str_ref.inc ();
return op;
}
@@ -596,8 +599,6 @@ struct interp_env_t
str_ref;
arg_stack_t<ARG>
argStack;
- protected:
- bool error = false;
};
using num_interp_env_t = interp_env_t<>;
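
byte_str_ref_t now keeps its cursor in the hb_ubytes_t backwards_length field and encodes "error" as offset > length, which removes the separate offset/error members and is why avail() no longer needs an explicit in_error() check. The same scheme in stand-alone form (field names and the wrap guard are mine, not HarfBuzz's):

    #include <cstdint>

    // Cursor over a byte string where "offset ran past the end" doubles as the
    // error flag, so no separate bool is carried around.
    struct byte_cursor_t
    {
      const uint8_t *data = nullptr;
      unsigned length = 0;
      unsigned offset = 0;                        // cursor; offset > length means error

      bool in_error () const            { return offset > length; }
      void set_error ()                 { offset = length + 1; }
      bool avail (unsigned n = 1) const { return offset + n <= length; }

      void inc (unsigned n = 1)
      {
        offset += n;                              // stepping past the end marks the error
        if (offset > length + 1) offset = length + 1;   // keep it from wrapping
      }

      uint8_t head_unchecked () const { return data[offset]; }   // caller checked avail()
    };
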
diff --git a/thirdparty/harfbuzz/src/hb-cff2-interp-cs.hh b/thirdparty/harfbuzz/src/hb-cff2-interp-cs.hh
index d0b9e7b086..00c25800e6 100644
--- a/thirdparty/harfbuzz/src/hb-cff2-interp-cs.hh
+++ b/thirdparty/harfbuzz/src/hb-cff2-interp-cs.hh
@@ -40,13 +40,15 @@ struct blend_arg_t : number_t
void set_real (double v) { reset_blends (); number_t::set_real (v); }
void set_blends (unsigned int numValues_, unsigned int valueIndex_,
- unsigned int numBlends, hb_array_t<const blend_arg_t> blends_)
+ hb_array_t<const blend_arg_t> blends_)
{
numValues = numValues_;
valueIndex = valueIndex_;
- deltas.resize (numBlends);
+ unsigned numBlends = blends_.length;
+ if (unlikely (!deltas.resize (numBlends)))
+ return;
for (unsigned int i = 0; i < numBlends; i++)
- deltas[i] = blends_[i];
+ deltas.arrayZ[i] = blends_.arrayZ[i];
}
bool blending () const { return deltas.length > 0; }
@@ -61,7 +63,6 @@ struct blend_arg_t : number_t
hb_vector_t<number_t> deltas;
};
-typedef interp_env_t<blend_arg_t> BlendInterpEnv;
typedef biased_subrs_t<CFF2Subrs> cff2_biased_subrs_t;
template <typename ELEM>
@@ -154,8 +155,9 @@ struct cff2_cs_interp_env_t : cs_interp_env_t<ELEM, CFF2Subrs>
{
if (likely (scalars.length == deltas.length))
{
- for (unsigned int i = 0; i < scalars.length; i++)
- v += (double) scalars[i] * deltas[i].to_real ();
+ unsigned count = scalars.length;
+ for (unsigned i = 0; i < count; i++)
+ v += (double) scalars.arrayZ[i] * deltas.arrayZ[i].to_real ();
}
}
return v;
@@ -220,7 +222,7 @@ struct cff2_cs_opset_t : cs_opset_t<ELEM, OPSET, cff2_cs_interp_env_t<ELEM>, PAR
const hb_array_t<const ELEM> blends,
unsigned n, unsigned i)
{
- arg.set_blends (n, i, blends.length, blends);
+ arg.set_blends (n, i, blends);
}
template <typename T = ELEM,
hb_enable_if (!hb_is_same (T, blend_arg_t))>
diff --git a/thirdparty/harfbuzz/src/hb-common.cc b/thirdparty/harfbuzz/src/hb-common.cc
index bbb6cd552b..e9f9cfeb5f 100644
--- a/thirdparty/harfbuzz/src/hb-common.cc
+++ b/thirdparty/harfbuzz/src/hb-common.cc
@@ -285,7 +285,7 @@ struct hb_language_item_t {
lang = (hb_language_t) hb_malloc(len);
if (likely (lang))
{
- memcpy((unsigned char *) lang, s, len);
+ hb_memcpy((unsigned char *) lang, s, len);
for (unsigned char *p = (unsigned char *) lang; *p; p++)
*p = canon_map[*p];
}
@@ -379,7 +379,7 @@ hb_language_from_string (const char *str, int len)
/* NUL-terminate it. */
char strbuf[64];
len = hb_min (len, (int) sizeof (strbuf) - 1);
- memcpy (strbuf, str, len);
+ hb_memcpy (strbuf, str, len);
strbuf[len] = '\0';
item = lang_find_or_insert (strbuf);
}
@@ -976,7 +976,7 @@ hb_feature_from_string (const char *str, int len,
}
if (feature)
- memset (feature, 0, sizeof (*feature));
+ hb_memset (feature, 0, sizeof (*feature));
return false;
}
@@ -1025,7 +1025,7 @@ hb_feature_to_string (hb_feature_t *feature,
}
assert (len < ARRAY_LENGTH (s));
len = hb_min (len, size - 1);
- memcpy (buf, s, len);
+ hb_memcpy (buf, s, len);
buf[len] = '\0';
}
@@ -1088,7 +1088,7 @@ hb_variation_from_string (const char *str, int len,
}
if (variation)
- memset (variation, 0, sizeof (*variation));
+ hb_memset (variation, 0, sizeof (*variation));
return false;
}
@@ -1136,7 +1136,7 @@ get_C_locale ()
/**
* hb_variation_to_string:
* @variation: an #hb_variation_t to convert
- * @buf: (array length=size) (out): output string
+ * @buf: (array length=size) (out caller-allocates): output string
* @size: the allocated size of @buf
*
* Converts an #hb_variation_t into a `NULL`-terminated string in the format
@@ -1166,7 +1166,7 @@ hb_variation_to_string (hb_variation_t *variation,
assert (len < ARRAY_LENGTH (s));
len = hb_min (len, size - 1);
- memcpy (buf, s, len);
+ hb_memcpy (buf, s, len);
buf[len] = '\0';
}
diff --git a/thirdparty/harfbuzz/src/hb-config.hh b/thirdparty/harfbuzz/src/hb-config.hh
index 5567ddaec3..98b1e9d0cf 100644
--- a/thirdparty/harfbuzz/src/hb-config.hh
+++ b/thirdparty/harfbuzz/src/hb-config.hh
@@ -35,8 +35,9 @@
#include "config.h"
#endif
-#ifndef HB_BORING_EXPANSION
-#define HB_NO_BORING_EXPANSION
+#ifndef HB_EXPERIMENTAL_API
+#define HB_NO_BEYOND_64K
+#define HB_NO_VAR_COMPOSITES
#endif
#ifdef HB_TINY
@@ -71,6 +72,7 @@
#define HB_NO_LANGUAGE_PRIVATE_SUBTAG
#define HB_NO_LAYOUT_FEATURE_PARAMS
#define HB_NO_LAYOUT_COLLECT_GLYPHS
+#define HB_NO_LAYOUT_RARELY_USED
#define HB_NO_LAYOUT_UNUSED
#define HB_NO_MATH
#define HB_NO_META
@@ -83,6 +85,7 @@
#define HB_NO_OT_SHAPE_FRACTIONS
#define HB_NO_STYLE
#define HB_NO_SUBSET_LAYOUT
+#define HB_NO_VERTICAL
#define HB_NO_VAR
#endif
@@ -103,7 +106,7 @@
#ifdef HB_NO_BORING_EXPANSION
#define HB_NO_BEYOND_64K
-#define HB_NO_VARIATIONS2
+#define HB_NO_AVAR2
#endif
#ifdef HB_DISABLE_DEPRECATED
@@ -112,6 +115,11 @@
#define HB_IF_NOT_DEPRECATED(x) x
#endif
+#ifdef HB_NO_SHAPER
+#define HB_NO_OT_SHAPE
+#define HB_NO_AAT_SHAPE
+#endif
+
#ifdef HB_NO_AAT
#define HB_NO_OT_NAME_LANGUAGE_AAT
#define HB_NO_AAT_SHAPE
@@ -158,6 +166,7 @@
#define HB_NO_OT_SHAPER_HEBREW_FALLBACK
#define HB_NO_OT_SHAPER_THAI_FALLBACK
#define HB_NO_OT_SHAPER_VOWEL_CONSTRAINTS
+#define HB_NO_OT_SHAPER_MYANMAR_ZAWGYI
#endif
#ifdef NDEBUG
diff --git a/thirdparty/harfbuzz/src/hb-coretext.cc b/thirdparty/harfbuzz/src/hb-coretext.cc
index 99b33c001e..4267e0e13e 100644
--- a/thirdparty/harfbuzz/src/hb-coretext.cc
+++ b/thirdparty/harfbuzz/src/hb-coretext.cc
@@ -347,10 +347,13 @@ _hb_coretext_shaper_font_data_create (hb_font_t *font)
hb_ot_var_axis_info_t info;
unsigned int c = 1;
hb_ot_var_get_axis_infos (font->face, i, &c, &info);
- CFDictionarySetValue (variations,
- CFNumberCreate (kCFAllocatorDefault, kCFNumberIntType, &info.tag),
- CFNumberCreate (kCFAllocatorDefault, kCFNumberFloatType, &font->design_coords[i])
- );
+ float v = hb_clamp (font->design_coords[i], info.min_value, info.max_value);
+
+ CFNumberRef tag_number = CFNumberCreate (kCFAllocatorDefault, kCFNumberIntType, &info.tag);
+ CFNumberRef value_number = CFNumberCreate (kCFAllocatorDefault, kCFNumberFloatType, &v);
+ CFDictionarySetValue (variations, tag_number, value_number);
+ CFRelease (tag_number);
+ CFRelease (value_number);
}
CFDictionaryRef attributes =
@@ -648,7 +651,7 @@ _hb_coretext_shape (hb_shape_plan_t *shape_plan,
} else {
active_feature_t *feature = active_features.lsearch (event->feature);
if (feature)
- active_features.remove (feature - active_features.arrayZ);
+ active_features.remove_ordered (feature - active_features.arrayZ);
}
}
}
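
Note: the CoreText change plugs a Core Foundation leak. CFNumberCreate returns objects with a +1 retain count and CFDictionarySetValue retains key and value on its own, so the temporaries must be released by the caller; clamping the design coordinate also keeps the value inside the axis range. A standalone sketch of the create/set/release pattern:

    // Standalone Core Foundation sketch of the ownership rule used above.
    #include <CoreFoundation/CoreFoundation.h>

    static void set_float_value (CFMutableDictionaryRef dict, int tag, float value)
    {
      CFNumberRef key = CFNumberCreate (kCFAllocatorDefault, kCFNumberIntType, &tag);
      CFNumberRef val = CFNumberCreate (kCFAllocatorDefault, kCFNumberFloatType, &value);
      CFDictionarySetValue (dict, key, val);  // the dictionary retains both
      CFRelease (key);                        // drop our +1 references
      CFRelease (val);
    }
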
diff --git a/thirdparty/harfbuzz/src/hb-cplusplus.hh b/thirdparty/harfbuzz/src/hb-cplusplus.hh
index c4d9d29e23..a210ab7960 100644
--- a/thirdparty/harfbuzz/src/hb-cplusplus.hh
+++ b/thirdparty/harfbuzz/src/hb-cplusplus.hh
@@ -69,9 +69,9 @@ struct shared_ptr
operator T * () const { return p; }
T& operator * () const { return *get (); }
T* operator -> () const { return get (); }
- operator bool () { return p; }
- bool operator == (const shared_ptr &o) { return p == o.p; }
- bool operator != (const shared_ptr &o) { return p != o.p; }
+ operator bool () const { return p; }
+ bool operator == (const shared_ptr &o) const { return p == o.p; }
+ bool operator != (const shared_ptr &o) const { return p != o.p; }
static T* get_empty() { return v::get_empty (); }
T* reference() { return v::reference (p); }
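
Note: const-qualifying operator bool and the comparison operators lets hb::shared_ptr be tested and compared through const references. A minimal sketch that relies on the const overloads added above:

    // Sketch: these calls need the const-qualified operators.
    static bool same_face (const hb::shared_ptr<hb_face_t> &a,
                           const hb::shared_ptr<hb_face_t> &b)
    {
      if (!a || !b)      // operator bool () const
        return false;
      return a == b;     // operator == (...) const
    }
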
diff --git a/thirdparty/harfbuzz/src/hb-draw.cc b/thirdparty/harfbuzz/src/hb-draw.cc
index 46797e64e6..72c203f24f 100644
--- a/thirdparty/harfbuzz/src/hb-draw.cc
+++ b/thirdparty/harfbuzz/src/hb-draw.cc
@@ -80,6 +80,56 @@ hb_draw_close_path_nil (hb_draw_funcs_t *dfuncs HB_UNUSED, void *draw_data HB_UN
void *user_data HB_UNUSED) {}
+static bool
+_hb_draw_funcs_set_preamble (hb_draw_funcs_t *dfuncs,
+ bool func_is_null,
+ void **user_data,
+ hb_destroy_func_t *destroy)
+{
+ if (hb_object_is_immutable (dfuncs))
+ {
+ if (*destroy)
+ (*destroy) (*user_data);
+ return false;
+ }
+
+ if (func_is_null)
+ {
+ if (*destroy)
+ (*destroy) (*user_data);
+ *destroy = nullptr;
+ *user_data = nullptr;
+ }
+
+ return true;
+}
+
+static bool
+_hb_draw_funcs_set_middle (hb_draw_funcs_t *dfuncs,
+ void *user_data,
+ hb_destroy_func_t destroy)
+{
+ if (user_data && !dfuncs->user_data)
+ {
+ dfuncs->user_data = (decltype (dfuncs->user_data)) hb_calloc (1, sizeof (*dfuncs->user_data));
+ if (unlikely (!dfuncs->user_data))
+ goto fail;
+ }
+ if (destroy && !dfuncs->destroy)
+ {
+ dfuncs->destroy = (decltype (dfuncs->destroy)) hb_calloc (1, sizeof (*dfuncs->destroy));
+ if (unlikely (!dfuncs->destroy))
+ goto fail;
+ }
+
+ return true;
+
+fail:
+ if (destroy)
+ (destroy) (user_data);
+ return false;
+}
+
#define HB_DRAW_FUNC_IMPLEMENT(name) \
\
void \
@@ -88,43 +138,24 @@ hb_draw_funcs_set_##name##_func (hb_draw_funcs_t *dfuncs, \
void *user_data, \
hb_destroy_func_t destroy) \
{ \
- if (hb_object_is_immutable (dfuncs)) \
- return; \
+ if (!_hb_draw_funcs_set_preamble (dfuncs, !func, &user_data, &destroy))\
+ return; \
\
if (dfuncs->destroy && dfuncs->destroy->name) \
dfuncs->destroy->name (!dfuncs->user_data ? nullptr : dfuncs->user_data->name); \
\
- if (user_data && !dfuncs->user_data) \
- { \
- dfuncs->user_data = (decltype (dfuncs->user_data)) hb_calloc (1, sizeof (*dfuncs->user_data)); \
- if (unlikely (!dfuncs->user_data)) \
- goto fail; \
- } \
- if (destroy && !dfuncs->destroy) \
- { \
- dfuncs->destroy = (decltype (dfuncs->destroy)) hb_calloc (1, sizeof (*dfuncs->destroy)); \
- if (unlikely (!dfuncs->destroy)) \
- goto fail; \
- } \
+ if (!_hb_draw_funcs_set_middle (dfuncs, user_data, destroy)) \
+ return; \
\
- if (func) { \
+ if (func) \
dfuncs->func.name = func; \
- if (dfuncs->user_data) \
- dfuncs->user_data->name = user_data; \
- if (dfuncs->destroy) \
- dfuncs->destroy->name = destroy; \
- } else { \
+ else \
dfuncs->func.name = hb_draw_##name##_nil; \
- if (dfuncs->user_data) \
- dfuncs->user_data->name = nullptr; \
- if (dfuncs->destroy) \
- dfuncs->destroy->name = nullptr; \
- } \
- return; \
- \
-fail: \
- if (destroy) \
- destroy (user_data); \
+ \
+ if (dfuncs->user_data) \
+ dfuncs->user_data->name = user_data; \
+ if (dfuncs->destroy) \
+ dfuncs->destroy->name = destroy; \
}
HB_DRAW_FUNCS_IMPLEMENT_CALLBACKS
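
Note: the draw-funcs setter macro previously repeated the immutability check, the null-function normalization and a goto-based allocation-failure path for every callback; _hb_draw_funcs_set_preamble and _hb_draw_funcs_set_middle now carry that shared logic. An approximate expansion of HB_DRAW_FUNC_IMPLEMENT for the move_to callback, written out for readability (not literal generated code):

    void
    hb_draw_funcs_set_move_to_func (hb_draw_funcs_t *dfuncs,
                                    hb_draw_move_to_func_t func,
                                    void *user_data,
                                    hb_destroy_func_t destroy)
    {
      if (!_hb_draw_funcs_set_preamble (dfuncs, !func, &user_data, &destroy))
        return;                                 // immutable object, or func == nullptr cleanup

      if (dfuncs->destroy && dfuncs->destroy->move_to)
        dfuncs->destroy->move_to (!dfuncs->user_data ? nullptr : dfuncs->user_data->move_to);

      if (!_hb_draw_funcs_set_middle (dfuncs, user_data, destroy))
        return;                                 // allocation failed; destroy already invoked

      dfuncs->func.move_to = func ? func : hb_draw_move_to_nil;
      if (dfuncs->user_data) dfuncs->user_data->move_to = user_data;
      if (dfuncs->destroy)   dfuncs->destroy->move_to   = destroy;
    }
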
diff --git a/thirdparty/harfbuzz/src/hb-face.cc b/thirdparty/harfbuzz/src/hb-face.cc
index 2160d6a010..8b4b635c7a 100644
--- a/thirdparty/harfbuzz/src/hb-face.cc
+++ b/thirdparty/harfbuzz/src/hb-face.cc
@@ -288,6 +288,7 @@ hb_face_destroy (hb_face_t *face)
{
if (!hb_object_destroy (face)) return;
+#ifndef HB_NO_SHAPER
for (hb_face_t::plan_node_t *node = face->shape_plans; node; )
{
hb_face_t::plan_node_t *next = node->next;
@@ -295,6 +296,7 @@ hb_face_destroy (hb_face_t *face)
hb_free (node);
node = next;
}
+#endif
face->data.fini ();
face->table.fini ();
@@ -633,20 +635,29 @@ hb_face_collect_variation_unicodes (hb_face_t *face,
* face-builder: A face that has add_table().
*/
+struct face_table_info_t
+{
+ hb_blob_t* data;
+ signed order;
+};
+
struct hb_face_builder_data_t
{
- hb_hashmap_t<hb_tag_t, hb_blob_t *> tables;
+ hb_hashmap_t<hb_tag_t, face_table_info_t> tables;
};
static int compare_entries (const void* pa, const void* pb)
{
- const auto& a = * (const hb_pair_t<hb_tag_t, hb_blob_t*> *) pa;
- const auto& b = * (const hb_pair_t<hb_tag_t, hb_blob_t*> *) pb;
+ const auto& a = * (const hb_pair_t<hb_tag_t, face_table_info_t> *) pa;
+ const auto& b = * (const hb_pair_t<hb_tag_t, face_table_info_t> *) pb;
/* Order by blob size first (smallest to largest) and then table tag */
- if (a.second->length != b.second->length)
- return a.second->length < b.second->length ? -1 : +1;
+ if (a.second.order != b.second.order)
+ return a.second.order < b.second.order ? -1 : +1;
+
+ if (a.second.data->length != b.second.data->length)
+ return a.second.data->length < b.second.data->length ? -1 : +1;
return a.first < b.first ? -1 : a.first == b.first ? 0 : +1;
}
@@ -668,8 +679,8 @@ _hb_face_builder_data_destroy (void *user_data)
{
hb_face_builder_data_t *data = (hb_face_builder_data_t *) user_data;
- for (hb_blob_t* b : data->tables.values())
- hb_blob_destroy (b);
+ for (auto info : data->tables.values())
+ hb_blob_destroy (info.data);
data->tables.fini ();
@@ -683,8 +694,8 @@ _hb_face_builder_data_reference_blob (hb_face_builder_data_t *data)
unsigned int table_count = data->tables.get_population ();
unsigned int face_length = table_count * 16 + 12;
- for (hb_blob_t* b : data->tables.values())
- face_length += hb_ceil_to_4 (hb_blob_get_length (b));
+ for (auto info : data->tables.values())
+ face_length += hb_ceil_to_4 (hb_blob_get_length (info.data));
char *buf = (char *) hb_malloc (face_length);
if (unlikely (!buf))
@@ -699,7 +710,7 @@ _hb_face_builder_data_reference_blob (hb_face_builder_data_t *data)
hb_tag_t sfnt_tag = is_cff ? OT::OpenTypeFontFile::CFFTag : OT::OpenTypeFontFile::TrueTypeTag;
// Sort the tags so that produced face is deterministic.
- hb_vector_t<hb_pair_t <hb_tag_t, hb_blob_t*>> sorted_entries;
+ hb_vector_t<hb_pair_t <hb_tag_t, face_table_info_t>> sorted_entries;
data->tables.iter () | hb_sink (sorted_entries);
if (unlikely (sorted_entries.in_error ()))
{
@@ -708,7 +719,13 @@ _hb_face_builder_data_reference_blob (hb_face_builder_data_t *data)
}
sorted_entries.qsort (compare_entries);
- bool ret = f->serialize_single (&c, sfnt_tag, + sorted_entries.iter());
+
+ bool ret = f->serialize_single (&c,
+ sfnt_tag,
+ + sorted_entries.iter()
+ | hb_map ([&] (hb_pair_t<hb_tag_t, face_table_info_t> _) {
+ return hb_pair_t<hb_tag_t, hb_blob_t*> (_.first, _.second.data);
+ }));
c.end_serialize ();
@@ -729,7 +746,7 @@ _hb_face_builder_reference_table (hb_face_t *face HB_UNUSED, hb_tag_t tag, void
if (!tag)
return _hb_face_builder_data_reference_blob (data);
- return hb_blob_reference (data->tables[tag]);
+ return hb_blob_reference (data->tables[tag].data);
}
@@ -769,16 +786,16 @@ hb_face_builder_create ()
hb_bool_t
hb_face_builder_add_table (hb_face_t *face, hb_tag_t tag, hb_blob_t *blob)
{
- if (tag == HB_MAP_VALUE_INVALID)
+ if (unlikely (face->destroy != (hb_destroy_func_t) _hb_face_builder_data_destroy))
return false;
- if (unlikely (face->destroy != (hb_destroy_func_t) _hb_face_builder_data_destroy))
+ if (tag == HB_MAP_VALUE_INVALID)
return false;
hb_face_builder_data_t *data = (hb_face_builder_data_t *) face->user_data;
- hb_blob_t* previous = data->tables.get (tag);
- if (!data->tables.set (tag, hb_blob_reference (blob)))
+ hb_blob_t* previous = data->tables.get (tag).data;
+ if (!data->tables.set (tag, face_table_info_t {hb_blob_reference (blob), -1}))
{
hb_blob_destroy (blob);
return false;
@@ -787,3 +804,39 @@ hb_face_builder_add_table (hb_face_t *face, hb_tag_t tag, hb_blob_t *blob)
hb_blob_destroy (previous);
return true;
}
+
+/**
+ * hb_face_builder_sort_tables:
+ * @face: A face object created with hb_face_builder_create()
+ * @tags: (array zero-terminated=1): ordered list of table tags terminated by
+ * %HB_TAG_NONE
+ *
+ * Set the ordering of tables for serialization. Any tables not
+ * specified in the tags list will be ordered after the tables in
+ * tags, ordered by the default sort ordering.
+ *
+ * Since: 5.3.0
+ **/
+void
+hb_face_builder_sort_tables (hb_face_t *face,
+ const hb_tag_t *tags)
+{
+ if (unlikely (face->destroy != (hb_destroy_func_t) _hb_face_builder_data_destroy))
+ return;
+
+ hb_face_builder_data_t *data = (hb_face_builder_data_t *) face->user_data;
+
+ // Sort all unspecified tables after any specified tables.
+ for (auto& info : data->tables.values_ref())
+ info.order = (unsigned) -1;
+
+ signed order = 0;
+ for (const hb_tag_t* tag = tags;
+ *tag;
+ tag++)
+ {
+ face_table_info_t* info;
+ if (!data->tables.has (*tag, &info)) continue;
+ info->order = order++;
+ }
+}
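
Note: hb_face_builder_sort_tables is a new public entry point (its declaration follows in hb-face.h). A usage sketch, assuming cmap_blob and glyf_blob are hb_blob_t pointers the caller already owns:

    hb_face_t *builder = hb_face_builder_create ();
    hb_face_builder_add_table (builder, HB_TAG ('c','m','a','p'), cmap_blob);
    hb_face_builder_add_table (builder, HB_TAG ('g','l','y','f'), glyf_blob);

    /* Serialize 'glyf' first; tables not listed keep the default size/tag ordering. */
    hb_tag_t order[] = { HB_TAG ('g','l','y','f'), HB_TAG_NONE };
    hb_face_builder_sort_tables (builder, order);

    hb_blob_t *font_blob = hb_face_reference_blob (builder);  /* serialized font */
    hb_face_destroy (builder);
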
diff --git a/thirdparty/harfbuzz/src/hb-face.h b/thirdparty/harfbuzz/src/hb-face.h
index 6ef2f8b886..38e7104af6 100644
--- a/thirdparty/harfbuzz/src/hb-face.h
+++ b/thirdparty/harfbuzz/src/hb-face.h
@@ -171,6 +171,10 @@ hb_face_builder_add_table (hb_face_t *face,
hb_tag_t tag,
hb_blob_t *blob);
+HB_EXTERN void
+hb_face_builder_sort_tables (hb_face_t *face,
+ const hb_tag_t *tags);
+
HB_END_DECLS
diff --git a/thirdparty/harfbuzz/src/hb-face.hh b/thirdparty/harfbuzz/src/hb-face.hh
index 12e10d01e0..1bf0606e52 100644
--- a/thirdparty/harfbuzz/src/hb-face.hh
+++ b/thirdparty/harfbuzz/src/hb-face.hh
@@ -65,7 +65,9 @@ struct hb_face_t
hb_shape_plan_t *shape_plan;
plan_node_t *next;
};
+#ifndef HB_NO_SHAPER
hb_atomic_ptr_t<plan_node_t> shape_plans;
+#endif
hb_blob_t *reference_table (hb_tag_t tag) const
{
diff --git a/thirdparty/harfbuzz/src/hb-fallback-shape.cc b/thirdparty/harfbuzz/src/hb-fallback-shape.cc
index f8524ecc8e..c54ad8764b 100644
--- a/thirdparty/harfbuzz/src/hb-fallback-shape.cc
+++ b/thirdparty/harfbuzz/src/hb-fallback-shape.cc
@@ -75,16 +75,6 @@ _hb_fallback_shape (hb_shape_plan_t *shape_plan HB_UNUSED,
const hb_feature_t *features HB_UNUSED,
unsigned int num_features HB_UNUSED)
{
- /* TODO
- *
- * - Apply fallback kern.
- * - Handle Variation Selectors?
- * - Apply normalization?
- *
- * This will make the fallback shaper into a dumb "TrueType"
- * shaper which many people unfortunately still request.
- */
-
hb_codepoint_t space;
bool has_space = (bool) font->get_nominal_glyph (' ', &space);
diff --git a/thirdparty/harfbuzz/src/hb-font.cc b/thirdparty/harfbuzz/src/hb-font.cc
index 856bbdda32..0ce3e2608a 100644
--- a/thirdparty/harfbuzz/src/hb-font.cc
+++ b/thirdparty/harfbuzz/src/hb-font.cc
@@ -71,7 +71,7 @@ hb_font_get_font_h_extents_nil (hb_font_t *font HB_UNUSED,
hb_font_extents_t *extents,
void *user_data HB_UNUSED)
{
- memset (extents, 0, sizeof (*extents));
+ hb_memset (extents, 0, sizeof (*extents));
return false;
}
@@ -96,7 +96,7 @@ hb_font_get_font_v_extents_nil (hb_font_t *font HB_UNUSED,
hb_font_extents_t *extents,
void *user_data HB_UNUSED)
{
- memset (extents, 0, sizeof (*extents));
+ hb_memset (extents, 0, sizeof (*extents));
return false;
}
@@ -409,7 +409,7 @@ hb_font_get_glyph_extents_nil (hb_font_t *font HB_UNUSED,
hb_glyph_extents_t *extents,
void *user_data HB_UNUSED)
{
- memset (extents, 0, sizeof (*extents));
+ hb_memset (extents, 0, sizeof (*extents));
return false;
}
@@ -518,6 +518,7 @@ typedef struct hb_font_get_glyph_shape_default_adaptor_t {
void *draw_data;
float x_scale;
float y_scale;
+ float slant;
} hb_font_get_glyph_shape_default_adaptor_t;
static void
@@ -530,9 +531,10 @@ hb_draw_move_to_default (hb_draw_funcs_t *dfuncs HB_UNUSED,
hb_font_get_glyph_shape_default_adaptor_t *adaptor = (hb_font_get_glyph_shape_default_adaptor_t *) draw_data;
float x_scale = adaptor->x_scale;
float y_scale = adaptor->y_scale;
+ float slant = adaptor->slant;
adaptor->draw_funcs->emit_move_to (adaptor->draw_data, *st,
- x_scale * to_x, y_scale * to_y);
+ x_scale * to_x + slant * to_y, y_scale * to_y);
}
static void
@@ -544,12 +546,13 @@ hb_draw_line_to_default (hb_draw_funcs_t *dfuncs HB_UNUSED, void *draw_data,
hb_font_get_glyph_shape_default_adaptor_t *adaptor = (hb_font_get_glyph_shape_default_adaptor_t *) draw_data;
float x_scale = adaptor->x_scale;
float y_scale = adaptor->y_scale;
+ float slant = adaptor->slant;
- st->current_x *= x_scale;
- st->current_y *= y_scale;
+ st->current_x = st->current_x * x_scale + st->current_y * slant;
+ st->current_y = st->current_y * y_scale;
adaptor->draw_funcs->emit_line_to (adaptor->draw_data, *st,
- x_scale * to_x, y_scale * to_y);
+ x_scale * to_x + slant * to_y, y_scale * to_y);
}
static void
@@ -562,13 +565,14 @@ hb_draw_quadratic_to_default (hb_draw_funcs_t *dfuncs HB_UNUSED, void *draw_data
hb_font_get_glyph_shape_default_adaptor_t *adaptor = (hb_font_get_glyph_shape_default_adaptor_t *) draw_data;
float x_scale = adaptor->x_scale;
float y_scale = adaptor->y_scale;
+ float slant = adaptor->slant;
- st->current_x *= x_scale;
- st->current_y *= y_scale;
+ st->current_x = st->current_x * x_scale + st->current_y * slant;
+ st->current_y = st->current_y * y_scale;
adaptor->draw_funcs->emit_quadratic_to (adaptor->draw_data, *st,
- x_scale * control_x, y_scale * control_y,
- x_scale * to_x, y_scale * to_y);
+ x_scale * control_x + slant * control_y, y_scale * control_y,
+ x_scale * to_x + slant * to_y, y_scale * to_y);
}
static void
@@ -582,14 +586,15 @@ hb_draw_cubic_to_default (hb_draw_funcs_t *dfuncs HB_UNUSED, void *draw_data,
hb_font_get_glyph_shape_default_adaptor_t *adaptor = (hb_font_get_glyph_shape_default_adaptor_t *) draw_data;
float x_scale = adaptor->x_scale;
float y_scale = adaptor->y_scale;
+ float slant = adaptor->slant;
- st->current_x *= x_scale;
- st->current_y *= y_scale;
+ st->current_x = st->current_x * x_scale + st->current_y * slant;
+ st->current_y = st->current_y * y_scale;
adaptor->draw_funcs->emit_cubic_to (adaptor->draw_data, *st,
- x_scale * control1_x, y_scale * control1_y,
- x_scale * control2_x, y_scale * control2_y,
- x_scale * to_x, y_scale * to_y);
+ x_scale * control1_x + slant * control1_y, y_scale * control1_y,
+ x_scale * control2_x + slant * control2_y, y_scale * control2_y,
+ x_scale * to_x + slant * to_y, y_scale * to_y);
}
static void
@@ -623,8 +628,10 @@ hb_font_get_glyph_shape_default (hb_font_t *font,
hb_font_get_glyph_shape_default_adaptor_t adaptor = {
draw_funcs,
draw_data,
- (float) font->x_scale / (float) font->parent->x_scale,
- (float) font->y_scale / (float) font->parent->y_scale
+ font->parent->x_scale ? (float) font->x_scale / (float) font->parent->x_scale : 0.f,
+ font->parent->y_scale ? (float) font->y_scale / (float) font->parent->y_scale : 0.f,
+ font->parent->y_scale ? (font->slant - font->parent->slant) *
+ (float) font->x_scale / (float) font->parent->y_scale : 0.f
};
font->parent->get_glyph_shape (glyph,
@@ -822,6 +829,56 @@ hb_font_funcs_is_immutable (hb_font_funcs_t *ffuncs)
}
+static bool
+_hb_font_funcs_set_preamble (hb_font_funcs_t *ffuncs,
+ bool func_is_null,
+ void **user_data,
+ hb_destroy_func_t *destroy)
+{
+ if (hb_object_is_immutable (ffuncs))
+ {
+ if (*destroy)
+ (*destroy) (*user_data);
+ return false;
+ }
+
+ if (func_is_null)
+ {
+ if (*destroy)
+ (*destroy) (*user_data);
+ *destroy = nullptr;
+ *user_data = nullptr;
+ }
+
+ return true;
+}
+
+static bool
+_hb_font_funcs_set_middle (hb_font_funcs_t *ffuncs,
+ void *user_data,
+ hb_destroy_func_t destroy)
+{
+ if (user_data && !ffuncs->user_data)
+ {
+ ffuncs->user_data = (decltype (ffuncs->user_data)) hb_calloc (1, sizeof (*ffuncs->user_data));
+ if (unlikely (!ffuncs->user_data))
+ goto fail;
+ }
+ if (destroy && !ffuncs->destroy)
+ {
+ ffuncs->destroy = (decltype (ffuncs->destroy)) hb_calloc (1, sizeof (*ffuncs->destroy));
+ if (unlikely (!ffuncs->destroy))
+ goto fail;
+ }
+
+ return true;
+
+fail:
+ if (destroy)
+ (destroy) (user_data);
+ return false;
+}
+
#define HB_FONT_FUNC_IMPLEMENT(name) \
\
void \
@@ -830,51 +887,24 @@ hb_font_funcs_set_##name##_func (hb_font_funcs_t *ffuncs, \
void *user_data, \
hb_destroy_func_t destroy) \
{ \
- if (hb_object_is_immutable (ffuncs)) \
- goto fail; \
- \
- if (!func) \
- { \
- if (destroy) \
- destroy (user_data); \
- destroy = nullptr; \
- user_data = nullptr; \
- } \
+ if (!_hb_font_funcs_set_preamble (ffuncs, !func, &user_data, &destroy))\
+ return; \
\
if (ffuncs->destroy && ffuncs->destroy->name) \
ffuncs->destroy->name (!ffuncs->user_data ? nullptr : ffuncs->user_data->name); \
\
- if (user_data && !ffuncs->user_data) \
- { \
- ffuncs->user_data = (decltype (ffuncs->user_data)) hb_calloc (1, sizeof (*ffuncs->user_data)); \
- if (unlikely (!ffuncs->user_data)) \
- goto fail; \
- } \
- if (destroy && !ffuncs->destroy) \
- { \
- ffuncs->destroy = (decltype (ffuncs->destroy)) hb_calloc (1, sizeof (*ffuncs->destroy)); \
- if (unlikely (!ffuncs->destroy)) \
- goto fail; \
- } \
+ if (!_hb_font_funcs_set_middle (ffuncs, user_data, destroy)) \
+ return; \
\
- if (func) { \
+ if (func) \
ffuncs->get.f.name = func; \
- if (ffuncs->user_data) \
- ffuncs->user_data->name = user_data; \
- if (ffuncs->destroy) \
- ffuncs->destroy->name = destroy; \
- } else { \
+ else \
ffuncs->get.f.name = hb_font_get_##name##_default; \
- if (ffuncs->user_data) \
- ffuncs->user_data->name = nullptr; \
- if (ffuncs->destroy) \
- ffuncs->destroy->name = nullptr; \
- } \
- return; \
- \
-fail: \
- if (destroy) \
- destroy (user_data); \
+ \
+ if (ffuncs->user_data) \
+ ffuncs->user_data->name = user_data; \
+ if (ffuncs->destroy) \
+ ffuncs->destroy->name = destroy; \
}
HB_FONT_FUNCS_IMPLEMENT_CALLBACKS
@@ -1323,7 +1353,7 @@ hb_font_get_glyph_from_name (hb_font_t *font,
* @draw_data: User data to pass to draw callbacks
*
* Fetches the glyph shape that corresponds to a glyph in the specified @font.
- * The shape is returned by way of calls to the callsbacks of the @dfuncs
+ * The shape is returned by way of calls to the callbacks of the @dfuncs
* objects, with @draw_data passed to them.
*
* Since: 4.0.0
@@ -1780,8 +1810,8 @@ hb_font_create_sub_font (hb_font_t *parent)
float *design_coords = (float *) hb_calloc (num_coords, sizeof (parent->design_coords[0]));
if (likely (coords && design_coords))
{
- memcpy (coords, parent->coords, num_coords * sizeof (parent->coords[0]));
- memcpy (design_coords, parent->design_coords, num_coords * sizeof (parent->design_coords[0]));
+ hb_memcpy (coords, parent->coords, num_coords * sizeof (parent->coords[0]));
+ hb_memcpy (design_coords, parent->design_coords, num_coords * sizeof (parent->design_coords[0]));
_hb_font_adopt_var_coords (font, coords, design_coords, num_coords);
}
else
@@ -2443,7 +2473,7 @@ hb_font_set_var_coords_design (hb_font_t *font,
}
if (coords_length)
- memcpy (design_coords, coords, coords_length * sizeof (font->design_coords[0]));
+ hb_memcpy (design_coords, coords, coords_length * sizeof (font->design_coords[0]));
hb_ot_var_normalize_coords (font->face, coords_length, coords, normalized);
_hb_font_adopt_var_coords (font, normalized, design_coords, coords_length);
@@ -2519,8 +2549,8 @@ hb_font_set_var_coords_normalized (hb_font_t *font,
if (coords_length)
{
- memcpy (copy, coords, coords_length * sizeof (coords[0]));
- memcpy (unmapped, coords, coords_length * sizeof (coords[0]));
+ hb_memcpy (copy, coords, coords_length * sizeof (coords[0]));
+ hb_memcpy (unmapped, coords, coords_length * sizeof (coords[0]));
}
/* Best effort design coords simulation */
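
Note: the default glyph-shape adaptor now carries a synthetic-slant term next to the parent/child scale ratios, and both ratios fall back to 0 when the parent scale is 0 to avoid a division by zero. Each outline point is effectively sheared, then scaled; a sketch of the transform using the adaptor's field names:

    // Sketch: the per-point transform applied by the default adaptor callbacks.
    //   x_scale = font->x_scale / parent->x_scale (0 if the parent scale is 0)
    //   y_scale = font->y_scale / parent->y_scale (0 if the parent scale is 0)
    //   slant   = (font->slant - parent->slant) * font->x_scale / parent->y_scale
    static inline void transform_point (float x_scale, float y_scale, float slant,
                                        float &x, float &y)
    {
      x = x * x_scale + y * slant;  // shear by the slant delta, then scale
      y = y * y_scale;
    }
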
diff --git a/thirdparty/harfbuzz/src/hb-font.hh b/thirdparty/harfbuzz/src/hb-font.hh
index bb402e23eb..6942d99c70 100644
--- a/thirdparty/harfbuzz/src/hb-font.hh
+++ b/thirdparty/harfbuzz/src/hb-font.hh
@@ -206,14 +206,14 @@ struct hb_font_t
hb_bool_t get_font_h_extents (hb_font_extents_t *extents)
{
- memset (extents, 0, sizeof (*extents));
+ hb_memset (extents, 0, sizeof (*extents));
return klass->get.f.font_h_extents (this, user_data,
extents,
!klass->user_data ? nullptr : klass->user_data->font_h_extents);
}
hb_bool_t get_font_v_extents (hb_font_extents_t *extents)
{
- memset (extents, 0, sizeof (*extents));
+ hb_memset (extents, 0, sizeof (*extents));
return klass->get.f.font_v_extents (this, user_data,
extents,
!klass->user_data ? nullptr : klass->user_data->font_v_extents);
@@ -342,7 +342,7 @@ struct hb_font_t
hb_bool_t get_glyph_extents (hb_codepoint_t glyph,
hb_glyph_extents_t *extents)
{
- memset (extents, 0, sizeof (*extents));
+ hb_memset (extents, 0, sizeof (*extents));
return klass->get.f.glyph_extents (this, user_data,
glyph,
extents,
diff --git a/thirdparty/harfbuzz/src/hb-ft.cc b/thirdparty/harfbuzz/src/hb-ft.cc
index bcc1dd080f..3892dedc13 100644
--- a/thirdparty/harfbuzz/src/hb-ft.cc
+++ b/thirdparty/harfbuzz/src/hb-ft.cc
@@ -89,7 +89,7 @@ struct hb_ft_font_t
bool unref; /* Whether to destroy ft_face when done. */
bool transform; /* Whether to apply FT_Face's transform. */
- mutable hb_mutex_t lock;
+ mutable hb_mutex_t lock; /* Protects members below. */
FT_Face ft_face;
mutable unsigned cached_serial;
mutable hb_ft_advance_cache_t advance_cache;
@@ -732,16 +732,18 @@ hb_ft_get_font_h_extents (hb_font_t *font HB_UNUSED,
static int
_hb_ft_move_to (const FT_Vector *to,
- hb_draw_session_t *drawing)
+ void *arg)
{
+ hb_draw_session_t *drawing = (hb_draw_session_t *) arg;
drawing->move_to (to->x, to->y);
return FT_Err_Ok;
}
static int
_hb_ft_line_to (const FT_Vector *to,
- hb_draw_session_t *drawing)
+ void *arg)
{
+ hb_draw_session_t *drawing = (hb_draw_session_t *) arg;
drawing->line_to (to->x, to->y);
return FT_Err_Ok;
}
@@ -749,8 +751,9 @@ _hb_ft_line_to (const FT_Vector *to,
static int
_hb_ft_conic_to (const FT_Vector *control,
const FT_Vector *to,
- hb_draw_session_t *drawing)
+ void *arg)
{
+ hb_draw_session_t *drawing = (hb_draw_session_t *) arg;
drawing->quadratic_to (control->x, control->y,
to->x, to->y);
return FT_Err_Ok;
@@ -760,8 +763,9 @@ static int
_hb_ft_cubic_to (const FT_Vector *control1,
const FT_Vector *control2,
const FT_Vector *to,
- hb_draw_session_t *drawing)
+ void *arg)
{
+ hb_draw_session_t *drawing = (hb_draw_session_t *) arg;
drawing->cubic_to (control1->x, control1->y,
control2->x, control2->y,
to->x, to->y);
@@ -787,10 +791,10 @@ hb_ft_get_glyph_shape (hb_font_t *font HB_UNUSED,
return;
const FT_Outline_Funcs outline_funcs = {
- (FT_Outline_MoveToFunc) _hb_ft_move_to,
- (FT_Outline_LineToFunc) _hb_ft_line_to,
- (FT_Outline_ConicToFunc) _hb_ft_conic_to,
- (FT_Outline_CubicToFunc) _hb_ft_cubic_to,
+ _hb_ft_move_to,
+ _hb_ft_line_to,
+ _hb_ft_conic_to,
+ _hb_ft_cubic_to,
0, /* shift */
0, /* delta */
};
@@ -975,8 +979,9 @@ hb_ft_face_create_referenced (FT_Face ft_face)
}
static void
-hb_ft_face_finalize (FT_Face ft_face)
+hb_ft_face_finalize (void *arg)
{
+ FT_Face ft_face = (FT_Face) arg;
hb_face_destroy ((hb_face_t *) ft_face->generic.data);
}
@@ -1008,7 +1013,7 @@ hb_ft_face_create_cached (FT_Face ft_face)
ft_face->generic.finalizer (ft_face);
ft_face->generic.data = hb_ft_face_create (ft_face, nullptr);
- ft_face->generic.finalizer = (FT_Generic_Finalizer) hb_ft_face_finalize;
+ ft_face->generic.finalizer = hb_ft_face_finalize;
}
return hb_face_reference ((hb_face_t *) ft_face->generic.data);
@@ -1217,8 +1222,9 @@ get_ft_library ()
}
static void
-_release_blob (FT_Face ft_face)
+_release_blob (void *arg)
{
+ FT_Face ft_face = (FT_Face) arg;
hb_blob_destroy ((hb_blob_t *) ft_face->generic.data);
}
@@ -1271,7 +1277,7 @@ hb_ft_font_set_funcs (hb_font_t *font)
ft_face->generic.data = blob;
- ft_face->generic.finalizer = (FT_Generic_Finalizer) _release_blob;
+ ft_face->generic.finalizer = _release_blob;
_hb_ft_font_set_funcs (font, ft_face, true);
hb_ft_font_set_load_flags (font, FT_LOAD_DEFAULT | FT_LOAD_NO_HINTING);
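
Note: FreeType declares FT_Outline_MoveToFunc and the related callback types with a void * user argument. The old code declared the helpers with hb_draw_session_t * and cast the function pointers, which is undefined behaviour and trips CFI builds; matching FreeType's signatures exactly and casting the argument inside keeps the calls well-defined. A sketch of how the table is consumed (the FT_Outline_Decompose call itself is outside this hunk and is shown here as an assumption):

    // Sketch, assuming an FT_Outline 'outline' and an hb_draw_session_t 'drawing'.
    FT_Outline_Funcs funcs = {
      _hb_ft_move_to,   // exact FT_Outline_MoveToFunc signature: (const FT_Vector *, void *)
      _hb_ft_line_to,
      _hb_ft_conic_to,
      _hb_ft_cubic_to,
      0 /* shift */, 0 /* delta */
    };
    FT_Outline_Decompose (&outline, &funcs, &drawing);  // 'drawing' arrives as the void * arg
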
diff --git a/thirdparty/harfbuzz/src/hb-glib.cc b/thirdparty/harfbuzz/src/hb-glib.cc
index 8ddc7ebad8..1da81696e7 100644
--- a/thirdparty/harfbuzz/src/hb-glib.cc
+++ b/thirdparty/harfbuzz/src/hb-glib.cc
@@ -129,32 +129,9 @@ hb_glib_unicode_compose (hb_unicode_funcs_t *ufuncs HB_UNUSED,
{
#if GLIB_CHECK_VERSION(2,29,12)
return g_unichar_compose (a, b, ab);
+#else
+ return false;
#endif
-
- /* We don't ifdef-out the fallback code such that compiler always
- * sees it and makes sure it's compilable. */
-
- gchar utf8[12];
- gchar *normalized;
- int len;
- hb_bool_t ret;
-
- len = g_unichar_to_utf8 (a, utf8);
- len += g_unichar_to_utf8 (b, utf8 + len);
- normalized = g_utf8_normalize (utf8, len, G_NORMALIZE_NFC);
- len = g_utf8_strlen (normalized, -1);
- if (unlikely (!len))
- return false;
-
- if (len == 1) {
- *ab = g_utf8_get_char (normalized);
- ret = true;
- } else {
- ret = false;
- }
-
- g_free (normalized);
- return ret;
}
static hb_bool_t
@@ -166,55 +143,9 @@ hb_glib_unicode_decompose (hb_unicode_funcs_t *ufuncs HB_UNUSED,
{
#if GLIB_CHECK_VERSION(2,29,12)
return g_unichar_decompose (ab, a, b);
+#else
+ return false;
#endif
-
- /* We don't ifdef-out the fallback code such that compiler always
- * sees it and makes sure it's compilable. */
-
- gchar utf8[6];
- gchar *normalized;
- int len;
- hb_bool_t ret;
-
- len = g_unichar_to_utf8 (ab, utf8);
- normalized = g_utf8_normalize (utf8, len, G_NORMALIZE_NFD);
- len = g_utf8_strlen (normalized, -1);
- if (unlikely (!len))
- return false;
-
- if (len == 1) {
- *a = g_utf8_get_char (normalized);
- *b = 0;
- ret = *a != ab;
- } else if (len == 2) {
- *a = g_utf8_get_char (normalized);
- *b = g_utf8_get_char (g_utf8_next_char (normalized));
- /* Here's the ugly part: if ab decomposes to a single character and
- * that character decomposes again, we have to detect that and undo
- * the second part :-(. */
- gchar *recomposed = g_utf8_normalize (normalized, -1, G_NORMALIZE_NFC);
- hb_codepoint_t c = g_utf8_get_char (recomposed);
- if (c != ab && c != *a) {
- *a = c;
- *b = 0;
- }
- g_free (recomposed);
- ret = true;
- } else {
- /* If decomposed to more than two characters, take the last one,
- * and recompose the rest to get the first component. */
- gchar *end = g_utf8_offset_to_pointer (normalized, len - 1);
- gchar *recomposed;
- *b = g_utf8_get_char (end);
- recomposed = g_utf8_normalize (normalized, end - normalized, G_NORMALIZE_NFC);
- /* We expect that recomposed has exactly one character now. */
- *a = g_utf8_get_char (recomposed);
- g_free (recomposed);
- ret = true;
- }
-
- g_free (normalized);
- return ret;
}
diff --git a/thirdparty/harfbuzz/src/hb-graphite2.cc b/thirdparty/harfbuzz/src/hb-graphite2.cc
index 4d0e687c75..9e068f8d84 100644
--- a/thirdparty/harfbuzz/src/hb-graphite2.cc
+++ b/thirdparty/harfbuzz/src/hb-graphite2.cc
@@ -318,7 +318,7 @@ _hb_graphite2_shape (hb_shape_plan_t *shape_plan HB_UNUSED,
#undef ALLOCATE_ARRAY
- memset (clusters, 0, sizeof (clusters[0]) * buffer->len);
+ hb_memset (clusters, 0, sizeof (clusters[0]) * buffer->len);
hb_codepoint_t *pg = gids;
clusters[0].cluster = buffer->info[0].cluster;
diff --git a/thirdparty/harfbuzz/src/hb-iter.hh b/thirdparty/harfbuzz/src/hb-iter.hh
index 1a3ab43de0..b57f37b132 100644
--- a/thirdparty/harfbuzz/src/hb-iter.hh
+++ b/thirdparty/harfbuzz/src/hb-iter.hh
@@ -73,8 +73,10 @@ struct hb_iter_t
/* Operators. */
iter_t iter () const { return *thiz(); }
iter_t operator + () const { return *thiz(); }
- iter_t begin () const { return *thiz(); }
- iter_t end () const { return thiz()->__end__ (); }
+ iter_t _begin () const { return *thiz(); }
+ iter_t begin () const { return _begin (); }
+ iter_t _end () const { return thiz()->__end__ (); }
+ iter_t end () const { return _end (); }
explicit operator bool () const { return thiz()->__more__ (); }
unsigned len () const { return thiz()->__len__ (); }
/* The following can only be enabled if item_t is reference type. Otherwise
@@ -118,7 +120,9 @@ struct hb_iter_t
#define HB_ITER_USING(Name) \
using item_t = typename Name::item_t; \
+ using Name::_begin; \
using Name::begin; \
+ using Name::_end; \
using Name::end; \
using Name::get_item_size; \
using Name::is_iterator; \
@@ -377,7 +381,7 @@ struct hb_map_iter_t :
void __forward__ (unsigned n) { it += n; }
void __prev__ () { --it; }
void __rewind__ (unsigned n) { it -= n; }
- hb_map_iter_t __end__ () const { return hb_map_iter_t (it.end (), f); }
+ hb_map_iter_t __end__ () const { return hb_map_iter_t (it._end (), f); }
bool operator != (const hb_map_iter_t& o) const
{ return it != o.it; }
@@ -440,7 +444,7 @@ struct hb_filter_iter_t :
bool __more__ () const { return bool (it); }
void __next__ () { do ++it; while (it && !hb_has (p.get (), hb_get (f.get (), *it))); }
void __prev__ () { do --it; while (it && !hb_has (p.get (), hb_get (f.get (), *it))); }
- hb_filter_iter_t __end__ () const { return hb_filter_iter_t (it.end (), p, f); }
+ hb_filter_iter_t __end__ () const { return hb_filter_iter_t (it._end (), p, f); }
bool operator != (const hb_filter_iter_t& o) const
{ return it != o.it; }
@@ -553,7 +557,7 @@ struct hb_zip_iter_t :
void __forward__ (unsigned n) { a += n; b += n; }
void __prev__ () { --a; --b; }
void __rewind__ (unsigned n) { a -= n; b -= n; }
- hb_zip_iter_t __end__ () const { return hb_zip_iter_t (a.end (), b.end ()); }
+ hb_zip_iter_t __end__ () const { return hb_zip_iter_t (a._end (), b._end ()); }
/* Note, we should stop if ANY of the iters reaches end. As such two compare
* unequal if both items are unequal, NOT if either is unequal. */
bool operator != (const hb_zip_iter_t& o) const
@@ -637,7 +641,7 @@ struct hb_concat_iter_t :
}
}
- hb_concat_iter_t __end__ () const { return hb_concat_iter_t (a.end (), b.end ()); }
+ hb_concat_iter_t __end__ () const { return hb_concat_iter_t (a._end (), b._end ()); }
bool operator != (const hb_concat_iter_t& o) const
{
return a != o.a
diff --git a/thirdparty/harfbuzz/src/hb-machinery.hh b/thirdparty/harfbuzz/src/hb-machinery.hh
index 2571f22e15..b555739cfb 100644
--- a/thirdparty/harfbuzz/src/hb-machinery.hh
+++ b/thirdparty/harfbuzz/src/hb-machinery.hh
@@ -136,6 +136,13 @@ static inline Type& StructAfter(TObject &X)
/*
* Lazy loaders.
+ *
+ * The lazy-loaders are thread-safe pointer-like objects that create their
+ * instance on-demand. They also support access to a "data" object that is
+ * necessary for creating their instance. The data object, if specified,
+ * is accessed via pointer math, located at a location before the position
+ * of the loader itself. This avoids having to store a pointer to data
+ * for every lazy-loader. Multiple lazy-loaders can access the same data.
*/
template <typename Data, unsigned int WheresData>
@@ -228,7 +235,8 @@ struct hb_lazy_loader_t : hb_data_wrapper_t<Data, WheresData>
bool cmpexch (Stored *current, Stored *value) const
{
- /* This *must* be called when there are no other threads accessing. */
+ /* This function can only be safely called directly if no
+ * other thread is accessing. */
return this->instance.cmpexch (current, value);
}
@@ -261,7 +269,7 @@ struct hb_lazy_loader_t : hb_data_wrapper_t<Data, WheresData>
hb_free (p);
}
-// private:
+ private:
/* Must only have one pointer. */
hb_atomic_ptr_t<Stored *> instance;
};
@@ -283,7 +291,7 @@ struct hb_table_lazy_loader_t : hb_lazy_loader_t<T,
{
auto c = hb_sanitize_context_t ();
if (core)
- c.set_num_glyphs (0); // So we don't recurse ad infinitum...
+ c.set_num_glyphs (0); // So we don't recurse ad infinitum, or doesn't need num_glyphs
return c.reference_table<T> (face);
}
static void destroy (hb_blob_t *p) { hb_blob_destroy (p); }
diff --git a/thirdparty/harfbuzz/src/hb-map.hh b/thirdparty/harfbuzz/src/hb-map.hh
index 8302e3f8c7..bfb1b3f768 100644
--- a/thirdparty/harfbuzz/src/hb-map.hh
+++ b/thirdparty/harfbuzz/src/hb-map.hh
@@ -43,9 +43,9 @@ struct hb_hashmap_t
hb_hashmap_t () { init (); }
~hb_hashmap_t () { fini (); }
- hb_hashmap_t (const hb_hashmap_t& o) : hb_hashmap_t () { resize (population); hb_copy (o, *this); }
+ hb_hashmap_t (const hb_hashmap_t& o) : hb_hashmap_t () { resize (o.population); hb_copy (o, *this); }
hb_hashmap_t (hb_hashmap_t&& o) : hb_hashmap_t () { hb_swap (*this, o); }
- hb_hashmap_t& operator= (const hb_hashmap_t& o) { resize (population); hb_copy (o, *this); return *this; }
+ hb_hashmap_t& operator= (const hb_hashmap_t& o) { reset (); resize (o.population); hb_copy (o, *this); return *this; }
hb_hashmap_t& operator= (hb_hashmap_t&& o) { hb_swap (*this, o); return *this; }
hb_hashmap_t (std::initializer_list<hb_pair_t<K, V>> lst) : hb_hashmap_t ()
@@ -71,6 +71,11 @@ struct hb_hashmap_t
uint32_t is_tombstone_ : 1;
V value;
+ item_t () : key (),
+ hash (0),
+ is_used_ (false), is_tombstone_ (false),
+ value () {}
+
bool is_used () const { return is_used_; }
void set_used (bool is_used) { is_used_ = is_used; }
bool is_tombstone () const { return is_tombstone_; }
@@ -88,17 +93,8 @@ struct hb_hashmap_t
return minus_1;
};
- void clear ()
- {
- new (std::addressof (key)) K ();
- new (std::addressof (value)) V ();
- hash = 0;
- is_used_ = false;
- is_tombstone_ = false;
- }
-
- bool operator == (const K &o) { return hb_deref (key) == hb_deref (o); }
- bool operator == (const item_t &o) { return *this == o.key; }
+ bool operator == (const K &o) const { return hb_deref (key) == hb_deref (o); }
+ bool operator == (const item_t &o) const { return *this == o.key; }
hb_pair_t<K, V> get_pair() const { return hb_pair_t<K, V> (key, value); }
hb_pair_t<const K &, const V &> get_pair_ref() const { return hb_pair_t<const K &, const V &> (key, value); }
@@ -107,8 +103,8 @@ struct hb_hashmap_t
};
hb_object_header_t header;
- bool successful; /* Allocations successful */
- unsigned int population; /* Not including tombstones. */
+ unsigned int successful : 1; /* Allocations successful */
+ unsigned int population : 31; /* Not including tombstones. */
unsigned int occupancy; /* Including tombstones. */
unsigned int mask;
unsigned int prime;
@@ -118,7 +114,10 @@ struct hb_hashmap_t
{
if (unlikely (!a.successful || !b.successful))
return;
- hb_swap (a.population, b.population);
+ unsigned tmp = a.population;
+ a.population = b.population;
+ b.population = tmp;
+ //hb_swap (a.population, b.population);
hb_swap (a.occupancy, b.occupancy);
hb_swap (a.mask, b.mask);
hb_swap (a.prime, b.prime);
@@ -160,7 +159,9 @@ struct hb_hashmap_t
{
if (unlikely (!successful)) return false;
- unsigned int power = hb_bit_storage (hb_max (population, new_population) * 2 + 8);
+ if (new_population != 0 && (new_population + new_population / 2) < mask) return true;
+
+ unsigned int power = hb_bit_storage (hb_max ((unsigned) population, new_population) * 2 + 8);
unsigned int new_size = 1u << power;
item_t *new_items = (item_t *) hb_malloc ((size_t) new_size * sizeof (item_t));
if (unlikely (!new_items))
@@ -169,9 +170,9 @@ struct hb_hashmap_t
return false;
}
for (auto &_ : hb_iter (new_items, new_size))
- _.clear ();
+ new (&_) item_t ();
- unsigned int old_size = mask + 1;
+ unsigned int old_size = size ();
item_t *old_items = items;
/* Switch to new, empty, array. */
@@ -181,47 +182,82 @@ struct hb_hashmap_t
items = new_items;
/* Insert back old items. */
- if (old_items)
- for (unsigned int i = 0; i < old_size; i++)
+ for (unsigned int i = 0; i < old_size; i++)
+ {
+ if (old_items[i].is_real ())
{
- if (old_items[i].is_real ())
- {
- set_with_hash (old_items[i].key,
- old_items[i].hash,
- std::move (old_items[i].value));
- }
- old_items[i].~item_t ();
+ set_with_hash (std::move (old_items[i].key),
+ old_items[i].hash,
+ std::move (old_items[i].value));
}
+ old_items[i].~item_t ();
+ }
hb_free (old_items);
return true;
}
+ template <typename KK, typename VV>
+ bool set_with_hash (KK&& key, uint32_t hash, VV&& value, bool is_delete=false)
+ {
+ if (unlikely (!successful)) return false;
+ if (unlikely ((occupancy + occupancy / 2) >= mask && !resize ())) return false;
+ item_t &item = item_for_hash (key, hash);
+
+ if (is_delete && !(item == key))
+ return true; /* Trying to delete non-existent key. */
+
+ if (item.is_used ())
+ {
+ occupancy--;
+ if (!item.is_tombstone ())
+ population--;
+ }
+
+ item.key = std::forward<KK> (key);
+ item.value = std::forward<VV> (value);
+ item.hash = hash;
+ item.set_used (true);
+ item.set_tombstone (is_delete);
+
+ occupancy++;
+ if (!is_delete)
+ population++;
+
+ return true;
+ }
+
+ template <typename VV>
+ bool set (const K &key, VV&& value) { return set_with_hash (key, hb_hash (key), std::forward<VV> (value)); }
template <typename VV>
- bool set (K key, VV&& value) { return set_with_hash (key, hb_hash (key), std::forward<VV> (value)); }
+ bool set (K &&key, VV&& value) { return set_with_hash (std::move (key), hb_hash (key), std::forward<VV> (value)); }
- const V& get (K key) const
+ const V& get_with_hash (const K &key, uint32_t hash) const
+ {
+ if (unlikely (!items)) return item_t::default_value ();
+ auto &item = item_for_hash (key, hash);
+ return item.is_real () && item == key ? item.value : item_t::default_value ();
+ }
+ const V& get (const K &key) const
{
if (unlikely (!items)) return item_t::default_value ();
- unsigned int i = bucket_for (key);
- return items[i].is_real () && items[i] == key ? items[i].value : item_t::default_value ();
+ return get_with_hash (key, hb_hash (key));
}
- void del (K key) { set_with_hash (key, hb_hash (key), item_t::default_value (), true); }
+ void del (const K &key) { set_with_hash (key, hb_hash (key), item_t::default_value (), true); }
/* Has interface. */
- typedef const V& value_t;
- value_t operator [] (K k) const { return get (k); }
+ const V& operator [] (K k) const { return get (k); }
template <typename VV=V>
bool has (K key, VV **vp = nullptr) const
{
if (unlikely (!items))
return false;
- unsigned int i = bucket_for (key);
- if (items[i].is_real () && items[i] == key)
+ auto &item = item_for_hash (key, hb_hash (key));
+ if (item.is_real () && item == key)
{
- if (vp) *vp = &items[i].value;
+ if (vp) *vp = std::addressof (item.value);
return true;
}
else
@@ -230,13 +266,18 @@ struct hb_hashmap_t
/* Projection. */
V operator () (K k) const { return get (k); }
+ unsigned size () const { return mask ? mask + 1 : 0; }
+
void clear ()
{
if (unlikely (!successful)) return;
- if (items)
- for (auto &_ : hb_iter (items, mask + 1))
- _.clear ();
+ for (auto &_ : hb_iter (items, size ()))
+ {
+ /* Reconstruct items. */
+ _.~item_t ();
+ new (&_) item_t ();
+ }
population = occupancy = 0;
}
@@ -246,11 +287,10 @@ struct hb_hashmap_t
uint32_t hash () const
{
- uint32_t h = 0;
- for (const auto &item : + hb_array (items, mask ? mask + 1 : 0)
- | hb_filter (&item_t::is_real))
- h ^= item.total_hash ();
- return h;
+ return
+ + iter_items ()
+ | hb_reduce ([] (uint32_t h, const item_t &_) { return h ^ _.total_hash (); }, (uint32_t) 0u)
+ ;
}
bool is_equal (const hb_hashmap_t &other) const
@@ -258,7 +298,7 @@ struct hb_hashmap_t
if (population != other.population) return false;
for (auto pair : iter ())
- if (get (pair.first) != pair.second)
+ if (other.get (pair.first) != pair.second)
return false;
return true;
@@ -271,87 +311,54 @@ struct hb_hashmap_t
/*
* Iterator
*/
- auto iter () const HB_AUTO_RETURN
+
+ auto iter_items () const HB_AUTO_RETURN
(
- + hb_array (items, mask ? mask + 1 : 0)
+ + hb_iter (items, size ())
| hb_filter (&item_t::is_real)
- | hb_map (&item_t::get_pair)
)
auto iter_ref () const HB_AUTO_RETURN
(
- + hb_array (items, mask ? mask + 1 : 0)
- | hb_filter (&item_t::is_real)
+ + iter_items ()
| hb_map (&item_t::get_pair_ref)
)
- auto keys () const HB_AUTO_RETURN
+ auto iter () const HB_AUTO_RETURN
(
- + hb_array (items, mask ? mask + 1 : 0)
- | hb_filter (&item_t::is_real)
- | hb_map (&item_t::key)
- | hb_map (hb_ridentity)
+ + iter_items ()
+ | hb_map (&item_t::get_pair)
)
auto keys_ref () const HB_AUTO_RETURN
(
- + hb_array (items, mask ? mask + 1 : 0)
- | hb_filter (&item_t::is_real)
+ + iter_items ()
| hb_map (&item_t::key)
)
- auto values () const HB_AUTO_RETURN
+ auto keys () const HB_AUTO_RETURN
(
- + hb_array (items, mask ? mask + 1 : 0)
- | hb_filter (&item_t::is_real)
- | hb_map (&item_t::value)
+ + keys_ref ()
| hb_map (hb_ridentity)
)
auto values_ref () const HB_AUTO_RETURN
(
- + hb_array (items, mask ? mask + 1 : 0)
- | hb_filter (&item_t::is_real)
+ + iter_items ()
| hb_map (&item_t::value)
)
+ auto values () const HB_AUTO_RETURN
+ (
+ + values_ref ()
+ | hb_map (hb_ridentity)
+ )
/* Sink interface. */
hb_hashmap_t& operator << (const hb_pair_t<K, V>& v)
{ set (v.first, v.second); return *this; }
-
- protected:
-
- template <typename VV>
- bool set_with_hash (K key, uint32_t hash, VV&& value, bool is_delete=false)
- {
- if (unlikely (!successful)) return false;
- if (unlikely ((occupancy + occupancy / 2) >= mask && !resize ())) return false;
- unsigned int i = bucket_for_hash (key, hash);
-
- if (is_delete && items[i].key != key)
- return true; /* Trying to delete non-existent key. */
-
- if (items[i].is_used ())
- {
- occupancy--;
- if (!items[i].is_tombstone ())
- population--;
- }
-
- items[i].key = key;
- items[i].value = std::forward<VV> (value);
- items[i].hash = hash;
- items[i].set_used (true);
- items[i].set_tombstone (is_delete);
-
- occupancy++;
- if (!is_delete)
- population++;
-
- return true;
- }
-
- unsigned int bucket_for (const K &key) const
- {
- return bucket_for_hash (key, hb_hash (key));
- }
-
- unsigned int bucket_for_hash (const K &key, uint32_t hash) const
+ hb_hashmap_t& operator << (const hb_pair_t<K, V&&>& v)
+ { set (v.first, std::move (v.second)); return *this; }
+ hb_hashmap_t& operator << (const hb_pair_t<K&&, V>& v)
+ { set (std::move (v.first), v.second); return *this; }
+ hb_hashmap_t& operator << (const hb_pair_t<K&&, V&&>& v)
+ { set (std::move (v.first), std::move (v.second)); return *this; }
+
+ item_t& item_for_hash (const K &key, uint32_t hash) const
{
hash &= 0x3FFFFFFF; // We only store lower 30bit of hash
unsigned int i = hash % prime;
@@ -360,12 +367,12 @@ struct hb_hashmap_t
while (items[i].is_used ())
{
if (items[i].hash == hash && items[i] == key)
- return i;
+ return items[i];
if (tombstone == (unsigned) -1 && items[i].is_tombstone ())
tombstone = i;
i = (i + ++step) & mask;
}
- return tombstone == (unsigned) -1 ? i : tombstone;
+ return items[tombstone == (unsigned) -1 ? i : tombstone];
}
static unsigned int prime_for (unsigned int shift)
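
Note: the hashmap rework replaces bucket_for()/bucket_for_hash() with item_for_hash(), which hands back a reference to the matching (or insertable) slot, promotes set_with_hash() to the public interface, and adds rvalue overloads so keys and values can be moved instead of copied; resize() also returns early when the table is already large enough for the requested population. A small sketch of the move-in path, with types picked only for illustration:

    // Sketch: the new forwarding overloads move heavy values into the map.
    hb_hashmap_t<hb_codepoint_t, hb_vector_t<hb_codepoint_t>> m;

    hb_vector_t<hb_codepoint_t> v;
    v.push (1u);
    v.push (2u);

    m.set (42u, std::move (v));          // value is moved, not deep-copied
    const auto &stored = m.get (42u);    // get() now takes const K&
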
diff --git a/thirdparty/harfbuzz/src/hb-meta.hh b/thirdparty/harfbuzz/src/hb-meta.hh
index 1921ccbb6d..31aa7fa6f1 100644
--- a/thirdparty/harfbuzz/src/hb-meta.hh
+++ b/thirdparty/harfbuzz/src/hb-meta.hh
@@ -112,8 +112,7 @@ template <typename T> auto _hb_try_add_pointer (hb_priority<1>) -> hb_type_ident
template <typename T> using hb_add_pointer = decltype (_hb_try_add_pointer<T> (hb_prioritize));
-/* TODO Add feature-parity to std::decay. */
-template <typename T> using hb_decay = hb_remove_const<hb_remove_reference<T>>;
+template <typename T> using hb_decay = typename std::decay<T>::type;
#define hb_is_convertible(From,To) std::is_convertible<From, To>::value
@@ -133,6 +132,18 @@ struct
template <typename T> constexpr auto
operator () (T *v) const HB_AUTO_RETURN (*v)
+
+ template <typename T> constexpr auto
+ operator () (const hb::shared_ptr<T>& v) const HB_AUTO_RETURN (*v)
+
+ template <typename T> constexpr auto
+ operator () (hb::shared_ptr<T>& v) const HB_AUTO_RETURN (*v)
+
+ template <typename T> constexpr auto
+ operator () (const hb::unique_ptr<T>& v) const HB_AUTO_RETURN (*v)
+
+ template <typename T> constexpr auto
+ operator () (hb::unique_ptr<T>& v) const HB_AUTO_RETURN (*v)
}
HB_FUNCOBJ (hb_deref);
diff --git a/thirdparty/harfbuzz/src/hb-ms-feature-ranges.hh b/thirdparty/harfbuzz/src/hb-ms-feature-ranges.hh
index d40fdeaa82..46a20c91ea 100644
--- a/thirdparty/harfbuzz/src/hb-ms-feature-ranges.hh
+++ b/thirdparty/harfbuzz/src/hb-ms-feature-ranges.hh
@@ -166,7 +166,7 @@ hb_ms_setup_features (const hb_feature_t *features,
{
auto *feature = active_features.lsearch (event->feature);
if (feature)
- active_features.remove (feature - active_features.arrayZ);
+ active_features.remove_ordered (feature - active_features.arrayZ);
}
}
diff --git a/thirdparty/harfbuzz/src/hb-multimap.hh b/thirdparty/harfbuzz/src/hb-multimap.hh
new file mode 100644
index 0000000000..f0f95917aa
--- /dev/null
+++ b/thirdparty/harfbuzz/src/hb-multimap.hh
@@ -0,0 +1,92 @@
+/*
+ * Copyright © 2022 Behdad Esfahbod
+ *
+ * This is part of HarfBuzz, a text shaping library.
+ *
+ * Permission is hereby granted, without written agreement and without
+ * license or royalty fees, to use, copy, modify, and distribute this
+ * software and its documentation for any purpose, provided that the
+ * above copyright notice and the following two paragraphs appear in
+ * all copies of this software.
+ *
+ * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
+ * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
+ * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
+ * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+ * DAMAGE.
+ *
+ * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
+ * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
+ * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
+ * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+ */
+
+#ifndef HB_MULTIMAP_HH
+#define HB_MULTIMAP_HH
+
+#include "hb.hh"
+#include "hb-map.hh"
+#include "hb-vector.hh"
+
+
+/*
+ * hb_multimap_t
+ */
+
+struct hb_multimap_t
+{
+ void add (hb_codepoint_t k, hb_codepoint_t v)
+ {
+ hb_codepoint_t *i;
+ if (multiples_indices.has (k, &i))
+ {
+ multiples_values[*i].push (v);
+ return;
+ }
+
+ hb_codepoint_t *old_v;
+ if (singulars.has (k, &old_v))
+ {
+ hb_codepoint_t old = *old_v;
+ singulars.del (k);
+
+ multiples_indices.set (k, multiples_values.length);
+ auto *vec = multiples_values.push ();
+
+ vec->push (old);
+ vec->push (v);
+
+ return;
+ }
+
+ singulars.set (k, v);
+ }
+
+ hb_array_t<const hb_codepoint_t> get (hb_codepoint_t k) const
+ {
+ hb_codepoint_t *v;
+ if (singulars.has (k, &v))
+ return hb_array (v, 1);
+
+ hb_codepoint_t *i;
+ if (multiples_indices.has (k, &i))
+ return multiples_values[*i].as_array ();
+
+ return hb_array_t<hb_codepoint_t> ();
+ }
+
+ bool in_error () const
+ {
+ return singulars.in_error () || multiples_indices.in_error () || multiples_values.in_error ();
+ }
+
+ protected:
+ hb_map_t singulars;
+ hb_map_t multiples_indices;
+ hb_vector_t<hb_vector_t<hb_codepoint_t>> multiples_values;
+};
+
+
+
+#endif /* HB_MULTIMAP_HH */
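
Note: the new hb_multimap_t keeps a single value per key in an hb_map_t and only spills to a per-key vector once a key receives its second value; get() returns an hb_array_t view in both cases. A minimal usage sketch:

    // Sketch: one key, growing from a single value to multiple values.
    hb_multimap_t mm;
    mm.add (10u, 100u);          // stored in the 'singulars' map
    mm.add (10u, 200u);          // key migrates to the multi-value vector
    for (hb_codepoint_t v : mm.get (10u))
      { /* visits 100, then 200 */ }
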
diff --git a/thirdparty/harfbuzz/src/hb-mutex.hh b/thirdparty/harfbuzz/src/hb-mutex.hh
index 6914b22450..053f9ddcc4 100644
--- a/thirdparty/harfbuzz/src/hb-mutex.hh
+++ b/thirdparty/harfbuzz/src/hb-mutex.hh
@@ -108,10 +108,11 @@ struct hb_mutex_t
struct hb_lock_t
{
- hb_lock_t (hb_mutex_t &mutex_) : mutex (mutex_) { mutex.lock (); }
- ~hb_lock_t () { mutex.unlock (); }
+ hb_lock_t (hb_mutex_t &mutex_) : mutex (&mutex_) { mutex->lock (); }
+ hb_lock_t (hb_mutex_t *mutex_) : mutex (mutex_) { if (mutex) mutex->lock (); }
+ ~hb_lock_t () { if (mutex) mutex->unlock (); }
private:
- hb_mutex_t &mutex;
+ hb_mutex_t *mutex;
};
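
Note: hb_lock_t now stores a pointer instead of a reference and gains a pointer constructor, so callers can pass nullptr to skip locking when no synchronization is needed. Sketch:

    // Sketch: conditional locking with the new pointer constructor.
    static void work (hb_mutex_t *mutex_or_null)
    {
      hb_lock_t lock (mutex_or_null);   // locks only if non-null; unlocks in the destructor
      /* ... critical section (or not) ... */
    }
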
diff --git a/thirdparty/harfbuzz/src/hb-number-parser.hh b/thirdparty/harfbuzz/src/hb-number-parser.hh
index 1a9dbba6dd..ec68c3a728 100644
--- a/thirdparty/harfbuzz/src/hb-number-parser.hh
+++ b/thirdparty/harfbuzz/src/hb-number-parser.hh
@@ -31,7 +31,7 @@
#include "hb.hh"
-#line 35 "hb-number-parser.hh"
+#line 32 "hb-number-parser.hh"
static const unsigned char _double_parser_trans_keys[] = {
0u, 0u, 43u, 57u, 46u, 57u, 48u, 57u, 43u, 57u, 48u, 57u, 48u, 101u, 48u, 57u,
46u, 101u, 0
@@ -135,12 +135,12 @@ strtod_rl (const char *p, const char **end_ptr /* IN/OUT */)
int cs;
-#line 139 "hb-number-parser.hh"
+#line 132 "hb-number-parser.hh"
{
cs = double_parser_start;
}
-#line 144 "hb-number-parser.hh"
+#line 135 "hb-number-parser.hh"
{
int _slen;
int _trans;
@@ -198,7 +198,7 @@ _resume:
exp_overflow = true;
}
break;
-#line 202 "hb-number-parser.hh"
+#line 187 "hb-number-parser.hh"
}
_again:
diff --git a/thirdparty/harfbuzz/src/hb-object.hh b/thirdparty/harfbuzz/src/hb-object.hh
index 9876c2923c..a23c25f7ca 100644
--- a/thirdparty/harfbuzz/src/hb-object.hh
+++ b/thirdparty/harfbuzz/src/hb-object.hh
@@ -80,7 +80,7 @@ struct hb_lockable_set_t
if (item)
{
item_t old = *item;
- *item = items[items.length - 1];
+ *item = std::move (items.tail ());
items.pop ();
l.unlock ();
old.fini ();
@@ -123,7 +123,7 @@ struct hb_lockable_set_t
l.lock ();
while (items.length)
{
- item_t old = items[items.length - 1];
+ item_t old = items.tail ();
items.pop ();
l.unlock ();
old.fini ();
diff --git a/thirdparty/harfbuzz/src/hb-open-file.hh b/thirdparty/harfbuzz/src/hb-open-file.hh
index 6eee5827c1..13570a46e0 100644
--- a/thirdparty/harfbuzz/src/hb-open-file.hh
+++ b/thirdparty/harfbuzz/src/hb-open-file.hh
@@ -90,7 +90,7 @@ typedef struct OpenTypeOffsetTable
{
if (table_count)
{
- + tables.sub_array (start_offset, table_count)
+ + tables.as_array ().sub_array (start_offset, table_count)
| hb_map (&TableRecord::tag)
| hb_sink (hb_array (table_tags, *table_count))
;
@@ -158,7 +158,7 @@ typedef struct OpenTypeOffsetTable
return_trace (false);
if (likely (len))
- memcpy (start, blob->data, len);
+ hb_memcpy (start, blob->data, len);
/* 4-byte alignment. */
c->align (4);
diff --git a/thirdparty/harfbuzz/src/hb-open-type.hh b/thirdparty/harfbuzz/src/hb-open-type.hh
index d0d01a68c5..290799127a 100644
--- a/thirdparty/harfbuzz/src/hb-open-type.hh
+++ b/thirdparty/harfbuzz/src/hb-open-type.hh
@@ -141,27 +141,27 @@ typedef HBINT32 FWORD32;
/* 16-bit unsigned integer (HBUINT16) that describes a quantity in FUnits. */
typedef HBUINT16 UFWORD;
-/* 16-bit signed fixed number with the low 14 bits of fraction (2.14). */
-struct F2DOT14 : HBINT16
+template <typename Type, unsigned fraction_bits>
+struct HBFixed : Type
{
- F2DOT14& operator = (uint16_t i ) { HBINT16::operator= (i); return *this; }
- // 16384 means 1<<14
- float to_float () const { return ((int32_t) v) / 16384.f; }
- void set_float (float f) { v = roundf (f * 16384.f); }
+ static constexpr float shift = (float) (1 << fraction_bits);
+ static_assert (Type::static_size * 8 > fraction_bits, "");
+
+ HBFixed& operator = (typename Type::type i ) { Type::operator= (i); return *this; }
+ float to_float (float offset = 0) const { return ((int32_t) Type::v + offset) / shift; }
+ void set_float (float f) { Type::v = roundf (f * shift); }
public:
- DEFINE_SIZE_STATIC (2);
+ DEFINE_SIZE_STATIC (Type::static_size);
};
+/* 16-bit signed fixed number with the low 14 bits of fraction (2.14). */
+using F2DOT14 = HBFixed<HBINT16, 14>;
+
+/* 16-bit signed fixed number with the low 12 bits of fraction (4.12). */
+using F4DOT12 = HBFixed<HBINT16, 12>;
+
/* 32-bit signed fixed-point number (16.16). */
-struct HBFixed : HBINT32
-{
- HBFixed& operator = (uint32_t i) { HBINT32::operator= (i); return *this; }
- // 65536 means 1<<16
- float to_float () const { return ((int32_t) v) / 65536.f; }
- void set_float (float f) { v = roundf (f * 65536.f); }
- public:
- DEFINE_SIZE_STATIC (4);
-};
+using F16DOT16 = HBFixed<HBINT32, 16>;
/* Date represented in number of seconds since 12:00 midnight, January 1,
* 1904. The value is represented as a signed 64-bit integer. */
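
Note: F2DOT14 and the old 16.16 HBFixed struct are unified into the HBFixed<Type, fraction_bits> template; the raw integer is interpreted as value / 2^fraction_bits, and to_float() gains an optional offset added to the raw value before the division. A worked sketch for the 2.14 case:

    // Sketch: 2.14 fixed point via the new template (F2DOT14 = HBFixed<HBINT16, 14>).
    F2DOT14 f;
    f.set_float (0.75f);           // stores roundf (0.75 * 16384) == 12288
    float v = f.to_float ();       // (int32_t) 12288 / 16384.f == 0.75f
    float w = f.to_float (0.5f);   // offset applied to the raw value: (12288 + 0.5) / 16384
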
@@ -212,6 +212,12 @@ typedef Index NameID;
struct VarIdx : HBUINT32 {
static constexpr unsigned NO_VARIATION = 0xFFFFFFFFu;
+ static_assert (NO_VARIATION == HB_OT_LAYOUT_NO_VARIATIONS_INDEX, "");
+ static uint32_t add (uint32_t i, unsigned short v)
+ {
+ if (i == NO_VARIATION) return i;
+ return i + v;
+ }
VarIdx& operator = (uint32_t i) { HBUINT32::operator= (i); return *this; }
};
DECLARE_NULL_NAMESPACE_BYTES (OT, VarIdx);
@@ -496,10 +502,10 @@ struct UnsizedArrayOf
void qsort (unsigned int len, unsigned int start = 0, unsigned int end = (unsigned int) -1)
{ as_array (len).qsort (start, end); }
- bool serialize (hb_serialize_context_t *c, unsigned int items_len)
+ bool serialize (hb_serialize_context_t *c, unsigned int items_len, bool clear = true)
{
TRACE_SERIALIZE (this);
- if (unlikely (!c->extend (this, items_len))) return_trace (false);
+ if (unlikely (!c->extend_size (this, get_size (items_len), clear))) return_trace (false);
return_trace (true);
}
template <typename Iterator,
@@ -507,8 +513,8 @@ struct UnsizedArrayOf
bool serialize (hb_serialize_context_t *c, Iterator items)
{
TRACE_SERIALIZE (this);
- unsigned count = items.len ();
- if (unlikely (!serialize (c, count))) return_trace (false);
+ unsigned count = hb_len (items);
+ if (unlikely (!serialize (c, count, false))) return_trace (false);
/* TODO Umm. Just exhaust the iterator instead? Being extra
* cautious right now.. */
for (unsigned i = 0; i < count; i++, ++items)
@@ -649,14 +655,9 @@ struct ArrayOf
operator iter_t () const { return iter (); }
operator writer_t () { return writer (); }
- hb_array_t<const Type> sub_array (unsigned int start_offset, unsigned int count) const
- { return as_array ().sub_array (start_offset, count); }
- hb_array_t<const Type> sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */) const
- { return as_array ().sub_array (start_offset, count); }
- hb_array_t<Type> sub_array (unsigned int start_offset, unsigned int count)
- { return as_array ().sub_array (start_offset, count); }
- hb_array_t<Type> sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */)
- { return as_array ().sub_array (start_offset, count); }
+ /* Faster range-based for loop. */
+ const Type *begin () const { return arrayZ; }
+ const Type *end () const { return arrayZ + len; }
template <typename T>
Type &lsearch (const T &x, Type &not_found = Crap (Type))
@@ -670,15 +671,15 @@ struct ArrayOf
unsigned int to_store = (unsigned int) -1) const
{ return as_array ().lfind (x, i, not_found, to_store); }
- void qsort (unsigned int start = 0, unsigned int end = (unsigned int) -1)
- { as_array ().qsort (start, end); }
+ void qsort ()
+ { as_array ().qsort (); }
- HB_NODISCARD bool serialize (hb_serialize_context_t *c, unsigned items_len)
+ HB_NODISCARD bool serialize (hb_serialize_context_t *c, unsigned items_len, bool clear = true)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (this))) return_trace (false);
c->check_assign (len, items_len, HB_SERIALIZE_ERROR_ARRAY_OVERFLOW);
- if (unlikely (!c->extend (this))) return_trace (false);
+ if (unlikely (!c->extend_size (this, get_size (), clear))) return_trace (false);
return_trace (true);
}
template <typename Iterator,
@@ -686,8 +687,8 @@ struct ArrayOf
HB_NODISCARD bool serialize (hb_serialize_context_t *c, Iterator items)
{
TRACE_SERIALIZE (this);
- unsigned count = items.len ();
- if (unlikely (!serialize (c, count))) return_trace (false);
+ unsigned count = hb_len (items);
+ if (unlikely (!serialize (c, count, false))) return_trace (false);
/* TODO Umm. Just exhaust the iterator instead? Being extra
* cautious right now.. */
for (unsigned i = 0; i < count; i++, ++items)
@@ -831,21 +832,25 @@ struct HeadlessArrayOf
operator iter_t () const { return iter (); }
operator writer_t () { return writer (); }
- bool serialize (hb_serialize_context_t *c, unsigned int items_len)
+ /* Faster range-based for loop. */
+ const Type *begin () const { return arrayZ; }
+ const Type *end () const { return arrayZ + get_length (); }
+
+ HB_NODISCARD bool serialize (hb_serialize_context_t *c, unsigned int items_len, bool clear = true)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (this))) return_trace (false);
c->check_assign (lenP1, items_len + 1, HB_SERIALIZE_ERROR_ARRAY_OVERFLOW);
- if (unlikely (!c->extend (this))) return_trace (false);
+ if (unlikely (!c->extend_size (this, get_size (), clear))) return_trace (false);
return_trace (true);
}
template <typename Iterator,
hb_requires (hb_is_source_of (Iterator, Type))>
- bool serialize (hb_serialize_context_t *c, Iterator items)
+ HB_NODISCARD bool serialize (hb_serialize_context_t *c, Iterator items)
{
TRACE_SERIALIZE (this);
- unsigned count = items.len ();
- if (unlikely (!serialize (c, count))) return_trace (false);
+ unsigned count = hb_len (items);
+ if (unlikely (!serialize (c, count, false))) return_trace (false);
/* TODO Umm. Just exhaust the iterator instead? Being extra
* cautious right now.. */
for (unsigned i = 0; i < count; i++, ++items)
@@ -947,14 +952,9 @@ struct SortedArrayOf : ArrayOf<Type, LenType>
operator iter_t () const { return iter (); }
operator writer_t () { return writer (); }
- hb_sorted_array_t<const Type> sub_array (unsigned int start_offset, unsigned int count) const
- { return as_array ().sub_array (start_offset, count); }
- hb_sorted_array_t<const Type> sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */) const
- { return as_array ().sub_array (start_offset, count); }
- hb_sorted_array_t<Type> sub_array (unsigned int start_offset, unsigned int count)
- { return as_array ().sub_array (start_offset, count); }
- hb_sorted_array_t<Type> sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */)
- { return as_array ().sub_array (start_offset, count); }
+ /* Faster range-based for loop. */
+ const Type *begin () const { return this->arrayZ; }
+ const Type *end () const { return this->arrayZ + this->len; }
bool serialize (hb_serialize_context_t *c, unsigned int items_len)
{
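The clear flag threaded through these serialize()/extend_size()/allocate_size() calls lets the serializer skip zero-filling storage that the caller overwrites in full immediately afterwards. A minimal standalone sketch of the idea, using a hypothetical arena type rather than the HarfBuzz serializer:

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>

struct arena
{
  uint8_t pool[1024];
  size_t  used = 0;

  // Reserve n bytes; optionally zero-fill them (mirrors the clear flag above).
  uint8_t *allocate (size_t n, bool clear = true)
  {
    assert (used + n <= sizeof pool);
    uint8_t *p = pool + used;
    used += n;
    if (clear) std::memset (p, 0, n);
    return p;
  }
};

int main ()
{
  arena a;
  const uint16_t items[3] = {1, 2, 3};
  // The copy below writes every byte of the reservation, so zero-filling it
  // first would be redundant work; that is what passing clear=false avoids.
  uint8_t *p = a.allocate (sizeof items, /*clear=*/false);
  std::memcpy (p, items, sizeof items);
  return 0;
}

The serialize (c, count, false) call sites above follow the same pattern: the element loop that runs right after the allocation assigns every slot.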
diff --git a/thirdparty/harfbuzz/src/hb-ot-cff-common.hh b/thirdparty/harfbuzz/src/hb-ot-cff-common.hh
index ae3b83a256..f22824fc69 100644
--- a/thirdparty/harfbuzz/src/hb-ot-cff-common.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-cff-common.hh
@@ -66,95 +66,25 @@ struct CFFIndex
{
TRACE_SERIALIZE (this);
unsigned int size = get_size ();
- CFFIndex *out = c->allocate_size<CFFIndex> (size);
+ CFFIndex *out = c->allocate_size<CFFIndex> (size, false);
if (likely (out))
- memcpy (out, this, size);
+ hb_memcpy (out, this, size);
return_trace (out);
}
+ template <typename Iterable,
+ hb_requires (hb_is_iterable (Iterable))>
bool serialize (hb_serialize_context_t *c,
- unsigned int offSize_,
- const byte_str_array_t &byteArray)
+ const Iterable &iterable)
{
TRACE_SERIALIZE (this);
-
- if (byteArray.length == 0)
- {
- COUNT *dest = c->allocate_min<COUNT> ();
- if (unlikely (!dest)) return_trace (false);
- *dest = 0;
- return_trace (true);
- }
-
- /* serialize CFFIndex header */
- if (unlikely (!c->extend_min (this))) return_trace (false);
- this->count = byteArray.length;
- this->offSize = offSize_;
- if (unlikely (!c->allocate_size<HBUINT8> (offSize_ * (byteArray.length + 1))))
- return_trace (false);
-
- /* serialize indices */
- unsigned int offset = 1;
- unsigned int i = 0;
- for (; i < byteArray.length; i++)
- {
- set_offset_at (i, offset);
- offset += byteArray[i].get_size ();
- }
- set_offset_at (i, offset);
-
- /* serialize data */
- for (unsigned int i = 0; i < byteArray.length; i++)
- {
- const hb_ubytes_t &bs = byteArray[i];
- unsigned char *dest = c->allocate_size<unsigned char> (bs.length);
- if (unlikely (!dest)) return_trace (false);
- memcpy (dest, &bs[0], bs.length);
- }
-
- return_trace (true);
- }
-
- bool serialize (hb_serialize_context_t *c,
- unsigned int offSize_,
- const str_buff_vec_t &buffArray)
- {
- byte_str_array_t byteArray;
- byteArray.init ();
- byteArray.resize (buffArray.length);
- for (unsigned int i = 0; i < byteArray.length; i++)
- byteArray[i] = hb_ubytes_t (buffArray[i].arrayZ, buffArray[i].length);
- bool result = this->serialize (c, offSize_, byteArray);
- byteArray.fini ();
- return result;
- }
-
- template <typename Iterator,
- hb_requires (hb_is_iterator (Iterator))>
- bool serialize (hb_serialize_context_t *c,
- Iterator it)
- {
- TRACE_SERIALIZE (this);
- serialize_header(c, + it | hb_map ([] (const hb_ubytes_t &_) { return _.length; }));
+ auto it = hb_iter (iterable);
+ serialize_header(c, + it | hb_map (hb_iter) | hb_map (hb_len));
for (const auto &_ : +it)
- _.copy (c);
+ hb_iter (_).copy (c);
return_trace (true);
}
- bool serialize (hb_serialize_context_t *c,
- const byte_str_array_t &byteArray)
- { return serialize (c, + hb_iter (byteArray)); }
-
- bool serialize (hb_serialize_context_t *c,
- const str_buff_vec_t &buffArray)
- {
- auto it =
- + hb_iter (buffArray)
- | hb_map ([] (const str_buff_t &_) { return hb_ubytes_t (_.arrayZ, _.length); })
- ;
- return serialize (c, it);
- }
-
template <typename Iterator,
hb_requires (hb_is_iterator (Iterator))>
bool serialize_header (hb_serialize_context_t *c,
@@ -171,7 +101,7 @@ struct CFFIndex
if (!this->count) return_trace (true);
if (unlikely (!c->extend (this->offSize))) return_trace (false);
this->offSize = off_size;
- if (unlikely (!c->allocate_size<HBUINT8> (off_size * (this->count + 1))))
+ if (unlikely (!c->allocate_size<HBUINT8> (off_size * (this->count + 1), false)))
return_trace (false);
/* serialize indices */
@@ -179,14 +109,27 @@ struct CFFIndex
unsigned int i = 0;
for (unsigned _ : +it)
{
- CFFIndex<COUNT>::set_offset_at (i++, offset);
+ set_offset_at (i++, offset);
offset += _;
}
- CFFIndex<COUNT>::set_offset_at (i, offset);
+ set_offset_at (i, offset);
return_trace (true);
}
+ template <typename Iterable,
+ hb_requires (hb_is_iterable (Iterable))>
+ static unsigned total_size (const Iterable &iterable)
+ {
+ auto it = + hb_iter (iterable) | hb_map (hb_iter) | hb_map (hb_len);
+ if (!it) return 0;
+
+ unsigned total = + it | hb_reduce (hb_add, 0);
+ unsigned off_size = (hb_bit_storage (total + 1) + 7) / 8;
+
+ return min_size + HBUINT8::static_size + (hb_len (it) + 1) * off_size + total;
+ }
+
void set_offset_at (unsigned int index, unsigned int offset)
{
assert (index <= count);
@@ -207,10 +150,14 @@ struct CFFIndex
unsigned int size = offSize;
const HBUINT8 *p = offsets + size * index;
- unsigned int offset = 0;
- for (; size; size--)
- offset = (offset << 8) + *p++;
- return offset;
+ switch (size)
+ {
+ case 1: return * (HBUINT8 *) p;
+ case 2: return * (HBUINT16 *) p;
+ case 3: return * (HBUINT24 *) p;
+ case 4: return * (HBUINT32 *) p;
+ default: return 0;
+ }
}
unsigned int length_at (unsigned int index) const
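The removed byte loop and the new switch in offset_at() both decode a big-endian offset of offSize bytes; the switch simply loads each supported width in one step through the fixed-size HBUINT types. A standalone sketch of the equivalent decoding in plain C++ (illustrative only, not the OT types):

#include <cassert>
#include <cstdint>

static uint32_t offset_at (const uint8_t *p, unsigned off_size)
{
  switch (off_size)
  {
    case 1: return p[0];
    case 2: return (uint32_t) p[0] << 8  | p[1];
    case 3: return (uint32_t) p[0] << 16 | (uint32_t) p[1] << 8 | p[2];
    case 4: return (uint32_t) p[0] << 24 | (uint32_t) p[1] << 16
                 | (uint32_t) p[2] << 8  | p[3];
    default: return 0;   // CFF only allows offSize 1..4
  }
}

int main ()
{
  const uint8_t off3[3] = {0x01, 0x02, 0x03};
  assert (offset_at (off3, 3) == 0x010203u);
  return 0;
}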
@@ -229,6 +176,7 @@ struct CFFIndex
hb_ubytes_t operator [] (unsigned int index) const
{
if (unlikely (index >= count)) return hb_ubytes_t ();
+ _hb_compiler_memory_r_barrier ();
unsigned length = length_at (index);
if (unlikely (!length)) return hb_ubytes_t ();
return hb_ubytes_t (data_base () + offset_at (index) - 1, length);
@@ -280,7 +228,7 @@ struct CFFIndexOf : CFFIndex<COUNT>
if (unlikely (!c->extend_min (this))) return_trace (false);
this->count = dataArrayLen;
this->offSize = offSize_;
- if (unlikely (!c->allocate_size<HBUINT8> (offSize_ * (dataArrayLen + 1))))
+ if (unlikely (!c->allocate_size<HBUINT8> (offSize_ * (dataArrayLen + 1), false)))
return_trace (false);
/* serialize indices */
@@ -288,10 +236,10 @@ struct CFFIndexOf : CFFIndex<COUNT>
unsigned int i = 0;
for (; i < dataArrayLen; i++)
{
- CFFIndex<COUNT>::set_offset_at (i, offset);
+ this->set_offset_at (i, offset);
offset += dataSizeArray[i];
}
- CFFIndex<COUNT>::set_offset_at (i, offset);
+ this->set_offset_at (i, offset);
/* serialize data */
for (unsigned int i = 0; i < dataArrayLen; i++)
@@ -324,13 +272,12 @@ struct Dict : UnsizedByteStr
template <typename T, typename V>
static bool serialize_int_op (hb_serialize_context_t *c, op_code_t op, V value, op_code_t intOp)
{
- // XXX: not sure why but LLVM fails to compile the following 'unlikely' macro invocation
- if (/*unlikely*/ (!serialize_int<T, V> (c, intOp, value)))
+ if (unlikely ((!serialize_int<T, V> (c, intOp, value))))
return false;
TRACE_SERIALIZE (this);
/* serialize the opcode */
- HBUINT8 *p = c->allocate_size<HBUINT8> (OpCode_Size (op));
+ HBUINT8 *p = c->allocate_size<HBUINT8> (OpCode_Size (op), false);
if (unlikely (!p)) return_trace (false);
if (Is_OpCode_ESC (op))
{
@@ -415,9 +362,8 @@ struct FDSelect0 {
TRACE_SANITIZE (this);
if (unlikely (!(c->check_struct (this))))
return_trace (false);
- for (unsigned int i = 0; i < c->get_num_glyphs (); i++)
- if (unlikely (!fds[i].sanitize (c)))
- return_trace (false);
+ if (unlikely (!c->check_array (fds, c->get_num_glyphs ())))
+ return_trace (false);
return_trace (true);
}
@@ -471,14 +417,20 @@ struct FDSelect3_4
return_trace (true);
}
- hb_codepoint_t get_fd (hb_codepoint_t glyph) const
+ static int _cmp_range (const void *_key, const void *_item)
{
- unsigned int i;
- for (i = 1; i < nRanges (); i++)
- if (glyph < ranges[i].first)
- break;
+ hb_codepoint_t glyph = * (hb_codepoint_t *) _key;
+ FDSelect3_4_Range<GID_TYPE, FD_TYPE> *range = (FDSelect3_4_Range<GID_TYPE, FD_TYPE> *) _item;
- return (hb_codepoint_t) ranges[i - 1].fd;
+ if (glyph < range[0].first) return -1;
+ if (glyph < range[1].first) return 0;
+ return +1;
+ }
+
+ hb_codepoint_t get_fd (hb_codepoint_t glyph) const
+ {
+ auto *range = hb_bsearch (glyph, &ranges[0], nRanges () - 1, sizeof (ranges[0]), _cmp_range);
+ return range ? range->fd : ranges[nRanges () - 1].fd;
}
GID_TYPE &nRanges () { return ranges.len; }
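The rewritten get_fd() replaces the linear scan with hb_bsearch over the range array, using a comparator that brackets the glyph between one range's first glyph and the next range's first glyph. A self-contained sketch with a hypothetical Range type (in the real table the upper bound of the final range comes from the sentinel entry, which is why only nRanges - 1 items are searched):

#include <cassert>
#include <cstdint>
#include <cstdlib>

struct Range { uint32_t first; uint16_t fd; };

static int cmp_range (const void *key, const void *item)
{
  uint32_t glyph = *(const uint32_t *) key;
  const Range *range = (const Range *) item;
  if (glyph <  range[0].first) return -1;   // before this range
  if (glyph >= range[1].first) return +1;   // at or past the next range
  return 0;                                 // inside [range[0].first, range[1].first)
}

int main ()
{
  // Three searchable ranges plus a final entry whose 'first' acts as the
  // upper bound of the last searchable range.
  static const Range ranges[] = { {0, 7}, {10, 2}, {30, 5}, {100, 0} };
  uint32_t glyph = 12;
  const Range *r = (const Range *) std::bsearch (&glyph, ranges, 3,
                                                 sizeof ranges[0], cmp_range);
  assert (r && r->fd == 2);   // glyph 12 falls in [10, 30) -> fd 2
  return 0;
}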
@@ -501,9 +453,9 @@ struct FDSelect
{
TRACE_SERIALIZE (this);
unsigned int size = src.get_size (num_glyphs);
- FDSelect *dest = c->allocate_size<FDSelect> (size);
+ FDSelect *dest = c->allocate_size<FDSelect> (size, false);
if (unlikely (!dest)) return_trace (false);
- memcpy (dest, &src, size);
+ hb_memcpy (dest, &src, size);
return_trace (true);
}
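The new CFFIndex::total_size() helper derives offSize from the payload size: CFF INDEX offsets are 1-based, so the largest stored offset is the data size plus one, and offSize is simply the number of bytes needed to represent it. A worked sketch with a naive stand-in for the bit-storage helper (not the real hb_bit_storage):

#include <cassert>

static unsigned bit_storage (unsigned v)   // smallest n such that v < (1u << n)
{
  unsigned n = 0;
  while (v) { n++; v >>= 1; }
  return n;
}

static unsigned off_size_for (unsigned total_data_size)
{
  return (bit_storage (total_data_size + 1) + 7) / 8;
}

int main ()
{
  assert (off_size_for (200)     == 1);   // max offset 201 fits in one byte
  assert (off_size_for (60000)   == 2);
  assert (off_size_for (1000000) == 3);
  return 0;
}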
diff --git a/thirdparty/harfbuzz/src/hb-ot-cff1-table.hh b/thirdparty/harfbuzz/src/hb-ot-cff1-table.hh
index 17b0296616..bb856c9ddb 100644
--- a/thirdparty/harfbuzz/src/hb-ot-cff1-table.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-cff1-table.hh
@@ -175,7 +175,7 @@ struct Encoding
unsigned int size = src.get_size ();
Encoding *dest = c->allocate_size<Encoding> (size);
if (unlikely (!dest)) return_trace (false);
- memcpy (dest, &src, size);
+ hb_memcpy (dest, &src, size);
return_trace (true);
}
@@ -471,7 +471,7 @@ struct Charset
unsigned int size = src.get_size (num_glyphs);
Charset *dest = c->allocate_size<Charset> (size);
if (unlikely (!dest)) return_trace (false);
- memcpy (dest, &src, size);
+ hb_memcpy (dest, &src, size);
return_trace (true);
}
@@ -617,7 +617,6 @@ struct CFF1StringIndex : CFF1Index
}
byte_str_array_t bytesArray;
- bytesArray.init ();
if (!bytesArray.resize (sidmap.get_population ()))
return_trace (false);
for (unsigned int i = 0; i < strings.count; i++)
@@ -628,7 +627,6 @@ struct CFF1StringIndex : CFF1Index
}
bool result = CFF1Index::serialize (c, bytesArray);
- bytesArray.fini ();
return_trace (result);
}
};
@@ -813,7 +811,7 @@ struct cff1_top_dict_opset_t : top_dict_opset_t<cff1_top_dict_val_t>
break;
default:
- env.last_offset = env.str_ref.offset;
+ env.last_offset = env.str_ref.get_offset ();
top_dict_opset_t<cff1_top_dict_val_t>::process_op (op, env, dictval);
/* Record this operand below if stack is empty, otherwise done */
if (!env.argStack.is_empty ()) return;
@@ -1295,10 +1293,10 @@ struct cff1
}
protected:
- hb_blob_t *blob = nullptr;
hb_sanitize_context_t sc;
public:
+ hb_blob_t *blob = nullptr;
const Encoding *encoding = nullptr;
const Charset *charset = nullptr;
const CFF1NameIndex *nameIndex = nullptr;
diff --git a/thirdparty/harfbuzz/src/hb-ot-cff2-table.hh b/thirdparty/harfbuzz/src/hb-ot-cff2-table.hh
index 746160dc8e..9081930bbe 100644
--- a/thirdparty/harfbuzz/src/hb-ot-cff2-table.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-cff2-table.hh
@@ -56,7 +56,7 @@ struct CFF2FDSelect
unsigned int size = src.get_size (num_glyphs);
CFF2FDSelect *dest = c->allocate_size<CFF2FDSelect> (size);
if (unlikely (!dest)) return_trace (false);
- memcpy (dest, &src, size);
+ hb_memcpy (dest, &src, size);
return_trace (true);
}
@@ -124,7 +124,7 @@ struct CFF2VariationStore
unsigned int size_ = varStore->get_size ();
CFF2VariationStore *dest = c->allocate_size<CFF2VariationStore> (size_);
if (unlikely (!dest)) return_trace (false);
- memcpy (dest, varStore, size_);
+ hb_memcpy (dest, varStore, size_);
return_trace (true);
}
@@ -483,13 +483,18 @@ struct cff2
blob = nullptr;
}
+ hb_map_t *create_glyph_to_sid_map () const
+ {
+ return nullptr;
+ }
+
bool is_valid () const { return blob; }
protected:
- hb_blob_t *blob = nullptr;
hb_sanitize_context_t sc;
public:
+ hb_blob_t *blob = nullptr;
cff2_top_dict_values_t topDict;
const CFF2Subrs *globalSubrs = nullptr;
const CFF2VariationStore *varStore = nullptr;
diff --git a/thirdparty/harfbuzz/src/hb-ot-cmap-table.hh b/thirdparty/harfbuzz/src/hb-ot-cmap-table.hh
index 09c9fe93f3..523196fa79 100644
--- a/thirdparty/harfbuzz/src/hb-ot-cmap-table.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-cmap-table.hh
@@ -909,7 +909,7 @@ struct DefaultUVS : SortedArray32Of<UnicodeValueRange>
hb_codepoint_t first = arrayZ[i].startUnicodeValue;
hb_codepoint_t last = hb_min ((hb_codepoint_t) (first + arrayZ[i].additionalCount),
(hb_codepoint_t) HB_UNICODE_MAX);
- out->add_range (first, hb_min (last, 0x10FFFFu));
+ out->add_range (first, last);
}
}
@@ -925,37 +925,75 @@ struct DefaultUVS : SortedArray32Of<UnicodeValueRange>
if (unlikely (!c->copy<HBUINT32> (len))) return nullptr;
unsigned init_len = c->length ();
- hb_codepoint_t lastCode = HB_MAP_VALUE_INVALID;
- int count = -1;
-
- for (const UnicodeValueRange& _ : as_array ())
+ if (this->len > unicodes->get_population () * hb_bit_storage ((unsigned) this->len))
{
- for (const unsigned addcnt : hb_range ((unsigned) _.additionalCount + 1))
+ hb_codepoint_t start = HB_SET_VALUE_INVALID;
+ hb_codepoint_t end = HB_SET_VALUE_INVALID;
+
+ for (hb_codepoint_t u = HB_SET_VALUE_INVALID;
+ unicodes->next (&u);)
{
- unsigned curEntry = (unsigned) _.startUnicodeValue + addcnt;
- if (!unicodes->has (curEntry)) continue;
- count += 1;
- if (lastCode == HB_MAP_VALUE_INVALID)
- lastCode = curEntry;
- else if (lastCode + count != curEntry)
+ if (!as_array ().bsearch (u))
+ continue;
+ if (start == HB_SET_VALUE_INVALID)
{
+ start = u;
+ end = start - 1;
+ }
+ if (end + 1 != u || end - start == 255)
+ {
UnicodeValueRange rec;
- rec.startUnicodeValue = lastCode;
- rec.additionalCount = count - 1;
+ rec.startUnicodeValue = start;
+ rec.additionalCount = end - start;
c->copy<UnicodeValueRange> (rec);
-
- lastCode = curEntry;
- count = 0;
+ start = u;
}
+ end = u;
+ }
+ if (start != HB_SET_VALUE_INVALID)
+ {
+ UnicodeValueRange rec;
+ rec.startUnicodeValue = start;
+ rec.additionalCount = end - start;
+ c->copy<UnicodeValueRange> (rec);
}
- }
- if (lastCode != HB_MAP_VALUE_INVALID)
+ }
+ else
{
- UnicodeValueRange rec;
- rec.startUnicodeValue = lastCode;
- rec.additionalCount = count;
- c->copy<UnicodeValueRange> (rec);
+ hb_codepoint_t lastCode = HB_SET_VALUE_INVALID;
+ int count = -1;
+
+ for (const UnicodeValueRange& _ : *this)
+ {
+ hb_codepoint_t curEntry = (hb_codepoint_t) (_.startUnicodeValue - 1);
+ hb_codepoint_t end = curEntry + _.additionalCount + 2;
+
+ for (; unicodes->next (&curEntry) && curEntry < end;)
+ {
+ count += 1;
+ if (lastCode == HB_SET_VALUE_INVALID)
+ lastCode = curEntry;
+ else if (lastCode + count != curEntry)
+ {
+ UnicodeValueRange rec;
+ rec.startUnicodeValue = lastCode;
+ rec.additionalCount = count - 1;
+ c->copy<UnicodeValueRange> (rec);
+
+ lastCode = curEntry;
+ count = 0;
+ }
+ }
+ }
+
+ if (lastCode != HB_MAP_VALUE_INVALID)
+ {
+ UnicodeValueRange rec;
+ rec.startUnicodeValue = lastCode;
+ rec.additionalCount = count;
+ c->copy<UnicodeValueRange> (rec);
+ }
}
if (c->length () - init_len == 0)
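The rewritten DefaultUVS closure above picks between two iteration orders with a cost estimate: walking the retained-codepoint set costs roughly population x log2(range count) binary-search probes, while walking the ranges costs roughly one set probe per entry. A hedged sketch of just that decision (hypothetical helper names, not hb-iter):

static unsigned bit_storage (unsigned v)   // stand-in for hb_bit_storage
{ unsigned n = 0; while (v) { n++; v >>= 1; } return n; }

// Mirrors: this->len > unicodes->get_population () * hb_bit_storage (this->len)
static bool iterate_set_side (unsigned num_ranges, unsigned set_population)
{ return num_ranges > set_population * bit_storage (num_ranges); }

int main ()
{
  bool sparse = iterate_set_side (100000, 50);    // true: bsearch each kept codepoint
  bool dense  = iterate_set_side (64, 50000);     // false: walk the ranges, probe the set
  return (sparse && !dense) ? 0 : 1;
}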
@@ -1474,32 +1512,80 @@ struct EncodingRecord
DEFINE_SIZE_STATIC (8);
};
+struct cmap;
+
struct SubtableUnicodesCache {
private:
- const void* base;
- hb_hashmap_t<intptr_t, hb::unique_ptr<hb_set_t>> cached_unicodes;
+ hb_blob_ptr_t<cmap> base_blob;
+ const char* base;
+ hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> cached_unicodes;
public:
+
+ static SubtableUnicodesCache* create (hb_blob_ptr_t<cmap> source_table)
+ {
+ SubtableUnicodesCache* cache =
+ (SubtableUnicodesCache*) hb_malloc (sizeof(SubtableUnicodesCache));
+ new (cache) SubtableUnicodesCache (source_table);
+ return cache;
+ }
+
+ static void destroy (void* value) {
+ if (!value) return;
+
+ SubtableUnicodesCache* cache = (SubtableUnicodesCache*) value;
+ cache->~SubtableUnicodesCache ();
+ hb_free (cache);
+ }
+
SubtableUnicodesCache(const void* cmap_base)
- : base(cmap_base), cached_unicodes() {}
+ : base_blob(),
+ base ((const char*) cmap_base),
+ cached_unicodes ()
+ {}
+
+ SubtableUnicodesCache(hb_blob_ptr_t<cmap> base_blob_)
+ : base_blob(base_blob_),
+ base ((const char *) base_blob.get()),
+ cached_unicodes ()
+ {}
+
+ ~SubtableUnicodesCache()
+ {
+ base_blob.destroy ();
+ }
+
+ bool same_base(const void* other) const
+ {
+ return other == (const void*) base;
+ }
- hb_set_t* set_for (const EncodingRecord* record)
+ const hb_set_t* set_for (const EncodingRecord* record,
+ SubtableUnicodesCache& mutable_cache) const
{
- if (!cached_unicodes.has ((intptr_t) record))
+ if (cached_unicodes.has ((unsigned) ((const char *) record - base)))
+ return cached_unicodes.get ((unsigned) ((const char *) record - base));
+
+ return mutable_cache.set_for (record);
+ }
+
+ const hb_set_t* set_for (const EncodingRecord* record)
+ {
+ if (!cached_unicodes.has ((unsigned) ((const char *) record - base)))
{
hb_set_t *s = hb_set_create ();
if (unlikely (s->in_error ()))
return hb_set_get_empty ();
-
+
(base+record->subtable).collect_unicodes (s);
- if (unlikely (!cached_unicodes.set ((intptr_t) record, hb::unique_ptr<hb_set_t> {s})))
+ if (unlikely (!cached_unicodes.set ((unsigned) ((const char *) record - base), hb::unique_ptr<hb_set_t> {s})))
return hb_set_get_empty ();
return s;
}
- return cached_unicodes.get ((intptr_t) record);
+ return cached_unicodes.get ((unsigned) ((const char *) record - base));
}
};
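SubtableUnicodesCache now keys cached sets by the record's byte offset from the cmap table start instead of by raw pointer, which is what lets a cache built against the source blob (held via base_blob) be consulted for a different in-memory copy of the same table. A sketch of the keying with standard containers (std::unordered_map/std::set stand-ins, not hb_hashmap_t/hb_set_t):

#include <cstdio>
#include <set>
#include <unordered_map>

struct unicode_cache
{
  const char *base;                                       // start of the table
  std::unordered_map<unsigned, std::set<unsigned>> sets;  // offset -> codepoints

  explicit unicode_cache (const void *table_base) : base ((const char *) table_base) {}

  unsigned key_for (const void *record) const
  { return (unsigned) ((const char *) record - base); }
};

int main ()
{
  char table[64] = {};
  unicode_cache cache (table);
  const void *record = table + 20;            // some encoding record inside the table
  cache.sets[cache.key_for (record)] = {0x41, 0x42};
  printf ("%zu\n", cache.sets[20].size ());   // 2: same key regardless of which copy
  return 0;
}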
@@ -1523,13 +1609,30 @@ struct cmap
{
static constexpr hb_tag_t tableTag = HB_OT_TAG_cmap;
+
+ static SubtableUnicodesCache* create_filled_cache(hb_blob_ptr_t<cmap> source_table) {
+ const cmap* cmap = source_table.get();
+ auto it =
+ + hb_iter (cmap->encodingRecord)
+ | hb_filter ([&](const EncodingRecord& _) {
+ return cmap::filter_encoding_records_for_subset (cmap, _);
+ })
+ ;
+
+ SubtableUnicodesCache* cache = SubtableUnicodesCache::create(source_table);
+ for (const EncodingRecord& _ : it)
+ cache->set_for(&_); // populate the cache for this encoding record.
+
+ return cache;
+ }
+
template<typename Iterator, typename EncodingRecIter,
hb_requires (hb_is_iterator (EncodingRecIter))>
bool serialize (hb_serialize_context_t *c,
Iterator it,
EncodingRecIter encodingrec_iter,
const void *base,
- const hb_subset_plan_t *plan,
+ hb_subset_plan_t *plan,
bool drop_format_4 = false)
{
if (unlikely (!c->extend_min ((*this)))) return false;
@@ -1538,7 +1641,14 @@ struct cmap
unsigned format4objidx = 0, format12objidx = 0, format14objidx = 0;
auto snap = c->snapshot ();
- SubtableUnicodesCache unicodes_cache (base);
+ SubtableUnicodesCache local_unicodes_cache (base);
+ const SubtableUnicodesCache* unicodes_cache = &local_unicodes_cache;
+
+ if (plan->accelerator &&
+ plan->accelerator->cmap_cache &&
+ plan->accelerator->cmap_cache->same_base (base))
+ unicodes_cache = plan->accelerator->cmap_cache;
+
for (const EncodingRecord& _ : encodingrec_iter)
{
if (c->in_error ())
@@ -1547,7 +1657,7 @@ struct cmap
unsigned format = (base+_.subtable).u.format;
if (format != 4 && format != 12 && format != 14) continue;
- hb_set_t* unicodes_set = unicodes_cache.set_for (&_);
+ const hb_set_t* unicodes_set = unicodes_cache->set_for (&_, local_unicodes_cache);
if (!drop_format_4 && format == 4)
{
@@ -1566,7 +1676,13 @@ struct cmap
else if (format == 12)
{
- if (_can_drop (_, *unicodes_set, base, unicodes_cache, + it | hb_map (hb_first), encodingrec_iter)) continue;
+ if (_can_drop (_,
+ *unicodes_set,
+ base,
+ *unicodes_cache,
+ local_unicodes_cache,
+ + it | hb_map (hb_first), encodingrec_iter))
+ continue;
c->copy (_, + it | hb_filter (*unicodes_set, hb_first), 12u, base, plan, &format12objidx);
}
else if (format == 14) c->copy (_, it, 14u, base, plan, &format14objidx);
@@ -1585,7 +1701,8 @@ struct cmap
bool _can_drop (const EncodingRecord& cmap12,
const hb_set_t& cmap12_unicodes,
const void* base,
- SubtableUnicodesCache& unicodes_cache,
+ const SubtableUnicodesCache& unicodes_cache,
+ SubtableUnicodesCache& local_unicodes_cache,
Iterator subset_unicodes,
EncodingRecordIterator encoding_records)
{
@@ -1616,7 +1733,7 @@ struct cmap
|| (base+_.subtable).get_language() != target_language)
continue;
- hb_set_t* sibling_unicodes = unicodes_cache.set_for (&_);
+ const hb_set_t* sibling_unicodes = unicodes_cache.set_for (&_, local_unicodes_cache);
auto cmap12 = + subset_unicodes | hb_filter (cmap12_unicodes);
auto sibling = + subset_unicodes | hb_filter (*sibling_unicodes);
@@ -1653,17 +1770,9 @@ struct cmap
auto encodingrec_iter =
+ hb_iter (encodingRecord)
- | hb_filter ([&] (const EncodingRecord& _)
- {
- if ((_.platformID == 0 && _.encodingID == 3) ||
- (_.platformID == 0 && _.encodingID == 4) ||
- (_.platformID == 3 && _.encodingID == 1) ||
- (_.platformID == 3 && _.encodingID == 10) ||
- (this + _.subtable).u.format == 14)
- return true;
-
- return false;
- })
+ | hb_filter ([&](const EncodingRecord& _) {
+ return cmap::filter_encoding_records_for_subset (this, _);
+ })
;
if (unlikely (!encodingrec_iter.len ())) return_trace (false);
@@ -1692,7 +1801,11 @@ struct cmap
{ return (_.second != HB_MAP_VALUE_INVALID); })
;
- return_trace (cmap_prime->serialize (c->serializer, it, encodingrec_iter, this, c->plan));
+ return_trace (cmap_prime->serialize (c->serializer,
+ it,
+ encodingrec_iter,
+ this,
+ c->plan));
}
const CmapSubtable *find_best_subtable (bool *symbol = nullptr) const
@@ -1928,6 +2041,19 @@ struct cmap
encodingRecord.sanitize (c, this));
}
+ private:
+
+ static bool filter_encoding_records_for_subset(const cmap* cmap,
+ const EncodingRecord& _)
+ {
+ return
+ (_.platformID == 0 && _.encodingID == 3) ||
+ (_.platformID == 0 && _.encodingID == 4) ||
+ (_.platformID == 3 && _.encodingID == 1) ||
+ (_.platformID == 3 && _.encodingID == 10) ||
+ (cmap + _.subtable).u.format == 14;
+ }
+
protected:
HBUINT16 version; /* Table version number (0). */
SortedArray16Of<EncodingRecord>
diff --git a/thirdparty/harfbuzz/src/hb-ot-color-cbdt-table.hh b/thirdparty/harfbuzz/src/hb-ot-color-cbdt-table.hh
index 23fa56c4f6..3246894d39 100644
--- a/thirdparty/harfbuzz/src/hb-ot-color-cbdt-table.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-color-cbdt-table.hh
@@ -67,7 +67,7 @@ _copy_data_to_cbdt (hb_vector_t<char> *cbdt_prime,
{
unsigned int new_len = cbdt_prime->length + length;
if (unlikely (!cbdt_prime->alloc (new_len))) return false;
- memcpy (cbdt_prime->arrayZ + cbdt_prime->length, data, length);
+ hb_memcpy (cbdt_prime->arrayZ + cbdt_prime->length, data, length);
cbdt_prime->length = new_len;
return true;
}
@@ -468,13 +468,13 @@ struct IndexSubtableRecord
if (unlikely (!c->serializer->check_success (records->resize (records->length + 1))))
return_trace (false);
- (*records)[records->length - 1].firstGlyphIndex = 1;
- (*records)[records->length - 1].lastGlyphIndex = 0;
+ records->tail ().firstGlyphIndex = 1;
+ records->tail ().lastGlyphIndex = 0;
bitmap_size_context->size += IndexSubtableRecord::min_size;
c->serializer->push ();
- if (unlikely (!add_new_subtable (c, bitmap_size_context, &((*records)[records->length - 1]), lookup, base, start)))
+ if (unlikely (!add_new_subtable (c, bitmap_size_context, &(records->tail ()), lookup, base, start)))
{
c->serializer->pop_discard ();
c->serializer->revert (snap);
diff --git a/thirdparty/harfbuzz/src/hb-ot-color-colr-table.hh b/thirdparty/harfbuzz/src/hb-ot-color-colr-table.hh
index f01d383bdc..1af9d30937 100644
--- a/thirdparty/harfbuzz/src/hb-ot-color-colr-table.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-color-colr-table.hh
@@ -39,11 +39,7 @@
#define HB_OT_TAG_COLR HB_TAG('C','O','L','R')
#ifndef HB_COLRV1_MAX_NESTING_LEVEL
-#define HB_COLRV1_MAX_NESTING_LEVEL 100
-#endif
-
-#ifndef COLRV1_ENABLE_SUBSETTING
-#define COLRV1_ENABLE_SUBSETTING 1
+#define HB_COLRV1_MAX_NESTING_LEVEL 16
#endif
namespace OT {
@@ -188,6 +184,7 @@ struct Variable
protected:
T value;
+ public:
VarIdx varIdxBase;
public:
DEFINE_SIZE_STATIC (4 + T::static_size);
@@ -196,6 +193,8 @@ struct Variable
template <typename T>
struct NoVariable
{
+ static constexpr uint32_t varIdxBase = VarIdx::NO_VARIATION;
+
NoVariable<T>* copy (hb_serialize_context_t *c) const
{
TRACE_SERIALIZE (this);
@@ -358,14 +357,14 @@ struct Affine2x3
return_trace (c->check_struct (this));
}
- HBFixed xx;
- HBFixed yx;
- HBFixed xy;
- HBFixed yy;
- HBFixed dx;
- HBFixed dy;
+ F16DOT16 xx;
+ F16DOT16 yx;
+ F16DOT16 xy;
+ F16DOT16 yy;
+ F16DOT16 dx;
+ F16DOT16 dy;
public:
- DEFINE_SIZE_STATIC (6 * HBFixed::static_size);
+ DEFINE_SIZE_STATIC (6 * F16DOT16::static_size);
};
struct PaintColrLayers
@@ -888,6 +887,11 @@ struct PaintComposite
DEFINE_SIZE_STATIC (8);
};
+struct ClipBoxData
+{
+ int xMin, yMin, xMax, yMax;
+};
+
struct ClipBoxFormat1
{
bool sanitize (hb_sanitize_context_t *c) const
@@ -896,6 +900,14 @@ struct ClipBoxFormat1
return_trace (c->check_struct (this));
}
+ void get_clip_box (ClipBoxData &clip_box, const VarStoreInstancer &instancer HB_UNUSED) const
+ {
+ clip_box.xMin = xMin;
+ clip_box.yMin = yMin;
+ clip_box.xMax = xMax;
+ clip_box.yMax = yMax;
+ }
+
public:
HBUINT8 format; /* format = 1(noVar) or 2(Var)*/
FWORD xMin;
@@ -906,7 +918,20 @@ struct ClipBoxFormat1
DEFINE_SIZE_STATIC (1 + 4 * FWORD::static_size);
};
-struct ClipBoxFormat2 : Variable<ClipBoxFormat1> {};
+struct ClipBoxFormat2 : Variable<ClipBoxFormat1>
+{
+ void get_clip_box (ClipBoxData &clip_box, const VarStoreInstancer &instancer) const
+ {
+ value.get_clip_box(clip_box, instancer);
+ if (instancer)
+ {
+ clip_box.xMin += _hb_roundf (instancer (varIdxBase, 0));
+ clip_box.yMin += _hb_roundf (instancer (varIdxBase, 1));
+ clip_box.xMax += _hb_roundf (instancer (varIdxBase, 2));
+ clip_box.yMax += _hb_roundf (instancer (varIdxBase, 3));
+ }
+ }
+};
struct ClipBox
{
@@ -932,6 +957,28 @@ struct ClipBox
}
}
+ bool get_extents (hb_glyph_extents_t *extents,
+ const VarStoreInstancer &instancer) const
+ {
+ ClipBoxData clip_box;
+ switch (u.format) {
+ case 1:
+ u.format1.get_clip_box (clip_box, instancer);
+ break;
+ case 2:
+ u.format2.get_clip_box (clip_box, instancer);
+ break;
+ default:
+ return false;
+ }
+
+ extents->x_bearing = clip_box.xMin;
+ extents->y_bearing = clip_box.yMax;
+ extents->width = clip_box.xMax - clip_box.xMin;
+ extents->height = clip_box.yMin - clip_box.yMax;
+ return true;
+ }
+
protected:
union {
HBUINT8 format; /* Format identifier */
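ClipBoxFormat1/2 now expose get_clip_box(), and ClipBox::get_extents() below converts the box into hb_glyph_extents_t, where the bearing is the top-left corner and the height is negative because extents run downward from y_bearing. A plain-struct sketch of that mapping (not the OT types):

#include <cassert>

struct clip_box { int x_min, y_min, x_max, y_max; };
struct glyph_extents { int x_bearing, y_bearing, width, height; };

static glyph_extents to_extents (const clip_box &b)
{
  return { b.x_min,              // left edge
           b.y_max,              // top edge
           b.x_max - b.x_min,    // positive width
           b.y_min - b.y_max };  // negative height (top-down)
}

int main ()
{
  glyph_extents e = to_extents ({10, -200, 510, 800});
  assert (e.x_bearing == 10 && e.y_bearing == 800);
  assert (e.width == 500 && e.height == -1000);
  return 0;
}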
@@ -942,6 +989,9 @@ struct ClipBox
struct ClipRecord
{
+ int cmp (hb_codepoint_t g) const
+ { return g < startGlyphID ? -1 : g <= endGlyphID ? 0 : +1; }
+
ClipRecord* copy (hb_serialize_context_t *c, const void *base) const
{
TRACE_SERIALIZE (this);
@@ -957,6 +1007,13 @@ struct ClipRecord
return_trace (c->check_struct (this) && clipBox.sanitize (c, base));
}
+ bool get_extents (hb_glyph_extents_t *extents,
+ const void *base,
+ const VarStoreInstancer &instancer) const
+ {
+ return (base+clipBox).get_extents (extents, instancer);
+ }
+
public:
HBUINT16 startGlyphID; // first gid clip applies to
HBUINT16 endGlyphID; // last gid clip applies to, inclusive
@@ -964,6 +1021,7 @@ struct ClipRecord
public:
DEFINE_SIZE_STATIC (7);
};
+DECLARE_NULL_NAMESPACE_BYTES (OT, ClipRecord);
struct ClipList
{
@@ -1052,11 +1110,26 @@ struct ClipList
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
+ // TODO Make a formatted struct!
return_trace (c->check_struct (this) && clips.sanitize (c, this));
}
+ bool
+ get_extents (hb_codepoint_t gid,
+ hb_glyph_extents_t *extents,
+ const VarStoreInstancer &instancer) const
+ {
+ auto *rec = clips.as_array ().bsearch (gid);
+ if (rec)
+ {
+ rec->get_extents (extents, this, instancer);
+ return true;
+ }
+ return false;
+ }
+
HBUINT8 format; // Set to 1.
- Array32Of<ClipRecord> clips; // Clip records, sorted by startGlyphID
+ SortedArray32Of<ClipRecord> clips; // Clip records, sorted by startGlyphID
public:
DEFINE_SIZE_ARRAY_SIZED (5, clips);
};
@@ -1359,7 +1432,7 @@ struct COLR
(this+baseGlyphsZ).sanitize (c, numBaseGlyphs) &&
(this+layersZ).sanitize (c, numLayers) &&
(version == 0 ||
- (COLRV1_ENABLE_SUBSETTING && version == 1 &&
+ (version == 1 &&
baseGlyphList.sanitize (c, this) &&
layerList.sanitize (c, this) &&
clipList.sanitize (c, this) &&
@@ -1516,6 +1589,30 @@ struct COLR
return_trace (true);
}
+ bool
+ get_extents (hb_font_t *font, hb_codepoint_t glyph, hb_glyph_extents_t *extents) const
+ {
+ if (version != 1)
+ return false;
+
+ VarStoreInstancer instancer (this+varStore,
+ this+varIdxMap,
+ hb_array (font->coords, font->num_coords));
+
+ if ((this+clipList).get_extents (glyph,
+ extents,
+ instancer))
+ {
+ extents->x_bearing = font->em_scale_x (extents->x_bearing);
+ extents->y_bearing = font->em_scale_x (extents->y_bearing);
+ extents->width = font->em_scale_x (extents->width);
+ extents->height = font->em_scale_x (extents->height);
+ return true;
+ }
+
+ return false;
+ }
+
protected:
HBUINT16 version; /* Table version number (starts at 0). */
HBUINT16 numBaseGlyphs; /* Number of Base Glyph Records. */
diff --git a/thirdparty/harfbuzz/src/hb-ot-color.cc b/thirdparty/harfbuzz/src/hb-ot-color.cc
index a9ae013682..696ca3e17f 100644
--- a/thirdparty/harfbuzz/src/hb-ot-color.cc
+++ b/thirdparty/harfbuzz/src/hb-ot-color.cc
@@ -295,8 +295,8 @@ hb_ot_color_has_png (hb_face_t *face)
* @glyph: a glyph index
*
* Fetches the PNG image for a glyph. This function takes a font object, not a face object,
- * as input. To get an optimally sized PNG blob, the UPEM value must be set on the @font
- * object. If UPEM is unset, the blob returned will be the largest PNG available.
+ * as input. To get an optimally sized PNG blob, the PPEM values must be set on the @font
+ * object. If PPEM is unset, the blob returned will be the largest PNG available.
*
* If the glyph has no PNG image, the singleton empty blob is returned.
*
diff --git a/thirdparty/harfbuzz/src/hb-ot-face-table-list.hh b/thirdparty/harfbuzz/src/hb-ot-face-table-list.hh
index c05034b3bb..c9da36c1bb 100644
--- a/thirdparty/harfbuzz/src/hb-ot-face-table-list.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-face-table-list.hh
@@ -56,9 +56,9 @@ HB_OT_CORE_TABLE (OT, maxp)
#if !defined(HB_NO_FACE_COLLECT_UNICODES) || !defined(HB_NO_OT_FONT)
HB_OT_ACCELERATOR (OT, cmap)
#endif
-HB_OT_TABLE (OT, hhea)
+HB_OT_CORE_TABLE (OT, hhea)
HB_OT_ACCELERATOR (OT, hmtx)
-HB_OT_TABLE (OT, OS2)
+HB_OT_CORE_TABLE (OT, OS2)
#if !defined(HB_NO_OT_FONT_GLYPH_NAMES) || !defined(HB_NO_METRICS) || !defined(HB_NO_STYLE)
HB_OT_ACCELERATOR (OT, post)
#endif
@@ -66,7 +66,7 @@ HB_OT_ACCELERATOR (OT, post)
HB_OT_ACCELERATOR (OT, name)
#endif
#ifndef HB_NO_STYLE
-HB_OT_TABLE (OT, STAT)
+HB_OT_CORE_TABLE (OT, STAT)
#endif
#ifndef HB_NO_META
HB_OT_ACCELERATOR (OT, meta)
@@ -74,9 +74,9 @@ HB_OT_ACCELERATOR (OT, meta)
/* Vertical layout. */
#ifndef HB_NO_VERTICAL
-HB_OT_TABLE (OT, vhea)
+HB_OT_CORE_TABLE (OT, vhea)
HB_OT_ACCELERATOR (OT, vmtx)
-HB_OT_TABLE (OT, VORG)
+HB_OT_CORE_TABLE (OT, VORG)
#endif
/* TrueType outlines. */
@@ -91,15 +91,15 @@ HB_OT_ACCELERATOR (OT, cff2)
/* OpenType variations. */
#ifndef HB_NO_VAR
-HB_OT_TABLE (OT, fvar)
-HB_OT_TABLE (OT, avar)
+HB_OT_CORE_TABLE (OT, fvar)
+HB_OT_CORE_TABLE (OT, avar)
HB_OT_ACCELERATOR (OT, gvar)
-HB_OT_TABLE (OT, MVAR)
+HB_OT_CORE_TABLE (OT, MVAR)
#endif
/* Legacy kern. */
#ifndef HB_NO_OT_KERN
-HB_OT_TABLE (OT, kern)
+HB_OT_CORE_TABLE (OT, kern)
#endif
/* OpenType shaping. */
@@ -107,12 +107,12 @@ HB_OT_TABLE (OT, kern)
HB_OT_ACCELERATOR (OT, GDEF)
HB_OT_ACCELERATOR (OT, GSUB)
HB_OT_ACCELERATOR (OT, GPOS)
-//HB_OT_TABLE (OT, JSTF)
+//HB_OT_CORE_TABLE (OT, JSTF)
#endif
/* OpenType baseline. */
#ifndef HB_NO_BASE
-HB_OT_TABLE (OT, BASE)
+HB_OT_CORE_TABLE (OT, BASE)
#endif
/* AAT shaping. */
@@ -129,8 +129,8 @@ HB_OT_TABLE (AAT, feat)
/* OpenType color fonts. */
#ifndef HB_NO_COLOR
-HB_OT_TABLE (OT, COLR)
-HB_OT_TABLE (OT, CPAL)
+HB_OT_CORE_TABLE (OT, COLR)
+HB_OT_CORE_TABLE (OT, CPAL)
HB_OT_ACCELERATOR (OT, CBDT)
HB_OT_ACCELERATOR (OT, sbix)
HB_OT_ACCELERATOR (OT, SVG)
@@ -138,7 +138,7 @@ HB_OT_ACCELERATOR (OT, SVG)
/* OpenType math. */
#ifndef HB_NO_MATH
-HB_OT_TABLE (OT, MATH)
+HB_OT_CORE_TABLE (OT, MATH)
#endif
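This file is an X-macro list: the including translation unit defines what HB_OT_TABLE / HB_OT_CORE_TABLE / HB_OT_ACCELERATOR expand to, so promoting entries to HB_OT_CORE_TABLE changes how every expansion site treats them. A toy illustration of the pattern (toy macro names, not the real ones):

#include <cstdio>

#define TOY_TABLE_LIST \
  TOY_CORE_TABLE (head) \
  TOY_CORE_TABLE (hhea) \
  TOY_ACCEL_TABLE (cmap)

int main ()
{
  // One possible "expansion site": print each entry with its kind.
  #define TOY_CORE_TABLE(t)  printf ("core:  %s\n", #t);
  #define TOY_ACCEL_TABLE(t) printf ("accel: %s\n", #t);
  TOY_TABLE_LIST
  #undef TOY_CORE_TABLE
  #undef TOY_ACCEL_TABLE
  return 0;
}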
diff --git a/thirdparty/harfbuzz/src/hb-ot-font.cc b/thirdparty/harfbuzz/src/hb-ot-font.cc
index c90a65665c..8405103423 100644
--- a/thirdparty/harfbuzz/src/hb-ot-font.cc
+++ b/thirdparty/harfbuzz/src/hb-ot-font.cc
@@ -40,12 +40,12 @@
#include "hb-ot-cff1-table.hh"
#include "hb-ot-cff2-table.hh"
#include "hb-ot-hmtx-table.hh"
-#include "hb-ot-os2-table.hh"
#include "hb-ot-post-table.hh"
#include "hb-ot-stat-table.hh" // Just so we compile it; unused otherwise.
#include "hb-ot-vorg-table.hh"
#include "hb-ot-color-cbdt-table.hh"
#include "hb-ot-color-sbix-table.hh"
+#include "hb-ot-color-colr-table.hh"
/**
@@ -349,17 +349,17 @@ hb_ot_get_glyph_extents (hb_font_t *font,
#if !defined(HB_NO_OT_FONT_BITMAP) && !defined(HB_NO_COLOR)
if (ot_face->sbix->get_extents (font, glyph, extents)) return true;
+ if (ot_face->CBDT->get_extents (font, glyph, extents)) return true;
+#endif
+#if !defined(HB_NO_COLOR)
+ if (ot_face->COLR->get_extents (font, glyph, extents)) return true;
#endif
if (ot_face->glyf->get_extents (font, glyph, extents)) return true;
#ifndef HB_NO_OT_FONT_CFF
if (ot_face->cff1->get_extents (font, glyph, extents)) return true;
if (ot_face->cff2->get_extents (font, glyph, extents)) return true;
#endif
-#if !defined(HB_NO_OT_FONT_BITMAP) && !defined(HB_NO_COLOR)
- if (ot_face->CBDT->get_extents (font, glyph, extents)) return true;
-#endif
- // TODO Hook up side-bearings variations.
return false;
}
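hb_ot_get_glyph_extents() asks each table in turn and the first provider that reports extents wins, so moving the CBDT and COLR checks ahead of glyf changes which table answers for glyphs present in more than one of them. A small first-match-wins sketch (hypothetical providers, not the ot_face accessors):

#include <cstdio>

struct extents { int w, h; };

static bool from_color (unsigned gid, extents *e)
{ if (gid == 7) { *e = {500, -600}; return true; } return false; }

static bool from_outline (unsigned gid, extents *e)
{ (void) gid; *e = {480, -580}; return true; }

static bool get_extents (unsigned gid, extents *e)
{
  if (from_color   (gid, e)) return true;   // asked first, like COLR/CBDT above
  if (from_outline (gid, e)) return true;   // outline fallback, like glyf/CFF
  return false;
}

int main ()
{
  extents e;
  get_extents (7, &e);
  printf ("%d %d\n", e.w, e.h);   // 500 -600: the color table answered
  return 0;
}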
diff --git a/thirdparty/harfbuzz/src/hb-ot-layout-common.hh b/thirdparty/harfbuzz/src/hb-ot-layout-common.hh
index 05916a252c..0f424f5aa6 100644
--- a/thirdparty/harfbuzz/src/hb-ot-layout-common.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-layout-common.hh
@@ -94,6 +94,19 @@ static bool ClassDef_remap_and_serialize (
hb_sorted_vector_t<hb_pair_t<hb_codepoint_t, hb_codepoint_t>> &glyph_and_klass, /* IN/OUT */
hb_map_t *klass_map /*IN/OUT*/);
+struct hb_collect_feature_substitutes_with_var_context_t
+{
+ const hb_map_t *axes_index_tag_map;
+ const hb_hashmap_t<hb_tag_t, int> *axes_location;
+ hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *record_cond_idx_map;
+ hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map;
+
+ // not stored in subset_plan
+ hb_set_t *feature_indices;
+ bool apply;
+ unsigned cur_record_idx;
+ hb_hashmap_t<hb::shared_ptr<hb_map_t>, unsigned> *conditionset_map;
+};
struct hb_prune_langsys_context_t
{
@@ -160,24 +173,40 @@ struct hb_subset_layout_context_t :
const hb_map_t *lookup_index_map;
const hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *script_langsys_map;
const hb_map_t *feature_index_map;
+ const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map;
+ hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map;
+
unsigned cur_script_index;
+ unsigned cur_feature_var_record_idx;
hb_subset_layout_context_t (hb_subset_context_t *c_,
- hb_tag_t tag_,
- hb_map_t *lookup_map_,
- hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *script_langsys_map_,
- hb_map_t *feature_index_map_) :
+ hb_tag_t tag_) :
subset_context (c_),
table_tag (tag_),
- lookup_index_map (lookup_map_),
- script_langsys_map (script_langsys_map_),
- feature_index_map (feature_index_map_),
cur_script_index (0xFFFFu),
+ cur_feature_var_record_idx (0u),
script_count (0),
langsys_count (0),
feature_index_count (0),
lookup_index_count (0)
- {}
+ {
+ if (tag_ == HB_OT_TAG_GSUB)
+ {
+ lookup_index_map = c_->plan->gsub_lookups;
+ script_langsys_map = c_->plan->gsub_langsys;
+ feature_index_map = c_->plan->gsub_features;
+ feature_substitutes_map = c_->plan->gsub_feature_substitutes_map;
+ feature_record_cond_idx_map = c_->plan->user_axes_location->is_empty () ? nullptr : c_->plan->gsub_feature_record_cond_idx_map;
+ }
+ else
+ {
+ lookup_index_map = c_->plan->gpos_lookups;
+ script_langsys_map = c_->plan->gpos_langsys;
+ feature_index_map = c_->plan->gpos_features;
+ feature_substitutes_map = c_->plan->gpos_feature_substitutes_map;
+ feature_record_cond_idx_map = c_->plan->user_axes_location->is_empty () ? nullptr : c_->plan->gpos_feature_record_cond_idx_map;
+ }
+ }
private:
unsigned script_count;
@@ -324,6 +353,31 @@ struct subset_record_array_t
const void *base;
};
+template<typename OutputArray, typename Arg>
+struct subset_record_array_arg_t
+{
+ subset_record_array_arg_t (hb_subset_layout_context_t *c_, OutputArray* out_,
+ const void *base_,
+ Arg &&arg_) : subset_layout_context (c_),
+ out (out_), base (base_), arg (arg_) {}
+
+ template <typename T>
+ void
+ operator () (T&& record)
+ {
+ auto snap = subset_layout_context->subset_context->serializer->snapshot ();
+ bool ret = record.subset (subset_layout_context, base, arg);
+ if (!ret) subset_layout_context->subset_context->serializer->revert (snap);
+ else out->len++;
+ }
+
+ private:
+ hb_subset_layout_context_t *subset_layout_context;
+ OutputArray *out;
+ const void *base;
+ Arg &&arg;
+};
+
/*
* Helper to subset a RecordList/record array. Subsets each Record in the array and
* discards the record if the subset operation returns false.
@@ -335,6 +389,13 @@ struct
operator () (hb_subset_layout_context_t *c, OutputArray* out,
const void *base) const
{ return subset_record_array_t<OutputArray> (c, out, base); }
+
+ /* Variant with one extra argument passed to subset */
+ template<typename OutputArray, typename Arg>
+ subset_record_array_arg_t<OutputArray, Arg>
+ operator () (hb_subset_layout_context_t *c, OutputArray* out,
+ const void *base, Arg &&arg) const
+ { return subset_record_array_arg_t<OutputArray, Arg> (c, out, base, arg); }
}
HB_FUNCOBJ (subset_record_array);
@@ -417,7 +478,7 @@ struct IndexArray : Array16Of<Index>
{
if (_count)
{
- + this->sub_array (start_offset, _count)
+ + this->as_array ().sub_array (start_offset, _count)
| hb_sink (hb_array (_indexes, *_count))
;
}
@@ -431,94 +492,6 @@ struct IndexArray : Array16Of<Index>
};
-struct Record_sanitize_closure_t {
- hb_tag_t tag;
- const void *list_base;
-};
-
-template <typename Type>
-struct Record
-{
- int cmp (hb_tag_t a) const { return tag.cmp (a); }
-
- bool subset (hb_subset_layout_context_t *c, const void *base) const
- {
- TRACE_SUBSET (this);
- auto *out = c->subset_context->serializer->embed (this);
- if (unlikely (!out)) return_trace (false);
- bool ret = out->offset.serialize_subset (c->subset_context, offset, base, c, &tag);
- return_trace (ret);
- }
-
- bool sanitize (hb_sanitize_context_t *c, const void *base) const
- {
- TRACE_SANITIZE (this);
- const Record_sanitize_closure_t closure = {tag, base};
- return_trace (c->check_struct (this) && offset.sanitize (c, base, &closure));
- }
-
- Tag tag; /* 4-byte Tag identifier */
- Offset16To<Type>
- offset; /* Offset from beginning of object holding
- * the Record */
- public:
- DEFINE_SIZE_STATIC (6);
-};
-
-template <typename Type>
-struct RecordArrayOf : SortedArray16Of<Record<Type>>
-{
- const Offset16To<Type>& get_offset (unsigned int i) const
- { return (*this)[i].offset; }
- Offset16To<Type>& get_offset (unsigned int i)
- { return (*this)[i].offset; }
- const Tag& get_tag (unsigned int i) const
- { return (*this)[i].tag; }
- unsigned int get_tags (unsigned int start_offset,
- unsigned int *record_count /* IN/OUT */,
- hb_tag_t *record_tags /* OUT */) const
- {
- if (record_count)
- {
- + this->sub_array (start_offset, record_count)
- | hb_map (&Record<Type>::tag)
- | hb_sink (hb_array (record_tags, *record_count))
- ;
- }
- return this->len;
- }
- bool find_index (hb_tag_t tag, unsigned int *index) const
- {
- return this->bfind (tag, index, HB_NOT_FOUND_STORE, Index::NOT_FOUND_INDEX);
- }
-};
-
-template <typename Type>
-struct RecordListOf : RecordArrayOf<Type>
-{
- const Type& operator [] (unsigned int i) const
- { return this+this->get_offset (i); }
-
- bool subset (hb_subset_context_t *c,
- hb_subset_layout_context_t *l) const
- {
- TRACE_SUBSET (this);
- auto *out = c->serializer->start_embed (*this);
- if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
-
- + this->iter ()
- | hb_apply (subset_record_array (l, out, this))
- ;
- return_trace (true);
- }
-
- bool sanitize (hb_sanitize_context_t *c) const
- {
- TRACE_SANITIZE (this);
- return_trace (RecordArrayOf<Type>::sanitize (c, this));
- }
-};
-
/* https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#size */
struct FeatureParamsSize
{
@@ -685,7 +658,7 @@ struct FeatureParamsCharacterVariants
{
if (char_count)
{
- + characters.sub_array (start_offset, char_count)
+ + characters.as_array ().sub_array (start_offset, char_count)
| hb_sink (hb_array (chars, *char_count))
;
}
@@ -801,6 +774,10 @@ struct FeatureParams
DEFINE_SIZE_MIN (0);
};
+struct Record_sanitize_closure_t {
+ hb_tag_t tag;
+ const void *list_base;
+};
struct Feature
{
@@ -897,6 +874,103 @@ struct Feature
DEFINE_SIZE_ARRAY_SIZED (4, lookupIndex);
};
+template <typename Type>
+struct Record
+{
+ int cmp (hb_tag_t a) const { return tag.cmp (a); }
+
+ bool subset (hb_subset_layout_context_t *c, const void *base, const void *f_sub = nullptr) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->subset_context->serializer->embed (this);
+ if (unlikely (!out)) return_trace (false);
+
+ if (!f_sub)
+ return_trace (out->offset.serialize_subset (c->subset_context, offset, base, c, &tag));
+
+ const Feature& f = *reinterpret_cast<const Feature *> (f_sub);
+ auto *s = c->subset_context->serializer;
+ s->push ();
+
+ out->offset = 0;
+ bool ret = f.subset (c->subset_context, c, &tag);
+ if (ret)
+ s->add_link (out->offset, s->pop_pack ());
+ else
+ s->pop_discard ();
+
+ return_trace (ret);
+ }
+
+ bool sanitize (hb_sanitize_context_t *c, const void *base) const
+ {
+ TRACE_SANITIZE (this);
+ const Record_sanitize_closure_t closure = {tag, base};
+ return_trace (c->check_struct (this) && offset.sanitize (c, base, &closure));
+ }
+
+ Tag tag; /* 4-byte Tag identifier */
+ Offset16To<Type>
+ offset; /* Offset from beginning of object holding
+ * the Record */
+ public:
+ DEFINE_SIZE_STATIC (6);
+};
+
+template <typename Type>
+struct RecordArrayOf : SortedArray16Of<Record<Type>>
+{
+ const Offset16To<Type>& get_offset (unsigned int i) const
+ { return (*this)[i].offset; }
+ Offset16To<Type>& get_offset (unsigned int i)
+ { return (*this)[i].offset; }
+ const Tag& get_tag (unsigned int i) const
+ { return (*this)[i].tag; }
+ unsigned int get_tags (unsigned int start_offset,
+ unsigned int *record_count /* IN/OUT */,
+ hb_tag_t *record_tags /* OUT */) const
+ {
+ if (record_count)
+ {
+ + this->as_array ().sub_array (start_offset, record_count)
+ | hb_map (&Record<Type>::tag)
+ | hb_sink (hb_array (record_tags, *record_count))
+ ;
+ }
+ return this->len;
+ }
+ bool find_index (hb_tag_t tag, unsigned int *index) const
+ {
+ return this->bfind (tag, index, HB_NOT_FOUND_STORE, Index::NOT_FOUND_INDEX);
+ }
+};
+
+template <typename Type>
+struct RecordListOf : RecordArrayOf<Type>
+{
+ const Type& operator [] (unsigned int i) const
+ { return this+this->get_offset (i); }
+
+ bool subset (hb_subset_context_t *c,
+ hb_subset_layout_context_t *l) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->start_embed (*this);
+ if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
+
+ + this->iter ()
+ | hb_apply (subset_record_array (l, out, this))
+ ;
+ return_trace (true);
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (RecordArrayOf<Type>::sanitize (c, this));
+ }
+};
+
struct RecordListOfFeature : RecordListOf<Feature>
{
bool subset (hb_subset_context_t *c,
@@ -906,12 +980,19 @@ struct RecordListOfFeature : RecordListOf<Feature>
auto *out = c->serializer->start_embed (*this);
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
- unsigned count = this->len;
- + hb_zip (*this, hb_range (count))
- | hb_filter (l->feature_index_map, hb_second)
- | hb_map (hb_first)
- | hb_apply (subset_record_array (l, out, this))
+ + hb_enumerate (*this)
+ | hb_filter (l->feature_index_map, hb_first)
+ | hb_apply ([l, out, this] (const hb_pair_t<unsigned, const Record<Feature>&>& _)
+ {
+ const Feature *f_sub = nullptr;
+ const Feature **f = nullptr;
+ if (l->feature_substitutes_map->has (_.first, &f))
+ f_sub = *f;
+
+ subset_record_array (l, out, this, f_sub) (_.second);
+ })
;
+
return_trace (true);
}
};
@@ -996,7 +1077,7 @@ struct LangSys
auto *out = c->serializer->start_embed (*this);
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
- const unsigned *v;
+ const uint32_t *v;
out->reqFeatureIndex = l->feature_index_map->has (reqFeatureIndex, &v) ? *v : 0xFFFFu;
if (!l->visitFeatureIndex (featureIndex.len))
@@ -1064,7 +1145,6 @@ struct Script
return;
}
- unsigned langsys_count = get_lang_sys_count ();
if (has_default_lang_sys ())
{
//only collect features from non-redundant langsys
@@ -1073,24 +1153,24 @@ struct Script
d.collect_features (c);
}
- for (auto _ : + hb_zip (langSys, hb_range (langsys_count)))
+ for (auto _ : + hb_enumerate (langSys))
{
- const LangSys& l = this+_.first.offset;
+ const LangSys& l = this+_.second.offset;
if (!c->visitLangsys (l.get_feature_count ())) continue;
if (l.compare (d, c->duplicate_feature_map)) continue;
l.collect_features (c);
- c->script_langsys_map->get (script_index)->add (_.second);
+ c->script_langsys_map->get (script_index)->add (_.first);
}
}
else
{
- for (auto _ : + hb_zip (langSys, hb_range (langsys_count)))
+ for (auto _ : + hb_enumerate (langSys))
{
- const LangSys& l = this+_.first.offset;
+ const LangSys& l = this+_.second.offset;
if (!c->visitLangsys (l.get_feature_count ())) continue;
l.collect_features (c);
- c->script_langsys_map->get (script_index)->add (_.second);
+ c->script_langsys_map->get (script_index)->add (_.first);
}
}
}
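Several hunks in this file replace hb_zip (array, hb_range (count)) with hb_enumerate (array), which pairs each element with its index directly (note the pair order flips: the index becomes hb_first). A stand-in sketch of the shape of that iteration using std containers rather than hb-iter:

#include <cstdio>
#include <utility>
#include <vector>

template <typename T>
static std::vector<std::pair<unsigned, const T*>> enumerate (const std::vector<T> &v)
{
  std::vector<std::pair<unsigned, const T*>> out;
  for (unsigned i = 0; i < v.size (); i++)
    out.push_back ({i, &v[i]});
  return out;
}

int main ()
{
  std::vector<int> lang_sys = {10, 20, 30};
  for (auto _ : enumerate (lang_sys))
    printf ("index %u -> value %d\n", _.first, *_.second);   // (index, element) pairs
  return 0;
}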
@@ -1128,10 +1208,9 @@ struct Script
const hb_set_t *active_langsys = l->script_langsys_map->get (l->cur_script_index);
if (active_langsys)
{
- unsigned count = langSys.len;
- + hb_zip (langSys, hb_range (count))
- | hb_filter (active_langsys, hb_second)
- | hb_map (hb_first)
+ + hb_enumerate (langSys)
+ | hb_filter (active_langsys, hb_first)
+ | hb_map (hb_second)
| hb_filter ([=] (const Record<LangSys>& record) {return l->visitLangSys (); })
| hb_apply (subset_record_array (l, &(out->langSys), this))
;
@@ -1167,12 +1246,11 @@ struct RecordListOfScript : RecordListOf<Script>
auto *out = c->serializer->start_embed (*this);
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
- unsigned count = this->len;
- for (auto _ : + hb_zip (*this, hb_range (count)))
+ for (auto _ : + hb_enumerate (*this))
{
auto snap = c->serializer->snapshot ();
- l->cur_script_index = _.second;
- bool ret = _.first.subset (l, this);
+ l->cur_script_index = _.first;
+ bool ret = _.second.subset (l, this);
if (!ret) c->serializer->revert (snap);
else out->len++;
}
@@ -1305,7 +1383,13 @@ struct Lookup
outMarkFilteringSet = markFilteringSet;
}
- return_trace (out->subTable.len);
+ // Always keep the lookup even if it's empty. The rest of layout subsetting depends on lookup
+ // indices being consistent with those computed during planning. So if an empty lookup is
+ // discarded during the subset phase it will invalidate all subsequent lookup indices.
+ // Generally we shouldn't end up with an empty lookup as we pre-prune them during the planning
+ // phase, but it can happen in rare cases such as when during closure subtable is considered
+  // phase, but it can happen in rare cases, such as when a subtable is considered degenerate
+  // during closure (see: https://github.com/harfbuzz/harfbuzz/issues/3853)
}
template <typename TSubTable>
@@ -1371,10 +1455,9 @@ struct LookupOffsetList : List16OfOffsetTo<TLookup, OffsetType>
auto *out = c->serializer->start_embed (this);
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
- unsigned count = this->len;
- + hb_zip (*this, hb_range (count))
- | hb_filter (l->lookup_index_map, hb_second)
- | hb_map (hb_first)
+ + hb_enumerate (*this)
+ | hb_filter (l->lookup_index_map, hb_first)
+ | hb_map (hb_second)
| hb_apply (subset_offset_array (c, *out, this))
;
return_trace (true);
@@ -1408,7 +1491,7 @@ static bool ClassDef_remap_and_serialize (hb_serialize_context_t *c,
klass_map->set (0, 0);
unsigned idx = klass_map->has (0) ? 1 : 0;
- for (const unsigned k: klasses.iter ())
+ for (const unsigned k: klasses)
{
if (klass_map->has (k)) continue;
klass_map->set (k, idx);
@@ -1441,6 +1524,11 @@ struct ClassDefFormat1_3
return classValue[(unsigned int) (glyph_id - startGlyph)];
}
+ unsigned get_population () const
+ {
+ return classValue.len;
+ }
+
template<typename Iterator,
hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
bool serialize (hb_serialize_context_t *c,
@@ -1465,7 +1553,7 @@ struct ClassDefFormat1_3
startGlyph = glyph_min;
if (unlikely (!classValue.serialize (c, glyph_count))) return_trace (false);
- for (const hb_pair_t<hb_codepoint_t, unsigned> gid_klass_pair : + it)
+ for (const hb_pair_t<hb_codepoint_t, uint32_t> gid_klass_pair : + it)
{
unsigned idx = gid_klass_pair.first - glyph_min;
classValue[idx] = gid_klass_pair.second;
@@ -1556,11 +1644,10 @@ struct ClassDefFormat1_3
bool intersects (const hb_set_t *glyphs) const
{
- /* TODO Speed up, using hb_set_next()? */
hb_codepoint_t start = startGlyph;
hb_codepoint_t end = startGlyph + classValue.len;
for (hb_codepoint_t iter = startGlyph - 1;
- hb_set_next (glyphs, &iter) && iter < end;)
+ glyphs->next (&iter) && iter < end;)
if (classValue[iter - start]) return true;
return false;
}
@@ -1571,10 +1658,10 @@ struct ClassDefFormat1_3
{
/* Match if there's any glyph that is not listed! */
hb_codepoint_t g = HB_SET_VALUE_INVALID;
- if (!hb_set_next (glyphs, &g)) return false;
+ if (!glyphs->next (&g)) return false;
if (g < startGlyph) return true;
g = startGlyph + count - 1;
- if (hb_set_next (glyphs, &g)) return true;
+ if (glyphs->next (&g)) return true;
/* Fall through. */
}
/* TODO Speed up, using set overlap first? */
@@ -1592,12 +1679,12 @@ struct ClassDefFormat1_3
if (klass == 0)
{
unsigned start_glyph = startGlyph;
- for (unsigned g = HB_SET_VALUE_INVALID;
- hb_set_next (glyphs, &g) && g < start_glyph;)
+ for (uint32_t g = HB_SET_VALUE_INVALID;
+ glyphs->next (&g) && g < start_glyph;)
intersect_glyphs->add (g);
- for (unsigned g = startGlyph + count - 1;
- hb_set_next (glyphs, &g);)
+ for (uint32_t g = startGlyph + count - 1;
+           glyphs->next (&g);)
intersect_glyphs->add (g);
return;
@@ -1613,7 +1700,7 @@ struct ClassDefFormat1_3
unsigned start_glyph = startGlyph;
unsigned end_glyph = start_glyph + count;
for (unsigned g = startGlyph - 1;
- hb_set_next (glyphs, &g) && g < end_glyph;)
+ glyphs->next (&g) && g < end_glyph;)
if (classValue.arrayZ[g - start_glyph] == klass)
intersect_glyphs->add (g);
#endif
@@ -1656,6 +1743,14 @@ struct ClassDefFormat2_4
return rangeRecord.bsearch (glyph_id).value;
}
+ unsigned get_population () const
+ {
+ typename Types::large_int ret = 0;
+ for (const auto &r : rangeRecord)
+ ret += r.get_population ();
+ return ret > UINT_MAX ? UINT_MAX : (unsigned) ret;
+ }
+
template<typename Iterator,
hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
bool serialize (hb_serialize_context_t *c,
@@ -1719,28 +1814,45 @@ struct ClassDefFormat2_4
{
TRACE_SUBSET (this);
const hb_map_t &glyph_map = *c->plan->glyph_map_gsub;
+ const hb_set_t &glyph_set = *c->plan->glyphset_gsub ();
hb_sorted_vector_t<hb_pair_t<hb_codepoint_t, hb_codepoint_t>> glyph_and_klass;
hb_set_t orig_klasses;
- unsigned num_source_glyphs = c->plan->source->get_num_glyphs ();
- unsigned count = rangeRecord.len;
- for (unsigned i = 0; i < count; i++)
+ if (glyph_set.get_population () * hb_bit_storage ((unsigned) rangeRecord.len) / 2
+ < get_population ())
{
- unsigned klass = rangeRecord[i].value;
- if (!klass) continue;
- hb_codepoint_t start = rangeRecord[i].first;
- hb_codepoint_t end = hb_min (rangeRecord[i].last + 1, num_source_glyphs);
- for (hb_codepoint_t g = start; g < end; g++)
+ for (hb_codepoint_t g : glyph_set)
{
- hb_codepoint_t new_gid = glyph_map[g];
+ unsigned klass = get_class (g);
+ if (!klass) continue;
+ hb_codepoint_t new_gid = glyph_map[g];
if (new_gid == HB_MAP_VALUE_INVALID) continue;
- if (glyph_filter && !glyph_filter->has (g)) continue;
-
+ if (glyph_filter && !glyph_filter->has (g)) continue;
glyph_and_klass.push (hb_pair (new_gid, klass));
orig_klasses.add (klass);
}
}
+ else
+ {
+ unsigned num_source_glyphs = c->plan->source->get_num_glyphs ();
+ for (auto &range : rangeRecord)
+ {
+ unsigned klass = range.value;
+ if (!klass) continue;
+ hb_codepoint_t start = range.first;
+ hb_codepoint_t end = hb_min (range.last + 1, num_source_glyphs);
+ for (hb_codepoint_t g = start; g < end; g++)
+ {
+ hb_codepoint_t new_gid = glyph_map[g];
+ if (new_gid == HB_MAP_VALUE_INVALID) continue;
+ if (glyph_filter && !glyph_filter->has (g)) continue;
+
+ glyph_and_klass.push (hb_pair (new_gid, klass));
+ orig_klasses.add (klass);
+ }
+ }
+ }
const hb_set_t& glyphset = *c->plan->glyphset_gsub ();
unsigned glyph_count = glyph_filter
@@ -1767,10 +1879,9 @@ struct ClassDefFormat2_4
template <typename set_t>
bool collect_coverage (set_t *glyphs) const
{
- unsigned int count = rangeRecord.len;
- for (unsigned int i = 0; i < count; i++)
- if (rangeRecord[i].value)
- if (unlikely (!rangeRecord[i].collect_coverage (glyphs)))
+ for (auto &range : rangeRecord)
+ if (range.value)
+ if (unlikely (!range.collect_coverage (glyphs)))
return false;
return true;
}
@@ -1778,11 +1889,10 @@ struct ClassDefFormat2_4
template <typename set_t>
bool collect_class (set_t *glyphs, unsigned int klass) const
{
- unsigned int count = rangeRecord.len;
- for (unsigned int i = 0; i < count; i++)
+ for (auto &range : rangeRecord)
{
- if (rangeRecord[i].value == klass)
- if (unlikely (!rangeRecord[i].collect_coverage (glyphs)))
+ if (range.value == klass)
+ if (unlikely (!range.collect_coverage (glyphs)))
return false;
}
return true;
@@ -1790,32 +1900,32 @@ struct ClassDefFormat2_4
bool intersects (const hb_set_t *glyphs) const
{
- /* TODO Speed up, using hb_set_next() and bsearch()? */
- unsigned int count = rangeRecord.len;
- for (unsigned int i = 0; i < count; i++)
+ if (rangeRecord.len > glyphs->get_population () * hb_bit_storage ((unsigned) rangeRecord.len) / 2)
{
- const auto& range = rangeRecord[i];
- if (range.intersects (*glyphs) && range.value)
- return true;
+ for (hb_codepoint_t g = HB_SET_VALUE_INVALID; glyphs->next (&g);)
+ if (get_class (g))
+ return true;
+ return false;
}
- return false;
+
+ return hb_any (+ hb_iter (rangeRecord)
+ | hb_map ([glyphs] (const RangeRecord<Types> &range) { return range.intersects (*glyphs) && range.value; }));
}
bool intersects_class (const hb_set_t *glyphs, uint16_t klass) const
{
- unsigned int count = rangeRecord.len;
if (klass == 0)
{
/* Match if there's any glyph that is not listed! */
hb_codepoint_t g = HB_SET_VALUE_INVALID;
- for (unsigned int i = 0; i < count; i++)
+ for (auto &range : rangeRecord)
{
- if (!hb_set_next (glyphs, &g))
+ if (!glyphs->next (&g))
break;
- if (g < rangeRecord[i].first)
+ if (g < range.first)
return true;
- g = rangeRecord[i].last;
+ g = range.last;
}
- if (g != HB_SET_VALUE_INVALID && hb_set_next (glyphs, &g))
+ if (g != HB_SET_VALUE_INVALID && glyphs->next (&g))
return true;
/* Fall through. */
}
@@ -1827,49 +1937,49 @@ struct ClassDefFormat2_4
void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
{
- unsigned count = rangeRecord.len;
if (klass == 0)
{
hb_codepoint_t g = HB_SET_VALUE_INVALID;
- for (unsigned int i = 0; i < count; i++)
+ for (auto &range : rangeRecord)
{
- if (!hb_set_next (glyphs, &g))
+ if (!glyphs->next (&g))
goto done;
- while (g < rangeRecord[i].first)
+ while (g < range.first)
{
intersect_glyphs->add (g);
- if (!hb_set_next (glyphs, &g))
+ if (!glyphs->next (&g))
goto done;
}
- g = rangeRecord[i].last;
+ g = range.last;
}
- while (hb_set_next (glyphs, &g))
+ while (glyphs->next (&g))
intersect_glyphs->add (g);
done:
return;
}
-#if 0
- /* The following implementation is faster asymptotically, but slower
- * in practice. */
- if ((count >> 3) > glyphs->get_population ())
+ unsigned count = rangeRecord.len;
+ if (count > glyphs->get_population () * hb_bit_storage (count) * 8)
{
for (hb_codepoint_t g = HB_SET_VALUE_INVALID;
- hb_set_next (glyphs, &g);)
- if (rangeRecord.as_array ().bfind (g))
+ glyphs->next (&g);)
+ {
+ unsigned i;
+ if (rangeRecord.as_array ().bfind (g, &i) &&
+ rangeRecord.arrayZ[i].value == klass)
intersect_glyphs->add (g);
+ }
return;
}
-#endif
- for (unsigned int i = 0; i < count; i++)
+ for (auto &range : rangeRecord)
{
- if (rangeRecord[i].value != klass) continue;
+ if (range.value != klass) continue;
- unsigned end = rangeRecord[i].last + 1;
- for (hb_codepoint_t g = rangeRecord[i].first - 1;
- hb_set_next (glyphs, &g) && g < end;)
+ unsigned end = range.last + 1;
+ for (hb_codepoint_t g = range.first - 1;
+ glyphs->next (&g) && g < end;)
intersect_glyphs->add (g);
}
}
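
The same population heuristic now governs intersected_class_glyphs(): a small glyph set is walked and each glyph binary-searched (bfind) in the range array, while the fallback keeps the range-major loop that only visits set members inside each matching range. A minimal sketch of that range-major collection loop (illustrative types, not the HarfBuzz containers):

#include <cstdint>
#include <set>
#include <vector>

struct Range { uint32_t first, last, value; };

// Collect the glyphs of a given class: for each range carrying that class,
// visit only the set members that fall inside it.
static void intersected_class_glyphs (const std::vector<Range> &ranges,
                                      const std::set<uint32_t> &glyphs,
                                      unsigned klass,
                                      std::set<uint32_t> &out)
{
  for (const Range &r : ranges)
  {
    if (r.value != klass) continue;
    for (auto it = glyphs.lower_bound (r.first); it != glyphs.end () && *it <= r.last; ++it)
      out.insert (*it);
  }
}
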
@@ -1878,25 +1988,24 @@ struct ClassDefFormat2_4
{
if (glyphs->is_empty ()) return;
- unsigned count = rangeRecord.len;
hb_codepoint_t g = HB_SET_VALUE_INVALID;
- for (unsigned int i = 0; i < count; i++)
+ for (auto &range : rangeRecord)
{
- if (!hb_set_next (glyphs, &g))
+ if (!glyphs->next (&g))
break;
- if (g < rangeRecord[i].first)
+ if (g < range.first)
{
intersect_classes->add (0);
break;
}
- g = rangeRecord[i].last;
+ g = range.last;
}
- if (g != HB_SET_VALUE_INVALID && hb_set_next (glyphs, &g))
+ if (g != HB_SET_VALUE_INVALID && glyphs->next (&g))
intersect_classes->add (0);
- for (const auto& record : rangeRecord.iter ())
- if (record.intersects (*glyphs))
- intersect_classes->add (record.value);
+ for (const auto& range : rangeRecord)
+ if (range.intersects (*glyphs))
+ intersect_classes->add (range.value);
}
protected:
@@ -1911,10 +2020,8 @@ struct ClassDefFormat2_4
struct ClassDef
{
/* Has interface. */
- static constexpr unsigned SENTINEL = 0;
- typedef unsigned int value_t;
- value_t operator [] (hb_codepoint_t k) const { return get (k); }
- bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; }
+ unsigned operator [] (hb_codepoint_t k) const { return get (k); }
+ bool has (hb_codepoint_t k) const { return (*this)[k]; }
/* Projection. */
hb_codepoint_t operator () (hb_codepoint_t k) const { return get (k); }
@@ -1932,6 +2039,19 @@ struct ClassDef
}
}
+ unsigned get_population () const
+ {
+ switch (u.format) {
+ case 1: return u.format1.get_population ();
+ case 2: return u.format2.get_population ();
+#ifndef HB_NO_BEYOND_64K
+ case 3: return u.format3.get_population ();
+ case 4: return u.format4.get_population ();
+#endif
+ default:return NOT_COVERED;
+ }
+ }
+
template<typename Iterator,
hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
bool serialize (hb_serialize_context_t *c, Iterator it_with_class_zero)
@@ -2249,7 +2369,7 @@ struct VarRegionList
{
unsigned int backward = region_map.backward (r);
if (backward >= region_count) return_trace (false);
- memcpy (&axesZ[axisCount * r], &src->axesZ[axisCount * backward], VarRegionAxis::static_size * axisCount);
+ hb_memcpy (&axesZ[axisCount * r], &src->axesZ[axisCount * backward], VarRegionAxis::static_size * axisCount);
}
return_trace (true);
@@ -2359,21 +2479,26 @@ struct VarData
unsigned ri_count = src->regionIndices.len;
enum delta_size_t { kZero=0, kNonWord, kWord };
hb_vector_t<delta_size_t> delta_sz;
- hb_vector_t<unsigned int> ri_map; /* maps old index to new index */
+ hb_vector_t<unsigned int> ri_map; /* maps new index to old index */
delta_sz.resize (ri_count);
ri_map.resize (ri_count);
unsigned int new_word_count = 0;
unsigned int r;
+ const HBUINT8 *src_delta_bytes = src->get_delta_bytes ();
+ unsigned src_row_size = src->get_row_size ();
+ unsigned src_word_count = src->wordCount ();
+ bool src_long_words = src->longWords ();
+
bool has_long = false;
- if (src->longWords ())
+ if (src_long_words)
{
- for (r = 0; r < ri_count; r++)
+ for (r = 0; r < src_word_count; r++)
{
for (unsigned int i = 0; i < inner_map.get_next_value (); i++)
{
unsigned int old = inner_map.backward (i);
- int32_t delta = src->get_item_delta (old, r);
+ int32_t delta = src->get_item_delta_fast (old, r, src_delta_bytes, src_row_size);
if (delta < -65536 || 65535 < delta)
{
has_long = true;
@@ -2387,11 +2512,13 @@ struct VarData
signed max_threshold = has_long ? +65535 : +127;
for (r = 0; r < ri_count; r++)
{
+ bool short_circuit = src_long_words == has_long && src_word_count <= r;
+
delta_sz[r] = kZero;
for (unsigned int i = 0; i < inner_map.get_next_value (); i++)
{
unsigned int old = inner_map.backward (i);
- int32_t delta = src->get_item_delta (old, r);
+ int32_t delta = src->get_item_delta_fast (old, r, src_delta_bytes, src_row_size);
if (delta < min_threshold || max_threshold < delta)
{
delta_sz[r] = kWord;
@@ -2399,7 +2526,11 @@ struct VarData
break;
}
else if (delta != 0)
+ {
delta_sz[r] = kNonWord;
+ if (short_circuit)
+ break;
+ }
}
}
@@ -2409,7 +2540,8 @@ struct VarData
for (r = 0; r < ri_count; r++)
if (delta_sz[r])
{
- ri_map[r] = (delta_sz[r] == kWord)? word_index++ : non_word_index++;
+ unsigned new_r = (delta_sz[r] == kWord)? word_index++ : non_word_index++;
+ ri_map[new_r] = r;
new_ri_count++;
}
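
Note the inversion above: ri_map used to map old region index to new index, and now maps each slot of the new layout (word-sized delta columns first, then byte-sized ones) back to its source index, which is what the copy loops further down iterate over. A small sketch of building such a map (hypothetical types, simplified from the code above):

#include <cstdint>
#include <vector>

enum delta_size_t { kZero = 0, kNonWord, kWord };

// Build a new->old region index map: retained word-sized columns first, then the
// narrower ones; all-zero columns are dropped entirely.
static std::vector<unsigned> build_ri_map (const std::vector<delta_size_t> &delta_sz,
                                           unsigned new_word_count)
{
  unsigned retained = 0;
  for (delta_size_t d : delta_sz) if (d) retained++;

  std::vector<unsigned> ri_map (retained);
  unsigned word_index = 0;                    // next slot for word-sized columns
  unsigned non_word_index = new_word_count;   // narrow columns start after the words
  for (unsigned old_r = 0; old_r < (unsigned) delta_sz.size (); old_r++)
  {
    if (!delta_sz[old_r]) continue;
    unsigned new_r = (delta_sz[old_r] == kWord) ? word_index++ : non_word_index++;
    ri_map[new_r] = old_r;                    // new index -> old index
  }
  return ri_map;
}
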
@@ -2419,14 +2551,20 @@ struct VarData
if (unlikely (!c->extend (this))) return_trace (false);
- for (r = 0; r < ri_count; r++)
- if (delta_sz[r]) regionIndices[ri_map[r]] = region_map[src->regionIndices[r]];
+ for (r = 0; r < new_ri_count; r++)
+ regionIndices[r] = region_map[src->regionIndices[ri_map[r]]];
- for (unsigned int i = 0; i < itemCount; i++)
+ HBUINT8 *delta_bytes = get_delta_bytes ();
+ unsigned row_size = get_row_size ();
+ unsigned count = itemCount;
+ for (unsigned int i = 0; i < count; i++)
{
- unsigned int old = inner_map.backward (i);
- for (unsigned int r = 0; r < ri_count; r++)
- if (delta_sz[r]) set_item_delta (i, ri_map[r], src->get_item_delta (old, r));
+ unsigned int old = inner_map.backward (i);
+ for (unsigned int r = 0; r < new_ri_count; r++)
+ set_item_delta_fast (i, r,
+ src->get_item_delta_fast (old, ri_map[r],
+ src_delta_bytes, src_row_size),
+ delta_bytes, row_size);
}
return_trace (true);
@@ -2434,12 +2572,15 @@ struct VarData
void collect_region_refs (hb_set_t &region_indices, const hb_inc_bimap_t &inner_map) const
{
+ const HBUINT8 *delta_bytes = get_delta_bytes ();
+ unsigned row_size = get_row_size ();
+
for (unsigned int r = 0; r < regionIndices.len; r++)
{
- unsigned int region = regionIndices[r];
+ unsigned int region = regionIndices.arrayZ[r];
if (region_indices.has (region)) continue;
for (unsigned int i = 0; i < inner_map.get_next_value (); i++)
- if (get_item_delta (inner_map.backward (i), r) != 0)
+ if (get_item_delta_fast (inner_map.backward (i), r, delta_bytes, row_size) != 0)
{
region_indices.add (region);
break;
@@ -2454,10 +2595,12 @@ struct VarData
HBUINT8 *get_delta_bytes ()
{ return &StructAfter<HBUINT8> (regionIndices); }
- int32_t get_item_delta (unsigned int item, unsigned int region) const
+ int32_t get_item_delta_fast (unsigned int item, unsigned int region,
+ const HBUINT8 *delta_bytes, unsigned row_size) const
{
- if ( item >= itemCount || unlikely (region >= regionIndices.len)) return 0;
- const HBINT8 *p = (const HBINT8 *) get_delta_bytes () + item * get_row_size ();
+ if (unlikely (item >= itemCount || region >= regionIndices.len)) return 0;
+
+ const HBINT8 *p = (const HBINT8 *) delta_bytes + item * row_size;
unsigned word_count = wordCount ();
bool is_long = longWords ();
if (is_long)
@@ -2475,10 +2618,17 @@ struct VarData
return (p + HBINT16::static_size * word_count)[region - word_count];
}
}
+ int32_t get_item_delta (unsigned int item, unsigned int region) const
+ {
+ return get_item_delta_fast (item, region,
+ get_delta_bytes (),
+ get_row_size ());
+ }
- void set_item_delta (unsigned int item, unsigned int region, int32_t delta)
+ void set_item_delta_fast (unsigned int item, unsigned int region, int32_t delta,
+ HBUINT8 *delta_bytes, unsigned row_size)
{
- HBINT8 *p = (HBINT8 *)get_delta_bytes () + item * get_row_size ();
+ HBINT8 *p = (HBINT8 *) delta_bytes + item * row_size;
unsigned word_count = wordCount ();
bool is_long = longWords ();
if (is_long)
@@ -2496,6 +2646,12 @@ struct VarData
(p + HBINT16::static_size * word_count)[region - word_count] = delta;
}
}
+ void set_item_delta (unsigned int item, unsigned int region, int32_t delta)
+ {
+ set_item_delta_fast (item, region, delta,
+ get_delta_bytes (),
+ get_row_size ());
+ }
bool longWords () const { return wordSizeCount & 0x8000u /* LONG_WORDS */; }
unsigned wordCount () const { return wordSizeCount & 0x7FFFu /* WORD_DELTA_COUNT_MASK */; }
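
For orientation, each VarData row packs its word-sized deltas first and the narrower ones after: int16 then int8 normally, or int32 then int16 when the LONG_WORDS flag is set; the *_fast variants above simply take the row base and row size as parameters so the hot copy loops don't recompute them per element. A hedged sketch of reading one delta from such a row (host byte order; the real table data is big-endian and goes through the HBINT wrappers):

#include <cstdint>
#include <cstring>

// Read the delta for (item, region) from a packed delta matrix.
// Row layout: word_count wide deltas, then the narrow ones.
static int32_t get_item_delta (const uint8_t *delta_bytes, unsigned row_size,
                               unsigned item, unsigned region,
                               unsigned word_count, bool long_words)
{
  const uint8_t *row = delta_bytes + (size_t) item * row_size;
  if (long_words)
  {
    if (region < word_count)
    { int32_t v; std::memcpy (&v, row + 4 * region, 4); return v; }
    int16_t v; std::memcpy (&v, row + 4 * word_count + 2 * (region - word_count), 2); return v;
  }
  if (region < word_count)
  { int16_t v; std::memcpy (&v, row + 2 * region, 2); return v; }
  int8_t v; std::memcpy (&v, row + 2 * word_count + (region - word_count), 1); return v;
}
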
@@ -2559,6 +2715,14 @@ struct VariationStore
unsigned int inner = index & 0xFFFF;
return get_delta (outer, inner, coords, coord_count, cache);
}
+ float get_delta (unsigned int index,
+ hb_array_t<int> coords,
+ VarRegionList::cache_t *cache = nullptr) const
+ {
+ return get_delta (index,
+ coords.arrayZ, coords.length,
+ cache);
+ }
bool sanitize (hb_sanitize_context_t *c) const
{
@@ -2692,6 +2856,13 @@ struct VariationStore
/*
* Feature Variations
*/
+enum Cond_with_Var_flag_t
+{
+ KEEP_COND_WITH_VAR = 0,
+ DROP_COND_WITH_VAR = 1,
+ DROP_RECORD_WITH_VAR = 2,
+ MEM_ERR_WITH_VAR = 3,
+};
struct ConditionFormat1
{
@@ -2702,10 +2873,52 @@ struct ConditionFormat1
TRACE_SUBSET (this);
auto *out = c->serializer->embed (this);
if (unlikely (!out)) return_trace (false);
- return_trace (true);
+
+ const hb_map_t *index_map = c->plan->axes_index_map;
+ if (index_map->is_empty ()) return_trace (true);
+
+ if (!index_map->has (axisIndex))
+ return_trace (false);
+
+ return_trace (c->serializer->check_assign (out->axisIndex, index_map->get (axisIndex),
+ HB_SERIALIZE_ERROR_INT_OVERFLOW));
}
private:
+ Cond_with_Var_flag_t keep_with_variations (hb_collect_feature_substitutes_with_var_context_t *c,
+ hb_map_t *condition_map /* OUT */) const
+ {
+ //invalid axis index, drop the entire record
+ if (!c->axes_index_tag_map->has (axisIndex))
+ return DROP_RECORD_WITH_VAR;
+
+ hb_tag_t axis_tag = c->axes_index_tag_map->get (axisIndex);
+
+ //axis not pinned, keep the condition
+ if (!c->axes_location->has (axis_tag))
+ {
+ // add axisIndex->value into the hashmap so we can check if the record is
+ // unique with variations
+ int16_t min_val = filterRangeMinValue;
+ int16_t max_val = filterRangeMaxValue;
+ hb_codepoint_t val = (max_val << 16) + min_val;
+
+ condition_map->set (axisIndex, val);
+ return KEEP_COND_WITH_VAR;
+ }
+
+ //axis pinned, check if condition is met
+ //TODO: add check for axis Ranges
+ int v = c->axes_location->get (axis_tag);
+
+ //condition not met, drop the entire record
+ if (v < filterRangeMinValue || v > filterRangeMaxValue)
+ return DROP_RECORD_WITH_VAR;
+
+ //axis pinned and condition met, drop the condition
+ return DROP_COND_WITH_VAR;
+ }
+
bool evaluate (const int *coords, unsigned int coord_len) const
{
int coord = axisIndex < coord_len ? coords[axisIndex] : 0;
@@ -2737,6 +2950,15 @@ struct Condition
}
}
+ Cond_with_Var_flag_t keep_with_variations (hb_collect_feature_substitutes_with_var_context_t *c,
+ hb_map_t *condition_map /* OUT */) const
+ {
+ switch (u.format) {
+ case 1: return u.format1.keep_with_variations (c, condition_map);
+ default:return KEEP_COND_WITH_VAR;
+ }
+ }
+
template <typename context_t, typename ...Ts>
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
{
@@ -2778,15 +3000,65 @@ struct ConditionSet
return true;
}
- bool subset (hb_subset_context_t *c) const
+ Cond_with_Var_flag_t keep_with_variations (hb_collect_feature_substitutes_with_var_context_t *c) const
+ {
+ hb_map_t *condition_map = hb_map_create ();
+ if (unlikely (!condition_map)) return MEM_ERR_WITH_VAR;
+ hb::shared_ptr<hb_map_t> p {condition_map};
+
+ hb_set_t *cond_set = hb_set_create ();
+ if (unlikely (!cond_set)) return MEM_ERR_WITH_VAR;
+ hb::shared_ptr<hb_set_t> s {cond_set};
+
+ unsigned num_kept_cond = 0, cond_idx = 0;
+ for (const auto& offset : conditions)
+ {
+ Cond_with_Var_flag_t ret = (this+offset).keep_with_variations (c, condition_map);
+ // one condition is not met, drop the entire record
+ if (ret == DROP_RECORD_WITH_VAR)
+ return DROP_RECORD_WITH_VAR;
+
+ // axis not pinned, keep this condition
+ if (ret == KEEP_COND_WITH_VAR)
+ {
+ cond_set->add (cond_idx);
+ num_kept_cond++;
+ }
+ cond_idx++;
+ }
+
+ // all conditions met
+ if (num_kept_cond == 0) return DROP_COND_WITH_VAR;
+
+ //check if condition_set is unique with variations
+ if (c->conditionset_map->has (p))
+ //duplicate found, drop the entire record
+ return DROP_RECORD_WITH_VAR;
+
+ c->conditionset_map->set (p, 1);
+ c->record_cond_idx_map->set (c->cur_record_idx, s);
+
+ return KEEP_COND_WITH_VAR;
+ }
+
+ bool subset (hb_subset_context_t *c,
+ hb_subset_layout_context_t *l) const
{
TRACE_SUBSET (this);
auto *out = c->serializer->start_embed (this);
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
- + conditions.iter ()
- | hb_apply (subset_offset_array (c, out->conditions, this))
- ;
+ hb_set_t *retained_cond_set = nullptr;
+ if (l->feature_record_cond_idx_map != nullptr)
+ retained_cond_set = l->feature_record_cond_idx_map->get (l->cur_feature_var_record_idx);
+
+ unsigned int count = conditions.len;
+ for (unsigned int i = 0; i < count; i++)
+ {
+ if (retained_cond_set != nullptr && !retained_cond_set->has (i))
+ continue;
+ subset_offset_array (c, out->conditions, this) (conditions[i]);
+ }
return_trace (bool (out->conditions));
}
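
ConditionSet::keep_with_variations() above classifies each condition against the pinned axes and fingerprints the surviving ones, packing each unpinned axis range into a single 32-bit value keyed by axis index so duplicate condition sets can be dropped with a plain map comparison. A simplified sketch of the per-condition decision (the axis-index-to-tag validation is omitted; masking to 16 bits avoids sign-extension when packing):

#include <cstdint>
#include <map>

enum keep_t { KEEP_COND, DROP_COND, DROP_RECORD };

// One axis-range condition, mirroring ConditionFormat1's fields.
struct cond_t { unsigned axis; int16_t min_val, max_val; };

static keep_t evaluate_one (const cond_t &c,
                            const std::map<unsigned, int> &pinned,       // axis -> pinned value
                            std::map<unsigned, uint32_t> &fingerprint)   // axis -> packed range
{
  auto it = pinned.find (c.axis);
  if (it == pinned.end ())
  {
    // Axis still variable: keep the condition and remember its range for dedup.
    fingerprint[c.axis] = (uint32_t (uint16_t (c.max_val)) << 16) | uint16_t (c.min_val);
    return KEEP_COND;
  }
  int v = it->second;
  return (v < c.min_val || v > c.max_val) ? DROP_RECORD : DROP_COND;
}

A record is dropped if any condition returns DROP_RECORD, kept if at least one returns KEEP_COND, and, when every condition is satisfied, its substitutions are recorded as the new defaults (see collect_feature_substitutes_with_variations below).
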
@@ -2820,10 +3092,19 @@ struct FeatureTableSubstitutionRecord
feature_indexes->add (featureIndex);
}
+ void collect_feature_substitutes_with_variations (hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map,
+ const hb_set_t *feature_indices,
+ const void *base) const
+ {
+ if (feature_indices->has (featureIndex))
+ feature_substitutes_map->set (featureIndex, &(base+feature));
+ }
+
bool subset (hb_subset_layout_context_t *c, const void *base) const
{
TRACE_SUBSET (this);
- if (!c->feature_index_map->has (featureIndex)) {
+ if (!c->feature_index_map->has (featureIndex) ||
+ c->feature_substitutes_map->has (featureIndex)) {
// Feature that is being substituted is not being retained, so we don't
// need this.
return_trace (false);
@@ -2865,10 +3146,16 @@ struct FeatureTableSubstitution
}
void collect_lookups (const hb_set_t *feature_indexes,
+ const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map,
hb_set_t *lookup_indexes /* OUT */) const
{
+ hb_iter (substitutions)
| hb_filter (feature_indexes, &FeatureTableSubstitutionRecord::featureIndex)
+ | hb_filter ([feature_substitutes_map] (const FeatureTableSubstitutionRecord& record)
+ {
+ if (feature_substitutes_map == nullptr) return true;
+ return !feature_substitutes_map->has (record.featureIndex);
+ })
| hb_apply ([this, lookup_indexes] (const FeatureTableSubstitutionRecord& r)
{ r.collect_lookups (this, lookup_indexes); })
;
@@ -2890,6 +3177,12 @@ struct FeatureTableSubstitution
return false;
}
+ void collect_feature_substitutes_with_variations (hb_collect_feature_substitutes_with_var_context_t *c) const
+ {
+ for (const FeatureTableSubstitutionRecord& record : substitutions)
+ record.collect_feature_substitutes_with_variations (c->feature_substitutes_map, c->feature_indices, this);
+ }
+
bool subset (hb_subset_context_t *c,
hb_subset_layout_context_t *l) const
{
@@ -2929,9 +3222,10 @@ struct FeatureVariationRecord
void collect_lookups (const void *base,
const hb_set_t *feature_indexes,
+ const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map,
hb_set_t *lookup_indexes /* OUT */) const
{
- return (base+substitutions).collect_lookups (feature_indexes, lookup_indexes);
+ return (base+substitutions).collect_lookups (feature_indexes, feature_substitutes_map, lookup_indexes);
}
void closure_features (const void *base,
@@ -2946,13 +3240,25 @@ struct FeatureVariationRecord
return (base+substitutions).intersects_features (feature_index_map);
}
+ void collect_feature_substitutes_with_variations (hb_collect_feature_substitutes_with_var_context_t *c,
+ const void *base) const
+ {
+ // ret == 1, all conditions met
+ if ((base+conditions).keep_with_variations (c) == DROP_COND_WITH_VAR &&
+ c->apply)
+ {
+ (base+substitutions).collect_feature_substitutes_with_variations (c);
+ c->apply = false; // set variations only once
+ }
+ }
+
bool subset (hb_subset_layout_context_t *c, const void *base) const
{
TRACE_SUBSET (this);
auto *out = c->subset_context->serializer->embed (this);
if (unlikely (!out)) return_trace (false);
- out->conditions.serialize_subset (c->subset_context, conditions, base);
+ out->conditions.serialize_subset (c->subset_context, conditions, base, c);
out->substitutions.serialize_subset (c->subset_context, substitutions, base, c);
return_trace (true);
@@ -3002,6 +3308,16 @@ struct FeatureVariations
return (this+record.substitutions).find_substitute (feature_index);
}
+ void collect_feature_substitutes_with_variations (hb_collect_feature_substitutes_with_var_context_t *c) const
+ {
+ unsigned int count = varRecords.len;
+ for (unsigned int i = 0; i < count; i++)
+ {
+ c->cur_record_idx = i;
+ varRecords[i].collect_feature_substitutes_with_variations (c, this);
+ }
+ }
+
FeatureVariations* copy (hb_serialize_context_t *c) const
{
TRACE_SERIALIZE (this);
@@ -3009,17 +3325,25 @@ struct FeatureVariations
}
void collect_lookups (const hb_set_t *feature_indexes,
+ const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map,
hb_set_t *lookup_indexes /* OUT */) const
{
for (const FeatureVariationRecord& r : varRecords)
- r.collect_lookups (this, feature_indexes, lookup_indexes);
+ r.collect_lookups (this, feature_indexes, feature_substitutes_map, lookup_indexes);
}
void closure_features (const hb_map_t *lookup_indexes,
+ const hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map,
hb_set_t *feature_indexes /* OUT */) const
{
- for (const FeatureVariationRecord& record : varRecords)
- record.closure_features (this, lookup_indexes, feature_indexes);
+ unsigned int count = varRecords.len;
+ for (unsigned int i = 0; i < count; i++)
+ {
+ if (feature_record_cond_idx_map != nullptr &&
+ !feature_record_cond_idx_map->has (i))
+ continue;
+ varRecords[i].closure_features (this, lookup_indexes, feature_indexes);
+ }
}
bool subset (hb_subset_context_t *c,
@@ -3041,7 +3365,13 @@ struct FeatureVariations
}
unsigned count = (unsigned) (keep_up_to + 1);
- for (unsigned i = 0; i < count; i++) {
+ for (unsigned i = 0; i < count; i++)
+ {
+ if (l->feature_record_cond_idx_map != nullptr &&
+ !l->feature_record_cond_idx_map->has (i))
+ continue;
+
+ l->cur_feature_var_record_idx = i;
subset_record_array (l, &(out->varRecords), this) (varRecords[i]);
}
return_trace (bool (out->varRecords));
@@ -3171,17 +3501,16 @@ struct VariationDevice
{
TRACE_SERIALIZE (this);
if (!layout_variation_idx_delta_map) return_trace (nullptr);
- auto snap = c->snapshot ();
+
+ hb_pair_t<unsigned, int> *v;
+ if (!layout_variation_idx_delta_map->has (varIdx, &v))
+ return_trace (nullptr);
+
+ c->start_zerocopy (this->static_size);
auto *out = c->embed (this);
if (unlikely (!out)) return_trace (nullptr);
- /* TODO Just get() and bail if NO_VARIATION. Needs to setup the map to return that. */
- if (!layout_variation_idx_delta_map->has (varIdx))
- {
- c->revert (snap);
- return_trace (nullptr);
- }
- unsigned new_idx = hb_first (layout_variation_idx_delta_map->get (varIdx));
+ unsigned new_idx = hb_first (*v);
out->varIdx = new_idx;
return_trace (out);
}
diff --git a/thirdparty/harfbuzz/src/hb-ot-layout-gdef-table.hh b/thirdparty/harfbuzz/src/hb-ot-layout-gdef-table.hh
index 22925fdfa9..a84edef162 100644
--- a/thirdparty/harfbuzz/src/hb-ot-layout-gdef-table.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-layout-gdef-table.hh
@@ -73,7 +73,7 @@ struct AttachList
if (point_count)
{
- + points.sub_array (start_offset, point_count)
+ + points.as_array ().sub_array (start_offset, point_count)
| hb_sink (hb_array (point_array, *point_count))
;
}
@@ -322,7 +322,7 @@ struct LigGlyph
{
if (caret_count)
{
- + carets.sub_array (start_offset, caret_count)
+ + carets.as_array ().sub_array (start_offset, caret_count)
| hb_map (hb_add (this))
| hb_map ([&] (const CaretValue &value) { return value.get_caret_value (font, direction, glyph_id, var_store); })
| hb_sink (hb_array (caret_array, *caret_count))
diff --git a/thirdparty/harfbuzz/src/hb-ot-layout-gsubgpos.hh b/thirdparty/harfbuzz/src/hb-ot-layout-gsubgpos.hh
index c15a42b0f1..04ccefd108 100644
--- a/thirdparty/harfbuzz/src/hb-ot-layout-gsubgpos.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-layout-gsubgpos.hh
@@ -100,8 +100,8 @@ struct hb_closure_context_t :
bool is_lookup_done (unsigned int lookup_index)
{
- if (done_lookups_glyph_count->in_error () ||
- done_lookups_glyph_set->in_error ())
+ if (unlikely (done_lookups_glyph_count->in_error () ||
+ done_lookups_glyph_set->in_error ()))
return true;
/* Have we visited this lookup with the current set of glyphs? */
@@ -535,7 +535,12 @@ struct hb_ot_apply_context_t :
bool next (unsigned *unsafe_to = nullptr)
{
assert (num_items > 0);
- while (idx + num_items < end)
+ /* The alternate condition below is faster at string boundaries,
+ * but produces subpar "unsafe-to-concat" values. */
+ signed stop = (signed) end - (signed) num_items;
+ if (c->buffer->flags & HB_BUFFER_FLAG_PRODUCE_UNSAFE_TO_CONCAT)
+ stop = (signed) end - 1;
+ while ((signed) idx < stop)
{
idx++;
hb_glyph_info_t &info = c->buffer->info[idx];
@@ -568,7 +573,12 @@ struct hb_ot_apply_context_t :
bool prev (unsigned *unsafe_from = nullptr)
{
assert (num_items > 0);
- while (idx > num_items - 1)
+ /* The alternate condition below is faster at string boundaries,
+ * but produces subpar "unsafe-to-concat" values. */
+ unsigned stop = num_items - 1;
+ if (c->buffer->flags & HB_BUFFER_FLAG_PRODUCE_UNSAFE_TO_CONCAT)
+ stop = 1 - 1;
+ while (idx > stop)
{
idx--;
hb_glyph_info_t &info = c->buffer->out_info[idx];
@@ -672,6 +682,7 @@ struct hb_ot_apply_context_t :
const GDEF &gdef;
const VariationStore &var_store;
VariationStore::cache_t *var_store_cache;
+ hb_set_digest_t digest;
hb_direction_t direction;
hb_mask_t lookup_mask = 1;
@@ -707,6 +718,7 @@ struct hb_ot_apply_context_t :
nullptr
#endif
),
+ digest (buffer_->digest ()),
direction (buffer_->props.direction),
has_glyph_classes (gdef.has_glyph_classes ())
{ init_iters (); }
@@ -781,8 +793,10 @@ struct hb_ot_apply_context_t :
void _set_glyph_class (hb_codepoint_t glyph_index,
unsigned int class_guess = 0,
bool ligature = false,
- bool component = false) const
+ bool component = false)
{
+ digest.add (glyph_index);
+
if (new_syllables != (unsigned) -1)
buffer->cur().syllable() = new_syllables;
@@ -815,24 +829,24 @@ struct hb_ot_apply_context_t :
_hb_glyph_info_set_glyph_props (&buffer->cur(), props);
}
- void replace_glyph (hb_codepoint_t glyph_index) const
+ void replace_glyph (hb_codepoint_t glyph_index)
{
_set_glyph_class (glyph_index);
(void) buffer->replace_glyph (glyph_index);
}
- void replace_glyph_inplace (hb_codepoint_t glyph_index) const
+ void replace_glyph_inplace (hb_codepoint_t glyph_index)
{
_set_glyph_class (glyph_index);
buffer->cur().codepoint = glyph_index;
}
void replace_glyph_with_ligature (hb_codepoint_t glyph_index,
- unsigned int class_guess) const
+ unsigned int class_guess)
{
_set_glyph_class (glyph_index, class_guess, true);
(void) buffer->replace_glyph (glyph_index);
}
void output_glyph_for_component (hb_codepoint_t glyph_index,
- unsigned int class_guess) const
+ unsigned int class_guess)
{
_set_glyph_class (glyph_index, class_guess, false, true);
(void) buffer->output_glyph (glyph_index);
@@ -844,7 +858,7 @@ struct hb_accelerate_subtables_context_t :
hb_dispatch_context_t<hb_accelerate_subtables_context_t>
{
template <typename Type>
- static inline bool apply_to (const void *obj, OT::hb_ot_apply_context_t *c)
+ static inline bool apply_to (const void *obj, hb_ot_apply_context_t *c)
{
const Type *typed_obj = (const Type *) obj;
return typed_obj->apply (c);
@@ -852,30 +866,30 @@ struct hb_accelerate_subtables_context_t :
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
template <typename T>
- static inline auto apply_cached_ (const T *obj, OT::hb_ot_apply_context_t *c, hb_priority<1>) HB_RETURN (bool, obj->apply (c, true) )
+ static inline auto apply_cached_ (const T *obj, hb_ot_apply_context_t *c, hb_priority<1>) HB_RETURN (bool, obj->apply (c, true) )
template <typename T>
- static inline auto apply_cached_ (const T *obj, OT::hb_ot_apply_context_t *c, hb_priority<0>) HB_RETURN (bool, obj->apply (c) )
+ static inline auto apply_cached_ (const T *obj, hb_ot_apply_context_t *c, hb_priority<0>) HB_RETURN (bool, obj->apply (c) )
template <typename Type>
- static inline bool apply_cached_to (const void *obj, OT::hb_ot_apply_context_t *c)
+ static inline bool apply_cached_to (const void *obj, hb_ot_apply_context_t *c)
{
const Type *typed_obj = (const Type *) obj;
return apply_cached_ (typed_obj, c, hb_prioritize);
}
template <typename T>
- static inline auto cache_func_ (const T *obj, OT::hb_ot_apply_context_t *c, bool enter, hb_priority<1>) HB_RETURN (bool, obj->cache_func (c, enter) )
+ static inline auto cache_func_ (const T *obj, hb_ot_apply_context_t *c, bool enter, hb_priority<1>) HB_RETURN (bool, obj->cache_func (c, enter) )
template <typename T>
- static inline bool cache_func_ (const T *obj, OT::hb_ot_apply_context_t *c, bool enter, hb_priority<0>) { return false; }
+ static inline bool cache_func_ (const T *obj, hb_ot_apply_context_t *c, bool enter, hb_priority<0>) { return false; }
template <typename Type>
- static inline bool cache_func_to (const void *obj, OT::hb_ot_apply_context_t *c, bool enter)
+ static inline bool cache_func_to (const void *obj, hb_ot_apply_context_t *c, bool enter)
{
const Type *typed_obj = (const Type *) obj;
return cache_func_ (typed_obj, c, enter, hb_prioritize);
}
#endif
- typedef bool (*hb_apply_func_t) (const void *obj, OT::hb_ot_apply_context_t *c);
- typedef bool (*hb_cache_func_t) (const void *obj, OT::hb_ot_apply_context_t *c, bool enter);
+ typedef bool (*hb_apply_func_t) (const void *obj, hb_ot_apply_context_t *c);
+ typedef bool (*hb_cache_func_t) (const void *obj, hb_ot_apply_context_t *c, bool enter);
struct hb_applicable_t
{
@@ -901,20 +915,20 @@ struct hb_accelerate_subtables_context_t :
obj_.get_coverage ().collect_coverage (&digest);
}
- bool apply (OT::hb_ot_apply_context_t *c) const
+ bool apply (hb_ot_apply_context_t *c) const
{
return digest.may_have (c->buffer->cur().codepoint) && apply_func (obj, c);
}
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
- bool apply_cached (OT::hb_ot_apply_context_t *c) const
+ bool apply_cached (hb_ot_apply_context_t *c) const
{
return digest.may_have (c->buffer->cur().codepoint) && apply_cached_func (obj, c);
}
- bool cache_enter (OT::hb_ot_apply_context_t *c) const
+ bool cache_enter (hb_ot_apply_context_t *c) const
{
return cache_func (obj, c, true);
}
- void cache_leave (OT::hb_ot_apply_context_t *c) const
+ void cache_leave (hb_ot_apply_context_t *c) const
{
cache_func (obj, c, false);
}
@@ -988,8 +1002,8 @@ struct hb_accelerate_subtables_context_t :
};
-typedef bool (*intersects_func_t) (const hb_set_t *glyphs, unsigned value, const void *data);
-typedef void (*intersected_glyphs_func_t) (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs);
+typedef bool (*intersects_func_t) (const hb_set_t *glyphs, unsigned value, const void *data, void *cache);
+typedef void (*intersected_glyphs_func_t) (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs, void *cache);
typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, unsigned value, const void *data);
typedef bool (*match_func_t) (hb_glyph_info_t &info, unsigned value, const void *data);
@@ -1012,16 +1026,25 @@ struct ChainContextApplyFuncs
};
-static inline bool intersects_glyph (const hb_set_t *glyphs, unsigned value, const void *data HB_UNUSED)
+static inline bool intersects_glyph (const hb_set_t *glyphs, unsigned value, const void *data HB_UNUSED, void *cache HB_UNUSED)
{
return glyphs->has (value);
}
-static inline bool intersects_class (const hb_set_t *glyphs, unsigned value, const void *data)
+static inline bool intersects_class (const hb_set_t *glyphs, unsigned value, const void *data, void *cache)
{
const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
- return class_def.intersects_class (glyphs, value);
+ hb_map_t *map = (hb_map_t *) cache;
+
+ hb_codepoint_t *cached_v;
+ if (map->has (value, &cached_v))
+ return *cached_v;
+
+ bool v = class_def.intersects_class (glyphs, value);
+ map->set (value, v);
+
+ return v;
}
-static inline bool intersects_coverage (const hb_set_t *glyphs, unsigned value, const void *data)
+static inline bool intersects_coverage (const hb_set_t *glyphs, unsigned value, const void *data, void *cache HB_UNUSED)
{
Offset16To<Coverage> coverage;
coverage = value;
@@ -1029,17 +1052,36 @@ static inline bool intersects_coverage (const hb_set_t *glyphs, unsigned value,
}
-static inline void intersected_glyph (const hb_set_t *glyphs HB_UNUSED, const void *data, unsigned value, hb_set_t *intersected_glyphs)
+static inline void intersected_glyph (const hb_set_t *glyphs HB_UNUSED, const void *data, unsigned value, hb_set_t *intersected_glyphs, HB_UNUSED void *cache)
{
unsigned g = reinterpret_cast<const HBUINT16 *>(data)[value];
intersected_glyphs->add (g);
}
-static inline void intersected_class_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs)
+
+using intersected_class_cache_t = hb_hashmap_t<unsigned, hb_set_t>;
+
+static inline void intersected_class_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs, void *cache)
{
const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
- class_def.intersected_class_glyphs (glyphs, value, intersected_glyphs);
+
+ intersected_class_cache_t *map = (intersected_class_cache_t *) cache;
+
+ hb_set_t *cached_v;
+ if (map->has (value, &cached_v))
+ {
+ intersected_glyphs->union_ (*cached_v);
+ return;
+ }
+
+ hb_set_t v;
+ class_def.intersected_class_glyphs (glyphs, value, &v);
+
+ intersected_glyphs->union_ (v);
+
+ map->set (value, std::move (v));
}
-static inline void intersected_coverage_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs)
+
+static inline void intersected_coverage_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs, HB_UNUSED void *cache)
{
Offset16To<Coverage> coverage;
coverage = value;
@@ -1052,10 +1094,11 @@ static inline bool array_is_subset_of (const hb_set_t *glyphs,
unsigned int count,
const HBUINT values[],
intersects_func_t intersects_func,
- const void *intersects_data)
+ const void *intersects_data,
+ void *cache)
{
for (const auto &_ : + hb_iter (values, count))
- if (!intersects_func (glyphs, _, intersects_data)) return false;
+ if (!intersects_func (glyphs, _, intersects_data, cache)) return false;
return true;
}
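
The new void *cache parameter threaded through these helpers exists so closure passes can memoize expensive per-class answers: intersects_class() caches a boolean per class value in an hb_map_t, and intersected_class_glyphs() caches whole glyph sets. A generic sketch of the same memoization idea (standard containers, not the HarfBuzz types):

#include <functional>
#include <unordered_map>

// Memoize an expensive per-class query for the duration of one closure pass;
// the same class value is typically probed many times while walking rules.
struct intersects_cache_t
{
  std::unordered_map<unsigned, bool> memo;

  bool intersects (unsigned klass, const std::function<bool (unsigned)> &compute)
  {
    auto it = memo.find (klass);
    if (it != memo.end ()) return it->second;   // hit: skip the ClassDef scan
    bool v = compute (klass);
    memo.emplace (klass, v);
    return v;
  }
};

The diff threads one such cache per ClassDef through ContextClosureLookupContext, and three of them (backtrack, input, lookahead) through the chaining variant further down.
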
@@ -1492,7 +1535,8 @@ static void context_closure_recurse_lookups (hb_closure_context_t *c,
unsigned value,
ContextFormat context_format,
const void *data,
- intersected_glyphs_func_t intersected_glyphs_func)
+ intersected_glyphs_func_t intersected_glyphs_func,
+ void *cache)
{
hb_set_t *covered_seq_indicies = hb_set_create ();
for (unsigned int i = 0; i < lookupCount; i++)
@@ -1513,7 +1557,7 @@ static void context_closure_recurse_lookups (hb_closure_context_t *c,
pos_glyphs.add (value);
break;
case ContextFormat::ClassBasedContext:
- intersected_glyphs_func (&c->parent_active_glyphs (), data, value, &pos_glyphs);
+ intersected_glyphs_func (&c->parent_active_glyphs (), data, value, &pos_glyphs, cache);
break;
case ContextFormat::CoverageBasedContext:
pos_glyphs.set (c->parent_active_glyphs ());
@@ -1530,7 +1574,7 @@ static void context_closure_recurse_lookups (hb_closure_context_t *c,
input_value = input[seqIndex - 1];
}
- intersected_glyphs_func (c->glyphs, input_data, input_value, &pos_glyphs);
+ intersected_glyphs_func (c->glyphs, input_data, input_value, &pos_glyphs, cache);
}
}
@@ -1710,6 +1754,8 @@ struct ContextClosureLookupContext
ContextClosureFuncs funcs;
ContextFormat context_format;
const void *intersects_data;
+ void *intersects_cache;
+ void *intersected_glyphs_cache;
};
struct ContextCollectGlyphsLookupContext
@@ -1732,7 +1778,9 @@ static inline bool context_intersects (const hb_set_t *glyphs,
{
return array_is_subset_of (glyphs,
inputCount ? inputCount - 1 : 0, input,
- lookup_context.funcs.intersects, lookup_context.intersects_data);
+ lookup_context.funcs.intersects,
+ lookup_context.intersects_data,
+ lookup_context.intersects_cache);
}
template <typename HBUINT>
@@ -1753,7 +1801,8 @@ static inline void context_closure_lookup (hb_closure_context_t *c,
value,
lookup_context.context_format,
lookup_context.intersects_data,
- lookup_context.funcs.intersected_glyphs);
+ lookup_context.funcs.intersected_glyphs,
+ lookup_context.intersected_glyphs_cache);
}
template <typename HBUINT>
@@ -1777,7 +1826,7 @@ static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
const HBUINT input[], /* Array of input values--start with second glyph */
unsigned int lookupCount HB_UNUSED,
const LookupRecord lookupRecord[] HB_UNUSED,
- ContextApplyLookupContext &lookup_context)
+ const ContextApplyLookupContext &lookup_context)
{
return would_match_input (c,
inputCount, input,
@@ -1790,7 +1839,7 @@ static inline bool context_apply_lookup (hb_ot_apply_context_t *c,
const HBUINT input[], /* Array of input values--start with second glyph */
unsigned int lookupCount,
const LookupRecord lookupRecord[],
- ContextApplyLookupContext &lookup_context)
+ const ContextApplyLookupContext &lookup_context)
{
unsigned match_end = 0;
unsigned match_positions[HB_MAX_CONTEXT_LENGTH];
@@ -1858,7 +1907,7 @@ struct Rule
}
bool would_apply (hb_would_apply_context_t *c,
- ContextApplyLookupContext &lookup_context) const
+ const ContextApplyLookupContext &lookup_context) const
{
const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
(inputZ.as_array (inputCount ? inputCount - 1 : 0));
@@ -1869,7 +1918,7 @@ struct Rule
}
bool apply (hb_ot_apply_context_t *c,
- ContextApplyLookupContext &lookup_context) const
+ const ContextApplyLookupContext &lookup_context) const
{
TRACE_APPLY (this);
const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
@@ -1989,7 +2038,7 @@ struct RuleSet
}
bool would_apply (hb_would_apply_context_t *c,
- ContextApplyLookupContext &lookup_context) const
+ const ContextApplyLookupContext &lookup_context) const
{
return
+ hb_iter (rule)
@@ -2000,7 +2049,7 @@ struct RuleSet
}
bool apply (hb_ot_apply_context_t *c,
- ContextApplyLookupContext &lookup_context) const
+ const ContextApplyLookupContext &lookup_context) const
{
TRACE_APPLY (this);
return_trace (
@@ -2108,7 +2157,7 @@ struct ContextFormat1_4
void closure_lookups (hb_closure_lookups_context_t *c) const
{
struct ContextClosureLookupContext lookup_context = {
- {intersects_glyph, intersected_glyph},
+ {intersects_glyph, nullptr},
ContextFormat::SimpleContext,
nullptr
};
@@ -2220,10 +2269,12 @@ struct ContextFormat2_5
const ClassDef &class_def = this+classDef;
+ hb_map_t cache;
struct ContextClosureLookupContext lookup_context = {
- {intersects_class, intersected_class_glyphs},
+ {intersects_class, nullptr},
ContextFormat::ClassBasedContext,
- &class_def
+ &class_def,
+ &cache
};
hb_set_t retained_coverage_glyphs;
@@ -2259,10 +2310,14 @@ struct ContextFormat2_5
const ClassDef &class_def = this+classDef;
+ hb_map_t cache;
+ intersected_class_cache_t intersected_cache;
struct ContextClosureLookupContext lookup_context = {
{intersects_class, intersected_class_glyphs},
ContextFormat::ClassBasedContext,
- &class_def
+ &class_def,
+ &cache,
+ &intersected_cache
};
+ hb_enumerate (ruleSet)
@@ -2286,10 +2341,12 @@ struct ContextFormat2_5
const ClassDef &class_def = this+classDef;
+ hb_map_t cache;
struct ContextClosureLookupContext lookup_context = {
- {intersects_class, intersected_class_glyphs},
+ {intersects_class, nullptr},
ContextFormat::ClassBasedContext,
- &class_def
+ &class_def,
+ &cache
};
+ hb_iter (ruleSet)
@@ -2407,6 +2464,7 @@ struct ContextFormat2_5
const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
bool ret = true;
int non_zero_index = -1, index = 0;
+ auto snapshot = c->serializer->snapshot();
for (const auto& _ : + hb_enumerate (ruleSet)
| hb_filter (klass_map, hb_first))
{
@@ -2418,8 +2476,10 @@ struct ContextFormat2_5
}
if (coverage_glyph_classes.has (_.first) &&
- o->serialize_subset (c, _.second, this, lookup_map, &klass_map))
+ o->serialize_subset (c, _.second, this, lookup_map, &klass_map)) {
non_zero_index = index;
+ snapshot = c->serializer->snapshot();
+ }
index++;
}
@@ -2433,6 +2493,7 @@ struct ContextFormat2_5
out->ruleSet.pop ();
index--;
}
+ c->serializer->revert (snapshot);
return_trace (bool (out->ruleSet));
}
@@ -2469,7 +2530,7 @@ struct ContextFormat3
return false;
struct ContextClosureLookupContext lookup_context = {
- {intersects_coverage, intersected_coverage_glyphs},
+ {intersects_coverage, nullptr},
ContextFormat::CoverageBasedContext,
this
};
@@ -2655,6 +2716,8 @@ struct ChainContextClosureLookupContext
ContextClosureFuncs funcs;
ContextFormat context_format;
const void *intersects_data[3];
+ void *intersects_cache[3];
+ void *intersected_glyphs_cache;
};
struct ChainContextCollectGlyphsLookupContext
@@ -2681,13 +2744,19 @@ static inline bool chain_context_intersects (const hb_set_t *glyphs,
{
return array_is_subset_of (glyphs,
backtrackCount, backtrack,
- lookup_context.funcs.intersects, lookup_context.intersects_data[0])
+ lookup_context.funcs.intersects,
+ lookup_context.intersects_data[0],
+ lookup_context.intersects_cache[0])
&& array_is_subset_of (glyphs,
inputCount ? inputCount - 1 : 0, input,
- lookup_context.funcs.intersects, lookup_context.intersects_data[1])
+ lookup_context.funcs.intersects,
+ lookup_context.intersects_data[1],
+ lookup_context.intersects_cache[1])
&& array_is_subset_of (glyphs,
lookaheadCount, lookahead,
- lookup_context.funcs.intersects, lookup_context.intersects_data[2]);
+ lookup_context.funcs.intersects,
+ lookup_context.intersects_data[2],
+ lookup_context.intersects_cache[2]);
}
template <typename HBUINT>
@@ -2714,7 +2783,8 @@ static inline void chain_context_closure_lookup (hb_closure_context_t *c,
value,
lookup_context.context_format,
lookup_context.intersects_data[1],
- lookup_context.funcs.intersected_glyphs);
+ lookup_context.funcs.intersected_glyphs,
+ lookup_context.intersected_glyphs_cache);
}
template <typename HBUINT>
@@ -2752,7 +2822,7 @@ static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c
const HBUINT lookahead[] HB_UNUSED,
unsigned int lookupCount HB_UNUSED,
const LookupRecord lookupRecord[] HB_UNUSED,
- ChainContextApplyLookupContext &lookup_context)
+ const ChainContextApplyLookupContext &lookup_context)
{
return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
&& would_match_input (c,
@@ -2770,7 +2840,7 @@ static inline bool chain_context_apply_lookup (hb_ot_apply_context_t *c,
const HBUINT lookahead[],
unsigned int lookupCount,
const LookupRecord lookupRecord[],
- ChainContextApplyLookupContext &lookup_context)
+ const ChainContextApplyLookupContext &lookup_context)
{
unsigned end_index = c->buffer->idx;
unsigned match_end = 0;
@@ -2864,7 +2934,7 @@ struct ChainRule
}
bool would_apply (hb_would_apply_context_t *c,
- ChainContextApplyLookupContext &lookup_context) const
+ const ChainContextApplyLookupContext &lookup_context) const
{
const auto &input = StructAfter<decltype (inputX)> (backtrack);
const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
@@ -2876,7 +2946,8 @@ struct ChainRule
lookup.arrayZ, lookup_context);
}
- bool apply (hb_ot_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
+ bool apply (hb_ot_apply_context_t *c,
+ const ChainContextApplyLookupContext &lookup_context) const
{
TRACE_APPLY (this);
const auto &input = StructAfter<decltype (inputX)> (backtrack);
@@ -3042,7 +3113,8 @@ struct ChainRuleSet
;
}
- bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
+ bool would_apply (hb_would_apply_context_t *c,
+ const ChainContextApplyLookupContext &lookup_context) const
{
return
+ hb_iter (rule)
@@ -3052,7 +3124,8 @@ struct ChainRuleSet
;
}
- bool apply (hb_ot_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
+ bool apply (hb_ot_apply_context_t *c,
+ const ChainContextApplyLookupContext &lookup_context) const
{
TRACE_APPLY (this);
return_trace (
@@ -3166,7 +3239,7 @@ struct ChainContextFormat1_4
void closure_lookups (hb_closure_lookups_context_t *c) const
{
struct ChainContextClosureLookupContext lookup_context = {
- {intersects_glyph, intersected_glyph},
+ {intersects_glyph, nullptr},
ContextFormat::SimpleContext,
{nullptr, nullptr, nullptr}
};
@@ -3278,12 +3351,14 @@ struct ChainContextFormat2_5
const ClassDef &input_class_def = this+inputClassDef;
const ClassDef &lookahead_class_def = this+lookaheadClassDef;
+ hb_map_t caches[3] = {};
struct ChainContextClosureLookupContext lookup_context = {
- {intersects_class, intersected_class_glyphs},
+ {intersects_class, nullptr},
ContextFormat::ClassBasedContext,
{&backtrack_class_def,
&input_class_def,
- &lookahead_class_def}
+ &lookahead_class_def},
+ {&caches[0], &caches[1], &caches[2]}
};
hb_set_t retained_coverage_glyphs;
@@ -3321,12 +3396,16 @@ struct ChainContextFormat2_5
const ClassDef &input_class_def = this+inputClassDef;
const ClassDef &lookahead_class_def = this+lookaheadClassDef;
+ hb_map_t caches[3] = {};
+ intersected_class_cache_t intersected_cache;
struct ChainContextClosureLookupContext lookup_context = {
{intersects_class, intersected_class_glyphs},
ContextFormat::ClassBasedContext,
{&backtrack_class_def,
&input_class_def,
- &lookahead_class_def}
+ &lookahead_class_def},
+ {&caches[0], &caches[1], &caches[2]},
+ &intersected_cache
};
+ hb_enumerate (ruleSet)
@@ -3352,12 +3431,14 @@ struct ChainContextFormat2_5
const ClassDef &input_class_def = this+inputClassDef;
const ClassDef &lookahead_class_def = this+lookaheadClassDef;
+ hb_map_t caches[3] = {};
struct ChainContextClosureLookupContext lookup_context = {
- {intersects_class, intersected_class_glyphs},
+ {intersects_class, nullptr},
ContextFormat::ClassBasedContext,
{&backtrack_class_def,
&input_class_def,
- &lookahead_class_def}
+ &lookahead_class_def},
+ {&caches[0], &caches[1], &caches[2]}
};
+ hb_iter (ruleSet)
@@ -3587,7 +3668,7 @@ struct ChainContextFormat3
const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
struct ChainContextClosureLookupContext lookup_context = {
- {intersects_coverage, intersected_coverage_glyphs},
+ {intersects_coverage, nullptr},
ContextFormat::CoverageBasedContext,
{this, this, this}
};
@@ -3938,13 +4019,14 @@ struct hb_ot_layout_lookup_accelerator_t
template <typename TLookup>
void init (const TLookup &lookup)
{
- digest.init ();
- lookup.collect_coverage (&digest);
-
subtables.init ();
- OT::hb_accelerate_subtables_context_t c_accelerate_subtables (subtables);
+ hb_accelerate_subtables_context_t c_accelerate_subtables (subtables);
lookup.dispatch (&c_accelerate_subtables);
+ digest.init ();
+ for (auto& subtable : hb_iter (subtables))
+ digest.add (subtable.digest);
+
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
cache_user_idx = c_accelerate_subtables.cache_user_idx;
for (unsigned i = 0; i < subtables.length; i++)
@@ -3962,21 +4044,25 @@ struct hb_ot_layout_lookup_accelerator_t
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
if (use_cache)
{
- for (unsigned int i = 0; i < subtables.length; i++)
- if (subtables[i].apply_cached (c))
- return true;
+ return
+ + hb_iter (subtables)
+ | hb_map ([&c] (const hb_accelerate_subtables_context_t::hb_applicable_t &_) { return _.apply_cached (c); })
+ | hb_any
+ ;
}
else
#endif
{
- for (unsigned int i = 0; i < subtables.length; i++)
- if (subtables[i].apply (c))
- return true;
+ return
+ + hb_iter (subtables)
+ | hb_map ([&c] (const hb_accelerate_subtables_context_t::hb_applicable_t &_) { return _.apply (c); })
+ | hb_any
+ ;
}
return false;
}
- bool cache_enter (OT::hb_ot_apply_context_t *c) const
+ bool cache_enter (hb_ot_apply_context_t *c) const
{
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
return cache_user_idx != (unsigned) -1 &&
@@ -3985,7 +4071,7 @@ struct hb_ot_layout_lookup_accelerator_t
return false;
#endif
}
- void cache_leave (OT::hb_ot_apply_context_t *c) const
+ void cache_leave (hb_ot_apply_context_t *c) const
{
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
subtables[cache_user_idx].cache_leave (c);
@@ -3993,8 +4079,8 @@ struct hb_ot_layout_lookup_accelerator_t
}
- private:
hb_set_digest_t digest;
+ private:
hb_accelerate_subtables_context_t::array_t subtables;
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
unsigned cache_user_idx = (unsigned) -1;
@@ -4236,24 +4322,30 @@ struct GSUBGPOS
}
void feature_variation_collect_lookups (const hb_set_t *feature_indexes,
+ const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map,
hb_set_t *lookup_indexes /* OUT */) const
{
#ifndef HB_NO_VAR
- get_feature_variations ().collect_lookups (feature_indexes, lookup_indexes);
+ get_feature_variations ().collect_lookups (feature_indexes, feature_substitutes_map, lookup_indexes);
#endif
}
+#ifndef HB_NO_VAR
+ void collect_feature_substitutes_with_variations (hb_collect_feature_substitutes_with_var_context_t *c) const
+ { get_feature_variations ().collect_feature_substitutes_with_variations (c); }
+#endif
+
template <typename TLookup>
void closure_lookups (hb_face_t *face,
const hb_set_t *glyphs,
hb_set_t *lookup_indexes /* IN/OUT */) const
{
hb_set_t visited_lookups, inactive_lookups;
- OT::hb_closure_lookups_context_t c (face, glyphs, &visited_lookups, &inactive_lookups);
+ hb_closure_lookups_context_t c (face, glyphs, &visited_lookups, &inactive_lookups);
c.set_recurse_func (TLookup::template dispatch_recurse_func<hb_closure_lookups_context_t>);
- for (unsigned lookup_index : + hb_iter (lookup_indexes))
+ for (unsigned lookup_index : *lookup_indexes)
reinterpret_cast<const TLookup &> (get_lookup (lookup_index)).closure_lookups (&c, lookup_index);
hb_set_union (lookup_indexes, &visited_lookups);
@@ -4278,6 +4370,8 @@ struct GSUBGPOS
}
void prune_features (const hb_map_t *lookup_indices, /* IN */
+ const hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map, /* IN */
+ const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map, /* IN */
hb_set_t *feature_indices /* IN/OUT */) const
{
#ifndef HB_NO_VAR
@@ -4285,7 +4379,7 @@ struct GSUBGPOS
// if the FeatureVariation's table and the alternate version(s) intersect the
// set of lookup indices.
hb_set_t alternate_feature_indices;
- get_feature_variations ().closure_features (lookup_indices, &alternate_feature_indices);
+ get_feature_variations ().closure_features (lookup_indices, feature_record_cond_idx_map, &alternate_feature_indices);
if (unlikely (alternate_feature_indices.in_error()))
{
feature_indices->err ();
@@ -4293,9 +4387,8 @@ struct GSUBGPOS
}
#endif
- for (unsigned i : feature_indices->iter())
+ for (unsigned i : hb_iter (feature_indices))
{
- const Feature& f = get_feature (i);
hb_tag_t tag = get_feature_tag (i);
if (tag == HB_TAG ('p', 'r', 'e', 'f'))
// Note: Never ever drop feature 'pref', even if it's empty.
@@ -4305,11 +4398,16 @@ struct GSUBGPOS
continue;
- if (!f.featureParams.is_null () &&
+ const Feature *f = &(get_feature (i));
+ const Feature** p = nullptr;
+ if (feature_substitutes_map->has (i, &p))
+ f = *p;
+
+ if (!f->featureParams.is_null () &&
tag == HB_TAG ('s', 'i', 'z', 'e'))
continue;
- if (!f.intersects_lookup_indexes (lookup_indices)
+ if (!f->intersects_lookup_indexes (lookup_indices)
#ifndef HB_NO_VAR
&& !alternate_feature_indices.has (i)
#endif
diff --git a/thirdparty/harfbuzz/src/hb-ot-layout.cc b/thirdparty/harfbuzz/src/hb-ot-layout.cc
index 44e57987b7..e8091ec3e0 100644
--- a/thirdparty/harfbuzz/src/hb-ot-layout.cc
+++ b/thirdparty/harfbuzz/src/hb-ot-layout.cc
@@ -1271,7 +1271,7 @@ hb_ot_layout_collect_lookups (hb_face_t *face,
hb_set_next (&feature_indexes, &feature_index);)
g.get_feature (feature_index).add_lookup_indexes_to (lookup_indexes);
- g.feature_variation_collect_lookups (&feature_indexes, lookup_indexes);
+ g.feature_variation_collect_lookups (&feature_indexes, nullptr, lookup_indexes);
}
@@ -1465,56 +1465,6 @@ hb_ot_layout_substitute_start (hb_font_t *font,
_hb_ot_layout_set_glyph_props (font, buffer);
}
-void
-hb_ot_layout_delete_glyphs_inplace (hb_buffer_t *buffer,
- bool (*filter) (const hb_glyph_info_t *info))
-{
- /* Merge clusters and delete filtered glyphs.
- * NOTE! We can't use out-buffer as we have positioning data. */
- unsigned int j = 0;
- unsigned int count = buffer->len;
- hb_glyph_info_t *info = buffer->info;
- hb_glyph_position_t *pos = buffer->pos;
- for (unsigned int i = 0; i < count; i++)
- {
- if (filter (&info[i]))
- {
- /* Merge clusters.
- * Same logic as buffer->delete_glyph(), but for in-place removal. */
-
- unsigned int cluster = info[i].cluster;
- if (i + 1 < count && cluster == info[i + 1].cluster)
- continue; /* Cluster survives; do nothing. */
-
- if (j)
- {
- /* Merge cluster backward. */
- if (cluster < info[j - 1].cluster)
- {
- unsigned int mask = info[i].mask;
- unsigned int old_cluster = info[j - 1].cluster;
- for (unsigned k = j; k && info[k - 1].cluster == old_cluster; k--)
- buffer->set_cluster (info[k - 1], cluster, mask);
- }
- continue;
- }
-
- if (i + 1 < count)
- buffer->merge_clusters (i, i + 2); /* Merge cluster forward. */
-
- continue;
- }
-
- if (j != i)
- {
- info[j] = info[i];
- pos[j] = pos[i];
- }
- j++;
- }
- buffer->len = j;
-}
-
/**
* hb_ot_layout_lookup_substitute_closure:
* @face: #hb_face_t to work upon
@@ -1709,6 +1659,8 @@ hb_ot_layout_get_size_params (hb_face_t *face,
return false;
}
+
+
/**
* hb_ot_layout_feature_get_name_ids:
* @face: #hb_face_t to work upon
@@ -1865,7 +1817,7 @@ apply_forward (OT::hb_ot_apply_context_t *c,
while (buffer->idx < buffer->len && buffer->successful)
{
bool applied = false;
- if (accel.may_have (buffer->cur().codepoint) &&
+ if (accel.digest.may_have (buffer->cur().codepoint) &&
(buffer->cur().mask & c->lookup_mask) &&
c->check_glyph_property (&buffer->cur(), c->lookup_props))
{
@@ -1892,7 +1844,7 @@ apply_backward (OT::hb_ot_apply_context_t *c,
hb_buffer_t *buffer = c->buffer;
do
{
- if (accel.may_have (buffer->cur().codepoint) &&
+ if (accel.digest.may_have (buffer->cur().codepoint) &&
(buffer->cur().mask & c->lookup_mask) &&
c->check_glyph_property (&buffer->cur(), c->lookup_props))
ret |= accel.apply (c, false);
@@ -1906,15 +1858,16 @@ apply_backward (OT::hb_ot_apply_context_t *c,
}
template <typename Proxy>
-static inline void
+static inline bool
apply_string (OT::hb_ot_apply_context_t *c,
const typename Proxy::Lookup &lookup,
const OT::hb_ot_layout_lookup_accelerator_t &accel)
{
+ bool ret = false;
hb_buffer_t *buffer = c->buffer;
if (unlikely (!buffer->len || !c->lookup_mask))
- return;
+ return ret;
c->set_lookup_props (lookup.get_props ());
@@ -1925,7 +1878,7 @@ apply_string (OT::hb_ot_apply_context_t *c,
buffer->clear_output ();
buffer->idx = 0;
- apply_forward (c, accel);
+ ret = apply_forward (c, accel);
if (!Proxy::always_inplace)
buffer->sync ();
@@ -1935,8 +1888,10 @@ apply_string (OT::hb_ot_apply_context_t *c,
/* in-place backward substitution/positioning */
assert (!buffer->have_output);
buffer->idx = buffer->len - 1;
- apply_backward (c, accel);
+ ret = apply_backward (c, accel);
}
+
+ return ret;
}
template <typename Proxy>
@@ -1955,23 +1910,42 @@ inline void hb_ot_map_t::apply (const Proxy &proxy,
const stage_map_t *stage = &stages[table_index][stage_index];
for (; i < stage->last_lookup; i++)
{
- unsigned int lookup_index = lookups[table_index][i].index;
- if (!buffer->message (font, "start lookup %d", lookup_index)) continue;
- c.set_lookup_index (lookup_index);
- c.set_lookup_mask (lookups[table_index][i].mask);
- c.set_auto_zwj (lookups[table_index][i].auto_zwj);
- c.set_auto_zwnj (lookups[table_index][i].auto_zwnj);
- c.set_random (lookups[table_index][i].random);
- c.set_per_syllable (lookups[table_index][i].per_syllable);
-
- apply_string<Proxy> (&c,
- proxy.table.get_lookup (lookup_index),
- proxy.accels[lookup_index]);
- (void) buffer->message (font, "end lookup %d", lookup_index);
+ auto &lookup = lookups[table_index][i];
+
+ unsigned int lookup_index = lookup.index;
+ if (!buffer->message (font, "start lookup %d feature '%c%c%c%c'", lookup_index, HB_UNTAG (lookup.feature_tag))) continue;
+
+ /* c.digest is a digest of all the current glyphs in the buffer
+ * (plus some past glyphs).
+ *
+ * Only try applying the lookup if there is any overlap. */
+ if (proxy.accels[lookup_index].digest.may_have (c.digest))
+ {
+ c.set_lookup_index (lookup_index);
+ c.set_lookup_mask (lookup.mask);
+ c.set_auto_zwj (lookup.auto_zwj);
+ c.set_auto_zwnj (lookup.auto_zwnj);
+ c.set_random (lookup.random);
+ c.set_per_syllable (lookup.per_syllable);
+
+ apply_string<Proxy> (&c,
+ proxy.table.get_lookup (lookup_index),
+ proxy.accels[lookup_index]);
+ }
+ else
+ (void) buffer->message (font, "skipped lookup %d feature '%c%c%c%c' because no glyph matches", lookup_index, HB_UNTAG (lookup.feature_tag));
+
+ (void) buffer->message (font, "end lookup %d feature '%c%c%c%c'", lookup_index, HB_UNTAG (lookup.feature_tag));
}
if (stage->pause_func)
- stage->pause_func (plan, font, buffer);
+ {
+ if (stage->pause_func (plan, font, buffer))
+ {
+ /* Refresh working buffer digest since buffer changed. */
+ c.digest = buffer->digest ();
+ }
+ }
}
}
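
The per-lookup skip above relies on two digests: each lookup accelerator unions the coverage digests of its subtables, and the apply context keeps a digest of the glyphs currently in the buffer; if the two cannot overlap, the whole lookup is skipped, and a pause_func returning true signals that the buffer changed so the working digest is refreshed. A rough standalone sketch of such a digest (hb_set_digest_t is more elaborate, but the idea is a mask filter with no false negatives):

#include <cstdint>

struct digest_t
{
  uint64_t mask = 0;

  void add (uint32_t glyph)               { mask |= 1ull << (glyph & 63); }
  void add (const digest_t &o)            { mask |= o.mask; }

  // May report false positives, never false negatives; that is all the
  // fast-reject path needs.
  bool may_have (uint32_t glyph) const    { return mask & (1ull << (glyph & 63)); }
  bool may_have (const digest_t &o) const { return (mask & o.mask) != 0; }
};
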
@@ -2341,6 +2315,7 @@ struct hb_get_glyph_alternates_dispatch_t :
( _dispatch (obj, hb_prioritize, std::forward<Ts> (ds)...) )
};
+#ifndef HB_NO_LAYOUT_RARELY_USED
/**
* hb_ot_layout_lookup_get_glyph_alternates:
* @face: a face.
@@ -2373,4 +2348,72 @@ hb_ot_layout_lookup_get_glyph_alternates (hb_face_t *face,
return ret;
}
+
+struct hb_position_single_dispatch_t :
+ hb_dispatch_context_t<hb_position_single_dispatch_t, bool>
+{
+ static return_t default_return_value () { return false; }
+ bool stop_sublookup_iteration (return_t r) const { return r; }
+
+ private:
+ template <typename T, typename ...Ts> auto
+ _dispatch (const T &obj, hb_priority<1>, Ts&&... ds) HB_AUTO_RETURN
+ ( obj.position_single (std::forward<Ts> (ds)...) )
+ template <typename T, typename ...Ts> auto
+ _dispatch (const T &obj, hb_priority<0>, Ts&&... ds) HB_AUTO_RETURN
+ ( default_return_value () )
+ public:
+ template <typename T, typename ...Ts> auto
+ dispatch (const T &obj, Ts&&... ds) HB_AUTO_RETURN
+ ( _dispatch (obj, hb_prioritize, std::forward<Ts> (ds)...) )
+};
+
+/**
+ * hb_ot_layout_lookup_get_optical_bound:
+ * @font: a font.
+ * @lookup_index: index of the feature lookup to query.
+ * @direction: edge of the glyph to query.
+ * @glyph: a glyph id.
+ *
+ * Fetches the optical bound of a glyph positioned at the margin of text.
+ * The direction identifies which edge of the glyph to query.
+ *
+ * Return value: Adjustment value. Negative values mean the glyph will stick out of the margin.
+ *
+ * Since: 5.3.0
+ **/
+hb_position_t
+hb_ot_layout_lookup_get_optical_bound (hb_font_t *font,
+ unsigned lookup_index,
+ hb_direction_t direction,
+ hb_codepoint_t glyph)
+{
+ const OT::PosLookup &lookup = font->face->table.GPOS->table->get_lookup (lookup_index);
+ hb_glyph_position_t pos = {0};
+ hb_position_single_dispatch_t c;
+ lookup.dispatch (&c, font, direction, glyph, pos);
+ hb_position_t ret = 0;
+ switch (direction)
+ {
+ case HB_DIRECTION_LTR:
+ ret = pos.x_offset;
+ break;
+ case HB_DIRECTION_RTL:
+ ret = pos.x_advance - pos.x_offset;
+ break;
+ case HB_DIRECTION_TTB:
+ ret = pos.y_offset;
+ break;
+ case HB_DIRECTION_BTT:
+ ret = pos.y_advance - pos.y_offset;
+ break;
+ case HB_DIRECTION_INVALID:
+ default:
+ break;
+ }
+ return ret;
+}
+#endif
+
+
#endif
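
A hedged usage sketch for the new hb_ot_layout_lookup_get_optical_bound() API above: collect the lookups behind an optical-bounds GPOS feature and query one glyph's left-edge adjustment. The feature tag ('lfbd') and the single-lookup assumption are illustrative, not mandated by the API:

#include <hb.h>
#include <hb-ot.h>

static hb_position_t
left_bound_of (hb_font_t *font, hb_codepoint_t glyph)
{
  hb_face_t *face = hb_font_get_face (font);
  hb_tag_t features[] = { HB_TAG ('l','f','b','d'), HB_TAG_NONE };
  hb_set_t *lookups = hb_set_create ();
  hb_ot_layout_collect_lookups (face, HB_OT_TAG_GPOS,
                                nullptr, nullptr, features, lookups);

  hb_position_t bound = 0;
  hb_codepoint_t lookup_index = HB_SET_VALUE_INVALID;
  if (hb_set_next (lookups, &lookup_index))
    bound = hb_ot_layout_lookup_get_optical_bound (font, (unsigned) lookup_index,
                                                   HB_DIRECTION_LTR, glyph);
  hb_set_destroy (lookups);
  return bound;
}

As documented above, a negative return value means the glyph sticks out of the margin in that direction.
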
diff --git a/thirdparty/harfbuzz/src/hb-ot-layout.h b/thirdparty/harfbuzz/src/hb-ot-layout.h
index 4edddd9e0d..f7b488f870 100644
--- a/thirdparty/harfbuzz/src/hb-ot-layout.h
+++ b/thirdparty/harfbuzz/src/hb-ot-layout.h
@@ -403,6 +403,16 @@ hb_ot_layout_get_size_params (hb_face_t *face,
unsigned int *range_start, /* OUT. May be NULL */
unsigned int *range_end /* OUT. May be NULL */);
+HB_EXTERN hb_position_t
+hb_ot_layout_lookup_get_optical_bound (hb_font_t *font,
+ unsigned lookup_index,
+ hb_direction_t direction,
+ hb_codepoint_t glyph);
+
+
+/*
+ * GSUB/GPOS
+ */
HB_EXTERN hb_bool_t
hb_ot_layout_feature_get_name_ids (hb_face_t *face,
@@ -423,6 +433,7 @@ hb_ot_layout_feature_get_characters (hb_face_t *face,
unsigned int *char_count /* IN/OUT. May be NULL */,
hb_codepoint_t *characters /* OUT. May be NULL */);
+
/*
* BASE
*/
diff --git a/thirdparty/harfbuzz/src/hb-ot-layout.hh b/thirdparty/harfbuzz/src/hb-ot-layout.hh
index de06610cb5..9505d5f147 100644
--- a/thirdparty/harfbuzz/src/hb-ot-layout.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-layout.hh
@@ -102,10 +102,6 @@ HB_INTERNAL void
hb_ot_layout_substitute_start (hb_font_t *font,
hb_buffer_t *buffer);
-HB_INTERNAL void
-hb_ot_layout_delete_glyphs_inplace (hb_buffer_t *buffer,
- bool (*filter) (const hb_glyph_info_t *info));
-
namespace OT {
struct hb_ot_apply_context_t;
struct hb_ot_layout_lookup_accelerator_t;
@@ -552,7 +548,7 @@ _hb_glyph_info_clear_substituted (hb_glyph_info_t *info)
info->glyph_props() &= ~(HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED);
}
-static inline void
+static inline bool
_hb_clear_substitution_flags (const hb_ot_shape_plan_t *plan HB_UNUSED,
hb_font_t *font HB_UNUSED,
hb_buffer_t *buffer)
@@ -561,6 +557,7 @@ _hb_clear_substitution_flags (const hb_ot_shape_plan_t *plan HB_UNUSED,
unsigned int count = buffer->len;
for (unsigned int i = 0; i < count; i++)
_hb_glyph_info_clear_substituted (&info[i]);
+ return false;
}
diff --git a/thirdparty/harfbuzz/src/hb-ot-map.cc b/thirdparty/harfbuzz/src/hb-ot-map.cc
index 39215b335f..8882dbaccb 100644
--- a/thirdparty/harfbuzz/src/hb-ot-map.cc
+++ b/thirdparty/harfbuzz/src/hb-ot-map.cc
@@ -45,7 +45,7 @@ void hb_ot_map_t::collect_lookups (unsigned int table_index, hb_set_t *lookups_o
hb_ot_map_builder_t::hb_ot_map_builder_t (hb_face_t *face_,
const hb_segment_properties_t &props_)
{
- memset (this, 0, sizeof (*this));
+ hb_memset (this, 0, sizeof (*this));
feature_infos.init ();
for (unsigned int table_index = 0; table_index < 2; table_index++)
@@ -133,7 +133,8 @@ hb_ot_map_builder_t::add_lookups (hb_ot_map_t &m,
bool auto_zwnj,
bool auto_zwj,
bool random,
- bool per_syllable)
+ bool per_syllable,
+ hb_tag_t feature_tag)
{
unsigned int lookup_indices[32];
unsigned int offset, len;
@@ -162,6 +163,7 @@ hb_ot_map_builder_t::add_lookups (hb_ot_map_t &m,
lookup->auto_zwj = auto_zwj;
lookup->random = random;
lookup->per_syllable = per_syllable;
+ lookup->feature_tag = feature_tag;
}
offset += len;
@@ -212,24 +214,26 @@ hb_ot_map_builder_t::compile (hb_ot_map_t &m,
if (feature_infos.length)
{
feature_infos.qsort ();
+ auto *f = feature_infos.arrayZ;
unsigned int j = 0;
- for (unsigned int i = 1; i < feature_infos.length; i++)
- if (feature_infos[i].tag != feature_infos[j].tag)
- feature_infos[++j] = feature_infos[i];
+ unsigned count = feature_infos.length;
+ for (unsigned int i = 1; i < count; i++)
+ if (f[i].tag != f[j].tag)
+ f[++j] = f[i];
else {
- if (feature_infos[i].flags & F_GLOBAL) {
- feature_infos[j].flags |= F_GLOBAL;
- feature_infos[j].max_value = feature_infos[i].max_value;
- feature_infos[j].default_value = feature_infos[i].default_value;
+ if (f[i].flags & F_GLOBAL) {
+ f[j].flags |= F_GLOBAL;
+ f[j].max_value = f[i].max_value;
+ f[j].default_value = f[i].default_value;
} else {
- if (feature_infos[j].flags & F_GLOBAL)
- feature_infos[j].flags ^= F_GLOBAL;
- feature_infos[j].max_value = hb_max (feature_infos[j].max_value, feature_infos[i].max_value);
+ if (f[j].flags & F_GLOBAL)
+ f[j].flags ^= F_GLOBAL;
+ f[j].max_value = hb_max (f[j].max_value, f[i].max_value);
/* Inherit default_value from j */
}
- feature_infos[j].flags |= (feature_infos[i].flags & F_HAS_FALLBACK);
- feature_infos[j].stage[0] = hb_min (feature_infos[j].stage[0], feature_infos[i].stage[0]);
- feature_infos[j].stage[1] = hb_min (feature_infos[j].stage[1], feature_infos[i].stage[1]);
+ f[j].flags |= (f[i].flags & F_HAS_FALLBACK);
+ f[j].stage[0] = hb_min (f[j].stage[0], f[i].stage[0]);
+ f[j].stage[1] = hb_min (f[j].stage[1], f[i].stage[1]);
}
feature_infos.shrink (j + 1);
}
@@ -239,7 +243,8 @@ hb_ot_map_builder_t::compile (hb_ot_map_t &m,
static_assert ((!(HB_GLYPH_FLAG_DEFINED & (HB_GLYPH_FLAG_DEFINED + 1))), "");
unsigned int next_bit = hb_popcount (HB_GLYPH_FLAG_DEFINED) + 1;
- for (unsigned int i = 0; i < feature_infos.length; i++)
+ unsigned count = feature_infos.length;
+ for (unsigned int i = 0; i < count; i++)
{
const feature_info_t *info = &feature_infos[i];
@@ -308,7 +313,7 @@ hb_ot_map_builder_t::compile (hb_ot_map_t &m,
map->_1_mask = (1u << map->shift) & map->mask;
map->needs_fallback = !found;
}
- feature_infos.shrink (0); /* Done with these */
+ //feature_infos.shrink (0); /* Done with these */
add_gsub_pause (nullptr);
@@ -317,6 +322,7 @@ hb_ot_map_builder_t::compile (hb_ot_map_t &m,
for (unsigned int table_index = 0; table_index < 2; table_index++)
{
/* Collect lookup indices for features */
+ auto &lookups = m.lookups[table_index];
unsigned int stage_index = 0;
unsigned int last_num_lookups = 0;
@@ -329,36 +335,39 @@ hb_ot_map_builder_t::compile (hb_ot_map_t &m,
key.variations_index[table_index],
global_bit_mask);
- for (unsigned i = 0; i < m.features.length; i++)
- if (m.features[i].stage[table_index] == stage)
+ for (auto &feature : m.features)
+ {
+ if (feature.stage[table_index] == stage)
add_lookups (m, table_index,
- m.features[i].index[table_index],
+ feature.index[table_index],
key.variations_index[table_index],
- m.features[i].mask,
- m.features[i].auto_zwnj,
- m.features[i].auto_zwj,
- m.features[i].random,
- m.features[i].per_syllable);
+ feature.mask,
+ feature.auto_zwnj,
+ feature.auto_zwj,
+ feature.random,
+ feature.per_syllable,
+ feature.tag);
+ }
/* Sort lookups and merge duplicates */
- if (last_num_lookups < m.lookups[table_index].length)
+ if (last_num_lookups < lookups.length)
{
- m.lookups[table_index].qsort (last_num_lookups, m.lookups[table_index].length);
+ lookups.as_array ().sub_array (last_num_lookups, lookups.length - last_num_lookups).qsort ();
unsigned int j = last_num_lookups;
- for (unsigned int i = j + 1; i < m.lookups[table_index].length; i++)
- if (m.lookups[table_index][i].index != m.lookups[table_index][j].index)
- m.lookups[table_index][++j] = m.lookups[table_index][i];
+ for (unsigned int i = j + 1; i < lookups.length; i++)
+ if (lookups.arrayZ[i].index != lookups.arrayZ[j].index)
+ lookups.arrayZ[++j] = lookups.arrayZ[i];
else
{
- m.lookups[table_index][j].mask |= m.lookups[table_index][i].mask;
- m.lookups[table_index][j].auto_zwnj &= m.lookups[table_index][i].auto_zwnj;
- m.lookups[table_index][j].auto_zwj &= m.lookups[table_index][i].auto_zwj;
+ lookups.arrayZ[j].mask |= lookups.arrayZ[i].mask;
+ lookups.arrayZ[j].auto_zwnj &= lookups.arrayZ[i].auto_zwnj;
+ lookups.arrayZ[j].auto_zwj &= lookups.arrayZ[i].auto_zwj;
}
- m.lookups[table_index].shrink (j + 1);
+ lookups.shrink (j + 1);
}
- last_num_lookups = m.lookups[table_index].length;
+ last_num_lookups = lookups.length;
if (stage_index < stages[table_index].length && stages[table_index][stage_index].index == stage) {
hb_ot_map_t::stage_map_t *stage_map = m.stages[table_index].push ();
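
The two hunks above apply the same in-place pattern twice (to feature_infos and to the per-table lookups): sort, then walk the array with a write cursor j, merging duplicate keys into slot j and shrinking afterwards. A standalone sketch of that pattern, in plain C++ on a std::vector rather than HarfBuzz's own containers, purely for clarity:

    // Illustrative only: sort-then-merge deduplication, mirroring the
    // feature/lookup merging above.
    #include <algorithm>
    #include <vector>

    struct entry_t { unsigned key; unsigned mask; };

    static void
    merge_duplicates (std::vector<entry_t> &v)
    {
      if (v.empty ()) return;
      std::sort (v.begin (), v.end (),
                 [] (const entry_t &a, const entry_t &b) { return a.key < b.key; });
      unsigned j = 0;
      for (unsigned i = 1; i < v.size (); i++)
        if (v[i].key != v[j].key)
          v[++j] = v[i];          // new key: advance the write cursor
        else
          v[j].mask |= v[i].mask; // duplicate: fold into the kept entry
      v.resize (j + 1);           // drop the merged-away tail
    }
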
diff --git a/thirdparty/harfbuzz/src/hb-ot-map.hh b/thirdparty/harfbuzz/src/hb-ot-map.hh
index a7b5eec30d..efc8cae96a 100644
--- a/thirdparty/harfbuzz/src/hb-ot-map.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-map.hh
@@ -69,6 +69,7 @@ struct hb_ot_map_t
unsigned short random : 1;
unsigned short per_syllable : 1;
hb_mask_t mask;
+ hb_tag_t feature_tag;
HB_INTERNAL static int cmp (const void *pa, const void *pb)
{
@@ -78,7 +79,9 @@ struct hb_ot_map_t
}
};
- typedef void (*pause_func_t) (const struct hb_ot_shape_plan_t *plan, hb_font_t *font, hb_buffer_t *buffer);
+ /* Pause functions return true if new glyph indices might have been
+ * added to the buffer. This is used to update buffer digest. */
+ typedef bool (*pause_func_t) (const struct hb_ot_shape_plan_t *plan, hb_font_t *font, hb_buffer_t *buffer);
struct stage_map_t {
unsigned int last_lookup; /* Cumulative */
@@ -87,13 +90,13 @@ struct hb_ot_map_t
void init ()
{
- memset (this, 0, sizeof (*this));
+ hb_memset (this, 0, sizeof (*this));
- features.init ();
+ features.init0 ();
for (unsigned int table_index = 0; table_index < 2; table_index++)
{
- lookups[table_index].init ();
- stages[table_index].init ();
+ lookups[table_index].init0 ();
+ stages[table_index].init0 ();
}
}
void fini ()
@@ -239,7 +242,8 @@ struct hb_ot_map_builder_t
bool auto_zwnj = true,
bool auto_zwj = true,
bool random = false,
- bool per_syllable = false);
+ bool per_syllable = false,
+ hb_tag_t feature_tag = HB_TAG(' ',' ',' ',' '));
struct feature_info_t {
hb_tag_t tag;
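
To illustrate the new contract described in the comment above (pause functions now report whether they may have introduced new glyph indices, so the buffer digest can be refreshed), a hypothetical pause callback under the changed signature might look like the sketch below; the callback itself is illustrative and not part of this patch.

    /* Illustrative only: a pause callback matching the new pause_func_t.
     * Returning true signals that the buffer digest may be stale. */
    static bool
    my_pause (const hb_ot_shape_plan_t *plan HB_UNUSED,
              hb_font_t *font HB_UNUSED,
              hb_buffer_t *buffer HB_UNUSED)
    {
      /* ... inspect or edit the buffer here ... */
      return false; /* no new glyph indices were added */
    }
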
diff --git a/thirdparty/harfbuzz/src/hb-ot-math-table.hh b/thirdparty/harfbuzz/src/hb-ot-math-table.hh
index d834d94371..93953370e2 100644
--- a/thirdparty/harfbuzz/src/hb-ot-math-table.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-math-table.hh
@@ -77,11 +77,11 @@ struct MathConstants
HBINT16 *p = c->allocate_size<HBINT16> (HBINT16::static_size * 2);
if (unlikely (!p)) return_trace (nullptr);
- memcpy (p, percentScaleDown, HBINT16::static_size * 2);
+ hb_memcpy (p, percentScaleDown, HBINT16::static_size * 2);
HBUINT16 *m = c->allocate_size<HBUINT16> (HBUINT16::static_size * 2);
if (unlikely (!m)) return_trace (nullptr);
- memcpy (m, minHeight, HBUINT16::static_size * 2);
+ hb_memcpy (m, minHeight, HBUINT16::static_size * 2);
unsigned count = ARRAY_LENGTH (mathValueRecords);
for (unsigned i = 0; i < count; i++)
@@ -786,7 +786,7 @@ struct MathGlyphAssembly
if (parts_count)
{
int64_t mult = font->dir_mult (direction);
- for (auto _ : hb_zip (partRecords.sub_array (start_offset, parts_count),
+ for (auto _ : hb_zip (partRecords.as_array ().sub_array (start_offset, parts_count),
hb_array (parts, *parts_count)))
_.first.extract (_.second, mult, font);
}
@@ -855,7 +855,7 @@ struct MathGlyphConstruction
if (variants_count)
{
int64_t mult = font->dir_mult (direction);
- for (auto _ : hb_zip (mathGlyphVariantRecord.sub_array (start_offset, variants_count),
+ for (auto _ : hb_zip (mathGlyphVariantRecord.as_array ().sub_array (start_offset, variants_count),
hb_array (variants, *variants_count)))
_.second = {_.first.variantGlyph, font->em_mult (_.first.advanceMeasurement, mult)};
}
diff --git a/thirdparty/harfbuzz/src/hb-ot-meta-table.hh b/thirdparty/harfbuzz/src/hb-ot-meta-table.hh
index 93e64c5327..e1b68bcf91 100644
--- a/thirdparty/harfbuzz/src/hb-ot-meta-table.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-meta-table.hh
@@ -84,7 +84,7 @@ struct meta
{
if (count)
{
- + table->dataMaps.sub_array (start_offset, count)
+ + table->dataMaps.as_array ().sub_array (start_offset, count)
| hb_map (&DataMap::get_tag)
| hb_map ([](hb_tag_t tag) { return (hb_ot_meta_tag_t) tag; })
| hb_sink (hb_array (entries, *count))
diff --git a/thirdparty/harfbuzz/src/hb-ot-name-table.hh b/thirdparty/harfbuzz/src/hb-ot-name-table.hh
index 1f2131ffcc..6f4461cc15 100644
--- a/thirdparty/harfbuzz/src/hb-ot-name-table.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-name-table.hh
@@ -30,10 +30,55 @@
#include "hb-open-type.hh"
#include "hb-ot-name-language.hh"
#include "hb-aat-layout.hh"
+#include "hb-utf.hh"
namespace OT {
+template <typename in_utf_t, typename out_utf_t>
+inline unsigned int
+hb_ot_name_convert_utf (hb_bytes_t bytes,
+ unsigned int *text_size /* IN/OUT */,
+ typename out_utf_t::codepoint_t *text /* OUT */)
+{
+ unsigned int src_len = bytes.length / sizeof (typename in_utf_t::codepoint_t);
+ const typename in_utf_t::codepoint_t *src = (const typename in_utf_t::codepoint_t *) bytes.arrayZ;
+ const typename in_utf_t::codepoint_t *src_end = src + src_len;
+
+ typename out_utf_t::codepoint_t *dst = text;
+
+ hb_codepoint_t unicode;
+ const hb_codepoint_t replacement = HB_BUFFER_REPLACEMENT_CODEPOINT_DEFAULT;
+
+ if (text_size && *text_size)
+ {
+ (*text_size)--; /* Save room for NUL-termination. */
+ const typename out_utf_t::codepoint_t *dst_end = text + *text_size;
+
+ while (src < src_end && dst < dst_end)
+ {
+ const typename in_utf_t::codepoint_t *src_next = in_utf_t::next (src, src_end, &unicode, replacement);
+ typename out_utf_t::codepoint_t *dst_next = out_utf_t::encode (dst, dst_end, unicode);
+ if (dst_next == dst)
+ break; /* Out-of-room. */
+
+ dst = dst_next;
+ src = src_next;
+ }
+
+ *text_size = dst - text;
+ *dst = 0; /* NUL-terminate. */
+ }
+
+ /* Accumulate length of rest. */
+ unsigned int dst_len = dst - text;
+ while (src < src_end)
+ {
+ src = in_utf_t::next (src, src_end, &unicode, replacement);
+ dst_len += out_utf_t::encode_len (unicode);
+ }
+ return dst_len;
+}
#define entry_score var.u16[0]
#define entry_index var.u16[1]
@@ -97,12 +142,68 @@ struct NameRecord
return UNSUPPORTED;
}
- NameRecord* copy (hb_serialize_context_t *c, const void *base) const
+ NameRecord* copy (hb_serialize_context_t *c, const void *base
+#ifdef HB_EXPERIMENTAL_API
+ , const hb_hashmap_t<hb_ot_name_record_ids_t, hb_bytes_t> *name_table_overrides
+#endif
+ ) const
{
TRACE_SERIALIZE (this);
+ HB_UNUSED auto snap = c->snapshot ();
auto *out = c->embed (this);
if (unlikely (!out)) return_trace (nullptr);
- out->offset.serialize_copy (c, offset, base, 0, hb_serialize_context_t::Tail, length);
+#ifdef HB_EXPERIMENTAL_API
+ hb_ot_name_record_ids_t record_ids (platformID, encodingID, languageID, nameID);
+ hb_bytes_t* name_bytes;
+
+ if (name_table_overrides->has (record_ids, &name_bytes)) {
+ hb_bytes_t encoded_bytes = *name_bytes;
+ char *name_str_utf16_be = nullptr;
+
+ if (platformID != 1)
+ {
+ unsigned text_size = hb_ot_name_convert_utf<hb_utf8_t, hb_utf16_be_t> (*name_bytes, nullptr, nullptr);
+
+ text_size++; // needs to consider NULL terminator for use in hb_ot_name_convert_utf()
+ unsigned byte_len = text_size * hb_utf16_be_t::codepoint_t::static_size;
+ name_str_utf16_be = (char *) hb_calloc (byte_len, 1);
+ if (!name_str_utf16_be)
+ {
+ c->revert (snap);
+ return_trace (nullptr);
+ }
+ hb_ot_name_convert_utf<hb_utf8_t, hb_utf16_be_t> (*name_bytes, &text_size,
+ (hb_utf16_be_t::codepoint_t *) name_str_utf16_be);
+
+ unsigned encoded_byte_len = text_size * hb_utf16_be_t::codepoint_t::static_size;
+ if (!encoded_byte_len || !c->check_assign (out->length, encoded_byte_len, HB_SERIALIZE_ERROR_INT_OVERFLOW)) {
+ c->revert (snap);
+ hb_free (name_str_utf16_be);
+ return_trace (nullptr);
+ }
+
+ encoded_bytes = hb_bytes_t (name_str_utf16_be, encoded_byte_len);
+ }
+ else
+ {
+ // mac platform, copy the UTF-8 string(all ascii characters) as is
+ if (!c->check_assign (out->length, encoded_bytes.length, HB_SERIALIZE_ERROR_INT_OVERFLOW)) {
+ c->revert (snap);
+ return_trace (nullptr);
+ }
+ }
+
+ out->offset = 0;
+ c->push ();
+ encoded_bytes.copy (c);
+ c->add_link (out->offset, c->pop_pack (), hb_serialize_context_t::Tail, 0);
+ hb_free (name_str_utf16_be);
+ }
+ else
+#endif
+ {
+ out->offset.serialize_copy (c, offset, base, 0, hb_serialize_context_t::Tail, length);
+ }
return_trace (out);
}
@@ -216,29 +317,61 @@ struct name
hb_requires (hb_is_source_of (Iterator, const NameRecord &))>
bool serialize (hb_serialize_context_t *c,
Iterator it,
- const void *src_string_pool)
+ const void *src_string_pool
+#ifdef HB_EXPERIMENTAL_API
+ , const hb_vector_t<hb_ot_name_record_ids_t>& insert_name_records
+ , const hb_hashmap_t<hb_ot_name_record_ids_t, hb_bytes_t> *name_table_overrides
+#endif
+ )
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min ((*this)))) return_trace (false);
+ unsigned total_count = it.len ()
+#ifdef HB_EXPERIMENTAL_API
+ + insert_name_records.length
+#endif
+ ;
this->format = 0;
- this->count = it.len ();
+ if (!c->check_assign (this->count, total_count, HB_SERIALIZE_ERROR_INT_OVERFLOW))
+ return false;
- NameRecord *name_records = (NameRecord *) hb_calloc (it.len (), NameRecord::static_size);
+ NameRecord *name_records = (NameRecord *) hb_calloc (total_count, NameRecord::static_size);
if (unlikely (!name_records)) return_trace (false);
- hb_array_t<NameRecord> records (name_records, it.len ());
+ hb_array_t<NameRecord> records (name_records, total_count);
for (const NameRecord& record : it)
{
+ hb_memcpy (name_records, &record, NameRecord::static_size);
+ name_records++;
+ }
+
+#ifdef HB_EXPERIMENTAL_API
+ for (unsigned i = 0; i < insert_name_records.length; i++)
+ {
+ const hb_ot_name_record_ids_t& ids = insert_name_records[i];
+ NameRecord record;
+ record.platformID = ids.platform_id;
+ record.encodingID = ids.encoding_id;
+ record.languageID = ids.language_id;
+ record.nameID = ids.name_id;
+ record.length = 0; // handled in NameRecord copy()
+ record.offset = 0;
memcpy (name_records, &record, NameRecord::static_size);
name_records++;
}
+#endif
records.qsort ();
- c->copy_all (records, src_string_pool);
+ c->copy_all (records,
+ src_string_pool
+#ifdef HB_EXPERIMENTAL_API
+ , name_table_overrides
+#endif
+ );
hb_free (records.arrayZ);
@@ -256,6 +389,11 @@ struct name
name *name_prime = c->serializer->start_embed<name> ();
if (unlikely (!name_prime)) return_trace (false);
+#ifdef HB_EXPERIMENTAL_API
+ const hb_hashmap_t<hb_ot_name_record_ids_t, hb_bytes_t> *name_table_overrides =
+ c->plan->name_table_overrides;
+#endif
+
auto it =
+ nameRecordZ.as_array (count)
| hb_filter (c->plan->name_ids, &NameRecord::nameID)
@@ -265,10 +403,48 @@ struct name
(c->plan->flags & HB_SUBSET_FLAGS_NAME_LEGACY)
|| namerecord.isUnicode ();
})
+#ifdef HB_EXPERIMENTAL_API
+ | hb_filter ([&] (const NameRecord& namerecord) {
+ if (name_table_overrides->is_empty ())
+ return true;
+ hb_ot_name_record_ids_t rec_ids (namerecord.platformID,
+ namerecord.encodingID,
+ namerecord.languageID,
+ namerecord.nameID);
+
+ hb_bytes_t *p;
+ if (name_table_overrides->has (rec_ids, &p) &&
+ (*p).length == 0)
+ return false;
+ return true;
+ })
+#endif
;
- name_prime->serialize (c->serializer, it, std::addressof (this + stringOffset));
- return_trace (name_prime->count);
+#ifdef HB_EXPERIMENTAL_API
+ hb_vector_t<hb_ot_name_record_ids_t> insert_name_records;
+ if (!name_table_overrides->is_empty ())
+ {
+ if (unlikely (!insert_name_records.alloc (name_table_overrides->get_population ())))
+ return_trace (false);
+ for (const auto& record_ids : name_table_overrides->keys ())
+ {
+ if (name_table_overrides->get (record_ids).length == 0)
+ continue;
+ if (has_name_record_with_ids (record_ids))
+ continue;
+ insert_name_records.push (record_ids);
+ }
+ }
+#endif
+
+ return (name_prime->serialize (c->serializer, it,
+ std::addressof (this + stringOffset)
+#ifdef HB_EXPERIMENTAL_API
+ , insert_name_records
+ , name_table_overrides
+#endif
+ ));
}
bool sanitize_records (hb_sanitize_context_t *c) const
@@ -378,6 +554,23 @@ struct name
hb_vector_t<hb_ot_name_entry_t> names;
};
+ private:
+ // sometimes NameRecords are not sorted in the font file, so use linear search
+ // here
+ bool has_name_record_with_ids (const hb_ot_name_record_ids_t& record_ids) const
+ {
+ for (const auto& record : nameRecordZ.as_array (count))
+ {
+ if (record.platformID == record_ids.platform_id &&
+ record.encodingID == record_ids.encoding_id &&
+ record.languageID == record_ids.language_id &&
+ record.nameID == record_ids.name_id)
+ return true;
+ }
+ return false;
+ }
+
+ public:
/* We only implement format 0 for now. */
HBUINT16 format; /* Format selector (=0/1). */
HBUINT16 count; /* Number of name records. */
diff --git a/thirdparty/harfbuzz/src/hb-ot-name.cc b/thirdparty/harfbuzz/src/hb-ot-name.cc
index c35ac5b3dc..6adf1e8fbe 100644
--- a/thirdparty/harfbuzz/src/hb-ot-name.cc
+++ b/thirdparty/harfbuzz/src/hb-ot-name.cc
@@ -64,52 +64,6 @@ hb_ot_name_list_names (hb_face_t *face,
return (const hb_ot_name_entry_t *) name.names;
}
-
-template <typename in_utf_t, typename out_utf_t>
-static inline unsigned int
-hb_ot_name_convert_utf (hb_bytes_t bytes,
- unsigned int *text_size /* IN/OUT */,
- typename out_utf_t::codepoint_t *text /* OUT */)
-{
- unsigned int src_len = bytes.length / sizeof (typename in_utf_t::codepoint_t);
- const typename in_utf_t::codepoint_t *src = (const typename in_utf_t::codepoint_t *) bytes.arrayZ;
- const typename in_utf_t::codepoint_t *src_end = src + src_len;
-
- typename out_utf_t::codepoint_t *dst = text;
-
- hb_codepoint_t unicode;
- const hb_codepoint_t replacement = HB_BUFFER_REPLACEMENT_CODEPOINT_DEFAULT;
-
- if (text_size && *text_size)
- {
- (*text_size)--; /* Same room for NUL-termination. */
- const typename out_utf_t::codepoint_t *dst_end = text + *text_size;
-
- while (src < src_end && dst < dst_end)
- {
- const typename in_utf_t::codepoint_t *src_next = in_utf_t::next (src, src_end, &unicode, replacement);
- typename out_utf_t::codepoint_t *dst_next = out_utf_t::encode (dst, dst_end, unicode);
- if (dst_next == dst)
- break; /* Out-of-room. */
-
- dst = dst_next;
- src = src_next;
- }
-
- *text_size = dst - text;
- *dst = 0; /* NUL-terminate. */
- }
-
- /* Accumulate length of rest. */
- unsigned int dst_len = dst - text;
- while (src < src_end)
- {
- src = in_utf_t::next (src, src_end, &unicode, replacement);
- dst_len += out_utf_t::encode_len (unicode);
- }
- return dst_len;
-}
-
template <typename utf_t>
static inline unsigned int
hb_ot_name_get_utf (hb_face_t *face,
@@ -130,10 +84,10 @@ hb_ot_name_get_utf (hb_face_t *face,
hb_bytes_t bytes = name.get_name (idx);
if (width == 2) /* UTF16-BE */
- return hb_ot_name_convert_utf<hb_utf16_be_t, utf_t> (bytes, text_size, text);
+ return OT::hb_ot_name_convert_utf<hb_utf16_be_t, utf_t> (bytes, text_size, text);
if (width == 1) /* ASCII */
- return hb_ot_name_convert_utf<hb_ascii_t, utf_t> (bytes, text_size, text);
+ return OT::hb_ot_name_convert_utf<hb_ascii_t, utf_t> (bytes, text_size, text);
}
if (text_size)
@@ -227,5 +181,4 @@ hb_ot_name_get_utf32 (hb_face_t *face,
return hb_ot_name_get_utf<hb_utf32_t> (face, name_id, language, text_size, text);
}
-
#endif
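
The converter moved into hb-ot-name-table.hh above keeps the existing public semantics: the return value is the full converted length, while *text_size is clamped to what actually fit (reserving one slot for the NUL). A minimal sketch of the usual size-query-then-fill pattern through the public API; the name-id choice and the plain malloc/free handling are assumptions for illustration.

    /* Illustrative only: fetch a name entry as UTF-8 in two calls. */
    #include <hb-ot.h>
    #include <stdlib.h>

    static char *
    get_full_name (hb_face_t *face)
    {
      /* First call: NULL buffer, returns the full length needed. */
      unsigned int len = hb_ot_name_get_utf8 (face, HB_OT_NAME_ID_FULL_NAME,
                                              HB_LANGUAGE_INVALID, nullptr, nullptr);
      unsigned int size = len + 1; /* room for the NUL terminator */
      char *buf = (char *) malloc (size);
      if (!buf) return nullptr;

      /* Second call: fill the buffer; size is updated to the written length. */
      hb_ot_name_get_utf8 (face, HB_OT_NAME_ID_FULL_NAME,
                           HB_LANGUAGE_INVALID, &size, buf);
      return buf; /* caller frees */
    }
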
diff --git a/thirdparty/harfbuzz/src/hb-ot-os2-table.hh b/thirdparty/harfbuzz/src/hb-ot-os2-table.hh
index c6e8fad6fc..5b017d56a6 100644
--- a/thirdparty/harfbuzz/src/hb-ot-os2-table.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-os2-table.hh
@@ -212,17 +212,6 @@ struct OS2
TRACE_SUBSET (this);
OS2 *os2_prime = c->serializer->embed (this);
if (unlikely (!os2_prime)) return_trace (false);
- if (c->plan->flags & HB_SUBSET_FLAGS_NO_PRUNE_UNICODE_RANGES)
- return_trace (true);
-
- /* when --gids option is not used, no need to do collect_mapping that is
- * iterating all codepoints in each subtable, which is not efficient */
- uint16_t min_cp, max_cp;
- find_min_and_max_codepoint (c->plan->unicodes, &min_cp, &max_cp);
- os2_prime->usFirstCharIndex = min_cp;
- os2_prime->usLastCharIndex = max_cp;
-
- _update_unicode_ranges (c->plan->unicodes, os2_prime->ulUnicodeRange);
if (c->plan->user_axes_location->has (HB_TAG ('w','g','h','t')) &&
!c->plan->pinned_at_default)
@@ -244,6 +233,18 @@ struct OS2
return_trace (false);
}
+ if (c->plan->flags & HB_SUBSET_FLAGS_NO_PRUNE_UNICODE_RANGES)
+ return_trace (true);
+
+ /* when --gids option is not used, no need to do collect_mapping that is
+ * iterating all codepoints in each subtable, which is not efficient */
+ uint16_t min_cp, max_cp;
+ find_min_and_max_codepoint (c->plan->unicodes, &min_cp, &max_cp);
+ os2_prime->usFirstCharIndex = min_cp;
+ os2_prime->usLastCharIndex = max_cp;
+
+ _update_unicode_ranges (c->plan->unicodes, os2_prime->ulUnicodeRange);
+
return_trace (true);
}
diff --git a/thirdparty/harfbuzz/src/hb-ot-post-table-v2subset.hh b/thirdparty/harfbuzz/src/hb-ot-post-table-v2subset.hh
index 4d427e5431..951e6395d6 100644
--- a/thirdparty/harfbuzz/src/hb-ot-post-table-v2subset.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-post-table-v2subset.hh
@@ -78,14 +78,14 @@ HB_INTERNAL bool postV2Tail::subset (hb_subset_context_t *c) const
post::accelerator_t _post (c->plan->source);
- hb_hashmap_t<hb_bytes_t, unsigned, true> glyph_name_to_new_index;
+ hb_hashmap_t<hb_bytes_t, uint32_t, true> glyph_name_to_new_index;
for (hb_codepoint_t new_gid = 0; new_gid < num_glyphs; new_gid++)
{
hb_codepoint_t old_gid = reverse_glyph_map.get (new_gid);
unsigned old_index = glyphNameIndex[old_gid];
unsigned new_index;
- const unsigned *new_index2;
+ const uint32_t *new_index2;
if (old_index <= 257) new_index = old_index;
else if (old_new_index_map.has (old_index, &new_index2))
{
diff --git a/thirdparty/harfbuzz/src/hb-ot-post-table.hh b/thirdparty/harfbuzz/src/hb-ot-post-table.hh
index 80d02ffba7..a04b80357b 100644
--- a/thirdparty/harfbuzz/src/hb-ot-post-table.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-post-table.hh
@@ -84,7 +84,7 @@ struct post
post *post_prime = c->allocate_min<post> ();
if (unlikely (!post_prime)) return_trace (false);
- memcpy (post_prime, this, post::min_size);
+ hb_memcpy (post_prime, this, post::min_size);
if (!glyph_names)
return_trace (c->check_assign (post_prime->version.major, 3,
HB_SERIALIZE_ERROR_INT_OVERFLOW)); // Version 3 does not have any glyph names.
@@ -282,7 +282,7 @@ struct post
* 0x00020000 for version 2.0
* 0x00025000 for version 2.5 (deprecated)
* 0x00030000 for version 3.0 */
- HBFixed italicAngle; /* Italic angle in counter-clockwise degrees
+ F16DOT16 italicAngle; /* Italic angle in counter-clockwise degrees
* from the vertical. Zero for upright text,
* negative for text that leans to the right
* (forward). */
diff --git a/thirdparty/harfbuzz/src/hb-ot-shape-normalize.cc b/thirdparty/harfbuzz/src/hb-ot-shape-normalize.cc
index 7db0b25b73..897377aa15 100644
--- a/thirdparty/harfbuzz/src/hb-ot-shape-normalize.cc
+++ b/thirdparty/harfbuzz/src/hb-ot-shape-normalize.cc
@@ -341,7 +341,7 @@ _hb_ot_shape_normalize (const hb_ot_shape_plan_t *plan,
{
unsigned int end;
for (end = buffer->idx + 1; end < count; end++)
- if (unlikely (_hb_glyph_info_is_unicode_mark (&buffer->info[end])))
+ if (_hb_glyph_info_is_unicode_mark (&buffer->info[end]))
break;
if (end < count)
diff --git a/thirdparty/harfbuzz/src/hb-ot-shape.cc b/thirdparty/harfbuzz/src/hb-ot-shape.cc
index 7f679cfd39..bbdfc214a1 100644
--- a/thirdparty/harfbuzz/src/hb-ot-shape.cc
+++ b/thirdparty/harfbuzz/src/hb-ot-shape.cc
@@ -91,9 +91,11 @@ hb_ot_shape_planner_t::hb_ot_shape_planner_t (hb_face_t *fac
script_zero_marks = shaper->zero_width_marks != HB_OT_SHAPE_ZERO_WIDTH_MARKS_NONE;
script_fallback_mark_positioning = shaper->fallback_position;
+#ifndef HB_NO_AAT_SHAPE
/* https://github.com/harfbuzz/harfbuzz/issues/1528 */
if (apply_morx && shaper != &_hb_ot_shaper_default)
shaper = &_hb_ot_shaper_dumber;
+#endif
}
void
@@ -527,18 +529,20 @@ hb_set_unicode_props (hb_buffer_t *buffer)
}
#endif
/* Or part of the Other_Grapheme_Extend that is not marks.
- * As of Unicode 11 that is just:
+ * As of Unicode 15 that is just:
*
* 200C ; Other_Grapheme_Extend # Cf ZERO WIDTH NON-JOINER
* FF9E..FF9F ; Other_Grapheme_Extend # Lm [2] HALFWIDTH KATAKANA VOICED SOUND MARK..HALFWIDTH KATAKANA SEMI-VOICED SOUND MARK
* E0020..E007F ; Other_Grapheme_Extend # Cf [96] TAG SPACE..CANCEL TAG
*
* ZWNJ is special, we don't want to merge it as there's no need, and keeping
- * it separate results in more granular clusters. Ignore Katakana for now.
+ * it separate results in more granular clusters.
* Tags are used for Emoji sub-region flag sequences:
* https://github.com/harfbuzz/harfbuzz/issues/1556
+ * Katakana ones were requested:
+ * https://github.com/harfbuzz/harfbuzz/issues/3844
*/
- else if (unlikely (hb_in_range<hb_codepoint_t> (info[i].codepoint, 0xE0020u, 0xE007Fu)))
+ else if (unlikely (hb_in_ranges<hb_codepoint_t> (info[i].codepoint, 0xFF9Eu, 0xFF9Fu, 0xE0020u, 0xE007Fu)))
_hb_glyph_info_set_continuation (&info[i]);
}
}
@@ -862,7 +866,7 @@ hb_ot_hide_default_ignorables (hb_buffer_t *buffer,
}
}
else
- hb_ot_layout_delete_glyphs_inplace (buffer, _hb_glyph_info_is_default_ignorable);
+ buffer->delete_glyphs_inplace (_hb_glyph_info_is_default_ignorable);
}
diff --git a/thirdparty/harfbuzz/src/hb-ot-shape.hh b/thirdparty/harfbuzz/src/hb-ot-shape.hh
index cd6f15cbe2..ace28602f6 100644
--- a/thirdparty/harfbuzz/src/hb-ot-shape.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-shape.hh
@@ -51,7 +51,7 @@ struct hb_ot_shape_plan_key_t
bool equal (const hb_ot_shape_plan_key_t *other)
{
- return 0 == memcmp (this, other, sizeof (*this));
+ return 0 == hb_memcmp (this, other, sizeof (*this));
}
};
diff --git a/thirdparty/harfbuzz/src/hb-ot-shaper-arabic.cc b/thirdparty/harfbuzz/src/hb-ot-shaper-arabic.cc
index ed2748b828..2332ae3697 100644
--- a/thirdparty/harfbuzz/src/hb-ot-shaper-arabic.cc
+++ b/thirdparty/harfbuzz/src/hb-ot-shaper-arabic.cc
@@ -161,22 +161,23 @@ static const struct arabic_state_table_entry {
};
-static void
+static bool
arabic_fallback_shape (const hb_ot_shape_plan_t *plan,
hb_font_t *font,
hb_buffer_t *buffer);
-static void
+static bool
record_stch (const hb_ot_shape_plan_t *plan,
hb_font_t *font,
hb_buffer_t *buffer);
-static void
+static bool
deallocate_buffer_var (const hb_ot_shape_plan_t *plan,
hb_font_t *font,
hb_buffer_t *buffer)
{
HB_BUFFER_DEALLOCATE_VAR (buffer, arabic_shaping_action);
+ return false;
}
static void
@@ -412,19 +413,19 @@ setup_masks_arabic (const hb_ot_shape_plan_t *plan,
setup_masks_arabic_plan (arabic_plan, buffer, plan->props.script);
}
-static void
+static bool
arabic_fallback_shape (const hb_ot_shape_plan_t *plan,
hb_font_t *font,
hb_buffer_t *buffer)
{
#ifdef HB_NO_OT_SHAPER_ARABIC_FALLBACK
- return;
+ return false;
#endif
const arabic_shape_plan_t *arabic_plan = (const arabic_shape_plan_t *) plan->data;
if (!arabic_plan->do_fallback)
- return;
+ return false;
retry:
arabic_fallback_plan_t *fallback_plan = arabic_plan->fallback_plan;
@@ -440,6 +441,7 @@ retry:
}
arabic_fallback_plan_shape (fallback_plan, font, buffer);
+ return true;
}
/*
@@ -450,14 +452,14 @@ retry:
* marks can use it as well.
*/
-static void
+static bool
record_stch (const hb_ot_shape_plan_t *plan,
hb_font_t *font HB_UNUSED,
hb_buffer_t *buffer)
{
const arabic_shape_plan_t *arabic_plan = (const arabic_shape_plan_t *) plan->data;
if (!arabic_plan->has_stch)
- return;
+ return false;
/* 'stch' feature was just applied. Look for anything that multiplied,
* and record it for stch treatment later. Note that rtlm, frac, etc
@@ -473,6 +475,7 @@ record_stch (const hb_ot_shape_plan_t *plan,
info[i].arabic_shaping_action() = comp % 2 ? STCH_REPEATING : STCH_FIXED;
buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_ARABIC_HAS_STCH;
}
+ return false;
}
static void
diff --git a/thirdparty/harfbuzz/src/hb-ot-shaper-default.cc b/thirdparty/harfbuzz/src/hb-ot-shaper-default.cc
index 2f6f499eec..f0404a4d2c 100644
--- a/thirdparty/harfbuzz/src/hb-ot-shaper-default.cc
+++ b/thirdparty/harfbuzz/src/hb-ot-shaper-default.cc
@@ -49,6 +49,7 @@ const hb_ot_shaper_t _hb_ot_shaper_default =
true, /* fallback_position */
};
+#ifndef HB_NO_AAT_SHAPE
/* Same as default but no mark advance zeroing / fallback positioning.
* Dumbest shaper ever, basically. */
const hb_ot_shaper_t _hb_ot_shaper_dumber =
@@ -68,6 +69,7 @@ const hb_ot_shaper_t _hb_ot_shaper_dumber =
HB_OT_SHAPE_ZERO_WIDTH_MARKS_NONE,
false, /* fallback_position */
};
+#endif
#endif
diff --git a/thirdparty/harfbuzz/src/hb-ot-shaper-indic-machine.hh b/thirdparty/harfbuzz/src/hb-ot-shaper-indic-machine.hh
index d52b13f616..d6c67b81bd 100644
--- a/thirdparty/harfbuzz/src/hb-ot-shaper-indic-machine.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-shaper-indic-machine.hh
@@ -53,7 +53,7 @@ enum indic_syllable_type_t {
};
-#line 57 "hb-ot-shaper-indic-machine.hh"
+#line 54 "hb-ot-shaper-indic-machine.hh"
#define indic_syllable_machine_ex_A 9u
#define indic_syllable_machine_ex_C 1u
#define indic_syllable_machine_ex_CM 16u
@@ -61,6 +61,7 @@ enum indic_syllable_type_t {
#define indic_syllable_machine_ex_DOTTEDCIRCLE 11u
#define indic_syllable_machine_ex_H 4u
#define indic_syllable_machine_ex_M 7u
+#define indic_syllable_machine_ex_MPst 13u
#define indic_syllable_machine_ex_N 3u
#define indic_syllable_machine_ex_PLACEHOLDER 10u
#define indic_syllable_machine_ex_RS 12u
@@ -75,268 +76,302 @@ enum indic_syllable_type_t {
#define indic_syllable_machine_ex_ZWNJ 5u
-#line 79 "hb-ot-shaper-indic-machine.hh"
+#line 75 "hb-ot-shaper-indic-machine.hh"
static const unsigned char _indic_syllable_machine_trans_keys[] = {
- 8u, 8u, 4u, 8u, 5u, 7u, 5u, 8u, 4u, 8u, 4u, 12u, 4u, 8u, 8u, 8u,
- 5u, 7u, 5u, 8u, 4u, 8u, 4u, 12u, 4u, 12u, 4u, 12u, 8u, 8u, 5u, 7u,
- 5u, 8u, 4u, 8u, 4u, 8u, 4u, 12u, 8u, 8u, 5u, 7u, 5u, 8u, 4u, 8u,
- 4u, 8u, 5u, 8u, 8u, 8u, 1u, 18u, 3u, 16u, 3u, 16u, 4u, 16u, 1u, 15u,
- 5u, 9u, 5u, 9u, 9u, 9u, 5u, 9u, 1u, 15u, 1u, 15u, 1u, 15u, 3u, 9u,
- 4u, 9u, 5u, 9u, 4u, 9u, 5u, 9u, 3u, 9u, 5u, 9u, 3u, 16u, 3u, 16u,
- 3u, 16u, 3u, 16u, 4u, 16u, 1u, 15u, 3u, 16u, 3u, 16u, 4u, 16u, 1u, 15u,
- 5u, 9u, 9u, 9u, 5u, 9u, 1u, 15u, 1u, 15u, 3u, 9u, 4u, 9u, 5u, 9u,
- 4u, 9u, 5u, 9u, 5u, 9u, 3u, 9u, 5u, 9u, 3u, 16u, 3u, 16u, 4u, 8u,
- 3u, 16u, 3u, 16u, 4u, 16u, 1u, 15u, 3u, 16u, 1u, 15u, 5u, 9u, 9u, 9u,
- 5u, 9u, 1u, 15u, 1u, 15u, 3u, 9u, 4u, 9u, 5u, 9u, 3u, 16u, 4u, 9u,
- 5u, 9u, 5u, 9u, 3u, 9u, 5u, 9u, 3u, 16u, 4u, 12u, 4u, 8u, 3u, 16u,
- 3u, 16u, 4u, 16u, 1u, 15u, 3u, 16u, 1u, 15u, 5u, 9u, 9u, 9u, 5u, 9u,
- 1u, 15u, 1u, 15u, 3u, 9u, 4u, 9u, 5u, 9u, 3u, 16u, 4u, 9u, 5u, 9u,
- 5u, 9u, 3u, 9u, 5u, 9u, 1u, 16u, 3u, 16u, 1u, 16u, 4u, 12u, 5u, 9u,
- 9u, 9u, 5u, 9u, 1u, 15u, 3u, 9u, 5u, 9u, 5u, 9u, 9u, 9u, 5u, 9u,
- 1u, 15u, 0
+ 8u, 8u, 4u, 13u, 5u, 13u, 5u, 13u, 13u, 13u, 4u, 13u, 4u, 13u, 4u, 13u,
+ 8u, 8u, 5u, 13u, 5u, 13u, 13u, 13u, 4u, 13u, 4u, 13u, 4u, 13u, 4u, 13u,
+ 8u, 8u, 5u, 13u, 5u, 13u, 13u, 13u, 4u, 13u, 4u, 13u, 4u, 13u, 8u, 8u,
+ 5u, 13u, 5u, 13u, 13u, 13u, 4u, 13u, 4u, 13u, 5u, 13u, 8u, 8u, 1u, 18u,
+ 3u, 16u, 3u, 16u, 4u, 16u, 1u, 15u, 5u, 9u, 5u, 9u, 9u, 9u, 5u, 9u,
+ 1u, 15u, 1u, 15u, 1u, 15u, 3u, 13u, 4u, 13u, 5u, 13u, 5u, 13u, 4u, 13u,
+ 5u, 9u, 3u, 9u, 5u, 9u, 3u, 16u, 3u, 16u, 3u, 16u, 3u, 16u, 4u, 16u,
+ 1u, 15u, 3u, 16u, 3u, 16u, 4u, 16u, 1u, 15u, 5u, 9u, 9u, 9u, 5u, 9u,
+ 1u, 15u, 1u, 15u, 3u, 13u, 4u, 13u, 5u, 13u, 5u, 13u, 4u, 13u, 5u, 9u,
+ 5u, 9u, 3u, 9u, 5u, 9u, 3u, 16u, 3u, 16u, 4u, 13u, 3u, 16u, 3u, 16u,
+ 4u, 16u, 1u, 15u, 3u, 16u, 1u, 15u, 5u, 9u, 9u, 9u, 5u, 9u, 1u, 15u,
+ 1u, 15u, 3u, 13u, 4u, 13u, 5u, 13u, 5u, 13u, 3u, 16u, 4u, 13u, 5u, 9u,
+ 5u, 9u, 3u, 9u, 5u, 9u, 3u, 16u, 4u, 13u, 4u, 13u, 3u, 16u, 3u, 16u,
+ 4u, 16u, 1u, 15u, 3u, 16u, 1u, 15u, 5u, 9u, 9u, 9u, 5u, 9u, 1u, 15u,
+ 1u, 15u, 3u, 13u, 4u, 13u, 5u, 13u, 5u, 13u, 3u, 16u, 4u, 13u, 5u, 9u,
+ 5u, 9u, 3u, 9u, 5u, 9u, 1u, 16u, 3u, 16u, 1u, 16u, 4u, 13u, 5u, 13u,
+ 5u, 13u, 9u, 9u, 5u, 9u, 1u, 15u, 3u, 9u, 5u, 9u, 5u, 9u, 9u, 9u,
+ 5u, 9u, 1u, 15u, 0
};
static const char _indic_syllable_machine_key_spans[] = {
- 1, 5, 3, 4, 5, 9, 5, 1,
- 3, 4, 5, 9, 9, 9, 1, 3,
- 4, 5, 5, 9, 1, 3, 4, 5,
- 5, 4, 1, 18, 14, 14, 13, 15,
- 5, 5, 1, 5, 15, 15, 15, 7,
- 6, 5, 6, 5, 7, 5, 14, 14,
- 14, 14, 13, 15, 14, 14, 13, 15,
- 5, 1, 5, 15, 15, 7, 6, 5,
- 6, 5, 5, 7, 5, 14, 14, 5,
- 14, 14, 13, 15, 14, 15, 5, 1,
- 5, 15, 15, 7, 6, 5, 14, 6,
- 5, 5, 7, 5, 14, 9, 5, 14,
- 14, 13, 15, 14, 15, 5, 1, 5,
- 15, 15, 7, 6, 5, 14, 6, 5,
- 5, 7, 5, 16, 14, 16, 9, 5,
- 1, 5, 15, 7, 5, 5, 1, 5,
- 15
+ 1, 10, 9, 9, 1, 10, 10, 10,
+ 1, 9, 9, 1, 10, 10, 10, 10,
+ 1, 9, 9, 1, 10, 10, 10, 1,
+ 9, 9, 1, 10, 10, 9, 1, 18,
+ 14, 14, 13, 15, 5, 5, 1, 5,
+ 15, 15, 15, 11, 10, 9, 9, 10,
+ 5, 7, 5, 14, 14, 14, 14, 13,
+ 15, 14, 14, 13, 15, 5, 1, 5,
+ 15, 15, 11, 10, 9, 9, 10, 5,
+ 5, 7, 5, 14, 14, 10, 14, 14,
+ 13, 15, 14, 15, 5, 1, 5, 15,
+ 15, 11, 10, 9, 9, 14, 10, 5,
+ 5, 7, 5, 14, 10, 10, 14, 14,
+ 13, 15, 14, 15, 5, 1, 5, 15,
+ 15, 11, 10, 9, 9, 14, 10, 5,
+ 5, 7, 5, 16, 14, 16, 10, 9,
+ 9, 1, 5, 15, 7, 5, 5, 1,
+ 5, 15
};
static const short _indic_syllable_machine_index_offsets[] = {
- 0, 2, 8, 12, 17, 23, 33, 39,
- 41, 45, 50, 56, 66, 76, 86, 88,
- 92, 97, 103, 109, 119, 121, 125, 130,
- 136, 142, 147, 149, 168, 183, 198, 212,
- 228, 234, 240, 242, 248, 264, 280, 296,
- 304, 311, 317, 324, 330, 338, 344, 359,
- 374, 389, 404, 418, 434, 449, 464, 478,
- 494, 500, 502, 508, 524, 540, 548, 555,
- 561, 568, 574, 580, 588, 594, 609, 624,
- 630, 645, 660, 674, 690, 705, 721, 727,
- 729, 735, 751, 767, 775, 782, 788, 803,
- 810, 816, 822, 830, 836, 851, 861, 867,
- 882, 897, 911, 927, 942, 958, 964, 966,
- 972, 988, 1004, 1012, 1019, 1025, 1040, 1047,
- 1053, 1059, 1067, 1073, 1090, 1105, 1122, 1132,
- 1138, 1140, 1146, 1162, 1170, 1176, 1182, 1184,
- 1190
+ 0, 2, 13, 23, 33, 35, 46, 57,
+ 68, 70, 80, 90, 92, 103, 114, 125,
+ 136, 138, 148, 158, 160, 171, 182, 193,
+ 195, 205, 215, 217, 228, 239, 249, 251,
+ 270, 285, 300, 314, 330, 336, 342, 344,
+ 350, 366, 382, 398, 410, 421, 431, 441,
+ 452, 458, 466, 472, 487, 502, 517, 532,
+ 546, 562, 577, 592, 606, 622, 628, 630,
+ 636, 652, 668, 680, 691, 701, 711, 722,
+ 728, 734, 742, 748, 763, 778, 789, 804,
+ 819, 833, 849, 864, 880, 886, 888, 894,
+ 910, 926, 938, 949, 959, 969, 984, 995,
+ 1001, 1007, 1015, 1021, 1036, 1047, 1058, 1073,
+ 1088, 1102, 1118, 1133, 1149, 1155, 1157, 1163,
+ 1179, 1195, 1207, 1218, 1228, 1238, 1253, 1264,
+ 1270, 1276, 1284, 1290, 1307, 1322, 1339, 1350,
+ 1360, 1370, 1372, 1378, 1394, 1402, 1408, 1414,
+ 1416, 1422
};
static const unsigned char _indic_syllable_machine_indicies[] = {
- 1, 0, 2, 3, 3, 4, 1, 0,
- 3, 3, 4, 0, 3, 3, 4, 1,
- 0, 5, 3, 3, 4, 1, 0, 2,
- 3, 3, 4, 1, 0, 0, 0, 6,
- 0, 8, 9, 9, 10, 11, 7, 11,
- 7, 9, 9, 10, 7, 9, 9, 10,
- 11, 7, 12, 9, 9, 10, 11, 7,
- 8, 9, 9, 10, 11, 7, 7, 7,
- 13, 7, 8, 9, 9, 10, 11, 7,
- 7, 7, 14, 7, 16, 17, 17, 18,
- 19, 15, 15, 15, 20, 15, 19, 15,
- 17, 17, 18, 21, 17, 17, 18, 19,
- 15, 16, 17, 17, 18, 19, 15, 22,
- 17, 17, 18, 19, 15, 24, 25, 25,
- 26, 27, 23, 23, 23, 28, 23, 27,
- 23, 25, 25, 26, 23, 25, 25, 26,
- 27, 23, 24, 25, 25, 26, 27, 23,
- 29, 25, 25, 26, 27, 23, 17, 17,
- 18, 1, 0, 31, 30, 33, 34, 35,
- 36, 37, 38, 18, 19, 39, 40, 40,
- 20, 32, 41, 42, 43, 44, 45, 32,
- 47, 48, 49, 50, 4, 1, 51, 46,
- 46, 6, 46, 46, 46, 52, 46, 53,
- 48, 54, 54, 4, 1, 51, 46, 46,
- 46, 46, 46, 46, 52, 46, 48, 54,
- 54, 4, 1, 51, 46, 46, 46, 46,
- 46, 46, 52, 46, 33, 46, 46, 46,
- 55, 56, 46, 1, 51, 46, 46, 46,
- 46, 46, 33, 46, 57, 57, 46, 1,
- 51, 46, 51, 46, 46, 58, 51, 46,
- 51, 46, 51, 46, 46, 46, 51, 46,
- 33, 46, 59, 46, 57, 57, 46, 1,
- 51, 46, 46, 46, 46, 46, 33, 46,
- 33, 46, 46, 46, 57, 57, 46, 1,
- 51, 46, 46, 46, 46, 46, 33, 46,
- 33, 46, 46, 46, 57, 56, 46, 1,
- 51, 46, 46, 46, 46, 46, 33, 46,
- 60, 61, 62, 62, 4, 1, 51, 46,
- 61, 62, 62, 4, 1, 51, 46, 62,
- 62, 4, 1, 51, 46, 63, 64, 64,
- 4, 1, 51, 46, 55, 65, 46, 1,
- 51, 46, 55, 46, 57, 57, 46, 1,
- 51, 46, 57, 65, 46, 1, 51, 46,
- 47, 48, 54, 54, 4, 1, 51, 46,
- 46, 46, 46, 46, 46, 52, 46, 47,
- 48, 49, 54, 4, 1, 51, 46, 46,
- 6, 46, 46, 46, 52, 46, 67, 68,
- 69, 70, 10, 11, 71, 66, 66, 14,
- 66, 66, 66, 72, 66, 73, 68, 74,
- 70, 10, 11, 71, 66, 66, 66, 66,
- 66, 66, 72, 66, 68, 74, 70, 10,
- 11, 71, 66, 66, 66, 66, 66, 66,
- 72, 66, 75, 66, 66, 66, 76, 77,
- 66, 11, 71, 66, 66, 66, 66, 66,
- 75, 66, 78, 68, 79, 80, 10, 11,
- 71, 66, 66, 13, 66, 66, 66, 72,
- 66, 81, 68, 74, 74, 10, 11, 71,
- 66, 66, 66, 66, 66, 66, 72, 66,
- 68, 74, 74, 10, 11, 71, 66, 66,
- 66, 66, 66, 66, 72, 66, 75, 66,
- 66, 66, 82, 77, 66, 11, 71, 66,
- 66, 66, 66, 66, 75, 66, 71, 66,
- 66, 83, 71, 66, 71, 66, 71, 66,
- 66, 66, 71, 66, 75, 66, 84, 66,
- 82, 82, 66, 11, 71, 66, 66, 66,
- 66, 66, 75, 66, 75, 66, 66, 66,
- 82, 82, 66, 11, 71, 66, 66, 66,
- 66, 66, 75, 66, 85, 86, 87, 87,
- 10, 11, 71, 66, 86, 87, 87, 10,
- 11, 71, 66, 87, 87, 10, 11, 71,
- 66, 88, 89, 89, 10, 11, 71, 66,
- 76, 90, 66, 11, 71, 66, 82, 82,
- 66, 11, 71, 66, 76, 66, 82, 82,
- 66, 11, 71, 66, 82, 90, 66, 11,
- 71, 66, 78, 68, 74, 74, 10, 11,
- 71, 66, 66, 66, 66, 66, 66, 72,
- 66, 78, 68, 79, 74, 10, 11, 71,
- 66, 66, 13, 66, 66, 66, 72, 66,
- 8, 9, 9, 10, 11, 66, 67, 68,
- 74, 70, 10, 11, 71, 66, 66, 66,
- 66, 66, 66, 72, 66, 92, 36, 93,
- 93, 18, 19, 39, 91, 91, 91, 91,
- 91, 91, 43, 91, 36, 93, 93, 18,
- 19, 39, 91, 91, 91, 91, 91, 91,
- 43, 91, 94, 91, 91, 91, 95, 96,
- 91, 19, 39, 91, 91, 91, 91, 91,
- 94, 91, 35, 36, 97, 98, 18, 19,
- 39, 91, 91, 20, 91, 91, 91, 43,
- 91, 94, 91, 91, 91, 99, 96, 91,
- 19, 39, 91, 91, 91, 91, 91, 94,
- 91, 39, 91, 91, 100, 39, 91, 39,
- 91, 39, 91, 91, 91, 39, 91, 94,
- 91, 101, 91, 99, 99, 91, 19, 39,
- 91, 91, 91, 91, 91, 94, 91, 94,
- 91, 91, 91, 99, 99, 91, 19, 39,
- 91, 91, 91, 91, 91, 94, 91, 102,
- 103, 104, 104, 18, 19, 39, 91, 103,
- 104, 104, 18, 19, 39, 91, 104, 104,
- 18, 19, 39, 91, 35, 36, 93, 93,
- 18, 19, 39, 91, 91, 91, 91, 91,
- 91, 43, 91, 105, 106, 106, 18, 19,
- 39, 91, 95, 107, 91, 19, 39, 91,
- 99, 99, 91, 19, 39, 91, 95, 91,
- 99, 99, 91, 19, 39, 91, 99, 107,
- 91, 19, 39, 91, 35, 36, 97, 93,
- 18, 19, 39, 91, 91, 20, 91, 91,
- 91, 43, 91, 16, 17, 17, 18, 19,
- 108, 108, 108, 20, 108, 16, 17, 17,
- 18, 19, 108, 110, 111, 112, 113, 26,
- 27, 114, 109, 109, 28, 109, 109, 109,
- 115, 109, 116, 111, 113, 113, 26, 27,
- 114, 109, 109, 109, 109, 109, 109, 115,
- 109, 111, 113, 113, 26, 27, 114, 109,
- 109, 109, 109, 109, 109, 115, 109, 117,
- 109, 109, 109, 118, 119, 109, 27, 114,
- 109, 109, 109, 109, 109, 117, 109, 110,
- 111, 112, 40, 26, 27, 114, 109, 109,
- 28, 109, 109, 109, 115, 109, 117, 109,
- 109, 109, 120, 119, 109, 27, 114, 109,
- 109, 109, 109, 109, 117, 109, 114, 109,
- 109, 121, 114, 109, 114, 109, 114, 109,
- 109, 109, 114, 109, 117, 109, 122, 109,
- 120, 120, 109, 27, 114, 109, 109, 109,
- 109, 109, 117, 109, 117, 109, 109, 109,
- 120, 120, 109, 27, 114, 109, 109, 109,
- 109, 109, 117, 109, 123, 124, 125, 125,
- 26, 27, 114, 109, 124, 125, 125, 26,
- 27, 114, 109, 125, 125, 26, 27, 114,
- 109, 110, 111, 113, 113, 26, 27, 114,
- 109, 109, 109, 109, 109, 109, 115, 109,
- 126, 127, 127, 26, 27, 114, 109, 118,
- 128, 109, 27, 114, 109, 120, 120, 109,
- 27, 114, 109, 118, 109, 120, 120, 109,
- 27, 114, 109, 120, 128, 109, 27, 114,
- 109, 33, 34, 35, 36, 97, 93, 18,
- 19, 39, 40, 40, 20, 91, 91, 33,
- 43, 91, 47, 129, 49, 50, 4, 1,
- 51, 46, 46, 6, 46, 46, 46, 52,
- 46, 33, 34, 35, 36, 130, 131, 18,
- 132, 133, 46, 40, 20, 46, 46, 33,
- 43, 46, 16, 134, 134, 18, 132, 51,
- 46, 46, 20, 46, 133, 46, 46, 135,
- 133, 46, 133, 46, 133, 46, 46, 46,
- 133, 46, 33, 46, 59, 16, 134, 134,
- 18, 132, 51, 46, 46, 46, 46, 46,
- 33, 46, 137, 136, 138, 138, 136, 31,
- 139, 136, 138, 138, 136, 31, 139, 136,
- 139, 136, 136, 140, 139, 136, 139, 136,
- 139, 136, 136, 136, 139, 136, 33, 108,
- 108, 108, 108, 108, 108, 108, 108, 40,
- 108, 108, 108, 108, 33, 108, 0
+ 1, 0, 2, 3, 3, 4, 5, 0,
+ 0, 0, 0, 4, 0, 3, 3, 4,
+ 6, 0, 0, 0, 0, 4, 0, 3,
+ 3, 4, 5, 0, 0, 0, 0, 4,
+ 0, 4, 0, 7, 3, 3, 4, 5,
+ 0, 0, 0, 0, 4, 0, 2, 3,
+ 3, 4, 5, 0, 0, 0, 8, 4,
+ 0, 10, 11, 11, 12, 13, 9, 9,
+ 9, 9, 12, 9, 14, 9, 11, 11,
+ 12, 15, 9, 9, 9, 9, 12, 9,
+ 11, 11, 12, 13, 9, 9, 9, 9,
+ 12, 9, 12, 9, 16, 11, 11, 12,
+ 13, 9, 9, 9, 9, 12, 9, 10,
+ 11, 11, 12, 13, 9, 9, 9, 17,
+ 12, 9, 10, 11, 11, 12, 13, 9,
+ 9, 9, 18, 12, 9, 20, 21, 21,
+ 22, 23, 19, 19, 19, 24, 22, 19,
+ 25, 19, 21, 21, 22, 27, 26, 26,
+ 26, 26, 22, 26, 21, 21, 22, 23,
+ 19, 19, 19, 19, 22, 19, 22, 26,
+ 20, 21, 21, 22, 23, 19, 19, 19,
+ 19, 22, 19, 28, 21, 21, 22, 23,
+ 19, 19, 19, 19, 22, 19, 30, 31,
+ 31, 32, 33, 29, 29, 29, 34, 32,
+ 29, 35, 29, 31, 31, 32, 36, 29,
+ 29, 29, 29, 32, 29, 31, 31, 32,
+ 33, 29, 29, 29, 29, 32, 29, 32,
+ 29, 30, 31, 31, 32, 33, 29, 29,
+ 29, 29, 32, 29, 37, 31, 31, 32,
+ 33, 29, 29, 29, 29, 32, 29, 21,
+ 21, 22, 38, 0, 0, 0, 0, 22,
+ 0, 40, 39, 42, 43, 44, 45, 46,
+ 47, 22, 23, 48, 49, 49, 24, 22,
+ 50, 51, 52, 53, 54, 41, 56, 57,
+ 58, 59, 4, 5, 60, 55, 55, 8,
+ 4, 55, 55, 61, 55, 62, 57, 63,
+ 63, 4, 5, 60, 55, 55, 55, 4,
+ 55, 55, 61, 55, 57, 63, 63, 4,
+ 5, 60, 55, 55, 55, 4, 55, 55,
+ 61, 55, 42, 55, 55, 55, 64, 65,
+ 55, 1, 60, 55, 55, 55, 55, 55,
+ 42, 55, 66, 66, 55, 1, 60, 55,
+ 60, 55, 55, 67, 60, 55, 60, 55,
+ 60, 55, 55, 55, 60, 55, 42, 55,
+ 68, 55, 66, 66, 55, 1, 60, 55,
+ 55, 55, 55, 55, 42, 55, 42, 55,
+ 55, 55, 66, 66, 55, 1, 60, 55,
+ 55, 55, 55, 55, 42, 55, 42, 55,
+ 55, 55, 66, 65, 55, 1, 60, 55,
+ 55, 55, 55, 55, 42, 55, 69, 70,
+ 71, 71, 4, 5, 60, 55, 55, 55,
+ 4, 55, 70, 71, 71, 4, 5, 60,
+ 55, 55, 55, 4, 55, 71, 71, 4,
+ 5, 60, 55, 55, 55, 4, 55, 60,
+ 55, 55, 67, 60, 55, 55, 55, 4,
+ 55, 72, 73, 73, 4, 5, 60, 55,
+ 55, 55, 4, 55, 64, 74, 55, 1,
+ 60, 55, 64, 55, 66, 66, 55, 1,
+ 60, 55, 66, 74, 55, 1, 60, 55,
+ 56, 57, 63, 63, 4, 5, 60, 55,
+ 55, 55, 4, 55, 55, 61, 55, 56,
+ 57, 58, 63, 4, 5, 60, 55, 55,
+ 8, 4, 55, 55, 61, 55, 76, 77,
+ 78, 79, 12, 13, 80, 75, 75, 18,
+ 12, 75, 75, 81, 75, 82, 77, 83,
+ 79, 12, 13, 80, 75, 75, 75, 12,
+ 75, 75, 81, 75, 77, 83, 79, 12,
+ 13, 80, 75, 75, 75, 12, 75, 75,
+ 81, 75, 84, 75, 75, 75, 85, 86,
+ 75, 14, 80, 75, 75, 75, 75, 75,
+ 84, 75, 87, 77, 88, 89, 12, 13,
+ 80, 75, 75, 17, 12, 75, 75, 81,
+ 75, 90, 77, 83, 83, 12, 13, 80,
+ 75, 75, 75, 12, 75, 75, 81, 75,
+ 77, 83, 83, 12, 13, 80, 75, 75,
+ 75, 12, 75, 75, 81, 75, 84, 75,
+ 75, 75, 91, 86, 75, 14, 80, 75,
+ 75, 75, 75, 75, 84, 75, 80, 75,
+ 75, 92, 80, 75, 80, 75, 80, 75,
+ 75, 75, 80, 75, 84, 75, 93, 75,
+ 91, 91, 75, 14, 80, 75, 75, 75,
+ 75, 75, 84, 75, 84, 75, 75, 75,
+ 91, 91, 75, 14, 80, 75, 75, 75,
+ 75, 75, 84, 75, 94, 95, 96, 96,
+ 12, 13, 80, 75, 75, 75, 12, 75,
+ 95, 96, 96, 12, 13, 80, 75, 75,
+ 75, 12, 75, 96, 96, 12, 13, 80,
+ 75, 75, 75, 12, 75, 80, 75, 75,
+ 92, 80, 75, 75, 75, 12, 75, 97,
+ 98, 98, 12, 13, 80, 75, 75, 75,
+ 12, 75, 85, 99, 75, 14, 80, 75,
+ 91, 91, 75, 14, 80, 75, 85, 75,
+ 91, 91, 75, 14, 80, 75, 91, 99,
+ 75, 14, 80, 75, 87, 77, 83, 83,
+ 12, 13, 80, 75, 75, 75, 12, 75,
+ 75, 81, 75, 87, 77, 88, 83, 12,
+ 13, 80, 75, 75, 17, 12, 75, 75,
+ 81, 75, 10, 11, 11, 12, 13, 75,
+ 75, 75, 75, 12, 75, 76, 77, 83,
+ 79, 12, 13, 80, 75, 75, 75, 12,
+ 75, 75, 81, 75, 101, 45, 102, 102,
+ 22, 23, 48, 100, 100, 100, 22, 100,
+ 100, 52, 100, 45, 102, 102, 22, 23,
+ 48, 100, 100, 100, 22, 100, 100, 52,
+ 100, 103, 100, 100, 100, 104, 105, 100,
+ 25, 48, 100, 100, 100, 100, 100, 103,
+ 100, 44, 45, 106, 107, 22, 23, 48,
+ 100, 100, 24, 22, 100, 100, 52, 100,
+ 103, 100, 100, 100, 108, 105, 100, 25,
+ 48, 100, 100, 100, 100, 100, 103, 100,
+ 48, 100, 100, 109, 48, 100, 48, 100,
+ 48, 100, 100, 100, 48, 100, 103, 100,
+ 110, 100, 108, 108, 100, 25, 48, 100,
+ 100, 100, 100, 100, 103, 100, 103, 100,
+ 100, 100, 108, 108, 100, 25, 48, 100,
+ 100, 100, 100, 100, 103, 100, 111, 112,
+ 113, 113, 22, 23, 48, 100, 100, 100,
+ 22, 100, 112, 113, 113, 22, 23, 48,
+ 100, 100, 100, 22, 100, 113, 113, 22,
+ 23, 48, 100, 100, 100, 22, 100, 48,
+ 100, 100, 109, 48, 100, 100, 100, 22,
+ 100, 44, 45, 102, 102, 22, 23, 48,
+ 100, 100, 100, 22, 100, 100, 52, 100,
+ 114, 115, 115, 22, 23, 48, 100, 100,
+ 100, 22, 100, 104, 116, 100, 25, 48,
+ 100, 108, 108, 100, 25, 48, 100, 104,
+ 100, 108, 108, 100, 25, 48, 100, 108,
+ 116, 100, 25, 48, 100, 44, 45, 106,
+ 102, 22, 23, 48, 100, 100, 24, 22,
+ 100, 100, 52, 100, 20, 21, 21, 22,
+ 23, 117, 117, 117, 24, 22, 117, 20,
+ 21, 21, 22, 23, 117, 117, 117, 117,
+ 22, 117, 119, 120, 121, 122, 32, 33,
+ 123, 118, 118, 34, 32, 118, 118, 124,
+ 118, 125, 120, 122, 122, 32, 33, 123,
+ 118, 118, 118, 32, 118, 118, 124, 118,
+ 120, 122, 122, 32, 33, 123, 118, 118,
+ 118, 32, 118, 118, 124, 118, 126, 118,
+ 118, 118, 127, 128, 118, 35, 123, 118,
+ 118, 118, 118, 118, 126, 118, 119, 120,
+ 121, 49, 32, 33, 123, 118, 118, 34,
+ 32, 118, 118, 124, 118, 126, 118, 118,
+ 118, 129, 128, 118, 35, 123, 118, 118,
+ 118, 118, 118, 126, 118, 123, 118, 118,
+ 130, 123, 118, 123, 118, 123, 118, 118,
+ 118, 123, 118, 126, 118, 131, 118, 129,
+ 129, 118, 35, 123, 118, 118, 118, 118,
+ 118, 126, 118, 126, 118, 118, 118, 129,
+ 129, 118, 35, 123, 118, 118, 118, 118,
+ 118, 126, 118, 132, 133, 134, 134, 32,
+ 33, 123, 118, 118, 118, 32, 118, 133,
+ 134, 134, 32, 33, 123, 118, 118, 118,
+ 32, 118, 134, 134, 32, 33, 123, 118,
+ 118, 118, 32, 118, 123, 118, 118, 130,
+ 123, 118, 118, 118, 32, 118, 119, 120,
+ 122, 122, 32, 33, 123, 118, 118, 118,
+ 32, 118, 118, 124, 118, 135, 136, 136,
+ 32, 33, 123, 118, 118, 118, 32, 118,
+ 127, 137, 118, 35, 123, 118, 129, 129,
+ 118, 35, 123, 118, 127, 118, 129, 129,
+ 118, 35, 123, 118, 129, 137, 118, 35,
+ 123, 118, 42, 43, 44, 45, 106, 102,
+ 22, 23, 48, 49, 49, 24, 22, 100,
+ 42, 52, 100, 56, 138, 58, 59, 4,
+ 5, 60, 55, 55, 8, 4, 55, 55,
+ 61, 55, 42, 43, 44, 45, 139, 140,
+ 22, 141, 142, 55, 49, 24, 22, 55,
+ 42, 52, 55, 20, 143, 143, 22, 141,
+ 60, 55, 55, 24, 22, 55, 60, 55,
+ 55, 67, 60, 55, 55, 55, 22, 55,
+ 142, 55, 55, 144, 142, 55, 55, 55,
+ 22, 55, 142, 55, 142, 55, 55, 55,
+ 142, 55, 42, 55, 68, 20, 143, 143,
+ 22, 141, 60, 55, 55, 55, 22, 55,
+ 42, 55, 146, 145, 147, 147, 145, 40,
+ 148, 145, 147, 147, 145, 40, 148, 145,
+ 148, 145, 145, 149, 148, 145, 148, 145,
+ 148, 145, 145, 145, 148, 145, 42, 117,
+ 117, 117, 117, 117, 117, 117, 117, 49,
+ 117, 117, 117, 117, 42, 117, 0
};
static const unsigned char _indic_syllable_machine_trans_targs[] = {
- 27, 33, 38, 2, 39, 45, 46, 27,
- 55, 8, 61, 56, 68, 69, 72, 27,
- 77, 15, 83, 78, 86, 27, 91, 27,
- 100, 21, 106, 101, 109, 114, 27, 125,
- 27, 28, 48, 73, 75, 93, 94, 79,
- 95, 115, 116, 87, 123, 128, 27, 29,
- 31, 5, 47, 34, 42, 30, 1, 32,
- 36, 0, 35, 37, 40, 41, 3, 43,
- 4, 44, 27, 49, 51, 12, 71, 57,
- 64, 50, 6, 52, 66, 59, 53, 11,
- 70, 54, 7, 58, 60, 62, 63, 9,
- 65, 10, 67, 27, 74, 17, 76, 89,
- 81, 13, 92, 14, 80, 82, 84, 85,
- 16, 88, 18, 90, 27, 27, 96, 98,
- 19, 23, 102, 110, 97, 99, 112, 104,
- 20, 103, 105, 107, 108, 22, 111, 24,
- 113, 117, 118, 122, 119, 120, 25, 121,
- 27, 124, 26, 126, 127
+ 31, 37, 42, 2, 43, 46, 4, 50,
+ 51, 31, 60, 9, 66, 69, 61, 11,
+ 74, 75, 78, 31, 83, 17, 89, 92,
+ 93, 84, 31, 19, 98, 31, 107, 24,
+ 113, 116, 117, 108, 26, 122, 127, 31,
+ 134, 31, 32, 53, 79, 81, 100, 101,
+ 85, 102, 123, 124, 94, 132, 137, 31,
+ 33, 35, 6, 52, 38, 47, 34, 1,
+ 36, 40, 0, 39, 41, 44, 45, 3,
+ 48, 5, 49, 31, 54, 56, 14, 77,
+ 62, 70, 55, 7, 57, 72, 64, 58,
+ 13, 76, 59, 8, 63, 65, 67, 68,
+ 10, 71, 12, 73, 31, 80, 20, 82,
+ 96, 87, 15, 99, 16, 86, 88, 90,
+ 91, 18, 95, 21, 97, 31, 31, 103,
+ 105, 22, 27, 109, 118, 104, 106, 120,
+ 111, 23, 110, 112, 114, 115, 25, 119,
+ 28, 121, 125, 126, 131, 128, 129, 29,
+ 130, 31, 133, 30, 135, 136
};
static const char _indic_syllable_machine_trans_actions[] = {
- 1, 0, 2, 0, 2, 2, 2, 3,
- 2, 0, 2, 0, 2, 2, 2, 4,
- 2, 0, 5, 0, 5, 6, 2, 7,
- 2, 0, 2, 0, 2, 2, 8, 0,
- 11, 2, 2, 5, 0, 12, 12, 0,
- 2, 5, 2, 5, 2, 0, 13, 2,
- 0, 0, 2, 0, 2, 2, 0, 2,
- 2, 0, 0, 2, 2, 2, 0, 0,
- 0, 2, 14, 2, 0, 0, 2, 0,
- 2, 2, 0, 2, 2, 2, 2, 0,
+ 1, 0, 2, 0, 2, 0, 0, 2,
+ 2, 3, 2, 0, 2, 0, 0, 0,
+ 2, 2, 2, 4, 2, 0, 5, 0,
+ 5, 0, 6, 0, 2, 7, 2, 0,
+ 2, 0, 2, 0, 0, 2, 0, 8,
+ 0, 11, 2, 2, 5, 0, 12, 12,
+ 0, 2, 5, 2, 5, 2, 0, 13,
+ 2, 0, 0, 2, 0, 2, 2, 0,
2, 2, 0, 0, 2, 2, 2, 0,
- 0, 0, 2, 15, 5, 0, 5, 2,
- 2, 0, 5, 0, 0, 2, 5, 5,
- 0, 0, 0, 2, 16, 17, 2, 0,
- 0, 0, 0, 2, 2, 2, 2, 2,
- 0, 0, 2, 2, 2, 0, 0, 0,
- 2, 0, 18, 18, 0, 0, 0, 0,
- 19, 2, 0, 0, 0
+ 0, 0, 2, 14, 2, 0, 0, 2,
+ 0, 2, 2, 0, 2, 2, 2, 2,
+ 0, 2, 2, 0, 0, 2, 2, 2,
+ 0, 0, 0, 2, 15, 5, 0, 5,
+ 2, 2, 0, 5, 0, 0, 2, 5,
+ 5, 0, 0, 0, 2, 16, 17, 2,
+ 0, 0, 0, 0, 2, 2, 2, 2,
+ 2, 0, 0, 2, 2, 2, 0, 0,
+ 0, 2, 0, 18, 18, 0, 0, 0,
+ 0, 19, 2, 0, 0, 0
};
static const char _indic_syllable_machine_to_state_actions[] = {
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 9, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 9,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
@@ -349,14 +384,16 @@ static const char _indic_syllable_machine_to_state_actions[] = {
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
- 0
+ 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0
};
static const char _indic_syllable_machine_from_state_actions[] = {
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 10, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 10,
+ 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
@@ -369,41 +406,42 @@ static const char _indic_syllable_machine_from_state_actions[] = {
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
- 0
+ 0, 0
};
static const short _indic_syllable_machine_eof_trans[] = {
- 1, 1, 1, 1, 1, 1, 8, 8,
- 8, 8, 8, 8, 8, 16, 16, 22,
- 16, 16, 16, 24, 24, 24, 24, 24,
- 24, 1, 31, 0, 47, 47, 47, 47,
- 47, 47, 47, 47, 47, 47, 47, 47,
- 47, 47, 47, 47, 47, 47, 47, 47,
- 67, 67, 67, 67, 67, 67, 67, 67,
- 67, 67, 67, 67, 67, 67, 67, 67,
- 67, 67, 67, 67, 67, 67, 67, 67,
- 67, 92, 92, 92, 92, 92, 92, 92,
- 92, 92, 92, 92, 92, 92, 92, 92,
- 92, 92, 92, 92, 92, 109, 109, 110,
- 110, 110, 110, 110, 110, 110, 110, 110,
- 110, 110, 110, 110, 110, 110, 110, 110,
- 110, 110, 110, 92, 47, 47, 47, 47,
- 47, 47, 47, 137, 137, 137, 137, 137,
- 109
+ 1, 1, 1, 1, 1, 1, 1, 10,
+ 10, 10, 10, 10, 10, 10, 10, 20,
+ 20, 27, 20, 27, 20, 20, 30, 30,
+ 30, 30, 30, 30, 30, 1, 40, 0,
+ 56, 56, 56, 56, 56, 56, 56, 56,
+ 56, 56, 56, 56, 56, 56, 56, 56,
+ 56, 56, 56, 56, 56, 76, 76, 76,
+ 76, 76, 76, 76, 76, 76, 76, 76,
+ 76, 76, 76, 76, 76, 76, 76, 76,
+ 76, 76, 76, 76, 76, 76, 76, 101,
+ 101, 101, 101, 101, 101, 101, 101, 101,
+ 101, 101, 101, 101, 101, 101, 101, 101,
+ 101, 101, 101, 101, 118, 118, 119, 119,
+ 119, 119, 119, 119, 119, 119, 119, 119,
+ 119, 119, 119, 119, 119, 119, 119, 119,
+ 119, 119, 119, 101, 56, 56, 56, 56,
+ 56, 56, 56, 56, 146, 146, 146, 146,
+ 146, 118
};
-static const int indic_syllable_machine_start = 27;
-static const int indic_syllable_machine_first_final = 27;
+static const int indic_syllable_machine_start = 31;
+static const int indic_syllable_machine_first_final = 31;
static const int indic_syllable_machine_error = -1;
-static const int indic_syllable_machine_en_main = 27;
+static const int indic_syllable_machine_en_main = 31;
#line 58 "hb-ot-shaper-indic-machine.rl"
-#line 117 "hb-ot-shaper-indic-machine.rl"
+#line 118 "hb-ot-shaper-indic-machine.rl"
#define found_syllable(syllable_type) \
@@ -412,7 +450,7 @@ static const int indic_syllable_machine_en_main = 27;
for (unsigned int i = ts; i < te; i++) \
info[i].syllable() = (syllable_serial << 4) | syllable_type; \
syllable_serial++; \
- if (unlikely (syllable_serial == 16)) syllable_serial = 1; \
+ if (syllable_serial == 16) syllable_serial = 1; \
} HB_STMT_END
inline void
@@ -422,7 +460,7 @@ find_syllables_indic (hb_buffer_t *buffer)
int cs;
hb_glyph_info_t *info = buffer->info;
-#line 426 "hb-ot-shaper-indic-machine.hh"
+#line 453 "hb-ot-shaper-indic-machine.hh"
{
cs = indic_syllable_machine_start;
ts = 0;
@@ -430,7 +468,7 @@ find_syllables_indic (hb_buffer_t *buffer)
act = 0;
}
-#line 137 "hb-ot-shaper-indic-machine.rl"
+#line 138 "hb-ot-shaper-indic-machine.rl"
p = 0;
@@ -438,7 +476,7 @@ find_syllables_indic (hb_buffer_t *buffer)
unsigned int syllable_serial = 1;
-#line 442 "hb-ot-shaper-indic-machine.hh"
+#line 465 "hb-ot-shaper-indic-machine.hh"
{
int _slen;
int _trans;
@@ -452,7 +490,7 @@ _resume:
#line 1 "NONE"
{ts = p;}
break;
-#line 456 "hb-ot-shaper-indic-machine.hh"
+#line 477 "hb-ot-shaper-indic-machine.hh"
}
_keys = _indic_syllable_machine_trans_keys + (cs<<1);
@@ -475,51 +513,51 @@ _eof_trans:
{te = p+1;}
break;
case 11:
-#line 113 "hb-ot-shaper-indic-machine.rl"
+#line 114 "hb-ot-shaper-indic-machine.rl"
{te = p+1;{ found_syllable (indic_non_indic_cluster); }}
break;
case 13:
-#line 108 "hb-ot-shaper-indic-machine.rl"
+#line 109 "hb-ot-shaper-indic-machine.rl"
{te = p;p--;{ found_syllable (indic_consonant_syllable); }}
break;
case 14:
-#line 109 "hb-ot-shaper-indic-machine.rl"
+#line 110 "hb-ot-shaper-indic-machine.rl"
{te = p;p--;{ found_syllable (indic_vowel_syllable); }}
break;
case 17:
-#line 110 "hb-ot-shaper-indic-machine.rl"
+#line 111 "hb-ot-shaper-indic-machine.rl"
{te = p;p--;{ found_syllable (indic_standalone_cluster); }}
break;
case 19:
-#line 111 "hb-ot-shaper-indic-machine.rl"
+#line 112 "hb-ot-shaper-indic-machine.rl"
{te = p;p--;{ found_syllable (indic_symbol_cluster); }}
break;
case 15:
-#line 112 "hb-ot-shaper-indic-machine.rl"
+#line 113 "hb-ot-shaper-indic-machine.rl"
{te = p;p--;{ found_syllable (indic_broken_cluster); buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_BROKEN_SYLLABLE; }}
break;
case 16:
-#line 113 "hb-ot-shaper-indic-machine.rl"
+#line 114 "hb-ot-shaper-indic-machine.rl"
{te = p;p--;{ found_syllable (indic_non_indic_cluster); }}
break;
case 1:
-#line 108 "hb-ot-shaper-indic-machine.rl"
+#line 109 "hb-ot-shaper-indic-machine.rl"
{{p = ((te))-1;}{ found_syllable (indic_consonant_syllable); }}
break;
case 3:
-#line 109 "hb-ot-shaper-indic-machine.rl"
+#line 110 "hb-ot-shaper-indic-machine.rl"
{{p = ((te))-1;}{ found_syllable (indic_vowel_syllable); }}
break;
case 7:
-#line 110 "hb-ot-shaper-indic-machine.rl"
+#line 111 "hb-ot-shaper-indic-machine.rl"
{{p = ((te))-1;}{ found_syllable (indic_standalone_cluster); }}
break;
case 8:
-#line 111 "hb-ot-shaper-indic-machine.rl"
+#line 112 "hb-ot-shaper-indic-machine.rl"
{{p = ((te))-1;}{ found_syllable (indic_symbol_cluster); }}
break;
case 4:
-#line 112 "hb-ot-shaper-indic-machine.rl"
+#line 113 "hb-ot-shaper-indic-machine.rl"
{{p = ((te))-1;}{ found_syllable (indic_broken_cluster); buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_BROKEN_SYLLABLE; }}
break;
case 6:
@@ -540,19 +578,19 @@ _eof_trans:
case 18:
#line 1 "NONE"
{te = p+1;}
-#line 108 "hb-ot-shaper-indic-machine.rl"
+#line 109 "hb-ot-shaper-indic-machine.rl"
{act = 1;}
break;
case 5:
#line 1 "NONE"
{te = p+1;}
-#line 112 "hb-ot-shaper-indic-machine.rl"
+#line 113 "hb-ot-shaper-indic-machine.rl"
{act = 5;}
break;
case 12:
#line 1 "NONE"
{te = p+1;}
-#line 113 "hb-ot-shaper-indic-machine.rl"
+#line 114 "hb-ot-shaper-indic-machine.rl"
{act = 6;}
break;
#line 559 "hb-ot-shaper-indic-machine.hh"
@@ -564,7 +602,7 @@ _again:
#line 1 "NONE"
{ts = 0;}
break;
-#line 568 "hb-ot-shaper-indic-machine.hh"
+#line 566 "hb-ot-shaper-indic-machine.hh"
}
if ( ++p != pe )
@@ -580,7 +618,7 @@ _again:
}
-#line 145 "hb-ot-shaper-indic-machine.rl"
+#line 146 "hb-ot-shaper-indic-machine.rl"
}
diff --git a/thirdparty/harfbuzz/src/hb-ot-shaper-indic-table.cc b/thirdparty/harfbuzz/src/hb-ot-shaper-indic-table.cc
index bf6a2757bb..d9fb0510e4 100644
--- a/thirdparty/harfbuzz/src/hb-ot-shaper-indic-table.cc
+++ b/thirdparty/harfbuzz/src/hb-ot-shaper-indic-table.cc
@@ -42,6 +42,7 @@
#define OT_PLACEHOLDER I_Cat(PLACEHOLDER)
#define OT_DOTTEDCIRCLE I_Cat(DOTTEDCIRCLE)
#define OT_RS I_Cat(RS)
+#define OT_MPst I_Cat(MPst)
#define OT_Repha I_Cat(Repha)
#define OT_Ra I_Cat(Ra)
#define OT_CM I_Cat(CM)
@@ -80,9 +81,10 @@ static_assert (OT_VPst == M_Cat(VPst), "");
#define _OT_CS OT_CS /* 2 chars; CS */
#define _OT_DC OT_DOTTEDCIRCLE /* 1 chars; DOTTEDCIRCLE */
#define _OT_H OT_H /* 11 chars; H */
-#define _OT_M OT_M /* 143 chars; M */
+#define _OT_M OT_M /* 142 chars; M */
#define _OT_MH OT_MH /* 1 chars; MH */
#define _OT_ML OT_ML /* 1 chars; ML */
+#define _OT_MP OT_MPst /* 1 chars; MPst */
#define _OT_MR OT_MR /* 1 chars; MR */
#define _OT_MW OT_MW /* 2 chars; MW */
#define _OT_MY OT_MY /* 3 chars; MY */
@@ -200,7 +202,7 @@ static const uint16_t indic_table[] = {
/* 0A28 */ _(C,C), _(X,X), _(C,C), _(C,C), _(C,C), _(C,C), _(C,C), _(C,C),
/* 0A30 */ _(R,C), _(X,X), _(C,C), _(C,C), _(X,X), _(C,C), _(C,C), _(X,X),
/* 0A38 */ _(C,C), _(C,C), _(X,X), _(X,X), _(N,X), _(X,X), _(M,AP), _(M,LM),
- /* 0A40 */ _(M,AP), _(M,AP), _(M,AP), _(X,X), _(X,X), _(X,X), _(X,X), _(M,AP),
+ /* 0A40 */_(MP,AP), _(M,AP), _(M,AP), _(X,X), _(X,X), _(X,X), _(X,X), _(M,AP),
/* 0A48 */ _(M,AP), _(X,X), _(X,X), _(M,AP), _(M,AP), _(H,B), _(X,X), _(X,X),
/* 0A50 */ _(X,X), _(M,B), _(X,X), _(X,X), _(X,X), _(X,X), _(X,X), _(X,X),
/* 0A58 */ _(X,X), _(C,C), _(C,C), _(C,C), _(C,C), _(X,X), _(C,C), _(X,X),
@@ -451,15 +453,12 @@ static const uint16_t indic_table[] = {
/* Grantha */
/* 11300 */ _(X,X),_(SM,SM),_(SM,SM),_(SM,SM), _(X,X), _(X,X), _(X,X), _(X,X),
- /* 11308 */ _(X,X), _(X,X), _(X,X), _(X,X), _(X,X), _(X,X), _(X,X), _(X,X),
- /* 11310 */ _(X,X), _(X,X), _(X,X), _(X,X), _(X,X), _(X,X), _(X,X), _(X,X),
- /* 11318 */ _(X,X), _(X,X), _(X,X), _(X,X), _(X,X), _(X,X), _(X,X), _(X,X),
- /* 11320 */ _(X,X), _(X,X), _(X,X), _(X,X), _(X,X), _(X,X), _(X,X), _(X,X),
- /* 11328 */ _(X,X), _(X,X), _(X,X), _(X,X), _(X,X), _(X,X), _(X,X), _(X,X),
- /* 11330 */ _(X,X), _(X,X), _(X,X), _(X,X), _(X,X), _(X,X), _(X,X), _(X,X),
+
+#define indic_offset_0x11338u 1720
+
/* 11338 */ _(X,X), _(X,X), _(X,X), _(N,X), _(N,X), _(X,X), _(X,X), _(X,X),
-}; /* Table items: 1776; occupancy: 69% */
+}; /* Table items: 1728; occupancy: 71% */
uint16_t
hb_indic_get_categories (hb_codepoint_t u)
@@ -497,7 +496,8 @@ hb_indic_get_categories (hb_codepoint_t u)
break;
case 0x11u:
- if (hb_in_range<hb_codepoint_t> (u, 0x11300u, 0x1133Fu)) return indic_table[u - 0x11300u + indic_offset_0x11300u];
+ if (hb_in_range<hb_codepoint_t> (u, 0x11300u, 0x11307u)) return indic_table[u - 0x11300u + indic_offset_0x11300u];
+ if (hb_in_range<hb_codepoint_t> (u, 0x11338u, 0x1133Fu)) return indic_table[u - 0x11338u + indic_offset_0x11338u];
break;
default:
@@ -519,6 +519,7 @@ hb_indic_get_categories (hb_codepoint_t u)
#undef _OT_M
#undef _OT_MH
#undef _OT_ML
+#undef _OT_MP
#undef _OT_MR
#undef _OT_MW
#undef _OT_MY
diff --git a/thirdparty/harfbuzz/src/hb-ot-shaper-indic.cc b/thirdparty/harfbuzz/src/hb-ot-shaper-indic.cc
index 55509c1101..7652210d9d 100644
--- a/thirdparty/harfbuzz/src/hb-ot-shaper-indic.cc
+++ b/thirdparty/harfbuzz/src/hb-ot-shaper-indic.cc
@@ -223,15 +223,15 @@ enum {
INDIC_BASIC_FEATURES = INDIC_INIT, /* Don't forget to update this! */
};
-static void
+static bool
setup_syllables_indic (const hb_ot_shape_plan_t *plan,
hb_font_t *font,
hb_buffer_t *buffer);
-static void
+static bool
initial_reordering_indic (const hb_ot_shape_plan_t *plan,
hb_font_t *font,
hb_buffer_t *buffer);
-static void
+static bool
final_reordering_indic (const hb_ot_shape_plan_t *plan,
hb_font_t *font,
hb_buffer_t *buffer);
@@ -413,7 +413,7 @@ setup_masks_indic (const hb_ot_shape_plan_t *plan HB_UNUSED,
set_indic_properties (info[i]);
}
-static void
+static bool
setup_syllables_indic (const hb_ot_shape_plan_t *plan HB_UNUSED,
hb_font_t *font HB_UNUSED,
hb_buffer_t *buffer)
@@ -422,6 +422,7 @@ setup_syllables_indic (const hb_ot_shape_plan_t *plan HB_UNUSED,
find_syllables_indic (buffer);
foreach_syllable (buffer, start, end)
buffer->unsafe_to_break (start, end);
+ return false;
}
static int
@@ -714,6 +715,9 @@ initial_reordering_consonant_syllable (const hb_ot_shape_plan_t *plan,
}
}
} else if (info[i].indic_position() != POS_SMVD) {
+ if (info[i].indic_category() == I_Cat(MPst) &&
+ i > start && info[i - 1].indic_category() == I_Cat(SM))
+ info[i - 1].indic_position() = info[i].indic_position();
last_pos = (indic_position_t) info[i].indic_position();
}
}
@@ -729,7 +733,7 @@ initial_reordering_consonant_syllable (const hb_ot_shape_plan_t *plan,
if (info[j].indic_position() < POS_SMVD)
info[j].indic_position() = info[i].indic_position();
last = i;
- } else if (info[i].indic_category() == I_Cat(M))
+ } else if (FLAG_UNSAFE (info[i].indic_category()) & (FLAG (I_Cat(M)) | FLAG (I_Cat(MPst))))
last = i;
}
@@ -742,14 +746,40 @@ initial_reordering_consonant_syllable (const hb_ot_shape_plan_t *plan,
/* Sit tight, rock 'n roll! */
hb_stable_sort (info + start, end - start, compare_indic_order);
- /* Find base again */
+
+ /* Find base again; also flip left-matra sequence. */
+ unsigned first_left_matra = end;
+ unsigned last_left_matra = end;
base = end;
for (unsigned int i = start; i < end; i++)
+ {
if (info[i].indic_position() == POS_BASE_C)
{
base = i;
break;
}
+ else if (info[i].indic_position() == POS_PRE_M)
+ {
+ if (first_left_matra == end)
+ first_left_matra = i;
+ last_left_matra = i;
+ }
+ }
+ /* https://github.com/harfbuzz/harfbuzz/issues/3863 */
+ if (first_left_matra < last_left_matra)
+ {
+ /* No need to merge clusters, handled later. */
+ buffer->reverse_range (first_left_matra, last_left_matra + 1);
+ /* Reverse back nuktas, etc. */
+ unsigned i = first_left_matra;
+ for (unsigned j = i; j <= last_left_matra; j++)
+ if (FLAG_UNSAFE (info[j].indic_category()) & (FLAG (I_Cat(M)) | FLAG (I_Cat(MPst))))
+ {
+ buffer->reverse_range (i, j + 1);
+ i = j + 1;
+ }
+ }
+
/* Things are out-of-control for post base positions, they may shuffle
* around like crazy. In old-spec mode, we move halants around, so in
* that case merge all clusters after base. Otherwise, check the sort
@@ -955,25 +985,29 @@ initial_reordering_syllable_indic (const hb_ot_shape_plan_t *plan,
}
}
-static void
+static bool
initial_reordering_indic (const hb_ot_shape_plan_t *plan,
hb_font_t *font,
hb_buffer_t *buffer)
{
+ bool ret = false;
if (!buffer->message (font, "start reordering indic initial"))
- return;
+ return ret;
update_consonant_positions_indic (plan, font, buffer);
- hb_syllabic_insert_dotted_circles (font, buffer,
- indic_broken_cluster,
- I_Cat(DOTTEDCIRCLE),
- I_Cat(Repha),
- POS_END);
+ if (hb_syllabic_insert_dotted_circles (font, buffer,
+ indic_broken_cluster,
+ I_Cat(DOTTEDCIRCLE),
+ I_Cat(Repha),
+ POS_END))
+ ret = true;
foreach_syllable (buffer, start, end)
initial_reordering_syllable_indic (plan, font->face, buffer, start, end);
(void) buffer->message (font, "end reordering indic initial");
+
+ return ret;
}
static void
@@ -1116,7 +1150,7 @@ final_reordering_syllable_indic (const hb_ot_shape_plan_t *plan,
{
search:
while (new_pos > start &&
- !(is_one_of (info[new_pos], (FLAG (I_Cat(M)) | FLAG (I_Cat(H))))))
+ !(is_one_of (info[new_pos], (FLAG (I_Cat(M)) | FLAG (I_Cat(MPst)) | FLAG (I_Cat(H))))))
new_pos--;
/* If we found no Halant we are done.
@@ -1316,7 +1350,8 @@ final_reordering_syllable_indic (const hb_ot_shape_plan_t *plan,
unlikely (is_halant (info[new_reph_pos])))
{
for (unsigned int i = base + 1; i < new_reph_pos; i++)
- if (info[i].indic_category() == I_Cat(M)) {
+ if (FLAG_UNSAFE (info[i].indic_category()) & (FLAG (I_Cat(M)) | FLAG (I_Cat(MPst))))
+ {
/* Ok, got it. */
new_reph_pos--;
}
@@ -1376,7 +1411,7 @@ final_reordering_syllable_indic (const hb_ot_shape_plan_t *plan,
if (buffer->props.script != HB_SCRIPT_MALAYALAM && buffer->props.script != HB_SCRIPT_TAMIL)
{
while (new_pos > start &&
- !(is_one_of (info[new_pos - 1], FLAG(I_Cat(M)) | FLAG (I_Cat(H)))))
+ !(is_one_of (info[new_pos - 1], FLAG (I_Cat(M)) | FLAG (I_Cat(MPst)) | FLAG (I_Cat(H)))))
new_pos--;
}
@@ -1439,13 +1474,13 @@ final_reordering_syllable_indic (const hb_ot_shape_plan_t *plan,
}
-static void
+static bool
final_reordering_indic (const hb_ot_shape_plan_t *plan,
hb_font_t *font HB_UNUSED,
hb_buffer_t *buffer)
{
unsigned int count = buffer->len;
- if (unlikely (!count)) return;
+ if (unlikely (!count)) return false;
if (buffer->message (font, "start reordering indic final")) {
foreach_syllable (buffer, start, end)
@@ -1455,6 +1490,8 @@ final_reordering_indic (const hb_ot_shape_plan_t *plan,
HB_BUFFER_DEALLOCATE_VAR (buffer, indic_category);
HB_BUFFER_DEALLOCATE_VAR (buffer, indic_position);
+
+ return false;
}
diff --git a/thirdparty/harfbuzz/src/hb-ot-shaper-khmer-machine.hh b/thirdparty/harfbuzz/src/hb-ot-shaper-khmer-machine.hh
index e18bd75ef1..fd91ee0caf 100644
--- a/thirdparty/harfbuzz/src/hb-ot-shaper-khmer-machine.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-shaper-khmer-machine.hh
@@ -48,7 +48,7 @@ enum khmer_syllable_type_t {
};
-#line 52 "hb-ot-shaper-khmer-machine.hh"
+#line 49 "hb-ot-shaper-khmer-machine.hh"
#define khmer_syllable_machine_ex_C 1u
#define khmer_syllable_machine_ex_DOTTEDCIRCLE 11u
#define khmer_syllable_machine_ex_H 4u
@@ -66,7 +66,7 @@ enum khmer_syllable_type_t {
#define khmer_syllable_machine_ex_ZWNJ 5u
-#line 70 "hb-ot-shaper-khmer-machine.hh"
+#line 65 "hb-ot-shaper-khmer-machine.hh"
static const unsigned char _khmer_syllable_machine_trans_keys[] = {
5u, 26u, 5u, 26u, 1u, 15u, 5u, 26u, 5u, 26u, 5u, 26u, 5u, 26u, 5u, 26u,
5u, 26u, 5u, 26u, 5u, 26u, 5u, 26u, 5u, 26u, 1u, 15u, 5u, 26u, 5u, 26u,
@@ -284,7 +284,7 @@ static const int khmer_syllable_machine_en_main = 21;
for (unsigned int i = ts; i < te; i++) \
info[i].syllable() = (syllable_serial << 4) | syllable_type; \
syllable_serial++; \
- if (unlikely (syllable_serial == 16)) syllable_serial = 1; \
+ if (syllable_serial == 16) syllable_serial = 1; \
} HB_STMT_END
inline void
@@ -294,7 +294,7 @@ find_syllables_khmer (hb_buffer_t *buffer)
int cs;
hb_glyph_info_t *info = buffer->info;
-#line 298 "hb-ot-shaper-khmer-machine.hh"
+#line 287 "hb-ot-shaper-khmer-machine.hh"
{
cs = khmer_syllable_machine_start;
ts = 0;
@@ -310,7 +310,7 @@ find_syllables_khmer (hb_buffer_t *buffer)
unsigned int syllable_serial = 1;
-#line 314 "hb-ot-shaper-khmer-machine.hh"
+#line 299 "hb-ot-shaper-khmer-machine.hh"
{
int _slen;
int _trans;
@@ -324,7 +324,7 @@ _resume:
#line 1 "NONE"
{ts = p;}
break;
-#line 328 "hb-ot-shaper-khmer-machine.hh"
+#line 311 "hb-ot-shaper-khmer-machine.hh"
}
_keys = _khmer_syllable_machine_trans_keys + (cs<<1);
@@ -394,7 +394,7 @@ _eof_trans:
#line 98 "hb-ot-shaper-khmer-machine.rl"
{act = 3;}
break;
-#line 398 "hb-ot-shaper-khmer-machine.hh"
+#line 368 "hb-ot-shaper-khmer-machine.hh"
}
_again:
@@ -403,7 +403,7 @@ _again:
#line 1 "NONE"
{ts = 0;}
break;
-#line 407 "hb-ot-shaper-khmer-machine.hh"
+#line 375 "hb-ot-shaper-khmer-machine.hh"
}
if ( ++p != pe )
diff --git a/thirdparty/harfbuzz/src/hb-ot-shaper-khmer.cc b/thirdparty/harfbuzz/src/hb-ot-shaper-khmer.cc
index d9795589fa..019a285102 100644
--- a/thirdparty/harfbuzz/src/hb-ot-shaper-khmer.cc
+++ b/thirdparty/harfbuzz/src/hb-ot-shaper-khmer.cc
@@ -89,11 +89,11 @@ set_khmer_properties (hb_glyph_info_t &info)
info.khmer_category() = (khmer_category_t) (type & 0xFFu);
}
-static void
+static bool
setup_syllables_khmer (const hb_ot_shape_plan_t *plan,
hb_font_t *font,
hb_buffer_t *buffer);
-static void
+static bool
reorder_khmer (const hb_ot_shape_plan_t *plan,
hb_font_t *font,
hb_buffer_t *buffer);
@@ -192,7 +192,7 @@ setup_masks_khmer (const hb_ot_shape_plan_t *plan HB_UNUSED,
set_khmer_properties (info[i]);
}
-static void
+static bool
setup_syllables_khmer (const hb_ot_shape_plan_t *plan HB_UNUSED,
hb_font_t *font HB_UNUSED,
hb_buffer_t *buffer)
@@ -201,6 +201,7 @@ setup_syllables_khmer (const hb_ot_shape_plan_t *plan HB_UNUSED,
find_syllables_khmer (buffer);
foreach_syllable (buffer, start, end)
buffer->unsafe_to_break (start, end);
+ return false;
}
@@ -303,23 +304,27 @@ reorder_syllable_khmer (const hb_ot_shape_plan_t *plan,
}
}
-static void
+static bool
reorder_khmer (const hb_ot_shape_plan_t *plan,
hb_font_t *font,
hb_buffer_t *buffer)
{
+ bool ret = false;
if (buffer->message (font, "start reordering khmer"))
{
- hb_syllabic_insert_dotted_circles (font, buffer,
- khmer_broken_cluster,
- K_Cat(DOTTEDCIRCLE),
- (unsigned) -1);
+ if (hb_syllabic_insert_dotted_circles (font, buffer,
+ khmer_broken_cluster,
+ K_Cat(DOTTEDCIRCLE),
+ (unsigned) -1))
+ ret = true;
foreach_syllable (buffer, start, end)
reorder_syllable_khmer (plan, font->face, buffer, start, end);
(void) buffer->message (font, "end reordering khmer");
}
HB_BUFFER_DEALLOCATE_VAR (buffer, khmer_category);
+
+ return ret;
}
diff --git a/thirdparty/harfbuzz/src/hb-ot-shaper-myanmar-machine.hh b/thirdparty/harfbuzz/src/hb-ot-shaper-myanmar-machine.hh
index b109708937..87cded4ed8 100644
--- a/thirdparty/harfbuzz/src/hb-ot-shaper-myanmar-machine.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-shaper-myanmar-machine.hh
@@ -50,7 +50,7 @@ enum myanmar_syllable_type_t {
};
-#line 54 "hb-ot-shaper-myanmar-machine.hh"
+#line 51 "hb-ot-shaper-myanmar-machine.hh"
#define myanmar_syllable_machine_ex_A 9u
#define myanmar_syllable_machine_ex_As 32u
#define myanmar_syllable_machine_ex_C 1u
@@ -77,7 +77,7 @@ enum myanmar_syllable_type_t {
#define myanmar_syllable_machine_ex_ZWNJ 5u
-#line 81 "hb-ot-shaper-myanmar-machine.hh"
+#line 76 "hb-ot-shaper-myanmar-machine.hh"
static const unsigned char _myanmar_syllable_machine_trans_keys[] = {
1u, 41u, 3u, 41u, 5u, 39u, 5u, 8u, 3u, 41u, 3u, 39u, 3u, 39u, 5u, 39u,
5u, 39u, 3u, 39u, 3u, 39u, 3u, 41u, 5u, 39u, 1u, 15u, 3u, 39u, 3u, 39u,
@@ -433,7 +433,7 @@ static const int myanmar_syllable_machine_en_main = 0;
for (unsigned int i = ts; i < te; i++) \
info[i].syllable() = (syllable_serial << 4) | syllable_type; \
syllable_serial++; \
- if (unlikely (syllable_serial == 16)) syllable_serial = 1; \
+ if (syllable_serial == 16) syllable_serial = 1; \
} HB_STMT_END
inline void
@@ -443,7 +443,7 @@ find_syllables_myanmar (hb_buffer_t *buffer)
int cs;
hb_glyph_info_t *info = buffer->info;
-#line 447 "hb-ot-shaper-myanmar-machine.hh"
+#line 436 "hb-ot-shaper-myanmar-machine.hh"
{
cs = myanmar_syllable_machine_start;
ts = 0;
@@ -459,7 +459,7 @@ find_syllables_myanmar (hb_buffer_t *buffer)
unsigned int syllable_serial = 1;
-#line 463 "hb-ot-shaper-myanmar-machine.hh"
+#line 448 "hb-ot-shaper-myanmar-machine.hh"
{
int _slen;
int _trans;
@@ -473,7 +473,7 @@ _resume:
#line 1 "NONE"
{ts = p;}
break;
-#line 477 "hb-ot-shaper-myanmar-machine.hh"
+#line 460 "hb-ot-shaper-myanmar-machine.hh"
}
_keys = _myanmar_syllable_machine_trans_keys + (cs<<1);
@@ -519,7 +519,7 @@ _eof_trans:
#line 113 "hb-ot-shaper-myanmar-machine.rl"
{te = p;p--;{ found_syllable (myanmar_non_myanmar_cluster); }}
break;
-#line 523 "hb-ot-shaper-myanmar-machine.hh"
+#line 498 "hb-ot-shaper-myanmar-machine.hh"
}
_again:
@@ -528,7 +528,7 @@ _again:
#line 1 "NONE"
{ts = 0;}
break;
-#line 532 "hb-ot-shaper-myanmar-machine.hh"
+#line 505 "hb-ot-shaper-myanmar-machine.hh"
}
if ( ++p != pe )
diff --git a/thirdparty/harfbuzz/src/hb-ot-shaper-myanmar.cc b/thirdparty/harfbuzz/src/hb-ot-shaper-myanmar.cc
index 78bd8de524..1b2a085a8d 100644
--- a/thirdparty/harfbuzz/src/hb-ot-shaper-myanmar.cc
+++ b/thirdparty/harfbuzz/src/hb-ot-shaper-myanmar.cc
@@ -98,11 +98,11 @@ is_consonant_myanmar (const hb_glyph_info_t &info)
}
-static void
+static bool
setup_syllables_myanmar (const hb_ot_shape_plan_t *plan,
hb_font_t *font,
hb_buffer_t *buffer);
-static void
+static bool
reorder_myanmar (const hb_ot_shape_plan_t *plan,
hb_font_t *font,
hb_buffer_t *buffer);
@@ -150,7 +150,7 @@ setup_masks_myanmar (const hb_ot_shape_plan_t *plan HB_UNUSED,
set_myanmar_properties (info[i]);
}
-static void
+static bool
setup_syllables_myanmar (const hb_ot_shape_plan_t *plan HB_UNUSED,
hb_font_t *font HB_UNUSED,
hb_buffer_t *buffer)
@@ -159,6 +159,7 @@ setup_syllables_myanmar (const hb_ot_shape_plan_t *plan HB_UNUSED,
find_syllables_myanmar (buffer);
foreach_syllable (buffer, start, end)
buffer->unsafe_to_break (start, end);
+ return false;
}
static int
@@ -270,6 +271,33 @@ initial_reordering_consonant_syllable (hb_buffer_t *buffer,
/* Sit tight, rock 'n roll! */
buffer->sort (start, end, compare_myanmar_order);
+
+ /* Flip left-matra sequence. */
+ unsigned first_left_matra = end;
+ unsigned last_left_matra = end;
+ for (unsigned int i = start; i < end; i++)
+ {
+ if (info[i].myanmar_position() == POS_PRE_M)
+ {
+ if (first_left_matra == end)
+ first_left_matra = i;
+ last_left_matra = i;
+ }
+ }
+ /* https://github.com/harfbuzz/harfbuzz/issues/3863 */
+ if (first_left_matra < last_left_matra)
+ {
+ /* No need to merge clusters, done already? */
+ buffer->reverse_range (first_left_matra, last_left_matra + 1);
+ /* Reverse back VS, etc. */
+ unsigned i = first_left_matra;
+ for (unsigned j = i; j <= last_left_matra; j++)
+ if (info[j].myanmar_category() == M_Cat(VPre))
+ {
+ buffer->reverse_range (i, j + 1);
+ i = j + 1;
+ }
+ }
}
static void
@@ -291,16 +319,18 @@ reorder_syllable_myanmar (const hb_ot_shape_plan_t *plan HB_UNUSED,
}
}
-static void
+static bool
reorder_myanmar (const hb_ot_shape_plan_t *plan,
hb_font_t *font,
hb_buffer_t *buffer)
{
+ bool ret = false;
if (buffer->message (font, "start reordering myanmar"))
{
- hb_syllabic_insert_dotted_circles (font, buffer,
- myanmar_broken_cluster,
- M_Cat(DOTTEDCIRCLE));
+ if (hb_syllabic_insert_dotted_circles (font, buffer,
+ myanmar_broken_cluster,
+ M_Cat(DOTTEDCIRCLE)))
+ ret = true;
foreach_syllable (buffer, start, end)
reorder_syllable_myanmar (plan, font->face, buffer, start, end);
@@ -309,6 +339,8 @@ reorder_myanmar (const hb_ot_shape_plan_t *plan,
HB_BUFFER_DEALLOCATE_VAR (buffer, myanmar_category);
HB_BUFFER_DEALLOCATE_VAR (buffer, myanmar_position);
+
+ return ret;
}
@@ -331,6 +363,7 @@ const hb_ot_shaper_t _hb_ot_shaper_myanmar =
};
+#ifndef HB_NO_OT_SHAPER_MYANMAR_ZAWGYI
/* Ugly Zawgyi encoding.
* Disable all auto processing.
* https://github.com/harfbuzz/harfbuzz/issues/1162 */
@@ -351,6 +384,7 @@ const hb_ot_shaper_t _hb_ot_shaper_myanmar_zawgyi =
HB_OT_SHAPE_ZERO_WIDTH_MARKS_NONE,
false, /* fallback_position */
};
+#endif
#endif
diff --git a/thirdparty/harfbuzz/src/hb-ot-shaper-syllabic.cc b/thirdparty/harfbuzz/src/hb-ot-shaper-syllabic.cc
index a8e0d8e8c1..89226ae4a1 100644
--- a/thirdparty/harfbuzz/src/hb-ot-shaper-syllabic.cc
+++ b/thirdparty/harfbuzz/src/hb-ot-shaper-syllabic.cc
@@ -29,7 +29,7 @@
#include "hb-ot-shaper-syllabic.hh"
-void
+bool
hb_syllabic_insert_dotted_circles (hb_font_t *font,
hb_buffer_t *buffer,
unsigned int broken_syllable_type,
@@ -38,13 +38,13 @@ hb_syllabic_insert_dotted_circles (hb_font_t *font,
int dottedcircle_position)
{
if (unlikely (buffer->flags & HB_BUFFER_FLAG_DO_NOT_INSERT_DOTTED_CIRCLE))
- return;
+ return false;
if (likely (!(buffer->scratch_flags & HB_BUFFER_SCRATCH_FLAG_HAS_BROKEN_SYLLABLE)))
- return;
+ return false;
hb_codepoint_t dottedcircle_glyph;
if (!font->get_nominal_glyph (0x25CCu, &dottedcircle_glyph))
- return;
+ return false;
hb_glyph_info_t dottedcircle = {0};
dottedcircle.codepoint = 0x25CCu;
@@ -84,14 +84,16 @@ hb_syllabic_insert_dotted_circles (hb_font_t *font,
(void) buffer->next_glyph ();
}
buffer->sync ();
+ return true;
}
-HB_INTERNAL void
+HB_INTERNAL bool
hb_syllabic_clear_var (const hb_ot_shape_plan_t *plan,
hb_font_t *font,
hb_buffer_t *buffer)
{
HB_BUFFER_DEALLOCATE_VAR (buffer, syllable);
+ return false;
}
diff --git a/thirdparty/harfbuzz/src/hb-ot-shaper-syllabic.hh b/thirdparty/harfbuzz/src/hb-ot-shaper-syllabic.hh
index e8a15bb48a..f240ad1c26 100644
--- a/thirdparty/harfbuzz/src/hb-ot-shaper-syllabic.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-shaper-syllabic.hh
@@ -30,7 +30,7 @@
#include "hb-ot-shaper.hh"
-HB_INTERNAL void
+HB_INTERNAL bool
hb_syllabic_insert_dotted_circles (hb_font_t *font,
hb_buffer_t *buffer,
unsigned int broken_syllable_type,
@@ -38,7 +38,7 @@ hb_syllabic_insert_dotted_circles (hb_font_t *font,
int repha_category = -1,
int dottedcircle_position = -1);
-HB_INTERNAL void
+HB_INTERNAL bool
hb_syllabic_clear_var (const hb_ot_shape_plan_t *plan,
hb_font_t *font,
hb_buffer_t *buffer);
diff --git a/thirdparty/harfbuzz/src/hb-ot-shaper-thai.cc b/thirdparty/harfbuzz/src/hb-ot-shaper-thai.cc
index 15349b1e64..6cd67cde35 100644
--- a/thirdparty/harfbuzz/src/hb-ot-shaper-thai.cc
+++ b/thirdparty/harfbuzz/src/hb-ot-shaper-thai.cc
@@ -98,9 +98,9 @@ static hb_codepoint_t
thai_pua_shape (hb_codepoint_t u, thai_action_t action, hb_font_t *font)
{
struct thai_pua_mapping_t {
- hb_codepoint_t u;
- hb_codepoint_t win_pua;
- hb_codepoint_t mac_pua;
+ uint16_t u;
+ uint16_t win_pua;
+ uint16_t mac_pua;
} const *pua_mappings = nullptr;
static const thai_pua_mapping_t SD_mappings[] = {
{0x0E48u, 0xF70Au, 0xF88Bu}, /* MAI EK */
diff --git a/thirdparty/harfbuzz/src/hb-ot-shaper-use-machine.hh b/thirdparty/harfbuzz/src/hb-ot-shaper-use-machine.hh
index 41e8a34f3b..f2fbdb725b 100644
--- a/thirdparty/harfbuzz/src/hb-ot-shaper-use-machine.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-shaper-use-machine.hh
@@ -53,7 +53,7 @@ enum use_syllable_type_t {
};
-#line 57 "hb-ot-shaper-use-machine.hh"
+#line 54 "hb-ot-shaper-use-machine.hh"
#define use_syllable_machine_ex_B 1u
#define use_syllable_machine_ex_CGJ 6u
#define use_syllable_machine_ex_CMAbv 31u
@@ -97,7 +97,7 @@ enum use_syllable_type_t {
#define use_syllable_machine_ex_ZWNJ 14u
-#line 101 "hb-ot-shaper-use-machine.hh"
+#line 96 "hb-ot-shaper-use-machine.hh"
static const unsigned char _use_syllable_machine_trans_keys[] = {
0u, 53u, 11u, 53u, 11u, 53u, 1u, 53u, 14u, 48u, 14u, 47u, 14u, 47u, 14u, 47u,
14u, 46u, 14u, 46u, 14u, 14u, 14u, 48u, 14u, 48u, 14u, 48u, 1u, 14u, 14u, 48u,
@@ -839,7 +839,7 @@ static const int use_syllable_machine_en_main = 0;
for (unsigned i = (*ts).second.first; i < (*te).second.first; ++i) \
info[i].syllable() = (syllable_serial << 4) | syllable_type; \
syllable_serial++; \
- if (unlikely (syllable_serial == 16)) syllable_serial = 1; \
+ if (syllable_serial == 16) syllable_serial = 1; \
} HB_STMT_END
@@ -929,7 +929,7 @@ find_syllables_use (hb_buffer_t *buffer)
unsigned int act HB_UNUSED;
int cs;
-#line 933 "hb-ot-shaper-use-machine.hh"
+#line 922 "hb-ot-shaper-use-machine.hh"
{
cs = use_syllable_machine_start;
ts = 0;
@@ -942,7 +942,7 @@ find_syllables_use (hb_buffer_t *buffer)
unsigned int syllable_serial = 1;
-#line 946 "hb-ot-shaper-use-machine.hh"
+#line 931 "hb-ot-shaper-use-machine.hh"
{
int _slen;
int _trans;
@@ -956,7 +956,7 @@ _resume:
#line 1 "NONE"
{ts = p;}
break;
-#line 960 "hb-ot-shaper-use-machine.hh"
+#line 943 "hb-ot-shaper-use-machine.hh"
}
_keys = _use_syllable_machine_trans_keys + (cs<<1);
@@ -1046,7 +1046,7 @@ _eof_trans:
#line 178 "hb-ot-shaper-use-machine.rl"
{te = p;p--;{ found_syllable (use_non_cluster); }}
break;
-#line 1050 "hb-ot-shaper-use-machine.hh"
+#line 1014 "hb-ot-shaper-use-machine.hh"
}
_again:
@@ -1055,7 +1055,7 @@ _again:
#line 1 "NONE"
{ts = 0;}
break;
-#line 1059 "hb-ot-shaper-use-machine.hh"
+#line 1021 "hb-ot-shaper-use-machine.hh"
}
if ( ++p != pe )
diff --git a/thirdparty/harfbuzz/src/hb-ot-shaper-use-table.hh b/thirdparty/harfbuzz/src/hb-ot-shaper-use-table.hh
index 6395d689ae..6b6b552ee5 100644
--- a/thirdparty/harfbuzz/src/hb-ot-shaper-use-table.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-shaper-use-table.hh
@@ -25,6 +25,7 @@
* # Updated for Unicode 12.1 by Andrew Glass 2019-05-24
* # Updated for Unicode 13.0 by Andrew Glass 2020-07-28
* # Updated for Unicode 14.0 by Andrew Glass 2021-09-25
+ * # Updated for Unicode 15.0 by Andrew Glass 2022-09-16
* # Override values For Indic_Positional_Category
* # Not derivable
* # Initial version based on Unicode 7.0 by Andrew Glass 2014-03-17
@@ -34,6 +35,7 @@
* # Updated for Unicode 12.1 by Andrew Glass 2019-05-30
* # Updated for Unicode 13.0 by Andrew Glass 2020-07-28
* # Updated for Unicode 14.0 by Andrew Glass 2021-09-28
+ * # Updated for Unicode 15.0 by Andrew Glass 2022-09-16
* UnicodeData.txt does not have a header.
*/
@@ -89,8 +91,11 @@
#define VMPre USE(VMPre)
#pragma GCC diagnostic pop
+
+#ifndef HB_OPTIMIZE_SIZE
+
static const uint8_t
-hb_use_u8[3115] =
+hb_use_u8[3141] =
{
16, 50, 51, 51, 51, 52, 51, 83, 118, 131, 51, 57, 58, 179, 195, 61,
51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51,
@@ -125,11 +130,11 @@ hb_use_u8[3115] =
2, 2, 2, 2, 2, 2, 2, 2, 2, 88, 89, 2, 2, 2, 2, 2,
2, 2, 2, 90, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 91, 2, 2, 92, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 93, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 94, 94, 95, 96, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94,
- 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94,
- 94, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1,
+ 2, 2, 2, 91, 2, 2, 92, 2, 2, 2, 93, 2, 2, 2, 2, 2,
+ 2, 2, 2, 94, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 95, 95, 96, 97, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95,
+ 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95,
+ 95, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1,
0, 2, 2, 2, 2, 2, 0, 0, 0, 3, 0, 0, 0, 0, 0, 4,
0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 6, 7, 0, 0, 0, 0, 0, 0,
@@ -226,70 +231,72 @@ hb_use_u8[3115] =
0, 9, 47, 2, 2, 2, 2, 2, 2, 2, 2, 2, 125, 18, 20, 151,
20, 19, 152, 153, 2, 2, 2, 2, 2, 0, 0, 63, 154, 0, 0, 0,
0, 2, 11, 0, 0, 0, 0, 0, 0, 2, 63, 23, 18, 18, 18, 20,
- 20, 106, 155, 0, 0, 156, 157, 29, 158, 28, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 2, 21, 17, 20, 20, 159, 42, 0, 0, 0,
+ 20, 106, 155, 0, 0, 54, 156, 29, 157, 28, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 21, 17, 20, 20, 158, 42, 0, 0, 0,
47, 125, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 7, 7, 2, 2,
28, 2, 2, 2, 2, 2, 2, 2, 28, 2, 2, 2, 2, 2, 2, 2,
- 8, 16, 17, 19, 20, 160, 29, 0, 0, 9, 9, 28, 2, 2, 2, 7,
+ 8, 16, 17, 19, 20, 159, 29, 0, 0, 9, 9, 28, 2, 2, 2, 7,
28, 7, 2, 28, 2, 2, 56, 15, 21, 14, 21, 45, 30, 31, 30, 32,
0, 0, 0, 0, 33, 0, 0, 0, 2, 2, 21, 0, 9, 9, 9, 44,
0, 9, 9, 44, 0, 0, 0, 0, 0, 2, 2, 63, 23, 18, 18, 18,
20, 21, 123, 13, 15, 0, 0, 0, 0, 2, 2, 2, 2, 2, 0, 0,
- 161, 162, 0, 0, 0, 0, 0, 0, 0, 16, 17, 18, 18, 64, 97, 23,
- 158, 9, 163, 7, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2, 2,
- 63, 23, 18, 18, 0, 46, 46, 9, 164, 35, 0, 0, 0, 0, 0, 0,
+ 160, 161, 0, 0, 0, 0, 0, 0, 0, 16, 17, 18, 18, 64, 97, 23,
+ 157, 9, 162, 7, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2, 2,
+ 63, 23, 18, 18, 0, 46, 46, 9, 163, 35, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 2, 2, 18, 0, 21, 17, 18, 18, 19, 14, 80,
- 164, 36, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 8, 165,
- 23, 18, 20, 20, 163, 7, 0, 0, 0, 2, 2, 2, 2, 2, 7, 41,
+ 163, 36, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 8, 164,
+ 23, 18, 20, 20, 162, 7, 0, 0, 0, 2, 2, 2, 2, 2, 7, 41,
133, 21, 20, 18, 74, 19, 20, 0, 0, 2, 2, 2, 7, 0, 0, 0,
- 0, 2, 2, 2, 2, 2, 2, 16, 17, 18, 19, 20, 103, 164, 35, 0,
+ 0, 2, 2, 2, 2, 2, 2, 16, 17, 18, 19, 20, 103, 163, 35, 0,
0, 2, 2, 2, 7, 28, 0, 2, 2, 2, 2, 28, 7, 2, 2, 2,
- 2, 21, 21, 16, 30, 31, 10, 166, 167, 168, 169, 0, 0, 0, 0, 0,
+ 2, 21, 21, 16, 30, 31, 10, 165, 166, 167, 168, 0, 0, 0, 0, 0,
0, 2, 2, 2, 2, 0, 2, 2, 2, 63, 23, 18, 18, 0, 20, 21,
27, 106, 0, 31, 0, 0, 0, 0, 0, 50, 18, 20, 20, 20, 137, 2,
- 2, 2, 170, 171, 9, 13, 172, 70, 173, 0, 0, 1, 144, 0, 0, 0,
- 0, 50, 18, 20, 14, 17, 18, 2, 2, 2, 2, 155, 155, 155, 174, 174,
- 174, 174, 174, 174, 13, 175, 0, 28, 0, 20, 18, 18, 29, 20, 20, 9,
- 164, 0, 59, 59, 59, 59, 59, 59, 59, 64, 19, 80, 44, 0, 0, 0,
+ 2, 2, 169, 170, 9, 13, 171, 70, 172, 0, 0, 1, 144, 0, 0, 0,
+ 0, 50, 18, 20, 14, 17, 18, 2, 2, 2, 2, 155, 155, 155, 173, 173,
+ 173, 173, 173, 173, 13, 174, 0, 28, 0, 20, 18, 18, 29, 20, 20, 9,
+ 163, 0, 59, 59, 59, 59, 59, 59, 59, 64, 19, 80, 44, 0, 0, 0,
0, 2, 2, 2, 7, 2, 28, 2, 2, 50, 20, 20, 29, 0, 36, 20,
- 25, 9, 157, 176, 172, 0, 0, 0, 0, 2, 2, 2, 28, 7, 2, 2,
+ 25, 9, 156, 175, 171, 0, 0, 0, 0, 2, 2, 2, 28, 7, 2, 2,
2, 2, 2, 2, 2, 2, 21, 21, 45, 20, 33, 80, 66, 0, 0, 0,
- 0, 2, 177, 64, 45, 0, 0, 0, 0, 9, 178, 2, 2, 2, 2, 2,
+ 0, 2, 176, 64, 45, 0, 0, 0, 0, 9, 177, 2, 2, 2, 2, 2,
2, 2, 2, 21, 20, 18, 29, 0, 46, 14, 140, 0, 0, 0, 0, 0,
- 0, 179, 179, 179, 106, 7, 0, 0, 0, 9, 9, 9, 44, 0, 0, 0,
- 0, 2, 2, 2, 2, 2, 7, 0, 56, 180, 18, 18, 18, 18, 18, 18,
+ 0, 178, 178, 178, 106, 179, 178, 0, 0, 145, 2, 2, 180, 114, 114, 114,
+ 114, 114, 114, 114, 0, 0, 0, 0, 0, 9, 9, 9, 44, 0, 0, 0,
+ 0, 2, 2, 2, 2, 2, 7, 0, 56, 181, 18, 18, 18, 18, 18, 18,
18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 0, 0, 0,
38, 114, 24, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 0, 0, 0,
0, 2, 2, 2, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 0, 56,
35, 0, 4, 118, 118, 118, 119, 0, 0, 9, 9, 9, 47, 2, 2, 2,
0, 2, 2, 2, 2, 2, 0, 0, 2, 2, 2, 2, 2, 2, 2, 2,
- 44, 2, 2, 2, 2, 2, 2, 9, 9, 2, 2, 42, 42, 42, 90, 0,
- 0, O, O, O, GB, B, B, GB, O, O, WJ,FMPst,FMPst, O, CGJ, B,
- O, B,VMAbv,VMAbv,VMAbv, O,VMAbv, B,CMBlw,CMBlw,CMBlw,VMAbv,VMPst, VAbv, VPst,CMBlw,
- B, VPst, VPre, VPst, VBlw, VBlw, VBlw, VBlw, VAbv, VAbv, VAbv, VPst, VPst, VPst, H, VPre,
- VPst,VMBlw, O, O, VAbv, GB,VMAbv,VMPst,VMPst, O, B, VBlw, O, O, VPre, VPre,
- O, VPre, H, O, VPst,FMAbv, O,CMBlw, O, VAbv, O, VAbv, H, O,VMBlw,VMAbv,
- CMAbv, GB, GB, O, MBlw,CMAbv,CMAbv, VPst, VAbv,VMAbv, O, VPst, O, VPre, VPre,VMAbv,
- B, O, CS, CS,VMPst, B, VAbv, VAbv, B, R, O, HVM, O, O, FBlw, O,
- CMAbv, O,CMBlw, VAbv, VBlw, B, SUB, SUB, SUB, O, SUB, SUB, O, FBlw, O, B,
- VPst, VBlw, VPre,VMAbv,VMBlw,VMPst, IS, VAbv, MPst, MPre, MBlw, MBlw, B, MBlw, MBlw, VPst,
- VMPst,VMPst, B, MBlw, VPst, VPre, VAbv, VAbv,VMPst,VMPst,VMBlw, B,VMPst, VBlw, VPst, CGJ,
- CGJ, VPst,VMAbv,VMAbv,FMAbv, FAbv,CMAbv,FMAbv,VMAbv,FMAbv, VAbv, IS,FMAbv, B,FMAbv, B,
- CGJ, WJ, CGJ, GB,CMAbv,CMAbv, B, GB, B, VAbv, SUB, FPst, FPst,VMBlw, FPst, FPst,
- FBlw,VMAbv,FMBlw, VAbv, VPre, B, MPre, MBlw, SUB, FAbv, FAbv, MAbv, SUB, Sk, VPst, VAbv,
- VMAbv,VMAbv, FAbv,CMAbv, VPst, H, B, O,SMAbv,SMBlw,SMAbv,SMAbv,SMAbv, VPst, IS, VBlw,
- FAbv,VMPre,VMPre,FMAbv,CMBlw,VMBlw,VMBlw,VMAbv, CS, O,FMAbv, ZWNJ, CGJ, WJ, WJ, WJ,
- O,FMPst, O, O, H, MPst, VPst, H,VMAbv, VAbv,VMBlw, B, VBlw, FPst, VPst, FAbv,
- VMPst, B,CMAbv, VAbv, MBlw, MPst, MBlw, H, O, VBlw, MPst, MPre, MAbv, MBlw, O, B,
- FAbv, FAbv, FPst, VBlw, B, B, VPre, O,VMPst, IS, O,VMPst, VBlw, VPst,VMBlw,VMBlw,
- VMAbv, O, IS,VMBlw, B,VMPst,VMAbv,VMPst, CS, CS, B, N, N, O, HN, VPre,
- VBlw, VAbv, IS,CMAbv, O, VPst, B, R, R, O,FMBlw,CMBlw, VAbv, VPre,VMAbv,VMAbv,
- H, VAbv,CMBlw,FMAbv, B, CS, CS, H,CMBlw,VMPst, H,VMPst, VAbv,VMAbv, VPst, IS,
- R, MPst, R, MPst,CMBlw, B,FMBlw, VBlw,VMAbv, R, MBlw, MBlw, GB, FBlw, FBlw,CMAbv,
- IS, VBlw, IS, GB, VAbv, R,VMPst, H, H, O, VBlw,
+ 44, 2, 2, 2, 2, 2, 2, 9, 9, 2, 2, 2, 2, 2, 2, 20,
+ 20, 2, 2, 42, 42, 42, 90, 0, 0, O, O, O, GB, B, B, GB,
+ O, O, WJ,FMPst,FMPst, O, CGJ, B, O, B,VMAbv,VMAbv,VMAbv, O,VMAbv, B,
+ CMBlw,CMBlw,CMBlw,VMAbv,VMPst, VAbv, VPst,CMBlw, B, VPst, VPre, VPst, VBlw, VBlw, VBlw, VBlw,
+ VAbv, VAbv, VAbv, VPst, VPst, VPst, H, VPre, VPst,VMBlw, O, O, VAbv, GB,VMAbv,VMPst,
+ VMPst, O, B, VBlw, O, O, VPre, VPre, O, VPre, H, O, VPst,FMAbv, O,CMBlw,
+ O, VAbv, O, VAbv, H, O,VMBlw,VMAbv,CMAbv, GB, GB, O, MBlw,CMAbv,CMAbv, VPst,
+ VAbv,VMAbv, O, VPst, O, VPre, VPre,VMAbv, B, O, CS, CS,VMPst, B, VAbv, VAbv,
+ B, R, O, HVM, O, O,FMBlw, O,CMAbv, O,CMBlw, VAbv, VBlw, B, SUB, SUB,
+ SUB, O, SUB, SUB, O,FMBlw, O, B, VPst, VBlw, VPre,VMAbv,VMBlw,VMPst, IS, VAbv,
+ MPst, MPre, MBlw, MBlw, B, MBlw, MBlw, VPst,VMPst,VMPst, B, MBlw, VPst, VPre, VAbv, VAbv,
+ VMPst,VMPst,VMBlw, B,VMPst, VBlw, VPst, CGJ, CGJ, VPst,VMAbv,VMAbv,FMAbv, FAbv,CMAbv,FMAbv,
+ VMAbv,FMAbv, VAbv, IS,FMAbv, B,FMAbv, B, CGJ, WJ, CGJ, GB,CMAbv,CMAbv, B, GB,
+ B, VAbv, SUB, FPst, FPst,VMBlw, FPst, FPst, FBlw,VMAbv,FMBlw, VAbv, VPre, B, MPre, MBlw,
+ SUB, FAbv, FAbv, MAbv, SUB, Sk, VPst, VAbv,VMAbv,VMAbv, FAbv,CMAbv, VPst, H, B, O,
+ SMAbv,SMBlw,SMAbv,SMAbv,SMAbv, VPst, IS, VBlw, FAbv,VMPre,VMPre,FMAbv,CMBlw,VMBlw,VMBlw,VMAbv,
+ CS, O,FMAbv, ZWNJ, CGJ, WJ, WJ, WJ, O,FMPst, O, O, H, MPst, VPst, H,
+ VMAbv, VAbv,VMBlw, B, VBlw, FPst, VPst, FAbv,VMPst, B,CMAbv, VAbv, MBlw, MPst, MBlw, H,
+ O, VBlw, MPst, MPre, MAbv, MBlw, O, B, FAbv, FAbv, FPst, VBlw, B, B, VPre, O,
+ VMPst, IS, O,VMPst, VBlw, VPst,VMBlw,VMBlw,VMAbv, O, IS,VMBlw, B,VMPst,VMAbv,VMPst,
+ CS, CS, B, N, N, O, HN, VPre, VBlw, VAbv, IS,CMAbv, O, VPst, B, R,
+ R,CMBlw, VAbv, VPre,VMAbv,VMAbv, H, VAbv,CMBlw,FMAbv, B, CS, CS, H,CMBlw,VMPst,
+ H,VMPst, VAbv,VMAbv, VPst, IS, R, MPst, R, MPst,CMBlw, B,FMBlw, VBlw,VMAbv, R,
+ MBlw, MBlw, GB, FBlw, FBlw,CMAbv, IS, VBlw, IS, GB, VAbv, R,VMPst, H, H, B,
+ H, B,VMBlw, O, VBlw,
};
static const uint16_t
-hb_use_u16[776] =
+hb_use_u16[784] =
{
0, 0, 1, 2, 0, 0, 0, 0, 0, 0, 3, 4, 0, 5, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 0, 0, 0,
@@ -332,14 +339,279 @@ hb_use_u16[776] =
9,242, 73,243, 0, 0, 0, 0,244, 9, 9,245,246, 2,247, 9,
248,249, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9,250,
251, 48, 9,252,253, 2, 0, 0, 9, 9, 9, 9, 9, 9, 9, 9,
- 9, 9, 98,254, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0,
- 9, 9, 9,255, 0, 0, 0, 0, 9, 9, 9, 9,256,257,258,258,
- 259,260, 0, 0, 0, 0,261, 0, 9, 9, 9, 9, 9,262, 0, 0,
- 9, 9, 9, 9, 9, 9,105, 70, 94,263, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0,264, 9, 9, 70,265,266, 0, 0, 0,
- 0, 9,267, 0, 9, 9,268, 2, 9, 9, 9, 9,269, 2, 0, 0,
- 129,129,129,129,129,129,129,129,160,160,160,160,160,160,160,160,
- 160,160,160,160,160,160,160,129,
+ 9, 9, 9,254,255,256, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0,
+ 9, 9, 9,257, 0, 0, 0, 0, 9, 9, 9, 9,258,259,260,260,
+ 261,262, 0, 0, 0, 0,263, 0, 9, 9, 9, 9, 9,264, 0, 0,
+ 9, 9, 9, 9, 9, 9,105, 70, 94,265, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0,266, 9, 9, 70,267,268, 0, 0, 0,
+ 0, 9,269, 0, 9, 9,270, 2, 0, 0, 0, 0, 0, 9,271, 2,
+ 9, 9, 9, 9,272, 2, 0, 0,129,129,129,129,129,129,129,129,
+ 160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,129,
+};
+
+static inline unsigned
+hb_use_b4 (const uint8_t* a, unsigned i)
+{
+ return (a[i>>1]>>((i&1u)<<2))&15u;
+}
+static inline uint_fast8_t
+hb_use_get_category (unsigned u)
+{
+ return u<921600u?hb_use_u8[2777+(((hb_use_u8[593+(((hb_use_u16[((hb_use_u8[113+(((hb_use_b4(hb_use_u8,u>>1>>3>>3>>5))<<5)+((u>>1>>3>>3)&31u))])<<3)+((u>>1>>3)&7u)])<<3)+((u>>1)&7u))])<<1)+((u)&1u))]:O;
+}
+
+
+#else
+
+static const uint8_t
+hb_use_u8[3413] =
+{
+ 16, 50, 51, 51, 51, 52, 51, 83, 118, 131, 51, 57, 58, 179, 195, 61,
+ 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51,
+ 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51,
+ 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51,
+ 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51,
+ 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51,
+ 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51,
+ 14, 0, 1, 1, 2, 1, 1, 3, 4, 5, 6, 7, 8, 9, 10, 1,
+ 11, 12, 1, 1, 1, 1, 1, 1, 13, 14, 15, 16, 17, 18, 19, 1,
+ 1, 20, 1, 1, 1, 1, 21, 1, 1, 1, 1, 1, 1, 1, 22, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 23, 24, 25, 26, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 27,
+ 28, 1, 1, 1, 1, 1, 29, 1, 1, 1, 1, 30, 31, 1, 32, 33,
+ 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 1, 46, 47, 48,
+ 49, 50, 50, 50, 50, 51, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 52, 53, 1, 1, 1,
+ 54, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 50, 55, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 56, 1, 1,
+ 1, 1, 57, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 58, 59, 1, 60, 1, 1, 1, 1, 61, 1, 1, 1, 1, 1,
+ 1, 62, 63, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62,
+ 62, 0, 1, 0, 0, 0, 2, 3, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 6, 7, 0, 0, 8, 0, 0, 0, 0,
+ 0, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
+ 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
+ 40, 41, 42, 43, 36, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
+ 0, 55, 56, 57, 58, 59, 0, 0, 0, 60, 61, 62, 63, 55, 64, 65,
+ 66, 67, 55, 55, 68, 69, 70, 0, 0, 71, 72, 73, 74, 55, 75, 76,
+ 0, 77, 55, 78, 79, 80, 0, 0, 0, 81, 82, 83, 84, 85, 86, 55,
+ 87, 55, 88, 89, 0, 0, 0, 90, 91, 0, 0, 0, 0, 0, 0, 0,
+ 92, 93, 94, 0, 95, 96, 0, 0, 97, 0, 0, 0, 0, 0, 0, 98,
+ 0, 0, 99, 55, 100, 0, 0, 0, 0, 101, 102, 55, 103, 104, 105, 106,
+ 107, 55, 108, 109, 0, 110, 111, 112, 113, 55, 114, 115, 116, 55, 117, 118,
+ 119, 0, 0, 0, 0, 0, 0, 55, 120, 121, 0, 0, 0, 0, 0, 0,
+ 122, 0, 0, 0, 0, 0, 0, 0, 123, 0, 0, 0, 124, 125, 126, 0,
+ 0, 127, 128, 129, 0, 0, 0, 50, 130, 0, 0, 0, 0, 131, 132, 0,
+ 0, 55, 133, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 134, 0,
+ 0, 0, 99, 135, 99, 136, 137, 138, 0, 139, 140, 141, 142, 143, 144, 145,
+ 0, 146, 147, 148, 149, 143, 150, 151, 152, 153, 154, 155, 0, 156, 157, 158,
+ 159, 160, 161, 162, 163, 0, 0, 0, 0, 55, 164, 165, 166, 167, 168, 169,
+ 0, 0, 0, 0, 0, 55, 170, 171, 0, 55, 172, 173, 0, 55, 174, 66,
+ 0, 175, 176, 177, 0, 0, 0, 0, 0, 55, 178, 0, 0, 0, 0, 0,
+ 0, 179, 180, 181, 0, 0, 182, 183, 184, 185, 186, 187, 55, 188, 0, 0,
+ 0, 189, 190, 191, 192, 193, 194, 0, 0, 195, 196, 197, 198, 199, 66, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 200, 201, 202, 203, 0, 0, 0, 0,
+ 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 204, 205, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 66, 0, 55, 206, 0, 0, 0, 0, 0,
+ 0, 55, 55, 207, 208, 209, 0, 0, 210, 55, 55, 55, 55, 55, 55, 211,
+ 0, 55, 55, 55, 212, 213, 0, 0, 0, 0, 0, 0, 214, 0, 0, 0,
+ 0, 55, 215, 216, 0, 0, 0, 0, 0, 0, 0, 0, 0, 99, 217, 55,
+ 218, 0, 0, 0, 0, 0, 0, 99, 219, 55, 55, 220, 0, 0, 0, 0,
+ 0, 221, 221, 221, 221, 221, 221, 221, 221, 222, 222, 222, 222, 222, 222, 222,
+ 223, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1,
+ 0, 2, 2, 2, 2, 2, 0, 0, 0, 3, 0, 0, 0, 0, 0, 4,
+ 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 6, 7, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 8, 9, 9, 9, 9, 0, 0, 0, 7, 10,
+ 0, 2, 2, 2, 2, 11, 12, 0, 0, 9, 13, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 14, 15, 16, 17, 18, 19, 20, 14, 21, 22,
+ 23, 10, 24, 25, 18, 2, 2, 2, 2, 2, 18, 0, 2, 2, 2, 2,
+ 2, 0, 2, 2, 2, 2, 2, 2, 2, 26, 27, 28, 2, 2, 2, 7,
+ 28, 7, 28, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 7, 2, 2,
+ 2, 7, 7, 0, 2, 2, 0, 15, 16, 17, 18, 29, 30, 31, 30, 32,
+ 0, 0, 0, 0, 33, 0, 0, 2, 28, 2, 0, 0, 0, 0, 0, 7,
+ 34, 10, 13, 28, 2, 2, 7, 0, 28, 7, 2, 28, 7, 2, 0, 35,
+ 16, 17, 29, 0, 25, 36, 25, 37, 0, 38, 0, 0, 0, 28, 2, 7,
+ 7, 0, 0, 0, 2, 2, 2, 2, 2, 39, 40, 41, 0, 0, 0, 0,
+ 0, 10, 13, 28, 2, 2, 2, 2, 28, 2, 28, 2, 2, 2, 2, 2,
+ 2, 7, 2, 28, 2, 2, 0, 15, 16, 17, 18, 19, 25, 20, 33, 22,
+ 0, 0, 0, 0, 0, 28, 39, 39, 42, 10, 27, 28, 2, 2, 2, 7,
+ 28, 7, 2, 28, 2, 2, 0, 15, 43, 0, 0, 25, 20, 0, 0, 2,
+ 28, 28, 0, 0, 0, 0, 0, 0, 0, 0, 44, 28, 2, 2, 7, 0,
+ 2, 7, 2, 2, 0, 28, 7, 7, 2, 0, 28, 7, 0, 2, 7, 0,
+ 2, 2, 2, 2, 2, 2, 0, 0, 21, 14, 45, 0, 46, 31, 46, 32,
+ 0, 0, 0, 0, 33, 0, 0, 0, 0, 13, 27, 47, 2, 2, 2, 7,
+ 2, 7, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0, 15,
+ 20, 14, 21, 45, 20, 36, 20, 37, 0, 0, 0, 25, 29, 2, 7, 0,
+ 0, 8, 27, 28, 2, 2, 2, 7, 2, 2, 2, 28, 2, 2, 0, 15,
+ 43, 0, 0, 33, 45, 0, 0, 0, 7, 48, 49, 0, 0, 0, 0, 0,
+ 0, 9, 27, 2, 2, 2, 2, 7, 2, 2, 2, 2, 2, 2, 50, 51,
+ 21, 21, 17, 29, 46, 31, 46, 32, 52, 0, 0, 0, 33, 0, 0, 0,
+ 28, 10, 27, 28, 2, 2, 2, 2, 2, 2, 2, 2, 7, 0, 2, 2,
+ 2, 2, 28, 2, 2, 2, 2, 28, 0, 2, 2, 2, 7, 0, 53, 0,
+ 33, 21, 20, 29, 29, 16, 46, 46, 23, 0, 21, 0, 0, 0, 0, 0,
+ 0, 2, 0, 2, 7, 0, 0, 0, 0, 0, 0, 0, 0, 18, 0, 0,
+ 0, 2, 2, 54, 54, 55, 0, 0, 16, 2, 2, 2, 2, 28, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 7, 0, 56, 19, 57, 20, 20, 18, 18,
+ 44, 19, 9, 29, 9, 2, 2, 58, 59, 59, 59, 59, 59, 60, 59, 59,
+ 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 61,
+ 0, 0, 0, 0, 62, 0, 0, 0, 0, 2, 2, 2, 2, 2, 63, 43,
+ 57, 64, 20, 20, 65, 66, 67, 68, 69, 2, 2, 2, 2, 2, 1, 0,
+ 3, 2, 2, 2, 21, 18, 2, 2, 70, 69, 71, 72, 63, 71, 27, 27,
+ 2, 50, 20, 51, 2, 2, 2, 2, 2, 2, 73, 74, 75, 27, 27, 76,
+ 77, 2, 2, 2, 2, 2, 27, 43, 0, 2, 57, 78, 0, 0, 0, 0,
+ 28, 2, 57, 45, 0, 0, 0, 0, 0, 2, 57, 0, 0, 0, 0, 0,
+ 0, 2, 2, 2, 2, 2, 2, 7, 2, 7, 57, 0, 0, 0, 0, 0,
+ 0, 2, 2, 79, 43, 20, 57, 18, 46, 46, 46, 46, 13, 80, 81, 82,
+ 83, 84, 85, 0, 0, 0, 0, 86, 0, 7, 0, 0, 28, 0, 87, 79,
+ 88, 2, 2, 2, 2, 7, 0, 0, 0, 40, 40, 89, 90, 2, 2, 2,
+ 2, 2, 2, 2, 2, 11, 7, 0, 0, 91, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 7, 20, 78, 43, 20, 92, 59, 0,
+ 0, 93, 94, 93, 93, 95, 96, 0, 0, 2, 2, 2, 2, 2, 2, 2,
+ 0, 2, 2, 7, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2, 0,
+ 0, 2, 2, 2, 2, 27, 0, 0, 0, 2, 2, 2, 2, 2, 7, 0,
+ 0, 2, 2, 2, 50, 97, 43, 0, 0, 2, 2, 98, 99, 100, 101, 59,
+ 61, 102, 14, 43, 20, 57, 19, 78, 46, 46, 74, 9, 9, 9, 103, 44,
+ 38, 9, 104, 72, 2, 2, 2, 2, 2, 2, 2, 105, 20, 18, 18, 20,
+ 46, 46, 20, 106, 2, 2, 2, 7, 0, 0, 0, 0, 0, 0, 107, 108,
+ 109, 109, 109, 0, 0, 0, 0, 0, 0, 104, 72, 2, 2, 2, 2, 2,
+ 2, 58, 59, 57, 23, 20, 110, 59, 2, 2, 2, 2, 105, 20, 21, 43,
+ 43, 100, 12, 0, 0, 0, 0, 0, 0, 2, 2, 59, 16, 46, 21, 111,
+ 100, 100, 100, 112, 113, 0, 0, 0, 0, 2, 2, 2, 2, 2, 0, 28,
+ 2, 9, 44, 114, 114, 114, 9, 114, 114, 13, 114, 114, 114, 24, 0, 38,
+ 0, 0, 0, 115, 49, 9, 3, 0, 0, 0, 0, 0, 0, 0, 116, 0,
+ 0, 0, 0, 0, 0, 0, 4, 117, 118, 40, 40, 3, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 118, 118, 119, 118, 118, 118, 118, 118, 118, 118,
+ 118, 0, 0, 120, 0, 0, 0, 0, 0, 0, 5, 120, 0, 0, 0, 0,
+ 0, 44, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7,
+ 0, 2, 2, 2, 2, 0, 0, 0, 28, 0, 0, 0, 0, 0, 0, 0,
+ 121, 2, 51, 2, 106, 2, 8, 2, 2, 2, 63, 17, 14, 0, 0, 29,
+ 0, 2, 2, 0, 0, 0, 0, 0, 0, 27, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 122, 21, 21, 21, 21, 21, 21, 21, 123, 0, 0, 0, 0,
+ 0, 9, 9, 9, 9, 9, 9, 9, 9, 9, 2, 0, 0, 0, 0, 0,
+ 50, 2, 2, 2, 20, 20, 124, 114, 0, 2, 2, 2, 125, 18, 57, 18,
+ 111, 100, 126, 0, 0, 0, 0, 0, 0, 9, 127, 2, 2, 2, 2, 2,
+ 2, 2, 128, 21, 20, 18, 46, 129, 130, 131, 0, 0, 0, 0, 0, 0,
+ 0, 2, 2, 50, 28, 2, 2, 2, 2, 2, 2, 2, 2, 8, 20, 57,
+ 97, 74, 132, 133, 134, 0, 0, 0, 0, 2, 135, 2, 2, 2, 2, 136,
+ 0, 28, 2, 40, 3, 0, 77, 13, 2, 51, 20, 137, 50, 51, 2, 2,
+ 103, 8, 7, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 138, 19,
+ 23, 0, 0, 139, 140, 0, 0, 0, 0, 2, 63, 43, 21, 78, 45, 141,
+ 0, 79, 79, 79, 79, 79, 79, 79, 79, 0, 0, 0, 0, 0, 0, 0,
+ 4, 118, 118, 118, 118, 119, 0, 0, 0, 2, 2, 2, 2, 2, 7, 2,
+ 2, 2, 7, 2, 28, 2, 2, 2, 2, 2, 28, 2, 2, 2, 28, 7,
+ 0, 125, 18, 25, 29, 0, 0, 142, 143, 2, 2, 28, 2, 28, 2, 2,
+ 2, 2, 2, 2, 0, 12, 35, 0, 144, 2, 2, 11, 35, 0, 28, 2,
+ 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 2, 2,
+ 7, 2, 2, 9, 39, 0, 0, 0, 0, 2, 2, 2, 2, 2, 25, 36,
+ 0, 2, 2, 2, 114, 114, 114, 114, 114, 145, 2, 7, 0, 0, 0, 0,
+ 0, 2, 12, 12, 0, 0, 0, 0, 0, 7, 2, 2, 7, 2, 2, 2,
+ 2, 28, 2, 7, 0, 28, 2, 0, 0, 146, 147, 148, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 20, 20, 18, 18, 18, 20, 20, 131, 0, 0, 0,
+ 0, 0, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 2, 2, 2, 2,
+ 2, 51, 50, 51, 0, 0, 0, 0, 150, 9, 72, 2, 2, 2, 2, 2,
+ 2, 16, 17, 19, 14, 22, 35, 0, 0, 0, 29, 0, 0, 0, 0, 0,
+ 0, 9, 47, 2, 2, 2, 2, 2, 2, 2, 2, 2, 125, 18, 20, 151,
+ 20, 19, 152, 153, 2, 2, 2, 2, 2, 0, 0, 63, 154, 0, 0, 0,
+ 0, 2, 11, 0, 0, 0, 0, 0, 0, 2, 63, 23, 18, 18, 18, 20,
+ 20, 106, 155, 0, 0, 54, 156, 29, 157, 28, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 21, 17, 20, 20, 158, 42, 0, 0, 0,
+ 47, 125, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 7, 7, 2, 2,
+ 28, 2, 2, 2, 2, 2, 2, 2, 28, 2, 2, 2, 2, 2, 2, 2,
+ 8, 16, 17, 19, 20, 159, 29, 0, 0, 9, 9, 28, 2, 2, 2, 7,
+ 28, 7, 2, 28, 2, 2, 56, 15, 21, 14, 21, 45, 30, 31, 30, 32,
+ 0, 0, 0, 0, 33, 0, 0, 0, 2, 2, 21, 0, 9, 9, 9, 44,
+ 0, 9, 9, 44, 0, 0, 0, 0, 0, 2, 2, 63, 23, 18, 18, 18,
+ 20, 21, 123, 13, 15, 0, 0, 0, 0, 2, 2, 2, 2, 2, 0, 0,
+ 160, 161, 0, 0, 0, 0, 0, 0, 0, 16, 17, 18, 18, 64, 97, 23,
+ 157, 9, 162, 7, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2, 2,
+ 63, 23, 18, 18, 0, 46, 46, 9, 163, 35, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 2, 2, 18, 0, 21, 17, 18, 18, 19, 14, 80,
+ 163, 36, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 8, 164,
+ 23, 18, 20, 20, 162, 7, 0, 0, 0, 2, 2, 2, 2, 2, 7, 41,
+ 133, 21, 20, 18, 74, 19, 20, 0, 0, 2, 2, 2, 7, 0, 0, 0,
+ 0, 2, 2, 2, 2, 2, 2, 16, 17, 18, 19, 20, 103, 163, 35, 0,
+ 0, 2, 2, 2, 7, 28, 0, 2, 2, 2, 2, 28, 7, 2, 2, 2,
+ 2, 21, 21, 16, 30, 31, 10, 165, 166, 167, 168, 0, 0, 0, 0, 0,
+ 0, 2, 2, 2, 2, 0, 2, 2, 2, 63, 23, 18, 18, 0, 20, 21,
+ 27, 106, 0, 31, 0, 0, 0, 0, 0, 50, 18, 20, 20, 20, 137, 2,
+ 2, 2, 169, 170, 9, 13, 171, 70, 172, 0, 0, 1, 144, 0, 0, 0,
+ 0, 50, 18, 20, 14, 17, 18, 2, 2, 2, 2, 155, 155, 155, 173, 173,
+ 173, 173, 173, 173, 13, 174, 0, 28, 0, 20, 18, 18, 29, 20, 20, 9,
+ 163, 0, 59, 59, 59, 59, 59, 59, 59, 64, 19, 80, 44, 0, 0, 0,
+ 0, 2, 2, 2, 7, 2, 28, 2, 2, 50, 20, 20, 29, 0, 36, 20,
+ 25, 9, 156, 175, 171, 0, 0, 0, 0, 2, 2, 2, 28, 7, 2, 2,
+ 2, 2, 2, 2, 2, 2, 21, 21, 45, 20, 33, 80, 66, 0, 0, 0,
+ 0, 2, 176, 64, 45, 0, 0, 0, 0, 9, 177, 2, 2, 2, 2, 2,
+ 2, 2, 2, 21, 20, 18, 29, 0, 46, 14, 140, 0, 0, 0, 0, 0,
+ 0, 178, 178, 178, 106, 179, 178, 0, 0, 145, 2, 2, 180, 114, 114, 114,
+ 114, 114, 114, 114, 0, 0, 0, 0, 0, 9, 9, 9, 44, 0, 0, 0,
+ 0, 2, 2, 2, 2, 2, 7, 0, 56, 181, 18, 18, 18, 18, 18, 18,
+ 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 0, 0, 0,
+ 38, 114, 24, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 0, 0, 0,
+ 0, 2, 2, 2, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 0, 56,
+ 35, 0, 4, 118, 118, 118, 119, 0, 0, 9, 9, 9, 47, 2, 2, 2,
+ 0, 2, 2, 2, 2, 2, 0, 0, 2, 2, 2, 2, 2, 2, 2, 2,
+ 44, 2, 2, 2, 2, 2, 2, 9, 9, 2, 2, 2, 2, 2, 2, 20,
+ 20, 2, 2, 42, 42, 42, 90, 0, 0, O, O, O, GB, B, B, GB,
+ O, O, WJ,FMPst,FMPst, O, CGJ, B, O, B,VMAbv,VMAbv,VMAbv, O,VMAbv, B,
+ CMBlw,CMBlw,CMBlw,VMAbv,VMPst, VAbv, VPst,CMBlw, B, VPst, VPre, VPst, VBlw, VBlw, VBlw, VBlw,
+ VAbv, VAbv, VAbv, VPst, VPst, VPst, H, VPre, VPst,VMBlw, O, O, VAbv, GB,VMAbv,VMPst,
+ VMPst, O, B, VBlw, O, O, VPre, VPre, O, VPre, H, O, VPst,FMAbv, O,CMBlw,
+ O, VAbv, O, VAbv, H, O,VMBlw,VMAbv,CMAbv, GB, GB, O, MBlw,CMAbv,CMAbv, VPst,
+ VAbv,VMAbv, O, VPst, O, VPre, VPre,VMAbv, B, O, CS, CS,VMPst, B, VAbv, VAbv,
+ B, R, O, HVM, O, O,FMBlw, O,CMAbv, O,CMBlw, VAbv, VBlw, B, SUB, SUB,
+ SUB, O, SUB, SUB, O,FMBlw, O, B, VPst, VBlw, VPre,VMAbv,VMBlw,VMPst, IS, VAbv,
+ MPst, MPre, MBlw, MBlw, B, MBlw, MBlw, VPst,VMPst,VMPst, B, MBlw, VPst, VPre, VAbv, VAbv,
+ VMPst,VMPst,VMBlw, B,VMPst, VBlw, VPst, CGJ, CGJ, VPst,VMAbv,VMAbv,FMAbv, FAbv,CMAbv,FMAbv,
+ VMAbv,FMAbv, VAbv, IS,FMAbv, B,FMAbv, B, CGJ, WJ, CGJ, GB,CMAbv,CMAbv, B, GB,
+ B, VAbv, SUB, FPst, FPst,VMBlw, FPst, FPst, FBlw,VMAbv,FMBlw, VAbv, VPre, B, MPre, MBlw,
+ SUB, FAbv, FAbv, MAbv, SUB, Sk, VPst, VAbv,VMAbv,VMAbv, FAbv,CMAbv, VPst, H, B, O,
+ SMAbv,SMBlw,SMAbv,SMAbv,SMAbv, VPst, IS, VBlw, FAbv,VMPre,VMPre,FMAbv,CMBlw,VMBlw,VMBlw,VMAbv,
+ CS, O,FMAbv, ZWNJ, CGJ, WJ, WJ, WJ, O,FMPst, O, O, H, MPst, VPst, H,
+ VMAbv, VAbv,VMBlw, B, VBlw, FPst, VPst, FAbv,VMPst, B,CMAbv, VAbv, MBlw, MPst, MBlw, H,
+ O, VBlw, MPst, MPre, MAbv, MBlw, O, B, FAbv, FAbv, FPst, VBlw, B, B, VPre, O,
+ VMPst, IS, O,VMPst, VBlw, VPst,VMBlw,VMBlw,VMAbv, O, IS,VMBlw, B,VMPst,VMAbv,VMPst,
+ CS, CS, B, N, N, O, HN, VPre, VBlw, VAbv, IS,CMAbv, O, VPst, B, R,
+ R,CMBlw, VAbv, VPre,VMAbv,VMAbv, H, VAbv,CMBlw,FMAbv, B, CS, CS, H,CMBlw,VMPst,
+ H,VMPst, VAbv,VMAbv, VPst, IS, R, MPst, R, MPst,CMBlw, B,FMBlw, VBlw,VMAbv, R,
+ MBlw, MBlw, GB, FBlw, FBlw,CMAbv, IS, VBlw, IS, GB, VAbv, R,VMPst, H, H, B,
+ H, B,VMBlw, O, VBlw,
+};
+static const uint16_t
+hb_use_u16[448] =
+{
+ 0, 0, 1, 2, 3, 4, 0, 5, 6, 0, 7, 0, 8, 9, 10, 11,
+ 9, 12, 13, 9, 9, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24,
+ 17, 25, 26, 20, 21, 27, 28, 29, 30, 31, 32, 33, 21, 34, 35, 0,
+ 17, 36, 37, 20, 21, 38, 23, 39, 17, 40, 41, 42, 43, 44, 45, 46,
+ 30, 0, 47, 48, 21, 49, 50, 51, 17, 0, 52, 48, 21, 53, 50, 54,
+ 17, 55, 56, 48, 9, 57, 58, 59, 60, 61, 9, 62, 63, 64, 30, 65,
+ 66, 67, 9, 68, 69, 9, 70, 71, 72, 73, 74, 75, 76, 0, 9, 9,
+ 77, 78, 79, 80, 81, 82, 83, 84, 9, 85, 9, 86, 9, 87, 88, 89,
+ 9, 90, 91, 92, 2, 0, 93, 0, 9, 94, 95, 9, 96, 0, 97, 98,
+ 99,100, 30, 9,101,102,103, 9,104,105, 9,106, 9,107,108,109,
+ 2, 2,110, 9, 9,111,112, 2,113,114,115, 9,116, 9,117,118,
+ 119,120,121, 0, 0,122,123,124, 0,125,126,127,128, 0,129,130,
+ 131, 0, 0,132,133, 0, 0, 9,134,135,136, 9,137, 0, 9,138,
+ 139, 9, 9,140,141, 2,142,143,144, 9,145,146,147, 9, 9,148,
+ 149, 2,150, 98,151,152,153, 2, 9,154, 9,155,156, 0,157,158,
+ 159, 2,160, 0, 0,161, 0,162, 0,163,163,164, 33,165,166,167,
+ 9,168, 94, 0,169, 0, 9,170,171, 0,172, 2,173,170,174,175,
+ 176, 0, 0,177,178, 0,179, 9, 9,180,181,182,183,184,185, 9,
+ 9,186,187, 0,188, 9,189,190,191, 9, 9,192, 9,193,194,105,
+ 195,102, 9, 33,196,197,198, 0,199,200, 94, 9, 9,201,202, 2,
+ 203, 20, 21,204,205,206,207,208, 9,209,210,211,212, 0,195, 9,
+ 9,213,214, 2,215,216,217,218, 9,219,220, 2,221,222, 9,223,
+ 224,103,225, 0,226,227,228,229, 9,230,231, 2,232, 9, 9,233,
+ 234, 0,235, 9, 9,236,237,238,239,240, 21, 9,215,241, 7, 9,
+ 70, 18, 9,242, 73,243,244, 9, 9,245,246, 2,247, 9,248,249,
+ 9,250,251, 48, 9,252,253, 2, 9,254,255,256, 9,257,258,259,
+ 260,260,261,262,263, 0, 9,264,105, 70, 94,265, 0,266, 70,267,
+ 268, 0,269, 0,270, 2,271, 2,272, 2,129,129,160,160,160,129,
};
static inline unsigned
@@ -350,9 +622,11 @@ hb_use_b4 (const uint8_t* a, unsigned i)
static inline uint_fast8_t
hb_use_get_category (unsigned u)
{
- return u<921600u?hb_use_u8[2753+(((hb_use_u8[593+(((hb_use_u16[((hb_use_u8[113+(((hb_use_b4(hb_use_u8,u>>1>>3>>3>>5))<<5)+((u>>1>>3>>3)&31u))])<<3)+((u>>1>>3)&7u)])<<3)+((u>>1)&7u))])<<1)+((u)&1u))]:O;
+ return u<921600u?hb_use_u8[3049+(((hb_use_u8[865+(((hb_use_u16[((hb_use_u8[353+(((hb_use_u8[113+(((hb_use_b4(hb_use_u8,u>>1>>3>>1>>3>>4))<<4)+((u>>1>>3>>1>>3)&15u))])<<3)+((u>>1>>3>>1)&7u))])<<1)+((u>>1>>3)&1u)])<<3)+((u>>1)&7u))])<<1)+((u)&1u))]:O;
}
+#endif
+
#undef B
#undef CGJ
#undef CS
diff --git a/thirdparty/harfbuzz/src/hb-ot-shaper-use.cc b/thirdparty/harfbuzz/src/hb-ot-shaper-use.cc
index c40ec52f9c..342aba1235 100644
--- a/thirdparty/harfbuzz/src/hb-ot-shaper-use.cc
+++ b/thirdparty/harfbuzz/src/hb-ot-shaper-use.cc
@@ -89,19 +89,19 @@ use_other_features[] =
HB_TAG('p','s','t','s'),
};
-static void
+static bool
setup_syllables_use (const hb_ot_shape_plan_t *plan,
hb_font_t *font,
hb_buffer_t *buffer);
-static void
+static bool
record_rphf_use (const hb_ot_shape_plan_t *plan,
hb_font_t *font,
hb_buffer_t *buffer);
-static void
+static bool
record_pref_use (const hb_ot_shape_plan_t *plan,
hb_font_t *font,
hb_buffer_t *buffer);
-static void
+static bool
reorder_use (const hb_ot_shape_plan_t *plan,
hb_font_t *font,
hb_buffer_t *buffer);
@@ -293,7 +293,7 @@ setup_topographical_masks (const hb_ot_shape_plan_t *plan,
}
}
-static void
+static bool
setup_syllables_use (const hb_ot_shape_plan_t *plan,
hb_font_t *font HB_UNUSED,
hb_buffer_t *buffer)
@@ -304,9 +304,10 @@ setup_syllables_use (const hb_ot_shape_plan_t *plan,
buffer->unsafe_to_break (start, end);
setup_rphf_mask (plan, buffer);
setup_topographical_masks (plan, buffer);
+ return false;
}
-static void
+static bool
record_rphf_use (const hb_ot_shape_plan_t *plan,
hb_font_t *font HB_UNUSED,
hb_buffer_t *buffer)
@@ -314,7 +315,7 @@ record_rphf_use (const hb_ot_shape_plan_t *plan,
const use_shape_plan_t *use_plan = (const use_shape_plan_t *) plan->data;
hb_mask_t mask = use_plan->rphf_mask;
- if (!mask) return;
+ if (!mask) return false;
hb_glyph_info_t *info = buffer->info;
foreach_syllable (buffer, start, end)
@@ -327,9 +328,10 @@ record_rphf_use (const hb_ot_shape_plan_t *plan,
break;
}
}
+ return false;
}
-static void
+static bool
record_pref_use (const hb_ot_shape_plan_t *plan HB_UNUSED,
hb_font_t *font HB_UNUSED,
hb_buffer_t *buffer)
@@ -346,6 +348,7 @@ record_pref_use (const hb_ot_shape_plan_t *plan HB_UNUSED,
break;
}
}
+ return false;
}
static inline bool
@@ -438,17 +441,19 @@ reorder_syllable_use (hb_buffer_t *buffer, unsigned int start, unsigned int end)
}
}
-static void
+static bool
reorder_use (const hb_ot_shape_plan_t *plan,
hb_font_t *font,
hb_buffer_t *buffer)
{
+ bool ret = false;
if (buffer->message (font, "start reordering USE"))
{
- hb_syllabic_insert_dotted_circles (font, buffer,
- use_broken_cluster,
- USE(B),
- USE(R));
+ if (hb_syllabic_insert_dotted_circles (font, buffer,
+ use_broken_cluster,
+ USE(B),
+ USE(R)))
+ ret = true;
foreach_syllable (buffer, start, end)
reorder_syllable_use (buffer, start, end);
@@ -457,6 +462,8 @@ reorder_use (const hb_ot_shape_plan_t *plan,
}
HB_BUFFER_DEALLOCATE_VAR (buffer, use_category);
+
+ return ret;
}
diff --git a/thirdparty/harfbuzz/src/hb-ot-shaper-vowel-constraints.cc b/thirdparty/harfbuzz/src/hb-ot-shaper-vowel-constraints.cc
index cb4db4a8b2..e76b554b00 100644
--- a/thirdparty/harfbuzz/src/hb-ot-shaper-vowel-constraints.cc
+++ b/thirdparty/harfbuzz/src/hb-ot-shaper-vowel-constraints.cc
@@ -342,6 +342,40 @@ _hb_preprocess_text_vowel_constraints (const hb_ot_shape_plan_t *plan HB_UNUSED,
}
break;
+ case HB_SCRIPT_KHOJKI:
+ for (buffer->idx = 0; buffer->idx + 1 < count && buffer->successful;)
+ {
+ bool matched = false;
+ switch (buffer->cur ().codepoint)
+ {
+ case 0x11200u:
+ switch (buffer->cur (1).codepoint)
+ {
+ case 0x1122Cu: case 0x11231u: case 0x11233u:
+ matched = true;
+ break;
+ }
+ break;
+ case 0x11206u:
+ matched = 0x1122Cu == buffer->cur (1).codepoint;
+ break;
+ case 0x1122Cu:
+ switch (buffer->cur (1).codepoint)
+ {
+ case 0x11230u: case 0x11231u:
+ matched = true;
+ break;
+ }
+ break;
+ case 0x11240u:
+ matched = 0x1122Eu == buffer->cur (1).codepoint;
+ break;
+ }
+ (void) buffer->next_glyph ();
+ if (matched) _output_with_dotted_circle (buffer);
+ }
+ break;
+
case HB_SCRIPT_KHUDAWADI:
for (buffer->idx = 0; buffer->idx + 1 < count && buffer->successful;)
{
diff --git a/thirdparty/harfbuzz/src/hb-ot-shaper.hh b/thirdparty/harfbuzz/src/hb-ot-shaper.hh
index b2d1acb39b..0207b2bbe3 100644
--- a/thirdparty/harfbuzz/src/hb-ot-shaper.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-shaper.hh
@@ -262,11 +262,13 @@ hb_ot_shaper_categorize (const hb_ot_shape_planner_t *planner)
return &_hb_ot_shaper_myanmar;
+#ifndef HB_NO_OT_SHAPER_MYANMAR_ZAWGYI
#define HB_SCRIPT_MYANMAR_ZAWGYI ((hb_script_t) HB_TAG ('Q','a','a','g'))
case HB_SCRIPT_MYANMAR_ZAWGYI:
/* https://github.com/harfbuzz/harfbuzz/issues/1162 */
return &_hb_ot_shaper_myanmar_zawgyi;
+#endif
/* Unicode-2.0 additions */
diff --git a/thirdparty/harfbuzz/src/hb-ot-stat-table.hh b/thirdparty/harfbuzz/src/hb-ot-stat-table.hh
index d83bf14219..59bb2daccd 100644
--- a/thirdparty/harfbuzz/src/hb-ot-stat-table.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-stat-table.hh
@@ -136,7 +136,7 @@ struct AxisValueFormat1
NameID valueNameID; /* The name ID for entries in the 'name' table
* that provide a display string for this
* attribute value. */
- HBFixed value; /* A numeric value for this attribute value. */
+ F16DOT16 value; /* A numeric value for this attribute value. */
public:
DEFINE_SIZE_STATIC (12);
};
@@ -195,10 +195,10 @@ struct AxisValueFormat2
NameID valueNameID; /* The name ID for entries in the 'name' table
* that provide a display string for this
* attribute value. */
- HBFixed nominalValue; /* A numeric value for this attribute value. */
- HBFixed rangeMinValue; /* The minimum value for a range associated
+ F16DOT16 nominalValue; /* A numeric value for this attribute value. */
+ F16DOT16 rangeMinValue; /* The minimum value for a range associated
* with the specified name ID. */
- HBFixed rangeMaxValue; /* The maximum value for a range associated
+ F16DOT16 rangeMaxValue; /* The maximum value for a range associated
* with the specified name ID. */
public:
DEFINE_SIZE_STATIC (20);
@@ -258,8 +258,8 @@ struct AxisValueFormat3
NameID valueNameID; /* The name ID for entries in the 'name' table
* that provide a display string for this
* attribute value. */
- HBFixed value; /* A numeric value for this attribute value. */
- HBFixed linkedValue; /* The numeric value for a style-linked mapping
+ F16DOT16 value; /* A numeric value for this attribute value. */
+ F16DOT16 linkedValue; /* The numeric value for a style-linked mapping
* from this value. */
public:
DEFINE_SIZE_STATIC (16);
@@ -280,7 +280,7 @@ struct AxisValueRecord
HBUINT16 axisIndex; /* Zero-base index into the axis record array
* identifying the axis to which this value
* applies. Must be less than designAxisCount. */
- HBFixed value; /* A numeric value for this attribute value. */
+ F16DOT16 value; /* A numeric value for this attribute value. */
public:
DEFINE_SIZE_STATIC (6);
};
@@ -320,7 +320,7 @@ struct AxisValueFormat4
unsigned total_size = min_size + axisCount * AxisValueRecord::static_size;
auto *out = c->serializer->allocate_size<AxisValueFormat4> (total_size);
if (unlikely (!out)) return_trace (false);
- memcpy (out, this, total_size);
+ hb_memcpy (out, this, total_size);
return_trace (true);
}
diff --git a/thirdparty/harfbuzz/src/hb-ot-tag.cc b/thirdparty/harfbuzz/src/hb-ot-tag.cc
index 1b18270cca..f6ba3b0d47 100644
--- a/thirdparty/harfbuzz/src/hb-ot-tag.cc
+++ b/thirdparty/harfbuzz/src/hb-ot-tag.cc
@@ -577,7 +577,7 @@ hb_ot_tags_to_script_and_language (hb_tag_t script_tag,
else
{
int shift;
- memcpy (buf, lang_str, len);
+ hb_memcpy (buf, lang_str, len);
if (lang_str[0] != 'x' || lang_str[1] != '-') {
buf[len++] = '-';
buf[len++] = 'x';
diff --git a/thirdparty/harfbuzz/src/hb-ot-var-avar-table.hh b/thirdparty/harfbuzz/src/hb-ot-var-avar-table.hh
index f60bc4a3ce..cc5c5c0068 100644
--- a/thirdparty/harfbuzz/src/hb-ot-var-avar-table.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-var-avar-table.hh
@@ -143,7 +143,7 @@ struct avar
TRACE_SANITIZE (this);
if (!(version.sanitize (c) &&
(version.major == 1
-#ifndef HB_NO_VARIATIONS2
+#ifndef HB_NO_AVAR2
|| version.major == 2
#endif
) &&
@@ -159,7 +159,7 @@ struct avar
map = &StructAfter<SegmentMaps> (*map);
}
-#ifndef HB_NO_VARIATIONS2
+#ifndef HB_NO_AVAR2
if (version.major < 2)
return_trace (true);
@@ -182,7 +182,7 @@ struct avar
map = &StructAfter<SegmentMaps> (*map);
}
-#ifndef HB_NO_VARIATIONS2
+#ifndef HB_NO_AVAR2
if (version.major < 2)
return;
diff --git a/thirdparty/harfbuzz/src/hb-ot-var-common.hh b/thirdparty/harfbuzz/src/hb-ot-var-common.hh
index 1d29e3e4f9..4997c2e2e8 100644
--- a/thirdparty/harfbuzz/src/hb-ot-var-common.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-var-common.hh
@@ -47,7 +47,7 @@ struct DeltaSetIndexMapFormat01
HBUINT8 *p = c->allocate_size<HBUINT8> (total_size);
if (unlikely (!p)) return_trace (nullptr);
- memcpy (p, this, HBUINT8::static_size * total_size);
+ hb_memcpy (p, this, HBUINT8::static_size * total_size);
return_trace (out);
}
@@ -219,6 +219,25 @@ struct DeltaSetIndexMap
DEFINE_SIZE_UNION (1, format);
};
+
+struct VarStoreInstancer
+{
+ VarStoreInstancer (const VariationStore &varStore,
+ const DeltaSetIndexMap &varIdxMap,
+ hb_array_t<int> coords) :
+ varStore (varStore), varIdxMap (varIdxMap), coords (coords) {}
+
+ operator bool () const { return bool (coords); }
+
+ float operator() (uint32_t varIdx, unsigned short offset = 0) const
+ { return varStore.get_delta (varIdxMap.map (VarIdx::add (varIdx, offset)), coords); }
+
+ const VariationStore &varStore;
+ const DeltaSetIndexMap &varIdxMap;
+ hb_array_t<int> coords;
+};
+
+
} /* namespace OT */
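
The VarStoreInstancer added above is a small callable bundle: a variation store, a delta-set index map, and the normalized design-space coordinates, so callers can turn a variation index into an interpolated delta with a single call. A minimal standalone sketch of the same shape, with a plain callback standing in for VariationStore::get_delta and DeltaSetIndexMap::map (neither type is reproduced here):

#include <cstdint>
#include <functional>
#include <iostream>
#include <vector>

// Illustrative stand-in for OT::VarStoreInstancer: given normalized axis
// coordinates and a delta-lookup callback, it maps a variation index to a
// delta. The real type wraps VariationStore::get_delta() and
// DeltaSetIndexMap::map(); this sketch hides both behind std::function.
struct instancer_sketch_t
{
  std::function<float (uint32_t, const std::vector<int> &)> get_delta;
  std::vector<int> coords; // normalized coordinates, as in the hunk above

  explicit operator bool () const { return !coords.empty (); }

  float operator() (uint32_t var_idx, unsigned offset = 0) const
  { return get_delta (var_idx + offset, coords); }
};

int main ()
{
  instancer_sketch_t instancer {
    // Dummy delta store: the delta grows with the first coordinate.
    [] (uint32_t var_idx, const std::vector<int> &c)
    { return (var_idx % 4) * (c[0] / 16384.f); },
    { 8192 } // one axis, pinned half-way towards its maximum
  };
  if (instancer)
    std::cout << "delta for varIdx 2: " << instancer (2) << "\n";
}
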
diff --git a/thirdparty/harfbuzz/src/hb-ot-var-fvar-table.hh b/thirdparty/harfbuzz/src/hb-ot-var-fvar-table.hh
index b9b49f2287..c1d57a002a 100644
--- a/thirdparty/harfbuzz/src/hb-ot-var-fvar-table.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-var-fvar-table.hh
@@ -44,9 +44,47 @@ struct InstanceRecord
{
friend struct fvar;
- hb_array_t<const HBFixed> get_coordinates (unsigned int axis_count) const
+ hb_array_t<const F16DOT16> get_coordinates (unsigned int axis_count) const
{ return coordinatesZ.as_array (axis_count); }
+ bool subset (hb_subset_context_t *c,
+ unsigned axis_count,
+ bool has_postscript_nameid) const
+ {
+ TRACE_SUBSET (this);
+ if (unlikely (!c->serializer->embed (subfamilyNameID))) return_trace (false);
+ if (unlikely (!c->serializer->embed (flags))) return_trace (false);
+
+ const hb_array_t<const F16DOT16> coords = get_coordinates (axis_count);
+ const hb_hashmap_t<hb_tag_t, float> *axes_location = c->plan->user_axes_location;
+ for (unsigned i = 0 ; i < axis_count; i++)
+ {
+ uint32_t *axis_tag;
+ // only keep instances whose coordinates == pinned axis location
+ if (!c->plan->axes_old_index_tag_map->has (i, &axis_tag)) continue;
+
+ if (axes_location->has (*axis_tag) &&
+ fabsf (axes_location->get (*axis_tag) - coords[i].to_float ()) > 0.001f)
+ return_trace (false);
+
+ if (!c->plan->axes_index_map->has (i))
+ continue;
+
+ if (!c->serializer->embed (coords[i]))
+ return_trace (false);
+ }
+
+ if (has_postscript_nameid)
+ {
+ NameID name_id;
+ name_id = StructAfter<NameID> (coords);
+ if (!c->serializer->embed (name_id))
+ return_trace (false);
+ }
+
+ return_trace (true);
+ }
+
bool sanitize (hb_sanitize_context_t *c, unsigned int axis_count) const
{
TRACE_SANITIZE (this);
@@ -58,7 +96,7 @@ struct InstanceRecord
NameID subfamilyNameID;/* The name ID for entries in the 'name' table
* that provide subfamily names for this instance. */
HBUINT16 flags; /* Reserved for future use — set to 0. */
- UnsizedArrayOf<HBFixed>
+ UnsizedArrayOf<F16DOT16>
coordinatesZ; /* The coordinates array for this instance. */
//NameID postScriptNameIDX;/*Optional. The name ID for entries in the 'name'
// * table that provide PostScript names for this
@@ -151,9 +189,9 @@ struct AxisRecord
public:
Tag axisTag; /* Tag identifying the design variation for the axis. */
protected:
- HBFixed minValue; /* The minimum coordinate value for the axis. */
- HBFixed defaultValue; /* The default coordinate value for the axis. */
- HBFixed maxValue; /* The maximum coordinate value for the axis. */
+ F16DOT16 minValue; /* The minimum coordinate value for the axis. */
+ F16DOT16 defaultValue; /* The default coordinate value for the axis. */
+ F16DOT16 maxValue; /* The maximum coordinate value for the axis. */
public:
HBUINT16 flags; /* Axis flags. */
NameID axisNameID; /* The name ID for entries in the 'name' table that
@@ -268,7 +306,7 @@ struct fvar
if (coords_length && *coords_length)
{
- hb_array_t<const HBFixed> instanceCoords = instance->get_coordinates (axisCount)
+ hb_array_t<const F16DOT16> instanceCoords = instance->get_coordinates (axisCount)
.sub_array (0, coords_length);
for (unsigned int i = 0; i < instanceCoords.length; i++)
coords[i] = instanceCoords.arrayZ[i].to_float ();
@@ -299,13 +337,13 @@ struct fvar
{
const InstanceRecord *instance = get_instance (i);
- if (hb_any (+ hb_zip (instance->get_coordinates (axisCount), hb_range ((unsigned)axisCount))
- | hb_filter (pinned_axes, hb_second)
- | hb_map ([&] (const hb_pair_t<const HBFixed&, unsigned>& _)
+ if (hb_any (+ hb_enumerate (instance->get_coordinates (axisCount))
+ | hb_filter (pinned_axes, hb_first)
+ | hb_map ([&] (const hb_pair_t<unsigned, const F16DOT16&>& _)
{
- hb_tag_t axis_tag = pinned_axes.get (_.second);
+ hb_tag_t axis_tag = pinned_axes.get (_.first);
float location = user_axes_location->get (axis_tag);
- if (fabs ((double)location - (double)_.first.to_float ()) > 0.001) return true;
+ if (fabs ((double)location - (double)_.second.to_float ()) > 0.001) return true;
return false;
})
))
@@ -321,6 +359,48 @@ struct fvar
}
}
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ unsigned retained_axis_count = c->plan->axes_index_map->get_population ();
+ if (!retained_axis_count) //all axes are pinned
+ return_trace (false);
+
+ fvar *out = c->serializer->embed (this);
+ if (unlikely (!out)) return_trace (false);
+
+ if (!c->serializer->check_assign (out->axisCount, retained_axis_count, HB_SERIALIZE_ERROR_INT_OVERFLOW))
+ return_trace (false);
+
+ bool has_postscript_nameid = false;
+ if (instanceSize >= axisCount * 4 + 6)
+ has_postscript_nameid = true;
+
+ if (!c->serializer->check_assign (out->instanceSize, retained_axis_count * 4 + (has_postscript_nameid ? 6 : 4),
+ HB_SERIALIZE_ERROR_INT_OVERFLOW))
+ return_trace (false);
+
+ auto axes_records = get_axes ();
+ for (unsigned i = 0 ; i < (unsigned)axisCount; i++)
+ {
+ if (!c->plan->axes_index_map->has (i)) continue;
+ if (unlikely (!c->serializer->embed (axes_records[i])))
+ return_trace (false);
+ }
+
+ if (!c->serializer->check_assign (out->firstAxis, get_size (), HB_SERIALIZE_ERROR_INT_OVERFLOW))
+ return_trace (false);
+
+ for (unsigned i = 0 ; i < (unsigned)instanceCount; i++)
+ {
+ const InstanceRecord *instance = get_instance (i);
+ auto snap = c->serializer->snapshot ();
+ if (!instance->subset (c, axisCount, has_postscript_nameid))
+ c->serializer->revert (snap);
+ }
+ return_trace (true);
+ }
+
public:
hb_array_t<const AxisRecord> get_axes () const
{ return hb_array (&(this+firstAxis), axisCount); }
@@ -346,8 +426,8 @@ struct fvar
HBUINT16 instanceCount; /* The number of named instances defined in the font
* (the number of records in the instances array). */
HBUINT16 instanceSize; /* The size in bytes of each InstanceRecord — set
- * to either axisCount * sizeof(HBFixed) + 4, or to
- * axisCount * sizeof(HBFixed) + 6. */
+ * to either axisCount * sizeof(F16DOT16) + 4, or to
+ * axisCount * sizeof(F16DOT16) + 6. */
public:
DEFINE_SIZE_STATIC (16);
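
Two OpenType facts drive the fvar hunks above: named-instance coordinates are 16.16 fixed-point values (F16DOT16, four bytes per axis), and an InstanceRecord optionally carries a trailing PostScript name ID, which is why instanceSize >= axisCount * 4 + 6 is used to detect it. A small illustration of both, independent of the HarfBuzz types:

#include <cstdint>
#include <iostream>

// 16.16 fixed-point to float, the same conversion F16DOT16::to_float() performs.
static float f16dot16_to_float (int32_t v) { return v / 65536.f; }

// An InstanceRecord is subfamilyNameID (2) + flags (2) + axisCount coordinates
// (4 bytes each), optionally followed by postScriptNameID (2 more bytes).
static bool has_postscript_name_id (unsigned instance_size, unsigned axis_count)
{ return instance_size >= axis_count * 4 + 6; }

int main ()
{
  std::cout << f16dot16_to_float (0x00018000) << "\n";   // 1.5
  std::cout << has_postscript_name_id (22, 4) << "\n";   // 4 axes, 22 bytes: yes
  std::cout << has_postscript_name_id (20, 4) << "\n";   // 4 axes, 20 bytes: no
}
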
diff --git a/thirdparty/harfbuzz/src/hb-ot-var-gvar-table.hh b/thirdparty/harfbuzz/src/hb-ot-var-gvar-table.hh
index bf1039d1d6..e02063ca43 100644
--- a/thirdparty/harfbuzz/src/hb-ot-var-gvar-table.hh
+++ b/thirdparty/harfbuzz/src/hb-ot-var-gvar-table.hh
@@ -56,12 +56,11 @@ struct contour_point_vector_t : hb_vector_t<contour_point_t>
void extend (const hb_array_t<contour_point_t> &a)
{
unsigned int old_len = length;
- if (unlikely (!resize (old_len + a.length)))
+ if (unlikely (!resize (old_len + a.length, false)))
return;
auto arrayZ = this->arrayZ + old_len;
unsigned count = a.length;
- for (unsigned int i = 0; i < count; i++)
- arrayZ[i] = a.arrayZ[i];
+ hb_memcpy (arrayZ, a.arrayZ, count * sizeof (arrayZ[0]));
}
void transform (const float (&matrix)[4])
@@ -231,8 +230,8 @@ struct GlyphVariationData
{
return (index < var_data->tupleVarCount.get_count ()) &&
var_data_bytes.check_range (current_tuple, TupleVariationHeader::min_size) &&
- var_data_bytes.check_range (current_tuple, hb_max (current_tuple->get_data_size (), current_tuple->get_size (axis_count))) &&
- current_tuple->get_size (axis_count);
+ var_data_bytes.check_range (current_tuple, hb_max (current_tuple->get_data_size (),
+ current_tuple->get_size (axis_count)));
}
bool move_to_next ()
@@ -281,42 +280,42 @@ struct GlyphVariationData
if (unlikely (p + 1 > end)) return false;
- uint16_t count = *p++;
+ unsigned count = *p++;
if (count & POINTS_ARE_WORDS)
{
if (unlikely (p + 1 > end)) return false;
count = ((count & POINT_RUN_COUNT_MASK) << 8) | *p++;
}
- if (unlikely (!points.resize (count))) return false;
+ if (unlikely (!points.resize (count, false))) return false;
- unsigned int n = 0;
- uint16_t i = 0;
+ unsigned n = 0;
+ unsigned i = 0;
while (i < count)
{
if (unlikely (p + 1 > end)) return false;
- uint16_t j;
- uint8_t control = *p++;
- uint16_t run_count = (control & POINT_RUN_COUNT_MASK) + 1;
+ unsigned control = *p++;
+ unsigned run_count = (control & POINT_RUN_COUNT_MASK) + 1;
+ if (unlikely (i + run_count > count)) return false;
+ unsigned j;
if (control & POINTS_ARE_WORDS)
{
- for (j = 0; j < run_count && i < count; j++, i++)
+ if (unlikely (p + run_count * HBUINT16::static_size > end)) return false;
+ for (j = 0; j < run_count; j++, i++)
{
- if (unlikely (p + HBUINT16::static_size > end)) return false;
n += *(const HBUINT16 *)p;
- points[i] = n;
+ points.arrayZ[i] = n;
p += HBUINT16::static_size;
}
}
else
{
- for (j = 0; j < run_count && i < count; j++, i++)
+ if (unlikely (p + run_count > end)) return false;
+ for (j = 0; j < run_count; j++, i++)
{
- if (unlikely (p + 1 > end)) return false;
n += *p++;
- points[i] = n;
+ points.arrayZ[i] = n;
}
}
- if (j < run_count) return false;
}
return true;
}
@@ -332,32 +331,37 @@ struct GlyphVariationData
DELTA_RUN_COUNT_MASK = 0x3F
};
- unsigned int i = 0;
- unsigned int count = deltas.length;
+ unsigned i = 0;
+ unsigned count = deltas.length;
while (i < count)
{
if (unlikely (p + 1 > end)) return false;
- uint8_t control = *p++;
- unsigned int run_count = (control & DELTA_RUN_COUNT_MASK) + 1;
- unsigned int j;
+ unsigned control = *p++;
+ unsigned run_count = (control & DELTA_RUN_COUNT_MASK) + 1;
+ if (unlikely (i + run_count > count)) return false;
+ unsigned j;
if (control & DELTAS_ARE_ZERO)
- for (j = 0; j < run_count && i < count; j++, i++)
- deltas[i] = 0;
+ {
+ for (j = 0; j < run_count; j++, i++)
+ deltas.arrayZ[i] = 0;
+ }
else if (control & DELTAS_ARE_WORDS)
- for (j = 0; j < run_count && i < count; j++, i++)
+ {
+ if (unlikely (p + run_count * HBUINT16::static_size > end)) return false;
+ for (j = 0; j < run_count; j++, i++)
{
- if (unlikely (p + HBUINT16::static_size > end)) return false;
- deltas[i] = *(const HBINT16 *) p;
+ deltas.arrayZ[i] = * (const HBINT16 *) p;
p += HBUINT16::static_size;
}
+ }
else
- for (j = 0; j < run_count && i < count; j++, i++)
+ {
+ if (unlikely (p + run_count > end)) return false;
+ for (j = 0; j < run_count; j++, i++)
{
- if (unlikely (p + 1 > end)) return false;
- deltas[i] = *(const HBINT8 *) p++;
+ deltas.arrayZ[i] = * (const HBINT8 *) p++;
}
- if (j < run_count)
- return false;
+ }
}
return true;
}
@@ -450,7 +454,7 @@ struct gvar
F2DOT14 *tuples = c->serializer->allocate_size<F2DOT14> (shared_tuple_size);
if (!tuples) return_trace (false);
out->sharedTuples = (char *) tuples - (char *) out;
- memcpy (tuples, this+sharedTuples, shared_tuple_size);
+ hb_memcpy (tuples, this+sharedTuples, shared_tuple_size);
}
char *subset_data = c->serializer->allocate_size<char> (subset_data_size);
@@ -473,7 +477,7 @@ struct gvar
((HBUINT16 *) subset_offsets)[gid] = glyph_offset / 2;
if (var_data_bytes.length > 0)
- memcpy (subset_data, var_data_bytes.arrayZ, var_data_bytes.length);
+ hb_memcpy (subset_data, var_data_bytes.arrayZ, var_data_bytes.length);
subset_data += var_data_bytes.length;
glyph_offset += var_data_bytes.length;
}
@@ -501,6 +505,7 @@ struct gvar
unsigned get_offset (unsigned i) const
{
if (unlikely (i > glyphCount)) return 0;
+ _hb_compiler_memory_r_barrier ();
return is_long_offset () ? get_long_offset_array ()[i] : get_short_offset_array ()[i] * 2;
}
@@ -521,11 +526,11 @@ struct gvar
unsigned int target, unsigned int prev, unsigned int next,
float contour_point_t::*m)
{
- float target_val = points[target].*m;
- float prev_val = points[prev].*m;
- float next_val = points[next].*m;
- float prev_delta = deltas[prev].*m;
- float next_delta = deltas[next].*m;
+ float target_val = points.arrayZ[target].*m;
+ float prev_val = points.arrayZ[prev].*m;
+ float next_val = points.arrayZ[next].*m;
+ float prev_delta = deltas.arrayZ[prev].*m;
+ float next_delta = deltas.arrayZ[next].*m;
if (prev_val == next_val)
return (prev_delta == next_delta) ? prev_delta : 0.f;
@@ -543,10 +548,11 @@ struct gvar
{ return (i >= end) ? start : (i + 1); }
public:
- bool apply_deltas_to_points (hb_codepoint_t glyph, hb_font_t *font,
+ bool apply_deltas_to_points (hb_codepoint_t glyph,
+ hb_array_t<int> coords,
const hb_array_t<contour_point_t> points) const
{
- if (!font->num_coords) return true;
+ if (!coords) return true;
if (unlikely (glyph >= table->glyphCount)) return true;
@@ -559,20 +565,20 @@ struct gvar
return true; /* so isn't applied at all */
/* Save original points for inferred delta calculation */
- contour_point_vector_t orig_points;
- if (unlikely (!orig_points.resize (points.length))) return false;
- for (unsigned int i = 0; i < orig_points.length; i++)
- orig_points.arrayZ[i] = points.arrayZ[i];
+ contour_point_vector_t orig_points_vec;
+ orig_points_vec.extend (points);
+ if (unlikely (orig_points_vec.in_error ())) return false;
+ auto orig_points = orig_points_vec.as_array ();
- contour_point_vector_t deltas; /* flag is used to indicate referenced point */
- if (unlikely (!deltas.resize (points.length))) return false;
+ contour_point_vector_t deltas_vec; /* flag is used to indicate referenced point */
+ if (unlikely (!deltas_vec.resize (points.length, false))) return false;
+ auto deltas = deltas_vec.as_array ();
hb_vector_t<unsigned> end_points;
for (unsigned i = 0; i < points.length; ++i)
- if (points[i].is_end_point)
+ if (points.arrayZ[i].is_end_point)
end_points.push (i);
- auto coords = hb_array (font->coords, font->num_coords);
unsigned num_coords = table->axisCount;
hb_array_t<const F2DOT14> shared_tuples = (table+table->sharedTuples).as_array (table->sharedTupleCount * table->axisCount);
@@ -598,70 +604,89 @@ struct gvar
bool apply_to_all = (indices.length == 0);
unsigned int num_deltas = apply_to_all ? points.length : indices.length;
- if (unlikely (!x_deltas.resize (num_deltas))) return false;
+ if (unlikely (!x_deltas.resize (num_deltas, false))) return false;
if (unlikely (!GlyphVariationData::unpack_deltas (p, x_deltas, end))) return false;
- if (unlikely (!y_deltas.resize (num_deltas))) return false;
+ if (unlikely (!y_deltas.resize (num_deltas, false))) return false;
if (unlikely (!GlyphVariationData::unpack_deltas (p, y_deltas, end))) return false;
- for (unsigned int i = 0; i < deltas.length; i++)
- deltas[i].init ();
- for (unsigned int i = 0; i < num_deltas; i++)
- {
- unsigned int pt_index = apply_to_all ? i : indices[i];
- if (unlikely (pt_index >= deltas.length)) continue;
- deltas.arrayZ[pt_index].flag = 1; /* this point is referenced, i.e., explicit deltas specified */
- deltas.arrayZ[pt_index].x += x_deltas.arrayZ[i] * scalar;
- deltas.arrayZ[pt_index].y += y_deltas.arrayZ[i] * scalar;
- }
+ hb_memset (deltas.arrayZ, 0, deltas.get_size ());
+
+ unsigned ref_points = 0;
+ if (scalar != 1.0f)
+ for (unsigned int i = 0; i < num_deltas; i++)
+ {
+ unsigned int pt_index = apply_to_all ? i : indices[i];
+ if (unlikely (pt_index >= deltas.length)) continue;
+ auto &delta = deltas.arrayZ[pt_index];
+ ref_points += !delta.flag;
+ delta.flag = 1; /* this point is referenced, i.e., explicit deltas specified */
+ delta.x += x_deltas.arrayZ[i] * scalar;
+ delta.y += y_deltas.arrayZ[i] * scalar;
+ }
+ else
+ for (unsigned int i = 0; i < num_deltas; i++)
+ {
+ unsigned int pt_index = apply_to_all ? i : indices[i];
+ if (unlikely (pt_index >= deltas.length)) continue;
+ auto &delta = deltas.arrayZ[pt_index];
+ ref_points += !delta.flag;
+ delta.flag = 1; /* this point is referenced, i.e., explicit deltas specified */
+ delta.x += x_deltas.arrayZ[i];
+ delta.y += y_deltas.arrayZ[i];
+ }
/* infer deltas for unreferenced points */
- unsigned start_point = 0;
- for (unsigned c = 0; c < end_points.length; c++)
+ if (ref_points && ref_points < orig_points.length)
{
- unsigned end_point = end_points[c];
+ unsigned start_point = 0;
+ for (unsigned c = 0; c < end_points.length; c++)
+ {
+ unsigned end_point = end_points.arrayZ[c];
- /* Check the number of unreferenced points in a contour. If no unref points or no ref points, nothing to do. */
- unsigned unref_count = 0;
- for (unsigned i = start_point; i <= end_point; i++)
- if (!deltas[i].flag) unref_count++;
+ /* Check the number of unreferenced points in a contour. If no unref points or no ref points, nothing to do. */
+ unsigned unref_count = 0;
+ for (unsigned i = start_point; i < end_point + 1; i++)
+ unref_count += deltas.arrayZ[i].flag;
+ unref_count = (end_point - start_point + 1) - unref_count;
- unsigned j = start_point;
- if (unref_count == 0 || unref_count > end_point - start_point)
- goto no_more_gaps;
+ unsigned j = start_point;
+ if (unref_count == 0 || unref_count > end_point - start_point)
+ goto no_more_gaps;
- for (;;)
- {
- /* Locate the next gap of unreferenced points between two referenced points prev and next.
- * Note that a gap may wrap around at left (start_point) and/or at right (end_point).
- */
- unsigned int prev, next, i;
- for (;;)
- {
- i = j;
- j = next_index (i, start_point, end_point);
- if (deltas[i].flag && !deltas[j].flag) break;
- }
- prev = j = i;
- for (;;)
- {
- i = j;
- j = next_index (i, start_point, end_point);
- if (!deltas[i].flag && deltas[j].flag) break;
- }
- next = j;
- /* Infer deltas for all unref points in the gap between prev and next */
- i = prev;
for (;;)
{
- i = next_index (i, start_point, end_point);
- if (i == next) break;
- deltas[i].x = infer_delta (orig_points.as_array (), deltas.as_array (), i, prev, next, &contour_point_t::x);
- deltas[i].y = infer_delta (orig_points.as_array (), deltas.as_array (), i, prev, next, &contour_point_t::y);
- if (--unref_count == 0) goto no_more_gaps;
+ /* Locate the next gap of unreferenced points between two referenced points prev and next.
+ * Note that a gap may wrap around at left (start_point) and/or at right (end_point).
+ */
+ unsigned int prev, next, i;
+ for (;;)
+ {
+ i = j;
+ j = next_index (i, start_point, end_point);
+ if (deltas.arrayZ[i].flag && !deltas.arrayZ[j].flag) break;
+ }
+ prev = j = i;
+ for (;;)
+ {
+ i = j;
+ j = next_index (i, start_point, end_point);
+ if (!deltas.arrayZ[i].flag && deltas.arrayZ[j].flag) break;
+ }
+ next = j;
+ /* Infer deltas for all unref points in the gap between prev and next */
+ i = prev;
+ for (;;)
+ {
+ i = next_index (i, start_point, end_point);
+ if (i == next) break;
+ deltas.arrayZ[i].x = infer_delta (orig_points, deltas, i, prev, next, &contour_point_t::x);
+ deltas.arrayZ[i].y = infer_delta (orig_points, deltas, i, prev, next, &contour_point_t::y);
+ if (--unref_count == 0) goto no_more_gaps;
+ }
}
+ no_more_gaps:
+ start_point = end_point + 1;
}
- no_more_gaps:
- start_point = end_point + 1;
}
/* apply specified / inferred deltas to points */
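
The rewritten unpack_deltas above validates a whole run against end before reading it and stores through arrayZ. The packed encoding itself is compact: each run starts with a control byte whose top two bits say "all zeroes" or "16-bit words" and whose low six bits hold the run length minus one. A standalone sketch of that decoding, reusing the constants from the hunk and reading big-endian words by hand; everything else is simplified:

#include <cstdint>
#include <iostream>
#include <vector>

// Minimal gvar packed-delta decoder, mirroring the bounds-checked loop above.
static bool unpack_deltas_sketch (const uint8_t *p, const uint8_t *end,
                                  std::vector<int> &deltas)
{
  constexpr uint8_t DELTAS_ARE_ZERO  = 0x80;
  constexpr uint8_t DELTAS_ARE_WORDS = 0x40;
  constexpr uint8_t RUN_COUNT_MASK   = 0x3F;

  unsigned i = 0, count = (unsigned) deltas.size ();
  while (i < count)
  {
    if (p + 1 > end) return false;
    uint8_t control = *p++;
    unsigned run = (control & RUN_COUNT_MASK) + 1;
    if (i + run > count) return false;        // a run may not overshoot the array
    if (control & DELTAS_ARE_ZERO)
      for (unsigned j = 0; j < run; j++) deltas[i++] = 0;
    else if (control & DELTAS_ARE_WORDS)
    {
      if (p + run * 2 > end) return false;    // whole run checked up front
      for (unsigned j = 0; j < run; j++, p += 2)
        deltas[i++] = (int16_t) ((p[0] << 8) | p[1]);
    }
    else
    {
      if (p + run > end) return false;
      for (unsigned j = 0; j < run; j++)
        deltas[i++] = (int8_t) *p++;
    }
  }
  return true;
}

int main ()
{
  const uint8_t data[] = { 0x01, 0x05, 0xFB,  // run of two signed bytes: 5, -5
                           0x81 };            // run of two zero deltas
  std::vector<int> deltas (4);
  if (unpack_deltas_sketch (data, data + sizeof data, deltas))
    for (int d : deltas) std::cout << d << ' ';
  std::cout << '\n';                          // prints: 5 -5 0 0
}
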
diff --git a/thirdparty/harfbuzz/src/hb-pool.hh b/thirdparty/harfbuzz/src/hb-pool.hh
index dcf8f6627d..e4bb64f627 100644
--- a/thirdparty/harfbuzz/src/hb-pool.hh
+++ b/thirdparty/harfbuzz/src/hb-pool.hh
@@ -35,15 +35,13 @@ template <typename T, unsigned ChunkLen = 16>
struct hb_pool_t
{
hb_pool_t () : next (nullptr) {}
- ~hb_pool_t () { fini (); }
-
- void fini ()
+ ~hb_pool_t ()
{
next = nullptr;
- for (chunk_t *_ : chunks) hb_free (_);
-
- chunks.fini ();
+ + hb_iter (chunks)
+ | hb_apply (hb_free)
+ ;
}
T* alloc ()
@@ -60,7 +58,7 @@ struct hb_pool_t
T* obj = next;
next = * ((T**) next);
- memset (obj, 0, sizeof (T));
+ hb_memset (obj, 0, sizeof (T));
return obj;
}
diff --git a/thirdparty/harfbuzz/src/hb-priority-queue.hh b/thirdparty/harfbuzz/src/hb-priority-queue.hh
index ffb86e30ae..ac76b7d955 100644
--- a/thirdparty/harfbuzz/src/hb-priority-queue.hh
+++ b/thirdparty/harfbuzz/src/hb-priority-queue.hh
@@ -63,7 +63,9 @@ struct hb_priority_queue_t
heap.arrayZ[0] = heap.arrayZ[heap.length - 1];
heap.shrink (heap.length - 1);
- bubble_down (0);
+
+ if (!is_empty ())
+ bubble_down (0);
return result;
}
@@ -100,7 +102,7 @@ struct hb_priority_queue_t
void bubble_down (unsigned index)
{
- assert (index <= heap.length);
+ assert (index < heap.length);
unsigned left = left_child (index);
unsigned right = right_child (index);
@@ -112,7 +114,7 @@ struct hb_priority_queue_t
bool has_right = right < heap.length;
if (heap.arrayZ[index].first <= heap.arrayZ[left].first
- && (!has_right || heap[index].first <= heap.arrayZ[right].first))
+ && (!has_right || heap.arrayZ[index].first <= heap.arrayZ[right].first))
return;
if (!has_right || heap.arrayZ[left].first < heap.arrayZ[right].first)
@@ -128,7 +130,7 @@ struct hb_priority_queue_t
void bubble_up (unsigned index)
{
- assert (index <= heap.length);
+ assert (index < heap.length);
if (index == 0) return;
@@ -142,8 +144,8 @@ struct hb_priority_queue_t
void swap (unsigned a, unsigned b)
{
- assert (a <= heap.length);
- assert (b <= heap.length);
+ assert (a < heap.length);
+ assert (b < heap.length);
hb_swap (heap.arrayZ[a], heap.arrayZ[b]);
}
};
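
Two of the changes above are boundary conditions on the heap: pop_minimum must not call bubble_down once removing the last element has emptied the heap, and the asserts now require indices strictly below heap.length. A reduced min-heap sketch showing where that guard sits, using std::vector and std::pair rather than the HarfBuzz containers:

#include <iostream>
#include <utility>
#include <vector>

struct min_heap_sketch_t
{
  std::vector<std::pair<int, int>> heap;   // (priority, value)

  void insert (int prio, int v)
  {
    heap.emplace_back (prio, v);
    // Sift the new element up while it is smaller than its parent.
    for (size_t i = heap.size () - 1; i && heap[(i - 1) / 2] > heap[i]; i = (i - 1) / 2)
      std::swap (heap[(i - 1) / 2], heap[i]);
  }

  std::pair<int, int> pop_minimum ()
  {
    auto result = heap.front ();
    heap.front () = heap.back ();
    heap.pop_back ();
    if (!heap.empty ())                    // the guard added above: an empty heap
      bubble_down (0);                     // has no index 0 to bubble down from
    return result;
  }

  void bubble_down (size_t i)
  {
    for (;;)
    {
      size_t left = 2 * i + 1, right = 2 * i + 2, smallest = i;
      if (left < heap.size () && heap[left] < heap[smallest]) smallest = left;
      if (right < heap.size () && heap[right] < heap[smallest]) smallest = right;
      if (smallest == i) return;
      std::swap (heap[i], heap[smallest]);
      i = smallest;
    }
  }
};

int main ()
{
  min_heap_sketch_t q;
  q.insert (3, 30); q.insert (1, 10); q.insert (2, 20);
  while (!q.heap.empty ())
    std::cout << q.pop_minimum ().second << ' ';   // prints: 10 20 30
  std::cout << '\n';
}
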
diff --git a/thirdparty/harfbuzz/src/hb-repacker.hh b/thirdparty/harfbuzz/src/hb-repacker.hh
index 40a5326118..7a3143cec3 100644
--- a/thirdparty/harfbuzz/src/hb-repacker.hh
+++ b/thirdparty/harfbuzz/src/hb-repacker.hh
@@ -209,7 +209,7 @@ bool _try_isolating_subgraphs (const hb_vector_t<graph::overflow_record_t>& over
// Only move at most half of the roots in a space at a time.
unsigned extra = roots_to_isolate.get_population () - maximum_to_move;
while (extra--) {
- unsigned root = HB_SET_VALUE_INVALID;
+ uint32_t root = HB_SET_VALUE_INVALID;
roots_to_isolate.previous (&root);
roots_to_isolate.del (root);
}
@@ -244,7 +244,7 @@ bool _process_overflows (const hb_vector_t<graph::overflow_record_t>& overflows,
{
// The child object is shared, we may be able to eliminate the overflow
// by duplicating it.
- if (!sorted_graph.duplicate (r.parent, r.child)) continue;
+ if (sorted_graph.duplicate (r.parent, r.child) == (unsigned) -1) continue;
return true;
}
@@ -283,6 +283,11 @@ hb_resolve_graph_overflows (hb_tag_t table_tag,
graph_t& sorted_graph /* IN/OUT */)
{
sorted_graph.sort_shortest_distance ();
+ if (sorted_graph.in_error ())
+ {
+ DEBUG_MSG (SUBSET_REPACK, nullptr, "Sorted graph in error state after initial sort.");
+ return false;
+ }
bool will_overflow = graph::will_overflow (sorted_graph);
if (!will_overflow)
@@ -376,6 +381,26 @@ hb_resolve_overflows (const T& packed,
unsigned max_rounds = 20,
bool recalculate_extensions = false) {
graph_t sorted_graph (packed);
+ if (sorted_graph.in_error ())
+ {
+ // Invalid graph definition.
+ return nullptr;
+ }
+
+ if (!sorted_graph.is_fully_connected ())
+ {
+ sorted_graph.print_orphaned_nodes ();
+ return nullptr;
+ }
+
+ if (sorted_graph.in_error ())
+ {
+ // Allocations failed somewhere
+ DEBUG_MSG (SUBSET_REPACK, nullptr,
+ "Graph is in error, likely due to a memory allocation error.");
+ return nullptr;
+ }
+
if (!hb_resolve_graph_overflows (table_tag, max_rounds, recalculate_extensions, sorted_graph))
return nullptr;
diff --git a/thirdparty/harfbuzz/src/hb-sanitize.hh b/thirdparty/harfbuzz/src/hb-sanitize.hh
index 65c2772201..bd3250e580 100644
--- a/thirdparty/harfbuzz/src/hb-sanitize.hh
+++ b/thirdparty/harfbuzz/src/hb-sanitize.hh
@@ -198,10 +198,11 @@ struct hb_sanitize_context_t :
void start_processing ()
{
reset_object ();
- if (unlikely (hb_unsigned_mul_overflows (this->end - this->start, HB_SANITIZE_MAX_OPS_FACTOR)))
+ unsigned m;
+ if (unlikely (hb_unsigned_mul_overflows (this->end - this->start, HB_SANITIZE_MAX_OPS_FACTOR, &m)))
this->max_ops = HB_SANITIZE_MAX_OPS_MAX;
else
- this->max_ops = hb_clamp ((unsigned) (this->end - this->start) * HB_SANITIZE_MAX_OPS_FACTOR,
+ this->max_ops = hb_clamp (m,
(unsigned) HB_SANITIZE_MAX_OPS_MIN,
(unsigned) HB_SANITIZE_MAX_OPS_MAX);
this->edit_count = 0;
@@ -252,8 +253,9 @@ struct hb_sanitize_context_t :
unsigned int a,
unsigned int b) const
{
- return !hb_unsigned_mul_overflows (a, b) &&
- this->check_range (base, a * b);
+ unsigned m;
+ return !hb_unsigned_mul_overflows (a, b, &m) &&
+ this->check_range (base, m);
}
template <typename T>
@@ -262,8 +264,9 @@ struct hb_sanitize_context_t :
unsigned int b,
unsigned int c) const
{
- return !hb_unsigned_mul_overflows (a, b) &&
- this->check_range (base, a * b, c);
+ unsigned m;
+ return !hb_unsigned_mul_overflows (a, b, &m) &&
+ this->check_range (base, m, c);
}
template <typename T>
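
The sanitizer hunks above stop recomputing a * b after the overflow test: hb_unsigned_mul_overflows now hands back the product it already produced, and check_range consumes that. A sketch of the out-parameter pattern, assuming the GCC/Clang builtin; this is only the shape of such a helper, not HarfBuzz's actual definition:

#include <cstdio>

// Overflow-checked multiply that also yields the product, so callers never
// multiply twice. Relies on __builtin_mul_overflow (GCC/Clang).
static bool mul_overflows (unsigned a, unsigned b, unsigned *result)
{
  return __builtin_mul_overflow (a, b, result);
}

int main ()
{
  unsigned m;
  if (!mul_overflows (48u, 1000u, &m))
    std::printf ("48 * 1000 = %u\n", m);   // product reused, no second multiply
  if (mul_overflows (1u << 31, 4u, &m))
    std::printf ("overflow detected\n");
}
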
diff --git a/thirdparty/harfbuzz/src/hb-serialize.hh b/thirdparty/harfbuzz/src/hb-serialize.hh
index f47cde5eb5..d5573281f1 100644
--- a/thirdparty/harfbuzz/src/hb-serialize.hh
+++ b/thirdparty/harfbuzz/src/hb-serialize.hh
@@ -194,7 +194,6 @@ struct hb_serialize_context_t
current = current->next;
_->fini ();
}
- object_pool.fini ();
}
bool in_error () const { return bool (errors); }
@@ -224,6 +223,7 @@ struct hb_serialize_context_t
this->errors = HB_SERIALIZE_ERROR_NONE;
this->head = this->start;
this->tail = this->end;
+ this->zerocopy = nullptr;
this->debug_depth = 0;
fini ();
@@ -326,7 +326,8 @@ struct hb_serialize_context_t
if (unlikely (in_error() && !only_overflow ())) return;
current = current->next;
- revert (obj->head, obj->tail);
+ revert (zerocopy ? zerocopy : obj->head, obj->tail);
+ zerocopy = nullptr;
obj->fini ();
object_pool.release (obj);
}
@@ -344,8 +345,11 @@ struct hb_serialize_context_t
current = current->next;
obj->tail = head;
obj->next = nullptr;
+ assert (obj->head <= obj->tail);
unsigned len = obj->tail - obj->head;
- head = obj->head; /* Rewind head. */
+ head = zerocopy ? zerocopy : obj->head; /* Rewind head. */
+ bool was_zerocopy = zerocopy;
+ zerocopy = nullptr;
if (!len)
{
@@ -355,9 +359,11 @@ struct hb_serialize_context_t
}
objidx_t objidx;
+ uint32_t hash = 0;
if (share)
{
- objidx = packed_map.get (obj);
+ hash = hb_hash (obj);
+ objidx = packed_map.get_with_hash (obj, hash);
if (objidx)
{
merge_virtual_links (obj, objidx);
@@ -367,7 +373,10 @@ struct hb_serialize_context_t
}
tail -= len;
- memmove (tail, obj->head, len);
+ if (was_zerocopy)
+ assert (tail == obj->head);
+ else
+ memmove (tail, obj->head, len);
obj->head = tail;
obj->tail = tail + len;
@@ -385,7 +394,7 @@ struct hb_serialize_context_t
objidx = packed.length - 1;
- if (share) packed_map.set (obj, objidx);
+ if (share) packed_map.set_with_hash (obj, hash, objidx);
propagate_error (packed_map);
return objidx;
@@ -569,8 +578,26 @@ struct hb_serialize_context_t
return !bool ((errors = (errors | err_type)));
}
+ bool start_zerocopy (size_t size)
+ {
+ if (unlikely (in_error ())) return false;
+
+ if (unlikely (size > INT_MAX || this->tail - this->head < ptrdiff_t (size)))
+ {
+ err (HB_SERIALIZE_ERROR_OUT_OF_ROOM);
+ return false;
+ }
+
+ assert (!this->zerocopy);
+ this->zerocopy = this->head;
+
+ assert (this->current->head == this->head);
+ this->current->head = this->current->tail = this->head = this->tail - size;
+ return true;
+ }
+
template <typename Type>
- Type *allocate_size (size_t size)
+ Type *allocate_size (size_t size, bool clear = true)
{
if (unlikely (in_error ())) return nullptr;
@@ -579,7 +606,8 @@ struct hb_serialize_context_t
err (HB_SERIALIZE_ERROR_OUT_OF_ROOM);
return nullptr;
}
- hb_memset (this->head, 0, size);
+ if (clear)
+ hb_memset (this->head, 0, size);
char *ret = this->head;
this->head += size;
return reinterpret_cast<Type *> (ret);
@@ -593,9 +621,9 @@ struct hb_serialize_context_t
Type *embed (const Type *obj)
{
unsigned int size = obj->get_size ();
- Type *ret = this->allocate_size<Type> (size);
+ Type *ret = this->allocate_size<Type> (size, false);
if (unlikely (!ret)) return nullptr;
- memcpy (ret, obj, size);
+ hb_memcpy (ret, obj, size);
return ret;
}
template <typename Type>
@@ -616,7 +644,7 @@ struct hb_serialize_context_t
}
/* Like embed, but active: calls obj.operator=() or obj.copy() to transfer data
- * instead of memcpy(). */
+ * instead of hb_memcpy(). */
template <typename Type, typename ...Ts>
Type *copy (const Type &src, Ts&&... ds)
{ return _copy (src, hb_prioritize, std::forward<Ts> (ds)...); }
@@ -634,7 +662,7 @@ struct hb_serialize_context_t
hb_serialize_context_t& operator << (const Type &obj) & { embed (obj); return *this; }
template <typename Type>
- Type *extend_size (Type *obj, size_t size)
+ Type *extend_size (Type *obj, size_t size, bool clear = true)
{
if (unlikely (in_error ())) return nullptr;
@@ -642,12 +670,12 @@ struct hb_serialize_context_t
assert ((char *) obj <= this->head);
assert ((size_t) (this->head - (char *) obj) <= size);
if (unlikely (((char *) obj + size < (char *) obj) ||
- !this->allocate_size<Type> (((char *) obj) + size - this->head))) return nullptr;
+ !this->allocate_size<Type> (((char *) obj) + size - this->head, clear))) return nullptr;
return reinterpret_cast<Type *> (obj);
}
template <typename Type>
- Type *extend_size (Type &obj, size_t size)
- { return extend_size (std::addressof (obj), size); }
+ Type *extend_size (Type &obj, size_t size, bool clear = true)
+ { return extend_size (std::addressof (obj), size, clear); }
template <typename Type>
Type *extend_min (Type *obj) { return extend_size (obj, obj->min_size); }
@@ -676,8 +704,8 @@ struct hb_serialize_context_t
char *p = (char *) hb_malloc (len);
if (unlikely (!p)) return hb_bytes_t ();
- memcpy (p, this->start, this->head - this->start);
- memcpy (p + (this->head - this->start), this->tail, this->end - this->tail);
+ hb_memcpy (p, this->start, this->head - this->start);
+ hb_memcpy (p + (this->head - this->start), this->tail, this->end - this->tail);
return hb_bytes_t (p, len);
}
template <typename Type>
@@ -704,7 +732,7 @@ struct hb_serialize_context_t
}
public:
- char *start, *head, *tail, *end;
+ char *start, *head, *tail, *end, *zerocopy;
unsigned int debug_depth;
hb_serialize_error_t errors;
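
start_zerocopy above leans on the serializer's double-ended buffer: objects are normally assembled at head (growing forward) and copied down to tail (growing backward) by pop_pack; when the caller knows the final size up front, head is parked at tail - size so the bytes land in place and the memmove turns into an assert. A reduced model of that layout; the member names match, but this is a simplification, not the real hb_serialize_context_t:

#include <cassert>
#include <cstddef>
#include <cstring>
#include <iostream>
#include <vector>

struct serializer_sketch_t
{
  std::vector<char> buf;
  char *start, *head, *tail, *end, *zerocopy = nullptr;

  explicit serializer_sketch_t (std::size_t n) : buf (n)
  { start = head = buf.data (); end = tail = buf.data () + n; }

  bool start_zerocopy (std::size_t size)
  {
    if (tail - head < (std::ptrdiff_t) size) return false;   // out of room
    zerocopy = head;
    head = tail - size;            // write the object directly in its final place
    return true;
  }

  void pop_pack (std::size_t len)
  {
    tail -= len;
    if (zerocopy) assert (tail == head);   // already where it belongs: no memmove
    else          std::memmove (tail, head, len);
    head = zerocopy ? zerocopy : head;     // rewind head past the packed object
    zerocopy = nullptr;
  }
};

int main ()
{
  serializer_sketch_t c (64);
  if (c.start_zerocopy (16))
  {
    std::memset (c.head, 0xAB, 16);        // serialize 16 bytes in place
    c.pop_pack (16);
  }
  std::cout << "packed at offset " << (c.tail - c.start) << "\n";   // 48
}
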
diff --git a/thirdparty/harfbuzz/src/hb-set-digest.hh b/thirdparty/harfbuzz/src/hb-set-digest.hh
index fab36216e4..e8409111f2 100644
--- a/thirdparty/harfbuzz/src/hb-set-digest.hh
+++ b/thirdparty/harfbuzz/src/hb-set-digest.hh
@@ -28,6 +28,7 @@
#define HB_SET_DIGEST_HH
#include "hb.hh"
+#include "hb-machinery.hh"
/*
* The set-digests here implement various "filters" that support
@@ -75,6 +76,8 @@ struct hb_set_digest_bits_pattern_t
void init () { mask = 0; }
+ void add (const hb_set_digest_bits_pattern_t &o) { mask |= o.mask; }
+
void add (hb_codepoint_t g) { mask |= mask_for (g); }
bool add_range (hb_codepoint_t a, hb_codepoint_t b)
@@ -95,7 +98,7 @@ struct hb_set_digest_bits_pattern_t
for (unsigned int i = 0; i < count; i++)
{
add (*array);
- array = (const T *) (stride + (const char *) array);
+ array = &StructAtOffsetUnaligned<T> ((const void *) array, stride);
}
}
template <typename T>
@@ -103,16 +106,15 @@ struct hb_set_digest_bits_pattern_t
template <typename T>
bool add_sorted_array (const T *array, unsigned int count, unsigned int stride=sizeof(T))
{
- for (unsigned int i = 0; i < count; i++)
- {
- add (*array);
- array = (const T *) (stride + (const char *) array);
- }
+ add_array (array, count, stride);
return true;
}
template <typename T>
bool add_sorted_array (const hb_sorted_array_t<const T>& arr) { return add_sorted_array (&arr, arr.len ()); }
+ bool may_have (const hb_set_digest_bits_pattern_t &o) const
+ { return mask & o.mask; }
+
bool may_have (hb_codepoint_t g) const
{ return mask & mask_for (g); }
@@ -132,6 +134,12 @@ struct hb_set_digest_combiner_t
tail.init ();
}
+ void add (const hb_set_digest_combiner_t &o)
+ {
+ head.add (o.head);
+ tail.add (o.tail);
+ }
+
void add (hb_codepoint_t g)
{
head.add (g);
@@ -140,9 +148,8 @@ struct hb_set_digest_combiner_t
bool add_range (hb_codepoint_t a, hb_codepoint_t b)
{
- head.add_range (a, b);
- tail.add_range (a, b);
- return true;
+ return head.add_range (a, b) &&
+ tail.add_range (a, b);
}
template <typename T>
void add_array (const T *array, unsigned int count, unsigned int stride=sizeof(T))
@@ -155,13 +162,17 @@ struct hb_set_digest_combiner_t
template <typename T>
bool add_sorted_array (const T *array, unsigned int count, unsigned int stride=sizeof(T))
{
- head.add_sorted_array (array, count, stride);
- tail.add_sorted_array (array, count, stride);
- return true;
+ return head.add_sorted_array (array, count, stride) &&
+ tail.add_sorted_array (array, count, stride);
}
template <typename T>
bool add_sorted_array (const hb_sorted_array_t<const T>& arr) { return add_sorted_array (&arr, arr.len ()); }
+ bool may_have (const hb_set_digest_combiner_t &o) const
+ {
+ return head.may_have (o.head) && tail.may_have (o.tail);
+ }
+
bool may_have (hb_codepoint_t g) const
{
return head.may_have (g) && tail.may_have (g);
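
The digest additions above (the add (other) and may_have (other) overloads) work because each digest is a one-word Bloom-style filter: merging two digests is a bitwise OR, and membership queries can produce false positives but never false negatives. A small self-contained sketch of such a bits-pattern digest; the hash below is an illustrative approximation, not necessarily the exact mask_for used by HarfBuzz:

#include <cstdint>
#include <iostream>

template <typename mask_t, unsigned shift>
struct digest_sketch_t
{
  mask_t mask = 0;

  // One bit per hashed codepoint: shift selects which bits of g are sampled.
  static mask_t mask_for (uint32_t g)
  { return (mask_t) 1 << ((g >> shift) & (sizeof (mask_t) * 8 - 1)); }

  void add (uint32_t g)                          { mask |= mask_for (g); }
  void add (const digest_sketch_t &o)            { mask |= o.mask; }
  bool may_have (uint32_t g) const               { return mask & mask_for (g); }
  bool may_have (const digest_sketch_t &o) const { return mask & o.mask; }
};

int main ()
{
  digest_sketch_t<uint64_t, 4> d;
  d.add (0x0915);                              // DEVANAGARI LETTER KA
  std::cout << d.may_have (0x0915) << ' '      // 1: definitely present
            << d.may_have (0x0041) << '\n';    // 0 here: filtered out
}
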
diff --git a/thirdparty/harfbuzz/src/hb-set.hh b/thirdparty/harfbuzz/src/hb-set.hh
index 5d5576cb9e..f958a081fb 100644
--- a/thirdparty/harfbuzz/src/hb-set.hh
+++ b/thirdparty/harfbuzz/src/hb-set.hh
@@ -105,10 +105,8 @@ struct hb_sparseset_t
bool get (hb_codepoint_t g) const { return s.get (g); }
/* Has interface. */
- static constexpr bool SENTINEL = false;
- typedef bool value_t;
- value_t operator [] (hb_codepoint_t k) const { return get (k); }
- bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; }
+ bool operator [] (hb_codepoint_t k) const { return get (k); }
+ bool has (hb_codepoint_t k) const { return (*this)[k]; }
/* Predicate. */
bool operator () (hb_codepoint_t k) const { return has (k); }
diff --git a/thirdparty/harfbuzz/src/hb-shape-plan.cc b/thirdparty/harfbuzz/src/hb-shape-plan.cc
index 9e513c0458..cabcbd4e72 100644
--- a/thirdparty/harfbuzz/src/hb-shape-plan.cc
+++ b/thirdparty/harfbuzz/src/hb-shape-plan.cc
@@ -31,6 +31,8 @@
#include "hb-buffer.hh"
+#ifndef HB_NO_SHAPER
+
/**
* SECTION:hb-shape-plan
* @title: hb-shape-plan
@@ -74,7 +76,7 @@ hb_shape_plan_key_t::init (bool copy,
this->user_features = copy ? features : user_features;
if (copy && num_user_features)
{
- memcpy (features, user_features, num_user_features * sizeof (hb_feature_t));
+ hb_memcpy (features, user_features, num_user_features * sizeof (hb_feature_t));
/* Make start/end uniform to easier catch bugs. */
for (unsigned int i = 0; i < num_user_features; i++)
{
@@ -574,3 +576,6 @@ retry:
return hb_shape_plan_reference (shape_plan);
}
+
+
+#endif
diff --git a/thirdparty/harfbuzz/src/hb-shape.cc b/thirdparty/harfbuzz/src/hb-shape.cc
index 547d0afc47..7b5bf2c5ef 100644
--- a/thirdparty/harfbuzz/src/hb-shape.cc
+++ b/thirdparty/harfbuzz/src/hb-shape.cc
@@ -35,6 +35,8 @@
#include "hb-machinery.hh"
+#ifndef HB_NO_SHAPER
+
/**
* SECTION:hb-shape
* @title: hb-shape
@@ -192,3 +194,6 @@ hb_shape (hb_font_t *font,
{
hb_shape_full (font, buffer, features, num_features, nullptr);
}
+
+
+#endif
diff --git a/thirdparty/harfbuzz/src/hb-shaper.cc b/thirdparty/harfbuzz/src/hb-shaper.cc
index a900ac6991..c4885cda94 100644
--- a/thirdparty/harfbuzz/src/hb-shaper.cc
+++ b/thirdparty/harfbuzz/src/hb-shaper.cc
@@ -53,7 +53,7 @@ static struct hb_shapers_lazy_loader_t : hb_lazy_loader_t<hb_shaper_entry_t,
if (unlikely (!shapers))
return nullptr;
- memcpy (shapers, _hb_all_shapers, sizeof (_hb_all_shapers));
+ hb_memcpy (shapers, _hb_all_shapers, sizeof (_hb_all_shapers));
/* Reorder shaper list to prefer requested shapers. */
unsigned int i = 0;
diff --git a/thirdparty/harfbuzz/src/hb-static.cc b/thirdparty/harfbuzz/src/hb-static.cc
index 5d4c7cda1b..eac50754c2 100644
--- a/thirdparty/harfbuzz/src/hb-static.cc
+++ b/thirdparty/harfbuzz/src/hb-static.cc
@@ -33,6 +33,7 @@
#include "hb-aat-layout-feat-table.hh"
#include "hb-ot-layout-common.hh"
#include "hb-ot-cmap-table.hh"
+#include "hb-ot-color-colr-table.hh"
#include "hb-ot-glyf-table.hh"
#include "hb-ot-head-table.hh"
#include "hb-ot-maxp-table.hh"
@@ -47,6 +48,7 @@ DEFINE_NULL_NAMESPACE_BYTES (OT, Index) = {0xFF,0xFF};
DEFINE_NULL_NAMESPACE_BYTES (OT, VarIdx) = {0xFF,0xFF,0xFF,0xFF};
DEFINE_NULL_NAMESPACE_BYTES (OT, LangSys) = {0x00,0x00, 0xFF,0xFF, 0x00,0x00};
DEFINE_NULL_NAMESPACE_BYTES (OT, RangeRecord) = {0x01};
+DEFINE_NULL_NAMESPACE_BYTES (OT, ClipRecord) = {0x01};
DEFINE_NULL_NAMESPACE_BYTES (OT, CmapSubtableLongGroup) = {0x00,0x00,0x00,0x01, 0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00};
DEFINE_NULL_NAMESPACE_BYTES (AAT, SettingName) = {0xFF,0xFF, 0xFF,0xFF};
DEFINE_NULL_NAMESPACE_BYTES (AAT, Lookup) = {0xFF,0xFF};
diff --git a/thirdparty/harfbuzz/src/hb-subset-accelerator.hh b/thirdparty/harfbuzz/src/hb-subset-accelerator.hh
new file mode 100644
index 0000000000..63ae6d77e2
--- /dev/null
+++ b/thirdparty/harfbuzz/src/hb-subset-accelerator.hh
@@ -0,0 +1,121 @@
+/*
+ * Copyright © 2022 Google, Inc.
+ *
+ * This is part of HarfBuzz, a text shaping library.
+ *
+ * Permission is hereby granted, without written agreement and without
+ * license or royalty fees, to use, copy, modify, and distribute this
+ * software and its documentation for any purpose, provided that the
+ * above copyright notice and the following two paragraphs appear in
+ * all copies of this software.
+ *
+ * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
+ * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
+ * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
+ * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+ * DAMAGE.
+ *
+ * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
+ * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
+ * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
+ * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+ *
+ * Google Author(s): Garret Rieger
+ */
+
+#ifndef HB_SUBSET_ACCELERATOR_HH
+#define HB_SUBSET_ACCELERATOR_HH
+
+
+#include "hb.hh"
+
+#include "hb-map.hh"
+#include "hb-multimap.hh"
+#include "hb-set.hh"
+
+extern HB_INTERNAL hb_user_data_key_t _hb_subset_accelerator_user_data_key;
+
+namespace CFF {
+struct cff_subset_accelerator_t;
+}
+
+namespace OT {
+struct SubtableUnicodesCache;
+};
+
+struct hb_subset_accelerator_t
+{
+ static hb_user_data_key_t* user_data_key()
+ {
+ return &_hb_subset_accelerator_user_data_key;
+ }
+
+ static hb_subset_accelerator_t* create(const hb_map_t& unicode_to_gid_,
+ const hb_multimap_t gid_to_unicodes_,
+ const hb_set_t& unicodes_,
+ bool has_seac_) {
+ hb_subset_accelerator_t* accel =
+ (hb_subset_accelerator_t*) hb_malloc (sizeof(hb_subset_accelerator_t));
+ new (accel) hb_subset_accelerator_t (unicode_to_gid_, gid_to_unicodes_, unicodes_);
+ accel->has_seac = has_seac_;
+ return accel;
+ }
+
+ static void destroy(void* value) {
+ if (!value) return;
+
+ hb_subset_accelerator_t* accel = (hb_subset_accelerator_t*) value;
+
+ if (accel->cff_accelerator && accel->destroy_cff_accelerator)
+ accel->destroy_cff_accelerator ((void*) accel->cff_accelerator);
+
+ if (accel->cmap_cache && accel->destroy_cmap_cache)
+ accel->destroy_cmap_cache ((void*) accel->cmap_cache);
+
+ accel->~hb_subset_accelerator_t ();
+ hb_free (accel);
+ }
+
+ hb_subset_accelerator_t (const hb_map_t& unicode_to_gid_,
+ const hb_multimap_t& gid_to_unicodes_,
+ const hb_set_t& unicodes_)
+ : unicode_to_gid(unicode_to_gid_), gid_to_unicodes (gid_to_unicodes_), unicodes(unicodes_),
+ cmap_cache(nullptr), destroy_cmap_cache(nullptr),
+ has_seac(false), cff_accelerator(nullptr), destroy_cff_accelerator(nullptr)
+ { sanitized_table_cache_lock.init (); }
+
+ ~hb_subset_accelerator_t ()
+ { sanitized_table_cache_lock.fini (); }
+
+ // Generic
+
+ mutable hb_mutex_t sanitized_table_cache_lock;
+ mutable hb_hashmap_t<hb_tag_t, hb::unique_ptr<hb_blob_t>> sanitized_table_cache;
+
+ const hb_map_t unicode_to_gid;
+ const hb_multimap_t gid_to_unicodes;
+ const hb_set_t unicodes;
+
+ // cmap
+ const OT::SubtableUnicodesCache* cmap_cache;
+ hb_destroy_func_t destroy_cmap_cache;
+
+ // CFF
+ bool has_seac;
+ const CFF::cff_subset_accelerator_t* cff_accelerator;
+ hb_destroy_func_t destroy_cff_accelerator;
+
+ // TODO(garretrieger): cumulative glyf checksum map
+
+ bool in_error () const
+ {
+ return unicode_to_gid.in_error () ||
+ gid_to_unicodes.in_error () ||
+ unicodes.in_error () ||
+ sanitized_table_cache.in_error ();
+ }
+};
+
+
+#endif /* HB_SUBSET_ACCELERATOR_HH */
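
The accelerator above is built with raw hb_malloc plus placement new in create () and torn down with an explicit destructor call plus hb_free in destroy (). The same lifetime idiom in standalone form, generic over the stored type (the helper names here are made up for the example):

#include <cstdlib>
#include <iostream>
#include <new>
#include <string>
#include <utility>

// Allocate raw storage, then construct the object in it.
template <typename T, typename... Args>
static T *create_with_malloc (Args&&... args)
{
  T *obj = (T *) std::malloc (sizeof (T));
  if (!obj) return nullptr;
  new (obj) T (std::forward<Args> (args)...);
  return obj;
}

// Destroy the object explicitly, then release the raw storage.
template <typename T>
static void destroy_with_free (T *obj)
{
  if (!obj) return;
  obj->~T ();
  std::free (obj);
}

int main ()
{
  auto *s = create_with_malloc<std::string> ("accelerated");
  std::cout << *s << "\n";
  destroy_with_free (s);
}
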
diff --git a/thirdparty/harfbuzz/src/hb-subset-cff-common.cc b/thirdparty/harfbuzz/src/hb-subset-cff-common.cc
index 711b2236d6..6e1b6f713d 100644
--- a/thirdparty/harfbuzz/src/hb-subset-cff-common.cc
+++ b/thirdparty/harfbuzz/src/hb-subset-cff-common.cc
@@ -66,8 +66,7 @@ hb_plan_subset_cff_fdselect (const hb_subset_plan_t *plan,
{
/* use hb_set to determine the subset of font dicts */
- hb_set_t *set = hb_set_create ();
- if (unlikely (set == &Null (hb_set_t))) return false;
+ hb_set_t set;
hb_codepoint_t prev_fd = CFF_UNDEF_CODE;
for (hb_codepoint_t i = 0; i < subset_num_glyphs; i++)
{
@@ -79,7 +78,7 @@ hb_plan_subset_cff_fdselect (const hb_subset_plan_t *plan,
glyph = i;
}
fd = src.get_fd (glyph);
- set->add (fd);
+ set.add (fd);
if (fd != prev_fd)
{
@@ -90,12 +89,11 @@ hb_plan_subset_cff_fdselect (const hb_subset_plan_t *plan,
}
}
- subset_fd_count = set->get_population ();
+ subset_fd_count = set.get_population ();
if (subset_fd_count == fdCount)
{
/* all font dicts belong to the subset. no need to subset FDSelect & FDArray */
fdmap.identity (fdCount);
- hb_set_destroy (set);
}
else
{
@@ -103,9 +101,8 @@ hb_plan_subset_cff_fdselect (const hb_subset_plan_t *plan,
fdmap.reset ();
hb_codepoint_t fd = CFF_UNDEF_CODE;
- while (set->next (&fd))
+ while (set.next (&fd))
fdmap.add (fd);
- hb_set_destroy (set);
if (unlikely (fdmap.get_population () != subset_fd_count))
return false;
}
diff --git a/thirdparty/harfbuzz/src/hb-subset-cff-common.hh b/thirdparty/harfbuzz/src/hb-subset-cff-common.hh
index bb9f27eec1..8bbc017656 100644
--- a/thirdparty/harfbuzz/src/hb-subset-cff-common.hh
+++ b/thirdparty/harfbuzz/src/hb-subset-cff-common.hh
@@ -38,15 +38,16 @@ namespace CFF {
struct str_encoder_t
{
str_encoder_t (str_buff_t &buff_)
- : buff (buff_), error (false) {}
+ : buff (buff_) {}
void reset () { buff.reset (); }
void encode_byte (unsigned char b)
{
- buff.push (b);
- if (unlikely (buff.in_error ()))
- set_error ();
+ if (likely ((signed) buff.length < buff.allocated))
+ buff.arrayZ[buff.length++] = b;
+ else
+ buff.push (b);
}
void encode_int (int v)
@@ -108,27 +109,18 @@ struct str_encoder_t
encode_byte (op);
}
- void copy_str (const hb_ubytes_t &str)
+ void copy_str (const unsigned char *str, unsigned length)
{
- unsigned int offset = buff.length;
- /* Manually resize buffer since faster. */
- if ((signed) (buff.length + str.length) <= buff.allocated)
- buff.length += str.length;
- else if (unlikely (!buff.resize (offset + str.length)))
- {
- set_error ();
- return;
- }
- memcpy (buff.arrayZ + offset, &str[0], str.length);
+ assert ((signed) (buff.length + length) <= buff.allocated);
+ hb_memcpy (buff.arrayZ + buff.length, str, length);
+ buff.length += length;
}
- bool is_error () const { return error; }
+ bool in_error () const { return buff.in_error (); }
protected:
- void set_error () { error = true; }
str_buff_t &buff;
- bool error;
};
struct cff_sub_table_info_t {
@@ -187,9 +179,12 @@ struct cff_font_dict_op_serializer_t : op_serializer_t
}
else
{
- HBUINT8 *d = c->allocate_size<HBUINT8> (opstr.str.length);
+ unsigned char *d = c->allocate_size<unsigned char> (opstr.length);
if (unlikely (!d)) return_trace (false);
- memcpy (d, &opstr.str[0], opstr.str.length);
+ /* Faster than hb_memcpy for small strings. */
+ for (unsigned i = 0; i < opstr.length; i++)
+ d[i] = opstr.ptr[i];
+ //hb_memcpy (d, opstr.ptr, opstr.length);
}
return_trace (true);
}
@@ -239,11 +234,10 @@ struct subr_flattener_t
bool flatten (str_buff_vec_t &flat_charstrings)
{
- if (!flat_charstrings.resize (plan->num_output_glyphs ()))
+ unsigned count = plan->num_output_glyphs ();
+ if (!flat_charstrings.resize (count))
return false;
- for (unsigned int i = 0; i < plan->num_output_glyphs (); i++)
- flat_charstrings[i].init ();
- for (unsigned int i = 0; i < plan->num_output_glyphs (); i++)
+ for (unsigned int i = 0; i < count; i++)
{
hb_codepoint_t glyph;
if (!plan->old_gid_for_new_gid (i, &glyph))
@@ -259,7 +253,7 @@ struct subr_flattener_t
ENV env (str, acc, fd);
cs_interpreter_t<ENV, OPSET, flatten_param_t> interp (env);
flatten_param_t param = {
- flat_charstrings[i],
+ flat_charstrings.arrayZ[i],
(bool) (plan->flags & HB_SUBSET_FLAGS_NO_HINTING)
};
if (unlikely (!interp.interpret (param)))
@@ -274,11 +268,9 @@ struct subr_flattener_t
struct subr_closures_t
{
- subr_closures_t (unsigned int fd_count) : valid (false), global_closure (), local_closures ()
+ subr_closures_t (unsigned int fd_count) : global_closure (), local_closures ()
{
- valid = true;
- if (!local_closures.resize (fd_count))
- valid = false;
+ local_closures.resize (fd_count);
}
void reset ()
@@ -288,47 +280,38 @@ struct subr_closures_t
local_closures[i].clear();
}
- bool is_valid () const { return valid; }
- bool valid;
+ bool in_error () const { return local_closures.in_error (); }
hb_set_t global_closure;
hb_vector_t<hb_set_t> local_closures;
};
struct parsed_cs_op_t : op_str_t
{
- void init (unsigned int subr_num_ = 0)
- {
- subr_num = subr_num_;
- drop_flag = false;
- keep_flag = false;
- skip_flag = false;
- }
-
- bool for_drop () const { return drop_flag; }
- void set_drop () { if (!for_keep ()) drop_flag = true; }
+ parsed_cs_op_t (unsigned int subr_num_ = 0) :
+ subr_num (subr_num_) {}
- bool for_keep () const { return keep_flag; }
- void set_keep () { keep_flag = true; }
+ bool is_hinting () const { return hinting_flag; }
+ void set_hinting () { hinting_flag = true; }
- bool for_skip () const { return skip_flag; }
- void set_skip () { skip_flag = true; }
-
- unsigned int subr_num;
+ /* The layout of this struct is designed to fit within the
+ * padding of op_str_t! */
protected:
- bool drop_flag;
- bool keep_flag;
- bool skip_flag;
+ bool hinting_flag = false;
+
+ public:
+ uint16_t subr_num;
};
struct parsed_cs_str_t : parsed_values_t<parsed_cs_op_t>
{
- void init ()
+ parsed_cs_str_t () :
+ parsed (false),
+ hint_dropped (false),
+ has_prefix_ (false),
+ has_calls_ (false)
{
SUPER::init ();
- parsed = false;
- hint_dropped = false;
- has_prefix_ = false;
}
void add_op (op_code_t op, const byte_str_ref_t& str_ref)
@@ -341,13 +324,12 @@ struct parsed_cs_str_t : parsed_values_t<parsed_cs_op_t>
{
if (!is_parsed ())
{
- unsigned int parsed_len = get_count ();
- if (likely (parsed_len > 0))
- values[parsed_len-1].set_skip ();
+ has_calls_ = true;
- parsed_cs_op_t val;
- val.init (subr_num);
- SUPER::add_op (op, str_ref, val);
+ /* Pop the subroutine number. */
+ values.pop ();
+
+ SUPER::add_op (op, str_ref, {subr_num});
}
}
@@ -377,11 +359,14 @@ struct parsed_cs_str_t : parsed_values_t<parsed_cs_op_t>
op_code_t prefix_op () const { return prefix_op_; }
const number_t &prefix_num () const { return prefix_num_; }
+ bool has_calls () const { return has_calls_; }
+
protected:
- bool parsed;
- bool hint_dropped;
- bool vsindex_dropped;
- bool has_prefix_;
+ bool parsed : 1;
+ bool hint_dropped : 1;
+ bool vsindex_dropped : 1;
+ bool has_prefix_ : 1;
+ bool has_calls_ : 1;
op_code_t prefix_op_;
number_t prefix_num_;
@@ -395,6 +380,59 @@ struct parsed_cs_str_vec_t : hb_vector_t<parsed_cs_str_t>
typedef hb_vector_t<parsed_cs_str_t> SUPER;
};
+struct cff_subset_accelerator_t
+{
+ static cff_subset_accelerator_t* create (
+ hb_blob_t* original_blob,
+ const parsed_cs_str_vec_t& parsed_charstrings,
+ const parsed_cs_str_vec_t& parsed_global_subrs,
+ const hb_vector_t<parsed_cs_str_vec_t>& parsed_local_subrs) {
+ cff_subset_accelerator_t* accel =
+ (cff_subset_accelerator_t*) hb_malloc (sizeof(cff_subset_accelerator_t));
+ new (accel) cff_subset_accelerator_t (original_blob,
+ parsed_charstrings,
+ parsed_global_subrs,
+ parsed_local_subrs);
+ return accel;
+ }
+
+ static void destroy (void* value) {
+ if (!value) return;
+
+ cff_subset_accelerator_t* accel = (cff_subset_accelerator_t*) value;
+ accel->~cff_subset_accelerator_t ();
+ hb_free (accel);
+ }
+
+ cff_subset_accelerator_t(
+ hb_blob_t* original_blob_,
+ const parsed_cs_str_vec_t& parsed_charstrings_,
+ const parsed_cs_str_vec_t& parsed_global_subrs_,
+ const hb_vector_t<parsed_cs_str_vec_t>& parsed_local_subrs_)
+ {
+ parsed_charstrings = parsed_charstrings_;
+ parsed_global_subrs = parsed_global_subrs_;
+ parsed_local_subrs = parsed_local_subrs_;
+
+ // the parsed charstrings point to memory in the original CFF table so we must hold a reference
+ // to it to keep the memory valid.
+ original_blob = hb_blob_reference (original_blob_);
+ }
+
+ ~cff_subset_accelerator_t() {
+ hb_blob_destroy (original_blob);
+ hb_map_destroy (glyph_to_sid_map.get_relaxed ());
+ }
+
+ parsed_cs_str_vec_t parsed_charstrings;
+ parsed_cs_str_vec_t parsed_global_subrs;
+ hb_vector_t<parsed_cs_str_vec_t> parsed_local_subrs;
+ mutable hb_atomic_ptr_t<hb_map_t> glyph_to_sid_map = nullptr;
+
+ private:
+ hb_blob_t* original_blob;
+};
+
struct subr_subset_param_t
{
subr_subset_param_t (parsed_cs_str_t *parsed_charstring_,
@@ -446,7 +484,11 @@ struct subr_subset_param_t
if (unlikely (calling && !parsed_str->is_parsed () && (parsed_str->values.length > 0)))
env.set_error ();
else
+ {
+ if (!parsed_str->is_parsed ())
+ parsed_str->alloc (env.str_ref.total_size () / 2);
current_parsed_str = parsed_str;
+ }
}
parsed_cs_str_t *current_parsed_str;
@@ -506,7 +548,7 @@ struct subr_remaps_t
{
global_remap.create (&closures.global_closure);
for (unsigned int i = 0; i < local_remaps.length; i++)
- local_remaps[i].create (&closures.local_closures[i]);
+ local_remaps.arrayZ[i].create (&closures.local_closures[i]);
}
subr_remap_t global_remap;
@@ -517,7 +559,8 @@ template <typename SUBSETTER, typename SUBRS, typename ACC, typename ENV, typena
struct subr_subsetter_t
{
subr_subsetter_t (ACC &acc_, const hb_subset_plan_t *plan_)
- : acc (acc_), plan (plan_), closures(acc_.fdCount), remaps(acc_.fdCount)
+ : acc (acc_), plan (plan_), closures(acc_.fdCount),
+ remaps(acc_.fdCount)
{}
/* Subroutine subsetting with --no-desubroutinize runs in phases:
@@ -536,55 +579,96 @@ struct subr_subsetter_t
*/
bool subset (void)
{
- parsed_charstrings.resize (plan->num_output_glyphs ());
- parsed_global_subrs.resize (acc.globalSubrs->count);
-
- if (unlikely (remaps.in_error()
- || parsed_charstrings.in_error ()
- || parsed_global_subrs.in_error ())) {
- return false;
+ unsigned fd_count = acc.fdCount;
+ const cff_subset_accelerator_t* cff_accelerator = nullptr;
+ if (plan->accelerator && plan->accelerator->cff_accelerator) {
+ cff_accelerator = plan->accelerator->cff_accelerator;
+ fd_count = cff_accelerator->parsed_local_subrs.length;
}
- if (unlikely (!parsed_local_subrs.resize (acc.fdCount))) return false;
+ if (cff_accelerator) {
+ // If we are not dropping hinting then charstrings are not modified so we can
+ // just use a reference to the cached copies.
+ cached_charstrings.resize (plan->num_output_glyphs ());
+ parsed_global_subrs = &cff_accelerator->parsed_global_subrs;
+ parsed_local_subrs = &cff_accelerator->parsed_local_subrs;
+ } else {
+ parsed_charstrings.resize (plan->num_output_glyphs ());
+ parsed_global_subrs_storage.resize (acc.globalSubrs->count);
- for (unsigned int i = 0; i < acc.fdCount; i++)
- {
- parsed_local_subrs[i].resize (acc.privateDicts[i].localSubrs->count);
- if (unlikely (parsed_local_subrs[i].in_error ())) return false;
+ if (unlikely (!parsed_local_subrs_storage.resize (fd_count))) return false;
+
+ for (unsigned int i = 0; i < acc.fdCount; i++)
+ {
+ unsigned count = acc.privateDicts[i].localSubrs->count;
+ parsed_local_subrs_storage[i].resize (count);
+ if (unlikely (parsed_local_subrs_storage[i].in_error ())) return false;
+ }
+
+ parsed_global_subrs = &parsed_global_subrs_storage;
+ parsed_local_subrs = &parsed_local_subrs_storage;
}
- if (unlikely (!closures.valid))
+
+ if (unlikely (remaps.in_error()
+ || cached_charstrings.in_error ()
+ || parsed_charstrings.in_error ()
+ || parsed_global_subrs->in_error ()
+ || closures.in_error ())) {
return false;
+ }
/* phase 1 & 2 */
for (unsigned int i = 0; i < plan->num_output_glyphs (); i++)
{
hb_codepoint_t glyph;
if (!plan->old_gid_for_new_gid (i, &glyph))
- continue;
+ continue;
+
const hb_ubytes_t str = (*acc.charStrings)[glyph];
unsigned int fd = acc.fdSelect->get_fd (glyph);
if (unlikely (fd >= acc.fdCount))
- return false;
+ return false;
+
+ if (cff_accelerator)
+ {
+ // parsed string already exists in accelerator, copy it and move
+ // on.
+ if (cached_charstrings)
+ cached_charstrings[i] = &cff_accelerator->parsed_charstrings[glyph];
+ else
+ parsed_charstrings[i] = cff_accelerator->parsed_charstrings[glyph];
+
+ continue;
+ }
ENV env (str, acc, fd);
cs_interpreter_t<ENV, OPSET, subr_subset_param_t> interp (env);
- parsed_charstrings[i].alloc (str.length);
+ parsed_charstrings[i].alloc (str.length / 2);
subr_subset_param_t param (&parsed_charstrings[i],
- &parsed_global_subrs,
- &parsed_local_subrs[fd],
- &closures.global_closure,
- &closures.local_closures[fd],
- plan->flags & HB_SUBSET_FLAGS_NO_HINTING);
+ &parsed_global_subrs_storage,
+ &parsed_local_subrs_storage[fd],
+ &closures.global_closure,
+ &closures.local_closures[fd],
+ plan->flags & HB_SUBSET_FLAGS_NO_HINTING);
if (unlikely (!interp.interpret (param)))
- return false;
+ return false;
/* complete parsed string esp. copy CFF1 width or CFF2 vsindex to the parsed charstring for encoding */
SUBSETTER::complete_parsed_str (interp.env, param, parsed_charstrings[i]);
}
- if (plan->flags & HB_SUBSET_FLAGS_NO_HINTING)
+ // Since parsed strings were loaded from accelerator, we still need
+ // to compute the subroutine closures which would have normally happened during
+ // parsing.
+ if (cff_accelerator &&
+ !closure_subroutines(*parsed_global_subrs,
+ *parsed_local_subrs))
+ return false;
+
+ if ((plan->flags & HB_SUBSET_FLAGS_NO_HINTING && !cff_accelerator) ||
+ plan->inprogress_accelerator)
{
/* mark hint ops and arguments for drop */
for (unsigned int i = 0; i < plan->num_output_glyphs (); i++)
@@ -596,8 +680,8 @@ struct subr_subsetter_t
if (unlikely (fd >= acc.fdCount))
return false;
subr_subset_param_t param (&parsed_charstrings[i],
- &parsed_global_subrs,
- &parsed_local_subrs[fd],
+ &parsed_global_subrs_storage,
+ &parsed_local_subrs_storage[fd],
&closures.global_closure,
&closures.local_closures[fd],
plan->flags & HB_SUBSET_FLAGS_NO_HINTING);
@@ -612,27 +696,14 @@ struct subr_subsetter_t
}
/* after dropping hints recreate closures of actually used subrs */
- closures.reset ();
- for (unsigned int i = 0; i < plan->num_output_glyphs (); i++)
- {
- hb_codepoint_t glyph;
- if (!plan->old_gid_for_new_gid (i, &glyph))
- continue;
- unsigned int fd = acc.fdSelect->get_fd (glyph);
- if (unlikely (fd >= acc.fdCount))
- return false;
- subr_subset_param_t param (&parsed_charstrings[i],
- &parsed_global_subrs,
- &parsed_local_subrs[fd],
- &closures.global_closure,
- &closures.local_closures[fd],
- plan->flags & HB_SUBSET_FLAGS_NO_HINTING);
- collect_subr_refs_in_str (parsed_charstrings[i], param);
- }
+ if (plan->flags & HB_SUBSET_FLAGS_NO_HINTING &&
+ !cff_accelerator &&
+ !closure_subroutines(*parsed_global_subrs, *parsed_local_subrs)) return false;
}
remaps.create (closures);
+ populate_subset_accelerator ();
return true;
}
@@ -646,13 +717,13 @@ struct subr_subsetter_t
if (!plan->old_gid_for_new_gid (i, &glyph))
{
/* add an endchar only charstring for a missing glyph if CFF1 */
- if (endchar_op != OpCode_Invalid) buffArray[i].push (endchar_op);
+ if (endchar_op != OpCode_Invalid) buffArray.arrayZ[i].push (endchar_op);
continue;
}
unsigned int fd = acc.fdSelect->get_fd (glyph);
if (unlikely (fd >= acc.fdCount))
return false;
- if (unlikely (!encode_str (parsed_charstrings[i], fd, buffArray[i])))
+ if (unlikely (!encode_str (get_parsed_charstring (i), fd, buffArray.arrayZ[i])))
return false;
}
return true;
@@ -664,26 +735,25 @@ struct subr_subsetter_t
if (unlikely (!buffArray.resize (count)))
return false;
- for (unsigned int old_num = 0; old_num < subrs.length; old_num++)
+ for (unsigned int new_num = 0; new_num < count; new_num++)
{
- hb_codepoint_t new_num = remap[old_num];
- if (new_num != CFF_UNDEF_CODE)
- {
- if (unlikely (!encode_str (subrs[old_num], fd, buffArray[new_num])))
- return false;
- }
+ hb_codepoint_t old_num = remap.backward (new_num);
+ assert (old_num != CFF_UNDEF_CODE);
+
+ if (unlikely (!encode_str (subrs[old_num], fd, buffArray[new_num])))
+ return false;
}
return true;
}
bool encode_globalsubrs (str_buff_vec_t &buffArray)
{
- return encode_subrs (parsed_global_subrs, remaps.global_remap, 0, buffArray);
+ return encode_subrs (*parsed_global_subrs, remaps.global_remap, 0, buffArray);
}
bool encode_localsubrs (unsigned int fd, str_buff_vec_t &buffArray) const
{
- return encode_subrs (parsed_local_subrs[fd], remaps.local_remaps[fd], fd, buffArray);
+ return encode_subrs ((*parsed_local_subrs)[fd], remaps.local_remaps[fd], fd, buffArray);
}
protected:
@@ -712,7 +782,7 @@ struct subr_subsetter_t
* then this entire subroutine must be a hint. drop its call. */
if (drop.ends_in_hint)
{
- str.values[pos].set_drop ();
+ str.values[pos].set_hinting ();
/* if this subr call is at the end of the parent subr, propagate the flag
* otherwise reset the flag */
if (!str.at_end (pos))
@@ -720,7 +790,7 @@ struct subr_subsetter_t
}
else if (drop.all_dropped)
{
- str.values[pos].set_drop ();
+ str.values[pos].set_hinting ();
}
return has_hint;
@@ -731,20 +801,22 @@ struct subr_subsetter_t
{
bool seen_hint = false;
- for (unsigned int pos = 0; pos < str.values.length; pos++)
+ unsigned count = str.values.length;
+ auto *values = str.values.arrayZ;
+ for (unsigned int pos = 0; pos < count; pos++)
{
bool has_hint = false;
- switch (str.values[pos].op)
+ switch (values[pos].op)
{
case OpCode_callsubr:
has_hint = drop_hints_in_subr (str, pos,
- *param.parsed_local_subrs, str.values[pos].subr_num,
+ *param.parsed_local_subrs, values[pos].subr_num,
param, drop);
break;
case OpCode_callgsubr:
has_hint = drop_hints_in_subr (str, pos,
- *param.parsed_global_subrs, str.values[pos].subr_num,
+ *param.parsed_global_subrs, values[pos].subr_num,
param, drop);
break;
@@ -758,7 +830,7 @@ struct subr_subsetter_t
case OpCode_cntrmask:
if (drop.seen_moveto)
{
- str.values[pos].set_drop ();
+ values[pos].set_hinting ();
break;
}
HB_FALLTHROUGH;
@@ -768,13 +840,13 @@ struct subr_subsetter_t
case OpCode_hstem:
case OpCode_vstem:
has_hint = true;
- str.values[pos].set_drop ();
+ values[pos].set_hinting ();
if (str.at_end (pos))
drop.ends_in_hint = true;
break;
case OpCode_dotsection:
- str.values[pos].set_drop ();
+ values[pos].set_hinting ();
break;
default:
@@ -785,10 +857,10 @@ struct subr_subsetter_t
{
for (int i = pos - 1; i >= 0; i--)
{
- parsed_cs_op_t &csop = str.values[(unsigned)i];
- if (csop.for_drop ())
+ parsed_cs_op_t &csop = values[(unsigned)i];
+ if (csop.is_hinting ())
break;
- csop.set_drop ();
+ csop.set_hinting ();
if (csop.op == OpCode_vsindexcs)
drop.vsindex_dropped = true;
}
@@ -801,12 +873,12 @@ struct subr_subsetter_t
* only (usually one) hintmask operator, then calls to this subr can be dropped.
*/
drop.all_dropped = true;
- for (unsigned int pos = 0; pos < str.values.length; pos++)
+ for (unsigned int pos = 0; pos < count; pos++)
{
- parsed_cs_op_t &csop = str.values[pos];
+ parsed_cs_op_t &csop = values[pos];
if (csop.op == OpCode_return)
break;
- if (!csop.for_drop ())
+ if (!csop.is_hinting ())
{
drop.all_dropped = false;
break;
@@ -816,32 +888,62 @@ struct subr_subsetter_t
return seen_hint;
}
- void collect_subr_refs_in_subr (parsed_cs_str_t &str, unsigned int pos,
- unsigned int subr_num, parsed_cs_str_vec_t &subrs,
+ bool closure_subroutines (const parsed_cs_str_vec_t& global_subrs,
+ const hb_vector_t<parsed_cs_str_vec_t>& local_subrs)
+ {
+ closures.reset ();
+ for (unsigned int i = 0; i < plan->num_output_glyphs (); i++)
+ {
+ hb_codepoint_t glyph;
+ if (!plan->old_gid_for_new_gid (i, &glyph))
+ continue;
+ unsigned int fd = acc.fdSelect->get_fd (glyph);
+ if (unlikely (fd >= acc.fdCount))
+ return false;
+
+ // Note: the const casts are safe here because collect_subr_refs_in_str only
+ // computes a closure and does not modify any of the charstrings.
+ subr_subset_param_t param (const_cast<parsed_cs_str_t*> (&get_parsed_charstring (i)),
+ const_cast<parsed_cs_str_vec_t*> (&global_subrs),
+ const_cast<parsed_cs_str_vec_t*> (&local_subrs[fd]),
+ &closures.global_closure,
+ &closures.local_closures[fd],
+ plan->flags & HB_SUBSET_FLAGS_NO_HINTING);
+ collect_subr_refs_in_str (get_parsed_charstring (i), param);
+ }
+
+ return true;
+ }
+
+ void collect_subr_refs_in_subr (unsigned int subr_num, parsed_cs_str_vec_t &subrs,
hb_set_t *closure,
const subr_subset_param_t &param)
{
+ if (closure->has (subr_num))
+ return;
closure->add (subr_num);
collect_subr_refs_in_str (subrs[subr_num], param);
}
- void collect_subr_refs_in_str (parsed_cs_str_t &str, const subr_subset_param_t &param)
+ void collect_subr_refs_in_str (const parsed_cs_str_t &str,
+ const subr_subset_param_t &param)
{
- for (unsigned int pos = 0; pos < str.values.length; pos++)
+ if (!str.has_calls ())
+ return;
+
+ for (auto &opstr : str.values)
{
- if (!str.values[pos].for_drop ())
+ if (!param.drop_hints || !opstr.is_hinting ())
{
- switch (str.values[pos].op)
+ switch (opstr.op)
{
case OpCode_callsubr:
- collect_subr_refs_in_subr (str, pos,
- str.values[pos].subr_num, *param.parsed_local_subrs,
+ collect_subr_refs_in_subr (opstr.subr_num, *param.parsed_local_subrs,
param.local_closure, param);
break;
case OpCode_callgsubr:
- collect_subr_refs_in_subr (str, pos,
- str.values[pos].subr_num, *param.parsed_global_subrs,
+ collect_subr_refs_in_subr (opstr.subr_num, *param.parsed_global_subrs,
param.global_closure, param);
break;
@@ -853,42 +955,113 @@ struct subr_subsetter_t
bool encode_str (const parsed_cs_str_t &str, const unsigned int fd, str_buff_t &buff) const
{
- unsigned count = str.get_count ();
str_encoder_t encoder (buff);
encoder.reset ();
- buff.alloc (count * 3);
+ bool hinting = !(plan->flags & HB_SUBSET_FLAGS_NO_HINTING);
/* if a prefix (CFF1 width or CFF2 vsindex) has been removed along with hints,
* re-insert it at the beginning of the charstring */
- if (str.has_prefix () && str.is_hint_dropped ())
+ if (str.has_prefix () && !hinting && str.is_hint_dropped ())
{
encoder.encode_num (str.prefix_num ());
if (str.prefix_op () != OpCode_Invalid)
encoder.encode_op (str.prefix_op ());
}
- for (unsigned int i = 0; i < count; i++)
+
+ unsigned size = 0;
+ for (auto &opstr : str.values)
{
- const parsed_cs_op_t &opstr = str.values[i];
- if (!opstr.for_drop () && !opstr.for_skip ())
+ size += opstr.length;
+ if (opstr.op == OpCode_callsubr || opstr.op == OpCode_callgsubr)
+ size += 3;
+ }
+ if (!buff.alloc (buff.length + size))
+ return false;
+
+ for (auto &opstr : str.values)
+ {
+ if (hinting || !opstr.is_hinting ())
{
switch (opstr.op)
{
case OpCode_callsubr:
encoder.encode_int (remaps.local_remaps[fd].biased_num (opstr.subr_num));
- encoder.encode_op (OpCode_callsubr);
+ encoder.copy_str (opstr.ptr, opstr.length);
break;
case OpCode_callgsubr:
encoder.encode_int (remaps.global_remap.biased_num (opstr.subr_num));
- encoder.encode_op (OpCode_callgsubr);
+ encoder.copy_str (opstr.ptr, opstr.length);
break;
default:
- encoder.copy_str (opstr.str);
+ encoder.copy_str (opstr.ptr, opstr.length);
break;
}
}
}
- return !encoder.is_error ();
+ return !encoder.in_error ();
+ }
+
+ void compact_parsed_strings () const
+ {
+ for (auto &cs : parsed_charstrings)
+ compact_string (cs);
+ for (auto &cs : parsed_global_subrs_storage)
+ compact_string (cs);
+ for (auto &vec : parsed_local_subrs_storage)
+ for (auto &cs : vec)
+ compact_string (cs);
+ }
+
+ static void compact_string (parsed_cs_str_t &str)
+ {
+ unsigned count = str.values.length;
+ if (unlikely (!count)) return;
+ auto &opstr = str.values.arrayZ;
+ unsigned j = 0;
+ for (unsigned i = 1; i < count; i++)
+ {
+ /* See if we can combine op j and op i. */
+ bool combine =
+ (opstr[j].op != OpCode_callsubr && opstr[j].op != OpCode_callgsubr) &&
+ (opstr[i].op != OpCode_callsubr && opstr[i].op != OpCode_callgsubr) &&
+ (opstr[j].is_hinting () == opstr[i].is_hinting ()) &&
+ (opstr[j].ptr + opstr[j].length == opstr[i].ptr) &&
+ (opstr[j].length + opstr[i].length <= 255);
+
+ if (combine)
+ {
+ opstr[j].length += opstr[i].length;
+ opstr[j].op = OpCode_Invalid;
+ }
+ else
+ {
+ opstr[++j] = opstr[i];
+ }
+ }
+ str.values.shrink (j + 1);
+ }
+
+ void populate_subset_accelerator () const
+ {
+ if (!plan->inprogress_accelerator) return;
+
+ compact_parsed_strings ();
+
+ plan->inprogress_accelerator->cff_accelerator =
+ cff_subset_accelerator_t::create(acc.blob,
+ parsed_charstrings,
+ parsed_global_subrs_storage,
+ parsed_local_subrs_storage);
+ plan->inprogress_accelerator->destroy_cff_accelerator =
+ cff_subset_accelerator_t::destroy;
+
+ }
+
+ const parsed_cs_str_t& get_parsed_charstring (unsigned i) const
+ {
+ if (cached_charstrings) return *(cached_charstrings[i]);
+ return parsed_charstrings[i];
}
protected:
@@ -897,13 +1070,17 @@ struct subr_subsetter_t
subr_closures_t closures;
- parsed_cs_str_vec_t parsed_charstrings;
- parsed_cs_str_vec_t parsed_global_subrs;
- hb_vector_t<parsed_cs_str_vec_t> parsed_local_subrs;
+ hb_vector_t<const parsed_cs_str_t*> cached_charstrings;
+ const parsed_cs_str_vec_t* parsed_global_subrs;
+ const hb_vector_t<parsed_cs_str_vec_t>* parsed_local_subrs;
subr_remaps_t remaps;
private:
+
+ parsed_cs_str_vec_t parsed_charstrings;
+ parsed_cs_str_vec_t parsed_global_subrs_storage;
+ hb_vector_t<parsed_cs_str_vec_t> parsed_local_subrs_storage;
typedef typename SUBRS::count_type subr_count_type;
};
diff --git a/thirdparty/harfbuzz/src/hb-subset-cff1.cc b/thirdparty/harfbuzz/src/hb-subset-cff1.cc
index 52bb13d320..538f28f5aa 100644
--- a/thirdparty/harfbuzz/src/hb-subset-cff1.cc
+++ b/thirdparty/harfbuzz/src/hb-subset-cff1.cc
@@ -167,9 +167,10 @@ struct cff1_top_dict_op_serializer_t : cff_top_dict_op_serializer_t<cff1_top_dic
* for supplement, the original byte string is copied along with the op code */
op_str_t supp_op;
supp_op.op = op;
- if ( unlikely (!(opstr.str.length >= opstr.last_arg_offset + 3)))
+ if ( unlikely (!(opstr.length >= opstr.last_arg_offset + 3)))
return_trace (false);
- supp_op.str = hb_ubytes_t (&opstr.str + opstr.last_arg_offset, opstr.str.length - opstr.last_arg_offset);
+ supp_op.ptr = opstr.ptr + opstr.last_arg_offset;
+ supp_op.length = opstr.length - opstr.last_arg_offset;
return_trace (UnsizedByteStr::serialize_int2 (c, mod.nameSIDs[name_dict_values_t::registry]) &&
UnsizedByteStr::serialize_int2 (c, mod.nameSIDs[name_dict_values_t::ordering]) &&
copy_opstr (c, supp_op));
@@ -442,8 +443,17 @@ struct cff_subset_plan {
return;
}
- bool use_glyph_to_sid_map = plan->num_output_glyphs () > plan->source->get_num_glyphs () / 8.;
- hb_map_t *glyph_to_sid_map = use_glyph_to_sid_map ? acc.create_glyph_to_sid_map () : nullptr;
+ hb_map_t *glyph_to_sid_map = (plan->accelerator && plan->accelerator->cff_accelerator) ?
+ plan->accelerator->cff_accelerator->glyph_to_sid_map :
+ nullptr;
+ bool created_map = false;
+ if (!glyph_to_sid_map &&
+ ((plan->accelerator && plan->accelerator->cff_accelerator) ||
+ plan->num_output_glyphs () > plan->source->get_num_glyphs () / 8.))
+ {
+ created_map = true;
+ glyph_to_sid_map = acc.create_glyph_to_sid_map ();
+ }
unsigned int glyph;
for (glyph = 1; glyph < plan->num_output_glyphs (); glyph++)
@@ -467,8 +477,12 @@ struct cff_subset_plan {
last_sid = sid;
}
- if (glyph_to_sid_map)
- hb_map_destroy (glyph_to_sid_map);
+ if (created_map)
+ {
+ if (!(plan->accelerator && plan->accelerator->cff_accelerator) ||
+ !plan->accelerator->cff_accelerator->glyph_to_sid_map.cmpexch (nullptr, glyph_to_sid_map))
+ hb_map_destroy (glyph_to_sid_map);
+ }
bool two_byte = subset_charset_ranges.complete (glyph);
@@ -728,11 +742,17 @@ static bool _serialize_cff1 (hb_serialize_context_t *c,
/* CharStrings */
{
+ c->push<CFF1CharStrings> ();
+
+ unsigned total_size = CFF1CharStrings::total_size (plan.subset_charstrings);
+ if (unlikely (!c->start_zerocopy (total_size)))
+ return false;
+
CFF1CharStrings *cs = c->start_embed<CFF1CharStrings> ();
if (unlikely (!cs)) return false;
- c->push ();
+
if (likely (cs->serialize (c, plan.subset_charstrings)))
- plan.info.char_strings_link = c->pop_pack ();
+ plan.info.char_strings_link = c->pop_pack (false);
else
{
c->pop_discard ();
@@ -815,7 +835,7 @@ static bool _serialize_cff1 (hb_serialize_context_t *c,
CFF1Subrs *dest = c->start_embed <CFF1Subrs> ();
if (unlikely (!dest)) return false;
if (likely (dest->serialize (c, plan.subset_globalsubrs)))
- c->pop_pack ();
+ c->pop_pack (false);
else
{
c->pop_discard ();
diff --git a/thirdparty/harfbuzz/src/hb-subset-cff2.cc b/thirdparty/harfbuzz/src/hb-subset-cff2.cc
index 08e820efcf..60946b0281 100644
--- a/thirdparty/harfbuzz/src/hb-subset-cff2.cc
+++ b/thirdparty/harfbuzz/src/hb-subset-cff2.cc
@@ -334,7 +334,7 @@ static bool _serialize_cff2 (hb_serialize_context_t *c,
if (unlikely (!dest)) return false;
c->push ();
if (likely (dest->serialize (c, plan.subset_localsubrs[i])))
- subrs_link = c->pop_pack ();
+ subrs_link = c->pop_pack (false);
else
{
c->pop_discard ();
@@ -361,11 +361,17 @@ static bool _serialize_cff2 (hb_serialize_context_t *c,
/* CharStrings */
{
+ c->push ();
+
+ unsigned total_size = CFF2CharStrings::total_size (plan.subset_charstrings);
+ if (unlikely (!c->start_zerocopy (total_size)))
+ return false;
+
CFF2CharStrings *cs = c->start_embed<CFF2CharStrings> ();
if (unlikely (!cs)) return false;
- c->push ();
+
if (likely (cs->serialize (c, plan.subset_charstrings)))
- plan.info.char_strings_link = c->pop_pack ();
+ plan.info.char_strings_link = c->pop_pack (false);
else
{
c->pop_discard ();
@@ -377,9 +383,10 @@ static bool _serialize_cff2 (hb_serialize_context_t *c,
if (acc.fdSelect != &Null (CFF2FDSelect))
{
c->push ();
- if (likely (hb_serialize_cff_fdselect (c, num_glyphs, *(const FDSelect *)acc.fdSelect, plan.orig_fdcount,
- plan.subset_fdselect_format, plan.subset_fdselect_size,
- plan.subset_fdselect_ranges)))
+ if (likely (hb_serialize_cff_fdselect (c, num_glyphs, *(const FDSelect *)acc.fdSelect,
+ plan.orig_fdcount,
+ plan.subset_fdselect_format, plan.subset_fdselect_size,
+ plan.subset_fdselect_ranges)))
plan.info.fd_select.link = c->pop_pack ();
else
{
@@ -401,7 +408,7 @@ static bool _serialize_cff2 (hb_serialize_context_t *c,
hb_iter (private_dict_infos))
;
if (unlikely (!fda->serialize (c, it, fontSzr))) return false;
- plan.info.fd_array_link = c->pop_pack ();
+ plan.info.fd_array_link = c->pop_pack (false);
}
/* variation store */
@@ -410,7 +417,7 @@ static bool _serialize_cff2 (hb_serialize_context_t *c,
c->push ();
CFF2VariationStore *dest = c->start_embed<CFF2VariationStore> ();
if (unlikely (!dest || !dest->serialize (c, acc.varStore))) return false;
- plan.info.var_store_link = c->pop_pack ();
+ plan.info.var_store_link = c->pop_pack (false);
}
OT::cff2 *cff2 = c->allocate_min<OT::cff2> ();
diff --git a/thirdparty/harfbuzz/src/hb-subset-input.cc b/thirdparty/harfbuzz/src/hb-subset-input.cc
index 14ae210d49..42434ae0ef 100644
--- a/thirdparty/harfbuzz/src/hb-subset-input.cc
+++ b/thirdparty/harfbuzz/src/hb-subset-input.cc
@@ -26,7 +26,7 @@
#include "hb-subset.hh"
#include "hb-set.hh"
-
+#include "hb-utf.hh"
/**
* hb_subset_input_create_or_fail:
*
@@ -49,8 +49,15 @@ hb_subset_input_create_or_fail (void)
set = hb_set_create ();
input->axes_location = hb_hashmap_create<hb_tag_t, float> ();
-
- if (!input->axes_location || input->in_error ())
+#ifdef HB_EXPERIMENTAL_API
+ input->name_table_overrides = hb_hashmap_create<hb_ot_name_record_ids_t, hb_bytes_t> ();
+#endif
+
+ if (!input->axes_location ||
+#ifdef HB_EXPERIMENTAL_API
+ !input->name_table_overrides ||
+#endif
+ input->in_error ())
{
hb_subset_input_destroy (input);
return nullptr;
@@ -89,7 +96,6 @@ hb_subset_input_create_or_fail (void)
hb_tag_t default_no_subset_tables[] = {
HB_TAG ('a', 'v', 'a', 'r'),
- HB_TAG ('f', 'v', 'a', 'r'),
HB_TAG ('g', 'a', 's', 'p'),
HB_TAG ('c', 'v', 't', ' '),
HB_TAG ('f', 'p', 'g', 'm'),
@@ -249,6 +255,15 @@ hb_subset_input_destroy (hb_subset_input_t *input)
hb_hashmap_destroy (input->axes_location);
+#ifdef HB_EXPERIMENTAL_API
+ if (input->name_table_overrides)
+ {
+ for (auto _ : *input->name_table_overrides)
+ _.second.fini ();
+ }
+ hb_hashmap_destroy (input->name_table_overrides);
+#endif
+
hb_free (input);
}
@@ -380,7 +395,6 @@ hb_subset_input_get_user_data (const hb_subset_input_t *input,
return hb_object_get_user_data (input, key);
}
-#ifdef HB_EXPERIMENTAL_API
#ifndef HB_NO_VAR
/**
* hb_subset_input_pin_axis_to_default: (skip)
@@ -389,11 +403,14 @@ hb_subset_input_get_user_data (const hb_subset_input_t *input,
*
* Pin an axis to its default location in the given subset input object.
*
+ * Currently this only works for fonts with a 'glyf' table. CFF and CFF2 are not
+ * yet supported. Additionally, all axes in the font must be pinned.
+ *
* Return value: `true` if success, `false` otherwise
*
- * Since: EXPERIMENTAL
+ * Since: 6.0.0
**/
-hb_bool_t
+HB_EXTERN hb_bool_t
hb_subset_input_pin_axis_to_default (hb_subset_input_t *input,
hb_face_t *face,
hb_tag_t axis_tag)
@@ -413,11 +430,14 @@ hb_subset_input_pin_axis_to_default (hb_subset_input_t *input,
*
* Pin an axis to a fixed location in the given subset input object.
*
+ * Currently this only works for fonts with a 'glyf' table. CFF and CFF2 are not
+ * yet supported. Additionally, all axes in the font must be pinned.
+ *
* Return value: `true` if success, `false` otherwise
*
- * Since: EXPERIMENTAL
+ * Since: 6.0.0
**/
-hb_bool_t
+HB_EXTERN hb_bool_t
hb_subset_input_pin_axis_location (hb_subset_input_t *input,
hb_face_t *face,
hb_tag_t axis_tag,
@@ -431,4 +451,150 @@ hb_subset_input_pin_axis_location (hb_subset_input_t *input,
return input->axes_location->set (axis_tag, val);
}
#endif
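(Illustrative sketch, not part of the patch: a minimal call sequence for the axis pinning
API documented above. It assumes an existing hb_face_t *face; the axis tags and the
value 400.f are placeholder choices.)

    hb_subset_input_t *input = hb_subset_input_create_or_fail ();

    /* Pin 'wght' to 400 and 'wdth' to its default; currently every axis in the
     * font must be pinned before subsetting. */
    hb_subset_input_pin_axis_location (input, face, HB_TAG ('w','g','h','t'), 400.f);
    hb_subset_input_pin_axis_to_default (input, face, HB_TAG ('w','d','t','h'));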
+
+/**
+ * hb_subset_preprocess:
+ * @source: a #hb_face_t object.
+ *
+ * Preprocesses the face and attaches data that will be needed by the
+ * subsetter. Future subsetting operations can then use the precomputed data
+ * to speed up the subsetting operation.
+ *
+ * See [subset-preprocessing](https://github.com/harfbuzz/harfbuzz/blob/main/docs/subset-preprocessing.md)
+ * for more information.
+ *
+ * Note: the preprocessed face may contain sub-blobs that reference the memory
+ * backing the source #hb_face_t. Therefore, if this memory is not owned by the
+ * source face, you will need to ensure that it lives as long as the returned
+ * #hb_face_t.
+ *
+ * Returns: a new #hb_face_t.
+ *
+ * Since: 6.0.0
+ **/
+
+HB_EXTERN hb_face_t *
+hb_subset_preprocess (hb_face_t *source)
+{
+ hb_subset_input_t* input = hb_subset_input_create_or_fail ();
+ if (!input)
+ return source;
+
+ hb_set_clear (hb_subset_input_set(input, HB_SUBSET_SETS_UNICODE));
+ hb_set_invert (hb_subset_input_set(input, HB_SUBSET_SETS_UNICODE));
+
+ hb_set_clear (hb_subset_input_set(input, HB_SUBSET_SETS_GLYPH_INDEX));
+ hb_set_invert (hb_subset_input_set(input, HB_SUBSET_SETS_GLYPH_INDEX));
+
+ hb_set_clear (hb_subset_input_set(input,
+ HB_SUBSET_SETS_LAYOUT_FEATURE_TAG));
+ hb_set_invert (hb_subset_input_set(input,
+ HB_SUBSET_SETS_LAYOUT_FEATURE_TAG));
+
+ hb_set_clear (hb_subset_input_set(input,
+ HB_SUBSET_SETS_LAYOUT_SCRIPT_TAG));
+ hb_set_invert (hb_subset_input_set(input,
+ HB_SUBSET_SETS_LAYOUT_SCRIPT_TAG));
+
+ hb_set_clear (hb_subset_input_set(input,
+ HB_SUBSET_SETS_NAME_ID));
+ hb_set_invert (hb_subset_input_set(input,
+ HB_SUBSET_SETS_NAME_ID));
+
+ hb_set_clear (hb_subset_input_set(input,
+ HB_SUBSET_SETS_NAME_LANG_ID));
+ hb_set_invert (hb_subset_input_set(input,
+ HB_SUBSET_SETS_NAME_LANG_ID));
+
+ hb_subset_input_set_flags(input,
+ HB_SUBSET_FLAGS_NOTDEF_OUTLINE |
+ HB_SUBSET_FLAGS_GLYPH_NAMES |
+ HB_SUBSET_FLAGS_RETAIN_GIDS |
+ HB_SUBSET_FLAGS_NO_PRUNE_UNICODE_RANGES);
+ input->attach_accelerator_data = true;
+
+ // Always use long loca in the preprocessed version. This lets us store the
+ // glyph bytes unpadded, which allows a future subset operation to run faster
+ // by skipping the trim padding step.
+ input->force_long_loca = true;
+
+ hb_face_t* new_source = hb_subset_or_fail (source, input);
+ hb_subset_input_destroy (input);
+
+ if (!new_source) {
+ DEBUG_MSG (SUBSET, nullptr, "Preprocessing failed due to subset failure.");
+ return source;
+ }
+
+ return new_source;
+}
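(Illustrative sketch, not part of the patch: one way hb_subset_preprocess() might be
combined with the existing subsetting entry points. The font path and the retained
codepoint are placeholders; error handling is mostly omitted.)

    hb_blob_t *blob = hb_blob_create_from_file ("font.ttf"); /* placeholder path */
    hb_face_t *face = hb_face_create (blob, 0);
    hb_blob_destroy (blob);

    /* Attach the accelerator data once; later subset operations reuse it. */
    hb_face_t *preprocessed = hb_subset_preprocess (face);
    if (preprocessed != face)
      hb_face_destroy (face); /* on failure the source face is returned unchanged */

    hb_subset_input_t *input = hb_subset_input_create_or_fail ();
    hb_set_add (hb_subset_input_unicode_set (input), 0x41u); /* keep U+0041 */

    hb_face_t *subset = hb_subset_or_fail (preprocessed, input);
    hb_subset_input_destroy (input);
    if (subset)
      hb_face_destroy (subset);
    hb_face_destroy (preprocessed);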
+
+#ifdef HB_EXPERIMENTAL_API
+/**
+ * hb_subset_input_override_name_table:
+ * @input: a #hb_subset_input_t object.
+ * @name_id: name_id of a nameRecord
+ * @platform_id: platform ID of a nameRecord
+ * @encoding_id: encoding ID of a nameRecord
+ * @language_id: language ID of a nameRecord
+ * @name_str: pointer to the new name string value, or `NULL` to indicate the record should be removed
+ * @str_len: the size of @name_str, or -1 if it is `NULL`-terminated
+ *
+ * Override the name string of the NameRecord identified by name_id,
+ * platform_id, encoding_id and language_id. If a record with that name_id
+ * doesn't exist, create it and insert it into the name table.
+ *
+ * Note: for the Mac platform, only @name_str values consisting entirely of ASCII
+ * characters are supported; a @name_str containing non-ASCII characters will be ignored.
+ *
+ * Since: EXPERIMENTAL
+ **/
+HB_EXTERN hb_bool_t
+hb_subset_input_override_name_table (hb_subset_input_t *input,
+ hb_ot_name_id_t name_id,
+ unsigned platform_id,
+ unsigned encoding_id,
+ unsigned language_id,
+ const char *name_str,
+ int str_len /* -1 means nul-terminated */)
+{
+ if (!name_str)
+ {
+ str_len = 0;
+ }
+ else if (str_len == -1)
+ {
+ str_len = strlen (name_str);
+ }
+
+ hb_bytes_t name_bytes (nullptr, 0);
+ if (str_len)
+ {
+ if (platform_id == 1)
+ {
+ const uint8_t *src = reinterpret_cast<const uint8_t*> (name_str);
+ const uint8_t *src_end = src + str_len;
+
+ hb_codepoint_t unicode;
+ const hb_codepoint_t replacement = HB_BUFFER_REPLACEMENT_CODEPOINT_DEFAULT;
+ while (src < src_end)
+ {
+ src = hb_utf8_t::next (src, src_end, &unicode, replacement);
+ if (unicode >= 0x0080u)
+ {
+ printf ("Non-ascii character detected, ignored...This API supports acsii characters only for mac platform\n");
+ return false;
+ }
+ }
+ }
+ char *override_name = (char *) hb_malloc (str_len);
+ if (unlikely (!override_name)) return false;
+
+ hb_memcpy (override_name, name_str, str_len);
+ name_bytes = hb_bytes_t (override_name, str_len);
+ }
+ input->name_table_overrides->set (hb_ot_name_record_ids_t (platform_id, encoding_id, language_id, name_id), name_bytes);
+ return true;
+}
+
#endif
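(Illustrative sketch, not part of the patch: a possible call to the experimental name
table override, assuming HB_EXPERIMENTAL_API is defined and an hb_subset_input_t *input
already exists. The record IDs and the string are placeholder values.)

    /* platform 3 (Windows), encoding 1 (Unicode BMP), language 0x0409 (en-US) */
    hb_subset_input_override_name_table (input, HB_OT_NAME_ID_FONT_FAMILY,
                                         3, 1, 0x409,
                                         "Custom Family", -1);
    /* Passing a NULL name_str instead marks the record for removal. */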
diff --git a/thirdparty/harfbuzz/src/hb-subset-input.hh b/thirdparty/harfbuzz/src/hb-subset-input.hh
index 2335f0634f..ecd47b7abf 100644
--- a/thirdparty/harfbuzz/src/hb-subset-input.hh
+++ b/thirdparty/harfbuzz/src/hb-subset-input.hh
@@ -36,6 +36,48 @@
#include "hb-font.hh"
+struct hb_ot_name_record_ids_t
+{
+ hb_ot_name_record_ids_t () = default;
+ hb_ot_name_record_ids_t (unsigned platform_id_,
+ unsigned encoding_id_,
+ unsigned language_id_,
+ unsigned name_id_)
+ :platform_id (platform_id_),
+ encoding_id (encoding_id_),
+ language_id (language_id_),
+ name_id (name_id_) {}
+
+ bool operator != (const hb_ot_name_record_ids_t o) const
+ { return !(*this == o); }
+
+ inline bool operator == (const hb_ot_name_record_ids_t& o) const
+ {
+ return platform_id == o.platform_id &&
+ encoding_id == o.encoding_id &&
+ language_id == o.language_id &&
+ name_id == o.name_id;
+ }
+
+ inline uint32_t hash () const
+ {
+ uint32_t current = 0;
+ current = current * 31 + hb_hash (platform_id);
+ current = current * 31 + hb_hash (encoding_id);
+ current = current * 31 + hb_hash (language_id);
+ current = current * 31 + hb_hash (name_id);
+ return current;
+ }
+
+ unsigned platform_id;
+ unsigned encoding_id;
+ unsigned language_id;
+ unsigned name_id;
+};
+
+typedef struct hb_ot_name_record_ids_t hb_ot_name_record_ids_t;
+
+
HB_MARK_AS_FLAG_T (hb_subset_flags_t);
struct hb_subset_input_t
@@ -59,7 +101,15 @@ struct hb_subset_input_t
};
unsigned flags;
+ bool attach_accelerator_data = false;
+
+ // If set, the loca format will always be the long version.
+ bool force_long_loca = false;
+
hb_hashmap_t<hb_tag_t, float> *axes_location;
+#ifdef HB_EXPERIMENTAL_API
+ hb_hashmap_t<hb_ot_name_record_ids_t, hb_bytes_t> *name_table_overrides;
+#endif
inline unsigned num_sets () const
{
@@ -79,7 +129,11 @@ struct hb_subset_input_t
return true;
}
- return axes_location->in_error ();
+ return axes_location->in_error ()
+#ifdef HB_EXPERIMENTAL_API
+ || name_table_overrides->in_error ()
+#endif
+ ;
}
};
diff --git a/thirdparty/harfbuzz/src/hb-subset-plan.cc b/thirdparty/harfbuzz/src/hb-subset-plan.cc
index 079ab8bf99..71bc908ffb 100644
--- a/thirdparty/harfbuzz/src/hb-subset-plan.cc
+++ b/thirdparty/harfbuzz/src/hb-subset-plan.cc
@@ -25,7 +25,9 @@
*/
#include "hb-subset-plan.hh"
+#include "hb-subset-accelerator.hh"
#include "hb-map.hh"
+#include "hb-multimap.hh"
#include "hb-set.hh"
#include "hb-ot-cmap-table.hh"
@@ -46,7 +48,7 @@ using OT::Layout::GPOS;
typedef hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> script_langsys_map;
#ifndef HB_NO_SUBSET_CFF
-static inline void
+static inline bool
_add_cff_seac_components (const OT::cff1::accelerator_t &cff,
hb_codepoint_t gid,
hb_set_t *gids_to_retain)
@@ -56,7 +58,9 @@ _add_cff_seac_components (const OT::cff1::accelerator_t &cff,
{
gids_to_retain->add (base_gid);
gids_to_retain->add (accent_gid);
+ return true;
}
+ return false;
}
#endif
@@ -81,10 +85,8 @@ static void
_remap_indexes (const hb_set_t *indexes,
hb_map_t *mapping /* OUT */)
{
- unsigned count = indexes->get_population ();
-
- for (auto _ : + hb_zip (indexes->iter (), hb_range (count)))
- mapping->set (_.first, _.second);
+ for (auto _ : + hb_enumerate (indexes->iter ()))
+ mapping->set (_.second, _.first);
}
@@ -129,7 +131,9 @@ template <typename T>
static void _collect_layout_indices (hb_subset_plan_t *plan,
const T& table,
hb_set_t *lookup_indices, /* OUT */
- hb_set_t *feature_indices /* OUT */)
+ hb_set_t *feature_indices, /* OUT */
+ hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map, /* OUT */
+ hb_hashmap_t<unsigned, const OT::Feature*> *feature_substitutes_map /* OUT */)
{
unsigned num_features = table.get_feature_count ();
hb_vector_t<hb_tag_t> features;
@@ -154,16 +158,37 @@ static void _collect_layout_indices (hb_subset_plan_t *plan,
retain_all_features ? nullptr : features.arrayZ,
feature_indices);
+#ifndef HB_NO_VAR
+ // collect feature substitutes with variations
+ if (!plan->user_axes_location->is_empty ())
+ {
+ hb_hashmap_t<hb::shared_ptr<hb_map_t>, unsigned> conditionset_map;
+ OT::hb_collect_feature_substitutes_with_var_context_t c =
+ {
+ plan->axes_old_index_tag_map,
+ plan->axes_location,
+ feature_record_cond_idx_map,
+ feature_substitutes_map,
+ feature_indices,
+ true,
+ 0,
+ &conditionset_map
+ };
+ table.collect_feature_substitutes_with_variations (&c);
+ }
+#endif
+
for (unsigned feature_index : *feature_indices)
{
- //TODO: replace HB_OT_LAYOUT_NO_VARIATIONS_INDEX with variation_index for
- //instancing
- const OT::Feature &f = table.get_feature_variation (feature_index, HB_OT_LAYOUT_NO_VARIATIONS_INDEX);
- f.add_lookup_indexes_to (lookup_indices);
+ const OT::Feature* f = &(table.get_feature (feature_index));
+ const OT::Feature **p = nullptr;
+ if (feature_substitutes_map->has (feature_index, &p))
+ f = *p;
+
+ f->add_lookup_indexes_to (lookup_indices);
}
- //TODO: update for instancing: only collect lookups from feature_indexes that have no variations
- table.feature_variation_collect_lookups (feature_indices, lookup_indices);
+ table.feature_variation_collect_lookups (feature_indices, feature_substitutes_map, lookup_indices);
}
@@ -171,6 +196,7 @@ static inline void
_GSUBGPOS_find_duplicate_features (const OT::GSUBGPOS &g,
const hb_map_t *lookup_indices,
const hb_set_t *feature_indices,
+ const hb_hashmap_t<unsigned, const OT::Feature*> *feature_substitutes_map,
hb_map_t *duplicate_feature_map /* OUT */)
{
if (feature_indices->is_empty ()) return;
@@ -195,16 +221,22 @@ _GSUBGPOS_find_duplicate_features (const OT::GSUBGPOS &g,
hb_set_t* same_tag_features = unique_features.get (t);
for (unsigned other_f_index : same_tag_features->iter ())
{
- const OT::Feature& f = g.get_feature (i);
- const OT::Feature& other_f = g.get_feature (other_f_index);
+ const OT::Feature* f = &(g.get_feature (i));
+ const OT::Feature **p = nullptr;
+ if (feature_substitutes_map->has (i, &p))
+ f = *p;
+
+ const OT::Feature* other_f = &(g.get_feature (other_f_index));
+ if (feature_substitutes_map->has (other_f_index, &p))
+ other_f = *p;
auto f_iter =
- + hb_iter (f.lookupIndex)
+ + hb_iter (f->lookupIndex)
| hb_filter (lookup_indices)
;
auto other_f_iter =
- + hb_iter (other_f.lookupIndex)
+ + hb_iter (other_f->lookupIndex)
| hb_filter (lookup_indices)
;
@@ -237,7 +269,9 @@ _closure_glyphs_lookups_features (hb_subset_plan_t *plan,
hb_set_t *gids_to_retain,
hb_map_t *lookups,
hb_map_t *features,
- script_langsys_map *langsys_map)
+ script_langsys_map *langsys_map,
+ hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map,
+ hb_hashmap_t<unsigned, const OT::Feature*> *feature_substitutes_map)
{
hb_blob_ptr_t<T> table = plan->source_table<T> ();
hb_tag_t table_tag = table->tableTag;
@@ -245,7 +279,9 @@ _closure_glyphs_lookups_features (hb_subset_plan_t *plan,
_collect_layout_indices<T> (plan,
*table,
&lookup_indices,
- &feature_indices);
+ &feature_indices,
+ feature_record_cond_idx_map,
+ feature_substitutes_map);
if (table_tag == HB_OT_TAG_GSUB)
hb_ot_layout_lookups_substitute_closure (plan->source,
@@ -257,9 +293,12 @@ _closure_glyphs_lookups_features (hb_subset_plan_t *plan,
_remap_indexes (&lookup_indices, lookups);
// prune features
- table->prune_features (lookups, &feature_indices);
+ table->prune_features (lookups,
+ plan->user_axes_location->is_empty () ? nullptr : feature_record_cond_idx_map,
+ feature_substitutes_map,
+ &feature_indices);
hb_map_t duplicate_feature_map;
- _GSUBGPOS_find_duplicate_features (*table, lookups, &feature_indices, &duplicate_feature_map);
+ _GSUBGPOS_find_duplicate_features (*table, lookups, &feature_indices, feature_substitutes_map, &duplicate_feature_map);
feature_indices.clear ();
table->prune_langsys (&duplicate_feature_map, plan->layout_scripts, langsys_map, &feature_indices);
@@ -306,7 +345,9 @@ _get_hb_font_with_variations (const hb_subset_plan_t *plan)
vars.push (var);
}
+#ifndef HB_NO_VAR
hb_font_set_variations (font, vars.arrayZ, plan->user_axes_location->get_population ());
+#endif
return font;
}
@@ -419,46 +460,108 @@ _populate_unicodes_to_retain (const hb_set_t *unicodes,
hb_subset_plan_t *plan)
{
OT::cmap::accelerator_t cmap (plan->source);
-
unsigned size_threshold = plan->source->get_num_glyphs ();
if (glyphs->is_empty () && unicodes->get_population () < size_threshold)
{
+
+ const hb_map_t* unicode_to_gid = nullptr;
+ if (plan->accelerator)
+ unicode_to_gid = &plan->accelerator->unicode_to_gid;
+
// This approach to collection is faster, but can only be used if glyphs
// are not being explicitly added to the subset and the input unicodes set is
// not excessively large (e.g. an inverted set).
plan->unicode_to_new_gid_list.alloc (unicodes->get_population ());
- for (hb_codepoint_t cp : *unicodes)
- {
- hb_codepoint_t gid;
- if (!cmap.get_nominal_glyph (cp, &gid))
+ if (!unicode_to_gid) {
+ for (hb_codepoint_t cp : *unicodes)
{
- DEBUG_MSG(SUBSET, nullptr, "Drop U+%04X; no gid", cp);
- continue;
+ hb_codepoint_t gid;
+ if (!cmap.get_nominal_glyph (cp, &gid))
+ {
+ DEBUG_MSG(SUBSET, nullptr, "Drop U+%04X; no gid", cp);
+ continue;
+ }
+
+ plan->codepoint_to_glyph->set (cp, gid);
+ plan->unicode_to_new_gid_list.push (hb_pair (cp, gid));
+ }
+ } else {
+ // Use the in-memory unicode-to-gid map; it's faster than looking each
+ // codepoint up in the cmap. This code is mostly duplicated from above to
+ // avoid doing conditionals on the presence of the unicode_to_gid map each
+ // iteration.
+ for (hb_codepoint_t cp : *unicodes)
+ {
+ hb_codepoint_t gid = unicode_to_gid->get (cp);
+ if (gid == HB_MAP_VALUE_INVALID)
+ {
+ DEBUG_MSG(SUBSET, nullptr, "Drop U+%04X; no gid", cp);
+ continue;
+ }
+
+ plan->codepoint_to_glyph->set (cp, gid);
+ plan->unicode_to_new_gid_list.push (hb_pair (cp, gid));
}
-
- plan->codepoint_to_glyph->set (cp, gid);
- plan->unicode_to_new_gid_list.push (hb_pair (cp, gid));
}
}
else
{
// This approach is slower, but can handle adding in glyphs to the subset and will match
// them with cmap entries.
- hb_map_t unicode_glyphid_map;
- hb_set_t cmap_unicodes;
- cmap.collect_mapping (&cmap_unicodes, &unicode_glyphid_map);
- plan->unicode_to_new_gid_list.alloc (hb_min(unicodes->get_population ()
- + glyphs->get_population (),
- cmap_unicodes.get_population ()));
-
- for (hb_codepoint_t cp : cmap_unicodes)
+
+ hb_map_t unicode_glyphid_map_storage;
+ hb_set_t cmap_unicodes_storage;
+ const hb_map_t* unicode_glyphid_map = &unicode_glyphid_map_storage;
+ const hb_set_t* cmap_unicodes = &cmap_unicodes_storage;
+
+ if (!plan->accelerator) {
+ cmap.collect_mapping (&cmap_unicodes_storage, &unicode_glyphid_map_storage);
+ plan->unicode_to_new_gid_list.alloc (hb_min(unicodes->get_population ()
+ + glyphs->get_population (),
+ cmap_unicodes->get_population ()));
+ } else {
+ unicode_glyphid_map = &plan->accelerator->unicode_to_gid;
+ cmap_unicodes = &plan->accelerator->unicodes;
+ }
+
+ if (plan->accelerator &&
+ unicodes->get_population () < cmap_unicodes->get_population () &&
+ glyphs->get_population () < cmap_unicodes->get_population ())
+ {
+ auto &gid_to_unicodes = plan->accelerator->gid_to_unicodes;
+ for (hb_codepoint_t gid : *glyphs)
+ {
+ auto unicodes = gid_to_unicodes.get (gid);
+
+ for (hb_codepoint_t cp : unicodes)
+ {
+ plan->codepoint_to_glyph->set (cp, gid);
+ plan->unicode_to_new_gid_list.push (hb_pair (cp, gid));
+ }
+ }
+ for (hb_codepoint_t cp : *unicodes)
+ {
+ /* Don't double-add entry. */
+ if (plan->codepoint_to_glyph->has (cp))
+ continue;
+
+ hb_codepoint_t gid = (*unicode_glyphid_map)[cp];
+ plan->codepoint_to_glyph->set (cp, gid);
+ plan->unicode_to_new_gid_list.push (hb_pair (cp, gid));
+ }
+ plan->unicode_to_new_gid_list.qsort ();
+ }
+ else
{
- hb_codepoint_t gid = unicode_glyphid_map[cp];
- if (!unicodes->has (cp) && !glyphs->has (gid))
- continue;
+ for (hb_codepoint_t cp : *cmap_unicodes)
+ {
+ hb_codepoint_t gid = (*unicode_glyphid_map)[cp];
+ if (!unicodes->has (cp) && !glyphs->has (gid))
+ continue;
- plan->codepoint_to_glyph->set (cp, gid);
- plan->unicode_to_new_gid_list.push (hb_pair (cp, gid));
+ plan->codepoint_to_glyph->set (cp, gid);
+ plan->unicode_to_new_gid_list.push (hb_pair (cp, gid));
+ }
}
/* Add gids which were requested, but not mapped in cmap */
@@ -509,9 +612,7 @@ _glyf_add_gid_and_children (const OT::glyf_accelerator_t &glyf,
static void
_populate_gids_to_retain (hb_subset_plan_t* plan,
- bool close_over_gsub,
- bool close_over_gpos,
- bool close_over_gdef)
+ hb_set_t* drop_tables)
{
OT::glyf_accelerator_t glyf (plan->source);
#ifndef HB_NO_SUBSET_CFF
@@ -523,32 +624,42 @@ _populate_gids_to_retain (hb_subset_plan_t* plan,
_cmap_closure (plan->source, plan->unicodes, plan->_glyphset_gsub);
#ifndef HB_NO_SUBSET_LAYOUT
- if (close_over_gsub)
+ if (!drop_tables->has (HB_OT_TAG_GSUB))
// closure all glyphs/lookups/features needed for GSUB substitutions.
_closure_glyphs_lookups_features<GSUB> (
plan,
plan->_glyphset_gsub,
plan->gsub_lookups,
plan->gsub_features,
- plan->gsub_langsys);
+ plan->gsub_langsys,
+ plan->gsub_feature_record_cond_idx_map,
+ plan->gsub_feature_substitutes_map);
- if (close_over_gpos)
+ if (!drop_tables->has (HB_OT_TAG_GPOS))
_closure_glyphs_lookups_features<GPOS> (
plan,
plan->_glyphset_gsub,
plan->gpos_lookups,
plan->gpos_features,
- plan->gpos_langsys);
+ plan->gpos_langsys,
+ plan->gpos_feature_record_cond_idx_map,
+ plan->gpos_feature_substitutes_map);
#endif
_remove_invalid_gids (plan->_glyphset_gsub, plan->source->get_num_glyphs ());
hb_set_set (plan->_glyphset_mathed, plan->_glyphset_gsub);
- _math_closure (plan, plan->_glyphset_mathed);
- _remove_invalid_gids (plan->_glyphset_mathed, plan->source->get_num_glyphs ());
+ if (!drop_tables->has (HB_OT_TAG_MATH))
+ {
+ _math_closure (plan, plan->_glyphset_mathed);
+ _remove_invalid_gids (plan->_glyphset_mathed, plan->source->get_num_glyphs ());
+ }
hb_set_t cur_glyphset = *plan->_glyphset_mathed;
- _colr_closure (plan->source, plan->colrv1_layers, plan->colr_palettes, &cur_glyphset);
- _remove_invalid_gids (&cur_glyphset, plan->source->get_num_glyphs ());
+ if (!drop_tables->has (HB_OT_TAG_COLR))
+ {
+ _colr_closure (plan->source, plan->colrv1_layers, plan->colr_palettes, &cur_glyphset);
+ _remove_invalid_gids (&cur_glyphset, plan->source->get_num_glyphs ());
+ }
hb_set_set (plan->_glyphset_colred, &cur_glyphset);
@@ -561,16 +672,22 @@ _populate_gids_to_retain (hb_subset_plan_t* plan,
else
plan->_glyphset->union_ (cur_glyphset);
#ifndef HB_NO_SUBSET_CFF
- if (cff.is_valid ())
- for (hb_codepoint_t gid : cur_glyphset)
- _add_cff_seac_components (cff, gid, plan->_glyphset);
+ if (!plan->accelerator || plan->accelerator->has_seac)
+ {
+ bool has_seac = false;
+ if (cff.is_valid ())
+ for (hb_codepoint_t gid : cur_glyphset)
+ if (_add_cff_seac_components (cff, gid, plan->_glyphset))
+ has_seac = true;
+ plan->has_seac = has_seac;
+ }
#endif
_remove_invalid_gids (plan->_glyphset, plan->source->get_num_glyphs ());
#ifndef HB_NO_VAR
- if (close_over_gdef)
+ if (!drop_tables->has (HB_OT_TAG_GDEF))
_collect_layout_variation_indices (plan);
#endif
}
@@ -659,18 +776,22 @@ _normalize_axes_location (hb_face_t *face, hb_subset_plan_t *plan)
seg_maps = face->table.avar->get_segment_maps ();
bool axis_not_pinned = false;
- unsigned axis_count = 0;
+ unsigned old_axis_idx = 0, new_axis_idx = 0;
for (const auto& axis : axes)
{
hb_tag_t axis_tag = axis.get_axis_tag ();
+ plan->axes_old_index_tag_map->set (old_axis_idx, axis_tag);
+
if (!plan->user_axes_location->has (axis_tag))
{
axis_not_pinned = true;
+ plan->axes_index_map->set (old_axis_idx, new_axis_idx);
+ new_axis_idx++;
}
else
{
int normalized_v = axis.normalize_axis_value (plan->user_axes_location->get (axis_tag));
- if (has_avar && axis_count < face->table.avar->get_axis_count ())
+ if (has_avar && old_axis_idx < face->table.avar->get_axis_count ())
{
normalized_v = seg_maps->map (normalized_v);
}
@@ -681,7 +802,7 @@ _normalize_axes_location (hb_face_t *face, hb_subset_plan_t *plan)
if (has_avar)
seg_maps = &StructAfter<OT::SegmentMaps> (*seg_maps);
- axis_count++;
+ old_axis_idx++;
}
plan->all_axes_pinned = !axis_not_pinned;
}
@@ -741,6 +862,13 @@ hb_subset_plan_create_or_fail (hb_face_t *face,
plan->gsub_features = hb_map_create ();
plan->gpos_features = hb_map_create ();
+
+ plan->check_success (plan->gsub_feature_record_cond_idx_map = hb_hashmap_create<unsigned, hb::shared_ptr<hb_set_t>> ());
+ plan->check_success (plan->gpos_feature_record_cond_idx_map = hb_hashmap_create<unsigned, hb::shared_ptr<hb_set_t>> ());
+
+ plan->check_success (plan->gsub_feature_substitutes_map = hb_hashmap_create<unsigned, const OT::Feature*> ());
+ plan->check_success (plan->gpos_feature_substitutes_map = hb_hashmap_create<unsigned, const OT::Feature*> ());
+
plan->colrv1_layers = hb_map_create ();
plan->colr_palettes = hb_map_create ();
plan->check_success (plan->layout_variation_idx_delta_map = hb_hashmap_create<unsigned, hb_pair_t<unsigned, int>> ());
@@ -751,12 +879,40 @@ hb_subset_plan_create_or_fail (hb_face_t *face,
plan->check_success (plan->user_axes_location = hb_hashmap_create<hb_tag_t, float> ());
if (plan->user_axes_location && input->axes_location)
*plan->user_axes_location = *input->axes_location;
+ plan->check_success (plan->axes_index_map = hb_map_create ());
+ plan->check_success (plan->axes_old_index_tag_map = hb_map_create ());
plan->all_axes_pinned = false;
plan->pinned_at_default = true;
plan->check_success (plan->vmtx_map = hb_hashmap_create<unsigned, hb_pair_t<unsigned, int>> ());
plan->check_success (plan->hmtx_map = hb_hashmap_create<unsigned, hb_pair_t<unsigned, int>> ());
+#ifdef HB_EXPERIMENTAL_API
+ plan->check_success (plan->name_table_overrides = hb_hashmap_create<hb_ot_name_record_ids_t, hb_bytes_t> ());
+ if (plan->name_table_overrides && input->name_table_overrides)
+ {
+ for (auto _ : *input->name_table_overrides)
+ {
+ hb_bytes_t name_bytes = _.second;
+ unsigned len = name_bytes.length;
+ char *name_str = (char *) hb_malloc (len);
+ if (unlikely (!plan->check_success (name_str)))
+ break;
+
+ hb_memcpy (name_str, name_bytes.arrayZ, len);
+ plan->name_table_overrides->set (_.first, hb_bytes_t (name_str, len));
+ }
+ }
+#endif
+
+ void* accel = hb_face_get_user_data(face, hb_subset_accelerator_t::user_data_key());
+
+ plan->attach_accelerator_data = input->attach_accelerator_data;
+ plan->force_long_loca = input->force_long_loca;
+ if (accel)
+ plan->accelerator = (hb_subset_accelerator_t*) accel;
+
+
if (unlikely (plan->in_error ())) {
hb_subset_plan_destroy (plan);
return nullptr;
@@ -768,10 +924,7 @@ hb_subset_plan_create_or_fail (hb_face_t *face,
_populate_unicodes_to_retain (input->sets.unicodes, input->sets.glyphs, plan);
- _populate_gids_to_retain (plan,
- !input->sets.drop_tables->has (HB_OT_TAG_GSUB),
- !input->sets.drop_tables->has (HB_OT_TAG_GPOS),
- !input->sets.drop_tables->has (HB_OT_TAG_GDEF));
+ _populate_gids_to_retain (plan, input->sets.drop_tables);
_create_old_gid_to_new_gid_map (face,
input->flags & HB_SUBSET_FLAGS_RETAIN_GIDS,
@@ -798,6 +951,28 @@ hb_subset_plan_create_or_fail (hb_face_t *face,
hb_subset_plan_destroy (plan);
return nullptr;
}
+
+
+ if (plan->attach_accelerator_data)
+ {
+ hb_multimap_t gid_to_unicodes;
+
+ hb_map_t &unicode_to_gid = *plan->codepoint_to_glyph;
+
+ for (auto unicode : *plan->unicodes)
+ {
+ auto gid = unicode_to_gid[unicode];
+ gid_to_unicodes.add (gid, unicode);
+ }
+
+ plan->inprogress_accelerator =
+ hb_subset_accelerator_t::create (*plan->codepoint_to_glyph,
+ gid_to_unicodes,
+ *plan->unicodes,
+ plan->has_seac);
+ }
+
+
return plan;
}
diff --git a/thirdparty/harfbuzz/src/hb-subset-plan.hh b/thirdparty/harfbuzz/src/hb-subset-plan.hh
index 98a45e5f6d..1b2aee7825 100644
--- a/thirdparty/harfbuzz/src/hb-subset-plan.hh
+++ b/thirdparty/harfbuzz/src/hb-subset-plan.hh
@@ -31,11 +31,16 @@
#include "hb-subset.h"
#include "hb-subset-input.hh"
+#include "hb-subset-accelerator.hh"
#include "hb-map.hh"
#include "hb-bimap.hh"
#include "hb-set.hh"
+namespace OT {
+struct Feature;
+}
+
struct hb_subset_plan_t
{
hb_subset_plan_t ()
@@ -67,15 +72,33 @@ struct hb_subset_plan_t
hb_map_destroy (gpos_features);
hb_map_destroy (colrv1_layers);
hb_map_destroy (colr_palettes);
+ hb_map_destroy (axes_index_map);
+ hb_map_destroy (axes_old_index_tag_map);
hb_hashmap_destroy (gsub_langsys);
hb_hashmap_destroy (gpos_langsys);
+ hb_hashmap_destroy (gsub_feature_record_cond_idx_map);
+ hb_hashmap_destroy (gpos_feature_record_cond_idx_map);
+ hb_hashmap_destroy (gsub_feature_substitutes_map);
+ hb_hashmap_destroy (gpos_feature_substitutes_map);
hb_hashmap_destroy (axes_location);
hb_hashmap_destroy (sanitized_table_cache);
hb_hashmap_destroy (hmtx_map);
hb_hashmap_destroy (vmtx_map);
hb_hashmap_destroy (layout_variation_idx_delta_map);
+#ifdef HB_EXPERIMENTAL_API
+ if (name_table_overrides)
+ {
+ for (auto _ : *name_table_overrides)
+ _.second.fini ();
+ }
+ hb_hashmap_destroy (name_table_overrides);
+#endif
+
+ if (inprogress_accelerator)
+ hb_subset_accelerator_t::destroy ((void*) inprogress_accelerator);
+
if (user_axes_location)
{
hb_object_destroy (user_axes_location);
@@ -87,10 +110,12 @@ struct hb_subset_plan_t
bool successful;
unsigned flags;
+ bool attach_accelerator_data = false;
+ bool force_long_loca = false;
// For each cp that we'd like to retain maps to the corresponding gid.
hb_set_t *unicodes;
- hb_vector_t<hb_pair_t<hb_codepoint_t, hb_codepoint_t>> unicode_to_new_gid_list;
+ hb_sorted_vector_t<hb_pair_t<hb_codepoint_t, hb_codepoint_t>> unicode_to_new_gid_list;
// name_ids we would like to retain
hb_set_t *name_ids;
@@ -143,6 +168,15 @@ struct hb_subset_plan_t
hb_map_t *gsub_features;
hb_map_t *gpos_features;
+ //active feature variation records/condition index with variations
+ hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *gsub_feature_record_cond_idx_map;
+ hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *gpos_feature_record_cond_idx_map;
+
+ //feature index -> address of substitution feature table mapping with
+ //variations
+ hb_hashmap_t<unsigned, const OT::Feature*> *gsub_feature_substitutes_map;
+ hb_hashmap_t<unsigned, const OT::Feature*> *gpos_feature_substitutes_map;
+
//active layers/palettes we'd like to retain
hb_map_t *colrv1_layers;
hb_map_t *colr_palettes;
@@ -158,31 +192,47 @@ struct hb_subset_plan_t
hb_hashmap_t<hb_tag_t, int> *axes_location;
//user specified axes location map
hb_hashmap_t<hb_tag_t, float> *user_axes_location;
+ //retained old axis index -> new axis index mapping in fvar axis array
+ hb_map_t *axes_index_map;
+ //axis_index->axis_tag mapping in fvar axis array
+ hb_map_t *axes_old_index_tag_map;
bool all_axes_pinned;
bool pinned_at_default;
+ bool has_seac;
//hmtx metrics map: new gid->(advance, lsb)
hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *hmtx_map;
//vmtx metrics map: new gid->(advance, lsb)
hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *vmtx_map;
+#ifdef HB_EXPERIMENTAL_API
+ // name table overrides map: hb_ot_name_record_ids_t -> new name string value, or
+ // an empty value to indicate the record should be removed
+ hb_hashmap_t<hb_ot_name_record_ids_t, hb_bytes_t> *name_table_overrides;
+#endif
+
+ const hb_subset_accelerator_t* accelerator;
+ hb_subset_accelerator_t* inprogress_accelerator;
+
public:
template<typename T>
hb_blob_ptr_t<T> source_table()
{
- if (sanitized_table_cache
- && !sanitized_table_cache->in_error ()
- && sanitized_table_cache->has (T::tableTag)) {
- return hb_blob_reference (sanitized_table_cache->get (T::tableTag).get ());
+ hb_lock_t lock (accelerator ? &accelerator->sanitized_table_cache_lock : nullptr);
+
+ auto *cache = accelerator ? &accelerator->sanitized_table_cache : sanitized_table_cache;
+ if (cache
+ && !cache->in_error ()
+ && cache->has (+T::tableTag)) {
+ return hb_blob_reference (cache->get (+T::tableTag).get ());
}
hb::unique_ptr<hb_blob_t> table_blob {hb_sanitize_context_t ().reference_table<T> (source)};
hb_blob_t* ret = hb_blob_reference (table_blob.get ());
- if (likely (sanitized_table_cache))
- sanitized_table_cache->set (T::tableTag,
- std::move (table_blob));
+ if (likely (cache))
+ cache->set (+T::tableTag, std::move (table_blob));
return ret;
}
diff --git a/thirdparty/harfbuzz/src/hb-subset.cc b/thirdparty/harfbuzz/src/hb-subset.cc
index 1a0bcbd1fe..186b12dbb8 100644
--- a/thirdparty/harfbuzz/src/hb-subset.cc
+++ b/thirdparty/harfbuzz/src/hb-subset.cc
@@ -50,11 +50,13 @@
#include "hb-ot-color-cbdt-table.hh"
#include "hb-ot-layout-gsub-table.hh"
#include "hb-ot-layout-gpos-table.hh"
+#include "hb-ot-var-fvar-table.hh"
#include "hb-ot-var-gvar-table.hh"
#include "hb-ot-var-hvar-table.hh"
#include "hb-ot-math-table.hh"
#include "hb-ot-stat-table.hh"
#include "hb-repacker.hh"
+#include "hb-subset-accelerator.hh"
using OT::Layout::GSUB;
using OT::Layout::GPOS;
@@ -80,6 +82,10 @@ using OT::Layout::GPOS;
* retain glyph ids option and configure the subset to pass through the layout tables untouched.
*/
+
+hb_user_data_key_t _hb_subset_accelerator_user_data_key = {};
+
+
/*
* The list of tables in the open type spec. Used to check for tables that may need handling
* if we are unable to list the tables in a face.
@@ -407,20 +413,14 @@ _passthrough (hb_subset_plan_t *plan, hb_tag_t tag)
static bool
_dependencies_satisfied (hb_subset_plan_t *plan, hb_tag_t tag,
- hb_set_t &visited_set, hb_set_t &revisit_set)
+ const hb_set_t &subsetted_tags,
+ const hb_set_t &pending_subset_tags)
{
switch (tag)
{
case HB_OT_TAG_hmtx:
case HB_OT_TAG_vmtx:
- if (!plan->pinned_at_default &&
- !visited_set.has (HB_OT_TAG_glyf))
- {
- revisit_set.add (tag);
- return false;
- }
- return true;
-
+ return plan->pinned_at_default || !pending_subset_tags.has (HB_OT_TAG_glyf);
default:
return true;
}
@@ -475,6 +475,9 @@ _subset_table (hb_subset_plan_t *plan,
case HB_OT_TAG_HVAR: return _subset<const OT::HVAR> (plan, buf);
case HB_OT_TAG_VVAR: return _subset<const OT::VVAR> (plan, buf);
#endif
+ case HB_OT_TAG_fvar:
+ if (plan->user_axes_location->is_empty ()) return _passthrough (plan, tag);
+ return _subset<const OT::fvar> (plan, buf);
case HB_OT_TAG_STAT:
/*TODO(qxliu): change the condition as we support more complex
* instancing operation*/
@@ -490,6 +493,34 @@ _subset_table (hb_subset_plan_t *plan,
}
}
+static void _attach_accelerator_data (hb_subset_plan_t* plan,
+ hb_face_t* face /* IN/OUT */)
+{
+ if (!plan->inprogress_accelerator) return;
+
+ // Transfer the accelerator from the plan to us.
+ hb_subset_accelerator_t* accel = plan->inprogress_accelerator;
+ plan->inprogress_accelerator = nullptr;
+
+ if (accel->in_error ())
+ {
+ hb_subset_accelerator_t::destroy (accel);
+ return;
+ }
+
+ // Populate caches that need access to the final tables.
+ hb_blob_ptr_t<OT::cmap> cmap_ptr (hb_sanitize_context_t ().reference_table<OT::cmap> (face));
+ accel->cmap_cache = OT::cmap::create_filled_cache (cmap_ptr);
+ accel->destroy_cmap_cache = OT::SubtableUnicodesCache::destroy;
+
+ if (!hb_face_set_user_data(face,
+ hb_subset_accelerator_t::user_data_key(),
+ accel,
+ hb_subset_accelerator_t::destroy,
+ true))
+ hb_subset_accelerator_t::destroy (accel);
+}
+
/**
* hb_subset_or_fail:
* @source: font face data to be subset.
@@ -535,41 +566,67 @@ hb_subset_plan_execute_or_fail (hb_subset_plan_t *plan)
return nullptr;
}
- hb_set_t tags_set, revisit_set;
- bool success = true;
hb_tag_t table_tags[32];
unsigned offset = 0, num_tables = ARRAY_LENGTH (table_tags);
- hb_vector_t<char> buf;
- buf.alloc (4096 - 16);
+ hb_set_t subsetted_tags, pending_subset_tags;
while (((void) _get_table_tags (plan, offset, &num_tables, table_tags), num_tables))
{
for (unsigned i = 0; i < num_tables; ++i)
{
hb_tag_t tag = table_tags[i];
- if (_should_drop_table (plan, tag) && !tags_set.has (tag)) continue;
- if (!_dependencies_satisfied (plan, tag, tags_set, revisit_set)) continue;
- tags_set.add (tag);
- success = _subset_table (plan, buf, tag);
- if (unlikely (!success)) goto end;
+ if (_should_drop_table (plan, tag)) continue;
+ pending_subset_tags.add (tag);
+ }
+
+ offset += num_tables;
+ }
+
+ hb_vector_t<char> buf;
+ buf.alloc (4096 - 16);
+
+
+ bool success = true;
+
+ while (!pending_subset_tags.is_empty ())
+ {
+ if (subsetted_tags.in_error ()
+ || pending_subset_tags.in_error ()) {
+ success = false;
+ goto end;
}
- /*delayed subsetting for some tables since they might have dependency on other tables in some cases:
- e.g: during instantiating glyf tables, hmetrics/vmetrics are updated and saved in subset plan,
- hmtx/vmtx subsetting need to use these updated metrics values*/
- while (!revisit_set.is_empty ())
+ bool made_changes = false;
+ for (hb_tag_t tag : pending_subset_tags)
{
- hb_set_t revisit_temp;
- for (hb_tag_t tag : revisit_set)
+ if (!_dependencies_satisfied (plan, tag,
+ subsetted_tags,
+ pending_subset_tags))
{
- if (!_dependencies_satisfied (plan, tag, tags_set, revisit_temp)) continue;
- tags_set.add (tag);
- success = _subset_table (plan, buf, tag);
- if (unlikely (!success)) goto end;
+ // Subsetting of some tables is delayed since they might depend on other tables in
+ // some cases: e.g. during instantiation of the glyf table, hmetrics/vmetrics are
+ // updated and saved in the subset plan, and hmtx/vmtx subsetting needs to use these
+ // updated metrics values.
+ continue;
}
- revisit_set = revisit_temp;
+
+ pending_subset_tags.del (tag);
+ subsetted_tags.add (tag);
+ made_changes = true;
+
+ success = _subset_table (plan, buf, tag);
+ if (unlikely (!success)) goto end;
}
- offset += num_tables;
+
+ if (!made_changes)
+ {
+ DEBUG_MSG (SUBSET, nullptr, "Table dependencies unable to be satisfied. Subset failed.");
+ success = false;
+ goto end;
+ }
+ }
+
+ if (success && plan->attach_accelerator_data) {
+ _attach_accelerator_data (plan, plan->dest);
}
end:
diff --git a/thirdparty/harfbuzz/src/hb-subset.h b/thirdparty/harfbuzz/src/hb-subset.h
index 08e52dbd2d..b83264ea5e 100644
--- a/thirdparty/harfbuzz/src/hb-subset.h
+++ b/thirdparty/harfbuzz/src/hb-subset.h
@@ -28,6 +28,7 @@
#define HB_SUBSET_H
#include "hb.h"
+#include "hb-ot.h"
HB_BEGIN_DECLS
@@ -70,6 +71,14 @@ typedef struct hb_subset_plan_t hb_subset_plan_t;
* in the final subset.
* @HB_SUBSET_FLAGS_NO_PRUNE_UNICODE_RANGES: If set then the unicode ranges in
* OS/2 will not be recalculated.
+ * @HB_SUBSET_FLAGS_PATCH_MODE: If set, the subsetter behaviour will be modified
+ * to produce a subset that is better suited to patching. For example, the cmap
+ * subtable format will be kept stable.
+ * @HB_SUBSET_FLAGS_OMIT_GLYF: If set, the subsetter won't actually produce the final
+ * glyf table bytes. The table directory will include an entry as if the table were
+ * there, but the actual final font blob will be truncated prior to the glyf data. This
+ * is a useful performance optimization when a font-aware binary patching algorithm
+ * is being used to diff two subsets.
*
* List of boolean properties that can be configured on the subset input.
*
@@ -86,6 +95,8 @@ typedef enum { /*< flags >*/
HB_SUBSET_FLAGS_NOTDEF_OUTLINE = 0x00000040u,
HB_SUBSET_FLAGS_GLYPH_NAMES = 0x00000080u,
HB_SUBSET_FLAGS_NO_PRUNE_UNICODE_RANGES = 0x00000100u,
+ // Not supported yet: HB_SUBSET_FLAGS_PATCH_MODE = 0x00000200u,
+ // Not supported yet: HB_SUBSET_FLAGS_OMIT_GLYF = 0x00000400u,
} hb_subset_flags_t;
/**
@@ -154,8 +165,6 @@ HB_EXTERN void
hb_subset_input_set_flags (hb_subset_input_t *input,
unsigned value);
-#ifdef HB_EXPERIMENTAL_API
-#ifndef HB_NO_VAR
HB_EXTERN hb_bool_t
hb_subset_input_pin_axis_to_default (hb_subset_input_t *input,
hb_face_t *face,
@@ -166,8 +175,21 @@ hb_subset_input_pin_axis_location (hb_subset_input_t *input,
hb_face_t *face,
hb_tag_t axis_tag,
float axis_value);
+
+#ifdef HB_EXPERIMENTAL_API
+HB_EXTERN hb_bool_t
+hb_subset_input_override_name_table (hb_subset_input_t *input,
+ hb_ot_name_id_t name_id,
+ unsigned platform_id,
+ unsigned encoding_id,
+ unsigned language_id,
+ const char *name_str,
+ int str_len);
+
#endif
-#endif
+
+HB_EXTERN hb_face_t *
+hb_subset_preprocess (hb_face_t *source);
HB_EXTERN hb_face_t *
hb_subset_or_fail (hb_face_t *source, const hb_subset_input_t *input);
diff --git a/thirdparty/harfbuzz/src/hb-ucd-table.hh b/thirdparty/harfbuzz/src/hb-ucd-table.hh
index a372a4afce..f7d76eee18 100644
--- a/thirdparty/harfbuzz/src/hb-ucd-table.hh
+++ b/thirdparty/harfbuzz/src/hb-ucd-table.hh
@@ -1069,7 +1069,7 @@ _hb_ucd_dm2_u64_map[388] =
#ifndef HB_OPTIMIZE_SIZE
static const uint8_t
-_hb_ucd_u8[18260] =
+_hb_ucd_u8[17868] =
{
0, 1, 2, 3, 4, 5, 6, 7, 7, 8, 7, 7, 7, 7, 7, 7,
7, 7, 7, 7, 9, 10, 7, 7, 7, 7, 11, 12, 13, 13, 13, 14,
@@ -1535,191 +1535,166 @@ _hb_ucd_u8[18260] =
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 37, 0, 0, 0, 0, 0, 0, 0,
0, 0, 38, 39, 0, 0, 0, 0, 0, 0, 40, 41, 42, 0, 43, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 4, 0, 0, 0, 0,
- 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 6, 7, 8, 0,
- 9, 0, 10, 11, 0, 0, 12, 13, 14, 15, 16, 0, 0, 0, 0, 17,
- 18, 19, 20, 0, 21, 0, 22, 23, 0, 24, 25, 0, 0, 24, 26, 27,
- 0, 24, 26, 0, 0, 24, 26, 0, 0, 24, 26, 0, 0, 0, 26, 0,
- 0, 24, 28, 0, 0, 24, 26, 0, 0, 29, 26, 0, 0, 0, 30, 0,
- 0, 31, 32, 0, 0, 33, 34, 0, 35, 36, 0, 37, 38, 0, 39, 0,
- 0, 40, 0, 0, 41, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 42, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 43, 44, 0, 0, 0, 0, 45, 0,
- 0, 0, 0, 0, 0, 46, 0, 0, 0, 47, 0, 0, 0, 0, 0, 0,
- 48, 0, 0, 49, 0, 50, 51, 0, 0, 52, 53, 54, 0, 55, 0, 56,
- 0, 57, 0, 0, 0, 0, 58, 59, 0, 0, 0, 0, 0, 0, 60, 61,
- 0, 0, 0, 0, 0, 0, 62, 63, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 64, 0, 0, 0, 65, 0, 0, 0, 66,
- 0, 67, 0, 0, 68, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 69, 70, 0, 0, 71, 0, 0, 0, 0, 0, 0, 0, 0,
- 72, 73, 0, 0, 0, 0, 53, 74, 0, 75, 76, 0, 0, 77, 78, 0,
- 0, 0, 0, 0, 0, 79, 80, 81, 0, 0, 0, 0, 0, 0, 0, 26,
- 0, 0, 0, 0, 0, 0, 0, 0, 82, 0, 0, 0, 0, 0, 0, 0,
- 0, 83, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 84,
- 0, 0, 0, 0, 0, 0, 0, 85, 0, 0, 0, 86, 0, 0, 0, 0,
- 87, 88, 0, 0, 0, 0, 0, 89, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 90, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 91, 0, 92, 0, 0, 93, 0, 94, 0, 0, 0,
- 0, 0, 72, 95, 0, 96, 0, 0, 97, 98, 0, 77, 0, 0, 99, 0,
- 0,100, 0, 0, 0, 0, 0,101, 0,102, 26,103, 0, 0, 0, 0,
- 0, 0,104, 0, 0, 0,105, 0, 0, 0, 0, 0, 0, 65,106, 0,
- 0, 65, 0, 0, 0,107, 0, 0, 0,108, 0, 0, 0, 0, 0, 0,
- 0, 96, 0, 0, 0, 0, 0, 0, 0,109,110, 0, 0, 0, 0, 78,
- 0, 44,111, 0,112, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 65, 0, 0, 0, 0, 0, 0, 0, 0,113, 0,114, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,115, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0,116, 0,117, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,118,
- 0, 0, 0, 0,119, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,120,121,122, 0, 0,
- 0, 0,123, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 124,125, 0, 0,126, 0, 0, 0, 0,117, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0,127, 0,128, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0,129, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0,130, 0, 0, 0,131, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 2, 3, 4,
- 5, 6, 7, 4, 4, 8, 9, 10, 1, 11, 12, 13, 14, 15, 16, 17,
- 18, 1, 1, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 20, 21, 22, 1, 23, 4, 21, 24, 25, 26, 27, 28,
- 29, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 31, 0,
- 0, 0, 32, 33, 34, 35, 1, 36, 0, 0, 0, 0, 37, 0, 0, 0,
- 0, 0, 0, 0, 0, 38, 1, 39, 14, 39, 40, 41, 0, 0, 0, 0,
- 0, 0, 0, 0, 42, 0, 0, 0, 0, 0, 0, 0, 43, 36, 44, 45,
- 21, 45, 46, 0, 0, 0, 0, 0, 0, 0, 19, 1, 21, 0, 0, 47,
- 0, 0, 0, 0, 0, 38, 48, 1, 1, 49, 49, 50, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 51, 0, 0, 0, 0, 0, 0, 0, 52, 1,
- 0, 0, 38, 14, 4, 1, 1, 1, 53, 21, 43, 52, 54, 21, 35, 1,
- 0, 0, 0, 0, 0, 0, 0, 55, 0, 0, 0, 56, 57, 58, 0, 0,
- 0, 0, 0, 56, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 59,
- 0, 0, 0, 56, 0, 60, 0, 0, 0, 0, 0, 0, 0, 0, 61, 62,
- 0, 0, 63, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 64, 0,
- 0, 0, 65, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 66, 0,
- 0, 0, 67, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 68, 0,
- 0, 0, 0, 0, 0, 69, 70, 0, 0, 0, 0, 0, 71, 72, 73, 74,
- 75, 76, 0, 0, 0, 0, 0, 0, 0, 77, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 78, 79, 0, 0, 0, 0, 47, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 49, 0, 0, 0, 0, 0, 80, 0, 0,
- 0, 0, 0, 0, 0, 62, 0, 0, 0, 0, 0, 0, 63, 0, 0, 81,
- 0, 0, 82, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 83, 0,
- 0, 0, 0, 0, 0, 19, 84, 0, 62, 0, 0, 0, 0, 49, 1, 85,
- 0, 0, 0, 0, 1, 52, 15, 86, 36, 10, 21, 87, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 55, 0, 0, 0, 62, 0, 0, 0, 0, 0, 0,
- 0, 0, 19, 10, 1, 0, 0, 0, 0, 0, 88, 0, 0, 0, 0, 0,
- 0, 89, 0, 0, 88, 0, 0, 0, 0, 0, 0, 0, 0, 78, 0, 0,
- 0, 0, 0, 0, 87, 9, 12, 4, 90, 8, 91, 47, 0, 58, 50, 0,
- 21, 1, 21, 92, 93, 1, 1, 1, 1, 1, 1, 1, 1, 94, 95, 96,
- 0, 0, 0, 0, 97, 1, 98, 58, 81, 99,100, 4, 58, 0, 0, 0,
- 0, 0, 0, 19, 50, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 61,
- 1, 1, 1, 1, 1, 1, 1, 1, 0, 0,101,102, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0,103, 0, 0, 0, 0, 19, 0, 1, 1, 50,
- 0, 0, 0, 0, 0, 0, 0, 38, 0, 0, 0, 0, 50, 0, 0, 0,
- 0, 63, 0, 0, 0, 0, 0, 0, 0, 0, 0, 62, 0, 0, 0, 0,
- 1, 1, 1, 1, 50, 0, 0, 0, 0, 0,104, 68, 0, 0, 0, 0,
- 0, 0, 0, 0, 61, 0, 0, 0, 0, 0, 0, 0, 78, 0, 0, 0,
- 62, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,105,106, 58, 38,
- 81, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 63, 0, 0,
- 0, 0, 0, 0, 0, 0, 0,107, 1, 14, 4, 12, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 47, 84, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 38, 87, 0, 0, 0, 0,108, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0,109, 61, 0,110, 0, 0, 0, 0, 0, 0,
- 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 19, 58, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 51, 0,111, 14, 52, 84, 0, 0, 0,
- 112, 41, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 62, 0, 0, 61,
- 0, 0, 0, 0, 0, 0,113, 0, 87, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 61, 62, 0, 0, 62, 0, 89, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0,113, 0, 0, 0, 0,114, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 78, 55, 0, 38, 1, 58, 1, 58, 0, 0,
- 63, 89, 0, 0, 0, 0, 0, 59,115, 0, 0, 0, 0, 0, 0, 0,
- 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,115, 0, 0,
- 0, 0, 61, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 79,
- 78, 0, 0, 0, 0, 0, 0, 0, 0, 61, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 56, 0, 89, 80, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 61, 0, 0, 79, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 8, 91, 0, 0, 0, 0, 0, 0, 1, 87, 0, 0,
- 0, 0, 0, 0,116, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,117,
- 0,118,119,120,121, 0,104, 4,122, 49, 23, 0, 0, 0, 0, 0,
- 0, 0, 38, 50, 0, 0, 0, 0, 38, 58, 0, 0, 0, 0, 0, 0,
- 1, 87, 1, 1, 1, 1, 39, 1, 48,105, 87, 0, 0, 0, 0, 0,
- 0, 0, 0, 19, 0, 0, 0, 0, 0, 0, 0, 59, 0, 0, 0, 0,
- 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0,123, 0, 0, 0, 0,
- 0, 0, 0, 0, 4,122, 0, 0, 0, 1,124, 0, 0, 0, 0, 0,
- 0, 0, 0, 0,230,230,230,230,230,232,220,220,220,220,232,216,
- 220,220,220,220,220,202,202,220,220,220,220,202,202,220,220,220,
- 1, 1, 1, 1, 1,220,220,220,220,230,230,230,230,240,230,220,
- 220,220,230,230,230,220,220, 0,230,230,230,220,220,220,220,230,
- 232,220,220,230,233,234,234,233,234,234,233,230, 0, 0, 0,230,
- 0,220,230,230,230,230,220,230,230,230,222,220,230,230,220,220,
- 230,222,228,230, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 19, 20,
- 21, 22, 0, 23, 0, 24, 25, 0,230,220, 0, 18, 30, 31, 32, 0,
- 0, 0, 0, 27, 28, 29, 30, 31, 32, 33, 34,230,230,220,220,230,
- 220,230,230,220, 35, 0, 0, 0, 0, 0,230,230,230, 0, 0,230,
- 230, 0,220,230,230,220, 0, 0, 0, 36, 0, 0,230,220,230,230,
- 220,220,230,220,220,230,220,230,220,230,230, 0, 0,220, 0, 0,
- 230,230, 0,230, 0,230,230,230,230,230, 0, 0, 0,220,220,220,
- 230,220,220,220,230,230, 0,220, 27, 28, 29,230, 7, 0, 0, 0,
- 0, 9, 0, 0, 0,230,220,230,230, 0, 0, 0, 0, 0,230, 0,
- 0, 84, 91, 0, 0, 0, 0, 9, 9, 0, 0, 0, 0, 0, 9, 0,
- 103,103, 9, 0,107,107,107,107,118,118, 9, 0,122,122,122,122,
- 220,220, 0, 0, 0,220, 0,220, 0,216, 0, 0, 0,129,130, 0,
- 132, 0, 0, 0, 0, 0,130,130,130,130, 0, 0,130, 0,230,230,
- 9, 0,230,230, 0, 0,220, 0, 0, 0, 0, 7, 0, 9, 9, 0,
- 9, 9, 0, 0, 0,230, 0, 0, 0,228, 0, 0, 0,222,230,220,
- 220, 0, 0, 0,230, 0, 0,220,230,220, 0,220,230,230,230, 0,
- 0, 0, 9, 9, 0, 0, 7, 0,230, 0, 1, 1, 1, 0, 0, 0,
- 230,234,214,220,202,230,230,230,230,230,232,228,228,220,218,230,
- 233,220,230,220,230,230, 1, 1, 1, 1, 1,230, 0, 1, 1,230,
- 220,230, 1, 1, 0, 0,218,228,232,222,224,224, 0, 8, 8, 0,
- 0, 0, 0,220,230, 0,230,230,220, 0, 0,230, 0, 0, 26, 0,
- 0,220, 0,230,230, 1,220, 0, 0,230,220, 0, 0, 0,220,220,
- 0, 0,230,220, 0, 9, 7, 0, 0, 7, 9, 0, 0, 0, 9, 7,
- 6, 6, 0, 0, 0, 0, 1, 0, 0,216,216, 1, 1, 1, 0, 0,
- 0,226,216,216,216,216,216, 0,220,220,220, 0,232,232,220,230,
- 230,230, 7, 0, 16, 17, 17, 17, 17, 17, 17, 33, 17, 17, 17, 19,
- 17, 17, 17, 17, 20,101, 17,113,129,169, 17, 27, 28, 17, 17, 17,
- 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 0, 0,
+ 0, 0, 3, 0, 0, 0, 4, 5, 6, 7, 0, 8, 9, 10, 0, 11,
+ 12, 13, 14, 15, 16, 17, 16, 18, 16, 19, 16, 19, 16, 19, 0, 19,
+ 16, 20, 16, 19, 21, 19, 0, 22, 23, 24, 25, 26, 27, 28, 29, 30,
+ 31, 0, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 33, 0, 0,
+ 0, 0, 0, 0, 34, 0, 0, 35, 0, 0, 36, 0, 37, 0, 0, 0,
+ 38, 39, 40, 41, 42, 43, 44, 45, 46, 0, 0, 47, 0, 0, 0, 48,
+ 0, 0, 0, 49, 0, 0, 0, 0, 0, 0, 0, 50, 0, 51, 0, 52,
+ 53, 0, 54, 0, 0, 0, 0, 0, 0, 55, 56, 57, 0, 0, 0, 0,
+ 58, 0, 0, 59, 60, 61, 62, 63, 0, 0, 64, 65, 0, 0, 0, 66,
+ 0, 0, 0, 0, 67, 0, 0, 0, 68, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 69, 0, 0, 0, 70, 0, 71, 0, 0,
+ 72, 0, 0, 73, 0, 0, 0, 0, 0, 0, 0, 0, 74, 0, 0, 0,
+ 0, 0, 75, 76, 0, 77, 78, 0, 0, 79, 80, 0, 81, 62, 0, 82,
+ 83, 0, 0, 84, 85, 86, 0, 0, 0, 87, 0, 88, 0, 0, 51, 89,
+ 51, 0, 90, 0, 91, 0, 0, 0, 80, 0, 0, 0, 92, 93, 0, 94,
+ 95, 96, 97, 0, 0, 0, 0, 0, 51, 0, 0, 0, 0, 98, 99, 0,
+ 0, 0, 0, 0, 0,100, 0, 0, 0, 0, 0,101,102, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0,103, 0, 0,104, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0,105,106, 0, 0,107, 0, 0, 0, 0, 0, 0,
+ 108, 0,109, 0,102, 0, 0, 0, 0, 0,110,111, 0, 0, 0, 0,
+ 0, 0, 0,112, 0, 0, 0, 0, 0, 0, 0,113, 0,114, 0, 0,
+ 0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 0, 8, 0, 0, 0,
+ 0, 9, 10, 11, 12, 0, 0, 0, 0, 13, 0, 0, 14, 15, 0, 16,
+ 0, 17, 18, 0, 0, 19, 0, 20, 21, 0, 0, 0, 0, 0, 22, 23,
+ 0, 24, 25, 0, 0, 26, 0, 0, 0, 27, 0, 0, 28, 29, 30, 31,
+ 0, 0, 0, 32, 33, 34, 0, 0, 33, 0, 0, 35, 33, 0, 0, 0,
+ 33, 36, 0, 0, 0, 0, 0, 37, 38, 0, 0, 0, 0, 0, 0, 39,
+ 40, 0, 0, 0, 0, 0, 0, 41, 42, 0, 0, 0, 0, 43, 0, 44,
+ 0, 0, 0, 45, 46, 0, 0, 0, 47, 0, 0, 0, 0, 0, 0, 48,
+ 49, 0, 0, 0, 0, 50, 0, 0, 0, 51, 0, 52, 0, 53, 0, 0,
+ 0, 0, 54, 0, 0, 0, 0, 55, 0, 56, 0, 0, 0, 0, 57, 58,
+ 0, 0, 0, 59, 60, 0, 0, 0, 0, 0, 0, 61, 52, 0, 62, 63,
+ 0, 0, 64, 0, 0, 0, 65, 66, 0, 0, 0, 67, 0, 68, 69, 70,
+ 71, 72, 1, 73, 0, 74, 75, 76, 0, 0, 77, 78, 0, 0, 0, 79,
+ 0, 0, 1, 1, 0, 0, 80, 0, 0, 81, 0, 0, 0, 0, 77, 82,
+ 0, 83, 0, 0, 0, 0, 0, 78, 84, 0, 85, 0, 52, 0, 1, 78,
+ 0, 0, 86, 0, 0, 87, 0, 0, 0, 0, 0, 88, 57, 0, 0, 0,
+ 0, 0, 0, 89, 90, 0, 0, 84, 0, 0, 33, 0, 0, 91, 0, 0,
+ 0, 0, 92, 0, 0, 0, 0, 49, 0, 0, 93, 0, 0, 0, 0, 94,
+ 95, 0, 0, 96, 0, 0, 97, 0, 0, 0, 98, 0, 0, 0, 99, 0,
+ 0, 0, 0,100,101, 93, 0, 0,102, 0, 0, 0, 84, 0, 0,103,
+ 0, 0, 0,104,105, 0, 0,106,107, 0, 0, 0, 0, 0, 0,108,
+ 0, 0,109, 0, 0, 0, 0,110, 33, 0,111,112,113, 35, 0, 0,
+ 114, 0, 0, 0,115, 0, 0, 0, 0, 0, 0,116, 0, 0,117, 0,
+ 0, 0, 0,118, 88, 0, 0, 0, 0, 0, 57, 0, 0, 0, 0, 52,
+ 119, 0, 0, 0, 0,120, 0, 0,121, 0, 0, 0, 0,119, 0, 0,
+ 122, 0, 0, 0, 0, 0, 0,123, 0, 0, 0,124, 0, 0, 0,125,
+ 0,126, 0, 0, 0, 0,127,128,129, 0,130, 0,131, 0, 0, 0,
+ 132,133,134, 0, 77, 0, 0, 0, 0, 0, 35, 0, 0, 0,135, 0,
+ 0, 0,136, 0, 0,137, 0, 0,138, 0, 0, 0, 0, 0, 0, 0,
+ 1, 1, 1, 1, 1, 2, 3, 4, 5, 6, 7, 4, 4, 8, 9, 10,
+ 1, 11, 12, 13, 14, 15, 16, 17, 18, 1, 1, 1, 19, 1, 0, 0,
+ 20, 21, 22, 1, 23, 4, 21, 24, 25, 26, 27, 28, 29, 30, 0, 0,
+ 1, 1, 31, 0, 0, 0, 32, 33, 34, 35, 1, 36, 37, 0, 0, 0,
+ 0, 38, 1, 39, 14, 39, 40, 41, 42, 0, 0, 0, 43, 36, 44, 45,
+ 21, 45, 46, 0, 0, 0, 19, 1, 21, 0, 0, 47, 0, 38, 48, 1,
+ 1, 49, 49, 50, 0, 0, 51, 0, 0, 0, 52, 1, 0, 0, 38, 14,
+ 4, 1, 1, 1, 53, 21, 43, 52, 54, 21, 35, 1, 0, 0, 0, 55,
+ 0, 0, 0, 56, 57, 58, 0, 0, 0, 0, 0, 59, 0, 60, 0, 0,
+ 0, 0, 61, 62, 0, 0, 63, 0, 0, 0, 64, 0, 0, 0, 65, 0,
+ 0, 0, 66, 0, 0, 0, 67, 0, 0, 0, 68, 0, 0, 69, 70, 0,
+ 71, 72, 73, 74, 75, 76, 0, 0, 0, 77, 0, 0, 0, 78, 79, 0,
+ 0, 0, 0, 47, 0, 0, 0, 49, 0, 80, 0, 0, 0, 62, 0, 0,
+ 63, 0, 0, 81, 0, 0, 82, 0, 0, 0, 83, 0, 0, 19, 84, 0,
+ 62, 0, 0, 0, 0, 49, 1, 85, 1, 52, 15, 86, 36, 10, 21, 87,
+ 0, 55, 0, 0, 0, 0, 19, 10, 1, 0, 0, 0, 0, 0, 88, 0,
+ 0, 89, 0, 0, 88, 0, 0, 0, 0, 78, 0, 0, 87, 9, 12, 4,
+ 90, 8, 91, 47, 0, 58, 50, 0, 21, 1, 21, 92, 93, 1, 1, 1,
+ 1, 94, 95, 96, 97, 1, 98, 58, 81, 99,100, 4, 58, 0, 0, 0,
+ 0, 0, 0, 19, 50, 0, 0, 0, 0, 0, 0, 61, 0, 0,101,102,
+ 0, 0,103, 0, 0, 1, 1, 50, 0, 0, 0, 38, 0, 63, 0, 0,
+ 0, 0, 0, 62, 0, 0,104, 68, 61, 0, 0, 0, 78, 0, 0, 0,
+ 105,106, 58, 38, 81, 0, 0, 0, 0, 0, 0,107, 1, 14, 4, 12,
+ 84, 0, 0, 0, 0, 38, 87, 0, 0, 0, 0,108, 0, 0,109, 61,
+ 0,110, 0, 0, 0, 1, 0, 0, 0, 0, 19, 58, 0, 0, 0, 51,
+ 0,111, 14, 52,112, 41, 0, 0, 62, 0, 0, 61, 0, 0,113, 0,
+ 87, 0, 0, 0, 61, 62, 0, 0, 62, 0, 89, 0, 0,113, 0, 0,
+ 0, 0,114, 0, 0, 0, 78, 55, 0, 38, 1, 58, 1, 58, 0, 0,
+ 63, 89, 0, 0,115, 0, 0, 0, 55, 0, 0, 0, 0,115, 0, 0,
+ 0, 0, 61, 0, 0, 0, 0, 79, 0, 61, 0, 0, 0, 0, 56, 0,
+ 89, 80, 0, 0, 79, 0, 0, 0, 8, 91, 0, 0, 1, 87, 0, 0,
+ 116, 0, 0, 0, 0, 0, 0,117, 0,118,119,120,121, 0,104, 4,
+ 122, 49, 23, 0, 0, 0, 38, 50, 38, 58, 0, 0, 1, 87, 1, 1,
+ 1, 1, 39, 1, 48,105, 87, 0, 0, 0, 0, 1, 0, 0, 0,123,
+ 4,122, 0, 0, 0, 1,124, 0, 0, 0, 0, 0,230,230,230,230,
+ 230,232,220,220,220,220,232,216,220,220,220,220,220,202,202,220,
+ 220,220,220,202,202,220,220,220, 1, 1, 1, 1, 1,220,220,220,
+ 220,230,230,230,230,240,230,220,220,220,230,230,230,220,220, 0,
+ 230,230,230,220,220,220,220,230,232,220,220,230,233,234,234,233,
+ 234,234,233,230, 0, 0, 0,230, 0,220,230,230,230,230,220,230,
+ 230,230,222,220,230,230,220,220,230,222,228,230, 10, 11, 12, 13,
+ 14, 15, 16, 17, 18, 19, 19, 20, 21, 22, 0, 23, 0, 24, 25, 0,
+ 230,220, 0, 18, 30, 31, 32, 0, 0, 0, 0, 27, 28, 29, 30, 31,
+ 32, 33, 34,230,230,220,220,230,220,230,230,220, 35, 0, 0, 0,
+ 0, 0,230,230,230, 0, 0,230,230, 0,220,230,230,220, 0, 0,
+ 0, 36, 0, 0,230,220,230,230,220,220,230,220,220,230,220,230,
+ 220,230,230, 0, 0,220, 0, 0,230,230, 0,230, 0,230,230,230,
+ 230,230, 0, 0, 0,220,220,220,230,220,220,220,230,230, 0,220,
+ 27, 28, 29,230, 7, 0, 0, 0, 0, 9, 0, 0, 0,230,220,230,
+ 230, 0, 0, 0, 0, 0,230, 0, 0, 84, 91, 0, 0, 0, 0, 9,
+ 9, 0, 0, 0, 0, 0, 9, 0,103,103, 9, 0,107,107,107,107,
+ 118,118, 9, 0,122,122,122,122,220,220, 0, 0, 0,220, 0,220,
+ 0,216, 0, 0, 0,129,130, 0,132, 0, 0, 0, 0, 0,130,130,
+ 130,130, 0, 0,130, 0,230,230, 9, 0,230,230, 0, 0,220, 0,
+ 0, 0, 0, 7, 0, 9, 9, 0, 9, 9, 0, 0, 0,230, 0, 0,
+ 0,228, 0, 0, 0,222,230,220,220, 0, 0, 0,230, 0, 0,220,
+ 230,220, 0,220,230,230,230, 0, 0, 0, 9, 9, 0, 0, 7, 0,
+ 230, 0, 1, 1, 1, 0, 0, 0,230,234,214,220,202,230,230,230,
+ 230,230,232,228,228,220,218,230,233,220,230,220,230,230, 1, 1,
+ 1, 1, 1,230, 0, 1, 1,230,220,230, 1, 1, 0, 0,218,228,
+ 232,222,224,224, 0, 8, 8, 0, 0, 0, 0,220,230, 0,230,230,
+ 220, 0, 0,230, 0, 0, 26, 0, 0,220, 0,230,230, 1,220, 0,
+ 0,230,220, 0, 0, 0,220,220, 0, 0,230,220, 0, 9, 7, 0,
+ 0, 7, 9, 0, 0, 0, 9, 7, 6, 6, 0, 0, 0, 0, 1, 0,
+ 0,216,216, 1, 1, 1, 0, 0, 0,226,216,216,216,216,216, 0,
+ 220,220,220, 0,232,232,220,230,230,230, 7, 0, 16, 17, 17, 17,
+ 17, 17, 17, 33, 17, 17, 17, 19, 17, 17, 17, 17, 20,101, 17,113,
+ 129,169, 17, 27, 28, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
- 17, 17, 17,237, 0, 1, 2, 2, 0, 3, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 5, 0, 0, 0, 0, 6, 7, 8, 9, 0, 0, 0, 10, 11, 12, 13,
- 14, 15, 16, 17, 18, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20,
- 0, 0, 21, 22, 0, 0, 0, 0, 23, 24, 25, 26, 0, 27, 0, 28,
- 29, 30, 31, 32, 0, 0, 0, 0, 0, 0, 0, 33, 34, 35, 36, 0,
- 0, 0, 0, 0, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 38, 39,
- 0, 0, 0, 0, 1, 2, 40, 41, 0, 1, 2, 2, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 2, 0, 0, 0, 0,
- 0, 0, 3, 4, 0, 0, 5, 0, 0, 0, 6, 0, 0, 0, 0, 0,
- 0, 0, 7, 1, 0, 0, 0, 0, 0, 0, 8, 9, 0, 0, 0, 0,
- 0, 0, 10, 0, 0, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 10, 0, 0, 0, 10, 0, 0, 0, 0, 0, 0, 11, 12,
- 0, 13, 0, 14, 15, 16, 0, 0, 0, 0, 0, 1, 17, 18, 0, 19,
- 7, 1, 0, 0, 0, 20, 20, 7, 20, 20, 20, 20, 20, 20, 20, 8,
- 21, 0, 22, 0, 7, 23, 24, 0, 20, 20, 25, 0, 0, 0, 26, 27,
- 1, 7, 20, 20, 20, 20, 20, 1, 28, 29, 30, 31, 0, 0, 20, 0,
- 0, 0, 0, 0, 0, 0, 10, 0, 0, 0, 0, 0, 0, 0, 20, 20,
- 20, 1, 0, 0, 8, 21, 32, 4, 0, 10, 0, 33, 7, 20, 20, 20,
- 0, 0, 0, 0, 8, 34, 34, 35, 36, 34, 37, 0, 38, 1, 20, 20,
- 0, 0, 39, 0, 1, 1, 0, 8, 21, 1, 20, 0, 0, 0, 1, 0,
- 0, 40, 1, 1, 0, 0, 8, 21, 0, 1, 0, 1, 0, 1, 0, 0,
- 0, 0, 26, 34, 34, 34, 34, 34, 34, 34, 34, 34, 21, 7, 20, 41,
- 34, 34, 34, 34, 34, 34, 34, 34, 34, 21, 0, 42, 43, 44, 0, 45,
- 0, 8, 21, 0, 0, 0, 0, 0, 0, 0, 0, 46, 7, 1, 10, 1,
- 0, 0, 0, 1, 20, 20, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 26, 34, 9, 0, 0, 20, 20, 1, 20, 20, 0, 0, 0, 0, 0,
- 0, 0, 26, 21, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 3, 47, 48, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3,
- 4, 5, 6, 7, 7, 8, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
- 9, 10, 11, 11, 11, 11, 12, 13, 13, 13, 13, 14, 15, 16, 17, 18,
- 19, 20, 21, 13, 22, 13, 13, 13, 13, 23, 24, 24, 25, 26, 13, 13,
- 13, 27, 28, 29, 13, 30, 31, 32, 33, 34, 35, 36, 7, 7, 7, 7,
- 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
- 37, 7, 38, 39, 7, 40, 7, 7, 7, 41, 13, 42, 7, 7, 43, 7,
- 44, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
+ 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,237, 0, 1, 2, 2,
+ 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 6, 7, 8,
+ 9, 0, 0, 0, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 20, 0, 0, 21, 22, 0, 0, 0, 0,
+ 23, 24, 25, 26, 0, 27, 0, 28, 29, 30, 31, 32, 0, 0, 0, 0,
+ 0, 0, 0, 33, 34, 35, 36, 0, 0, 0, 0, 0, 37, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 38, 39, 0, 0, 0, 0, 1, 2, 40, 41,
+ 0, 1, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0,
+ 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 3, 4, 0, 0, 5, 0,
+ 0, 0, 6, 0, 0, 0, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0,
+ 0, 0, 8, 9, 0, 0, 0, 0, 0, 0, 10, 0, 0, 10, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 0, 0, 0, 10,
+ 0, 0, 0, 0, 0, 0, 11, 12, 0, 13, 0, 14, 15, 16, 0, 0,
+ 0, 0, 0, 1, 17, 18, 0, 19, 7, 1, 0, 0, 0, 20, 20, 7,
+ 20, 20, 20, 20, 20, 20, 20, 8, 21, 0, 22, 0, 7, 23, 24, 0,
+ 20, 20, 25, 0, 0, 0, 26, 27, 1, 7, 20, 20, 20, 20, 20, 1,
+ 28, 29, 30, 31, 0, 0, 20, 0, 0, 0, 0, 0, 0, 0, 10, 0,
+ 0, 0, 0, 0, 0, 0, 20, 20, 20, 1, 0, 0, 8, 21, 32, 4,
+ 0, 10, 0, 33, 7, 20, 20, 20, 0, 0, 0, 0, 8, 34, 34, 35,
+ 36, 34, 37, 0, 38, 1, 20, 20, 0, 0, 39, 0, 1, 1, 0, 8,
+ 21, 1, 20, 0, 0, 0, 1, 0, 0, 40, 1, 1, 0, 0, 8, 21,
+ 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 26, 34, 34, 34, 34, 34,
+ 34, 34, 34, 34, 21, 7, 20, 41, 34, 34, 34, 34, 34, 34, 34, 34,
+ 34, 21, 0, 42, 43, 44, 0, 45, 0, 8, 21, 0, 0, 0, 0, 0,
+ 0, 0, 0, 46, 7, 1, 10, 1, 0, 0, 0, 1, 20, 20, 1, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 26, 34, 9, 0, 0, 20, 20,
+ 1, 20, 20, 0, 0, 0, 0, 0, 0, 0, 26, 21, 0, 1, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 47, 48, 0, 0, 0,
+ 0, 0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 7, 8, 7, 7,
+ 7, 7, 7, 7, 7, 7, 7, 7, 9, 10, 11, 11, 11, 11, 12, 13,
+ 13, 13, 13, 14, 15, 16, 17, 18, 19, 20, 21, 13, 22, 13, 13, 13,
+ 13, 23, 24, 24, 25, 26, 13, 13, 13, 27, 28, 29, 13, 30, 31, 32,
+ 33, 34, 35, 36, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
+ 7, 7, 7, 7, 7, 7, 7, 7, 37, 7, 38, 39, 7, 40, 7, 7,
+ 7, 41, 13, 42, 7, 7, 43, 7, 44, 13, 13, 13, 13, 13, 13, 13,
13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
@@ -1740,420 +1715,420 @@ _hb_ucd_u8[18260] =
13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
- 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 45, 0, 0, 1,
- 2, 2, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
- 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 32, 32, 33, 34, 35, 36, 37, 37, 37, 37, 37, 38, 39, 40, 41, 42,
- 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 2, 2, 53, 54, 55, 56,
- 57, 58, 59, 59, 59, 59, 60, 59, 59, 59, 59, 59, 59, 59, 61, 61,
- 59, 59, 59, 59, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
- 74, 75, 76, 77, 78, 59, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,
+ 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
+ 13, 13, 13, 13, 45, 0, 0, 1, 2, 2, 2, 3, 4, 5, 6, 7,
+ 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
+ 24, 25, 26, 27, 28, 29, 30, 31, 32, 32, 33, 34, 35, 36, 37, 37,
+ 37, 37, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50,
+ 51, 52, 2, 2, 53, 54, 55, 56, 57, 58, 59, 59, 59, 59, 60, 59,
+ 59, 59, 59, 59, 59, 59, 61, 61, 59, 59, 59, 59, 62, 63, 64, 65,
+ 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 59, 70, 70,
70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,
- 70, 70, 70, 70, 70, 70, 70, 70, 70, 79, 70, 70, 70, 70, 80, 80,
- 80, 80, 80, 80, 80, 80, 80, 81, 82, 82, 83, 84, 85, 86, 87, 88,
- 89, 90, 91, 92, 93, 94, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32,
+ 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,
+ 70, 79, 70, 70, 70, 70, 80, 80, 80, 80, 80, 80, 80, 80, 80, 81,
+ 82, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 32, 32,
32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32,
- 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 95, 96, 96,
- 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96,
- 70, 70, 97, 98, 99,100,101,101,102,103,104,105,106,107,108,109,
- 110,111, 96,112,113,114,115,116,117,118,119,119,120,121,122,123,
- 124,125,126,127,128,129,130,131,132, 96,133,134,135,136,137,138,
- 139,140,141,142,143, 96,144,145, 96,146,147,148,149, 96,150,151,
- 152,153,154,155,156, 96,157,158,159,160, 96,161,162,163,164,164,
- 164,164,164,164,164,165,166,164,167, 96, 96, 96, 96, 96, 96, 96,
- 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96,168,169,169,
- 169,169,169,169,169,169,170, 96, 96, 96, 96, 96, 96, 96, 96, 96,
- 96, 96, 96, 96, 96, 96,171,171,171,171,172, 96, 96, 96,173,173,
- 173,173,174,175,176,177, 96, 96, 96, 96,178,179,180,181,182,182,
+ 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32,
+ 32, 32, 32, 32, 32, 95, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96,
+ 96, 96, 96, 96, 96, 96, 96, 96, 70, 70, 97, 98, 99,100,101,101,
+ 102,103,104,105,106,107,108,109,110,111, 96,112,113,114,115,116,
+ 117,118,119,119,120,121,122,123,124,125,126,127,128,129,130,131,
+ 132, 96,133,134,135,136,137,138,139,140,141,142,143, 96,144,145,
+ 96,146,147,148,149, 96,150,151,152,153,154,155,156, 96,157,158,
+ 159,160, 96,161,162,163,164,164,164,164,164,164,164,165,166,164,
+ 167, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96,
+ 96, 96, 96, 96, 96,168,169,169,169,169,169,169,169,169,170, 96,
+ 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96,171,171,
+ 171,171,172, 96, 96, 96,173,173,173,173,174,175,176,177, 96, 96,
+ 96, 96,178,179,180,181,182,182,182,182,182,182,182,182,182,182,
182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,
- 182,182,182,182,182,182,182,182,182,182,182,182,182,183,182,182,
- 182,182,182,182,184,184,184,185,186, 96, 96, 96, 96, 96, 96, 96,
- 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96,187,188,189,
- 190,191,191,192, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96,
- 96, 96, 96, 96, 96, 96,193,194, 96, 96, 96, 96, 96, 96, 96, 96,
- 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96,195,196, 59,197,
- 198,199,200,201,202, 96,203,204,205, 59, 59,206, 59,207,208,208,
- 208,208,208,209, 96, 96, 96, 96, 96, 96, 96, 96,210, 96,211,212,
- 213, 96, 96,214, 96, 96, 96,215, 96, 96, 96, 96, 96,216,217,218,
- 219, 96, 96, 96, 96, 96,220,221,222, 96,223,224, 96, 96,225,226,
- 59,227,228, 96, 59, 59, 59, 59, 59, 59, 59,229,230,231,232,233,
- 59, 59,234,235, 59,236, 96, 96, 96, 96, 96, 96, 96, 96, 70, 70,
- 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,237, 70, 70, 70, 70,
- 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,238, 70,239, 70,
+ 182,182,182,182,182,183,182,182,182,182,182,182,184,184,184,185,
+ 186, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96,
+ 96, 96, 96, 96, 96,187,188,189,190,191,191,192, 96, 96, 96, 96,
+ 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96,193,194,
+ 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96,
+ 96, 96, 96, 96,195,196, 59,197,198,199,200,201,202, 96,203,204,
+ 205, 59, 59,206, 59,207,208,208,208,208,208,209, 96, 96, 96, 96,
+ 96, 96, 96, 96,210, 96,211,212,213, 96, 96,214, 96, 96, 96,215,
+ 96, 96, 96, 96, 96,216,217,218,219, 96, 96, 96, 96, 96,220,221,
+ 222, 96,223,224, 96, 96,225,226, 59,227,228, 96, 59, 59, 59, 59,
+ 59, 59, 59,229,230,231,232,233, 59, 59,234,235, 59,236, 96, 96,
+ 96, 96, 96, 96, 96, 96, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,
+ 70, 70, 70,237, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,
+ 70, 70, 70, 70,238, 70,239, 70, 70, 70, 70, 70, 70, 70, 70, 70,
70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,
- 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,240, 70, 70, 70, 70,
- 70, 70, 70, 70, 70,241, 96, 96, 96, 96, 96, 96, 96, 96, 70, 70,
- 70, 70,242, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 70, 70,
- 70, 70, 70, 70,243, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,
- 70, 70, 70, 70, 70,244, 96, 96, 96, 96, 96, 96, 96, 96,245, 96,
- 246,247, 0, 1, 2, 2, 0, 1, 2, 2, 2, 3, 4, 5, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 19, 19, 19, 19, 19, 19, 19, 19, 19,
- 19, 19, 19, 19, 19, 19, 19, 19, 19, 0, 0, 0, 0, 0, 0, 0,
- 19, 0, 0, 0, 0, 0, 19, 19, 19, 19, 19, 19, 19, 0, 19, 0,
- 0, 0, 0, 0, 0, 0, 19, 19, 19, 19, 19, 0, 0, 0, 0, 0,
- 26, 26, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 9, 9,
- 9, 9, 0, 9, 9, 9, 2, 2, 9, 9, 9, 9, 0, 9, 2, 2,
- 2, 2, 9, 0, 9, 0, 9, 9, 9, 2, 9, 2, 9, 9, 9, 9,
- 9, 9, 9, 9, 9, 9, 9, 9, 2, 9, 9, 9, 9, 9, 9, 9,
- 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 6, 6,
- 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 1, 1, 6, 2, 4,
- 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
- 4, 4, 4, 4, 4, 2, 4, 4, 4, 2, 2, 4, 4, 4, 2, 14,
- 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 2, 2,
- 2, 2, 2, 2, 2, 2, 14, 14, 14, 2, 2, 2, 2, 14, 14, 14,
- 14, 14, 14, 2, 2, 2, 3, 3, 3, 3, 3, 0, 3, 3, 3, 3,
- 3, 3, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
- 3, 0, 3, 3, 3, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3,
- 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 3, 1, 3,
- 3, 3, 3, 3, 3, 3, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37,
- 37, 37, 37, 37, 2, 37, 37, 37, 37, 2, 2, 37, 37, 37, 38, 38,
- 38, 38, 38, 38, 38, 38, 38, 38, 2, 2, 2, 2, 2, 2, 64, 64,
- 64, 64, 64, 64, 64, 64, 64, 64, 64, 2, 2, 64, 64, 64, 90, 90,
- 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 2, 2, 90, 90,
- 90, 90, 90, 90, 90, 2, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95,
- 95, 95, 2, 2, 95, 2, 37, 37, 37, 2, 2, 2, 2, 2, 3, 3,
- 3, 3, 3, 3, 3, 2, 3, 3, 2, 2, 2, 2, 2, 2, 3, 3,
- 0, 3, 3, 3, 3, 3, 7, 7, 7, 7, 7, 7, 7, 7, 7, 1,
- 1, 1, 1, 7, 7, 7, 7, 7, 7, 7, 0, 0, 7, 7, 5, 5,
- 5, 5, 2, 5, 5, 5, 5, 5, 5, 5, 5, 2, 2, 5, 5, 2,
- 2, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 2,
- 5, 5, 5, 5, 5, 5, 5, 2, 5, 2, 2, 2, 5, 5, 5, 5,
- 2, 2, 5, 5, 5, 5, 5, 2, 2, 5, 5, 5, 5, 2, 2, 2,
- 2, 2, 2, 2, 2, 5, 2, 2, 2, 2, 5, 5, 2, 5, 5, 5,
- 5, 5, 2, 2, 5, 5, 5, 5, 5, 5, 5, 5, 5, 2, 2, 11,
- 11, 11, 2, 11, 11, 11, 11, 11, 11, 2, 2, 2, 2, 11, 11, 2,
- 2, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 2,
- 11, 11, 11, 11, 11, 11, 11, 2, 11, 11, 2, 11, 11, 2, 11, 11,
- 2, 2, 11, 2, 11, 11, 11, 2, 2, 11, 11, 11, 2, 2, 2, 11,
- 2, 2, 2, 2, 2, 2, 2, 11, 11, 11, 11, 2, 11, 2, 2, 2,
- 2, 2, 2, 2, 11, 11, 11, 11, 11, 11, 11, 11, 11, 2, 2, 10,
- 10, 10, 2, 10, 10, 10, 10, 10, 10, 10, 10, 10, 2, 10, 10, 10,
- 2, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 2,
- 10, 10, 10, 10, 10, 10, 10, 2, 10, 10, 2, 10, 10, 10, 10, 10,
- 2, 2, 10, 10, 10, 10, 10, 10, 2, 10, 10, 10, 2, 2, 10, 2,
- 2, 2, 2, 2, 2, 2, 10, 10, 10, 10, 2, 2, 10, 10, 10, 10,
- 2, 2, 2, 2, 2, 2, 2, 10, 10, 10, 10, 10, 10, 10, 2, 21,
- 21, 21, 2, 21, 21, 21, 21, 21, 21, 21, 21, 2, 2, 21, 21, 2,
- 2, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 2,
- 21, 21, 21, 21, 21, 21, 21, 2, 21, 21, 2, 21, 21, 21, 21, 21,
- 2, 2, 21, 21, 21, 21, 21, 2, 2, 21, 21, 21, 2, 2, 2, 2,
- 2, 2, 2, 21, 21, 21, 2, 2, 2, 2, 21, 21, 2, 21, 21, 21,
- 21, 21, 2, 2, 21, 21, 2, 2, 22, 22, 2, 22, 22, 22, 22, 22,
- 22, 2, 2, 2, 22, 22, 22, 2, 22, 22, 22, 22, 2, 2, 2, 22,
- 22, 2, 22, 2, 22, 22, 2, 2, 2, 22, 22, 2, 2, 2, 22, 22,
- 22, 22, 22, 22, 22, 22, 22, 22, 2, 2, 2, 2, 22, 22, 22, 2,
- 2, 2, 2, 2, 2, 22, 2, 2, 2, 2, 2, 2, 22, 22, 22, 22,
- 22, 2, 2, 2, 2, 2, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23,
- 23, 23, 23, 2, 23, 23, 23, 2, 23, 23, 23, 23, 23, 23, 23, 23,
- 2, 2, 23, 23, 23, 23, 23, 2, 23, 23, 23, 23, 2, 2, 2, 2,
- 2, 2, 2, 23, 23, 2, 23, 23, 23, 2, 2, 23, 2, 2, 23, 23,
- 23, 23, 2, 2, 23, 23, 2, 2, 2, 2, 2, 2, 2, 23, 16, 16,
- 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 2, 16, 16, 16, 2,
- 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 2, 16, 16, 16, 16, 16,
- 2, 2, 16, 16, 16, 16, 16, 2, 16, 16, 16, 16, 2, 2, 2, 2,
- 2, 2, 2, 16, 16, 2, 16, 16, 16, 16, 2, 2, 16, 16, 2, 16,
- 16, 16, 2, 2, 2, 2, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
- 20, 20, 20, 2, 20, 20, 20, 2, 20, 20, 20, 20, 20, 20, 2, 2,
- 2, 2, 20, 20, 20, 20, 20, 20, 20, 20, 2, 2, 20, 20, 2, 36,
- 36, 36, 2, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36,
- 36, 36, 36, 36, 36, 2, 2, 2, 36, 36, 36, 36, 36, 36, 36, 36,
- 2, 36, 36, 36, 36, 36, 36, 36, 36, 36, 2, 36, 2, 2, 2, 2,
- 36, 2, 2, 2, 2, 36, 36, 36, 36, 36, 36, 2, 36, 2, 2, 2,
- 2, 2, 2, 2, 36, 36, 2, 2, 36, 36, 36, 2, 2, 2, 2, 24,
- 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
- 24, 2, 2, 2, 2, 0, 24, 24, 24, 24, 2, 2, 2, 2, 2, 18,
- 18, 2, 18, 2, 18, 18, 18, 18, 18, 2, 18, 18, 18, 18, 18, 18,
- 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 2, 18, 2, 18, 18, 18,
- 18, 18, 18, 18, 2, 2, 18, 18, 18, 18, 18, 2, 18, 2, 18, 18,
- 18, 18, 18, 18, 18, 2, 18, 18, 2, 2, 18, 18, 18, 18, 25, 25,
- 25, 25, 25, 25, 25, 25, 2, 25, 25, 25, 25, 25, 25, 25, 25, 25,
- 25, 25, 25, 2, 2, 2, 25, 25, 25, 25, 25, 2, 25, 25, 25, 25,
- 25, 25, 25, 0, 0, 0, 0, 25, 25, 2, 2, 2, 2, 2, 33, 33,
- 33, 33, 33, 33, 33, 33, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
- 8, 8, 8, 8, 2, 8, 2, 2, 2, 2, 2, 8, 2, 2, 8, 8,
- 8, 0, 8, 8, 8, 8, 12, 12, 12, 12, 12, 12, 12, 12, 30, 30,
- 30, 30, 30, 30, 30, 30, 30, 2, 30, 30, 30, 30, 2, 2, 30, 30,
- 30, 30, 30, 30, 30, 2, 30, 30, 30, 2, 2, 30, 30, 30, 30, 30,
- 30, 30, 30, 2, 2, 2, 30, 30, 2, 2, 2, 2, 2, 2, 29, 29,
- 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 2, 2, 28, 28,
- 28, 28, 28, 28, 28, 28, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
- 34, 34, 34, 2, 2, 2, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
- 35, 0, 0, 0, 35, 35, 35, 2, 2, 2, 2, 2, 2, 2, 45, 45,
- 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 45, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44,
- 44, 44, 44, 0, 0, 2, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43,
- 43, 43, 2, 2, 2, 2, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46,
- 46, 46, 46, 2, 46, 46, 46, 2, 46, 46, 2, 2, 2, 2, 31, 31,
- 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 2, 2, 31, 31,
- 2, 2, 2, 2, 2, 2, 32, 32, 0, 0, 32, 0, 32, 32, 32, 32,
- 32, 32, 32, 32, 32, 32, 32, 32, 2, 2, 2, 2, 2, 2, 32, 2,
- 2, 2, 2, 2, 2, 2, 32, 32, 32, 2, 2, 2, 2, 2, 28, 28,
- 28, 28, 28, 28, 2, 2, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48,
- 48, 48, 48, 48, 48, 2, 48, 48, 48, 48, 2, 2, 2, 2, 48, 2,
- 2, 2, 48, 48, 48, 48, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52,
- 52, 52, 52, 52, 2, 2, 52, 52, 52, 52, 52, 2, 2, 2, 58, 58,
- 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 2, 2, 2, 2, 58, 58,
- 2, 2, 2, 2, 2, 2, 58, 58, 58, 2, 2, 2, 58, 58, 54, 54,
- 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 2, 2, 54, 54, 91, 91,
- 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 2, 91, 91,
- 91, 91, 91, 2, 2, 91, 91, 91, 2, 2, 2, 2, 2, 2, 91, 91,
- 91, 91, 91, 91, 2, 2, 1, 1, 1, 1, 1, 1, 1, 2, 62, 62,
- 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 2, 2, 2, 62, 62,
- 62, 62, 62, 62, 62, 2, 76, 76, 76, 76, 76, 76, 76, 76, 93, 93,
- 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 2, 2, 2, 2, 2, 2,
- 2, 2, 93, 93, 93, 93, 70, 70, 70, 70, 70, 70, 70, 70, 2, 2,
- 2, 70, 70, 70, 70, 70, 70, 70, 2, 2, 2, 70, 70, 70, 73, 73,
- 73, 73, 73, 73, 73, 73, 6, 2, 2, 2, 2, 2, 2, 2, 8, 8,
- 8, 2, 2, 8, 8, 8, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0,
- 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0,
- 0, 0, 1, 0, 0, 0, 1, 1, 0, 2, 2, 2, 2, 2, 19, 19,
- 19, 19, 19, 19, 9, 9, 9, 9, 9, 6, 19, 19, 19, 19, 19, 19,
- 19, 19, 19, 9, 9, 9, 9, 9, 19, 19, 19, 19, 9, 9, 9, 9,
- 9, 19, 19, 19, 19, 19, 6, 19, 19, 19, 19, 19, 19, 19, 19, 19,
- 19, 19, 19, 19, 19, 9, 9, 9, 9, 9, 9, 9, 2, 2, 2, 9,
- 2, 9, 2, 9, 2, 9, 9, 9, 9, 9, 9, 2, 9, 9, 9, 9,
- 9, 9, 2, 2, 9, 9, 9, 9, 9, 9, 2, 9, 9, 9, 2, 2,
- 9, 9, 9, 2, 9, 9, 9, 9, 9, 9, 9, 9, 9, 2, 0, 0,
- 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 19,
- 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 0, 0,
- 0, 0, 0, 0, 0, 2, 19, 19, 19, 19, 19, 2, 2, 2, 0, 2,
- 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 0, 0,
- 0, 0, 0, 0, 9, 0, 0, 0, 19, 19, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 19, 0, 19, 0, 0, 0, 2, 2, 2, 2, 0, 0,
- 0, 2, 2, 2, 2, 2, 27, 27, 27, 27, 27, 27, 27, 27, 0, 0,
- 0, 0, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 56, 56,
- 56, 56, 56, 56, 56, 56, 55, 55, 55, 55, 2, 2, 2, 2, 2, 55,
- 55, 55, 55, 55, 55, 55, 61, 61, 61, 61, 61, 61, 61, 61, 2, 2,
- 2, 2, 2, 2, 2, 61, 61, 2, 2, 2, 2, 2, 2, 2, 0, 0,
- 0, 0, 0, 0, 2, 2, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
- 2, 13, 13, 13, 13, 13, 13, 13, 13, 13, 2, 2, 2, 2, 13, 13,
- 13, 13, 13, 13, 2, 2, 0, 0, 0, 0, 2, 2, 2, 2, 0, 0,
- 0, 0, 0, 13, 0, 13, 0, 13, 13, 13, 13, 13, 13, 13, 13, 13,
- 1, 1, 1, 1, 12, 12, 13, 13, 13, 13, 0, 0, 0, 0, 2, 15,
- 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
- 15, 15, 15, 15, 15, 2, 2, 1, 1, 0, 0, 15, 15, 15, 0, 17,
- 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
- 17, 0, 0, 17, 17, 17, 2, 2, 2, 2, 2, 26, 26, 26, 26, 26,
- 26, 26, 26, 26, 26, 26, 2, 12, 12, 12, 12, 12, 12, 12, 12, 12,
- 12, 12, 12, 12, 12, 2, 12, 12, 12, 12, 12, 12, 12, 0, 17, 17,
- 17, 17, 17, 17, 17, 0, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39,
- 39, 39, 39, 2, 2, 2, 39, 39, 39, 39, 39, 39, 39, 2, 86, 86,
- 86, 86, 86, 86, 86, 86, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77,
- 77, 77, 2, 2, 2, 2, 79, 79, 79, 79, 79, 79, 79, 79, 0, 0,
- 19, 19, 19, 19, 19, 19, 0, 0, 0, 19, 19, 19, 19, 19, 19, 19,
- 19, 2, 2, 2, 2, 2, 19, 19, 2, 19, 2, 19, 19, 19, 19, 19,
- 2, 2, 2, 2, 2, 2, 2, 2, 19, 19, 19, 19, 19, 19, 60, 60,
- 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 2, 2, 2, 0, 0,
- 2, 2, 2, 2, 2, 2, 65, 65, 65, 65, 65, 65, 65, 65, 75, 75,
- 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 2, 2, 2, 2,
- 2, 2, 2, 2, 75, 75, 75, 75, 2, 2, 2, 2, 2, 2, 69, 69,
- 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 0, 69, 74, 74,
- 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 74, 12, 12, 12, 12, 12, 2, 2, 2, 84, 84,
- 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 2, 0, 84, 84,
- 2, 2, 2, 2, 84, 84, 33, 33, 33, 33, 33, 33, 33, 2, 68, 68,
- 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 2, 68, 68,
- 68, 68, 68, 68, 2, 2, 68, 68, 2, 2, 68, 68, 68, 68, 92, 92,
- 92, 92, 92, 92, 92, 92, 92, 92, 92, 2, 2, 2, 2, 2, 2, 2,
- 2, 92, 92, 92, 92, 92, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87,
- 87, 87, 87, 87, 87, 2, 2, 30, 30, 30, 30, 30, 30, 2, 19, 19,
- 19, 0, 19, 19, 19, 19, 19, 19, 19, 19, 19, 9, 19, 19, 19, 19,
- 0, 0, 2, 2, 2, 2, 87, 87, 87, 87, 87, 87, 2, 2, 87, 87,
- 2, 2, 2, 2, 2, 2, 12, 12, 12, 12, 2, 2, 2, 2, 2, 2,
- 2, 12, 12, 12, 12, 12, 13, 13, 2, 2, 2, 2, 2, 2, 19, 19,
- 19, 19, 19, 19, 19, 2, 2, 2, 2, 4, 4, 4, 4, 4, 2, 2,
- 2, 2, 2, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 2, 14, 14,
- 14, 14, 14, 2, 14, 2, 14, 14, 2, 14, 14, 2, 14, 14, 3, 3,
- 3, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3,
- 3, 3, 3, 3, 0, 0, 2, 2, 3, 3, 3, 3, 3, 3, 2, 2,
- 2, 2, 2, 2, 2, 3, 1, 1, 1, 1, 1, 1, 6, 6, 0, 0,
- 0, 2, 0, 0, 0, 0, 3, 3, 3, 3, 3, 2, 3, 3, 3, 3,
- 3, 3, 3, 2, 2, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 17, 17, 17, 17, 17, 17, 17, 17, 0, 0, 2, 2,
- 12, 12, 12, 12, 12, 12, 2, 2, 12, 12, 12, 2, 2, 2, 2, 0,
- 0, 0, 0, 0, 2, 2, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49,
- 49, 49, 2, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 2, 49, 49,
- 49, 2, 49, 49, 2, 49, 49, 49, 49, 49, 49, 49, 2, 2, 49, 49,
- 49, 2, 2, 2, 2, 2, 0, 0, 0, 2, 2, 2, 2, 0, 0, 0,
- 0, 0, 2, 2, 2, 0, 0, 0, 0, 0, 0, 2, 2, 2, 9, 2,
- 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 1, 2, 2, 71, 71,
- 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 2, 2, 2, 67, 67,
- 67, 67, 67, 67, 67, 67, 67, 2, 2, 2, 2, 2, 2, 2, 1, 0,
- 0, 0, 0, 0, 0, 0, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42,
- 42, 42, 2, 2, 2, 2, 2, 2, 2, 2, 2, 42, 42, 42, 41, 41,
- 41, 41, 41, 41, 41, 41, 41, 41, 41, 2, 2, 2, 2, 2,118,118,
- 118,118,118,118,118,118,118,118,118, 2, 2, 2, 2, 2, 53, 53,
- 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 2, 53, 59, 59,
- 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 2, 2, 2, 2, 59, 59,
- 59, 59, 59, 59, 2, 2, 40, 40, 40, 40, 40, 40, 40, 40, 51, 51,
- 51, 51, 51, 51, 51, 51, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50,
- 50, 50, 50, 50, 2, 2, 50, 50, 2, 2, 2, 2, 2, 2,135,135,
- 135,135,135,135,135,135,135,135,135,135, 2, 2, 2, 2,106,106,
- 106,106,106,106,106,106,104,104,104,104,104,104,104,104,104,104,
- 104,104, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,104,161,161,
- 161,161,161,161,161,161,161,161,161, 2,161,161,161,161,161,161,
- 161, 2,161,161, 2,161,161,161, 2,161,161,161,161,161,161,161,
- 2,161,161, 2, 2, 2,110,110,110,110,110,110,110,110,110,110,
- 110,110,110,110,110, 2,110,110,110,110,110,110, 2, 2, 19, 19,
- 19, 19, 19, 19, 2, 19, 19, 2, 19, 19, 19, 19, 19, 19, 47, 47,
- 47, 47, 47, 47, 2, 2, 47, 2, 47, 47, 47, 47, 47, 47, 47, 47,
- 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 2, 47, 47, 2,
- 2, 2, 47, 2, 2, 47, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81,
- 81, 81, 81, 81, 2, 81,120,120,120,120,120,120,120,120,116,116,
- 116,116,116,116,116,116,116,116,116,116,116,116,116, 2, 2, 2,
- 2, 2, 2, 2, 2,116,128,128,128,128,128,128,128,128,128,128,
- 128, 2,128,128, 2, 2, 2, 2, 2,128,128,128,128,128, 66, 66,
- 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 2, 2, 2, 66, 72, 72,
- 72, 72, 72, 72, 72, 72, 72, 72, 2, 2, 2, 2, 2, 72, 98, 98,
- 98, 98, 98, 98, 98, 98, 97, 97, 97, 97, 97, 97, 97, 97, 2, 2,
- 2, 2, 97, 97, 97, 97, 2, 2, 97, 97, 97, 97, 97, 97, 57, 57,
- 57, 57, 2, 57, 57, 2, 2, 2, 2, 2, 57, 57, 57, 57, 57, 57,
- 57, 57, 2, 57, 57, 57, 2, 57, 57, 57, 57, 57, 57, 57, 57, 57,
- 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 2, 2, 57, 57,
- 57, 2, 2, 2, 2, 57, 57, 2, 2, 2, 2, 2, 2, 2, 88, 88,
- 88, 88, 88, 88, 88, 88,117,117,117,117,117,117,117,117,112,112,
- 112,112,112,112,112,112,112,112,112,112,112,112,112, 2, 2, 2,
- 2,112,112,112,112,112, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78,
- 78, 78, 78, 78, 2, 2, 2, 78, 78, 78, 78, 78, 78, 78, 83, 83,
- 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 2, 2, 82, 82,
- 82, 82, 82, 82, 82, 82, 82, 82, 82, 2, 2, 2, 2, 2,122,122,
- 122,122,122,122,122,122,122,122, 2, 2, 2, 2, 2, 2, 2,122,
- 122,122,122, 2, 2, 2, 2,122,122,122,122,122,122,122, 89, 89,
- 89, 89, 89, 89, 89, 89, 89, 2, 2, 2, 2, 2, 2, 2,130,130,
- 130,130,130,130,130,130,130,130,130, 2, 2, 2, 2, 2, 2, 2,
- 130,130,130,130,130,130,144,144,144,144,144,144,144,144,144,144,
- 2, 2, 2, 2, 2, 2,156,156,156,156,156,156,156,156,156,156,
- 2,156,156,156, 2, 2,156,156, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 3, 3, 3,147,147,147,147,147,147,147,147,148,148,
- 148,148,148,148,148,148,148,148, 2, 2, 2, 2, 2, 2,158,158,
- 158,158,158,158,158,158,158,158, 2, 2, 2, 2, 2, 2,153,153,
- 153,153,153,153,153,153,153,153,153,153, 2, 2, 2, 2,149,149,
- 149,149,149,149,149,149,149,149,149,149,149,149,149, 2, 94, 94,
- 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 2, 2, 2, 2,
- 94, 94, 94, 94, 94, 94, 2, 2, 2, 2, 2, 2, 2, 94, 85, 85,
- 85, 85, 85, 85, 85, 85, 85, 85, 85, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 85, 2, 2,101,101,101,101,101,101,101,101,101, 2,
- 2, 2, 2, 2, 2, 2,101,101, 2, 2, 2, 2, 2, 2, 96, 96,
- 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 2, 96, 96,111,111,
- 111,111,111,111,111,111,111,111,111,111,111,111,111, 2,100,100,
- 100,100,100,100,100,100, 2, 36, 36, 36, 36, 36, 36, 36, 36, 36,
- 36, 36, 36, 2, 2, 2,108,108,108,108,108,108,108,108,108,108,
- 2,108,108,108,108,108,108,108, 2, 2, 2, 2, 2, 2,129,129,
- 129,129,129,129,129, 2,129, 2,129,129,129,129, 2,129,129,129,
- 129,129,129,129,129,129,129,129,129,129,129,129, 2,129,129,129,
- 2, 2, 2, 2, 2, 2,109,109,109,109,109,109,109,109,109,109,
- 109, 2, 2, 2, 2, 2,109,109, 2, 2, 2, 2, 2, 2,107,107,
- 107,107, 2,107,107,107,107,107,107,107,107, 2, 2,107,107, 2,
- 2,107,107,107,107,107,107,107,107,107,107,107,107,107,107, 2,
- 107,107,107,107,107,107,107, 2,107,107, 2,107,107,107,107,107,
- 2, 1,107,107,107,107,107, 2, 2,107,107,107, 2, 2,107, 2,
- 2, 2, 2, 2, 2,107, 2, 2, 2, 2, 2,107,107,107,107,107,
- 107,107, 2, 2,107,107,107,107,107,107,107, 2, 2, 2,137,137,
- 137,137,137,137,137,137,137,137,137,137, 2,137,137,137,137,137,
- 2, 2, 2, 2, 2, 2,124,124,124,124,124,124,124,124,124,124,
- 2, 2, 2, 2, 2, 2,123,123,123,123,123,123,123,123,123,123,
- 123,123,123,123, 2, 2,114,114,114,114,114,114,114,114,114,114,
- 114,114,114, 2, 2, 2,114,114, 2, 2, 2, 2, 2, 2, 32, 32,
- 32, 32, 32, 2, 2, 2,102,102,102,102,102,102,102,102,102,102,
- 2, 2, 2, 2, 2, 2,126,126,126,126,126,126,126,126,126,126,
- 126, 2, 2,126,126,126,126,126,126,126, 2, 2, 2, 2,126,126,
- 126,126,126,126,126, 2,142,142,142,142,142,142,142,142,142,142,
- 142,142, 2, 2, 2, 2,125,125,125,125,125,125,125,125,125,125,
- 125, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,125,154,154,
- 154,154,154,154,154, 2, 2,154, 2, 2,154,154,154,154,154,154,
- 154,154, 2,154,154, 2,154,154,154,154,154,154,154,154,154,154,
- 154,154,154,154, 2,154,154, 2, 2,154,154,154,154,154,154,154,
- 2, 2, 2, 2, 2, 2,150,150,150,150,150,150,150,150, 2, 2,
- 150,150,150,150,150,150,150,150,150,150,150, 2, 2, 2,141,141,
- 141,141,141,141,141,141,140,140,140,140,140,140,140,140,140,140,
- 140, 2, 2, 2, 2, 2,121,121,121,121,121,121,121,121,121, 2,
- 2, 2, 2, 2, 2, 2, 7, 7, 2, 2, 2, 2, 2, 2,133,133,
- 133,133,133,133,133,133,133, 2,133,133,133,133,133,133,133,133,
- 133,133,133,133,133, 2,133,133,133,133,133,133, 2, 2,133,133,
- 133,133,133, 2, 2, 2,134,134,134,134,134,134,134,134, 2, 2,
- 134,134,134,134,134,134, 2,134,134,134,134,134,134,134,134,134,
- 134,134,134,134,134, 2,138,138,138,138,138,138,138, 2,138,138,
- 2,138,138,138,138,138,138,138,138,138,138,138,138,138, 2, 2,
- 138, 2,138,138, 2,138,138,138, 2, 2, 2, 2, 2, 2,143,143,
- 143,143,143,143, 2,143,143, 2,143,143,143,143,143,143,143,143,
- 143,143,143,143,143,143,143,143,143,143,143,143,143, 2,143,143,
- 2,143,143,143,143,143,143, 2, 2, 2, 2, 2, 2, 2,143,143,
- 2, 2, 2, 2, 2, 2,145,145,145,145,145,145,145,145,145, 2,
- 2, 2, 2, 2, 2, 2,163,163,163,163,163,163,163,163,163, 2,
- 163,163,163,163,163,163,163,163,163, 2, 2, 2,163,163,163,163,
- 2, 2, 2, 2, 2, 2, 86, 2, 2, 2, 2, 2, 2, 2, 22, 22,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 22, 63, 63,
- 63, 63, 63, 63, 63, 63, 63, 63, 2, 2, 2, 2, 2, 2, 63, 63,
- 63, 63, 63, 63, 63, 2, 63, 63, 63, 63, 63, 2, 2, 2, 63, 63,
- 63, 63, 2, 2, 2, 2,157,157,157,157,157,157,157,157,157,157,
- 157, 2, 2, 2, 2, 2, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80,
- 80, 80, 80, 80, 2, 2,127,127,127,127,127,127,127,127,127,127,
- 127,127,127,127,127, 2, 79, 2, 2, 2, 2, 2, 2, 2,115,115,
- 115,115,115,115,115,115,115,115,115,115,115,115,115, 2,115,115,
- 2, 2, 2, 2,115,115,159,159,159,159,159,159,159,159,159,159,
- 159,159,159,159,159, 2,159,159, 2, 2, 2, 2, 2, 2,103,103,
- 103,103,103,103,103,103,103,103,103,103,103,103, 2, 2,119,119,
- 119,119,119,119,119,119,119,119,119,119,119,119, 2, 2,119,119,
- 2,119,119,119,119,119, 2, 2, 2, 2, 2,119,119,119,146,146,
- 146,146,146,146,146,146,146,146,146, 2, 2, 2, 2, 2, 99, 99,
- 99, 99, 99, 99, 99, 99, 99, 99, 99, 2, 2, 2, 2, 99, 2, 2,
- 2, 2, 2, 2, 2, 99,136,139, 13, 13,155, 2, 2, 2,136,136,
- 136,136,136,136,136,136,155,155,155,155,155,155,155,155,155,155,
- 155,155,155,155, 2, 2,136, 2, 2, 2, 2, 2, 2, 2, 17, 17,
- 17, 17, 2, 17, 17, 17, 17, 17, 17, 17, 2, 17, 17, 2, 17, 15,
- 15, 15, 15, 15, 15, 15, 17, 17, 17, 2, 2, 2, 2, 2, 2, 2,
- 15, 2, 2, 2, 2, 2, 15, 15, 15, 2, 2, 17, 2, 2, 2, 2,
- 2, 2, 17, 17, 17, 17,139,139,139,139,139,139,139,139,139,139,
- 139,139, 2, 2, 2, 2,105,105,105,105,105,105,105,105,105,105,
- 105, 2, 2, 2, 2, 2,105,105,105,105,105, 2, 2, 2,105, 2,
- 2, 2, 2, 2, 2, 2,105,105, 2, 2,105,105,105,105, 1, 1,
- 1, 1, 1, 1, 2, 2, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1,
- 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0,
- 1, 1, 1, 1, 0, 0, 2, 2, 0, 2, 2, 0, 0, 2, 2, 0,
- 0, 0, 0, 2, 0, 0, 0, 0, 2, 0, 2, 0, 0, 0, 0, 0,
- 0, 0, 2, 0, 0, 0, 0, 0, 0, 2, 2, 0, 0, 0, 0, 0,
- 2, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 2, 0, 2, 2, 2,
- 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0,131,131,
- 131,131,131,131,131,131,131,131,131,131, 2, 2, 2, 2, 2, 2,
- 2,131,131,131,131,131, 2,131,131,131,131,131,131,131, 2, 2,
- 2, 2, 2, 19, 19, 19, 56, 56, 56, 56, 56, 56, 56, 2, 56, 2,
- 2, 56, 56, 56, 56, 56, 56, 56, 2, 56, 56, 2, 56, 56, 56, 56,
- 56, 2, 2, 2, 2, 2, 6, 6, 6, 6, 6, 6, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 6,151,151,151,151,151,151,151,151,151,151,
- 151,151,151, 2, 2, 2,151,151,151,151,151,151, 2, 2,151,151,
- 2, 2, 2, 2,151,151,160,160,160,160,160,160,160,160,160,160,
- 160,160,160,160,160, 2,152,152,152,152,152,152,152,152,152,152,
- 2, 2, 2, 2, 2,152,164,164,164,164,164,164,164,164,164,164,
- 2, 2, 2, 2, 2, 2, 30, 30, 30, 30, 2, 30, 30, 2,113,113,
- 113,113,113,113,113,113,113,113,113,113,113, 2, 2,113,113,113,
- 113,113,113,113,113, 2,132,132,132,132,132,132,132,132,132,132,
- 132,132, 2, 2, 2, 2,132,132, 2, 2, 2, 2,132,132, 3, 3,
- 3, 3, 2, 3, 3, 3, 2, 3, 3, 2, 3, 2, 2, 3, 2, 3,
- 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 3, 3, 3, 3, 2, 3,
- 2, 3, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 3, 2, 3,
- 2, 3, 2, 3, 3, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3,
- 3, 3, 3, 2, 3, 2, 3, 3, 2, 3, 3, 3, 3, 3, 3, 3,
- 3, 3, 2, 2, 2, 2, 2, 3, 3, 3, 2, 3, 3, 3, 2, 2,
- 2, 2, 2, 2, 0, 0, 15, 0, 0, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 0, 0, 0, 0, 2, 2, 2, 0, 0, 0, 0, 0, 13, 2,
- 2, 2, 2, 2, 2, 2, 13, 13, 13, 2, 2, 2, 2, 2, 2, 0,
- 2, 2, 2, 2, 2, 2, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
- 9, 9, 9, 10, 9, 11, 12, 13, 9, 9, 9, 14, 9, 9, 15, 9,
- 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
+ 70, 70, 70,240, 70, 70, 70, 70, 70, 70, 70, 70, 70,241, 96, 96,
+ 96, 96, 96, 96, 96, 96, 70, 70, 70, 70,242, 96, 96, 96, 96, 96,
+ 96, 96, 96, 96, 96, 96, 70, 70, 70, 70, 70, 70,243, 70, 70, 70,
+ 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,244, 96, 96,
+ 96, 96, 96, 96, 96, 96,245, 96,246,247, 0, 1, 2, 2, 0, 1,
+ 2, 2, 2, 3, 4, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19,
+ 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19,
+ 19, 0, 0, 0, 0, 0, 0, 0, 19, 0, 0, 0, 0, 0, 19, 19,
+ 19, 19, 19, 19, 19, 0, 19, 0, 0, 0, 0, 0, 0, 0, 19, 19,
+ 19, 19, 19, 0, 0, 0, 0, 0, 26, 26, 0, 0, 0, 0, 1, 1,
+ 1, 1, 1, 1, 1, 1, 9, 9, 9, 9, 0, 9, 9, 9, 2, 2,
+ 9, 9, 9, 9, 0, 9, 2, 2, 2, 2, 9, 0, 9, 0, 9, 9,
+ 9, 2, 9, 2, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
+ 2, 9, 9, 9, 9, 9, 9, 9, 55, 55, 55, 55, 55, 55, 55, 55,
+ 55, 55, 55, 55, 55, 55, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
+ 6, 6, 6, 1, 1, 6, 2, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 4, 4,
+ 4, 2, 2, 4, 4, 4, 2, 14, 14, 14, 14, 14, 14, 14, 14, 14,
+ 14, 14, 14, 14, 14, 14, 2, 2, 2, 2, 2, 2, 2, 2, 14, 14,
+ 14, 2, 2, 2, 2, 14, 14, 14, 14, 14, 14, 2, 2, 2, 3, 3,
+ 3, 3, 3, 0, 3, 3, 3, 3, 3, 3, 0, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 3, 3, 3, 0, 0, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 3, 3, 1, 3, 3, 3, 3, 3, 3, 3, 37, 37,
+ 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 2, 37, 37, 37,
+ 37, 2, 2, 37, 37, 37, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38,
+ 2, 2, 2, 2, 2, 2, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64,
+ 64, 2, 2, 64, 64, 64, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90,
+ 90, 90, 90, 90, 2, 2, 90, 90, 90, 90, 90, 90, 90, 2, 95, 95,
+ 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 2, 2, 95, 2, 37, 37,
+ 37, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 2, 3, 3,
+ 2, 2, 2, 2, 2, 2, 3, 3, 0, 3, 3, 3, 3, 3, 7, 7,
+ 7, 7, 7, 7, 7, 7, 7, 1, 1, 1, 1, 7, 7, 7, 7, 7,
+ 7, 7, 0, 0, 7, 7, 5, 5, 5, 5, 2, 5, 5, 5, 5, 5,
+ 5, 5, 5, 2, 2, 5, 5, 2, 2, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 2, 5, 5, 5, 5, 5, 5, 5, 2,
+ 5, 2, 2, 2, 5, 5, 5, 5, 2, 2, 5, 5, 5, 5, 5, 2,
+ 2, 5, 5, 5, 5, 2, 2, 2, 2, 2, 2, 2, 2, 5, 2, 2,
+ 2, 2, 5, 5, 2, 5, 5, 5, 5, 5, 2, 2, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 2, 2, 11, 11, 11, 2, 11, 11, 11, 11, 11,
+ 11, 2, 2, 2, 2, 11, 11, 2, 2, 11, 11, 11, 11, 11, 11, 11,
+ 11, 11, 11, 11, 11, 11, 11, 2, 11, 11, 11, 11, 11, 11, 11, 2,
+ 11, 11, 2, 11, 11, 2, 11, 11, 2, 2, 11, 2, 11, 11, 11, 2,
+ 2, 11, 11, 11, 2, 2, 2, 11, 2, 2, 2, 2, 2, 2, 2, 11,
+ 11, 11, 11, 2, 11, 2, 2, 2, 2, 2, 2, 2, 11, 11, 11, 11,
+ 11, 11, 11, 11, 11, 2, 2, 10, 10, 10, 2, 10, 10, 10, 10, 10,
+ 10, 10, 10, 10, 2, 10, 10, 10, 2, 10, 10, 10, 10, 10, 10, 10,
+ 10, 10, 10, 10, 10, 10, 10, 2, 10, 10, 10, 10, 10, 10, 10, 2,
+ 10, 10, 2, 10, 10, 10, 10, 10, 2, 2, 10, 10, 10, 10, 10, 10,
+ 2, 10, 10, 10, 2, 2, 10, 2, 2, 2, 2, 2, 2, 2, 10, 10,
+ 10, 10, 2, 2, 10, 10, 10, 10, 2, 2, 2, 2, 2, 2, 2, 10,
+ 10, 10, 10, 10, 10, 10, 2, 21, 21, 21, 2, 21, 21, 21, 21, 21,
+ 21, 21, 21, 2, 2, 21, 21, 2, 2, 21, 21, 21, 21, 21, 21, 21,
+ 21, 21, 21, 21, 21, 21, 21, 2, 21, 21, 21, 21, 21, 21, 21, 2,
+ 21, 21, 2, 21, 21, 21, 21, 21, 2, 2, 21, 21, 21, 21, 21, 2,
+ 2, 21, 21, 21, 2, 2, 2, 2, 2, 2, 2, 21, 21, 21, 2, 2,
+ 2, 2, 21, 21, 2, 21, 21, 21, 21, 21, 2, 2, 21, 21, 2, 2,
+ 22, 22, 2, 22, 22, 22, 22, 22, 22, 2, 2, 2, 22, 22, 22, 2,
+ 22, 22, 22, 22, 2, 2, 2, 22, 22, 2, 22, 2, 22, 22, 2, 2,
+ 2, 22, 22, 2, 2, 2, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22,
+ 2, 2, 2, 2, 22, 22, 22, 2, 2, 2, 2, 2, 2, 22, 2, 2,
+ 2, 2, 2, 2, 22, 22, 22, 22, 22, 2, 2, 2, 2, 2, 23, 23,
+ 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 2, 23, 23, 23, 2,
+ 23, 23, 23, 23, 23, 23, 23, 23, 2, 2, 23, 23, 23, 23, 23, 2,
+ 23, 23, 23, 23, 2, 2, 2, 2, 2, 2, 2, 23, 23, 2, 23, 23,
+ 23, 2, 2, 23, 2, 2, 23, 23, 23, 23, 2, 2, 23, 23, 2, 2,
+ 2, 2, 2, 2, 2, 23, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 2, 16, 16, 16, 2, 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 2, 16, 16, 16, 16, 16, 2, 2, 16, 16, 16, 16, 16, 2,
+ 16, 16, 16, 16, 2, 2, 2, 2, 2, 2, 2, 16, 16, 2, 16, 16,
+ 16, 16, 2, 2, 16, 16, 2, 16, 16, 16, 2, 2, 2, 2, 20, 20,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 2, 20, 20, 20, 2,
+ 20, 20, 20, 20, 20, 20, 2, 2, 2, 2, 20, 20, 20, 20, 20, 20,
+ 20, 20, 2, 2, 20, 20, 2, 36, 36, 36, 2, 36, 36, 36, 36, 36,
+ 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 2, 2, 2,
+ 36, 36, 36, 36, 36, 36, 36, 36, 2, 36, 36, 36, 36, 36, 36, 36,
+ 36, 36, 2, 36, 2, 2, 2, 2, 36, 2, 2, 2, 2, 36, 36, 36,
+ 36, 36, 36, 2, 36, 2, 2, 2, 2, 2, 2, 2, 36, 36, 2, 2,
+ 36, 36, 36, 2, 2, 2, 2, 24, 24, 24, 24, 24, 24, 24, 24, 24,
+ 24, 24, 24, 24, 24, 24, 24, 24, 24, 2, 2, 2, 2, 0, 24, 24,
+ 24, 24, 2, 2, 2, 2, 2, 18, 18, 2, 18, 2, 18, 18, 18, 18,
+ 18, 2, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18,
+ 18, 18, 2, 18, 2, 18, 18, 18, 18, 18, 18, 18, 2, 2, 18, 18,
+ 18, 18, 18, 2, 18, 2, 18, 18, 18, 18, 18, 18, 18, 2, 18, 18,
+ 2, 2, 18, 18, 18, 18, 25, 25, 25, 25, 25, 25, 25, 25, 2, 25,
+ 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 2, 2, 2, 25, 25,
+ 25, 25, 25, 2, 25, 25, 25, 25, 25, 25, 25, 0, 0, 0, 0, 25,
+ 25, 2, 2, 2, 2, 2, 33, 33, 33, 33, 33, 33, 33, 33, 8, 8,
+ 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 2, 8, 2, 2,
+ 2, 2, 2, 8, 2, 2, 8, 8, 8, 0, 8, 8, 8, 8, 12, 12,
+ 12, 12, 12, 12, 12, 12, 30, 30, 30, 30, 30, 30, 30, 30, 30, 2,
+ 30, 30, 30, 30, 2, 2, 30, 30, 30, 30, 30, 30, 30, 2, 30, 30,
+ 30, 2, 2, 30, 30, 30, 30, 30, 30, 30, 30, 2, 2, 2, 30, 30,
+ 2, 2, 2, 2, 2, 2, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
+ 29, 29, 29, 29, 2, 2, 28, 28, 28, 28, 28, 28, 28, 28, 34, 34,
+ 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 2, 2, 2, 35, 35,
+ 35, 35, 35, 35, 35, 35, 35, 35, 35, 0, 0, 0, 35, 35, 35, 2,
+ 2, 2, 2, 2, 2, 2, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
+ 45, 45, 45, 45, 2, 2, 2, 2, 2, 2, 2, 2, 2, 45, 44, 44,
+ 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 0, 0, 2, 43, 43,
+ 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 2, 2, 2, 2, 46, 46,
+ 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 2, 46, 46, 46, 2,
+ 46, 46, 2, 2, 2, 2, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31,
+ 31, 31, 31, 31, 2, 2, 31, 31, 2, 2, 2, 2, 2, 2, 32, 32,
+ 0, 0, 32, 0, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32,
+ 2, 2, 2, 2, 2, 2, 32, 2, 2, 2, 2, 2, 2, 2, 32, 32,
+ 32, 2, 2, 2, 2, 2, 28, 28, 28, 28, 28, 28, 2, 2, 48, 48,
+ 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 2, 48, 48,
+ 48, 48, 2, 2, 2, 2, 48, 2, 2, 2, 48, 48, 48, 48, 52, 52,
+ 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 2, 2, 52, 52,
+ 52, 52, 52, 2, 2, 2, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58,
+ 58, 58, 2, 2, 2, 2, 58, 58, 2, 2, 2, 2, 2, 2, 58, 58,
+ 58, 2, 2, 2, 58, 58, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54,
+ 54, 54, 2, 2, 54, 54, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91,
+ 91, 91, 91, 91, 91, 2, 91, 91, 91, 91, 91, 2, 2, 91, 91, 91,
+ 2, 2, 2, 2, 2, 2, 91, 91, 91, 91, 91, 91, 2, 2, 1, 1,
+ 1, 1, 1, 1, 1, 2, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62,
+ 62, 62, 62, 2, 2, 2, 62, 62, 62, 62, 62, 62, 62, 2, 76, 76,
+ 76, 76, 76, 76, 76, 76, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93,
+ 93, 93, 2, 2, 2, 2, 2, 2, 2, 2, 93, 93, 93, 93, 70, 70,
+ 70, 70, 70, 70, 70, 70, 2, 2, 2, 70, 70, 70, 70, 70, 70, 70,
+ 2, 2, 2, 70, 70, 70, 73, 73, 73, 73, 73, 73, 73, 73, 6, 2,
+ 2, 2, 2, 2, 2, 2, 8, 8, 8, 2, 2, 8, 8, 8, 1, 1,
+ 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0,
+ 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1,
+ 0, 2, 2, 2, 2, 2, 19, 19, 19, 19, 19, 19, 9, 9, 9, 9,
+ 9, 6, 19, 19, 19, 19, 19, 19, 19, 19, 19, 9, 9, 9, 9, 9,
+ 19, 19, 19, 19, 9, 9, 9, 9, 9, 19, 19, 19, 19, 19, 6, 19,
+ 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 9, 9, 9,
+ 9, 9, 9, 9, 2, 2, 2, 9, 2, 9, 2, 9, 2, 9, 9, 9,
+ 9, 9, 9, 2, 9, 9, 9, 9, 9, 9, 2, 2, 9, 9, 9, 9,
+ 9, 9, 2, 9, 9, 9, 2, 2, 9, 9, 9, 2, 9, 9, 9, 9,
+ 9, 9, 9, 9, 9, 2, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0,
+ 0, 0, 0, 2, 0, 0, 0, 19, 2, 2, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 19, 0, 0, 0, 0, 0, 0, 0, 2, 19, 19,
+ 19, 19, 19, 2, 2, 2, 0, 2, 2, 2, 2, 2, 2, 2, 1, 2,
+ 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 9, 0, 0, 0,
+ 19, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 0, 19, 0,
+ 0, 0, 2, 2, 2, 2, 0, 0, 0, 2, 2, 2, 2, 2, 27, 27,
+ 27, 27, 27, 27, 27, 27, 0, 0, 0, 0, 2, 2, 0, 0, 0, 0,
+ 0, 0, 0, 0, 2, 0, 56, 56, 56, 56, 56, 56, 56, 56, 55, 55,
+ 55, 55, 2, 2, 2, 2, 2, 55, 55, 55, 55, 55, 55, 55, 61, 61,
+ 61, 61, 61, 61, 61, 61, 2, 2, 2, 2, 2, 2, 2, 61, 61, 2,
+ 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 2, 2, 13, 13,
+ 13, 13, 13, 13, 13, 13, 13, 13, 2, 13, 13, 13, 13, 13, 13, 13,
+ 13, 13, 2, 2, 2, 2, 13, 13, 13, 13, 13, 13, 2, 2, 0, 0,
+ 0, 0, 2, 2, 2, 2, 0, 0, 0, 0, 0, 13, 0, 13, 0, 13,
+ 13, 13, 13, 13, 13, 13, 13, 13, 1, 1, 1, 1, 12, 12, 13, 13,
+ 13, 13, 0, 0, 0, 0, 2, 15, 15, 15, 15, 15, 15, 15, 15, 15,
+ 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 2, 2, 1,
+ 1, 0, 0, 15, 15, 15, 0, 17, 17, 17, 17, 17, 17, 17, 17, 17,
+ 17, 17, 17, 17, 17, 17, 17, 17, 17, 0, 0, 17, 17, 17, 2, 2,
+ 2, 2, 2, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 2, 12,
+ 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 2, 12, 12,
+ 12, 12, 12, 12, 12, 0, 17, 17, 17, 17, 17, 17, 17, 0, 39, 39,
+ 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 2, 2, 2, 39, 39,
+ 39, 39, 39, 39, 39, 2, 86, 86, 86, 86, 86, 86, 86, 86, 77, 77,
+ 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 2, 2, 2, 2, 79, 79,
+ 79, 79, 79, 79, 79, 79, 0, 0, 19, 19, 19, 19, 19, 19, 0, 0,
+ 0, 19, 19, 19, 19, 19, 19, 19, 19, 2, 2, 2, 2, 2, 19, 19,
+ 2, 19, 2, 19, 19, 19, 19, 19, 2, 2, 2, 2, 2, 2, 2, 2,
+ 19, 19, 19, 19, 19, 19, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60,
+ 60, 60, 60, 2, 2, 2, 0, 0, 2, 2, 2, 2, 2, 2, 65, 65,
+ 65, 65, 65, 65, 65, 65, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75,
+ 75, 75, 75, 75, 2, 2, 2, 2, 2, 2, 2, 2, 75, 75, 75, 75,
+ 2, 2, 2, 2, 2, 2, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
+ 69, 69, 69, 69, 0, 69, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74,
+ 74, 74, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 74, 12, 12,
+ 12, 12, 12, 2, 2, 2, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84,
+ 84, 84, 84, 84, 2, 0, 84, 84, 2, 2, 2, 2, 84, 84, 33, 33,
+ 33, 33, 33, 33, 33, 2, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 2, 68, 68, 68, 68, 68, 68, 2, 2, 68, 68,
+ 2, 2, 68, 68, 68, 68, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92,
+ 92, 2, 2, 2, 2, 2, 2, 2, 2, 92, 92, 92, 92, 92, 87, 87,
+ 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 2, 2, 30,
+ 30, 30, 30, 30, 30, 2, 19, 19, 19, 0, 19, 19, 19, 19, 19, 19,
+ 19, 19, 19, 9, 19, 19, 19, 19, 0, 0, 2, 2, 2, 2, 87, 87,
+ 87, 87, 87, 87, 2, 2, 87, 87, 2, 2, 2, 2, 2, 2, 12, 12,
+ 12, 12, 2, 2, 2, 2, 2, 2, 2, 12, 12, 12, 12, 12, 13, 13,
+ 2, 2, 2, 2, 2, 2, 19, 19, 19, 19, 19, 19, 19, 2, 2, 2,
+ 2, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, 14, 14, 14, 14, 14,
+ 14, 14, 14, 14, 14, 2, 14, 14, 14, 14, 14, 2, 14, 2, 14, 14,
+ 2, 14, 14, 2, 14, 14, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2,
+ 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 2, 2,
+ 3, 3, 3, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2, 3, 1, 1,
+ 1, 1, 1, 1, 6, 6, 0, 0, 0, 2, 0, 0, 0, 0, 3, 3,
+ 3, 3, 3, 2, 3, 3, 3, 3, 3, 3, 3, 2, 2, 0, 2, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 17, 17, 17, 17,
+ 17, 17, 17, 17, 0, 0, 2, 2, 12, 12, 12, 12, 12, 12, 2, 2,
+ 12, 12, 12, 2, 2, 2, 2, 0, 0, 0, 0, 0, 2, 2, 49, 49,
+ 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 2, 49, 49, 49, 49, 49,
+ 49, 49, 49, 49, 49, 2, 49, 49, 49, 2, 49, 49, 2, 49, 49, 49,
+ 49, 49, 49, 49, 2, 2, 49, 49, 49, 2, 2, 2, 2, 2, 0, 0,
+ 0, 2, 2, 2, 2, 0, 0, 0, 0, 0, 2, 2, 2, 0, 0, 0,
+ 0, 0, 0, 2, 2, 2, 9, 2, 2, 2, 2, 2, 2, 2, 0, 0,
+ 0, 0, 0, 1, 2, 2, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
+ 71, 71, 71, 2, 2, 2, 67, 67, 67, 67, 67, 67, 67, 67, 67, 2,
+ 2, 2, 2, 2, 2, 2, 1, 0, 0, 0, 0, 0, 0, 0, 42, 42,
+ 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 42, 42, 42, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41,
+ 41, 2, 2, 2, 2, 2,118,118,118,118,118,118,118,118,118,118,
+ 118, 2, 2, 2, 2, 2, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53,
+ 53, 53, 53, 53, 2, 53, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59,
+ 59, 59, 2, 2, 2, 2, 59, 59, 59, 59, 59, 59, 2, 2, 40, 40,
+ 40, 40, 40, 40, 40, 40, 51, 51, 51, 51, 51, 51, 51, 51, 50, 50,
+ 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 2, 2, 50, 50,
+ 2, 2, 2, 2, 2, 2,135,135,135,135,135,135,135,135,135,135,
+ 135,135, 2, 2, 2, 2,106,106,106,106,106,106,106,106,104,104,
+ 104,104,104,104,104,104,104,104,104,104, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2,104,161,161,161,161,161,161,161,161,161,161,
+ 161, 2,161,161,161,161,161,161,161, 2,161,161, 2,161,161,161,
+ 2,161,161,161,161,161,161,161, 2,161,161, 2, 2, 2,110,110,
+ 110,110,110,110,110,110,110,110,110,110,110,110,110, 2,110,110,
+ 110,110,110,110, 2, 2, 19, 19, 19, 19, 19, 19, 2, 19, 19, 2,
+ 19, 19, 19, 19, 19, 19, 47, 47, 47, 47, 47, 47, 2, 2, 47, 2,
+ 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47,
+ 47, 47, 47, 47, 2, 47, 47, 2, 2, 2, 47, 2, 2, 47, 81, 81,
+ 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 2, 81,120,120,
+ 120,120,120,120,120,120,116,116,116,116,116,116,116,116,116,116,
+ 116,116,116,116,116, 2, 2, 2, 2, 2, 2, 2, 2,116,128,128,
+ 128,128,128,128,128,128,128,128,128, 2,128,128, 2, 2, 2, 2,
+ 2,128,128,128,128,128, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66,
+ 66, 66, 2, 2, 2, 66, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72,
+ 2, 2, 2, 2, 2, 72, 98, 98, 98, 98, 98, 98, 98, 98, 97, 97,
+ 97, 97, 97, 97, 97, 97, 2, 2, 2, 2, 97, 97, 97, 97, 2, 2,
+ 97, 97, 97, 97, 97, 97, 57, 57, 57, 57, 2, 57, 57, 2, 2, 2,
+ 2, 2, 57, 57, 57, 57, 57, 57, 57, 57, 2, 57, 57, 57, 2, 57,
+ 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57,
+ 57, 57, 57, 57, 2, 2, 57, 57, 57, 2, 2, 2, 2, 57, 57, 2,
+ 2, 2, 2, 2, 2, 2, 88, 88, 88, 88, 88, 88, 88, 88,117,117,
+ 117,117,117,117,117,117,112,112,112,112,112,112,112,112,112,112,
+ 112,112,112,112,112, 2, 2, 2, 2,112,112,112,112,112, 78, 78,
+ 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 2, 2, 2, 78,
+ 78, 78, 78, 78, 78, 78, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83,
+ 83, 83, 83, 83, 2, 2, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82,
+ 82, 2, 2, 2, 2, 2,122,122,122,122,122,122,122,122,122,122,
+ 2, 2, 2, 2, 2, 2, 2,122,122,122,122, 2, 2, 2, 2,122,
+ 122,122,122,122,122,122, 89, 89, 89, 89, 89, 89, 89, 89, 89, 2,
+ 2, 2, 2, 2, 2, 2,130,130,130,130,130,130,130,130,130,130,
+ 130, 2, 2, 2, 2, 2, 2, 2,130,130,130,130,130,130,144,144,
+ 144,144,144,144,144,144,144,144, 2, 2, 2, 2, 2, 2,156,156,
+ 156,156,156,156,156,156,156,156, 2,156,156,156, 2, 2,156,156,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3,147,147,
+ 147,147,147,147,147,147,148,148,148,148,148,148,148,148,148,148,
+ 2, 2, 2, 2, 2, 2,158,158,158,158,158,158,158,158,158,158,
+ 2, 2, 2, 2, 2, 2,153,153,153,153,153,153,153,153,153,153,
+ 153,153, 2, 2, 2, 2,149,149,149,149,149,149,149,149,149,149,
+ 149,149,149,149,149, 2, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94,
+ 94, 94, 94, 94, 2, 2, 2, 2, 94, 94, 94, 94, 94, 94, 2, 2,
+ 2, 2, 2, 2, 2, 94, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85,
+ 85, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 85, 2, 2,101,101,
+ 101,101,101,101,101,101,101, 2, 2, 2, 2, 2, 2, 2,101,101,
+ 2, 2, 2, 2, 2, 2, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96,
+ 96, 96, 96, 2, 96, 96,111,111,111,111,111,111,111,111,111,111,
+ 111,111,111,111,111, 2,100,100,100,100,100,100,100,100, 2, 36,
+ 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 2, 2, 2,108,108,
+ 108,108,108,108,108,108,108,108, 2,108,108,108,108,108,108,108,
+ 2, 2, 2, 2, 2, 2,129,129,129,129,129,129,129, 2,129, 2,
+ 129,129,129,129, 2,129,129,129,129,129,129,129,129,129,129,129,
+ 129,129,129,129, 2,129,129,129, 2, 2, 2, 2, 2, 2,109,109,
+ 109,109,109,109,109,109,109,109,109, 2, 2, 2, 2, 2,109,109,
+ 2, 2, 2, 2, 2, 2,107,107,107,107, 2,107,107,107,107,107,
+ 107,107,107, 2, 2,107,107, 2, 2,107,107,107,107,107,107,107,
+ 107,107,107,107,107,107,107, 2,107,107,107,107,107,107,107, 2,
+ 107,107, 2,107,107,107,107,107, 2, 1,107,107,107,107,107, 2,
+ 2,107,107,107, 2, 2,107, 2, 2, 2, 2, 2, 2,107, 2, 2,
+ 2, 2, 2,107,107,107,107,107,107,107, 2, 2,107,107,107,107,
+ 107,107,107, 2, 2, 2,137,137,137,137,137,137,137,137,137,137,
+ 137,137, 2,137,137,137,137,137, 2, 2, 2, 2, 2, 2,124,124,
+ 124,124,124,124,124,124,124,124, 2, 2, 2, 2, 2, 2,123,123,
+ 123,123,123,123,123,123,123,123,123,123,123,123, 2, 2,114,114,
+ 114,114,114,114,114,114,114,114,114,114,114, 2, 2, 2,114,114,
+ 2, 2, 2, 2, 2, 2, 32, 32, 32, 32, 32, 2, 2, 2,102,102,
+ 102,102,102,102,102,102,102,102, 2, 2, 2, 2, 2, 2,126,126,
+ 126,126,126,126,126,126,126,126,126, 2, 2,126,126,126,126,126,
+ 126,126, 2, 2, 2, 2,126,126,126,126,126,126,126, 2,142,142,
+ 142,142,142,142,142,142,142,142,142,142, 2, 2, 2, 2,125,125,
+ 125,125,125,125,125,125,125,125,125, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2,125,154,154,154,154,154,154,154, 2, 2,154,
+ 2, 2,154,154,154,154,154,154,154,154, 2,154,154, 2,154,154,
+ 154,154,154,154,154,154,154,154,154,154,154,154, 2,154,154, 2,
+ 2,154,154,154,154,154,154,154, 2, 2, 2, 2, 2, 2,150,150,
+ 150,150,150,150,150,150, 2, 2,150,150,150,150,150,150,150,150,
+ 150,150,150, 2, 2, 2,141,141,141,141,141,141,141,141,140,140,
+ 140,140,140,140,140,140,140,140,140, 2, 2, 2, 2, 2,121,121,
+ 121,121,121,121,121,121,121, 2, 2, 2, 2, 2, 2, 2, 7, 7,
+ 2, 2, 2, 2, 2, 2,133,133,133,133,133,133,133,133,133, 2,
+ 133,133,133,133,133,133,133,133,133,133,133,133,133, 2,133,133,
+ 133,133,133,133, 2, 2,133,133,133,133,133, 2, 2, 2,134,134,
+ 134,134,134,134,134,134, 2, 2,134,134,134,134,134,134, 2,134,
+ 134,134,134,134,134,134,134,134,134,134,134,134,134, 2,138,138,
+ 138,138,138,138,138, 2,138,138, 2,138,138,138,138,138,138,138,
+ 138,138,138,138,138,138, 2, 2,138, 2,138,138, 2,138,138,138,
+ 2, 2, 2, 2, 2, 2,143,143,143,143,143,143, 2,143,143, 2,
+ 143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,
+ 143,143,143,143,143, 2,143,143, 2,143,143,143,143,143,143, 2,
+ 2, 2, 2, 2, 2, 2,143,143, 2, 2, 2, 2, 2, 2,145,145,
+ 145,145,145,145,145,145,145, 2, 2, 2, 2, 2, 2, 2,163,163,
+ 163,163,163,163,163,163,163, 2,163,163,163,163,163,163,163,163,
+ 163, 2, 2, 2,163,163,163,163, 2, 2, 2, 2, 2, 2, 86, 2,
+ 2, 2, 2, 2, 2, 2, 22, 22, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 22, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63,
+ 2, 2, 2, 2, 2, 2, 63, 63, 63, 63, 63, 63, 63, 2, 63, 63,
+ 63, 63, 63, 2, 2, 2, 63, 63, 63, 63, 2, 2, 2, 2,157,157,
+ 157,157,157,157,157,157,157,157,157, 2, 2, 2, 2, 2, 80, 80,
+ 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 2, 2,127,127,
+ 127,127,127,127,127,127,127,127,127,127,127,127,127, 2, 79, 2,
+ 2, 2, 2, 2, 2, 2,115,115,115,115,115,115,115,115,115,115,
+ 115,115,115,115,115, 2,115,115, 2, 2, 2, 2,115,115,159,159,
+ 159,159,159,159,159,159,159,159,159,159,159,159,159, 2,159,159,
+ 2, 2, 2, 2, 2, 2,103,103,103,103,103,103,103,103,103,103,
+ 103,103,103,103, 2, 2,119,119,119,119,119,119,119,119,119,119,
+ 119,119,119,119, 2, 2,119,119, 2,119,119,119,119,119, 2, 2,
+ 2, 2, 2,119,119,119,146,146,146,146,146,146,146,146,146,146,
+ 146, 2, 2, 2, 2, 2, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99,
+ 99, 2, 2, 2, 2, 99, 2, 2, 2, 2, 2, 2, 2, 99,136,139,
+ 13, 13,155, 2, 2, 2,136,136,136,136,136,136,136,136,155,155,
+ 155,155,155,155,155,155,155,155,155,155,155,155, 2, 2,136, 2,
+ 2, 2, 2, 2, 2, 2, 17, 17, 17, 17, 2, 17, 17, 17, 17, 17,
+ 17, 17, 2, 17, 17, 2, 17, 15, 15, 15, 15, 15, 15, 15, 17, 17,
+ 17, 2, 2, 2, 2, 2, 2, 2, 15, 2, 2, 2, 2, 2, 15, 15,
+ 15, 2, 2, 17, 2, 2, 2, 2, 2, 2, 17, 17, 17, 17,139,139,
+ 139,139,139,139,139,139,139,139,139,139, 2, 2, 2, 2,105,105,
+ 105,105,105,105,105,105,105,105,105, 2, 2, 2, 2, 2,105,105,
+ 105,105,105, 2, 2, 2,105, 2, 2, 2, 2, 2, 2, 2,105,105,
+ 2, 2,105,105,105,105, 1, 1, 1, 1, 1, 1, 2, 2, 0, 0,
+ 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1,
+ 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 2, 2,
+ 0, 2, 2, 0, 0, 2, 2, 0, 0, 0, 0, 2, 0, 0, 0, 0,
+ 2, 0, 2, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0,
+ 0, 2, 2, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 2, 0, 0,
+ 0, 0, 0, 2, 0, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 2,
+ 0, 0, 0, 0, 0, 0,131,131,131,131,131,131,131,131,131,131,
+ 131,131, 2, 2, 2, 2, 2, 2, 2,131,131,131,131,131, 2,131,
+ 131,131,131,131,131,131, 2, 2, 2, 2, 2, 19, 19, 19, 56, 56,
+ 56, 56, 56, 56, 56, 2, 56, 2, 2, 56, 56, 56, 56, 56, 56, 56,
+ 2, 56, 56, 2, 56, 56, 56, 56, 56, 2, 2, 2, 2, 2, 6, 6,
+ 6, 6, 6, 6, 2, 2, 2, 2, 2, 2, 2, 2, 2, 6,151,151,
+ 151,151,151,151,151,151,151,151,151,151,151, 2, 2, 2,151,151,
+ 151,151,151,151, 2, 2,151,151, 2, 2, 2, 2,151,151,160,160,
+ 160,160,160,160,160,160,160,160,160,160,160,160,160, 2,152,152,
+ 152,152,152,152,152,152,152,152, 2, 2, 2, 2, 2,152,164,164,
+ 164,164,164,164,164,164,164,164, 2, 2, 2, 2, 2, 2, 30, 30,
+ 30, 30, 2, 30, 30, 2,113,113,113,113,113,113,113,113,113,113,
+ 113,113,113, 2, 2,113,113,113,113,113,113,113,113, 2,132,132,
+ 132,132,132,132,132,132,132,132,132,132, 2, 2, 2, 2,132,132,
+ 2, 2, 2, 2,132,132, 3, 3, 3, 3, 2, 3, 3, 3, 2, 3,
+ 3, 2, 3, 2, 2, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 2, 3, 3, 3, 3, 2, 3, 2, 3, 2, 2, 2, 2, 2, 2,
+ 3, 2, 2, 2, 2, 3, 2, 3, 2, 3, 2, 3, 3, 3, 2, 3,
+ 2, 3, 2, 3, 2, 3, 2, 3, 3, 3, 3, 2, 3, 2, 3, 3,
+ 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 2, 2, 2, 2, 3,
+ 3, 3, 2, 3, 3, 3, 2, 2, 2, 2, 2, 2, 0, 0, 15, 0,
+ 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 2, 2,
+ 2, 0, 0, 0, 0, 0, 13, 2, 2, 2, 2, 2, 2, 2, 13, 13,
+ 13, 2, 2, 2, 2, 2, 2, 0, 2, 2, 2, 2, 2, 2, 0, 1,
+ 2, 3, 4, 5, 6, 7, 8, 9, 9, 9, 9, 10, 9, 11, 12, 13,
+ 9, 9, 9, 14, 9, 9, 15, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
- 9, 9, 16, 17, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 18, 19,
- 20, 9, 21, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
+ 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 16, 17, 9, 9, 9, 9,
+ 9, 9, 9, 9, 9, 9, 18, 19, 20, 9, 21, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
- 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 22, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
+ 9, 9, 9, 9, 9, 9, 22, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
@@ -2162,57 +2137,57 @@ _hb_ucd_u8[18260] =
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
- 9, 9, 23, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 0, 0, 13, 14,
- 15, 16, 17, 18, 19, 20, 21, 22, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 23, 0, 0, 24, 25, 26, 27, 28,
- 29, 30, 0, 0, 31, 32, 0, 33, 0, 34, 0, 35, 0, 0, 0, 0,
- 36, 37, 38, 39, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 40, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 41, 42, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 23, 24, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8,
+ 9, 10, 11, 12, 0, 0, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 23, 0, 0, 24, 25, 26, 27, 28, 29, 30, 0, 0, 31, 32, 0, 33,
+ 0, 34, 0, 35, 0, 0, 0, 0, 36, 37, 38, 39, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 40, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 41, 42, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 43, 44, 0, 45, 0, 0, 0, 0, 0, 0,
- 46, 47, 0, 0, 0, 0, 0, 48, 0, 49, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 50, 51, 0, 0, 0, 52, 0, 0,
- 53, 0, 0, 0, 0, 0, 0, 0, 54, 0, 0, 0, 0, 0, 0, 0,
- 55, 0, 0, 0, 0, 0, 0, 0, 56, 0, 0, 0, 0, 0, 0, 0,
- 0, 57, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 58, 59, 60, 61, 62, 63, 64, 65,
- 0, 0, 0, 0, 0, 0, 66, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 43, 44,
+ 0, 45, 0, 0, 0, 0, 0, 0, 46, 47, 0, 0, 0, 0, 0, 48,
+ 0, 49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 50, 51, 0, 0, 0, 52, 0, 0, 53, 0, 0, 0, 0, 0, 0, 0,
+ 54, 0, 0, 0, 0, 0, 0, 0, 55, 0, 0, 0, 0, 0, 0, 0,
+ 56, 0, 0, 0, 0, 0, 0, 0, 0, 57, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 58, 59, 60, 61, 62, 63, 64, 65, 0, 0, 0, 0, 0, 0, 66, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 67, 68, 0, 69, 70, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82,
- 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98,
- 99,100,101,102,103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0,104, 0, 0, 0, 0, 0, 0,105,106, 0,
- 107, 0, 0, 0,108, 0,109, 0,110, 0,111,112,113, 0,114, 0,
- 0, 0,115, 0, 0, 0,116, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0,117, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,118,119,120,121, 0,122,123,124,
- 125,126, 0,127, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 67, 68, 0, 69,
+ 70, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 71, 72, 73, 74,
+ 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90,
+ 91, 92, 93, 94, 95, 96, 97, 98, 99,100,101,102,103, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,104, 0,
+ 0, 0, 0, 0, 0,105,106, 0,107, 0, 0, 0,108, 0,109, 0,
+ 110, 0,111,112,113, 0,114, 0, 0, 0,115, 0, 0, 0,116, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0,128,129,130,131,132,133,134,135,136,137,138,139,
- 140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,
- 156,157, 0, 0, 0,158,159,160,161, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,162,163, 0,
- 0, 0, 0, 0, 0, 0,164, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0,117, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,165, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,166,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,167,
+ 118,119,120,121, 0,122,123,124,125,126, 0,127, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0,168, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,128,129,130,131,
+ 132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,
+ 148,149,150,151,152,153,154,155,156,157, 0, 0, 0,158,159,160,
+ 161, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0,162,163, 0, 0, 0, 0, 0, 0, 0,164, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0,169,170, 0, 0, 0, 0,171,
- 172, 0, 0, 0,173,174,175,176,177,178,179,180,181,182,183,184,
- 185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,
- 201,202,203,204,205,206, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 1, 2, 3, 4,
+ 165, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0,166, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0,167, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,168,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0,169,170, 0, 0, 0, 0,171,172, 0, 0, 0,173,174,175,176,
+ 177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,
+ 193,194,195,196,197,198,199,200,201,202,203,204,205,206, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 4,
};
static const uint16_t
_hb_ucd_u16[9320] =
@@ -2827,7 +2802,7 @@ _hb_ucd_gc (unsigned u)
static inline uint_fast8_t
_hb_ucd_ccc (unsigned u)
{
- return u<125259u?_hb_ucd_u8[9184+(((_hb_ucd_u8[8128+(((_hb_ucd_u8[7424+(((_hb_ucd_u8[7178+(u>>2>>3>>4)])<<4)+((u>>2>>3)&15u))])<<3)+((u>>2)&7u))])<<2)+((u)&3u))]:0;
+ return u<125259u?_hb_ucd_u8[8792+(((_hb_ucd_u8[8236+(((_hb_ucd_u8[7776+(((_hb_ucd_u8[7424+(((_hb_ucd_u8[7178+(u>>2>>2>>2>>3)])<<3)+((u>>2>>2>>2)&7u))])<<2)+((u>>2>>2)&3u))])<<2)+((u>>2)&3u))])<<2)+((u)&3u))]:0;
}
static inline unsigned
_hb_ucd_b4 (const uint8_t* a, unsigned i)
@@ -2837,24 +2812,24 @@ _hb_ucd_b4 (const uint8_t* a, unsigned i)
static inline int_fast16_t
_hb_ucd_bmg (unsigned u)
{
- return u<65380u?_hb_ucd_i16[((_hb_ucd_u8[9932+(((_hb_ucd_u8[9812+(((_hb_ucd_b4(9684+_hb_ucd_u8,u>>2>>3>>3))<<3)+((u>>2>>3)&7u))])<<3)+((u>>2)&7u))])<<2)+((u)&3u)]:0;
+ return u<65380u?_hb_ucd_i16[((_hb_ucd_u8[9540+(((_hb_ucd_u8[9420+(((_hb_ucd_b4(9292+_hb_ucd_u8,u>>2>>3>>3))<<3)+((u>>2>>3)&7u))])<<3)+((u>>2)&7u))])<<2)+((u)&3u)]:0;
}
static inline uint_fast8_t
_hb_ucd_sc (unsigned u)
{
- return u<918000u?_hb_ucd_u8[11454+(((_hb_ucd_u16[2040+(((_hb_ucd_u8[10718+(((_hb_ucd_u8[10268+(u>>3>>4>>4)])<<4)+((u>>3>>4)&15u))])<<4)+((u>>3)&15u))])<<3)+((u)&7u))]:2;
+ return u<918000u?_hb_ucd_u8[11062+(((_hb_ucd_u16[2040+(((_hb_ucd_u8[10326+(((_hb_ucd_u8[9876+(u>>3>>4>>4)])<<4)+((u>>3>>4)&15u))])<<4)+((u>>3)&15u))])<<3)+((u)&7u))]:2;
}
static inline uint_fast16_t
_hb_ucd_dm (unsigned u)
{
- return u<195102u?_hb_ucd_u16[6008+(((_hb_ucd_u8[17460+(((_hb_ucd_u8[17078+(u>>4>>5)])<<5)+((u>>4)&31u))])<<4)+((u)&15u))]:0;
+ return u<195102u?_hb_ucd_u16[6008+(((_hb_ucd_u8[17068+(((_hb_ucd_u8[16686+(u>>4>>5)])<<5)+((u>>4)&31u))])<<4)+((u)&15u))]:0;
}
#elif !defined(HB_NO_UCD_UNASSIGNED)
static const uint8_t
-_hb_ucd_u8[17868] =
+_hb_ucd_u8[14744] =
{
0, 1, 2, 3, 4, 5, 6, 7, 7, 8, 7, 7, 7, 7, 7, 7,
7, 7, 7, 7, 9, 10, 7, 7, 7, 7, 11, 12, 13, 13, 13, 14,
@@ -3436,51 +3411,54 @@ _hb_ucd_u8[17868] =
0,230,220, 0, 0, 0,220,220, 0, 0,230,220, 0, 9, 7, 0,
0, 7, 9, 0, 0, 0, 9, 7, 6, 6, 0, 0, 0, 0, 1, 0,
0,216,216, 1, 1, 1, 0, 0, 0,226,216,216,216,216,216, 0,
- 220,220,220, 0,232,232,220,230,230,230, 7, 0, 16, 17, 17, 17,
- 17, 17, 17, 33, 17, 17, 17, 19, 17, 17, 17, 17, 20,101, 17,113,
- 129,169, 17, 27, 28, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
- 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
- 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
+ 220,220,220, 0,232,232,220,230,230,230, 7, 0, 16, 17, 17, 33,
+ 17, 49, 17, 17, 84, 97,135,145, 26, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
- 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
- 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,237, 0, 1, 2, 2,
- 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 6, 7, 8,
- 9, 0, 0, 0, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 20, 0, 0, 21, 22, 0, 0, 0, 0,
- 23, 24, 25, 26, 0, 27, 0, 28, 29, 30, 31, 32, 0, 0, 0, 0,
- 0, 0, 0, 33, 34, 35, 36, 0, 0, 0, 0, 0, 37, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 38, 39, 0, 0, 0, 0, 1, 2, 40, 41,
- 0, 1, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0,
- 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 3, 4, 0, 0, 5, 0,
- 0, 0, 6, 0, 0, 0, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0,
- 0, 0, 8, 9, 0, 0, 0, 0, 0, 0, 10, 0, 0, 10, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 0, 0, 0, 10,
- 0, 0, 0, 0, 0, 0, 11, 12, 0, 13, 0, 14, 15, 16, 0, 0,
- 0, 0, 0, 1, 17, 18, 0, 19, 7, 1, 0, 0, 0, 20, 20, 7,
- 20, 20, 20, 20, 20, 20, 20, 8, 21, 0, 22, 0, 7, 23, 24, 0,
- 20, 20, 25, 0, 0, 0, 26, 27, 1, 7, 20, 20, 20, 20, 20, 1,
- 28, 29, 30, 31, 0, 0, 20, 0, 0, 0, 0, 0, 0, 0, 10, 0,
- 0, 0, 0, 0, 0, 0, 20, 20, 20, 1, 0, 0, 8, 21, 32, 4,
- 0, 10, 0, 33, 7, 20, 20, 20, 0, 0, 0, 0, 8, 34, 34, 35,
- 36, 34, 37, 0, 38, 1, 20, 20, 0, 0, 39, 0, 1, 1, 0, 8,
- 21, 1, 20, 0, 0, 0, 1, 0, 0, 40, 1, 1, 0, 0, 8, 21,
- 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 26, 34, 34, 34, 34, 34,
- 34, 34, 34, 34, 21, 7, 20, 41, 34, 34, 34, 34, 34, 34, 34, 34,
- 34, 21, 0, 42, 43, 44, 0, 45, 0, 8, 21, 0, 0, 0, 0, 0,
- 0, 0, 0, 46, 7, 1, 10, 1, 0, 0, 0, 1, 20, 20, 1, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 26, 34, 9, 0, 0, 20, 20,
- 1, 20, 20, 0, 0, 0, 0, 0, 0, 0, 26, 21, 0, 1, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 47, 48, 0, 0, 0,
- 0, 0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 7, 8, 7, 7,
- 7, 7, 7, 7, 7, 7, 7, 7, 9, 10, 11, 11, 11, 11, 12, 13,
- 13, 13, 13, 14, 15, 16, 17, 18, 19, 20, 21, 13, 22, 13, 13, 13,
- 13, 23, 24, 24, 25, 26, 13, 13, 13, 27, 28, 29, 13, 30, 31, 32,
- 33, 34, 35, 36, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
- 7, 7, 7, 7, 7, 7, 7, 7, 37, 7, 38, 39, 7, 40, 7, 7,
- 7, 41, 13, 42, 7, 7, 43, 7, 44, 13, 13, 13, 13, 13, 13, 13,
- 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
+ 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,177, 0, 1, 2, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 4, 3, 3, 3, 3, 3, 5, 3, 3, 3, 3, 3, 6, 7, 8, 3,
+ 3, 3, 3, 3, 9, 10, 11, 12, 13, 3, 3, 3, 3, 3, 3, 3,
+ 3, 14, 3, 15, 3, 3, 3, 3, 3, 3, 16, 17, 18, 19, 20, 21,
+ 3, 3, 3, 22, 23, 24, 3, 3, 3, 3, 3, 3, 25, 3, 3, 3,
+ 3, 3, 3, 3, 3, 26, 3, 3, 27, 28, 0, 1, 0, 0, 0, 0,
+ 0, 1, 0, 2, 0, 0, 0, 3, 0, 0, 0, 3, 0, 0, 0, 0,
+ 0, 4, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 6, 0, 0, 0, 7, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 8, 9, 0, 0, 0, 0, 0, 0, 9, 0, 9, 0, 0,
+ 0, 0, 0, 0, 0, 10, 11, 12, 13, 0, 0, 14, 15, 16, 6, 0,
+ 17, 18, 19, 19, 19, 20, 21, 22, 23, 24, 19, 25, 0, 26, 27, 19,
+ 19, 28, 29, 30, 0, 31, 0, 0, 0, 8, 0, 0, 0, 0, 0, 0,
+ 0, 19, 28, 0, 32, 33, 9, 34, 35, 19, 0, 0, 36, 37, 38, 39,
+ 40, 19, 0, 41, 42, 43, 44, 31, 0, 1, 45, 42, 0, 0, 0, 0,
+ 0, 32, 14, 14, 0, 0, 0, 0, 14, 0, 0, 46, 47, 47, 47, 47,
+ 48, 49, 47, 47, 47, 47, 50, 51, 52, 53, 43, 21, 0, 0, 0, 0,
+ 0, 0, 0, 54, 6, 55, 0, 14, 19, 1, 0, 0, 0, 0, 56, 57,
+ 0, 0, 0, 0, 0, 19, 58, 31, 0, 0, 0, 0, 0, 0, 0, 59,
+ 14, 0, 0, 0, 0, 1, 0, 2, 0, 0, 0, 3, 0, 0, 0, 60,
+ 61, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 2, 3,
+ 0, 4, 5, 0, 0, 6, 0, 0, 0, 7, 0, 0, 0, 1, 1, 0,
+ 0, 8, 9, 0, 8, 9, 0, 0, 0, 0, 8, 9, 10, 11, 12, 0,
+ 0, 0, 13, 0, 0, 0, 0, 14, 15, 16, 17, 0, 0, 0, 1, 0,
+ 0, 18, 19, 0, 0, 0, 20, 0, 0, 0, 1, 1, 1, 1, 0, 1,
+ 1, 1, 1, 1, 1, 1, 0, 8, 21, 9, 0, 0, 22, 0, 0, 0,
+ 0, 1, 0, 23, 24, 25, 0, 0, 26, 0, 0, 0, 8, 21, 27, 0,
+ 1, 0, 0, 1, 1, 1, 1, 0, 1, 28, 29, 30, 0, 31, 32, 20,
+ 1, 1, 0, 0, 0, 8, 21, 9, 1, 4, 5, 0, 0, 0, 33, 9,
+ 0, 1, 1, 1, 0, 8, 21, 21, 21, 21, 34, 1, 35, 21, 21, 21,
+ 9, 36, 0, 0, 37, 38, 1, 0, 39, 0, 0, 0, 1, 0, 1, 0,
+ 0, 0, 0, 8, 21, 9, 1, 0, 0, 0, 40, 0, 8, 21, 21, 21,
+ 21, 21, 21, 21, 21, 9, 0, 1, 1, 1, 1, 8, 21, 21, 21, 9,
+ 0, 0, 0, 41, 0, 42, 43, 0, 0, 0, 1, 44, 0, 0, 0, 45,
+ 8, 9, 1, 0, 0, 0, 8, 21, 21, 21, 9, 0, 1, 0, 1, 1,
+ 8, 21, 21, 9, 0, 4, 5, 8, 9, 1, 0, 0, 0, 1, 2, 3,
+ 4, 5, 6, 7, 7, 8, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
+ 9, 10, 11, 11, 11, 11, 12, 13, 13, 13, 13, 14, 15, 16, 17, 18,
+ 19, 20, 21, 13, 22, 13, 13, 13, 13, 23, 24, 24, 25, 26, 13, 13,
+ 13, 27, 28, 29, 13, 30, 31, 32, 33, 34, 35, 36, 7, 7, 7, 7,
+ 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
+ 37, 7, 38, 39, 7, 40, 7, 7, 7, 41, 13, 42, 7, 7, 43, 7,
+ 44, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
@@ -3501,419 +3479,221 @@ _hb_ucd_u8[17868] =
13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
- 13, 13, 13, 13, 45, 0, 0, 1, 2, 2, 2, 3, 4, 5, 6, 7,
- 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
- 24, 25, 26, 27, 28, 29, 30, 31, 32, 32, 33, 34, 35, 36, 37, 37,
- 37, 37, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50,
- 51, 52, 2, 2, 53, 54, 55, 56, 57, 58, 59, 59, 59, 59, 60, 59,
- 59, 59, 59, 59, 59, 59, 61, 61, 59, 59, 59, 59, 62, 63, 64, 65,
- 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 59, 70, 70,
- 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,
+ 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 45, 0, 0, 1,
+ 2, 2, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
+ 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
+ 32, 32, 33, 34, 35, 36, 37, 37, 37, 37, 37, 38, 39, 40, 41, 42,
+ 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 2, 2, 53, 54, 55, 56,
+ 57, 58, 59, 59, 59, 59, 60, 59, 59, 59, 59, 59, 59, 59, 61, 61,
+ 59, 59, 59, 59, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
+ 74, 75, 76, 77, 78, 59, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,
70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,
- 70, 79, 70, 70, 70, 70, 80, 80, 80, 80, 80, 80, 80, 80, 80, 81,
- 82, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 32, 32,
- 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32,
+ 70, 70, 70, 70, 70, 70, 70, 70, 70, 79, 70, 70, 70, 70, 80, 80,
+ 80, 80, 80, 80, 80, 80, 80, 81, 82, 82, 83, 84, 85, 86, 87, 88,
+ 89, 90, 91, 92, 93, 94, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32,
32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32,
- 32, 32, 32, 32, 32, 95, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96,
- 96, 96, 96, 96, 96, 96, 96, 96, 70, 70, 97, 98, 99,100,101,101,
- 102,103,104,105,106,107,108,109,110,111, 96,112,113,114,115,116,
- 117,118,119,119,120,121,122,123,124,125,126,127,128,129,130,131,
- 132, 96,133,134,135,136,137,138,139,140,141,142,143, 96,144,145,
- 96,146,147,148,149, 96,150,151,152,153,154,155,156, 96,157,158,
- 159,160, 96,161,162,163,164,164,164,164,164,164,164,165,166,164,
- 167, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96,
- 96, 96, 96, 96, 96,168,169,169,169,169,169,169,169,169,170, 96,
- 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96,171,171,
- 171,171,172, 96, 96, 96,173,173,173,173,174,175,176,177, 96, 96,
- 96, 96,178,179,180,181,182,182,182,182,182,182,182,182,182,182,
- 182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,
- 182,182,182,182,182,183,182,182,182,182,182,182,184,184,184,185,
- 186, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96,
- 96, 96, 96, 96, 96,187,188,189,190,191,191,192, 96, 96, 96, 96,
- 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96,193,194,
+ 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 95, 96, 96,
96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96,
- 96, 96, 96, 96,195,196, 59,197,198,199,200,201,202, 96,203,204,
- 205, 59, 59,206, 59,207,208,208,208,208,208,209, 96, 96, 96, 96,
- 96, 96, 96, 96,210, 96,211,212,213, 96, 96,214, 96, 96, 96,215,
- 96, 96, 96, 96, 96,216,217,218,219, 96, 96, 96, 96, 96,220,221,
- 222, 96,223,224, 96, 96,225,226, 59,227,228, 96, 59, 59, 59, 59,
- 59, 59, 59,229,230,231,232,233, 59, 59,234,235, 59,236, 96, 96,
- 96, 96, 96, 96, 96, 96, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,
- 70, 70, 70,237, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,
- 70, 70, 70, 70,238, 70,239, 70, 70, 70, 70, 70, 70, 70, 70, 70,
+ 70, 70, 97, 98, 99,100,101,101,102,103,104,105,106,107,108,109,
+ 110,111, 96,112,113,114,115,116,117,118,119,119,120,121,122,123,
+ 124,125,126,127,128,129,130,131,132, 96,133,134,135,136,137,138,
+ 139,140,141,142,143, 96,144,145, 96,146,147,148,149, 96,150,151,
+ 152,153,154,155,156, 96,157,158,159,160, 96,161,162,163,164,164,
+ 164,164,164,164,164,165,166,164,167, 96, 96, 96, 96, 96, 96, 96,
+ 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96,168,169,169,
+ 169,169,169,169,169,169,170, 96, 96, 96, 96, 96, 96, 96, 96, 96,
+ 96, 96, 96, 96, 96, 96,171,171,171,171,172, 96, 96, 96,173,173,
+ 173,173,174,175,176,177, 96, 96, 96, 96,178,179,180,181,182,182,
+ 182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,
+ 182,182,182,182,182,182,182,182,182,182,182,182,182,183,182,182,
+ 182,182,182,182,184,184,184,185,186, 96, 96, 96, 96, 96, 96, 96,
+ 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96,187,188,189,
+ 190,191,191,192, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96,
+ 96, 96, 96, 96, 96, 96,193,194, 96, 96, 96, 96, 96, 96, 96, 96,
+ 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96,195,196, 59,197,
+ 198,199,200,201,202, 96,203,204,205, 59, 59,206, 59,207,208,208,
+ 208,208,208,209, 96, 96, 96, 96, 96, 96, 96, 96,210, 96,211,212,
+ 213, 96, 96,214, 96, 96, 96,215, 96, 96, 96, 96, 96,216,217,218,
+ 219, 96, 96, 96, 96, 96,220,221,222, 96,223,224, 96, 96,225,226,
+ 59,227,228, 96, 59, 59, 59, 59, 59, 59, 59,229,230,231,232,233,
+ 59, 59,234,235, 59,236, 96, 96, 96, 96, 96, 96, 96, 96, 70, 70,
+ 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,237, 70, 70, 70, 70,
+ 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,238, 70,239, 70,
70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,
- 70, 70, 70,240, 70, 70, 70, 70, 70, 70, 70, 70, 70,241, 96, 96,
- 96, 96, 96, 96, 96, 96, 70, 70, 70, 70,242, 96, 96, 96, 96, 96,
- 96, 96, 96, 96, 96, 96, 70, 70, 70, 70, 70, 70,243, 70, 70, 70,
- 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,244, 96, 96,
- 96, 96, 96, 96, 96, 96,245, 96,246,247, 0, 1, 2, 2, 0, 1,
- 2, 2, 2, 3, 4, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19,
- 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19,
- 19, 0, 0, 0, 0, 0, 0, 0, 19, 0, 0, 0, 0, 0, 19, 19,
- 19, 19, 19, 19, 19, 0, 19, 0, 0, 0, 0, 0, 0, 0, 19, 19,
- 19, 19, 19, 0, 0, 0, 0, 0, 26, 26, 0, 0, 0, 0, 1, 1,
- 1, 1, 1, 1, 1, 1, 9, 9, 9, 9, 0, 9, 9, 9, 2, 2,
- 9, 9, 9, 9, 0, 9, 2, 2, 2, 2, 9, 0, 9, 0, 9, 9,
- 9, 2, 9, 2, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
- 2, 9, 9, 9, 9, 9, 9, 9, 55, 55, 55, 55, 55, 55, 55, 55,
- 55, 55, 55, 55, 55, 55, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
- 6, 6, 6, 1, 1, 6, 2, 4, 4, 4, 4, 4, 4, 4, 4, 4,
- 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 4, 4,
- 4, 2, 2, 4, 4, 4, 2, 14, 14, 14, 14, 14, 14, 14, 14, 14,
- 14, 14, 14, 14, 14, 14, 2, 2, 2, 2, 2, 2, 2, 2, 14, 14,
- 14, 2, 2, 2, 2, 14, 14, 14, 14, 14, 14, 2, 2, 2, 3, 3,
- 3, 3, 3, 0, 3, 3, 3, 3, 3, 3, 0, 3, 3, 3, 3, 3,
- 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 3, 3, 3, 0, 0, 3,
- 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 3, 3, 1, 3, 3, 3, 3, 3, 3, 3, 37, 37,
- 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 2, 37, 37, 37,
- 37, 2, 2, 37, 37, 37, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38,
- 2, 2, 2, 2, 2, 2, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64,
- 64, 2, 2, 64, 64, 64, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90,
- 90, 90, 90, 90, 2, 2, 90, 90, 90, 90, 90, 90, 90, 2, 95, 95,
- 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 2, 2, 95, 2, 37, 37,
- 37, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 2, 3, 3,
- 2, 2, 2, 2, 2, 2, 3, 3, 0, 3, 3, 3, 3, 3, 7, 7,
- 7, 7, 7, 7, 7, 7, 7, 1, 1, 1, 1, 7, 7, 7, 7, 7,
- 7, 7, 0, 0, 7, 7, 5, 5, 5, 5, 2, 5, 5, 5, 5, 5,
- 5, 5, 5, 2, 2, 5, 5, 2, 2, 5, 5, 5, 5, 5, 5, 5,
- 5, 5, 5, 5, 5, 5, 5, 2, 5, 5, 5, 5, 5, 5, 5, 2,
- 5, 2, 2, 2, 5, 5, 5, 5, 2, 2, 5, 5, 5, 5, 5, 2,
- 2, 5, 5, 5, 5, 2, 2, 2, 2, 2, 2, 2, 2, 5, 2, 2,
- 2, 2, 5, 5, 2, 5, 5, 5, 5, 5, 2, 2, 5, 5, 5, 5,
- 5, 5, 5, 5, 5, 2, 2, 11, 11, 11, 2, 11, 11, 11, 11, 11,
- 11, 2, 2, 2, 2, 11, 11, 2, 2, 11, 11, 11, 11, 11, 11, 11,
- 11, 11, 11, 11, 11, 11, 11, 2, 11, 11, 11, 11, 11, 11, 11, 2,
- 11, 11, 2, 11, 11, 2, 11, 11, 2, 2, 11, 2, 11, 11, 11, 2,
- 2, 11, 11, 11, 2, 2, 2, 11, 2, 2, 2, 2, 2, 2, 2, 11,
- 11, 11, 11, 2, 11, 2, 2, 2, 2, 2, 2, 2, 11, 11, 11, 11,
- 11, 11, 11, 11, 11, 2, 2, 10, 10, 10, 2, 10, 10, 10, 10, 10,
- 10, 10, 10, 10, 2, 10, 10, 10, 2, 10, 10, 10, 10, 10, 10, 10,
- 10, 10, 10, 10, 10, 10, 10, 2, 10, 10, 10, 10, 10, 10, 10, 2,
- 10, 10, 2, 10, 10, 10, 10, 10, 2, 2, 10, 10, 10, 10, 10, 10,
- 2, 10, 10, 10, 2, 2, 10, 2, 2, 2, 2, 2, 2, 2, 10, 10,
- 10, 10, 2, 2, 10, 10, 10, 10, 2, 2, 2, 2, 2, 2, 2, 10,
- 10, 10, 10, 10, 10, 10, 2, 21, 21, 21, 2, 21, 21, 21, 21, 21,
- 21, 21, 21, 2, 2, 21, 21, 2, 2, 21, 21, 21, 21, 21, 21, 21,
- 21, 21, 21, 21, 21, 21, 21, 2, 21, 21, 21, 21, 21, 21, 21, 2,
- 21, 21, 2, 21, 21, 21, 21, 21, 2, 2, 21, 21, 21, 21, 21, 2,
- 2, 21, 21, 21, 2, 2, 2, 2, 2, 2, 2, 21, 21, 21, 2, 2,
- 2, 2, 21, 21, 2, 21, 21, 21, 21, 21, 2, 2, 21, 21, 2, 2,
- 22, 22, 2, 22, 22, 22, 22, 22, 22, 2, 2, 2, 22, 22, 22, 2,
- 22, 22, 22, 22, 2, 2, 2, 22, 22, 2, 22, 2, 22, 22, 2, 2,
- 2, 22, 22, 2, 2, 2, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22,
- 2, 2, 2, 2, 22, 22, 22, 2, 2, 2, 2, 2, 2, 22, 2, 2,
- 2, 2, 2, 2, 22, 22, 22, 22, 22, 2, 2, 2, 2, 2, 23, 23,
- 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 2, 23, 23, 23, 2,
- 23, 23, 23, 23, 23, 23, 23, 23, 2, 2, 23, 23, 23, 23, 23, 2,
- 23, 23, 23, 23, 2, 2, 2, 2, 2, 2, 2, 23, 23, 2, 23, 23,
- 23, 2, 2, 23, 2, 2, 23, 23, 23, 23, 2, 2, 23, 23, 2, 2,
- 2, 2, 2, 2, 2, 23, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
- 16, 16, 16, 2, 16, 16, 16, 2, 16, 16, 16, 16, 16, 16, 16, 16,
- 16, 16, 2, 16, 16, 16, 16, 16, 2, 2, 16, 16, 16, 16, 16, 2,
- 16, 16, 16, 16, 2, 2, 2, 2, 2, 2, 2, 16, 16, 2, 16, 16,
- 16, 16, 2, 2, 16, 16, 2, 16, 16, 16, 2, 2, 2, 2, 20, 20,
- 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 2, 20, 20, 20, 2,
- 20, 20, 20, 20, 20, 20, 2, 2, 2, 2, 20, 20, 20, 20, 20, 20,
- 20, 20, 2, 2, 20, 20, 2, 36, 36, 36, 2, 36, 36, 36, 36, 36,
- 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 2, 2, 2,
- 36, 36, 36, 36, 36, 36, 36, 36, 2, 36, 36, 36, 36, 36, 36, 36,
- 36, 36, 2, 36, 2, 2, 2, 2, 36, 2, 2, 2, 2, 36, 36, 36,
- 36, 36, 36, 2, 36, 2, 2, 2, 2, 2, 2, 2, 36, 36, 2, 2,
- 36, 36, 36, 2, 2, 2, 2, 24, 24, 24, 24, 24, 24, 24, 24, 24,
- 24, 24, 24, 24, 24, 24, 24, 24, 24, 2, 2, 2, 2, 0, 24, 24,
- 24, 24, 2, 2, 2, 2, 2, 18, 18, 2, 18, 2, 18, 18, 18, 18,
- 18, 2, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18,
- 18, 18, 2, 18, 2, 18, 18, 18, 18, 18, 18, 18, 2, 2, 18, 18,
- 18, 18, 18, 2, 18, 2, 18, 18, 18, 18, 18, 18, 18, 2, 18, 18,
- 2, 2, 18, 18, 18, 18, 25, 25, 25, 25, 25, 25, 25, 25, 2, 25,
- 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 2, 2, 2, 25, 25,
- 25, 25, 25, 2, 25, 25, 25, 25, 25, 25, 25, 0, 0, 0, 0, 25,
- 25, 2, 2, 2, 2, 2, 33, 33, 33, 33, 33, 33, 33, 33, 8, 8,
- 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 2, 8, 2, 2,
- 2, 2, 2, 8, 2, 2, 8, 8, 8, 0, 8, 8, 8, 8, 12, 12,
- 12, 12, 12, 12, 12, 12, 30, 30, 30, 30, 30, 30, 30, 30, 30, 2,
- 30, 30, 30, 30, 2, 2, 30, 30, 30, 30, 30, 30, 30, 2, 30, 30,
- 30, 2, 2, 30, 30, 30, 30, 30, 30, 30, 30, 2, 2, 2, 30, 30,
- 2, 2, 2, 2, 2, 2, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
- 29, 29, 29, 29, 2, 2, 28, 28, 28, 28, 28, 28, 28, 28, 34, 34,
- 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 2, 2, 2, 35, 35,
- 35, 35, 35, 35, 35, 35, 35, 35, 35, 0, 0, 0, 35, 35, 35, 2,
- 2, 2, 2, 2, 2, 2, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
- 45, 45, 45, 45, 2, 2, 2, 2, 2, 2, 2, 2, 2, 45, 44, 44,
- 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 0, 0, 2, 43, 43,
- 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 2, 2, 2, 2, 46, 46,
- 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 2, 46, 46, 46, 2,
- 46, 46, 2, 2, 2, 2, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31,
- 31, 31, 31, 31, 2, 2, 31, 31, 2, 2, 2, 2, 2, 2, 32, 32,
- 0, 0, 32, 0, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32,
- 2, 2, 2, 2, 2, 2, 32, 2, 2, 2, 2, 2, 2, 2, 32, 32,
- 32, 2, 2, 2, 2, 2, 28, 28, 28, 28, 28, 28, 2, 2, 48, 48,
- 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 2, 48, 48,
- 48, 48, 2, 2, 2, 2, 48, 2, 2, 2, 48, 48, 48, 48, 52, 52,
- 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 2, 2, 52, 52,
- 52, 52, 52, 2, 2, 2, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58,
- 58, 58, 2, 2, 2, 2, 58, 58, 2, 2, 2, 2, 2, 2, 58, 58,
- 58, 2, 2, 2, 58, 58, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54,
- 54, 54, 2, 2, 54, 54, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91,
- 91, 91, 91, 91, 91, 2, 91, 91, 91, 91, 91, 2, 2, 91, 91, 91,
- 2, 2, 2, 2, 2, 2, 91, 91, 91, 91, 91, 91, 2, 2, 1, 1,
- 1, 1, 1, 1, 1, 2, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62,
- 62, 62, 62, 2, 2, 2, 62, 62, 62, 62, 62, 62, 62, 2, 76, 76,
- 76, 76, 76, 76, 76, 76, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93,
- 93, 93, 2, 2, 2, 2, 2, 2, 2, 2, 93, 93, 93, 93, 70, 70,
- 70, 70, 70, 70, 70, 70, 2, 2, 2, 70, 70, 70, 70, 70, 70, 70,
- 2, 2, 2, 70, 70, 70, 73, 73, 73, 73, 73, 73, 73, 73, 6, 2,
- 2, 2, 2, 2, 2, 2, 8, 8, 8, 2, 2, 8, 8, 8, 1, 1,
- 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0,
- 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1,
- 0, 2, 2, 2, 2, 2, 19, 19, 19, 19, 19, 19, 9, 9, 9, 9,
- 9, 6, 19, 19, 19, 19, 19, 19, 19, 19, 19, 9, 9, 9, 9, 9,
- 19, 19, 19, 19, 9, 9, 9, 9, 9, 19, 19, 19, 19, 19, 6, 19,
- 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 9, 9, 9,
- 9, 9, 9, 9, 2, 2, 2, 9, 2, 9, 2, 9, 2, 9, 9, 9,
- 9, 9, 9, 2, 9, 9, 9, 9, 9, 9, 2, 2, 9, 9, 9, 9,
- 9, 9, 2, 9, 9, 9, 2, 2, 9, 9, 9, 2, 9, 9, 9, 9,
- 9, 9, 9, 9, 9, 2, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0,
- 0, 0, 0, 2, 0, 0, 0, 19, 2, 2, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 19, 0, 0, 0, 0, 0, 0, 0, 2, 19, 19,
- 19, 19, 19, 2, 2, 2, 0, 2, 2, 2, 2, 2, 2, 2, 1, 2,
- 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 9, 0, 0, 0,
- 19, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 0, 19, 0,
- 0, 0, 2, 2, 2, 2, 0, 0, 0, 2, 2, 2, 2, 2, 27, 27,
- 27, 27, 27, 27, 27, 27, 0, 0, 0, 0, 2, 2, 0, 0, 0, 0,
- 0, 0, 0, 0, 2, 0, 56, 56, 56, 56, 56, 56, 56, 56, 55, 55,
- 55, 55, 2, 2, 2, 2, 2, 55, 55, 55, 55, 55, 55, 55, 61, 61,
- 61, 61, 61, 61, 61, 61, 2, 2, 2, 2, 2, 2, 2, 61, 61, 2,
- 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 2, 2, 13, 13,
- 13, 13, 13, 13, 13, 13, 13, 13, 2, 13, 13, 13, 13, 13, 13, 13,
- 13, 13, 2, 2, 2, 2, 13, 13, 13, 13, 13, 13, 2, 2, 0, 0,
- 0, 0, 2, 2, 2, 2, 0, 0, 0, 0, 0, 13, 0, 13, 0, 13,
- 13, 13, 13, 13, 13, 13, 13, 13, 1, 1, 1, 1, 12, 12, 13, 13,
- 13, 13, 0, 0, 0, 0, 2, 15, 15, 15, 15, 15, 15, 15, 15, 15,
- 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 2, 2, 1,
- 1, 0, 0, 15, 15, 15, 0, 17, 17, 17, 17, 17, 17, 17, 17, 17,
- 17, 17, 17, 17, 17, 17, 17, 17, 17, 0, 0, 17, 17, 17, 2, 2,
- 2, 2, 2, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 2, 12,
- 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 2, 12, 12,
- 12, 12, 12, 12, 12, 0, 17, 17, 17, 17, 17, 17, 17, 0, 39, 39,
- 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 2, 2, 2, 39, 39,
- 39, 39, 39, 39, 39, 2, 86, 86, 86, 86, 86, 86, 86, 86, 77, 77,
- 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 2, 2, 2, 2, 79, 79,
- 79, 79, 79, 79, 79, 79, 0, 0, 19, 19, 19, 19, 19, 19, 0, 0,
- 0, 19, 19, 19, 19, 19, 19, 19, 19, 2, 2, 2, 2, 2, 19, 19,
- 2, 19, 2, 19, 19, 19, 19, 19, 2, 2, 2, 2, 2, 2, 2, 2,
- 19, 19, 19, 19, 19, 19, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60,
- 60, 60, 60, 2, 2, 2, 0, 0, 2, 2, 2, 2, 2, 2, 65, 65,
- 65, 65, 65, 65, 65, 65, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75,
- 75, 75, 75, 75, 2, 2, 2, 2, 2, 2, 2, 2, 75, 75, 75, 75,
- 2, 2, 2, 2, 2, 2, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
- 69, 69, 69, 69, 0, 69, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74,
- 74, 74, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 74, 12, 12,
- 12, 12, 12, 2, 2, 2, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84,
- 84, 84, 84, 84, 2, 0, 84, 84, 2, 2, 2, 2, 84, 84, 33, 33,
- 33, 33, 33, 33, 33, 2, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
- 68, 68, 68, 68, 68, 2, 68, 68, 68, 68, 68, 68, 2, 2, 68, 68,
- 2, 2, 68, 68, 68, 68, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92,
- 92, 2, 2, 2, 2, 2, 2, 2, 2, 92, 92, 92, 92, 92, 87, 87,
- 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 2, 2, 30,
- 30, 30, 30, 30, 30, 2, 19, 19, 19, 0, 19, 19, 19, 19, 19, 19,
- 19, 19, 19, 9, 19, 19, 19, 19, 0, 0, 2, 2, 2, 2, 87, 87,
- 87, 87, 87, 87, 2, 2, 87, 87, 2, 2, 2, 2, 2, 2, 12, 12,
- 12, 12, 2, 2, 2, 2, 2, 2, 2, 12, 12, 12, 12, 12, 13, 13,
- 2, 2, 2, 2, 2, 2, 19, 19, 19, 19, 19, 19, 19, 2, 2, 2,
- 2, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, 14, 14, 14, 14, 14,
- 14, 14, 14, 14, 14, 2, 14, 14, 14, 14, 14, 2, 14, 2, 14, 14,
- 2, 14, 14, 2, 14, 14, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2,
- 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 2, 2,
- 3, 3, 3, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2, 3, 1, 1,
- 1, 1, 1, 1, 6, 6, 0, 0, 0, 2, 0, 0, 0, 0, 3, 3,
- 3, 3, 3, 2, 3, 3, 3, 3, 3, 3, 3, 2, 2, 0, 2, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 17, 17, 17, 17,
- 17, 17, 17, 17, 0, 0, 2, 2, 12, 12, 12, 12, 12, 12, 2, 2,
- 12, 12, 12, 2, 2, 2, 2, 0, 0, 0, 0, 0, 2, 2, 49, 49,
- 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 2, 49, 49, 49, 49, 49,
- 49, 49, 49, 49, 49, 2, 49, 49, 49, 2, 49, 49, 2, 49, 49, 49,
- 49, 49, 49, 49, 2, 2, 49, 49, 49, 2, 2, 2, 2, 2, 0, 0,
- 0, 2, 2, 2, 2, 0, 0, 0, 0, 0, 2, 2, 2, 0, 0, 0,
- 0, 0, 0, 2, 2, 2, 9, 2, 2, 2, 2, 2, 2, 2, 0, 0,
- 0, 0, 0, 1, 2, 2, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
- 71, 71, 71, 2, 2, 2, 67, 67, 67, 67, 67, 67, 67, 67, 67, 2,
- 2, 2, 2, 2, 2, 2, 1, 0, 0, 0, 0, 0, 0, 0, 42, 42,
- 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 42, 42, 42, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41,
- 41, 2, 2, 2, 2, 2,118,118,118,118,118,118,118,118,118,118,
- 118, 2, 2, 2, 2, 2, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53,
- 53, 53, 53, 53, 2, 53, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59,
- 59, 59, 2, 2, 2, 2, 59, 59, 59, 59, 59, 59, 2, 2, 40, 40,
- 40, 40, 40, 40, 40, 40, 51, 51, 51, 51, 51, 51, 51, 51, 50, 50,
- 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 2, 2, 50, 50,
- 2, 2, 2, 2, 2, 2,135,135,135,135,135,135,135,135,135,135,
- 135,135, 2, 2, 2, 2,106,106,106,106,106,106,106,106,104,104,
- 104,104,104,104,104,104,104,104,104,104, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2,104,161,161,161,161,161,161,161,161,161,161,
- 161, 2,161,161,161,161,161,161,161, 2,161,161, 2,161,161,161,
- 2,161,161,161,161,161,161,161, 2,161,161, 2, 2, 2,110,110,
- 110,110,110,110,110,110,110,110,110,110,110,110,110, 2,110,110,
- 110,110,110,110, 2, 2, 19, 19, 19, 19, 19, 19, 2, 19, 19, 2,
- 19, 19, 19, 19, 19, 19, 47, 47, 47, 47, 47, 47, 2, 2, 47, 2,
- 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47,
- 47, 47, 47, 47, 2, 47, 47, 2, 2, 2, 47, 2, 2, 47, 81, 81,
- 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 2, 81,120,120,
- 120,120,120,120,120,120,116,116,116,116,116,116,116,116,116,116,
- 116,116,116,116,116, 2, 2, 2, 2, 2, 2, 2, 2,116,128,128,
- 128,128,128,128,128,128,128,128,128, 2,128,128, 2, 2, 2, 2,
- 2,128,128,128,128,128, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66,
- 66, 66, 2, 2, 2, 66, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72,
- 2, 2, 2, 2, 2, 72, 98, 98, 98, 98, 98, 98, 98, 98, 97, 97,
- 97, 97, 97, 97, 97, 97, 2, 2, 2, 2, 97, 97, 97, 97, 2, 2,
- 97, 97, 97, 97, 97, 97, 57, 57, 57, 57, 2, 57, 57, 2, 2, 2,
- 2, 2, 57, 57, 57, 57, 57, 57, 57, 57, 2, 57, 57, 57, 2, 57,
- 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57,
- 57, 57, 57, 57, 2, 2, 57, 57, 57, 2, 2, 2, 2, 57, 57, 2,
- 2, 2, 2, 2, 2, 2, 88, 88, 88, 88, 88, 88, 88, 88,117,117,
- 117,117,117,117,117,117,112,112,112,112,112,112,112,112,112,112,
- 112,112,112,112,112, 2, 2, 2, 2,112,112,112,112,112, 78, 78,
- 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 2, 2, 2, 78,
- 78, 78, 78, 78, 78, 78, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83,
- 83, 83, 83, 83, 2, 2, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82,
- 82, 2, 2, 2, 2, 2,122,122,122,122,122,122,122,122,122,122,
- 2, 2, 2, 2, 2, 2, 2,122,122,122,122, 2, 2, 2, 2,122,
- 122,122,122,122,122,122, 89, 89, 89, 89, 89, 89, 89, 89, 89, 2,
- 2, 2, 2, 2, 2, 2,130,130,130,130,130,130,130,130,130,130,
- 130, 2, 2, 2, 2, 2, 2, 2,130,130,130,130,130,130,144,144,
- 144,144,144,144,144,144,144,144, 2, 2, 2, 2, 2, 2,156,156,
- 156,156,156,156,156,156,156,156, 2,156,156,156, 2, 2,156,156,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3,147,147,
- 147,147,147,147,147,147,148,148,148,148,148,148,148,148,148,148,
- 2, 2, 2, 2, 2, 2,158,158,158,158,158,158,158,158,158,158,
- 2, 2, 2, 2, 2, 2,153,153,153,153,153,153,153,153,153,153,
- 153,153, 2, 2, 2, 2,149,149,149,149,149,149,149,149,149,149,
- 149,149,149,149,149, 2, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94,
- 94, 94, 94, 94, 2, 2, 2, 2, 94, 94, 94, 94, 94, 94, 2, 2,
- 2, 2, 2, 2, 2, 94, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85,
- 85, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 85, 2, 2,101,101,
- 101,101,101,101,101,101,101, 2, 2, 2, 2, 2, 2, 2,101,101,
- 2, 2, 2, 2, 2, 2, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96,
- 96, 96, 96, 2, 96, 96,111,111,111,111,111,111,111,111,111,111,
- 111,111,111,111,111, 2,100,100,100,100,100,100,100,100, 2, 36,
- 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 2, 2, 2,108,108,
- 108,108,108,108,108,108,108,108, 2,108,108,108,108,108,108,108,
- 2, 2, 2, 2, 2, 2,129,129,129,129,129,129,129, 2,129, 2,
- 129,129,129,129, 2,129,129,129,129,129,129,129,129,129,129,129,
- 129,129,129,129, 2,129,129,129, 2, 2, 2, 2, 2, 2,109,109,
- 109,109,109,109,109,109,109,109,109, 2, 2, 2, 2, 2,109,109,
- 2, 2, 2, 2, 2, 2,107,107,107,107, 2,107,107,107,107,107,
- 107,107,107, 2, 2,107,107, 2, 2,107,107,107,107,107,107,107,
- 107,107,107,107,107,107,107, 2,107,107,107,107,107,107,107, 2,
- 107,107, 2,107,107,107,107,107, 2, 1,107,107,107,107,107, 2,
- 2,107,107,107, 2, 2,107, 2, 2, 2, 2, 2, 2,107, 2, 2,
- 2, 2, 2,107,107,107,107,107,107,107, 2, 2,107,107,107,107,
- 107,107,107, 2, 2, 2,137,137,137,137,137,137,137,137,137,137,
- 137,137, 2,137,137,137,137,137, 2, 2, 2, 2, 2, 2,124,124,
- 124,124,124,124,124,124,124,124, 2, 2, 2, 2, 2, 2,123,123,
- 123,123,123,123,123,123,123,123,123,123,123,123, 2, 2,114,114,
- 114,114,114,114,114,114,114,114,114,114,114, 2, 2, 2,114,114,
- 2, 2, 2, 2, 2, 2, 32, 32, 32, 32, 32, 2, 2, 2,102,102,
- 102,102,102,102,102,102,102,102, 2, 2, 2, 2, 2, 2,126,126,
- 126,126,126,126,126,126,126,126,126, 2, 2,126,126,126,126,126,
- 126,126, 2, 2, 2, 2,126,126,126,126,126,126,126, 2,142,142,
- 142,142,142,142,142,142,142,142,142,142, 2, 2, 2, 2,125,125,
- 125,125,125,125,125,125,125,125,125, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2,125,154,154,154,154,154,154,154, 2, 2,154,
- 2, 2,154,154,154,154,154,154,154,154, 2,154,154, 2,154,154,
- 154,154,154,154,154,154,154,154,154,154,154,154, 2,154,154, 2,
- 2,154,154,154,154,154,154,154, 2, 2, 2, 2, 2, 2,150,150,
- 150,150,150,150,150,150, 2, 2,150,150,150,150,150,150,150,150,
- 150,150,150, 2, 2, 2,141,141,141,141,141,141,141,141,140,140,
- 140,140,140,140,140,140,140,140,140, 2, 2, 2, 2, 2,121,121,
- 121,121,121,121,121,121,121, 2, 2, 2, 2, 2, 2, 2, 7, 7,
- 2, 2, 2, 2, 2, 2,133,133,133,133,133,133,133,133,133, 2,
- 133,133,133,133,133,133,133,133,133,133,133,133,133, 2,133,133,
- 133,133,133,133, 2, 2,133,133,133,133,133, 2, 2, 2,134,134,
- 134,134,134,134,134,134, 2, 2,134,134,134,134,134,134, 2,134,
- 134,134,134,134,134,134,134,134,134,134,134,134,134, 2,138,138,
- 138,138,138,138,138, 2,138,138, 2,138,138,138,138,138,138,138,
- 138,138,138,138,138,138, 2, 2,138, 2,138,138, 2,138,138,138,
- 2, 2, 2, 2, 2, 2,143,143,143,143,143,143, 2,143,143, 2,
- 143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,
- 143,143,143,143,143, 2,143,143, 2,143,143,143,143,143,143, 2,
- 2, 2, 2, 2, 2, 2,143,143, 2, 2, 2, 2, 2, 2,145,145,
- 145,145,145,145,145,145,145, 2, 2, 2, 2, 2, 2, 2,163,163,
- 163,163,163,163,163,163,163, 2,163,163,163,163,163,163,163,163,
- 163, 2, 2, 2,163,163,163,163, 2, 2, 2, 2, 2, 2, 86, 2,
- 2, 2, 2, 2, 2, 2, 22, 22, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 22, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63,
- 2, 2, 2, 2, 2, 2, 63, 63, 63, 63, 63, 63, 63, 2, 63, 63,
- 63, 63, 63, 2, 2, 2, 63, 63, 63, 63, 2, 2, 2, 2,157,157,
- 157,157,157,157,157,157,157,157,157, 2, 2, 2, 2, 2, 80, 80,
- 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 2, 2,127,127,
- 127,127,127,127,127,127,127,127,127,127,127,127,127, 2, 79, 2,
- 2, 2, 2, 2, 2, 2,115,115,115,115,115,115,115,115,115,115,
+ 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,240, 70, 70, 70, 70,
+ 70, 70, 70, 70, 70,241, 96, 96, 96, 96, 96, 96, 96, 96, 70, 70,
+ 70, 70,242, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 70, 70,
+ 70, 70, 70, 70,243, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70,
+ 70, 70, 70, 70, 70,244, 96, 96, 96, 96, 96, 96, 96, 96,245, 96,
+ 246,247, 0, 1, 2, 2, 0, 1, 2, 2, 2, 3, 4, 5, 0, 0,
+ 0, 0, 0, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 0, 0, 0,
+ 19, 0, 19, 0, 0, 0, 0, 0, 26, 26, 1, 1, 1, 1, 9, 9,
+ 9, 9, 0, 9, 9, 9, 2, 2, 9, 9, 9, 9, 0, 9, 2, 2,
+ 2, 2, 9, 0, 9, 0, 9, 9, 9, 2, 9, 2, 9, 9, 9, 9,
+ 2, 9, 9, 9, 55, 55, 55, 55, 55, 55, 6, 6, 6, 6, 6, 1,
+ 1, 6, 2, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 14,
+ 14, 14, 14, 14, 14, 14, 14, 14, 14, 2, 2, 2, 2, 14, 14, 2,
+ 2, 2, 3, 3, 3, 3, 3, 0, 3, 3, 0, 3, 3, 3, 3, 3,
+ 3, 0, 3, 3, 3, 1, 1, 1, 3, 3, 1, 3, 3, 3, 37, 37,
+ 37, 37, 37, 37, 2, 37, 37, 37, 37, 2, 2, 37, 37, 37, 38, 38,
+ 38, 38, 38, 38, 2, 2, 64, 64, 64, 64, 64, 64, 64, 2, 2, 64,
+ 64, 64, 90, 90, 90, 90, 90, 90, 2, 2, 90, 90, 90, 2, 95, 95,
+ 95, 95, 2, 2, 95, 2, 3, 3, 3, 2, 3, 3, 2, 2, 3, 3,
+ 0, 3, 7, 7, 7, 7, 7, 1, 1, 1, 1, 7, 7, 7, 0, 0,
+ 7, 7, 5, 5, 5, 5, 2, 5, 5, 5, 5, 2, 2, 5, 5, 2,
+ 5, 5, 5, 2, 5, 2, 2, 2, 5, 5, 5, 5, 2, 2, 5, 5,
+ 5, 2, 2, 2, 2, 5, 5, 5, 2, 5, 2, 11, 11, 11, 11, 11,
+ 11, 2, 2, 2, 2, 11, 11, 2, 2, 11, 11, 11, 11, 11, 11, 2,
+ 11, 11, 2, 11, 11, 2, 11, 11, 2, 2, 2, 11, 2, 2, 11, 2,
+ 11, 2, 2, 2, 11, 11, 2, 10, 10, 10, 10, 10, 10, 10, 10, 10,
+ 2, 10, 10, 2, 10, 10, 10, 10, 2, 2, 10, 2, 2, 2, 2, 2,
+ 10, 10, 2, 21, 21, 21, 21, 21, 21, 21, 21, 2, 2, 21, 21, 2,
+ 21, 21, 21, 21, 2, 2, 21, 21, 2, 21, 2, 2, 21, 21, 2, 2,
+ 22, 22, 2, 22, 22, 22, 22, 22, 22, 2, 22, 2, 22, 22, 22, 22,
+ 2, 2, 2, 22, 22, 2, 2, 2, 2, 22, 22, 2, 2, 2, 22, 22,
+ 22, 22, 23, 23, 23, 23, 23, 2, 23, 23, 23, 23, 2, 2, 2, 23,
+ 23, 2, 23, 23, 23, 2, 2, 23, 2, 2, 2, 2, 23, 23, 2, 2,
+ 2, 23, 16, 16, 16, 16, 16, 2, 16, 16, 2, 16, 16, 16, 16, 16,
+ 2, 2, 2, 16, 16, 2, 2, 2, 16, 16, 20, 20, 20, 20, 20, 2,
+ 20, 20, 2, 2, 20, 20, 2, 36, 36, 36, 36, 36, 36, 36, 36, 36,
+ 36, 2, 2, 2, 36, 36, 36, 36, 2, 36, 2, 36, 2, 2, 2, 2,
+ 36, 2, 2, 2, 2, 36, 36, 2, 36, 2, 36, 2, 2, 2, 2, 24,
+ 24, 24, 24, 24, 24, 24, 24, 24, 24, 2, 2, 2, 2, 0, 2, 18,
+ 18, 2, 18, 2, 18, 18, 18, 18, 18, 2, 18, 18, 18, 18, 2, 18,
+ 2, 18, 18, 18, 2, 2, 18, 2, 18, 2, 25, 25, 25, 25, 2, 25,
+ 25, 25, 25, 2, 2, 2, 25, 2, 25, 25, 25, 0, 0, 0, 0, 25,
+ 25, 2, 33, 33, 33, 33, 8, 8, 8, 8, 8, 8, 2, 8, 2, 8,
+ 2, 2, 8, 8, 8, 0, 12, 12, 12, 12, 30, 30, 30, 30, 30, 2,
+ 30, 30, 30, 30, 2, 2, 30, 30, 30, 2, 2, 30, 30, 30, 30, 2,
+ 2, 2, 29, 29, 29, 29, 29, 29, 2, 2, 28, 28, 28, 28, 34, 34,
+ 34, 34, 34, 2, 2, 2, 35, 35, 35, 35, 35, 35, 35, 0, 0, 0,
+ 35, 35, 35, 2, 2, 2, 45, 45, 45, 45, 45, 45, 2, 2, 2, 2,
+ 2, 45, 44, 44, 44, 44, 44, 0, 0, 2, 43, 43, 43, 43, 46, 46,
+ 46, 46, 46, 2, 46, 46, 31, 31, 31, 31, 31, 31, 2, 2, 32, 32,
+ 0, 0, 32, 0, 32, 32, 32, 32, 32, 32, 32, 32, 2, 2, 32, 2,
+ 2, 2, 32, 32, 32, 2, 28, 28, 2, 2, 48, 48, 48, 48, 48, 48,
+ 48, 2, 48, 2, 2, 2, 52, 52, 52, 52, 52, 52, 2, 2, 52, 2,
+ 2, 2, 58, 58, 58, 58, 58, 58, 2, 2, 58, 58, 58, 2, 2, 2,
+ 58, 58, 54, 54, 54, 54, 2, 2, 54, 54, 91, 91, 91, 91, 91, 91,
+ 91, 2, 91, 2, 2, 91, 91, 91, 2, 2, 1, 1, 1, 2, 62, 62,
+ 62, 62, 62, 2, 2, 2, 62, 62, 62, 2, 76, 76, 76, 76, 93, 93,
+ 93, 93, 70, 70, 70, 70, 2, 2, 2, 70, 70, 70, 2, 2, 2, 70,
+ 70, 70, 73, 73, 73, 73, 6, 2, 2, 2, 8, 8, 8, 2, 2, 8,
+ 8, 8, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1,
+ 0, 0, 1, 1, 0, 2, 19, 19, 9, 9, 9, 9, 9, 6, 19, 9,
+ 9, 9, 9, 9, 19, 19, 9, 9, 9, 19, 6, 19, 19, 19, 19, 19,
+ 19, 9, 9, 9, 2, 2, 2, 9, 2, 9, 2, 9, 9, 9, 1, 1,
+ 0, 0, 0, 2, 0, 0, 0, 19, 2, 2, 0, 0, 0, 19, 0, 0,
+ 0, 2, 19, 2, 2, 2, 0, 2, 2, 2, 1, 2, 2, 2, 0, 0,
+ 9, 0, 0, 0, 19, 19, 27, 27, 27, 27, 2, 2, 0, 0, 0, 0,
+ 2, 0, 56, 56, 56, 56, 2, 55, 55, 55, 61, 61, 61, 61, 2, 2,
+ 2, 61, 61, 2, 2, 2, 0, 0, 2, 2, 13, 13, 13, 13, 13, 13,
+ 2, 13, 13, 13, 2, 2, 0, 13, 0, 13, 0, 13, 13, 13, 13, 13,
+ 1, 1, 1, 1, 12, 12, 2, 15, 15, 15, 15, 15, 15, 15, 15, 15,
+ 15, 2, 2, 1, 1, 0, 0, 15, 15, 15, 0, 17, 17, 17, 17, 17,
+ 17, 17, 17, 17, 17, 0, 2, 26, 26, 26, 26, 26, 26, 26, 2, 12,
+ 12, 12, 12, 12, 12, 2, 12, 12, 12, 0, 39, 39, 39, 39, 39, 2,
+ 2, 2, 39, 39, 39, 2, 86, 86, 86, 86, 77, 77, 77, 77, 79, 79,
+ 79, 79, 19, 19, 19, 2, 19, 19, 2, 19, 2, 19, 19, 19, 19, 19,
+ 2, 2, 2, 2, 19, 19, 60, 60, 60, 60, 60, 2, 2, 2, 65, 65,
+ 65, 65, 75, 75, 75, 75, 75, 75, 2, 2, 2, 2, 75, 75, 69, 69,
+ 69, 69, 69, 69, 0, 69, 74, 74, 74, 74, 2, 2, 2, 74, 12, 2,
+ 2, 2, 84, 84, 84, 84, 84, 84, 2, 0, 84, 84, 2, 2, 2, 2,
+ 84, 84, 33, 33, 33, 2, 68, 68, 68, 68, 68, 68, 68, 2, 68, 68,
+ 2, 2, 92, 92, 92, 92, 92, 92, 92, 2, 2, 2, 2, 92, 87, 87,
+ 87, 87, 87, 87, 87, 2, 19, 9, 19, 19, 19, 19, 0, 0, 87, 87,
+ 2, 2, 2, 2, 2, 12, 2, 2, 2, 4, 14, 2, 14, 2, 14, 14,
+ 2, 14, 14, 2, 14, 14, 2, 2, 2, 3, 3, 3, 0, 0, 2, 2,
+ 3, 3, 1, 1, 6, 6, 3, 2, 3, 3, 3, 2, 2, 0, 2, 0,
+ 0, 0, 0, 0, 17, 17, 17, 17, 0, 0, 2, 2, 12, 12, 49, 49,
+ 49, 49, 2, 49, 49, 49, 49, 49, 49, 2, 49, 49, 2, 49, 49, 49,
+ 2, 2, 9, 2, 2, 2, 0, 1, 2, 2, 71, 71, 71, 71, 71, 2,
+ 2, 2, 67, 67, 67, 67, 67, 2, 2, 2, 42, 42, 42, 42, 2, 42,
+ 42, 42, 41, 41, 41, 41, 41, 41, 41, 2,118,118,118,118,118,118,
+ 118, 2, 53, 53, 53, 53, 53, 53, 2, 53, 59, 59, 59, 59, 59, 59,
+ 2, 2, 40, 40, 40, 40, 51, 51, 51, 51, 50, 50, 50, 50, 50, 50,
+ 2, 2,135,135,135,135,106,106,106,106,104,104,104,104, 2, 2,
+ 2,104,161,161,161,161,161,161,161, 2,161,161, 2,161,161, 2,
+ 2, 2,110,110,110,110,110,110,110, 2,110,110, 2, 2, 19, 2,
+ 19, 19, 47, 47, 47, 47, 47, 47, 2, 2, 47, 2, 47, 47, 47, 47,
+ 2, 47, 47, 2, 2, 2, 47, 2, 2, 47, 81, 81, 81, 81, 81, 81,
+ 2, 81,120,120,120,120,116,116,116,116,116,116,116, 2, 2, 2,
+ 2,116,128,128,128,128,128,128,128, 2,128,128, 2, 2, 2, 2,
+ 2,128, 66, 66, 66, 66, 2, 2, 2, 66, 72, 72, 72, 72, 72, 72,
+ 2, 2, 2, 2, 2, 72, 98, 98, 98, 98, 97, 97, 97, 97, 2, 2,
+ 97, 97, 57, 57, 57, 57, 2, 57, 57, 2, 2, 57, 57, 57, 57, 57,
+ 2, 2, 57, 57, 57, 2, 2, 2, 2, 57, 57, 2, 2, 2, 88, 88,
+ 88, 88,117,117,117,117,112,112,112,112,112,112,112, 2, 2, 2,
+ 2,112, 78, 78, 78, 78, 78, 78, 2, 2, 2, 78, 78, 78, 83, 83,
+ 83, 83, 83, 83, 2, 2, 82, 82, 82, 82, 82, 82, 82, 2,122,122,
+ 122,122,122,122, 2, 2, 2,122,122,122,122, 2, 2, 2, 89, 89,
+ 89, 89, 89, 2, 2, 2,130,130,130,130,130,130,130, 2, 2, 2,
+ 130,130,144,144,144,144,144,144, 2, 2,156,156,156,156,156,156,
+ 2,156,156,156, 2, 2, 2, 3, 3, 3,147,147,147,147,148,148,
+ 148,148,148,148, 2, 2,158,158,158,158,158,158, 2, 2,153,153,
+ 153,153,149,149,149,149,149,149,149, 2, 94, 94, 94, 94, 94, 94,
+ 2, 2, 2, 2, 94, 94, 2, 2, 2, 94, 85, 85, 85, 85, 85, 85,
+ 85, 2, 2, 85, 2, 2,101,101,101,101,101, 2, 2, 2,101,101,
+ 2, 2, 96, 96, 96, 96, 96, 2, 96, 96,111,111,111,111,111,111,
+ 111, 2,100,100,100,100,108,108,108,108,108,108, 2,108,108,108,
+ 2, 2,129,129,129,129,129,129,129, 2,129, 2,129,129,129,129,
+ 2,129,129,129, 2, 2,109,109,109,109,109,109,109, 2,109,109,
+ 2, 2,107,107,107,107, 2,107,107,107,107, 2, 2,107,107, 2,
+ 107,107,107,107, 2, 1,107,107, 2, 2,107, 2, 2, 2, 2, 2,
+ 2,107, 2, 2,107,107,137,137,137,137, 2,137,137,137,137,137,
+ 2, 2,124,124,124,124,124,124, 2, 2,123,123,123,123,123,123,
+ 2, 2,114,114,114,114,114, 2, 2, 2,114,114, 2, 2,102,102,
+ 102,102,102,102, 2, 2,126,126,126,126,126,126,126, 2, 2,126,
+ 126,126,142,142,142,142,125,125,125,125,125,125,125, 2, 2, 2,
+ 2,125,154,154,154,154,154,154,154, 2, 2,154, 2, 2, 2,154,
+ 154, 2,154,154, 2,154,154, 2, 2,154,154,154, 2, 2,150,150,
+ 150,150, 2, 2,150,150,150, 2, 2, 2,141,141,141,141,140,140,
+ 140,140,140,140,140, 2,121,121,121,121,121, 2, 2, 2, 7, 7,
+ 2, 2,133,133,133,133,133, 2,133,133,133,133,133, 2,133,133,
+ 2, 2,133, 2, 2, 2,134,134,134,134, 2, 2,134,134, 2,134,
+ 134,134,134,134,134, 2,138,138,138,138,138,138,138, 2,138,138,
+ 2,138, 2, 2,138, 2,138,138, 2, 2,143,143,143,143,143,143,
+ 2,143,143, 2,143,143,143,143,143, 2,143, 2, 2, 2,143,143,
+ 2, 2,145,145,145,145,145, 2, 2, 2,163,163,163,163,163, 2,
+ 163,163,163,163,163, 2, 2, 2,163,163,163,163, 2, 2, 86, 2,
+ 2, 2, 63, 63, 63, 63, 63, 63, 2, 2, 63, 63, 63, 2, 63, 2,
+ 2, 2,157,157,157,157,157,157,157, 2, 80, 80, 80, 80, 80, 80,
+ 2, 2,127,127,127,127,127,127,127, 2, 79, 2, 2, 2,115,115,
115,115,115,115,115, 2,115,115, 2, 2, 2, 2,115,115,159,159,
- 159,159,159,159,159,159,159,159,159,159,159,159,159, 2,159,159,
- 2, 2, 2, 2, 2, 2,103,103,103,103,103,103,103,103,103,103,
- 103,103,103,103, 2, 2,119,119,119,119,119,119,119,119,119,119,
- 119,119,119,119, 2, 2,119,119, 2,119,119,119,119,119, 2, 2,
- 2, 2, 2,119,119,119,146,146,146,146,146,146,146,146,146,146,
- 146, 2, 2, 2, 2, 2, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99,
- 99, 2, 2, 2, 2, 99, 2, 2, 2, 2, 2, 2, 2, 99,136,139,
- 13, 13,155, 2, 2, 2,136,136,136,136,136,136,136,136,155,155,
- 155,155,155,155,155,155,155,155,155,155,155,155, 2, 2,136, 2,
- 2, 2, 2, 2, 2, 2, 17, 17, 17, 17, 2, 17, 17, 17, 17, 17,
- 17, 17, 2, 17, 17, 2, 17, 15, 15, 15, 15, 15, 15, 15, 17, 17,
- 17, 2, 2, 2, 2, 2, 2, 2, 15, 2, 2, 2, 2, 2, 15, 15,
- 15, 2, 2, 17, 2, 2, 2, 2, 2, 2, 17, 17, 17, 17,139,139,
- 139,139,139,139,139,139,139,139,139,139, 2, 2, 2, 2,105,105,
- 105,105,105,105,105,105,105,105,105, 2, 2, 2, 2, 2,105,105,
- 105,105,105, 2, 2, 2,105, 2, 2, 2, 2, 2, 2, 2,105,105,
- 2, 2,105,105,105,105, 1, 1, 1, 1, 1, 1, 2, 2, 0, 0,
- 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1,
- 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 2, 2,
- 0, 2, 2, 0, 0, 2, 2, 0, 0, 0, 0, 2, 0, 0, 0, 0,
- 2, 0, 2, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0,
- 0, 2, 2, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 2, 0, 0,
- 0, 0, 0, 2, 0, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 2,
- 0, 0, 0, 0, 0, 0,131,131,131,131,131,131,131,131,131,131,
- 131,131, 2, 2, 2, 2, 2, 2, 2,131,131,131,131,131, 2,131,
- 131,131,131,131,131,131, 2, 2, 2, 2, 2, 19, 19, 19, 56, 56,
- 56, 56, 56, 56, 56, 2, 56, 2, 2, 56, 56, 56, 56, 56, 56, 56,
- 2, 56, 56, 2, 56, 56, 56, 56, 56, 2, 2, 2, 2, 2, 6, 6,
- 6, 6, 6, 6, 2, 2, 2, 2, 2, 2, 2, 2, 2, 6,151,151,
- 151,151,151,151,151,151,151,151,151,151,151, 2, 2, 2,151,151,
- 151,151,151,151, 2, 2,151,151, 2, 2, 2, 2,151,151,160,160,
- 160,160,160,160,160,160,160,160,160,160,160,160,160, 2,152,152,
- 152,152,152,152,152,152,152,152, 2, 2, 2, 2, 2,152,164,164,
- 164,164,164,164,164,164,164,164, 2, 2, 2, 2, 2, 2, 30, 30,
- 30, 30, 2, 30, 30, 2,113,113,113,113,113,113,113,113,113,113,
- 113,113,113, 2, 2,113,113,113,113,113,113,113,113, 2,132,132,
- 132,132,132,132,132,132,132,132,132,132, 2, 2, 2, 2,132,132,
- 2, 2, 2, 2,132,132, 3, 3, 3, 3, 2, 3, 3, 3, 2, 3,
- 3, 2, 3, 2, 2, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3,
- 3, 2, 3, 3, 3, 3, 2, 3, 2, 3, 2, 2, 2, 2, 2, 2,
- 3, 2, 2, 2, 2, 3, 2, 3, 2, 3, 2, 3, 3, 3, 2, 3,
- 2, 3, 2, 3, 2, 3, 2, 3, 3, 3, 3, 2, 3, 2, 3, 3,
- 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 2, 2, 2, 2, 3,
- 3, 3, 2, 3, 3, 3, 2, 2, 2, 2, 2, 2, 0, 0, 15, 0,
- 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 2, 2,
- 2, 0, 0, 0, 0, 0, 13, 2, 2, 2, 2, 2, 2, 2, 13, 13,
- 13, 2, 2, 2, 2, 2, 2, 0, 2, 2, 2, 2, 2, 2, 0, 1,
- 2, 3, 4, 5, 6, 7, 8, 9, 9, 9, 9, 10, 9, 11, 12, 13,
- 9, 9, 9, 14, 9, 9, 15, 9, 9, 9, 9, 9, 9, 9, 9, 9,
+ 159,159,159,159,159, 2,159,159, 2, 2,103,103,103,103,103,103,
+ 2, 2,119,119,119,119,119,119, 2, 2,119,119, 2,119, 2,119,
+ 119,119,146,146,146,146,146,146,146, 2, 99, 99, 99, 99, 99, 99,
+ 99, 2, 2, 2, 2, 99,136,139, 13, 13,155, 2, 2, 2,136,136,
+ 136,136,155,155,155,155,155,155, 2, 2,136, 2, 2, 2, 2, 17,
+ 17, 17, 2, 17, 17, 2, 17, 15, 15, 15, 17, 17, 17, 2, 2, 2,
+ 15, 2, 2, 17, 2, 2,139,139,139,139,105,105,105,105,105,105,
+ 105, 2,105, 2, 2, 2,105,105, 2, 2, 1, 1, 2, 2, 0, 0,
+ 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 2, 2, 0, 2, 2, 0,
+ 0, 2, 0, 2, 0, 2,131,131,131,131, 2, 2, 2,131, 2,131,
+ 131,131, 56, 56, 56, 2, 56, 2, 2, 56, 56, 56, 2, 56, 56, 2,
+ 56, 56, 6, 6, 2, 2, 2, 2, 2, 6,151,151,151,151,151, 2,
+ 2, 2,151,151, 2, 2, 2, 2,151,151,160,160,160,160,160,160,
+ 160, 2,152,152,152,152,152,152, 2, 2, 2, 2, 2,152,164,164,
+ 164,164,164,164, 2, 2, 2, 30, 30, 2,113,113,113,113,113, 2,
+ 2,113,113,113,113, 2,132,132,132,132,132,132, 2, 2, 2, 2,
+ 132,132, 2, 3, 3, 2, 3, 2, 2, 3, 2, 3, 2, 3, 2, 2,
+ 3, 2, 3, 2, 3, 2, 3, 3, 2, 3, 15, 0, 0, 2, 13, 2,
+ 2, 2, 13, 13, 13, 2, 2, 0, 2, 2, 0, 1, 2, 3, 4, 5,
+ 6, 7, 8, 9, 9, 9, 9, 10, 9, 11, 12, 13, 9, 9, 9, 14,
+ 9, 9, 15, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
- 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 16, 17, 9, 9, 9, 9,
- 9, 9, 9, 9, 9, 9, 18, 19, 20, 9, 21, 9, 9, 9, 9, 9,
+ 9, 9, 9, 9, 9, 9, 16, 17, 9, 9, 9, 9, 9, 9, 9, 9,
+ 9, 9, 18, 19, 20, 9, 21, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
- 9, 9, 9, 9, 9, 9, 22, 9, 9, 9, 9, 9, 9, 9, 9, 9,
+ 9, 9, 22, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
@@ -3922,60 +3702,60 @@ _hb_ucd_u8[17868] =
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
- 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 23, 24, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8,
- 9, 10, 11, 12, 0, 0, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 23, 0, 0, 24, 25, 26, 27, 28, 29, 30, 0, 0, 31, 32, 0, 33,
- 0, 34, 0, 35, 0, 0, 0, 0, 36, 37, 38, 39, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 40, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 41, 42, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 43, 44,
- 0, 45, 0, 0, 0, 0, 0, 0, 46, 47, 0, 0, 0, 0, 0, 48,
- 0, 49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 50, 51, 0, 0, 0, 52, 0, 0, 53, 0, 0, 0, 0, 0, 0, 0,
- 54, 0, 0, 0, 0, 0, 0, 0, 55, 0, 0, 0, 0, 0, 0, 0,
- 56, 0, 0, 0, 0, 0, 0, 0, 0, 57, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 58, 59, 60, 61, 62, 63, 64, 65, 0, 0, 0, 0, 0, 0, 66, 0,
+ 9, 9, 9, 9, 9, 9, 23, 24, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12,
+ 0, 0, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, 0, 0, 24,
+ 25, 26, 27, 28, 29, 30, 0, 0, 31, 32, 0, 33, 0, 34, 0, 35,
+ 0, 0, 0, 0, 36, 37, 38, 39, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 40, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 41, 42, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 43, 44, 0, 45, 0, 0,
+ 0, 0, 0, 0, 46, 47, 0, 0, 0, 0, 0, 48, 0, 49, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 50, 51, 0, 0,
+ 0, 52, 0, 0, 53, 0, 0, 0, 0, 0, 0, 0, 54, 0, 0, 0,
+ 0, 0, 0, 0, 55, 0, 0, 0, 0, 0, 0, 0, 56, 0, 0, 0,
+ 0, 0, 0, 0, 0, 57, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 58, 59, 60, 61,
+ 62, 63, 64, 65, 0, 0, 0, 0, 0, 0, 66, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 67, 68, 0, 69,
- 70, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 71, 72, 73, 74,
- 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90,
- 91, 92, 93, 94, 95, 96, 97, 98, 99,100,101,102,103, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,104, 0,
- 0, 0, 0, 0, 0,105,106, 0,107, 0, 0, 0,108, 0,109, 0,
- 110, 0,111,112,113, 0,114, 0, 0, 0,115, 0, 0, 0,116, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0,117, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 67, 68, 0, 69, 70, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 71, 72, 73, 74, 75, 76, 77, 78,
+ 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94,
+ 95, 96, 97, 98, 99,100,101,102,103, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,104, 0, 0, 0, 0, 0,
+ 0,105,106, 0,107, 0, 0, 0,108, 0,109, 0,110, 0,111,112,
+ 113, 0,114, 0, 0, 0,115, 0, 0, 0,116, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 118,119,120,121, 0,122,123,124,125,126, 0,127, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0,117, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,118,119,120,121,
+ 0,122,123,124,125,126, 0,127, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,128,129,130,131,
- 132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,
- 148,149,150,151,152,153,154,155,156,157, 0, 0, 0,158,159,160,
- 161, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0,162,163, 0, 0, 0, 0, 0, 0, 0,164, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0,128,129,130,131,132,133,134,135,
+ 136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,
+ 152,153,154,155,156,157, 0, 0, 0,158,159,160,161, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0,162,163, 0, 0, 0, 0, 0, 0, 0,164, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 165, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0,166, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0,167, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,168,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,165, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0,166, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0,167, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,168, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0,169,170, 0, 0, 0, 0,171,172, 0, 0, 0,173,174,175,176,
- 177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,
- 193,194,195,196,197,198,199,200,201,202,203,204,205,206, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,169,170, 0,
+ 0, 0, 0,171,172, 0, 0, 0,173,174,175,176,177,178,179,180,
+ 181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,
+ 197,198,199,200,201,202,203,204,205,206, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 4,
+ 0, 0, 0, 0, 1, 2, 3, 4,
};
static const uint16_t
-_hb_ucd_u16[9320] =
+_hb_ucd_u16[10040] =
{
0, 0, 1, 2, 3, 4, 5, 6, 0, 0, 7, 8, 9, 10, 11, 12,
13, 13, 13, 14, 15, 13, 13, 16, 17, 18, 19, 20, 21, 22, 13, 23,
@@ -4104,255 +3884,300 @@ _hb_ucd_u16[9320] =
48, 48, 48, 48, 71, 48, 48, 48, 48, 48, 48, 140, 140, 140, 140, 140,
683, 140, 570, 570, 570, 570, 570, 570, 32, 32, 32, 32, 32, 32, 32, 32,
32, 32, 32, 32, 32, 32, 32, 140, 391, 391, 391, 391, 391, 391, 391, 684,
- 391, 391, 391, 391, 391, 391, 391, 685, 0, 0, 0, 0, 0, 0, 0, 0,
- 1, 2, 2, 3, 1, 2, 2, 3, 0, 0, 0, 0, 0, 4, 0, 4,
- 2, 2, 5, 2, 2, 2, 5, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 6,
- 0, 0, 0, 0, 7, 8, 0, 0, 9, 9, 9, 9, 9, 9, 9, 9,
- 9, 9, 9, 9, 9, 9, 10, 11, 12, 13, 14, 14, 15, 14, 14, 14,
- 14, 14, 14, 14, 16, 17, 14, 14, 18, 18, 18, 18, 18, 18, 18, 18,
- 18, 18, 18, 18, 18, 18, 18, 18, 19, 18, 18, 18, 18, 18, 18, 18,
- 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 20, 21,
- 21, 21, 22, 20, 21, 21, 21, 21, 21, 23, 24, 25, 25, 25, 25, 25,
- 25, 26, 25, 25, 25, 27, 28, 26, 29, 30, 31, 32, 31, 31, 31, 31,
- 33, 34, 35, 31, 31, 31, 36, 31, 31, 31, 31, 31, 31, 31, 31, 31,
- 31, 31, 31, 29, 31, 31, 31, 31, 37, 38, 37, 37, 37, 37, 37, 37,
- 37, 39, 31, 31, 31, 31, 31, 31, 40, 40, 40, 40, 40, 40, 41, 26,
- 42, 42, 42, 42, 42, 42, 42, 43, 44, 44, 44, 44, 44, 45, 44, 46,
- 47, 47, 47, 48, 37, 49, 31, 31, 31, 50, 51, 31, 31, 31, 31, 31,
- 31, 31, 31, 31, 52, 31, 31, 31, 53, 53, 53, 53, 53, 53, 53, 53,
- 53, 53, 54, 53, 55, 53, 53, 53, 56, 57, 58, 59, 59, 60, 61, 62,
- 57, 63, 64, 65, 66, 59, 59, 67, 68, 69, 70, 71, 71, 72, 73, 74,
- 69, 75, 76, 77, 78, 71, 79, 26, 80, 81, 82, 83, 83, 84, 85, 86,
- 81, 87, 88, 26, 89, 83, 90, 91, 92, 93, 94, 95, 95, 96, 97, 98,
- 93, 99, 100, 101, 102, 95, 95, 26, 103, 104, 105, 106, 107, 104, 108, 109,
- 104, 105, 110, 26, 111, 108, 108, 112, 113, 114, 115, 113, 113, 115, 113, 116,
- 114, 117, 118, 119, 120, 113, 121, 113, 122, 123, 124, 122, 122, 124, 125, 126,
- 123, 127, 128, 128, 129, 122, 130, 26, 131, 132, 133, 131, 131, 131, 131, 131,
- 132, 133, 134, 131, 135, 131, 131, 131, 136, 137, 138, 139, 137, 137, 140, 141,
- 138, 142, 143, 137, 144, 137, 145, 26, 146, 147, 147, 147, 147, 147, 147, 148,
- 147, 147, 147, 149, 26, 26, 26, 26, 150, 151, 152, 152, 153, 152, 152, 154,
- 155, 156, 152, 157, 26, 26, 26, 26, 158, 158, 158, 158, 158, 158, 158, 158,
- 158, 159, 158, 158, 158, 160, 159, 158, 158, 158, 158, 159, 158, 158, 158, 161,
- 158, 161, 162, 163, 26, 26, 26, 26, 164, 164, 164, 164, 164, 164, 164, 164,
- 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 165, 165, 165, 165,
- 166, 167, 165, 165, 165, 165, 165, 168, 169, 169, 169, 169, 169, 169, 169, 169,
- 169, 169, 169, 169, 169, 169, 169, 169, 170, 170, 170, 170, 170, 170, 170, 170,
- 170, 171, 172, 171, 170, 170, 170, 170, 170, 171, 170, 170, 170, 170, 171, 172,
- 171, 170, 172, 170, 170, 170, 170, 170, 170, 170, 171, 170, 170, 170, 170, 170,
- 170, 170, 170, 173, 170, 170, 170, 174, 170, 170, 170, 175, 176, 176, 176, 176,
- 176, 176, 176, 176, 176, 176, 177, 177, 178, 178, 178, 178, 178, 178, 178, 178,
- 178, 178, 178, 178, 178, 178, 178, 178, 179, 179, 179, 180, 181, 181, 181, 181,
- 181, 181, 181, 181, 181, 182, 181, 183, 184, 184, 185, 186, 187, 187, 188, 26,
- 189, 189, 190, 26, 191, 192, 193, 26, 194, 194, 194, 194, 194, 194, 194, 194,
- 194, 194, 194, 195, 194, 196, 194, 196, 197, 198, 198, 199, 198, 198, 198, 198,
- 198, 198, 198, 198, 198, 198, 198, 200, 198, 198, 198, 198, 198, 201, 178, 178,
- 178, 178, 178, 178, 178, 178, 202, 26, 203, 203, 203, 204, 203, 205, 203, 205,
- 206, 203, 207, 207, 207, 208, 209, 26, 210, 210, 210, 210, 210, 211, 210, 210,
- 210, 212, 210, 213, 194, 194, 194, 194, 214, 214, 214, 215, 216, 216, 216, 216,
- 216, 216, 216, 217, 216, 216, 216, 218, 216, 219, 216, 219, 216, 220, 9, 9,
- 9, 221, 26, 26, 26, 26, 26, 26, 222, 222, 222, 222, 222, 222, 222, 222,
- 222, 223, 222, 222, 222, 222, 222, 224, 225, 225, 225, 225, 225, 225, 225, 225,
- 226, 226, 226, 226, 226, 226, 227, 228, 229, 229, 229, 229, 229, 229, 229, 230,
- 229, 231, 232, 232, 232, 232, 232, 232, 18, 233, 165, 165, 165, 165, 165, 234,
- 225, 26, 235, 9, 236, 237, 238, 239, 2, 2, 2, 2, 240, 241, 2, 2,
- 2, 2, 2, 242, 243, 244, 2, 245, 2, 2, 2, 2, 2, 2, 2, 246,
- 9, 9, 9, 9, 9, 9, 9, 9, 14, 14, 247, 247, 14, 14, 14, 14,
- 247, 247, 14, 248, 14, 14, 14, 247, 14, 14, 14, 14, 14, 14, 249, 14,
- 249, 14, 250, 251, 14, 14, 252, 253, 0, 254, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 255, 0, 256, 257, 0, 258, 2, 259, 0, 0, 0, 0,
- 260, 26, 9, 9, 9, 9, 261, 26, 0, 0, 0, 0, 262, 263, 4, 0,
- 0, 264, 0, 0, 2, 2, 2, 2, 2, 265, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 258, 26, 26, 26,
- 0, 266, 26, 26, 0, 0, 0, 0, 267, 267, 267, 267, 267, 267, 267, 267,
- 267, 267, 267, 267, 267, 267, 267, 267, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 268, 0, 0, 0, 269, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 270, 270, 270, 270, 270, 270, 270, 270,
- 270, 270, 270, 270, 2, 2, 2, 2, 17, 17, 17, 17, 17, 17, 17, 17,
- 17, 17, 17, 17, 17, 17, 271, 272, 165, 165, 165, 165, 166, 167, 273, 273,
- 273, 273, 273, 273, 273, 274, 275, 274, 170, 170, 172, 26, 172, 172, 172, 172,
- 172, 172, 172, 172, 18, 18, 18, 18, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 276, 26, 26, 26, 26, 277, 277, 277, 278, 277, 277, 277, 277,
- 277, 277, 277, 277, 277, 277, 279, 26, 277, 277, 277, 277, 277, 277, 277, 277,
- 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277,
- 277, 277, 280, 26, 26, 26, 0, 281, 282, 0, 0, 0, 283, 284, 0, 285,
- 286, 287, 287, 287, 287, 287, 287, 287, 287, 287, 288, 289, 290, 291, 291, 291,
- 291, 291, 291, 291, 291, 291, 291, 292, 293, 294, 294, 294, 294, 294, 295, 169,
- 169, 169, 169, 169, 169, 169, 169, 169, 169, 296, 0, 0, 294, 294, 294, 294,
- 0, 0, 0, 0, 281, 26, 291, 291, 169, 169, 169, 296, 0, 0, 0, 0,
- 0, 0, 0, 0, 169, 169, 169, 297, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 291, 291, 291, 291, 291, 298, 291, 291, 291, 291, 291, 291, 291, 291,
- 291, 291, 291, 0, 0, 0, 0, 0, 277, 277, 277, 277, 277, 277, 277, 277,
- 0, 0, 0, 0, 0, 0, 0, 0, 299, 299, 299, 299, 299, 299, 299, 299,
- 299, 299, 299, 299, 299, 299, 299, 299, 299, 300, 299, 299, 299, 299, 299, 299,
- 301, 26, 302, 302, 302, 302, 302, 302, 303, 303, 303, 303, 303, 303, 303, 303,
- 303, 303, 303, 303, 303, 303, 303, 303, 303, 303, 303, 303, 303, 304, 26, 26,
- 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 305, 305, 305, 305,
- 305, 305, 305, 305, 305, 305, 305, 26, 0, 0, 0, 0, 306, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 307, 2, 2, 2, 2, 2, 2,
- 2, 308, 309, 310, 26, 26, 311, 2, 312, 312, 312, 312, 312, 313, 0, 314,
- 315, 315, 315, 315, 315, 315, 315, 26, 316, 316, 316, 316, 316, 316, 316, 316,
- 317, 318, 316, 319, 53, 53, 53, 53, 320, 320, 320, 320, 320, 321, 322, 322,
- 322, 322, 323, 324, 169, 169, 169, 325, 326, 326, 326, 326, 326, 326, 326, 326,
- 326, 327, 326, 328, 164, 164, 164, 329, 330, 330, 330, 330, 330, 330, 331, 26,
- 330, 332, 330, 333, 164, 164, 164, 164, 334, 334, 334, 334, 334, 334, 334, 334,
- 335, 26, 26, 336, 337, 337, 338, 26, 339, 339, 339, 26, 172, 172, 2, 2,
- 2, 2, 2, 340, 341, 342, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176,
- 337, 337, 337, 337, 337, 343, 337, 344, 169, 169, 169, 169, 345, 26, 169, 169,
- 296, 346, 169, 169, 169, 169, 169, 345, 26, 26, 26, 26, 26, 26, 26, 26,
- 26, 26, 26, 26, 26, 26, 26, 26, 277, 277, 277, 277, 277, 277, 277, 277,
- 277, 277, 277, 277, 277, 280, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277,
- 277, 277, 277, 347, 26, 26, 26, 26, 348, 26, 349, 350, 25, 25, 351, 352,
- 353, 25, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31,
- 354, 26, 355, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31,
- 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 356,
- 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 357, 31, 31, 31, 31, 31,
- 31, 358, 26, 26, 26, 26, 31, 31, 9, 9, 0, 314, 9, 359, 0, 0,
- 0, 0, 360, 0, 258, 281, 361, 31, 31, 31, 31, 31, 31, 31, 31, 31,
- 31, 31, 31, 31, 31, 31, 31, 362, 363, 0, 0, 0, 1, 2, 2, 3,
- 1, 2, 2, 3, 364, 291, 290, 291, 291, 291, 291, 365, 169, 169, 169, 296,
- 366, 366, 366, 367, 258, 258, 26, 368, 369, 370, 369, 369, 371, 369, 369, 372,
- 369, 373, 369, 373, 26, 26, 26, 26, 369, 369, 369, 369, 369, 369, 369, 369,
- 369, 369, 369, 369, 369, 369, 369, 374, 375, 0, 0, 0, 0, 0, 376, 0,
- 14, 14, 14, 14, 14, 14, 14, 14, 14, 253, 0, 377, 378, 26, 26, 26,
- 26, 26, 0, 0, 0, 0, 0, 379, 380, 380, 380, 381, 382, 382, 382, 382,
- 382, 382, 383, 26, 384, 0, 0, 281, 385, 385, 385, 385, 386, 387, 388, 388,
- 388, 389, 390, 390, 390, 390, 390, 391, 392, 392, 392, 393, 394, 394, 394, 394,
- 395, 394, 396, 26, 26, 26, 26, 26, 397, 397, 397, 397, 397, 397, 397, 397,
- 397, 397, 398, 398, 398, 398, 398, 398, 399, 399, 399, 400, 399, 401, 402, 402,
- 402, 402, 403, 402, 402, 402, 402, 403, 404, 404, 404, 404, 404, 26, 405, 405,
- 405, 405, 405, 405, 406, 407, 408, 409, 408, 409, 410, 408, 411, 408, 411, 412,
- 26, 26, 26, 26, 26, 26, 26, 26, 413, 413, 413, 413, 413, 413, 413, 413,
- 413, 413, 413, 413, 413, 413, 413, 413, 413, 413, 413, 413, 413, 413, 414, 26,
- 413, 413, 415, 26, 413, 26, 26, 26, 416, 2, 2, 2, 2, 2, 417, 308,
- 26, 26, 26, 26, 26, 26, 26, 26, 418, 419, 420, 420, 420, 420, 421, 422,
- 423, 423, 424, 423, 425, 425, 425, 425, 426, 426, 426, 427, 428, 426, 26, 26,
- 26, 26, 26, 26, 429, 429, 430, 431, 432, 432, 432, 433, 434, 434, 434, 435,
- 26, 26, 26, 26, 26, 26, 26, 26, 436, 436, 436, 436, 437, 437, 437, 438,
- 437, 437, 439, 437, 437, 437, 437, 437, 440, 441, 442, 443, 444, 444, 445, 446,
- 444, 447, 444, 447, 448, 448, 448, 448, 449, 449, 449, 449, 26, 26, 26, 26,
- 450, 450, 450, 450, 451, 452, 451, 26, 453, 453, 453, 453, 453, 453, 454, 455,
- 456, 456, 457, 456, 458, 458, 459, 458, 460, 460, 461, 462, 26, 463, 26, 26,
- 26, 26, 26, 26, 26, 26, 26, 26, 464, 464, 464, 464, 464, 464, 464, 464,
- 464, 465, 26, 26, 26, 26, 26, 26, 466, 466, 466, 466, 466, 466, 467, 26,
- 466, 466, 466, 466, 466, 466, 467, 468, 469, 469, 469, 469, 469, 26, 469, 470,
- 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
- 26, 26, 26, 26, 31, 31, 31, 50, 471, 471, 471, 471, 471, 472, 473, 26,
- 26, 26, 26, 26, 26, 26, 26, 474, 475, 475, 475, 475, 475, 26, 476, 476,
- 476, 476, 476, 477, 26, 26, 478, 478, 478, 479, 26, 26, 26, 26, 480, 480,
- 480, 481, 26, 26, 482, 482, 483, 26, 484, 484, 484, 484, 484, 484, 484, 484,
- 484, 485, 486, 484, 484, 484, 485, 487, 488, 488, 488, 488, 488, 488, 488, 488,
- 489, 490, 491, 491, 491, 492, 491, 493, 494, 494, 494, 494, 494, 494, 495, 494,
- 494, 26, 496, 496, 496, 496, 497, 26, 498, 498, 498, 498, 498, 498, 498, 498,
- 498, 498, 498, 498, 499, 137, 500, 26, 501, 501, 502, 501, 501, 501, 501, 501,
- 503, 26, 26, 26, 26, 26, 26, 26, 504, 505, 506, 507, 506, 508, 509, 509,
- 509, 509, 509, 509, 509, 510, 509, 511, 512, 513, 514, 515, 515, 516, 517, 518,
- 513, 519, 520, 521, 522, 523, 523, 26, 524, 524, 524, 524, 524, 524, 524, 524,
- 524, 524, 524, 525, 526, 26, 26, 26, 527, 527, 527, 527, 527, 527, 527, 527,
- 527, 26, 527, 528, 26, 26, 26, 26, 529, 529, 529, 529, 529, 529, 530, 529,
- 529, 529, 529, 530, 26, 26, 26, 26, 531, 531, 531, 531, 531, 531, 531, 531,
- 532, 26, 531, 533, 198, 534, 26, 26, 535, 535, 535, 535, 535, 535, 535, 536,
- 535, 536, 26, 26, 26, 26, 26, 26, 537, 537, 537, 538, 537, 539, 537, 537,
- 540, 26, 26, 26, 26, 26, 26, 26, 541, 541, 541, 541, 541, 541, 541, 542,
- 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 543, 543, 543, 543,
- 543, 543, 543, 543, 543, 543, 544, 545, 546, 547, 548, 549, 549, 549, 550, 551,
- 546, 26, 549, 552, 26, 26, 26, 26, 26, 26, 26, 26, 553, 554, 553, 553,
- 553, 553, 553, 554, 555, 26, 26, 26, 556, 556, 556, 556, 556, 556, 556, 556,
- 556, 26, 557, 557, 557, 557, 557, 557, 557, 557, 557, 557, 558, 26, 178, 178,
- 559, 559, 559, 559, 559, 559, 559, 560, 53, 561, 26, 26, 26, 26, 26, 26,
- 26, 26, 26, 26, 26, 26, 26, 26, 562, 563, 562, 562, 562, 562, 564, 562,
- 565, 26, 562, 562, 562, 566, 567, 567, 567, 567, 568, 567, 567, 569, 570, 26,
- 26, 26, 26, 26, 26, 26, 26, 26, 571, 572, 573, 573, 573, 573, 571, 574,
- 573, 26, 573, 575, 576, 577, 578, 578, 578, 579, 580, 581, 578, 582, 26, 26,
- 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
- 26, 26, 26, 26, 583, 583, 583, 584, 585, 585, 586, 585, 585, 585, 585, 587,
- 585, 585, 585, 588, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 589, 26,
- 108, 108, 108, 108, 108, 108, 590, 591, 592, 592, 592, 592, 592, 592, 592, 592,
- 592, 592, 592, 592, 592, 592, 592, 592, 592, 592, 592, 593, 26, 26, 26, 26,
- 26, 26, 26, 26, 26, 26, 26, 26, 592, 592, 592, 592, 592, 592, 592, 592,
- 592, 592, 592, 592, 592, 594, 595, 26, 592, 592, 592, 592, 592, 592, 592, 592,
- 596, 26, 26, 26, 26, 26, 26, 26, 26, 26, 597, 597, 597, 597, 597, 597,
- 597, 597, 597, 597, 597, 597, 598, 26, 599, 599, 599, 599, 599, 599, 599, 599,
- 599, 599, 599, 599, 599, 599, 599, 599, 599, 599, 599, 599, 599, 599, 599, 599,
- 599, 599, 600, 26, 26, 26, 26, 26, 601, 601, 601, 601, 601, 601, 601, 601,
- 601, 601, 601, 601, 601, 601, 601, 601, 601, 601, 601, 601, 601, 601, 601, 601,
- 602, 26, 26, 26, 26, 26, 26, 26, 305, 305, 305, 305, 305, 305, 305, 305,
- 305, 305, 305, 305, 305, 305, 305, 305, 305, 305, 305, 305, 305, 305, 305, 603,
- 604, 604, 604, 605, 604, 606, 607, 607, 607, 607, 607, 607, 607, 607, 607, 608,
- 607, 609, 610, 610, 610, 611, 611, 26, 612, 612, 612, 612, 612, 612, 612, 612,
- 613, 26, 612, 614, 614, 612, 612, 615, 612, 612, 26, 26, 26, 26, 26, 26,
- 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
- 616, 616, 616, 616, 616, 616, 616, 616, 616, 616, 616, 617, 26, 26, 26, 26,
- 26, 26, 26, 26, 26, 26, 26, 26, 618, 618, 618, 618, 618, 618, 618, 618,
- 618, 619, 618, 618, 618, 618, 618, 618, 618, 620, 618, 618, 26, 26, 26, 26,
- 26, 26, 26, 26, 621, 26, 347, 26, 622, 622, 622, 622, 622, 622, 622, 622,
- 622, 622, 622, 622, 622, 622, 622, 622, 622, 622, 622, 622, 622, 622, 622, 622,
- 622, 622, 622, 622, 622, 622, 622, 26, 623, 623, 623, 623, 623, 623, 623, 623,
- 623, 623, 623, 623, 623, 623, 623, 623, 623, 623, 623, 623, 623, 623, 623, 623,
- 623, 623, 624, 26, 26, 26, 26, 26, 622, 625, 26, 26, 26, 26, 26, 26,
- 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
- 26, 26, 26, 26, 26, 26, 626, 627, 628, 287, 287, 287, 287, 287, 287, 287,
- 287, 287, 287, 287, 287, 287, 287, 287, 287, 287, 287, 287, 287, 287, 287, 287,
- 287, 287, 287, 287, 287, 287, 287, 287, 287, 287, 287, 287, 629, 26, 630, 26,
- 26, 26, 631, 26, 632, 26, 633, 633, 633, 633, 633, 633, 633, 633, 633, 633,
- 633, 633, 633, 633, 633, 633, 633, 633, 633, 633, 633, 633, 633, 633, 633, 633,
- 633, 633, 633, 633, 633, 633, 633, 634, 635, 635, 635, 635, 635, 635, 635, 635,
- 635, 635, 635, 635, 635, 636, 635, 637, 635, 638, 635, 639, 281, 26, 26, 26,
- 26, 26, 26, 26, 26, 26, 26, 26, 9, 9, 9, 9, 9, 640, 9, 9,
- 221, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 281, 26, 26, 26, 26, 26, 26, 26, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 276, 26, 0, 0, 0, 0, 258, 363, 0, 0,
- 0, 0, 0, 0, 641, 642, 0, 643, 644, 645, 0, 0, 0, 646, 0, 0,
- 0, 0, 0, 0, 0, 266, 26, 26, 14, 14, 14, 14, 14, 14, 14, 14,
- 247, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
- 0, 0, 281, 26, 0, 0, 281, 26, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 258, 26, 0, 0, 0, 260, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 255, 0, 0, 0, 0, 0, 0, 0, 0, 255, 647, 648, 0, 649,
- 650, 0, 0, 0, 0, 0, 0, 0, 269, 651, 255, 255, 0, 0, 0, 652,
- 653, 654, 655, 0, 0, 0, 0, 0, 0, 0, 0, 0, 276, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 268, 0, 0, 0, 0, 0, 0, 656, 656, 656, 656, 656, 656, 656, 656,
- 656, 656, 656, 656, 656, 656, 656, 656, 656, 657, 26, 658, 659, 656, 26, 26,
- 26, 26, 26, 26, 26, 26, 26, 26, 2, 2, 2, 348, 660, 308, 26, 26,
- 26, 26, 26, 26, 26, 26, 26, 26, 661, 270, 270, 662, 663, 664, 18, 18,
- 18, 18, 18, 18, 18, 665, 26, 26, 26, 666, 26, 26, 26, 26, 26, 26,
- 26, 26, 26, 26, 26, 26, 26, 26, 667, 667, 667, 667, 667, 668, 667, 669,
- 667, 670, 26, 26, 26, 26, 26, 26, 26, 26, 671, 671, 671, 672, 26, 26,
- 673, 673, 673, 673, 673, 673, 673, 674, 26, 26, 26, 26, 26, 26, 26, 26,
- 26, 26, 675, 675, 675, 675, 675, 676, 26, 26, 26, 26, 26, 26, 26, 26,
- 26, 26, 26, 26, 172, 677, 170, 172, 678, 678, 678, 678, 678, 678, 678, 678,
- 678, 678, 678, 678, 678, 678, 678, 678, 678, 678, 678, 678, 678, 678, 678, 678,
- 679, 678, 680, 26, 26, 26, 26, 26, 681, 681, 681, 681, 681, 681, 681, 681,
- 681, 682, 681, 683, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
- 26, 26, 26, 26, 26, 26, 363, 0, 0, 0, 0, 0, 0, 0, 377, 26,
- 26, 26, 26, 26, 26, 26, 26, 26, 363, 0, 0, 0, 0, 0, 0, 276,
- 26, 26, 26, 26, 26, 26, 26, 26, 684, 31, 31, 31, 685, 686, 687, 688,
- 689, 690, 685, 691, 685, 687, 687, 692, 31, 693, 31, 694, 695, 693, 31, 694,
- 26, 26, 26, 26, 26, 26, 51, 26, 0, 0, 0, 0, 0, 281, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 281, 26, 0, 258, 363, 0,
- 363, 0, 363, 0, 0, 0, 276, 26, 0, 0, 0, 0, 0, 276, 26, 26,
- 26, 26, 26, 26, 696, 0, 0, 0, 697, 26, 0, 0, 0, 0, 0, 281,
- 0, 260, 314, 26, 276, 26, 26, 26, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 698, 0, 377, 0, 377, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 258, 699, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 314, 0, 281, 260, 26, 0, 281, 0, 0, 0, 0, 0, 0,
- 0, 26, 0, 314, 0, 0, 0, 0, 0, 26, 0, 0, 0, 276, 314, 26,
- 26, 26, 26, 26, 26, 26, 26, 26, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 281, 26, 0, 276, 0, 377, 0, 260, 0, 0, 0, 0, 0, 269,
- 276, 696, 0, 281, 0, 260, 0, 260, 0, 0, 360, 0, 0, 0, 0, 0,
- 0, 266, 26, 26, 26, 26, 0, 314, 277, 277, 277, 277, 277, 277, 277, 277,
- 277, 277, 277, 277, 26, 26, 26, 26, 277, 277, 277, 277, 277, 277, 277, 347,
- 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 280, 277, 277, 277, 277,
- 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 347, 26, 277, 277,
- 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277,
- 277, 277, 277, 277, 700, 26, 26, 26, 277, 277, 277, 280, 26, 26, 26, 26,
- 26, 26, 26, 26, 26, 26, 26, 26, 277, 277, 277, 277, 277, 277, 277, 277,
- 277, 701, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 26, 26,
- 26, 26, 26, 26, 26, 26, 26, 26, 702, 26, 26, 26, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 9, 9, 9, 9, 9, 9, 9, 9,
- 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
- 9, 9, 9, 9, 9, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 391, 391, 391, 391, 391, 391, 391, 685, 0, 0, 0, 0, 1, 2, 1, 2,
+ 0, 0, 3, 3, 4, 5, 4, 5, 4, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 6, 0, 0, 7, 0, 8, 8, 8, 8, 8, 8, 8, 9,
+ 10, 11, 12, 11, 11, 11, 13, 11, 14, 14, 14, 14, 14, 14, 14, 14,
+ 15, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 16, 17, 18, 17, 17,
+ 19, 20, 21, 21, 22, 21, 23, 24, 25, 26, 27, 27, 28, 29, 27, 30,
+ 27, 27, 27, 27, 27, 31, 27, 27, 32, 33, 33, 33, 34, 27, 27, 27,
+ 35, 35, 35, 36, 37, 37, 37, 38, 39, 39, 40, 41, 42, 43, 44, 27,
+ 45, 46, 27, 27, 27, 27, 47, 27, 48, 48, 48, 48, 48, 49, 50, 48,
+ 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66,
+ 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82,
+ 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98,
+ 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 109, 110, 111, 112, 109,
+ 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 122, 123, 122, 124, 125, 125,
+ 126, 127, 128, 129, 130, 131, 125, 125, 132, 132, 132, 132, 133, 132, 134, 135,
+ 132, 133, 132, 136, 136, 137, 125, 125, 138, 138, 138, 138, 138, 138, 138, 138,
+ 138, 138, 139, 139, 140, 139, 139, 141, 142, 142, 142, 142, 142, 142, 142, 142,
+ 143, 143, 143, 143, 144, 145, 143, 143, 144, 143, 143, 146, 147, 148, 143, 143,
+ 143, 147, 143, 143, 143, 149, 143, 150, 143, 151, 152, 152, 152, 152, 152, 153,
+ 154, 154, 154, 154, 154, 154, 154, 154, 155, 156, 157, 157, 157, 157, 158, 159,
+ 160, 161, 162, 163, 164, 165, 166, 167, 168, 168, 168, 168, 168, 169, 170, 170,
+ 171, 172, 173, 173, 173, 173, 173, 174, 173, 173, 175, 154, 154, 154, 154, 176,
+ 177, 178, 179, 179, 180, 181, 182, 183, 184, 184, 185, 184, 186, 187, 168, 168,
+ 188, 189, 190, 190, 190, 191, 190, 192, 193, 193, 194, 8, 195, 125, 125, 125,
+ 196, 196, 196, 196, 197, 196, 196, 198, 199, 199, 199, 199, 200, 200, 200, 201,
+ 202, 202, 202, 203, 204, 205, 205, 205, 206, 139, 139, 207, 208, 209, 210, 211,
+ 4, 4, 212, 4, 4, 213, 214, 215, 4, 4, 4, 216, 8, 8, 8, 8,
+ 11, 217, 11, 11, 217, 218, 11, 219, 11, 11, 11, 220, 220, 221, 11, 222,
+ 223, 0, 0, 0, 0, 0, 224, 225, 226, 227, 0, 0, 228, 8, 8, 229,
+ 0, 0, 230, 231, 232, 0, 4, 4, 233, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 234, 125, 235, 125, 0, 0,
+ 236, 236, 236, 236, 236, 236, 236, 236, 0, 0, 0, 0, 0, 0, 0, 237,
+ 0, 238, 0, 0, 0, 0, 0, 0, 239, 239, 239, 239, 239, 239, 4, 4,
+ 240, 240, 240, 240, 240, 240, 240, 241, 139, 139, 140, 242, 242, 242, 243, 244,
+ 143, 245, 246, 246, 246, 246, 14, 14, 0, 0, 0, 0, 0, 247, 125, 125,
+ 248, 249, 248, 248, 248, 248, 248, 250, 248, 248, 248, 248, 248, 248, 248, 248,
+ 248, 248, 248, 248, 248, 251, 125, 252, 253, 0, 254, 255, 256, 257, 257, 257,
+ 257, 258, 259, 260, 260, 260, 260, 261, 262, 263, 263, 264, 142, 142, 142, 142,
+ 265, 0, 263, 263, 0, 0, 266, 260, 142, 265, 0, 0, 0, 0, 142, 267,
+ 0, 0, 0, 0, 0, 260, 260, 268, 260, 260, 260, 260, 260, 269, 0, 0,
+ 248, 248, 248, 248, 0, 0, 0, 0, 270, 270, 270, 270, 270, 270, 270, 270,
+ 271, 270, 270, 270, 272, 273, 273, 273, 274, 274, 274, 274, 274, 274, 274, 274,
+ 274, 274, 275, 125, 14, 14, 14, 14, 14, 14, 276, 276, 276, 276, 276, 277,
+ 0, 0, 278, 4, 4, 4, 4, 4, 279, 4, 4, 4, 280, 281, 125, 282,
+ 283, 283, 284, 285, 286, 286, 286, 287, 288, 288, 288, 288, 289, 290, 48, 48,
+ 291, 291, 292, 293, 293, 294, 142, 295, 296, 296, 296, 296, 297, 298, 138, 299,
+ 300, 300, 300, 301, 302, 303, 138, 138, 304, 304, 304, 304, 305, 306, 307, 308,
+ 309, 310, 246, 4, 4, 311, 312, 152, 152, 152, 152, 152, 307, 307, 313, 314,
+ 142, 142, 315, 142, 316, 142, 142, 317, 125, 125, 125, 125, 125, 125, 125, 125,
+ 248, 248, 248, 248, 248, 248, 318, 248, 248, 248, 248, 248, 248, 319, 125, 125,
+ 320, 321, 21, 322, 323, 27, 27, 27, 27, 27, 27, 27, 324, 325, 27, 27,
+ 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 326, 27, 27, 27, 27,
+ 27, 327, 27, 27, 328, 125, 125, 27, 8, 285, 329, 0, 0, 330, 331, 332,
+ 27, 27, 27, 27, 27, 27, 27, 333, 334, 0, 1, 2, 1, 2, 335, 259,
+ 260, 336, 142, 265, 337, 338, 339, 340, 341, 342, 343, 344, 345, 345, 125, 125,
+ 342, 342, 342, 342, 342, 342, 342, 346, 347, 0, 0, 348, 11, 11, 11, 11,
+ 349, 350, 351, 125, 125, 0, 0, 352, 353, 354, 355, 355, 355, 356, 357, 252,
+ 358, 358, 359, 360, 361, 362, 362, 363, 364, 365, 366, 366, 367, 368, 125, 125,
+ 369, 369, 369, 369, 369, 370, 370, 370, 371, 372, 373, 374, 374, 375, 374, 376,
+ 377, 377, 378, 379, 379, 379, 380, 381, 381, 382, 383, 384, 125, 125, 125, 125,
+ 385, 385, 385, 385, 385, 385, 385, 385, 385, 385, 385, 386, 385, 387, 388, 125,
+ 389, 4, 4, 390, 125, 125, 125, 125, 391, 392, 392, 393, 394, 395, 396, 396,
+ 397, 398, 399, 125, 125, 125, 400, 401, 402, 403, 404, 405, 125, 125, 125, 125,
+ 406, 406, 407, 408, 407, 409, 407, 407, 410, 411, 412, 413, 414, 414, 415, 415,
+ 416, 416, 125, 125, 417, 417, 418, 419, 420, 420, 420, 421, 422, 423, 424, 425,
+ 426, 427, 428, 125, 125, 125, 125, 125, 429, 429, 429, 429, 430, 125, 125, 125,
+ 431, 431, 431, 432, 431, 431, 431, 433, 434, 434, 435, 436, 125, 125, 125, 125,
+ 125, 125, 125, 125, 125, 125, 27, 45, 437, 437, 438, 439, 125, 125, 125, 440,
+ 441, 441, 442, 443, 443, 444, 125, 445, 446, 125, 125, 447, 448, 125, 449, 450,
+ 451, 451, 451, 451, 452, 453, 451, 454, 455, 455, 455, 455, 456, 457, 458, 459,
+ 460, 460, 460, 461, 462, 463, 463, 464, 465, 465, 465, 465, 465, 465, 466, 467,
+ 468, 469, 468, 468, 470, 125, 125, 125, 471, 472, 473, 474, 474, 474, 475, 476,
+ 477, 478, 479, 480, 481, 482, 483, 484, 485, 485, 485, 485, 485, 486, 487, 125,
+ 488, 488, 488, 488, 489, 490, 125, 125, 491, 491, 491, 492, 491, 493, 125, 125,
+ 494, 494, 494, 494, 495, 496, 497, 125, 498, 498, 498, 499, 499, 125, 125, 125,
+ 500, 501, 502, 500, 503, 125, 125, 125, 504, 504, 504, 505, 125, 125, 125, 125,
+ 125, 125, 506, 506, 506, 506, 506, 507, 508, 509, 510, 511, 512, 513, 125, 125,
+ 125, 125, 514, 515, 515, 514, 516, 125, 517, 517, 517, 517, 518, 519, 519, 519,
+ 519, 519, 520, 154, 521, 521, 521, 522, 523, 125, 125, 125, 125, 125, 125, 125,
+ 524, 525, 525, 526, 527, 525, 528, 529, 529, 530, 531, 532, 125, 125, 125, 125,
+ 533, 534, 534, 535, 536, 537, 538, 539, 540, 541, 542, 125, 125, 125, 125, 125,
+ 125, 125, 125, 125, 125, 125, 543, 544, 545, 546, 545, 547, 545, 548, 125, 125,
+ 125, 125, 125, 549, 550, 550, 550, 551, 552, 552, 552, 552, 552, 552, 552, 552,
+ 552, 553, 125, 125, 125, 125, 125, 125, 552, 552, 552, 552, 552, 552, 554, 555,
+ 552, 552, 552, 552, 556, 125, 125, 125, 125, 557, 557, 557, 557, 557, 557, 558,
+ 559, 559, 559, 559, 559, 559, 559, 559, 559, 559, 559, 559, 559, 560, 125, 125,
+ 561, 561, 561, 561, 561, 561, 561, 561, 561, 561, 561, 561, 562, 125, 125, 125,
+ 276, 276, 276, 276, 276, 276, 276, 276, 276, 276, 276, 563, 564, 565, 566, 567,
+ 567, 567, 567, 568, 569, 570, 571, 572, 573, 573, 573, 573, 574, 575, 576, 577,
+ 573, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 578, 578, 578, 578,
+ 578, 579, 125, 125, 125, 125, 125, 125, 580, 580, 580, 580, 581, 580, 580, 580,
+ 582, 580, 125, 125, 125, 125, 583, 584, 585, 585, 585, 585, 585, 585, 585, 585,
+ 585, 585, 585, 585, 585, 585, 585, 586, 587, 587, 587, 587, 587, 587, 587, 587,
+ 587, 587, 587, 587, 587, 588, 125, 125, 589, 125, 125, 125, 125, 125, 125, 125,
+ 125, 125, 125, 125, 125, 125, 125, 590, 591, 257, 257, 257, 257, 257, 257, 257,
+ 257, 257, 257, 257, 257, 257, 257, 257, 257, 257, 592, 593, 125, 594, 595, 596,
+ 596, 596, 596, 596, 596, 596, 596, 596, 596, 596, 596, 596, 596, 596, 596, 597,
+ 598, 598, 598, 598, 598, 598, 599, 600, 601, 602, 266, 125, 125, 125, 125, 125,
+ 8, 8, 603, 8, 604, 0, 0, 0, 0, 0, 0, 0, 266, 125, 125, 125,
+ 0, 0, 0, 0, 0, 0, 0, 605, 0, 0, 606, 0, 0, 0, 607, 608,
+ 609, 0, 610, 0, 0, 0, 235, 125, 11, 11, 11, 11, 611, 125, 125, 125,
+ 125, 125, 125, 125, 0, 266, 0, 266, 0, 0, 0, 0, 0, 234, 0, 612,
+ 0, 0, 0, 0, 0, 224, 0, 0, 0, 613, 614, 615, 616, 0, 0, 0,
+ 617, 618, 0, 619, 620, 621, 0, 0, 0, 0, 622, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 623, 0, 0, 0, 624, 624, 624, 624, 624, 624, 624, 624,
+ 625, 626, 627, 125, 125, 125, 125, 125, 4, 628, 629, 125, 125, 125, 125, 125,
+ 630, 631, 632, 14, 14, 14, 633, 125, 634, 125, 125, 125, 125, 125, 125, 125,
+ 635, 635, 636, 637, 638, 125, 125, 125, 125, 639, 640, 125, 641, 641, 641, 642,
+ 125, 125, 125, 125, 125, 643, 643, 644, 125, 125, 125, 125, 125, 125, 645, 646,
+ 647, 647, 647, 647, 647, 647, 647, 647, 647, 647, 647, 647, 648, 649, 125, 125,
+ 650, 650, 650, 650, 651, 652, 125, 125, 125, 125, 125, 125, 125, 125, 125, 334,
+ 0, 0, 0, 653, 125, 125, 125, 125, 334, 0, 0, 247, 125, 125, 125, 125,
+ 654, 27, 655, 656, 657, 658, 659, 660, 661, 662, 663, 662, 125, 125, 125, 664,
+ 0, 0, 252, 0, 0, 0, 0, 0, 0, 266, 226, 334, 334, 334, 0, 605,
+ 0, 0, 247, 125, 125, 125, 665, 0, 666, 0, 0, 252, 612, 667, 605, 125,
+ 0, 0, 0, 0, 0, 668, 350, 350, 0, 0, 0, 0, 0, 0, 0, 669,
+ 0, 0, 0, 0, 0, 285, 252, 228, 252, 0, 0, 0, 670, 285, 0, 0,
+ 670, 0, 247, 667, 125, 125, 125, 125, 0, 0, 0, 0, 0, 266, 247, 350,
+ 612, 0, 0, 671, 672, 252, 612, 612, 0, 330, 0, 0, 235, 125, 125, 285,
+ 248, 248, 248, 248, 248, 248, 125, 125, 248, 248, 248, 319, 248, 248, 248, 248,
+ 248, 318, 248, 248, 248, 248, 248, 248, 248, 248, 584, 248, 248, 248, 248, 248,
+ 248, 248, 248, 248, 248, 248, 673, 125, 248, 318, 125, 125, 125, 125, 125, 125,
+ 248, 248, 248, 248, 674, 248, 248, 248, 248, 248, 248, 125, 125, 125, 125, 125,
+ 675, 125, 0, 0, 0, 0, 0, 0, 8, 8, 8, 8, 8, 8, 8, 8,
+ 8, 8, 8, 8, 8, 8, 8, 0, 0, 0, 0, 0, 1, 2, 2, 2,
+ 2, 2, 3, 0, 0, 0, 4, 0, 2, 2, 2, 2, 2, 3, 2, 2,
+ 2, 2, 5, 0, 2, 5, 6, 0, 7, 7, 7, 7, 8, 9, 10, 11,
+ 12, 13, 14, 15, 8, 8, 8, 8, 16, 8, 8, 8, 17, 18, 18, 18,
+ 19, 19, 19, 19, 19, 20, 19, 19, 21, 22, 22, 22, 22, 22, 22, 22,
+ 22, 23, 21, 22, 22, 22, 23, 21, 24, 25, 25, 25, 25, 25, 25, 25,
+ 25, 25, 12, 12, 25, 25, 26, 27, 25, 28, 12, 12, 29, 30, 29, 31,
+ 29, 29, 32, 32, 29, 29, 29, 29, 31, 29, 33, 7, 7, 34, 29, 29,
+ 35, 29, 29, 29, 29, 29, 29, 30, 36, 36, 36, 37, 36, 36, 36, 36,
+ 36, 36, 38, 39, 40, 40, 40, 40, 41, 12, 12, 12, 42, 42, 42, 42,
+ 42, 42, 43, 44, 45, 45, 45, 45, 45, 45, 45, 46, 45, 45, 45, 47,
+ 48, 48, 48, 48, 48, 48, 48, 49, 36, 36, 38, 12, 29, 29, 29, 50,
+ 51, 12, 29, 29, 52, 29, 29, 29, 53, 53, 53, 53, 54, 55, 53, 53,
+ 53, 56, 53, 53, 57, 58, 57, 59, 59, 57, 57, 57, 57, 57, 60, 57,
+ 61, 62, 63, 57, 57, 59, 59, 64, 12, 65, 12, 66, 57, 62, 57, 57,
+ 57, 57, 57, 64, 67, 67, 68, 69, 70, 71, 71, 71, 71, 71, 72, 71,
+ 72, 73, 74, 72, 68, 69, 70, 74, 75, 12, 67, 76, 12, 77, 71, 71,
+ 71, 68, 12, 12, 78, 78, 79, 80, 80, 79, 79, 79, 79, 79, 81, 79,
+ 81, 78, 82, 79, 79, 80, 80, 82, 83, 12, 12, 12, 79, 84, 79, 79,
+ 82, 12, 78, 79, 85, 85, 86, 87, 87, 86, 86, 86, 86, 86, 88, 86,
+ 88, 85, 89, 86, 86, 87, 87, 89, 12, 85, 12, 90, 86, 91, 86, 86,
+ 86, 86, 12, 12, 92, 93, 94, 92, 95, 96, 97, 95, 98, 99, 94, 92,
+ 100, 100, 96, 92, 94, 92, 95, 96, 99, 98, 12, 12, 12, 92, 100, 100,
+ 100, 100, 94, 12, 101, 101, 101, 102, 102, 101, 101, 101, 101, 101, 102, 101,
+ 101, 101, 103, 101, 101, 102, 102, 103, 12, 104, 105, 106, 101, 107, 101, 101,
+ 12, 108, 101, 101, 109, 109, 109, 110, 110, 109, 109, 109, 109, 109, 110, 109,
+ 109, 111, 112, 109, 109, 110, 110, 112, 12, 113, 12, 113, 109, 114, 109, 109,
+ 111, 12, 12, 12, 115, 115, 115, 116, 116, 115, 115, 115, 115, 115, 115, 115,
+ 115, 116, 116, 115, 12, 115, 115, 115, 115, 117, 115, 115, 118, 118, 119, 119,
+ 119, 120, 121, 119, 119, 119, 119, 119, 122, 119, 119, 123, 119, 120, 124, 125,
+ 119, 126, 119, 119, 12, 121, 119, 119, 121, 127, 12, 12, 128, 129, 129, 129,
+ 129, 129, 129, 129, 129, 129, 130, 131, 129, 129, 129, 12, 12, 12, 12, 12,
+ 132, 133, 134, 135, 135, 135, 135, 135, 135, 136, 135, 135, 135, 135, 135, 137,
+ 135, 138, 135, 134, 135, 135, 137, 135, 139, 139, 139, 139, 139, 139, 140, 139,
+ 139, 139, 139, 141, 140, 139, 139, 139, 139, 139, 139, 142, 139, 143, 144, 12,
+ 145, 145, 145, 145, 146, 146, 146, 146, 146, 147, 12, 148, 146, 146, 149, 146,
+ 150, 150, 150, 150, 151, 151, 151, 151, 151, 151, 152, 153, 151, 154, 152, 153,
+ 152, 153, 151, 154, 152, 153, 151, 151, 151, 154, 151, 151, 151, 151, 154, 155,
+ 151, 151, 151, 156, 151, 151, 153, 12, 157, 157, 157, 157, 157, 158, 157, 158,
+ 159, 159, 159, 159, 160, 160, 160, 160, 160, 160, 160, 161, 162, 162, 162, 162,
+ 162, 162, 163, 164, 162, 162, 165, 12, 166, 166, 166, 166, 166, 167, 12, 168,
+ 169, 169, 169, 169, 169, 170, 12, 12, 171, 171, 171, 171, 171, 12, 12, 12,
+ 172, 172, 172, 173, 173, 12, 12, 12, 174, 174, 174, 174, 174, 174, 174, 175,
+ 174, 174, 175, 12, 176, 177, 178, 178, 178, 178, 179, 12, 178, 178, 178, 178,
+ 178, 178, 180, 12, 178, 178, 181, 12, 159, 182, 12, 12, 183, 183, 183, 183,
+ 183, 183, 183, 184, 183, 183, 183, 12, 185, 183, 183, 183, 186, 186, 186, 186,
+ 186, 186, 186, 187, 186, 188, 12, 12, 189, 189, 189, 189, 189, 189, 189, 12,
+ 189, 189, 190, 12, 189, 189, 191, 192, 193, 193, 193, 193, 193, 193, 193, 194,
+ 195, 195, 195, 195, 195, 195, 195, 196, 195, 195, 195, 197, 195, 195, 198, 12,
+ 195, 195, 195, 198, 7, 7, 7, 199, 200, 200, 200, 200, 200, 200, 200, 201,
+ 200, 200, 200, 202, 203, 203, 203, 203, 204, 204, 204, 204, 204, 12, 12, 204,
+ 205, 205, 205, 205, 205, 205, 206, 205, 205, 205, 207, 208, 209, 209, 209, 209,
+ 19, 19, 210, 12, 146, 146, 211, 212, 203, 203, 12, 12, 213, 7, 7, 7,
+ 214, 7, 215, 216, 0, 215, 217, 12, 2, 218, 219, 2, 2, 2, 2, 220,
+ 221, 218, 222, 2, 2, 2, 223, 2, 2, 2, 2, 224, 8, 225, 8, 225,
+ 8, 8, 226, 226, 8, 8, 8, 225, 8, 15, 8, 8, 8, 10, 8, 227,
+ 10, 15, 8, 14, 0, 0, 0, 228, 0, 229, 0, 0, 230, 0, 0, 231,
+ 0, 0, 0, 232, 2, 2, 2, 233, 234, 12, 12, 12, 235, 12, 12, 12,
+ 0, 236, 237, 0, 4, 0, 0, 0, 0, 0, 0, 4, 2, 2, 5, 12,
+ 0, 232, 12, 12, 0, 0, 232, 12, 238, 238, 238, 238, 0, 239, 0, 0,
+ 0, 240, 0, 0, 241, 241, 241, 241, 18, 18, 18, 18, 18, 12, 242, 18,
+ 243, 243, 243, 243, 243, 243, 12, 244, 245, 12, 12, 244, 151, 154, 12, 12,
+ 151, 154, 151, 154, 0, 0, 0, 246, 247, 247, 247, 247, 247, 247, 248, 247,
+ 247, 12, 12, 12, 247, 249, 12, 12, 0, 0, 0, 12, 0, 250, 0, 0,
+ 251, 247, 252, 253, 0, 0, 247, 0, 254, 255, 255, 255, 255, 255, 255, 255,
+ 255, 256, 257, 258, 259, 260, 260, 260, 260, 260, 260, 260, 260, 260, 261, 259,
+ 12, 262, 263, 263, 263, 263, 263, 263, 264, 150, 150, 150, 150, 150, 150, 265,
+ 0, 12, 12, 12, 150, 150, 150, 266, 260, 260, 260, 261, 260, 260, 0, 0,
+ 267, 267, 267, 267, 267, 267, 267, 268, 267, 269, 12, 12, 270, 270, 270, 270,
+ 271, 271, 271, 271, 271, 271, 271, 12, 272, 272, 272, 272, 272, 272, 12, 12,
+ 237, 2, 2, 2, 2, 2, 231, 2, 2, 2, 273, 12, 274, 275, 276, 12,
+ 277, 2, 2, 2, 278, 278, 278, 278, 278, 278, 278, 279, 0, 0, 246, 12,
+ 280, 280, 280, 280, 280, 280, 12, 12, 281, 281, 281, 281, 281, 282, 12, 283,
+ 281, 281, 282, 12, 284, 284, 284, 284, 284, 284, 284, 285, 286, 286, 286, 286,
+ 286, 12, 12, 287, 150, 150, 150, 288, 289, 289, 289, 289, 289, 289, 289, 290,
+ 289, 289, 291, 292, 145, 145, 145, 293, 294, 294, 294, 294, 294, 295, 12, 12,
+ 294, 294, 294, 296, 294, 294, 296, 294, 297, 297, 297, 297, 298, 12, 12, 12,
+ 12, 12, 299, 297, 300, 300, 300, 300, 300, 301, 12, 12, 155, 154, 155, 154,
+ 155, 154, 12, 12, 2, 2, 3, 2, 2, 302, 303, 12, 300, 300, 300, 304,
+ 300, 300, 304, 12, 150, 12, 12, 12, 150, 265, 305, 150, 150, 150, 150, 12,
+ 247, 247, 247, 249, 247, 247, 249, 12, 2, 273, 12, 12, 306, 22, 12, 24,
+ 25, 26, 25, 307, 308, 309, 25, 25, 50, 12, 12, 12, 310, 29, 29, 29,
+ 29, 29, 29, 311, 312, 29, 29, 29, 29, 29, 12, 310, 7, 7, 7, 313,
+ 232, 0, 0, 0, 0, 232, 0, 12, 29, 314, 29, 29, 29, 29, 29, 315,
+ 316, 0, 0, 0, 0, 317, 260, 260, 260, 260, 260, 318, 319, 150, 319, 150,
+ 319, 150, 319, 288, 0, 232, 0, 232, 12, 12, 316, 246, 320, 320, 320, 321,
+ 320, 320, 320, 320, 320, 322, 320, 320, 320, 320, 322, 323, 320, 320, 320, 324,
+ 320, 320, 322, 12, 232, 131, 0, 0, 0, 131, 0, 0, 8, 8, 8, 14,
+ 0, 0, 0, 234, 325, 12, 12, 12, 0, 0, 0, 326, 327, 327, 327, 327,
+ 327, 327, 327, 328, 329, 329, 329, 329, 330, 12, 12, 12, 215, 0, 0, 0,
+ 331, 331, 331, 331, 331, 12, 12, 332, 333, 333, 333, 333, 333, 333, 334, 12,
+ 335, 335, 335, 335, 335, 335, 336, 12, 337, 337, 337, 337, 337, 337, 337, 338,
+ 339, 339, 339, 339, 339, 12, 339, 339, 339, 340, 12, 12, 341, 341, 341, 341,
+ 342, 342, 342, 342, 343, 343, 343, 343, 343, 343, 343, 344, 343, 343, 344, 12,
+ 345, 345, 345, 345, 345, 12, 345, 345, 345, 345, 345, 12, 346, 346, 346, 346,
+ 346, 346, 12, 12, 347, 347, 347, 347, 347, 12, 12, 348, 349, 349, 350, 349,
+ 350, 351, 349, 349, 351, 349, 349, 349, 351, 349, 351, 352, 353, 353, 353, 353,
+ 353, 354, 12, 12, 353, 355, 12, 12, 353, 353, 12, 12, 2, 274, 2, 2,
+ 356, 2, 273, 12, 357, 358, 359, 357, 357, 357, 357, 357, 357, 360, 361, 362,
+ 363, 363, 363, 363, 363, 364, 363, 363, 365, 365, 365, 365, 366, 366, 366, 366,
+ 366, 366, 366, 367, 12, 368, 366, 366, 369, 369, 369, 369, 370, 371, 372, 369,
+ 373, 373, 373, 373, 373, 373, 373, 374, 375, 375, 375, 375, 375, 375, 376, 377,
+ 378, 378, 378, 378, 379, 379, 379, 379, 379, 379, 12, 379, 380, 379, 379, 379,
+ 381, 382, 12, 381, 381, 383, 383, 381, 381, 381, 381, 381, 381, 384, 385, 386,
+ 381, 381, 387, 12, 388, 388, 388, 388, 389, 389, 389, 389, 390, 390, 390, 390,
+ 390, 391, 392, 390, 390, 391, 12, 12, 393, 393, 393, 393, 393, 394, 395, 393,
+ 396, 396, 396, 396, 396, 397, 396, 396, 398, 398, 398, 398, 399, 12, 398, 398,
+ 400, 400, 400, 400, 401, 12, 402, 403, 12, 12, 402, 400, 404, 404, 404, 404,
+ 404, 404, 405, 12, 406, 406, 406, 406, 407, 12, 12, 12, 407, 12, 408, 406,
+ 409, 409, 409, 409, 409, 409, 12, 12, 409, 409, 410, 12, 411, 411, 411, 411,
+ 411, 411, 412, 413, 413, 12, 12, 12, 12, 12, 12, 414, 415, 415, 415, 415,
+ 415, 415, 12, 12, 416, 416, 416, 416, 416, 416, 417, 12, 418, 418, 418, 418,
+ 418, 418, 419, 12, 420, 420, 420, 420, 420, 420, 420, 12, 421, 421, 421, 421,
+ 421, 422, 12, 12, 423, 423, 423, 423, 423, 423, 423, 424, 425, 423, 423, 423,
+ 423, 424, 12, 426, 427, 427, 427, 427, 428, 12, 12, 429, 430, 430, 430, 430,
+ 430, 430, 431, 12, 430, 430, 432, 12, 433, 433, 433, 433, 433, 434, 433, 433,
+ 433, 433, 12, 12, 435, 435, 435, 435, 435, 436, 12, 12, 437, 437, 437, 437,
+ 118, 119, 119, 119, 119, 127, 12, 12, 438, 438, 438, 438, 439, 438, 438, 438,
+ 440, 12, 12, 12, 441, 442, 443, 444, 441, 441, 441, 444, 441, 441, 445, 12,
+ 446, 446, 446, 446, 446, 446, 447, 12, 446, 446, 448, 12, 449, 450, 449, 451,
+ 451, 449, 449, 449, 449, 449, 452, 449, 452, 450, 453, 449, 449, 451, 451, 454,
+ 455, 456, 12, 450, 449, 457, 449, 455, 449, 455, 12, 12, 458, 458, 458, 458,
+ 458, 458, 458, 459, 460, 12, 12, 12, 461, 461, 461, 461, 461, 461, 12, 12,
+ 461, 461, 462, 12, 463, 463, 463, 463, 463, 464, 463, 463, 463, 463, 463, 464,
+ 465, 465, 465, 465, 465, 466, 12, 12, 465, 465, 467, 12, 178, 178, 178, 180,
+ 468, 468, 468, 468, 468, 468, 469, 12, 470, 470, 470, 470, 470, 470, 471, 472,
+ 470, 470, 470, 12, 470, 471, 12, 12, 473, 473, 473, 473, 473, 473, 473, 12,
+ 474, 474, 474, 474, 475, 12, 12, 476, 477, 478, 479, 477, 477, 480, 477, 477,
+ 477, 477, 477, 477, 477, 481, 482, 477, 477, 478, 12, 12, 477, 477, 483, 12,
+ 484, 484, 485, 484, 484, 484, 484, 484, 484, 486, 12, 12, 487, 487, 487, 487,
+ 487, 487, 12, 12, 488, 488, 488, 488, 489, 12, 12, 12, 490, 490, 490, 490,
+ 490, 490, 491, 12, 53, 53, 492, 12, 493, 493, 494, 493, 493, 493, 493, 493,
+ 493, 495, 493, 493, 493, 496, 12, 12, 493, 493, 493, 497, 498, 498, 498, 498,
+ 499, 498, 498, 498, 498, 498, 500, 498, 498, 501, 12, 12, 502, 503, 504, 502,
+ 502, 502, 502, 502, 502, 503, 505, 504, 502, 502, 12, 12, 502, 502, 506, 12,
+ 507, 508, 509, 507, 507, 507, 507, 507, 507, 507, 507, 510, 508, 507, 511, 12,
+ 507, 507, 512, 12, 513, 513, 513, 513, 513, 513, 514, 12, 515, 515, 515, 515,
+ 516, 515, 515, 515, 515, 515, 517, 518, 515, 515, 519, 12, 520, 12, 12, 12,
+ 100, 100, 100, 100, 96, 12, 12, 98, 521, 521, 521, 521, 521, 521, 522, 12,
+ 521, 521, 521, 523, 521, 524, 12, 12, 521, 12, 12, 12, 525, 525, 525, 525,
+ 526, 12, 12, 12, 527, 527, 527, 527, 527, 528, 12, 12, 529, 529, 529, 529,
+ 529, 530, 12, 12, 272, 272, 531, 12, 532, 532, 532, 532, 532, 532, 532, 533,
+ 532, 532, 534, 535, 536, 536, 536, 536, 536, 536, 536, 537, 536, 536, 538, 12,
+ 539, 539, 539, 539, 539, 539, 539, 540, 539, 540, 12, 12, 541, 541, 541, 541,
+ 541, 542, 12, 12, 541, 541, 543, 541, 543, 541, 541, 541, 541, 541, 12, 544,
+ 545, 545, 545, 545, 545, 545, 546, 12, 547, 547, 547, 547, 547, 547, 548, 549,
+ 547, 547, 12, 549, 550, 551, 12, 12, 249, 12, 12, 12, 552, 552, 552, 552,
+ 552, 552, 12, 12, 553, 553, 553, 553, 553, 554, 12, 12, 552, 552, 555, 12,
+ 260, 556, 260, 557, 558, 255, 255, 255, 559, 12, 12, 12, 560, 12, 12, 12,
+ 256, 561, 12, 12, 12, 260, 12, 12, 562, 562, 562, 562, 562, 562, 562, 12,
+ 563, 563, 563, 563, 563, 563, 564, 12, 563, 563, 563, 565, 563, 563, 565, 12,
+ 563, 563, 566, 563, 7, 7, 7, 567, 7, 199, 12, 12, 0, 246, 12, 12,
+ 0, 232, 316, 0, 0, 568, 228, 0, 0, 0, 568, 7, 213, 569, 7, 0,
+ 0, 0, 570, 228, 8, 225, 12, 12, 0, 0, 234, 12, 0, 0, 0, 229,
+ 571, 572, 316, 229, 0, 0, 240, 316, 0, 316, 0, 0, 0, 240, 232, 316,
+ 0, 229, 0, 229, 0, 0, 240, 232, 0, 573, 239, 0, 229, 0, 0, 0,
+ 0, 246, 0, 0, 0, 0, 0, 239, 574, 574, 574, 574, 574, 574, 574, 12,
+ 12, 12, 575, 574, 576, 574, 574, 574, 2, 2, 2, 273, 12, 275, 273, 12,
+ 241, 577, 241, 241, 241, 241, 578, 241, 579, 580, 577, 12, 19, 19, 19, 581,
+ 12, 12, 12, 582, 583, 583, 583, 583, 583, 583, 583, 584, 583, 583, 583, 585,
+ 583, 583, 585, 586, 587, 587, 587, 587, 587, 587, 587, 588, 589, 589, 589, 589,
+ 589, 589, 590, 591, 592, 592, 592, 592, 592, 592, 593, 12, 151, 154, 151, 594,
+ 151, 151, 151, 154, 595, 595, 595, 595, 595, 596, 595, 595, 595, 597, 12, 12,
+ 598, 598, 598, 598, 598, 598, 598, 12, 598, 598, 599, 600, 0, 234, 12, 12,
+ 29, 414, 29, 29, 601, 602, 414, 29, 50, 29, 603, 12, 604, 310, 603, 414,
+ 601, 602, 603, 603, 601, 602, 50, 29, 50, 29, 414, 605, 29, 29, 606, 29,
+ 29, 29, 29, 12, 414, 414, 606, 29, 51, 12, 12, 12, 12, 239, 0, 0,
+ 607, 12, 12, 12, 246, 12, 12, 12, 0, 0, 12, 0, 0, 232, 131, 0,
+ 0, 0, 12, 12, 0, 0, 0, 240, 0, 246, 12, 239, 608, 12, 12, 12,
+ 247, 247, 609, 12, 610, 12, 12, 12, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 939, 940, 941, 942, 946, 948, 0, 962,
969, 970, 971, 976,1001,1002,1003,1008, 0,1033,1040,1041,1042,1043,1047, 0,
0,1080,1081,1082,1086,1110, 0, 0,1124,1125,1126,1127,1131,1133, 0,1147,
@@ -4562,21 +4387,14 @@ _hb_ucd_u16[9320] =
817, 818, 819, 820, 821, 935, 0, 0,
};
static const int16_t
-_hb_ucd_i16[196] =
+_hb_ucd_i16[92] =
{
- 0, 0, 0, 0, 1, -1, 0, 0, 2, 0, -2, 0, 0, 0, 0, 2,
- 0, -2, 0, 0, 0, 0, 0, 16, 0, 0, 0, -16, 0, 0, 1, -1,
- 0, 0, 0, 1, -1, 0, 0, 0, 0, 1, -1, 0, 3, 3, 3, -3,
- -3, -3, 0, 0, 0, 2016, 0, 0, 0, 0, 0, 2527, 1923, 1914, 1918, 0,
- 2250, 0, 0, 0, 0, 0, 0, 138, 0, 7, 0, 0, -7, 0, 0, 0,
- 1, -1, 1, -1, -1, 1, -1, 0, 1824, 0, 0, 0, 0, 0, 2104, 0,
- 2108, 2106, 0, 2106, 1316, 0, 0, 0, 0, 1, -1, 1, -1, -138, 0, 0,
- 1, -1, 8, 8, 8, 0, 7, 7, 0, 0, -8, -8, -8, -7, -7, 0,
- 1, -1, 0, 2,-1316, 1, -1, 0, -1, 1, -1, 1, -1, 3, 1, -1,
- -3, 1, -1, 1, -1, 0, 0,-1914,-1918, 0, 0,-1923,-1824, 0, 0, 0,
- 0,-2016, 0, 0, 1, -1, 0, 1, 0, 0,-2104, 0, 0, 0, 0,-2106,
- -2108,-2106, 0, 0, 1, -1,-2250, 0, 0, 0,-2527, 0, 0, -2, 0, 1,
- -1, 0, 1, -1,
+ 0, 0, 1, -1, 2, 0, -2, 0, 0, 2, 0, -2, 0, 16, 0, -16,
+ 0, 1, -1, 0, 3, 3, 3, -3, -3, -3, 0, 2016, 0, 2527, 1923, 1914,
+ 1918, 0, 2250, 0, 0, 138, 0, 7, -7, 0, -1, 1, 1824, 0, 2104, 0,
+ 2108, 2106, 0, 2106, 1316, 0, -1, -138, 8, 8, 8, 0, 7, 7, -8, -8,
+ -8, -7,-1316, 1, -1, 3, -3, 1, 0,-1914,-1918, 0, 0,-1923,-1824, 0,
+ 0,-2016,-2104, 0, 0,-2106,-2108,-2106,-2250, 0,-2527, 0,
};
static inline uint_fast8_t
@@ -4597,17 +4415,17 @@ _hb_ucd_b4 (const uint8_t* a, unsigned i)
static inline int_fast16_t
_hb_ucd_bmg (unsigned u)
{
- return u<65380u?_hb_ucd_i16[((_hb_ucd_u8[9540+(((_hb_ucd_u8[9420+(((_hb_ucd_b4(9292+_hb_ucd_u8,u>>2>>3>>3))<<3)+((u>>2>>3)&7u))])<<3)+((u>>2)&7u))])<<2)+((u)&3u)]:0;
+ return u<65380u?_hb_ucd_i16[((_hb_ucd_u8[9684+(((_hb_ucd_u8[9452+(((_hb_ucd_u8[9356+(((_hb_ucd_b4(9292+_hb_ucd_u8,u>>1>>2>>3>>3))<<3)+((u>>1>>2>>3)&7u))])<<3)+((u>>1>>2)&7u))])<<2)+((u>>1)&3u))])<<1)+((u)&1u)]:0;
}
static inline uint_fast8_t
_hb_ucd_sc (unsigned u)
{
- return u<918000u?_hb_ucd_u8[11062+(((_hb_ucd_u16[2040+(((_hb_ucd_u8[10326+(((_hb_ucd_u8[9876+(u>>3>>4>>4)])<<4)+((u>>3>>4)&15u))])<<4)+((u>>3)&15u))])<<3)+((u)&7u))]:2;
+ return u<918000u?_hb_ucd_u8[11118+(((_hb_ucd_u16[4024+(((_hb_ucd_u16[2040+(((_hb_ucd_u8[10382+(((_hb_ucd_u8[9932+(u>>2>>2>>3>>4)])<<4)+((u>>2>>2>>3)&15u))])<<3)+((u>>2>>2)&7u))])<<2)+((u>>2)&3u))])<<2)+((u)&3u))]:2;
}
static inline uint_fast16_t
_hb_ucd_dm (unsigned u)
{
- return u<195102u?_hb_ucd_u16[6008+(((_hb_ucd_u8[17068+(((_hb_ucd_u8[16686+(u>>4>>5)])<<5)+((u>>4)&31u))])<<4)+((u)&15u))]:0;
+ return u<195102u?_hb_ucd_u16[6728+(((_hb_ucd_u8[13944+(((_hb_ucd_u8[13562+(u>>4>>5)])<<5)+((u>>4)&31u))])<<4)+((u)&15u))]:0;
}
diff --git a/thirdparty/harfbuzz/src/hb-ucd.cc b/thirdparty/harfbuzz/src/hb-ucd.cc
index baea224a25..4c8b1ee5e6 100644
--- a/thirdparty/harfbuzz/src/hb-ucd.cc
+++ b/thirdparty/harfbuzz/src/hb-ucd.cc
@@ -129,12 +129,16 @@ hb_ucd_compose (hb_unicode_funcs_t *ufuncs HB_UNUSED,
hb_codepoint_t a, hb_codepoint_t b, hb_codepoint_t *ab,
void *user_data HB_UNUSED)
{
+ // Hangul is handled algorithmically.
if (_hb_ucd_compose_hangul (a, b, ab)) return true;
hb_codepoint_t u = 0;
if ((a & 0xFFFFF800u) == 0x0000u && (b & 0xFFFFFF80) == 0x0300u)
{
+ /* If "a" is small enough and "b" is in the U+0300 range,
+ * the composition data is encoded in a 32bit array sorted
+ * by "a,b" pair. */
uint32_t k = HB_CODEPOINT_ENCODE3_11_7_14 (a, b, 0);
const uint32_t *v = hb_bsearch (k,
_hb_ucd_dm2_u32_map,
@@ -146,6 +150,8 @@ hb_ucd_compose (hb_unicode_funcs_t *ufuncs HB_UNUSED,
}
else
{
+ /* Otherwise it is stored in a 64bit array sorted by
+ * "a,b" pair. */
uint64_t k = HB_CODEPOINT_ENCODE3 (a, b, 0);
const uint64_t *v = hb_bsearch (k,
_hb_ucd_dm2_u64_map,
@@ -170,15 +176,22 @@ hb_ucd_decompose (hb_unicode_funcs_t *ufuncs HB_UNUSED,
unsigned i = _hb_ucd_dm (ab);
+ /* If no data, there's no decomposition. */
if (likely (!i)) return false;
i--;
+ /* Check if it's a single-character decomposition. */
if (i < ARRAY_LENGTH (_hb_ucd_dm1_p0_map) + ARRAY_LENGTH (_hb_ucd_dm1_p2_map))
{
+ /* Single-character decompositions currently are only in plane 0 or plane 2. */
if (i < ARRAY_LENGTH (_hb_ucd_dm1_p0_map))
+ {
+ /* Plane 0. */
*a = _hb_ucd_dm1_p0_map[i];
+ }
else
{
+ /* Plane 2. */
i -= ARRAY_LENGTH (_hb_ucd_dm1_p0_map);
*a = 0x20000 | _hb_ucd_dm1_p2_map[i];
}
@@ -187,8 +200,10 @@ hb_ucd_decompose (hb_unicode_funcs_t *ufuncs HB_UNUSED,
}
i -= ARRAY_LENGTH (_hb_ucd_dm1_p0_map) + ARRAY_LENGTH (_hb_ucd_dm1_p2_map);
+ /* Otherwise they are encoded either in a 32bit array or a 64bit array. */
if (i < ARRAY_LENGTH (_hb_ucd_dm2_u32_map))
{
+ /* 32bit array. */
uint32_t v = _hb_ucd_dm2_u32_map[i];
*a = HB_CODEPOINT_DECODE3_11_7_14_1 (v);
*b = HB_CODEPOINT_DECODE3_11_7_14_2 (v);
@@ -196,6 +211,7 @@ hb_ucd_decompose (hb_unicode_funcs_t *ufuncs HB_UNUSED,
}
i -= ARRAY_LENGTH (_hb_ucd_dm2_u32_map);
+ /* 64bit array. */
uint64_t v = _hb_ucd_dm2_u64_map[i];
*a = HB_CODEPOINT_DECODE3_1 (v);
*b = HB_CODEPOINT_DECODE3_2 (v);
diff --git a/thirdparty/harfbuzz/src/hb-unicode.cc b/thirdparty/harfbuzz/src/hb-unicode.cc
index c1795dc7f2..9a6471e52c 100644
--- a/thirdparty/harfbuzz/src/hb-unicode.cc
+++ b/thirdparty/harfbuzz/src/hb-unicode.cc
@@ -377,20 +377,30 @@ hb_unicode_funcs_set_##name##_func (hb_unicode_funcs_t *ufuncs, \
hb_destroy_func_t destroy) \
{ \
if (hb_object_is_immutable (ufuncs)) \
- return; \
+ goto fail; \
+ \
+ if (!func) \
+ { \
+ if (destroy) \
+ destroy (user_data); \
+ destroy = nullptr; \
+ user_data = ufuncs->parent->user_data.name; \
+ } \
\
if (ufuncs->destroy.name) \
ufuncs->destroy.name (ufuncs->user_data.name); \
\
- if (func) { \
+ if (func) \
ufuncs->func.name = func; \
- ufuncs->user_data.name = user_data; \
- ufuncs->destroy.name = destroy; \
- } else { \
+ else \
ufuncs->func.name = ufuncs->parent->func.name; \
- ufuncs->user_data.name = ufuncs->parent->user_data.name; \
- ufuncs->destroy.name = nullptr; \
- } \
+ ufuncs->user_data.name = user_data; \
+ ufuncs->destroy.name = destroy; \
+ return; \
+ \
+fail: \
+ if (destroy) \
+ destroy (user_data); \
}
HB_UNICODE_FUNCS_IMPLEMENT_CALLBACKS
diff --git a/thirdparty/harfbuzz/src/hb-uniscribe.cc b/thirdparty/harfbuzz/src/hb-uniscribe.cc
index 50f71ce9ce..9648e02663 100644
--- a/thirdparty/harfbuzz/src/hb-uniscribe.cc
+++ b/thirdparty/harfbuzz/src/hb-uniscribe.cc
@@ -355,7 +355,7 @@ _hb_rename_font (hb_blob_t *blob, wchar_t *new_name)
return nullptr;
}
- memcpy(new_sfnt_data, orig_sfnt_data, length);
+ hb_memcpy(new_sfnt_data, orig_sfnt_data, length);
OT::name &name = StructAtOffset<OT::name> (new_sfnt_data, name_table_offset);
name.format = 0;
@@ -478,11 +478,11 @@ populate_log_font (LOGFONTW *lf,
hb_font_t *font,
unsigned int font_size)
{
- memset (lf, 0, sizeof (*lf));
+ hb_memset (lf, 0, sizeof (*lf));
lf->lfHeight = - (int) font_size;
lf->lfCharSet = DEFAULT_CHARSET;
- memcpy (lf->lfFaceName, font->face->data.uniscribe->face_name, sizeof (lf->lfFaceName));
+ hb_memcpy (lf->lfFaceName, font->face->data.uniscribe->face_name, sizeof (lf->lfFaceName));
return true;
}
diff --git a/thirdparty/harfbuzz/src/hb-vector.hh b/thirdparty/harfbuzz/src/hb-vector.hh
index a6d9f6b3fb..9b52f5ca95 100644
--- a/thirdparty/harfbuzz/src/hb-vector.hh
+++ b/thirdparty/harfbuzz/src/hb-vector.hh
@@ -35,7 +35,7 @@
template <typename Type,
bool sorted=false>
-struct hb_vector_t : std::conditional<sorted, hb_vector_t<Type, false>, hb_empty_t>::type
+struct hb_vector_t
{
typedef Type item_t;
static constexpr unsigned item_size = hb_static_size (Type);
@@ -53,9 +53,10 @@ struct hb_vector_t : std::conditional<sorted, hb_vector_t<Type, false>, hb_empty
hb_requires (hb_is_iterable (Iterable))>
hb_vector_t (const Iterable &o) : hb_vector_t ()
{
- if (hb_iter (o).is_random_access_iterator)
- alloc (hb_len (hb_iter (o)));
- hb_copy (o, *this);
+ auto iter = hb_iter (o);
+ if (iter.is_random_access_iterator)
+ alloc (hb_len (iter));
+ hb_copy (iter, *this);
}
hb_vector_t (const hb_vector_t &o) : hb_vector_t ()
{
@@ -83,6 +84,9 @@ struct hb_vector_t : std::conditional<sorted, hb_vector_t<Type, false>, hb_empty
allocated = length = 0;
arrayZ = nullptr;
}
+ void init0 ()
+ {
+ }
void fini ()
{
@@ -94,7 +98,11 @@ struct hb_vector_t : std::conditional<sorted, hb_vector_t<Type, false>, hb_empty
void reset ()
{
if (unlikely (in_error ()))
- allocated = length; // Big hack!
+ /* Big Hack! We don't know the true allocated size before
+ * an allocation failure happened. But we know it was at
+ * least as big as length. Restore it to that and continue
+ * as if error did not happen. */
+ allocated = length;
resize (0);
}
@@ -122,7 +130,7 @@ struct hb_vector_t : std::conditional<sorted, hb_vector_t<Type, false>, hb_empty
}
hb_bytes_t as_bytes () const
- { return hb_bytes_t ((const char *) arrayZ, length * item_size); }
+ { return hb_bytes_t ((const char *) arrayZ, get_size ()); }
bool operator == (const hb_vector_t &o) const { return as_array () == o.as_array (); }
bool operator != (const hb_vector_t &o) const { return !(*this == o); }
@@ -164,14 +172,10 @@ struct hb_vector_t : std::conditional<sorted, hb_vector_t<Type, false>, hb_empty
operator iter_t () const { return iter (); }
operator writer_t () { return writer (); }
- c_array_t sub_array (unsigned int start_offset, unsigned int count) const
- { return as_array ().sub_array (start_offset, count); }
- c_array_t sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */) const
- { return as_array ().sub_array (start_offset, count); }
- array_t sub_array (unsigned int start_offset, unsigned int count)
- { return as_array ().sub_array (start_offset, count); }
- array_t sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */)
- { return as_array ().sub_array (start_offset, count); }
+ /* Faster range-based for loop. */
+ Type *begin () const { return arrayZ; }
+ Type *end () const { return arrayZ + length; }
+
hb_sorted_array_t<Type> as_sorted_array ()
{ return hb_sorted_array (arrayZ, length); }
@@ -240,12 +244,11 @@ struct hb_vector_t : std::conditional<sorted, hb_vector_t<Type, false>, hb_empty
if (likely (new_array))
{
for (unsigned i = 0; i < length; i++)
+ {
new (std::addressof (new_array[i])) Type ();
- for (unsigned i = 0; i < (unsigned) length; i++)
new_array[i] = std::move (arrayZ[i]);
- unsigned old_length = length;
- shrink_vector (0);
- length = old_length;
+ arrayZ[i].~Type ();
+ }
hb_free (arrayZ);
}
return new_array;
@@ -277,7 +280,14 @@ struct hb_vector_t : std::conditional<sorted, hb_vector_t<Type, false>, hb_empty
copy_vector (const hb_vector_t &other)
{
length = other.length;
- hb_memcpy ((void *) arrayZ, (const void *) other.arrayZ, length * item_size);
+#ifndef HB_OPTIMIZE_SIZE
+ if (sizeof (T) >= sizeof (long long))
+ /* This runs faster because of alignment. */
+ for (unsigned i = 0; i < length; i++)
+ arrayZ[i] = other.arrayZ[i];
+ else
+#endif
+ hb_memcpy ((void *) arrayZ, (const void *) other.arrayZ, length * item_size);
}
template <typename T = Type,
hb_enable_if (!hb_is_trivially_copyable (T) &&
@@ -309,15 +319,6 @@ struct hb_vector_t : std::conditional<sorted, hb_vector_t<Type, false>, hb_empty
}
}
- template <typename T = Type,
- hb_enable_if (hb_is_trivially_destructible(T))>
- void
- shrink_vector (unsigned size)
- {
- length = size;
- }
- template <typename T = Type,
- hb_enable_if (!hb_is_trivially_destructible(T))>
void
shrink_vector (unsigned size)
{
@@ -328,17 +329,6 @@ struct hb_vector_t : std::conditional<sorted, hb_vector_t<Type, false>, hb_empty
}
}
- template <typename T = Type,
- hb_enable_if (hb_is_trivially_copy_assignable(T))>
- void
- shift_down_vector (unsigned i)
- {
- memmove (static_cast<void *> (&arrayZ[i - 1]),
- static_cast<void *> (&arrayZ[i]),
- (length - i) * sizeof (Type));
- }
- template <typename T = Type,
- hb_enable_if (!hb_is_trivially_copy_assignable(T))>
void
shift_down_vector (unsigned i)
{
@@ -381,16 +371,22 @@ struct hb_vector_t : std::conditional<sorted, hb_vector_t<Type, false>, hb_empty
return true;
}
- bool resize (int size_)
+ bool resize (int size_, bool initialize = true)
{
unsigned int size = size_ < 0 ? 0u : (unsigned int) size_;
if (!alloc (size))
return false;
if (size > length)
- grow_vector (size);
+ {
+ if (initialize)
+ grow_vector (size);
+ }
else if (size < length)
- shrink_vector (size);
+ {
+ if (initialize)
+ shrink_vector (size);
+ }
length = size;
return true;
@@ -399,13 +395,13 @@ struct hb_vector_t : std::conditional<sorted, hb_vector_t<Type, false>, hb_empty
Type pop ()
{
if (!length) return Null (Type);
- Type v = arrayZ[length - 1];
+ Type v {std::move (arrayZ[length - 1])};
arrayZ[length - 1].~Type ();
length--;
return v;
}
- void remove (unsigned int i)
+ void remove_ordered (unsigned int i)
{
if (unlikely (i >= length))
return;
@@ -414,6 +410,18 @@ struct hb_vector_t : std::conditional<sorted, hb_vector_t<Type, false>, hb_empty
length--;
}
+ template <bool Sorted = sorted,
+ hb_enable_if (!Sorted)>
+ void remove_unordered (unsigned int i)
+ {
+ if (unlikely (i >= length))
+ return;
+ if (i != length - 1)
+ arrayZ[i] = std::move (arrayZ[length - 1]);
+ arrayZ[length - 1].~Type ();
+ length--;
+ }
+
void shrink (int size_)
{
unsigned int size = size_ < 0 ? 0u : (unsigned int) size_;
@@ -425,10 +433,8 @@ struct hb_vector_t : std::conditional<sorted, hb_vector_t<Type, false>, hb_empty
/* Sorting API. */
- void qsort (int (*cmp)(const void*, const void*))
+ void qsort (int (*cmp)(const void*, const void*) = Type::cmp)
{ as_array ().qsort (cmp); }
- void qsort (unsigned int start = 0, unsigned int end = (unsigned int) -1)
- { as_array ().qsort (start, end); }
/* Unsorted search API. */
template <typename T>
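
The hb-vector.hh hunk above renames remove() to remove_ordered() and adds remove_unordered(), which fills the hole with the last element instead of shifting the tail. A short sketch of the trade-off, shown on std::vector with hypothetical helper names rather than hb_vector_t itself: ordered removal is O(n) per erase but preserves order, unordered removal is O(1) but reorders the vector (hence it is disabled for the sorted specialization).

    #include <cassert>
    #include <utility>
    #include <vector>

    template <typename T>
    void remove_ordered (std::vector<T> &v, size_t i)
    {
      if (i >= v.size ()) return;
      v.erase (v.begin () + i);            // shifts everything after i down
    }

    template <typename T>
    void remove_unordered (std::vector<T> &v, size_t i)
    {
      if (i >= v.size ()) return;
      if (i != v.size () - 1)
        v[i] = std::move (v.back ());      // fill the hole with the last element
      v.pop_back ();
    }

    int main ()
    {
      std::vector<int> a {1, 2, 3, 4}, b {1, 2, 3, 4};
      remove_ordered (a, 1);    // a == {1, 3, 4}
      remove_unordered (b, 1);  // b == {1, 4, 3}: order not preserved
      assert (a[1] == 3 && b[1] == 4);
      return 0;
    }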
diff --git a/thirdparty/harfbuzz/src/hb-version.h b/thirdparty/harfbuzz/src/hb-version.h
index fa7403cae7..1070262a3b 100644
--- a/thirdparty/harfbuzz/src/hb-version.h
+++ b/thirdparty/harfbuzz/src/hb-version.h
@@ -41,13 +41,13 @@ HB_BEGIN_DECLS
*
* The major component of the library version available at compile-time.
*/
-#define HB_VERSION_MAJOR 5
+#define HB_VERSION_MAJOR 6
/**
* HB_VERSION_MINOR:
*
* The minor component of the library version available at compile-time.
*/
-#define HB_VERSION_MINOR 2
+#define HB_VERSION_MINOR 0
/**
* HB_VERSION_MICRO:
*
@@ -60,7 +60,7 @@ HB_BEGIN_DECLS
*
* A string literal containing the library version available at compile-time.
*/
-#define HB_VERSION_STRING "5.2.0"
+#define HB_VERSION_STRING "6.0.0"
/**
* HB_VERSION_ATLEAST:
diff --git a/thirdparty/harfbuzz/src/hb.hh b/thirdparty/harfbuzz/src/hb.hh
index 8ec638a2b4..410d090d36 100644
--- a/thirdparty/harfbuzz/src/hb.hh
+++ b/thirdparty/harfbuzz/src/hb.hh
@@ -126,6 +126,7 @@
/* Ignored intentionally. */
#ifndef HB_NO_PRAGMA_GCC_DIAGNOSTIC_IGNORED
#pragma GCC diagnostic ignored "-Wclass-memaccess"
+#pragma GCC diagnostic ignored "-Wcast-function-type-strict" // https://github.com/harfbuzz/harfbuzz/pull/3859#issuecomment-1295409126
#pragma GCC diagnostic ignored "-Wformat-nonliteral"
#pragma GCC diagnostic ignored "-Wformat-zero-length"
#pragma GCC diagnostic ignored "-Wmissing-field-initializers"
diff --git a/thirdparty/icu4c/common/appendable.cpp b/thirdparty/icu4c/common/appendable.cpp
index fca3c1e413..f9b20180eb 100644
--- a/thirdparty/icu4c/common/appendable.cpp
+++ b/thirdparty/icu4c/common/appendable.cpp
@@ -37,23 +37,23 @@ Appendable::appendString(const UChar *s, int32_t length) {
UChar c;
while((c=*s++)!=0) {
if(!appendCodeUnit(c)) {
- return FALSE;
+ return false;
}
}
} else if(length>0) {
const UChar *limit=s+length;
do {
if(!appendCodeUnit(*s++)) {
- return FALSE;
+ return false;
}
} while(s<limit);
}
- return TRUE;
+ return true;
}
UBool
Appendable::reserveAppendCapacity(int32_t /*appendCapacity*/) {
- return TRUE;
+ return true;
}
UChar *
diff --git a/thirdparty/icu4c/common/bmpset.cpp b/thirdparty/icu4c/common/bmpset.cpp
index bc79f5e5a6..14ab894267 100644
--- a/thirdparty/icu4c/common/bmpset.cpp
+++ b/thirdparty/icu4c/common/bmpset.cpp
@@ -309,9 +309,9 @@ BMPSet::contains(UChar32 c) const {
// surrogate or supplementary code point
return containsSlow(c, list4kStarts[0xd], list4kStarts[0x11]);
} else {
- // Out-of-range code points get FALSE, consistent with long-standing
+ // Out-of-range code points get false, consistent with long-standing
// behavior of UnicodeSet::contains(c).
- return FALSE;
+ return false;
}
}
diff --git a/thirdparty/icu4c/common/brkeng.cpp b/thirdparty/icu4c/common/brkeng.cpp
index dc9fb99bf1..bd6abbb55a 100644
--- a/thirdparty/icu4c/common/brkeng.cpp
+++ b/thirdparty/icu4c/common/brkeng.cpp
@@ -261,10 +261,10 @@ ICULanguageBreakFactory::loadDictionaryMatcherFor(UScriptCode script) {
const UChar *extStart = u_memrchr(dictfname, 0x002e, dictnlength); // last dot
if (extStart != NULL) {
int32_t len = (int32_t)(extStart - dictfname);
- ext.appendInvariantChars(UnicodeString(FALSE, extStart + 1, dictnlength - len - 1), status);
+ ext.appendInvariantChars(UnicodeString(false, extStart + 1, dictnlength - len - 1), status);
dictnlength = len;
}
- dictnbuf.appendInvariantChars(UnicodeString(FALSE, dictfname, dictnlength), status);
+ dictnbuf.appendInvariantChars(UnicodeString(false, dictfname, dictnlength), status);
ures_close(b);
UDataMemory *file = udata_open(U_ICUDATA_BRKITR, ext.data(), dictnbuf.data(), &status);
diff --git a/thirdparty/icu4c/common/brkiter.cpp b/thirdparty/icu4c/common/brkiter.cpp
index 8a1915880e..d6996734e3 100644
--- a/thirdparty/icu4c/common/brkiter.cpp
+++ b/thirdparty/icu4c/common/brkiter.cpp
@@ -279,7 +279,7 @@ ICUBreakIteratorService::~ICUBreakIteratorService() {}
// defined in ucln_cmn.h
U_NAMESPACE_END
-static icu::UInitOnce gInitOnceBrkiter = U_INITONCE_INITIALIZER;
+static icu::UInitOnce gInitOnceBrkiter {};
static icu::ICULocaleService* gService = NULL;
@@ -296,7 +296,7 @@ static UBool U_CALLCONV breakiterator_cleanup(void) {
}
gInitOnceBrkiter.reset();
#endif
- return TRUE;
+ return true;
}
U_CDECL_END
U_NAMESPACE_BEGIN
@@ -347,7 +347,7 @@ BreakIterator::unregister(URegistryKey key, UErrorCode& status)
}
status = U_MEMORY_ALLOCATION_ERROR;
}
- return FALSE;
+ return false;
}
// -------------------------------------
diff --git a/thirdparty/icu4c/common/bytesinkutil.cpp b/thirdparty/icu4c/common/bytesinkutil.cpp
index c64a845f87..a32254a7db 100644
--- a/thirdparty/icu4c/common/bytesinkutil.cpp
+++ b/thirdparty/icu4c/common/bytesinkutil.cpp
@@ -20,7 +20,7 @@ U_NAMESPACE_BEGIN
UBool
ByteSinkUtil::appendChange(int32_t length, const char16_t *s16, int32_t s16Length,
ByteSink &sink, Edits *edits, UErrorCode &errorCode) {
- if (U_FAILURE(errorCode)) { return FALSE; }
+ if (U_FAILURE(errorCode)) { return false; }
char scratch[200];
int32_t s8Length = 0;
for (int32_t i = 0; i < s16Length;) {
@@ -44,7 +44,7 @@ ByteSinkUtil::appendChange(int32_t length, const char16_t *s16, int32_t s16Lengt
}
if (j > (INT32_MAX - s8Length)) {
errorCode = U_INDEX_OUTOFBOUNDS_ERROR;
- return FALSE;
+ return false;
}
sink.Append(buffer, j);
s8Length += j;
@@ -52,17 +52,17 @@ ByteSinkUtil::appendChange(int32_t length, const char16_t *s16, int32_t s16Lengt
if (edits != nullptr) {
edits->addReplace(length, s8Length);
}
- return TRUE;
+ return true;
}
UBool
ByteSinkUtil::appendChange(const uint8_t *s, const uint8_t *limit,
const char16_t *s16, int32_t s16Length,
ByteSink &sink, Edits *edits, UErrorCode &errorCode) {
- if (U_FAILURE(errorCode)) { return FALSE; }
+ if (U_FAILURE(errorCode)) { return false; }
if ((limit - s) > INT32_MAX) {
errorCode = U_INDEX_OUTOFBOUNDS_ERROR;
- return FALSE;
+ return false;
}
return appendChange((int32_t)(limit - s), s16, s16Length, sink, edits, errorCode);
}
@@ -109,16 +109,16 @@ UBool
ByteSinkUtil::appendUnchanged(const uint8_t *s, const uint8_t *limit,
ByteSink &sink, uint32_t options, Edits *edits,
UErrorCode &errorCode) {
- if (U_FAILURE(errorCode)) { return FALSE; }
+ if (U_FAILURE(errorCode)) { return false; }
if ((limit - s) > INT32_MAX) {
errorCode = U_INDEX_OUTOFBOUNDS_ERROR;
- return FALSE;
+ return false;
}
int32_t length = (int32_t)(limit - s);
if (length > 0) {
appendNonEmptyUnchanged(s, length, sink, options, edits);
}
- return TRUE;
+ return true;
}
CharStringByteSink::CharStringByteSink(CharString* dest) : dest_(*dest) {
diff --git a/thirdparty/icu4c/common/bytesinkutil.h b/thirdparty/icu4c/common/bytesinkutil.h
index ab2516432d..929c71fbee 100644
--- a/thirdparty/icu4c/common/bytesinkutil.h
+++ b/thirdparty/icu4c/common/bytesinkutil.h
@@ -4,6 +4,9 @@
// bytesinkutil.h
// created: 2017sep14 Markus W. Scherer
+#ifndef BYTESINKUTIL_H
+#define BYTESINKUTIL_H
+
#include "unicode/utypes.h"
#include "unicode/bytestream.h"
#include "unicode/edits.h"
@@ -81,3 +84,5 @@ private:
};
U_NAMESPACE_END
+
+#endif //BYTESINKUTIL_H
diff --git a/thirdparty/icu4c/common/bytestream.cpp b/thirdparty/icu4c/common/bytestream.cpp
index 0d0e4dda39..c14f206dfe 100644
--- a/thirdparty/icu4c/common/bytestream.cpp
+++ b/thirdparty/icu4c/common/bytestream.cpp
@@ -30,14 +30,14 @@ void ByteSink::Flush() {}
CheckedArrayByteSink::CheckedArrayByteSink(char* outbuf, int32_t capacity)
: outbuf_(outbuf), capacity_(capacity < 0 ? 0 : capacity),
- size_(0), appended_(0), overflowed_(FALSE) {
+ size_(0), appended_(0), overflowed_(false) {
}
CheckedArrayByteSink::~CheckedArrayByteSink() {}
CheckedArrayByteSink& CheckedArrayByteSink::Reset() {
size_ = appended_ = 0;
- overflowed_ = FALSE;
+ overflowed_ = false;
return *this;
}
@@ -48,14 +48,14 @@ void CheckedArrayByteSink::Append(const char* bytes, int32_t n) {
if (n > (INT32_MAX - appended_)) {
// TODO: Report as integer overflow, not merely buffer overflow.
appended_ = INT32_MAX;
- overflowed_ = TRUE;
+ overflowed_ = true;
return;
}
appended_ += n;
int32_t available = capacity_ - size_;
if (n > available) {
n = available;
- overflowed_ = TRUE;
+ overflowed_ = true;
}
if (n > 0 && bytes != (outbuf_ + size_)) {
uprv_memcpy(outbuf_ + size_, bytes, n);
diff --git a/thirdparty/icu4c/common/bytestrie.cpp b/thirdparty/icu4c/common/bytestrie.cpp
index c4d498c4bf..c272cc4022 100644
--- a/thirdparty/icu4c/common/bytestrie.cpp
+++ b/thirdparty/icu4c/common/bytestrie.cpp
@@ -337,13 +337,13 @@ BytesTrie::findUniqueValueFromBranch(const uint8_t *pos, int32_t length,
}
} else {
uniqueValue=value;
- haveUniqueValue=TRUE;
+ haveUniqueValue=true;
}
} else {
if(!findUniqueValue(pos+value, haveUniqueValue, uniqueValue)) {
return NULL;
}
- haveUniqueValue=TRUE;
+ haveUniqueValue=true;
}
} while(--length>1);
return pos+1; // ignore the last comparison byte
@@ -359,9 +359,9 @@ BytesTrie::findUniqueValue(const uint8_t *pos, UBool haveUniqueValue, int32_t &u
}
pos=findUniqueValueFromBranch(pos, node+1, haveUniqueValue, uniqueValue);
if(pos==NULL) {
- return FALSE;
+ return false;
}
- haveUniqueValue=TRUE;
+ haveUniqueValue=true;
} else if(node<kMinValueLead) {
// linear-match node
pos+=node-kMinLinearMatch+1; // Ignore the match bytes.
@@ -370,14 +370,14 @@ BytesTrie::findUniqueValue(const uint8_t *pos, UBool haveUniqueValue, int32_t &u
int32_t value=readValue(pos, node>>1);
if(haveUniqueValue) {
if(value!=uniqueValue) {
- return FALSE;
+ return false;
}
} else {
uniqueValue=value;
- haveUniqueValue=TRUE;
+ haveUniqueValue=true;
}
if(isFinal) {
- return TRUE;
+ return true;
}
pos=skipValue(pos, node);
}
diff --git a/thirdparty/icu4c/common/bytestriebuilder.cpp b/thirdparty/icu4c/common/bytestriebuilder.cpp
index 82dad42ca5..ac7d3d867e 100644
--- a/thirdparty/icu4c/common/bytestriebuilder.cpp
+++ b/thirdparty/icu4c/common/bytestriebuilder.cpp
@@ -231,7 +231,7 @@ BytesTrieBuilder::buildBytes(UStringTrieBuildOption buildOption, UErrorCode &err
}
uprv_sortArray(elements, elementsLength, (int32_t)sizeof(BytesTrieElement),
compareElementStrings, strings,
- FALSE, // need not be a stable sort
+ false, // need not be a stable sort
&errorCode);
if(U_FAILURE(errorCode)) {
return;
@@ -375,7 +375,7 @@ BytesTrieBuilder::createLinearMatchNode(int32_t i, int32_t byteIndex, int32_t le
UBool
BytesTrieBuilder::ensureCapacity(int32_t length) {
if(bytes==NULL) {
- return FALSE; // previous memory allocation had failed
+ return false; // previous memory allocation had failed
}
if(length>bytesCapacity) {
int32_t newCapacity=bytesCapacity;
@@ -388,7 +388,7 @@ BytesTrieBuilder::ensureCapacity(int32_t length) {
uprv_free(bytes);
bytes=NULL;
bytesCapacity=0;
- return FALSE;
+ return false;
}
uprv_memcpy(newBytes+(newCapacity-bytesLength),
bytes+(bytesCapacity-bytesLength), bytesLength);
@@ -396,7 +396,7 @@ BytesTrieBuilder::ensureCapacity(int32_t length) {
bytes=newBytes;
bytesCapacity=newCapacity;
}
- return TRUE;
+ return true;
}
int32_t
@@ -463,7 +463,7 @@ int32_t
BytesTrieBuilder::writeValueAndType(UBool hasValue, int32_t value, int32_t node) {
int32_t offset=write(node);
if(hasValue) {
- offset=writeValueAndFinal(value, FALSE);
+ offset=writeValueAndFinal(value, false);
}
return offset;
}
diff --git a/thirdparty/icu4c/common/bytestrieiterator.cpp b/thirdparty/icu4c/common/bytestrieiterator.cpp
index e64961a1f1..eacb7eedb0 100644
--- a/thirdparty/icu4c/common/bytestrieiterator.cpp
+++ b/thirdparty/icu4c/common/bytestrieiterator.cpp
@@ -101,12 +101,12 @@ BytesTrie::Iterator::hasNext() const { return pos_!=NULL || !stack_->isEmpty();
UBool
BytesTrie::Iterator::next(UErrorCode &errorCode) {
if(U_FAILURE(errorCode)) {
- return FALSE;
+ return false;
}
const uint8_t *pos=pos_;
if(pos==NULL) {
if(stack_->isEmpty()) {
- return FALSE;
+ return false;
}
// Pop the state off the stack and continue with the next outbound edge of
// the branch node.
@@ -119,7 +119,7 @@ BytesTrie::Iterator::next(UErrorCode &errorCode) {
if(length>1) {
pos=branchNext(pos, length, errorCode);
if(pos==NULL) {
- return TRUE; // Reached a final value.
+ return true; // Reached a final value.
}
} else {
str_->append((char)*pos++, errorCode);
@@ -141,7 +141,7 @@ BytesTrie::Iterator::next(UErrorCode &errorCode) {
} else {
pos_=skipValue(pos, node);
}
- return TRUE;
+ return true;
}
if(maxLength_>0 && str_->length()==maxLength_) {
return truncateAndStop();
@@ -152,7 +152,7 @@ BytesTrie::Iterator::next(UErrorCode &errorCode) {
}
pos=branchNext(pos, node+1, errorCode);
if(pos==NULL) {
- return TRUE; // Reached a final value.
+ return true; // Reached a final value.
}
} else {
// Linear-match node, append length bytes to str_.
@@ -177,7 +177,7 @@ UBool
BytesTrie::Iterator::truncateAndStop() {
pos_=NULL;
value_=-1; // no real value for str
- return TRUE;
+ return true;
}
// Branch node, needs to take the first outbound edge and push state for the rest.
diff --git a/thirdparty/icu4c/common/caniter.cpp b/thirdparty/icu4c/common/caniter.cpp
index a2083afde3..81f17265fb 100644
--- a/thirdparty/icu4c/common/caniter.cpp
+++ b/thirdparty/icu4c/common/caniter.cpp
@@ -119,7 +119,7 @@ UnicodeString CanonicalIterator::getSource() {
* Resets the iterator so that one can start again from the beginning.
*/
void CanonicalIterator::reset() {
- done = FALSE;
+ done = false;
for (int i = 0; i < current_length; ++i) {
current[i] = 0;
}
@@ -151,7 +151,7 @@ UnicodeString CanonicalIterator::next() {
for (i = current_length - 1; ; --i) {
if (i < 0) {
- done = TRUE;
+ done = true;
break;
}
current[i]++;
@@ -176,7 +176,7 @@ void CanonicalIterator::setSource(const UnicodeString &newSource, UErrorCode &st
if(U_FAILURE(status)) {
return;
}
- done = FALSE;
+ done = false;
cleanPieces();
@@ -521,7 +521,7 @@ Hashtable *CanonicalIterator::extract(Hashtable *fillinResult, UChar32 comp, con
int32_t decompLen=decompString.length();
// See if it matches the start of segment (at segmentPos)
- UBool ok = FALSE;
+ UBool ok = false;
UChar32 cp;
int32_t decompPos = 0;
UChar32 decompCp;
@@ -537,7 +537,7 @@ Hashtable *CanonicalIterator::extract(Hashtable *fillinResult, UChar32 comp, con
if (decompPos == decompLen) { // done, have all decomp characters!
temp.append(segment+i, segLen-i);
- ok = TRUE;
+ ok = true;
break;
}
U16_NEXT(decomp, decompPos, decompLen, decompCp);
diff --git a/thirdparty/icu4c/common/characterproperties.cpp b/thirdparty/icu4c/common/characterproperties.cpp
index a84996b47c..2316a391a3 100644
--- a/thirdparty/icu4c/common/characterproperties.cpp
+++ b/thirdparty/icu4c/common/characterproperties.cpp
@@ -36,11 +36,11 @@ namespace {
UBool U_CALLCONV characterproperties_cleanup();
-constexpr int32_t NUM_INCLUSIONS = UPROPS_SRC_COUNT + UCHAR_INT_LIMIT - UCHAR_INT_START;
+constexpr int32_t NUM_INCLUSIONS = UPROPS_SRC_COUNT + (UCHAR_INT_LIMIT - UCHAR_INT_START);
struct Inclusion {
UnicodeSet *fSet = nullptr;
- UInitOnce fInitOnce = U_INITONCE_INITIALIZER;
+ UInitOnce fInitOnce {};
};
Inclusion gInclusions[NUM_INCLUSIONS]; // cached getInclusions()
@@ -85,7 +85,7 @@ UBool U_CALLCONV characterproperties_cleanup() {
ucptrie_close(reinterpret_cast<UCPTrie *>(maps[i]));
maps[i] = nullptr;
}
- return TRUE;
+ return true;
}
void U_CALLCONV initInclusion(UPropertySource src, UErrorCode &errorCode) {
@@ -210,7 +210,7 @@ const UnicodeSet *getInclusionsForSource(UPropertySource src, UErrorCode &errorC
void U_CALLCONV initIntPropInclusion(UProperty prop, UErrorCode &errorCode) {
// This function is invoked only via umtx_initOnce().
U_ASSERT(UCHAR_INT_START <= prop && prop < UCHAR_INT_LIMIT);
- int32_t inclIndex = UPROPS_SRC_COUNT + prop - UCHAR_INT_START;
+ int32_t inclIndex = UPROPS_SRC_COUNT + (prop - UCHAR_INT_START);
U_ASSERT(gInclusions[inclIndex].fSet == nullptr);
UPropertySource src = uprops_getSource(prop);
const UnicodeSet *incl = getInclusionsForSource(src, errorCode);
@@ -255,7 +255,7 @@ const UnicodeSet *CharacterProperties::getInclusionsForProperty(
UProperty prop, UErrorCode &errorCode) {
if (U_FAILURE(errorCode)) { return nullptr; }
if (UCHAR_INT_START <= prop && prop < UCHAR_INT_LIMIT) {
- int32_t inclIndex = UPROPS_SRC_COUNT + prop - UCHAR_INT_START;
+ int32_t inclIndex = UPROPS_SRC_COUNT + (prop - UCHAR_INT_START);
Inclusion &i = gInclusions[inclIndex];
umtx_initOnce(i.fInitOnce, &initIntPropInclusion, prop, errorCode);
return i.fSet;
diff --git a/thirdparty/icu4c/common/charstr.cpp b/thirdparty/icu4c/common/charstr.cpp
index c35622882c..8a0994c737 100644
--- a/thirdparty/icu4c/common/charstr.cpp
+++ b/thirdparty/icu4c/common/charstr.cpp
@@ -220,7 +220,7 @@ UBool CharString::ensureCapacity(int32_t capacity,
int32_t desiredCapacityHint,
UErrorCode &errorCode) {
if(U_FAILURE(errorCode)) {
- return FALSE;
+ return false;
}
if(capacity>buffer.getCapacity()) {
if(desiredCapacityHint==0) {
@@ -230,10 +230,10 @@ UBool CharString::ensureCapacity(int32_t capacity,
buffer.resize(capacity, len+1)==NULL
) {
errorCode=U_MEMORY_ALLOCATION_ERROR;
- return FALSE;
+ return false;
}
}
- return TRUE;
+ return true;
}
CharString &CharString::appendPathPart(StringPiece s, UErrorCode &errorCode) {
diff --git a/thirdparty/icu4c/common/charstr.h b/thirdparty/icu4c/common/charstr.h
index 175acd1c0a..92a75d3d2f 100644
--- a/thirdparty/icu4c/common/charstr.h
+++ b/thirdparty/icu4c/common/charstr.h
@@ -177,8 +177,8 @@ private:
UBool ensureCapacity(int32_t capacity, int32_t desiredCapacityHint, UErrorCode &errorCode);
- CharString(const CharString &other); // forbid copying of this class
- CharString &operator=(const CharString &other); // forbid copying of this class
+ CharString(const CharString &other) = delete; // forbid copying of this class
+ CharString &operator=(const CharString &other) = delete; // forbid copying of this class
/**
* Returns U_FILE_ALT_SEP_CHAR if found in string, and U_FILE_SEP_CHAR is not found.
diff --git a/thirdparty/icu4c/common/cmemory.cpp b/thirdparty/icu4c/common/cmemory.cpp
index 663c1411e4..64f5034921 100644
--- a/thirdparty/icu4c/common/cmemory.cpp
+++ b/thirdparty/icu4c/common/cmemory.cpp
@@ -134,5 +134,5 @@ U_CFUNC UBool cmemory_cleanup(void) {
pAlloc = NULL;
pRealloc = NULL;
pFree = NULL;
- return TRUE;
+ return true;
}
diff --git a/thirdparty/icu4c/common/cstr.h b/thirdparty/icu4c/common/cstr.h
index c7a77a0ae5..be21d910bb 100644
--- a/thirdparty/icu4c/common/cstr.h
+++ b/thirdparty/icu4c/common/cstr.h
@@ -51,8 +51,8 @@ class U_COMMON_API CStr : public UMemory {
private:
CharString s;
- CStr(const CStr &other); // Forbid copying of this class.
- CStr &operator =(const CStr &other); // Forbid assignment.
+ CStr(const CStr &other) = delete; // Forbid copying of this class.
+ CStr &operator =(const CStr &other) = delete; // Forbid assignment.
};
U_NAMESPACE_END
diff --git a/thirdparty/icu4c/common/dictbe.cpp b/thirdparty/icu4c/common/dictbe.cpp
index 4fdbdf2760..768eb49b95 100644
--- a/thirdparty/icu4c/common/dictbe.cpp
+++ b/thirdparty/icu4c/common/dictbe.cpp
@@ -119,7 +119,7 @@ public:
// Select the currently marked candidate, point after it in the text, and invalidate self
int32_t acceptMarked( UText *text );
- // Back up from the current candidate to the next shorter one; return TRUE if that exists
+ // Back up from the current candidate to the next shorter one; return true if that exists
// and point the text after it
UBool backUp( UText *text );
@@ -165,9 +165,9 @@ UBool
PossibleWord::backUp( UText *text ) {
if (current > 0) {
utext_setNativeIndex(text, offset + cuLengths[--current]);
- return TRUE;
+ return true;
}
- return FALSE;
+ return false;
}
/*
@@ -1146,7 +1146,7 @@ CjkBreakEngine::divideUpDictionaryRange( UText *inText,
// Input UText is in one contiguous UTF-16 chunk.
// Use Read-only aliasing UnicodeString.
- inString.setTo(FALSE,
+ inString.setTo(false,
inText->chunkContents + rangeStart - inText->chunkNativeStart,
rangeEnd - rangeStart);
} else {
diff --git a/thirdparty/icu4c/common/edits.cpp b/thirdparty/icu4c/common/edits.cpp
index 92ca36fb5d..21d7c3f006 100644
--- a/thirdparty/icu4c/common/edits.cpp
+++ b/thirdparty/icu4c/common/edits.cpp
@@ -221,7 +221,7 @@ UBool Edits::growArray() {
// Not U_BUFFER_OVERFLOW_ERROR because that could be confused on a string transform API
// with a result-string-buffer overflow.
errorCode_ = U_INDEX_OUTOFBOUNDS_ERROR;
- return FALSE;
+ return false;
} else if (capacity >= (INT32_MAX / 2)) {
newCapacity = INT32_MAX;
} else {
@@ -230,25 +230,25 @@ UBool Edits::growArray() {
// Grow by at least 5 units so that a maximal change record will fit.
if ((newCapacity - capacity) < 5) {
errorCode_ = U_INDEX_OUTOFBOUNDS_ERROR;
- return FALSE;
+ return false;
}
uint16_t *newArray = (uint16_t *)uprv_malloc((size_t)newCapacity * 2);
if (newArray == NULL) {
errorCode_ = U_MEMORY_ALLOCATION_ERROR;
- return FALSE;
+ return false;
}
uprv_memcpy(newArray, array, (size_t)length * 2);
releaseArray();
array = newArray;
capacity = newCapacity;
- return TRUE;
+ return true;
}
UBool Edits::copyErrorTo(UErrorCode &outErrorCode) const {
- if (U_FAILURE(outErrorCode)) { return TRUE; }
- if (U_SUCCESS(errorCode_)) { return FALSE; }
+ if (U_FAILURE(outErrorCode)) { return true; }
+ if (U_SUCCESS(errorCode_)) { return false; }
outErrorCode = errorCode_;
- return TRUE;
+ return true;
}
Edits &Edits::mergeAndAppend(const Edits &ab, const Edits &bc, UErrorCode &errorCode) {
@@ -257,7 +257,7 @@ Edits &Edits::mergeAndAppend(const Edits &ab, const Edits &bc, UErrorCode &error
// Parallel iteration over both Edits.
Iterator abIter = ab.getFineIterator();
Iterator bcIter = bc.getFineIterator();
- UBool abHasNext = TRUE, bcHasNext = TRUE;
+ UBool abHasNext = true, bcHasNext = true;
// Copy iterator state into local variables, so that we can modify and subdivide spans.
// ab old & new length, bc old & new length
int32_t aLength = 0, ab_bLength = 0, bc_bLength = 0, cLength = 0;
@@ -400,7 +400,7 @@ Edits &Edits::mergeAndAppend(const Edits &ab, const Edits &bc, UErrorCode &error
Edits::Iterator::Iterator(const uint16_t *a, int32_t len, UBool oc, UBool crs) :
array(a), index(0), length(len), remaining(0),
onlyChanges_(oc), coarse(crs),
- dir(0), changed(FALSE), oldLength_(0), newLength_(0),
+ dir(0), changed(false), oldLength_(0), newLength_(0),
srcIndex(0), replIndex(0), destIndex(0) {}
int32_t Edits::Iterator::readLength(int32_t head) {
@@ -441,16 +441,16 @@ void Edits::Iterator::updatePreviousIndexes() {
UBool Edits::Iterator::noNext() {
// No change before or beyond the string.
dir = 0;
- changed = FALSE;
+ changed = false;
oldLength_ = newLength_ = 0;
- return FALSE;
+ return false;
}
UBool Edits::Iterator::next(UBool onlyChanges, UErrorCode &errorCode) {
// Forward iteration: Update the string indexes to the limit of the current span,
// and post-increment-read array units to assemble a new span.
// Leaves the array index one after the last unit of that span.
- if (U_FAILURE(errorCode)) { return FALSE; }
+ if (U_FAILURE(errorCode)) { return false; }
// We have an errorCode in case we need to start guarding against integer overflows.
// It is also convenient for caller loops if we bail out when an error was set elsewhere.
if (dir > 0) {
@@ -464,7 +464,7 @@ UBool Edits::Iterator::next(UBool onlyChanges, UErrorCode &errorCode) {
// Stay on the current one of a sequence of compressed changes.
++index; // next() rests on the index after the sequence unit.
dir = 1;
- return TRUE;
+ return true;
}
}
dir = 1;
@@ -473,7 +473,7 @@ UBool Edits::Iterator::next(UBool onlyChanges, UErrorCode &errorCode) {
// Fine-grained iterator: Continue a sequence of compressed changes.
if (remaining > 1) {
--remaining;
- return TRUE;
+ return true;
}
remaining = 0;
}
@@ -483,7 +483,7 @@ UBool Edits::Iterator::next(UBool onlyChanges, UErrorCode &errorCode) {
int32_t u = array[index++];
if (u <= MAX_UNCHANGED) {
// Combine adjacent unchanged ranges.
- changed = FALSE;
+ changed = false;
oldLength_ = u + 1;
while (index < length && (u = array[index]) <= MAX_UNCHANGED) {
++index;
@@ -498,10 +498,10 @@ UBool Edits::Iterator::next(UBool onlyChanges, UErrorCode &errorCode) {
// already fetched u > MAX_UNCHANGED at index
++index;
} else {
- return TRUE;
+ return true;
}
}
- changed = TRUE;
+ changed = true;
if (u <= MAX_SHORT_CHANGE) {
int32_t oldLen = u >> 12;
int32_t newLen = (u >> 9) & MAX_SHORT_CHANGE_NEW_LENGTH;
@@ -516,14 +516,14 @@ UBool Edits::Iterator::next(UBool onlyChanges, UErrorCode &errorCode) {
if (num > 1) {
remaining = num; // This is the first of two or more changes.
}
- return TRUE;
+ return true;
}
} else {
U_ASSERT(u <= 0x7fff);
oldLength_ = readLength((u >> 6) & 0x3f);
newLength_ = readLength(u & 0x3f);
if (!coarse) {
- return TRUE;
+ return true;
}
}
// Combine adjacent changes.
@@ -539,14 +539,14 @@ UBool Edits::Iterator::next(UBool onlyChanges, UErrorCode &errorCode) {
newLength_ += readLength(u & 0x3f);
}
}
- return TRUE;
+ return true;
}
UBool Edits::Iterator::previous(UErrorCode &errorCode) {
// Backward iteration: Pre-decrement-read array units to assemble a new span,
// then update the string indexes to the start of that span.
// Leaves the array index on the head unit of that span.
- if (U_FAILURE(errorCode)) { return FALSE; }
+ if (U_FAILURE(errorCode)) { return false; }
// We have an errorCode in case we need to start guarding against integer overflows.
// It is also convenient for caller loops if we bail out when an error was set elsewhere.
if (dir >= 0) {
@@ -559,7 +559,7 @@ UBool Edits::Iterator::previous(UErrorCode &errorCode) {
// Stay on the current one of a sequence of compressed changes.
--index; // previous() rests on the sequence unit.
dir = -1;
- return TRUE;
+ return true;
}
updateNextIndexes();
}
@@ -572,7 +572,7 @@ UBool Edits::Iterator::previous(UErrorCode &errorCode) {
if (remaining <= (u & SHORT_CHANGE_NUM_MASK)) {
++remaining;
updatePreviousIndexes();
- return TRUE;
+ return true;
}
remaining = 0;
}
@@ -582,7 +582,7 @@ UBool Edits::Iterator::previous(UErrorCode &errorCode) {
int32_t u = array[--index];
if (u <= MAX_UNCHANGED) {
// Combine adjacent unchanged ranges.
- changed = FALSE;
+ changed = false;
oldLength_ = u + 1;
while (index > 0 && (u = array[index - 1]) <= MAX_UNCHANGED) {
--index;
@@ -591,9 +591,9 @@ UBool Edits::Iterator::previous(UErrorCode &errorCode) {
newLength_ = oldLength_;
// No need to handle onlyChanges as long as previous() is called only from findIndex().
updatePreviousIndexes();
- return TRUE;
+ return true;
}
- changed = TRUE;
+ changed = true;
if (u <= MAX_SHORT_CHANGE) {
int32_t oldLen = u >> 12;
int32_t newLen = (u >> 9) & MAX_SHORT_CHANGE_NEW_LENGTH;
@@ -609,7 +609,7 @@ UBool Edits::Iterator::previous(UErrorCode &errorCode) {
remaining = 1; // This is the last of two or more changes.
}
updatePreviousIndexes();
- return TRUE;
+ return true;
}
} else {
if (u <= 0x7fff) {
@@ -629,7 +629,7 @@ UBool Edits::Iterator::previous(UErrorCode &errorCode) {
}
if (!coarse) {
updatePreviousIndexes();
- return TRUE;
+ return true;
}
}
// Combine adjacent changes.
@@ -648,7 +648,7 @@ UBool Edits::Iterator::previous(UErrorCode &errorCode) {
}
}
updatePreviousIndexes();
- return TRUE;
+ return true;
}
int32_t Edits::Iterator::findIndex(int32_t i, UBool findSource, UErrorCode &errorCode) {
@@ -705,7 +705,7 @@ int32_t Edits::Iterator::findIndex(int32_t i, UBool findSource, UErrorCode &erro
// The index is in the current span.
return 0;
}
- while (next(FALSE, errorCode)) {
+ while (next(false, errorCode)) {
if (findSource) {
spanStart = srcIndex;
spanLength = oldLength_;
@@ -739,7 +739,7 @@ int32_t Edits::Iterator::findIndex(int32_t i, UBool findSource, UErrorCode &erro
}
int32_t Edits::Iterator::destinationIndexFromSourceIndex(int32_t i, UErrorCode &errorCode) {
- int32_t where = findIndex(i, TRUE, errorCode);
+ int32_t where = findIndex(i, true, errorCode);
if (where < 0) {
// Error or before the string.
return 0;
@@ -758,7 +758,7 @@ int32_t Edits::Iterator::destinationIndexFromSourceIndex(int32_t i, UErrorCode &
}
int32_t Edits::Iterator::sourceIndexFromDestinationIndex(int32_t i, UErrorCode &errorCode) {
- int32_t where = findIndex(i, FALSE, errorCode);
+ int32_t where = findIndex(i, false, errorCode);
if (where < 0) {
// Error or before the string.
return 0;
diff --git a/thirdparty/icu4c/common/emojiprops.cpp b/thirdparty/icu4c/common/emojiprops.cpp
index 2a05e8602d..d07e07c6cc 100644
--- a/thirdparty/icu4c/common/emojiprops.cpp
+++ b/thirdparty/icu4c/common/emojiprops.cpp
@@ -22,7 +22,7 @@ U_NAMESPACE_BEGIN
namespace {
EmojiProps *singleton = nullptr;
-icu::UInitOnce emojiInitOnce = U_INITONCE_INITIALIZER;
+icu::UInitOnce emojiInitOnce {};
UBool U_CALLCONV emojiprops_cleanup() {
delete singleton;
diff --git a/thirdparty/icu4c/common/filteredbrk.cpp b/thirdparty/icu4c/common/filteredbrk.cpp
index e4817367a5..baa1d4e42d 100644
--- a/thirdparty/icu4c/common/filteredbrk.cpp
+++ b/thirdparty/icu4c/common/filteredbrk.cpp
@@ -58,7 +58,7 @@ static int32_t U_CALLCONV compareUnicodeString(UElement t1, UElement t2) {
/**
* A UVector which implements a set of strings.
*/
-class U_COMMON_API UStringSet : public UVector {
+class UStringSet : public UVector {
public:
UStringSet(UErrorCode &status) : UVector(uprv_deleteUObject,
uhash_compareUnicodeString,
@@ -482,7 +482,7 @@ SimpleFilteredSentenceBreakIterator::last(void) {
/**
* Concrete implementation of builder class.
*/
-class U_COMMON_API SimpleFilteredBreakIteratorBuilder : public FilteredBreakIteratorBuilder {
+class SimpleFilteredBreakIteratorBuilder : public FilteredBreakIteratorBuilder {
public:
virtual ~SimpleFilteredBreakIteratorBuilder();
SimpleFilteredBreakIteratorBuilder(const Locale &fromLocale, UErrorCode &status);
@@ -614,11 +614,11 @@ SimpleFilteredBreakIteratorBuilder::build(BreakIterator* adoptBreakIterator, UEr
i++) {
const UnicodeString *abbr = fSet.getStringAt(i);
if(abbr) {
- FB_TRACE("build",abbr,TRUE,i);
+ FB_TRACE("build",abbr,true,i);
ustrs[n] = *abbr; // copy by value
- FB_TRACE("ustrs[n]",&ustrs[n],TRUE,i);
+ FB_TRACE("ustrs[n]",&ustrs[n],true,i);
} else {
- FB_TRACE("build",abbr,FALSE,i);
+ FB_TRACE("build",abbr,false,i);
status = U_MEMORY_ALLOCATION_ERROR;
return NULL;
}
@@ -629,37 +629,37 @@ SimpleFilteredBreakIteratorBuilder::build(BreakIterator* adoptBreakIterator, UEr
for(int i=0;i<subCount;i++) {
int nn = ustrs[i].indexOf(kFULLSTOP); // TODO: non-'.' abbreviations
if(nn>-1 && (nn+1)!=ustrs[i].length()) {
- FB_TRACE("partial",&ustrs[i],FALSE,i);
+ FB_TRACE("partial",&ustrs[i],false,i);
// is partial.
// is it unique?
int sameAs = -1;
for(int j=0;j<subCount;j++) {
if(j==i) continue;
if(ustrs[i].compare(0,nn+1,ustrs[j],0,nn+1)==0) {
- FB_TRACE("prefix",&ustrs[j],FALSE,nn+1);
+ FB_TRACE("prefix",&ustrs[j],false,nn+1);
//UBool otherIsPartial = ((nn+1)!=ustrs[j].length()); // true if ustrs[j] doesn't end at nn
if(partials[j]==0) { // hasn't been processed yet
partials[j] = kSuppressInReverse | kAddToForward;
- FB_TRACE("suppressing",&ustrs[j],FALSE,j);
+ FB_TRACE("suppressing",&ustrs[j],false,j);
} else if(partials[j] & kSuppressInReverse) {
sameAs = j; // the other entry is already in the reverse table.
}
}
}
- FB_TRACE("for partial same-",&ustrs[i],FALSE,sameAs);
- FB_TRACE(" == partial #",&ustrs[i],FALSE,partials[i]);
+ FB_TRACE("for partial same-",&ustrs[i],false,sameAs);
+ FB_TRACE(" == partial #",&ustrs[i],false,partials[i]);
UnicodeString prefix(ustrs[i], 0, nn+1);
if(sameAs == -1 && partials[i] == 0) {
// first one - add the prefix to the reverse table.
prefix.reverse();
builder->add(prefix, kPARTIAL, status);
revCount++;
- FB_TRACE("Added partial",&prefix,FALSE, i);
- FB_TRACE(u_errorName(status),&ustrs[i],FALSE,i);
+ FB_TRACE("Added partial",&prefix,false, i);
+ FB_TRACE(u_errorName(status),&ustrs[i],false,i);
partials[i] = kSuppressInReverse | kAddToForward;
} else {
- FB_TRACE("NOT adding partial",&prefix,FALSE, i);
- FB_TRACE(u_errorName(status),&ustrs[i],FALSE,i);
+ FB_TRACE("NOT adding partial",&prefix,false, i);
+ FB_TRACE(u_errorName(status),&ustrs[i],false,i);
}
}
}
@@ -668,9 +668,9 @@ SimpleFilteredBreakIteratorBuilder::build(BreakIterator* adoptBreakIterator, UEr
ustrs[i].reverse();
builder->add(ustrs[i], kMATCH, status);
revCount++;
- FB_TRACE(u_errorName(status), &ustrs[i], FALSE, i);
+ FB_TRACE(u_errorName(status), &ustrs[i], false, i);
} else {
- FB_TRACE("Adding fwd",&ustrs[i], FALSE, i);
+ FB_TRACE("Adding fwd",&ustrs[i], false, i);
// an optimization would be to only add the portion after the '.'
// for example, for "Ph.D." we store ".hP" in the reverse table. We could just store "D." in the forward,
@@ -682,12 +682,12 @@ SimpleFilteredBreakIteratorBuilder::build(BreakIterator* adoptBreakIterator, UEr
////if(debug2) u_printf("SUPPRESS- not Added(%d): /%S/ status=%s\n",partials[i], ustrs[i].getTerminatedBuffer(), u_errorName(status));
}
}
- FB_TRACE("AbbrCount",NULL,FALSE, subCount);
+ FB_TRACE("AbbrCount",NULL,false, subCount);
if(revCount>0) {
backwardsTrie.adoptInstead(builder->build(USTRINGTRIE_BUILD_FAST, status));
if(U_FAILURE(status)) {
- FB_TRACE(u_errorName(status),NULL,FALSE, -1);
+ FB_TRACE(u_errorName(status),NULL,false, -1);
return NULL;
}
}
@@ -695,7 +695,7 @@ SimpleFilteredBreakIteratorBuilder::build(BreakIterator* adoptBreakIterator, UEr
if(fwdCount>0) {
forwardsPartialTrie.adoptInstead(builder2->build(USTRINGTRIE_BUILD_FAST, status));
if(U_FAILURE(status)) {
- FB_TRACE(u_errorName(status),NULL,FALSE, -1);
+ FB_TRACE(u_errorName(status),NULL,false, -1);
return NULL;
}
}
diff --git a/thirdparty/icu4c/common/filterednormalizer2.cpp b/thirdparty/icu4c/common/filterednormalizer2.cpp
index 1a0914d3f7..63f01206e9 100644
--- a/thirdparty/icu4c/common/filterednormalizer2.cpp
+++ b/thirdparty/icu4c/common/filterednormalizer2.cpp
@@ -137,14 +137,14 @@ UnicodeString &
FilteredNormalizer2::normalizeSecondAndAppend(UnicodeString &first,
const UnicodeString &second,
UErrorCode &errorCode) const {
- return normalizeSecondAndAppend(first, second, TRUE, errorCode);
+ return normalizeSecondAndAppend(first, second, true, errorCode);
}
UnicodeString &
FilteredNormalizer2::append(UnicodeString &first,
const UnicodeString &second,
UErrorCode &errorCode) const {
- return normalizeSecondAndAppend(first, second, FALSE, errorCode);
+ return normalizeSecondAndAppend(first, second, false, errorCode);
}
UnicodeString &
@@ -224,7 +224,7 @@ UBool
FilteredNormalizer2::isNormalized(const UnicodeString &s, UErrorCode &errorCode) const {
uprv_checkCanGetBuffer(s, errorCode);
if(U_FAILURE(errorCode)) {
- return FALSE;
+ return false;
}
USetSpanCondition spanCondition=USET_SPAN_SIMPLE;
for(int32_t prevSpanLimit=0; prevSpanLimit<s.length();) {
@@ -235,19 +235,19 @@ FilteredNormalizer2::isNormalized(const UnicodeString &s, UErrorCode &errorCode)
if( !norm2.isNormalized(s.tempSubStringBetween(prevSpanLimit, spanLimit), errorCode) ||
U_FAILURE(errorCode)
) {
- return FALSE;
+ return false;
}
spanCondition=USET_SPAN_NOT_CONTAINED;
}
prevSpanLimit=spanLimit;
}
- return TRUE;
+ return true;
}
UBool
FilteredNormalizer2::isNormalizedUTF8(StringPiece sp, UErrorCode &errorCode) const {
if(U_FAILURE(errorCode)) {
- return FALSE;
+ return false;
}
const char *s = sp.data();
int32_t length = sp.length();
@@ -259,14 +259,14 @@ FilteredNormalizer2::isNormalizedUTF8(StringPiece sp, UErrorCode &errorCode) con
} else {
if (!norm2.isNormalizedUTF8(StringPiece(s, spanLength), errorCode) ||
U_FAILURE(errorCode)) {
- return FALSE;
+ return false;
}
spanCondition = USET_SPAN_NOT_CONTAINED;
}
s += spanLength;
length -= spanLength;
}
- return TRUE;
+ return true;
}
UNormalizationCheckResult
diff --git a/thirdparty/icu4c/common/hash.h b/thirdparty/icu4c/common/hash.h
index b927ddb3c3..c9afeaf562 100644
--- a/thirdparty/icu4c/common/hash.h
+++ b/thirdparty/icu4c/common/hash.h
@@ -115,8 +115,8 @@ public:
inline UBool equals(const Hashtable& that) const;
private:
- Hashtable(const Hashtable &other); // forbid copying of this class
- Hashtable &operator=(const Hashtable &other); // forbid copying of this class
+ Hashtable(const Hashtable &other) = delete; // forbid copying of this class
+ Hashtable &operator=(const Hashtable &other) = delete; // forbid copying of this class
};
/*********************************************************************
diff --git a/thirdparty/icu4c/common/icuplug.cpp b/thirdparty/icu4c/common/icuplug.cpp
index 52a0deced6..72b5d27175 100644
--- a/thirdparty/icu4c/common/icuplug.cpp
+++ b/thirdparty/icu4c/common/icuplug.cpp
@@ -59,8 +59,8 @@ struct UPlugData {
void *context; /**< user context data */
char name[UPLUG_NAME_MAX]; /**< name of plugin */
UPlugLevel level; /**< level of plugin */
- UBool awaitingLoad; /**< TRUE if the plugin is awaiting a load call */
- UBool dontUnload; /**< TRUE if plugin must stay resident (leak plugin and lib) */
+ UBool awaitingLoad; /**< true if the plugin is awaiting a load call */
+ UBool dontUnload; /**< true if plugin must stay resident (leak plugin and lib) */
UErrorCode pluginStatus; /**< status code of plugin */
};
@@ -304,11 +304,11 @@ static void uplug_queryPlug(UPlugData *plug, UErrorCode *status) {
if(U_SUCCESS(*status)) {
if(plug->level == UPLUG_LEVEL_INVALID) {
plug->pluginStatus = U_PLUGIN_DIDNT_SET_LEVEL;
- plug->awaitingLoad = FALSE;
+ plug->awaitingLoad = false;
}
} else {
plug->pluginStatus = U_INTERNAL_PROGRAM_ERROR;
- plug->awaitingLoad = FALSE;
+ plug->awaitingLoad = false;
}
}
@@ -322,7 +322,7 @@ static void uplug_loadPlug(UPlugData *plug, UErrorCode *status) {
return;
}
uplug_callPlug(plug, UPLUG_REASON_LOAD, status);
- plug->awaitingLoad = FALSE;
+ plug->awaitingLoad = false;
if(!U_SUCCESS(*status)) {
plug->pluginStatus = U_INTERNAL_PROGRAM_ERROR;
}
@@ -347,8 +347,8 @@ static UPlugData *uplug_allocateEmptyPlug(UErrorCode *status)
plug->structSize = sizeof(UPlugData);
plug->name[0]=0;
plug->level = UPLUG_LEVEL_UNKNOWN; /* initialize to null state */
- plug->awaitingLoad = TRUE;
- plug->dontUnload = FALSE;
+ plug->awaitingLoad = true;
+ plug->dontUnload = false;
plug->pluginStatus = U_ZERO_ERROR;
plug->libName[0] = 0;
plug->config[0]=0;
@@ -403,9 +403,9 @@ static void uplug_deallocatePlug(UPlugData *plug, UErrorCode *status) {
pluginCount = uplug_removeEntryAt(pluginList, pluginCount, sizeof(plug[0]), uplug_pluginNumber(plug));
} else {
/* not ok- leave as a message. */
- plug->awaitingLoad=FALSE;
+ plug->awaitingLoad=false;
plug->entrypoint=0;
- plug->dontUnload=TRUE;
+ plug->dontUnload=true;
}
}
@@ -558,8 +558,8 @@ uplug_initErrorPlug(const char *libName, const char *sym, const char *config, co
if(U_FAILURE(*status)) return NULL;
plug->pluginStatus = loadStatus;
- plug->awaitingLoad = FALSE; /* Won't load. */
- plug->dontUnload = TRUE; /* cannot unload. */
+ plug->awaitingLoad = false; /* Won't load. */
+ plug->dontUnload = true; /* cannot unload. */
if(sym!=NULL) {
uprv_strncpy(plug->sym, sym, UPLUG_NAME_MAX);
@@ -646,7 +646,7 @@ static UBool U_CALLCONV uplug_cleanup(void)
}
/* close other held libs? */
gCurrentLevel = UPLUG_LEVEL_LOW;
- return TRUE;
+ return true;
}
#if U_ENABLE_DYLOAD
@@ -678,7 +678,7 @@ static void uplug_loadWaitingPlugs(UErrorCode *status) {
currentLevel = newLevel;
}
}
- pluginToLoad->awaitingLoad = FALSE;
+ pluginToLoad->awaitingLoad = false;
}
}
}
@@ -694,7 +694,7 @@ static void uplug_loadWaitingPlugs(UErrorCode *status) {
} else {
uplug_loadPlug(pluginToLoad, &subStatus);
}
- pluginToLoad->awaitingLoad = FALSE;
+ pluginToLoad->awaitingLoad = false;
}
}
diff --git a/thirdparty/icu4c/common/loadednormalizer2impl.cpp b/thirdparty/icu4c/common/loadednormalizer2impl.cpp
index 905fc1decc..24ff629f84 100644
--- a/thirdparty/icu4c/common/loadednormalizer2impl.cpp
+++ b/thirdparty/icu4c/common/loadednormalizer2impl.cpp
@@ -67,9 +67,9 @@ LoadedNormalizer2Impl::isAcceptable(void * /*context*/,
) {
// Normalizer2Impl *me=(Normalizer2Impl *)context;
// uprv_memcpy(me->dataVersion, pInfo->dataVersion, 4);
- return TRUE;
+ return true;
} else {
- return FALSE;
+ return false;
}
}
@@ -134,14 +134,14 @@ U_CDECL_END
#if !NORM2_HARDCODE_NFC_DATA
static Norm2AllModes *nfcSingleton;
-static icu::UInitOnce nfcInitOnce = U_INITONCE_INITIALIZER;
+static icu::UInitOnce nfcInitOnce {};
#endif
static Norm2AllModes *nfkcSingleton;
-static icu::UInitOnce nfkcInitOnce = U_INITONCE_INITIALIZER;
+static icu::UInitOnce nfkcInitOnce {};
static Norm2AllModes *nfkc_cfSingleton;
-static icu::UInitOnce nfkc_cfInitOnce = U_INITONCE_INITIALIZER;
+static icu::UInitOnce nfkc_cfInitOnce {};
static UHashtable *cache=NULL;
@@ -185,7 +185,7 @@ static UBool U_CALLCONV uprv_loaded_normalizer2_cleanup() {
uhash_close(cache);
cache=NULL;
- return TRUE;
+ return true;
}
U_CDECL_END
diff --git a/thirdparty/icu4c/common/localebuilder.cpp b/thirdparty/icu4c/common/localebuilder.cpp
index a5f201e847..c1e1f2ad68 100644
--- a/thirdparty/icu4c/common/localebuilder.cpp
+++ b/thirdparty/icu4c/common/localebuilder.cpp
@@ -15,7 +15,7 @@ U_NAMESPACE_BEGIN
#define UPRV_ISDIGIT(c) (((c) >= '0') && ((c) <= '9'))
#define UPRV_ISALPHANUM(c) (uprv_isASCIILetter(c) || UPRV_ISDIGIT(c) )
-const char* kAttributeKey = "attribute";
+constexpr const char* kAttributeKey = "attribute";
static bool _isExtensionSubtags(char key, const char* s, int32_t len) {
switch (uprv_tolower(key)) {
@@ -459,7 +459,7 @@ Locale LocaleBuilder::build(UErrorCode& errorCode)
UBool LocaleBuilder::copyErrorTo(UErrorCode &outErrorCode) const {
if (U_FAILURE(outErrorCode)) {
// Do not overwrite the older error code
- return TRUE;
+ return true;
}
outErrorCode = status_;
return U_FAILURE(outErrorCode);
diff --git a/thirdparty/icu4c/common/localefallback_data.h b/thirdparty/icu4c/common/localefallback_data.h
new file mode 100644
index 0000000000..da725de42d
--- /dev/null
+++ b/thirdparty/icu4c/common/localefallback_data.h
@@ -0,0 +1,632 @@
+// © 2022 and later: Unicode, Inc. and others.
+// License & terms of use: http://www.unicode.org/copyright.html
+//
+// Internal static data tables used by uresbund.cpp
+// WARNING: This file is mechanically generated by the CLDR-to-ICU tool
+// (see tools/cldr/cldr-to-icu/src/main/java/org/unicode/tool/cldrtoicu/generator/ResourcFallbackCodeGenerator.java).
+// DO NOT HAND EDIT!!!
+
+#ifdef INCLUDED_FROM_URESBUND_CPP
+
+//======================================================================
+// Default script table
+const char scriptCodeChars[] =
+ "Aghb\0Ahom\0Arab\0Armi\0Armn\0Avst\0Bamu\0Bass\0Beng\0Brah\0Cakm\0"
+ "Cans\0Cari\0Cham\0Cher\0Chrs\0Copt\0Cprt\0Cyrl\0Deva\0Egyp\0Ethi\0"
+ "Geor\0Gong\0Gonm\0Goth\0Grek\0Gujr\0Guru\0Hans\0Hant\0Hebr\0Hluw\0"
+ "Hmnp\0Ital\0Jpan\0Kali\0Kana\0Kawi\0Khar\0Khmr\0Kits\0Knda\0Kore\0"
+ "Lana\0Laoo\0Lepc\0Lina\0Lisu\0Lyci\0Lydi\0Mand\0Mani\0Medf\0Merc\0"
+ "Mlym\0Mong\0Mroo\0Mymr\0Narb\0Nkoo\0Nshu\0Ogam\0Olck\0Orkh\0Orya\0"
+ "Osge\0Ougr\0Pauc\0Phli\0Phnx\0Plrd\0Prti\0Rohg\0Runr\0Samr\0Sarb\0"
+ "Saur\0Sgnw\0Sinh\0Sogd\0Sora\0Soyo\0Syrc\0Tale\0Talu\0Taml\0Tang\0"
+ "Tavt\0Telu\0Tfng\0Thaa\0Thai\0Tibt\0Tnsa\0Toto\0Ugar\0Vaii\0Wcho\0"
+ "Xpeo\0Xsux\0Yiii\0";
+
+const char dsLocaleIDChars[] =
+ "ab\0abq\0adp\0ady\0ae\0aeb\0aho\0ajt\0akk\0alt\0am\0apc\0apd\0"
+ "ar\0arc\0arq\0ars\0ary\0arz\0as\0ase\0av\0avl\0awa\0az_IQ\0az_IR\0"
+ "az_RU\0ba\0bal\0bap\0bax\0bcq\0be\0bej\0bfq\0bft\0bfy\0bg\0bgc\0"
+ "bgn\0bgx\0bhb\0bhi\0bho\0bji\0bjj\0blt\0bn\0bo\0bpy\0bqi\0bra\0"
+ "brh\0brx\0bsq\0bst\0btv\0bua\0byn\0ccp\0ce\0chm\0chr\0cja\0cjm\0"
+ "ckb\0cmg\0cop\0cr\0crh\0crk\0crl\0csw\0ctd\0cu\0cv\0dar\0dcc\0"
+ "dgl\0dmf\0doi\0drh\0drs\0dty\0dv\0dz\0egy\0eky\0el\0esg\0ett\0"
+ "fa\0fia\0fub\0gan\0gbm\0gbz\0gez\0ggn\0gjk\0gju\0glk\0gmv\0gof\0"
+ "gom\0gon\0got\0grc\0grt\0gu\0gvr\0gwc\0gwt\0ha_CM\0ha_SD\0hak\0"
+ "haz\0hdy\0he\0hi\0hlu\0hmd\0hnd\0hne\0hnj\0hno\0hoc\0hoj\0hsn\0"
+ "hy\0ii\0inh\0iu\0iw\0ja\0ji\0jml\0ka\0kaa\0kaw\0kbd\0kby\0kdt\0"
+ "kfr\0kfy\0khb\0khn\0kht\0khw\0kjg\0kk\0kk_AF\0kk_CN\0kk_IR\0kk_MN\0"
+ "km\0kn\0ko\0koi\0kok\0kqy\0krc\0kru\0ks\0ktb\0ku_LB\0kum\0kv\0"
+ "kvx\0kxc\0kxl\0kxm\0kxp\0ky\0ky_CN\0kzh\0lab\0lad\0lah\0lbe\0"
+ "lcp\0lep\0lez\0lif\0lis\0lki\0lmn\0lo\0lrc\0luz\0lwl\0lzh\0mag\0"
+ "mai\0man_GN\0mde\0mdf\0mdx\0mfa\0mgp\0mk\0mki\0ml\0mn\0mn_CN\0"
+ "mni\0mnw\0mr\0mrd\0mrj\0mro\0ms_CC\0mtr\0mvy\0mwr\0mww\0my\0mym\0"
+ "myv\0myz\0mzn\0nan\0ne\0new\0nnp\0nod\0noe\0non\0nqo\0nsk\0nst\0"
+ "oj\0ojs\0or\0oru\0os\0osa\0ota\0otk\0oui\0pa\0pa_PK\0pal\0peo\0"
+ "phl\0phn\0pka\0pnt\0ppa\0pra\0prd\0ps\0raj\0rhg\0rif\0rjs\0rkt\0"
+ "rmt\0ru\0rue\0ryu\0sa\0sah\0sat\0saz\0sck\0scl\0sd\0sd_IN\0sdh\0"
+ "sga\0sgw\0shi\0shn\0shu\0si\0skr\0smp\0sog\0sou\0sr\0srb\0srx\0"
+ "swb\0swv\0syl\0syr\0ta\0taj\0tcy\0tdd\0tdg\0tdh\0te\0tg\0tg_PK\0"
+ "th\0thl\0thq\0thr\0ti\0tig\0tkt\0trw\0tsd\0tsf\0tsj\0tt\0tts\0"
+ "txg\0txo\0tyv\0udi\0udm\0ug\0ug_KZ\0ug_MN\0uga\0uk\0unr\0unr_NP\0"
+ "unx\0ur\0uz_AF\0uz_CN\0vai\0wal\0wbq\0wbr\0wni\0wsg\0wtm\0wuu\0"
+ "xco\0xcr\0xlc\0xld\0xmf\0xmn\0xmr\0xna\0xnr\0xpr\0xsa\0xsr\0yi\0"
+ "yue\0yue_CN\0zdj\0zgh\0zh\0zh_AU\0zh_BN\0zh_GB\0zh_GF\0zh_HK\0"
+ "zh_ID\0zh_MO\0zh_PA\0zh_PF\0zh_PH\0zh_SR\0zh_TH\0zh_TW\0zh_US\0"
+ "zh_VN\0zhx\0zkt\0";
+
+const int32_t defaultScriptTable[] = {
+ 0, 90, // ab -> Cyrl
+ 3, 90, // abq -> Cyrl
+ 7, 465, // adp -> Tibt
+ 11, 90, // ady -> Cyrl
+ 15, 25, // ae -> Avst
+ 18, 10, // aeb -> Arab
+ 22, 5, // aho -> Ahom
+ 26, 10, // ajt -> Arab
+ 30, 500, // akk -> Xsux
+ 34, 90, // alt -> Cyrl
+ 38, 105, // am -> Ethi
+ 41, 10, // apc -> Arab
+ 45, 10, // apd -> Arab
+ 49, 10, // ar -> Arab
+ 52, 15, // arc -> Armi
+ 56, 10, // arq -> Arab
+ 60, 10, // ars -> Arab
+ 64, 10, // ary -> Arab
+ 68, 10, // arz -> Arab
+ 72, 40, // as -> Beng
+ 75, 390, // ase -> Sgnw
+ 79, 90, // av -> Cyrl
+ 82, 10, // avl -> Arab
+ 86, 95, // awa -> Deva
+ 90, 10, // az_IQ -> Arab
+ 96, 10, // az_IR -> Arab
+ 102, 90, // az_RU -> Cyrl
+ 108, 90, // ba -> Cyrl
+ 111, 10, // bal -> Arab
+ 115, 95, // bap -> Deva
+ 119, 30, // bax -> Bamu
+ 123, 105, // bcq -> Ethi
+ 127, 90, // be -> Cyrl
+ 130, 10, // bej -> Arab
+ 134, 430, // bfq -> Taml
+ 138, 10, // bft -> Arab
+ 142, 95, // bfy -> Deva
+ 146, 90, // bg -> Cyrl
+ 149, 95, // bgc -> Deva
+ 153, 10, // bgn -> Arab
+ 157, 130, // bgx -> Grek
+ 161, 95, // bhb -> Deva
+ 165, 95, // bhi -> Deva
+ 169, 95, // bho -> Deva
+ 173, 105, // bji -> Ethi
+ 177, 95, // bjj -> Deva
+ 181, 440, // blt -> Tavt
+ 185, 40, // bn -> Beng
+ 188, 465, // bo -> Tibt
+ 191, 40, // bpy -> Beng
+ 195, 10, // bqi -> Arab
+ 199, 95, // bra -> Deva
+ 203, 10, // brh -> Arab
+ 207, 95, // brx -> Deva
+ 211, 35, // bsq -> Bass
+ 215, 105, // bst -> Ethi
+ 219, 95, // btv -> Deva
+ 223, 90, // bua -> Cyrl
+ 227, 105, // byn -> Ethi
+ 231, 50, // ccp -> Cakm
+ 235, 90, // ce -> Cyrl
+ 238, 90, // chm -> Cyrl
+ 242, 70, // chr -> Cher
+ 246, 10, // cja -> Arab
+ 250, 65, // cjm -> Cham
+ 254, 10, // ckb -> Arab
+ 258, 410, // cmg -> Soyo
+ 262, 80, // cop -> Copt
+ 266, 55, // cr -> Cans
+ 269, 90, // crh -> Cyrl
+ 273, 55, // crk -> Cans
+ 277, 55, // crl -> Cans
+ 281, 55, // csw -> Cans
+ 285, 340, // ctd -> Pauc
+ 289, 90, // cu -> Cyrl
+ 292, 90, // cv -> Cyrl
+ 295, 90, // dar -> Cyrl
+ 299, 10, // dcc -> Arab
+ 303, 10, // dgl -> Arab
+ 307, 265, // dmf -> Medf
+ 311, 95, // doi -> Deva
+ 315, 280, // drh -> Mong
+ 319, 105, // drs -> Ethi
+ 323, 95, // dty -> Deva
+ 327, 455, // dv -> Thaa
+ 330, 465, // dz -> Tibt
+ 333, 100, // egy -> Egyp
+ 337, 180, // eky -> Kali
+ 341, 130, // el -> Grek
+ 344, 120, // esg -> Gonm
+ 348, 170, // ett -> Ital
+ 352, 10, // fa -> Arab
+ 355, 10, // fia -> Arab
+ 359, 10, // fub -> Arab
+ 363, 145, // gan -> Hans
+ 367, 95, // gbm -> Deva
+ 371, 10, // gbz -> Arab
+ 375, 105, // gez -> Ethi
+ 379, 95, // ggn -> Deva
+ 383, 10, // gjk -> Arab
+ 387, 10, // gju -> Arab
+ 391, 10, // glk -> Arab
+ 395, 105, // gmv -> Ethi
+ 399, 105, // gof -> Ethi
+ 403, 95, // gom -> Deva
+ 407, 445, // gon -> Telu
+ 411, 125, // got -> Goth
+ 415, 85, // grc -> Cprt
+ 419, 40, // grt -> Beng
+ 423, 135, // gu -> Gujr
+ 426, 95, // gvr -> Deva
+ 430, 10, // gwc -> Arab
+ 434, 10, // gwt -> Arab
+ 438, 10, // ha_CM -> Arab
+ 444, 10, // ha_SD -> Arab
+ 450, 145, // hak -> Hans
+ 454, 10, // haz -> Arab
+ 458, 105, // hdy -> Ethi
+ 462, 155, // he -> Hebr
+ 465, 95, // hi -> Deva
+ 468, 160, // hlu -> Hluw
+ 472, 355, // hmd -> Plrd
+ 476, 10, // hnd -> Arab
+ 480, 95, // hne -> Deva
+ 484, 165, // hnj -> Hmnp
+ 488, 10, // hno -> Arab
+ 492, 95, // hoc -> Deva
+ 496, 95, // hoj -> Deva
+ 500, 145, // hsn -> Hans
+ 504, 20, // hy -> Armn
+ 507, 505, // ii -> Yiii
+ 510, 90, // inh -> Cyrl
+ 514, 55, // iu -> Cans
+ 517, 155, // iw -> Hebr
+ 520, 175, // ja -> Jpan
+ 523, 155, // ji -> Hebr
+ 526, 95, // jml -> Deva
+ 530, 110, // ka -> Geor
+ 533, 90, // kaa -> Cyrl
+ 537, 190, // kaw -> Kawi
+ 541, 90, // kbd -> Cyrl
+ 545, 10, // kby -> Arab
+ 549, 460, // kdt -> Thai
+ 553, 95, // kfr -> Deva
+ 557, 95, // kfy -> Deva
+ 561, 425, // khb -> Talu
+ 565, 95, // khn -> Deva
+ 569, 290, // kht -> Mymr
+ 573, 10, // khw -> Arab
+ 577, 225, // kjg -> Laoo
+ 581, 90, // kk -> Cyrl
+ 584, 10, // kk_AF -> Arab
+ 590, 10, // kk_CN -> Arab
+ 596, 10, // kk_IR -> Arab
+ 602, 10, // kk_MN -> Arab
+ 608, 200, // km -> Khmr
+ 611, 210, // kn -> Knda
+ 614, 215, // ko -> Kore
+ 617, 90, // koi -> Cyrl
+ 621, 95, // kok -> Deva
+ 625, 105, // kqy -> Ethi
+ 629, 90, // krc -> Cyrl
+ 633, 95, // kru -> Deva
+ 637, 10, // ks -> Arab
+ 640, 105, // ktb -> Ethi
+ 644, 10, // ku_LB -> Arab
+ 650, 90, // kum -> Cyrl
+ 654, 90, // kv -> Cyrl
+ 657, 10, // kvx -> Arab
+ 661, 105, // kxc -> Ethi
+ 665, 95, // kxl -> Deva
+ 669, 460, // kxm -> Thai
+ 673, 10, // kxp -> Arab
+ 677, 90, // ky -> Cyrl
+ 680, 10, // ky_CN -> Arab
+ 686, 10, // kzh -> Arab
+ 690, 235, // lab -> Lina
+ 694, 155, // lad -> Hebr
+ 698, 10, // lah -> Arab
+ 702, 90, // lbe -> Cyrl
+ 706, 460, // lcp -> Thai
+ 710, 230, // lep -> Lepc
+ 714, 90, // lez -> Cyrl
+ 718, 95, // lif -> Deva
+ 722, 240, // lis -> Lisu
+ 726, 10, // lki -> Arab
+ 730, 445, // lmn -> Telu
+ 734, 225, // lo -> Laoo
+ 737, 10, // lrc -> Arab
+ 741, 10, // luz -> Arab
+ 745, 460, // lwl -> Thai
+ 749, 145, // lzh -> Hans
+ 753, 95, // mag -> Deva
+ 757, 95, // mai -> Deva
+ 761, 300, // man_GN -> Nkoo
+ 768, 10, // mde -> Arab
+ 772, 90, // mdf -> Cyrl
+ 776, 105, // mdx -> Ethi
+ 780, 10, // mfa -> Arab
+ 784, 95, // mgp -> Deva
+ 788, 90, // mk -> Cyrl
+ 791, 10, // mki -> Arab
+ 795, 275, // ml -> Mlym
+ 798, 90, // mn -> Cyrl
+ 801, 280, // mn_CN -> Mong
+ 807, 40, // mni -> Beng
+ 811, 290, // mnw -> Mymr
+ 815, 95, // mr -> Deva
+ 818, 95, // mrd -> Deva
+ 822, 90, // mrj -> Cyrl
+ 826, 285, // mro -> Mroo
+ 830, 10, // ms_CC -> Arab
+ 836, 95, // mtr -> Deva
+ 840, 10, // mvy -> Arab
+ 844, 95, // mwr -> Deva
+ 848, 165, // mww -> Hmnp
+ 852, 290, // my -> Mymr
+ 855, 105, // mym -> Ethi
+ 859, 90, // myv -> Cyrl
+ 863, 255, // myz -> Mand
+ 867, 10, // mzn -> Arab
+ 871, 145, // nan -> Hans
+ 875, 95, // ne -> Deva
+ 878, 95, // new -> Deva
+ 882, 490, // nnp -> Wcho
+ 886, 220, // nod -> Lana
+ 890, 95, // noe -> Deva
+ 894, 370, // non -> Runr
+ 898, 300, // nqo -> Nkoo
+ 902, 55, // nsk -> Cans
+ 906, 470, // nst -> Tnsa
+ 910, 55, // oj -> Cans
+ 913, 55, // ojs -> Cans
+ 917, 325, // or -> Orya
+ 920, 10, // oru -> Arab
+ 924, 90, // os -> Cyrl
+ 927, 330, // osa -> Osge
+ 931, 10, // ota -> Arab
+ 935, 320, // otk -> Orkh
+ 939, 335, // oui -> Ougr
+ 943, 140, // pa -> Guru
+ 946, 10, // pa_PK -> Arab
+ 952, 345, // pal -> Phli
+ 956, 495, // peo -> Xpeo
+ 960, 10, // phl -> Arab
+ 964, 350, // phn -> Phnx
+ 968, 45, // pka -> Brah
+ 972, 130, // pnt -> Grek
+ 976, 95, // ppa -> Deva
+ 980, 195, // pra -> Khar
+ 984, 10, // prd -> Arab
+ 988, 10, // ps -> Arab
+ 991, 95, // raj -> Deva
+ 995, 365, // rhg -> Rohg
+ 999, 450, // rif -> Tfng
+ 1003, 95, // rjs -> Deva
+ 1007, 40, // rkt -> Beng
+ 1011, 10, // rmt -> Arab
+ 1015, 90, // ru -> Cyrl
+ 1018, 90, // rue -> Cyrl
+ 1022, 185, // ryu -> Kana
+ 1026, 95, // sa -> Deva
+ 1029, 90, // sah -> Cyrl
+ 1033, 315, // sat -> Olck
+ 1037, 385, // saz -> Saur
+ 1041, 95, // sck -> Deva
+ 1045, 10, // scl -> Arab
+ 1049, 10, // sd -> Arab
+ 1052, 95, // sd_IN -> Deva
+ 1058, 10, // sdh -> Arab
+ 1062, 310, // sga -> Ogam
+ 1066, 105, // sgw -> Ethi
+ 1070, 450, // shi -> Tfng
+ 1074, 290, // shn -> Mymr
+ 1078, 10, // shu -> Arab
+ 1082, 395, // si -> Sinh
+ 1085, 10, // skr -> Arab
+ 1089, 375, // smp -> Samr
+ 1093, 400, // sog -> Sogd
+ 1097, 460, // sou -> Thai
+ 1101, 90, // sr -> Cyrl
+ 1104, 405, // srb -> Sora
+ 1108, 95, // srx -> Deva
+ 1112, 10, // swb -> Arab
+ 1116, 95, // swv -> Deva
+ 1120, 40, // syl -> Beng
+ 1124, 415, // syr -> Syrc
+ 1128, 430, // ta -> Taml
+ 1131, 95, // taj -> Deva
+ 1135, 210, // tcy -> Knda
+ 1139, 420, // tdd -> Tale
+ 1143, 95, // tdg -> Deva
+ 1147, 95, // tdh -> Deva
+ 1151, 445, // te -> Telu
+ 1154, 90, // tg -> Cyrl
+ 1157, 10, // tg_PK -> Arab
+ 1163, 460, // th -> Thai
+ 1166, 95, // thl -> Deva
+ 1170, 95, // thq -> Deva
+ 1174, 95, // thr -> Deva
+ 1178, 105, // ti -> Ethi
+ 1181, 105, // tig -> Ethi
+ 1185, 95, // tkt -> Deva
+ 1189, 10, // trw -> Arab
+ 1193, 130, // tsd -> Grek
+ 1197, 95, // tsf -> Deva
+ 1201, 465, // tsj -> Tibt
+ 1205, 90, // tt -> Cyrl
+ 1208, 460, // tts -> Thai
+ 1212, 435, // txg -> Tang
+ 1216, 475, // txo -> Toto
+ 1220, 90, // tyv -> Cyrl
+ 1224, 0, // udi -> Aghb
+ 1228, 90, // udm -> Cyrl
+ 1232, 10, // ug -> Arab
+ 1235, 90, // ug_KZ -> Cyrl
+ 1241, 90, // ug_MN -> Cyrl
+ 1247, 480, // uga -> Ugar
+ 1251, 90, // uk -> Cyrl
+ 1254, 40, // unr -> Beng
+ 1258, 95, // unr_NP -> Deva
+ 1265, 40, // unx -> Beng
+ 1269, 10, // ur -> Arab
+ 1272, 10, // uz_AF -> Arab
+ 1278, 90, // uz_CN -> Cyrl
+ 1284, 485, // vai -> Vaii
+ 1288, 105, // wal -> Ethi
+ 1292, 445, // wbq -> Telu
+ 1296, 95, // wbr -> Deva
+ 1300, 10, // wni -> Arab
+ 1304, 115, // wsg -> Gong
+ 1308, 95, // wtm -> Deva
+ 1312, 145, // wuu -> Hans
+ 1316, 75, // xco -> Chrs
+ 1320, 60, // xcr -> Cari
+ 1324, 245, // xlc -> Lyci
+ 1328, 250, // xld -> Lydi
+ 1332, 110, // xmf -> Geor
+ 1336, 260, // xmn -> Mani
+ 1340, 270, // xmr -> Merc
+ 1344, 295, // xna -> Narb
+ 1348, 95, // xnr -> Deva
+ 1352, 360, // xpr -> Prti
+ 1356, 380, // xsa -> Sarb
+ 1360, 95, // xsr -> Deva
+ 1364, 155, // yi -> Hebr
+ 1367, 150, // yue -> Hant
+ 1371, 145, // yue_CN -> Hans
+ 1378, 10, // zdj -> Arab
+ 1382, 450, // zgh -> Tfng
+ 1386, 145, // zh -> Hans
+ 1389, 150, // zh_AU -> Hant
+ 1395, 150, // zh_BN -> Hant
+ 1401, 150, // zh_GB -> Hant
+ 1407, 150, // zh_GF -> Hant
+ 1413, 150, // zh_HK -> Hant
+ 1419, 150, // zh_ID -> Hant
+ 1425, 150, // zh_MO -> Hant
+ 1431, 150, // zh_PA -> Hant
+ 1437, 150, // zh_PF -> Hant
+ 1443, 150, // zh_PH -> Hant
+ 1449, 150, // zh_SR -> Hant
+ 1455, 150, // zh_TH -> Hant
+ 1461, 150, // zh_TW -> Hant
+ 1467, 150, // zh_US -> Hant
+ 1473, 150, // zh_VN -> Hant
+ 1479, 305, // zhx -> Nshu
+ 1483, 205, // zkt -> Kits
+};
+
+//======================================================================
+// Parent locale table
+const char parentLocaleChars[] =
+ "az_Arab\0az_Cyrl\0bal_Latn\0blt_Latn\0bm_Nkoo\0bs_Cyrl\0byn_Latn\0"
+ "cu_Glag\0dje_Arab\0dyo_Arab\0en_001\0en_150\0en_AG\0en_AI\0en_AT\0"
+ "en_AU\0en_BB\0en_BE\0en_BM\0en_BS\0en_BW\0en_BZ\0en_CC\0en_CH\0"
+ "en_CK\0en_CM\0en_CX\0en_CY\0en_DE\0en_DG\0en_DK\0en_DM\0en_Dsrt\0"
+ "en_ER\0en_FI\0en_FJ\0en_FK\0en_FM\0en_GB\0en_GD\0en_GG\0en_GH\0"
+ "en_GI\0en_GM\0en_GY\0en_HK\0en_IE\0en_IL\0en_IM\0en_IN\0en_IO\0"
+ "en_JE\0en_JM\0en_KE\0en_KI\0en_KN\0en_KY\0en_LC\0en_LR\0en_LS\0"
+ "en_MG\0en_MO\0en_MS\0en_MT\0en_MU\0en_MV\0en_MW\0en_MY\0en_NA\0"
+ "en_NF\0en_NG\0en_NL\0en_NR\0en_NU\0en_NZ\0en_PG\0en_PK\0en_PN\0"
+ "en_PW\0en_RW\0en_SB\0en_SC\0en_SD\0en_SE\0en_SG\0en_SH\0en_SI\0"
+ "en_SL\0en_SS\0en_SX\0en_SZ\0en_Shaw\0en_TC\0en_TK\0en_TO\0en_TT\0"
+ "en_TV\0en_TZ\0en_UG\0en_VC\0en_VG\0en_VU\0en_WS\0en_ZA\0en_ZM\0"
+ "en_ZW\0es_419\0es_AR\0es_BO\0es_BR\0es_BZ\0es_CL\0es_CO\0es_CR\0"
+ "es_CU\0es_DO\0es_EC\0es_GT\0es_HN\0es_MX\0es_NI\0es_PA\0es_PE\0"
+ "es_PR\0es_PY\0es_SV\0es_US\0es_UY\0es_VE\0ff_Adlm\0ff_Arab\0fr_HT\0"
+ "ha_Arab\0hi_Latn\0ht\0iu_Latn\0kk_Arab\0ks_Deva\0ku_Arab\0ky_Arab\0"
+ "ky_Latn\0ml_Arab\0mn_Mong\0mni_Mtei\0ms_Arab\0nb\0nn\0no\0pa_Arab\0"
+ "pt_AO\0pt_CH\0pt_CV\0pt_FR\0pt_GQ\0pt_GW\0pt_LU\0pt_MO\0pt_MZ\0"
+ "pt_PT\0pt_ST\0pt_TL\0root\0sat_Deva\0sd_Deva\0sd_Khoj\0sd_Sind\0"
+ "shi_Latn\0so_Arab\0sr_Latn\0sw_Arab\0tg_Arab\0ug_Cyrl\0uz_Arab\0"
+ "uz_Cyrl\0vai_Latn\0wo_Arab\0yo_Arab\0yue_Hans\0zh_Hant\0zh_Hant_HK\0"
+ "zh_Hant_MO\0";
+
+const int32_t parentLocaleTable[] = {
+ 0, 1017, // az_Arab -> root
+ 8, 1017, // az_Cyrl -> root
+ 16, 1017, // bal_Latn -> root
+ 25, 1017, // blt_Latn -> root
+ 34, 1017, // bm_Nkoo -> root
+ 42, 1017, // bs_Cyrl -> root
+ 50, 1017, // byn_Latn -> root
+ 59, 1017, // cu_Glag -> root
+ 67, 1017, // dje_Arab -> root
+ 76, 1017, // dyo_Arab -> root
+ 92, 85, // en_150 -> en_001
+ 99, 85, // en_AG -> en_001
+ 105, 85, // en_AI -> en_001
+ 111, 92, // en_AT -> en_150
+ 117, 85, // en_AU -> en_001
+ 123, 85, // en_BB -> en_001
+ 129, 92, // en_BE -> en_150
+ 135, 85, // en_BM -> en_001
+ 141, 85, // en_BS -> en_001
+ 147, 85, // en_BW -> en_001
+ 153, 85, // en_BZ -> en_001
+ 159, 85, // en_CC -> en_001
+ 165, 92, // en_CH -> en_150
+ 171, 85, // en_CK -> en_001
+ 177, 85, // en_CM -> en_001
+ 183, 85, // en_CX -> en_001
+ 189, 85, // en_CY -> en_001
+ 195, 92, // en_DE -> en_150
+ 201, 85, // en_DG -> en_001
+ 207, 92, // en_DK -> en_150
+ 213, 85, // en_DM -> en_001
+ 219, 1017, // en_Dsrt -> root
+ 227, 85, // en_ER -> en_001
+ 233, 92, // en_FI -> en_150
+ 239, 85, // en_FJ -> en_001
+ 245, 85, // en_FK -> en_001
+ 251, 85, // en_FM -> en_001
+ 257, 85, // en_GB -> en_001
+ 263, 85, // en_GD -> en_001
+ 269, 85, // en_GG -> en_001
+ 275, 85, // en_GH -> en_001
+ 281, 85, // en_GI -> en_001
+ 287, 85, // en_GM -> en_001
+ 293, 85, // en_GY -> en_001
+ 299, 85, // en_HK -> en_001
+ 305, 85, // en_IE -> en_001
+ 311, 85, // en_IL -> en_001
+ 317, 85, // en_IM -> en_001
+ 323, 85, // en_IN -> en_001
+ 329, 85, // en_IO -> en_001
+ 335, 85, // en_JE -> en_001
+ 341, 85, // en_JM -> en_001
+ 347, 85, // en_KE -> en_001
+ 353, 85, // en_KI -> en_001
+ 359, 85, // en_KN -> en_001
+ 365, 85, // en_KY -> en_001
+ 371, 85, // en_LC -> en_001
+ 377, 85, // en_LR -> en_001
+ 383, 85, // en_LS -> en_001
+ 389, 85, // en_MG -> en_001
+ 395, 85, // en_MO -> en_001
+ 401, 85, // en_MS -> en_001
+ 407, 85, // en_MT -> en_001
+ 413, 85, // en_MU -> en_001
+ 419, 85, // en_MV -> en_001
+ 425, 85, // en_MW -> en_001
+ 431, 85, // en_MY -> en_001
+ 437, 85, // en_NA -> en_001
+ 443, 85, // en_NF -> en_001
+ 449, 85, // en_NG -> en_001
+ 455, 92, // en_NL -> en_150
+ 461, 85, // en_NR -> en_001
+ 467, 85, // en_NU -> en_001
+ 473, 85, // en_NZ -> en_001
+ 479, 85, // en_PG -> en_001
+ 485, 85, // en_PK -> en_001
+ 491, 85, // en_PN -> en_001
+ 497, 85, // en_PW -> en_001
+ 503, 85, // en_RW -> en_001
+ 509, 85, // en_SB -> en_001
+ 515, 85, // en_SC -> en_001
+ 521, 85, // en_SD -> en_001
+ 527, 92, // en_SE -> en_150
+ 533, 85, // en_SG -> en_001
+ 539, 85, // en_SH -> en_001
+ 545, 92, // en_SI -> en_150
+ 551, 85, // en_SL -> en_001
+ 557, 85, // en_SS -> en_001
+ 563, 85, // en_SX -> en_001
+ 569, 85, // en_SZ -> en_001
+ 575, 1017, // en_Shaw -> root
+ 583, 85, // en_TC -> en_001
+ 589, 85, // en_TK -> en_001
+ 595, 85, // en_TO -> en_001
+ 601, 85, // en_TT -> en_001
+ 607, 85, // en_TV -> en_001
+ 613, 85, // en_TZ -> en_001
+ 619, 85, // en_UG -> en_001
+ 625, 85, // en_VC -> en_001
+ 631, 85, // en_VG -> en_001
+ 637, 85, // en_VU -> en_001
+ 643, 85, // en_WS -> en_001
+ 649, 85, // en_ZA -> en_001
+ 655, 85, // en_ZM -> en_001
+ 661, 85, // en_ZW -> en_001
+ 674, 667, // es_AR -> es_419
+ 680, 667, // es_BO -> es_419
+ 686, 667, // es_BR -> es_419
+ 692, 667, // es_BZ -> es_419
+ 698, 667, // es_CL -> es_419
+ 704, 667, // es_CO -> es_419
+ 710, 667, // es_CR -> es_419
+ 716, 667, // es_CU -> es_419
+ 722, 667, // es_DO -> es_419
+ 728, 667, // es_EC -> es_419
+ 734, 667, // es_GT -> es_419
+ 740, 667, // es_HN -> es_419
+ 746, 667, // es_MX -> es_419
+ 752, 667, // es_NI -> es_419
+ 758, 667, // es_PA -> es_419
+ 764, 667, // es_PE -> es_419
+ 770, 667, // es_PR -> es_419
+ 776, 667, // es_PY -> es_419
+ 782, 667, // es_SV -> es_419
+ 788, 667, // es_US -> es_419
+ 794, 667, // es_UY -> es_419
+ 800, 667, // es_VE -> es_419
+ 806, 1017, // ff_Adlm -> root
+ 814, 1017, // ff_Arab -> root
+ 828, 1017, // ha_Arab -> root
+ 836, 323, // hi_Latn -> en_IN
+ 844, 822, // ht -> fr_HT
+ 847, 1017, // iu_Latn -> root
+ 855, 1017, // kk_Arab -> root
+ 863, 1017, // ks_Deva -> root
+ 871, 1017, // ku_Arab -> root
+ 879, 1017, // ky_Arab -> root
+ 887, 1017, // ky_Latn -> root
+ 895, 1017, // ml_Arab -> root
+ 903, 1017, // mn_Mong -> root
+ 911, 1017, // mni_Mtei -> root
+ 920, 1017, // ms_Arab -> root
+ 928, 934, // nb -> no
+ 931, 934, // nn -> no
+ 937, 1017, // pa_Arab -> root
+ 945, 999, // pt_AO -> pt_PT
+ 951, 999, // pt_CH -> pt_PT
+ 957, 999, // pt_CV -> pt_PT
+ 963, 999, // pt_FR -> pt_PT
+ 969, 999, // pt_GQ -> pt_PT
+ 975, 999, // pt_GW -> pt_PT
+ 981, 999, // pt_LU -> pt_PT
+ 987, 999, // pt_MO -> pt_PT
+ 993, 999, // pt_MZ -> pt_PT
+ 1005, 999, // pt_ST -> pt_PT
+ 1011, 999, // pt_TL -> pt_PT
+ 1022, 1017, // sat_Deva -> root
+ 1031, 1017, // sd_Deva -> root
+ 1039, 1017, // sd_Khoj -> root
+ 1047, 1017, // sd_Sind -> root
+ 1055, 1017, // shi_Latn -> root
+ 1064, 1017, // so_Arab -> root
+ 1072, 1017, // sr_Latn -> root
+ 1080, 1017, // sw_Arab -> root
+ 1088, 1017, // tg_Arab -> root
+ 1096, 1017, // ug_Cyrl -> root
+ 1104, 1017, // uz_Arab -> root
+ 1112, 1017, // uz_Cyrl -> root
+ 1120, 1017, // vai_Latn -> root
+ 1129, 1017, // wo_Arab -> root
+ 1137, 1017, // yo_Arab -> root
+ 1145, 1017, // yue_Hans -> root
+ 1154, 1017, // zh_Hant -> root
+ 1173, 1162, // zh_Hant_MO -> zh_Hant_HK
+};
+
+
+#endif // INCLUDED_FROM_URESBUND_CPP
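(Editor's note on the generated tables above, not part of the patch: both arrays use the same encoding convention — each row is a pair of byte offsets into the preceding NUL-separated string pool. For the parent-locale table this can be read off the data itself: offset 0 is "az_Arab", 85 is "en_001", 92 is "en_150", and 1017 is "root", which matches the row comments such as "92, 85, // en_150 -> en_001". A minimal lookup sketch under that assumption follows; the helper name is hypothetical and is not an ICU API.)

    // Hypothetical helper: resolve a locale's parent using (child, parent)
    // offset pairs into a NUL-separated string pool, e.g. parentLocaleChars
    // and parentLocaleTable from the generated data above.
    #include <cstdint>
    #include <cstring>

    static const char* findParent(const char* chars,
                                  const int32_t* table, int32_t tableLength,
                                  const char* locale) {
        for (int32_t i = 0; i < tableLength; i += 2) {
            if (std::strcmp(chars + table[i], locale) == 0) {
                return chars + table[i + 1];  // e.g. "en_150" -> "en_001"
            }
        }
        return nullptr;  // no explicit parent entry; caller falls back to truncation
    }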
diff --git a/thirdparty/icu4c/common/localematcher.cpp b/thirdparty/icu4c/common/localematcher.cpp
index 2cad708d99..2f8664b6f7 100644
--- a/thirdparty/icu4c/common/localematcher.cpp
+++ b/thirdparty/icu4c/common/localematcher.cpp
@@ -60,7 +60,7 @@ LocaleMatcher::Result::Result(LocaleMatcher::Result &&src) U_NOEXCEPT :
if (desiredIsOwned) {
src.desiredLocale = nullptr;
src.desiredIndex = -1;
- src.desiredIsOwned = FALSE;
+ src.desiredIsOwned = false;
}
}
@@ -82,7 +82,7 @@ LocaleMatcher::Result &LocaleMatcher::Result::operator=(LocaleMatcher::Result &&
if (desiredIsOwned) {
src.desiredLocale = nullptr;
src.desiredIndex = -1;
- src.desiredIsOwned = FALSE;
+ src.desiredIsOwned = false;
}
return *this;
}
@@ -287,10 +287,10 @@ LocaleMatcher::Builder &LocaleMatcher::Builder::internalSetThresholdDistance(int
#endif
UBool LocaleMatcher::Builder::copyErrorTo(UErrorCode &outErrorCode) const {
- if (U_FAILURE(outErrorCode)) { return TRUE; }
- if (U_SUCCESS(errorCode_)) { return FALSE; }
+ if (U_FAILURE(outErrorCode)) { return true; }
+ if (U_SUCCESS(errorCode_)) { return false; }
outErrorCode = errorCode_;
- return TRUE;
+ return true;
}
LocaleMatcher LocaleMatcher::Builder::build(UErrorCode &errorCode) const {
@@ -632,30 +632,30 @@ const Locale *LocaleMatcher::getBestMatchForListString(
LocaleMatcher::Result LocaleMatcher::getBestMatchResult(
const Locale &desiredLocale, UErrorCode &errorCode) const {
if (U_FAILURE(errorCode)) {
- return Result(nullptr, defaultLocale, -1, -1, FALSE);
+ return Result(nullptr, defaultLocale, -1, -1, false);
}
int32_t suppIndex = getBestSuppIndex(
getMaximalLsrOrUnd(likelySubtags, desiredLocale, errorCode),
nullptr, errorCode);
if (U_FAILURE(errorCode) || suppIndex < 0) {
- return Result(nullptr, defaultLocale, -1, -1, FALSE);
+ return Result(nullptr, defaultLocale, -1, -1, false);
} else {
- return Result(&desiredLocale, supportedLocales[suppIndex], 0, suppIndex, FALSE);
+ return Result(&desiredLocale, supportedLocales[suppIndex], 0, suppIndex, false);
}
}
LocaleMatcher::Result LocaleMatcher::getBestMatchResult(
Locale::Iterator &desiredLocales, UErrorCode &errorCode) const {
if (U_FAILURE(errorCode) || !desiredLocales.hasNext()) {
- return Result(nullptr, defaultLocale, -1, -1, FALSE);
+ return Result(nullptr, defaultLocale, -1, -1, false);
}
LocaleLsrIterator lsrIter(likelySubtags, desiredLocales, ULOCMATCH_TEMPORARY_LOCALES);
int32_t suppIndex = getBestSuppIndex(lsrIter.next(errorCode), &lsrIter, errorCode);
if (U_FAILURE(errorCode) || suppIndex < 0) {
- return Result(nullptr, defaultLocale, -1, -1, FALSE);
+ return Result(nullptr, defaultLocale, -1, -1, false);
} else {
return Result(lsrIter.orphanRemembered(), supportedLocales[suppIndex],
- lsrIter.getBestDesiredIndex(), suppIndex, TRUE);
+ lsrIter.getBestDesiredIndex(), suppIndex, true);
}
}
diff --git a/thirdparty/icu4c/common/localeprioritylist.cpp b/thirdparty/icu4c/common/localeprioritylist.cpp
index 4455eedb75..e5ba0a3c77 100644
--- a/thirdparty/icu4c/common/localeprioritylist.cpp
+++ b/thirdparty/icu4c/common/localeprioritylist.cpp
@@ -234,7 +234,7 @@ void LocalePriorityList::sort(UErrorCode &errorCode) {
// The comparator forces a stable sort via the item index.
if (U_FAILURE(errorCode) || getLength() <= 1 || !hasWeights) { return; }
uprv_sortArray(list->array.getAlias(), listLength, sizeof(LocaleAndWeight),
- compareLocaleAndWeight, nullptr, FALSE, &errorCode);
+ compareLocaleAndWeight, nullptr, false, &errorCode);
}
U_NAMESPACE_END
diff --git a/thirdparty/icu4c/common/locavailable.cpp b/thirdparty/icu4c/common/locavailable.cpp
index e8ec512e37..cf341e1f74 100644
--- a/thirdparty/icu4c/common/locavailable.cpp
+++ b/thirdparty/icu4c/common/locavailable.cpp
@@ -37,7 +37,7 @@ U_NAMESPACE_BEGIN
static icu::Locale* availableLocaleList = NULL;
static int32_t availableLocaleListCount;
-static icu::UInitOnce gInitOnceLocale = U_INITONCE_INITIALIZER;
+static icu::UInitOnce gInitOnceLocale {};
U_NAMESPACE_END
@@ -54,7 +54,7 @@ static UBool U_CALLCONV locale_available_cleanup(void)
availableLocaleListCount = 0;
gInitOnceLocale.reset();
- return TRUE;
+ return true;
}
U_CDECL_END
@@ -102,7 +102,7 @@ namespace {
// Enough capacity for the two lists in the res_index.res file
const char** gAvailableLocaleNames[2] = {};
int32_t gAvailableLocaleCounts[2] = {};
-icu::UInitOnce ginstalledLocalesInitOnce = U_INITONCE_INITIALIZER;
+icu::UInitOnce ginstalledLocalesInitOnce {};
class AvailableLocalesSink : public ResourceSink {
public:
@@ -203,7 +203,7 @@ static UBool U_CALLCONV uloc_cleanup(void) {
gAvailableLocaleCounts[i] = 0;
}
ginstalledLocalesInitOnce.reset();
- return TRUE;
+ return true;
}
// Load Installed Locales. This function will be called exactly once
diff --git a/thirdparty/icu4c/common/locdispnames.cpp b/thirdparty/icu4c/common/locdispnames.cpp
index c512a0164c..637556cc71 100644
--- a/thirdparty/icu4c/common/locdispnames.cpp
+++ b/thirdparty/icu4c/common/locdispnames.cpp
@@ -514,11 +514,11 @@ uloc_getDisplayName(const char *locale,
UChar formatCloseParen = 0x0029; // )
UChar formatReplaceCloseParen = 0x005D; // ]
- UBool haveLang = TRUE; /* assume true, set false if we find we don't have
+ UBool haveLang = true; /* assume true, set false if we find we don't have
a lang component in the locale */
- UBool haveRest = TRUE; /* assume true, set false if we find we don't have
+ UBool haveRest = true; /* assume true, set false if we find we don't have
any other component in the locale */
- UBool retry = FALSE; /* set true if we need to retry, see below */
+ UBool retry = false; /* set true if we need to retry, see below */
int32_t langi = 0; /* index of the language substitution (0 or 1), virtually always 0 */
@@ -625,7 +625,7 @@ uloc_getDisplayName(const char *locale,
}
for(int32_t subi=0,resti=0;subi<2;) { /* iterate through patterns 0 and 1*/
- UBool subdone = FALSE; /* set true when ready to move to next substitution */
+ UBool subdone = false; /* set true when ready to move to next substitution */
/* prep p and cap for calls to get display components, pin cap to 0 since
they complain if cap is negative */
@@ -643,10 +643,10 @@ uloc_getDisplayName(const char *locale,
length+=langLen;
haveLang=langLen>0;
}
- subdone=TRUE;
+ subdone=true;
} else { /* {1} */
if(!haveRest) {
- subdone=TRUE;
+ subdone=true;
} else {
int32_t len; /* length of component (plus other stuff) we just fetched */
switch(resti++) {
@@ -667,7 +667,7 @@ uloc_getDisplayName(const char *locale,
const char* kw=uenum_next(kenum.getAlias(), &len, pErrorCode);
if (kw == NULL) {
len=0; /* mark that we didn't add a component */
- subdone=TRUE;
+ subdone=true;
} else {
/* incorporating this behavior into the loop made it even more complex,
so just special case it here */
@@ -772,7 +772,7 @@ uloc_getDisplayName(const char *locale,
/* would have fit, but didn't because of pattern prefix. */
sub0Pos=0; /* stops initial padding (and a second retry,
so we won't end up here again) */
- retry=TRUE;
+ retry=true;
}
}
}
diff --git a/thirdparty/icu4c/common/locdistance.cpp b/thirdparty/icu4c/common/locdistance.cpp
index ff8892791b..fb22fe79ed 100644
--- a/thirdparty/icu4c/common/locdistance.cpp
+++ b/thirdparty/icu4c/common/locdistance.cpp
@@ -45,13 +45,13 @@ enum {
};
LocaleDistance *gLocaleDistance = nullptr;
-UInitOnce gInitOnce = U_INITONCE_INITIALIZER;
+UInitOnce gInitOnce {};
UBool U_CALLCONV cleanup() {
delete gLocaleDistance;
gLocaleDistance = nullptr;
gInitOnce.reset();
- return TRUE;
+ return true;
}
} // namespace
diff --git a/thirdparty/icu4c/common/locdspnm.cpp b/thirdparty/icu4c/common/locdspnm.cpp
index f73cedd728..401f1fecbf 100644
--- a/thirdparty/icu4c/common/locdspnm.cpp
+++ b/thirdparty/icu4c/common/locdspnm.cpp
@@ -403,7 +403,7 @@ struct LocaleDisplayNamesImpl::CapitalizationContextSink : public ResourceSink {
LocaleDisplayNamesImpl& parent;
CapitalizationContextSink(LocaleDisplayNamesImpl& _parent)
- : hasCapitalizationUsage(FALSE), parent(_parent) {}
+ : hasCapitalizationUsage(false), parent(_parent) {}
virtual ~CapitalizationContextSink();
virtual void put(const char *key, ResourceValue &value, UBool /*noFallback*/,
@@ -437,8 +437,8 @@ struct LocaleDisplayNamesImpl::CapitalizationContextSink : public ResourceSink {
int32_t titlecaseInt = (parent.capitalizationContext == UDISPCTX_CAPITALIZATION_FOR_UI_LIST_OR_MENU) ? intVector[0] : intVector[1];
if (titlecaseInt == 0) { continue; }
- parent.fCapitalization[usageEnum] = TRUE;
- hasCapitalizationUsage = TRUE;
+ parent.fCapitalization[usageEnum] = true;
+ hasCapitalizationUsage = true;
}
}
};
@@ -490,7 +490,7 @@ LocaleDisplayNamesImpl::initialize(void) {
#if !UCONFIG_NO_BREAK_ITERATION
// Only get the context data if we need it! This is a const object so we know now...
// Also check whether we will need a break iterator (depends on the data)
- UBool needBrkIter = FALSE;
+ UBool needBrkIter = false;
if (capitalizationContext == UDISPCTX_CAPITALIZATION_FOR_UI_LIST_OR_MENU || capitalizationContext == UDISPCTX_CAPITALIZATION_FOR_STANDALONE) {
LocalUResourceBundlePointer resource(ures_open(NULL, locale.getName(), &status));
if (U_FAILURE(status)) { return; }
@@ -593,8 +593,8 @@ LocaleDisplayNamesImpl::localeDisplayName(const Locale& loc,
ncat(buffer, ULOC_FULLNAME_CAPACITY, lang, "_", script, "_", country, (char *)0);
localeIdName(buffer, resultName, false);
if (!resultName.isBogus()) {
- hasScript = FALSE;
- hasCountry = FALSE;
+ hasScript = false;
+ hasCountry = false;
break;
}
}
@@ -602,7 +602,7 @@ LocaleDisplayNamesImpl::localeDisplayName(const Locale& loc,
ncat(buffer, ULOC_FULLNAME_CAPACITY, lang, "_", script, (char *)0);
localeIdName(buffer, resultName, false);
if (!resultName.isBogus()) {
- hasScript = FALSE;
+ hasScript = false;
break;
}
}
@@ -610,11 +610,11 @@ LocaleDisplayNamesImpl::localeDisplayName(const Locale& loc,
ncat(buffer, ULOC_FULLNAME_CAPACITY, lang, "_", country, (char*)0);
localeIdName(buffer, resultName, false);
if (!resultName.isBogus()) {
- hasCountry = FALSE;
+ hasCountry = false;
break;
}
}
- } while (FALSE);
+ } while (false);
}
if (resultName.isBogus() || resultName.isEmpty()) {
localeIdName(lang, resultName, substitute == UDISPCTX_SUBSTITUTE);
@@ -629,7 +629,7 @@ LocaleDisplayNamesImpl::localeDisplayName(const Locale& loc,
UErrorCode status = U_ZERO_ERROR;
if (hasScript) {
- UnicodeString script_str = scriptDisplayName(script, temp, TRUE);
+ UnicodeString script_str = scriptDisplayName(script, temp, true);
if (script_str.isBogus()) {
result.setToBogus();
return result;
@@ -637,7 +637,7 @@ LocaleDisplayNamesImpl::localeDisplayName(const Locale& loc,
resultRemainder.append(script_str);
}
if (hasCountry) {
- UnicodeString region_str = regionDisplayName(country, temp, TRUE);
+ UnicodeString region_str = regionDisplayName(country, temp, true);
if (region_str.isBogus()) {
result.setToBogus();
return result;
@@ -645,7 +645,7 @@ LocaleDisplayNamesImpl::localeDisplayName(const Locale& loc,
appendWithSep(resultRemainder, region_str);
}
if (hasVariant) {
- UnicodeString variant_str = variantDisplayName(variant, temp, TRUE);
+ UnicodeString variant_str = variantDisplayName(variant, temp, true);
if (variant_str.isBogus()) {
result.setToBogus();
return result;
@@ -666,10 +666,10 @@ LocaleDisplayNamesImpl::localeDisplayName(const Locale& loc,
if (U_FAILURE(status) || status == U_STRING_NOT_TERMINATED_WARNING) {
return result;
}
- keyDisplayName(key, temp, TRUE);
+ keyDisplayName(key, temp, true);
temp.findAndReplace(formatOpenParen, formatReplaceOpenParen);
temp.findAndReplace(formatCloseParen, formatReplaceCloseParen);
- keyValueDisplayName(key, value, temp2, TRUE);
+ keyValueDisplayName(key, value, temp2, true);
temp2.findAndReplace(formatOpenParen, formatReplaceOpenParen);
temp2.findAndReplace(formatCloseParen, formatReplaceCloseParen);
if (temp2 != UnicodeString(value, -1, US_INV)) {
@@ -797,13 +797,13 @@ LocaleDisplayNamesImpl::scriptDisplayName(const char* script,
UnicodeString&
LocaleDisplayNamesImpl::scriptDisplayName(const char* script,
UnicodeString& result) const {
- return scriptDisplayName(script, result, FALSE);
+ return scriptDisplayName(script, result, false);
}
UnicodeString&
LocaleDisplayNamesImpl::scriptDisplayName(UScriptCode scriptCode,
UnicodeString& result) const {
- return scriptDisplayName(uscript_getName(scriptCode), result, FALSE);
+ return scriptDisplayName(uscript_getName(scriptCode), result, false);
}
UnicodeString&
@@ -827,7 +827,7 @@ LocaleDisplayNamesImpl::regionDisplayName(const char* region,
UnicodeString&
LocaleDisplayNamesImpl::regionDisplayName(const char* region,
UnicodeString& result) const {
- return regionDisplayName(region, result, FALSE);
+ return regionDisplayName(region, result, false);
}
@@ -847,7 +847,7 @@ LocaleDisplayNamesImpl::variantDisplayName(const char* variant,
UnicodeString&
LocaleDisplayNamesImpl::variantDisplayName(const char* variant,
UnicodeString& result) const {
- return variantDisplayName(variant, result, FALSE);
+ return variantDisplayName(variant, result, false);
}
UnicodeString&
@@ -866,7 +866,7 @@ LocaleDisplayNamesImpl::keyDisplayName(const char* key,
UnicodeString&
LocaleDisplayNamesImpl::keyDisplayName(const char* key,
UnicodeString& result) const {
- return keyDisplayName(key, result, FALSE);
+ return keyDisplayName(key, result, false);
}
UnicodeString&
@@ -908,7 +908,7 @@ UnicodeString&
LocaleDisplayNamesImpl::keyValueDisplayName(const char* key,
const char* value,
UnicodeString& result) const {
- return keyValueDisplayName(key, value, result, FALSE);
+ return keyValueDisplayName(key, value, result, false);
}
////////////////////////////////////////////////////////////////////////////////////////////////////
diff --git a/thirdparty/icu4c/common/locid.cpp b/thirdparty/icu4c/common/locid.cpp
index 73bb8d8aec..5cd083866c 100644
--- a/thirdparty/icu4c/common/locid.cpp
+++ b/thirdparty/icu4c/common/locid.cpp
@@ -64,7 +64,7 @@ U_CDECL_END
U_NAMESPACE_BEGIN
static Locale *gLocaleCache = NULL;
-static UInitOnce gLocaleCacheInitOnce = U_INITONCE_INITIALIZER;
+static UInitOnce gLocaleCacheInitOnce {};
// gDefaultLocaleMutex protects all access to gDefaultLocalesHashT and gDefaultLocale.
static UMutex gDefaultLocaleMutex;
@@ -128,7 +128,7 @@ static UBool U_CALLCONV locale_cleanup(void)
gDefaultLocalesHashT = NULL;
}
gDefaultLocale = NULL;
- return TRUE;
+ return true;
}
@@ -171,7 +171,7 @@ Locale *locale_set_default_internal(const char *id, UErrorCode& status) {
// Synchronize this entire function.
Mutex lock(&gDefaultLocaleMutex);
- UBool canonicalize = FALSE;
+ UBool canonicalize = false;
// If given a NULL string for the locale id, grab the default
// name from the system.
@@ -179,7 +179,7 @@ Locale *locale_set_default_internal(const char *id, UErrorCode& status) {
// the current ICU default locale.)
if (id == NULL) {
id = uprv_getDefaultLocaleID(); // This function not thread safe? TODO: verify.
- canonicalize = TRUE; // always canonicalize host ID
+ canonicalize = true; // always canonicalize host ID
}
CharString localeNameBuf;
@@ -212,7 +212,7 @@ Locale *locale_set_default_internal(const char *id, UErrorCode& status) {
status = U_MEMORY_ALLOCATION_ERROR;
return gDefaultLocale;
}
- newDefault->init(localeNameBuf.data(), FALSE);
+ newDefault->init(localeNameBuf.data(), false);
uhash_put(gDefaultLocalesHashT, (char*) newDefault->getName(), newDefault, &status);
if (U_FAILURE(status)) {
return gDefaultLocale;
@@ -269,7 +269,7 @@ Locale::~Locale()
Locale::Locale()
: UObject(), fullName(fullNameBuffer), baseName(NULL)
{
- init(NULL, FALSE);
+ init(NULL, false);
}
/*
@@ -292,7 +292,7 @@ Locale::Locale( const char * newLanguage,
{
if( (newLanguage==NULL) && (newCountry == NULL) && (newVariant == NULL) )
{
- init(NULL, FALSE); /* shortcut */
+ init(NULL, false); /* shortcut */
}
else
{
@@ -397,7 +397,7 @@ Locale::Locale( const char * newLanguage,
}
// Parse it, because for example 'language' might really be a complete
// string.
- init(togo.data(), FALSE);
+ init(togo.data(), false);
}
}
@@ -491,7 +491,7 @@ Locale::operator==( const Locale& other) const
namespace {
-UInitOnce gKnownCanonicalizedInitOnce = U_INITONCE_INITIALIZER;
+UInitOnce gKnownCanonicalizedInitOnce {};
UHashtable *gKnownCanonicalized = nullptr;
static const char* const KNOWN_CANONICALIZED[] = {
@@ -521,7 +521,7 @@ static const char* const KNOWN_CANONICALIZED[] = {
static UBool U_CALLCONV cleanupKnownCanonicalized() {
gKnownCanonicalizedInitOnce.reset();
if (gKnownCanonicalized) { uhash_close(gKnownCanonicalized); }
- return TRUE;
+ return true;
}
static void U_CALLCONV loadKnownCanonicalized(UErrorCode &status) {
@@ -682,14 +682,14 @@ private:
const AliasData* AliasData::gSingleton = nullptr;
-UInitOnce AliasData::gInitOnce = U_INITONCE_INITIALIZER;
+UInitOnce AliasData::gInitOnce {};
UBool U_CALLCONV
AliasData::cleanup()
{
gInitOnce.reset();
delete gSingleton;
- return TRUE;
+ return true;
}
void
@@ -716,20 +716,19 @@ AliasDataBuilder::readAlias(
status = U_MEMORY_ALLOCATION_ERROR;
return;
}
- int i = 0;
- while (ures_hasNext(alias)) {
+ for (int i = 0; U_SUCCESS(status) && ures_hasNext(alias); i++) {
LocalUResourceBundlePointer res(
ures_getNextResource(alias, nullptr, &status));
const char* aliasFrom = ures_getKey(res.getAlias());
UnicodeString aliasTo =
ures_getUnicodeStringByKey(res.getAlias(), "replacement", &status);
+ if (U_FAILURE(status)) return;
checkType(aliasFrom);
checkReplacement(aliasTo);
rawTypes[i] = aliasFrom;
rawIndexes[i] = strings->add(aliasTo, status);
- i++;
}
}
@@ -1818,7 +1817,7 @@ ulocimp_isCanonicalizedLocaleForTest(const char* localeName)
/*This function initializes a Locale from a C locale ID*/
Locale& Locale::init(const char* localeID, UBool canonicalize)
{
- fIsBogus = FALSE;
+ fIsBogus = false;
/* Free our current storage */
if ((baseName != fullName) && (baseName != fullNameBuffer)) {
uprv_free(baseName);
@@ -2022,7 +2021,7 @@ Locale::setToBogus() {
*language = 0;
*script = 0;
*country = 0;
- fIsBogus = TRUE;
+ fIsBogus = true;
variantBegin = 0;
}
@@ -2072,7 +2071,7 @@ Locale::addLikelySubtags(UErrorCode& status) {
return;
}
- init(maximizedLocaleID.data(), /*canonicalize=*/FALSE);
+ init(maximizedLocaleID.data(), /*canonicalize=*/false);
if (isBogus()) {
status = U_ILLEGAL_ARGUMENT_ERROR;
}
@@ -2094,7 +2093,7 @@ Locale::minimizeSubtags(UErrorCode& status) {
return;
}
- init(minimizedLocaleID.data(), /*canonicalize=*/FALSE);
+ init(minimizedLocaleID.data(), /*canonicalize=*/false);
if (isBogus()) {
status = U_ILLEGAL_ARGUMENT_ERROR;
}
@@ -2113,7 +2112,7 @@ Locale::canonicalize(UErrorCode& status) {
if (U_FAILURE(status)) {
return;
}
- init(uncanonicalized.data(), /*canonicalize=*/TRUE);
+ init(uncanonicalized.data(), /*canonicalize=*/true);
if (isBogus()) {
status = U_ILLEGAL_ARGUMENT_ERROR;
}
@@ -2160,7 +2159,7 @@ Locale::forLanguageTag(StringPiece tag, UErrorCode& status)
return result;
}
- result.init(localeID.data(), /*canonicalize=*/FALSE);
+ result.init(localeID.data(), /*canonicalize=*/false);
if (result.isBogus()) {
status = U_ILLEGAL_ARGUMENT_ERROR;
}
@@ -2179,7 +2178,7 @@ Locale::toLanguageTag(ByteSink& sink, UErrorCode& status) const
return;
}
- ulocimp_toLanguageTag(fullName, sink, /*strict=*/FALSE, &status);
+ ulocimp_toLanguageTag(fullName, sink, /*strict=*/false, &status);
}
Locale U_EXPORT2
@@ -2187,7 +2186,7 @@ Locale::createFromName (const char *name)
{
if (name) {
Locale l("");
- l.init(name, FALSE);
+ l.init(name, false);
return l;
}
else {
@@ -2198,7 +2197,7 @@ Locale::createFromName (const char *name)
Locale U_EXPORT2
Locale::createCanonical(const char* name) {
Locale loc("");
- loc.init(name, TRUE);
+ loc.init(name, true);
return loc;
}
@@ -2241,7 +2240,7 @@ const char* const* U_EXPORT2 Locale::getISOLanguages()
// Set the locale's data based on a posix id.
void Locale::setFromPOSIXID(const char *posixID)
{
- init(posixID, TRUE);
+ init(posixID, true);
}
const Locale & U_EXPORT2
@@ -2531,7 +2530,7 @@ Locale::createKeywords(UErrorCode &status) const
if(assignment > variantStart) {
CharString keywords;
CharStringByteSink sink(&keywords);
- ulocimp_getKeywords(variantStart+1, '@', sink, FALSE, &status);
+ ulocimp_getKeywords(variantStart+1, '@', sink, false, &status);
if (U_SUCCESS(status) && !keywords.isEmpty()) {
result = new KeywordEnumeration(keywords.data(), keywords.length(), 0, status);
if (!result) {
@@ -2560,7 +2559,7 @@ Locale::createUnicodeKeywords(UErrorCode &status) const
if(assignment > variantStart) {
CharString keywords;
CharStringByteSink sink(&keywords);
- ulocimp_getKeywords(variantStart+1, '@', sink, FALSE, &status);
+ ulocimp_getKeywords(variantStart+1, '@', sink, false, &status);
if (U_SUCCESS(status) && !keywords.isEmpty()) {
result = new UnicodeKeywordEnumeration(keywords.data(), keywords.length(), 0, status);
if (!result) {
diff --git a/thirdparty/icu4c/common/loclikely.cpp b/thirdparty/icu4c/common/loclikely.cpp
index d80096b588..ec0dca28a4 100644
--- a/thirdparty/icu4c/common/loclikely.cpp
+++ b/thirdparty/icu4c/common/loclikely.cpp
@@ -201,7 +201,7 @@ createTagStringWithAlternates(
**/
char tagBuffer[ULOC_FULLNAME_CAPACITY];
int32_t tagLength = 0;
- UBool regionAppended = FALSE;
+ UBool regionAppended = false;
if (langLength > 0) {
appendTag(
@@ -209,7 +209,7 @@ createTagStringWithAlternates(
langLength,
tagBuffer,
&tagLength,
- /*withSeparator=*/FALSE);
+ /*withSeparator=*/false);
}
else if (alternateTags == NULL) {
/*
@@ -246,7 +246,7 @@ createTagStringWithAlternates(
alternateLangLength,
tagBuffer,
&tagLength,
- /*withSeparator=*/FALSE);
+ /*withSeparator=*/false);
}
}
@@ -256,7 +256,7 @@ createTagStringWithAlternates(
scriptLength,
tagBuffer,
&tagLength,
- /*withSeparator=*/TRUE);
+ /*withSeparator=*/true);
}
else if (alternateTags != NULL) {
/*
@@ -281,7 +281,7 @@ createTagStringWithAlternates(
alternateScriptLength,
tagBuffer,
&tagLength,
- /*withSeparator=*/TRUE);
+ /*withSeparator=*/true);
}
}
@@ -291,9 +291,9 @@ createTagStringWithAlternates(
regionLength,
tagBuffer,
&tagLength,
- /*withSeparator=*/TRUE);
+ /*withSeparator=*/true);
- regionAppended = TRUE;
+ regionAppended = true;
}
else if (alternateTags != NULL) {
/*
@@ -317,9 +317,9 @@ createTagStringWithAlternates(
alternateRegionLength,
tagBuffer,
&tagLength,
- /*withSeparator=*/TRUE);
+ /*withSeparator=*/true);
- regionAppended = TRUE;
+ regionAppended = true;
}
}
@@ -622,7 +622,7 @@ createLikelySubtagsString(
likelySubtags,
sink,
err);
- return TRUE;
+ return true;
}
}
@@ -678,7 +678,7 @@ createLikelySubtagsString(
likelySubtags,
sink,
err);
- return TRUE;
+ return true;
}
}
@@ -734,7 +734,7 @@ createLikelySubtagsString(
likelySubtags,
sink,
err);
- return TRUE;
+ return true;
}
}
@@ -789,11 +789,11 @@ createLikelySubtagsString(
likelySubtags,
sink,
err);
- return TRUE;
+ return true;
}
}
- return FALSE;
+ return false;
error:
@@ -801,7 +801,7 @@ error:
*err = U_ILLEGAL_ARGUMENT_ERROR;
}
- return FALSE;
+ return false;
}
#define CHECK_TRAILING_VARIANT_SIZE(trailing, trailingLength) UPRV_BLOCK_MACRO_BEGIN { \
@@ -836,7 +836,7 @@ _uloc_addLikelySubtags(const char* localeID,
const char* trailing = "";
int32_t trailingLength = 0;
int32_t trailingIndex = 0;
- UBool success = FALSE;
+ UBool success = false;
if(U_FAILURE(*err)) {
goto error;
@@ -901,7 +901,7 @@ error:
if (!U_FAILURE(*err)) {
*err = U_ILLEGAL_ARGUMENT_ERROR;
}
- return FALSE;
+ return false;
}
// Add likely subtags to the sink
@@ -925,7 +925,7 @@ _uloc_minimizeSubtags(const char* localeID,
const char* trailing = "";
int32_t trailingLength = 0;
int32_t trailingIndex = 0;
- UBool successGetMax = FALSE;
+ UBool successGetMax = false;
if(U_FAILURE(*err)) {
goto error;
@@ -1248,7 +1248,7 @@ _ulocimp_addLikelySubtags(const char* localeID,
if (U_SUCCESS(*status)) {
return _uloc_addLikelySubtags(localeBuffer.getBuffer(), sink, status);
} else {
- return FALSE;
+ return false;
}
}
@@ -1320,14 +1320,14 @@ uloc_isRightToLeft(const char *locale) {
char lang[8];
int32_t langLength = uloc_getLanguage(locale, lang, UPRV_LENGTHOF(lang), &errorCode);
if (U_FAILURE(errorCode) || errorCode == U_STRING_NOT_TERMINATED_WARNING) {
- return FALSE;
+ return false;
}
if (langLength > 0) {
const char* langPtr = uprv_strstr(LANG_DIR_STRING, lang);
if (langPtr != NULL) {
switch (langPtr[langLength]) {
- case '-': return FALSE;
- case '+': return TRUE;
+ case '-': return false;
+ case '+': return true;
default: break; // partial match of a longer code
}
}
@@ -1340,12 +1340,12 @@ uloc_isRightToLeft(const char *locale) {
ulocimp_addLikelySubtags(locale, sink, &errorCode);
}
if (U_FAILURE(errorCode) || errorCode == U_STRING_NOT_TERMINATED_WARNING) {
- return FALSE;
+ return false;
}
scriptLength = uloc_getScript(likely.data(), script, UPRV_LENGTHOF(script), &errorCode);
if (U_FAILURE(errorCode) || errorCode == U_STRING_NOT_TERMINATED_WARNING ||
scriptLength == 0) {
- return FALSE;
+ return false;
}
}
UScriptCode scriptCode = (UScriptCode)u_getPropertyValueEnum(UCHAR_SCRIPT, script);
@@ -1392,7 +1392,7 @@ ulocimp_getRegionForSupplementalData(const char *localeID, UBool inferRegion,
if (U_FAILURE(*status)) {
rgLen = 0;
} else if (rgLen == 0 && inferRegion) {
- // no unicode_region_subtag but inferRegion TRUE, try likely subtags
+ // no unicode_region_subtag but inferRegion true, try likely subtags
rgStatus = U_ZERO_ERROR;
icu::CharString locBuf;
{
diff --git a/thirdparty/icu4c/common/loclikelysubtags.cpp b/thirdparty/icu4c/common/loclikelysubtags.cpp
index aa592e6ea8..e913c66a35 100644
--- a/thirdparty/icu4c/common/loclikelysubtags.cpp
+++ b/thirdparty/icu4c/common/loclikelysubtags.cpp
@@ -233,7 +233,7 @@ private:
return false;
}
for (int i = 0; i < length; ++i) {
- stringArray.getValue(i, value); // returns TRUE because i < length
+ stringArray.getValue(i, value); // returns true because i < length
rawIndexes[i] = strings.add(value.getUnicodeString(errorCode), errorCode);
if (U_FAILURE(errorCode)) { return false; }
}
@@ -245,13 +245,13 @@ private:
namespace {
XLikelySubtags *gLikelySubtags = nullptr;
-UInitOnce gInitOnce = U_INITONCE_INITIALIZER;
+UInitOnce gInitOnce {};
UBool U_CALLCONV cleanup() {
delete gLikelySubtags;
gLikelySubtags = nullptr;
gInitOnce.reset();
- return TRUE;
+ return true;
}
} // namespace
diff --git a/thirdparty/icu4c/common/locmap.cpp b/thirdparty/icu4c/common/locmap.cpp
index 29a5646385..78cfd1ca86 100644
--- a/thirdparty/icu4c/common/locmap.cpp
+++ b/thirdparty/icu4c/common/locmap.cpp
@@ -1053,7 +1053,7 @@ uprv_convertToPosix(uint32_t hostid, char *posixID, int32_t posixIDCapacity, UEr
{
uint16_t langID;
uint32_t localeIndex;
- UBool bLookup = TRUE;
+ UBool bLookup = true;
const char *pPosixID = NULL;
#if U_PLATFORM_HAS_WIN32_API && UCONFIG_USE_WINDOWS_LCID_MAPPING_API
@@ -1074,7 +1074,7 @@ uprv_convertToPosix(uint32_t hostid, char *posixID, int32_t posixIDCapacity, UEr
if (tmpLen > 1) {
int32_t i = 0;
// Only need to look up in table if have _, eg for de-de_phoneb type alternate sort.
- bLookup = FALSE;
+ bLookup = false;
for (i = 0; i < UPRV_LENGTHOF(locName); i++)
{
locName[i] = (char)(windowsLocaleName[i]);
@@ -1088,7 +1088,7 @@ uprv_convertToPosix(uint32_t hostid, char *posixID, int32_t posixIDCapacity, UEr
// TODO: Should these be mapped from _phoneb to @collation=phonebook, etc.?
locName[i] = '\0';
tmpLen = i;
- bLookup = TRUE;
+ bLookup = true;
break;
}
else if (windowsLocaleName[i] == L'-')
@@ -1201,7 +1201,7 @@ uprv_convertToLCIDPlatform(const char* localeID, UErrorCode* status)
char asciiBCP47Tag[LOCALE_NAME_MAX_LENGTH] = {};
// this will change it from de_DE@collation=phonebook to de-DE-u-co-phonebk form
- (void)uloc_toLanguageTag(mylocaleID, asciiBCP47Tag, UPRV_LENGTHOF(asciiBCP47Tag), FALSE, status);
+ (void)uloc_toLanguageTag(mylocaleID, asciiBCP47Tag, UPRV_LENGTHOF(asciiBCP47Tag), false, status);
if (U_SUCCESS(*status))
{
diff --git a/thirdparty/icu4c/common/locutil.cpp b/thirdparty/icu4c/common/locutil.cpp
index 3d9d69ff7e..6e2bd497f8 100644
--- a/thirdparty/icu4c/common/locutil.cpp
+++ b/thirdparty/icu4c/common/locutil.cpp
@@ -21,7 +21,7 @@
#include "umutex.h"
// see LocaleUtility::getAvailableLocaleNames
-static icu::UInitOnce LocaleUtilityInitOnce = U_INITONCE_INITIALIZER;
+static icu::UInitOnce LocaleUtilityInitOnce {};
static icu::Hashtable * LocaleUtility_cache = NULL;
#define UNDERSCORE_CHAR ((UChar)0x005f)
@@ -41,7 +41,7 @@ static UBool U_CALLCONV service_cleanup(void) {
delete LocaleUtility_cache;
LocaleUtility_cache = NULL;
}
- return TRUE;
+ return true;
}
diff --git a/thirdparty/icu4c/common/messageimpl.h b/thirdparty/icu4c/common/messageimpl.h
index a56479066b..061df9189d 100644
--- a/thirdparty/icu4c/common/messageimpl.h
+++ b/thirdparty/icu4c/common/messageimpl.h
@@ -55,7 +55,7 @@ public:
UnicodeString &result);
private:
- MessageImpl(); // no constructor: all static methods
+ MessageImpl() = delete; // no constructor: all static methods
};
U_NAMESPACE_END
diff --git a/thirdparty/icu4c/common/messagepattern.cpp b/thirdparty/icu4c/common/messagepattern.cpp
index 66fd2f4c93..52afab5f02 100644
--- a/thirdparty/icu4c/common/messagepattern.cpp
+++ b/thirdparty/icu4c/common/messagepattern.cpp
@@ -97,9 +97,9 @@ public:
UBool ensureCapacityForOneMore(int32_t oldLength, UErrorCode &errorCode);
UBool equals(const MessagePatternList<T, stackCapacity> &other, int32_t length) const {
for(int32_t i=0; i<length; ++i) {
- if(a[i]!=other.a[i]) { return FALSE; }
+ if(a[i]!=other.a[i]) { return false; }
}
- return TRUE;
+ return true;
}
MaybeStackArray<T, stackCapacity> a;
@@ -124,13 +124,13 @@ template<typename T, int32_t stackCapacity>
UBool
MessagePatternList<T, stackCapacity>::ensureCapacityForOneMore(int32_t oldLength, UErrorCode &errorCode) {
if(U_FAILURE(errorCode)) {
- return FALSE;
+ return false;
}
if(a.getCapacity()>oldLength || a.resize(2*oldLength, oldLength)!=NULL) {
- return TRUE;
+ return true;
}
errorCode=U_MEMORY_ALLOCATION_ERROR;
- return FALSE;
+ return false;
}
// MessagePatternList specializations -------------------------------------- ***
@@ -147,7 +147,7 @@ MessagePattern::MessagePattern(UErrorCode &errorCode)
: aposMode(UCONFIG_MSGPAT_DEFAULT_APOSTROPHE_MODE),
partsList(NULL), parts(NULL), partsLength(0),
numericValuesList(NULL), numericValues(NULL), numericValuesLength(0),
- hasArgNames(FALSE), hasArgNumbers(FALSE), needsAutoQuoting(FALSE) {
+ hasArgNames(false), hasArgNumbers(false), needsAutoQuoting(false) {
init(errorCode);
}
@@ -155,7 +155,7 @@ MessagePattern::MessagePattern(UMessagePatternApostropheMode mode, UErrorCode &e
: aposMode(mode),
partsList(NULL), parts(NULL), partsLength(0),
numericValuesList(NULL), numericValues(NULL), numericValuesLength(0),
- hasArgNames(FALSE), hasArgNumbers(FALSE), needsAutoQuoting(FALSE) {
+ hasArgNames(false), hasArgNumbers(false), needsAutoQuoting(false) {
init(errorCode);
}
@@ -163,7 +163,7 @@ MessagePattern::MessagePattern(const UnicodeString &pattern, UParseError *parseE
: aposMode(UCONFIG_MSGPAT_DEFAULT_APOSTROPHE_MODE),
partsList(NULL), parts(NULL), partsLength(0),
numericValuesList(NULL), numericValues(NULL), numericValuesLength(0),
- hasArgNames(FALSE), hasArgNumbers(FALSE), needsAutoQuoting(FALSE) {
+ hasArgNames(false), hasArgNumbers(false), needsAutoQuoting(false) {
if(init(errorCode)) {
parse(pattern, parseError, errorCode);
}
@@ -172,15 +172,15 @@ MessagePattern::MessagePattern(const UnicodeString &pattern, UParseError *parseE
UBool
MessagePattern::init(UErrorCode &errorCode) {
if(U_FAILURE(errorCode)) {
- return FALSE;
+ return false;
}
partsList=new MessagePatternPartsList();
if(partsList==NULL) {
errorCode=U_MEMORY_ALLOCATION_ERROR;
- return FALSE;
+ return false;
}
parts=partsList->a.getAlias();
- return TRUE;
+ return true;
}
MessagePattern::MessagePattern(const MessagePattern &other)
@@ -215,7 +215,7 @@ MessagePattern::operator=(const MessagePattern &other) {
UBool
MessagePattern::copyStorage(const MessagePattern &other, UErrorCode &errorCode) {
if(U_FAILURE(errorCode)) {
- return FALSE;
+ return false;
}
parts=NULL;
partsLength=0;
@@ -225,14 +225,14 @@ MessagePattern::copyStorage(const MessagePattern &other, UErrorCode &errorCode)
partsList=new MessagePatternPartsList();
if(partsList==NULL) {
errorCode=U_MEMORY_ALLOCATION_ERROR;
- return FALSE;
+ return false;
}
parts=partsList->a.getAlias();
}
if(other.partsLength>0) {
partsList->copyFrom(*other.partsList, other.partsLength, errorCode);
if(U_FAILURE(errorCode)) {
- return FALSE;
+ return false;
}
parts=partsList->a.getAlias();
partsLength=other.partsLength;
@@ -242,19 +242,19 @@ MessagePattern::copyStorage(const MessagePattern &other, UErrorCode &errorCode)
numericValuesList=new MessagePatternDoubleList();
if(numericValuesList==NULL) {
errorCode=U_MEMORY_ALLOCATION_ERROR;
- return FALSE;
+ return false;
}
numericValues=numericValuesList->a.getAlias();
}
numericValuesList->copyFrom(
*other.numericValuesList, other.numericValuesLength, errorCode);
if(U_FAILURE(errorCode)) {
- return FALSE;
+ return false;
}
numericValues=numericValuesList->a.getAlias();
numericValuesLength=other.numericValuesLength;
}
- return TRUE;
+ return true;
}
MessagePattern::~MessagePattern() {
@@ -303,8 +303,8 @@ void
MessagePattern::clear() {
// Mostly the same as preParse().
msg.remove();
- hasArgNames=hasArgNumbers=FALSE;
- needsAutoQuoting=FALSE;
+ hasArgNames=hasArgNumbers=false;
+ needsAutoQuoting=false;
partsLength=0;
numericValuesLength=0;
}
@@ -414,8 +414,8 @@ MessagePattern::preParse(const UnicodeString &pattern, UParseError *parseError,
parseError->postContext[0]=0;
}
msg=pattern;
- hasArgNames=hasArgNumbers=FALSE;
- needsAutoQuoting=FALSE;
+ hasArgNames=hasArgNumbers=false;
+ needsAutoQuoting=false;
partsLength=0;
numericValuesLength=0;
}
@@ -458,7 +458,7 @@ MessagePattern::parseMessage(int32_t index, int32_t msgStartLength,
// Add a Part for auto-quoting.
addPart(UMSGPAT_PART_TYPE_INSERT_CHAR, index, 0,
u_apos, errorCode); // value=char to be inserted
- needsAutoQuoting=TRUE;
+ needsAutoQuoting=true;
} else {
c=msg.charAt(index);
if(c==u_apos) {
@@ -491,7 +491,7 @@ MessagePattern::parseMessage(int32_t index, int32_t msgStartLength,
// Add a Part for auto-quoting.
addPart(UMSGPAT_PART_TYPE_INSERT_CHAR, index, 0,
u_apos, errorCode); // value=char to be inserted
- needsAutoQuoting=TRUE;
+ needsAutoQuoting=true;
break;
}
}
@@ -500,7 +500,7 @@ MessagePattern::parseMessage(int32_t index, int32_t msgStartLength,
// Add a Part for auto-quoting.
addPart(UMSGPAT_PART_TYPE_INSERT_CHAR, index, 0,
u_apos, errorCode); // value=char to be inserted
- needsAutoQuoting=TRUE;
+ needsAutoQuoting=true;
}
}
} else if(UMSGPAT_ARG_TYPE_HAS_PLURAL_STYLE(parentType) && c==u_pound) {
@@ -560,7 +560,7 @@ MessagePattern::parseArg(int32_t index, int32_t argStartLength, int32_t nestingL
errorCode=U_INDEX_OUTOFBOUNDS_ERROR;
return 0;
}
- hasArgNumbers=TRUE;
+ hasArgNumbers=true;
addPart(UMSGPAT_PART_TYPE_ARG_NUMBER, nameIndex, length, number, errorCode);
} else if(number==UMSGPAT_ARG_NAME_NOT_NUMBER) {
int32_t length=index-nameIndex;
@@ -569,7 +569,7 @@ MessagePattern::parseArg(int32_t index, int32_t argStartLength, int32_t nestingL
errorCode=U_INDEX_OUTOFBOUNDS_ERROR;
return 0;
}
- hasArgNames=TRUE;
+ hasArgNames=true;
addPart(UMSGPAT_PART_TYPE_ARG_NAME, nameIndex, length, 0, errorCode);
} else { // number<-1 (ARG_NAME_NOT_VALID)
setParseError(parseError, nameIndex); // Bad argument syntax.
@@ -727,7 +727,7 @@ MessagePattern::parseChoiceStyle(int32_t index, int32_t nestingLevel,
errorCode=U_INDEX_OUTOFBOUNDS_ERROR;
return 0;
}
- parseDouble(numberIndex, index, TRUE, parseError, errorCode); // adds ARG_INT or ARG_DOUBLE
+ parseDouble(numberIndex, index, true, parseError, errorCode); // adds ARG_INT or ARG_DOUBLE
if(U_FAILURE(errorCode)) {
return 0;
}
@@ -774,8 +774,8 @@ MessagePattern::parsePluralOrSelectStyle(UMessagePatternArgType argType,
return 0;
}
int32_t start=index;
- UBool isEmpty=TRUE;
- UBool hasOther=FALSE;
+ UBool isEmpty=true;
+ UBool hasOther=false;
for(;;) {
// First, collect the selector looking for a small set of terminators.
// It would be a little faster to consider the syntax of each possible
@@ -811,7 +811,7 @@ MessagePattern::parsePluralOrSelectStyle(UMessagePatternArgType argType,
return 0;
}
addPart(UMSGPAT_PART_TYPE_ARG_SELECTOR, selectorIndex, length, 0, errorCode);
- parseDouble(selectorIndex+1, index, FALSE,
+ parseDouble(selectorIndex+1, index, false,
parseError, errorCode); // adds ARG_INT or ARG_DOUBLE
} else {
index=skipIdentifier(index);
@@ -845,12 +845,12 @@ MessagePattern::parsePluralOrSelectStyle(UMessagePatternArgType argType,
errorCode=U_INDEX_OUTOFBOUNDS_ERROR;
return 0;
}
- parseDouble(valueIndex, index, FALSE,
+ parseDouble(valueIndex, index, false,
parseError, errorCode); // adds ARG_INT or ARG_DOUBLE
if(U_FAILURE(errorCode)) {
return 0;
}
- isEmpty=FALSE;
+ isEmpty=false;
continue; // no message fragment after the offset
} else {
// normal selector word
@@ -861,7 +861,7 @@ MessagePattern::parsePluralOrSelectStyle(UMessagePatternArgType argType,
}
addPart(UMSGPAT_PART_TYPE_ARG_SELECTOR, selectorIndex, length, 0, errorCode);
if(0==msg.compare(selectorIndex, length, kOther, 0, 5)) {
- hasOther=TRUE;
+ hasOther=true;
}
}
}
@@ -880,7 +880,7 @@ MessagePattern::parsePluralOrSelectStyle(UMessagePatternArgType argType,
if(U_FAILURE(errorCode)) {
return 0;
}
- isEmpty=FALSE;
+ isEmpty=false;
}
}
@@ -901,11 +901,11 @@ MessagePattern::parseArgNumber(const UnicodeString &s, int32_t start, int32_t li
return 0;
} else {
number=0;
- badNumber=TRUE; // leading zero
+ badNumber=true; // leading zero
}
} else if(0x31<=c && c<=0x39) {
number=c-0x30;
- badNumber=FALSE;
+ badNumber=false;
} else {
return UMSGPAT_ARG_NAME_NOT_NUMBER;
}
@@ -913,7 +913,7 @@ MessagePattern::parseArgNumber(const UnicodeString &s, int32_t start, int32_t li
c=s.charAt(start++);
if(0x30<=c && c<=0x39) {
if(number>=INT32_MAX/10) {
- badNumber=TRUE; // overflow
+ badNumber=true; // overflow
}
number=number*10+(c-0x30);
} else {
diff --git a/thirdparty/icu4c/common/norm2_nfc_data.h b/thirdparty/icu4c/common/norm2_nfc_data.h
index a774868981..ebe3e6ba90 100644
--- a/thirdparty/icu4c/common/norm2_nfc_data.h
+++ b/thirdparty/icu4c/common/norm2_nfc_data.h
@@ -10,14 +10,14 @@
#ifdef INCLUDED_FROM_NORMALIZER2_CPP
static const UVersionInfo norm2_nfc_data_formatVersion={4,0,0,0};
-static const UVersionInfo norm2_nfc_data_dataVersion={0xe,0,0,0};
+static const UVersionInfo norm2_nfc_data_dataVersion={0xf,0,0,0};
static const int32_t norm2_nfc_data_indexes[Normalizer2Impl::IX_COUNT]={
-0x50,0x4c54,0x88bc,0x89bc,0x89bc,0x89bc,0x89bc,0x89bc,0xc0,0x300,0xae2,0x29e0,0x3c66,0xfc00,0x1288,0x3b9c,
+0x50,0x4cb8,0x8920,0x8a20,0x8a20,0x8a20,0x8a20,0x8a20,0xc0,0x300,0xae2,0x29e0,0x3c66,0xfc00,0x1288,0x3b9c,
0x3c34,0x3c66,0x300,0
};
-static const uint16_t norm2_nfc_data_trieIndex[1748]={
+static const uint16_t norm2_nfc_data_trieIndex[1788]={
0,0x40,0x7b,0xbb,0xfb,0x13a,0x17a,0x1b2,0x1f2,0x226,0x254,0x226,0x294,0x2d4,0x313,0x353,
0x393,0x3d2,0x40f,0x44e,0x226,0x226,0x488,0x4c8,0x4f8,0x530,0x226,0x570,0x59f,0x5de,0x226,0x5f3,
0x631,0x65f,0x687,0x6bd,0x6fd,0x73a,0x75a,0x799,0x7d8,0x815,0x834,0x871,0x75a,0x8aa,0x8d8,0x917,
@@ -82,14 +82,14 @@ static const uint16_t norm2_nfc_data_trieIndex[1748]={
0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,
0x226,0x226,0x226,0x226,0x1880,0x18c0,0x1900,0x1940,0x1980,0x19c0,0x1a00,0x1a40,0x1a63,0x1aa3,0x226,0x226,
0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x1ac3,0x226,0x226,0x226,0x226,0x226,0x226,0x226,
-0x657,0x666,0x67e,0x69d,0x6b2,0x6b2,0x6b2,0x6b6,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,
+0x67e,0x68e,0x6a6,0x6c5,0x6da,0x6da,0x6da,0x6de,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,
0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,
0x226,0x226,0x226,0x226,0x226,0x226,0x226,0xc0b,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,
0x226,0x226,0x226,0x226,0x226,0x226,0x54f,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x40c,
0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x1af6,0x226,0x226,0x1b06,0x226,0x226,0x226,0x226,
0x226,0x226,0x226,0x226,0x226,0x226,0xdf7,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,
0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x1b16,0x226,0x226,0x226,0x226,0x226,0x226,
-0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x1607,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,
+0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x1607,0x226,0x226,0x226,0x226,0x66b,0x226,0x226,0x226,
0x226,0x1b20,0x54f,0x226,0x226,0x1b30,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x81c,0x226,0x226,
0x1b40,0x226,0x1b50,0x1b5d,0x1b69,0x226,0x226,0x226,0x226,0x414,0x226,0x1b74,0x1b84,0x226,0x226,0x226,
0x811,0x226,0x226,0x226,0x226,0x1b94,0x226,0x226,0x226,0x1b9f,0x226,0x226,0x226,0x226,0x226,0x226,
@@ -104,33 +104,35 @@ static const uint16_t norm2_nfc_data_trieIndex[1748]={
0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x9eb,0x226,0x226,0x226,0x226,
0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0xc05,0x226,0x226,0x226,
0x226,0x81b,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,
-0x226,0x1c3d,0x226,0x226,0x226,0xf2c,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,
+0x226,0x226,0x226,0x226,0x226,0x226,0xc08,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,
+0x226,0x226,0x226,0x226,0x226,0x226,0x1c3d,0x226,0x226,0x226,0xf2c,0x226,0x226,0x226,0x226,0x226,
0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,
-0x226,0x226,0x226,0x226,0x226,0x1c4d,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x1c4f,
+0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x1c4d,0x226,0x226,0x226,0x226,0x226,
+0x226,0x226,0x226,0x226,0x1c4f,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,
+0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x1c5e,0x1c6e,0x1c7c,0x1c89,0x226,
+0x1c95,0x1ca3,0x1cb3,0x226,0x226,0x226,0x226,0xd1b,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,
0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,
-0x226,0x226,0x226,0x226,0x226,0x226,0x1c5e,0x1c6e,0x1c7c,0x1c89,0x226,0x1c95,0x1ca3,0x1cb3,0x226,0x226,
-0x226,0x226,0xd1b,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,
-0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x1cc3,0x1ccb,
-0x1cd9,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,
-0x226,0xf2c,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x7c9,0x226,
-0x226,0x226,0x4fc,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,
-0x226,0x226,0x226,0x226,0x1ce9,0x226,0x226,0x226,0x226,0x226,0x226,0x1cf5,0x226,0x226,0x226,0x226,
-0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x1d05,0x1d15,0x1d25,0x1d35,0x1d45,0x1d55,0x1d65,0x1d75,0x1d85,
-0x1d95,0x1da5,0x1db5,0x1dc5,0x1dd5,0x1de5,0x1df5,0x1e05,0x1e15,0x1e25,0x1e35,0x1e45,0x1e55,0x1e65,0x1e75,0x1e85,
-0x1e95,0x1ea5,0x1eb5,0x1ec5,0x1ed5,0x1ee5,0x1ef5,0x1f05,0x1f15,0x226,0x226,0x226,0x226,0x226,0x226,0x226,
+0x226,0x226,0x226,0x1cc3,0x1ccb,0x1cd9,0x226,0x226,0x226,0x226,0x226,0x4f9,0x226,0x226,0x226,0x226,
+0x226,0x226,0x226,0x226,0x226,0x226,0xf2c,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,
+0x226,0x226,0x226,0x7c9,0x226,0x226,0x226,0x4fc,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,
+0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x1ce4,0x226,0x226,0x226,0x226,0x226,0x226,
+0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x1cf4,0x226,0x226,0x226,0x226,
+0x226,0x226,0x1d00,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x1d10,0x1d20,
+0x1d30,0x1d40,0x1d50,0x1d60,0x1d70,0x1d80,0x1d90,0x1da0,0x1db0,0x1dc0,0x1dd0,0x1de0,0x1df0,0x1e00,0x1e10,0x1e20,
+0x1e30,0x1e40,0x1e50,0x1e60,0x1e70,0x1e80,0x1e90,0x1ea0,0x1eb0,0x1ec0,0x1ed0,0x1ee0,0x1ef0,0x1f00,0x1f10,0x1f20,
0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,
-0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x408,0x428,0xc4,0xc4,0xc4,0x448,0x457,0x46d,0x489,
-0x4a6,0x4c2,0x4df,0x4fc,0x51b,0x538,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,
-0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0x552,0xc4,0x566,0xc4,0xc4,
+0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x226,0x408,0x428,
+0xc4,0xc4,0xc4,0x448,0x457,0x46d,0x489,0x4a6,0x4c2,0x4df,0x4fc,0x51b,0x538,0x552,0xc4,0xc4,
0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,
-0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0x586,0xc4,0xc4,0xc4,
-0xc4,0xc4,0xc4,0xc4,0xc4,0x591,0x5ae,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0x5ce,0x5e4,0xc4,
-0xc4,0x5f7,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,
+0xc4,0xc4,0xc4,0x567,0xc4,0x57b,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,
0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,
-0xc4,0xc4,0x617,0x637
+0xc4,0xc4,0xc4,0xc4,0x59b,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0x5a6,0x5c3,0xc4,
+0xc4,0xc4,0xc4,0xc4,0xc4,0x5e3,0x5f9,0x60b,0xc4,0x61e,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,
+0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,
+0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0xc4,0x63e,0x65e
};
-static const uint16_t norm2_nfc_data_trieData[7974]={
+static const uint16_t norm2_nfc_data_trieData[7984]={
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
@@ -518,7 +520,7 @@ static const uint16_t norm2_nfc_data_trieData[7974]={
1,1,1,1,1,0x3c66,1,0x3c66,0x3c66,0x3c66,0x3c66,0x3c66,0x3c66,1,1,1,
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0x3c66,
0x3c66,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
-1,1,1,1,0x3c66,1,1,1,1,0x3c66,1,1,1,0x3c66,1,0x3c66,
+1,1,1,1,0x3c66,1,1,1,1,0x3c66,1,1,1,0x3c66,0x3c66,0x3c66,
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
@@ -593,49 +595,49 @@ static const uint16_t norm2_nfc_data_trieData[7974]={
0x3304,0x331c,0x3334,0x334c,1,1,1,1,1,1,1,1,1,1,1,1,
1,1,1,0xffcc,0xffcc,0xffcc,0xffcc,0xffcc,0xffcc,0xffcc,1,0xffcc,0xffcc,0xffcc,0xffcc,0xffcc,
0xffcc,0xffcc,0xffcc,0xffcc,1,1,0xffcc,0xffcc,0xffcc,0xffcc,0xffcc,1,0xffcc,0xffcc,1,0xffcc,
-0xffcc,0xffcc,0xffcc,0xffcc,1,1,1,1,1,0xffb8,0xffb8,0xffb8,0xffb8,0xffb8,0xffb8,0xffb8,
-1,1,1,1,1,1,1,1,1,0xffcc,0xffcc,0xffcc,0xffcc,0xffcc,0xffcc,0xfe0e,
-1,1,1,1,1,0x335b,0x335f,0x3363,0x3367,0x336d,0x2f4d,0x3371,0x3375,0x3379,0x337d,0x2f51,
-0x3381,0x3385,0x3389,0x2f55,0x338f,0x3393,0x3397,0x339b,0x33a1,0x33a5,0x33a9,0x33ad,0x33b3,0x33b7,0x33bb,0x33bf,
-0x303f,0x33c3,0x33c9,0x33cd,0x33d1,0x33d5,0x33d9,0x33dd,0x33e1,0x33e5,0x3053,0x2f59,0x2f5d,0x3057,0x33e9,0x33ed,
-0x2c59,0x33f1,0x2f61,0x33f5,0x33f9,0x33fd,0x3401,0x3401,0x3401,0x3405,0x340b,0x340f,0x3413,0x3417,0x341d,0x3421,
-0x3425,0x3429,0x342d,0x3431,0x3435,0x3439,0x343d,0x3441,0x3445,0x3449,0x344d,0x344d,0x305f,0x3451,0x3455,0x3459,
-0x345d,0x2f69,0x3461,0x3465,0x3469,0x2ebd,0x346d,0x3471,0x3475,0x3479,0x347d,0x3481,0x3485,0x3489,0x348d,0x3493,
-0x3497,0x349b,0x349f,0x34a3,0x34a7,0x34ab,0x34b1,0x34b7,0x34bb,0x34bf,0x34c3,0x34c7,0x34cb,0x34cf,0x34d3,0x34d7,
-0x34d7,0x34db,0x34e1,0x34e5,0x2c49,0x34e9,0x34ed,0x34f3,0x34f7,0x34fb,0x34ff,0x3503,0x3507,0x2f7d,0x350b,0x350f,
-0x3513,0x3519,0x351d,0x3523,0x3527,0x352b,0x352f,0x3533,0x3537,0x353b,0x353f,0x3543,0x3547,0x354b,0x354f,0x3555,
-0x3559,0x355d,0x3561,0x2b71,0x3565,0x356b,0x356f,0x356f,0x3575,0x3579,0x3579,0x357d,0x3581,0x3587,0x358d,0x3591,
-0x3595,0x3599,0x359d,0x35a1,0x35a5,0x35a9,0x35ad,0x35b1,0x2f81,0x35b5,0x35bb,0x35bf,0x35c3,0x308f,0x35c3,0x35c7,
-0x2f89,0x35cb,0x35cf,0x35d3,0x35d7,0x2f8d,0x2b05,0x35db,0x35df,0x35e3,0x35e7,0x35eb,0x35ef,0x35f3,0x35f9,0x35fd,
-0x3601,0x3605,0x3609,0x360d,0x3613,0x3617,0x361b,0x361f,0x3623,0x3627,0x362b,0x362f,0x3633,0x2f91,0x3637,0x363b,
-0x3641,0x3645,0x3649,0x364d,0x2f99,0x3651,0x3655,0x3659,0x365d,0x3661,0x3665,0x3669,0x366d,0x2b75,0x30af,0x3671,
-0x3675,0x3679,0x367d,0x3683,0x3687,0x368b,0x368f,0x2f9d,0x3693,0x3699,0x369d,0x36a1,0x3161,0x36a5,0x36a9,0x36ad,
-0x36b1,0x36b5,0x36bb,0x36bf,0x36c3,0x36c7,0x36cd,0x36d1,0x36d5,0x36d9,0x2c8d,0x36dd,0x36e1,0x36e7,0x36ed,0x36f3,
-0x36f7,0x36fd,0x3701,0x3705,0x3709,0x370d,0x2fa1,0x2de9,0x3711,0x3715,0x3719,0x371d,0x3723,0x3727,0x372b,0x372f,
-0x30bf,0x3733,0x3737,0x373d,0x3741,0x3745,0x374b,0x3751,0x3755,0x30c3,0x3759,0x375d,0x3761,0x3765,0x3769,0x376d,
-0x3771,0x3777,0x377b,0x3781,0x3785,0x378b,0x30cb,0x378f,0x3793,0x3799,0x379d,0x37a1,0x37a7,0x37ad,0x37b1,0x37b5,
-0x37b9,0x37bd,0x37bd,0x37c1,0x37c5,0x30d3,0x37c9,0x37cd,0x37d1,0x37d5,0x37d9,0x37df,0x37e3,0x2c55,0x37e9,0x37ef,
-0x37f3,0x37f9,0x37ff,0x3805,0x3809,0x30eb,0x380d,0x3813,0x3819,0x381f,0x3825,0x3829,0x3829,0x30ef,0x3169,0x382d,
-0x3831,0x3835,0x3839,0x383f,0x2bbd,0x30f7,0x3843,0x3847,0x2fcd,0x384d,0x3853,0x2f15,0x3859,0x385d,0x2fdd,0x3861,
-0x3865,0x3869,0x386f,0x386f,0x3875,0x3879,0x387d,0x3883,0x3887,0x388b,0x388f,0x3895,0x3899,0x389d,0x38a1,0x38a5,
-0x38a9,0x38af,0x38b3,0x38b7,0x38bb,0x38bf,0x38c3,0x38c7,0x38cd,0x38d3,0x38d7,0x38dd,0x38e1,0x38e7,0x38eb,0x2ff5,
-0x38ef,0x38f5,0x38fb,0x38ff,0x3905,0x3909,0x390f,0x3913,0x3917,0x391b,0x391f,0x3923,0x3927,0x392d,0x3933,0x3939,
-0x3575,0x393f,0x3943,0x3947,0x394b,0x394f,0x3953,0x3957,0x395b,0x395f,0x3963,0x3967,0x396b,0x2c9d,0x3971,0x3975,
-0x3979,0x397d,0x3981,0x3985,0x3001,0x3989,0x398d,0x3991,0x3995,0x3999,0x399f,0x39a5,0x39ab,0x39af,0x39b3,0x39b7,
-0x39bb,0x39c1,0x39c5,0x39cb,0x39cf,0x39d3,0x39d9,0x39df,0x39e3,0x2ba9,0x39e7,0x39eb,0x39ef,0x39f3,0x39f7,0x39fb,
-0x3113,0x39ff,0x3a03,0x3a07,0x3a0b,0x3a0f,0x3a13,0x3a17,0x3a1b,0x3a1f,0x3a23,0x3a29,0x3a2d,0x3a31,0x3a35,0x3a39,
-0x3a3d,0x3a43,0x3a49,0x3a4d,0x3a51,0x3127,0x312b,0x3a55,0x3a59,0x3a5f,0x3a63,0x3a67,0x3a6b,0x3a6f,0x3a75,0x3a7b,
-0x3a7f,0x3a83,0x3a87,0x3a8d,0x312f,0x3a91,0x3a97,0x3a9d,0x3aa1,0x3aa5,0x3aa9,0x3aaf,0x3ab3,0x3ab7,0x3abb,0x3abf,
-0x3ac3,0x3ac7,0x3acb,0x3ad1,0x3ad5,0x3ad9,0x3add,0x3ae3,0x3ae7,0x3aeb,0x3aef,0x3af3,0x3af9,0x3aff,0x3b03,0x3b07,
-0x3b0b,0x3b11,0x3b15,0x3147,0x3147,0x3b1b,0x3b1f,0x3b25,0x3b29,0x3b2d,0x3b31,0x3b35,0x3b39,0x3b3d,0x3b41,0x314b,
-0x3b47,0x3b4b,0x3b4f,0x3b53,0x3b57,0x3b5b,0x3b61,0x3b65,0x3b6b,0x3b71,0x3b77,0x3b7b,0x3b7f,0x3b83,0x3b87,0x3b8b,
-0x3b8f,0x3b93,0x3b97,1,1,1
+0xffcc,0xffcc,0xffcc,0xffcc,1,1,1,1,1,1,1,1,1,1,1,1,
+0xffd0,0xffd0,0xffb8,0xffcc,0xffb8,0xffb8,0xffb8,0xffb8,0xffb8,0xffb8,0xffb8,1,1,1,1,1,
+1,1,1,1,0xffcc,0xffcc,0xffcc,0xffcc,0xffcc,0xffcc,0xfe0e,1,1,1,1,1,
+0x335b,0x335f,0x3363,0x3367,0x336d,0x2f4d,0x3371,0x3375,0x3379,0x337d,0x2f51,0x3381,0x3385,0x3389,0x2f55,0x338f,
+0x3393,0x3397,0x339b,0x33a1,0x33a5,0x33a9,0x33ad,0x33b3,0x33b7,0x33bb,0x33bf,0x303f,0x33c3,0x33c9,0x33cd,0x33d1,
+0x33d5,0x33d9,0x33dd,0x33e1,0x33e5,0x3053,0x2f59,0x2f5d,0x3057,0x33e9,0x33ed,0x2c59,0x33f1,0x2f61,0x33f5,0x33f9,
+0x33fd,0x3401,0x3401,0x3401,0x3405,0x340b,0x340f,0x3413,0x3417,0x341d,0x3421,0x3425,0x3429,0x342d,0x3431,0x3435,
+0x3439,0x343d,0x3441,0x3445,0x3449,0x344d,0x344d,0x305f,0x3451,0x3455,0x3459,0x345d,0x2f69,0x3461,0x3465,0x3469,
+0x2ebd,0x346d,0x3471,0x3475,0x3479,0x347d,0x3481,0x3485,0x3489,0x348d,0x3493,0x3497,0x349b,0x349f,0x34a3,0x34a7,
+0x34ab,0x34b1,0x34b7,0x34bb,0x34bf,0x34c3,0x34c7,0x34cb,0x34cf,0x34d3,0x34d7,0x34d7,0x34db,0x34e1,0x34e5,0x2c49,
+0x34e9,0x34ed,0x34f3,0x34f7,0x34fb,0x34ff,0x3503,0x3507,0x2f7d,0x350b,0x350f,0x3513,0x3519,0x351d,0x3523,0x3527,
+0x352b,0x352f,0x3533,0x3537,0x353b,0x353f,0x3543,0x3547,0x354b,0x354f,0x3555,0x3559,0x355d,0x3561,0x2b71,0x3565,
+0x356b,0x356f,0x356f,0x3575,0x3579,0x3579,0x357d,0x3581,0x3587,0x358d,0x3591,0x3595,0x3599,0x359d,0x35a1,0x35a5,
+0x35a9,0x35ad,0x35b1,0x2f81,0x35b5,0x35bb,0x35bf,0x35c3,0x308f,0x35c3,0x35c7,0x2f89,0x35cb,0x35cf,0x35d3,0x35d7,
+0x2f8d,0x2b05,0x35db,0x35df,0x35e3,0x35e7,0x35eb,0x35ef,0x35f3,0x35f9,0x35fd,0x3601,0x3605,0x3609,0x360d,0x3613,
+0x3617,0x361b,0x361f,0x3623,0x3627,0x362b,0x362f,0x3633,0x2f91,0x3637,0x363b,0x3641,0x3645,0x3649,0x364d,0x2f99,
+0x3651,0x3655,0x3659,0x365d,0x3661,0x3665,0x3669,0x366d,0x2b75,0x30af,0x3671,0x3675,0x3679,0x367d,0x3683,0x3687,
+0x368b,0x368f,0x2f9d,0x3693,0x3699,0x369d,0x36a1,0x3161,0x36a5,0x36a9,0x36ad,0x36b1,0x36b5,0x36bb,0x36bf,0x36c3,
+0x36c7,0x36cd,0x36d1,0x36d5,0x36d9,0x2c8d,0x36dd,0x36e1,0x36e7,0x36ed,0x36f3,0x36f7,0x36fd,0x3701,0x3705,0x3709,
+0x370d,0x2fa1,0x2de9,0x3711,0x3715,0x3719,0x371d,0x3723,0x3727,0x372b,0x372f,0x30bf,0x3733,0x3737,0x373d,0x3741,
+0x3745,0x374b,0x3751,0x3755,0x30c3,0x3759,0x375d,0x3761,0x3765,0x3769,0x376d,0x3771,0x3777,0x377b,0x3781,0x3785,
+0x378b,0x30cb,0x378f,0x3793,0x3799,0x379d,0x37a1,0x37a7,0x37ad,0x37b1,0x37b5,0x37b9,0x37bd,0x37bd,0x37c1,0x37c5,
+0x30d3,0x37c9,0x37cd,0x37d1,0x37d5,0x37d9,0x37df,0x37e3,0x2c55,0x37e9,0x37ef,0x37f3,0x37f9,0x37ff,0x3805,0x3809,
+0x30eb,0x380d,0x3813,0x3819,0x381f,0x3825,0x3829,0x3829,0x30ef,0x3169,0x382d,0x3831,0x3835,0x3839,0x383f,0x2bbd,
+0x30f7,0x3843,0x3847,0x2fcd,0x384d,0x3853,0x2f15,0x3859,0x385d,0x2fdd,0x3861,0x3865,0x3869,0x386f,0x386f,0x3875,
+0x3879,0x387d,0x3883,0x3887,0x388b,0x388f,0x3895,0x3899,0x389d,0x38a1,0x38a5,0x38a9,0x38af,0x38b3,0x38b7,0x38bb,
+0x38bf,0x38c3,0x38c7,0x38cd,0x38d3,0x38d7,0x38dd,0x38e1,0x38e7,0x38eb,0x2ff5,0x38ef,0x38f5,0x38fb,0x38ff,0x3905,
+0x3909,0x390f,0x3913,0x3917,0x391b,0x391f,0x3923,0x3927,0x392d,0x3933,0x3939,0x3575,0x393f,0x3943,0x3947,0x394b,
+0x394f,0x3953,0x3957,0x395b,0x395f,0x3963,0x3967,0x396b,0x2c9d,0x3971,0x3975,0x3979,0x397d,0x3981,0x3985,0x3001,
+0x3989,0x398d,0x3991,0x3995,0x3999,0x399f,0x39a5,0x39ab,0x39af,0x39b3,0x39b7,0x39bb,0x39c1,0x39c5,0x39cb,0x39cf,
+0x39d3,0x39d9,0x39df,0x39e3,0x2ba9,0x39e7,0x39eb,0x39ef,0x39f3,0x39f7,0x39fb,0x3113,0x39ff,0x3a03,0x3a07,0x3a0b,
+0x3a0f,0x3a13,0x3a17,0x3a1b,0x3a1f,0x3a23,0x3a29,0x3a2d,0x3a31,0x3a35,0x3a39,0x3a3d,0x3a43,0x3a49,0x3a4d,0x3a51,
+0x3127,0x312b,0x3a55,0x3a59,0x3a5f,0x3a63,0x3a67,0x3a6b,0x3a6f,0x3a75,0x3a7b,0x3a7f,0x3a83,0x3a87,0x3a8d,0x312f,
+0x3a91,0x3a97,0x3a9d,0x3aa1,0x3aa5,0x3aa9,0x3aaf,0x3ab3,0x3ab7,0x3abb,0x3abf,0x3ac3,0x3ac7,0x3acb,0x3ad1,0x3ad5,
+0x3ad9,0x3add,0x3ae3,0x3ae7,0x3aeb,0x3aef,0x3af3,0x3af9,0x3aff,0x3b03,0x3b07,0x3b0b,0x3b11,0x3b15,0x3147,0x3147,
+0x3b1b,0x3b1f,0x3b25,0x3b29,0x3b2d,0x3b31,0x3b35,0x3b39,0x3b3d,0x3b41,0x314b,0x3b47,0x3b4b,0x3b4f,0x3b53,0x3b57,
+0x3b5b,0x3b61,0x3b65,0x3b6b,0x3b71,0x3b77,0x3b7b,0x3b7f,0x3b83,0x3b87,0x3b8b,0x3b8f,0x3b93,0x3b97,1,1
};
static const UCPTrie norm2_nfc_data_trie={
norm2_nfc_data_trieIndex,
{ norm2_nfc_data_trieData },
- 1748, 7974,
+ 1788, 7984,
0x2fc00, 0x30,
0, 0,
0, 0,
diff --git a/thirdparty/icu4c/common/normalizer2.cpp b/thirdparty/icu4c/common/normalizer2.cpp
index 6be7e0b21a..3617264490 100644
--- a/thirdparty/icu4c/common/normalizer2.cpp
+++ b/thirdparty/icu4c/common/normalizer2.cpp
@@ -62,7 +62,7 @@ Normalizer2::normalizeUTF8(uint32_t /*options*/, StringPiece src, ByteSink &sink
UBool
Normalizer2::getRawDecomposition(UChar32, UnicodeString &) const {
- return FALSE;
+ return false;
}
UChar32
@@ -142,7 +142,7 @@ class NoopNormalizer2 : public Normalizer2 {
}
virtual UBool
getDecomposition(UChar32, UnicodeString &) const U_OVERRIDE {
- return FALSE;
+ return false;
}
// No need to U_OVERRIDE the default getRawDecomposition().
virtual UBool
@@ -161,9 +161,9 @@ class NoopNormalizer2 : public Normalizer2 {
spanQuickCheckYes(const UnicodeString &s, UErrorCode &) const U_OVERRIDE {
return s.length();
}
- virtual UBool hasBoundaryBefore(UChar32) const U_OVERRIDE { return TRUE; }
- virtual UBool hasBoundaryAfter(UChar32) const U_OVERRIDE { return TRUE; }
- virtual UBool isInert(UChar32) const U_OVERRIDE { return TRUE; }
+ virtual UBool hasBoundaryBefore(UChar32) const U_OVERRIDE { return true; }
+ virtual UBool hasBoundaryAfter(UChar32) const U_OVERRIDE { return true; }
+ virtual UBool isInert(UChar32) const U_OVERRIDE { return true; }
};
NoopNormalizer2::~NoopNormalizer2() {}
@@ -183,7 +183,7 @@ static UBool U_CALLCONV uprv_normalizer2_cleanup();
U_CDECL_END
static Normalizer2 *noopSingleton;
-static icu::UInitOnce noopInitOnce = U_INITONCE_INITIALIZER;
+static icu::UInitOnce noopInitOnce {};
static void U_CALLCONV initNoopSingleton(UErrorCode &errorCode) {
if(U_FAILURE(errorCode)) {
@@ -245,7 +245,7 @@ Norm2AllModes::createNFCInstance(UErrorCode &errorCode) {
static Norm2AllModes *nfcSingleton;
-static icu::UInitOnce nfcInitOnce = U_INITONCE_INITIALIZER;
+static icu::UInitOnce nfcInitOnce {};
static void U_CALLCONV initNFCSingleton(UErrorCode &errorCode) {
nfcSingleton=Norm2AllModes::createNFCInstance(errorCode);
@@ -299,7 +299,7 @@ static UBool U_CALLCONV uprv_normalizer2_cleanup() {
nfcSingleton = NULL;
nfcInitOnce.reset();
#endif
- return TRUE;
+ return true;
}
U_CDECL_END
@@ -423,7 +423,7 @@ unorm2_normalizeSecondAndAppend(const UNormalizer2 *norm2,
return normalizeSecondAndAppend(norm2,
first, firstLength, firstCapacity,
second, secondLength,
- TRUE, pErrorCode);
+ true, pErrorCode);
}
U_CAPI int32_t U_EXPORT2
@@ -434,7 +434,7 @@ unorm2_append(const UNormalizer2 *norm2,
return normalizeSecondAndAppend(norm2,
first, firstLength, firstCapacity,
second, secondLength,
- FALSE, pErrorCode);
+ false, pErrorCode);
}
U_CAPI int32_t U_EXPORT2
diff --git a/thirdparty/icu4c/common/normalizer2impl.cpp b/thirdparty/icu4c/common/normalizer2impl.cpp
index e6bd75e717..d7e05e44d7 100644
--- a/thirdparty/icu4c/common/normalizer2impl.cpp
+++ b/thirdparty/icu4c/common/normalizer2impl.cpp
@@ -185,7 +185,7 @@ UBool ReorderingBuffer::init(int32_t destCapacity, UErrorCode &errorCode) {
if(start==NULL) {
// getBuffer() already did str.setToBogus()
errorCode=U_MEMORY_ALLOCATION_ERROR;
- return FALSE;
+ return false;
}
limit=start+length;
remainingCapacity=str.getCapacity()-length;
@@ -201,7 +201,7 @@ UBool ReorderingBuffer::init(int32_t destCapacity, UErrorCode &errorCode) {
}
reorderStart=codePointLimit;
}
- return TRUE;
+ return true;
}
UBool ReorderingBuffer::equals(const UChar *otherStart, const UChar *otherLimit) const {
@@ -217,7 +217,7 @@ UBool ReorderingBuffer::equals(const uint8_t *otherStart, const uint8_t *otherLi
int32_t otherLength = (int32_t)(otherLimit - otherStart);
// For equal strings, UTF-8 is at least as long as UTF-16, and at most three times as long.
if (otherLength < length || (otherLength / 3) > length) {
- return FALSE;
+ return false;
}
// Compare valid strings from between normalization boundaries.
// (Invalid sequences are normalization-inert.)
@@ -225,21 +225,21 @@ UBool ReorderingBuffer::equals(const uint8_t *otherStart, const uint8_t *otherLi
if (i >= length) {
return j >= otherLength;
} else if (j >= otherLength) {
- return FALSE;
+ return false;
}
// Not at the end of either string yet.
UChar32 c, other;
U16_NEXT_UNSAFE(start, i, c);
U8_NEXT_UNSAFE(otherStart, j, other);
if (c != other) {
- return FALSE;
+ return false;
}
}
}
UBool ReorderingBuffer::appendSupplementary(UChar32 c, uint8_t cc, UErrorCode &errorCode) {
if(remainingCapacity<2 && !resize(2, errorCode)) {
- return FALSE;
+ return false;
}
if(lastCC<=cc || cc==0) {
limit[0]=U16_LEAD(c);
@@ -253,17 +253,17 @@ UBool ReorderingBuffer::appendSupplementary(UChar32 c, uint8_t cc, UErrorCode &e
insert(c, cc);
}
remainingCapacity-=2;
- return TRUE;
+ return true;
}
UBool ReorderingBuffer::append(const UChar *s, int32_t length, UBool isNFD,
uint8_t leadCC, uint8_t trailCC,
UErrorCode &errorCode) {
if(length==0) {
- return TRUE;
+ return true;
}
if(remainingCapacity<length && !resize(length, errorCode)) {
- return FALSE;
+ return false;
}
remainingCapacity-=length;
if(lastCC<=leadCC || leadCC==0) {
@@ -294,13 +294,13 @@ UBool ReorderingBuffer::append(const UChar *s, int32_t length, UBool isNFD,
append(c, leadCC, errorCode);
}
}
- return TRUE;
+ return true;
}
UBool ReorderingBuffer::appendZeroCC(UChar32 c, UErrorCode &errorCode) {
int32_t cpLength=U16_LENGTH(c);
if(remainingCapacity<cpLength && !resize(cpLength, errorCode)) {
- return FALSE;
+ return false;
}
remainingCapacity-=cpLength;
if(cpLength==1) {
@@ -312,23 +312,23 @@ UBool ReorderingBuffer::appendZeroCC(UChar32 c, UErrorCode &errorCode) {
}
lastCC=0;
reorderStart=limit;
- return TRUE;
+ return true;
}
UBool ReorderingBuffer::appendZeroCC(const UChar *s, const UChar *sLimit, UErrorCode &errorCode) {
if(s==sLimit) {
- return TRUE;
+ return true;
}
int32_t length=(int32_t)(sLimit-s);
if(remainingCapacity<length && !resize(length, errorCode)) {
- return FALSE;
+ return false;
}
u_memcpy(limit, s, length);
limit+=length;
remainingCapacity-=length;
lastCC=0;
reorderStart=limit;
- return TRUE;
+ return true;
}
void ReorderingBuffer::remove() {
@@ -365,12 +365,12 @@ UBool ReorderingBuffer::resize(int32_t appendLength, UErrorCode &errorCode) {
if(start==NULL) {
// getBuffer() already did str.setToBogus()
errorCode=U_MEMORY_ALLOCATION_ERROR;
- return FALSE;
+ return false;
}
reorderStart=start+reorderStartIndex;
limit=start+length;
remainingCapacity=str.getCapacity()-length;
- return TRUE;
+ return true;
}
void ReorderingBuffer::skipPrevious() {
@@ -728,7 +728,7 @@ UBool Normalizer2Impl::decompose(UChar32 c, uint16_t norm16,
} else {
leadCC=0;
}
- return buffer.append((const UChar *)mapping+1, length, TRUE, leadCC, trailCC, errorCode);
+ return buffer.append((const UChar *)mapping+1, length, true, leadCC, trailCC, errorCode);
}
// Dual functionality:
@@ -820,11 +820,11 @@ Normalizer2Impl::decomposeUTF8(uint32_t options,
if (U_FAILURE(errorCode)) {
break;
}
- decomposeShort(prevBoundary, src, STOP_AT_LIMIT, FALSE /* onlyContiguous */,
+ decomposeShort(prevBoundary, src, STOP_AT_LIMIT, false /* onlyContiguous */,
buffer, errorCode);
// Decompose until the next boundary.
if (buffer.getLastCC() > 1) {
- src = decomposeShort(src, limit, STOP_AT_DECOMP_BOUNDARY, FALSE /* onlyContiguous */,
+ src = decomposeShort(src, limit, STOP_AT_DECOMP_BOUNDARY, false /* onlyContiguous */,
buffer, errorCode);
}
if (U_FAILURE(errorCode)) {
@@ -931,7 +931,7 @@ Normalizer2Impl::decomposeShort(const uint8_t *src, const uint8_t *limit,
if (leadCC == 0 && stopAt == STOP_AT_DECOMP_BOUNDARY) {
return prevSrc;
}
- if (!buffer.append((const char16_t *)mapping+1, length, TRUE, leadCC, trailCC, errorCode)) {
+ if (!buffer.append((const char16_t *)mapping+1, length, true, leadCC, trailCC, errorCode)) {
return nullptr;
}
}
@@ -1052,7 +1052,7 @@ void Normalizer2Impl::decomposeAndAppend(const UChar *src, const UChar *limit,
limit=u_strchr(p, 0);
}
- if (buffer.append(src, (int32_t)(p - src), FALSE, firstCC, prevCC, errorCode)) {
+ if (buffer.append(src, (int32_t)(p - src), false, firstCC, prevCC, errorCode)) {
buffer.appendZeroCC(p, limit, errorCode);
}
}
@@ -1064,7 +1064,7 @@ UBool Normalizer2Impl::hasDecompBoundaryBefore(UChar32 c) const {
UBool Normalizer2Impl::norm16HasDecompBoundaryBefore(uint16_t norm16) const {
if (norm16 < minNoNoCompNoMaybeCC) {
- return TRUE;
+ return true;
}
if (norm16 >= limitNoNo) {
return norm16 <= MIN_NORMAL_MAYBE_YES || norm16 == JAMO_VT;
@@ -1072,23 +1072,23 @@ UBool Normalizer2Impl::norm16HasDecompBoundaryBefore(uint16_t norm16) const {
// c decomposes, get everything from the variable-length extra data
const uint16_t *mapping=getMapping(norm16);
uint16_t firstUnit=*mapping;
- // TRUE if leadCC==0 (hasFCDBoundaryBefore())
+ // true if leadCC==0 (hasFCDBoundaryBefore())
return (firstUnit&MAPPING_HAS_CCC_LCCC_WORD)==0 || (*(mapping-1)&0xff00)==0;
}
UBool Normalizer2Impl::hasDecompBoundaryAfter(UChar32 c) const {
if (c < minDecompNoCP) {
- return TRUE;
+ return true;
}
if (c <= 0xffff && !singleLeadMightHaveNonZeroFCD16(c)) {
- return TRUE;
+ return true;
}
return norm16HasDecompBoundaryAfter(getNorm16(c));
}
UBool Normalizer2Impl::norm16HasDecompBoundaryAfter(uint16_t norm16) const {
if(norm16 <= minYesNo || isHangulLVT(norm16)) {
- return TRUE;
+ return true;
}
if (norm16 >= limitNoNo) {
if (isMaybeOrNonZeroCC(norm16)) {
@@ -1103,13 +1103,13 @@ UBool Normalizer2Impl::norm16HasDecompBoundaryAfter(uint16_t norm16) const {
// decomp after-boundary: same as hasFCDBoundaryAfter(),
// fcd16<=1 || trailCC==0
if(firstUnit>0x1ff) {
- return FALSE; // trailCC>1
+ return false; // trailCC>1
}
if(firstUnit<=0xff) {
- return TRUE; // trailCC==0
+ return true; // trailCC==0
}
// if(trailCC==1) test leadCC==0, same as checking for before-boundary
- // TRUE if leadCC==0 (hasFCDBoundaryBefore())
+ // true if leadCC==0 (hasFCDBoundaryBefore())
return (firstUnit&MAPPING_HAS_CCC_LCCC_WORD)==0 || (*(mapping-1)&0xff00)==0;
}
@@ -1235,7 +1235,7 @@ void Normalizer2Impl::recompose(ReorderingBuffer &buffer, int32_t recomposeStart
// and are only initialized now to avoid compiler warnings.
compositionsList=NULL; // used as indicator for whether we have a forward-combining starter
starter=NULL;
- starterIsSupplementary=FALSE;
+ starterIsSupplementary=false;
prevCC=0;
for(;;) {
@@ -1301,7 +1301,7 @@ void Normalizer2Impl::recompose(ReorderingBuffer &buffer, int32_t recomposeStart
*starter=(UChar)composite;
// The composite is shorter than the starter,
// move the intermediate characters forward one.
- starterIsSupplementary=FALSE;
+ starterIsSupplementary=false;
q=starter+1;
r=q+1;
while(r<pRemove) {
@@ -1312,7 +1312,7 @@ void Normalizer2Impl::recompose(ReorderingBuffer &buffer, int32_t recomposeStart
} else if(U_IS_SUPPLEMENTARY(composite)) {
// The composite is longer than the starter,
// move the intermediate characters back one.
- starterIsSupplementary=TRUE;
+ starterIsSupplementary=true;
++starter; // temporarily increment for the loop boundary
q=pRemove;
r=++pRemove;
@@ -1366,10 +1366,10 @@ void Normalizer2Impl::recompose(ReorderingBuffer &buffer, int32_t recomposeStart
if((compositionsList=getCompositionsListForDecompYes(norm16))!=NULL) {
// It may combine with something, prepare for it.
if(U_IS_BMP(c)) {
- starterIsSupplementary=FALSE;
+ starterIsSupplementary=false;
starter=p-1;
} else {
- starterIsSupplementary=TRUE;
+ starterIsSupplementary=true;
starter=p-2;
}
}
@@ -1447,7 +1447,7 @@ Normalizer2Impl::compose(const UChar *src, const UChar *limit,
doCompose ? &buffer : NULL,
errorCode);
if(U_FAILURE(errorCode)) {
- return FALSE;
+ return false;
}
limit=u_strchr(src, 0);
if (prevBoundary != src) {
@@ -1471,7 +1471,7 @@ Normalizer2Impl::compose(const UChar *src, const UChar *limit,
if (prevBoundary != limit && doCompose) {
buffer.appendZeroCC(prevBoundary, limit, errorCode);
}
- return TRUE;
+ return true;
}
if( (c=*src)<minNoMaybeCP ||
isCompYesAndZeroCC(norm16=UCPTRIE_FAST_BMP_GET(normTrie, UCPTRIE_16, c))
@@ -1503,7 +1503,7 @@ Normalizer2Impl::compose(const UChar *src, const UChar *limit,
// Medium-fast path: Handle cases that do not require full decomposition and recomposition.
if (!isMaybeOrNonZeroCC(norm16)) { // minNoNo <= norm16 < minMaybeYes
if (!doCompose) {
- return FALSE;
+ return false;
}
// Fast path for mapping a character that is immediately surrounded by boundaries.
// In this case, we need not decompose around the current character.
@@ -1559,7 +1559,7 @@ Normalizer2Impl::compose(const UChar *src, const UChar *limit,
UChar l = (UChar)(prev-Hangul::JAMO_L_BASE);
if(l<Hangul::JAMO_L_COUNT) {
if (!doCompose) {
- return FALSE;
+ return false;
}
int32_t t;
if (src != limit &&
@@ -1599,7 +1599,7 @@ Normalizer2Impl::compose(const UChar *src, const UChar *limit,
// The current character is a Jamo Trailing consonant,
// compose with previous Hangul LV that does not contain a Jamo T.
if (!doCompose) {
- return FALSE;
+ return false;
}
UChar32 syllable = prev + c - Hangul::JAMO_T_BASE;
--prevSrc; // Replace the Hangul LV as well.
@@ -1622,7 +1622,7 @@ Normalizer2Impl::compose(const UChar *src, const UChar *limit,
if (onlyContiguous /* FCC */ && getPreviousTrailCC(prevBoundary, prevSrc) > cc) {
// Fails FCD test, need to decompose and contiguously recompose.
if (!doCompose) {
- return FALSE;
+ return false;
}
} else {
// If !onlyContiguous (not FCC), then we ignore the tccc of
@@ -1634,7 +1634,7 @@ Normalizer2Impl::compose(const UChar *src, const UChar *limit,
if (doCompose) {
buffer.appendZeroCC(prevBoundary, limit, errorCode);
}
- return TRUE;
+ return true;
}
uint8_t prevCC = cc;
nextSrc = src;
@@ -1643,7 +1643,7 @@ Normalizer2Impl::compose(const UChar *src, const UChar *limit,
cc = getCCFromNormalYesOrMaybe(n16);
if (prevCC > cc) {
if (!doCompose) {
- return FALSE;
+ return false;
}
break;
}
@@ -1678,28 +1678,28 @@ Normalizer2Impl::compose(const UChar *src, const UChar *limit,
}
int32_t recomposeStartIndex=buffer.length();
// We know there is not a boundary here.
- decomposeShort(prevSrc, src, FALSE /* !stopAtCompBoundary */, onlyContiguous,
+ decomposeShort(prevSrc, src, false /* !stopAtCompBoundary */, onlyContiguous,
buffer, errorCode);
// Decompose until the next boundary.
- src = decomposeShort(src, limit, TRUE /* stopAtCompBoundary */, onlyContiguous,
+ src = decomposeShort(src, limit, true /* stopAtCompBoundary */, onlyContiguous,
buffer, errorCode);
if (U_FAILURE(errorCode)) {
break;
}
if ((src - prevSrc) > INT32_MAX) { // guard before buffer.equals()
errorCode = U_INDEX_OUTOFBOUNDS_ERROR;
- return TRUE;
+ return true;
}
recompose(buffer, recomposeStartIndex, onlyContiguous);
if(!doCompose) {
if(!buffer.equals(prevSrc, src)) {
- return FALSE;
+ return false;
}
buffer.remove();
}
prevBoundary=src;
}
- return TRUE;
+ return true;
}
// Very similar to compose(): Make the same changes in both places if relevant.
@@ -1846,7 +1846,7 @@ void Normalizer2Impl::composeAndAppend(const UChar *src, const UChar *limit,
middle.append(src, (int32_t)(firstStarterInSrc-src));
const UChar *middleStart=middle.getBuffer();
compose(middleStart, middleStart+middle.length(), onlyContiguous,
- TRUE, buffer, errorCode);
+ true, buffer, errorCode);
if(U_FAILURE(errorCode)) {
return;
}
@@ -1854,7 +1854,7 @@ void Normalizer2Impl::composeAndAppend(const UChar *src, const UChar *limit,
}
}
if(doCompose) {
- compose(src, limit, onlyContiguous, TRUE, buffer, errorCode);
+ compose(src, limit, onlyContiguous, true, buffer, errorCode);
} else {
if(limit==NULL) { // appendZeroCC() needs limit!=NULL
limit=u_strchr(src, 0);
@@ -1883,7 +1883,7 @@ Normalizer2Impl::composeUTF8(uint32_t options, UBool onlyContiguous,
ByteSinkUtil::appendUnchanged(prevBoundary, limit,
*sink, options, edits, errorCode);
}
- return TRUE;
+ return true;
}
if (*src < minNoMaybeLead) {
++src;
@@ -1904,7 +1904,7 @@ Normalizer2Impl::composeUTF8(uint32_t options, UBool onlyContiguous,
// Medium-fast path: Handle cases that do not require full decomposition and recomposition.
if (!isMaybeOrNonZeroCC(norm16)) { // minNoNo <= norm16 < minMaybeYes
if (sink == nullptr) {
- return FALSE;
+ return false;
}
// Fast path for mapping a character that is immediately surrounded by boundaries.
// In this case, we need not decompose around the current character.
@@ -1972,7 +1972,7 @@ Normalizer2Impl::composeUTF8(uint32_t options, UBool onlyContiguous,
UChar32 l = prev - Hangul::JAMO_L_BASE;
if ((uint32_t)l < Hangul::JAMO_L_COUNT) {
if (sink == nullptr) {
- return FALSE;
+ return false;
}
int32_t t = getJamoTMinusBase(src, limit);
if (t >= 0) {
@@ -2008,7 +2008,7 @@ Normalizer2Impl::composeUTF8(uint32_t options, UBool onlyContiguous,
// The current character is a Jamo Trailing consonant,
// compose with previous Hangul LV that does not contain a Jamo T.
if (sink == nullptr) {
- return FALSE;
+ return false;
}
UChar32 syllable = prev + getJamoTMinusBase(prevSrc, src);
prevSrc -= 3; // Replace the Hangul LV as well.
@@ -2031,7 +2031,7 @@ Normalizer2Impl::composeUTF8(uint32_t options, UBool onlyContiguous,
if (onlyContiguous /* FCC */ && getPreviousTrailCC(prevBoundary, prevSrc) > cc) {
// Fails FCD test, need to decompose and contiguously recompose.
if (sink == nullptr) {
- return FALSE;
+ return false;
}
} else {
// If !onlyContiguous (not FCC), then we ignore the tccc of
@@ -2044,7 +2044,7 @@ Normalizer2Impl::composeUTF8(uint32_t options, UBool onlyContiguous,
ByteSinkUtil::appendUnchanged(prevBoundary, limit,
*sink, options, edits, errorCode);
}
- return TRUE;
+ return true;
}
uint8_t prevCC = cc;
nextSrc = src;
@@ -2053,7 +2053,7 @@ Normalizer2Impl::composeUTF8(uint32_t options, UBool onlyContiguous,
cc = getCCFromNormalYesOrMaybe(n16);
if (prevCC > cc) {
if (sink == nullptr) {
- return FALSE;
+ return false;
}
break;
}
@@ -2098,12 +2098,12 @@ Normalizer2Impl::composeUTF8(uint32_t options, UBool onlyContiguous,
}
if ((src - prevSrc) > INT32_MAX) { // guard before buffer.equals()
errorCode = U_INDEX_OUTOFBOUNDS_ERROR;
- return TRUE;
+ return true;
}
recompose(buffer, 0, onlyContiguous);
if (!buffer.equals(prevSrc, src)) {
if (sink == nullptr) {
- return FALSE;
+ return false;
}
if (prevBoundary != prevSrc &&
!ByteSinkUtil::appendUnchanged(prevBoundary, prevSrc,
@@ -2117,12 +2117,12 @@ Normalizer2Impl::composeUTF8(uint32_t options, UBool onlyContiguous,
prevBoundary = src;
}
}
- return TRUE;
+ return true;
}
UBool Normalizer2Impl::hasCompBoundaryBefore(const UChar *src, const UChar *limit) const {
if (src == limit || *src < minCompNoMaybeCP) {
- return TRUE;
+ return true;
}
UChar32 c;
uint16_t norm16;
@@ -2132,7 +2132,7 @@ UBool Normalizer2Impl::hasCompBoundaryBefore(const UChar *src, const UChar *limi
UBool Normalizer2Impl::hasCompBoundaryBefore(const uint8_t *src, const uint8_t *limit) const {
if (src == limit) {
- return TRUE;
+ return true;
}
uint16_t norm16;
UCPTRIE_FAST_U8_NEXT(normTrie, UCPTRIE_16, src, limit, norm16);
@@ -2142,7 +2142,7 @@ UBool Normalizer2Impl::hasCompBoundaryBefore(const uint8_t *src, const uint8_t *
UBool Normalizer2Impl::hasCompBoundaryAfter(const UChar *start, const UChar *p,
UBool onlyContiguous) const {
if (start == p) {
- return TRUE;
+ return true;
}
UChar32 c;
uint16_t norm16;
@@ -2153,7 +2153,7 @@ UBool Normalizer2Impl::hasCompBoundaryAfter(const UChar *start, const UChar *p,
UBool Normalizer2Impl::hasCompBoundaryAfter(const uint8_t *start, const uint8_t *p,
UBool onlyContiguous) const {
if (start == p) {
- return TRUE;
+ return true;
}
uint16_t norm16;
UCPTRIE_FAST_U8_PREV(normTrie, UCPTRIE_16, start, p, norm16);
@@ -2399,7 +2399,7 @@ Normalizer2Impl::makeFCD(const UChar *src, const UChar *limit,
* The source text does not fulfill the conditions for FCD.
* Decompose and reorder a limited piece of the text.
*/
- decomposeShort(prevBoundary, src, FALSE, FALSE, *buffer, errorCode);
+ decomposeShort(prevBoundary, src, false, false, *buffer, errorCode);
if (U_FAILURE(errorCode)) {
break;
}
@@ -2665,7 +2665,7 @@ UBool Normalizer2Impl::isCanonSegmentStarter(UChar32 c) const {
UBool Normalizer2Impl::getCanonStartSet(UChar32 c, UnicodeSet &set) const {
int32_t canonValue=getCanonValue(c)&~CANON_NOT_SEGMENT_STARTER;
if(canonValue==0) {
- return FALSE;
+ return false;
}
set.clear();
int32_t value=canonValue&CANON_VALUE_MASK;
@@ -2684,7 +2684,7 @@ UBool Normalizer2Impl::getCanonStartSet(UChar32 c, UnicodeSet &set) const {
addComposites(getCompositionsList(norm16), set);
}
}
- return TRUE;
+ return true;
}
U_NAMESPACE_END
diff --git a/thirdparty/icu4c/common/normalizer2impl.h b/thirdparty/icu4c/common/normalizer2impl.h
index 7c85448b71..449e778384 100644
--- a/thirdparty/icu4c/common/normalizer2impl.h
+++ b/thirdparty/icu4c/common/normalizer2impl.h
@@ -131,7 +131,7 @@ public:
}
}
private:
- Hangul(); // no instantiation
+ Hangul() = delete; // no instantiation
};
class Normalizer2Impl;
@@ -730,7 +730,7 @@ private:
const uint16_t *extraData; // mappings and/or compositions for yesYes, yesNo & noNo characters
const uint8_t *smallFCD; // [0x100] one bit per 32 BMP code points, set if any FCD!=0
- UInitOnce fCanonIterDataInitOnce = U_INITONCE_INITIALIZER;
+ UInitOnce fCanonIterDataInitOnce {};
CanonIterData *fCanonIterData;
};
@@ -759,7 +759,7 @@ public:
// Must be used only when it is known that norm2 is a Normalizer2WithImpl instance.
static const Normalizer2Impl *getImpl(const Normalizer2 *norm2);
private:
- Normalizer2Factory(); // No instantiation.
+ Normalizer2Factory() = delete; // No instantiation.
};
U_NAMESPACE_END
diff --git a/thirdparty/icu4c/common/normlzr.cpp b/thirdparty/icu4c/common/normlzr.cpp
index 1f4fa15179..58de61591f 100644
--- a/thirdparty/icu4c/common/normlzr.cpp
+++ b/thirdparty/icu4c/common/normlzr.cpp
@@ -205,7 +205,7 @@ Normalizer::isNormalized(const UnicodeString& source,
return n2->isNormalized(source, status);
}
} else {
- return FALSE;
+ return false;
}
}
@@ -483,7 +483,7 @@ Normalizer::nextNormalize() {
currentIndex=nextIndex;
text->setIndex(nextIndex);
if(!text->hasNext()) {
- return FALSE;
+ return false;
}
// Skip at least one character so we make progress.
UnicodeString segment(text->next32PostInc());
@@ -507,7 +507,7 @@ Normalizer::previousNormalize() {
nextIndex=currentIndex;
text->setIndex(currentIndex);
if(!text->hasPrevious()) {
- return FALSE;
+ return false;
}
UnicodeString segment;
while(text->hasPrevious()) {
diff --git a/thirdparty/icu4c/common/patternprops.cpp b/thirdparty/icu4c/common/patternprops.cpp
index c38a7e276d..da3243d301 100644
--- a/thirdparty/icu4c/common/patternprops.cpp
+++ b/thirdparty/icu4c/common/patternprops.cpp
@@ -118,49 +118,49 @@ static const uint32_t syntaxOrWhiteSpace2000[]={
UBool
PatternProps::isSyntax(UChar32 c) {
if(c<0) {
- return FALSE;
+ return false;
} else if(c<=0xff) {
return (UBool)(latin1[c]>>1)&1;
} else if(c<0x2010) {
- return FALSE;
+ return false;
} else if(c<=0x3030) {
uint32_t bits=syntax2000[index2000[(c-0x2000)>>5]];
return (UBool)((bits>>(c&0x1f))&1);
} else if(0xfd3e<=c && c<=0xfe46) {
return c<=0xfd3f || 0xfe45<=c;
} else {
- return FALSE;
+ return false;
}
}
UBool
PatternProps::isSyntaxOrWhiteSpace(UChar32 c) {
if(c<0) {
- return FALSE;
+ return false;
} else if(c<=0xff) {
return (UBool)(latin1[c]&1);
} else if(c<0x200e) {
- return FALSE;
+ return false;
} else if(c<=0x3030) {
uint32_t bits=syntaxOrWhiteSpace2000[index2000[(c-0x2000)>>5]];
return (UBool)((bits>>(c&0x1f))&1);
} else if(0xfd3e<=c && c<=0xfe46) {
return c<=0xfd3f || 0xfe45<=c;
} else {
- return FALSE;
+ return false;
}
}
UBool
PatternProps::isWhiteSpace(UChar32 c) {
if(c<0) {
- return FALSE;
+ return false;
} else if(c<=0xff) {
return (UBool)(latin1[c]>>2)&1;
} else if(0x200e<=c && c<=0x2029) {
return c<=0x200f || 0x2028<=c;
} else {
- return FALSE;
+ return false;
}
}
@@ -207,15 +207,15 @@ PatternProps::trimWhiteSpace(const UChar *s, int32_t &length) {
UBool
PatternProps::isIdentifier(const UChar *s, int32_t length) {
if(length<=0) {
- return FALSE;
+ return false;
}
const UChar *limit=s+length;
do {
if(isSyntaxOrWhiteSpace(*s++)) {
- return FALSE;
+ return false;
}
} while(s<limit);
- return TRUE;
+ return true;
}
const UChar *
diff --git a/thirdparty/icu4c/common/patternprops.h b/thirdparty/icu4c/common/patternprops.h
index 95898d580c..4ead56e1cd 100644
--- a/thirdparty/icu4c/common/patternprops.h
+++ b/thirdparty/icu4c/common/patternprops.h
@@ -90,7 +90,7 @@ public:
static const UChar *skipIdentifier(const UChar *s, int32_t length);
private:
- PatternProps(); // no constructor: all static methods
+ PatternProps() = delete; // no constructor: all static methods
};
U_NAMESPACE_END
diff --git a/thirdparty/icu4c/common/propname.cpp b/thirdparty/icu4c/common/propname.cpp
index a12eb7d913..8f0045fdac 100644
--- a/thirdparty/icu4c/common/propname.cpp
+++ b/thirdparty/icu4c/common/propname.cpp
@@ -218,7 +218,7 @@ const char *PropNameData::getName(const char *nameGroup, int32_t nameIndex) {
UBool PropNameData::containsName(BytesTrie &trie, const char *name) {
if(name==NULL) {
- return FALSE;
+ return false;
}
UStringTrieResult result=USTRINGTRIE_NO_VALUE;
char c;
@@ -229,7 +229,7 @@ UBool PropNameData::containsName(BytesTrie &trie, const char *name) {
continue;
}
if(!USTRINGTRIE_HAS_NEXT(result)) {
- return FALSE;
+ return false;
}
result=trie.next((uint8_t)c);
}
diff --git a/thirdparty/icu4c/common/propname_data.h b/thirdparty/icu4c/common/propname_data.h
index c98a0f790b..1e247874b6 100644
--- a/thirdparty/icu4c/common/propname_data.h
+++ b/thirdparty/icu4c/common/propname_data.h
@@ -11,9 +11,9 @@
U_NAMESPACE_BEGIN
-const int32_t PropNameData::indexes[8]={0x20,0x163c,0x521b,0xab62,0xab62,0xab62,0x31,0};
+const int32_t PropNameData::indexes[8]={0x20,0x1660,0x5294,0xacd0,0xacd0,0xacd0,0x31,0};
-const int32_t PropNameData::valueMaps[1415]={
+const int32_t PropNameData::valueMaps[1424]={
6,0,0x48,0,0xf1,0x368,0xf1,0x37e,0xf1,0x393,0xf1,0x3a9,0xf1,0x3b4,0xf1,0x3d5,
0xf1,0x3e5,0xf1,0x3f4,0xf1,0x402,0xf1,0x426,0xf1,0x43d,0xf1,0x455,0xf1,0x46c,0xf1,0x47b,
0xf1,0x48a,0xf1,0x49b,0xf1,0x4a9,0xf1,0x4bb,0xf1,0x4d5,0xf1,0x4f0,0xf1,0x505,0xf1,0x522,
@@ -23,12 +23,12 @@ const int32_t PropNameData::valueMaps[1415]={
0xf1,0x71a,0xf1,0x722,0xf1,0x72b,0xf1,0x738,0xf1,0x74b,0xf1,0x768,0xf1,0x785,0xf1,0x7a2,
0xf1,0x7c0,0xf1,0x7de,0xf1,0x802,0xf1,0x80f,0xf1,0x829,0xf1,0x83e,0xf1,0x859,0xf1,0x870,
0xf1,0x887,0xf1,0x8a9,0xf1,0x8c8,0xf1,0x8e1,0xf1,0x90e,0xf1,0x947,0xf1,0x978,0xf1,0x9a7,
-0xf1,0x9d6,0xf1,0x1000,0x1019,0x9eb,0x16d,0xc0b,0x188,0x3196,0xf7,0x31b5,0x2cd,0x32f3,0x2e3,0x334d,
-0x2ed,0x35aa,0x30f,0x3ed5,0x37b,0x3f45,0x385,0x41df,0x3b4,0x421d,0x3bc,0x4d50,0x486,0x4dce,0x490,0x4df3,
-0x496,0x4e0d,0x49c,0x4e2e,0x4a3,0x4e48,0xf7,0x4e6d,0xf7,0x4e93,0x4aa,0x4f3d,0x4c0,0x4fb6,0x4d3,0x5068,
-0x4ee,0x509f,0x4f5,0x527f,0x509,0x56ff,0x531,0x2000,0x2001,0x575e,0x539,0x3000,0x3001,0x57ea,0,0x4000,
-0x400e,0x57fc,0,0x5805,0,0x581f,0,0x5830,0,0x5841,0,0x5857,0,0x5860,0,0x587d,
-0,0x589b,0,0x58b9,0,0x58d7,0,0x58ed,0,0x5901,0,0x5917,0,0x7000,0x7001,0x5930,
+0xf1,0x9d6,0xf1,0x1000,0x1019,0x9eb,0x16d,0xc0b,0x188,0x3279,0xf7,0x3298,0x2d4,0x33d6,0x2ea,0x3430,
+0x2f4,0x368d,0x316,0x3fb8,0x382,0x4028,0x38c,0x42c2,0x3bb,0x4300,0x3c3,0x4e45,0x48f,0x4ec3,0x499,0x4ee8,
+0x49f,0x4f02,0x4a5,0x4f23,0x4ac,0x4f3d,0xf7,0x4f62,0xf7,0x4f88,0x4b3,0x5032,0x4c9,0x50ab,0x4dc,0x515d,
+0x4f7,0x5194,0x4fe,0x5374,0x512,0x57f4,0x53a,0x2000,0x2001,0x5853,0x542,0x3000,0x3001,0x58df,0,0x4000,
+0x400e,0x58f1,0,0x58fa,0,0x5914,0,0x5925,0,0x5936,0,0x594c,0,0x5955,0,0x5972,
+0,0x5990,0,0x59ae,0,0x59cc,0,0x59e2,0,0x59f6,0,0x5a0c,0,0x7000,0x7001,0x5a25,
0,0x844,0x12,0,1,0x12,0x20,0x862,0x4a,0,1,6,7,8,9,0xa,
0xb,0xc,0xd,0xe,0xf,0x10,0x11,0x12,0x13,0x14,0x15,0x16,0x17,0x18,0x19,0x1a,
0x1b,0x1c,0x1d,0x1e,0x1f,0x20,0x21,0x22,0x23,0x24,0x54,0x5b,0x67,0x6b,0x76,0x7a,
@@ -38,7 +38,7 @@ const int32_t PropNameData::valueMaps[1415]={
0x1af,0x1bc,0x1c9,0x1d6,0x1e3,0x1f0,0x1fd,0x20c,0x21b,0x22a,0x239,0x248,0x257,0x266,0x275,0x28f,
0x2a3,0x2b7,0x2d2,0x2e1,0x2ea,0x2fa,0x302,0x30b,0x31a,0x323,0x333,0x344,0x355,0xa03,1,0,
0x17,0x9fa,0xa0b,0xa1c,0xa30,0xa47,0xa5f,0xa71,0xa86,0xa9d,0xab2,0xac2,0xad4,0xaf1,0xb0d,0xb1f,
-0xb3c,0xb58,0xb74,0xb89,0xb9e,0xbb8,0xbd3,0xbee,0xba5,1,0,0x141,0xc16,0xc23,0xc36,0xc5e,
+0xb3c,0xb58,0xb74,0xb89,0xb9e,0xbb8,0xbd3,0xbee,0xba5,1,0,0x148,0xc16,0xc23,0xc36,0xc5e,
0xc7c,0xc9a,0xcb2,0xcdd,0xd07,0xd1f,0xd32,0xd45,0xd54,0xd63,0xd72,0xd81,0xd98,0xda9,0xdbc,0xdcf,
0xddc,0xde9,0xdf8,0xe09,0xe1e,0xe2f,0xe3a,0xe43,0xe54,0xe65,0xe78,0xe8a,0xe9d,0xeb0,0xeef,0xefc,
0xf09,0xf16,0xf2b,0xf5b,0xf75,0xf96,0xfc1,0xfe4,0x1042,0x1069,0x1084,0x1093,0x10ba,0x10e2,0x1105,0x1128,
@@ -58,54 +58,54 @@ const int32_t PropNameData::valueMaps[1415]={
0x2a34,0x2a51,0x2a93,0x2aba,0x2ac7,0x2adc,0x2b00,0x2b26,0x2b5f,0x2b70,0x2b94,0x2b9f,0x2bac,0x2bbb,0x2be0,0x2c0e,
0x2c2a,0x2c47,0x2c54,0x2c65,0x2c83,0x2ca6,0x2cc3,0x2cd0,0x2cf0,0x2d0d,0x2d2e,0x2d57,0x2d68,0x2d87,0x2da0,0x2db9,
0x2dca,0x2e13,0x2e24,0x2e3d,0x2e6c,0x2e99,0x2ebe,0x2f00,0x2f1c,0x2f2b,0x2f42,0x2f70,0x2f89,0x2fb2,0x2fcc,0x3007,
-0x3025,0x3034,0x3054,0x306f,0x3093,0x30af,0x30cd,0x30eb,0x3102,0x3111,0x311c,0x3159,0x316c,0x1f6b,1,0,
-0x12,0x31cc,0x31dc,0x31ef,0x31ff,0x320f,0x321e,0x322e,0x3240,0x3253,0x3265,0x3275,0x3285,0x3294,0x32a3,0x32b3,
-0x32c0,0x32cf,0x32e3,0x2029,1,0,6,0x3308,0x3313,0x3320,0x332d,0x333a,0x3345,0x206d,1,0,
-0x1e,0x3362,0x3371,0x3386,0x339b,0x33b0,0x33c4,0x33d5,0x33e9,0x33fc,0x340d,0x3426,0x3438,0x3449,0x345d,0x3470,
-0x3488,0x349a,0x34a5,0x34b5,0x34c3,0x34d8,0x34ed,0x3503,0x351d,0x3533,0x3543,0x3557,0x356b,0x357c,0x3594,0x2298,
-1,0,0x68,0x35bc,0x35df,0x35e8,0x35f5,0x3600,0x3609,0x3614,0x361d,0x3636,0x363b,0x3644,0x3661,0x366a,
-0x3677,0x3680,0x36a4,0x36ab,0x36b4,0x36c7,0x36d2,0x36db,0x36e6,0x36ff,0x3708,0x3717,0x3722,0x372b,0x3736,0x373f,
-0x3746,0x374f,0x375a,0x3763,0x377c,0x3785,0x3792,0x379d,0x37ae,0x37b9,0x37ce,0x37e5,0x37ee,0x37f7,0x3810,0x381b,
-0x3824,0x382d,0x3844,0x3861,0x386c,0x387d,0x3888,0x388f,0x389c,0x38a9,0x38d6,0x38eb,0x38f4,0x390f,0x3932,0x3953,
-0x3974,0x3999,0x39c0,0x39e1,0x3a04,0x3a25,0x3a4c,0x3a6d,0x3a92,0x3ab1,0x3ad0,0x3aef,0x3b0c,0x3b2d,0x3b4e,0x3b71,
-0x3b96,0x3bb5,0x3bd4,0x3bf5,0x3c1c,0x3c41,0x3c60,0x3c81,0x3ca4,0x3cbf,0x3cd8,0x3cf3,0x3d0c,0x3d29,0x3d44,0x3d61,
-0x3d80,0x3d9d,0x3dba,0x3dd9,0x3df6,0x3e11,0x3e2e,0x3e4b,0x3e7e,0x3ea5,0x3eb8,0x25fb,1,0,6,0x3ee6,
-0x3ef5,0x3f05,0x3f15,0x3f25,0x3f36,0x2659,1,0,0x2b,0x3f54,0x3f60,0x3f6e,0x3f7d,0x3f8c,0x3f9c,0x3fad,
-0x3fc1,0x3fd6,0x3fec,0x3fff,0x4013,0x4023,0x402c,0x4037,0x4047,0x4063,0x4075,0x4083,0x4092,0x409e,0x40b3,0x40c7,
-0x40da,0x40e8,0x40fc,0x410a,0x4114,0x4126,0x4132,0x4140,0x4150,0x4157,0x415e,0x4165,0x416c,0x4173,0x4189,0x41aa,
-0x870,0x41bc,0x41c7,0x41d6,0x28b2,1,0,4,0x41f0,0x41fb,0x4207,0x4211,0x28d8,1,0,0xc6,
-0x4228,0x4235,0x424a,0x4257,0x4266,0x4274,0x4283,0x4292,0x42a4,0x42b3,0x42c1,0x42d2,0x42e1,0x42f0,0x42fd,0x4309,
-0x4318,0x4327,0x4331,0x433e,0x434b,0x435a,0x4368,0x4377,0x4383,0x438d,0x4399,0x43a9,0x43b9,0x43c7,0x43d3,0x43e4,
-0x43f0,0x43fc,0x440a,0x4417,0x4423,0x4430,0xe2f,0x443d,0x444b,0x4465,0x446e,0x447c,0x448a,0x4496,0x44a5,0x44b3,
-0x44c1,0x44cd,0x44dc,0x44ea,0x44f8,0x4505,0x4514,0x452f,0x453e,0x454f,0x4560,0x4573,0x4585,0x4594,0x45a6,0x45b5,
-0x45c1,0x45cc,0x1f3f,0x45d9,0x45e4,0x45ef,0x45fa,0x4605,0x4620,0x462b,0x4636,0x4641,0x4654,0x4668,0x4673,0x4682,
-0x4691,0x469c,0x46a7,0x46b4,0x46c3,0x46d1,0x46dc,0x46f7,0x4701,0x4712,0x4723,0x4732,0x4743,0x474e,0x4759,0x4764,
-0x476f,0x477a,0x4785,0x4790,0x479a,0x47a5,0x47b5,0x47c0,0x47ce,0x47db,0x47e6,0x47f5,0x4802,0x480f,0x481e,0x482b,
-0x483c,0x484e,0x485e,0x4869,0x487c,0x4893,0x48a1,0x48ae,0x48b9,0x48c6,0x48d7,0x48f3,0x4909,0x4914,0x4931,0x4941,
-0x4950,0x495b,0x4966,0x2059,0x4972,0x497d,0x4995,0x49a5,0x49b4,0x49c2,0x49d0,0x49db,0x49e6,0x49fa,0x4a11,0x4a29,
-0x4a39,0x4a49,0x4a59,0x4a6b,0x4a76,0x4a81,0x4a8b,0x4a97,0x4aa5,0x4ab8,0x4ac4,0x4ad1,0x4adc,0x4af8,0x4b05,0x4b13,
-0x4b2c,0x2959,0x4b3b,0x277a,0x4b48,0x4b56,0x4b68,0x4b76,0x4b82,0x4b92,0x2b94,0x4ba0,0x4bac,0x4bb7,0x4bc2,0x4bcd,
-0x4be1,0x4bef,0x4c06,0x4c12,0x4c26,0x4c34,0x4c46,0x4c5c,0x4c6a,0x4c7c,0x4c8a,0x4ca7,0x4cb9,0x4cc6,0x4cd7,0x4ce9,
-0x4d03,0x4d10,0x4d23,0x4d34,0x3111,0x4d41,0x331b,1,0,6,0x4d6a,0x4d7d,0x4d8d,0x4d9b,0x4dac,0x4dbc,
-0x3377,0x12,0,1,0x4de6,0x4dec,0x3384,0x12,0,1,0x4de6,0x4dec,0x3391,1,0,3,
-0x4de6,0x4dec,0x4e25,0x33a7,1,0,3,0x4de6,0x4dec,0x4e25,0x33bd,1,0,0x12,0x4eaf,0x4eb9,
-0x4ec5,0x4ecc,0x4ed7,0x4edc,0x4ee3,0x4eea,0x4ef3,0x4ef8,0x4efd,0x4f0d,0x870,0x41bc,0x4f19,0x41c7,0x4f29,0x41d6,
-0x3466,1,0,0xf,0x4eaf,0x4f50,0x4f5a,0x4f64,0x4f6f,0x4092,0x4f79,0x4f85,0x4f8d,0x4f94,0x4f9e,0x4ec5,
-0x4ecc,0x4edc,0x4fa8,0x34ed,1,0,0x17,0x4eaf,0x4fc5,0x4f64,0x4fd1,0x4fde,0x4fec,0x4092,0x4ff7,0x4ec5,
-0x5008,0x4edc,0x5017,0x5025,0x870,0x41aa,0x5031,0x5042,0x41bc,0x4f19,0x41c7,0x4f29,0x41d6,0x5053,0x360a,1,
-0,3,0x5086,0x508e,0x5096,0x3623,1,0,0x10,0x50bf,0x50c6,0x50d5,0x50f6,0x5119,0x5124,0x5143,
-0x515a,0x5167,0x5170,0x518f,0x51c2,0x51dd,0x520c,0x5229,0x524e,0x36bc,1,0,0x24,0x529d,0x52aa,0x52bd,
-0x52ca,0x52f7,0x531c,0x5331,0x5350,0x5371,0x539e,0x53d7,0x53fa,0x541d,0x544a,0x547f,0x54a6,0x54cf,0x5506,0x5535,
-0x5556,0x557b,0x558a,0x55ad,0x55c4,0x55d1,0x55e0,0x55fd,0x5616,0x5639,0x565e,0x5677,0x568c,0x569b,0x56ac,0x56b9,
-0x56da,0x388c,1,0,4,0x5718,0x5723,0x573b,0x5753,0x38c8,0x36,1,2,4,8,0xe,
-0x10,0x20,0x3e,0x40,0x80,0x100,0x1c0,0x200,0x400,0x800,0xe00,0x1000,0x2000,0x4000,0x7000,0x8000,
-0x10000,0x20000,0x40000,0x78001,0x80000,0x100000,0x200000,0x400000,0x800000,0x1000000,0x2000000,0x4000000,0x8000000,0xf000000,0x10000000,0x20000000,
-0x30f80000,0x3362,0x3371,0x3386,0x339b,0x578c,0x33b0,0x33c4,0x5782,0x33d5,0x33e9,0x33fc,0x579d,0x340d,0x3426,0x3438,
-0x57b4,0x3449,0x345d,0x3470,0x57dd,0x3488,0x349a,0x34a5,0x34b5,0x5779,0x34c3,0x34d8,0x34ed,0x3503,0x351d,0x3533,
-0x3543,0x3557,0x356b,0x57d3,0x357c,0x3594,0x57be
+0x3025,0x3034,0x3054,0x306f,0x3093,0x30af,0x30cd,0x30eb,0x3102,0x3111,0x311c,0x3159,0x316c,0x3196,0x31b6,0x31e4,
+0x3208,0x3230,0x3255,0x3260,0x1fa9,1,0,0x12,0x32af,0x32bf,0x32d2,0x32e2,0x32f2,0x3301,0x3311,0x3323,
+0x3336,0x3348,0x3358,0x3368,0x3377,0x3386,0x3396,0x33a3,0x33b2,0x33c6,0x2067,1,0,6,0x33eb,0x33f6,
+0x3403,0x3410,0x341d,0x3428,0x20ab,1,0,0x1e,0x3445,0x3454,0x3469,0x347e,0x3493,0x34a7,0x34b8,0x34cc,
+0x34df,0x34f0,0x3509,0x351b,0x352c,0x3540,0x3553,0x356b,0x357d,0x3588,0x3598,0x35a6,0x35bb,0x35d0,0x35e6,0x3600,
+0x3616,0x3626,0x363a,0x364e,0x365f,0x3677,0x22d6,1,0,0x68,0x369f,0x36c2,0x36cb,0x36d8,0x36e3,0x36ec,
+0x36f7,0x3700,0x3719,0x371e,0x3727,0x3744,0x374d,0x375a,0x3763,0x3787,0x378e,0x3797,0x37aa,0x37b5,0x37be,0x37c9,
+0x37e2,0x37eb,0x37fa,0x3805,0x380e,0x3819,0x3822,0x3829,0x3832,0x383d,0x3846,0x385f,0x3868,0x3875,0x3880,0x3891,
+0x389c,0x38b1,0x38c8,0x38d1,0x38da,0x38f3,0x38fe,0x3907,0x3910,0x3927,0x3944,0x394f,0x3960,0x396b,0x3972,0x397f,
+0x398c,0x39b9,0x39ce,0x39d7,0x39f2,0x3a15,0x3a36,0x3a57,0x3a7c,0x3aa3,0x3ac4,0x3ae7,0x3b08,0x3b2f,0x3b50,0x3b75,
+0x3b94,0x3bb3,0x3bd2,0x3bef,0x3c10,0x3c31,0x3c54,0x3c79,0x3c98,0x3cb7,0x3cd8,0x3cff,0x3d24,0x3d43,0x3d64,0x3d87,
+0x3da2,0x3dbb,0x3dd6,0x3def,0x3e0c,0x3e27,0x3e44,0x3e63,0x3e80,0x3e9d,0x3ebc,0x3ed9,0x3ef4,0x3f11,0x3f2e,0x3f61,
+0x3f88,0x3f9b,0x2639,1,0,6,0x3fc9,0x3fd8,0x3fe8,0x3ff8,0x4008,0x4019,0x2697,1,0,0x2b,
+0x4037,0x4043,0x4051,0x4060,0x406f,0x407f,0x4090,0x40a4,0x40b9,0x40cf,0x40e2,0x40f6,0x4106,0x410f,0x411a,0x412a,
+0x4146,0x4158,0x4166,0x4175,0x4181,0x4196,0x41aa,0x41bd,0x41cb,0x41df,0x41ed,0x41f7,0x4209,0x4215,0x4223,0x4233,
+0x423a,0x4241,0x4248,0x424f,0x4256,0x426c,0x428d,0x870,0x429f,0x42aa,0x42b9,0x28f0,1,0,4,0x42d3,
+0x42de,0x42ea,0x42f4,0x2916,1,0,0xc8,0x430b,0x4318,0x432d,0x433a,0x4349,0x4357,0x4366,0x4375,0x4387,
+0x4396,0x43a4,0x43b5,0x43c4,0x43d3,0x43e0,0x43ec,0x43fb,0x440a,0x4414,0x4421,0x442e,0x443d,0x444b,0x445a,0x4466,
+0x4470,0x447c,0x448c,0x449c,0x44aa,0x44b6,0x44c7,0x44d3,0x44df,0x44ed,0x44fa,0x4506,0x4513,0xe2f,0x4520,0x452e,
+0x4548,0x4551,0x455f,0x456d,0x4579,0x4588,0x4596,0x45a4,0x45b0,0x45bf,0x45cd,0x45db,0x45e8,0x45f7,0x4612,0x4621,
+0x4632,0x4643,0x4656,0x4668,0x4677,0x4689,0x4698,0x46a4,0x46af,0x1f3f,0x46bc,0x46c7,0x46d2,0x46dd,0x46e8,0x4703,
+0x470e,0x4719,0x4724,0x4737,0x474b,0x4756,0x4765,0x4774,0x477f,0x478a,0x4797,0x47a6,0x47b4,0x47bf,0x47da,0x47e4,
+0x47f5,0x4806,0x4815,0x4826,0x4831,0x483c,0x4847,0x4852,0x485d,0x4868,0x4873,0x487d,0x4888,0x4898,0x48a3,0x48b1,
+0x48be,0x48c9,0x48d8,0x48e5,0x48f2,0x4901,0x490e,0x491f,0x4931,0x4941,0x494c,0x495f,0x4976,0x4984,0x4991,0x499c,
+0x49a9,0x49ba,0x49d6,0x49ec,0x49f7,0x4a14,0x4a24,0x4a33,0x4a3e,0x4a49,0x2059,0x4a55,0x4a60,0x4a78,0x4a88,0x4a97,
+0x4aa5,0x4ab3,0x4abe,0x4ac9,0x4add,0x4af4,0x4b0c,0x4b1c,0x4b2c,0x4b3c,0x4b4e,0x4b59,0x4b64,0x4b6e,0x4b7a,0x4b88,
+0x4b9b,0x4ba7,0x4bb4,0x4bbf,0x4bdb,0x4be8,0x4bf6,0x4c0f,0x2959,0x4c1e,0x277a,0x4c2b,0x4c39,0x4c4b,0x4c59,0x4c65,
+0x4c75,0x2b94,0x4c83,0x4c8f,0x4c9a,0x4ca5,0x4cb0,0x4cc4,0x4cd2,0x4ce9,0x4cf5,0x4d09,0x4d17,0x4d29,0x4d3f,0x4d4d,
+0x4d5f,0x4d6d,0x4d8a,0x4d9c,0x4da9,0x4dba,0x4dcc,0x4de6,0x4df3,0x4e06,0x4e17,0x3111,0x4e24,0x3255,0x4e33,0x3370,
+1,0,6,0x4e5f,0x4e72,0x4e82,0x4e90,0x4ea1,0x4eb1,0x33cc,0x12,0,1,0x4edb,0x4ee1,0x33d9,
+0x12,0,1,0x4edb,0x4ee1,0x33e6,1,0,3,0x4edb,0x4ee1,0x4f1a,0x33fc,1,0,3,
+0x4edb,0x4ee1,0x4f1a,0x3412,1,0,0x12,0x4fa4,0x4fae,0x4fba,0x4fc1,0x4fcc,0x4fd1,0x4fd8,0x4fdf,0x4fe8,
+0x4fed,0x4ff2,0x5002,0x870,0x429f,0x500e,0x42aa,0x501e,0x42b9,0x34bb,1,0,0xf,0x4fa4,0x5045,0x504f,
+0x5059,0x5064,0x4175,0x506e,0x507a,0x5082,0x5089,0x5093,0x4fba,0x4fc1,0x4fd1,0x509d,0x3542,1,0,0x17,
+0x4fa4,0x50ba,0x5059,0x50c6,0x50d3,0x50e1,0x4175,0x50ec,0x4fba,0x50fd,0x4fd1,0x510c,0x511a,0x870,0x428d,0x5126,
+0x5137,0x429f,0x500e,0x42aa,0x501e,0x42b9,0x5148,0x365f,1,0,3,0x517b,0x5183,0x518b,0x3678,1,
+0,0x10,0x51b4,0x51bb,0x51ca,0x51eb,0x520e,0x5219,0x5238,0x524f,0x525c,0x5265,0x5284,0x52b7,0x52d2,0x5301,
+0x531e,0x5343,0x3711,1,0,0x24,0x5392,0x539f,0x53b2,0x53bf,0x53ec,0x5411,0x5426,0x5445,0x5466,0x5493,
+0x54cc,0x54ef,0x5512,0x553f,0x5574,0x559b,0x55c4,0x55fb,0x562a,0x564b,0x5670,0x567f,0x56a2,0x56b9,0x56c6,0x56d5,
+0x56f2,0x570b,0x572e,0x5753,0x576c,0x5781,0x5790,0x57a1,0x57ae,0x57cf,0x38e1,1,0,4,0x580d,0x5818,
+0x5830,0x5848,0x391d,0x36,1,2,4,8,0xe,0x10,0x20,0x3e,0x40,0x80,0x100,0x1c0,
+0x200,0x400,0x800,0xe00,0x1000,0x2000,0x4000,0x7000,0x8000,0x10000,0x20000,0x40000,0x78001,0x80000,0x100000,0x200000,
+0x400000,0x800000,0x1000000,0x2000000,0x4000000,0x8000000,0xf000000,0x10000000,0x20000000,0x30f80000,0x3445,0x3454,0x3469,0x347e,0x5881,0x3493,
+0x34a7,0x5877,0x34b8,0x34cc,0x34df,0x5892,0x34f0,0x3509,0x351b,0x58a9,0x352c,0x3540,0x3553,0x58d2,0x356b,0x357d,
+0x3588,0x3598,0x586e,0x35a6,0x35bb,0x35d0,0x35e6,0x3600,0x3616,0x3626,0x363a,0x364e,0x58c8,0x365f,0x3677,0x58b3
};
-const uint8_t PropNameData::bytesTries[15327]={
+const uint8_t PropNameData::bytesTries[15412]={
0,0x15,0x6d,0xc3,0xc7,0x73,0xc2,0x12,0x76,0x7a,0x76,0x6a,0x77,0xa2,0x52,0x78,
1,0x64,0x50,0x69,0x10,0x64,1,0x63,0x30,0x73,0x62,0x13,0x74,0x61,0x72,0x74,
0x63,0x60,0x16,0x6f,0x6e,0x74,0x69,0x6e,0x75,0x65,0x61,0x13,0x69,0x67,0x69,0x74,
@@ -292,7 +292,7 @@ const uint8_t PropNameData::bytesTries[15327]={
0x74,0x6f,0x72,0x2d,2,0x6c,0x3b,0x6e,0x2b,0x72,0x13,0x61,0x62,0x69,0x63,1,
0x6c,0x30,0x6e,0x14,0x75,0x6d,0x62,0x65,0x72,0x2b,0x14,0x65,0x74,0x74,0x65,0x72,
0x3b,0x2e,1,0x6e,0x45,0x6f,0x1c,0x75,0x6e,0x64,0x61,0x72,0x79,0x6e,0x65,0x75,
-0x74,0x72,0x61,0x6c,0x45,0,0x16,0x6d,0xc9,0x14,0x74,0xc2,0x30,0x77,0x89,0x77,
+0x74,0x72,0x61,0x6c,0x45,0,0x16,0x6d,0xc9,0x20,0x74,0xc2,0x30,0x77,0x89,0x77,
0x86,0x79,0xa2,0x46,0x7a,1,0x61,0x58,0x6e,0x1a,0x61,0x6d,0x65,0x6e,0x6e,0x79,
0x6d,0x75,0x73,0x69,0x63,0xa4,0x40,0x19,0x61,0x6c,0x6e,0x6f,0x74,0x61,0x74,0x69,
0x6f,0x6e,0xa5,0x40,0x1c,0x6e,0x61,0x62,0x61,0x7a,0x61,0x72,0x73,0x71,0x75,0x61,
@@ -436,637 +436,643 @@ const uint8_t PropNameData::bytesTries[15327]={
0x6e,0x67,0xa5,0x27,0x10,0x6f,0xa3,0x92,0x14,0x62,0x6c,0x6f,0x63,0x6b,0x21,1,
0x6d,0x2c,0x73,0x11,0x68,0x75,0xa5,0x15,0x17,0x62,0x65,0x72,0x66,0x6f,0x72,0x6d,
0x73,0x7b,0x61,0x44,0x62,0x21,0x65,0x10,0x77,1,0x61,0xa5,0xe,0x74,0x14,0x61,
-0x69,0x6c,0x75,0x65,0xa3,0x8b,1,0x62,0x38,0x6e,0x17,0x64,0x69,0x6e,0x61,0x67,
-0x61,0x72,0x69,0xa5,0x26,0x15,0x61,0x74,0x61,0x65,0x61,0x6e,0xa3,0xef,0x67,0xc4,
-0x43,0x6a,0xc1,0xca,0x6a,0xa2,0xdf,0x6b,0xa2,0xf8,0x6c,4,0x61,0x54,0x65,0xa2,
-0x6b,0x69,0xa2,0x82,0x6f,0xa2,0xc1,0x79,1,0x63,0x2e,0x64,0x12,0x69,0x61,0x6e,
-0xa3,0xa9,0x12,0x69,0x61,0x6e,0xa3,0xa7,1,0x6f,0x55,0x74,0x11,0x69,0x6e,1,
-0x31,0x96,0x65,0x11,0x78,0x74,6,0x64,0x21,0x64,0xa3,0x95,0x65,0x2c,0x66,0xa5,
-0x39,0x67,0xa5,0x3a,0xa2,0xe7,0x13,0x6e,0x64,0x65,0x64,6,0x64,0xc,0x64,0xa3,
-0x95,0x65,0xa3,0xe7,0x66,0xa5,0x39,0x67,0xa5,0x3a,0x61,0x2a,0x62,0x29,0x63,0xa3,
-0x94,0x26,0x18,0x64,0x64,0x69,0x74,0x69,0x6f,0x6e,0x61,0x6c,0x6d,0x24,0x12,0x73,
-0x75,0x70,0x24,0x16,0x70,0x6c,0x65,0x6d,0x65,0x6e,0x74,0x25,1,0x70,0x42,0x74,
-0x1d,0x74,0x65,0x72,0x6c,0x69,0x6b,0x65,0x73,0x79,0x6d,0x62,0x6f,0x6c,0x73,0x79,
-0x12,0x63,0x68,0x61,0xa3,0x9c,2,0x6d,0x4e,0x6e,0x54,0x73,0x10,0x75,0xa2,0xb0,
-0x12,0x73,0x75,0x70,0xa4,0x31,0x16,0x70,0x6c,0x65,0x6d,0x65,0x6e,0x74,0xa5,0x31,
-0x11,0x62,0x75,0xa3,0x6f,0x12,0x65,0x61,0x72,1,0x61,0xa3,0xe8,0x62,1,0x69,
-0x38,0x73,0x17,0x79,0x6c,0x6c,0x61,0x62,0x61,0x72,0x79,0xa3,0x75,0x17,0x64,0x65,
-0x6f,0x67,0x72,0x61,0x6d,0x73,0xa3,0x76,0x1a,0x77,0x73,0x75,0x72,0x72,0x6f,0x67,
-0x61,0x74,0x65,0x73,0xa3,0x4d,0x10,0x61,1,0x6d,0x32,0x76,0x14,0x61,0x6e,0x65,
-0x73,0x65,0xa3,0xb5,0x10,0x6f,0x5c,0x12,0x65,0x78,0x74,1,0x61,0xa3,0xb4,0x62,
-0xa3,0xb9,1,0x61,0xa2,0x43,0x68,4,0x61,0x40,0x69,0x50,0x6d,0x6e,0x6f,0x86,
-0x75,0x15,0x64,0x61,0x77,0x61,0x64,0x69,0xa3,0xe6,0x16,0x72,0x6f,0x73,0x68,0x74,
-0x68,0x69,0xa3,0x89,0x1d,0x74,0x61,0x6e,0x73,0x6d,0x61,0x6c,0x6c,0x73,0x63,0x72,
-0x69,0x70,0x74,0xa5,0x30,0x11,0x65,0x72,0x68,0x16,0x73,0x79,0x6d,0x62,0x6f,0x6c,
-0x73,0xa3,0x71,0x12,0x6a,0x6b,0x69,0xa3,0xe5,3,0x69,0x3a,0x6e,0x42,0x74,0xa2,
-0x58,0x79,0x13,0x61,0x68,0x6c,0x69,0xa3,0xa2,0x12,0x74,0x68,0x69,0xa3,0xc1,3,
-0x61,0x34,0x62,0x84,0x67,0x8a,0x6e,0x12,0x61,0x64,0x61,0x4d,1,0x65,0x40,0x73,
-0x11,0x75,0x70,0xa2,0xcb,0x16,0x70,0x6c,0x65,0x6d,0x65,0x6e,0x74,0xa3,0xcb,0x11,
-0x78,0x74,2,0x61,0xa5,0x13,0x62,0xa5,0x38,0x65,0x13,0x6e,0x64,0x65,0x64,1,
-0x61,0xa5,0x13,0x62,0xa5,0x38,0x11,0x75,0x6e,0xa3,0x42,0x11,0x78,0x69,0x96,0x17,
-0x72,0x61,0x64,0x69,0x63,0x61,0x6c,0x73,0x97,0x14,0x61,0x6b,0x61,0x6e,0x61,0x9e,
-1,0x65,0x4c,0x70,0x10,0x68,0x1f,0x6f,0x6e,0x65,0x74,0x69,0x63,0x65,0x78,0x74,
-0x65,0x6e,0x73,0x69,0x6f,0x6e,0x73,0xa3,0x6b,0x11,0x78,0x74,0xa3,0x6b,0x67,0xa2,
-0xb5,0x68,0xa4,0x84,0x69,3,0x64,0x4c,0x6d,0xa2,0x55,0x6e,0xa2,0x62,0x70,0x13,
-0x61,0x65,0x78,0x74,0x2a,0x16,0x65,0x6e,0x73,0x69,0x6f,0x6e,0x73,0x2b,1,0x63,
-0x99,0x65,0x17,0x6f,0x67,0x72,0x61,0x70,0x68,0x69,0x63,1,0x64,0x56,0x73,0x15,
-0x79,0x6d,0x62,0x6f,0x6c,0x73,0xa4,0xb,0x1d,0x61,0x6e,0x64,0x70,0x75,0x6e,0x63,
-0x74,0x75,0x61,0x74,0x69,0x6f,0x6e,0xa5,0xb,0x13,0x65,0x73,0x63,0x72,0x1f,0x69,
-0x70,0x74,0x69,0x6f,0x6e,0x63,0x68,0x61,0x72,0x61,0x63,0x74,0x65,0x72,0x73,0x99,
-0x1c,0x70,0x65,0x72,0x69,0x61,0x6c,0x61,0x72,0x61,0x6d,0x61,0x69,0x63,0xa3,0xba,
-1,0x64,0x62,0x73,0x1b,0x63,0x72,0x69,0x70,0x74,0x69,0x6f,0x6e,0x61,0x6c,0x70,
-0x61,1,0x68,0x32,0x72,0x14,0x74,0x68,0x69,0x61,0x6e,0xa3,0xbd,0x13,0x6c,0x61,
-0x76,0x69,0xa3,0xbe,0x11,0x69,0x63,1,0x6e,0x3e,0x73,0x1a,0x69,0x79,0x61,0x71,
-0x6e,0x75,0x6d,0x62,0x65,0x72,0x73,0xa5,0x1e,0x19,0x75,0x6d,0x62,0x65,0x72,0x66,
-0x6f,0x72,0x6d,0x73,0xa3,0xb2,4,0x65,0x74,0x6c,0xa2,0x82,0x6f,0xa2,0x9a,0x72,
-0xa2,0x9e,0x75,2,0x6a,0x34,0x6e,0x3e,0x72,0x14,0x6d,0x75,0x6b,0x68,0x69,0x43,
-0x14,0x61,0x72,0x61,0x74,0x69,0x45,0x18,0x6a,0x61,0x6c,0x61,0x67,0x6f,0x6e,0x64,
-0x69,0xa5,0x1c,1,0x6e,0xa2,0x46,0x6f,1,0x6d,0x6e,0x72,0x13,0x67,0x69,0x61,
-0x6e,0x5a,1,0x65,0x40,0x73,0x11,0x75,0x70,0xa2,0x87,0x16,0x70,0x6c,0x65,0x6d,
-0x65,0x6e,0x74,0xa3,0x87,0x11,0x78,0x74,0xa4,0x1b,0x14,0x65,0x6e,0x64,0x65,0x64,
-0xa5,0x1b,0x1a,0x65,0x74,0x72,0x69,0x63,0x73,0x68,0x61,0x70,0x65,0x73,0x8c,0x12,
-0x65,0x78,0x74,0xa2,0xe3,0x14,0x65,0x6e,0x64,0x65,0x64,0xa3,0xe3,0x1e,0x65,0x72,
-0x61,0x6c,0x70,0x75,0x6e,0x63,0x74,0x75,0x61,0x74,0x69,0x6f,0x6e,0x71,0x17,0x61,
-0x67,0x6f,0x6c,0x69,0x74,0x69,0x63,0xa2,0x88,0x12,0x73,0x75,0x70,0xa4,0xa,0x16,
-0x70,0x6c,0x65,0x6d,0x65,0x6e,0x74,0xa5,0xa,0x13,0x74,0x68,0x69,0x63,0xa3,0x59,
-1,0x61,0x5c,0x65,0x11,0x65,0x6b,0x30,1,0x61,0x38,0x65,0x11,0x78,0x74,0x6e,
-0x14,0x65,0x6e,0x64,0x65,0x64,0x6f,0x17,0x6e,0x64,0x63,0x6f,0x70,0x74,0x69,0x63,
-0x31,0x13,0x6e,0x74,0x68,0x61,0xa3,0xe4,2,0x61,0xa2,0x48,0x65,0xa2,0xdf,0x69,
-1,0x67,0x30,0x72,0x14,0x61,0x67,0x61,0x6e,0x61,0x9d,0x10,0x68,1,0x70,0x3a,
-0x73,0x18,0x75,0x72,0x72,0x6f,0x67,0x61,0x74,0x65,0x73,0xa3,0x4b,1,0x72,0x3c,
-0x75,0x19,0x73,0x75,0x72,0x72,0x6f,0x67,0x61,0x74,0x65,0x73,0xa3,0x4c,0x11,0x69,
-0x76,0x1f,0x61,0x74,0x65,0x75,0x73,0x65,0x73,0x75,0x72,0x72,0x6f,0x67,0x61,0x74,
-0x65,0x73,0xa3,0x4c,2,0x6c,0x32,0x6e,0x9a,0x74,0x12,0x72,0x61,0x6e,0xa5,2,
-0x10,0x66,2,0x61,0x58,0x6d,0x70,0x77,0x14,0x69,0x64,0x74,0x68,0x61,0x1f,0x6e,
-0x64,0x66,0x75,0x6c,0x6c,0x77,0x69,0x64,0x74,0x68,0x66,0x6f,0x72,0x6d,0x73,0xa3,
-0x57,0x1a,0x6e,0x64,0x66,0x75,0x6c,0x6c,0x66,0x6f,0x72,0x6d,0x73,0xa3,0x57,0x13,
-0x61,0x72,0x6b,0x73,0xa3,0x52,2,0x67,0x34,0x69,0xa2,0x45,0x75,0x12,0x6e,0x6f,
-0x6f,0xa3,0x63,0x11,0x75,0x6c,0xa2,0x4a,2,0x63,0x3c,0x6a,0x5e,0x73,0x17,0x79,
-0x6c,0x6c,0x61,0x62,0x6c,0x65,0x73,0xa3,0x4a,0x1f,0x6f,0x6d,0x70,0x61,0x74,0x69,
-0x62,0x69,0x6c,0x69,0x74,0x79,0x6a,0x61,0x6d,0x6f,0xa3,0x41,0x12,0x61,0x6d,0x6f,
-0x5c,0x17,0x65,0x78,0x74,0x65,0x6e,0x64,0x65,0x64,1,0x61,0xa3,0xb4,0x62,0xa3,
-0xb9,0x19,0x66,0x69,0x72,0x6f,0x68,0x69,0x6e,0x67,0x79,0x61,0xa5,0x1d,0x13,0x62,
-0x72,0x65,0x77,0x37,0x61,0xa4,0xc,0x62,0xa6,0x53,0x63,0xa8,0x28,0x64,0xac,0xd3,
-0x65,5,0x6d,0xa9,0x6d,0x94,0x6e,0xa2,0x41,0x74,0x15,0x68,0x69,0x6f,0x70,0x69,
-0x63,0x5e,1,0x65,0x40,0x73,0x11,0x75,0x70,0xa2,0x86,0x16,0x70,0x6c,0x65,0x6d,
-0x65,0x6e,0x74,0xa3,0x86,0x11,0x78,0x74,0xa2,0x85,2,0x61,0xa3,0xc8,0x62,0xa5,
-0x37,0x65,0x13,0x6e,0x64,0x65,0x64,0xa2,0x85,1,0x61,0xa3,0xc8,0x62,0xa5,0x37,
-0x16,0x6f,0x74,0x69,0x63,0x6f,0x6e,0x73,0xa3,0xce,0x15,0x63,0x6c,0x6f,0x73,0x65,
-0x64,2,0x61,0x5a,0x63,0x9e,0x69,0x1c,0x64,0x65,0x6f,0x67,0x72,0x61,0x70,0x68,
-0x69,0x63,0x73,0x75,0x70,0xa2,0xc4,0x16,0x70,0x6c,0x65,0x6d,0x65,0x6e,0x74,0xa3,
-0xc4,0x16,0x6c,0x70,0x68,0x61,0x6e,0x75,0x6d,0x86,1,0x65,0x2c,0x73,0x11,0x75,
-0x70,0xa3,0xc3,0x13,0x72,0x69,0x63,0x73,0x86,0x18,0x75,0x70,0x70,0x6c,0x65,0x6d,
-0x65,0x6e,0x74,0xa3,0xc3,0x11,0x6a,0x6b,0xa2,0x44,0x1f,0x6c,0x65,0x74,0x74,0x65,
-0x72,0x73,0x61,0x6e,0x64,0x6d,0x6f,0x6e,0x74,0x68,0x73,0xa3,0x44,0x61,0x4a,0x67,
-0x76,0x6c,1,0x62,0x30,0x79,0x13,0x6d,0x61,0x69,0x63,0xa5,0x25,0x13,0x61,0x73,
-0x61,0x6e,0xa3,0xe2,0x13,0x72,0x6c,0x79,0x64,0x1f,0x79,0x6e,0x61,0x73,0x74,0x69,
-0x63,0x63,0x75,0x6e,0x65,0x69,0x66,0x6f,0x72,0x6d,0xa5,1,0x1f,0x79,0x70,0x74,
-0x69,0x61,0x6e,0x68,0x69,0x65,0x72,0x6f,0x67,0x6c,0x79,0x70,0x68,1,0x66,0x26,
-0x73,0xa3,0xc2,0x1c,0x6f,0x72,0x6d,0x61,0x74,0x63,0x6f,0x6e,0x74,0x72,0x6f,0x6c,
-0x73,0xa5,0x24,7,0x6e,0xc0,0xec,0x6e,0x3e,0x72,0xa2,0x5d,0x73,0xa2,0xdf,0x76,
-0x14,0x65,0x73,0x74,0x61,0x6e,0xa3,0xbc,1,0x61,0x92,0x63,0x13,0x69,0x65,0x6e,
-0x74,1,0x67,0x34,0x73,0x15,0x79,0x6d,0x62,0x6f,0x6c,0x73,0xa3,0xa5,0x13,0x72,
-0x65,0x65,0x6b,1,0x6d,0x34,0x6e,0x15,0x75,0x6d,0x62,0x65,0x72,0x73,0xa3,0x7f,
-0x13,0x75,0x73,0x69,0x63,0xa2,0x7e,0x19,0x61,0x6c,0x6e,0x6f,0x74,0x61,0x74,0x69,
-0x6f,0x6e,0xa3,0x7e,0x10,0x74,0x1f,0x6f,0x6c,0x69,0x61,0x6e,0x68,0x69,0x65,0x72,
-0x6f,0x67,0x6c,0x79,0x70,0x68,0x73,0xa3,0xfe,2,0x61,0x32,0x6d,0xa2,0x78,0x72,
-0x12,0x6f,0x77,0x73,0x7d,0x12,0x62,0x69,0x63,0x38,3,0x65,0x4a,0x6d,0x74,0x70,
-0xa2,0x4a,0x73,0x11,0x75,0x70,0xa2,0x80,0x16,0x70,0x6c,0x65,0x6d,0x65,0x6e,0x74,
-0xa3,0x80,0x11,0x78,0x74,2,0x61,0xa3,0xd2,0x62,0xa5,0x35,0x65,0x13,0x6e,0x64,
-0x65,0x64,1,0x61,0xa3,0xd2,0x62,0xa5,0x35,0x12,0x61,0x74,0x68,0xa2,0xd3,0x18,
-0x65,0x6d,0x61,0x74,0x69,0x63,0x61,0x6c,0x61,0x1f,0x6c,0x70,0x68,0x61,0x62,0x65,
-0x74,0x69,0x63,0x73,0x79,0x6d,0x62,0x6f,0x6c,0x73,0xa3,0xd3,1,0x66,0x42,0x72,
-0x1e,0x65,0x73,0x65,0x6e,0x74,0x61,0x74,0x69,0x6f,0x6e,0x66,0x6f,0x72,0x6d,0x73,
-1,0x61,0xa3,0x51,0x62,0xa3,0x55,0x14,0x65,0x6e,0x69,0x61,0x6e,0x35,0x12,0x63,
-0x69,0x69,0x23,0x64,0x9e,0x65,0xa2,0x42,0x68,0xa2,0x4d,0x6c,1,0x63,0x62,0x70,
-0x17,0x68,0x61,0x62,0x65,0x74,0x69,0x63,0x70,1,0x66,0xa3,0x50,0x72,0x1e,0x65,
-0x73,0x65,0x6e,0x74,0x61,0x74,0x69,0x6f,0x6e,0x66,0x6f,0x72,0x6d,0x73,0xa3,0x50,
-0x16,0x68,0x65,0x6d,0x69,0x63,0x61,0x6c,0xa2,0xd0,0x16,0x73,0x79,0x6d,0x62,0x6f,
-0x6c,0x73,0xa3,0xd0,0x12,0x6c,0x61,0x6d,0xa5,7,0x1a,0x67,0x65,0x61,0x6e,0x6e,
-0x75,0x6d,0x62,0x65,0x72,0x73,0xa3,0x77,0x11,0x6f,0x6d,0xa3,0xfd,7,0x6f,0x71,
-0x6f,0x64,0x72,0xa2,0x41,0x75,0xa2,0x58,0x79,0x1b,0x7a,0x61,0x6e,0x74,0x69,0x6e,
-0x65,0x6d,0x75,0x73,0x69,0x63,0xa2,0x5b,0x18,0x61,0x6c,0x73,0x79,0x6d,0x62,0x6f,
-0x6c,0x73,0xa3,0x5b,1,0x70,0x34,0x78,0x16,0x64,0x72,0x61,0x77,0x69,0x6e,0x67,
-0x89,0x14,0x6f,0x6d,0x6f,0x66,0x6f,0xa0,0x12,0x65,0x78,0x74,0xa2,0x43,0x14,0x65,
-0x6e,0x64,0x65,0x64,0xa3,0x43,0x10,0x61,1,0x68,0x40,0x69,0x12,0x6c,0x6c,0x65,
-0x92,0x17,0x70,0x61,0x74,0x74,0x65,0x72,0x6e,0x73,0x93,0x11,0x6d,0x69,0xa3,0xc9,
-1,0x67,0x2c,0x68,0x11,0x69,0x64,0xa3,0x64,0x14,0x69,0x6e,0x65,0x73,0x65,0xa3,
-0x81,0x61,0x48,0x65,0xa2,0x4e,0x68,0xa2,0x52,0x6c,0x1a,0x6f,0x63,0x6b,0x65,0x6c,
-0x65,0x6d,0x65,0x6e,0x74,0x73,0x8b,3,0x6c,0x34,0x6d,0x40,0x73,0x66,0x74,0x11,
-0x61,0x6b,0xa3,0xc7,0x14,0x69,0x6e,0x65,0x73,0x65,0xa3,0x93,0x11,0x75,0x6d,0xa2,
-0xb1,0x12,0x73,0x75,0x70,0xa2,0xca,0x16,0x70,0x6c,0x65,0x6d,0x65,0x6e,0x74,0xa3,
-0xca,1,0x69,0x30,0x73,0x13,0x61,0x76,0x61,0x68,0xa3,0xdd,0x15,0x63,0x6c,0x61,
-0x74,0x69,0x6e,0x23,0x14,0x6e,0x67,0x61,0x6c,0x69,0x41,0x16,0x61,0x69,0x6b,0x73,
-0x75,0x6b,0x69,0xa5,8,5,0x6f,0xc1,0x59,0x6f,0xa2,0x62,0x75,0xa4,0x1d,0x79,
-1,0x70,0x9c,0x72,0x14,0x69,0x6c,0x6c,0x69,0x63,0x32,1,0x65,0x4c,0x73,0x11,
-0x75,0x70,0xa2,0x61,0x16,0x70,0x6c,0x65,0x6d,0x65,0x6e,0x74,0xa2,0x61,0x12,0x61,
-0x72,0x79,0xa3,0x61,0x11,0x78,0x74,3,0x61,0xa3,0x9e,0x62,0xa3,0xa0,0x63,0xa5,
-9,0x65,0x13,0x6e,0x64,0x65,0x64,2,0x61,0xa3,0x9e,0x62,0xa3,0xa0,0x63,0xa5,
-9,0x10,0x72,1,0x69,0x34,0x6f,0x15,0x6d,0x69,0x6e,0x6f,0x61,0x6e,0xa5,0x36,
-0x1a,0x6f,0x74,0x73,0x79,0x6c,0x6c,0x61,0x62,0x61,0x72,0x79,0xa3,0x7b,3,0x6d,
-0x5a,0x6e,0xa2,0x95,0x70,0xa2,0xa0,0x75,0x17,0x6e,0x74,0x69,0x6e,0x67,0x72,0x6f,
-0x64,0xa2,0x9a,0x17,0x6e,0x75,0x6d,0x65,0x72,0x61,0x6c,0x73,0xa3,0x9a,2,0x62,
-0x3a,0x6d,0xa2,0x5f,0x70,0x15,0x61,0x74,0x6a,0x61,0x6d,0x6f,0xa3,0x41,0x14,0x69,
-0x6e,0x69,0x6e,0x67,2,0x64,0x46,0x68,0x9e,0x6d,0x1d,0x61,0x72,0x6b,0x73,0x66,
-0x6f,0x72,0x73,0x79,0x6d,0x62,0x6f,0x6c,0x73,0x77,0x1e,0x69,0x61,0x63,0x72,0x69,
-0x74,0x69,0x63,0x61,0x6c,0x6d,0x61,0x72,0x6b,0x73,0x2e,2,0x65,0x40,0x66,0xa6,
-0x41,0x73,0x18,0x75,0x70,0x70,0x6c,0x65,0x6d,0x65,0x6e,0x74,0xa3,0x83,0x16,0x78,
-0x74,0x65,0x6e,0x64,0x65,0x64,0xa3,0xe0,0x17,0x61,0x6c,0x66,0x6d,0x61,0x72,0x6b,
-0x73,0xa3,0x52,0x11,0x6f,0x6e,0x1f,0x69,0x6e,0x64,0x69,0x63,0x6e,0x75,0x6d,0x62,
-0x65,0x72,0x66,0x6f,0x72,0x6d,0x73,0xa3,0xb2,0x1b,0x74,0x72,0x6f,0x6c,0x70,0x69,
-0x63,0x74,0x75,0x72,0x65,0x73,0x83,0x12,0x74,0x69,0x63,0xa2,0x84,0x1b,0x65,0x70,
-0x61,0x63,0x74,0x6e,0x75,0x6d,0x62,0x65,0x72,0x73,0xa3,0xdf,1,0x6e,0x3e,0x72,
-0x1b,0x72,0x65,0x6e,0x63,0x79,0x73,0x79,0x6d,0x62,0x6f,0x6c,0x73,0x75,0x15,0x65,
-0x69,0x66,0x6f,0x72,0x6d,0xa2,0x98,0x16,0x6e,0x75,0x6d,0x62,0x65,0x72,0x73,0xa2,
-0x99,0x1d,0x61,0x6e,0x64,0x70,0x75,0x6e,0x63,0x74,0x75,0x61,0x74,0x69,0x6f,0x6e,
-0xa3,0x99,0x61,0xa2,0xe1,0x68,0xa4,0xb,0x6a,0x10,0x6b,0xa2,0x47,4,0x63,0x8c,
-0x65,0xa2,0x80,0x72,0xa2,0x98,0x73,0xa2,0xaa,0x75,0x1f,0x6e,0x69,0x66,0x69,0x65,
-0x64,0x69,0x64,0x65,0x6f,0x67,0x72,0x61,0x70,0x68,0x73,0xa2,0x47,0x18,0x65,0x78,
-0x74,0x65,0x6e,0x73,0x69,0x6f,0x6e,6,0x64,0x6b,0x64,0xa3,0xd1,0x65,0xa5,0,
-0x66,0xa5,0x12,0x67,0xa5,0x2e,0x14,0x6f,0x6d,0x70,0x61,0x74,0xa2,0x45,1,0x66,
-0x96,0x69,1,0x62,0x44,0x64,0x17,0x65,0x6f,0x67,0x72,0x61,0x70,0x68,0x73,0xa2,
-0x4f,0x12,0x73,0x75,0x70,0xa3,0x5f,0x14,0x69,0x6c,0x69,0x74,0x79,0xa2,0x45,1,
-0x66,0x54,0x69,0x18,0x64,0x65,0x6f,0x67,0x72,0x61,0x70,0x68,0x73,0xa2,0x4f,0x19,
-0x73,0x75,0x70,0x70,0x6c,0x65,0x6d,0x65,0x6e,0x74,0xa3,0x5f,0x13,0x6f,0x72,0x6d,
-0x73,0xa3,0x53,0x11,0x78,0x74,6,0x64,0xc,0x64,0xa3,0xd1,0x65,0xa5,0,0x66,
-0xa5,0x12,0x67,0xa5,0x2e,0x61,0xa3,0x46,0x62,0xa3,0x5e,0x63,0xa3,0xc5,0x19,0x61,
-0x64,0x69,0x63,0x61,0x6c,0x73,0x73,0x75,0x70,0x94,0x16,0x70,0x6c,0x65,0x6d,0x65,
-0x6e,0x74,0x95,1,0x74,0x50,0x79,0x14,0x6d,0x62,0x6f,0x6c,0x73,0x9a,0x1d,0x61,
-0x6e,0x64,0x70,0x75,0x6e,0x63,0x74,0x75,0x61,0x74,0x69,0x6f,0x6e,0x9b,0x14,0x72,
-0x6f,0x6b,0x65,0x73,0xa3,0x82,2,0x6e,0x48,0x72,0x64,0x75,0x1d,0x63,0x61,0x73,
-0x69,0x61,0x6e,0x61,0x6c,0x62,0x61,0x6e,0x69,0x61,0x6e,0xa3,0xde,0x1d,0x61,0x64,
-0x69,0x61,0x6e,0x73,0x79,0x6c,0x6c,0x61,0x62,0x69,0x63,0x73,0x63,0x12,0x69,0x61,
-0x6e,0xa3,0xa8,2,0x61,0x3a,0x65,0x4c,0x6f,0x16,0x72,0x61,0x73,0x6d,0x69,0x61,
-0x6e,0xa5,0x2d,1,0x6b,0x26,0x6d,0xa3,0xa4,0x11,0x6d,0x61,0xa3,0xd4,1,0x72,
-0x38,0x73,0x17,0x73,0x73,0x79,0x6d,0x62,0x6f,0x6c,0x73,0xa5,0x19,0x13,0x6f,0x6b,
-0x65,0x65,0x60,0x12,0x73,0x75,0x70,0xa2,0xff,0x16,0x70,0x6c,0x65,0x6d,0x65,0x6e,
-0x74,0xa3,0xff,3,0x65,0x3e,0x69,0x7e,0x6f,0xa2,0x69,0x75,0x15,0x70,0x6c,0x6f,
-0x79,0x61,0x6e,0xa3,0xe1,1,0x73,0x50,0x76,0x16,0x61,0x6e,0x61,0x67,0x61,0x72,
-0x69,0x3e,0x12,0x65,0x78,0x74,0xa2,0xb3,0x14,0x65,0x6e,0x64,0x65,0x64,0xa3,0xb3,
-0x13,0x65,0x72,0x65,0x74,0xa3,0x5a,2,0x61,0x3a,0x6e,0x82,0x76,0x16,0x65,0x73,
-0x61,0x6b,0x75,0x72,0x75,0xa5,0x2f,0x18,0x63,0x72,0x69,0x74,0x69,0x63,0x61,0x6c,
-0x73,0x2e,2,0x65,0x30,0x66,0x36,0x73,0x11,0x75,0x70,0xa3,0x83,0x11,0x78,0x74,
-0xa3,0xe0,0x18,0x6f,0x72,0x73,0x79,0x6d,0x62,0x6f,0x6c,0x73,0x77,0x14,0x67,0x62,
-0x61,0x74,0x73,0x91,1,0x67,0x3e,0x6d,0x12,0x69,0x6e,0x6f,0xa2,0xab,0x14,0x74,
-0x69,0x6c,0x65,0x73,0xa3,0xab,0x11,0x72,0x61,0xa5,0x1a,8,0x6d,0x5f,0x6d,0x3a,
-0x6e,0x48,0x73,0x7a,0x76,0xa2,0x4b,0x77,0x12,0x69,0x64,0x65,0x43,0x11,0x65,0x64,
-0x32,0x12,0x69,0x61,0x6c,0x33,2,0x61,0x40,0x62,0x37,0x6f,1,0x62,0x28,0x6e,
-0x10,0x65,0x21,0x13,0x72,0x65,0x61,0x6b,0x37,0x10,0x72,0x34,0x12,0x72,0x6f,0x77,
-0x35,2,0x6d,0x38,0x71,0x46,0x75,1,0x62,0x3d,0x70,0x3e,0x11,0x65,0x72,0x3f,
-1,0x61,0x24,0x6c,0x39,0x11,0x6c,0x6c,0x39,1,0x72,0x3b,0x75,0x12,0x61,0x72,
-0x65,0x3b,0x12,0x65,0x72,0x74,0x40,0x13,0x69,0x63,0x61,0x6c,0x41,0x63,0x58,0x65,
-0x92,0x66,0x96,0x69,1,0x6e,0x36,0x73,0x10,0x6f,0x30,0x14,0x6c,0x61,0x74,0x65,
-0x64,0x31,0x11,0x69,0x74,0x2e,0x12,0x69,0x61,0x6c,0x2f,2,0x61,0x36,0x69,0x48,
-0x6f,0x10,0x6d,0x24,0x12,0x70,0x61,0x74,0x25,0x10,0x6e,0x22,0x15,0x6f,0x6e,0x69,
-0x63,0x61,0x6c,0x23,0x13,0x72,0x63,0x6c,0x65,0x27,0x11,0x6e,0x63,0x27,2,0x69,
-0x3a,0x6f,0x44,0x72,0x10,0x61,0x2c,0x14,0x63,0x74,0x69,0x6f,0x6e,0x2d,0x10,0x6e,
-0x28,0x11,0x61,0x6c,0x29,0x11,0x6e,0x74,0x2b,4,0x61,0x3a,0x66,0x4c,0x68,0x5e,
-0x6e,0x70,0x77,0x2a,0x12,0x69,0x64,0x65,0x2b,0x22,0x17,0x6d,0x62,0x69,0x67,0x75,
-0x6f,0x75,0x73,0x23,0x26,0x17,0x75,0x6c,0x6c,0x77,0x69,0x64,0x74,0x68,0x27,0x24,
-0x17,0x61,0x6c,0x66,0x77,0x69,0x64,0x74,0x68,0x25,0x20,1,0x61,0x30,0x65,0x14,
-0x75,0x74,0x72,0x61,0x6c,0x21,0x28,0x13,0x72,0x72,0x6f,0x77,0x29,0xd,0x6e,0xc0,
-0xfb,0x73,0x6d,0x73,0x3a,0x74,0x98,0x75,0xa2,0x49,0x7a,2,0x6c,0x3b,0x70,0x3d,
-0x73,0x39,5,0x6f,0x28,0x6f,0x57,0x70,0x34,0x75,0x16,0x72,0x72,0x6f,0x67,0x61,
-0x74,0x65,0x45,0x11,0x61,0x63,1,0x65,0x32,0x69,0x15,0x6e,0x67,0x6d,0x61,0x72,
-0x6b,0x31,0x18,0x73,0x65,0x70,0x61,0x72,0x61,0x74,0x6f,0x72,0x39,0x63,0x53,0x6b,
-0x55,0x6d,0x51,0x1d,0x69,0x74,0x6c,0x65,0x63,0x61,0x73,0x65,0x6c,0x65,0x74,0x74,
-0x65,0x72,0x27,1,0x6e,0x40,0x70,0x1c,0x70,0x65,0x72,0x63,0x61,0x73,0x65,0x6c,
-0x65,0x74,0x74,0x65,0x72,0x23,0x17,0x61,0x73,0x73,0x69,0x67,0x6e,0x65,0x64,0x21,
-0x6e,0x8a,0x6f,0xa2,0x47,0x70,8,0x66,0x14,0x66,0x5b,0x69,0x59,0x6f,0x4f,0x72,
-0x24,0x73,0x49,0x17,0x69,0x76,0x61,0x74,0x65,0x75,0x73,0x65,0x43,0x61,0x2c,0x63,
-0x4d,0x64,0x47,0x65,0x4b,0x1f,0x72,0x61,0x67,0x72,0x61,0x70,0x68,0x73,0x65,0x70,
-0x61,0x72,0x61,0x74,0x6f,0x72,0x3d,2,0x64,0x33,0x6c,0x35,0x6f,0x36,0x1b,0x6e,
-0x73,0x70,0x61,0x63,0x69,0x6e,0x67,0x6d,0x61,0x72,0x6b,0x2d,1,0x70,0x7c,0x74,
-0x12,0x68,0x65,0x72,3,0x6c,0x38,0x6e,0x42,0x70,0x4c,0x73,0x14,0x79,0x6d,0x62,
-0x6f,0x6c,0x57,0x14,0x65,0x74,0x74,0x65,0x72,0x2b,0x14,0x75,0x6d,0x62,0x65,0x72,
-0x37,0x19,0x75,0x6e,0x63,0x74,0x75,0x61,0x74,0x69,0x6f,0x6e,0x4f,0x1c,0x65,0x6e,
-0x70,0x75,0x6e,0x63,0x74,0x75,0x61,0x74,0x69,0x6f,0x6e,0x49,0x66,0x9e,0x66,0x88,
-0x69,0xa2,0x4b,0x6c,0xa2,0x5c,0x6d,4,0x61,0x60,0x63,0x31,0x65,0x2f,0x6e,0x2d,
-0x6f,0x15,0x64,0x69,0x66,0x69,0x65,0x72,1,0x6c,0x30,0x73,0x14,0x79,0x6d,0x62,
-0x6f,0x6c,0x55,0x14,0x65,0x74,0x74,0x65,0x72,0x29,0x17,0x74,0x68,0x73,0x79,0x6d,
-0x62,0x6f,0x6c,0x51,1,0x69,0x2e,0x6f,0x13,0x72,0x6d,0x61,0x74,0x41,0x1d,0x6e,
-0x61,0x6c,0x70,0x75,0x6e,0x63,0x74,0x75,0x61,0x74,0x69,0x6f,0x6e,0x5b,0x10,0x6e,
-0x1f,0x69,0x74,0x69,0x61,0x6c,0x70,0x75,0x6e,0x63,0x74,0x75,0x61,0x74,0x69,0x6f,
-0x6e,0x59,6,0x6d,0x18,0x6d,0x29,0x6f,0x28,0x74,0x27,0x75,0x23,0x2a,0x1c,0x77,
-0x65,0x72,0x63,0x61,0x73,0x65,0x6c,0x65,0x74,0x74,0x65,0x72,0x25,0x65,0x28,0x69,
-0x3c,0x6c,0x25,0x19,0x74,0x74,0x65,0x72,0x6e,0x75,0x6d,0x62,0x65,0x72,0x35,0x1a,
-0x6e,0x65,0x73,0x65,0x70,0x61,0x72,0x61,0x74,0x6f,0x72,0x3b,0x63,0x44,0x64,0xa2,
-0x60,0x65,0x1b,0x6e,0x63,0x6c,0x6f,0x73,0x69,0x6e,0x67,0x6d,0x61,0x72,0x6b,0x2f,
-6,0x6e,0x39,0x6e,0x46,0x6f,0x4e,0x73,0x45,0x75,0x1b,0x72,0x72,0x65,0x6e,0x63,
-0x79,0x73,0x79,0x6d,0x62,0x6f,0x6c,0x53,0x20,0x12,0x74,0x72,0x6c,0x3f,0x42,0x10,
-0x6e,1,0x6e,0x2c,0x74,0x12,0x72,0x6f,0x6c,0x3f,0x1f,0x65,0x63,0x74,0x6f,0x72,
-0x70,0x75,0x6e,0x63,0x74,0x75,0x61,0x74,0x69,0x6f,0x6e,0x4d,0x63,0x3f,0x66,0x41,
-0x6c,0x1d,0x6f,0x73,0x65,0x70,0x75,0x6e,0x63,0x74,0x75,0x61,0x74,0x69,0x6f,0x6e,
-0x4b,2,0x61,0x30,0x65,0x4a,0x69,0x12,0x67,0x69,0x74,0x33,0x1c,0x73,0x68,0x70,
-0x75,0x6e,0x63,0x74,0x75,0x61,0x74,0x69,0x6f,0x6e,0x47,0x1a,0x63,0x69,0x6d,0x61,
-0x6c,0x6e,0x75,0x6d,0x62,0x65,0x72,0x33,0,0x13,0x6e,0xc1,0xf,0x74,0x76,0x74,
-0x4c,0x76,0x9a,0x77,0xa2,0x48,0x79,0xa2,0x49,0x7a,1,0x61,0x2c,0x68,0x12,0x61,
-0x69,0x6e,0x8b,0x11,0x69,0x6e,0x85,2,0x61,0x36,0x65,0x3c,0x68,0x14,0x69,0x6e,
-0x79,0x65,0x68,0xa3,0x66,1,0x68,0x71,0x77,0x73,1,0x68,0x28,0x74,0x10,0x68,
-0x77,0x16,0x6d,0x61,0x72,0x62,0x75,0x74,0x61,0x74,0x13,0x67,0x6f,0x61,0x6c,0x3d,
-0x1a,0x65,0x72,0x74,0x69,0x63,0x61,0x6c,0x74,0x61,0x69,0x6c,0xa3,0x67,0x11,0x61,
-0x77,0x79,1,0x65,0x32,0x75,0x11,0x64,0x68,0x80,0x11,0x68,0x65,0x83,0x10,0x68,
-0x7a,1,0x62,0x34,0x77,0x16,0x69,0x74,0x68,0x74,0x61,0x69,0x6c,0x7f,0x14,0x61,
-0x72,0x72,0x65,0x65,0x7d,0x6e,0xa2,0x4c,0x70,0xa2,0x69,0x71,0xa2,0x69,0x72,0xa2,
-0x6f,0x73,5,0x74,0x22,0x74,0x38,0x77,0x4c,0x79,0x16,0x72,0x69,0x61,0x63,0x77,
-0x61,0x77,0x6f,0x18,0x72,0x61,0x69,0x67,0x68,0x74,0x77,0x61,0x77,0xa3,0x55,0x15,
-0x61,0x73,0x68,0x6b,0x61,0x66,0x6d,0x61,0x2e,0x65,0x38,0x68,0x11,0x69,0x6e,0x6b,
-0x10,0x64,0x62,0x11,0x68,0x65,0x65,1,0x65,0x2e,0x6d,0x13,0x6b,0x61,0x74,0x68,
-0x69,0x10,0x6e,0x67,2,0x6f,0x2c,0x75,0x50,0x79,0x10,0x61,0x91,1,0x6a,0x28,
-0x6f,0x10,0x6e,0x55,0x1a,0x6f,0x69,0x6e,0x69,0x6e,0x67,0x67,0x72,0x6f,0x75,0x70,
-0x21,0x10,0x6e,0x57,0x10,0x65,0x59,0x10,0x61,1,0x66,0x5b,0x70,0x10,0x68,0x5d,
-1,0x65,0x38,0x6f,0x18,0x68,0x69,0x6e,0x67,0x79,0x61,0x79,0x65,0x68,0x93,1,
-0x68,0x5f,0x76,0x16,0x65,0x72,0x73,0x65,0x64,0x70,0x65,0x61,0x67,0xc1,0xc7,0x67,
-0xa4,0x52,0x68,0xa4,0x59,0x6b,0xa4,0x99,0x6c,0xa4,0xb2,0x6d,2,0x61,0x2e,0x65,
-0xa4,0x3e,0x69,0x10,0x6d,0x53,1,0x6c,0xa2,0xe7,0x6e,0x16,0x69,0x63,0x68,0x61,
-0x65,0x61,0x6e,0,0x12,0x6e,0x76,0x73,0x51,0x73,0x3e,0x74,0x5c,0x77,0xa0,0x79,
-0xa2,0x42,0x7a,0x13,0x61,0x79,0x69,0x6e,0xa3,0x54,0x10,0x61,1,0x64,0x2e,0x6d,
-0x12,0x65,0x6b,0x68,0xa3,0x4c,0x11,0x68,0x65,0xa3,0x4b,3,0x61,0x38,0x65,0x3c,
-0x68,0x4a,0x77,0x13,0x65,0x6e,0x74,0x79,0xa3,0x51,0x10,0x77,0xa3,0x4d,1,0x6e,
-0xa3,0x4e,0x74,0x10,0x68,0xa3,0x4f,0x14,0x61,0x6d,0x65,0x64,0x68,0xa3,0x50,0x11,
-0x61,0x77,0xa3,0x52,0x12,0x6f,0x64,0x68,0xa3,0x53,0x6e,0x3a,0x6f,0x40,0x70,0x46,
-0x71,0x4a,0x72,0x12,0x65,0x73,0x68,0xa3,0x4a,0x11,0x75,0x6e,0xa3,0x46,0x11,0x6e,
-0x65,0xa3,0x47,0x10,0x65,0xa3,0x48,0x12,0x6f,0x70,0x68,0xa3,0x49,0x67,0x33,0x67,
-0x38,0x68,0x40,0x6b,0x5e,0x6c,0x66,0x6d,0x11,0x65,0x6d,0xa3,0x45,0x13,0x69,0x6d,
-0x65,0x6c,0xa1,1,0x65,0x32,0x75,0x14,0x6e,0x64,0x72,0x65,0x64,0xa3,0x42,0x11,
-0x74,0x68,0xa3,0x41,0x12,0x61,0x70,0x68,0xa3,0x43,0x14,0x61,0x6d,0x65,0x64,0x68,
-0xa3,0x44,0x61,0x34,0x62,0x4a,0x64,0x50,0x66,0x12,0x69,0x76,0x65,0x9f,1,0x6c,
-0x2a,0x79,0x11,0x69,0x6e,0x97,0x12,0x65,0x70,0x68,0x95,0x12,0x65,0x74,0x68,0x99,
-1,0x61,0x30,0x68,0x14,0x61,0x6d,0x65,0x64,0x68,0x9d,0x13,0x6c,0x65,0x74,0x68,
-0x9b,0x15,0x61,0x79,0x61,0x6c,0x61,0x6d,6,0x6e,0x2c,0x6e,0x34,0x72,0x5e,0x73,
-0x62,0x74,0x11,0x74,0x61,0xa3,0x63,2,0x67,0x2e,0x6e,0x32,0x79,0x10,0x61,0xa3,
-0x60,0x10,0x61,0xa3,0x5d,1,0x61,0xa3,0x5e,0x6e,0x10,0x61,0xa3,0x5f,0x10,0x61,
-0xa3,0x61,0x11,0x73,0x61,0xa3,0x62,0x62,0x3c,0x6a,0x42,0x6c,0x10,0x6c,1,0x61,
-0xa3,0x5b,0x6c,0x10,0x61,0xa3,0x5c,0x11,0x68,0x61,0xa3,0x59,0x10,0x61,0xa3,0x5a,
-0x11,0x65,0x6d,0x51,0x10,0x61,1,0x66,0x37,0x6d,0x11,0x61,0x6c,0x39,1,0x61,
-0x40,0x65,0x3e,1,0x68,0x28,0x74,0x10,0x68,0x45,0x40,0x13,0x67,0x6f,0x61,0x6c,
-0x43,2,0x68,0x3b,0x6d,0x5c,0x6e,0x1a,0x69,0x66,0x69,0x72,0x6f,0x68,0x69,0x6e,
-0x67,0x79,0x61,1,0x6b,0x2a,0x70,0x10,0x61,0xa3,0x65,0x15,0x69,0x6e,0x6e,0x61,
-0x79,0x61,0xa3,0x64,0x1a,0x7a,0x61,0x6f,0x6e,0x68,0x65,0x68,0x67,0x6f,0x61,0x6c,
-0x3d,2,0x61,0x3a,0x68,0x44,0x6e,0x17,0x6f,0x74,0x74,0x65,0x64,0x68,0x65,0x68,
-0x4b,1,0x66,0x47,0x70,0x10,0x68,0x49,0x12,0x61,0x70,0x68,0x89,0x11,0x61,0x6d,
-0x4c,0x12,0x61,0x64,0x68,0x4f,0x61,0x6e,0x62,0xa2,0x54,0x64,0xa2,0x70,0x65,0x31,
-0x66,2,0x61,0x3e,0x65,0x4a,0x69,0x19,0x6e,0x61,0x6c,0x73,0x65,0x6d,0x6b,0x61,
-0x74,0x68,0x35,0x15,0x72,0x73,0x69,0x79,0x65,0x68,0x8f,0x86,0x10,0x68,0x33,2,
-0x66,0x3c,0x69,0x70,0x6c,1,0x61,0x28,0x65,0x10,0x66,0x27,0x11,0x70,0x68,0x25,
-0x14,0x72,0x69,0x63,0x61,0x6e,2,0x66,0x30,0x6e,0x36,0x71,0x11,0x61,0x66,0xa3,
-0x58,0x11,0x65,0x68,0xa3,0x56,0x12,0x6f,0x6f,0x6e,0xa3,0x57,0x10,0x6e,0x23,1,
-0x65,0x4a,0x75,0x10,0x72,0x1f,0x75,0x73,0x68,0x61,0x73,0x6b,0x69,0x79,0x65,0x68,
-0x62,0x61,0x72,0x72,0x65,0x65,0x8d,1,0x68,0x29,0x74,0x10,0x68,0x2b,0x11,0x61,
-0x6c,0x2c,0x16,0x61,0x74,0x68,0x72,0x69,0x73,0x68,0x2f,7,0x6e,0x2e,0x6e,0x2c,
-0x72,0x3e,0x74,0x56,0x75,0x21,0x18,0x6f,0x6e,0x6a,0x6f,0x69,0x6e,0x69,0x6e,0x67,
-0x21,0x28,0x1a,0x69,0x67,0x68,0x74,0x6a,0x6f,0x69,0x6e,0x69,0x6e,0x67,0x29,0x2a,
-0x19,0x72,0x61,0x6e,0x73,0x70,0x61,0x72,0x65,0x6e,0x74,0x2b,0x63,0x23,0x64,0x40,
-0x6a,0x56,0x6c,0x26,0x19,0x65,0x66,0x74,0x6a,0x6f,0x69,0x6e,0x69,0x6e,0x67,0x27,
-0x24,0x19,0x75,0x61,0x6c,0x6a,0x6f,0x69,0x6e,0x69,0x6e,0x67,0x25,0x19,0x6f,0x69,
-0x6e,0x63,0x61,0x75,0x73,0x69,0x6e,0x67,0x23,0,0x13,0x6e,0xc0,0xd0,0x73,0x49,
-0x73,0x48,0x75,0x78,0x77,0x84,0x78,0x9c,0x7a,0x10,0x77,0x58,1,0x6a,0x75,0x73,
-0x13,0x70,0x61,0x63,0x65,0x59,4,0x61,0x51,0x67,0x53,0x70,0x28,0x75,0x30,0x79,
-0x57,0x54,0x12,0x61,0x63,0x65,0x55,0x16,0x72,0x72,0x6f,0x67,0x61,0x74,0x65,0x53,
-0x15,0x6e,0x6b,0x6e,0x6f,0x77,0x6e,0x21,1,0x6a,0x5d,0x6f,0x17,0x72,0x64,0x6a,
-0x6f,0x69,0x6e,0x65,0x72,0x5d,0x10,0x78,0x21,0x6e,0x60,0x6f,0xa2,0x41,0x70,0xa2,
-0x50,0x71,0xa2,0x6e,0x72,1,0x65,0x24,0x69,0x6f,0x1e,0x67,0x69,0x6f,0x6e,0x61,
-0x6c,0x69,0x6e,0x64,0x69,0x63,0x61,0x74,0x6f,0x72,0x6f,4,0x65,0x3e,0x6c,0x5b,
-0x6f,0x46,0x73,0x45,0x75,0x46,0x14,0x6d,0x65,0x72,0x69,0x63,0x47,0x15,0x78,0x74,
-0x6c,0x69,0x6e,0x65,0x5b,0x17,0x6e,0x73,0x74,0x61,0x72,0x74,0x65,0x72,0x45,0x10,
-0x70,0x48,0x1c,0x65,0x6e,0x70,0x75,0x6e,0x63,0x74,0x75,0x61,0x74,0x69,0x6f,0x6e,
-0x49,1,0x6f,0x3e,0x72,0x4c,0x1a,0x65,0x66,0x69,0x78,0x6e,0x75,0x6d,0x65,0x72,
-0x69,0x63,0x4d,0x4a,0x1b,0x73,0x74,0x66,0x69,0x78,0x6e,0x75,0x6d,0x65,0x72,0x69,
-0x63,0x4b,0x10,0x75,0x4e,0x16,0x6f,0x74,0x61,0x74,0x69,0x6f,0x6e,0x4f,0x68,0x7b,
-0x68,0x50,0x69,0x86,0x6a,0xa2,0x61,0x6c,0xa2,0x65,0x6d,0x1c,0x61,0x6e,0x64,0x61,
-0x74,0x6f,0x72,0x79,0x62,0x72,0x65,0x61,0x6b,0x2d,4,0x32,0x5f,0x33,0x61,0x65,
-0x34,0x6c,0x6d,0x79,0x3a,0x13,0x70,0x68,0x65,0x6e,0x3b,0x19,0x62,0x72,0x65,0x77,
-0x6c,0x65,0x74,0x74,0x65,0x72,0x6d,2,0x64,0x28,0x6e,0x3c,0x73,0x41,0x3c,0x18,
-0x65,0x6f,0x67,0x72,0x61,0x70,0x68,0x69,0x63,0x3d,0x3e,1,0x66,0x3e,0x73,0x11,
-0x65,0x70,1,0x61,0x22,0x65,0x14,0x72,0x61,0x62,0x6c,0x65,0x3f,0x18,0x69,0x78,
-0x6e,0x75,0x6d,0x65,0x72,0x69,0x63,0x41,2,0x6c,0x63,0x74,0x65,0x76,0x67,1,
-0x66,0x43,0x69,0x15,0x6e,0x65,0x66,0x65,0x65,0x64,0x43,0x61,0x40,0x62,0x70,0x63,
-0xa2,0x55,0x65,0xa2,0xdb,0x67,0x10,0x6c,0x38,0x11,0x75,0x65,0x39,2,0x69,0x23,
-0x6c,0x34,0x6d,0x16,0x62,0x69,0x67,0x75,0x6f,0x75,0x73,0x23,0x24,0x17,0x70,0x68,
-0x61,0x62,0x65,0x74,0x69,0x63,0x25,4,0x32,0x27,0x61,0x29,0x62,0x2b,0x6b,0x2d,
-0x72,0x12,0x65,0x61,0x6b,2,0x61,0x36,0x62,0x3e,0x73,0x15,0x79,0x6d,0x62,0x6f,
-0x6c,0x73,0x57,0x13,0x66,0x74,0x65,0x72,0x29,1,0x65,0x2a,0x6f,0x11,0x74,0x68,
-0x27,0x13,0x66,0x6f,0x72,0x65,0x2b,7,0x6d,0x51,0x6d,0x33,0x6f,0x28,0x70,0x69,
-0x72,0x35,1,0x6d,0x76,0x6e,1,0x64,0x3c,0x74,0x1a,0x69,0x6e,0x67,0x65,0x6e,
-0x74,0x62,0x72,0x65,0x61,0x6b,0x2f,0x15,0x69,0x74,0x69,0x6f,0x6e,0x61,0x1f,0x6c,
-0x6a,0x61,0x70,0x61,0x6e,0x65,0x73,0x65,0x73,0x74,0x61,0x72,0x74,0x65,0x72,0x6b,
-1,0x62,0x3a,0x70,0x19,0x6c,0x65,0x78,0x63,0x6f,0x6e,0x74,0x65,0x78,0x74,0x51,
-0x18,0x69,0x6e,0x69,0x6e,0x67,0x6d,0x61,0x72,0x6b,0x33,0x61,0x6a,0x62,0x2f,0x6a,
-0x6b,0x6c,0x30,0x13,0x6f,0x73,0x65,0x70,1,0x61,0x38,0x75,0x18,0x6e,0x63,0x74,
-0x75,0x61,0x74,0x69,0x6f,0x6e,0x31,0x18,0x72,0x65,0x6e,0x74,0x68,0x65,0x73,0x69,
-0x73,0x69,0x1b,0x72,0x72,0x69,0x61,0x67,0x65,0x72,0x65,0x74,0x75,0x72,0x6e,0x35,
-2,0x62,0x3e,0x6d,0x46,0x78,0x36,0x18,0x63,0x6c,0x61,0x6d,0x61,0x74,0x69,0x6f,
-0x6e,0x37,0x70,0x12,0x61,0x73,0x65,0x71,0x72,0x16,0x6f,0x64,0x69,0x66,0x69,0x65,
-0x72,0x73,1,0x64,0x42,0x6e,1,0x6f,0x32,0x75,0x26,0x14,0x6d,0x65,0x72,0x69,
-0x63,0x27,0x11,0x6e,0x65,0x21,1,0x65,0x2e,0x69,0x24,0x12,0x67,0x69,0x74,0x25,
-0x22,0x14,0x63,0x69,0x6d,0x61,0x6c,0x23,0,0x18,0x6e,0xc4,0x60,0x74,0xc1,0x91,
-0x77,0x96,0x77,0xa2,0x4c,0x78,0xa2,0x70,0x79,0xa2,0x7a,0x7a,6,0x73,0x1e,0x73,
-0x34,0x78,0x42,0x79,0x48,0x7a,0x11,0x7a,0x7a,0xa3,0x67,0x10,0x79,1,0x65,0xa3,
-0xae,0x6d,0xa3,0x81,0x11,0x78,0x78,0xa3,0x66,0x11,0x79,0x79,0x21,0x61,0x30,0x69,
-0x58,0x6d,0x11,0x74,0x68,0xa3,0x80,0x10,0x6e,1,0x61,0x26,0x62,0xa3,0xb1,0x1a,
-0x62,0x61,0x7a,0x61,0x72,0x73,0x71,0x75,0x61,0x72,0x65,0xa3,0xb1,0x11,0x6e,0x68,
-0x23,2,0x61,0x30,0x63,0x5a,0x6f,0x11,0x6c,0x65,0xa3,0x9b,1,0x6e,0x3c,0x72,
-0x10,0x61,0xa2,0x92,0x15,0x6e,0x67,0x63,0x69,0x74,0x69,0xa3,0x92,0x12,0x63,0x68,
-0x6f,0xa3,0xbc,0x11,0x68,0x6f,0xa3,0xbc,1,0x70,0x2c,0x73,0x11,0x75,0x78,0xa3,
-0x65,0x11,0x65,0x6f,0x9b,1,0x65,0x2c,0x69,0x72,0x11,0x69,0x69,0x73,0x11,0x7a,
-0x69,0xa2,0xc0,0x11,0x64,0x69,0xa3,0xc0,0x74,0x66,0x75,0xa2,0xde,0x76,1,0x61,
-0x48,0x69,1,0x73,0x38,0x74,0x10,0x68,0xa2,0xc5,0x13,0x6b,0x75,0x71,0x69,0xa3,
-0xc5,0x10,0x70,0xa3,0x64,0x10,0x69,0xa2,0x63,0x10,0x69,0xa3,0x63,7,0x68,0x3e,
-0x68,0x34,0x69,0x48,0x6e,0x86,0x6f,0x11,0x74,0x6f,0xa3,0xc4,0x10,0x61,1,0x61,
-0x24,0x69,0x6d,0x6a,0x11,0x6e,0x61,0x6b,2,0x62,0x3a,0x66,0x4a,0x72,0x10,0x68,
-0xa2,0x9e,0x12,0x75,0x74,0x61,0xa3,0x9e,1,0x65,0x24,0x74,0x6f,0x12,0x74,0x61,
-0x6e,0x6f,0x14,0x69,0x6e,0x61,0x67,0x68,0x99,0x11,0x73,0x61,0xa3,0xc3,0x61,0x36,
-0x65,0xa2,0x65,0x66,0xa2,0x71,0x67,0x11,0x6c,0x67,0x75,6,0x6c,0x28,0x6c,0x32,
-0x6d,0x38,0x6e,0x44,0x76,0x10,0x74,0xa3,0x7f,1,0x65,0x89,0x75,0x97,1,0x69,
-0x24,0x6c,0x67,0x10,0x6c,0x67,0x10,0x67,0xa2,0x9a,1,0x73,0x2a,0x75,0x10,0x74,
-0xa3,0x9a,0x10,0x61,0xa3,0xc3,0x67,0x36,0x69,0x52,0x6b,0x10,0x72,0xa2,0x99,0x10,
-0x69,0xa3,0x99,1,0x61,0x30,0x62,0x7a,0x13,0x61,0x6e,0x77,0x61,0x7b,0x12,0x6c,
-0x6f,0x67,0x75,2,0x6c,0x32,0x74,0x34,0x76,0x12,0x69,0x65,0x74,0xa3,0x7f,0x10,
-0x65,0x89,0x12,0x68,0x61,0x6d,0xa3,0x6a,1,0x6c,0x2a,0x6e,0x10,0x67,0xa3,0x62,
-0x10,0x75,0x68,0x11,0x67,0x75,0x69,0x11,0x6e,0x67,0x99,1,0x67,0x32,0x6e,0x14,
-0x6b,0x6e,0x6f,0x77,0x6e,0xa3,0x67,0x11,0x61,0x72,0x8a,0x13,0x69,0x74,0x69,0x63,
-0x8b,0x71,0xc1,0x13,0x71,0xa2,0xde,0x72,0xa2,0xe3,0x73,6,0x69,0x8a,0x69,0x72,
-0x6f,0xa2,0x4c,0x75,0xa2,0x75,0x79,1,0x6c,0x46,0x72,4,0x63,0x65,0x65,0xa3,
-0x5f,0x69,0x2c,0x6a,0xa3,0x60,0x6e,0xa3,0x61,0x11,0x61,0x63,0x65,0x10,0x6f,0x94,
-0x16,0x74,0x69,0x6e,0x61,0x67,0x72,0x69,0x95,2,0x64,0x3c,0x67,0x4c,0x6e,1,
-0x64,0xa3,0x91,0x68,0x62,0x12,0x61,0x6c,0x61,0x63,0x10,0x64,0xa2,0xa6,0x12,0x68,
-0x61,0x6d,0xa3,0xa6,0x17,0x6e,0x77,0x72,0x69,0x74,0x69,0x6e,0x67,0xa3,0x70,2,
-0x67,0x3a,0x72,0x52,0x79,0x10,0x6f,0xa2,0xb0,0x12,0x6d,0x62,0x6f,0xa3,0xb0,1,
-0x64,0x26,0x6f,0xa3,0xb8,0xa2,0xb7,0x12,0x69,0x61,0x6e,0xa3,0xb7,0x10,0x61,0xa2,
-0x98,0x16,0x73,0x6f,0x6d,0x70,0x65,0x6e,0x67,0xa3,0x98,0x11,0x6e,0x64,0xa2,0x71,
-0x14,0x61,0x6e,0x65,0x73,0x65,0xa3,0x71,0x61,0x5c,0x67,0xa2,0x43,0x68,1,0x61,
-0x2a,0x72,0x10,0x64,0xa3,0x97,2,0x72,0x28,0x76,0x30,0x77,0x87,0x12,0x61,0x64,
-0x61,0xa3,0x97,0x12,0x69,0x61,0x6e,0x87,2,0x6d,0x40,0x72,0x58,0x75,0x10,0x72,
-0xa2,0x6f,0x15,0x61,0x73,0x68,0x74,0x72,0x61,0xa3,0x6f,1,0x61,0x26,0x72,0xa3,
-0x7e,0x14,0x72,0x69,0x74,0x61,0x6e,0xa3,0x7e,1,0x61,0xa3,0x5e,0x62,0xa3,0x85,
-0x11,0x6e,0x77,0xa3,0x70,0x11,0x61,0x61,1,0x63,0x2f,0x69,0x23,3,0x65,0x3e,
-0x6a,0x48,0x6f,0x4e,0x75,0x10,0x6e,1,0x69,0x24,0x72,0x61,0x10,0x63,0x61,0x13,
-0x6a,0x61,0x6e,0x67,0xa3,0x6e,0x11,0x6e,0x67,0xa3,0x6e,1,0x68,0x2a,0x72,0x10,
-0x6f,0xa3,0x5d,0x10,0x67,0xa3,0xb6,0x6e,0xa2,0x83,0x6f,0xa2,0xf2,0x70,5,0x6c,
-0x1e,0x6c,0x44,0x72,0x4a,0x73,0x1b,0x61,0x6c,0x74,0x65,0x72,0x70,0x61,0x68,0x6c,
-0x61,0x76,0x69,0xa3,0x7b,0x11,0x72,0x64,0xa3,0x5c,0x11,0x74,0x69,0xa3,0x7d,0x61,
-0x7c,0x65,0xa2,0x54,0x68,3,0x61,0x3e,0x6c,0x4e,0x6e,0x5e,0x6f,0x16,0x65,0x6e,
-0x69,0x63,0x69,0x61,0x6e,0xa3,0x5b,0x10,0x67,0xa2,0x5a,0x12,0x73,0x70,0x61,0xa3,
-0x5a,2,0x69,0xa3,0x7a,0x70,0xa3,0x7b,0x76,0xa3,0x7c,0x10,0x78,0xa3,0x5b,2,
-0x68,0x3e,0x6c,0x50,0x75,0x10,0x63,0xa2,0xa5,0x14,0x69,0x6e,0x68,0x61,0x75,0xa3,
-0xa5,0x17,0x61,0x77,0x68,0x68,0x6d,0x6f,0x6e,0x67,0xa3,0x4b,0x10,0x6d,0xa2,0x90,
-0x14,0x79,0x72,0x65,0x6e,0x65,0xa3,0x90,0x11,0x72,0x6d,0xa3,0x59,6,0x6b,0x36,
-0x6b,0x56,0x73,0x6e,0x75,0x74,0x79,0x11,0x69,0x61,0x1f,0x6b,0x65,0x6e,0x67,0x70,
-0x75,0x61,0x63,0x68,0x75,0x65,0x68,0x6d,0x6f,0x6e,0x67,0xa3,0xba,1,0x67,0x2e,
-0x6f,0xa2,0x57,0x10,0x6f,0xa3,0x57,0x10,0x62,0xa3,0x84,0x11,0x68,0x75,0xa3,0x96,
-0x12,0x73,0x68,0x75,0xa3,0x96,0x61,0x42,0x62,0x80,0x65,0x10,0x77,1,0x61,0xa3,
-0xaa,0x74,0x14,0x61,0x69,0x6c,0x75,0x65,0x97,2,0x62,0x2e,0x6e,0x3c,0x72,0x10,
-0x62,0xa3,0x8e,0x15,0x61,0x74,0x61,0x65,0x61,0x6e,0xa3,0x8f,0x10,0x64,0xa2,0xbb,
-0x16,0x69,0x6e,0x61,0x67,0x61,0x72,0x69,0xa3,0xbb,0x11,0x61,0x74,0xa3,0x8f,4,
-0x67,0x3c,0x6c,0x4e,0x72,0xa2,0x8e,0x73,0xa2,0x9c,0x75,0x11,0x67,0x72,0xa3,0xc2,
-1,0x61,0x2a,0x68,0x11,0x61,0x6d,0x5b,0x10,0x6d,0x5b,1,0x63,0xa2,0x6a,0x64,
-6,0x70,0x41,0x70,0x3a,0x73,0x58,0x74,0x86,0x75,0x14,0x79,0x67,0x68,0x75,0x72,
-0xa3,0xc2,0x11,0x65,0x72,1,0x6d,0x2c,0x73,0x12,0x69,0x61,0x6e,0x9b,0x11,0x69,
-0x63,0xa3,0x59,0x10,0x6f,1,0x67,0x3a,0x75,0x18,0x74,0x68,0x61,0x72,0x61,0x62,
-0x69,0x61,0x6e,0xa3,0x85,0x13,0x64,0x69,0x61,0x6e,0xa3,0xb8,0x14,0x75,0x72,0x6b,
-0x69,0x63,0xa3,0x58,0x68,0x42,0x69,0x54,0x6e,0x1a,0x6f,0x72,0x74,0x68,0x61,0x72,
-0x61,0x62,0x69,0x61,0x6e,0xa3,0x8e,0x17,0x75,0x6e,0x67,0x61,0x72,0x69,0x61,0x6e,
-0xa3,0x4c,0x14,0x74,0x61,0x6c,0x69,0x63,0x5d,1,0x68,0x26,0x6b,0xa3,0x6d,0x12,
-0x69,0x6b,0x69,0xa3,0x6d,2,0x69,0x2c,0x6b,0x30,0x79,0x10,0x61,0x5f,0x11,0x79,
-0x61,0x5f,0x10,0x68,0xa3,0x58,2,0x61,0x36,0x67,0x3c,0x6d,0x10,0x61,0x84,0x12,
-0x6e,0x79,0x61,0x85,0x11,0x67,0x65,0xa3,0xab,0x10,0x65,0xa3,0xab,0x68,0xc3,0xd,
-0x6b,0xc2,0x24,0x6b,0xa4,0x17,0x6c,0xa4,0xb2,0x6d,8,0x6f,0x46,0x6f,0x48,0x72,
-0x74,0x74,0x80,0x75,0x86,0x79,1,0x61,0x28,0x6d,0x10,0x72,0x59,0x13,0x6e,0x6d,
-0x61,0x72,0x59,2,0x64,0x2e,0x6e,0x32,0x6f,0x10,0x6e,0xa3,0x72,0x10,0x69,0xa3,
-0xa3,0x10,0x67,0x56,0x14,0x6f,0x6c,0x69,0x61,0x6e,0x57,0x10,0x6f,0xa2,0x95,0x10,
-0x6f,0xa3,0x95,0x11,0x65,0x69,0xa3,0x73,0x11,0x6c,0x74,0xa2,0xa4,0x12,0x61,0x6e,
-0x69,0xa3,0xa4,0x61,0x36,0x65,0xa2,0x67,0x69,0xa2,0xbd,0x6c,0x11,0x79,0x6d,0x55,
-6,0x6e,0x38,0x6e,0x32,0x72,0x5c,0x73,0x6c,0x79,0x10,0x61,0xa3,0x55,1,0x64,
-0x38,0x69,0xa2,0x79,0x15,0x63,0x68,0x61,0x65,0x61,0x6e,0xa3,0x79,0xa2,0x54,0x12,
-0x61,0x69,0x63,0xa3,0x54,0x10,0x63,0xa2,0xa9,0x12,0x68,0x65,0x6e,0xa3,0xa9,0x18,
-0x61,0x72,0x61,0x6d,0x67,0x6f,0x6e,0x64,0x69,0xa3,0xaf,0x68,0x36,0x6b,0x4c,0x6c,
-0x15,0x61,0x79,0x61,0x6c,0x61,0x6d,0x55,1,0x61,0x26,0x6a,0xa3,0xa0,0x13,0x6a,
-0x61,0x6e,0x69,0xa3,0xa0,0x10,0x61,0xa2,0xb4,0x12,0x73,0x61,0x72,0xa3,0xb4,3,
-0x64,0x78,0x65,0x94,0x6e,0xa2,0x42,0x72,1,0x63,0xa3,0x8d,0x6f,0xa2,0x56,0x13,
-0x69,0x74,0x69,0x63,1,0x63,0x3c,0x68,0x19,0x69,0x65,0x72,0x6f,0x67,0x6c,0x79,
-0x70,0x68,0x73,0xa3,0x56,0x15,0x75,0x72,0x73,0x69,0x76,0x65,0xa3,0x8d,1,0x65,
-0x26,0x66,0xa3,0xb5,0x16,0x66,0x61,0x69,0x64,0x72,0x69,0x6e,0xa3,0xb5,0x17,0x74,
-0x65,0x69,0x6d,0x61,0x79,0x65,0x6b,0xa3,0x73,0x10,0x64,0xa2,0x8c,0x17,0x65,0x6b,
-0x69,0x6b,0x61,0x6b,0x75,0x69,0xa3,0x8c,0x11,0x61,0x6f,0xa3,0x5c,6,0x6e,0x1a,
-0x6e,0x34,0x6f,0x38,0x70,0x3e,0x74,0x11,0x68,0x69,0xa3,0x78,0x11,0x64,0x61,0x4b,
-0x11,0x72,0x65,0xa3,0x77,0x11,0x65,0x6c,0xa3,0x8a,0x61,0x30,0x68,0x9a,0x69,0x11,
-0x74,0x73,0xa3,0xbf,4,0x69,0x3c,0x6c,0x44,0x6e,0x48,0x74,0x56,0x79,0x13,0x61,
-0x68,0x6c,0x69,0xa3,0x4f,0x12,0x74,0x68,0x69,0xa3,0x78,0x10,0x69,0xa3,0x4f,1,
-0x61,0x4d,0x6e,0x12,0x61,0x64,0x61,0x4b,0x14,0x61,0x6b,0x61,0x6e,0x61,0x4c,0x19,
-0x6f,0x72,0x68,0x69,0x72,0x61,0x67,0x61,0x6e,0x61,0x8d,4,0x61,0x40,0x69,0x52,
-0x6d,0x70,0x6f,0x7c,0x75,0x15,0x64,0x61,0x77,0x61,0x64,0x69,0xa3,0x91,0x10,0x72,
-0x92,0x15,0x6f,0x73,0x68,0x74,0x68,0x69,0x93,0x1d,0x74,0x61,0x6e,0x73,0x6d,0x61,
-0x6c,0x6c,0x73,0x63,0x72,0x69,0x70,0x74,0xa3,0xbf,1,0x65,0x24,0x72,0x4f,0x10,
-0x72,0x4f,0x10,0x6a,0xa2,0x9d,0x11,0x6b,0x69,0xa3,0x9d,4,0x61,0x5c,0x65,0x90,
-0x69,0xa0,0x6f,0xa2,0x5d,0x79,1,0x63,0x34,0x64,0x10,0x69,0xa2,0x6c,0x11,0x61,
-0x6e,0xa3,0x6c,0x10,0x69,0xa2,0x6b,0x11,0x61,0x6e,0xa3,0x6b,2,0x6e,0x42,0x6f,
-0x46,0x74,3,0x66,0xa3,0x50,0x67,0xa3,0x51,0x69,0x24,0x6e,0x53,0x10,0x6e,0x53,
-0x10,0x61,0xa3,0x6a,0x50,0x10,0x6f,0x51,0x11,0x70,0x63,0xa2,0x52,0x11,0x68,0x61,
-0xa3,0x52,2,0x6d,0x2e,0x6e,0x36,0x73,0x10,0x75,0xa3,0x83,0x10,0x62,0x80,0x10,
-0x75,0x81,2,0x61,0xa3,0x53,0x62,0x83,0x65,0x11,0x61,0x72,1,0x61,0xa3,0x53,
-0x62,0x83,0x11,0x6d,0x61,0xa3,0x8b,0x68,0x6e,0x69,0xa2,0x95,0x6a,2,0x61,0x30,
-0x70,0x52,0x75,0x11,0x72,0x63,0xa3,0x94,1,0x6d,0x38,0x76,0x10,0x61,0xa2,0x4e,
-0x13,0x6e,0x65,0x73,0x65,0xa3,0x4e,0x10,0x6f,0xa3,0xad,0x11,0x61,0x6e,0xa3,0x69,
-6,0x6c,0x1e,0x6c,0x34,0x6d,0x3a,0x72,0x48,0x75,0x11,0x6e,0x67,0xa3,0x4c,0x11,
-0x75,0x77,0xa3,0x9c,0x10,0x6e,1,0x67,0xa3,0x4b,0x70,0xa3,0xba,0x11,0x6b,0x74,
-0x8d,0x61,0x3c,0x65,0xa2,0x43,0x69,0x11,0x72,0x61,0x48,0x13,0x67,0x61,0x6e,0x61,
-0x49,1,0x6e,0x34,0x74,0x10,0x72,0xa2,0xa2,0x11,0x61,0x6e,0xa3,0xa2,0x42,6,
-0x6f,0xe,0x6f,0x77,0x73,0xa3,0x49,0x74,0xa3,0x4a,0x75,0x12,0x6e,0x6f,0x6f,0x77,
-0x62,0xa3,0xac,0x67,0x3e,0x69,0x42,0x19,0x66,0x69,0x72,0x6f,0x68,0x69,0x6e,0x67,
-0x79,0x61,0xa3,0xb6,0x44,0x11,0x75,0x6c,0x45,0x11,0x62,0x72,0x46,0x11,0x65,0x77,
-0x47,2,0x6d,0x2e,0x6e,0x4a,0x74,0x11,0x61,0x6c,0x5d,0x1c,0x70,0x65,0x72,0x69,
-0x61,0x6c,0x61,0x72,0x61,0x6d,0x61,0x69,0x63,0xa3,0x74,2,0x64,0x66,0x68,0x6a,
-0x73,0x1b,0x63,0x72,0x69,0x70,0x74,0x69,0x6f,0x6e,0x61,0x6c,0x70,0x61,1,0x68,
-0x32,0x72,0x14,0x74,0x68,0x69,0x61,0x6e,0xa3,0x7d,0x13,0x6c,0x61,0x76,0x69,0xa3,
-0x7a,0x10,0x73,0xa3,0x4d,0x15,0x65,0x72,0x69,0x74,0x65,0x64,0x23,0x64,0xc1,0xd,
-0x64,0xa2,0x7a,0x65,0xa2,0xc1,0x67,4,0x65,0x82,0x6c,0x9a,0x6f,0xa2,0x46,0x72,
-0xa2,0x55,0x75,2,0x6a,0x3c,0x6e,0x4e,0x72,1,0x6d,0x24,0x75,0x41,0x13,0x75,
-0x6b,0x68,0x69,0x41,1,0x61,0x24,0x72,0x3f,0x13,0x72,0x61,0x74,0x69,0x3f,0x18,
-0x6a,0x61,0x6c,0x61,0x67,0x6f,0x6e,0x64,0x69,0xa3,0xb3,0x10,0x6f,1,0x6b,0xa3,
-0x48,0x72,0x38,0x13,0x67,0x69,0x61,0x6e,0x39,0x11,0x61,0x67,0x90,0x15,0x6f,0x6c,
-0x69,0x74,0x69,0x63,0x91,1,0x6e,0x30,0x74,0x10,0x68,0x3a,0x11,0x69,0x63,0x3b,
-1,0x67,0xa3,0xb3,0x6d,0xa3,0xaf,1,0x61,0x32,0x65,1,0x65,0x24,0x6b,0x3d,
-0x10,0x6b,0x3d,0x10,0x6e,0xa2,0x89,0x12,0x74,0x68,0x61,0xa3,0x89,4,0x65,0x46,
-0x69,0x6c,0x6f,0x8c,0x73,0x9a,0x75,0x11,0x70,0x6c,0xa2,0x87,0x13,0x6f,0x79,0x61,
-0x6e,0xa3,0x87,1,0x73,0x38,0x76,0x10,0x61,0x34,0x15,0x6e,0x61,0x67,0x61,0x72,
-0x69,0x35,0x13,0x65,0x72,0x65,0x74,0x33,1,0x61,0x36,0x76,0x16,0x65,0x73,0x61,
-0x6b,0x75,0x72,0x75,0xa3,0xbe,0x10,0x6b,0xa3,0xbe,0x11,0x67,0x72,0xa2,0xb2,0x10,
-0x61,0xa3,0xb2,0x11,0x72,0x74,0x33,2,0x67,0x3a,0x6c,0x72,0x74,0x11,0x68,0x69,
-0x36,0x13,0x6f,0x70,0x69,0x63,0x37,0x10,0x79,2,0x64,0xa3,0x45,0x68,0xa3,0x46,
-0x70,0xa2,0x47,0x1e,0x74,0x69,0x61,0x6e,0x68,0x69,0x65,0x72,0x6f,0x67,0x6c,0x79,
-0x70,0x68,0x73,0xa3,0x47,1,0x62,0x36,0x79,0x10,0x6d,0xa2,0xb9,0x12,0x61,0x69,
-0x63,0xa3,0xb9,0x10,0x61,0xa2,0x88,0x12,0x73,0x61,0x6e,0xa3,0x88,0x61,0xa2,0xc9,
-0x62,0xa4,0x2e,0x63,6,0x6f,0x52,0x6f,0x76,0x70,0x92,0x75,0xa2,0x41,0x79,1,
-0x70,0x3e,0x72,2,0x69,0x2a,0x6c,0x31,0x73,0xa3,0x44,0x13,0x6c,0x6c,0x69,0x63,
-0x31,0x10,0x72,1,0x69,0x34,0x6f,0x15,0x6d,0x69,0x6e,0x6f,0x61,0x6e,0xa3,0xc1,
-0x11,0x6f,0x74,0x7f,1,0x6d,0x30,0x70,0x10,0x74,0x2e,0x11,0x69,0x63,0x2f,0x12,
-0x6d,0x6f,0x6e,0x21,1,0x6d,0x28,0x72,0x10,0x74,0x7f,0x10,0x6e,0xa3,0xc1,0x16,
-0x6e,0x65,0x69,0x66,0x6f,0x72,0x6d,0xa3,0x65,0x61,0x32,0x68,0xa2,0x41,0x69,0x11,
-0x72,0x74,0xa3,0x43,3,0x6b,0x4c,0x6e,0x50,0x72,0x76,0x75,0x1d,0x63,0x61,0x73,
-0x69,0x61,0x6e,0x61,0x6c,0x62,0x61,0x6e,0x69,0x61,0x6e,0xa3,0x9f,0x10,0x6d,0xa3,
-0x76,1,0x61,0x24,0x73,0x71,0x1d,0x64,0x69,0x61,0x6e,0x61,0x62,0x6f,0x72,0x69,
-0x67,0x69,0x6e,0x61,0x6c,0x71,0x10,0x69,0xa2,0x68,0x11,0x61,0x6e,0xa3,0x68,3,
-0x61,0x32,0x65,0x44,0x6f,0x52,0x72,0x10,0x73,0xa3,0xbd,1,0x6b,0x26,0x6d,0xa3,
-0x42,0x11,0x6d,0x61,0xa3,0x76,0x10,0x72,0x2c,0x13,0x6f,0x6b,0x65,0x65,0x2d,0x16,
-0x72,0x61,0x73,0x6d,0x69,0x61,0x6e,0xa3,0xbd,6,0x68,0x4a,0x68,0x48,0x6e,0x4e,
-0x72,0x76,0x76,1,0x65,0x2a,0x73,0x10,0x74,0xa3,0x75,0x13,0x73,0x74,0x61,0x6e,
-0xa3,0x75,0x11,0x6f,0x6d,0xa3,0xa1,0x11,0x61,0x74,0x1f,0x6f,0x6c,0x69,0x61,0x6e,
-0x68,0x69,0x65,0x72,0x6f,0x67,0x6c,0x79,0x70,0x68,0x73,0xa3,0x9c,1,0x61,0x3e,
-0x6d,2,0x65,0x2a,0x69,0xa3,0x74,0x6e,0x27,0x13,0x6e,0x69,0x61,0x6e,0x27,0x10,
-0x62,0x24,0x11,0x69,0x63,0x25,0x64,0x30,0x66,0x44,0x67,0x11,0x68,0x62,0xa3,0x9f,
-0x10,0x6c,1,0x61,0x26,0x6d,0xa3,0xa7,0x10,0x6d,0xa3,0xa7,0x11,0x61,0x6b,0xa3,
-0x93,6,0x6c,0x3c,0x6c,0x52,0x6f,0x56,0x72,0x66,0x75,1,0x67,0x30,0x68,1,
-0x64,0x79,0x69,0x10,0x64,0x79,0x10,0x69,0x8e,0x13,0x6e,0x65,0x73,0x65,0x8f,0x11,
-0x69,0x73,0xa1,0x11,0x70,0x6f,0x2a,0x13,0x6d,0x6f,0x66,0x6f,0x2b,0x10,0x61,1,
-0x68,0x2e,0x69,0x7c,0x12,0x6c,0x6c,0x65,0x7d,0xa2,0x41,0x11,0x6d,0x69,0xa3,0x41,
-0x61,0x48,0x65,0x9c,0x68,1,0x61,0x2a,0x6b,0x10,0x73,0xa3,0xa8,0x15,0x69,0x6b,
-0x73,0x75,0x6b,0x69,0xa3,0xa8,3,0x6c,0x3a,0x6d,0x48,0x73,0x54,0x74,1,0x61,
-0x24,0x6b,0x9f,0x10,0x6b,0x9f,0x10,0x69,0x9c,0x13,0x6e,0x65,0x73,0x65,0x9d,0x10,
-0x75,0xa2,0x82,0x10,0x6d,0xa3,0x82,0x10,0x73,0xa2,0x86,0x13,0x61,0x76,0x61,0x68,
-0xa3,0x86,0x11,0x6e,0x67,0x28,0x12,0x61,0x6c,0x69,0x29,3,0x6c,0x42,0x6e,0x90,
-0x74,0xa2,0x46,0x76,0x24,0x17,0x6f,0x77,0x65,0x6c,0x6a,0x61,0x6d,0x6f,0x25,0x22,
-1,0x65,0x54,0x76,0x28,1,0x73,0x38,0x74,0x2a,0x17,0x73,0x79,0x6c,0x6c,0x61,
-0x62,0x6c,0x65,0x2b,0x16,0x79,0x6c,0x6c,0x61,0x62,0x6c,0x65,0x29,0x18,0x61,0x64,
-0x69,0x6e,0x67,0x6a,0x61,0x6d,0x6f,0x23,1,0x61,0x21,0x6f,0x1a,0x74,0x61,0x70,
-0x70,0x6c,0x69,0x63,0x61,0x62,0x6c,0x65,0x21,0x26,0x1a,0x72,0x61,0x69,0x6c,0x69,
-0x6e,0x67,0x6a,0x61,0x6d,0x6f,0x27,1,0x6e,0x2c,0x79,0x22,0x11,0x65,0x73,0x23,
-0x20,0x10,0x6f,0x21,1,0x6e,0x2c,0x79,0x22,0x11,0x65,0x73,0x23,0x20,0x10,0x6f,
-0x21,2,0x6d,0x30,0x6e,0x3a,0x79,0x22,0x11,0x65,0x73,0x23,0x24,0x13,0x61,0x79,
-0x62,0x65,0x25,0x20,0x10,0x6f,0x21,2,0x6d,0x30,0x6e,0x3a,0x79,0x22,0x11,0x65,
-0x73,0x23,0x24,0x13,0x61,0x79,0x62,0x65,0x25,0x20,0x10,0x6f,0x21,0xb,0x72,0x39,
-0x76,0xc,0x76,0x33,0x78,0x2a,0x7a,0x11,0x77,0x6a,0x43,0x10,0x78,0x21,0x72,0x28,
-0x73,0x50,0x74,0x31,1,0x65,0x24,0x69,0x39,0x1e,0x67,0x69,0x6f,0x6e,0x61,0x6c,
-0x69,0x6e,0x64,0x69,0x63,0x61,0x74,0x6f,0x72,0x39,1,0x6d,0x35,0x70,0x18,0x61,
-0x63,0x69,0x6e,0x67,0x6d,0x61,0x72,0x6b,0x35,0x6c,0x1f,0x6c,0x3c,0x6f,0x4a,0x70,
-1,0x70,0x37,0x72,0x14,0x65,0x70,0x65,0x6e,0x64,0x37,0x28,1,0x66,0x2b,0x76,
-0x2c,0x10,0x74,0x2f,0x13,0x74,0x68,0x65,0x72,0x21,0x63,0x4c,0x65,0x64,0x67,1,
-0x61,0x3a,0x6c,0x19,0x75,0x65,0x61,0x66,0x74,0x65,0x72,0x7a,0x77,0x6a,0x41,0x10,
-0x7a,0x41,2,0x6e,0x23,0x6f,0x24,0x72,0x25,0x14,0x6e,0x74,0x72,0x6f,0x6c,0x23,
-2,0x62,0x34,0x6d,0x4e,0x78,0x26,0x13,0x74,0x65,0x6e,0x64,0x27,0x3a,1,0x61,
-0x24,0x67,0x3d,0x11,0x73,0x65,0x3a,0x12,0x67,0x61,0x7a,0x3d,0x3e,0x16,0x6f,0x64,
-0x69,0x66,0x69,0x65,0x72,0x3f,9,0x6e,0x4a,0x6e,0x34,0x6f,0x44,0x73,0x60,0x75,
-0x94,0x78,0x10,0x78,0x21,0x10,0x75,0x2a,0x14,0x6d,0x65,0x72,0x69,0x63,0x2b,1,
-0x6c,0x2c,0x74,0x12,0x68,0x65,0x72,0x21,0x14,0x65,0x74,0x74,0x65,0x72,0x2d,3,
-0x63,0x36,0x65,0x46,0x70,0x31,0x74,0x32,0x12,0x65,0x72,0x6d,0x33,0x3c,0x16,0x6f,
-0x6e,0x74,0x69,0x6e,0x75,0x65,0x3d,0x2e,0x10,0x70,0x2f,0x10,0x70,0x34,0x12,0x70,
-0x65,0x72,0x35,0x61,0x46,0x63,0x52,0x65,0x64,0x66,0x72,0x6c,2,0x65,0x2d,0x66,
-0x3b,0x6f,0x28,0x12,0x77,0x65,0x72,0x29,0x10,0x74,0x22,0x12,0x65,0x72,0x6d,0x23,
-1,0x6c,0x24,0x72,0x37,0x24,0x12,0x6f,0x73,0x65,0x25,0x10,0x78,0x38,0x13,0x74,
-0x65,0x6e,0x64,0x39,0x10,0x6f,0x26,0x13,0x72,0x6d,0x61,0x74,0x27,0,0x10,0x6c,
-0x88,0x72,0x40,0x72,0x36,0x73,0x5e,0x77,0x7a,0x78,0x8a,0x7a,0x11,0x77,0x6a,0x4b,
-1,0x65,0x24,0x69,0x3b,0x1e,0x67,0x69,0x6f,0x6e,0x61,0x6c,0x69,0x6e,0x64,0x69,
-0x63,0x61,0x74,0x6f,0x72,0x3b,1,0x69,0x24,0x71,0x3f,0x18,0x6e,0x67,0x6c,0x65,
-0x71,0x75,0x6f,0x74,0x65,0x3f,0x17,0x73,0x65,0x67,0x73,0x70,0x61,0x63,0x65,0x4d,
-0x10,0x78,0x21,0x6c,0x36,0x6d,0x3c,0x6e,0x76,0x6f,0x13,0x74,0x68,0x65,0x72,0x21,
-1,0x65,0x23,0x66,0x35,3,0x62,0x37,0x69,0x28,0x6c,0x29,0x6e,0x2b,0x10,0x64,
-1,0x6c,0x34,0x6e,0x11,0x75,0x6d,0x2a,0x12,0x6c,0x65,0x74,0x37,0x14,0x65,0x74,
-0x74,0x65,0x72,0x29,2,0x65,0x36,0x6c,0x39,0x75,0x2c,0x14,0x6d,0x65,0x72,0x69,
-0x63,0x2d,0x14,0x77,0x6c,0x69,0x6e,0x65,0x39,0x66,0x3f,0x66,0x40,0x67,0x4e,0x68,
-0x70,0x6b,0x10,0x61,0x26,0x15,0x74,0x61,0x6b,0x61,0x6e,0x61,0x27,0x10,0x6f,0x24,
-0x13,0x72,0x6d,0x61,0x74,0x25,1,0x61,0x3a,0x6c,0x19,0x75,0x65,0x61,0x66,0x74,
-0x65,0x72,0x7a,0x77,0x6a,0x49,0x10,0x7a,0x49,1,0x65,0x24,0x6c,0x3d,0x19,0x62,
-0x72,0x65,0x77,0x6c,0x65,0x74,0x74,0x65,0x72,0x3d,0x61,0x86,0x63,0x92,0x64,0x94,
-0x65,2,0x62,0x44,0x6d,0x5e,0x78,0x2e,0x13,0x74,0x65,0x6e,0x64,0x32,0x15,0x6e,
-0x75,0x6d,0x6c,0x65,0x74,0x2f,0x42,1,0x61,0x24,0x67,0x45,0x11,0x73,0x65,0x42,
-0x12,0x67,0x61,0x7a,0x45,0x46,0x16,0x6f,0x64,0x69,0x66,0x69,0x65,0x72,0x47,0x15,
-0x6c,0x65,0x74,0x74,0x65,0x72,0x23,0x10,0x72,0x31,1,0x6f,0x24,0x71,0x41,0x18,
-0x75,0x62,0x6c,0x65,0x71,0x75,0x6f,0x74,0x65,0x41,2,0x63,0x32,0x6e,0x3c,0x6f,
-0x22,0x12,0x70,0x65,0x6e,0x23,0x24,0x13,0x6c,0x6f,0x73,0x65,0x25,0x20,0x12,0x6f,
-0x6e,0x65,0x21,6,0x6f,0x65,0x6f,0x4a,0x72,0x5c,0x74,0x64,0x76,0x1d,0x69,0x73,
-0x75,0x61,0x6c,0x6f,0x72,0x64,0x65,0x72,0x6c,0x65,0x66,0x74,0x3d,0x18,0x76,0x65,
-0x72,0x73,0x74,0x72,0x75,0x63,0x6b,0x2d,0x13,0x69,0x67,0x68,0x74,0x2f,0x11,0x6f,
-0x70,0x30,0x12,0x61,0x6e,0x64,2,0x62,0x32,0x6c,0x62,0x72,0x13,0x69,0x67,0x68,
-0x74,0x3b,0x14,0x6f,0x74,0x74,0x6f,0x6d,0x32,0x12,0x61,0x6e,0x64,1,0x6c,0x2e,
-0x72,0x13,0x69,0x67,0x68,0x74,0x35,0x12,0x65,0x66,0x74,0x3f,0x12,0x65,0x66,0x74,
-0x36,0x17,0x61,0x6e,0x64,0x72,0x69,0x67,0x68,0x74,0x39,0x62,0x2c,0x6c,0x5c,0x6e,
-0x10,0x61,0x21,0x14,0x6f,0x74,0x74,0x6f,0x6d,0x22,0x12,0x61,0x6e,0x64,1,0x6c,
-0x2e,0x72,0x13,0x69,0x67,0x68,0x74,0x27,0x12,0x65,0x66,0x74,0x25,0x12,0x65,0x66,
-0x74,0x28,0x17,0x61,0x6e,0x64,0x72,0x69,0x67,0x68,0x74,0x2b,0xd,0x6e,0xaa,0x72,
-0x70,0x72,0x92,0x73,0xa2,0x46,0x74,0xa2,0x54,0x76,1,0x69,0x60,0x6f,0x12,0x77,
-0x65,0x6c,0x62,1,0x64,0x3a,0x69,0x19,0x6e,0x64,0x65,0x70,0x65,0x6e,0x64,0x65,
-0x6e,0x74,0x67,0x17,0x65,0x70,0x65,0x6e,0x64,0x65,0x6e,0x74,0x65,1,0x72,0x2e,
-0x73,0x13,0x61,0x72,0x67,0x61,0x61,0x12,0x61,0x6d,0x61,0x5f,0x1d,0x65,0x67,0x69,
-0x73,0x74,0x65,0x72,0x73,0x68,0x69,0x66,0x74,0x65,0x72,0x57,0x1e,0x79,0x6c,0x6c,
-0x61,0x62,0x6c,0x65,0x6d,0x6f,0x64,0x69,0x66,0x69,0x65,0x72,0x59,0x12,0x6f,0x6e,
-0x65,1,0x6c,0x2c,0x6d,0x12,0x61,0x72,0x6b,0x5d,0x14,0x65,0x74,0x74,0x65,0x72,
-0x5b,0x6e,0x3c,0x6f,0x7c,0x70,0x18,0x75,0x72,0x65,0x6b,0x69,0x6c,0x6c,0x65,0x72,
-0x55,1,0x6f,0x4c,0x75,1,0x6b,0x3c,0x6d,0x12,0x62,0x65,0x72,0x50,0x15,0x6a,
-0x6f,0x69,0x6e,0x65,0x72,0x53,0x11,0x74,0x61,0x4f,0x16,0x6e,0x6a,0x6f,0x69,0x6e,
-0x65,0x72,0x4d,0x13,0x74,0x68,0x65,0x72,0x21,0x67,0x3e,0x67,0x4a,0x69,0x64,0x6a,
-0x82,0x6d,0x1d,0x6f,0x64,0x69,0x66,0x79,0x69,0x6e,0x67,0x6c,0x65,0x74,0x74,0x65,
-0x72,0x4b,0x1c,0x65,0x6d,0x69,0x6e,0x61,0x74,0x69,0x6f,0x6e,0x6d,0x61,0x72,0x6b,
-0x45,0x1e,0x6e,0x76,0x69,0x73,0x69,0x62,0x6c,0x65,0x73,0x74,0x61,0x63,0x6b,0x65,
-0x72,0x47,0x14,0x6f,0x69,0x6e,0x65,0x72,0x49,0x61,0xa2,0xba,0x62,0xa2,0xc0,0x63,
-1,0x61,0xa2,0xa2,0x6f,0x16,0x6e,0x73,0x6f,0x6e,0x61,0x6e,0x74,0x2a,8,0x6b,
-0x67,0x6b,0x48,0x6d,0x52,0x70,0x5c,0x73,0xa2,0x42,0x77,0x19,0x69,0x74,0x68,0x73,
-0x74,0x61,0x63,0x6b,0x65,0x72,0x43,0x14,0x69,0x6c,0x6c,0x65,0x72,0x35,0x14,0x65,
-0x64,0x69,0x61,0x6c,0x37,1,0x6c,0x52,0x72,0x10,0x65,1,0x63,0x2e,0x66,0x13,
-0x69,0x78,0x65,0x64,0x3d,0x19,0x65,0x64,0x69,0x6e,0x67,0x72,0x65,0x70,0x68,0x61,
-0x3b,0x18,0x61,0x63,0x65,0x68,0x6f,0x6c,0x64,0x65,0x72,0x39,0x10,0x75,1,0x62,
-0x3e,0x63,0x1b,0x63,0x65,0x65,0x64,0x69,0x6e,0x67,0x72,0x65,0x70,0x68,0x61,0x41,
-0x15,0x6a,0x6f,0x69,0x6e,0x65,0x64,0x3f,0x64,0x4c,0x66,0x52,0x68,0x5a,0x69,0x1e,
-0x6e,0x69,0x74,0x69,0x61,0x6c,0x70,0x6f,0x73,0x74,0x66,0x69,0x78,0x65,0x64,0x33,
-0x12,0x65,0x61,0x64,0x2d,0x13,0x69,0x6e,0x61,0x6c,0x2f,0x18,0x65,0x61,0x64,0x6c,
-0x65,0x74,0x74,0x65,0x72,0x31,0x1d,0x6e,0x74,0x69,0x6c,0x6c,0x61,0x74,0x69,0x6f,
-0x6e,0x6d,0x61,0x72,0x6b,0x29,0x16,0x76,0x61,0x67,0x72,0x61,0x68,0x61,0x23,1,
-0x69,0x4a,0x72,0x10,0x61,0x1f,0x68,0x6d,0x69,0x6a,0x6f,0x69,0x6e,0x69,0x6e,0x67,
-0x6e,0x75,0x6d,0x62,0x65,0x72,0x27,0x12,0x6e,0x64,0x75,0x25,2,0x72,0x38,0x74,
-0x46,0x75,0x26,0x15,0x70,0x72,0x69,0x67,0x68,0x74,0x27,0x20,0x15,0x6f,0x74,0x61,
-0x74,0x65,0x64,0x21,1,0x72,0x24,0x75,0x25,0x22,0x18,0x61,0x6e,0x73,0x66,0x6f,
-0x72,0x6d,0x65,0x64,1,0x72,0x32,0x75,0x15,0x70,0x72,0x69,0x67,0x68,0x74,0x25,
-0x15,0x6f,0x74,0x61,0x74,0x65,0x64,0x23,0xd,0x6e,0xc1,0x86,0x73,0xa8,0x73,0x4c,
-0x74,0xa2,0x76,0x75,0xa2,0x83,0x7a,0xd8,0x70,0,2,0x6c,0xd9,0x20,0,0x70,
-0xd9,0x40,0,0x73,0xc3,0,0xfe,0xf,0,0,0,7,0x6f,0x3c,0x6f,0xff,
-8,0,0,0,0x70,0x3a,0x75,0x6e,0x79,0x13,0x6d,0x62,0x6f,0x6c,0xff,0xf,
-0,0,0,0x11,0x61,0x63,1,0x65,0x34,0x69,0x15,0x6e,0x67,0x6d,0x61,0x72,
-0x6b,0xa5,0,0x18,0x73,0x65,0x70,0x61,0x72,0x61,0x74,0x6f,0x72,0xc3,0,0x16,
-0x72,0x72,0x6f,0x67,0x61,0x74,0x65,0xe1,0,0,0x63,0xff,2,0,0,0,
-0x65,0x38,0x6b,0xff,4,0,0,0,0x6d,0xff,1,0,0,0,0x16,0x70,
-0x61,0x72,0x61,0x74,0x6f,0x72,0xd9,0x70,0,0x1d,0x69,0x74,0x6c,0x65,0x63,0x61,
-0x73,0x65,0x6c,0x65,0x74,0x74,0x65,0x72,0x31,1,0x6e,0x40,0x70,0x1c,0x70,0x65,
-0x72,0x63,0x61,0x73,0x65,0x6c,0x65,0x74,0x74,0x65,0x72,0x25,0x17,0x61,0x73,0x73,
-0x69,0x67,0x6e,0x65,0x64,0x23,0x6e,0xa2,0x69,0x6f,0xa2,0x89,0x70,0xfe,0x30,0xf8,
-0,0,9,0x69,0x33,0x69,0xff,0x10,0,0,0,0x6f,0xfd,0x80,0,0,
-0x72,0x54,0x73,0xf9,0,0,0x75,0x12,0x6e,0x63,0x74,0xfe,0x30,0xf8,0,0,
-0x15,0x75,0x61,0x74,0x69,0x6f,0x6e,0xff,0x30,0xf8,0,0,0x17,0x69,0x76,0x61,
-0x74,0x65,0x75,0x73,0x65,0xdd,0,0,0x61,0x48,0x63,0xfd,0x40,0,0,0x64,
-0xe9,0,0,0x65,0xfd,0x20,0,0,0x66,0xff,0x20,0,0,0,0x1f,0x72,
-0x61,0x67,0x72,0x61,0x70,0x68,0x73,0x65,0x70,0x61,0x72,0x61,0x74,0x6f,0x72,0xd9,
-0x40,0,0xbe,0,3,0x64,0xa7,0,0x6c,0xab,0,0x6f,0x30,0x75,0x13,0x6d,
-0x62,0x65,0x72,0xbf,0,0xb2,0,0x1b,0x6e,0x73,0x70,0x61,0x63,0x69,0x6e,0x67,
-0x6d,0x61,0x72,0x6b,0xa1,1,0x70,0x92,0x74,0x12,0x68,0x65,0x72,0xe6,0x80,1,
-3,0x6c,0x40,0x6e,0x4a,0x70,0x56,0x73,0x14,0x79,0x6d,0x62,0x6f,0x6c,0xff,8,
-0,0,0,0x14,0x65,0x74,0x74,0x65,0x72,0x61,0x14,0x75,0x6d,0x62,0x65,0x72,
-0xb3,0,0x19,0x75,0x6e,0x63,0x74,0x75,0x61,0x74,0x69,0x6f,0x6e,0xfd,0x80,0,
-0,0x1c,0x65,0x6e,0x70,0x75,0x6e,0x63,0x74,0x75,0x61,0x74,0x69,0x6f,0x6e,0xf9,
-0,0,0x66,0xc0,0xc4,0x66,0xa2,0x47,0x69,0xa2,0x64,0x6c,0xa2,0x79,0x6d,0xa4,
-0xc0,4,0x61,0x6c,0x63,0xa5,0,0x65,0xa3,0x80,0x6e,0xa1,0x6f,0x15,0x64,0x69,
-0x66,0x69,0x65,0x72,1,0x6c,0x38,0x73,0x14,0x79,0x6d,0x62,0x6f,0x6c,0xff,4,
-0,0,0,0x14,0x65,0x74,0x74,0x65,0x72,0x41,1,0x72,0x3c,0x74,0x16,0x68,
-0x73,0x79,0x6d,0x62,0x6f,0x6c,0xff,1,0,0,0,0x10,0x6b,0xa5,0xc0,1,
-0x69,0x32,0x6f,0x13,0x72,0x6d,0x61,0x74,0xdb,0,0,0x1d,0x6e,0x61,0x6c,0x70,
-0x75,0x6e,0x63,0x74,0x75,0x61,0x74,0x69,0x6f,0x6e,0xff,0x20,0,0,0,0x10,
-0x6e,0x1f,0x69,0x74,0x69,0x61,0x6c,0x70,0x75,0x6e,0x63,0x74,0x75,0x61,0x74,0x69,
-0x6f,0x6e,0xff,0x10,0,0,0,0x9c,7,0x6d,0x18,0x6d,0x41,0x6f,0x28,0x74,
-0x31,0x75,0x25,0x60,0x1c,0x77,0x65,0x72,0x63,0x61,0x73,0x65,0x6c,0x65,0x74,0x74,
-0x65,0x72,0x29,0x63,0x3d,0x65,0x28,0x69,0x42,0x6c,0x29,0x13,0x74,0x74,0x65,0x72,
-0x9c,0x15,0x6e,0x75,0x6d,0x62,0x65,0x72,0xab,0,0x1a,0x6e,0x65,0x73,0x65,0x70,
-0x61,0x72,0x61,0x74,0x6f,0x72,0xd9,0x20,0,0x63,0x46,0x64,0xa2,0x96,0x65,0x1b,
-0x6e,0x63,0x6c,0x6f,0x73,0x69,0x6e,0x67,0x6d,0x61,0x72,0x6b,0xa3,0x80,0xe6,0x80,
-1,7,0x6e,0x57,0x6e,0x52,0x6f,0x5e,0x73,0xe1,0,0,0x75,0x1b,0x72,0x72,
-0x65,0x6e,0x63,0x79,0x73,0x79,0x6d,0x62,0x6f,0x6c,0xff,2,0,0,0,0x22,
-0x12,0x74,0x72,0x6c,0xd9,0x80,0,0xdc,0,0,1,0x6d,0x62,0x6e,1,0x6e,
-0x30,0x74,0x12,0x72,0x6f,0x6c,0xd9,0x80,0,0x1f,0x65,0x63,0x74,0x6f,0x72,0x70,
-0x75,0x6e,0x63,0x74,0x75,0x61,0x74,0x69,0x6f,0x6e,0xfd,0x40,0,0,0x19,0x62,
-0x69,0x6e,0x69,0x6e,0x67,0x6d,0x61,0x72,0x6b,0xa5,0xc0,0x61,0x58,0x63,0xd9,0x80,
-0,0x66,0xdb,0,0,0x6c,0x1d,0x6f,0x73,0x65,0x70,0x75,0x6e,0x63,0x74,0x75,
-0x61,0x74,0x69,0x6f,0x6e,0xfd,0x20,0,0,0x18,0x73,0x65,0x64,0x6c,0x65,0x74,
-0x74,0x65,0x72,0x3d,2,0x61,0x32,0x65,0x50,0x69,0x12,0x67,0x69,0x74,0xa7,0,
-0x1c,0x73,0x68,0x70,0x75,0x6e,0x63,0x74,0x75,0x61,0x74,0x69,0x6f,0x6e,0xe9,0,
-0,0x1a,0x63,0x69,0x6d,0x61,0x6c,0x6e,0x75,0x6d,0x62,0x65,0x72,0xa7,0
+0x69,0x6c,0x75,0x65,0xa3,0x8b,2,0x62,0x3c,0x67,0x4a,0x6e,0x17,0x64,0x69,0x6e,
+0x61,0x67,0x61,0x72,0x69,0xa5,0x26,0x15,0x61,0x74,0x61,0x65,0x61,0x6e,0xa3,0xef,
+0x16,0x6d,0x75,0x6e,0x64,0x61,0x72,0x69,0xa5,0x47,0x67,0xc4,0x5d,0x6a,0xc1,0xe4,
+0x6a,0xa2,0xdf,0x6b,0xa2,0xf8,0x6c,4,0x61,0x54,0x65,0xa2,0x6b,0x69,0xa2,0x82,
+0x6f,0xa2,0xc1,0x79,1,0x63,0x2e,0x64,0x12,0x69,0x61,0x6e,0xa3,0xa9,0x12,0x69,
+0x61,0x6e,0xa3,0xa7,1,0x6f,0x55,0x74,0x11,0x69,0x6e,1,0x31,0x96,0x65,0x11,
+0x78,0x74,6,0x64,0x21,0x64,0xa3,0x95,0x65,0x2c,0x66,0xa5,0x39,0x67,0xa5,0x3a,
+0xa2,0xe7,0x13,0x6e,0x64,0x65,0x64,6,0x64,0xc,0x64,0xa3,0x95,0x65,0xa3,0xe7,
+0x66,0xa5,0x39,0x67,0xa5,0x3a,0x61,0x2a,0x62,0x29,0x63,0xa3,0x94,0x26,0x18,0x64,
+0x64,0x69,0x74,0x69,0x6f,0x6e,0x61,0x6c,0x6d,0x24,0x12,0x73,0x75,0x70,0x24,0x16,
+0x70,0x6c,0x65,0x6d,0x65,0x6e,0x74,0x25,1,0x70,0x42,0x74,0x1d,0x74,0x65,0x72,
+0x6c,0x69,0x6b,0x65,0x73,0x79,0x6d,0x62,0x6f,0x6c,0x73,0x79,0x12,0x63,0x68,0x61,
+0xa3,0x9c,2,0x6d,0x4e,0x6e,0x54,0x73,0x10,0x75,0xa2,0xb0,0x12,0x73,0x75,0x70,
+0xa4,0x31,0x16,0x70,0x6c,0x65,0x6d,0x65,0x6e,0x74,0xa5,0x31,0x11,0x62,0x75,0xa3,
+0x6f,0x12,0x65,0x61,0x72,1,0x61,0xa3,0xe8,0x62,1,0x69,0x38,0x73,0x17,0x79,
+0x6c,0x6c,0x61,0x62,0x61,0x72,0x79,0xa3,0x75,0x17,0x64,0x65,0x6f,0x67,0x72,0x61,
+0x6d,0x73,0xa3,0x76,0x1a,0x77,0x73,0x75,0x72,0x72,0x6f,0x67,0x61,0x74,0x65,0x73,
+0xa3,0x4d,0x10,0x61,1,0x6d,0x32,0x76,0x14,0x61,0x6e,0x65,0x73,0x65,0xa3,0xb5,
+0x10,0x6f,0x5c,0x12,0x65,0x78,0x74,1,0x61,0xa3,0xb4,0x62,0xa3,0xb9,1,0x61,
+0xa2,0x43,0x68,4,0x61,0x40,0x69,0x50,0x6d,0x6e,0x6f,0x86,0x75,0x15,0x64,0x61,
+0x77,0x61,0x64,0x69,0xa3,0xe6,0x16,0x72,0x6f,0x73,0x68,0x74,0x68,0x69,0xa3,0x89,
+0x1d,0x74,0x61,0x6e,0x73,0x6d,0x61,0x6c,0x6c,0x73,0x63,0x72,0x69,0x70,0x74,0xa5,
+0x30,0x11,0x65,0x72,0x68,0x16,0x73,0x79,0x6d,0x62,0x6f,0x6c,0x73,0xa3,0x71,0x12,
+0x6a,0x6b,0x69,0xa3,0xe5,5,0x74,0x35,0x74,0x34,0x77,0x7a,0x79,0x13,0x61,0x68,
+0x6c,0x69,0xa3,0xa2,0x14,0x61,0x6b,0x61,0x6e,0x61,0x9e,1,0x65,0x4c,0x70,0x10,
+0x68,0x1f,0x6f,0x6e,0x65,0x74,0x69,0x63,0x65,0x78,0x74,0x65,0x6e,0x73,0x69,0x6f,
+0x6e,0x73,0xa3,0x6b,0x11,0x78,0x74,0xa3,0x6b,0x10,0x69,0xa5,0x46,0x69,0xa2,0x4e,
+0x6b,0xa2,0x51,0x6e,3,0x61,0x34,0x62,0x84,0x67,0x8a,0x6e,0x12,0x61,0x64,0x61,
+0x4d,1,0x65,0x40,0x73,0x11,0x75,0x70,0xa2,0xcb,0x16,0x70,0x6c,0x65,0x6d,0x65,
+0x6e,0x74,0xa3,0xcb,0x11,0x78,0x74,2,0x61,0xa5,0x13,0x62,0xa5,0x38,0x65,0x13,
+0x6e,0x64,0x65,0x64,1,0x61,0xa5,0x13,0x62,0xa5,0x38,0x11,0x75,0x6e,0xa3,0x42,
+0x11,0x78,0x69,0x96,0x17,0x72,0x61,0x64,0x69,0x63,0x61,0x6c,0x73,0x97,0x12,0x74,
+0x68,0x69,0xa3,0xc1,0x1c,0x74,0x6f,0x76,0x69,0x6b,0x6e,0x75,0x6d,0x65,0x72,0x61,
+0x6c,0x73,0xa5,0x45,0x67,0xa2,0xb5,0x68,0xa4,0x84,0x69,3,0x64,0x4c,0x6d,0xa2,
+0x55,0x6e,0xa2,0x62,0x70,0x13,0x61,0x65,0x78,0x74,0x2a,0x16,0x65,0x6e,0x73,0x69,
+0x6f,0x6e,0x73,0x2b,1,0x63,0x99,0x65,0x17,0x6f,0x67,0x72,0x61,0x70,0x68,0x69,
+0x63,1,0x64,0x56,0x73,0x15,0x79,0x6d,0x62,0x6f,0x6c,0x73,0xa4,0xb,0x1d,0x61,
+0x6e,0x64,0x70,0x75,0x6e,0x63,0x74,0x75,0x61,0x74,0x69,0x6f,0x6e,0xa5,0xb,0x13,
+0x65,0x73,0x63,0x72,0x1f,0x69,0x70,0x74,0x69,0x6f,0x6e,0x63,0x68,0x61,0x72,0x61,
+0x63,0x74,0x65,0x72,0x73,0x99,0x1c,0x70,0x65,0x72,0x69,0x61,0x6c,0x61,0x72,0x61,
+0x6d,0x61,0x69,0x63,0xa3,0xba,1,0x64,0x62,0x73,0x1b,0x63,0x72,0x69,0x70,0x74,
+0x69,0x6f,0x6e,0x61,0x6c,0x70,0x61,1,0x68,0x32,0x72,0x14,0x74,0x68,0x69,0x61,
+0x6e,0xa3,0xbd,0x13,0x6c,0x61,0x76,0x69,0xa3,0xbe,0x11,0x69,0x63,1,0x6e,0x3e,
+0x73,0x1a,0x69,0x79,0x61,0x71,0x6e,0x75,0x6d,0x62,0x65,0x72,0x73,0xa5,0x1e,0x19,
+0x75,0x6d,0x62,0x65,0x72,0x66,0x6f,0x72,0x6d,0x73,0xa3,0xb2,4,0x65,0x74,0x6c,
+0xa2,0x82,0x6f,0xa2,0x9a,0x72,0xa2,0x9e,0x75,2,0x6a,0x34,0x6e,0x3e,0x72,0x14,
+0x6d,0x75,0x6b,0x68,0x69,0x43,0x14,0x61,0x72,0x61,0x74,0x69,0x45,0x18,0x6a,0x61,
+0x6c,0x61,0x67,0x6f,0x6e,0x64,0x69,0xa5,0x1c,1,0x6e,0xa2,0x46,0x6f,1,0x6d,
+0x6e,0x72,0x13,0x67,0x69,0x61,0x6e,0x5a,1,0x65,0x40,0x73,0x11,0x75,0x70,0xa2,
+0x87,0x16,0x70,0x6c,0x65,0x6d,0x65,0x6e,0x74,0xa3,0x87,0x11,0x78,0x74,0xa4,0x1b,
+0x14,0x65,0x6e,0x64,0x65,0x64,0xa5,0x1b,0x1a,0x65,0x74,0x72,0x69,0x63,0x73,0x68,
+0x61,0x70,0x65,0x73,0x8c,0x12,0x65,0x78,0x74,0xa2,0xe3,0x14,0x65,0x6e,0x64,0x65,
+0x64,0xa3,0xe3,0x1e,0x65,0x72,0x61,0x6c,0x70,0x75,0x6e,0x63,0x74,0x75,0x61,0x74,
+0x69,0x6f,0x6e,0x71,0x17,0x61,0x67,0x6f,0x6c,0x69,0x74,0x69,0x63,0xa2,0x88,0x12,
+0x73,0x75,0x70,0xa4,0xa,0x16,0x70,0x6c,0x65,0x6d,0x65,0x6e,0x74,0xa5,0xa,0x13,
+0x74,0x68,0x69,0x63,0xa3,0x59,1,0x61,0x5c,0x65,0x11,0x65,0x6b,0x30,1,0x61,
+0x38,0x65,0x11,0x78,0x74,0x6e,0x14,0x65,0x6e,0x64,0x65,0x64,0x6f,0x17,0x6e,0x64,
+0x63,0x6f,0x70,0x74,0x69,0x63,0x31,0x13,0x6e,0x74,0x68,0x61,0xa3,0xe4,2,0x61,
+0xa2,0x48,0x65,0xa2,0xdf,0x69,1,0x67,0x30,0x72,0x14,0x61,0x67,0x61,0x6e,0x61,
+0x9d,0x10,0x68,1,0x70,0x3a,0x73,0x18,0x75,0x72,0x72,0x6f,0x67,0x61,0x74,0x65,
+0x73,0xa3,0x4b,1,0x72,0x3c,0x75,0x19,0x73,0x75,0x72,0x72,0x6f,0x67,0x61,0x74,
+0x65,0x73,0xa3,0x4c,0x11,0x69,0x76,0x1f,0x61,0x74,0x65,0x75,0x73,0x65,0x73,0x75,
+0x72,0x72,0x6f,0x67,0x61,0x74,0x65,0x73,0xa3,0x4c,2,0x6c,0x32,0x6e,0x9a,0x74,
+0x12,0x72,0x61,0x6e,0xa5,2,0x10,0x66,2,0x61,0x58,0x6d,0x70,0x77,0x14,0x69,
+0x64,0x74,0x68,0x61,0x1f,0x6e,0x64,0x66,0x75,0x6c,0x6c,0x77,0x69,0x64,0x74,0x68,
+0x66,0x6f,0x72,0x6d,0x73,0xa3,0x57,0x1a,0x6e,0x64,0x66,0x75,0x6c,0x6c,0x66,0x6f,
+0x72,0x6d,0x73,0xa3,0x57,0x13,0x61,0x72,0x6b,0x73,0xa3,0x52,2,0x67,0x34,0x69,
+0xa2,0x45,0x75,0x12,0x6e,0x6f,0x6f,0xa3,0x63,0x11,0x75,0x6c,0xa2,0x4a,2,0x63,
+0x3c,0x6a,0x5e,0x73,0x17,0x79,0x6c,0x6c,0x61,0x62,0x6c,0x65,0x73,0xa3,0x4a,0x1f,
+0x6f,0x6d,0x70,0x61,0x74,0x69,0x62,0x69,0x6c,0x69,0x74,0x79,0x6a,0x61,0x6d,0x6f,
+0xa3,0x41,0x12,0x61,0x6d,0x6f,0x5c,0x17,0x65,0x78,0x74,0x65,0x6e,0x64,0x65,0x64,
+1,0x61,0xa3,0xb4,0x62,0xa3,0xb9,0x19,0x66,0x69,0x72,0x6f,0x68,0x69,0x6e,0x67,
+0x79,0x61,0xa5,0x1d,0x13,0x62,0x72,0x65,0x77,0x37,0x61,0xa4,0xc,0x62,0xa6,0x59,
+0x63,0xa8,0x2e,0x64,0xac,0xe3,0x65,5,0x6d,0xa9,0x6d,0x94,0x6e,0xa2,0x41,0x74,
+0x15,0x68,0x69,0x6f,0x70,0x69,0x63,0x5e,1,0x65,0x40,0x73,0x11,0x75,0x70,0xa2,
+0x86,0x16,0x70,0x6c,0x65,0x6d,0x65,0x6e,0x74,0xa3,0x86,0x11,0x78,0x74,0xa2,0x85,
+2,0x61,0xa3,0xc8,0x62,0xa5,0x37,0x65,0x13,0x6e,0x64,0x65,0x64,0xa2,0x85,1,
+0x61,0xa3,0xc8,0x62,0xa5,0x37,0x16,0x6f,0x74,0x69,0x63,0x6f,0x6e,0x73,0xa3,0xce,
+0x15,0x63,0x6c,0x6f,0x73,0x65,0x64,2,0x61,0x5a,0x63,0x9e,0x69,0x1c,0x64,0x65,
+0x6f,0x67,0x72,0x61,0x70,0x68,0x69,0x63,0x73,0x75,0x70,0xa2,0xc4,0x16,0x70,0x6c,
+0x65,0x6d,0x65,0x6e,0x74,0xa3,0xc4,0x16,0x6c,0x70,0x68,0x61,0x6e,0x75,0x6d,0x86,
+1,0x65,0x2c,0x73,0x11,0x75,0x70,0xa3,0xc3,0x13,0x72,0x69,0x63,0x73,0x86,0x18,
+0x75,0x70,0x70,0x6c,0x65,0x6d,0x65,0x6e,0x74,0xa3,0xc3,0x11,0x6a,0x6b,0xa2,0x44,
+0x1f,0x6c,0x65,0x74,0x74,0x65,0x72,0x73,0x61,0x6e,0x64,0x6d,0x6f,0x6e,0x74,0x68,
+0x73,0xa3,0x44,0x61,0x4a,0x67,0x76,0x6c,1,0x62,0x30,0x79,0x13,0x6d,0x61,0x69,
+0x63,0xa5,0x25,0x13,0x61,0x73,0x61,0x6e,0xa3,0xe2,0x13,0x72,0x6c,0x79,0x64,0x1f,
+0x79,0x6e,0x61,0x73,0x74,0x69,0x63,0x63,0x75,0x6e,0x65,0x69,0x66,0x6f,0x72,0x6d,
+0xa5,1,0x1f,0x79,0x70,0x74,0x69,0x61,0x6e,0x68,0x69,0x65,0x72,0x6f,0x67,0x6c,
+0x79,0x70,0x68,1,0x66,0x26,0x73,0xa3,0xc2,0x1c,0x6f,0x72,0x6d,0x61,0x74,0x63,
+0x6f,0x6e,0x74,0x72,0x6f,0x6c,0x73,0xa5,0x24,7,0x6e,0xc0,0xf2,0x6e,0x3e,0x72,
+0xa2,0x5d,0x73,0xa2,0xe5,0x76,0x14,0x65,0x73,0x74,0x61,0x6e,0xa3,0xbc,1,0x61,
+0x92,0x63,0x13,0x69,0x65,0x6e,0x74,1,0x67,0x34,0x73,0x15,0x79,0x6d,0x62,0x6f,
+0x6c,0x73,0xa3,0xa5,0x13,0x72,0x65,0x65,0x6b,1,0x6d,0x34,0x6e,0x15,0x75,0x6d,
+0x62,0x65,0x72,0x73,0xa3,0x7f,0x13,0x75,0x73,0x69,0x63,0xa2,0x7e,0x19,0x61,0x6c,
+0x6e,0x6f,0x74,0x61,0x74,0x69,0x6f,0x6e,0xa3,0x7e,0x10,0x74,0x1f,0x6f,0x6c,0x69,
+0x61,0x6e,0x68,0x69,0x65,0x72,0x6f,0x67,0x6c,0x79,0x70,0x68,0x73,0xa3,0xfe,2,
+0x61,0x32,0x6d,0xa2,0x7e,0x72,0x12,0x6f,0x77,0x73,0x7d,0x12,0x62,0x69,0x63,0x38,
+3,0x65,0x4a,0x6d,0x80,0x70,0xa2,0x50,0x73,0x11,0x75,0x70,0xa2,0x80,0x16,0x70,
+0x6c,0x65,0x6d,0x65,0x6e,0x74,0xa3,0x80,0x11,0x78,0x74,3,0x61,0xa3,0xd2,0x62,
+0xa5,0x35,0x63,0xa5,0x41,0x65,0x13,0x6e,0x64,0x65,0x64,2,0x61,0xa3,0xd2,0x62,
+0xa5,0x35,0x63,0xa5,0x41,0x12,0x61,0x74,0x68,0xa2,0xd3,0x18,0x65,0x6d,0x61,0x74,
+0x69,0x63,0x61,0x6c,0x61,0x1f,0x6c,0x70,0x68,0x61,0x62,0x65,0x74,0x69,0x63,0x73,
+0x79,0x6d,0x62,0x6f,0x6c,0x73,0xa3,0xd3,1,0x66,0x42,0x72,0x1e,0x65,0x73,0x65,
+0x6e,0x74,0x61,0x74,0x69,0x6f,0x6e,0x66,0x6f,0x72,0x6d,0x73,1,0x61,0xa3,0x51,
+0x62,0xa3,0x55,0x14,0x65,0x6e,0x69,0x61,0x6e,0x35,0x12,0x63,0x69,0x69,0x23,0x64,
+0x9e,0x65,0xa2,0x42,0x68,0xa2,0x4d,0x6c,1,0x63,0x62,0x70,0x17,0x68,0x61,0x62,
+0x65,0x74,0x69,0x63,0x70,1,0x66,0xa3,0x50,0x72,0x1e,0x65,0x73,0x65,0x6e,0x74,
+0x61,0x74,0x69,0x6f,0x6e,0x66,0x6f,0x72,0x6d,0x73,0xa3,0x50,0x16,0x68,0x65,0x6d,
+0x69,0x63,0x61,0x6c,0xa2,0xd0,0x16,0x73,0x79,0x6d,0x62,0x6f,0x6c,0x73,0xa3,0xd0,
+0x12,0x6c,0x61,0x6d,0xa5,7,0x1a,0x67,0x65,0x61,0x6e,0x6e,0x75,0x6d,0x62,0x65,
+0x72,0x73,0xa3,0x77,0x11,0x6f,0x6d,0xa3,0xfd,7,0x6f,0x71,0x6f,0x64,0x72,0xa2,
+0x41,0x75,0xa2,0x58,0x79,0x1b,0x7a,0x61,0x6e,0x74,0x69,0x6e,0x65,0x6d,0x75,0x73,
+0x69,0x63,0xa2,0x5b,0x18,0x61,0x6c,0x73,0x79,0x6d,0x62,0x6f,0x6c,0x73,0xa3,0x5b,
+1,0x70,0x34,0x78,0x16,0x64,0x72,0x61,0x77,0x69,0x6e,0x67,0x89,0x14,0x6f,0x6d,
+0x6f,0x66,0x6f,0xa0,0x12,0x65,0x78,0x74,0xa2,0x43,0x14,0x65,0x6e,0x64,0x65,0x64,
+0xa3,0x43,0x10,0x61,1,0x68,0x40,0x69,0x12,0x6c,0x6c,0x65,0x92,0x17,0x70,0x61,
+0x74,0x74,0x65,0x72,0x6e,0x73,0x93,0x11,0x6d,0x69,0xa3,0xc9,1,0x67,0x2c,0x68,
+0x11,0x69,0x64,0xa3,0x64,0x14,0x69,0x6e,0x65,0x73,0x65,0xa3,0x81,0x61,0x48,0x65,
+0xa2,0x4e,0x68,0xa2,0x52,0x6c,0x1a,0x6f,0x63,0x6b,0x65,0x6c,0x65,0x6d,0x65,0x6e,
+0x74,0x73,0x8b,3,0x6c,0x34,0x6d,0x40,0x73,0x66,0x74,0x11,0x61,0x6b,0xa3,0xc7,
+0x14,0x69,0x6e,0x65,0x73,0x65,0xa3,0x93,0x11,0x75,0x6d,0xa2,0xb1,0x12,0x73,0x75,
+0x70,0xa2,0xca,0x16,0x70,0x6c,0x65,0x6d,0x65,0x6e,0x74,0xa3,0xca,1,0x69,0x30,
+0x73,0x13,0x61,0x76,0x61,0x68,0xa3,0xdd,0x15,0x63,0x6c,0x61,0x74,0x69,0x6e,0x23,
+0x14,0x6e,0x67,0x61,0x6c,0x69,0x41,0x16,0x61,0x69,0x6b,0x73,0x75,0x6b,0x69,0xa5,
+8,5,0x6f,0xc1,0x60,0x6f,0xa2,0x69,0x75,0xa4,0x24,0x79,1,0x70,0xa2,0x44,
+0x72,0x14,0x69,0x6c,0x6c,0x69,0x63,0x32,1,0x65,0x4c,0x73,0x11,0x75,0x70,0xa2,
+0x61,0x16,0x70,0x6c,0x65,0x6d,0x65,0x6e,0x74,0xa2,0x61,0x12,0x61,0x72,0x79,0xa3,
+0x61,0x11,0x78,0x74,4,0x61,0xa3,0x9e,0x62,0xa3,0xa0,0x63,0xa5,9,0x64,0xa5,
+0x43,0x65,0x13,0x6e,0x64,0x65,0x64,3,0x61,0xa3,0x9e,0x62,0xa3,0xa0,0x63,0xa5,
+9,0x64,0xa5,0x43,0x10,0x72,1,0x69,0x34,0x6f,0x15,0x6d,0x69,0x6e,0x6f,0x61,
+0x6e,0xa5,0x36,0x1a,0x6f,0x74,0x73,0x79,0x6c,0x6c,0x61,0x62,0x61,0x72,0x79,0xa3,
+0x7b,3,0x6d,0x5a,0x6e,0xa2,0x95,0x70,0xa2,0xa0,0x75,0x17,0x6e,0x74,0x69,0x6e,
+0x67,0x72,0x6f,0x64,0xa2,0x9a,0x17,0x6e,0x75,0x6d,0x65,0x72,0x61,0x6c,0x73,0xa3,
+0x9a,2,0x62,0x3a,0x6d,0xa2,0x5f,0x70,0x15,0x61,0x74,0x6a,0x61,0x6d,0x6f,0xa3,
+0x41,0x14,0x69,0x6e,0x69,0x6e,0x67,2,0x64,0x46,0x68,0x9e,0x6d,0x1d,0x61,0x72,
+0x6b,0x73,0x66,0x6f,0x72,0x73,0x79,0x6d,0x62,0x6f,0x6c,0x73,0x77,0x1e,0x69,0x61,
+0x63,0x72,0x69,0x74,0x69,0x63,0x61,0x6c,0x6d,0x61,0x72,0x6b,0x73,0x2e,2,0x65,
+0x40,0x66,0xa6,0x4c,0x73,0x18,0x75,0x70,0x70,0x6c,0x65,0x6d,0x65,0x6e,0x74,0xa3,
+0x83,0x16,0x78,0x74,0x65,0x6e,0x64,0x65,0x64,0xa3,0xe0,0x17,0x61,0x6c,0x66,0x6d,
+0x61,0x72,0x6b,0x73,0xa3,0x52,0x11,0x6f,0x6e,0x1f,0x69,0x6e,0x64,0x69,0x63,0x6e,
+0x75,0x6d,0x62,0x65,0x72,0x66,0x6f,0x72,0x6d,0x73,0xa3,0xb2,0x1b,0x74,0x72,0x6f,
+0x6c,0x70,0x69,0x63,0x74,0x75,0x72,0x65,0x73,0x83,0x12,0x74,0x69,0x63,0xa2,0x84,
+0x1b,0x65,0x70,0x61,0x63,0x74,0x6e,0x75,0x6d,0x62,0x65,0x72,0x73,0xa3,0xdf,1,
+0x6e,0x3e,0x72,0x1b,0x72,0x65,0x6e,0x63,0x79,0x73,0x79,0x6d,0x62,0x6f,0x6c,0x73,
+0x75,0x15,0x65,0x69,0x66,0x6f,0x72,0x6d,0xa2,0x98,0x16,0x6e,0x75,0x6d,0x62,0x65,
+0x72,0x73,0xa2,0x99,0x1d,0x61,0x6e,0x64,0x70,0x75,0x6e,0x63,0x74,0x75,0x61,0x74,
+0x69,0x6f,0x6e,0xa3,0x99,0x61,0xa2,0xe4,0x68,0xa4,0xe,0x6a,0x10,0x6b,0xa2,0x47,
+4,0x63,0x8c,0x65,0xa2,0x80,0x72,0xa2,0x9b,0x73,0xa2,0xad,0x75,0x1f,0x6e,0x69,
+0x66,0x69,0x65,0x64,0x69,0x64,0x65,0x6f,0x67,0x72,0x61,0x70,0x68,0x73,0xa2,0x47,
+0x18,0x65,0x78,0x74,0x65,0x6e,0x73,0x69,0x6f,0x6e,7,0x65,0x6b,0x65,0xa5,0,
+0x66,0xa5,0x12,0x67,0xa5,0x2e,0x68,0xa5,0x42,0x14,0x6f,0x6d,0x70,0x61,0x74,0xa2,
+0x45,1,0x66,0x96,0x69,1,0x62,0x44,0x64,0x17,0x65,0x6f,0x67,0x72,0x61,0x70,
+0x68,0x73,0xa2,0x4f,0x12,0x73,0x75,0x70,0xa3,0x5f,0x14,0x69,0x6c,0x69,0x74,0x79,
+0xa2,0x45,1,0x66,0x54,0x69,0x18,0x64,0x65,0x6f,0x67,0x72,0x61,0x70,0x68,0x73,
+0xa2,0x4f,0x19,0x73,0x75,0x70,0x70,0x6c,0x65,0x6d,0x65,0x6e,0x74,0xa3,0x5f,0x13,
+0x6f,0x72,0x6d,0x73,0xa3,0x53,0x11,0x78,0x74,7,0x65,0xc,0x65,0xa5,0,0x66,
+0xa5,0x12,0x67,0xa5,0x2e,0x68,0xa5,0x42,0x61,0xa3,0x46,0x62,0xa3,0x5e,0x63,0xa3,
+0xc5,0x64,0xa3,0xd1,0x19,0x61,0x64,0x69,0x63,0x61,0x6c,0x73,0x73,0x75,0x70,0x94,
+0x16,0x70,0x6c,0x65,0x6d,0x65,0x6e,0x74,0x95,1,0x74,0x50,0x79,0x14,0x6d,0x62,
+0x6f,0x6c,0x73,0x9a,0x1d,0x61,0x6e,0x64,0x70,0x75,0x6e,0x63,0x74,0x75,0x61,0x74,
+0x69,0x6f,0x6e,0x9b,0x14,0x72,0x6f,0x6b,0x65,0x73,0xa3,0x82,2,0x6e,0x48,0x72,
+0x64,0x75,0x1d,0x63,0x61,0x73,0x69,0x61,0x6e,0x61,0x6c,0x62,0x61,0x6e,0x69,0x61,
+0x6e,0xa3,0xde,0x1d,0x61,0x64,0x69,0x61,0x6e,0x73,0x79,0x6c,0x6c,0x61,0x62,0x69,
+0x63,0x73,0x63,0x12,0x69,0x61,0x6e,0xa3,0xa8,2,0x61,0x3a,0x65,0x4c,0x6f,0x16,
+0x72,0x61,0x73,0x6d,0x69,0x61,0x6e,0xa5,0x2d,1,0x6b,0x26,0x6d,0xa3,0xa4,0x11,
+0x6d,0x61,0xa3,0xd4,1,0x72,0x38,0x73,0x17,0x73,0x73,0x79,0x6d,0x62,0x6f,0x6c,
+0x73,0xa5,0x19,0x13,0x6f,0x6b,0x65,0x65,0x60,0x12,0x73,0x75,0x70,0xa2,0xff,0x16,
+0x70,0x6c,0x65,0x6d,0x65,0x6e,0x74,0xa3,0xff,3,0x65,0x3e,0x69,0x8e,0x6f,0xa2,
+0x71,0x75,0x15,0x70,0x6c,0x6f,0x79,0x61,0x6e,0xa3,0xe1,1,0x73,0x60,0x76,0x16,
+0x61,0x6e,0x61,0x67,0x61,0x72,0x69,0x3e,0x12,0x65,0x78,0x74,0xa2,0xb3,1,0x61,
+0xa5,0x44,0x65,0x13,0x6e,0x64,0x65,0x64,0xa2,0xb3,0x10,0x61,0xa5,0x44,0x13,0x65,
+0x72,0x65,0x74,0xa3,0x5a,2,0x61,0x3a,0x6e,0x82,0x76,0x16,0x65,0x73,0x61,0x6b,
+0x75,0x72,0x75,0xa5,0x2f,0x18,0x63,0x72,0x69,0x74,0x69,0x63,0x61,0x6c,0x73,0x2e,
+2,0x65,0x30,0x66,0x36,0x73,0x11,0x75,0x70,0xa3,0x83,0x11,0x78,0x74,0xa3,0xe0,
+0x18,0x6f,0x72,0x73,0x79,0x6d,0x62,0x6f,0x6c,0x73,0x77,0x14,0x67,0x62,0x61,0x74,
+0x73,0x91,1,0x67,0x3e,0x6d,0x12,0x69,0x6e,0x6f,0xa2,0xab,0x14,0x74,0x69,0x6c,
+0x65,0x73,0xa3,0xab,0x11,0x72,0x61,0xa5,0x1a,8,0x6d,0x5f,0x6d,0x3a,0x6e,0x48,
+0x73,0x7a,0x76,0xa2,0x4b,0x77,0x12,0x69,0x64,0x65,0x43,0x11,0x65,0x64,0x32,0x12,
+0x69,0x61,0x6c,0x33,2,0x61,0x40,0x62,0x37,0x6f,1,0x62,0x28,0x6e,0x10,0x65,
+0x21,0x13,0x72,0x65,0x61,0x6b,0x37,0x10,0x72,0x34,0x12,0x72,0x6f,0x77,0x35,2,
+0x6d,0x38,0x71,0x46,0x75,1,0x62,0x3d,0x70,0x3e,0x11,0x65,0x72,0x3f,1,0x61,
+0x24,0x6c,0x39,0x11,0x6c,0x6c,0x39,1,0x72,0x3b,0x75,0x12,0x61,0x72,0x65,0x3b,
+0x12,0x65,0x72,0x74,0x40,0x13,0x69,0x63,0x61,0x6c,0x41,0x63,0x58,0x65,0x92,0x66,
+0x96,0x69,1,0x6e,0x36,0x73,0x10,0x6f,0x30,0x14,0x6c,0x61,0x74,0x65,0x64,0x31,
+0x11,0x69,0x74,0x2e,0x12,0x69,0x61,0x6c,0x2f,2,0x61,0x36,0x69,0x48,0x6f,0x10,
+0x6d,0x24,0x12,0x70,0x61,0x74,0x25,0x10,0x6e,0x22,0x15,0x6f,0x6e,0x69,0x63,0x61,
+0x6c,0x23,0x13,0x72,0x63,0x6c,0x65,0x27,0x11,0x6e,0x63,0x27,2,0x69,0x3a,0x6f,
+0x44,0x72,0x10,0x61,0x2c,0x14,0x63,0x74,0x69,0x6f,0x6e,0x2d,0x10,0x6e,0x28,0x11,
+0x61,0x6c,0x29,0x11,0x6e,0x74,0x2b,4,0x61,0x3a,0x66,0x4c,0x68,0x5e,0x6e,0x70,
+0x77,0x2a,0x12,0x69,0x64,0x65,0x2b,0x22,0x17,0x6d,0x62,0x69,0x67,0x75,0x6f,0x75,
+0x73,0x23,0x26,0x17,0x75,0x6c,0x6c,0x77,0x69,0x64,0x74,0x68,0x27,0x24,0x17,0x61,
+0x6c,0x66,0x77,0x69,0x64,0x74,0x68,0x25,0x20,1,0x61,0x30,0x65,0x14,0x75,0x74,
+0x72,0x61,0x6c,0x21,0x28,0x13,0x72,0x72,0x6f,0x77,0x29,0xd,0x6e,0xc0,0xfb,0x73,
+0x6d,0x73,0x3a,0x74,0x98,0x75,0xa2,0x49,0x7a,2,0x6c,0x3b,0x70,0x3d,0x73,0x39,
+5,0x6f,0x28,0x6f,0x57,0x70,0x34,0x75,0x16,0x72,0x72,0x6f,0x67,0x61,0x74,0x65,
+0x45,0x11,0x61,0x63,1,0x65,0x32,0x69,0x15,0x6e,0x67,0x6d,0x61,0x72,0x6b,0x31,
+0x18,0x73,0x65,0x70,0x61,0x72,0x61,0x74,0x6f,0x72,0x39,0x63,0x53,0x6b,0x55,0x6d,
+0x51,0x1d,0x69,0x74,0x6c,0x65,0x63,0x61,0x73,0x65,0x6c,0x65,0x74,0x74,0x65,0x72,
+0x27,1,0x6e,0x40,0x70,0x1c,0x70,0x65,0x72,0x63,0x61,0x73,0x65,0x6c,0x65,0x74,
+0x74,0x65,0x72,0x23,0x17,0x61,0x73,0x73,0x69,0x67,0x6e,0x65,0x64,0x21,0x6e,0x8a,
+0x6f,0xa2,0x47,0x70,8,0x66,0x14,0x66,0x5b,0x69,0x59,0x6f,0x4f,0x72,0x24,0x73,
+0x49,0x17,0x69,0x76,0x61,0x74,0x65,0x75,0x73,0x65,0x43,0x61,0x2c,0x63,0x4d,0x64,
+0x47,0x65,0x4b,0x1f,0x72,0x61,0x67,0x72,0x61,0x70,0x68,0x73,0x65,0x70,0x61,0x72,
+0x61,0x74,0x6f,0x72,0x3d,2,0x64,0x33,0x6c,0x35,0x6f,0x36,0x1b,0x6e,0x73,0x70,
+0x61,0x63,0x69,0x6e,0x67,0x6d,0x61,0x72,0x6b,0x2d,1,0x70,0x7c,0x74,0x12,0x68,
+0x65,0x72,3,0x6c,0x38,0x6e,0x42,0x70,0x4c,0x73,0x14,0x79,0x6d,0x62,0x6f,0x6c,
+0x57,0x14,0x65,0x74,0x74,0x65,0x72,0x2b,0x14,0x75,0x6d,0x62,0x65,0x72,0x37,0x19,
+0x75,0x6e,0x63,0x74,0x75,0x61,0x74,0x69,0x6f,0x6e,0x4f,0x1c,0x65,0x6e,0x70,0x75,
+0x6e,0x63,0x74,0x75,0x61,0x74,0x69,0x6f,0x6e,0x49,0x66,0x9e,0x66,0x88,0x69,0xa2,
+0x4b,0x6c,0xa2,0x5c,0x6d,4,0x61,0x60,0x63,0x31,0x65,0x2f,0x6e,0x2d,0x6f,0x15,
+0x64,0x69,0x66,0x69,0x65,0x72,1,0x6c,0x30,0x73,0x14,0x79,0x6d,0x62,0x6f,0x6c,
+0x55,0x14,0x65,0x74,0x74,0x65,0x72,0x29,0x17,0x74,0x68,0x73,0x79,0x6d,0x62,0x6f,
+0x6c,0x51,1,0x69,0x2e,0x6f,0x13,0x72,0x6d,0x61,0x74,0x41,0x1d,0x6e,0x61,0x6c,
+0x70,0x75,0x6e,0x63,0x74,0x75,0x61,0x74,0x69,0x6f,0x6e,0x5b,0x10,0x6e,0x1f,0x69,
+0x74,0x69,0x61,0x6c,0x70,0x75,0x6e,0x63,0x74,0x75,0x61,0x74,0x69,0x6f,0x6e,0x59,
+6,0x6d,0x18,0x6d,0x29,0x6f,0x28,0x74,0x27,0x75,0x23,0x2a,0x1c,0x77,0x65,0x72,
+0x63,0x61,0x73,0x65,0x6c,0x65,0x74,0x74,0x65,0x72,0x25,0x65,0x28,0x69,0x3c,0x6c,
+0x25,0x19,0x74,0x74,0x65,0x72,0x6e,0x75,0x6d,0x62,0x65,0x72,0x35,0x1a,0x6e,0x65,
+0x73,0x65,0x70,0x61,0x72,0x61,0x74,0x6f,0x72,0x3b,0x63,0x44,0x64,0xa2,0x60,0x65,
+0x1b,0x6e,0x63,0x6c,0x6f,0x73,0x69,0x6e,0x67,0x6d,0x61,0x72,0x6b,0x2f,6,0x6e,
+0x39,0x6e,0x46,0x6f,0x4e,0x73,0x45,0x75,0x1b,0x72,0x72,0x65,0x6e,0x63,0x79,0x73,
+0x79,0x6d,0x62,0x6f,0x6c,0x53,0x20,0x12,0x74,0x72,0x6c,0x3f,0x42,0x10,0x6e,1,
+0x6e,0x2c,0x74,0x12,0x72,0x6f,0x6c,0x3f,0x1f,0x65,0x63,0x74,0x6f,0x72,0x70,0x75,
+0x6e,0x63,0x74,0x75,0x61,0x74,0x69,0x6f,0x6e,0x4d,0x63,0x3f,0x66,0x41,0x6c,0x1d,
+0x6f,0x73,0x65,0x70,0x75,0x6e,0x63,0x74,0x75,0x61,0x74,0x69,0x6f,0x6e,0x4b,2,
+0x61,0x30,0x65,0x4a,0x69,0x12,0x67,0x69,0x74,0x33,0x1c,0x73,0x68,0x70,0x75,0x6e,
+0x63,0x74,0x75,0x61,0x74,0x69,0x6f,0x6e,0x47,0x1a,0x63,0x69,0x6d,0x61,0x6c,0x6e,
+0x75,0x6d,0x62,0x65,0x72,0x33,0,0x13,0x6e,0xc1,0xf,0x74,0x76,0x74,0x4c,0x76,
+0x9a,0x77,0xa2,0x48,0x79,0xa2,0x49,0x7a,1,0x61,0x2c,0x68,0x12,0x61,0x69,0x6e,
+0x8b,0x11,0x69,0x6e,0x85,2,0x61,0x36,0x65,0x3c,0x68,0x14,0x69,0x6e,0x79,0x65,
+0x68,0xa3,0x66,1,0x68,0x71,0x77,0x73,1,0x68,0x28,0x74,0x10,0x68,0x77,0x16,
+0x6d,0x61,0x72,0x62,0x75,0x74,0x61,0x74,0x13,0x67,0x6f,0x61,0x6c,0x3d,0x1a,0x65,
+0x72,0x74,0x69,0x63,0x61,0x6c,0x74,0x61,0x69,0x6c,0xa3,0x67,0x11,0x61,0x77,0x79,
+1,0x65,0x32,0x75,0x11,0x64,0x68,0x80,0x11,0x68,0x65,0x83,0x10,0x68,0x7a,1,
+0x62,0x34,0x77,0x16,0x69,0x74,0x68,0x74,0x61,0x69,0x6c,0x7f,0x14,0x61,0x72,0x72,
+0x65,0x65,0x7d,0x6e,0xa2,0x4c,0x70,0xa2,0x69,0x71,0xa2,0x69,0x72,0xa2,0x6f,0x73,
+5,0x74,0x22,0x74,0x38,0x77,0x4c,0x79,0x16,0x72,0x69,0x61,0x63,0x77,0x61,0x77,
+0x6f,0x18,0x72,0x61,0x69,0x67,0x68,0x74,0x77,0x61,0x77,0xa3,0x55,0x15,0x61,0x73,
+0x68,0x6b,0x61,0x66,0x6d,0x61,0x2e,0x65,0x38,0x68,0x11,0x69,0x6e,0x6b,0x10,0x64,
+0x62,0x11,0x68,0x65,0x65,1,0x65,0x2e,0x6d,0x13,0x6b,0x61,0x74,0x68,0x69,0x10,
+0x6e,0x67,2,0x6f,0x2c,0x75,0x50,0x79,0x10,0x61,0x91,1,0x6a,0x28,0x6f,0x10,
+0x6e,0x55,0x1a,0x6f,0x69,0x6e,0x69,0x6e,0x67,0x67,0x72,0x6f,0x75,0x70,0x21,0x10,
+0x6e,0x57,0x10,0x65,0x59,0x10,0x61,1,0x66,0x5b,0x70,0x10,0x68,0x5d,1,0x65,
+0x38,0x6f,0x18,0x68,0x69,0x6e,0x67,0x79,0x61,0x79,0x65,0x68,0x93,1,0x68,0x5f,
+0x76,0x16,0x65,0x72,0x73,0x65,0x64,0x70,0x65,0x61,0x67,0xc1,0xc7,0x67,0xa4,0x52,
+0x68,0xa4,0x59,0x6b,0xa4,0x99,0x6c,0xa4,0xb2,0x6d,2,0x61,0x2e,0x65,0xa4,0x3e,
+0x69,0x10,0x6d,0x53,1,0x6c,0xa2,0xe7,0x6e,0x16,0x69,0x63,0x68,0x61,0x65,0x61,
+0x6e,0,0x12,0x6e,0x76,0x73,0x51,0x73,0x3e,0x74,0x5c,0x77,0xa0,0x79,0xa2,0x42,
+0x7a,0x13,0x61,0x79,0x69,0x6e,0xa3,0x54,0x10,0x61,1,0x64,0x2e,0x6d,0x12,0x65,
+0x6b,0x68,0xa3,0x4c,0x11,0x68,0x65,0xa3,0x4b,3,0x61,0x38,0x65,0x3c,0x68,0x4a,
+0x77,0x13,0x65,0x6e,0x74,0x79,0xa3,0x51,0x10,0x77,0xa3,0x4d,1,0x6e,0xa3,0x4e,
+0x74,0x10,0x68,0xa3,0x4f,0x14,0x61,0x6d,0x65,0x64,0x68,0xa3,0x50,0x11,0x61,0x77,
+0xa3,0x52,0x12,0x6f,0x64,0x68,0xa3,0x53,0x6e,0x3a,0x6f,0x40,0x70,0x46,0x71,0x4a,
+0x72,0x12,0x65,0x73,0x68,0xa3,0x4a,0x11,0x75,0x6e,0xa3,0x46,0x11,0x6e,0x65,0xa3,
+0x47,0x10,0x65,0xa3,0x48,0x12,0x6f,0x70,0x68,0xa3,0x49,0x67,0x33,0x67,0x38,0x68,
+0x40,0x6b,0x5e,0x6c,0x66,0x6d,0x11,0x65,0x6d,0xa3,0x45,0x13,0x69,0x6d,0x65,0x6c,
+0xa1,1,0x65,0x32,0x75,0x14,0x6e,0x64,0x72,0x65,0x64,0xa3,0x42,0x11,0x74,0x68,
+0xa3,0x41,0x12,0x61,0x70,0x68,0xa3,0x43,0x14,0x61,0x6d,0x65,0x64,0x68,0xa3,0x44,
+0x61,0x34,0x62,0x4a,0x64,0x50,0x66,0x12,0x69,0x76,0x65,0x9f,1,0x6c,0x2a,0x79,
+0x11,0x69,0x6e,0x97,0x12,0x65,0x70,0x68,0x95,0x12,0x65,0x74,0x68,0x99,1,0x61,
+0x30,0x68,0x14,0x61,0x6d,0x65,0x64,0x68,0x9d,0x13,0x6c,0x65,0x74,0x68,0x9b,0x15,
+0x61,0x79,0x61,0x6c,0x61,0x6d,6,0x6e,0x2c,0x6e,0x34,0x72,0x5e,0x73,0x62,0x74,
+0x11,0x74,0x61,0xa3,0x63,2,0x67,0x2e,0x6e,0x32,0x79,0x10,0x61,0xa3,0x60,0x10,
+0x61,0xa3,0x5d,1,0x61,0xa3,0x5e,0x6e,0x10,0x61,0xa3,0x5f,0x10,0x61,0xa3,0x61,
+0x11,0x73,0x61,0xa3,0x62,0x62,0x3c,0x6a,0x42,0x6c,0x10,0x6c,1,0x61,0xa3,0x5b,
+0x6c,0x10,0x61,0xa3,0x5c,0x11,0x68,0x61,0xa3,0x59,0x10,0x61,0xa3,0x5a,0x11,0x65,
+0x6d,0x51,0x10,0x61,1,0x66,0x37,0x6d,0x11,0x61,0x6c,0x39,1,0x61,0x40,0x65,
+0x3e,1,0x68,0x28,0x74,0x10,0x68,0x45,0x40,0x13,0x67,0x6f,0x61,0x6c,0x43,2,
+0x68,0x3b,0x6d,0x5c,0x6e,0x1a,0x69,0x66,0x69,0x72,0x6f,0x68,0x69,0x6e,0x67,0x79,
+0x61,1,0x6b,0x2a,0x70,0x10,0x61,0xa3,0x65,0x15,0x69,0x6e,0x6e,0x61,0x79,0x61,
+0xa3,0x64,0x1a,0x7a,0x61,0x6f,0x6e,0x68,0x65,0x68,0x67,0x6f,0x61,0x6c,0x3d,2,
+0x61,0x3a,0x68,0x44,0x6e,0x17,0x6f,0x74,0x74,0x65,0x64,0x68,0x65,0x68,0x4b,1,
+0x66,0x47,0x70,0x10,0x68,0x49,0x12,0x61,0x70,0x68,0x89,0x11,0x61,0x6d,0x4c,0x12,
+0x61,0x64,0x68,0x4f,0x61,0x6e,0x62,0xa2,0x54,0x64,0xa2,0x70,0x65,0x31,0x66,2,
+0x61,0x3e,0x65,0x4a,0x69,0x19,0x6e,0x61,0x6c,0x73,0x65,0x6d,0x6b,0x61,0x74,0x68,
+0x35,0x15,0x72,0x73,0x69,0x79,0x65,0x68,0x8f,0x86,0x10,0x68,0x33,2,0x66,0x3c,
+0x69,0x70,0x6c,1,0x61,0x28,0x65,0x10,0x66,0x27,0x11,0x70,0x68,0x25,0x14,0x72,
+0x69,0x63,0x61,0x6e,2,0x66,0x30,0x6e,0x36,0x71,0x11,0x61,0x66,0xa3,0x58,0x11,
+0x65,0x68,0xa3,0x56,0x12,0x6f,0x6f,0x6e,0xa3,0x57,0x10,0x6e,0x23,1,0x65,0x4a,
+0x75,0x10,0x72,0x1f,0x75,0x73,0x68,0x61,0x73,0x6b,0x69,0x79,0x65,0x68,0x62,0x61,
+0x72,0x72,0x65,0x65,0x8d,1,0x68,0x29,0x74,0x10,0x68,0x2b,0x11,0x61,0x6c,0x2c,
+0x16,0x61,0x74,0x68,0x72,0x69,0x73,0x68,0x2f,7,0x6e,0x2e,0x6e,0x2c,0x72,0x3e,
+0x74,0x56,0x75,0x21,0x18,0x6f,0x6e,0x6a,0x6f,0x69,0x6e,0x69,0x6e,0x67,0x21,0x28,
+0x1a,0x69,0x67,0x68,0x74,0x6a,0x6f,0x69,0x6e,0x69,0x6e,0x67,0x29,0x2a,0x19,0x72,
+0x61,0x6e,0x73,0x70,0x61,0x72,0x65,0x6e,0x74,0x2b,0x63,0x23,0x64,0x40,0x6a,0x56,
+0x6c,0x26,0x19,0x65,0x66,0x74,0x6a,0x6f,0x69,0x6e,0x69,0x6e,0x67,0x27,0x24,0x19,
+0x75,0x61,0x6c,0x6a,0x6f,0x69,0x6e,0x69,0x6e,0x67,0x25,0x19,0x6f,0x69,0x6e,0x63,
+0x61,0x75,0x73,0x69,0x6e,0x67,0x23,0,0x13,0x6e,0xc0,0xd0,0x73,0x49,0x73,0x48,
+0x75,0x78,0x77,0x84,0x78,0x9c,0x7a,0x10,0x77,0x58,1,0x6a,0x75,0x73,0x13,0x70,
+0x61,0x63,0x65,0x59,4,0x61,0x51,0x67,0x53,0x70,0x28,0x75,0x30,0x79,0x57,0x54,
+0x12,0x61,0x63,0x65,0x55,0x16,0x72,0x72,0x6f,0x67,0x61,0x74,0x65,0x53,0x15,0x6e,
+0x6b,0x6e,0x6f,0x77,0x6e,0x21,1,0x6a,0x5d,0x6f,0x17,0x72,0x64,0x6a,0x6f,0x69,
+0x6e,0x65,0x72,0x5d,0x10,0x78,0x21,0x6e,0x60,0x6f,0xa2,0x41,0x70,0xa2,0x50,0x71,
+0xa2,0x6e,0x72,1,0x65,0x24,0x69,0x6f,0x1e,0x67,0x69,0x6f,0x6e,0x61,0x6c,0x69,
+0x6e,0x64,0x69,0x63,0x61,0x74,0x6f,0x72,0x6f,4,0x65,0x3e,0x6c,0x5b,0x6f,0x46,
+0x73,0x45,0x75,0x46,0x14,0x6d,0x65,0x72,0x69,0x63,0x47,0x15,0x78,0x74,0x6c,0x69,
+0x6e,0x65,0x5b,0x17,0x6e,0x73,0x74,0x61,0x72,0x74,0x65,0x72,0x45,0x10,0x70,0x48,
+0x1c,0x65,0x6e,0x70,0x75,0x6e,0x63,0x74,0x75,0x61,0x74,0x69,0x6f,0x6e,0x49,1,
+0x6f,0x3e,0x72,0x4c,0x1a,0x65,0x66,0x69,0x78,0x6e,0x75,0x6d,0x65,0x72,0x69,0x63,
+0x4d,0x4a,0x1b,0x73,0x74,0x66,0x69,0x78,0x6e,0x75,0x6d,0x65,0x72,0x69,0x63,0x4b,
+0x10,0x75,0x4e,0x16,0x6f,0x74,0x61,0x74,0x69,0x6f,0x6e,0x4f,0x68,0x7b,0x68,0x50,
+0x69,0x86,0x6a,0xa2,0x61,0x6c,0xa2,0x65,0x6d,0x1c,0x61,0x6e,0x64,0x61,0x74,0x6f,
+0x72,0x79,0x62,0x72,0x65,0x61,0x6b,0x2d,4,0x32,0x5f,0x33,0x61,0x65,0x34,0x6c,
+0x6d,0x79,0x3a,0x13,0x70,0x68,0x65,0x6e,0x3b,0x19,0x62,0x72,0x65,0x77,0x6c,0x65,
+0x74,0x74,0x65,0x72,0x6d,2,0x64,0x28,0x6e,0x3c,0x73,0x41,0x3c,0x18,0x65,0x6f,
+0x67,0x72,0x61,0x70,0x68,0x69,0x63,0x3d,0x3e,1,0x66,0x3e,0x73,0x11,0x65,0x70,
+1,0x61,0x22,0x65,0x14,0x72,0x61,0x62,0x6c,0x65,0x3f,0x18,0x69,0x78,0x6e,0x75,
+0x6d,0x65,0x72,0x69,0x63,0x41,2,0x6c,0x63,0x74,0x65,0x76,0x67,1,0x66,0x43,
+0x69,0x15,0x6e,0x65,0x66,0x65,0x65,0x64,0x43,0x61,0x40,0x62,0x70,0x63,0xa2,0x55,
+0x65,0xa2,0xdb,0x67,0x10,0x6c,0x38,0x11,0x75,0x65,0x39,2,0x69,0x23,0x6c,0x34,
+0x6d,0x16,0x62,0x69,0x67,0x75,0x6f,0x75,0x73,0x23,0x24,0x17,0x70,0x68,0x61,0x62,
+0x65,0x74,0x69,0x63,0x25,4,0x32,0x27,0x61,0x29,0x62,0x2b,0x6b,0x2d,0x72,0x12,
+0x65,0x61,0x6b,2,0x61,0x36,0x62,0x3e,0x73,0x15,0x79,0x6d,0x62,0x6f,0x6c,0x73,
+0x57,0x13,0x66,0x74,0x65,0x72,0x29,1,0x65,0x2a,0x6f,0x11,0x74,0x68,0x27,0x13,
+0x66,0x6f,0x72,0x65,0x2b,7,0x6d,0x51,0x6d,0x33,0x6f,0x28,0x70,0x69,0x72,0x35,
+1,0x6d,0x76,0x6e,1,0x64,0x3c,0x74,0x1a,0x69,0x6e,0x67,0x65,0x6e,0x74,0x62,
+0x72,0x65,0x61,0x6b,0x2f,0x15,0x69,0x74,0x69,0x6f,0x6e,0x61,0x1f,0x6c,0x6a,0x61,
+0x70,0x61,0x6e,0x65,0x73,0x65,0x73,0x74,0x61,0x72,0x74,0x65,0x72,0x6b,1,0x62,
+0x3a,0x70,0x19,0x6c,0x65,0x78,0x63,0x6f,0x6e,0x74,0x65,0x78,0x74,0x51,0x18,0x69,
+0x6e,0x69,0x6e,0x67,0x6d,0x61,0x72,0x6b,0x33,0x61,0x6a,0x62,0x2f,0x6a,0x6b,0x6c,
+0x30,0x13,0x6f,0x73,0x65,0x70,1,0x61,0x38,0x75,0x18,0x6e,0x63,0x74,0x75,0x61,
+0x74,0x69,0x6f,0x6e,0x31,0x18,0x72,0x65,0x6e,0x74,0x68,0x65,0x73,0x69,0x73,0x69,
+0x1b,0x72,0x72,0x69,0x61,0x67,0x65,0x72,0x65,0x74,0x75,0x72,0x6e,0x35,2,0x62,
+0x3e,0x6d,0x46,0x78,0x36,0x18,0x63,0x6c,0x61,0x6d,0x61,0x74,0x69,0x6f,0x6e,0x37,
+0x70,0x12,0x61,0x73,0x65,0x71,0x72,0x16,0x6f,0x64,0x69,0x66,0x69,0x65,0x72,0x73,
+1,0x64,0x42,0x6e,1,0x6f,0x32,0x75,0x26,0x14,0x6d,0x65,0x72,0x69,0x63,0x27,
+0x11,0x6e,0x65,0x21,1,0x65,0x2e,0x69,0x24,0x12,0x67,0x69,0x74,0x25,0x22,0x14,
+0x63,0x69,0x6d,0x61,0x6c,0x23,0,0x18,0x6e,0xc4,0x6f,0x74,0xc1,0x91,0x77,0x96,
+0x77,0xa2,0x4c,0x78,0xa2,0x70,0x79,0xa2,0x7a,0x7a,6,0x73,0x1e,0x73,0x34,0x78,
+0x42,0x79,0x48,0x7a,0x11,0x7a,0x7a,0xa3,0x67,0x10,0x79,1,0x65,0xa3,0xae,0x6d,
+0xa3,0x81,0x11,0x78,0x78,0xa3,0x66,0x11,0x79,0x79,0x21,0x61,0x30,0x69,0x58,0x6d,
+0x11,0x74,0x68,0xa3,0x80,0x10,0x6e,1,0x61,0x26,0x62,0xa3,0xb1,0x1a,0x62,0x61,
+0x7a,0x61,0x72,0x73,0x71,0x75,0x61,0x72,0x65,0xa3,0xb1,0x11,0x6e,0x68,0x23,2,
+0x61,0x30,0x63,0x5a,0x6f,0x11,0x6c,0x65,0xa3,0x9b,1,0x6e,0x3c,0x72,0x10,0x61,
+0xa2,0x92,0x15,0x6e,0x67,0x63,0x69,0x74,0x69,0xa3,0x92,0x12,0x63,0x68,0x6f,0xa3,
+0xbc,0x11,0x68,0x6f,0xa3,0xbc,1,0x70,0x2c,0x73,0x11,0x75,0x78,0xa3,0x65,0x11,
+0x65,0x6f,0x9b,1,0x65,0x2c,0x69,0x72,0x11,0x69,0x69,0x73,0x11,0x7a,0x69,0xa2,
+0xc0,0x11,0x64,0x69,0xa3,0xc0,0x74,0x66,0x75,0xa2,0xde,0x76,1,0x61,0x48,0x69,
+1,0x73,0x38,0x74,0x10,0x68,0xa2,0xc5,0x13,0x6b,0x75,0x71,0x69,0xa3,0xc5,0x10,
+0x70,0xa3,0x64,0x10,0x69,0xa2,0x63,0x10,0x69,0xa3,0x63,7,0x68,0x3e,0x68,0x34,
+0x69,0x48,0x6e,0x86,0x6f,0x11,0x74,0x6f,0xa3,0xc4,0x10,0x61,1,0x61,0x24,0x69,
+0x6d,0x6a,0x11,0x6e,0x61,0x6b,2,0x62,0x3a,0x66,0x4a,0x72,0x10,0x68,0xa2,0x9e,
+0x12,0x75,0x74,0x61,0xa3,0x9e,1,0x65,0x24,0x74,0x6f,0x12,0x74,0x61,0x6e,0x6f,
+0x14,0x69,0x6e,0x61,0x67,0x68,0x99,0x11,0x73,0x61,0xa3,0xc3,0x61,0x36,0x65,0xa2,
+0x65,0x66,0xa2,0x71,0x67,0x11,0x6c,0x67,0x75,6,0x6c,0x28,0x6c,0x32,0x6d,0x38,
+0x6e,0x44,0x76,0x10,0x74,0xa3,0x7f,1,0x65,0x89,0x75,0x97,1,0x69,0x24,0x6c,
+0x67,0x10,0x6c,0x67,0x10,0x67,0xa2,0x9a,1,0x73,0x2a,0x75,0x10,0x74,0xa3,0x9a,
+0x10,0x61,0xa3,0xc3,0x67,0x36,0x69,0x52,0x6b,0x10,0x72,0xa2,0x99,0x10,0x69,0xa3,
+0x99,1,0x61,0x30,0x62,0x7a,0x13,0x61,0x6e,0x77,0x61,0x7b,0x12,0x6c,0x6f,0x67,
+0x75,2,0x6c,0x32,0x74,0x34,0x76,0x12,0x69,0x65,0x74,0xa3,0x7f,0x10,0x65,0x89,
+0x12,0x68,0x61,0x6d,0xa3,0x6a,1,0x6c,0x2a,0x6e,0x10,0x67,0xa3,0x62,0x10,0x75,
+0x68,0x11,0x67,0x75,0x69,0x11,0x6e,0x67,0x99,1,0x67,0x32,0x6e,0x14,0x6b,0x6e,
+0x6f,0x77,0x6e,0xa3,0x67,0x11,0x61,0x72,0x8a,0x13,0x69,0x74,0x69,0x63,0x8b,0x71,
+0xc1,0x13,0x71,0xa2,0xde,0x72,0xa2,0xe3,0x73,6,0x69,0x8a,0x69,0x72,0x6f,0xa2,
+0x4c,0x75,0xa2,0x75,0x79,1,0x6c,0x46,0x72,4,0x63,0x65,0x65,0xa3,0x5f,0x69,
+0x2c,0x6a,0xa3,0x60,0x6e,0xa3,0x61,0x11,0x61,0x63,0x65,0x10,0x6f,0x94,0x16,0x74,
+0x69,0x6e,0x61,0x67,0x72,0x69,0x95,2,0x64,0x3c,0x67,0x4c,0x6e,1,0x64,0xa3,
+0x91,0x68,0x62,0x12,0x61,0x6c,0x61,0x63,0x10,0x64,0xa2,0xa6,0x12,0x68,0x61,0x6d,
+0xa3,0xa6,0x17,0x6e,0x77,0x72,0x69,0x74,0x69,0x6e,0x67,0xa3,0x70,2,0x67,0x3a,
+0x72,0x52,0x79,0x10,0x6f,0xa2,0xb0,0x12,0x6d,0x62,0x6f,0xa3,0xb0,1,0x64,0x26,
+0x6f,0xa3,0xb8,0xa2,0xb7,0x12,0x69,0x61,0x6e,0xa3,0xb7,0x10,0x61,0xa2,0x98,0x16,
+0x73,0x6f,0x6d,0x70,0x65,0x6e,0x67,0xa3,0x98,0x11,0x6e,0x64,0xa2,0x71,0x14,0x61,
+0x6e,0x65,0x73,0x65,0xa3,0x71,0x61,0x5c,0x67,0xa2,0x43,0x68,1,0x61,0x2a,0x72,
+0x10,0x64,0xa3,0x97,2,0x72,0x28,0x76,0x30,0x77,0x87,0x12,0x61,0x64,0x61,0xa3,
+0x97,0x12,0x69,0x61,0x6e,0x87,2,0x6d,0x40,0x72,0x58,0x75,0x10,0x72,0xa2,0x6f,
+0x15,0x61,0x73,0x68,0x74,0x72,0x61,0xa3,0x6f,1,0x61,0x26,0x72,0xa3,0x7e,0x14,
+0x72,0x69,0x74,0x61,0x6e,0xa3,0x7e,1,0x61,0xa3,0x5e,0x62,0xa3,0x85,0x11,0x6e,
+0x77,0xa3,0x70,0x11,0x61,0x61,1,0x63,0x2f,0x69,0x23,3,0x65,0x3e,0x6a,0x48,
+0x6f,0x4e,0x75,0x10,0x6e,1,0x69,0x24,0x72,0x61,0x10,0x63,0x61,0x13,0x6a,0x61,
+0x6e,0x67,0xa3,0x6e,0x11,0x6e,0x67,0xa3,0x6e,1,0x68,0x2a,0x72,0x10,0x6f,0xa3,
+0x5d,0x10,0x67,0xa3,0xb6,0x6e,0xa2,0x83,0x6f,0xa4,1,0x70,5,0x6c,0x1e,0x6c,
+0x44,0x72,0x4a,0x73,0x1b,0x61,0x6c,0x74,0x65,0x72,0x70,0x61,0x68,0x6c,0x61,0x76,
+0x69,0xa3,0x7b,0x11,0x72,0x64,0xa3,0x5c,0x11,0x74,0x69,0xa3,0x7d,0x61,0x7c,0x65,
+0xa2,0x54,0x68,3,0x61,0x3e,0x6c,0x4e,0x6e,0x5e,0x6f,0x16,0x65,0x6e,0x69,0x63,
+0x69,0x61,0x6e,0xa3,0x5b,0x10,0x67,0xa2,0x5a,0x12,0x73,0x70,0x61,0xa3,0x5a,2,
+0x69,0xa3,0x7a,0x70,0xa3,0x7b,0x76,0xa3,0x7c,0x10,0x78,0xa3,0x5b,2,0x68,0x3e,
+0x6c,0x50,0x75,0x10,0x63,0xa2,0xa5,0x14,0x69,0x6e,0x68,0x61,0x75,0xa3,0xa5,0x17,
+0x61,0x77,0x68,0x68,0x6d,0x6f,0x6e,0x67,0xa3,0x4b,0x10,0x6d,0xa2,0x90,0x14,0x79,
+0x72,0x65,0x6e,0x65,0xa3,0x90,0x11,0x72,0x6d,0xa3,0x59,6,0x6b,0x36,0x6b,0x56,
+0x73,0x6e,0x75,0x74,0x79,0x11,0x69,0x61,0x1f,0x6b,0x65,0x6e,0x67,0x70,0x75,0x61,
+0x63,0x68,0x75,0x65,0x68,0x6d,0x6f,0x6e,0x67,0xa3,0xba,1,0x67,0x2e,0x6f,0xa2,
+0x57,0x10,0x6f,0xa3,0x57,0x10,0x62,0xa3,0x84,0x11,0x68,0x75,0xa3,0x96,0x12,0x73,
+0x68,0x75,0xa3,0x96,0x61,0x42,0x62,0x9e,0x65,0x10,0x77,1,0x61,0xa3,0xaa,0x74,
+0x14,0x61,0x69,0x6c,0x75,0x65,0x97,3,0x62,0x32,0x67,0x40,0x6e,0x56,0x72,0x10,
+0x62,0xa3,0x8e,0x15,0x61,0x74,0x61,0x65,0x61,0x6e,0xa3,0x8f,0x10,0x6d,0xa2,0xc7,
+0x15,0x75,0x6e,0x64,0x61,0x72,0x69,0xa3,0xc7,0x10,0x64,0xa2,0xbb,0x16,0x69,0x6e,
+0x61,0x67,0x61,0x72,0x69,0xa3,0xbb,0x11,0x61,0x74,0xa3,0x8f,4,0x67,0x3c,0x6c,
+0x4e,0x72,0xa2,0x8e,0x73,0xa2,0x9c,0x75,0x11,0x67,0x72,0xa3,0xc2,1,0x61,0x2a,
+0x68,0x11,0x61,0x6d,0x5b,0x10,0x6d,0x5b,1,0x63,0xa2,0x6a,0x64,6,0x70,0x41,
+0x70,0x3a,0x73,0x58,0x74,0x86,0x75,0x14,0x79,0x67,0x68,0x75,0x72,0xa3,0xc2,0x11,
+0x65,0x72,1,0x6d,0x2c,0x73,0x12,0x69,0x61,0x6e,0x9b,0x11,0x69,0x63,0xa3,0x59,
+0x10,0x6f,1,0x67,0x3a,0x75,0x18,0x74,0x68,0x61,0x72,0x61,0x62,0x69,0x61,0x6e,
+0xa3,0x85,0x13,0x64,0x69,0x61,0x6e,0xa3,0xb8,0x14,0x75,0x72,0x6b,0x69,0x63,0xa3,
+0x58,0x68,0x42,0x69,0x54,0x6e,0x1a,0x6f,0x72,0x74,0x68,0x61,0x72,0x61,0x62,0x69,
+0x61,0x6e,0xa3,0x8e,0x17,0x75,0x6e,0x67,0x61,0x72,0x69,0x61,0x6e,0xa3,0x4c,0x14,
+0x74,0x61,0x6c,0x69,0x63,0x5d,1,0x68,0x26,0x6b,0xa3,0x6d,0x12,0x69,0x6b,0x69,
+0xa3,0x6d,2,0x69,0x2c,0x6b,0x30,0x79,0x10,0x61,0x5f,0x11,0x79,0x61,0x5f,0x10,
+0x68,0xa3,0x58,2,0x61,0x36,0x67,0x3c,0x6d,0x10,0x61,0x84,0x12,0x6e,0x79,0x61,
+0x85,0x11,0x67,0x65,0xa3,0xab,0x10,0x65,0xa3,0xab,0x68,0xc3,0x15,0x6b,0xc2,0x2c,
+0x6b,0xa4,0x17,0x6c,0xa4,0xba,0x6d,8,0x6f,0x46,0x6f,0x48,0x72,0x74,0x74,0x80,
+0x75,0x86,0x79,1,0x61,0x28,0x6d,0x10,0x72,0x59,0x13,0x6e,0x6d,0x61,0x72,0x59,
+2,0x64,0x2e,0x6e,0x32,0x6f,0x10,0x6e,0xa3,0x72,0x10,0x69,0xa3,0xa3,0x10,0x67,
+0x56,0x14,0x6f,0x6c,0x69,0x61,0x6e,0x57,0x10,0x6f,0xa2,0x95,0x10,0x6f,0xa3,0x95,
+0x11,0x65,0x69,0xa3,0x73,0x11,0x6c,0x74,0xa2,0xa4,0x12,0x61,0x6e,0x69,0xa3,0xa4,
+0x61,0x36,0x65,0xa2,0x67,0x69,0xa2,0xbd,0x6c,0x11,0x79,0x6d,0x55,6,0x6e,0x38,
+0x6e,0x32,0x72,0x5c,0x73,0x6c,0x79,0x10,0x61,0xa3,0x55,1,0x64,0x38,0x69,0xa2,
+0x79,0x15,0x63,0x68,0x61,0x65,0x61,0x6e,0xa3,0x79,0xa2,0x54,0x12,0x61,0x69,0x63,
+0xa3,0x54,0x10,0x63,0xa2,0xa9,0x12,0x68,0x65,0x6e,0xa3,0xa9,0x18,0x61,0x72,0x61,
+0x6d,0x67,0x6f,0x6e,0x64,0x69,0xa3,0xaf,0x68,0x36,0x6b,0x4c,0x6c,0x15,0x61,0x79,
+0x61,0x6c,0x61,0x6d,0x55,1,0x61,0x26,0x6a,0xa3,0xa0,0x13,0x6a,0x61,0x6e,0x69,
+0xa3,0xa0,0x10,0x61,0xa2,0xb4,0x12,0x73,0x61,0x72,0xa3,0xb4,3,0x64,0x78,0x65,
+0x94,0x6e,0xa2,0x42,0x72,1,0x63,0xa3,0x8d,0x6f,0xa2,0x56,0x13,0x69,0x74,0x69,
+0x63,1,0x63,0x3c,0x68,0x19,0x69,0x65,0x72,0x6f,0x67,0x6c,0x79,0x70,0x68,0x73,
+0xa3,0x56,0x15,0x75,0x72,0x73,0x69,0x76,0x65,0xa3,0x8d,1,0x65,0x26,0x66,0xa3,
+0xb5,0x16,0x66,0x61,0x69,0x64,0x72,0x69,0x6e,0xa3,0xb5,0x17,0x74,0x65,0x69,0x6d,
+0x61,0x79,0x65,0x6b,0xa3,0x73,0x10,0x64,0xa2,0x8c,0x17,0x65,0x6b,0x69,0x6b,0x61,
+0x6b,0x75,0x69,0xa3,0x8c,0x11,0x61,0x6f,0xa3,0x5c,6,0x6e,0x1a,0x6e,0x34,0x6f,
+0x38,0x70,0x3e,0x74,0x11,0x68,0x69,0xa3,0x78,0x11,0x64,0x61,0x4b,0x11,0x72,0x65,
+0xa3,0x77,0x11,0x65,0x6c,0xa3,0x8a,0x61,0x32,0x68,0xa2,0x44,0x69,0x11,0x74,0x73,
+0xa3,0xbf,5,0x74,0x23,0x74,0x34,0x77,0x56,0x79,0x13,0x61,0x68,0x6c,0x69,0xa3,
+0x4f,0x14,0x61,0x6b,0x61,0x6e,0x61,0x4c,0x19,0x6f,0x72,0x68,0x69,0x72,0x61,0x67,
+0x61,0x6e,0x61,0x8d,0x10,0x69,0xa3,0xc6,0x69,0x38,0x6c,0x40,0x6e,1,0x61,0x4d,
+0x6e,0x12,0x61,0x64,0x61,0x4b,0x12,0x74,0x68,0x69,0xa3,0x78,0x10,0x69,0xa3,0x4f,
+4,0x61,0x40,0x69,0x52,0x6d,0x70,0x6f,0x7c,0x75,0x15,0x64,0x61,0x77,0x61,0x64,
+0x69,0xa3,0x91,0x10,0x72,0x92,0x15,0x6f,0x73,0x68,0x74,0x68,0x69,0x93,0x1d,0x74,
+0x61,0x6e,0x73,0x6d,0x61,0x6c,0x6c,0x73,0x63,0x72,0x69,0x70,0x74,0xa3,0xbf,1,
+0x65,0x24,0x72,0x4f,0x10,0x72,0x4f,0x10,0x6a,0xa2,0x9d,0x11,0x6b,0x69,0xa3,0x9d,
+4,0x61,0x5c,0x65,0x90,0x69,0xa0,0x6f,0xa2,0x5d,0x79,1,0x63,0x34,0x64,0x10,
+0x69,0xa2,0x6c,0x11,0x61,0x6e,0xa3,0x6c,0x10,0x69,0xa2,0x6b,0x11,0x61,0x6e,0xa3,
+0x6b,2,0x6e,0x42,0x6f,0x46,0x74,3,0x66,0xa3,0x50,0x67,0xa3,0x51,0x69,0x24,
+0x6e,0x53,0x10,0x6e,0x53,0x10,0x61,0xa3,0x6a,0x50,0x10,0x6f,0x51,0x11,0x70,0x63,
+0xa2,0x52,0x11,0x68,0x61,0xa3,0x52,2,0x6d,0x2e,0x6e,0x36,0x73,0x10,0x75,0xa3,
+0x83,0x10,0x62,0x80,0x10,0x75,0x81,2,0x61,0xa3,0x53,0x62,0x83,0x65,0x11,0x61,
+0x72,1,0x61,0xa3,0x53,0x62,0x83,0x11,0x6d,0x61,0xa3,0x8b,0x68,0x6e,0x69,0xa2,
+0x95,0x6a,2,0x61,0x30,0x70,0x52,0x75,0x11,0x72,0x63,0xa3,0x94,1,0x6d,0x38,
+0x76,0x10,0x61,0xa2,0x4e,0x13,0x6e,0x65,0x73,0x65,0xa3,0x4e,0x10,0x6f,0xa3,0xad,
+0x11,0x61,0x6e,0xa3,0x69,6,0x6c,0x1e,0x6c,0x34,0x6d,0x3a,0x72,0x48,0x75,0x11,
+0x6e,0x67,0xa3,0x4c,0x11,0x75,0x77,0xa3,0x9c,0x10,0x6e,1,0x67,0xa3,0x4b,0x70,
+0xa3,0xba,0x11,0x6b,0x74,0x8d,0x61,0x3c,0x65,0xa2,0x43,0x69,0x11,0x72,0x61,0x48,
+0x13,0x67,0x61,0x6e,0x61,0x49,1,0x6e,0x34,0x74,0x10,0x72,0xa2,0xa2,0x11,0x61,
+0x6e,0xa3,0xa2,0x42,6,0x6f,0xe,0x6f,0x77,0x73,0xa3,0x49,0x74,0xa3,0x4a,0x75,
+0x12,0x6e,0x6f,0x6f,0x77,0x62,0xa3,0xac,0x67,0x3e,0x69,0x42,0x19,0x66,0x69,0x72,
+0x6f,0x68,0x69,0x6e,0x67,0x79,0x61,0xa3,0xb6,0x44,0x11,0x75,0x6c,0x45,0x11,0x62,
+0x72,0x46,0x11,0x65,0x77,0x47,2,0x6d,0x2e,0x6e,0x4a,0x74,0x11,0x61,0x6c,0x5d,
+0x1c,0x70,0x65,0x72,0x69,0x61,0x6c,0x61,0x72,0x61,0x6d,0x61,0x69,0x63,0xa3,0x74,
+2,0x64,0x66,0x68,0x6a,0x73,0x1b,0x63,0x72,0x69,0x70,0x74,0x69,0x6f,0x6e,0x61,
+0x6c,0x70,0x61,1,0x68,0x32,0x72,0x14,0x74,0x68,0x69,0x61,0x6e,0xa3,0x7d,0x13,
+0x6c,0x61,0x76,0x69,0xa3,0x7a,0x10,0x73,0xa3,0x4d,0x15,0x65,0x72,0x69,0x74,0x65,
+0x64,0x23,0x64,0xc1,0xd,0x64,0xa2,0x7a,0x65,0xa2,0xc1,0x67,4,0x65,0x82,0x6c,
+0x9a,0x6f,0xa2,0x46,0x72,0xa2,0x55,0x75,2,0x6a,0x3c,0x6e,0x4e,0x72,1,0x6d,
+0x24,0x75,0x41,0x13,0x75,0x6b,0x68,0x69,0x41,1,0x61,0x24,0x72,0x3f,0x13,0x72,
+0x61,0x74,0x69,0x3f,0x18,0x6a,0x61,0x6c,0x61,0x67,0x6f,0x6e,0x64,0x69,0xa3,0xb3,
+0x10,0x6f,1,0x6b,0xa3,0x48,0x72,0x38,0x13,0x67,0x69,0x61,0x6e,0x39,0x11,0x61,
+0x67,0x90,0x15,0x6f,0x6c,0x69,0x74,0x69,0x63,0x91,1,0x6e,0x30,0x74,0x10,0x68,
+0x3a,0x11,0x69,0x63,0x3b,1,0x67,0xa3,0xb3,0x6d,0xa3,0xaf,1,0x61,0x32,0x65,
+1,0x65,0x24,0x6b,0x3d,0x10,0x6b,0x3d,0x10,0x6e,0xa2,0x89,0x12,0x74,0x68,0x61,
+0xa3,0x89,4,0x65,0x46,0x69,0x6c,0x6f,0x8c,0x73,0x9a,0x75,0x11,0x70,0x6c,0xa2,
+0x87,0x13,0x6f,0x79,0x61,0x6e,0xa3,0x87,1,0x73,0x38,0x76,0x10,0x61,0x34,0x15,
+0x6e,0x61,0x67,0x61,0x72,0x69,0x35,0x13,0x65,0x72,0x65,0x74,0x33,1,0x61,0x36,
+0x76,0x16,0x65,0x73,0x61,0x6b,0x75,0x72,0x75,0xa3,0xbe,0x10,0x6b,0xa3,0xbe,0x11,
+0x67,0x72,0xa2,0xb2,0x10,0x61,0xa3,0xb2,0x11,0x72,0x74,0x33,2,0x67,0x3a,0x6c,
+0x72,0x74,0x11,0x68,0x69,0x36,0x13,0x6f,0x70,0x69,0x63,0x37,0x10,0x79,2,0x64,
+0xa3,0x45,0x68,0xa3,0x46,0x70,0xa2,0x47,0x1e,0x74,0x69,0x61,0x6e,0x68,0x69,0x65,
+0x72,0x6f,0x67,0x6c,0x79,0x70,0x68,0x73,0xa3,0x47,1,0x62,0x36,0x79,0x10,0x6d,
+0xa2,0xb9,0x12,0x61,0x69,0x63,0xa3,0xb9,0x10,0x61,0xa2,0x88,0x12,0x73,0x61,0x6e,
+0xa3,0x88,0x61,0xa2,0xc9,0x62,0xa4,0x2e,0x63,6,0x6f,0x52,0x6f,0x76,0x70,0x92,
+0x75,0xa2,0x41,0x79,1,0x70,0x3e,0x72,2,0x69,0x2a,0x6c,0x31,0x73,0xa3,0x44,
+0x13,0x6c,0x6c,0x69,0x63,0x31,0x10,0x72,1,0x69,0x34,0x6f,0x15,0x6d,0x69,0x6e,
+0x6f,0x61,0x6e,0xa3,0xc1,0x11,0x6f,0x74,0x7f,1,0x6d,0x30,0x70,0x10,0x74,0x2e,
+0x11,0x69,0x63,0x2f,0x12,0x6d,0x6f,0x6e,0x21,1,0x6d,0x28,0x72,0x10,0x74,0x7f,
+0x10,0x6e,0xa3,0xc1,0x16,0x6e,0x65,0x69,0x66,0x6f,0x72,0x6d,0xa3,0x65,0x61,0x32,
+0x68,0xa2,0x41,0x69,0x11,0x72,0x74,0xa3,0x43,3,0x6b,0x4c,0x6e,0x50,0x72,0x76,
+0x75,0x1d,0x63,0x61,0x73,0x69,0x61,0x6e,0x61,0x6c,0x62,0x61,0x6e,0x69,0x61,0x6e,
+0xa3,0x9f,0x10,0x6d,0xa3,0x76,1,0x61,0x24,0x73,0x71,0x1d,0x64,0x69,0x61,0x6e,
+0x61,0x62,0x6f,0x72,0x69,0x67,0x69,0x6e,0x61,0x6c,0x71,0x10,0x69,0xa2,0x68,0x11,
+0x61,0x6e,0xa3,0x68,3,0x61,0x32,0x65,0x44,0x6f,0x52,0x72,0x10,0x73,0xa3,0xbd,
+1,0x6b,0x26,0x6d,0xa3,0x42,0x11,0x6d,0x61,0xa3,0x76,0x10,0x72,0x2c,0x13,0x6f,
+0x6b,0x65,0x65,0x2d,0x16,0x72,0x61,0x73,0x6d,0x69,0x61,0x6e,0xa3,0xbd,6,0x68,
+0x4a,0x68,0x48,0x6e,0x4e,0x72,0x76,0x76,1,0x65,0x2a,0x73,0x10,0x74,0xa3,0x75,
+0x13,0x73,0x74,0x61,0x6e,0xa3,0x75,0x11,0x6f,0x6d,0xa3,0xa1,0x11,0x61,0x74,0x1f,
+0x6f,0x6c,0x69,0x61,0x6e,0x68,0x69,0x65,0x72,0x6f,0x67,0x6c,0x79,0x70,0x68,0x73,
+0xa3,0x9c,1,0x61,0x3e,0x6d,2,0x65,0x2a,0x69,0xa3,0x74,0x6e,0x27,0x13,0x6e,
+0x69,0x61,0x6e,0x27,0x10,0x62,0x24,0x11,0x69,0x63,0x25,0x64,0x30,0x66,0x44,0x67,
+0x11,0x68,0x62,0xa3,0x9f,0x10,0x6c,1,0x61,0x26,0x6d,0xa3,0xa7,0x10,0x6d,0xa3,
+0xa7,0x11,0x61,0x6b,0xa3,0x93,6,0x6c,0x3c,0x6c,0x52,0x6f,0x56,0x72,0x66,0x75,
+1,0x67,0x30,0x68,1,0x64,0x79,0x69,0x10,0x64,0x79,0x10,0x69,0x8e,0x13,0x6e,
+0x65,0x73,0x65,0x8f,0x11,0x69,0x73,0xa1,0x11,0x70,0x6f,0x2a,0x13,0x6d,0x6f,0x66,
+0x6f,0x2b,0x10,0x61,1,0x68,0x2e,0x69,0x7c,0x12,0x6c,0x6c,0x65,0x7d,0xa2,0x41,
+0x11,0x6d,0x69,0xa3,0x41,0x61,0x48,0x65,0x9c,0x68,1,0x61,0x2a,0x6b,0x10,0x73,
+0xa3,0xa8,0x15,0x69,0x6b,0x73,0x75,0x6b,0x69,0xa3,0xa8,3,0x6c,0x3a,0x6d,0x48,
+0x73,0x54,0x74,1,0x61,0x24,0x6b,0x9f,0x10,0x6b,0x9f,0x10,0x69,0x9c,0x13,0x6e,
+0x65,0x73,0x65,0x9d,0x10,0x75,0xa2,0x82,0x10,0x6d,0xa3,0x82,0x10,0x73,0xa2,0x86,
+0x13,0x61,0x76,0x61,0x68,0xa3,0x86,0x11,0x6e,0x67,0x28,0x12,0x61,0x6c,0x69,0x29,
+3,0x6c,0x42,0x6e,0x90,0x74,0xa2,0x46,0x76,0x24,0x17,0x6f,0x77,0x65,0x6c,0x6a,
+0x61,0x6d,0x6f,0x25,0x22,1,0x65,0x54,0x76,0x28,1,0x73,0x38,0x74,0x2a,0x17,
+0x73,0x79,0x6c,0x6c,0x61,0x62,0x6c,0x65,0x2b,0x16,0x79,0x6c,0x6c,0x61,0x62,0x6c,
+0x65,0x29,0x18,0x61,0x64,0x69,0x6e,0x67,0x6a,0x61,0x6d,0x6f,0x23,1,0x61,0x21,
+0x6f,0x1a,0x74,0x61,0x70,0x70,0x6c,0x69,0x63,0x61,0x62,0x6c,0x65,0x21,0x26,0x1a,
+0x72,0x61,0x69,0x6c,0x69,0x6e,0x67,0x6a,0x61,0x6d,0x6f,0x27,1,0x6e,0x2c,0x79,
+0x22,0x11,0x65,0x73,0x23,0x20,0x10,0x6f,0x21,1,0x6e,0x2c,0x79,0x22,0x11,0x65,
+0x73,0x23,0x20,0x10,0x6f,0x21,2,0x6d,0x30,0x6e,0x3a,0x79,0x22,0x11,0x65,0x73,
+0x23,0x24,0x13,0x61,0x79,0x62,0x65,0x25,0x20,0x10,0x6f,0x21,2,0x6d,0x30,0x6e,
+0x3a,0x79,0x22,0x11,0x65,0x73,0x23,0x24,0x13,0x61,0x79,0x62,0x65,0x25,0x20,0x10,
+0x6f,0x21,0xb,0x72,0x39,0x76,0xc,0x76,0x33,0x78,0x2a,0x7a,0x11,0x77,0x6a,0x43,
+0x10,0x78,0x21,0x72,0x28,0x73,0x50,0x74,0x31,1,0x65,0x24,0x69,0x39,0x1e,0x67,
+0x69,0x6f,0x6e,0x61,0x6c,0x69,0x6e,0x64,0x69,0x63,0x61,0x74,0x6f,0x72,0x39,1,
+0x6d,0x35,0x70,0x18,0x61,0x63,0x69,0x6e,0x67,0x6d,0x61,0x72,0x6b,0x35,0x6c,0x1f,
+0x6c,0x3c,0x6f,0x4a,0x70,1,0x70,0x37,0x72,0x14,0x65,0x70,0x65,0x6e,0x64,0x37,
+0x28,1,0x66,0x2b,0x76,0x2c,0x10,0x74,0x2f,0x13,0x74,0x68,0x65,0x72,0x21,0x63,
+0x4c,0x65,0x64,0x67,1,0x61,0x3a,0x6c,0x19,0x75,0x65,0x61,0x66,0x74,0x65,0x72,
+0x7a,0x77,0x6a,0x41,0x10,0x7a,0x41,2,0x6e,0x23,0x6f,0x24,0x72,0x25,0x14,0x6e,
+0x74,0x72,0x6f,0x6c,0x23,2,0x62,0x34,0x6d,0x4e,0x78,0x26,0x13,0x74,0x65,0x6e,
+0x64,0x27,0x3a,1,0x61,0x24,0x67,0x3d,0x11,0x73,0x65,0x3a,0x12,0x67,0x61,0x7a,
+0x3d,0x3e,0x16,0x6f,0x64,0x69,0x66,0x69,0x65,0x72,0x3f,9,0x6e,0x4a,0x6e,0x34,
+0x6f,0x44,0x73,0x60,0x75,0x94,0x78,0x10,0x78,0x21,0x10,0x75,0x2a,0x14,0x6d,0x65,
+0x72,0x69,0x63,0x2b,1,0x6c,0x2c,0x74,0x12,0x68,0x65,0x72,0x21,0x14,0x65,0x74,
+0x74,0x65,0x72,0x2d,3,0x63,0x36,0x65,0x46,0x70,0x31,0x74,0x32,0x12,0x65,0x72,
+0x6d,0x33,0x3c,0x16,0x6f,0x6e,0x74,0x69,0x6e,0x75,0x65,0x3d,0x2e,0x10,0x70,0x2f,
+0x10,0x70,0x34,0x12,0x70,0x65,0x72,0x35,0x61,0x46,0x63,0x52,0x65,0x64,0x66,0x72,
+0x6c,2,0x65,0x2d,0x66,0x3b,0x6f,0x28,0x12,0x77,0x65,0x72,0x29,0x10,0x74,0x22,
+0x12,0x65,0x72,0x6d,0x23,1,0x6c,0x24,0x72,0x37,0x24,0x12,0x6f,0x73,0x65,0x25,
+0x10,0x78,0x38,0x13,0x74,0x65,0x6e,0x64,0x39,0x10,0x6f,0x26,0x13,0x72,0x6d,0x61,
+0x74,0x27,0,0x10,0x6c,0x88,0x72,0x40,0x72,0x36,0x73,0x5e,0x77,0x7a,0x78,0x8a,
+0x7a,0x11,0x77,0x6a,0x4b,1,0x65,0x24,0x69,0x3b,0x1e,0x67,0x69,0x6f,0x6e,0x61,
+0x6c,0x69,0x6e,0x64,0x69,0x63,0x61,0x74,0x6f,0x72,0x3b,1,0x69,0x24,0x71,0x3f,
+0x18,0x6e,0x67,0x6c,0x65,0x71,0x75,0x6f,0x74,0x65,0x3f,0x17,0x73,0x65,0x67,0x73,
+0x70,0x61,0x63,0x65,0x4d,0x10,0x78,0x21,0x6c,0x36,0x6d,0x3c,0x6e,0x76,0x6f,0x13,
+0x74,0x68,0x65,0x72,0x21,1,0x65,0x23,0x66,0x35,3,0x62,0x37,0x69,0x28,0x6c,
+0x29,0x6e,0x2b,0x10,0x64,1,0x6c,0x34,0x6e,0x11,0x75,0x6d,0x2a,0x12,0x6c,0x65,
+0x74,0x37,0x14,0x65,0x74,0x74,0x65,0x72,0x29,2,0x65,0x36,0x6c,0x39,0x75,0x2c,
+0x14,0x6d,0x65,0x72,0x69,0x63,0x2d,0x14,0x77,0x6c,0x69,0x6e,0x65,0x39,0x66,0x3f,
+0x66,0x40,0x67,0x4e,0x68,0x70,0x6b,0x10,0x61,0x26,0x15,0x74,0x61,0x6b,0x61,0x6e,
+0x61,0x27,0x10,0x6f,0x24,0x13,0x72,0x6d,0x61,0x74,0x25,1,0x61,0x3a,0x6c,0x19,
+0x75,0x65,0x61,0x66,0x74,0x65,0x72,0x7a,0x77,0x6a,0x49,0x10,0x7a,0x49,1,0x65,
+0x24,0x6c,0x3d,0x19,0x62,0x72,0x65,0x77,0x6c,0x65,0x74,0x74,0x65,0x72,0x3d,0x61,
+0x86,0x63,0x92,0x64,0x94,0x65,2,0x62,0x44,0x6d,0x5e,0x78,0x2e,0x13,0x74,0x65,
+0x6e,0x64,0x32,0x15,0x6e,0x75,0x6d,0x6c,0x65,0x74,0x2f,0x42,1,0x61,0x24,0x67,
+0x45,0x11,0x73,0x65,0x42,0x12,0x67,0x61,0x7a,0x45,0x46,0x16,0x6f,0x64,0x69,0x66,
+0x69,0x65,0x72,0x47,0x15,0x6c,0x65,0x74,0x74,0x65,0x72,0x23,0x10,0x72,0x31,1,
+0x6f,0x24,0x71,0x41,0x18,0x75,0x62,0x6c,0x65,0x71,0x75,0x6f,0x74,0x65,0x41,2,
+0x63,0x32,0x6e,0x3c,0x6f,0x22,0x12,0x70,0x65,0x6e,0x23,0x24,0x13,0x6c,0x6f,0x73,
+0x65,0x25,0x20,0x12,0x6f,0x6e,0x65,0x21,6,0x6f,0x65,0x6f,0x4a,0x72,0x5c,0x74,
+0x64,0x76,0x1d,0x69,0x73,0x75,0x61,0x6c,0x6f,0x72,0x64,0x65,0x72,0x6c,0x65,0x66,
+0x74,0x3d,0x18,0x76,0x65,0x72,0x73,0x74,0x72,0x75,0x63,0x6b,0x2d,0x13,0x69,0x67,
+0x68,0x74,0x2f,0x11,0x6f,0x70,0x30,0x12,0x61,0x6e,0x64,2,0x62,0x32,0x6c,0x62,
+0x72,0x13,0x69,0x67,0x68,0x74,0x3b,0x14,0x6f,0x74,0x74,0x6f,0x6d,0x32,0x12,0x61,
+0x6e,0x64,1,0x6c,0x2e,0x72,0x13,0x69,0x67,0x68,0x74,0x35,0x12,0x65,0x66,0x74,
+0x3f,0x12,0x65,0x66,0x74,0x36,0x17,0x61,0x6e,0x64,0x72,0x69,0x67,0x68,0x74,0x39,
+0x62,0x2c,0x6c,0x5c,0x6e,0x10,0x61,0x21,0x14,0x6f,0x74,0x74,0x6f,0x6d,0x22,0x12,
+0x61,0x6e,0x64,1,0x6c,0x2e,0x72,0x13,0x69,0x67,0x68,0x74,0x27,0x12,0x65,0x66,
+0x74,0x25,0x12,0x65,0x66,0x74,0x28,0x17,0x61,0x6e,0x64,0x72,0x69,0x67,0x68,0x74,
+0x2b,0xd,0x6e,0xaa,0x72,0x70,0x72,0x92,0x73,0xa2,0x46,0x74,0xa2,0x54,0x76,1,
+0x69,0x60,0x6f,0x12,0x77,0x65,0x6c,0x62,1,0x64,0x3a,0x69,0x19,0x6e,0x64,0x65,
+0x70,0x65,0x6e,0x64,0x65,0x6e,0x74,0x67,0x17,0x65,0x70,0x65,0x6e,0x64,0x65,0x6e,
+0x74,0x65,1,0x72,0x2e,0x73,0x13,0x61,0x72,0x67,0x61,0x61,0x12,0x61,0x6d,0x61,
+0x5f,0x1d,0x65,0x67,0x69,0x73,0x74,0x65,0x72,0x73,0x68,0x69,0x66,0x74,0x65,0x72,
+0x57,0x1e,0x79,0x6c,0x6c,0x61,0x62,0x6c,0x65,0x6d,0x6f,0x64,0x69,0x66,0x69,0x65,
+0x72,0x59,0x12,0x6f,0x6e,0x65,1,0x6c,0x2c,0x6d,0x12,0x61,0x72,0x6b,0x5d,0x14,
+0x65,0x74,0x74,0x65,0x72,0x5b,0x6e,0x3c,0x6f,0x7c,0x70,0x18,0x75,0x72,0x65,0x6b,
+0x69,0x6c,0x6c,0x65,0x72,0x55,1,0x6f,0x4c,0x75,1,0x6b,0x3c,0x6d,0x12,0x62,
+0x65,0x72,0x50,0x15,0x6a,0x6f,0x69,0x6e,0x65,0x72,0x53,0x11,0x74,0x61,0x4f,0x16,
+0x6e,0x6a,0x6f,0x69,0x6e,0x65,0x72,0x4d,0x13,0x74,0x68,0x65,0x72,0x21,0x67,0x3e,
+0x67,0x4a,0x69,0x64,0x6a,0x82,0x6d,0x1d,0x6f,0x64,0x69,0x66,0x79,0x69,0x6e,0x67,
+0x6c,0x65,0x74,0x74,0x65,0x72,0x4b,0x1c,0x65,0x6d,0x69,0x6e,0x61,0x74,0x69,0x6f,
+0x6e,0x6d,0x61,0x72,0x6b,0x45,0x1e,0x6e,0x76,0x69,0x73,0x69,0x62,0x6c,0x65,0x73,
+0x74,0x61,0x63,0x6b,0x65,0x72,0x47,0x14,0x6f,0x69,0x6e,0x65,0x72,0x49,0x61,0xa2,
+0xba,0x62,0xa2,0xc0,0x63,1,0x61,0xa2,0xa2,0x6f,0x16,0x6e,0x73,0x6f,0x6e,0x61,
+0x6e,0x74,0x2a,8,0x6b,0x67,0x6b,0x48,0x6d,0x52,0x70,0x5c,0x73,0xa2,0x42,0x77,
+0x19,0x69,0x74,0x68,0x73,0x74,0x61,0x63,0x6b,0x65,0x72,0x43,0x14,0x69,0x6c,0x6c,
+0x65,0x72,0x35,0x14,0x65,0x64,0x69,0x61,0x6c,0x37,1,0x6c,0x52,0x72,0x10,0x65,
+1,0x63,0x2e,0x66,0x13,0x69,0x78,0x65,0x64,0x3d,0x19,0x65,0x64,0x69,0x6e,0x67,
+0x72,0x65,0x70,0x68,0x61,0x3b,0x18,0x61,0x63,0x65,0x68,0x6f,0x6c,0x64,0x65,0x72,
+0x39,0x10,0x75,1,0x62,0x3e,0x63,0x1b,0x63,0x65,0x65,0x64,0x69,0x6e,0x67,0x72,
+0x65,0x70,0x68,0x61,0x41,0x15,0x6a,0x6f,0x69,0x6e,0x65,0x64,0x3f,0x64,0x4c,0x66,
+0x52,0x68,0x5a,0x69,0x1e,0x6e,0x69,0x74,0x69,0x61,0x6c,0x70,0x6f,0x73,0x74,0x66,
+0x69,0x78,0x65,0x64,0x33,0x12,0x65,0x61,0x64,0x2d,0x13,0x69,0x6e,0x61,0x6c,0x2f,
+0x18,0x65,0x61,0x64,0x6c,0x65,0x74,0x74,0x65,0x72,0x31,0x1d,0x6e,0x74,0x69,0x6c,
+0x6c,0x61,0x74,0x69,0x6f,0x6e,0x6d,0x61,0x72,0x6b,0x29,0x16,0x76,0x61,0x67,0x72,
+0x61,0x68,0x61,0x23,1,0x69,0x4a,0x72,0x10,0x61,0x1f,0x68,0x6d,0x69,0x6a,0x6f,
+0x69,0x6e,0x69,0x6e,0x67,0x6e,0x75,0x6d,0x62,0x65,0x72,0x27,0x12,0x6e,0x64,0x75,
+0x25,2,0x72,0x38,0x74,0x46,0x75,0x26,0x15,0x70,0x72,0x69,0x67,0x68,0x74,0x27,
+0x20,0x15,0x6f,0x74,0x61,0x74,0x65,0x64,0x21,1,0x72,0x24,0x75,0x25,0x22,0x18,
+0x61,0x6e,0x73,0x66,0x6f,0x72,0x6d,0x65,0x64,1,0x72,0x32,0x75,0x15,0x70,0x72,
+0x69,0x67,0x68,0x74,0x25,0x15,0x6f,0x74,0x61,0x74,0x65,0x64,0x23,0xd,0x6e,0xc1,
+0x86,0x73,0xa8,0x73,0x4c,0x74,0xa2,0x76,0x75,0xa2,0x83,0x7a,0xd8,0x70,0,2,
+0x6c,0xd9,0x20,0,0x70,0xd9,0x40,0,0x73,0xc3,0,0xfe,0xf,0,0,0,
+7,0x6f,0x3c,0x6f,0xff,8,0,0,0,0x70,0x3a,0x75,0x6e,0x79,0x13,0x6d,
+0x62,0x6f,0x6c,0xff,0xf,0,0,0,0x11,0x61,0x63,1,0x65,0x34,0x69,0x15,
+0x6e,0x67,0x6d,0x61,0x72,0x6b,0xa5,0,0x18,0x73,0x65,0x70,0x61,0x72,0x61,0x74,
+0x6f,0x72,0xc3,0,0x16,0x72,0x72,0x6f,0x67,0x61,0x74,0x65,0xe1,0,0,0x63,
+0xff,2,0,0,0,0x65,0x38,0x6b,0xff,4,0,0,0,0x6d,0xff,1,
+0,0,0,0x16,0x70,0x61,0x72,0x61,0x74,0x6f,0x72,0xd9,0x70,0,0x1d,0x69,
+0x74,0x6c,0x65,0x63,0x61,0x73,0x65,0x6c,0x65,0x74,0x74,0x65,0x72,0x31,1,0x6e,
+0x40,0x70,0x1c,0x70,0x65,0x72,0x63,0x61,0x73,0x65,0x6c,0x65,0x74,0x74,0x65,0x72,
+0x25,0x17,0x61,0x73,0x73,0x69,0x67,0x6e,0x65,0x64,0x23,0x6e,0xa2,0x69,0x6f,0xa2,
+0x89,0x70,0xfe,0x30,0xf8,0,0,9,0x69,0x33,0x69,0xff,0x10,0,0,0,
+0x6f,0xfd,0x80,0,0,0x72,0x54,0x73,0xf9,0,0,0x75,0x12,0x6e,0x63,0x74,
+0xfe,0x30,0xf8,0,0,0x15,0x75,0x61,0x74,0x69,0x6f,0x6e,0xff,0x30,0xf8,0,
+0,0x17,0x69,0x76,0x61,0x74,0x65,0x75,0x73,0x65,0xdd,0,0,0x61,0x48,0x63,
+0xfd,0x40,0,0,0x64,0xe9,0,0,0x65,0xfd,0x20,0,0,0x66,0xff,0x20,
+0,0,0,0x1f,0x72,0x61,0x67,0x72,0x61,0x70,0x68,0x73,0x65,0x70,0x61,0x72,
+0x61,0x74,0x6f,0x72,0xd9,0x40,0,0xbe,0,3,0x64,0xa7,0,0x6c,0xab,0,
+0x6f,0x30,0x75,0x13,0x6d,0x62,0x65,0x72,0xbf,0,0xb2,0,0x1b,0x6e,0x73,0x70,
+0x61,0x63,0x69,0x6e,0x67,0x6d,0x61,0x72,0x6b,0xa1,1,0x70,0x92,0x74,0x12,0x68,
+0x65,0x72,0xe6,0x80,1,3,0x6c,0x40,0x6e,0x4a,0x70,0x56,0x73,0x14,0x79,0x6d,
+0x62,0x6f,0x6c,0xff,8,0,0,0,0x14,0x65,0x74,0x74,0x65,0x72,0x61,0x14,
+0x75,0x6d,0x62,0x65,0x72,0xb3,0,0x19,0x75,0x6e,0x63,0x74,0x75,0x61,0x74,0x69,
+0x6f,0x6e,0xfd,0x80,0,0,0x1c,0x65,0x6e,0x70,0x75,0x6e,0x63,0x74,0x75,0x61,
+0x74,0x69,0x6f,0x6e,0xf9,0,0,0x66,0xc0,0xc4,0x66,0xa2,0x47,0x69,0xa2,0x64,
+0x6c,0xa2,0x79,0x6d,0xa4,0xc0,4,0x61,0x6c,0x63,0xa5,0,0x65,0xa3,0x80,0x6e,
+0xa1,0x6f,0x15,0x64,0x69,0x66,0x69,0x65,0x72,1,0x6c,0x38,0x73,0x14,0x79,0x6d,
+0x62,0x6f,0x6c,0xff,4,0,0,0,0x14,0x65,0x74,0x74,0x65,0x72,0x41,1,
+0x72,0x3c,0x74,0x16,0x68,0x73,0x79,0x6d,0x62,0x6f,0x6c,0xff,1,0,0,0,
+0x10,0x6b,0xa5,0xc0,1,0x69,0x32,0x6f,0x13,0x72,0x6d,0x61,0x74,0xdb,0,0,
+0x1d,0x6e,0x61,0x6c,0x70,0x75,0x6e,0x63,0x74,0x75,0x61,0x74,0x69,0x6f,0x6e,0xff,
+0x20,0,0,0,0x10,0x6e,0x1f,0x69,0x74,0x69,0x61,0x6c,0x70,0x75,0x6e,0x63,
+0x74,0x75,0x61,0x74,0x69,0x6f,0x6e,0xff,0x10,0,0,0,0x9c,7,0x6d,0x18,
+0x6d,0x41,0x6f,0x28,0x74,0x31,0x75,0x25,0x60,0x1c,0x77,0x65,0x72,0x63,0x61,0x73,
+0x65,0x6c,0x65,0x74,0x74,0x65,0x72,0x29,0x63,0x3d,0x65,0x28,0x69,0x42,0x6c,0x29,
+0x13,0x74,0x74,0x65,0x72,0x9c,0x15,0x6e,0x75,0x6d,0x62,0x65,0x72,0xab,0,0x1a,
+0x6e,0x65,0x73,0x65,0x70,0x61,0x72,0x61,0x74,0x6f,0x72,0xd9,0x20,0,0x63,0x46,
+0x64,0xa2,0x96,0x65,0x1b,0x6e,0x63,0x6c,0x6f,0x73,0x69,0x6e,0x67,0x6d,0x61,0x72,
+0x6b,0xa3,0x80,0xe6,0x80,1,7,0x6e,0x57,0x6e,0x52,0x6f,0x5e,0x73,0xe1,0,
+0,0x75,0x1b,0x72,0x72,0x65,0x6e,0x63,0x79,0x73,0x79,0x6d,0x62,0x6f,0x6c,0xff,
+2,0,0,0,0x22,0x12,0x74,0x72,0x6c,0xd9,0x80,0,0xdc,0,0,1,
+0x6d,0x62,0x6e,1,0x6e,0x30,0x74,0x12,0x72,0x6f,0x6c,0xd9,0x80,0,0x1f,0x65,
+0x63,0x74,0x6f,0x72,0x70,0x75,0x6e,0x63,0x74,0x75,0x61,0x74,0x69,0x6f,0x6e,0xfd,
+0x40,0,0,0x19,0x62,0x69,0x6e,0x69,0x6e,0x67,0x6d,0x61,0x72,0x6b,0xa5,0xc0,
+0x61,0x58,0x63,0xd9,0x80,0,0x66,0xdb,0,0,0x6c,0x1d,0x6f,0x73,0x65,0x70,
+0x75,0x6e,0x63,0x74,0x75,0x61,0x74,0x69,0x6f,0x6e,0xfd,0x20,0,0,0x18,0x73,
+0x65,0x64,0x6c,0x65,0x74,0x74,0x65,0x72,0x3d,2,0x61,0x32,0x65,0x50,0x69,0x12,
+0x67,0x69,0x74,0xa7,0,0x1c,0x73,0x68,0x70,0x75,0x6e,0x63,0x74,0x75,0x61,0x74,
+0x69,0x6f,0x6e,0xe9,0,0,0x1a,0x63,0x69,0x6d,0x61,0x6c,0x6e,0x75,0x6d,0x62,
+0x65,0x72,0xa7,0
};
-const char PropNameData::nameGroups[22855]={
+const char PropNameData::nameGroups[23100]={
2,'A','l','p','h','a',0,'A','l','p','h','a','b','e','t','i','c',0,
4,'N',0,'N','o',0,'F',0,'F','a','l','s','e',0,4,'Y',0,'Y','e','s',0,'T',0,'T','r','u','e',0,
2,'N','R',0,'N','o','t','_','R','e','o','r','d','e','r','e','d',0,
@@ -1553,9 +1559,18 @@ const char PropNameData::nameGroups[22855]={
'r','i','g','i','n','a','l','_','S','y','l','l','a','b','i','c','s','_','E','x','t','e','n','d','e','d','_','A',0,
2,'V','i','t','h','k','u','q','i',0,'V','i','t','h','k','u','q','i',0,
2,'Z','n','a','m','e','n','n','y','_','M','u','s','i','c',0,'Z','n','a','m','e','n','n','y','_','M','u','s','i','c','a','l',
-'_','N','o','t','a','t','i','o','n',0,2,'c','c','c',0,'C','a','n','o','n','i','c','a','l','_','C','o','m','b','i','n','i',
-'n','g','_','C','l','a','s','s',0,2,'d','t',0,'D','e','c','o','m','p','o','s','i','t','i','o','n','_','T','y','p','e',0,
-3,'N','o','n','e',0,'N','o','n','e',0,'n','o','n','e',0,
+'_','N','o','t','a','t','i','o','n',0,2,'A','r','a','b','i','c','_','E','x','t','_','C',0,
+'A','r','a','b','i','c','_','E','x','t','e','n','d','e','d','_','C',0,
+2,'C','J','K','_','E','x','t','_','H',0,'C','J','K','_','U','n','i','f','i','e','d','_','I','d','e','o','g','r','a','p','h',
+'s','_','E','x','t','e','n','s','i','o','n','_','H',0,2,'C','y','r','i','l','l','i','c','_','E','x','t','_','D',0,
+'C','y','r','i','l','l','i','c','_','E','x','t','e','n','d','e','d','_','D',0,
+2,'D','e','v','a','n','a','g','a','r','i','_','E','x','t','_','A',0,'D','e','v','a','n','a','g','a','r','i','_','E','x','t',
+'e','n','d','e','d','_','A',0,2,'K','a','k','t','o','v','i','k','_','N','u','m','e','r','a','l','s',0,
+'K','a','k','t','o','v','i','k','_','N','u','m','e','r','a','l','s',0,
+2,'K','a','w','i',0,'K','a','w','i',0,2,'N','a','g','_','M','u','n','d','a','r','i',0,
+'N','a','g','_','M','u','n','d','a','r','i',0,2,'c','c','c',0,'C','a','n','o','n','i','c','a','l','_','C','o','m','b','i',
+'n','i','n','g','_','C','l','a','s','s',0,2,'d','t',0,'D','e','c','o','m','p','o','s','i','t','i','o','n','_','T','y','p',
+'e',0,3,'N','o','n','e',0,'N','o','n','e',0,'n','o','n','e',0,
3,'C','a','n',0,'C','a','n','o','n','i','c','a','l',0,'c','a','n',0,
3,'C','o','m',0,'C','o','m','p','a','t',0,'c','o','m',0,
3,'E','n','c',0,'C','i','r','c','l','e',0,'e','n','c',0,
@@ -1844,6 +1859,7 @@ const char PropNameData::nameGroups[22855]={
2,'Y','e','z','i',0,'Y','e','z','i','d','i',0,2,'C','p','m','n',0,'C','y','p','r','o','_','M','i','n','o','a','n',0,
2,'O','u','g','r',0,'O','l','d','_','U','y','g','h','u','r',0,
2,'T','n','s','a',0,'T','a','n','g','s','a',0,2,'V','i','t','h',0,'V','i','t','h','k','u','q','i',0,
+2,'N','a','g','m',0,'N','a','g','_','M','u','n','d','a','r','i',0,
2,'h','s','t',0,'H','a','n','g','u','l','_','S','y','l','l','a','b','l','e','_','T','y','p','e',0,
2,'N','A',0,'N','o','t','_','A','p','p','l','i','c','a','b','l','e',0,
2,'L',0,'L','e','a','d','i','n','g','_','J','a','m','o',0,
diff --git a/thirdparty/icu4c/common/propsvec.cpp b/thirdparty/icu4c/common/propsvec.cpp
index 056fcda9cf..e5caa4b9d2 100644
--- a/thirdparty/icu4c/common/propsvec.cpp
+++ b/thirdparty/icu4c/common/propsvec.cpp
@@ -351,7 +351,7 @@ upvec_compact(UPropsVectors *pv, UPVecCompactHandler *handler, void *context, UE
}
/* Set the flag now: Sorting and compacting destroys the builder data structure. */
- pv->isCompacted=TRUE;
+ pv->isCompacted=true;
rows=pv->rows;
columns=pv->columns;
@@ -360,7 +360,7 @@ upvec_compact(UPropsVectors *pv, UPVecCompactHandler *handler, void *context, UE
/* sort the properties vectors to find unique vector values */
uprv_sortArray(pv->v, rows, columns*4,
- upvec_compareRows, pv, FALSE, pErrorCode);
+ upvec_compareRows, pv, false, pErrorCode);
if(U_FAILURE(*pErrorCode)) {
return;
}
@@ -503,7 +503,7 @@ upvec_compactToUTrie2Handler(void *context,
(void)columns;
UPVecToUTrie2Context *toUTrie2=(UPVecToUTrie2Context *)context;
if(start<UPVEC_FIRST_SPECIAL_CP) {
- utrie2_setRange32(toUTrie2->trie, start, end, (uint32_t)rowIndex, TRUE, pErrorCode);
+ utrie2_setRange32(toUTrie2->trie, start, end, (uint32_t)rowIndex, true, pErrorCode);
} else {
switch(start) {
case UPVEC_INITIAL_VALUE_CP:
diff --git a/thirdparty/icu4c/common/punycode.cpp b/thirdparty/icu4c/common/punycode.cpp
index 4832938ff7..f95722da27 100644
--- a/thirdparty/icu4c/common/punycode.cpp
+++ b/thirdparty/icu4c/common/punycode.cpp
@@ -573,7 +573,7 @@ u_strFromPunycode(const UChar *src, int32_t srcLength,
/* Case of last character determines uppercase flag: */
caseFlags[codeUnitIndex]=IS_BASIC_UPPERCASE(src[in-1]);
if(cpLength==2) {
- caseFlags[codeUnitIndex+1]=FALSE;
+ caseFlags[codeUnitIndex+1]=false;
}
}
}
diff --git a/thirdparty/icu4c/common/putil.cpp b/thirdparty/icu4c/common/putil.cpp
index 68be079b3a..f27c8737d2 100644
--- a/thirdparty/icu4c/common/putil.cpp
+++ b/thirdparty/icu4c/common/putil.cpp
@@ -244,7 +244,7 @@ u_signBit(double d) {
*/
UDate fakeClock_t0 = 0; /** Time to start the clock from **/
UDate fakeClock_dt = 0; /** Offset (fake time - real time) **/
-UBool fakeClock_set = FALSE; /** True if fake clock has spun up **/
+UBool fakeClock_set = false; /** True if fake clock has spun up **/
static UDate getUTCtime_real() {
struct timeval posixTime;
@@ -269,7 +269,7 @@ static UDate getUTCtime_fake() {
fprintf(stderr,"U_DEBUG_FAKETIME was set at compile time, but U_FAKETIME_START was not set.\n"
"Set U_FAKETIME_START to the number of milliseconds since 1/1/1970 to set the ICU clock.\n");
}
- fakeClock_set = TRUE;
+ fakeClock_set = true;
}
umtx_unlock(&fakeClockMutex);
@@ -905,7 +905,7 @@ static UBool compareBinaryFiles(const char* defaultTZFileName, const char* TZFil
int32_t sizeFileRead;
int32_t sizeFileToRead;
char bufferFile[MAX_READ_SIZE];
- UBool result = TRUE;
+ UBool result = true;
if (tzInfo->defaultTZFilePtr == NULL) {
tzInfo->defaultTZFilePtr = fopen(defaultTZFileName, "r");
@@ -925,7 +925,7 @@ static UBool compareBinaryFiles(const char* defaultTZFileName, const char* TZFil
sizeFileLeft = sizeFile;
if (sizeFile != tzInfo->defaultTZFileSize) {
- result = FALSE;
+ result = false;
} else {
/* Store the data from the files in separate buffers and
* compare each byte to determine equality.
@@ -942,7 +942,7 @@ static UBool compareBinaryFiles(const char* defaultTZFileName, const char* TZFil
sizeFileRead = fread(bufferFile, 1, sizeFileToRead, file);
if (memcmp(tzInfo->defaultTZBuffer + tzInfo->defaultTZPosition, bufferFile, sizeFileRead) != 0) {
- result = FALSE;
+ result = false;
break;
}
sizeFileLeft -= sizeFileRead;
@@ -950,7 +950,7 @@ static UBool compareBinaryFiles(const char* defaultTZFileName, const char* TZFil
}
}
} else {
- result = FALSE;
+ result = false;
}
if (file != NULL) {
@@ -1189,7 +1189,7 @@ uprv_tzname(int n)
tzInfo->defaultTZBuffer = NULL;
tzInfo->defaultTZFileSize = 0;
tzInfo->defaultTZFilePtr = NULL;
- tzInfo->defaultTZstatus = FALSE;
+ tzInfo->defaultTZstatus = false;
tzInfo->defaultTZPosition = 0;
gTimeZoneBufferPtr = searchForTZFile(TZZONEINFO, tzInfo);
@@ -1260,10 +1260,10 @@ uprv_tzname(int n)
/* Get and set the ICU data directory --------------------------------------- */
-static icu::UInitOnce gDataDirInitOnce = U_INITONCE_INITIALIZER;
+static icu::UInitOnce gDataDirInitOnce {};
static char *gDataDirectory = NULL;
-UInitOnce gTimeZoneFilesInitOnce = U_INITONCE_INITIALIZER;
+UInitOnce gTimeZoneFilesInitOnce {};
static CharString *gTimeZoneFilesDirectory = NULL;
#if U_POSIX_LOCALE || U_PLATFORM_USES_ONLY_WIN32_API
@@ -1295,7 +1295,7 @@ static UBool U_CALLCONV putil_cleanup(void)
gCorrectedPOSIXLocaleHeapAllocated = false;
}
#endif
- return TRUE;
+ return true;
}
/*
@@ -1344,16 +1344,16 @@ U_CAPI UBool U_EXPORT2
uprv_pathIsAbsolute(const char *path)
{
if(!path || !*path) {
- return FALSE;
+ return false;
}
if(*path == U_FILE_SEP_CHAR) {
- return TRUE;
+ return true;
}
#if (U_FILE_SEP_CHAR != U_FILE_ALT_SEP_CHAR)
if(*path == U_FILE_ALT_SEP_CHAR) {
- return TRUE;
+ return true;
}
#endif
@@ -1361,11 +1361,11 @@ uprv_pathIsAbsolute(const char *path)
if( (((path[0] >= 'A') && (path[0] <= 'Z')) ||
((path[0] >= 'a') && (path[0] <= 'z'))) &&
path[1] == ':' ) {
- return TRUE;
+ return true;
}
#endif
- return FALSE;
+ return false;
}
/* Backup setting of ICU_DATA_DIR_PREFIX_ENV_VAR
@@ -1402,12 +1402,12 @@ static BOOL U_CALLCONV getIcuDataDirectoryUnderWindowsDirectory(char* directoryB
if ((windowsPathUtf8Len + UPRV_LENGTHOF(ICU_DATA_DIR_WINDOWS)) < bufferLength) {
uprv_strcpy(directoryBuffer, windowsPathUtf8);
uprv_strcat(directoryBuffer, ICU_DATA_DIR_WINDOWS);
- return TRUE;
+ return true;
}
}
}
- return FALSE;
+ return false;
}
#endif
diff --git a/thirdparty/icu4c/common/rbbi.cpp b/thirdparty/icu4c/common/rbbi.cpp
index cae8d154b3..2769263894 100644
--- a/thirdparty/icu4c/common/rbbi.cpp
+++ b/thirdparty/icu4c/common/rbbi.cpp
@@ -39,7 +39,7 @@
#include "uvectr32.h"
#ifdef RBBI_DEBUG
-static UBool gTrace = FALSE;
+static UBool gTrace = false;
#endif
U_NAMESPACE_BEGIN
@@ -267,7 +267,7 @@ RuleBasedBreakIterator::operator=(const RuleBasedBreakIterator& that) {
}
// TODO: clone fLanguageBreakEngines from "that"
UErrorCode status = U_ZERO_ERROR;
- utext_clone(&fText, &that.fText, FALSE, TRUE, &status);
+ utext_clone(&fText, &that.fText, false, true, &status);
if (fCharIter != &fSCharIter) {
delete fCharIter;
@@ -354,13 +354,13 @@ void RuleBasedBreakIterator::init(UErrorCode &status) {
}
#ifdef RBBI_DEBUG
- static UBool debugInitDone = FALSE;
- if (debugInitDone == FALSE) {
+ static UBool debugInitDone = false;
+ if (debugInitDone == false) {
char *debugEnv = getenv("U_RBBIDEBUG");
if (debugEnv && uprv_strstr(debugEnv, "trace")) {
- gTrace = TRUE;
+ gTrace = true;
}
- debugInitDone = TRUE;
+ debugInitDone = true;
}
#endif
}
@@ -439,7 +439,7 @@ void RuleBasedBreakIterator::setText(UText *ut, UErrorCode &status) {
}
fBreakCache->reset();
fDictionaryCache->reset();
- utext_clone(&fText, ut, FALSE, TRUE, &status);
+ utext_clone(&fText, ut, false, true, &status);
// Set up a dummy CharacterIterator to be returned if anyone
// calls getText(). With input from UText, there is no reasonable
@@ -460,7 +460,7 @@ void RuleBasedBreakIterator::setText(UText *ut, UErrorCode &status) {
UText *RuleBasedBreakIterator::getUText(UText *fillIn, UErrorCode &status) const {
- UText *result = utext_clone(fillIn, &fText, FALSE, TRUE, &status);
+ UText *result = utext_clone(fillIn, &fText, false, true, &status);
return result;
}
@@ -548,7 +548,7 @@ RuleBasedBreakIterator &RuleBasedBreakIterator::refreshInputText(UText *input, U
}
int64_t pos = utext_getNativeIndex(&fText);
// Shallow read-only clone of the new UText into the existing input UText
- utext_clone(&fText, input, FALSE, TRUE, &status);
+ utext_clone(&fText, input, false, true, &status);
if (U_FAILURE(status)) {
return *this;
}
@@ -696,7 +696,7 @@ UBool RuleBasedBreakIterator::isBoundary(int32_t offset) {
// out-of-range indexes are never boundary positions
if (offset < 0) {
first(); // For side effects on current position, tag values.
- return FALSE;
+ return false;
}
// Adjust offset to be on a code point boundary and not beyond the end of the text.
@@ -713,9 +713,9 @@ UBool RuleBasedBreakIterator::isBoundary(int32_t offset) {
}
if (result && adjustedOffset < offset && utext_char32At(&fText, offset) == U_SENTINEL) {
- // Original offset is beyond the end of the text. Return FALSE, it's not a boundary,
+ // Original offset is beyond the end of the text. Return false, it's not a boundary,
// but the iteration position remains set to the end of the text, which is a boundary.
- return FALSE;
+ return false;
}
if (!result) {
// Not on a boundary. isBoundary() must leave iterator on the following boundary.
@@ -838,7 +838,7 @@ int32_t RuleBasedBreakIterator::handleNext() {
result = initialPosition;
c = UTEXT_NEXT32(&fText);
if (c==U_SENTINEL) {
- fDone = TRUE;
+ fDone = true;
return UBRK_DONE;
}
@@ -1153,8 +1153,8 @@ U_NAMESPACE_END
static icu::UStack *gLanguageBreakFactories = nullptr;
static const icu::UnicodeString *gEmptyString = nullptr;
-static icu::UInitOnce gLanguageBreakFactoriesInitOnce = U_INITONCE_INITIALIZER;
-static icu::UInitOnce gRBBIInitOnce = U_INITONCE_INITIALIZER;
+static icu::UInitOnce gLanguageBreakFactoriesInitOnce {};
+static icu::UInitOnce gRBBIInitOnce {};
/**
* Release all static memory held by breakiterator.
@@ -1167,7 +1167,7 @@ UBool U_CALLCONV rbbi_cleanup(void) {
gEmptyString = nullptr;
gLanguageBreakFactoriesInitOnce.reset();
gRBBIInitOnce.reset();
- return TRUE;
+ return true;
}
U_CDECL_END
diff --git a/thirdparty/icu4c/common/rbbi_cache.cpp b/thirdparty/icu4c/common/rbbi_cache.cpp
index 26d82df781..45e02528cf 100644
--- a/thirdparty/icu4c/common/rbbi_cache.cpp
+++ b/thirdparty/icu4c/common/rbbi_cache.cpp
@@ -45,7 +45,7 @@ void RuleBasedBreakIterator::DictionaryCache::reset() {
UBool RuleBasedBreakIterator::DictionaryCache::following(int32_t fromPos, int32_t *result, int32_t *statusIndex) {
if (fromPos >= fLimit || fromPos < fStart) {
fPositionInCache = -1;
- return FALSE;
+ return false;
}
// Sequential iteration, move from previous boundary to the following
@@ -55,13 +55,13 @@ UBool RuleBasedBreakIterator::DictionaryCache::following(int32_t fromPos, int32_
++fPositionInCache;
if (fPositionInCache >= fBreaks.size()) {
fPositionInCache = -1;
- return FALSE;
+ return false;
}
r = fBreaks.elementAti(fPositionInCache);
U_ASSERT(r > fromPos);
*result = r;
*statusIndex = fOtherRuleStatusIndex;
- return TRUE;
+ return true;
}
// Random indexing. Linear search for the boundary following the given position.
@@ -71,7 +71,7 @@ UBool RuleBasedBreakIterator::DictionaryCache::following(int32_t fromPos, int32_
if (r > fromPos) {
*result = r;
*statusIndex = fOtherRuleStatusIndex;
- return TRUE;
+ return true;
}
}
UPRV_UNREACHABLE_EXIT;
@@ -81,7 +81,7 @@ UBool RuleBasedBreakIterator::DictionaryCache::following(int32_t fromPos, int32_
UBool RuleBasedBreakIterator::DictionaryCache::preceding(int32_t fromPos, int32_t *result, int32_t *statusIndex) {
if (fromPos <= fStart || fromPos > fLimit) {
fPositionInCache = -1;
- return FALSE;
+ return false;
}
if (fromPos == fLimit) {
@@ -98,12 +98,12 @@ UBool RuleBasedBreakIterator::DictionaryCache::preceding(int32_t fromPos, int32_
U_ASSERT(r < fromPos);
*result = r;
*statusIndex = ( r== fStart) ? fFirstRuleStatusIndex : fOtherRuleStatusIndex;
- return TRUE;
+ return true;
}
if (fPositionInCache == 0) {
fPositionInCache = -1;
- return FALSE;
+ return false;
}
for (fPositionInCache = fBreaks.size()-1; fPositionInCache >= 0; --fPositionInCache) {
@@ -111,7 +111,7 @@ UBool RuleBasedBreakIterator::DictionaryCache::preceding(int32_t fromPos, int32_
if (r < fromPos) {
*result = r;
*statusIndex = ( r == fStart) ? fFirstRuleStatusIndex : fOtherRuleStatusIndex;
- return TRUE;
+ return true;
}
}
UPRV_UNREACHABLE_EXIT;
@@ -227,7 +227,7 @@ void RuleBasedBreakIterator::BreakCache::reset(int32_t pos, int32_t ruleStatus)
int32_t RuleBasedBreakIterator::BreakCache::current() {
fBI->fPosition = fTextIdx;
fBI->fRuleStatusIndex = fStatuses[fBufIdx];
- fBI->fDone = FALSE;
+ fBI->fDone = false;
return fTextIdx;
}
@@ -302,18 +302,18 @@ void RuleBasedBreakIterator::BreakCache::previous(UErrorCode &status) {
UBool RuleBasedBreakIterator::BreakCache::seek(int32_t pos) {
if (pos < fBoundaries[fStartBufIdx] || pos > fBoundaries[fEndBufIdx]) {
- return FALSE;
+ return false;
}
if (pos == fBoundaries[fStartBufIdx]) {
// Common case: seek(0), from BreakIterator::first()
fBufIdx = fStartBufIdx;
fTextIdx = fBoundaries[fBufIdx];
- return TRUE;
+ return true;
}
if (pos == fBoundaries[fEndBufIdx]) {
fBufIdx = fEndBufIdx;
fTextIdx = fBoundaries[fBufIdx];
- return TRUE;
+ return true;
}
int32_t min = fStartBufIdx;
@@ -331,51 +331,97 @@ UBool RuleBasedBreakIterator::BreakCache::seek(int32_t pos) {
fBufIdx = modChunkSize(max - 1);
fTextIdx = fBoundaries[fBufIdx];
U_ASSERT(fTextIdx <= pos);
- return TRUE;
+ return true;
}
UBool RuleBasedBreakIterator::BreakCache::populateNear(int32_t position, UErrorCode &status) {
if (U_FAILURE(status)) {
- return FALSE;
+ return false;
}
U_ASSERT(position < fBoundaries[fStartBufIdx] || position > fBoundaries[fEndBufIdx]);
- // Find a boundary somewhere in the vicinity of the requested position.
- // Depending on the safe rules and the text data, it could be either before, at, or after
- // the requested position.
-
+ // Add boundaries to the cache near the specified position.
+ // The given position need not be a boundary itself.
+ // The input position must be within the range of the text, and
+ // on a code point boundary.
+ // If the requested position is a break boundary, leave the iteration
+ // position on it.
+ // If the requested position is not a boundary, leave the iteration
+ // position on the preceding boundary and include both the
+ // preceding and following boundaries in the cache.
+ // Additional boundaries, either preceding or following, may be added
+ // to the cache as a side effect.
// If the requested position is not near already cached positions, clear the existing cache,
// find a near-by boundary and begin new cache contents there.
- if ((position < fBoundaries[fStartBufIdx] - 15) || position > (fBoundaries[fEndBufIdx] + 15)) {
- int32_t aBoundary = 0;
- int32_t ruleStatusIndex = 0;
- if (position > 20) {
- int32_t backupPos = fBI->handleSafePrevious(position);
-
- if (backupPos > 0) {
- // Advance to the boundary following the backup position.
- // There is a complication: the safe reverse rules identify pairs of code points
- // that are safe. If advancing from the safe point moves forwards by less than
- // two code points, we need to advance one more time to ensure that the boundary
- // is good, including a correct rules status value.
- //
- fBI->fPosition = backupPos;
- aBoundary = fBI->handleNext();
- if (aBoundary <= backupPos + 4) {
- // +4 is a quick test for possibly having advanced only one codepoint.
- // Four being the length of the longest potential code point, a supplementary in UTF-8
- utext_setNativeIndex(&fBI->fText, aBoundary);
- if (backupPos == utext_getPreviousNativeIndex(&fBI->fText)) {
- // The initial handleNext() only advanced by a single code point. Go again.
- aBoundary = fBI->handleNext(); // Safe rules identify safe pairs.
- }
+ // Threshold for a text position to be considered near to existing cache contents.
+ // TODO: See issue ICU-22024 "perf tuning of Cache needed."
+ // This value is subject to change. See the ticket for more details.
+ static constexpr int32_t CACHE_NEAR = 15;
+
+ int32_t aBoundary = -1;
+ int32_t ruleStatusIndex = 0;
+ bool retainCache = false;
+ if ((position > fBoundaries[fStartBufIdx] - CACHE_NEAR) && position < (fBoundaries[fEndBufIdx] + CACHE_NEAR)) {
+ // Requested position is near the existing cache. Retain it.
+ retainCache = true;
+ } else if (position <= CACHE_NEAR) {
+ // Requested position is near the start of the text. Fill cache from start, skipping
+ // the need to find a safe point.
+ retainCache = false;
+ aBoundary = 0;
+ } else {
+ // Requested position is not near the existing cache.
+ // Find a safe point to refill the cache from.
+ int32_t backupPos = fBI->handleSafePrevious(position);
+
+ if (fBoundaries[fEndBufIdx] < position && fBoundaries[fEndBufIdx] >= (backupPos - CACHE_NEAR)) {
+ // The requested position is beyond the end of the existing cache, but the
+ // reverse rules produced a position near or before the cached region.
+ // Retain the existing cache, and fill from the end of it.
+ retainCache = true;
+ } else if (backupPos < CACHE_NEAR) {
+ // The safe reverse rules moved us to near the start of text.
+ // Take that (index 0) as the backup boundary, avoiding the complication
+ // (in the following block) of moving forward from the safe point to a known boundary.
+ //
+ // Retain the cache if it begins not too far from the requested position.
+ aBoundary = 0;
+ retainCache = (fBoundaries[fStartBufIdx] <= (position + CACHE_NEAR));
+ } else {
+ // The safe reverse rules produced a position that is neither near the existing
+ // cache, nor near the start of text.
+ // Advance to the boundary following.
+ // There is a complication: the safe reverse rules identify pairs of code points
+ // that are safe. If advancing from the safe point moves forwards by less than
+ // two code points, we need to advance one more time to ensure that the boundary
+ // is good, including a correct rules status value.
+ retainCache = false;
+ fBI->fPosition = backupPos;
+ aBoundary = fBI->handleNext();
+ if (aBoundary != UBRK_DONE && aBoundary <= backupPos + 4) {
+ // +4 is a quick test for possibly having advanced only one codepoint.
+ // Four being the length of the longest potential code point, a supplementary in UTF-8
+ utext_setNativeIndex(&fBI->fText, aBoundary);
+ if (backupPos == utext_getPreviousNativeIndex(&fBI->fText)) {
+ // The initial handleNext() only advanced by a single code point. Go again.
+ aBoundary = fBI->handleNext(); // Safe rules identify safe pairs.
}
- ruleStatusIndex = fBI->fRuleStatusIndex;
}
+ if (aBoundary == UBRK_DONE) {
+ // Note (Andy Heninger): I don't think this condition can occur, but it's hard
+            // to prove that it can't. We ran off the end of the string looking for a boundary
+ // following a safe point; choose the end of the string as that boundary.
+ aBoundary = utext_nativeLength(&fBI->fText);
+ }
+ ruleStatusIndex = fBI->fRuleStatusIndex;
}
+ }
+
+ if (!retainCache) {
+ U_ASSERT(aBoundary != -1);
reset(aBoundary, ruleStatusIndex); // Reset cache to hold aBoundary as a single starting point.
}
@@ -430,13 +476,13 @@ UBool RuleBasedBreakIterator::BreakCache::populateFollowing() {
if (fBI->fDictionaryCache->following(fromPosition, &pos, &ruleStatusIdx)) {
addFollowing(pos, ruleStatusIdx, UpdateCachePosition);
- return TRUE;
+ return true;
}
fBI->fPosition = fromPosition;
pos = fBI->handleNext();
if (pos == UBRK_DONE) {
- return FALSE;
+ return false;
}
ruleStatusIdx = fBI->fRuleStatusIndex;
@@ -446,7 +492,7 @@ UBool RuleBasedBreakIterator::BreakCache::populateFollowing() {
fBI->fDictionaryCache->populateDictionary(fromPosition, pos, fromRuleStatusIdx, ruleStatusIdx);
if (fBI->fDictionaryCache->following(fromPosition, &pos, &ruleStatusIdx)) {
addFollowing(pos, ruleStatusIdx, UpdateCachePosition);
- return TRUE;
+ return true;
// TODO: may want to move a sizable chunk of dictionary cache to break cache at this point.
// But be careful with interactions with populateNear().
}
@@ -469,18 +515,18 @@ UBool RuleBasedBreakIterator::BreakCache::populateFollowing() {
addFollowing(pos, fBI->fRuleStatusIndex, RetainCachePosition);
}
- return TRUE;
+ return true;
}
UBool RuleBasedBreakIterator::BreakCache::populatePreceding(UErrorCode &status) {
if (U_FAILURE(status)) {
- return FALSE;
+ return false;
}
int32_t fromPosition = fBoundaries[fStartBufIdx];
if (fromPosition == 0) {
- return FALSE;
+ return false;
}
int32_t position = 0;
@@ -488,7 +534,7 @@ UBool RuleBasedBreakIterator::BreakCache::populatePreceding(UErrorCode &status)
if (fBI->fDictionaryCache->preceding(fromPosition, &position, &positionStatusIdx)) {
addPreceding(position, positionStatusIdx, UpdateCachePosition);
- return TRUE;
+ return true;
}
int32_t backupPosition = fromPosition;
@@ -542,7 +588,7 @@ UBool RuleBasedBreakIterator::BreakCache::populatePreceding(UErrorCode &status)
break;
}
- UBool segmentHandledByDictionary = FALSE;
+ UBool segmentHandledByDictionary = false;
if (fBI->fDictionaryCharCount != 0) {
// Segment from the rules includes dictionary characters.
// Subdivide it, with subdivided results going into the dictionary cache.
@@ -569,12 +615,12 @@ UBool RuleBasedBreakIterator::BreakCache::populatePreceding(UErrorCode &status)
} while (position < fromPosition);
// Move boundaries from the side buffer to the main circular buffer.
- UBool success = FALSE;
+ UBool success = false;
if (!fSideBuffer.isEmpty()) {
positionStatusIdx = fSideBuffer.popi();
position = fSideBuffer.popi();
addPreceding(position, positionStatusIdx, UpdateCachePosition);
- success = TRUE;
+ success = true;
}
while (!fSideBuffer.isEmpty()) {
diff --git a/thirdparty/icu4c/common/rbbidata.cpp b/thirdparty/icu4c/common/rbbidata.cpp
index 6338ed3ed8..f50fc458a5 100644
--- a/thirdparty/icu4c/common/rbbidata.cpp
+++ b/thirdparty/icu4c/common/rbbidata.cpp
@@ -38,7 +38,7 @@ RBBIDataWrapper::RBBIDataWrapper(const RBBIDataHeader *data, UErrorCode &status)
RBBIDataWrapper::RBBIDataWrapper(const RBBIDataHeader *data, enum EDontAdopt, UErrorCode &status) {
init0();
init(data, status);
- fDontFreeData = TRUE;
+ fDontFreeData = true;
}
RBBIDataWrapper::RBBIDataWrapper(UDataMemory* udm, UErrorCode &status) {
@@ -86,7 +86,7 @@ void RBBIDataWrapper::init0() {
fTrie = NULL;
fUDataMem = NULL;
fRefCount = 0;
- fDontFreeData = TRUE;
+ fDontFreeData = true;
}
void RBBIDataWrapper::init(const RBBIDataHeader *data, UErrorCode &status) {
@@ -102,7 +102,7 @@ void RBBIDataWrapper::init(const RBBIDataHeader *data, UErrorCode &status) {
// that is no longer supported. At that time fFormatVersion was
// an int32_t field, rather than an array of 4 bytes.
- fDontFreeData = FALSE;
+ fDontFreeData = false;
if (data->fFTableLen != 0) {
fForwardTable = (RBBIStateTable *)((char *)data + fHeader->fFTable);
}
diff --git a/thirdparty/icu4c/common/rbbinode.cpp b/thirdparty/icu4c/common/rbbinode.cpp
index 27bcd8f8fe..da5937cafd 100644
--- a/thirdparty/icu4c/common/rbbinode.cpp
+++ b/thirdparty/icu4c/common/rbbinode.cpp
@@ -58,10 +58,10 @@ RBBINode::RBBINode(NodeType t) : UMemory() {
fInputSet = NULL;
fFirstPos = 0;
fLastPos = 0;
- fNullable = FALSE;
- fLookAheadEnd = FALSE;
- fRuleRoot = FALSE;
- fChainIn = FALSE;
+ fNullable = false;
+ fLookAheadEnd = false;
+ fRuleRoot = false;
+ fChainIn = false;
fVal = 0;
fPrecedence = precZero;
@@ -92,7 +92,7 @@ RBBINode::RBBINode(const RBBINode &other) : UMemory(other) {
fLastPos = other.fLastPos;
fNullable = other.fNullable;
fVal = other.fVal;
- fRuleRoot = FALSE;
+ fRuleRoot = false;
fChainIn = other.fChainIn;
UErrorCode status = U_ZERO_ERROR;
fFirstPosSet = new UVector(status); // TODO - get a real status from somewhere
@@ -355,11 +355,11 @@ void RBBINode::printTree(const RBBINode *node, UBool printHeading) {
// Unconditionally dump children of all other node types.
if (node->fType != varRef) {
if (node->fLeftChild != NULL) {
- printTree(node->fLeftChild, FALSE);
+ printTree(node->fLeftChild, false);
}
if (node->fRightChild != NULL) {
- printTree(node->fRightChild, FALSE);
+ printTree(node->fRightChild, false);
}
}
}
diff --git a/thirdparty/icu4c/common/rbbirb.cpp b/thirdparty/icu4c/common/rbbirb.cpp
index e5c250dfe4..a9d76f2482 100644
--- a/thirdparty/icu4c/common/rbbirb.cpp
+++ b/thirdparty/icu4c/common/rbbirb.cpp
@@ -65,9 +65,9 @@ RBBIRuleBuilder::RBBIRuleBuilder(const UnicodeString &rules,
fDefaultTree = &fForwardTree;
fForwardTable = NULL;
fRuleStatusVals = NULL;
- fChainRules = FALSE;
- fLBCMNoChain = FALSE;
- fLookAheadHardBreak = FALSE;
+ fChainRules = false;
+ fLBCMNoChain = false;
+ fLookAheadHardBreak = false;
fUSetNodes = NULL;
fRuleStatusVals = NULL;
fScanner = NULL;
diff --git a/thirdparty/icu4c/common/rbbirb.h b/thirdparty/icu4c/common/rbbirb.h
index 11dbf343ec..d983a184b6 100644
--- a/thirdparty/icu4c/common/rbbirb.h
+++ b/thirdparty/icu4c/common/rbbirb.h
@@ -54,8 +54,8 @@ public: // of these structs for each ent
~RBBISymbolTableEntry();
private:
- RBBISymbolTableEntry(const RBBISymbolTableEntry &other); // forbid copying of this class
- RBBISymbolTableEntry &operator=(const RBBISymbolTableEntry &other); // forbid copying of this class
+ RBBISymbolTableEntry(const RBBISymbolTableEntry &other) = delete; // forbid copying of this class
+ RBBISymbolTableEntry &operator=(const RBBISymbolTableEntry &other) = delete; // forbid copying of this class
};
@@ -177,8 +177,8 @@ public:
RBBIDataHeader *flattenData(); // Create the flattened (runtime format)
// data tables..
private:
- RBBIRuleBuilder(const RBBIRuleBuilder &other); // forbid copying of this class
- RBBIRuleBuilder &operator=(const RBBIRuleBuilder &other); // forbid copying of this class
+ RBBIRuleBuilder(const RBBIRuleBuilder &other) = delete; // forbid copying of this class
+ RBBIRuleBuilder &operator=(const RBBIRuleBuilder &other) = delete; // forbid copying of this class
};
diff --git a/thirdparty/icu4c/common/rbbirpt.h b/thirdparty/icu4c/common/rbbirpt.h
index 586953c90c..ca1bcf45dc 100644
--- a/thirdparty/icu4c/common/rbbirpt.h
+++ b/thirdparty/icu4c/common/rbbirpt.h
@@ -79,110 +79,110 @@ struct RBBIRuleTableEl {
};
static const struct RBBIRuleTableEl gRuleParseStateTable[] = {
- {doNOP, 0, 0, 0, TRUE}
- , {doExprStart, 254, 29, 9, FALSE} // 1 start
- , {doNOP, 132, 1,0, TRUE} // 2
- , {doNoChain, 94 /* ^ */, 12, 9, TRUE} // 3
- , {doExprStart, 36 /* $ */, 88, 98, FALSE} // 4
- , {doNOP, 33 /* ! */, 19,0, TRUE} // 5
- , {doNOP, 59 /* ; */, 1,0, TRUE} // 6
- , {doNOP, 252, 0,0, FALSE} // 7
- , {doExprStart, 255, 29, 9, FALSE} // 8
- , {doEndOfRule, 59 /* ; */, 1,0, TRUE} // 9 break-rule-end
- , {doNOP, 132, 9,0, TRUE} // 10
- , {doRuleError, 255, 103,0, FALSE} // 11
- , {doExprStart, 254, 29,0, FALSE} // 12 start-after-caret
- , {doNOP, 132, 12,0, TRUE} // 13
- , {doRuleError, 94 /* ^ */, 103,0, FALSE} // 14
- , {doExprStart, 36 /* $ */, 88, 37, FALSE} // 15
- , {doRuleError, 59 /* ; */, 103,0, FALSE} // 16
- , {doRuleError, 252, 103,0, FALSE} // 17
- , {doExprStart, 255, 29,0, FALSE} // 18
- , {doNOP, 33 /* ! */, 21,0, TRUE} // 19 rev-option
- , {doReverseDir, 255, 28, 9, FALSE} // 20
- , {doOptionStart, 130, 23,0, TRUE} // 21 option-scan1
- , {doRuleError, 255, 103,0, FALSE} // 22
- , {doNOP, 129, 23,0, TRUE} // 23 option-scan2
- , {doOptionEnd, 255, 25,0, FALSE} // 24
- , {doNOP, 59 /* ; */, 1,0, TRUE} // 25 option-scan3
- , {doNOP, 132, 25,0, TRUE} // 26
- , {doRuleError, 255, 103,0, FALSE} // 27
- , {doExprStart, 255, 29, 9, FALSE} // 28 reverse-rule
- , {doRuleChar, 254, 38,0, TRUE} // 29 term
- , {doNOP, 132, 29,0, TRUE} // 30
- , {doRuleChar, 131, 38,0, TRUE} // 31
- , {doNOP, 91 /* [ */, 94, 38, FALSE} // 32
- , {doLParen, 40 /* ( */, 29, 38, TRUE} // 33
- , {doNOP, 36 /* $ */, 88, 37, FALSE} // 34
- , {doDotAny, 46 /* . */, 38,0, TRUE} // 35
- , {doRuleError, 255, 103,0, FALSE} // 36
- , {doCheckVarDef, 255, 38,0, FALSE} // 37 term-var-ref
- , {doNOP, 132, 38,0, TRUE} // 38 expr-mod
- , {doUnaryOpStar, 42 /* * */, 43,0, TRUE} // 39
- , {doUnaryOpPlus, 43 /* + */, 43,0, TRUE} // 40
- , {doUnaryOpQuestion, 63 /* ? */, 43,0, TRUE} // 41
- , {doNOP, 255, 43,0, FALSE} // 42
- , {doExprCatOperator, 254, 29,0, FALSE} // 43 expr-cont
- , {doNOP, 132, 43,0, TRUE} // 44
- , {doExprCatOperator, 131, 29,0, FALSE} // 45
- , {doExprCatOperator, 91 /* [ */, 29,0, FALSE} // 46
- , {doExprCatOperator, 40 /* ( */, 29,0, FALSE} // 47
- , {doExprCatOperator, 36 /* $ */, 29,0, FALSE} // 48
- , {doExprCatOperator, 46 /* . */, 29,0, FALSE} // 49
- , {doExprCatOperator, 47 /* / */, 55,0, FALSE} // 50
- , {doExprCatOperator, 123 /* { */, 67,0, TRUE} // 51
- , {doExprOrOperator, 124 /* | */, 29,0, TRUE} // 52
- , {doExprRParen, 41 /* ) */, 255,0, TRUE} // 53
- , {doExprFinished, 255, 255,0, FALSE} // 54
- , {doSlash, 47 /* / */, 57,0, TRUE} // 55 look-ahead
- , {doNOP, 255, 103,0, FALSE} // 56
- , {doExprCatOperator, 254, 29,0, FALSE} // 57 expr-cont-no-slash
- , {doNOP, 132, 43,0, TRUE} // 58
- , {doExprCatOperator, 131, 29,0, FALSE} // 59
- , {doExprCatOperator, 91 /* [ */, 29,0, FALSE} // 60
- , {doExprCatOperator, 40 /* ( */, 29,0, FALSE} // 61
- , {doExprCatOperator, 36 /* $ */, 29,0, FALSE} // 62
- , {doExprCatOperator, 46 /* . */, 29,0, FALSE} // 63
- , {doExprOrOperator, 124 /* | */, 29,0, TRUE} // 64
- , {doExprRParen, 41 /* ) */, 255,0, TRUE} // 65
- , {doExprFinished, 255, 255,0, FALSE} // 66
- , {doNOP, 132, 67,0, TRUE} // 67 tag-open
- , {doStartTagValue, 128, 70,0, FALSE} // 68
- , {doTagExpectedError, 255, 103,0, FALSE} // 69
- , {doNOP, 132, 74,0, TRUE} // 70 tag-value
- , {doNOP, 125 /* } */, 74,0, FALSE} // 71
- , {doTagDigit, 128, 70,0, TRUE} // 72
- , {doTagExpectedError, 255, 103,0, FALSE} // 73
- , {doNOP, 132, 74,0, TRUE} // 74 tag-close
- , {doTagValue, 125 /* } */, 77,0, TRUE} // 75
- , {doTagExpectedError, 255, 103,0, FALSE} // 76
- , {doExprCatOperator, 254, 29,0, FALSE} // 77 expr-cont-no-tag
- , {doNOP, 132, 77,0, TRUE} // 78
- , {doExprCatOperator, 131, 29,0, FALSE} // 79
- , {doExprCatOperator, 91 /* [ */, 29,0, FALSE} // 80
- , {doExprCatOperator, 40 /* ( */, 29,0, FALSE} // 81
- , {doExprCatOperator, 36 /* $ */, 29,0, FALSE} // 82
- , {doExprCatOperator, 46 /* . */, 29,0, FALSE} // 83
- , {doExprCatOperator, 47 /* / */, 55,0, FALSE} // 84
- , {doExprOrOperator, 124 /* | */, 29,0, TRUE} // 85
- , {doExprRParen, 41 /* ) */, 255,0, TRUE} // 86
- , {doExprFinished, 255, 255,0, FALSE} // 87
- , {doStartVariableName, 36 /* $ */, 90,0, TRUE} // 88 scan-var-name
- , {doNOP, 255, 103,0, FALSE} // 89
- , {doNOP, 130, 92,0, TRUE} // 90 scan-var-start
- , {doVariableNameExpectedErr, 255, 103,0, FALSE} // 91
- , {doNOP, 129, 92,0, TRUE} // 92 scan-var-body
- , {doEndVariableName, 255, 255,0, FALSE} // 93
- , {doScanUnicodeSet, 91 /* [ */, 255,0, TRUE} // 94 scan-unicode-set
- , {doScanUnicodeSet, 112 /* p */, 255,0, TRUE} // 95
- , {doScanUnicodeSet, 80 /* P */, 255,0, TRUE} // 96
- , {doNOP, 255, 103,0, FALSE} // 97
- , {doNOP, 132, 98,0, TRUE} // 98 assign-or-rule
- , {doStartAssign, 61 /* = */, 29, 101, TRUE} // 99
- , {doNOP, 255, 37, 9, FALSE} // 100
- , {doEndAssign, 59 /* ; */, 1,0, TRUE} // 101 assign-end
- , {doRuleErrorAssignExpr, 255, 103,0, FALSE} // 102
- , {doExit, 255, 103,0, TRUE} // 103 errorDeath
+ {doNOP, 0, 0, 0, true}
+ , {doExprStart, 254, 29, 9, false} // 1 start
+ , {doNOP, 132, 1,0, true} // 2
+ , {doNoChain, 94 /* ^ */, 12, 9, true} // 3
+ , {doExprStart, 36 /* $ */, 88, 98, false} // 4
+ , {doNOP, 33 /* ! */, 19,0, true} // 5
+ , {doNOP, 59 /* ; */, 1,0, true} // 6
+ , {doNOP, 252, 0,0, false} // 7
+ , {doExprStart, 255, 29, 9, false} // 8
+ , {doEndOfRule, 59 /* ; */, 1,0, true} // 9 break-rule-end
+ , {doNOP, 132, 9,0, true} // 10
+ , {doRuleError, 255, 103,0, false} // 11
+ , {doExprStart, 254, 29,0, false} // 12 start-after-caret
+ , {doNOP, 132, 12,0, true} // 13
+ , {doRuleError, 94 /* ^ */, 103,0, false} // 14
+ , {doExprStart, 36 /* $ */, 88, 37, false} // 15
+ , {doRuleError, 59 /* ; */, 103,0, false} // 16
+ , {doRuleError, 252, 103,0, false} // 17
+ , {doExprStart, 255, 29,0, false} // 18
+ , {doNOP, 33 /* ! */, 21,0, true} // 19 rev-option
+ , {doReverseDir, 255, 28, 9, false} // 20
+ , {doOptionStart, 130, 23,0, true} // 21 option-scan1
+ , {doRuleError, 255, 103,0, false} // 22
+ , {doNOP, 129, 23,0, true} // 23 option-scan2
+ , {doOptionEnd, 255, 25,0, false} // 24
+ , {doNOP, 59 /* ; */, 1,0, true} // 25 option-scan3
+ , {doNOP, 132, 25,0, true} // 26
+ , {doRuleError, 255, 103,0, false} // 27
+ , {doExprStart, 255, 29, 9, false} // 28 reverse-rule
+ , {doRuleChar, 254, 38,0, true} // 29 term
+ , {doNOP, 132, 29,0, true} // 30
+ , {doRuleChar, 131, 38,0, true} // 31
+ , {doNOP, 91 /* [ */, 94, 38, false} // 32
+ , {doLParen, 40 /* ( */, 29, 38, true} // 33
+ , {doNOP, 36 /* $ */, 88, 37, false} // 34
+ , {doDotAny, 46 /* . */, 38,0, true} // 35
+ , {doRuleError, 255, 103,0, false} // 36
+ , {doCheckVarDef, 255, 38,0, false} // 37 term-var-ref
+ , {doNOP, 132, 38,0, true} // 38 expr-mod
+ , {doUnaryOpStar, 42 /* * */, 43,0, true} // 39
+ , {doUnaryOpPlus, 43 /* + */, 43,0, true} // 40
+ , {doUnaryOpQuestion, 63 /* ? */, 43,0, true} // 41
+ , {doNOP, 255, 43,0, false} // 42
+ , {doExprCatOperator, 254, 29,0, false} // 43 expr-cont
+ , {doNOP, 132, 43,0, true} // 44
+ , {doExprCatOperator, 131, 29,0, false} // 45
+ , {doExprCatOperator, 91 /* [ */, 29,0, false} // 46
+ , {doExprCatOperator, 40 /* ( */, 29,0, false} // 47
+ , {doExprCatOperator, 36 /* $ */, 29,0, false} // 48
+ , {doExprCatOperator, 46 /* . */, 29,0, false} // 49
+ , {doExprCatOperator, 47 /* / */, 55,0, false} // 50
+ , {doExprCatOperator, 123 /* { */, 67,0, true} // 51
+ , {doExprOrOperator, 124 /* | */, 29,0, true} // 52
+ , {doExprRParen, 41 /* ) */, 255,0, true} // 53
+ , {doExprFinished, 255, 255,0, false} // 54
+ , {doSlash, 47 /* / */, 57,0, true} // 55 look-ahead
+ , {doNOP, 255, 103,0, false} // 56
+ , {doExprCatOperator, 254, 29,0, false} // 57 expr-cont-no-slash
+ , {doNOP, 132, 43,0, true} // 58
+ , {doExprCatOperator, 131, 29,0, false} // 59
+ , {doExprCatOperator, 91 /* [ */, 29,0, false} // 60
+ , {doExprCatOperator, 40 /* ( */, 29,0, false} // 61
+ , {doExprCatOperator, 36 /* $ */, 29,0, false} // 62
+ , {doExprCatOperator, 46 /* . */, 29,0, false} // 63
+ , {doExprOrOperator, 124 /* | */, 29,0, true} // 64
+ , {doExprRParen, 41 /* ) */, 255,0, true} // 65
+ , {doExprFinished, 255, 255,0, false} // 66
+ , {doNOP, 132, 67,0, true} // 67 tag-open
+ , {doStartTagValue, 128, 70,0, false} // 68
+ , {doTagExpectedError, 255, 103,0, false} // 69
+ , {doNOP, 132, 74,0, true} // 70 tag-value
+ , {doNOP, 125 /* } */, 74,0, false} // 71
+ , {doTagDigit, 128, 70,0, true} // 72
+ , {doTagExpectedError, 255, 103,0, false} // 73
+ , {doNOP, 132, 74,0, true} // 74 tag-close
+ , {doTagValue, 125 /* } */, 77,0, true} // 75
+ , {doTagExpectedError, 255, 103,0, false} // 76
+ , {doExprCatOperator, 254, 29,0, false} // 77 expr-cont-no-tag
+ , {doNOP, 132, 77,0, true} // 78
+ , {doExprCatOperator, 131, 29,0, false} // 79
+ , {doExprCatOperator, 91 /* [ */, 29,0, false} // 80
+ , {doExprCatOperator, 40 /* ( */, 29,0, false} // 81
+ , {doExprCatOperator, 36 /* $ */, 29,0, false} // 82
+ , {doExprCatOperator, 46 /* . */, 29,0, false} // 83
+ , {doExprCatOperator, 47 /* / */, 55,0, false} // 84
+ , {doExprOrOperator, 124 /* | */, 29,0, true} // 85
+ , {doExprRParen, 41 /* ) */, 255,0, true} // 86
+ , {doExprFinished, 255, 255,0, false} // 87
+ , {doStartVariableName, 36 /* $ */, 90,0, true} // 88 scan-var-name
+ , {doNOP, 255, 103,0, false} // 89
+ , {doNOP, 130, 92,0, true} // 90 scan-var-start
+ , {doVariableNameExpectedErr, 255, 103,0, false} // 91
+ , {doNOP, 129, 92,0, true} // 92 scan-var-body
+ , {doEndVariableName, 255, 255,0, false} // 93
+ , {doScanUnicodeSet, 91 /* [ */, 255,0, true} // 94 scan-unicode-set
+ , {doScanUnicodeSet, 112 /* p */, 255,0, true} // 95
+ , {doScanUnicodeSet, 80 /* P */, 255,0, true} // 96
+ , {doNOP, 255, 103,0, false} // 97
+ , {doNOP, 132, 98,0, true} // 98 assign-or-rule
+ , {doStartAssign, 61 /* = */, 29, 101, true} // 99
+ , {doNOP, 255, 37, 9, false} // 100
+ , {doEndAssign, 59 /* ; */, 1,0, true} // 101 assign-end
+ , {doRuleErrorAssignExpr, 255, 103,0, false} // 102
+ , {doExit, 255, 103,0, true} // 103 errorDeath
};
#ifdef RBBI_DEBUG
static const char * const RBBIRuleStateNames[] = { 0,
diff --git a/thirdparty/icu4c/common/rbbiscan.cpp b/thirdparty/icu4c/common/rbbiscan.cpp
index 1304f7e37e..92cf77664f 100644
--- a/thirdparty/icu4c/common/rbbiscan.cpp
+++ b/thirdparty/icu4c/common/rbbiscan.cpp
@@ -92,7 +92,7 @@ RBBIRuleScanner::RBBIRuleScanner(RBBIRuleBuilder *rb)
fRB = rb;
fScanIndex = 0;
fNextIndex = 0;
- fQuoteMode = FALSE;
+ fQuoteMode = false;
fLineNum = 1;
fCharNum = 0;
fLastChar = 0;
@@ -103,9 +103,9 @@ RBBIRuleScanner::RBBIRuleScanner(RBBIRuleBuilder *rb)
fNodeStack[0] = NULL;
fNodeStackPtr = 0;
- fReverseRule = FALSE;
- fLookAheadRule = FALSE;
- fNoChainInRule = FALSE;
+ fReverseRule = false;
+ fLookAheadRule = false;
+ fNoChainInRule = false;
fSymbolTable = NULL;
fSetTable = NULL;
@@ -201,7 +201,7 @@ UBool RBBIRuleScanner::doParseActions(int32_t action)
{
RBBINode *n = NULL;
- UBool returnVal = TRUE;
+ UBool returnVal = true;
switch (action) {
@@ -213,7 +213,7 @@ UBool RBBIRuleScanner::doParseActions(int32_t action)
case doNoChain:
// Scanned a '^' while on the rule start state.
- fNoChainInRule = TRUE;
+ fNoChainInRule = true;
break;
@@ -345,7 +345,7 @@ UBool RBBIRuleScanner::doParseActions(int32_t action)
catNode->fRightChild = endNode;
fNodeStack[fNodeStackPtr] = catNode;
endNode->fVal = fRuleNum;
- endNode->fLookAheadEnd = TRUE;
+ endNode->fLookAheadEnd = true;
thisRule = catNode;
// TODO: Disable chaining out of look-ahead (hard break) rules.
@@ -354,13 +354,13 @@ UBool RBBIRuleScanner::doParseActions(int32_t action)
}
// Mark this node as being the root of a rule.
- thisRule->fRuleRoot = TRUE;
+ thisRule->fRuleRoot = true;
// Flag if chaining into this rule is wanted.
//
if (fRB->fChainRules && // If rule chaining is enabled globally via !!chain
!fNoChainInRule) { // and no '^' chain-in inhibit was on this rule
- thisRule->fChainIn = TRUE;
+ thisRule->fChainIn = true;
}
@@ -398,9 +398,9 @@ UBool RBBIRuleScanner::doParseActions(int32_t action)
// Just move its parse tree from the stack to *destRules.
*destRules = fNodeStack[fNodeStackPtr];
}
- fReverseRule = FALSE; // in preparation for the next rule.
- fLookAheadRule = FALSE;
- fNoChainInRule = FALSE;
+ fReverseRule = false; // in preparation for the next rule.
+ fLookAheadRule = false;
+ fNoChainInRule = false;
fNodeStackPtr = 0;
}
break;
@@ -408,7 +408,7 @@ UBool RBBIRuleScanner::doParseActions(int32_t action)
case doRuleError:
error(U_BRK_RULE_SYNTAX);
- returnVal = FALSE;
+ returnVal = false;
break;
@@ -484,7 +484,7 @@ UBool RBBIRuleScanner::doParseActions(int32_t action)
if (U_FAILURE(*fRB->fStatus)) {
break;
}
- findSetFor(UnicodeString(TRUE, kAny, 3), n);
+ findSetFor(UnicodeString(true, kAny, 3), n);
n->fFirstPos = fScanIndex;
n->fLastPos = fNextIndex;
fRB->fRules.extractBetween(n->fFirstPos, n->fLastPos, n->fText);
@@ -501,7 +501,7 @@ UBool RBBIRuleScanner::doParseActions(int32_t action)
n->fFirstPos = fScanIndex;
n->fLastPos = fNextIndex;
fRB->fRules.extractBetween(n->fFirstPos, n->fLastPos, n->fText);
- fLookAheadRule = TRUE;
+ fLookAheadRule = true;
break;
@@ -534,7 +534,7 @@ UBool RBBIRuleScanner::doParseActions(int32_t action)
case doTagExpectedError:
error(U_BRK_MALFORMED_RULE_TAG);
- returnVal = FALSE;
+ returnVal = false;
break;
case doOptionStart:
@@ -546,9 +546,9 @@ UBool RBBIRuleScanner::doParseActions(int32_t action)
{
UnicodeString opt(fRB->fRules, fOptionStart, fScanIndex-fOptionStart);
if (opt == UNICODE_STRING("chain", 5)) {
- fRB->fChainRules = TRUE;
+ fRB->fChainRules = true;
} else if (opt == UNICODE_STRING("LBCMNoChain", 11)) {
- fRB->fLBCMNoChain = TRUE;
+ fRB->fLBCMNoChain = true;
} else if (opt == UNICODE_STRING("forward", 7)) {
fRB->fDefaultTree = &fRB->fForwardTree;
} else if (opt == UNICODE_STRING("reverse", 7)) {
@@ -558,7 +558,7 @@ UBool RBBIRuleScanner::doParseActions(int32_t action)
} else if (opt == UNICODE_STRING("safe_reverse", 12)) {
fRB->fDefaultTree = &fRB->fSafeRevTree;
} else if (opt == UNICODE_STRING("lookAheadHardBreak", 18)) {
- fRB->fLookAheadHardBreak = TRUE;
+ fRB->fLookAheadHardBreak = true;
} else if (opt == UNICODE_STRING("quoted_literals_only", 20)) {
fRuleSets[kRuleSet_rule_char-128].clear();
} else if (opt == UNICODE_STRING("unquoted_literals", 17)) {
@@ -570,7 +570,7 @@ UBool RBBIRuleScanner::doParseActions(int32_t action)
break;
case doReverseDir:
- fReverseRule = TRUE;
+ fReverseRule = true;
break;
case doStartVariableName:
@@ -600,7 +600,7 @@ UBool RBBIRuleScanner::doParseActions(int32_t action)
n = fNodeStack[fNodeStackPtr];
if (n->fLeftChild == NULL) {
error(U_BRK_UNDEFINED_VARIABLE);
- returnVal = FALSE;
+ returnVal = false;
}
break;
@@ -609,11 +609,11 @@ UBool RBBIRuleScanner::doParseActions(int32_t action)
case doRuleErrorAssignExpr:
error(U_BRK_ASSIGN_ERROR);
- returnVal = FALSE;
+ returnVal = false;
break;
case doExit:
- returnVal = FALSE;
+ returnVal = false;
break;
case doScanUnicodeSet:
@@ -622,7 +622,7 @@ UBool RBBIRuleScanner::doParseActions(int32_t action)
default:
error(U_BRK_INTERNAL_ERROR);
- returnVal = FALSE;
+ returnVal = false;
break;
}
return returnVal && U_SUCCESS(*fRB->fStatus);
@@ -872,7 +872,7 @@ UChar32 RBBIRuleScanner::nextCharLL() {
fCharNum=0;
if (fQuoteMode) {
error(U_BRK_NEW_LINE_IN_QUOTED_STRING);
- fQuoteMode = FALSE;
+ fQuoteMode = false;
}
}
else {
@@ -901,7 +901,7 @@ void RBBIRuleScanner::nextChar(RBBIRuleChar &c) {
fScanIndex = fNextIndex;
c.fChar = nextCharLL();
- c.fEscaped = FALSE;
+ c.fEscaped = false;
//
// check for '' sequence.
@@ -910,7 +910,7 @@ void RBBIRuleScanner::nextChar(RBBIRuleChar &c) {
if (c.fChar == chApos) {
if (fRB->fRules.char32At(fNextIndex) == chApos) {
c.fChar = nextCharLL(); // get nextChar officially so character counts
- c.fEscaped = TRUE; // stay correct.
+ c.fEscaped = true; // stay correct.
}
else
{
@@ -918,18 +918,18 @@ void RBBIRuleScanner::nextChar(RBBIRuleChar &c) {
// Toggle quoting mode.
// Return either '(' or ')', because quotes cause a grouping of the quoted text.
fQuoteMode = !fQuoteMode;
- if (fQuoteMode == TRUE) {
+ if (fQuoteMode == true) {
c.fChar = chLParen;
} else {
c.fChar = chRParen;
}
- c.fEscaped = FALSE; // The paren that we return is not escaped.
+ c.fEscaped = false; // The paren that we return is not escaped.
return;
}
}
if (fQuoteMode) {
- c.fEscaped = TRUE;
+ c.fEscaped = true;
}
else
{
@@ -963,7 +963,7 @@ void RBBIRuleScanner::nextChar(RBBIRuleChar &c) {
// Use UnicodeString::unescapeAt() to handle them.
//
if (c.fChar == chBackSlash) {
- c.fEscaped = TRUE;
+ c.fEscaped = true;
int32_t startX = fNextIndex;
c.fChar = fRB->fRules.unescapeAt(fNextIndex);
if (fNextIndex == startX) {
@@ -1032,7 +1032,7 @@ void RBBIRuleScanner::parse() {
#ifdef RBBI_DEBUG
if (fRB->fDebugEnv && uprv_strstr(fRB->fDebugEnv, "scan")) { RBBIDebugPrintf("."); fflush(stdout);}
#endif
- if (tableEl->fCharClass < 127 && fC.fEscaped == FALSE && tableEl->fCharClass == fC.fChar) {
+ if (tableEl->fCharClass < 127 && fC.fEscaped == false && tableEl->fCharClass == fC.fChar) {
// Table row specified an individual character, not a set, and
// the input character is not escaped, and
// the input character matched it.
@@ -1057,7 +1057,7 @@ void RBBIRuleScanner::parse() {
}
if (tableEl->fCharClass >= 128 && tableEl->fCharClass < 240 && // Table specs a char class &&
- fC.fEscaped == FALSE && // char is not escaped &&
+ fC.fEscaped == false && // char is not escaped &&
fC.fChar != (UChar32)-1) { // char is not EOF
U_ASSERT((tableEl->fCharClass-128) < UPRV_LENGTHOF(fRuleSets));
if (fRuleSets[tableEl->fCharClass-128].contains(fC.fChar)) {
@@ -1076,7 +1076,7 @@ void RBBIRuleScanner::parse() {
// We've found the row of the state table that matches the current input
// character from the rules string.
// Perform any action specified by this row in the state table.
- if (doParseActions((int32_t)tableEl->fAction) == FALSE) {
+ if (doParseActions((int32_t)tableEl->fAction) == false) {
// Break out of the state machine loop if
// the action signalled some kind of error, or
// the action was to exit, which occurs on normal end-of-rules-input.
@@ -1133,13 +1133,13 @@ void RBBIRuleScanner::parse() {
if (fRB->fDebugEnv && uprv_strstr(fRB->fDebugEnv, "symbols")) {fSymbolTable->rbbiSymtablePrint();}
if (fRB->fDebugEnv && uprv_strstr(fRB->fDebugEnv, "ptree")) {
RBBIDebugPrintf("Completed Forward Rules Parse Tree...\n");
- RBBINode::printTree(fRB->fForwardTree, TRUE);
+ RBBINode::printTree(fRB->fForwardTree, true);
RBBIDebugPrintf("\nCompleted Reverse Rules Parse Tree...\n");
- RBBINode::printTree(fRB->fReverseTree, TRUE);
+ RBBINode::printTree(fRB->fReverseTree, true);
RBBIDebugPrintf("\nCompleted Safe Point Forward Rules Parse Tree...\n");
- RBBINode::printTree(fRB->fSafeFwdTree, TRUE);
+ RBBINode::printTree(fRB->fSafeFwdTree, true);
RBBIDebugPrintf("\nCompleted Safe Point Reverse Rules Parse Tree...\n");
- RBBINode::printTree(fRB->fSafeRevTree, TRUE);
+ RBBINode::printTree(fRB->fSafeRevTree, true);
}
#endif
}
@@ -1154,7 +1154,7 @@ void RBBIRuleScanner::parse() {
void RBBIRuleScanner::printNodeStack(const char *title) {
int i;
RBBIDebugPrintf("%s. Dumping node stack...\n", title);
- for (i=fNodeStackPtr; i>0; i--) {RBBINode::printTree(fNodeStack[i], TRUE);}
+ for (i=fNodeStackPtr; i>0; i--) {RBBINode::printTree(fNodeStack[i], true);}
}
#endif
diff --git a/thirdparty/icu4c/common/rbbiscan.h b/thirdparty/icu4c/common/rbbiscan.h
index 33fdf03354..bf3203880b 100644
--- a/thirdparty/icu4c/common/rbbiscan.h
+++ b/thirdparty/icu4c/common/rbbiscan.h
@@ -158,8 +158,8 @@ private:
UnicodeSet *gRuleSet_name_char;
UnicodeSet *gRuleSet_name_start_char;
- RBBIRuleScanner(const RBBIRuleScanner &other); // forbid copying of this class
- RBBIRuleScanner &operator=(const RBBIRuleScanner &other); // forbid copying of this class
+ RBBIRuleScanner(const RBBIRuleScanner &other) = delete; // forbid copying of this class
+ RBBIRuleScanner &operator=(const RBBIRuleScanner &other) = delete; // forbid copying of this class
};
U_NAMESPACE_END
diff --git a/thirdparty/icu4c/common/rbbisetb.cpp b/thirdparty/icu4c/common/rbbisetb.cpp
index 29faeb8c45..11c47156d6 100644
--- a/thirdparty/icu4c/common/rbbisetb.cpp
+++ b/thirdparty/icu4c/common/rbbisetb.cpp
@@ -261,7 +261,7 @@ void RBBISetBuilder::buildRanges() {
}
if (inputSet->contains(bofString)) {
addValToSet(usetNode, 2);
- fSawBOF = TRUE;
+ fSawBOF = true;
}
}
@@ -569,7 +569,7 @@ void RBBISetBuilder::printSets() {
RBBI_DEBUG_printUnicodeString(usetNode->fText);
RBBIDebugPrintf("\n");
if (usetNode->fLeftChild != NULL) {
- RBBINode::printTree(usetNode->fLeftChild, TRUE);
+ RBBINode::printTree(usetNode->fLeftChild, true);
}
}
RBBIDebugPrintf("\n");
diff --git a/thirdparty/icu4c/common/rbbisetb.h b/thirdparty/icu4c/common/rbbisetb.h
index 6409a4ea57..cd09d3317a 100644
--- a/thirdparty/icu4c/common/rbbisetb.h
+++ b/thirdparty/icu4c/common/rbbisetb.h
@@ -134,8 +134,8 @@ private:
UBool fSawBOF;
- RBBISetBuilder(const RBBISetBuilder &other); // forbid copying of this class
- RBBISetBuilder &operator=(const RBBISetBuilder &other); // forbid copying of this class
+ RBBISetBuilder(const RBBISetBuilder &other) = delete; // forbid copying of this class
+ RBBISetBuilder &operator=(const RBBISetBuilder &other) = delete; // forbid copying of this class
};
diff --git a/thirdparty/icu4c/common/rbbistbl.cpp b/thirdparty/icu4c/common/rbbistbl.cpp
index 627ec1827c..554aeb793f 100644
--- a/thirdparty/icu4c/common/rbbistbl.cpp
+++ b/thirdparty/icu4c/common/rbbistbl.cpp
@@ -254,8 +254,8 @@ void RBBISymbolTable::rbbiSymtablePrint() const {
}
RBBISymbolTableEntry *s = (RBBISymbolTableEntry *)e->value.pointer;
RBBIDebugPrintf("%s\n", CStr(s->key)());
- RBBINode::printTree(s->val, TRUE);
- RBBINode::printTree(s->val->fLeftChild, FALSE);
+ RBBINode::printTree(s->val, true);
+ RBBINode::printTree(s->val->fLeftChild, false);
RBBIDebugPrintf("\n");
}
}
diff --git a/thirdparty/icu4c/common/rbbitblb.cpp b/thirdparty/icu4c/common/rbbitblb.cpp
index a495f17a87..0e3ec7999f 100644
--- a/thirdparty/icu4c/common/rbbitblb.cpp
+++ b/thirdparty/icu4c/common/rbbitblb.cpp
@@ -85,7 +85,7 @@ void RBBITableBuilder::buildForwardTable() {
#ifdef RBBI_DEBUG
if (fRB->fDebugEnv && uprv_strstr(fRB->fDebugEnv, "ftree")) {
RBBIDebugPuts("\nParse tree after flattening variable references.");
- RBBINode::printTree(fTree, TRUE);
+ RBBINode::printTree(fTree, true);
}
#endif
@@ -143,7 +143,7 @@ void RBBITableBuilder::buildForwardTable() {
#ifdef RBBI_DEBUG
if (fRB->fDebugEnv && uprv_strstr(fRB->fDebugEnv, "stree")) {
RBBIDebugPuts("\nParse tree after flattening Unicode Set references.");
- RBBINode::printTree(fTree, TRUE);
+ RBBINode::printTree(fTree, true);
}
#endif
@@ -209,14 +209,14 @@ void RBBITableBuilder::calcNullable(RBBINode *n) {
if (n->fType == RBBINode::setRef ||
n->fType == RBBINode::endMark ) {
// These are non-empty leaf node types.
- n->fNullable = FALSE;
+ n->fNullable = false;
return;
}
if (n->fType == RBBINode::lookAhead || n->fType == RBBINode::tag) {
// Lookahead marker node. It's a leaf, so no recursion on children.
// It's nullable because it does not match any literal text from the input stream.
- n->fNullable = TRUE;
+ n->fNullable = true;
return;
}
@@ -234,10 +234,10 @@ void RBBITableBuilder::calcNullable(RBBINode *n) {
n->fNullable = n->fLeftChild->fNullable && n->fRightChild->fNullable;
}
else if (n->fType == RBBINode::opStar || n->fType == RBBINode::opQuestion) {
- n->fNullable = TRUE;
+ n->fNullable = true;
}
else {
- n->fNullable = FALSE;
+ n->fNullable = false;
}
}
@@ -618,7 +618,7 @@ void RBBITableBuilder::buildStateTable() {
for (tx=1; tx<fDStates->size(); tx++) {
RBBIStateDescriptor *temp;
temp = (RBBIStateDescriptor *)fDStates->elementAt(tx);
- if (temp->fMarked == FALSE) {
+ if (temp->fMarked == false) {
T = temp;
break;
}
@@ -628,7 +628,7 @@ void RBBITableBuilder::buildStateTable() {
}
// mark T;
- T->fMarked = TRUE;
+ T->fMarked = true;
// for each input symbol a do begin
int32_t a;
@@ -655,7 +655,7 @@ void RBBITableBuilder::buildStateTable() {
// if U is not empty and not in DStates then
int32_t ux = 0;
- UBool UinDstates = FALSE;
+ UBool UinDstates = false;
if (U != NULL) {
U_ASSERT(U->size() > 0);
int ix;
@@ -666,7 +666,7 @@ void RBBITableBuilder::buildStateTable() {
delete U;
U = temp2->fPositions;
ux = ix;
- UinDstates = TRUE;
+ UinDstates = true;
break;
}
}
@@ -1131,7 +1131,7 @@ void RBBITableBuilder::printPosSets(RBBINode *n) {
printf("\n");
RBBINode::printNodeHeader();
RBBINode::printNode(n);
- RBBIDebugPrintf(" Nullable: %s\n", n->fNullable?"TRUE":"FALSE");
+ RBBIDebugPrintf(" Nullable: %s\n", n->fNullable?"true":"false");
RBBIDebugPrintf(" firstpos: ");
printSet(n->fFirstPosSet);
@@ -1773,7 +1773,7 @@ void RBBITableBuilder::printRuleStatusTable() {
//-----------------------------------------------------------------------------
RBBIStateDescriptor::RBBIStateDescriptor(int lastInputSymbol, UErrorCode *fStatus) {
- fMarked = FALSE;
+ fMarked = false;
fAccepting = 0;
fLookAhead = 0;
fTagsIdx = 0;
diff --git a/thirdparty/icu4c/common/rbbitblb.h b/thirdparty/icu4c/common/rbbitblb.h
index 38607a819a..2ac66da11f 100644
--- a/thirdparty/icu4c/common/rbbitblb.h
+++ b/thirdparty/icu4c/common/rbbitblb.h
@@ -219,8 +219,8 @@ public:
~RBBIStateDescriptor();
private:
- RBBIStateDescriptor(const RBBIStateDescriptor &other); // forbid copying of this class
- RBBIStateDescriptor &operator=(const RBBIStateDescriptor &other); // forbid copying of this class
+ RBBIStateDescriptor(const RBBIStateDescriptor &other) = delete; // forbid copying of this class
+ RBBIStateDescriptor &operator=(const RBBIStateDescriptor &other) = delete; // forbid copying of this class
};
diff --git a/thirdparty/icu4c/common/resbund.cpp b/thirdparty/icu4c/common/resbund.cpp
index 47c0fe1c6e..8591a625f9 100644
--- a/thirdparty/icu4c/common/resbund.cpp
+++ b/thirdparty/icu4c/common/resbund.cpp
@@ -254,7 +254,7 @@ ResourceBundle::clone() const {
UnicodeString ResourceBundle::getString(UErrorCode& status) const {
int32_t len = 0;
const UChar *r = ures_getString(fResource, &len, &status);
- return UnicodeString(TRUE, r, len);
+ return UnicodeString(true, r, len);
}
const uint8_t *ResourceBundle::getBinary(int32_t& len, UErrorCode& status) const {
@@ -312,13 +312,13 @@ ResourceBundle ResourceBundle::getNext(UErrorCode& status) {
UnicodeString ResourceBundle::getNextString(UErrorCode& status) {
int32_t len = 0;
const UChar* r = ures_getNextString(fResource, &len, 0, &status);
- return UnicodeString(TRUE, r, len);
+ return UnicodeString(true, r, len);
}
UnicodeString ResourceBundle::getNextString(const char ** key, UErrorCode& status) {
int32_t len = 0;
const UChar* r = ures_getNextString(fResource, &len, key, &status);
- return UnicodeString(TRUE, r, len);
+ return UnicodeString(true, r, len);
}
ResourceBundle ResourceBundle::get(int32_t indexR, UErrorCode& status) const {
@@ -336,7 +336,7 @@ ResourceBundle ResourceBundle::get(int32_t indexR, UErrorCode& status) const {
UnicodeString ResourceBundle::getStringEx(int32_t indexS, UErrorCode& status) const {
int32_t len = 0;
const UChar* r = ures_getStringByIndex(fResource, indexS, &len, &status);
- return UnicodeString(TRUE, r, len);
+ return UnicodeString(true, r, len);
}
ResourceBundle ResourceBundle::get(const char* key, UErrorCode& status) const {
@@ -364,7 +364,7 @@ ResourceBundle ResourceBundle::getWithFallback(const char* key, UErrorCode& stat
UnicodeString ResourceBundle::getStringEx(const char* key, UErrorCode& status) const {
int32_t len = 0;
const UChar* r = ures_getStringByKey(fResource, key, &len, &status);
- return UnicodeString(TRUE, r, len);
+ return UnicodeString(true, r, len);
}
const char*
diff --git a/thirdparty/icu4c/common/resource.h b/thirdparty/icu4c/common/resource.h
index 48f5b9fa6e..1483f7d6bc 100644
--- a/thirdparty/icu4c/common/resource.h
+++ b/thirdparty/icu4c/common/resource.h
@@ -286,8 +286,8 @@ public:
UErrorCode &errorCode) = 0;
private:
- ResourceSink(const ResourceSink &); // no copy constructor
- ResourceSink &operator=(const ResourceSink &); // no assignment operator
+ ResourceSink(const ResourceSink &) = delete; // no copy constructor
+ ResourceSink &operator=(const ResourceSink &) = delete; // no assignment operator
};
U_NAMESPACE_END
diff --git a/thirdparty/icu4c/common/ruleiter.cpp b/thirdparty/icu4c/common/ruleiter.cpp
index 41eea23c0d..33ffd3d833 100644
--- a/thirdparty/icu4c/common/ruleiter.cpp
+++ b/thirdparty/icu4c/common/ruleiter.cpp
@@ -39,7 +39,7 @@ UChar32 RuleCharacterIterator::next(int32_t options, UBool& isEscaped, UErrorCod
if (U_FAILURE(ec)) return DONE;
UChar32 c = DONE;
- isEscaped = FALSE;
+ isEscaped = false;
for (;;) {
c = _current();
@@ -75,7 +75,7 @@ UChar32 RuleCharacterIterator::next(int32_t options, UBool& isEscaped, UErrorCod
int32_t offset = 0;
c = lookahead(tempEscape, MAX_U_NOTATION_LEN).unescapeAt(offset);
jumpahead(offset);
- isEscaped = TRUE;
+ isEscaped = true;
if (c < 0) {
ec = U_MALFORMED_UNICODE_ESCAPE;
return DONE;
diff --git a/thirdparty/icu4c/common/ruleiter.h b/thirdparty/icu4c/common/ruleiter.h
index 09af129784..41731407da 100644
--- a/thirdparty/icu4c/common/ruleiter.h
+++ b/thirdparty/icu4c/common/ruleiter.h
@@ -66,28 +66,28 @@ public:
/**
* Value returned when there are no more characters to iterate.
*/
- enum { DONE = -1 };
+ static constexpr int32_t DONE = -1;
/**
* Bitmask option to enable parsing of variable names. If (options &
* PARSE_VARIABLES) != 0, then an embedded variable will be expanded to
* its value. Variables are parsed using the SymbolTable API.
*/
- enum { PARSE_VARIABLES = 1 };
+ static constexpr int32_t PARSE_VARIABLES = 1;
/**
* Bitmask option to enable parsing of escape sequences. If (options &
* PARSE_ESCAPES) != 0, then an embedded escape sequence will be expanded
* to its value. Escapes are parsed using Utility.unescapeAt().
*/
- enum { PARSE_ESCAPES = 2 };
+ static constexpr int32_t PARSE_ESCAPES = 2;
/**
* Bitmask option to enable skipping of whitespace. If (options &
* SKIP_WHITESPACE) != 0, then Pattern_White_Space characters will be silently
* skipped, as if they were not present in the input.
*/
- enum { SKIP_WHITESPACE = 4 };
+ static constexpr int32_t SKIP_WHITESPACE = 4;
/**
* Constructs an iterator over the given text, starting at the given
diff --git a/thirdparty/icu4c/common/serv.cpp b/thirdparty/icu4c/common/serv.cpp
index c26dbca1a9..9d8c04149c 100644
--- a/thirdparty/icu4c/common/serv.cpp
+++ b/thirdparty/icu4c/common/serv.cpp
@@ -64,7 +64,7 @@ ICUServiceKey::currentDescriptor(UnicodeString& result) const
UBool
ICUServiceKey::fallback()
{
- return FALSE;
+ return false;
}
UBool
@@ -249,7 +249,7 @@ public:
}
/**
- * Return TRUE if there is at least one reference to this and the
+ * Return true if there is at least one reference to this and the
* resource has not been released.
*/
UBool isShared() const {
@@ -454,11 +454,11 @@ ICUService::getKey(ICUServiceKey& key, UnicodeString* actualReturn, const ICUSer
UnicodeString currentDescriptor;
LocalPointer<UVector> cacheDescriptorList;
- UBool putInCache = FALSE;
+ UBool putInCache = false;
int32_t startIndex = 0;
int32_t limit = factories->size();
- UBool cacheResult = TRUE;
+ UBool cacheResult = true;
if (factory != NULL) {
for (int32_t i = 0; i < limit; ++i) {
@@ -472,7 +472,7 @@ ICUService::getKey(ICUServiceKey& key, UnicodeString* actualReturn, const ICUSer
status = U_ILLEGAL_ARGUMENT_ERROR;
return NULL;
}
- cacheResult = FALSE;
+ cacheResult = false;
}
do {
@@ -486,7 +486,7 @@ ICUService::getKey(ICUServiceKey& key, UnicodeString* actualReturn, const ICUSer
// first test of cache failed, so we'll have to update
// the cache if we eventually succeed-- that is, if we're
// going to update the cache at all.
- putInCache = TRUE;
+ putInCache = true;
int32_t index = startIndex;
while (index < limit) {
@@ -796,7 +796,7 @@ ICUService::getDisplayNames(UVector& result,
URegistryKey
ICUService::registerInstance(UObject* objToAdopt, const UnicodeString& id, UErrorCode& status)
{
- return registerInstance(objToAdopt, id, TRUE, status);
+ return registerInstance(objToAdopt, id, true, status);
}
URegistryKey
@@ -864,13 +864,13 @@ UBool
ICUService::unregister(URegistryKey rkey, UErrorCode& status)
{
ICUServiceFactory *factory = (ICUServiceFactory*)rkey;
- UBool result = FALSE;
+ UBool result = false;
if (factory != NULL && factories != NULL) {
Mutex mutex(&lock);
if (factories->removeElement(factory)) {
clearCaches();
- result = TRUE;
+ result = true;
} else {
status = U_ILLEGAL_ARGUMENT_ERROR;
delete factory;
diff --git a/thirdparty/icu4c/common/servlk.cpp b/thirdparty/icu4c/common/servlk.cpp
index 538982ca36..7021806659 100644
--- a/thirdparty/icu4c/common/servlk.cpp
+++ b/thirdparty/icu4c/common/servlk.cpp
@@ -126,24 +126,24 @@ LocaleKey::fallback() {
int x = _currentID.lastIndexOf(UNDERSCORE_CHAR);
if (x != -1) {
_currentID.remove(x); // truncate current or fallback, whichever we're pointing to
- return TRUE;
+ return true;
}
if (!_fallbackID.isBogus()) {
_currentID = _fallbackID;
_fallbackID.setToBogus();
- return TRUE;
+ return true;
}
if (_currentID.length() > 0) {
_currentID.remove(0); // completely truncate
- return TRUE;
+ return true;
}
_currentID.setToBogus();
}
- return FALSE;
+ return false;
}
UBool
diff --git a/thirdparty/icu4c/common/servlkf.cpp b/thirdparty/icu4c/common/servlkf.cpp
index 84f2347cdd..7ccb0c72aa 100644
--- a/thirdparty/icu4c/common/servlkf.cpp
+++ b/thirdparty/icu4c/common/servlkf.cpp
@@ -65,7 +65,7 @@ LocaleKeyFactory::handlesKey(const ICUServiceKey& key, UErrorCode& status) const
key.currentID(id);
return supported->get(id) != NULL;
}
- return FALSE;
+ return false;
}
void
diff --git a/thirdparty/icu4c/common/servls.cpp b/thirdparty/icu4c/common/servls.cpp
index 98f0a8a12b..19481122ef 100644
--- a/thirdparty/icu4c/common/servls.cpp
+++ b/thirdparty/icu4c/common/servls.cpp
@@ -215,11 +215,11 @@ public:
UBool upToDate(UErrorCode& status) const {
if (U_SUCCESS(status)) {
if (_timestamp == _service->getTimestamp()) {
- return TRUE;
+ return true;
}
status = U_ENUM_OUT_OF_SYNC_ERROR;
}
- return FALSE;
+ return false;
}
virtual int32_t count(UErrorCode& status) const override {
diff --git a/thirdparty/icu4c/common/sharedobject.h b/thirdparty/icu4c/common/sharedobject.h
index 6ccfb27b01..6298662bba 100644
--- a/thirdparty/icu4c/common/sharedobject.h
+++ b/thirdparty/icu4c/common/sharedobject.h
@@ -38,8 +38,8 @@ public:
virtual ~UnifiedCacheBase();
private:
- UnifiedCacheBase(const UnifiedCacheBase &);
- UnifiedCacheBase &operator=(const UnifiedCacheBase &);
+ UnifiedCacheBase(const UnifiedCacheBase &) = delete;
+ UnifiedCacheBase &operator=(const UnifiedCacheBase &) = delete;
};
/**
diff --git a/thirdparty/icu4c/common/simpleformatter.cpp b/thirdparty/icu4c/common/simpleformatter.cpp
index f7f7aead61..01d3024cfc 100644
--- a/thirdparty/icu4c/common/simpleformatter.cpp
+++ b/thirdparty/icu4c/common/simpleformatter.cpp
@@ -65,7 +65,7 @@ UBool SimpleFormatter::applyPatternMinMaxArguments(
int32_t min, int32_t max,
UErrorCode &errorCode) {
if (U_FAILURE(errorCode)) {
- return FALSE;
+ return false;
}
// Parse consistent with MessagePattern, but
// - support only simple numbered arguments
@@ -76,7 +76,7 @@ UBool SimpleFormatter::applyPatternMinMaxArguments(
compiledPattern.setTo((UChar)0);
int32_t textLength = 0;
int32_t maxArg = -1;
- UBool inQuote = FALSE;
+ UBool inQuote = false;
for (int32_t i = 0; i < patternLength;) {
UChar c = patternBuffer[i++];
if (c == APOS) {
@@ -85,12 +85,12 @@ UBool SimpleFormatter::applyPatternMinMaxArguments(
++i;
} else if (inQuote) {
// skip the quote-ending apostrophe
- inQuote = FALSE;
+ inQuote = false;
continue;
} else if (c == OPEN_BRACE || c == CLOSE_BRACE) {
// Skip the quote-starting apostrophe, find the end of the quoted literal text.
++i;
- inQuote = TRUE;
+ inQuote = true;
} else {
// The apostrophe is part of literal text.
c = APOS;
@@ -123,7 +123,7 @@ UBool SimpleFormatter::applyPatternMinMaxArguments(
}
if (argNumber < 0 || c != CLOSE_BRACE) {
errorCode = U_ILLEGAL_ARGUMENT_ERROR;
- return FALSE;
+ return false;
}
}
if (argNumber > maxArg) {
@@ -149,10 +149,10 @@ UBool SimpleFormatter::applyPatternMinMaxArguments(
int32_t argCount = maxArg + 1;
if (argCount < min || max < argCount) {
errorCode = U_ILLEGAL_ARGUMENT_ERROR;
- return FALSE;
+ return false;
}
compiledPattern.setCharAt(0, (UChar)argCount);
- return TRUE;
+ return true;
}
UnicodeString& SimpleFormatter::format(
@@ -192,7 +192,7 @@ UnicodeString& SimpleFormatter::formatAndAppend(
return appendTo;
}
return format(compiledPattern.getBuffer(), compiledPattern.length(), values,
- appendTo, NULL, TRUE,
+ appendTo, NULL, true,
offsets, offsetsLength, errorCode);
}
@@ -241,7 +241,7 @@ UnicodeString &SimpleFormatter::formatAndReplace(
result.remove();
}
return format(cp, cpLength, values,
- result, &resultCopy, FALSE,
+ result, &resultCopy, false,
offsets, offsetsLength, errorCode);
}
diff --git a/thirdparty/icu4c/common/static_unicode_sets.cpp b/thirdparty/icu4c/common/static_unicode_sets.cpp
index 5dab3931a7..db9432f49a 100644
--- a/thirdparty/icu4c/common/static_unicode_sets.cpp
+++ b/thirdparty/icu4c/common/static_unicode_sets.cpp
@@ -31,7 +31,7 @@ alignas(UnicodeSet)
char gEmptyUnicodeSet[sizeof(UnicodeSet)];
// Whether the gEmptyUnicodeSet is initialized and ready to use.
-UBool gEmptyUnicodeSetInitialized = FALSE;
+UBool gEmptyUnicodeSetInitialized = false;
inline UnicodeSet* getImpl(Key key) {
UnicodeSet* candidate = gUnicodeSets[key];
@@ -118,7 +118,7 @@ class ParseDataSink : public ResourceSink {
} else {
// Unknown class of parse lenients
// TODO(ICU-20428): Make ICU automatically accept new classes?
- U_ASSERT(FALSE);
+ U_ASSERT(false);
}
if (U_FAILURE(status)) { return; }
}
@@ -129,19 +129,19 @@ class ParseDataSink : public ResourceSink {
};
-icu::UInitOnce gNumberParseUniSetsInitOnce = U_INITONCE_INITIALIZER;
+icu::UInitOnce gNumberParseUniSetsInitOnce {};
UBool U_CALLCONV cleanupNumberParseUniSets() {
if (gEmptyUnicodeSetInitialized) {
reinterpret_cast<UnicodeSet*>(gEmptyUnicodeSet)->~UnicodeSet();
- gEmptyUnicodeSetInitialized = FALSE;
+ gEmptyUnicodeSetInitialized = false;
}
for (int32_t i = 0; i < UNISETS_KEY_COUNT; i++) {
delete gUnicodeSets[i];
gUnicodeSets[i] = nullptr;
}
gNumberParseUniSetsInitOnce.reset();
- return TRUE;
+ return true;
}
void U_CALLCONV initNumberParseUniSets(UErrorCode& status) {
@@ -150,7 +150,7 @@ void U_CALLCONV initNumberParseUniSets(UErrorCode& status) {
// Initialize the empty instance for well-defined fallback behavior
new(gEmptyUnicodeSet) UnicodeSet();
reinterpret_cast<UnicodeSet*>(gEmptyUnicodeSet)->freeze();
- gEmptyUnicodeSetInitialized = TRUE;
+ gEmptyUnicodeSetInitialized = true;
// These sets were decided after discussion with icu-design@. See tickets #13084 and #13309.
// Zs+TAB is "horizontal whitespace" according to UTS #18 (blank property).
diff --git a/thirdparty/icu4c/common/stringtriebuilder.cpp b/thirdparty/icu4c/common/stringtriebuilder.cpp
index 4d52a88af7..e6670d1cb7 100644
--- a/thirdparty/icu4c/common/stringtriebuilder.cpp
+++ b/thirdparty/icu4c/common/stringtriebuilder.cpp
@@ -85,16 +85,16 @@ StringTrieBuilder::build(UStringTrieBuildOption buildOption, int32_t elementsLen
// have a common prefix of length unitIndex.
int32_t
StringTrieBuilder::writeNode(int32_t start, int32_t limit, int32_t unitIndex) {
- UBool hasValue=FALSE;
+ UBool hasValue=false;
int32_t value=0;
int32_t type;
if(unitIndex==getElementStringLength(start)) {
// An intermediate or final value.
value=getElementValue(start++);
if(start==limit) {
- return writeValueAndFinal(value, TRUE); // final-value node
+ return writeValueAndFinal(value, true); // final-value node
}
- hasValue=TRUE;
+ hasValue=true;
}
// Now all [start..limit[ strings are longer than unitIndex.
int32_t minUnit=getElementUnit(start, unitIndex);
@@ -209,7 +209,7 @@ StringTrieBuilder::makeNode(int32_t start, int32_t limit, int32_t unitIndex, UEr
if(U_FAILURE(errorCode)) {
return NULL;
}
- UBool hasValue=FALSE;
+ UBool hasValue=false;
int32_t value=0;
if(unitIndex==getElementStringLength(start)) {
// An intermediate or final value.
@@ -217,7 +217,7 @@ StringTrieBuilder::makeNode(int32_t start, int32_t limit, int32_t unitIndex, UEr
if(start==limit) {
return registerFinalValue(value, errorCode);
}
- hasValue=TRUE;
+ hasValue=true;
}
Node *node;
// Now all [start..limit[ strings are longer than unitIndex.
@@ -410,7 +410,7 @@ StringTrieBuilder::FinalValueNode::operator==(const Node &other) const {
void
StringTrieBuilder::FinalValueNode::write(StringTrieBuilder &builder) {
- offset=builder.writeValueAndFinal(value, TRUE);
+ offset=builder.writeValueAndFinal(value, true);
}
bool
@@ -448,7 +448,7 @@ StringTrieBuilder::IntermediateValueNode::markRightEdgesFirst(int32_t edgeNumber
void
StringTrieBuilder::IntermediateValueNode::write(StringTrieBuilder &builder) {
next->write(builder);
- offset=builder.writeValueAndFinal(value, FALSE);
+ offset=builder.writeValueAndFinal(value, false);
}
bool
@@ -526,7 +526,7 @@ StringTrieBuilder::ListBranchNode::write(StringTrieBuilder &builder) {
// not jump for it at all.
unitNumber=length-1;
if(rightEdge==NULL) {
- builder.writeValueAndFinal(values[unitNumber], TRUE);
+ builder.writeValueAndFinal(values[unitNumber], true);
} else {
rightEdge->write(builder);
}
@@ -538,12 +538,12 @@ StringTrieBuilder::ListBranchNode::write(StringTrieBuilder &builder) {
if(equal[unitNumber]==NULL) {
// Write the final value for the one string ending with this unit.
value=values[unitNumber];
- isFinal=TRUE;
+ isFinal=true;
} else {
// Write the delta to the start position of the sub-node.
U_ASSERT(equal[unitNumber]->getOffset()>0);
value=offset-equal[unitNumber]->getOffset();
- isFinal=FALSE;
+ isFinal=false;
}
builder.writeValueAndFinal(value, isFinal);
offset=builder.write(units[unitNumber]);
diff --git a/thirdparty/icu4c/common/uarrsort.cpp b/thirdparty/icu4c/common/uarrsort.cpp
index c17dbb2e2b..17b6964ffe 100644
--- a/thirdparty/icu4c/common/uarrsort.cpp
+++ b/thirdparty/icu4c/common/uarrsort.cpp
@@ -75,7 +75,7 @@ U_CAPI int32_t U_EXPORT2
uprv_stableBinarySearch(char *array, int32_t limit, void *item, int32_t itemSize,
UComparator *cmp, const void *context) {
int32_t start=0;
- UBool found=FALSE;
+ UBool found=false;
/* Binary search until we get down to a tiny sub-array. */
while((limit-start)>=MIN_QSORT) {
@@ -90,10 +90,10 @@ uprv_stableBinarySearch(char *array, int32_t limit, void *item, int32_t itemSize
* However, if there are many equal items, then it should be
* faster to continue with the binary search.
* It seems likely that we either have all unique items
- * (where found will never become TRUE in the insertion sort)
+ * (where found will never become true in the insertion sort)
* or potentially many duplicates.
*/
- found=TRUE;
+ found=true;
start=i+1;
} else if(diff<0) {
limit=i;
@@ -106,7 +106,7 @@ uprv_stableBinarySearch(char *array, int32_t limit, void *item, int32_t itemSize
while(start<limit) {
int32_t diff=cmp(context, item, array+start*itemSize);
if(diff==0) {
- found=TRUE;
+ found=true;
} else if(diff<0) {
break;
}
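The comment in uprv_stableBinarySearch() above explains that, when equal items are found, the search keeps moving right so that a later insertion preserves the original order of equal elements. A small, self-contained sketch of that convention (essentially an upper-bound search over a plain int32_t array rather than ICU's generic comparator) might look like:

    #include <cstdint>

    // Returns the index at which `item` should be inserted into the sorted array
    // array[0..limit) so that it lands after any elements equal to it, keeping the
    // relative order of equal items stable.
    int32_t stableInsertionIndex(const int32_t* array, int32_t limit, int32_t item) {
        int32_t start = 0;
        while (start < limit) {
            int32_t i = start + (limit - start) / 2;
            if (item < array[i]) {
                limit = i;          // item sorts strictly before array[i]
            } else {
                start = i + 1;      // equal or greater: continue to the right of i
            }
        }
        return start;               // first index whose element is strictly greater than item
    }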
diff --git a/thirdparty/icu4c/common/ubidi.cpp b/thirdparty/icu4c/common/ubidi.cpp
index 6d27827c50..eb40a212e1 100644
--- a/thirdparty/icu4c/common/ubidi.cpp
+++ b/thirdparty/icu4c/common/ubidi.cpp
@@ -149,7 +149,7 @@ ubidi_openSized(int32_t maxLength, int32_t maxRunCount, UErrorCode *pErrorCode)
return NULL;
}
- /* reset the object, all pointers NULL, all flags FALSE, all sizes 0 */
+ /* reset the object, all pointers NULL, all flags false, all sizes 0 */
uprv_memset(pBiDi, 0, sizeof(UBiDi));
/* allocate memory for arrays as requested */
@@ -160,7 +160,7 @@ ubidi_openSized(int32_t maxLength, int32_t maxRunCount, UErrorCode *pErrorCode)
*pErrorCode=U_MEMORY_ALLOCATION_ERROR;
}
} else {
- pBiDi->mayAllocateText=TRUE;
+ pBiDi->mayAllocateText=true;
}
if(maxRunCount>0) {
@@ -171,7 +171,7 @@ ubidi_openSized(int32_t maxLength, int32_t maxRunCount, UErrorCode *pErrorCode)
*pErrorCode=U_MEMORY_ALLOCATION_ERROR;
}
} else {
- pBiDi->mayAllocateRuns=TRUE;
+ pBiDi->mayAllocateRuns=true;
}
if(U_SUCCESS(*pErrorCode)) {
@@ -184,7 +184,7 @@ ubidi_openSized(int32_t maxLength, int32_t maxRunCount, UErrorCode *pErrorCode)
/*
* We are allowed to allocate memory if memory==NULL or
- * mayAllocate==TRUE for each array that we need.
+ * mayAllocate==true for each array that we need.
* We also try to grow memory as needed if we
* allocate it.
*
@@ -203,18 +203,18 @@ ubidi_getMemory(BidiMemoryForAllocation *bidiMem, int32_t *pSize, UBool mayAlloc
/* we need to allocate memory */
if(mayAllocate && (*pMemory=uprv_malloc(sizeNeeded))!=NULL) {
*pSize=sizeNeeded;
- return TRUE;
+ return true;
} else {
- return FALSE;
+ return false;
}
} else {
if(sizeNeeded<=*pSize) {
/* there is already enough memory */
- return TRUE;
+ return true;
}
else if(!mayAllocate) {
/* not enough memory, and we must not allocate */
- return FALSE;
+ return false;
} else {
/* we try to grow */
void *memory;
@@ -225,10 +225,10 @@ ubidi_getMemory(BidiMemoryForAllocation *bidiMem, int32_t *pSize, UBool mayAlloc
if((memory=uprv_realloc(*pMemory, sizeNeeded))!=NULL) {
*pMemory=memory;
*pSize=sizeNeeded;
- return TRUE;
+ return true;
} else {
/* we failed to grow */
- return FALSE;
+ return false;
}
}
}
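ubidi_getMemory(), patched above, only allocates or grows a buffer when the mayAllocate flag permits it; otherwise it reports failure so the caller can fall back to its preallocated storage. A hedged, self-contained sketch of that policy (not the ICU function; ensureCapacity is a made-up name):

    #include <cstdint>
    #include <cstdlib>

    // Ensure *memory can hold sizeNeeded bytes; allocate or grow only when mayAllocate is true.
    // Returns false when the buffer is too small and must not (or cannot) be grown.
    static bool ensureCapacity(void** memory, int32_t* size, bool mayAllocate, int32_t sizeNeeded) {
        if (*memory == nullptr) {                        // nothing allocated yet
            if (!mayAllocate) return false;
            void* p = std::malloc(sizeNeeded);
            if (p == nullptr) return false;
            *memory = p;
            *size = sizeNeeded;
            return true;
        }
        if (sizeNeeded <= *size) return true;            // already large enough
        if (!mayAllocate) return false;                  // caller supplied a fixed buffer
        void* p = std::realloc(*memory, sizeNeeded);     // try to grow
        if (p == nullptr) return false;                  // grow failed; the old buffer is still valid
        *memory = p;
        *size = sizeNeeded;
        return true;
    }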
@@ -280,7 +280,7 @@ ubidi_isInverse(UBiDi *pBiDi) {
if(pBiDi!=NULL) {
return pBiDi->isInverse;
} else {
- return FALSE;
+ return false;
}
}
@@ -403,17 +403,17 @@ checkParaCount(UBiDi *pBiDi) {
int32_t count=pBiDi->paraCount;
if(pBiDi->paras==pBiDi->simpleParas) {
if(count<=SIMPLE_PARAS_COUNT)
- return TRUE;
+ return true;
if(!getInitialParasMemory(pBiDi, SIMPLE_PARAS_COUNT * 2))
- return FALSE;
+ return false;
pBiDi->paras=pBiDi->parasMemory;
uprv_memcpy(pBiDi->parasMemory, pBiDi->simpleParas, SIMPLE_PARAS_COUNT * sizeof(Para));
- return TRUE;
+ return true;
}
if(!getInitialParasMemory(pBiDi, count * 2))
- return FALSE;
+ return false;
pBiDi->paras=pBiDi->parasMemory;
- return TRUE;
+ return true;
}
/*
@@ -579,8 +579,8 @@ getDirProps(UBiDi *pBiDi) {
}
if(i<originalLength) { /* B not last char in text */
pBiDi->paraCount++;
- if(checkParaCount(pBiDi)==FALSE) /* not enough memory for a new para entry */
- return FALSE;
+ if(checkParaCount(pBiDi)==false) /* not enough memory for a new para entry */
+ return false;
if(isDefaultLevel) {
pBiDi->paras[pBiDi->paraCount-1].level=defaultParaLevel;
state=SEEKING_STRONG_FOR_PARA;
@@ -636,7 +636,7 @@ getDirProps(UBiDi *pBiDi) {
}
pBiDi->flags=flags;
pBiDi->lastArabicPos=lastArabicPos;
- return TRUE;
+ return true;
}
/* determine the paragraph level at position index */
@@ -743,14 +743,14 @@ bracketProcessPDI(BracketData *bd) {
}
/* newly found opening bracket: create an openings entry */
-static UBool /* return TRUE if success */
+static UBool /* return true if success */
bracketAddOpening(BracketData *bd, UChar match, int32_t position) {
IsoRun *pLastIsoRun=&bd->isoRuns[bd->isoRunLast];
Opening *pOpening;
if(pLastIsoRun->limit>=bd->openingsCount) { /* no available new entry */
UBiDi *pBiDi=bd->pBiDi;
if(!getInitialOpeningsMemory(pBiDi, pLastIsoRun->limit * 2))
- return FALSE;
+ return false;
if(bd->openings==bd->simpleOpenings)
uprv_memcpy(pBiDi->openingsMemory, bd->simpleOpenings,
SIMPLE_OPENINGS_COUNT * sizeof(Opening));
@@ -764,7 +764,7 @@ bracketAddOpening(BracketData *bd, UChar match, int32_t position) {
pOpening->contextPos=pLastIsoRun->contextPos;
pOpening->flags=0;
pLastIsoRun->limit++;
- return TRUE;
+ return true;
}
/* change N0c1 to N0c2 when a preceding bracket is assigned the embedding level */
@@ -804,7 +804,7 @@ bracketProcessClosing(BracketData *bd, int32_t openIdx, int32_t position) {
DirProp newProp;
pOpening=&bd->openings[openIdx];
direction=(UBiDiDirection)(pLastIsoRun->level&1);
- stable=TRUE; /* assume stable until proved otherwise */
+ stable=true; /* assume stable until proved otherwise */
/* The stable flag is set when brackets are paired and their
level is resolved and cannot be changed by what will be
@@ -873,7 +873,7 @@ bracketProcessClosing(BracketData *bd, int32_t openIdx, int32_t position) {
}
/* handle strong characters, digits and candidates for closing brackets */
-static UBool /* return TRUE if success */
+static UBool /* return true if success */
bracketProcessChar(BracketData *bd, int32_t position) {
IsoRun *pLastIsoRun=&bd->isoRuns[bd->isoRunLast];
DirProp *dirProps, dirProp, newProp;
@@ -912,7 +912,7 @@ bracketProcessChar(BracketData *bd, int32_t position) {
}
/* matching brackets are not overridden by LRO/RLO */
bd->pBiDi->levels[bd->openings[idx].position]&=~UBIDI_LEVEL_OVERRIDE;
- return TRUE;
+ return true;
}
/* We get here only if the ON character is not a matching closing
bracket or it is a case of N0d */
@@ -927,14 +927,14 @@ bracketProcessChar(BracketData *bd, int32_t position) {
create an opening entry for each synonym */
if(match==0x232A) { /* RIGHT-POINTING ANGLE BRACKET */
if(!bracketAddOpening(bd, 0x3009, position))
- return FALSE;
+ return false;
}
else if(match==0x3009) { /* RIGHT ANGLE BRACKET */
if(!bracketAddOpening(bd, 0x232A, position))
- return FALSE;
+ return false;
}
if(!bracketAddOpening(bd, match, position))
- return FALSE;
+ return false;
}
}
level=bd->pBiDi->levels[position];
@@ -998,7 +998,7 @@ bracketProcessChar(BracketData *bd, int32_t position) {
if(position>bd->openings[i].position)
bd->openings[i].flags|=flag;
}
- return TRUE;
+ return true;
}
/* perform (X1)..(X9) ------------------------------------------------------- */
@@ -2432,11 +2432,11 @@ setParaRunsOnly(UBiDi *pBiDi, const UChar *text, int32_t length,
* than the original text. But we don't want the levels memory to be
* reallocated shorter than the original length, since we need to restore
* the levels as after the first call to ubidi_setpara() before returning.
- * We will force mayAllocateText to FALSE before the second call to
+ * We will force mayAllocateText to false before the second call to
* ubidi_setpara(), and will restore it afterwards.
*/
saveMayAllocateText=pBiDi->mayAllocateText;
- pBiDi->mayAllocateText=FALSE;
+ pBiDi->mayAllocateText=false;
ubidi_setPara(pBiDi, visualText, visualLength, paraLevel, NULL, pErrorCode);
pBiDi->mayAllocateText=saveMayAllocateText;
ubidi_getRuns(pBiDi, pErrorCode);
@@ -2866,7 +2866,7 @@ ubidi_isOrderParagraphsLTR(UBiDi *pBiDi) {
if(pBiDi!=NULL) {
return pBiDi->orderParagraphsLTR;
} else {
- return FALSE;
+ return false;
}
}
diff --git a/thirdparty/icu4c/common/ubidi_props.cpp b/thirdparty/icu4c/common/ubidi_props.cpp
index afcc4aaf4f..3ba58f7af9 100644
--- a/thirdparty/icu4c/common/ubidi_props.cpp
+++ b/thirdparty/icu4c/common/ubidi_props.cpp
@@ -53,7 +53,7 @@ _enumPropertyStartsRange(const void *context, UChar32 start, UChar32 end, uint32
/* add the start code point to the USet */
const USetAdder *sa=(const USetAdder *)context;
sa->add(sa->set, start);
- return TRUE;
+ return true;
}
U_CFUNC void
diff --git a/thirdparty/icu4c/common/ubidi_props_data.h b/thirdparty/icu4c/common/ubidi_props_data.h
index aad919fa2e..01fcc968cb 100644
--- a/thirdparty/icu4c/common/ubidi_props_data.h
+++ b/thirdparty/icu4c/common/ubidi_props_data.h
@@ -9,11 +9,11 @@
#ifdef INCLUDED_FROM_UBIDI_PROPS_C
-static const UVersionInfo ubidi_props_dataVersion={0xe,0,0,0};
+static const UVersionInfo ubidi_props_dataVersion={0xf,0,0,0};
-static const int32_t ubidi_props_indexes[UBIDI_IX_TOP]={0x10,0x6a98,0x64a8,0x28,0x620,0x8cc,0x10ac0,0x10d24,0,0,0,0,0,0,0,0x6702b6};
+static const int32_t ubidi_props_indexes[UBIDI_IX_TOP]={0x10,0x6bc0,0x65d0,0x28,0x620,0x8cc,0x10ac0,0x10d24,0,0,0,0,0,0,0,0x6702b6};
-static const uint16_t ubidi_props_trieIndex[12876]={
+static const uint16_t ubidi_props_trieIndex[13024]={
0x387,0x38f,0x397,0x39f,0x3b7,0x3bf,0x3c7,0x3cf,0x3a7,0x3af,0x3a7,0x3af,0x3a7,0x3af,0x3a7,0x3af,
0x3a7,0x3af,0x3a7,0x3af,0x3d5,0x3dd,0x3e5,0x3ed,0x3f5,0x3fd,0x3f9,0x401,0x409,0x411,0x40c,0x414,
0x3a7,0x3af,0x3a7,0x3af,0x41c,0x424,0x3a7,0x3af,0x3a7,0x3af,0x3a7,0x3af,0x42a,0x432,0x43a,0x442,
@@ -185,61 +185,61 @@ static const uint16_t ubidi_props_trieIndex[12876]={
0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x9a6,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,
0x9ae,0x9b2,0x43c,0x43c,0x43c,0x43c,0x9c2,0x9ba,0x43c,0x9ca,0x43c,0x43c,0x9d2,0x9d8,0x43c,0x43c,
0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x9e8,0x9e0,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,
-0x43c,0x43c,0x43c,0x9f0,0x43c,0x9f8,0x43c,0x43c,0x43c,0x9fc,0xa03,0xa09,0xa11,0xa15,0xa1d,0x43c,
-0x51b,0xa25,0xa2c,0xa33,0x41e,0xa3b,0x569,0x3a7,0x501,0xa42,0x3a7,0xa48,0x41e,0xa4d,0xa55,0x3a7,
-0x3a7,0xa5a,0x3a7,0x3a7,0x3a7,0x3a7,0x836,0xa62,0x41e,0x5a3,0x57e,0xa69,0x3a7,0x3a7,0x3a7,0x3a7,
-0x3a7,0xa25,0xa71,0x3a7,0x3a7,0xa79,0xa81,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0xa85,0xa8d,0x3a7,
-0x3a7,0xa95,0x57e,0xa9d,0x3a7,0xaa3,0x3a7,0x3a7,0x60f,0xaab,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
-0xab0,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0xab7,0xabf,0x3a7,0x3a7,0x3a7,0xac2,0x57e,0xaca,
-0xace,0xad6,0x3a7,0xadd,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
-0xae4,0x3a7,0x3a7,0xaf2,0xaec,0x3a7,0x3a7,0x3a7,0xafa,0xb02,0x3a7,0xb06,0x3a7,0x3a7,0x3a7,0x3a7,
-0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x5a5,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0xb13,0xb0e,0x3a7,
+0x43c,0x43c,0x43c,0x9f0,0x43c,0x9f8,0x4a6,0xa00,0x43c,0xa08,0xa0f,0xa15,0xa1d,0xa21,0xa29,0x43c,
+0x51b,0xa31,0xa38,0xa3f,0x41e,0xa47,0x569,0x3a7,0x501,0xa4e,0x3a7,0xa54,0x41e,0xa59,0xa61,0x3a7,
+0x3a7,0xa66,0x51b,0x3a7,0x3a7,0x3a7,0x836,0xa6e,0x41e,0x5a3,0x57e,0xa75,0x3a7,0x3a7,0x3a7,0x3a7,
+0x3a7,0xa31,0xa7d,0x3a7,0x3a7,0xa85,0xa8d,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0xa91,0xa99,0x3a7,
+0x3a7,0xaa1,0x57e,0xaa9,0x3a7,0xaaf,0x3a7,0x3a7,0x60f,0xab7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
+0xabc,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0xac3,0xacb,0x3a7,0x3a7,0x3a7,0xace,0x57e,0xad6,
+0xada,0xae2,0x3a7,0xae9,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
+0xaf0,0x3a7,0x3a7,0xafe,0xaf8,0x3a7,0x3a7,0x3a7,0xb06,0xb0e,0x3a7,0xb12,0x3a7,0x3a7,0x3a7,0x3a7,
+0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x5a5,0x41e,0x99e,0xb1a,0x3a7,0x3a7,0x3a7,0xb27,0xb22,0x3a7,
0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
-0xb1b,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
-0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0xb22,
-0x3a7,0xb28,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
+0xb2f,0xb37,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
+0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0xb3d,
+0x3a7,0xb43,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
-0x3a7,0x3a7,0xa49,0x3a7,0xb2e,0x3a7,0x3a7,0xb36,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
+0x3a7,0x3a7,0xa55,0x3a7,0xb49,0x3a7,0x3a7,0xb51,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
-0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x535,0xb3e,0x3a7,0x3a7,
+0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x535,0xb59,0x3a7,0x3a7,
0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
-0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3f5,0xb46,0x500,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
-0x3a7,0x3a7,0x3a7,0x3a7,0xb4e,0xb56,0xb5c,0x3a7,0xb62,0x67c,0x67c,0xb6a,0x3a7,0x3a7,0x3a7,0x3a7,
-0x3a7,0x67c,0x67c,0xb72,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
-0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0xb78,
-0x3a7,0xb7f,0x3a7,0xb7b,0x3a7,0xb82,0x3a7,0xb8a,0xb8e,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
-0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3f5,0xb96,0x3f5,0xb9d,0xba4,0xbac,0x3a7,
+0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3f5,0xb61,0x500,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
+0x3a7,0x3a7,0x3a7,0x3a7,0xb69,0xb71,0xb77,0x3a7,0xb7d,0x67c,0x67c,0xb85,0x3a7,0x3a7,0x3a7,0x3a7,
+0x3a7,0x67c,0x67c,0xb8d,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
+0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0xb93,
+0x3a7,0xb9a,0x3a7,0xb96,0x3a7,0xb9d,0x3a7,0xba5,0xba9,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
+0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3f5,0xbb1,0x3f5,0xbb8,0xbbf,0xbc7,0x3a7,
0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
-0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0xbb4,0xbbc,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
-0x3a7,0x3a7,0xb28,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0xa75,0x3a7,
-0xbc1,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
-0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
-0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0xbc9,
-0x43c,0xbd1,0xbd1,0xbd8,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,
+0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0xbcf,0xbd7,0x3a7,0x3a7,0xa55,0x3a7,0x3a7,
+0x3a7,0x3a7,0xb43,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0xa81,0x3a7,
+0xbdc,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
+0xbe4,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
+0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0xbec,
+0x43c,0xbf4,0xbf4,0xbfb,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,
0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x91e,0x4a6,0x4a6,0x43c,
-0x43c,0x4a6,0x4a6,0xbe0,0x43c,0x43c,0x43c,0x43c,0x43c,0x4a6,0x4a6,0x4a6,0x4a6,0x4a6,0x4a6,0x4a6,
-0xbe8,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x67c,0xbf0,0x67c,0x67c,0x67f,0xbf5,0xbf9,
-0x858,0xc01,0x3c9,0x3a7,0xc07,0x3a7,0xc0c,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x783,0x3a7,0x3a7,0x3a7,
+0x43c,0x4a6,0x4a6,0xc03,0x43c,0x43c,0x43c,0x43c,0x43c,0x4a6,0x4a6,0x4a6,0x4a6,0x4a6,0x4a6,0x4a6,
+0xc0b,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x43c,0x67c,0xc13,0x67c,0x67c,0x67f,0xc18,0xc1c,
+0x858,0xc24,0x3c9,0x3a7,0xc2a,0x3a7,0xc2f,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x783,0x3a7,0x3a7,0x3a7,
0x3a7,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,
-0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0xc14,
-0x98f,0x67c,0x67c,0x67c,0x67f,0x67c,0x67c,0xc1c,0xc24,0xbf0,0x67c,0xc2c,0x67c,0xc34,0xc39,0x3a7,
-0x3a7,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67f,0xc41,0xc4d,0xc52,0xc5a,
-0xc49,0x67c,0x67c,0x67c,0x67c,0xc62,0x67c,0x792,0xc6a,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
+0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0xc37,
+0x98f,0x67c,0x67c,0x67c,0xc3e,0x67c,0x67c,0xc45,0xc4d,0xc13,0x67c,0xc55,0x67c,0xc5d,0xc62,0x3a7,
+0x3a7,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67c,0x67f,0xc6a,0xc73,0xc77,0xc7f,
+0xc6f,0x67c,0x67c,0x67c,0x67c,0xc87,0x67c,0x792,0xc8f,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
-0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0xc71,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
+0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0xc96,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,
-0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0xc71,0xc81,0xc79,0xc79,0xc79,0xc82,0xc82,0xc82,
-0xc82,0x3f5,0x3f5,0x3f5,0x3f5,0x3f5,0x3f5,0x3f5,0xc8a,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,
-0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,
-0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,
-0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,
-0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0xc82,0x386,0x386,0x386,0x12,0x12,0x12,0x12,
+0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0x3a7,0xc96,0xca6,0xc9e,0xc9e,0xc9e,0xca7,0xca7,0xca7,
+0xca7,0x3f5,0x3f5,0x3f5,0x3f5,0x3f5,0x3f5,0x3f5,0xcaf,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,
+0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,
+0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,
+0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,
+0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0xca7,0x386,0x386,0x386,0x12,0x12,0x12,0x12,
0x12,0x12,0x12,0x12,0x12,8,7,8,9,7,0x12,0x12,0x12,0x12,0x12,0x12,
0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,7,7,7,8,9,0xa,0xa,4,
4,4,0xa,0xa,0x310a,0xf20a,0xa,3,6,3,6,6,2,2,2,2,
@@ -384,7 +384,7 @@ static const uint16_t ubidi_props_trieIndex[12876]={
0xb1,0xb1,0xb1,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0xb1,0,0,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,
0xb1,0,0,0,0,0,0,0,0,0,0,0,0xb1,0xb1,0xb1,0xb1,
-0xb1,0xb1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0xb1,0xb1,0xb1,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0xb1,0xb1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0xb1,0,0xb1,0,0xb1,0x310a,0xf20a,0x310a,0xf20a,0,0,
@@ -654,6 +654,9 @@ static const uint16_t ubidi_props_trieIndex[12876]={
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,1,
1,1,1,1,1,1,1,1,1,1,1,0xb1,0xb1,1,1,1,
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,
+0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xb1,0xb1,0xb1,
+1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
0x4d,0x4d,0x4d,0x8d,0x4d,0x4d,0x4d,0x4d,0x4d,0x4d,0x4d,0x4d,0x4d,0x4d,0x4d,0x4d,
0x4d,0xd,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0x4d,0x4d,0x4d,
0x8d,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,
@@ -721,104 +724,110 @@ static const uint16_t ubidi_props_trieIndex[12876]={
0,0,0xb1,0,0xb1,0xb1,0,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0,0xb1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0xb1,0xb1,0,0,0,0xb1,0,0xb1,
+0,0,0,0,0,0,0,0,0xb1,0,0xb1,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,4,0xa,0xa,0xa,0xa,0xa,0xa,0xa,
0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,4,4,4,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0xa0,0xa0,0xa0,0xa0,
-0xa0,0xa0,0xa0,0xa0,0xa0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0xb1,0xb1,0xb1,0xb1,0xb1,0,0,0,
+0xa0,0xa0,0xa0,0xa0,0xa0,0xa0,0xa0,0xa0,0xa0,0xa0,0xa0,0xa0,0xb1,0,0,0,
+0,0,0,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,
+0xb1,0xb1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0xb1,0xb1,0xb1,0xb1,0xb1,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0xb1,0xb1,0xb1,0xb1,
+0xb1,0xb1,0xb1,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0xb1,0xb1,0xb1,0xb1,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0xa,0,0xb1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0xb1,0xb1,0xb1,0xb1,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0xa,0,0xb1,0,0,0,
+0,0,0,0,0xb2,0xb2,0xb2,0xb2,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0xb2,0xb2,0xb2,0xb2,0,0,0,0,
+0,0,0,0,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,
+0xb1,0xb1,0,0,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,
+0xb1,0xb1,0xb1,0xb1,0,0,0,0,0,0,0,0xb1,0xb1,0xb1,0,0,
+0,0,0,0,0,0,0,0xb2,0xb2,0xb2,0xb2,0xb2,0xb2,0xb2,0xb2,0xb1,
+0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0,0,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,
-0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0,0,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,
-0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0,0,0,0,0,0,0,0xb1,
-0xb1,0xb1,0,0,0,0,0,0,0,0,0,0xb2,0xb2,0xb2,0xb2,0xb2,
-0xb2,0xb2,0xb2,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0,0,0xb1,0xb1,0xb1,
-0xb1,0xb1,0xb1,0xb1,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0xb1,0xb1,0xb1,0xb1,0,0,
+0,0,0,0,0,0,0xb1,0xb1,0xb1,0xb1,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0xa,0xa,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0xa,0xa,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0xa,0xa,0xb1,0xb1,0xb1,0xa,0,0,
+0,0,0,0,0xa,0xa,0xb1,0xb1,0xb1,0xa,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,
-0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0,
+0,0,0,0,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,
+0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0x100a,0,0,0,0,
+0,0,0,0,0,0,0,0x100a,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0x100a,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0x100a,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0x100a,0,0,0,0,
-0,0,0,0,0,0,2,2,2,2,2,2,2,2,2,2,
+0,0,0,0,0,0,0,0x100a,0,0,0,0,0,0,0,0,
+0,0,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
-2,2,2,2,2,2,2,2,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,
-0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0,
-0,0,0,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,
-0xb1,0,0,0,0,0,0,0,0,0xb1,0,0,0,0,0,0,
-0,0,0,0,0xb1,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0xb1,0xb1,0xb1,0xb1,0xb1,
-0,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,
-0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0,0,0xb1,0xb1,0xb1,0xb1,0xb1,
-0xb1,0xb1,0,0xb1,0xb1,0,0xb1,0xb1,0xb1,0xb1,0xb1,0,0,0,0,0,
+2,2,2,2,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,
+0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0,0,0,0,0xb1,
+0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0,0,0,
+0,0,0,0,0,0xb1,0,0,0,0,0,0,0,0,0,0,
+0xb1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0xb1,0xb1,0xb1,0xb1,0xb1,0,0xb1,0xb1,0xb1,
+0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0xb1,0xb1,0xb1,0xb1,
+0xb1,0xb1,0xb1,0,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,
+0xb1,0xb1,0xb1,0xb1,0xb1,0,0,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0,0xb1,
+0xb1,0,0xb1,0xb1,0xb1,0xb1,0xb1,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0xb1,0xb1,0xb1,0xb1,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,
+0,0,0,0,0,0,0,0,0,0,0,0,0xb1,0xb1,0xb1,0xb1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0xb1,0xb1,0xb1,0xb1,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,4,1,1,1,1,1,1,1,1,1,1,1,1,
-1,1,1,1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,1,1,1,1,1,
-1,1,1,1,0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,
-0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,
-0x41,0x41,0x41,0x41,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xa1,1,1,1,1,
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
-0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,
-1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
-0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,
-0xa,0xa,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,
-0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0,0,0,0,
-0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,
-0xa,0xa,0xa,0,0,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,
-0xa,0xa,0xa,0xa,0,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,
-0xa,0xa,0xa,0xa,2,2,2,2,2,2,2,2,2,2,2,0xa,
-0xa,0xa,0xa,0xa,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0xa,0xa,0xa,0xa,0xa,0xa,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0xa,0,0,
+0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,1,1,1,1,1,1,1,1,1,
+0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,
+0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,0x41,
+0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xa1,1,1,1,1,1,1,1,1,
+1,1,1,1,1,1,1,1,1,1,1,1,0xd,0xd,0xd,0xd,
+0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,1,1,1,1,
+1,1,1,1,1,1,1,1,1,1,1,1,0xd,0xd,0xd,0xd,
+0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xa,0xa,0xd,0xd,
+0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xd,0xa,0xa,0xa,0xa,
+0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0,0,0,0,0xa,0xa,0xa,0xa,
+0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0,
+0,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,
+0,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,
+2,2,2,2,2,2,2,2,2,2,2,0xa,0xa,0xa,0xa,0xa,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0xa,0xa,0xa,0xa,0xa,0xa,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0xa,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0xa,0xa,0xa,0xa,
0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,
-0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0,0,0,0,0,0xa,0xa,0xa,
-0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,
-0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0,0,0,0,0,0,0,
-0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0,0,0,0,
-0xa,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0,0,0,0,0,0,0,0,
-0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0,0,0,0,0,0,
-0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0,0,0,0,0,0,0,0,
-0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,
-0xa,0xa,0,0,0xa,0xa,0,0,0,0,0,0,0,0,0,0,
+0xa,0xa,0xa,0xa,0,0,0,0,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,
+0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0,
+0,0,0,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,
+0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0,0,
0,0,0,0,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,
-0xa,0xa,0,0,0xa,0xa,0xa,0xa,0xa,0,0,0,0xa,0xa,0xa,0xa,
-0xa,0,0,0,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0,0,0,0,
-0,0,0,0,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0,0,0,0,0,
+0,0,0,0,0xa,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0,0,0,0,
+0,0,0,0,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0,0,
+0,0,0,0,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0,0,0,0,
0,0,0,0,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,
-0xa,0xa,0xa,0xa,0xa,0,0,0,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,
-0xa,0xa,0xa,0,0,0,0,0,0xa,0xa,0xa,0xa,0xa,0xa,0,0,
+0xa,0xa,0xa,0xa,0xa,0xa,0,0,0xa,0xa,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,
-0xa,0xa,0,0,0,0,0,0,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,
-0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0,0xa,0xa,0xa,0xa,
-0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,2,2,2,2,2,2,2,2,
-2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0xa,0xa,0xa,0xa,0xa,0xa,0,0,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,
+0xa,0xa,0xa,0xa,0xa,0,0,0,0,0,0,0,0xa,0xa,0xa,0xa,
+0xa,0xa,0xa,0xa,0xa,0,0,0,0,0,0,0,0xa,0xa,0xa,0xa,
+0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,
+0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0,0xa,0xa,0xa,0xa,0xa,
+0xa,0xa,0,0,0,0,0,0,0,0,0xa,0xa,0xa,0xa,0xa,0xa,
+0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0,0,0,0,0xa,0xa,0xa,0xa,
+0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0,
+0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0xa,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,
+2,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0x12,0x12,0xb2,0xb2,0xb2,0xb2,0xb2,0xb2,0xb2,0xb2,0xb2,0xb2,0xb2,0xb2,
+0,0,0,0,0,0,0x12,0x12,0xb2,0xb2,0xb2,0xb2,0xb2,0xb2,0xb2,0xb2,
0xb2,0xb2,0xb2,0xb2,0xb2,0xb2,0xb2,0xb2,0xb2,0xb2,0xb2,0xb2,0xb2,0xb2,0xb2,0xb2,
-0xb2,0xb2,0xb2,0xb2,0x12,0xb2,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,
+0xb2,0xb2,0xb2,0xb2,0xb2,0xb2,0xb2,0xb2,0x12,0xb2,0x12,0x12,0x12,0x12,0x12,0x12,
0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,
-0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,
-0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,
-0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0,0,0,0
+0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0xb1,0xb1,0xb1,0xb1,
+0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0x12,0x12,0x12,0x12,
+0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0,0,0,0
};
static const uint32_t ubidi_props_mirrors[40]={
@@ -926,14 +935,14 @@ static const UBiDiProps ubidi_props_singleton={
ubidi_props_trieIndex+3612,
NULL,
3612,
- 9264,
+ 9412,
0x1a0,
0xe9c,
0x0,
0x0,
0x110000,
- 0x3248,
- NULL, 0, FALSE, FALSE, 0, NULL
+ 0x32dc,
+ NULL, 0, false, false, 0, NULL
},
{ 2,2,0,0 }
};
diff --git a/thirdparty/icu4c/common/ubidiln.cpp b/thirdparty/icu4c/common/ubidiln.cpp
index fea239380a..430ece39d2 100644
--- a/thirdparty/icu4c/common/ubidiln.cpp
+++ b/thirdparty/icu4c/common/ubidiln.cpp
@@ -101,7 +101,7 @@ setTrailingWSStart(UBiDi *pBiDi) {
are already set to paragraph level.
Setting trailingWSStart to pBidi->length will avoid changing the
level of B chars from 0 to paraLevel in ubidi_getLevels when
- orderParagraphsLTR==TRUE.
+ orderParagraphsLTR==true.
*/
if(dirProps[start-1]==B) {
pBiDi->trailingWSStart=start; /* currently == pBiDi->length */
@@ -535,7 +535,7 @@ static int32_t getRunFromLogicalIndex(UBiDi *pBiDi, int32_t logicalIndex) {
/*
* Compute the runs array from the levels array.
- * After ubidi_getRuns() returns TRUE, runCount is guaranteed to be >0
+ * After ubidi_getRuns() returns true, runCount is guaranteed to be >0
* and the runs are reordered.
* Odd-level runs have visualStart on their visual right edge and
* they progress visually to the left.
@@ -551,7 +551,7 @@ ubidi_getRuns(UBiDi *pBiDi, UErrorCode*) {
* includes the case of length==0 (handled in setPara)..
*/
if (pBiDi->runCount>=0) {
- return TRUE;
+ return true;
}
if(pBiDi->direction!=UBIDI_MIXED) {
@@ -608,7 +608,7 @@ ubidi_getRuns(UBiDi *pBiDi, UErrorCode*) {
if(getRunsMemory(pBiDi, runCount)) {
runs=pBiDi->runsMemory;
} else {
- return FALSE;
+ return false;
}
/* set the runs */
@@ -703,7 +703,7 @@ ubidi_getRuns(UBiDi *pBiDi, UErrorCode*) {
}
}
- return TRUE;
+ return true;
}
static UBool
@@ -714,7 +714,7 @@ prepareReorder(const UBiDiLevel *levels, int32_t length,
UBiDiLevel level, minLevel, maxLevel;
if(levels==NULL || length<=0) {
- return FALSE;
+ return false;
}
/* determine minLevel and maxLevel */
@@ -723,7 +723,7 @@ prepareReorder(const UBiDiLevel *levels, int32_t length,
for(start=length; start>0;) {
level=levels[--start];
if(level>UBIDI_MAX_EXPLICIT_LEVEL+1) {
- return FALSE;
+ return false;
}
if(level<minLevel) {
minLevel=level;
@@ -741,7 +741,7 @@ prepareReorder(const UBiDiLevel *levels, int32_t length,
indexMap[start]=start;
}
- return TRUE;
+ return true;
}
/* reorder a line based on a levels array (L2) ------------------------------ */
diff --git a/thirdparty/icu4c/common/ubiditransform.cpp b/thirdparty/icu4c/common/ubiditransform.cpp
index d56bf1518b..24fffd9c46 100644
--- a/thirdparty/icu4c/common/ubiditransform.cpp
+++ b/thirdparty/icu4c/common/ubiditransform.cpp
@@ -130,7 +130,7 @@ action_resolve(UBiDiTransform *pTransform, UErrorCode *pErrorCode)
{
ubidi_setPara(pTransform->pBidi, pTransform->src, pTransform->srcLength,
pTransform->pActiveScheme->baseLevel, NULL, pErrorCode);
- return FALSE;
+ return false;
}
/**
@@ -150,7 +150,7 @@ action_reorder(UBiDiTransform *pTransform, UErrorCode *pErrorCode)
*pTransform->pDestLength = pTransform->srcLength;
pTransform->reorderingOptions = UBIDI_REORDER_DEFAULT;
- return TRUE;
+ return true;
}
/**
@@ -166,9 +166,9 @@ static UBool
action_setInverse(UBiDiTransform *pTransform, UErrorCode *pErrorCode)
{
(void)pErrorCode;
- ubidi_setInverse(pTransform->pBidi, TRUE);
+ ubidi_setInverse(pTransform->pBidi, true);
ubidi_setReorderingMode(pTransform->pBidi, UBIDI_REORDER_INVERSE_LIKE_DIRECT);
- return FALSE;
+ return false;
}
/**
@@ -186,7 +186,7 @@ action_setRunsOnly(UBiDiTransform *pTransform, UErrorCode *pErrorCode)
{
(void)pErrorCode;
ubidi_setReorderingMode(pTransform->pBidi, UBIDI_REORDER_RUNS_ONLY);
- return FALSE;
+ return false;
}
/**
@@ -205,7 +205,7 @@ action_reverse(UBiDiTransform *pTransform, UErrorCode *pErrorCode)
pTransform->dest, pTransform->destSize,
UBIDI_REORDER_DEFAULT, pErrorCode);
*pTransform->pDestLength = pTransform->srcLength;
- return TRUE;
+ return true;
}
/**
@@ -274,7 +274,7 @@ static UBool
action_shapeArabic(UBiDiTransform *pTransform, UErrorCode *pErrorCode)
{
if ((pTransform->letters | pTransform->digits) == 0) {
- return FALSE;
+ return false;
}
if (pTransform->pActiveScheme->lettersDir == pTransform->pActiveScheme->digitsDir) {
doShape(pTransform, pTransform->letters | pTransform->digits | pTransform->pActiveScheme->lettersDir,
@@ -288,7 +288,7 @@ action_shapeArabic(UBiDiTransform *pTransform, UErrorCode *pErrorCode)
pErrorCode);
}
}
- return TRUE;
+ return true;
}
/**
@@ -306,11 +306,11 @@ action_mirror(UBiDiTransform *pTransform, UErrorCode *pErrorCode)
UChar32 c;
uint32_t i = 0, j = 0;
if (0 == (pTransform->reorderingOptions & UBIDI_DO_MIRRORING)) {
- return FALSE;
+ return false;
}
if (pTransform->destSize < pTransform->srcLength) {
*pErrorCode = U_BUFFER_OVERFLOW_ERROR;
- return FALSE;
+ return false;
}
do {
UBool isOdd = ubidi_getLevelAt(pTransform->pBidi, i) & 1;
@@ -320,7 +320,7 @@ action_mirror(UBiDiTransform *pTransform, UErrorCode *pErrorCode)
*pTransform->pDestLength = pTransform->srcLength;
pTransform->reorderingOptions = UBIDI_REORDER_DEFAULT;
- return TRUE;
+ return true;
}
/**
@@ -444,7 +444,7 @@ ubiditransform_transform(UBiDiTransform *pBiDiTransform,
UErrorCode *pErrorCode)
{
uint32_t destLength = 0;
- UBool textChanged = FALSE;
+ UBool textChanged = false;
const UBiDiTransform *pOrigTransform = pBiDiTransform;
const UBiDiAction *action = NULL;
@@ -503,10 +503,10 @@ ubiditransform_transform(UBiDiTransform *pBiDiTransform,
updateSrc(pBiDiTransform, pBiDiTransform->dest, *pBiDiTransform->pDestLength,
*pBiDiTransform->pDestLength, pErrorCode);
}
- textChanged = TRUE;
+ textChanged = true;
}
}
- ubidi_setInverse(pBiDiTransform->pBidi, FALSE);
+ ubidi_setInverse(pBiDiTransform->pBidi, false);
if (!textChanged && U_SUCCESS(*pErrorCode)) {
/* Text was not changed - just copy src to dest */
diff --git a/thirdparty/icu4c/common/ucase.cpp b/thirdparty/icu4c/common/ucase.cpp
index 388c86b1bb..3d1750265b 100644
--- a/thirdparty/icu4c/common/ucase.cpp
+++ b/thirdparty/icu4c/common/ucase.cpp
@@ -40,7 +40,7 @@ _enumPropertyStartsRange(const void *context, UChar32 start, UChar32 /*end*/, ui
/* add the start code point to the USet */
const USetAdder *sa=(const USetAdder *)context;
sa->add(sa->set, start);
- return TRUE;
+ return true;
}
U_CFUNC void U_EXPORT2
@@ -354,7 +354,7 @@ ucase_addStringCaseClosure(const UChar *s, int32_t length, const USetAdder *sa)
int32_t i, start, limit, result, unfoldRows, unfoldRowWidth, unfoldStringWidth;
if(ucase_props_singleton.unfold==NULL || s==NULL) {
- return FALSE; /* no reverse case folding data, or no string */
+ return false; /* no reverse case folding data, or no string */
}
if(length<=1) {
/* the string is too short to find any match */
@@ -364,7 +364,7 @@ ucase_addStringCaseClosure(const UChar *s, int32_t length, const USetAdder *sa)
* but this does not make much practical difference because
* a single supplementary code point would just not be found
*/
- return FALSE;
+ return false;
}
const uint16_t *unfold=ucase_props_singleton.unfold;
@@ -375,7 +375,7 @@ ucase_addStringCaseClosure(const UChar *s, int32_t length, const USetAdder *sa)
if(length>unfoldStringWidth) {
/* the string is too long to find any match */
- return FALSE;
+ return false;
}
/* do a binary search for the string */
@@ -395,7 +395,7 @@ ucase_addStringCaseClosure(const UChar *s, int32_t length, const USetAdder *sa)
sa->add(sa->set, c);
ucase_addCaseClosure(c, sa);
}
- return TRUE;
+ return true;
} else if(result<0) {
limit=i;
} else /* result>0 */ {
@@ -403,7 +403,7 @@ ucase_addStringCaseClosure(const UChar *s, int32_t length, const USetAdder *sa)
}
}
- return FALSE; /* string not found */
+ return false; /* string not found */
}
U_NAMESPACE_BEGIN
@@ -431,7 +431,7 @@ FullCaseFoldingIterator::next(UnicodeString &full) {
// Set "full" to the NUL-terminated string in the first unfold column.
int32_t length=unfoldStringWidth;
while(length>0 && p[length-1]==0) { --length; }
- full.setTo(FALSE, p, length);
+ full.setTo(false, p, length);
// Return the code point.
UChar32 c;
U16_NEXT_UNSAFE(p, rowCpIndex, c);
@@ -905,7 +905,7 @@ isFollowedByCasedLetter(UCaseContextIterator *iter, void *context, int8_t dir) {
UChar32 c;
if(iter==NULL) {
- return FALSE;
+ return false;
}
for(/* dir!=0 sets direction */; (c=iter(context, dir))>=0; dir=0) {
@@ -913,13 +913,13 @@ isFollowedByCasedLetter(UCaseContextIterator *iter, void *context, int8_t dir) {
if(type&4) {
/* case-ignorable, continue with the loop */
} else if(type!=UCASE_NONE) {
- return TRUE; /* followed by cased letter */
+ return true; /* followed by cased letter */
} else {
- return FALSE; /* uncased and not case-ignorable */
+ return false; /* uncased and not case-ignorable */
}
}
- return FALSE; /* not followed by cased letter */
+ return false; /* not followed by cased letter */
}
/* Is preceded by Soft_Dotted character with no intervening cc=230 ? */
@@ -930,19 +930,19 @@ isPrecededBySoftDotted(UCaseContextIterator *iter, void *context) {
int8_t dir;
if(iter==NULL) {
- return FALSE;
+ return false;
}
for(dir=-1; (c=iter(context, dir))>=0; dir=0) {
dotType=getDotType(c);
if(dotType==UCASE_SOFT_DOTTED) {
- return TRUE; /* preceded by TYPE_i */
+ return true; /* preceded by TYPE_i */
} else if(dotType!=UCASE_OTHER_ACCENT) {
- return FALSE; /* preceded by different base character (not TYPE_i), or intervening cc==230 */
+ return false; /* preceded by different base character (not TYPE_i), or intervening cc==230 */
}
}
- return FALSE; /* not preceded by TYPE_i */
+ return false; /* not preceded by TYPE_i */
}
/*
@@ -987,20 +987,20 @@ isPrecededBy_I(UCaseContextIterator *iter, void *context) {
int8_t dir;
if(iter==NULL) {
- return FALSE;
+ return false;
}
for(dir=-1; (c=iter(context, dir))>=0; dir=0) {
if(c==0x49) {
- return TRUE; /* preceded by I */
+ return true; /* preceded by I */
}
dotType=getDotType(c);
if(dotType!=UCASE_OTHER_ACCENT) {
- return FALSE; /* preceded by different base character (not I), or intervening cc==230 */
+ return false; /* preceded by different base character (not I), or intervening cc==230 */
}
}
- return FALSE; /* not preceded by I */
+ return false; /* not preceded by I */
}
/* Is followed by one or more cc==230 ? */
@@ -1011,19 +1011,19 @@ isFollowedByMoreAbove(UCaseContextIterator *iter, void *context) {
int8_t dir;
if(iter==NULL) {
- return FALSE;
+ return false;
}
for(dir=1; (c=iter(context, dir))>=0; dir=0) {
dotType=getDotType(c);
if(dotType==UCASE_ABOVE) {
- return TRUE; /* at least one cc==230 following */
+ return true; /* at least one cc==230 following */
} else if(dotType!=UCASE_OTHER_ACCENT) {
- return FALSE; /* next base character, no more cc==230 following */
+ return false; /* next base character, no more cc==230 following */
}
}
- return FALSE; /* no more cc==230 following */
+ return false; /* no more cc==230 following */
}
/* Is followed by a dot above (without cc==230 in between) ? */
@@ -1034,20 +1034,20 @@ isFollowedByDotAbove(UCaseContextIterator *iter, void *context) {
int8_t dir;
if(iter==NULL) {
- return FALSE;
+ return false;
}
for(dir=1; (c=iter(context, dir))>=0; dir=0) {
if(c==0x307) {
- return TRUE;
+ return true;
}
dotType=getDotType(c);
if(dotType!=UCASE_OTHER_ACCENT) {
- return FALSE; /* next base character or cc==230 in between */
+ return false; /* next base character or cc==230 in between */
}
}
- return FALSE; /* no dot above following */
+ return false; /* no dot above following */
}
U_CAPI int32_t U_EXPORT2
@@ -1317,7 +1317,7 @@ ucase_toFullUpper(UChar32 c,
UCaseContextIterator *iter, void *context,
const UChar **pString,
int32_t caseLocale) {
- return toUpperOrTitle(c, iter, context, pString, caseLocale, TRUE);
+ return toUpperOrTitle(c, iter, context, pString, caseLocale, true);
}
U_CAPI int32_t U_EXPORT2
@@ -1325,7 +1325,7 @@ ucase_toFullTitle(UChar32 c,
UCaseContextIterator *iter, void *context,
const UChar **pString,
int32_t caseLocale) {
- return toUpperOrTitle(c, iter, context, pString, caseLocale, FALSE);
+ return toUpperOrTitle(c, iter, context, pString, caseLocale, false);
}
/* case folding ------------------------------------------------------------- */
@@ -1601,6 +1601,6 @@ ucase_hasBinaryProperty(UChar32 c, UProperty which) {
ucase_toFullUpper(c, NULL, NULL, &resultString, UCASE_LOC_ROOT)>=0 ||
ucase_toFullTitle(c, NULL, NULL, &resultString, UCASE_LOC_ROOT)>=0);
default:
- return FALSE;
+ return false;
}
}
diff --git a/thirdparty/icu4c/common/ucase.h b/thirdparty/icu4c/common/ucase.h
index 7bf57fd370..049f042912 100644
--- a/thirdparty/icu4c/common/ucase.h
+++ b/thirdparty/icu4c/common/ucase.h
@@ -139,8 +139,8 @@ public:
*/
UChar32 next(UnicodeString &full);
private:
- FullCaseFoldingIterator(const FullCaseFoldingIterator &); // no copy
- FullCaseFoldingIterator &operator=(const FullCaseFoldingIterator &); // no assignment
+ FullCaseFoldingIterator(const FullCaseFoldingIterator &) = delete; // no copy
+ FullCaseFoldingIterator &operator=(const FullCaseFoldingIterator &) = delete; // no assignment
const UChar *unfold;
int32_t unfoldRows;
diff --git a/thirdparty/icu4c/common/ucase_props_data.h b/thirdparty/icu4c/common/ucase_props_data.h
index 8bbf403cdc..b7797d14d7 100644
--- a/thirdparty/icu4c/common/ucase_props_data.h
+++ b/thirdparty/icu4c/common/ucase_props_data.h
@@ -9,145 +9,145 @@
#ifdef INCLUDED_FROM_UCASE_CPP
-static const UVersionInfo ucase_props_dataVersion={0xe,0,0,0};
+static const UVersionInfo ucase_props_dataVersion={0xf,0,0,0};
-static const int32_t ucase_props_indexes[UCASE_IX_TOP]={0x10,0x7512,0x64e8,0x683,0x172,0,0,0,0,0,0,0,0,0,0,3};
+static const int32_t ucase_props_indexes[UCASE_IX_TOP]={0x10,0x76f2,0x66c8,0x683,0x172,0,0,0,0,0,0,0,0,0,0,3};
-static const uint16_t ucase_props_trieIndex[12908]={
-0x354,0x35c,0x364,0x36c,0x37a,0x382,0x38a,0x392,0x39a,0x3a2,0x3a9,0x3b1,0x3b9,0x3c1,0x3c9,0x3d1,
-0x3d7,0x3df,0x3e7,0x3ef,0x3f7,0x3ff,0x407,0x40f,0x417,0x41f,0x427,0x42f,0x437,0x43f,0x447,0x44f,
-0x457,0x45f,0x467,0x46f,0x477,0x47f,0x487,0x48f,0x48b,0x493,0x498,0x4a0,0x4a7,0x4af,0x4b7,0x4bf,
-0x4c7,0x4cf,0x4d7,0x4df,0x373,0x37b,0x4e4,0x4ec,0x4f1,0x4f9,0x501,0x509,0x508,0x510,0x515,0x51d,
-0x525,0x52c,0x530,0x373,0x537,0x354,0x547,0x53f,0x54f,0x551,0x559,0x561,0x565,0x566,0x56e,0x576,
-0x57e,0x566,0x586,0x58b,0x57e,0x566,0x593,0x59b,0x565,0x5a3,0x5ab,0x5b3,0x5bb,0x373,0x5c3,0x373,
-0x5cb,0x5cd,0x5d5,0x5b3,0x565,0x5a3,0x5dc,0x5b3,0x5e4,0x5e6,0x56e,0x5b3,0x565,0x373,0x5ee,0x373,
-0x373,0x5f4,0x5fb,0x373,0x373,0x5ff,0x607,0x373,0x60b,0x612,0x373,0x619,0x621,0x628,0x630,0x373,
-0x373,0x635,0x63d,0x645,0x64d,0x655,0x65c,0x664,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x66c,0x373,0x373,0x67c,0x67c,0x674,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x684,0x68a,0x572,0x572,0x373,0x690,0x698,0x373,
-0x6a0,0x373,0x6a8,0x373,0x6af,0x6b5,0x373,0x373,0x373,0x6bd,0x373,0x373,0x373,0x373,0x373,0x373,
-0x6c4,0x373,0x6cb,0x6d3,0x373,0x6db,0x6e3,0x373,0x5a2,0x6e7,0x6ef,0x6f5,0x5e4,0x6fd,0x373,0x704,
-0x373,0x709,0x373,0x70f,0x717,0x71b,0x723,0x72b,0x733,0x738,0x73b,0x743,0x753,0x74b,0x763,0x75b,
-0x39a,0x76b,0x39a,0x773,0x776,0x39a,0x77e,0x39a,0x786,0x78e,0x796,0x79e,0x7a6,0x7ae,0x7b6,0x7be,
-0x7c6,0x7cd,0x373,0x7d5,0x7dd,0x373,0x7e5,0x7ed,0x7f5,0x7fd,0x805,0x80d,0x815,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x818,0x81e,0x824,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x82c,0x830,0x834,0x83c,0x39a,0x39a,0x39a,0x844,0x84c,0x853,0x373,0x858,0x373,0x373,0x373,0x860,
-0x373,0x6a5,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x564,0x868,0x373,0x373,0x86f,0x373,0x373,0x877,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x87f,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x70f,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x885,0x373,0x88d,0x892,0x89a,0x373,0x373,0x8a2,0x8aa,0x8b2,0x39a,0x8b7,0x8bf,0x8c5,0x8cc,0x8d3,
-0x8db,0x8e2,0x373,0x373,0x373,0x373,0x8e9,0x8f1,0x373,0x8f9,0x900,0x373,0x54f,0x905,0x90d,0x6af,
-0x373,0x913,0x91b,0x91f,0x373,0x927,0x92f,0x937,0x373,0x93d,0x941,0x949,0x959,0x951,0x373,0x961,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x969,0x373,0x373,0x373,0x373,0x971,0x54f,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x976,0x97e,0x982,0x373,0x373,0x373,0x373,0x356,0x35c,0x98a,0x992,0x999,0x50a,0x373,0x373,0x9a1,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0xdd0,0xdd0,0xde8,0xe28,0xe68,0xea4,0xee4,0xf24,0xf5c,0xf9c,0xfdc,0x101c,0x105c,0x109c,0x10dc,0x111c,
-0x115c,0x119c,0x11dc,0x121c,0x122c,0x1260,0x129c,0x12dc,0x131c,0x135c,0xdcc,0x1390,0x13c4,0x1404,0x1420,0x1454,
-0x9e1,0xa1f,0xa5f,0xa9e,0x188,0x188,0xad6,0x188,0x188,0x188,0x188,0x188,0x188,0xaff,0x188,0x188,
-0x188,0x188,0x188,0x188,0x188,0xb3f,0x188,0xb7f,0x188,0xba5,0xbe0,0xc1f,0xc5f,0xc99,0xcd0,0x188,
+static const uint16_t ucase_props_trieIndex[13148]={
+0x355,0x35d,0x365,0x36d,0x37b,0x383,0x38b,0x393,0x39b,0x3a3,0x3aa,0x3b2,0x3ba,0x3c2,0x3ca,0x3d2,
+0x3d8,0x3e0,0x3e8,0x3f0,0x3f8,0x400,0x408,0x410,0x418,0x420,0x428,0x430,0x438,0x440,0x448,0x450,
+0x458,0x460,0x468,0x470,0x478,0x480,0x488,0x490,0x48c,0x494,0x499,0x4a1,0x4a8,0x4b0,0x4b8,0x4c0,
+0x4c8,0x4d0,0x4d8,0x4e0,0x374,0x37c,0x4e5,0x4ed,0x4f2,0x4fa,0x502,0x50a,0x509,0x511,0x516,0x51e,
+0x526,0x52d,0x531,0x374,0x538,0x355,0x548,0x540,0x550,0x552,0x55a,0x562,0x566,0x567,0x56f,0x577,
+0x57f,0x567,0x587,0x58c,0x57f,0x567,0x594,0x59c,0x566,0x5a4,0x5ac,0x5b4,0x5bc,0x374,0x5c4,0x374,
+0x5cc,0x5ce,0x5d6,0x5b4,0x566,0x5a4,0x5dd,0x5b4,0x5e5,0x5e7,0x56f,0x5b4,0x566,0x374,0x5ef,0x374,
+0x374,0x5f5,0x5fc,0x374,0x374,0x600,0x608,0x374,0x60c,0x613,0x374,0x61a,0x622,0x629,0x631,0x374,
+0x374,0x636,0x63e,0x646,0x64e,0x656,0x65d,0x665,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x66d,0x374,0x374,0x67d,0x67d,0x675,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x685,0x68b,0x573,0x573,0x374,0x691,0x699,0x374,
+0x6a1,0x374,0x6a9,0x374,0x6b0,0x6b6,0x374,0x374,0x374,0x6be,0x374,0x374,0x374,0x374,0x374,0x374,
+0x6c5,0x374,0x6cc,0x6d4,0x374,0x6dc,0x6e4,0x374,0x5a3,0x6e8,0x6f0,0x6f6,0x5e5,0x6fe,0x374,0x705,
+0x374,0x70a,0x374,0x710,0x718,0x71c,0x724,0x72c,0x734,0x739,0x73c,0x744,0x754,0x74c,0x764,0x75c,
+0x39b,0x76c,0x39b,0x774,0x777,0x39b,0x77f,0x39b,0x787,0x78f,0x797,0x79f,0x7a7,0x7af,0x7b7,0x7bf,
+0x7c7,0x7ce,0x374,0x7d6,0x7de,0x374,0x7e6,0x7ee,0x7f6,0x7fe,0x806,0x80e,0x816,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x819,0x81f,0x825,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x82d,0x831,0x835,0x83d,0x39b,0x39b,0x39b,0x845,0x84d,0x854,0x374,0x859,0x374,0x374,0x374,0x861,
+0x374,0x6a6,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x565,0x869,0x374,0x374,0x870,0x374,0x374,0x878,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x880,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x710,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x886,0x374,0x88e,0x893,0x89b,0x374,0x374,0x8a3,0x8ab,0x8b3,0x39b,0x8b8,0x8c0,0x8c6,0x8cd,0x8d4,
+0x8dc,0x8e3,0x374,0x374,0x374,0x374,0x8ea,0x8f2,0x374,0x8fa,0x901,0x374,0x550,0x906,0x90e,0x6b0,
+0x374,0x914,0x91c,0x920,0x374,0x928,0x930,0x938,0x374,0x93e,0x942,0x94a,0x95a,0x952,0x374,0x962,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x96a,0x374,0x374,0x374,0x374,0x972,0x550,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x977,0x97f,0x983,0x374,0x374,0x374,0x374,0x357,0x35d,0x98b,0x993,0x99a,0x50b,0x374,0x374,0x9a2,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0xdd4,0xdd4,0xdec,0xe2c,0xe6c,0xea8,0xee8,0xf28,0xf60,0xfa0,0xfe0,0x1020,0x1060,0x10a0,0x10e0,0x1120,
+0x1160,0x11a0,0x11e0,0x1220,0x1230,0x1264,0x12a0,0x12e0,0x1320,0x1360,0xdd0,0x1394,0x13c8,0x1408,0x1424,0x1458,
+0x9e1,0xa1f,0xa5f,0xa9e,0x188,0x188,0xad9,0x188,0x188,0x188,0x188,0x188,0x188,0xb02,0x188,0x188,
+0x188,0x188,0x188,0x188,0x188,0xb42,0x188,0xb82,0x188,0xba8,0xbe3,0xc22,0xc62,0xc9c,0xcd3,0x188,
0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,
0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,
0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,
@@ -172,642 +172,657 @@ static const uint16_t ucase_props_trieIndex[12908]={
0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,
0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,
0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,0x188,
-0xd10,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x9a8,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x9b0,0x373,0x373,0x373,0x9b3,0x373,0x373,0x373,
-0x373,0x9bb,0x9c1,0x9c5,0x373,0x373,0x9c9,0x9cd,0x9d3,0x373,0x373,0x373,0x9da,0x9de,0x9e6,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x9f6,0x9ee,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x9fe,
-0xa02,0x373,0x373,0x373,0x373,0x373,0xa0a,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0xa12,0xa16,0xa1e,0xa22,0x373,0xa29,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0xa2f,0x373,0x373,0x373,0x373,0xa36,0x373,0xa3e,0x373,0x373,0x373,0x565,
-0xa40,0xa47,0xa4b,0x5e4,0xa53,0xa5b,0x373,0xa63,0xa6a,0x373,0xa70,0x5e4,0xa75,0xa7d,0x373,0x373,
-0xa82,0x373,0x373,0x373,0x373,0x356,0xa8a,0x5e4,0x5e6,0xa92,0xa99,0x373,0x373,0x373,0x373,0x373,
-0xa40,0xaa1,0x373,0x373,0xaa9,0xab1,0x373,0x373,0x373,0x373,0x373,0x373,0xab5,0xabd,0x373,0x373,
-0xac5,0x4ce,0x373,0x373,0xacd,0x373,0x373,0xad3,0xadb,0x373,0x373,0x373,0x373,0x373,0x373,0xae0,
-0x373,0x373,0x373,0xae8,0xaf0,0x373,0x373,0xaf8,0xb00,0x373,0x373,0x373,0xb03,0x9b0,0xb0b,0xb0f,
-0xb17,0x373,0xb1e,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0xb25,
-0x373,0x373,0x971,0xb2d,0x373,0x373,0x373,0xb33,0xb3b,0x373,0xb3f,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0xb45,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0xb4b,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0xb52,0x373,0xb58,0x5a2,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0xae8,0xaf0,0x373,0x373,0x373,0x373,0x373,0x373,0x6a5,0x373,0xb5e,0x373,0x373,0xb66,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0xb6b,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0xb73,0x5a2,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x8aa,0xb7b,0xb82,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0xb89,0xb91,0xb97,0x373,0x373,
-0x373,0x373,0xb9f,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0xba7,0xbaf,0xbb4,0xbba,0xbc2,0xbca,0xbd2,0xbab,0xbda,0xbe2,0xbea,0xbf1,0xbac,0xba7,0xbaf,0xbaa,
-0xbba,0xbad,0xba8,0xbf9,0xbab,0xc01,0xc09,0xc11,0xc18,0xc04,0xc0c,0xc14,0xc1b,0xc07,0xc23,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x8aa,
-0xc2b,0x8aa,0xc32,0xc39,0xc41,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0xc51,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0xc49,
-0xc59,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0xc5d,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0xaa5,0x373,0xa27,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0xc65,
-0x373,0xc6d,0xc75,0xc7c,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0xba3,0xc84,0xc84,0xc8a,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0xa65,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x565,0x8aa,0x8aa,0x8aa,0x373,0x373,0x373,0x373,0x8aa,0x8aa,0x8aa,0x8aa,0x8aa,0x8aa,0x8aa,0xc92,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,0x373,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,4,0,0,0,0,0,0,4,0,
-0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,
-0,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0xa,0x5a,0x7a,0x1012,0x1012,0x1012,0x1012,
-0x1012,0x1012,0x1012,0xba,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0,0,0,4,0,
-4,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf9,0xf031,0x149,0xf011,0xf011,0xf011,0xf011,
-0xf011,0xf011,0xf011,0x189,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-4,0,1,0,0,4,0,4,0,0,0,0,4,0x1c9,0,4,
-4,0,1,0,0,0,0,0,0x1012,0x1012,0x1012,0x1012,0x1012,0x1fa,0x1012,0x1012,
-0x1012,0x1012,0x1012,0x1012,0x5a,0x5a,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0,
-0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x239,0xf011,0xf011,0xf011,0xf011,0xf011,0x2d9,0xf011,0xf011,
-0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0,
-0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0x3c91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
+0xd13,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x9a9,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x9b1,0x374,0x374,0x374,0x9b4,0x374,0x374,0x374,
+0x374,0x9bc,0x9c2,0x9c6,0x374,0x374,0x9ca,0x9ce,0x9d4,0x374,0x374,0x374,0x9db,0x9df,0x9e7,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x9f7,0x9ef,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x9ff,
+0xa03,0x374,0x374,0x374,0x374,0x374,0xa0b,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0xa13,0xa17,0xa1f,0xa23,0x374,0xa2a,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0xa30,0x374,0xa34,0x374,0x374,0xa3c,0x374,0xa44,0x374,0x374,0x374,0x566,
+0xa46,0xa4d,0xa51,0x5e5,0xa59,0xa61,0x374,0xa69,0xa70,0x374,0xa76,0x5e5,0xa7b,0xa83,0x374,0x374,
+0xa88,0x566,0x374,0x374,0x374,0x357,0xa90,0x5e5,0x5e7,0xa98,0xa9f,0x374,0x374,0x374,0x374,0x374,
+0xa46,0xaa7,0x374,0x374,0xaaf,0xab7,0x374,0x374,0x374,0x374,0x374,0x374,0xabb,0xac3,0x374,0x374,
+0xacb,0x4cf,0x374,0x374,0xad3,0x374,0x374,0xad9,0xae1,0x374,0x374,0x374,0x374,0x374,0x374,0xae6,
+0x374,0x374,0x374,0xaee,0xaf6,0x374,0x374,0xafe,0xb06,0x374,0x374,0x374,0xb09,0x9b1,0xb11,0xb15,
+0xb1d,0x374,0xb24,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0xb2b,
+0x374,0x374,0x972,0xb33,0x374,0x374,0x374,0xb39,0xb41,0x374,0xb45,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0xb4b,0x5e5,0xb51,0xb59,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0xb5d,0xb65,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0xb6b,0x374,0xb71,0x5a3,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0xaee,0xaf6,0x374,0x374,0x374,0x374,0x374,0x374,0x6a6,0x374,0xb77,0x374,
+0x374,0xb7f,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0xb84,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0xb8c,0x5a3,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x8ab,0xb94,0xb9b,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0xba2,0xbaa,
+0xbb0,0x374,0x374,0x374,0x374,0xbb8,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0xbc0,0xbc8,0xbcd,0xbd3,0xbdb,0xbe3,0xbeb,0xbc4,0xbf3,0xbfb,0xc03,0xc0a,0xbc5,
+0xbc0,0xbc8,0xbc3,0xbd3,0xbc6,0xbc1,0xc12,0xbc4,0xc1a,0xc22,0xc2a,0xc31,0xc1d,0xc25,0xc2d,0xc34,
+0xc20,0xc3c,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x8ab,0xc44,0x8ab,0xc4b,0xc52,0xc5a,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0xc6a,0xc72,0x374,0x374,0x374,0x374,
+0x374,0x374,0xc62,0xc7a,0xc8d,0xc80,0xc85,0x374,0x374,0x374,0x374,0xc95,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0xaab,0x374,0xa28,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0xc9d,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0xca1,0x374,0xca9,0xcb1,0xcb8,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0xbbc,0xcc0,0xcc0,0xcc6,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0xa6b,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x566,0x8ab,0x8ab,0x8ab,0x374,0x374,0x374,0x374,0x8ab,0x8ab,0x8ab,0x8ab,0x8ab,
+0x8ab,0x8ab,0xcce,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,0x374,
+0x374,0x374,0x374,0x354,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,
+0,0,4,0,0,0,0,0,0,0,0,0,0,0,4,0,
+0,0,0,0,0,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0xa,0x5a,0x7a,
+0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0xba,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0,
+0,0,4,0,4,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf9,0xf031,0x149,
+0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0x189,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,4,0,1,0,0,4,0,4,0,0,0,0,
+4,0x1c9,0,4,4,0,1,0,0,0,0,0,0x1012,0x1012,0x1012,0x1012,
+0x1012,0x1fa,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x5a,0x5a,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,
+0x1012,0x1012,0x1012,0,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x239,0xf011,0xf011,0xf011,0xf011,
+0xf011,0x2d9,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,
+0xf011,0xf011,0xf011,0,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0x3c91,0x92,0xff91,0x92,0xff91,
0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
-0x31a,0xff91,0x92,0xff91,0x92,0xff91,0x31a,0xffb1,0x33a,0x389,0x92,0xff91,0x92,0xff91,0x92,0xff91,
-1,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x3d9,0x92,0xff91,
+0x92,0xff91,0x92,0xff91,0x31a,0xff91,0x92,0xff91,0x92,0xff91,0x31a,0xffb1,0x33a,0x389,0x92,0xff91,
+0x92,0xff91,0x92,0xff91,1,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,
+0xff91,0x3d9,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
-0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0xc392,0x92,0xff91,0x92,
-0xff91,0x92,0xff91,0x459,0x6191,0x6912,0x92,0xff91,0x92,0xff91,0x6712,0x92,0xff91,0x6692,0x6692,0x92,
-0xff91,1,0x2792,0x6512,0x6592,0x92,0xff91,0x6692,0x6792,0x3091,0x6992,0x6892,0x92,0xff91,0x5191,1,
-0x6992,0x6a92,0x4111,0x6b12,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x6d12,0x92,0xff91,0x6d12,1,1,
-0x92,0xff91,0x6d12,0x92,0xff91,0x6c92,0x6c92,0x92,0xff91,0x92,0xff91,0x6d92,0x92,0xff91,1,0,
-0x92,0xff91,1,0x1c11,0,0,0,0,0x48a,0x4bb,0x4f9,0x52a,0x55b,0x599,0x5ca,0x5fb,
-0x639,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,
-0xff91,0xd891,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
-0x92,0xff91,0x92,0xff91,0x669,0x6ea,0x71b,0x759,0x92,0xff91,0xcf92,0xe412,0x92,0xff91,0x92,0xff91,
+0xc392,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x459,0x6191,0x6912,0x92,0xff91,0x92,0xff91,0x6712,0x92,
+0xff91,0x6692,0x6692,0x92,0xff91,1,0x2792,0x6512,0x6592,0x92,0xff91,0x6692,0x6792,0x3091,0x6992,0x6892,
+0x92,0xff91,0x5191,1,0x6992,0x6a92,0x4111,0x6b12,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x6d12,0x92,
+0xff91,0x6d12,1,1,0x92,0xff91,0x6d12,0x92,0xff91,0x6c92,0x6c92,0x92,0xff91,0x92,0xff91,0x6d92,
+0x92,0xff91,1,0,0x92,0xff91,1,0x1c11,0,0,0,0,0x48a,0x4bb,0x4f9,0x52a,
+0x55b,0x599,0x5ca,0x5fb,0x639,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,
+0xff91,0x92,0xff91,0x92,0xff91,0xd891,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
+0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x669,0x6ea,0x71b,0x759,0x92,0xff91,0xcf92,0xe412,
0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
-0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0xbf12,1,0x92,0xff91,
0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
-1,1,1,1,1,1,0x78a,0x92,0xff91,0xae92,0x7aa,0x7c9,0x7c9,0x92,0xff91,0x9e92,
-0x2292,0x2392,0x92,0xff91,0x92,0xffb1,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x7e9,0x809,0x829,0x9711,
-0x9911,1,0x9991,0x9991,1,0x9b11,1,0x9a91,0x849,1,1,1,0x9991,0x869,1,0x9891,
-1,0x889,0x8a9,1,0x97b1,0x9691,0x8a9,0x8c9,0x8e9,1,1,0x9691,1,0x909,0x9591,1,
-1,0x9511,1,1,1,1,1,1,1,0x929,1,1,0x9311,1,0x949,0x9311,
-1,1,1,0x969,0x9311,0xdd91,0x9391,0x9391,0xdc91,1,1,1,1,1,0x9291,1,
-0,1,1,1,1,1,1,1,1,0x989,0x9a9,1,1,1,1,1,
-1,1,1,1,1,1,1,1,1,1,1,1,5,5,0x25,5,
-5,5,5,5,5,4,4,4,0x14,4,0x14,4,5,5,4,4,
+0xbf12,1,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
+0x92,0xff91,0x92,0xff91,1,1,1,1,1,1,0x78a,0x92,0xff91,0xae92,0x7aa,0x7c9,
+0x7c9,0x92,0xff91,0x9e92,0x2292,0x2392,0x92,0xff91,0x92,0xffb1,0x92,0xff91,0x92,0xff91,0x92,0xff91,
+0x7e9,0x809,0x829,0x9711,0x9911,1,0x9991,0x9991,1,0x9b11,1,0x9a91,0x849,1,1,1,
+0x9991,0x869,1,0x9891,1,0x889,0x8a9,1,0x97b1,0x9691,0x8a9,0x8c9,0x8e9,1,1,0x9691,
+1,0x909,0x9591,1,1,0x9511,1,1,1,1,1,1,1,0x929,1,1,
+0x9311,1,0x949,0x9311,1,1,1,0x969,0x9311,0xdd91,0x9391,0x9391,0xdc91,1,1,1,
+1,1,0x9291,1,0,1,1,1,1,1,1,1,1,0x989,0x9a9,1,
+1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+5,5,0x25,5,5,5,5,5,5,4,4,4,0x14,4,0x14,4,
+5,5,4,4,4,4,4,4,4,4,4,4,4,4,4,4,
4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,
-4,4,4,4,4,4,4,4,4,4,4,4,5,5,5,5,
-5,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,
-4,4,4,4,4,4,4,4,4,4,4,4,0x54,0x54,0x44,0x44,
-0x44,0x44,0x44,0x9cc,0x54,0x44,0x54,0x44,0x54,0x44,0x44,0x44,0x44,0x44,0x44,0x54,
-0x44,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,
-0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x74,0x64,0x64,
-0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x44,0x44,0x44,0x44,0x44,0x54,0x44,
-0x44,0x9dd,0x44,0x64,0x64,0x64,0x44,0x44,0x44,0x64,0x64,4,0x44,0x44,0x44,0x64,
-0x64,0x64,0x64,0x44,0x64,0x64,0x64,0x44,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x44,
-0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x92,0xff91,0x92,0xff91,
-4,4,0x92,0xff91,0,0,5,0x4111,0x4111,0x4111,0,0x3a12,0,0,0,0,
-4,4,0x1312,4,0x1292,0x1292,0x1292,0,0x2012,0,0x1f92,0x1f92,0xa29,0x1012,0xafa,0x1012,
-0x1012,0xb3a,0x1012,0x1012,0xb7a,0xbca,0xc1a,0x1012,0xc5a,0x1012,0x1012,0x1012,0xc9a,0xcda,0,0xd1a,
-0x1012,0x1012,0xd5a,0x1012,0x1012,0xd9a,0x1012,0x1012,0xed11,0xed91,0xed91,0xed91,0xdd9,0xf011,0xea9,0xf011,
-0xf011,0xee9,0xf011,0xf011,0xf29,0xf79,0xfc9,0xf011,0x1009,0xf011,0xf011,0xf011,0x1049,0x1089,0x10c9,0x10f9,
-0xf011,0xf011,0x1139,0xf011,0xf011,0x1179,0xf011,0xf011,0xe011,0xe091,0xe091,0x412,0x11b9,0x11e9,2,2,
-2,0x1239,0x1269,0xfc11,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
-0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x1299,0x12c9,0x391,0xc631,
-0x12fa,0x1349,0,0x92,0xff91,0xfc92,0x92,0xff91,1,0xbf12,0xbf12,0xbf12,0x2812,0x2812,0x2812,0x2812,
-0x2812,0x2812,0x2812,0x2812,0x2812,0x2812,0x2812,0x2812,0x2812,0x2812,0x2812,0x2812,0x1012,0x1012,0x137a,0x1012,
-0x13ba,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x13fa,0x1012,0x1012,0x143a,0x147a,0x1012,
-0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x14ca,0x1012,0x1012,0x1012,0x1012,0x1012,0xf011,0xf011,0x1509,0xf011,
-0x1549,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0x1589,0xf011,0xf011,0x15c9,0x1609,0xf011,
-0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0x1659,0xf011,0xf011,0xf011,0xf011,0xf011,0xd811,0xd811,0xd811,0xd811,
-0xd811,0xd811,0xd831,0xd811,0xd831,0xd811,0xd811,0xd811,0xd811,0xd811,0xd811,0xd811,0x92,0xff91,0x169a,0x16d9,
+5,5,5,5,5,4,4,4,4,4,4,4,4,4,4,4,
+4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,
+0x54,0x54,0x44,0x44,0x44,0x44,0x44,0x9cc,0x54,0x44,0x54,0x44,0x54,0x44,0x44,0x44,
+0x44,0x44,0x44,0x54,0x44,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,
+0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,
+0x64,0x74,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x44,0x44,0x44,
+0x44,0x44,0x54,0x44,0x44,0x9dd,0x44,0x64,0x64,0x64,0x44,0x44,0x44,0x64,0x64,4,
+0x44,0x44,0x44,0x64,0x64,0x64,0x64,0x44,0x64,0x64,0x64,0x44,0x64,0x64,0x64,0x64,
+0x64,0x64,0x64,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,
+0x92,0xff91,0x92,0xff91,4,4,0x92,0xff91,0,0,5,0x4111,0x4111,0x4111,0,0x3a12,
+0,0,0,0,4,4,0x1312,4,0x1292,0x1292,0x1292,0,0x2012,0,0x1f92,0x1f92,
+0xa29,0x1012,0xafa,0x1012,0x1012,0xb3a,0x1012,0x1012,0xb7a,0xbca,0xc1a,0x1012,0xc5a,0x1012,0x1012,0x1012,
+0xc9a,0xcda,0,0xd1a,0x1012,0x1012,0xd5a,0x1012,0x1012,0xd9a,0x1012,0x1012,0xed11,0xed91,0xed91,0xed91,
+0xdd9,0xf011,0xea9,0xf011,0xf011,0xee9,0xf011,0xf011,0xf29,0xf79,0xfc9,0xf011,0x1009,0xf011,0xf011,0xf011,
+0x1049,0x1089,0x10c9,0x10f9,0xf011,0xf011,0x1139,0xf011,0xf011,0x1179,0xf011,0xf011,0xe011,0xe091,0xe091,0x412,
+0x11b9,0x11e9,2,2,2,0x1239,0x1269,0xfc11,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
-0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0,0x44,
-0x44,0x44,0x44,0x44,4,4,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
+0x1299,0x12c9,0x391,0xc631,0x12fa,0x1349,0,0x92,0xff91,0xfc92,0x92,0xff91,1,0xbf12,0xbf12,0xbf12,
+0x2812,0x2812,0x2812,0x2812,0x2812,0x2812,0x2812,0x2812,0x2812,0x2812,0x2812,0x2812,0x2812,0x2812,0x2812,0x2812,
+0x1012,0x1012,0x137a,0x1012,0x13ba,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x13fa,0x1012,
+0x1012,0x143a,0x147a,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x14ca,0x1012,0x1012,0x1012,0x1012,0x1012,
+0xf011,0xf011,0x1509,0xf011,0x1549,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0x1589,0xf011,
+0xf011,0x15c9,0x1609,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0x1659,0xf011,0xf011,0xf011,0xf011,0xf011,
+0xd811,0xd811,0xd811,0xd811,0xd811,0xd811,0xd831,0xd811,0xd831,0xd811,0xd811,0xd811,0xd811,0xd811,0xd811,0xd811,
+0x92,0xff91,0x169a,0x16d9,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
+0x92,0xff91,0,0x44,0x44,0x44,0x44,0x44,4,4,0x92,0xff91,0x92,0xff91,0x92,0xff91,
0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
-0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x792,0x92,0xff91,0x92,
-0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0xf891,0x92,0xff91,0x92,0xff91,
0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
-0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0,0x1812,0x1812,0x1812,
-0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,
-0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0,0,4,0,0,0,0,0,4,
-1,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,
+0x792,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0xf891,
+0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
+0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
+0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
+0,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,
+0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0,0,4,0,0,
+0,0,0,4,1,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,
0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,
-0xe811,0xe811,0xe811,0x1719,1,0,0,0,0,0,0,0,0,0x64,0x44,0x44,
-0x44,0x44,0x64,0x44,0x44,0x44,0x64,0x64,0x44,0x44,0x44,0x44,0x44,0x44,0x64,0x64,
-0x64,0x64,0x64,0x64,0x44,0x44,0x64,0x44,0x44,0x64,0x64,0x44,0x64,0x64,0x64,0x64,
-0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0,0x64,0,0x64,0x64,0,
-0x44,0x64,0,0x64,0,0,0,0,0,0,0,0,0,0,0,0,
+0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0x1719,1,0,0,0,0,0,0,0,
+0,0x64,0x44,0x44,0x44,0x44,0x64,0x44,0x44,0x44,0x64,0x64,0x44,0x44,0x44,0x44,
+0x44,0x44,0x64,0x64,0x64,0x64,0x64,0x64,0x44,0x44,0x64,0x44,0x44,0x64,0x64,0x44,
+0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0,0x64,
+0,0x64,0x64,0,0x44,0x64,0,0x64,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-4,0,0,0,0,0,0,0,0,0,0,0,4,4,4,4,
-4,4,0,0,0,0,0,0,0,0,0,0,0x44,0x44,0x44,0x44,
-0x44,0x44,0x44,0x44,0x64,0x64,0x64,0,4,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,
-0,0,0,0,0,0,0,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x44,
-0x44,0x64,0x64,0x44,0x44,0x44,0x44,0x44,0x64,0x44,0x44,0x64,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0x64,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0x44,0x44,0x44,0x44,0x44,0x44,0x44,4,0,0x44,
-0x44,0x44,0x44,0x64,0x44,4,4,0x44,0x44,0,0x64,0x44,0x44,0x64,0,0,
+0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,
+4,4,4,4,4,4,0,0,0,0,0,0,0,0,0,0,
+0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x64,0x64,0x64,0,4,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,4,0,0x64,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0x44,0x64,0x44,0x44,0x64,0x44,0x44,0x64,0x64,0x64,0x44,0x64,
-0x64,0x44,0x64,0x44,0x44,0x44,0x64,0x44,0x64,0x44,0x64,0x44,0x64,0x44,0x44,0,
+4,0,0,0,0,0,0,0,0,0,0,0x64,0x64,0x64,0x64,0x64,
+0x64,0x64,0x64,0x44,0x44,0x64,0x64,0x44,0x44,0x44,0x44,0x44,0x64,0x44,0x44,0x64,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0x64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0x44,0x44,0x44,0x44,0x44,0x44,
+0x44,4,0,0x44,0x44,0x44,0x44,0x64,0x44,4,4,0x44,0x44,0,0x64,0x44,
+0x44,0x64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,4,0,0x64,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0x44,0x64,0x44,0x44,0x64,0x44,0x44,0x64,
+0x64,0x64,0x44,0x64,0x64,0x44,0x64,0x44,0x44,0x44,0x64,0x44,0x64,0x44,0x64,0x44,
+0x64,0x44,0x44,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,4,4,4,4,4,4,4,4,4,4,
-4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0x44,
-0x44,0x44,0x44,0x44,0x44,0x44,0x64,0x44,4,4,0,0,0,0,4,0,
-0,0x64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0x44,0x44,0x44,0x44,4,0x44,
-0x44,0x44,0x44,0x44,4,0x44,0x44,0x44,4,0x44,0x44,0x44,0x44,0x44,0,0,
+0,0,0,0,0,0,0,0,0,0,4,4,4,4,4,4,
+4,4,4,4,4,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0x64,0x64,0x64,0,0,0,0,
-0,0,0,0,4,0,0,0,0,0,0,0,4,4,0,0,
-0,0,0,0,0x44,0x64,0x64,0x64,0x44,0x44,0x44,0x44,0x44,0x44,4,0x64,
-0x44,0x44,0x64,0x44,0x44,0x64,0x44,0x44,0x44,0x64,0x64,0x64,0x64,0x64,0x64,0x44,
-0x44,0x44,0x64,0x44,0x44,0x64,0x64,0x44,0x44,0x44,0x44,0x44,0,0,0,0,
-0,0,0,0,0,4,0x44,0x44,0x44,0x44,0x44,0x64,0x64,0x64,0x64,0x64,
-0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,4,4,4,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,
-0x64,0,0,0,0,4,4,4,4,4,4,4,4,0,0,0,
-0,0x64,0,0,0,0x44,0x64,0x44,0x44,4,4,4,0,0,0,0,
-0,0,0,0,0,0,4,4,0,0,0,0,0,0,0,0,
-0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x64,0x44,4,4,0,0,
+0,0,4,0,0,0x64,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0x44,0x44,
+0x44,0x44,4,0x44,0x44,0x44,0x44,0x44,4,0x44,0x44,0x44,4,0x44,0x44,0x44,
+0x44,0x44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0x64,0x64,0x64,
+0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,
+4,4,0,0,0,0,0,0,0x44,0x64,0x64,0x64,0x44,0x44,0x44,0x44,
+0x44,0x44,4,0x64,0x44,0x44,0x64,0x44,0x44,0x64,0x44,0x44,0x44,0x64,0x64,0x64,
+0x64,0x64,0x64,0x44,0x44,0x44,0x64,0x44,0x44,0x64,0x64,0x44,0x44,0x44,0x44,0x44,
+0,0,0,0,0,0,0,0,0,4,0x44,0x44,0x44,0x44,0x44,0x64,
+0x64,0x64,0x64,0x64,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,
+4,4,4,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0x64,0,0,0,0,4,4,4,4,0,0,0,
-0,0,0,0,0,0x64,0,0,0,0,0,0,0,0,0,0,
+0,0,4,0,0x64,0,0,0,0,4,4,4,4,4,4,4,
+4,0,0,0,0,0x64,0,0,0,0x44,0x64,0x44,0x44,4,4,4,
0,0,0,0,0,0,0,0,0,0,4,4,0,0,0,0,
+0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0x44,0,0,4,4,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,4,4,0,0,0,0,4,
-4,0,0,4,4,0x64,0,0,0,4,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,4,4,0,0,
-0,4,0,0,0,0,0,0,0,0,0,0,0,4,4,4,
-4,4,0,4,4,0,0,0,0,0x64,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0x64,0,0,0,0,4,4,4,
+4,0,0,0,0,0,0,0,0,0x64,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,4,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,4,4,4,4,4,4,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0x44,0,0,4,4,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0x64,0,0,4,0,4,4,4,
-4,0,0,0,0,0,0,0,0,0x64,0,0,0,0,0,0,
-0,4,4,0,0,0,0,0,0,0,0,0,0,0,4,4,
+0,0,0,0,0,0,0,0,0,0,0,0,0,4,4,0,
+0,0,0,4,4,0,0,4,4,0x64,0,0,0,4,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,
+4,4,0,0,0,4,0,0,0,0,0,0,0,0,0,0,
+0,4,4,4,4,4,0,4,4,0,0,0,0,0x64,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,
-0,0,0,0,0,0,0,0,0,0x64,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,
-4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,4,4,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,4,4,4,4,4,4,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0x64,0,4,4,4,0,0,0,0,0,4,4,4,0,4,4,
-4,0x64,0,0,0,0,0,0,0,0x64,0x64,0,0,0,0,0,
-0,0,0,0,0,0,4,0,0,0,0,0,4,0x64,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0x64,0,0,4,
+0,4,4,4,4,0,0,0,0,0,0,0,0,0x64,0,0,
+0,0,0,0,0,4,4,0,0,0,0,0,0,0,0,0,
+0,0,4,4,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-4,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0x64,0x64,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0x64,0,0,0,0,0,0,0,4,4,4,0,4,0,
+4,0,0,0,0,0,0,0,0,0,0,0,0,0x64,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,4,0,0,4,4,4,4,0x64,0x64,0x64,0,0,0,0,0,
-0,0,4,4,0x64,0x64,0x64,0x64,4,4,4,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,
-4,4,4,4,0x64,0x64,0x64,4,4,0,0,0,0,0,0,0,
-0,0,4,0,0x64,0x64,0x64,0x64,4,4,0,0,0,0,0,0,
+4,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0x64,0x64,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0x64,0,0x64,
-0,0x64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0x64,0x64,4,0x64,4,4,4,4,4,0x64,0x64,
-0x64,0x64,4,0,0x64,4,0x44,0x44,0x64,0,0x44,0x44,0,0,0,0,
-0,4,4,4,4,4,4,4,4,4,4,4,0,4,4,4,
-4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,
-4,4,4,4,4,4,4,4,4,4,4,4,4,0,0,0,
-0,0,0,0,0,0,0x64,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0x64,0,4,4,4,0,0,0,0,0,4,4,
+4,0,4,4,4,0x64,0,0,0,0,0,0,0,0x64,0x64,0,
+0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,
+4,0x64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,4,4,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,4,4,4,4,0,4,4,4,4,4,0x64,0,0x64,0x64,0,
-0,4,4,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,4,4,0,0,
-0,0,4,4,4,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,4,4,4,4,0,0,0,0,0,0,0,
-0,0,0,0,0,0,4,0,0,4,4,0,0,0,0,0,
-0,0x64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,4,0,0,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,
-0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,
-0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0,0x175a,0,0,0,0,0,0x175a,0,0,
-0x1779,0x17a9,0x17d9,0x1809,0x1839,0x1869,0x1899,0x18c9,0x18f9,0x1929,0x1959,0x1989,0x19b9,0x19e9,0x1a19,0x1a49,
-0x1a79,0x1aa9,0x1ad9,0x1b09,0x1b39,0x1b69,0x1b99,0x1bc9,0x1bf9,0x1c29,0x1c59,0x1c89,0x1cb9,0x1ce9,0x1d19,0x1d49,
-0x1d79,0x1da9,0x1dd9,0x1e09,0x1e39,0x1e69,0x1e99,0x1ec9,0x1ef9,0x1f29,0x1f59,0,4,0x1f89,0x1fb9,0x1fe9,
+0,0,0,0,0,0,0,0x64,0x64,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0x64,0,0,0,0,0,0,0,4,4,
+4,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,4,0,0,4,4,4,4,0x64,0x64,0x64,0,
+0,0,0,0,0,0,4,4,0x64,0x64,0x64,0x64,4,4,4,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0x44,0x44,0x44,
-0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,
-0x203a,0x203a,0x203a,0x203a,0x203a,0x203a,0,0,0x2059,0x2089,0x20b9,0x20e9,0x2119,0x2149,0,0,
-0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,
-0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,
+0,4,0,0,4,4,4,4,0x64,0x64,0x64,4,4,0,0,0,
+0,0,0,0,0,0,4,0,0x64,0x64,0x64,0x64,4,4,4,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,4,4,0x64,0x60,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,4,4,0x60,0,0,0,
+0,0,0,0,0,0,0,0,0x64,0x64,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,4,4,0,4,4,4,4,4,4,4,0,0,
-0,0,0,0,0,0,4,0,0,4,4,4,4,4,4,4,
-4,4,0x64,4,0,0,0,4,0,0,0,0,0,0x44,0,0,
-0,0,0,0,0,0,0,0,0,0,0,4,4,4,4,4,
+0,0x64,0,0x64,0,0x64,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0x64,0x64,4,0x64,4,4,4,
+4,4,0x64,0x64,0x64,0x64,4,0,0x64,4,0x44,0x44,0x64,0,0x44,0x44,
+0,0,0,0,0,4,4,4,4,4,4,4,4,4,4,4,
+0,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,
+4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,
+4,0,0,0,0,0,0,0,0,0,0x64,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,4,4,4,4,0,4,4,4,4,4,0x64,
+0,0x64,0x64,0,0,4,4,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,4,4,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0x64,0,0,
+4,4,0,0,0,0,4,4,4,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,4,4,4,4,0,0,0,
+0,0,0,0,0,0,0,0,0,0,4,0,0,4,4,0,
+0,0,0,0,0,0x64,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,4,0,0,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,
+0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,
+0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0x175a,0,0x175a,0,0,0,0,
+0,0x175a,0,0,0x1779,0x17a9,0x17d9,0x1809,0x1839,0x1869,0x1899,0x18c9,0x18f9,0x1929,0x1959,0x1989,
+0x19b9,0x19e9,0x1a19,0x1a49,0x1a79,0x1aa9,0x1ad9,0x1b09,0x1b39,0x1b69,0x1b99,0x1bc9,0x1bf9,0x1c29,0x1c59,0x1c89,
+0x1cb9,0x1ce9,0x1d19,0x1d49,0x1d79,0x1da9,0x1dd9,0x1e09,0x1e39,0x1e69,0x1e99,0x1ec9,0x1ef9,0x1f29,0x1f59,0,
+5,0x1f89,0x1fb9,0x1fe9,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0x44,0x44,0x44,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,
+0x201a,0x201a,0x201a,0x201a,0x203a,0x203a,0x203a,0x203a,0x203a,0x203a,0,0,0x2059,0x2089,0x20b9,0x20e9,
+0x2119,0x2149,0,0,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,
+0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,0x201a,
+0x201a,0x201a,0x201a,0x201a,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,4,4,0x64,0x60,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,4,
+0x60,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,4,4,0,4,4,4,4,4,
+4,4,0,0,0,0,0,0,0,0,4,0,0,4,4,4,
+4,4,4,4,4,4,0x64,4,0,0,0,4,0,0,0,0,
+0,0x44,0,0,0,0,0,0,0,0,0,0,0,0,0,4,
+4,4,4,4,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,4,4,4,0,0,0,0,4,4,0,0,0,
-0,0,0,0,0,0,4,0,0,0,0,0,0,0x64,0x44,0x64,
+0,0,0,0,0,4,4,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0x44,0x64,0,0,4,0,0,0,0,
+0,0x64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,4,4,4,0,0,0,0,4,
+4,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,
+0,0x64,0x44,0x64,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0x44,0x64,0,0,4,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,4,0,4,4,4,4,4,4,4,0,
+0x64,0,4,0,0,4,4,4,4,4,4,4,4,0,0,0,
+0,0,0,4,4,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0,0,0x64,
+0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,
+0x44,0x44,0x44,0x44,0x44,0x64,0x64,0x64,0x64,0x64,0x64,0x44,0x44,0x64,4,0x64,
+0x64,0x44,0x44,0x64,0x64,0x44,0x44,0x44,0x44,0x44,0x64,0x44,0x44,0x44,0x44,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,4,0,4,4,4,4,4,4,4,0,0x64,0,4,0,
-0,4,4,4,4,4,4,4,4,0,0,0,0,0,0,4,
-4,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0,0,0x64,0,0,0,0,
-0,0,0,4,0,0,0,0,0,0,0,0,0x44,0x44,0x44,0x44,
-0x44,0x64,0x64,0x64,0x64,0x64,0x64,0x44,0x44,0x64,4,0x64,0x64,0x44,0x44,0x64,
-0x64,0x44,0x44,0x44,0x44,0x44,0x64,0x44,0x44,0x44,0x44,0,0,0,0,0,
+0,0,0,0,0x64,0,4,4,4,4,4,0,4,0,0,0,
+0,0,4,0,0x60,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0x64,0,4,4,4,4,4,0,4,0,0,0,0,0,4,0,
-0x60,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0x44,
-0x64,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,4,4,4,4,0,0,4,4,0x60,0x64,
-4,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0x64,0,4,4,0,0,0,4,0,4,
-4,4,0x60,0x60,0,0,0,0,0,0,0,0,0,0,0,0,
-4,4,4,4,4,4,4,4,0,0,4,0x64,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,4,4,4,4,4,4,0,0,0x2179,0x21a9,0x21d9,0x2209,
-0x2239,0x2289,0x22d9,0x2309,0x2339,0,0,0,0,0,0,0,0x236a,0x236a,0x236a,0x236a,
+0,0,0,0x44,0x64,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,4,4,4,4,0,0,
+4,4,0x60,0x64,4,4,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0x64,0,4,4,0,0,
+0,4,0,4,4,4,0x60,0x60,0,0,0,0,0,0,0,0,
+0,0,0,0,4,4,4,4,4,4,4,4,0,0,4,0x64,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,4,4,4,4,4,4,0,0,
+0x2179,0x21a9,0x21d9,0x2209,0x2239,0x2289,0x22d9,0x2309,0x2339,0,0,0,0,0,0,0,
0x236a,0x236a,0x236a,0x236a,0x236a,0x236a,0x236a,0x236a,0x236a,0x236a,0x236a,0x236a,0x236a,0x236a,0x236a,0x236a,
-0x236a,0x236a,0x236a,0x236a,0x236a,0x236a,0x236a,0,0,0x236a,0x236a,0x236a,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0x44,0x44,0x44,0,
-0x64,0x64,0x64,0x64,0x64,0x64,0x44,0x44,0x64,0x64,0x64,0x64,0x44,0,0x64,0x64,
-0x64,0x64,0x64,0x64,0x64,0,0,0,0,0x64,0,0,0,0,0,0,
-0x44,0,0,0,0x44,0x44,0,0,0,0,0,0,1,1,1,1,
+0x236a,0x236a,0x236a,0x236a,0x236a,0x236a,0x236a,0x236a,0x236a,0x236a,0x236a,0,0,0x236a,0x236a,0x236a,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0x44,0x44,0x44,0,0x64,0x64,0x64,0x64,0x64,0x64,0x44,0x44,0x64,0x64,0x64,0x64,
+0x44,0,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0,0,0,0,0x64,0,0,
+0,0,0,0,0x44,0,0,0,0x44,0x44,0,0,0,0,0,0,
+1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
-1,1,1,1,1,1,1,1,1,1,1,1,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,0x25,5,
-5,5,5,5,5,5,5,1,1,1,1,1,1,1,1,1,
-1,1,1,1,5,0x2389,1,1,1,0x23a9,1,1,5,5,5,5,
-0x25,5,5,5,0x25,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,1,1,1,1,
-1,1,1,1,1,1,1,1,1,1,0x23c9,1,1,1,1,1,
-1,1,0x21,1,1,1,1,5,5,5,5,5,0x44,0x44,0x44,0x44,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,0x25,5,5,5,5,5,5,5,5,1,1,1,1,1,
+1,1,1,1,1,1,1,1,5,0x2389,1,1,1,0x23a9,1,1,
+5,5,5,5,0x25,5,5,5,0x25,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+1,1,1,1,1,1,1,1,1,1,1,1,1,1,0x23c9,1,
+1,1,1,1,1,1,0x21,1,1,1,1,5,5,5,5,5,
0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,
-0x44,0x44,0x64,0x64,0x64,0x64,0x64,0x44,0x64,0x64,0x44,0x64,0x44,0x44,0x64,0x44,
-0x44,0x44,0x44,0x44,0x44,0x44,0x64,0x44,0x44,0x64,0x64,0x64,0x64,0x44,0x44,0x44,
-0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x92,0xff91,0x92,0xff91,
-0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xffb1,0x92,0xff91,0x92,0xff91,0x92,0xff91,
-0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x23ea,0x2429,0x92,0xff91,
+0x44,0x44,0x44,0x44,0x44,0x44,0x64,0x64,0x64,0x64,0x64,0x44,0x64,0x64,0x44,0x64,
+0x44,0x44,0x64,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x64,0x44,0x44,0x64,0x64,0x64,
+0x64,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,
+0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xffb1,0x92,0xff91,
0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
-0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x2469,0x24e9,
-0x2569,0x25e9,0x2669,0x26e9,1,1,0x271a,1,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
-0x92,0xff91,0x92,0xffb1,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
-0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x411,0x411,0x411,0x411,0x411,0x411,0x411,0x411,
-0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0x411,0x411,0x411,0x411,0x411,0x411,0,0,
-0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0,0,0x411,0x411,0x411,0x411,0x411,0x411,0x411,0x411,
-0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0x411,0x411,0x411,0x411,0x411,0x411,0x411,0x411,
-0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0x411,0x411,0x411,0x411,0x411,0x411,0,0,
-0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0,0,0x2769,0x411,0x27e9,0x411,0x2899,0x411,0x2949,0x411,
-0,0xfc12,0,0xfc12,0,0xfc12,0,0xfc12,0x411,0x411,0x411,0x411,0x411,0x411,0x411,0x411,
-0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0x2511,0x2511,0x2b11,0x2b11,0x2b11,0x2b11,0x3211,0x3211,
-0x4011,0x4011,0x3811,0x3811,0x3f11,0x3f11,0,0,0x29f9,0x2a69,0x2ad9,0x2b49,0x2bb9,0x2c29,0x2c99,0x2d09,
-0x2d7b,0x2deb,0x2e5b,0x2ecb,0x2f3b,0x2fab,0x301b,0x308b,0x30f9,0x3169,0x31d9,0x3249,0x32b9,0x3329,0x3399,0x3409,
-0x347b,0x34eb,0x355b,0x35cb,0x363b,0x36ab,0x371b,0x378b,0x37f9,0x3869,0x38d9,0x3949,0x39b9,0x3a29,0x3a99,0x3b09,
-0x3b7b,0x3beb,0x3c5b,0x3ccb,0x3d3b,0x3dab,0x3e1b,0x3e8b,0x411,0x411,0x3ef9,0x3f79,0x3fe9,0,0x4069,0x40e9,
-0xfc12,0xfc12,0xdb12,0xdb12,0x419b,4,0x4209,4,4,4,0x4259,0x42d9,0x4349,0,0x43c9,0x4449,
-0xd512,0xd512,0xd512,0xd512,0x44fb,4,4,4,0x411,0x411,0x4569,0x4619,0,0,0x46e9,0x4769,
-0xfc12,0xfc12,0xce12,0xce12,0,4,4,4,0x411,0x411,0x4819,0x48c9,0x4999,0x391,0x4a19,0x4a99,
-0xfc12,0xfc12,0xc812,0xc812,0xfc92,4,4,4,0,0,0x4b49,0x4bc9,0x4c39,0,0x4cb9,0x4d39,
-0xc012,0xc012,0xc112,0xc112,0x4deb,4,4,0,0,0,0,0,0,0,0,0,
-0,0,0,4,4,4,4,4,0,0,0,0,0,0,0,0,
-4,4,0,0,0,0,0,0,4,0,0,4,0,0,4,4,
-4,4,4,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,4,4,4,4,4,0,4,4,4,4,4,4,
-4,4,4,4,0,0x25,0,0,0,0,0,0,0,0,0,0,
-0,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,5,5,5,5,5,5,5,5,5,5,5,5,
-5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0x44,0x44,0x64,0x64,0x44,0x44,0x44,0x44,0x64,0x64,0x64,0x44,
-0x44,4,4,4,4,0x44,4,4,4,0x64,0x64,0x44,0x64,0x44,0x64,0x64,
-0x64,0x64,0x64,0x64,0x44,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,2,0,0,0,0,2,0,0,1,2,
-2,2,1,1,2,2,2,1,0,2,0,0,0,2,2,2,
-2,2,0,0,0,0,0,0,2,0,0x4e5a,0,2,0,0x4e9a,0x4eda,
-2,2,0,1,2,2,0xe12,2,1,0,0,0,0,1,0,0,
-1,1,2,2,0,0,0,0,0,2,1,1,0x21,0x21,0,0,
-0,0,0xf211,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0x812,0x812,0x812,0x812,0x812,0x812,0x812,0x812,0x812,0x812,0x812,0x812,
-0x812,0x812,0x812,0x812,0xf811,0xf811,0xf811,0xf811,0xf811,0xf811,0xf811,0xf811,0xf811,0xf811,0xf811,0xf811,
-0xf811,0xf811,0xf811,0xf811,0,0,0,0x92,0xff91,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0xd12,0xd12,0xd12,0xd12,0xd12,0xd12,0xd12,0xd12,0xd12,0xd12,
-0xd12,0xd12,0xd12,0xd12,0xd12,0xd12,0xd12,0xd12,0xf311,0xf311,0xf311,0xf311,0xf311,0xf311,0xf311,0xf311,
-0xf311,0xf311,0xf311,0xf311,0xf311,0xf311,0xf311,0xf311,0xf311,0xf311,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,
+0x23ea,0x2429,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
+0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
+0x92,0xff91,0x2469,0x24e9,0x2569,0x25e9,0x2669,0x26e9,1,1,0x271a,1,0x92,0xff91,0x92,0xff91,
+0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xffb1,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
+0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x411,0x411,0x411,0x411,
+0x411,0x411,0x411,0x411,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0x411,0x411,0x411,0x411,
+0x411,0x411,0,0,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0,0,0x411,0x411,0x411,0x411,
+0x411,0x411,0x411,0x411,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0x411,0x411,0x411,0x411,
+0x411,0x411,0x411,0x411,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0x411,0x411,0x411,0x411,
+0x411,0x411,0,0,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0,0,0x2769,0x411,0x27e9,0x411,
+0x2899,0x411,0x2949,0x411,0,0xfc12,0,0xfc12,0,0xfc12,0,0xfc12,0x411,0x411,0x411,0x411,
+0x411,0x411,0x411,0x411,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0xfc12,0x2511,0x2511,0x2b11,0x2b11,
+0x2b11,0x2b11,0x3211,0x3211,0x4011,0x4011,0x3811,0x3811,0x3f11,0x3f11,0,0,0x29f9,0x2a69,0x2ad9,0x2b49,
+0x2bb9,0x2c29,0x2c99,0x2d09,0x2d7b,0x2deb,0x2e5b,0x2ecb,0x2f3b,0x2fab,0x301b,0x308b,0x30f9,0x3169,0x31d9,0x3249,
+0x32b9,0x3329,0x3399,0x3409,0x347b,0x34eb,0x355b,0x35cb,0x363b,0x36ab,0x371b,0x378b,0x37f9,0x3869,0x38d9,0x3949,
+0x39b9,0x3a29,0x3a99,0x3b09,0x3b7b,0x3beb,0x3c5b,0x3ccb,0x3d3b,0x3dab,0x3e1b,0x3e8b,0x411,0x411,0x3ef9,0x3f79,
+0x3fe9,0,0x4069,0x40e9,0xfc12,0xfc12,0xdb12,0xdb12,0x419b,4,0x4209,4,4,4,0x4259,0x42d9,
+0x4349,0,0x43c9,0x4449,0xd512,0xd512,0xd512,0xd512,0x44fb,4,4,4,0x411,0x411,0x4569,0x4619,
+0,0,0x46e9,0x4769,0xfc12,0xfc12,0xce12,0xce12,0,4,4,4,0x411,0x411,0x4819,0x48c9,
+0x4999,0x391,0x4a19,0x4a99,0xfc12,0xfc12,0xc812,0xc812,0xfc92,4,4,4,0,0,0x4b49,0x4bc9,
+0x4c39,0,0x4cb9,0x4d39,0xc012,0xc012,0xc112,0xc112,0x4deb,4,4,0,0,0,0,0,
+0,0,0,0,0,0,0,4,4,4,4,4,0,0,0,0,
+0,0,0,0,4,4,0,0,0,0,0,0,4,0,0,4,
+0,0,4,4,4,4,4,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,4,4,4,4,4,0,4,4,
+4,4,4,4,4,4,4,4,0,0x25,0,0,0,0,0,0,
+0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,5,5,5,5,5,5,5,5,
+5,5,5,5,5,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0x44,0x44,0x64,0x64,0x44,0x44,0x44,0x44,
+0x64,0x64,0x64,0x44,0x44,4,4,4,4,0x44,4,4,4,0x64,0x64,0x44,
+0x64,0x44,0x64,0x64,0x64,0x64,0x64,0x64,0x44,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,2,
+0,0,1,2,2,2,1,1,2,2,2,1,0,2,0,0,
+0,2,2,2,2,2,0,0,0,0,0,0,2,0,0x4e5a,0,
+2,0,0x4e9a,0x4eda,2,2,0,1,2,2,0xe12,2,1,0,0,0,
+0,1,0,0,1,1,2,2,0,0,0,0,0,2,1,1,
+0x21,0x21,0,0,0,0,0xf211,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0x812,0x812,0x812,0x812,0x812,0x812,0x812,0x812,
+0x812,0x812,0x812,0x812,0x812,0x812,0x812,0x812,0xf811,0xf811,0xf811,0xf811,0xf811,0xf811,0xf811,0xf811,
+0xf811,0xf811,0xf811,0xf811,0xf811,0xf811,0xf811,0xf811,0,0,0,0x92,0xff91,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0xd12,0xd12,0xd12,0xd12,0xd12,0xd12,
+0xd12,0xd12,0xd12,0xd12,0xd12,0xd12,0xd12,0xd12,0xd12,0xd12,0xd12,0xd12,0xf311,0xf311,0xf311,0xf311,
+0xf311,0xf311,0xf311,0xf311,0xf311,0xf311,0xf311,0xf311,0xf311,0xf311,0xf311,0xf311,0xf311,0xf311,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,
0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,
+0x1812,0x1812,0x1812,0x1812,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,
0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,
-0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,0xe811,
-0x92,0xff91,0x4f1a,0x4f3a,0x4f5a,0x4f79,0x4f99,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x4fba,0x4fda,0x4ffa,
-0x501a,1,0x92,0xff91,1,0x92,0xff91,1,1,1,1,1,0x25,5,0x503a,0x503a,
-0x92,0xff91,0x92,0xff91,1,0,0,0,0,0,0,0x92,0xff91,0x92,0xff91,0x44,
-0x44,0x44,0x92,0xff91,0,0,0,0,0,0,0,0,0,0,0,0,
-0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,
+0xe811,0xe811,0xe811,0xe811,0x92,0xff91,0x4f1a,0x4f3a,0x4f5a,0x4f79,0x4f99,0x92,0xff91,0x92,0xff91,0x92,
+0xff91,0x4fba,0x4fda,0x4ffa,0x501a,1,0x92,0xff91,1,0x92,0xff91,1,1,1,1,1,
+0x25,5,0x503a,0x503a,0x92,0xff91,0x92,0xff91,1,0,0,0,0,0,0,0x92,
+0xff91,0x92,0xff91,0x44,0x44,0x44,0x92,0xff91,0,0,0,0,0,0,0,0,
+0,0,0,0,0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,
0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,
-0x5059,0x5059,0,0x5059,0,0,0,0,0,0x5059,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0x64,
-0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,
+0x5059,0x5059,0x5059,0x5059,0x5059,0x5059,0,0x5059,0,0,0,0,0,0x5059,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0x64,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,
0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,
-0,0,0,0,0,0,0,0,0,0,0x64,0x64,0x64,0x64,0x60,0x60,
-0,4,4,4,4,4,0,0,0,0,0,4,0,0,0,0,
+0x44,0x44,0x44,0x44,0,0,0,0,0,0,0,0,0,0,0x64,0x64,
+0x64,0x64,0x60,0x60,0,4,4,4,4,4,0,0,0,0,0,4,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0x64,0x64,4,4,4,4,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0x64,0x64,4,4,4,4,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,4,4,4,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,4,4,4,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x507a,0x50b9,
-0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
-0x92,0xff91,0x92,0xff91,0x92,0xff91,0,0x44,4,4,4,0,0x44,0x44,0x44,0x44,
-0x44,0x44,0x44,0x44,0x44,0x44,0,4,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
+0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
+0x92,0xff91,0x507a,0x50b9,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
+0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0,0x44,4,4,4,0,
+0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0,4,0x92,0xff91,0x92,0xff91,
0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
-0x92,0xff91,0x92,0xff91,5,5,0x44,0x44,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0x44,0x44,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,4,4,4,4,4,4,4,4,
+0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,5,5,0x44,0x44,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0x44,0x44,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,4,4,4,4,
4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,
-4,4,4,4,4,4,4,4,4,4,0x92,0xff91,0x92,0xff91,0x92,0xff91,
-0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,1,1,0x92,0xff91,0x92,0xff91,0x92,0xff91,
-0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,5,1,1,1,
-1,1,1,1,1,0x92,0xff91,0x92,0xff91,0x50fa,0x92,0xff91,0x92,0xff91,0x92,0xff91,
-0x92,0xff91,0x92,0xff91,4,4,4,0x92,0xff91,0x511a,1,0,0x92,0xff91,0x92,0xff91,
-0x1811,1,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x513a,0x515a,
-0x517a,0x519a,0x513a,1,0x51ba,0x51da,0x51fa,0x521a,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
-0x92,0xff91,0x92,0xff91,0xe812,0x523a,0x525a,0x92,0xff91,0x92,0xff91,0,0,0,0,0,
-0x92,0xff91,0,1,0,1,0x92,0xff91,0x92,0xff91,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,4,
-4,0x92,0xff91,0,5,5,1,0,0,0,0,0,0,0,4,0,
-0,0,0x64,0,0,0,0,4,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,4,4,0,
-0,0,0,0,0x64,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0x64,4,0,0,0,0,0,0,
+4,4,4,4,4,4,4,4,4,4,4,4,4,4,0x92,0xff91,
+0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,1,1,0x92,0xff91,
+0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
+5,1,1,1,1,1,1,1,1,0x92,0xff91,0x92,0xff91,0x50fa,0x92,0xff91,
+0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,4,4,4,0x92,0xff91,0x511a,1,0,
+0x92,0xff91,0x92,0xff91,0x1811,1,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,
+0x92,0xff91,0x513a,0x515a,0x517a,0x519a,0x513a,1,0x51ba,0x51da,0x51fa,0x521a,0x92,0xff91,0x92,0xff91,
+0x92,0xff91,0x92,0xff91,0x92,0xff91,0x92,0xff91,0xe812,0x523a,0x525a,0x92,0xff91,0x92,0xff91,0,
+0,0,0,0,0x92,0xff91,0,1,0,1,0x92,0xff91,0x92,0xff91,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,5,5,5,0x92,0xff91,0,5,5,1,0,0,0,0,0,
+0,0,4,0,0,0,0x64,0,0,0,0,4,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,4,4,0,0,0,0,0,0x64,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0x64,4,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,
+0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0,0,0,0,0,0,
+0,0,0,0,0,0,0,4,0,0,0,0,0,0,4,4,
+4,4,4,0x64,0x64,0x64,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,4,4,4,4,4,
+4,4,4,4,4,4,0,0x60,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0x64,0,0,4,4,
+4,4,0,0,4,4,0,0,0x60,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,4,4,4,4,4,4,0,
+0,4,4,0,0,4,4,0,0,0,0,0,0,0,0,0,
+0,0,0,4,0,0,0,0,0,0,0,0,4,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,
-0x44,0x44,0x44,0x44,0x44,0x44,0,0,0,0,0,0,0,0,0,0,
-0,0,0,4,0,0,0,0,0,0,4,4,4,4,4,0x64,
-0x64,0x64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,4,4,4,4,4,4,4,4,4,
-4,4,0,0x60,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0x64,0,0,4,4,4,4,0,0,
-4,4,0,0,0x60,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,4,4,4,4,4,4,0,0,4,4,0,
-0,4,4,0,0,0,0,0,0,0,0,0,0,0,0,4,
-0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,
-0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0x44,0,0x44,0x44,
-0x64,0,0,0x44,0x44,0,0,0,0,0,0x44,0x44,0,0x44,0,0,
+4,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,4,4,0,0,0,0,0,4,
-4,0,0x64,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,
-1,1,1,1,1,1,1,0x5279,1,1,1,1,1,1,1,4,
-5,5,5,5,1,1,1,1,1,1,1,1,1,4,4,4,
-0,0,0,0,0x5299,0x52c9,0x52f9,0x5329,0x5359,0x5389,0x53b9,0x53e9,0x5419,0x5449,0x5479,0x54a9,
-0x54d9,0x5509,0x5539,0x5569,0x5b99,0x5bc9,0x5bf9,0x5c29,0x5c59,0x5c89,0x5cb9,0x5ce9,0x5d19,0x5d49,0x5d79,0x5da9,
-0x5dd9,0x5e09,0x5e39,0x5e69,0x5e99,0x5ec9,0x5ef9,0x5f29,0x5f59,0x5f89,0x5fb9,0x5fe9,0x6019,0x6049,0x6079,0x60a9,
-0x60d9,0x6109,0x6139,0x6169,0x5599,0x55c9,0x55f9,0x5629,0x5659,0x5689,0x56b9,0x56e9,0x5719,0x5749,0x5779,0x57a9,
-0x57d9,0x5809,0x5839,0x5869,0x5899,0x58c9,0x58f9,0x5929,0x5959,0x5989,0x59b9,0x59e9,0x5a19,0x5a49,0x5a79,0x5aa9,
-0x5ad9,0x5b09,0x5b39,0x5b69,0,0,0,0,0,4,0,0,4,0,0,0,
-0,0x64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0x6199,0x6219,0x6299,0x6319,0x63c9,0x6479,0x6519,0,0,0,0,0,
-0,0,0,0,0,0,0,0x65b9,0x6639,0x66b9,0x6739,0x67b9,0,0,0,0,
-0,0,0x64,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,4,4,4,4,4,4,4,4,4,4,
-4,4,4,4,4,4,4,4,0,0,0,4,0,0,0,0,
-0,0,0,0,0,0,0,0,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x64,
-0x64,0x64,0x64,0x64,0x64,0x64,0x44,0x44,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,4,0,0,4,0,0,
-0,0,0,0,0,0,0,0,0,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,
+0x44,0,0x44,0x44,0x64,0,0,0x44,0x44,0,0,0,0,0,0x44,0x44,
+0,0x44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,4,4,0,0,
+0,0,0,4,4,0,0x64,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,
+1,1,1,1,1,1,1,1,1,1,1,0x5279,1,1,1,1,
+1,1,1,4,5,5,5,5,1,1,1,1,1,1,1,1,
+1,5,4,4,0,0,0,0,0x5299,0x52c9,0x52f9,0x5329,0x5359,0x5389,0x53b9,0x53e9,
+0x5419,0x5449,0x5479,0x54a9,0x54d9,0x5509,0x5539,0x5569,0x5b99,0x5bc9,0x5bf9,0x5c29,0x5c59,0x5c89,0x5cb9,0x5ce9,
+0x5d19,0x5d49,0x5d79,0x5da9,0x5dd9,0x5e09,0x5e39,0x5e69,0x5e99,0x5ec9,0x5ef9,0x5f29,0x5f59,0x5f89,0x5fb9,0x5fe9,
+0x6019,0x6049,0x6079,0x60a9,0x60d9,0x6109,0x6139,0x6169,0x5599,0x55c9,0x55f9,0x5629,0x5659,0x5689,0x56b9,0x56e9,
+0x5719,0x5749,0x5779,0x57a9,0x57d9,0x5809,0x5839,0x5869,0x5899,0x58c9,0x58f9,0x5929,0x5959,0x5989,0x59b9,0x59e9,
+0x5a19,0x5a49,0x5a79,0x5aa9,0x5ad9,0x5b09,0x5b39,0x5b69,0,0,0,0,0,4,0,0,
+4,0,0,0,0,0x64,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0x6199,0x6219,0x6299,0x6319,0x63c9,0x6479,0x6519,0,
+0,0,0,0,0,0,0,0,0,0,0,0x65b9,0x6639,0x66b9,0x6739,0x67b9,
+0,0,0,0,0,0,0x64,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,4,4,4,4,4,4,
+4,4,4,4,4,4,4,4,4,4,4,4,0,0,0,4,
+0,0,0,0,0,0,0,0,0,0,0,0,0x44,0x44,0x44,0x44,
+0x44,0x44,0x44,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x44,0x44,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,
+0,4,0,0,0,0,0,0,0,0,0,0,0,0x1012,0x1012,0x1012,
0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,
-0x1012,0x1012,0x1012,0,0,0,4,0,4,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,
+0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0,0,0,4,0,4,0xf011,0xf011,0xf011,
0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,
-0xf011,0xf011,0xf011,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,4,4,4,
+0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0x64,0,0,
-0x64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,4,4,4,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0x44,0x44,0x44,0x44,0x44,0,0,0,0,0,0x1412,0x1412,0x1412,0x1412,
+0,0x64,0,0,0x64,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0x44,0x44,0x44,0x44,0x44,0,0,0,0,0,
+0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,
0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,
-0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0xec11,0xec11,0xec11,0xec11,
-0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,
-0xec11,0xec11,0xec11,0xec11,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,
-0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0,0,0,0,0xec11,0xec11,0xec11,0xec11,
0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,
0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0x1392,0x1392,0x1392,0x1392,0x1392,0x1392,0x1392,0x1392,
-0x1392,0x1392,0x1392,0,0x1392,0x1392,0x1392,0x1392,0x1392,0x1392,0x1392,0,0x1392,0x1392,0,0xec91,
-0xec91,0xec91,0xec91,0xec91,0xec91,0xec91,0xec91,0xec91,0xec91,0xec91,0,0xec91,0xec91,0xec91,0xec91,0xec91,
-0xec91,0xec91,0xec91,0xec91,0xec91,0xec91,0xec91,0xec91,0xec91,0xec91,0,0xec91,0xec91,0xec91,0xec91,0xec91,
-0xec91,0xec91,0,0xec91,0xec91,0,0,0,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,0,5,5,5,5,5,5,
-5,5,5,0,0,0,0,0,5,4,4,5,5,5,0,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,0,4,4,4,0,4,4,0,
-0,0,0,0,4,0x64,4,0x44,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0x44,0x64,0x64,0,0,0,0,0x64,0,0,0,0,0,0x44,0x64,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,
+0,0,0,0,0,0,0,0,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,
+0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0x1412,0,0,0,0,
+0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,
+0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0xec11,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0x1392,0x1392,0x1392,0x1392,
+0x1392,0x1392,0x1392,0x1392,0x1392,0x1392,0x1392,0,0x1392,0x1392,0x1392,0x1392,0x1392,0x1392,0x1392,0,
+0x1392,0x1392,0,0xec91,0xec91,0xec91,0xec91,0xec91,0xec91,0xec91,0xec91,0xec91,0xec91,0xec91,0,0xec91,
+0xec91,0xec91,0xec91,0xec91,0xec91,0xec91,0xec91,0xec91,0xec91,0xec91,0xec91,0xec91,0xec91,0xec91,0,0xec91,
+0xec91,0xec91,0xec91,0xec91,0xec91,0xec91,0,0xec91,0xec91,0,0,0,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,0,5,5,
+5,5,5,5,5,5,5,0,0,0,0,0,5,4,4,5,
+5,5,0,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,0,4,4,4,
+0,4,4,0,0,0,0,0,4,0x64,4,0x44,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0x44,0x64,0x64,0,0,0,0,0x64,0,0,0,0,
+0,0x44,0x64,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0x2012,0x2012,0x2012,0x2012,
0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,
-0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,
+0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,0x2012,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0xe011,0xe011,0xe011,0xe011,
0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,
-0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0x44,0x44,0x44,0x44,0,0,0,0,
+0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,0xe011,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0x44,0x44,0x44,0x44,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0x44,0x44,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0x64,0x64,
-0x44,0x44,0x44,0x64,0x44,0x64,0x64,0x64,0x64,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0x44,0x64,0x44,0x64,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0x44,0x44,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,4,4,4,4,4,4,4,4,
-4,4,0x64,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0x64,0,0,4,
-4,0,0,0,0,0,0,0,0,0,0,0x64,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,
-4,4,4,0,0,0x64,0x64,0,0,4,0,0,0,0,4,0,
-0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0x44,0x44,0x44,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0x64,0x64,0x64,
+0,0,0,0,0,0,0x64,0x64,0x44,0x44,0x44,0x64,0x44,0x64,0x64,0x64,
+0x64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0x44,0x64,0x44,0x64,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,
-4,4,4,4,0,4,4,4,4,4,4,0x64,0x64,0,0,0,
+4,4,4,4,4,4,4,4,4,4,0x64,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0x64,0,0,4,4,0,0,0,0,0,0,0,
0,0,0,0x64,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,4,4,4,4,4,4,
-4,4,4,0,0x60,0,0,0,0,0,0,0,0,4,0x64,4,
-4,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,4,4,4,0,0,4,0x60,0x64,4,
-0,0,0,0,0,0,4,0,0,0,0,4,4,4,4,4,
-4,0x64,0x64,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,
-0,0,0,0,0,0x60,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0x44,0x44,0x44,0x44,0x44,0x44,
-0x44,0,0,0,0x44,0x44,0x44,0x44,0x44,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0x64,4,4,0,0x64,0,0,0,0,0,
+0,0,0,0,0,0,0,4,4,4,4,0,0,0x64,0x64,0,
+0,4,0,0,0,0,4,0,0,0,0,0,0,0,0,0,
+0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0x44,0x44,0x44,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0x44,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,4,4,4,4,4,4,0,4,0,
-0,0,0,4,4,0,0x64,0x64,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,4,4,4,4,4,0,4,4,4,
+4,4,4,0x64,0x64,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0x64,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,4,4,4,4,0,0,0,0,0,0,
-4,4,0,0x64,0x64,0,0,0,0,0,0,0,0,0,0,0,
+0,0,4,4,4,4,4,4,4,4,4,0,0x60,0,0,0,
+0,0,0,0,0,4,0x64,4,4,0,0,4,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,
+4,4,0,0,4,0x60,0x64,4,0,0,0,0,0,0,4,0,
+0,0,0,4,4,4,4,4,4,0x64,0x64,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-4,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,4,4,4,4,4,4,4,4,0,
-0,4,0,0x64,0,0,0,0,0,0,0,0,0,0,0,4,
-0,4,0,0,4,4,4,4,4,4,0x60,0x64,0,0,0,0,
+4,0,0,0,0,0,0,0,0,0,0,0,0,0x60,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,4,4,4,0,0,4,4,
-4,4,0,4,4,4,4,0x64,0,0,0,0,0,0,0,0,
+0,0,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0,0,0,0x44,0x44,0x44,0x44,
+0x44,0,0,0,0,0,0,0,0,0,0,0,0,0,0x64,4,
+4,0,0x64,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0x44,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,
-4,4,4,4,4,4,4,4,0,0x64,0x64,0,0,0,0,0,
-0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,
-0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,
-0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,
-0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,
+4,4,4,4,4,0,4,0,0,0,0,4,4,0,0x64,0x64,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,4,4,0x60,0x64,0,
-0,0,0,0x64,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,4,
+4,4,0,0,0,0,0,0,4,4,0,0x64,0x64,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-4,4,4,4,0,0,4,4,0,0,0,0,0,4,4,4,
-4,4,4,4,4,4,4,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,4,4,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,
-0x64,4,4,4,4,0,0,4,4,4,4,0,0,0,0,0,
-0,0,0,0x64,0,0,0,0,0,0,0,0,0,4,4,4,
-4,4,4,0,0,4,4,4,0,0,0,0,0,0,0,0,
-0,0,4,4,4,4,4,4,4,4,4,4,4,4,4,0,
-4,0x64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,4,4,4,4,4,4,4,0,4,4,4,4,
-4,4,0,0x64,4,4,4,4,4,4,4,4,0,0,4,4,
-4,4,4,4,4,0,4,4,0,4,4,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,4,4,4,
-4,4,4,0,0,0,4,0,4,4,0,4,4,4,0x64,4,
-0x64,0x64,0,4,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,4,4,0,0,
-0,4,0,0x64,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,4,4,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,4,4,4,4,
-4,4,4,4,4,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0x64,0x64,0x64,0x64,0x64,0,0,0,
+4,4,4,4,4,4,4,0,0,4,0,0x64,0,0,0,0,
+0,0,0,0,0,0,0,4,0,4,0,0,4,4,4,4,
+4,4,0x60,0x64,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,4,4,4,0,0,4,4,4,4,0,4,4,4,4,0x64,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0x44,0x44,0x44,0x44,0x44,0x44,0x44,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,4,4,4,4,4,4,4,4,4,
-4,4,4,4,4,4,4,4,4,4,0,4,4,0,0,0,
-0,0,0,0,0,0,0,0,0x60,0x60,0,0,0,0,0,0,
+0,0x64,0x64,0,0,0,0,0,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,
+0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,
+0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0x1012,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,
+0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,
+0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0xf011,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,4,4,0x60,0x64,0,0,0,0,0x64,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,4,4,4,4,0,0,4,4,
+0,0,0,0,0,4,4,4,4,4,4,4,4,4,4,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,4,0x64,4,4,4,4,0,0,4,
+4,4,4,0,0,0,0,0,0,0,0,0x64,0,0,0,0,
+0,0,0,0,0,4,4,4,4,4,4,0,0,4,4,4,
+0,0,0,0,0,0,0,0,0,0,4,4,4,4,4,4,
+4,4,4,4,4,4,4,0,4,0x64,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,4,4,4,4,
-0,4,4,4,4,4,4,4,0,4,4,0,0,0,0,0,
+4,4,4,0,4,4,4,4,4,4,0,0x64,4,4,4,4,
+4,4,4,4,0,0,4,4,4,4,4,4,4,0,4,4,
+0,4,4,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,4,4,4,4,4,4,0,0,0,4,0,
+4,4,0,4,4,4,0x64,4,0x64,0x64,0,4,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,4,0x64,0,4,4,4,4,
-4,4,4,4,4,4,4,4,4,4,0,0,4,4,4,4,
-4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,0,
+0,0,0,0,4,4,0,0,0,4,0,0x64,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,
+4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,4,4,4,4,4,0,
+0,0,0,0,4,0x60,0x64,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0x60,0x60,0x64,0x64,0x64,0,0,
-0,0x60,0x60,0x60,0x60,0x60,0x60,4,4,4,4,4,4,4,4,0x64,
-0x64,0x64,0x64,0x64,0x64,0x64,0x64,0,0,0x44,0x44,0x44,0x44,0x44,0x64,0x64,
+0,0,0,0,4,4,4,4,4,4,4,4,4,4,4,4,
+4,4,4,4,4,0,0,0,0,0,0,4,4,4,4,4,
+4,4,4,4,4,4,4,4,4,4,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0x64,0x64,0x64,0x64,
+0x64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,4,4,4,4,4,
+4,4,4,4,4,4,4,4,4,4,4,4,4,4,0,4,
+4,0,0,0,0,0,0,0,0,0,0,0,0x60,0x60,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+4,4,4,4,0,4,4,4,4,4,4,4,0,4,4,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,4,0x64,0,
+4,4,4,4,4,4,4,4,4,4,4,4,4,4,0,0,
+4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,
+4,4,4,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0x60,0x60,0x64,
+0x64,0x64,0,0,0,0x60,0x60,0x60,0x60,0x60,0x60,4,4,4,4,4,
+4,4,4,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0,0,0x44,0x44,0x44,
+0x44,0x44,0x64,0x64,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0x44,0x44,0x44,0x44,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0x44,0x44,0x44,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0x44,0x44,0x44,0x44,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0x44,0x44,
-0x44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,
-2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
-2,2,2,2,2,2,1,1,1,1,1,1,1,1,0x21,0x21,
-1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
-2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,1,
-1,1,1,1,1,0,0x21,0x21,1,1,1,1,1,1,1,1,
2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
2,2,2,2,2,2,2,2,2,2,1,1,1,1,1,1,
1,1,0x21,0x21,1,1,1,1,1,1,1,1,1,1,1,1,
-1,1,1,1,2,0,2,2,0,0,2,0,0,2,2,0,
-0,2,2,2,2,0,2,2,2,2,2,2,2,2,1,1,
-1,1,0,1,0,1,0x21,0x21,1,1,1,1,0,1,1,1,
-1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,
-2,2,2,2,2,2,2,2,1,1,1,1,2,2,0,2,
-2,2,2,0,0,2,2,2,2,2,2,2,2,0,2,2,
-2,2,2,2,2,0,1,1,1,1,1,1,1,1,0x21,0x21,
-1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
-2,2,0,2,2,2,2,0,2,2,2,2,2,0,2,0,
-0,0,2,2,2,2,2,2,2,0,1,1,1,1,1,1,
+1,1,1,1,2,2,2,2,2,2,2,2,2,2,2,2,
+2,2,1,1,1,1,1,1,1,0,0x21,0x21,1,1,1,1,
+1,1,1,1,2,2,2,2,2,2,2,2,2,2,2,2,
+2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,1,
+1,1,1,1,1,1,0x21,0x21,1,1,1,1,1,1,1,1,
+1,1,1,1,1,1,1,1,2,0,2,2,0,0,2,0,
+0,2,2,0,0,2,2,2,2,0,2,2,2,2,2,2,
+2,2,1,1,1,1,0,1,0,1,0x21,0x21,1,1,1,1,
+0,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,
+2,2,2,2,2,2,2,2,2,2,2,2,1,1,1,1,
+2,2,0,2,2,2,2,0,0,2,2,2,2,2,2,2,
+2,0,2,2,2,2,2,2,2,0,1,1,1,1,1,1,
1,1,0x21,0x21,1,1,1,1,1,1,1,1,1,1,1,1,
-2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
-2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,
+1,1,1,1,2,2,0,2,2,2,2,0,2,2,2,2,
+2,0,2,0,0,0,2,2,2,2,2,2,2,0,1,1,
+1,1,1,1,1,1,0x21,0x21,1,1,1,1,1,1,1,1,
1,1,1,1,2,2,2,2,2,2,2,2,2,2,2,2,
-2,2,2,2,1,1,1,1,1,1,0,0,2,2,2,2,
+2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,1,
+1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,
+2,2,2,2,2,2,2,2,1,1,1,1,1,1,0,0,
2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
-2,2,2,2,2,0,1,1,1,1,1,1,1,1,1,1,
-1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,
-1,1,1,1,1,1,2,2,2,2,2,2,2,2,2,2,
-2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,0,
+2,2,2,2,2,2,2,2,2,0,1,1,1,1,1,1,
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
-1,1,1,1,1,0,1,1,1,1,1,1,2,2,2,2,
-2,2,2,2,2,2,2,2,2,2,2,2,1,1,1,0,
-1,1,1,1,1,1,2,1,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,4,4,4,4,
+1,1,1,0,1,1,1,1,1,1,2,2,2,2,2,2,
+2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
+2,2,2,0,1,1,1,1,1,1,1,1,1,1,1,1,
+1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,
+2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
+1,1,1,0,1,1,1,1,1,1,2,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,
-4,4,4,0,0,0,0,4,4,4,4,4,4,4,4,4,
-4,4,4,4,4,0,0,0,0,0,0,0,0,4,0,0,
-0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,
-4,4,4,4,0,4,4,4,4,4,4,4,4,4,4,4,
-4,4,4,4,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0,0x44,0x44,0x44,0x44,
-0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0,0,0x44,
-0x44,0x44,0x44,0x44,1,1,1,1,1,1,1,1,1,1,0,1,
-1,1,1,1,1,1,1,1,1,1,1,1,1,1,0x21,1,
-1,1,1,0,0x44,0x44,0,0x44,0x44,0,0x44,0x44,0x44,0x44,0x44,0,
+4,4,4,4,4,4,4,0,0,0,0,4,4,4,4,4,
+4,4,4,4,4,4,4,4,4,0,0,0,0,0,0,0,
+0,4,0,0,0,0,0,0,0,0,0,0,4,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,4,4,4,4,4,0,4,4,4,4,4,4,4,
+4,4,4,4,4,4,4,4,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0,
+0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,0x44,
+0x44,0,0,0x44,0x44,0x44,0x44,0x44,1,1,1,1,1,1,1,1,
+1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,
+1,1,0x21,1,1,1,1,0,0,0,0,0,0,1,1,1,
+1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0x44,0x44,0,0x44,0x44,0,0x44,0x44,
+0x44,0x44,0x44,0,0,0,0,0,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,0x25,5,5,5,5,5,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0x44,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,5,5,5,5,5,5,5,5,5,5,5,5,
+0x25,0x25,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0x44,0x44,0x44,0x44,0x44,0x44,0x44,4,4,4,4,4,
-4,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+4,4,0,0,0,0,0,0,0,0,0,0,0,0,0,4,
+0x64,0x64,0x64,0x44,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0x64,0x64,0x64,0x64,0x64,0x64,0x64,0,0,0,0,0,
0,0,0,0,0x1112,0x1112,0x1112,0x1112,0x1112,0x1112,0x1112,0x1112,0x1112,0x1112,0x1112,0x1112,
0x1112,0x1112,0x1112,0x1112,0x1112,0x1112,0x1112,0x1112,0x1112,0x1112,0x1112,0x1112,0x1112,0x1112,0x1112,0x1112,
@@ -965,17 +980,17 @@ static const UCaseProps ucase_props_singleton={
ucase_props_unfold,
{
ucase_props_trieIndex,
- ucase_props_trieIndex+3408,
+ ucase_props_trieIndex+3412,
NULL,
- 3408,
- 9500,
+ 3412,
+ 9736,
0x188,
- 0xdcc,
+ 0xdd0,
0x0,
0x0,
0xe0800,
- 0x3268,
- NULL, 0, FALSE, FALSE, 0, NULL
+ 0x3358,
+ NULL, 0, false, false, 0, NULL
},
{ 4,0,0,0 }
};
diff --git a/thirdparty/icu4c/common/ucasemap.cpp b/thirdparty/icu4c/common/ucasemap.cpp
index 95b55d56a0..fc0439db0f 100644
--- a/thirdparty/icu4c/common/ucasemap.cpp
+++ b/thirdparty/icu4c/common/ucasemap.cpp
@@ -157,7 +157,7 @@ appendResult(int32_t cpLength, int32_t result, const UChar *s,
ByteSinkUtil::appendCodePoint(cpLength, result, sink, edits);
}
}
- return TRUE;
+ return true;
}
// See unicode/utf8.h U8_APPEND_UNSAFE().
@@ -525,14 +525,14 @@ ucasemap_internalUTF8ToTitle(
csc.p=(void *)src;
csc.limit=srcLength;
int32_t prev=0;
- UBool isFirstIndex=TRUE;
+ UBool isFirstIndex=true;
/* titlecasing loop */
while(prev<srcLength) {
/* find next index where to titlecase */
int32_t index;
if(isFirstIndex) {
- isFirstIndex=FALSE;
+ isFirstIndex=false;
index=iter->first();
} else {
index=iter->next();
@@ -643,12 +643,12 @@ UBool isFollowedByCasedLetter(const uint8_t *s, int32_t i, int32_t length) {
if ((type & UCASE_IGNORABLE) != 0) {
// Case-ignorable, continue with the loop.
} else if (type != UCASE_NONE) {
- return TRUE; // Followed by cased letter.
+ return true; // Followed by cased letter.
} else {
- return FALSE; // Uncased and not case-ignorable.
+ return false; // Uncased and not case-ignorable.
}
}
- return FALSE; // Not followed by cased letter.
+ return false; // Not followed by cased letter.
}
// Keep this consistent with the UTF-16 version in ustrcase.cpp and the Java version in CaseMap.java.
@@ -707,7 +707,7 @@ void toUpper(uint32_t options,
nextState |= AFTER_VOWEL_WITH_ACCENT;
}
// Map according to Greek rules.
- UBool addTonos = FALSE;
+ UBool addTonos = false;
if (upper == 0x397 &&
(data & HAS_ACCENT) != 0 &&
numYpogegrammeni == 0 &&
@@ -718,7 +718,7 @@ void toUpper(uint32_t options,
if (i == nextIndex) {
upper = 0x389; // Preserve the precomposed form.
} else {
- addTonos = TRUE;
+ addTonos = true;
}
} else if ((data & HAS_DIALYTIKA) != 0) {
// Preserve a vowel with dialytika in precomposed form if it exists.
@@ -733,7 +733,7 @@ void toUpper(uint32_t options,
UBool change;
if (edits == nullptr && (options & U_OMIT_UNCHANGED_TEXT) == 0) {
- change = TRUE; // common, simple usage
+ change = true; // common, simple usage
} else {
// Find out first whether we are changing the text.
U_ASSERT(0x370 <= upper && upper <= 0x3ff); // 2-byte UTF-8, main Greek block
diff --git a/thirdparty/icu4c/common/uchar.cpp b/thirdparty/icu4c/common/uchar.cpp
index 61e9c3d900..7789a3b88a 100644
--- a/thirdparty/icu4c/common/uchar.cpp
+++ b/thirdparty/icu4c/common/uchar.cpp
@@ -126,7 +126,7 @@ u_isxdigit(UChar32 c) {
(c<=0x66 && c>=0x41 && (c<=0x46 || c>=0x61)) ||
(c>=0xff21 && c<=0xff46 && (c<=0xff26 || c>=0xff41))
) {
- return TRUE;
+ return true;
}
GET_PROPS(c, props);
@@ -249,7 +249,7 @@ U_CAPI UBool U_EXPORT2
u_isprint(UChar32 c) {
uint32_t props;
GET_PROPS(c, props);
- /* comparing ==0 returns FALSE for the categories mentioned */
+ /* comparing ==0 returns false for the categories mentioned */
return (UBool)((CAT_MASK(props)&U_GC_C_MASK)==0);
}
@@ -273,7 +273,7 @@ U_CAPI UBool U_EXPORT2
u_isgraph(UChar32 c) {
uint32_t props;
GET_PROPS(c, props);
- /* comparing ==0 returns FALSE for the categories mentioned */
+ /* comparing ==0 returns false for the categories mentioned */
return (UBool)((CAT_MASK(props)&
(U_GC_CC_MASK|U_GC_CF_MASK|U_GC_CS_MASK|U_GC_CN_MASK|U_GC_Z_MASK))
==0);
@@ -291,7 +291,7 @@ u_isgraphPOSIX(UChar32 c) {
uint32_t props;
GET_PROPS(c, props);
/* \p{space}\p{gc=Control} == \p{gc=Z}\p{Control} */
- /* comparing ==0 returns FALSE for the categories mentioned */
+ /* comparing ==0 returns false for the categories mentioned */
return (UBool)((CAT_MASK(props)&
(U_GC_CC_MASK|U_GC_CS_MASK|U_GC_CN_MASK|U_GC_Z_MASK))
==0);
@@ -591,7 +591,7 @@ uscript_hasScript(UChar32 c, UScriptCode sc) {
uint32_t sc32=sc;
if(sc32>0x7fff) {
/* Guard against bogus input that would make us go past the Script_Extensions terminator. */
- return FALSE;
+ return false;
}
while(sc32>*scx) {
++scx;
@@ -654,7 +654,7 @@ _enumPropertyStartsRange(const void *context, UChar32 start, UChar32 end, uint32
sa->add(sa->set, start);
(void)end;
(void)value;
- return TRUE;
+ return true;
}
#define USET_ADD_CP_AND_NEXT(sa, cp) sa->add(sa->set, cp); sa->add(sa->set, cp+1)
diff --git a/thirdparty/icu4c/common/uchar_props_data.h b/thirdparty/icu4c/common/uchar_props_data.h
index c45b9da3fd..acbeadd249 100644
--- a/thirdparty/icu4c/common/uchar_props_data.h
+++ b/thirdparty/icu4c/common/uchar_props_data.h
@@ -9,146 +9,146 @@
#ifdef INCLUDED_FROM_UCHAR_C
-static const UVersionInfo dataVersion={0xe,0,0,0};
+static const UVersionInfo dataVersion={0xf,0,0,0};
-static const uint16_t propsTrie_index[22688]={
-0x48c,0x494,0x49c,0x4a4,0x4bc,0x4c4,0x4cc,0x4d4,0x4dc,0x4e4,0x4ea,0x4f2,0x4fa,0x502,0x50a,0x512,
-0x518,0x520,0x528,0x530,0x533,0x53b,0x543,0x54b,0x553,0x55b,0x557,0x55f,0x567,0x56f,0x574,0x57c,
-0x584,0x58c,0x590,0x598,0x5a0,0x5a8,0x5b0,0x5b8,0x5b4,0x5bc,0x5c1,0x5c9,0x5cf,0x5d7,0x5df,0x5e7,
-0x5ef,0x5f7,0x5ff,0x607,0x60c,0x614,0x617,0x61f,0x627,0x62f,0x635,0x63d,0x63c,0x644,0x64c,0x654,
-0x664,0x65c,0x66c,0x674,0x67a,0x5f7,0x68a,0x682,0x69a,0x69c,0x6a4,0x692,0x6b4,0x6ba,0x6c2,0x6ac,
-0x6d2,0x6d8,0x6e0,0x6ca,0x6f0,0x6f6,0x6fe,0x6e8,0x70e,0x714,0x71c,0x706,0x72c,0x734,0x73c,0x724,
-0x74c,0x752,0x75a,0x744,0x76a,0x770,0x778,0x762,0x788,0x78d,0x795,0x780,0x7a5,0x7ac,0x7b4,0x79d,
-0x638,0x7bc,0x7c4,0x4ac,0x7cc,0x7d3,0x7db,0x4ac,0x7e3,0x7eb,0x7f3,0x7f8,0x800,0x807,0x80f,0x4ac,
-0x5f7,0x817,0x81f,0x827,0x82f,0x584,0x83f,0x837,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x847,0x5f7,0x84f,0x853,0x85b,0x5f7,0x861,0x5f7,0x867,0x86f,0x877,0x584,0x584,0x87f,
-0x887,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x88c,0x894,0x5f7,0x5f7,0x89c,0x8a4,0x8ac,0x8b4,0x8bc,0x5f7,0x8c4,0x8cc,0x8d4,
-0x8e4,0x5f7,0x8ec,0x8ee,0x8f6,0x8dc,0x5f7,0x8f9,0x90d,0x901,0x909,0x915,0x5f7,0x91d,0x923,0x92b,
-0x933,0x5f7,0x943,0x94b,0x953,0x93b,0x963,0x4ac,0x96b,0x96e,0x976,0x95b,0x986,0x97e,0x5f7,0x98d,
-0x5f7,0x99c,0x995,0x9a4,0x9ac,0x9b0,0x9b8,0x9c0,0x52c,0x9c8,0x9cb,0x9d1,0x9d8,0x9cb,0x553,0x553,
-0x4dc,0x4dc,0x4dc,0x4dc,0x9e0,0x4dc,0x4dc,0x4dc,0x9f0,0x9f8,0xa00,0xa08,0xa10,0xa14,0xa1c,0x9e8,
-0xa34,0xa3c,0xa24,0xa2c,0xa44,0xa4c,0xa54,0xa5c,0xa74,0xa64,0xa6c,0xa7c,0xa84,0xa93,0xa98,0xa8b,
-0xaa0,0xaa0,0xaa0,0xaa0,0xaa0,0xaa0,0xaa0,0xaa0,0xaa8,0xab0,0x92b,0xab3,0xabb,0xac2,0xac7,0xacf,
-0x92b,0xad6,0xad5,0xae6,0xae9,0x92b,0x92b,0xade,0x92b,0x92b,0x92b,0x92b,0x92b,0xaf8,0xb00,0xaf0,
-0x92b,0x92b,0x92b,0xb05,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0xb0b,0xb13,0x92b,0xb1b,0xb22,
-0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0xaa0,0xaa0,0xaa0,0xaa0,0xb2a,0xaa0,0xb31,0xb38,
-0xaa0,0xaa0,0xaa0,0xaa0,0xaa0,0xaa0,0xaa0,0xaa0,0x92b,0xb40,0xb47,0xb4b,0xb51,0x92b,0x92b,0x92b,
-0x584,0x58c,0x52c,0xb59,0x4dc,0x4dc,0x4dc,0xb61,0x52c,0xb69,0x5f7,0xb6f,0xb7f,0xb77,0xb77,0x553,
-0xb87,0xb8f,0xb97,0x4ac,0xb9f,0x92b,0x92b,0xba6,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0xbae,0xbb4,
-0xbc4,0xbbc,0x638,0x5f7,0xbcc,0x887,0x5f7,0xbd4,0xbdc,0xbe0,0x5f7,0x5f7,0xbe5,0x5f7,0x92b,0xbec,
-0xad0,0xbf4,0xbfa,0x92b,0xbf4,0xc02,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,
-0xc0a,0x5f7,0x5f7,0x5f7,0xc12,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0xc18,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0xc1d,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x92b,0x92b,
-0xc25,0x5f7,0xc28,0x5f7,0xc30,0xc36,0xc3e,0xc46,0xc4b,0x5f7,0x5f7,0xc4f,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0xc56,0x5f7,0xc5d,0xc63,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0xc6b,0x5f7,0x5f7,0x5f7,0xc73,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0xc75,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0xc7c,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0xc83,0x5f7,0x5f7,0x5f7,0xc8a,0xc92,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0xc97,0x5f7,0x5f7,0xc9f,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0xca3,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0xca6,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0xca9,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0xcaf,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0xcb7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0xcbc,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0xcc1,0x5f7,0x5f7,0x5f7,0xcc6,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0xcce,0xcd5,0xcd9,0x5f7,0x5f7,0x5f7,0xce0,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0xcee,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0xce6,0x92b,0xcf6,0x9a4,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0xcfb,0xd03,0x4dc,0xd13,0xd0b,0x5f7,0x5f7,0xd1b,0xd23,0xd33,0x4dc,0xd38,0xd40,0xd46,0xd4d,0xd2b,
-0xd55,0xd5d,0x5f7,0xd65,0xd75,0xd78,0xd6d,0xd80,0x64c,0xd88,0xd8f,0x8ed,0x69a,0xd9f,0xd97,0xda7,
-0x5f7,0xdaf,0xdb7,0xdbf,0x5f7,0xdc7,0xdcf,0xdd7,0xddf,0xde7,0xdeb,0xdf3,0x52c,0x52c,0x5f7,0xdfb,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0xe03,0xe0f,0xe07,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,
-0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,
-0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,
-0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,
-0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,
-0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,
-0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,
-0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,
-0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,
-0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,
-0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,
-0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,
-0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,
-0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,
-0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0x5f7,0x5f7,0x5f7,0xe27,0x5f7,0xce1,0xe2e,0xe33,
-0x5f7,0x5f7,0x5f7,0xe3b,0x5f7,0x5f7,0x8f8,0x4ac,0xe51,0xe41,0xe49,0x5f7,0x5f7,0xe59,0xe61,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0xe66,0x92f,0x5f7,0xe6e,0x5f7,0xe74,0xe78,
-0xe80,0xe88,0xe8f,0xe97,0x5f7,0x5f7,0x5f7,0xe9d,0xeb5,0x49c,0xebd,0xec5,0xeca,0x90d,0xea5,0xead,
-0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,
-0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,0xe17,
-0x12b0,0x12b0,0x12f0,0x1330,0x1370,0x13a8,0x13e8,0x1428,0x1460,0x14a0,0x14cc,0x150c,0x154c,0x155c,0x159c,0x15d0,
-0x1610,0x1640,0x1680,0x16c0,0x16d0,0x1704,0x173c,0x177c,0x17bc,0x17fc,0x1830,0x185c,0x189c,0x18d4,0x18f0,0x1930,
-0xa80,0xac0,0xb00,0xb40,0xb80,0xbab,0xbeb,0xa40,0xc0d,0xa40,0xa40,0xa40,0xa40,0xc4d,0x1db,0x1db,
-0xc8d,0xccd,0xa40,0xa40,0xa40,0xcf6,0xd36,0xd56,0xa40,0xd7c,0xdbc,0xdfc,0xe3c,0xe7c,0xebc,0xefc,
-0xf3c,0xf73,0x1db,0x1db,0xf97,0xfcb,0x1db,0xff3,0x1db,0x1db,0x1db,0x1db,0x1020,0x1db,0x1db,0x1db,
-0x1db,0x1db,0x1db,0x1db,0x1034,0x1db,0x106c,0x10ac,0x1db,0x10b7,0x1db,0x1db,0x1db,0x10ed,0xa40,0x112d,
-0x1db,0x1db,0x116d,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,
+static const uint16_t propsTrie_index[23016]={
+0x495,0x49d,0x4a5,0x4ad,0x4c5,0x4cd,0x4d5,0x4dd,0x4e5,0x4ed,0x4f3,0x4fb,0x503,0x50b,0x513,0x51b,
+0x521,0x529,0x531,0x539,0x53c,0x544,0x54c,0x554,0x55c,0x564,0x560,0x568,0x570,0x578,0x57d,0x585,
+0x58d,0x595,0x599,0x5a1,0x5a9,0x5b1,0x5b9,0x5c1,0x5bd,0x5c5,0x5ca,0x5d2,0x5d8,0x5e0,0x5e8,0x5f0,
+0x5f8,0x600,0x608,0x610,0x615,0x61d,0x620,0x628,0x630,0x638,0x63e,0x646,0x645,0x64d,0x655,0x65d,
+0x66d,0x665,0x675,0x67d,0x683,0x600,0x693,0x68b,0x6a3,0x6a5,0x6ad,0x69b,0x6bd,0x6c3,0x6cb,0x6b5,
+0x6db,0x6e1,0x6e9,0x6d3,0x6f9,0x6ff,0x707,0x6f1,0x717,0x71d,0x725,0x70f,0x735,0x73d,0x745,0x72d,
+0x755,0x75b,0x763,0x74d,0x773,0x779,0x781,0x76b,0x791,0x796,0x79e,0x789,0x7ae,0x7b5,0x7bd,0x7a6,
+0x641,0x7c5,0x7cd,0x4b5,0x7d5,0x7dc,0x7e4,0x4b5,0x7ec,0x7f4,0x7fc,0x801,0x809,0x810,0x818,0x4b5,
+0x600,0x820,0x828,0x830,0x838,0x58d,0x848,0x840,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x850,0x600,0x858,0x85c,0x864,0x600,0x86a,0x600,0x870,0x878,0x880,0x58d,0x58d,0x888,
+0x890,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x895,0x89d,0x600,0x600,0x8a5,0x8ad,0x8b5,0x8bd,0x8c5,0x600,0x8cd,0x8d5,0x8dd,
+0x8ed,0x600,0x8f5,0x8f7,0x8ff,0x8e5,0x600,0x902,0x916,0x90a,0x912,0x91e,0x600,0x926,0x92c,0x934,
+0x93c,0x600,0x94c,0x954,0x95c,0x944,0x96c,0x4b5,0x974,0x977,0x97f,0x964,0x98f,0x987,0x600,0x996,
+0x600,0x9a5,0x99e,0x9ad,0x9b5,0x9b9,0x9c1,0x9c9,0x535,0x9d1,0x9d4,0x9da,0x9e1,0x9d4,0x55c,0x55c,
+0x4e5,0x4e5,0x4e5,0x4e5,0x9e9,0x4e5,0x4e5,0x4e5,0x9f9,0xa01,0xa09,0xa11,0xa19,0xa1d,0xa25,0x9f1,
+0xa3d,0xa45,0xa2d,0xa35,0xa4d,0xa55,0xa5d,0xa65,0xa7d,0xa6d,0xa75,0xa85,0xa8d,0xa9c,0xaa1,0xa94,
+0xaa9,0xaa9,0xaa9,0xaa9,0xaa9,0xaa9,0xaa9,0xaa9,0xab1,0xab9,0x934,0xabc,0xac4,0xacb,0xad0,0xad8,
+0x934,0xadf,0xade,0xaef,0xaf2,0x934,0x934,0xae7,0x934,0x934,0x934,0x934,0x934,0xb01,0xb09,0xaf9,
+0x934,0x934,0x934,0xb0e,0x934,0x934,0x934,0x934,0x934,0x934,0x934,0xb14,0xb1c,0x934,0xb24,0xb2b,
+0x934,0x934,0x934,0x934,0x934,0x934,0x934,0x934,0xaa9,0xaa9,0xaa9,0xaa9,0xb33,0xaa9,0xb3a,0xb41,
+0xaa9,0xaa9,0xaa9,0xaa9,0xaa9,0xaa9,0xaa9,0xaa9,0x934,0xb49,0xb50,0xb54,0xb5a,0x934,0x934,0x934,
+0x58d,0x595,0x535,0xb62,0x4e5,0x4e5,0x4e5,0xb6a,0x535,0xb72,0x600,0xb78,0xb88,0xb80,0xb80,0x55c,
+0xb90,0xb98,0xba0,0x4b5,0xba8,0x934,0x934,0xbaf,0x934,0x934,0x934,0x934,0x934,0x934,0xbb7,0xbbd,
+0xbcd,0xbc5,0x641,0x600,0xbd5,0x890,0x600,0xbdd,0xbe5,0xbe9,0x600,0x600,0xbee,0x600,0x934,0xbf5,
+0xad9,0xbfd,0xc03,0x934,0xbfd,0xc0b,0x934,0x934,0x934,0x934,0x934,0x934,0x934,0x934,0x934,0x934,
+0xc13,0x600,0x600,0x600,0xc1b,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0xc21,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0xc26,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x934,0x934,
+0xc2e,0x600,0xc31,0x600,0xc39,0xc3f,0xc47,0xc4f,0xc54,0x600,0x600,0xc58,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0xc5f,0x600,0xc66,0xc6c,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0xc74,0x600,0x600,0x600,0xc7c,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0xc7e,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0xc85,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0xc8c,0x600,0x600,0x600,0xc93,0xc9b,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0xca0,0x600,0x600,0xca8,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0xcac,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0xcaf,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0xcb2,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0xcb8,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0xcc0,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0xcc5,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0xcca,0x600,0x600,0x600,0xccf,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0xcd7,0xcde,0xce2,0x600,0x600,0x600,0xce9,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0xcf7,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0xcef,0x934,0xcff,0x9ad,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0xd04,0xd0c,0x4e5,0xd1c,0xd14,0x600,0x600,0xd24,0xd2c,0xd3c,0x4e5,0xd41,0xd49,0xd4f,0xd56,0xd34,
+0xd5e,0xd66,0x600,0xd6e,0xd7e,0xd81,0xd76,0xd89,0x655,0xd91,0xd98,0x8f6,0x6a3,0xda8,0xda0,0xdb0,
+0x600,0xdb8,0xdc0,0xdc8,0x600,0xdd0,0xdd8,0xde0,0xde8,0xdf0,0xdf4,0xdfc,0x535,0x535,0x600,0xe04,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0xe0c,0xe18,0xe10,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,
+0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,
+0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,
+0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,
+0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,
+0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,
+0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,
+0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,
+0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,
+0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,
+0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,
+0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,
+0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,
+0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,
+0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0x600,0x600,0x600,0xe30,0x600,0xcea,0xe37,0xe3c,
+0x600,0x600,0x600,0xe44,0x600,0x600,0x901,0x4b5,0xe5a,0xe4a,0xe52,0x600,0x600,0xe62,0xe6a,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0xe6f,0x938,0x600,0xe77,0x600,0xe7d,0xe81,
+0xe89,0xe91,0xe98,0xea0,0x600,0x600,0x600,0xea6,0xebe,0x4a5,0xec6,0xece,0xed3,0x916,0xeae,0xeb6,
+0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,
+0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,0xe20,
+0x12d4,0x12d4,0x1314,0x1354,0x1394,0x13cc,0x140c,0x144c,0x1484,0x14c4,0x14f0,0x1530,0x1570,0x1580,0x15c0,0x15f4,
+0x1634,0x1664,0x16a4,0x16e4,0x16f4,0x1728,0x1760,0x17a0,0x17e0,0x1820,0x1854,0x1880,0x18c0,0x18f8,0x1914,0x1954,
+0xa80,0xac0,0xb00,0xb40,0xb80,0xbab,0xbeb,0xa40,0xc0e,0xa40,0xa40,0xa40,0xa40,0xc4e,0x1db,0x1db,
+0xc8e,0xcce,0xa40,0xa40,0xa40,0xcf7,0xd37,0xd57,0xa40,0xd7d,0xdbd,0xdfd,0xe3d,0xe7d,0xebd,0xefd,
+0xf3d,0xf74,0x1db,0x1db,0xf98,0xfcc,0x1db,0xff4,0x1db,0x1db,0x1db,0x1db,0x1021,0x1db,0x1db,0x1db,
+0x1db,0x1db,0x1db,0x1db,0x1035,0x1db,0x106d,0x10ad,0x1db,0x10b8,0x1db,0x1db,0x1db,0x10ee,0xa40,0x112e,
+0x1db,0x1db,0x116e,0x1db,0x1191,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,
0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,
0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,
0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,
@@ -170,1033 +170,1044 @@ static const uint16_t propsTrie_index[22688]={
0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,
0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,
0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,
-0x11ad,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,
+0x11d1,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,
0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,
0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,
-0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x11ed,
+0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x1211,
0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,
-0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x11ed,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0xed2,0xed9,0xee1,0x4ac,0x5f7,0x5f7,0x5f7,0xee9,0xef9,0xef1,0xf10,0xf01,0xf08,0xf18,0xf1c,0xf20,
-0x4ac,0x4ac,0x4ac,0x4ac,0x8ed,0x5f7,0xf28,0xf30,0x5f7,0xf38,0xf40,0xf44,0xf4c,0x5f7,0xf54,0x4ac,
-0x584,0x58e,0xf5c,0x5f7,0xf60,0xf68,0xf78,0xf70,0x5f7,0xf80,0x5f7,0xf87,0xf97,0xf8f,0x4ac,0x4ac,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0xb7f,0x8f9,0xf9f,0xfaf,0xfa7,0x4ac,0x4ac,
-0xfbf,0xfb7,0xfc2,0xfca,0x90d,0xfd2,0x4ac,0xfda,0xfe2,0xfea,0x4ac,0x4ac,0x5f7,0xffa,0x1002,0xff2,
-0x1012,0x1019,0x100a,0x1021,0x1029,0x4ac,0x1039,0x1031,0x5f7,0x103c,0x1044,0x104c,0x1054,0x105c,0x4ac,0x4ac,
-0x5f7,0x5f7,0x1064,0x4ac,0x584,0x106c,0x52c,0x1074,0x5f7,0x107c,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x1084,0x5f7,0x108c,0x4ac,0x4ac,0x1094,0x109c,0x10a3,0xfd6,0x10ab,0xfd6,0x10b3,0xb7f,
-0x10c3,0x62d,0x10cb,0x10bb,0x986,0x10d3,0x10db,0x10e1,0x10f9,0x10e9,0x10f1,0x10fd,0x986,0x110d,0x1105,0x1115,
-0x1125,0x111d,0x4ac,0x4ac,0x112c,0x1134,0x64f,0x113c,0x114c,0x1152,0x115a,0x1144,0x4ac,0x4ac,0x4ac,0x4ac,
-0x5f7,0x1162,0x116a,0x1172,0x5f7,0x117a,0x1182,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x5f7,0x118a,0x1192,0x4ac,
-0x5f7,0x119a,0x11a2,0x11aa,0x5f7,0x11ba,0x11b2,0x4ac,0x867,0x11c2,0x11ca,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x5f7,0x11d2,0x4ac,0x4ac,0x4ac,0x584,0x52c,0x11da,0x11ea,0x11f0,0x11e2,0x4ac,0x4ac,0x1200,0x1204,0x11f8,
-0x121c,0x120c,0x1214,0x5f7,0x122a,0x1224,0x5f7,0x8ee,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x1240,0x1245,0x1232,0x123a,0x1255,0x124d,0x4ac,0x4ac,0x1264,0x1268,0x125c,0x1278,0x1270,0x11b2,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x127c,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x1283,0x1293,0x128b,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x8f8,0x4ac,0x4ac,0x4ac,
-0x12a3,0x12ab,0x12b3,0x129b,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x12bb,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0xfd6,0x5f7,0x5f7,0x12c3,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x12cb,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x11ca,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x8ee,0x90d,
-0xd9b,0x5f7,0x90d,0x12d3,0x12d8,0x5f7,0x12e8,0x12f0,0x12f8,0x12e0,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x584,
-0x52c,0x1300,0x4ac,0x4ac,0x4ac,0x5f7,0x5f7,0x1308,0x130d,0x1313,0x4ac,0x4ac,0x131b,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x1323,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x8f9,0x4ac,0x1064,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x1329,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x1331,
-0x1335,0x133c,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0xe07,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x5f7,0x5f7,0x5f7,0x1342,0x1347,0x134f,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x553,0x135f,0x1366,0x92b,0x92b,0x92b,0x1357,0x4ac,0x92b,0x92b,0x92b,0x92b,
-0x92b,0x92b,0x92b,0xbae,0x92b,0x136d,0x92b,0x1374,0x137c,0x1382,0x92b,0xad5,0x92b,0x92b,0x138a,0x4ac,
-0x4ac,0x4ac,0x4ac,0x1392,0x92b,0x92b,0xad2,0x139a,0x4ac,0x4ac,0x4ac,0x4ac,0x13aa,0x13b1,0x13b6,0x13bc,
-0x13c4,0x13cc,0x13d4,0x13ae,0x13dc,0x13e4,0x13ec,0x13f1,0x13c3,0x13aa,0x13b1,0x13ad,0x13bc,0x13f9,0x13ab,0x13fc,
-0x13ae,0x1404,0x140c,0x1414,0x141b,0x1407,0x140f,0x1417,0x141e,0x140a,0x1426,0x13a2,0x92b,0x92b,0x92b,0x92b,
-0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x553,0x1436,0x553,0x143d,
-0x1444,0x142e,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x1453,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x144b,0x145b,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x5f7,0x146b,0x1463,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0xfd6,0x1473,0x5f7,0x147b,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x1483,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x148b,0x4ac,0x584,0x149b,0x1493,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x14a3,
-0x14b3,0x14ab,0x4ac,0x4ac,0x14c3,0x14bb,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x14d3,0x14db,0x14e3,0x14eb,
-0x14f3,0x14fb,0x4ac,0x14cb,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x92b,0x1503,0x92b,0x92b,
-0xba6,0x136b,0x150b,0xbae,0x1513,0x92b,0x92b,0x92b,0x92b,0xbb0,0x4ac,0x151b,0x1523,0x1527,0x152f,0x1537,
-0x4ac,0x4ac,0x4ac,0x4ac,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x153f,0x92b,0x92b,0x92b,0x92b,
-0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,
-0x92b,0x92b,0x1547,0x154f,0x92b,0x92b,0x92b,0xba6,0x92b,0x92b,0x1557,0x155f,0x1503,0x92b,0x1567,0x92b,
-0x156f,0x1574,0x4ac,0x4ac,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0x92b,0xba6,0x157c,
-0x1588,0x158d,0x1595,0x1584,0x92b,0x92b,0x92b,0x92b,0x159d,0x92b,0xad5,0x11ae,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x15a5,0x5f7,0x5f7,0x15ac,
-0x5f7,0x5f7,0x5f7,0x15b4,0x5f7,0x15bc,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0xc87,0x5f7,0x5f7,0x15c4,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x15cc,0x15d4,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0xcc6,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x15db,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x15e2,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x15e9,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x4ac,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x8ee,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0xf60,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x1090,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x15f1,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x5f7,0x5f7,0x5f7,
-0x5f7,0x15f9,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0xf60,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,
-0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x15ff,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x160f,0x1607,0x1607,
-0x1607,0x4ac,0x4ac,0x4ac,0x4ac,0x553,0x553,0x553,0x553,0x553,0x553,0x553,0x1617,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,
-0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0x4ac,0xe1f,0xe1f,0xe1f,
-0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,
-0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,
-0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,
-0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0x161f,0x48b,0x48b,0x48b,
-0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,
+0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x700,0x1211,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0xedb,0xee2,0xeea,0x4b5,0x600,0x600,0x600,0xef2,0xf02,0xefa,0xf19,0xf0a,0xf11,0xf21,0xf25,0xf29,
+0x4b5,0x4b5,0x4b5,0x4b5,0x8f6,0x600,0xf31,0xf39,0x600,0xf41,0xf49,0xf4d,0xf55,0x600,0xf5d,0x4b5,
+0x58d,0x597,0xf65,0x600,0xf69,0xf71,0xf81,0xf79,0x600,0xf89,0x600,0xf90,0xfa0,0xf98,0x4b5,0x4b5,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0xb88,0x902,0xfa8,0xfb8,0xfb0,0x4b5,0x4b5,
+0xfc8,0xfc0,0xfcb,0xfd3,0x916,0xfdb,0x4b5,0xfe3,0xfeb,0xff3,0x4b5,0x4b5,0x600,0x1003,0x100b,0xffb,
+0x101b,0x1022,0x1013,0x102a,0x1032,0x4b5,0x1042,0x103a,0x600,0x1045,0x104d,0x1055,0x105d,0x1065,0x4b5,0x4b5,
+0x600,0x600,0x106d,0x4b5,0x58d,0x1075,0x535,0x107d,0x600,0x1085,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x108d,0x600,0x1095,0x4b5,0x109a,0x10a2,0x10aa,0x10b1,0xfdf,0x10b9,0xfdf,0x10c1,0xb88,
+0x10d1,0x636,0x10d9,0x10c9,0x98f,0x10e1,0x10e9,0x10ef,0x1107,0x10f7,0x10ff,0x110b,0x98f,0x111b,0x1113,0x1123,
+0x113b,0x112b,0x1133,0x4b5,0x1142,0x114a,0x658,0x1152,0x1162,0x1168,0x1170,0x115a,0x4b5,0x4b5,0x4b5,0x4b5,
+0x600,0x1178,0x1180,0x1099,0x600,0x1188,0x1190,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x600,0x1198,0x11a0,0x4b5,
+0x600,0x11a8,0x11b0,0x11b8,0x600,0x11c8,0x11c0,0x4b5,0x870,0x11d0,0x11d8,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x600,0x11e0,0x4b5,0x4b5,0x4b5,0x58d,0x535,0x11e8,0x11f8,0x11fe,0x11f0,0x4b5,0x4b5,0x120e,0x1212,0x1206,
+0x122a,0x121a,0x1222,0x600,0x1238,0x1232,0x600,0x8f7,0x1248,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x1256,0x125b,0x1240,0x1250,0x126b,0x1263,0x4b5,0x4b5,0x127a,0x127e,0x1272,0x128e,0x1286,0x11c0,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x1292,0x12a2,0x12a7,0x129a,0x4b5,0x4b5,0x12af,0x12bf,0x12b7,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x901,0x4b5,0x4b5,0x4b5,
+0x12cf,0x12d7,0x12df,0x12c7,0x600,0x600,0x600,0x600,0x600,0x600,0x12e7,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0xfdf,0x600,0x600,0x12ef,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x12f7,0x12ff,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x11d8,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x8f7,
+0x916,0xda4,0x600,0x916,0x1307,0x130c,0x600,0x131c,0x1324,0x132c,0x1314,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x58d,0x535,0x1334,0x4b5,0x4b5,0x4b5,0x600,0x600,0x133c,0x1341,0x1347,0x4b5,0x4b5,0x134f,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x1357,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x902,0x4b5,0x106d,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x135d,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x1365,0x136a,0x1371,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0xe10,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x600,0x600,0x600,0x1377,0x137c,0x1384,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x55c,0x1394,0x139b,0x934,0x934,0x934,0x138c,0x4b5,0x934,0x934,0x934,
+0x934,0x934,0x934,0x934,0xbb7,0x934,0x13a2,0x934,0x13a9,0x13b1,0x13b7,0x934,0xade,0x934,0x934,0x13bf,
+0x4b5,0x4b5,0x4b5,0x13c7,0x13c7,0x934,0x934,0xadb,0x13cf,0x4b5,0x4b5,0x4b5,0x4b5,0x13df,0x13e6,0x13eb,
+0x13f1,0x13f9,0x1401,0x1409,0x13e3,0x1411,0x1419,0x1421,0x1426,0x13f8,0x13df,0x13e6,0x13e2,0x13f1,0x142e,0x13e0,
+0x1431,0x13e3,0x1439,0x1441,0x1449,0x1450,0x143c,0x1444,0x144c,0x1453,0x143f,0x145b,0x13d7,0x934,0x934,0x934,
+0x934,0x934,0x934,0x934,0x934,0x934,0x934,0x934,0x934,0x934,0x934,0x934,0x934,0x55c,0x146b,0x55c,
+0x1472,0x1479,0x1463,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x1488,0x1490,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x1480,0x1498,0x9d4,
+0x14a8,0x14a0,0x4b5,0x4b5,0x4b5,0x600,0x14b8,0x14b0,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0xfdf,0x14c0,0x600,0x14c8,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0xfdf,0x14d0,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x14d8,0x600,0x600,0x600,
+0x600,0x600,0x600,0x14e0,0x4b5,0x58d,0x14f0,0x14e8,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x14f8,0x1508,0x1500,0x4b5,0x4b5,0x1518,0x1510,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x1528,0x1530,0x1538,
+0x1540,0x1548,0x1550,0x4b5,0x1520,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x934,0x1558,0x934,
+0x934,0xbaf,0x13a0,0x1560,0xbb7,0x1568,0x934,0x934,0x934,0x934,0xbb9,0x4b5,0x1570,0x1578,0x157c,0x1584,
+0x158c,0x4b5,0x4b5,0x4b5,0x4b5,0x934,0x934,0x934,0x934,0x934,0x934,0x934,0x1594,0x934,0x934,0x934,
+0x934,0x934,0x934,0x934,0x934,0x934,0x934,0x934,0x934,0x934,0x934,0x934,0x934,0x934,0x934,0x934,
+0x934,0x934,0x934,0x157d,0x159c,0x934,0x934,0x934,0x15a4,0x934,0x934,0x15ab,0x15b3,0x1558,0x934,0x15bb,
+0x934,0x15c3,0x15c8,0x4b5,0x4b5,0x934,0x934,0x934,0x934,0x934,0x934,0x934,0x934,0x934,0x934,0xbaf,
+0x15d0,0x15d9,0x15dd,0x15e5,0x15d5,0x934,0x934,0x934,0x934,0x15ed,0x934,0xade,0x11bc,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x15f5,0x600,0x600,
+0x15fc,0x600,0x600,0x600,0x1604,0x600,0x160c,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0xc90,0x600,0x600,
+0x1614,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x161c,0x1624,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0xccf,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x162b,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x1632,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x1639,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x4b5,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x901,0x600,0x600,0x600,0x600,0x600,0x600,0xf69,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x1641,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x1649,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x600,0x600,
+0x600,0x600,0x1651,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0xf69,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x67d,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x1314,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x1661,0x1659,0x1659,0x1659,0x4b5,0x4b5,0x4b5,0x4b5,0x55c,0x55c,0x55c,0x55c,0x55c,0x55c,0x55c,
+0x1669,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,0x4b5,
+0x4b5,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,
+0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,
+0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,
+0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,
+0x1671,0x494,0x494,0x494,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,
0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,
-0xc,0x17,0x17,0x17,0x19,0x17,0x17,0x17,0x14,0x15,0x17,0x18,0x17,0x13,0x17,0x17,
-0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0x17,0x17,0x18,0x18,0x18,0x17,
-0x17,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
-1,1,1,1,1,1,1,1,1,1,1,0x14,0x17,0x15,0x1a,0x16,
-0x1a,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
-2,2,2,2,2,2,2,2,2,2,2,0x14,0x18,0x15,0x18,0xf,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0xf,0xf,0xf,0xf,0xc,0x17,0x17,0x17,0x19,0x17,0x17,0x17,0x14,0x15,0x17,0x18,
+0x17,0x13,0x17,0x17,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0x17,0x17,
+0x18,0x18,0x18,0x17,0x17,1,1,1,1,1,1,1,1,1,1,1,
+1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0x14,
+0x17,0x15,0x1a,0x16,0x1a,2,2,2,2,2,2,2,2,2,2,2,
+2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,0x14,
+0x18,0x15,0x18,0xf,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,
0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,
-0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,
-0xc,0x17,0x19,0x19,0x19,0x19,0x1b,0x17,0x1a,0x1b,5,0x1c,0x18,0x10,0x1b,0x1a,
-0x1b,0x18,0x34b,0x38b,0x1a,2,0x17,0x17,0x1a,0x30b,5,0x1d,0x34cb,0x344b,0x3ccb,0x17,
-1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
-1,1,1,1,1,1,1,0x18,1,1,1,1,1,1,1,2,
-2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
-2,2,2,2,2,2,2,0x18,2,2,2,2,2,2,2,2,
-1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,
-1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,
-1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,
-1,2,1,2,1,2,1,2,2,1,2,1,2,1,2,1,
-2,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,
+0xf,0xf,0xf,0xf,0xc,0x17,0x19,0x19,0x19,0x19,0x1b,0x17,0x1a,0x1b,5,0x1c,
+0x18,0x10,0x1b,0x1a,0x1b,0x18,0x34b,0x38b,0x1a,2,0x17,0x17,0x1a,0x30b,5,0x1d,
+0x34cb,0x344b,0x3ccb,0x17,1,1,1,1,1,1,1,1,1,1,1,1,
+1,1,1,1,1,1,1,1,1,1,1,0x18,1,1,1,1,
+1,1,1,2,2,2,2,2,2,2,2,2,2,2,2,2,
+2,2,2,2,2,2,2,2,2,2,2,0x18,2,2,2,2,
+2,2,2,2,1,2,1,2,1,2,1,2,1,2,1,2,
1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,
1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,
-1,1,2,1,2,1,2,2,2,1,1,2,1,2,1,1,
-2,1,1,1,2,2,1,1,1,1,2,1,1,2,1,1,
-1,2,2,2,1,1,2,1,1,2,1,2,1,2,1,1,
-2,1,2,2,1,2,1,1,2,1,1,1,2,1,2,1,
-1,2,2,5,1,2,2,2,5,5,5,5,1,3,2,1,
-3,2,1,3,2,1,2,1,2,1,2,1,2,1,2,1,
+1,2,1,2,1,2,1,2,1,2,1,2,2,1,2,1,
2,1,2,1,2,2,1,2,1,2,1,2,1,2,1,2,
-1,2,1,2,1,2,1,2,2,1,3,2,1,2,1,1,
1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,
1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,
+1,2,1,2,1,1,2,1,2,1,2,2,2,1,1,2,
+1,2,1,1,2,1,1,1,2,2,1,1,1,1,2,1,
+1,2,1,1,1,2,2,2,1,1,2,1,1,2,1,2,
+1,2,1,1,2,1,2,2,1,2,1,1,2,1,1,1,
+2,1,2,1,1,2,2,5,1,2,2,2,5,5,5,5,
+1,3,2,1,3,2,1,3,2,1,2,1,2,1,2,1,
+2,1,2,1,2,1,2,1,2,2,1,2,1,2,1,2,
+1,2,1,2,1,2,1,2,1,2,1,2,2,1,3,2,
+1,2,1,1,1,2,1,2,1,2,1,2,1,2,1,2,
1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,
-1,2,1,2,2,2,2,2,2,2,1,1,2,1,1,2,
-2,1,2,1,1,1,1,2,1,2,1,2,1,2,1,2,
+1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,
+1,2,1,2,1,2,1,2,2,2,2,2,2,2,1,1,
+2,1,1,2,2,1,2,1,1,1,1,2,1,2,1,2,
+1,2,1,2,2,2,2,2,2,2,2,2,2,2,2,2,
2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
+2,2,2,2,5,2,2,2,2,2,2,2,2,2,2,2,
2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
-5,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
-2,2,2,2,2,2,2,2,2,2,2,2,4,4,4,4,
-4,4,4,4,4,4,4,4,4,4,4,4,4,4,0x1a,0x1a,
-0x1a,0x1a,4,4,4,4,4,4,4,4,4,4,4,4,0x1a,0x1a,
-0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,4,4,4,4,
-4,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,4,0x1a,4,0x1a,0x1a,0x1a,0x1a,0x1a,
-0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,6,6,6,6,
+4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,
+4,4,0x1a,0x1a,0x1a,0x1a,4,4,4,4,4,4,4,4,4,4,
+4,4,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,
+4,4,4,4,4,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,4,0x1a,4,0x1a,
+0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,
+6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,
6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,
6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,
6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,
-6,6,6,6,6,6,6,6,6,6,6,6,1,2,1,2,
-4,0x1a,1,2,0,0,4,2,2,2,0x17,1,0,0,0,0,
-0x1a,0x1a,1,0x17,1,1,1,0,1,0,1,1,2,1,1,1,
-1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,
-1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,
-2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,
-2,2,1,1,1,2,2,2,1,2,1,2,1,2,1,2,
+1,2,1,2,4,0x1a,1,2,0,0,4,2,2,2,0x17,1,
+0,0,0,0,0x1a,0x1a,1,0x17,1,1,1,0,1,0,1,1,
+2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+1,1,0,1,1,1,1,1,1,1,1,1,2,2,2,2,
+2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
+2,2,2,1,2,2,1,1,1,2,2,2,1,2,1,2,
1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,
-2,2,2,2,1,2,0x18,1,2,1,1,2,2,1,1,1,
-1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+1,2,1,2,2,2,2,2,1,2,0x18,1,2,1,1,2,
+2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+1,1,1,1,2,2,2,2,2,2,2,2,2,2,2,2,
2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
-2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
-1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,
+2,2,2,2,1,2,1,2,1,2,1,2,1,2,1,2,
1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,
-1,2,0x1b,6,6,6,6,6,7,7,1,2,1,2,1,2,
+1,2,1,2,1,2,0x1b,6,6,6,6,6,7,7,1,2,
1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,
1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,
1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,
-1,1,2,1,2,1,2,1,2,1,2,1,2,1,2,2,
+1,2,1,2,1,1,2,1,2,1,2,1,2,1,2,1,
+2,1,2,2,1,2,1,2,1,2,1,2,1,2,1,2,
1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,
1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,
-1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,
-0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
-1,1,1,1,1,1,1,1,1,1,1,0,0,4,0x17,0x17,
-0x17,0x17,0x17,0x17,2,2,2,2,2,2,2,2,2,2,2,2,
+1,2,1,2,0,1,1,1,1,1,1,1,1,1,1,1,
+1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,
+0,4,0x17,0x17,0x17,0x17,0x17,0x17,2,2,2,2,2,2,2,2,
2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
-2,2,2,2,2,0x17,0x13,0,0,0x1b,0x1b,0x19,0,6,6,6,
-6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,
+2,2,2,2,2,2,2,2,2,0x17,0x13,0,0,0x1b,0x1b,0x19,
+0,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,
6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,
-6,6,6,6,6,6,6,6,6,6,0x13,6,0x17,6,6,0x17,
-6,6,0x17,6,0,0,0,0,0,0,0,0,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,0,0,0,0,5,5,5,5,0x17,
-0x17,0,0,0,0,0,0,0,0,0,0,0,0x10,0x10,0x10,0x10,
-0x10,0x10,0x18,0x18,0x18,0x17,0x17,0x19,0x17,0x17,0x1b,0x1b,6,6,6,6,
-6,6,6,6,6,6,6,0x17,0x10,0x17,0x17,0x17,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,4,5,5,5,
-5,5,5,5,5,5,5,6,6,6,6,6,6,6,6,6,
-6,6,6,6,6,6,6,6,6,6,6,6,0x49,0x89,0xc9,0x109,
-0x149,0x189,0x1c9,0x209,0x249,0x289,0x17,0x17,0x17,0x17,5,5,6,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+6,6,6,6,6,6,6,6,6,6,6,6,6,6,0x13,6,
+0x17,6,6,0x17,6,6,0x17,6,0,0,0,0,0,0,0,0,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,0,0,0,0,5,
+5,5,5,0x17,0x17,0,0,0,0,0,0,0,0,0,0,0,
+0x10,0x10,0x10,0x10,0x10,0x10,0x18,0x18,0x18,0x17,0x17,0x19,0x17,0x17,0x1b,0x1b,
+6,6,6,6,6,6,6,6,6,6,6,0x17,0x10,0x17,0x17,0x17,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-0x17,5,6,6,6,6,6,6,6,0x10,0x1b,6,6,6,6,6,
-6,4,4,6,6,0x1b,6,6,6,6,5,5,0x49,0x89,0xc9,0x109,
-0x149,0x189,0x1c9,0x209,0x249,0x289,5,5,5,0x1b,0x1b,5,0x17,0x17,0x17,0x17,
-0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0,0x10,5,6,5,5,
+4,5,5,5,5,5,5,5,5,5,5,6,6,6,6,6,
+6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,
+0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0x17,0x17,0x17,0x17,5,5,
+6,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,6,6,6,6,
-6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,0,
-0,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,6,6,6,6,6,6,6,6,6,6,
-6,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,5,5,5,5,5,5,
+5,5,5,5,0x17,5,6,6,6,6,6,6,6,0x10,0x1b,6,
+6,6,6,6,6,4,4,6,6,0x1b,6,6,6,6,5,5,
+0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,5,5,5,0x1b,0x1b,5,
+0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0,0x10,
+5,6,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,6,6,6,6,6,
-6,6,6,6,4,4,0x1b,0x17,0x17,0x17,4,0,0,6,0x19,0x19,
-6,6,6,6,4,6,6,6,4,6,6,6,6,6,0,0,
-0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0,
+6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,
+6,6,6,0,0,5,5,5,5,5,5,5,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,6,6,6,6,4,6,6,6,6,6,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,6,6,6,0,0,0x17,0,
-5,5,5,5,5,5,5,5,5,5,5,0,0,0,0,0,
+5,5,5,5,5,5,5,5,5,5,6,6,6,6,6,6,
+6,6,6,6,6,5,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-0x1a,5,5,5,5,5,5,0,0x10,0x10,0,0,0,0,0,0,
-6,6,6,6,6,6,6,6,6,6,0x10,6,6,6,6,6,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,6,
+6,6,6,6,6,6,6,6,4,4,0x1b,0x17,0x17,0x17,4,0,
+0,6,0x19,0x19,6,6,6,6,4,6,6,6,4,6,6,6,
+6,6,0,0,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,
+0x17,0x17,0x17,0,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,6,6,6,6,4,6,
+6,6,6,6,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,6,6,6,
+0,0,0x17,0,5,5,5,5,5,5,5,5,5,5,5,0,
+0,0,0,0,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,0x1a,5,5,5,5,5,5,0,0x10,0x10,0,0,
+0,0,0,0,6,6,6,6,6,6,6,6,6,6,0x10,6,
6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,
-6,6,6,6,6,6,6,6,5,5,5,5,5,5,5,5,
-5,4,6,6,6,6,6,6,6,6,6,6,6,6,6,6,
-6,6,6,6,6,6,6,6,5,5,6,6,0x17,0x17,0x49,0x89,
-0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0x17,4,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,6,6,6,8,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,6,8,6,5,8,8,
-8,6,6,6,6,6,6,6,6,8,8,8,8,6,8,8,
-5,6,6,6,6,6,6,6,5,5,5,5,5,5,5,5,
-5,5,6,6,0,0,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,
-5,5,0x19,0x19,0x37cb,0x35cb,0x3fcb,0x34cb,0x3ccb,0x94b,0x1b,0x19,5,0x17,6,0,
-5,6,8,8,0,5,5,5,5,5,5,5,5,0,0,5,
-5,0,0,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,0,5,5,5,5,5,5,5,0,5,0,0,0,5,5,
-5,5,0,0,6,5,8,8,8,6,6,6,6,0,0,8,
-8,0,0,8,8,6,5,0,0,0,0,0,0,0,0,8,
-0,0,0,0,5,5,0,5,0,0,0,0,0,0,0x49,0x89,
-0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,6,6,5,5,5,6,0x17,0,
-0,0,0,0,0,0,0,0,0,6,6,8,0,5,5,5,
-5,5,5,0,0,0,0,5,5,0,0,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,0,5,5,5,5,5,5,
-5,0,5,5,0,5,5,0,5,5,0,0,6,0,8,8,
-8,6,6,0,0,0,0,6,6,0,0,6,6,6,0,0,
-0,6,0,0,0,0,0,0,0,5,5,5,5,0,5,0,
-5,5,6,6,0,0,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,
-0x17,0x19,0,0,0,0,0,0,0,5,6,6,6,6,6,6,
-0,6,6,8,0,5,5,5,5,5,5,5,5,5,0,5,
-5,5,0,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,0,5,5,5,5,5,5,5,0,5,5,0,5,5,5,
-5,5,0,0,6,5,8,8,8,6,6,6,6,6,0,6,
-6,8,0,8,8,6,0,0,5,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,5,5,6,6,0,0,0x49,0x89,
-0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0x1b,5,0x34cb,0x344b,0x3ccb,0x37cb,0x35cb,0x3fcb,
+6,6,6,6,6,6,6,6,6,6,6,6,5,5,5,5,
+5,5,5,5,5,4,6,6,6,6,6,6,6,6,6,6,
+6,6,6,6,6,6,6,6,6,6,6,6,5,5,6,6,
+0x17,0x17,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0x17,4,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,6,6,6,8,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,6,8,
+6,5,8,8,8,6,6,6,6,6,6,6,6,8,8,8,
+8,6,8,8,5,6,6,6,6,6,6,6,5,5,5,5,
+5,5,5,5,5,5,6,6,0,0,0x49,0x89,0xc9,0x109,0x149,0x189,
+0x1c9,0x209,0x249,0x289,5,5,0x19,0x19,0x37cb,0x35cb,0x3fcb,0x34cb,0x3ccb,0x94b,0x1b,0x19,
+5,0x17,6,0,5,6,8,8,0,5,5,5,5,5,5,5,
+5,0,0,5,5,0,0,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,0,5,5,5,5,5,5,5,0,5,0,
+0,0,5,5,5,5,0,0,6,5,8,8,8,6,6,6,
+6,0,0,8,8,0,0,8,8,6,5,0,0,0,0,0,
+0,0,0,8,0,0,0,0,5,5,0,5,0,0,0,0,
+0,0,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,6,6,5,5,
+5,6,0x17,0,0,0,0,0,0,0,0,0,0,6,6,8,
+0,5,5,5,5,5,5,0,0,0,0,5,5,0,0,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,0,5,5,
+5,5,5,5,5,0,5,5,0,5,5,0,5,5,0,0,
+6,0,8,8,8,6,6,0,0,0,0,6,6,0,0,6,
+6,6,0,0,0,6,0,0,0,0,0,0,0,5,5,5,
+5,0,5,0,5,5,6,6,0,0,0x49,0x89,0xc9,0x109,0x149,0x189,
+0x1c9,0x209,0x249,0x289,0x17,0x19,0,0,0,0,0,0,0,5,6,6,
+6,6,6,6,0,6,6,8,0,5,5,5,5,5,5,5,
+5,5,0,5,5,5,0,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,0,5,5,5,5,5,5,5,0,5,5,
+0,5,5,5,5,5,0,0,6,5,8,8,8,6,6,6,
+6,6,0,6,6,8,0,8,8,6,0,0,5,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,5,5,6,6,
+0,0,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0x1b,5,0x34cb,0x344b,
+0x3ccb,0x37cb,0x35cb,0x3fcb,0,0,0,0,0,0,0,0,0,6,8,8,
+0,5,5,5,5,5,5,5,5,0,0,5,5,0,0,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,0,5,5,
+5,5,5,5,5,0,5,5,0,5,5,5,5,5,0,0,
+6,5,8,6,8,6,6,6,6,0,0,8,8,0,0,8,
+8,6,0,0,0,0,0,0,0,6,6,8,0,0,0,0,
+5,5,0,5,0,0,0,0,0,0,0x49,0x89,0xc9,0x109,0x149,0x189,
+0x1c9,0x209,0x249,0x289,0x7cb,0x1e4b,0x784b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x19,0x1b,0,
+0,0,0,0,0,0,6,5,0,5,5,5,5,5,5,0,
+0,0,5,5,5,0,5,5,5,5,0,0,0,5,5,0,
+5,0,5,5,0,0,0,5,5,0,0,0,5,5,5,0,
+0,0,5,5,5,5,5,5,5,5,5,5,5,5,0,0,
+0,0,8,8,6,8,8,0,0,0,8,8,8,0,8,8,
+8,6,0,0,5,0,0,0,0,0,0,8,0,0,0,0,
+0,0,0,0,5,5,6,6,0,0,0x49,0x89,0xc9,0x109,0x149,0x189,
+0x1c9,0x209,0x249,0x289,0,0,0,0,0,0,0,0x17,0x54b,0x58b,0x5cb,0x60b,
+0x58b,0x5cb,0x60b,0x1b,6,8,8,8,6,5,5,5,5,5,5,5,
+5,0,5,5,5,0,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,0,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,0,0,6,5,6,6,6,8,8,8,
+8,0,6,6,6,0,6,6,6,6,0,0,0,0,0,0,
+0,6,6,0,5,5,5,0,0,5,0,0,5,5,6,6,
+0,0,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0,5,5,8,
+0,0,0,0,0,0,0,0,0,0,0,0,5,6,8,8,
+0x17,5,5,5,5,5,5,5,5,0,5,5,5,0,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,0,5,5,
+5,5,5,5,5,5,5,5,0,5,5,5,5,5,0,0,
+6,5,8,6,8,8,8,8,8,0,6,8,8,0,8,8,
+6,6,0,0,0,0,0,0,0,8,8,0,0,0,0,0,
+0,5,5,0,5,5,6,6,0,0,0x49,0x89,0xc9,0x109,0x149,0x189,
+0x1c9,0x209,0x249,0x289,0x7cb,0x1e4b,0x784b,0x34cb,0x344b,0x3ccb,0x37cb,0x35cb,0x3fcb,0x1b,5,5,
+5,5,5,5,6,6,8,8,5,5,5,5,5,5,5,5,
+5,0,5,5,5,0,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,6,6,5,8,8,8,6,6,6,6,0,8,8,
+8,0,8,8,8,6,5,0x1b,0,0,0,0,5,5,5,8,
+0xcc0b,0xca0b,0xcb4b,0xc90b,0x364b,0xc94b,0x350b,5,0,0,0,0,0,0,0x49,0x89,
+0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,8,8,0x17,0,0,0,
0,0,0,0,0,0,0,0,0,6,8,8,0,5,5,5,
-5,5,5,5,5,0,0,5,5,0,0,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,0,5,5,5,5,5,5,
-5,0,5,5,0,5,5,5,5,5,0,0,6,5,8,6,
-8,6,6,6,6,0,0,8,8,0,0,8,8,6,0,0,
-0,0,0,0,0,6,6,8,0,0,0,0,5,5,0,5,
-0,0,0,0,0,0,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,
-0x7cb,0x1e4b,0x784b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x19,0x1b,0,0,0,0,0,
-0,0,6,5,0,5,5,5,5,5,5,0,0,0,5,5,
-5,0,5,5,5,5,0,0,0,5,5,0,5,0,5,5,
-0,0,0,5,5,0,0,0,5,5,5,0,0,0,5,5,
-5,5,5,5,5,5,5,5,5,5,0,0,0,0,8,8,
-6,8,8,0,0,0,8,8,8,0,8,8,8,6,0,0,
-5,0,0,0,0,0,0,8,0,0,0,0,0,0,0,0,
-5,5,6,6,0,0,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,
-0,0,0,0,0,0,0,0x17,0x54b,0x58b,0x5cb,0x60b,0x58b,0x5cb,0x60b,0x1b,
-6,8,8,8,6,5,5,5,5,5,5,5,5,0,5,5,
-5,0,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,0,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,0,0,6,5,6,6,6,8,8,8,8,0,6,6,
-6,0,6,6,6,6,0,0,0,0,0,0,0,6,6,0,
-5,5,5,0,0,5,0,0,5,5,6,6,0,0,0x49,0x89,
-0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0,5,5,0,0,0,0,0,
-0,0,0,0,0,0,0,0,5,6,8,8,0x17,5,5,5,
-5,5,5,5,5,0,5,5,5,0,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,0,5,5,5,5,5,5,
-5,5,5,5,0,5,5,5,5,5,0,0,6,5,8,6,
-8,8,8,8,8,0,6,8,8,0,8,8,6,6,0,0,
-0,0,0,0,0,8,8,0,0,0,0,0,0,5,5,0,
-5,5,6,6,0,0,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,
-0x7cb,0x1e4b,0x784b,0x34cb,0x344b,0x3ccb,0x37cb,0x35cb,0x3fcb,0x1b,5,5,5,5,5,5,
-6,6,8,8,5,5,5,5,5,5,5,5,5,0,5,5,
-5,0,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,6,
-6,5,8,8,8,6,6,6,6,0,8,8,8,0,8,8,
-8,6,5,0x1b,0,0,0,0,5,5,5,8,0xcc0b,0xca0b,0xcb4b,0xc90b,
-0x364b,0xc94b,0x350b,5,0,0,0,0,0,0,0x49,0x89,0xc9,0x109,0x149,0x189,
-0x1c9,0x209,0x249,0x289,0,0,8,8,0x17,0,0,0,0,0,0,0,
-0,0,0,0,0,6,8,8,0,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,0,0,0,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,0,5,5,5,5,5,5,5,5,5,0,5,0,0,
-5,5,5,5,5,5,5,0,0,0,6,0,0,0,0,8,
-8,8,6,6,6,0,6,0,8,8,8,8,8,8,8,8,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,6,5,5,6,6,6,6,6,6,6,0,0,0,0,0x19,
-5,5,5,5,5,5,4,6,6,6,6,6,6,6,6,0x17,
-0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0x17,0x17,0,0,0,0,
-0,5,5,0,5,0,5,5,5,5,5,0,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-0,5,0,5,5,5,5,5,5,5,5,5,5,6,5,5,
-6,6,6,6,6,6,6,6,6,5,0,0,5,5,5,5,
-5,0,4,0,6,6,6,6,6,6,0,0,0x49,0x89,0xc9,0x109,
-0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,5,5,5,5,5,0x1b,0x1b,0x1b,
-0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x1b,
-0x17,0x1b,0x1b,0x1b,6,6,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x49,0x89,0xc9,0x109,
-0x149,0x189,0x1c9,0x209,0x249,0x289,0x344b,0x3c4b,0x444b,0x4c4b,0x544b,0x5c4b,0x644b,0x6c4b,0x744b,0x2c4b,
-0x1b,6,0x1b,6,0x1b,6,0x14,0x15,0x14,0x15,8,8,5,5,5,5,
-5,5,5,5,0,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,0,0,0,
-0,6,6,6,6,6,6,6,6,6,6,6,6,6,6,8,
-6,6,6,6,6,0x17,6,6,5,5,5,5,5,6,6,6,
-6,6,6,6,6,6,6,6,0,6,6,6,6,6,6,6,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,0,
+0,0,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,0,5,5,5,5,5,5,5,5,5,
+0,5,0,0,5,5,5,5,5,5,5,0,0,0,6,0,
+0,0,0,8,8,8,6,6,6,0,6,0,8,8,8,8,
+8,8,8,8,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,6,5,5,6,6,6,6,6,6,6,0,
+0,0,0,0x19,5,5,5,5,5,5,4,6,6,6,6,6,
+6,6,6,0x17,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0x17,0x17,
+0,0,0,0,0,5,5,0,5,0,5,5,5,5,5,0,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,0,5,0,5,5,5,5,5,5,5,5,5,
+5,6,5,5,6,6,6,6,6,6,6,6,6,5,0,0,
+5,5,5,5,5,0,4,0,6,6,6,6,6,6,6,0,
+0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,5,5,5,5,
+5,0x1b,0x1b,0x1b,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,
+0x17,0x17,0x17,0x1b,0x17,0x1b,0x1b,0x1b,6,6,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
+0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0x344b,0x3c4b,0x444b,0x4c4b,0x544b,0x5c4b,
+0x644b,0x6c4b,0x744b,0x2c4b,0x1b,6,0x1b,6,0x1b,6,0x14,0x15,0x14,0x15,8,8,
+5,5,5,5,5,5,5,5,0,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,0,0,0,0,6,6,6,6,6,6,6,6,6,6,6,
+6,6,6,8,6,6,6,6,6,0x17,6,6,5,5,5,5,
+5,6,6,6,6,6,6,6,6,6,6,6,0,6,6,6,
6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,
-6,6,6,6,6,6,6,6,6,0,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,6,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0x1b,0x1b,0x17,0x17,0x17,0x17,
-0x17,0x1b,0x1b,0x1b,0x1b,0x17,0x17,0,0,0,0,0,5,5,5,5,
-5,5,5,5,5,5,5,8,8,6,6,6,6,8,6,6,
-6,6,6,6,8,6,6,8,8,6,6,5,0x49,0x89,0xc9,0x109,
-0x149,0x189,0x1c9,0x209,0x249,0x289,0x17,0x17,0x17,0x17,0x17,0x17,5,5,5,5,
-5,5,8,8,6,6,5,5,5,5,6,6,6,5,8,8,
-8,5,5,8,8,8,8,8,8,8,5,5,5,6,6,6,
-6,5,5,5,5,5,5,5,5,5,5,5,5,5,6,8,
-8,6,6,8,8,8,8,8,8,6,5,8,0x49,0x89,0xc9,0x109,
-0x149,0x189,0x1c9,0x209,0x249,0x289,8,8,8,6,0x1b,0x1b,2,2,2,2,
+6,6,6,6,6,6,6,6,6,6,6,6,6,0,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,6,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0x1b,0x1b,
+0x17,0x17,0x17,0x17,0x17,0x1b,0x1b,0x1b,0x1b,0x17,0x17,0,0,0,0,0,
+5,5,5,5,5,5,5,5,5,5,5,8,8,6,6,6,
+6,8,6,6,6,6,6,6,8,6,6,8,8,6,6,5,
+0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0x17,0x17,0x17,0x17,0x17,0x17,
+5,5,5,5,5,5,8,8,6,6,5,5,5,5,6,6,
+6,5,8,8,8,5,5,8,8,8,8,8,8,8,5,5,
+5,6,6,6,6,5,5,5,5,5,5,5,5,5,5,5,
+5,5,6,8,8,6,6,8,8,8,8,8,8,6,5,8,
+0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,8,8,8,6,0x1b,0x1b,
2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
-2,2,2,2,2,2,2,0x17,4,2,2,2,1,1,1,1,
-1,1,0,1,0,0,0,0,0,1,0,0,2,2,2,2,
-2,2,2,2,2,2,2,2,2,2,2,2,5,5,5,5,
-5,5,5,5,5,0,5,5,5,5,0,0,5,5,5,5,
-5,5,5,0,5,0,5,5,5,5,0,0,5,5,5,5,
-5,5,5,5,5,0,5,5,5,5,0,0,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,0,5,5,
-5,5,0,0,5,5,5,5,5,5,5,0,5,0,5,5,
-5,5,0,0,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,0,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,0,5,5,5,5,0,0,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,0,0,6,6,6,0x17,0x17,0x17,0x17,
-0x17,0x17,0x17,0x17,0x17,0x30b,0x34b,0x38b,0x3cb,0x40b,0x44b,0x48b,0x4cb,0x50b,0x7cb,0xa4b,
-0xccb,0xf4b,0x11cb,0x144b,0x16cb,0x194b,0x1bcb,0x1e4b,0x788b,0,0,0,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0,0,0,0,1,1,1,1,
-1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
-1,1,0,0,2,2,2,2,2,2,0,0,0x13,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,0x1b,0x17,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-0xc,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,0x14,0x15,0,0,0,
-5,5,5,5,5,5,5,5,5,5,5,0x17,0x17,0x17,0x98a,0x9ca,
-0xa0a,5,5,5,5,5,5,5,5,0,0,0,0,0,0,0,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,6,6,6,8,0,0,0,0,0,0,0,0,0,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,6,6,8,0x17,0x17,0,0,0,0,0,0,0,0,0,
+2,2,2,2,2,2,2,2,2,2,2,0x17,4,2,2,2,
+1,1,1,1,1,1,0,1,0,0,0,0,0,1,0,0,
+2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
+5,5,5,5,5,5,5,5,5,0,5,5,5,5,0,0,
+5,5,5,5,5,5,5,0,5,0,5,5,5,5,0,0,
+5,5,5,5,5,5,5,5,5,0,5,5,5,5,0,0,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,6,6,0,0,0,0,0,0,0,0,0,0,0,0,
-5,5,5,5,5,5,5,5,5,5,5,5,5,0,5,5,
-5,0,6,6,0,0,0,0,0,0,0,0,0,0,0,0,
+5,0,5,5,5,5,0,0,5,5,5,5,5,5,5,0,
+5,0,5,5,5,5,0,0,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,0,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,0,5,5,5,5,0,0,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,6,6,8,6,6,6,6,6,6,6,8,8,
-8,8,8,8,8,8,6,8,8,6,6,6,6,6,6,6,
-6,6,6,6,0x17,0x17,0x17,4,0x17,0x17,0x17,0x19,5,6,0,0,
-0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,0,0,0,0,
-0x54b,0x58b,0x5cb,0x60b,0x64b,0x68b,0x6cb,0x70b,0x74b,0x78b,0,0,0,0,0,0,
-5,5,5,5,5,5,5,5,5,6,5,0,0,0,0,0,
+5,5,5,5,5,5,5,5,5,5,5,0,0,6,6,6,
+0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x30b,0x34b,0x38b,0x3cb,0x40b,0x44b,0x48b,
+0x4cb,0x50b,0x7cb,0xa4b,0xccb,0xf4b,0x11cb,0x144b,0x16cb,0x194b,0x1bcb,0x1e4b,0x788b,0,0,0,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-0x17,0x17,0x17,0x17,0x17,0x17,0x13,0x17,0x17,0x17,0x17,6,6,6,0x10,6,
-0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,0,0,0,0,
-5,5,5,4,5,5,5,5,5,5,5,5,5,5,5,5,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0,0,0,0,
+1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+1,1,1,1,1,1,0,0,2,2,2,2,2,2,0,0,
+0x13,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,0x1b,0x17,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,0xc,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,0x14,
+0x15,0,0,0,5,5,5,5,5,5,5,5,5,5,5,0x17,
+0x17,0x17,0x98a,0x9ca,0xa0a,5,5,5,5,5,5,5,5,0,0,0,
+0,0,0,0,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,6,6,6,8,0,0,0,0,0,0,
+0,0,0,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,6,6,8,0x17,0x17,0,0,0,0,0,
+0,0,0,0,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,6,6,0,0,0,0,0,0,0,0,
+0,0,0,0,5,5,5,5,5,5,5,5,5,5,5,5,
+5,0,5,5,5,0,6,6,0,0,0,0,0,0,0,0,
+0,0,0,0,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,6,6,8,6,6,6,6,6,
+6,6,8,8,8,8,8,8,8,8,6,8,8,6,6,6,
+6,6,6,6,6,6,6,6,0x17,0x17,0x17,4,0x17,0x17,0x17,0x19,
+5,6,0,0,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,
+0,0,0,0,0x54b,0x58b,0x5cb,0x60b,0x64b,0x68b,0x6cb,0x70b,0x74b,0x78b,0,0,
+0,0,0,0,5,5,5,5,5,5,5,5,5,6,5,0,
+0,0,0,0,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,0x17,0x17,0x17,0x17,0x17,0x17,0x13,0x17,0x17,0x17,0x17,6,
+6,6,0x10,6,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,
+0,0,0,0,5,5,5,4,5,5,5,5,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,0,0,0,0,0,0,0,5,5,5,5,5,6,6,5,
+5,5,5,5,5,0,0,0,0,0,0,0,5,5,5,5,
+5,6,6,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,0,0,
+0,0,0,0,0,0,0,0,6,6,6,8,8,8,8,6,
+6,8,8,8,0,0,0,0,8,8,6,8,8,8,8,8,
+8,6,6,6,0,0,0,0,0x1b,0,0,0,0x17,0x17,0x49,0x89,
+0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,5,5,5,5,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,0,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,0,0,5,5,5,5,5,0,0,0,
+0,0,0,0,0,0,0,0,5,5,5,5,5,5,5,5,
+5,5,5,5,0,0,0,0,5,5,5,5,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,0,0,0,0,0,0,
-0,0,0,0,6,6,6,8,8,8,8,6,6,8,8,8,
-0,0,0,0,8,8,6,8,8,8,8,8,8,6,6,6,
-0,0,0,0,0x1b,0,0,0,0x17,0x17,0x49,0x89,0xc9,0x109,0x149,0x189,
-0x1c9,0x209,0x249,0x289,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,0,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,0,0,5,5,5,5,5,0,0,0,0,0,0,0,
-0,0,0,0,5,5,5,5,5,5,5,5,5,5,5,5,
-0,0,0,0,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,0,0,0,0,0,0,0x49,0x89,0xc9,0x109,
-0x149,0x189,0x1c9,0x209,0x249,0x289,0x30b,0,0,0,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
+0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0x30b,0,0,0,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,6,6,8,8,6,0,0,0x17,0x17,0x17,0x17,0x17,0x17,
-0x17,0x17,0x17,4,0x17,0x17,0x17,0x17,0x17,0x17,0,0,6,6,6,6,
-6,6,6,6,6,6,6,6,6,6,7,6,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,8,6,8,6,6,6,6,6,6,6,0,6,8,6,8,
-8,6,6,6,6,6,6,6,6,8,8,8,8,8,8,6,
-6,6,6,6,6,6,6,6,6,0,0,6,0x49,0x89,0xc9,0x109,
-0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,0,0,0,0,0x49,0x89,0xc9,0x109,
-0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,0,0,0,0,0x17,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,6,6,6,6,6,6,6,6,6,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x17,0x17,0,6,6,6,6,
-6,6,6,6,6,6,6,6,6,6,6,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,6,6,6,6,
-8,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,6,8,6,6,
-6,6,6,8,6,8,8,8,8,8,6,8,8,5,5,5,
-5,5,5,5,5,0,0,0,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,
-0x249,0x289,0x17,0x17,0x17,0x17,0x17,0x17,5,8,6,6,6,6,8,8,
-6,6,8,6,6,6,5,5,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,
-0x249,0x289,5,5,5,5,5,5,6,6,8,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,6,8,6,6,8,8,
-8,6,8,6,6,6,8,8,0,0,0,0,0,0,0,0,
-0x17,0x17,0x17,0x17,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,
-0,5,5,5,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,5,5,
-5,5,5,5,8,8,8,8,8,8,8,8,6,6,6,6,
-6,6,6,6,8,8,6,6,0,0,0,0x17,0x17,0x17,0x17,0x17,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,4,4,4,4,4,4,0x17,0x17,
-2,2,2,2,2,2,2,2,2,0,0,0,0,0,0,0,
-1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
-1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,
-0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0,0,0,0,0,0,0,0,
-6,6,6,0x17,6,6,6,6,6,6,6,6,6,6,6,6,
-6,8,6,6,6,6,6,6,6,5,5,5,5,6,5,5,
-5,5,5,5,6,5,5,8,6,6,5,0,0,0,0,0,
-2,2,2,2,2,2,2,2,2,2,2,2,4,4,4,4,
+5,5,5,5,5,5,5,6,6,8,8,6,0,0,0x17,0x17,
+0x17,0x17,0x17,0x17,0x17,0x17,0x17,4,0x17,0x17,0x17,0x17,0x17,0x17,0,0,
+6,6,6,6,6,6,6,6,6,6,6,6,6,6,7,6,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,8,6,8,6,6,6,6,6,6,6,0,
+6,8,6,8,8,6,6,6,6,6,6,6,6,8,8,8,
+8,8,8,6,6,6,6,6,6,6,6,6,6,0,0,6,
+0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,0,0,0,0,
+0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,0,0,0,0,
+0x17,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,6,6,6,6,6,
+6,6,6,6,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x17,0x17,0,
+6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+6,6,6,6,8,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+6,8,6,6,6,6,6,8,6,8,8,8,8,8,6,8,
+8,5,5,5,5,5,5,5,5,0,0,0,0x49,0x89,0xc9,0x109,
+0x149,0x189,0x1c9,0x209,0x249,0x289,0x17,0x17,0x17,0x17,0x17,0x17,5,8,6,6,
+6,6,8,8,6,6,8,6,6,6,5,5,0x49,0x89,0xc9,0x109,
+0x149,0x189,0x1c9,0x209,0x249,0x289,5,5,5,5,5,5,6,6,8,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,6,8,
+6,6,8,8,8,6,8,6,6,6,8,8,0,0,0,0,
+0,0,0,0,0x17,0x17,0x17,0x17,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,
+0x249,0x289,0,0,0,5,5,5,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,
+0x249,0x289,5,5,5,5,5,5,8,8,8,8,8,8,8,8,
+6,6,6,6,6,6,6,6,8,8,6,6,0,0,0,0x17,
+0x17,0x17,0x17,0x17,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,4,4,4,4,
+4,4,0x17,0x17,2,2,2,2,2,2,2,2,2,0,0,0,
+0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,
+1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,
+0,1,1,1,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0,0,0,0,
+0,0,0,0,6,6,6,0x17,6,6,6,6,6,6,6,6,
+6,6,6,6,6,8,6,6,6,6,6,6,6,5,5,5,
+5,6,5,5,5,5,5,5,6,5,5,8,6,6,5,0,
+0,0,0,0,2,2,2,2,2,2,2,2,2,2,2,2,
4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,
-4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,2,
-2,2,2,2,2,2,2,2,2,2,2,2,4,2,2,2,
-2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
-2,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,
-1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,
-1,2,1,2,1,2,2,2,2,2,2,2,2,2,1,2,
-2,2,2,2,2,2,2,2,1,1,1,1,1,0x1a,0x1a,0x1a,
-0,0,2,2,2,0,2,2,1,1,1,1,3,0x1a,0x1a,0,
-2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,1,
-2,2,2,2,2,2,0,0,1,1,1,1,1,1,0,0,
-2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,1,
-2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,1,
-2,2,2,2,2,2,0,0,1,1,1,1,1,1,0,0,
-2,2,2,2,2,2,2,2,0,1,0,1,0,1,0,1,
-2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,1,
-2,2,2,2,2,2,2,2,2,2,2,2,2,2,0,0,
-2,2,2,2,2,2,2,2,3,3,3,3,3,3,3,3,
-2,2,2,2,2,2,2,2,3,3,3,3,3,3,3,3,
-2,2,2,2,2,0,2,2,1,1,1,1,3,0x1a,2,0x1a,
-0x1a,0x1a,2,2,2,0,2,2,1,1,1,1,3,0x1a,0x1a,0x1a,
-2,2,2,2,0,0,2,2,1,1,1,1,0,0x1a,0x1a,0x1a,
-0x16,0x17,0x17,0x17,0x18,0x14,0x15,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,
-0x17,0x17,0x18,0x17,0x16,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0xc,
-0x10,0x10,0x10,0x10,0x10,0,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,
-0x2cb,4,0,0,0x3cb,0x40b,0x44b,0x48b,0x4cb,0x50b,0x18,0x18,0x18,0x14,0x15,4,
-0xc,0xc,0xc,0xc,0xc,0xc,0xc,0xc,0xc,0xc,0xc,0x10,0x10,0x10,0x10,0x10,
-0x13,0x13,0x13,0x13,0x13,0x13,0x17,0x17,0x1c,0x1d,0x14,0x1c,0x1c,0x1d,0x14,0x1c,
-0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0xd,0xe,0x10,0x10,0x10,0x10,0x10,0xc,
-0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x1c,0x1d,0x17,0x17,0x17,0x17,0x16,
-0x2cb,0x30b,0x34b,0x38b,0x3cb,0x40b,0x44b,0x48b,0x4cb,0x50b,0x18,0x18,0x18,0x14,0x15,0,
-4,4,4,4,4,4,4,4,4,4,4,4,4,0,0,0,
-0x19,0x19,0x19,0x19,0x19,0x19,0x19,0x19,0x19,0x19,0x19,0x19,0x19,0x19,0x19,0x19,
+4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,
+4,4,4,2,2,2,2,2,2,2,2,2,2,2,2,2,
+4,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
+2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,4,
+4,4,4,4,1,2,1,2,1,2,1,2,1,2,1,2,
+1,2,1,2,1,2,1,2,1,2,2,2,2,2,2,2,
+2,2,1,2,2,2,2,2,2,2,2,2,1,1,1,1,
+1,0x1a,0x1a,0x1a,0,0,2,2,2,0,2,2,1,1,1,1,
+3,0x1a,0x1a,0,2,2,2,2,2,2,2,2,1,1,1,1,
+1,1,1,1,2,2,2,2,2,2,0,0,1,1,1,1,
+1,1,0,0,2,2,2,2,2,2,2,2,1,1,1,1,
+1,1,1,1,2,2,2,2,2,2,2,2,1,1,1,1,
+1,1,1,1,2,2,2,2,2,2,0,0,1,1,1,1,
+1,1,0,0,2,2,2,2,2,2,2,2,0,1,0,1,
+0,1,0,1,2,2,2,2,2,2,2,2,1,1,1,1,
+1,1,1,1,2,2,2,2,2,2,2,2,2,2,2,2,
+2,2,0,0,2,2,2,2,2,2,2,2,3,3,3,3,
+3,3,3,3,2,2,2,2,2,2,2,2,3,3,3,3,
+3,3,3,3,2,2,2,2,2,0,2,2,1,1,1,1,
+3,0x1a,2,0x1a,0x1a,0x1a,2,2,2,0,2,2,1,1,1,1,
+3,0x1a,0x1a,0x1a,2,2,2,2,0,0,2,2,1,1,1,1,
+0,0x1a,0x1a,0x1a,0x16,0x17,0x17,0x17,0x18,0x14,0x15,0x17,0x17,0x17,0x17,0x17,
+0x17,0x17,0x17,0x17,0x17,0x17,0x18,0x17,0x16,0x17,0x17,0x17,0x17,0x17,0x17,0x17,
+0x17,0x17,0x17,0xc,0x10,0x10,0x10,0x10,0x10,0,0x10,0x10,0x10,0x10,0x10,0x10,
+0x10,0x10,0x10,0x10,0x2cb,4,0,0,0x3cb,0x40b,0x44b,0x48b,0x4cb,0x50b,0x18,0x18,
+0x18,0x14,0x15,4,0xc,0xc,0xc,0xc,0xc,0xc,0xc,0xc,0xc,0xc,0xc,0x10,
+0x10,0x10,0x10,0x10,0x13,0x13,0x13,0x13,0x13,0x13,0x17,0x17,0x1c,0x1d,0x14,0x1c,
+0x1c,0x1d,0x14,0x1c,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0xd,0xe,0x10,0x10,
+0x10,0x10,0x10,0xc,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x1c,0x1d,0x17,
+0x17,0x17,0x17,0x16,0x2cb,0x30b,0x34b,0x38b,0x3cb,0x40b,0x44b,0x48b,0x4cb,0x50b,0x18,0x18,
+0x18,0x14,0x15,0,4,4,4,4,4,4,4,4,4,4,4,4,
+4,0,0,0,0x19,0x19,0x19,0x19,0x19,0x19,0x19,0x19,0x19,0x19,0x19,0x19,
0x19,0x19,0x19,0x19,0x19,0x19,0x19,0x19,0x19,0x19,0x19,0x19,0x19,0x19,0x19,0x19,
-0x19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-6,6,6,6,6,6,6,6,6,6,6,6,6,7,7,7,
-7,6,7,7,7,6,6,6,6,6,6,6,6,6,6,6,
-6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0x1b,0x1b,0x1b,0x1b,1,0x1b,1,0x1b,1,0x1b,1,1,1,1,0x1b,2,
-1,1,1,1,2,5,5,5,5,2,0x1b,0x1b,2,2,1,1,
-0x18,0x18,0x18,0x18,0x18,1,2,2,2,2,0x1b,0x18,0x1b,0x1b,2,0x1b,
-0x358b,0x360b,0x364b,0x348b,0x388b,0x350b,0x390b,0x3d0b,0x410b,0x354b,0x454b,0x35cb,0x3dcb,0x45cb,0x4dcb,0x58b,
-0x1b,0x1b,1,0x1b,0x1b,0x1b,0x1b,1,0x1b,0x1b,2,1,1,1,2,2,
-1,1,1,2,0x1b,1,0x1b,0x1b,0x18,1,1,1,1,1,0x1b,0x1b,
-0x58a,0x5ca,0x60a,0x64a,0x68a,0x6ca,0x70a,0x74a,0x78a,0x7ca,0x80a,0x84a,0x11ca,0x1e4a,0x980a,0x784a,
-0x58a,0x5ca,0x60a,0x64a,0x68a,0x6ca,0x70a,0x74a,0x78a,0x7ca,0x80a,0x84a,0x11ca,0x1e4a,0x980a,0x784a,
-0x784a,0x984a,0x788a,1,2,0x6ca,0x11ca,0x988a,0x78ca,0x54b,0x1b,0x1b,0,0,0,0,
-0x18,0x18,0x18,0x18,0x18,0x1b,0x1b,0x1b,0x1b,0x1b,0x18,0x18,0x1b,0x1b,0x1b,0x1b,
+0x19,0x19,0x19,0x19,0x19,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,6,6,6,6,6,6,6,6,6,6,6,6,
+6,7,7,7,7,6,7,7,7,6,6,6,6,6,6,6,
+6,6,6,6,6,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0x1b,0x1b,0x1b,0x1b,1,0x1b,1,0x1b,1,0x1b,1,1,
+1,1,0x1b,2,1,1,1,1,2,5,5,5,5,2,0x1b,0x1b,
+2,2,1,1,0x18,0x18,0x18,0x18,0x18,1,2,2,2,2,0x1b,0x18,
+0x1b,0x1b,2,0x1b,0x358b,0x360b,0x364b,0x348b,0x388b,0x350b,0x390b,0x3d0b,0x410b,0x354b,0x454b,0x35cb,
+0x3dcb,0x45cb,0x4dcb,0x58b,0x1b,0x1b,1,0x1b,0x1b,0x1b,0x1b,1,0x1b,0x1b,2,1,
+1,1,2,2,1,1,1,2,0x1b,1,0x1b,0x1b,0x18,1,1,1,
+1,1,0x1b,0x1b,0x58a,0x5ca,0x60a,0x64a,0x68a,0x6ca,0x70a,0x74a,0x78a,0x7ca,0x80a,0x84a,
+0x11ca,0x1e4a,0x980a,0x784a,0x58a,0x5ca,0x60a,0x64a,0x68a,0x6ca,0x70a,0x74a,0x78a,0x7ca,0x80a,0x84a,
+0x11ca,0x1e4a,0x980a,0x784a,0x784a,0x984a,0x788a,1,2,0x6ca,0x11ca,0x988a,0x78ca,0x54b,0x1b,0x1b,
+0,0,0,0,0x18,0x18,0x18,0x18,0x18,0x1b,0x1b,0x1b,0x1b,0x1b,0x18,0x18,
0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x1b,0x1b,0x18,
-0x1b,0x1b,0x18,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x18,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x18,0x18,
-0x1b,0x1b,0x18,0x1b,0x18,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,
+0x1b,0x1b,0x1b,0x1b,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,
+0x18,0x1b,0x1b,0x18,0x1b,0x1b,0x18,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x18,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x18,0x18,0x1b,0x1b,0x18,0x1b,0x18,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,
0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x14,0x15,0x14,0x15,0x1b,0x1b,0x1b,0x1b,
+0x18,0x18,0x18,0x18,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x14,0x15,0x14,0x15,
0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x18,0x18,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x14,0x15,0x1b,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x18,0x18,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x14,0x15,0x1b,
0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x18,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x18,0x1b,0x1b,0x1b,
0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,
-0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x18,0x18,0x18,0x18,0x18,
+0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,
0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x18,0x18,0x18,0x18,0x18,0x18,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x18,0x18,0x18,0x18,
+0x18,0x18,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x2cb,0x80b,0x84b,0x88b,0x8cb,0x90b,0x94b,0x98b,0x9cb,0xa0b,0xa4b,0x30b,0x34b,0x38b,
-0x3cb,0x40b,0x44b,0x48b,0x4cb,0x50b,0x7cb,0x2cb,0x30b,0x34b,0x38b,0x3cb,0x40b,0x44b,0x48b,0x4cb,
-0x50b,0x7cb,0x80b,0x84b,0x88b,0x8cb,0x90b,0x94b,0x98b,0x9cb,0xa0b,0xa4b,0x30b,0x34b,0x38b,0x3cb,
+0x1b,0x1b,0x1b,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x2cb,0x80b,0x84b,0x88b,0x8cb,0x90b,0x94b,0x98b,0x9cb,0xa0b,
+0xa4b,0x30b,0x34b,0x38b,0x3cb,0x40b,0x44b,0x48b,0x4cb,0x50b,0x7cb,0x2cb,0x30b,0x34b,0x38b,0x3cb,
0x40b,0x44b,0x48b,0x4cb,0x50b,0x7cb,0x80b,0x84b,0x88b,0x8cb,0x90b,0x94b,0x98b,0x9cb,0xa0b,0xa4b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,
+0x30b,0x34b,0x38b,0x3cb,0x40b,0x44b,0x48b,0x4cb,0x50b,0x7cb,0x80b,0x84b,0x88b,0x8cb,0x90b,0x94b,
+0x98b,0x9cb,0xa0b,0xa4b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x18,0x18,0x18,0x18,
+0x18,0x18,0x18,0x18,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x18,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x18,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x18,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x18,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x18,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x14,0x15,0x14,0x15,0x14,0x15,0x14,0x15,0x14,0x15,0x14,0x15,
-0x14,0x15,0x30b,0x34b,0x38b,0x3cb,0x40b,0x44b,0x48b,0x4cb,0x50b,0x7cb,0x30b,0x34b,0x38b,0x3cb,
-0x40b,0x44b,0x48b,0x4cb,0x50b,0x7cb,0x30b,0x34b,0x38b,0x3cb,0x40b,0x44b,0x48b,0x4cb,0x50b,0x7cb,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x18,0x18,0x18,0x18,
-0x18,0x14,0x15,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,
-0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x14,0x15,
-0x14,0x15,0x14,0x15,0x14,0x15,0x14,0x15,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,
-0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x14,0x15,0x14,0x15,0x14,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x14,0x15,0x14,0x15,0x14,0x15,0x14,0x15,
+0x14,0x15,0x14,0x15,0x14,0x15,0x30b,0x34b,0x38b,0x3cb,0x40b,0x44b,0x48b,0x4cb,0x50b,0x7cb,
+0x30b,0x34b,0x38b,0x3cb,0x40b,0x44b,0x48b,0x4cb,0x50b,0x7cb,0x30b,0x34b,0x38b,0x3cb,0x40b,0x44b,
+0x48b,0x4cb,0x50b,0x7cb,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
+0x18,0x18,0x18,0x18,0x18,0x14,0x15,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,
+0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,
+0x18,0x18,0x14,0x15,0x14,0x15,0x14,0x15,0x14,0x15,0x14,0x15,0x18,0x18,0x18,0x18,
+0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x14,
0x15,0x14,0x15,0x14,0x15,0x14,0x15,0x14,0x15,0x14,0x15,0x14,0x15,0x14,0x15,0x14,
-0x15,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,
-0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x14,0x15,0x14,0x15,
+0x15,0x14,0x15,0x14,0x15,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,
0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,
-0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x14,0x15,0x18,0x18,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
+0x14,0x15,0x14,0x15,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,
0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,
-0x18,0x1b,0x1b,0x18,0x18,0x18,0x18,0x18,0x18,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
+0x14,0x15,0x18,0x18,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,
+0x18,0x18,0x18,0x18,0x18,0x1b,0x1b,0x18,0x18,0x18,0x18,0x18,0x18,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0,0,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,1,2,1,1,1,2,2,1,
+2,1,2,1,2,1,1,1,1,2,1,2,2,1,2,2,
+2,2,2,2,4,4,1,1,1,2,1,2,2,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,1,2,1,2,6,6,6,1,2,0,0,0,0,
+0,0x17,0x17,0x17,0x17,0x344b,0x17,0x17,2,2,2,2,2,2,0,2,
+0,0,0,0,0,2,0,0,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,0,0,0,0,0,0,0,4,
+0x17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6,
+5,5,5,5,5,5,5,0,5,5,5,5,5,5,5,0,
+5,5,5,5,5,5,5,0,5,5,5,5,5,5,5,0,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,0,0,0,0,0,0,0,0,0,
+0x17,0x17,0x1c,0x1d,0x1c,0x1d,0x17,0x17,0x17,0x1c,0x1d,0x17,0x1c,0x1d,0x17,0x17,
+0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x13,0x17,0x17,0x13,0x17,0x1c,0x1d,0x17,0x17,
+0x1c,0x1d,0x14,0x15,0x14,0x15,0x14,0x15,0x14,0x15,0x17,0x17,0x17,0x17,0x17,4,
+0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x13,0x13,0x17,0x17,0x17,0x17,
+0x13,0x17,0x14,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,
+0x1b,0x1b,0x17,0x17,0x17,0x14,0x15,0x14,0x15,0x14,0x15,0x14,0x15,0x13,0,0,
0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0,0,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,1,2,1,1,1,2,2,1,2,1,2,1,
-2,1,1,1,1,2,1,2,2,1,2,2,2,2,2,2,
-4,4,1,1,1,2,1,2,2,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,1,
-2,1,2,6,6,6,1,2,0,0,0,0,0,0x17,0x17,0x17,
-0x17,0x344b,0x17,0x17,2,2,2,2,2,2,0,2,0,0,0,0,
-0,2,0,0,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,0,0,0,0,0,0,0,4,0x17,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,6,5,5,5,5,
-5,5,5,0,5,5,5,5,5,5,5,0,5,5,5,5,
-5,5,5,0,5,5,5,5,5,5,5,0,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,0,0,0,0,0,0,0,0,0,0x17,0x17,0x1c,0x1d,
-0x1c,0x1d,0x17,0x17,0x17,0x1c,0x1d,0x17,0x1c,0x1d,0x17,0x17,0x17,0x17,0x17,0x17,
-0x17,0x17,0x17,0x13,0x17,0x17,0x13,0x17,0x1c,0x1d,0x17,0x17,0x1c,0x1d,0x14,0x15,
-0x14,0x15,0x14,0x15,0x14,0x15,0x17,0x17,0x17,0x17,0x17,4,0x17,0x17,0x17,0x17,
-0x17,0x17,0x17,0x17,0x17,0x17,0x13,0x13,0x17,0x17,0x17,0x17,0x13,0x17,0x14,0x17,
-0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x1b,0x1b,0x17,0x17,
-0x17,0x14,0x15,0x14,0x15,0x14,0x15,0x14,0x15,0x13,0,0,0x1b,0x1b,0x1b,0x1b,
0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0,0,
-0,0,0,0,0,0,0,0,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0,0,
-0x1b,0x58a,0x5ca,0x60a,0x64a,0x68a,0x6ca,0x70a,0x74a,0x78a,6,6,6,6,8,8,
-0x13,4,4,4,4,4,0x1b,0x1b,0x7ca,0xa4a,0xcca,4,5,0x17,0x1b,0x1b,
-0xc,0x17,0x17,0x17,0x1b,4,5,0x54a,0x14,0x15,0x14,0x15,0x14,0x15,0x14,0x15,
-0x14,0x15,0x1b,0x1b,0x14,0x15,0x14,0x15,0x14,0x15,0x14,0x15,0x13,0x14,0x15,0x15,
+0,0,0,0,0,0,0,0,0,0,0,0,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
+0,0,0,0,0x1b,0x58a,0x5ca,0x60a,0x64a,0x68a,0x6ca,0x70a,0x74a,0x78a,6,6,
+6,6,8,8,0x13,4,4,4,4,4,0x1b,0x1b,0x7ca,0xa4a,0xcca,4,
+5,0x17,0x1b,0x1b,0xc,0x17,0x17,0x17,0x1b,4,5,0x54a,0x14,0x15,0x14,0x15,
+0x14,0x15,0x14,0x15,0x14,0x15,0x1b,0x1b,0x14,0x15,0x14,0x15,0x14,0x15,0x14,0x15,
+0x13,0x14,0x15,0x15,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,0,0,6,6,0x1a,
+0x1a,4,4,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,0x17,
+4,4,4,5,0,0,0,0,0,5,5,5,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,0,0,6,6,0x1a,0x1a,4,4,5,
+5,5,5,5,0,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,0,0x1b,0x1b,0x58b,0x5cb,0x60b,0x64b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0,0,0,0,0,0,
+0,0,0,0,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,0x58b,0x5cb,0x60b,0x64b,0x68b,0x6cb,0x70b,0x74b,0x78b,0x7cb,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x7cb,0xa4b,0xccb,0xf4b,0x11cb,0x144b,0x16cb,0x194b,0x1b,0xa8b,0xacb,0xb0b,
+0xb4b,0xb8b,0xbcb,0xc0b,0xc4b,0xc8b,0xccb,0xd0b,0xd4b,0xd8b,0xdcb,0xe0b,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0xe4b,0xe8b,0xecb,
+0xf0b,0xf4b,0xf8b,0xfcb,0x100b,0x104b,0x108b,0x10cb,0x110b,0x114b,0x118b,0x11cb,5,5,5,5,
+5,0x685,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,0x5c5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,0x17,4,4,4,5,
-0,0,0,0,0,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,0x685,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-0,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,0,0x1b,0x1b,0x58b,0x5cb,0x60b,0x64b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0,0,0,0,0,0,0,0,0,0,0,0,
+5,5,5,5,5,0x705,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,0x585,5,5,0x705,5,5,5,0x7885,
+5,0x605,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-0x58b,0x5cb,0x60b,0x64b,0x68b,0x6cb,0x70b,0x74b,0x78b,0x7cb,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x7cb,0xa4b,0xccb,0xf4b,0x11cb,0x144b,0x16cb,0x194b,0x1b,0xa8b,0xacb,0xb0b,0xb4b,0xb8b,0xbcb,0xc0b,
-0xc4b,0xc8b,0xccb,0xd0b,0xd4b,0xd8b,0xdcb,0xe0b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0xe4b,0xe8b,0xecb,0xf0b,0xf4b,0xf8b,0xfcb,
-0x100b,0x104b,0x108b,0x10cb,0x110b,0x114b,0x118b,0x11cb,5,5,5,5,5,0x685,5,5,
+5,0x785,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+0x5c5,5,5,5,5,5,5,5,0x685,5,0x645,5,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,0x5c5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,0x7985,0x7c5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,0x685,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,0x7845,5,5,5,5,
+5,5,5,5,0x605,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,0x685,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,0x705,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,0x585,5,5,0x705,5,5,5,0x7885,5,0x605,5,5,
+0x1e45,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+0x7985,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,0x7a85,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,0x785,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,0x5c5,5,5,5,
-5,5,5,5,0x685,5,0x645,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,0x5c5,5,0x745,5,0x6c5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,0x7985,0x7c5,5,5,5,5,5,5,5,
+5,0x7c5,5,0x7845,0xa45,0xcc5,5,5,5,5,5,5,0xf45,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,0x7845,5,5,5,5,5,5,5,5,
-0x605,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,0x685,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,0x1e45,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,0x7985,5,5,5,
+5,0x605,0x605,0x605,0x605,5,5,5,5,5,5,5,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,0x7a85,5,5,5,5,5,
+5,5,5,0x645,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,0x585,5,5,5,5,5,5,5,0x585,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,0x5c5,5,0x745,5,0x6c5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,0x7c5,5,0x7845,
-0xa45,0xcc5,5,5,5,5,5,5,0xf45,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,0x605,0x605,0x605,
-0x605,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,0x645,
+5,5,5,5,5,5,5,5,5,5,0x585,5,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,0x585,5,5,5,5,5,5,5,0x585,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,0x785,0xa45,5,5,5,5,
+5,5,5,5,5,5,5,5,0x585,0x5c5,0x605,5,0x5c5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,0x585,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,0x7c5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,0x745,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,0x785,0xa45,5,5,5,5,5,5,5,5,
-5,5,5,5,0x585,0x5c5,0x605,5,0x5c5,5,5,5,5,5,5,5,
+5,5,0x705,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,0x785,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,0x7c5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,0x745,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,0x705,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,0x1e45,5,
+5,5,5,5,5,5,0x645,5,5,5,5,5,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,0x785,5,5,5,5,5,
+0x7885,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,0x5c5,5,5,5,5,0x5c5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,0x5c5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,0x7845,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,0x1e45,5,5,5,5,5,
-5,5,0x645,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,0x7885,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,0x6c5,5,
+5,5,5,5,0x1e45,5,5,5,5,5,5,5,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,0x5c5,5,5,5,5,0x5c5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,0x5c5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,0x7845,5,5,5,5,5,5,
+0x6c5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,0x545,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,0,0,0,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,0x6c5,5,5,5,5,5,
-0x1e45,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,0x6c5,5,5,5,
+5,4,5,5,5,5,5,5,5,5,5,5,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0,0,0,0,0,0,0,0,0,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,4,0x17,0x17,0x17,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,0x545,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,0,0,0,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,4,5,5,
-5,5,5,5,5,5,5,5,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,
-0,0,0,0,0,0,0,0,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,4,0x17,0x17,0x17,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,0x49,0x89,0xc9,0x109,
-0x149,0x189,0x1c9,0x209,0x249,0x289,5,5,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,1,2,1,2,
+0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,5,5,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,
-1,2,1,2,1,2,1,2,4,4,6,6,1,2,1,2,
-1,2,1,2,1,2,1,2,1,2,5,6,7,7,7,0x17,
-6,6,6,6,6,6,6,6,6,6,0x17,4,5,5,5,5,
-5,5,0x58a,0x5ca,0x60a,0x64a,0x68a,0x6ca,0x70a,0x74a,0x78a,0x54a,6,6,0x17,0x17,
-0x17,0x17,0x17,0x17,0,0,0,0,0,0,0,0,0x1a,0x1a,0x1a,0x1a,
+1,2,1,2,1,2,1,2,1,2,1,2,4,4,6,6,
+1,2,1,2,1,2,1,2,1,2,1,2,1,2,5,6,
+7,7,7,0x17,6,6,6,6,6,6,6,6,6,6,0x17,4,
+5,5,5,5,5,5,0x58a,0x5ca,0x60a,0x64a,0x68a,0x6ca,0x70a,0x74a,0x78a,0x54a,
+6,6,0x17,0x17,0x17,0x17,0x17,0x17,0,0,0,0,0,0,0,0,
0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,
-0x1a,0x1a,0x1a,4,4,4,4,4,4,4,4,4,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,4,
-4,1,2,5,4,4,2,5,5,5,5,5,0x1a,0x1a,1,2,
-1,2,1,2,1,2,1,2,1,2,1,2,2,2,1,2,
-1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,
-4,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,
-1,2,1,2,1,2,1,2,4,0x1a,0x1a,1,2,1,2,5,
-1,2,1,2,2,2,1,2,1,2,1,2,1,2,1,2,
-1,2,1,1,1,1,1,2,1,1,1,1,1,2,1,2,
-1,2,1,2,1,2,1,2,1,1,1,1,2,1,2,0,
-0,0,0,0,1,2,0,2,0,2,1,2,1,2,0,0,
-0,0,0,0,5,5,6,5,5,5,6,5,5,5,5,6,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,8,8,6,6,8,0x1b,0x1b,0x1b,0x1b,
-6,0,0,0,0x34cb,0x344b,0x3ccb,0x37cb,0x35cb,0x3fcb,0x1b,0x1b,0x19,0x1b,0,0,
-0,0,0,0,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,0x17,0x17,0x17,0x17,0,0,0,0,
-0,0,0,0,8,8,8,8,6,6,0,0,0,0,0,0,
-0,0,0x17,0x17,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,
-0,0,0,0,8,8,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,8,8,8,8,8,8,8,8,8,8,8,8,
-6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,
-6,6,5,5,5,5,5,5,0x17,0x17,0x17,5,0x17,5,5,6,
-5,5,5,5,5,5,6,6,6,6,6,6,6,6,0x17,0x17,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,6,6,6,6,6,6,6,6,6,6,6,8,8,
-0,0,0,0,0,0,0,0,0,0,0,0x17,8,0x17,0x17,0x17,
-0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0,4,0x49,0x89,0xc9,0x109,
-0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,0,0,0x17,0x17,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,6,
-8,8,6,6,6,6,8,8,6,6,8,8,5,5,5,5,
-5,6,4,5,5,5,5,5,5,5,5,5,0x49,0x89,0xc9,0x109,
-0x149,0x189,0x1c9,0x209,0x249,0x289,5,5,5,5,5,0,5,5,5,5,
-5,5,5,5,5,6,6,6,6,6,6,8,8,6,6,8,
-8,6,6,0,0,0,0,0,0,0,0,0,5,5,5,6,
-5,5,5,5,5,5,5,5,6,8,0,0,0x49,0x89,0xc9,0x109,
-0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,0x17,0x17,0x17,0x17,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,4,5,5,5,
-5,5,5,0x1b,0x1b,0x1b,5,8,6,8,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,6,5,6,6,
-6,5,5,6,6,5,5,5,5,5,6,6,5,6,5,0,
+0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,4,4,4,4,4,4,4,4,4,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,5,5,4,0x17,0x17,5,5,5,5,
-5,5,5,5,5,5,5,8,6,6,8,8,0x17,0x17,5,4,
-4,8,6,0,0,0,0,0,0,0,0,0,0,5,5,5,
-5,5,5,0,0,5,5,5,5,5,5,0,0,5,5,5,
-5,5,5,0,0,0,0,0,0,0,0,0,5,5,5,5,
-5,5,5,0,5,5,5,5,5,5,5,0,2,2,2,2,
+0,0,4,4,4,1,2,5,4,4,2,5,5,5,5,5,
+0x1a,0x1a,1,2,1,2,1,2,1,2,1,2,1,2,1,2,
+2,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,
+1,2,1,2,4,2,2,2,2,2,2,2,2,1,2,1,
+2,1,1,2,1,2,1,2,1,2,1,2,4,0x1a,0x1a,1,
+2,1,2,5,1,2,1,2,2,2,1,2,1,2,1,2,
+1,2,1,2,1,2,1,1,1,1,1,2,1,1,1,1,
+1,2,1,2,1,2,1,2,1,2,1,2,1,1,1,1,
+2,1,2,0,0,0,0,0,1,2,0,2,0,2,1,2,
+1,2,0,0,0,0,0,0,5,5,6,5,5,5,6,5,
+5,5,5,6,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,8,8,6,6,8,
+0x1b,0x1b,0x1b,0x1b,6,0,0,0,0x34cb,0x344b,0x3ccb,0x37cb,0x35cb,0x3fcb,0x1b,0x1b,
+0x19,0x1b,0,0,0,0,0,0,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,0x17,0x17,0x17,0x17,
+0,0,0,0,0,0,0,0,8,8,8,8,6,6,0,0,
+0,0,0,0,0,0,0x17,0x17,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,
+0x249,0x289,0,0,0,0,0,0,8,8,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,8,8,8,8,8,8,8,8,
+8,8,8,8,6,6,6,6,6,6,6,6,6,6,6,6,
+6,6,6,6,6,6,5,5,5,5,5,5,0x17,0x17,0x17,5,
+0x17,5,5,6,5,5,5,5,5,5,6,6,6,6,6,6,
+6,6,0x17,0x17,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,6,6,6,6,6,6,6,6,6,
+6,6,8,8,0,0,0,0,0,0,0,0,0,0,0,0x17,
+8,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0,4,
+0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,0,0,0x17,0x17,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,6,8,8,6,6,6,6,8,8,6,6,8,8,
+5,5,5,5,5,6,4,5,5,5,5,5,5,5,5,5,
+0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,5,5,5,5,5,0,
+5,5,5,5,5,5,5,5,5,6,6,6,6,6,6,8,
+8,6,6,8,8,6,6,0,0,0,0,0,0,0,0,0,
+5,5,5,6,5,5,5,5,5,5,5,5,6,8,0,0,
+0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,0x17,0x17,0x17,0x17,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+4,5,5,5,5,5,5,0x1b,0x1b,0x1b,5,8,6,8,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+6,5,6,6,6,5,5,6,6,5,5,5,5,5,6,6,
+5,6,5,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,5,5,4,0x17,0x17,
+5,5,5,5,5,5,5,5,5,5,5,8,6,6,8,8,
+0x17,0x17,5,4,4,8,6,0,0,0,0,0,0,0,0,0,
+0,5,5,5,5,5,5,0,0,5,5,5,5,5,5,0,
+0,5,5,5,5,5,5,0,0,0,0,0,0,0,0,0,
+5,5,5,5,5,5,5,0,5,5,5,5,5,5,5,0,
2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
-2,2,2,2,2,2,2,0x1a,4,4,4,4,2,2,2,2,
-2,2,2,2,2,4,0x1a,0x1a,0,0,0,0,2,2,2,2,
-2,2,2,2,2,2,2,2,2,2,2,2,5,5,5,8,
-8,6,8,8,6,8,8,0x17,8,6,0,0,0x49,0x89,0xc9,0x109,
-0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,0,0,0,0,5,5,5,5,
-0,0,0,0,0,0,0,0,0,0,0,0,5,5,5,5,
+2,2,2,2,2,2,2,2,2,2,2,0x1a,4,4,4,4,
+2,2,2,2,2,2,2,2,2,4,0x1a,0x1a,0,0,0,0,
+2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
+5,5,5,8,8,6,8,8,6,8,8,0x17,8,6,0,0,
+0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,0,0,0,0,
+5,5,5,5,0,0,0,0,0,0,0,0,0,0,0,0,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,0,0,0,0,
+5,5,5,5,5,5,5,0,0,0,0,5,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,0,0,0,0,5,5,5,5,
-5,5,5,0,0,0,0,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,0x12,0x12,0x12,0x12,
0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,
-0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x11,0x11,0x11,0x11,
+0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,0x12,
0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,
-0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,5,5,5,5,
-5,5,5,5,5,5,5,0x605,5,5,5,5,5,5,5,0x7c5,
-5,5,5,5,0x5c5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,0x6c5,5,0x6c5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,0x7c5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,0,0,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,0x18,5,5,
-5,5,5,5,5,5,5,5,5,5,5,0,5,5,5,5,
-5,0,5,0,5,5,0,5,5,0,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,2,2,2,2,2,2,2,0,0,0,0,0,
-0,0,0,0,0,0,0,2,2,2,2,2,0,0,0,0,
-0,5,6,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,
-0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,0x15,0x14,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,0,0,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,0,0,0,0,0,0,0,0x1b,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-5,5,5,5,5,5,5,5,5,5,5,5,0x19,0x1b,0x1b,0x1b,
-6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,
-0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x14,0x15,0x17,0,0,0,0,0,0,
-6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,
-0x17,0x13,0x13,0x16,0x16,0x14,0x15,0x14,0x15,0x14,0x15,0x14,0x15,0x14,0x15,0x14,
-0x15,0x17,0x17,0x14,0x15,0x17,0x17,0x17,0x17,0x16,0x16,0x16,0x17,0x17,0x17,0,
-0x17,0x17,0x17,0x17,0x13,0x14,0x15,0x14,0x15,0x14,0x15,0x17,0x17,0x17,0x18,0x13,
-0x18,0x18,0x18,0,0x17,0x19,0x17,0x17,0,0,0,0,5,5,5,5,
-5,0,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,
+5,5,5,5,5,5,5,5,5,5,5,0x605,5,5,5,5,
+5,5,5,0x7c5,5,5,5,5,0x5c5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,0x6c5,5,0x6c5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,0,0,0x10,0,0,5,5,5,5,5,5,0,0,5,5,
-5,5,5,5,0,0,5,5,5,5,5,5,0,0,5,5,
-5,0,0,0,0x19,0x19,0x18,0x1a,0x1b,0x19,0x19,0,0x1b,0x18,0x18,0x18,
-0x18,0x1b,0x1b,0,0,0,0,0,0,0,0,0,0,0x10,0x10,0x10,
-0x1b,0x1b,0,0,0,0x17,0x17,0x17,0x19,0x17,0x17,0x17,0x14,0x15,0x17,0x18,
-0x17,0x13,0x17,0x17,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0x17,0x17,
-0x18,0x18,0x18,0x17,0x1a,2,2,2,2,2,2,2,2,2,2,2,
-2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,0x14,
-0x18,0x15,0x18,0x14,0x15,0x17,0x14,0x15,0x17,0x17,5,5,5,5,5,5,
-5,5,5,5,4,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,0x7c5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,0,0,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,4,4,5,5,5,5,5,5,5,5,
-5,5,5,5,0,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,0,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,0,
-5,5,0,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,0,0,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,0,0,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,0,
-0,0,0,0,0xb00b,0xb80b,0x784b,0x804b,0x884b,0x904b,0x984b,0xa04b,0xa84b,0xb04b,0xb84b,0x788b,
-0x808b,0x888b,0x908b,0x988b,0xa08b,0xa88b,0xb08b,0xb88b,0,0,0,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x17,0x17,0x17,0,0,0,0,0x58b,0x5cb,0x60b,0x64b,0x68b,
-0x6cb,0x70b,0x74b,0x78b,0x7cb,0xa4b,0xccb,0xf4b,0x11cb,0x144b,0x16cb,0x194b,0x1bcb,0x1e4b,0x800b,0x880b,
-0x900b,0x980b,0xa00b,0xa80b,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0xcca,0x11ca,0x11ca,0x11ca,0x11ca,0x1e4a,0x880a,
-0x980a,0x980a,0x980a,0x980a,0x980a,0x784a,0x984a,0x68a,0x11ca,0x344b,0x344b,0x388b,0x3ccb,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x54b,0x34cb,0x1b,0x1b,0x1b,0,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0,
-0x34ca,0x344a,0x58a,0x68a,0x11ca,0x980a,0x984a,0x988a,0x68a,0x7ca,0x11ca,0x1e4a,0x980a,0x784a,0x984a,0x68a,
-0x7ca,0x11ca,0x1e4a,0x980a,0x784a,0x788a,0x988a,0x7ca,0x58a,0x58a,0x58a,0x5ca,0x5ca,0x5ca,0x5ca,0x68a,
-0x1b,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,6,0,0,
+5,0x18,5,5,5,5,5,5,5,5,5,5,5,5,5,0,
+5,5,5,5,5,0,5,0,5,5,0,5,5,0,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-6,0x58b,0x5cb,0x60b,0x64b,0x68b,0x6cb,0x70b,0x74b,0x78b,0x7cb,0xa4b,0xccb,0xf4b,0x11cb,0x144b,
-0x16cb,0x194b,0x1bcb,0x1e4b,0x800b,0x880b,0x900b,0x980b,0xa00b,0xa80b,0xb00b,0xb80b,0,0,0,0,
-0x58b,0x68b,0x7cb,0x11cb,0,0,0,0,0,0,0,0,0,5,5,5,
+5,5,5,5,5,5,5,5,2,2,2,2,2,2,2,0,
+0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,2,
+0,0,0,0,0,5,6,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,
+0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0x1a,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,5,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,0x1bca,5,5,5,5,5,5,5,5,0xb80a,0,0,0,0,0,
+5,5,5,5,5,5,5,5,5,5,0x15,0x14,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,0,0,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,0,0,0,0,
+0,0,0,0x1b,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,5,5,5,5,5,5,5,5,5,5,5,5,
+0x19,0x1b,0x1b,0x1b,6,6,6,6,6,6,6,6,6,6,6,6,
+6,6,6,6,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x14,0x15,0x17,0,0,
+0,0,0,0,6,6,6,6,6,6,6,6,6,6,6,6,
+6,6,6,6,0x17,0x13,0x13,0x16,0x16,0x14,0x15,0x14,0x15,0x14,0x15,0x14,
+0x15,0x14,0x15,0x14,0x15,0x17,0x17,0x14,0x15,0x17,0x17,0x17,0x17,0x16,0x16,0x16,
+0x17,0x17,0x17,0,0x17,0x17,0x17,0x17,0x13,0x14,0x15,0x14,0x15,0x14,0x15,0x17,
+0x17,0x17,0x18,0x13,0x18,0x18,0x18,0,0x17,0x19,0x17,0x17,0,0,0,0,
+5,5,5,5,5,0,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,0,0,0x10,0,0,5,5,5,5,5,5,
+0,0,5,5,5,5,5,5,0,0,5,5,5,5,5,5,
+0,0,5,5,5,0,0,0,0x19,0x19,0x18,0x1a,0x1b,0x19,0x19,0,
+0x1b,0x18,0x18,0x18,0x18,0x1b,0x1b,0,0,0,0,0,0,0,0,0,
+0,0x10,0x10,0x10,0x1b,0x1b,0,0,0,0x17,0x17,0x17,0x19,0x17,0x17,0x17,
+0x14,0x15,0x17,0x18,0x17,0x13,0x17,0x17,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,
+0x249,0x289,0x17,0x17,0x18,0x18,0x18,0x17,0x1a,2,2,2,2,2,2,2,
+2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
+2,2,2,0x14,0x18,0x15,0x18,0x14,0x15,0x17,0x14,0x15,0x17,0x17,5,5,
+5,5,5,5,5,5,5,5,4,5,5,5,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,6,6,6,6,6,0,0,0,0,0,
+5,5,5,5,5,5,5,5,5,5,4,4,5,5,5,5,
+5,5,5,5,5,5,5,5,0,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,0,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,0,0x17,
-5,5,5,5,0,0,0,0,5,5,5,5,5,5,5,5,
-0x17,0x58a,0x5ca,0x7ca,0xa4a,0x1e4a,0,0,0,0,0,0,0,0,0,0,
-2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
+5,5,5,0,5,5,0,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,0,0,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,0,0,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,0,0,0,0,0,0xb00b,0xb80b,0x784b,0x804b,0x884b,0x904b,0x984b,0xa04b,
+0xa84b,0xb04b,0xb84b,0x788b,0x808b,0x888b,0x908b,0x988b,0xa08b,0xa88b,0xb08b,0xb88b,0,0,0,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x17,0x17,0x17,0,0,0,0,0x58b,
+0x5cb,0x60b,0x64b,0x68b,0x6cb,0x70b,0x74b,0x78b,0x7cb,0xa4b,0xccb,0xf4b,0x11cb,0x144b,0x16cb,0x194b,
+0x1bcb,0x1e4b,0x800b,0x880b,0x900b,0x980b,0xa00b,0xa80b,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0xcca,0x11ca,0x11ca,
+0x11ca,0x11ca,0x1e4a,0x880a,0x980a,0x980a,0x980a,0x980a,0x980a,0x784a,0x984a,0x68a,0x11ca,0x344b,0x344b,0x388b,
+0x3ccb,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x54b,0x34cb,
+0x1b,0x1b,0x1b,0,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
+0x1b,0,0,0,0x34ca,0x344a,0x58a,0x68a,0x11ca,0x980a,0x984a,0x988a,0x68a,0x7ca,0x11ca,0x1e4a,
+0x980a,0x784a,0x984a,0x68a,0x7ca,0x11ca,0x1e4a,0x980a,0x784a,0x788a,0x988a,0x7ca,0x58a,0x58a,0x58a,0x5ca,
+0x5ca,0x5ca,0x5ca,0x68a,0x1b,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
+0x1b,6,0,0,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,6,0x58b,0x5cb,0x60b,0x64b,0x68b,0x6cb,0x70b,0x74b,0x78b,0x7cb,0xa4b,
+0xccb,0xf4b,0x11cb,0x144b,0x16cb,0x194b,0x1bcb,0x1e4b,0x800b,0x880b,0x900b,0x980b,0xa00b,0xa80b,0xb00b,0xb80b,
+0,0,0,0,0x58b,0x68b,0x7cb,0x11cb,0,0,0,0,0,0,0,0,
+0,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,0x1bca,5,5,5,5,5,5,5,5,0xb80a,0,
+0,0,0,0,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,6,6,6,6,6,0,
+0,0,0,0,5,5,5,5,5,5,5,5,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,0,0,
-0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,0,0,0,0,
-1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+5,5,0,0x17,5,5,5,5,0,0,0,0,5,5,5,5,
+5,5,5,5,0x17,0x58a,0x5ca,0x7ca,0xa4a,0x1e4a,0,0,0,0,0,0,
+0,0,0,0,2,2,2,2,2,2,2,2,2,2,2,2,
+2,2,2,2,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,0,0,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,
+0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,
+1,1,1,1,2,2,2,2,2,2,2,2,2,2,2,2,
2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
-2,2,2,2,2,2,2,2,2,2,2,2,0,0,0,0,
-1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
-1,1,1,1,0,0,0,0,2,2,2,2,2,2,2,2,
+0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,
+1,1,1,1,1,1,1,1,0,0,0,0,2,2,2,2,
+2,2,2,2,5,5,5,5,5,5,5,5,0,0,0,0,
+0,0,0,0,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,0,0,0,0,0,0,0,0,0,0,0,0x17,
+1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,
+2,2,0,2,2,2,2,2,2,2,2,2,2,2,2,2,
+2,2,0,2,2,2,2,2,2,2,0,2,2,0,0,0,
+1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,
+1,1,1,0,1,1,0,2,2,2,2,2,2,2,2,2,
5,5,5,5,5,5,5,5,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,
+4,0,4,4,4,4,4,4,4,4,4,0,0,0,0,0,
+4,4,4,4,4,4,0,4,4,4,4,4,4,4,4,4,
+4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-0,0,0,0,0,0,0,0,0,0,0,0x17,1,1,1,1,
-1,1,1,1,1,1,1,0,1,1,1,1,2,2,0,2,
-2,2,2,2,2,2,2,2,2,2,2,2,2,2,0,2,
-2,2,2,2,2,2,0,2,2,0,0,0,1,1,1,1,
-1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,0,
-1,1,0,2,2,2,2,2,2,2,2,2,5,5,5,5,
-5,5,5,5,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,4,4,4,4,
-4,4,4,4,4,4,4,4,4,4,4,4,4,0,4,4,
-4,4,4,4,4,4,4,0,0,0,0,0,4,4,4,4,
-4,4,0,4,4,4,4,4,4,4,4,4,4,4,4,4,
-4,4,4,4,4,4,4,4,4,4,4,4,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,0,5,5,0,0,0,5,0,0,5,5,5,5,5,
-5,5,0,0,5,0,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,0,0x17,
-0x58b,0x5cb,0x60b,0x7cb,0xa4b,0x1e4b,0x784b,0x788b,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,0x1b,
-0x1b,0x58b,0x5cb,0x60b,0x64b,0x68b,0x7cb,0xa4b,0,0,0,0,0,0,0,0x58b,
-0x5cb,0x60b,0x64b,0x64b,0x68b,0x7cb,0xa4b,0x1e4b,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,0,5,5,0,0,
-0,0,0,0x58b,0x68b,0x7cb,0xa4b,0x1e4b,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,0x58b,0x7cb,
-0xa4b,0x1e4b,0x5cb,0x60b,0,0,0,0x17,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,0,0,0,0,0,0x17,0xa04b,0xa84b,0xb04b,0xb84b,0x788b,0x808b,0x888b,0x908b,
-0x988b,0xa08b,0xa88b,0xb08b,0xb88b,0x78cb,0x80cb,0x88cb,0x90cb,0x98cb,0xa0cb,0xa8cb,0xb0cb,0xb8cb,0x36cb,0x354b,
-0x34cb,0x348b,0x46cb,0x344b,0x4ecb,0x388b,0x3ccb,0x454b,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-0,0,0,0,0x5ecb,0x344b,5,5,0x58b,0x5cb,0x60b,0x64b,0x68b,0x6cb,0x70b,0x74b,
-0x78b,0x7cb,0xa4b,0xccb,0xf4b,0x11cb,0x144b,0x16cb,0,0,0x1e4b,0x800b,0x880b,0x900b,0x980b,0xa00b,
-0xa80b,0xb00b,0xb80b,0x784b,0x804b,0x884b,0x904b,0x984b,0x30b,0x34b,0x38b,0x3cb,0x7cb,0xa4b,0x1e4b,0x784b,
-0x344b,0,0,0,0,0,0,0,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,
-0x17,0,0,0,0,0,0,0,5,6,6,6,0,6,6,0,
-0,0,0,0,6,6,6,6,5,5,5,5,0,5,5,5,
-0,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,0,0,6,6,6,0,
-0,0,0,6,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,0,5,5,0,0,0,5,0,0,5,
+5,5,5,5,5,5,0,0,5,0,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,0,0x17,0x58b,0x5cb,0x60b,0x7cb,0xa4b,0x1e4b,0x784b,0x788b,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,0x58b,0x11cb,0x17,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,0x1b,0x1b,0x58b,0x5cb,0x60b,0x64b,0x68b,0x7cb,0xa4b,0,0,0,0,
+0,0,0,0x58b,0x5cb,0x60b,0x64b,0x64b,0x68b,0x7cb,0xa4b,0x1e4b,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,0,
+5,5,0,0,0,0,0,0x58b,0x68b,0x7cb,0xa4b,0x1e4b,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,0x58b,0x7cb,0xa4b,5,5,5,5,5,6,6,0,0,0,0,0x58b,
-0x68b,0x7cb,0xa4b,0x1e4b,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0,0,0,0,0,
-0,0,0,0,5,5,5,5,5,5,5,5,0x1b,5,5,5,
+5,5,0x58b,0x7cb,0xa4b,0x1e4b,0x5cb,0x60b,0,0,0,0x17,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,0,0,0,0x17,0x17,0x17,0x17,0x17,0x17,0x17,
+5,5,5,5,5,5,0,0,0,0,0,0x17,0xa04b,0xa84b,0xb04b,0xb84b,
+0x788b,0x808b,0x888b,0x908b,0x988b,0xa08b,0xa88b,0xb08b,0xb88b,0x78cb,0x80cb,0x88cb,0x90cb,0x98cb,0xa0cb,0xa8cb,
+0xb0cb,0xb8cb,0x36cb,0x354b,0x34cb,0x348b,0x46cb,0x344b,0x4ecb,0x388b,0x3ccb,0x454b,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,0,0,0x58b,0x5cb,0x60b,0x64b,0x7cb,0xa4b,0x1e4b,0x784b,
+5,5,5,5,0,0,0,0,0x5ecb,0x344b,5,5,0x58b,0x5cb,0x60b,0x64b,
+0x68b,0x6cb,0x70b,0x74b,0x78b,0x7cb,0xa4b,0xccb,0xf4b,0x11cb,0x144b,0x16cb,0,0,0x1e4b,0x800b,
+0x880b,0x900b,0x980b,0xa00b,0xa80b,0xb00b,0xb80b,0x784b,0x804b,0x884b,0x904b,0x984b,0x30b,0x34b,0x38b,0x3cb,
+0x7cb,0xa4b,0x1e4b,0x784b,0x344b,0,0,0,0,0,0,0,0x17,0x17,0x17,0x17,
+0x17,0x17,0x17,0x17,0x17,0,0,0,0,0,0,0,5,6,6,6,
+0,6,6,0,0,0,0,0,6,6,6,6,5,5,5,5,
+0,5,5,5,0,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,0,0,
+6,6,6,0,0,0,0,6,5,5,5,5,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,0,0,0,0,0,0x58b,0x5cb,0x60b,0x64b,0x7cb,0xa4b,0x1e4b,0x784b,
+5,5,5,5,5,0x58b,0x11cb,0x17,5,5,5,5,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,0,0,0,0,0,0,0,0x17,0x17,0x17,0x17,0,0,0,
-0,0,0,0,0,0,0,0,0,0x58b,0x5cb,0x60b,0x64b,0x7cb,0xa4b,0x1e4b,
+5,5,5,5,5,0x58b,0x7cb,0xa4b,5,5,5,5,5,6,6,0,
+0,0,0,0x58b,0x68b,0x7cb,0xa4b,0x1e4b,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0,
+0,0,0,0,0,0,0,0,5,5,5,5,5,5,5,5,
+0x1b,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,0,0,0,0x17,0x17,0x17,
+0x17,0x17,0x17,0x17,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,0,0,0x58b,0x5cb,0x60b,0x64b,
+0x7cb,0xa4b,0x1e4b,0x784b,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,0,0,0,0,0,0x58b,0x5cb,0x60b,0x64b,
+0x7cb,0xa4b,0x1e4b,0x784b,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,0,0,0,0,0,0,0,0x17,0x17,0x17,
+0x17,0,0,0,0,0,0,0,0,0,0,0,0,0x58b,0x5cb,0x60b,
+0x64b,0x7cb,0xa4b,0x1e4b,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,5,5,5,5,5,5,5,5,5,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-5,5,5,5,5,5,5,5,5,0,0,0,0,0,0,0,
+0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,
+1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,
+0,0,0,0,2,2,2,2,2,2,2,2,2,2,2,2,
+2,2,2,2,2,2,2,0,0,0,0,0,0,0,0x58b,0x68b,
+0x7cb,0x11cb,0x1e4b,0x784b,5,5,5,5,6,6,6,6,0,0,0,0,
+0,0,0,0,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,
+0,0,0,0,0x30b,0x34b,0x38b,0x3cb,0x40b,0x44b,0x48b,0x4cb,0x50b,0x7cb,0xa4b,0xccb,
+0xf4b,0x11cb,0x144b,0x16cb,0x194b,0x1bcb,0x1e4b,0x800b,0x880b,0x900b,0x980b,0xa00b,0xa80b,0xb00b,0xb80b,0x344b,
+0x34cb,0x348b,0x388b,0,5,5,5,5,5,5,5,5,5,5,0,6,
+6,0x13,0,0,5,5,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
-1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
-2,2,2,0,0,0,0,0,0,0,0x58b,0x68b,0x7cb,0x11cb,0x1e4b,0x784b,
-5,5,5,5,6,6,6,6,0,0,0,0,0,0,0,0,
-0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,0,0,0,0,
-0x30b,0x34b,0x38b,0x3cb,0x40b,0x44b,0x48b,0x4cb,0x50b,0x7cb,0xa4b,0xccb,0xf4b,0x11cb,0x144b,0x16cb,
-0x194b,0x1bcb,0x1e4b,0x800b,0x880b,0x900b,0x980b,0xa00b,0xa80b,0xb00b,0xb80b,0x344b,0x34cb,0x348b,0x388b,0,
-5,5,5,5,5,5,5,5,5,5,0,6,6,0x13,0,0,
-5,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,6,6,6,5,5,5,5,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,0x58b,0x5cb,0x60b,
-0x64b,0x68b,0x7cb,0xa4b,0xccb,0x1e4b,0x344b,5,0,0,0,0,0,0,0,0,
+5,5,5,5,5,0x58b,0x5cb,0x60b,0x64b,0x68b,0x7cb,0xa4b,0xccb,0x1e4b,0x344b,5,
+0,0,0,0,0,0,0,0,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,6,6,6,6,6,6,
+6,6,6,6,6,0x58b,0x7cb,0xa4b,0x1e4b,0x17,0x17,0x17,0x17,0x17,0,0,
+0,0,0,0,5,5,6,6,6,6,0x17,0x17,0x17,0x17,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,5,5,5,5,5,0x58b,0x5cb,0x60b,0x64b,0x7cb,0xa4b,0x1e4b,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0x144b,0x16cb,0x194b,0x1bcb,0x1e4b,0x784b,0x49,0x89,0xc9,0x109,0x149,0x189,
+0x1c9,0x209,0x249,0x289,6,5,5,6,6,5,0,0,0,0,0,0,
+0,0,0,6,8,6,8,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,6,6,6,6,6,6,6,0x17,0x17,0x17,0x17,0x17,
+0x17,0x17,0,0,0,0,0x30b,0x34b,0x38b,0x3cb,0x40b,0x44b,0x48b,0x4cb,0x50b,0x7cb,
+0xa4b,0xccb,0xf4b,0x11cb,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,8,8,8,6,6,6,6,8,8,6,6,0x17,
+0x17,0x10,0x17,0x17,0x17,0x17,6,0,0,0,0,0,0,0,0,0,
+0,0x10,0,0,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,0,0,0,0,0,0,0,0x49,0x89,0xc9,0x109,
+0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,0,0,0,0,5,5,5,5,
+5,5,5,6,6,6,6,6,8,6,6,6,6,6,6,6,
+6,0,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0x17,0x17,0x17,0x17,
+5,8,8,5,0,0,0,0,0,0,0,0,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,6,6,6,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,6,6,6,6,6,6,6,6,6,6,6,0x58b,0x7cb,0xa4b,
-0x1e4b,0x17,0x17,0x17,0x17,0x17,0,0,0,0,0,0,5,5,6,6,
-6,6,0x17,0x17,0x17,0x17,0,0,0,0,0,0,0,0,0,0,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,6,
+0x17,0x17,5,0,0,0,0,0,0,0,0,0,8,5,5,5,
+5,0x17,0x17,0x17,0x17,6,6,6,6,0x17,8,6,0x49,0x89,0xc9,0x109,
+0x149,0x189,0x1c9,0x209,0x249,0x289,5,0x17,5,0x17,0x17,0x17,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,8,
+8,8,6,6,6,6,6,6,6,6,6,8,0,0x58b,0x5cb,0x60b,
+0x64b,0x68b,0x6cb,0x70b,0x74b,0x78b,0x7cb,0xa4b,0xccb,0xf4b,0x11cb,0x144b,0x16cb,0x194b,0x1bcb,0x1e4b,
+0x784b,0,0,0,0,0,0,0,0,0,0,0,5,5,5,5,
+5,5,5,5,5,5,5,5,8,8,8,6,6,6,8,8,
+6,8,6,6,0x17,0x17,0x17,0x17,0x17,0x17,6,5,5,6,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,5,5,5,5,
-5,0x58b,0x5cb,0x60b,0x64b,0x7cb,0xa4b,0x1e4b,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0x144b,0x16cb,0x194b,0x1bcb,
-0x1e4b,0x784b,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,6,5,5,6,
-6,5,0,0,0,0,0,0,0,0,0,6,8,6,8,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,6,6,6,6,
-6,6,6,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0,0,0,0,0x30b,0x34b,
-0x38b,0x3cb,0x40b,0x44b,0x48b,0x4cb,0x50b,0x7cb,0xa4b,0xccb,0xf4b,0x11cb,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,8,8,8,6,
-6,6,6,8,8,6,6,0x17,0x17,0x10,0x17,0x17,0x17,0x17,6,0,
-0,0,0,0,0,0,0,0,0,0x10,0,0,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,0,0,0,
-0,0,0,0,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,
-0,0,0,0,5,5,5,5,5,5,5,6,6,6,6,6,
-8,6,6,6,6,6,6,6,6,0,0x49,0x89,0xc9,0x109,0x149,0x189,
-0x1c9,0x209,0x249,0x289,0x17,0x17,0x17,0x17,5,8,8,5,0,0,0,0,
-0,0,0,0,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,6,6,6,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,6,0x17,0x17,5,0,0,0,0,0,
-0,0,0,0,8,5,5,5,5,0x17,0x17,0x17,0x17,6,6,6,
-6,0x17,8,6,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,5,0x17,
-5,0x17,0x17,0x17,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,8,8,8,6,6,6,6,6,6,
-6,6,6,8,0,0x58b,0x5cb,0x60b,0x64b,0x68b,0x6cb,0x70b,0x74b,0x78b,0x7cb,0xa4b,
-0xccb,0xf4b,0x11cb,0x144b,0x16cb,0x194b,0x1bcb,0x1e4b,0x784b,0,0,0,0,0,0,0,
-0,0,0,0,5,5,5,5,5,5,5,5,5,5,5,5,
-8,8,8,6,6,6,8,8,6,8,6,6,0x17,0x17,0x17,0x17,
-0x17,0x17,6,0,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,0,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,0,5,0,5,5,5,5,0,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,0,5,
-5,5,5,5,5,5,5,5,5,0x17,0,0,0,0,0,0,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,0,
+5,0,5,5,5,5,0,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,0,5,5,5,5,5,5,5,5,5,
+5,0x17,0,0,0,0,0,0,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,8,8,8,6,6,6,6,6,
+6,6,6,0,0,0,0,0,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,
+0x249,0x289,0,0,0,0,0,0,5,5,8,8,0,0,6,6,
+6,6,6,6,6,0,0,0,6,6,6,6,6,0,0,0,
+0,0,0,0,0,0,0,0,6,6,8,8,0,5,5,5,
+5,5,5,5,5,0,0,5,5,0,0,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,0,5,5,5,5,5,5,
+5,0,5,5,0,5,5,5,5,5,0,6,6,5,8,8,
+6,8,8,8,8,0,0,8,8,0,0,8,8,8,0,0,
+5,0,0,0,0,0,0,8,0,0,0,0,0,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,8,8,8,6,6,6,6,6,6,6,6,
+8,8,6,6,6,8,6,5,5,5,5,0x17,0x17,0x17,0x17,0x17,
+0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0x17,0x17,0,0x17,6,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-8,8,8,6,6,6,6,6,6,6,6,0,0,0,0,0,
+8,8,8,6,6,6,6,6,6,8,6,8,8,8,8,6,
+6,8,6,6,5,5,0x17,5,0,0,0,0,0,0,0,0,
0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,0,0,0,0,
-5,5,8,8,0,0,6,6,6,6,6,6,6,0,0,0,
-6,6,6,6,6,0,0,0,0,0,0,0,0,0,0,0,
-6,6,8,8,0,5,5,5,5,5,5,5,5,0,0,5,
-5,0,0,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,0,5,5,5,5,5,5,5,0,5,5,0,5,5,5,
-5,5,0,6,6,5,8,8,6,8,8,8,8,0,0,8,
-8,0,0,8,8,8,0,0,5,0,0,0,0,0,0,8,
-0,0,0,0,0,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,8,8,8,
-6,6,6,6,6,6,6,6,8,8,6,6,6,8,6,5,
-5,5,5,0x17,0x17,0x17,0x17,0x17,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,
-0x249,0x289,0x17,0x17,0,0x17,6,5,5,5,0,0,0,0,0,0,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,8,
+8,8,6,6,6,6,0,0,8,8,8,8,6,6,8,6,
+6,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,
+0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,5,5,5,5,6,6,0,0,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+8,8,8,6,6,6,6,6,6,6,6,8,8,6,8,6,
+6,0x17,0x17,0x17,5,0,0,0,0,0,0,0,0,0,0,0,
+0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,0,0,0,0,
+0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+5,5,5,5,5,5,5,5,5,5,5,6,8,6,8,8,
+6,6,6,6,6,6,8,6,5,0x17,0,0,0,0,0,0,
+8,8,6,6,6,6,8,6,6,6,6,6,0,0,0,0,
+0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0x7cb,0xa4b,0x17,0x17,0x17,0x1b,
+5,5,5,5,5,5,5,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+5,5,5,5,5,5,5,5,5,5,5,5,8,8,8,6,
+6,6,6,6,6,6,6,6,8,6,6,0x17,0,0,0,0,
+0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0x7cb,0xa4b,0xccb,0xf4b,0x11cb,0x144b,
+0x16cb,0x194b,0x1bcb,0,0,0,0,0,0,0,0,0,0,0,0,5,
+8,5,8,6,0x17,0x17,0x17,0,0,0,0,0,0,0,0,0,
+0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,0,0,0,0,
+5,5,5,5,5,5,5,0,0,5,0,0,5,5,5,5,
+5,5,5,5,0,5,5,0,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,8,8,8,8,8,8,0,8,
+8,0,0,6,6,8,6,5,6,5,0x17,5,8,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,8,8,8,6,6,6,6,6,
-6,8,6,8,8,8,8,6,6,8,6,6,5,5,0x17,5,
-0,0,0,0,0,0,0,0,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,
-0x249,0x289,0,0,0,0,0,0,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,8,8,8,6,6,6,6,0,0,
-8,8,8,8,6,6,8,6,6,0x17,0x17,0x17,0x17,0x17,0x17,0x17,
-0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,
-5,5,5,5,6,6,0,0,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,8,8,8,6,6,6,6,6,
-6,6,6,8,8,6,8,6,6,0x17,0x17,0x17,5,0,0,0,
-0,0,0,0,0,0,0,0,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,
-0x249,0x289,0,0,0,0,0,0,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,
-0x17,0x17,0x17,0x17,0x17,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,
-0x249,0x289,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,8,8,8,6,6,6,6,
+0,0,6,6,8,8,8,8,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,6,6,6,6,6,
+6,8,5,6,6,6,6,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,6,
+0,0,0,0,0,0,0,0,5,6,6,6,6,6,6,8,
+8,6,6,6,5,5,5,5,5,6,6,6,6,6,6,6,
+6,6,6,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,0x17,0x17,0x17,0,0,0,0,0,
0,0,0,0,0,0,0,0,5,5,5,5,5,5,5,5,
-5,5,5,6,8,6,8,8,6,6,6,6,6,6,8,6,
-5,0x17,0,0,0,0,0,0,8,8,6,6,6,6,8,6,
-6,6,6,6,0,0,0,0,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,
-0x249,0x289,0x7cb,0xa4b,0x17,0x17,0x17,0x1b,5,5,5,5,5,5,5,0,
+5,5,5,5,5,5,5,5,5,5,6,6,6,6,6,6,
+6,6,6,6,6,6,6,8,6,6,0x17,0x17,0x17,5,0x17,0x17,
+5,0x17,0x17,0x17,0x17,0x17,0,0,0,0,0,0,0,0,0,0,
+0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0x58b,0x5cb,0x60b,0x64b,0x68b,0x6cb,
+0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,5,5,5,5,5,5,5,5,
-5,5,5,5,8,8,8,6,6,6,6,6,6,6,6,6,
-8,6,6,0x17,0,0,0,0,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,
-0x249,0x289,0x7cb,0xa4b,0xccb,0xf4b,0x11cb,0x144b,0x16cb,0x194b,0x1bcb,0,0,0,0,0,
-0,0,0,0,0,0,0,5,8,5,8,6,0x17,0x17,0x17,0,
+0x70b,0x74b,0x78b,0x7cb,0xa4b,0xccb,0xf4b,0x11cb,0x144b,0x16cb,0x194b,0x1bcb,0x1e4b,0,0,0,
+0x17,0x17,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,0,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,8,6,6,6,6,
+6,6,6,0,6,6,6,6,6,6,8,6,6,6,6,6,
+6,6,6,6,0,8,6,6,6,6,6,6,6,8,6,6,
+8,6,6,0,0,0,0,0,0,0,0,0,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,0,0,6,6,
+6,6,6,6,6,6,6,6,6,6,6,6,6,6,5,6,
0,0,0,0,0,0,0,0,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,
0x249,0x289,0,0,0,0,0,0,5,5,5,5,5,5,5,0,
-0,5,0,0,5,5,5,5,5,5,5,5,0,5,5,0,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-8,8,8,8,8,8,0,8,8,0,0,6,6,8,6,5,
-6,5,0x17,5,8,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-5,5,5,5,5,5,5,5,0,0,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,8,8,8,6,6,6,6,0,0,6,6,8,8,8,8,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,6,6,6,6,6,6,8,5,6,6,6,6,0x17,
-0x17,0x17,0x17,0x17,0x17,0x17,0x17,6,0,0,0,0,0,0,0,0,
-5,6,6,6,6,6,6,8,8,6,6,6,5,5,5,5,
-5,6,6,6,6,6,6,6,6,6,6,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-0x17,0x17,0x17,0,0,0,0,0,0,0,0,0,0,0,0,0,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,6,6,6,6,6,6,6,6,6,6,6,6,6,8,
-6,6,0x17,0x17,0x17,5,0x17,0x17,5,0x17,0x17,0x17,0x17,0x17,0,0,
-0,0,0,0,0,0,0,0,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,
-0x249,0x289,0x58b,0x5cb,0x60b,0x64b,0x68b,0x6cb,0x70b,0x74b,0x78b,0x7cb,0xa4b,0xccb,0xf4b,0x11cb,
-0x144b,0x16cb,0x194b,0x1bcb,0x1e4b,0,0,0,0x17,0x17,5,5,5,5,5,5,
+5,5,0,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,6,6,6,6,6,6,0,
+0,0,6,0,6,6,0,6,5,5,5,5,5,5,5,5,
+5,5,8,8,8,8,8,0,6,6,0,8,8,6,8,6,
+5,0,0,0,0,0,0,0,5,5,5,5,5,5,0,5,
+5,0,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,6,6,8,8,0x17,
+0x17,0,0,0,0,0,0,0,6,8,6,0x17,0x17,0x17,0x17,0x17,
+0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x17,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,
+0x249,0x289,0,0,0,0,0,0,6,6,5,8,5,5,5,5,
5,5,5,5,5,5,5,5,5,0,5,5,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,8,6,6,6,6,6,6,6,0,6,6,6,6,
-6,6,8,6,6,6,6,6,6,6,6,6,0,8,6,6,
-6,6,6,6,6,8,6,6,8,6,6,0,0,0,0,0,
-0,0,0,0,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,0,0,6,6,6,6,6,6,6,6,6,6,
-6,6,6,6,6,6,5,6,0,0,0,0,0,0,0,0,
-0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,0,0,0,0,
-5,5,5,5,5,5,5,0,5,5,0,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,6,6,6,6,6,6,0,0,0,6,0,6,6,0,6,
-5,5,5,5,5,5,5,5,5,5,8,8,8,8,8,0,
-6,6,0,8,8,6,8,6,5,0,0,0,0,0,0,0,
-5,5,5,5,5,5,0,5,5,0,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,6,6,8,8,0x17,0x17,0,0,0,0,0,0,0,
+8,8,6,6,6,6,6,0,0,0,8,8,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0x19,0x1b,0x1b,0x1b,
0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,
@@ -1215,8 +1226,10 @@ static const uint16_t propsTrie_index[22688]={
0,0,0,0,0,0,0,0,0,0,0,0,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,0x17,0x17,0,
0,0,0,0,0,0,0,0,0,0,0,0,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,0,0x10,0x10,0x10,0x10,
-0x10,0x10,0x10,0x10,0x10,0,0,0,0,0,0,0,0x49,0x89,0xc9,0x109,
+5,5,5,5,5,5,5,5,5,5,5,5,0x10,0x10,0x10,0x10,
+0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,6,5,5,5,
+5,5,5,6,6,6,6,6,6,6,6,6,6,6,6,6,
+6,6,0,0,0,0,0,0,0,0,0,0,0x49,0x89,0xc9,0x109,
0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,0,0,0,0,5,5,5,5,
5,5,5,5,5,5,5,5,5,5,5,5,5,5,0,0,
6,6,6,6,6,0x17,0,0,0,0,0,0,0,0,0,0,
@@ -1241,145 +1254,153 @@ static const uint16_t propsTrie_index[22688]={
5,5,5,5,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,4,4,4,4,0,4,4,4,4,4,4,4,
0,4,4,0,5,5,5,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,5,5,5,0,0,5,0,0,
+0,0,0,0,0,0,0,0,5,5,5,5,0,0,0,0,
+0,0,0,0,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,0,0,0,0,0,5,5,5,5,
+5,5,5,5,5,5,5,5,5,0,0,0,0,0,0,0,
+5,5,5,5,5,5,5,5,5,5,0,0,0x1b,6,6,0x17,
+0x10,0x10,0x10,0x10,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,5,5,5,0,0,0,0,0,0,0,0,0,
-0,0,0,0,5,5,5,5,0,0,0,0,0,0,0,0,
-5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,0,0,0,0,0,5,5,5,5,5,5,5,5,
-5,5,5,5,5,0,0,0,0,0,0,0,5,5,5,5,
-5,5,5,5,5,5,0,0,0x1b,6,6,0x17,0x10,0x10,0x10,0x10,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,6,6,6,6,
-6,6,6,6,6,6,6,6,6,6,0,0,6,6,6,6,
-6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,0,
-0,0,0,0,0,0,0,0,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,8,8,6,6,6,0x1b,0x1b,0x1b,8,8,8,
-8,8,8,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,6,6,6,6,6,
-6,6,6,0x1b,0x1b,6,6,6,6,6,6,6,0x1b,0x1b,0x1b,0x1b,
+6,6,6,6,6,6,6,6,6,6,6,6,6,6,0,0,
+6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,
+6,6,6,0,0,0,0,0,0,0,0,0,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,
+0,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,8,8,6,6,6,0x1b,0x1b,
+0x1b,8,8,8,8,8,8,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,6,
+6,6,6,6,6,6,6,0x1b,0x1b,6,6,6,6,6,6,6,
0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,6,6,6,6,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,6,6,6,0x1b,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0x54b,0x58b,0x5cb,0x60b,0x64b,0x68b,0x6cb,0x70b,
-0x74b,0x78b,0x7cb,0x80b,0x84b,0x88b,0x8cb,0x90b,0x94b,0x98b,0x9cb,0xa0b,0,0,0,0,
-0,0,0,0,0,0,0,0,0x58b,0x5cb,0x60b,0x64b,0x68b,0x6cb,0x70b,0x74b,
-0x78b,0x7cb,0xa4b,0xccb,0xf4b,0x11cb,0x144b,0x16cb,0x194b,0x1bcb,0x58b,0x5cb,0x60b,0x64b,0x68b,0x58b,
-0x68b,0,0,0,0,0,0,0,0x249,0x289,0x49,0x89,0xc9,0x109,0x149,0x189,
-0x1c9,0x209,0x249,0x289,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0x49,0x89,
-0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,1,1,1,1,1,1,1,1,
-1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
-1,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
-2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,1,
-1,1,1,1,1,1,2,2,2,2,2,2,2,0,2,2,
-2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,1,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,6,6,6,6,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,6,6,
+6,0x1b,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0x54b,0x58b,0x5cb,0x60b,
+0x64b,0x68b,0x6cb,0x70b,0x74b,0x78b,0x7cb,0x80b,0x84b,0x88b,0x8cb,0x90b,0x94b,0x98b,0x9cb,0xa0b,
+0,0,0,0,0,0,0,0,0,0,0,0,0x58b,0x5cb,0x60b,0x64b,
+0x68b,0x6cb,0x70b,0x74b,0x78b,0x7cb,0xa4b,0xccb,0xf4b,0x11cb,0x144b,0x16cb,0x194b,0x1bcb,0x58b,0x5cb,
+0x60b,0x64b,0x68b,0x58b,0x68b,0,0,0,0,0,0,0,0x249,0x289,0x49,0x89,
+0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,
+0x249,0x289,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,1,1,1,1,
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
-1,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
-2,2,2,2,2,2,2,2,2,2,2,2,1,0,1,1,
-0,0,1,0,0,1,1,0,0,1,1,1,1,0,1,1,
-1,1,1,1,1,1,2,2,2,2,0,2,0,2,2,2,
-2,2,2,2,0,2,2,2,2,2,2,2,2,2,2,2,
+1,1,1,1,1,1,2,2,2,2,2,2,2,2,2,2,
+2,2,2,2,2,2,2,2,2,2,2,2,1,1,1,1,
+1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,
+2,0,2,2,2,2,2,2,2,2,2,2,1,1,1,1,
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
-2,2,2,2,1,1,0,1,1,1,1,0,0,1,1,1,
-1,1,1,1,1,0,1,1,1,1,1,1,1,0,2,2,
+1,1,1,1,1,1,2,2,2,2,2,2,2,2,2,2,
2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
-2,2,2,2,2,2,2,2,1,1,0,1,1,1,1,0,
-1,1,1,1,1,0,1,0,0,0,1,1,1,1,1,1,
+1,0,1,1,0,0,1,0,0,1,1,0,0,1,1,1,
+1,0,1,1,1,1,1,1,1,1,2,2,2,2,0,2,
+0,2,2,2,2,2,2,2,0,2,2,2,2,2,2,2,
+2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,
+1,1,1,1,2,2,2,2,1,1,0,1,1,1,1,0,
+0,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,
1,0,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
-1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
-1,1,1,1,1,1,2,2,2,2,2,2,2,2,2,2,
+2,2,2,2,2,2,2,2,2,2,2,2,1,1,0,1,
+1,1,1,0,1,1,1,1,1,0,1,0,0,0,1,1,
+1,1,1,1,1,0,2,2,2,2,2,2,2,2,2,2,
+2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,
+1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,
2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
+2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,
+1,1,1,1,2,2,2,2,2,2,0,0,1,1,1,1,
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
-2,2,2,2,2,2,0,0,1,1,1,1,1,1,1,1,
-1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
-1,0x18,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
-2,2,2,2,2,2,2,2,2,2,2,0x18,2,2,2,2,
-2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
-1,1,1,1,1,1,1,1,1,1,1,0x18,2,2,2,2,
+1,1,1,1,1,0x18,2,2,2,2,2,2,2,2,2,2,
+2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,0x18,
+2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,
+1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0x18,
2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
-2,0x18,2,2,2,2,2,2,1,1,1,1,1,1,1,1,
-1,1,1,1,1,1,1,1,2,2,2,0x18,2,2,2,2,
-2,2,1,2,0,0,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,
-0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0,6,6,6,6,6,6,6,
-6,6,6,6,6,6,6,6,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,6,6,6,6,6,6,6,6,
-6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,0x1b,
-0x1b,0x1b,0x1b,6,6,6,6,6,6,6,6,6,6,6,6,6,
-6,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,6,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,6,0x1b,0x1b,0x17,0x17,0x17,0x17,0x17,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,6,6,6,6,6,
-6,6,6,0,6,6,6,6,6,6,6,6,6,6,6,6,
-6,6,6,6,6,0,0,6,6,6,6,6,2,2,2,2,
-2,2,2,2,2,2,5,2,2,2,2,2,2,2,2,2,
-2,2,2,2,2,2,2,2,2,2,2,0,6,6,0,6,
-6,0,6,6,6,6,6,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0x49,0x89,0xc9,0x109,
-0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,0,0,5,0x1b,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,5,5,5,5,
-5,5,5,5,5,5,5,5,5,0,0,0,6,6,6,6,
-6,6,6,4,4,4,4,4,4,4,0,0,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,6,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,5,5,5,5,
-5,5,5,5,5,5,5,5,6,6,6,6,0x49,0x89,0xc9,0x109,
-0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,0,0,0,0x19,5,5,5,5,
-5,5,5,0,5,5,5,5,0,5,5,0,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,0,5,5,5,5,
-5,0,0,0x58b,0x5cb,0x60b,0x64b,0x68b,0x6cb,0x70b,0x74b,0x78b,6,6,6,6,
-6,6,6,0,0,0,0,0,0,0,0,0,2,2,2,2,
-6,6,6,6,6,6,6,4,0,0,0,0,0x49,0x89,0xc9,0x109,
-0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,0,0,0x17,0x17,1,1,2,2,
+2,2,2,2,2,0x18,2,2,2,2,2,2,1,1,1,1,
+1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,0x18,
+2,2,2,2,2,2,1,2,0,0,0x49,0x89,0xc9,0x109,0x149,0x189,
+0x1c9,0x209,0x249,0x289,0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0,6,6,6,
+6,6,6,6,6,6,6,6,6,6,6,6,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,6,6,6,6,
+6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,
+6,6,6,0x1b,0x1b,0x1b,0x1b,6,6,6,6,6,6,6,6,6,
+6,6,6,6,6,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,6,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,6,0x1b,0x1b,0x17,0x17,0x17,0x17,0x17,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6,
+6,6,6,6,6,6,6,0,6,6,6,6,6,6,6,6,
+6,6,6,6,6,6,6,6,6,0,0,6,6,6,6,6,
+2,2,2,2,2,2,2,2,2,2,5,2,2,2,2,2,
+2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,0,
+0,0,0,0,0,2,2,2,2,2,2,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+6,6,0,6,6,0,6,6,6,6,6,0,0,0,0,0,
+4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+4,4,4,4,4,4,4,4,4,4,4,4,4,4,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,0,0,5,0x1b,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+5,5,5,5,5,5,5,5,5,5,5,5,5,0,0,0,
+6,6,6,6,6,6,6,4,4,4,4,4,4,4,0,0,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,6,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+5,5,5,5,5,5,5,5,5,5,5,5,6,6,6,6,
+0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,0,0,0,0x19,
+5,5,5,5,5,5,5,5,5,5,5,4,6,6,6,6,
+0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,0,0,0,0,
+5,5,5,5,5,5,5,0,5,5,5,5,0,5,5,0,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,0,
+5,5,5,5,5,0,0,0x58b,0x5cb,0x60b,0x64b,0x68b,0x6cb,0x70b,0x74b,0x78b,
+6,6,6,6,6,6,6,0,0,0,0,0,0,0,0,0,
+2,2,2,2,6,6,6,6,6,6,6,4,0,0,0,0,
+0x49,0x89,0xc9,0x109,0x149,0x189,0x1c9,0x209,0x249,0x289,0,0,0,0,0x17,0x17,
+1,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
-2,2,2,2,2,2,2,2,2,2,2,2,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0x58b,0x5cb,0x60b,
-0x64b,0x68b,0x6cb,0x70b,0x74b,0x78b,0x7cb,0xa4b,0xccb,0xf4b,0x11cb,0x144b,0x78cb,0x794b,0x814b,0x58b,
-0x5cb,0x60b,0x64b,0x68b,0x6cb,0x70b,0x74b,0x78b,0x1b,0x34cb,0x344b,0x3ccb,0x19,0x58b,0x5cb,0x788b,
-0x78cb,0,0,0,0,0,0,0,0,0,0,0,0x16cb,0x194b,0x1bcb,0x1e4b,
-0x800b,0x880b,0x900b,0x980b,0xa00b,0xa80b,0xb00b,0xb80b,0x784b,0x804b,0x884b,0x904b,0x984b,0xa04b,0xa84b,0xb04b,
-0xb84b,0x788b,0x808b,0x888b,0x908b,0x988b,0xa08b,0xa88b,0xb08b,0xb88b,0x78cb,0x80cb,0x984b,0xa04b,0xa84b,0xb04b,
-0xb84b,0x788b,0x808b,0x888b,0x908b,0x988b,0xa08b,0xa88b,0xb08b,0xb88b,0x1b,0x5cb,0x60b,0x64b,0x68b,0x6cb,
-0x70b,0x74b,0x78b,0x7cb,0x900b,0xa00b,0x804b,0x788b,0x344b,0x354b,0,0,0,0x58b,0x5cb,0x60b,
-0x64b,0x68b,0x6cb,0x70b,0x74b,0x78b,0x7cb,0xa4b,0xccb,0xf4b,0x11cb,0x144b,0x16cb,0x194b,0x1bcb,0x1e4b,
-0x800b,0x880b,0x900b,0x980b,0xa00b,0xa80b,0xb00b,0xb80b,0x784b,0x804b,0x884b,0x904b,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0x18,0x18,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,5,5,5,5,
-0,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,5,5,5,5,0,5,5,0,
-5,0,0,5,0,5,5,5,5,5,5,5,5,5,5,0,
-5,5,5,5,0,5,0,5,0,0,0,0,0,0,5,0,
-0,0,0,5,0,5,0,5,0,5,5,5,0,5,5,0,
-5,0,0,5,0,5,0,5,0,5,0,5,0,5,5,0,
-5,0,0,5,5,5,5,0,5,5,5,5,5,5,5,0,
-5,5,5,5,0,5,5,5,5,0,5,0,5,5,5,5,
-5,5,5,5,5,5,0,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,0,0,0,0,0,5,5,5,
-0,5,5,5,5,5,0,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,5,0,0,0,0,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0,0,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x2cb,0x2cb,0x30b,0x34b,
-0x38b,0x3cb,0x40b,0x44b,0x48b,0x4cb,0x50b,0x54b,0x54b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0x58b,0x5cb,0x60b,0x64b,0x68b,0x6cb,0x70b,0x74b,0x78b,0x7cb,0xa4b,0xccb,0xf4b,0x11cb,0x144b,
+0x78cb,0x794b,0x814b,0x58b,0x5cb,0x60b,0x64b,0x68b,0x6cb,0x70b,0x74b,0x78b,0x1b,0x34cb,0x344b,0x3ccb,
+0x19,0x58b,0x5cb,0x788b,0x78cb,0,0,0,0,0,0,0,0,0,0,0,
+0x16cb,0x194b,0x1bcb,0x1e4b,0x800b,0x880b,0x900b,0x980b,0xa00b,0xa80b,0xb00b,0xb80b,0x784b,0x804b,0x884b,0x904b,
+0x984b,0xa04b,0xa84b,0xb04b,0xb84b,0x788b,0x808b,0x888b,0x908b,0x988b,0xa08b,0xa88b,0xb08b,0xb88b,0x78cb,0x80cb,
+0x984b,0xa04b,0xa84b,0xb04b,0xb84b,0x788b,0x808b,0x888b,0x908b,0x988b,0xa08b,0xa88b,0xb08b,0xb88b,0x1b,0x5cb,
+0x60b,0x64b,0x68b,0x6cb,0x70b,0x74b,0x78b,0x7cb,0x900b,0xa00b,0x804b,0x788b,0x344b,0x354b,0,0,
+0,0x58b,0x5cb,0x60b,0x64b,0x68b,0x6cb,0x70b,0x74b,0x78b,0x7cb,0xa4b,0xccb,0xf4b,0x11cb,0x144b,
+0x16cb,0x194b,0x1bcb,0x1e4b,0x800b,0x880b,0x900b,0x980b,0xa00b,0xa80b,0xb00b,0xb80b,0x784b,0x804b,0x884b,0x904b,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0x18,0x18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+5,5,5,5,0,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+0,5,5,0,5,0,0,5,0,5,5,5,5,5,5,5,
+5,5,5,0,5,5,5,5,0,5,0,5,0,0,0,0,
+0,0,5,0,0,0,0,5,0,5,0,5,0,5,5,5,
+0,5,5,0,5,0,0,5,0,5,0,5,0,5,0,5,
+0,5,5,0,5,0,0,5,5,5,5,0,5,5,5,5,
+5,5,5,0,5,5,5,5,0,5,5,5,5,0,5,0,
+5,5,5,5,5,5,5,5,5,5,0,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,0,0,0,0,
+0,5,5,5,0,5,5,5,5,5,0,5,5,5,5,5,
+5,5,5,5,5,5,5,5,5,5,5,5,0,0,0,0,
0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0,0,
-0,0,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0x1b,0x1b,0x1b,0x1b,
0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0,0,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0,0,0,0,0,0x1b,0x1b,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0x1b,0x1b,0x1b,0x1b,
+0,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
+0,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
+0x2cb,0x2cb,0x30b,0x34b,0x38b,0x3cb,0x40b,0x44b,0x48b,0x4cb,0x50b,0x54b,0x54b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
+0,0,0,0,0,0,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0,0,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0,0,0,0,0,
0x1b,0x1b,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1a,0x1a,0x1a,0x1a,0x1a,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1a,0x1a,0x1a,0x1a,0x1a,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0,
0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0,0,0,0,0,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0,0,0x1b,0x1b,0x1b,0x1b,0x1b,
0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0,0,0,0,0,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0,0,0,0,0x1b,0x1b,0x1b,0x1b,
0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0,0,0x1b,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0x1b,0x1b,0x1b,0x1b,
0x1b,0x1b,0x1b,0x1b,0,0,0,0,0,0,0,0,0x1b,0x1b,0x1b,0x1b,
@@ -1388,13 +1409,12 @@ static const uint16_t propsTrie_index[22688]={
0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,
0x1b,0x1b,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,
-0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0,0,0,0,0,0,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0,0,0,0,0,0,0,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0,
+0,0,0,0,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0,
+0,0,0,0,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0,0,0,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,
-0,0,0,0,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0,0,0,0,
-0,0,0,0,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,
+0x1b,0x1b,0,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0,0,0,0,0,
+0,0,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
0,0,0,0,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
0x1b,0x1b,0x1b,0x1b,5,0x705,5,5,5,5,5,5,5,5,5,5,
@@ -1416,176 +1436,177 @@ static const uint16_t propsTrie_index[22688]={
5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
5,5,5,0x605,5,5,5,5,5,5,5,5,5,5,5,5,
5,0x645,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
+5,5,5,5,5,5,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,5,5,5,5,5,5,5,5,5,5,5,5,
5,5,5,5,5,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,5,5,5,5,5,5,5,5,5,5,5,5,
5,5,5,5,0x785,5,5,5,5,5,5,5,5,5,5,5,
-5,5,5,5,5,5,5,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0x10,0x10,0x10,0x10,
+5,5,5,5,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,
0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,
-0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0,0x10,0,0,
+0x10,0x10,0x10,0x10,0,0x10,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,6,6,6,6,
-6,6,6,6,6,6,6,6,6,6,6,6,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0x11,0x11,0x11,0x11,
+0,0,0,0,6,6,6,6,6,6,6,6,6,6,6,6,
+6,6,6,6,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,
0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,
-0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0x11,0,0,0,0,0,0
+0x11,0x11,0,0,0,0,0,0
};
static const UTrie2 propsTrie={
propsTrie_index,
- propsTrie_index+4656,
+ propsTrie_index+4692,
NULL,
- 4656,
- 18032,
+ 4692,
+ 18324,
0xa40,
- 0x12b0,
+ 0x12d4,
0x0,
0x0,
0x110000,
- 0x589c,
- NULL, 0, FALSE, FALSE, 0, NULL
+ 0x59e4,
+ NULL, 0, false, false, 0, NULL
};
-static const uint16_t propsVectorsTrie_index[32060]={
-0x511,0x519,0x521,0x529,0x541,0x549,0x551,0x559,0x561,0x569,0x571,0x579,0x581,0x589,0x591,0x599,
-0x5a0,0x5a8,0x5b0,0x5b8,0x5bb,0x5c3,0x5cb,0x5d3,0x5db,0x5e3,0x5eb,0x5f3,0x5fb,0x603,0x60b,0x613,
-0x61b,0x623,0x62a,0x632,0x63a,0x642,0x64a,0x652,0x65a,0x662,0x667,0x66f,0x676,0x67e,0x686,0x68e,
-0x696,0x69e,0x6a6,0x6ae,0x6b5,0x6bd,0x6c5,0x6cd,0x6d5,0x6dd,0x6e5,0x6ed,0x6f5,0x6fd,0x705,0x70d,
-0x1aba,0xd5a,0xe2e,0x1162,0x1299,0x1c7b,0x1e12,0x1c73,0x1367,0x1377,0x135f,0x136f,0x7da,0x7e0,0x7e8,0x7f0,
-0x7f8,0x7fe,0x806,0x80e,0x816,0x81c,0x824,0x82c,0x834,0x83a,0x842,0x84a,0x852,0x85a,0x862,0x869,
-0x871,0x877,0x87f,0x887,0x88f,0x895,0x89d,0x8a5,0x8ad,0x137f,0x8b5,0x8bd,0x8c5,0x8cc,0x8d4,0x8dc,
-0x8e4,0x8e8,0x8f0,0x8f7,0x8ff,0x907,0x90f,0x917,0x169a,0x16a2,0x91f,0x927,0x92f,0x937,0x93f,0x946,
-0x1700,0x16f0,0x16f8,0x19f5,0x19fd,0x138f,0x94e,0x1387,0x15e3,0x15e3,0x15e5,0x13a3,0x13a4,0x1397,0x1399,0x139b,
-0x1708,0x170a,0x956,0x170a,0x95e,0x963,0x96b,0x170f,0x971,0x170a,0x977,0x97f,0xc3a,0x1717,0x1717,0x987,
-0x1727,0x1728,0x1728,0x1728,0x1728,0x1728,0x1728,0x1728,0x1728,0x1728,0x1728,0x1728,0x1728,0x1728,0x1728,0x1728,
-0x1728,0x1728,0x1728,0x171f,0x98f,0x1730,0x1730,0x997,0xb62,0xb6a,0xb72,0xb7a,0x1740,0x1738,0x99f,0x9a7,
-0x9af,0x174a,0x1752,0x9b7,0x1748,0x9bf,0x1ac2,0xd62,0xb82,0xb8a,0xb92,0xb97,0x195b,0xc61,0xc68,0x18b7,
-0xc12,0x1aca,0xd6a,0xd72,0xd7a,0xd82,0xf38,0xf3c,0x19bb,0x19c0,0xca0,0xca8,0x1a31,0x1a39,0x1b93,0xe36,
-0x1a41,0xcee,0xcf6,0x1a49,0x10e2,0x118a,0xf10,0xd8a,0x18d7,0x18bf,0x18cf,0x18c7,0x1973,0x196b,0x1927,0x19b3,
-0x13ac,0x13ac,0x13ac,0x13ac,0x13af,0x13ac,0x13ac,0x13b7,0x9c7,0x13bf,0x9cb,0x9d3,0x13bf,0x9db,0x9e3,0x9eb,
-0x13cf,0x13c7,0x13d7,0x9f3,0x9fb,0x13df,0xa03,0xa0b,0x13e7,0x13ef,0x13f7,0x13ff,0xa13,0x1407,0x140e,0x1416,
-0x141e,0x1426,0x142e,0x1436,0x143e,0x1445,0x144d,0x1455,0x145d,0x1465,0x1468,0x146a,0x175a,0x184d,0x1853,0x19a3,
-0x1472,0xa1b,0xa23,0x1598,0x159d,0x15a0,0x15a6,0x147a,0x15ae,0x15ae,0x148a,0x1482,0x1492,0x149a,0x14a2,0x14aa,
-0x14b2,0x14ba,0x14c2,0x14ca,0x185b,0x18af,0x1a05,0x1b5b,0x14da,0x14e0,0x14e8,0x14f0,0x14d2,0x14f8,0x1863,0x186a,
-0x1762,0x1762,0x1762,0x1762,0x1762,0x1762,0x1762,0x1762,0x1872,0x1872,0x1872,0x1872,0x187a,0x1881,0x1883,0x188a,
-0x1892,0x1896,0x1896,0x1899,0x1896,0x1896,0x189f,0x1896,0x18df,0x19ab,0x1a0d,0xb9f,0xba5,0x1cbf,0x1cc7,0x1d9d,
-0x194b,0x193f,0x1943,0x19c8,0x192f,0x192f,0x192f,0xc22,0x1937,0xc42,0x198b,0xc90,0xc2a,0xc32,0xc32,0x1a51,
-0x197b,0x1a15,0xc78,0xc80,0xa2b,0x176a,0x176a,0xa33,0x1772,0x1772,0x1772,0x1772,0x1772,0x1772,0xa3b,0x715,
-0x15cb,0x15ed,0xa43,0x15f5,0xa4b,0x15fd,0x1605,0x160d,0xa53,0xa58,0x1615,0x161c,0xa5d,0x177a,0x199b,0xc1a,
-0xa65,0x1677,0x167e,0x1624,0x1686,0x168a,0x162c,0x1630,0x1649,0x1649,0x164b,0x1638,0x1640,0x1640,0x1641,0x1692,
-0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,
-0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,
-0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,
-0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,
-0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,
-0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,
-0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,
-0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,
-0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,
-0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,
-0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,
-0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,
-0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1782,0x1785,0x18e7,0x18e7,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,
-0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x1653,0x165a,0x1ab2,0x1e7e,
-0x178d,0x1793,0x1793,0x1793,0x1793,0x1793,0x1793,0x1793,0x1793,0x1793,0x1793,0x1793,0x1793,0x1793,0x1793,0x1793,
-0x1793,0x1793,0x1793,0x1793,0x1793,0x1793,0x1793,0x1793,0x1793,0x1793,0x1793,0x1793,0x1793,0x1793,0x1793,0x1793,
-0x1793,0x1793,0x1793,0x1793,0xa6d,0x179b,0xa75,0x1ad2,0x1a5d,0x1a5d,0x1a5d,0x1a5d,0x1a5d,0x1a5d,0x1a5d,0x1a5d,
-0x1a59,0xcfe,0x1a6d,0x1a65,0x1a6f,0x1ada,0x1ada,0xd92,0x1953,0x19d0,0x1a25,0x1a29,0x1a1d,0x1b8b,0xcb0,0xcb7,
-0x1983,0xc88,0x19d8,0xcbf,0x1a77,0x1a7a,0xd06,0x1ae2,0x1a8a,0x1a82,0xd0e,0xd9a,0x1aea,0x1aee,0xda2,0xfec,
-0x1a92,0xd16,0xd1e,0x1af6,0x1b06,0x1afe,0xdaa,0xee0,0xe3e,0xe46,0x1d0d,0xf9c,0x1dba,0x1dba,0x1b0e,0xdb2,
-0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,
-0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,
-0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,
-0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,
-0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,
-0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,
-0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,
-0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,
-0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,
-0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,
-0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,
-0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,
-0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,
-0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,
-0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,
-0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,
-0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,
-0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,
-0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,
-0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,
-0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,
-0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0x16e8,0x16e2,0x16e3,0x16e4,0x16e5,0x16e6,0x16e7,0xa7d,0xdba,0xdbd,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x16ba,0x16ba,0x16ba,0x16ba,0x16ba,0x16ba,0x16ba,0x16ba,0x16ba,0x16ba,0x16ba,0x16ba,0x16ba,0x16ba,0x16ba,0x16ba,
-0x16ba,0x16ba,0x16ba,0x16ba,0x16ba,0x16ba,0x16ba,0x16ba,0x16ba,0x16ba,0x16ba,0x16ba,0x16ba,0x16ba,0x16ba,0x16ba,
-0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,
-0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,
-0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,
-0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,
-0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,
-0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,
-0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,
-0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,
-0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,
-0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,
-0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,
-0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,
-0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x15b6,0x1662,0x1662,0x1662,0x1662,0x1662,0x1662,0x1662,0x1662,
-0x1667,0x166f,0x18a7,0x132c,0x1993,0x1993,0x1330,0x1337,0xa85,0xa8d,0xa95,0x1518,0x151f,0x1527,0xa9d,0x152f,
-0x156d,0x156d,0x1500,0x1508,0x1537,0x1564,0x1565,0x1575,0x153f,0x1544,0x154c,0x1554,0xaa5,0x155c,0xaad,0x1510,
-0xc98,0x157d,0xab5,0xabd,0x1585,0x158b,0x1590,0xac5,0xad5,0x15d3,0x15db,0x15be,0x15c3,0xadd,0xae5,0xacd,
-0x16aa,0x16aa,0x16aa,0x16aa,0x16aa,0x16aa,0x16aa,0x16aa,0x16aa,0x16aa,0x16aa,0x16aa,0x16aa,0x16aa,0x16aa,0x16aa,
-0x16aa,0x16aa,0x16aa,0x16aa,0x16aa,0x16aa,0x16aa,0x16aa,0x16aa,0x16aa,0x16aa,0x16aa,0x16b2,0x16b2,0x16b2,0x16b2,
-0x14c4,0x14c4,0x1504,0x1544,0x1584,0x15c4,0x1604,0x1644,0x1680,0x16c0,0x16ec,0x172c,0x176c,0x17ac,0x17ec,0x182c,
-0x186c,0x18a8,0x18e8,0x1928,0x1968,0x199c,0x19d8,0x1a18,0x1a58,0x1a98,0x1ad4,0x1b14,0x1b54,0x1b94,0x1bd4,0x1c14,
-0xe59,0xa80,0xac0,0xb00,0xb40,0xb6b,0xed9,0xa40,0xefb,0xa40,0xa40,0xa40,0xa40,0xbab,0x1381,0x1381,
-0xf3b,0xfbb,0xa40,0xa40,0xa40,0xbeb,0xf7b,0xc2b,0xa40,0xc51,0xc91,0xcd1,0xd11,0xd51,0xe99,0xdc9,
-0x12c1,0x12c1,0x12c1,0x12c1,0x12c1,0x12c1,0x12c1,0x12c1,0x12c1,0x12c1,0x12c1,0x12c1,0x12c1,0x12c1,0x12c1,0x12c1,
-0x12c1,0x12c1,0x12c1,0x12c1,0xffb,0x1301,0x1136,0x1176,0x1341,0x1181,0x13c1,0x13c1,0x13c1,0x103b,0x105b,0x109b,
-0x1401,0x1401,0x10db,0x105b,0x105b,0x105b,0x105b,0x105b,0x105b,0x105b,0x105b,0x105b,0x105b,0x105b,0x105b,0x105b,
-0x105b,0x105b,0x105b,0x105b,0x105b,0x105b,0x105b,0x105b,0x105b,0x105b,0x105b,0x105b,0x105b,0x105b,0x105b,0x10f6,
+static const uint16_t propsVectorsTrie_index[32692]={
+0x539,0x541,0x549,0x551,0x569,0x571,0x579,0x581,0x589,0x591,0x599,0x5a1,0x5a9,0x5b1,0x5b9,0x5c1,
+0x5c8,0x5d0,0x5d8,0x5e0,0x5e3,0x5eb,0x5f3,0x5fb,0x603,0x60b,0x613,0x61b,0x623,0x62b,0x633,0x63b,
+0x643,0x64b,0x652,0x65a,0x662,0x66a,0x672,0x67a,0x682,0x68a,0x68f,0x697,0x69e,0x6a6,0x6ae,0x6b6,
+0x6be,0x6c6,0x6ce,0x6d6,0x6dd,0x6e5,0x6ed,0x6f5,0x6fd,0x705,0x70d,0x715,0x71d,0x725,0x72d,0x735,
+0x1b39,0xd8a,0xe56,0x118d,0x12cc,0x1d01,0x1ea0,0x1cf9,0x13e6,0x13f6,0x13de,0x13ee,0x80a,0x810,0x818,0x820,
+0x828,0x82e,0x836,0x83e,0x846,0x84c,0x854,0x85c,0x864,0x86a,0x872,0x87a,0x882,0x88a,0x892,0x899,
+0x8a1,0x8a7,0x8af,0x8b7,0x8bf,0x8c5,0x8cd,0x8d5,0x8dd,0x13fe,0x8e5,0x8ed,0x8f5,0x8fc,0x904,0x90c,
+0x914,0x918,0x920,0x927,0x92f,0x937,0x93f,0x947,0x1719,0x1721,0x94f,0x957,0x95f,0x967,0x96f,0x976,
+0x177f,0x176f,0x1777,0x1a74,0x1a7c,0x140e,0x97e,0x1406,0x1662,0x1662,0x1664,0x1422,0x1423,0x1416,0x1418,0x141a,
+0x1787,0x1789,0x986,0x1789,0x98e,0x993,0x99b,0x178e,0x9a1,0x1789,0x9a7,0x9af,0xc6a,0x1796,0x1796,0x9b7,
+0x17a6,0x17a7,0x17a7,0x17a7,0x17a7,0x17a7,0x17a7,0x17a7,0x17a7,0x17a7,0x17a7,0x17a7,0x17a7,0x17a7,0x17a7,0x17a7,
+0x17a7,0x17a7,0x17a7,0x179e,0x9bf,0x17af,0x17af,0x9c7,0xb92,0xb9a,0xba2,0xbaa,0x17bf,0x17b7,0x9cf,0x9d7,
+0x9df,0x17c9,0x17d1,0x9e7,0x17c7,0x9ef,0x1b41,0xd92,0xbb2,0xbba,0xbc2,0xbc7,0x19da,0xc91,0xc98,0x1936,
+0xc42,0x1b49,0xd9a,0xda2,0xdaa,0xdb2,0xf60,0xf64,0x1a3a,0x1a3f,0xcd0,0xcd8,0x1ab0,0x1ab8,0x1c19,0xe5e,
+0x1ac0,0xd1e,0xd26,0x1ac8,0x1105,0x11b5,0xf38,0xdba,0x1956,0x193e,0x194e,0x1946,0x19f2,0x19ea,0x19a6,0x1a32,
+0x142b,0x142b,0x142b,0x142b,0x142e,0x142b,0x142b,0x1436,0x9f7,0x143e,0x9fb,0xa03,0x143e,0xa0b,0xa13,0xa1b,
+0x144e,0x1446,0x1456,0xa23,0xa2b,0x145e,0xa33,0xa3b,0x1466,0x146e,0x1476,0x147e,0xa43,0x1486,0x148d,0x1495,
+0x149d,0x14a5,0x14ad,0x14b5,0x14bd,0x14c4,0x14cc,0x14d4,0x14dc,0x14e4,0x14e7,0x14e9,0x17d9,0x18cc,0x18d2,0x1a22,
+0x14f1,0xa4b,0xa53,0x1617,0x161c,0x161f,0x1625,0x14f9,0x162d,0x162d,0x1509,0x1501,0x1511,0x1519,0x1521,0x1529,
+0x1531,0x1539,0x1541,0x1549,0x18da,0x192e,0x1a84,0x1be1,0x1559,0x155f,0x1567,0x156f,0x1551,0x1577,0x18e2,0x18e9,
+0x17e1,0x17e1,0x17e1,0x17e1,0x17e1,0x17e1,0x17e1,0x17e1,0x18f1,0x18f1,0x18f1,0x18f1,0x18f9,0x1900,0x1902,0x1909,
+0x1911,0x1915,0x1915,0x1918,0x1915,0x1915,0x191e,0x1915,0x195e,0x1a2a,0x1a8c,0xbcf,0xbd5,0x1d45,0x1d4d,0x1e2b,
+0x19ca,0x19be,0x19c2,0x1a47,0x19ae,0x19ae,0x19ae,0xc52,0x19b6,0xc72,0x1a0a,0xcc0,0xc5a,0xc62,0xc62,0x1ad0,
+0x19fa,0x1a94,0xca8,0xcb0,0xa5b,0x17e9,0x17e9,0xa63,0x17f1,0x17f1,0x17f1,0x17f1,0x17f1,0x17f1,0xa6b,0x73d,
+0x164a,0x166c,0xa73,0x1674,0xa7b,0x167c,0x1684,0x168c,0xa83,0xa88,0x1694,0x169b,0xa8d,0x17f9,0x1a1a,0xc4a,
+0xa95,0x16f6,0x16fd,0x16a3,0x1705,0x1709,0x16ab,0x16af,0x16c8,0x16c8,0x16ca,0x16b7,0x16bf,0x16bf,0x16c0,0x1711,
+0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,
+0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,
+0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,
+0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,
+0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,
+0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,
+0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,
+0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,
+0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,
+0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,
+0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,
+0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,
+0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1801,0x1804,0x1966,0x1966,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
+0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d9,0x1b31,0x1f0c,
+0x180c,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,
+0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,
+0x1812,0x1812,0x1812,0x1812,0xa9d,0x181a,0xaa5,0x1b51,0x1adc,0x1adc,0x1adc,0x1adc,0x1adc,0x1adc,0x1adc,0x1adc,
+0x1ad8,0xd2e,0x1aec,0x1ae4,0x1aee,0x1b59,0x1b59,0xdc2,0x19d2,0x1a4f,0x1aa4,0x1aa8,0x1a9c,0x1c11,0xce0,0xce7,
+0x1a02,0xcb8,0x1a57,0xcef,0x1af6,0x1af9,0xd36,0x1b61,0x1b09,0x1b01,0xd3e,0xdca,0x1b69,0x1b6d,0xdd2,0x100f,
+0x1b11,0xd46,0xd4e,0x1b75,0x1b85,0x1b7d,0xdda,0xf08,0xe66,0xe6e,0x1d9b,0xfbf,0x1e48,0x1e48,0x1b8d,0xde2,
+0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,
+0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,
+0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,
+0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,
+0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,
+0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,
+0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,
+0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,
+0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,
+0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,
+0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,
+0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,
+0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,
+0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,
+0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,
+0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,
+0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,
+0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,
+0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,
+0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,
+0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,
+0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0x1767,0x1761,0x1762,0x1763,0x1764,0x1765,0x1766,0xaad,0xdea,0xded,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x1739,0x1739,0x1739,0x1739,0x1739,0x1739,0x1739,0x1739,0x1739,0x1739,0x1739,0x1739,0x1739,0x1739,0x1739,0x1739,
+0x1739,0x1739,0x1739,0x1739,0x1739,0x1739,0x1739,0x1739,0x1739,0x1739,0x1739,0x1739,0x1739,0x1739,0x1739,0x1739,
+0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,
+0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,
+0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,
+0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,
+0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,
+0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,
+0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,
+0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,
+0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,
+0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,
+0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,
+0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,
+0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x16e1,0x16e1,0x16e1,0x16e1,0x16e1,0x16e1,0x16e1,0x16e1,
+0x16e6,0x16ee,0x1926,0x13a3,0x1a12,0x1a12,0x13a7,0x13ae,0xab5,0xabd,0xac5,0x1597,0x159e,0x15a6,0xacd,0x15ae,
+0x15ec,0x15ec,0x157f,0x1587,0x15b6,0x15e3,0x15e4,0x15f4,0x15be,0x15c3,0x15cb,0x15d3,0xad5,0x15db,0xadd,0x158f,
+0xcc8,0x15fc,0xae5,0xaed,0x1604,0x160a,0x160f,0xaf5,0xb05,0x1652,0x165a,0x163d,0x1642,0xb0d,0xb15,0xafd,
+0x1729,0x1729,0x1729,0x1729,0x1729,0x1729,0x1729,0x1729,0x1729,0x1729,0x1729,0x1729,0x1729,0x1729,0x1729,0x1729,
+0x1729,0x1729,0x1729,0x1729,0x1729,0x1729,0x1729,0x1729,0x1729,0x1729,0x1729,0x1729,0x1731,0x1731,0x1731,0x1731,
+0x1564,0x1564,0x15a4,0x15e4,0x1624,0x1664,0x16a4,0x16e4,0x1720,0x1760,0x178c,0x17cc,0x180c,0x184c,0x188c,0x18cc,
+0x190c,0x1948,0x1988,0x19c8,0x1a08,0x1a3c,0x1a78,0x1ab8,0x1af8,0x1b38,0x1b74,0x1bb4,0x1bf4,0x1c34,0x1c74,0x1cb4,
+0xe59,0xa80,0xac0,0xb00,0xb40,0xb6b,0xf99,0xa40,0xed9,0xa40,0xa40,0xa40,0xa40,0xbab,0x13e2,0x13e2,
+0xf19,0xfd9,0xa40,0xa40,0xa40,0xbeb,0xf59,0xc2b,0xa40,0xc51,0xc91,0xcd1,0xd11,0xd51,0xe99,0xdc9,
+0x1322,0x1322,0x1322,0x1322,0x1322,0x1322,0x1322,0x1322,0x1322,0x1322,0x1322,0x1322,0x1322,0x1322,0x1322,0x1322,
+0x1322,0x1322,0x1322,0x1322,0x1019,0x1362,0x1157,0x1197,0x13a2,0x11a2,0x1422,0x1422,0x1422,0x1059,0x1079,0x10b9,
+0x1462,0x1462,0x11e2,0x14a2,0x10f9,0x1079,0x1079,0x1079,0x1079,0x1079,0x1079,0x1079,0x1079,0x1079,0x1079,0x1079,
+0x1079,0x1079,0x1079,0x1079,0x1079,0x1079,0x1079,0x1079,0x1079,0x1079,0x1079,0x1079,0x1079,0x1079,0x1079,0x1117,
0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,
0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xd89,
0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,
@@ -1608,1868 +1629,1908 @@ static const uint16_t propsVectorsTrie_index[32060]={
0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xd89,
0xe09,0xe19,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,
0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xa40,0xd89,
-0x1241,0x1241,0x1241,0x1241,0x1241,0x1241,0x1241,0x1241,0x1241,0x1241,0x1241,0x1241,0x1241,0x1241,0x1241,0x1241,
-0x1241,0x1241,0x1241,0x1241,0x1241,0x1241,0x1241,0x1241,0x1241,0x1241,0x1241,0x1241,0x1241,0x1241,0x1241,0x11c1,
-0x1281,0x1281,0x1281,0x1281,0x1281,0x1281,0x1281,0x1281,0x1281,0x1281,0x1281,0x1281,0x1281,0x1281,0x1281,0x1281,
-0x1281,0x1281,0x1281,0x1281,0x1281,0x1281,0x1281,0x1281,0x1281,0x1281,0x1281,0x1281,0x1281,0x1281,0x1281,0x1201,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0xbf7,0xbfa,0xdc5,0x1d5d,0xff4,0x71d,0x531,0x108e,0xcc7,0xd46,0x531,0x531,0x1c8b,0xee8,0xef0,0x1da5,
-0xc4a,0xc51,0xc59,0x1b16,0x1d3d,0x531,0x1d1d,0xfc4,0x1b1e,0xdcd,0xdd5,0xddd,0x101c,0x725,0x531,0x531,
-0x1b26,0x1b26,0x72d,0x531,0x1dd2,0x10a6,0x1dca,0x10ae,0x1ebe,0x11a0,0x531,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0xde5,0x1f16,0x1291,0x531,0x531,0x1ede,0x11c8,0x11cf,0x11d6,0x12cd,0x12d1,0x1248,0x11de,
-0x1b9b,0x1b9d,0xe4e,0xe55,0x1b2e,0x1b36,0xded,0xf08,0x1c83,0xed0,0xed8,0xfbc,0x1ca3,0x1ca7,0x1caf,0x103c,
-0xf87,0xf8c,0x735,0x531,0x1096,0x109e,0x1d05,0xf94,0xf69,0xf6f,0xf77,0xf7f,0x531,0x531,0x531,0x531,
-0x1e42,0x1e3a,0x1110,0x1118,0x1d85,0x1d7d,0x1064,0x531,0x531,0x531,0x531,0x531,0x1d6d,0x1024,0x102c,0x1034,
-0x1d35,0x1d2d,0xfd4,0x1108,0x1cb7,0xf18,0x73d,0x531,0x1074,0x107c,0x745,0x531,0x531,0x531,0x531,0x531,
-0x1eb6,0x1182,0x74d,0x531,0x531,0x1d95,0x1d8d,0x106c,0x1250,0x1256,0x125e,0x531,0x531,0x11e6,0x11ea,0x11f2,
-0x1e76,0x1e6e,0x116a,0x1e66,0x1e5e,0x115a,0x1d65,0x1014,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x10c6,0x10cb,0x10d3,0x10da,0x10fa,0x1100,0x531,0x531,0x113e,0x1142,0x114a,0x1192,0x1198,0x755,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x11b0,0x531,0x531,0x531,0x531,0x531,0x759,0x1efe,0x1238,
-0x19e0,0x19e0,0x19e0,0x19e0,0x19e0,0x19e0,0x19e0,0x19e0,0x19e0,0x19e0,0x19e0,0x19e0,0x19e0,0x19e0,0x19e0,0x19e0,
-0x19e0,0x19e0,0x19e0,0x19e0,0x19e0,0x19e0,0x19e0,0x19e0,0x19e0,0x19e0,0x19e0,0x19e5,0xccf,0xcd6,0xcd6,0xcd6,
-0x19ed,0x19ed,0x19ed,0xcde,0x1dc2,0x1dc2,0x1dc2,0x1dc2,0x1dc2,0x1dc2,0x761,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x765,0x1f2e,0x1f2e,0x12a1,0x1ba5,0x1ba5,0x1ba5,0x1ba5,0x1ba5,
-0x1ba5,0x1ba5,0x1ba5,0x1ba5,0x1ba5,0x1ba5,0x1ba5,0x1ba5,0x1ba5,0x1ba5,0x1ba5,0x1ba5,0xe5d,0xfdc,0xfe4,0x1f36,
-0x12d9,0x12e1,0xf20,0x1d55,0x1d4d,0x1004,0x100c,0x76d,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x1ed6,0x1ece,0x11c0,
-0x531,0x531,0x531,0x1c9b,0x1c9b,0xef8,0x1c93,0xf00,0x531,0x531,0x10f2,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x771,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x1ced,0x1ced,0x1ced,0xf44,0xf49,
-0x779,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x1f46,0x1301,0x1308,0x1f3e,0x1f3e,0x1f3e,0x781,
-0x531,0x17c0,0x17c0,0x17c0,0x17c0,0x17c0,0x17c0,0x17c0,0xafb,0x17d0,0xb03,0x17d1,0x17c8,0x17d9,0x17df,0x17e7,
-0xb0b,0x190f,0x190f,0x789,0x531,0x531,0x531,0x531,0x11b8,0x18ff,0x18ff,0xc02,0xce6,0x531,0x531,0x531,
-0x531,0x1818,0x181f,0xb13,0x1822,0xb1b,0xb23,0xb2b,0x181c,0xb33,0xb3b,0xb43,0x1821,0x1829,0x1818,0x181f,
-0x181b,0x1822,0x182a,0x1819,0x1820,0x181c,0xb4a,0x17ef,0x17f7,0x17fe,0x1805,0x17f2,0x17fa,0x1801,0x1808,0xb52,
-0x1810,0x1dea,0x1dea,0x1dea,0x1dea,0x1dea,0x1dea,0x1dea,0x1dea,0x1dea,0x1dea,0x1dea,0x1dea,0x1dea,0x1dea,0x1dea,
-0x1dea,0x1dda,0x1ddd,0x1dda,0x1de4,0x10b6,0x791,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x12bd,0x12c5,0x12c5,0x12c5,0x12c5,0x12c5,0x12c5,
-0x12c5,0x10ea,0x799,0x531,0x531,0x531,0x531,0x531,0x531,0x1ef6,0x11fa,0x7a1,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x7a5,0x12e9,0x1f06,0x1240,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x12a9,0x1d25,0x1d25,0x1d25,0x1d25,0x1d25,0x1d25,0xfcc,0x531,0x1e32,0x1e2a,0x10be,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x7ad,0x1ec6,0x11a8,0x531,0x531,0x1202,0x1203,0x7b5,0x531,0x531,0x531,0x531,
-0x531,0xe95,0xe9d,0xea5,0xead,0xeb5,0xebd,0xec4,0xec8,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x7b9,0x1044,0x1d75,0x104a,0x1d75,0x1052,0x1057,0x105c,
-0x105c,0x1dfa,0x1e1a,0x1e22,0x1e8e,0x1e02,0x1ee6,0x1e0a,0x1e96,0x1eee,0x1eee,0x1172,0x117a,0x121a,0x1220,0x1228,
-0x1230,0x1f0e,0x1f0e,0x1f0e,0x1f0e,0x1274,0x1f0e,0x127a,0x127e,0x7c1,0x7c1,0x7c1,0x7c1,0x7c1,0x7c1,0x7c1,
-0x7c1,0x7c1,0x7c1,0x7c1,0x7c1,0x7c1,0x7c1,0x7c1,0x7c1,0x7c1,0x7c1,0x7c1,0x7c1,0x7c1,0x7c1,0x7c1,
-0x7c1,0x7c1,0x7c1,0x7c1,0x7c1,0x7c1,0x7c1,0x7c1,0x7c2,0xb5a,0x1832,0x1832,0x1832,0x7ca,0x7ca,0x7ca,
-0x7ca,0x1907,0x1907,0x1907,0x1907,0x1907,0x1907,0x1907,0x7d2,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,
-0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,
-0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,
-0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,
-0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0x7ca,0xbad,0xbb4,0xbbc,0xbc4,0x18ef,0x18ef,0x18ef,
-0xbcc,0xbd4,0xbd7,0x191f,0x1917,0xc0a,0xd26,0xd2a,0xd2e,0x531,0x531,0x531,0x531,0xd36,0x1a9a,0xd3e,
-0xf30,0x17a3,0xaed,0xaf3,0xffc,0xbdf,0x1963,0xc70,0x531,0x17b8,0x17ab,0x17b0,0x18f7,0xbe7,0xbef,0x1120,
-0x1126,0x1cf5,0xf51,0x1ce5,0xf28,0x12f1,0x12f9,0x531,0x531,0x1d15,0x1d15,0x1d15,0x1d15,0x1d15,0x1d15,0x1d15,
-0x1d15,0x1d15,0xfa4,0xfac,0xfb4,0x12b1,0x12b5,0x531,0x531,0x1aa2,0xd4e,0x1aaa,0x1aaa,0xd52,0xe65,0xe6d,
-0xe75,0x1b6b,0x1b53,0x1b73,0x1b7b,0x1b63,0xdfd,0xe01,0xe08,0xe10,0xe14,0xe1c,0xe24,0xe26,0xe26,0xe26,
-0xe26,0x1bdc,0x1be4,0x1bdc,0x1bea,0x1bf2,0x1bbd,0x1bfa,0x1c02,0x1bdc,0x1c0a,0x1c12,0x1c19,0x1c21,0x1bc5,0x1bdc,
-0x1c26,0x1bcd,0x1bd4,0x1c2e,0x1c34,0x1cd6,0x1cdd,0x1ccf,0x1c3b,0x1c43,0x1c4b,0x1c53,0x1d45,0x1c5b,0x1c63,0xe7d,
-0xe85,0x1bad,0x1bad,0x1bad,0xe8d,0x1cfd,0x1cfd,0xf59,0xf61,0x1b3e,0x1b3e,0x1b3e,0x1b3e,0x1b3e,0x1b3e,0x1b3e,
-0x1b3e,0x1b3e,0x1b3e,0x1b3e,0x1b3e,0x1b3e,0x1b3e,0x1b3e,0x1b3e,0x1b3e,0x1b3e,0x1b40,0x1b3e,0x1b48,0x1b3e,0x1b3e,
-0x1b3e,0x1b3e,0x1b3e,0x1b3e,0x1b4b,0x1b3e,0x1b3e,0x1b3e,0x1b3e,0x1b3e,0xdf5,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x1dad,0x1dad,0x1dad,0x1dad,0x1dad,
-0x1dad,0x1dad,0x1dad,0x1dad,0x1dad,0x1dad,0x1dad,0x1dad,0x1dad,0x1db2,0x1dad,0x1dad,0x1dad,0x1084,0x1086,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,
-0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,
-0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,
-0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,
-0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x112e,0x1bb5,0x1e86,0x1e86,0x1e86,0x1e86,
-0x1e86,0x1e86,0x1e86,0x1ea6,0x1136,0x120b,0x1212,0x1eae,0x1eae,0x1eae,0x1eae,0x1eae,0x1eae,0x1eae,0x1eae,0x1eae,
-0x1eae,0x1eae,0x1152,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x1e52,0x1e52,0x1e52,0x1e52,0x1e52,
-0x1e52,0x1e52,0x1e52,0x1e52,0x1e52,0x1e52,0x1e52,0x1e52,0x1e52,0x1e52,0x1e52,0x1e52,0x1e52,0x1e52,0x1e52,0x1e52,
-0x1e52,0x1e52,0x1e56,0x1f26,0x1f26,0x1f26,0x1f26,0x1f26,0x1f26,0x1f26,0x1f26,0x1f26,0x1f26,0x1f26,0x1f26,0x1f26,
-0x1f26,0x1266,0x126c,0x1286,0x1289,0x1289,0x1289,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,
-0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x531,0x183a,0x183a,0x183a,0x183a,0x183a,
-0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,
-0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,
-0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,
-0x183a,0x183d,0x1310,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,
-0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,
-0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1318,0x1310,0x1310,0x1310,0x1310,0x1310,
-0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,
-0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,
-0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,
-0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1845,0x1845,0x1845,0x1845,0x1845,
-0x1845,0x1845,0x1845,0x1845,0x1845,0x1845,0x1845,0x1845,0x1845,0x1845,0x1845,0x133f,0x1310,0x1310,0x1310,0x1310,
-0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,
-0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,
-0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x131c,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,
-0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,
-0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1324,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,
-0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,
-0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,
-0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,0x1310,
-0x1310,0x1310,0x1310,0x1310,0x1310,0x131c,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,
-0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,
-0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,
-0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1347,
-0x1c6b,0x1c6b,0x1c6b,0x1c6b,0x1c6b,0x1c6b,0x134f,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,
-0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,
-0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,
-0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,
-0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1357,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,
-0x1e9e,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
-0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
-0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
-0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
-0x16c2,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,
-0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,
-0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,
-0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,
-0x16ca,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
-0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
-0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
-0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,0x16d2,
-0x16d2,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,
-0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,
-0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,
-0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,0x16da,
-0x16da,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,
-0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,
-0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,
-0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,0x183a,
-0x183a,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,
-0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,
-0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,
-0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,0x1b83,
-0x1b83,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,
-0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,
-0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,
-0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,0x1df2,
-0x1df2,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,
-0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,
-0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,
-0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,0x1e4a,
-0x1e4a,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,
-0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,
-0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,
-0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,0x1e9e,
-0x1e9e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,
-0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,
-0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,
-0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,0x1f1e,
-0x1f1e,0x510,0x510,0x510,0x2d3,0x2d3,0x2d3,0x2d3,0x2d3,0x2d3,0x2d3,0x2d3,0x2d3,0x2d6,0x2df,0x2d9,
-0x2d9,0x2dc,0x2d3,0x2d3,0x2d3,0x2d3,0x2d3,0x2d3,0x2d3,0x2d3,0x2d3,0x2d3,0x2d3,0x2d3,0x2d3,0x2d3,
-0x2d3,0x2d3,0x2d3,0x2d3,0x7cb,0x7c5,0x7aa,0x78f,0x79b,0x798,0x78f,0x7a7,0x795,0x7a1,0x78f,0x7bc,
-0x7b3,0x7a4,0x7c8,0x79e,0x78c,0x78c,0x78c,0x78c,0x78c,0x78c,0x78c,0x78c,0x78c,0x78c,0x7b0,0x7ad,
-0x7b6,0x7b6,0x7b6,0x7c5,0x78f,0x7d7,0x7d7,0x7d7,0x7d7,0x7d7,0x7d7,0x7d1,0x7d1,0x7d1,0x7d1,0x7d1,
-0x7d1,0x7d1,0x7d1,0x7d1,0x7d1,0x7d1,0x7d1,0x7d1,0x7d1,0x7d1,0x7d1,0x7d1,0x7d1,0x7d1,0x7d1,0x795,
-0x79b,0x7a1,0x7c2,0x789,0x7bf,0x7d4,0x7d4,0x7d4,0x7d4,0x7d4,0x7d4,0x7ce,0x7ce,0x7ce,0x7ce,0x7ce,
-0x7ce,0x7ce,0x7ce,0x7ce,0x7ce,0x7ce,0x7ce,0x7ce,0x7ce,0x7ce,0x7ce,0x7ce,0x7ce,0x7ce,0x7ce,0x795,
-0x7b9,0x792,0x7b6,0x2d3,0,0,0,0,0,0,0,0,0,0,0,0,
+0x12a2,0x12a2,0x12a2,0x12a2,0x12a2,0x12a2,0x12a2,0x12a2,0x12a2,0x12a2,0x12a2,0x12a2,0x12a2,0x12a2,0x12a2,0x12a2,
+0x12a2,0x12a2,0x12a2,0x12a2,0x12a2,0x12a2,0x12a2,0x12a2,0x12a2,0x12a2,0x12a2,0x12a2,0x12a2,0x12a2,0x12a2,0x1222,
+0x12e2,0x12e2,0x12e2,0x12e2,0x12e2,0x12e2,0x12e2,0x12e2,0x12e2,0x12e2,0x12e2,0x12e2,0x12e2,0x12e2,0x12e2,0x12e2,
+0x12e2,0x12e2,0x12e2,0x12e2,0x12e2,0x12e2,0x12e2,0x12e2,0x12e2,0x12e2,0x12e2,0x12e2,0x12e2,0x12e2,0x12e2,0x1262,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0xc27,0xc2a,0xdf5,0x1deb,0x1017,0x745,0x559,0x10b1,0xcf7,0xd76,0x559,0x559,0x1d11,0xf10,0xf18,0x1e33,
+0xc7a,0xc81,0xc89,0x1b95,0x1dcb,0x559,0x1dab,0xfe7,0x1b9d,0xdfd,0xe05,0xe0d,0x103f,0x74d,0x559,0x559,
+0x1ba5,0x1ba5,0x755,0x559,0x1e60,0x10c9,0x1e58,0x10d1,0x1f4c,0x11cb,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0xe15,0x1fa4,0x12c4,0x1346,0x1347,0x1f6c,0x11f3,0x11fa,0x1201,0x1303,0x1307,0x127b,0x1211,
+0x1c21,0x1c23,0xe76,0xe7d,0x1bad,0x1bb5,0xe1d,0xf30,0x1d09,0xef8,0xf00,0xfdf,0x1d29,0x1d2d,0x1d35,0x105f,
+0xfaf,0x1d8b,0x75d,0x559,0x10b9,0x10c1,0x1d93,0xfb7,0xf91,0xf97,0xf9f,0xfa7,0x559,0x559,0x559,0x559,
+0x1ed0,0x1ec8,0x113b,0x1143,0x1e13,0x1e0b,0x1087,0x559,0x559,0x559,0x559,0x559,0x1dfb,0x1047,0x104f,0x1057,
+0x1dc3,0x1dbb,0xff7,0x1133,0x1d3d,0xf40,0x765,0x559,0x1097,0x109f,0x76d,0x559,0x559,0x559,0x559,0x559,
+0x1f44,0x11ad,0x775,0x559,0x559,0x1e23,0x1e1b,0x108f,0x1283,0x1289,0x1291,0x559,0x559,0x1219,0x121d,0x1225,
+0x1f04,0x1efc,0x1195,0x1ef4,0x1eec,0x1185,0x1df3,0x1037,0x1357,0x135a,0x135a,0x559,0x559,0x559,0x559,0x559,
+0x10e9,0x10ee,0x10f6,0x10fd,0x1125,0x112b,0x559,0x559,0x1169,0x116d,0x1175,0x11bd,0x11c3,0x77d,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x11db,0x136a,0x136f,0x1377,0x559,0x559,0x781,0x1f8c,0x126b,
+0x1a5f,0x1a5f,0x1a5f,0x1a5f,0x1a5f,0x1a5f,0x1a5f,0x1a5f,0x1a5f,0x1a5f,0x1a5f,0x1a5f,0x1a5f,0x1a5f,0x1a5f,0x1a5f,
+0x1a5f,0x1a5f,0x1a5f,0x1a5f,0x1a5f,0x1a5f,0x1a5f,0x1a5f,0x1a5f,0x1a5f,0x1a5f,0x1a64,0xcff,0xd06,0xd06,0xd06,
+0x1a6c,0x1a6c,0x1a6c,0xd0e,0x1e50,0x1e50,0x1e50,0x1e50,0x1e50,0x1e50,0x789,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x78d,0x1fbc,0x1fbc,0x12d4,0x1c2b,0x1c2b,0x1c2b,0x1c2b,0x1c2b,
+0x1c2b,0x1c2b,0x1c2b,0x1c2b,0x1c2b,0x1c2b,0x1c2b,0x1c2b,0x1c2b,0x1c2b,0x1c2b,0x1c2b,0xe85,0xfff,0x1007,0x1fc4,
+0x130f,0x1317,0xf48,0x1de3,0x1ddb,0x1027,0x102f,0x795,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x1f64,0x1f5c,0x11eb,
+0x559,0x559,0x559,0x1d21,0x1d21,0xf20,0x1d19,0xf28,0x559,0x559,0x111d,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x799,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x1d73,0x1d73,0x1d73,0xf6c,0xf71,
+0x7a1,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x1fd4,0x1337,0x133e,0x1fcc,0x1fcc,0x1fcc,0x7a9,
+0x559,0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,0xb2b,0x184f,0xb33,0x1850,0x1847,0x1858,0x185e,0x1866,
+0xb3b,0x198e,0x198e,0x7b1,0x559,0x559,0x559,0x1362,0x11e3,0x197e,0x197e,0xc32,0xd16,0x559,0x559,0x559,
+0x559,0x1897,0x189e,0xb43,0x18a1,0xb4b,0xb53,0xb5b,0x189b,0xb63,0xb6b,0xb73,0x18a0,0x18a8,0x1897,0x189e,
+0x189a,0x18a1,0x18a9,0x1898,0x189f,0x189b,0xb7a,0x186e,0x1876,0x187d,0x1884,0x1871,0x1879,0x1880,0x1887,0xb82,
+0x188f,0x1e78,0x1e78,0x1e78,0x1e78,0x1e78,0x1e78,0x1e78,0x1e78,0x1e78,0x1e78,0x1e78,0x1e78,0x1e78,0x1e78,0x1e78,
+0x1e78,0x1e68,0x1e6b,0x1e68,0x1e72,0x10d9,0x7b9,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x12f0,0x12f8,0x12fb,0x12fb,0x12fb,0x12fb,0x12fb,
+0x12fb,0x110d,0x1115,0x1fdc,0x134f,0x7c1,0x559,0x559,0x559,0x1f84,0x122d,0x7c9,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x7cd,0x131f,0x1f94,0x1273,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x7d5,0x137f,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x12dc,0x1db3,0x1db3,0x1db3,0x1db3,0x1db3,0x1db3,0xfef,0x559,0x1ec0,0x1eb8,0x10e1,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x7dd,0x1f54,0x11d3,0x559,0x559,0x1235,0x1236,0x7e5,0x559,0x559,0x559,0x559,
+0x559,0xebd,0xec5,0xecd,0xed5,0xedd,0xee5,0xeec,0xef0,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x7e9,0x1067,0x1e03,0x106d,0x1e03,0x1075,0x107a,0x107f,
+0x107f,0x1e88,0x1ea8,0x1eb0,0x1f1c,0x1e90,0x1f74,0x1e98,0x1f24,0x1f7c,0x1f7c,0x119d,0x11a5,0x124d,0x1253,0x125b,
+0x1263,0x1f9c,0x1f9c,0x1f9c,0x1f9c,0x12a7,0x1f9c,0x12ad,0x12b1,0x7f1,0x7f1,0x7f1,0x7f1,0x7f1,0x7f1,0x7f1,
+0x7f1,0x7f1,0x7f1,0x7f1,0x7f1,0x7f1,0x7f1,0x7f1,0x7f1,0x7f1,0x7f1,0x7f1,0x7f1,0x7f1,0x7f1,0x7f1,
+0x7f1,0x7f1,0x7f1,0x7f1,0x7f1,0x7f1,0x7f1,0x7f1,0x7f2,0xb8a,0x18b1,0x18b1,0x18b1,0x7fa,0x7fa,0x7fa,
+0x7fa,0x1986,0x1986,0x1986,0x1986,0x1986,0x1986,0x1986,0x802,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,
+0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,
+0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,
+0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,
+0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0x7fa,0xbdd,0xbe4,0xbec,0xbf4,0x196e,0x196e,0x196e,
+0xbfc,0xc04,0xc07,0x199e,0x1996,0xc3a,0xd56,0xd5a,0xd5e,0x559,0x559,0x559,0x559,0xd66,0x1b19,0xd6e,
+0xf58,0x1822,0xb1d,0xb23,0x101f,0xc0f,0x19e2,0xca0,0x559,0x1837,0x182a,0x182f,0x1976,0xc17,0xc1f,0x114b,
+0x1151,0x1d7b,0xf79,0x1d6b,0xf50,0x1327,0x132f,0x559,0x559,0x1da3,0x1da3,0x1da3,0x1da3,0x1da3,0x1da3,0x1da3,
+0x1da3,0x1da3,0xfc7,0xfcf,0xfd7,0x12e4,0x12e8,0x559,0x559,0x1b21,0xd7e,0x1b29,0x1b29,0xd82,0xe8d,0xe95,
+0xe9d,0x1bf1,0x1bd9,0x1bf9,0x1c01,0x1be9,0xe25,0xe29,0xe30,0xe38,0xe3c,0xe44,0xe4c,0xe4e,0xe4e,0xe4e,
+0xe4e,0x1c62,0x1c6a,0x1c62,0x1c70,0x1c78,0x1c43,0x1c80,0x1c88,0x1c62,0x1c90,0x1c98,0x1c9f,0x1ca7,0x1c4b,0x1c62,
+0x1cac,0x1c53,0x1c5a,0x1cb4,0x1cba,0x1d5c,0x1d63,0x1d55,0x1cc1,0x1cc9,0x1cd1,0x1cd9,0x1dd3,0x1ce1,0x1ce9,0xea5,
+0xead,0x1c33,0x1c33,0x1c33,0xeb5,0x1d83,0x1d83,0xf81,0xf89,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x1e3b,0x1e3b,0x1e3b,0x1e3b,0x1e3b,0x1e3b,0x1e3b,
+0x1e3b,0x1e3b,0x1e3b,0x1e3b,0x1e3b,0x1e3b,0x1e3b,0x1e40,0x1e3b,0x1e3b,0x1e3b,0x10a7,0x10a9,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,
+0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,
+0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,
+0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,
+0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1159,0x1c3b,0x1f14,0x1f14,0x1f14,0x1f14,0x1f14,0x1f14,
+0x1f14,0x1f34,0x1161,0x123e,0x1245,0x1f3c,0x1f3c,0x1f3c,0x1f3c,0x1f3c,0x1f3c,0x1f3c,0x1f3c,0x1f3c,0x1f3c,0x1f3c,
+0x117d,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x1bbd,0x1bbd,0x1bbd,0x1bbd,0x1bbd,0x1bbd,0x1bbd,
+0x1bbd,0x1bbd,0x1bbd,0x1bbd,0x1bbd,0x1bbd,0x1bbd,0x1bbd,0x1bbd,0x1bbd,0x1bbd,0x1bbf,0x1bbd,0x1bc7,0x1bbd,0x1bbd,
+0x1bbd,0x1bbd,0x1bbd,0x1bbd,0x1bca,0x1bbd,0x1bbd,0x1bbd,0x1bbd,0x1bbd,0x1bd1,0x1209,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x1ee0,0x1ee0,0x1ee0,0x1ee0,0x1ee0,0x1ee0,0x1ee0,
+0x1ee0,0x1ee0,0x1ee0,0x1ee0,0x1ee0,0x1ee0,0x1ee0,0x1ee0,0x1ee0,0x1ee0,0x1ee0,0x1ee0,0x1ee0,0x1ee0,0x1ee0,0x1ee0,
+0x1ee4,0x1fb4,0x1fb4,0x1fb4,0x1fb4,0x1fb4,0x1fb4,0x1fb4,0x1fb4,0x1fb4,0x1fb4,0x1fb4,0x1fb4,0x1fb4,0x1fb4,0x1299,
+0x129f,0x12b9,0x12bc,0x12bc,0x12bc,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,
+0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x559,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,
+0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,
+0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,
+0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18bc,
+0x1387,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,
+0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,
+0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x138f,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,
+0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,
+0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,
+0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,
+0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x18c4,0x18c4,0x18c4,0x18c4,0x18c4,0x18c4,0x18c4,
+0x18c4,0x18c4,0x18c4,0x18c4,0x18c4,0x18c4,0x18c4,0x18c4,0x18c4,0x13b6,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,
+0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,
+0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,
+0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1393,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,
+0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,
+0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x139b,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,
+0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,
+0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,
+0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,
+0x1387,0x1387,0x1387,0x1387,0x1387,0x1387,0x1393,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,
+0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,
+0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,
+0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,
+0x13be,0x1cf1,0x1cf1,0x1cf1,0x1cf1,0x1cf1,0x1cf1,0x13c6,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,
+0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,
+0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,
+0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,
+0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x13ce,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,
+0x1f2c,0x1f2c,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,
+0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x13d6,0x1fe4,0x1fe4,0x1fe4,
+0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,
+0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,
+0x1fe4,0x1fe4,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,
+0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,
+0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,
+0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,
+0x1751,0x1741,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,
+0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,
+0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,
+0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,
+0x1759,0x1749,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,
+0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,
+0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,
+0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,0x1751,
+0x1751,0x1751,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,
+0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,
+0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,
+0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,0x1759,
+0x1759,0x1759,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,
+0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,
+0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,
+0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,0x18b9,
+0x18b9,0x18b9,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,
+0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,
+0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,
+0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,0x1c09,
+0x1c09,0x1c09,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,
+0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,
+0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,
+0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,0x1e80,
+0x1e80,0x1e80,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,
+0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,
+0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,
+0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,0x1ed8,
+0x1ed8,0x1ed8,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,
+0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,
+0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,
+0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,0x1f2c,
+0x1f2c,0x1f2c,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,
+0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,
+0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,
+0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,0x1fac,
+0x1fac,0x1fac,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,
+0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,
+0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,
+0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,0x1fe4,
+0x1fe4,0x1fe4,0x538,0x538,0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,0x2e5,0x2ee,0x2e8,
+0x2e8,0x2eb,0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,
+0x2e2,0x2e2,0x2e2,0x2e2,0x7da,0x7d4,0x7b9,0x79e,0x7aa,0x7a7,0x79e,0x7b6,0x7a4,0x7b0,0x79e,0x7cb,
+0x7c2,0x7b3,0x7d7,0x7ad,0x79b,0x79b,0x79b,0x79b,0x79b,0x79b,0x79b,0x79b,0x79b,0x79b,0x7bf,0x7bc,
+0x7c5,0x7c5,0x7c5,0x7d4,0x79e,0x7e6,0x7e6,0x7e6,0x7e6,0x7e6,0x7e6,0x7e0,0x7e0,0x7e0,0x7e0,0x7e0,
+0x7e0,0x7e0,0x7e0,0x7e0,0x7e0,0x7e0,0x7e0,0x7e0,0x7e0,0x7e0,0x7e0,0x7e0,0x7e0,0x7e0,0x7e0,0x7a4,
+0x7aa,0x7b0,0x7d1,0x798,0x7ce,0x7e3,0x7e3,0x7e3,0x7e3,0x7e3,0x7e3,0x7dd,0x7dd,0x7dd,0x7dd,0x7dd,
+0x7dd,0x7dd,0x7dd,0x7dd,0x7dd,0x7dd,0x7dd,0x7dd,0x7dd,0x7dd,0x7dd,0x7dd,0x7dd,0x7dd,0x7dd,0x7a4,
+0x7c8,0x7a1,0x7c5,0x2e2,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,0x2f1,0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,
-0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,0x2e2,
-0x2e2,0x2e2,0x2e2,0x2e2,0x2e5,0x63c,0x7e0,0x7e3,0x642,0x7e3,0x7dd,0x636,0x62d,0x2eb,0x64b,0x2ee,
-0x7e6,0x624,0x639,0x7da,0x63f,0x648,0x62a,0x62a,0x630,0x2e8,0x636,0x633,0x62d,0x62a,0x64b,0x2ee,
-0x627,0x627,0x627,0x63c,0x2f7,0x2f7,0x2f7,0x2f7,0x2f7,0x2f7,0x654,0x2f7,0x2f7,0x2f7,0x2f7,0x2f7,
-0x2f7,0x2f7,0x2f7,0x2f7,0x654,0x2f7,0x2f7,0x2f7,0x2f7,0x2f7,0x2f7,0x645,0x654,0x2f7,0x2f7,0x2f7,
-0x2f7,0x2f7,0x654,0x64e,0x651,0x651,0x2f4,0x2f4,0x2f4,0x2f4,0x64e,0x2f4,0x651,0x651,0x651,0x2f4,
-0x651,0x651,0x2f4,0x2f4,0x64e,0x2f4,0x651,0x651,0x2f4,0x2f4,0x2f4,0x645,0x64e,0x651,0x651,0x2f4,
-0x651,0x2f4,0x64e,0x2f4,0x303,0x65a,0x303,0x2fa,0x303,0x2fa,0x303,0x2fa,0x303,0x2fa,0x303,0x2fa,
-0x303,0x2fa,0x303,0x2fa,0x300,0x657,0x303,0x65a,0x303,0x2fa,0x303,0x2fa,0x303,0x2fa,0x303,0x65a,
-0x303,0x2fa,0x303,0x2fa,0x303,0x2fa,0x303,0x2fa,0x303,0x2fa,0x660,0x657,0x303,0x2fa,0x303,0x65a,
-0x303,0x2fa,0x303,0x2fa,0x303,0x657,0x663,0x65d,0x303,0x2fa,0x303,0x2fa,0x657,0x303,0x2fa,0x303,
-0x2fa,0x303,0x2fa,0x663,0x65d,0x660,0x657,0x303,0x65a,0x303,0x2fa,0x303,0x65a,0x666,0x660,0x657,
-0x303,0x65a,0x303,0x2fa,0x303,0x2fa,0x660,0x657,0x303,0x2fa,0x303,0x2fa,0x303,0x2fa,0x303,0x2fa,
-0x303,0x2fa,0x303,0x2fa,0x303,0x2fa,0x303,0x2fa,0x303,0x2fa,0x660,0x657,0x303,0x2fa,0x303,0x65a,
-0x303,0x2fa,0x303,0x2fa,0x303,0x2fa,0x303,0x2fa,0x303,0x2fa,0x303,0x2fa,0x303,0x303,0x2fa,0x303,
-0x2fa,0x303,0x2fa,0x2fd,0x306,0x312,0x312,0x306,0x312,0x306,0x312,0x312,0x306,0x312,0x312,0x312,
-0x306,0x306,0x312,0x312,0x312,0x312,0x306,0x312,0x312,0x306,0x312,0x312,0x312,0x306,0x306,0x306,
-0x312,0x312,0x306,0x312,0x315,0x309,0x312,0x306,0x312,0x306,0x312,0x312,0x306,0x312,0x306,0x306,
-0x312,0x306,0x312,0x315,0x309,0x312,0x312,0x312,0x306,0x312,0x306,0x312,0x312,0x306,0x306,0x30f,
-0x312,0x306,0x306,0x306,0x30f,0x30f,0x30f,0x30f,0x318,0x318,0x30c,0x318,0x318,0x30c,0x318,0x318,
-0x30c,0x315,0x669,0x315,0x669,0x315,0x669,0x315,0x669,0x315,0x669,0x315,0x669,0x315,0x669,0x315,
-0x669,0x306,0x315,0x309,0x315,0x309,0x315,0x309,0x312,0x306,0x315,0x309,0x315,0x309,0x315,0x309,
-0x315,0x309,0x315,0x309,0x309,0x318,0x318,0x30c,0x315,0x309,0x9bd,0x9bd,0x9c0,0x9ba,0x315,0x309,
-0x315,0x309,0x315,0x309,0x315,0x309,0x315,0x309,0x315,0x309,0x315,0x309,0x315,0x309,0x315,0x309,
-0x315,0x309,0x315,0x309,0x315,0x309,0x315,0x309,0x9c0,0x9ba,0x9c0,0x9ba,0x9bd,0x9b7,0x9c0,0x9ba,
-0xb79,0xc72,0x9bd,0x9b7,0x9bd,0x9b7,0x9c0,0x9ba,0x9c0,0x9ba,0x9c0,0x9ba,0x9c0,0x9ba,0x9c0,0x9ba,
-0x9c0,0x9ba,0x9c0,0x9ba,0xc72,0xc72,0xc72,0xd6b,0xd6b,0xd6b,0xd6e,0xd6e,0xd6b,0xd6e,0xd6e,0xd6b,
-0xd6b,0xd6e,0xeac,0xeaf,0xeaf,0xeaf,0xeaf,0xeac,0xeaf,0xeac,0xeaf,0xeac,0xeaf,0xeac,0xeaf,0xeac,
-0x31b,0x66c,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,
-0x31b,0x66c,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,
-0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,
-0x31e,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,0x31b,
-0x31b,0x31b,0x31b,0x31b,0x31b,0x9c3,0x9c3,0x9c3,0x9c3,0x9c3,0xc75,0xc75,0x333,0x333,0x333,0x333,
-0x333,0x333,0x333,0x333,0x333,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x327,0x327,0x324,0x324,
-0x672,0x324,0x32a,0x675,0x32d,0x675,0x675,0x675,0x32d,0x675,0x32a,0x32a,0x678,0x330,0x324,0x324,
-0x324,0x324,0x324,0x324,0x66f,0x66f,0x66f,0x66f,0x321,0x66f,0x324,0xaef,0x333,0x333,0x333,0x333,
-0x333,0x324,0x324,0x324,0x324,0x324,0x9cc,0x9cc,0x9c9,0x9c6,0x9c9,0xc78,0xc78,0xc78,0xc78,0xc78,
-0xc78,0xc78,0xc78,0xc78,0xc78,0xc78,0xc78,0xc78,0xc78,0xc78,0xc78,0xc78,0x67b,0x67b,0x67b,0x67b,
-0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,
-0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,
-0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,
-0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67b,0x67e,0x67e,0x91e,0x67e,
-0x67e,0x921,0xaf2,0xaf2,0xaf2,0xaf2,0xaf2,0xaf2,0xaf2,0xaf2,0xaf2,0xc2a,0xd35,0xd35,0xd35,0xd35,
-0xd35,0xd35,0xd35,0xd35,0xe70,0xe70,0xe70,0xe70,0xe73,0xd38,0xd38,0xd38,0x681,0x681,0xaf5,0xc6f,
-0xc6f,0xc6f,0xc6f,0xc6f,0xc6f,0xc6f,0xc6f,0xc6f,0xc6f,0xc6f,0xc6f,0xc6f,0xf5a,0xf57,0xf5a,0xf57,
-0x33f,0x348,0xf5a,0xf57,9,9,0x34e,0xeb2,0xeb2,0xeb2,0x336,0x149d,9,9,9,9,
-0x34b,0x339,0x35d,0x33c,0x35d,0x35d,0x35d,9,0x35d,9,0x35d,0x35d,0x354,0x687,0x687,0x687,
-0x687,0x687,0x687,0x687,0x687,0x687,0x687,0x687,0x687,0x687,0x687,0x687,0x687,0x687,9,0x687,
-0x687,0x687,0x687,0x687,0x687,0x687,0x35d,0x35d,0x354,0x354,0x354,0x354,0x354,0x684,0x684,0x684,
-0x684,0x684,0x684,0x684,0x684,0x684,0x684,0x684,0x684,0x684,0x684,0x684,0x684,0x684,0x351,0x684,
-0x684,0x684,0x684,0x684,0x684,0x684,0x354,0x354,0x354,0x354,0x354,0xf5a,0x360,0x360,0x363,0x35d,
-0x35d,0x360,0x357,0x9cf,0xb82,0xb7f,0x35a,0x9cf,0x35a,0x9cf,0x35a,0x9cf,0x35a,0x9cf,0x345,0x342,
-0x345,0x342,0x345,0x342,0x345,0x342,0x345,0x342,0x345,0x342,0x345,0x342,0x360,0x360,0x357,0x351,
-0xb31,0xb2e,0xb7c,0xc7e,0xc7b,0xc81,0xc7e,0xc7b,0xd71,0xd74,0xd74,0xd74,0x9de,0x693,0x36f,0x372,
-0x36f,0x36f,0x36f,0x372,0x36f,0x36f,0x36f,0x36f,0x372,0x9de,0x372,0x36f,0x690,0x690,0x690,0x690,
-0x690,0x690,0x690,0x690,0x690,0x693,0x690,0x690,0x690,0x690,0x690,0x690,0x690,0x690,0x690,0x690,
-0x690,0x690,0x690,0x690,0x690,0x690,0x690,0x690,0x690,0x690,0x690,0x690,0x68a,0x68a,0x68a,0x68a,
-0x68a,0x68a,0x68a,0x68a,0x68a,0x68d,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,
-0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x9d8,0x68d,0x369,0x36c,0x369,0x369,0x369,0x36c,
-0x369,0x369,0x369,0x369,0x36c,0x9d8,0x36c,0x369,0x36f,0x369,0x36f,0x369,0x36f,0x369,0x36f,0x369,
-0x36f,0x369,0x36f,0x369,0x36f,0x369,0x36f,0x369,0x36f,0x369,0x36f,0x369,0x36f,0x369,0x372,0x36c,
-0x36f,0x369,0x36f,0x369,0x36f,0x369,0x36f,0x369,0x36f,0x369,0x366,0x92a,0x92d,0x90f,0x90f,0x1101,
-0x9d2,0x9d2,0xb88,0xb85,0x9db,0x9d5,0x9db,0x9d5,0x36f,0x369,0x36f,0x369,0x36f,0x369,0x36f,0x369,
-0x36f,0x369,0x36f,0x369,0x36f,0x369,0x36f,0x369,0x36f,0x369,0x36f,0x369,0x36f,0x369,0x36f,0x369,
-0x36f,0x369,0x36f,0x369,0x36f,0x369,0x36f,0x369,0x36f,0x369,0x36f,0x369,0x36f,0x369,0x36f,0x369,
-0x36f,0x369,0x36f,0x369,0x36f,0x369,0x36f,0x369,0x36f,0x372,0x36c,0x36f,0x369,0xb88,0xb85,0x36f,
-0x369,0xb88,0xb85,0x36f,0x369,0xb88,0xb85,0xeb5,0x372,0x36c,0x372,0x36c,0x36f,0x369,0x372,0x36c,
-0x36f,0x369,0x372,0x36c,0x372,0x36c,0x372,0x36c,0x36f,0x369,0x372,0x36c,0x372,0x36c,0x372,0x36c,
-0x36f,0x369,0x372,0x36c,0x9de,0x9d8,0x372,0x36c,0x372,0x36c,0x372,0x36c,0x372,0x36c,0xd7a,0xd77,
-0x372,0x36c,0xeb8,0xeb5,0xeb8,0xeb5,0xeb8,0xeb5,0xbee,0xbeb,0xbee,0xbeb,0xbee,0xbeb,0xbee,0xbeb,
-0xbee,0xbeb,0xbee,0xbeb,0xbee,0xbeb,0xbee,0xbeb,0xee5,0xee2,0xee5,0xee2,0xfd5,0xfd2,0xfd5,0xfd2,
-0xfd5,0xfd2,0xfd5,0xfd2,0xfd5,0xfd2,0xfd5,0xfd2,0xfd5,0xfd2,0xfd5,0xfd2,0x113a,0x1137,0x1317,0x1314,
-0x14d3,0x14d0,0x14d3,0x14d0,0x14d3,0x14d0,0x14d3,0x14d0,0xc,0x384,0x384,0x384,0x384,0x384,0x384,0x384,
-0x384,0x384,0x384,0x384,0x384,0x384,0x384,0x384,0x384,0x384,0x384,0x384,0x384,0x384,0x384,0x384,
-0x384,0x384,0x384,0xc,0xc,0x387,0x375,0x375,0x375,0x37b,0x375,0x378,0x18d8,0x37e,0x37e,0x37e,
-0x37e,0x37e,0x37e,0x37e,0x37e,0x37e,0x37e,0x37e,0x37e,0x37e,0x37e,0x37e,0x37e,0x37e,0x37e,0x37e,
-0x37e,0x37e,0x37e,0x37e,0x37e,0x37e,0x37e,0x37e,0x37e,0x37e,0x37e,0x37e,0x37e,0x37e,0x37e,0x381,
-0x18d8,0x38a,0x9e1,0xc,0xc,0x14a0,0x14a0,0x13bc,0xf,0x951,0x951,0x951,0x951,0x951,0x951,0x951,
-0x951,0x951,0x951,0x951,0x951,0x951,0x951,0x951,0x951,0x951,0xd7d,0x951,0x951,0x951,0x951,0x951,
-0x951,0x951,0x951,0x951,0x951,0x951,0x951,0x951,0x38d,0x38d,0x38d,0x38d,0x38d,0x38d,0x38d,0x38d,
-0x38d,0x38d,0xebb,0x38d,0x38d,0x38d,0x399,0x38d,0x390,0x38d,0x38d,0x39c,0x954,0xd80,0xd83,0xd80,
-0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0x39f,0x39f,0x39f,0x39f,0x39f,0x39f,0x39f,0x39f,
-0x39f,0x39f,0x39f,0x39f,0x39f,0x39f,0x39f,0x39f,0x39f,0x39f,0x39f,0x39f,0x39f,0x39f,0x39f,0x39f,
-0x39f,0x39f,0x39f,0xf,0xf,0xf,0xf,0x18db,0x39f,0x39f,0x39f,0x396,0x393,0xf,0xf,0xf,
-0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xc96,0xc96,0xc96,0xc96,0x13bf,0x14a3,0xf63,0xf63,
-0xf63,0xf60,0xf60,0xd89,0x897,0xc90,0xc8d,0xc8d,0xc84,0xc84,0xc84,0xc84,0xc84,0xc84,0xf5d,0xf5d,
-0xf5d,0xf5d,0xf5d,0x894,0x149a,0x1aeb,0xd8c,0x89a,0x12de,0x3ba,0x3bd,0x3bd,0x3bd,0x3bd,0x3bd,0x3ba,
-0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,
-0x3ba,0x3ba,0x3ba,0xf66,0xf66,0xf66,0xf66,0xf66,0x89d,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,
-0x3ba,0x3ba,0x3ba,0x915,0x915,0x915,0x915,0x915,0x915,0x915,0x915,0xb28,0xb28,0xb28,0xc84,0xc8a,
-0xc87,0xd86,0xd86,0xd86,0xd86,0xd86,0xd86,0x12db,0x930,0x930,0x930,0x930,0x930,0x930,0x930,0x930,
-0x930,0x930,0x3b4,0x3b1,0x3ae,0x3ab,0xb8b,0xb8b,0x912,0x3ba,0x3ba,0x3c6,0x3ba,0x3c0,0x3c0,0x3c0,
-0x3c0,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,
-0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,
-0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,
-0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x9e7,0x9e7,0x3ba,0x3ba,
-0x3ba,0x3ba,0x3ba,0x9e7,0x3bd,0x3ba,0x3bd,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,0x3ba,
-0x3ba,0x3ba,0x3ba,0x9e7,0x3ba,0x3ba,0x3ba,0x3bd,0x933,0x3ba,0x3a5,0x3a5,0x3a5,0x3a5,0x3a5,0x3a5,
-0x3a5,0x3a2,0x3ab,0x3a8,0x3a8,0x3a5,0x3a5,0x3a5,0x3a5,0x3c3,0x3c3,0x3a5,0x3a5,0x3ab,0x3a8,0x3a8,
-0x3a8,0x3a5,0xc93,0xc93,0x3b7,0x3b7,0x3b7,0x3b7,0x3b7,0x3b7,0x3b7,0x3b7,0x3b7,0x3b7,0x9e7,0x9e7,
-0x9e7,0x9e4,0x9e4,0xc93,0x9fc,0x9fc,0x9fc,0x9f6,0x9f6,0x9f6,0x9f6,0x9f6,0x9f6,0x9f6,0x9f6,0x9f3,
-0x9f6,0x9f3,0x12,0x9ff,0x9f9,0x9ea,0x9f9,0x9f9,0x9f9,0x9f9,0x9f9,0x9f9,0x9f9,0x9f9,0x9f9,0x9f9,
-0x9f9,0x9f9,0x9f9,0x9f9,0x9f9,0x9f9,0x9f9,0x9f9,0x9f9,0x9f9,0x9f9,0x9f9,0x9f9,0x9f9,0x9f9,0x9f9,
-0x9f9,0xc99,0xc99,0xc99,0x9f0,0x9f0,0x9f0,0x9f0,0x9f0,0x9f0,0x9f0,0x9f0,0x9f0,0x9f0,0x9f0,0x9f0,
-0x9f0,0x9f0,0x9f0,0x9f0,0x9ed,0x9ed,0x9ed,0x9ed,0x9ed,0x9ed,0x9ed,0x9ed,0x9ed,0x9ed,0x9ed,0x12,
-0x12,0xc99,0xc99,0xc99,0xde9,0xde9,0xde9,0xde9,0xde9,0xde9,0xde9,0xde9,0xde9,0xde9,0xde9,0xde9,
-0xde9,0xde9,0xde9,0xde9,0xde9,0xde9,0xde9,0xde9,0xde9,0xde9,0xde9,0xde9,0xde9,0xde9,0xde9,0xde9,
-0xde9,0xde9,0xfe7,0xfe7,0xfe7,0xfe7,0xfe7,0xfe7,0xfe7,0xfe7,0xfe7,0xfe7,0xfe7,0xfe7,0xfe7,0xfe7,
-0xfe7,0xfe7,0xfe7,0xfe7,0xa05,0xa05,0xa05,0xa05,0xa05,0xa05,0xa05,0xa05,0xa05,0xa05,0xa05,0xa05,
-0xa05,0xa05,0xa05,0xa05,0xa05,0xa05,0xa05,0xa05,0xa05,0xa05,0xa05,0xa05,0xa05,0xa05,0xa05,0xa05,
-0xa05,0xa05,0xa05,0xa05,0xa05,0xa05,0xa05,0xa05,0xa05,0xa05,0xa02,0xa02,0xa02,0xa02,0xa02,0xa02,
-0xa02,0xa02,0xa02,0xa02,0xa02,0xb8e,0x15,0x15,0x15,0x15,0x15,0x15,0x15,0x15,0x15,0x15,
-0x15,0x15,0x15,0x15,0xefd,0xefd,0xefd,0xefd,0xefd,0xefd,0xefd,0xefd,0xefd,0xefd,0xf00,0xf00,
-0xf00,0xf00,0xf00,0xf00,0xf00,0xf00,0xf00,0xf00,0xf00,0xf00,0xf00,0xf00,0xf00,0xf00,0xf00,0xf00,
-0xf00,0xf00,0xf00,0xf00,0xf00,0xf00,0xf00,0xf00,0xf00,0xf00,0xf00,0xf00,0xf00,0xf00,0xf00,0xef4,
-0xef4,0xef4,0xef4,0xef4,0xef4,0xef4,0xef4,0xef4,0xf03,0xf03,0xef7,0xef7,0xefa,0xf09,0xf06,0x102,
-0x102,0x18ff,0x1902,0x1902,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0xb01,0xb01,0xb04,0xb04,0xb01,0xb01,0xb01,0xb01,0xb01,0xb01,0xb01,0xb01,
-0x6f,0x6f,0x6f,0x6f,0x1e3,0x1e3,0x1e3,0x1e3,0x1e3,0x1e3,0x1e3,0x1605,0x1605,0x1605,0x1605,0x1605,
-0x1605,0x1605,0x1605,0x1605,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0x1ef,0x1ef,0x1ef,0x1ef,0x1ef,0x1ef,0x1ef,0x1ef,0x1ef,0x163e,0x163e,0x163e,
-0x163e,0x163e,0x163e,0x163e,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0x122d,0x122d,0x122d,0x122d,0x122d,0x122d,0x122d,0x122d,0x122d,0x16b,0x16b,0x16b,
+0,0,0,0,0x2f1,0x2f1,0x2f1,0x2f1,0x2f1,0x300,0x2f1,0x2f1,0x2f1,0x2f1,0x2f1,0x2f1,
+0x2f1,0x2f1,0x2f1,0x2f1,0x2f1,0x2f1,0x2f1,0x2f1,0x2f1,0x2f1,0x2f1,0x2f1,0x2f1,0x2f1,0x2f1,0x2f1,
+0x2f1,0x2f1,0x2f1,0x2f1,0x2f4,0x64b,0x7ef,0x7f2,0x651,0x7f2,0x7ec,0x645,0x63c,0x2fa,0x65a,0x2fd,
+0x7f5,0x633,0x648,0x7e9,0x64e,0x657,0x639,0x639,0x63f,0x2f7,0x645,0x642,0x63c,0x639,0x65a,0x2fd,
+0x636,0x636,0x636,0x64b,0x306,0x306,0x306,0x306,0x306,0x306,0x663,0x306,0x306,0x306,0x306,0x306,
+0x306,0x306,0x306,0x306,0x663,0x306,0x306,0x306,0x306,0x306,0x306,0x654,0x663,0x306,0x306,0x306,
+0x306,0x306,0x663,0x65d,0x660,0x660,0x303,0x303,0x303,0x303,0x65d,0x303,0x660,0x660,0x660,0x303,
+0x660,0x660,0x303,0x303,0x65d,0x303,0x660,0x660,0x303,0x303,0x303,0x654,0x65d,0x660,0x660,0x303,
+0x660,0x303,0x65d,0x303,0x312,0x669,0x312,0x309,0x312,0x309,0x312,0x309,0x312,0x309,0x312,0x309,
+0x312,0x309,0x312,0x309,0x30f,0x666,0x312,0x669,0x312,0x309,0x312,0x309,0x312,0x309,0x312,0x669,
+0x312,0x309,0x312,0x309,0x312,0x309,0x312,0x309,0x312,0x309,0x66f,0x666,0x312,0x309,0x312,0x669,
+0x312,0x309,0x312,0x309,0x312,0x666,0x672,0x66c,0x312,0x309,0x312,0x309,0x666,0x312,0x309,0x312,
+0x309,0x312,0x309,0x672,0x66c,0x66f,0x666,0x312,0x669,0x312,0x309,0x312,0x669,0x675,0x66f,0x666,
+0x312,0x669,0x312,0x309,0x312,0x309,0x66f,0x666,0x312,0x309,0x312,0x309,0x312,0x309,0x312,0x309,
+0x312,0x309,0x312,0x309,0x312,0x309,0x312,0x309,0x312,0x309,0x66f,0x666,0x312,0x309,0x312,0x669,
+0x312,0x309,0x312,0x309,0x312,0x309,0x312,0x309,0x312,0x309,0x312,0x309,0x312,0x312,0x309,0x312,
+0x309,0x312,0x309,0x30c,0x315,0x321,0x321,0x315,0x321,0x315,0x321,0x321,0x315,0x321,0x321,0x321,
+0x315,0x315,0x321,0x321,0x321,0x321,0x315,0x321,0x321,0x315,0x321,0x321,0x321,0x315,0x315,0x315,
+0x321,0x321,0x315,0x321,0x324,0x318,0x321,0x315,0x321,0x315,0x321,0x321,0x315,0x321,0x315,0x315,
+0x321,0x315,0x321,0x324,0x318,0x321,0x321,0x321,0x315,0x321,0x315,0x321,0x321,0x315,0x315,0x31e,
+0x321,0x315,0x315,0x315,0x31e,0x31e,0x31e,0x31e,0x327,0x327,0x31b,0x327,0x327,0x31b,0x327,0x327,
+0x31b,0x324,0x678,0x324,0x678,0x324,0x678,0x324,0x678,0x324,0x678,0x324,0x678,0x324,0x678,0x324,
+0x678,0x315,0x324,0x318,0x324,0x318,0x324,0x318,0x321,0x315,0x324,0x318,0x324,0x318,0x324,0x318,
+0x324,0x318,0x324,0x318,0x318,0x327,0x327,0x31b,0x324,0x318,0x9cf,0x9cf,0x9d2,0x9cc,0x324,0x318,
+0x324,0x318,0x324,0x318,0x324,0x318,0x324,0x318,0x324,0x318,0x324,0x318,0x324,0x318,0x324,0x318,
+0x324,0x318,0x324,0x318,0x324,0x318,0x324,0x318,0x9d2,0x9cc,0x9d2,0x9cc,0x9cf,0x9c9,0x9d2,0x9cc,
+0xb8b,0xc84,0x9cf,0x9c9,0x9cf,0x9c9,0x9d2,0x9cc,0x9d2,0x9cc,0x9d2,0x9cc,0x9d2,0x9cc,0x9d2,0x9cc,
+0x9d2,0x9cc,0x9d2,0x9cc,0xc84,0xc84,0xc84,0xd7d,0xd7d,0xd7d,0xd80,0xd80,0xd7d,0xd80,0xd80,0xd7d,
+0xd7d,0xd80,0xebe,0xec1,0xec1,0xec1,0xec1,0xebe,0xec1,0xebe,0xec1,0xebe,0xec1,0xebe,0xec1,0xebe,
+0x32a,0x67b,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,
+0x32a,0x67b,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,
+0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,
+0x32d,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,0x32a,
+0x32a,0x32a,0x32a,0x32a,0x32a,0x9d5,0x9d5,0x9d5,0x9d5,0x9d5,0xc87,0xc87,0x342,0x342,0x342,0x342,
+0x342,0x342,0x342,0x342,0x342,0x339,0x339,0x339,0x339,0x339,0x339,0x339,0x336,0x336,0x333,0x333,
+0x681,0x333,0x339,0x684,0x33c,0x684,0x684,0x684,0x33c,0x684,0x339,0x339,0x687,0x33f,0x333,0x333,
+0x333,0x333,0x333,0x333,0x67e,0x67e,0x67e,0x67e,0x330,0x67e,0x333,0xb01,0x342,0x342,0x342,0x342,
+0x342,0x333,0x333,0x333,0x333,0x333,0x9de,0x9de,0x9db,0x9d8,0x9db,0xc8a,0xc8a,0xc8a,0xc8a,0xc8a,
+0xc8a,0xc8a,0xc8a,0xc8a,0xc8a,0xc8a,0xc8a,0xc8a,0xc8a,0xc8a,0xc8a,0xc8a,0x68a,0x68a,0x68a,0x68a,
+0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,
+0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,
+0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,
+0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68a,0x68d,0x68d,0x92d,0x68d,
+0x68d,0x930,0xb04,0xb04,0xb04,0xb04,0xb04,0xb04,0xb04,0xb04,0xb04,0xc3c,0xd47,0xd47,0xd47,0xd47,
+0xd47,0xd47,0xd47,0xd47,0xe82,0xe82,0xe82,0xe82,0xe85,0xd4a,0xd4a,0xd4a,0x690,0x690,0xb07,0xc81,
+0xc81,0xc81,0xc81,0xc81,0xc81,0xc81,0xc81,0xc81,0xc81,0xc81,0xc81,0xc81,0xf6c,0xf69,0xf6c,0xf69,
+0x34e,0x357,0xf6c,0xf69,9,9,0x35d,0xec4,0xec4,0xec4,0x345,0x14af,9,9,9,9,
+0x35a,0x348,0x36c,0x34b,0x36c,0x36c,0x36c,9,0x36c,9,0x36c,0x36c,0x363,0x696,0x696,0x696,
+0x696,0x696,0x696,0x696,0x696,0x696,0x696,0x696,0x696,0x696,0x696,0x696,0x696,0x696,9,0x696,
+0x696,0x696,0x696,0x696,0x696,0x696,0x36c,0x36c,0x363,0x363,0x363,0x363,0x363,0x693,0x693,0x693,
+0x693,0x693,0x693,0x693,0x693,0x693,0x693,0x693,0x693,0x693,0x693,0x693,0x693,0x693,0x360,0x693,
+0x693,0x693,0x693,0x693,0x693,0x693,0x363,0x363,0x363,0x363,0x363,0xf6c,0x36f,0x36f,0x372,0x36c,
+0x36c,0x36f,0x366,0x9e1,0xb94,0xb91,0x369,0x9e1,0x369,0x9e1,0x369,0x9e1,0x369,0x9e1,0x354,0x351,
+0x354,0x351,0x354,0x351,0x354,0x351,0x354,0x351,0x354,0x351,0x354,0x351,0x36f,0x36f,0x366,0x360,
+0xb43,0xb40,0xb8e,0xc90,0xc8d,0xc93,0xc90,0xc8d,0xd83,0xd86,0xd86,0xd86,0x9f0,0x6a2,0x37e,0x381,
+0x37e,0x37e,0x37e,0x381,0x37e,0x37e,0x37e,0x37e,0x381,0x9f0,0x381,0x37e,0x69f,0x69f,0x69f,0x69f,
+0x69f,0x69f,0x69f,0x69f,0x69f,0x6a2,0x69f,0x69f,0x69f,0x69f,0x69f,0x69f,0x69f,0x69f,0x69f,0x69f,
+0x69f,0x69f,0x69f,0x69f,0x69f,0x69f,0x69f,0x69f,0x69f,0x69f,0x69f,0x69f,0x699,0x699,0x699,0x699,
+0x699,0x699,0x699,0x699,0x699,0x69c,0x699,0x699,0x699,0x699,0x699,0x699,0x699,0x699,0x699,0x699,
+0x699,0x699,0x699,0x699,0x699,0x699,0x699,0x699,0x9ea,0x69c,0x378,0x37b,0x378,0x378,0x378,0x37b,
+0x378,0x378,0x378,0x378,0x37b,0x9ea,0x37b,0x378,0x37e,0x378,0x37e,0x378,0x37e,0x378,0x37e,0x378,
+0x37e,0x378,0x37e,0x378,0x37e,0x378,0x37e,0x378,0x37e,0x378,0x37e,0x378,0x37e,0x378,0x381,0x37b,
+0x37e,0x378,0x37e,0x378,0x37e,0x378,0x37e,0x378,0x37e,0x378,0x375,0x939,0x93c,0x91e,0x91e,0x1116,
+0x9e4,0x9e4,0xb9a,0xb97,0x9ed,0x9e7,0x9ed,0x9e7,0x37e,0x378,0x37e,0x378,0x37e,0x378,0x37e,0x378,
+0x37e,0x378,0x37e,0x378,0x37e,0x378,0x37e,0x378,0x37e,0x378,0x37e,0x378,0x37e,0x378,0x37e,0x378,
+0x37e,0x378,0x37e,0x378,0x37e,0x378,0x37e,0x378,0x37e,0x378,0x37e,0x378,0x37e,0x378,0x37e,0x378,
+0x37e,0x378,0x37e,0x378,0x37e,0x378,0x37e,0x378,0x37e,0x381,0x37b,0x37e,0x378,0xb9a,0xb97,0x37e,
+0x378,0xb9a,0xb97,0x37e,0x378,0xb9a,0xb97,0xec7,0x381,0x37b,0x381,0x37b,0x37e,0x378,0x381,0x37b,
+0x37e,0x378,0x381,0x37b,0x381,0x37b,0x381,0x37b,0x37e,0x378,0x381,0x37b,0x381,0x37b,0x381,0x37b,
+0x37e,0x378,0x381,0x37b,0x9f0,0x9ea,0x381,0x37b,0x381,0x37b,0x381,0x37b,0x381,0x37b,0xd8c,0xd89,
+0x381,0x37b,0xeca,0xec7,0xeca,0xec7,0xeca,0xec7,0xc00,0xbfd,0xc00,0xbfd,0xc00,0xbfd,0xc00,0xbfd,
+0xc00,0xbfd,0xc00,0xbfd,0xc00,0xbfd,0xc00,0xbfd,0xef7,0xef4,0xef7,0xef4,0xfe7,0xfe4,0xfe7,0xfe4,
+0xfe7,0xfe4,0xfe7,0xfe4,0xfe7,0xfe4,0xfe7,0xfe4,0xfe7,0xfe4,0xfe7,0xfe4,0x114f,0x114c,0x1329,0x1326,
+0x14e5,0x14e2,0x14e5,0x14e2,0x14e5,0x14e2,0x14e5,0x14e2,0xc,0x393,0x393,0x393,0x393,0x393,0x393,0x393,
+0x393,0x393,0x393,0x393,0x393,0x393,0x393,0x393,0x393,0x393,0x393,0x393,0x393,0x393,0x393,0x393,
+0x393,0x393,0x393,0xc,0xc,0x396,0x384,0x384,0x384,0x38a,0x384,0x387,0x18ea,0x38d,0x38d,0x38d,
+0x38d,0x38d,0x38d,0x38d,0x38d,0x38d,0x38d,0x38d,0x38d,0x38d,0x38d,0x38d,0x38d,0x38d,0x38d,0x38d,
+0x38d,0x38d,0x38d,0x38d,0x38d,0x38d,0x38d,0x38d,0x38d,0x38d,0x38d,0x38d,0x38d,0x38d,0x38d,0x390,
+0x18ea,0x399,0x9f3,0xc,0xc,0x14b2,0x14b2,0x13ce,0xf,0x960,0x960,0x960,0x960,0x960,0x960,0x960,
+0x960,0x960,0x960,0x960,0x960,0x960,0x960,0x960,0x960,0x960,0xd8f,0x960,0x960,0x960,0x960,0x960,
+0x960,0x960,0x960,0x960,0x960,0x960,0x960,0x960,0x39c,0x39c,0x39c,0x39c,0x39c,0x39c,0x39c,0x39c,
+0x39c,0x39c,0xecd,0x39c,0x39c,0x39c,0x3a8,0x39c,0x39f,0x39c,0x39c,0x3ab,0x963,0xd92,0xd95,0xd92,
+0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0x3ae,0x3ae,0x3ae,0x3ae,0x3ae,0x3ae,0x3ae,0x3ae,
+0x3ae,0x3ae,0x3ae,0x3ae,0x3ae,0x3ae,0x3ae,0x3ae,0x3ae,0x3ae,0x3ae,0x3ae,0x3ae,0x3ae,0x3ae,0x3ae,
+0x3ae,0x3ae,0x3ae,0xf,0xf,0xf,0xf,0x18ed,0x3ae,0x3ae,0x3ae,0x3a5,0x3a2,0xf,0xf,0xf,
+0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xf,0xca8,0xca8,0xca8,0xca8,0x13d1,0x14b5,0xf75,0xf75,
+0xf75,0xf72,0xf72,0xd9b,0x8a6,0xca2,0xc9f,0xc9f,0xc96,0xc96,0xc96,0xc96,0xc96,0xc96,0xf6f,0xf6f,
+0xf6f,0xf6f,0xf6f,0x8a3,0x14ac,0x1afd,0xd9e,0x8a9,0x12f0,0x3c9,0x3cc,0x3cc,0x3cc,0x3cc,0x3cc,0x3c9,
+0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,
+0x3c9,0x3c9,0x3c9,0xf78,0xf78,0xf78,0xf78,0xf78,0x8ac,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,
+0x3c9,0x3c9,0x3c9,0x924,0x924,0x924,0x924,0x924,0x924,0x924,0x924,0xb3a,0xb3a,0xb3a,0xc96,0xc9c,
+0xc99,0xd98,0xd98,0xd98,0xd98,0xd98,0xd98,0x12ed,0x93f,0x93f,0x93f,0x93f,0x93f,0x93f,0x93f,0x93f,
+0x93f,0x93f,0x3c3,0x3c0,0x3bd,0x3ba,0xb9d,0xb9d,0x921,0x3c9,0x3c9,0x3d5,0x3c9,0x3cf,0x3cf,0x3cf,
+0x3cf,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,
+0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,
+0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,
+0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x9f9,0x9f9,0x3c9,0x3c9,
+0x3c9,0x3c9,0x3c9,0x9f9,0x3cc,0x3c9,0x3cc,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,0x3c9,
+0x3c9,0x3c9,0x3c9,0x9f9,0x3c9,0x3c9,0x3c9,0x3cc,0x942,0x3c9,0x3b4,0x3b4,0x3b4,0x3b4,0x3b4,0x3b4,
+0x3b4,0x3b1,0x3ba,0x3b7,0x3b7,0x3b4,0x3b4,0x3b4,0x3b4,0x3d2,0x3d2,0x3b4,0x3b4,0x3ba,0x3b7,0x3b7,
+0x3b7,0x3b4,0xca5,0xca5,0x3c6,0x3c6,0x3c6,0x3c6,0x3c6,0x3c6,0x3c6,0x3c6,0x3c6,0x3c6,0x9f9,0x9f9,
+0x9f9,0x9f6,0x9f6,0xca5,0xa0e,0xa0e,0xa0e,0xa08,0xa08,0xa08,0xa08,0xa08,0xa08,0xa08,0xa08,0xa05,
+0xa08,0xa05,0x12,0xa11,0xa0b,0x9fc,0xa0b,0xa0b,0xa0b,0xa0b,0xa0b,0xa0b,0xa0b,0xa0b,0xa0b,0xa0b,
+0xa0b,0xa0b,0xa0b,0xa0b,0xa0b,0xa0b,0xa0b,0xa0b,0xa0b,0xa0b,0xa0b,0xa0b,0xa0b,0xa0b,0xa0b,0xa0b,
+0xa0b,0xcab,0xcab,0xcab,0xa02,0xa02,0xa02,0xa02,0xa02,0xa02,0xa02,0xa02,0xa02,0xa02,0xa02,0xa02,
+0xa02,0xa02,0xa02,0xa02,0x9ff,0x9ff,0x9ff,0x9ff,0x9ff,0x9ff,0x9ff,0x9ff,0x9ff,0x9ff,0x9ff,0x12,
+0x12,0xcab,0xcab,0xcab,0xdfb,0xdfb,0xdfb,0xdfb,0xdfb,0xdfb,0xdfb,0xdfb,0xdfb,0xdfb,0xdfb,0xdfb,
+0xdfb,0xdfb,0xdfb,0xdfb,0xdfb,0xdfb,0xdfb,0xdfb,0xdfb,0xdfb,0xdfb,0xdfb,0xdfb,0xdfb,0xdfb,0xdfb,
+0xdfb,0xdfb,0xff9,0xff9,0xff9,0xff9,0xff9,0xff9,0xff9,0xff9,0xff9,0xff9,0xff9,0xff9,0xff9,0xff9,
+0xff9,0xff9,0xff9,0xff9,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,
+0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,
+0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa14,0xa14,0xa14,0xa14,0xa14,0xa14,
+0xa14,0xa14,0xa14,0xa14,0xa14,0xba0,0x15,0x15,0x15,0x15,0x15,0x15,0x15,0x15,0x15,0x15,
+0x15,0x15,0x15,0x15,0xf0f,0xf0f,0xf0f,0xf0f,0xf0f,0xf0f,0xf0f,0xf0f,0xf0f,0xf0f,0xf12,0xf12,
+0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,
+0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,0xf06,
+0xf06,0xf06,0xf06,0xf06,0xf06,0xf06,0xf06,0xf06,0xf15,0xf15,0xf09,0xf09,0xf0c,0xf1b,0xf18,0x102,
+0x102,0x1911,0x1914,0x1914,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0xb13,0xb13,0xb16,0xb16,0xb13,0xb13,0xb13,0xb13,0xb13,0xb13,0xb13,0xb13,
+0x6f,0x6f,0x6f,0x6f,0x1e0,0x1e0,0x1e0,0x1e0,0x1e0,0x1e0,0x1e0,0x1617,0x1617,0x1617,0x1617,0x1617,
+0x1617,0x1617,0x1617,0x1617,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0x1ec,0x1ec,0x1ec,0x1ec,0x1ec,0x1ec,0x1ec,0x1ec,0x1ec,0x1650,0x1650,0x1650,
+0x1650,0x1650,0x1650,0x1650,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0x1242,0x1242,0x1242,0x1242,0x1242,0x1242,0x1242,0x1242,0x1242,0x16b,0x16b,0x16b,
0x16b,0x16b,0x16b,0x16b,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0x1c5,0x1c5,0x1c5,0x1c5,0x1c5,0x1c5,0x1c5,0x1c5,0x1c5,0x1c5,0x1c5,0x1c5,
-0x1c5,0x1c5,0x1c5,0x1c5,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0x1476,0x1476,0x1476,0x1476,0x1476,0x1476,0x1476,0x1476,0x1476,0x1476,0x1aa,0x1aa,
-0x1aa,0x1aa,0x1aa,0x1aa,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0x1b48,0x1b48,0x1b48,0x1b48,0x1b48,0x1b48,0x1b48,0x204,0x204,0x204,0x204,0x204,
-0x204,0x204,0x204,0x204,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0x24c,0x24c,0x24c,0x24c,0x24c,0x24c,0x24c,0x24c,0x24c,0x24c,0x24c,0x24c,
-0x24c,0x24c,0x24c,0x24c,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0x194d,0x194d,0x194d,0x194d,0x194d,0x194d,0x194d,0x194d,0x194d,0x194d,0x252,0x252,
-0x252,0x252,0x252,0x252,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0x1aaf,0x28e,0x28e,0x28e,0x28e,0x28e,0x28e,0x28e,0x28e,0x28e,0x28e,0x28e,
-0x28e,0x28e,0x28e,0x28e,0x1740,0x1740,0x1740,0x1740,0x20a,0x20a,0x20a,0x20a,0x20a,0x20a,0x20a,0x20a,
-0x20a,0x20a,0x20a,0x20a,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,
-0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,
-0x162c,0x162c,0x162c,0x162c,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0x1bb1,0x1bb1,0x1bb1,0x1bb1,0x2a3,0x1bb1,0x1bb1,0x1bb1,0x1bb1,0x1bb1,0x1bb1,0x1bb1,
-0x2a3,0x1bb1,0x1bb1,0x2a3,0x16a4,0x16a4,0x16a4,0x16a4,0x1f2,0x1f2,0x1f2,0x1f2,0x1f2,0x1f2,0x1f2,0x1f2,
-0x1f2,0x1f2,0x1f2,0x1f2,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,
-0x2b8,0x2b8,0x2b8,0x2b8,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0xde0,0xde0,0xddd,0xddd,0xddd,0xde0,0xd5,0xd5,0xd5,0xd5,0xd5,0xd5,
+0,0,0,0,0x1bdb,0x1bd8,0x1c2,0x1c2,0x1c2,0x1c2,0x1c2,0x1c2,0x1c2,0x1c2,0x1c2,0x1c2,
+0x1c2,0x1c2,0x1c2,0x1c2,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0x1488,0x1488,0x1488,0x1488,0x1488,0x1488,0x1488,0x1488,0x1488,0x1488,0x1a7,0x1a7,
+0x1a7,0x1a7,0x1a7,0x1a7,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x201,0x201,0x201,0x201,0x201,
+0x201,0x201,0x201,0x201,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0x249,0x249,0x249,0x249,0x249,0x249,0x249,0x249,0x249,0x249,0x249,0x249,
+0x249,0x249,0x249,0x249,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0x195f,0x195f,0x195f,0x195f,0x195f,0x195f,0x195f,0x195f,0x195f,0x195f,0x24f,0x24f,
+0x24f,0x24f,0x24f,0x24f,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0x1ac1,0x28b,0x28b,0x28b,0x28b,0x28b,0x28b,0x28b,0x28b,0x28b,0x28b,0x28b,
+0x28b,0x28b,0x28b,0x28b,0x1752,0x1752,0x1752,0x1752,0x207,0x207,0x207,0x207,0x207,0x207,0x207,0x207,
+0x207,0x207,0x207,0x207,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,
+0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,
+0x163e,0x163e,0x163e,0x163e,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0x1bc3,0x1bc3,0x1bc3,0x1bc3,0x2a0,0x1bc3,0x1bc3,0x1bc3,0x1bc3,0x1bc3,0x1bc3,0x1bc3,
+0x2a0,0x1bc3,0x1bc3,0x2a0,0x16b6,0x16b6,0x16b6,0x16b6,0x1ef,0x1ef,0x1ef,0x1ef,0x1ef,0x1ef,0x1ef,0x1ef,
+0x1ef,0x1ef,0x1ef,0x1ef,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x2b5,0x2b5,0x2b5,0x2b5,0x2b5,0x2b5,0x2b5,0x2b5,
+0x2b5,0x2b5,0x2b5,0x2b5,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0xdf2,0xdf2,0xdef,0xdef,0xdef,0xdf2,0xd5,0xd5,0xd5,0xd5,0xd5,0xd5,
0xd5,0xd5,0xd5,0xd5,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0x216,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,
-0x1758,0x1758,0x1758,0x1758,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0x17dc,0x17dc,0x222,0x17dc,0x17dc,0x222,0x17dc,0x17dc,0x17dc,0x17dc,0x17dc,0x222,
-0x222,0x222,0x222,0x222,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0x1a01,0x1a01,0x1a01,0x1a01,0x1a01,0x1a01,0x1a01,0x1a01,0x1a01,0x1a01,0x273,0x273,
-0x273,0x273,0x1a04,0x19fe,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0x1b81,0x1b81,0x1b81,0x1b81,0x1b81,0x1b81,0x1b81,0x1b81,0x1b81,0x1b81,0x1b81,0x1b81,
-0x1b81,0x1b81,0x1b81,0x1b81,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0x258,0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,
-0x1962,0x1962,0x1962,0x1962,0x276,0x276,0x276,0x276,0x276,0x276,0x276,0x276,0x276,0x276,0x276,0x276,
-0x276,0x276,0x276,0x276,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0x213,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,
+0x176a,0x176a,0x176a,0x176a,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,
+0x2bb,0x2bb,0x2bb,0x1bf6,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0x1a13,0x1a13,0x1a13,0x1a13,0x1a13,0x1a13,0x1a13,0x1a13,0x1a13,0x1a13,0x270,0x270,
+0x270,0x270,0x1a16,0x1a10,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0x1b93,0x1b93,0x1b93,0x1b93,0x1b93,0x1b93,0x1b93,0x1b93,0x1b93,0x1b93,0x1b93,0x1b93,
+0x1b93,0x1b93,0x1b93,0x1b93,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0x1c26,0x1c26,0x1c26,0x1c26,0x1c26,0x1c26,0x1c26,0x1c26,0x1c26,0x1c26,0x1c26,0x1c26,
+0x1c26,0x1c26,0x1c26,0x1c26,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0x255,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,
+0x1974,0x1974,0x1974,0x1974,0x273,0x273,0x273,0x273,0x273,0x273,0x273,0x273,0x273,0x273,0x273,0x273,
+0x273,0x273,0x273,0x273,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0x94e,0x94e,3,3,3,3,3,3,3,3,3,3,3,3,
+0,0,0x95d,0x95d,3,3,3,3,3,3,3,3,3,3,3,3,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,
-3,3,3,3,3,3,0x94e,0x94e,6,6,6,6,6,6,6,6,
+3,3,3,3,3,3,0x95d,0x95d,6,6,6,6,6,6,6,6,
6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,
-6,6,6,6,6,6,6,6,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,
-0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,6,6,6,6,6,6,6,6,
-6,6,6,6,6,6,6,6,0x14a9,0x3e1,0x3f0,0x3f0,0x18,0x3f6,0x3f6,0x3f6,
-0x3f6,0x3f6,0x3f6,0x3f6,0x3f6,0x18,0x18,0x3f6,0x3f6,0x18,0x18,0x3f6,0x3f6,0x3f6,0x3f6,0x3f6,
-0x3f6,0x3f6,0x3f6,0x3f6,0x3f6,0x3f6,0x3f6,0x3f6,0x3f6,0x18,0x3f6,0x3f6,0x3f6,0x3f6,0x3f6,0x3f6,
-0x3f6,0x18,0x3f6,0x18,0x18,0x18,0x3f6,0x3f6,0x3f6,0x3f6,0x18,0x18,0x3e4,0xc9f,0x3e1,0x3f0,
-0x3f0,0x3e1,0x3e1,0x3e1,0x3e1,0x18,0x18,0x3f0,0x3f0,0x18,0x18,0x3f3,0x3f3,0x3e7,0xd92,0x18,
-0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x3e1,0x18,0x18,0x18,0x18,0x3f9,0x3f9,0x18,0x3f9,
-0x3f6,0x3f6,0x3e1,0x3e1,0x18,0x18,0x939,0x939,0x939,0x939,0x939,0x939,0x939,0x939,0x939,0x939,
-0x3f6,0x3f6,0x3ed,0x3ed,0x3ea,0x3ea,0x3ea,0x3ea,0x3ea,0x3ed,0x3ea,0x1110,0x1839,0x1836,0x18de,0x18,
-0x1b,0xca2,0x3fc,0xca5,0x1b,0x408,0x408,0x408,0x408,0x408,0x408,0x1b,0x1b,0x1b,0x1b,0x408,
-0x408,0x1b,0x1b,0x408,0x408,0x408,0x408,0x408,0x408,0x408,0x408,0x408,0x408,0x408,0x408,0x408,
-0x408,0x1b,0x408,0x408,0x408,0x408,0x408,0x408,0x408,0x1b,0x408,0x40b,0x1b,0x408,0x40b,0x1b,
-0x408,0x408,0x1b,0x1b,0x3ff,0x1b,0x405,0x405,0x405,0x3fc,0x3fc,0x1b,0x1b,0x1b,0x1b,0x3fc,
-0x3fc,0x1b,0x1b,0x3fc,0x3fc,0x402,0x1b,0x1b,0x1b,0xf6f,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
-0x1b,0x40b,0x40b,0x40b,0x408,0x1b,0x40b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x93c,0x93c,
-0x93c,0x93c,0x93c,0x93c,0x93c,0x93c,0x93c,0x93c,0x3fc,0x3fc,0x408,0x408,0x408,0xf6f,0x18e1,0x1b,
-0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1e,0x40e,0x40e,0x417,0x1e,0x41a,0x41a,0x41a,
-0x41a,0x41a,0x41a,0x41a,0xcae,0x41a,0x1e,0x41a,0x41a,0x41a,0x1e,0x41a,0x41a,0x41a,0x41a,0x41a,
-0x41a,0x41a,0x41a,0x41a,0x41a,0x41a,0x41a,0x41a,0x41a,0x1e,0x41a,0x41a,0x41a,0x41a,0x41a,0x41a,
-0x41a,0x1e,0x41a,0x41a,0x1e,0x41a,0x41a,0x41a,0x41a,0x41a,0x1e,0x1e,0x411,0x41a,0x417,0x417,
-0x417,0x40e,0x40e,0x40e,0x40e,0x40e,0x1e,0x40e,0x40e,0x417,0x1e,0x417,0x417,0x414,0x1e,0x1e,
-0x41a,0x1e,0x1e,0x1e,0x1e,0x1e,0x1e,0x1e,0x1e,0x1e,0x1e,0x1e,0x1e,0x1e,0x1e,0x1e,
-0x41a,0xcae,0xca8,0xca8,0x1e,0x1e,0x93f,0x93f,0x93f,0x93f,0x93f,0x93f,0x93f,0x93f,0x93f,0x93f,
-0x13c2,0xcab,0x1e,0x1e,0x1e,0x1e,0x1e,0x1e,0x1e,0x16b9,0x183c,0x183c,0x183c,0x183f,0x183f,0x183f,
-0x21,0x41d,0x42c,0x42c,0x21,0x432,0x432,0x432,0x432,0x432,0x432,0x432,0x432,0x21,0x21,0x432,
-0x432,0x21,0x21,0x432,0x432,0x432,0x432,0x432,0x432,0x432,0x432,0x432,0x432,0x432,0x432,0x432,
-0x432,0x21,0x432,0x432,0x432,0x432,0x432,0x432,0x432,0x21,0x432,0x432,0x21,0xcb1,0x432,0x432,
-0x432,0x432,0x21,0x21,0x420,0x432,0x41d,0x41d,0x42c,0x41d,0x41d,0x41d,0xf72,0x21,0x21,0x42c,
-0x42f,0x21,0x21,0x42f,0x42f,0x423,0x21,0x21,0x21,0x21,0x21,0x21,0x21,0x1a49,0x41d,0x41d,
-0x21,0x21,0x21,0x21,0x435,0x435,0x21,0x432,0x432,0x432,0xf72,0xf72,0x21,0x21,0x429,0x429,
-0x429,0x429,0x429,0x429,0x429,0x429,0x429,0x429,0x426,0xcb1,0x12ea,0x12ea,0x12ea,0x12ea,0x12ea,0x12ea,
-0x21,0x21,0x21,0x21,0x21,0x21,0x21,0x21,0x24,0x24,0x438,0x444,0x24,0x444,0x444,0x444,
-0x444,0x444,0x444,0x24,0x24,0x24,0x444,0x444,0x444,0x24,0x444,0x444,0x447,0x444,0x24,0x24,
-0x24,0x444,0x444,0x24,0x444,0x24,0x444,0x444,0x24,0x24,0x24,0x444,0x444,0x24,0x24,0x24,
-0x444,0x444,0x444,0x24,0x24,0x24,0x444,0x444,0x444,0x444,0x444,0x444,0x444,0x444,0xd95,0x444,
-0x444,0x444,0x24,0x24,0x24,0x24,0x438,0x43e,0x438,0x43e,0x43e,0x24,0x24,0x24,0x43e,0x43e,
-0x43e,0x24,0x441,0x441,0x441,0x43b,0x24,0x24,0xf75,0x24,0x24,0x24,0x24,0x24,0x24,0x438,
-0x24,0x24,0x24,0x24,0x24,0x24,0x24,0x24,0x24,0x24,0xea9,0x945,0x945,0x945,0x945,0x945,
-0x945,0x945,0x945,0x945,0x942,0x942,0x942,0xd65,0xcb4,0xcb4,0xcb4,0xcb4,0xcb4,0xcb7,0xcb4,0x24,
-0x24,0x24,0x24,0x24,0x14ac,0x456,0x456,0x456,0x18e4,0x459,0x459,0x459,0x459,0x459,0x459,0x459,
-0x459,0x27,0x459,0x459,0x459,0x27,0x459,0x459,0x459,0x459,0x459,0x459,0x459,0x459,0x459,0x459,
-0x459,0x459,0x459,0x459,0x459,0x27,0x459,0x459,0x459,0x459,0x459,0x459,0x459,0x459,0x459,0x459,
-0x14af,0x459,0x459,0x459,0x459,0x459,0x27,0x27,0x1aee,0xf7e,0x44a,0x44a,0x44a,0x456,0x456,0x456,
-0x456,0x27,0x44a,0x44a,0x44d,0x27,0x44a,0x44a,0x44a,0x450,0x27,0x27,0x27,0x27,0x27,0x27,
-0x27,0x44a,0x44a,0x27,0xf7e,0xf7e,0x16bc,0x27,0x27,0x1af1,0x27,0x27,0x459,0x459,0xf78,0xf78,
-0x27,0x27,0x453,0x453,0x453,0x453,0x453,0x453,0x453,0x453,0x453,0x453,0x27,0x27,0x27,0x27,
-0x27,0x27,0x27,0x19ad,0xf7b,0xf7b,0xf7b,0xf7b,0xf7b,0xf7b,0xf7b,0xf7b,0x177c,0x14b2,0x462,0x462,
-0x18e7,0x468,0x468,0x468,0x468,0x468,0x468,0x468,0x468,0x2a,0x468,0x468,0x468,0x2a,0x468,0x468,
-0x468,0x468,0x468,0x468,0x468,0x468,0x468,0x468,0x468,0x468,0x468,0x468,0x468,0x2a,0x468,0x468,
-0x468,0x468,0x468,0x468,0x468,0x468,0x468,0x468,0x2a,0x468,0x468,0x468,0x468,0x468,0x2a,0x2a,
-0xcba,0xcbd,0x462,0x45c,0x465,0x462,0x45c,0x462,0x462,0x2a,0x45c,0x465,0x465,0x2a,0x465,0x465,
-0x45c,0x45f,0x2a,0x2a,0x2a,0x2a,0x2a,0x2a,0x2a,0x45c,0x45c,0x2a,0x2a,0x2a,0x2a,0x2a,
-0x2a,0x1af4,0x468,0x2a,0x468,0x468,0xec1,0xec1,0x2a,0x2a,0x948,0x948,0x948,0x948,0x948,0x948,
-0x948,0x948,0x948,0x948,0x2a,0xec4,0xec4,0x2a,0x2a,0x2a,0x2a,0x2a,0x2a,0x2a,0x2a,0x2a,
-0x2a,0x2a,0x2a,0x2a,0x1842,0x14b5,0x474,0x474,0x1a4c,0x47a,0x47a,0x47a,0x47a,0x47a,0x47a,0x47a,
-0x47a,0x2d,0x47a,0x47a,0x47a,0x2d,0x47a,0x47a,0x47a,0x47a,0x47a,0x47a,0x47a,0x47a,0x47a,0x47a,
-0x47a,0x47a,0x47a,0x47a,0x474,0x46b,0x46b,0x46b,0xf81,0x2d,0x474,0x474,0x474,0x2d,0x477,0x477,
-0x477,0x46e,0x12f0,0x177f,0x2d,0x2d,0x2d,0x2d,0x1782,0x1782,0x1782,0x46b,0x177f,0x177f,0x177f,0x177f,
-0x177f,0x177f,0x177f,0x16bf,0x47a,0x47a,0xf81,0xf81,0x2d,0x2d,0x471,0x471,0x471,0x471,0x471,0x471,
-0x471,0x471,0x471,0x471,0xf84,0xf84,0xf84,0xf84,0xf84,0xf84,0x177f,0x177f,0x177f,0xf87,0xf8a,0xf8a,
-0xf8a,0xf8a,0xf8a,0xf8a,0x30,0x1a4f,0xa11,0xa11,0x30,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,
-0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0x30,0x30,0x30,0xa17,0xa17,
-0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,
-0xa17,0xa17,0x30,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0x30,0xa17,0x30,0x30,
-0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0xa17,0x30,0x30,0x30,0xa0b,0x30,0x30,0x30,0x30,0xa08,
-0xa11,0xa11,0xa08,0xa08,0xa08,0x30,0xa08,0x30,0xa11,0xa11,0xa14,0xa11,0xa14,0xa14,0xa14,0xa08,
-0x30,0x30,0x30,0x30,0x30,0x30,0x14b8,0x14b8,0x14b8,0x14b8,0x14b8,0x14b8,0x14b8,0x14b8,0x14b8,0x14b8,
-0x30,0x30,0xa11,0xa11,0xa0e,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x30,
-0x33,0x495,0x495,0x495,0x495,0x495,0x495,0x495,0x495,0x495,0x495,0x495,0x495,0x495,0x495,0x495,
-0x495,0x495,0x495,0x495,0x495,0x495,0x495,0x495,0x495,0x495,0x495,0x495,0x495,0x495,0x495,0x495,
-0x495,0x480,0x495,0x492,0x480,0x480,0x480,0x480,0x480,0x480,0x486,0x33,0x33,0x33,0x33,0x47d,
-0x49b,0x49b,0x49b,0x49b,0x49b,0x495,0x498,0x483,0x483,0x483,0x483,0x483,0x483,0x480,0x483,0x489,
-0x48f,0x48f,0x48f,0x48f,0x48f,0x48f,0x48f,0x48f,0x48f,0x48f,0x48c,0x48c,0x33,0x33,0x33,0x33,
+6,6,6,6,6,6,6,6,0xd50,0xd50,0xd50,0xd50,0xd50,0xd50,0xd50,0xd50,
+0xd50,0xd50,0xd50,0xd50,0xd50,0xd50,0xd50,0xd50,6,6,6,6,6,6,6,6,
+6,6,6,6,6,6,6,6,0x14bb,0x3f0,0x3ff,0x3ff,0x18,0x405,0x405,0x405,
+0x405,0x405,0x405,0x405,0x405,0x18,0x18,0x405,0x405,0x18,0x18,0x405,0x405,0x405,0x405,0x405,
+0x405,0x405,0x405,0x405,0x405,0x405,0x405,0x405,0x405,0x18,0x405,0x405,0x405,0x405,0x405,0x405,
+0x405,0x18,0x405,0x18,0x18,0x18,0x405,0x405,0x405,0x405,0x18,0x18,0x3f3,0xcb1,0x3f0,0x3ff,
+0x3ff,0x3f0,0x3f0,0x3f0,0x3f0,0x18,0x18,0x3ff,0x3ff,0x18,0x18,0x402,0x402,0x3f6,0xda4,0x18,
+0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x3f0,0x18,0x18,0x18,0x18,0x408,0x408,0x18,0x408,
+0x405,0x405,0x3f0,0x3f0,0x18,0x18,0x948,0x948,0x948,0x948,0x948,0x948,0x948,0x948,0x948,0x948,
+0x405,0x405,0x3fc,0x3fc,0x3f9,0x3f9,0x3f9,0x3f9,0x3f9,0x3fc,0x3f9,0x1125,0x184b,0x1848,0x18f0,0x18,
+0x1b,0xcb4,0x40b,0xcb7,0x1b,0x417,0x417,0x417,0x417,0x417,0x417,0x1b,0x1b,0x1b,0x1b,0x417,
+0x417,0x1b,0x1b,0x417,0x417,0x417,0x417,0x417,0x417,0x417,0x417,0x417,0x417,0x417,0x417,0x417,
+0x417,0x1b,0x417,0x417,0x417,0x417,0x417,0x417,0x417,0x1b,0x417,0x41a,0x1b,0x417,0x41a,0x1b,
+0x417,0x417,0x1b,0x1b,0x40e,0x1b,0x414,0x414,0x414,0x40b,0x40b,0x1b,0x1b,0x1b,0x1b,0x40b,
+0x40b,0x1b,0x1b,0x40b,0x40b,0x411,0x1b,0x1b,0x1b,0xf81,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,
+0x1b,0x41a,0x41a,0x41a,0x417,0x1b,0x41a,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x94b,0x94b,
+0x94b,0x94b,0x94b,0x94b,0x94b,0x94b,0x94b,0x94b,0x40b,0x40b,0x417,0x417,0x417,0xf81,0x18f3,0x1b,
+0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1b,0x1e,0x41d,0x41d,0x426,0x1e,0x429,0x429,0x429,
+0x429,0x429,0x429,0x429,0xcc0,0x429,0x1e,0x429,0x429,0x429,0x1e,0x429,0x429,0x429,0x429,0x429,
+0x429,0x429,0x429,0x429,0x429,0x429,0x429,0x429,0x429,0x1e,0x429,0x429,0x429,0x429,0x429,0x429,
+0x429,0x1e,0x429,0x429,0x1e,0x429,0x429,0x429,0x429,0x429,0x1e,0x1e,0x420,0x429,0x426,0x426,
+0x426,0x41d,0x41d,0x41d,0x41d,0x41d,0x1e,0x41d,0x41d,0x426,0x1e,0x426,0x426,0x423,0x1e,0x1e,
+0x429,0x1e,0x1e,0x1e,0x1e,0x1e,0x1e,0x1e,0x1e,0x1e,0x1e,0x1e,0x1e,0x1e,0x1e,0x1e,
+0x429,0xcc0,0xcba,0xcba,0x1e,0x1e,0x94e,0x94e,0x94e,0x94e,0x94e,0x94e,0x94e,0x94e,0x94e,0x94e,
+0x13d4,0xcbd,0x1e,0x1e,0x1e,0x1e,0x1e,0x1e,0x1e,0x16cb,0x184e,0x184e,0x184e,0x1851,0x1851,0x1851,
+0x21,0x42c,0x43b,0x43b,0x21,0x441,0x441,0x441,0x441,0x441,0x441,0x441,0x441,0x21,0x21,0x441,
+0x441,0x21,0x21,0x441,0x441,0x441,0x441,0x441,0x441,0x441,0x441,0x441,0x441,0x441,0x441,0x441,
+0x441,0x21,0x441,0x441,0x441,0x441,0x441,0x441,0x441,0x21,0x441,0x441,0x21,0xcc3,0x441,0x441,
+0x441,0x441,0x21,0x21,0x42f,0x441,0x42c,0x42c,0x43b,0x42c,0x42c,0x42c,0xf84,0x21,0x21,0x43b,
+0x43e,0x21,0x21,0x43e,0x43e,0x432,0x21,0x21,0x21,0x21,0x21,0x21,0x21,0x1a5b,0x42c,0x42c,
+0x21,0x21,0x21,0x21,0x444,0x444,0x21,0x441,0x441,0x441,0xf84,0xf84,0x21,0x21,0x438,0x438,
+0x438,0x438,0x438,0x438,0x438,0x438,0x438,0x438,0x435,0xcc3,0x12fc,0x12fc,0x12fc,0x12fc,0x12fc,0x12fc,
+0x21,0x21,0x21,0x21,0x21,0x21,0x21,0x21,0x24,0x24,0x447,0x453,0x24,0x453,0x453,0x453,
+0x453,0x453,0x453,0x24,0x24,0x24,0x453,0x453,0x453,0x24,0x453,0x453,0x456,0x453,0x24,0x24,
+0x24,0x453,0x453,0x24,0x453,0x24,0x453,0x453,0x24,0x24,0x24,0x453,0x453,0x24,0x24,0x24,
+0x453,0x453,0x453,0x24,0x24,0x24,0x453,0x453,0x453,0x453,0x453,0x453,0x453,0x453,0xda7,0x453,
+0x453,0x453,0x24,0x24,0x24,0x24,0x447,0x44d,0x447,0x44d,0x44d,0x24,0x24,0x24,0x44d,0x44d,
+0x44d,0x24,0x450,0x450,0x450,0x44a,0x24,0x24,0xf87,0x24,0x24,0x24,0x24,0x24,0x24,0x447,
+0x24,0x24,0x24,0x24,0x24,0x24,0x24,0x24,0x24,0x24,0xebb,0x954,0x954,0x954,0x954,0x954,
+0x954,0x954,0x954,0x954,0x951,0x951,0x951,0xd77,0xcc6,0xcc6,0xcc6,0xcc6,0xcc6,0xcc9,0xcc6,0x24,
+0x24,0x24,0x24,0x24,0x14be,0x465,0x465,0x465,0x18f6,0x468,0x468,0x468,0x468,0x468,0x468,0x468,
+0x468,0x27,0x468,0x468,0x468,0x27,0x468,0x468,0x468,0x468,0x468,0x468,0x468,0x468,0x468,0x468,
+0x468,0x468,0x468,0x468,0x468,0x27,0x468,0x468,0x468,0x468,0x468,0x468,0x468,0x468,0x468,0x468,
+0x14c1,0x468,0x468,0x468,0x468,0x468,0x27,0x27,0x1b00,0xf90,0x459,0x459,0x459,0x465,0x465,0x465,
+0x465,0x27,0x459,0x459,0x45c,0x27,0x459,0x459,0x459,0x45f,0x27,0x27,0x27,0x27,0x27,0x27,
+0x27,0x459,0x459,0x27,0xf90,0xf90,0x16ce,0x27,0x27,0x1b03,0x27,0x27,0x468,0x468,0xf8a,0xf8a,
+0x27,0x27,0x462,0x462,0x462,0x462,0x462,0x462,0x462,0x462,0x462,0x462,0x27,0x27,0x27,0x27,
+0x27,0x27,0x27,0x19bf,0xf8d,0xf8d,0xf8d,0xf8d,0xf8d,0xf8d,0xf8d,0xf8d,0x178e,0x14c4,0x471,0x471,
+0x18f9,0x477,0x477,0x477,0x477,0x477,0x477,0x477,0x477,0x2a,0x477,0x477,0x477,0x2a,0x477,0x477,
+0x477,0x477,0x477,0x477,0x477,0x477,0x477,0x477,0x477,0x477,0x477,0x477,0x477,0x2a,0x477,0x477,
+0x477,0x477,0x477,0x477,0x477,0x477,0x477,0x477,0x2a,0x477,0x477,0x477,0x477,0x477,0x2a,0x2a,
+0xccc,0xccf,0x471,0x46b,0x474,0x471,0x46b,0x471,0x471,0x2a,0x46b,0x474,0x474,0x2a,0x474,0x474,
+0x46b,0x46e,0x2a,0x2a,0x2a,0x2a,0x2a,0x2a,0x2a,0x46b,0x46b,0x2a,0x2a,0x2a,0x2a,0x2a,
+0x2a,0x1b06,0x477,0x2a,0x477,0x477,0xed3,0xed3,0x2a,0x2a,0x957,0x957,0x957,0x957,0x957,0x957,
+0x957,0x957,0x957,0x957,0x2a,0xed6,0xed6,0x1bc9,0x2a,0x2a,0x2a,0x2a,0x2a,0x2a,0x2a,0x2a,
+0x2a,0x2a,0x2a,0x2a,0x1854,0x14c7,0x483,0x483,0x1a5e,0x489,0x489,0x489,0x489,0x489,0x489,0x489,
+0x489,0x2d,0x489,0x489,0x489,0x2d,0x489,0x489,0x489,0x489,0x489,0x489,0x489,0x489,0x489,0x489,
+0x489,0x489,0x489,0x489,0x483,0x47a,0x47a,0x47a,0xf93,0x2d,0x483,0x483,0x483,0x2d,0x486,0x486,
+0x486,0x47d,0x1302,0x1791,0x2d,0x2d,0x2d,0x2d,0x1794,0x1794,0x1794,0x47a,0x1791,0x1791,0x1791,0x1791,
+0x1791,0x1791,0x1791,0x16d1,0x489,0x489,0xf93,0xf93,0x2d,0x2d,0x480,0x480,0x480,0x480,0x480,0x480,
+0x480,0x480,0x480,0x480,0xf96,0xf96,0xf96,0xf96,0xf96,0xf96,0x1791,0x1791,0x1791,0xf99,0xf9c,0xf9c,
+0xf9c,0xf9c,0xf9c,0xf9c,0x30,0x1a61,0xa23,0xa23,0x30,0xa29,0xa29,0xa29,0xa29,0xa29,0xa29,0xa29,
+0xa29,0xa29,0xa29,0xa29,0xa29,0xa29,0xa29,0xa29,0xa29,0xa29,0xa29,0x30,0x30,0x30,0xa29,0xa29,
+0xa29,0xa29,0xa29,0xa29,0xa29,0xa29,0xa29,0xa29,0xa29,0xa29,0xa29,0xa29,0xa29,0xa29,0xa29,0xa29,
+0xa29,0xa29,0x30,0xa29,0xa29,0xa29,0xa29,0xa29,0xa29,0xa29,0xa29,0xa29,0x30,0xa29,0x30,0x30,
+0xa29,0xa29,0xa29,0xa29,0xa29,0xa29,0xa29,0x30,0x30,0x30,0xa1d,0x30,0x30,0x30,0x30,0xa1a,
+0xa23,0xa23,0xa1a,0xa1a,0xa1a,0x30,0xa1a,0x30,0xa23,0xa23,0xa26,0xa23,0xa26,0xa26,0xa26,0xa1a,
+0x30,0x30,0x30,0x30,0x30,0x30,0x14ca,0x14ca,0x14ca,0x14ca,0x14ca,0x14ca,0x14ca,0x14ca,0x14ca,0x14ca,
+0x30,0x30,0xa23,0xa23,0xa20,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x30,
+0x33,0x4a4,0x4a4,0x4a4,0x4a4,0x4a4,0x4a4,0x4a4,0x4a4,0x4a4,0x4a4,0x4a4,0x4a4,0x4a4,0x4a4,0x4a4,
+0x4a4,0x4a4,0x4a4,0x4a4,0x4a4,0x4a4,0x4a4,0x4a4,0x4a4,0x4a4,0x4a4,0x4a4,0x4a4,0x4a4,0x4a4,0x4a4,
+0x4a4,0x48f,0x4a4,0x4a1,0x48f,0x48f,0x48f,0x48f,0x48f,0x48f,0x495,0x33,0x33,0x33,0x33,0x48c,
+0x4aa,0x4aa,0x4aa,0x4aa,0x4aa,0x4a4,0x4a7,0x492,0x492,0x492,0x492,0x492,0x492,0x48f,0x492,0x498,
+0x49e,0x49e,0x49e,0x49e,0x49e,0x49e,0x49e,0x49e,0x49e,0x49e,0x49b,0x49b,0x33,0x33,0x33,0x33,
0x33,0x33,0x33,0x33,0x33,0x33,0x33,0x33,0x33,0x33,0x33,0x33,0x33,0x33,0x33,0x33,
-0x33,0x33,0x33,0x33,0x33,0x33,0x33,0x33,0x33,0x33,0x33,0x33,0x36,0x4aa,0x4aa,0x36,
-0x4aa,0x36,0x19b3,0x4aa,0x4aa,0x19b3,0x4aa,0x36,0x19b3,0x4aa,0x19b3,0x19b3,0x19b3,0x19b3,0x19b3,0x19b3,
-0x4aa,0x4aa,0x4aa,0x4aa,0x19b3,0x4aa,0x4aa,0x4aa,0x4aa,0x4aa,0x4aa,0x4aa,0x19b3,0x4aa,0x4aa,0x4aa,
-0x36,0x4aa,0x36,0x4aa,0x19b3,0x19b3,0x4aa,0x4aa,0x19b3,0x4aa,0x4aa,0x4aa,0x4aa,0x49e,0x4aa,0x4a7,
-0x49e,0x49e,0x49e,0x49e,0x49e,0x49e,0x19b0,0x49e,0x49e,0x4aa,0x36,0x36,0x4b3,0x4b3,0x4b3,0x4b3,
-0x4b3,0x36,0x4b0,0x36,0x4a1,0x4a1,0x4a1,0x4a1,0x4a1,0x49e,0x36,0x36,0x4a4,0x4a4,0x4a4,0x4a4,
-0x4a4,0x4a4,0x4a4,0x4a4,0x4a4,0x4a4,0x36,0x36,0x4ad,0x4ad,0x13c5,0x13c5,0x36,0x36,0x36,0x36,
+0x33,0x33,0x33,0x33,0x33,0x33,0x33,0x33,0x33,0x33,0x33,0x33,0x36,0x4b9,0x4b9,0x36,
+0x4b9,0x36,0x19c5,0x4b9,0x4b9,0x19c5,0x4b9,0x36,0x19c5,0x4b9,0x19c5,0x19c5,0x19c5,0x19c5,0x19c5,0x19c5,
+0x4b9,0x4b9,0x4b9,0x4b9,0x19c5,0x4b9,0x4b9,0x4b9,0x4b9,0x4b9,0x4b9,0x4b9,0x19c5,0x4b9,0x4b9,0x4b9,
+0x36,0x4b9,0x36,0x4b9,0x19c5,0x19c5,0x4b9,0x4b9,0x19c5,0x4b9,0x4b9,0x4b9,0x4b9,0x4ad,0x4b9,0x4b6,
+0x4ad,0x4ad,0x4ad,0x4ad,0x4ad,0x4ad,0x19c2,0x4ad,0x4ad,0x4b9,0x36,0x36,0x4c2,0x4c2,0x4c2,0x4c2,
+0x4c2,0x36,0x4bf,0x36,0x4b0,0x4b0,0x4b0,0x4b0,0x4b0,0x4ad,0x1bcc,0x36,0x4b3,0x4b3,0x4b3,0x4b3,
+0x4b3,0x4b3,0x4b3,0x4b3,0x4b3,0x4b3,0x36,0x36,0x4bc,0x4bc,0x13d7,0x13d7,0x36,0x36,0x36,0x36,
0x36,0x36,0x36,0x36,0x36,0x36,0x36,0x36,0x36,0x36,0x36,0x36,0x36,0x36,0x36,0x36,
-0x36,0x36,0x36,0x36,0x36,0x36,0x36,0x36,0x36,0x36,0x36,0x36,0x98a,0x98a,0x98a,0x98d,
-0x98a,0x98a,0x98a,0x98a,0x39,0x98a,0x98a,0x98a,0x98a,0x98d,0x98a,0x98a,0x98a,0x98a,0x98d,0x98a,
-0x98a,0x98a,0x98a,0x98d,0x98a,0x98a,0x98a,0x98a,0x98d,0x98a,0x98a,0x98a,0x98a,0x98a,0x98a,0x98a,
-0x98a,0x98a,0x98a,0x98a,0x98a,0x98d,0xa26,0xf96,0xf96,0x39,0x39,0x39,0x39,0x957,0x957,0x95a,
-0x957,0x95a,0x95a,0x963,0x95a,0x963,0x957,0x957,0x957,0x957,0x957,0x984,0x957,0x95a,0x95d,0x95d,
-0x960,0x969,0x95d,0x95d,0x98a,0x98a,0x98a,0x98a,0x12f9,0x12f3,0x12f3,0x12f3,0x957,0x957,0x957,0x95a,
-0x957,0x957,0xa1a,0x957,0x39,0x957,0x957,0x957,0x957,0x95a,0x957,0x957,0x957,0x957,0x95a,0x957,
-0x957,0x957,0x957,0x95a,0x957,0x957,0x957,0x957,0x95a,0x957,0xa1a,0xa1a,0xa1a,0x957,0x957,0x957,
-0x957,0x957,0x957,0x957,0xa1a,0x95a,0xa1a,0xa1a,0xa1a,0x39,0xa23,0xa23,0xa20,0xa20,0xa20,0xa20,
-0xa20,0xa20,0xa1d,0xa20,0xa20,0xa20,0xa20,0xa20,0xa20,0x39,0xf8d,0xa20,0xd98,0xd98,0xf90,0xf93,
-0xf8d,0x1113,0x1113,0x1113,0x1113,0x12f6,0x12f6,0x39,0x39,0x39,0x39,0x39,0x39,0x39,0x39,0x39,
+0x36,0x36,0x36,0x36,0x36,0x36,0x36,0x36,0x36,0x36,0x36,0x36,0x99c,0x99c,0x99c,0x99f,
+0x99c,0x99c,0x99c,0x99c,0x39,0x99c,0x99c,0x99c,0x99c,0x99f,0x99c,0x99c,0x99c,0x99c,0x99f,0x99c,
+0x99c,0x99c,0x99c,0x99f,0x99c,0x99c,0x99c,0x99c,0x99f,0x99c,0x99c,0x99c,0x99c,0x99c,0x99c,0x99c,
+0x99c,0x99c,0x99c,0x99c,0x99c,0x99f,0xa38,0xfa8,0xfa8,0x39,0x39,0x39,0x39,0x966,0x966,0x969,
+0x966,0x969,0x969,0x975,0x969,0x975,0x966,0x966,0x966,0x966,0x966,0x996,0x966,0x969,0x96f,0x96f,
+0x972,0x97b,0x96c,0x96c,0x99c,0x99c,0x99c,0x99c,0x130b,0x1305,0x1305,0x1305,0x966,0x966,0x966,0x969,
+0x966,0x966,0xa2c,0x966,0x39,0x966,0x966,0x966,0x966,0x969,0x966,0x966,0x966,0x966,0x969,0x966,
+0x966,0x966,0x966,0x969,0x966,0x966,0x966,0x966,0x969,0x966,0xa2c,0xa2c,0xa2c,0x966,0x966,0x966,
+0x966,0x966,0x966,0x966,0xa2c,0x969,0xa2c,0xa2c,0xa2c,0x39,0xa35,0xa35,0xa32,0xa32,0xa32,0xa32,
+0xa32,0xa32,0xa2f,0xa32,0xa32,0xa32,0xa32,0xa32,0xa32,0x39,0xf9f,0xa32,0xdaa,0xdaa,0xfa2,0xfa5,
+0xf9f,0x1128,0x1128,0x1128,0x1128,0x1308,0x1308,0x39,0x39,0x39,0x39,0x39,0x39,0x39,0x39,0x39,
0x39,0x39,0x39,0x39,0x39,0x39,0x39,0x39,0x39,0x39,0x39,0x39,0x39,0x39,0x39,0x39,
-0x39,0x39,0x39,0x39,0x39,0x39,0x39,0x39,0x4b9,0x4b9,0x4b9,0x4b9,0x4b9,0x4b9,0x3c,0x13cb,
-0x3c,0x3c,0x3c,0x3c,0x3c,0x13cb,0x3c,0x3c,0x4b6,0x4b6,0x4b6,0x4b6,0x4b6,0x4b6,0x4b6,0x4b6,
-0x4b6,0x4b6,0x4b6,0x4b6,0x4b6,0x4b6,0x4b6,0x4b6,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xda7,
-0xa50,0x3f,0xa50,0xa50,0xa50,0xa50,0x3f,0x3f,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0x3f,
-0xa50,0x3f,0xa50,0xa50,0xa50,0xa50,0x3f,0x3f,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xda7,
-0xa50,0x3f,0xa50,0xa50,0xa50,0xa50,0x3f,0x3f,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,
-0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xda7,0xa50,0x3f,0xa50,0xa50,
-0xa50,0xa50,0x3f,0x3f,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0x3f,0xa50,0x3f,0xa50,0xa50,
-0xa50,0xa50,0x3f,0x3f,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xda7,0xa50,0xa50,0xa50,0xa50,
-0xa50,0xa50,0xa50,0x3f,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,
-0xa50,0xa50,0xa50,0xda7,0xa50,0x3f,0xa50,0xa50,0xa50,0xa50,0x3f,0x3f,0xa50,0xa50,0xa50,0xa50,
-0xa50,0xa50,0xa50,0xda7,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,
-0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0x3f,0x3f,0x12fc,0x12fc,0xda1,0xda4,0xa4a,0xa53,0xa47,
-0xa47,0xa47,0xa47,0xa53,0xa53,0xa4d,0xa4d,0xa4d,0xa4d,0xa4d,0xa4d,0xa4d,0xa4d,0xa4d,0xa44,0xa44,
-0xa44,0xa44,0xa44,0xa44,0xa44,0xa44,0xa44,0xa44,0xa44,0x3f,0x3f,0x3f,0xa56,0xa56,0xa56,0xa56,
-0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,
-0xa56,0x16c5,0x42,0x42,0x16c2,0x16c2,0x16c2,0x16c2,0x16c2,0x16c2,0x42,0x42,0xa68,0xa6b,0xa6b,0xa6b,
-0xa6b,0xa6b,0xa6b,0xa6b,0xa6b,0xa6b,0xa6b,0xa6b,0xa6b,0xa6b,0xa6b,0xa6b,0xa6b,0xa6b,0xa6b,0xa6b,
-0xa6b,0xa6b,0xa6b,0xa6b,0xa6b,0xa6b,0xa6b,0xa65,0xa62,0x45,0x45,0x45,0xa71,0xa71,0xa71,0xa71,
-0xa71,0xa71,0xa71,0xa71,0xa71,0xa71,0xa71,0xa6e,0xa6e,0xa6e,0xa71,0xa71,0xa71,0x14bb,0x14bb,0x14bb,
-0x14bb,0x14bb,0x14bb,0x14bb,0x14bb,0x48,0x48,0x48,0x48,0x48,0x48,0x48,0xa92,0xa92,0xa92,0xa92,
-0xa92,0xa92,0xa74,0xa92,0xa92,0xa77,0xa77,0xa77,0xa77,0xa77,0xa77,0xa77,0xa77,0xa77,0xa7a,0xa77,
-0xa89,0xa89,0xa8c,0xa95,0xa83,0xa80,0xa89,0xa86,0xa95,0xcc0,0x4b,0x4b,0xa8f,0xa8f,0xa8f,0xa8f,
-0xa8f,0xa8f,0xa8f,0xa8f,0xa8f,0xa8f,0x4b,0x4b,0x4b,0x4b,0x4b,0x4b,0xcc3,0xcc3,0xcc3,0xcc3,
-0xcc3,0xcc3,0xcc3,0xcc3,0xcc3,0xcc3,0x4b,0x4b,0x4b,0x4b,0x4b,0x4b,0xaa4,0xaa4,0xb1c,0xb1f,
-0xaaa,0xb19,0xaa7,0xaa4,0xaad,0xabc,0xab0,0xabf,0xabf,0xabf,0xa9b,0x1af7,0xab3,0xab3,0xab3,0xab3,
-0xab3,0xab3,0xab3,0xab3,0xab3,0xab3,0x4e,0x4e,0x4e,0x4e,0x4e,0x4e,0xab6,0xab6,0xab6,0xab6,
-0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,
-0xab6,0xab6,0xab6,0xab6,0x18ea,0x4e,0x4e,0x4e,0x4e,0x4e,0x4e,0x4e,0xab6,0xab6,0xab6,0xab6,
-0xab6,0xab6,0xab6,0xab6,0xab6,0xa9e,0xfb4,0x4e,0x4e,0x4e,0x4e,0x4e,0x116a,0x116a,0x116a,0x116a,
-0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x4d7,0x4d7,0x4d7,0x4d7,
-0x4d7,0x4d7,0x4d7,0x4d7,0x4da,0x4da,0x4da,0x4da,0x4da,0x4da,0x4da,0x4da,0x4d7,0x4d7,0x4d7,0x4d7,
-0x4d7,0x4d7,0x51,0x51,0x4da,0x4da,0x4da,0x4da,0x4da,0x4da,0x51,0x51,0x4d7,0x4d7,0x4d7,0x4d7,
-0x4d7,0x4d7,0x4d7,0x4d7,0x51,0x4da,0x51,0x4da,0x51,0x4da,0x51,0x4da,0x4d7,0x4d7,0x4d7,0x4d7,
-0x4d7,0x4d7,0x4d7,0x4d7,0x4da,0x4da,0x4da,0x4da,0x4da,0x4da,0x4da,0x4da,0x4d7,0x4d7,0x4d7,0x4d7,
-0x4d7,0x4d7,0x4d7,0x4d7,0x4d7,0x4d7,0x4d7,0x4d7,0x4d7,0x4d7,0x51,0x51,0x4d7,0x4d7,0x4d7,0x4d7,
-0x4d7,0x4d7,0x4d7,0x4d7,0x4da,0x4da,0x4da,0x4da,0x4da,0x4da,0x4da,0x4da,0x4d7,0x4d7,0x4d7,0x4d7,
-0x4d7,0x51,0x4d7,0x4d7,0x4da,0x4da,0x4da,0x4da,0x4da,0x4d1,0x4d7,0x4d1,0x4d1,0x4ce,0x4d7,0x4d7,
-0x4d7,0x51,0x4d7,0x4d7,0x4da,0x4da,0x4da,0x4da,0x4da,0x4ce,0x4ce,0x4ce,0x4d7,0x4d7,0x4d7,0x4d7,
-0x51,0x51,0x4d7,0x4d7,0x4da,0x4da,0x4da,0x4da,0x51,0x4ce,0x4ce,0x4ce,0x4d7,0x4d7,0x4d7,0x4d7,
-0x4d7,0x4d7,0x4d7,0x4d7,0x4da,0x4da,0x4da,0x4da,0x4da,0x4ce,0x4ce,0x4ce,0x51,0x51,0x4d7,0x4d7,
-0x4d7,0x51,0x4d7,0x4d7,0x4da,0x4da,0x4da,0x4da,0x4da,0x4d4,0x4d1,0x51,0xb94,0xb97,0xb97,0xb97,
-0xfbd,0x54,0x1497,0x1497,0x1497,0x1497,0x4e3,0x4e3,0x4e3,0x4e3,0x4e3,0x4e3,0x52e,0xba9,0x57,0x57,
-0x6c9,0x52e,0x52e,0x52e,0x52e,0x52e,0x534,0x546,0x534,0x540,0x53a,0x6cc,0x52b,0x6c6,0x6c6,0x6c6,
-0x6c6,0x52b,0x52b,0x52b,0x52b,0x52b,0x531,0x543,0x531,0x53d,0x537,0x57,0xdb0,0xdb0,0xdb0,0xdb0,
-0xdb0,0x12ff,0x12ff,0x12ff,0x12ff,0x12ff,0x12ff,0x12ff,0x12ff,0x57,0x57,0x57,0x1afa,0x5a,0x5a,0x5a,
-0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x555,0x555,0x555,0x555,
-0x555,0x555,0x555,0x555,0x555,0x555,0x555,0x555,0x555,0x552,0x552,0x552,0x552,0x555,0xacb,0xacb,
-0xbaf,0xbb5,0xbb5,0xbb2,0xbb2,0xbb2,0xbb2,0xdb6,0xec7,0xec7,0xec7,0xec7,0x10fe,0x5d,0x5d,0x5d,
-0x5d,0x5d,0x5d,0x5d,0x5d,0x5d,0x5d,0x5d,0x5d,0x5d,0x5d,0x5d,0x585,0x585,0x585,0xad4,
-0xed0,0xfc3,0xfc3,0xfc3,0xfc3,0x125d,0x16cb,0x16cb,0x60,0x60,0x60,0x60,0x6f3,0x6f3,0x6f3,0x6f3,
-0x6f3,0x6f3,0x6f3,0x6f3,0x6f3,0x6f3,0x591,0x591,0x58e,0x58e,0x58e,0x58e,0x5b2,0x5b2,0x5b2,0x5b2,
-0x5b2,0xadd,0xadd,0x63,0x63,0x63,0x63,0x63,0x63,0x63,0x63,0x63,0x63,0x63,0x63,0x63,
-0x63,0x63,0x63,0x63,0x63,0x63,0x63,0x63,0x63,0x63,0x63,0x63,0x5b5,0x5b5,0x5b5,0x5b5,
-0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x66,0x66,0x66,0x66,0x66,0x66,0x66,0x66,0x66,
-0x66,0x66,0x66,0x66,0x66,0x66,0x66,0x66,0x66,0x66,0x66,0x66,0xaf8,0xaf8,0xaf8,0xaf8,
-0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,
-0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0x69,0xaf8,0xaf8,0xaf8,0xaf8,0xafb,0xaf8,0xaf8,0xaf8,0xaf8,
-0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xafb,
-0x69,0x69,0x69,0x69,0x69,0x69,0x69,0x69,0x69,0x69,0x69,0x69,0xafe,0xafe,0xafe,0xafe,
-0xafe,0xafe,0xafe,0xafe,0xafe,0xafe,0xafe,0xafe,0xafe,0xafe,0xafe,0xafe,0xafe,0xafe,0xafe,0xafe,
-0xafe,0xafe,0x6c,0x6c,0x6c,0x6c,0x6c,0x6c,0x6c,0x6c,0x6c,0x6c,0x72,0x813,0x80d,0x813,
-0x80d,0x813,0x80d,0x813,0x80d,0x813,0x80d,0x80d,0x810,0x80d,0x810,0x80d,0x810,0x80d,0x810,0x80d,
-0x810,0x80d,0x810,0x80d,0x810,0x80d,0x810,0x80d,0x810,0x80d,0x810,0x80d,0x80d,0x80d,0x80d,0x813,
-0x80d,0x813,0x80d,0x813,0x80d,0x80d,0x80d,0x80d,0x80d,0x80d,0x813,0x80d,0x80d,0x80d,0x80d,0x80d,
-0x810,0xc4e,0xc4e,0x72,0x72,0x927,0x927,0x8ee,0x8ee,0x816,0x819,0xc4b,0x75,0x75,0x75,0x75,
-0x75,0x82b,0x82b,0x82b,0x82b,0x82b,0x82b,0x82b,0x82b,0x82b,0x82b,0x82b,0x82b,0x82b,0x82b,0x82b,
-0x82b,0x82b,0x82b,0x82b,0x82b,0x82b,0x82b,0x82b,0x82b,0x82b,0x82b,0x82b,0x82b,0x10ec,0x18b1,0x1998,
-0x78,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,
-0x82e,0x82e,0x82e,0x78,0x8f7,0x8f7,0x8fa,0x8fa,0x8fa,0x8fa,0x8fa,0x8fa,0x8fa,0x8fa,0x8fa,0x8fa,
-0x8fa,0x8fa,0x8fa,0x8fa,0x837,0x837,0x837,0x837,0x837,0x837,0x837,0x837,0x837,0x837,0x837,0x837,
-0x837,0x837,0x837,0x837,0x837,0x837,0x837,0x837,0x837,0x837,0x837,0x837,0x837,0x837,0x837,0x837,
-0x837,0xd4a,0xd4a,0x7b,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,
-0xb10,0x7e,0x7e,0x7e,0xb16,0xb16,0xb16,0xb16,0xb16,0xb16,0xb16,0xb16,0xb16,0xb16,0xb16,0xb16,
-0xb16,0xb16,0xb16,0xb16,0xb16,0xc57,0xb16,0xb16,0xb16,0xc57,0xb16,0x81,0x81,0x81,0x81,0x81,
-0x81,0x81,0x81,0x81,0x1191,0x1191,0x1191,0x1191,0x1191,0x1191,0x1191,0x1191,0x1191,0x1191,0x1191,0x1191,
-0x1191,0x1191,0x1191,0x1191,0x9ae,0x9ae,0x9ae,0x9ae,0x84,0x84,0x84,0x84,0x84,0x84,0x84,0x84,
-0x84,0x84,0x84,0x84,0x1206,0x1206,0x1206,0x1206,0x1206,0x1206,0x1206,0x1206,0x1206,0x1206,0x1206,0x1206,
-0x1206,0x1206,0x1206,0x1206,0x5fa,0x5fa,0x5fa,0x5fa,0x5fa,0x5fa,0x5fa,0x87,0x87,0x87,0x87,0x87,
-0x87,0x87,0x87,0x87,0x87,0x87,0x87,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x87,0x87,0x87,0x87,
-0x87,0xae9,0x5eb,0x5f1,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x5ee,0x5f1,0x5f1,
-0x5f1,0x5f1,0x5f1,0x5f1,0x5f1,0x5f1,0x5f1,0x5f1,0x5f1,0x5f1,0x5f1,0x87,0x5f1,0x5f1,0x5f1,0x5f1,
-0x5f1,0x87,0x5f1,0x87,0x5f1,0x5f1,0x87,0x5f1,0x5f1,0x87,0x5f1,0x5f1,0x5f1,0x5f1,0x5f1,0x5f1,
-0x5f1,0x5f1,0x5f1,0x5f4,0x606,0x600,0x606,0x600,0x603,0x609,0x606,0x600,0x603,0x609,0x606,0x600,
-0x603,0x609,0x606,0x600,0x1311,0x1311,0x1afd,0x8a,0x8a,0x8a,0x8a,0x8a,0x8a,0x8a,0x8a,0x8a,
-0x8a,0x8a,0x8a,0x8a,0x8a,0x8a,0x8a,0x606,0x600,0x603,0x609,0x606,0x600,0x606,0x600,0x606,
-0x600,0x606,0x606,0x600,0x600,0x600,0x600,0x603,0x600,0x600,0x603,0x600,0x603,0x603,0x603,0x600,
-0x603,0x603,0x603,0x603,0x8a,0x8a,0x603,0x603,0x603,0x603,0x600,0x600,0x603,0x600,0x600,0x600,
-0x600,0x603,0x600,0x600,0x600,0x600,0x600,0x603,0x603,0x603,0x600,0x600,0x8a,0x8a,0x8a,0x8a,
-0x8a,0x8a,0x8a,0x1afd,0xb34,0xb34,0xb34,0xb34,0xb34,0xb34,0xb34,0xb34,0xb34,0xb34,0xb34,0xb34,
-0xb34,0xb34,0xb34,0xb34,0x84f,0x861,0x85e,0x861,0x85e,0xc6c,0xc6c,0xd56,0xd53,0x852,0x852,0x852,
-0x852,0x864,0x864,0x864,0x87c,0x87f,0x88e,0x8d,0x882,0x885,0x891,0x891,0x879,0x870,0x86a,0x870,
-0x86a,0x870,0x86a,0x86d,0x86d,0x888,0x888,0x88b,0x888,0x888,0x888,0x8d,0x888,0x876,0x873,0x86d,
-0x8d,0x8d,0x8d,0x8d,0x612,0x61e,0x612,0xbe8,0x612,0x90,0x612,0x61e,0x612,0x61e,0x612,0x61e,
-0x612,0x61e,0x612,0x61e,0x61e,0x61b,0x615,0x618,0x61e,0x61b,0x615,0x618,0x61e,0x61b,0x615,0x618,
-0x61e,0x61b,0x615,0x61b,0x615,0x61b,0x615,0x618,0x61e,0x61b,0x615,0x61b,0x615,0x61b,0x615,0x61b,
-0x615,0x90,0x90,0x60f,0x750,0x753,0x768,0x76b,0x74a,0x753,0x753,0x96,0x732,0x735,0x735,0x735,
-0x735,0x732,0x732,0x96,0x93,0x93,0x93,0x93,0x93,0x93,0x93,0x93,0x93,0xaec,0xaec,0xaec,
-0x9b1,0x72c,0x621,0x621,0x96,0x77a,0x759,0x74a,0x753,0x750,0x74a,0x75c,0x74d,0x747,0x74a,0x768,
-0x75f,0x756,0x777,0x74a,0x774,0x774,0x774,0x774,0x774,0x774,0x774,0x774,0x774,0x774,0x765,0x762,
-0x768,0x768,0x768,0x77a,0x73b,0x738,0x738,0x738,0x738,0x738,0x738,0x738,0x738,0x738,0x738,0x738,
-0x738,0x738,0x738,0x738,0x738,0x738,0x738,0x738,0x738,0x738,0x738,0x738,0x738,0x738,0x738,0x738,
-0x738,0x738,0x738,0x96,0x96,0x96,0x738,0x738,0x738,0x738,0x738,0x738,0x96,0x96,0x738,0x738,
-0x738,0x738,0x738,0x738,0x96,0x96,0x738,0x738,0x738,0x738,0x738,0x738,0x96,0x96,0x738,0x738,
-0x738,0x96,0x96,0x96,0xb37,0xb37,0xb37,0xb37,0x99,0x99,0x99,0x99,0x99,0x99,0x99,0x99,
-0x99,0x184e,0x184e,0x184e,0xb3d,0xb3d,0xb3d,0xb3d,0xb3d,0xb3d,0xb3d,0xb3d,0xb3d,0xb3d,0xb3d,0xb3d,
-0xb3d,0xb3d,0xb3d,0xb3d,0xb3d,0xb3d,0xb3d,0x9c,0x9c,0x9c,0x9c,0x9c,0x1614,0x1614,0x1614,0x1614,
-0x1614,0x1614,0x1614,0x1614,0x1614,0x1614,0x1614,0x1614,0x1614,0x1614,0x1614,0x1614,0xb46,0xb46,0xb46,0xb46,
-0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,
-0xb46,0xb46,0x9f,0x9f,0x9f,0x9f,0x9f,0x9f,0x9f,0x9f,0x9f,0x9f,0xb52,0xb52,0xb52,0xb52,
-0xb52,0xb52,0xb52,0xa2,0xa2,0xfcf,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,
-0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0x16d1,0x16d1,0x16d1,0x16d1,
-0x16d1,0x16d1,0x16d1,0x16d1,0x16d1,0x1b00,0x1b00,0xa2,0xa2,0xa2,0xa2,0xa2,0xa2,0xa2,0xa2,0xa2,
-0xa2,0xa2,0xa2,0xa2,0xa2,0xa2,0xa2,0xa2,0xa2,0xa2,0xa2,0xa2,0xb6a,0xb6a,0xb6a,0xb6a,
-0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,
-0xb67,0xa5,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb6a,0xb6a,0xb67,0xb67,
-0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,
-0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb6a,0xa5,0xb6a,0xb6a,0xa5,0xa5,0xb6a,0xa5,
-0xa5,0xb6a,0xb6a,0xa5,0xa5,0xb6a,0xb6a,0xb6a,0xb6a,0xa5,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,
-0xb6a,0xb6a,0xb67,0xb67,0xb67,0xb67,0xa5,0xb67,0xa5,0xb67,0xb67,0xb67,0xb67,0xcde,0xb67,0xb67,
-0xa5,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb6a,0xb6a,0xb6a,0xb6a,
-0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb67,0xb67,0xb67,0xb67,
-0xb6a,0xb6a,0xa5,0xb6a,0xb6a,0xb6a,0xb6a,0xa5,0xa5,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,
-0xb6a,0xa5,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xa5,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,
-0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,
-0xb67,0xb67,0xb67,0xb67,0xb6a,0xb6a,0xa5,0xb6a,0xb6a,0xb6a,0xb6a,0xa5,0xb6a,0xb6a,0xb6a,0xb6a,
-0xb6a,0xa5,0xb6a,0xa5,0xa5,0xa5,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xa5,0xb67,0xb67,
-0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xdc8,0xdc8,0xa5,0xa5,
-0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,
-0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb67,0xb67,0xb67,0xb61,0xb67,0xb67,0xb67,0xb67,
-0xb67,0xb67,0xedf,0xedc,0xa5,0xa5,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,
-0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xa8,0xb70,0xa8,0xa8,0xa8,0xa8,0xa8,0xa8,
+0x39,0x39,0x39,0x39,0x39,0x39,0x39,0x39,0x4c8,0x4c8,0x4c8,0x4c8,0x4c8,0x4c8,0x3c,0x13dd,
+0x3c,0x3c,0x3c,0x3c,0x3c,0x13dd,0x3c,0x3c,0x4c5,0x4c5,0x4c5,0x4c5,0x4c5,0x4c5,0x4c5,0x4c5,
+0x4c5,0x4c5,0x4c5,0x4c5,0x4c5,0x4c5,0x4c5,0x4c5,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xdb9,
+0xa62,0x3f,0xa62,0xa62,0xa62,0xa62,0x3f,0x3f,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0x3f,
+0xa62,0x3f,0xa62,0xa62,0xa62,0xa62,0x3f,0x3f,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xdb9,
+0xa62,0x3f,0xa62,0xa62,0xa62,0xa62,0x3f,0x3f,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,
+0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xdb9,0xa62,0x3f,0xa62,0xa62,
+0xa62,0xa62,0x3f,0x3f,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0x3f,0xa62,0x3f,0xa62,0xa62,
+0xa62,0xa62,0x3f,0x3f,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xdb9,0xa62,0xa62,0xa62,0xa62,
+0xa62,0xa62,0xa62,0x3f,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,
+0xa62,0xa62,0xa62,0xdb9,0xa62,0x3f,0xa62,0xa62,0xa62,0xa62,0x3f,0x3f,0xa62,0xa62,0xa62,0xa62,
+0xa62,0xa62,0xa62,0xdb9,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,
+0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0x3f,0x3f,0x130e,0x130e,0xdb3,0xdb6,0xa5c,0xa65,0xa59,
+0xa59,0xa59,0xa59,0xa65,0xa65,0xa5f,0xa5f,0xa5f,0xa5f,0xa5f,0xa5f,0xa5f,0xa5f,0xa5f,0xa56,0xa56,
+0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0x3f,0x3f,0x3f,0xa68,0xa68,0xa68,0xa68,
+0xa68,0xa68,0xa68,0xa68,0xa68,0xa68,0xa68,0xa68,0xa68,0xa68,0xa68,0xa68,0xa68,0xa68,0xa68,0xa68,
+0xa68,0x16d7,0x42,0x42,0x16d4,0x16d4,0x16d4,0x16d4,0x16d4,0x16d4,0x42,0x42,0xa7a,0xa7d,0xa7d,0xa7d,
+0xa7d,0xa7d,0xa7d,0xa7d,0xa7d,0xa7d,0xa7d,0xa7d,0xa7d,0xa7d,0xa7d,0xa7d,0xa7d,0xa7d,0xa7d,0xa7d,
+0xa7d,0xa7d,0xa7d,0xa7d,0xa7d,0xa7d,0xa7d,0xa77,0xa74,0x45,0x45,0x45,0xa83,0xa83,0xa83,0xa83,
+0xa83,0xa83,0xa83,0xa83,0xa83,0xa83,0xa83,0xa80,0xa80,0xa80,0xa83,0xa83,0xa83,0x14cd,0x14cd,0x14cd,
+0x14cd,0x14cd,0x14cd,0x14cd,0x14cd,0x48,0x48,0x48,0x48,0x48,0x48,0x48,0xaa4,0xaa4,0xaa4,0xaa4,
+0xaa4,0xaa4,0xa86,0xaa4,0xaa4,0xa89,0xa89,0xa89,0xa89,0xa89,0xa89,0xa89,0xa89,0xa89,0xa8c,0xa89,
+0xa9b,0xa9b,0xa9e,0xaa7,0xa95,0xa92,0xa9b,0xa98,0xaa7,0xcd2,0x4b,0x4b,0xaa1,0xaa1,0xaa1,0xaa1,
+0xaa1,0xaa1,0xaa1,0xaa1,0xaa1,0xaa1,0x4b,0x4b,0x4b,0x4b,0x4b,0x4b,0xcd5,0xcd5,0xcd5,0xcd5,
+0xcd5,0xcd5,0xcd5,0xcd5,0xcd5,0xcd5,0x4b,0x4b,0x4b,0x4b,0x4b,0x4b,0xab6,0xab6,0xb2e,0xb31,
+0xabc,0xb2b,0xab9,0xab6,0xabf,0xace,0xac2,0xad1,0xad1,0xad1,0xaad,0x1b09,0xac5,0xac5,0xac5,0xac5,
+0xac5,0xac5,0xac5,0xac5,0xac5,0xac5,0x4e,0x4e,0x4e,0x4e,0x4e,0x4e,0xac8,0xac8,0xac8,0xac8,
+0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,
+0xac8,0xac8,0xac8,0xac8,0x18fc,0x4e,0x4e,0x4e,0x4e,0x4e,0x4e,0x4e,0xac8,0xac8,0xac8,0xac8,
+0xac8,0xac8,0xac8,0xac8,0xac8,0xab0,0xfc6,0x4e,0x4e,0x4e,0x4e,0x4e,0x117f,0x117f,0x117f,0x117f,
+0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x4e6,0x4e6,0x4e6,0x4e6,
+0x4e6,0x4e6,0x4e6,0x4e6,0x4e9,0x4e9,0x4e9,0x4e9,0x4e9,0x4e9,0x4e9,0x4e9,0x4e6,0x4e6,0x4e6,0x4e6,
+0x4e6,0x4e6,0x51,0x51,0x4e9,0x4e9,0x4e9,0x4e9,0x4e9,0x4e9,0x51,0x51,0x4e6,0x4e6,0x4e6,0x4e6,
+0x4e6,0x4e6,0x4e6,0x4e6,0x51,0x4e9,0x51,0x4e9,0x51,0x4e9,0x51,0x4e9,0x4e6,0x4e6,0x4e6,0x4e6,
+0x4e6,0x4e6,0x4e6,0x4e6,0x4e9,0x4e9,0x4e9,0x4e9,0x4e9,0x4e9,0x4e9,0x4e9,0x4e6,0x4e6,0x4e6,0x4e6,
+0x4e6,0x4e6,0x4e6,0x4e6,0x4e6,0x4e6,0x4e6,0x4e6,0x4e6,0x4e6,0x51,0x51,0x4e6,0x4e6,0x4e6,0x4e6,
+0x4e6,0x4e6,0x4e6,0x4e6,0x4e9,0x4e9,0x4e9,0x4e9,0x4e9,0x4e9,0x4e9,0x4e9,0x4e6,0x4e6,0x4e6,0x4e6,
+0x4e6,0x51,0x4e6,0x4e6,0x4e9,0x4e9,0x4e9,0x4e9,0x4e9,0x4e0,0x4e6,0x4e0,0x4e0,0x4dd,0x4e6,0x4e6,
+0x4e6,0x51,0x4e6,0x4e6,0x4e9,0x4e9,0x4e9,0x4e9,0x4e9,0x4dd,0x4dd,0x4dd,0x4e6,0x4e6,0x4e6,0x4e6,
+0x51,0x51,0x4e6,0x4e6,0x4e9,0x4e9,0x4e9,0x4e9,0x51,0x4dd,0x4dd,0x4dd,0x4e6,0x4e6,0x4e6,0x4e6,
+0x4e6,0x4e6,0x4e6,0x4e6,0x4e9,0x4e9,0x4e9,0x4e9,0x4e9,0x4dd,0x4dd,0x4dd,0x51,0x51,0x4e6,0x4e6,
+0x4e6,0x51,0x4e6,0x4e6,0x4e9,0x4e9,0x4e9,0x4e9,0x4e9,0x4e3,0x4e0,0x51,0xba6,0xba9,0xba9,0xba9,
+0xfcf,0x54,0x14a9,0x14a9,0x14a9,0x14a9,0x4f2,0x4f2,0x4f2,0x4f2,0x4f2,0x4f2,0x53d,0xbbb,0x57,0x57,
+0x6d8,0x53d,0x53d,0x53d,0x53d,0x53d,0x543,0x555,0x543,0x54f,0x549,0x6db,0x53a,0x6d5,0x6d5,0x6d5,
+0x6d5,0x53a,0x53a,0x53a,0x53a,0x53a,0x540,0x552,0x540,0x54c,0x546,0x57,0xdc2,0xdc2,0xdc2,0xdc2,
+0xdc2,0x1311,0x1311,0x1311,0x1311,0x1311,0x1311,0x1311,0x1311,0x57,0x57,0x57,0x1b0c,0x5a,0x5a,0x5a,
+0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x564,0x564,0x564,0x564,
+0x564,0x564,0x564,0x564,0x564,0x564,0x564,0x564,0x564,0x561,0x561,0x561,0x561,0x564,0xadd,0xadd,
+0xbc1,0xbc7,0xbc7,0xbc4,0xbc4,0xbc4,0xbc4,0xdc8,0xed9,0xed9,0xed9,0xed9,0x1113,0x5d,0x5d,0x5d,
+0x5d,0x5d,0x5d,0x5d,0x5d,0x5d,0x5d,0x5d,0x5d,0x5d,0x5d,0x5d,0x594,0x594,0x594,0xae6,
+0xee2,0xfd5,0xfd5,0xfd5,0xfd5,0x126f,0x16dd,0x16dd,0x60,0x60,0x60,0x60,0x702,0x702,0x702,0x702,
+0x702,0x702,0x702,0x702,0x702,0x702,0x5a0,0x5a0,0x59d,0x59d,0x59d,0x59d,0x5c1,0x5c1,0x5c1,0x5c1,
+0x5c1,0xaef,0xaef,0x63,0x63,0x63,0x63,0x63,0x63,0x63,0x63,0x63,0x63,0x63,0x63,0x63,
+0x63,0x63,0x63,0x63,0x63,0x63,0x63,0x63,0x63,0x63,0x63,0x63,0x5c4,0x5c4,0x5c4,0x5c4,
+0x5c4,0x5c4,0x5c4,0x5c4,0x5c4,0x5c4,0x5c4,0x66,0x66,0x66,0x66,0x66,0x66,0x66,0x66,0x66,
+0x66,0x66,0x66,0x66,0x66,0x66,0x66,0x66,0x66,0x66,0x66,0x66,0xb0a,0xb0a,0xb0a,0xb0a,
+0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,
+0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0x69,0xb0a,0xb0a,0xb0a,0xb0a,0xb0d,0xb0a,0xb0a,0xb0a,0xb0a,
+0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0d,
+0x69,0x69,0x69,0x69,0x69,0x69,0x69,0x69,0x69,0x69,0x69,0x69,0xb10,0xb10,0xb10,0xb10,
+0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,
+0xb10,0xb10,0x6c,0x6c,0x6c,0x6c,0x6c,0x6c,0x6c,0x6c,0x6c,0x6c,0x72,0x822,0x81c,0x822,
+0x81c,0x822,0x81c,0x822,0x81c,0x822,0x81c,0x81c,0x81f,0x81c,0x81f,0x81c,0x81f,0x81c,0x81f,0x81c,
+0x81f,0x81c,0x81f,0x81c,0x81f,0x81c,0x81f,0x81c,0x81f,0x81c,0x81f,0x81c,0x81c,0x81c,0x81c,0x822,
+0x81c,0x822,0x81c,0x822,0x81c,0x81c,0x81c,0x81c,0x81c,0x81c,0x822,0x81c,0x81c,0x81c,0x81c,0x81c,
+0x81f,0xc60,0xc60,0x72,0x72,0x936,0x936,0x8fd,0x8fd,0x825,0x828,0xc5d,0x75,0x75,0x75,0x75,
+0x75,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,
+0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x1101,0x18c3,0x19aa,
+0x78,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,
+0x83d,0x83d,0x83d,0x78,0x906,0x906,0x909,0x909,0x909,0x909,0x909,0x909,0x909,0x909,0x909,0x909,
+0x909,0x909,0x909,0x909,0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,
+0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,
+0x846,0xd5c,0xd5c,0x7b,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,
+0xb22,0x7e,0x7e,0x7e,0xb28,0xb28,0xb28,0xb28,0xb28,0xb28,0xb28,0xb28,0xb28,0xb28,0xb28,0xb28,
+0xb28,0xb28,0xb28,0xb28,0xb28,0xc69,0xb28,0xb28,0xb28,0xc69,0xb28,0x81,0x81,0x81,0x81,0x81,
+0x81,0x81,0x81,0x81,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,
+0x11a6,0x11a6,0x11a6,0x11a6,0x9c0,0x9c0,0x9c0,0x9c0,0x84,0x84,0x84,0x84,0x84,0x84,0x84,0x84,
+0x84,0x84,0x84,0x84,0x121b,0x121b,0x121b,0x121b,0x121b,0x121b,0x121b,0x121b,0x121b,0x121b,0x121b,0x121b,
+0x121b,0x121b,0x121b,0x121b,0x609,0x609,0x609,0x609,0x609,0x609,0x609,0x87,0x87,0x87,0x87,0x87,
+0x87,0x87,0x87,0x87,0x87,0x87,0x87,0x5f7,0x5f7,0x5f7,0x5f7,0x5f7,0x87,0x87,0x87,0x87,
+0x87,0xafb,0x5fa,0x600,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x5fd,0x600,0x600,
+0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x87,0x600,0x600,0x600,0x600,
+0x600,0x87,0x600,0x87,0x600,0x600,0x87,0x600,0x600,0x87,0x600,0x600,0x600,0x600,0x600,0x600,
+0x600,0x600,0x600,0x603,0x615,0x60f,0x615,0x60f,0x612,0x618,0x615,0x60f,0x612,0x618,0x615,0x60f,
+0x612,0x618,0x615,0x60f,0x1323,0x1323,0x1b0f,0x8a,0x8a,0x8a,0x8a,0x8a,0x8a,0x8a,0x8a,0x8a,
+0x8a,0x8a,0x8a,0x8a,0x8a,0x8a,0x8a,0x615,0x60f,0x612,0x618,0x615,0x60f,0x615,0x60f,0x615,
+0x60f,0x615,0x615,0x60f,0x60f,0x60f,0x60f,0x612,0x60f,0x60f,0x612,0x60f,0x612,0x612,0x612,0x60f,
+0x612,0x612,0x612,0x612,0x8a,0x8a,0x612,0x612,0x612,0x612,0x60f,0x60f,0x612,0x60f,0x60f,0x60f,
+0x60f,0x612,0x60f,0x60f,0x60f,0x60f,0x60f,0x612,0x612,0x612,0x60f,0x60f,0x8a,0x8a,0x8a,0x8a,
+0x8a,0x8a,0x8a,0x1b0f,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,
+0xb46,0xb46,0xb46,0xb46,0x85e,0x870,0x86d,0x870,0x86d,0xc7e,0xc7e,0xd68,0xd65,0x861,0x861,0x861,
+0x861,0x873,0x873,0x873,0x88b,0x88e,0x89d,0x8d,0x891,0x894,0x8a0,0x8a0,0x888,0x87f,0x879,0x87f,
+0x879,0x87f,0x879,0x87c,0x87c,0x897,0x897,0x89a,0x897,0x897,0x897,0x8d,0x897,0x885,0x882,0x87c,
+0x8d,0x8d,0x8d,0x8d,0x621,0x62d,0x621,0xbfa,0x621,0x90,0x621,0x62d,0x621,0x62d,0x621,0x62d,
+0x621,0x62d,0x621,0x62d,0x62d,0x62a,0x624,0x627,0x62d,0x62a,0x624,0x627,0x62d,0x62a,0x624,0x627,
+0x62d,0x62a,0x624,0x62a,0x624,0x62a,0x624,0x627,0x62d,0x62a,0x624,0x62a,0x624,0x62a,0x624,0x62a,
+0x624,0x90,0x90,0x61e,0x75f,0x762,0x777,0x77a,0x759,0x762,0x762,0x96,0x741,0x744,0x744,0x744,
+0x744,0x741,0x741,0x96,0x93,0x93,0x93,0x93,0x93,0x93,0x93,0x93,0x93,0xafe,0xafe,0xafe,
+0x9c3,0x73b,0x630,0x630,0x96,0x789,0x768,0x759,0x762,0x75f,0x759,0x76b,0x75c,0x756,0x759,0x777,
+0x76e,0x765,0x786,0x759,0x783,0x783,0x783,0x783,0x783,0x783,0x783,0x783,0x783,0x783,0x774,0x771,
+0x777,0x777,0x777,0x789,0x74a,0x747,0x747,0x747,0x747,0x747,0x747,0x747,0x747,0x747,0x747,0x747,
+0x747,0x747,0x747,0x747,0x747,0x747,0x747,0x747,0x747,0x747,0x747,0x747,0x747,0x747,0x747,0x747,
+0x747,0x747,0x747,0x96,0x96,0x96,0x747,0x747,0x747,0x747,0x747,0x747,0x96,0x96,0x747,0x747,
+0x747,0x747,0x747,0x747,0x96,0x96,0x747,0x747,0x747,0x747,0x747,0x747,0x96,0x96,0x747,0x747,
+0x747,0x96,0x96,0x96,0xb49,0xb49,0xb49,0xb49,0x99,0x99,0x99,0x99,0x99,0x99,0x99,0x99,
+0x99,0x1860,0x1860,0x1860,0xb4f,0xb4f,0xb4f,0xb4f,0xb4f,0xb4f,0xb4f,0xb4f,0xb4f,0xb4f,0xb4f,0xb4f,
+0xb4f,0xb4f,0xb4f,0xb4f,0xb4f,0xb4f,0xb4f,0x9c,0x9c,0x9c,0x9c,0x9c,0x1626,0x1626,0x1626,0x1626,
+0x1626,0x1626,0x1626,0x1626,0x1626,0x1626,0x1626,0x1626,0x1626,0x1626,0x1626,0x1626,0xb58,0xb58,0xb58,0xb58,
+0xb58,0xb58,0xb58,0xb58,0xb58,0xb58,0xb58,0xb58,0xb58,0xb58,0xb58,0xb58,0xb58,0xb58,0xb58,0xb58,
+0xb58,0xb58,0x9f,0x9f,0x9f,0x9f,0x9f,0x9f,0x9f,0x9f,0x9f,0x9f,0xb64,0xb64,0xb64,0xb64,
+0xb64,0xb64,0xb64,0xa2,0xa2,0xfe1,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,
+0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0x16e3,0x16e3,0x16e3,0x16e3,
+0x16e3,0x16e3,0x16e3,0x16e3,0x16e3,0x1b12,0x1b12,0xa2,0xa2,0xa2,0xa2,0xa2,0xa2,0xa2,0xa2,0xa2,
+0xa2,0xa2,0xa2,0xa2,0xa2,0xa2,0xa2,0xa2,0xa2,0xa2,0xa2,0xa2,0xb7c,0xb7c,0xb7c,0xb7c,
+0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,
+0xb79,0xa5,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb7c,0xb7c,0xb79,0xb79,
+0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,
+0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb7c,0xa5,0xb7c,0xb7c,0xa5,0xa5,0xb7c,0xa5,
+0xa5,0xb7c,0xb7c,0xa5,0xa5,0xb7c,0xb7c,0xb7c,0xb7c,0xa5,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,
+0xb7c,0xb7c,0xb79,0xb79,0xb79,0xb79,0xa5,0xb79,0xa5,0xb79,0xb79,0xb79,0xb79,0xcf0,0xb79,0xb79,
+0xa5,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb7c,0xb7c,0xb7c,0xb7c,
+0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb79,0xb79,0xb79,0xb79,
+0xb7c,0xb7c,0xa5,0xb7c,0xb7c,0xb7c,0xb7c,0xa5,0xa5,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,
+0xb7c,0xa5,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xa5,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,
+0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,
+0xb79,0xb79,0xb79,0xb79,0xb7c,0xb7c,0xa5,0xb7c,0xb7c,0xb7c,0xb7c,0xa5,0xb7c,0xb7c,0xb7c,0xb7c,
+0xb7c,0xa5,0xb7c,0xa5,0xa5,0xa5,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xa5,0xb79,0xb79,
+0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xdda,0xdda,0xa5,0xa5,
+0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,
+0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb79,0xb79,0xb79,0xb73,0xb79,0xb79,0xb79,0xb79,
+0xb79,0xb79,0xef1,0xeee,0xa5,0xa5,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,
+0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xa8,0xb82,0xa8,0xa8,0xa8,0xa8,0xa8,0xa8,
0xa8,0xa8,0xa8,0xa8,0xa8,0xa8,0xa8,0xa8,0xa8,0xa8,0xa8,0xa8,0xa8,0xa8,0xa8,0xa8,
-0xa8,0xa8,0xa8,0xa8,0xa8,0xa8,0xa8,0xa8,0xbf7,0xbf7,0xbf7,0xbf7,0xbf7,0xbf7,0xbf7,0xbf7,
-0xbf7,0xbf7,0xbf7,0xbf7,0xbf7,0x1b06,0xbf7,0xbf7,0xbf7,0xbf7,0xbf1,0xbf1,0xbf4,0x1b03,0xab,0xab,
-0xab,0xab,0xab,0xab,0xab,0xab,0xab,0x1b06,0xc00,0xc00,0xc00,0xc00,0xc00,0xc00,0xc00,0xc00,
-0xc00,0xc00,0xc00,0xc00,0xc00,0xc00,0xc00,0xc00,0xc00,0xc00,0xbfa,0xbfa,0xbfd,0xc60,0xc60,0xae,
-0xae,0xae,0xae,0xae,0xae,0xae,0xae,0xae,0xc06,0xc06,0xc06,0xc06,0xc06,0xc06,0xc06,0xc06,
-0xc06,0xc06,0xc06,0xc06,0xc06,0xc06,0xc06,0xc06,0xc06,0xc06,0xc03,0xc03,0xb1,0xb1,0xb1,0xb1,
-0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xc0c,0xc0c,0xc0c,0xc0c,0xc0c,0xc0c,0xc0c,0xc0c,
-0xc0c,0xc0c,0xc0c,0xc0c,0xc0c,0xb4,0xc0c,0xc0c,0xc0c,0xb4,0xc09,0xc09,0xb4,0xb4,0xb4,0xb4,
-0xb4,0xb4,0xb4,0xb4,0xb4,0xb4,0xb4,0xb4,0xcf0,0xcf0,0xcf0,0xcf0,0xcf0,0xcf0,0xcf0,0xcf0,
-0xcf0,0xcf0,0xcf0,0xcf0,0xcf0,0xcf0,0xcf0,0xcf0,0xcf0,0xcf0,0xcf0,0xcf0,0xcf0,0xcf0,0xcf0,0xcf0,
-0xcf0,0xcf0,0xcf0,0xcf0,0xcf0,0x14d6,0x14d6,0xb7,0xce1,0xce1,0xce1,0xced,0xced,0xced,0xced,0xce1,
-0xce1,0xced,0xced,0xced,0xb7,0xb7,0xb7,0xb7,0xced,0xced,0xce1,0xced,0xced,0xced,0xced,0xced,
-0xced,0xce4,0xce4,0xce4,0xb7,0xb7,0xb7,0xb7,0xce7,0xb7,0xb7,0xb7,0xcf3,0xcf3,0xcea,0xcea,
-0xcea,0xcea,0xcea,0xcea,0xcea,0xcea,0xcea,0xcea,0xcf6,0xcf6,0xcf6,0xcf6,0xcf6,0xcf6,0xcf6,0xcf6,
-0xcf6,0xcf6,0xcf6,0xcf6,0xcf6,0xcf6,0xcf6,0xcf6,0xcf6,0xcf6,0xba,0xba,0xcf6,0xcf6,0xcf6,0xcf6,
-0xcf6,0xba,0xba,0xba,0xba,0xba,0xba,0xba,0xba,0xba,0xba,0xba,0x14d9,0x14d9,0x14d9,0x14d9,
-0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,
-0xbd,0xbd,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,
-0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0xbd,0x1a52,0x14d9,0x14d9,0x14d9,0x14d9,
-0x14d9,0x14d9,0x14d9,0x14d9,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,
-0xc0,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,
-0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xc0,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,
-0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xc0,0xd1a,0xd1a,0xc0,0xd1a,
-0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xc0,0xc0,
-0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xc0,0xc0,
+0xa8,0xa8,0xa8,0xa8,0xa8,0xa8,0xa8,0xa8,0xc09,0xc09,0xc09,0xc09,0xc09,0xc09,0xc09,0xc09,
+0xc09,0xc09,0xc09,0xc09,0xc09,0x1b18,0xc09,0xc09,0xc09,0xc09,0xc03,0xc03,0xc06,0x1b15,0xab,0xab,
+0xab,0xab,0xab,0xab,0xab,0xab,0xab,0x1b18,0xc12,0xc12,0xc12,0xc12,0xc12,0xc12,0xc12,0xc12,
+0xc12,0xc12,0xc12,0xc12,0xc12,0xc12,0xc12,0xc12,0xc12,0xc12,0xc0c,0xc0c,0xc0f,0xc72,0xc72,0xae,
+0xae,0xae,0xae,0xae,0xae,0xae,0xae,0xae,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,
+0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc15,0xc15,0xb1,0xb1,0xb1,0xb1,
+0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xb1,0xc1e,0xc1e,0xc1e,0xc1e,0xc1e,0xc1e,0xc1e,0xc1e,
+0xc1e,0xc1e,0xc1e,0xc1e,0xc1e,0xb4,0xc1e,0xc1e,0xc1e,0xb4,0xc1b,0xc1b,0xb4,0xb4,0xb4,0xb4,
+0xb4,0xb4,0xb4,0xb4,0xb4,0xb4,0xb4,0xb4,0xd02,0xd02,0xd02,0xd02,0xd02,0xd02,0xd02,0xd02,
+0xd02,0xd02,0xd02,0xd02,0xd02,0xd02,0xd02,0xd02,0xd02,0xd02,0xd02,0xd02,0xd02,0xd02,0xd02,0xd02,
+0xd02,0xd02,0xd02,0xd02,0xd02,0x14e8,0x14e8,0xb7,0xcf3,0xcf3,0xcf3,0xcff,0xcff,0xcff,0xcff,0xcf3,
+0xcf3,0xcff,0xcff,0xcff,0xb7,0xb7,0xb7,0xb7,0xcff,0xcff,0xcf3,0xcff,0xcff,0xcff,0xcff,0xcff,
+0xcff,0xcf6,0xcf6,0xcf6,0xb7,0xb7,0xb7,0xb7,0xcf9,0xb7,0xb7,0xb7,0xd05,0xd05,0xcfc,0xcfc,
+0xcfc,0xcfc,0xcfc,0xcfc,0xcfc,0xcfc,0xcfc,0xcfc,0xd08,0xd08,0xd08,0xd08,0xd08,0xd08,0xd08,0xd08,
+0xd08,0xd08,0xd08,0xd08,0xd08,0xd08,0xd08,0xd08,0xd08,0xd08,0xba,0xba,0xd08,0xd08,0xd08,0xd08,
+0xd08,0xba,0xba,0xba,0xba,0xba,0xba,0xba,0xba,0xba,0xba,0xba,0x14eb,0x14eb,0x14eb,0x14eb,
+0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,
+0xbd,0xbd,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,
+0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0xbd,0x1a64,0x14eb,0x14eb,0x14eb,0x14eb,
+0x14eb,0x14eb,0x14eb,0x14eb,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,
+0xc0,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,
+0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xc0,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,
+0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xc0,0xd2c,0xd2c,0xc0,0xd2c,
+0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xc0,0xc0,
+0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xc0,0xc0,
0xc0,0xc0,0xc0,0xc0,0xc0,0xc0,0xc0,0xc0,0xc0,0xc0,0xc0,0xc0,0xc0,0xc0,0xc0,0xc0,
0xc0,0xc0,0xc0,0xc0,0xc0,0xc0,0xc0,0xc0,0xc0,0xc0,0xc0,0xc0,0xc0,0xc0,0xc0,0xc0,
-0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,
-0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xc3,0xc3,0xc3,0xc3,0xc3,
-0xd5c,0xd5c,0xd62,0xc6,0xc6,0xc6,0xc6,0xd59,0xd59,0xd59,0xd59,0xd59,0xd59,0xd59,0xd59,0xd59,
-0xd59,0xd59,0xd59,0xd59,0xd59,0xd59,0xd59,0xd59,0xd59,0xd59,0xd59,0xd59,0xd59,0xd59,0xd59,0xd59,
-0xc6,0xc6,0xc6,0xd5f,0xd5f,0xd5f,0xd5f,0xd5f,0xd5f,0xd5f,0xd5f,0xd5f,0xd23,0xd23,0xd23,0xd23,
-0xd23,0xd23,0xd23,0xd23,0xd23,0xd23,0xd23,0xd23,0xd23,0xd23,0xd23,0xd23,0xd23,0xd23,0xd23,0xd23,
-0xd23,0xd23,0xd23,0xd23,0xd23,0xd23,0xd23,0xd23,0xd23,0xd23,0xc9,0xd20,0xd2c,0xd2c,0xd2c,0xd2c,
-0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,
-0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xd2c,0xcc,0xcc,0xd29,0xd29,0xd29,0xd29,
-0xd29,0xd29,0xd29,0xd29,0xd29,0xd29,0xcc,0xcc,0xcc,0xcc,0xcc,0xcc,0x1812,0x1812,0x1812,0x1812,
-0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0xd2f,0xd2f,0xd2f,0xd2f,
-0xd2f,0xd2f,0xcf,0xcf,0xd2f,0xcf,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,
-0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xcf,0xd2f,
-0xd2f,0xcf,0xcf,0xcf,0xd2f,0xcf,0xcf,0xd2f,0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,
-0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,0xd2,
-0xd2,0xd2,0xd2,0xd2,0xd2,0xd2,0xd2,0xd2,0xde3,0xde3,0xde3,0xde3,0xde3,0xde3,0xde3,0xde3,
-0xde3,0xde3,0xde3,0x14dc,0x14dc,0x1788,0x1788,0xd8,0x10cb,0x10cb,0x10cb,0x10cb,0x10cb,0x10cb,0x10cb,0x10cb,
-0x10cb,0x10cb,0x10cb,0x10cb,0x1a61,0x129,0x129,0x129,0xdf5,0xdf5,0xdf5,0xdf5,0xdf5,0xdf5,0xdf5,0xdf5,
-0xdf5,0xdf5,0xdf5,0xdf5,0xdf5,0xdf5,0xdf5,0xdf5,0xdf5,0xdf5,0xdf5,0xdf5,0xdf5,0xdf5,0xdf5,0xdec,
-0xdec,0xdf2,0xdf2,0xdec,0xdb,0xdb,0xdef,0xdef,0x10fb,0x10fb,0x10fb,0x10fb,0xde,0xde,0xde,0xde,
-0xde,0xde,0xde,0xde,0xde,0xde,0xde,0xde,0xc5d,0xc5d,0xc5d,0xc5d,0xc5d,0xc5d,0xc5d,0xc5d,
-0xc5d,0xc5d,0xc5d,0xc5d,0xc5d,0xc5d,0xc5d,0xc5d,0xe07,0xe04,0xe07,0xe04,0xe04,0xdfb,0xdfb,0xdfb,
-0xdfb,0xdfb,0xdfb,0x1146,0x1143,0x1146,0x1143,0x1140,0x1140,0x1140,0x13d4,0x13d1,0xe1,0xe1,0xe1,0xe1,
-0xe1,0xe01,0xdfe,0xdfe,0xdfe,0xdfb,0xe01,0xdfe,0xe0a,0xe0a,0xe0a,0xe0a,0xe0a,0xe0a,0xe0a,0xe0a,
-0xe0a,0xe0a,0xe0a,0xe0a,0xe0a,0xe0a,0xe0a,0xe0a,0xe0a,0xe0a,0xe0a,0xe0a,0xe0a,0xe0a,0xe0a,0xe4,
-0xe4,0xe4,0xe4,0xe4,0xe4,0xe4,0xe4,0xe4,0xe0a,0xe0a,0xe0a,0xe0a,0xe0a,0xe0a,0xe0a,0xe4,
-0xe0a,0xe0a,0xe0a,0xe0a,0xe0a,0xe0a,0xe0a,0xe4,0xe0a,0xe0a,0xe0a,0xe0a,0xe0a,0xe0a,0xe0a,0xe4,
-0xe0a,0xe0a,0xe0a,0xe0a,0xe0a,0xe0a,0xe0a,0xe4,0xe10,0xe10,0xe10,0xe10,0xe10,0xe10,0xe10,0xe10,
-0xe10,0xe10,0xe10,0xe10,0xe10,0xe10,0xe10,0xe10,0xe0d,0xe0d,0xe0d,0xe0d,0xe0d,0xe0d,0xe0d,0xe0d,
-0xe0d,0xe0d,0xe7,0xe7,0xe7,0xe7,0xe7,0xe7,0xe13,0xe13,0xe13,0xe13,0xe13,0xe13,0xea,0x13d7,
-0xea,0xea,0xea,0xea,0xea,0x13d7,0xea,0xea,0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,
-0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,0xe2b,0xe1f,0xe1f,0xe1f,0xed,0xe1f,0xe1f,0xed,
-0xed,0xed,0xed,0xed,0xe1f,0xe1f,0xe1f,0xe1f,0xe2b,0xe2b,0xe2b,0xe2b,0xed,0xe2b,0xe2b,0xe2b,
-0xed,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,
-0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0x18f3,0x18f3,0xed,0xed,0xe1c,0xe1c,0xe1c,0xed,
-0xed,0xed,0xed,0xe22,0xe25,0xe25,0xe25,0xe25,0xe25,0xe25,0xe25,0xe25,0x18f0,0xed,0xed,0xed,
-0xed,0xed,0xed,0xed,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe2e,0xe2e,0xe25,0xed,0xed,0xed,
-0xed,0xed,0xed,0xed,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0x114c,0x114c,
-0xf0,0xf0,0xf0,0xf0,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3d,0xe3d,0xe3d,0xe3a,0xe3a,0xe3d,0xe3a,
-0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xf0,0xf0,0xf0,0xf0,0xf0,0xf0,
-0xe37,0xe37,0xe37,0xe37,0xe37,0xe37,0xe37,0xe37,0xe37,0xe37,0x1149,0xf0,0xf0,0xf0,0xe34,0xe34,
-0xe43,0xe43,0xe43,0xe43,0xf3,0xf3,0xf3,0xf3,0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,
-0xe40,0xe43,0xe43,0xe43,0xe43,0xe43,0xf3,0xf3,0xf3,0xf3,0xf3,0xf3,0xf3,0xf3,0xf3,0xf3,
-0x14eb,0x14f1,0x14ee,0x1833,0x178e,0x1857,0x1857,0x1857,0x1857,0x1857,0x18f9,0x18f6,0x18fc,0x18f6,0x18fc,0x19b9,
-0x1a55,0x1a55,0x1a55,0x1b18,0x1b18,0x1b12,0x1b0f,0x1b12,0x1b0f,0x1b12,0x1b0f,0x1b12,0x1b0f,0x1b15,0xf6,0xf6,
+0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,
+0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xc3,0xc3,0xc3,0xc3,0xc3,
+0xd6e,0xd6e,0xd74,0xc6,0xc6,0xc6,0xc6,0xd6b,0xd6b,0xd6b,0xd6b,0xd6b,0xd6b,0xd6b,0xd6b,0xd6b,
+0xd6b,0xd6b,0xd6b,0xd6b,0xd6b,0xd6b,0xd6b,0xd6b,0xd6b,0xd6b,0xd6b,0xd6b,0xd6b,0xd6b,0xd6b,0xd6b,
+0xc6,0xc6,0xc6,0xd71,0xd71,0xd71,0xd71,0xd71,0xd71,0xd71,0xd71,0xd71,0xd35,0xd35,0xd35,0xd35,
+0xd35,0xd35,0xd35,0xd35,0xd35,0xd35,0xd35,0xd35,0xd35,0xd35,0xd35,0xd35,0xd35,0xd35,0xd35,0xd35,
+0xd35,0xd35,0xd35,0xd35,0xd35,0xd35,0xd35,0xd35,0xd35,0xd35,0xc9,0xd32,0xd3e,0xd3e,0xd3e,0xd3e,
+0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,
+0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xcc,0xcc,0xd3b,0xd3b,0xd3b,0xd3b,
+0xd3b,0xd3b,0xd3b,0xd3b,0xd3b,0xd3b,0xcc,0xcc,0xcc,0xcc,0xcc,0xcc,0x1824,0x1824,0x1824,0x1824,
+0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,0xd41,0xd41,0xd41,0xd41,
+0xd41,0xd41,0xcf,0xcf,0xd41,0xcf,0xd41,0xd41,0xd41,0xd41,0xd41,0xd41,0xd41,0xd41,0xd41,0xd41,
+0xd41,0xd41,0xd41,0xd41,0xd41,0xd41,0xd41,0xd41,0xd41,0xd41,0xd41,0xd41,0xd41,0xd41,0xcf,0xd41,
+0xd41,0xcf,0xcf,0xcf,0xd41,0xcf,0xcf,0xd41,0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,
+0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,0xd2,
+0xd2,0xd2,0xd2,0xd2,0xd2,0xd2,0xd2,0xd2,0xdf5,0xdf5,0xdf5,0xdf5,0xdf5,0xdf5,0xdf5,0xdf5,
+0xdf5,0xdf5,0xdf5,0x14ee,0x14ee,0x179a,0x179a,0xd8,0x10e0,0x10e0,0x10e0,0x10e0,0x10e0,0x10e0,0x10e0,0x10e0,
+0x10e0,0x10e0,0x10e0,0x10e0,0x1a73,0x129,0x129,0x129,0xe07,0xe07,0xe07,0xe07,0xe07,0xe07,0xe07,0xe07,
+0xe07,0xe07,0xe07,0xe07,0xe07,0xe07,0xe07,0xe07,0xe07,0xe07,0xe07,0xe07,0xe07,0xe07,0xe07,0xdfe,
+0xdfe,0xe04,0xe04,0xdfe,0xdb,0xdb,0xe01,0xe01,0x1110,0x1110,0x1110,0x1110,0xde,0xde,0xde,0xde,
+0xde,0xde,0xde,0xde,0xde,0xde,0xde,0xde,0xc6f,0xc6f,0xc6f,0xc6f,0xc6f,0xc6f,0xc6f,0xc6f,
+0xc6f,0xc6f,0xc6f,0xc6f,0xc6f,0xc6f,0xc6f,0xc6f,0xe19,0xe16,0xe19,0xe16,0xe16,0xe0d,0xe0d,0xe0d,
+0xe0d,0xe0d,0xe0d,0x115b,0x1158,0x115b,0x1158,0x1155,0x1155,0x1155,0x13e6,0x13e3,0xe1,0xe1,0xe1,0xe1,
+0xe1,0xe13,0xe10,0xe10,0xe10,0xe0d,0xe13,0xe10,0xe1c,0xe1c,0xe1c,0xe1c,0xe1c,0xe1c,0xe1c,0xe1c,
+0xe1c,0xe1c,0xe1c,0xe1c,0xe1c,0xe1c,0xe1c,0xe1c,0xe1c,0xe1c,0xe1c,0xe1c,0xe1c,0xe1c,0xe1c,0xe4,
+0xe4,0xe4,0xe4,0xe4,0xe4,0xe4,0xe4,0xe4,0xe1c,0xe1c,0xe1c,0xe1c,0xe1c,0xe1c,0xe1c,0xe4,
+0xe1c,0xe1c,0xe1c,0xe1c,0xe1c,0xe1c,0xe1c,0xe4,0xe1c,0xe1c,0xe1c,0xe1c,0xe1c,0xe1c,0xe1c,0xe4,
+0xe1c,0xe1c,0xe1c,0xe1c,0xe1c,0xe1c,0xe1c,0xe4,0xe22,0xe22,0xe22,0xe22,0xe22,0xe22,0xe22,0xe22,
+0xe22,0xe22,0xe22,0xe22,0xe22,0xe22,0xe22,0xe22,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,0xe1f,
+0xe1f,0xe1f,0xe7,0xe7,0xe7,0xe7,0xe7,0xe7,0xe25,0xe25,0xe25,0xe25,0xe25,0xe25,0xea,0x13e9,
+0xea,0xea,0xea,0xea,0xea,0x13e9,0xea,0xea,0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,
+0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,0xe3d,0xe31,0xe31,0xe31,0xed,0xe31,0xe31,0xed,
+0xed,0xed,0xed,0xed,0xe31,0xe31,0xe31,0xe31,0xe3d,0xe3d,0xe3d,0xe3d,0xed,0xe3d,0xe3d,0xe3d,
+0xed,0xe3d,0xe3d,0xe3d,0xe3d,0xe3d,0xe3d,0xe3d,0xe3d,0xe3d,0xe3d,0xe3d,0xe3d,0xe3d,0xe3d,0xe3d,
+0xe3d,0xe3d,0xe3d,0xe3d,0xe3d,0xe3d,0xe3d,0xe3d,0x1905,0x1905,0xed,0xed,0xe2e,0xe2e,0xe2e,0xed,
+0xed,0xed,0xed,0xe34,0xe37,0xe37,0xe37,0xe37,0xe37,0xe37,0xe37,0xe37,0x1902,0xed,0xed,0xed,
+0xed,0xed,0xed,0xed,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe40,0xe40,0xe37,0xed,0xed,0xed,
+0xed,0xed,0xed,0xed,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0x1161,0x1161,
+0xf0,0xf0,0xf0,0xf0,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4f,0xe4f,0xe4f,0xe4c,0xe4c,0xe4f,0xe4c,
+0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xf0,0xf0,0xf0,0xf0,0xf0,0xf0,
+0xe49,0xe49,0xe49,0xe49,0xe49,0xe49,0xe49,0xe49,0xe49,0xe49,0x115e,0xf0,0xf0,0xf0,0xe46,0xe46,
+0xe55,0xe55,0xe55,0xe55,0xf3,0xf3,0xf3,0xf3,0xe55,0xe55,0xe55,0xe55,0xe55,0xe55,0xe55,0xe55,
+0xe52,0xe55,0xe55,0xe55,0xe55,0xe55,0xf3,0xf3,0xf3,0xf3,0xf3,0xf3,0xf3,0xf3,0xf3,0xf3,
+0x14fd,0x1503,0x1500,0x1845,0x17a0,0x1869,0x1869,0x1869,0x1869,0x1869,0x190b,0x1908,0x190e,0x1908,0x190e,0x19cb,
+0x1a67,0x1a67,0x1a67,0x1b2a,0x1b2a,0x1b24,0x1b21,0x1b24,0x1b21,0x1b24,0x1b21,0x1b24,0x1b21,0x1b27,0xf6,0xf6,
0xf6,0xf6,0xf6,0xf6,0xf6,0xf6,0xf6,0xf6,0xf6,0xf6,0xf6,0xf6,0xf6,0xf6,0xf6,0xf6,
0xf6,0xf6,0xf6,0xf6,0xf6,0xf6,0xf6,0xf6,0xf6,0xf6,0xf6,0xf6,0xf6,0xf6,0xf6,0xf6,
-0xe67,0xe67,0xe67,0xe64,0xe64,0xe5b,0xe5b,0xe64,0xe61,0xe61,0xe61,0xe61,0x1a58,0xf9,0xf9,0xf9,
-0x12ba,0x12ba,0x12ba,0x12bd,0x12bd,0x12bd,0x12b4,0x12b4,0x12b7,0x12b4,0x14d,0x14d,0x14d,0x14d,0x14d,0x14d,
-0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,0x13e3,0x13e3,0xfc,0xfc,0xfc,0xfc,0xfc,0xfc,0xfc,0xe6d,
-0x1323,0xfc,0xfc,0xfc,0xfc,0xfc,0xfc,0xfc,0xfc,0xfc,0xfc,0xfc,0xfc,0xfc,0xfc,0x1320,
-0xc30,0xc30,0xc30,0xc30,0xc30,0xc30,0xc30,0xc30,0xc30,0xc30,0xc30,0xc30,0xc30,0xc30,0xc30,0xc30,
-0xe9a,0xe8b,0xe85,0xe97,0xe94,0xe8e,0xe8e,0xe9d,0xe88,0xe91,0xff,0xff,0xff,0xff,0xff,0xff,
-0xf21,0xf21,0xf0c,0xf21,0xf24,0xf27,0xf27,0xf27,0xf27,0xf27,0xf27,0xf27,0x1b1b,0x105,0x105,0x105,
-0xf1b,0xf1b,0xf1b,0xf1b,0xf1b,0xf1b,0xf1b,0xf1b,0xf1b,0xf1b,0xf2d,0xf2d,0xf12,0xf18,0xf2d,0xf2d,
-0xf15,0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,0xf0f,0xf0f,0xf0f,0xf0f,0xf0f,
-0xf0f,0xf0f,0xf0f,0xf0f,0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,0xf12,0x1b1e,0x1b1e,0x105,
-0x1b27,0x1b21,0x19bf,0x19bc,0x19bf,0x19bf,0x19bf,0x1a5e,0x1a5b,0x1a5e,0x1a5b,0x108,0x108,0x108,0x108,0x108,
-0x1b27,0x1b21,0x108,0x1b21,0x108,0x1b21,0x1b27,0x1b21,0x1b27,0x1b21,0x108,0x108,0x108,0x108,0x108,0x108,
-0x108,0x108,0x108,0x108,0x108,0x108,0x108,0x108,0x108,0x108,0x108,0x108,0x108,0x108,0x1b24,0x1b24,
-0x1b24,0x1a5e,0x1a5b,0x14fa,0x13ec,0x13ec,0x1326,0x1026,0x1026,0x1026,0x1026,0x1026,0xf3c,0xf3c,0xf3c,0xf3c,
-0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,
-0xf39,0xf39,0xf3f,0xf3f,0x10b,0x10b,0x10b,0x10b,0x10b,0x10b,0x10b,0x10b,0xf48,0xf48,0xf48,0xf48,
-0xf48,0xf48,0xf48,0xf48,0xf48,0xf48,0xf48,0xf48,0xf48,0xf48,0xf48,0xf48,0xf48,0xf48,0xf48,0xf48,
-0xf48,0xf48,0xf42,0xf42,0xf42,0xf42,0x1155,0x1155,0x10e,0x10e,0x10e,0xf45,0x1500,0x1500,0x1500,0x1500,
-0x1500,0x1500,0x1500,0x1500,0x1500,0x1500,0x1500,0x1500,0x1500,0x1500,0x1500,0x1500,0x1500,0x1500,0x1500,0x1500,
-0x1500,0x1500,0x1500,0x1500,0x1500,0x16e0,0x111,0x111,0x111,0x111,0x111,0x111,0x111,0x111,0x111,0x111,
+0xe79,0xe79,0xe79,0xe76,0xe76,0xe6d,0xe6d,0xe76,0xe73,0xe73,0xe73,0xe73,0x1a6a,0xf9,0xf9,0xf9,
+0x12cc,0x12cc,0x12cc,0x12cf,0x12cf,0x12cf,0x12c6,0x12c6,0x12c9,0x12c6,0x14d,0x14d,0x14d,0x14d,0x14d,0x14d,
+0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,0x13f5,0x13f5,0xfc,0xfc,0xfc,0xfc,0xfc,0xfc,0xfc,0xe7f,
+0x1335,0xfc,0xfc,0xfc,0xfc,0xfc,0xfc,0xfc,0xfc,0xfc,0xfc,0xfc,0xfc,0xfc,0xfc,0x1332,
+0xc42,0xc42,0xc42,0xc42,0xc42,0xc42,0xc42,0xc42,0xc42,0xc42,0xc42,0xc42,0xc42,0xc42,0xc42,0xc42,
+0xeac,0xe9d,0xe97,0xea9,0xea6,0xea0,0xea0,0xeaf,0xe9a,0xea3,0xff,0xff,0xff,0xff,0xff,0xff,
+0xf33,0xf33,0xf1e,0xf33,0xf36,0xf39,0xf39,0xf39,0xf39,0xf39,0xf39,0xf39,0x1b2d,0x105,0x105,0x105,
+0xf2d,0xf2d,0xf2d,0xf2d,0xf2d,0xf2d,0xf2d,0xf2d,0xf2d,0xf2d,0xf3f,0xf3f,0xf24,0xf2a,0xf3f,0xf3f,
+0xf27,0xf24,0xf24,0xf24,0xf24,0xf24,0xf24,0xf24,0xf24,0xf24,0xf24,0xf21,0xf21,0xf21,0xf21,0xf21,
+0xf21,0xf21,0xf21,0xf21,0xf24,0xf24,0xf24,0xf24,0xf24,0xf24,0xf24,0xf24,0xf24,0x1b30,0x1b30,0x105,
+0x1b39,0x1b33,0x19d1,0x19ce,0x19d1,0x19d1,0x19d1,0x1a70,0x1a6d,0x1a70,0x1a6d,0x108,0x108,0x108,0x108,0x108,
+0x1b39,0x1b33,0x108,0x1b33,0x108,0x1b33,0x1b39,0x1b33,0x1b39,0x1b33,0x108,0x108,0x108,0x108,0x108,0x108,
+0x108,0x108,0x108,0x108,0x108,0x108,0x108,0x108,0x108,0x108,0x108,0x108,0x108,0x108,0x1b36,0x1b36,
+0x1b36,0x1a70,0x1a6d,0x150c,0x13fe,0x13fe,0x1338,0x103b,0x103b,0x103b,0x103b,0x103b,0xf4e,0xf4e,0xf4e,0xf4e,
+0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,
+0xf4b,0xf4b,0xf51,0xf51,0x10b,0x10b,0x10b,0x10b,0x10b,0x10b,0x10b,0x10b,0xf5a,0xf5a,0xf5a,0xf5a,
+0xf5a,0xf5a,0xf5a,0xf5a,0xf5a,0xf5a,0xf5a,0xf5a,0xf5a,0xf5a,0xf5a,0xf5a,0xf5a,0xf5a,0xf5a,0xf5a,
+0xf5a,0xf5a,0xf54,0xf54,0xf54,0xf54,0x116a,0x116a,0x10e,0x10e,0x10e,0xf57,0x1512,0x1512,0x1512,0x1512,
+0x1512,0x1512,0x1512,0x1512,0x1512,0x1512,0x1512,0x1512,0x1512,0x1512,0x1512,0x1512,0x1512,0x1512,0x1512,0x1512,
+0x1512,0x1512,0x1512,0x1512,0x1512,0x16f2,0x111,0x111,0x111,0x111,0x111,0x111,0x111,0x111,0x111,0x111,
0x111,0x111,0x111,0x111,0x111,0x111,0x111,0x111,0x111,0x111,0x111,0x111,0x111,0x111,0x111,0x111,
-0x111,0x111,0x111,0x111,0x111,0x111,0x111,0x111,0xf51,0xf51,0xf51,0x1506,0x1506,0x1506,0x1506,0x1506,
-0x1506,0x1506,0x1506,0x1506,0x1506,0x1506,0x1506,0x114,0xf4e,0xf4e,0xf4e,0xf4e,0x1503,0x114,0x114,0x114,
-0x114,0x114,0x114,0x114,0x114,0x114,0x114,0x114,0xf54,0xf54,0xf54,0xf54,0xf54,0xf54,0xf54,0xf54,
-0xf54,0xf54,0xf54,0xf54,0xf54,0xf54,0xf54,0xf54,0xf54,0xf54,0x190b,0x190b,0x190b,0x190b,0x190b,0x190b,
-0x190b,0x117,0x117,0x117,0x117,0x117,0x117,0x117,0x104d,0x104d,0x104d,0x104d,0x104a,0x104a,0x104a,0x104a,
-0x104a,0x104a,0x104a,0x104a,0x103b,0x103b,0x103b,0x103b,0x103b,0x103b,0x103b,0x103b,0x104a,0x104a,0x1041,0x103e,
-0x11a,0x11a,0x11a,0x1050,0x1050,0x1044,0x1044,0x1044,0x1047,0x1047,0x1047,0x1047,0x1047,0x1047,0x1047,0x1047,
-0x1047,0x1047,0x11a,0x11a,0x11a,0x104d,0x104d,0x104d,0x1053,0x1053,0x1053,0x1053,0x1053,0x1053,0x1053,0x1053,
-0x1053,0x1053,0x1056,0x1056,0x1056,0x1056,0x1056,0x1056,0x1068,0x1068,0x1068,0x1068,0x1068,0x1068,0x1068,0x1068,
-0x1068,0x1068,0x106b,0x106b,0x11d,0x11d,0x11d,0x11d,0x11d,0x11d,0x11d,0x11d,0x11d,0x11d,0x11d,0x11d,
-0x11d,0x11d,0x11d,0x11d,0x11d,0x11d,0x11d,0x11d,0x1092,0x1092,0x1092,0x1092,0x108c,0x1794,0x120,0x120,
-0x120,0x120,0x120,0x120,0x120,0x120,0x1098,0x1098,0x108f,0x108f,0x108f,0x108f,0x108f,0x108f,0x108f,0x108f,
-0x108f,0x108f,0x120,0x120,0x120,0x120,0x120,0x120,0x10b6,0x10b6,0x10b6,0x10b6,0x10b6,0x10b6,0x10b6,0x10aa,
-0x10aa,0x10aa,0x10aa,0x10aa,0x10aa,0x10aa,0x10aa,0x10aa,0x10aa,0x10aa,0x10b0,0x10b3,0x123,0x123,0x123,0x123,
-0x123,0x123,0x123,0x123,0x123,0x123,0x123,0x10ad,0x10c5,0x10c5,0x10c5,0x10c5,0x10c5,0x10c5,0x10c5,0x10c5,
-0x10c5,0x10b9,0x10b9,0x10b9,0x10b9,0x10b9,0x10b9,0x10c2,0x10c2,0x10b9,0x10b9,0x10c2,0x10c2,0x10b9,0x10b9,0x126,
-0x126,0x126,0x126,0x126,0x126,0x126,0x126,0x126,0x10c5,0x10c5,0x10c5,0x10b9,0x10c5,0x10c5,0x10c5,0x10c5,
-0x10c5,0x10c5,0x10c5,0x10c5,0x10b9,0x10c2,0x126,0x126,0x10bf,0x10bf,0x10bf,0x10bf,0x10bf,0x10bf,0x10bf,0x10bf,
-0x10bf,0x10bf,0x126,0x126,0x10bc,0x10c8,0x10c8,0x10c8,0x1512,0x129,0x129,0x129,0x129,0x129,0x129,0x129,
+0x111,0x111,0x111,0x111,0x111,0x111,0x111,0x111,0xf63,0xf63,0xf63,0x1518,0x1518,0x1518,0x1518,0x1518,
+0x1518,0x1518,0x1518,0x1518,0x1518,0x1518,0x1518,0x114,0xf60,0xf60,0xf60,0xf60,0x1515,0x114,0x114,0x114,
+0x114,0x114,0x114,0x114,0x114,0x114,0x114,0x114,0xf66,0xf66,0xf66,0xf66,0xf66,0xf66,0xf66,0xf66,
+0xf66,0xf66,0xf66,0xf66,0xf66,0xf66,0xf66,0xf66,0xf66,0xf66,0x191d,0x191d,0x191d,0x191d,0x191d,0x191d,
+0x191d,0x117,0x117,0x117,0x117,0x117,0x117,0x117,0x1062,0x1062,0x1062,0x1062,0x105f,0x105f,0x105f,0x105f,
+0x105f,0x105f,0x105f,0x105f,0x1050,0x1050,0x1050,0x1050,0x1050,0x1050,0x1050,0x1050,0x105f,0x105f,0x1056,0x1053,
+0x11a,0x11a,0x11a,0x1065,0x1065,0x1059,0x1059,0x1059,0x105c,0x105c,0x105c,0x105c,0x105c,0x105c,0x105c,0x105c,
+0x105c,0x105c,0x11a,0x11a,0x11a,0x1062,0x1062,0x1062,0x1068,0x1068,0x1068,0x1068,0x1068,0x1068,0x1068,0x1068,
+0x1068,0x1068,0x106b,0x106b,0x106b,0x106b,0x106b,0x106b,0x107d,0x107d,0x107d,0x107d,0x107d,0x107d,0x107d,0x107d,
+0x107d,0x107d,0x1080,0x1080,0x11d,0x11d,0x11d,0x11d,0x11d,0x11d,0x11d,0x11d,0x11d,0x11d,0x11d,0x11d,
+0x11d,0x11d,0x11d,0x11d,0x11d,0x11d,0x11d,0x11d,0x10a7,0x10a7,0x10a7,0x10a7,0x10a1,0x17a6,0x120,0x120,
+0x120,0x120,0x120,0x120,0x120,0x120,0x10ad,0x10ad,0x10a4,0x10a4,0x10a4,0x10a4,0x10a4,0x10a4,0x10a4,0x10a4,
+0x10a4,0x10a4,0x120,0x120,0x120,0x120,0x120,0x120,0x10cb,0x10cb,0x10cb,0x10cb,0x10cb,0x10cb,0x10cb,0x10bf,
+0x10bf,0x10bf,0x10bf,0x10bf,0x10bf,0x10bf,0x10bf,0x10bf,0x10bf,0x10bf,0x10c5,0x10c8,0x123,0x123,0x123,0x123,
+0x123,0x123,0x123,0x123,0x123,0x123,0x123,0x10c2,0x10da,0x10da,0x10da,0x10da,0x10da,0x10da,0x10da,0x10da,
+0x10da,0x10ce,0x10ce,0x10ce,0x10ce,0x10ce,0x10ce,0x10d7,0x10d7,0x10ce,0x10ce,0x10d7,0x10d7,0x10ce,0x10ce,0x126,
+0x126,0x126,0x126,0x126,0x126,0x126,0x126,0x126,0x10da,0x10da,0x10da,0x10ce,0x10da,0x10da,0x10da,0x10da,
+0x10da,0x10da,0x10da,0x10da,0x10ce,0x10d7,0x126,0x126,0x10d4,0x10d4,0x10d4,0x10d4,0x10d4,0x10d4,0x10d4,0x10d4,
+0x10d4,0x10d4,0x126,0x126,0x10d1,0x10dd,0x10dd,0x10dd,0x1524,0x129,0x129,0x129,0x129,0x129,0x129,0x129,
0x129,0x129,0x129,0x129,0x129,0x129,0x129,0x129,0x129,0x129,0x129,0x129,0x129,0x129,0x129,0x129,
-0x129,0x129,0x129,0x129,0x129,0x129,0x129,0x129,0x10ce,0x10ce,0x10ce,0x10ce,0x10ce,0x10ce,0x10ce,0x10ce,
-0x10ce,0x10ce,0x10ce,0x10ce,0x10ce,0x10ce,0x10ce,0x10ce,0x10ce,0x10ce,0x10ce,0x10ce,0x10ce,0x10ce,0x10ce,0x10ce,
-0x10ce,0x10ce,0x10ce,0x10ce,0x10ce,0x10d1,0x12c,0x12c,0x10d4,0x10d4,0x10d4,0x10d4,0x10d4,0x10d4,0x10d4,0x10d4,
-0x10d4,0x10d4,0x10d4,0x10d4,0x10d4,0x10d4,0x10d4,0x10d4,0x10d4,0x10d4,0x10d4,0x10d4,0x10d4,0x10d4,0x10d4,0x10d4,
-0x10d4,0x10d4,0x10d4,0x10d4,0x10d4,0x12f,0x12f,0x12f,0x10d7,0x10d7,0x10d7,0x10d7,0x10d7,0x10d7,0x10d7,0x10d7,
-0x10d7,0x10d7,0x10d7,0x10d7,0x10d7,0x10d7,0x10d7,0x10d7,0x10d7,0x132,0x132,0x132,0x132,0x132,0x132,0x132,
-0x132,0x132,0x132,0x132,0x132,0x132,0x132,0x132,0x10dd,0x10dd,0x10dd,0x10dd,0x10dd,0x10dd,0x10dd,0x10dd,
-0x10dd,0x10dd,0x10dd,0x10dd,0x10dd,0x10dd,0x10dd,0x10dd,0x10dd,0x10dd,0x10dd,0x10dd,0x10dd,0x10dd,0x10dd,0x10dd,
-0x10dd,0x10dd,0x135,0x135,0x135,0x135,0x135,0x10da,0x10e0,0x10e0,0x10e0,0x10e0,0x10e0,0x10e0,0x10e0,0x10e0,
-0x10e0,0x10e0,0x10e0,0x10e0,0x138,0x138,0x138,0x138,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,
-0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x13b,0x13b,0x13b,0x13b,
-0x13b,0x13b,0x13b,0x13b,0x13b,0x13b,0x13b,0x13b,0x115b,0x115b,0x115b,0x115b,0x1164,0x115b,0x115b,0x115b,
-0x1164,0x115b,0x115b,0x115b,0x115b,0x1158,0x13e,0x13e,0x1161,0x1161,0x1161,0x1161,0x1161,0x1161,0x1161,0x1167,
-0x1161,0x1167,0x1161,0x1161,0x1161,0x1167,0x1167,0x13e,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,
-0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x141,0x141,
-0x141,0x141,0x141,0x141,0x141,0x141,0x141,0x141,0x1185,0x1185,0x1185,0x1185,0x1185,0x1185,0x1185,0x1185,
-0x1185,0x1185,0x1185,0x1185,0x1185,0x1185,0x1185,0x1185,0x1185,0x1185,0x1185,0x1185,0x1185,0x1182,0x116d,0x1182,
-0x116d,0x116d,0x116d,0x116d,0x116d,0x116d,0x116d,0x144,0x1176,0x117f,0x116d,0x117f,0x117f,0x116d,0x116d,0x116d,
-0x116d,0x116d,0x116d,0x116d,0x116d,0x1182,0x1182,0x1182,0x1182,0x1182,0x1182,0x116d,0x116d,0x1173,0x1173,0x1173,
-0x1173,0x1173,0x1173,0x1173,0x1173,0x144,0x144,0x1170,0x117c,0x117c,0x117c,0x117c,0x117c,0x117c,0x117c,0x117c,
-0x117c,0x117c,0x144,0x144,0x144,0x144,0x144,0x144,0x117c,0x117c,0x117c,0x117c,0x117c,0x117c,0x117c,0x117c,
-0x117c,0x117c,0x144,0x144,0x144,0x144,0x144,0x144,0x1179,0x1179,0x1179,0x1179,0x1179,0x1179,0x1179,0x1188,
-0x118b,0x118b,0x118b,0x118b,0x1179,0x1179,0x144,0x144,0x1551,0x1551,0x1551,0x1551,0x1551,0x1551,0x1551,0x1551,
-0x1551,0x1551,0x1551,0x1551,0x1551,0x1551,0x154e,0x1a73,0x12cf,0x12a8,0x12c6,0x12c6,0x12c6,0x12c6,0x12c6,0x12c6,
-0x12c6,0x12ae,0x12ab,0x12a2,0x12a2,0x12cc,0x12a2,0x12a2,0x12a2,0x12a2,0x12b1,0x148b,0x1491,0x148e,0x148e,0x18d2,
-0x16a7,0x16a7,0x1a40,0x147,0x147,0x147,0x147,0x147,0x11a0,0x11a0,0x11a0,0x11a0,0x11a0,0x11a0,0x11a0,0x11a0,
-0x11a0,0x11a0,0x11a0,0x11a0,0x11a0,0x11a0,0x11a0,0x11a0,0x1197,0x1197,0x119a,0x11a3,0x119d,0x119d,0x119d,0x11a3,
-0x14a,0x14a,0x14a,0x14a,0x14a,0x14a,0x14a,0x14a,0x1293,0x1293,0x1293,0x1293,0x1293,0x1293,0x1293,0x1293,
-0x1293,0x1293,0x1293,0x1293,0x1293,0x1293,0x1293,0x1293,0x1293,0x1293,0x1293,0x1293,0x1293,0x1293,0x1293,0x1293,
-0x1293,0x1293,0x1293,0x1293,0x1293,0x150,0x150,0x150,0x11c1,0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,0x11b8,
-0x11c7,0x11c7,0x11b5,0x11b5,0x11b5,0x11b5,0x153,0x12c3,0x11bb,0x11bb,0x11bb,0x11bb,0x11bb,0x11bb,0x11bb,0x11bb,
-0x11bb,0x11bb,0x153,0x153,0x153,0x153,0x11b5,0x11b5,0x11e5,0x11d9,0x11e5,0x156,0x156,0x156,0x156,0x156,
+0x129,0x129,0x129,0x129,0x129,0x129,0x129,0x129,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,
+0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,
+0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e6,0x12c,0x12c,0x10e9,0x10e9,0x10e9,0x10e9,0x10e9,0x10e9,0x10e9,0x10e9,
+0x10e9,0x10e9,0x10e9,0x10e9,0x10e9,0x10e9,0x10e9,0x10e9,0x10e9,0x10e9,0x10e9,0x10e9,0x10e9,0x10e9,0x10e9,0x10e9,
+0x10e9,0x10e9,0x10e9,0x10e9,0x10e9,0x12f,0x12f,0x12f,0x10ec,0x10ec,0x10ec,0x10ec,0x10ec,0x10ec,0x10ec,0x10ec,
+0x10ec,0x10ec,0x10ec,0x10ec,0x10ec,0x10ec,0x10ec,0x10ec,0x10ec,0x132,0x132,0x132,0x132,0x132,0x132,0x132,
+0x132,0x132,0x132,0x132,0x132,0x132,0x132,0x132,0x10f2,0x10f2,0x10f2,0x10f2,0x10f2,0x10f2,0x10f2,0x10f2,
+0x10f2,0x10f2,0x10f2,0x10f2,0x10f2,0x10f2,0x10f2,0x10f2,0x10f2,0x10f2,0x10f2,0x10f2,0x10f2,0x10f2,0x10f2,0x10f2,
+0x10f2,0x10f2,0x135,0x135,0x135,0x135,0x135,0x10ef,0x10f5,0x10f5,0x10f5,0x10f5,0x10f5,0x10f5,0x10f5,0x10f5,
+0x10f5,0x10f5,0x10f5,0x10f5,0x138,0x138,0x138,0x138,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,
+0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,0x13b,0x13b,0x13b,0x13b,
+0x13b,0x13b,0x13b,0x13b,0x13b,0x13b,0x13b,0x13b,0x1170,0x1170,0x1170,0x1170,0x1179,0x1170,0x1170,0x1170,
+0x1179,0x1170,0x1170,0x1170,0x1170,0x116d,0x13e,0x13e,0x1176,0x1176,0x1176,0x1176,0x1176,0x1176,0x1176,0x117c,
+0x1176,0x117c,0x1176,0x1176,0x1176,0x117c,0x117c,0x13e,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,
+0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x141,0x141,
+0x141,0x141,0x141,0x141,0x141,0x141,0x141,0x141,0x119a,0x119a,0x119a,0x119a,0x119a,0x119a,0x119a,0x119a,
+0x119a,0x119a,0x119a,0x119a,0x119a,0x119a,0x119a,0x119a,0x119a,0x119a,0x119a,0x119a,0x119a,0x1197,0x1182,0x1197,
+0x1182,0x1182,0x1182,0x1182,0x1182,0x1182,0x1182,0x144,0x118b,0x1194,0x1182,0x1194,0x1194,0x1182,0x1182,0x1182,
+0x1182,0x1182,0x1182,0x1182,0x1182,0x1197,0x1197,0x1197,0x1197,0x1197,0x1197,0x1182,0x1182,0x1188,0x1188,0x1188,
+0x1188,0x1188,0x1188,0x1188,0x1188,0x144,0x144,0x1185,0x1191,0x1191,0x1191,0x1191,0x1191,0x1191,0x1191,0x1191,
+0x1191,0x1191,0x144,0x144,0x144,0x144,0x144,0x144,0x1191,0x1191,0x1191,0x1191,0x1191,0x1191,0x1191,0x1191,
+0x1191,0x1191,0x144,0x144,0x144,0x144,0x144,0x144,0x118e,0x118e,0x118e,0x118e,0x118e,0x118e,0x118e,0x119d,
+0x11a0,0x11a0,0x11a0,0x11a0,0x118e,0x118e,0x144,0x144,0x1563,0x1563,0x1563,0x1563,0x1563,0x1563,0x1563,0x1563,
+0x1563,0x1563,0x1563,0x1563,0x1563,0x1563,0x1560,0x1a85,0x12e1,0x12ba,0x12d8,0x12d8,0x12d8,0x12d8,0x12d8,0x12d8,
+0x12d8,0x12c0,0x12bd,0x12b4,0x12b4,0x12de,0x12b4,0x12b4,0x12b4,0x12b4,0x12c3,0x149d,0x14a3,0x14a0,0x14a0,0x18e4,
+0x16b9,0x16b9,0x1a52,0x147,0x147,0x147,0x147,0x147,0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,
+0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,0x11ac,0x11ac,0x11af,0x11b8,0x11b2,0x11b2,0x11b2,0x11b8,
+0x14a,0x14a,0x14a,0x14a,0x14a,0x14a,0x14a,0x14a,0x12a5,0x12a5,0x12a5,0x12a5,0x12a5,0x12a5,0x12a5,0x12a5,
+0x12a5,0x12a5,0x12a5,0x12a5,0x12a5,0x12a5,0x12a5,0x12a5,0x12a5,0x12a5,0x12a5,0x12a5,0x12a5,0x12a5,0x12a5,0x12a5,
+0x12a5,0x12a5,0x12a5,0x12a5,0x12a5,0x150,0x150,0x150,0x11d6,0x11ca,0x11ca,0x11ca,0x11ca,0x11ca,0x11ca,0x11cd,
+0x11dc,0x11dc,0x11ca,0x11ca,0x11ca,0x11ca,0x153,0x12d5,0x11d0,0x11d0,0x11d0,0x11d0,0x11d0,0x11d0,0x11d0,0x11d0,
+0x11d0,0x11d0,0x153,0x153,0x153,0x153,0x11ca,0x11ca,0x11fa,0x11ee,0x11fa,0x156,0x156,0x156,0x156,0x156,
0x156,0x156,0x156,0x156,0x156,0x156,0x156,0x156,0x156,0x156,0x156,0x156,0x156,0x156,0x156,0x156,
-0x156,0x156,0x156,0x11e2,0x11e2,0x11e8,0x11dc,0x11df,0x11fd,0x11fd,0x11fd,0x11f7,0x11f7,0x11ee,0x11f7,0x11f7,
-0x11ee,0x11f7,0x11f7,0x1200,0x11fa,0x11f1,0x159,0x159,0x11f4,0x11f4,0x11f4,0x11f4,0x11f4,0x11f4,0x11f4,0x11f4,
-0x11f4,0x11f4,0x159,0x159,0x159,0x159,0x159,0x159,0x1206,0x1206,0x1206,0x1206,0x1206,0x1206,0x1206,0x15c,
-0x15c,0x15c,0x15c,0x1203,0x1203,0x1203,0x1203,0x1203,0x1203,0x1203,0x1203,0x1203,0x1203,0x1203,0x1203,0x1203,
-0x1203,0x1203,0x1203,0x1203,0x1203,0x1203,0x1203,0x1203,0x1203,0x1203,0x1203,0x1203,0x1203,0x1203,0x1203,0x1203,
-0x15c,0x15c,0x15c,0x15c,0x120f,0x120f,0x120f,0x120f,0x120f,0x120f,0x120f,0x120f,0x120f,0x120f,0x120f,0x120f,
-0x120f,0x120f,0x120f,0x120f,0x120f,0x120f,0x120f,0x120f,0x120f,0x120f,0x15f,0x120c,0x1209,0x1209,0x1209,0x1209,
-0x1209,0x1209,0x1209,0x1209,0x121e,0x121e,0x121e,0x121e,0x121e,0x121e,0x121e,0x121e,0x121e,0x121e,0x121e,0x121e,
-0x121e,0x121e,0x121e,0x121e,0x121e,0x121e,0x121e,0x121e,0x121e,0x121e,0x162,0x162,0x162,0x1218,0x121b,0x121b,
-0x121b,0x121b,0x121b,0x121b,0x1224,0x1224,0x1224,0x1224,0x1224,0x1224,0x1224,0x1224,0x1224,0x1224,0x1224,0x1224,
-0x1224,0x1224,0x1224,0x1224,0x1224,0x1224,0x1224,0x1224,0x1224,0x1224,0x165,0x165,0x1221,0x1221,0x1221,0x1221,
-0x1221,0x1221,0x1221,0x1221,0x122a,0x122a,0x122a,0x122a,0x122a,0x122a,0x122a,0x122a,0x122a,0x122a,0x122a,0x122a,
-0x122a,0x122a,0x122a,0x122a,0x122a,0x122a,0x122a,0x168,0x168,0x168,0x168,0x168,0x1227,0x1227,0x1227,0x1227,
-0x1227,0x1227,0x1227,0x1227,0x1230,0x1230,0x1230,0x1230,0x1230,0x1230,0x1230,0x1230,0x1230,0x1230,0x1230,0x1230,
-0x1230,0x1230,0x1230,0x1230,0x1230,0x1230,0x1230,0x1230,0x1230,0x1230,0x1230,0x1230,0x1230,0x1230,0x1230,0x1230,
-0x1230,0x1230,0x1230,0x16e,0x124b,0x124b,0x1b2a,0x171,0x171,0x171,0x171,0x171,0x171,0x171,0x171,0x171,
-0x171,0x1914,0x171,0x171,0x146a,0x146a,0x146a,0x146a,0x146a,0x146a,0x146a,0x146a,0x146a,0x146a,0x146a,0x146a,
-0x146a,0x146a,0x146a,0x146a,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,
-0x1251,0x1251,0x1251,0x174,0x19e3,0x19e3,0x19e3,0x19e3,0x19e3,0x19e3,0x19e3,0x19e6,0x19e0,0x26a,0x26a,0x26a,
-0x26a,0x26a,0x26a,0x26a,0x1815,0x1815,0x1815,0x1815,0x1815,0x1815,0x1815,0x1815,0x1815,0x1815,0x1815,0x1815,
-0x1815,0x1a64,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,
+0x156,0x156,0x156,0x11f7,0x11f7,0x11fd,0x11f1,0x11f4,0x1212,0x1212,0x1212,0x120c,0x120c,0x1203,0x120c,0x120c,
+0x1203,0x120c,0x120c,0x1215,0x120f,0x1206,0x159,0x159,0x1209,0x1209,0x1209,0x1209,0x1209,0x1209,0x1209,0x1209,
+0x1209,0x1209,0x159,0x159,0x159,0x159,0x159,0x159,0x121b,0x121b,0x121b,0x121b,0x121b,0x121b,0x121b,0x15c,
+0x15c,0x15c,0x15c,0x1218,0x1218,0x1218,0x1218,0x1218,0x1218,0x1218,0x1218,0x1218,0x1218,0x1218,0x1218,0x1218,
+0x1218,0x1218,0x1218,0x1218,0x1218,0x1218,0x1218,0x1218,0x1218,0x1218,0x1218,0x1218,0x1218,0x1218,0x1218,0x1218,
+0x15c,0x15c,0x15c,0x15c,0x1224,0x1224,0x1224,0x1224,0x1224,0x1224,0x1224,0x1224,0x1224,0x1224,0x1224,0x1224,
+0x1224,0x1224,0x1224,0x1224,0x1224,0x1224,0x1224,0x1224,0x1224,0x1224,0x15f,0x1221,0x121e,0x121e,0x121e,0x121e,
+0x121e,0x121e,0x121e,0x121e,0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,
+0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,0x162,0x162,0x162,0x122d,0x1230,0x1230,
+0x1230,0x1230,0x1230,0x1230,0x1239,0x1239,0x1239,0x1239,0x1239,0x1239,0x1239,0x1239,0x1239,0x1239,0x1239,0x1239,
+0x1239,0x1239,0x1239,0x1239,0x1239,0x1239,0x1239,0x1239,0x1239,0x1239,0x165,0x165,0x1236,0x1236,0x1236,0x1236,
+0x1236,0x1236,0x1236,0x1236,0x123f,0x123f,0x123f,0x123f,0x123f,0x123f,0x123f,0x123f,0x123f,0x123f,0x123f,0x123f,
+0x123f,0x123f,0x123f,0x123f,0x123f,0x123f,0x123f,0x168,0x168,0x168,0x168,0x168,0x123c,0x123c,0x123c,0x123c,
+0x123c,0x123c,0x123c,0x123c,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,
+0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,
+0x1245,0x1245,0x1245,0x16e,0x125d,0x125d,0x1b3c,0x171,0x171,0x171,0x171,0x171,0x171,0x171,0x171,0x171,
+0x171,0x1926,0x171,0x171,0x147c,0x147c,0x147c,0x147c,0x147c,0x147c,0x147c,0x147c,0x147c,0x147c,0x147c,0x147c,
+0x147c,0x147c,0x147c,0x147c,0x1827,0x1827,0x1827,0x1827,0x1827,0x1827,0x1827,0x1827,0x1827,0x1827,0x1827,0x1827,
+0x1827,0x1a76,0x174,0x174,0x174,0x174,0x174,0x174,0x174,0x174,0x174,0x174,0x174,0x174,0x174,0x174,
+0x174,0x174,0x174,0x174,0x174,0x174,0x174,0x174,0x174,0x174,0x174,0x174,0x174,0x174,0x174,0x174,
+0x174,0x174,0x174,0x174,0x174,0x174,0x1344,0x1344,0x1344,0x1344,0x1344,0x1344,0x1344,0x1344,0x1344,0x1344,
+0x1344,0x1344,0x1344,0x1344,0x1344,0x1344,0x1344,0x1344,0x1344,0x1344,0x1344,0x1344,0x1344,0x1344,0x1344,0x1344,
+0x12ae,0x13a7,0x13a7,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,
+0x12ab,0x12ab,0x12ab,0x12ab,0x12ab,0x12ab,0x12ab,0x12ab,0x12ab,0x12ab,0x12ab,0x12ab,0x12ab,0x12ab,0x12ab,0x12ab,
+0x12ab,0x12ab,0x13a7,0x13a7,0x13a7,0x13a7,0x13a7,0x13a7,0x13a7,0x13a7,0x13a7,0x182a,0x177,0x177,0x177,0x177,
+0x12a8,0x12a8,0x12a8,0x12a8,0x12a8,0x12a8,0x12a8,0x12a8,0x12a8,0x177,0x177,0x177,0x177,0x177,0x177,0x177,
+0x13cb,0x13cb,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,
+0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,
0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,
-0x177,0x177,0x177,0x177,0x177,0x177,0x1332,0x1332,0x1332,0x1332,0x1332,0x1332,0x1332,0x1332,0x1332,0x1332,
-0x1332,0x1332,0x1332,0x1332,0x1332,0x1332,0x1332,0x1332,0x1332,0x1332,0x1332,0x1332,0x1332,0x1332,0x1332,0x1332,
-0x129c,0x1395,0x1395,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,
-0x1299,0x1299,0x1299,0x1299,0x1299,0x1299,0x1299,0x1299,0x1299,0x1299,0x1299,0x1299,0x1299,0x1299,0x1299,0x1299,
-0x1299,0x1299,0x1395,0x1395,0x1395,0x1395,0x1395,0x1395,0x1395,0x1395,0x1395,0x1818,0x17a,0x17a,0x17a,0x17a,
-0x1296,0x1296,0x1296,0x1296,0x1296,0x1296,0x1296,0x1296,0x1296,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,
-0x13b9,0x13b9,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,
-0x18b7,0x18b7,0x18b7,0x18b7,0x18b7,0x18b7,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,
-0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,
-0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x17a,0x133b,0x133b,0x133b,0x133b,0x133b,0x133b,0x133b,0x133b,
-0x133b,0x133b,0x133b,0x133b,0x133b,0x133b,0x133b,0x133b,0x133b,0x133b,0x133b,0x133b,0x133b,0x133b,0x133b,0x133b,
-0x133b,0x1335,0x1335,0x1335,0x17d,0x17d,0x1338,0x17d,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x133e,0x1347,
-0x1341,0x1341,0x1347,0x1347,0x1347,0x1341,0x1347,0x1341,0x1341,0x1341,0x134a,0x134a,0x180,0x180,0x180,0x180,
-0x180,0x180,0x180,0x180,0x1344,0x1344,0x1344,0x1344,0x183,0x1350,0x1350,0x1350,0x1350,0x1350,0x1350,0x183,
-0x183,0x1350,0x1350,0x1350,0x1350,0x1350,0x1350,0x183,0x183,0x1350,0x1350,0x1350,0x1350,0x1350,0x1350,0x183,
-0x183,0x183,0x183,0x183,0x183,0x183,0x183,0x183,0x1350,0x1350,0x1350,0x1350,0x1350,0x1350,0x1350,0x183,
-0x1350,0x1350,0x1350,0x1350,0x1350,0x1350,0x1350,0x183,0x15ae,0x15ae,0x15ae,0x15ae,0x15ae,0x15ae,0x15ae,0x15ae,
-0x15ae,0x15ae,0x15ae,0x15ae,0x15ae,0x15ae,0x15ae,0x15ae,0x1353,0x1353,0x1353,0x1353,0x1353,0x1353,0x1356,0x1368,
-0x1368,0x135c,0x135c,0x135c,0x135c,0x135c,0x186,0x186,0x186,0x186,0x1359,0x1359,0x1359,0x1359,0x1359,0x1359,
-0x1359,0x1359,0x1359,0x1359,0x1359,0x1359,0x1359,0x1359,0x1359,0x1359,0x135f,0x135f,0x135f,0x135f,0x135f,0x135f,
-0x135f,0x135f,0x135f,0x135f,0x1b30,0x1b33,0x1b33,0x1b2d,0x1b2d,0x1b33,0x186,0x186,0x186,0x186,0x186,0x186,
-0x186,0x186,0x186,0x1521,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,
-0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x189,0x189,0x189,
-0x189,0x189,0x189,0x189,0x136e,0x136e,0x136e,0x136e,0x136e,0x136e,0x136e,0x136e,0x136e,0x136e,0x136e,0x136e,
-0x136e,0x136e,0x136e,0x18c,0x18c,0x136e,0x136e,0x136e,0x136e,0x136e,0x136e,0x136e,0x136e,0x136e,0x136e,0x136e,
-0x136e,0x136e,0x136e,0x1524,0x18c,0x136e,0x136e,0x136e,0x136e,0x136e,0x136e,0x136e,0x136e,0x136e,0x136e,0x136e,
-0x136e,0x136e,0x136e,0x139e,0x18c,0x136e,0x136e,0x136e,0x136e,0x136e,0x136e,0x136e,0x136e,0x136e,0x136e,0x136e,
-0x136e,0x136e,0x136e,0x136e,0x1524,0x1524,0x1524,0x1524,0x1524,0x1524,0x1524,0x1524,0x1524,0x1524,0x1524,0x1524,
-0x1524,0x1524,0x1524,0x1524,0x1524,0x1524,0x1524,0x1524,0x1524,0x1524,0x18c,0x18c,0x18c,0x18c,0x18c,0x18c,
-0x18c,0x18c,0x18c,0x18c,0x13b3,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x1530,0x1530,0x1530,0x1530,0x1530,0x1530,
-0x169e,0x1530,0x1530,0x1530,0x1770,0x1821,0x1821,0x185a,0x185a,0x1a22,0x1acd,0x1acd,0x18f,0x18f,0x18f,0x18f,
-0x18f,0x1b9c,0x1b9c,0x1b9c,0x1530,0x1530,0x1530,0x1530,0x1530,0x1530,0x1530,0x1530,0x1530,0x1530,0x1530,0x169b,
-0x169b,0x18f,0x18f,0x18f,0x1530,0x1530,0x1530,0x1530,0x1821,0x1821,0x1821,0x18bd,0x18bd,0x199e,0x1a22,0x1acd,
-0x1acd,0x18f,0x18f,0x18f,0x1371,0x1371,0x1371,0x1371,0x1371,0x1371,0x1371,0x1371,0x1371,0x1371,0x1371,0x1371,
-0x1371,0x1371,0x1371,0x1371,0x1371,0x1371,0x1371,0x1371,0x192,0x192,0x192,0x192,0x192,0x192,0x192,0x192,
-0x192,0x192,0x192,0x192,0x140d,0x140d,0x140d,0x140d,0x195,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,
-0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,
-0x140d,0x140d,0x140d,0x140d,0x195,0x140d,0x140d,0x195,0x140d,0x195,0x195,0x140d,0x195,0x140d,0x140d,0x140d,
-0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x195,0x140d,0x140d,0x140d,0x140d,0x195,0x140d,0x195,0x140d,
-0x195,0x195,0x195,0x195,0x195,0x195,0x140d,0x195,0x195,0x195,0x195,0x140d,0x195,0x140d,0x195,0x140d,
-0x195,0x140d,0x140d,0x140d,0x195,0x140d,0x140d,0x195,0x140d,0x195,0x195,0x140d,0x195,0x140d,0x195,0x140d,
-0x195,0x140d,0x195,0x140d,0x195,0x140d,0x140d,0x195,0x140d,0x195,0x195,0x140d,0x140d,0x140d,0x140d,0x195,
-0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x195,0x140d,0x140d,0x140d,0x140d,0x195,0x140d,0x140d,0x140d,
-0x140d,0x195,0x140d,0x195,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x195,0x140d,
-0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,
-0x195,0x195,0x195,0x195,0x195,0x140d,0x140d,0x140d,0x195,0x140d,0x140d,0x140d,0x140d,0x140d,0x195,0x140d,
-0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,0x140d,
-0x195,0x195,0x195,0x195,0x195,0x195,0x195,0x195,0x195,0x195,0x195,0x195,0x195,0x195,0x195,0x195,
-0x195,0x195,0x195,0x195,0x195,0x195,0x195,0x195,0x195,0x195,0x195,0x195,0x195,0x195,0x195,0x195,
-0x140a,0x140a,0x195,0x195,0x195,0x195,0x195,0x195,0x195,0x195,0x195,0x195,0x195,0x195,0x195,0x195,
-0x1422,0x1422,0x1422,0x1422,0x1422,0x1422,0x1422,0x1410,0x1410,0x1410,0x1410,0x1410,0x141f,0x1410,0x1413,0x1413,
-0x1410,0x1410,0x1410,0x1416,0x1416,0x198,0x141c,0x141c,0x141c,0x141c,0x141c,0x141c,0x141c,0x141c,0x141c,0x141c,
-0x1419,0x1425,0x1425,0x1425,0x1920,0x191d,0x191d,0x1a6a,0x198,0x198,0x198,0x198,0x198,0x198,0x198,0x198,
-0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,
-0x1431,0x1431,0x1431,0x1431,0x1431,0x1431,0x1431,0x1431,0x1431,0x1431,0x1431,0x142e,0x1428,0x1428,0x142e,0x142e,
-0x1437,0x1437,0x1431,0x1434,0x1434,0x142e,0x142b,0x19b,0x19b,0x19b,0x19b,0x19b,0x19b,0x19b,0x19b,0x19b,
-0x143a,0x143a,0x143a,0x143a,0x143a,0x143a,0x143a,0x143a,0x143a,0x143a,0x143a,0x143a,0x143a,0x143a,0x143a,0x143a,
-0x143a,0x143a,0x143a,0x143a,0x143a,0x143a,0x143a,0x143a,0x19e,0x19e,0x19e,0x19e,0x16f5,0x16f5,0x143a,0x143a,
-0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,
-0x19e,0x19e,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,
-0x1446,0x1446,0x1446,0x1446,0x1446,0x19cb,0x19cb,0x19cb,0x19cb,0x19cb,0x19cb,0x1a1,0x1a1,0x1a1,0x1a1,0x19c5,
-0x1446,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,
-0x19c8,0x19c8,0x19c8,0x19c8,0x19c8,0x19c8,0x19c8,0x19c8,0x1a1,0x1a1,0x1a1,0x1a1,0x1a1,0x1a1,0x1a1,0x1440,
-0x1440,0x1440,0x1440,0x1449,0x1449,0x1449,0x1449,0x1449,0x1449,0x1449,0x1449,0x1449,0x1449,0x1449,0x1449,0x1449,
-0x146a,0x146a,0x146a,0x146a,0x146a,0x146a,0x146a,0x146a,0x146a,0x1a4,0x1a4,0x1a4,0x1a4,0x1a4,0x1a4,0x1a4,
-0x1467,0x1467,0x1467,0x1467,0x1467,0x1467,0x1467,0x1467,0x1467,0x1467,0x1a4,0x1a4,0x1a4,0x1a4,0x1a4,0x1a4,
-0x146d,0x146d,0x146d,0x146d,0x146d,0x146d,0x146d,0x146d,0x1a7,0x1a7,0x1a7,0x1a7,0x1a7,0x1a7,0x1a7,0x1a7,
-0x12c9,0x12c6,0x12c9,0x12a5,0x12c6,0x12cc,0x12cc,0x12cf,0x12cc,0x12cf,0x12d2,0x12c6,0x12cf,0x12cf,0x12c6,0x12c6,
-0x147f,0x147f,0x147f,0x147f,0x147f,0x147f,0x147f,0x147f,0x147f,0x147f,0x147f,0x1470,0x1479,0x1470,0x1479,0x1479,
-0x1470,0x1470,0x1470,0x1470,0x1470,0x1470,0x147c,0x1473,0x19ce,0x1b3f,0x1aa,0x1aa,0x1aa,0x1aa,0x1aa,0x1aa,
-0x1542,0x1542,0x1542,0x1542,0x1542,0x1542,0x1542,0x1542,0x1542,0x1542,0x1542,0x1542,0x1542,0x1542,0x1ad,0x1ad,
-0x153f,0x153f,0x153f,0x153f,0x153f,0x1545,0x1ad,0x1ad,0x1ad,0x1ad,0x1ad,0x1ad,0x1ad,0x1ad,0x1ad,0x1ad,
-0x154b,0x154b,0x154b,0x154b,0x1b0,0x1b0,0x1b0,0x1b0,0x1b0,0x1b0,0x1b0,0x1b0,0x1b0,0x1b0,0x1b0,0x1548,
-0x1b8a,0x1b8a,0x1b8a,0x1b8a,0x1b8a,0x1b8a,0x1b8a,0x1b8a,0x1b8a,0x1b8a,0x1b8a,0x2b5,0x1b8a,0x1b8a,0x1b8a,0x1b8a,
-0x16aa,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,
-0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x1b3,0x1b3,0x1b3,0x1b3,
-0x1a73,0x1b45,0x1b45,0x1b45,0x1b45,0x1b45,0x1b45,0x1b45,0x1b45,0x1b45,0x1b45,0x1b45,0x1b42,0x1b42,0x1b42,0x1b6,
-0x1b6,0x1b6,0x1b6,0x1b6,0x1b6,0x1b6,0x1b6,0x1b6,0x1b6,0x1b6,0x1b6,0x1b6,0x1b6,0x1b6,0x1b6,0x1b6,
-0x1b6,0x1b6,0x1b6,0x1b6,0x1b6,0x1b6,0x1b6,0x1b6,0x1b6,0x1b6,0x1b6,0x1b6,0x1b6,0x1b6,0x1b6,0x1b6,
-0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x1b9,0x1b9,0x1b9,0x1b9,0x1b9,
-0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x1b9,0x1b9,0x1b9,
-0x1b9,0x1b9,0x1b9,0x1b9,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x1b9,0x1b9,
-0x155a,0x1554,0x1557,0x1560,0x1563,0x1563,0x1563,0x1563,0x1563,0x1563,0x1563,0x1563,0x1bc,0x1bc,0x1bc,0x1bc,
-0x1bc,0x1bc,0x1bc,0x1bc,0x154b,0x154b,0x154b,0x154b,0x154b,0x154b,0x154b,0x154b,0x154b,0x154b,0x154b,0x154b,
-0x154b,0x154b,0x154b,0x154b,0x1566,0x1566,0x1566,0x1566,0x1566,0x1566,0x1566,0x1566,0x1566,0x1566,0x1566,0x1566,
-0x1566,0x1566,0x1566,0x1566,0x1566,0x1566,0x1566,0x1566,0x1566,0x1923,0x1923,0x1923,0x1923,0x1bf,0x1bf,0x1bf,
-0x1bf,0x1bf,0x1bf,0x1bf,0x1a25,0x1a25,0x1a25,0x1a25,0x1a25,0x1a25,0x1a25,0x1a25,0x1a25,0x1a25,0x1a25,0x1a25,
-0x1bf,0x1bf,0x1bf,0x1bf,0x1b9f,0x1bf,0x1bf,0x1bf,0x1bf,0x1bf,0x1bf,0x1bf,0x1bf,0x1bf,0x1bf,0x1bf,
-0x1bf,0x1bf,0x1bf,0x1bf,0x170a,0x16ad,0x156f,0x16b3,0x1c2,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,
-0x1578,0x1c2,0x1c2,0x1578,0x1578,0x1c2,0x1c2,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,
-0x1578,0x1578,0x1578,0x1578,0x1578,0x1c2,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1c2,0x1578,0x1578,
-0x1c2,0x1578,0x1578,0x1578,0x1578,0x1578,0x1c2,0x19aa,0x16b0,0x1578,0x1569,0x156f,0x1569,0x156f,0x156f,0x156f,
-0x156f,0x1c2,0x1c2,0x156f,0x156f,0x1c2,0x1c2,0x1572,0x1572,0x1575,0x1c2,0x1c2,0x170d,0x1c2,0x1c2,0x1c2,
-0x1c2,0x1c2,0x1c2,0x1569,0x1c2,0x1c2,0x1c2,0x1c2,0x1c2,0x157b,0x1578,0x1578,0x1578,0x1578,0x156f,0x156f,
-0x1c2,0x1c2,0x156c,0x156c,0x156c,0x156c,0x156c,0x156c,0x156c,0x1c2,0x1c2,0x1c2,0x156c,0x156c,0x156c,0x156c,
-0x156c,0x1c2,0x1c2,0x1c2,0x1c2,0x1c2,0x1c2,0x1c2,0x1c2,0x1c2,0x1c2,0x1c2,0x1590,0x1590,0x1590,0x1590,
-0x1590,0x1590,0x1590,0x1590,0x1590,0x1590,0x1590,0x1590,0x1590,0x1590,0x1590,0x1590,0x1590,0x1590,0x1c5,0x1590,
-0x1590,0x1590,0x1590,0x1590,0x1590,0x1590,0x1590,0x1590,0x1590,0x1590,0x1590,0x1590,0x158a,0x158a,0x158a,0x157e,
-0x157e,0x157e,0x158a,0x158a,0x157e,0x158d,0x1581,0x157e,0x1593,0x1593,0x1587,0x1593,0x1593,0x1584,0x17a3,0x1c5,
-0x15a2,0x15a2,0x15a2,0x1596,0x1596,0x1596,0x1596,0x1596,0x1596,0x1599,0x159c,0x1c8,0x1c8,0x1c8,0x1c8,0x1c8,
-0x159f,0x159f,0x159f,0x159f,0x159f,0x159f,0x159f,0x159f,0x159f,0x159f,0x1c8,0x1c8,0x1c8,0x1c8,0x1c8,0x1c8,
-0x1710,0x1710,0x1710,0x1710,0x15ae,0x15ab,0x19d1,0x19d1,0x1a79,0x1a7c,0x1a76,0x1a76,0x1cb,0x1cb,0x1cb,0x1cb,
-0x173d,0x173d,0x173d,0x173d,0x173d,0x173d,0x173d,0x173d,0x173d,0x173d,0x173d,0x173d,0x173d,0x173d,0x173d,0x173d,
-0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,
-0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,
-0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,
-0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,
-0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,
-0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,
-0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,
-0x15c0,0x15c0,0x15c0,0x15b7,0x15ba,0x15bd,0x15c0,0x1d1,0x1d1,0x1d1,0x1d1,0x1d1,0x1d1,0x1d1,0x1d1,0x1d1,
-0x15cf,0x15cf,0x15cf,0x15cf,0x15cf,0x15c3,0x15c3,0x1d4,0x1d4,0x1d4,0x1d4,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,
-0x15cc,0x15cc,0x16b6,0x15cc,0x15cc,0x15cc,0x15c9,0x1d4,0x1d4,0x1d4,0x1d4,0x1d4,0x1d4,0x1d4,0x1d4,0x1d4,
-0x15d8,0x15d8,0x15d8,0x15d8,0x15d8,0x1d7,0x1d7,0x15d5,0x15d5,0x15d5,0x15d5,0x15d5,0x15d5,0x15d5,0x15d5,0x15d5,
-0x15d2,0x15d2,0x15d2,0x15d2,0x15d2,0x15d2,0x15d2,0x1d7,0x1d7,0x1d7,0x1d7,0x1d7,0x1d7,0x1d7,0x1d7,0x1d7,
-0x15db,0x15ed,0x15ed,0x15e1,0x15ea,0x1da,0x1da,0x1da,0x1da,0x1da,0x1da,0x1da,0x1da,0x1da,0x1da,0x1da,
-0x15e4,0x15e4,0x15e4,0x15e4,0x15e4,0x15e4,0x15e4,0x15e4,0x15e4,0x15e4,0x1da,0x1da,0x1da,0x1da,0x1da,0x1da,
-0x15f3,0x15f3,0x15f3,0x15f3,0x15f3,0x15f3,0x15f3,0x15f3,0x15f3,0x15f3,0x15f3,0x15f3,0x15f3,0x15f3,0x15f3,0x15f3,
-0x15f3,0x15f3,0x15f3,0x15f3,0x15f3,0x15f3,0x15f3,0x15f3,0x15f3,0x15f3,0x15f3,0x15f3,0x15f3,0x15f3,0x15f3,0x1dd,
-0x15f0,0x15f0,0x15f0,0x15f0,0x15f0,0x15f0,0x15f0,0x15f0,0x15f0,0x15f0,0x1dd,0x1dd,0x1dd,0x1dd,0x15f6,0x15f6,
-0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,
-0x15ff,0x15ff,0x15ff,0x15ff,0x15ff,0x15f9,0x1602,0x15ff,0x15ff,0x15ff,0x15ff,0x15ff,0x15ff,0x15ff,0x15ff,0x15ff,
-0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15ff,0x15ff,0x15ff,0x15ff,0x15ff,0x1e0,
-0x1608,0x1608,0x1608,0x1608,0x1608,0x1608,0x1608,0x1608,0x1608,0x1608,0x1608,0x1608,0x1608,0x1608,0x1608,0x1608,
-0x1608,0x1608,0x1608,0x1608,0x1608,0x1608,0x1608,0x1608,0x1608,0x1608,0x1608,0x1608,0x1608,0x1608,0x1608,0x1e3,
-0x1614,0x1614,0x1614,0x1614,0x1614,0x1614,0x1614,0x1614,0x1614,0x1614,0x1614,0x1614,0x1614,0x1614,0x1614,0x1614,
-0x1614,0x1614,0x1614,0x1614,0x1614,0x1614,0x1611,0x1611,0x1611,0x1611,0x1611,0x1e6,0x1e6,0x1e6,0x1e6,0x1e6,
-0x162c,0x162c,0x162f,0x162f,0x1632,0x1623,0x1e9,0x1e9,0x1e9,0x1e9,0x1e9,0x1e9,0x1e9,0x1e9,0x1e9,0x1e9,
-0x1629,0x1629,0x1629,0x1629,0x1629,0x1629,0x1629,0x1629,0x1629,0x1629,0x1e9,0x1623,0x1623,0x1623,0x1623,0x1623,
-0x1623,0x1623,0x1e9,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,
-0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x1e9,0x1e9,0x1e9,0x1e9,0x1e9,0x162c,0x162c,0x162c,
-0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,
-0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x1ec,0x1ec,0x1ec,0x1ec,0x1ec,0x1ec,0x1ec,
-0x1644,0x1644,0x1644,0x1644,0x1644,0x1644,0x1644,0x1644,0x1644,0x1644,0x1644,0x1644,0x1644,0x1644,0x1644,0x1644,
-0x1644,0x1644,0x1ef,0x1ef,0x1ef,0x1ef,0x1ef,0x1ef,0x1ef,0x1641,0x1641,0x1641,0x1641,0x1ef,0x1ef,0x1ef,
-0x165f,0x165f,0x165f,0x165f,0x165f,0x165f,0x165f,0x165f,0x165f,0x165f,0x165f,0x165f,0x165f,0x165f,0x165f,0x1647,
-0x1659,0x1659,0x1647,0x1647,0x1647,0x1647,0x1f5,0x1f5,0x1659,0x1659,0x165c,0x165c,0x1647,0x1647,0x1659,0x164d,
-0x164a,0x1650,0x1662,0x1662,0x1653,0x1653,0x1656,0x1656,0x1656,0x1662,0x1719,0x1719,0x1719,0x1719,0x1719,0x1719,
-0x1719,0x1719,0x1719,0x1719,0x1719,0x1719,0x1719,0x1719,0x1716,0x1716,0x1716,0x1716,0x1713,0x1713,0x1f5,0x1f5,
-0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,
-0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,
-0x1f8,0x1665,0x1665,0x1665,0x1665,0x1665,0x1665,0x1665,0x1665,0x1665,0x1665,0x1665,0x1665,0x1665,0x1665,0x1665,
-0x1665,0x1665,0x1665,0x1665,0x1665,0x1f8,0x1f8,0x1f8,0x1f8,0x1f8,0x1f8,0x1f8,0x1f8,0x1f8,0x1f8,0x1f8,
-0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1fb,0x1fb,0x1fb,0x1fb,
-0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,
-0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,
-0x1668,0x1668,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,
-0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,
-0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1fb,0x1fb,0x1a7f,0x1a7f,0x1fb,0x1fb,
-0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,
-0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,
-0x166b,0x167a,0x1671,0x166e,0x1680,0x1680,0x1674,0x1680,0x1fe,0x1fe,0x1fe,0x1fe,0x1fe,0x1fe,0x1fe,0x1fe,
-0x1677,0x1677,0x1677,0x1677,0x1677,0x1677,0x1677,0x1677,0x1677,0x1677,0x1fe,0x1fe,0x1fe,0x1fe,0x1fe,0x1fe,
-0x1686,0x1686,0x1686,0x1686,0x1686,0x1686,0x1686,0x1686,0x1686,0x1686,0x1683,0x1683,0x1683,0x1683,0x1683,0x1683,
-0x1683,0x1683,0x1683,0x201,0x201,0x201,0x201,0x201,0x201,0x201,0x201,0x201,0x201,0x201,0x201,0x168c,
-0x172e,0x172e,0x172e,0x172e,0x172e,0x172e,0x172e,0x172e,0x172e,0x172e,0x172e,0x172e,0x172e,0x172e,0x172e,0x172e,
-0x172e,0x172e,0x172e,0x172e,0x172e,0x172e,0x172e,0x172e,0x172e,0x172e,0x1926,0x204,0x204,0x171c,0x171c,0x171c,
-0x1728,0x1728,0x171c,0x171c,0x171c,0x171c,0x172b,0x171c,0x171c,0x171c,0x171c,0x171f,0x204,0x204,0x204,0x204,
-0x1725,0x1725,0x1725,0x1725,0x1725,0x1725,0x1725,0x1725,0x1725,0x1725,0x1722,0x1722,0x1731,0x1731,0x1731,0x1722,
-0x1734,0x1734,0x1734,0x1734,0x1734,0x1734,0x1734,0x207,0x207,0x207,0x207,0x207,0x207,0x207,0x207,0x207,
-0x207,0x207,0x207,0x207,0x207,0x207,0x207,0x207,0x207,0x207,0x207,0x207,0x207,0x207,0x207,0x207,
-0x207,0x207,0x207,0x207,0x207,0x207,0x207,0x207,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,
-0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x20d,0x1746,0x1746,0x20d,0x20d,
-0x20d,0x20d,0x20d,0x1743,0x1743,0x1743,0x1743,0x1743,0x1749,0x1749,0x1749,0x1749,0x1749,0x1749,0x1749,0x210,
-0x1749,0x210,0x1749,0x1749,0x1749,0x1749,0x210,0x1749,0x1749,0x1749,0x1749,0x1749,0x1749,0x1749,0x1749,0x1749,
-0x1749,0x1749,0x1749,0x1749,0x1749,0x1749,0x210,0x1749,0x1749,0x1749,0x1749,0x1749,0x1749,0x1749,0x1749,0x1749,
-0x1749,0x174c,0x210,0x210,0x210,0x210,0x210,0x210,0x15a5,0x15a5,0x15a5,0x15a5,0x15a5,0x15a5,0x15a5,0x15a5,
-0x15a5,0x15a5,0x15a5,0x15a5,0x15a5,0x15a5,0x15a5,0x15a5,0x1755,0x1755,0x1755,0x1755,0x1755,0x1755,0x1755,0x1755,
-0x1755,0x1755,0x1755,0x1755,0x1755,0x1755,0x1755,0x1755,0x1755,0x1755,0x1755,0x213,0x213,0x213,0x213,0x213,
-0x213,0x213,0x213,0x213,0x213,0x213,0x213,0x213,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,
-0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x213,0x213,0x213,0x213,0x213,
-0x213,0x213,0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,0x175b,0x175b,0x175b,0x175b,0x1758,0x175b,0x175b,0x175e,
-0x1761,0x175e,0x175e,0x175b,0x216,0x216,0x216,0x216,0x216,0x216,0x216,0x216,0x216,0x216,0x216,0x216,
-0x216,0x216,0x216,0x1758,0x1758,0x1758,0x1758,0x1758,0x17b8,0x17b8,0x17b8,0x17b8,0x17af,0x17af,0x17af,0x17a9,
-0x17ac,0x17ac,0x17ac,0x19d4,0x219,0x219,0x219,0x219,0x17b5,0x17b5,0x17b5,0x17b5,0x17b5,0x17b5,0x17b5,0x17b5,
-0x17b5,0x17b5,0x219,0x219,0x219,0x219,0x17b2,0x17b2,0x17d3,0x17d3,0x17d3,0x17d3,0x17d3,0x17d3,0x17d3,0x17d3,
-0x17d3,0x21c,0x17d3,0x17d3,0x17d3,0x17d3,0x17d3,0x17d3,0x17d3,0x17d3,0x17d3,0x17d3,0x17d3,0x17d3,0x17d3,0x17d3,
-0x17d3,0x17d3,0x17d3,0x17d3,0x17d3,0x17d3,0x17d3,0x17d3,0x17d3,0x17d3,0x17d3,0x17d0,0x17be,0x17be,0x17be,0x17be,
-0x17be,0x17be,0x17be,0x21c,0x17be,0x17be,0x17be,0x17be,0x17be,0x17be,0x17d0,0x17c1,0x17d3,0x17d6,0x17d6,0x17ca,
-0x17c7,0x17c7,0x21c,0x21c,0x21c,0x21c,0x21c,0x21c,0x21c,0x21c,0x21c,0x21c,0x17cd,0x17cd,0x17cd,0x17cd,
-0x17cd,0x17cd,0x17cd,0x17cd,0x17cd,0x17cd,0x17c4,0x17c4,0x17c4,0x17c4,0x17c4,0x17c4,0x17c4,0x17c4,0x17c4,0x17c4,
-0x17c4,0x17c4,0x17c4,0x17c4,0x17c4,0x21c,0x21c,0x21c,0x17e2,0x17e5,0x17eb,0x17eb,0x17eb,0x17eb,0x17eb,0x17eb,
-0x17eb,0x17eb,0x17eb,0x17eb,0x17eb,0x17eb,0x17eb,0x17eb,0x17d9,0x17d9,0x17d9,0x17d9,0x17d9,0x17d9,0x17d9,0x17d9,
-0x17d9,0x21f,0x21f,0x21f,0x21f,0x21f,0x21f,0x21f,0x1944,0x1944,0x1944,0x1944,0x1944,0x1944,0x1944,0x1944,
-0x1944,0x1944,0x1944,0x1944,0x1944,0x1944,0x1944,0x1944,0x17dc,0x17dc,0x17dc,0x17dc,0x17dc,0x17dc,0x17dc,0x222,
-0x17dc,0x17dc,0x17dc,0x17dc,0x17dc,0x17dc,0x17dc,0x17dc,0x17dc,0x17dc,0x17dc,0x17dc,0x17dc,0x17dc,0x17dc,0x17dc,
-0x17dc,0x222,0x222,0x17dc,0x17dc,0x17dc,0x17dc,0x17dc,0x182a,0x18c6,0x1a2e,0x1a31,0x1ad9,0x225,0x225,0x225,
-0x225,0x225,0x225,0x225,0x225,0x225,0x225,0x225,0x1ad6,0x1ad6,0x225,0x225,0x225,0x225,0x225,0x225,
-0x225,0x225,0x225,0x225,0x225,0x225,0x225,0x225,0x17eb,0x17eb,0x17eb,0x17eb,0x17eb,0x17eb,0x17eb,0x17eb,
-0x17eb,0x17eb,0x17eb,0x17eb,0x17eb,0x17eb,0x17eb,0x17eb,0x228,0x228,0x17df,0x17df,0x17df,0x17df,0x17df,0x17df,
-0x17df,0x17df,0x17df,0x17df,0x17df,0x17df,0x17df,0x17df,0x228,0x17e8,0x17df,0x17df,0x17df,0x17df,0x17df,0x17df,
-0x17df,0x17e8,0x17df,0x17df,0x17e8,0x17df,0x17df,0x228,0x228,0x228,0x228,0x228,0x228,0x228,0x228,0x228,
-0x17ee,0x17ee,0x17ee,0x17ee,0x17ee,0x17ee,0x17ee,0x17ee,0x17ee,0x17ee,0x17ee,0x17ee,0x17ee,0x22b,0x22b,0x22b,
+0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x177,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,
+0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,
+0x134d,0x1347,0x1347,0x1347,0x17a,0x17a,0x134a,0x17a,0x135f,0x135f,0x135f,0x135f,0x135f,0x135f,0x1350,0x1359,
+0x1353,0x1353,0x1359,0x1359,0x1359,0x1353,0x1359,0x1353,0x1353,0x1353,0x135c,0x135c,0x17d,0x17d,0x17d,0x17d,
+0x17d,0x17d,0x17d,0x17d,0x1356,0x1356,0x1356,0x1356,0x180,0x1362,0x1362,0x1362,0x1362,0x1362,0x1362,0x180,
+0x180,0x1362,0x1362,0x1362,0x1362,0x1362,0x1362,0x180,0x180,0x1362,0x1362,0x1362,0x1362,0x1362,0x1362,0x180,
+0x180,0x180,0x180,0x180,0x180,0x180,0x180,0x180,0x1362,0x1362,0x1362,0x1362,0x1362,0x1362,0x1362,0x180,
+0x1362,0x1362,0x1362,0x1362,0x1362,0x1362,0x1362,0x180,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,
+0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x1365,0x1365,0x1365,0x1365,0x1365,0x1365,0x1368,0x137a,
+0x137a,0x136e,0x136e,0x136e,0x136e,0x136e,0x183,0x183,0x183,0x183,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,
+0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x1371,0x1371,0x1371,0x1371,0x1371,0x1371,
+0x1371,0x1371,0x1371,0x1371,0x1b42,0x1b45,0x1b45,0x1b3f,0x1b3f,0x1b45,0x183,0x183,0x183,0x183,0x183,0x183,
+0x183,0x183,0x183,0x1533,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,
+0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x186,0x186,0x186,
+0x186,0x186,0x186,0x186,0x1380,0x1380,0x1380,0x1380,0x1380,0x1380,0x1380,0x1380,0x1380,0x1380,0x1380,0x1380,
+0x1380,0x1380,0x1380,0x189,0x189,0x1380,0x1380,0x1380,0x1380,0x1380,0x1380,0x1380,0x1380,0x1380,0x1380,0x1380,
+0x1380,0x1380,0x1380,0x1536,0x189,0x1380,0x1380,0x1380,0x1380,0x1380,0x1380,0x1380,0x1380,0x1380,0x1380,0x1380,
+0x1380,0x1380,0x1380,0x13b0,0x189,0x1380,0x1380,0x1380,0x1380,0x1380,0x1380,0x1380,0x1380,0x1380,0x1380,0x1380,
+0x1380,0x1380,0x1380,0x1380,0x1536,0x1536,0x1536,0x1536,0x1536,0x1536,0x1536,0x1536,0x1536,0x1536,0x1536,0x1536,
+0x1536,0x1536,0x1536,0x1536,0x1536,0x1536,0x1536,0x1536,0x1536,0x1536,0x189,0x189,0x189,0x189,0x189,0x189,
+0x189,0x189,0x189,0x189,0x13c5,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,0x1542,0x1542,0x1542,0x1542,0x1542,0x1542,
+0x16b0,0x1542,0x1542,0x1542,0x1782,0x1833,0x1833,0x186c,0x186c,0x1a34,0x1adf,0x1adf,0x18c,0x18c,0x18c,0x18c,
+0x1c2c,0x1bae,0x1bae,0x1bae,0x1542,0x1542,0x1542,0x1542,0x1542,0x1542,0x1542,0x1542,0x1542,0x1542,0x1542,0x16ad,
+0x16ad,0x18c,0x18c,0x18c,0x1542,0x1542,0x1542,0x1542,0x1833,0x1833,0x1833,0x18cf,0x18cf,0x19b0,0x1a34,0x1adf,
+0x1adf,0x18c,0x18c,0x18c,0x1383,0x1383,0x1383,0x1383,0x1383,0x1383,0x1383,0x1383,0x1383,0x1383,0x1383,0x1383,
+0x1383,0x1383,0x1383,0x1383,0x1383,0x1383,0x1383,0x1383,0x1bd2,0x1bd2,0x1bd2,0x18f,0x18f,0x18f,0x18f,0x1bd2,
+0x1bd2,0x1bd2,0x1bd2,0x1bd2,0x141f,0x141f,0x141f,0x141f,0x192,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,
+0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,
+0x141f,0x141f,0x141f,0x141f,0x192,0x141f,0x141f,0x192,0x141f,0x192,0x192,0x141f,0x192,0x141f,0x141f,0x141f,
+0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x192,0x141f,0x141f,0x141f,0x141f,0x192,0x141f,0x192,0x141f,
+0x192,0x192,0x192,0x192,0x192,0x192,0x141f,0x192,0x192,0x192,0x192,0x141f,0x192,0x141f,0x192,0x141f,
+0x192,0x141f,0x141f,0x141f,0x192,0x141f,0x141f,0x192,0x141f,0x192,0x192,0x141f,0x192,0x141f,0x192,0x141f,
+0x192,0x141f,0x192,0x141f,0x192,0x141f,0x141f,0x192,0x141f,0x192,0x192,0x141f,0x141f,0x141f,0x141f,0x192,
+0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x192,0x141f,0x141f,0x141f,0x141f,0x192,0x141f,0x141f,0x141f,
+0x141f,0x192,0x141f,0x192,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x192,0x141f,
+0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,
+0x192,0x192,0x192,0x192,0x192,0x141f,0x141f,0x141f,0x192,0x141f,0x141f,0x141f,0x141f,0x141f,0x192,0x141f,
+0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,0x141f,
+0x192,0x192,0x192,0x192,0x192,0x192,0x192,0x192,0x192,0x192,0x192,0x192,0x192,0x192,0x192,0x192,
+0x192,0x192,0x192,0x192,0x192,0x192,0x192,0x192,0x192,0x192,0x192,0x192,0x192,0x192,0x192,0x192,
+0x141c,0x141c,0x192,0x192,0x192,0x192,0x192,0x192,0x192,0x192,0x192,0x192,0x192,0x192,0x192,0x192,
+0x1434,0x1434,0x1434,0x1434,0x1434,0x1434,0x1434,0x1422,0x1422,0x1422,0x1422,0x1422,0x1431,0x1422,0x1425,0x1425,
+0x1422,0x1422,0x1422,0x1428,0x1428,0x195,0x142e,0x142e,0x142e,0x142e,0x142e,0x142e,0x142e,0x142e,0x142e,0x142e,
+0x142b,0x1437,0x1437,0x1437,0x1932,0x192f,0x192f,0x1a7c,0x195,0x195,0x195,0x195,0x195,0x195,0x195,0x195,
+0x15d2,0x15d2,0x15d2,0x15d2,0x15d2,0x15d2,0x15d2,0x15d2,0x15d2,0x15d2,0x15d2,0x15d2,0x15d2,0x15d2,0x15d2,0x15d2,
+0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1440,0x143a,0x143a,0x1440,0x1440,
+0x1449,0x1449,0x1443,0x1446,0x1446,0x1440,0x143d,0x198,0x198,0x198,0x198,0x198,0x198,0x198,0x198,0x198,
+0x144c,0x144c,0x144c,0x144c,0x144c,0x144c,0x144c,0x144c,0x144c,0x144c,0x144c,0x144c,0x144c,0x144c,0x144c,0x144c,
+0x144c,0x144c,0x144c,0x144c,0x144c,0x144c,0x144c,0x144c,0x19b,0x19b,0x19b,0x19b,0x1707,0x1707,0x144c,0x144c,
+0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,
+0x19b,0x19b,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,
+0x1458,0x1458,0x1458,0x1458,0x1458,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x19e,0x19e,0x19e,0x19e,0x19d7,
+0x1458,0x1455,0x1455,0x1455,0x1455,0x1455,0x1455,0x1455,0x1455,0x1455,0x1455,0x1455,0x1455,0x1455,0x1455,0x1455,
+0x19da,0x19da,0x19da,0x19da,0x19da,0x19da,0x19da,0x19da,0x19e,0x19e,0x19e,0x19e,0x19e,0x19e,0x19e,0x1452,
+0x1452,0x1452,0x1452,0x145b,0x145b,0x145b,0x145b,0x145b,0x145b,0x145b,0x145b,0x145b,0x145b,0x145b,0x145b,0x145b,
+0x147c,0x147c,0x147c,0x147c,0x147c,0x147c,0x147c,0x147c,0x147c,0x1a1,0x1a1,0x1a1,0x1a1,0x1a1,0x1a1,0x1a1,
+0x1479,0x1479,0x1479,0x1479,0x1479,0x1479,0x1479,0x1479,0x1479,0x1479,0x1a1,0x1a1,0x1a1,0x1a1,0x1a1,0x1a1,
+0x147f,0x147f,0x147f,0x147f,0x147f,0x147f,0x147f,0x147f,0x1a4,0x1a4,0x1a4,0x1a4,0x1a4,0x1a4,0x1a4,0x1a4,
+0x12db,0x12d8,0x12db,0x12b7,0x12d8,0x12de,0x12de,0x12e1,0x12de,0x12e1,0x12e4,0x12d8,0x12e1,0x12e1,0x12d8,0x12d8,
+0x1491,0x1491,0x1491,0x1491,0x1491,0x1491,0x1491,0x1491,0x1491,0x1491,0x1491,0x1482,0x148b,0x1482,0x148b,0x148b,
+0x1482,0x1482,0x1482,0x1482,0x1482,0x1482,0x148e,0x1485,0x19e0,0x1b51,0x1a7,0x1a7,0x1a7,0x1a7,0x1a7,0x1a7,
+0x1554,0x1554,0x1554,0x1554,0x1554,0x1554,0x1554,0x1554,0x1554,0x1554,0x1554,0x1554,0x1554,0x1554,0x1aa,0x1aa,
+0x1551,0x1551,0x1551,0x1551,0x1551,0x1557,0x1aa,0x1aa,0x1aa,0x1aa,0x1aa,0x1aa,0x1aa,0x1aa,0x1aa,0x1aa,
+0x155d,0x155d,0x155d,0x155d,0x1ad,0x1ad,0x1ad,0x1ad,0x1ad,0x1ad,0x1ad,0x1ad,0x1ad,0x1ad,0x1ad,0x155a,
+0x1b9c,0x1b9c,0x1b9c,0x1b9c,0x1b9c,0x1b9c,0x1b9c,0x1b9c,0x1b9c,0x1b9c,0x1b9c,0x2b2,0x1b9c,0x1b9c,0x1b9c,0x1b9c,
+0x16bc,0x16b3,0x16b3,0x16b3,0x16b3,0x16b3,0x16b3,0x16b3,0x16b3,0x16b3,0x16b3,0x16b3,0x16b3,0x16b3,0x16b3,0x16b3,
+0x16b3,0x16b3,0x16b3,0x16b3,0x16b3,0x16b3,0x16b3,0x16b3,0x16b3,0x16b3,0x16b3,0x16b3,0x1b0,0x1b0,0x1b0,0x1b0,
+0x1a85,0x1b57,0x1b57,0x1b57,0x1b57,0x1b57,0x1b57,0x1b57,0x1b57,0x1b57,0x1b57,0x1b57,0x1b54,0x1b54,0x1b54,0x1b3,
+0x1b3,0x1b3,0x1b3,0x1b3,0x1b3,0x1b3,0x1b3,0x1b3,0x1b3,0x1b3,0x1b3,0x1b3,0x1b3,0x1b3,0x1b3,0x1b3,
+0x1b3,0x1b3,0x1b3,0x1b3,0x1b3,0x1b3,0x1b3,0x1b3,0x1b3,0x1b3,0x1b3,0x1b3,0x1b3,0x1b3,0x1b3,0x1b3,
+0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x1b6,0x1b6,0x1b6,0x1b6,0x1b6,
+0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x1b6,0x1b6,0x1b6,
+0x1b6,0x1b6,0x1b6,0x1b6,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x1b6,0x1b6,
+0x156c,0x1566,0x1569,0x1572,0x1575,0x1575,0x1575,0x1575,0x1575,0x1575,0x1575,0x1575,0x1b9,0x1b9,0x1b9,0x1b9,
+0x1b9,0x1b9,0x1b9,0x1b9,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,
+0x155d,0x155d,0x155d,0x155d,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,
+0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1935,0x1935,0x1935,0x1935,0x1bd5,0x1bc,0x1bc,
+0x1bc,0x1bc,0x1bc,0x1bc,0x1a37,0x1a37,0x1a37,0x1a37,0x1a37,0x1a37,0x1a37,0x1a37,0x1a37,0x1a37,0x1a37,0x1a37,
+0x1bc,0x1bc,0x1bc,0x1bc,0x1bb1,0x1bc,0x1bc,0x1bc,0x1bc,0x1bc,0x1bc,0x1bc,0x1bc,0x1bc,0x1bc,0x1bc,
+0x1bc,0x1bc,0x1bc,0x1bc,0x171c,0x16bf,0x1581,0x16c5,0x1bf,0x158a,0x158a,0x158a,0x158a,0x158a,0x158a,0x158a,
+0x158a,0x1bf,0x1bf,0x158a,0x158a,0x1bf,0x1bf,0x158a,0x158a,0x158a,0x158a,0x158a,0x158a,0x158a,0x158a,0x158a,
+0x158a,0x158a,0x158a,0x158a,0x158a,0x1bf,0x158a,0x158a,0x158a,0x158a,0x158a,0x158a,0x158a,0x1bf,0x158a,0x158a,
+0x1bf,0x158a,0x158a,0x158a,0x158a,0x158a,0x1bf,0x19bc,0x16c2,0x158a,0x157b,0x1581,0x157b,0x1581,0x1581,0x1581,
+0x1581,0x1bf,0x1bf,0x1581,0x1581,0x1bf,0x1bf,0x1584,0x1584,0x1587,0x1bf,0x1bf,0x171f,0x1bf,0x1bf,0x1bf,
+0x1bf,0x1bf,0x1bf,0x157b,0x1bf,0x1bf,0x1bf,0x1bf,0x1bf,0x158d,0x158a,0x158a,0x158a,0x158a,0x1581,0x1581,
+0x1bf,0x1bf,0x157e,0x157e,0x157e,0x157e,0x157e,0x157e,0x157e,0x1bf,0x1bf,0x1bf,0x157e,0x157e,0x157e,0x157e,
+0x157e,0x1bf,0x1bf,0x1bf,0x1bf,0x1bf,0x1bf,0x1bf,0x1bf,0x1bf,0x1bf,0x1bf,0x15a2,0x15a2,0x15a2,0x15a2,
+0x15a2,0x15a2,0x15a2,0x15a2,0x15a2,0x15a2,0x15a2,0x15a2,0x15a2,0x15a2,0x15a2,0x15a2,0x15a2,0x15a2,0x1c2,0x15a2,
+0x15a2,0x15a2,0x15a2,0x15a2,0x15a2,0x15a2,0x15a2,0x15a2,0x15a2,0x15a2,0x15a2,0x15a2,0x15b4,0x15b4,0x15b4,0x15a8,
+0x15a8,0x15a8,0x15a8,0x15a8,0x15a8,0x15ab,0x15ae,0x1c5,0x1c5,0x1c5,0x1c5,0x1c5,0x15b1,0x15b1,0x15b1,0x15b1,
+0x15b1,0x15b1,0x15b1,0x15b1,0x15b1,0x15b1,0x1c5,0x1c5,0x1c5,0x1c5,0x1c5,0x1c5,0x1722,0x1722,0x1722,0x1722,
+0x15c0,0x15bd,0x19e3,0x19e3,0x1a8b,0x1a8e,0x1a88,0x1a88,0x1c8,0x1c8,0x1c8,0x1c8,0x174f,0x174f,0x174f,0x174f,
+0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,0x15c6,0x15c6,0x15c6,0x15c6,
+0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,
+0x15c6,0x15c6,0x15c6,0x1cb,0x1cb,0x1cb,0x1cb,0x1cb,0x1cb,0x1cb,0x1cb,0x1cb,0x15c6,0x15c6,0x15c6,0x15c6,
+0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,
+0x15c6,0x15c6,0x1cb,0x1cb,0x1cb,0x1cb,0x1cb,0x1cb,0x1cb,0x1cb,0x1cb,0x1cb,0x15c6,0x15c6,0x15c6,0x15c6,
+0x15c6,0x15c6,0x15c6,0x15c6,0x1cb,0x1cb,0x1cb,0x1cb,0x1cb,0x1cb,0x1cb,0x1cb,0x1cb,0x1cb,0x1cb,0x1cb,
+0x1cb,0x1cb,0x1cb,0x1cb,0x1cb,0x1cb,0x1cb,0x1cb,0x1cb,0x1cb,0x1cb,0x1cb,0x15d2,0x15d2,0x15d2,0x15d2,
+0x15d2,0x15d2,0x15d2,0x15d2,0x15d2,0x15d2,0x15d2,0x15d2,0x15d2,0x15d2,0x15d2,0x15d2,0x15d2,0x15d2,0x15d2,0x15c9,
+0x15cc,0x15cf,0x15d2,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,0x1ce,0x15e1,0x15e1,0x15e1,0x15e1,
+0x15e1,0x15d5,0x15d5,0x1d1,0x1d1,0x1d1,0x1d1,0x15d8,0x15d8,0x15d8,0x15d8,0x15d8,0x15de,0x15de,0x16c8,0x15de,
+0x15de,0x15de,0x15db,0x1d1,0x1d1,0x1d1,0x1d1,0x1d1,0x1d1,0x1d1,0x1d1,0x1d1,0x15ea,0x15ea,0x15ea,0x15ea,
+0x15ea,0x1d4,0x1d4,0x15e7,0x15e7,0x15e7,0x15e7,0x15e7,0x15e7,0x15e7,0x15e7,0x15e7,0x15e4,0x15e4,0x15e4,0x15e4,
+0x15e4,0x15e4,0x15e4,0x1d4,0x1d4,0x1d4,0x1d4,0x1d4,0x1d4,0x1d4,0x1d4,0x1d4,0x15ed,0x15ff,0x15ff,0x15f3,
+0x15fc,0x1d7,0x1d7,0x1d7,0x1d7,0x1d7,0x1d7,0x1d7,0x1d7,0x1d7,0x1d7,0x1d7,0x15f6,0x15f6,0x15f6,0x15f6,
+0x15f6,0x15f6,0x15f6,0x15f6,0x15f6,0x15f6,0x1d7,0x1d7,0x1d7,0x1d7,0x1d7,0x1d7,0x1605,0x1605,0x1605,0x1605,
+0x1605,0x1605,0x1605,0x1605,0x1605,0x1605,0x1605,0x1605,0x1605,0x1605,0x1605,0x1605,0x1605,0x1605,0x1605,0x1605,
+0x1605,0x1605,0x1605,0x1605,0x1605,0x1605,0x1605,0x1605,0x1605,0x1605,0x1605,0x1da,0x1602,0x1602,0x1602,0x1602,
+0x1602,0x1602,0x1602,0x1602,0x1602,0x1602,0x1da,0x1da,0x1da,0x1da,0x1608,0x1608,0x1b8d,0x1b8d,0x1b8d,0x1b8d,
+0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1611,0x1611,0x1611,0x1611,
+0x1611,0x160b,0x1614,0x1611,0x1611,0x1611,0x1611,0x1611,0x1611,0x1611,0x1611,0x1611,0x160e,0x160e,0x160e,0x160e,
+0x160e,0x160e,0x160e,0x160e,0x160e,0x160e,0x1611,0x1611,0x1611,0x1611,0x1611,0x1dd,0x161a,0x161a,0x161a,0x161a,
+0x161a,0x161a,0x161a,0x161a,0x161a,0x161a,0x161a,0x161a,0x161a,0x161a,0x161a,0x161a,0x161a,0x161a,0x161a,0x161a,
+0x161a,0x161a,0x161a,0x161a,0x161a,0x161a,0x161a,0x161a,0x161a,0x161a,0x161a,0x1e0,0x1626,0x1626,0x1626,0x1626,
+0x1626,0x1626,0x1626,0x1626,0x1626,0x1626,0x1626,0x1626,0x1626,0x1626,0x1626,0x1626,0x1626,0x1626,0x1626,0x1626,
+0x1626,0x1626,0x1623,0x1623,0x1623,0x1623,0x1623,0x1e3,0x1e3,0x1e3,0x1e3,0x1e3,0x163e,0x163e,0x1641,0x1641,
+0x1644,0x1635,0x1e6,0x1e6,0x1e6,0x1e6,0x1e6,0x1e6,0x1e6,0x1e6,0x1e6,0x1e6,0x163b,0x163b,0x163b,0x163b,
+0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x1e6,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x1e6,0x163e,
+0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,
+0x163e,0x163e,0x163e,0x163e,0x1e6,0x1e6,0x1e6,0x1e6,0x1e6,0x163e,0x163e,0x163e,0x164d,0x164d,0x164d,0x164d,
+0x164d,0x164d,0x164d,0x164d,0x164d,0x164d,0x164d,0x164d,0x164d,0x164d,0x164d,0x164d,0x164d,0x164d,0x164d,0x164d,
+0x164d,0x164d,0x164d,0x164d,0x164d,0x1e9,0x1e9,0x1e9,0x1e9,0x1e9,0x1e9,0x1e9,0x1656,0x1656,0x1656,0x1656,
+0x1656,0x1656,0x1656,0x1656,0x1656,0x1656,0x1656,0x1656,0x1656,0x1656,0x1656,0x1656,0x1656,0x1656,0x1ec,0x1ec,
+0x1ec,0x1ec,0x1ec,0x1ec,0x1ec,0x1653,0x1653,0x1653,0x1653,0x1ec,0x1ec,0x1ec,0x1671,0x1671,0x1671,0x1671,
+0x1671,0x1671,0x1671,0x1671,0x1671,0x1671,0x1671,0x1671,0x1671,0x1671,0x1671,0x1659,0x166b,0x166b,0x1659,0x1659,
+0x1659,0x1659,0x1f2,0x1f2,0x166b,0x166b,0x166e,0x166e,0x1659,0x1659,0x166b,0x165f,0x165c,0x1662,0x1674,0x1674,
+0x1665,0x1665,0x1668,0x1668,0x1668,0x1674,0x172b,0x172b,0x172b,0x172b,0x172b,0x172b,0x172b,0x172b,0x172b,0x172b,
+0x172b,0x172b,0x172b,0x172b,0x1728,0x1728,0x1728,0x1728,0x1725,0x1725,0x1f2,0x1f2,0x1f2,0x1f2,0x1f2,0x1f2,
+0x1f2,0x1f2,0x1f2,0x1f2,0x1f2,0x1f2,0x1f2,0x1f2,0x1f2,0x1f2,0x1f2,0x1f2,0x1f2,0x1f2,0x1f2,0x1f2,
+0x1f2,0x1f2,0x1f2,0x1f2,0x1f2,0x1f2,0x1f2,0x1f2,0x1f2,0x1f2,0x1f2,0x1f2,0x1f5,0x1677,0x1677,0x1677,
+0x1677,0x1677,0x1677,0x1677,0x1677,0x1677,0x1677,0x1677,0x1677,0x1677,0x1677,0x1677,0x1677,0x1677,0x1677,0x1677,
+0x1677,0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,0x1f5,0x167a,0x167a,0x167a,0x167a,
+0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x1f8,0x1f8,0x1f8,0x1f8,0x167a,0x167a,0x167a,0x167a,
+0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x1f8,0x1f8,0x1f8,0x1f8,
+0x1f8,0x1f8,0x1f8,0x1f8,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x1f8,0x1f8,
+0x1f8,0x1f8,0x1f8,0x1f8,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x1f8,0x1f8,0x1f8,0x1f8,
+0x1f8,0x1f8,0x1f8,0x1f8,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,
+0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x1f8,0x1f8,0x1a91,0x1a91,0x1f8,0x1f8,0x1f8,0x1f8,0x1f8,0x1f8,
+0x1f8,0x1f8,0x1f8,0x1f8,0x1f8,0x1f8,0x1f8,0x1f8,0x1f8,0x1f8,0x1f8,0x1f8,0x1f8,0x1f8,0x1f8,0x1f8,
+0x1f8,0x1f8,0x1f8,0x1f8,0x1f8,0x1f8,0x1f8,0x1f8,0x1f8,0x1f8,0x1f8,0x1f8,0x167d,0x168c,0x1683,0x1680,
+0x1692,0x1692,0x1686,0x1692,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1689,0x1689,0x1689,0x1689,
+0x1689,0x1689,0x1689,0x1689,0x1689,0x1689,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1fb,0x1698,0x1698,0x1698,0x1698,
+0x1698,0x1698,0x1698,0x1698,0x1698,0x1698,0x1695,0x1695,0x1695,0x1695,0x1695,0x1695,0x1695,0x1695,0x1695,0x1fe,
+0x1fe,0x1fe,0x1fe,0x1fe,0x1fe,0x1fe,0x1fe,0x1fe,0x1fe,0x1fe,0x1fe,0x169e,0x1740,0x1740,0x1740,0x1740,
+0x1740,0x1740,0x1740,0x1740,0x1740,0x1740,0x1740,0x1740,0x1740,0x1740,0x1740,0x1740,0x1740,0x1740,0x1740,0x1740,
+0x1740,0x1740,0x1740,0x1740,0x1740,0x1740,0x1938,0x201,0x201,0x172e,0x172e,0x172e,0x173a,0x173a,0x172e,0x172e,
+0x172e,0x172e,0x173d,0x172e,0x172e,0x172e,0x172e,0x1731,0x201,0x201,0x201,0x201,0x1737,0x1737,0x1737,0x1737,
+0x1737,0x1737,0x1737,0x1737,0x1737,0x1737,0x1734,0x1734,0x1743,0x1743,0x1743,0x1734,0x1746,0x1746,0x1746,0x1746,
+0x1746,0x1746,0x1746,0x204,0x204,0x204,0x204,0x204,0x204,0x204,0x204,0x204,0x204,0x204,0x204,0x204,
+0x204,0x204,0x204,0x204,0x204,0x204,0x204,0x204,0x204,0x204,0x204,0x204,0x204,0x204,0x204,0x204,
+0x204,0x204,0x204,0x204,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,
+0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x20a,0x1758,0x1758,0x20a,0x20a,0x20a,0x20a,0x20a,0x1755,
+0x1755,0x1755,0x1755,0x1755,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x20d,0x175b,0x20d,0x175b,0x175b,
+0x175b,0x175b,0x20d,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,
+0x175b,0x175b,0x20d,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x175e,0x20d,0x20d,
+0x20d,0x20d,0x20d,0x20d,0x15b7,0x15b7,0x15b7,0x15b7,0x15b7,0x15b7,0x15b7,0x15b7,0x15b7,0x15b7,0x15b7,0x15b7,
+0x15b7,0x15b7,0x15b7,0x15b7,0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,
+0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,0x210,0x210,0x210,0x210,0x210,0x210,0x210,0x210,0x210,
+0x210,0x210,0x210,0x210,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,
+0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x210,0x210,0x210,0x210,0x210,0x210,0x210,0x1761,0x1761,
+0x1761,0x1761,0x1761,0x1761,0x176d,0x176d,0x176d,0x176d,0x176a,0x176d,0x176d,0x1770,0x1773,0x1770,0x1770,0x176d,
+0x213,0x213,0x213,0x213,0x213,0x213,0x213,0x213,0x213,0x213,0x213,0x213,0x213,0x213,0x213,0x176a,
+0x176a,0x176a,0x176a,0x176a,0x17ca,0x17ca,0x17ca,0x17ca,0x17c1,0x17c1,0x17c1,0x17bb,0x17be,0x17be,0x17be,0x19e6,
+0x216,0x216,0x216,0x216,0x17c7,0x17c7,0x17c7,0x17c7,0x17c7,0x17c7,0x17c7,0x17c7,0x17c7,0x17c7,0x216,0x216,
+0x216,0x216,0x17c4,0x17c4,0x17e5,0x17e5,0x17e5,0x17e5,0x17e5,0x17e5,0x17e5,0x17e5,0x17e5,0x219,0x17e5,0x17e5,
+0x17e5,0x17e5,0x17e5,0x17e5,0x17e5,0x17e5,0x17e5,0x17e5,0x17e5,0x17e5,0x17e5,0x17e5,0x17e5,0x17e5,0x17e5,0x17e5,
+0x17e5,0x17e5,0x17e5,0x17e5,0x17e5,0x17e5,0x17e5,0x17e2,0x17d0,0x17d0,0x17d0,0x17d0,0x17d0,0x17d0,0x17d0,0x219,
+0x17d0,0x17d0,0x17d0,0x17d0,0x17d0,0x17d0,0x17e2,0x17d3,0x17e5,0x17e8,0x17e8,0x17dc,0x17d9,0x17d9,0x219,0x219,
+0x219,0x219,0x219,0x219,0x219,0x219,0x219,0x219,0x17df,0x17df,0x17df,0x17df,0x17df,0x17df,0x17df,0x17df,
+0x17df,0x17df,0x17d6,0x17d6,0x17d6,0x17d6,0x17d6,0x17d6,0x17d6,0x17d6,0x17d6,0x17d6,0x17d6,0x17d6,0x17d6,0x17d6,
+0x17d6,0x219,0x219,0x219,0x17f4,0x17f7,0x17fd,0x17fd,0x17fd,0x17fd,0x17fd,0x17fd,0x17fd,0x17fd,0x17fd,0x17fd,
+0x17fd,0x17fd,0x17fd,0x17fd,0x17eb,0x17eb,0x17eb,0x17eb,0x17eb,0x17eb,0x17eb,0x17eb,0x17eb,0x21c,0x21c,0x21c,
+0x21c,0x21c,0x21c,0x21c,0x1956,0x1956,0x1956,0x1956,0x1956,0x1956,0x1956,0x1956,0x1956,0x1956,0x1956,0x1956,
+0x1956,0x1956,0x1956,0x1956,0x17ee,0x17ee,0x17ee,0x17ee,0x17ee,0x17ee,0x17ee,0x21f,0x17ee,0x17ee,0x17ee,0x17ee,
+0x17ee,0x17ee,0x17ee,0x17ee,0x17ee,0x17ee,0x17ee,0x17ee,0x17ee,0x17ee,0x17ee,0x17ee,0x17ee,0x21f,0x21f,0x17ee,
+0x17ee,0x17ee,0x17ee,0x17ee,0x17ee,0x17ee,0x21f,0x17ee,0x17ee,0x21f,0x17ee,0x17ee,0x17ee,0x17ee,0x17ee,0x21f,
+0x21f,0x21f,0x21f,0x21f,0x1bfc,0x1bfc,0x1bfc,0x1bfc,0x1bfc,0x1bfc,0x1bfc,0x1bfc,0x1bfc,0x1bfc,0x1bfc,0x1bfc,
+0x1bfc,0x1bfc,0x1bfc,0x1bfc,0x183c,0x18d8,0x1a40,0x1a43,0x1aeb,0x222,0x222,0x222,0x222,0x222,0x222,0x222,
+0x222,0x222,0x222,0x222,0x1ae8,0x1ae8,0x222,0x222,0x222,0x222,0x222,0x222,0x222,0x222,0x222,0x222,
+0x222,0x222,0x222,0x222,0x17fd,0x17fd,0x17fd,0x17fd,0x17fd,0x17fd,0x17fd,0x17fd,0x17fd,0x17fd,0x17fd,0x17fd,
+0x17fd,0x17fd,0x17fd,0x17fd,0x225,0x225,0x17f1,0x17f1,0x17f1,0x17f1,0x17f1,0x17f1,0x17f1,0x17f1,0x17f1,0x17f1,
+0x17f1,0x17f1,0x17f1,0x17f1,0x225,0x17fa,0x17f1,0x17f1,0x17f1,0x17f1,0x17f1,0x17f1,0x17f1,0x17fa,0x17f1,0x17f1,
+0x17fa,0x17f1,0x17f1,0x225,0x225,0x225,0x225,0x225,0x225,0x225,0x225,0x225,0x1800,0x1800,0x1800,0x1800,
+0x1800,0x1800,0x1800,0x1800,0x1800,0x1800,0x1800,0x1800,0x1800,0x228,0x228,0x228,0x228,0x228,0x228,0x228,
+0x228,0x228,0x228,0x228,0x228,0x228,0x228,0x228,0x228,0x228,0x228,0x228,0x1818,0x1818,0x1809,0x1803,
+0x1803,0x1818,0x1806,0x181b,0x181b,0x181b,0x181b,0x181e,0x181e,0x1812,0x180f,0x180c,0x1815,0x1815,0x1815,0x1815,
+0x1815,0x1815,0x1815,0x1815,0x1815,0x1815,0x1a94,0x1812,0x22b,0x180c,0x193b,0x19e9,0x1a97,0x1a97,0x22b,0x22b,
0x22b,0x22b,0x22b,0x22b,0x22b,0x22b,0x22b,0x22b,0x22b,0x22b,0x22b,0x22b,0x22b,0x22b,0x22b,0x22b,
-0x1806,0x1806,0x17f7,0x17f1,0x17f1,0x1806,0x17f4,0x1809,0x1809,0x1809,0x1809,0x180c,0x180c,0x1800,0x17fd,0x17fa,
-0x1803,0x1803,0x1803,0x1803,0x1803,0x1803,0x1803,0x1803,0x1803,0x1803,0x1a82,0x1800,0x22e,0x17fa,0x1929,0x19d7,
-0x1a85,0x1a85,0x22e,0x22e,0x22e,0x22e,0x22e,0x22e,0x22e,0x22e,0x22e,0x22e,0x22e,0x22e,0x22e,0x22e,
-0x22e,0x22e,0x22e,0x22e,0x22e,0x22e,0x22e,0x22e,0x22e,0x22e,0x22e,0x22e,0x22e,0x22e,0x22e,0x22e,
-0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,0x1812,
-0x1812,0x1812,0x1812,0x1812,0x231,0x231,0x231,0x231,0x180f,0x180f,0x180f,0x180f,0x180f,0x180f,0x180f,0x180f,
-0x180f,0x180f,0x180f,0x180f,0x180f,0x180f,0x180f,0x180f,0x180f,0x180f,0x180f,0x180f,0x180f,0x180f,0x180f,0x180f,
-0x180f,0x180f,0x180f,0x180f,0x231,0x231,0x231,0x231,0x182d,0x182d,0x182d,0x182d,0x182d,0x182d,0x182d,0x182d,
-0x182d,0x182d,0x182d,0x182d,0x182d,0x19a7,0x19a7,0x19a7,0x19a7,0x19a7,0x1a34,0x1a34,0x1a34,0x1a34,0x1a34,0x1a34,
-0x234,0x234,0x234,0x234,0x234,0x234,0x234,0x234,0x1ba8,0x1ba8,0x1ba8,0x237,0x237,0x237,0x237,0x237,
-0x237,0x237,0x237,0x237,0x237,0x237,0x237,0x237,0x279,0x279,0x279,0x279,0x279,0x279,0x279,0x279,
-0x279,0x279,0x279,0x279,0x279,0x279,0x279,0x279,0x186c,0x186c,0x186c,0x186c,0x186c,0x186c,0x186c,0x23a,
-0x186c,0x186c,0x23a,0x186c,0x186c,0x186c,0x186c,0x186c,0x186c,0x186c,0x186c,0x186c,0x186c,0x186c,0x186c,0x186c,
-0x186c,0x186c,0x186c,0x186c,0x186c,0x186c,0x186c,0x186c,0x186c,0x1860,0x1860,0x1860,0x1860,0x1860,0x1860,0x23a,
-0x23a,0x23a,0x1860,0x23a,0x1860,0x1860,0x23a,0x1860,0x1860,0x1860,0x1863,0x1860,0x1866,0x1866,0x186f,0x1860,
-0x23a,0x23a,0x23a,0x23a,0x23a,0x23a,0x23a,0x23a,0x1869,0x1869,0x1869,0x1869,0x1869,0x1869,0x1869,0x1869,
-0x1869,0x1869,0x23a,0x23a,0x23a,0x23a,0x23a,0x23a,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,
-0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,
-0x18cf,0x18cf,0x18cf,0x18cf,0x23d,0x23d,0x23d,0x23d,0x187e,0x1881,0x1881,0x240,0x240,0x240,0x240,0x240,
-0x240,0x240,0x240,0x240,0x240,0x240,0x240,0x240,0x1b84,0x1b84,0x1b84,0x1b84,0x1b84,0x1b84,0x1b84,0x1b84,
-0x1b84,0x1b84,0x1b84,0x1b84,0x1b84,0x1b84,0x1b84,0x1b84,0x1890,0x1890,0x1890,0x1890,0x1890,0x1890,0x1890,0x1890,
-0x1890,0x1890,0x1890,0x243,0x243,0x243,0x243,0x243,0x1b51,0x1b51,0x1b51,0x1b51,0x1b51,0x1b51,0x1b51,0x1b51,
-0x1b51,0x1b51,0x1b51,0x1b51,0x1b51,0x1b51,0x1b51,0x1b51,0x189c,0x189f,0x18ae,0x18ae,0x189f,0x18a2,0x189c,0x1899,
-0x246,0x246,0x246,0x246,0x246,0x246,0x246,0x246,0x1887,0x1872,0x1872,0x1872,0x1872,0x1872,0x1872,0x1884,
-0x1884,0x1872,0x1872,0x1872,0x1887,0x1887,0x1887,0x1887,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,
-0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x249,0x249,0x249,0x249,
-0x249,0x249,0x249,0x249,0x249,0x249,0x249,0x249,0x192f,0x192f,0x192f,0x192f,0x192f,0x192f,0x192f,0x192f,
-0x192f,0x192f,0x192f,0x192f,0x192f,0x192f,0x249,0x249,0x1a3d,0x1a3d,0x1a3d,0x1a3d,0x1adf,0x27c,0x27c,0x27c,
-0x1a3d,0x1a3d,0x1a3d,0x1bab,0x1bab,0x27c,0x27c,0x27c,0x1941,0x1941,0x1941,0x1941,0x1941,0x1941,0x1941,0x1941,
-0x1941,0x1941,0x1941,0x1941,0x193e,0x193e,0x193e,0x1932,0x1932,0x1932,0x1932,0x1932,0x1932,0x1932,0x1932,0x1932,
-0x193e,0x1938,0x1935,0x193b,0x24c,0x24c,0x24c,0x24c,0x1944,0x1944,0x1944,0x1944,0x1944,0x1944,0x1944,0x1944,
-0x1944,0x1944,0x1944,0x1944,0x1944,0x1944,0x1944,0x1944,0x1944,0x1944,0x1944,0x1944,0x1944,0x1944,0x1944,0x1944,
-0x1944,0x1944,0x1944,0x24f,0x24f,0x1944,0x1944,0x1944,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x252,0x1953,
-0x1953,0x252,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,
-0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1950,0x1950,0x1950,0x1950,0x1950,0x252,
-0x1947,0x1947,0x252,0x1950,0x1950,0x1947,0x1950,0x194a,0x1953,0x252,0x252,0x252,0x252,0x252,0x252,0x252,
-0x195c,0x195c,0x195f,0x195f,0x1956,0x1956,0x1956,0x1956,0x255,0x255,0x255,0x255,0x255,0x255,0x255,0x255,
-0x1959,0x1959,0x1959,0x1959,0x1959,0x1959,0x1959,0x1959,0x1959,0x1959,0x255,0x255,0x255,0x255,0x255,0x255,
-0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,0x1965,0x1962,0x1962,0x1962,
-0x1965,0x1962,0x1962,0x1962,0x1962,0x258,0x258,0x258,0x258,0x258,0x258,0x258,0x258,0x258,0x258,0x258,
-0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,
-0x196e,0x196e,0x196e,0x1968,0x1968,0x196b,0x196b,0x1971,0x1971,0x25b,0x25b,0x25b,0x25b,0x25b,0x25b,0x25b,
-0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,
-0x1974,0x1974,0x1974,0x1974,0x25e,0x25e,0x25e,0x25e,0x25e,0x25e,0x25e,0x25e,0x25e,0x25e,0x25e,0x25e,
-0x1977,0x1977,0x1977,0x1977,0x1977,0x1977,0x1977,0x1977,0x1977,0x1977,0x1977,0x1977,0x1977,0x1977,0x1977,0x1977,
-0x1977,0x1977,0x1977,0x1977,0x1977,0x1977,0x1977,0x197a,0x1983,0x1977,0x1977,0x261,0x261,0x261,0x261,0x261,
-0x1986,0x1986,0x1986,0x1986,0x1986,0x1986,0x1986,0x1989,0x264,0x264,0x264,0x264,0x264,0x264,0x264,0x264,
-0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,
-0x1992,0x1992,0x198c,0x198c,0x198c,0x198c,0x198c,0x198c,0x198c,0x198c,0x198c,0x198c,0x198c,0x198f,0x198f,0x198f,
-0x198f,0x1995,0x1995,0x1995,0x1995,0x1995,0x267,0x267,0x267,0x267,0x267,0x267,0x267,0x267,0x267,0x267,
-0x267,0x267,0x267,0x267,0x267,0x267,0x267,0x267,0x1b72,0x1b72,0x1b72,0x1b72,0x1b72,0x1b72,0x1b72,0x1b72,
-0x1b72,0x1b72,0x1b72,0x1b72,0x1b72,0x1b72,0x1b72,0x1b72,0x19e9,0x19e9,0x19e9,0x19e9,0x19e9,0x19e9,0x19e9,0x19e9,
-0x19e9,0x19e9,0x19e9,0x19e9,0x19e9,0x19e9,0x19e9,0x19e9,0x19e9,0x19e9,0x19e9,0x19e9,0x19e9,0x19e9,0x19e9,0x26d,
-0x26d,0x26d,0x26d,0x26d,0x26d,0x26d,0x26d,0x26d,0x19f8,0x19f8,0x19f8,0x19f8,0x19f8,0x19f8,0x19f8,0x19f8,
-0x270,0x270,0x19f8,0x19f8,0x19f8,0x19f8,0x19f8,0x19f8,0x19f8,0x19f8,0x19f8,0x19f8,0x19f8,0x19f8,0x19f8,0x19f8,
-0x19f8,0x19f8,0x19f8,0x19f8,0x19f8,0x19f8,0x19f8,0x19f8,0x19f8,0x19f5,0x19f5,0x19f5,0x19ec,0x19ec,0x19ec,0x19ec,
-0x270,0x270,0x19ec,0x19ec,0x19f5,0x19f5,0x19f5,0x19f5,0x19ef,0x19f8,0x19f2,0x19f8,0x19f5,0x270,0x270,0x270,
-0x270,0x270,0x270,0x270,0x270,0x270,0x270,0x270,0x270,0x270,0x270,0x270,0x270,0x270,0x270,0x270,
-0x270,0x270,0x270,0x270,0x270,0x270,0x270,0x270,0x1a04,0x1a04,0x1a04,0x1a04,0x1a04,0x1a04,0x1a04,0x1a04,
-0x1a04,0x1a04,0x1a04,0x1a04,0x1a04,0x273,0x273,0x273,0x19fb,0x19fb,0x19fb,0x19fb,0x19fb,0x19fb,0x19fb,0x1a04,
-0x1a04,0x1a04,0x1a04,0x1a04,0x1a07,0x1a07,0x273,0x273,0x276,0x1a0a,0x1a0a,0x1a0a,0x1a0a,0x1a0a,0x1a0a,0x1a0a,
+0x22b,0x22b,0x22b,0x22b,0x22b,0x22b,0x22b,0x22b,0x22b,0x22b,0x22b,0x22b,0x1824,0x1824,0x1824,0x1824,
+0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,
+0x22e,0x22e,0x22e,0x22e,0x1821,0x1821,0x1821,0x1821,0x1821,0x1821,0x1821,0x1821,0x1821,0x1821,0x1821,0x1821,
+0x1821,0x1821,0x1821,0x1821,0x1821,0x1821,0x1821,0x1821,0x1821,0x1821,0x1821,0x1821,0x1821,0x1821,0x1821,0x1821,
+0x22e,0x22e,0x22e,0x22e,0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,
+0x183f,0x19b9,0x19b9,0x19b9,0x19b9,0x19b9,0x1a46,0x1a46,0x1a46,0x1a46,0x1a46,0x1a46,0x231,0x231,0x231,0x231,
+0x231,0x231,0x231,0x231,0x1bba,0x1bba,0x1bba,0x234,0x234,0x234,0x234,0x234,0x234,0x234,0x234,0x234,
+0x234,0x234,0x234,0x234,0x276,0x276,0x1c2f,0x276,0x276,0x276,0x276,0x276,0x276,0x276,0x276,0x276,
+0x276,0x276,0x276,0x276,0x187e,0x187e,0x187e,0x187e,0x187e,0x187e,0x187e,0x237,0x187e,0x187e,0x237,0x187e,
+0x187e,0x187e,0x187e,0x187e,0x187e,0x187e,0x187e,0x187e,0x187e,0x187e,0x187e,0x187e,0x187e,0x187e,0x187e,0x187e,
+0x187e,0x187e,0x187e,0x187e,0x187e,0x1872,0x1872,0x1872,0x1872,0x1872,0x1872,0x237,0x237,0x237,0x1872,0x237,
+0x1872,0x1872,0x237,0x1872,0x1872,0x1872,0x1875,0x1872,0x1878,0x1878,0x1881,0x1872,0x237,0x237,0x237,0x237,
+0x237,0x237,0x237,0x237,0x187b,0x187b,0x187b,0x187b,0x187b,0x187b,0x187b,0x187b,0x187b,0x187b,0x237,0x237,
+0x237,0x237,0x237,0x237,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,
+0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,
+0x23a,0x23a,0x23a,0x23a,0x1890,0x1893,0x1893,0x23d,0x23d,0x23d,0x23d,0x23d,0x23d,0x23d,0x23d,0x23d,
+0x23d,0x23d,0x23d,0x23d,0x1b96,0x1b96,0x1b96,0x1b96,0x1b96,0x1b96,0x1b96,0x1b96,0x1b96,0x1b96,0x1b96,0x1b96,
+0x1b96,0x1b96,0x1b96,0x1b96,0x18a2,0x18a2,0x18a2,0x18a2,0x18a2,0x18a2,0x18a2,0x18a2,0x18a2,0x18a2,0x18a2,0x240,
+0x240,0x240,0x240,0x240,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,
+0x1b63,0x1b63,0x1b63,0x1b63,0x18ae,0x18b1,0x18c0,0x18c0,0x18b1,0x18b4,0x18ae,0x18ab,0x243,0x243,0x243,0x243,
+0x243,0x243,0x243,0x243,0x1899,0x1884,0x1884,0x1884,0x1884,0x1884,0x1884,0x1896,0x1896,0x1884,0x1884,0x1884,
+0x1899,0x1899,0x1899,0x1899,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,
+0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x246,0x246,0x246,0x246,0x246,0x246,0x246,0x246,
+0x246,0x246,0x246,0x246,0x1941,0x1941,0x1941,0x1941,0x1941,0x1941,0x1941,0x1941,0x1941,0x1941,0x1941,0x1941,
+0x1941,0x1941,0x246,0x246,0x1a4f,0x1a4f,0x1a4f,0x1a4f,0x1af1,0x1c35,0x1c35,0x1c35,0x1a4f,0x1a4f,0x1a4f,0x1bbd,
+0x1bbd,0x279,0x279,0x279,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,
+0x1950,0x1950,0x1950,0x1944,0x1944,0x1944,0x1944,0x1944,0x1944,0x1944,0x1944,0x1944,0x1950,0x194a,0x1947,0x194d,
+0x249,0x249,0x249,0x249,0x1956,0x1956,0x1956,0x1956,0x1956,0x1956,0x1956,0x1956,0x1956,0x1956,0x1956,0x1956,
+0x1956,0x1956,0x1956,0x1956,0x1956,0x1956,0x1956,0x1956,0x1956,0x1956,0x1956,0x1956,0x1956,0x1956,0x1956,0x24c,
+0x24c,0x1956,0x1956,0x1956,0x1965,0x1965,0x1965,0x1965,0x1965,0x1965,0x24f,0x1965,0x1965,0x24f,0x1965,0x1965,
+0x1965,0x1965,0x1965,0x1965,0x1965,0x1965,0x1965,0x1965,0x1965,0x1965,0x1965,0x1965,0x1965,0x1965,0x1965,0x1965,
+0x1965,0x1965,0x1965,0x1965,0x1965,0x1965,0x1962,0x1962,0x1962,0x1962,0x1962,0x24f,0x1959,0x1959,0x24f,0x1962,
+0x1962,0x1959,0x1962,0x195c,0x1965,0x24f,0x24f,0x24f,0x24f,0x24f,0x24f,0x24f,0x196e,0x196e,0x1971,0x1971,
+0x1968,0x1968,0x1968,0x1968,0x252,0x252,0x252,0x252,0x252,0x252,0x252,0x252,0x196b,0x196b,0x196b,0x196b,
+0x196b,0x196b,0x196b,0x196b,0x196b,0x196b,0x252,0x252,0x252,0x252,0x252,0x252,0x1974,0x1974,0x1974,0x1974,
+0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1977,0x1974,0x1974,0x1974,0x1977,0x1974,0x1974,0x1974,
+0x1974,0x255,0x255,0x255,0x255,0x255,0x255,0x255,0x255,0x255,0x255,0x255,0x1980,0x1980,0x1980,0x1980,
+0x1980,0x1980,0x1980,0x1980,0x1980,0x1980,0x1980,0x1980,0x1980,0x1980,0x1980,0x1980,0x1980,0x1980,0x1980,0x197a,
+0x197a,0x197d,0x197d,0x1983,0x1983,0x258,0x258,0x258,0x258,0x258,0x258,0x258,0x1986,0x1986,0x1986,0x1986,
+0x1986,0x1986,0x1986,0x1986,0x1986,0x1986,0x1986,0x1986,0x1986,0x1986,0x1986,0x1986,0x1986,0x1986,0x1986,0x1986,
+0x25b,0x25b,0x25b,0x25b,0x25b,0x25b,0x25b,0x25b,0x25b,0x25b,0x25b,0x25b,0x1989,0x1989,0x1989,0x1989,
+0x1989,0x1989,0x1989,0x1989,0x1989,0x1989,0x1989,0x1989,0x1989,0x1989,0x1989,0x1989,0x1989,0x1989,0x1989,0x1989,
+0x1989,0x1989,0x1989,0x198c,0x1995,0x1989,0x1989,0x25e,0x25e,0x25e,0x25e,0x25e,0x1998,0x1998,0x1998,0x1998,
+0x1998,0x1998,0x1998,0x199b,0x261,0x261,0x261,0x261,0x261,0x261,0x261,0x261,0x19a4,0x19a4,0x19a4,0x19a4,
+0x19a4,0x19a4,0x19a4,0x19a4,0x19a4,0x19a4,0x19a4,0x19a4,0x19a4,0x19a4,0x19a4,0x19a4,0x19a4,0x19a4,0x199e,0x199e,
+0x199e,0x199e,0x199e,0x199e,0x199e,0x199e,0x199e,0x199e,0x199e,0x19a1,0x19a1,0x19a1,0x19a1,0x19a7,0x19a7,0x19a7,
+0x19a7,0x19a7,0x264,0x264,0x264,0x264,0x264,0x264,0x264,0x264,0x264,0x264,0x264,0x264,0x264,0x264,
+0x264,0x264,0x264,0x264,0x1b84,0x1b84,0x1b84,0x1b84,0x1b84,0x1b84,0x1b84,0x1b84,0x1b84,0x1b84,0x1b84,0x1b84,
+0x1b84,0x1b84,0x1b84,0x1b84,0x1be7,0x1bed,0x1bed,0x1bed,0x1bed,0x1bed,0x1bed,0x1bea,0x1bea,0x1bea,0x1bea,0x1bea,
+0x1bea,0x1bea,0x1bea,0x1bea,0x1bea,0x1bea,0x1bea,0x1bea,0x1bea,0x1bea,0x267,0x267,0x267,0x267,0x267,0x267,
+0x267,0x267,0x267,0x267,0x19fb,0x19fb,0x19fb,0x19fb,0x19fb,0x19fb,0x19fb,0x19fb,0x19fb,0x19fb,0x19fb,0x19fb,
+0x19fb,0x19fb,0x19fb,0x19fb,0x19fb,0x19fb,0x19fb,0x19fb,0x19fb,0x19fb,0x19fb,0x26a,0x26a,0x26a,0x26a,0x26a,
+0x26a,0x26a,0x26a,0x26a,0x1a0a,0x1a0a,0x1a0a,0x1a0a,0x1a0a,0x1a0a,0x1a0a,0x1a0a,0x26d,0x26d,0x1a0a,0x1a0a,
0x1a0a,0x1a0a,0x1a0a,0x1a0a,0x1a0a,0x1a0a,0x1a0a,0x1a0a,0x1a0a,0x1a0a,0x1a0a,0x1a0a,0x1a0a,0x1a0a,0x1a0a,0x1a0a,
-0x1a0a,0x1a0a,0x1a0a,0x1a0a,0x1a0a,0x1a0a,0x1a0a,0x1a0a,0x1a0a,0x1a0a,0x276,0x276,0x279,0x279,0x279,0x279,
-0x279,0x279,0x279,0x279,0x279,0x279,0x279,0x279,0x279,0x279,0x279,0x279,0x1a37,0x1a37,0x1a37,0x279,
-0x279,0x279,0x279,0x279,0x279,0x279,0x279,0x279,0x279,0x279,0x279,0x279,0x1a3a,0x1a3a,0x1a3a,0x1a3a,
-0x279,0x279,0x279,0x279,0x279,0x279,0x279,0x279,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,
-0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x1a3d,0x1a3d,0x1a3d,0x1adf,0x1adf,0x1adf,0x1adf,0x27c,
-0x27c,0x27c,0x27c,0x27c,0x27c,0x27c,0x27c,0x27c,0x1a3d,0x1a3d,0x1a3d,0x1a3d,0x1a3d,0x1a3d,0x1adf,0x1adf,
-0x1adf,0x1adf,0x1adf,0x1adf,0x1adf,0x1adf,0x1adf,0x1adf,0x1adf,0x1bab,0x1bab,0x1bab,0x1bab,0x27c,0x27c,0x27c,
-0x1adf,0x1adf,0x1adf,0x1adf,0x1adf,0x1adf,0x1adf,0x1bab,0x1bab,0x1bab,0x1bab,0x27c,0x27c,0x27c,0x27c,0x27c,
-0x1adf,0x1adf,0x1adf,0x1bae,0x1bae,0x1bae,0x27c,0x27c,0x27c,0x27c,0x27c,0x27c,0x27c,0x27c,0x27c,0x27c,
-0x1adf,0x1adf,0x1adf,0x1adf,0x1adf,0x1adf,0x1adf,0x1bab,0x1bab,0x1bab,0x27c,0x27c,0x27c,0x27c,0x27c,0x27c,
-0x1bab,0x1bab,0x1bab,0x1bab,0x1bab,0x1bab,0x1bab,0x1bab,0x27c,0x27c,0x27c,0x27c,0x27c,0x27c,0x27c,0x27c,
-0x1bae,0x1bae,0x1bae,0x1bae,0x1bae,0x1bae,0x1bae,0x27c,0x27c,0x27c,0x27c,0x27c,0x27c,0x27c,0x27c,0x27c,
-0x1a13,0x1a0d,0x1a0d,0x1a0d,0x1a0d,0x1a0d,0x1a0d,0x1a0d,0x1a0d,0x1a0d,0x1a0d,0x1a0d,0x1a0d,0x1a0d,0x1a0d,0x1a0d,
-0x1a0d,0x1a0d,0x27f,0x27f,0x27f,0x27f,0x27f,0x27f,0x27f,0x27f,0x27f,0x27f,0x27f,0x27f,0x27f,0x1a10,
-0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a16,0x1a16,0x1a16,0x1a16,
-0x1a1c,0x1a1c,0x1a1c,0x1a1c,0x1a1c,0x1a1c,0x1a1c,0x1a1c,0x1a1c,0x1a1c,0x282,0x282,0x282,0x282,0x282,0x1a19,
-0x1a8b,0x1a8b,0x1a8b,0x1a8b,0x1a8b,0x1a88,0x1a88,0x1a88,0x1a88,0x1a88,0x1a88,0x1a88,0x285,0x285,0x285,0x285,
-0x285,0x285,0x285,0x285,0x285,0x285,0x285,0x285,0x285,0x285,0x285,0x285,0x285,0x285,0x285,0x285,
-0x1aa6,0x1aa6,0x1aa6,0x1aa6,0x1aa6,0x1aa6,0x1aa6,0x288,0x288,0x1aa6,0x288,0x288,0x1aa6,0x1aa6,0x1aa6,0x1aa6,
-0x1aa6,0x1aa6,0x1aa6,0x1aa6,0x288,0x1aa6,0x1aa6,0x288,0x1aa6,0x1aa6,0x1aa6,0x1aa6,0x1aa6,0x1aa6,0x1aa6,0x1aa6,
-0x1aa6,0x1aa6,0x1aa6,0x1aa6,0x1aa6,0x1aa6,0x1aa6,0x1aa6,0x1a8e,0x1a9d,0x1a9d,0x1a9d,0x1a9d,0x1a9d,0x288,0x1a9d,
-0x1aa0,0x288,0x288,0x1a8e,0x1a8e,0x1aa3,0x1a94,0x1aa9,0x1a9d,0x1aa9,0x1a9d,0x1a91,0x1aac,0x1a97,0x1aac,0x288,
-0x288,0x288,0x288,0x288,0x288,0x288,0x288,0x288,0x1a9a,0x1a9a,0x1a9a,0x1a9a,0x1a9a,0x1a9a,0x1a9a,0x1a9a,
-0x1a9a,0x1a9a,0x288,0x288,0x288,0x288,0x288,0x288,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,
-0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x28b,0x28b,
-0x28b,0x28b,0x28b,0x28b,0x28b,0x28b,0x28b,0x28b,0x28b,0x28b,0x28b,0x28b,0x28b,0x28b,0x28b,0x28b,
-0x28b,0x28b,0x28b,0x28b,0x28b,0x28b,0x28b,0x28b,0x28b,0x28b,0x28b,0x28b,0x28b,0x28b,0x28b,0x28b,
-0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,
-0x1ab2,0x1ab2,0x1ab2,0x291,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,
-0x1ab2,0x1ab2,0x1ab2,0x291,0x291,0x291,0x291,0x291,0x291,0x291,0x291,0x291,0x291,0x291,0x291,0x291,
-0x291,0x291,0x291,0x291,0x291,0x291,0x291,0x291,0x1ab5,0x1ab5,0x1ab5,0x1ab5,0x1ab5,0x1ab5,0x1ab5,0x1ab5,
-0x1ab5,0x1ab5,0x291,0x291,0x291,0x291,0x291,0x291,0x1ae8,0x1ae8,0x1ae8,0x1ae8,0x1ae8,0x1ae8,0x1ae8,0x1ae8,
-0x1ae8,0x294,0x294,0x294,0x294,0x294,0x294,0x294,0x294,0x294,0x294,0x294,0x294,0x294,0x294,0x294,
-0x294,0x294,0x294,0x294,0x294,0x294,0x294,0x294,0x294,0x294,0x294,0x294,0x294,0x294,0x294,0x294,
-0x294,0x294,0x294,0x294,0x1abe,0x1abe,0x1abe,0x1abe,0x1abe,0x1abe,0x1abe,0x1abe,0x1abe,0x1abe,0x297,0x1ab8,
-0x1ab8,0x1abb,0x297,0x297,0x1abe,0x1abe,0x297,0x297,0x297,0x297,0x297,0x297,0x297,0x297,0x297,0x297,
-0x297,0x297,0x297,0x297,0x1b51,0x1b51,0x1b51,0x1b51,0x1b51,0x1b51,0x1b51,0x1b51,0x1b4e,0x1b51,0x1b51,0x1b51,
-0x1b51,0x1b51,0x1b51,0x29a,0x1b54,0x1b54,0x29a,0x29a,0x29a,0x29a,0x29a,0x29a,0x1b4b,0x1b4b,0x1b4b,0x1b4b,
-0x1b4b,0x1b4b,0x1b4b,0x1b4b,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,
-0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b57,0x1b57,0x29d,0x29d,0x29d,0x29d,0x29d,0x29d,0x29d,0x29d,0x29d,
-0x29d,0x29d,0x29d,0x29d,0x1b5d,0x1b5d,0x1b5d,0x1b5d,0x1b5d,0x1b5d,0x1b5d,0x2a0,0x1b5d,0x1b5d,0x1b5d,0x1b5d,
-0x2a0,0x1b5d,0x1b5d,0x2a0,0x1b5d,0x1b5d,0x1b5d,0x1b5d,0x1b5d,0x1b5d,0x1b5d,0x1b5d,0x1b5d,0x1b5d,0x1b5d,0x1b5d,
-0x1b5d,0x1b5d,0x1b5d,0x2a0,0x1b60,0x1b66,0x1b66,0x1b63,0x1b63,0x1b63,0x2a6,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,
-0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,
-0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x2a6,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x2a6,
-0x2a6,0x2a6,0x2a6,0x2a6,0x1b69,0x1b69,0x1b69,0x1b69,0x1b69,0x1b69,0x1b69,0x1b69,0x1b69,0x1b69,0x1b6c,0x1b69,
-0x1b69,0x1b69,0x1b69,0x1b69,0x1b69,0x1b69,0x1b69,0x1b69,0x1b69,0x1b69,0x1b69,0x1b69,0x1b69,0x1b69,0x1b69,0x1b69,
-0x1b69,0x1b69,0x1b69,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,
-0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,
-0x2a9,0x2a9,0x2a9,0x2a9,0x1b72,0x1b72,0x1b6f,0x1b6f,0x1b6f,0x1b6f,0x1b75,0x1b75,0x1b75,0x1b75,0x2ac,0x2ac,
-0x2ac,0x2ac,0x2ac,0x2ac,0x2ac,0x2ac,0x2ac,0x2ac,0x2ac,0x2ac,0x2ac,0x2ac,0x2ac,0x2ac,0x2ac,0x2ac,
-0x2ac,0x2ac,0x2ac,0x2ac,0x1a8b,0x1a8b,0x1a8b,0x1a8b,0x1a8b,0x1a8b,0x1a8b,0x1a8b,0x1a8b,0x1a8b,0x1a8b,0x1a8b,
-0x1a8b,0x1a8b,0x1a8b,0x1a8b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,
-0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,
-0x1b7b,0x1b7b,0x1b7b,0x2af,0x1b78,0x1b78,0x1b78,0x1b78,0x1b78,0x1b78,0x1b78,0x1b78,0x1b78,0x1b78,0x2af,0x2af,
-0x2af,0x2af,0x2af,0x2af,0x1542,0x1542,0x1542,0x1542,0x1542,0x1542,0x1542,0x1542,0x1542,0x1542,0x1542,0x1542,
-0x1542,0x1542,0x1542,0x1542,0x1b81,0x1b81,0x1b81,0x1b81,0x1b81,0x1b81,0x1b81,0x1b81,0x1b81,0x1b81,0x1b81,0x1b81,
-0x1b81,0x1b81,0x1b7e,0x2b2,0x2b2,0x2b2,0x2b2,0x2b2,0x2b2,0x2b2,0x2b2,0x2b2,0x2b2,0x2b2,0x2b2,0x2b2,
-0x2b2,0x2b2,0x2b2,0x2b2,0x1b8a,0x1b8a,0x1b8a,0x1b8a,0x1b8a,0x1b8a,0x1b8a,0x1b8a,0x1b8a,0x1b8a,0x1b8a,0x2b5,
-0x1b8a,0x1b8a,0x1b8a,0x1b8a,0x1b8a,0x1b8a,0x1b8a,0x2b5,0x1b8a,0x1b8a,0x2b5,0x1b87,0x1b87,0x1b87,0x1b87,0x1b87,
-0x1b87,0x1b87,0x1b87,0x1b87,0x1b87,0x1b87,0x2b5,0x1b87,0x1b87,0x1b87,0x1b87,0x1b87,0x1b87,0x1b87,0x1b87,0x1b87,
-0x1b87,0x1b87,0x1b87,0x1b87,0x1b87,0x1b87,0x2b5,0x1b87,0x1b87,0x1b87,0x1b87,0x1b87,0x1b87,0x1b87,0x2b5,0x1b87,
-0x1b87,0x2b5,0x2b5,0x2b5,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,
-0x1b90,0x1b90,0x2b8,0x2b8,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,
-0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,
+0x1a0a,0x1a0a,0x1a0a,0x1a0a,0x1a0a,0x1a07,0x1a07,0x1a07,0x19fe,0x19fe,0x19fe,0x19fe,0x26d,0x26d,0x19fe,0x19fe,
+0x1a07,0x1a07,0x1a07,0x1a07,0x1a01,0x1a0a,0x1a04,0x1a0a,0x1a07,0x26d,0x26d,0x26d,0x26d,0x26d,0x26d,0x26d,
+0x26d,0x26d,0x26d,0x26d,0x26d,0x26d,0x26d,0x26d,0x26d,0x26d,0x26d,0x26d,0x26d,0x26d,0x26d,0x26d,
+0x26d,0x26d,0x26d,0x26d,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,
+0x1a16,0x270,0x270,0x270,0x1a0d,0x1a0d,0x1a0d,0x1a0d,0x1a0d,0x1a0d,0x1a0d,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,
+0x1a19,0x1a19,0x270,0x270,0x273,0x1a1c,0x1a1c,0x1a1c,0x1a1c,0x1a1c,0x1a1c,0x1a1c,0x1a1c,0x1a1c,0x1a1c,0x1a1c,
+0x1a1c,0x1a1c,0x1a1c,0x1a1c,0x1a1c,0x1a1c,0x1a1c,0x1a1c,0x1a1c,0x1a1c,0x1a1c,0x1a1c,0x1a1c,0x1a1c,0x1a1c,0x1a1c,
+0x1a1c,0x1a1c,0x1a1c,0x1a1c,0x1a1c,0x1a1c,0x273,0x273,0x276,0x276,0x276,0x276,0x276,0x276,0x276,0x276,
+0x276,0x276,0x276,0x276,0x276,0x276,0x276,0x276,0x1a49,0x1a49,0x1a49,0x276,0x276,0x1c32,0x276,0x276,
+0x276,0x276,0x276,0x276,0x276,0x276,0x276,0x276,0x1a4c,0x1a4c,0x1a4c,0x1a4c,0x276,0x276,0x276,0x276,
+0x276,0x276,0x276,0x276,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,
+0x18e1,0x18e1,0x18e1,0x18e1,0x1a4f,0x1a4f,0x1a4f,0x1af1,0x1af1,0x1af1,0x1af1,0x1c35,0x1c35,0x279,0x279,0x279,
+0x279,0x279,0x279,0x279,0x1a4f,0x1a4f,0x1a4f,0x1a4f,0x1a4f,0x1a4f,0x1af1,0x1af1,0x1af1,0x1af1,0x1af1,0x1af1,
+0x1af1,0x1af1,0x1af1,0x1af1,0x1af1,0x1bbd,0x1bbd,0x1bbd,0x1bbd,0x1c35,0x1c35,0x1c35,0x1af1,0x1af1,0x1af1,0x1af1,
+0x1af1,0x1af1,0x1af1,0x1bbd,0x1bbd,0x1bbd,0x1bbd,0x1c35,0x1c35,0x1c35,0x279,0x1c35,0x1af1,0x1af1,0x1af1,0x1bc0,
+0x1bc0,0x1bc0,0x279,0x279,0x279,0x279,0x279,0x279,0x279,0x279,0x1c35,0x1c35,0x1af1,0x1af1,0x1af1,0x1af1,
+0x1af1,0x1af1,0x1af1,0x1bbd,0x1bbd,0x1bbd,0x1c35,0x1c35,0x279,0x279,0x279,0x279,0x1bbd,0x1bbd,0x1bbd,0x1bbd,
+0x1bbd,0x1bbd,0x1bbd,0x1bbd,0x1c35,0x279,0x279,0x279,0x279,0x279,0x279,0x279,0x1bc0,0x1bc0,0x1bc0,0x1bc0,
+0x1bc0,0x1bc0,0x1bc0,0x1c38,0x1c38,0x279,0x279,0x279,0x279,0x279,0x279,0x279,0x1a25,0x1a1f,0x1a1f,0x1a1f,
+0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x27c,0x27c,
+0x27c,0x27c,0x27c,0x27c,0x27c,0x27c,0x27c,0x27c,0x27c,0x27c,0x27c,0x1a22,0x1a31,0x1a31,0x1a31,0x1a31,
+0x1a31,0x1a31,0x1a31,0x1a31,0x1a31,0x1a31,0x1a31,0x1a31,0x1a28,0x1a28,0x1a28,0x1a28,0x1a2e,0x1a2e,0x1a2e,0x1a2e,
+0x1a2e,0x1a2e,0x1a2e,0x1a2e,0x1a2e,0x1a2e,0x27f,0x27f,0x27f,0x27f,0x27f,0x1a2b,0x1a9d,0x1a9d,0x1a9d,0x1a9d,
+0x1a9d,0x1a9a,0x1a9a,0x1a9a,0x1a9a,0x1a9a,0x1a9a,0x1a9a,0x282,0x282,0x282,0x282,0x282,0x282,0x282,0x282,
+0x282,0x282,0x282,0x282,0x282,0x282,0x282,0x282,0x282,0x282,0x282,0x282,0x1ab8,0x1ab8,0x1ab8,0x1ab8,
+0x1ab8,0x1ab8,0x1ab8,0x285,0x285,0x1ab8,0x285,0x285,0x1ab8,0x1ab8,0x1ab8,0x1ab8,0x1ab8,0x1ab8,0x1ab8,0x1ab8,
+0x285,0x1ab8,0x1ab8,0x285,0x1ab8,0x1ab8,0x1ab8,0x1ab8,0x1ab8,0x1ab8,0x1ab8,0x1ab8,0x1ab8,0x1ab8,0x1ab8,0x1ab8,
+0x1ab8,0x1ab8,0x1ab8,0x1ab8,0x1aa0,0x1aaf,0x1aaf,0x1aaf,0x1aaf,0x1aaf,0x285,0x1aaf,0x1ab2,0x285,0x285,0x1aa0,
+0x1aa0,0x1ab5,0x1aa6,0x1abb,0x1aaf,0x1abb,0x1aaf,0x1aa3,0x1abe,0x1aa9,0x1abe,0x285,0x285,0x285,0x285,0x285,
+0x285,0x285,0x285,0x285,0x1aac,0x1aac,0x1aac,0x1aac,0x1aac,0x1aac,0x1aac,0x1aac,0x1aac,0x1aac,0x285,0x285,
+0x285,0x285,0x285,0x285,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,
+0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x288,0x288,0x288,0x288,0x288,0x288,
+0x288,0x288,0x288,0x288,0x288,0x288,0x288,0x288,0x288,0x288,0x288,0x288,0x288,0x288,0x288,0x288,
+0x288,0x288,0x288,0x288,0x288,0x288,0x288,0x288,0x288,0x288,0x288,0x288,0x1ac4,0x1ac4,0x1ac4,0x1ac4,
+0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x28e,
+0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x28e,
+0x28e,0x28e,0x28e,0x28e,0x28e,0x28e,0x28e,0x28e,0x28e,0x28e,0x28e,0x28e,0x28e,0x28e,0x28e,0x28e,
+0x28e,0x28e,0x28e,0x28e,0x1ac7,0x1ac7,0x1ac7,0x1ac7,0x1ac7,0x1ac7,0x1ac7,0x1ac7,0x1ac7,0x1ac7,0x28e,0x28e,
+0x28e,0x28e,0x28e,0x28e,0x1afa,0x1afa,0x1afa,0x1afa,0x1afa,0x1afa,0x1afa,0x1afa,0x1afa,0x291,0x291,0x291,
+0x291,0x291,0x291,0x291,0x291,0x291,0x291,0x291,0x291,0x291,0x291,0x291,0x291,0x291,0x291,0x291,
+0x291,0x291,0x291,0x291,0x291,0x291,0x291,0x291,0x291,0x291,0x291,0x291,0x291,0x291,0x291,0x291,
+0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x294,0x1aca,0x1aca,0x1acd,0x294,0x294,
+0x1ad0,0x1ad0,0x294,0x294,0x294,0x294,0x294,0x294,0x294,0x294,0x294,0x294,0x294,0x294,0x294,0x294,
+0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x1b60,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x1b63,0x297,
+0x1b66,0x1b66,0x297,0x297,0x297,0x297,0x297,0x297,0x1b5d,0x1b5d,0x1b5d,0x1b5d,0x1b5d,0x1b5d,0x1b5d,0x1b5d,
+0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,
+0x1b6c,0x1b69,0x1b69,0x29a,0x29a,0x29a,0x29a,0x29a,0x29a,0x29a,0x29a,0x29a,0x29a,0x29a,0x29a,0x29a,
+0x1b6f,0x1b6f,0x1b6f,0x1b6f,0x1b6f,0x1b6f,0x1b6f,0x29d,0x1b6f,0x1b6f,0x1b6f,0x1b6f,0x29d,0x1b6f,0x1b6f,0x29d,
+0x1b6f,0x1b6f,0x1b6f,0x1b6f,0x1b6f,0x1b6f,0x1b6f,0x1b6f,0x1b6f,0x1b6f,0x1b6f,0x1b6f,0x1b6f,0x1b6f,0x1b6f,0x29d,
+0x1b72,0x1b78,0x1b78,0x1b75,0x1b75,0x1b75,0x2a3,0x1b75,0x1b75,0x1b75,0x1b75,0x1b75,0x1b75,0x1b75,0x1b75,0x1b75,
+0x1b75,0x1b75,0x1b75,0x1b75,0x1b75,0x1b75,0x1b75,0x1b75,0x1b75,0x1b75,0x1b75,0x1b75,0x1b75,0x1b75,0x1b75,0x1b75,
+0x1b75,0x2a3,0x1b75,0x1b75,0x1b75,0x1b75,0x1b75,0x1b75,0x1b75,0x1b75,0x1b75,0x2a3,0x2a3,0x2a3,0x2a3,0x2a3,
+0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7e,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,
+0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x2a6,
+0x2a6,0x2a6,0x2a6,0x2a6,0x2a6,0x1bf0,0x1bf0,0x1bf0,0x1bf0,0x1bf0,0x1bf0,0x2a6,0x2a6,0x2a6,0x2a6,0x2a6,
+0x2a6,0x2a6,0x2a6,0x2a6,0x2a6,0x2a6,0x2a6,0x2a6,0x2a6,0x2a6,0x2a6,0x2a6,0x2a6,0x2a6,0x2a6,0x2a6,
+0x2a6,0x2a6,0x2a6,0x2a6,0x2a6,0x2a6,0x2a6,0x2a6,0x2a6,0x2a6,0x2a6,0x2a6,0x1b84,0x1b84,0x1b81,0x1b81,
+0x1b81,0x1b81,0x1b87,0x1b87,0x1b87,0x1b87,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,
+0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x2a9,0x1a9d,0x1a9d,0x1a9d,0x1a9d,
+0x1a9d,0x1a9d,0x1a9d,0x1a9d,0x1a9d,0x1a9d,0x1a9d,0x1a9d,0x1a9d,0x1a9d,0x1a9d,0x1a9d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,
0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,
-0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,
-0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,
-0x18c9,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,
-0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,
-0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x94e,0x94e,
-0x1ae2,0x1ae2,0x1ae2,0x1ae2,0x1ae2,0x1ae2,0x1ae2,0x1ae2,0x1ae2,0x1ae2,0x1ae2,0x2d0,0x2d0,0x2d0,0x2d0,0x2d0,
-0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,
-0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0x128d,0x128d,0x128d,0x2be,0x2be,
-0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,
-0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0x2be,0x2be,0x2be,0x2be,0x2be,0x2be,
+0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x2ac,0x1b8a,0x1b8a,0x1b8a,0x1b8a,
+0x1b8a,0x1b8a,0x1b8a,0x1b8a,0x1b8a,0x1b8a,0x2ac,0x2ac,0x2ac,0x2ac,0x2ac,0x2ac,0x1554,0x1554,0x1554,0x1554,
+0x1554,0x1554,0x1554,0x1554,0x1554,0x1554,0x1554,0x1554,0x1554,0x1554,0x1554,0x1554,0x1b93,0x1b93,0x1b93,0x1b93,
+0x1b93,0x1b93,0x1b93,0x1b93,0x1b93,0x1b93,0x1b93,0x1b93,0x1b93,0x1b93,0x1b90,0x2af,0x2af,0x2af,0x2af,0x2af,
+0x2af,0x2af,0x2af,0x2af,0x2af,0x2af,0x2af,0x2af,0x2af,0x2af,0x2af,0x2af,0x1b9c,0x1b9c,0x1b9c,0x1b9c,
+0x1b9c,0x1b9c,0x1b9c,0x1b9c,0x1b9c,0x1b9c,0x1b9c,0x2b2,0x1b9c,0x1b9c,0x1b9c,0x1b9c,0x1b9c,0x1b9c,0x1b9c,0x2b2,
+0x1b9c,0x1b9c,0x2b2,0x1b99,0x1b99,0x1b99,0x1b99,0x1b99,0x1b99,0x1b99,0x1b99,0x1b99,0x1b99,0x1b99,0x2b2,0x1b99,
+0x1b99,0x1b99,0x1b99,0x1b99,0x1b99,0x1b99,0x1b99,0x1b99,0x1b99,0x1b99,0x1b99,0x1b99,0x1b99,0x1b99,0x2b2,0x1b99,
+0x1b99,0x1b99,0x1b99,0x1b99,0x1b99,0x1b99,0x2b2,0x1b99,0x1b99,0x2b2,0x2b2,0x2b2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,
+0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x2b5,0x2b5,0x1ba2,0x1ba2,0x1ba2,0x1ba2,
+0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x2b5,
+0x2b5,0x2b5,0x2b5,0x2b5,0x2b5,0x2b5,0x2b5,0x2b5,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,
+0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,
+0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,
+0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,0x2b8,0x1bf3,0x1bf3,0x1bf3,0x1bf9,0x1bf9,0x1bf9,0x1bf9,
+0x1bf9,0x1bf9,0x1bf9,0x1bf9,0x1bf9,0x1bf9,0x1bf9,0x1bfc,0x1bfc,0x1bfc,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,
+0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x2bb,0x1bff,0x1bff,0x1bff,0x1bff,
+0x1bff,0x1bff,0x1bff,0x1bff,0x1bff,0x1bff,0x2be,0x2be,0x2be,0x2be,0x2be,0x2be,0x2be,0x2be,0x2be,0x2be,
0x2be,0x2be,0x2be,0x2be,0x2be,0x2be,0x2be,0x2be,0x2be,0x2be,0x2be,0x2be,0x2be,0x2be,0x2be,0x2be,
-0x2be,0x2be,0x2be,0x2be,0x2be,0x2be,0x2be,0x2be,0x2be,0x2be,0x2be,0x2be,0xb76,0xb76,0xb76,0xb76,
-0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,
-0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0x2c1,0x2c1,0x129f,0x129f,0x129f,0x129f,
-0x129f,0x129f,0x129f,0x129f,0x129f,0x129f,0x129f,0x129f,0x129f,0x129f,0x129f,0x129f,0x129f,0x129f,0x129f,0x129f,
-0x129f,0x1b99,0x1b99,0x1b99,0x1b99,0x2c4,0x2c4,0x2c4,0x2c4,0x2c4,0x2c4,0x2c4,0x13b6,0x13b6,0x13b6,0x13b6,
-0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,
-0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x2c7,0x2c7,0x1773,0x1773,0x2ca,0x2ca,
-0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x18c9,0x18c9,0x18c9,0x18c9,
-0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,0x3d8,0x3cc,0x3cc,0x3cc,
-0x3cc,0x3cc,0x3cc,0x3cc,0x3cc,0x3d8,0x3d8,0x3d8,0x3d8,0x3d2,0x110a,0x12e4,0x3db,0x918,0x91b,0x3c9,
-0x3c9,0x1107,0x12e1,0x12e1,0x3de,0x3de,0x3de,0x3de,0x3de,0x3de,0x3de,0x3de,0x1107,0x3cc,0x3cc,0x3d8,
-0xc9c,0x3db,0x3db,0x3db,0x3db,0x3db,0x3db,0x3db,0x3db,0x3db,0x3db,0x3db,0x3db,0x3db,0x3db,0x3db,
-0x3db,0x3db,0x3db,0x3db,0x3db,0x3db,0x3db,0x3db,0x3db,0x3db,0x3db,0x3db,0x3db,0x3db,0x3cc,0x3cc,
-0x8a0,0x8a3,0x936,0x936,0x936,0x936,0x936,0x936,0x936,0x936,0x936,0x936,0x3d5,0xf6c,0xf69,0x12e7,
-0x12e7,0x12e7,0x12e7,0x12e7,0x14a6,0x110d,0x110d,0xebe,0xebe,0xd8f,0xebe,0xebe,0x3db,0x3db,0x3db,0x3db,
-0x3db,0x3db,0x3db,0x3db,0x3db,0x3de,0x3db,0x3db,0x3db,0x3db,0x3db,0x3db,0x3db,0x3de,0x3db,0x3db,
-0x3de,0x3db,0x3db,0x3db,0x3db,0x3db,0x12e1,0x12e4,0x3cf,0x3db,0x3d8,0x3d8,0x47a,0x47a,0x47a,0x47a,
-0x47a,0x47a,0x47a,0x47a,0x47a,0x12ed,0x47a,0x47a,0x47a,0x47a,0x47a,0x47a,0x47a,0x47a,0x47a,0x47a,
-0x47a,0x47a,0x47a,0x47a,0x47a,0x47a,0x12ed,0x1845,0x1845,0xf8a,0x46b,0x474,0x4b6,0x4b6,0x4b6,0x4b6,
-0x4b6,0x4b6,0x4b6,0x4b6,0x4b6,0x4b6,0x4b6,0x4b6,0x4b6,0x4b6,0x4b6,0x4b6,0x4b6,0x4b6,0x4b6,0x4b6,
-0x4b6,0x4b6,0x4b6,0xb91,0xb91,0xd9b,0xd9b,0x8a6,0xd9e,0x13c8,0x13c8,0x13c8,0x4b9,0x4b9,0x4b9,0x4b9,
-0x4b9,0x4b9,0x4b9,0x4b9,0x4b9,0x4b9,0x4b9,0x4b9,0x4b9,0x4b9,0x4b9,0x4b9,0x4b9,0x4b9,0x4b9,0x4b9,
-0x4b9,0x4b9,0x4b9,0x4b9,0x4b9,0x4b9,0x4b9,0x4b9,0x4b9,0x4b9,0x4b9,0x4b9,0x4bf,0x4bf,0x4bf,0x1122,
-0x1122,0x1122,0x1122,0x1122,0x4bc,0x4bc,0x4bc,0x4bc,0x4bc,0x4bc,0x4bc,0x4bc,0x4bc,0x4bc,0x4bc,0x4bc,
-0x4bc,0x4bc,0x4bc,0x4bc,0x4bc,0x4bc,0x4bc,0x4bc,0x4bc,0x4bc,0x4bc,0x4bc,0x4bc,0x4bc,0x4bc,0x4bc,
-0x4bc,0x4bc,0x4bc,0x4bc,0x4bc,0x4bc,0x111f,0x111f,0x111f,0x111f,0x111f,0x111f,0x4c2,0x4bf,0x4bf,0x4bf,
-0x4bf,0x4bf,0x4bf,0x4bf,0x4bf,0x4bf,0x4bf,0x4bf,0x4bf,0x4bf,0x4bf,0x4bf,0x4bf,0x4bf,0x4bf,0x4bf,
-0x4bf,0x4bf,0x4bf,0x4bf,0x4bf,0x4bf,0x4bf,0x4bf,0x4bf,0x4bf,0x4bf,0x4bf,0x4bf,0x4bf,0x4bf,0x4bf,
-0x4cb,0x4c5,0x4cb,0x4c5,0x4cb,0x4c5,0x4cb,0x4c5,0x4cb,0x4c5,0x4cb,0x4c5,0x4cb,0x4c5,0x4cb,0x4c5,
-0x4cb,0x4c5,0x4cb,0x4c5,0x4cb,0x4c5,0x4cb,0x4c5,0x4cb,0x4c5,0x4cb,0x4c5,0x4cb,0x4c5,0x4cb,0x4c5,
-0x4cb,0x4c5,0x4c5,0x4c5,0x4c5,0x4c5,0x4c8,0x990,0xfb7,0xfb7,0xfba,0xfb7,0x4cb,0x4c5,0x4cb,0x4c5,
-0x4cb,0x4c5,0x4cb,0x4c5,0x4cb,0x4c5,0x4cb,0x4c5,0x4cb,0x4c5,0x4cb,0x4c5,0x4cb,0x4c5,0x4cb,0x4c5,
-0x4cb,0x4c5,0x4cb,0x4c5,0x4cb,0x4c5,0xfba,0xfb7,0xfba,0xfb7,0xfba,0xfb7,0x4d7,0x4d7,0x4d7,0x4d7,
-0x4d7,0x4d7,0x4d7,0x4d7,0x4da,0x4da,0x4da,0x4da,0x4da,0x4da,0x4da,0x4da,0x4d7,0x4d7,0x4d7,0x4d7,
-0x4d7,0x4d7,0x4d7,0x4d7,0x4da,0x4da,0x4da,0x4da,0x4da,0x4da,0x4da,0x4da,0x696,0x696,0x699,0x4f5,
-0x6a5,0x6a2,0x6a2,0x69f,0x51f,0x51f,0x4dd,0x4dd,0x4dd,0x4dd,0x4dd,0xb22,0x6a8,0x501,0x6c0,0x6c3,
-0x516,0x6a8,0x504,0x504,0x4f5,0x510,0x510,0x696,0x51c,0x519,0x69c,0x4ef,0x4e6,0x4e6,0x4e9,0x4e9,
-0x4e9,0x4e9,0x4e9,0x4ec,0x4e9,0x4e9,0x4e9,0x4e0,0x528,0x525,0x522,0x522,0x6b4,0x50a,0x507,0x6b1,
-0x6ae,0x6ab,0x6bd,0x4f8,0x6ba,0x6ba,0x50d,0x510,0x6b7,0x6b7,0x50d,0x510,0x4f2,0x4f5,0x4f5,0x4f5,
-0x513,0x4fe,0x4fb,0xba6,0xac5,0xac5,0xac2,0xac2,0xac2,0xac2,0xb9d,0xb9d,0xb9d,0xb9d,0xba3,0xcc9,
-0xcc6,0xdaa,0xdad,0xba0,0xdad,0xdad,0xdad,0xdad,0xdaa,0xdad,0xdad,0xb9a,0x54c,0x54c,0x54c,0x54c,
-0x54c,0x54c,0x54c,0x549,0x54f,0x72f,0x54c,0x993,0x9b4,0xac8,0xac8,0xac8,0xbac,0xbac,0xdb3,0xdb3,
-0xdb3,0xdb3,0x112b,0x112e,0x112e,0x1302,0x1494,0x14be,0x14c1,0x14c1,0x16c8,0x1848,0x55b,0x55b,0x573,0x6d5,
-0x558,0x6cf,0x55b,0x570,0x558,0x6d5,0x56a,0x573,0x573,0x573,0x56a,0x56a,0x573,0x573,0x573,0x6db,
-0x558,0x573,0x6d8,0x558,0x567,0x573,0x573,0x573,0x573,0x573,0x558,0x558,0x55e,0x6cf,0x6d2,0x558,
-0x573,0x558,0x6de,0x558,0x573,0x561,0x579,0x6e1,0x573,0x573,0x564,0x56a,0x573,0x573,0x576,0x573,
-0x56a,0x56d,0x56d,0x56d,0x56d,0xad1,0xace,0xccc,0xdbc,0xbc1,0xbc4,0xbc4,0xbbe,0xbbb,0xbbb,0xbbb,
-0xbbb,0xbc4,0xbc1,0xbc1,0xbc1,0xbc1,0xbb8,0xbbb,0xdb9,0xeca,0xecd,0xfc0,0x1131,0x1131,0x1131,0x6e7,
-0x6e4,0x57c,0x57f,0x57f,0x57f,0x57f,0x57f,0x6e4,0x6e7,0x6e7,0x6e4,0x57f,0x6ed,0x6ed,0x6ed,0x6ed,
-0x6ed,0x6ed,0x6ed,0x6ed,0x6ed,0x6ed,0x6ed,0x6ed,0x588,0x588,0x588,0x588,0x6ea,0x6ea,0x6ea,0x6ea,
-0x6ea,0x6ea,0x6ea,0x6ea,0x6ea,0x6ea,0x582,0x582,0x582,0x582,0x582,0x582,0x58e,0x58e,0x58e,0x58e,
-0x58e,0x58e,0x58e,0x58e,0x58b,0x58e,0x58e,0x58e,0x58e,0x58e,0x591,0x58b,0x58e,0x58e,0x58b,0x58b,
-0x58b,0x58b,0x58e,0x58e,0x6f0,0x6f0,0x58b,0x58b,0x58e,0x58e,0x58e,0x58e,0x58e,0x58e,0x58e,0x58e,
-0x58e,0x58e,0x58e,0x58e,0x58e,0x591,0x591,0x591,0x58e,0x58e,0x6f3,0x58e,0x6f3,0x58e,0x58e,0x58e,
-0x58e,0x58e,0x58e,0x58e,0x58b,0x58e,0x58b,0x58b,0x58b,0x58b,0x58b,0x58b,0x58e,0x58e,0x58b,0x6f0,
-0x58b,0x58b,0x58b,0xad7,0xad7,0xad7,0xad7,0xad7,0xad7,0xad7,0xad7,0xad7,0xbc7,0xbc7,0xbc7,0xbc7,
-0xbc7,0xbc7,0xbc7,0xbc7,0xbc7,0xbc7,0xbc7,0xbc7,0x6f6,0x594,0x6f6,0x6f6,0x597,0x594,0x594,0x6f6,
-0x6f6,0x597,0x594,0x6f6,0x597,0x594,0x594,0x6f6,0x594,0x6f6,0x5a3,0x5a0,0x594,0x6f6,0x594,0x594,
-0x594,0x594,0x6f6,0x594,0x594,0x6f6,0x6f6,0x6f6,0x6f6,0x594,0x594,0x6f6,0x597,0x6f6,0x597,0x6f6,
-0x6f6,0x6f6,0x6f6,0x6f6,0x6fc,0x59a,0x6f6,0x59a,0x59a,0x594,0x594,0x594,0x6f6,0x6f6,0x6f6,0x6f6,
-0x594,0x594,0x594,0x594,0x6f6,0x6f6,0x594,0x594,0x594,0x597,0x594,0x594,0x597,0x594,0x594,0x597,
-0x6f6,0x597,0x594,0x594,0x6f6,0x594,0x594,0x594,0x594,0x594,0x6f6,0x594,0x594,0x594,0x594,0x594,
-0x594,0x594,0x594,0x594,0x594,0x594,0x594,0x594,0x6f9,0x6f6,0x597,0x594,0x6f6,0x6f6,0x6f6,0x6f6,
-0x594,0x594,0x6f6,0x6f6,0x594,0x597,0x6f9,0x6f9,0x597,0x597,0x594,0x594,0x597,0x597,0x594,0x594,
-0x597,0x597,0x594,0x594,0x594,0x594,0x594,0x594,0x597,0x597,0x6f6,0x6f6,0x597,0x597,0x6f6,0x6f6,
-0x597,0x597,0x594,0x594,0x594,0x594,0x594,0x594,0x594,0x594,0x594,0x594,0x594,0x6f6,0x594,0x594,
-0x594,0x6f6,0x594,0x594,0x594,0x594,0x594,0x594,0x594,0x6f6,0x594,0x594,0x594,0x594,0x594,0x594,
-0x597,0x597,0x597,0x597,0x594,0x594,0x594,0x594,0x594,0x594,0x594,0x594,0x594,0x594,0x594,0x594,
-0x594,0x594,0x594,0x6f6,0x594,0x594,0x594,0x594,0x594,0x594,0x594,0x594,0x594,0x594,0x594,0x594,
-0x594,0x594,0x594,0x594,0x594,0x594,0x594,0x594,0x594,0x594,0x594,0x594,0x594,0x594,0x594,0x594,
-0x594,0x594,0x594,0x594,0x597,0x597,0x597,0x597,0x594,0x594,0x594,0x594,0x594,0x594,0x597,0x597,
-0x597,0x597,0x594,0x59d,0x594,0x594,0xbca,0xbca,0xbca,0xbca,0xbca,0xbca,0xbca,0xbca,0xbca,0xbca,
-0xbca,0xbca,0xbca,0xbca,0x5a6,0xada,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x5af,0x5ac,0x5af,0x5ac,
-0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x6ff,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x7ef,0x7ef,
-0x5a6,0x5a6,0x5a6,0x5a6,0x5a9,0x5a9,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x7f5,0x7f2,0x5a6,
-0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,
-0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,0x5a6,
-0x5a6,0x5a6,0x5a6,0xada,0xbd0,0xada,0xada,0xada,0x5b2,0x5b2,0x5b2,0x5b2,0x5b2,0x5b2,0x5b2,0x5b2,
-0x5b2,0x5b2,0x5b2,0x5b2,0x5b2,0x5b2,0x5b2,0x5b2,0x5b2,0x5b2,0x5b2,0x5b2,0x5b2,0x5b2,0x5b2,0x5b2,
-0x5b2,0x5b2,0x5b2,0x5b2,0x5b2,0x5b2,0x5b2,0x5b2,0x708,0x708,0x708,0x708,0x708,0x708,0x708,0x708,
-0x708,0x708,0x5b8,0xc2d,0xc2d,0xc2d,0xc2d,0xc2d,0xc2d,0xc2d,0xc2d,0xc2d,0xc2d,0xc2d,0xc2d,0xc2d,
-0xc2d,0xc2d,0xc2d,0xc2d,0xc2d,0xc2d,0xc2d,0xd3b,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,
-0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x5bb,0x5be,0x5be,0x5be,
-0x5be,0x5be,0x5be,0x5be,0x5be,0x5be,0x5be,0x5be,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,
-0x70e,0x70e,0x70e,0x70e,0x5be,0x5be,0x5be,0x5be,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,
-0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x711,0x711,0x711,0x711,0x711,0x711,0x711,0x711,
-0x711,0x711,0x711,0x711,0x711,0x711,0x711,0x711,0x5c1,0x5c1,0x711,0x711,0x711,0x711,0xbd3,0xbd3,
-0xbd3,0xbd3,0xbd3,0xbd3,0xbd3,0xbd3,0xbd3,0xbd3,0x717,0x717,0x5c4,0x714,0x714,0x714,0x714,0x714,
-0x714,0x714,0x5c4,0x5c4,0x5c4,0x5c4,0x5c7,0x5c7,0x5c7,0x5c7,0x717,0x717,0x5c7,0x5c7,0x717,0x717,
-0x5c4,0x5c4,0x5c4,0x5c4,0x717,0x717,0x5c7,0x5c7,0x717,0x717,0x5c4,0x5c4,0x5c4,0x5c4,0x717,0x717,
-0x714,0x5c4,0x5c7,0x717,0x5c4,0x5c4,0x714,0x717,0x717,0x717,0x5c7,0x5c7,0x5c4,0x5c4,0x5c4,0x5c4,
-0x5c4,0x5c4,0x5c4,0x5c4,0x5c4,0x5c4,0x5c4,0x5c4,0x5c4,0x5c4,0x717,0x714,0x717,0x714,0x5c4,0x5c7,
-0x5c7,0x5c7,0x5c7,0x5c7,0x5c7,0x5c4,0x5c4,0x714,0xae0,0xae0,0xae0,0xae0,0xae0,0xae0,0xae0,0xae0,
-0xbd6,0xbd6,0xbd6,0xbd6,0xbd6,0xc45,0xc45,0xbd6,0x5cd,0x5cd,0x5cd,0x5cd,0x5ca,0x720,0x720,0x5ca,
-0x5ca,0x71a,0x5ca,0x5ca,0x5ca,0x5ca,0x71a,0x71a,0x5ca,0x5ca,0x5ca,0x5ca,0xd44,0xd44,0xbd9,0xbd9,
-0xdc5,0xae3,0x5cd,0x5cd,0x71d,0x5d0,0x71d,0x5cd,0x5ca,0x5ca,0x5ca,0x5ca,0x5ca,0x5ca,0x5ca,0x5ca,
-0x5ca,0x5ca,0x5ca,0x5ca,0x5ca,0x5ca,0x5ca,0x5ca,0x5ca,0x5ca,0x5ca,0x5ca,0x5ca,0x5ca,0x5ca,0x5ca,
-0x5ca,0x5cd,0x5cd,0x5cd,0x5ca,0x5ca,0x5ca,0x5ca,0x720,0x5ca,0x720,0x5ca,0x5ca,0x5ca,0x5ca,0x5ca,
-0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,0x5ca,0x5ca,0x5ca,0x5ca,
-0x5ca,0x5ca,0x5ca,0x5ca,0x5ca,0x5ca,0x5ca,0x5ca,0x720,0x720,0x5d3,0x720,0x71a,0x71a,0x5ca,0x71a,
-0x71d,0x71a,0x71a,0x5ca,0x71a,0x720,0x5d3,0x720,0xae3,0xae3,0xbdc,0xbdc,0xbdc,0xbdc,0xbdc,0xbdc,
-0xbdc,0xbdc,0xbdc,0xbdc,0xbdc,0xbdc,0xdc2,0xe79,0x5d6,0x5d6,0x5d6,0x5d6,0x5d6,0x5d6,0x5d6,0x5d6,
-0x5d6,0x5d6,0x5d6,0x5d6,0x5d6,0x5d6,0x5d6,0x5d6,0x5d6,0x5d6,0x5d6,0x5d6,0x5d9,0x1389,0x1389,0x1389,
-0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x14c7,0x5df,0x5df,0x5df,0x5df,0x1389,0x5d9,0x5d9,
-0x5df,0x5df,0x138c,0x138c,0x5e5,0x5e5,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,
-0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x1389,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,
-0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x726,0x5d9,0x5d9,
-0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x1389,0x5d9,0x1389,0x5d9,
-0x5d9,0x5d9,0x5d9,0x1389,0x1389,0x1389,0x5d9,0x1287,0x5d9,0x5d9,0x5d9,0x5e2,0x5e2,0x5e2,0x5e2,0x130e,
-0x130e,0x5d9,0x5dc,0x5dc,0x5df,0x5d9,0x5d9,0x5d9,0xbe2,0xbdf,0xbe2,0xbdf,0xbe2,0xbdf,0xbe2,0xbdf,
-0xbe2,0xbdf,0xbe2,0xbdf,0xbe2,0xbdf,0x723,0x723,0x723,0x723,0x723,0x723,0x723,0x723,0x723,0x723,
-0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,
-0x1389,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x1389,
-0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,
-0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x5fd,0x5fd,
-0x5fd,0x5fd,0x5fd,0x5fd,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
-0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
-0xb34,0xb34,0xb34,0xb34,0xb34,0xb34,0xb34,0xb34,0xb34,0xb34,0xb34,0xb34,0xb34,0xb34,0xb34,0xb34,
-0x606,0x606,0x94b,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x5fd,0x5fd,0xbe5,0xd68,0x1afd,0x1afd,
-0x603,0x609,0x606,0x600,0x603,0x609,0x606,0x600,0x603,0x609,0x606,0x600,0x603,0x609,0x606,0x600,
-0x603,0x609,0x606,0x600,0x603,0x609,0x606,0x600,0x603,0x609,0x606,0x600,0x603,0x609,0x606,0x600,
-0x606,0x600,0x606,0x600,0x606,0x600,0x606,0x600,0x606,0x600,0x606,0x600,0x603,0x609,0x606,0x600,
-0x603,0x609,0x606,0x600,0x603,0x609,0x606,0x600,0x603,0x609,0x606,0x600,0x606,0x600,0x603,0x609,
-0x606,0x600,0x606,0x600,0x603,0x609,0x606,0x600,0x603,0x609,0x606,0x600,0x606,0x600,0x1311,0x1311,
-0x1311,0x1311,0x1311,0x1311,0x1311,0x1311,0x1311,0x1311,0x1311,0x1311,0x1311,0x1311,0x606,0x600,0x606,0x600,
-0x606,0x600,0x603,0x609,0x603,0x609,0x606,0x600,0x606,0x600,0x606,0x600,0x606,0x600,0x606,0x600,
-0x606,0x600,0x606,0x600,0x603,0x606,0x600,0x603,0x606,0x600,0x603,0x609,0x600,0x600,0x600,0x600,
-0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
-0x600,0x600,0x600,0x603,0x603,0x603,0x603,0x603,0x603,0x603,0x603,0x603,0x606,0x606,0x606,0x606,
-0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x600,0x600,0x600,
-0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x603,0x603,0x603,
-0x603,0x603,0x603,0x603,0x609,0x609,0x609,0x609,0x609,0x609,0x609,0x609,0x600,0x606,0x909,0x90c,
-0x1afd,0x1afd,0x1afd,0x1afd,0x1afd,0x1afd,0x1afd,0x1afd,0x1afd,0x1afd,0x1afd,0x1afd,0x1afd,0x1afd,0x1afd,0x1afd,
-0x603,0x600,0x603,0x603,0x603,0x603,0x603,0x603,0x600,0x603,0x600,0x600,0x603,0x603,0x600,0x600,
-0x603,0x603,0x600,0x603,0x600,0x603,0x600,0x600,0x603,0x600,0x600,0x603,0x600,0x603,0x600,0x600,
-0x603,0x600,0x603,0x603,0x600,0x600,0x600,0x603,0x600,0x600,0x600,0x600,0x600,0x603,0x600,0x600,
-0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,0x600,
-0x600,0x600,0x600,0x600,0x603,0x603,0x600,0x600,0x603,0x600,0x603,0x600,0x600,0x600,0x600,0x600,
-0x603,0x603,0x603,0x603,0x603,0x603,0x603,0x603,0x603,0x603,0x603,0x603,0x603,0x603,0x603,0x603,
-0x603,0x603,0x603,0x603,0x603,0x603,0x603,0x603,0x603,0x603,0x603,0x603,0x603,0x603,0x603,0x603,
-0x603,0x603,0x603,0x609,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,
-0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,0x606,
-0x606,0x606,0x606,0x606,0x609,0x609,0x609,0x609,0x609,0x609,0x609,0x609,0x609,0x609,0x609,0x609,
-0x609,0x609,0x609,0x609,0x609,0x609,0x609,0x609,0x609,0x606,0x606,0x606,0x606,0x606,0x606,0x606,
-0x606,0x606,0x606,0x606,0x60c,0x60c,0x60c,0x60c,0xfcc,0xfcc,0xfcc,0x14ca,0x14ca,0x14ca,0x14ca,0x14ca,
-0x14ca,0x14ca,0x16ce,0x16ce,0x855,0x85b,0x85b,0x867,0x867,0x858,0x84f,0x858,0x84f,0x858,0x84f,0x858,
-0x84f,0x858,0x84f,0x858,0x61b,0x61b,0x615,0x61b,0x615,0x61b,0x615,0x61b,0x615,0x61b,0x615,0x618,
-0x61e,0x61b,0x615,0x61b,0x615,0x618,0x61e,0x61b,0x615,0x61b,0x615,0x618,0x61e,0x61b,0x615,0x618,
-0x61e,0x61b,0x615,0x618,0x61e,0x61b,0x615,0x61b,0x615,0x61b,0x615,0x61b,0x615,0x61b,0x615,0x618,
-0x61e,0x61b,0x615,0x618,0x61e,0x61b,0x615,0x618,0x61e,0x61b,0x615,0x618,0x61e,0x61b,0x615,0x618,
-0x61e,0x61b,0x615,0x618,0x61e,0x61b,0x615,0x618,0x61e,0x61b,0x615,0x618,0x61e,0x61b,0x615,0x618,
-0x705,0x705,0x705,0x705,0x705,0x705,0x705,0x705,0x705,0x705,0x705,0x705,0x705,0x705,0x705,0x705,
-0x705,0x705,0x705,0x705,0x702,0x702,0x702,0x702,0x702,0x702,0x702,0x702,0x702,0x702,0x702,0x702,
-0x702,0x702,0x702,0x702,0x702,0x702,0x702,0x702,0x702,0x702,0x702,0x702,0x702,0x702,0x702,0x702,
-0x702,0x702,0x702,0x702,0x702,0x702,0x70b,0x70b,0x70b,0x70b,0x70b,0x70b,0x70b,0x70b,0x70b,0x70b,
-0x70b,0x70b,0x70b,0x70b,0x70b,0x70b,0x70b,0x70b,0x708,0x708,0x708,0x708,0x708,0x708,0x708,0x708,
-0x708,0x708,0x708,0x708,0x708,0x708,0x708,0x708,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,
-0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,
-0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x70e,0x729,0x729,0x729,0x729,0x729,0x729,0x729,0x729,
-0x729,0x729,0x729,0x729,0x729,0x729,0x729,0x729,0x729,0x729,0x729,0x729,0x729,0x729,0x729,0x729,
-0x729,0x729,0x729,0x729,0x729,0x729,0x729,0x729,0xc33,0x8b8,0x8b2,0x8af,0x8b5,0x8ac,0x73e,0x741,
-0x741,0x741,0x741,0x741,0x741,0x741,0x741,0x741,0x8be,0x73e,0x73e,0x73e,0x73e,0x73e,0x73e,0x73e,
-0x73e,0x73e,0x73e,0x73e,0x73e,0x73e,0x73e,0x73e,0x73e,0x73e,0x73e,0x73e,0x73e,0x73e,0x73e,0x73e,
-0x73e,0x73e,0x73e,0x73e,0x73e,0x73e,0x73e,0x73e,0x73e,0x73e,0x8bb,0x8bb,0x744,0x8cd,0x8d0,0x8d6,
-0x7fb,0x807,0x8eb,0x804,0x8c4,0x8c1,0x8c4,0x8c1,0x8ca,0x8c7,0x8ca,0x8c7,0x8c4,0x8c1,0x801,0x8d6,
-0x8c4,0x8c1,0x8c4,0x8c1,0x8c4,0x8c1,0x8c4,0x8c1,0x8dc,0x8e2,0x8df,0x8df,0x74a,0x786,0x786,0x786,
-0x786,0x786,0x786,0x780,0x780,0x780,0x780,0x780,0x780,0x780,0x780,0x780,0x780,0x780,0x780,0x780,
-0x780,0x780,0x780,0x780,0x780,0x780,0x780,0x74d,0x768,0x747,0x76e,0x771,0x76b,0x783,0x783,0x783,
-0x783,0x783,0x783,0x77d,0x77d,0x77d,0x77d,0x77d,0x77d,0x77d,0x77d,0x77d,0x77d,0x77d,0x77d,0x77d,
-0x77d,0x77d,0x77d,0x77d,0x77d,0x77d,0x77d,0x74d,0x768,0x747,0x768,0xc36,0x7e9,0x7e9,0x7e9,0x7e9,
-0x7e9,0x7e9,0x7e9,0x7e9,0x7e9,0x7e9,0x7e9,0x7e9,0x7e9,0x7e9,0x7e9,0x7e9,0x7e9,0x7e9,0x7e9,0x7e9,
-0x7e9,0x7e9,0x7e9,0x7e9,0x7e9,0x7e9,0x7e9,0x7e9,0x7e9,0x7e9,0x7e9,0x7e9,0x7e9,0x7e9,0x1281,0x1281,
-0x1281,0x1281,0x1281,0x7ec,0x801,0x804,0x804,0x804,0x804,0x804,0x804,0x804,0x804,0x804,0x924,0x924,
-0x924,0x924,0x80a,0x80a,0x8d9,0x8e8,0x8e8,0x8e8,0x8e8,0x8e5,0x7fe,0x8d3,0xb07,0xb07,0xb07,0xc48,
-0xc66,0xc63,0xb25,0x8a9,0x810,0x80d,0x810,0x813,0x80d,0x810,0x80d,0x810,0x80d,0x810,0x80d,0x80d,
-0x80d,0x80d,0x80d,0x80d,0x810,0x810,0x80d,0x810,0x810,0x80d,0x810,0x810,0x80d,0x810,0x810,0x80d,
-0x810,0x810,0x80d,0x80d,0xc69,0x822,0x81c,0x822,0x81c,0x822,0x81c,0x822,0x81c,0x822,0x81c,0x81c,
-0x81f,0x81c,0x81f,0x81c,0x81f,0x81c,0x81f,0x81c,0x81f,0x81c,0x81f,0x81c,0x81f,0x81c,0x81f,0x81c,
-0x81f,0x81c,0x81f,0x81c,0x81f,0x81c,0x81f,0x822,0x81c,0x81f,0x81c,0x81f,0x81c,0x81f,0x81c,0x81c,
-0x81c,0x81c,0x81c,0x81c,0x81f,0x81f,0x81c,0x81f,0x81f,0x81c,0x81f,0x81f,0x81c,0x81f,0x81f,0x81c,
-0x81f,0x81f,0x81c,0x81c,0x81c,0x81c,0x81c,0x822,0x81c,0x822,0x81c,0x822,0x81c,0x81c,0x81c,0x81c,
-0x81c,0x81c,0x822,0x81c,0x81c,0x81c,0x81c,0x81c,0x81f,0x822,0x822,0x81f,0x81f,0x81f,0x81f,0x8f1,
-0x8f4,0x825,0x828,0xc51,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,
-0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,
-0x82e,0x82e,0x82e,0x82e,0x831,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,
-0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,0x82e,
-0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,
-0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0x83a,0xd4d,0xd4d,0xe7c,0x834,
-0x8fd,0x8fd,0x8fd,0x8fd,0x8fd,0x8fd,0x8fd,0x8fd,0x8fd,0x8fd,0x8fd,0x8fd,0xd47,0xd47,0xd47,0xd47,
+0x2be,0x2be,0x2be,0x2be,0x2be,0x2be,0x2be,0x2be,0x1c02,0x1c02,0x1c02,0x1c02,0x1c02,0x1c02,0x1c02,0x1c02,
+0x1c02,0x1c02,0x1c02,0x1c02,0x1c02,0x1c02,0x1c02,0x1c02,0x1c02,0x1c02,0x1c02,0x1c02,0x2c1,0x2c1,0x2c1,0x2c1,
+0x2c1,0x2c1,0x2c1,0x2c1,0x2c1,0x2c1,0x2c1,0x2c1,0x1c05,0x1c05,0x1c1a,0x1c11,0x1c17,0x1c17,0x1c17,0x1c17,
+0x1c17,0x1c17,0x1c17,0x1c17,0x1c17,0x1c17,0x1c17,0x1c17,0x1c17,0x2c4,0x1c17,0x1c17,0x1c17,0x1c17,0x1c17,0x1c17,
+0x1c17,0x1c17,0x1c17,0x1c17,0x1c17,0x1c17,0x1c17,0x1c17,0x1c17,0x1c17,0x1c17,0x1c17,0x1c17,0x1c17,0x1c17,0x1c17,
+0x1c11,0x1c11,0x1c05,0x1c05,0x1c05,0x1c05,0x1c05,0x2c4,0x2c4,0x2c4,0x1c11,0x1c11,0x1c05,0x1c14,0x1c08,0x1c1d,
+0x1c1d,0x1c0b,0x1c0b,0x1c0b,0x1c0b,0x1c0b,0x1c0b,0x1c0b,0x1c0b,0x1c0b,0x1c0b,0x1c0b,0x1c0e,0x1c0e,0x1c0e,0x1c0e,
+0x1c0e,0x1c0e,0x1c0e,0x1c0e,0x1c0e,0x1c0e,0x2c4,0x2c4,0x2c4,0x2c4,0x2c4,0x2c4,0x1c26,0x1c26,0x1c26,0x1c26,
+0x1c26,0x1c26,0x1c26,0x1c26,0x1c26,0x1c26,0x1c26,0x1c26,0x1c20,0x1c20,0x1c20,0x1c20,0x1c23,0x1c23,0x1c23,0x1c23,
+0x1c23,0x1c23,0x1c23,0x1c23,0x1c23,0x1c23,0x2c7,0x2c7,0x2c7,0x2c7,0x2c7,0x2c7,0x2ca,0x2ca,0x2ca,0x2ca,
+0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,
+0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x18db,0x2dc,0x2dc,0x2dc,
+0x2dc,0x2dc,0x2dc,0x2dc,0x2dc,0x2dc,0x2dc,0x2dc,0x2dc,0x2dc,0x2dc,0x2dc,0x2ca,0x2ca,0x2ca,0x2ca,
+0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,
+0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x95d,0x95d,0x1c3b,0x1c3b,0x1c3b,0x1c3b,
+0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x2ca,0x2ca,0x2ca,0x2ca,
+0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0x2ca,0xc6c,0xc6c,0xc6c,0xc6c,
+0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,0x129f,0x129f,0x129f,0x2cd,0x2cd,0xe94,0xe94,0xe94,0xe94,
+0xe94,0xe94,0xe94,0xe94,0xe94,0xe94,0xe94,0xe94,0xe94,0xe94,0xe94,0xe94,0xe94,0xe94,0xe94,0xe94,
+0xe94,0xe94,0xe94,0xe94,0xe94,0xe94,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,
+0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,
+0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0x2cd,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,
+0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,
+0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0x2d0,0x2d0,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,
+0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x1bab,0x1bab,0x1bab,
+0x1bab,0x1c29,0x2d3,0x2d3,0x2d3,0x2d3,0x2d3,0x2d3,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,
+0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,
+0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x2d6,0x2d6,0x1785,0x1785,0x2d9,0x2d9,0x2d9,0x2d9,0x2d9,0x2d9,
+0x2d9,0x2d9,0x2d9,0x2d9,0x2d9,0x2d9,0x2d9,0x2d9,0x18db,0x18db,0x18db,0x18db,0x18db,0x18db,0x18db,0x18db,
+0x18db,0x18db,0x18db,0x18db,0x18db,0x18db,0x18db,0x18db,0x1af4,0x1af4,0x1af4,0x1af4,0x1af4,0x1af4,0x1af4,0x1af4,
+0x1af4,0x1af4,0x1af4,0x2df,0x2df,0x2df,0x2df,0x2df,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,
+0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x3e7,0x3db,0x3db,0x3db,0x3db,0x3db,0x3db,0x3db,
+0x3db,0x3e7,0x3e7,0x3e7,0x3e7,0x3e1,0x111f,0x12f6,0x3ea,0x927,0x92a,0x3d8,0x3d8,0x111c,0x12f3,0x12f3,
+0x3ed,0x3ed,0x3ed,0x3ed,0x3ed,0x3ed,0x3ed,0x3ed,0x111c,0x3db,0x3db,0x3e7,0xcae,0x3ea,0x3ea,0x3ea,
+0x3ea,0x3ea,0x3ea,0x3ea,0x3ea,0x3ea,0x3ea,0x3ea,0x3ea,0x3ea,0x3ea,0x3ea,0x3ea,0x3ea,0x3ea,0x3ea,
+0x3ea,0x3ea,0x3ea,0x3ea,0x3ea,0x3ea,0x3ea,0x3ea,0x3ea,0x3ea,0x3db,0x3db,0x8af,0x8b2,0x945,0x945,
+0x945,0x945,0x945,0x945,0x945,0x945,0x945,0x945,0x3e4,0xf7e,0xf7b,0x12f9,0x12f9,0x12f9,0x12f9,0x12f9,
+0x14b8,0x1122,0x1122,0xed0,0xed0,0xda1,0xed0,0xed0,0x3ea,0x3ea,0x3ea,0x3ea,0x3ea,0x3ea,0x3ea,0x3ea,
+0x3ea,0x3ed,0x3ea,0x3ea,0x3ea,0x3ea,0x3ea,0x3ea,0x3ea,0x3ed,0x3ea,0x3ea,0x3ed,0x3ea,0x3ea,0x3ea,
+0x3ea,0x3ea,0x12f3,0x12f6,0x3de,0x3ea,0x3e7,0x3e7,0x489,0x489,0x489,0x489,0x489,0x489,0x489,0x489,
+0x489,0x12ff,0x489,0x489,0x489,0x489,0x489,0x489,0x489,0x489,0x489,0x489,0x489,0x489,0x489,0x489,
+0x489,0x489,0x12ff,0x1857,0x1857,0xf9c,0x47a,0x483,0x4c5,0x4c5,0x4c5,0x4c5,0x4c5,0x4c5,0x4c5,0x4c5,
+0x4c5,0x4c5,0x4c5,0x4c5,0x4c5,0x4c5,0x4c5,0x4c5,0x4c5,0x4c5,0x4c5,0x4c5,0x4c5,0x4c5,0x4c5,0xba3,
+0xba3,0xdb0,0xdb0,0x8b5,0xdad,0x13da,0x13da,0x13da,0x4c8,0x4c8,0x4c8,0x4c8,0x4c8,0x4c8,0x4c8,0x4c8,
+0x4c8,0x4c8,0x4c8,0x4c8,0x4c8,0x4c8,0x4c8,0x4c8,0x4c8,0x4c8,0x4c8,0x4c8,0x4c8,0x4c8,0x4c8,0x4c8,
+0x4c8,0x4c8,0x4c8,0x4c8,0x4c8,0x4c8,0x4c8,0x4c8,0x4ce,0x4ce,0x4ce,0x1137,0x1137,0x1137,0x1137,0x1137,
+0x4cb,0x4cb,0x4cb,0x4cb,0x4cb,0x4cb,0x4cb,0x4cb,0x4cb,0x4cb,0x4cb,0x4cb,0x4cb,0x4cb,0x4cb,0x4cb,
+0x4cb,0x4cb,0x4cb,0x4cb,0x4cb,0x4cb,0x4cb,0x4cb,0x4cb,0x4cb,0x4cb,0x4cb,0x4cb,0x4cb,0x4cb,0x4cb,
+0x4cb,0x4cb,0x1134,0x1134,0x1134,0x1134,0x1134,0x1134,0x4d1,0x4ce,0x4ce,0x4ce,0x4ce,0x4ce,0x4ce,0x4ce,
+0x4ce,0x4ce,0x4ce,0x4ce,0x4ce,0x4ce,0x4ce,0x4ce,0x4ce,0x4ce,0x4ce,0x4ce,0x4ce,0x4ce,0x4ce,0x4ce,
+0x4ce,0x4ce,0x4ce,0x4ce,0x4ce,0x4ce,0x4ce,0x4ce,0x4ce,0x4ce,0x4ce,0x4ce,0x4da,0x4d4,0x4da,0x4d4,
+0x4da,0x4d4,0x4da,0x4d4,0x4da,0x4d4,0x4da,0x4d4,0x4da,0x4d4,0x4da,0x4d4,0x4da,0x4d4,0x4da,0x4d4,
+0x4da,0x4d4,0x4da,0x4d4,0x4da,0x4d4,0x4da,0x4d4,0x4da,0x4d4,0x4da,0x4d4,0x4da,0x4d4,0x4d4,0x4d4,
+0x4d4,0x4d4,0x4d7,0x9a2,0xfc9,0xfc9,0xfcc,0xfc9,0x4da,0x4d4,0x4da,0x4d4,0x4da,0x4d4,0x4da,0x4d4,
+0x4da,0x4d4,0x4da,0x4d4,0x4da,0x4d4,0x4da,0x4d4,0x4da,0x4d4,0x4da,0x4d4,0x4da,0x4d4,0x4da,0x4d4,
+0x4da,0x4d4,0xfcc,0xfc9,0xfcc,0xfc9,0xfcc,0xfc9,0x4e6,0x4e6,0x4e6,0x4e6,0x4e6,0x4e6,0x4e6,0x4e6,
+0x4e9,0x4e9,0x4e9,0x4e9,0x4e9,0x4e9,0x4e9,0x4e9,0x4e6,0x4e6,0x4e6,0x4e6,0x4e6,0x4e6,0x4e6,0x4e6,
+0x4e9,0x4e9,0x4e9,0x4e9,0x4e9,0x4e9,0x4e9,0x4e9,0x6a5,0x6a5,0x6a8,0x504,0x6b4,0x6b1,0x6b1,0x6ae,
+0x52e,0x52e,0x4ec,0x4ec,0x4ec,0x4ec,0x4ec,0xb34,0x6b7,0x510,0x6cf,0x6d2,0x525,0x6b7,0x513,0x513,
+0x504,0x51f,0x51f,0x6a5,0x52b,0x528,0x6ab,0x4fe,0x4f5,0x4f5,0x4f8,0x4f8,0x4f8,0x4f8,0x4f8,0x4fb,
+0x4f8,0x4f8,0x4f8,0x4ef,0x537,0x534,0x531,0x531,0x6c3,0x519,0x516,0x6c0,0x6bd,0x6ba,0x6cc,0x507,
+0x6c9,0x6c9,0x51c,0x51f,0x6c6,0x6c6,0x51c,0x51f,0x501,0x504,0x504,0x504,0x522,0x50d,0x50a,0xbb8,
+0xad7,0xad7,0xad4,0xad4,0xad4,0xad4,0xbaf,0xbaf,0xbaf,0xbaf,0xbb5,0xcdb,0xcd8,0xdbc,0xdbf,0xbb2,
+0xdbf,0xdbf,0xdbf,0xdbf,0xdbc,0xdbf,0xdbf,0xbac,0x55b,0x55b,0x55b,0x55b,0x55b,0x55b,0x55b,0x558,
+0x55e,0x73e,0x55b,0x9a5,0x9c6,0xada,0xada,0xada,0xbbe,0xbbe,0xdc5,0xdc5,0xdc5,0xdc5,0x1140,0x1143,
+0x1143,0x1314,0x14a6,0x14d0,0x14d3,0x14d3,0x16da,0x185a,0x56a,0x56a,0x582,0x6e4,0x567,0x6de,0x56a,0x57f,
+0x567,0x6e4,0x579,0x582,0x582,0x582,0x579,0x579,0x582,0x582,0x582,0x6ea,0x567,0x582,0x6e7,0x567,
+0x576,0x582,0x582,0x582,0x582,0x582,0x567,0x567,0x56d,0x6de,0x6e1,0x567,0x582,0x567,0x6ed,0x567,
+0x582,0x570,0x588,0x6f0,0x582,0x582,0x573,0x579,0x582,0x582,0x585,0x582,0x579,0x57c,0x57c,0x57c,
+0x57c,0xae3,0xae0,0xcde,0xdce,0xbd3,0xbd6,0xbd6,0xbd0,0xbcd,0xbcd,0xbcd,0xbcd,0xbd6,0xbd3,0xbd3,
+0xbd3,0xbd3,0xbca,0xbcd,0xdcb,0xedc,0xedf,0xfd2,0x1146,0x1146,0x1146,0x6f6,0x6f3,0x58b,0x58e,0x58e,
+0x58e,0x58e,0x58e,0x6f3,0x6f6,0x6f6,0x6f3,0x58e,0x6fc,0x6fc,0x6fc,0x6fc,0x6fc,0x6fc,0x6fc,0x6fc,
+0x6fc,0x6fc,0x6fc,0x6fc,0x597,0x597,0x597,0x597,0x6f9,0x6f9,0x6f9,0x6f9,0x6f9,0x6f9,0x6f9,0x6f9,
+0x6f9,0x6f9,0x591,0x591,0x591,0x591,0x591,0x591,0x59d,0x59d,0x59d,0x59d,0x59d,0x59d,0x59d,0x59d,
+0x59a,0x59d,0x59d,0x59d,0x59d,0x59d,0x5a0,0x59a,0x59d,0x59d,0x59a,0x59a,0x59a,0x59a,0x59d,0x59d,
+0x6ff,0x6ff,0x59a,0x59a,0x59d,0x59d,0x59d,0x59d,0x59d,0x59d,0x59d,0x59d,0x59d,0x59d,0x59d,0x59d,
+0x59d,0x5a0,0x5a0,0x5a0,0x59d,0x59d,0x702,0x59d,0x702,0x59d,0x59d,0x59d,0x59d,0x59d,0x59d,0x59d,
+0x59a,0x59d,0x59a,0x59a,0x59a,0x59a,0x59a,0x59a,0x59d,0x59d,0x59a,0x6ff,0x59a,0x59a,0x59a,0xae9,
+0xae9,0xae9,0xae9,0xae9,0xae9,0xae9,0xae9,0xae9,0xbd9,0xbd9,0xbd9,0xbd9,0xbd9,0xbd9,0xbd9,0xbd9,
+0xbd9,0xbd9,0xbd9,0xbd9,0x705,0x5a3,0x705,0x705,0x5a6,0x5a3,0x5a3,0x705,0x705,0x5a6,0x5a3,0x705,
+0x5a6,0x5a3,0x5a3,0x705,0x5a3,0x705,0x5b2,0x5af,0x5a3,0x705,0x5a3,0x5a3,0x5a3,0x5a3,0x705,0x5a3,
+0x5a3,0x705,0x705,0x705,0x705,0x5a3,0x5a3,0x705,0x5a6,0x705,0x5a6,0x705,0x705,0x705,0x705,0x705,
+0x70b,0x5a9,0x705,0x5a9,0x5a9,0x5a3,0x5a3,0x5a3,0x705,0x705,0x705,0x705,0x5a3,0x5a3,0x5a3,0x5a3,
+0x705,0x705,0x5a3,0x5a3,0x5a3,0x5a6,0x5a3,0x5a3,0x5a6,0x5a3,0x5a3,0x5a6,0x705,0x5a6,0x5a3,0x5a3,
+0x705,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x705,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,
+0x5a3,0x5a3,0x5a3,0x5a3,0x708,0x705,0x5a6,0x5a3,0x705,0x705,0x705,0x705,0x5a3,0x5a3,0x705,0x705,
+0x5a3,0x5a6,0x708,0x708,0x5a6,0x5a6,0x5a3,0x5a3,0x5a6,0x5a6,0x5a3,0x5a3,0x5a6,0x5a6,0x5a3,0x5a3,
+0x5a3,0x5a3,0x5a3,0x5a3,0x5a6,0x5a6,0x705,0x705,0x5a6,0x5a6,0x705,0x705,0x5a6,0x5a6,0x5a3,0x5a3,
+0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x705,0x5a3,0x5a3,0x5a3,0x705,0x5a3,0x5a3,
+0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x705,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a6,0x5a6,0x5a6,0x5a6,
+0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x705,
+0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,
+0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,
+0x5a6,0x5a6,0x5a6,0x5a6,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a3,0x5a6,0x5a6,0x5a6,0x5a6,0x5a3,0x5ac,
+0x5a3,0x5a3,0xbdc,0xbdc,0xbdc,0xbdc,0xbdc,0xbdc,0xbdc,0xbdc,0xbdc,0xbdc,0xbdc,0xbdc,0xbdc,0xbdc,
+0x5b5,0xaec,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x5be,0x5bb,0x5be,0x5bb,0x5b5,0x5b5,0x5b5,0x5b5,
+0x5b5,0x5b5,0x70e,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x7fe,0x7fe,0x5b5,0x5b5,0x5b5,0x5b5,
+0x5b8,0x5b8,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x804,0x801,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,
+0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,
+0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0x5b5,0xaec,
+0xbe2,0xaec,0xaec,0xaec,0x5c1,0x5c1,0x5c1,0x5c1,0x5c1,0x5c1,0x5c1,0x5c1,0x5c1,0x5c1,0x5c1,0x5c1,
+0x5c1,0x5c1,0x5c1,0x5c1,0x5c1,0x5c1,0x5c1,0x5c1,0x5c1,0x5c1,0x5c1,0x5c1,0x5c1,0x5c1,0x5c1,0x5c1,
+0x5c1,0x5c1,0x5c1,0x5c1,0x717,0x717,0x717,0x717,0x717,0x717,0x717,0x717,0x717,0x717,0x5c7,0xc3f,
+0xc3f,0xc3f,0xc3f,0xc3f,0xc3f,0xc3f,0xc3f,0xc3f,0xc3f,0xc3f,0xc3f,0xc3f,0xc3f,0xc3f,0xc3f,0xc3f,
+0xc3f,0xc3f,0xc3f,0xd4d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,
+0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x5ca,0x5cd,0x5cd,0x5cd,0x5cd,0x5cd,0x5cd,0x5cd,
+0x5cd,0x5cd,0x5cd,0x5cd,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,
+0x5cd,0x5cd,0x5cd,0x5cd,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,
+0x71d,0x71d,0x71d,0x71d,0x720,0x720,0x720,0x720,0x720,0x720,0x720,0x720,0x720,0x720,0x720,0x720,
+0x720,0x720,0x720,0x720,0x5d0,0x5d0,0x720,0x720,0x720,0x720,0xbe5,0xbe5,0xbe5,0xbe5,0xbe5,0xbe5,
+0xbe5,0xbe5,0xbe5,0xbe5,0x726,0x726,0x5d3,0x723,0x723,0x723,0x723,0x723,0x723,0x723,0x5d3,0x5d3,
+0x5d3,0x5d3,0x5d6,0x5d6,0x5d6,0x5d6,0x726,0x726,0x5d6,0x5d6,0x726,0x726,0x5d3,0x5d3,0x5d3,0x5d3,
+0x726,0x726,0x5d6,0x5d6,0x726,0x726,0x5d3,0x5d3,0x5d3,0x5d3,0x726,0x726,0x723,0x5d3,0x5d6,0x726,
+0x5d3,0x5d3,0x723,0x726,0x726,0x726,0x5d6,0x5d6,0x5d3,0x5d3,0x5d3,0x5d3,0x5d3,0x5d3,0x5d3,0x5d3,
+0x5d3,0x5d3,0x5d3,0x5d3,0x5d3,0x5d3,0x726,0x723,0x726,0x723,0x5d3,0x5d6,0x5d6,0x5d6,0x5d6,0x5d6,
+0x5d6,0x5d3,0x5d3,0x723,0xaf2,0xaf2,0xaf2,0xaf2,0xaf2,0xaf2,0xaf2,0xaf2,0xbe8,0xbe8,0xbe8,0xbe8,
+0xbe8,0xc57,0xc57,0xbe8,0x5dc,0x5dc,0x5dc,0x5dc,0x5d9,0x72f,0x72f,0x5d9,0x5d9,0x729,0x5d9,0x5d9,
+0x5d9,0x5d9,0x729,0x729,0x5d9,0x5d9,0x5d9,0x5d9,0xd56,0xd56,0xbeb,0xbeb,0xdd7,0xaf5,0x5dc,0x5dc,
+0x72c,0x5df,0x72c,0x5dc,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,
+0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5dc,0x5dc,0x5dc,
+0x5d9,0x5d9,0x5d9,0x5d9,0x72f,0x5d9,0x72f,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x807,0x807,0x807,0x807,
+0x807,0x807,0x807,0x807,0x807,0x807,0x807,0x807,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,0x5d9,
+0x5d9,0x5d9,0x5d9,0x5d9,0x72f,0x72f,0x5e2,0x72f,0x729,0x729,0x5d9,0x729,0x72c,0x729,0x729,0x5d9,
+0x729,0x72f,0x5e2,0x72f,0xaf5,0xaf5,0xbee,0xbee,0xbee,0xbee,0xbee,0xbee,0xbee,0xbee,0xbee,0xbee,
+0xbee,0xbee,0xdd4,0xe8b,0x5e5,0x5e5,0x5e5,0x5e5,0x5e5,0x5e5,0x5e5,0x5e5,0x5e5,0x5e5,0x5e5,0x5e5,
+0x5e5,0x5e5,0x5e5,0x5e5,0x5e5,0x5e5,0x5e5,0x5e5,0x5e8,0x139b,0x139b,0x139b,0x5e8,0x5e8,0x5e8,0x5e8,
+0x5e8,0x5e8,0x5e8,0x5e8,0x14d9,0x5ee,0x5ee,0x5ee,0x5ee,0x139b,0x5e8,0x5e8,0x5ee,0x5ee,0x139e,0x139e,
+0x5f4,0x5f4,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,
+0x5e8,0x5e8,0x5e8,0x5e8,0x139b,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,
+0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x735,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,
+0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x139b,0x5e8,0x139b,0x5e8,0x5e8,0x5e8,0x5e8,0x139b,
+0x139b,0x139b,0x5e8,0x1299,0x5e8,0x5e8,0x5e8,0x5f1,0x5f1,0x5f1,0x5f1,0x1320,0x1320,0x5e8,0x5eb,0x5eb,
+0x5ee,0x5e8,0x5e8,0x5e8,0xbf4,0xbf1,0xbf4,0xbf1,0xbf4,0xbf1,0xbf4,0xbf1,0xbf4,0xbf1,0xbf4,0xbf1,
+0xbf4,0xbf1,0x732,0x732,0x732,0x732,0x732,0x732,0x732,0x732,0x732,0x732,0x5e8,0x5e8,0x5e8,0x5e8,
+0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x139b,0x5e8,0x5e8,0x5e8,
+0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x5e8,0x139b,0x615,0x615,0x615,0x615,
+0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,
+0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x60c,0x60c,0x60c,0x60c,0x60c,0x60c,
+0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,
+0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0xb46,0xb46,0xb46,0xb46,
+0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0x615,0x615,0x95a,0x615,
+0x615,0x615,0x615,0x615,0x615,0x615,0x60c,0x60c,0xbf7,0xd7a,0x1b0f,0x1b0f,0x612,0x618,0x615,0x60f,
+0x612,0x618,0x615,0x60f,0x612,0x618,0x615,0x60f,0x612,0x618,0x615,0x60f,0x612,0x618,0x615,0x60f,
+0x612,0x618,0x615,0x60f,0x612,0x618,0x615,0x60f,0x612,0x618,0x615,0x60f,0x615,0x60f,0x615,0x60f,
+0x615,0x60f,0x615,0x60f,0x615,0x60f,0x615,0x60f,0x612,0x618,0x615,0x60f,0x612,0x618,0x615,0x60f,
+0x612,0x618,0x615,0x60f,0x612,0x618,0x615,0x60f,0x615,0x60f,0x612,0x618,0x615,0x60f,0x615,0x60f,
+0x612,0x618,0x615,0x60f,0x612,0x618,0x615,0x60f,0x615,0x60f,0x1323,0x1323,0x1323,0x1323,0x1323,0x1323,
+0x1323,0x1323,0x1323,0x1323,0x1323,0x1323,0x1323,0x1323,0x615,0x60f,0x615,0x60f,0x615,0x60f,0x612,0x618,
+0x612,0x618,0x615,0x60f,0x615,0x60f,0x615,0x60f,0x615,0x60f,0x615,0x60f,0x615,0x60f,0x615,0x60f,
+0x612,0x615,0x60f,0x612,0x615,0x60f,0x612,0x618,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,
+0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x612,
+0x612,0x612,0x612,0x612,0x612,0x612,0x612,0x612,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,
+0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,
+0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x612,0x612,0x612,0x612,0x612,0x612,0x612,
+0x618,0x618,0x618,0x618,0x618,0x618,0x618,0x618,0x60f,0x615,0x918,0x91b,0x1b0f,0x1b0f,0x1b0f,0x1b0f,
+0x1b0f,0x1b0f,0x1b0f,0x1b0f,0x1b0f,0x1b0f,0x1b0f,0x1b0f,0x1b0f,0x1b0f,0x1b0f,0x1b0f,0x612,0x60f,0x612,0x612,
+0x612,0x612,0x612,0x612,0x60f,0x612,0x60f,0x60f,0x612,0x612,0x60f,0x60f,0x612,0x612,0x60f,0x612,
+0x60f,0x612,0x60f,0x60f,0x612,0x60f,0x60f,0x612,0x60f,0x612,0x60f,0x60f,0x612,0x60f,0x612,0x612,
+0x60f,0x60f,0x60f,0x612,0x60f,0x60f,0x60f,0x60f,0x60f,0x612,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,
+0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,0x60f,
+0x612,0x612,0x60f,0x60f,0x612,0x60f,0x612,0x60f,0x60f,0x60f,0x60f,0x60f,0x612,0x612,0x612,0x612,
+0x612,0x612,0x612,0x612,0x612,0x612,0x612,0x612,0x612,0x612,0x612,0x612,0x612,0x612,0x612,0x612,
+0x612,0x612,0x612,0x612,0x612,0x612,0x612,0x612,0x612,0x612,0x612,0x612,0x612,0x612,0x612,0x618,
+0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,
+0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,
+0x618,0x618,0x618,0x618,0x618,0x618,0x618,0x618,0x618,0x618,0x618,0x618,0x618,0x618,0x618,0x618,
+0x618,0x618,0x618,0x618,0x618,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,0x615,
+0x61b,0x61b,0x61b,0x61b,0xfde,0xfde,0xfde,0x14dc,0x14dc,0x14dc,0x14dc,0x14dc,0x14dc,0x14dc,0x16e0,0x16e0,
+0x864,0x86a,0x86a,0x876,0x876,0x867,0x85e,0x867,0x85e,0x867,0x85e,0x867,0x85e,0x867,0x85e,0x867,
+0x62a,0x62a,0x624,0x62a,0x624,0x62a,0x624,0x62a,0x624,0x62a,0x624,0x627,0x62d,0x62a,0x624,0x62a,
+0x624,0x627,0x62d,0x62a,0x624,0x62a,0x624,0x627,0x62d,0x62a,0x624,0x627,0x62d,0x62a,0x624,0x627,
+0x62d,0x62a,0x624,0x62a,0x624,0x62a,0x624,0x62a,0x624,0x62a,0x624,0x627,0x62d,0x62a,0x624,0x627,
+0x62d,0x62a,0x624,0x627,0x62d,0x62a,0x624,0x627,0x62d,0x62a,0x624,0x627,0x62d,0x62a,0x624,0x627,
+0x62d,0x62a,0x624,0x627,0x62d,0x62a,0x624,0x627,0x62d,0x62a,0x624,0x627,0x714,0x714,0x714,0x714,
+0x714,0x714,0x714,0x714,0x714,0x714,0x714,0x714,0x714,0x714,0x714,0x714,0x714,0x714,0x714,0x714,
+0x711,0x711,0x711,0x711,0x711,0x711,0x711,0x711,0x711,0x711,0x711,0x711,0x711,0x711,0x711,0x711,
+0x711,0x711,0x711,0x711,0x711,0x711,0x711,0x711,0x711,0x711,0x711,0x711,0x711,0x711,0x711,0x711,
+0x711,0x711,0x71a,0x71a,0x71a,0x71a,0x71a,0x71a,0x71a,0x71a,0x71a,0x71a,0x71a,0x71a,0x71a,0x71a,
+0x71a,0x71a,0x71a,0x71a,0x717,0x717,0x717,0x717,0x717,0x717,0x717,0x717,0x717,0x717,0x717,0x717,
+0x717,0x717,0x717,0x717,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,
+0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,0x71d,
+0x71d,0x71d,0x71d,0x71d,0x738,0x738,0x738,0x738,0x738,0x738,0x738,0x738,0x738,0x738,0x738,0x738,
+0x738,0x738,0x738,0x738,0x738,0x738,0x738,0x738,0x738,0x738,0x738,0x738,0x738,0x738,0x738,0x738,
+0x738,0x738,0x738,0x738,0xc45,0x8c7,0x8c1,0x8be,0x8c4,0x8bb,0x74d,0x750,0x750,0x750,0x750,0x750,
+0x750,0x750,0x750,0x750,0x8cd,0x74d,0x74d,0x74d,0x74d,0x74d,0x74d,0x74d,0x74d,0x74d,0x74d,0x74d,
+0x74d,0x74d,0x74d,0x74d,0x74d,0x74d,0x74d,0x74d,0x74d,0x74d,0x74d,0x74d,0x74d,0x74d,0x74d,0x74d,
+0x74d,0x74d,0x74d,0x74d,0x74d,0x74d,0x8ca,0x8ca,0x753,0x8dc,0x8df,0x8e5,0x80a,0x816,0x8fa,0x813,
+0x8d3,0x8d0,0x8d3,0x8d0,0x8d9,0x8d6,0x8d9,0x8d6,0x8d3,0x8d0,0x810,0x8e5,0x8d3,0x8d0,0x8d3,0x8d0,
+0x8d3,0x8d0,0x8d3,0x8d0,0x8eb,0x8f1,0x8ee,0x8ee,0x759,0x795,0x795,0x795,0x795,0x795,0x795,0x78f,
+0x78f,0x78f,0x78f,0x78f,0x78f,0x78f,0x78f,0x78f,0x78f,0x78f,0x78f,0x78f,0x78f,0x78f,0x78f,0x78f,
+0x78f,0x78f,0x78f,0x75c,0x777,0x756,0x77d,0x780,0x77a,0x792,0x792,0x792,0x792,0x792,0x792,0x78c,
+0x78c,0x78c,0x78c,0x78c,0x78c,0x78c,0x78c,0x78c,0x78c,0x78c,0x78c,0x78c,0x78c,0x78c,0x78c,0x78c,
+0x78c,0x78c,0x78c,0x75c,0x777,0x756,0x777,0xc48,0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,
+0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,
+0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,0x7f8,0x1293,0x1293,0x1293,0x1293,0x1293,0x7fb,
+0x810,0x813,0x813,0x813,0x813,0x813,0x813,0x813,0x813,0x813,0x933,0x933,0x933,0x933,0x819,0x819,
+0x8e8,0x8f7,0x8f7,0x8f7,0x8f7,0x8f4,0x80d,0x8e2,0xb19,0xb19,0xb19,0xc5a,0xc78,0xc75,0xb37,0x8b8,
+0x81f,0x81c,0x81f,0x822,0x81c,0x81f,0x81c,0x81f,0x81c,0x81f,0x81c,0x81c,0x81c,0x81c,0x81c,0x81c,
+0x81f,0x81f,0x81c,0x81f,0x81f,0x81c,0x81f,0x81f,0x81c,0x81f,0x81f,0x81c,0x81f,0x81f,0x81c,0x81c,
+0xc7b,0x831,0x82b,0x831,0x82b,0x831,0x82b,0x831,0x82b,0x831,0x82b,0x82b,0x82e,0x82b,0x82e,0x82b,
+0x82e,0x82b,0x82e,0x82b,0x82e,0x82b,0x82e,0x82b,0x82e,0x82b,0x82e,0x82b,0x82e,0x82b,0x82e,0x82b,
+0x82e,0x82b,0x82e,0x831,0x82b,0x82e,0x82b,0x82e,0x82b,0x82e,0x82b,0x82b,0x82b,0x82b,0x82b,0x82b,
+0x82e,0x82e,0x82b,0x82e,0x82e,0x82b,0x82e,0x82e,0x82b,0x82e,0x82e,0x82b,0x82e,0x82e,0x82b,0x82b,
+0x82b,0x82b,0x82b,0x831,0x82b,0x831,0x82b,0x831,0x82b,0x82b,0x82b,0x82b,0x82b,0x82b,0x831,0x82b,
+0x82b,0x82b,0x82b,0x82b,0x82e,0x831,0x831,0x82e,0x82e,0x82e,0x82e,0x900,0x903,0x834,0x837,0xc63,
0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,
-0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x1a46,
-0x903,0x903,0x903,0x903,0x903,0x903,0x903,0x903,0x903,0x903,0x903,0x903,0x903,0x903,0x903,0x903,
-0x903,0x840,0x840,0x840,0x840,0x840,0x840,0xd50,0xd50,0xd50,0xd50,0x906,0x906,0x906,0x906,0x906,
-0x840,0x840,0x840,0x840,0x840,0x840,0x840,0x840,0x840,0x840,0x840,0x840,0x840,0x840,0x840,0x840,
-0x840,0x840,0x840,0x840,0x840,0x840,0x840,0x840,0x840,0x840,0x840,0x840,0x840,0x840,0x840,0x840,
-0x840,0x840,0xd50,0xd50,0x843,0x843,0x843,0x843,0x843,0x843,0x843,0x843,0x843,0x843,0x843,0x843,
-0x843,0x843,0x843,0x843,0x843,0x843,0x843,0x843,0x843,0x843,0x843,0x843,0x843,0x843,0x843,0x843,
-0x843,0x843,0x843,0x843,0x903,0x903,0x903,0x903,0x903,0x903,0x903,0x903,0x846,0x846,0x846,0x846,
-0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,
-0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,0x846,0xe7f,0xe7f,
-0xe7f,0xe7f,0xe7f,0xe7f,0xe7f,0xe7f,0xe7f,0xe7f,0xe7f,0xe7f,0xe7f,0xe7f,0xe7f,0xe7f,0xe7f,0xe7f,
-0xe7f,0xe7f,0xe7f,0xe7f,0x10ef,0x10ef,0x10ef,0x10ef,0x849,0x849,0x849,0x849,0x849,0x849,0x849,0x849,
+0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,
+0x840,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,
+0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x83d,0x849,0x849,0x849,0x849,
0x849,0x849,0x849,0x849,0x849,0x849,0x849,0x849,0x849,0x849,0x849,0x849,0x849,0x849,0x849,0x849,
-0x849,0x849,0x849,0x849,0x849,0x849,0x849,0x849,0x849,0x849,0x84c,0x84c,0x849,0x84c,0x849,0x84c,
-0x84c,0x849,0x849,0x849,0x849,0x849,0x849,0x849,0x849,0x849,0x849,0x84c,0x849,0x84c,0x849,0x84c,
-0x84c,0x849,0x849,0x84c,0x84c,0x84c,0x849,0x849,0x849,0x849,0x1485,0x1485,0xc5a,0xc5a,0xc5a,0xc5a,
-0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0x8fd,0x8fd,0x8fd,0x8fd,
-0x8fd,0x8fd,0x8fd,0x8fd,0x8fd,0x8fd,0x8fd,0x8fd,0x8fd,0x8fd,0x8fd,0x8fd,0x8fd,0x8fd,0x8fd,0x8fd,
-0x8fd,0x8fd,0x8fd,0x8fd,0x8fd,0x8fd,0x8fd,0x8fd,0x8fd,0x8fd,0x8fd,0x8fd,0x12c0,0x12c0,0x12c0,0x12c0,
-0x1269,0x1269,0x1269,0x1269,0x1269,0x1269,0x1269,0x1269,0xd47,0xc54,0xc54,0xc54,0xc54,0xc54,0xc54,0xc54,
-0xc54,0xc54,0xc54,0xc54,0xc54,0xc54,0xc54,0xc54,0x900,0x900,0x900,0x900,0x900,0x900,0x900,0x900,
-0x900,0x900,0x900,0x900,0x900,0x900,0x900,0x900,0x900,0x900,0x900,0x900,0x900,0x900,0x900,0x900,
-0x900,0x900,0x900,0x900,0x900,0x900,0x900,0x900,0x900,0xc54,0xc54,0xc54,0xc54,0xc54,0xc54,0xc54,
-0xc54,0xc54,0xc54,0xc54,0xc54,0xc54,0xc54,0xc54,0x903,0x903,0x903,0x903,0x903,0x903,0x903,0x903,
-0x903,0x903,0x903,0x903,0x903,0x903,0x903,0x903,0x903,0x903,0x903,0x903,0x903,0x903,0x903,0x903,
-0x903,0x903,0x903,0x903,0x903,0x903,0x903,0xd50,0x98a,0x96c,0x96c,0x96c,0x96c,0x966,0x96c,0x96c,
-0x97e,0x96c,0x96c,0x969,0x975,0x97b,0x97b,0x97b,0x97b,0x97b,0x97e,0x966,0x972,0x966,0x966,0x966,
-0x95d,0x95d,0x966,0x966,0x966,0x966,0x966,0x966,0x981,0x981,0x981,0x981,0x981,0x981,0x981,0x981,
-0x981,0x981,0x966,0x966,0x966,0x966,0x966,0x966,0x966,0x966,0x966,0x966,0x969,0x95d,0x966,0x95d,
-0x966,0x95d,0x978,0x96f,0x978,0x96f,0x987,0x987,0x996,0x996,0x996,0x996,0x996,0x996,0x996,0x996,
-0x996,0x996,0x996,0x996,0x996,0x996,0x996,0x996,0x996,0x996,0x996,0x996,0x996,0x996,0x996,0x996,
-0x996,0x996,0x996,0x996,0x996,0x996,0x996,0x996,0x999,0x999,0x999,0x999,0x999,0x999,0x999,0x999,
-0x999,0x999,0x999,0x999,0x999,0x999,0x999,0x999,0x999,0x999,0x999,0x999,0x999,0x999,0x999,0x999,
-0x999,0x999,0x999,0x999,0x999,0x999,0x999,0x999,0x99c,0x99c,0x99c,0x99c,0x99c,0x99c,0x99c,0x99c,
-0x99c,0x99c,0x99c,0x99c,0x99c,0x99c,0x99c,0x99c,0x99c,0x99c,0x99c,0x99c,0x99c,0x99c,0x99c,0x99c,
-0x99c,0x99c,0x99c,0x99c,0x99c,0x99c,0x99c,0x99c,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,
-0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,
-0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x99f,0x99f,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,
-0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,
-0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a2,0x9a2,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,
-0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,
-0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a5,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,
+0x849,0x849,0x849,0x849,0x849,0x849,0x849,0x849,0xd5f,0xd5f,0xe8e,0x843,0x90c,0x90c,0x90c,0x90c,
+0x90c,0x90c,0x90c,0x90c,0x90c,0x90c,0x90c,0x90c,0xd59,0xd59,0xd59,0xd59,0x84c,0x84c,0x84c,0x84c,
+0x84c,0x84c,0x84c,0x84c,0x84c,0x84c,0x84c,0x84c,0x84c,0x84c,0x84c,0x84c,0x84c,0x84c,0x84c,0x84c,
+0x84c,0x84c,0x84c,0x84c,0x84c,0x84c,0x84c,0x84c,0x84c,0x84c,0x84c,0x1a58,0x912,0x912,0x912,0x912,
+0x912,0x912,0x912,0x912,0x912,0x912,0x912,0x912,0x912,0x912,0x912,0x912,0x912,0x84f,0x84f,0x84f,
+0x84f,0x84f,0x84f,0xd62,0xd62,0xd62,0xd62,0x915,0x915,0x915,0x915,0x915,0x84f,0x84f,0x84f,0x84f,
+0x84f,0x84f,0x84f,0x84f,0x84f,0x84f,0x84f,0x84f,0x84f,0x84f,0x84f,0x84f,0x84f,0x84f,0x84f,0x84f,
+0x84f,0x84f,0x84f,0x84f,0x84f,0x84f,0x84f,0x84f,0x84f,0x84f,0x84f,0x84f,0x84f,0x84f,0xd62,0xd62,
+0x852,0x852,0x852,0x852,0x852,0x852,0x852,0x852,0x852,0x852,0x852,0x852,0x852,0x852,0x852,0x852,
+0x852,0x852,0x852,0x852,0x852,0x852,0x852,0x852,0x852,0x852,0x852,0x852,0x852,0x852,0x852,0x852,
+0x912,0x912,0x912,0x912,0x912,0x912,0x912,0x912,0x855,0x855,0x855,0x855,0x855,0x855,0x855,0x855,
+0x855,0x855,0x855,0x855,0x855,0x855,0x855,0x855,0x855,0x855,0x855,0x855,0x855,0x855,0x855,0x855,
+0x855,0x855,0x855,0x855,0x855,0x855,0x855,0x855,0x855,0x855,0xe91,0xe91,0xe91,0xe91,0xe91,0xe91,
+0xe91,0xe91,0xe91,0xe91,0xe91,0xe91,0xe91,0xe91,0xe91,0xe91,0xe91,0xe91,0xe91,0xe91,0xe91,0xe91,
+0x1104,0x1104,0x1104,0x1104,0x858,0x858,0x858,0x858,0x858,0x858,0x858,0x858,0x858,0x858,0x858,0x858,
+0x858,0x858,0x858,0x858,0x858,0x858,0x858,0x858,0x858,0x858,0x858,0x858,0x858,0x858,0x858,0x858,
+0x858,0x858,0x858,0x858,0x858,0x858,0x85b,0x85b,0x858,0x85b,0x858,0x85b,0x85b,0x858,0x858,0x858,
+0x858,0x858,0x858,0x858,0x858,0x858,0x858,0x85b,0x858,0x85b,0x858,0x85b,0x85b,0x858,0x858,0x85b,
+0x85b,0x85b,0x858,0x858,0x858,0x858,0x1497,0x1497,0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,
+0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,0x90c,0x90c,0x90c,0x90c,0x90c,0x90c,0x90c,0x90c,
+0x90c,0x90c,0x90c,0x90c,0x90c,0x90c,0x90c,0x90c,0x90c,0x90c,0x90c,0x90c,0x90c,0x90c,0x90c,0x90c,
+0x90c,0x90c,0x90c,0x90c,0x90c,0x90c,0x90c,0x90c,0x12d2,0x12d2,0x12d2,0x12d2,0x127b,0x127b,0x127b,0x127b,
+0x127b,0x127b,0x127b,0x127b,0xd59,0xc66,0xc66,0xc66,0xc66,0xc66,0xc66,0xc66,0xc66,0xc66,0xc66,0xc66,
+0xc66,0xc66,0xc66,0xc66,0x90f,0x90f,0x90f,0x90f,0x90f,0x90f,0x90f,0x90f,0x90f,0x90f,0x90f,0x90f,
+0x90f,0x90f,0x90f,0x90f,0x90f,0x90f,0x90f,0x90f,0x90f,0x90f,0x90f,0x90f,0x90f,0x90f,0x90f,0x90f,
+0x90f,0x90f,0x90f,0x90f,0x90f,0xc66,0xc66,0xc66,0xc66,0xc66,0xc66,0xc66,0xc66,0xc66,0xc66,0xc66,
+0xc66,0xc66,0xc66,0xc66,0x912,0x912,0x912,0x912,0x912,0x912,0x912,0x912,0x912,0x912,0x912,0x912,
+0x912,0x912,0x912,0x912,0x912,0x912,0x912,0x912,0x912,0x912,0x912,0x912,0x912,0x912,0x912,0x912,
+0x912,0x912,0x912,0xd62,0x99c,0x97e,0x97e,0x97e,0x97e,0x978,0x97e,0x97e,0x990,0x97e,0x97e,0x97b,
+0x987,0x98d,0x98d,0x98d,0x98d,0x98d,0x990,0x978,0x984,0x978,0x978,0x978,0x96c,0x96c,0x978,0x978,
+0x978,0x978,0x978,0x978,0x993,0x993,0x993,0x993,0x993,0x993,0x993,0x993,0x993,0x993,0x978,0x978,
+0x978,0x978,0x978,0x978,0x978,0x978,0x978,0x978,0x97b,0x96c,0x978,0x96c,0x978,0x96c,0x98a,0x981,
+0x98a,0x981,0x999,0x999,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,
0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,
-0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9a8,0x9ab,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,
+0x9a8,0x9a8,0x9a8,0x9a8,0x9ab,0x9ab,0x9ab,0x9ab,0x9ab,0x9ab,0x9ab,0x9ab,0x9ab,0x9ab,0x9ab,0x9ab,
+0x9ab,0x9ab,0x9ab,0x9ab,0x9ab,0x9ab,0x9ab,0x9ab,0x9ab,0x9ab,0x9ab,0x9ab,0x9ab,0x9ab,0x9ab,0x9ab,
+0x9ab,0x9ab,0x9ab,0x9ab,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,
0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,
-0x9ae,0x9ae,0x9ae,0x9ae,0x9ab,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,
-0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,0x9ae,
-0xa3b,0xa3b,0xfb1,0xa3b,0xa3b,0xa3b,0xa3e,0xa3b,0xfb1,0xa3b,0xa3b,0xfa8,0xa35,0xa29,0xa29,0xa29,
-0xa29,0xa38,0xa29,0xf99,0xf99,0xf99,0xa29,0xa2c,0xa35,0xa2f,0xf9f,0xfab,0xfab,0xf99,0xf99,0xfb1,
-0xb2b,0xb2b,0xb2b,0xb2b,0xb2b,0xb2b,0xb2b,0xb2b,0xb2b,0xb2b,0xa41,0xa41,0xa32,0xa32,0xa32,0xa32,
-0xa3b,0xa3b,0xa3b,0xa3b,0xa3b,0xa3b,0xa38,0xa38,0xa29,0xa29,0xfb1,0xfb1,0xfb1,0xfb1,0xf99,0xf99,
-0xa3b,0xa3b,0xa3b,0xa3b,0xa3b,0xa3b,0xa3b,0xa3b,0xa3b,0xa3b,0xa3b,0xa3b,0xa3b,0xa3b,0xa3b,0xa3b,
-0xa3b,0xa3b,0xa3b,0xa3b,0xa3b,0xa3b,0xa3b,0xa3b,0xa3b,0xa3b,0xa3b,0xa3b,0xa3b,0xa3b,0xa3b,0xa3b,
-0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xda7,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,
-0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,
-0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xda7,0xa50,0xa50,0xa50,0xa50,
-0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa50,0xa56,0xa56,0xa56,0xa56,
-0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,
-0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa56,0xa5c,0xa5c,0xa5c,0xa5c,
-0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,0xa59,0xa5f,0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,
-0xa5c,0xa5c,0xa5c,0x1128,0x1128,0x1128,0x1128,0x1128,0x1128,0x1128,0x1128,0x1128,0x1125,0xa5c,0xa5c,0xa5c,
-0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,
-0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,0xa5c,
-0xa71,0xa71,0xa71,0xa71,0xa71,0xa71,0xa71,0xa71,0xa71,0xa71,0xa71,0xa71,0xa71,0xa71,0xa71,0xa71,
-0xa71,0xa71,0xa71,0xa71,0xa71,0xa71,0xa71,0xa71,0xa71,0xa71,0xa71,0xa71,0xa71,0xa71,0xa71,0xa71,
-0xa95,0xa95,0xa95,0xa98,0xa98,0xa95,0xa95,0xa95,0xa95,0xa95,0xa95,0xa95,0xa95,0xa95,0xa95,0xa95,
-0xa95,0xa95,0xa95,0xa95,0xa7d,0xa7d,0xa92,0xa74,0xa74,0xa74,0xa74,0xa74,0xa74,0xa74,0xa92,0xa92,
-0xa95,0xa95,0xa95,0xa95,0xa95,0xa95,0xa95,0xa95,0xa95,0xa95,0xa95,0xa95,0xa95,0xa95,0xa95,0xa95,
-0xa95,0xa95,0xa95,0xa95,0xa95,0xa95,0xa95,0xa95,0xa95,0xa95,0xa95,0xa95,0xa95,0xa95,0xa95,0xa95,
-0xab6,0xab6,0xab6,0xab6,0xab6,0xaa1,0xaa1,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,
-0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,
-0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab9,0xab6,0xab6,0xab6,0xab6,
-0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,
-0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xab6,0xada,0xada,0xada,0xada,0xada,0xada,0xada,0xada,
-0xada,0xada,0xada,0xada,0xada,0xada,0xada,0xada,0xada,0xada,0xada,0xada,0xada,0xada,0xada,0xada,
-0xada,0xada,0xada,0xbd0,0xbd0,0xbd0,0xbd0,0xbd0,0xae6,0xae6,0xae6,0xae6,0xae6,0xae6,0xae6,0xae6,
-0xae6,0xae6,0xae6,0xae6,0xae6,0xae6,0xae6,0xae6,0xae6,0xae6,0xae6,0xae6,0xae6,0xae6,0xae6,0xae6,
-0xae6,0xae6,0xae6,0xae6,0xae6,0xae6,0xae6,0xae6,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,
+0x9ae,0x9ae,0x9ae,0x9ae,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,
+0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,
+0x9b7,0x9b7,0x9b1,0x9b1,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,
+0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,
+0x9ba,0x9ba,0x9b4,0x9b4,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,
+0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,0x9b7,
+0x9b7,0x9b7,0x9b7,0x9b7,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,
+0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,0x9ba,
+0x9ba,0x9ba,0x9ba,0x9ba,0x9bd,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,
+0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,
+0x9bd,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,
+0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0x9c0,0xa4d,0xa4d,0xfc3,0xa4d,
+0xa4d,0xa4d,0xa50,0xa4d,0xfc3,0xa4d,0xa4d,0xfba,0xa47,0xa3b,0xa3b,0xa3b,0xa3b,0xa4a,0xa3b,0xfab,
+0xfab,0xfab,0xa3b,0xa3e,0xa47,0xa41,0xfb1,0xfbd,0xfbd,0xfab,0xfab,0xfc3,0xb3d,0xb3d,0xb3d,0xb3d,
+0xb3d,0xb3d,0xb3d,0xb3d,0xb3d,0xb3d,0xa53,0xa53,0xa44,0xa44,0xa44,0xa44,0xa4d,0xa4d,0xa4d,0xa4d,
+0xa4d,0xa4d,0xa4a,0xa4a,0xa3b,0xa3b,0xfc3,0xfc3,0xfc3,0xfc3,0xfab,0xfab,0xa4d,0xa4d,0xa4d,0xa4d,
+0xa4d,0xa4d,0xa4d,0xa4d,0xa4d,0xa4d,0xa4d,0xa4d,0xa4d,0xa4d,0xa4d,0xa4d,0xa4d,0xa4d,0xa4d,0xa4d,
+0xa4d,0xa4d,0xa4d,0xa4d,0xa4d,0xa4d,0xa4d,0xa4d,0xa4d,0xa4d,0xa4d,0xa4d,0xa62,0xa62,0xa62,0xa62,
+0xa62,0xa62,0xa62,0xdb9,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,
+0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,
+0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xdb9,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,
+0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa62,0xa68,0xa68,0xa68,0xa68,0xa68,0xa68,0xa68,0xa68,
+0xa68,0xa68,0xa68,0xa68,0xa68,0xa68,0xa68,0xa68,0xa68,0xa68,0xa68,0xa68,0xa68,0xa68,0xa68,0xa68,
+0xa68,0xa68,0xa68,0xa68,0xa68,0xa68,0xa68,0xa68,0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,
+0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,0xa6b,0xa71,0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,0x113d,
+0x113d,0x113d,0x113d,0x113d,0x113d,0x113d,0x113d,0x113d,0x113a,0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,
+0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,
+0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,0xa6e,0xa83,0xa83,0xa83,0xa83,
+0xa83,0xa83,0xa83,0xa83,0xa83,0xa83,0xa83,0xa83,0xa83,0xa83,0xa83,0xa83,0xa83,0xa83,0xa83,0xa83,
+0xa83,0xa83,0xa83,0xa83,0xa83,0xa83,0xa83,0xa83,0xa83,0xa83,0xa83,0xa83,0xaa7,0xaa7,0xaa7,0xaaa,
+0xaaa,0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,
+0xa8f,0xa8f,0xaa4,0xa86,0xa86,0xa86,0xa86,0xa86,0xa86,0xa86,0xaa4,0xaa4,0xaa7,0xaa7,0xaa7,0xaa7,
+0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,
+0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,0xaa7,0xac8,0xac8,0xac8,0xac8,
+0xac8,0xab3,0xab3,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,
+0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,
+0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xacb,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,
+0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,0xac8,
+0xac8,0xac8,0xac8,0xac8,0xaec,0xaec,0xaec,0xaec,0xaec,0xaec,0xaec,0xaec,0xaec,0xaec,0xaec,0xaec,
+0xaec,0xaec,0xaec,0xaec,0xaec,0xaec,0xaec,0xaec,0xaec,0xaec,0xaec,0xaec,0xaec,0xaec,0xaec,0xbe2,
+0xbe2,0xbe2,0xbe2,0xbe2,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,
0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,
-0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xaf8,0xafe,0xafe,0xafe,0xafe,0xafe,0xafe,0xafe,0xafe,
-0xafe,0xafe,0xafe,0xafe,0xafe,0xafe,0xafe,0xafe,0xafe,0xafe,0xafe,0xafe,0xafe,0xafe,0xafe,0xafe,
-0xafe,0xafe,0xafe,0xafe,0xafe,0xafe,0xafe,0xafe,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,
+0xaf8,0xaf8,0xaf8,0xaf8,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,
0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,0xb0a,
-0x138f,0x138f,0x138f,0x1ac1,0x1ac1,0x1ac1,0x1ac1,0x1ac1,0xb0d,0xb0d,0xb0d,0xb0d,0xb0d,0xb0d,0xb0d,0xb0d,
-0xb0d,0xb0d,0xb0d,0xb0d,0xb0d,0xb0d,0xb0d,0xb0d,0xb0d,0xb0d,0xb0d,0xb0d,0xb0d,0xb0d,0xb0d,0xb0d,
-0xb0d,0xb0d,0xb0d,0xb0d,0xb0d,0xb0d,0xb0d,0xb0d,0xb0d,0xb0d,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,
-0x1ac4,0x1ac4,0x1ac4,0x1ac4,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,
-0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb13,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,
+0xb0a,0xb0a,0xb0a,0xb0a,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,
0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,
-0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb10,0xb16,0xb16,0xc57,0xc57,
-0xb16,0xb16,0xb16,0xb16,0xb16,0xb16,0xb16,0xb16,0xb16,0xb16,0xb16,0xb16,0xb16,0xb16,0xb16,0xb16,
-0xc57,0xb16,0xb16,0xb16,0xb16,0xb16,0xb16,0xb16,0xb16,0xb16,0xb16,0xb16,0xb3a,0xb3a,0xb3a,0xb3a,
-0xb3a,0xb3a,0xb3a,0xb3a,0xb3a,0xb3a,0xb3a,0xb3a,0xb3a,0xb3a,0xb3a,0xb3a,0xb3a,0xb3a,0xb3a,0xb3a,
-0xb3a,0xb3a,0xb3a,0xb3a,0xb3a,0xb3a,0xb3a,0xb3a,0xb3a,0xb3a,0xb3a,0x14cd,0xb43,0xb43,0xb43,0xb43,
-0xb43,0xb43,0xcdb,0xcdb,0xb40,0xb40,0xb40,0xb40,0xb40,0xb40,0xb40,0xb40,0xb40,0xb40,0xb40,0xb40,
-0xb40,0xb40,0xb40,0xb40,0xb40,0xb40,0xb40,0xb40,0xb40,0xb40,0xb40,0xb40,0xb40,0xb40,0xcd8,0xcd8,
-0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,
-0xb43,0xb43,0xb43,0xb43,0xb43,0xb43,0xb43,0xb43,0xb43,0xb43,0xb43,0xb43,0xb43,0xb43,0xb43,0xb43,
-0xb43,0xb43,0xb43,0xb43,0xb43,0xb43,0xb43,0xb43,0xb43,0xb43,0xb43,0xb43,0xb43,0xb43,0xb43,0xb43,
-0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,
-0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,0xb46,
-0xb55,0xb55,0xb55,0xb55,0xb55,0xb4c,0xb58,0xb5e,0xb5e,0xb5e,0xb52,0xb52,0xb52,0xb5b,0xb4f,0xb4f,
-0xb4f,0xb4f,0xb4f,0xb49,0xb49,0xb49,0xb49,0xb49,0xb49,0xb49,0xb49,0xb5e,0xb5e,0xb5e,0xb5e,0xb5e,
-0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,
-0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,
-0xb52,0xb52,0xb55,0xb55,0xb5e,0xb5e,0xb5e,0xb52,0xb52,0xb5e,0xb5e,0xb5e,0xb5e,0xb5e,0xb5e,0xb5e,
-0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,
-0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb5e,0xb5e,0xb5e,0xb5e,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,
-0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb55,0xb55,0xb55,0xb55,0xb55,0xb55,0xb52,0xb52,0xb52,
+0xb10,0xb10,0xb10,0xb10,0xb1c,0xb1c,0xb1c,0xb1c,0xb1c,0xb1c,0xb1c,0xb1c,0xb1c,0xb1c,0xb1c,0xb1c,
+0xb1c,0xb1c,0xb1c,0xb1c,0xb1c,0xb1c,0xb1c,0xb1c,0xb1c,0xb1c,0xb1c,0xb1c,0x13a1,0x13a1,0x13a1,0x1ad3,
+0x1ad3,0x1ad3,0x1ad3,0x1ad3,0xb1f,0xb1f,0xb1f,0xb1f,0xb1f,0xb1f,0xb1f,0xb1f,0xb1f,0xb1f,0xb1f,0xb1f,
+0xb1f,0xb1f,0xb1f,0xb1f,0xb1f,0xb1f,0xb1f,0xb1f,0xb1f,0xb1f,0xb1f,0xb1f,0xb1f,0xb1f,0xb1f,0xb1f,
+0xb1f,0xb1f,0xb1f,0xb1f,0xb1f,0xb1f,0x1ad6,0x1ad6,0x1ad6,0x1ad6,0x1ad6,0x1ad6,0x1ad6,0x1ad6,0x1ad6,0x1ad6,
+0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,
+0xb22,0xb22,0xb22,0xb22,0xb22,0xb25,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,
+0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,
+0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb22,0xb28,0xb28,0xc69,0xc69,0xb28,0xb28,0xb28,0xb28,
+0xb28,0xb28,0xb28,0xb28,0xb28,0xb28,0xb28,0xb28,0xb28,0xb28,0xb28,0xb28,0xc69,0xb28,0xb28,0xb28,
+0xb28,0xb28,0xb28,0xb28,0xb28,0xb28,0xb28,0xb28,0xb4c,0xb4c,0xb4c,0xb4c,0xb4c,0xb4c,0xb4c,0xb4c,
+0xb4c,0xb4c,0xb4c,0xb4c,0xb4c,0xb4c,0xb4c,0xb4c,0xb4c,0xb4c,0xb4c,0xb4c,0xb4c,0xb4c,0xb4c,0xb4c,
+0xb4c,0xb4c,0xb4c,0xb4c,0xb4c,0xb4c,0xb4c,0x14df,0xb55,0xb55,0xb55,0xb55,0xb55,0xb55,0xced,0xced,
0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,
-0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0x16d1,0x16d1,0xb6a,0xb61,0xb67,0xb67,
-0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,
-0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb61,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb6a,0xb6a,
-0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,
-0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb61,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,
-0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb61,0xb67,0xb67,
-0xb67,0xb67,0xb67,0xb67,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,
-0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb61,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,
-0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,
+0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xb52,0xcea,0xcea,0xd38,0xd38,0xd38,0xd38,
+0xd38,0xd38,0xd38,0xd38,0xd38,0xd38,0xd38,0xd38,0xd38,0xd38,0xd38,0xd38,0xb55,0xb55,0xb55,0xb55,
+0xb55,0xb55,0xb55,0xb55,0xb55,0xb55,0xb55,0xb55,0xb55,0xb55,0xb55,0xb55,0xb55,0xb55,0xb55,0xb55,
+0xb55,0xb55,0xb55,0xb55,0xb55,0xb55,0xb55,0xb55,0xb55,0xb55,0xb55,0xb55,0xb58,0xb58,0xb58,0xb58,
+0xb58,0xb58,0xb58,0xb58,0xb58,0xb58,0xb58,0xb58,0xb58,0xb58,0xb58,0xb58,0xb58,0xb58,0xb58,0xb58,
+0xb58,0xb58,0xb58,0xb58,0xb58,0xb58,0xb58,0xb58,0xb58,0xb58,0xb58,0xb58,0xb67,0xb67,0xb67,0xb67,
+0xb67,0xb5e,0xb6a,0xb70,0xb70,0xb70,0xb64,0xb64,0xb64,0xb6d,0xb61,0xb61,0xb61,0xb61,0xb61,0xb5b,
+0xb5b,0xb5b,0xb5b,0xb5b,0xb5b,0xb5b,0xb5b,0xb70,0xb70,0xb70,0xb70,0xb70,0xb64,0xb64,0xb64,0xb64,
0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,
+0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb67,0xb67,
+0xb70,0xb70,0xb70,0xb64,0xb64,0xb70,0xb70,0xb70,0xb70,0xb70,0xb70,0xb70,0xb64,0xb64,0xb64,0xb64,
0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,
-0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,
-0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,
-0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,
-0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,
-0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb6a,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,
-0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,
-0xb67,0xb67,0xb67,0xb67,0xb6a,0xb6a,0xb6a,0xb6a,0xb6d,0xb6d,0xb6d,0xb6d,0xb6d,0xb6d,0xb6d,0xb6d,
-0xb6d,0xb6d,0xb6d,0xb6d,0xb6d,0xb6d,0xb6d,0xb6d,0xb6d,0xb6d,0xb6d,0xb6d,0xb6d,0xb6d,0xb6d,0xb6d,
-0xb6d,0xb6d,0xb6d,0xb6d,0xb6d,0xb6d,0xb6d,0xb6d,0xb73,0xb73,0xb73,0xb73,0xb73,0xb73,0xb73,0xb73,
-0xb73,0xb73,0xb73,0xb73,0xb73,0xb73,0xb73,0xb73,0xb73,0xb73,0xb73,0xb73,0xb73,0xb73,0xb73,0xb73,
-0xb73,0xb73,0xb73,0xb73,0xb73,0xb73,0xb73,0xb73,0xb73,0xb73,0xb73,0x1aca,0x1aca,0x1aca,0x1aca,0x1aca,
-0x1aca,0x1aca,0x1b96,0x1b96,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,
+0xb64,0xb64,0xb70,0xb70,0xb70,0xb70,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,
+0xb64,0xb64,0xb64,0xb67,0xb67,0xb67,0xb67,0xb67,0xb67,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,
+0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,
+0xb64,0xb64,0xb64,0xb64,0xb64,0xb64,0x16e3,0x16e3,0xb7c,0xb73,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,
+0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,
+0xb79,0xb79,0xb79,0xb73,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,
+0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,
+0xb7c,0xb7c,0xb7c,0xb73,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,
+0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb73,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,
+0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,
+0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb73,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,
+0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb76,0xb76,0xb76,0xb76,
0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,
-0xb76,0xb76,0xb76,0xb76,0xbd0,0xbd0,0xbd0,0xbd0,0xbd0,0xbd0,0xbd0,0xbd0,0xbd0,0xbd0,0xbd0,0xbd0,
-0xbd0,0xbd0,0xbd0,0xbd0,0xbd0,0xbd0,0xbd0,0xbd0,0xbd0,0xbd0,0xbcd,0xbd0,0xbcd,0xbcd,0xbcd,0xbcd,
-0xbcd,0xbcd,0xbcd,0xbcd,0xbcd,0xbcd,0xbcd,0xbcd,0xbcd,0xbcd,0xbcd,0xccf,0xcd2,0xdbf,0xdbf,0xdbf,
-0xdbf,0xdbf,0xdbf,0xdbf,0xdbf,0xdbf,0xdbf,0xdbf,0xed6,0xed6,0xed6,0xed6,0xbdc,0xbdc,0xbdc,0xbdc,
-0xbdc,0xbdc,0xbdc,0xbdc,0xbdc,0xbdc,0xcd5,0xcd5,0xcd5,0xcd5,0xcd5,0xcd5,0xcd5,0xcd5,0xdc2,0xe76,
-0xdc2,0xdc2,0xdc2,0xdc2,0xdc2,0xdc2,0xdc2,0xdc2,0xdc2,0xfc6,0x1260,0x1260,0xdcb,0xdcb,0xdcb,0xdcb,
-0xdcb,0xdd1,0xdce,0xee8,0xee8,0xee8,0xee8,0x13ce,0xfd8,0x13ce,0x131a,0x131a,0xc0f,0xc0f,0xc0f,0xc0f,
-0xc0f,0xc0f,0xc0f,0xc0f,0xc0f,0xc0f,0xc0f,0xc0f,0xc0f,0xc0f,0xc0f,0xc0f,0xc0f,0xc0f,0xc3c,0xc39,
-0xc3c,0xc39,0xc3c,0xc39,0x10e9,0x10e6,0xfde,0xfdb,0xc12,0xc12,0xc12,0xc12,0xc12,0xc12,0xc12,0xc12,
-0xc12,0xc12,0xc12,0xc12,0xc12,0xc12,0xc12,0xc12,0xc15,0xc15,0xc15,0xc15,0xc15,0xc15,0xc15,0xc15,
-0xc15,0xc15,0xc15,0xc15,0xc15,0xc15,0xc15,0xc15,0xc15,0xc15,0xc15,0xc15,0xc15,0xc15,0xc15,0xc15,
-0xc15,0xc15,0xc15,0xc15,0xc15,0xc15,0xc15,0xc15,0xc18,0xc18,0xc18,0xc1e,0xc1b,0xc42,0xc3f,0xc1e,
-0xc1b,0xc1e,0xc1b,0xc1e,0xc1b,0xc1e,0xc1b,0xc1e,0xc1b,0xc1e,0xc1b,0xc1e,0xc1b,0xc1e,0xc1b,0xc1e,
-0xc1b,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,
-0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,
-0xc18,0xc18,0xc18,0xc18,0xc1e,0xc1b,0xc1e,0xc1b,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,
-0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,0xc18,
-0xc18,0xc18,0xc18,0xc18,0xc1e,0xc1b,0xc18,0xc18,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,
-0xc21,0xc21,0xc21,0xc21,0xc27,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,
-0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,
-0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc27,0xc27,0xc27,0xc21,0xc21,0xc21,0xc21,0xc21,
-0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,
-0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc24,0xc21,0xc21,0xc21,0xc5a,0xc5a,0xc5a,0xc5a,
-0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,
-0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xc5a,0xcd5,0xd41,0xdc2,0xdc2,
-0xdc2,0xdc2,0xdc2,0xdc2,0xdc2,0xdc2,0xe76,0xe76,0xdc2,0xdc2,0xdc2,0xdc2,0xdc2,0xdc2,0xed9,0xfc6,
-0xfc6,0xfc6,0xfc6,0xfc6,0xfc6,0xfc6,0xfc6,0xfc6,0xfc6,0x1284,0x1284,0x1263,0xcf9,0xcf9,0xcf9,0xcf9,
-0xcf9,0xcf9,0xcf9,0xcf9,0xcf9,0xcf9,0xcf9,0xcf9,0xcf9,0xcf9,0xcf9,0xcf9,0xcf9,0xcf9,0xcf9,0xcf9,
-0xcf9,0xcf9,0xcf9,0xcf9,0xcf9,0xcf9,0xcf9,0xcf9,0xcf9,0xcf9,0xcf9,0xcf9,0xd08,0xd08,0xd08,0xd08,
-0xd08,0xd08,0xcff,0xcff,0xcff,0xcff,0xcff,0xcfc,0xd11,0xd11,0xd11,0xd0b,0xd11,0xd11,0xd11,0xd11,
-0xd11,0xd11,0xd11,0xd11,0xd11,0xd11,0xd11,0xd0b,0xd11,0xd11,0xd11,0xd11,0xd05,0xd05,0xd0e,0xd0e,
-0xd0e,0xd0e,0xd02,0xd02,0xd02,0xd02,0xd02,0xd08,0xdd7,0xdd7,0xdd7,0xdd7,0xdd7,0xdd7,0xdd7,0xdd7,
-0xdd7,0xdd7,0xdd7,0xdd7,0xdd4,0xdd7,0xdd7,0xdd7,0xdd7,0xdd7,0xdd7,0xdd7,0xd11,0xd11,0xd11,0xd11,
-0xd11,0xd11,0xd11,0xd11,0xd11,0xd11,0xd11,0xd11,0xd11,0xd11,0xd0b,0xd11,0xd11,0xd11,0xd11,0xd11,
-0xd11,0xd11,0xd11,0xd11,0xd11,0xd11,0xd11,0xd11,0xd11,0xd05,0xd05,0xd05,0xd08,0xd08,0xd08,0xd08,
-0xd08,0xd08,0xd08,0xd08,0xd08,0xd08,0xd08,0xd08,0xd08,0xd08,0xd08,0xd08,0xd08,0xd08,0xd08,0xd08,
-0xd08,0xd08,0xd08,0xd08,0xd08,0xd08,0xd08,0xd08,0xd08,0xd08,0xd08,0xd08,0xd14,0xd14,0xd14,0xd14,
-0xd14,0xd14,0xd14,0xd14,0xd14,0xd14,0xd14,0xd14,0xd14,0xd14,0xdda,0xdda,0xdda,0xdda,0xdda,0xdda,
-0xeeb,0xeeb,0xeeb,0xeeb,0xeeb,0xeeb,0xeeb,0x10f2,0x10f2,0xfe1,0xfe1,0xfe1,0xd17,0xd17,0xd17,0xd17,
-0xd17,0xd17,0xd17,0xd17,0xd17,0xd17,0xd17,0xd17,0xd17,0xd17,0xd17,0xd17,0xd17,0xd17,0xd17,0xd17,
-0xd17,0xd17,0xd17,0xd17,0xd17,0xd17,0xd17,0xd17,0xd17,0xd17,0xd17,0xd17,0xd1d,0xd1d,0xd1d,0xd1d,
-0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,
-0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd1d,0xd26,0xd26,0xd26,0xd26,
-0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,
-0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xd32,0xd32,0xd32,0xd32,
-0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,
-0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,0xd32,0xd3e,0xd3e,0xd3e,0xd3e,
-0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,
-0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xd3e,0xde0,0xde0,0xde0,0xde0,
-0xde0,0xde0,0xde0,0xde0,0xde0,0xde0,0xde0,0xde0,0xde0,0xde0,0xde0,0xde0,0xde0,0xde0,0xde0,0xde0,
-0xde0,0xde0,0xde0,0xde0,0xde0,0xde0,0xde0,0xde0,0xde0,0xde0,0xde0,0xde0,0xde6,0xde6,0xde6,0xde6,
-0xde6,0xde6,0xde6,0xde6,0xde6,0xde6,0xde6,0xde6,0xde6,0xde6,0xde6,0xde6,0xde6,0xde6,0xde6,0xde6,
-0xde6,0xde3,0xde3,0xde3,0xde3,0xde3,0xde3,0xde3,0xde3,0xde3,0xde3,0xde3,0xde6,0xde6,0xde6,0xde6,
-0xde6,0xde6,0xde6,0xde6,0xde6,0xde6,0xde6,0xde6,0xde6,0xde6,0xde6,0xde6,0xde6,0xde6,0xde6,0xde6,
-0xde6,0xde6,0xde6,0xde6,0xde6,0xde6,0xde6,0xde6,0xde6,0xde6,0xde6,0xde6,0xea6,0xea6,0xdf8,0xdf8,
-0xeee,0xeee,0xeee,0xeee,0xeee,0xeee,0xeee,0xfed,0xfed,0xfed,0xfed,0xfed,0xfea,0xfea,0xfea,0xfea,
-0xfea,0xfea,0xfea,0xfea,0xfea,0xfea,0xfea,0xfea,0xfea,0xfea,0xfea,0xfea,0xe07,0xe04,0xe07,0xe04,
-0xe07,0xe04,0xe07,0xe04,0xe07,0xe04,0xe07,0xe04,0xe07,0xe04,0xe07,0xe04,0xe07,0xe04,0xe07,0xe04,
-0xe07,0xe04,0xe07,0xe04,0xe07,0xe04,0xe07,0xe04,0xe07,0xe04,0xe07,0xe04,0xe13,0xe13,0xe13,0xe13,
-0xe13,0xe13,0xe13,0xe13,0xe13,0xe13,0xe13,0xe13,0xe13,0xe13,0xe13,0xe13,0xe13,0xe13,0xe13,0xe13,
-0xe13,0xe13,0xe13,0xe13,0xe13,0xe13,0xe13,0xe13,0xe13,0xe13,0xe13,0xe13,0xe19,0xe19,0xe19,0xe19,
-0xe19,0xe19,0xe19,0xe19,0xe19,0xe19,0xe19,0xe19,0xe19,0xe19,0xe19,0x1b0c,0xe16,0xe16,0xe16,0xe16,
-0xe16,0xe16,0xe16,0xe16,0xe16,0xe16,0xe16,0xe16,0xe16,0xe16,0xe16,0xe16,0xe16,0xe16,0xe16,0xe16,
-0xe16,0xe16,0xe16,0xe16,0xe16,0xe16,0xe16,0xe16,0xe16,0xe16,0xe16,0x1b09,0xe19,0xe19,0xe19,0xe19,
-0xe19,0xe19,0xe19,0xe19,0xe19,0xe19,0xe19,0xe19,0xe19,0xe19,0xe19,0xe19,0xe19,0xe19,0xe19,0xe19,
-0xe19,0xe19,0xe19,0xe19,0xe19,0xe19,0xe19,0xe19,0xe19,0xe19,0xe19,0xe19,0xea0,0xea0,0xea0,0xea0,
-0xea0,0xea0,0xea0,0xea0,0xe31,0xe31,0xe31,0xe31,0xe31,0xe31,0xe31,0xe31,0xe31,0xe31,0xe31,0xe31,
-0xe31,0xe31,0xe31,0xef1,0xef1,0xef1,0xef1,0xff0,0xff0,0xff0,0xff0,0xff0,0xe3a,0xe3a,0xe3a,0xe3a,
-0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,
-0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe3a,0xe43,0xe43,0xe43,0xe43,
-0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,
-0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,0xe4c,0xe4c,0xe4c,0xe4c,
+0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb76,0xb7c,0xb7c,0xb7c,0xb7c,
+0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,
+0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,
+0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb7c,0xb7c,0xb7c,0xb7c,
+0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,
+0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb7c,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,
+0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,0xb79,
+0xb7c,0xb7c,0xb7c,0xb7c,0xb7f,0xb7f,0xb7f,0xb7f,0xb7f,0xb7f,0xb7f,0xb7f,0xb7f,0xb7f,0xb7f,0xb7f,
+0xb7f,0xb7f,0xb7f,0xb7f,0xb7f,0xb7f,0xb7f,0xb7f,0xb7f,0xb7f,0xb7f,0xb7f,0xb7f,0xb7f,0xb7f,0xb7f,
+0xb7f,0xb7f,0xb7f,0xb7f,0xb85,0xb85,0xb85,0xb85,0xb85,0xb85,0xb85,0xb85,0xb85,0xb85,0xb85,0xb85,
+0xb85,0xb85,0xb85,0xb85,0xb85,0xb85,0xb85,0xb85,0xb85,0xb85,0xb85,0xb85,0xb85,0xb85,0xb85,0xb85,
+0xb85,0xb85,0xb85,0xb85,0xb85,0xb85,0xb85,0x1adc,0x1adc,0x1adc,0x1adc,0x1adc,0x1adc,0x1adc,0x1ba8,0x1ba8,
+0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,
+0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,0xb88,
+0xbe2,0xbe2,0xbe2,0xbe2,0xbe2,0xbe2,0xbe2,0xbe2,0xbe2,0xbe2,0xbe2,0xbe2,0xbe2,0xbe2,0xbe2,0xbe2,
+0xbe2,0xbe2,0xbe2,0xbe2,0xbe2,0xbe2,0xbdf,0xbe2,0xbdf,0xbdf,0xbdf,0xbdf,0xbdf,0xbdf,0xbdf,0xbdf,
+0xbdf,0xbdf,0xbdf,0xbdf,0xbdf,0xbdf,0xbdf,0xce1,0xce4,0xdd1,0xdd1,0xdd1,0xdd1,0xdd1,0xdd1,0xdd1,
+0xdd1,0xdd1,0xdd1,0xdd1,0xee8,0xee8,0xee8,0xee8,0xbee,0xbee,0xbee,0xbee,0xbee,0xbee,0xbee,0xbee,
+0xbee,0xbee,0xce7,0xce7,0xce7,0xce7,0xce7,0xce7,0xce7,0xce7,0xdd4,0xe88,0xdd4,0xdd4,0xdd4,0xdd4,
+0xdd4,0xdd4,0xdd4,0xdd4,0xdd4,0xfd8,0x1272,0x1272,0xddd,0xddd,0xddd,0xddd,0xddd,0xde3,0xde0,0xefa,
+0xefa,0xefa,0xefa,0x13e0,0xfea,0x13e0,0x132c,0x132c,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,
+0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc21,0xc4e,0xc4b,0xc4e,0xc4b,0xc4e,0xc4b,
+0x10fe,0x10fb,0xff0,0xfed,0xc24,0xc24,0xc24,0xc24,0xc24,0xc24,0xc24,0xc24,0xc24,0xc24,0xc24,0xc24,
+0xc24,0xc24,0xc24,0xc24,0xc27,0xc27,0xc27,0xc27,0xc27,0xc27,0xc27,0xc27,0xc27,0xc27,0xc27,0xc27,
+0xc27,0xc27,0xc27,0xc27,0xc27,0xc27,0xc27,0xc27,0xc27,0xc27,0xc27,0xc27,0xc27,0xc27,0xc27,0xc27,
+0xc27,0xc27,0xc27,0xc27,0xc2a,0xc2a,0xc2a,0xc30,0xc2d,0xc54,0xc51,0xc30,0xc2d,0xc30,0xc2d,0xc30,
+0xc2d,0xc30,0xc2d,0xc30,0xc2d,0xc30,0xc2d,0xc30,0xc2d,0xc30,0xc2d,0xc30,0xc2d,0xc2a,0xc2a,0xc2a,
+0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,
+0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,
+0xc30,0xc2d,0xc30,0xc2d,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,
+0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,0xc2a,
+0xc30,0xc2d,0xc2a,0xc2a,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,
+0xc39,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,
+0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,
+0xc33,0xc33,0xc33,0xc33,0xc39,0xc39,0xc39,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,
+0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,0xc33,
+0xc33,0xc33,0xc33,0xc33,0xc36,0xc33,0xc33,0xc33,0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,
+0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,
+0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,0xc6c,0xce7,0xd53,0xdd4,0xdd4,0xdd4,0xdd4,0xdd4,0xdd4,
+0xdd4,0xdd4,0xe88,0xe88,0xdd4,0xdd4,0xdd4,0xdd4,0xdd4,0xdd4,0xeeb,0xfd8,0xfd8,0xfd8,0xfd8,0xfd8,
+0xfd8,0xfd8,0xfd8,0xfd8,0xfd8,0x1296,0x1296,0x1275,0xd0b,0xd0b,0xd0b,0xd0b,0xd0b,0xd0b,0xd0b,0xd0b,
+0xd0b,0xd0b,0xd0b,0xd0b,0xd0b,0xd0b,0xd0b,0xd0b,0xd0b,0xd0b,0xd0b,0xd0b,0xd0b,0xd0b,0xd0b,0xd0b,
+0xd0b,0xd0b,0xd0b,0xd0b,0xd0b,0xd0b,0xd0b,0xd0b,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd11,0xd11,
+0xd11,0xd11,0xd11,0xd0e,0xd23,0xd23,0xd23,0xd1d,0xd23,0xd23,0xd23,0xd23,0xd23,0xd23,0xd23,0xd23,
+0xd23,0xd23,0xd23,0xd1d,0xd23,0xd23,0xd23,0xd23,0xd17,0xd17,0xd20,0xd20,0xd20,0xd20,0xd14,0xd14,
+0xd14,0xd14,0xd14,0xd1a,0xde9,0xde9,0xde9,0xde9,0xde9,0xde9,0xde9,0xde9,0xde9,0xde9,0xde9,0xde9,
+0xde6,0xde9,0xde9,0xde9,0xde9,0xde9,0xde9,0xde9,0xd23,0xd23,0xd23,0xd23,0xd23,0xd23,0xd23,0xd23,
+0xd23,0xd23,0xd23,0xd23,0xd23,0xd23,0xd1d,0xd23,0xd23,0xd23,0xd23,0xd23,0xd23,0xd23,0xd23,0xd23,
+0xd23,0xd23,0xd23,0xd23,0xd23,0xd17,0xd17,0xd17,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,
+0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,
+0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd1a,0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,
+0xd26,0xd26,0xd26,0xd26,0xd26,0xd26,0xdec,0xdec,0xdec,0xdec,0xdec,0xdec,0xefd,0xefd,0xefd,0xefd,
+0xefd,0xefd,0xefd,0x1107,0x1107,0xff3,0xff3,0xff3,0xd29,0xd29,0xd29,0xd29,0xd29,0xd29,0xd29,0xd29,
+0xd29,0xd29,0xd29,0xd29,0xd29,0xd29,0xd29,0xd29,0xd29,0xd29,0xd29,0xd29,0xd29,0xd29,0xd29,0xd29,
+0xd29,0xd29,0xd29,0xd29,0xd29,0xd29,0xd29,0xd29,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,
+0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,
+0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd2f,0xd38,0xd38,0xd38,0xd38,0xd38,0xd38,0xd38,0xd38,
+0xd38,0xd38,0xd38,0xd38,0xd38,0xd38,0xd38,0xd38,0xd38,0xd38,0xd38,0xd38,0xd38,0xd38,0xd38,0xd38,
+0xd38,0xd38,0xd38,0xd38,0xd38,0xd38,0xd38,0xd38,0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,
+0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,
+0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,0xd44,0xd50,0xd50,0xd50,0xd50,0xd50,0xd50,0xd50,0xd50,
+0xd50,0xd50,0xd50,0xd50,0xd50,0xd50,0xd50,0xd50,0xd50,0xd50,0xd50,0xd50,0xd50,0xd50,0xd50,0xd50,
+0xd50,0xd50,0xd50,0xd50,0xd50,0xd50,0xd50,0xd50,0xdf2,0xdf2,0xdf2,0xdf2,0xdf2,0xdf2,0xdf2,0xdf2,
+0xdf2,0xdf2,0xdf2,0xdf2,0xdf2,0xdf2,0xdf2,0xdf2,0xdf2,0xdf2,0xdf2,0xdf2,0xdf2,0xdf2,0xdf2,0xdf2,
+0xdf2,0xdf2,0xdf2,0xdf2,0xdf2,0xdf2,0xdf2,0xdf2,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,
+0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xdf5,0xdf5,0xdf5,
+0xdf5,0xdf5,0xdf5,0xdf5,0xdf5,0xdf5,0xdf5,0xdf5,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,
+0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,
+0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xdf8,0xeb8,0xeb8,0xe0a,0xe0a,0xf00,0xf00,0xf00,0xf00,
+0xf00,0xf00,0xf00,0xfff,0xfff,0x1002,0xfff,0xfff,0xffc,0xffc,0xffc,0xffc,0xffc,0xffc,0xffc,0xffc,
+0xffc,0xffc,0xffc,0xffc,0xffc,0xffc,0xffc,0xffc,0xe19,0xe16,0xe19,0xe16,0xe19,0xe16,0xe19,0xe16,
+0xe19,0xe16,0xe19,0xe16,0xe19,0xe16,0xe19,0xe16,0xe19,0xe16,0xe19,0xe16,0xe19,0xe16,0xe19,0xe16,
+0xe19,0xe16,0xe19,0xe16,0xe19,0xe16,0xe19,0xe16,0xe25,0xe25,0xe25,0xe25,0xe25,0xe25,0xe25,0xe25,
+0xe25,0xe25,0xe25,0xe25,0xe25,0xe25,0xe25,0xe25,0xe25,0xe25,0xe25,0xe25,0xe25,0xe25,0xe25,0xe25,
+0xe25,0xe25,0xe25,0xe25,0xe25,0xe25,0xe25,0xe25,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,
+0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0x1b1e,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,
+0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,
+0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0xe28,0x1b1b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,
+0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,
+0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xe2b,0xeb2,0xeb2,0xeb2,0xeb2,0xeb2,0xeb2,0xeb2,0xeb2,
+0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,0xe43,0xf03,
+0xf03,0xf03,0xf03,0x1005,0x1005,0x1005,0x1005,0x1005,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,
0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,
-0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe46,0xe49,0xe49,0xe49,0xe49,
-0xe49,0xe49,0xe49,0xe49,0xe49,0xe49,0xe49,0xe49,0xe49,0xe49,0xe49,0xe49,0xe49,0xe49,0xe49,0xe49,
-0xe49,0xe49,0xe49,0xe49,0xe49,0xe49,0xe49,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe55,0xe55,0xe55,0xe55,
-0xe55,0xe55,0xe55,0xe55,0xe55,0xe55,0xe55,0xe55,0xe55,0xe55,0xe52,0xe52,0xe52,0xe52,0xe52,0xe52,
-0xe52,0xe52,0xe4f,0xe58,0xffc,0xff6,0x1005,0xff3,0xe55,0xe55,0xff3,0xff3,0xe67,0xe67,0xe5b,0xe67,
-0xe67,0xe67,0xe5e,0xe67,0xe67,0xe67,0xe67,0xe5b,0xe67,0xe67,0xe67,0xe67,0xe67,0xe67,0xe67,0xe67,
-0xe67,0xe67,0xe67,0xe67,0xe67,0xe67,0xe67,0xe67,0xe67,0xe67,0xe67,0xe67,0xe6a,0xe6a,0xe6a,0xe6a,
-0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,
-0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,0xe6a,0xe82,0xe82,0xe82,0xe82,
-0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,
-0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xe82,0xea3,0xea3,0xea3,0xea3,
-0xea3,0xea3,0xea3,0xea3,0xea3,0xea3,0xea3,0xea3,0xea3,0xea3,0xea3,0xea3,0x10fb,0x10fb,0x10fb,0x10fb,
-0x10fb,0x10fb,0x10fb,0x10fb,0x10fb,0x10fb,0x10fb,0x10fb,0x10fb,0x10fb,0x10fb,0x10fb,0xed6,0xed6,0xed6,0xed3,
-0xed3,0xed3,0xed3,0xed3,0x1134,0x1380,0x1380,0x1380,0x1380,0x1305,0x1305,0x1305,0x1383,0x1308,0x1308,0x1383,
-0x14c4,0x14c4,0x14c4,0x14c4,0x14c4,0x14c4,0x14c4,0x1785,0x1785,0x1785,0x1785,0x184b,0xeeb,0xeeb,0xeeb,0xeeb,
-0xfe1,0xfe1,0xfe1,0xfe1,0xfe1,0xfe1,0xfe1,0xfe1,0xfe1,0xfe1,0xfe1,0xfe1,0xfe4,0xfe4,0xfe4,0xfe4,
-0xfe4,0xfe4,0xfe4,0xfe4,0xfe4,0xfe4,0xfe4,0xfe4,0xfe4,0xfe4,0xfe4,0xfe4,0xfea,0xfea,0xfea,0xfea,
-0xfea,0xfea,0xfea,0x14df,0x14df,0x14df,0x14df,0x14df,0x14df,0x14df,0x14df,0x14df,0x14df,0x14df,0x14df,0x14df,
-0x14df,0x14e2,0x1854,0x1854,0x18d5,0x1854,0x1bb4,0x178b,0x131d,0x113d,0xeee,0xeee,0xf0c,0xf0c,0xf0c,0xf0c,
-0xf1e,0xf27,0xf2a,0xf27,0xf2a,0xf27,0xf2a,0xf27,0xf2a,0xf27,0xf2a,0xf27,0xf27,0xf27,0xf2a,0xf27,
-0xf27,0xf27,0xf27,0xf27,0xf27,0xf27,0xf27,0xf27,0xf27,0xf27,0xf27,0xf27,0xf27,0xf27,0xf27,0xf27,
-0xf27,0xf27,0xf27,0xf27,0xf0f,0xf0c,0xf0c,0xf0c,0xf0c,0xf0c,0xf0c,0xf21,0xf0c,0xf21,0xf1e,0xf1e,
-0xf33,0xf30,0xf33,0xf33,0xf33,0xf30,0xf30,0xf33,0xf30,0xf33,0xf30,0xf33,0xf30,0x1017,0x1017,0x1017,
-0x1152,0x100e,0x1017,0x100e,0xf30,0xf33,0xf30,0xf30,0x100e,0x100e,0x100e,0x100e,0x1011,0x1014,0x1152,0x1152,
-0xf36,0xf36,0x1029,0x1020,0x1029,0x1020,0x1029,0x1020,0x1029,0x1020,0x1029,0x1020,0x1029,0x1020,0x1029,0x1020,
-0x1020,0x1020,0x1029,0x1020,0x1029,0x1020,0x1029,0x1020,0x1029,0x1020,0x1029,0x1020,0x1029,0x1020,0x1029,0x1020,
-0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,
-0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,0xf3c,
-0xf4b,0xf4b,0xf4b,0xf4b,0xf4b,0xf4b,0xf4b,0xf4b,0xf4b,0xf4b,0xf4b,0xf4b,0xf4b,0xf4b,0xf4b,0xf4b,
-0xf4b,0xf4b,0xf4b,0xf4b,0xf4b,0xf4b,0xf4b,0xf4b,0xf4b,0xf4b,0xf4b,0xf4b,0xf4b,0xf4b,0xf4b,0xf4b,
-0xf4b,0xf4b,0xf4b,0x1500,0x1500,0x1500,0x1500,0x1500,0x1500,0x1500,0x1500,0x1500,0x1500,0x1500,0x1500,0x1500,
-0x1500,0x1500,0x1500,0x1500,0xf51,0xf51,0xf51,0xf51,0xf51,0xf51,0xf51,0xf51,0xf51,0xf51,0xf51,0xf51,
-0xf51,0xf51,0xf51,0xf51,0xf51,0xf51,0xf51,0xf51,0xf51,0xf51,0xf51,0xf51,0xf51,0xf51,0xf51,0xf51,
-0xf51,0xf51,0xf51,0xf51,0xf99,0xfb1,0xfa8,0xfae,0xfae,0xfb1,0xfb1,0xfa8,0xfa8,0xfae,0xfae,0xfae,
-0xfae,0xfae,0xfb1,0xfb1,0xfb1,0xf99,0xf99,0xf99,0xf99,0xfb1,0xfb1,0xfb1,0xfb1,0xfb1,0xfb1,0xfb1,
-0xfb1,0xfb1,0xfb1,0xfb1,0xfb1,0xfb1,0xf99,0xfa8,0xfab,0xf99,0xf99,0xfae,0xfae,0xfae,0xfae,0xfae,
-0xfae,0xf9c,0xfb1,0xfae,0xfa5,0xfa5,0xfa5,0xfa5,0xfa5,0xfa5,0xfa5,0xfa5,0xfa5,0xfa5,0x111c,0x111c,
-0x1119,0x1116,0xfa2,0xfa2,0xfc9,0xfc9,0xfc9,0xfc9,0x1284,0x1284,0x1263,0x1263,0x1263,0x1260,0x1260,0x1260,
-0x1260,0x1263,0x1386,0x1263,0x1263,0x1263,0x1260,0x1263,0x1284,0x1260,0x1260,0x1260,0x1263,0x1263,0x1260,0x1260,
-0x1263,0x1260,0x1260,0x1263,0xfe4,0xfe4,0xfe4,0xfe4,0xfe4,0xfe1,0xfe1,0xfe4,0xfe4,0xfe4,0xfe4,0xfe4,
-0xfe4,0x14d9,0x14d9,0x14d9,0x10f2,0xfe1,0xfe1,0xfe1,0xfe1,0x1290,0x126c,0x126c,0x126c,0x126c,0x14d9,0x14d9,
-0x14d9,0x14d9,0x14d9,0x14d9,0x1002,0x1002,0xfff,0xff9,0xfff,0xff9,0xfff,0xff9,0xfff,0xff9,0xff6,0xff6,
-0xff6,0xff6,0x100b,0x1008,0xff6,0x114f,0x13da,0x13dd,0x13dd,0x13da,0x13da,0x13da,0x13da,0x13da,0x13e0,0x13e0,
-0x14f4,0x14e8,0x14e8,0x14e5,0x1029,0x1020,0x1029,0x1020,0x1029,0x1020,0x1029,0x1020,0x101d,0x101a,0x101a,0x1029,
-0x1020,0x1329,0x1326,0x16da,0x1329,0x1326,0x13e9,0x13e6,0x14f7,0x14f7,0x14fd,0x14f7,0x14fd,0x14f7,0x14fd,0x14f7,
-0x14fd,0x14f7,0x14fd,0x14f7,0x1029,0x1020,0x1029,0x1020,0x1029,0x1020,0x1029,0x1020,0x1029,0x1020,0x1029,0x1020,
-0x1029,0x1020,0x1029,0x1020,0x1029,0x1020,0x1029,0x1020,0x1029,0x1020,0x1029,0x1020,0x1029,0x1020,0x1029,0x1020,
-0x1029,0x1020,0x1029,0x1020,0x1023,0x1020,0x1020,0x1020,0x1020,0x1020,0x1020,0x1020,0x1020,0x1029,0x1020,0x1029,
-0x1020,0x1029,0x1029,0x1020,0x102c,0x102c,0x1032,0x1038,0x1038,0x1038,0x1038,0x1038,0x1038,0x1038,0x1038,0x1038,
-0x1038,0x1038,0x1038,0x1038,0x1038,0x1038,0x1038,0x1038,0x1038,0x1038,0x1038,0x1038,0x1038,0x1038,0x1038,0x1038,
-0x1038,0x1038,0x1038,0x1038,0x1038,0x1032,0x102c,0x102c,0x102c,0x102c,0x1032,0x1032,0x102c,0x102c,0x1035,0x13f2,
-0x13ef,0x13ef,0x1038,0x1038,0x102f,0x102f,0x102f,0x102f,0x102f,0x102f,0x102f,0x102f,0x102f,0x102f,0x13f5,0x13f5,
-0x13f5,0x13f5,0x13f5,0x13f5,0x104d,0x104d,0x104d,0x104d,0x104d,0x104d,0x104d,0x104d,0x104d,0x104d,0x104d,0x104d,
+0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe4c,0xe55,0xe55,0xe55,0xe55,0xe55,0xe55,0xe55,0xe55,
+0xe55,0xe55,0xe55,0xe55,0xe55,0xe55,0xe55,0xe55,0xe55,0xe55,0xe55,0xe55,0xe55,0xe55,0xe55,0xe55,
+0xe55,0xe55,0xe55,0xe55,0xe55,0xe55,0xe55,0xe55,0xe5e,0xe5e,0xe5e,0xe5e,0xe5e,0xe5e,0xe5e,0xe5e,
+0xe5e,0xe5e,0xe5e,0xe5e,0xe5e,0xe5e,0xe5e,0xe5e,0xe5e,0xe5e,0xe5e,0xe5e,0xe5e,0xe5e,0xe5e,0xe5e,
+0xe5e,0xe5e,0xe5e,0xe5e,0xe5e,0xe5e,0xe5e,0xe58,0xe5b,0xe5b,0xe5b,0xe5b,0xe5b,0xe5b,0xe5b,0xe5b,
+0xe5b,0xe5b,0xe5b,0xe5b,0xe5b,0xe5b,0xe5b,0xe5b,0xe5b,0xe5b,0xe5b,0xe5b,0xe5b,0xe5b,0xe5b,0xe5b,
+0xe5b,0xe5b,0xe5b,0xe5e,0xe5e,0xe5e,0xe5e,0xe5e,0xe67,0xe67,0xe67,0xe67,0xe67,0xe67,0xe67,0xe67,
+0xe67,0xe67,0xe67,0xe67,0xe67,0xe67,0xe64,0xe64,0xe64,0xe64,0xe64,0xe64,0xe64,0xe64,0xe61,0xe6a,
+0x1011,0x100b,0x101a,0x1008,0xe67,0xe67,0x1008,0x1008,0xe79,0xe79,0xe6d,0xe79,0xe79,0xe79,0xe70,0xe79,
+0xe79,0xe79,0xe79,0xe6d,0xe79,0xe79,0xe79,0xe79,0xe79,0xe79,0xe79,0xe79,0xe79,0xe79,0xe79,0xe79,
+0xe79,0xe79,0xe79,0xe79,0xe79,0xe79,0xe79,0xe79,0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,
+0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,
+0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,0xe7c,0xe94,0xe94,0xe94,0xe94,0xe94,0xe94,0xe94,0xe94,
+0xe94,0xe94,0xe94,0xe94,0xe94,0xe94,0xe94,0xe94,0xe94,0xe94,0xe94,0xe94,0xe94,0xe94,0xe94,0xe94,
+0xe94,0xe94,0xe94,0xe94,0xe94,0xe94,0xe94,0xe94,0xeb5,0xeb5,0xeb5,0xeb5,0xeb5,0xeb5,0xeb5,0xeb5,
+0xeb5,0xeb5,0xeb5,0xeb5,0xeb5,0xeb5,0xeb5,0xeb5,0x1110,0x1110,0x1110,0x1110,0x1110,0x1110,0x1110,0x1110,
+0x1110,0x1110,0x1110,0x1110,0x1110,0x1110,0x1110,0x1110,0xee8,0xee8,0xee8,0xee5,0xee5,0xee5,0xee5,0xee5,
+0x1149,0x1392,0x1392,0x1392,0x1392,0x1317,0x1317,0x1317,0x1395,0x131a,0x131a,0x1395,0x14d6,0x14d6,0x14d6,0x14d6,
+0x14d6,0x14d6,0x14d6,0x1797,0x1797,0x1797,0x1797,0x185d,0xefd,0xefd,0xefd,0xefd,0xff3,0xff3,0xff3,0xff3,
+0xff3,0xff3,0xff3,0xff3,0xff3,0xff3,0xff3,0xff3,0xff6,0xff6,0xff6,0xff6,0xff6,0xff6,0xff6,0xff6,
+0xff6,0xff6,0xff6,0xff6,0xff6,0xff6,0xff6,0xff6,0xffc,0xffc,0xffc,0xffc,0xffc,0xffc,0xffc,0x14f1,
+0x14f1,0x14f1,0x14f1,0x14f1,0x14f1,0x14f1,0x14f1,0x14f1,0x14f1,0x14f1,0x14f1,0x14f1,0x14f1,0x14f4,0x1866,0x1866,
+0x18e7,0x1866,0x1bc6,0x179d,0x132f,0x1152,0xf00,0xf00,0xf1e,0xf1e,0xf1e,0xf1e,0xf30,0xf39,0xf3c,0xf39,
+0xf3c,0xf39,0xf3c,0xf39,0xf3c,0xf39,0xf3c,0xf39,0xf39,0xf39,0xf3c,0xf39,0xf39,0xf39,0xf39,0xf39,
+0xf39,0xf39,0xf39,0xf39,0xf39,0xf39,0xf39,0xf39,0xf39,0xf39,0xf39,0xf39,0xf39,0xf39,0xf39,0xf39,
+0xf21,0xf1e,0xf1e,0xf1e,0xf1e,0xf1e,0xf1e,0xf33,0xf1e,0xf33,0xf30,0xf30,0xf45,0xf42,0xf45,0xf45,
+0xf45,0xf42,0xf42,0xf45,0xf42,0xf45,0xf42,0xf45,0xf42,0x102c,0x102c,0x102c,0x1167,0x1023,0x102c,0x1023,
+0xf42,0xf45,0xf42,0xf42,0x1023,0x1023,0x1023,0x1023,0x1026,0x1029,0x1167,0x1167,0xf48,0xf48,0x103e,0x1035,
+0x103e,0x1035,0x103e,0x1035,0x103e,0x1035,0x103e,0x1035,0x103e,0x1035,0x103e,0x1035,0x1035,0x1035,0x103e,0x1035,
+0x103e,0x1035,0x103e,0x1035,0x103e,0x1035,0x103e,0x1035,0x103e,0x1035,0x103e,0x1035,0xf4e,0xf4e,0xf4e,0xf4e,
+0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,
+0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf4e,0xf5d,0xf5d,0xf5d,0xf5d,
+0xf5d,0xf5d,0xf5d,0xf5d,0xf5d,0xf5d,0xf5d,0xf5d,0xf5d,0xf5d,0xf5d,0xf5d,0xf5d,0xf5d,0xf5d,0xf5d,
+0xf5d,0xf5d,0xf5d,0xf5d,0xf5d,0xf5d,0xf5d,0xf5d,0xf5d,0xf5d,0xf5d,0xf5d,0xf5d,0xf5d,0xf5d,0x1512,
+0x1512,0x1512,0x1512,0x1512,0x1512,0x1512,0x1512,0x1512,0x1512,0x1512,0x1512,0x1512,0x1512,0x1512,0x1512,0x1512,
+0xf63,0xf63,0xf63,0xf63,0xf63,0xf63,0xf63,0xf63,0xf63,0xf63,0xf63,0xf63,0xf63,0xf63,0xf63,0xf63,
+0xf63,0xf63,0xf63,0xf63,0xf63,0xf63,0xf63,0xf63,0xf63,0xf63,0xf63,0xf63,0xf63,0xf63,0xf63,0xf63,
+0xfab,0xfc3,0xfba,0xfc0,0xfc0,0xfc3,0xfc3,0xfba,0xfba,0xfc0,0xfc0,0xfc0,0xfc0,0xfc0,0xfc3,0xfc3,
+0xfc3,0xfab,0xfab,0xfab,0xfab,0xfc3,0xfc3,0xfc3,0xfc3,0xfc3,0xfc3,0xfc3,0xfc3,0xfc3,0xfc3,0xfc3,
+0xfc3,0xfc3,0xfab,0xfba,0xfbd,0xfab,0xfab,0xfc0,0xfc0,0xfc0,0xfc0,0xfc0,0xfc0,0xfae,0xfc3,0xfc0,
+0xfb7,0xfb7,0xfb7,0xfb7,0xfb7,0xfb7,0xfb7,0xfb7,0xfb7,0xfb7,0x1131,0x1131,0x112e,0x112b,0xfb4,0xfb4,
+0xfdb,0xfdb,0xfdb,0xfdb,0x1296,0x1296,0x1275,0x1275,0x1275,0x1272,0x1272,0x1272,0x1272,0x1275,0x1398,0x1275,
+0x1275,0x1275,0x1272,0x1275,0x1296,0x1272,0x1272,0x1272,0x1275,0x1275,0x1272,0x1272,0x1275,0x1272,0x1272,0x1275,
+0xff6,0xff6,0xff6,0xff6,0xff6,0xff3,0xff3,0xff6,0xff6,0xff6,0xff6,0xff6,0xff6,0x14eb,0x14eb,0x14eb,
+0x1107,0xff3,0xff3,0xff3,0xff3,0x12a2,0x127e,0x127e,0x127e,0x127e,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,
+0x1017,0x1017,0x1014,0x100e,0x1014,0x100e,0x1014,0x100e,0x1014,0x100e,0x100b,0x100b,0x100b,0x100b,0x1020,0x101d,
+0x100b,0x1164,0x13ec,0x13ef,0x13ef,0x13ec,0x13ec,0x13ec,0x13ec,0x13ec,0x13f2,0x13f2,0x1506,0x14fa,0x14fa,0x14f7,
+0x103e,0x1035,0x103e,0x1035,0x103e,0x1035,0x103e,0x1035,0x1032,0x102f,0x102f,0x103e,0x1035,0x133b,0x1338,0x16ec,
+0x133b,0x1338,0x13fb,0x13f8,0x1509,0x1509,0x150f,0x1509,0x150f,0x1509,0x150f,0x1509,0x150f,0x1509,0x150f,0x1509,
+0x103e,0x1035,0x103e,0x1035,0x103e,0x1035,0x103e,0x1035,0x103e,0x1035,0x103e,0x1035,0x103e,0x1035,0x103e,0x1035,
+0x103e,0x1035,0x103e,0x1035,0x103e,0x1035,0x103e,0x1035,0x103e,0x1035,0x103e,0x1035,0x103e,0x1035,0x103e,0x1035,
+0x1038,0x1035,0x1035,0x1035,0x1035,0x1035,0x1035,0x1035,0x1035,0x103e,0x1035,0x103e,0x1035,0x103e,0x103e,0x1035,
+0x1041,0x1041,0x1047,0x104d,0x104d,0x104d,0x104d,0x104d,0x104d,0x104d,0x104d,0x104d,0x104d,0x104d,0x104d,0x104d,
0x104d,0x104d,0x104d,0x104d,0x104d,0x104d,0x104d,0x104d,0x104d,0x104d,0x104d,0x104d,0x104d,0x104d,0x104d,0x104d,
-0x104d,0x104d,0x104d,0x104d,0x1056,0x1056,0x1056,0x1056,0x1056,0x1056,0x1056,0x1056,0x1056,0x1056,0x1056,0x1056,
-0x1056,0x1056,0x1056,0x1056,0x1056,0x1056,0x1056,0x1056,0x1056,0x1056,0x1056,0x1056,0x1059,0x1059,0x1059,0x105c,
-0x1059,0x1059,0x105f,0x105f,0x1062,0x1062,0x1062,0x1062,0x1062,0x1062,0x1062,0x1062,0x1062,0x1062,0x1062,0x1062,
+0x104d,0x1047,0x1041,0x1041,0x1041,0x1041,0x1047,0x1047,0x1041,0x1041,0x104a,0x1404,0x1401,0x1401,0x104d,0x104d,
+0x1044,0x1044,0x1044,0x1044,0x1044,0x1044,0x1044,0x1044,0x1044,0x1044,0x1407,0x1407,0x1407,0x1407,0x1407,0x1407,
+0x1062,0x1062,0x1062,0x1062,0x1062,0x1062,0x1062,0x1062,0x1062,0x1062,0x1062,0x1062,0x1062,0x1062,0x1062,0x1062,
0x1062,0x1062,0x1062,0x1062,0x1062,0x1062,0x1062,0x1062,0x1062,0x1062,0x1062,0x1062,0x1062,0x1062,0x1062,0x1062,
-0x1062,0x1062,0x1062,0x1062,0x106b,0x106b,0x106b,0x106b,0x106b,0x106b,0x106b,0x106b,0x106b,0x106b,0x106b,0x106b,
-0x106e,0x1065,0x1074,0x1071,0x106b,0x106b,0x106b,0x106b,0x106b,0x106b,0x106b,0x106b,0x106b,0x106b,0x106b,0x106b,
0x106b,0x106b,0x106b,0x106b,0x106b,0x106b,0x106b,0x106b,0x106b,0x106b,0x106b,0x106b,0x106b,0x106b,0x106b,0x106b,
-0x106b,0x106b,0x106b,0x106b,0x132f,0x132c,0x1086,0x1080,0x1086,0x1080,0x1086,0x1080,0x1086,0x1080,0x1086,0x1080,
-0x1086,0x1080,0x1083,0x1104,0x1077,0x1077,0x1077,0x107d,0x13f8,0x13f8,0x13f8,0x13f8,0x13f8,0x13f8,0x13f8,0x13f8,
-0x107a,0x107a,0x107d,0x1089,0x1086,0x1080,0x1086,0x1080,0x1086,0x1080,0x1086,0x1080,0x1086,0x1080,0x1086,0x1080,
-0x1086,0x1080,0x1086,0x1080,0x1086,0x1080,0x1086,0x1080,0x1086,0x1080,0x1086,0x1080,0x1086,0x1080,0x1086,0x1080,
-0x1086,0x1080,0x1086,0x1080,0x150c,0x1509,0x150c,0x1509,0x150f,0x150f,0x16e3,0x13f8,0x1092,0x1092,0x1095,0x1095,
-0x1095,0x1095,0x1095,0x1095,0x1095,0x1095,0x1095,0x1095,0x1095,0x1095,0x1095,0x1095,0x1095,0x1095,0x1095,0x1095,
-0x1095,0x1095,0x1095,0x1095,0x1095,0x1095,0x1095,0x1095,0x1095,0x1095,0x1095,0x1095,0x1092,0x1092,0x1092,0x1092,
-0x1092,0x1092,0x1092,0x1092,0x1092,0x1092,0x1092,0x1092,0x10a4,0x10a4,0x10a4,0x10a4,0x10a4,0x10a4,0x109b,0x109b,
-0x109b,0x109b,0x109b,0x109e,0x109e,0x109e,0x10f8,0x10a7,0x10b6,0x10b6,0x10b6,0x10b6,0x10b6,0x10b6,0x10b6,0x10b6,
-0x10b6,0x10b6,0x10b6,0x10b6,0x10b6,0x10b6,0x10b6,0x10b6,0x10a1,0x10a1,0x10a1,0x10a1,0x10a1,0x10a1,0x10a1,0x10a1,
-0x10a1,0x10a1,0x10a4,0x10a4,0x10a4,0x10a4,0x10a4,0x10a4,0x10a4,0x10a4,0x10a4,0x10a4,0x10a4,0x10a4,0x10a4,0x10a4,
-0x10a4,0x10a4,0x10a4,0x10a4,0x10a4,0x10a4,0x10a4,0x10a4,0x10c5,0x10c5,0x10c5,0x10c5,0x10c5,0x10c5,0x10c5,0x10c5,
-0x10c5,0x10c5,0x10c5,0x10c5,0x10c5,0x10c5,0x10c5,0x10c5,0x10c5,0x10c5,0x10c5,0x10c5,0x10c5,0x10c5,0x10c5,0x10c5,
-0x10c5,0x10c5,0x10c5,0x10c5,0x10c5,0x10c5,0x10c5,0x10c5,0x10d7,0x10d7,0x10d7,0x10d7,0x10d7,0x10d7,0x10d7,0x10d7,
-0x10d7,0x10d7,0x10d7,0x10d7,0x10d7,0x10d7,0x10d7,0x10d7,0x10d7,0x10d7,0x10d7,0x10d7,0x10d7,0x10d7,0x10d7,0x10d7,
-0x10d7,0x10d7,0x10d7,0x10d7,0x10d7,0x10d7,0x10d7,0x10d7,0x10e0,0x10e0,0x10e0,0x10e0,0x10f5,0x10e0,0x10e0,0x10e0,
-0x10e0,0x10e0,0x10e0,0x10e0,0x10e0,0x10e0,0x10e0,0x10e0,0x10e0,0x10e0,0x10e0,0x10e0,0x10e0,0x10e0,0x10e0,0x10e0,
-0x10e0,0x10e0,0x10e0,0x10e0,0x10e0,0x10e0,0x10e0,0x10e0,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,
-0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,
-0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10e3,0x10ef,0x10ef,0x10ef,0x10ef,0x128a,0x128a,0x128a,0x128a,
-0x128a,0x128a,0x128a,0x128a,0x1482,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x18b4,0x18b4,
-0x18b4,0x18b4,0x18b4,0x18b4,0x18b4,0x18b4,0x18b4,0x18b4,0x1164,0x1164,0x1164,0x1164,0x1164,0x1164,0x1164,0x1164,
-0x1164,0x1164,0x1164,0x1164,0x1164,0x1164,0x1164,0x1164,0x1164,0x1164,0x1164,0x1164,0x1164,0x1164,0x115b,0x115b,
-0x115e,0x115e,0x1164,0x115b,0x115b,0x115b,0x115b,0x115b,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,
-0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,
-0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x116a,0x1185,0x1185,0x1185,0x1185,0x1185,0x1185,0x1185,0x1185,
-0x1185,0x1185,0x1185,0x1185,0x1185,0x1185,0x1185,0x1185,0x1185,0x1185,0x1185,0x1185,0x1185,0x1185,0x1185,0x1185,
-0x1185,0x1185,0x1185,0x1185,0x1185,0x1185,0x1185,0x1185,0x1191,0x1191,0x1191,0x1191,0x1191,0x1191,0x1191,0x1191,
-0x1191,0x1191,0x1191,0x1191,0x1191,0x1191,0x1191,0x1191,0x1191,0x1191,0x1191,0x1191,0x1191,0x1191,0x1191,0x1191,
-0x1191,0x1191,0x1191,0x1191,0x1191,0x1191,0x118e,0x1194,0x11a0,0x11a0,0x11a0,0x11a0,0x11a0,0x11a0,0x11a0,0x11a0,
-0x11a0,0x11a0,0x11a0,0x11a0,0x11a0,0x11a0,0x11a0,0x11a0,0x11a0,0x11a0,0x11a0,0x11a0,0x11a0,0x11a0,0x11a0,0x11a0,
-0x11a0,0x11a0,0x11a0,0x11a0,0x11a0,0x11a0,0x11a0,0x11a0,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,
-0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x12d5,0x11ac,0x12d8,0x11ac,0x11ac,0x11ac,0x11ac,
-0x11a9,0x11a9,0x11a9,0x11ac,0x16e6,0x16e9,0x1911,0x190e,0x11af,0x11af,0x11af,0x11be,0x11c4,0x11c4,0x11c4,0x11c4,
-0x11c4,0x11c4,0x11c4,0x11c4,0x11c4,0x11c4,0x11c4,0x11c4,0x11c4,0x11c4,0x11c4,0x11c4,0x11c4,0x11c4,0x11c4,0x11c4,
-0x11c4,0x11c4,0x11c4,0x11c4,0x11c4,0x11c4,0x11c4,0x11c4,0x11c4,0x11c4,0x11c4,0x11b2,0x11be,0x11be,0x11af,0x11af,
-0x11af,0x11af,0x11be,0x11be,0x11af,0x11af,0x11be,0x11be,0x11d0,0x11d0,0x11d0,0x11d0,0x11d0,0x11d0,0x11d0,0x11d0,
-0x11d0,0x11d0,0x11d0,0x11d0,0x11d0,0x11d0,0x11d0,0x11d0,0x11d3,0x11d0,0x11d0,0x11d0,0x11d0,0x11d0,0x11d0,0x11ca,
-0x11ca,0x11ca,0x11d0,0x11cd,0x1515,0x1518,0x151b,0x151b,0x11e2,0x11e2,0x11e2,0x11e2,0x11e2,0x11e2,0x11e2,0x11e2,
-0x11e2,0x11e2,0x11e2,0x11e2,0x11e2,0x11e2,0x11e2,0x11e2,0x11d6,0x11e2,0x11d6,0x11d6,0x11d6,0x11eb,0x11eb,0x11d6,
-0x11d6,0x11eb,0x11e2,0x11eb,0x11eb,0x11e2,0x11d6,0x11d9,0x11e2,0x11e2,0x11e2,0x11e2,0x11e2,0x11e2,0x11e2,0x11e2,
-0x11e2,0x11e2,0x11e2,0x11e2,0x11e2,0x11e2,0x11e2,0x11e2,0x11e2,0x11e2,0x11e2,0x11e2,0x11e2,0x11e2,0x11e2,0x11e2,
-0x11e2,0x11e2,0x11e2,0x11e2,0x11e2,0x11e2,0x11e2,0x11e2,0x11fd,0x11fd,0x11fd,0x11fd,0x11fd,0x11fd,0x11fd,0x11fd,
-0x11fd,0x11fd,0x11fd,0x11fd,0x11fd,0x11fd,0x11fd,0x11fd,0x11fd,0x11fd,0x11fd,0x11fd,0x11fd,0x11fd,0x11fd,0x11fd,
-0x11fd,0x11fd,0x11fd,0x11fd,0x11fd,0x11fd,0x11fd,0x11fd,0x1215,0x1215,0x1215,0x1215,0x1215,0x1215,0x1215,0x1215,
-0x1215,0x1215,0x1215,0x1215,0x1215,0x1215,0x1215,0x1215,0x1215,0x1215,0x1215,0x1215,0x1215,0x1215,0x1215,0x1215,
-0x1215,0x1215,0x1215,0x1215,0x1215,0x1212,0x1212,0x1212,0x121e,0x121e,0x121e,0x121e,0x121e,0x121e,0x121e,0x121e,
-0x121e,0x121e,0x121e,0x121e,0x121e,0x121e,0x121e,0x121e,0x121e,0x121e,0x121e,0x121e,0x121e,0x121e,0x121e,0x121e,
-0x121e,0x121e,0x121e,0x121e,0x121e,0x121e,0x121e,0x121e,0x122d,0x122d,0x122d,0x122d,0x122d,0x122d,0x122d,0x122d,
-0x122d,0x122d,0x122d,0x122d,0x122d,0x122d,0x122d,0x122d,0x122d,0x122d,0x122d,0x122d,0x122d,0x122d,0x122d,0x122d,
-0x122d,0x122d,0x122d,0x122d,0x122d,0x122d,0x122d,0x122d,0x1233,0x1233,0x1242,0x1245,0x1245,0x1245,0x1245,0x1245,
-0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,
-0x1245,0x1245,0x1248,0x1245,0x1248,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,0x1245,
-0x1245,0x1245,0x1245,0x1248,0x1245,0x1245,0x1245,0x1245,0x1242,0x1242,0x1242,0x1236,0x1236,0x1236,0x1236,0x1242,
-0x1242,0x123c,0x1239,0x123f,0x123f,0x124e,0x124b,0x124b,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,
-0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,
-0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1257,0x1257,0x1257,0x1254,0x1254,0x1254,0x1251,0x1251,
-0x1251,0x1251,0x1254,0x1251,0x1251,0x1251,0x1257,0x1254,0x1257,0x1254,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,
-0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,0x1251,
-0x1251,0x1251,0x1251,0x1251,0x1251,0x1257,0x1254,0x1254,0x1251,0x1251,0x1251,0x1251,0x1272,0x1272,0x1272,0x1272,
-0x1272,0x1272,0x1272,0x1272,0x1272,0x1272,0x1272,0x1275,0x1275,0x1275,0x125a,0x1917,0x137d,0x127e,0x137d,0x137d,
-0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x127e,0x137d,0x127e,0x1263,0x1263,0x130b,0x1260,
-0x130b,0x130b,0x130b,0x130b,0x1260,0x1260,0x1284,0x1260,0x1260,0x1260,0x1260,0x1260,0x1260,0x1263,0x1284,0x1284,
-0x1263,0x1284,0x1260,0x1263,0x1263,0x1266,0x1284,0x1260,0x1260,0x1284,0x1263,0x1263,0x137a,0x137a,0x137a,0x137a,
-0x137a,0x137a,0x137a,0x137a,0x137a,0x137a,0x126f,0x126f,0x126f,0x126f,0x1392,0x1374,0x1278,0x1392,0x1392,0x1392,
-0x1392,0x1392,0x1392,0x1392,0x1392,0x1392,0x1392,0x1815,0x1815,0x1815,0x1815,0x1815,0x1272,0x1272,0x1272,0x1272,
-0x1272,0x1272,0x1272,0x1272,0x1272,0x1272,0x1272,0x151e,0x151e,0x1a64,0x1a64,0x1a64,0x1272,0x1272,0x1272,0x1272,
-0x1272,0x1272,0x1272,0x1272,0x1272,0x1272,0x1272,0x1272,0x1272,0x1272,0x1272,0x1272,0x137d,0x137d,0x127e,0x137d,
-0x137d,0x137d,0x127e,0x137d,0x137d,0x137d,0x1278,0x1278,0x1278,0x1278,0x1278,0x1377,0x137a,0x137a,0x137a,0x137a,
-0x137a,0x137a,0x137a,0x127b,0x137a,0x137a,0x137a,0x137a,0x137a,0x137a,0x137a,0x127b,0x137a,0x137a,0x137a,0x137a,
-0x137a,0x137a,0x137a,0x137a,0x137a,0x137a,0x13fb,0x13fb,0x19c2,0x1a64,0x1a64,0x1a64,0x137a,0x137a,0x137a,0x137a,
-0x137a,0x137a,0x137a,0x137a,0x137a,0x127b,0x137a,0x127b,0x127b,0x137a,0x137a,0x127b,0x129f,0x129f,0x129f,0x129f,
-0x129f,0x129f,0x129f,0x129f,0x129f,0x129f,0x129f,0x129f,0x129f,0x129f,0x129f,0x129f,0x129f,0x129f,0x129f,0x129f,
-0x129f,0x129f,0x129f,0x129f,0x129f,0x129f,0x129f,0x129f,0x129f,0x129f,0x129f,0x129f,0x1329,0x1326,0x1329,0x1326,
-0x1329,0x1326,0x1329,0x1326,0x1329,0x1326,0x13e9,0x14fd,0x14fd,0x14fd,0x1791,0x1905,0x14fd,0x14fd,0x16dd,0x16dd,
-0x16dd,0x16d7,0x16dd,0x16d7,0x1908,0x1905,0x19bf,0x19bc,0x19bf,0x19bc,0x19bf,0x19bc,0x134d,0x134d,0x134d,0x134d,
-0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,
-0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x134d,0x1362,0x1353,0x1362,0x1365,
-0x1365,0x1365,0x1365,0x1365,0x1365,0x1365,0x1365,0x1365,0x1365,0x1365,0x1365,0x1365,0x1365,0x1365,0x1365,0x1365,
-0x1365,0x1365,0x1365,0x1365,0x1365,0x1365,0x1365,0x1365,0x1365,0x1365,0x1365,0x1365,0x1353,0x1353,0x1353,0x1353,
-0x1353,0x1353,0x1353,0x1353,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,
-0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,0x136b,
-0x136b,0x136b,0x136b,0x136b,0x1371,0x1371,0x1371,0x1371,0x1371,0x1371,0x1371,0x1371,0x1371,0x1371,0x1371,0x1371,
-0x1371,0x1371,0x1371,0x1371,0x1371,0x1371,0x1371,0x1371,0x1371,0x1371,0x1371,0x1371,0x1371,0x1371,0x1371,0x1371,
-0x1371,0x1371,0x1371,0x1371,0x139b,0x1398,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,
-0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,
-0x18ba,0x18ba,0x18ba,0x18ba,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,
-0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a1,0x13a1,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,
-0x13a1,0x13a4,0x13a4,0x13a4,0x13a1,0x13a4,0x13a1,0x13a4,0x13a1,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a7,0x13a4,
-0x13a4,0x13a4,0x13a4,0x13a1,0x13a4,0x13a1,0x13a1,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,
-0x13a4,0x13a4,0x13a4,0x13a4,0x13a1,0x13a1,0x13a1,0x13a1,0x13a1,0x13a1,0x13a1,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,
-0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a1,0x13a1,0x13a1,0x13a1,0x13a1,
-0x13a1,0x13a1,0x13a1,0x13a1,0x13a1,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,
-0x13a4,0x13a4,0x13a1,0x13a1,0x13a1,0x13a1,0x13a1,0x13a1,0x13a1,0x13a1,0x13a1,0x13a1,0x13a1,0x13a1,0x1527,0x1527,
-0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,
-0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,
-0x13a4,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x1767,0x1767,0x1767,
-0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x152a,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,
-0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,
-0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x152a,0x1767,0x1767,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a7,0x13a4,0x13a4,
-0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x152a,0x152a,0x152a,0x152a,
-0x152a,0x152a,0x152a,0x152a,0x1527,0x1527,0x152a,0x152a,0x13a4,0x13a4,0x13a7,0x13a7,0x13a7,0x1692,0x13a4,0x13a7,
-0x13a4,0x13a4,0x13a7,0x152d,0x152d,0x152a,0x152a,0x1767,0x1767,0x1767,0x1767,0x1767,0x152a,0x152a,0x152a,0x152a,
-0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,
-0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x152a,0x152a,0x152a,0x1692,0x152a,0x152a,0x152a,
-0x1767,0x1767,0x1767,0x176a,0x176a,0x176a,0x176a,0x176a,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,
-0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,
-0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x152a,0x13a4,0x152a,0x13a7,0x13a7,0x13a4,0x13a4,0x13a7,0x13a7,
-0x13a7,0x13a7,0x13a7,0x13a7,0x13a7,0x13a7,0x13a7,0x13a7,0x13a7,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,
-0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a7,0x13a7,0x13a7,0x13a7,0x13a7,0x13a7,
-0x13a7,0x13a7,0x13a7,0x13a7,0x13a7,0x13a7,0x13a7,0x13a7,0x13a7,0x13a7,0x13a7,0x13a7,0x13a7,0x13a4,0x13a4,0x13a4,
-0x13a7,0x13a4,0x13a4,0x13a4,0x13a4,0x13a7,0x13a7,0x13a7,0x13a4,0x13a7,0x13a7,0x13a7,0x13a4,0x13a4,0x13a4,0x13a4,
-0x13a4,0x13a4,0x13a4,0x13a7,0x13a4,0x13a7,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,
-0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,
-0x1692,0x13a4,0x13a4,0x13a4,0x13a4,0x152a,0x152a,0x1767,0x13fe,0x13fe,0x13fe,0x13fe,0x1527,0x1527,0x1527,0x1527,
-0x1527,0x1527,0x152a,0x1767,0x1767,0x1767,0x1767,0x16ec,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,
-0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,
-0x152a,0x152a,0x152a,0x152a,0x152d,0x152d,0x152a,0x152a,0x152a,0x152a,0x181e,0x152a,0x152a,0x152a,0x152a,0x152a,
-0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,
-0x1527,0x1527,0x1527,0x1527,0x1527,0x1527,0x152a,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x1488,0x13aa,0x13aa,0x13aa,
-0x13aa,0x13aa,0x13aa,0x13aa,0x13aa,0x13aa,0x13aa,0x13aa,0x13aa,0x13aa,0x13aa,0x13aa,0x13aa,0x1488,0x13aa,0x13aa,
-0x13aa,0x1488,0x13aa,0x1488,0x13aa,0x1488,0x13aa,0x1488,0x13aa,0x13aa,0x13aa,0x1488,0x13aa,0x13aa,0x13aa,0x13aa,
-0x13aa,0x13aa,0x1488,0x1488,0x13aa,0x13aa,0x13aa,0x13aa,0x1488,0x13aa,0x1488,0x1488,0x13aa,0x13aa,0x13aa,0x13aa,
-0x1488,0x13aa,0x13aa,0x13aa,0x13aa,0x13aa,0x13aa,0x13aa,0x13aa,0x13aa,0x13aa,0x13aa,0x13aa,0x1698,0x1698,0x176d,
-0x176d,0x13ad,0x13ad,0x13ad,0x13aa,0x13aa,0x13aa,0x13ad,0x13ad,0x13ad,0x13ad,0x13ad,0x1617,0x1617,0x1617,0x1617,
-0x1617,0x1617,0x1617,0x1617,0x1617,0x1617,0x1617,0x1617,0x1617,0x1617,0x1617,0x1617,0x13b0,0x13b0,0x13b0,0x13b0,
-0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,
-0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b3,
-0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,
-0x13b3,0x13b3,0x13b3,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b0,0x13b6,0x13b6,0x13b6,0x13b6,
+0x106b,0x106b,0x106b,0x106b,0x106b,0x106b,0x106b,0x106b,0x106e,0x106e,0x106e,0x1071,0x106e,0x106e,0x1074,0x1074,
+0x1077,0x1077,0x1077,0x1077,0x1077,0x1077,0x1077,0x1077,0x1077,0x1077,0x1077,0x1077,0x1077,0x1077,0x1077,0x1077,
+0x1077,0x1077,0x1077,0x1077,0x1077,0x1077,0x1077,0x1077,0x1077,0x1077,0x1077,0x1077,0x1077,0x1077,0x1077,0x1077,
+0x1080,0x1080,0x1080,0x1080,0x1080,0x1080,0x1080,0x1080,0x1080,0x1080,0x1080,0x1080,0x1083,0x107a,0x1089,0x1086,
+0x1080,0x1080,0x1080,0x1080,0x1080,0x1080,0x1080,0x1080,0x1080,0x1080,0x1080,0x1080,0x1080,0x1080,0x1080,0x1080,
+0x1080,0x1080,0x1080,0x1080,0x1080,0x1080,0x1080,0x1080,0x1080,0x1080,0x1080,0x1080,0x1080,0x1080,0x1080,0x1080,
+0x1341,0x133e,0x109b,0x1095,0x109b,0x1095,0x109b,0x1095,0x109b,0x1095,0x109b,0x1095,0x109b,0x1095,0x1098,0x1119,
+0x108c,0x108c,0x108c,0x1092,0x140a,0x140a,0x140a,0x140a,0x140a,0x140a,0x140a,0x140a,0x108f,0x108f,0x1092,0x109e,
+0x109b,0x1095,0x109b,0x1095,0x109b,0x1095,0x109b,0x1095,0x109b,0x1095,0x109b,0x1095,0x109b,0x1095,0x109b,0x1095,
+0x109b,0x1095,0x109b,0x1095,0x109b,0x1095,0x109b,0x1095,0x109b,0x1095,0x109b,0x1095,0x109b,0x1095,0x109b,0x1095,
+0x151e,0x151b,0x151e,0x151b,0x1521,0x1521,0x16f5,0x140a,0x10a7,0x10a7,0x10aa,0x10aa,0x10aa,0x10aa,0x10aa,0x10aa,
+0x10aa,0x10aa,0x10aa,0x10aa,0x10aa,0x10aa,0x10aa,0x10aa,0x10aa,0x10aa,0x10aa,0x10aa,0x10aa,0x10aa,0x10aa,0x10aa,
+0x10aa,0x10aa,0x10aa,0x10aa,0x10aa,0x10aa,0x10aa,0x10aa,0x10a7,0x10a7,0x10a7,0x10a7,0x10a7,0x10a7,0x10a7,0x10a7,
+0x10a7,0x10a7,0x10a7,0x10a7,0x10b9,0x10b9,0x10b9,0x10b9,0x10b9,0x10b9,0x10b0,0x10b0,0x10b0,0x10b0,0x10b0,0x10b3,
+0x10b3,0x10b3,0x110d,0x10bc,0x10cb,0x10cb,0x10cb,0x10cb,0x10cb,0x10cb,0x10cb,0x10cb,0x10cb,0x10cb,0x10cb,0x10cb,
+0x10cb,0x10cb,0x10cb,0x10cb,0x10b6,0x10b6,0x10b6,0x10b6,0x10b6,0x10b6,0x10b6,0x10b6,0x10b6,0x10b6,0x10b9,0x10b9,
+0x10b9,0x10b9,0x10b9,0x10b9,0x10b9,0x10b9,0x10b9,0x10b9,0x10b9,0x10b9,0x10b9,0x10b9,0x10b9,0x10b9,0x10b9,0x10b9,
+0x10b9,0x10b9,0x10b9,0x10b9,0x10da,0x10da,0x10da,0x10da,0x10da,0x10da,0x10da,0x10da,0x10da,0x10da,0x10da,0x10da,
+0x10da,0x10da,0x10da,0x10da,0x10da,0x10da,0x10da,0x10da,0x10da,0x10da,0x10da,0x10da,0x10da,0x10da,0x10da,0x10da,
+0x10da,0x10da,0x10da,0x10da,0x10ec,0x10ec,0x10ec,0x10ec,0x10ec,0x10ec,0x10ec,0x10ec,0x10ec,0x10ec,0x10ec,0x10ec,
+0x10ec,0x10ec,0x10ec,0x10ec,0x10ec,0x10ec,0x10ec,0x10ec,0x10ec,0x10ec,0x10ec,0x10ec,0x10ec,0x10ec,0x10ec,0x10ec,
+0x10ec,0x10ec,0x10ec,0x10ec,0x10f5,0x10f5,0x10f5,0x10f5,0x110a,0x10f5,0x10f5,0x10f5,0x10f5,0x10f5,0x10f5,0x10f5,
+0x10f5,0x10f5,0x10f5,0x10f5,0x10f5,0x10f5,0x10f5,0x10f5,0x10f5,0x10f5,0x10f5,0x10f5,0x10f5,0x10f5,0x10f5,0x10f5,
+0x10f5,0x10f5,0x10f5,0x10f5,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,
+0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,0x10f8,
+0x10f8,0x10f8,0x10f8,0x10f8,0x1104,0x1104,0x1104,0x1104,0x129c,0x129c,0x129c,0x129c,0x129c,0x129c,0x129c,0x129c,
+0x1494,0x1776,0x1776,0x1776,0x1776,0x1776,0x1776,0x1776,0x1776,0x1776,0x18c6,0x18c6,0x18c6,0x18c6,0x18c6,0x18c6,
+0x18c6,0x18c6,0x18c6,0x18c6,0x1179,0x1179,0x1179,0x1179,0x1179,0x1179,0x1179,0x1179,0x1179,0x1179,0x1179,0x1179,
+0x1179,0x1179,0x1179,0x1179,0x1179,0x1179,0x1179,0x1179,0x1179,0x1179,0x1170,0x1170,0x1173,0x1173,0x1179,0x1170,
+0x1170,0x1170,0x1170,0x1170,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,
+0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,0x117f,
+0x117f,0x117f,0x117f,0x117f,0x119a,0x119a,0x119a,0x119a,0x119a,0x119a,0x119a,0x119a,0x119a,0x119a,0x119a,0x119a,
+0x119a,0x119a,0x119a,0x119a,0x119a,0x119a,0x119a,0x119a,0x119a,0x119a,0x119a,0x119a,0x119a,0x119a,0x119a,0x119a,
+0x119a,0x119a,0x119a,0x119a,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,
+0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,0x11a6,
+0x11a6,0x11a6,0x11a3,0x11a9,0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,
+0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,0x11b5,
+0x11b5,0x11b5,0x11b5,0x11b5,0x11bb,0x11bb,0x11bb,0x11bb,0x11bb,0x11bb,0x11bb,0x11bb,0x11bb,0x11bb,0x11bb,0x11bb,
+0x11bb,0x11bb,0x11bb,0x11bb,0x11bb,0x12e7,0x11c1,0x12ea,0x11c1,0x11c1,0x11c1,0x11c1,0x11be,0x11be,0x11be,0x11c1,
+0x16f8,0x16fb,0x1923,0x1920,0x11c4,0x11c4,0x11c4,0x11d3,0x11d9,0x11d9,0x11d9,0x11d9,0x11d9,0x11d9,0x11d9,0x11d9,
+0x11d9,0x11d9,0x11d9,0x11d9,0x11d9,0x11d9,0x11d9,0x11d9,0x11d9,0x11d9,0x11d9,0x11d9,0x11d9,0x11d9,0x11d9,0x11d9,
+0x11d9,0x11d9,0x11d9,0x11d9,0x11d9,0x11d9,0x11d9,0x11c7,0x11d3,0x11d3,0x11c4,0x11c4,0x11c4,0x11c4,0x11d3,0x11d3,
+0x11c4,0x11c4,0x11d3,0x11d3,0x11e5,0x11e5,0x11e5,0x11e5,0x11e5,0x11e5,0x11e5,0x11e5,0x11e5,0x11e5,0x11e5,0x11e5,
+0x11e5,0x11e5,0x11e5,0x11e5,0x11e8,0x11e5,0x11e5,0x11e5,0x11e5,0x11e5,0x11e5,0x11df,0x11df,0x11df,0x11e5,0x11e2,
+0x1527,0x152a,0x152d,0x152d,0x11f7,0x11f7,0x11f7,0x11f7,0x11f7,0x11f7,0x11f7,0x11f7,0x11f7,0x11f7,0x11f7,0x11f7,
+0x11f7,0x11f7,0x11f7,0x11f7,0x11eb,0x11f7,0x11eb,0x11eb,0x11eb,0x1200,0x1200,0x11eb,0x11eb,0x1200,0x11f7,0x1200,
+0x1200,0x11f7,0x11eb,0x11ee,0x11f7,0x11f7,0x11f7,0x11f7,0x11f7,0x11f7,0x11f7,0x11f7,0x11f7,0x11f7,0x11f7,0x11f7,
+0x11f7,0x11f7,0x11f7,0x11f7,0x11f7,0x11f7,0x11f7,0x11f7,0x11f7,0x11f7,0x11f7,0x11f7,0x11f7,0x11f7,0x11f7,0x11f7,
+0x11f7,0x11f7,0x11f7,0x11f7,0x1212,0x1212,0x1212,0x1212,0x1212,0x1212,0x1212,0x1212,0x1212,0x1212,0x1212,0x1212,
+0x1212,0x1212,0x1212,0x1212,0x1212,0x1212,0x1212,0x1212,0x1212,0x1212,0x1212,0x1212,0x1212,0x1212,0x1212,0x1212,
+0x1212,0x1212,0x1212,0x1212,0x122a,0x122a,0x122a,0x122a,0x122a,0x122a,0x122a,0x122a,0x122a,0x122a,0x122a,0x122a,
+0x122a,0x122a,0x122a,0x122a,0x122a,0x122a,0x122a,0x122a,0x122a,0x122a,0x122a,0x122a,0x122a,0x122a,0x122a,0x122a,
+0x122a,0x1227,0x1227,0x1227,0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,
+0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,0x1233,
+0x1233,0x1233,0x1233,0x1233,0x1242,0x1242,0x1242,0x1242,0x1242,0x1242,0x1242,0x1242,0x1242,0x1242,0x1242,0x1242,
+0x1242,0x1242,0x1242,0x1242,0x1242,0x1242,0x1242,0x1242,0x1242,0x1242,0x1242,0x1242,0x1242,0x1242,0x1242,0x1242,
+0x1242,0x1242,0x1242,0x1242,0x1248,0x1248,0x1254,0x1257,0x1257,0x1257,0x1257,0x1257,0x1257,0x1257,0x1257,0x1257,
+0x1257,0x1257,0x1257,0x1257,0x1257,0x1257,0x1257,0x1257,0x1257,0x1257,0x1257,0x1257,0x1257,0x1257,0x125a,0x1257,
+0x125a,0x1257,0x1257,0x1257,0x1257,0x1257,0x1257,0x1257,0x1257,0x1257,0x1257,0x1257,0x1257,0x1257,0x1257,0x125a,
+0x1257,0x1257,0x1257,0x1257,0x1254,0x1254,0x1254,0x1248,0x1248,0x1248,0x1248,0x1254,0x1254,0x124e,0x124b,0x1251,
+0x1251,0x1260,0x125d,0x125d,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,
+0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,
+0x1263,0x1263,0x1263,0x1263,0x1269,0x1269,0x1269,0x1266,0x1266,0x1266,0x1263,0x1263,0x1263,0x1263,0x1266,0x1263,
+0x1263,0x1263,0x1269,0x1266,0x1269,0x1266,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,
+0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,
+0x1263,0x1269,0x1266,0x1266,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,0x1263,
+0x1263,0x1263,0x1263,0x1bcf,0x19f5,0x19f5,0x19f5,0x19f5,0x19f5,0x19f5,0x19f5,0x19f8,0x19f2,0x1be1,0x1be1,0x1be1,
+0x1be4,0x1bde,0x1be4,0x1bde,0x1284,0x1284,0x1284,0x1284,0x1284,0x1284,0x1284,0x1284,0x1284,0x1284,0x1284,0x1287,
+0x1287,0x1287,0x126c,0x1929,0x138f,0x1290,0x138f,0x138f,0x138f,0x138f,0x138f,0x138f,0x138f,0x138f,0x138f,0x138f,
+0x138f,0x1290,0x138f,0x1290,0x1275,0x1275,0x131d,0x1272,0x131d,0x131d,0x131d,0x131d,0x1272,0x1272,0x1296,0x1272,
+0x1272,0x1272,0x1272,0x1272,0x1272,0x1275,0x1296,0x1296,0x1275,0x1296,0x1272,0x1275,0x1275,0x1278,0x1296,0x1272,
+0x1272,0x1296,0x1275,0x1275,0x138c,0x138c,0x138c,0x138c,0x138c,0x138c,0x138c,0x138c,0x138c,0x138c,0x1281,0x1281,
+0x1281,0x1281,0x13a4,0x1386,0x128a,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x13a4,0x1827,
+0x1827,0x1827,0x1827,0x1827,0x1284,0x1284,0x1284,0x1284,0x1284,0x1284,0x1284,0x1284,0x1284,0x1284,0x1284,0x1530,
+0x1530,0x1a76,0x1a76,0x1a76,0x1284,0x1284,0x1284,0x1284,0x1284,0x1284,0x1284,0x1284,0x1284,0x1284,0x1284,0x1284,
+0x1284,0x1284,0x1284,0x1284,0x138f,0x138f,0x1290,0x138f,0x138f,0x138f,0x1290,0x138f,0x138f,0x138f,0x128a,0x128a,
+0x128a,0x128a,0x128a,0x1389,0x138c,0x138c,0x138c,0x138c,0x138c,0x138c,0x138c,0x128d,0x138c,0x138c,0x138c,0x138c,
+0x138c,0x138c,0x138c,0x128d,0x138c,0x138c,0x138c,0x138c,0x138c,0x138c,0x138c,0x138c,0x138c,0x138c,0x140d,0x140d,
+0x19d4,0x1a76,0x1a76,0x1a76,0x138c,0x138c,0x138c,0x138c,0x138c,0x138c,0x138c,0x138c,0x138c,0x128d,0x138c,0x128d,
+0x128d,0x138c,0x138c,0x128d,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,
+0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,0x12b1,
+0x12b1,0x12b1,0x12b1,0x12b1,0x133b,0x1338,0x133b,0x1338,0x133b,0x1338,0x133b,0x1338,0x133b,0x1338,0x13fb,0x150f,
+0x150f,0x150f,0x17a3,0x1917,0x150f,0x150f,0x16ef,0x16ef,0x16ef,0x16e9,0x16ef,0x16e9,0x191a,0x1917,0x19d1,0x19ce,
+0x19d1,0x19ce,0x19d1,0x19ce,0x135f,0x135f,0x135f,0x135f,0x135f,0x135f,0x135f,0x135f,0x135f,0x135f,0x135f,0x135f,
+0x135f,0x135f,0x135f,0x135f,0x135f,0x135f,0x135f,0x135f,0x135f,0x135f,0x135f,0x135f,0x135f,0x135f,0x135f,0x135f,
+0x135f,0x135f,0x135f,0x135f,0x1374,0x1365,0x1374,0x1377,0x1377,0x1377,0x1377,0x1377,0x1377,0x1377,0x1377,0x1377,
+0x1377,0x1377,0x1377,0x1377,0x1377,0x1377,0x1377,0x1377,0x1377,0x1377,0x1377,0x1377,0x1377,0x1377,0x1377,0x1377,
+0x1377,0x1377,0x1377,0x1377,0x1365,0x1365,0x1365,0x1365,0x1365,0x1365,0x1365,0x1365,0x137d,0x137d,0x137d,0x137d,
+0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,
+0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x137d,0x1383,0x1383,0x1383,0x1383,
+0x1383,0x1383,0x1383,0x1383,0x1383,0x1383,0x1383,0x1383,0x1383,0x1383,0x1383,0x1383,0x1383,0x1383,0x1383,0x1383,
+0x1383,0x1383,0x1383,0x1383,0x1383,0x1383,0x1383,0x1383,0x1383,0x1383,0x1383,0x1383,0x13ad,0x13aa,0x18cc,0x18cc,
+0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,
+0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x13b6,0x13b6,0x13b6,0x13b6,
+0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,
+0x13b6,0x13b3,0x13b3,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b3,0x13b6,0x13b6,0x13b6,0x13b3,0x13b6,0x13b3,0x13b6,
+0x13b3,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b9,0x13b6,0x13b6,0x13b6,0x13b6,0x13b3,0x13b6,0x13b3,0x13b3,0x13b6,
+0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b3,0x13b3,0x13b3,0x13b3,
+0x13b3,0x13b3,0x13b3,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,
+0x13b6,0x13b6,0x13b6,0x13b3,0x13b3,0x13b3,0x13b3,0x13b3,0x13b3,0x13b3,0x13b3,0x13b3,0x13b3,0x13b6,0x13b6,0x13b6,
+0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b3,0x13b3,0x13b3,0x13b3,0x13b3,0x13b3,
+0x13b3,0x13b3,0x13b3,0x13b3,0x13b3,0x13b3,0x1539,0x1539,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,
+0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,
+0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,
+0x153c,0x153c,0x153c,0x153c,0x153c,0x1779,0x1779,0x1779,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x153c,0x13b6,
+0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,
+0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x153c,0x1779,0x1779,
+0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b9,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,
+0x13b6,0x13b6,0x13b6,0x13b6,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x1539,0x1539,0x153c,0x153c,
+0x13b6,0x13b6,0x13b9,0x13b9,0x13b9,0x16a4,0x13b6,0x13b9,0x13b6,0x13b6,0x13b9,0x153f,0x153f,0x153c,0x153c,0x1779,
+0x1779,0x1779,0x1779,0x1779,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,
+0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,
+0x13b6,0x153c,0x153c,0x153c,0x16a4,0x153c,0x153c,0x153c,0x1779,0x1779,0x1779,0x177c,0x177c,0x177c,0x177c,0x177c,
0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,
-0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x179a,0x179a,0x1797,0x16ef,
-0x1404,0x1404,0x1404,0x1404,0x1404,0x1404,0x1401,0x1401,0x1401,0x1401,0x1401,0x1401,0x1404,0x1404,0x1404,0x1404,
-0x1404,0x1404,0x1404,0x1404,0x1404,0x1404,0x1404,0x1404,0x1404,0x1404,0x1404,0x1533,0x1407,0x1536,0x1407,0x1407,
-0x1407,0x1407,0x1407,0x1407,0x1407,0x1407,0x1407,0x1407,0x1407,0x1536,0x1536,0x1536,0x1536,0x1536,0x1536,0x16f2,
-0x16f2,0x1b39,0x17a0,0x17a0,0x17a0,0x17a0,0x17a0,0x17a0,0x17a0,0x17a0,0x1a67,0x1a67,0x1410,0x1410,0x1410,0x1422,
-0x1422,0x1422,0x1422,0x1422,0x1422,0x1422,0x1422,0x1422,0x1422,0x1422,0x1422,0x1422,0x1422,0x1422,0x1422,0x1422,
-0x1422,0x1422,0x1422,0x1422,0x1422,0x1422,0x1422,0x1422,0x1422,0x1422,0x1422,0x1422,0x143d,0x143d,0x143d,0x143d,
-0x143d,0x143d,0x143d,0x143d,0x143d,0x143d,0x143d,0x143d,0x143d,0x143d,0x143d,0x143d,0x143d,0x143d,0x143d,0x143d,
-0x143d,0x143d,0x143d,0x143d,0x143d,0x143d,0x143d,0x143d,0x143d,0x143d,0x143d,0x143d,0x1443,0x1443,0x1443,0x1443,
-0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,
-0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x1443,0x19c8,0x1446,0x1446,0x1446,0x1446,
-0x1446,0x1446,0x1446,0x1446,0x1446,0x1446,0x1446,0x1446,0x1446,0x1446,0x1446,0x1446,0x1446,0x1446,0x1446,0x1446,
-0x1446,0x1446,0x1446,0x1446,0x1446,0x1446,0x1446,0x1446,0x1446,0x1446,0x1446,0x1446,0x144c,0x144c,0x1458,0x145e,
-0x145e,0x145e,0x145e,0x145e,0x145e,0x145e,0x145e,0x145e,0x145e,0x145e,0x145e,0x145e,0x145e,0x145e,0x145e,0x145e,
-0x145e,0x145e,0x145e,0x145e,0x145e,0x145e,0x145e,0x145e,0x145e,0x145e,0x145e,0x145e,0x145e,0x145e,0x145e,0x1458,
-0x1458,0x1458,0x144c,0x144c,0x144c,0x144c,0x144c,0x144c,0x144c,0x144c,0x144c,0x1458,0x145b,0x145e,0x1461,0x1461,
-0x145e,0x1464,0x1464,0x144f,0x1452,0x16f8,0x16fb,0x16fb,0x16fb,0x153c,0x1a70,0x1a6d,0x1455,0x1455,0x1455,0x1455,
-0x1455,0x1455,0x1455,0x1455,0x1455,0x1455,0x1539,0x1701,0x1704,0x16fe,0x1707,0x1707,0x147f,0x147f,0x147f,0x147f,
-0x147f,0x147f,0x147f,0x147f,0x147f,0x147f,0x147f,0x147f,0x147f,0x147f,0x147f,0x147f,0x147f,0x147f,0x147f,0x147f,
-0x147f,0x147f,0x147f,0x147f,0x147f,0x147f,0x147f,0x147f,0x147f,0x147f,0x147f,0x147f,0x14d9,0x14d9,0x14d9,0x14d9,
-0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,
-0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x18ed,0x18ed,0x18ed,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,
-0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x19b6,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x14d9,0x1851,0x18ed,
-0x18ed,0x18ed,0x18ed,0x18ed,0x18ed,0x18ed,0x18ed,0x18ed,0x18ed,0x18ed,0x18ed,0x18ed,0x152a,0x152a,0x152a,0x152a,
-0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,
-0x1527,0x1527,0x1527,0x1527,0x1527,0x1527,0x1527,0x1527,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,
-0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152d,0x152a,0x152a,0x152a,0x152a,0x1695,0x1695,0x152a,
-0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x181b,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,
-0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,0x152a,
-0x152a,0x152a,0x152a,0x152a,0x154b,0x154b,0x154b,0x154b,0x154b,0x154b,0x154b,0x154b,0x154b,0x154b,0x154b,0x154b,
-0x154b,0x154b,0x154b,0x154b,0x154b,0x154b,0x154b,0x154b,0x154b,0x154b,0x154b,0x154b,0x154b,0x154b,0x154b,0x154b,
-0x154b,0x154b,0x154b,0x154b,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,
+0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x153c,
+0x13b6,0x153c,0x13b9,0x13b9,0x13b6,0x13b6,0x13b9,0x13b9,0x13b9,0x13b9,0x13b9,0x13b9,0x13b9,0x13b9,0x13b9,0x13b9,
+0x13b9,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,
+0x13b6,0x13b6,0x13b9,0x13b9,0x13b9,0x13b9,0x13b9,0x13b9,0x13b9,0x13b9,0x13b9,0x13b9,0x13b9,0x13b9,0x13b9,0x13b9,
+0x13b9,0x13b9,0x13b9,0x13b9,0x13b9,0x13b6,0x13b6,0x13b6,0x13b9,0x13b6,0x13b6,0x13b6,0x13b6,0x13b9,0x13b9,0x13b9,
+0x13b6,0x13b9,0x13b9,0x13b9,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b9,0x13b6,0x13b9,0x13b6,0x13b6,
+0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,
+0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x16a4,0x13b6,0x13b6,0x13b6,0x13b6,0x153c,0x153c,0x1779,
+0x1410,0x1410,0x1410,0x1410,0x1539,0x1539,0x1539,0x1539,0x1539,0x1539,0x153c,0x1779,0x1779,0x1779,0x1779,0x16fe,
+0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,0x13b6,
+0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153f,0x153f,0x153c,0x153c,
+0x153c,0x153c,0x1830,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,
+0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x1539,0x1539,0x1539,0x1539,0x1539,0x1539,0x153c,0x13b6,
+0x13b6,0x13b6,0x13b6,0x13b6,0x149a,0x13bc,0x13bc,0x13bc,0x13bc,0x13bc,0x13bc,0x13bc,0x13bc,0x13bc,0x13bc,0x13bc,
+0x13bc,0x13bc,0x13bc,0x13bc,0x13bc,0x149a,0x13bc,0x13bc,0x13bc,0x149a,0x13bc,0x149a,0x13bc,0x149a,0x13bc,0x149a,
+0x13bc,0x13bc,0x13bc,0x149a,0x13bc,0x13bc,0x13bc,0x13bc,0x13bc,0x13bc,0x149a,0x149a,0x13bc,0x13bc,0x13bc,0x13bc,
+0x149a,0x13bc,0x149a,0x149a,0x13bc,0x13bc,0x13bc,0x13bc,0x149a,0x13bc,0x13bc,0x13bc,0x13bc,0x13bc,0x13bc,0x13bc,
+0x13bc,0x13bc,0x13bc,0x13bc,0x13bc,0x16aa,0x16aa,0x177f,0x177f,0x13bf,0x13bf,0x13bf,0x13bc,0x13bc,0x13bc,0x13bf,
+0x13bf,0x13bf,0x13bf,0x13bf,0x1629,0x1629,0x1629,0x1629,0x1629,0x1629,0x1629,0x1629,0x1629,0x1629,0x1629,0x1629,
+0x1629,0x1629,0x1629,0x1629,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,
+0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,
+0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,0x13c5,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,
+0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,0x13c5,0x13c5,0x13c5,0x13c2,0x13c2,0x13c2,0x13c2,0x13c2,
+0x13c2,0x13c2,0x13c2,0x13c2,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,
+0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,0x13c8,
+0x13c8,0x13c8,0x13c8,0x13c8,0x17ac,0x17ac,0x17a9,0x1701,0x1416,0x1416,0x1416,0x1416,0x1416,0x1416,0x1413,0x1413,
+0x1413,0x1413,0x1413,0x1413,0x1416,0x1416,0x1416,0x1416,0x1416,0x1416,0x1416,0x1416,0x1416,0x1416,0x1416,0x1416,
+0x1416,0x1416,0x1416,0x1545,0x1419,0x1548,0x1419,0x1419,0x1419,0x1419,0x1419,0x1419,0x1419,0x1419,0x1419,0x1419,
+0x1419,0x1548,0x1548,0x1548,0x1548,0x1548,0x1548,0x1704,0x1704,0x1b4b,0x17b2,0x17b2,0x17b2,0x17b2,0x17b2,0x17b2,
+0x17b2,0x17b2,0x1a79,0x1a79,0x1422,0x1422,0x1422,0x1434,0x1434,0x1434,0x1434,0x1434,0x1434,0x1434,0x1434,0x1434,
+0x1434,0x1434,0x1434,0x1434,0x1434,0x1434,0x1434,0x1434,0x1434,0x1434,0x1434,0x1434,0x1434,0x1434,0x1434,0x1434,
+0x1434,0x1434,0x1434,0x1434,0x144f,0x144f,0x144f,0x144f,0x144f,0x144f,0x144f,0x144f,0x144f,0x144f,0x144f,0x144f,
+0x144f,0x144f,0x144f,0x144f,0x144f,0x144f,0x144f,0x144f,0x144f,0x144f,0x144f,0x144f,0x144f,0x144f,0x144f,0x144f,
+0x144f,0x144f,0x144f,0x144f,0x1455,0x1455,0x1455,0x1455,0x1455,0x1455,0x1455,0x1455,0x1455,0x1455,0x1455,0x1455,
+0x1455,0x1455,0x1455,0x1455,0x1455,0x1455,0x1455,0x1455,0x1455,0x1455,0x1455,0x1455,0x1455,0x1455,0x1455,0x1455,
+0x1455,0x1455,0x1455,0x19da,0x1458,0x1458,0x1458,0x1458,0x1458,0x1458,0x1458,0x1458,0x1458,0x1458,0x1458,0x1458,
+0x1458,0x1458,0x1458,0x1458,0x1458,0x1458,0x1458,0x1458,0x1458,0x1458,0x1458,0x1458,0x1458,0x1458,0x1458,0x1458,
+0x1458,0x1458,0x1458,0x1458,0x145e,0x145e,0x146a,0x1470,0x1470,0x1470,0x1470,0x1470,0x1470,0x1470,0x1470,0x1470,
+0x1470,0x1470,0x1470,0x1470,0x1470,0x1470,0x1470,0x1470,0x1470,0x1470,0x1470,0x1470,0x1470,0x1470,0x1470,0x1470,
+0x1470,0x1470,0x1470,0x1470,0x1470,0x1470,0x1470,0x146a,0x146a,0x146a,0x145e,0x145e,0x145e,0x145e,0x145e,0x145e,
+0x145e,0x145e,0x145e,0x146a,0x146d,0x1470,0x1473,0x1473,0x1470,0x1476,0x1476,0x1461,0x1464,0x170a,0x170d,0x170d,
+0x170d,0x154e,0x1a82,0x1a7f,0x1467,0x1467,0x1467,0x1467,0x1467,0x1467,0x1467,0x1467,0x1467,0x1467,0x154b,0x1713,
+0x1716,0x1710,0x1719,0x1719,0x1491,0x1491,0x1491,0x1491,0x1491,0x1491,0x1491,0x1491,0x1491,0x1491,0x1491,0x1491,
+0x1491,0x1491,0x1491,0x1491,0x1491,0x1491,0x1491,0x1491,0x1491,0x1491,0x1491,0x1491,0x1491,0x1491,0x1491,0x1491,
+0x1491,0x1491,0x1491,0x1491,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,
+0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x18ff,0x18ff,
+0x18ff,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x19c8,0x14eb,0x14eb,
+0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x14eb,0x1863,0x18ff,0x18ff,0x18ff,0x18ff,0x18ff,0x18ff,0x18ff,0x18ff,0x18ff,
+0x18ff,0x18ff,0x18ff,0x18ff,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,
+0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x1539,0x1539,0x1539,0x1539,0x1539,0x1539,0x1539,0x1539,
+0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,
+0x153f,0x153c,0x153c,0x153c,0x153c,0x16a7,0x16a7,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,
+0x182d,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,
+0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x153c,0x155d,0x155d,0x155d,0x155d,
0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,
-0x155d,0x155d,0x155d,0x155d,0x1563,0x1563,0x1563,0x1563,0x1563,0x1563,0x1563,0x1563,0x1563,0x1563,0x1563,0x1563,
-0x1563,0x1563,0x1563,0x1563,0x1563,0x1563,0x1563,0x1563,0x1563,0x1563,0x1563,0x1563,0x1563,0x1563,0x1563,0x1563,
-0x1563,0x1563,0x1563,0x1563,0x1566,0x1566,0x1566,0x1566,0x1566,0x1566,0x1566,0x1566,0x1566,0x1566,0x1566,0x1566,
-0x1566,0x1566,0x1566,0x1566,0x1566,0x1566,0x1566,0x1566,0x1566,0x1566,0x1566,0x1566,0x1566,0x1566,0x1566,0x1566,
-0x1566,0x1566,0x1566,0x1566,0x15a5,0x15a5,0x15a5,0x15a5,0x15a5,0x15a5,0x15a5,0x15a5,0x15a5,0x15a5,0x15a5,0x15a5,
-0x15a5,0x15a5,0x15a5,0x15a5,0x15a5,0x15a5,0x15a5,0x15a5,0x15a5,0x15a5,0x15a5,0x15a5,0x15a5,0x15a5,0x15a5,0x15a5,
-0x15a5,0x15a5,0x15a5,0x1596,0x15ae,0x15ae,0x15ae,0x15ae,0x15ae,0x15ae,0x15ae,0x15ae,0x15ae,0x15ae,0x15ae,0x15ae,
-0x15ae,0x15ae,0x15ae,0x15ae,0x15ae,0x15ae,0x15ae,0x15ae,0x15ae,0x15ae,0x15ae,0x15ae,0x15ae,0x15ae,0x15ae,0x15a8,
-0x15b1,0x15b1,0x15b1,0x15b1,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,
-0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,0x15b4,
-0x15b4,0x15b4,0x15b4,0x15b4,0x15cf,0x15cf,0x15cf,0x15cf,0x15cf,0x15cf,0x15cf,0x15cf,0x15c6,0x15cf,0x15cf,0x15cf,
-0x15cf,0x15cf,0x15cf,0x15cf,0x15cf,0x15cf,0x15cf,0x15cf,0x15cf,0x15cf,0x15cf,0x15cf,0x15cf,0x15cf,0x15cf,0x15cf,
-0x15cf,0x15cf,0x15cf,0x15cf,0x15d8,0x15d8,0x15d8,0x15d8,0x15d8,0x15d8,0x15d8,0x15d8,0x15d8,0x15d8,0x15d8,0x15d8,
-0x15d8,0x15d8,0x15d8,0x15d8,0x15d8,0x15d8,0x15d8,0x15d8,0x15d8,0x15d8,0x15d8,0x15d8,0x15d8,0x15d8,0x15d8,0x15d8,
-0x15d8,0x15d8,0x15d8,0x15d8,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,
-0x15ea,0x15ea,0x15ea,0x15ea,0x15e7,0x15e7,0x15e7,0x15db,0x15db,0x15db,0x15db,0x15db,0x15db,0x15db,0x15db,0x15e7,
-0x15e7,0x15db,0x15e7,0x15de,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,
+0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x155d,0x156f,0x156f,0x156f,0x156f,
+0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,
+0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x156f,0x1575,0x1575,0x1575,0x1575,
+0x1575,0x1575,0x1575,0x1575,0x1575,0x1575,0x1575,0x1575,0x1575,0x1575,0x1575,0x1575,0x1575,0x1575,0x1575,0x1575,
+0x1575,0x1575,0x1575,0x1575,0x1575,0x1575,0x1575,0x1575,0x1575,0x1575,0x1575,0x1575,0x1578,0x1578,0x1578,0x1578,
+0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,
+0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x1578,0x15a2,0x15a2,0x15a2,0x15a2,
+0x15a2,0x15a2,0x15a2,0x15a2,0x15a2,0x15a2,0x15a2,0x15a2,0x159c,0x159c,0x159c,0x1590,0x1590,0x1590,0x159c,0x159c,
+0x1590,0x159f,0x1593,0x1590,0x15a5,0x15a5,0x1599,0x15a5,0x15a5,0x1596,0x17b5,0x1bdb,0x15b7,0x15b7,0x15b7,0x15b7,
+0x15b7,0x15b7,0x15b7,0x15b7,0x15b7,0x15b7,0x15b7,0x15b7,0x15b7,0x15b7,0x15b7,0x15b7,0x15b7,0x15b7,0x15b7,0x15b7,
+0x15b7,0x15b7,0x15b7,0x15b7,0x15b7,0x15b7,0x15b7,0x15b7,0x15b7,0x15b7,0x15b7,0x15a8,0x15c0,0x15c0,0x15c0,0x15c0,
+0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,
+0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15c0,0x15ba,0x15c3,0x15c3,0x15c3,0x15c3,0x15c6,0x15c6,0x15c6,0x15c6,
+0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,
+0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15c6,0x15e1,0x15e1,0x15e1,0x15e1,
+0x15e1,0x15e1,0x15e1,0x15e1,0x15d8,0x15e1,0x15e1,0x15e1,0x15e1,0x15e1,0x15e1,0x15e1,0x15e1,0x15e1,0x15e1,0x15e1,
+0x15e1,0x15e1,0x15e1,0x15e1,0x15e1,0x15e1,0x15e1,0x15e1,0x15e1,0x15e1,0x15e1,0x15e1,0x15ea,0x15ea,0x15ea,0x15ea,
0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,
-0x15ea,0x15ea,0x15ea,0x15ea,0x160e,0x160e,0x160e,0x160e,0x160e,0x160e,0x160e,0x160e,0x160e,0x160e,0x160e,0x160e,
-0x160e,0x160e,0x160e,0x160e,0x160e,0x160e,0x160e,0x160e,0x160e,0x160e,0x160e,0x160e,0x160e,0x160e,0x160e,0x160e,
-0x160e,0x160b,0x160b,0x160b,0x1617,0x1617,0x1617,0x1617,0x1617,0x1617,0x1617,0x1617,0x1617,0x1617,0x1617,0x1617,
-0x1617,0x1617,0x1617,0x1617,0x1617,0x1617,0x1617,0x1617,0x1617,0x1617,0x161d,0x161d,0x161d,0x161a,0x161a,0x161a,
-0x1617,0x1617,0x1617,0x1617,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,
-0x162c,0x162c,0x162c,0x162c,0x1620,0x1620,0x1620,0x1620,0x1620,0x1620,0x1620,0x1632,0x1632,0x1626,0x1623,0x1623,
-0x1623,0x1623,0x1623,0x1623,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,
-0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,0x162c,
-0x162c,0x162c,0x162c,0x162c,0x1638,0x1638,0x1638,0x1638,0x1638,0x1638,0x1638,0x1638,0x1638,0x1638,0x1638,0x1638,
-0x1638,0x1638,0x1638,0x1638,0x1638,0x1638,0x1638,0x1638,0x1638,0x1638,0x1638,0x1635,0x1635,0x1635,0x1635,0x1635,
-0x1635,0x1635,0x1635,0x1635,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,
-0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,0x163b,
-0x163b,0x163b,0x163b,0x163b,0x165f,0x165f,0x165f,0x165f,0x165f,0x165f,0x165f,0x165f,0x165f,0x165f,0x165f,0x165f,
-0x165f,0x165f,0x165f,0x165f,0x165f,0x165f,0x165f,0x165f,0x165f,0x165f,0x165f,0x165f,0x165f,0x165f,0x165f,0x165f,
-0x165f,0x165f,0x165f,0x165f,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,
-0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,0x1668,
-0x1668,0x1668,0x1668,0x1668,0x1680,0x1680,0x1680,0x1680,0x1680,0x1680,0x1680,0x1680,0x1680,0x1680,0x1680,0x1680,
-0x1680,0x1680,0x1680,0x1680,0x166b,0x167a,0x167a,0x166b,0x166b,0x166b,0x166b,0x166b,0x166b,0x167a,0x166b,0x167d,
-0x167d,0x166b,0x167d,0x166b,0x1680,0x1680,0x1680,0x1680,0x1680,0x1680,0x1680,0x1680,0x1680,0x1680,0x1680,0x1680,
-0x1680,0x1680,0x1680,0x1680,0x1680,0x1680,0x1680,0x1680,0x1680,0x1680,0x1680,0x1680,0x1680,0x1680,0x1680,0x1680,
-0x1680,0x1680,0x1680,0x1680,0x1689,0x1689,0x1689,0x1689,0x1689,0x1689,0x1689,0x1689,0x1689,0x1689,0x1689,0x1689,
-0x1689,0x1689,0x1689,0x1689,0x1689,0x1689,0x1689,0x1689,0x1689,0x1689,0x1689,0x1689,0x1689,0x1689,0x1689,0x1689,
-0x1689,0x1689,0x1689,0x1689,0x168f,0x168f,0x168f,0x168f,0x168f,0x168f,0x168f,0x168f,0x168f,0x168f,0x168f,0x168f,
-0x168f,0x168f,0x168f,0x168f,0x168f,0x168f,0x168f,0x168f,0x168f,0x168f,0x168f,0x168f,0x168f,0x168f,0x168f,0x168f,
-0x168f,0x168f,0x168f,0x168f,0x18ed,0x18ed,0x18ed,0x18ed,0x18ed,0x18ed,0x18ed,0x18ed,0x18ed,0x18ed,0x18ed,0x18ed,
-0x16d4,0x16d4,0x16d4,0x16d4,0x18ed,0x18ed,0x18ed,0x18ed,0x18ed,0x18ed,0x18ed,0x18ed,0x18ed,0x18ed,0x18ed,0x18ed,
-0x18ed,0x18ed,0x18ed,0x19b6,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,
-0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,0x16f5,
-0x16f5,0x16f5,0x16f5,0x16f5,0x1734,0x1734,0x1734,0x1734,0x1734,0x1734,0x1734,0x1734,0x1734,0x1734,0x1734,0x1734,
-0x1734,0x1734,0x1734,0x1734,0x1734,0x1734,0x1734,0x1734,0x1734,0x1734,0x1734,0x1734,0x1734,0x1734,0x1734,0x1734,
-0x1734,0x1734,0x1734,0x1734,0x1734,0x1734,0x173a,0x1737,0x1734,0x1734,0x1734,0x1734,0x1734,0x1734,0x1734,0x1734,
-0x1734,0x1734,0x1734,0x1734,0x1734,0x1734,0x1734,0x1734,0x173d,0x173d,0x173d,0x173d,0x173d,0x173d,0x173d,0x173d,
-0x173d,0x173d,0x173d,0x173d,0x173d,0x173d,0x173d,0x173d,0x173d,0x173d,0x173d,0x173d,0x173d,0x173d,0x173d,0x173d,
-0x173d,0x173d,0x173d,0x173d,0x173d,0x173d,0x173d,0x173d,0x1740,0x1740,0x1740,0x1740,0x1740,0x1740,0x1740,0x1740,
-0x1740,0x1740,0x1740,0x1740,0x1740,0x1740,0x1740,0x1740,0x1740,0x1740,0x1740,0x1740,0x1740,0x1740,0x1740,0x1740,
-0x1740,0x1740,0x1740,0x1740,0x1740,0x1740,0x1740,0x1740,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,
+0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15ea,0x15fc,0x15fc,0x15fc,0x15fc,
+0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15f9,0x15f9,0x15f9,0x15ed,
+0x15ed,0x15ed,0x15ed,0x15ed,0x15ed,0x15ed,0x15ed,0x15f9,0x15f9,0x15ed,0x15f9,0x15f0,0x15fc,0x15fc,0x15fc,0x15fc,
+0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,
+0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x15fc,0x1620,0x1620,0x1620,0x1620,
+0x1620,0x1620,0x1620,0x1620,0x1620,0x1620,0x1620,0x1620,0x1620,0x1620,0x1620,0x1620,0x1620,0x1620,0x1620,0x1620,
+0x1620,0x1620,0x1620,0x1620,0x1620,0x1620,0x1620,0x1620,0x1620,0x161d,0x161d,0x161d,0x1629,0x1629,0x1629,0x1629,
+0x1629,0x1629,0x1629,0x1629,0x1629,0x1629,0x1629,0x1629,0x1629,0x1629,0x1629,0x1629,0x1629,0x1629,0x1629,0x1629,
+0x1629,0x1629,0x162f,0x162f,0x162f,0x162c,0x162c,0x162c,0x1629,0x1629,0x1629,0x1629,0x163e,0x163e,0x163e,0x163e,
+0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x1632,0x1632,0x1632,0x1632,
+0x1632,0x1632,0x1632,0x1644,0x1644,0x1638,0x1635,0x1635,0x1635,0x1635,0x1635,0x1635,0x163e,0x163e,0x163e,0x163e,
+0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,
+0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x163e,0x164a,0x164a,0x164a,0x164a,
+0x164a,0x164a,0x164a,0x164a,0x164a,0x164a,0x164a,0x164a,0x164a,0x164a,0x164a,0x164a,0x164a,0x164a,0x164a,0x164a,
+0x164a,0x164a,0x164a,0x1647,0x1647,0x1647,0x1647,0x1647,0x1647,0x1647,0x1647,0x1647,0x164d,0x164d,0x164d,0x164d,
+0x164d,0x164d,0x164d,0x164d,0x164d,0x164d,0x164d,0x164d,0x164d,0x164d,0x164d,0x164d,0x164d,0x164d,0x164d,0x164d,
+0x164d,0x164d,0x164d,0x164d,0x164d,0x164d,0x164d,0x164d,0x164d,0x164d,0x164d,0x164d,0x1671,0x1671,0x1671,0x1671,
+0x1671,0x1671,0x1671,0x1671,0x1671,0x1671,0x1671,0x1671,0x1671,0x1671,0x1671,0x1671,0x1671,0x1671,0x1671,0x1671,
+0x1671,0x1671,0x1671,0x1671,0x1671,0x1671,0x1671,0x1671,0x1671,0x1671,0x1671,0x1671,0x167a,0x167a,0x167a,0x167a,
+0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,
+0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x167a,0x1692,0x1692,0x1692,0x1692,
+0x1692,0x1692,0x1692,0x1692,0x1692,0x1692,0x1692,0x1692,0x1692,0x1692,0x1692,0x1692,0x167d,0x168c,0x168c,0x167d,
+0x167d,0x167d,0x167d,0x167d,0x167d,0x168c,0x167d,0x168f,0x168f,0x167d,0x168f,0x167d,0x1692,0x1692,0x1692,0x1692,
+0x1692,0x1692,0x1692,0x1692,0x1692,0x1692,0x1692,0x1692,0x1692,0x1692,0x1692,0x1692,0x1692,0x1692,0x1692,0x1692,
+0x1692,0x1692,0x1692,0x1692,0x1692,0x1692,0x1692,0x1692,0x1692,0x1692,0x1692,0x1692,0x169b,0x169b,0x169b,0x169b,
+0x169b,0x169b,0x169b,0x169b,0x169b,0x169b,0x169b,0x169b,0x169b,0x169b,0x169b,0x169b,0x169b,0x169b,0x169b,0x169b,
+0x169b,0x169b,0x169b,0x169b,0x169b,0x169b,0x169b,0x169b,0x169b,0x169b,0x169b,0x169b,0x16a1,0x16a1,0x16a1,0x16a1,
+0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,
+0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x16a1,0x18ff,0x18ff,0x18ff,0x18ff,
+0x18ff,0x18ff,0x18ff,0x18ff,0x18ff,0x18ff,0x18ff,0x18ff,0x16e6,0x16e6,0x16e6,0x16e6,0x18ff,0x18ff,0x18ff,0x18ff,
+0x18ff,0x18ff,0x18ff,0x18ff,0x18ff,0x18ff,0x18ff,0x18ff,0x18ff,0x18ff,0x18ff,0x19c8,0x1707,0x1707,0x1707,0x1707,
+0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,
+0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1707,0x1746,0x1746,0x1746,0x1746,
+0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,
+0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x174c,0x1749,
+0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,0x1746,
+0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,
+0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,0x174f,
0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,
-0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1755,0x1755,0x1755,0x1755,0x1755,0x1755,0x1755,0x1755,
-0x1755,0x1755,0x1755,0x1755,0x1755,0x1755,0x1755,0x1755,0x1755,0x1755,0x1755,0x1755,0x1755,0x1755,0x1755,0x1755,
-0x1755,0x1755,0x1755,0x1755,0x1755,0x1755,0x1755,0x1755,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,
-0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,
-0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x175b,0x175b,0x175b,0x175b,0x1758,
-0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x1758,0x175b,0x175b,0x175b,
-0x175b,0x175b,0x175b,0x175b,0x175b,0x1758,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,
-0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,
-0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x175b,0x1773,0x1773,0x1773,0x1773,0x1773,0x1773,0x1773,0x1773,
-0x1773,0x1773,0x1773,0x1773,0x1773,0x1773,0x1773,0x1773,0x1773,0x1773,0x1773,0x1773,0x1773,0x1773,0x1773,0x1773,
-0x1773,0x1773,0x1773,0x1773,0x1773,0x1773,0x1773,0x1773,0x185d,0x185d,0x185d,0x185d,0x185d,0x185d,0x185d,0x185d,
-0x185d,0x185d,0x185d,0x185d,0x1ad3,0x1a28,0x1a28,0x1a2b,0x1776,0x1776,0x1776,0x1776,0x1776,0x1776,0x1776,0x1776,
-0x1779,0x1827,0x1827,0x1827,0x1827,0x1827,0x1827,0x18c3,0x1776,0x1776,0x1776,0x1776,0x1776,0x1824,0x1824,0x1824,
-0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,0x18c0,0x18c0,0x18c0,0x18c0,0x18c0,0x18c0,
-0x19a1,0x19a1,0x19a1,0x19a1,0x19a1,0x19a1,0x19a1,0x19a1,0x1776,0x19a1,0x19a1,0x1a28,0x1a28,0x1a28,0x1a28,0x1a28,
-0x1a28,0x1a28,0x1a28,0x1ad0,0x1ba2,0x1a2b,0x1a2b,0x1a2b,0x18c0,0x18c3,0x18c3,0x18c3,0x18c3,0x18c3,0x18c3,0x18c3,
-0x18c3,0x18c3,0x18c3,0x18c3,0x18c3,0x18c3,0x18c0,0x18c0,0x1a67,0x1a67,0x1a67,0x1a67,0x1a67,0x1a67,0x1a67,0x1a67,
-0x1b39,0x1b3c,0x1b36,0x1b36,0x1b36,0x1b36,0x1b36,0x1b36,0x1b36,0x1b36,0x1b36,0x191a,0x179d,0x179d,0x179d,0x179d,
-0x179d,0x179d,0x179d,0x179d,0x179d,0x179d,0x179d,0x179d,0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,0x1827,0x1824,
-0x18c0,0x18c0,0x18c0,0x18c0,0x18c0,0x18c0,0x18c0,0x18c0,0x1827,0x18c3,0x18c3,0x1827,0x1827,0x1827,0x1827,0x1827,
-0x1827,0x1827,0x1824,0x17a6,0x1827,0x1827,0x1827,0x1a28,0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,0x17a6,0x1824,
-0x1824,0x1824,0x1824,0x1824,0x18c0,0x19a1,0x19a1,0x19a1,0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,
-0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,0x1824,0x18c0,0x17bb,0x17bb,0x17b8,0x17b8,0x17b8,0x17b8,0x17b8,0x17b8,
-0x17b8,0x17b8,0x17b8,0x17b8,0x17b8,0x17b8,0x17b8,0x17b8,0x17b8,0x17b8,0x17b8,0x17b8,0x17b8,0x17b8,0x17b8,0x17b8,
-0x17b8,0x17b8,0x17b8,0x17b8,0x17b8,0x17b8,0x17b8,0x17b8,0x17bb,0x17bb,0x17bb,0x17bb,0x17bb,0x17bb,0x17bb,0x17bb,
-0x17bb,0x17bb,0x17bb,0x17bb,0x17bb,0x17bb,0x17bb,0x17bb,0x17bb,0x17bb,0x17bb,0x17bb,0x17bb,0x17bb,0x17bb,0x17bb,
-0x17bb,0x17bb,0x17bb,0x17bb,0x17bb,0x17bb,0x17bb,0x17bb,0x1809,0x1809,0x1809,0x1809,0x1809,0x1809,0x1809,0x1809,
-0x1809,0x1809,0x1809,0x1809,0x1809,0x1809,0x1809,0x1809,0x1809,0x1809,0x1809,0x1809,0x1809,0x1806,0x1806,0x1806,
-0x17f1,0x17f1,0x17f1,0x17f1,0x17f1,0x17f1,0x17f1,0x17f1,0x1809,0x1809,0x1809,0x1809,0x1809,0x1809,0x1809,0x1809,
-0x1809,0x1809,0x1809,0x1809,0x1809,0x1809,0x1809,0x1809,0x1809,0x1809,0x1809,0x1809,0x1809,0x1809,0x1809,0x1809,
-0x1809,0x1809,0x1809,0x1809,0x1809,0x1809,0x1809,0x1809,0x182d,0x182d,0x182d,0x182d,0x182d,0x182d,0x182d,0x182d,
-0x182d,0x182d,0x182d,0x182d,0x182d,0x182d,0x182d,0x182d,0x182d,0x182d,0x182d,0x182d,0x182d,0x182d,0x182d,0x182d,
-0x182d,0x182d,0x182d,0x182d,0x182d,0x182d,0x182d,0x182d,0x1830,0x1830,0x1830,0x1830,0x1830,0x1830,0x1830,0x1830,
-0x1830,0x1830,0x1830,0x1830,0x1830,0x1830,0x1830,0x1830,0x1830,0x1830,0x1830,0x1830,0x1830,0x1830,0x1830,0x1830,
-0x1830,0x1830,0x1830,0x1830,0x1830,0x1830,0x1830,0x1830,0x1830,0x1830,0x1830,0x1adc,0x1adc,0x1adc,0x1adc,0x1adc,
-0x1adc,0x1adc,0x1adc,0x1adc,0x1adc,0x1adc,0x1adc,0x1adc,0x1887,0x1887,0x1887,0x1887,0x19da,0x19da,0x188a,0x188a,
-0x188a,0x188a,0x1872,0x1872,0x1872,0x1872,0x1872,0x1872,0x1872,0x1872,0x1872,0x1872,0x1872,0x1872,0x1872,0x1884,
-0x1875,0x1878,0x187b,0x188d,0x188d,0x192c,0x187e,0x187e,0x1887,0x1887,0x1887,0x1887,0x1887,0x1887,0x1887,0x1887,
-0x1887,0x1887,0x1887,0x1887,0x1887,0x1887,0x1887,0x1887,0x1887,0x1887,0x1887,0x1887,0x1887,0x1887,0x1887,0x1887,
-0x1887,0x1887,0x1887,0x1887,0x1887,0x1887,0x1887,0x1887,0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,
-0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,0x1893,0x1899,0x1896,0x1896,0x1896,
-0x1896,0x18a5,0x18ab,0x1896,0x1896,0x1896,0x1896,0x18a2,0x18a8,0x1896,0x1896,0x1896,0x1896,0x1896,0x1896,0x1896,
-0x1896,0x1896,0x1896,0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,
-0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,0x18b4,0x18b4,0x18b4,0x18b4,0x18b4,0x18b4,0x18b4,0x18b4,
-0x18b4,0x18b4,0x18b4,0x199b,0x199b,0x199b,0x199b,0x199b,0x1ac7,0x1ac7,0x1ac7,0x1ac7,0x1ac7,0x1ac7,0x1ac7,0x1ac7,
-0x1ac7,0x1ac7,0x1ac7,0x1ac7,0x1ac7,0x1b93,0x1b93,0x1b93,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,
+0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,0x1752,
+0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,
+0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,0x1764,
+0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,
+0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,0x1767,
+0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,
+0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,
+0x176a,0x176a,0x176a,0x176d,0x176d,0x176d,0x176d,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,0x176a,
+0x176a,0x176a,0x176a,0x176a,0x176a,0x176d,0x176d,0x176d,0x176d,0x176d,0x176d,0x176d,0x176d,0x176a,0x176d,0x176d,
+0x176d,0x176d,0x176d,0x176d,0x176d,0x176d,0x176d,0x176d,0x176d,0x176d,0x176d,0x176d,0x176d,0x176d,0x176d,0x176d,
+0x176d,0x176d,0x176d,0x176d,0x176d,0x176d,0x176d,0x176d,0x176d,0x176d,0x176d,0x176d,0x176d,0x176d,0x176d,0x176d,
+0x1785,0x1785,0x1785,0x1785,0x1785,0x1785,0x1785,0x1785,0x1785,0x1785,0x1785,0x1785,0x1785,0x1785,0x1785,0x1785,
+0x1785,0x1785,0x1785,0x1785,0x1785,0x1785,0x1785,0x1785,0x1785,0x1785,0x1785,0x1785,0x1785,0x1785,0x1785,0x1785,
+0x186f,0x186f,0x186f,0x186f,0x186f,0x186f,0x186f,0x186f,0x186f,0x186f,0x186f,0x186f,0x1ae5,0x1a3a,0x1a3a,0x1a3d,
+0x1788,0x1788,0x1788,0x1788,0x1788,0x1788,0x1788,0x1788,0x178b,0x1839,0x1839,0x1839,0x1839,0x1839,0x1839,0x18d5,
+0x1788,0x1788,0x1788,0x1788,0x1788,0x1836,0x1836,0x1836,0x1836,0x1836,0x1836,0x1836,0x1836,0x1836,0x1836,0x1836,
+0x1836,0x1836,0x18d2,0x18d2,0x18d2,0x18d2,0x18d2,0x18d2,0x19b3,0x19b3,0x19b3,0x19b3,0x19b3,0x19b3,0x19b3,0x19b3,
+0x1788,0x19b3,0x19b3,0x1a3a,0x1a3a,0x1a3a,0x1a3a,0x1a3a,0x1a3a,0x1a3a,0x1a3a,0x1ae2,0x1bb4,0x1a3d,0x1a3d,0x1a3d,
+0x18d2,0x18d5,0x18d5,0x18d5,0x18d5,0x18d5,0x18d5,0x18d5,0x18d5,0x18d5,0x18d5,0x18d5,0x18d5,0x18d5,0x18d2,0x18d2,
+0x1a79,0x1a79,0x1a79,0x1a79,0x1a79,0x1a79,0x1a79,0x1a79,0x1b4b,0x1b4e,0x1b48,0x1b48,0x1b48,0x1b48,0x1b48,0x1b48,
+0x1b48,0x1b48,0x1b48,0x192c,0x17af,0x17af,0x17af,0x17af,0x17af,0x17af,0x17af,0x17af,0x17af,0x17af,0x17af,0x17af,
+0x1836,0x1836,0x1836,0x1836,0x1836,0x1836,0x1839,0x1836,0x18d2,0x18d2,0x18d2,0x18d2,0x18d2,0x18d2,0x18d2,0x18d2,
+0x1839,0x18d5,0x18d5,0x1839,0x1839,0x1839,0x1839,0x1839,0x1839,0x1839,0x1836,0x17b8,0x1839,0x1839,0x1839,0x1a3a,
+0x1836,0x1836,0x1836,0x1836,0x1836,0x1836,0x17b8,0x1836,0x1836,0x1836,0x1836,0x1836,0x18d2,0x19b3,0x19b3,0x19b3,
+0x1836,0x1836,0x1836,0x1836,0x1836,0x1836,0x1836,0x1836,0x1836,0x1836,0x1836,0x1836,0x1836,0x1836,0x1836,0x18d2,
+0x17cd,0x17cd,0x17ca,0x17ca,0x17ca,0x17ca,0x17ca,0x17ca,0x17ca,0x17ca,0x17ca,0x17ca,0x17ca,0x17ca,0x17ca,0x17ca,
+0x17ca,0x17ca,0x17ca,0x17ca,0x17ca,0x17ca,0x17ca,0x17ca,0x17ca,0x17ca,0x17ca,0x17ca,0x17ca,0x17ca,0x17ca,0x17ca,
+0x17cd,0x17cd,0x17cd,0x17cd,0x17cd,0x17cd,0x17cd,0x17cd,0x17cd,0x17cd,0x17cd,0x17cd,0x17cd,0x17cd,0x17cd,0x17cd,
+0x17cd,0x17cd,0x17cd,0x17cd,0x17cd,0x17cd,0x17cd,0x17cd,0x17cd,0x17cd,0x17cd,0x17cd,0x17cd,0x17cd,0x17cd,0x17cd,
+0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,
+0x181b,0x181b,0x181b,0x181b,0x181b,0x1818,0x1818,0x1818,0x1803,0x1803,0x1803,0x1803,0x1803,0x1803,0x1803,0x1803,
+0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,
+0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,0x181b,
+0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,
+0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,0x183f,
+0x1842,0x1842,0x1842,0x1842,0x1842,0x1842,0x1842,0x1842,0x1842,0x1842,0x1842,0x1842,0x1842,0x1842,0x1842,0x1842,
+0x1842,0x1842,0x1842,0x1842,0x1842,0x1842,0x1842,0x1842,0x1842,0x1842,0x1842,0x1842,0x1842,0x1842,0x1842,0x1842,
+0x1842,0x1842,0x1842,0x1aee,0x1aee,0x1aee,0x1aee,0x1aee,0x1aee,0x1aee,0x1aee,0x1aee,0x1aee,0x1aee,0x1aee,0x1aee,
+0x1899,0x1899,0x1899,0x1899,0x19ec,0x19ec,0x189c,0x189c,0x189c,0x189c,0x1884,0x1884,0x1884,0x1884,0x1884,0x1884,
+0x1884,0x1884,0x1884,0x1884,0x1884,0x1884,0x1884,0x1896,0x1887,0x188a,0x188d,0x189f,0x189f,0x193e,0x1890,0x1890,
+0x1899,0x1899,0x1899,0x1899,0x1899,0x1899,0x1899,0x1899,0x1899,0x1899,0x1899,0x1899,0x1899,0x1899,0x1899,0x1899,
+0x1899,0x1899,0x1899,0x1899,0x1899,0x1899,0x1899,0x1899,0x1899,0x1899,0x1899,0x1899,0x1899,0x1899,0x1899,0x1899,
+0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,
+0x18ba,0x18ba,0x18ba,0x18a5,0x18ab,0x18a8,0x18a8,0x18a8,0x18a8,0x18b7,0x18bd,0x18a8,0x18a8,0x18a8,0x18a8,0x18b4,
+0x18ba,0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,0x18a8,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,
0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,
-0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18ba,0x18c0,0x18c0,0x18c0,0x18c0,0x18c0,0x18c0,0x18c0,0x18c0,
-0x18c0,0x18c0,0x18c0,0x18c0,0x19a1,0x19a1,0x19a1,0x19a1,0x19a1,0x1a28,0x1ad0,0x19a1,0x19a1,0x19a1,0x19a1,0x1ad3,
-0x1ad0,0x1ba2,0x19a1,0x1a28,0x19a1,0x19a1,0x19a1,0x19a1,0x18c0,0x18c0,0x18c0,0x18c0,0x18c0,0x18c0,0x18c0,0x19a1,
-0x19a1,0x19a1,0x19a1,0x19a1,0x19a1,0x19a1,0x19a1,0x19a1,0x19a1,0x19a1,0x19a1,0x19a1,0x19a1,0x19a1,0x19a1,0x19a1,
-0x19a1,0x19a1,0x19a1,0x19a1,0x19a1,0x19a1,0x19a1,0x19a1,0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,
-0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,
-0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,0x18c9,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,
+0x18c6,0x18c6,0x18c6,0x18c6,0x18c6,0x18c6,0x18c6,0x18c6,0x18c6,0x18c6,0x18c6,0x19ad,0x19ad,0x19ad,0x19ad,0x19ad,
+0x1ad9,0x1ad9,0x1ad9,0x1ad9,0x1ad9,0x1ad9,0x1ad9,0x1ad9,0x1ad9,0x1ad9,0x1ad9,0x1ad9,0x1ad9,0x1ba5,0x1ba5,0x1ba5,
0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,
-0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x1ba5,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,
-0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,
-0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x18cf,0x1941,0x1941,0x1941,0x1941,0x1941,0x1941,0x1941,0x1941,
-0x1941,0x1941,0x1941,0x1941,0x1941,0x1941,0x1941,0x1941,0x1941,0x1941,0x1941,0x1941,0x1941,0x1941,0x1941,0x1941,
-0x1941,0x1941,0x1941,0x1941,0x1941,0x1941,0x1941,0x1941,0x195c,0x195c,0x195c,0x195c,0x195c,0x195c,0x195c,0x195c,
-0x195c,0x195c,0x195c,0x195c,0x195c,0x195c,0x195c,0x195c,0x195c,0x195c,0x195c,0x195c,0x195c,0x195c,0x195c,0x195c,
-0x195c,0x195c,0x195c,0x195c,0x195c,0x195c,0x195c,0x195c,0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,
-0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,
-0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,0x1962,0x197d,0x197d,0x197d,0x197d,0x197d,0x197d,0x197d,0x197d,
-0x197d,0x197d,0x197d,0x197d,0x197d,0x197d,0x197d,0x197d,0x197d,0x197d,0x197d,0x197d,0x197d,0x197d,0x197d,0x197d,
-0x197d,0x197d,0x197d,0x197d,0x197d,0x197d,0x197d,0x197d,0x1980,0x1980,0x1980,0x1980,0x1980,0x1980,0x1980,0x1980,
-0x1980,0x1980,0x1980,0x1980,0x1980,0x1980,0x1980,0x1980,0x1980,0x1980,0x1980,0x1980,0x1980,0x1980,0x1980,0x1980,
-0x1980,0x1980,0x1980,0x1980,0x1980,0x1980,0x1980,0x1980,0x1989,0x1989,0x1989,0x1989,0x1989,0x1989,0x1989,0x1989,
-0x1989,0x1989,0x1989,0x1989,0x1989,0x1989,0x1989,0x1989,0x1989,0x1989,0x1989,0x1989,0x1989,0x1989,0x1989,0x1989,
-0x1989,0x1989,0x1989,0x1989,0x1989,0x1986,0x1986,0x1986,0x19a1,0x19a1,0x19a1,0x1ad0,0x1ad0,0x1a28,0x1a28,0x1a28,
-0x1a28,0x1a28,0x1a28,0x1ad0,0x1ad0,0x1ad0,0x1a28,0x1a28,0x19a1,0x19a1,0x19a1,0x19a1,0x19a1,0x19a4,0x19a4,0x19a1,
-0x19a4,0x19a4,0x1a28,0x1a2b,0x1a28,0x1a28,0x1a28,0x1a28,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,
-0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,
-0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x19dd,0x1a04,0x1a04,0x1a04,0x1a04,0x1a04,0x1a04,0x1a04,0x1a04,
-0x1a04,0x1a04,0x1a04,0x1a04,0x1a04,0x1a04,0x1a04,0x1a04,0x1a04,0x1a04,0x1a04,0x1a04,0x1a04,0x1a04,0x1a04,0x1a04,
-0x1a04,0x1a04,0x1a04,0x1a04,0x1a04,0x1a04,0x1a04,0x1a04,0x1a0d,0x1a0d,0x1a0d,0x1a0d,0x1a0d,0x1a0d,0x1a0d,0x1a0d,
-0x1a0d,0x1a0d,0x1a0d,0x1a0d,0x1a0d,0x1a0d,0x1a0d,0x1a0d,0x1a43,0x1a43,0x1a0d,0x1a43,0x1a0d,0x1a0d,0x1a0d,0x1a0d,
-0x1a0d,0x1a0d,0x1a0d,0x1a0d,0x1a0d,0x1a13,0x1a13,0x1a13,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,
+0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,0x18cc,
+0x18d2,0x18d2,0x18d2,0x18d2,0x18d2,0x18d2,0x18d2,0x18d2,0x18d2,0x18d2,0x18d2,0x18d2,0x19b3,0x19b3,0x19b3,0x19b3,
+0x19b3,0x1a3a,0x1ae2,0x19b3,0x19b3,0x19b3,0x19b3,0x1ae5,0x1ae2,0x1bb4,0x19b3,0x1a3a,0x19b3,0x19b3,0x19b3,0x19b3,
+0x18d2,0x18d2,0x18d2,0x18d2,0x18d2,0x18d2,0x18d2,0x19b3,0x19b3,0x19b3,0x19b3,0x19b3,0x19b3,0x19b3,0x19b3,0x19b3,
+0x19b3,0x19b3,0x19b3,0x19b3,0x19b3,0x19b3,0x19b3,0x19b3,0x19b3,0x19b3,0x19b3,0x19b3,0x19b3,0x19b3,0x19b3,0x19b3,
+0x18db,0x18db,0x18db,0x18db,0x18db,0x18db,0x18db,0x18db,0x18db,0x18db,0x18db,0x18db,0x18db,0x18db,0x18db,0x18db,
+0x18db,0x18db,0x18db,0x18db,0x18db,0x18db,0x18db,0x18db,0x18db,0x18db,0x18db,0x18db,0x18db,0x18db,0x18db,0x18db,
+0x18de,0x18de,0x18de,0x18de,0x18de,0x18de,0x18de,0x18de,0x18de,0x18de,0x18de,0x18de,0x18de,0x18de,0x18de,0x18de,
+0x18de,0x18de,0x18de,0x18de,0x18de,0x18de,0x18de,0x18de,0x18de,0x18de,0x18de,0x18de,0x18de,0x18de,0x18de,0x1bb7,
+0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,
+0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,0x18e1,
+0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,
+0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,0x1953,
+0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,
+0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,0x196e,
+0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,
+0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,0x1974,
+0x198f,0x198f,0x198f,0x198f,0x198f,0x198f,0x198f,0x198f,0x198f,0x198f,0x198f,0x198f,0x198f,0x198f,0x198f,0x198f,
+0x198f,0x198f,0x198f,0x198f,0x198f,0x198f,0x198f,0x198f,0x198f,0x198f,0x198f,0x198f,0x198f,0x198f,0x198f,0x198f,
+0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,
+0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,0x1992,
+0x199b,0x199b,0x199b,0x199b,0x199b,0x199b,0x199b,0x199b,0x199b,0x199b,0x199b,0x199b,0x199b,0x199b,0x199b,0x199b,
+0x199b,0x199b,0x199b,0x199b,0x199b,0x199b,0x199b,0x199b,0x199b,0x199b,0x199b,0x199b,0x199b,0x1998,0x1998,0x1998,
+0x19b3,0x19b3,0x19b3,0x1ae2,0x1ae2,0x1a3a,0x1a3a,0x1a3a,0x1a3a,0x1a3a,0x1a3a,0x1ae2,0x1ae2,0x1ae2,0x1a3a,0x1a3a,
+0x19b3,0x19b3,0x19b3,0x19b3,0x19b3,0x19b6,0x19b6,0x19b3,0x19b6,0x19b6,0x1a3a,0x1a3d,0x1a3a,0x1a3a,0x1a3a,0x1a3a,
+0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,
+0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,0x19ef,
+0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,
+0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,0x1a16,
0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,
-0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,
-0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,
-0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1ab2,0x1abe,0x1abe,0x1abe,0x1abe,0x1abe,0x1abe,0x1abe,0x1abe,
-0x1abe,0x1abe,0x1abe,0x1abe,0x1abe,0x1abe,0x1abe,0x1abe,0x1abe,0x1abe,0x1abe,0x1abe,0x1abe,0x1abe,0x1abe,0x1abe,
-0x1abe,0x1abe,0x1abe,0x1abe,0x1abe,0x1abe,0x1abe,0x1abe,0x1ae2,0x1ae2,0x1ae2,0x1ae2,0x1ae2,0x1ae2,0x1ae2,0x1ae2,
-0x1ae2,0x1ae2,0x1ae2,0x1ae2,0x1ae2,0x1ae2,0x1ae2,0x1ae2,0x1ae2,0x1ae2,0x1ae2,0x1ae2,0x1ae2,0x1ae2,0x1ae2,0x1ae2,
-0x1ae2,0x1ae2,0x1ae2,0x1ae2,0x1ae2,0x1ae2,0x1ae2,0x1ae2,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,
-0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,
-0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1ae5,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,
-0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,
-0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b5a,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,
-0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,
-0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b7b,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,
+0x1a55,0x1a55,0x1a1f,0x1a55,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a1f,0x1a25,0x1a25,0x1a25,
+0x1a31,0x1a31,0x1a31,0x1a31,0x1a31,0x1a31,0x1a31,0x1a31,0x1a31,0x1a31,0x1a31,0x1a31,0x1a31,0x1a31,0x1a31,0x1a31,
+0x1a31,0x1a31,0x1a31,0x1a31,0x1a31,0x1a31,0x1a31,0x1a31,0x1a31,0x1a31,0x1a31,0x1a31,0x1a31,0x1a31,0x1a31,0x1a31,
+0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,
+0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,0x1ac4,
+0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,
+0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,0x1ad0,
+0x1af4,0x1af4,0x1af4,0x1af4,0x1af4,0x1af4,0x1af4,0x1af4,0x1af4,0x1af4,0x1af4,0x1af4,0x1af4,0x1af4,0x1af4,0x1af4,
+0x1af4,0x1af4,0x1af4,0x1af4,0x1af4,0x1af4,0x1af4,0x1af4,0x1af4,0x1af4,0x1af4,0x1af4,0x1af4,0x1af4,0x1af4,0x1af4,
+0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,
+0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,0x1af7,
+0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,
+0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,0x1b6c,
+0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,
0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,
-0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b8d,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,
-0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,
-0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0x1b90,0,0,0,0
+0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,
+0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,0x1b9f,
+0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,
+0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,0x1ba2,
+0x1bfc,0x1bfc,0x1bfc,0x1bfc,0x1bfc,0x1bfc,0x1bfc,0x1bfc,0x1bfc,0x1bfc,0x1bfc,0x1bfc,0x1bfc,0x1bfc,0x1bfc,0x1bfc,
+0x1bfc,0x1bf9,0x1bf9,0x1bf9,0x1bf9,0x1bf9,0x1bf9,0x1bf9,0x1bf9,0x1bf9,0x1bf9,0x1bf9,0x1bf9,0x1bf9,0x1bf9,0x1bf9,
+0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,
+0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,0x1c3b,
+0,0,0,0
};
static const UTrie2 propsVectorsTrie={
propsVectorsTrie_index,
- propsVectorsTrie_index+5188,
+ propsVectorsTrie_index+5348,
NULL,
- 5188,
- 26872,
+ 5348,
+ 27344,
0xa40,
- 0x14c4,
+ 0x1564,
0x0,
0x0,
0x110000,
- 0x7d38,
- NULL, 0, FALSE, FALSE, 0, NULL
+ 0x7fb0,
+ NULL, 0, false, false, 0, NULL
};
-static const uint32_t propsVectors[7095]={
+static const uint32_t propsVectors[7230]={
0x67,0,0,0x67,0,0xe00000,0x67,0x80000,0x20,0x867,0,0,0xa67,0,0,0xb67,
0,0,0xd67,0,0,0xe67,0,0,0x1067,0,0,0x1167,0,0,0x1267,0,
0,0x1367,0,0,0x1467,0,0,0x1567,0,0,0x1667,0,0,0x1767,0,0,
@@ -3493,429 +3554,437 @@ static const uint32_t propsVectors[7095]={
0,0xad67,0,0,0xae67,0,0,0xaf67,0,0,0xb167,0,0,0xb267,0,0,
0xb467,0,0,0xb567,0,0,0xb767,0,0,0xb867,0,0,0xb967,0,0,0xba67,
0,0,0xbc67,0,0,0xbd67,0,0,0xbe67,0,0,0xbf67,0,0,0xc067,0,
-0,0xc167,0,0,0xc267,0,0,0xc367,0,0xe00000,0xc467,0,0xe00000,0xc667,0,0,
-0xc767,0,0,0xc867,0,0,0xc967,0,0,0xca67,0,0,0xcc67,0,0xe00000,0xcf67,
-0,0xe00000,0xd067,0,0xe00000,0xd367,0,0,0xd467,0,0,0xd567,0,0,0xd667,0,
-0,0xd867,0,0,0xda67,0,0,0xdb67,0,0,0xdc67,0,0,0xdd67,0,0,
-0xde67,0,0,0xdf67,0,0,0xe067,0,0,0xe167,0,0,0xe267,0,0,0xe367,
-0,0xe00000,0xe467,0,0,0xe567,0,0,0xe667,0,0,0xe767,0,0,0xe867,0,
-0,0xe967,0,0,0xea67,0,0,0xeb67,0,0,0xec67,0,0,0xed67,0,0,
-0xee67,0,0,0xef67,0,0,0xf167,0,0,0xf367,0,0,0xf567,0,0,0xf667,
-0,0,0xf767,0,0,0xf867,0,0,0xf967,0,0,0xfa67,0,0xe00000,0xfb67,0,
-0,0xfc67,0,0,0xfd67,0,0,0xfe67,0,0,0x10167,0,0,0x10267,0,0,
-0x10367,0,0,0x10467,0,0,0x10667,0,0,0x10767,0,0,0x10867,0,0,0x10967,
-0,0,0x10a67,0,0,0x10b67,0,0,0x10c67,0,0,0x10d67,0,0,0x10e67,0,
-0,0x10f67,0,0,0x11067,0,0,0x11367,0,0,0x11467,0,0,0x11567,0,0,
-0x11667,0,0,0x11767,0,0,0x11867,0,0,0x11967,0,0xe00000,0x11a67,0,0,0x11b67,
-0,0,0x11c67,0,0,0x11d67,0,0,0x11e67,0,0,0x11f67,0,0,0x12067,0,
-0,0x12167,0,0,0x12267,0,0,0x12367,0,0,0x12467,0,0,0x12567,0,0,
-0x12667,0,0,0x12767,0,0,0x12867,0,0,0x12967,0,0,0x12a67,0,0xe00000,0x12b67,
-0,0,0x12c67,0,0,0x12d67,0,0,0x12f67,0,0,0x13067,0,0,0x13167,0,
-0,0x13267,0,0,0x13367,0,0,0x13467,0,0,0x13567,0,0,0x13667,0,0,
-0x13767,0,0,0x13867,0,0,0x13967,0,0,0x13a67,0,0,0x13b67,0,0,0x13c67,
-0,0,0x13d67,0,0,0x13f67,0,0,0x14067,0,0,0xa0067,0,0xe00000,0xa4f67,0,
-0xe00000,0xa5f67,0,0xe00000,0xac567,0,0xe00000,0xad167,0,0xe00000,0xb0067,0,0xe00000,0xb1267,0,0xe00000,
-0xb2e67,0,0xe00000,0x11000100,0,0x900020,0x11000100,0x40000001,0x440020,0x11000100,0x40000001,0x643020,0x11000100,0x40000001,0xa5a040,0x11000100,
-0x40000001,0x116a8a0,0x11000200,0,0x900020,0x11000200,0x4000001,0xc4000b,0x11000200,0x7c00100,0x220402,0x11000200,0x24000000,0x200000,0x11000200,0x24000008,
-0x1710000,0x11000200,0x40000001,0x1d3b020,0x11000219,0x7c00100,0x220401,0x11000219,0x7c00100,0x250401,0x11000319,0x7c00100,0x220401,0x11000319,0x7c00100,0x220402,
-0x11000319,0x7c00100,0x250400,0x11000319,0x7c00100,0x250401,0x11000419,0x7c00100,0x220400,0x11000419,0x7c00100,0x220401,0x11000419,0x7c00100,0x220402,0x11000419,
-0x7c00100,0x230400,0x11000419,0x7c00100,0x250400,0x11000419,0x7c00100,0x250401,0x11000419,0x7c00100,0x250402,0x11000519,0x7c00100,0x220400,0x11000519,0x7c00100,
-0x230400,0x11000600,0x4000400,0x200002,0x11000600,0x4000400,0x200400,0x11000600,0x7c00500,0x220400,0x11000600,0x7c00500,0x230400,0x11000600,0x7c00500,0x530400,
-0x11000600,0x7c00d00,0x230400,0x11000619,0x7c00500,0x22040f,0x11000800,0x4000010,0x1001401,0x11000800,0x4000400,0x200001,0x11000800,0x6800010,0x201001,0x11000800,
-0x7c00500,0x230401,0x11000807,0x7c00100,0x220400,0x11000807,0x7c00100,0x250400,0x1100080e,0x4000400,0x200000,0x1100080e,0x4000400,0x200002,0x1100080e,0x7000500,
-0x220402,0x1100080e,0x7c00100,0x220400,0x1100080e,0x7c00100,0x220401,0x1100080e,0x7c00100,0x220402,0x1100080e,0x7c00100,0x250400,0x1100080e,0x7c00100,0x250401,
-0x1100080e,0x7c00120,0x220402,0x1100080e,0x7c00120,0x250402,0x11000908,0x4000000,0x200000,0x11000908,0x7c00100,0x220400,0x11000908,0x7c00100,0x220401,0x11000908,
-0x7c00100,0x250400,0x11000908,0x7c00100,0x250401,0x11000a03,0x4000000,0x200400,0x11000a03,0x4000000,0x201000,0x11000a03,0x4000000,0x270000,0x11000a03,0x7c00100,
-0x220400,0x11000a03,0x7c00100,0x220402,0x11000a03,0x7c00100,0x250400,0x11000a03,0x7c00500,0x230400,0x11000a03,0xc000010,0x1049400,0x11000b13,0x2802500,0x962460,
-0x11000b13,0x4000000,0x200000,0x11000b13,0x4000000,0x201000,0x11000b13,0x4000000,0x230400,0x11000b13,0x4000002,0x400000,0x11000b13,0x4000010,0x200000,0x11000b13,
-0x7c00100,0x2633800,0x11000c00,0x80000000,0x218960,0x11000c02,0x2802100,0x962460,0x11000c02,0x2802400,0x962460,0x11000c02,0x4000000,0x200000,0x11000c02,0x4000000,
-0x1329400,0x11000c02,0x4000000,0x1329800,0x11000c02,0x4000000,0x1500000,0x11000c02,0x6800000,0x1329800,0x11000c02,0x7c00100,0x230400,0x11000c02,0x7c00100,0x230401,
-0x11000c02,0x7c00100,0x230402,0x11000c02,0x7c00500,0x230400,0x11000c02,0x7d00100,0x230400,0x11000f01,0x2802400,0x962460,0x11000f0a,0x2802100,0x962460,0x11000f0a,
-0x2802400,0x962460,0x11000f0a,0x2806400,0x962460,0x11000f0a,0x4000000,0x200000,0x11000f0a,0x6800100,0x962540,0x11000f0a,0x7c00100,0x230400,0x11000f0a,0x7c00100,
-0x230401,0x11001004,0x2802100,0x962460,0x11001004,0x2802400,0x962460,0x11001004,0x2806400,0x962460,0x11001004,0x4000000,0x200000,0x11001004,0x4000000,0x1500000,
-0x11001004,0x6800100,0x962540,0x11001004,0x6800100,0x962541,0x11001004,0x7c00100,0x230400,0x11001004,0x7c00100,0x230401,0x11001110,0x2802100,0x962460,0x11001110,
-0x2802400,0x962460,0x11001110,0x2806400,0x962460,0x11001110,0x6800100,0x962540,0x11001110,0x7c00100,0x230400,0x11001110,0x7c00100,0x230401,0x1100120f,0x2802100,
-0x962460,0x1100120f,0x2802400,0x962460,0x1100120f,0x2806400,0x962460,0x1100120f,0x6800100,0x962540,0x1100120f,0x7c00100,0x230400,0x1100131f,0x2802100,0x962460,
-0x1100131f,0x2802400,0x962460,0x1100131f,0x2806400,0x962460,0x1100131f,0x4000000,0x200000,0x1100131f,0x6800000,0x1329800,0x1100131f,0x6800100,0x962540,0x1100131f,
-0x6800100,0x962541,0x1100131f,0x7c00100,0x230400,0x1100131f,0x7c00100,0x230401,0x11001423,0x2802100,0x962460,0x11001423,0x2806400,0x962460,0x11001423,0x6800100,
-0x962540,0x11001423,0x6800100,0x962541,0x11001423,0x7c00100,0x230400,0x11001423,0x7c00100,0x230401,0x11001524,0x2802100,0x962460,0x11001524,0x2802100,0x962461,
-0x11001524,0x2806400,0x962460,0x11001524,0x6800000,0x1329800,0x11001524,0x6800100,0x962540,0x11001524,0x7c00100,0x230400,0x11001615,0x2802100,0x962460,0x11001615,
-0x2806400,0x962460,0x11001615,0x6800100,0x962540,0x11001615,0x6800100,0x962541,0x11001615,0x7c00100,0x230400,0x1100171a,0x2802100,0x962460,0x1100171a,0x2806400,
-0x962460,0x1100171a,0x6800000,0x1329800,0x1100171a,0x6800100,0x962540,0x1100171a,0x6800100,0x962541,0x1100171a,0x7c00100,0x230400,0x11001900,0x4000000,0x1600000,
-0x11001926,0x2802100,0x1862460,0x11001926,0x2802400,0x1862460,0x11001926,0x2806100,0x1862460,0x11001926,0x4000000,0x200000,0x11001926,0x4000010,0x400000,0x11001926,
-0x6800000,0x1329800,0x11001926,0x7800100,0x1830142,0x11001926,0x7c00100,0x1830000,0x11001926,0x7c00900,0x1830000,0x11001926,0x7e00100,0x1830000,0x11001a18,0x2802100,
-0x1862460,0x11001a18,0x2802400,0x1862460,0x11001a18,0x6800000,0x1329800,0x11001a18,0x7800100,0x1830142,0x11001a18,0x7c00100,0x1830000,0x11001a18,0x7c00100,0x1830002,
-0x11001a18,0x7c00900,0x1830000,0x11001a18,0x7e00100,0x1830000,0x11001d0c,0x7c00100,0x230400,0x11001d0c,0x7c00100,0x250400,0x11001e12,0x7c00100,0x2230500,0x11001e12,
-0x7c00100,0x2330520,0x11001e12,0x7c80100,0x2330520,0x11002619,0x7c00100,0x220401,0x11002619,0x7c00100,0x220402,0x11002619,0x7c00100,0x250401,0x1100270e,0x4000400,
-0x200001,0x1100270e,0x4000400,0x200002,0x1100270e,0x4000400,0x500001,0x1100270e,0x7c00100,0x220401,0x1100270e,0x7c00100,0x250401,0x11002800,0x80000,0x918820,
-0x11002800,0x80000,0x1c18020,0x11002800,0x180000,0x918820,0x11002800,0x4000001,0x445801,0x11002800,0x4000001,0x445802,0x11002800,0x4000001,0xc4000b,0x11002800,
-0x6800000,0x201c00,0x11002800,0x6800020,0x201c00,0x11002800,0x24000000,0x200000,0x11002800,0x24000000,0x200002,0x11002800,0x24000000,0x810000,0x11002800,0x24000000,
-0x1410000,0x11002800,0x24000000,0x1500000,0x11002800,0x24000000,0x1500002,0x11002800,0x24000002,0x400000,0x11002800,0x24000006,0xc0000b,0x11002800,0x24000008,0x1410000,
-0x11002800,0x24000008,0x1710000,0x11002800,0x24000020,0x1001400,0x11002800,0x24000020,0x1500002,0x11002800,0x2c000010,0x1248000,0x11002800,0x2c000010,0x1248002,0x11002800,
-0x40000001,0x63b020,0x11002800,0x40080000,0x918820,0x11002801,0x80000,0x2a65620,0x11002801,0x82000,0x962460,0x11002900,0x4000000,0x20000e,0x11002900,0x4000000,
-0x20000f,0x11002900,0x4000020,0x20000e,0x11002900,0x4000020,0x20000f,0x11002900,0x4000020,0x81000e,0x11002900,0x4000020,0x81000f,0x11002900,0x4000020,0x141000e,
-0x11002900,0x4000020,0x141000f,0x11002900,0x4000022,0x20000e,0x11002900,0x4000022,0x20000f,0x11002a00,0x4000000,0x1500000,0x11002a00,0x4000000,0x1600000,0x11002a00,
-0x4000000,0x1600002,0x11002b01,0x2000,0x962460,0x11002b01,0x2802020,0x962460,0x11002c00,0x4000000,0x200000,0x11002c00,0x4000000,0x200002,0x11002c00,0x4000000,
-0x20000f,0x11002c00,0x4000020,0x200000,0x11002c00,0x7c00000,0x200000,0x11002c00,0x7c00020,0x200000,0x11002c00,0x7c00120,0x220405,0x11002c00,0x7c00120,0x230402,
-0x11002c00,0x7c00120,0x250402,0x11002c00,0x7c00120,0x250405,0x11002c19,0x7c00100,0x250400,0x11002c19,0x7c00100,0x250401,0x11002d00,0x4000000,0x100006,0x11002d00,
-0x4000000,0x200006,0x11002d19,0x7c00100,0x220402,0x11002d19,0x7c00100,0x230400,0x11002d19,0x7c00100,0x250402,0x11002e00,0x24000000,0x200000,0x11002e00,0x24000020,
-0x200000,0x11002e00,0x24000020,0x200001,0x11002f00,0x24000020,0x200000,0x11002f00,0x24000020,0x200001,0x11002f00,0x24000020,0x200002,0x11002f00,0x24000020,0xf00000,
-0x11002f00,0x24000020,0x1600000,0x11002f00,0x24000022,0x1600000,0x11003000,0x24000000,0x200000,0x11003000,0x24000020,0x200000,0x11003000,0x24000020,0x810000,0x11003000,
-0x24000020,0x1410000,0x11003100,0x24000000,0x200000,0x11003200,0x24000000,0x200000,0x11003300,0x4000000,0x100003,0x11003400,0x24000000,0x100000,0x11003400,0x24000000,
-0x200000,0x11003500,0x24000000,0x200000,0x11003600,0x24000000,0x200000,0x11003600,0x24000020,0x200000,0x11003700,0x24000000,0x200000,0x11003700,0x24000000,0xe00000,
-0x11003700,0x24000000,0x2800000,0x11003700,0x24000020,0x200000,0x11003800,0x4000000,0x100000,0x11003800,0x24000000,0x200000,0x11003800,0x24000000,0xb00000,0x11003800,
-0x24000000,0xe00000,0x11003800,0x24000000,0x1710000,0x11003800,0x24000000,0x2800000,0x11005003,0x7c00100,0x220402,0x11005013,0x2802500,0x962460,0x11005013,0x4000020,
-0x200005,0x11005013,0x7c00100,0x2633801,0x11005013,0x7c00100,0x2633802,0x11005013,0x7c00100,0x2633805,0x11005019,0x7c00100,0x220402,0x11005102,0x7000100,0x230408,
-0x11005102,0x7c00100,0x230404,0x11005102,0x7c00100,0x230407,0x11005102,0x7c00100,0x230408,0x11005102,0x7c00100,0x230409,0x11005201,0x2802400,0x962460,0x11005500,
-0x80000,0x1e18820,0x11005502,0x7000100,0x230408,0x11005502,0x7c00100,0x230404,0x11005502,0x7c00100,0x230407,0x11005502,0x7c00100,0x230408,0x11005502,0x7c00100,
-0x230409,0x11005667,0x1000,0,0x11020200,0x80004,0x418820,0x11020200,0x4000000,0x100006,0x11020200,0x4000000,0x10000f,0x11020200,0x4000400,0x100002,
-0x11020200,0x4000400,0x500002,0x11020200,0x6800c00,0x101000,0x11020200,0x24000000,0x100000,0x11020200,0x24000000,0x200000,0x11020200,0x24000000,0x1400000,0x11020200,
-0x24000000,0x1500000,0x11020200,0x24000000,0x1600000,0x11020200,0x24000020,0x100000,0x11020200,0x24000020,0x1600000,0x11020219,0x7c00100,0x12040f,0x11020219,0x7c00100,
-0x220400,0x11020219,0x7c00100,0x220401,0x11020219,0x7c00100,0x250400,0x11020319,0x7c00100,0x220400,0x11020319,0x7c00100,0x220401,0x11020319,0x7c00100,0x220402,
-0x11020319,0x7c00100,0x250400,0x11020319,0x7c00100,0x250402,0x11020319,0x7d00100,0x220402,0x11020419,0x7c00100,0x220401,0x11020519,0x7c00100,0x220400,0x11020600,
-0x4000400,0x100002,0x11020600,0x4000400,0x200400,0x11020600,0x7c00500,0x130400,0x11020600,0x7c00d00,0x130400,0x11020701,0x2802400,0x962460,0x11020701,0x2802400,
-0x962461,0x11020701,0x2802400,0xc62460,0x1102080e,0x7c00100,0x220400,0x1102080e,0x7c00100,0x250400,0x11020908,0x7c00100,0x220400,0x11020908,0x7c00100,0x220401,
-0x11020908,0x7c00100,0x250400,0x11020908,0x7c00100,0x250401,0x11022800,0x24000000,0x100000,0x11022800,0x24000000,0x200000,0x11022800,0x24000000,0x200002,0x11022800,
-0x24000000,0x401000,0x11022800,0x24000000,0xf00002,0x11022800,0x24000000,0xf0ac02,0x11022800,0x24000000,0x1500000,0x11022800,0x24000002,0x100000,0x11022800,0x24000002,
-0x370000,0x11022800,0x24000002,0x470000,0x11022800,0x24000006,0x400000,0x11022800,0x24000008,0x1710000,0x11022800,0x24000008,0x1712c00,0x11022800,0x24000020,0x100000,
-0x11022800,0x24000020,0x1500000,0x11022800,0x24000020,0x1500002,0x11022900,0x4000000,0x10000e,0x11022900,0x4000000,0x10000f,0x11022919,0x7c00100,0x12040f,0x11022c00,
-0x4000000,0x100002,0x11022c00,0x4000000,0x10000f,0x11022c00,0x4000000,0x1500002,0x11022c00,0x4000000,0x1600002,0x11022c00,0x7c00120,0x120405,0x11022c0e,0x7c00100,
-0x250401,0x11022c19,0x7c00100,0x150401,0x11022d00,0x4000000,0x100006,0x11022d00,0x4000000,0x200006,0x11022d19,0x7c00100,0x120402,0x11022d19,0x7c00100,0x150402,
-0x11022e00,0x24000000,0x200000,0x11022e00,0x24000020,0x100000,0x11022f00,0x24000020,0x100000,0x11022f00,0x24000020,0x100001,0x11022f00,0x24000020,0x100002,0x11023000,
-0x24000000,0x100000,0x11023300,0x4000000,0x100002,0x11023300,0x4000000,0x100003,0x11023300,0x4000100,0x120403,0x11023300,0x4000100,0x150403,0x11023400,0x24000000,
-0x100000,0x11023500,0x24000000,0x100000,0x11023600,0x24000000,0x100000,0x11023600,0x24000020,0x100000,0x11023700,0x24000000,0x100000,0x11023700,0x24000000,0xe00000,
-0x11023700,0x24000020,0x100000,0x11023800,0x4000000,0x100000,0x11023800,0x24000000,0x200000,0x11024e67,0,0,0x11025600,0x4000000,0x100000,0x11042a00,
-0x4000000,0x1600000,0x11045700,0x4000000,0x20000a,0x11045700,0x4000020,0x20000a,0x11045712,0x7c00100,0xe3040a,0x11045712,0x7c80100,0xe3040a,0x11045716,0x7c00100,
-0xe30c0a,0x11045716,0x7c00100,0x2530c0a,0x11063d00,0x4000001,0x445811,0x11065700,0x4000000,0x810011,0x11065700,0x4000000,0xe00011,0x11065700,0x4000000,0x1410011,
-0x11065700,0x4000000,0x1500011,0x11065700,0x4000000,0x1600011,0x11065700,0x4000006,0xe70011,0x11065700,0x4000008,0xe00011,0x11065700,0x4000008,0xe02c11,0x11065700,
-0x4000010,0x871411,0x11065700,0x4000010,0x1201411,0x11065700,0x4000010,0x1271011,0x11065700,0x4000020,0xe00011,0x11065700,0x4000400,0xe00011,0x11065700,0x4000420,
-0xe00011,0x11065700,0x6800000,0xe01c11,0x11065700,0x6800040,0xe29811,0x11065700,0xc000010,0x80ac11,0x11065700,0xc000010,0xb48011,0x11065719,0x7c00100,0xe20411,
-0x11065719,0x7c00100,0xe50411,0x11065719,0x7c00140,0xe20411,0x11065719,0x7c00140,0xe50411,0x11080100,0x6800000,0x201c00,0x11080100,0x68000c0,0x1329800,0x11080100,
-0x24000000,0x200000,0x11080100,0x24000000,0x810000,0x11080100,0x24000000,0x1410000,0x11080100,0x24000000,0x1500000,0x11080100,0x24000000,0x1600000,0x11080100,0x24000000,
-0x1b00000,0x11080100,0x24000000,0x2410000,0x11080100,0x24000006,0xd70000,0x11080100,0x24000008,0x1713c00,0x11080100,0x24000008,0x1714000,0x11080100,0x24000010,0x1001400,
-0x11080100,0x24000010,0x1071000,0x11080100,0x24000010,0x1071400,0x11080100,0x24000020,0x200000,0x11080100,0x24000020,0x400000,0x11080100,0x24000020,0x1600000,0x11080100,
-0x24000400,0x200000,0x11080100,0x24000420,0x200000,0x11080100,0x2c000010,0xb48000,0x11080100,0x2c000010,0x100ac00,0x11080100,0x44000001,0x1a45800,0x11080119,0x7c00100,
-0x220400,0x11080119,0x7c00100,0x250400,0x11080119,0x7c001c0,0x220400,0x11080119,0x7c001c0,0x250400,0x11080200,0x4000400,0x200002,0x11080200,0x24000000,0x200000,
-0x11080200,0x24000000,0x1500000,0x11080200,0x24000000,0x1600000,0x11080200,0x24000020,0x200000,0x110a1e12,0x7c00100,0x2130480,0x110a1e12,0x7c80100,0x2130480,0x110a3000,
-0x24000000,0xe00000,0x110a3000,0x24100000,0x810001,0x110a3000,0x24100000,0x1410001,0x110a3700,0x24000000,0x200000,0x110a3d00,0x4000000,0xe00000,0x110a3d00,0x4000000,
-0xe00002,0x110a3d00,0x24000000,0xe00000,0x110a3d11,0x7c00300,0xe30000,0x110a3d11,0x7c00900,0x1230400,0x110a3d12,0x2802400,0x962460,0x110a3e14,0x7c00100,0xe30000,
-0x110a3e14,0x7c00100,0xe30001,0x110a3e14,0x7c00100,0x2530000,0x110a3e14,0x7c00900,0x1230000,0x110a3e14,0x7c00900,0x1230001,0x110a3f16,0x7c00100,0xe30c00,0x110a3f16,
-0x7c00100,0xe30c01,0x110a3f16,0x7c00100,0x2530c00,0x110a3f16,0x7c00900,0x1230c00,0x110a3f16,0x7c00900,0x1230c01,0x110a4005,0x7c00100,0xe30400,0x110a4112,0x7c00100,
-0xe30402,0x110a4112,0x7c80100,0xe30402,0x110a4400,0x4000000,0xe00000,0x110a4412,0x4000000,0xe00002,0x110a4412,0x4000000,0xe00003,0x110a4416,0x4000000,0xe00c03,
-0x110a4500,0x4000000,0xe0000d,0x110a4516,0x4000000,0xe00c0d,0x110a4711,0x7c40300,0xe30000,0x110a4f11,0x7c00300,0xe30001,0x110a4f11,0x7c40300,0xe30000,0x110a5300,
-0x4000000,0x810010,0x110a5300,0x4000000,0xe00002,0x110a5300,0x4000000,0xe00010,0x110a5300,0x4000000,0x1410010,0x110a5300,0x4000002,0xe70010,0x110a5300,0x4000008,
-0x810010,0x110a5300,0x4000008,0x1410010,0x110a5300,0x6800000,0xe01c02,0x110a5300,0x6800000,0xe01c10,0x110a5400,0x4000000,0x81000c,0x110a5400,0x4000000,0xe0000c,
-0x110a5400,0x4000000,0x141000c,0x110a5400,0x4000000,0x150000c,0x110a5400,0x4000000,0x160000c,0x110a5400,0x4000002,0xe7000c,0x110a5400,0x4000010,0x87140c,0x110a5400,
-0x4000010,0xe7000c,0x110a5400,0x4000010,0x120140c,0x110a5400,0x4000010,0x127100c,0x110a5400,0x4000020,0xe0000c,0x110a5400,0x4000026,0xe7000c,0x110a5400,0xc000010,
-0x80ac0c,0x110a5400,0xc000010,0xb4800c,0x11400c0c,0x4000010,0xb00000,0x11400c0c,0x4000010,0x1071400,0x11400c17,0xc000010,0xb48000,0x11400c1e,0x7c00900,0x230400,
-0x11400f4b,0xc000010,0x448000,0x11400f5f,0xc000010,0x448000,0x11401d94,0x4000000,0x200000,0x11403dca,0x4000000,0xe00000,0x114457bf,0x4000004,0x120000a,0x114457bf,
-0x4000008,0x81000a,0x114457bf,0x4000008,0x141000a,0x114457bf,0x4000010,0x87000a,0x114457bf,0xc000010,0x84800a,0x114457c8,0x3802500,0x126246a,0x114457c8,0x7c00d00,
-0x2530c0a,0x114a3dbf,0x24000000,0x810000,0x114a3dbf,0x24000000,0x1410000,0x114a3dbf,0x24000008,0x810000,0x114a3dbf,0x24000008,0x1410000,0x114a3dbf,0x24000010,0x870000,
-0x114a3dbf,0x2c000010,0x848000,0x114a3dc5,0x4000000,0xe00000,0x114a3dc5,0x24000000,0xe00000,0x114a3dc5,0x24000002,0xe00000,0x114a3dc5,0x24000002,0x1200000,0x114a3dc5,
-0x24000008,0x810000,0x114a3dc5,0x24000008,0x1410000,0x114a3dc8,0x7c00900,0x930c00,0x114a3dc8,0x7c00900,0xe30c00,0x114a3dca,0x7c00300,0xe30000,0x114a3ec8,0x7000400,
-0x1200c02,0x114a3fbf,0x4000004,0x1200000,0x114a3fc8,0x7c00d00,0x2530c00,0x114a42ca,0x4000000,0xe00000,0x114a42ca,0x4000000,0xe0000f,0x114a44ca,0x4000000,0xe00002,
-0x114a44ca,0x4000000,0xe00003,0x114a45ca,0x4000000,0xe00002,0x114a45ca,0x4000000,0xe0000d,0x11505103,0x24000000,0x810000,0x11505103,0x24000000,0x1410000,0x1180090a,
-0x2802400,0x962460,0x11800c27,0x2802100,0x962460,0x11800c27,0x2802500,0x962460,0x11800f32,0x2802400,0x962460,0x11800f3f,0x2802400,0x962460,0x11820700,0x2802400,
-0x962460,0x11820700,0x2802500,0x962460,0x118a3dcb,0x2802400,0x962460,0x118a3ec8,0x2802400,0x962460,0x11c00904,0x2802400,0x962460,0x11c00908,0x2802400,0x962460,
-0x11c00c2c,0x6800000,0x1329800,0x11c00c30,0xc000010,0xb48000,0x11c00f78,0x6800000,0x1329800,0x11c0107d,0x6800000,0x1329800,0x11c01181,0x6800000,0x1329800,0x11c01285,
-0x6800000,0x1329800,0x11c01489,0x4000000,0x200000,0x11c01489,0x6800000,0x1329800,0x11c0168d,0x6800000,0x1329800,0x11d05107,0x7c00100,0x230408,0x20000067,0x1000,
-0,0x20000b13,0x2802400,0x962460,0x20000b13,0x2802500,0x962460,0x20001b27,0x2802100,0x962460,0x20001b27,0x2802100,0x962461,0x20001b27,0x2802400,0x962460,
-0x20001b27,0x2806400,0x962460,0x20001b27,0x2902100,0x962462,0x20001b27,0x4000000,0x200000,0x20001b27,0x4000000,0x400000,0x20001b27,0x4000000,0x500000,0x20001b27,
-0x4000000,0x810000,0x20001b27,0x4000000,0xb00000,0x20001b27,0x4000000,0xc0000b,0x20001b27,0x4000000,0x1410000,0x20001b27,0x4000010,0xb00000,0x20001b27,0x4000010,
-0xc00000,0x20001b27,0x6800000,0x1329800,0x20001b27,0x6800100,0x462540,0x20001b27,0x6800400,0x962540,0x20001b27,0x7c00100,0x230400,0x20001b27,0x7c00100,0x230401,
-0x20002619,0x7c00100,0x220401,0x20002a00,0x4000000,0x1600000,0x20004b67,0,0x1900000,0x20004c67,0,0x1900000,0x20004d67,0,0x1900000,0x20006d67,
-0x1000,0,0x20006e67,0x1000,0,0x20026d67,0,0,0x20026e67,0,0,0x200a4a12,0x7c00100,0x1f304c1,0x200a4a12,0x7c00100,
-0x20304e1,0x21005600,0x4000000,0x700000,0x21022a00,0x4000000,0x1600000,0x30000419,0x7c00100,0x220400,0x30000419,0x7c00100,0x220401,0x30000419,0x7c00100,0x250400,
-0x30000419,0x7c00100,0x250401,0x30000519,0x7c00100,0x220400,0x30000600,0x4000400,0x200400,0x30000600,0x7c00500,0x230400,0x30000605,0x4000400,0x200400,0x3000080e,
-0x7c00100,0x220400,0x30000908,0x2000,0x962460,0x30000908,0x7c00100,0x220400,0x30000908,0x7c00100,0x220401,0x30000908,0x7c00100,0x250400,0x30000908,0x7c00100,
-0x250401,0x30000a03,0x4000006,0x400400,0x30000c02,0x4000000,0x200000,0x30000c02,0x7c00100,0x230400,0x30000d22,0x2802100,0x962460,0x30000d22,0x2802400,0x962460,
-0x30000d22,0x2802500,0x962460,0x30000d22,0x4000000,0x200000,0x30000d22,0x4000010,0x200000,0x30000d22,0x7c00100,0x230400,0x30000d22,0xc000010,0x248000,0x30000d22,
-0x80000000,0x218960,0x30000e25,0x2802500,0x962460,0x30000e25,0x7c00100,0x230400,0x30001821,0x2802100,0x962460,0x30001821,0x2806400,0x962460,0x30001821,0x4000000,
-0x200000,0x30001821,0x6800100,0x962540,0x30001821,0x6800100,0x962541,0x30001821,0x7c00100,0x230400,0x30001b27,0x2802100,0x962460,0x30001b27,0x2802400,0x962460,
-0x30001b27,0x4000000,0x200000,0x30001b27,0x4000000,0x400000,0x30001b27,0x7c00100,0x230400,0x30001c1c,0x2802100,0x1862460,0x30001c1c,0x2802400,0x1862460,0x30001c1c,
-0x2806400,0x1862460,0x30001c1c,0x4000000,0x200000,0x30001c1c,0x6800100,0x1862400,0x30001c1c,0x6800100,0x1862540,0x30001c1c,0x7c00100,0x1830000,0x30001c1c,0x7c00100,
-0x1830001,0x30001c1c,0xc000010,0x448000,0x30001f0b,0x4000000,0x200000,0x30001f0b,0x4000010,0x200000,0x30001f0b,0x4000010,0x400000,0x30001f0b,0x6800000,0x200000,
-0x30001f0b,0x7c00100,0x230400,0x30001f0b,0xc000010,0x248000,0x30002006,0x7c00100,0x250400,0x30002128,0x4000000,0x200000,0x30002128,0x7c00100,0x230400,0x30002128,
-0xc000010,0x248000,0x3000221d,0x4000000,0x810000,0x3000221d,0x4000000,0x1410000,0x3000221d,0x4000001,0x445800,0x3000221d,0x7c00100,0x230400,0x30002300,0x4000010,
-0x400000,0x30002320,0x7c00100,0x230400,0x30002417,0x2802100,0x1862460,0x30002417,0x2802400,0x1862460,0x30002417,0x2806400,0x1862460,0x30002417,0x2882000,0x1862460,
-0x30002417,0x4000000,0x200000,0x30002417,0x4000000,0x400000,0x30002417,0x4000000,0x1600000,0x30002417,0x4000010,0x400000,0x30002417,0x4000010,0x1200000,0x30002417,
-0x6800000,0x1329800,0x30002417,0x6800100,0x1862540,0x30002417,0x7c00100,0x1830000,0x30002417,0x7d00100,0x1830000,0x3000251b,0x80000,0xc18820,0x3000251b,0x2802100,
-0x962460,0x3000251b,0x3c02100,0x962460,0x3000251b,0x4000000,0x200000,0x3000251b,0x4000006,0x500000,0x3000251b,0x4000010,0x400000,0x3000251b,0x4000010,0xb70000,
-0x3000251b,0x4000800,0x200000,0x3000251b,0x6800000,0x1329800,0x3000251b,0x7c00100,0x230400,0x3000251b,0x7c00900,0x230400,0x3000251b,0xc000010,0xb48000,0x3000251b,
-0x12882000,0x962460,0x30002800,0x24000000,0x200000,0x30002800,0x2c000010,0x1248002,0x30002a00,0x4000000,0x1600000,0x30002b01,0x2000,0x962460,0x30002c00,0x4000000,
-0x200000,0x30002c00,0x7c00100,0x220405,0x30002d19,0x7c00100,0x250400,0x30002e00,0x24000000,0x200000,0x30003000,0x24000000,0x200000,0x30003100,0x24000000,0x200000,
-0x30003600,0x24000000,0x200000,0x30003700,0x24000000,0x200000,0x3000392e,0x24000000,0x200000,0x30005013,0x7c00100,0x2633801,0x30005600,0,0x918820,0x30020600,
-0x4000400,0x500400,0x30020701,0x2802400,0x962460,0x30020701,0x2802400,0xc62460,0x300a3a11,0x4020000,0xe00000,0x300a3a11,0x4020000,0xe00002,0x300a3b11,0x4020000,
-0xe00002,0x300a3c00,0x4008000,0xe00000,0x300a3c00,0x4010000,0xe00000,0x300a3d11,0x7c00300,0xe30002,0x300a4305,0x7c00100,0xe30400,0x300a4611,0x7c40300,0xe30000,
-0x300a4829,0x7c00100,0xe30400,0x300a4829,0x7c00900,0x1230400,0x300a4929,0x4000000,0xe00000,0x3040259a,0x4000010,0x400000,0x3040259a,0x4000010,0xb70000,0x3040259a,
-0xc000010,0xb48000,0x304028ba,0x4000001,0xc41c0b,0x304a3dca,0x4000000,0xe00000,0x30800c27,0x2802100,0x962460,0x30c01c92,0x6800000,0x1329800,0x3100080e,0x7c00120,
-0x220402,0x3100080e,0x7c00120,0x250402,0x31005167,0x1000,0,0x3100581e,0x4000000,0x200000,0x3100581e,0x7c00100,0x230400,0x3100590d,0x7c00100,0x230400,
-0x31005a09,0x7c00100,0x220400,0x31005a09,0x7c00100,0x250400,0x31005b00,0x4000000,0x200000,0x31005c00,0x80000,0x918820,0x31005c00,0x2802000,0x962460,0x31005c00,
-0x2802400,0x962460,0x31005c00,0x4000000,0x200000,0x31005c00,0x4000000,0x200001,0x31005c00,0x6800000,0x962540,0x31005c00,0x6800400,0x962540,0x31005c01,0x2802400,
-0x962460,0x31005d00,0x4000020,0x200005,0x31005d00,0x6800020,0x1329805,0x31005d00,0x7c00120,0x220405,0x31005d00,0x7c00120,0x250405,0x31006000,0x82000,0x962460,
-0x31006000,0x180000,0x918820,0x310a5e11,0x7c40300,0xe30000,0x310a5f11,0x7c00300,0xe30001,0x32000419,0x7c00100,0x250400,0x3200080e,0x4000020,0x200000,0x3200080e,
-0x7c00100,0x220400,0x3200080e,0x7c00100,0x250400,0x32000908,0x7c00100,0x220400,0x32000908,0x7c00100,0x250400,0x32000c02,0x7c00100,0x230400,0x32000e25,0x7c00100,
-0x230400,0x32001d0c,0x7c00100,0x230400,0x32002800,0x80000,0x1e18820,0x32002800,0x80020,0x218820,0x32002800,0x4000001,0x445802,0x32002800,0x24000000,0x200000,
-0x32002800,0x24000000,0x200002,0x32002800,0x24000020,0x200000,0x32002800,0x2c000010,0x1248002,0x32002919,0x7c00100,0x22040f,0x32002a00,0x4000000,0x1600000,0x32002b01,
-0x2000,0x962460,0x32002b01,0x2802000,0x962460,0x32002b01,0x2802020,0x962460,0x32002c00,0x4000000,0x200000,0x32002c00,0x4000020,0x200000,0x32002c00,0x4000020,
-0x200005,0x32002c00,0x7c00120,0x220405,0x32002c00,0x7c00120,0x250405,0x32002e00,0x24000020,0x200000,0x32002f00,0x24000020,0x200000,0x32003000,0x24000000,0x200000,
-0x32003000,0x24000020,0x200000,0x32003500,0x24000000,0x200000,0x32003600,0x24000020,0x200000,0x32003700,0x24000000,0x100000,0x32003700,0x24000000,0x200000,0x32003800,
-0x24000000,0x810000,0x32003800,0x24000000,0x1410000,0x32005102,0x4000000,0x1500008,0x32005502,0x7c00100,0x230400,0x32006108,0x7c00100,0x220400,0x32006108,0x7c00100,
-0x250400,0x3200622a,0x2802100,0x962460,0x3200622a,0x2806400,0x962460,0x3200622a,0x7c00100,0x230400,0x3200632b,0x2802100,0x962460,0x3200632b,0x6804000,0x962540,
-0x3200632b,0x7c00100,0x230400,0x3200642c,0x2802100,0x962460,0x3200642c,0x7c00100,0x230400,0x3200652d,0x2802100,0x962460,0x3200652d,0x7c00100,0x230400,0x32006600,
-0x24000020,0x200000,0x32006700,0x24000020,0x200000,0x32006800,0x24000020,0x200000,0x32006900,0x24000020,0x200000,0x32006900,0x24000020,0x810000,0x32006900,0x24000020,
-0x1410000,0x32006a00,0x24000020,0x200000,0x32006a00,0x24000020,0x200001,0x32006a00,0x24000020,0x200002,0x32020701,0x2882000,0xc62460,0x32023300,0x4000000,0x100000,
-0x32026c01,0x12882000,0x962460,0x32065700,0x4000000,0x810011,0x32065700,0x4000000,0x1410011,0x32086600,0x24000020,0x810000,0x32086600,0x24000020,0x1410000,0x32086900,
-0x24000020,0x810000,0x32086900,0x24000020,0x1410000,0x320a3600,0x24000020,0x200000,0x320a3d11,0x7c00100,0x1230400,0x320a3e14,0x7c00100,0xe30010,0x320a3e14,0x7c00100,
-0x2530000,0x320a3f16,0x7c00100,0xe30c10,0x320a4400,0x4000000,0xe00003,0x320a4929,0x4000000,0xe00000,0x320a4f11,0x7c00300,0xe30001,0x320a6b16,0x7c00100,0x2530c00,
-0x32406396,0xc000010,0x448000,0x324a3dcd,0x4000000,0xe00000,0x324a3dcd,0x7c00100,0x1230400,0x324a3fc8,0x4000002,0x1200c00,0x324a53c5,0x24000000,0xe00000,0x32820701,
-0x2802000,0x962460,0x40000419,0x7c00100,0x220400,0x40000519,0x7c00100,0x220400,0x40000600,0x4000400,0x200400,0x4000080e,0x7c00100,0x220400,0x4000080e,0x7c00100,
-0x250400,0x4000080e,0x7c00100,0x250402,0x40000c02,0x2802100,0x962460,0x40000c02,0x2802400,0x962460,0x40000c02,0x2802500,0x962460,0x40000c02,0x4000000,0x200000,
-0x40000c02,0x4000000,0x1071400,0x40000c02,0x7c00100,0x230400,0x40000c02,0x80000000,0x218960,0x40000d22,0x7c00100,0x230400,0x40000f0a,0x7c00100,0x230400,0x40001004,
-0x7c00100,0x230400,0x40001110,0x2802100,0x962460,0x40001110,0x6800100,0x962540,0x4000120f,0x2802100,0x962460,0x4000120f,0x4000000,0x1600000,0x4000120f,0x7c00100,
-0x230400,0x4000131f,0x7c00100,0x230400,0x40001423,0x4000000,0x200000,0x40001423,0x4000000,0x1600000,0x40001615,0x2802400,0x962460,0x40001615,0x7c00100,0x230400,
-0x40002417,0x2802400,0x1862460,0x40002417,0x4000000,0x200000,0x40002800,0x6800000,0x201c00,0x40002800,0x24000002,0x200000,0x40002c00,0x4000000,0x200002,0x40003000,
-0x24000000,0x200000,0x40003000,0x24000020,0x200000,0x40003700,0x24000000,0x200000,0x40005a09,0x7c00100,0x220400,0x40005a09,0x7c00100,0x250400,0x40005d00,0x7c00120,
-0x220405,0x40006f30,0x2802100,0x962460,0x40006f30,0x2802400,0x962460,0x40006f30,0x4000000,0x200000,0x40006f30,0x6800000,0x1329800,0x40006f30,0x6800100,0x962540,
-0x40006f30,0x7c00100,0x230400,0x40006f30,0xc000010,0xb48000,0x40007034,0x7c00100,0x1830000,0x40007117,0x4000000,0x200000,0x40007208,0x7c00100,0x220400,0x4000720e,
-0x7c00100,0x220400,0x4000720e,0x7c00500,0x22040e,0x4000720e,0x7c00500,0x22040f,0x40007219,0x7c00100,0x220400,0x40007219,0x7c00500,0x220400,0x40007219,0x7c00500,
-0x22040e,0x40007219,0x7c00500,0x22040f,0x40007300,0x24000000,0x200000,0x40007400,0x4000000,0x200000,0x40007531,0x7c00100,0x230400,0x40007631,0x7c00100,0x230400,
-0x40007835,0x4000010,0x400000,0x40007835,0x7c00100,0x230400,0x40007933,0x7c00100,0x230400,0x40007a32,0x6800000,0x1329800,0x40007a32,0x7c00100,0x230400,0x40007b2f,
-0x7c00100,0x230400,0x40007c00,0x4000000,0x200000,0x40020701,0x2802400,0x962460,0x40020701,0x2802400,0xc62460,0x40023300,0x4000000,0x200000,0x40027d01,0x12882000,
-0x962460,0x400a3700,0x24000000,0x200000,0x400a3700,0x24000000,0xe00000,0x400a4400,0x4000000,0xe0000d,0x400a4412,0x4000000,0xe00002,0x400a4412,0x4000000,0xe00003,
-0x400a4500,0x4000000,0xe0000d,0x400a5300,0x4000000,0x810010,0x400a5300,0x4000000,0x1410010,0x40507709,0x4000000,0x200000,0x4050770c,0x4000000,0x400000,0x4050770f,
-0x4000000,0x200000,0x4050770f,0x4000000,0x400000,0x40c01489,0x4000000,0x200000,0x40d05107,0x4000000,0x200000,0x41000419,0x7c00100,0x220400,0x41000419,0x7c00100,
-0x250400,0x4100080e,0x7c00100,0x220400,0x4100080e,0x7c00100,0x250400,0x41000908,0x7c00100,0x220400,0x41000908,0x7c00100,0x250400,0x41000b13,0x2802000,0x962460,
-0x41000b13,0x2802100,0x962460,0x41000b13,0x4000000,0xb00000,0x41000c02,0x2802100,0x962460,0x41000c02,0x4000000,0x1500000,0x41000c02,0xc000010,0xb48000,0x41000f0a,
-0x7c00100,0x230400,0x41001004,0x7c00100,0x230400,0x41001423,0x7c00100,0x230400,0x41001b27,0x4000000,0x500000,0x41001d0c,0x7c00100,0x230400,0x41001d0c,0x7c00100,
-0x23040f,0x41001f0b,0x2802400,0x962460,0x41001f0b,0x4000000,0x200000,0x41001f0b,0x7c00100,0x230400,0x41002800,0x24000000,0x200000,0x41002800,0x24000000,0x400000,
-0x41002919,0x7c00100,0x22040e,0x41002a00,0x4000000,0x1600000,0x41002b01,0x2802020,0x962460,0x41002c00,0x4000000,0x200000,0x41002c00,0x7c00120,0x220405,0x41003000,
-0x24000000,0x200000,0x41003700,0x24000000,0x200000,0x41003700,0x24000000,0xe00000,0x41005d00,0x7c00120,0x220405,0x41006600,0x24000020,0x200000,0x41006600,0x24000020,
-0x810000,0x41006600,0x24000020,0x1410000,0x41007208,0x7c00100,0x22040f,0x41007219,0x7c00100,0x220400,0x41007300,0x24000000,0x200000,0x41007e0e,0x2802000,0x962460,
-0x41007e0e,0x4000000,0x200000,0x41007f0e,0x4000000,0x200000,0x41007f0e,0x7c00100,0x230400,0x41008002,0x7c00100,0x230400,0x41008137,0x2802100,0x962460,0x41008137,
-0x4000000,0x200000,0x41008137,0x6800100,0x962540,0x41008137,0x7c00100,0x230400,0x41008301,0x2802000,0x962460,0x41008407,0x4000000,0x200000,0x41008407,0x4000000,
-0x400000,0x41008407,0x4000000,0xb00000,0x41008407,0x7c00100,0x220400,0x41008407,0x7c00100,0x250400,0x4100850b,0x7c00100,0x230400,0x4100860b,0x4000000,0x200000,
-0x4100860b,0x7c00100,0x230400,0x4100870c,0x7c00100,0x220400,0x41008838,0x7c00100,0x220400,0x41008838,0x7c00100,0x250400,0x41008939,0x2802000,0x962460,0x41008939,
-0x2802100,0x962460,0x41008939,0x2806000,0x962460,0x41008939,0x4000000,0x200000,0x41008939,0x4000000,0x400000,0x41008939,0x7c00100,0x230400,0x41008939,0xc000010,
-0x448000,0x41008a00,0x4000400,0x200400,0x41008b3b,0x4000000,0x1800000,0x41008b3b,0x6800000,0x1329800,0x41008b3b,0x7c00100,0x1830000,0x41008b3b,0x7e00100,0x1830000,
-0x41008c3d,0x4000010,0x400000,0x41008c3d,0x7c00100,0x230400,0x41008d0e,0x7c00100,0x22040f,0x41008d19,0x7c00100,0x220400,0x41008d19,0x7c00100,0x22040f,0x41008e00,
-0x24000000,0x200000,0x41008e00,0x24000000,0x400000,0x41008e00,0x24000000,0x1710000,0x41008e00,0x24000006,0x400000,0x41008f3a,0x2802100,0x962460,0x41008f3a,0x2806000,
-0x962460,0x41008f3a,0x4000000,0x200000,0x41008f3a,0x6800100,0x962540,0x41008f3a,0x7c00100,0x230400,0x4100903c,0x7c00100,0x230400,0x4100903c,0x7c00100,0x23040f,
-0x41020701,0x2802000,0x962460,0x41020701,0x2802000,0xc62460,0x410a3700,0x24000000,0x200000,0x410a3700,0x24000000,0xe00000,0x410a4412,0x4000000,0xe00003,0x410a4711,
-0x7c40300,0xe30000,0x410a4f11,0x7c00300,0xe30001,0x410a9100,0x4000000,0x800010,0x410a9100,0x4000000,0x810010,0x410a9100,0x4000000,0x870010,0x410a9100,0x4000000,
-0xb00010,0x410a9100,0x4000000,0xf00010,0x410a9100,0x4000000,0x1001410,0x410a9100,0x4000000,0x1071010,0x410a9100,0x4000000,0x1071410,0x410a9100,0x4000000,0x1410010,
-0x41408ad0,0x4000400,0x200000,0x414a82ca,0x4000000,0xe00000,0x41808300,0x2802000,0x962460,0x41c01489,0x6800000,0x1329800,0x50000419,0x7c00100,0x220400,0x50000419,
-0x7c00100,0x250400,0x5000080e,0x7c00100,0x220400,0x50000908,0x7c00100,0x220400,0x50000908,0x7c00100,0x250400,0x50000b13,0x2802500,0x962460,0x50000f0a,0x7c00100,
-0x230400,0x50001615,0x2802100,0x962460,0x50001615,0x7c00100,0x230400,0x50002b01,0x2802020,0x962460,0x50002c00,0x4000000,0x200000,0x50002c19,0x7c00100,0x220400,
-0x50002d19,0x7c00100,0x220400,0x50003000,0x24000000,0x200000,0x50003000,0x24000020,0x200000,0x50003700,0x24000000,0x200000,0x50005d00,0x7c00120,0x220405,0x50005d00,
-0x7c00120,0x250405,0x50006108,0x7c00100,0x220400,0x50006108,0x7c00100,0x250400,0x50006600,0x24000020,0x200000,0x50007300,0x24000000,0x200000,0x50008301,0x2802400,
-0x962460,0x50008a00,0x7c00500,0x230400,0x50009257,0x2802400,0x962460,0x50009257,0x4000000,0x200000,0x50009257,0x4000010,0x1071400,0x50009257,0x6800000,0x1329800,
-0x50009257,0x7c00100,0x230400,0x50009257,0x7c00500,0x230400,0x50009257,0x7c00900,0x230400,0x50009257,0xc000010,0xb48000,0x5000933e,0x2802100,0x962460,0x5000933e,
-0x2802400,0x962460,0x5000933e,0x4000000,0x200000,0x5000933e,0x4000000,0x400000,0x5000933e,0x4000010,0x400000,0x5000933e,0x6800000,0x1329800,0x5000933e,0x6800100,
-0x962540,0x5000933e,0x6800100,0x962541,0x5000933e,0x6804400,0x962540,0x5000933e,0x7c00100,0x230400,0x5000933e,0x7c00100,0x230401,0x5000933e,0xc000010,0x448000,
-0x50009419,0x7c00100,0x220400,0x50009419,0x7c00100,0x250400,0x50009500,0x4000400,0x200400,0x5000965a,0x4000000,0x500000,0x5000965a,0x7c00100,0x230400,0x5000965a,
-0xc000010,0xb48000,0x5000975b,0x4000000,0x200000,0x5000975b,0x4000010,0x400000,0x5000975b,0x7c00100,0x230400,0x50009865,0x7c00100,0x230400,0x50009965,0x4000010,
-0x400000,0x50009965,0x7c00100,0x230400,0x50409aca,0x4000000,0x200000,0x5100080e,0x7c00100,0x220400,0x5100080e,0x7c00100,0x250400,0x51000c02,0x2802100,0x962460,
-0x51000c02,0x4000000,0x1500000,0x51000c02,0x4000020,0x200000,0x51000c02,0x7c00100,0x230400,0x51000f0a,0x7c00100,0x230400,0x51000f0a,0x7c00500,0x230400,0x51001110,
-0x2802100,0x962460,0x5100131f,0x2802100,0x962460,0x51001423,0x7c00100,0x230400,0x51001524,0x2802100,0x962460,0x51001524,0x4000000,0x200000,0x51001524,0x7c00100,
-0x230400,0x5100171a,0x2802100,0x962460,0x5100171a,0x4000000,0x200000,0x5100171a,0x4000000,0x1500000,0x5100171a,0x7c00100,0x230400,0x51001b27,0x4000000,0x200000,
-0x51001b27,0x4000000,0x400000,0x51001b27,0x4000000,0x500000,0x51001b27,0x7c00100,0x230400,0x51001c1c,0x2802100,0x1862460,0x51001c1c,0x2802500,0x1862460,0x51001c1c,
-0x2806400,0x1862460,0x51001c1c,0x4000000,0x1800000,0x51001c1c,0x6800000,0x1329800,0x51001c1c,0x6800100,0x1862400,0x51001c1c,0x6800100,0x1862540,0x51001c1c,0x6800500,
-0x1862400,0x51001c1c,0x7c00100,0x1830000,0x5100251b,0x7c00100,0x230400,0x51002619,0x7c00100,0x220400,0x51002619,0x7c00100,0x250400,0x51002800,0x80020,0x218820,
-0x51002c00,0x4000000,0x200000,0x51002d19,0x7c00100,0x230400,0x51003700,0x24000000,0x200000,0x51003700,0x24000000,0xe00000,0x51005201,0x2802400,0x962460,0x51005c00,
-0x4000000,0x200000,0x51006108,0x7c00100,0x220400,0x51006108,0x7c00100,0x250400,0x51006600,0x24000020,0x200000,0x51006600,0x24000020,0x810000,0x51006600,0x24000020,
-0x1410000,0x51007300,0x24000000,0x200000,0x51007300,0x24000020,0x200000,0x51008002,0x7c00100,0x230400,0x51008301,0x2802000,0x962460,0x51008301,0x2802400,0x962460,
-0x51008a00,0x7c00500,0x230400,0x51008e00,0x24000000,0x200000,0x51008e00,0x24000000,0x400000,0x51008e00,0x24000000,0x810000,0x51008e00,0x24000000,0x1400000,0x51008e00,
-0x24000000,0x1410000,0x51008e00,0x24000000,0x1710000,0x51008e00,0x24000002,0x200000,0x51008e00,0x24000500,0x230400,0x51008e00,0x2c000010,0xb48000,0x51009419,0x7c00100,
-0x220400,0x51009419,0x7c00100,0x22040e,0x51009419,0x7c00100,0x22040f,0x51009419,0x7c00100,0x250400,0x51009500,0x4000400,0x200400,0x51009500,0x7c00500,0x230400,
-0x51009519,0x7c00100,0x220400,0x51009519,0x7c00100,0x22040f,0x51009519,0x7c00100,0x230400,0x51009519,0x7c00100,0x250400,0x51009b71,0x2802100,0x962460,0x51009b71,
-0x6800000,0x1329800,0x51009b71,0x6800100,0x962540,0x51009b71,0x6804400,0x962540,0x51009b71,0x7c00100,0x230400,0x51009c52,0x2802100,0x962460,0x51009c52,0x2802400,
-0x962460,0x51009c52,0x2802d00,0x962460,0x51009c52,0x4000010,0x400000,0x51009c52,0x6800000,0x1329800,0x51009c52,0x6800100,0x962540,0x51009c52,0x7c00100,0x230400,
-0x51009c52,0xc000010,0x448000,0x51009d6d,0x6800000,0x1329800,0x51009d6d,0x7c00100,0x230400,0x51009d6d,0x7c00500,0x230400,0x51009d6d,0x7c00d00,0x230400,0x51009d6d,
-0xc000010,0x448000,0x51009e08,0x2802100,0x962460,0x51009f63,0x4000010,0x400000,0x51009f63,0x6800000,0x1329800,0x51009f63,0x7c00100,0x230400,0x51009f63,0x7c00900,
-0x230400,0x51009f63,0xc000010,0x448000,0x51009f63,0xc000010,0xb48000,0x5100a008,0x2000,0x962460,0x5100a008,0x2802400,0x962460,0x5100a008,0x4000000,0x200000,
-0x5100a008,0x7c00100,0x220400,0x5100a008,0x7c00100,0x230400,0x5100a008,0x7c00100,0x250400,0x5100a008,0x7c00500,0x230400,0x5100a16f,0x2806400,0x962460,0x5100a16f,
-0x6800000,0x1329800,0x5100a16f,0x6800100,0x962540,0x5100a16f,0x7c00100,0x230400,0x5100a16f,0xc000010,0x448000,0x5100a24f,0x2802100,0x962460,0x5100a24f,0x2802400,
-0x962460,0x5100a24f,0x6800000,0x1329800,0x5100a24f,0x7c00100,0x230400,0x5100a24f,0xc000010,0x448000,0x5100a36e,0x2802100,0x962460,0x5100a36e,0x4000000,0x200000,
-0x5100a36e,0x6800100,0x962540,0x5100a36e,0x6804400,0x962540,0x5100a36e,0x7c00100,0x230400,0x5100a442,0x2802100,0x962460,0x5100a442,0x4000000,0x200000,0x5100a442,
-0x6800000,0x1329800,0x5100a442,0x6800100,0x962540,0x5100a442,0x7c00100,0x230400,0x5100a442,0xc000010,0x448000,0x5100a500,0x4000000,0x200000,0x5100a600,0x4000000,
-0x200000,0x5100a601,0x2802000,0x962460,0x5100a76b,0x7c00100,0x230400,0x5100a868,0x7c00100,0x230400,0x5100a96c,0x4000000,0x200000,0x5100a96c,0x7c00100,0x230400,
-0x5100aa00,0x4000000,0xe00000,0x5100ab00,0x4000000,0xe00000,0x51086600,0x24000020,0x810000,0x51086600,0x24000020,0x1410000,0x510a4005,0x7c00100,0xe30400,0x510a4711,
-0x7c40300,0xe30000,0x510a7300,0x24000000,0x200000,0x510aaa00,0x4000000,0xe00000,0x5140a2fe,0x4000400,0x400000,0x514a82ca,0x4000000,0xe00000,0x51802bbc,0x2802000,
-0x962460,0x51c00908,0x2802400,0x962460,0x51c0a008,0x2802400,0x962460,0x52000f0a,0x2802100,0x962460,0x52000f0a,0x6800100,0x962540,0x52000f0a,0x7c00100,0x230400,
-0x52001004,0x4000000,0x1600000,0x52001b00,0x4000000,0x200000,0x52001c1c,0x2802100,0x1862460,0x52001c1c,0x6800100,0x1862400,0x52001c1c,0x6800500,0x1862400,0x52001e12,
-0x7c00100,0x2230500,0x52001e12,0x7c00100,0x2330520,0x52002128,0x4000002,0x400000,0x52002128,0x7c00100,0x230400,0x52002a00,0x4000000,0x1500000,0x52002a00,0x4000000,
-0x1600000,0x52002d00,0x4000000,0x200006,0x52003000,0x24000000,0x200000,0x52006108,0x7c00100,0x220400,0x52006108,0x7c00100,0x250400,0x52008301,0x2802400,0x962460,
-0x52008407,0x2802400,0x962460,0x52008407,0x7c00100,0x220400,0x52008407,0x7c00100,0x250400,0x52008b3b,0x6800000,0x1800000,0x52008b3b,0x7c00100,0x1830000,0x52008e00,
-0x24000000,0x400000,0x52009419,0x7c00100,0x250400,0x5200975b,0x4000000,0x200000,0x5200ac7e,0x2802000,0x962460,0x5200ac7e,0x2802100,0x962460,0x5200ac7e,0x2802400,
-0x962460,0x5200ac7e,0x4000010,0x200000,0x5200ac7e,0x7c00100,0x230400,0x5200ac7e,0xc000010,0x248000,0x5200ad28,0x7c00100,0x230400,0x5200ae6a,0x2802100,0x1862460,
-0x5200ae6a,0x2802400,0x962460,0x5200ae6a,0x2802400,0x1862460,0x5200ae6a,0x2806000,0x1862460,0x5200ae6a,0x4000000,0x1800000,0x5200ae6a,0x6800000,0x1329800,0x5200ae6a,
-0x6800100,0x1862400,0x5200ae6a,0x6800100,0x1862540,0x5200ae6a,0x7c00100,0x1830000,0x5200ae6a,0x7c00900,0x1830000,0x5200ae6a,0xc000010,0x1848000,0x5200b083,0x4000010,
-0x400000,0x5200b083,0x7c00100,0x230400,0x5200b083,0xc000010,0x448000,0x5200b182,0x2802400,0x962460,0x5200b182,0x4000000,0x200000,0x5200b182,0x4000010,0x400000,
-0x5200b182,0x7c00100,0x230400,0x5200b182,0xc000010,0x448000,0x5200b30a,0x2802400,0x962460,0x5200b30a,0x4000000,0x200000,0x5200b30a,0x7c00100,0x230400,0x5200b54e,
-0x2802100,0x962460,0x5200b54e,0x2802400,0x962460,0x5200b54e,0x4000000,0x200000,0x5200b54e,0x4000010,0x400000,0x5200b54e,0x6800000,0x1329800,0x5200b54e,0x6800100,
-0x962540,0x5200b54e,0x6804400,0x962540,0x5200b54e,0x7c00100,0x230400,0x5200b54e,0xc000010,0x448000,0x5200b61c,0x4000000,0x1800000,0x5200b61c,0x6800500,0x1862400,
-0x5200b61c,0x7c00100,0x1830000,0x5200b61c,0x7c00900,0x1830000,0x5200b77f,0x2802100,0x1862460,0x5200b77f,0x2802400,0x1862460,0x5200b77f,0x4000000,0x1800000,0x5200b77f,
-0x4000010,0x1800000,0x5200b77f,0x7c00100,0x1830000,0x5200b77f,0x7c00500,0x1830000,0x5200b77f,0x7c00900,0x1830000,0x5200b77f,0x7e00100,0x1830000,0x5200b873,0x2802100,
-0x962460,0x5200b873,0x2806400,0x962460,0x5200b873,0x6800000,0x1329800,0x5200b873,0x6800100,0x962540,0x5200b873,0x6800400,0x962540,0x5200b873,0x7c00100,0x230400,
-0x5200b873,0xc000010,0x448000,0x5200b912,0x7c00100,0x2230500,0x5200b912,0x7c00100,0x2330520,0x5200ba74,0x4000000,0x200000,0x5200ba74,0x4000010,0x400000,0x5200ba74,
-0x7c00100,0x230400,0x5200bb85,0x4000000,0x200000,0x5200bb85,0x7c00100,0x230400,0x5200bc75,0x4000000,0x400000,0x5200bc75,0x4000010,0x400000,0x5200bc75,0x7c00100,
-0x230400,0x5200bd7d,0x4000000,0x200000,0x5200bd7d,0x7c00100,0x230400,0x5200be7a,0x4000000,0x200000,0x5200be7a,0x7c00100,0x230400,0x5200bf58,0x7c00100,0x230400,
-0x5200c002,0x4000000,0x200000,0x5200c178,0x2802000,0x962460,0x5200c178,0x2802100,0x962460,0x5200c178,0x2802400,0x962460,0x5200c178,0x2806400,0x962460,0x5200c178,
-0x4000000,0x200000,0x5200c178,0x6800100,0x962540,0x5200c178,0x7c00100,0x230400,0x5200c178,0x7c00100,0x230401,0x5200c178,0xc000010,0x448000,0x5200c178,0x80000000,
-0x218960,0x5200c247,0x7c00100,0x230400,0x5200c247,0x7c00100,0x830400,0x5200c247,0x7c00100,0x1430400,0x5200c300,0x4000000,0x200003,0x52022d00,0x4000000,0x100006,
-0x52023700,0x24000000,0x100000,0x52023700,0x24000000,0xe00000,0x52023700,0x24000000,0x2800000,0x52024400,0x4000000,0x100000,0x52027300,0x24000000,0x100000,0x5202c300,
-0x4000000,0x100000,0x5202c300,0x4000000,0x100002,0x5202c300,0x4000000,0x100003,0x5202c300,0x4000000,0x10000d,0x5202c300,0x4000100,0x150400,0x5202c300,0x4000100,
-0x15040d,0x520a1e12,0x7c00100,0x2130480,0x520a3700,0x24000000,0xe00000,0x520a3800,0x24000000,0x100000,0x520a4711,0x7c40300,0xe30000,0x520a4f11,0x7c00300,0xe30001,
-0x520a7300,0x24000000,0x100000,0x520ab412,0x7c00100,0x2130480,0x520ac400,0x4000000,0xe00002,0x520ac400,0x4000000,0xe0000d,0x520ac414,0x4000000,0xe0000d,0x520ac511,
-0x7c40300,0xe30000,0x5240af9c,0x7c00100,0x230400,0x5240afa1,0x4000400,0x200000,0x5240afa3,0x6800400,0x962540,0x5240afa3,0x7c00100,0x230400,0x5240afad,0x7c00100,
-0x230400,0x5240afaf,0x7c00100,0x230400,0x5240b2d2,0x4000000,0x200000,0x5240b2d2,0x4000000,0x1500000,0x5240b2dd,0x4000000,0x200000,0x5240b2eb,0x4000000,0x200000,
-0x524a44ca,0x4000000,0xe00003,0x5250b501,0x7c00900,0x230400,0x5280af9c,0x2802400,0x962460,0x5280af9d,0x2802400,0x962460,0x5280afa3,0x2802400,0x962460,0x5280afa5,
-0x2802400,0x962460,0x5280afa7,0x2802400,0x962460,0x52c0b3f8,0x2802400,0x962460,0x52c0b3fc,0x7c00100,0x230400,0x60000c02,0x2802100,0x962460,0x60000c02,0x7c00100,
-0x230400,0x60000f0a,0x2802100,0x962460,0x60000f0a,0x6800100,0x962540,0x60000f0a,0x7c00100,0x230400,0x6000131f,0x4000000,0x200000,0x6000171a,0x7c00100,0x230400,
-0x6000171a,0x7c00100,0x230560,0x60001b27,0x2802100,0x962460,0x60001b27,0x4000000,0xc00000,0x60001b27,0x7c00100,0x230400,0x60001f0b,0x2802400,0x962460,0x60002919,
-0x7c00100,0x22040e,0x60002a00,0x4000000,0x1600000,0x60003000,0x24000000,0x200000,0x60003000,0x24000000,0xe00000,0x60003700,0x24000000,0x200000,0x60003800,0x24000000,
-0x1710000,0x60005102,0x4000000,0x200000,0x60006108,0x7c00100,0x220400,0x60006108,0x7c00100,0x250400,0x60006600,0x24000020,0x200000,0x60008301,0x2802400,0x962460,
-0x6000903c,0x2806000,0x962460,0x6000903c,0x4000000,0x400000,0x60009519,0x7c00100,0x220400,0x60009519,0x7c00100,0x250400,0x6000a008,0x7c00100,0x220400,0x6000a008,
-0x7c00100,0x250400,0x6000c300,0x4000000,0x2703580,0x6000c654,0x2802000,0x962460,0x6000c654,0x4000010,0x200000,0x6000c654,0x7c00100,0x230400,0x6000c73f,0x2802000,
-0x962460,0x6000c73f,0x2802100,0x962460,0x6000c73f,0x4000000,0x200000,0x6000c73f,0x6800100,0x962540,0x6000c73f,0x6804000,0x962540,0x6000c73f,0x7c00100,0x230400,
-0x6000c80b,0x7c00100,0x230400,0x6000c941,0x2802100,0x962460,0x6000c941,0x2806400,0x962460,0x6000c941,0x4000000,0x200000,0x6000c941,0x4000010,0x200000,0x6000c941,
-0x6800000,0x1329800,0x6000c941,0x6800100,0x962540,0x6000c941,0x7c00100,0x230400,0x6000c941,0xc000010,0x448000,0x6000ca82,0x7c00100,0x230400,0x6000cc00,0x4000000,
-0xe00000,0x6000d000,0x4000000,0x200000,0x6002c300,0x4000000,0x100000,0x6002c300,0x4000000,0x10000d,0x6002c300,0x4000100,0x150400,0x6002c300,0x4000100,0x15040d,
-0x600a3000,0x24000000,0x200000,0x600a3000,0x24000000,0xe00000,0x600a3700,0x24000000,0x200000,0x600a3800,0x24000000,0x200000,0x600a3800,0x24000000,0x2800000,0x600a4305,
-0x7c00100,0xe30400,0x600ac300,0x4000000,0x100000,0x600ac400,0x4000000,0xe0000d,0x600acb14,0x7c00100,0xe30000,0x600acb16,0x7c00100,0xe30c00,0x600acc00,0x4000000,
-0xe00000,0x600acd00,0x4000000,0x200000,0x600acd00,0x4000000,0xe00000,0x600acd00,0x4000000,0x2800000,0x600ace00,0x4000000,0xe00000,0x600ace00,0x4000000,0x2800000,
-0x600acf00,0x4000000,0xe00000,0x600acf00,0x4000000,0x2800000,0x600ad111,0x7c40300,0xe30000,0x604ac4ca,0x4000000,0xe00003,0x61000a03,0x4000000,0x1600000,0x61000c02,
-0x80000000,0x218960,0x6100120f,0x4000000,0x200000,0x61001a18,0x7c00100,0x1830000,0x61001d0c,0x7c00100,0x230400,0x61001d0c,0x7c00100,0x250400,0x61006600,0x24000020,
-0x200000,0x61008407,0x7c00100,0x220400,0x61008407,0x7c00100,0x250400,0x6100870c,0x7c00100,0x220400,0x61008e00,0x24000000,0x200000,0x61008e00,0x24000000,0x400000,
-0x61008e00,0x24000002,0x300000,0x6100903c,0x7c00100,0x230400,0x61009519,0x7c00100,0x220400,0x61009519,0x7c00100,0x250400,0x61009519,0x7c00500,0x22040f,0x61009b71,
-0x2802100,0x962460,0x61009b71,0x2806400,0x962460,0x61009b71,0x7c00100,0x230400,0x6100a008,0x2802100,0x962460,0x6100c300,0x4000000,0x20000f,0x6100cd00,0x4000000,
-0x200000,0x6100d202,0x2802400,0x962460,0x6100d202,0x2802500,0x962460,0x6100d202,0x7c00100,0x230400,0x6100d302,0x4000020,0x200000,0x6100d302,0x7c00120,0x230405,
-0x6100d476,0x2802100,0x962460,0x6100d476,0x2802100,0x962461,0x6100d476,0x2806400,0x962460,0x6100d476,0x4000000,0x400000,0x6100d476,0x6800000,0x1329800,0x6100d476,
-0x6800100,0x962540,0x6100d476,0x7c00100,0x230400,0x6100d476,0xc000010,0x448000,0x6100d573,0x2802100,0x962460,0x6100d573,0x2806400,0x962460,0x6100d573,0x6800100,
-0x962540,0x6100d573,0x7c00100,0x230400,0x6100d573,0x7c00900,0x230400,0x6100d573,0xc000010,0x448000,0x6100d68d,0x7c00100,0x230400,0x6100d756,0x7c00100,0x230400,
-0x6100d85c,0x2802500,0x962460,0x6100d85c,0x6800100,0x962540,0x6100d85c,0x7c00100,0x230400,0x6100d85c,0x7c00500,0x230400,0x6100d997,0x2802100,0x962460,0x6100d997,
-0x4000000,0x200000,0x6100d997,0x4000000,0x400000,0x6100d997,0x6800000,0x1329800,0x6100d997,0x6800100,0x962540,0x6100d997,0x6804400,0x962540,0x6100d997,0x7c00100,
-0x230400,0x6100d997,0x7c00100,0x230560,0x6100d997,0xc000010,0x448000,0x6100da98,0x6800000,0x1329800,0x6100da98,0x7c00100,0x230400,0x6100db71,0x4000000,0x200000,
-0x6100dc99,0x2802100,0x962460,0x6100dc99,0x2802400,0x962460,0x6100dc99,0x6800000,0x1329800,0x6100dc99,0x6800100,0x962540,0x6100dc99,0x6804400,0x962540,0x6100dc99,
-0x7c00100,0x230400,0x610a4711,0x7c40300,0xe30000,0x610a4f11,0x7c00300,0xe30001,0x610ace00,0x4000000,0xe00000,0x6140afa1,0x7c00100,0x230400,0x6140afa3,0x7c00100,
-0x230400,0x6180af9e,0x2802400,0x962460,0x62002a00,0x4000000,0x1600000,0x63002800,0x80000,0x918820,0x63c00c15,0x80000,0x918820,0x7000080e,0x7c00100,0x250400,
-0x70000a03,0x4000000,0x200000,0x70000c00,0x80000000,0x218960,0x70000f0a,0x7c00100,0x230400,0x70001004,0x7c00100,0x230400,0x70001524,0x2802100,0x962460,0x70001524,
-0x7c00100,0x230400,0x70001615,0x2802100,0x962460,0x7000171a,0x2802100,0x962460,0x70001821,0x6800000,0x1329800,0x70002320,0x7c00100,0x230400,0x70002a00,0x4000000,
-0x1500000,0x70002a00,0x4000000,0x1600000,0x70003000,0x24000000,0x200000,0x70003800,0x24000000,0xe00000,0x70005201,0x2802400,0x962460,0x7000581e,0x7c00100,0x230400,
-0x70006108,0x7c00100,0x220400,0x70006108,0x7c00100,0x250400,0x70006f30,0x7c00100,0x230400,0x70007300,0x24000000,0x200000,0x70007f0e,0x4000000,0x200000,0x70008301,
-0x2802100,0x962460,0x70008301,0x2802400,0x962460,0x70008e00,0x24000000,0x200000,0x70008e00,0x24000000,0x400000,0x70008e00,0x24000002,0x400000,0x70008e00,0x24000008,
-0x1410000,0x70008e00,0x24000010,0x400000,0x70008e00,0x2c000010,0x448000,0x70009519,0x7c00100,0x220400,0x70009519,0x7c00100,0x230400,0x70009519,0x7c00100,0x250400,
-0x70009865,0x7c00100,0x230400,0x70009965,0x4000010,0x400000,0x70009965,0x7c00100,0x230400,0x7000a008,0x7c00100,0x220400,0x7000a008,0x7c00100,0x250400,0x7000a008,
-0x7c00500,0x22040f,0x7000a50e,0x4000000,0x200000,0x7000b61c,0x2802500,0x1862460,0x7000b61c,0x6800500,0x1862400,0x7000b61c,0x7c00100,0x1830000,0x7000c300,0x4000000,
-0x100000,0x7000c941,0x2806000,0x962460,0x7000cc00,0x4000000,0xe00000,0x7000cd00,0x4000000,0x200000,0x7000cd00,0x4000000,0xe00000,0x7000cd00,0x4000000,0x2800000,
-0x7000cf00,0x4000000,0xe00000,0x7000d202,0x2802100,0x962460,0x7000d202,0x7c00100,0x230400,0x7000d997,0x7c00100,0x230400,0x7000d997,0xc000010,0x248000,0x7000dd86,
-0x2802400,0x962460,0x7000dd86,0x7c00100,0x230400,0x7000dd86,0xc000010,0x448000,0x7000de9f,0x4000000,0x200000,0x7000de9f,0x7c00100,0x230400,0x7000e001,0x2400,
-0x962460,0x7000e001,0x2802400,0x962460,0x7000e187,0x2802000,0x962460,0x7000e187,0x2802100,0x962460,0x7000e187,0x4000000,0x200000,0x7000e187,0x7c00100,0x230400,
-0x7000e187,0xc000010,0x448000,0x7000e288,0x7c00100,0x230400,0x7000e300,0x4000000,0x200000,0x7000e489,0x2802100,0x962460,0x7000e489,0x2802400,0x962460,0x7000e489,
-0x6800100,0x962540,0x7000e489,0x6800100,0x962541,0x7000e489,0x6804400,0x962540,0x7000e489,0x7c00100,0x230400,0x7000e489,0x7c00900,0x230400,0x7000e59d,0x2802100,
-0x962460,0x7000e59d,0x2802400,0x962460,0x7000e59d,0x4000000,0x200000,0x7000e59d,0x4000010,0x200000,0x7000e59d,0x6800100,0x962540,0x7000e59d,0x6804400,0x962540,
-0x7000e59d,0x7c00100,0x230400,0x7000e59d,0xc000010,0x448000,0x7000e691,0x2802100,0x962460,0x7000e691,0x2802400,0x962460,0x7000e691,0x2806400,0x962460,0x7000e691,
-0x6800000,0x1329800,0x7000e691,0x6800100,0x962540,0x7000e691,0x7c00100,0x230400,0x7000e700,0x4000400,0x200400,0x7000e70e,0x7c00100,0x220400,0x7000e719,0x7c00100,
-0x220400,0x7000e719,0x7c00500,0x22040f,0x7000e853,0x7c00100,0x230400,0x7000e9a0,0x2802400,0x962460,0x7000e9a0,0x4000000,0x200000,0x7000e9a0,0x4000000,0x500000,
-0x7000e9a0,0x7c00100,0x230400,0x7000ea79,0x2802400,0x962460,0x7000ea79,0x4000000,0x200000,0x7000ea79,0x4000000,0xf00000,0x7000ea79,0x4000010,0x400000,0x7000ea79,
-0x7c00100,0x230400,0x7000eb8c,0x2802400,0x962460,0x7000eb8c,0x4000000,0x200000,0x7000eb8c,0x7c00100,0x230400,0x7000eca3,0x2802100,0x962460,0x7000eca3,0x2806400,
-0x962460,0x7000eca3,0x4000000,0x200000,0x7000eca3,0x6800000,0x1329800,0x7000eca3,0x6800100,0x962540,0x7000eca3,0x7c00100,0x230400,0x7000eca3,0xc000010,0x448000,
-0x7000ed95,0x6800000,0x1329800,0x7000ed95,0x7c00100,0x230400,0x7000ed95,0xc000010,0x448000,0x7000ee1c,0x2802500,0x1862460,0x7000ee1c,0x6800000,0x1329800,0x7000ee1c,
-0x7c00100,0x1830000,0x7000ee1c,0x7c00900,0x1830000,0x7000ef8f,0x4000000,0x200000,0x7000ef8f,0x7c00100,0x230400,0x7000f08e,0x4000000,0x200000,0x7000f08e,0x7c00100,
-0x230400,0x7000f159,0x2802100,0x962460,0x7000f159,0x7c00100,0x230400,0x7000f200,0x4000000,0x200000,0x7000f200,0x4000000,0x1200000,0x7000f200,0x4000000,0x1710000,
-0x7000f34b,0x2802400,0x962460,0x7000f34b,0x4000000,0x200000,0x7000f34b,0x4000010,0x400000,0x7000f34b,0x6800000,0x1329800,0x7000f34b,0x7c00100,0x230400,0x7000f34b,
-0x7c00900,0x230400,0x7000f34b,0xc000010,0x448000,0x7000f490,0x4000000,0x200000,0x7000f490,0x7c00100,0x230400,0x7000f5a5,0x7c00100,0x230400,0x7000f67b,0x4000000,
-0x200000,0x7000f67b,0x4000010,0x200000,0x7000f67b,0x7c00100,0x230400,0x7000f8a6,0x2802100,0x962460,0x7000f8a6,0x2802400,0x962460,0x7000f8a6,0x2806400,0x962460,
-0x7000f8a6,0x4000000,0x500000,0x7000f8a6,0x4000010,0xb00000,0x7000f8a6,0x4000800,0x200000,0x7000f8a6,0x6800100,0x962540,0x7000f8a6,0x6800100,0x962541,0x7000f8a6,
-0x7c00100,0x230400,0x7000f8a6,0xc000010,0x448000,0x7000f921,0x4000000,0x200000,0x7000fa00,0x4000000,0x200000,0x7000fb9e,0x2802100,0x962460,0x7000fb9e,0x2802400,
-0x962460,0x7000fb9e,0x2806400,0x962460,0x7000fb9e,0x4000000,0x200000,0x7000fb9e,0x6800000,0x1329800,0x7000fb9e,0x6800100,0x962540,0x7000fb9e,0x6800100,0x962541,
-0x7000fb9e,0x7c00100,0x230400,0x7000fc92,0x4000000,0x200000,0x7000fc92,0x6800000,0x1329800,0x7000fc92,0x7c00100,0x220400,0x7000fc92,0x7c00100,0x230400,0x7000fc92,
-0x7c00100,0x250400,0x700acd00,0x4000000,0xe00000,0x700acd00,0x4000000,0x2800000,0x700ace00,0x4000000,0xe00000,0x700acf00,0x4000000,0xe00000,0x700acf00,0x4000000,
-0x2800000,0x7050df11,0x4000000,0x200000,0x7050f719,0x80000,0x918820,0x7080afa1,0x2802400,0x962460,0x7090df11,0x2802400,0x962460,0x70d0e417,0x2802100,0x962460,
-0x70d0e417,0x2802400,0x962460,0x70d0e417,0x6800100,0x962540,0x70d0ea15,0x4000010,0x400000,0x8000120f,0x7c00100,0x230400,0x80001524,0x7c00100,0x230400,0x8000171a,
-0x7c00100,0x230400,0x80002006,0x7c00100,0x220400,0x80002006,0x7c00100,0x250400,0x80002a00,0x4000000,0x1500000,0x80002d00,0x4000000,0x200000,0x80005208,0x2802400,
-0x962460,0x80005c00,0x4000000,0x200000,0x80007300,0x24000000,0x200000,0x80009519,0x7c00100,0x220400,0x80009519,0x7c00100,0x230400,0x80009519,0x7c00100,0x250400,
-0x80009865,0x7c00100,0x230400,0x8000a008,0x2802100,0x962460,0x8000b30a,0x4000000,0x500000,0x8000b30a,0x7c00100,0x230400,0x8000cd00,0x4000000,0xe00000,0x8000d202,
-0x2802500,0x962460,0x8000d202,0x7c00100,0x230400,0x8000d68d,0x4000000,0x200000,0x8000d997,0x2802000,0x962460,0x8000d997,0x2802400,0x962460,0x8000d997,0x4000000,
-0x400000,0x8000d997,0x4000000,0x500000,0x8000d997,0x7c00100,0x230400,0x8000d997,0xc000010,0x448000,0x8000e489,0x2802100,0x962460,0x8000e489,0x7c00100,0x230400,
-0x8000e719,0x7c00100,0x220400,0x8000f8a6,0x2802100,0x962460,0x8000f8a6,0x7c00100,0x230400,0x8000f8a6,0xc000010,0x448000,0x8000fda1,0x2802100,0x1862460,0x8000fda1,
-0x2806400,0x1862460,0x8000fda1,0x4000000,0x1800000,0x8000fda1,0x6800000,0x1329800,0x8000fda1,0x6800100,0x1862400,0x8000fda1,0x6800100,0x1862540,0x8000fda1,0x7c00100,
-0x1830000,0x8000fda1,0xc000010,0x448000,0x8000fe9c,0x7c00100,0x230400,0x8000fe9c,0x7c00100,0x830400,0x8000fe9c,0x7c00100,0x1430400,0x8000ff06,0x7c00100,0x220400,
-0x80010165,0x7c00100,0x230400,0x800102a2,0x4000000,0x200000,0x800102a2,0x7c00100,0x230400,0x800103a4,0x7c00100,0x230400,0x800103a4,0xc000010,0x448000,0x8001044c,
-0x4000000,0x200000,0x8001044c,0x7c00100,0x220400,0x8001044c,0x7c00100,0x250400,0x80010670,0x2802000,0x962460,0x80010670,0x4000000,0x200000,0x80010670,0x4000010,
-0x400000,0x80010670,0xc000010,0x448000,0x800a4711,0x7c40300,0xe30000,0x800acd00,0x4000000,0xe00000,0x800acd00,0x4000000,0x2902460,0x800ace00,0x4000000,0xe00000,
-0x800acf00,0x4000000,0xe00000,0x800b0011,0x7c40300,0xe30000,0x800b0500,0x4000000,0xe00000,0x800b0500,0x4000000,0x2800000,0x90001615,0x7c00100,0x230400,0x9000171a,
-0x4000000,0x200000,0x9000171a,0x7c00100,0x230400,0x90003000,0x24000000,0x200000,0x90007f0e,0x4000000,0x200000,0x90008301,0x2802400,0x962460,0x90008e00,0x24000000,
-0x400000,0x90009519,0x7c00100,0x250400,0x9000a16f,0x2802100,0x962460,0x9000d200,0x80000000,0x218960,0x9000d202,0x2802000,0x962460,0x9000d202,0x2802100,0x962460,
-0x9000d202,0x7c00100,0x230400,0x9000e59d,0x2802100,0x962460,0x90010500,0x4000000,0xe00000,0x900107a7,0x2802100,0x962460,0x900107a7,0x2802400,0x962460,0x900107a7,
-0x2802c00,0x962460,0x900107a7,0x4000000,0x1400000,0x900107a7,0x6800000,0x1329800,0x900107a7,0x7c00100,0x220400,0x900107a7,0x7c00100,0x250400,0x900108a8,0x2802100,
-0x962460,0x900108a8,0x2806400,0x962460,0x900108a8,0x4000000,0x200000,0x900108a8,0x4000000,0x400000,0x900108a8,0x4000010,0x400000,0x900108a8,0x6800000,0x1329800,
-0x900108a8,0x6800100,0x962540,0x900108a8,0x7c00100,0x230400,0x900108a8,0xc000010,0x448000,0x90010908,0x7c00100,0x220400,0x90010a38,0x2802100,0x962460,0x90010ca9,
-0x2802100,0x962460,0x90010ca9,0x4000000,0x500000,0x90010ca9,0x4000010,0xb00000,0x90010ca9,0x6800100,0x962540,0x90010ca9,0x7c00100,0x230400,0x90010d1b,0x4000000,
-0x500000,0x90010eaa,0x2802100,0x962460,0x90010eaa,0x2802400,0x962460,0x90010eaa,0x2806400,0x962460,0x90010eaa,0x4000000,0x200000,0x90010eaa,0x4000000,0x400000,
-0x90010eaa,0x4000010,0x400000,0x90010eaa,0x6800000,0x1329800,0x90010eaa,0x6800100,0x962540,0x90010eaa,0x7c00100,0x230400,0x90010eaa,0xc000010,0x448000,0x90010fab,
-0x7c00100,0x220400,0x90010fab,0x7c00100,0x250400,0x9002c300,0x4000000,0x100000,0x900ac400,0x4000000,0xe0000d,0x900acd00,0x4000000,0xe00000,0x900acd00,0x4000000,
-0x2800000,0x900acf00,0x4000000,0xe00000,0x900b0500,0x4000000,0xe00000,0x900b0500,0x4000000,0x2800000,0x900b0b9a,0x7c00900,0x1230400,0x900b109a,0x7c00300,0xe30000,
-0x900b119a,0x7c00300,0xe30000,0x90408e06,0x24000000,0x400000,0xa0001004,0x4000000,0x200000,0xa0001004,0x7c00100,0x230400,0xa000120f,0x2802100,0x962460,0xa000120f,
-0x2802400,0x962460,0xa000171a,0x2802100,0x962460,0xa000171a,0x2806400,0x962460,0xa0002a00,0x4000000,0x1600000,0xa0003000,0x24000000,0x200000,0xa000581e,0x7c00100,
-0x230400,0xa0007300,0x24000000,0x200000,0xa0008301,0x2802400,0x962460,0xa0008e00,0x24000000,0x400000,0xa000cf00,0x4000000,0xe00000,0xa0010500,0x4000000,0x200000,
-0xa00114af,0x2802100,0x962460,0xa00114af,0x2802400,0x962460,0xa00114af,0x2806400,0x962460,0xa00114af,0x6800000,0x1329800,0xa00114af,0x7c00100,0x230400,0xa00114af,
-0x7c00100,0x230560,0xa00116b0,0x2802100,0x962460,0xa00116b0,0x2802800,0x962460,0xa00116b0,0x2806400,0x962460,0xa00116b0,0x4000000,0x400000,0xa00116b0,0x4000000,
-0x500000,0xa00116b0,0x4000010,0x400000,0xa00116b0,0x6800100,0x962540,0xa00116b0,0x7c00100,0x230400,0xa00116b0,0x7c00100,0x230560,0xa00116b0,0xc000010,0x448000,
-0xa0011722,0x7c00100,0x230400,0xa00118b1,0x2802000,0x962460,0xa00118b1,0x2802100,0x962460,0xa00118b1,0x2806400,0x962460,0xa00118b1,0x4000000,0x200000,0xa00118b1,
-0x4000000,0x400000,0xa00118b1,0x4000000,0x500000,0xa00118b1,0x6800100,0x962540,0xa00118b1,0x7c00100,0x230400,0xa00118b1,0x7c00100,0x230560,0xa00118b1,0xc000010,
-0x448000,0xa00a4005,0x7c00100,0xe30400,0xa00a4711,0x7c40300,0xe30000,0xa00ac400,0x4000000,0xe00000,0xa00acb14,0x7c00100,0xe30000,0xa00acf00,0x4000000,0xe00000,
-0xa00b0500,0x4000000,0xe00000,0xa00b0500,0x4000000,0x2800000,0xa00b0b96,0x7c00900,0x1230400,0xa00b1211,0x7c40300,0xe30000,0xa00b1314,0x7c00100,0xe30000,0xa00b1596,
-0x7c00300,0xe30000,0xa040afb7,0x6800400,0x962540,0xa08083b8,0x2802400,0x962460,0xb0000a03,0x7c00100,0x220400,0xb0000b13,0x7c00100,0x2633800,0xb0001004,0x2802000,
-0x962460,0xb0001110,0x4000000,0x200000,0xb0001524,0x2802000,0x962460,0xb0001615,0x4000000,0x500000,0xb000251b,0x7c00100,0x230400,0xb0007300,0x24000000,0x200000,
-0xb0008939,0x4000000,0x200000,0xb0008939,0x7c00100,0x230400,0xb0008e00,0x24000000,0x200000,0xb0008e00,0x24000000,0x400000,0xb0008e00,0x24000010,0x400000,0xb0009257,
-0x2802000,0x962460,0xb0009257,0x4000000,0x1600000,0xb0009519,0x7c00100,0x220400,0xb0009519,0x7c00100,0x250400,0xb0009a00,0x4000000,0x200000,0xb000b30a,0x2802100,
-0x962460,0xb000b30a,0x7c00100,0x230400,0xb000c178,0x80000000,0x218960,0xb000c300,0x4000000,0x200000,0xb000d202,0x2802000,0x962460,0xb000d476,0x6800100,0x962540,
-0xb000d476,0x7c00100,0x230400,0xb000e300,0x4000000,0xe00000,0xb000fda1,0x7c00100,0x1830000,0xb0010eaa,0x2802000,0x962460,0xb00116b0,0x7c00100,0x230400,0xb0011900,
-0x4000000,0xe00000,0xb0011ab2,0x2802100,0x962460,0xb0011ab2,0x2802400,0x962460,0xb0011ab2,0x2806400,0x962460,0xb0011ab2,0x4000000,0x200000,0xb0011ab2,0x6800100,
-0x962540,0xb0011ab2,0x7c00100,0x230400,0xb0011b0c,0x7c00100,0x230400,0xb0011cb3,0x2802100,0x962460,0xb0011cb3,0x2806400,0x962460,0xb0011cb3,0x6800000,0x1329800,
-0xb0011cb3,0x6800100,0x962540,0xb0011cb3,0x7c00100,0x230400,0xb0011db6,0x2802500,0x962460,0xb0011db6,0x6800000,0x1329800,0xb0011db6,0x7c00100,0x230400,0xb0011db6,
-0x7c00500,0x230400,0xb0011e00,0x4000000,0x200000,0xb0011e00,0x4000000,0x1500000,0xb0011fb4,0x2802100,0x962460,0xb0011fb4,0x6800100,0x962540,0xb0011fb4,0x7c00100,
-0x230400,0xb0011fb4,0xc000010,0x248000,0xb0012000,0x4000000,0x200000,0xb00121b5,0x4000000,0x200000,0xb00121b5,0x4000010,0x400000,0xb00121b5,0x7c00100,0x220400,
-0xb00121b5,0x7c00100,0x250400,0xb00121b5,0xc000010,0x448000,0xb00122b8,0x4000000,0x200000,0xb00122b8,0x7c00100,0x230400,0xb00123b7,0x2802400,0x962460,0xb00123b7,
-0x4000000,0x200000,0xb00123b7,0x7c00100,0x230400,0xb00123b7,0xc000010,0x248000,0xb00a4005,0x7c00100,0xe30400,0xb00a4711,0x7c40300,0xe30000,0xb00acf00,0x4000000,
-0xe00000,0xb00b0500,0x4000000,0xe00000,0xb00b0500,0x4000000,0x2800000,0xb00b109a,0x7c00300,0xe30000,0xb080e487,0x2802000,0x962460,0xc0001524,0x4000000,0x500000,
-0xc0001a18,0x2806400,0x1862460,0xc0001a18,0x7c00100,0x1830000,0xc0007300,0x24000000,0x200000,0xc0008e00,0x24000010,0x400000,0xc0009519,0x7c00100,0x220400,0xc0009519,
-0x7c00100,0x250400,0xc000c300,0x4000000,0x20000f,0xc000d85c,0x2802100,0x962460,0xc000d85c,0x6800100,0x962540,0xc000d85c,0x7c00100,0x230400,0xc000dc99,0x7c00100,
-0x230400,0xc000e719,0x7c00100,0x220400,0xc00107a7,0x7c00100,0x230400,0xc0010eaa,0x7c00100,0x230400,0xc00116b0,0x7c00100,0x230560,0xc0011900,0x4000000,0x200000,
-0xc0012447,0,0x818820,0xc0012447,0,0xc18820,0xc0012447,0,0x1418820,0xc00125b9,0x7c00100,0x230400,0xc00126bb,0x2802100,0x962460,0xc00126bb,
-0x2806400,0x962460,0xc00126bb,0x4000000,0x500000,0xc00126bb,0x6800100,0x962540,0xc00126bb,0x7c00100,0x230400,0xc00127ba,0x2802400,0x962460,0xc00127ba,0x4000000,
-0x200000,0xc00127ba,0x6800000,0x1329800,0xc00127ba,0x7c00100,0x230400,0xc00127ba,0x7c00900,0x230400,0xc0012800,0x4000000,0x200000,0xc0012b23,0x4000000,0x200000,
-0xc0012b23,0x4000000,0x400000,0xc0012b23,0x4000000,0x1500000,0xc0012cbc,0x2802400,0x962460,0xc0012cbc,0x4000000,0x1600000,0xc0012cbc,0x6800000,0x1329800,0xc0012cbc,
-0x7c00100,0x230400,0xc00acf00,0x4000000,0xe00000,0xc00ae300,0x4000000,0xe00000,0xc00b0500,0x4000000,0xe00000,0xc00b0500,0x4000000,0x2800000,0xc00b0b11,0x4000000,
-0x1200000,0xc00b0b11,0x7c00900,0x1230400,0xc00b109a,0x7c00300,0xe30000,0xc00b2914,0x7c00100,0x2530000,0xc00b2916,0x7c00100,0x2530c00,0xc00b2a00,0x4000000,0xe00000,
-0xc040af5e,0x7c00100,0x230400,0xc0c12b89,0x4000000,0x200000,0xc14a44ca,0x4000000,0xe0000d,0xd000131f,0x2802c00,0x962460,0xd000171a,0x7c00100,0x230400,0xd0001821,
-0x2802100,0x962460,0xd0007300,0x24000000,0x200000,0xd0008e00,0x24000000,0x200000,0xd0008f3a,0x2806000,0x962460,0xd0009519,0x7c00100,0x220400,0xd0009519,0x7c00100,
-0x250400,0xd000a500,0x4000000,0x200000,0xd000c300,0x4000000,0xe00000,0xd000d202,0x7c00100,0x230400,0xd000d476,0x7c00100,0x230400,0xd000d997,0x2802100,0x962460,
-0xd000d997,0x6800100,0x962540,0xd000e001,0x2802100,0x962460,0xd000e700,0x4000400,0x200000,0xd000e719,0x7c00100,0x220400,0xd000e719,0x7c00500,0x23040f,0xd000fa00,
-0x4000000,0xe00000,0xd0010eaa,0x4000010,0x400000,0xd0010eaa,0x7c00100,0x230400,0xd0012dbd,0x4000000,0x200000,0xd0012dbd,0x7c00100,0x230400,0xd0012fbe,0x2802100,
-0x962460,0xd0012fbe,0x2802400,0x962460,0xd0012fbe,0x2806400,0x962460,0xd0012fbe,0x4000000,0x400000,0xd0012fbe,0x6800000,0x1329800,0xd0012fbe,0x6800100,0x962540,
-0xd0012fbe,0x6800100,0x962541,0xd0012fbe,0x6804400,0x962540,0xd0012fbe,0x7c00100,0x230400,0xd0012fbe,0x7c00100,0x230560,0xd0012fbe,0xc000010,0x448000,0xd0013183,
-0x7c00100,0x230400,0xd0013200,0x4000000,0x200000,0xd0013200,0x6800000,0x1329805,0xd00134c0,0x2802100,0x962460,0xd00134c0,0x4000002,0x400000,0xd00134c0,0x7c00100,
-0x230400,0xd00a4305,0x7c00100,0xe30400,0xd00a4611,0x7c40300,0xe30000,0xd00a4711,0x7c40300,0xe30000,0xd00a5e11,0x7c40300,0xe30000,0xd00acf00,0x4000000,0xe00000,
-0xd00b0500,0x4000000,0xe00000,0xd00b0500,0x4000000,0x2800000,0xd00b0b11,0x6800500,0x962540,0xd00b0bbf,0x2802200,0xc62460,0xd00b119a,0x7c00300,0xe30000,0xd00b2a00,
-0x4000000,0xe00000,0xd00b2e11,0x7c40300,0xe30000,0xd00b30bf,0x7c00300,0x230000,0xd00b339a,0x7c00300,0xe30000,0xe0000c02,0xc000010,0xb48000,0xe0001524,0x2802400,
-0x962460,0xe0001524,0x7c00100,0x230400,0xe0001615,0x7c00100,0x230400,0xe000251b,0x12882000,0x962460,0xe0002a00,0x4000000,0x1500000,0xe0005102,0x4000000,0x200000,
-0xe0005c00,0x4000000,0x200000,0xe000622a,0x6804400,0x962540,0xe000622a,0x7c00100,0x230400,0xe0008838,0x7c00100,0x220400,0xe0008838,0x7c00100,0x250400,0xe0008e00,
-0x24000000,0x810000,0xe0008e00,0x24000000,0x1410000,0xe0008e00,0x24000002,0x400000,0xe0008e00,0x2c000010,0xb48000,0xe000933e,0x7c00100,0x230400,0xe000933e,0xc000010,
-0x448000,0xe0009519,0x7c00100,0x220400,0xe0009519,0x7c00100,0x23040f,0xe0009519,0x7c00100,0x250400,0xe000c178,0x2802100,0x962460,0xe000c941,0x2802100,0x962460,
-0xe000c941,0x2806400,0x962460,0xe000c941,0x7c00100,0x230400,0xe000d202,0x2802400,0x962460,0xe000d202,0x7c00100,0x230400,0xe000d202,0x7c00500,0x230400,0xe000dc99,
-0x4000000,0x200000,0xe000e001,0x2802100,0x962460,0xe000e001,0x2802400,0x962460,0xe000fda1,0x7c00100,0x1830000,0xe0013502,0x2802400,0x962460,0xe0013502,0x4000000,
-0x200000,0xe0013502,0x7c00100,0x230400,0xe0013502,0x80000000,0x218960,0xe00136c1,0x4000000,0x200000,0xe00136c1,0x7c00100,0x230400,0xe001370b,0x7c00100,0x230400,
-0xe0013919,0x7c00500,0x220400,0xe0013919,0x7c00500,0x22040f,0xe0013919,0x7c00d00,0x23040f,0xe0013a19,0x7c00100,0x220400,0xe0013a19,0x7c00100,0x230400,0xe0013bc2,
-0x2802400,0x962460,0xe0013bc2,0x7c00100,0x230400,0xe0013bc2,0xc000010,0x248000,0xe0013cc3,0x6800000,0x1329800,0xe0013cc3,0x7c00100,0x230400,0xe0013dc4,0x2802400,
-0x962460,0xe0013dc4,0x7c00100,0x230400,0xe0013e28,0x7c00100,0x230400,0xe0013fc5,0x7c00100,0x220400,0xe0013fc5,0x7c00100,0x250400,0xe0014000,0x4000000,0x200000,
-0xe0014001,0x2802400,0x962460,0xe00a4711,0x7c40300,0xe30000,0xe00a5e11,0x7c40300,0xe30000,0xe00ac511,0x7c40300,0xe30000,0xe00acf00,0x4000000,0xe00000,0xe00ae300,
-0x4000000,0xe00000,0xe00b0500,0x4000000,0xe00000,0xe00b1314,0x7c00100,0xe30000,0xe00b1316,0x7c00100,0xe30c00,0xe00b2a00,0x4000000,0xe00000,0xe00b2a00,0x4000000,
-0x2800000,0xe00b3816,0x7c00500,0x230c00,0xe0808328,0x2802400,0x962460};
+0,0xc167,0,0,0xc367,0,0xe00000,0xc467,0,0xe00000,0xc667,0,0,0xc767,0,0,
+0xc867,0,0,0xc967,0,0,0xca67,0,0,0xcc67,0,0xe00000,0xcf67,0,0xe00000,0xd067,
+0,0xe00000,0xd367,0,0,0xd467,0,0,0xd567,0,0,0xd667,0,0,0xd867,0,
+0,0xda67,0,0,0xdb67,0,0,0xdc67,0,0,0xdd67,0,0,0xde67,0,0,
+0xdf67,0,0,0xe067,0,0,0xe167,0,0,0xe267,0,0,0xe367,0,0xe00000,0xe467,
+0,0,0xe567,0,0,0xe667,0,0,0xe767,0,0,0xe867,0,0,0xe967,0,
+0,0xea67,0,0,0xeb67,0,0,0xec67,0,0,0xed67,0,0,0xee67,0,0,
+0xef67,0,0,0xf167,0,0,0xf367,0,0,0xf567,0,0,0xf667,0,0,0xf767,
+0,0,0xf867,0,0,0xf967,0,0,0xfa67,0,0xe00000,0xfb67,0,0,0xfc67,0,
+0,0xfd67,0,0,0xfe67,0,0,0x10167,0,0,0x10267,0,0,0x10367,0,0,
+0x10467,0,0,0x10667,0,0,0x10767,0,0,0x10867,0,0,0x10967,0,0,0x10a67,
+0,0,0x10b67,0,0,0x10c67,0,0,0x10d67,0,0,0x10e67,0,0,0x10f67,0,
+0,0x11067,0,0,0x11367,0,0,0x11467,0,0,0x11567,0,0,0x11667,0,0,
+0x11767,0,0,0x11867,0,0,0x11967,0,0xe00000,0x11a67,0,0,0x11b67,0,0,0x11c67,
+0,0,0x11d67,0,0,0x11e67,0,0,0x11f67,0,0,0x12067,0,0,0x12167,0,
+0,0x12267,0,0,0x12367,0,0,0x12467,0,0,0x12567,0,0,0x12667,0,0,
+0x12767,0,0,0x12867,0,0,0x12967,0,0,0x12a67,0,0xe00000,0x12b67,0,0,0x12c67,
+0,0,0x12d67,0,0,0x12f67,0,0,0x13067,0,0,0x13167,0,0,0x13267,0,
+0,0x13367,0,0,0x13467,0,0,0x13567,0,0,0x13667,0,0,0x13767,0,0,
+0x13867,0,0,0x13967,0,0,0x13a67,0,0,0x13b67,0,0,0x13c67,0,0,0x13d67,
+0,0,0x13f67,0,0,0x14067,0,0,0x14167,0,0,0x14367,0,0,0x14467,0,
+0,0x14567,0,0,0x14667,0,0,0x14767,0,0,0xa0067,0,0xe00000,0xa4f67,0,0xe00000,
+0xa5f67,0,0xe00000,0xac567,0,0xe00000,0xad167,0,0xe00000,0xb0067,0,0xe00000,0xb1267,0,0xe00000,0xb2e67,
+0,0xe00000,0x11000100,0,0x900020,0x11000100,0x40000001,0x440020,0x11000100,0x40000001,0x643020,0x11000100,0x40000001,0xa5a040,0x11000100,0x40000001,
+0x116a8a0,0x11000200,0,0x900020,0x11000200,0x4000001,0xc4000b,0x11000200,0x7c00100,0x220402,0x11000200,0x24000000,0x200000,0x11000200,0x24000008,0x1710000,
+0x11000200,0x40000001,0x1d3b020,0x11000219,0x7c00100,0x220401,0x11000219,0x7c00100,0x250401,0x11000319,0x7c00100,0x220401,0x11000319,0x7c00100,0x220402,0x11000319,
+0x7c00100,0x250400,0x11000319,0x7c00100,0x250401,0x11000419,0x7c00100,0x220400,0x11000419,0x7c00100,0x220401,0x11000419,0x7c00100,0x220402,0x11000419,0x7c00100,
+0x230400,0x11000419,0x7c00100,0x250400,0x11000419,0x7c00100,0x250401,0x11000419,0x7c00100,0x250402,0x11000519,0x7c00100,0x220400,0x11000519,0x7c00100,0x230400,
+0x11000600,0x4000400,0x200002,0x11000600,0x4000400,0x200400,0x11000600,0x7c00500,0x220400,0x11000600,0x7c00500,0x230400,0x11000600,0x7c00500,0x530400,0x11000600,
+0x7c00d00,0x230400,0x11000619,0x7c00500,0x22040f,0x11000800,0x4000010,0x1001401,0x11000800,0x4000400,0x200001,0x11000800,0x6800010,0x201001,0x11000800,0x7c00500,
+0x230401,0x11000807,0x7c00100,0x220400,0x11000807,0x7c00100,0x250400,0x1100080e,0x4000400,0x200000,0x1100080e,0x4000400,0x200002,0x1100080e,0x7000500,0x220402,
+0x1100080e,0x7c00100,0x220400,0x1100080e,0x7c00100,0x220401,0x1100080e,0x7c00100,0x220402,0x1100080e,0x7c00100,0x250400,0x1100080e,0x7c00100,0x250401,0x1100080e,
+0x7c00120,0x220402,0x1100080e,0x7c00120,0x250402,0x11000908,0x4000000,0x200000,0x11000908,0x7c00100,0x220400,0x11000908,0x7c00100,0x220401,0x11000908,0x7c00100,
+0x250400,0x11000908,0x7c00100,0x250401,0x11000a03,0x4000000,0x200400,0x11000a03,0x4000000,0x201000,0x11000a03,0x4000000,0x270000,0x11000a03,0x7c00100,0x220400,
+0x11000a03,0x7c00100,0x220402,0x11000a03,0x7c00100,0x250400,0x11000a03,0x7c00500,0x230400,0x11000a03,0xc000010,0x1049400,0x11000b13,0x2802500,0x962460,0x11000b13,
+0x4000000,0x200000,0x11000b13,0x4000000,0x201000,0x11000b13,0x4000000,0x230400,0x11000b13,0x4000002,0x400000,0x11000b13,0x4000010,0x200000,0x11000b13,0x7c00100,
+0x2633800,0x11000c00,0x80000000,0x218960,0x11000c02,0x2802100,0x962460,0x11000c02,0x2802400,0x962460,0x11000c02,0x4000000,0x200000,0x11000c02,0x4000000,0x1329400,
+0x11000c02,0x4000000,0x1329800,0x11000c02,0x4000000,0x1500000,0x11000c02,0x6800000,0x1329800,0x11000c02,0x7c00100,0x230400,0x11000c02,0x7c00100,0x230401,0x11000c02,
+0x7c00100,0x230402,0x11000c02,0x7c00500,0x230400,0x11000c02,0x7d00100,0x230400,0x11000f01,0x2802400,0x962460,0x11000f0a,0x2802100,0x962460,0x11000f0a,0x2802400,
+0x962460,0x11000f0a,0x2806400,0x962460,0x11000f0a,0x4000000,0x200000,0x11000f0a,0x6800100,0x962540,0x11000f0a,0x7c00100,0x230400,0x11000f0a,0x7c00100,0x230401,
+0x11001004,0x2802100,0x962460,0x11001004,0x2802400,0x962460,0x11001004,0x2806400,0x962460,0x11001004,0x4000000,0x200000,0x11001004,0x4000000,0x1500000,0x11001004,
+0x6800100,0x962540,0x11001004,0x6800100,0x962541,0x11001004,0x7c00100,0x230400,0x11001004,0x7c00100,0x230401,0x11001110,0x2802100,0x962460,0x11001110,0x2802400,
+0x962460,0x11001110,0x2806400,0x962460,0x11001110,0x6800100,0x962540,0x11001110,0x7c00100,0x230400,0x11001110,0x7c00100,0x230401,0x1100120f,0x2802100,0x962460,
+0x1100120f,0x2802400,0x962460,0x1100120f,0x2806400,0x962460,0x1100120f,0x6800100,0x962540,0x1100120f,0x7c00100,0x230400,0x1100131f,0x2802100,0x962460,0x1100131f,
+0x2802400,0x962460,0x1100131f,0x2806400,0x962460,0x1100131f,0x4000000,0x200000,0x1100131f,0x6800000,0x1329800,0x1100131f,0x6800100,0x962540,0x1100131f,0x6800100,
+0x962541,0x1100131f,0x7c00100,0x230400,0x1100131f,0x7c00100,0x230401,0x11001423,0x2802100,0x962460,0x11001423,0x2806400,0x962460,0x11001423,0x6800100,0x962540,
+0x11001423,0x6800100,0x962541,0x11001423,0x7c00100,0x230400,0x11001423,0x7c00100,0x230401,0x11001524,0x2802100,0x962460,0x11001524,0x2802100,0x962461,0x11001524,
+0x2806400,0x962460,0x11001524,0x6800000,0x1329800,0x11001524,0x6800100,0x962540,0x11001524,0x7c00100,0x230400,0x11001615,0x2802100,0x962460,0x11001615,0x2806400,
+0x962460,0x11001615,0x6800100,0x962540,0x11001615,0x6800100,0x962541,0x11001615,0x7c00100,0x230400,0x1100171a,0x2802100,0x962460,0x1100171a,0x2806400,0x962460,
+0x1100171a,0x6800000,0x1329800,0x1100171a,0x6800100,0x962540,0x1100171a,0x6800100,0x962541,0x1100171a,0x7c00100,0x230400,0x11001900,0x4000000,0x1600000,0x11001926,
+0x2802100,0x1862460,0x11001926,0x2802400,0x1862460,0x11001926,0x2806100,0x1862460,0x11001926,0x4000000,0x200000,0x11001926,0x4000010,0x400000,0x11001926,0x6800000,
+0x1329800,0x11001926,0x7800100,0x1830142,0x11001926,0x7c00100,0x1830000,0x11001926,0x7c00900,0x1830000,0x11001926,0x7e00100,0x1830000,0x11001a18,0x2802100,0x1862460,
+0x11001a18,0x2802400,0x1862460,0x11001a18,0x6800000,0x1329800,0x11001a18,0x7800100,0x1830142,0x11001a18,0x7c00100,0x1830000,0x11001a18,0x7c00100,0x1830002,0x11001a18,
+0x7c00900,0x1830000,0x11001a18,0x7e00100,0x1830000,0x11001d0c,0x7c00100,0x230400,0x11001d0c,0x7c00100,0x250400,0x11001e12,0x7c00100,0x2230500,0x11001e12,0x7c00100,
+0x2330520,0x11001e12,0x7c80100,0x2330520,0x11002619,0x7c00100,0x220401,0x11002619,0x7c00100,0x220402,0x11002619,0x7c00100,0x250401,0x1100270e,0x4000400,0x200001,
+0x1100270e,0x4000400,0x200002,0x1100270e,0x4000400,0x500001,0x1100270e,0x7c00100,0x220401,0x1100270e,0x7c00100,0x250401,0x11002800,0x80000,0x918820,0x11002800,
+0x80000,0x1c18020,0x11002800,0x180000,0x918820,0x11002800,0x4000001,0x445801,0x11002800,0x4000001,0x445802,0x11002800,0x4000001,0xc4000b,0x11002800,0x6800000,
+0x201c00,0x11002800,0x6800020,0x201c00,0x11002800,0x24000000,0x200000,0x11002800,0x24000000,0x200002,0x11002800,0x24000000,0x810000,0x11002800,0x24000000,0x1410000,
+0x11002800,0x24000000,0x1500000,0x11002800,0x24000000,0x1500002,0x11002800,0x24000002,0x400000,0x11002800,0x24000006,0xc0000b,0x11002800,0x24000008,0x1410000,0x11002800,
+0x24000008,0x1710000,0x11002800,0x24000020,0x1001400,0x11002800,0x24000020,0x1500002,0x11002800,0x2c000010,0x1248000,0x11002800,0x2c000010,0x1248002,0x11002800,0x40000001,
+0x63b020,0x11002800,0x40080000,0x918820,0x11002801,0x80000,0x2a65620,0x11002801,0x82000,0x962460,0x11002900,0x4000000,0x20000e,0x11002900,0x4000000,0x20000f,
+0x11002900,0x4000020,0x20000e,0x11002900,0x4000020,0x20000f,0x11002900,0x4000020,0x81000e,0x11002900,0x4000020,0x81000f,0x11002900,0x4000020,0x141000e,0x11002900,
+0x4000020,0x141000f,0x11002900,0x4000022,0x20000e,0x11002900,0x4000022,0x20000f,0x11002a00,0x4000000,0x1500000,0x11002a00,0x4000000,0x1600000,0x11002a00,0x4000000,
+0x1600002,0x11002b01,0x2000,0x962460,0x11002b01,0x2802020,0x962460,0x11002c00,0x4000000,0x200000,0x11002c00,0x4000000,0x200002,0x11002c00,0x4000000,0x20000f,
+0x11002c00,0x4000020,0x200000,0x11002c00,0x7c00000,0x200000,0x11002c00,0x7c00020,0x200000,0x11002c00,0x7c00120,0x220405,0x11002c00,0x7c00120,0x230402,0x11002c00,
+0x7c00120,0x250402,0x11002c00,0x7c00120,0x250405,0x11002c19,0x7c00100,0x250400,0x11002c19,0x7c00100,0x250401,0x11002d00,0x4000000,0x100006,0x11002d00,0x4000000,
+0x200006,0x11002d19,0x7c00100,0x220402,0x11002d19,0x7c00100,0x230400,0x11002d19,0x7c00100,0x250402,0x11002e00,0x24000000,0x200000,0x11002e00,0x24000020,0x200000,
+0x11002e00,0x24000020,0x200001,0x11002f00,0x24000020,0x200000,0x11002f00,0x24000020,0x200001,0x11002f00,0x24000020,0x200002,0x11002f00,0x24000020,0xf00000,0x11002f00,
+0x24000020,0x1600000,0x11002f00,0x24000022,0x1600000,0x11003000,0x24000000,0x200000,0x11003000,0x24000020,0x200000,0x11003000,0x24000020,0x810000,0x11003000,0x24000020,
+0x1410000,0x11003100,0x24000000,0x200000,0x11003200,0x24000000,0x200000,0x11003300,0x4000000,0x100003,0x11003400,0x24000000,0x100000,0x11003400,0x24000000,0x200000,
+0x11003500,0x24000000,0x200000,0x11003600,0x24000000,0x200000,0x11003600,0x24000020,0x200000,0x11003700,0x24000000,0x200000,0x11003700,0x24000000,0xe00000,0x11003700,
+0x24000000,0x2800000,0x11003700,0x24000020,0x200000,0x11003800,0x4000000,0x100000,0x11003800,0x24000000,0x200000,0x11003800,0x24000000,0xb00000,0x11003800,0x24000000,
+0xe00000,0x11003800,0x24000000,0x1710000,0x11003800,0x24000000,0x2800000,0x11005003,0x7c00100,0x220402,0x11005013,0x2802500,0x962460,0x11005013,0x4000020,0x200005,
+0x11005013,0x7c00100,0x2633801,0x11005013,0x7c00100,0x2633802,0x11005013,0x7c00100,0x2633805,0x11005019,0x7c00100,0x220402,0x11005102,0x7000100,0x230408,0x11005102,
+0x7c00100,0x230404,0x11005102,0x7c00100,0x230407,0x11005102,0x7c00100,0x230408,0x11005102,0x7c00100,0x230409,0x11005201,0x2802400,0x962460,0x11005500,0x80000,
+0x1e18820,0x11005502,0x7000100,0x230408,0x11005502,0x7c00100,0x230404,0x11005502,0x7c00100,0x230407,0x11005502,0x7c00100,0x230408,0x11005502,0x7c00100,0x230409,
+0x11005667,0x1000,0,0x11020200,0x80004,0x418820,0x11020200,0x4000000,0x100006,0x11020200,0x4000000,0x10000f,0x11020200,0x4000400,0x100002,0x11020200,
+0x4000400,0x500002,0x11020200,0x6800c00,0x101000,0x11020200,0x24000000,0x100000,0x11020200,0x24000000,0x200000,0x11020200,0x24000000,0x1400000,0x11020200,0x24000000,
+0x1500000,0x11020200,0x24000000,0x1600000,0x11020200,0x24000020,0x100000,0x11020200,0x24000020,0x1600000,0x11020219,0x7c00100,0x12040f,0x11020219,0x7c00100,0x220400,
+0x11020219,0x7c00100,0x220401,0x11020219,0x7c00100,0x250400,0x11020319,0x7c00100,0x220400,0x11020319,0x7c00100,0x220401,0x11020319,0x7c00100,0x220402,0x11020319,
+0x7c00100,0x250400,0x11020319,0x7c00100,0x250402,0x11020319,0x7d00100,0x220402,0x11020419,0x7c00100,0x220401,0x11020519,0x7c00100,0x220400,0x11020600,0x4000400,
+0x100002,0x11020600,0x4000400,0x200400,0x11020600,0x7c00500,0x130400,0x11020600,0x7c00d00,0x130400,0x11020701,0x2802400,0x962460,0x11020701,0x2802400,0x962461,
+0x11020701,0x2802400,0xc62460,0x1102080e,0x7c00100,0x220400,0x1102080e,0x7c00100,0x250400,0x11020908,0x7c00100,0x220400,0x11020908,0x7c00100,0x220401,0x11020908,
+0x7c00100,0x250400,0x11020908,0x7c00100,0x250401,0x11022800,0x24000000,0x100000,0x11022800,0x24000000,0x200000,0x11022800,0x24000000,0x200002,0x11022800,0x24000000,
+0x401000,0x11022800,0x24000000,0xf00002,0x11022800,0x24000000,0xf0ac02,0x11022800,0x24000000,0x1500000,0x11022800,0x24000002,0x100000,0x11022800,0x24000002,0x370000,
+0x11022800,0x24000002,0x470000,0x11022800,0x24000006,0x400000,0x11022800,0x24000008,0x1710000,0x11022800,0x24000008,0x1712c00,0x11022800,0x24000020,0x100000,0x11022800,
+0x24000020,0x1500000,0x11022800,0x24000020,0x1500002,0x11022900,0x4000000,0x10000e,0x11022900,0x4000000,0x10000f,0x11022919,0x7c00100,0x12040f,0x11022c00,0x4000000,
+0x100002,0x11022c00,0x4000000,0x10000f,0x11022c00,0x4000000,0x1500002,0x11022c00,0x4000000,0x1600002,0x11022c00,0x7c00120,0x120405,0x11022c0e,0x7c00100,0x250401,
+0x11022c19,0x7c00100,0x150401,0x11022d00,0x4000000,0x100006,0x11022d00,0x4000000,0x200006,0x11022d19,0x7c00100,0x120402,0x11022d19,0x7c00100,0x150402,0x11022e00,
+0x24000000,0x200000,0x11022e00,0x24000020,0x100000,0x11022f00,0x24000020,0x100000,0x11022f00,0x24000020,0x100001,0x11022f00,0x24000020,0x100002,0x11023000,0x24000000,
+0x100000,0x11023300,0x4000000,0x100002,0x11023300,0x4000000,0x100003,0x11023300,0x4000100,0x120403,0x11023300,0x4000100,0x150403,0x11023400,0x24000000,0x100000,
+0x11023500,0x24000000,0x100000,0x11023600,0x24000000,0x100000,0x11023600,0x24000020,0x100000,0x11023700,0x24000000,0x100000,0x11023700,0x24000000,0xe00000,0x11023700,
+0x24000020,0x100000,0x11023800,0x4000000,0x100000,0x11023800,0x24000000,0x200000,0x11024e67,0,0,0x11025600,0x4000000,0x100000,0x11042a00,0x4000000,
+0x1600000,0x11045700,0x4000000,0x20000a,0x11045700,0x4000020,0x20000a,0x11045712,0x7c00100,0xe3040a,0x11045712,0x7c80100,0xe3040a,0x11045716,0x7c00100,0xe30c0a,
+0x11045716,0x7c00100,0x2530c0a,0x11063d00,0x4000001,0x445811,0x11065700,0x4000000,0x810011,0x11065700,0x4000000,0xe00011,0x11065700,0x4000000,0x1410011,0x11065700,
+0x4000000,0x1500011,0x11065700,0x4000000,0x1600011,0x11065700,0x4000006,0xe70011,0x11065700,0x4000008,0xe00011,0x11065700,0x4000008,0xe02c11,0x11065700,0x4000010,
+0x871411,0x11065700,0x4000010,0x1201411,0x11065700,0x4000010,0x1271011,0x11065700,0x4000020,0xe00011,0x11065700,0x4000400,0xe00011,0x11065700,0x4000420,0xe00011,
+0x11065700,0x6800000,0xe01c11,0x11065700,0x6800040,0xe29811,0x11065700,0xc000010,0x80ac11,0x11065700,0xc000010,0xb48011,0x11065719,0x7c00100,0xe20411,0x11065719,
+0x7c00100,0xe50411,0x11065719,0x7c00140,0xe20411,0x11065719,0x7c00140,0xe50411,0x11080100,0x6800000,0x201c00,0x11080100,0x68000c0,0x1329800,0x11080100,0x24000000,
+0x200000,0x11080100,0x24000000,0x810000,0x11080100,0x24000000,0x1410000,0x11080100,0x24000000,0x1500000,0x11080100,0x24000000,0x1600000,0x11080100,0x24000000,0x1b00000,
+0x11080100,0x24000000,0x2410000,0x11080100,0x24000006,0xd70000,0x11080100,0x24000008,0x1713c00,0x11080100,0x24000008,0x1714000,0x11080100,0x24000010,0x1001400,0x11080100,
+0x24000010,0x1071000,0x11080100,0x24000010,0x1071400,0x11080100,0x24000020,0x200000,0x11080100,0x24000020,0x400000,0x11080100,0x24000020,0x1600000,0x11080100,0x24000400,
+0x200000,0x11080100,0x24000420,0x200000,0x11080100,0x2c000010,0xb48000,0x11080100,0x2c000010,0x100ac00,0x11080100,0x44000001,0x1a45800,0x11080119,0x7c00100,0x220400,
+0x11080119,0x7c00100,0x250400,0x11080119,0x7c001c0,0x220400,0x11080119,0x7c001c0,0x250400,0x11080200,0x4000400,0x200002,0x11080200,0x24000000,0x200000,0x11080200,
+0x24000000,0x1500000,0x11080200,0x24000000,0x1600000,0x11080200,0x24000020,0x200000,0x110a1e12,0x7c00100,0x2130480,0x110a1e12,0x7c80100,0x2130480,0x110a3000,0x24000000,
+0xe00000,0x110a3000,0x24100000,0x810001,0x110a3000,0x24100000,0x1410001,0x110a3700,0x24000000,0x200000,0x110a3d00,0x4000000,0xe00000,0x110a3d00,0x4000000,0xe00002,
+0x110a3d00,0x24000000,0xe00000,0x110a3d11,0x7c00300,0xe30000,0x110a3d11,0x7c00900,0x1230400,0x110a3d12,0x2802400,0x962460,0x110a3e14,0x7c00100,0xe30000,0x110a3e14,
+0x7c00100,0xe30001,0x110a3e14,0x7c00100,0x2530000,0x110a3e14,0x7c00900,0x1230000,0x110a3e14,0x7c00900,0x1230001,0x110a3f16,0x7c00100,0xe30c00,0x110a3f16,0x7c00100,
+0xe30c01,0x110a3f16,0x7c00100,0x2530c00,0x110a3f16,0x7c00900,0x1230c00,0x110a3f16,0x7c00900,0x1230c01,0x110a4005,0x7c00100,0xe30400,0x110a4112,0x7c00100,0xe30402,
+0x110a4112,0x7c80100,0xe30402,0x110a4400,0x4000000,0xe00000,0x110a4412,0x4000000,0xe00002,0x110a4412,0x4000000,0xe00003,0x110a4416,0x4000000,0xe00c03,0x110a4500,
+0x4000000,0xe0000d,0x110a4516,0x4000000,0xe00c0d,0x110a4711,0x7c40300,0xe30000,0x110a4f11,0x7c00300,0xe30001,0x110a4f11,0x7c40300,0xe30000,0x110a5300,0x4000000,
+0x810010,0x110a5300,0x4000000,0xe00002,0x110a5300,0x4000000,0xe00010,0x110a5300,0x4000000,0x1410010,0x110a5300,0x4000002,0xe70010,0x110a5300,0x4000008,0x810010,
+0x110a5300,0x4000008,0x1410010,0x110a5300,0x6800000,0xe01c02,0x110a5300,0x6800000,0xe01c10,0x110a5400,0x4000000,0x81000c,0x110a5400,0x4000000,0xe0000c,0x110a5400,
+0x4000000,0x141000c,0x110a5400,0x4000000,0x150000c,0x110a5400,0x4000000,0x160000c,0x110a5400,0x4000002,0xe7000c,0x110a5400,0x4000010,0x87140c,0x110a5400,0x4000010,
+0xe7000c,0x110a5400,0x4000010,0x120140c,0x110a5400,0x4000010,0x127100c,0x110a5400,0x4000020,0xe0000c,0x110a5400,0x4000026,0xe7000c,0x110a5400,0xc000010,0x80ac0c,
+0x110a5400,0xc000010,0xb4800c,0x11400c0c,0x4000010,0xb00000,0x11400c0c,0x4000010,0x1071400,0x11400c17,0xc000010,0xb48000,0x11400c1e,0x7c00900,0x230400,0x11400f4b,
+0xc000010,0x448000,0x11400f5f,0xc000010,0x448000,0x11401d94,0x4000000,0x200000,0x11403dca,0x4000000,0xe00000,0x114457bf,0x4000004,0x120000a,0x114457bf,0x4000008,
+0x81000a,0x114457bf,0x4000008,0x141000a,0x114457bf,0x4000010,0x87000a,0x114457bf,0xc000010,0x84800a,0x114457c8,0x3802500,0x126246a,0x114457c8,0x7c00d00,0x2530c0a,
+0x114a3dbf,0x24000000,0x810000,0x114a3dbf,0x24000000,0x1410000,0x114a3dbf,0x24000008,0x810000,0x114a3dbf,0x24000008,0x1410000,0x114a3dbf,0x24000010,0x870000,0x114a3dbf,
+0x2c000010,0x848000,0x114a3dc5,0x4000000,0xe00000,0x114a3dc5,0x24000000,0xe00000,0x114a3dc5,0x24000002,0xe00000,0x114a3dc5,0x24000002,0x1200000,0x114a3dc5,0x24000008,
+0x810000,0x114a3dc5,0x24000008,0x1410000,0x114a3dc8,0x7c00900,0x930c00,0x114a3dc8,0x7c00900,0xe30c00,0x114a3dca,0x7c00300,0xe30000,0x114a3ec8,0x7000400,0x1200c02,
+0x114a3fbf,0x4000004,0x1200000,0x114a3fc8,0x7c00d00,0x2530c00,0x114a42ca,0x4000000,0xe00000,0x114a42ca,0x4000000,0xe0000f,0x114a44ca,0x4000000,0xe00002,0x114a44ca,
+0x4000000,0xe00003,0x114a45ca,0x4000000,0xe00002,0x114a45ca,0x4000000,0xe0000d,0x11505103,0x24000000,0x810000,0x11505103,0x24000000,0x1410000,0x1180090a,0x2802400,
+0x962460,0x11800c27,0x2802100,0x962460,0x11800c27,0x2802500,0x962460,0x11800f32,0x2802400,0x962460,0x11800f3f,0x2802400,0x962460,0x11820700,0x2802400,0x962460,
+0x11820700,0x2802500,0x962460,0x118a3dcb,0x2802400,0x962460,0x118a3ec8,0x2802400,0x962460,0x11c00904,0x2802400,0x962460,0x11c00908,0x2802400,0x962460,0x11c00c2c,
+0x6800000,0x1329800,0x11c00c30,0xc000010,0xb48000,0x11c00f78,0x6800000,0x1329800,0x11c0107d,0x6800000,0x1329800,0x11c01181,0x6800000,0x1329800,0x11c01285,0x6800000,
+0x1329800,0x11c01489,0x4000000,0x200000,0x11c01489,0x6800000,0x1329800,0x11c0168d,0x6800000,0x1329800,0x11d05107,0x7c00100,0x230408,0x20000067,0x1000,0,
+0x20000b13,0x2802400,0x962460,0x20000b13,0x2802500,0x962460,0x20001b27,0x2802100,0x962460,0x20001b27,0x2802100,0x962461,0x20001b27,0x2802400,0x962460,0x20001b27,
+0x2802500,0x962460,0x20001b27,0x2806400,0x962460,0x20001b27,0x2902100,0x962462,0x20001b27,0x4000000,0x200000,0x20001b27,0x4000000,0x400000,0x20001b27,0x4000000,
+0x500000,0x20001b27,0x4000000,0x810000,0x20001b27,0x4000000,0xb00000,0x20001b27,0x4000000,0xc0000b,0x20001b27,0x4000000,0x1410000,0x20001b27,0x4000010,0xb00000,
+0x20001b27,0x4000010,0xc00000,0x20001b27,0x6800000,0x1329800,0x20001b27,0x6800100,0x462540,0x20001b27,0x6800400,0x962540,0x20001b27,0x7c00100,0x230400,0x20001b27,
+0x7c00100,0x230401,0x20002619,0x7c00100,0x220401,0x20002a00,0x4000000,0x1600000,0x20004b67,0,0x1900000,0x20004c67,0,0x1900000,0x20004d67,0,
+0x1900000,0x20006d67,0x1000,0,0x20006e67,0x1000,0,0x20026d67,0,0,0x20026e67,0,0,0x200a4a12,0x7c00100,0x1f304c1,
+0x200a4a12,0x7c00100,0x20304e1,0x21005600,0x4000000,0x700000,0x21022a00,0x4000000,0x1600000,0x30000419,0x7c00100,0x220400,0x30000419,0x7c00100,0x220401,0x30000419,
+0x7c00100,0x250400,0x30000419,0x7c00100,0x250401,0x30000519,0x7c00100,0x220400,0x30000600,0x4000400,0x200400,0x30000600,0x7c00500,0x230400,0x30000605,0x4000400,
+0x200400,0x3000080e,0x7c00100,0x220400,0x30000908,0x2000,0x962460,0x30000908,0x7c00100,0x220400,0x30000908,0x7c00100,0x220401,0x30000908,0x7c00100,0x250400,
+0x30000908,0x7c00100,0x250401,0x30000a03,0x4000006,0x400400,0x30000c02,0x4000000,0x200000,0x30000c02,0x7c00100,0x230400,0x30000d22,0x2802100,0x962460,0x30000d22,
+0x2802400,0x962460,0x30000d22,0x2802500,0x962460,0x30000d22,0x4000000,0x200000,0x30000d22,0x4000010,0x200000,0x30000d22,0x7c00100,0x230400,0x30000d22,0xc000010,
+0x248000,0x30000d22,0x80000000,0x218960,0x30000e25,0x2802500,0x962460,0x30000e25,0x7c00100,0x230400,0x30001821,0x2802100,0x962460,0x30001821,0x2806400,0x962460,
+0x30001821,0x4000000,0x200000,0x30001821,0x6800100,0x962540,0x30001821,0x6800100,0x962541,0x30001821,0x7c00100,0x230400,0x30001b27,0x2802100,0x962460,0x30001b27,
+0x2802400,0x962460,0x30001b27,0x4000000,0x200000,0x30001b27,0x4000000,0x400000,0x30001b27,0x7c00100,0x230400,0x30001c1c,0x2802100,0x1862460,0x30001c1c,0x2802400,
+0x1862460,0x30001c1c,0x2806400,0x1862460,0x30001c1c,0x4000000,0x200000,0x30001c1c,0x6800100,0x1862400,0x30001c1c,0x6800100,0x1862540,0x30001c1c,0x7c00100,0x1830000,
+0x30001c1c,0x7c00100,0x1830001,0x30001c1c,0xc000010,0x448000,0x30001f0b,0x4000000,0x200000,0x30001f0b,0x4000010,0x200000,0x30001f0b,0x4000010,0x400000,0x30001f0b,
+0x6800000,0x200000,0x30001f0b,0x7c00100,0x230400,0x30001f0b,0xc000010,0x248000,0x30002006,0x7c00100,0x250400,0x30002128,0x4000000,0x200000,0x30002128,0x7c00100,
+0x230400,0x30002128,0xc000010,0x248000,0x3000221d,0x4000000,0x810000,0x3000221d,0x4000000,0x1410000,0x3000221d,0x4000001,0x445800,0x3000221d,0x7c00100,0x230400,
+0x30002300,0x4000010,0x400000,0x30002320,0x7c00100,0x230400,0x30002417,0x2802100,0x1862460,0x30002417,0x2802400,0x1862460,0x30002417,0x2806400,0x1862460,0x30002417,
+0x2882000,0x1862460,0x30002417,0x4000000,0x200000,0x30002417,0x4000000,0x400000,0x30002417,0x4000000,0x1600000,0x30002417,0x4000010,0x400000,0x30002417,0x4000010,
+0x1200000,0x30002417,0x6800000,0x1329800,0x30002417,0x6800100,0x1862540,0x30002417,0x7c00100,0x1830000,0x30002417,0x7d00100,0x1830000,0x3000251b,0x80000,0xc18820,
+0x3000251b,0x2802100,0x962460,0x3000251b,0x3c02100,0x962460,0x3000251b,0x4000000,0x200000,0x3000251b,0x4000006,0x500000,0x3000251b,0x4000010,0x400000,0x3000251b,
+0x4000010,0xb70000,0x3000251b,0x4000800,0x200000,0x3000251b,0x6800000,0x1329800,0x3000251b,0x7c00100,0x230400,0x3000251b,0x7c00900,0x230400,0x3000251b,0xc000010,
+0xb48000,0x3000251b,0x12882000,0x962460,0x30002800,0x24000000,0x200000,0x30002800,0x2c000010,0x1248002,0x30002a00,0x4000000,0x1600000,0x30002b01,0x2000,0x962460,
+0x30002c00,0x4000000,0x200000,0x30002c00,0x7c00100,0x220405,0x30002d19,0x7c00100,0x250400,0x30002e00,0x24000000,0x200000,0x30003000,0x24000000,0x200000,0x30003100,
+0x24000000,0x200000,0x30003600,0x24000000,0x200000,0x30003700,0x24000000,0x200000,0x3000392e,0x24000000,0x200000,0x30005013,0x7c00100,0x2633801,0x30005600,0,
+0x918820,0x30020600,0x4000400,0x500400,0x30020701,0x2802400,0x962460,0x30020701,0x2802400,0xc62460,0x300a3a11,0x4020000,0xe00000,0x300a3a11,0x4020000,0xe00002,
+0x300a3b11,0x4020000,0xe00002,0x300a3c00,0x4008000,0xe00000,0x300a3c00,0x4010000,0xe00000,0x300a3d11,0x7c00300,0xe30002,0x300a4305,0x7c00100,0xe30400,0x300a4611,
+0x7c40300,0xe30000,0x300a4829,0x7c00100,0xe30400,0x300a4829,0x7c00900,0x1230400,0x300a4929,0x4000000,0xe00000,0x3040259a,0x4000010,0x400000,0x3040259a,0x4000010,
+0xb70000,0x3040259a,0xc000010,0xb48000,0x304028ba,0x4000001,0xc41c0b,0x304a3dca,0x4000000,0xe00000,0x30800c27,0x2802100,0x962460,0x30c01c92,0x6800000,0x1329800,
+0x3100080e,0x7c00120,0x220402,0x3100080e,0x7c00120,0x250402,0x31005167,0x1000,0,0x3100581e,0x4000000,0x200000,0x3100581e,0x7c00100,0x230400,0x3100590d,
+0x7c00100,0x230400,0x31005a09,0x7c00100,0x220400,0x31005a09,0x7c00100,0x250400,0x31005b00,0x4000000,0x200000,0x31005c00,0x80000,0x918820,0x31005c00,0x2802000,
+0x962460,0x31005c00,0x2802400,0x962460,0x31005c00,0x4000000,0x200000,0x31005c00,0x4000000,0x200001,0x31005c00,0x6800000,0x962540,0x31005c00,0x6800400,0x962540,
+0x31005c01,0x2802400,0x962460,0x31005d00,0x4000020,0x200005,0x31005d00,0x6800020,0x1329805,0x31005d00,0x7c00120,0x220405,0x31005d00,0x7c00120,0x250405,0x31006000,
+0x82000,0x962460,0x31006000,0x180000,0x918820,0x310a5e11,0x7c40300,0xe30000,0x310a5f11,0x7c00300,0xe30001,0x32000419,0x7c00100,0x250400,0x3200080e,0x4000020,
+0x200000,0x3200080e,0x7c00100,0x220400,0x3200080e,0x7c00100,0x250400,0x32000908,0x7c00100,0x220400,0x32000908,0x7c00100,0x250400,0x32000c02,0x7c00100,0x230400,
+0x32000e25,0x7c00100,0x230400,0x32001d0c,0x7c00100,0x230400,0x32002800,0x80000,0x1e18820,0x32002800,0x80020,0x218820,0x32002800,0x4000001,0x445802,0x32002800,
+0x24000000,0x200000,0x32002800,0x24000000,0x1500002,0x32002800,0x24000020,0x200000,0x32002800,0x2c000010,0x1248002,0x32002919,0x7c00100,0x22040f,0x32002a00,0x4000000,
+0x1600000,0x32002b01,0x2000,0x962460,0x32002b01,0x2802000,0x962460,0x32002b01,0x2802020,0x962460,0x32002c00,0x4000000,0x200000,0x32002c00,0x4000020,0x200000,
+0x32002c00,0x4000020,0x200005,0x32002c00,0x7c00120,0x220405,0x32002c00,0x7c00120,0x250405,0x32002e00,0x24000020,0x200000,0x32002f00,0x24000020,0x200000,0x32003000,
+0x24000000,0x200000,0x32003000,0x24000020,0x200000,0x32003500,0x24000000,0x200000,0x32003600,0x24000020,0x200000,0x32003700,0x24000000,0x100000,0x32003700,0x24000000,
+0x200000,0x32003800,0x24000000,0x810000,0x32003800,0x24000000,0x1410000,0x32005102,0x4000000,0x1500008,0x32005502,0x7c00100,0x230400,0x32006108,0x7c00100,0x220400,
+0x32006108,0x7c00100,0x250400,0x3200622a,0x2802100,0x962460,0x3200622a,0x2806400,0x962460,0x3200622a,0x7c00100,0x230400,0x3200632b,0x2802100,0x962460,0x3200632b,
+0x6804000,0x962540,0x3200632b,0x7c00100,0x230400,0x3200642c,0x2802100,0x962460,0x3200642c,0x7c00100,0x230400,0x3200652d,0x2802100,0x962460,0x3200652d,0x7c00100,
+0x230400,0x32006600,0x24000020,0x200000,0x32006700,0x24000020,0x200000,0x32006800,0x24000020,0x200000,0x32006900,0x24000020,0x200000,0x32006900,0x24000020,0x810000,
+0x32006900,0x24000020,0x1410000,0x32006a00,0x24000020,0x200000,0x32006a00,0x24000020,0x200001,0x32006a00,0x24000020,0x200002,0x32020701,0x2882000,0xc62460,0x32023300,
+0x4000000,0x100000,0x32026c01,0x12882000,0x962460,0x32065700,0x4000000,0x810011,0x32065700,0x4000000,0x1410011,0x32086600,0x24000020,0x810000,0x32086600,0x24000020,
+0x1410000,0x32086900,0x24000020,0x810000,0x32086900,0x24000020,0x1410000,0x320a3600,0x24000020,0x200000,0x320a3d11,0x7c00100,0x1230400,0x320a3e14,0x7c00100,0xe30010,
+0x320a3e14,0x7c00100,0x2530000,0x320a3f16,0x7c00100,0xe30c10,0x320a4400,0x4000000,0xe00003,0x320a4929,0x4000000,0xe00000,0x320a4f11,0x7c00300,0xe30001,0x320a6b16,
+0x7c00100,0x2530c00,0x32406396,0xc000010,0x448000,0x324a3dcd,0x4000000,0xe00000,0x324a3dcd,0x7c00100,0x1230400,0x324a3fc8,0x4000002,0x1200c00,0x324a53c5,0x24000000,
+0xe00000,0x32820701,0x2802000,0x962460,0x40000419,0x7c00100,0x220400,0x40000519,0x7c00100,0x220400,0x40000600,0x4000400,0x200400,0x4000080e,0x7c00100,0x220400,
+0x4000080e,0x7c00100,0x250400,0x4000080e,0x7c00100,0x250402,0x40000c02,0x2802100,0x962460,0x40000c02,0x2802400,0x962460,0x40000c02,0x2802500,0x962460,0x40000c02,
+0x4000000,0x200000,0x40000c02,0x4000000,0x1071400,0x40000c02,0x7c00100,0x230400,0x40000c02,0x80000000,0x218960,0x40000d22,0x7c00100,0x230400,0x40000f0a,0x7c00100,
+0x230400,0x40001004,0x7c00100,0x230400,0x40001110,0x2802100,0x962460,0x40001110,0x6800100,0x962540,0x4000120f,0x2802100,0x962460,0x4000120f,0x4000000,0x1600000,
+0x4000120f,0x7c00100,0x230400,0x4000131f,0x7c00100,0x230400,0x40001423,0x4000000,0x200000,0x40001423,0x4000000,0x1600000,0x40001615,0x2802400,0x962460,0x40001615,
+0x7c00100,0x230400,0x40002417,0x2802400,0x1862460,0x40002417,0x4000000,0x200000,0x40002800,0x6800000,0x201c00,0x40002800,0x24000002,0x200000,0x40002c00,0x4000000,
+0x200002,0x40003000,0x24000000,0x200000,0x40003000,0x24000020,0x200000,0x40003700,0x24000000,0x200000,0x40005a09,0x7c00100,0x220400,0x40005a09,0x7c00100,0x250400,
+0x40005d00,0x7c00120,0x220405,0x40006f30,0x2802100,0x962460,0x40006f30,0x2802400,0x962460,0x40006f30,0x4000000,0x200000,0x40006f30,0x6800000,0x1329800,0x40006f30,
+0x6800100,0x962540,0x40006f30,0x7c00100,0x230400,0x40006f30,0xc000010,0xb48000,0x40007034,0x7c00100,0x1830000,0x40007117,0x4000000,0x200000,0x40007208,0x7c00100,
+0x220400,0x4000720e,0x7c00100,0x220400,0x4000720e,0x7c00500,0x22040e,0x4000720e,0x7c00500,0x22040f,0x40007219,0x7c00100,0x220400,0x40007219,0x7c00500,0x220400,
+0x40007219,0x7c00500,0x22040e,0x40007219,0x7c00500,0x22040f,0x40007300,0x24000000,0x200000,0x40007400,0x4000000,0x200000,0x40007531,0x7c00100,0x230400,0x40007631,
+0x7c00100,0x230400,0x40007835,0x4000010,0x400000,0x40007835,0x7c00100,0x230400,0x40007933,0x7c00100,0x230400,0x40007a32,0x6800000,0x1329800,0x40007a32,0x7c00100,
+0x230400,0x40007b2f,0x7c00100,0x230400,0x40007c00,0x4000000,0x200000,0x40020701,0x2802400,0x962460,0x40020701,0x2802400,0xc62460,0x40023300,0x4000000,0x200000,
+0x40027d01,0x12882000,0x962460,0x400a3700,0x24000000,0x200000,0x400a3700,0x24000000,0xe00000,0x400a4400,0x4000000,0xe0000d,0x400a4412,0x4000000,0xe00002,0x400a4412,
+0x4000000,0xe00003,0x400a4500,0x4000000,0xe0000d,0x400a5300,0x4000000,0x810010,0x400a5300,0x4000000,0x1410010,0x40507709,0x4000000,0x200000,0x4050770c,0x4000000,
+0x400000,0x4050770f,0x4000000,0x200000,0x4050770f,0x4000000,0x400000,0x40c01489,0x4000000,0x200000,0x40d05107,0x4000000,0x200000,0x41000419,0x7c00100,0x220400,
+0x41000419,0x7c00100,0x250400,0x4100080e,0x7c00100,0x220400,0x4100080e,0x7c00100,0x250400,0x41000908,0x7c00100,0x220400,0x41000908,0x7c00100,0x250400,0x41000b13,
+0x2802000,0x962460,0x41000b13,0x2802100,0x962460,0x41000b13,0x4000000,0xb00000,0x41000c02,0x2802100,0x962460,0x41000c02,0x4000000,0x1500000,0x41000c02,0xc000010,
+0xb48000,0x41000f0a,0x7c00100,0x230400,0x41001004,0x7c00100,0x230400,0x41001423,0x7c00100,0x230400,0x41001b27,0x4000000,0x500000,0x41001d0c,0x7c00100,0x22040f,
+0x41001d0c,0x7c00100,0x230400,0x41001f0b,0x2802400,0x962460,0x41001f0b,0x4000000,0x200000,0x41001f0b,0x7c00100,0x230400,0x41002800,0x24000000,0x200000,0x41002800,
+0x24000000,0x400000,0x41002919,0x7c00100,0x22040e,0x41002a00,0x4000000,0x1600000,0x41002b01,0x2802020,0x962460,0x41002c00,0x4000000,0x200000,0x41002c00,0x7c00120,
+0x220405,0x41003000,0x24000000,0x200000,0x41003700,0x24000000,0x200000,0x41003700,0x24000000,0xe00000,0x41005d00,0x7c00120,0x220405,0x41006600,0x24000020,0x200000,
+0x41006600,0x24000020,0x810000,0x41006600,0x24000020,0x1410000,0x41007208,0x7c00100,0x22040f,0x41007219,0x7c00100,0x220400,0x41007300,0x24000000,0x200000,0x41007e0e,
+0x2802000,0x962460,0x41007e0e,0x4000000,0x200000,0x41007f0e,0x4000000,0x200000,0x41007f0e,0x7c00100,0x230400,0x41008002,0x7c00100,0x230400,0x41008137,0x2802100,
+0x962460,0x41008137,0x4000000,0x200000,0x41008137,0x6800100,0x962540,0x41008137,0x7c00100,0x230400,0x41008301,0x2802000,0x962460,0x41008407,0x4000000,0x200000,
+0x41008407,0x4000000,0x400000,0x41008407,0x4000000,0xb00000,0x41008407,0x7c00100,0x220400,0x41008407,0x7c00100,0x250400,0x4100850b,0x7c00100,0x230400,0x4100860b,
+0x4000000,0x200000,0x4100860b,0x7c00100,0x230400,0x4100870c,0x7c00100,0x220400,0x41008838,0x7c00100,0x220400,0x41008838,0x7c00100,0x250400,0x41008939,0x2802000,
+0x962460,0x41008939,0x2802100,0x962460,0x41008939,0x2806000,0x962460,0x41008939,0x4000000,0x200000,0x41008939,0x4000000,0x400000,0x41008939,0x7c00100,0x230400,
+0x41008939,0xc000010,0x448000,0x41008a00,0x4000400,0x200400,0x41008b3b,0x4000000,0x1800000,0x41008b3b,0x6800000,0x1329800,0x41008b3b,0x7c00100,0x1830000,0x41008b3b,
+0x7e00100,0x1830000,0x41008c3d,0x4000010,0x400000,0x41008c3d,0x7c00100,0x230400,0x41008d0e,0x7c00100,0x22040f,0x41008d19,0x7c00100,0x220400,0x41008d19,0x7c00100,
+0x22040f,0x41008e00,0x24000000,0x200000,0x41008e00,0x24000000,0x400000,0x41008e00,0x24000000,0x1710000,0x41008e00,0x24000006,0x400000,0x41008f3a,0x2802100,0x962460,
+0x41008f3a,0x2806000,0x962460,0x41008f3a,0x4000000,0x200000,0x41008f3a,0x6800100,0x962540,0x41008f3a,0x7c00100,0x230400,0x4100903c,0x7c00100,0x230400,0x4100903c,
+0x7c00100,0x23040f,0x41020701,0x2802000,0x962460,0x41020701,0x2802000,0xc62460,0x410a3700,0x24000000,0x200000,0x410a3700,0x24000000,0xe00000,0x410a4412,0x4000000,
+0xe00003,0x410a4711,0x7c40300,0xe30000,0x410a4f11,0x7c00300,0xe30001,0x410a9100,0x4000000,0x800010,0x410a9100,0x4000000,0x810010,0x410a9100,0x4000000,0x870010,
+0x410a9100,0x4000000,0xb00010,0x410a9100,0x4000000,0xf00010,0x410a9100,0x4000000,0x1001410,0x410a9100,0x4000000,0x1071010,0x410a9100,0x4000000,0x1071410,0x410a9100,
+0x4000000,0x1410010,0x41408ad0,0x4000400,0x200000,0x414a82ca,0x4000000,0xe00000,0x41808300,0x2802000,0x962460,0x41c01489,0x6800000,0x1329800,0x50000419,0x7c00100,
+0x220400,0x50000419,0x7c00100,0x250400,0x5000080e,0x7c00100,0x220400,0x50000908,0x7c00100,0x220400,0x50000908,0x7c00100,0x250400,0x50000b13,0x2802500,0x962460,
+0x50000f0a,0x7c00100,0x230400,0x50001615,0x2802100,0x962460,0x50001615,0x7c00100,0x230400,0x50002b01,0x2802020,0x962460,0x50002c00,0x4000000,0x200000,0x50002c19,
+0x7c00100,0x220400,0x50002d19,0x7c00100,0x220400,0x50003000,0x24000000,0x200000,0x50003000,0x24000020,0x200000,0x50003700,0x24000000,0x200000,0x50005d00,0x7c00120,
+0x220405,0x50005d00,0x7c00120,0x250405,0x50006108,0x7c00100,0x220400,0x50006108,0x7c00100,0x250400,0x50006600,0x24000020,0x200000,0x50007300,0x24000000,0x200000,
+0x50008301,0x2802400,0x962460,0x50008a00,0x7c00500,0x230400,0x50009257,0x2802400,0x962460,0x50009257,0x4000000,0x200000,0x50009257,0x4000010,0x1071400,0x50009257,
+0x6800000,0x1329800,0x50009257,0x7c00100,0x230400,0x50009257,0x7c00500,0x230400,0x50009257,0x7c00900,0x230400,0x50009257,0xc000010,0xb48000,0x5000933e,0x2802100,
+0x962460,0x5000933e,0x2802400,0x962460,0x5000933e,0x4000000,0x200000,0x5000933e,0x4000000,0x400000,0x5000933e,0x4000010,0x400000,0x5000933e,0x6800000,0x1329800,
+0x5000933e,0x6800100,0x962540,0x5000933e,0x6800100,0x962541,0x5000933e,0x6804400,0x962540,0x5000933e,0x7c00100,0x230400,0x5000933e,0x7c00100,0x230401,0x5000933e,
+0xc000010,0x448000,0x50009419,0x7c00100,0x220400,0x50009419,0x7c00100,0x250400,0x50009500,0x4000400,0x200400,0x5000965a,0x4000000,0x500000,0x5000965a,0x7c00100,
+0x230400,0x5000965a,0xc000010,0xb48000,0x5000975b,0x4000000,0x200000,0x5000975b,0x4000010,0x400000,0x5000975b,0x7c00100,0x230400,0x50009865,0x7c00100,0x230400,
+0x50009965,0x4000010,0x400000,0x50009965,0x7c00100,0x230400,0x50409aca,0x4000000,0x200000,0x5100080e,0x7c00100,0x220400,0x5100080e,0x7c00100,0x250400,0x51000c02,
+0x2802100,0x962460,0x51000c02,0x4000000,0x1500000,0x51000c02,0x4000020,0x200000,0x51000c02,0x7c00100,0x230400,0x51000f0a,0x7c00100,0x230400,0x51000f0a,0x7c00500,
+0x230400,0x51001110,0x2802100,0x962460,0x5100131f,0x2802100,0x962460,0x51001423,0x7c00100,0x230400,0x51001524,0x2802100,0x962460,0x51001524,0x4000000,0x200000,
+0x51001524,0x7c00100,0x230400,0x5100171a,0x2802100,0x962460,0x5100171a,0x4000000,0x200000,0x5100171a,0x4000000,0x1500000,0x5100171a,0x7c00100,0x230400,0x51001b27,
+0x4000000,0x200000,0x51001b27,0x4000000,0x400000,0x51001b27,0x4000000,0x500000,0x51001b27,0x7c00100,0x230400,0x51001c1c,0x2802100,0x1862460,0x51001c1c,0x2802500,
+0x1862460,0x51001c1c,0x2806400,0x1862460,0x51001c1c,0x4000000,0x1800000,0x51001c1c,0x6800000,0x1329800,0x51001c1c,0x6800100,0x1862400,0x51001c1c,0x6800100,0x1862540,
+0x51001c1c,0x6800500,0x1862400,0x51001c1c,0x7c00100,0x1830000,0x5100251b,0x7c00100,0x230400,0x51002619,0x7c00100,0x220400,0x51002619,0x7c00100,0x250400,0x51002800,
+0x80020,0x218820,0x51002c00,0x4000000,0x200000,0x51002d19,0x7c00100,0x230400,0x51003700,0x24000000,0x200000,0x51003700,0x24000000,0xe00000,0x51005201,0x2802400,
+0x962460,0x51005c00,0x4000000,0x200000,0x51006108,0x7c00100,0x220400,0x51006108,0x7c00100,0x250400,0x51006600,0x24000020,0x200000,0x51006600,0x24000020,0x810000,
+0x51006600,0x24000020,0x1410000,0x51007300,0x24000000,0x200000,0x51007300,0x24000020,0x200000,0x51008002,0x7c00100,0x230400,0x51008301,0x2802000,0x962460,0x51008301,
+0x2802400,0x962460,0x51008301,0x2802400,0xc62460,0x51008a00,0x7c00500,0x230400,0x51008e00,0x24000000,0x200000,0x51008e00,0x24000000,0x400000,0x51008e00,0x24000000,
+0x810000,0x51008e00,0x24000000,0x1400000,0x51008e00,0x24000000,0x1410000,0x51008e00,0x24000000,0x1710000,0x51008e00,0x24000002,0x200000,0x51008e00,0x24000500,0x230400,
+0x51008e00,0x2c000010,0xb48000,0x51009419,0x7c00100,0x220400,0x51009419,0x7c00100,0x22040e,0x51009419,0x7c00100,0x22040f,0x51009419,0x7c00100,0x250400,0x51009500,
+0x4000400,0x200400,0x51009500,0x7c00500,0x230400,0x51009519,0x7c00100,0x220400,0x51009519,0x7c00100,0x22040f,0x51009519,0x7c00100,0x230400,0x51009519,0x7c00100,
+0x250400,0x51009b71,0x2802100,0x962460,0x51009b71,0x6800000,0x1329800,0x51009b71,0x6800100,0x962540,0x51009b71,0x6804400,0x962540,0x51009b71,0x7c00100,0x230400,
+0x51009c52,0x2802100,0x962460,0x51009c52,0x2802400,0x962460,0x51009c52,0x2802d00,0x962460,0x51009c52,0x4000010,0x400000,0x51009c52,0x6800000,0x1329800,0x51009c52,
+0x6800100,0x962540,0x51009c52,0x7c00100,0x230400,0x51009c52,0xc000010,0x448000,0x51009d6d,0x6800000,0x1329800,0x51009d6d,0x7c00100,0x230400,0x51009d6d,0x7c00500,
+0x230400,0x51009d6d,0x7c00d00,0x230400,0x51009d6d,0xc000010,0x448000,0x51009e08,0x2802100,0x962460,0x51009f63,0x4000010,0x400000,0x51009f63,0x6800000,0x1329800,
+0x51009f63,0x7c00100,0x230400,0x51009f63,0x7c00900,0x230400,0x51009f63,0xc000010,0x448000,0x51009f63,0xc000010,0xb48000,0x5100a008,0x2000,0x962460,0x5100a008,
+0x2802400,0x962460,0x5100a008,0x4000000,0x200000,0x5100a008,0x7c00100,0x220400,0x5100a008,0x7c00100,0x230400,0x5100a008,0x7c00100,0x250400,0x5100a008,0x7c00500,
+0x230400,0x5100a16f,0x2806400,0x962460,0x5100a16f,0x6800000,0x1329800,0x5100a16f,0x6800100,0x962540,0x5100a16f,0x7c00100,0x230400,0x5100a16f,0xc000010,0x448000,
+0x5100a24f,0x2802100,0x962460,0x5100a24f,0x2802400,0x962460,0x5100a24f,0x6800000,0x1329800,0x5100a24f,0x7c00100,0x230400,0x5100a24f,0xc000010,0x448000,0x5100a36e,
+0x2802100,0x962460,0x5100a36e,0x4000000,0x200000,0x5100a36e,0x6800100,0x962540,0x5100a36e,0x6804400,0x962540,0x5100a36e,0x7c00100,0x230400,0x5100a442,0x2802100,
+0x962460,0x5100a442,0x4000000,0x200000,0x5100a442,0x6800000,0x1329800,0x5100a442,0x6800100,0x962540,0x5100a442,0x7c00100,0x230400,0x5100a442,0xc000010,0x448000,
+0x5100a500,0x4000000,0x200000,0x5100a600,0x4000000,0x200000,0x5100a601,0x2802000,0x962460,0x5100a76b,0x7c00100,0x230400,0x5100a868,0x7c00100,0x230400,0x5100a96c,
+0x4000000,0x200000,0x5100a96c,0x7c00100,0x230400,0x5100aa00,0x4000000,0xe00000,0x5100ab00,0x4000000,0xe00000,0x51086600,0x24000020,0x810000,0x51086600,0x24000020,
+0x1410000,0x510a4005,0x7c00100,0xe30400,0x510a4711,0x7c40300,0xe30000,0x510a7300,0x24000000,0x200000,0x510aaa00,0x4000000,0xe00000,0x5140a2fe,0x4000400,0x400000,
+0x514a82ca,0x4000000,0xe00000,0x51802bbc,0x2802000,0x962460,0x51c00908,0x2802400,0x962460,0x51c0a008,0x2802400,0x962460,0x52000f0a,0x2802100,0x962460,0x52000f0a,
+0x6800100,0x962540,0x52000f0a,0x7c00100,0x230400,0x52001004,0x4000000,0x1600000,0x52001b00,0x4000000,0x200000,0x52001c1c,0x2802100,0x1862460,0x52001c1c,0x6800100,
+0x1862400,0x52001c1c,0x6800500,0x1862400,0x52001e12,0x7c00100,0x2230500,0x52001e12,0x7c00100,0x2330520,0x52002128,0x4000002,0x400000,0x52002128,0x7c00100,0x230400,
+0x52002a00,0x4000000,0x1500000,0x52002a00,0x4000000,0x1600000,0x52002d00,0x4000000,0x200006,0x52003000,0x24000000,0x200000,0x52006108,0x7c00100,0x220400,0x52006108,
+0x7c00100,0x250400,0x52008301,0x2802400,0x962460,0x52008407,0x2802400,0x962460,0x52008407,0x7c00100,0x220400,0x52008407,0x7c00100,0x250400,0x52008b3b,0x6800000,
+0x1800000,0x52008b3b,0x7c00100,0x1830000,0x52008e00,0x24000000,0x400000,0x52009419,0x7c00100,0x250400,0x5200975b,0x4000000,0x200000,0x5200ac7e,0x2802000,0x962460,
+0x5200ac7e,0x2802100,0x962460,0x5200ac7e,0x2802400,0x962460,0x5200ac7e,0x4000010,0x200000,0x5200ac7e,0x7c00100,0x230400,0x5200ac7e,0xc000010,0x248000,0x5200ad28,
+0x7c00100,0x230400,0x5200ae6a,0x2802100,0x1862460,0x5200ae6a,0x2802400,0x962460,0x5200ae6a,0x2802400,0x1862460,0x5200ae6a,0x2806000,0x1862460,0x5200ae6a,0x4000000,
+0x1800000,0x5200ae6a,0x6800000,0x1329800,0x5200ae6a,0x6800100,0x1862400,0x5200ae6a,0x6800100,0x1862540,0x5200ae6a,0x7c00100,0x1830000,0x5200ae6a,0x7c00900,0x1830000,
+0x5200ae6a,0xc000010,0x1848000,0x5200b083,0x4000010,0x400000,0x5200b083,0x7c00100,0x230400,0x5200b083,0xc000010,0x448000,0x5200b182,0x2802400,0x962460,0x5200b182,
+0x4000000,0x200000,0x5200b182,0x4000010,0x400000,0x5200b182,0x7c00100,0x230400,0x5200b182,0xc000010,0x448000,0x5200b30a,0x2802400,0x962460,0x5200b30a,0x4000000,
+0x200000,0x5200b30a,0x7c00100,0x230400,0x5200b54e,0x2802100,0x962460,0x5200b54e,0x2802400,0x962460,0x5200b54e,0x4000000,0x200000,0x5200b54e,0x4000010,0x400000,
+0x5200b54e,0x6800000,0x1329800,0x5200b54e,0x6800100,0x962540,0x5200b54e,0x6804400,0x962540,0x5200b54e,0x7c00100,0x230400,0x5200b54e,0xc000010,0x448000,0x5200b61c,
+0x4000000,0x1800000,0x5200b61c,0x6800500,0x1862400,0x5200b61c,0x7c00100,0x1830000,0x5200b61c,0x7c00900,0x1830000,0x5200b77f,0x2802100,0x1862460,0x5200b77f,0x2802400,
+0x1862460,0x5200b77f,0x4000000,0x1800000,0x5200b77f,0x4000010,0x1800000,0x5200b77f,0x7c00100,0x1830000,0x5200b77f,0x7c00500,0x1830000,0x5200b77f,0x7c00900,0x1830000,
+0x5200b77f,0x7e00100,0x1830000,0x5200b873,0x2802100,0x962460,0x5200b873,0x2806400,0x962460,0x5200b873,0x6800000,0x1329800,0x5200b873,0x6800100,0x962540,0x5200b873,
+0x6800400,0x962540,0x5200b873,0x7c00100,0x230400,0x5200b873,0xc000010,0x448000,0x5200b912,0x7c00100,0x2230500,0x5200b912,0x7c00100,0x2330520,0x5200ba74,0x4000000,
+0x200000,0x5200ba74,0x4000010,0x400000,0x5200ba74,0x7c00100,0x230400,0x5200bb85,0x4000000,0x200000,0x5200bb85,0x7c00100,0x230400,0x5200bc75,0x4000000,0x400000,
+0x5200bc75,0x4000010,0x400000,0x5200bc75,0x7c00100,0x230400,0x5200bd7d,0x4000000,0x200000,0x5200bd7d,0x7c00100,0x230400,0x5200be7a,0x4000000,0x200000,0x5200be7a,
+0x7c00100,0x230400,0x5200bf58,0x7c00100,0x230400,0x5200c002,0x4000000,0x200000,0x5200c178,0x2802100,0x962460,0x5200c178,0x2802400,0x962460,0x5200c178,0x2806400,
+0x962460,0x5200c178,0x4000000,0x200000,0x5200c178,0x6800100,0x962540,0x5200c178,0x7c00100,0x230400,0x5200c178,0x7c00100,0x230401,0x5200c178,0xc000010,0x448000,
+0x5200c178,0x80000000,0x218960,0x5200c247,0x7c00100,0x230400,0x5200c247,0x7c00100,0x830400,0x5200c247,0x7c00100,0x1430400,0x5200c300,0x4000000,0x200003,0x52022d00,
+0x4000000,0x100006,0x52023700,0x24000000,0x100000,0x52023700,0x24000000,0xe00000,0x52023700,0x24000000,0x2800000,0x52024400,0x4000000,0x100000,0x52027300,0x24000000,
+0x100000,0x5202c300,0x4000000,0x100000,0x5202c300,0x4000000,0x100002,0x5202c300,0x4000000,0x100003,0x5202c300,0x4000000,0x10000d,0x5202c300,0x4000100,0x150400,
+0x5202c300,0x4000100,0x15040d,0x520a1e12,0x7c00100,0x2130480,0x520a3700,0x24000000,0xe00000,0x520a3800,0x24000000,0x100000,0x520a4711,0x7c40300,0xe30000,0x520a4f11,
+0x7c00300,0xe30001,0x520a7300,0x24000000,0x100000,0x520ab412,0x7c00100,0x2130480,0x520ac400,0x4000000,0xe00002,0x520ac400,0x4000000,0xe0000d,0x520ac414,0x4000000,
+0xe0000d,0x520ac511,0x7c40300,0xe30000,0x5240af9c,0x7c00100,0x230400,0x5240afa1,0x4000400,0x200000,0x5240afa3,0x6800400,0x962540,0x5240afa3,0x7c00100,0x230400,
+0x5240afad,0x7c00100,0x230400,0x5240afaf,0x7c00100,0x230400,0x5240b2d2,0x4000000,0x200000,0x5240b2d2,0x4000000,0x1500000,0x5240b2dd,0x4000000,0x200000,0x5240b2eb,
+0x4000000,0x200000,0x524a44ca,0x4000000,0xe00003,0x5250b501,0x7c00900,0x230400,0x5280af9c,0x2802400,0x962460,0x5280af9d,0x2802400,0x962460,0x5280afa3,0x2802400,
+0x962460,0x5280afa5,0x2802400,0x962460,0x5280afa7,0x2802400,0x962460,0x52c0b3f8,0x2802400,0x962460,0x52c0b3fc,0x7c00100,0x230400,0x60000c02,0x2802100,0x962460,
+0x60000c02,0x7c00100,0x230400,0x60000f0a,0x2802100,0x962460,0x60000f0a,0x6800100,0x962540,0x60000f0a,0x7c00100,0x230400,0x6000131f,0x4000000,0x200000,0x6000171a,
+0x7c00100,0x230400,0x6000171a,0x7c00100,0x230560,0x60001b27,0x2802100,0x962460,0x60001b27,0x4000000,0xc00000,0x60001b27,0x7c00100,0x230400,0x60001f0b,0x2802400,
+0x962460,0x60002919,0x7c00100,0x22040e,0x60002a00,0x4000000,0x1600000,0x60003000,0x24000000,0x200000,0x60003000,0x24000000,0xe00000,0x60003700,0x24000000,0x200000,
+0x60003800,0x24000000,0x1710000,0x60005102,0x4000000,0x200000,0x60006108,0x7c00100,0x220400,0x60006108,0x7c00100,0x250400,0x60006600,0x24000020,0x200000,0x60008301,
+0x2802400,0xc62460,0x6000903c,0x2806000,0x962460,0x6000903c,0x4000000,0x400000,0x60009519,0x7c00100,0x220400,0x60009519,0x7c00100,0x250400,0x6000a008,0x7c00100,
+0x220400,0x6000a008,0x7c00100,0x250400,0x6000c300,0x4000000,0x2703580,0x6000c654,0x2802000,0x962460,0x6000c654,0x4000010,0x200000,0x6000c654,0x7c00100,0x230400,
+0x6000c73f,0x2802000,0x962460,0x6000c73f,0x2802100,0x962460,0x6000c73f,0x4000000,0x200000,0x6000c73f,0x6800100,0x962540,0x6000c73f,0x6804000,0x962540,0x6000c73f,
+0x7c00100,0x230400,0x6000c80b,0x7c00100,0x230400,0x6000c941,0x2802100,0x962460,0x6000c941,0x2806400,0x962460,0x6000c941,0x4000000,0x200000,0x6000c941,0x4000010,
+0x200000,0x6000c941,0x6800000,0x1329800,0x6000c941,0x6800100,0x962540,0x6000c941,0x7c00100,0x230400,0x6000c941,0xc000010,0x448000,0x6000ca82,0x7c00100,0x230400,
+0x6000cc00,0x4000000,0xe00000,0x6000d000,0x4000000,0x200000,0x6002c300,0x4000000,0x100000,0x6002c300,0x4000000,0x10000d,0x6002c300,0x4000100,0x150400,0x6002c300,
+0x4000100,0x15040d,0x600a3000,0x24000000,0x200000,0x600a3000,0x24000000,0xe00000,0x600a3700,0x24000000,0x200000,0x600a3800,0x24000000,0x200000,0x600a3800,0x24000000,
+0x2800000,0x600a4305,0x7c00100,0xe30400,0x600ac300,0x4000000,0x100000,0x600ac400,0x4000000,0xe0000d,0x600acb14,0x7c00100,0xe30000,0x600acb16,0x7c00100,0xe30c00,
+0x600acc00,0x4000000,0xe00000,0x600acd00,0x4000000,0x200000,0x600acd00,0x4000000,0xe00000,0x600acd00,0x4000000,0x2800000,0x600ace00,0x4000000,0xe00000,0x600ace00,
+0x4000000,0x2800000,0x600acf00,0x4000000,0xe00000,0x600acf00,0x4000000,0x2800000,0x600ad111,0x7c40300,0xe30000,0x604ac4ca,0x4000000,0xe00003,0x61000a03,0x4000000,
+0x1600000,0x61000c02,0x80000000,0x218960,0x6100120f,0x4000000,0x200000,0x61001a18,0x7c00100,0x1830000,0x61001d0c,0x7c00100,0x230400,0x61001d0c,0x7c00100,0x250400,
+0x61006600,0x24000020,0x200000,0x61008407,0x7c00100,0x220400,0x61008407,0x7c00100,0x250400,0x6100870c,0x7c00100,0x220400,0x61008e00,0x24000000,0x200000,0x61008e00,
+0x24000000,0x400000,0x61008e00,0x24000002,0x300000,0x6100903c,0x7c00100,0x230400,0x61009519,0x7c00100,0x220400,0x61009519,0x7c00100,0x250400,0x61009519,0x7c00500,
+0x22040f,0x61009b71,0x2802100,0x962460,0x61009b71,0x2806400,0x962460,0x61009b71,0x7c00100,0x230400,0x6100a008,0x2802100,0x962460,0x6100c300,0x4000000,0x20000f,
+0x6100cd00,0x4000000,0x200000,0x6100d202,0x2802400,0x962460,0x6100d202,0x2802500,0x962460,0x6100d202,0x7c00100,0x230400,0x6100d302,0x4000020,0x200000,0x6100d302,
+0x7c00120,0x230405,0x6100d476,0x2802100,0x962460,0x6100d476,0x2802100,0x962461,0x6100d476,0x2806400,0x962460,0x6100d476,0x4000000,0x400000,0x6100d476,0x6800000,
+0x1329800,0x6100d476,0x6800100,0x962540,0x6100d476,0x7c00100,0x230400,0x6100d476,0xc000010,0x448000,0x6100d573,0x2802100,0x962460,0x6100d573,0x2806400,0x962460,
+0x6100d573,0x6800100,0x962540,0x6100d573,0x7c00100,0x230400,0x6100d573,0x7c00900,0x230400,0x6100d573,0xc000010,0x448000,0x6100d68d,0x7c00100,0x230400,0x6100d756,
+0x7c00100,0x230400,0x6100d85c,0x2802500,0x962460,0x6100d85c,0x6800100,0x962540,0x6100d85c,0x7c00100,0x230400,0x6100d85c,0x7c00500,0x230400,0x6100d997,0x2802100,
+0x962460,0x6100d997,0x4000000,0x200000,0x6100d997,0x4000000,0x400000,0x6100d997,0x6800000,0x1329800,0x6100d997,0x6800100,0x962540,0x6100d997,0x6804400,0x962540,
+0x6100d997,0x7c00100,0x230400,0x6100d997,0x7c00100,0x230560,0x6100d997,0xc000010,0x448000,0x6100da98,0x6800000,0x1329800,0x6100da98,0x7c00100,0x230400,0x6100db71,
+0x4000000,0x200000,0x6100dc99,0x2802100,0x962460,0x6100dc99,0x2802400,0x962460,0x6100dc99,0x6800000,0x1329800,0x6100dc99,0x6800100,0x962540,0x6100dc99,0x6804400,
+0x962540,0x6100dc99,0x7c00100,0x230400,0x610a4711,0x7c40300,0xe30000,0x610a4f11,0x7c00300,0xe30001,0x610ace00,0x4000000,0xe00000,0x6140afa1,0x7c00100,0x230400,
+0x6140afa3,0x7c00100,0x230400,0x6180af9e,0x2802400,0x962460,0x62002a00,0x4000000,0x1600000,0x63002800,0x80000,0x918820,0x63c00c15,0x80000,0x918820,0x7000080e,
+0x7c00100,0x250400,0x70000a03,0x4000000,0x200000,0x70000c00,0x80000000,0x218960,0x70000f0a,0x7c00100,0x230400,0x70001004,0x7c00100,0x230400,0x70001524,0x2802100,
+0x962460,0x70001524,0x7c00100,0x230400,0x70001615,0x2802100,0x962460,0x7000171a,0x2802100,0x962460,0x70001821,0x6800000,0x1329800,0x70002320,0x7c00100,0x230400,
+0x70002a00,0x4000000,0x1500000,0x70002a00,0x4000000,0x1600000,0x70003000,0x24000000,0x200000,0x70003800,0x24000000,0xe00000,0x70005201,0x2802400,0x962460,0x7000581e,
+0x7c00100,0x230400,0x70006108,0x7c00100,0x220400,0x70006108,0x7c00100,0x250400,0x70006f30,0x7c00100,0x230400,0x70007300,0x24000000,0x200000,0x70007f0e,0x4000000,
+0x200000,0x70008301,0x2802100,0x962460,0x70008301,0x2802400,0x962460,0x70008e00,0x24000000,0x200000,0x70008e00,0x24000000,0x400000,0x70008e00,0x24000002,0x400000,
+0x70008e00,0x24000008,0x1410000,0x70008e00,0x24000010,0x400000,0x70008e00,0x2c000010,0x448000,0x70009519,0x7c00100,0x220400,0x70009519,0x7c00100,0x230400,0x70009519,
+0x7c00100,0x250400,0x70009865,0x7c00100,0x230400,0x70009965,0x4000010,0x400000,0x70009965,0x7c00100,0x230400,0x7000a008,0x7c00100,0x220400,0x7000a008,0x7c00100,
+0x250400,0x7000a008,0x7c00500,0x22040f,0x7000a50e,0x4000000,0x200000,0x7000b61c,0x2802500,0x1862460,0x7000b61c,0x6800500,0x1862400,0x7000b61c,0x7c00100,0x1830000,
+0x7000c300,0x4000000,0x100000,0x7000c941,0x2806000,0x962460,0x7000cc00,0x4000000,0xe00000,0x7000cd00,0x4000000,0x200000,0x7000cd00,0x4000000,0xe00000,0x7000cd00,
+0x4000000,0x2800000,0x7000cf00,0x4000000,0xe00000,0x7000d202,0x2802100,0x962460,0x7000d202,0x7c00100,0x230400,0x7000d997,0x7c00100,0x230400,0x7000d997,0xc000010,
+0x248000,0x7000dd86,0x2802400,0x962460,0x7000dd86,0x7c00100,0x230400,0x7000dd86,0xc000010,0x448000,0x7000de9f,0x4000000,0x200000,0x7000de9f,0x7c00100,0x230400,
+0x7000e001,0x2400,0x962460,0x7000e001,0x2802400,0x962460,0x7000e187,0x2802000,0x962460,0x7000e187,0x2802100,0x962460,0x7000e187,0x4000000,0x200000,0x7000e187,
+0x7c00100,0x230400,0x7000e187,0xc000010,0x448000,0x7000e288,0x7c00100,0x230400,0x7000e300,0x4000000,0x200000,0x7000e489,0x2802100,0x962460,0x7000e489,0x2802400,
+0x962460,0x7000e489,0x6800100,0x962540,0x7000e489,0x6800100,0x962541,0x7000e489,0x6804400,0x962540,0x7000e489,0x7c00100,0x230400,0x7000e489,0x7c00900,0x230400,
+0x7000e59d,0x2802100,0x962460,0x7000e59d,0x2802400,0x962460,0x7000e59d,0x4000000,0x200000,0x7000e59d,0x4000010,0x200000,0x7000e59d,0x6800100,0x962540,0x7000e59d,
+0x6804400,0x962540,0x7000e59d,0x7c00100,0x230400,0x7000e59d,0xc000010,0x448000,0x7000e691,0x2802100,0x962460,0x7000e691,0x2802400,0x962460,0x7000e691,0x2806400,
+0x962460,0x7000e691,0x6800000,0x1329800,0x7000e691,0x6800100,0x962540,0x7000e691,0x7c00100,0x230400,0x7000e700,0x4000400,0x200400,0x7000e70e,0x7c00100,0x220400,
+0x7000e719,0x7c00100,0x220400,0x7000e719,0x7c00500,0x22040f,0x7000e853,0x7c00100,0x230400,0x7000e9a0,0x2802400,0x962460,0x7000e9a0,0x4000000,0x200000,0x7000e9a0,
+0x4000000,0x500000,0x7000e9a0,0x7c00100,0x230400,0x7000ea79,0x2802400,0x962460,0x7000ea79,0x4000000,0x200000,0x7000ea79,0x4000000,0xf00000,0x7000ea79,0x4000010,
+0x400000,0x7000ea79,0x7c00100,0x230400,0x7000eb8c,0x2802400,0x962460,0x7000eb8c,0x4000000,0x200000,0x7000eb8c,0x7c00100,0x230400,0x7000eca3,0x2802100,0x962460,
+0x7000eca3,0x2806400,0x962460,0x7000eca3,0x4000000,0x200000,0x7000eca3,0x6800000,0x1329800,0x7000eca3,0x6800100,0x962540,0x7000eca3,0x7c00100,0x230400,0x7000eca3,
+0xc000010,0x448000,0x7000ed95,0x6800000,0x1329800,0x7000ed95,0x7c00100,0x230400,0x7000ed95,0xc000010,0x448000,0x7000ee1c,0x2802500,0x1862460,0x7000ee1c,0x6800000,
+0x1329800,0x7000ee1c,0x7c00100,0x1830000,0x7000ee1c,0x7c00900,0x1830000,0x7000ef8f,0x4000000,0x200000,0x7000ef8f,0x7c00100,0x230400,0x7000f08e,0x4000000,0x200000,
+0x7000f08e,0x7c00100,0x230400,0x7000f159,0x2802100,0x962460,0x7000f159,0x7c00100,0x230400,0x7000f200,0x4000000,0x200000,0x7000f200,0x4000000,0x1200000,0x7000f200,
+0x4000000,0x1710000,0x7000f34b,0x2802400,0x962460,0x7000f34b,0x4000000,0x200000,0x7000f34b,0x4000010,0x400000,0x7000f34b,0x6800000,0x1329800,0x7000f34b,0x7c00100,
+0x230400,0x7000f34b,0x7c00900,0x230400,0x7000f34b,0xc000010,0x448000,0x7000f490,0x4000000,0x200000,0x7000f490,0x7c00100,0x230400,0x7000f5a5,0x7c00100,0x230400,
+0x7000f67b,0x4000000,0x200000,0x7000f67b,0x4000010,0x200000,0x7000f67b,0x7c00100,0x230400,0x7000f8a6,0x2802100,0x962460,0x7000f8a6,0x2802400,0x962460,0x7000f8a6,
+0x2806400,0x962460,0x7000f8a6,0x4000000,0x500000,0x7000f8a6,0x4000010,0xb00000,0x7000f8a6,0x4000800,0x200000,0x7000f8a6,0x6800100,0x962540,0x7000f8a6,0x6800100,
+0x962541,0x7000f8a6,0x7c00100,0x230400,0x7000f8a6,0xc000010,0x448000,0x7000f921,0x4000000,0x200000,0x7000fa00,0x4000000,0x200000,0x7000fb9e,0x2802100,0x962460,
+0x7000fb9e,0x2802400,0x962460,0x7000fb9e,0x2806400,0x962460,0x7000fb9e,0x4000000,0x200000,0x7000fb9e,0x6800000,0x1329800,0x7000fb9e,0x6800100,0x962540,0x7000fb9e,
+0x6800100,0x962541,0x7000fb9e,0x7c00100,0x230400,0x7000fc92,0x4000000,0x200000,0x7000fc92,0x6800000,0x1329800,0x7000fc92,0x7c00100,0x220400,0x7000fc92,0x7c00100,
+0x230400,0x7000fc92,0x7c00100,0x250400,0x700acd00,0x4000000,0xe00000,0x700acd00,0x4000000,0x2800000,0x700ace00,0x4000000,0xe00000,0x700acf00,0x4000000,0xe00000,
+0x700acf00,0x4000000,0x2800000,0x7050df11,0x4000000,0x200000,0x7050f719,0x80000,0x918820,0x7080afa1,0x2802400,0x962460,0x7090df11,0x2802400,0x962460,0x70d0e417,
+0x2802100,0x962460,0x70d0e417,0x2802400,0x962460,0x70d0e417,0x6800100,0x962540,0x70d0ea15,0x4000010,0x400000,0x8000120f,0x7c00100,0x230400,0x80001524,0x7c00100,
+0x230400,0x8000171a,0x7c00100,0x230400,0x80002006,0x7c00100,0x220400,0x80002006,0x7c00100,0x250400,0x80002a00,0x4000000,0x1500000,0x80002d00,0x4000000,0x200000,
+0x80005208,0x2802400,0x962460,0x80005c00,0x4000000,0x200000,0x80007300,0x24000000,0x200000,0x80009519,0x7c00100,0x220400,0x80009519,0x7c00100,0x230400,0x80009519,
+0x7c00100,0x250400,0x80009865,0x7c00100,0x230400,0x8000a008,0x2802100,0x962460,0x8000b30a,0x4000000,0x500000,0x8000b30a,0x7c00100,0x230400,0x8000cd00,0x4000000,
+0xe00000,0x8000d202,0x2802500,0x962460,0x8000d202,0x7c00100,0x230400,0x8000d68d,0x4000000,0x200000,0x8000d997,0x2802000,0x962460,0x8000d997,0x2802400,0x962460,
+0x8000d997,0x4000000,0x400000,0x8000d997,0x4000000,0x500000,0x8000d997,0x7c00100,0x230400,0x8000d997,0xc000010,0x448000,0x8000e489,0x2802100,0x962460,0x8000e489,
+0x7c00100,0x230400,0x8000e719,0x7c00100,0x220400,0x8000f8a6,0x2802100,0x962460,0x8000f8a6,0x7c00100,0x230400,0x8000f8a6,0xc000010,0x448000,0x8000fda1,0x2802100,
+0x1862460,0x8000fda1,0x2806400,0x1862460,0x8000fda1,0x4000000,0x1800000,0x8000fda1,0x6800000,0x1329800,0x8000fda1,0x6800100,0x1862400,0x8000fda1,0x6800100,0x1862540,
+0x8000fda1,0x7c00100,0x1830000,0x8000fda1,0xc000010,0x448000,0x8000fe9c,0x7c00100,0x230400,0x8000fe9c,0x7c00100,0x830400,0x8000fe9c,0x7c00100,0x1430400,0x8000ff06,
+0x7c00100,0x220400,0x80010165,0x7c00100,0x230400,0x800102a2,0x4000000,0x200000,0x800102a2,0x7c00100,0x230400,0x800103a4,0x7c00100,0x230400,0x800103a4,0xc000010,
+0x448000,0x8001044c,0x4000000,0x200000,0x8001044c,0x7c00100,0x220400,0x8001044c,0x7c00100,0x250400,0x80010670,0x2802000,0x962460,0x80010670,0x4000000,0x200000,
+0x80010670,0x4000010,0x400000,0x80010670,0xc000010,0x448000,0x800a4711,0x7c40300,0xe30000,0x800acd00,0x4000000,0xe00000,0x800acd00,0x4000000,0x2902460,0x800ace00,
+0x4000000,0xe00000,0x800acf00,0x4000000,0xe00000,0x800b0011,0x7c40300,0xe30000,0x800b0500,0x4000000,0xe00000,0x800b0500,0x4000000,0x2800000,0x90001615,0x7c00100,
+0x230400,0x9000171a,0x4000000,0x200000,0x9000171a,0x7c00100,0x230400,0x90003000,0x24000000,0x200000,0x90007f0e,0x4000000,0x200000,0x90008301,0x2802400,0x962460,
+0x90008e00,0x24000000,0x400000,0x90009519,0x7c00100,0x250400,0x9000a16f,0x2802100,0x962460,0x9000d200,0x80000000,0x218960,0x9000d202,0x2802000,0x962460,0x9000d202,
+0x2802100,0x962460,0x9000d202,0x7c00100,0x230400,0x9000e59d,0x2802100,0x962460,0x90010500,0x4000000,0xe00000,0x900107a7,0x2802100,0x962460,0x900107a7,0x2802400,
+0x962460,0x900107a7,0x2802c00,0x962460,0x900107a7,0x4000000,0x1400000,0x900107a7,0x6800000,0x1329800,0x900107a7,0x7c00100,0x220400,0x900107a7,0x7c00100,0x250400,
+0x900108a8,0x2802100,0x962460,0x900108a8,0x2806400,0x962460,0x900108a8,0x4000000,0x200000,0x900108a8,0x4000000,0x400000,0x900108a8,0x4000010,0x400000,0x900108a8,
+0x6800000,0x1329800,0x900108a8,0x6800100,0x962540,0x900108a8,0x7c00100,0x230400,0x900108a8,0xc000010,0x448000,0x90010908,0x7c00100,0x220400,0x90010a38,0x2802100,
+0x962460,0x90010ca9,0x2802100,0x962460,0x90010ca9,0x4000000,0x500000,0x90010ca9,0x4000010,0xb00000,0x90010ca9,0x6800100,0x962540,0x90010ca9,0x7c00100,0x230400,
+0x90010d1b,0x4000000,0x500000,0x90010eaa,0x2802100,0x962460,0x90010eaa,0x2802400,0x962460,0x90010eaa,0x2806400,0x962460,0x90010eaa,0x4000000,0x200000,0x90010eaa,
+0x4000000,0x400000,0x90010eaa,0x4000010,0x400000,0x90010eaa,0x6800000,0x1329800,0x90010eaa,0x6800100,0x962540,0x90010eaa,0x7c00100,0x230400,0x90010eaa,0xc000010,
+0x448000,0x90010fab,0x7c00100,0x220400,0x90010fab,0x7c00100,0x250400,0x9002c300,0x4000000,0x100000,0x900ac400,0x4000000,0xe0000d,0x900acd00,0x4000000,0xe00000,
+0x900acd00,0x4000000,0x2800000,0x900acf00,0x4000000,0xe00000,0x900b0500,0x4000000,0xe00000,0x900b0500,0x4000000,0x2800000,0x900b0b9a,0x7c00900,0x1230400,0x900b109a,
+0x7c00300,0xe30000,0x900b119a,0x7c00300,0xe30000,0x90408e06,0x24000000,0x400000,0xa0001004,0x4000000,0x200000,0xa0001004,0x7c00100,0x230400,0xa000120f,0x2802100,
+0x962460,0xa000120f,0x2802400,0x962460,0xa000171a,0x2802100,0x962460,0xa000171a,0x2806400,0x962460,0xa0002a00,0x4000000,0x1600000,0xa0003000,0x24000000,0x200000,
+0xa000581e,0x7c00100,0x230400,0xa0007300,0x24000000,0x200000,0xa0008301,0x2802400,0x962460,0xa0008e00,0x24000000,0x400000,0xa000cf00,0x4000000,0xe00000,0xa0010500,
+0x4000000,0x200000,0xa00114af,0x2802100,0x962460,0xa00114af,0x2802400,0x962460,0xa00114af,0x2806400,0x962460,0xa00114af,0x6800000,0x1329800,0xa00114af,0x7c00100,
+0x230400,0xa00114af,0x7c00100,0x230560,0xa00116b0,0x2802100,0x962460,0xa00116b0,0x2802800,0x962460,0xa00116b0,0x2806400,0x962460,0xa00116b0,0x4000000,0x400000,
+0xa00116b0,0x4000000,0x500000,0xa00116b0,0x4000010,0x400000,0xa00116b0,0x6800100,0x962540,0xa00116b0,0x7c00100,0x230400,0xa00116b0,0x7c00100,0x230560,0xa00116b0,
+0xc000010,0x448000,0xa0011722,0x7c00100,0x230400,0xa00118b1,0x2802000,0x962460,0xa00118b1,0x2802100,0x962460,0xa00118b1,0x2806400,0x962460,0xa00118b1,0x4000000,
+0x200000,0xa00118b1,0x4000000,0x400000,0xa00118b1,0x4000000,0x500000,0xa00118b1,0x6800100,0x962540,0xa00118b1,0x7c00100,0x230400,0xa00118b1,0x7c00100,0x230560,
+0xa00118b1,0xc000010,0x448000,0xa00a4005,0x7c00100,0xe30400,0xa00a4711,0x7c40300,0xe30000,0xa00ac400,0x4000000,0xe00000,0xa00acb14,0x7c00100,0xe30000,0xa00acf00,
+0x4000000,0xe00000,0xa00b0500,0x4000000,0xe00000,0xa00b0500,0x4000000,0x2800000,0xa00b0b96,0x7c00900,0x1230400,0xa00b1211,0x7c40300,0xe30000,0xa00b1314,0x7c00100,
+0xe30000,0xa00b1596,0x7c00300,0xe30000,0xa040afb7,0x6800400,0x962540,0xa08083b8,0x2802400,0x962460,0xb0000a03,0x7c00100,0x220400,0xb0000b13,0x7c00100,0x2633800,
+0xb0001004,0x2802000,0x962460,0xb0001110,0x4000000,0x200000,0xb0001524,0x2802100,0x962460,0xb0001615,0x4000000,0x500000,0xb000251b,0x7c00100,0x230400,0xb0007300,
+0x24000000,0x200000,0xb0008939,0x4000000,0x200000,0xb0008939,0x7c00100,0x230400,0xb0008e00,0x24000000,0x200000,0xb0008e00,0x24000000,0x400000,0xb0008e00,0x24000010,
+0x400000,0xb0009257,0x2802000,0x962460,0xb0009257,0x4000000,0x1600000,0xb0009519,0x7c00100,0x220400,0xb0009519,0x7c00100,0x250400,0xb0009a00,0x4000000,0x200000,
+0xb000b30a,0x2802100,0x962460,0xb000b30a,0x7c00100,0x230400,0xb000c178,0x80000000,0x218960,0xb000c300,0x4000000,0x200000,0xb000d202,0x2802000,0x962460,0xb000d476,
+0x6800100,0x962540,0xb000d476,0x7c00100,0x230400,0xb000e300,0x4000000,0xe00000,0xb000fda1,0x7c00100,0x1830000,0xb0010eaa,0x2802000,0x962460,0xb00116b0,0x7c00100,
+0x230400,0xb0011900,0x4000000,0xe00000,0xb0011ab2,0x2802100,0x962460,0xb0011ab2,0x2802400,0x962460,0xb0011ab2,0x2806400,0x962460,0xb0011ab2,0x4000000,0x200000,
+0xb0011ab2,0x6800100,0x962540,0xb0011ab2,0x7c00100,0x230400,0xb0011b0c,0x7c00100,0x230400,0xb0011cb3,0x2802100,0x962460,0xb0011cb3,0x2806400,0x962460,0xb0011cb3,
+0x6800000,0x1329800,0xb0011cb3,0x6800100,0x962540,0xb0011cb3,0x7c00100,0x230400,0xb0011db6,0x2802500,0x962460,0xb0011db6,0x6800000,0x1329800,0xb0011db6,0x7c00100,
+0x230400,0xb0011db6,0x7c00500,0x230400,0xb0011e00,0x4000000,0x200000,0xb0011e00,0x4000000,0x1500000,0xb0011fb4,0x2802100,0x962460,0xb0011fb4,0x6800100,0x962540,
+0xb0011fb4,0x7c00100,0x230400,0xb0011fb4,0xc000010,0x248000,0xb0012000,0x4000000,0x200000,0xb00121b5,0x4000000,0x200000,0xb00121b5,0x4000010,0x400000,0xb00121b5,
+0x7c00100,0x220400,0xb00121b5,0x7c00100,0x250400,0xb00121b5,0xc000010,0x448000,0xb00122b8,0x4000000,0x200000,0xb00122b8,0x7c00100,0x230400,0xb00123b7,0x2802400,
+0x962460,0xb00123b7,0x4000000,0x200000,0xb00123b7,0x7c00100,0x230400,0xb00123b7,0xc000010,0x248000,0xb00a4005,0x7c00100,0xe30400,0xb00a4711,0x7c40300,0xe30000,
+0xb00acf00,0x4000000,0xe00000,0xb00b0500,0x4000000,0xe00000,0xb00b0500,0x4000000,0x2800000,0xb00b109a,0x7c00300,0xe30000,0xb080e487,0x2802000,0x962460,0xc0001524,
+0x4000000,0x500000,0xc0001a18,0x2806400,0x1862460,0xc0001a18,0x7c00100,0x1830000,0xc0007300,0x24000000,0x200000,0xc0008e00,0x24000010,0x400000,0xc0009519,0x7c00100,
+0x220400,0xc0009519,0x7c00100,0x250400,0xc000c300,0x4000000,0x20000f,0xc000d85c,0x2802100,0x962460,0xc000d85c,0x6800100,0x962540,0xc000d85c,0x7c00100,0x230400,
+0xc000dc99,0x7c00100,0x230400,0xc000e719,0x7c00100,0x220400,0xc00107a7,0x7c00100,0x230400,0xc0010eaa,0x7c00100,0x230400,0xc00116b0,0x7c00100,0x230560,0xc0011900,
+0x4000000,0x200000,0xc0012447,0,0x818820,0xc0012447,0,0xc18820,0xc0012447,0,0x1418820,0xc00125b9,0x7c00100,0x230400,0xc00126bb,0x2802100,
+0x962460,0xc00126bb,0x2806400,0x962460,0xc00126bb,0x4000000,0x500000,0xc00126bb,0x6800100,0x962540,0xc00126bb,0x7c00100,0x230400,0xc00127ba,0x2802400,0x962460,
+0xc00127ba,0x4000000,0x200000,0xc00127ba,0x6800000,0x1329800,0xc00127ba,0x7c00100,0x230400,0xc00127ba,0x7c00900,0x230400,0xc0012800,0x4000000,0x200000,0xc0012b23,
+0x4000000,0x200000,0xc0012b23,0x4000000,0x400000,0xc0012b23,0x4000000,0x1500000,0xc0012cbc,0x2802400,0x962460,0xc0012cbc,0x4000000,0x1600000,0xc0012cbc,0x6800000,
+0x1329800,0xc0012cbc,0x7c00100,0x230400,0xc00acf00,0x4000000,0xe00000,0xc00ae300,0x4000000,0xe00000,0xc00b0500,0x4000000,0xe00000,0xc00b0500,0x4000000,0x2800000,
+0xc00b0b11,0x4000000,0x1200000,0xc00b0b11,0x7c00900,0x1230400,0xc00b109a,0x7c00300,0xe30000,0xc00b2914,0x7c00100,0x2530000,0xc00b2916,0x7c00100,0x2530c00,0xc00b2a00,
+0x4000000,0xe00000,0xc040af5e,0x7c00100,0x230400,0xc0c12b89,0x4000000,0x200000,0xc14a44ca,0x4000000,0xe0000d,0xd000131f,0x2802c00,0x962460,0xd000171a,0x7c00100,
+0x230400,0xd0001821,0x2802100,0x962460,0xd0007300,0x24000000,0x200000,0xd0008e00,0x24000000,0x200000,0xd0008f3a,0x2806000,0x962460,0xd0009519,0x7c00100,0x220400,
+0xd0009519,0x7c00100,0x250400,0xd000a500,0x4000000,0x200000,0xd000c300,0x4000000,0xe00000,0xd000d202,0x7c00100,0x230400,0xd000d476,0x7c00100,0x230400,0xd000d997,
+0x2802100,0x962460,0xd000d997,0x6800100,0x962540,0xd000e001,0x2802100,0x962460,0xd000e700,0x4000400,0x200000,0xd000e719,0x7c00100,0x220400,0xd000e719,0x7c00500,
+0x22040f,0xd000fa00,0x4000000,0xe00000,0xd0010eaa,0x4000010,0x400000,0xd0010eaa,0x7c00100,0x230400,0xd0012dbd,0x4000000,0x200000,0xd0012dbd,0x7c00100,0x230400,
+0xd0012fbe,0x2802100,0x962460,0xd0012fbe,0x2802400,0x962460,0xd0012fbe,0x2806400,0x962460,0xd0012fbe,0x4000000,0x400000,0xd0012fbe,0x6800000,0x1329800,0xd0012fbe,
+0x6800100,0x962540,0xd0012fbe,0x6800100,0x962541,0xd0012fbe,0x6804400,0x962540,0xd0012fbe,0x7c00100,0x230400,0xd0012fbe,0x7c00100,0x230560,0xd0012fbe,0xc000010,
+0x448000,0xd0013183,0x7c00100,0x230400,0xd0013200,0x4000000,0x200000,0xd0013200,0x6800000,0x1329805,0xd00134c0,0x2802100,0x962460,0xd00134c0,0x4000002,0x400000,
+0xd00134c0,0x7c00100,0x230400,0xd00a4305,0x7c00100,0xe30400,0xd00a4611,0x7c40300,0xe30000,0xd00a4711,0x7c40300,0xe30000,0xd00a5e11,0x7c40300,0xe30000,0xd00acf00,
+0x4000000,0xe00000,0xd00b0500,0x4000000,0xe00000,0xd00b0500,0x4000000,0x2800000,0xd00b0b11,0x6800500,0x962540,0xd00b0bbf,0x2802200,0xc62460,0xd00b119a,0x7c00300,
+0xe30000,0xd00b2a00,0x4000000,0xe00000,0xd00b2e11,0x7c40300,0xe30000,0xd00b30bf,0x7c00300,0x230000,0xd00b339a,0x7c00300,0xe30000,0xe0000c02,0xc000010,0xb48000,
+0xe0001524,0x2802400,0x962460,0xe0001524,0x7c00100,0x230400,0xe0001615,0x7c00100,0x230400,0xe000251b,0x12882000,0x962460,0xe0002a00,0x4000000,0x1500000,0xe0005102,
+0x4000000,0x200000,0xe0005c00,0x4000000,0x200000,0xe000622a,0x6804400,0x962540,0xe000622a,0x7c00100,0x230400,0xe0008838,0x7c00100,0x220400,0xe0008838,0x7c00100,
+0x250400,0xe0008e00,0x24000000,0x810000,0xe0008e00,0x24000000,0x1410000,0xe0008e00,0x24000002,0x400000,0xe0008e00,0x2c000010,0xb48000,0xe000933e,0x7c00100,0x230400,
+0xe000933e,0xc000010,0x448000,0xe0009519,0x7c00100,0x220400,0xe0009519,0x7c00100,0x22040f,0xe0009519,0x7c00100,0x250400,0xe000c178,0x2802100,0x962460,0xe000c941,
+0x2802100,0x962460,0xe000c941,0x2806400,0x962460,0xe000c941,0x7c00100,0x230400,0xe000d202,0x2802400,0x962460,0xe000d202,0x7c00100,0x230400,0xe000d202,0x7c00500,
+0x230400,0xe000dc99,0x4000000,0x200000,0xe000e001,0x2802100,0x962460,0xe000e001,0x2802400,0x962460,0xe000fda1,0x7c00100,0x1830000,0xe0013502,0x2802400,0x962460,
+0xe0013502,0x4000000,0x200000,0xe0013502,0x7c00100,0x230400,0xe0013502,0x80000000,0x218960,0xe00136c1,0x4000000,0x200000,0xe00136c1,0x7c00100,0x230400,0xe001370b,
+0x7c00100,0x230400,0xe0013919,0x7c00500,0x220400,0xe0013919,0x7c00500,0x22040f,0xe0013919,0x7c00d00,0x23040f,0xe0013a19,0x7c00100,0x220400,0xe0013a19,0x7c00100,
+0x230400,0xe0013bc2,0x2802400,0x962460,0xe0013bc2,0x7c00100,0x230400,0xe0013bc2,0xc000010,0x248000,0xe0013cc3,0x6800000,0x1329800,0xe0013cc3,0x7c00100,0x230400,
+0xe0013dc4,0x2802400,0x962460,0xe0013dc4,0x7c00100,0x230400,0xe0013e28,0x7c00100,0x230400,0xe0013fc5,0x7c00100,0x220400,0xe0013fc5,0x7c00100,0x250400,0xe0014000,
+0x4000000,0x200000,0xe0014001,0x2802400,0x962460,0xe00a4711,0x7c40300,0xe30000,0xe00a5e11,0x7c40300,0xe30000,0xe00ac511,0x7c40300,0xe30000,0xe00acf00,0x4000000,
+0xe00000,0xe00ae300,0x4000000,0xe00000,0xe00b0500,0x4000000,0xe00000,0xe00b1314,0x7c00100,0xe30000,0xe00b1316,0x7c00100,0xe30c00,0xe00b2a00,0x4000000,0xe00000,
+0xe00b2a00,0x4000000,0x2800000,0xe00b3816,0x7c00500,0x230c00,0xe0808328,0x2802400,0x962460,0xf0001615,0x6800100,0x962540,0xf0001a18,0x2802000,0x1862460,0xf000c247,
+0x7c00100,0x230400,0xf000d000,0x4000000,0xe00000,0xf000e300,0x4000000,0xe00000,0xf000e59d,0x2802100,0x962460,0xf000e59d,0x7c00100,0x230400,0xf0012447,0,
+0x818820,0xf0012447,0,0xc18820,0xf0012447,0,0x1418820,0xf0012447,0x2802000,0x962460,0xf0012447,0x2802400,0x962460,0xf0012447,0x7c00100,0x230400,
+0xf0013a19,0x7c00100,0x220400,0xf0014102,0x2802400,0x962460,0xf0014308,0x2802100,0x962460,0xf0014308,0x7c00500,0x22040e,0xf0014308,0x7c00500,0x22040f,0xf001440a,
+0x4000000,0x500000,0xf0014500,0x4000000,0x200000,0xf00146c6,0x2802100,0x962460,0xf00146c6,0x2806000,0x962460,0xf00146c6,0x4000000,0xe00000,0xf00146c6,0x6800000,
+0x1329800,0xf00146c6,0x6800100,0x962540,0xf00146c6,0x6804000,0x962540,0xf00146c6,0x7c00100,0x230400,0xf00146c6,0x7c00100,0x230560,0xf00146c6,0xc000010,0x448000,
+0xf00147c7,0x2802000,0x962460,0xf00147c7,0x6800000,0x1329800,0xf00147c7,0x7c00100,0x230400,0xf00ac511,0x7c40300,0xe30000,0xf00acf00,0x4000000,0xe00000,0xf00b2914,
+0x7c00100,0x2530000,0xf00b2916,0x7c00100,0x2530c00,0xf00b2a00,0x4000000,0xe00000,0xf00b2a00,0x4000000,0x2800000,0xf00b4211,0x7c40300,0xe30000};
-static const int32_t countPropsVectors=7095;
+static const int32_t countPropsVectors=7230;
static const int32_t propsVectorsColumns=3;
static const uint16_t scriptExtensions[282]={
0x800e,0x8019,8,0x8059,8,2,8,0x8038,8,6,8,0x8019,2,0x22,0x25,0x57,
@@ -3937,6 +4006,6 @@ static const uint16_t scriptExtensions[282]={
0x804f,0x37,0x804e,2,0x8057,2,0x8025,2,0x105,0x2f,0x31,0x8053,0x2f,0x31,0x80c1,0x2f,
0x8031,2,0x8007,0x79,0x80c2,0x79,0x113,0x89,0x87,0x8087};
-static const int32_t indexes[UPROPS_INDEX_COUNT]={0x2c64,0x2c64,0x2c64,0x2c64,0x6b06,3,0x86bd,0x874a,0x874a,0x874a,0xb40c5,0x2a75a31,0,0,0,0};
+static const int32_t indexes[UPROPS_INDEX_COUNT]={0x2d08,0x2d08,0x2d08,0x2d08,0x6ce6,3,0x8924,0x89b1,0x89b1,0x89b1,0xb47c7,0x2a75a31,0,0,0,0};
#endif // INCLUDED_FROM_UCHAR_C
diff --git a/thirdparty/icu4c/common/ucharstrie.cpp b/thirdparty/icu4c/common/ucharstrie.cpp
index e0b33af519..24ab425777 100644
--- a/thirdparty/icu4c/common/ucharstrie.cpp
+++ b/thirdparty/icu4c/common/ucharstrie.cpp
@@ -308,13 +308,13 @@ UCharsTrie::findUniqueValueFromBranch(const UChar *pos, int32_t length,
}
} else {
uniqueValue=value;
- haveUniqueValue=TRUE;
+ haveUniqueValue=true;
}
} else {
if(!findUniqueValue(pos+value, haveUniqueValue, uniqueValue)) {
return NULL;
}
- haveUniqueValue=TRUE;
+ haveUniqueValue=true;
}
} while(--length>1);
return pos+1; // ignore the last comparison unit
@@ -330,9 +330,9 @@ UCharsTrie::findUniqueValue(const UChar *pos, UBool haveUniqueValue, int32_t &un
}
pos=findUniqueValueFromBranch(pos, node+1, haveUniqueValue, uniqueValue);
if(pos==NULL) {
- return FALSE;
+ return false;
}
- haveUniqueValue=TRUE;
+ haveUniqueValue=true;
node=*pos++;
} else if(node<kMinValueLead) {
// linear-match node
@@ -348,14 +348,14 @@ UCharsTrie::findUniqueValue(const UChar *pos, UBool haveUniqueValue, int32_t &un
}
if(haveUniqueValue) {
if(value!=uniqueValue) {
- return FALSE;
+ return false;
}
} else {
uniqueValue=value;
- haveUniqueValue=TRUE;
+ haveUniqueValue=true;
}
if(isFinal) {
- return TRUE;
+ return true;
}
pos=skipNodeValue(pos, node);
node&=kNodeTypeMask;
diff --git a/thirdparty/icu4c/common/ucharstriebuilder.cpp b/thirdparty/icu4c/common/ucharstriebuilder.cpp
index 3871df6c27..be3260941e 100644
--- a/thirdparty/icu4c/common/ucharstriebuilder.cpp
+++ b/thirdparty/icu4c/common/ucharstriebuilder.cpp
@@ -163,7 +163,7 @@ UCharsTrieBuilder::buildUnicodeString(UStringTrieBuildOption buildOption, Unicod
UErrorCode &errorCode) {
buildUChars(buildOption, errorCode);
if(U_SUCCESS(errorCode)) {
- result.setTo(FALSE, uchars+(ucharsCapacity-ucharsLength), ucharsLength);
+ result.setTo(false, uchars+(ucharsCapacity-ucharsLength), ucharsLength);
}
return result;
}
@@ -188,7 +188,7 @@ UCharsTrieBuilder::buildUChars(UStringTrieBuildOption buildOption, UErrorCode &e
}
uprv_sortArray(elements, elementsLength, (int32_t)sizeof(UCharsTrieElement),
compareElementStrings, &strings,
- FALSE, // need not be a stable sort
+ false, // need not be a stable sort
&errorCode);
if(U_FAILURE(errorCode)) {
return;
@@ -322,7 +322,7 @@ UCharsTrieBuilder::createLinearMatchNode(int32_t i, int32_t unitIndex, int32_t l
UBool
UCharsTrieBuilder::ensureCapacity(int32_t length) {
if(uchars==NULL) {
- return FALSE; // previous memory allocation had failed
+ return false; // previous memory allocation had failed
}
if(length>ucharsCapacity) {
int32_t newCapacity=ucharsCapacity;
@@ -335,7 +335,7 @@ UCharsTrieBuilder::ensureCapacity(int32_t length) {
uprv_free(uchars);
uchars=NULL;
ucharsCapacity=0;
- return FALSE;
+ return false;
}
u_memcpy(newUChars+(newCapacity-ucharsLength),
uchars+(ucharsCapacity-ucharsLength), ucharsLength);
@@ -343,7 +343,7 @@ UCharsTrieBuilder::ensureCapacity(int32_t length) {
uchars=newUChars;
ucharsCapacity=newCapacity;
}
- return TRUE;
+ return true;
}
int32_t
diff --git a/thirdparty/icu4c/common/ucharstrieiterator.cpp b/thirdparty/icu4c/common/ucharstrieiterator.cpp
index b3132241fe..2ba43692dd 100644
--- a/thirdparty/icu4c/common/ucharstrieiterator.cpp
+++ b/thirdparty/icu4c/common/ucharstrieiterator.cpp
@@ -26,7 +26,7 @@ UCharsTrie::Iterator::Iterator(ConstChar16Ptr trieUChars, int32_t maxStringLengt
: uchars_(trieUChars),
pos_(uchars_), initialPos_(uchars_),
remainingMatchLength_(-1), initialRemainingMatchLength_(-1),
- skipValue_(FALSE),
+ skipValue_(false),
maxLength_(maxStringLength), value_(0), stack_(NULL) {
if(U_FAILURE(errorCode)) {
return;
@@ -48,7 +48,7 @@ UCharsTrie::Iterator::Iterator(const UCharsTrie &trie, int32_t maxStringLength,
: uchars_(trie.uchars_), pos_(trie.pos_), initialPos_(trie.pos_),
remainingMatchLength_(trie.remainingMatchLength_),
initialRemainingMatchLength_(trie.remainingMatchLength_),
- skipValue_(FALSE),
+ skipValue_(false),
maxLength_(maxStringLength), value_(0), stack_(NULL) {
if(U_FAILURE(errorCode)) {
return;
@@ -82,7 +82,7 @@ UCharsTrie::Iterator &
UCharsTrie::Iterator::reset() {
pos_=initialPos_;
remainingMatchLength_=initialRemainingMatchLength_;
- skipValue_=FALSE;
+ skipValue_=false;
int32_t length=remainingMatchLength_+1; // Remaining match length.
if(maxLength_>0 && length>maxLength_) {
length=maxLength_;
@@ -100,12 +100,12 @@ UCharsTrie::Iterator::hasNext() const { return pos_!=NULL || !stack_->isEmpty();
UBool
UCharsTrie::Iterator::next(UErrorCode &errorCode) {
if(U_FAILURE(errorCode)) {
- return FALSE;
+ return false;
}
const UChar *pos=pos_;
if(pos==NULL) {
if(stack_->isEmpty()) {
- return FALSE;
+ return false;
}
// Pop the state off the stack and continue with the next outbound edge of
// the branch node.
@@ -118,7 +118,7 @@ UCharsTrie::Iterator::next(UErrorCode &errorCode) {
if(length>1) {
pos=branchNext(pos, length, errorCode);
if(pos==NULL) {
- return TRUE; // Reached a final value.
+ return true; // Reached a final value.
}
} else {
str_.append(*pos++);
@@ -135,7 +135,7 @@ UCharsTrie::Iterator::next(UErrorCode &errorCode) {
if(skipValue_) {
pos=skipNodeValue(pos, node);
node&=kNodeTypeMask;
- skipValue_=FALSE;
+ skipValue_=false;
} else {
// Deliver value for the string so far.
UBool isFinal=(UBool)(node>>15);
@@ -152,9 +152,9 @@ UCharsTrie::Iterator::next(UErrorCode &errorCode) {
// next time.
// Instead, keep pos_ on the node lead unit itself.
pos_=pos-1;
- skipValue_=TRUE;
+ skipValue_=true;
}
- return TRUE;
+ return true;
}
}
if(maxLength_>0 && str_.length()==maxLength_) {
@@ -166,7 +166,7 @@ UCharsTrie::Iterator::next(UErrorCode &errorCode) {
}
pos=branchNext(pos, node+1, errorCode);
if(pos==NULL) {
- return TRUE; // Reached a final value.
+ return true; // Reached a final value.
}
} else {
// Linear-match node, append length units to str_.
diff --git a/thirdparty/icu4c/common/uchriter.cpp b/thirdparty/icu4c/common/uchriter.cpp
index 2967375a6a..f2a9953841 100644
--- a/thirdparty/icu4c/common/uchriter.cpp
+++ b/thirdparty/icu4c/common/uchriter.cpp
@@ -171,7 +171,7 @@ UCharCharacterIterator::nextPostInc() {
UBool
UCharCharacterIterator::hasNext() {
- return (UBool)(pos < end ? TRUE : FALSE);
+ return (UBool)(pos < end ? true : false);
}
UChar
@@ -185,7 +185,7 @@ UCharCharacterIterator::previous() {
UBool
UCharCharacterIterator::hasPrevious() {
- return (UBool)(pos > begin ? TRUE : FALSE);
+ return (UBool)(pos > begin ? true : false);
}
UChar32
diff --git a/thirdparty/icu4c/common/ucln_cmn.cpp b/thirdparty/icu4c/common/ucln_cmn.cpp
index f3e07c6b89..ea797d1344 100644
--- a/thirdparty/icu4c/common/ucln_cmn.cpp
+++ b/thirdparty/icu4c/common/ucln_cmn.cpp
@@ -120,5 +120,5 @@ U_CFUNC UBool ucln_lib_cleanup(void) {
#if !UCLN_NO_AUTO_CLEANUP && (defined(UCLN_AUTO_ATEXIT) || defined(UCLN_AUTO_LOCAL))
ucln_unRegisterAutomaticCleanup();
#endif
- return TRUE;
+ return true;
}
diff --git a/thirdparty/icu4c/common/ucnv.cpp b/thirdparty/icu4c/common/ucnv.cpp
index 019bcb6a79..26baa550c3 100644
--- a/thirdparty/icu4c/common/ucnv.cpp
+++ b/thirdparty/icu4c/common/ucnv.cpp
@@ -163,7 +163,7 @@ ucnv_safeClone(const UConverter* cnv, void *stackBuffer, int32_t *pBufferSize, U
UErrorCode cbErr;
UConverterToUnicodeArgs toUArgs = {
sizeof(UConverterToUnicodeArgs),
- TRUE,
+ true,
NULL,
NULL,
NULL,
@@ -173,7 +173,7 @@ ucnv_safeClone(const UConverter* cnv, void *stackBuffer, int32_t *pBufferSize, U
};
UConverterFromUnicodeArgs fromUArgs = {
sizeof(UConverterFromUnicodeArgs),
- TRUE,
+ true,
NULL,
NULL,
NULL,
@@ -269,7 +269,7 @@ ucnv_safeClone(const UConverter* cnv, void *stackBuffer, int32_t *pBufferSize, U
/* Copy initial state */
uprv_memcpy(localConverter, cnv, sizeof(UConverter));
- localConverter->isCopyLocal = localConverter->isExtraLocal = FALSE;
+ localConverter->isCopyLocal = localConverter->isExtraLocal = false;
/* copy the substitution string */
if (cnv->subChars == (uint8_t *)cnv->subUChars) {
@@ -306,7 +306,7 @@ ucnv_safeClone(const UConverter* cnv, void *stackBuffer, int32_t *pBufferSize, U
if(localConverter == (UConverter*)stackBuffer) {
/* we're using user provided data - set to not destroy */
- localConverter->isCopyLocal = TRUE;
+ localConverter->isCopyLocal = true;
}
/* allow callback functions to handle any memory allocation */
@@ -352,7 +352,7 @@ ucnv_close (UConverter * converter)
if (converter->fromCharErrorBehaviour != UCNV_TO_U_DEFAULT_CALLBACK) {
UConverterToUnicodeArgs toUArgs = {
sizeof(UConverterToUnicodeArgs),
- TRUE,
+ true,
NULL,
NULL,
NULL,
@@ -368,7 +368,7 @@ ucnv_close (UConverter * converter)
if (converter->fromUCharErrorBehaviour != UCNV_FROM_U_DEFAULT_CALLBACK) {
UConverterFromUnicodeArgs fromUArgs = {
sizeof(UConverterFromUnicodeArgs),
- TRUE,
+ true,
NULL,
NULL,
NULL,
@@ -580,7 +580,7 @@ static void _reset(UConverter *converter, UConverterResetChoice choice,
if(choice<=UCNV_RESET_TO_UNICODE && converter->fromCharErrorBehaviour != UCNV_TO_U_DEFAULT_CALLBACK) {
UConverterToUnicodeArgs toUArgs = {
sizeof(UConverterToUnicodeArgs),
- TRUE,
+ true,
NULL,
NULL,
NULL,
@@ -595,7 +595,7 @@ static void _reset(UConverter *converter, UConverterResetChoice choice,
if(choice!=UCNV_RESET_TO_UNICODE && converter->fromUCharErrorBehaviour != UCNV_FROM_U_DEFAULT_CALLBACK) {
UConverterFromUnicodeArgs fromUArgs = {
sizeof(UConverterFromUnicodeArgs),
- TRUE,
+ true,
NULL,
NULL,
NULL,
@@ -634,19 +634,19 @@ static void _reset(UConverter *converter, UConverterResetChoice choice,
U_CAPI void U_EXPORT2
ucnv_reset(UConverter *converter)
{
- _reset(converter, UCNV_RESET_BOTH, TRUE);
+ _reset(converter, UCNV_RESET_BOTH, true);
}
U_CAPI void U_EXPORT2
ucnv_resetToUnicode(UConverter *converter)
{
- _reset(converter, UCNV_RESET_TO_UNICODE, TRUE);
+ _reset(converter, UCNV_RESET_TO_UNICODE, true);
}
U_CAPI void U_EXPORT2
ucnv_resetFromUnicode(UConverter *converter)
{
- _reset(converter, UCNV_RESET_FROM_UNICODE, TRUE);
+ _reset(converter, UCNV_RESET_FROM_UNICODE, true);
}
U_CAPI int8_t U_EXPORT2
@@ -871,7 +871,7 @@ _fromUnicodeWithCallback(UConverterFromUnicodeArgs *pArgs, UErrorCode *err) {
/* avoid compiler warnings - not otherwise necessary, and the values do not matter */
realSourceLimit=NULL;
- realFlush=FALSE;
+ realFlush=false;
realSourceIndex=0;
} else {
/*
@@ -887,7 +887,7 @@ _fromUnicodeWithCallback(UConverterFromUnicodeArgs *pArgs, UErrorCode *err) {
uprv_memcpy(replay, cnv->preFromU, -cnv->preFromULength*U_SIZEOF_UCHAR);
pArgs->source=replay;
pArgs->sourceLimit=replay-cnv->preFromULength;
- pArgs->flush=FALSE;
+ pArgs->flush=false;
sourceIndex=-1;
cnv->preFromULength=0;
@@ -923,11 +923,11 @@ _fromUnicodeWithCallback(UConverterFromUnicodeArgs *pArgs, UErrorCode *err) {
cnv->fromUChar32==0);
} else {
/* handle error from ucnv_convertEx() */
- converterSawEndOfInput=FALSE;
+ converterSawEndOfInput=false;
}
/* no callback called yet for this iteration */
- calledCallback=FALSE;
+ calledCallback=false;
/* no sourceIndex adjustment for conversion, only for callback output */
errorInputLength=0;
@@ -976,7 +976,7 @@ _fromUnicodeWithCallback(UConverterFromUnicodeArgs *pArgs, UErrorCode *err) {
uprv_memcpy(replay, cnv->preFromU, -cnv->preFromULength*U_SIZEOF_UCHAR);
pArgs->source=replay;
pArgs->sourceLimit=replay-cnv->preFromULength;
- pArgs->flush=FALSE;
+ pArgs->flush=false;
if((sourceIndex+=cnv->preFromULength)<0) {
sourceIndex=-1;
}
@@ -1017,7 +1017,7 @@ _fromUnicodeWithCallback(UConverterFromUnicodeArgs *pArgs, UErrorCode *err) {
/* inject an error and continue with callback handling */
*err=U_TRUNCATED_CHAR_FOUND;
- calledCallback=FALSE; /* new error condition */
+ calledCallback=false; /* new error condition */
} else {
/* input consumed */
if(pArgs->flush) {
@@ -1033,7 +1033,7 @@ _fromUnicodeWithCallback(UConverterFromUnicodeArgs *pArgs, UErrorCode *err) {
}
/* reset the converter without calling the callback function */
- _reset(cnv, UCNV_RESET_FROM_UNICODE, FALSE);
+ _reset(cnv, UCNV_RESET_FROM_UNICODE, false);
}
/* done successfully */
@@ -1110,7 +1110,7 @@ _fromUnicodeWithCallback(UConverterFromUnicodeArgs *pArgs, UErrorCode *err) {
* that a callback was called;
* if the callback did not resolve the error, then we return
*/
- calledCallback=TRUE;
+ calledCallback=true;
}
}
}
@@ -1118,7 +1118,7 @@ _fromUnicodeWithCallback(UConverterFromUnicodeArgs *pArgs, UErrorCode *err) {
/*
* Output the fromUnicode overflow buffer.
* Call this function if(cnv->charErrorBufferLength>0).
- * @return TRUE if overflow
+ * @return true if overflow
*/
static UBool
ucnv_outputOverflowFromUnicode(UConverter *cnv,
@@ -1154,7 +1154,7 @@ ucnv_outputOverflowFromUnicode(UConverter *cnv,
*pOffsets=offsets;
}
*err=U_BUFFER_OVERFLOW_ERROR;
- return TRUE;
+ return true;
}
/* copy the overflow contents to the target */
@@ -1170,7 +1170,7 @@ ucnv_outputOverflowFromUnicode(UConverter *cnv,
if(offsets!=NULL) {
*pOffsets=offsets;
}
- return FALSE;
+ return false;
}
U_CAPI void U_EXPORT2
@@ -1316,7 +1316,7 @@ _toUnicodeWithCallback(UConverterToUnicodeArgs *pArgs, UErrorCode *err) {
/* avoid compiler warnings - not otherwise necessary, and the values do not matter */
realSourceLimit=NULL;
- realFlush=FALSE;
+ realFlush=false;
realSourceIndex=0;
} else {
/*
@@ -1332,7 +1332,7 @@ _toUnicodeWithCallback(UConverterToUnicodeArgs *pArgs, UErrorCode *err) {
uprv_memcpy(replay, cnv->preToU, -cnv->preToULength);
pArgs->source=replay;
pArgs->sourceLimit=replay-cnv->preToULength;
- pArgs->flush=FALSE;
+ pArgs->flush=false;
sourceIndex=-1;
cnv->preToULength=0;
@@ -1368,11 +1368,11 @@ _toUnicodeWithCallback(UConverterToUnicodeArgs *pArgs, UErrorCode *err) {
cnv->toULength==0);
} else {
/* handle error from getNextUChar() or ucnv_convertEx() */
- converterSawEndOfInput=FALSE;
+ converterSawEndOfInput=false;
}
/* no callback called yet for this iteration */
- calledCallback=FALSE;
+ calledCallback=false;
/* no sourceIndex adjustment for conversion, only for callback output */
errorInputLength=0;
@@ -1421,7 +1421,7 @@ _toUnicodeWithCallback(UConverterToUnicodeArgs *pArgs, UErrorCode *err) {
uprv_memcpy(replay, cnv->preToU, -cnv->preToULength);
pArgs->source=replay;
pArgs->sourceLimit=replay-cnv->preToULength;
- pArgs->flush=FALSE;
+ pArgs->flush=false;
if((sourceIndex+=cnv->preToULength)<0) {
sourceIndex=-1;
}
@@ -1462,7 +1462,7 @@ _toUnicodeWithCallback(UConverterToUnicodeArgs *pArgs, UErrorCode *err) {
/* inject an error and continue with callback handling */
*err=U_TRUNCATED_CHAR_FOUND;
- calledCallback=FALSE; /* new error condition */
+ calledCallback=false; /* new error condition */
} else {
/* input consumed */
if(pArgs->flush) {
@@ -1478,7 +1478,7 @@ _toUnicodeWithCallback(UConverterToUnicodeArgs *pArgs, UErrorCode *err) {
}
/* reset the converter without calling the callback function */
- _reset(cnv, UCNV_RESET_TO_UNICODE, FALSE);
+ _reset(cnv, UCNV_RESET_TO_UNICODE, false);
}
/* done successfully */
@@ -1556,7 +1556,7 @@ _toUnicodeWithCallback(UConverterToUnicodeArgs *pArgs, UErrorCode *err) {
* that a callback was called;
* if the callback did not resolve the error, then we return
*/
- calledCallback=TRUE;
+ calledCallback=true;
}
}
}
@@ -1564,7 +1564,7 @@ _toUnicodeWithCallback(UConverterToUnicodeArgs *pArgs, UErrorCode *err) {
/*
* Output the toUnicode overflow buffer.
* Call this function if(cnv->UCharErrorBufferLength>0).
- * @return TRUE if overflow
+ * @return true if overflow
*/
static UBool
ucnv_outputOverflowToUnicode(UConverter *cnv,
@@ -1600,7 +1600,7 @@ ucnv_outputOverflowToUnicode(UConverter *cnv,
*pOffsets=offsets;
}
*err=U_BUFFER_OVERFLOW_ERROR;
- return TRUE;
+ return true;
}
/* copy the overflow contents to the target */
@@ -1616,7 +1616,7 @@ ucnv_outputOverflowToUnicode(UConverter *cnv,
if(offsets!=NULL) {
*pOffsets=offsets;
}
- return FALSE;
+ return false;
}
U_CAPI void U_EXPORT2
@@ -1754,7 +1754,7 @@ ucnv_fromUChars(UConverter *cnv,
destLimit=dest+destCapacity;
/* perform the conversion */
- ucnv_fromUnicode(cnv, &dest, destLimit, &src, srcLimit, 0, TRUE, pErrorCode);
+ ucnv_fromUnicode(cnv, &dest, destLimit, &src, srcLimit, 0, true, pErrorCode);
destLength=(int32_t)(dest-originalDest);
/* if an overflow occurs, then get the preflighting length */
@@ -1765,7 +1765,7 @@ ucnv_fromUChars(UConverter *cnv,
do {
dest=buffer;
*pErrorCode=U_ZERO_ERROR;
- ucnv_fromUnicode(cnv, &dest, destLimit, &src, srcLimit, 0, TRUE, pErrorCode);
+ ucnv_fromUnicode(cnv, &dest, destLimit, &src, srcLimit, 0, true, pErrorCode);
destLength+=(int32_t)(dest-buffer);
} while(*pErrorCode==U_BUFFER_OVERFLOW_ERROR);
}
@@ -1810,7 +1810,7 @@ ucnv_toUChars(UConverter *cnv,
destLimit=dest+destCapacity;
/* perform the conversion */
- ucnv_toUnicode(cnv, &dest, destLimit, &src, srcLimit, 0, TRUE, pErrorCode);
+ ucnv_toUnicode(cnv, &dest, destLimit, &src, srcLimit, 0, true, pErrorCode);
destLength=(int32_t)(dest-originalDest);
/* if an overflow occurs, then get the preflighting length */
@@ -1822,7 +1822,7 @@ ucnv_toUChars(UConverter *cnv,
do {
dest=buffer;
*pErrorCode=U_ZERO_ERROR;
- ucnv_toUnicode(cnv, &dest, destLimit, &src, srcLimit, 0, TRUE, pErrorCode);
+ ucnv_toUnicode(cnv, &dest, destLimit, &src, srcLimit, 0, true, pErrorCode);
destLength+=(int32_t)(dest-buffer);
}
while(*pErrorCode==U_BUFFER_OVERFLOW_ERROR);
@@ -1907,15 +1907,15 @@ ucnv_getNextUChar(UConverter *cnv,
}
/*
- * flush==TRUE is implied for ucnv_getNextUChar()
+ * flush==true is implied for ucnv_getNextUChar()
*
* do not simply return even if s==sourceLimit because the converter may
- * not have seen flush==TRUE before
+ * not have seen flush==true before
*/
/* prepare the converter arguments */
args.converter=cnv;
- args.flush=TRUE;
+ args.flush=true;
args.offsets=NULL;
args.source=s;
args.sourceLimit=sourceLimit;
@@ -1937,7 +1937,7 @@ ucnv_getNextUChar(UConverter *cnv,
*source=s=args.source;
if(*err==U_INDEX_OUTOFBOUNDS_ERROR) {
/* reset the converter without calling the callback function */
- _reset(cnv, UCNV_RESET_TO_UNICODE, FALSE);
+ _reset(cnv, UCNV_RESET_TO_UNICODE, false);
return 0xffff; /* no output */
} else if(U_SUCCESS(*err) && c>=0) {
return c;
@@ -2176,7 +2176,7 @@ ucnv_convertEx(UConverter *targetCnv, UConverter *sourceCnv,
/* prepare the converter arguments */
fromUArgs.converter=targetCnv;
- fromUArgs.flush=FALSE;
+ fromUArgs.flush=false;
fromUArgs.offsets=NULL;
fromUArgs.target=*target;
fromUArgs.targetLimit=targetLimit;
@@ -2331,8 +2331,8 @@ ucnv_convertEx(UConverter *targetCnv, UConverter *sourceCnv,
/* input consumed */
if(flush) {
/* reset the converters without calling the callback functions */
- _reset(sourceCnv, UCNV_RESET_TO_UNICODE, FALSE);
- _reset(targetCnv, UCNV_RESET_FROM_UNICODE, FALSE);
+ _reset(sourceCnv, UCNV_RESET_TO_UNICODE, false);
+ _reset(targetCnv, UCNV_RESET_FROM_UNICODE, false);
}
/* done successfully */
@@ -2372,7 +2372,7 @@ ucnv_convertEx(UConverter *targetCnv, UConverter *sourceCnv,
sourceCnv->preToULength>=0 &&
sourceCnv->UCharErrorBufferLength==0
) {
- fromUArgs.flush=TRUE;
+ fromUArgs.flush=true;
}
}
@@ -2436,8 +2436,8 @@ ucnv_internalConvert(UConverter *outConverter, UConverter *inConverter,
&myTarget, targetLimit,
&source, sourceLimit,
pivotBuffer, &pivot, &pivot2, pivotBuffer+CHUNK_SIZE,
- FALSE,
- TRUE,
+ false,
+ true,
pErrorCode);
targetLength=(int32_t)(myTarget-target);
}
@@ -2459,8 +2459,8 @@ ucnv_internalConvert(UConverter *outConverter, UConverter *inConverter,
&myTarget, targetLimit,
&source, sourceLimit,
pivotBuffer, &pivot, &pivot2, pivotBuffer+CHUNK_SIZE,
- FALSE,
- TRUE,
+ false,
+ true,
pErrorCode);
targetLength+=(int32_t)(myTarget-targetBuffer);
} while(*pErrorCode==U_BUFFER_OVERFLOW_ERROR);
@@ -2585,7 +2585,7 @@ ucnv_toAlgorithmic(UConverterType algorithmicType,
char *target, int32_t targetCapacity,
const char *source, int32_t sourceLength,
UErrorCode *pErrorCode) {
- return ucnv_convertAlgorithmic(TRUE, algorithmicType, cnv,
+ return ucnv_convertAlgorithmic(true, algorithmicType, cnv,
target, targetCapacity,
source, sourceLength,
pErrorCode);
@@ -2597,7 +2597,7 @@ ucnv_fromAlgorithmic(UConverter *cnv,
char *target, int32_t targetCapacity,
const char *source, int32_t sourceLength,
UErrorCode *pErrorCode) {
- return ucnv_convertAlgorithmic(FALSE, algorithmicType, cnv,
+ return ucnv_convertAlgorithmic(false, algorithmicType, cnv,
target, targetCapacity,
source, sourceLength,
pErrorCode);
@@ -2885,12 +2885,12 @@ ucnv_toUCountPending(const UConverter* cnv, UErrorCode* status){
U_CAPI UBool U_EXPORT2
ucnv_isFixedWidth(UConverter *cnv, UErrorCode *status){
if (U_FAILURE(*status)) {
- return FALSE;
+ return false;
}
if (cnv == NULL) {
*status = U_ILLEGAL_ARGUMENT_ERROR;
- return FALSE;
+ return false;
}
switch (ucnv_getType(cnv)) {
@@ -2900,9 +2900,9 @@ ucnv_isFixedWidth(UConverter *cnv, UErrorCode *status){
case UCNV_UTF32_LittleEndian:
case UCNV_UTF32:
case UCNV_US_ASCII:
- return TRUE;
+ return true;
default:
- return FALSE;
+ return false;
}
}
#endif
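The ucnv.cpp hunks above pass through ucnv_fromUChars()/ucnv_toUChars(), whose retry loop relies on ICU's preflighting contract: a call with a too-small destination fails with U_BUFFER_OVERFLOW_ERROR but still returns the required length. A minimal sketch of that calling pattern against the public ucnv.h API follows; the helper name, converter name and buffers are illustrative, not taken from the patch.

#include <unicode/ucnv.h>
#include <string>

/* Convert UTF-16 to the converter's charset, sizing the output by preflighting:
 * the first call fails with U_BUFFER_OVERFLOW_ERROR but returns the needed length. */
static std::string toCharset(const char *converterName, const UChar *src, int32_t srcLength) {
    UErrorCode status = U_ZERO_ERROR;
    UConverter *cnv = ucnv_open(converterName, &status);
    if (U_FAILURE(status)) {
        return std::string();
    }
    /* preflight: NULL destination, zero capacity */
    int32_t needed = ucnv_fromUChars(cnv, nullptr, 0, src, srcLength, &status);
    if (status != U_BUFFER_OVERFLOW_ERROR) {
        ucnv_close(cnv);
        return std::string();
    }
    std::string out(needed, '\0');
    status = U_ZERO_ERROR;
    ucnv_fromUChars(cnv, &out[0], needed, src, srcLength, &status);
    ucnv_close(cnv);
    return U_SUCCESS(status) ? out : std::string();
}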
diff --git a/thirdparty/icu4c/common/ucnv2022.cpp b/thirdparty/icu4c/common/ucnv2022.cpp
index aa1e169c99..ec096780e9 100644
--- a/thirdparty/icu4c/common/ucnv2022.cpp
+++ b/thirdparty/icu4c/common/ucnv2022.cpp
@@ -491,7 +491,7 @@ _ISO2022Open(UConverter *cnv, UConverterLoadArgs *pArgs, UErrorCode *errorCode){
uprv_memset(myConverterData, 0, sizeof(UConverterDataISO2022));
myConverterData->currentType = ASCII1;
- cnv->fromUnicodeStatus =FALSE;
+ cnv->fromUnicodeStatus =false;
if(pArgs->locale){
uprv_strncpy(myLocale, pArgs->locale, sizeof(myLocale)-1);
}
@@ -623,7 +623,7 @@ _ISO2022Open(UConverter *cnv, UConverterLoadArgs *pArgs, UErrorCode *errorCode){
#endif // !UCONFIG_ONLY_HTML_CONVERSION
else{
#ifdef U_ENABLE_GENERIC_ISO_2022
- myConverterData->isFirstBuffer = TRUE;
+ myConverterData->isFirstBuffer = true;
/* append the UTF-8 escape sequence */
cnv->charErrorBufferLength = 3;
@@ -682,7 +682,7 @@ _ISO2022Reset(UConverter *converter, UConverterResetChoice choice) {
if(choice<=UCNV_RESET_TO_UNICODE) {
uprv_memset(&myConverterData->toU2022State, 0, sizeof(ISO2022State));
myConverterData->key = 0;
- myConverterData->isEmptySegment = FALSE;
+ myConverterData->isEmptySegment = false;
}
if(choice!=UCNV_RESET_TO_UNICODE) {
uprv_memset(&myConverterData->fromU2022State, 0, sizeof(ISO2022State));
@@ -690,7 +690,7 @@ _ISO2022Reset(UConverter *converter, UConverterResetChoice choice) {
#ifdef U_ENABLE_GENERIC_ISO_2022
if(myConverterData->locale[0] == 0){
if(choice<=UCNV_RESET_TO_UNICODE) {
- myConverterData->isFirstBuffer = TRUE;
+ myConverterData->isFirstBuffer = true;
myConverterData->key = 0;
if (converter->mode == UCNV_SO){
ucnv_close (myConverterData->currentConverter);
@@ -1285,7 +1285,7 @@ T_UConverter_toUnicode_ISO_2022_OFFSETS_LOGIC(UConverterToUnicodeArgs* args,
}
/* convert to before the ESC or until the end of the buffer */
- myData->isFirstBuffer=FALSE;
+ myData->isFirstBuffer=false;
sourceStart = args->source;
myTargetStart = args->target;
args->converter = myData->currentConverter;
@@ -1848,7 +1848,7 @@ getTrail:
len = 1;
cs = cs0;
g = 0;
- useFallback = FALSE;
+ useFallback = false;
}
break;
case JISX208:
@@ -1864,7 +1864,7 @@ getTrail:
len = len2;
cs = cs0;
g = 0;
- useFallback = FALSE;
+ useFallback = false;
}
} else if(len == 0 && useFallback &&
(uint32_t)(sourceChar - HWKANA_START) <= (HWKANA_END - HWKANA_START)) {
@@ -1872,7 +1872,7 @@ getTrail:
len = -2;
cs = cs0;
g = 0;
- useFallback = FALSE;
+ useFallback = false;
}
break;
case ISO8859_7:
@@ -1886,7 +1886,7 @@ getTrail:
len = len2;
cs = cs0;
g = 2;
- useFallback = FALSE;
+ useFallback = false;
}
break;
default:
@@ -1911,7 +1911,7 @@ getTrail:
len = len2;
cs = cs0;
g = 0;
- useFallback = FALSE;
+ useFallback = false;
}
break;
}
@@ -2121,7 +2121,7 @@ UConverter_toUnicode_ISO_2022_JP_OFFSETS_LOGIC(UConverterToUnicodeArgs *args,
continue;
} else {
/* only JIS7 uses SI/SO, not ISO-2022-JP-x */
- myData->isEmptySegment = FALSE; /* reset this, we have a different error */
+ myData->isEmptySegment = false; /* reset this, we have a different error */
break;
}
@@ -2133,7 +2133,7 @@ UConverter_toUnicode_ISO_2022_JP_OFFSETS_LOGIC(UConverterToUnicodeArgs *args,
continue;
} else {
/* only JIS7 uses SI/SO, not ISO-2022-JP-x */
- myData->isEmptySegment = FALSE; /* reset this, we have a different error */
+ myData->isEmptySegment = false; /* reset this, we have a different error */
break;
}
@@ -2159,12 +2159,12 @@ escape:
if(U_FAILURE(*err)){
args->target = myTarget;
args->source = mySource;
- myData->isEmptySegment = FALSE; /* Reset to avoid future spurious errors */
+ myData->isEmptySegment = false; /* Reset to avoid future spurious errors */
return;
}
/* If we successfully completed an escape sequence, we begin a new segment, empty so far */
if(myData->key==0) {
- myData->isEmptySegment = TRUE;
+ myData->isEmptySegment = true;
}
continue;
@@ -2181,7 +2181,7 @@ escape:
U_FALLTHROUGH;
default:
/* convert one or two bytes */
- myData->isEmptySegment = FALSE;
+ myData->isEmptySegment = false;
cs = (StateEnum)pToU2022State->cs[pToU2022State->g];
if( (uint8_t)(mySourceChar - 0xa1) <= (0xdf - 0xa1) && myData->version==4 &&
!IS_JP_DBCS(cs)
@@ -2262,7 +2262,7 @@ getTrailByte:
tempBuf[0] = (char)(tmpSourceChar >> 8);
tempBuf[1] = (char)(tmpSourceChar);
}
- targetUniChar = ucnv_MBCSSimpleGetNextUChar(myData->myConverterArray[cs], tempBuf, 2, FALSE);
+ targetUniChar = ucnv_MBCSSimpleGetNextUChar(myData->myConverterArray[cs], tempBuf, 2, false);
} else if (!(trailIsOk || IS_2022_CONTROL(trailByte))) {
/* report a pair of illegal bytes if the second byte is not a DBCS starter */
++mySource;
@@ -2534,7 +2534,7 @@ getTrail:
int32_t sourceIndex;
/* we are switching to ASCII */
- isTargetByteDBCS=FALSE;
+ isTargetByteDBCS=false;
/* get the source index of the last input character */
/*
@@ -2712,7 +2712,7 @@ UConverter_toUnicode_ISO_2022_KR_OFFSETS_LOGIC(UConverterToUnicodeArgs *args,
if(mySourceChar==UCNV_SI){
myData->toU2022State.g = 0;
if (myData->isEmptySegment) {
- myData->isEmptySegment = FALSE; /* we are handling it, reset to avoid future spurious errors */
+ myData->isEmptySegment = false; /* we are handling it, reset to avoid future spurious errors */
*err = U_ILLEGAL_ESCAPE_SEQUENCE;
args->converter->toUCallbackReason = UCNV_IRREGULAR;
args->converter->toUBytes[0] = (uint8_t)mySourceChar;
@@ -2725,13 +2725,13 @@ UConverter_toUnicode_ISO_2022_KR_OFFSETS_LOGIC(UConverterToUnicodeArgs *args,
continue;
}else if(mySourceChar==UCNV_SO){
myData->toU2022State.g = 1;
- myData->isEmptySegment = TRUE; /* Begin a new segment, empty so far */
+ myData->isEmptySegment = true; /* Begin a new segment, empty so far */
/*consume the source */
continue;
}else if(mySourceChar==ESC_2022){
mySource--;
escape:
- myData->isEmptySegment = FALSE; /* Any invalid ESC sequences will be detected separately, so just reset this */
+ myData->isEmptySegment = false; /* Any invalid ESC sequences will be detected separately, so just reset this */
changeState_2022(args->converter,&(mySource),
mySourceLimit, ISO_2022_KR, err);
if(U_FAILURE(*err)){
@@ -2742,7 +2742,7 @@ escape:
continue;
}
- myData->isEmptySegment = FALSE; /* Any invalid char errors will be detected separately, so just reset this */
+ myData->isEmptySegment = false; /* Any invalid char errors will be detected separately, so just reset this */
if(myData->toU2022State.g == 1) {
if(mySource < mySourceLimit) {
int leadIsOk, trailIsOk;
@@ -3092,7 +3092,7 @@ getTrail:
len = 2;
} else {
len = -2;
- useFallback = FALSE;
+ useFallback = false;
}
if(cs == CNS_11643_1) {
g = 1;
@@ -3119,7 +3119,7 @@ getTrail:
len = len2;
cs = cs0;
g = 1;
- useFallback = FALSE;
+ useFallback = false;
}
}
}
@@ -3301,7 +3301,7 @@ UConverter_toUnicode_ISO_2022_CN_OFFSETS_LOGIC(UConverterToUnicodeArgs *args,
case UCNV_SI:
pToU2022State->g=0;
if (myData->isEmptySegment) {
- myData->isEmptySegment = FALSE; /* we are handling it, reset to avoid future spurious errors */
+ myData->isEmptySegment = false; /* we are handling it, reset to avoid future spurious errors */
*err = U_ILLEGAL_ESCAPE_SEQUENCE;
args->converter->toUCallbackReason = UCNV_IRREGULAR;
args->converter->toUBytes[0] = static_cast<uint8_t>(mySourceChar);
@@ -3315,11 +3315,11 @@ UConverter_toUnicode_ISO_2022_CN_OFFSETS_LOGIC(UConverterToUnicodeArgs *args,
case UCNV_SO:
if(pToU2022State->cs[1] != 0) {
pToU2022State->g=1;
- myData->isEmptySegment = TRUE; /* Begin a new segment, empty so far */
+ myData->isEmptySegment = true; /* Begin a new segment, empty so far */
continue;
} else {
/* illegal to have SO before a matching designator */
- myData->isEmptySegment = FALSE; /* Handling a different error, reset this to avoid future spurious errs */
+ myData->isEmptySegment = false; /* Handling a different error, reset this to avoid future spurious errs */
break;
}
@@ -3345,7 +3345,7 @@ escape:
if(U_FAILURE(*err)){
args->target = myTarget;
args->source = mySource;
- myData->isEmptySegment = FALSE; /* Reset to avoid future spurious errors */
+ myData->isEmptySegment = false; /* Reset to avoid future spurious errors */
return;
}
continue;
@@ -3358,7 +3358,7 @@ escape:
U_FALLTHROUGH;
default:
/* convert one or two bytes */
- myData->isEmptySegment = FALSE;
+ myData->isEmptySegment = false;
if(pToU2022State->g != 0) {
if(mySource < mySourceLimit) {
UConverterSharedData *cnv;
@@ -3397,7 +3397,7 @@ getTrailByte:
tempBuf[1] = (char) trailByte;
tempBufLen = 2;
}
- targetUniChar = ucnv_MBCSSimpleGetNextUChar(cnv, tempBuf, tempBufLen, FALSE);
+ targetUniChar = ucnv_MBCSSimpleGetNextUChar(cnv, tempBuf, tempBufLen, false);
mySourceChar = (mySourceChar << 8) | trailByte;
} else if (!(trailIsOk || IS_2022_CONTROL(trailByte))) {
/* report a pair of illegal bytes if the second byte is not a DBCS starter */
@@ -3609,7 +3609,7 @@ _ISO_2022_SafeClone(
uprv_memcpy(&localClone->mydata, cnvData, sizeof(UConverterDataISO2022));
localClone->cnv.extraInfo = &localClone->mydata; /* set pointer to extra data */
- localClone->cnv.isExtraLocal = TRUE;
+ localClone->cnv.isExtraLocal = true;
/* share the subconverters */
@@ -3808,8 +3808,8 @@ static const UConverterStaticData _ISO2022StaticData={
3, /* max 3 bytes per UChar from UTF-8 (4 bytes from surrogate _pair_) */
{ 0x1a, 0, 0, 0 },
1,
- FALSE,
- FALSE,
+ false,
+ false,
0,
0,
{ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 } /* reserved */
@@ -3853,8 +3853,8 @@ static const UConverterStaticData _ISO2022JPStaticData={
6, /* max 6 bytes per UChar: 4-byte escape sequence + DBCS */
{ 0x1a, 0, 0, 0 },
1,
- FALSE,
- FALSE,
+ false,
+ false,
0,
0,
{ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 } /* reserved */
@@ -3904,8 +3904,8 @@ static const UConverterStaticData _ISO2022KRStaticData={
8, /* max 8 bytes per UChar */
{ 0x1a, 0, 0, 0 },
1,
- FALSE,
- FALSE,
+ false,
+ false,
0,
0,
{ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 } /* reserved */
@@ -3955,8 +3955,8 @@ static const UConverterStaticData _ISO2022CNStaticData={
8, /* max 8 bytes per UChar: 4-byte CNS designator + 2 bytes for SS2/SS3 + DBCS */
{ 0x1a, 0, 0, 0 },
1,
- FALSE,
- FALSE,
+ false,
+ false,
0,
0,
{ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 } /* reserved */
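Most of the changes in this file, like the rest of the patch, only rewrite flush=TRUE/FALSE as true/false; the flag itself carries the streaming contract of ucnv_toUnicode(): pass false while more input chunks will follow, and true with the final chunk so the converter can report truncated escape sequences and reset its ISO-2022 state. A minimal sketch of that usage, assuming the public ucnv.h API; the chunking, buffer size and charset name are illustrative.

#include <unicode/ucnv.h>

/* Feed two byte chunks through one converter; flush is true only for the last
 * chunk, so an escape sequence split across the chunk boundary is carried over,
 * while one truncated at end of input is reported as an error. */
static void streamChunks(const char *chunk1, int32_t len1,
                         const char *chunk2, int32_t len2) {
    UErrorCode status = U_ZERO_ERROR;
    UConverter *cnv = ucnv_open("ISO-2022-JP", &status);
    if (U_FAILURE(status)) {
        return;
    }
    const char *chunks[2] = { chunk1, chunk2 };
    int32_t lengths[2] = { len1, len2 };
    UChar out[256];
    for (int i = 0; i < 2 && U_SUCCESS(status); ++i) {
        const char *source = chunks[i];
        const char *sourceLimit = source + lengths[i];
        UChar *target = out;
        UBool flush = (i == 1); /* true only with the final chunk */
        ucnv_toUnicode(cnv, &target, out + 256, &source, sourceLimit,
                       nullptr, flush, &status);
        /* consume out[0 .. target - out); a real caller would also loop on
         * U_BUFFER_OVERFLOW_ERROR if the target buffer fills up */
    }
    ucnv_close(cnv);
}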
diff --git a/thirdparty/icu4c/common/ucnv_bld.cpp b/thirdparty/icu4c/common/ucnv_bld.cpp
index d08eec7369..a0fbfe2d7f 100644
--- a/thirdparty/icu4c/common/ucnv_bld.cpp
+++ b/thirdparty/icu4c/common/ucnv_bld.cpp
@@ -200,7 +200,7 @@ static icu::UMutex cnvCacheMutex;
static const char **gAvailableConverters = NULL;
static uint16_t gAvailableConverterCount = 0;
-static icu::UInitOnce gAvailableConvertersInitOnce = U_INITONCE_INITIALIZER;
+static icu::UInitOnce gAvailableConvertersInitOnce {};
#if !U_CHARSET_IS_UTF8
@@ -254,7 +254,7 @@ static UBool U_CALLCONV ucnv_cleanup(void) {
#if !U_CHARSET_IS_UTF8
gDefaultConverterName = NULL;
gDefaultConverterNameBuffer[0] = 0;
- gDefaultConverterContainsOption = FALSE;
+ gDefaultConverterContainsOption = false;
gDefaultAlgorithmicSharedData = NULL;
#endif
@@ -318,7 +318,7 @@ ucnv_data_unFlattenClone(UConverterLoadArgs *pArgs, UDataMemory *pData, UErrorCo
data->staticData = source;
- data->sharedDataCached = FALSE;
+ data->sharedDataCached = false;
/* fill in fields from the loaded data */
data->dataMemory = (void*)pData; /* for future use */
@@ -462,7 +462,7 @@ ucnv_shareConverterData(UConverterSharedData * data)
*/
/* Mark it shared */
- data->sharedDataCached = TRUE;
+ data->sharedDataCached = true;
uhash_put(SHARED_DATA_HASHTABLE,
(void*) data->staticData->name, /* Okay to cast away const as long as
@@ -502,11 +502,11 @@ ucnv_getSharedConverterData(const char *name)
*/
/* Deletes (frees) the Shared data it's passed. first it checks the referenceCounter to
* see if anyone is using it, if not it frees all the memory stemming from sharedConverterData and
- * returns TRUE,
- * otherwise returns FALSE
+ * returns true,
+ * otherwise returns false
* @param sharedConverterData The shared data
* @return if not it frees all the memory stemming from sharedConverterData and
- * returns TRUE, otherwise returns FALSE
+ * returns true, otherwise returns false
*/
static UBool
ucnv_deleteSharedConverterData(UConverterSharedData * deadSharedData)
@@ -515,8 +515,8 @@ ucnv_deleteSharedConverterData(UConverterSharedData * deadSharedData)
UTRACE_DATA2(UTRACE_OPEN_CLOSE, "unload converter %s shared data %p", deadSharedData->staticData->name, deadSharedData);
if (deadSharedData->referenceCounter > 0) {
- UTRACE_EXIT_VALUE((int32_t)FALSE);
- return FALSE;
+ UTRACE_EXIT_VALUE((int32_t)false);
+ return false;
}
if (deadSharedData->impl->unload != NULL) {
@@ -531,8 +531,8 @@ ucnv_deleteSharedConverterData(UConverterSharedData * deadSharedData)
uprv_free(deadSharedData);
- UTRACE_EXIT_VALUE((int32_t)TRUE);
- return TRUE;
+ UTRACE_EXIT_VALUE((int32_t)true);
+ return true;
}
/**
@@ -589,7 +589,7 @@ ucnv_unload(UConverterSharedData *sharedData) {
sharedData->referenceCounter--;
}
- if((sharedData->referenceCounter <= 0)&&(sharedData->sharedDataCached == FALSE)) {
+ if((sharedData->referenceCounter <= 0)&&(sharedData->sharedDataCached == false)) {
ucnv_deleteSharedConverterData(sharedData);
}
}
@@ -703,10 +703,10 @@ parseConverterOptions(const char *inName,
/*Logic determines if the converter is Algorithmic AND/OR cached
*depending on that:
- * -we either go to get data from disk and cache it (Data=TRUE, Cached=False)
- * -Get it from a Hashtable (Data=X, Cached=TRUE)
- * -Call dataConverter initializer (Data=TRUE, Cached=TRUE)
- * -Call AlgorithmicConverter initializer (Data=FALSE, Cached=TRUE)
+ * -we either go to get data from disk and cache it (Data=true, Cached=false)
+ * -Get it from a Hashtable (Data=X, Cached=true)
+ * -Call dataConverter initializer (Data=true, Cached=true)
+ * -Call AlgorithmicConverter initializer (Data=false, Cached=true)
*/
U_CFUNC UConverterSharedData *
ucnv_loadSharedData(const char *converterName,
@@ -717,8 +717,8 @@ ucnv_loadSharedData(const char *converterName,
UConverterLoadArgs stackArgs;
UConverterSharedData *mySharedConverterData = NULL;
UErrorCode internalErrorCode = U_ZERO_ERROR;
- UBool mayContainOption = TRUE;
- UBool checkForAlgorithmic = TRUE;
+ UBool mayContainOption = true;
+ UBool checkForAlgorithmic = true;
if (U_FAILURE (*err)) {
return NULL;
@@ -762,7 +762,7 @@ ucnv_loadSharedData(const char *converterName,
return NULL;
}
mySharedConverterData = (UConverterSharedData *)gDefaultAlgorithmicSharedData;
- checkForAlgorithmic = FALSE;
+ checkForAlgorithmic = false;
mayContainOption = gDefaultConverterContainsOption;
/* the default converter name is already canonical */
#endif
@@ -866,7 +866,7 @@ ucnv_canCreateConverter(const char *converterName, UErrorCode *err) {
if(U_SUCCESS(*err)) {
UTRACE_DATA1(UTRACE_OPEN_CLOSE, "test if can open converter %s", converterName);
- stackArgs.onlyTestIsLoadable=TRUE;
+ stackArgs.onlyTestIsLoadable=true;
mySharedConverterData = ucnv_loadSharedData(converterName, &stackPieces, &stackArgs, err);
ucnv_createConverterFromSharedData(
&myUConverter, mySharedConverterData,
@@ -989,15 +989,15 @@ ucnv_createConverterFromSharedData(UConverter *myUConverter,
ucnv_unloadSharedDataIfReady(mySharedConverterData);
return NULL;
}
- isCopyLocal = FALSE;
+ isCopyLocal = false;
} else {
- isCopyLocal = TRUE;
+ isCopyLocal = true;
}
/* initialize the converter */
uprv_memset(myUConverter, 0, sizeof(UConverter));
myUConverter->isCopyLocal = isCopyLocal;
- /*myUConverter->isExtraLocal = FALSE;*/ /* Set by the memset call */
+ /*myUConverter->isExtraLocal = false;*/ /* Set by the memset call */
myUConverter->sharedData = mySharedConverterData;
myUConverter->options = pArgs->options;
if(!pArgs->onlyTestIsLoadable) {
@@ -1083,7 +1083,7 @@ ucnv_flushCache ()
UCNV_DEBUG_LOG("del",mySharedData->staticData->name,mySharedData);
uhash_removeElement(SHARED_DATA_HASHTABLE, e);
- mySharedData->sharedDataCached = FALSE;
+ mySharedData->sharedDataCached = false;
ucnv_deleteSharedConverterData (mySharedData);
} else {
++remaining;
@@ -1342,7 +1342,7 @@ ucnv_swap(const UDataSwapper *ds,
_MBCSHeader *outMBCSHeader;
_MBCSHeader mbcsHeader;
uint32_t mbcsHeaderLength;
- UBool noFromU=FALSE;
+ UBool noFromU=false;
uint8_t outputType;
diff --git a/thirdparty/icu4c/common/ucnv_cb.cpp b/thirdparty/icu4c/common/ucnv_cb.cpp
index 1bb0012014..7bfde82870 100644
--- a/thirdparty/icu4c/common/ucnv_cb.cpp
+++ b/thirdparty/icu4c/common/ucnv_cb.cpp
@@ -86,7 +86,7 @@ ucnv_cbFromUWriteUChars(UConverterFromUnicodeArgs *args,
source,
sourceLimit,
NULL, /* no offsets */
- FALSE, /* no flush */
+ false, /* no flush */
err);
if(args->offsets)
@@ -141,7 +141,7 @@ ucnv_cbFromUWriteUChars(UConverterFromUnicodeArgs *args,
source,
sourceLimit,
NULL,
- FALSE,
+ false,
&err2);
/* We can go ahead and overwrite the length here. We know just how
diff --git a/thirdparty/icu4c/common/ucnv_ct.cpp b/thirdparty/icu4c/common/ucnv_ct.cpp
index b40e1b2c97..c12e982b88 100644
--- a/thirdparty/icu4c/common/ucnv_ct.cpp
+++ b/thirdparty/icu4c/common/ucnv_ct.cpp
@@ -225,23 +225,23 @@ static COMPOUND_TEXT_CONVERTERS getState(int codepoint) {
static COMPOUND_TEXT_CONVERTERS findStateFromEscSeq(const char* source, const char* sourceLimit, const uint8_t* toUBytesBuffer, int32_t toUBytesBufferLength, UErrorCode *err) {
COMPOUND_TEXT_CONVERTERS state = INVALID;
- UBool matchFound = FALSE;
+ UBool matchFound = false;
int32_t i, n, offset = toUBytesBufferLength;
for (i = 0; i < NUM_OF_CONVERTERS; i++) {
- matchFound = TRUE;
+ matchFound = true;
for (n = 0; escSeqCompoundText[i][n] != 0; n++) {
if (n < toUBytesBufferLength) {
if (toUBytesBuffer[n] != escSeqCompoundText[i][n]) {
- matchFound = FALSE;
+ matchFound = false;
break;
}
} else if ((source + (n - offset)) >= sourceLimit) {
*err = U_TRUNCATED_CHAR_FOUND;
- matchFound = FALSE;
+ matchFound = false;
break;
} else if (*(source + (n - offset)) != escSeqCompoundText[i][n]) {
- matchFound = FALSE;
+ matchFound = false;
break;
}
}
@@ -634,8 +634,8 @@ static const UConverterStaticData _CompoundTextStaticData = {
6,
{ 0xef, 0, 0, 0 },
1,
- FALSE,
- FALSE,
+ false,
+ false,
0,
0,
{ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 } /* reserved */
diff --git a/thirdparty/icu4c/common/ucnv_ext.cpp b/thirdparty/icu4c/common/ucnv_ext.cpp
index 7dea4eef41..ffc3c93033 100644
--- a/thirdparty/icu4c/common/ucnv_ext.cpp
+++ b/thirdparty/icu4c/common/ucnv_ext.cpp
@@ -108,7 +108,7 @@ ucnv_extFindToU(const uint32_t *toUSection, int32_t length, uint8_t byte) {
}
/*
- * TRUE if not an SI/SO stateful converter,
+ * true if not an SI/SO stateful converter,
* or if the match length fits with the current converter state
*/
#define UCNV_EXT_TO_U_VERIFY_SISO_MATCH(sisoState, match) \
@@ -154,7 +154,7 @@ ucnv_extMatchToU(const int32_t *cx, int8_t sisoState,
srcLength=1;
}
}
- flush=TRUE;
+ flush=true;
}
/* we must not remember fallback matches when not using fallbacks */
@@ -302,7 +302,7 @@ ucnv_extInitialMatchToU(UConverter *cnv, const int32_t *cx,
target, targetLimit,
offsets, srcIndex,
pErrorCode);
- return TRUE;
+ return true;
} else if(match<0) {
/* save state for partial match */
const char *s;
@@ -323,9 +323,9 @@ ucnv_extInitialMatchToU(UConverter *cnv, const int32_t *cx,
}
*src=s; /* same as *src=srcLimit; because we reached the end of input */
cnv->preToULength=(int8_t)match;
- return TRUE;
+ return true;
} else /* match==0 no match */ {
- return FALSE;
+ return false;
}
}
@@ -345,7 +345,7 @@ ucnv_extSimpleMatchToU(const int32_t *cx,
source, length,
NULL, 0,
&value,
- useFallback, TRUE);
+ useFallback, true);
if(match==length) {
/* write result for simple, single-character conversion */
if(UCNV_EXT_TO_U_IS_CODE_POINT(value)) {
@@ -358,7 +358,7 @@ ucnv_extSimpleMatchToU(const int32_t *cx,
* - match>0 && value points to string: simple conversion cannot handle multiple code points
* - match>0 && match!=length: not all input consumed, forbidden for this function
* - match==0: no match found in the first place
- * - match<0: partial match, not supported for simple conversion (and flush==TRUE)
+ * - match<0: partial match, not supported for simple conversion (and flush==true)
*/
return 0xfffe;
}
@@ -516,13 +516,13 @@ ucnv_extFindFromU(const UChar *fromUSection, int32_t length, UChar u) {
* @param srcLength length of src, >=0
* @param pMatchValue [out] output result value for the match from the data structure
* @param useFallback "use fallback" flag, usually from cnv->useFallback
- * @param flush TRUE if the end of the input stream is reached
+ * @param flush true if the end of the input stream is reached
* @return >1: matched, return value=total match length (number of input units matched)
* 1: matched, no mapping but request for <subchar1>
* (only for the first code point)
* 0: no match
* <0: partial match, return value=negative total match length
- * (partial matches are never returned for flush==TRUE)
+ * (partial matches are never returned for flush==true)
* (partial matches are never returned as being longer than UCNV_EXT_MAX_UCHARS)
* the matchLength is 2 if only firstCP matched, and >2 if firstCP and
* further code units matched
@@ -778,7 +778,7 @@ ucnv_extInitialMatchFromU(UConverter *cnv, const int32_t *cx,
target, targetLimit,
offsets, srcIndex,
pErrorCode);
- return TRUE;
+ return true;
} else if(match<0) {
/* save state for partial match */
const UChar *s;
@@ -795,13 +795,13 @@ ucnv_extInitialMatchFromU(UConverter *cnv, const int32_t *cx,
}
*src=s; /* same as *src=srcLimit; because we reached the end of input */
cnv->preFromULength=(int8_t)match;
- return TRUE;
+ return true;
} else if(match==1) {
/* matched, no mapping but request for <subchar1> */
- cnv->useSubChar1=TRUE;
- return FALSE;
+ cnv->useSubChar1=true;
+ return false;
} else /* match==0 no match */ {
- return FALSE;
+ return false;
}
}
@@ -822,7 +822,7 @@ ucnv_extSimpleMatchFromU(const int32_t *cx,
NULL, 0,
NULL, 0,
&value,
- useFallback, TRUE);
+ useFallback, true);
if(match>=2) {
/* write result for simple, single-character conversion */
int32_t length;
@@ -854,7 +854,7 @@ ucnv_extSimpleMatchFromU(const int32_t *cx,
* - match>1 && resultLength>4: result too long for simple conversion
* - match==1: no match found, <subchar1> preferred
* - match==0: no match found in the first place
- * - match<0: partial match, not supported for simple conversion (and flush==TRUE)
+ * - match<0: partial match, not supported for simple conversion (and flush==true)
*/
return 0;
}
@@ -934,7 +934,7 @@ ucnv_extContinueMatchFromU(UConverter *cnv,
if(match==1) {
/* matched, no mapping but request for <subchar1> */
- cnv->useSubChar1=TRUE;
+ cnv->useSubChar1=true;
}
/* move the first code point to the error field */
@@ -961,12 +961,12 @@ extSetUseMapping(UConverterUnicodeSet which, int32_t minLength, uint32_t value)
// Do not add entries with reserved bits set.
if(((value&(UCNV_EXT_FROM_U_ROUNDTRIP_FLAG|UCNV_EXT_FROM_U_RESERVED_MASK))!=
UCNV_EXT_FROM_U_ROUNDTRIP_FLAG)) {
- return FALSE;
+ return false;
}
} else /* UCNV_ROUNDTRIP_AND_FALLBACK_SET */ {
// Do not add entries with reserved bits set.
if((value&UCNV_EXT_FROM_U_RESERVED_MASK)!=0) {
- return FALSE;
+ return false;
}
}
// Do not add <subchar1> entries or other (future?) pseudo-entries
diff --git a/thirdparty/icu4c/common/ucnv_io.cpp b/thirdparty/icu4c/common/ucnv_io.cpp
index 7a95a3f1e6..c9d20cb941 100644
--- a/thirdparty/icu4c/common/ucnv_io.cpp
+++ b/thirdparty/icu4c/common/ucnv_io.cpp
@@ -175,7 +175,7 @@ static const char DATA_NAME[] = "cnvalias";
static const char DATA_TYPE[] = "icu";
static UDataMemory *gAliasData=NULL;
-static icu::UInitOnce gAliasDataInitOnce = U_INITONCE_INITIALIZER;
+static icu::UInitOnce gAliasDataInitOnce {};
enum {
tocLengthIndex=0,
@@ -226,7 +226,7 @@ static UBool U_CALLCONV ucnv_io_cleanup(void)
uprv_memset(&gMainTable, 0, sizeof(gMainTable));
- return TRUE; /* Everything was cleaned up */
+ return true; /* Everything was cleaned up */
}
static void U_CALLCONV initAliasData(UErrorCode &errCode) {
@@ -319,7 +319,7 @@ static inline UBool
isAlias(const char *alias, UErrorCode *pErrorCode) {
if(alias==NULL) {
*pErrorCode=U_ILLEGAL_ARGUMENT_ERROR;
- return FALSE;
+ return false;
}
return (UBool)(*alias!=0);
}
@@ -388,13 +388,13 @@ ucnv_io_stripASCIIForCompare(char *dst, const char *name) {
char *dstItr = dst;
uint8_t type, nextType;
char c1;
- UBool afterDigit = FALSE;
+ UBool afterDigit = false;
while ((c1 = *name++) != 0) {
type = GET_ASCII_TYPE(c1);
switch (type) {
case UIGNORE:
- afterDigit = FALSE;
+ afterDigit = false;
continue; /* ignore all but letters and digits */
case ZERO:
if (!afterDigit) {
@@ -405,11 +405,11 @@ ucnv_io_stripASCIIForCompare(char *dst, const char *name) {
}
break;
case NONZERO:
- afterDigit = TRUE;
+ afterDigit = true;
break;
default:
c1 = (char)type; /* lowercased letter */
- afterDigit = FALSE;
+ afterDigit = false;
break;
}
*dstItr++ = c1;
@@ -423,13 +423,13 @@ ucnv_io_stripEBCDICForCompare(char *dst, const char *name) {
char *dstItr = dst;
uint8_t type, nextType;
char c1;
- UBool afterDigit = FALSE;
+ UBool afterDigit = false;
while ((c1 = *name++) != 0) {
type = GET_EBCDIC_TYPE(c1);
switch (type) {
case UIGNORE:
- afterDigit = FALSE;
+ afterDigit = false;
continue; /* ignore all but letters and digits */
case ZERO:
if (!afterDigit) {
@@ -440,11 +440,11 @@ ucnv_io_stripEBCDICForCompare(char *dst, const char *name) {
}
break;
case NONZERO:
- afterDigit = TRUE;
+ afterDigit = true;
break;
default:
c1 = (char)type; /* lowercased letter */
- afterDigit = FALSE;
+ afterDigit = false;
break;
}
*dstItr++ = c1;
@@ -479,14 +479,14 @@ ucnv_compareNames(const char *name1, const char *name2) {
int rc;
uint8_t type, nextType;
char c1, c2;
- UBool afterDigit1 = FALSE, afterDigit2 = FALSE;
+ UBool afterDigit1 = false, afterDigit2 = false;
for (;;) {
while ((c1 = *name1++) != 0) {
type = GET_CHAR_TYPE(c1);
switch (type) {
case UIGNORE:
- afterDigit1 = FALSE;
+ afterDigit1 = false;
continue; /* ignore all but letters and digits */
case ZERO:
if (!afterDigit1) {
@@ -497,11 +497,11 @@ ucnv_compareNames(const char *name1, const char *name2) {
}
break;
case NONZERO:
- afterDigit1 = TRUE;
+ afterDigit1 = true;
break;
default:
c1 = (char)type; /* lowercased letter */
- afterDigit1 = FALSE;
+ afterDigit1 = false;
break;
}
break; /* deliver c1 */
@@ -510,7 +510,7 @@ ucnv_compareNames(const char *name1, const char *name2) {
type = GET_CHAR_TYPE(c2);
switch (type) {
case UIGNORE:
- afterDigit2 = FALSE;
+ afterDigit2 = false;
continue; /* ignore all but letters and digits */
case ZERO:
if (!afterDigit2) {
@@ -521,11 +521,11 @@ ucnv_compareNames(const char *name1, const char *name2) {
}
break;
case NONZERO:
- afterDigit2 = TRUE;
+ afterDigit2 = true;
break;
default:
c2 = (char)type; /* lowercased letter */
- afterDigit2 = FALSE;
+ afterDigit2 = false;
break;
}
break; /* deliver c2 */
@@ -628,11 +628,11 @@ isAliasInList(const char *alias, uint32_t listOffset) {
if (currList[currAlias]
&& ucnv_compareNames(alias, GET_STRING(currList[currAlias]))==0)
{
- return TRUE;
+ return true;
}
}
}
- return FALSE;
+ return false;
}
/*
@@ -1288,7 +1288,7 @@ ucnv_swapAliases(const UDataSwapper *ds,
uprv_sortArray(tempTable.rows, (int32_t)count, sizeof(TempRow),
io_compareRows, &tempTable,
- FALSE, pErrorCode);
+ false, pErrorCode);
if(U_SUCCESS(*pErrorCode)) {
/* copy/swap/permutate items */
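The ucnv_io.cpp helpers above implement charset alias normalization: comparison keeps only letters and digits, lowercases them, and skips leading zeroes inside embedded numbers. The same rule is exposed through ucnv_compareNames(); a small sketch, assuming that public entry point, of spellings it treats as equal.

#include <unicode/ucnv.h>
#include <cassert>

int main() {
    /* Alias comparison ignores case, punctuation and leading zeroes in
     * embedded numbers, so these spellings all name the same converter. */
    assert(ucnv_compareNames("UTF-8", "utf8") == 0);
    assert(ucnv_compareNames("UTF-8", "u.t.f-008") == 0);
    assert(ucnv_compareNames("ibm-943", "IBM_0943") == 0);
    assert(ucnv_compareNames("UTF-8", "UTF-16") != 0);
    return 0;
}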
diff --git a/thirdparty/icu4c/common/ucnv_lmb.cpp b/thirdparty/icu4c/common/ucnv_lmb.cpp
index 6969727927..78b8e40700 100644
--- a/thirdparty/icu4c/common/ucnv_lmb.cpp
+++ b/thirdparty/icu4c/common/ucnv_lmb.cpp
@@ -610,7 +610,7 @@ static const UConverterStaticData _LMBCSStaticData##n={\
sizeof(UConverterStaticData),\
"LMBCS-" #n,\
0, UCNV_IBM, UCNV_LMBCS_##n, 1, 3,\
- { 0x3f, 0, 0, 0 },1,FALSE,FALSE,0,0,{0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0} \
+ { 0x3f, 0, 0, 0 },1,false,false,0,0,{0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0} \
};\
const UConverterSharedData _LMBCSData##n= \
UCNV_IMMUTABLE_SHARED_DATA_INITIALIZER(&_LMBCSStaticData##n, &_LMBCSImpl##n);
@@ -721,7 +721,7 @@ _LMBCSSafeClone(const UConverter *cnv,
}
newLMBCS->cnv.extraInfo = &newLMBCS->lmbcs;
- newLMBCS->cnv.isExtraLocal = TRUE;
+ newLMBCS->cnv.isExtraLocal = true;
return &newLMBCS->cnv;
}
@@ -763,14 +763,14 @@ LMBCSConversionWorker (
U_ASSERT(xcnv);
U_ASSERT(group<ULMBCS_GRP_UNICODE);
- bytesConverted = ucnv_MBCSFromUChar32(xcnv, *pUniChar, &value, FALSE);
+ bytesConverted = ucnv_MBCSFromUChar32(xcnv, *pUniChar, &value, false);
/* get the first result byte */
if(bytesConverted > 0) {
firstByte = (ulmbcs_byte_t)(value >> ((bytesConverted - 1) * 8));
} else {
/* most common failure mode is an unassigned character */
- groups_tried[group] = TRUE;
+ groups_tried[group] = true;
return 0;
}
@@ -1191,11 +1191,11 @@ _LMBCSGetNextUCharWorker(UConverterToUnicodeArgs* args,
if (*args->source == group) {
/* single byte */
++args->source;
- uniChar = ucnv_MBCSSimpleGetNextUChar(cnv, args->source, 1, FALSE);
+ uniChar = ucnv_MBCSSimpleGetNextUChar(cnv, args->source, 1, false);
++args->source;
} else {
/* double byte */
- uniChar = ucnv_MBCSSimpleGetNextUChar(cnv, args->source, 2, FALSE);
+ uniChar = ucnv_MBCSSimpleGetNextUChar(cnv, args->source, 2, false);
args->source += 2;
}
}
@@ -1220,7 +1220,7 @@ _LMBCSGetNextUCharWorker(UConverterToUnicodeArgs* args,
/* Lookup value must include opt group */
bytes[0] = group;
bytes[1] = CurByte;
- uniChar = ucnv_MBCSSimpleGetNextUChar(cnv, bytes, 2, FALSE);
+ uniChar = ucnv_MBCSSimpleGetNextUChar(cnv, bytes, 2, false);
}
}
}
@@ -1236,13 +1236,13 @@ _LMBCSGetNextUCharWorker(UConverterToUnicodeArgs* args,
CHECK_SOURCE_LIMIT(0);
/* let the MBCS conversion consume CurByte again */
- uniChar = ucnv_MBCSSimpleGetNextUChar(cnv, args->source - 1, 1, FALSE);
+ uniChar = ucnv_MBCSSimpleGetNextUChar(cnv, args->source - 1, 1, false);
}
else
{
CHECK_SOURCE_LIMIT(1);
/* let the MBCS conversion consume CurByte again */
- uniChar = ucnv_MBCSSimpleGetNextUChar(cnv, args->source - 1, 2, FALSE);
+ uniChar = ucnv_MBCSSimpleGetNextUChar(cnv, args->source - 1, 2, false);
++args->source;
}
}
diff --git a/thirdparty/icu4c/common/ucnv_u16.cpp b/thirdparty/icu4c/common/ucnv_u16.cpp
index a5e8367400..bebdede4c4 100644
--- a/thirdparty/icu4c/common/ucnv_u16.cpp
+++ b/thirdparty/icu4c/common/ucnv_u16.cpp
@@ -637,7 +637,7 @@ static const UConverterStaticData _UTF16BEStaticData={
sizeof(UConverterStaticData),
"UTF-16BE",
1200, UCNV_IBM, UCNV_UTF16_BigEndian, 2, 2,
- { 0xff, 0xfd, 0, 0 },2,FALSE,FALSE,
+ { 0xff, 0xfd, 0, 0 },2,false,false,
0,
0,
{ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 } /* reserved */
@@ -1239,7 +1239,7 @@ static const UConverterStaticData _UTF16LEStaticData={
sizeof(UConverterStaticData),
"UTF-16LE",
1202, UCNV_IBM, UCNV_UTF16_LittleEndian, 2, 2,
- { 0xfd, 0xff, 0, 0 },2,FALSE,FALSE,
+ { 0xfd, 0xff, 0, 0 },2,false,false,
0,
0,
{ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 } /* reserved */
@@ -1526,7 +1526,7 @@ static const UConverterStaticData _UTF16StaticData = {
#else
{ 0xfd, 0xff, 0, 0 }, 2,
#endif
- FALSE, FALSE,
+ false, false,
0,
0,
{ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 } /* reserved */
@@ -1567,7 +1567,7 @@ static const UConverterStaticData _UTF16v2StaticData = {
1204, /* CCSID for BOM sensitive UTF-16 */
UCNV_IBM, UCNV_UTF16, 2, 2,
{ 0xff, 0xfd, 0, 0 }, 2,
- FALSE, FALSE,
+ false, false,
0,
0,
{ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 } /* reserved */
diff --git a/thirdparty/icu4c/common/ucnv_u32.cpp b/thirdparty/icu4c/common/ucnv_u32.cpp
index bf6bd11dba..bc160b71dd 100644
--- a/thirdparty/icu4c/common/ucnv_u32.cpp
+++ b/thirdparty/icu4c/common/ucnv_u32.cpp
@@ -494,7 +494,7 @@ static const UConverterStaticData _UTF32BEStaticData = {
"UTF-32BE",
1232,
UCNV_IBM, UCNV_UTF32_BigEndian, 4, 4,
- { 0, 0, 0xff, 0xfd }, 4, FALSE, FALSE,
+ { 0, 0, 0xff, 0xfd }, 4, false, false,
0,
0,
{ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 } /* reserved */
@@ -989,7 +989,7 @@ static const UConverterStaticData _UTF32LEStaticData = {
"UTF-32LE",
1234,
UCNV_IBM, UCNV_UTF32_LittleEndian, 4, 4,
- { 0xfd, 0xff, 0, 0 }, 4, FALSE, FALSE,
+ { 0xfd, 0xff, 0, 0 }, 4, false, false,
0,
0,
{ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 } /* reserved */
@@ -1110,7 +1110,7 @@ _UTF32ToUnicodeWithOffsets(UConverterToUnicodeArgs *pArgs,
/* some of the bytes are from a previous buffer, replay those first */
pArgs->source=utf32BOM+(state&4); /* select the correct BOM */
pArgs->sourceLimit=pArgs->source+((state&3)-count); /* replay previous bytes */
- pArgs->flush=FALSE; /* this sourceLimit is not the real source stream limit */
+ pArgs->flush=false; /* this sourceLimit is not the real source stream limit */
/* no offsets: bytes from previous buffer, and not enough for output */
T_UConverter_toUnicode_UTF32_BE(pArgs, pErrorCode);
@@ -1241,7 +1241,7 @@ static const UConverterStaticData _UTF32StaticData = {
#else
{ 0xfd, 0xff, 0, 0 }, 4,
#endif
- FALSE, FALSE,
+ false, false,
0,
0,
{ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 } /* reserved */
diff --git a/thirdparty/icu4c/common/ucnv_u7.cpp b/thirdparty/icu4c/common/ucnv_u7.cpp
index de9f3f42ec..8964ca01de 100644
--- a/thirdparty/icu4c/common/ucnv_u7.cpp
+++ b/thirdparty/icu4c/common/ucnv_u7.cpp
@@ -184,12 +184,12 @@ static void U_CALLCONV
_UTF7Reset(UConverter *cnv, UConverterResetChoice choice) {
if(choice<=UCNV_RESET_TO_UNICODE) {
/* reset toUnicode */
- cnv->toUnicodeStatus=0x1000000; /* inDirectMode=TRUE */
+ cnv->toUnicodeStatus=0x1000000; /* inDirectMode=true */
cnv->toULength=0;
}
if(choice!=UCNV_RESET_TO_UNICODE) {
/* reset fromUnicode */
- cnv->fromUnicodeStatus=(cnv->fromUnicodeStatus&0xf0000000)|0x1000000; /* keep version, inDirectMode=TRUE */
+ cnv->fromUnicodeStatus=(cnv->fromUnicodeStatus&0xf0000000)|0x1000000; /* keep version, inDirectMode=true */
}
}
@@ -286,7 +286,7 @@ directMode:
} else /* PLUS */ {
/* switch to Unicode mode */
nextSourceIndex=++sourceIndex;
- inDirectMode=FALSE;
+ inDirectMode=false;
byteIndex=0;
bits=0;
base64Counter=-1;
@@ -329,7 +329,7 @@ unicodeMode:
* It may be for example, a plus which we need to deal with in direct mode.
* 2.2.2. Else if the current char is illegal, we might as well deal with it here.
*/
- inDirectMode=TRUE;
+ inDirectMode=true;
if(base64Counter==-1) {
/* illegal: + immediately followed by something other than base64 or minus sign */
/* include the plus sign in the reported sequence, but not the subsequent char */
@@ -411,7 +411,7 @@ unicodeMode:
}
} else /*base64Value==-2*/ {
/* minus sign terminates the base64 sequence */
- inDirectMode=TRUE;
+ inDirectMode=true;
if(base64Counter==-1) {
/* +- i.e. a minus immediately following a plus */
*target++=PLUS;
@@ -541,7 +541,7 @@ directMode:
if(offsets!=NULL) {
*offsets++=sourceIndex;
}
- inDirectMode=FALSE;
+ inDirectMode=false;
base64Counter=0;
goto unicodeMode;
}
@@ -558,7 +558,7 @@ unicodeMode:
c=*source++;
if(c<=127 && encodeDirectly[c]) {
/* encode directly */
- inDirectMode=TRUE;
+ inDirectMode=true;
/* trick: back out this character to make this easier */
--source;
@@ -719,7 +719,7 @@ unicodeMode:
}
}
/* reset the state for the next conversion */
- cnv->fromUnicodeStatus=(cnv->fromUnicodeStatus&0xf0000000)|0x1000000; /* keep version, inDirectMode=TRUE */
+ cnv->fromUnicodeStatus=(cnv->fromUnicodeStatus&0xf0000000)|0x1000000; /* keep version, inDirectMode=true */
} else {
/* set the converter state back into UConverter */
cnv->fromUnicodeStatus=
@@ -778,7 +778,7 @@ static const UConverterStaticData _UTF7StaticData={
UCNV_IBM, UCNV_UTF7,
1, 4,
{ 0x3f, 0, 0, 0 }, 1, /* the subchar is not used */
- FALSE, FALSE,
+ false, false,
0,
0,
{ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 } /* reserved */
@@ -971,7 +971,7 @@ directMode:
} else /* AMPERSAND */ {
/* switch to Unicode mode */
nextSourceIndex=++sourceIndex;
- inDirectMode=FALSE;
+ inDirectMode=false;
byteIndex=0;
bits=0;
base64Counter=-1;
@@ -1002,7 +1002,7 @@ unicodeMode:
++nextSourceIndex;
if(b>0x7e) {
/* illegal - test other illegal US-ASCII values by base64Value==-3 */
- inDirectMode=TRUE;
+ inDirectMode=true;
*pErrorCode=U_ILLEGAL_CHAR_FOUND;
break;
} else if((base64Value=FROM_BASE64_IMAP(b))>=0) {
@@ -1024,7 +1024,7 @@ unicodeMode:
c=(UChar)((bits<<4)|(base64Value>>2));
if(isLegalIMAP(c)) {
/* illegal */
- inDirectMode=TRUE;
+ inDirectMode=true;
*pErrorCode=U_ILLEGAL_CHAR_FOUND;
goto endloop;
}
@@ -1042,7 +1042,7 @@ unicodeMode:
c=(UChar)((bits<<2)|(base64Value>>4));
if(isLegalIMAP(c)) {
/* illegal */
- inDirectMode=TRUE;
+ inDirectMode=true;
*pErrorCode=U_ILLEGAL_CHAR_FOUND;
goto endloop;
}
@@ -1060,7 +1060,7 @@ unicodeMode:
c=(UChar)((bits<<6)|base64Value);
if(isLegalIMAP(c)) {
/* illegal */
- inDirectMode=TRUE;
+ inDirectMode=true;
*pErrorCode=U_ILLEGAL_CHAR_FOUND;
goto endloop;
}
@@ -1079,7 +1079,7 @@ unicodeMode:
}
} else if(base64Value==-2) {
/* minus sign terminates the base64 sequence */
- inDirectMode=TRUE;
+ inDirectMode=true;
if(base64Counter==-1) {
/* &- i.e. a minus immediately following an ampersand */
*target++=AMPERSAND;
@@ -1109,7 +1109,7 @@ unicodeMode:
/* base64Value==-1 for characters that are illegal only in Unicode mode */
/* base64Value==-3 for illegal characters */
/* illegal */
- inDirectMode=TRUE;
+ inDirectMode=true;
*pErrorCode=U_ILLEGAL_CHAR_FOUND;
break;
}
@@ -1144,7 +1144,7 @@ endloop:
}
/* else if(base64Counter!=-1) byteIndex remains 0 because there is no particular byte sequence */
- inDirectMode=TRUE; /* avoid looping */
+ inDirectMode=true; /* avoid looping */
*pErrorCode=U_TRUNCATED_CHAR_FOUND;
}
@@ -1240,7 +1240,7 @@ directMode:
if(offsets!=NULL) {
*offsets++=sourceIndex;
}
- inDirectMode=FALSE;
+ inDirectMode=false;
base64Counter=0;
goto unicodeMode;
}
@@ -1257,7 +1257,7 @@ unicodeMode:
c=*source++;
if(isLegalIMAP(c)) {
/* encode directly */
- inDirectMode=TRUE;
+ inDirectMode=true;
/* trick: back out this character to make this easier */
--source;
@@ -1431,7 +1431,7 @@ unicodeMode:
}
}
/* reset the state for the next conversion */
- cnv->fromUnicodeStatus=(cnv->fromUnicodeStatus&0xf0000000)|0x1000000; /* keep version, inDirectMode=TRUE */
+ cnv->fromUnicodeStatus=(cnv->fromUnicodeStatus&0xf0000000)|0x1000000; /* keep version, inDirectMode=true */
} else {
/* set the converter state back into UConverter */
cnv->fromUnicodeStatus=
@@ -1479,7 +1479,7 @@ static const UConverterStaticData _IMAPStaticData={
UCNV_IBM, UCNV_IMAP_MAILBOX,
1, 4,
{ 0x3f, 0, 0, 0 }, 1, /* the subchar is not used */
- FALSE, FALSE,
+ false, false,
0,
0,
{ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 } /* reserved */
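The ucnv_u7.cpp hunks toggle inDirectMode when a '+' opens a modified-base64 run and a '-' closes it. A hedged sketch of decoding such a run with the public ucnv_toUChars(), using the stock "UTF-7" converter; the sample string and output handling are illustrative.

#include <unicode/ucnv.h>
#include <cstdio>

int main() {
    UErrorCode status = U_ZERO_ERROR;
    UConverter *cnv = ucnv_open("UTF-7", &status);
    if (U_FAILURE(status)) {
        return 1;
    }
    /* '+' switches the decoder out of direct mode into a base64 run and '-'
     * switches it back; "+AKM-" encodes U+00A3 (the pound sign). */
    const char utf7[] = "Price: +AKM-5";
    UChar out[32];
    int32_t length = ucnv_toUChars(cnv, out, 32, utf7, -1, &status);
    if (U_SUCCESS(status)) {
        std::printf("decoded %d UTF-16 code units\n", (int)length);
    }
    ucnv_close(cnv);
    return 0;
}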
diff --git a/thirdparty/icu4c/common/ucnv_u8.cpp b/thirdparty/icu4c/common/ucnv_u8.cpp
index 1ef7fa2f02..3c27f2e46e 100644
--- a/thirdparty/icu4c/common/ucnv_u8.cpp
+++ b/thirdparty/icu4c/common/ucnv_u8.cpp
@@ -56,7 +56,7 @@ static const uint32_t offsetsFromUTF8[5] = {0,
static UBool hasCESU8Data(const UConverter *cnv)
{
#if UCONFIG_ONLY_HTML_CONVERSION
- return FALSE;
+ return false;
#else
return (UBool)(cnv->sharedData == &_CESU8Data);
#endif
@@ -888,7 +888,7 @@ static const UConverterStaticData _UTF8StaticData={
"UTF-8",
1208, UCNV_IBM, UCNV_UTF8,
1, 3, /* max 3 bytes per UChar from UTF-8 (4 bytes from surrogate _pair_) */
- { 0xef, 0xbf, 0xbd, 0 },3,FALSE,FALSE,
+ { 0xef, 0xbf, 0xbd, 0 },3,false,false,
0,
0,
{ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 } /* reserved */
@@ -931,7 +931,7 @@ static const UConverterStaticData _CESU8StaticData={
"CESU-8",
9400, /* CCSID for CESU-8 */
UCNV_UNKNOWN, UCNV_CESU8, 1, 3,
- { 0xef, 0xbf, 0xbd, 0 },3,FALSE,FALSE,
+ { 0xef, 0xbf, 0xbd, 0 },3,false,false,
0,
0,
{ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 } /* reserved */
diff --git a/thirdparty/icu4c/common/ucnvbocu.cpp b/thirdparty/icu4c/common/ucnvbocu.cpp
index 7c2aab5655..edb49d36a9 100644
--- a/thirdparty/icu4c/common/ucnvbocu.cpp
+++ b/thirdparty/icu4c/common/ucnvbocu.cpp
@@ -195,7 +195,7 @@ bocu1TrailToByte[BOCU1_TRAIL_CONTROLS_COUNT]={
* what we need here.
* This macro adjust the results so that the modulo-value m is always >=0.
*
- * For positive n, the if() condition is always FALSE.
+ * For positive n, the if() condition is always false.
*
* @param n Number to be split into quotient and rest.
* Will be modified to contain the quotient.
@@ -1401,7 +1401,7 @@ static const UConverterStaticData _Bocu1StaticData={
UCNV_IBM, UCNV_BOCU1,
1, 4, /* one UChar generates at least 1 byte and at most 4 bytes */
{ 0x1a, 0, 0, 0 }, 1, /* BOCU-1 never needs to write a subchar */
- FALSE, FALSE,
+ false, false,
0,
0,
{ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 } /* reserved */
diff --git a/thirdparty/icu4c/common/ucnvhz.cpp b/thirdparty/icu4c/common/ucnvhz.cpp
index 6b2f5faaf0..e0d2f0775d 100644
--- a/thirdparty/icu4c/common/ucnvhz.cpp
+++ b/thirdparty/icu4c/common/ucnvhz.cpp
@@ -111,18 +111,18 @@ _HZReset(UConverter *cnv, UConverterResetChoice choice){
cnv->toUnicodeStatus = 0;
cnv->mode=0;
if(cnv->extraInfo != NULL){
- ((UConverterDataHZ*)cnv->extraInfo)->isStateDBCS = FALSE;
- ((UConverterDataHZ*)cnv->extraInfo)->isEmptySegment = FALSE;
+ ((UConverterDataHZ*)cnv->extraInfo)->isStateDBCS = false;
+ ((UConverterDataHZ*)cnv->extraInfo)->isEmptySegment = false;
}
}
if(choice!=UCNV_RESET_TO_UNICODE) {
cnv->fromUnicodeStatus= 0;
cnv->fromUChar32=0x0000;
if(cnv->extraInfo != NULL){
- ((UConverterDataHZ*)cnv->extraInfo)->isEscapeAppended = FALSE;
+ ((UConverterDataHZ*)cnv->extraInfo)->isEscapeAppended = false;
((UConverterDataHZ*)cnv->extraInfo)->targetIndex = 0;
((UConverterDataHZ*)cnv->extraInfo)->sourceIndex = 0;
- ((UConverterDataHZ*)cnv->extraInfo)->isTargetUCharDBCS = FALSE;
+ ((UConverterDataHZ*)cnv->extraInfo)->isTargetUCharDBCS = false;
}
}
}
@@ -189,13 +189,13 @@ UConverter_toUnicode_HZ_OFFSETS_LOGIC(UConverterToUnicodeArgs *args,
args->offsets[myTarget - args->target]=(int32_t)(mySource - args->source - 2);
}
*(myTarget++)=(UChar)mySourceChar;
- myData->isEmptySegment = FALSE;
+ myData->isEmptySegment = false;
continue;
case UCNV_OPEN_BRACE:
case UCNV_CLOSE_BRACE:
myData->isStateDBCS = (mySourceChar == UCNV_OPEN_BRACE);
if (myData->isEmptySegment) {
- myData->isEmptySegment = FALSE; /* we are handling it, reset to avoid future spurious errors */
+ myData->isEmptySegment = false; /* we are handling it, reset to avoid future spurious errors */
*err = U_ILLEGAL_ESCAPE_SEQUENCE;
args->converter->toUCallbackReason = UCNV_IRREGULAR;
args->converter->toUBytes[0] = UCNV_TILDE;
@@ -205,7 +205,7 @@ UConverter_toUnicode_HZ_OFFSETS_LOGIC(UConverterToUnicodeArgs *args,
args->source = mySource;
return;
}
- myData->isEmptySegment = TRUE;
+ myData->isEmptySegment = true;
continue;
default:
/* if the first byte is equal to TILDE and the trail byte
@@ -217,7 +217,7 @@ UConverter_toUnicode_HZ_OFFSETS_LOGIC(UConverterToUnicodeArgs *args,
* - If any of the non-initial bytes could be the start of a character,
* we stop the illegal sequence before the first one of those.
*/
- myData->isEmptySegment = FALSE; /* different error here, reset this to avoid spurious future error */
+ myData->isEmptySegment = false; /* different error here, reset this to avoid spurious future error */
*err = U_ILLEGAL_ESCAPE_SEQUENCE;
args->converter->toUBytes[0] = UCNV_TILDE;
if( myData->isStateDBCS ?
@@ -244,7 +244,7 @@ UConverter_toUnicode_HZ_OFFSETS_LOGIC(UConverterToUnicodeArgs *args,
} else {
/* add another bit to distinguish a 0 byte from not having seen a lead byte */
args->converter->toUnicodeStatus = (uint32_t) (mySourceChar | 0x100);
- myData->isEmptySegment = FALSE; /* the segment has something, either valid or will produce a different error, so reset this */
+ myData->isEmptySegment = false; /* the segment has something, either valid or will produce a different error, so reset this */
}
continue;
}
@@ -289,10 +289,10 @@ UConverter_toUnicode_HZ_OFFSETS_LOGIC(UConverterToUnicodeArgs *args,
continue;
} else if(mySourceChar <= 0x7f) {
targetUniChar = (UChar)mySourceChar; /* ASCII */
- myData->isEmptySegment = FALSE; /* the segment has something valid */
+ myData->isEmptySegment = false; /* the segment has something valid */
} else {
targetUniChar = 0xffff;
- myData->isEmptySegment = FALSE; /* different error here, reset this to avoid spurious future error */
+ myData->isEmptySegment = false; /* different error here, reset this to avoid spurious future error */
}
}
if(targetUniChar < 0xfffe){
@@ -396,13 +396,13 @@ UConverter_fromUnicode_HZ_OFFSETS_LOGIC (UConverterFromUnicodeArgs * args,
len =ESC_LEN;
escSeq = SB_ESCAPE;
CONCAT_ESCAPE_MACRO(args, myTargetIndex, targetLength, escSeq,err,len,mySourceIndex);
- myConverterData->isEscapeAppended = TRUE;
+ myConverterData->isEscapeAppended = true;
}
else{ /* Shifting from a single byte to double byte mode*/
len =ESC_LEN;
escSeq = DB_ESCAPE;
CONCAT_ESCAPE_MACRO(args, myTargetIndex, targetLength, escSeq,err,len,mySourceIndex);
- myConverterData->isEscapeAppended = TRUE;
+ myConverterData->isEscapeAppended = true;
}
}
@@ -507,7 +507,7 @@ _HZ_WriteSub(UConverterFromUnicodeArgs *args, int32_t offsetIndex, UErrorCode *e
if( convData->isTargetUCharDBCS){
*p++= UCNV_TILDE;
*p++= UCNV_CLOSE_BRACE;
- convData->isTargetUCharDBCS=FALSE;
+ convData->isTargetUCharDBCS=false;
}
*p++= (char)cnv->subChars[0];
@@ -550,7 +550,7 @@ _HZ_SafeClone(const UConverter *cnv,
uprv_memcpy(&localClone->mydata, cnv->extraInfo, sizeof(UConverterDataHZ));
localClone->cnv.extraInfo = &localClone->mydata;
- localClone->cnv.isExtraLocal = TRUE;
+ localClone->cnv.isExtraLocal = true;
/* deep-clone the sub-converter */
size = (int32_t)sizeof(UConverter);
@@ -611,8 +611,8 @@ static const UConverterStaticData _HZStaticData={
4,
{ 0x1a, 0, 0, 0 },
1,
- FALSE,
- FALSE,
+ false,
+ false,
0,
0,
{ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, /* reserved */
diff --git a/thirdparty/icu4c/common/ucnvisci.cpp b/thirdparty/icu4c/common/ucnvisci.cpp
index f303e7e24f..4d747e1ff8 100644
--- a/thirdparty/icu4c/common/ucnvisci.cpp
+++ b/thirdparty/icu4c/common/ucnvisci.cpp
@@ -172,7 +172,7 @@ static const uint8_t pnjMap[80] = {
static UBool
isPNJConsonant(UChar32 c) {
if (c < 0xa00 || 0xa50 <= c) {
- return FALSE;
+ return false;
} else {
return (UBool)(pnjMap[c - 0xa00] & 1);
}
@@ -181,7 +181,7 @@ isPNJConsonant(UChar32 c) {
static UBool
isPNJBindiTippi(UChar32 c) {
if (c < 0xa00 || 0xa50 <= c) {
- return FALSE;
+ return false;
} else {
return (UBool)(pnjMap[c - 0xa00] >> 1);
}
@@ -202,7 +202,7 @@ _ISCIIOpen(UConverter *cnv, UConverterLoadArgs *pArgs, UErrorCode *errorCode) {
converterData->contextCharToUnicode=NO_CHAR_MARKER;
cnv->toUnicodeStatus = missingCharMarker;
converterData->contextCharFromUnicode=0x0000;
- converterData->resetToDefaultToUnicode=FALSE;
+ converterData->resetToDefaultToUnicode=false;
/* check if the version requested is supported */
if ((pArgs->options & UCNV_OPTIONS_VERSION_MASK) < 9) {
/* initialize state variables */
@@ -214,7 +214,7 @@ _ISCIIOpen(UConverter *cnv, UConverterLoadArgs *pArgs, UErrorCode *errorCode) {
= converterData->currentMaskToUnicode
= converterData->defMaskToUnicode = lookupInitialData[pArgs->options & UCNV_OPTIONS_VERSION_MASK].maskEnum;
- converterData->isFirstBuffer=TRUE;
+ converterData->isFirstBuffer=true;
(void)uprv_strcpy(converterData->name, ISCII_CNV_PREFIX);
len = (int32_t)uprv_strlen(converterData->name);
converterData->name[len]= (char)((pArgs->options & UCNV_OPTIONS_VERSION_MASK) + '0');
@@ -267,8 +267,8 @@ _ISCIIReset(UConverter *cnv, UConverterResetChoice choice) {
data->contextCharFromUnicode=0x00;
data->currentMaskFromUnicode=data->defMaskToUnicode;
data->currentDeltaFromUnicode=data->defDeltaToUnicode;
- data->isFirstBuffer=TRUE;
- data->resetToDefaultToUnicode=FALSE;
+ data->isFirstBuffer=true;
+ data->resetToDefaultToUnicode=false;
}
}
@@ -906,7 +906,7 @@ UConverter_fromUnicode_ISCII_OFFSETS_LOGIC(
UConverterDataISCII *converterData;
uint16_t newDelta=0;
uint16_t range = 0;
- UBool deltaChanged = FALSE;
+ UBool deltaChanged = false;
if ((args->converter == NULL) || (args->targetLimit < args->target) || (args->sourceLimit < args->source)) {
*err = U_ILLEGAL_ARGUMENT_ERROR;
@@ -986,8 +986,8 @@ UConverter_fromUnicode_ISCII_OFFSETS_LOGIC(
if (newDelta!= converterData->currentDeltaFromUnicode || converterData->isFirstBuffer) {
converterData->currentDeltaFromUnicode = newDelta;
converterData->currentMaskFromUnicode = lookupInitialData[range].maskEnum;
- deltaChanged =TRUE;
- converterData->isFirstBuffer=FALSE;
+ deltaChanged =true;
+ converterData->isFirstBuffer=false;
}
if (converterData->currentDeltaFromUnicode == PNJ_DELTA) {
@@ -1024,7 +1024,7 @@ UConverter_fromUnicode_ISCII_OFFSETS_LOGIC(
temp =(uint16_t)(ATR<<8);
temp += (uint16_t)((uint8_t) lookupInitialData[range].isciiLang);
/* reset */
- deltaChanged=FALSE;
+ deltaChanged=false;
/* now append ATR and language code */
WRITE_TO_TARGET_FROM_U(args,offsets,source,target,targetLimit,temp,err);
if (U_FAILURE(*err)) {
@@ -1330,7 +1330,7 @@ UConverter_toUnicode_ISCII_OFFSETS_LOGIC(UConverterToUnicodeArgs *args, UErrorCo
break;
case 0x0A:
case 0x0D:
- data->resetToDefaultToUnicode = TRUE;
+ data->resetToDefaultToUnicode = true;
GET_MAPPING(sourceChar,targetUniChar,data)
;
*contextCharToUnicode = sourceChar;
@@ -1338,12 +1338,12 @@ UConverter_toUnicode_ISCII_OFFSETS_LOGIC(UConverterToUnicodeArgs *args, UErrorCo
case ISCII_VOWEL_SIGN_E:
i=1;
- found=FALSE;
+ found=false;
for (; i<vowelSignESpecialCases[0][0]; i++) {
U_ASSERT(i<UPRV_LENGTHOF(vowelSignESpecialCases));
if (vowelSignESpecialCases[i][0]==(uint8_t)*contextCharToUnicode) {
targetUniChar=vowelSignESpecialCases[i][1];
- found=TRUE;
+ found=true;
break;
}
}
@@ -1397,12 +1397,12 @@ UConverter_toUnicode_ISCII_OFFSETS_LOGIC(UConverterToUnicodeArgs *args, UErrorCo
} else {
/* try to handle <CHAR> + ISCII_NUKTA special mappings */
i=1;
- found =FALSE;
+ found =false;
for (; i<nuktaSpecialCases[0][0]; i++) {
if (nuktaSpecialCases[i][0]==(uint8_t)
*contextCharToUnicode) {
targetUniChar=nuktaSpecialCases[i][1];
- found =TRUE;
+ found =true;
break;
}
}
@@ -1472,10 +1472,10 @@ UConverter_toUnicode_ISCII_OFFSETS_LOGIC(UConverterToUnicodeArgs *args, UErrorCo
if (targetUniChar != missingCharMarker) {
/* now save the targetUniChar for delayed write */
*toUnicodeStatus = (UChar) targetUniChar;
- if (data->resetToDefaultToUnicode==TRUE) {
+ if (data->resetToDefaultToUnicode==true) {
data->currentDeltaToUnicode = data->defDeltaToUnicode;
data->currentMaskToUnicode = data->defMaskToUnicode;
- data->resetToDefaultToUnicode=FALSE;
+ data->resetToDefaultToUnicode=false;
}
} else {
@@ -1550,7 +1550,7 @@ _ISCII_SafeClone(const UConverter *cnv,
uprv_memcpy(&localClone->mydata, cnv->extraInfo, sizeof(UConverterDataISCII));
localClone->cnv.extraInfo = &localClone->mydata;
- localClone->cnv.isExtraLocal = TRUE;
+ localClone->cnv.isExtraLocal = true;
return &localClone->cnv;
}
@@ -1621,8 +1621,8 @@ static const UConverterStaticData _ISCIIStaticData={
4,
{ 0x1a, 0, 0, 0 },
0x1,
- FALSE,
- FALSE,
+ false,
+ false,
0x0,
0x0,
{ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, /* reserved */
diff --git a/thirdparty/icu4c/common/ucnvlat1.cpp b/thirdparty/icu4c/common/ucnvlat1.cpp
index 358bc0caa2..05aad6a0e0 100644
--- a/thirdparty/icu4c/common/ucnvlat1.cpp
+++ b/thirdparty/icu4c/common/ucnvlat1.cpp
@@ -465,7 +465,7 @@ static const UConverterStaticData _Latin1StaticData={
sizeof(UConverterStaticData),
"ISO-8859-1",
819, UCNV_IBM, UCNV_LATIN_1, 1, 1,
- { 0x1a, 0, 0, 0 }, 1, FALSE, FALSE,
+ { 0x1a, 0, 0, 0 }, 1, false, false,
0,
0,
{ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 } /* reserved */
@@ -744,7 +744,7 @@ static const UConverterStaticData _ASCIIStaticData={
sizeof(UConverterStaticData),
"US-ASCII",
367, UCNV_IBM, UCNV_US_ASCII, 1, 1,
- { 0x1a, 0, 0, 0 }, 1, FALSE, FALSE,
+ { 0x1a, 0, 0, 0 }, 1, false, false,
0,
0,
{ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 } /* reserved */
diff --git a/thirdparty/icu4c/common/ucnvmbcs.cpp b/thirdparty/icu4c/common/ucnvmbcs.cpp
index 420aa02af5..0e753c8ffb 100644
--- a/thirdparty/icu4c/common/ucnvmbcs.cpp
+++ b/thirdparty/icu4c/common/ucnvmbcs.cpp
@@ -373,7 +373,7 @@
* @param value contains 1..4 bytes of the first byte sequence, right-aligned
* @param codePoints resulting Unicode code points, or negative if a byte sequence does
* not map to anything
- * @return TRUE to continue enumeration, FALSE to stop
+ * @return true to continue enumeration, false to stop
*/
typedef UBool U_CALLCONV
UConverterEnumToUCallback(const void *context, uint32_t value, UChar32 codePoints[32]);
@@ -514,7 +514,7 @@ static const UConverterImpl _MBCSImpl={
const UConverterSharedData _MBCSData={
sizeof(UConverterSharedData), 1,
- NULL, NULL, FALSE, TRUE, &_MBCSImpl,
+ NULL, NULL, false, true, &_MBCSImpl,
0, UCNV_MBCS_TABLE_INITIALIZER
};
@@ -668,7 +668,7 @@ enumToU(UConverterMBCSTable *mbcsTable, int8_t stateProps[],
value|(uint32_t)b,
callback, context,
pErrorCode)) {
- return FALSE;
+ return false;
}
}
codePoints[b&0x1f]=U_SENTINEL;
@@ -719,13 +719,13 @@ enumToU(UConverterMBCSTable *mbcsTable, int8_t stateProps[],
if(((++b)&0x1f)==0) {
if(anyCodePoints>=0) {
if(!callback(context, value|(uint32_t)(b-0x20), codePoints)) {
- return FALSE;
+ return false;
}
anyCodePoints=-1;
}
}
}
- return TRUE;
+ return true;
}
/*
@@ -1111,7 +1111,7 @@ _extFromU(UConverter *cnv, const UConverterSharedData *sharedData,
UErrorCode *pErrorCode) {
const int32_t *cx;
- cnv->useSubChar1=FALSE;
+ cnv->useSubChar1=false;
if( (cx=sharedData->mbcs.extIndexes)!=NULL &&
ucnv_extInitialMatchFromU(
@@ -1286,7 +1286,7 @@ _EBCDICSwapLFNL(UConverterSharedData *sharedData, UErrorCode *pErrorCode) {
mbcsTable->stateTable[0][EBCDIC_LF]==MBCS_ENTRY_FINAL(0, MBCS_STATE_VALID_DIRECT_16, U_LF) &&
mbcsTable->stateTable[0][EBCDIC_NL]==MBCS_ENTRY_FINAL(0, MBCS_STATE_VALID_DIRECT_16, U_NL)
)) {
- return FALSE;
+ return false;
}
if(mbcsTable->outputType==MBCS_OUTPUT_1) {
@@ -1294,7 +1294,7 @@ _EBCDICSwapLFNL(UConverterSharedData *sharedData, UErrorCode *pErrorCode) {
EBCDIC_RT_LF==MBCS_SINGLE_RESULT_FROM_U(table, results, U_LF) &&
EBCDIC_RT_NL==MBCS_SINGLE_RESULT_FROM_U(table, results, U_NL)
)) {
- return FALSE;
+ return false;
}
} else /* MBCS_OUTPUT_2_SISO */ {
stage2Entry=MBCS_STAGE_2_FROM_U(table, U_LF);
@@ -1302,7 +1302,7 @@ _EBCDICSwapLFNL(UConverterSharedData *sharedData, UErrorCode *pErrorCode) {
MBCS_FROM_U_IS_ROUNDTRIP(stage2Entry, U_LF)!=0 &&
EBCDIC_LF==MBCS_VALUE_2_FROM_STAGE_2(bytes, stage2Entry, U_LF)
)) {
- return FALSE;
+ return false;
}
stage2Entry=MBCS_STAGE_2_FROM_U(table, U_NL);
@@ -1310,7 +1310,7 @@ _EBCDICSwapLFNL(UConverterSharedData *sharedData, UErrorCode *pErrorCode) {
MBCS_FROM_U_IS_ROUNDTRIP(stage2Entry, U_NL)!=0 &&
EBCDIC_NL==MBCS_VALUE_2_FROM_STAGE_2(bytes, stage2Entry, U_NL)
)) {
- return FALSE;
+ return false;
}
}
@@ -1334,7 +1334,7 @@ _EBCDICSwapLFNL(UConverterSharedData *sharedData, UErrorCode *pErrorCode) {
* ucnv_MBCSSizeofFromUBytes() function.
*/
*pErrorCode=U_INVALID_FORMAT_ERROR;
- return FALSE;
+ return false;
}
/*
@@ -1351,7 +1351,7 @@ _EBCDICSwapLFNL(UConverterSharedData *sharedData, UErrorCode *pErrorCode) {
p=(uint8_t *)uprv_malloc(size);
if(p==NULL) {
*pErrorCode=U_MEMORY_ALLOCATION_ERROR;
- return FALSE;
+ return false;
}
/* copy and modify the to-Unicode state table */
@@ -1397,7 +1397,7 @@ _EBCDICSwapLFNL(UConverterSharedData *sharedData, UErrorCode *pErrorCode) {
if(newStateTable!=NULL) {
uprv_free(newStateTable);
}
- return TRUE;
+ return true;
}
/* reconstitute omitted fromUnicode data ------------------------------------ */
@@ -1477,7 +1477,7 @@ writeStage3Roundtrip(const void *context, uint32_t value, UChar32 codePoints[32]
/* set the roundtrip flag */
*stage2|=(1UL<<(16+(c&0xf)));
}
- return TRUE;
+ return true;
}
static void
@@ -1561,7 +1561,7 @@ ucnv_MBCSLoad(UConverterSharedData *sharedData,
_MBCSHeader *header=(_MBCSHeader *)raw;
uint32_t offset;
uint32_t headerLength;
- UBool noFromU=FALSE;
+ UBool noFromU=false;
if(header->version[0]==4) {
headerLength=MBCS_HEADER_V4_LENGTH;
@@ -1726,7 +1726,7 @@ ucnv_MBCSLoad(UConverterSharedData *sharedData,
}
mbcsTable->stateTable=(const int32_t (*)[256])newStateTable;
mbcsTable->countStates=(uint8_t)(count+1);
- mbcsTable->stateTableOwned=TRUE;
+ mbcsTable->stateTableOwned=true;
mbcsTable->outputType=MBCS_OUTPUT_DBCS_ONLY;
}
@@ -1805,7 +1805,7 @@ ucnv_MBCSLoad(UConverterSharedData *sharedData,
(header->version[2]>=(MBCS_FAST_MAX>>8))
)
) {
- mbcsTable->utf8Friendly=TRUE;
+ mbcsTable->utf8Friendly=true;
if(mbcsTable->countStates==1) {
/*
@@ -2411,13 +2411,13 @@ hasValidTrailBytes(const int32_t (*stateTable)[256], uint8_t state) {
if( !MBCS_ENTRY_IS_TRANSITION(entry) &&
MBCS_ENTRY_FINAL_ACTION(entry)!=MBCS_STATE_ILLEGAL
) {
- return TRUE;
+ return true;
}
entry=row[0x41];
if( !MBCS_ENTRY_IS_TRANSITION(entry) &&
MBCS_ENTRY_FINAL_ACTION(entry)!=MBCS_STATE_ILLEGAL
) {
- return TRUE;
+ return true;
}
/* Then test for final entries in this state. */
for(b=0; b<=0xff; ++b) {
@@ -2425,7 +2425,7 @@ hasValidTrailBytes(const int32_t (*stateTable)[256], uint8_t state) {
if( !MBCS_ENTRY_IS_TRANSITION(entry) &&
MBCS_ENTRY_FINAL_ACTION(entry)!=MBCS_STATE_ILLEGAL
) {
- return TRUE;
+ return true;
}
}
/* Then recurse for transition entries. */
@@ -2434,10 +2434,10 @@ hasValidTrailBytes(const int32_t (*stateTable)[256], uint8_t state) {
if( MBCS_ENTRY_IS_TRANSITION(entry) &&
hasValidTrailBytes(stateTable, (uint8_t)MBCS_ENTRY_TRANSITION_STATE(entry))
) {
- return TRUE;
+ return true;
}
}
- return FALSE;
+ return false;
}
/*
@@ -2454,7 +2454,7 @@ isSingleOrLead(const int32_t (*stateTable)[256], uint8_t state, UBool isDBCSOnly
} else {
uint8_t action=(uint8_t)(MBCS_ENTRY_FINAL_ACTION(entry));
if(action==MBCS_STATE_CHANGE_ONLY && isDBCSOnly) {
- return FALSE; /* SI/SO are illegal for DBCS-only conversion */
+ return false; /* SI/SO are illegal for DBCS-only conversion */
} else {
return action!=MBCS_STATE_ILLEGAL;
}
@@ -5672,7 +5672,7 @@ ucnv_MBCSWriteSub(UConverterFromUnicodeArgs *pArgs,
}
/* reset the selector for the next code point */
- cnv->useSubChar1=FALSE;
+ cnv->useSubChar1=false;
if (cnv->sharedData->mbcs.outputType == MBCS_OUTPUT_2_SISO) {
p=buffer;
diff --git a/thirdparty/icu4c/common/ucnvscsu.cpp b/thirdparty/icu4c/common/ucnvscsu.cpp
index 7b580291e1..86e850a998 100644
--- a/thirdparty/icu4c/common/ucnvscsu.cpp
+++ b/thirdparty/icu4c/common/ucnvscsu.cpp
@@ -163,7 +163,7 @@ _SCSUReset(UConverter *cnv, UConverterResetChoice choice) {
/* reset toUnicode */
uprv_memcpy(scsu->toUDynamicOffsets, initialDynamicOffsets, 32);
- scsu->toUIsSingleByteMode=TRUE;
+ scsu->toUIsSingleByteMode=true;
scsu->toUState=readCommand;
scsu->toUQuoteWindow=scsu->toUDynamicWindow=0;
scsu->toUByteOne=0;
@@ -174,7 +174,7 @@ _SCSUReset(UConverter *cnv, UConverterResetChoice choice) {
/* reset fromUnicode */
uprv_memcpy(scsu->fromUDynamicOffsets, initialDynamicOffsets, 32);
- scsu->fromUIsSingleByteMode=TRUE;
+ scsu->fromUIsSingleByteMode=true;
scsu->fromUDynamicWindow=0;
scsu->nextWindowUseIndex=0;
@@ -371,7 +371,7 @@ singleByteMode:
state=quotePairOne;
} else if(b==SCU) {
sourceIndex=nextSourceIndex;
- isSingleByteMode=FALSE;
+ isSingleByteMode=false;
goto fastUnicode;
} else /* Srs */ {
/* callback(illegal) */
@@ -508,17 +508,17 @@ fastUnicode:
} else if(/* UC0<=b && */ b<=UC7) {
dynamicWindow=(int8_t)(b-UC0);
sourceIndex=nextSourceIndex;
- isSingleByteMode=TRUE;
+ isSingleByteMode=true;
goto fastSingle;
} else if(/* UD0<=b && */ b<=UD7) {
dynamicWindow=(int8_t)(b-UD0);
- isSingleByteMode=TRUE;
+ isSingleByteMode=true;
cnv->toUBytes[0]=b;
cnv->toULength=1;
state=defineOne;
goto singleByteMode;
} else if(b==UDX) {
- isSingleByteMode=TRUE;
+ isSingleByteMode=true;
cnv->toUBytes[0]=b;
cnv->toULength=1;
state=definePairOne;
@@ -695,7 +695,7 @@ singleByteMode:
} else if(b==SQU) {
state=quotePairOne;
} else if(b==SCU) {
- isSingleByteMode=FALSE;
+ isSingleByteMode=false;
goto fastUnicode;
} else /* Srs */ {
/* callback(illegal) */
@@ -805,17 +805,17 @@ fastUnicode:
state=quotePairTwo;
} else if(/* UC0<=b && */ b<=UC7) {
dynamicWindow=(int8_t)(b-UC0);
- isSingleByteMode=TRUE;
+ isSingleByteMode=true;
goto fastSingle;
} else if(/* UD0<=b && */ b<=UD7) {
dynamicWindow=(int8_t)(b-UD0);
- isSingleByteMode=TRUE;
+ isSingleByteMode=true;
cnv->toUBytes[0]=b;
cnv->toULength=1;
state=defineOne;
goto singleByteMode;
} else if(b==UDX) {
- isSingleByteMode=TRUE;
+ isSingleByteMode=true;
cnv->toUBytes[0]=b;
cnv->toULength=1;
state=definePairOne;
@@ -1159,7 +1159,7 @@ getTrailSingle:
goto outputBytes;
} else {
/* change to Unicode mode and output this (lead, trail) pair */
- isSingleByteMode=FALSE;
+ isSingleByteMode=false;
*target++=(uint8_t)SCU;
if(offsets!=NULL) {
*offsets++=sourceIndex;
@@ -1218,7 +1218,7 @@ getTrailSingle:
* switch to Unicode mode if this is the last character in the block
* or there is at least one more ideograph following immediately
*/
- isSingleByteMode=FALSE;
+ isSingleByteMode=false;
c|=SCU<<16;
length=3;
goto outputBytes;
@@ -1269,13 +1269,13 @@ getTrailSingle:
if(!(source<sourceLimit && (uint32_t)(*source-0x3400)<(0xd800-0x3400))) {
if(((uint32_t)(c-0x30)<10 || (uint32_t)(c-0x61)<26 || (uint32_t)(c-0x41)<26)) {
/* ASCII digit or letter */
- isSingleByteMode=TRUE;
+ isSingleByteMode=true;
c|=((uint32_t)(UC0+dynamicWindow)<<8)|c;
length=2;
goto outputBytes;
} else if((window=getWindow(scsu->fromUDynamicOffsets, c))>=0) {
/* there is a dynamic window that contains this character, change to it */
- isSingleByteMode=TRUE;
+ isSingleByteMode=true;
dynamicWindow=window;
currentOffset=scsu->fromUDynamicOffsets[dynamicWindow];
useDynamicWindow(scsu, dynamicWindow);
@@ -1284,7 +1284,7 @@ getTrailSingle:
goto outputBytes;
} else if((code=getDynamicOffset(c, &offset))>=0) {
/* define a dynamic window with this character */
- isSingleByteMode=TRUE;
+ isSingleByteMode=true;
dynamicWindow=getNextDynamicWindow(scsu);
currentOffset=scsu->fromUDynamicOffsets[dynamicWindow]=offset;
useDynamicWindow(scsu, dynamicWindow);
@@ -1337,7 +1337,7 @@ getTrailUnicode:
* the following character is not uncompressible,
* change to the window
*/
- isSingleByteMode=TRUE;
+ isSingleByteMode=true;
dynamicWindow=window;
currentOffset=scsu->fromUDynamicOffsets[dynamicWindow];
useDynamicWindow(scsu, dynamicWindow);
@@ -1348,7 +1348,7 @@ getTrailUnicode:
(code=getDynamicOffset(c, &offset))>=0
) {
/* two supplementary characters in (probably) the same window - define an extended one */
- isSingleByteMode=TRUE;
+ isSingleByteMode=true;
code-=0x200;
dynamicWindow=getNextDynamicWindow(scsu);
currentOffset=scsu->fromUDynamicOffsets[dynamicWindow]=offset;
@@ -1645,7 +1645,7 @@ getTrailSingle:
goto outputBytes;
} else {
/* change to Unicode mode and output this (lead, trail) pair */
- isSingleByteMode=FALSE;
+ isSingleByteMode=false;
*target++=(uint8_t)SCU;
--targetCapacity;
c=((uint32_t)lead<<16)|trail;
@@ -1701,7 +1701,7 @@ getTrailSingle:
* switch to Unicode mode if this is the last character in the block
* or there is at least one more ideograph following immediately
*/
- isSingleByteMode=FALSE;
+ isSingleByteMode=false;
c|=SCU<<16;
length=3;
goto outputBytes;
@@ -1746,13 +1746,13 @@ getTrailSingle:
if(!(source<sourceLimit && (uint32_t)(*source-0x3400)<(0xd800-0x3400))) {
if(((uint32_t)(c-0x30)<10 || (uint32_t)(c-0x61)<26 || (uint32_t)(c-0x41)<26)) {
/* ASCII digit or letter */
- isSingleByteMode=TRUE;
+ isSingleByteMode=true;
c|=((uint32_t)(UC0+dynamicWindow)<<8)|c;
length=2;
goto outputBytes;
} else if((window=getWindow(scsu->fromUDynamicOffsets, c))>=0) {
/* there is a dynamic window that contains this character, change to it */
- isSingleByteMode=TRUE;
+ isSingleByteMode=true;
dynamicWindow=window;
currentOffset=scsu->fromUDynamicOffsets[dynamicWindow];
useDynamicWindow(scsu, dynamicWindow);
@@ -1761,7 +1761,7 @@ getTrailSingle:
goto outputBytes;
} else if((code=getDynamicOffset(c, &offset))>=0) {
/* define a dynamic window with this character */
- isSingleByteMode=TRUE;
+ isSingleByteMode=true;
dynamicWindow=getNextDynamicWindow(scsu);
currentOffset=scsu->fromUDynamicOffsets[dynamicWindow]=offset;
useDynamicWindow(scsu, dynamicWindow);
@@ -1813,7 +1813,7 @@ getTrailUnicode:
* the following character is not uncompressible,
* change to the window
*/
- isSingleByteMode=TRUE;
+ isSingleByteMode=true;
dynamicWindow=window;
currentOffset=scsu->fromUDynamicOffsets[dynamicWindow];
useDynamicWindow(scsu, dynamicWindow);
@@ -1824,7 +1824,7 @@ getTrailUnicode:
(code=getDynamicOffset(c, &offset))>=0
) {
/* two supplementary characters in (probably) the same window - define an extended one */
- isSingleByteMode=TRUE;
+ isSingleByteMode=true;
code-=0x200;
dynamicWindow=getNextDynamicWindow(scsu);
currentOffset=scsu->fromUDynamicOffsets[dynamicWindow]=offset;
@@ -1991,7 +1991,7 @@ _SCSUSafeClone(const UConverter *cnv,
uprv_memcpy(&localClone->mydata, cnv->extraInfo, sizeof(SCSUData));
localClone->cnv.extraInfo = &localClone->mydata;
- localClone->cnv.isExtraLocal = TRUE;
+ localClone->cnv.isExtraLocal = true;
return &localClone->cnv;
}
@@ -2033,7 +2033,7 @@ static const UConverterStaticData _SCSUStaticData={
* substitution string.
*/
{ 0x0e, 0xff, 0xfd, 0 }, 3,
- FALSE, FALSE,
+ false, false,
0,
0,
{ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 } /* reserved */
diff --git a/thirdparty/icu4c/common/ucnvsel.cpp b/thirdparty/icu4c/common/ucnvsel.cpp
index 2dff5ac1bc..15ee596a23 100644
--- a/thirdparty/icu4c/common/ucnvsel.cpp
+++ b/thirdparty/icu4c/common/ucnvsel.cpp
@@ -142,7 +142,7 @@ static void generateSelectorData(UConverterSelector* result,
result->trie = upvec_compactToUTrie2WithRowIndexes(upvec, status);
result->pv = upvec_cloneArray(upvec, &result->pvCount, NULL, status);
result->pvCount *= columns; // number of uint32_t = rows * columns
- result->ownPv = TRUE;
+ result->ownPv = true;
}
/* open a selector. If converterListSize is 0, build for all converters.
@@ -212,7 +212,7 @@ ucnvsel_open(const char* const* converterList, int32_t converterListSize,
--encodingStrPadding;
}
- newSelector->ownEncodingStrings = TRUE;
+ newSelector->ownEncodingStrings = true;
newSelector->encodingsCount = converterListSize;
UPropsVectors *upvec = upvec_open((converterListSize+31)/32, status);
generateSelectorData(newSelector.getAlias(), upvec, excludedCodePoints, whichSet, status);
diff --git a/thirdparty/icu4c/common/ucol_swp.cpp b/thirdparty/icu4c/common/ucol_swp.cpp
index 1af19863fa..59704ff8f6 100644
--- a/thirdparty/icu4c/common/ucol_swp.cpp
+++ b/thirdparty/icu4c/common/ucol_swp.cpp
@@ -34,7 +34,7 @@ U_CAPI UBool U_EXPORT2
ucol_looksLikeCollationBinary(const UDataSwapper *ds,
const void *inData, int32_t length) {
if(ds==NULL || inData==NULL || length<-1) {
- return FALSE;
+ return false;
}
// First check for format version 4+ which has a standard data header.
@@ -46,7 +46,7 @@ ucol_looksLikeCollationBinary(const UDataSwapper *ds,
info.dataFormat[1]==0x43 &&
info.dataFormat[2]==0x6f &&
info.dataFormat[3]==0x6c) {
- return TRUE;
+ return true;
}
}
@@ -64,7 +64,7 @@ ucol_looksLikeCollationBinary(const UDataSwapper *ds,
if(length<0) {
header.size=udata_readInt32(ds, inHeader->size);
} else if((length<(42*4) || length<(header.size=udata_readInt32(ds, inHeader->size)))) {
- return FALSE;
+ return false;
}
header.magic=ds->readUInt32(inHeader->magic);
@@ -73,14 +73,14 @@ ucol_looksLikeCollationBinary(const UDataSwapper *ds,
inHeader->formatVersion[0]==3 /*&&
inHeader->formatVersion[1]>=0*/
)) {
- return FALSE;
+ return false;
}
if(inHeader->isBigEndian!=ds->inIsBigEndian || inHeader->charSetFamily!=ds->inCharset) {
- return FALSE;
+ return false;
}
- return TRUE;
+ return true;
}
namespace {
diff --git a/thirdparty/icu4c/common/ucptrie_impl.h b/thirdparty/icu4c/common/ucptrie_impl.h
index 048353c80d..a7a80a8f08 100644
--- a/thirdparty/icu4c/common/ucptrie_impl.h
+++ b/thirdparty/icu4c/common/ucptrie_impl.h
@@ -54,80 +54,76 @@ struct UCPTrieHeader {
uint16_t shiftedHighStart;
};
+// Constants for use with UCPTrieHeader.options.
+constexpr uint16_t UCPTRIE_OPTIONS_DATA_LENGTH_MASK = 0xf000;
+constexpr uint16_t UCPTRIE_OPTIONS_DATA_NULL_OFFSET_MASK = 0xf00;
+constexpr uint16_t UCPTRIE_OPTIONS_RESERVED_MASK = 0x38;
+constexpr uint16_t UCPTRIE_OPTIONS_VALUE_BITS_MASK = 7;
+
/**
- * Constants for use with UCPTrieHeader.options.
- * @internal
+ * Value for index3NullOffset which indicates that there is no index-3 null block.
+ * Bit 15 is unused for this value because this bit is used if the index-3 contains
+ * 18-bit indexes.
*/
-enum {
- UCPTRIE_OPTIONS_DATA_LENGTH_MASK = 0xf000,
- UCPTRIE_OPTIONS_DATA_NULL_OFFSET_MASK = 0xf00,
- UCPTRIE_OPTIONS_RESERVED_MASK = 0x38,
- UCPTRIE_OPTIONS_VALUE_BITS_MASK = 7,
- /**
- * Value for index3NullOffset which indicates that there is no index-3 null block.
- * Bit 15 is unused for this value because this bit is used if the index-3 contains
- * 18-bit indexes.
- */
- UCPTRIE_NO_INDEX3_NULL_OFFSET = 0x7fff,
- UCPTRIE_NO_DATA_NULL_OFFSET = 0xfffff
-};
+constexpr int32_t UCPTRIE_NO_INDEX3_NULL_OFFSET = 0x7fff;
+constexpr int32_t UCPTRIE_NO_DATA_NULL_OFFSET = 0xfffff;
// Internal constants.
-enum {
- /** The length of the BMP index table. 1024=0x400 */
- UCPTRIE_BMP_INDEX_LENGTH = 0x10000 >> UCPTRIE_FAST_SHIFT,
- UCPTRIE_SMALL_LIMIT = 0x1000,
- UCPTRIE_SMALL_INDEX_LENGTH = UCPTRIE_SMALL_LIMIT >> UCPTRIE_FAST_SHIFT,
+/** The length of the BMP index table. 1024=0x400 */
+constexpr int32_t UCPTRIE_BMP_INDEX_LENGTH = 0x10000 >> UCPTRIE_FAST_SHIFT;
- /** Shift size for getting the index-3 table offset. */
- UCPTRIE_SHIFT_3 = 4,
+constexpr int32_t UCPTRIE_SMALL_LIMIT = 0x1000;
+constexpr int32_t UCPTRIE_SMALL_INDEX_LENGTH = UCPTRIE_SMALL_LIMIT >> UCPTRIE_FAST_SHIFT;
- /** Shift size for getting the index-2 table offset. */
- UCPTRIE_SHIFT_2 = 5 + UCPTRIE_SHIFT_3,
+/** Shift size for getting the index-3 table offset. */
+constexpr int32_t UCPTRIE_SHIFT_3 = 4;
- /** Shift size for getting the index-1 table offset. */
- UCPTRIE_SHIFT_1 = 5 + UCPTRIE_SHIFT_2,
+/** Shift size for getting the index-2 table offset. */
+constexpr int32_t UCPTRIE_SHIFT_2 = 5 + UCPTRIE_SHIFT_3;
- /**
- * Difference between two shift sizes,
- * for getting an index-2 offset from an index-3 offset. 5=9-4
- */
- UCPTRIE_SHIFT_2_3 = UCPTRIE_SHIFT_2 - UCPTRIE_SHIFT_3,
+/** Shift size for getting the index-1 table offset. */
+constexpr int32_t UCPTRIE_SHIFT_1 = 5 + UCPTRIE_SHIFT_2;
- /**
- * Difference between two shift sizes,
- * for getting an index-1 offset from an index-2 offset. 5=14-9
- */
- UCPTRIE_SHIFT_1_2 = UCPTRIE_SHIFT_1 - UCPTRIE_SHIFT_2,
+/**
+ * Difference between two shift sizes,
+ * for getting an index-2 offset from an index-3 offset. 5=9-4
+ */
+constexpr int32_t UCPTRIE_SHIFT_2_3 = UCPTRIE_SHIFT_2 - UCPTRIE_SHIFT_3;
- /**
- * Number of index-1 entries for the BMP. (4)
- * This part of the index-1 table is omitted from the serialized form.
- */
- UCPTRIE_OMITTED_BMP_INDEX_1_LENGTH = 0x10000 >> UCPTRIE_SHIFT_1,
+/**
+ * Difference between two shift sizes,
+ * for getting an index-1 offset from an index-2 offset. 5=14-9
+ */
+constexpr int32_t UCPTRIE_SHIFT_1_2 = UCPTRIE_SHIFT_1 - UCPTRIE_SHIFT_2;
- /** Number of entries in an index-2 block. 32=0x20 */
- UCPTRIE_INDEX_2_BLOCK_LENGTH = 1 << UCPTRIE_SHIFT_1_2,
+/**
+ * Number of index-1 entries for the BMP. (4)
+ * This part of the index-1 table is omitted from the serialized form.
+ */
+constexpr int32_t UCPTRIE_OMITTED_BMP_INDEX_1_LENGTH = 0x10000 >> UCPTRIE_SHIFT_1;
- /** Mask for getting the lower bits for the in-index-2-block offset. */
- UCPTRIE_INDEX_2_MASK = UCPTRIE_INDEX_2_BLOCK_LENGTH - 1,
+/** Number of entries in an index-2 block. 32=0x20 */
+constexpr int32_t UCPTRIE_INDEX_2_BLOCK_LENGTH = 1 << UCPTRIE_SHIFT_1_2;
- /** Number of code points per index-2 table entry. 512=0x200 */
- UCPTRIE_CP_PER_INDEX_2_ENTRY = 1 << UCPTRIE_SHIFT_2,
+/** Mask for getting the lower bits for the in-index-2-block offset. */
+constexpr int32_t UCPTRIE_INDEX_2_MASK = UCPTRIE_INDEX_2_BLOCK_LENGTH - 1;
- /** Number of entries in an index-3 block. 32=0x20 */
- UCPTRIE_INDEX_3_BLOCK_LENGTH = 1 << UCPTRIE_SHIFT_2_3,
+/** Number of code points per index-2 table entry. 512=0x200 */
+constexpr int32_t UCPTRIE_CP_PER_INDEX_2_ENTRY = 1 << UCPTRIE_SHIFT_2;
- /** Mask for getting the lower bits for the in-index-3-block offset. */
- UCPTRIE_INDEX_3_MASK = UCPTRIE_INDEX_3_BLOCK_LENGTH - 1,
+/** Number of entries in an index-3 block. 32=0x20 */
+constexpr int32_t UCPTRIE_INDEX_3_BLOCK_LENGTH = 1 << UCPTRIE_SHIFT_2_3;
- /** Number of entries in a small data block. 16=0x10 */
- UCPTRIE_SMALL_DATA_BLOCK_LENGTH = 1 << UCPTRIE_SHIFT_3,
+/** Mask for getting the lower bits for the in-index-3-block offset. */
+constexpr int32_t UCPTRIE_INDEX_3_MASK = UCPTRIE_INDEX_3_BLOCK_LENGTH - 1;
+
+/** Number of entries in a small data block. 16=0x10 */
+constexpr int32_t UCPTRIE_SMALL_DATA_BLOCK_LENGTH = 1 << UCPTRIE_SHIFT_3;
+
+/** Mask for getting the lower bits for the in-small-data-block offset. */
+constexpr int32_t UCPTRIE_SMALL_DATA_MASK = UCPTRIE_SMALL_DATA_BLOCK_LENGTH - 1;
- /** Mask for getting the lower bits for the in-small-data-block offset. */
- UCPTRIE_SMALL_DATA_MASK = UCPTRIE_SMALL_DATA_BLOCK_LENGTH - 1
-};
typedef UChar32
UCPTrieGetRange(const void *trie, UChar32 start,
diff --git a/thirdparty/icu4c/common/ucurr.cpp b/thirdparty/icu4c/common/ucurr.cpp
index 6e489e0563..928d049fb5 100644
--- a/thirdparty/icu4c/common/ucurr.cpp
+++ b/thirdparty/icu4c/common/ucurr.cpp
@@ -97,11 +97,11 @@ static const char CURRENCYPLURALS[] = "CurrencyPlurals";
// ISO codes mapping table
static const UHashtable* gIsoCodes = NULL;
-static icu::UInitOnce gIsoCodesInitOnce = U_INITONCE_INITIALIZER;
+static icu::UInitOnce gIsoCodesInitOnce {};
// Currency symbol equivalances
static const icu::Hashtable* gCurrSymbolsEquiv = NULL;
-static icu::UInitOnce gCurrSymbolsEquivInitOnce = U_INITONCE_INITIALIZER;
+static icu::UInitOnce gCurrSymbolsEquivInitOnce {};
U_NAMESPACE_BEGIN
@@ -238,7 +238,7 @@ isoCodes_cleanup(void)
gIsoCodes = NULL;
}
gIsoCodesInitOnce.reset();
- return TRUE;
+ return true;
}
/**
@@ -250,7 +250,7 @@ currSymbolsEquiv_cleanup(void)
delete const_cast<icu::Hashtable *>(gCurrSymbolsEquiv);
gCurrSymbolsEquiv = NULL;
gCurrSymbolsEquivInitOnce.reset();
- return TRUE;
+ return true;
}
/**
@@ -349,7 +349,7 @@ _findMetaData(const UChar* currency, UErrorCode& ec) {
static void
idForLocale(const char* locale, char* countryAndVariant, int capacity, UErrorCode* ec)
{
- ulocimp_getRegionForSupplementalData(locale, FALSE, countryAndVariant, capacity, ec);
+ ulocimp_getRegionForSupplementalData(locale, false, countryAndVariant, capacity, ec);
}
// ------------------------------------------
@@ -409,7 +409,7 @@ struct CReg : public icu::UMemory {
}
static UBool unreg(UCurrRegistryKey key) {
- UBool found = FALSE;
+ UBool found = false;
umtx_lock(&gCRegLock);
CReg** p = &gCRegHead;
@@ -417,7 +417,7 @@ struct CReg : public icu::UMemory {
if (*p == key) {
*p = ((CReg*)key)->next;
delete (CReg*)key;
- found = TRUE;
+ found = true;
break;
}
p = &((*p)->next);
@@ -476,7 +476,7 @@ ucurr_unregister(UCurrRegistryKey key, UErrorCode* status)
if (status && U_SUCCESS(*status)) {
return CReg::unreg(key);
}
- return FALSE;
+ return false;
}
#endif /* UCONFIG_NO_SERVICE */
@@ -503,7 +503,7 @@ static UBool U_CALLCONV currency_cleanup(void) {
isoCodes_cleanup();
currSymbolsEquiv_cleanup();
- return TRUE;
+ return true;
}
U_CDECL_END
@@ -566,9 +566,32 @@ ucurr_forLocale(const char* locale,
UResourceBundle *rb = ures_openDirect(U_ICUDATA_CURR, CURRENCY_DATA, &localStatus);
UResourceBundle *cm = ures_getByKey(rb, CURRENCY_MAP, rb, &localStatus);
UResourceBundle *countryArray = ures_getByKey(rb, id, cm, &localStatus);
- UResourceBundle *currencyReq = ures_getByIndex(countryArray, 0, NULL, &localStatus);
- s = ures_getStringByKey(currencyReq, "id", &resLen, &localStatus);
- ures_close(currencyReq);
+ // https://unicode-org.atlassian.net/browse/ICU-21997
+ // Prefer to use currencies that are legal tender.
+ if (U_SUCCESS(localStatus)) {
+ int32_t arrayLength = ures_getSize(countryArray);
+ for (int32_t i = 0; i < arrayLength; ++i) {
+ LocalUResourceBundlePointer currencyReq(
+ ures_getByIndex(countryArray, i, nullptr, &localStatus));
+ // The currency is legal tender if it is *not* marked with tender{"false"}.
+ UErrorCode tenderStatus = localStatus;
+ const UChar *tender =
+ ures_getStringByKey(currencyReq.getAlias(), "tender", nullptr, &tenderStatus);
+ bool isTender = U_FAILURE(tenderStatus) || u_strcmp(tender, u"false") != 0;
+ if (!isTender && s != nullptr) {
+ // We already have a non-tender currency. Ignore all following non-tender ones.
+ continue;
+ }
+ // Fetch the currency code.
+ s = ures_getStringByKey(currencyReq.getAlias(), "id", &resLen, &localStatus);
+ if (isTender) {
+ break;
+ }
+ }
+ if (U_SUCCESS(localStatus) && s == nullptr) {
+ localStatus = U_MISSING_RESOURCE_ERROR;
+ }
+ }
ures_close(countryArray);
}
@@ -598,12 +621,12 @@ ucurr_forLocale(const char* locale,
* Modify the given locale name by removing the rightmost _-delimited
* element. If there is none, empty the string ("" == root).
* NOTE: The string "root" is not recognized; do not use it.
- * @return TRUE if the fallback happened; FALSE if locale is already
+ * @return true if the fallback happened; false if locale is already
* root ("").
*/
static UBool fallback(char *loc) {
if (!*loc) {
- return FALSE;
+ return false;
}
UErrorCode status = U_ZERO_ERROR;
if (uprv_strcmp(loc, "en_GB") == 0) {
@@ -623,7 +646,7 @@ static UBool fallback(char *loc) {
}
*i = 0;
*/
- return TRUE;
+ return true;
}
@@ -729,7 +752,7 @@ ucurr_getName(const UChar* currency,
// We no longer support choice format data in names. Data should not contain
// choice patterns.
if (isChoiceFormat != NULL) {
- *isChoiceFormat = FALSE;
+ *isChoiceFormat = false;
}
if (U_SUCCESS(ec2)) {
U_ASSERT(s != NULL);
@@ -896,7 +919,7 @@ getCurrencyNameCount(const char* loc, int32_t* total_currency_name_count, int32_
s = ures_getStringByIndex(names, UCURR_SYMBOL_NAME, &len, &ec2);
++(*total_currency_symbol_count); // currency symbol
if (currencySymbolsEquiv != NULL) {
- *total_currency_symbol_count += countEquivalent(*currencySymbolsEquiv, UnicodeString(TRUE, s, len));
+ *total_currency_symbol_count += countEquivalent(*currencySymbolsEquiv, UnicodeString(true, s, len));
}
++(*total_currency_symbol_count); // iso code
++(*total_currency_name_count); // long name
@@ -1017,7 +1040,7 @@ collectCurrencyNames(const char* locale,
(*currencySymbols)[(*total_currency_symbol_count)++].currencyNameLen = len;
// Add equivalent symbols
if (currencySymbolsEquiv != NULL) {
- UnicodeString str(TRUE, s, len);
+ UnicodeString str(true, s, len);
icu::EquivIterator iter(*currencySymbolsEquiv, str);
const UnicodeString *symbol;
while ((symbol = iter.next()) != NULL) {
@@ -1401,7 +1424,7 @@ currency_cache_cleanup(void) {
currCache[i] = 0;
}
}
- return TRUE;
+ return true;
}
@@ -1928,6 +1951,7 @@ static const struct CurrencyList {
{"SHP", UCURR_COMMON|UCURR_NON_DEPRECATED},
{"SIT", UCURR_COMMON|UCURR_DEPRECATED},
{"SKK", UCURR_COMMON|UCURR_DEPRECATED},
+ {"SLE", UCURR_COMMON|UCURR_NON_DEPRECATED},
{"SLL", UCURR_COMMON|UCURR_NON_DEPRECATED},
{"SOS", UCURR_COMMON|UCURR_NON_DEPRECATED},
{"SRD", UCURR_COMMON|UCURR_NON_DEPRECATED},
@@ -1965,6 +1989,7 @@ static const struct CurrencyList {
{"UYW", UCURR_UNCOMMON|UCURR_NON_DEPRECATED},
{"UZS", UCURR_COMMON|UCURR_NON_DEPRECATED},
{"VEB", UCURR_COMMON|UCURR_DEPRECATED},
+ {"VED", UCURR_UNCOMMON|UCURR_NON_DEPRECATED},
{"VEF", UCURR_COMMON|UCURR_NON_DEPRECATED},
{"VES", UCURR_COMMON|UCURR_NON_DEPRECATED},
{"VND", UCURR_COMMON|UCURR_NON_DEPRECATED},
@@ -2220,19 +2245,19 @@ U_CAPI UBool U_EXPORT2
ucurr_isAvailable(const UChar* isoCode, UDate from, UDate to, UErrorCode* eErrorCode) {
umtx_initOnce(gIsoCodesInitOnce, &initIsoCodes, *eErrorCode);
if (U_FAILURE(*eErrorCode)) {
- return FALSE;
+ return false;
}
IsoCodeEntry* result = (IsoCodeEntry *) uhash_get(gIsoCodes, isoCode);
if (result == NULL) {
- return FALSE;
+ return false;
} else if (from > to) {
*eErrorCode = U_ILLEGAL_ARGUMENT_ERROR;
- return FALSE;
+ return false;
} else if ((from > result->to) || (to < result->from)) {
- return FALSE;
+ return false;
}
- return TRUE;
+ return true;
}
static const icu::Hashtable* getCurrSymbolsEquiv() {
@@ -2537,7 +2562,7 @@ static const UEnumeration defaultKeywordValues = {
U_CAPI UEnumeration *U_EXPORT2 ucurr_getKeywordValuesForLocale(const char *key, const char *locale, UBool commonlyUsed, UErrorCode* status) {
// Resolve region
char prefRegion[ULOC_COUNTRY_CAPACITY];
- ulocimp_getRegionForSupplementalData(locale, TRUE, prefRegion, sizeof(prefRegion), status);
+ ulocimp_getRegionForSupplementalData(locale, true, prefRegion, sizeof(prefRegion), status);
// Read value from supplementalData
UList *values = ulist_createEmptyList(status);
@@ -2570,7 +2595,7 @@ U_CAPI UEnumeration *U_EXPORT2 ucurr_getKeywordValuesForLocale(const char *key,
break;
}
const char *region = ures_getKey(&bundlekey);
- UBool isPrefRegion = uprv_strcmp(region, prefRegion) == 0 ? TRUE : FALSE;
+ UBool isPrefRegion = uprv_strcmp(region, prefRegion) == 0 ? true : false;
if (!isPrefRegion && commonlyUsed) {
// With commonlyUsed=true, we do not put
// currencies for other regions in the
@@ -2595,7 +2620,7 @@ U_CAPI UEnumeration *U_EXPORT2 ucurr_getKeywordValuesForLocale(const char *key,
}
#if U_CHARSET_FAMILY==U_ASCII_FAMILY
- ures_getUTF8StringByKey(&curbndl, "id", curID, &curIDLength, TRUE, status);
+ ures_getUTF8StringByKey(&curbndl, "id", curID, &curIDLength, true, status);
/* optimize - use the utf-8 string */
#else
{
@@ -2613,19 +2638,19 @@ U_CAPI UEnumeration *U_EXPORT2 ucurr_getKeywordValuesForLocale(const char *key,
if (U_FAILURE(*status)) {
break;
}
- UBool hasTo = FALSE;
+ UBool hasTo = false;
ures_getByKey(&curbndl, "to", &to, status);
if (U_FAILURE(*status)) {
// Do nothing here...
*status = U_ZERO_ERROR;
} else {
- hasTo = TRUE;
+ hasTo = true;
}
if (isPrefRegion && !hasTo && !ulist_containsString(values, curID, (int32_t)uprv_strlen(curID))) {
// Currently active currency for the target country
- ulist_addItemEndList(values, curID, TRUE, status);
+ ulist_addItemEndList(values, curID, true, status);
} else if (!ulist_containsString(otherValues, curID, (int32_t)uprv_strlen(curID)) && !commonlyUsed) {
- ulist_addItemEndList(otherValues, curID, TRUE, status);
+ ulist_addItemEndList(otherValues, curID, true, status);
} else {
uprv_free(curID);
}
@@ -2638,7 +2663,7 @@ U_CAPI UEnumeration *U_EXPORT2 ucurr_getKeywordValuesForLocale(const char *key,
// This could happen if no valid region is supplied in the input
// locale. In this case, we use the CLDR's default.
uenum_close(en);
- en = ucurr_getKeywordValuesForLocale(key, "und", TRUE, status);
+ en = ucurr_getKeywordValuesForLocale(key, "und", true, status);
}
} else {
// Consolidate the list
@@ -2648,7 +2673,7 @@ U_CAPI UEnumeration *U_EXPORT2 ucurr_getKeywordValuesForLocale(const char *key,
if (!ulist_containsString(values, value, (int32_t)uprv_strlen(value))) {
char *tmpValue = (char *)uprv_malloc(sizeof(char) * ULOC_KEYWORDS_CAPACITY);
uprv_memcpy(tmpValue, value, uprv_strlen(value) + 1);
- ulist_addItemEndList(values, tmpValue, TRUE, status);
+ ulist_addItemEndList(values, tmpValue, true, status);
if (U_FAILURE(*status)) {
break;
}
diff --git a/thirdparty/icu4c/common/udata.cpp b/thirdparty/icu4c/common/udata.cpp
index ec9c999cea..2bc74c9789 100644
--- a/thirdparty/icu4c/common/udata.cpp
+++ b/thirdparty/icu4c/common/udata.cpp
@@ -106,10 +106,10 @@ static UDataMemory *udata_findCachedData(const char *path, UErrorCode &err);
*/
static UDataMemory *gCommonICUDataArray[10] = { NULL }; // Access protected by icu global mutex.
-static u_atomic_int32_t gHaveTriedToLoadCommonData = ATOMIC_INT32_T_INITIALIZER(0); // See extendICUData().
+static u_atomic_int32_t gHaveTriedToLoadCommonData {0}; // See extendICUData().
static UHashtable *gCommonDataCache = NULL; /* Global hash table of opened ICU data files. */
-static icu::UInitOnce gCommonDataCacheInitOnce = U_INITONCE_INITIALIZER;
+static icu::UInitOnce gCommonDataCacheInitOnce {};
#if !defined(ICU_DATA_DIR_WINDOWS)
static UDataFileAccess gDataFileAccess = UDATA_DEFAULT_ACCESS; // Access not synchronized.
@@ -136,25 +136,25 @@ udata_cleanup(void)
}
gHaveTriedToLoadCommonData = 0;
- return TRUE; /* Everything was cleaned up */
+ return true; /* Everything was cleaned up */
}
static UBool U_CALLCONV
findCommonICUDataByName(const char *inBasename, UErrorCode &err)
{
- UBool found = FALSE;
+ UBool found = false;
int32_t i;
UDataMemory *pData = udata_findCachedData(inBasename, err);
if (U_FAILURE(err) || pData == NULL)
- return FALSE;
+ return false;
{
Mutex lock;
for (i = 0; i < UPRV_LENGTHOF(gCommonICUDataArray); ++i) {
if ((gCommonICUDataArray[i] != NULL) && (gCommonICUDataArray[i]->pHeader == pData->pHeader)) {
/* The data pointer is already in the array. */
- found = TRUE;
+ found = true;
break;
}
}
@@ -174,9 +174,9 @@ setCommonICUData(UDataMemory *pData, /* The new common data. Belongs to ca
{
UDataMemory *newCommonData = UDataMemory_createNewInstance(pErr);
int32_t i;
- UBool didUpdate = FALSE;
+ UBool didUpdate = false;
if (U_FAILURE(*pErr)) {
- return FALSE;
+ return false;
}
/* For the assignment, other threads must cleanly see either the old */
@@ -188,7 +188,7 @@ setCommonICUData(UDataMemory *pData, /* The new common data. Belongs to ca
for (i = 0; i < UPRV_LENGTHOF(gCommonICUDataArray); ++i) {
if (gCommonICUDataArray[i] == NULL) {
gCommonICUDataArray[i] = newCommonData;
- didUpdate = TRUE;
+ didUpdate = true;
break;
} else if (gCommonICUDataArray[i]->pHeader == pData->pHeader) {
/* The same data pointer is already in the array. */
@@ -216,7 +216,7 @@ setCommonICUDataPointer(const void *pData, UBool /*warn*/, UErrorCode *pErrorCod
UDataMemory_init(&tData);
UDataMemory_setData(&tData, pData);
udata_checkCommonData(&tData, pErrorCode);
- return setCommonICUData(&tData, FALSE, pErrorCode);
+ return setCommonICUData(&tData, false, pErrorCode);
}
#endif
@@ -429,7 +429,7 @@ private:
CharString pathBuffer; /* output path for this it'ion */
CharString packageStub; /* example: "/icudt28b". Will ignore that leaf in set paths. */
- UBool checkLastFour; /* if TRUE then allow paths such as '/foo/myapp.dat'
+ UBool checkLastFour; /* if true then allow paths such as '/foo/myapp.dat'
* to match, checks last 4 chars of suffix with
* last 4 of path, then previous chars. */
};
@@ -501,7 +501,7 @@ UDataPathIterator::UDataPathIterator(const char *inPath, const char *pkg,
suffix.data(),
itemPath.data(),
nextPath,
- checkLastFour?"TRUE":"false");
+ checkLastFour?"true":"false");
#endif
}
@@ -568,7 +568,7 @@ const char *UDataPathIterator::next(UErrorCode *pErrorCode)
/* check for .dat files */
pathBasename = findBasename(pathBuffer.data());
- if(checkLastFour == TRUE &&
+ if(checkLastFour == true &&
(pathLen>=4) &&
uprv_strncmp(pathBuffer.data() +(pathLen-4), suffix.data(), 4)==0 && /* suffix matches */
uprv_strncmp(findBasename(pathBuffer.data()), basename, basenameLen)==0 && /* base matches */
@@ -711,15 +711,15 @@ openCommonData(const char *path, /* Path from OpenChoice? */
*/
/*
if (uprv_getICUData_collation) {
- setCommonICUDataPointer(uprv_getICUData_collation(), FALSE, pErrorCode);
+ setCommonICUDataPointer(uprv_getICUData_collation(), false, pErrorCode);
}
if (uprv_getICUData_conversion) {
- setCommonICUDataPointer(uprv_getICUData_conversion(), FALSE, pErrorCode);
+ setCommonICUDataPointer(uprv_getICUData_conversion(), false, pErrorCode);
}
*/
#if !defined(ICU_DATA_DIR_WINDOWS)
// When using the Windows system data, we expect only a single data file.
- setCommonICUDataPointer(&U_ICUDATA_ENTRY_POINT, FALSE, pErrorCode);
+ setCommonICUDataPointer(&U_ICUDATA_ENTRY_POINT, false, pErrorCode);
{
Mutex lock;
return gCommonICUDataArray[commonDataIndex];
@@ -761,9 +761,9 @@ openCommonData(const char *path, /* Path from OpenChoice? */
* Hunt it down, trying all the path locations
*/
- UDataPathIterator iter(u_getDataDirectory(), inBasename, path, ".dat", TRUE, pErrorCode);
+ UDataPathIterator iter(u_getDataDirectory(), inBasename, path, ".dat", true, pErrorCode);
- while ((UDataMemory_isLoaded(&tData)==FALSE) && (pathBuffer = iter.next(pErrorCode)) != NULL)
+ while ((UDataMemory_isLoaded(&tData)==false) && (pathBuffer = iter.next(pErrorCode)) != NULL)
{
#ifdef UDATA_DEBUG
fprintf(stderr, "ocd: trying path %s - ", pathBuffer);
@@ -822,7 +822,7 @@ static UBool extendICUData(UErrorCode *pErr)
{
UDataMemory *pData;
UDataMemory copyPData;
- UBool didUpdate = FALSE;
+ UBool didUpdate = false;
/*
* There is a chance for a race condition here.
@@ -859,7 +859,7 @@ static UBool extendICUData(UErrorCode *pErr)
didUpdate = /* no longer using this result */
setCommonICUData(&copyPData,/* The new common data. */
- FALSE, /* No warnings if write didn't happen */
+ false, /* No warnings if write didn't happen */
pErr); /* setCommonICUData honors errors; NOP if error set */
}
@@ -906,7 +906,7 @@ udata_setCommonData(const void *data, UErrorCode *pErrorCode) {
/* we have good data */
/* Set it up as the ICU Common Data. */
- setCommonICUData(&dataMemory, TRUE, pErrorCode);
+ setCommonICUData(&dataMemory, true, pErrorCode);
}
/*---------------------------------------------------------------------------
@@ -999,7 +999,7 @@ static UDataMemory *doLoadFromIndividualFiles(const char *pkgName,
/* look in ind. files: package\nam.typ ========================= */
/* init path iterator for individual files */
- UDataPathIterator iter(dataPath, pkgName, path, tocEntryPathSuffix, FALSE, pErrorCode);
+ UDataPathIterator iter(dataPath, pkgName, path, tocEntryPathSuffix, false, pErrorCode);
while ((pathBuffer = iter.next(pErrorCode)) != NULL)
{
@@ -1055,7 +1055,7 @@ static UDataMemory *doLoadFromCommonData(UBool isICUData, const char * /*pkgName
const DataHeader *pHeader;
UDataMemory *pCommonData;
int32_t commonDataIndex;
- UBool checkedExtendedICUData = FALSE;
+ UBool checkedExtendedICUData = false;
/* try to get common data. The loop is for platforms such as the 390 that do
* not initially load the full set of ICU data. If the lookup of an ICU data item
* fails, the full (but slower to load) set is loaded, the and the loop repeats,
@@ -1104,7 +1104,7 @@ static UDataMemory *doLoadFromCommonData(UBool isICUData, const char * /*pkgName
} else if (pCommonData != NULL) {
++commonDataIndex; /* try the next data package */
} else if ((!checkedExtendedICUData) && extendICUData(subErrorCode)) {
- checkedExtendedICUData = TRUE;
+ checkedExtendedICUData = true;
/* try this data package slot again: it changed from NULL to non-NULL */
} else {
return NULL;
@@ -1169,7 +1169,7 @@ doOpenChoice(const char *path, const char *type, const char *name,
UErrorCode subErrorCode=U_ZERO_ERROR;
const char *treeChar;
- UBool isICUData = FALSE;
+ UBool isICUData = false;
FileTracer::traceOpen(path, type, name);
@@ -1182,7 +1182,7 @@ doOpenChoice(const char *path, const char *type, const char *name,
uprv_strlen(U_ICUDATA_NAME U_TREE_SEPARATOR_STRING)) ||
!uprv_strncmp(path, U_ICUDATA_ALIAS U_TREE_SEPARATOR_STRING, /* "ICUDATA-" */
uprv_strlen(U_ICUDATA_ALIAS U_TREE_SEPARATOR_STRING))) {
- isICUData = TRUE;
+ isICUData = true;
}
#if (U_FILE_SEP_CHAR != U_FILE_ALT_SEP_CHAR) /* Windows: try "foo\bar" and "foo/bar" */
diff --git a/thirdparty/icu4c/common/udatamem.cpp b/thirdparty/icu4c/common/udatamem.cpp
index 6bf7c01235..0f80de28eb 100644
--- a/thirdparty/icu4c/common/udatamem.cpp
+++ b/thirdparty/icu4c/common/udatamem.cpp
@@ -49,7 +49,7 @@ U_CFUNC UDataMemory *UDataMemory_createNewInstance(UErrorCode *pErr) {
*pErr = U_MEMORY_ALLOCATION_ERROR; }
else {
UDataMemory_init(This);
- This->heapAllocated = TRUE;
+ This->heapAllocated = true;
}
return This;
}
diff --git a/thirdparty/icu4c/common/udatamem.h b/thirdparty/icu4c/common/udatamem.h
index a05dd69756..3db2af43aa 100644
--- a/thirdparty/icu4c/common/udatamem.h
+++ b/thirdparty/icu4c/common/udatamem.h
@@ -44,7 +44,7 @@ struct UDataMemory {
int32_t length; /* Length of the data in bytes; -1 if unknown. */
};
-U_CFUNC UDataMemory *UDataMemory_createNewInstance(UErrorCode *pErr);
+U_CAPI UDataMemory* U_EXPORT2 UDataMemory_createNewInstance(UErrorCode *pErr);
U_CFUNC void UDatamemory_assign (UDataMemory *dest, UDataMemory *source);
U_CFUNC void UDataMemory_init (UDataMemory *This);
U_CFUNC UBool UDataMemory_isLoaded(const UDataMemory *This);
diff --git a/thirdparty/icu4c/common/uhash.cpp b/thirdparty/icu4c/common/uhash.cpp
index 2e331b7172..a04f9606c5 100644
--- a/thirdparty/icu4c/common/uhash.cpp
+++ b/thirdparty/icu4c/common/uhash.cpp
@@ -265,7 +265,7 @@ _uhash_init(UHashtable *result,
result->valueComparator = valueComp;
result->keyDeleter = NULL;
result->valueDeleter = NULL;
- result->allocated = FALSE;
+ result->allocated = false;
_uhash_internalSetResizePolicy(result, U_GROW);
_uhash_allocate(result, primeIndex, status);
@@ -294,7 +294,7 @@ _uhash_create(UHashFunction *keyHash,
}
_uhash_init(result, keyHash, keyComp, valueComp, primeIndex, status);
- result->allocated = TRUE;
+ result->allocated = true;
if (U_FAILURE(*status)) {
uprv_free(result);
@@ -949,7 +949,7 @@ uhash_equals(const UHashtable* hash1, const UHashtable* hash2){
int32_t count1, count2, pos, i;
if(hash1==hash2){
- return TRUE;
+ return true;
}
/*
@@ -967,15 +967,15 @@ uhash_equals(const UHashtable* hash1, const UHashtable* hash2){
{
/*
Normally we would return an error here about incompatible hash tables,
- but we return FALSE instead.
+ but we return false instead.
*/
- return FALSE;
+ return false;
}
count1 = uhash_count(hash1);
count2 = uhash_count(hash2);
if(count1!=count2){
- return FALSE;
+ return false;
}
pos=UHASH_FIRST;
@@ -989,11 +989,11 @@ uhash_equals(const UHashtable* hash1, const UHashtable* hash2){
*/
const UHashElement* elem2 = _uhash_find(hash2, key1, hash2->keyHasher(key1));
const UHashTok val2 = elem2->value;
- if(hash1->valueComparator(val1, val2)==FALSE){
- return FALSE;
+ if(hash1->valueComparator(val1, val2)==false){
+ return false;
}
}
- return TRUE;
+ return true;
}
/********************************************************************
@@ -1005,10 +1005,10 @@ uhash_compareUChars(const UHashTok key1, const UHashTok key2) {
const UChar *p1 = (const UChar*) key1.pointer;
const UChar *p2 = (const UChar*) key2.pointer;
if (p1 == p2) {
- return TRUE;
+ return true;
}
if (p1 == NULL || p2 == NULL) {
- return FALSE;
+ return false;
}
while (*p1 != 0 && *p1 == *p2) {
++p1;
@@ -1022,10 +1022,10 @@ uhash_compareChars(const UHashTok key1, const UHashTok key2) {
const char *p1 = (const char*) key1.pointer;
const char *p2 = (const char*) key2.pointer;
if (p1 == p2) {
- return TRUE;
+ return true;
}
if (p1 == NULL || p2 == NULL) {
- return FALSE;
+ return false;
}
while (*p1 != 0 && *p1 == *p2) {
++p1;
@@ -1039,10 +1039,10 @@ uhash_compareIChars(const UHashTok key1, const UHashTok key2) {
const char *p1 = (const char*) key1.pointer;
const char *p2 = (const char*) key2.pointer;
if (p1 == p2) {
- return TRUE;
+ return true;
}
if (p1 == NULL || p2 == NULL) {
- return FALSE;
+ return false;
}
while (*p1 != 0 && uprv_tolower(*p1) == uprv_tolower(*p2)) {
++p1;
diff --git a/thirdparty/icu4c/common/uidna.cpp b/thirdparty/icu4c/common/uidna.cpp
index ac2f9c3c8c..1cbdeec327 100644
--- a/thirdparty/icu4c/common/uidna.cpp
+++ b/thirdparty/icu4c/common/uidna.cpp
@@ -58,15 +58,15 @@ toASCIILower(UChar ch){
inline static UBool
startsWithPrefix(const UChar* src , int32_t srcLength){
if(srcLength < ACE_PREFIX_LENGTH){
- return FALSE;
+ return false;
}
for(int8_t i=0; i< ACE_PREFIX_LENGTH; i++){
if(toASCIILower(src[i]) != ACE_PREFIX[i]){
- return FALSE;
+ return false;
}
}
- return TRUE;
+ return true;
}
@@ -132,9 +132,9 @@ static inline UBool isLabelSeparator(UChar ch){
case 0x3002:
case 0xFF0E:
case 0xFF61:
- return TRUE;
+ return true;
default:
- return FALSE;
+ return false;
}
}
@@ -149,7 +149,7 @@ getNextSeparator(UChar *src, int32_t srcLength,
for(i=0 ; ;i++){
if(src[i] == 0){
*limit = src + i; // point to null
- *done = TRUE;
+ *done = true;
return i;
}
if(isLabelSeparator(src[i])){
@@ -169,7 +169,7 @@ getNextSeparator(UChar *src, int32_t srcLength,
// we have not found the delimiter
// if(i==srcLength)
*limit = src+srcLength;
- *done = TRUE;
+ *done = true;
return i;
}
@@ -177,7 +177,7 @@ getNextSeparator(UChar *src, int32_t srcLength,
static inline UBool isLDHChar(UChar ch){
// high runner case
if(ch>0x007A){
- return FALSE;
+ return false;
}
//[\\u002D \\u0030-\\u0039 \\u0041-\\u005A \\u0061-\\u007A]
if( (ch==0x002D) ||
@@ -185,9 +185,9 @@ static inline UBool isLDHChar(UChar ch){
(0x0041 <= ch && ch <= 0x005A) ||
(0x0061 <= ch && ch <= 0x007A)
){
- return TRUE;
+ return true;
}
- return FALSE;
+ return false;
}
static int32_t
@@ -212,9 +212,9 @@ _internal_toASCII(const UChar* src, int32_t srcLength,
UBool* caseFlags = NULL;
// the source contains all ascii codepoints
- UBool srcIsASCII = TRUE;
+ UBool srcIsASCII = true;
// assume the source contains all LDH codepoints
- UBool srcIsLDH = TRUE;
+ UBool srcIsLDH = true;
int32_t j=0;
@@ -239,13 +239,13 @@ _internal_toASCII(const UChar* src, int32_t srcLength,
// step 1
for( j=0;j<srcLength;j++){
if(src[j] > 0x7F){
- srcIsASCII = FALSE;
+ srcIsASCII = false;
}
b1[b1Len++] = src[j];
}
// step 2 is performed only if the source contains non ASCII
- if(srcIsASCII == FALSE){
+ if(srcIsASCII == false){
// step 2
b1Len = usprep_prepare(nameprep, src, srcLength, b1, b1Capacity, namePrepOptions, parseError, status);
@@ -277,29 +277,29 @@ _internal_toASCII(const UChar* src, int32_t srcLength,
}
// for step 3 & 4
- srcIsASCII = TRUE;
+ srcIsASCII = true;
for( j=0;j<b1Len;j++){
// check if output of usprep_prepare is all ASCII
if(b1[j] > 0x7F){
- srcIsASCII = FALSE;
- }else if(isLDHChar(b1[j])==FALSE){ // if the char is in ASCII range verify that it is an LDH character
- srcIsLDH = FALSE;
+ srcIsASCII = false;
+ }else if(isLDHChar(b1[j])==false){ // if the char is in ASCII range verify that it is an LDH character
+ srcIsLDH = false;
failPos = j;
}
}
- if(useSTD3ASCIIRules == TRUE){
+ if(useSTD3ASCIIRules == true){
// verify 3a and 3b
// 3(a) Verify the absence of non-LDH ASCII code points; that is, the
// absence of 0..2C, 2E..2F, 3A..40, 5B..60, and 7B..7F.
// 3(b) Verify the absence of leading and trailing hyphen-minus; that
// is, the absence of U+002D at the beginning and end of the
// sequence.
- if( srcIsLDH == FALSE /* source at this point should not contain anyLDH characters */
+ if( srcIsLDH == false /* source at this point should not contain anyLDH characters */
|| b1[0] == HYPHEN || b1[b1Len-1] == HYPHEN){
*status = U_IDNA_STD3_ASCII_RULES_ERROR;
/* populate the parseError struct */
- if(srcIsLDH==FALSE){
+ if(srcIsLDH==false){
// failPos is always set the index of failure
uprv_syntaxError(b1,failPos, b1Len,parseError);
}else if(b1[0] == HYPHEN){
@@ -331,7 +331,7 @@ _internal_toASCII(const UChar* src, int32_t srcLength,
// do not preserve the case flags for now!
// TODO: Preserve the case while implementing the RFE
// caseFlags = (UBool*) uprv_malloc(b1Len * sizeof(UBool));
- // uprv_memset(caseFlags,TRUE,b1Len);
+ // uprv_memset(caseFlags,true,b1Len);
b2Len = u_strToPunycode(b1,b1Len,b2,b2Capacity,caseFlags, status);
@@ -416,8 +416,8 @@ _internal_toUnicode(const UChar* src, int32_t srcLength,
UBool* caseFlags = NULL;
- UBool srcIsASCII = TRUE;
- /*UBool srcIsLDH = TRUE;
+ UBool srcIsASCII = true;
+ /*UBool srcIsLDH = true;
int32_t failPos =0;*/
// step 1: find out if all the codepoints in src are ASCII
@@ -425,12 +425,12 @@ _internal_toUnicode(const UChar* src, int32_t srcLength,
srcLength = 0;
for(;src[srcLength]!=0;){
if(src[srcLength]> 0x7f){
- srcIsASCII = FALSE;
- }/*else if(isLDHChar(src[srcLength])==FALSE){
+ srcIsASCII = false;
+ }/*else if(isLDHChar(src[srcLength])==false){
// here we do not assemble surrogates
// since we know that LDH code points
// are in the ASCII range only
- srcIsLDH = FALSE;
+ srcIsLDH = false;
failPos = srcLength;
}*/
srcLength++;
@@ -438,13 +438,13 @@ _internal_toUnicode(const UChar* src, int32_t srcLength,
}else if(srcLength > 0){
for(int32_t j=0; j<srcLength; j++){
if(src[j]> 0x7f){
- srcIsASCII = FALSE;
+ srcIsASCII = false;
break;
- }/*else if(isLDHChar(src[j])==FALSE){
+ }/*else if(isLDHChar(src[j])==false){
// here we do not assemble surrogates
// since we know that LDH code points
// are in the ASCII range only
- srcIsLDH = FALSE;
+ srcIsLDH = false;
failPos = j;
}*/
}
@@ -452,7 +452,7 @@ _internal_toUnicode(const UChar* src, int32_t srcLength,
return 0;
}
- if(srcIsASCII == FALSE){
+ if(srcIsASCII == false){
// step 2: process the string
b1Len = usprep_prepare(nameprep, src, srcLength, b1, b1Capacity, namePrepOptions, parseError, status);
if(*status == U_BUFFER_OVERFLOW_ERROR){
@@ -548,13 +548,13 @@ _internal_toUnicode(const UChar* src, int32_t srcLength,
else{
// See the start of this if statement for why this is commented out.
// verify that STD3 ASCII rules are satisfied
- /*if(useSTD3ASCIIRules == TRUE){
- if( srcIsLDH == FALSE // source contains some non-LDH characters
+ /*if(useSTD3ASCIIRules == true){
+ if( srcIsLDH == false // source contains some non-LDH characters
|| src[0] == HYPHEN || src[srcLength-1] == HYPHEN){
*status = U_IDNA_STD3_ASCII_RULES_ERROR;
// populate the parseError struct
- if(srcIsLDH==FALSE){
+ if(srcIsLDH==false){
// failPos is always set the index of failure
uprv_syntaxError(src,failPos, srcLength,parseError);
}else if(src[0] == HYPHEN){
@@ -695,7 +695,7 @@ uidna_IDNToASCII( const UChar *src, int32_t srcLength,
int32_t remainingLen = srcLength;
int32_t remainingDestCapacity = destCapacity;
int32_t labelLen = 0, labelReqLength = 0;
- UBool done = FALSE;
+ UBool done = false;
for(;;){
@@ -731,7 +731,7 @@ uidna_IDNToASCII( const UChar *src, int32_t srcLength,
remainingDestCapacity = 0;
}
- if(done == TRUE){
+ if(done == true){
break;
}
@@ -788,7 +788,7 @@ uidna_IDNToUnicode( const UChar* src, int32_t srcLength,
int32_t remainingLen = srcLength;
int32_t remainingDestCapacity = destCapacity;
int32_t labelLen = 0, labelReqLength = 0;
- UBool done = FALSE;
+ UBool done = false;
for(;;){
@@ -800,7 +800,7 @@ uidna_IDNToUnicode( const UChar* src, int32_t srcLength,
// is returned immediately in that step.
// </quote>
// _internal_toUnicode will copy the label.
- /*if(labelLen==0 && done==FALSE){
+ /*if(labelLen==0 && done==false){
*status = U_IDNA_ZERO_LENGTH_LABEL_ERROR;
break;
}*/
@@ -829,7 +829,7 @@ uidna_IDNToUnicode( const UChar* src, int32_t srcLength,
remainingDestCapacity = 0;
}
- if(done == TRUE){
+ if(done == true){
break;
}
diff --git a/thirdparty/icu4c/common/uinit.cpp b/thirdparty/icu4c/common/uinit.cpp
index 624431be02..dc3867b17e 100644
--- a/thirdparty/icu4c/common/uinit.cpp
+++ b/thirdparty/icu4c/common/uinit.cpp
@@ -26,11 +26,11 @@
U_NAMESPACE_BEGIN
-static UInitOnce gICUInitOnce = U_INITONCE_INITIALIZER;
+static UInitOnce gICUInitOnce {};
static UBool U_CALLCONV uinit_cleanup() {
gICUInitOnce.reset();
- return TRUE;
+ return true;
}
static void U_CALLCONV
diff --git a/thirdparty/icu4c/common/uinvchar.cpp b/thirdparty/icu4c/common/uinvchar.cpp
index 52b8906568..ffce3ec158 100644
--- a/thirdparty/icu4c/common/uinvchar.cpp
+++ b/thirdparty/icu4c/common/uinvchar.cpp
@@ -207,7 +207,7 @@ u_UCharsToChars(const UChar *us, char *cs, int32_t length) {
while(length>0) {
u=*us++;
if(!UCHAR_IS_INVARIANT(u)) {
- U_ASSERT(FALSE); /* Variant characters were used. These are not portable in ICU. */
+ U_ASSERT(false); /* Variant characters were used. These are not portable in ICU. */
u=0;
}
*cs++=(char)UCHAR_TO_CHAR(u);
@@ -245,18 +245,18 @@ uprv_isInvariantString(const char *s, int32_t length) {
*/
#if U_CHARSET_FAMILY==U_ASCII_FAMILY
if(!UCHAR_IS_INVARIANT(c)) {
- return FALSE; /* found a variant char */
+ return false; /* found a variant char */
}
#elif U_CHARSET_FAMILY==U_EBCDIC_FAMILY
c=CHAR_TO_UCHAR(c);
if(c==0 || !UCHAR_IS_INVARIANT(c)) {
- return FALSE; /* found a variant char */
+ return false; /* found a variant char */
}
#else
# error U_CHARSET_FAMILY is not valid
#endif
}
- return TRUE;
+ return true;
}
U_CAPI UBool U_EXPORT2
@@ -284,10 +284,10 @@ uprv_isInvariantUString(const UChar *s, int32_t length) {
* for strings with variant characters
*/
if(!UCHAR_IS_INVARIANT(c)) {
- return FALSE; /* found a variant char */
+ return false; /* found a variant char */
}
}
- return TRUE;
+ return true;
}
/* UDataSwapFn implementations used in udataswp.c ------- */
diff --git a/thirdparty/icu4c/common/uiter.cpp b/thirdparty/icu4c/common/uiter.cpp
index b9252d81c2..c4ab7d6d56 100644
--- a/thirdparty/icu4c/common/uiter.cpp
+++ b/thirdparty/icu4c/common/uiter.cpp
@@ -47,7 +47,7 @@ noopMove(UCharIterator * /*iter*/, int32_t /*delta*/, UCharIteratorOrigin /*orig
static UBool U_CALLCONV
noopHasNext(UCharIterator * /*iter*/) {
- return FALSE;
+ return false;
}
static UChar32 U_CALLCONV
@@ -678,24 +678,24 @@ utf8IteratorMove(UCharIterator *iter, int32_t delta, UCharIteratorOrigin origin)
case UITER_ZERO:
case UITER_START:
pos=delta;
- havePos=TRUE;
+ havePos=true;
/* iter->index<0 (unknown) is possible */
break;
case UITER_CURRENT:
if(iter->index>=0) {
pos=iter->index+delta;
- havePos=TRUE;
+ havePos=true;
} else {
/* the current UTF-16 index is unknown after setState(), use only delta */
pos=0;
- havePos=FALSE;
+ havePos=false;
}
break;
case UITER_LIMIT:
case UITER_LENGTH:
if(iter->length>=0) {
pos=iter->length+delta;
- havePos=TRUE;
+ havePos=true;
} else {
/* pin to the end, avoid counting the length */
iter->index=-1;
@@ -706,7 +706,7 @@ utf8IteratorMove(UCharIterator *iter, int32_t delta, UCharIteratorOrigin origin)
} else {
/* the current UTF-16 index is unknown, use only delta */
pos=0;
- havePos=FALSE;
+ havePos=false;
}
}
break;
diff --git a/thirdparty/icu4c/common/ulist.cpp b/thirdparty/icu4c/common/ulist.cpp
index c5180431c3..57344715de 100644
--- a/thirdparty/icu4c/common/ulist.cpp
+++ b/thirdparty/icu4c/common/ulist.cpp
@@ -160,12 +160,12 @@ U_CAPI UBool U_EXPORT2 ulist_containsString(const UList *list, const char *data,
for (pointer = list->head; pointer != NULL; pointer = pointer->next) {
if (length == (int32_t)uprv_strlen((const char *)pointer->data)) {
if (uprv_memcmp(data, pointer->data, length) == 0) {
- return TRUE;
+ return true;
}
}
}
}
- return FALSE;
+ return false;
}
U_CAPI UBool U_EXPORT2 ulist_removeString(UList *list, const char *data) {
@@ -175,11 +175,11 @@ U_CAPI UBool U_EXPORT2 ulist_removeString(UList *list, const char *data) {
if (uprv_strcmp(data, (const char *)pointer->data) == 0) {
ulist_removeItem(list, pointer);
// Remove only the first occurrence, like Java LinkedList.remove(Object).
- return TRUE;
+ return true;
}
}
}
- return FALSE;
+ return false;
}
U_CAPI void *U_EXPORT2 ulist_getNext(UList *list) {
diff --git a/thirdparty/icu4c/common/uloc.cpp b/thirdparty/icu4c/common/uloc.cpp
index 99c6a0af39..1da2abc361 100644
--- a/thirdparty/icu4c/common/uloc.cpp
+++ b/thirdparty/icu4c/common/uloc.cpp
@@ -102,7 +102,7 @@ static const char * const LANGUAGES[] = {
"asa", "ase", "ast", "av", "avk", "awa", "ay", "az",
"ba", "bal", "ban", "bar", "bas", "bax", "bbc", "bbj",
"be", "bej", "bem", "bew", "bez", "bfd", "bfq", "bg",
- "bgn", "bho", "bi", "bik", "bin", "bjn", "bkm", "bla",
+ "bgc", "bgn", "bho", "bi", "bik", "bin", "bjn", "bkm", "bla",
"bm", "bn", "bo", "bpy", "bqi", "br", "bra", "brh",
"brx", "bs", "bss", "bua", "bug", "bum", "byn", "byv",
"ca", "cad", "car", "cay", "cch", "ccp", "ce", "ceb", "cgg",
@@ -219,7 +219,7 @@ static const char * const LANGUAGES_3[] = {
"asa", "ase", "ast", "ava", "avk", "awa", "aym", "aze",
"bak", "bal", "ban", "bar", "bas", "bax", "bbc", "bbj",
"bel", "bej", "bem", "bew", "bez", "bfd", "bfq", "bul",
- "bgn", "bho", "bis", "bik", "bin", "bjn", "bkm", "bla",
+ "bgc", "bgn", "bho", "bis", "bik", "bin", "bjn", "bkm", "bla",
"bam", "ben", "bod", "bpy", "bqi", "bre", "bra", "brh",
"brx", "bos", "bss", "bua", "bug", "bum", "byn", "byv",
"cat", "cad", "car", "cay", "cch", "ccp", "che", "ceb", "cgg",
@@ -502,20 +502,20 @@ static int32_t getShortestSubtagLength(const char *localeID) {
int32_t length = localeIDLength;
int32_t tmpLength = 0;
int32_t i;
- UBool reset = TRUE;
+ UBool reset = true;
for (i = 0; i < localeIDLength; i++) {
if (localeID[i] != '_' && localeID[i] != '-') {
if (reset) {
tmpLength = 0;
- reset = FALSE;
+ reset = false;
}
tmpLength++;
} else {
if (tmpLength != 0 && tmpLength < length) {
length = tmpLength;
}
- reset = TRUE;
+ reset = true;
}
}
@@ -620,7 +620,7 @@ ulocimp_getKeywords(const char *localeID,
if(prev == '@') { /* start of keyword definition */
/* we will grab pairs, trim spaces, lowercase keywords, sort and return */
do {
- UBool duplicate = FALSE;
+ UBool duplicate = false;
/* skip leading spaces */
while(*pos == ' ') {
pos++;
@@ -693,7 +693,7 @@ ulocimp_getKeywords(const char *localeID,
/* If this is a duplicate keyword, then ignore it */
for (j=0; j<numKeywords; ++j) {
if (uprv_strcmp(keywordList[j].keyword, keywordList[numKeywords].keyword) == 0) {
- duplicate = TRUE;
+ duplicate = true;
break;
}
}
@@ -704,7 +704,7 @@ ulocimp_getKeywords(const char *localeID,
/* now we have a list of keywords */
/* we need to sort it */
- uprv_sortArray(keywordList, numKeywords, sizeof(KeywordStruct), compareKeywordStructs, NULL, FALSE, status);
+ uprv_sortArray(keywordList, numKeywords, sizeof(KeywordStruct), compareKeywordStructs, NULL, false, status);
/* Now construct the keyword part */
for(i = 0; i < numKeywords; i++) {
@@ -881,7 +881,7 @@ uloc_setKeywordValue(const char* keywordName,
char* startSearchHere = NULL;
char* keywordStart = NULL;
CharString updatedKeysAndValues;
- UBool handledInputKeyAndValue = FALSE;
+ UBool handledInputKeyAndValue = false;
char keyValuePrefix = '@';
if(U_FAILURE(*status)) {
@@ -1020,7 +1020,7 @@ uloc_setKeywordValue(const char* keywordName,
updatedKeysAndValues.append('=', *status);
updatedKeysAndValues.append(keywordValueBuffer, keywordValueLen, *status);
} /* else removing this entry, don't emit anything */
- handledInputKeyAndValue = TRUE;
+ handledInputKeyAndValue = true;
} else {
/* input keyword sorts earlier than current entry, add before current entry */
if (rc < 0 && keywordValueLen > 0 && !handledInputKeyAndValue) {
@@ -1030,7 +1030,7 @@ uloc_setKeywordValue(const char* keywordName,
updatedKeysAndValues.append(keywordNameBuffer, keywordNameLen, *status);
updatedKeysAndValues.append('=', *status);
updatedKeysAndValues.append(keywordValueBuffer, keywordValueLen, *status);
- handledInputKeyAndValue = TRUE;
+ handledInputKeyAndValue = true;
}
/* copy the current entry */
updatedKeysAndValues.append(keyValuePrefix, *status);
@@ -1046,7 +1046,7 @@ uloc_setKeywordValue(const char* keywordName,
updatedKeysAndValues.append(keywordNameBuffer, keywordNameLen, *status);
updatedKeysAndValues.append('=', *status);
updatedKeysAndValues.append(keywordValueBuffer, keywordValueLen, *status);
- handledInputKeyAndValue = TRUE;
+ handledInputKeyAndValue = true;
}
keywordStart = nextSeparator;
} /* end loop searching */
@@ -1089,7 +1089,7 @@ uloc_setKeywordValue(const char* keywordName,
#define _isPrefixLetter(a) ((a=='x')||(a=='X')||(a=='i')||(a=='I'))
-/*returns TRUE if one of the special prefixes is here (s=string)
+/*returns true if one of the special prefixes is here (s=string)
'x-' or 'i-' */
#define _isIDPrefix(s) (_isPrefixLetter(s[0])&&_isIDSeparator(s[1]))
@@ -1270,7 +1270,7 @@ _getVariant(const char *localeID,
char prev,
ByteSink& sink,
UBool needSeparator) {
- UBool hasVariant = FALSE;
+ UBool hasVariant = false;
/* get one or more variant tags and separate them with '_' */
if(_isIDSeparator(prev)) {
@@ -1278,12 +1278,12 @@ _getVariant(const char *localeID,
while(!_isTerminator(*localeID)) {
if (needSeparator) {
sink.Append("_", 1);
- needSeparator = FALSE;
+ needSeparator = false;
}
char c = (char)uprv_toupper(*localeID);
if (c == '-') c = '_';
sink.Append(&c, 1);
- hasVariant = TRUE;
+ hasVariant = true;
localeID++;
}
}
@@ -1300,7 +1300,7 @@ _getVariant(const char *localeID,
while(!_isTerminator(*localeID)) {
if (needSeparator) {
sink.Append("_", 1);
- needSeparator = FALSE;
+ needSeparator = false;
}
char c = (char)uprv_toupper(*localeID);
if (c == '-' || c == ',') c = '_';
@@ -1453,7 +1453,7 @@ uloc_openKeywords(const char* localeID,
if((tmpLocaleID = locale_getKeywordsStart(tmpLocaleID)) != NULL) {
CharString keywords;
CharStringByteSink sink(&keywords);
- ulocimp_getKeywords(tmpLocaleID+1, '@', sink, FALSE, status);
+ ulocimp_getKeywords(tmpLocaleID+1, '@', sink, false, status);
if (U_FAILURE(*status)) {
return NULL;
}
@@ -1573,7 +1573,7 @@ _canonicalize(const char* localeID,
variantSize = -tag.length();
{
CharStringByteSink s(&tag);
- _getVariant(tmpLocaleID+1, *tmpLocaleID, s, FALSE);
+ _getVariant(tmpLocaleID+1, *tmpLocaleID, s, false);
}
variantSize += tag.length();
if (variantSize > 0) {
@@ -1585,13 +1585,13 @@ _canonicalize(const char* localeID,
/* Copy POSIX-style charset specifier, if any [mr.utf8] */
if (!OPTION_SET(options, _ULOC_CANONICALIZE) && *tmpLocaleID == '.') {
- UBool done = FALSE;
+ UBool done = false;
do {
char c = *tmpLocaleID;
switch (c) {
case 0:
case '@':
- done = TRUE;
+ done = true;
break;
default:
tag.append(c, *err);
@@ -1664,7 +1664,7 @@ _canonicalize(const char* localeID,
(!separatorIndicator || separatorIndicator > keywordAssign)) {
sink.Append("@", 1);
++fieldCount;
- ulocimp_getKeywords(tmpLocaleID+1, '@', sink, TRUE, err);
+ ulocimp_getKeywords(tmpLocaleID+1, '@', sink, true, err);
}
}
}
@@ -1847,7 +1847,7 @@ uloc_getVariant(const char* localeID,
}
CheckedArrayByteSink sink(variant, variantCapacity);
- _getVariant(tmpLocaleID+1, *tmpLocaleID, sink, FALSE);
+ _getVariant(tmpLocaleID+1, *tmpLocaleID, sink, false);
i = sink.NumberOfBytesAppended();
@@ -2158,11 +2158,11 @@ isWellFormedLegacyKey(const char* legacyKey)
const char* p = legacyKey;
while (*p) {
if (!UPRV_ISALPHANUM(*p)) {
- return FALSE;
+ return false;
}
p++;
}
- return TRUE;
+ return true;
}
static UBool
@@ -2173,13 +2173,13 @@ isWellFormedLegacyType(const char* legacyType)
while (*p) {
if (*p == '_' || *p == '/' || *p == '-') {
if (alphaNumLen == 0) {
- return FALSE;
+ return false;
}
alphaNumLen = 0;
} else if (UPRV_ISALPHANUM(*p)) {
alphaNumLen++;
} else {
- return FALSE;
+ return false;
}
p++;
}
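The "bgc" insertion above has to land at the same position in both LANGUAGES and LANGUAGES_3 because the two tables are parallel arrays: an index found in one is used directly in the other to map between 2/3-letter codes. A hedged stand-alone sketch of that lookup (table contents abbreviated, function name hypothetical):

    #include <cstring>

    // Parallel tables, kept in the same sorted order as in uloc.cpp;
    // "bgc" has no 2-letter code, so both entries are "bgc".
    static const char* const LANGS[]   = { "be",  "bg",  "bgc", "bgn" };
    static const char* const LANGS_3[] = { "bel", "bul", "bgc", "bgn" };

    static const char* toIso3(const char* lang) {
        for (size_t i = 0; i < sizeof(LANGS) / sizeof(LANGS[0]); ++i) {
            if (std::strcmp(LANGS[i], lang) == 0) {
                return LANGS_3[i];     // same index in the parallel table
            }
        }
        return "";
    }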
diff --git a/thirdparty/icu4c/common/uloc_keytype.cpp b/thirdparty/icu4c/common/uloc_keytype.cpp
index 580244124e..12dc300492 100644
--- a/thirdparty/icu4c/common/uloc_keytype.cpp
+++ b/thirdparty/icu4c/common/uloc_keytype.cpp
@@ -24,7 +24,7 @@
#include "udataswp.h" /* for InvChar functions */
static UHashtable* gLocExtKeyMap = NULL;
-static icu::UInitOnce gLocExtKeyMapInitOnce = U_INITONCE_INITIALIZER;
+static icu::UInitOnce gLocExtKeyMapInitOnce {};
// bit flags for special types
typedef enum {
@@ -69,7 +69,7 @@ uloc_key_type_cleanup(void) {
gKeyTypeStringPool = NULL;
gLocExtKeyMapInitOnce.reset();
- return TRUE;
+ return true;
}
U_CDECL_END
@@ -356,9 +356,9 @@ init() {
UErrorCode sts = U_ZERO_ERROR;
umtx_initOnce(gLocExtKeyMapInitOnce, &initFromResourceBundle, sts);
if (U_FAILURE(sts)) {
- return FALSE;
+ return false;
}
- return TRUE;
+ return true;
}
static UBool
@@ -368,7 +368,7 @@ isSpecialTypeCodepoints(const char* val) {
while (*p) {
if (*p == '-') {
if (subtagLen < 4 || subtagLen > 6) {
- return FALSE;
+ return false;
}
subtagLen = 0;
} else if ((*p >= '0' && *p <= '9') ||
@@ -376,7 +376,7 @@ isSpecialTypeCodepoints(const char* val) {
(*p >= 'a' && *p <= 'f')) { // also in EBCDIC
subtagLen++;
} else {
- return FALSE;
+ return false;
}
p++;
}
@@ -390,13 +390,13 @@ isSpecialTypeReorderCode(const char* val) {
while (*p) {
if (*p == '-') {
if (subtagLen < 3 || subtagLen > 8) {
- return FALSE;
+ return false;
}
subtagLen = 0;
} else if (uprv_isASCIILetter(*p)) {
subtagLen++;
} else {
- return FALSE;
+ return false;
}
p++;
}
@@ -412,7 +412,7 @@ isSpecialTypeRgKeyValue(const char* val) {
(subtagLen >= 2 && (*p == 'Z' || *p == 'z')) ) {
subtagLen++;
} else {
- return FALSE;
+ return false;
}
p++;
}
@@ -448,10 +448,10 @@ ulocimp_toLegacyKey(const char* key) {
U_CFUNC const char*
ulocimp_toBcpType(const char* key, const char* type, UBool* isKnownKey, UBool* isSpecialType) {
if (isKnownKey != NULL) {
- *isKnownKey = FALSE;
+ *isKnownKey = false;
}
if (isSpecialType != NULL) {
- *isSpecialType = FALSE;
+ *isSpecialType = false;
}
if (!init()) {
@@ -461,14 +461,14 @@ ulocimp_toBcpType(const char* key, const char* type, UBool* isKnownKey, UBool* i
LocExtKeyData* keyData = (LocExtKeyData*)uhash_get(gLocExtKeyMap, key);
if (keyData != NULL) {
if (isKnownKey != NULL) {
- *isKnownKey = TRUE;
+ *isKnownKey = true;
}
LocExtType* t = (LocExtType*)uhash_get(keyData->typeMap.getAlias(), type);
if (t != NULL) {
return t->bcpId;
}
if (keyData->specialTypes != SPECIALTYPE_NONE) {
- UBool matched = FALSE;
+ UBool matched = false;
if (keyData->specialTypes & SPECIALTYPE_CODEPOINTS) {
matched = isSpecialTypeCodepoints(type);
}
@@ -480,7 +480,7 @@ ulocimp_toBcpType(const char* key, const char* type, UBool* isKnownKey, UBool* i
}
if (matched) {
if (isSpecialType != NULL) {
- *isSpecialType = TRUE;
+ *isSpecialType = true;
}
return type;
}
@@ -493,10 +493,10 @@ ulocimp_toBcpType(const char* key, const char* type, UBool* isKnownKey, UBool* i
U_CFUNC const char*
ulocimp_toLegacyType(const char* key, const char* type, UBool* isKnownKey, UBool* isSpecialType) {
if (isKnownKey != NULL) {
- *isKnownKey = FALSE;
+ *isKnownKey = false;
}
if (isSpecialType != NULL) {
- *isSpecialType = FALSE;
+ *isSpecialType = false;
}
if (!init()) {
@@ -506,14 +506,14 @@ ulocimp_toLegacyType(const char* key, const char* type, UBool* isKnownKey, UBool
LocExtKeyData* keyData = (LocExtKeyData*)uhash_get(gLocExtKeyMap, key);
if (keyData != NULL) {
if (isKnownKey != NULL) {
- *isKnownKey = TRUE;
+ *isKnownKey = true;
}
LocExtType* t = (LocExtType*)uhash_get(keyData->typeMap.getAlias(), type);
if (t != NULL) {
return t->legacyId;
}
if (keyData->specialTypes != SPECIALTYPE_NONE) {
- UBool matched = FALSE;
+ UBool matched = false;
if (keyData->specialTypes & SPECIALTYPE_CODEPOINTS) {
matched = isSpecialTypeCodepoints(type);
}
@@ -525,7 +525,7 @@ ulocimp_toLegacyType(const char* key, const char* type, UBool* isKnownKey, UBool
}
if (matched) {
if (isSpecialType != NULL) {
- *isSpecialType = TRUE;
+ *isSpecialType = true;
}
return type;
}
diff --git a/thirdparty/icu4c/common/uloc_tag.cpp b/thirdparty/icu4c/common/uloc_tag.cpp
index 0150e94cef..01a0e0028f 100644
--- a/thirdparty/icu4c/common/uloc_tag.cpp
+++ b/thirdparty/icu4c/common/uloc_tag.cpp
@@ -378,10 +378,10 @@ _isAlphaString(const char* s, int32_t len) {
int32_t i;
for (i = 0; i < len; i++) {
if (!ISALPHA(*(s + i))) {
- return FALSE;
+ return false;
}
}
- return TRUE;
+ return true;
}
static UBool
@@ -389,10 +389,10 @@ _isNumericString(const char* s, int32_t len) {
int32_t i;
for (i = 0; i < len; i++) {
if (!ISNUMERIC(*(s + i))) {
- return FALSE;
+ return false;
}
}
- return TRUE;
+ return true;
}
static UBool
@@ -400,10 +400,10 @@ _isAlphaNumericString(const char* s, int32_t len) {
int32_t i;
for (i = 0; i < len; i++) {
if (!ISALPHA(*(s + i)) && !ISNUMERIC(*(s + i))) {
- return FALSE;
+ return false;
}
}
- return TRUE;
+ return true;
}
static UBool
@@ -412,9 +412,9 @@ _isAlphaNumericStringLimitedLength(const char* s, int32_t len, int32_t min, int3
len = (int32_t)uprv_strlen(s);
}
if (len >= min && len <= max && _isAlphaNumericString(s, len)) {
- return TRUE;
+ return true;
}
- return FALSE;
+ return false;
}
U_CFUNC UBool
@@ -428,9 +428,9 @@ ultag_isLanguageSubtag(const char* s, int32_t len) {
len = (int32_t)uprv_strlen(s);
}
if (len >= 2 && len <= 8 && _isAlphaString(s, len)) {
- return TRUE;
+ return true;
}
- return FALSE;
+ return false;
}
static UBool
@@ -443,9 +443,9 @@ _isExtlangSubtag(const char* s, int32_t len) {
len = (int32_t)uprv_strlen(s);
}
if (len == 3 && _isAlphaString(s, len)) {
- return TRUE;
+ return true;
}
- return FALSE;
+ return false;
}
U_CFUNC UBool
@@ -457,9 +457,9 @@ ultag_isScriptSubtag(const char* s, int32_t len) {
len = (int32_t)uprv_strlen(s);
}
if (len == 4 && _isAlphaString(s, len)) {
- return TRUE;
+ return true;
}
- return FALSE;
+ return false;
}
U_CFUNC UBool
@@ -472,12 +472,12 @@ ultag_isRegionSubtag(const char* s, int32_t len) {
len = (int32_t)uprv_strlen(s);
}
if (len == 2 && _isAlphaString(s, len)) {
- return TRUE;
+ return true;
}
if (len == 3 && _isNumericString(s, len)) {
- return TRUE;
+ return true;
}
- return FALSE;
+ return false;
}
static UBool
@@ -490,12 +490,12 @@ _isVariantSubtag(const char* s, int32_t len) {
len = (int32_t)uprv_strlen(s);
}
if (_isAlphaNumericStringLimitedLength(s, len, 5, 8)) {
- return TRUE;
+ return true;
}
if (len == 4 && ISNUMERIC(*s) && _isAlphaNumericString(s + 1, 3)) {
- return TRUE;
+ return true;
}
- return FALSE;
+ return false;
}
static UBool
@@ -510,10 +510,10 @@ _isSepListOf(UBool (*test)(const char*, int32_t), const char* s, int32_t len) {
while ((p - s) < len) {
if (*p == SEP) {
if (pSubtag == NULL) {
- return FALSE;
+ return false;
}
if (!test(pSubtag, (int32_t)(p - pSubtag))) {
- return FALSE;
+ return false;
}
pSubtag = NULL;
} else if (pSubtag == NULL) {
@@ -522,7 +522,7 @@ _isSepListOf(UBool (*test)(const char*, int32_t), const char* s, int32_t len) {
p++;
}
if (pSubtag == NULL) {
- return FALSE;
+ return false;
}
return test(pSubtag, (int32_t)(p - pSubtag));
}
@@ -557,9 +557,9 @@ _isExtensionSingleton(const char* s, int32_t len) {
len = (int32_t)uprv_strlen(s);
}
if (len == 1 && (ISALPHA(*s) || ISNUMERIC(*s)) && (uprv_tolower(*s) != PRIVATEUSE)) {
- return TRUE;
+ return true;
}
- return FALSE;
+ return false;
}
static UBool
@@ -610,9 +610,9 @@ ultag_isUnicodeLocaleKey(const char* s, int32_t len) {
len = (int32_t)uprv_strlen(s);
}
if (len == 2 && (ISALPHA(*s) || ISNUMERIC(*s)) && ISALPHA(s[1])) {
- return TRUE;
+ return true;
}
- return FALSE;
+ return false;
}
U_CFUNC UBool
@@ -641,9 +641,9 @@ _isTKey(const char* s, int32_t len)
len = (int32_t)uprv_strlen(s);
}
if (len == 2 && ISALPHA(*s) && ISNUMERIC(*(s + 1))) {
- return TRUE;
+ return true;
}
- return FALSE;
+ return false;
}
U_CAPI const char * U_EXPORT2
@@ -694,23 +694,23 @@ _isTransformedExtensionSubtag(int32_t& state, const char* s, int32_t len)
case kStart:
if (ultag_isLanguageSubtag(s, len) && len != 4) {
state = kGotLanguage;
- return TRUE;
+ return true;
}
if (_isTKey(s, len)) {
state = kGotTKey;
- return TRUE;
+ return true;
}
- return FALSE;
+ return false;
case kGotLanguage:
if (ultag_isScriptSubtag(s, len)) {
state = kGotScript;
- return TRUE;
+ return true;
}
U_FALLTHROUGH;
case kGotScript:
if (ultag_isRegionSubtag(s, len)) {
state = kGotRegion;
- return TRUE;
+ return true;
}
U_FALLTHROUGH;
case kGotRegion:
@@ -718,30 +718,30 @@ _isTransformedExtensionSubtag(int32_t& state, const char* s, int32_t len)
case kGotVariant:
if (_isVariantSubtag(s, len)) {
state = kGotVariant;
- return TRUE;
+ return true;
}
if (_isTKey(s, len)) {
state = kGotTKey;
- return TRUE;
+ return true;
}
- return FALSE;
+ return false;
case kGotTKey:
if (_isTValue(s, len)) {
state = kGotTValue;
- return TRUE;
+ return true;
}
- return FALSE;
+ return false;
case kGotTValue:
if (_isTKey(s, len)) {
state = kGotTKey;
- return TRUE;
+ return true;
}
if (_isTValue(s, len)) {
- return TRUE;
+ return true;
}
- return FALSE;
+ return false;
}
- return FALSE;
+ return false;
}
static UBool
@@ -755,32 +755,32 @@ _isUnicodeExtensionSubtag(int32_t& state, const char* s, int32_t len)
case kStart:
if (ultag_isUnicodeLocaleKey(s, len)) {
state = kGotKey;
- return TRUE;
+ return true;
}
if (ultag_isUnicodeLocaleAttribute(s, len)) {
- return TRUE;
+ return true;
}
- return FALSE;
+ return false;
case kGotKey:
if (ultag_isUnicodeLocaleKey(s, len)) {
- return TRUE;
+ return true;
}
if (_isUnicodeLocaleTypeSubtag(s, len)) {
state = kGotType;
- return TRUE;
+ return true;
}
- return FALSE;
+ return false;
case kGotType:
if (ultag_isUnicodeLocaleKey(s, len)) {
state = kGotKey;
- return TRUE;
+ return true;
}
if (_isUnicodeLocaleTypeSubtag(s, len)) {
- return TRUE;
+ return true;
}
- return FALSE;
+ return false;
}
- return FALSE;
+ return false;
}
static UBool
@@ -798,7 +798,7 @@ _isStatefulSepListOf(UBool (*test)(int32_t&, const char*, int32_t), const char*
for (p = s; len > 0; p++, len--) {
if (*p == SEP) {
if (!test(state, start, subtagLen)) {
- return FALSE;
+ return false;
}
subtagLen = 0;
start = p + 1;
@@ -808,9 +808,9 @@ _isStatefulSepListOf(UBool (*test)(int32_t&, const char*, int32_t), const char*
}
if (test(state, start, subtagLen) && state >= 0) {
- return TRUE;
+ return true;
}
- return FALSE;
+ return false;
}
U_CFUNC UBool
@@ -835,7 +835,7 @@ ultag_isUnicodeExtensionSubtags(const char* s, int32_t len) {
static UBool
_addVariantToList(VariantListEntry **first, VariantListEntry *var) {
- UBool bAdded = TRUE;
+ UBool bAdded = true;
if (*first == NULL) {
var->next = NULL;
@@ -847,7 +847,7 @@ _addVariantToList(VariantListEntry **first, VariantListEntry *var) {
/* variants order should be preserved */
prev = NULL;
cur = *first;
- while (TRUE) {
+ while (true) {
if (cur == NULL) {
prev->next = var;
var->next = NULL;
@@ -858,7 +858,7 @@ _addVariantToList(VariantListEntry **first, VariantListEntry *var) {
cmp = uprv_compareInvCharsAsAscii(var->variant, cur->variant);
if (cmp == 0) {
/* duplicated variant */
- bAdded = FALSE;
+ bAdded = false;
break;
}
prev = cur;
@@ -871,7 +871,7 @@ _addVariantToList(VariantListEntry **first, VariantListEntry *var) {
static UBool
_addAttributeToList(AttributeListEntry **first, AttributeListEntry *attr) {
- UBool bAdded = TRUE;
+ UBool bAdded = true;
if (*first == NULL) {
attr->next = NULL;
@@ -883,7 +883,7 @@ _addAttributeToList(AttributeListEntry **first, AttributeListEntry *attr) {
/* reorder variants in alphabetical order */
prev = NULL;
cur = *first;
- while (TRUE) {
+ while (true) {
if (cur == NULL) {
prev->next = attr;
attr->next = NULL;
@@ -901,7 +901,7 @@ _addAttributeToList(AttributeListEntry **first, AttributeListEntry *attr) {
}
if (cmp == 0) {
/* duplicated variant */
- bAdded = FALSE;
+ bAdded = false;
break;
}
prev = cur;
@@ -915,7 +915,7 @@ _addAttributeToList(AttributeListEntry **first, AttributeListEntry *attr) {
static UBool
_addExtensionToList(ExtensionListEntry **first, ExtensionListEntry *ext, UBool localeToBCP) {
- UBool bAdded = TRUE;
+ UBool bAdded = true;
if (*first == NULL) {
ext->next = NULL;
@@ -927,7 +927,7 @@ _addExtensionToList(ExtensionListEntry **first, ExtensionListEntry *ext, UBool l
/* reorder variants in alphabetical order */
prev = NULL;
cur = *first;
- while (TRUE) {
+ while (true) {
if (cur == NULL) {
prev->next = ext;
ext->next = NULL;
@@ -979,7 +979,7 @@ _addExtensionToList(ExtensionListEntry **first, ExtensionListEntry *ext, UBool l
}
if (cmp == 0) {
/* duplicated extension key */
- bAdded = FALSE;
+ bAdded = false;
break;
}
prev = cur;
@@ -1164,7 +1164,7 @@ _appendVariantsToLanguageTag(const char* localeID, icu::ByteSink& sink, UBool st
if (len > 0) {
char *p, *pVar;
- UBool bNext = TRUE;
+ UBool bNext = true;
VariantListEntry *var;
VariantListEntry *varFirst = NULL;
@@ -1173,7 +1173,7 @@ _appendVariantsToLanguageTag(const char* localeID, icu::ByteSink& sink, UBool st
while (bNext) {
if (*p == SEP || *p == LOCALE_SEP || *p == 0) {
if (*p == 0) {
- bNext = FALSE;
+ bNext = false;
} else {
*p = 0; /* terminate */
}
@@ -1211,7 +1211,7 @@ _appendVariantsToLanguageTag(const char* localeID, icu::ByteSink& sink, UBool st
} else {
/* Special handling for POSIX variant, need to remember that we had it and then */
/* treat it like an extension later. */
- *hadPosix = TRUE;
+ *hadPosix = true;
}
} else if (strict) {
*status = U_ILLEGAL_ARGUMENT_ERROR;
@@ -1288,7 +1288,7 @@ _appendKeywordsToLanguageTag(const char* localeID, icu::ByteSink& sink, UBool st
int32_t keylen;
UBool isBcpUExt;
- while (TRUE) {
+ while (true) {
key = uenum_next(keywordEnum.getAlias(), NULL, status);
if (key == NULL) {
break;
@@ -1322,7 +1322,7 @@ _appendKeywordsToLanguageTag(const char* localeID, icu::ByteSink& sink, UBool st
if (uprv_strcmp(key, LOCALE_ATTRIBUTE_KEY) == 0) {
if (len > 0) {
int32_t i = 0;
- while (TRUE) {
+ while (true) {
attrBufLength = 0;
for (; i < len; i++) {
if (buf[i] != '-') {
@@ -1448,7 +1448,7 @@ _appendKeywordsToLanguageTag(const char* localeID, icu::ByteSink& sink, UBool st
ext->key = bcpKey;
ext->value = bcpValue;
- if (!_addExtensionToList(&firstExt, ext, TRUE)) {
+ if (!_addExtensionToList(&firstExt, ext, true)) {
if (strict) {
*status = U_ILLEGAL_ARGUMENT_ERROR;
break;
@@ -1467,18 +1467,18 @@ _appendKeywordsToLanguageTag(const char* localeID, icu::ByteSink& sink, UBool st
ext->key = POSIX_KEY;
ext->value = POSIX_VALUE;
- if (!_addExtensionToList(&firstExt, ext, TRUE)) {
+ if (!_addExtensionToList(&firstExt, ext, true)) {
// Silently ignore errors.
}
}
if (U_SUCCESS(*status) && (firstExt != NULL || firstAttr != NULL)) {
- UBool startLDMLExtension = FALSE;
+ UBool startLDMLExtension = false;
for (ext = firstExt; ext; ext = ext->next) {
if (!startLDMLExtension && uprv_strlen(ext->key) > 1) {
/* first LDML u singlton extension */
sink.Append("-u", 2);
- startLDMLExtension = TRUE;
+ startLDMLExtension = true;
}
/* write out the sorted BCP47 attributes, extensions and private use */
@@ -1520,7 +1520,7 @@ _appendLDMLExtensionAsKeywords(const char* ldmlext, ExtensionListEntry** appendT
int32_t len;
/* Reset the posixVariant value */
- *posixVariant = FALSE;
+ *posixVariant = false;
pTag = ldmlext;
pKwds = NULL;
@@ -1604,7 +1604,7 @@ _appendLDMLExtensionAsKeywords(const char* ldmlext, ExtensionListEntry** appendT
kwd->key = LOCALE_ATTRIBUTE_KEY;
kwd->value = value->data();
- if (!_addExtensionToList(&kwdFirst, kwd, FALSE)) {
+ if (!_addExtensionToList(&kwdFirst, kwd, false)) {
*status = U_ILLEGAL_ARGUMENT_ERROR;
return;
}
@@ -1616,14 +1616,14 @@ _appendLDMLExtensionAsKeywords(const char* ldmlext, ExtensionListEntry** appendT
const char *pBcpType = NULL; /* beginning of u extension type subtag(s) */
int32_t bcpKeyLen = 0;
int32_t bcpTypeLen = 0;
- UBool isDone = FALSE;
+ UBool isDone = false;
pTag = pKwds;
/* BCP47 representation of LDML key/type pairs */
while (!isDone) {
const char *pNextBcpKey = NULL;
int32_t nextBcpKeyLen = 0;
- UBool emitKeyword = FALSE;
+ UBool emitKeyword = false;
if (*pTag) {
/* locate next separator char */
@@ -1631,7 +1631,7 @@ _appendLDMLExtensionAsKeywords(const char* ldmlext, ExtensionListEntry** appendT
if (ultag_isUnicodeLocaleKey(pTag, len)) {
if (pBcpKey) {
- emitKeyword = TRUE;
+ emitKeyword = true;
pNextBcpKey = pTag;
nextBcpKeyLen = len;
} else {
@@ -1657,8 +1657,8 @@ _appendLDMLExtensionAsKeywords(const char* ldmlext, ExtensionListEntry** appendT
}
} else {
/* processing last one */
- emitKeyword = TRUE;
- isDone = TRUE;
+ emitKeyword = true;
+ isDone = true;
}
if (emitKeyword) {
@@ -1744,7 +1744,7 @@ _appendLDMLExtensionAsKeywords(const char* ldmlext, ExtensionListEntry** appendT
/* Special handling for u-va-posix, since we want to treat this as a variant,
not as a keyword */
if (!variantExists && !uprv_strcmp(pKey, POSIX_KEY) && !uprv_strcmp(pType, POSIX_VALUE) ) {
- *posixVariant = TRUE;
+ *posixVariant = true;
} else {
/* create an ExtensionListEntry for this keyword */
kwd = extPool.create();
@@ -1756,7 +1756,7 @@ _appendLDMLExtensionAsKeywords(const char* ldmlext, ExtensionListEntry** appendT
kwd->key = pKey;
kwd->value = pType;
- if (!_addExtensionToList(&kwdFirst, kwd, FALSE)) {
+ if (!_addExtensionToList(&kwdFirst, kwd, false)) {
// duplicate keyword is allowed, Only the first
// is honored.
}
@@ -1773,7 +1773,7 @@ _appendLDMLExtensionAsKeywords(const char* ldmlext, ExtensionListEntry** appendT
kwd = kwdFirst;
while (kwd != NULL) {
nextKwd = kwd->next;
- _addExtensionToList(appendTo, kwd, FALSE);
+ _addExtensionToList(appendTo, kwd, false);
kwd = nextKwd;
}
}
@@ -1788,7 +1788,7 @@ _appendKeywords(ULanguageTag* langtag, icu::ByteSink& sink, UErrorCode* status)
const char *key, *type;
icu::MemoryPool<ExtensionListEntry> extPool;
icu::MemoryPool<icu::CharString> kwdBuf;
- UBool posixVariant = FALSE;
+ UBool posixVariant = false;
if (U_FAILURE(*status)) {
return;
@@ -1803,7 +1803,7 @@ _appendKeywords(ULanguageTag* langtag, icu::ByteSink& sink, UErrorCode* status)
if (*key == LDMLEXT) {
/* Determine if variants already exists */
if (ultag_getVariantsSize(langtag)) {
- posixVariant = TRUE;
+ posixVariant = true;
}
_appendLDMLExtensionAsKeywords(type, &kwdFirst, extPool, kwdBuf, &posixVariant, status);
@@ -1818,7 +1818,7 @@ _appendKeywords(ULanguageTag* langtag, icu::ByteSink& sink, UErrorCode* status)
}
kwd->key = key;
kwd->value = type;
- if (!_addExtensionToList(&kwdFirst, kwd, FALSE)) {
+ if (!_addExtensionToList(&kwdFirst, kwd, false)) {
*status = U_ILLEGAL_ARGUMENT_ERROR;
break;
}
@@ -1835,7 +1835,7 @@ _appendKeywords(ULanguageTag* langtag, icu::ByteSink& sink, UErrorCode* status)
} else {
kwd->key = PRIVATEUSE_KEY;
kwd->value = type;
- if (!_addExtensionToList(&kwdFirst, kwd, FALSE)) {
+ if (!_addExtensionToList(&kwdFirst, kwd, false)) {
*status = U_ILLEGAL_ARGUMENT_ERROR;
}
}
@@ -1851,12 +1851,12 @@ _appendKeywords(ULanguageTag* langtag, icu::ByteSink& sink, UErrorCode* status)
if (U_SUCCESS(*status) && kwdFirst != NULL) {
/* write out the sorted keywords */
- UBool firstValue = TRUE;
+ UBool firstValue = true;
kwd = kwdFirst;
do {
if (firstValue) {
sink.Append("@", 1);
- firstValue = FALSE;
+ firstValue = false;
} else {
sink.Append(";", 1);
}
@@ -1899,17 +1899,17 @@ _appendPrivateuseToLanguageTag(const char* localeID, icu::ByteSink& sink, UBool
if (len > 0) {
char *p, *pPriv;
- UBool bNext = TRUE;
- UBool firstValue = TRUE;
+ UBool bNext = true;
+ UBool firstValue = true;
UBool writeValue;
pPriv = NULL;
p = buf;
while (bNext) {
- writeValue = FALSE;
+ writeValue = false;
if (*p == SEP || *p == LOCALE_SEP || *p == 0) {
if (*p == 0) {
- bNext = FALSE;
+ bNext = false;
} else {
*p = 0; /* terminate */
}
@@ -1923,10 +1923,10 @@ _appendPrivateuseToLanguageTag(const char* localeID, icu::ByteSink& sink, UBool
if (_isPrivateuseValueSubtag(pPriv, -1)) {
if (firstValue) {
if (!_isVariantSubtag(pPriv, -1)) {
- writeValue = TRUE;
+ writeValue = true;
}
} else {
- writeValue = TRUE;
+ writeValue = true;
}
} else if (strict) {
*status = U_ILLEGAL_ARGUMENT_ERROR;
@@ -1959,7 +1959,7 @@ _appendPrivateuseToLanguageTag(const char* localeID, icu::ByteSink& sink, UBool
tmpAppend[reslen++] = SEP;
}
- firstValue = FALSE;
+ firstValue = false;
}
len = (int32_t)uprv_strlen(pPriv);
@@ -2026,7 +2026,7 @@ ultag_parse(const char* tag, int32_t tagLen, int32_t* parsedLen, UErrorCode* sta
ExtensionListEntry *pExtension;
char *pExtValueSubtag, *pExtValueSubtagEnd;
int32_t i;
- UBool privateuseVar = FALSE;
+ UBool privateuseVar = false;
int32_t legacyLen = 0;
if (parsedLen != NULL) {
@@ -2124,7 +2124,7 @@ ultag_parse(const char* tag, int32_t tagLen, int32_t* parsedLen, UErrorCode* sta
if (*redundantTagEnd == '\0' || *redundantTagEnd == SEP) {
const char* preferredTag = REDUNDANT[i + 1];
size_t preferredTagLen = uprv_strlen(preferredTag);
- uprv_strncpy(t->buf, preferredTag, preferredTagLen);
+ uprv_memcpy(t->buf, preferredTag, preferredTagLen);
if (*redundantTagEnd == SEP) {
uprv_memmove(tagBuf + preferredTagLen,
redundantTagEnd,
@@ -2276,7 +2276,7 @@ ultag_parse(const char* tag, int32_t tagLen, int32_t* parsedLen, UErrorCode* sta
pExtension->value = T_CString_toLowerCase(pExtValueSubtag);
/* insert the extension to the list */
- if (_addExtensionToList(&(t->extensions), pExtension, FALSE)) {
+ if (_addExtensionToList(&(t->extensions), pExtension, false)) {
pLastGoodPosition = pExtValueSubtagEnd;
} else {
/* stop parsing here */
@@ -2339,7 +2339,7 @@ ultag_parse(const char* tag, int32_t tagLen, int32_t* parsedLen, UErrorCode* sta
pExtension->value = T_CString_toLowerCase(pExtValueSubtag);
/* insert the extension to the list */
- if (_addExtensionToList(&(t->extensions), pExtension, FALSE)) {
+ if (_addExtensionToList(&(t->extensions), pExtension, false)) {
pLastGoodPosition = pExtValueSubtagEnd;
pExtension = NULL;
} else {
@@ -2380,7 +2380,7 @@ ultag_parse(const char* tag, int32_t tagLen, int32_t* parsedLen, UErrorCode* sta
if (uprv_strncmp(pSubtag, PRIVUSE_VARIANT_PREFIX, uprv_strlen(PRIVUSE_VARIANT_PREFIX)) == 0) {
*pSep = 0;
next = VART;
- privateuseVar = TRUE;
+ privateuseVar = true;
break;
} else if (_isPrivateuseValueSubtag(pSubtag, subtagLen)) {
pLastGoodPosition = pSep;
@@ -2417,7 +2417,7 @@ ultag_parse(const char* tag, int32_t tagLen, int32_t* parsedLen, UErrorCode* sta
*pExtValueSubtagEnd = 0;
pExtension->value = T_CString_toLowerCase(pExtValueSubtag);
/* insert the extension to the list */
- if (_addExtensionToList(&(t->extensions), pExtension, FALSE)) {
+ if (_addExtensionToList(&(t->extensions), pExtension, false)) {
pLastGoodPosition = pExtValueSubtagEnd;
} else {
uprv_free(pExtension);
@@ -2535,7 +2535,7 @@ static int32_t
ultag_getVariantsSize(const ULanguageTag* langtag) {
int32_t size = 0;
VariantListEntry *cur = langtag->variants;
- while (TRUE) {
+ while (true) {
if (cur == NULL) {
break;
}
@@ -2581,7 +2581,7 @@ static int32_t
ultag_getExtensionsSize(const ULanguageTag* langtag) {
int32_t size = 0;
ExtensionListEntry *cur = langtag->extensions;
- while (TRUE) {
+ while (true) {
if (cur == NULL) {
break;
}
@@ -2648,7 +2648,7 @@ ulocimp_toLanguageTag(const char* localeID,
icu::CharString canonical;
int32_t reslen;
UErrorCode tmpStatus = U_ZERO_ERROR;
- UBool hadPosix = FALSE;
+ UBool hadPosix = false;
const char* pKeywordStart;
/* Note: uloc_canonicalize returns "en_US_POSIX" for input locale ID "". See #6835 */
@@ -2699,7 +2699,7 @@ ulocimp_toLanguageTag(const char* localeID,
pKeywordStart = locale_getKeywordsStart(canonical.data());
if (pKeywordStart == canonical.data()) {
int kwdCnt = 0;
- UBool done = FALSE;
+ UBool done = false;
icu::LocalUEnumerationPointer kwdEnum(uloc_openKeywords(canonical.data(), &tmpStatus));
if (U_SUCCESS(tmpStatus)) {
@@ -2720,15 +2720,15 @@ ulocimp_toLanguageTag(const char* localeID,
/* return private use only tag */
sink.Append("und-x-", 6);
sink.Append(buf.data(), buf.length());
- done = TRUE;
+ done = true;
} else if (strict) {
*status = U_ILLEGAL_ARGUMENT_ERROR;
- done = TRUE;
+ done = true;
}
/* if not strict mode, then "und" will be returned */
} else {
*status = U_ILLEGAL_ARGUMENT_ERROR;
- done = TRUE;
+ done = true;
}
}
}
@@ -2782,11 +2782,11 @@ ulocimp_forLanguageTag(const char* langtag,
icu::ByteSink& sink,
int32_t* parsedLength,
UErrorCode* status) {
- UBool isEmpty = TRUE;
+ UBool isEmpty = true;
const char *subtag, *p;
int32_t len;
int32_t i, n;
- UBool noRegion = TRUE;
+ UBool noRegion = true;
icu::LocalULanguageTagPointer lt(ultag_parse(langtag, tagLen, parsedLength, status));
if (U_FAILURE(*status)) {
@@ -2799,7 +2799,7 @@ ulocimp_forLanguageTag(const char* langtag,
len = (int32_t)uprv_strlen(subtag);
if (len > 0) {
sink.Append(subtag, len);
- isEmpty = FALSE;
+ isEmpty = false;
}
}
@@ -2808,7 +2808,7 @@ ulocimp_forLanguageTag(const char* langtag,
len = (int32_t)uprv_strlen(subtag);
if (len > 0) {
sink.Append("_", 1);
- isEmpty = FALSE;
+ isEmpty = false;
/* write out the script in title case */
char c = uprv_toupper(*subtag);
@@ -2821,7 +2821,7 @@ ulocimp_forLanguageTag(const char* langtag,
len = (int32_t)uprv_strlen(subtag);
if (len > 0) {
sink.Append("_", 1);
- isEmpty = FALSE;
+ isEmpty = false;
/* write out the region in upper case */
p = subtag;
@@ -2830,7 +2830,7 @@ ulocimp_forLanguageTag(const char* langtag,
sink.Append(&c, 1);
p++;
}
- noRegion = FALSE;
+ noRegion = false;
}
/* variants */
@@ -2839,7 +2839,7 @@ ulocimp_forLanguageTag(const char* langtag,
if (n > 0) {
if (noRegion) {
sink.Append("_", 1);
- isEmpty = FALSE;
+ isEmpty = false;
}
for (i = 0; i < n; i++) {
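One non-mechanical change in uloc_tag.cpp is the uprv_strncpy -> uprv_memcpy swap when a redundant tag is replaced by its preferred form. The copy length is uprv_strlen(preferredTag), which deliberately excludes the terminating NUL (the tail of the old tag is moved up immediately afterwards); with an exact length, strncpy copies the same bytes but does not NUL-terminate and is commonly flagged by GCC's -Wstringop-truncation, so memcpy states the intent more directly. A hedged illustration (hypothetical function, not from the patch):

    #include <cstring>

    // Copy exactly len bytes of the preferred tag over the start of buf;
    // termination/relocation of the remainder is handled by the caller.
    void replacePrefix(char* buf, const char* preferred) {
        size_t len = std::strlen(preferred);
        std::memcpy(buf, preferred, len);   // equivalent bytes, no truncation warning
    }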
diff --git a/thirdparty/icu4c/common/umapfile.cpp b/thirdparty/icu4c/common/umapfile.cpp
index 3e714876a4..145582ea97 100644
--- a/thirdparty/icu4c/common/umapfile.cpp
+++ b/thirdparty/icu4c/common/umapfile.cpp
@@ -107,10 +107,10 @@ typedef HANDLE MemoryMap;
U_CFUNC UBool
uprv_mapFile(UDataMemory *pData, const char *path, UErrorCode *status) {
if (U_FAILURE(*status)) {
- return FALSE;
+ return false;
}
UDataMemory_init(pData); /* Clear the output struct. */
- return FALSE; /* no file access */
+ return false; /* no file access */
}
U_CFUNC void uprv_unmapFile(UDataMemory *pData) {
@@ -126,7 +126,7 @@ typedef HANDLE MemoryMap;
)
{
if (U_FAILURE(*status)) {
- return FALSE;
+ return false;
}
HANDLE map = nullptr;
@@ -150,12 +150,12 @@ typedef HANDLE MemoryMap;
u_strFromUTF8(reinterpret_cast<UChar*>(utf16Path), static_cast<int32_t>(UPRV_LENGTHOF(utf16Path)), &pathUtf16Len, path, -1, status);
if (U_FAILURE(*status)) {
- return FALSE;
+ return false;
}
if (*status == U_STRING_NOT_TERMINATED_WARNING) {
// Report back an error instead of a warning.
*status = U_BUFFER_OVERFLOW_ERROR;
- return FALSE;
+ return false;
}
file = CreateFileW(utf16Path, GENERIC_READ, FILE_SHARE_READ, nullptr,
@@ -168,7 +168,7 @@ typedef HANDLE MemoryMap;
if (HRESULT_FROM_WIN32(GetLastError()) == E_OUTOFMEMORY) {
*status = U_MEMORY_ALLOCATION_ERROR;
}
- return FALSE;
+ return false;
}
// Note: We use NULL/nullptr for lpAttributes parameter below.
@@ -183,17 +183,17 @@ typedef HANDLE MemoryMap;
if (HRESULT_FROM_WIN32(GetLastError()) == E_OUTOFMEMORY) {
*status = U_MEMORY_ALLOCATION_ERROR;
}
- return FALSE;
+ return false;
}
/* map a view of the file into our address space */
pData->pHeader = reinterpret_cast<const DataHeader *>(MapViewOfFile(map, FILE_MAP_READ, 0, 0, 0));
if (pData->pHeader == nullptr) {
CloseHandle(map);
- return FALSE;
+ return false;
}
pData->map = map;
- return TRUE;
+ return true;
}
U_CFUNC void
@@ -217,21 +217,21 @@ typedef HANDLE MemoryMap;
void *data;
if (U_FAILURE(*status)) {
- return FALSE;
+ return false;
}
UDataMemory_init(pData); /* Clear the output struct. */
/* determine the length of the file */
if(stat(path, &mystat)!=0 || mystat.st_size<=0) {
- return FALSE;
+ return false;
}
length=mystat.st_size;
/* open the file */
fd=open(path, O_RDONLY);
if(fd==-1) {
- return FALSE;
+ return false;
}
/* get a view of the mapping */
@@ -243,7 +243,7 @@ typedef HANDLE MemoryMap;
close(fd); /* no longer needed */
if(data==MAP_FAILED) {
// Possibly check the errno value for ENOMEM, and report U_MEMORY_ALLOCATION_ERROR?
- return FALSE;
+ return false;
}
pData->map = (char *)data + length;
@@ -252,7 +252,7 @@ typedef HANDLE MemoryMap;
#if U_PLATFORM == U_PF_IPHONE
posix_madvise(data, length, POSIX_MADV_RANDOM);
#endif
- return TRUE;
+ return true;
}
U_CFUNC void
@@ -291,21 +291,21 @@ typedef HANDLE MemoryMap;
void *p;
if (U_FAILURE(*status)) {
- return FALSE;
+ return false;
}
UDataMemory_init(pData); /* Clear the output struct. */
/* open the input file */
file=fopen(path, "rb");
if(file==nullptr) {
- return FALSE;
+ return false;
}
/* get the file length */
fileLength=umap_fsize(file);
if(ferror(file) || fileLength<=20) {
fclose(file);
- return FALSE;
+ return false;
}
/* allocate the memory to hold the file data */
@@ -313,21 +313,21 @@ typedef HANDLE MemoryMap;
if(p==nullptr) {
fclose(file);
*status = U_MEMORY_ALLOCATION_ERROR;
- return FALSE;
+ return false;
}
/* read the file */
if(fileLength!=fread(p, 1, fileLength, file)) {
uprv_free(p);
fclose(file);
- return FALSE;
+ return false;
}
fclose(file);
pData->map=p;
pData->pHeader=(const DataHeader *)p;
pData->mapAddr=p;
- return TRUE;
+ return true;
}
U_CFUNC void
@@ -427,7 +427,7 @@ typedef HANDLE MemoryMap;
void *val=0;
if (U_FAILURE(*status)) {
- return FALSE;
+ return false;
}
inBasename=uprv_strrchr(path, U_FILE_SEP_CHAR);
@@ -447,14 +447,14 @@ typedef HANDLE MemoryMap;
/* determine the length of the file */
if(stat(path, &mystat)!=0 || mystat.st_size<=0) {
- return FALSE;
+ return false;
}
length=mystat.st_size;
/* open the file */
fd=open(path, O_RDONLY);
if(fd==-1) {
- return FALSE;
+ return false;
}
/* get a view of the mapping */
@@ -462,12 +462,12 @@ typedef HANDLE MemoryMap;
close(fd); /* no longer needed */
if(data==MAP_FAILED) {
// Possibly check the errorno value for ENOMEM, and report U_MEMORY_ALLOCATION_ERROR?
- return FALSE;
+ return false;
}
pData->map = (char *)data + length;
pData->pHeader=(const DataHeader *)data;
pData->mapAddr = data;
- return TRUE;
+ return true;
}
# ifdef OS390BATCH
@@ -503,16 +503,16 @@ typedef HANDLE MemoryMap;
val=dllqueryvar((dllhandle*)handle, U_ICUDATA_ENTRY_NAME);
if(val == 0) {
/* failed... so keep looking */
- return FALSE;
+ return false;
}
# ifdef UDATA_DEBUG
fprintf(stderr, "dllqueryvar(%08X, %s) -> %08X\n", handle, U_ICUDATA_ENTRY_NAME, val);
# endif
pData->pHeader=(const DataHeader *)val;
- return TRUE;
+ return true;
} else {
- return FALSE; /* no handle */
+ return false; /* no handle */
}
}
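The POSIX branch of uprv_mapFile() touched above follows the usual stat/open/mmap/close sequence: determine the file length, map it read-only, and close the descriptor once the mapping exists. A simplified, self-contained sketch of that pattern (function name hypothetical, error reporting omitted):

    #include <sys/mman.h>
    #include <sys/stat.h>
    #include <fcntl.h>
    #include <unistd.h>
    #include <cstddef>

    static const void* mapWholeFile(const char* path, size_t* lengthOut) {
        struct stat st;
        if (stat(path, &st) != 0 || st.st_size <= 0) {
            return nullptr;                       // missing or empty file
        }
        int fd = open(path, O_RDONLY);
        if (fd == -1) {
            return nullptr;
        }
        void* data = mmap(nullptr, (size_t)st.st_size, PROT_READ, MAP_SHARED, fd, 0);
        close(fd);                                // mapping stays valid after close
        if (data == MAP_FAILED) {
            return nullptr;
        }
        *lengthOut = (size_t)st.st_size;
        return data;
    }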
diff --git a/thirdparty/icu4c/common/umapfile.h b/thirdparty/icu4c/common/umapfile.h
index adc265203d..042e71374c 100644
--- a/thirdparty/icu4c/common/umapfile.h
+++ b/thirdparty/icu4c/common/umapfile.h
@@ -29,7 +29,7 @@
#include "unicode/udata.h"
#include "putilimp.h"
-U_CFUNC UBool uprv_mapFile(UDataMemory *pdm, const char *path, UErrorCode *status);
+U_CAPI UBool U_EXPORT2 uprv_mapFile(UDataMemory *pdm, const char *path, UErrorCode *status);
U_CFUNC void uprv_unmapFile(UDataMemory *pData);
/* MAP_NONE: no memory mapping, no file access at all */
diff --git a/thirdparty/icu4c/common/umutex.h b/thirdparty/icu4c/common/umutex.h
index 8d76b3f3e6..1b8332409c 100644
--- a/thirdparty/icu4c/common/umutex.h
+++ b/thirdparty/icu4c/common/umutex.h
@@ -71,7 +71,6 @@ U_NAMESPACE_BEGIN
****************************************************************************/
typedef std::atomic<int32_t> u_atomic_int32_t;
-#define ATOMIC_INT32_T_INITIALIZER(val) ATOMIC_VAR_INIT(val)
inline int32_t umtx_loadAcquire(u_atomic_int32_t &var) {
return var.load(std::memory_order_acquire);
@@ -96,18 +95,15 @@ inline int32_t umtx_atomic_dec(u_atomic_int32_t *var) {
*
*************************************************************************************************/
-struct UInitOnce {
- u_atomic_int32_t fState;
- UErrorCode fErrCode;
+struct U_COMMON_API UInitOnce {
+ u_atomic_int32_t fState {0};
+ UErrorCode fErrCode {U_ZERO_ERROR};
void reset() {fState = 0;}
UBool isReset() {return umtx_loadAcquire(fState) == 0;}
// Note: isReset() is used by service registration code.
// Thread safety of this usage needs review.
};
-#define U_INITONCE_INITIALIZER {ATOMIC_INT32_T_INITIALIZER(0), U_ZERO_ERROR}
-
-
U_COMMON_API UBool U_EXPORT2 umtx_initImplPreInit(UInitOnce &);
U_COMMON_API void U_EXPORT2 umtx_initImplPostInit(UInitOnce &);
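The UInitOnce change is the one structural edit in umutex.h: fState and fErrCode gain in-class default initializers, so the U_INITONCE_INITIALIZER aggregate macro (built on ATOMIC_VAR_INIT, deprecated in C++20) can be removed and static instances become plain brace initializations, as seen in uloc_keytype.cpp and unames.cpp above. A minimal stand-in illustrating the idiom (names hypothetical):

    #include <atomic>
    #include <cstdint>

    struct InitOnceLike {
        std::atomic<int32_t> fState {0};    // default member initializers make
        int32_t              fErrCode {0};  // empty {} initialization sufficient
        void reset() { fState = 0; }
    };

    // Old style (removed): static InitOnceLike gOnce = U_INITONCE_INITIALIZER;
    static InitOnceLike gOnce {};            // new style, as in `gCharNamesInitOnce {}`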
diff --git a/thirdparty/icu4c/common/unames.cpp b/thirdparty/icu4c/common/unames.cpp
index 5776058f95..b0ac991e1b 100644
--- a/thirdparty/icu4c/common/unames.cpp
+++ b/thirdparty/icu4c/common/unames.cpp
@@ -105,7 +105,7 @@ typedef struct {
static UDataMemory *uCharNamesData=NULL;
static UCharNames *uCharNames=NULL;
-static icu::UInitOnce gCharNamesInitOnce = U_INITONCE_INITIALIZER;
+static icu::UInitOnce gCharNamesInitOnce {};
/*
* Maximum length of character names (regular & 1.0).
@@ -173,7 +173,7 @@ static UBool U_CALLCONV unames_cleanup(void)
}
gCharNamesInitOnce.reset();
gMaxNameLength=0;
- return TRUE;
+ return true;
}
static UBool U_CALLCONV
@@ -371,7 +371,7 @@ compareName(UCharNames *names,
if(c!=';') {
/* implicit letter */
if((char)c!=*otherName++) {
- return FALSE;
+ return false;
}
} else {
/* finished */
@@ -388,7 +388,7 @@ compareName(UCharNames *names,
if(c!=';') {
/* explicit letter */
if((char)c!=*otherName++) {
- return FALSE;
+ return false;
}
} else {
/* stop, but skip the semicolon if we are seeking
@@ -407,7 +407,7 @@ compareName(UCharNames *names,
uint8_t *tokenString=tokenStrings+token;
while((c=*tokenString++)!=0) {
if((char)c!=*otherName++) {
- return FALSE;
+ return false;
}
}
}
@@ -616,7 +616,7 @@ enumGroupNames(UCharNames *names, const uint16_t *group,
/* here, we assume that the buffer is large enough */
if(length>0) {
if(!fn(context, start, nameChoice, buffer, length)) {
- return FALSE;
+ return false;
}
}
++start;
@@ -626,12 +626,12 @@ enumGroupNames(UCharNames *names, const uint16_t *group,
while(start<=end) {
if(compareName(names, s+offsets[start&GROUP_MASK], lengths[start&GROUP_MASK], nameChoice, otherName)) {
((FindName *)context)->code=start;
- return FALSE;
+ return false;
}
++start;
}
}
- return TRUE;
+ return true;
}
/*
@@ -653,14 +653,14 @@ enumExtNames(UChar32 start, UChar32 end,
/* here, we assume that the buffer is large enough */
if(length>0) {
if(!fn(context, start, U_EXTENDED_CHAR_NAME, buffer, length)) {
- return FALSE;
+ return false;
}
}
++start;
}
}
- return TRUE;
+ return true;
}
static UBool
@@ -684,7 +684,7 @@ enumNames(UCharNames *names,
extLimit=limit;
}
if(!enumExtNames(start, extLimit-1, fn, context)) {
- return FALSE;
+ return false;
}
start=extLimit;
}
@@ -705,7 +705,7 @@ enumNames(UCharNames *names,
if(!enumGroupNames(names, group,
start, ((UChar32)startGroupMSB<<GROUP_SHIFT)+LINES_PER_GROUP-1,
fn, context, nameChoice)) {
- return FALSE;
+ return false;
}
group=NEXT_GROUP(group); /* continue with the next group */
}
@@ -718,7 +718,7 @@ enumNames(UCharNames *names,
end = limit;
}
if (!enumExtNames(start, end - 1, fn, context)) {
- return FALSE;
+ return false;
}
}
group=nextGroup;
@@ -729,7 +729,7 @@ enumNames(UCharNames *names,
const uint16_t *nextGroup;
start=(UChar32)group[GROUP_MSB]<<GROUP_SHIFT;
if(!enumGroupNames(names, group, start, start+LINES_PER_GROUP-1, fn, context, nameChoice)) {
- return FALSE;
+ return false;
}
nextGroup=NEXT_GROUP(group);
if (nextGroup < groupLimit && nextGroup[GROUP_MSB] > group[GROUP_MSB] + 1 && nameChoice == U_EXTENDED_CHAR_NAME) {
@@ -738,7 +738,7 @@ enumNames(UCharNames *names,
end = limit;
}
if (!enumExtNames((group[GROUP_MSB] + 1) << GROUP_SHIFT, end - 1, fn, context)) {
- return FALSE;
+ return false;
}
}
group=nextGroup;
@@ -753,7 +753,7 @@ enumNames(UCharNames *names,
start = next;
}
} else {
- return TRUE;
+ return true;
}
}
@@ -766,7 +766,7 @@ enumNames(UCharNames *names,
return enumExtNames(start, limit - 1, fn, context);
}
- return TRUE;
+ return true;
}
static uint16_t
@@ -941,7 +941,7 @@ enumAlgNames(AlgorithmicRange *range,
uint16_t length;
if(nameChoice!=U_UNICODE_CHAR_NAME && nameChoice!=U_EXTENDED_CHAR_NAME) {
- return TRUE;
+ return true;
}
switch(range->type) {
@@ -952,12 +952,12 @@ enumAlgNames(AlgorithmicRange *range,
/* get the full name of the start character */
length=getAlgName(range, (uint32_t)start, nameChoice, buffer, sizeof(buffer));
if(length<=0) {
- return TRUE;
+ return true;
}
/* call the enumerator function with this first character */
if(!fn(context, start, nameChoice, buffer, length)) {
- return FALSE;
+ return false;
}
/* go to the end of the name; all these names have the same length */
@@ -984,7 +984,7 @@ enumAlgNames(AlgorithmicRange *range,
}
if(!fn(context, start, nameChoice, buffer, length)) {
- return FALSE;
+ return false;
}
}
break;
@@ -1018,7 +1018,7 @@ enumAlgNames(AlgorithmicRange *range,
/* call the enumerator function with this first character */
if(!fn(context, start, nameChoice, buffer, length)) {
- return FALSE;
+ return false;
}
/* enumerate the rest of the names */
@@ -1056,7 +1056,7 @@ enumAlgNames(AlgorithmicRange *range,
*t=0;
if(!fn(context, start, nameChoice, buffer, length)) {
- return FALSE;
+ return false;
}
}
break;
@@ -1066,7 +1066,7 @@ enumAlgNames(AlgorithmicRange *range,
break;
}
- return TRUE;
+ return true;
}
/*
@@ -1416,11 +1416,11 @@ calcNameSetsLengths(UErrorCode *pErrorCode) {
int32_t i, maxNameLength;
if(gMaxNameLength!=0) {
- return TRUE;
+ return true;
}
if(!isDataLoaded(pErrorCode)) {
- return FALSE;
+ return false;
}
/* set hex digits, used in various names, and <>-, used in extended names */
@@ -1437,7 +1437,7 @@ calcNameSetsLengths(UErrorCode *pErrorCode) {
/* set sets and lengths from group names, set global maximum values */
calcGroupNameSetsLengths(maxNameLength);
- return TRUE;
+ return true;
}
U_NAMESPACE_END
@@ -1809,7 +1809,7 @@ makeTokenMap(const UDataSwapper *ds,
/* enter the converted character into the map and mark it used */
map[c1]=c2;
- usedOutChar[c2]=TRUE;
+ usedOutChar[c2]=true;
}
}
diff --git a/thirdparty/icu4c/common/unicode/bytestrie.h b/thirdparty/icu4c/common/unicode/bytestrie.h
index 271a81d1b4..8fe66780f5 100644
--- a/thirdparty/icu4c/common/unicode/bytestrie.h
+++ b/thirdparty/icu4c/common/unicode/bytestrie.h
@@ -394,7 +394,7 @@ private:
pos_(bytes_), remainingMatchLength_(-1) {}
// No assignment operator.
- BytesTrie &operator=(const BytesTrie &other);
+ BytesTrie &operator=(const BytesTrie &other) = delete;
inline void stop() {
pos_=NULL;
diff --git a/thirdparty/icu4c/common/unicode/bytestriebuilder.h b/thirdparty/icu4c/common/unicode/bytestriebuilder.h
index 382f5e0095..ec9c625473 100644
--- a/thirdparty/icu4c/common/unicode/bytestriebuilder.h
+++ b/thirdparty/icu4c/common/unicode/bytestriebuilder.h
@@ -129,8 +129,8 @@ public:
private:
friend class ::BytesTrieTest;
- BytesTrieBuilder(const BytesTrieBuilder &other); // no copy constructor
- BytesTrieBuilder &operator=(const BytesTrieBuilder &other); // no assignment operator
+ BytesTrieBuilder(const BytesTrieBuilder &other) = delete; // no copy constructor
+ BytesTrieBuilder &operator=(const BytesTrieBuilder &other) = delete; // no assignment operator
void buildBytes(UStringTrieBuildOption buildOption, UErrorCode &errorCode);
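These header edits (bytestrie.h, bytestriebuilder.h, and the class headers that follow) all apply the same C++11 modernization: instead of declaring a private copy constructor or assignment operator and never defining it, which fails only at link time, the special members are explicitly deleted, turning misuse into a clear compile-time diagnostic. A generic sketch of the idiom:

    class NonCopyable {
    public:
        NonCopyable() = default;
        NonCopyable(const NonCopyable&) = delete;             // no copy constructor
        NonCopyable& operator=(const NonCopyable&) = delete;  // no assignment operator
    };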
diff --git a/thirdparty/icu4c/common/unicode/caniter.h b/thirdparty/icu4c/common/unicode/caniter.h
index 6e57ef5e3b..db400a531b 100644
--- a/thirdparty/icu4c/common/unicode/caniter.h
+++ b/thirdparty/icu4c/common/unicode/caniter.h
@@ -150,20 +150,20 @@ public:
private:
// ===================== PRIVATES ==============================
// private default constructor
- CanonicalIterator();
+ CanonicalIterator() = delete;
/**
* Copy constructor. Private for now.
* @internal (private)
*/
- CanonicalIterator(const CanonicalIterator& other);
+ CanonicalIterator(const CanonicalIterator& other) = delete;
/**
* Assignment operator. Private for now.
* @internal (private)
*/
- CanonicalIterator& operator=(const CanonicalIterator& other);
+ CanonicalIterator& operator=(const CanonicalIterator& other) = delete;
// fields
UnicodeString source;
diff --git a/thirdparty/icu4c/common/unicode/dtintrv.h b/thirdparty/icu4c/common/unicode/dtintrv.h
index 46a94ce349..8c172eb7a5 100644
--- a/thirdparty/icu4c/common/unicode/dtintrv.h
+++ b/thirdparty/icu4c/common/unicode/dtintrv.h
@@ -131,7 +131,7 @@ private:
/**
* Default constructor, not implemented.
*/
- DateInterval();
+ DateInterval() = delete;
UDate fromDate;
UDate toDate;
diff --git a/thirdparty/icu4c/common/unicode/idna.h b/thirdparty/icu4c/common/unicode/idna.h
index 1305dc6048..1c57205bae 100644
--- a/thirdparty/icu4c/common/unicode/idna.h
+++ b/thirdparty/icu4c/common/unicode/idna.h
@@ -305,8 +305,8 @@ public:
private:
friend class UTS46;
- IDNAInfo(const IDNAInfo &other); // no copying
- IDNAInfo &operator=(const IDNAInfo &other); // no copying
+ IDNAInfo(const IDNAInfo &other) = delete; // no copying
+ IDNAInfo &operator=(const IDNAInfo &other) = delete; // no copying
void reset() {
errors=labelErrors=0;
diff --git a/thirdparty/icu4c/common/unicode/normlzr.h b/thirdparty/icu4c/common/unicode/normlzr.h
index 93661990fe..14b2469885 100644
--- a/thirdparty/icu4c/common/unicode/normlzr.h
+++ b/thirdparty/icu4c/common/unicode/normlzr.h
@@ -740,8 +740,8 @@ private:
// Private functions
//-------------------------------------------------------------------------
- Normalizer(); // default constructor not implemented
- Normalizer &operator=(const Normalizer &that); // assignment operator not implemented
+ Normalizer() = delete; // default constructor not implemented
+ Normalizer &operator=(const Normalizer &that) = delete; // assignment operator not implemented
// Private utility methods for iteration
// For documentation, see the source code
diff --git a/thirdparty/icu4c/common/unicode/platform.h b/thirdparty/icu4c/common/unicode/platform.h
index b7e514442c..1605226a79 100644
--- a/thirdparty/icu4c/common/unicode/platform.h
+++ b/thirdparty/icu4c/common/unicode/platform.h
@@ -168,7 +168,7 @@
# define U_PLATFORM U_PF_LINUX
#elif defined(__APPLE__) && defined(__MACH__)
# include <TargetConditionals.h>
-# if defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE /* variant of TARGET_OS_MAC */
+# if (defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE) && (defined(TARGET_OS_MACCATALYST) && !TARGET_OS_MACCATALYST) /* variant of TARGET_OS_MAC */
# define U_PLATFORM U_PF_IPHONE
# else
# define U_PLATFORM U_PF_DARWIN
@@ -848,6 +848,21 @@ namespace std {
#endif
/**
+ * \def U_HIDDEN
+ * This is used to mark internal structs declared within external classes,
+ * to prevent the internal structs from having the same visibility as the
+ * class within which they are declared.
+ * @internal
+ */
+#ifdef U_HIDDEN
+ /* Use the predefined value. */
+#elif defined(__GNUC__)
+# define U_HIDDEN __attribute__((visibility("hidden")))
+#else
+# define U_HIDDEN
+#endif
+
+/**
* \def U_CALLCONV
* Similar to U_CDECL_BEGIN/U_CDECL_END, this qualifier is necessary
* in callback function typedefs to make sure that the calling convention
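Two things happen in platform.h: the U_PF_IPHONE test is tightened so Mac Catalyst builds fall through to U_PF_DARWIN, and a new U_HIDDEN macro is introduced to keep internal nested structs from inheriting the exported visibility of their enclosing class. A hedged sketch of how such a visibility macro is typically applied, with hypothetical macro and class names mirroring the #ifdef/#elif defined(__GNUC__) pattern above:

    #if defined(__GNUC__)
    #  define MY_EXPORT __attribute__((visibility("default")))
    #  define MY_HIDDEN __attribute__((visibility("hidden")))
    #else
    #  define MY_EXPORT
    #  define MY_HIDDEN
    #endif

    class MY_EXPORT Exported {
    public:
        struct MY_HIDDEN Impl {   // nested struct kept out of the export table
            int value;
        };
    };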
diff --git a/thirdparty/icu4c/common/unicode/rbbi.h b/thirdparty/icu4c/common/unicode/rbbi.h
index 0bad0d3897..d878243e3f 100644
--- a/thirdparty/icu4c/common/unicode/rbbi.h
+++ b/thirdparty/icu4c/common/unicode/rbbi.h
@@ -649,13 +649,6 @@ private:
// implementation
//=======================================================================
/**
- * Dumps caches and performs other actions associated with a complete change
- * in text or iteration position.
- * @internal (private)
- */
- void reset(void);
-
- /**
* Common initialization function, used by constructors and bufferClone.
* @internal (private)
*/
diff --git a/thirdparty/icu4c/common/unicode/resbund.h b/thirdparty/icu4c/common/unicode/resbund.h
index 7441592a0f..6e26a40591 100644
--- a/thirdparty/icu4c/common/unicode/resbund.h
+++ b/thirdparty/icu4c/common/unicode/resbund.h
@@ -484,7 +484,7 @@ public:
static UClassID U_EXPORT2 getStaticClassID();
private:
- ResourceBundle(); // default constructor not implemented
+ ResourceBundle() = delete; // default constructor not implemented
UResourceBundle *fResource;
void constructForLocale(const UnicodeString& path, const Locale& locale, UErrorCode& error);
diff --git a/thirdparty/icu4c/common/unicode/schriter.h b/thirdparty/icu4c/common/unicode/schriter.h
index 9dac099967..a2ab17982d 100644
--- a/thirdparty/icu4c/common/unicode/schriter.h
+++ b/thirdparty/icu4c/common/unicode/schriter.h
@@ -173,14 +173,6 @@ protected:
StringCharacterIterator();
/**
- * Sets the iterator to iterate over the provided string.
- * @param newText The string to be iterated over
- * @param newTextLength The length of the String
- * @stable ICU 2.0
- */
- void setText(const char16_t* newText, int32_t newTextLength);
-
- /**
* Copy of the iterated string object.
* @stable ICU 2.0
*/
diff --git a/thirdparty/icu4c/common/unicode/ubiditransform.h b/thirdparty/icu4c/common/unicode/ubiditransform.h
index 2dd7564010..24433aa8ac 100644
--- a/thirdparty/icu4c/common/unicode/ubiditransform.h
+++ b/thirdparty/icu4c/common/unicode/ubiditransform.h
@@ -28,7 +28,7 @@
/**
* \file
- * \brief Bidi Transformations
+ * \brief C API: Bidi Transformations
*/
/**
diff --git a/thirdparty/icu4c/common/unicode/uchar.h b/thirdparty/icu4c/common/unicode/uchar.h
index 253e6db173..6bb68e62a9 100644
--- a/thirdparty/icu4c/common/unicode/uchar.h
+++ b/thirdparty/icu4c/common/unicode/uchar.h
@@ -60,7 +60,7 @@ U_CDECL_BEGIN
* @see u_getUnicodeVersion
* @stable ICU 2.0
*/
-#define U_UNICODE_VERSION "14.0"
+#define U_UNICODE_VERSION "15.0"
/**
* \file
@@ -483,57 +483,55 @@ typedef enum UProperty {
* @stable ICU 62
*/
UCHAR_EXTENDED_PICTOGRAPHIC=64,
-#ifndef U_HIDE_DRAFT_API
/**
* Binary property of strings Basic_Emoji.
* See https://www.unicode.org/reports/tr51/#Emoji_Sets
*
- * @draft ICU 70
+ * @stable ICU 70
*/
UCHAR_BASIC_EMOJI=65,
/**
* Binary property of strings Emoji_Keycap_Sequence.
* See https://www.unicode.org/reports/tr51/#Emoji_Sets
*
- * @draft ICU 70
+ * @stable ICU 70
*/
UCHAR_EMOJI_KEYCAP_SEQUENCE=66,
/**
* Binary property of strings RGI_Emoji_Modifier_Sequence.
* See https://www.unicode.org/reports/tr51/#Emoji_Sets
*
- * @draft ICU 70
+ * @stable ICU 70
*/
UCHAR_RGI_EMOJI_MODIFIER_SEQUENCE=67,
/**
* Binary property of strings RGI_Emoji_Flag_Sequence.
* See https://www.unicode.org/reports/tr51/#Emoji_Sets
*
- * @draft ICU 70
+ * @stable ICU 70
*/
UCHAR_RGI_EMOJI_FLAG_SEQUENCE=68,
/**
* Binary property of strings RGI_Emoji_Tag_Sequence.
* See https://www.unicode.org/reports/tr51/#Emoji_Sets
*
- * @draft ICU 70
+ * @stable ICU 70
*/
UCHAR_RGI_EMOJI_TAG_SEQUENCE=69,
/**
* Binary property of strings RGI_Emoji_ZWJ_Sequence.
* See https://www.unicode.org/reports/tr51/#Emoji_Sets
*
- * @draft ICU 70
+ * @stable ICU 70
*/
UCHAR_RGI_EMOJI_ZWJ_SEQUENCE=70,
/**
* Binary property of strings RGI_Emoji.
* See https://www.unicode.org/reports/tr51/#Emoji_Sets
*
- * @draft ICU 70
+ * @stable ICU 70
*/
UCHAR_RGI_EMOJI=71,
-#endif // U_HIDE_DRAFT_API
#ifndef U_HIDE_DEPRECATED_API
/**
* One more than the last constant for binary Unicode properties.
@@ -1885,6 +1883,23 @@ enum UBlockCode {
/** @stable ICU 70 */
UBLOCK_ZNAMENNY_MUSICAL_NOTATION = 320, /*[1CF00]*/
+ // New blocks in Unicode 15.0
+
+ /** @stable ICU 72 */
+ UBLOCK_ARABIC_EXTENDED_C = 321, /*[10EC0]*/
+ /** @stable ICU 72 */
+ UBLOCK_CJK_UNIFIED_IDEOGRAPHS_EXTENSION_H = 322, /*[31350]*/
+ /** @stable ICU 72 */
+ UBLOCK_CYRILLIC_EXTENDED_D = 323, /*[1E030]*/
+ /** @stable ICU 72 */
+ UBLOCK_DEVANAGARI_EXTENDED_A = 324, /*[11B00]*/
+ /** @stable ICU 72 */
+ UBLOCK_KAKTOVIK_NUMERALS = 325, /*[1D2C0]*/
+ /** @stable ICU 72 */
+ UBLOCK_KAWI = 326, /*[11F00]*/
+ /** @stable ICU 72 */
+ UBLOCK_NAG_MUNDARI = 327, /*[1E4D0]*/
+
#ifndef U_HIDE_DEPRECATED_API
/**
* One more than the highest normal UBlockCode value.
@@ -1892,7 +1907,7 @@ enum UBlockCode {
*
* @deprecated ICU 58 The numeric value may change over time, see ICU ticket #12420.
*/
- UBLOCK_COUNT = 321,
+ UBLOCK_COUNT = 328,
#endif // U_HIDE_DEPRECATED_API
/** @stable ICU 2.0 */
@@ -2680,8 +2695,6 @@ typedef enum UVerticalOrientation {
U_CAPI UBool U_EXPORT2
u_hasBinaryProperty(UChar32 c, UProperty which);
-#ifndef U_HIDE_DRAFT_API
-
/**
* Returns true if the property is true for the string.
* Same as u_hasBinaryProperty(single code point, which)
@@ -2704,13 +2717,11 @@ u_hasBinaryProperty(UChar32 c, UProperty which);
* @see u_getBinaryPropertySet
* @see u_getIntPropertyValue
* @see u_getUnicodeVersion
- * @draft ICU 70
+ * @stable ICU 70
*/
U_CAPI UBool U_EXPORT2
u_stringHasBinaryProperty(const UChar *s, int32_t length, UProperty which);
-#endif // U_HIDE_DRAFT_API
-
/**
* Returns a frozen USet for a binary property.
* The library retains ownership over the returned object.
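This uchar.h hunk tracks the Unicode 15.0 update: the version macro moves to "15.0", seven new blocks are added (with UBLOCK_COUNT raised from 321 to 328), and the ICU 70 emoji string properties are promoted from @draft to @stable, so they remain visible even when U_HIDE_DRAFT_API is defined. A small usage sketch, assuming an ICU build that ships these headers and Unicode 15.0 data:

    #include <unicode/uchar.h>

    void demo() {
        // U+11F00 falls in the new Kawi block.
        UBlockCode block = ublock_getCode(0x11F00);
        bool isKawi = (block == UBLOCK_KAWI);

        // Regional indicators A + Q (Antarctica) form an RGI emoji flag sequence.
        static const UChar flag[] = { 0xD83C, 0xDDE6, 0xD83C, 0xDDF6 };
        UBool isFlag = u_stringHasBinaryProperty(flag, 4, UCHAR_RGI_EMOJI_FLAG_SEQUENCE);

        (void)isKawi; (void)isFlag;
    }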
diff --git a/thirdparty/icu4c/common/unicode/ucharstrie.h b/thirdparty/icu4c/common/unicode/ucharstrie.h
index b6f9e3e075..064244a74c 100644
--- a/thirdparty/icu4c/common/unicode/ucharstrie.h
+++ b/thirdparty/icu4c/common/unicode/ucharstrie.h
@@ -409,7 +409,7 @@ private:
pos_(uchars_), remainingMatchLength_(-1) {}
// No assignment operator.
- UCharsTrie &operator=(const UCharsTrie &other);
+ UCharsTrie &operator=(const UCharsTrie &other) = delete;
inline void stop() {
pos_=NULL;
diff --git a/thirdparty/icu4c/common/unicode/ucharstriebuilder.h b/thirdparty/icu4c/common/unicode/ucharstriebuilder.h
index e0cb0acdc5..5c8aa33ffb 100644
--- a/thirdparty/icu4c/common/unicode/ucharstriebuilder.h
+++ b/thirdparty/icu4c/common/unicode/ucharstriebuilder.h
@@ -133,8 +133,8 @@ public:
}
private:
- UCharsTrieBuilder(const UCharsTrieBuilder &other); // no copy constructor
- UCharsTrieBuilder &operator=(const UCharsTrieBuilder &other); // no assignment operator
+ UCharsTrieBuilder(const UCharsTrieBuilder &other) = delete; // no copy constructor
+ UCharsTrieBuilder &operator=(const UCharsTrieBuilder &other) = delete; // no assignment operator
void buildUChars(UStringTrieBuildOption buildOption, UErrorCode &errorCode);
diff --git a/thirdparty/icu4c/common/unicode/ucnv_cb.h b/thirdparty/icu4c/common/unicode/ucnv_cb.h
index 41845d1bca..b4ef99208b 100644
--- a/thirdparty/icu4c/common/unicode/ucnv_cb.h
+++ b/thirdparty/icu4c/common/unicode/ucnv_cb.h
@@ -15,8 +15,8 @@
*/
/**
- * \file
- * \brief C UConverter functions to aid the writers of callbacks
+ * \file
+ * \brief C API: UConverter functions to aid the writers of callbacks
*
* <h2> Callback API for UConverter </h2>
*
diff --git a/thirdparty/icu4c/common/unicode/ucnv_err.h b/thirdparty/icu4c/common/unicode/ucnv_err.h
index a13d01db09..c743e5614f 100644
--- a/thirdparty/icu4c/common/unicode/ucnv_err.h
+++ b/thirdparty/icu4c/common/unicode/ucnv_err.h
@@ -12,7 +12,7 @@
/**
* \file
- * \brief C UConverter predefined error callbacks
+ * \brief C API: UConverter predefined error callbacks
*
* <h2>Error Behaviour Functions</h2>
* Defines some error behaviour functions called by ucnv_{from,to}Unicode
diff --git a/thirdparty/icu4c/common/unicode/ucnvsel.h b/thirdparty/icu4c/common/unicode/ucnvsel.h
index 3d7d3327f7..9373ec951b 100644
--- a/thirdparty/icu4c/common/unicode/ucnvsel.h
+++ b/thirdparty/icu4c/common/unicode/ucnvsel.h
@@ -36,6 +36,7 @@
/**
* \file
+ * \brief C API: Encoding/charset encoding selector
*
* A converter selector is built with a set of encoding/charset names
* and given an input string returns the set of names of the
diff --git a/thirdparty/icu4c/common/unicode/ucpmap.h b/thirdparty/icu4c/common/unicode/ucpmap.h
index 31e1365cac..a740bd160f 100644
--- a/thirdparty/icu4c/common/unicode/ucpmap.h
+++ b/thirdparty/icu4c/common/unicode/ucpmap.h
@@ -13,8 +13,7 @@ U_CDECL_BEGIN
/**
* \file
- *
- * This file defines an abstract map from Unicode code points to integer values.
+ * \brief C API: This file defines an abstract map from Unicode code points to integer values.
*
* @see UCPMap
* @see UCPTrie
diff --git a/thirdparty/icu4c/common/unicode/ucptrie.h b/thirdparty/icu4c/common/unicode/ucptrie.h
index e2547ae2ee..dadef79c51 100644
--- a/thirdparty/icu4c/common/unicode/ucptrie.h
+++ b/thirdparty/icu4c/common/unicode/ucptrie.h
@@ -19,8 +19,7 @@ U_CDECL_BEGIN
/**
* \file
- *
- * This file defines an immutable Unicode code point trie.
+ * \brief C API: This file defines an immutable Unicode code point trie.
*
* @see UCPTrie
* @see UMutableCPTrie
diff --git a/thirdparty/icu4c/common/unicode/uloc.h b/thirdparty/icu4c/common/unicode/uloc.h
index b0bdbe1b87..21179c1b62 100644
--- a/thirdparty/icu4c/common/unicode/uloc.h
+++ b/thirdparty/icu4c/common/unicode/uloc.h
@@ -26,9 +26,9 @@
#include "unicode/utypes.h"
#include "unicode/uenum.h"
-/**
+/**
* \file
- * \brief C API: Locale
+ * \brief C API: Locale ID functionality similar to C++ class Locale
*
* <h2> ULoc C API for Locale </h2>
* A <code>Locale</code> represents a specific geographical, political,
diff --git a/thirdparty/icu4c/common/unicode/umachine.h b/thirdparty/icu4c/common/unicode/umachine.h
index 09c887c80e..6640606272 100644
--- a/thirdparty/icu4c/common/unicode/umachine.h
+++ b/thirdparty/icu4c/common/unicode/umachine.h
@@ -282,14 +282,8 @@ typedef int8_t UBool;
*/
#ifdef U_DEFINE_FALSE_AND_TRUE
// Use the predefined value.
-#elif defined(U_COMBINED_IMPLEMENTATION) || \
- defined(U_COMMON_IMPLEMENTATION) || defined(U_I18N_IMPLEMENTATION) || \
- defined(U_IO_IMPLEMENTATION) || defined(U_LAYOUTEX_IMPLEMENTATION) || \
- defined(U_TOOLUTIL_IMPLEMENTATION)
- // Inside ICU: Keep FALSE & TRUE available.
-# define U_DEFINE_FALSE_AND_TRUE 1
#else
- // Outside ICU: Avoid collision with non-macro definitions of FALSE & TRUE.
+ // Default to avoiding collision with non-macro definitions of FALSE & TRUE.
# define U_DEFINE_FALSE_AND_TRUE 0
#endif
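With this change the FALSE/TRUE macros default to off even for ICU's own implementation files; code that still needs them can opt in explicitly. A short sketch, assuming the define is made before any ICU header is included:

    /* Request the legacy macros (the new default is 0, i.e. not defined). */
    #define U_DEFINE_FALSE_AND_TRUE 1
    #include <unicode/umachine.h>

    UBool legacyStyle = TRUE;   /* still compiles with the opt-in above */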
diff --git a/thirdparty/icu4c/common/unicode/umisc.h b/thirdparty/icu4c/common/unicode/umisc.h
index 213290b9af..4e9dda7450 100644
--- a/thirdparty/icu4c/common/unicode/umisc.h
+++ b/thirdparty/icu4c/common/unicode/umisc.h
@@ -21,9 +21,9 @@
/**
* \file
- * \brief C API:misc definitions
+ * \brief C API: Miscellaneous definitions
*
- * This file contains miscellaneous definitions for the C APIs.
+ * This file contains miscellaneous definitions for the C APIs.
*/
U_CDECL_BEGIN
diff --git a/thirdparty/icu4c/common/unicode/umutablecptrie.h b/thirdparty/icu4c/common/unicode/umutablecptrie.h
index 3b950055da..d60fd61819 100644
--- a/thirdparty/icu4c/common/unicode/umutablecptrie.h
+++ b/thirdparty/icu4c/common/unicode/umutablecptrie.h
@@ -21,8 +21,7 @@ U_CDECL_BEGIN
/**
* \file
- *
- * This file defines a mutable Unicode code point trie.
+ * \brief C API: This file defines a mutable Unicode code point trie.
*
* @see UCPTrie
* @see UMutableCPTrie
diff --git a/thirdparty/icu4c/common/unicode/uniset.h b/thirdparty/icu4c/common/unicode/uniset.h
index 310c7c8d20..33e35c4def 100644
--- a/thirdparty/icu4c/common/unicode/uniset.h
+++ b/thirdparty/icu4c/common/unicode/uniset.h
@@ -794,13 +794,11 @@ public:
*/
virtual UBool isEmpty(void) const;
-#ifndef U_HIDE_DRAFT_API
/**
* @return true if this set contains multi-character strings or the empty string.
- * @draft ICU 70
+ * @stable ICU 70
*/
UBool hasStrings() const;
-#endif // U_HIDE_DRAFT_API
/**
* Returns true if this set contains the given character.
@@ -1678,8 +1676,6 @@ private:
UnicodeString& rebuiltPat,
UErrorCode& ec);
- static const UnicodeSet* getInclusions(int32_t src, UErrorCode &status);
-
/**
* A filter that returns true if the given code point should be
* included in the UnicodeSet being constructed.
@@ -1700,11 +1696,6 @@ private:
const UnicodeSet* inclusions,
UErrorCode &status);
- // UCPMap is now stable ICU 63
- void applyIntPropertyValue(const UCPMap *map,
- UCPMapValueFilter *filter, const void *context,
- UErrorCode &errorCode);
-
/**
* Set the new pattern to cache.
*/
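UnicodeSet::hasStrings() likewise becomes stable (ICU 70) and loses its draft guard, while two unused private helpers are dropped. A small C++ sketch of the accessor; the pattern string is illustrative only:

    #include <unicode/uniset.h>

    bool containsMultiCharStrings(UErrorCode &status) {
        // "[a-z{ch}]" holds the range a-z plus the two-character string "ch".
        icu::UnicodeSet set(icu::UnicodeString(u"[a-z{ch}]"), status);
        return U_SUCCESS(status) && set.hasStrings();   // true for this pattern
    }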
diff --git a/thirdparty/icu4c/common/unicode/urename.h b/thirdparty/icu4c/common/unicode/urename.h
index d9f9b8f336..d3e23b8fa7 100644
--- a/thirdparty/icu4c/common/unicode/urename.h
+++ b/thirdparty/icu4c/common/unicode/urename.h
@@ -925,6 +925,12 @@
#define udatpg_setDateTimeFormatForStyle U_ICU_ENTRY_POINT_RENAME(udatpg_setDateTimeFormatForStyle)
#define udatpg_setDecimal U_ICU_ENTRY_POINT_RENAME(udatpg_setDecimal)
#define udict_swap U_ICU_ENTRY_POINT_RENAME(udict_swap)
+#define udispopt_fromGrammaticalCaseIdentifier U_ICU_ENTRY_POINT_RENAME(udispopt_fromGrammaticalCaseIdentifier)
+#define udispopt_fromNounClassIdentifier U_ICU_ENTRY_POINT_RENAME(udispopt_fromNounClassIdentifier)
+#define udispopt_fromPluralCategoryIdentifier U_ICU_ENTRY_POINT_RENAME(udispopt_fromPluralCategoryIdentifier)
+#define udispopt_getGrammaticalCaseIdentifier U_ICU_ENTRY_POINT_RENAME(udispopt_getGrammaticalCaseIdentifier)
+#define udispopt_getNounClassIdentifier U_ICU_ENTRY_POINT_RENAME(udispopt_getNounClassIdentifier)
+#define udispopt_getPluralCategoryIdentifier U_ICU_ENTRY_POINT_RENAME(udispopt_getPluralCategoryIdentifier)
#define udtitvfmt_close U_ICU_ENTRY_POINT_RENAME(udtitvfmt_close)
#define udtitvfmt_closeResult U_ICU_ENTRY_POINT_RENAME(udtitvfmt_closeResult)
#define udtitvfmt_format U_ICU_ENTRY_POINT_RENAME(udtitvfmt_format)
@@ -1247,6 +1253,7 @@
#define unum_getLocaleByType U_ICU_ENTRY_POINT_RENAME(unum_getLocaleByType)
#define unum_getSymbol U_ICU_ENTRY_POINT_RENAME(unum_getSymbol)
#define unum_getTextAttribute U_ICU_ENTRY_POINT_RENAME(unum_getTextAttribute)
+#define unum_hasAttribute U_ICU_ENTRY_POINT_RENAME(unum_hasAttribute)
#define unum_open U_ICU_ENTRY_POINT_RENAME(unum_open)
#define unum_parse U_ICU_ENTRY_POINT_RENAME(unum_parse)
#define unum_parseDecimal U_ICU_ENTRY_POINT_RENAME(unum_parseDecimal)
diff --git a/thirdparty/icu4c/common/unicode/uscript.h b/thirdparty/icu4c/common/unicode/uscript.h
index 6cb1532808..dc97ab2ba5 100644
--- a/thirdparty/icu4c/common/unicode/uscript.h
+++ b/thirdparty/icu4c/common/unicode/uscript.h
@@ -495,6 +495,11 @@ typedef enum UScriptCode {
/** @stable ICU 70 */
USCRIPT_VITHKUQI = 197,/* Vith */
+ /** @stable ICU 72 */
+ USCRIPT_KAWI = 198,/* Kawi */
+ /** @stable ICU 72 */
+ USCRIPT_NAG_MUNDARI = 199,/* Nagm */
+
#ifndef U_HIDE_DEPRECATED_API
/**
* One more than the highest normal UScriptCode value.
@@ -502,7 +507,7 @@ typedef enum UScriptCode {
*
* @deprecated ICU 58 The numeric value may change over time, see ICU ticket #12420.
*/
- USCRIPT_CODE_LIMIT = 198
+ USCRIPT_CODE_LIMIT = 200
#endif // U_HIDE_DEPRECATED_API
} UScriptCode;
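ICU 72 adds the Unicode 15.0 scripts Kawi and Nag Mundari and bumps the deprecated USCRIPT_CODE_LIMIT accordingly. A quick sketch of reading their short (ISO 15924) codes back:

    #include <unicode/uscript.h>
    #include <stdio.h>

    int main(void) {
        printf("%s\n", uscript_getShortName(USCRIPT_KAWI));        /* "Kawi" */
        printf("%s\n", uscript_getShortName(USCRIPT_NAG_MUNDARI)); /* "Nagm" */
        return 0;
    }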
diff --git a/thirdparty/icu4c/common/unicode/uset.h b/thirdparty/icu4c/common/unicode/uset.h
index 33332f2d36..5dd890e148 100644
--- a/thirdparty/icu4c/common/unicode/uset.h
+++ b/thirdparty/icu4c/common/unicode/uset.h
@@ -850,15 +850,13 @@ uset_removeAllStrings(USet* set);
U_CAPI UBool U_EXPORT2
uset_isEmpty(const USet* set);
-#ifndef U_HIDE_DRAFT_API
/**
* @param set the set
* @return true if this set contains multi-character strings or the empty string.
- * @draft ICU 70
+ * @stable ICU 70
*/
U_CAPI UBool U_EXPORT2
uset_hasStrings(const USet *set);
-#endif // U_HIDE_DRAFT_API
/**
* Returns true if the given USet contains the given character.
@@ -941,18 +939,16 @@ uset_charAt(const USet* set, int32_t charIndex);
U_CAPI int32_t U_EXPORT2
uset_size(const USet* set);
-#ifndef U_HIDE_DRAFT_API
/**
* @param set the set
* @return the number of ranges in this set.
- * @draft ICU 70
+ * @stable ICU 70
* @see uset_getItemCount
* @see uset_getItem
* @see uset_size
*/
U_CAPI int32_t U_EXPORT2
uset_getRangeCount(const USet *set);
-#endif // U_HIDE_DRAFT_API
/**
* Returns the number of items in this set. An item is either a range
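Both uset_hasStrings() and uset_getRangeCount() graduate to stable (ICU 70) in the hunks above. A hedged C sketch that walks the code point ranges and then checks for strings; the pattern, the u"" literal (assuming UChar is char16_t, the default), and the error handling are illustrative:

    #include <unicode/uset.h>
    #include <stdio.h>

    int main(void) {
        UErrorCode ec = U_ZERO_ERROR;
        USet *set = uset_openPattern(u"[a-dq{ch}]", -1, &ec);
        if (U_FAILURE(ec)) return 1;

        int32_t ranges = uset_getRangeCount(set);           /* ranges come first among the items */
        for (int32_t i = 0; i < ranges; ++i) {
            UChar32 start, end;
            uset_getItem(set, i, &start, &end, NULL, 0, &ec);
            printf("U+%04X..U+%04X\n", start, end);         /* a-d, then q-q */
        }
        printf("has strings: %d\n", (int)uset_hasStrings(set));   /* 1, because of {ch} */
        uset_close(set);
        return 0;
    }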
diff --git a/thirdparty/icu4c/common/unicode/usetiter.h b/thirdparty/icu4c/common/unicode/usetiter.h
index 5834c308e4..34992d94b7 100644
--- a/thirdparty/icu4c/common/unicode/usetiter.h
+++ b/thirdparty/icu4c/common/unicode/usetiter.h
@@ -164,14 +164,13 @@ class U_COMMON_API UnicodeSetIterator U_FINAL : public UObject {
*/
const UnicodeString& getString();
-#ifndef U_HIDE_DRAFT_API
/**
* Skips over the remaining code points/ranges, if any.
* A following call to next() or nextRange() will yield a string, if there is one.
* No-op if next() would return false, or if it would yield a string anyway.
*
* @return *this
- * @draft ICU 70
+ * @stable ICU 70
* @see UnicodeSet#strings()
*/
inline UnicodeSetIterator &skipToStrings() {
@@ -181,7 +180,6 @@ class U_COMMON_API UnicodeSetIterator U_FINAL : public UObject {
nextElement = 0;
return *this;
}
-#endif // U_HIDE_DRAFT_API
/**
* Advances the iteration position to the next element in the set,
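skipToStrings() is also stabilized in ICU 70. A short C++ sketch that jumps past the code points and prints only the set's strings:

    #include <unicode/uniset.h>
    #include <unicode/usetiter.h>
    #include <iostream>
    #include <string>

    void printStrings(const icu::UnicodeSet &set) {
        icu::UnicodeSetIterator it(set);
        it.skipToStrings();                 // remaining elements are strings only
        while (it.next()) {
            std::string utf8;
            it.getString().toUTF8String(utf8);
            std::cout << utf8 << '\n';
        }
    }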
diff --git a/thirdparty/icu4c/common/unicode/utf_old.h b/thirdparty/icu4c/common/unicode/utf_old.h
index 160f5ad0a9..6b868c7280 100644
--- a/thirdparty/icu4c/common/unicode/utf_old.h
+++ b/thirdparty/icu4c/common/unicode/utf_old.h
@@ -293,10 +293,10 @@ typedef int32_t UTextOffset;
#ifdef U_UTF8_IMPL
// No forward declaration if compiling utf_impl.cpp, which defines utf8_countTrailBytes.
#elif defined(U_STATIC_IMPLEMENTATION) || defined(U_COMMON_IMPLEMENTATION)
-U_CFUNC const uint8_t utf8_countTrailBytes[];
+U_CAPI const uint8_t utf8_countTrailBytes[];
#else
-U_CFUNC U_IMPORT const uint8_t utf8_countTrailBytes[]; /* U_IMPORT2? */ /*U_IMPORT*/
-#endif
+U_CFUNC U_IMPORT const uint8_t utf8_countTrailBytes[];
+#endif
/**
* Count the trail bytes for a UTF-8 lead byte.
diff --git a/thirdparty/icu4c/common/unicode/uvernum.h b/thirdparty/icu4c/common/unicode/uvernum.h
index 2706e0b060..a93f3509ef 100644
--- a/thirdparty/icu4c/common/unicode/uvernum.h
+++ b/thirdparty/icu4c/common/unicode/uvernum.h
@@ -31,19 +31,12 @@
* renaming macro, and copyright
*
* The following files need to be updated as well, which can be done
- * by running the UNIX makefile target 'update-windows-makefiles' in icu/source.
+ * by running the UNIX makefile target 'update-windows-makefiles' in icu4c/source.
*
- *
- * source/common/common_uwp.vcxproj
- * source/common/common.vcxproj - update 'Output file name' on the link tab so
- * that it contains the new major/minor combination
- * source/i18n/i18n.vcxproj - same as for the common.vcxproj
- * source/i18n/i18n_uwp.vcxproj - same as for the common_uwp.vcxproj
- * source/layoutex/layoutex.vcproj - same
- * source/stubdata/stubdata.vcproj - same as for the common.vcxproj
- * source/io/io.vcproj - same as for the common.vcxproj
+ * source/allinone/Build.Windows.IcuVersion.props - Update the IcuMajorVersion
* source/data/makedata.mak - change U_ICUDATA_NAME so that it contains
- * the new major/minor combination and the Unicode version.
+ * the new major/minor combination, and UNICODE_VERSION
+ * for the Unicode version.
*/
#ifndef UVERNUM_H
@@ -60,7 +53,7 @@
* This value will change in the subsequent releases of ICU
* @stable ICU 2.4
*/
-#define U_ICU_VERSION_MAJOR_NUM 71
+#define U_ICU_VERSION_MAJOR_NUM 72
/** The current ICU minor version as an integer.
* This value will change in the subsequent releases of ICU
@@ -86,7 +79,7 @@
* This value will change in the subsequent releases of ICU
* @stable ICU 2.6
*/
-#define U_ICU_VERSION_SUFFIX _71
+#define U_ICU_VERSION_SUFFIX _72
/**
* \def U_DEF2_ICU_ENTRY_POINT_RENAME
@@ -139,7 +132,7 @@
* This value will change in the subsequent releases of ICU
* @stable ICU 2.4
*/
-#define U_ICU_VERSION "71.1"
+#define U_ICU_VERSION "72.1"
/**
* The current ICU library major version number as a string, for library name suffixes.
@@ -152,13 +145,13 @@
*
* @stable ICU 2.6
*/
-#define U_ICU_VERSION_SHORT "71"
+#define U_ICU_VERSION_SHORT "72"
#ifndef U_HIDE_INTERNAL_API
/** Data version in ICU4C.
* @internal ICU 4.4 Internal Use Only
**/
-#define U_ICU_DATA_VERSION "71.1"
+#define U_ICU_DATA_VERSION "72.1"
#endif /* U_HIDE_INTERNAL_API */
/*===========================================================================
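The version bump from 71.1 to 72.1 touches the usual macro set above. Downstream code that needs to gate on the new release can key off the major-version macro, for example:

    #include <unicode/uvernum.h>

    #if U_ICU_VERSION_MAJOR_NUM >= 72
    /* ICU 72 additions (e.g. the Kawi and Nag Mundari script codes) are available. */
    #endif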
diff --git a/thirdparty/icu4c/common/unifiedcache.cpp b/thirdparty/icu4c/common/unifiedcache.cpp
index 493ab79f6d..cfb000b2c8 100644
--- a/thirdparty/icu4c/common/unifiedcache.cpp
+++ b/thirdparty/icu4c/common/unifiedcache.cpp
@@ -22,7 +22,7 @@
static icu::UnifiedCache *gCache = NULL;
static std::mutex *gCacheMutex = nullptr;
static std::condition_variable *gInProgressValueAddedCond;
-static icu::UInitOnce gCacheInitOnce = U_INITONCE_INITIALIZER;
+static icu::UInitOnce gCacheInitOnce {};
static const int32_t MAX_EVICT_ITERATIONS = 10;
static const int32_t DEFAULT_MAX_UNUSED = 1000;
@@ -38,27 +38,27 @@ static UBool U_CALLCONV unifiedcache_cleanup() {
gCacheMutex = nullptr;
gInProgressValueAddedCond->~condition_variable();
gInProgressValueAddedCond = nullptr;
- return TRUE;
+ return true;
}
U_CDECL_END
U_NAMESPACE_BEGIN
-U_CAPI int32_t U_EXPORT2
+int32_t U_EXPORT2
ucache_hashKeys(const UHashTok key) {
const CacheKeyBase *ckey = (const CacheKeyBase *) key.pointer;
return ckey->hashCode();
}
-U_CAPI UBool U_EXPORT2
+UBool U_EXPORT2
ucache_compareKeys(const UHashTok key1, const UHashTok key2) {
const CacheKeyBase *p1 = (const CacheKeyBase *) key1.pointer;
const CacheKeyBase *p2 = (const CacheKeyBase *) key2.pointer;
return *p1 == *p2;
}
-U_CAPI void U_EXPORT2
+void U_EXPORT2
ucache_deleteKey(void *obj) {
CacheKeyBase *p = (CacheKeyBase *) obj;
delete p;
@@ -161,7 +161,7 @@ void UnifiedCache::flush() const {
// Use a loop in case cache items that are flushed held hard references to
// other cache items making those additional cache items eligible for
// flushing.
- while (_flush(FALSE));
+ while (_flush(false));
}
void UnifiedCache::handleUnreferencedObject() const {
@@ -225,7 +225,7 @@ UnifiedCache::~UnifiedCache() {
// each other and entries with hard references from outside the cache.
// Nothing we can do about these so proceed to wipe out the cache.
std::lock_guard<std::mutex> lock(*gCacheMutex);
- _flush(TRUE);
+ _flush(true);
}
uhash_close(fHashtable);
fHashtable = nullptr;
@@ -244,7 +244,7 @@ UnifiedCache::_nextElement() const {
}
UBool UnifiedCache::_flush(UBool all) const {
- UBool result = FALSE;
+ UBool result = false;
int32_t origSize = uhash_count(fHashtable);
for (int32_t i = 0; i < origSize; ++i) {
const UHashElement *element = _nextElement();
@@ -257,7 +257,7 @@ UBool UnifiedCache::_flush(UBool all) const {
U_ASSERT(sharedObject->cachePtr == this);
uhash_removeElement(fHashtable, element);
removeSoftRef(sharedObject); // Deletes the sharedObject when softRefCount goes to zero.
- result = TRUE;
+ result = true;
}
}
return result;
@@ -365,14 +365,14 @@ UBool UnifiedCache::_poll(
// fetch out the contents and return them.
if (element != NULL) {
_fetch(element, value, status);
- return TRUE;
+ return true;
}
// The hash table contained nothing for this key.
// Insert an inProgress place holder value.
// Our caller will create the final value and update the hash table.
_putNew(key, fNoValue, U_ZERO_ERROR, status);
- return FALSE;
+ return false;
}
void UnifiedCache::_get(
@@ -471,7 +471,7 @@ UBool UnifiedCache::_isEvictable(const UHashElement *element) const
// Entries that are under construction are never evictable
if (_inProgress(theValue, theKey->fCreationStatus)) {
- return FALSE;
+ return false;
}
// We can evict entries that are either not a primary or have just
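Besides the TRUE/FALSE-to-true/false sweep, the cache globals drop the removed U_INITONCE_INITIALIZER macro in favour of value initialization. A hedged sketch of the same pattern with a hypothetical loader (umtx_initOnce and UInitOnce come from ICU's internal umutex.h, as used elsewhere in this patch):

    #include "umutex.h"   // internal header providing UInitOnce / umtx_initOnce

    static icu::UInitOnce gMyInitOnce {};        // replaces "= U_INITONCE_INITIALIZER"

    static void U_CALLCONV loadMyData(UErrorCode &errorCode) {
        /* one-time setup; runs at most once across threads */
        (void)errorCode;
    }

    void ensureMyData(UErrorCode &errorCode) {
        umtx_initOnce(gMyInitOnce, &loadMyData, errorCode);
    }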
diff --git a/thirdparty/icu4c/common/unifiedcache.h b/thirdparty/icu4c/common/unifiedcache.h
index 07a734b8bd..4b9222124a 100644
--- a/thirdparty/icu4c/common/unifiedcache.h
+++ b/thirdparty/icu4c/common/unifiedcache.h
@@ -354,8 +354,8 @@ class U_COMMON_API UnifiedCache : public UnifiedCacheBase {
mutable int64_t fAutoEvictedCount;
SharedObject *fNoValue;
- UnifiedCache(const UnifiedCache &other);
- UnifiedCache &operator=(const UnifiedCache &other);
+ UnifiedCache(const UnifiedCache &other) = delete;
+ UnifiedCache &operator=(const UnifiedCache &other) = delete;
/**
* Flushes the contents of the cache. If cache values hold references to other
diff --git a/thirdparty/icu4c/common/uniset.cpp b/thirdparty/icu4c/common/uniset.cpp
index 92a81a1a02..4faace525c 100644
--- a/thirdparty/icu4c/common/uniset.cpp
+++ b/thirdparty/icu4c/common/uniset.cpp
@@ -82,7 +82,7 @@ static int32_t _dbgCount = 0;
static inline void _dbgct(UnicodeSet* set) {
UnicodeString str;
- set->toPattern(str, TRUE);
+ set->toPattern(str, true);
char buf[40];
str.extract(0, 39, buf, "");
printf("DEBUG UnicodeSet: ct 0x%08X; %d %s\n", set, ++_dbgCount, buf);
@@ -90,7 +90,7 @@ static inline void _dbgct(UnicodeSet* set) {
static inline void _dbgdt(UnicodeSet* set) {
UnicodeString str;
- set->toPattern(str, TRUE);
+ set->toPattern(str, true);
char buf[40];
str.extract(0, 39, buf, "");
printf("DEBUG UnicodeSet: dt 0x%08X; %d %s\n", set, --_dbgCount, buf);
@@ -204,7 +204,7 @@ UnicodeSet::~UnicodeSet() {
* Assigns this object to be a copy of another.
*/
UnicodeSet& UnicodeSet::operator=(const UnicodeSet& o) {
- return copyFrom(o, FALSE);
+ return copyFrom(o, false);
}
UnicodeSet& UnicodeSet::copyFrom(const UnicodeSet& o, UBool asThawed) {
@@ -265,7 +265,7 @@ UnicodeSet* UnicodeSet::clone() const {
}
UnicodeSet *UnicodeSet::cloneAsThawed() const {
- return new UnicodeSet(*this, TRUE);
+ return new UnicodeSet(*this, true);
}
/**
@@ -352,7 +352,7 @@ UBool UnicodeSet::contains(UChar32 c) const {
return stringSpan->contains(c);
}
if (c >= UNICODESET_HIGH) { // Don't need to check LOW bound
- return FALSE;
+ return false;
}
int32_t i = findCodePoint(c);
return (UBool)(i & 1); // return true if odd
@@ -447,7 +447,7 @@ UBool UnicodeSet::containsAll(const UnicodeSet& c) const {
int32_t n = c.getRangeCount();
for (int i=0; i<n; ++i) {
if (!contains(c.getRangeStart(i), c.getRangeEnd(i))) {
- return FALSE;
+ return false;
}
}
return !c.hasStrings() || (strings != nullptr && strings->containsAll(*c.strings));
@@ -493,7 +493,7 @@ UBool UnicodeSet::containsNone(const UnicodeSet& c) const {
int32_t n = c.getRangeCount();
for (int32_t i=0; i<n; ++i) {
if (!containsNone(c.getRangeStart(i), c.getRangeEnd(i))) {
- return FALSE;
+ return false;
}
}
return strings == nullptr || !c.hasStrings() || strings->containsNone(*c.strings);
@@ -531,10 +531,10 @@ UBool UnicodeSet::matchesIndexValue(uint8_t v) const {
UChar32 high = getRangeEnd(i);
if ((low & ~0xFF) == (high & ~0xFF)) {
if ((low & 0xFF) <= v && v <= (high & 0xFF)) {
- return TRUE;
+ return true;
}
} else if ((low & 0xFF) <= v || v <= (high & 0xFF)) {
- return TRUE;
+ return true;
}
}
if (hasStrings()) {
@@ -545,11 +545,11 @@ UBool UnicodeSet::matchesIndexValue(uint8_t v) const {
}
UChar32 c = s.char32At(0);
if ((c & 0xFF) == v) {
- return TRUE;
+ return true;
}
}
}
- return FALSE;
+ return false;
}
/**
@@ -1603,24 +1603,24 @@ int32_t UnicodeSet::serialize(uint16_t *dest, int32_t destCapacity, UErrorCode&
//----------------------------------------------------------------
/**
- * Allocate our strings vector and return TRUE if successful.
+ * Allocate our strings vector and return true if successful.
*/
UBool UnicodeSet::allocateStrings(UErrorCode &status) {
if (U_FAILURE(status)) {
- return FALSE;
+ return false;
}
strings = new UVector(uprv_deleteUObject,
uhash_compareUnicodeString, 1, status);
if (strings == NULL) { // Check for memory allocation error.
status = U_MEMORY_ALLOCATION_ERROR;
- return FALSE;
+ return false;
}
if (U_FAILURE(status)) {
delete strings;
strings = NULL;
- return FALSE;
+ return false;
}
- return TRUE;
+ return true;
}
int32_t UnicodeSet::nextCapacity(int32_t minCapacity) {
diff --git a/thirdparty/icu4c/common/uniset_closure.cpp b/thirdparty/icu4c/common/uniset_closure.cpp
index 882231ba1a..d7dab2a17b 100644
--- a/thirdparty/icu4c/common/uniset_closure.cpp
+++ b/thirdparty/icu4c/common/uniset_closure.cpp
@@ -74,7 +74,7 @@ UnicodeSet& UnicodeSet::applyPattern(const UnicodeString& pattern,
if (options & USET_IGNORE_SPACE) {
// Skip over trailing whitespace
- ICU_Utility::skipWhitespace(pattern, i, TRUE);
+ ICU_Utility::skipWhitespace(pattern, i, true);
}
if (i != pattern.length()) {
@@ -141,7 +141,7 @@ addCaseMapping(UnicodeSet &set, int32_t result, const UChar *full, UnicodeString
set.add(result);
} else {
// add a string case mapping from full with length result
- str.setTo((UBool)FALSE, full, result);
+ str.setTo((UBool)false, full, result);
set.add(str);
}
}
diff --git a/thirdparty/icu4c/common/uniset_props.cpp b/thirdparty/icu4c/common/uniset_props.cpp
index 49d6caabc7..48c0a26a71 100644
--- a/thirdparty/icu4c/common/uniset_props.cpp
+++ b/thirdparty/icu4c/common/uniset_props.cpp
@@ -62,7 +62,7 @@ U_CDECL_BEGIN
static UBool U_CALLCONV uset_cleanup();
static UnicodeSet *uni32Singleton;
-static icu::UInitOnce uni32InitOnce = U_INITONCE_INITIALIZER;
+static icu::UInitOnce uni32InitOnce {};
/**
* Cleanup function for UnicodeSet
@@ -71,7 +71,7 @@ static UBool U_CALLCONV uset_cleanup(void) {
delete uni32Singleton;
uni32Singleton = NULL;
uni32InitOnce.reset();
- return TRUE;
+ return true;
}
U_CDECL_END
@@ -170,7 +170,7 @@ UnicodeSet& UnicodeSet::applyPattern(const UnicodeString& pattern,
int32_t i = pos.getIndex();
// Skip over trailing whitespace
- ICU_Utility::skipWhitespace(pattern, i, TRUE);
+ ICU_Utility::skipWhitespace(pattern, i, true);
if (i != pattern.length()) {
status = U_ILLEGAL_ARGUMENT_ERROR;
}
@@ -279,7 +279,7 @@ void UnicodeSet::applyPattern(RuleCharacterIterator& chars,
}
UnicodeString patLocal, buf;
- UBool usePat = FALSE;
+ UBool usePat = false;
UnicodeSetPointer scratch;
RuleCharacterIterator::Pos backup;
@@ -289,7 +289,7 @@ void UnicodeSet::applyPattern(RuleCharacterIterator& chars,
UChar32 lastChar = 0;
UChar op = 0;
- UBool invert = FALSE;
+ UBool invert = false;
clear();
@@ -299,7 +299,7 @@ void UnicodeSet::applyPattern(RuleCharacterIterator& chars,
(lastItem == 2 && (op == 0 || op == u'-' || op == u'&')));
UChar32 c = 0;
- UBool literal = FALSE;
+ UBool literal = false;
UnicodeSet* nested = 0; // alias - do not delete
// -------- Check for property pattern
@@ -336,7 +336,7 @@ void UnicodeSet::applyPattern(RuleCharacterIterator& chars,
c = chars.next(opts, literal, ec);
if (U_FAILURE(ec)) return;
if (c == u'^' && !literal) {
- invert = TRUE;
+ invert = true;
patLocal.append(u'^');
chars.getPos(backup); // prepare to backup
c = chars.next(opts, literal, ec);
@@ -345,7 +345,7 @@ void UnicodeSet::applyPattern(RuleCharacterIterator& chars,
// Fall through to handle special leading '-';
// otherwise restart loop for nested [], \p{}, etc.
if (c == u'-') {
- literal = TRUE;
+ literal = true;
// Fall through to handle literal '-' below
} else {
chars.setPos(backup); // backup
@@ -381,7 +381,7 @@ void UnicodeSet::applyPattern(RuleCharacterIterator& chars,
return;
}
add(lastChar, lastChar);
- _appendToPat(patLocal, lastChar, FALSE);
+ _appendToPat(patLocal, lastChar, false);
lastItem = 0;
op = 0;
}
@@ -408,11 +408,11 @@ void UnicodeSet::applyPattern(RuleCharacterIterator& chars,
if (U_FAILURE(ec)) return;
break;
case 3: // `nested' already parsed
- nested->_toPattern(patLocal, FALSE);
+ nested->_toPattern(patLocal, false);
break;
}
- usePat = TRUE;
+ usePat = true;
if (mode == 0) {
// Entire pattern is a category; leave parse loop
@@ -454,7 +454,7 @@ void UnicodeSet::applyPattern(RuleCharacterIterator& chars,
case u']':
if (lastItem == 1) {
add(lastChar, lastChar);
- _appendToPat(patLocal, lastChar, FALSE);
+ _appendToPat(patLocal, lastChar, false);
}
// Treat final trailing '-' as a literal
if (op == u'-') {
@@ -508,17 +508,17 @@ void UnicodeSet::applyPattern(RuleCharacterIterator& chars,
}
if (lastItem == 1) {
add(lastChar, lastChar);
- _appendToPat(patLocal, lastChar, FALSE);
+ _appendToPat(patLocal, lastChar, false);
}
lastItem = 0;
buf.truncate(0);
{
- UBool ok = FALSE;
+ UBool ok = false;
while (!chars.atEnd()) {
c = chars.next(opts, literal, ec);
if (U_FAILURE(ec)) return;
if (c == u'}' && !literal) {
- ok = TRUE;
+ ok = true;
break;
}
buf.append(c);
@@ -534,7 +534,7 @@ void UnicodeSet::applyPattern(RuleCharacterIterator& chars,
// processing
add(buf);
patLocal.append(u'{');
- _appendToPat(patLocal, buf, FALSE);
+ _appendToPat(patLocal, buf, false);
patLocal.append(u'}');
continue;
case SymbolTable::SYMBOL_REF:
@@ -557,10 +557,10 @@ void UnicodeSet::applyPattern(RuleCharacterIterator& chars,
if (anchor && op == 0) {
if (lastItem == 1) {
add(lastChar, lastChar);
- _appendToPat(patLocal, lastChar, FALSE);
+ _appendToPat(patLocal, lastChar, false);
}
add(U_ETHER);
- usePat = TRUE;
+ usePat = true;
patLocal.append((UChar) SymbolTable::SYMBOL_REF);
patLocal.append(u']');
mode = 2;
@@ -594,14 +594,14 @@ void UnicodeSet::applyPattern(RuleCharacterIterator& chars,
return;
}
add(lastChar, c);
- _appendToPat(patLocal, lastChar, FALSE);
+ _appendToPat(patLocal, lastChar, false);
patLocal.append(op);
- _appendToPat(patLocal, c, FALSE);
+ _appendToPat(patLocal, c, false);
lastItem = 0;
op = 0;
} else {
add(lastChar, lastChar);
- _appendToPat(patLocal, lastChar, FALSE);
+ _appendToPat(patLocal, lastChar, false);
lastChar = c;
}
break;
@@ -646,7 +646,7 @@ void UnicodeSet::applyPattern(RuleCharacterIterator& chars,
if (usePat) {
rebuiltPat.append(patLocal);
} else {
- _generatePattern(rebuiltPat, FALSE);
+ _generatePattern(rebuiltPat, false);
}
if (isBogus() && U_SUCCESS(ec)) {
// We likely ran out of memory. AHHH!
@@ -756,12 +756,12 @@ static UBool mungeCharName(char* dst, const char* src, int32_t dstCapacity) {
if (ch == ' ' && (j==0 || (j>0 && dst[j-1]==' '))) {
continue;
}
- if (j >= dstCapacity) return FALSE;
+ if (j >= dstCapacity) return false;
dst[j++] = ch;
}
if (j > 0 && dst[j-1] == ' ') --j;
dst[j] = 0;
- return TRUE;
+ return true;
}
} // namespace
@@ -789,7 +789,7 @@ UnicodeSet::applyIntPropertyValue(UProperty prop, int32_t value, UErrorCode& ec)
if (value == 0 || value == 1) {
const USet *set = u_getBinaryPropertySet(prop, &ec);
if (U_FAILURE(ec)) { return *this; }
- copyFrom(*UnicodeSet::fromUSet(set), TRUE);
+ copyFrom(*UnicodeSet::fromUSet(set), true);
if (value == 0) {
complement().removeAllStrings(); // code point complement
}
@@ -830,7 +830,7 @@ UnicodeSet::applyPropertyAlias(const UnicodeString& prop,
UProperty p;
int32_t v;
- UBool invert = FALSE;
+ UBool invert = false;
if (value.length() > 0) {
p = u_getPropertyEnum(pname.data());
@@ -948,7 +948,7 @@ UnicodeSet::applyPropertyAlias(const UnicodeString& prop,
// [:Assigned:]=[:^Cn:]
p = UCHAR_GENERAL_CATEGORY_MASK;
v = U_GC_CN_MASK;
- invert = TRUE;
+ invert = true;
} else {
FAIL(ec);
}
@@ -980,7 +980,7 @@ UBool UnicodeSet::resemblesPropertyPattern(const UnicodeString& pattern,
int32_t pos) {
// Patterns are at least 5 characters long
if ((pos+5) > pattern.length()) {
- return FALSE;
+ return false;
}
// Look for an opening [:, [:^, \p, or \P
@@ -997,8 +997,8 @@ UBool UnicodeSet::resemblesPropertyPattern(const UnicodeString& pattern,
*/
UBool UnicodeSet::resemblesPropertyPattern(RuleCharacterIterator& chars,
int32_t iterOpts) {
- // NOTE: literal will always be FALSE, because we don't parse escapes.
- UBool result = FALSE, literal;
+ // NOTE: literal will always be false, because we don't parse escapes.
+ UBool result = false, literal;
UErrorCode ec = U_ZERO_ERROR;
iterOpts &= ~RuleCharacterIterator::PARSE_ESCAPES;
RuleCharacterIterator::Pos pos;
@@ -1022,9 +1022,9 @@ UnicodeSet& UnicodeSet::applyPropertyPattern(const UnicodeString& pattern,
UErrorCode &ec) {
int32_t pos = ppos.getIndex();
- UBool posix = FALSE; // true for [:pat:], false for \p{pat} \P{pat} \N{pat}
- UBool isName = FALSE; // true for \N{pat}, o/w false
- UBool invert = FALSE;
+ UBool posix = false; // true for [:pat:], false for \p{pat} \P{pat} \N{pat}
+ UBool isName = false; // true for \N{pat}, o/w false
+ UBool invert = false;
if (U_FAILURE(ec)) return *this;
@@ -1036,12 +1036,12 @@ UnicodeSet& UnicodeSet::applyPropertyPattern(const UnicodeString& pattern,
// On entry, ppos should point to one of the following locations:
// Look for an opening [:, [:^, \p, or \P
if (isPOSIXOpen(pattern, pos)) {
- posix = TRUE;
+ posix = true;
pos += 2;
pos = ICU_Utility::skipWhitespace(pattern, pos);
if (pos < pattern.length() && pattern.charAt(pos) == u'^') {
++pos;
- invert = TRUE;
+ invert = true;
}
} else if (isPerlOpen(pattern, pos) || isNameOpen(pattern, pos)) {
UChar c = pattern.charAt(pos+1);
diff --git a/thirdparty/icu4c/common/unisetspan.cpp b/thirdparty/icu4c/common/unisetspan.cpp
index fe0d74f5b2..e4277c5be6 100644
--- a/thirdparty/icu4c/common/unisetspan.cpp
+++ b/thirdparty/icu4c/common/unisetspan.cpp
@@ -98,7 +98,7 @@ public:
i-=capacity;
}
if(list[i]) {
- list[i]=FALSE;
+ list[i]=false;
--length;
}
start=i;
@@ -111,7 +111,7 @@ public:
if(i>=capacity) {
i-=capacity;
}
- list[i]=TRUE;
+ list[i]=true;
++length;
}
@@ -132,7 +132,7 @@ public:
int32_t i=start, result;
while(++i<capacity) {
if(list[i]) {
- list[i]=FALSE;
+ list[i]=false;
--length;
result=i-start;
start=i;
@@ -148,7 +148,7 @@ public:
while(!list[i]) {
++i;
}
- list[i]=FALSE;
+ list[i]=false;
--length;
start=i;
return result+=i;
@@ -226,7 +226,7 @@ UnicodeSetStringSpan::UnicodeSetStringSpan(const UnicodeSet &set,
int32_t stringsLength=strings.size();
int32_t i, spanLength;
- UBool someRelevant=FALSE;
+ UBool someRelevant=false;
for(i=0; i<stringsLength; ++i) {
const UnicodeString &string=*(const UnicodeString *)strings.elementAt(i);
const UChar *s16=string.getBuffer();
@@ -237,9 +237,9 @@ UnicodeSetStringSpan::UnicodeSetStringSpan(const UnicodeSet &set,
UBool thisRelevant;
spanLength=spanSet.span(s16, length16, USET_SPAN_CONTAINED);
if(spanLength<length16) { // Relevant string.
- someRelevant=thisRelevant=TRUE;
+ someRelevant=thisRelevant=true;
} else {
- thisRelevant=FALSE;
+ thisRelevant=false;
}
if((which&UTF16) && length16>maxLength16) {
maxLength16=length16;
@@ -284,7 +284,7 @@ UnicodeSetStringSpan::UnicodeSetStringSpan(const UnicodeSet &set,
} else {
utf8Lengths=(int32_t *)uprv_malloc(allocSize);
if(utf8Lengths==NULL) {
- maxLength16=maxLength8=0; // Prevent usage by making needsStringSpanUTF16/8() return FALSE.
+ maxLength16=maxLength8=0; // Prevent usage by making needsStringSpanUTF16/8() return false.
return; // Out of memory.
}
}
@@ -399,7 +399,7 @@ UnicodeSetStringSpan::UnicodeSetStringSpan(const UnicodeSetStringSpan &otherStri
utf8Lengths(NULL), spanLengths(NULL), utf8(NULL),
utf8Length(otherStringSpan.utf8Length),
maxLength16(otherStringSpan.maxLength16), maxLength8(otherStringSpan.maxLength8),
- all(TRUE) {
+ all(true) {
if(otherStringSpan.pSpanNotSet==&otherStringSpan.spanSet) {
pSpanNotSet=&spanSet;
} else {
@@ -415,7 +415,7 @@ UnicodeSetStringSpan::UnicodeSetStringSpan(const UnicodeSetStringSpan &otherStri
} else {
utf8Lengths=(int32_t *)uprv_malloc(allocSize);
if(utf8Lengths==NULL) {
- maxLength16=maxLength8=0; // Prevent usage by making needsStringSpanUTF16/8() return FALSE.
+ maxLength16=maxLength8=0; // Prevent usage by making needsStringSpanUTF16/8() return false.
return; // Out of memory.
}
}
@@ -454,20 +454,20 @@ static inline UBool
matches16(const UChar *s, const UChar *t, int32_t length) {
do {
if(*s++!=*t++) {
- return FALSE;
+ return false;
}
} while(--length>0);
- return TRUE;
+ return true;
}
static inline UBool
matches8(const uint8_t *s, const uint8_t *t, int32_t length) {
do {
if(*s++!=*t++) {
- return FALSE;
+ return false;
}
} while(--length>0);
- return TRUE;
+ return true;
}
// Compare 16-bit Unicode strings (which may be malformed UTF-16)
diff --git a/thirdparty/icu4c/common/unistr.cpp b/thirdparty/icu4c/common/unistr.cpp
index c18665928d..4125d19472 100644
--- a/thirdparty/icu4c/common/unistr.cpp
+++ b/thirdparty/icu4c/common/unistr.cpp
@@ -197,7 +197,7 @@ UnicodeString::UnicodeString(UChar ch) {
UnicodeString::UnicodeString(UChar32 ch) {
fUnion.fFields.fLengthAndFlags = kShortString;
int32_t i = 0;
- UBool isError = FALSE;
+ UBool isError = false;
U16_APPEND(fUnion.fStackFields.fBuffer, i, US_STACKBUF_SIZE, ch, isError);
// We test isError so that the compiler does not complain that we don't.
// If isError then i==0 which is what we want anyway.
@@ -270,7 +270,7 @@ UnicodeString::UnicodeString(const char *src, int32_t length, EInvariant) {
if(length<0) {
length=(int32_t)uprv_strlen(src);
}
- if(cloneArrayIfNeeded(length, length, FALSE)) {
+ if(cloneArrayIfNeeded(length, length, false)) {
u_charsToUChars(src, getArrayStart(), length);
setLength(length);
} else {
@@ -309,7 +309,7 @@ UnicodeString::UnicodeString(const UnicodeString& that) {
}
UnicodeString::UnicodeString(UnicodeString &&src) U_NOEXCEPT {
- copyFieldsFrom(src, TRUE);
+ copyFieldsFrom(src, true);
}
UnicodeString::UnicodeString(const UnicodeString& that,
@@ -370,7 +370,7 @@ UBool
UnicodeString::allocate(int32_t capacity) {
if(capacity <= US_STACKBUF_SIZE) {
fUnion.fFields.fLengthAndFlags = kShortString;
- return TRUE;
+ return true;
}
if(capacity <= kMaxCapacity) {
++capacity; // for the NUL
@@ -389,13 +389,13 @@ UnicodeString::allocate(int32_t capacity) {
fUnion.fFields.fArray = (UChar *)array;
fUnion.fFields.fCapacity = (int32_t)(numBytes / U_SIZEOF_UCHAR);
fUnion.fFields.fLengthAndFlags = kLongString;
- return TRUE;
+ return true;
}
}
fUnion.fFields.fLengthAndFlags = kIsBogus;
fUnion.fFields.fArray = 0;
fUnion.fFields.fCapacity = 0;
- return FALSE;
+ return false;
}
//========================================
@@ -476,7 +476,7 @@ UnicodeString UnicodeString::fromUTF32(const UChar32 *utf32, int32_t length) {
result.setToBogus();
}
break;
- } while(TRUE);
+ } while(true);
return result;
}
@@ -491,7 +491,7 @@ UnicodeString::operator=(const UnicodeString &src) {
UnicodeString &
UnicodeString::fastCopyFrom(const UnicodeString &src) {
- return copyFrom(src, TRUE);
+ return copyFrom(src, true);
}
UnicodeString &
@@ -576,7 +576,7 @@ UnicodeString &UnicodeString::operator=(UnicodeString &&src) U_NOEXCEPT {
// No explicit check for self move assignment, consistent with standard library.
// Self move assignment causes no crash nor leak but might make the object bogus.
releaseArray();
- copyFieldsFrom(src, TRUE);
+ copyFieldsFrom(src, true);
return *this;
}
@@ -610,9 +610,9 @@ void UnicodeString::copyFieldsFrom(UnicodeString &src, UBool setSrcToBogus) U_NO
void UnicodeString::swap(UnicodeString &other) U_NOEXCEPT {
UnicodeString temp; // Empty short string: Known not to need releaseArray().
// Copy fields without resetting source values in between.
- temp.copyFieldsFrom(*this, FALSE);
- this->copyFieldsFrom(other, FALSE);
- other.copyFieldsFrom(temp, FALSE);
+ temp.copyFieldsFrom(*this, false);
+ this->copyFieldsFrom(other, false);
+ other.copyFieldsFrom(temp, false);
// Set temp to an empty string so that other's memory is not released twice.
temp.fUnion.fFields.fLengthAndFlags = kShortString;
}
@@ -761,7 +761,7 @@ UnicodeString::doCompareCodePointOrder(int32_t start,
srcStart = srcLength = 0;
}
- int32_t diff = uprv_strCompare(getArrayStart() + start, length, (srcChars!=NULL)?(srcChars + srcStart):NULL, srcLength, FALSE, TRUE);
+ int32_t diff = uprv_strCompare(getArrayStart() + start, length, (srcChars!=NULL)?(srcChars + srcStart):NULL, srcLength, false, true);
/* translate the 32-bit result into an 8-bit one */
if(diff!=0) {
return (int8_t)(diff >> 15 | 1);
@@ -921,7 +921,7 @@ UnicodeString::tempSubString(int32_t start, int32_t len) const {
array=fUnion.fStackFields.fBuffer; // anything not NULL because that would make an empty string
len=-2; // bogus result string
}
- return UnicodeString(FALSE, array + start, len);
+ return UnicodeString(false, array + start, len);
}
int32_t
@@ -972,7 +972,7 @@ UnicodeString::toUTF8(ByteSink &sink) const {
if(length16 != 0) {
char stackBuffer[1024];
int32_t capacity = (int32_t)sizeof(stackBuffer);
- UBool utf8IsOwned = FALSE;
+ UBool utf8IsOwned = false;
char *utf8 = sink.GetAppendBuffer(length16 < capacity ? length16 : capacity,
3*length16,
stackBuffer, capacity,
@@ -987,7 +987,7 @@ UnicodeString::toUTF8(ByteSink &sink) const {
if(errorCode == U_BUFFER_OVERFLOW_ERROR) {
utf8 = (char *)uprv_malloc(length8);
if(utf8 != NULL) {
- utf8IsOwned = TRUE;
+ utf8IsOwned = true;
errorCode = U_ZERO_ERROR;
u_strToUTF8WithSub(utf8, length8, &length8,
getBuffer(), length16,
@@ -1225,7 +1225,7 @@ UnicodeString::getTerminatedBuffer() {
if(len < getCapacity()) {
if(fUnion.fFields.fLengthAndFlags & kBufferIsReadonly) {
// If len<capacity on a read-only alias, then array[len] is
- // either the original NUL (if constructed with (TRUE, s, length))
+ // either the original NUL (if constructed with (true, s, length))
// or one of the original string contents characters (if later truncated),
// therefore we can assume that array[len] is initialized memory.
if(array[len] == 0) {
@@ -1375,7 +1375,7 @@ UnicodeString::replace(int32_t start,
UChar32 srcChar) {
UChar buffer[U16_MAX_LENGTH];
int32_t count = 0;
- UBool isError = FALSE;
+ UBool isError = false;
U16_APPEND(buffer, count, U16_MAX_LENGTH, srcChar, isError);
// We test isError so that the compiler does not complain that we don't.
// If isError (srcChar is not a valid code point) then count==0 which means
@@ -1387,7 +1387,7 @@ UnicodeString&
UnicodeString::append(UChar32 srcChar) {
UChar buffer[U16_MAX_LENGTH];
int32_t _length = 0;
- UBool isError = FALSE;
+ UBool isError = false;
U16_APPEND(buffer, _length, U16_MAX_LENGTH, srcChar, isError);
// We test isError so that the compiler does not complain that we don't.
// If isError then _length==0 which turns the doAppend() into a no-op anyway.
@@ -1484,7 +1484,7 @@ UnicodeString::doReplace(int32_t start,
return doReplace(start, length, copy.getArrayStart(), 0, srcLength);
}
- // cloneArrayIfNeeded(doCopyArray=FALSE) may change fArray but will not copy the current contents;
+ // cloneArrayIfNeeded(doCopyArray=false) may change fArray but will not copy the current contents;
// therefore we need to keep the current fArray
UChar oldStackBuffer[US_STACKBUF_SIZE];
if((fUnion.fFields.fLengthAndFlags&kUsingStackBuffer) && (newLength > US_STACKBUF_SIZE)) {
@@ -1497,7 +1497,7 @@ UnicodeString::doReplace(int32_t start,
// clone our array and allocate a bigger array if needed
int32_t *bufferToDelete = 0;
if(!cloneArrayIfNeeded(newLength, getGrowCapacity(newLength),
- FALSE, &bufferToDelete)
+ false, &bufferToDelete)
) {
return *this;
}
@@ -1637,14 +1637,14 @@ UnicodeString::copy(int32_t start, int32_t limit, int32_t dest) {
* so we implement this function here.
*/
UBool Replaceable::hasMetaData() const {
- return TRUE;
+ return true;
}
/**
* Replaceable API
*/
UBool UnicodeString::hasMetaData() const {
- return FALSE;
+ return false;
}
UnicodeString&
@@ -1662,7 +1662,7 @@ UnicodeString::doReverse(int32_t start, int32_t length) {
UChar *left = getArrayStart() + start;
UChar *right = left + length - 1; // -1 for inclusive boundary (length>=2)
UChar swap;
- UBool hasSupplementary = FALSE;
+ UBool hasSupplementary = false;
// Before the loop we know left<right because length>=2.
do {
@@ -1699,7 +1699,7 @@ UnicodeString::padLeading(int32_t targetLength,
{
int32_t oldLength = length();
if(oldLength >= targetLength || !cloneArrayIfNeeded(targetLength)) {
- return FALSE;
+ return false;
} else {
// move contents up by padding width
UChar *array = getArrayStart();
@@ -1711,7 +1711,7 @@ UnicodeString::padLeading(int32_t targetLength,
array[start] = padChar;
}
setLength(targetLength);
- return TRUE;
+ return true;
}
}
@@ -1721,7 +1721,7 @@ UnicodeString::padTrailing(int32_t targetLength,
{
int32_t oldLength = length();
if(oldLength >= targetLength || !cloneArrayIfNeeded(targetLength)) {
- return FALSE;
+ return false;
} else {
// fill in padding character
UChar *array = getArrayStart();
@@ -1730,7 +1730,7 @@ UnicodeString::padTrailing(int32_t targetLength,
array[length] = padChar;
}
setLength(targetLength);
- return TRUE;
+ return true;
}
}
@@ -1800,10 +1800,10 @@ UnicodeString::cloneArrayIfNeeded(int32_t newCapacity,
}
// while a getBuffer(minCapacity) is "open",
- // prevent any modifications of the string by returning FALSE here
+ // prevent any modifications of the string by returning false here
// if the string is bogus, then only an assignment or similar can revive it
if(!isWritable()) {
- return FALSE;
+ return false;
}
/*
@@ -1811,7 +1811,7 @@ UnicodeString::cloneArrayIfNeeded(int32_t newCapacity,
* the buffer is read-only, or
* the buffer is refCounted (shared), and refCount>1, or
* the buffer is too small.
- * Return FALSE if memory could not be allocated.
+ * Return false if memory could not be allocated.
*/
if(forceClone ||
fUnion.fFields.fLengthAndFlags & kBufferIsReadonly ||
@@ -1890,10 +1890,10 @@ UnicodeString::cloneArrayIfNeeded(int32_t newCapacity,
}
fUnion.fFields.fLengthAndFlags = flags;
setToBogus();
- return FALSE;
+ return false;
}
}
- return TRUE;
+ return true;
}
// UnicodeStringAppendable ------------------------------------------------- ***
@@ -1909,7 +1909,7 @@ UBool
UnicodeStringAppendable::appendCodePoint(UChar32 c) {
UChar buffer[U16_MAX_LENGTH];
int32_t cLength = 0;
- UBool isError = FALSE;
+ UBool isError = false;
U16_APPEND(buffer, cLength, U16_MAX_LENGTH, c, isError);
return !isError && str.doAppend(buffer, 0, cLength).isWritable();
}
@@ -1961,10 +1961,10 @@ uhash_compareUnicodeString(const UElement key1, const UElement key2) {
const UnicodeString *str1 = (const UnicodeString*) key1.pointer;
const UnicodeString *str2 = (const UnicodeString*) key2.pointer;
if (str1 == str2) {
- return TRUE;
+ return true;
}
if (str1 == NULL || str2 == NULL) {
- return FALSE;
+ return false;
}
return *str1 == *str2;
}
diff --git a/thirdparty/icu4c/common/unistr_case.cpp b/thirdparty/icu4c/common/unistr_case.cpp
index 2138d60c01..f4c43b4889 100644
--- a/thirdparty/icu4c/common/unistr_case.cpp
+++ b/thirdparty/icu4c/common/unistr_case.cpp
@@ -123,7 +123,7 @@ UnicodeString::caseMap(int32_t caseLocale, uint32_t options, UCASEMAP_BREAK_ITER
capacity = getCapacity();
} else {
// Switch from the read-only alias or shared heap buffer to the stack buffer.
- if (!cloneArrayIfNeeded(US_STACKBUF_SIZE, US_STACKBUF_SIZE, /* doCopyArray= */ FALSE)) {
+ if (!cloneArrayIfNeeded(US_STACKBUF_SIZE, US_STACKBUF_SIZE, /* doCopyArray= */ false)) {
return *this;
}
U_ASSERT(fUnion.fFields.fLengthAndFlags & kUsingStackBuffer);
@@ -132,7 +132,7 @@ UnicodeString::caseMap(int32_t caseLocale, uint32_t options, UCASEMAP_BREAK_ITER
}
#if !UCONFIG_NO_BREAK_ITERATION
if (iter != nullptr) {
- oldString.setTo(FALSE, oldArray, oldLength);
+ oldString.setTo(false, oldArray, oldLength);
iter->setText(oldString);
}
#endif
@@ -158,7 +158,7 @@ UnicodeString::caseMap(int32_t caseLocale, uint32_t options, UCASEMAP_BREAK_ITER
UChar replacementChars[200];
#if !UCONFIG_NO_BREAK_ITERATION
if (iter != nullptr) {
- oldString.setTo(FALSE, oldArray, oldLength);
+ oldString.setTo(false, oldArray, oldLength);
iter->setText(oldString);
}
#endif
@@ -194,7 +194,7 @@ UnicodeString::caseMap(int32_t caseLocale, uint32_t options, UCASEMAP_BREAK_ITER
// and deletes the old array itself after it is done.
// In addition, we are forcing cloneArrayIfNeeded() to always allocate a new array.
int32_t *bufferToDelete = 0;
- if (!cloneArrayIfNeeded(newLength, newLength, FALSE, &bufferToDelete, TRUE)) {
+ if (!cloneArrayIfNeeded(newLength, newLength, false, &bufferToDelete, true)) {
return *this;
}
errorCode = U_ZERO_ERROR;
@@ -241,10 +241,10 @@ uhash_compareCaselessUnicodeString(const UElement key1, const UElement key2) {
const UnicodeString *str1 = (const UnicodeString*) key1.pointer;
const UnicodeString *str2 = (const UnicodeString*) key2.pointer;
if (str1 == str2) {
- return TRUE;
+ return true;
}
if (str1 == NULL || str2 == NULL) {
- return FALSE;
+ return false;
}
return str1->caseCompare(*str2, U_FOLD_CASE_DEFAULT) == 0;
}
diff --git a/thirdparty/icu4c/common/unistr_cnv.cpp b/thirdparty/icu4c/common/unistr_cnv.cpp
index 64d3c16801..e1f60d4487 100644
--- a/thirdparty/icu4c/common/unistr_cnv.cpp
+++ b/thirdparty/icu4c/common/unistr_cnv.cpp
@@ -225,13 +225,13 @@ UnicodeString::extract(char *dest, int32_t destCapacity,
// get the converter
UBool isDefaultConverter;
if(cnv==0) {
- isDefaultConverter=TRUE;
+ isDefaultConverter=true;
cnv=u_getDefaultConverter(&errorCode);
if(U_FAILURE(errorCode)) {
return 0;
}
} else {
- isDefaultConverter=FALSE;
+ isDefaultConverter=false;
ucnv_resetFromUnicode(cnv);
}
@@ -275,7 +275,7 @@ UnicodeString::doExtract(int32_t start, int32_t length,
}
// perform the conversion
- ucnv_fromUnicode(cnv, &dest, destLimit, &src, srcLimit, 0, TRUE, &errorCode);
+ ucnv_fromUnicode(cnv, &dest, destLimit, &src, srcLimit, 0, true, &errorCode);
length=(int32_t)(dest-originalDest);
// if an overflow occurs, then get the preflighting length
@@ -286,7 +286,7 @@ UnicodeString::doExtract(int32_t start, int32_t length,
do {
dest=buffer;
errorCode=U_ZERO_ERROR;
- ucnv_fromUnicode(cnv, &dest, destLimit, &src, srcLimit, 0, TRUE, &errorCode);
+ ucnv_fromUnicode(cnv, &dest, destLimit, &src, srcLimit, 0, true, &errorCode);
length+=(int32_t)(dest-buffer);
} while(errorCode==U_BUFFER_OVERFLOW_ERROR);
}
@@ -322,7 +322,7 @@ UnicodeString::doCodepageCreate(const char *codepageData,
converter = u_getDefaultConverter(&status);
} else if(*codepage == 0) {
// use the "invariant characters" conversion
- if(cloneArrayIfNeeded(dataLength, dataLength, FALSE)) {
+ if(cloneArrayIfNeeded(dataLength, dataLength, false)) {
u_charsToUChars(codepageData, getArrayStart(), dataLength);
setLength(dataLength);
} else {
@@ -379,7 +379,7 @@ UnicodeString::doCodepageCreate(const char *codepageData,
}
// we do not care about the current contents
- UBool doCopyArray = FALSE;
+ UBool doCopyArray = false;
for(;;) {
if(!cloneArrayIfNeeded(arraySize, arraySize, doCopyArray)) {
setToBogus();
@@ -390,7 +390,7 @@ UnicodeString::doCodepageCreate(const char *codepageData,
array = getArrayStart();
myTarget = array + length();
ucnv_toUnicode(converter, &myTarget, array + getCapacity(),
- &mySource, mySourceEnd, 0, TRUE, &status);
+ &mySource, mySourceEnd, 0, true, &status);
// update the conversion parameters
setLength((int32_t)(myTarget - array));
@@ -401,7 +401,7 @@ UnicodeString::doCodepageCreate(const char *codepageData,
status = U_ZERO_ERROR;
// keep the previous conversion results
- doCopyArray = TRUE;
+ doCopyArray = true;
// estimate the new size needed, larger than before
// try 2 UChar's per remaining source byte
diff --git a/thirdparty/icu4c/common/unorm.cpp b/thirdparty/icu4c/common/unorm.cpp
index 2d9f46052f..cf3915c27f 100644
--- a/thirdparty/icu4c/common/unorm.cpp
+++ b/thirdparty/icu4c/common/unorm.cpp
@@ -128,7 +128,7 @@ _iterate(UCharIterator *src, UBool forward,
}
if(pNeededToNormalize!=NULL) {
- *pNeededToNormalize=FALSE;
+ *pNeededToNormalize=false;
}
if(!(forward ? src->hasNext(src) : src->hasPrevious(src))) {
return u_terminateUChars(dest, destCapacity, 0, pErrorCode);
@@ -199,7 +199,7 @@ unorm_previous(UCharIterator *src,
UNormalizationMode mode, int32_t options,
UBool doNormalize, UBool *pNeededToNormalize,
UErrorCode *pErrorCode) {
- return unorm_iterate(src, FALSE,
+ return unorm_iterate(src, false,
dest, destCapacity,
mode, options,
doNormalize, pNeededToNormalize,
@@ -212,7 +212,7 @@ unorm_next(UCharIterator *src,
UNormalizationMode mode, int32_t options,
UBool doNormalize, UBool *pNeededToNormalize,
UErrorCode *pErrorCode) {
- return unorm_iterate(src, TRUE,
+ return unorm_iterate(src, true,
dest, destCapacity,
mode, options,
doNormalize, pNeededToNormalize,
diff --git a/thirdparty/icu4c/common/unormcmp.cpp b/thirdparty/icu4c/common/unormcmp.cpp
index 689b0b53b2..e224190972 100644
--- a/thirdparty/icu4c/common/unormcmp.cpp
+++ b/thirdparty/icu4c/common/unormcmp.cpp
@@ -536,7 +536,7 @@ UBool _normalize(const Normalizer2 *n2, const UChar *s, int32_t length,
// check if s fulfill the conditions
int32_t spanQCYes=n2->spanQuickCheckYes(str, *pErrorCode);
if (U_FAILURE(*pErrorCode)) {
- return FALSE;
+ return false;
}
/*
* ICU 2.4 had a further optimization:
@@ -548,13 +548,13 @@ UBool _normalize(const Normalizer2 *n2, const UChar *s, int32_t length,
*/
if(spanQCYes<str.length()) {
UnicodeString unnormalized=str.tempSubString(spanQCYes);
- normalized.setTo(FALSE, str.getBuffer(), spanQCYes);
+ normalized.setTo(false, str.getBuffer(), spanQCYes);
n2->normalizeSecondAndAppend(normalized, unnormalized, *pErrorCode);
if (U_SUCCESS(*pErrorCode)) {
- return TRUE;
+ return true;
}
}
- return FALSE;
+ return false;
}
U_CAPI int32_t U_EXPORT2
diff --git a/thirdparty/icu4c/common/uprops.cpp b/thirdparty/icu4c/common/uprops.cpp
index 5186a0c75c..26e950b876 100644
--- a/thirdparty/icu4c/common/uprops.cpp
+++ b/thirdparty/icu4c/common/uprops.cpp
@@ -49,7 +49,7 @@ U_NAMESPACE_USE
namespace {
-icu::UInitOnce gLayoutInitOnce = U_INITONCE_INITIALIZER;
+icu::UInitOnce gLayoutInitOnce {};
UDataMemory *gLayoutMemory = nullptr;
UCPTrie *gInpcTrie = nullptr; // Indic_Positional_Category
@@ -76,7 +76,7 @@ UBool U_CALLCONV uprops_cleanup() {
gMaxVoValue = 0;
gLayoutInitOnce.reset();
- return TRUE;
+ return true;
}
UBool U_CALLCONV
@@ -141,7 +141,7 @@ void U_CALLCONV ulayout_load(UErrorCode &errorCode) {
}
UBool ulayout_ensureData(UErrorCode &errorCode) {
- if (U_FAILURE(errorCode)) { return FALSE; }
+ if (U_FAILURE(errorCode)) { return false; }
umtx_initOnce(gLayoutInitOnce, &ulayout_load, errorCode);
return U_SUCCESS(errorCode);
}
@@ -188,7 +188,7 @@ static UBool isJoinControl(const BinaryProperty &/*prop*/, UChar32 c, UProperty
#if UCONFIG_NO_NORMALIZATION
static UBool hasFullCompositionExclusion(const BinaryProperty &, UChar32, UProperty) {
- return FALSE;
+ return false;
}
#else
static UBool hasFullCompositionExclusion(const BinaryProperty &/*prop*/, UChar32 c, UProperty /*which*/) {
@@ -202,7 +202,7 @@ static UBool hasFullCompositionExclusion(const BinaryProperty &/*prop*/, UChar32
// UCHAR_NF*_INERT properties
#if UCONFIG_NO_NORMALIZATION
static UBool isNormInert(const BinaryProperty &, UChar32, UProperty) {
- return FALSE;
+ return false;
}
#else
static UBool isNormInert(const BinaryProperty &/*prop*/, UChar32 c, UProperty which) {
@@ -215,7 +215,7 @@ static UBool isNormInert(const BinaryProperty &/*prop*/, UChar32 c, UProperty wh
#if UCONFIG_NO_NORMALIZATION
static UBool changesWhenCasefolded(const BinaryProperty &, UChar32, UProperty) {
- return FALSE;
+ return false;
}
#else
static UBool changesWhenCasefolded(const BinaryProperty &/*prop*/, UChar32 c, UProperty /*which*/) {
@@ -223,7 +223,7 @@ static UBool changesWhenCasefolded(const BinaryProperty &/*prop*/, UChar32 c, UP
UErrorCode errorCode=U_ZERO_ERROR;
const Normalizer2 *nfcNorm2=Normalizer2::getNFCInstance(errorCode);
if(U_FAILURE(errorCode)) {
- return FALSE;
+ return false;
}
if(nfcNorm2->getDecomposition(c, nfd)) {
/* c has a decomposition */
@@ -237,7 +237,7 @@ static UBool changesWhenCasefolded(const BinaryProperty &/*prop*/, UChar32 c, UP
c=U_SENTINEL;
}
} else if(c<0) {
- return FALSE; /* protect against bad input */
+ return false; /* protect against bad input */
}
if(c>=0) {
/* single code point */
@@ -252,21 +252,21 @@ static UBool changesWhenCasefolded(const BinaryProperty &/*prop*/, UChar32 c, UP
U_FOLD_CASE_DEFAULT, &errorCode);
return (UBool)(U_SUCCESS(errorCode) &&
0!=u_strCompare(nfd.getBuffer(), nfd.length(),
- dest, destLength, FALSE));
+ dest, destLength, false));
}
}
#endif
#if UCONFIG_NO_NORMALIZATION
static UBool changesWhenNFKC_Casefolded(const BinaryProperty &, UChar32, UProperty) {
- return FALSE;
+ return false;
}
#else
static UBool changesWhenNFKC_Casefolded(const BinaryProperty &/*prop*/, UChar32 c, UProperty /*which*/) {
UErrorCode errorCode=U_ZERO_ERROR;
const Normalizer2Impl *kcf=Normalizer2Factory::getNFKC_CFImpl(errorCode);
if(U_FAILURE(errorCode)) {
- return FALSE;
+ return false;
}
UnicodeString src(c);
UnicodeString dest;
@@ -277,8 +277,8 @@ static UBool changesWhenNFKC_Casefolded(const BinaryProperty &/*prop*/, UChar32
// Small destCapacity for NFKC_CF(c).
if(buffer.init(5, errorCode)) {
const UChar *srcArray=src.getBuffer();
- kcf->compose(srcArray, srcArray+src.length(), FALSE,
- TRUE, buffer, errorCode);
+ kcf->compose(srcArray, srcArray+src.length(), false,
+ true, buffer, errorCode);
}
}
return U_SUCCESS(errorCode) && dest!=src;
@@ -287,7 +287,7 @@ static UBool changesWhenNFKC_Casefolded(const BinaryProperty &/*prop*/, UChar32
#if UCONFIG_NO_NORMALIZATION
static UBool isCanonSegmentStarter(const BinaryProperty &, UChar32, UProperty) {
- return FALSE;
+ return false;
}
#else
static UBool isCanonSegmentStarter(const BinaryProperty &/*prop*/, UChar32 c, UProperty /*which*/) {
@@ -416,7 +416,7 @@ u_hasBinaryProperty(UChar32 c, UProperty which) {
/* c is range-checked in the functions that are called from here */
if(which<UCHAR_BINARY_START || UCHAR_BINARY_LIMIT<=which) {
/* not a known binary property */
- return FALSE;
+ return false;
} else {
const BinaryProperty &prop=binProps[which];
return prop.contains(prop, c, which);
@@ -670,7 +670,7 @@ U_CAPI int32_t U_EXPORT2
u_getIntPropertyMaxValue(UProperty which) {
if(which<UCHAR_INT_START) {
if(UCHAR_BINARY_START<=which && which<UCHAR_BINARY_LIMIT) {
- return 1; // maximum TRUE for all binary properties
+ return 1; // maximum true for all binary properties
}
} else if(which<UCHAR_INT_LIMIT) {
const IntProperty &prop=intProps[which-UCHAR_INT_START];
@@ -812,7 +812,7 @@ u_getFC_NFKC_Closure(UChar32 c, UChar *dest, int32_t destCapacity, UErrorCode *p
if(folded1Length>UCASE_MAX_STRING_LENGTH) {
folded1String.setTo(folded1Length);
} else {
- folded1String.setTo(FALSE, folded1, folded1Length);
+ folded1String.setTo(false, folded1, folded1Length);
}
}
UnicodeString kc1=nfkc->normalize(folded1String, *pErrorCode);
diff --git a/thirdparty/icu4c/common/uresbund.cpp b/thirdparty/icu4c/common/uresbund.cpp
index a9c6459418..17c0177a05 100644
--- a/thirdparty/icu4c/common/uresbund.cpp
+++ b/thirdparty/icu4c/common/uresbund.cpp
@@ -49,7 +49,7 @@ TODO: This cache should probably be removed when the deprecated code is
completely removed.
*/
static UHashtable *cache = NULL;
-static icu::UInitOnce gCacheInitOnce = U_INITONCE_INITIALIZER;
+static icu::UInitOnce gCacheInitOnce {};
static UMutex resbMutex;
@@ -85,10 +85,206 @@ static UBool chopLocale(char *name) {
if(i != NULL) {
*i = '\0';
- return TRUE;
+ return true;
}
- return FALSE;
+ return false;
+}
+
+static UBool hasVariant(const char* localeID) {
+ UErrorCode err = U_ZERO_ERROR;
+ int32_t variantLength = uloc_getVariant(localeID, NULL, 0, &err);
+ return variantLength != 0;
+}
+
+// This file contains the tables for doing locale fallback, which are generated
+// by the CLDR-to-ICU process directly from the CLDR data. This file should only
+// ever be included from here.
+#define INCLUDED_FROM_URESBUND_CPP
+#include "localefallback_data.h"
+
+static const char* performFallbackLookup(const char* key,
+ const char* keyStrs,
+ const char* valueStrs,
+ const int32_t* lookupTable,
+ int32_t lookupTableLength) {
+ const int32_t* bottom = lookupTable;
+ const int32_t* top = lookupTable + lookupTableLength;
+
+ while (bottom < top) {
+ // Effectively, divide by 2 and round down to an even index
+ const int32_t* middle = bottom + (((top - bottom) / 4) * 2);
+ const char* entryKey = &(keyStrs[*middle]);
+ int32_t strcmpResult = uprv_strcmp(key, entryKey);
+ if (strcmpResult == 0) {
+ return &(valueStrs[middle[1]]);
+ } else if (strcmpResult < 0) {
+ top = middle;
+ } else {
+ bottom = middle + 2;
+ }
+ }
+ return nullptr;
+}
+
+static CharString getDefaultScript(const CharString& language, const CharString& region) {
+ const char* defaultScript = nullptr;
+ UErrorCode err = U_ZERO_ERROR;
+
+ // the default script will be "Latn" if we don't find the locale ID in the tables
+ CharString result("Latn", err);
+
+ // if we were passed both language and region, make them into a locale ID and look that up in the default
+ // script table
+ if (!region.isEmpty()) {
+ CharString localeID;
+ localeID.append(language, err).append("_", err).append(region, err);
+ if (U_FAILURE(err)) {
+ return result;
+ }
+ defaultScript = performFallbackLookup(localeID.data(), dsLocaleIDChars, scriptCodeChars, defaultScriptTable, UPRV_LENGTHOF(defaultScriptTable));
+ }
+
+ // if we didn't find anything, look up just the language in the default script table
+ if (defaultScript == nullptr) {
+ defaultScript = performFallbackLookup(language.data(), dsLocaleIDChars, scriptCodeChars, defaultScriptTable, UPRV_LENGTHOF(defaultScriptTable));
+ }
+
+ // if either lookup above succeeded, copy the result from "defaultScript" into "result"; otherwise, return "Latn"
+ if (defaultScript != nullptr) {
+ result.clear();
+ result.append(defaultScript, err);
+ }
+ return result;
+}
+
+enum UResOpenType {
+ /**
+ * Open a resource bundle for the locale;
+ * if there is not even a base language bundle, then fall back to the default locale;
+ * if there is no bundle for that either, then load the root bundle.
+ *
+ * This is the default bundle loading behavior.
+ */
+ URES_OPEN_LOCALE_DEFAULT_ROOT,
+ // TODO: ICU ticket #11271 "consistent default locale across locale trees"
+ // Add an option to look at the main locale tree for whether to
+ // fall back to root directly (if the locale has main data) or
+ // fall back to the default locale first (if the locale does not even have main data).
+ /**
+ * Open a resource bundle for the locale;
+ * if there is not even a base language bundle, then load the root bundle;
+ * never fall back to the default locale.
+ *
+ * This is used for algorithms that have good pan-Unicode default behavior,
+ * such as case mappings, collation, and segmentation (BreakIterator).
+ */
+ URES_OPEN_LOCALE_ROOT,
+ /**
+ * Open a resource bundle for the exact bundle name as requested;
+ * no fallbacks, do not load parent bundles.
+ *
+ * This is used for supplemental (non-locale) data.
+ */
+ URES_OPEN_DIRECT
+};
+typedef enum UResOpenType UResOpenType;
+
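// --- Illustration only; not part of the patch. ---
// Rough correspondence to the public C API (an assumption about which entry
// points use which open type, for orientation only): ures_open() opens with
// URES_OPEN_LOCALE_DEFAULT_ROOT semantics and ures_openDirect() with
// URES_OPEN_DIRECT, so the same unknown locale behaves very differently:
#include <unicode/ures.h>
#include <cstdio>

int main() {
    UErrorCode status = U_ZERO_ERROR;
    // Falls back (default locale, then root) and reports a warning rather than failing.
    UResourceBundle* rb = ures_open(nullptr, "xx_YY", &status);
    std::printf("ures_open:       %s\n", u_errorName(status));
    ures_close(rb);

    status = U_ZERO_ERROR;
    // No fallback at all: a missing bundle is a hard U_MISSING_RESOURCE_ERROR.
    rb = ures_openDirect(nullptr, "xx_YY", &status);
    std::printf("ures_openDirect: %s\n", u_errorName(status));
    ures_close(rb);
    return 0;
}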
+/**
+ * Internal function, determines the search path for resource bundle files.
+ * Currently, this function is used only by findFirstExisting() to help search for resource bundle files when a bundle for the specified
+ * locale doesn't exist. The code that supports inheritance of resources between existing resource bundle files continues to
+ * use chopLocale() below.
+ * @param name In-out parameter: On input, the locale ID to get a parent locale ID for (this is a locale's base name, without keywords); on output, the
+ * requested parent locale ID.
+ * @param origName The original locale ID the caller of findFirstExisting() requested. This is the same as `name` on the first call to this function,
+ * but as findFirstExisting() ascends the resource bundle's parent tree, this parameter will continue to be the original locale ID requested.
+ */
+static bool getParentLocaleID(char *name, const char *origName, UResOpenType openType) {
+ // early out if the locale ID has a variant code or ends with _
+ if (name[uprv_strlen(name) - 1] == '_' || hasVariant(name)) {
+ return chopLocale(name);
+ }
+
+ UErrorCode err = U_ZERO_ERROR;
+ const char* tempNamePtr = name;
+ CharString language = ulocimp_getLanguage(tempNamePtr, &tempNamePtr, err);
+ if (*tempNamePtr == '_') {
+ ++tempNamePtr;
+ }
+ CharString script = ulocimp_getScript(tempNamePtr, &tempNamePtr, err);
+ if (*tempNamePtr == '_') {
+ ++tempNamePtr;
+ }
+ CharString region = ulocimp_getCountry(tempNamePtr, &tempNamePtr, err);
+ CharString workingLocale;
+ if (U_FAILURE(err)) {
+ // hopefully this never happens...
+ return chopLocale(name);
+ }
+
+ // if the open type is URES_OPEN_LOCALE_DEFAULT_ROOT, first look the locale ID up in the parent locale table;
+ // if that table specifies a parent for it, return that (we don't do this for the other open types-- if we're not
+ // falling back through the system default locale, we also want to do straight truncation fallback instead
+ // of looking things up in the parent locale table-- see https://www.unicode.org/reports/tr35/tr35.html#Parent_Locales:
+ // "Collation data, however, is an exception...")
+ if (openType == URES_OPEN_LOCALE_DEFAULT_ROOT) {
+ const char* parentID = performFallbackLookup(name, parentLocaleChars, parentLocaleChars, parentLocaleTable, UPRV_LENGTHOF(parentLocaleTable));
+ if (parentID != NULL) {
+ uprv_strcpy(name, parentID);
+ return true;
+ }
+ }
+
+ // if it's not in the parent locale table, figure out the fallback script algorithmically
+ // (see CLDR-15265 for an explanation of the algorithm)
+ if (!script.isEmpty() && !region.isEmpty()) {
+ // if "name" has both script and region, is the script the default script?
+ // - if so, remove it and keep the region
+ // - if not, remove the region and keep the script
+ if (getDefaultScript(language, region) == script.toStringPiece()) {
+ workingLocale.append(language, err).append("_", err).append(region, err);
+ } else {
+ workingLocale.append(language, err).append("_", err).append(script, err);
+ }
+ } else if (!region.isEmpty()) {
+ // if "name" has region but not script, did the original locale ID specify a script?
+ // - if yes, replace the region with the script from the original locale ID
+ // - if no, replace the region with the default script for that language and region
+ UErrorCode err = U_ZERO_ERROR;
+ tempNamePtr = origName;
+ CharString origNameLanguage = ulocimp_getLanguage(tempNamePtr, &tempNamePtr, err);
+ if (*tempNamePtr == '_') {
+ ++tempNamePtr;
+ }
+ CharString origNameScript = ulocimp_getScript(origName, nullptr, err);
+ if (!origNameScript.isEmpty()) {
+ workingLocale.append(language, err).append("_", err).append(origNameScript, err);
+ } else {
+ workingLocale.append(language, err).append("_", err).append(getDefaultScript(language, region), err);
+ }
+ } else if (!script.isEmpty()) {
+ // if "name" has script but not region (and our open type if URES_OPEN_LOCALE_DEFAULT_ROOT), is the script
+ // the default script for the language?
+ // - if so, remove it from the locale ID
+ // - if not, return false to continue up the chain
+ // (we don't do this for other open types for the same reason we don't look things up in the parent
+ // locale table for other open types-- see the reference to UTS #35 above)
+ if (openType != URES_OPEN_LOCALE_DEFAULT_ROOT || getDefaultScript(language, CharString()) == script.toStringPiece()) {
+ workingLocale.append(language, err);
+ } else {
+ return false;
+ }
+ } else {
+ // if "name" just contains a language code, return false so the calling code falls back to "root"
+ return false;
+ }
+ if (U_SUCCESS(err) && !workingLocale.isEmpty()) {
+ uprv_strcpy(name, workingLocale.data());
+ return true;
+ } else {
+ return false;
+ }
}
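// --- Illustration only; not part of the patch. ---
// A self-contained sketch of the three script/region cases handled by
// getParentLocaleID() above, using std::string instead of CharString and a
// made-up defaultScript() stand-in for the generated data (the real function
// also consults the CLDR parent-locale table first). The locale data below is
// illustrative, not authoritative.
#include <iostream>
#include <string>

static std::string defaultScript(const std::string& lang, const std::string& region) {
    // pretend the generated tables say: zh_TW -> Hant, sr -> Cyrl, everything else -> Latn
    if (lang == "zh" && region == "TW") return "Hant";
    if (lang == "sr") return "Cyrl";
    return "Latn";
}

// Returns the truncation-fallback parent, or "" to mean "fall back to root".
static std::string parentOf(const std::string& lang, const std::string& script,
                            const std::string& region, const std::string& origScript) {
    if (!script.empty() && !region.empty()) {
        // drop whichever of script/region is redundant
        return defaultScript(lang, region) == script ? lang + "_" + region
                                                     : lang + "_" + script;
    }
    if (!region.empty()) {
        // re-add the script the caller originally asked for (or the default one)
        return lang + "_" + (origScript.empty() ? defaultScript(lang, region) : origScript);
    }
    if (!script.empty()) {
        // keep the bare language only if the script was the default one anyway
        return defaultScript(lang, "") == script ? lang : "";
    }
    return "";
}

int main() {
    std::cout << parentOf("zh", "Hant", "TW", "Hant") << "\n";  // zh_TW (Hant is the default there)
    std::cout << parentOf("zh", "", "TW", "Hant") << "\n";      // zh_Hant (original script restored)
    std::cout << parentOf("sr", "Latn", "", "Latn") << "\n";    // "" -> root (Latn is not sr's default)
    return 0;
}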
/**
@@ -199,7 +395,7 @@ static int32_t ures_flushCache()
}
do {
- deletedMore = FALSE;
+ deletedMore = false;
/*creates an enumeration to iterate through every element in the table */
pos = UHASH_FIRST;
while ((e = uhash_nextElement(cache, &pos)) != NULL)
@@ -216,7 +412,7 @@ static int32_t ures_flushCache()
if (resB->fCountExisting == 0) {
rbDeletedNum++;
- deletedMore = TRUE;
+ deletedMore = true;
uhash_removeElement(cache, e);
free_entry(resB);
}
@@ -234,7 +430,7 @@ static int32_t ures_flushCache()
#include <stdio.h>
U_CAPI UBool U_EXPORT2 ures_dumpCacheContents(void) {
- UBool cacheNotEmpty = FALSE;
+ UBool cacheNotEmpty = false;
int32_t pos = UHASH_FIRST;
const UHashElement *e;
UResourceDataEntry *resB;
@@ -242,11 +438,11 @@ U_CAPI UBool U_EXPORT2 ures_dumpCacheContents(void) {
Mutex lock(&resbMutex);
if (cache == NULL) {
fprintf(stderr,"%s:%d: RB Cache is NULL.\n", __FILE__, __LINE__);
- return FALSE;
+ return false;
}
while ((e = uhash_nextElement(cache, &pos)) != NULL) {
- cacheNotEmpty=TRUE;
+ cacheNotEmpty=true;
resB = (UResourceDataEntry *) e->value.pointer;
fprintf(stderr,"%s:%d: RB Cache: Entry @0x%p, refcount %d, name %s:%s. Pool 0x%p, alias 0x%p, parent 0x%p\n",
__FILE__, __LINE__,
@@ -272,7 +468,7 @@ static UBool U_CALLCONV ures_cleanup(void)
cache = NULL;
}
gCacheInitOnce.reset();
- return TRUE;
+ return true;
}
/** INTERNAL: Initializes the cache for resources */
@@ -320,7 +516,7 @@ static UResourceDataEntry *init_entry(const char *localeID, const char *path, UE
const char *name;
char aliasName[100] = { 0 };
int32_t aliasLen = 0;
- /*UBool isAlias = FALSE;*/
+ /*UBool isAlias = false;*/
/*UHashTok hashkey; */
if(U_FAILURE(*status)) {
@@ -463,13 +659,15 @@ getPoolEntry(const char *path, UErrorCode *status) {
/* INTERNAL: */
/* CAUTION: resbMutex must be locked when calling this function! */
static UResourceDataEntry *
-findFirstExisting(const char* path, char* name, const char* defaultLocale,
- UBool *isRoot, UBool *hasChopped, UBool *isDefault, UErrorCode* status) {
+findFirstExisting(const char* path, char* name, const char* defaultLocale, UResOpenType openType,
+ UBool *isRoot, UBool *foundParent, UBool *isDefault, UErrorCode* status) {
UResourceDataEntry *r = NULL;
- UBool hasRealData = FALSE;
- *hasChopped = TRUE; /* we're starting with a fresh name */
+ UBool hasRealData = false;
+ *foundParent = true; /* we're starting with a fresh name */
+ char origName[ULOC_FULLNAME_CAPACITY];
- while(*hasChopped && !hasRealData) {
+ uprv_strcpy(origName, name);
+ while(*foundParent && !hasRealData) {
r = init_entry(name, path, status);
/* Null pointer test */
if (U_FAILURE(*status)) {
@@ -494,8 +692,14 @@ findFirstExisting(const char* path, char* name, const char* defaultLocale,
*isRoot = (UBool)(uprv_strcmp(name, kRootLocaleName) == 0);
/*Fallback data stuff*/
- *hasChopped = chopLocale(name);
- if (*hasChopped && *name == '\0') {
+ if (!hasRealData) {
+ *foundParent = getParentLocaleID(name, origName, openType);
+ } else {
+ // we've already found a real resource file; what we return to the caller is the parent
+ // locale ID for inheritance, which should come from chopLocale(), not getParentLocaleID()
+ *foundParent = chopLocale(name);
+ }
+ if (*foundParent && *name == '\0') {
uprv_strcpy(name, "und");
}
}
@@ -513,13 +717,13 @@ static void ures_setIsStackObject( UResourceBundle* resB, UBool state) {
}
static UBool ures_isStackObject(const UResourceBundle* resB) {
- return((resB->fMagic1 == MAGIC1 && resB->fMagic2 == MAGIC2)?FALSE:TRUE);
+ return((resB->fMagic1 == MAGIC1 && resB->fMagic2 == MAGIC2)?false:true);
}
U_CFUNC void ures_initStackObject(UResourceBundle* resB) {
uprv_memset(resB, 0, sizeof(UResourceBundle));
- ures_setIsStackObject(resB, TRUE);
+ ures_setIsStackObject(resB, true);
}
U_NAMESPACE_BEGIN
@@ -538,8 +742,8 @@ static UBool // returns U_SUCCESS(*status)
loadParentsExceptRoot(UResourceDataEntry *&t1,
char name[], int32_t nameCapacity,
UBool usingUSRData, char usrDataPath[], UErrorCode *status) {
- if (U_FAILURE(*status)) { return FALSE; }
- UBool checkParent = TRUE;
+ if (U_FAILURE(*status)) { return false; }
+ UBool checkParent = true;
while (checkParent && t1->fParent == NULL && !t1->fData.noFallback &&
res_getResource(&t1->fData,"%%ParentIsRoot") == RES_BOGUS) {
Resource parentRes = res_getResource(&t1->fData, "%%Parent");
@@ -550,7 +754,7 @@ loadParentsExceptRoot(UResourceDataEntry *&t1,
if(parentLocaleName != NULL && 0 < parentLocaleLen && parentLocaleLen < nameCapacity) {
u_UCharsToChars(parentLocaleName, name, parentLocaleLen + 1);
if (uprv_strcmp(name, kRootLocaleName) == 0) {
- return TRUE;
+ return true;
}
}
}
@@ -559,7 +763,7 @@ loadParentsExceptRoot(UResourceDataEntry *&t1,
UResourceDataEntry *t2 = init_entry(name, t1->fPath, &parentStatus);
if (U_FAILURE(parentStatus)) {
*status = parentStatus;
- return FALSE;
+ return false;
}
UResourceDataEntry *u2 = NULL;
UErrorCode usrStatus = U_ZERO_ERROR;
@@ -568,7 +772,7 @@ loadParentsExceptRoot(UResourceDataEntry *&t1,
// If we failed due to out-of-memory, report that to the caller and exit early.
if (usrStatus == U_MEMORY_ALLOCATION_ERROR) {
*status = usrStatus;
- return FALSE;
+ return false;
}
}
@@ -585,65 +789,33 @@ loadParentsExceptRoot(UResourceDataEntry *&t1,
t1 = t2;
checkParent = chopLocale(name) || mayHaveParent(name);
}
- return TRUE;
+ return true;
}
static UBool // returns U_SUCCESS(*status)
insertRootBundle(UResourceDataEntry *&t1, UErrorCode *status) {
- if (U_FAILURE(*status)) { return FALSE; }
+ if (U_FAILURE(*status)) { return false; }
UErrorCode parentStatus = U_ZERO_ERROR;
UResourceDataEntry *t2 = init_entry(kRootLocaleName, t1->fPath, &parentStatus);
if (U_FAILURE(parentStatus)) {
*status = parentStatus;
- return FALSE;
+ return false;
}
t1->fParent = t2;
t1 = t2;
- return TRUE;
+ return true;
}
-enum UResOpenType {
- /**
- * Open a resource bundle for the locale;
- * if there is not even a base language bundle, then fall back to the default locale;
- * if there is no bundle for that either, then load the root bundle.
- *
- * This is the default bundle loading behavior.
- */
- URES_OPEN_LOCALE_DEFAULT_ROOT,
- // TODO: ICU ticket #11271 "consistent default locale across locale trees"
- // Add an option to look at the main locale tree for whether to
- // fall back to root directly (if the locale has main data) or
- // fall back to the default locale first (if the locale does not even have main data).
- /**
- * Open a resource bundle for the locale;
- * if there is not even a base language bundle, then load the root bundle;
- * never fall back to the default locale.
- *
- * This is used for algorithms that have good pan-Unicode default behavior,
- * such as case mappings, collation, and segmentation (BreakIterator).
- */
- URES_OPEN_LOCALE_ROOT,
- /**
- * Open a resource bundle for the exact bundle name as requested;
- * no fallbacks, do not load parent bundles.
- *
- * This is used for supplemental (non-locale) data.
- */
- URES_OPEN_DIRECT
-};
-typedef enum UResOpenType UResOpenType;
-
static UResourceDataEntry *entryOpen(const char* path, const char* localeID,
UResOpenType openType, UErrorCode* status) {
U_ASSERT(openType != URES_OPEN_DIRECT);
UErrorCode intStatus = U_ZERO_ERROR;
UResourceDataEntry *r = NULL;
UResourceDataEntry *t1 = NULL;
- UBool isDefault = FALSE;
- UBool isRoot = FALSE;
- UBool hasRealData = FALSE;
- UBool hasChopped = TRUE;
+ UBool isDefault = false;
+ UBool isRoot = false;
+ UBool hasRealData = false;
+ UBool hasChopped = true;
UBool usingUSRData = U_USE_USRDATA && ( path == NULL || uprv_strncmp(path,U_ICUDATA_NAME,8) == 0);
char name[ULOC_FULLNAME_CAPACITY];
@@ -676,7 +848,7 @@ static UResourceDataEntry *entryOpen(const char* path, const char* localeID,
Mutex lock(&resbMutex); // Lock resbMutex until the end of this function.
/* We're going to skip all the locales that do not have any data */
- r = findFirstExisting(path, name, defaultLocale, &isRoot, &hasChopped, &isDefault, &intStatus);
+ r = findFirstExisting(path, name, defaultLocale, openType, &isRoot, &hasChopped, &isDefault, &intStatus);
// If we failed due to out-of-memory, report the failure and exit early.
if (intStatus == U_MEMORY_ALLOCATION_ERROR) {
@@ -686,7 +858,7 @@ static UResourceDataEntry *entryOpen(const char* path, const char* localeID,
if(r != NULL) { /* if there is one real locale, we can look for parents. */
t1 = r;
- hasRealData = TRUE;
+ hasRealData = true;
if ( usingUSRData ) { /* This code inserts user override data into the inheritance chain */
UErrorCode usrStatus = U_ZERO_ERROR;
UResourceDataEntry *u1 = init_entry(t1->fName, usrDataPath, &usrStatus);
@@ -717,7 +889,7 @@ static UResourceDataEntry *entryOpen(const char* path, const char* localeID,
if(r==NULL && openType == URES_OPEN_LOCALE_DEFAULT_ROOT && !isDefault && !isRoot) {
/* insert default locale */
uprv_strcpy(name, defaultLocale);
- r = findFirstExisting(path, name, defaultLocale, &isRoot, &hasChopped, &isDefault, &intStatus);
+ r = findFirstExisting(path, name, defaultLocale, openType, &isRoot, &hasChopped, &isDefault, &intStatus);
// If we failed due to out-of-memory, report the failure and exit early.
if (intStatus == U_MEMORY_ALLOCATION_ERROR) {
*status = intStatus;
@@ -726,8 +898,8 @@ static UResourceDataEntry *entryOpen(const char* path, const char* localeID,
intStatus = U_USING_DEFAULT_WARNING;
if(r != NULL) { /* the default locale exists */
t1 = r;
- hasRealData = TRUE;
- isDefault = TRUE;
+ hasRealData = true;
+ isDefault = true;
// TODO: Why not if (usingUSRData) { ... } like in the non-default-locale code path?
if ((hasChopped || mayHaveParent(name)) && !isRoot) {
if (!loadParentsExceptRoot(t1, name, UPRV_LENGTHOF(name), usingUSRData, usrDataPath, status)) {
@@ -741,7 +913,7 @@ static UResourceDataEntry *entryOpen(const char* path, const char* localeID,
/* present */
if(r == NULL) {
uprv_strcpy(name, kRootLocaleName);
- r = findFirstExisting(path, name, defaultLocale, &isRoot, &hasChopped, &isDefault, &intStatus);
+ r = findFirstExisting(path, name, defaultLocale, openType, &isRoot, &hasChopped, &isDefault, &intStatus);
// If we failed due to out-of-memory, report the failure and exit early.
if (intStatus == U_MEMORY_ALLOCATION_ERROR) {
*status = intStatus;
@@ -750,7 +922,7 @@ static UResourceDataEntry *entryOpen(const char* path, const char* localeID,
if(r != NULL) {
t1 = r;
intStatus = U_USING_DEFAULT_WARNING;
- hasRealData = TRUE;
+ hasRealData = true;
} else { /* we don't even have the root locale */
*status = U_MISSING_RESOURCE_ERROR;
goto finish;
@@ -826,7 +998,7 @@ entryOpenDirect(const char* path, const char* localeID, UErrorCode* status) {
char name[ULOC_FULLNAME_CAPACITY];
uprv_strcpy(name, localeID);
if(!chopLocale(name) || uprv_strcmp(name, kRootLocaleName) == 0 ||
- loadParentsExceptRoot(t1, name, UPRV_LENGTHOF(name), FALSE, NULL, status)) {
+ loadParentsExceptRoot(t1, name, UPRV_LENGTHOF(name), false, NULL, status)) {
if(uprv_strcmp(t1->fName, kRootLocaleName) != 0 && t1->fParent == NULL) {
insertRootBundle(t1, status);
}
@@ -956,7 +1128,7 @@ ures_closeBundle(UResourceBundle* resB, UBool freeBundleObj)
}
ures_freeResPath(resB);
- if(ures_isStackObject(resB) == FALSE && freeBundleObj) {
+ if(ures_isStackObject(resB) == false && freeBundleObj) {
uprv_free(resB);
}
#if 0 /*U_DEBUG*/
@@ -971,7 +1143,7 @@ ures_closeBundle(UResourceBundle* resB, UBool freeBundleObj)
U_CAPI void U_EXPORT2
ures_close(UResourceBundle* resB)
{
- ures_closeBundle(resB, TRUE);
+ ures_closeBundle(resB, true);
}
namespace {
@@ -1237,7 +1409,7 @@ UResourceBundle *init_resb_result(
*status = U_MEMORY_ALLOCATION_ERROR;
return NULL;
}
- ures_setIsStackObject(resB, FALSE);
+ ures_setIsStackObject(resB, false);
resB->fResPath = NULL;
resB->fResPathLen = 0;
} else {
@@ -1254,7 +1426,7 @@ UResourceBundle *init_resb_result(
treated the same
*/
/*
- if(ures_isStackObject(resB) != FALSE) {
+ if(ures_isStackObject(resB) != false) {
ures_initStackObject(resB);
}
*/
@@ -1264,8 +1436,8 @@ UResourceBundle *init_resb_result(
}
resB->fData = dataEntry;
entryIncrease(resB->fData);
- resB->fHasFallback = FALSE;
- resB->fIsTopLevel = FALSE;
+ resB->fHasFallback = false;
+ resB->fIsTopLevel = false;
resB->fIndex = -1;
resB->fKey = key;
resB->fValidLocaleDataEntry = validLocaleDataEntry;
@@ -1318,7 +1490,7 @@ UResourceBundle *ures_copyResb(UResourceBundle *r, const UResourceBundle *origin
}
if(original != NULL) {
if(r == NULL) {
- isStackObject = FALSE;
+ isStackObject = false;
r = (UResourceBundle *)uprv_malloc(sizeof(UResourceBundle));
/* test for NULL */
if (r == NULL) {
@@ -1327,7 +1499,7 @@ UResourceBundle *ures_copyResb(UResourceBundle *r, const UResourceBundle *origin
}
} else {
isStackObject = ures_isStackObject(r);
- ures_closeBundle(r, FALSE);
+ ures_closeBundle(r, false);
}
uprv_memcpy(r, original, sizeof(UResourceBundle));
r->fResPath = NULL;
@@ -1409,7 +1581,7 @@ ures_toUTF8String(const UChar *s16, int32_t length16,
* may store UTF-8 natively.
* (In which case dest would not be used at all.)
*
- * We do not do this if forceCopy=TRUE because then the caller
+ * We do not do this if forceCopy=true because then the caller
* expects the string to start exactly at dest.
*
* The test above for <= 0x2aaaaaaa prevents overflows.
@@ -1553,7 +1725,7 @@ U_CAPI void U_EXPORT2 ures_resetIterator(UResourceBundle *resB){
U_CAPI UBool U_EXPORT2 ures_hasNext(const UResourceBundle *resB) {
if(resB == NULL) {
- return FALSE;
+ return false;
}
return (UBool)(resB->fIndex < resB->fSize-1);
}
@@ -2133,7 +2305,7 @@ void getAllItemsWithFallback(
parentRef.fData = parentEntry;
parentRef.fValidLocaleDataEntry = bundle->fValidLocaleDataEntry;
parentRef.fHasFallback = !parentRef.getResData().noFallback;
- parentRef.fIsTopLevel = TRUE;
+ parentRef.fIsTopLevel = true;
parentRef.fRes = parentRef.getResData().rootRes;
parentRef.fSize = res_countArrayItems(&parentRef.getResData(), parentRef.fRes);
parentRef.fIndex = -1;
@@ -2276,7 +2448,7 @@ U_CAPI UResourceBundle* U_EXPORT2 ures_getByKey(const UResourceBundle *resB, con
res = res_getTableItemByKey(&resB->getResData(), resB->fRes, &t, &key);
if(res == RES_BOGUS) {
key = inKey;
- if(resB->fHasFallback == TRUE) {
+ if(resB->fHasFallback == true) {
dataEntry = getFallbackData(resB, &key, &res, status);
if(U_SUCCESS(*status)) {
/* check if resB->fResPath gives the right name here */
@@ -2294,7 +2466,7 @@ U_CAPI UResourceBundle* U_EXPORT2 ures_getByKey(const UResourceBundle *resB, con
#if 0
/* this is a kind of TODO item. If we have an array with an index table, we could do this. */
/* not currently */
- else if(RES_GET_TYPE(resB->fRes) == URES_ARRAY && resB->fHasFallback == TRUE) {
+ else if(RES_GET_TYPE(resB->fRes) == URES_ARRAY && resB->fHasFallback == true) {
/* here should go a first attempt to locate the key using index table */
dataEntry = getFallbackData(resB, &key, &res, status);
if(U_SUCCESS(*status)) {
@@ -2331,7 +2503,7 @@ U_CAPI const UChar* U_EXPORT2 ures_getStringByKey(const UResourceBundle *resB, c
if(res == RES_BOGUS) {
key = inKey;
- if(resB->fHasFallback == TRUE) {
+ if(resB->fHasFallback == true) {
dataEntry = getFallbackData(resB, &key, &res, status);
if(U_SUCCESS(*status)) {
switch (RES_GET_TYPE(res)) {
@@ -2376,7 +2548,7 @@ U_CAPI const UChar* U_EXPORT2 ures_getStringByKey(const UResourceBundle *resB, c
#if 0
/* this is a kind of TODO item. If we have an array with an index table, we could do this. */
/* not currently */
- else if(RES_GET_TYPE(resB->fRes) == URES_ARRAY && resB->fHasFallback == TRUE) {
+ else if(RES_GET_TYPE(resB->fRes) == URES_ARRAY && resB->fHasFallback == true) {
/* here should go a first attempt to locate the key using index table */
dataEntry = getFallbackData(resB, &key, &res, status);
if(U_SUCCESS(*status)) {
@@ -2510,17 +2682,17 @@ ures_openWithType(UResourceBundle *r, const char* path, const char* localeID,
*status = U_MEMORY_ALLOCATION_ERROR;
return NULL;
}
- isStackObject = FALSE;
+ isStackObject = false;
} else { // fill-in
isStackObject = ures_isStackObject(r);
- ures_closeBundle(r, FALSE);
+ ures_closeBundle(r, false);
}
uprv_memset(r, 0, sizeof(UResourceBundle));
ures_setIsStackObject(r, isStackObject);
r->fValidLocaleDataEntry = r->fData = entry;
r->fHasFallback = openType != URES_OPEN_DIRECT && !r->getResData().noFallback;
- r->fIsTopLevel = TRUE;
+ r->fIsTopLevel = true;
r->fRes = r->getResData().rootRes;
r->fSize = res_countArrayItems(&r->getResData(), r->fRes);
r->fIndex = -1;
@@ -2795,10 +2967,10 @@ static UBool isLocaleInList(UEnumeration *locEnum, const char *locToSearch, UErr
const char *loc;
while ((loc = uenum_next(locEnum, NULL, status)) != NULL) {
if (uprv_strcmp(loc, locToSearch) == 0) {
- return TRUE;
+ return true;
}
}
- return FALSE;
+ return false;
}
U_CAPI int32_t U_EXPORT2
@@ -2836,7 +3008,7 @@ ures_getFunctionalEquivalent(char *result, int32_t resultCapacity,
if(isAvailable) {
UEnumeration *locEnum = ures_openAvailableLocales(path, &subStatus);
- *isAvailable = TRUE;
+ *isAvailable = true;
if (U_SUCCESS(subStatus)) {
*isAvailable = isLocaleInList(locEnum, parent, &subStatus);
}
@@ -2854,7 +3026,7 @@ ures_getFunctionalEquivalent(char *result, int32_t resultCapacity,
if(((subStatus == U_USING_FALLBACK_WARNING) ||
(subStatus == U_USING_DEFAULT_WARNING)) && isAvailable)
{
- *isAvailable = FALSE;
+ *isAvailable = false;
}
isAvailable = NULL; /* only want to set this the first time around */
@@ -2910,7 +3082,7 @@ ures_getFunctionalEquivalent(char *result, int32_t resultCapacity,
subStatus = U_ZERO_ERROR;
res = ures_open(path, parent, &subStatus);
if((subStatus == U_USING_FALLBACK_WARNING) && isAvailable) {
- *isAvailable = FALSE;
+ *isAvailable = false;
}
isAvailable = NULL; /* only want to set this the first time around */
@@ -2991,7 +3163,7 @@ ures_getFunctionalEquivalent(char *result, int32_t resultCapacity,
subStatus = U_ZERO_ERROR;
res = ures_open(path, parent, &subStatus);
if((subStatus == U_USING_FALLBACK_WARNING) && isAvailable) {
- *isAvailable = FALSE;
+ *isAvailable = false;
}
isAvailable = NULL; /* only want to set this the first time around */
@@ -3224,32 +3396,32 @@ ures_equal(const UResourceBundle* res1, const UResourceBundle* res2){
return (res1->fKey==res2->fKey);
}else{
if(uprv_strcmp(res1->fKey, res2->fKey)!=0){
- return FALSE;
+ return false;
}
}
if(uprv_strcmp(res1->fData->fName, res2->fData->fName)!=0){
- return FALSE;
+ return false;
}
if(res1->fData->fPath == NULL|| res2->fData->fPath==NULL){
return (res1->fData->fPath == res2->fData->fPath);
}else{
if(uprv_strcmp(res1->fData->fPath, res2->fData->fPath)!=0){
- return FALSE;
+ return false;
}
}
if(uprv_strcmp(res1->fData->fParent->fName, res2->fData->fParent->fName)!=0){
- return FALSE;
+ return false;
}
if(uprv_strcmp(res1->fData->fParent->fPath, res2->fData->fParent->fPath)!=0){
- return FALSE;
+ return false;
}
if(uprv_strncmp(res1->fResPath, res2->fResPath, res1->fResPathLen)!=0){
- return FALSE;
+ return false;
}
if(res1->fRes != res2->fRes){
- return FALSE;
+ return false;
}
- return TRUE;
+ return true;
}
U_CAPI UResourceBundle* U_EXPORT2
ures_clone(const UResourceBundle* res, UErrorCode* status){
diff --git a/thirdparty/icu4c/common/uresdata.cpp b/thirdparty/icu4c/common/uresdata.cpp
index 9af081be40..a1222d415c 100644
--- a/thirdparty/icu4c/common/uresdata.cpp
+++ b/thirdparty/icu4c/common/uresdata.cpp
@@ -234,7 +234,7 @@ res_init(ResourceData *pResData,
* formatVersion 1: compare key strings in native-charset order
* formatVersion 2 and up: compare key strings in ASCII order
*/
- pResData->useNativeStrcmp=TRUE;
+ pResData->useNativeStrcmp=true;
}
}
@@ -377,10 +377,10 @@ UBool isNoInheritanceMarker(const ResourceData *pResData, Resource res) {
return p[1] == 0x2205 && p[2] == 0x2205 && p[3] == 0x2205;
} else {
// Assume that the string has not been stored with more length units than necessary.
- return FALSE;
+ return false;
}
}
- return FALSE;
+ return false;
}
int32_t getStringArray(const ResourceData *pResData, const icu::ResourceArray &array,
@@ -409,7 +409,7 @@ int32_t getStringArray(const ResourceData *pResData, const icu::ResourceArray &a
errorCode = U_RESOURCE_TYPE_MISMATCH;
return 0;
}
- dest[i].setTo(TRUE, s, sLength);
+ dest[i].setTo(true, s, sLength);
}
return length;
}
@@ -660,7 +660,7 @@ int32_t ResourceDataValue::getStringArrayOrStringAsArray(UnicodeString *dest, in
int32_t sLength;
const UChar *s = res_getString(fTraceInfo, &getData(), res, &sLength);
if(s != NULL) {
- dest[0].setTo(TRUE, s, sLength);
+ dest[0].setTo(true, s, sLength);
return 1;
}
errorCode = U_RESOURCE_TYPE_MISMATCH;
@@ -675,7 +675,7 @@ UnicodeString ResourceDataValue::getStringOrFirstOfArray(UErrorCode &errorCode)
int32_t sLength;
const UChar *s = res_getString(fTraceInfo, &getData(), res, &sLength);
if(s != NULL) {
- us.setTo(TRUE, s, sLength);
+ us.setTo(true, s, sLength);
return us;
}
ResourceArray array = getArray(errorCode);
@@ -686,7 +686,7 @@ UnicodeString ResourceDataValue::getStringOrFirstOfArray(UErrorCode &errorCode)
// Tracing is already performed above (unimportant for trace that this is an array)
s = res_getStringNoTrace(&getData(), array.internalGetResource(&getData(), 0), &sLength);
if(s != NULL) {
- us.setTo(TRUE, s, sLength);
+ us.setTo(true, s, sLength);
return us;
}
}
@@ -837,9 +837,9 @@ UBool icu::ResourceTable::getKeyAndValue(int32_t i,
// alive for the duration that fields are being read from it
// (including nested fields).
rdValue.setResource(res, ResourceTracer(fTraceInfo, key));
- return TRUE;
+ return true;
}
- return FALSE;
+ return false;
}
UBool icu::ResourceTable::findValue(const char *key, ResourceValue &value) const {
@@ -860,9 +860,9 @@ UBool icu::ResourceTable::findValue(const char *key, ResourceValue &value) const
}
// Same note about lifetime as in getKeyAndValue().
rdValue.setResource(res, ResourceTracer(fTraceInfo, key));
- return TRUE;
+ return true;
}
- return FALSE;
+ return false;
}
U_CAPI Resource U_EXPORT2
@@ -912,9 +912,9 @@ UBool icu::ResourceArray::getValue(int32_t i, icu::ResourceValue &value) const {
rdValue.setResource(
internalGetResource(&rdValue.getData(), i),
ResourceTracer(fTraceInfo, i));
- return TRUE;
+ return true;
}
- return FALSE;
+ return false;
}
U_CFUNC Resource
@@ -1222,7 +1222,7 @@ ures_swapResource(const UDataSwapper *ds,
}
uprv_sortArray(pTempTable->rows, count, sizeof(Row),
ures_compareRows, pTempTable->keyChars,
- FALSE, pErrorCode);
+ false, pErrorCode);
if(U_FAILURE(*pErrorCode)) {
udata_printError(ds, "ures_swapResource(table res=%08x).uprv_sortArray(%d items) failed\n",
res, count);
diff --git a/thirdparty/icu4c/common/usc_impl.cpp b/thirdparty/icu4c/common/usc_impl.cpp
index 111029b974..a4e2fc6069 100644
--- a/thirdparty/icu4c/common/usc_impl.cpp
+++ b/thirdparty/icu4c/common/usc_impl.cpp
@@ -261,7 +261,7 @@ uscript_nextRun(UScriptRun *scriptRun, int32_t *pRunStart, int32_t *pRunLimit, U
/* if we've fallen off the end of the text, we're done */
if (scriptRun == NULL || scriptRun->scriptLimit >= scriptRun->textLength) {
- return FALSE;
+ return false;
}
SYNC_FIXUP(scriptRun);
@@ -357,5 +357,5 @@ uscript_nextRun(UScriptRun *scriptRun, int32_t *pRunStart, int32_t *pRunLimit, U
*pRunScript = scriptRun->scriptCode;
}
- return TRUE;
+ return true;
}
diff --git a/thirdparty/icu4c/common/uscript.cpp b/thirdparty/icu4c/common/uscript.cpp
index f8bd7e7fdd..1ededbb268 100644
--- a/thirdparty/icu4c/common/uscript.cpp
+++ b/thirdparty/icu4c/common/uscript.cpp
@@ -113,14 +113,14 @@ uscript_getCode(const char* nameOrAbbrOrLocale,
return 0;
}
- triedCode = FALSE;
+ triedCode = false;
if(uprv_strchr(nameOrAbbrOrLocale, '-')==NULL && uprv_strchr(nameOrAbbrOrLocale, '_')==NULL ){
/* try long and abbreviated script names first */
UScriptCode code = (UScriptCode) u_getPropertyValueEnum(UCHAR_SCRIPT, nameOrAbbrOrLocale);
if(code!=USCRIPT_INVALID_CODE) {
return setOneCode(code, fillIn, capacity, err);
}
- triedCode = TRUE;
+ triedCode = true;
}
internalErrorCode = U_ZERO_ERROR;
length = getCodesFromLocale(nameOrAbbrOrLocale, fillIn, capacity, err);
diff --git a/thirdparty/icu4c/common/uscript_props.cpp b/thirdparty/icu4c/common/uscript_props.cpp
index 07bae251ea..886acfafa8 100644
--- a/thirdparty/icu4c/common/uscript_props.cpp
+++ b/thirdparty/icu4c/common/uscript_props.cpp
@@ -242,6 +242,8 @@ const int32_t SCRIPT_PROPS[] = {
0x16ABC | EXCLUSION, // Tnsa
0x1E290 | EXCLUSION, // Toto
0x10582 | EXCLUSION | CASED, // Vith
+ 0x11F1B | EXCLUSION | LB_LETTERS, // Kawi
+ 0x1E4E6 | EXCLUSION, // Nagm
// End copy-paste from parsescriptmetadata.py
};
diff --git a/thirdparty/icu4c/common/uset.cpp b/thirdparty/icu4c/common/uset.cpp
index 871a5d8986..2152693560 100644
--- a/thirdparty/icu4c/common/uset.cpp
+++ b/thirdparty/icu4c/common/uset.cpp
@@ -344,12 +344,12 @@ uset_getItem(const USet* uset, int32_t itemIndex,
//uset_getRange(const USet* set, int32_t rangeIndex,
// UChar32* pStart, UChar32* pEnd) {
// if ((uint32_t) rangeIndex >= (uint32_t) uset_getRangeCount(set)) {
-// return FALSE;
+// return false;
// }
// const UnicodeSet* us = (const UnicodeSet*) set;
// *pStart = us->getRangeStart(rangeIndex);
// *pEnd = us->getRangeEnd(rangeIndex);
-// return TRUE;
+// return true;
//}
/*
@@ -384,11 +384,11 @@ uset_getSerializedSet(USerializedSet* fillSet, const uint16_t* src, int32_t srcL
int32_t length;
if(fillSet==NULL) {
- return FALSE;
+ return false;
}
if(src==NULL || srcLength<=0) {
fillSet->length=fillSet->bmpLength=0;
- return FALSE;
+ return false;
}
length=*src++;
@@ -397,20 +397,20 @@ uset_getSerializedSet(USerializedSet* fillSet, const uint16_t* src, int32_t srcL
length&=0x7fff;
if(srcLength<(2+length)) {
fillSet->length=fillSet->bmpLength=0;
- return FALSE;
+ return false;
}
fillSet->bmpLength=*src++;
} else {
/* only BMP values */
if(srcLength<(1+length)) {
fillSet->length=fillSet->bmpLength=0;
- return FALSE;
+ return false;
}
fillSet->bmpLength=length;
}
fillSet->array=src;
fillSet->length=length;
- return TRUE;
+ return true;
}
U_CAPI void U_EXPORT2
@@ -451,7 +451,7 @@ uset_serializedContains(const USerializedSet* set, UChar32 c) {
const uint16_t* array;
if(set==NULL || (uint32_t)c>0x10ffff) {
- return FALSE;
+ return false;
}
array=set->array;
@@ -520,7 +520,7 @@ uset_getSerializedRange(const USerializedSet* set, int32_t rangeIndex,
int32_t bmpLength, length;
if(set==NULL || rangeIndex<0 || pStart==NULL || pEnd==NULL) {
- return FALSE;
+ return false;
}
array=set->array;
@@ -537,7 +537,7 @@ uset_getSerializedRange(const USerializedSet* set, int32_t rangeIndex,
} else {
*pEnd=0x10ffff;
}
- return TRUE;
+ return true;
} else {
rangeIndex-=bmpLength;
rangeIndex*=2; /* address pairs of pairs of units */
@@ -551,9 +551,9 @@ uset_getSerializedRange(const USerializedSet* set, int32_t rangeIndex,
} else {
*pEnd=0x10ffff;
}
- return TRUE;
+ return true;
} else {
- return FALSE;
+ return false;
}
}
}
@@ -591,14 +591,14 @@ uset_getSerializedRange(const USerializedSet* set, int32_t rangeIndex,
// int32_t i, length, more;
//
// if(set==NULL || (uint32_t)c>0x10ffff) {
-// return FALSE;
+// return false;
// }
//
// length=set->length;
// i=findChar(set->array, length, c);
// if((i&1)^doRemove) {
// /* c is already in the set */
-// return TRUE;
+// return true;
// }
//
// /* how many more array items do we need? */
@@ -615,7 +615,7 @@ uset_getSerializedRange(const USerializedSet* set, int32_t rangeIndex,
// }
// }
// }
-// return TRUE;
+// return true;
// } else if(i>0 && c==set->array[i-1]) {
// /* c is just after the previous range, extend that in-place by one */
// if(++c<=0x10ffff) {
@@ -632,7 +632,7 @@ uset_getSerializedRange(const USerializedSet* set, int32_t rangeIndex,
// /* extend the previous range (had limit 0x10ffff) to the end of Unicode */
// set->length=i-1;
// }
-// return TRUE;
+// return true;
// } else if(i==length && c==0x10ffff) {
// /* insert one range limit c */
// more=1;
@@ -647,7 +647,7 @@ uset_getSerializedRange(const USerializedSet* set, int32_t rangeIndex,
// int32_t newCapacity=set->capacity+set->capacity/2+USET_GROW_DELTA;
// UChar32* newArray=(UChar32* )uprv_malloc(newCapacity*4);
// if(newArray==NULL) {
-// return FALSE;
+// return false;
// }
// set->capacity=newCapacity;
// uprv_memcpy(newArray, set->array, length*4);
@@ -667,7 +667,7 @@ uset_getSerializedRange(const USerializedSet* set, int32_t rangeIndex,
// }
// set->length+=more;
//
-// return TRUE;
+// return true;
// }
//
// U_CAPI UBool U_EXPORT2
diff --git a/thirdparty/icu4c/common/usetiter.cpp b/thirdparty/icu4c/common/usetiter.cpp
index 7915169049..3cdece5500 100644
--- a/thirdparty/icu4c/common/usetiter.cpp
+++ b/thirdparty/icu4c/common/usetiter.cpp
@@ -50,19 +50,19 @@ UBool UnicodeSetIterator::next() {
if (nextElement <= endElement) {
codepoint = codepointEnd = nextElement++;
string = NULL;
- return TRUE;
+ return true;
}
if (range < endRange) {
loadRange(++range);
codepoint = codepointEnd = nextElement++;
string = NULL;
- return TRUE;
+ return true;
}
- if (nextString >= stringCount) return FALSE;
+ if (nextString >= stringCount) return false;
codepoint = (UChar32)IS_STRING; // signal that value is actually a string
string = (const UnicodeString*) set->strings->elementAt(nextString++);
- return TRUE;
+ return true;
}
/**
@@ -82,20 +82,20 @@ UBool UnicodeSetIterator::nextRange() {
codepointEnd = endElement;
codepoint = nextElement;
nextElement = endElement+1;
- return TRUE;
+ return true;
}
if (range < endRange) {
loadRange(++range);
codepointEnd = endElement;
codepoint = nextElement;
nextElement = endElement+1;
- return TRUE;
+ return true;
}
- if (nextString >= stringCount) return FALSE;
+ if (nextString >= stringCount) return false;
codepoint = (UChar32)IS_STRING; // signal that value is actually a string
string = (const UnicodeString*) set->strings->elementAt(nextString++);
- return TRUE;
+ return true;
}
/**
diff --git a/thirdparty/icu4c/common/ushape.cpp b/thirdparty/icu4c/common/ushape.cpp
index ae13b5c118..babbbe52a8 100644
--- a/thirdparty/icu4c/common/ushape.cpp
+++ b/thirdparty/icu4c/common/ushape.cpp
@@ -354,10 +354,10 @@ _shapeToArabicDigitsWithContext(UChar *s, int32_t length,
switch(ubidi_getClass(c)) {
case U_LEFT_TO_RIGHT: /* L */
case U_RIGHT_TO_LEFT: /* R */
- lastStrongWasAL=FALSE;
+ lastStrongWasAL=false;
break;
case U_RIGHT_TO_LEFT_ARABIC: /* AL */
- lastStrongWasAL=TRUE;
+ lastStrongWasAL=true;
break;
case U_EUROPEAN_NUMBER: /* EN */
if(lastStrongWasAL && (uint32_t)(c-0x30)<10) {
@@ -374,10 +374,10 @@ _shapeToArabicDigitsWithContext(UChar *s, int32_t length,
switch(ubidi_getClass(c)) {
case U_LEFT_TO_RIGHT: /* L */
case U_RIGHT_TO_LEFT: /* R */
- lastStrongWasAL=FALSE;
+ lastStrongWasAL=false;
break;
case U_RIGHT_TO_LEFT_ARABIC: /* AL */
- lastStrongWasAL=TRUE;
+ lastStrongWasAL=true;
break;
case U_EUROPEAN_NUMBER: /* EN */
if(lastStrongWasAL && (uint32_t)(c-0x30)<10) {
@@ -1710,13 +1710,13 @@ u_shapeArabic(const UChar *source, int32_t sourceLength,
_shapeToArabicDigitsWithContext(dest, destLength,
digitBase,
(UBool)((options&U_SHAPE_TEXT_DIRECTION_MASK)==U_SHAPE_TEXT_DIRECTION_LOGICAL),
- FALSE);
+ false);
break;
case U_SHAPE_DIGITS_ALEN2AN_INIT_AL:
_shapeToArabicDigitsWithContext(dest, destLength,
digitBase,
(UBool)((options&U_SHAPE_TEXT_DIRECTION_MASK)==U_SHAPE_TEXT_DIRECTION_LOGICAL),
- TRUE);
+ true);
break;
default:
/* will never occur because of validity checks above */
diff --git a/thirdparty/icu4c/common/usprep.cpp b/thirdparty/icu4c/common/usprep.cpp
index 41a1f56c77..50d16081d1 100644
--- a/thirdparty/icu4c/common/usprep.cpp
+++ b/thirdparty/icu4c/common/usprep.cpp
@@ -45,7 +45,7 @@ U_CDECL_BEGIN
Static cache for already opened StringPrep profiles
*/
static UHashtable *SHARED_DATA_HASHTABLE = NULL;
-static icu::UInitOnce gSharedDataInitOnce = U_INITONCE_INITIALIZER;
+static icu::UInitOnce gSharedDataInitOnce {};
static UMutex usprepMutex;
/* format version of spp file */
@@ -91,9 +91,9 @@ isSPrepAcceptable(void * /* context */,
) {
//uprv_memcpy(formatVersion, pInfo->formatVersion, 4);
uprv_memcpy(dataVersion, pInfo->dataVersion, 4);
- return TRUE;
+ return true;
} else {
- return FALSE;
+ return false;
}
}
@@ -159,8 +159,8 @@ usprep_internal_flushCache(UBool noRefCount){
profile = (UStringPrepProfile *) e->value.pointer;
key = (UStringPrepKey *) e->key.pointer;
- if ((noRefCount== FALSE && profile->refCount == 0) ||
- noRefCount== TRUE) {
+ if ((noRefCount== false && profile->refCount == 0) ||
+ noRefCount== true) {
deletedNum++;
uhash_removeElement(SHARED_DATA_HASHTABLE, e);
@@ -188,13 +188,13 @@ usprep_internal_flushCache(UBool noRefCount){
/* Works just like ucnv_flushCache()
static int32_t
usprep_flushCache(){
- return usprep_internal_flushCache(FALSE);
+ return usprep_internal_flushCache(false);
}
*/
static UBool U_CALLCONV usprep_cleanup(void){
if (SHARED_DATA_HASHTABLE != NULL) {
- usprep_internal_flushCache(TRUE);
+ usprep_internal_flushCache(true);
if (SHARED_DATA_HASHTABLE != NULL && uhash_count(SHARED_DATA_HASHTABLE) == 0) {
uhash_close(SHARED_DATA_HASHTABLE);
SHARED_DATA_HASHTABLE = NULL;
@@ -243,7 +243,7 @@ loadData(UStringPrepProfile* profile,
//TODO: change the path
dataMemory=udata_openChoice(path, type, name, isSPrepAcceptable, NULL, errorCode);
if(U_FAILURE(*errorCode)) {
- return FALSE;
+ return false;
}
p=(const int32_t *)udata_getMemory(dataMemory);
@@ -254,7 +254,7 @@ loadData(UStringPrepProfile* profile,
if(U_FAILURE(*errorCode)) {
udata_close(dataMemory);
- return FALSE;
+ return false;
}
/* in the mutex block, set the data for this process */
@@ -280,7 +280,7 @@ loadData(UStringPrepProfile* profile,
if(U_FAILURE(*errorCode)){
udata_close(dataMemory);
- return FALSE;
+ return false;
}
if( normUniVer < sprepUniVer && /* the Unicode version of SPREP file must be less than the Unicode Version of the normalization data */
normUniVer < normCorrVer && /* the Unicode version of the NormalizationCorrections.txt file should be less than the Unicode Version of the normalization data */
@@ -288,9 +288,9 @@ loadData(UStringPrepProfile* profile,
){
*errorCode = U_INVALID_FORMAT_ERROR;
udata_close(dataMemory);
- return FALSE;
+ return false;
}
- profile->isDataLoaded = TRUE;
+ profile->isDataLoaded = true;
/* if a different thread set it first, then close the extra data */
if(dataMemory!=NULL) {
@@ -474,28 +474,28 @@ getValues(uint16_t trieWord, int16_t& value, UBool& isIndex){
* the source codepoint is copied to the destination
*/
type = USPREP_TYPE_LIMIT;
- isIndex =FALSE;
+ isIndex =false;
value = 0;
}else if(trieWord >= _SPREP_TYPE_THRESHOLD){
type = (UStringPrepType) (trieWord - _SPREP_TYPE_THRESHOLD);
- isIndex =FALSE;
+ isIndex =false;
value = 0;
}else{
/* get the type */
type = USPREP_MAP;
/* ascertain if the value is index or delta */
if(trieWord & 0x02){
- isIndex = TRUE;
+ isIndex = true;
value = trieWord >> 2; //mask off the lower 2 bits and shift
}else{
- isIndex = FALSE;
+ isIndex = false;
value = (int16_t)trieWord;
value = (value >> 2);
}
if((trieWord>>2) == _SPREP_MAX_INDEX_VALUE){
type = USPREP_DELETE;
- isIndex =FALSE;
+ isIndex =false;
value = 0;
}
}
@@ -535,7 +535,7 @@ usprep_map( const UStringPrepProfile* profile,
type = getValues(result, value, isIndex);
// check if the source codepoint is unassigned
- if(type == USPREP_UNASSIGNED && allowUnassigned == FALSE){
+ if(type == USPREP_UNASSIGNED && allowUnassigned == false){
uprv_syntaxError(src,srcIndex-U16_LENGTH(ch), srcLength,parseError);
*status = U_STRINGPREP_UNASSIGNED_ERROR;
@@ -709,7 +709,7 @@ usprep_prepare( const UStringPrepProfile* profile,
const UChar *b2 = s2.getBuffer();
int32_t b2Len = s2.length();
UCharDirection direction=U_CHAR_DIRECTION_COUNT, firstCharDir=U_CHAR_DIRECTION_COUNT;
- UBool leftToRight=FALSE, rightToLeft=FALSE;
+ UBool leftToRight=false, rightToLeft=false;
int32_t rtlPos =-1, ltrPos =-1;
for(int32_t b2Index=0; b2Index<b2Len;){
@@ -737,31 +737,31 @@ usprep_prepare( const UStringPrepProfile* profile,
firstCharDir = direction;
}
if(direction == U_LEFT_TO_RIGHT){
- leftToRight = TRUE;
+ leftToRight = true;
ltrPos = b2Index-1;
}
if(direction == U_RIGHT_TO_LEFT || direction == U_RIGHT_TO_LEFT_ARABIC){
- rightToLeft = TRUE;
+ rightToLeft = true;
rtlPos = b2Index-1;
}
}
}
- if(profile->checkBiDi == TRUE){
+ if(profile->checkBiDi == true){
// satisfy 2
- if( leftToRight == TRUE && rightToLeft == TRUE){
+ if( leftToRight == true && rightToLeft == true){
*status = U_STRINGPREP_CHECK_BIDI_ERROR;
uprv_syntaxError(b2,(rtlPos>ltrPos) ? rtlPos : ltrPos, b2Len, parseError);
return 0;
}
//satisfy 3
- if( rightToLeft == TRUE &&
+ if( rightToLeft == true &&
!((firstCharDir == U_RIGHT_TO_LEFT || firstCharDir == U_RIGHT_TO_LEFT_ARABIC) &&
(direction == U_RIGHT_TO_LEFT || direction == U_RIGHT_TO_LEFT_ARABIC))
){
*status = U_STRINGPREP_CHECK_BIDI_ERROR;
uprv_syntaxError(b2, rtlPos, b2Len, parseError);
- return FALSE;
+ return false;
}
}
return s2.extract(dest, destCapacity, *status);
diff --git a/thirdparty/icu4c/common/ustr_cnv.cpp b/thirdparty/icu4c/common/ustr_cnv.cpp
index 9a25a9905a..97fbc527a3 100644
--- a/thirdparty/icu4c/common/ustr_cnv.cpp
+++ b/thirdparty/icu4c/common/ustr_cnv.cpp
@@ -144,7 +144,7 @@ u_uastrncpy(UChar *ucs1,
&s2,
s2+u_astrnlen(s2, n),
NULL,
- TRUE,
+ true,
&err);
ucnv_reset(cnv); /* be good citizens */
u_releaseDefaultConverter(cnv);
@@ -216,7 +216,7 @@ u_austrncpy(char *s1,
&ucs2,
ucs2+u_ustrnlen(ucs2, n),
NULL,
- TRUE,
+ true,
&err);
ucnv_reset(cnv); /* be good citizens */
u_releaseDefaultConverter(cnv);
diff --git a/thirdparty/icu4c/common/ustr_titlecase_brkiter.cpp b/thirdparty/icu4c/common/ustr_titlecase_brkiter.cpp
index 3002d64e34..85dfa0decb 100644
--- a/thirdparty/icu4c/common/ustr_titlecase_brkiter.cpp
+++ b/thirdparty/icu4c/common/ustr_titlecase_brkiter.cpp
@@ -110,7 +110,7 @@ int32_t WholeStringBreakIterator::next() { return length; }
int32_t WholeStringBreakIterator::current() const { return 0; }
int32_t WholeStringBreakIterator::following(int32_t /*offset*/) { return length; }
int32_t WholeStringBreakIterator::preceding(int32_t /*offset*/) { return 0; }
-UBool WholeStringBreakIterator::isBoundary(int32_t /*offset*/) { return FALSE; }
+UBool WholeStringBreakIterator::isBoundary(int32_t /*offset*/) { return false; }
int32_t WholeStringBreakIterator::next(int32_t /*n*/) { return length; }
WholeStringBreakIterator *WholeStringBreakIterator::createBufferClone(
diff --git a/thirdparty/icu4c/common/ustrcase.cpp b/thirdparty/icu4c/common/ustrcase.cpp
index 43910ea520..8037c09b4f 100644
--- a/thirdparty/icu4c/common/ustrcase.cpp
+++ b/thirdparty/icu4c/common/ustrcase.cpp
@@ -107,7 +107,7 @@ appendResult(UChar *dest, int32_t destIndex, int32_t destCapacity,
/* append the result */
if(c>=0) {
/* code point */
- UBool isError=FALSE;
+ UBool isError=false;
U16_APPEND(dest, destIndex, destCapacity, c, isError);
if(isError) {
/* overflow, nothing written */
@@ -1087,12 +1087,12 @@ UBool isFollowedByCasedLetter(const UChar *s, int32_t i, int32_t length) {
if ((type & UCASE_IGNORABLE) != 0) {
// Case-ignorable, continue with the loop.
} else if (type != UCASE_NONE) {
- return TRUE; // Followed by cased letter.
+ return true; // Followed by cased letter.
} else {
- return FALSE; // Uncased and not case-ignorable.
+ return false; // Uncased and not case-ignorable.
}
}
- return FALSE; // Not followed by cased letter.
+ return false; // Not followed by cased letter.
}
/**
@@ -1155,7 +1155,7 @@ int32_t toUpper(uint32_t options,
nextState |= AFTER_VOWEL_WITH_ACCENT;
}
// Map according to Greek rules.
- UBool addTonos = FALSE;
+ UBool addTonos = false;
if (upper == 0x397 &&
(data & HAS_ACCENT) != 0 &&
numYpogegrammeni == 0 &&
@@ -1166,7 +1166,7 @@ int32_t toUpper(uint32_t options,
if (i == nextIndex) {
upper = 0x389; // Preserve the precomposed form.
} else {
- addTonos = TRUE;
+ addTonos = true;
}
} else if ((data & HAS_DIALYTIKA) != 0) {
// Preserve a vowel with dialytika in precomposed form if it exists.
@@ -1181,7 +1181,7 @@ int32_t toUpper(uint32_t options,
UBool change;
if (edits == nullptr && (options & U_OMIT_UNCHANGED_TEXT) == 0) {
- change = TRUE; // common, simple usage
+ change = true; // common, simple usage
} else {
// Find out first whether we are changing the text.
change = src[i] != upper || numYpogegrammeni > 0;
diff --git a/thirdparty/icu4c/common/ustring.cpp b/thirdparty/icu4c/common/ustring.cpp
index 8477256389..5804976ef9 100644
--- a/thirdparty/icu4c/common/ustring.cpp
+++ b/thirdparty/icu4c/common/ustring.cpp
@@ -43,13 +43,13 @@ static inline UBool
isMatchAtCPBoundary(const UChar *start, const UChar *match, const UChar *matchLimit, const UChar *limit) {
if(U16_IS_TRAIL(*match) && start!=match && U16_IS_LEAD(*(match-1))) {
/* the leading edge of the match is in the middle of a surrogate pair */
- return FALSE;
+ return false;
}
if(U16_IS_LEAD(*(matchLimit-1)) && matchLimit!=limit && U16_IS_TRAIL(*matchLimit)) {
/* the trailing edge of the match is in the middle of a surrogate pair */
- return FALSE;
+ return false;
}
- return TRUE;
+ return true;
}
U_CAPI UChar * U_EXPORT2
@@ -461,7 +461,7 @@ u_memrchr32(const UChar *s, UChar32 c, int32_t count) {
/*
* Match each code point in a string against each code point in the matchSet.
* Return the index of the first string code point that
- * is (polarity==TRUE) or is not (FALSE) contained in the matchSet.
+ * is (polarity==true) or is not (false) contained in the matchSet.
* Return -(string length)-1 if there is no such code point.
*/
static int32_t
@@ -540,7 +540,7 @@ endloop:
U_CAPI UChar * U_EXPORT2
u_strpbrk(const UChar *string, const UChar *matchSet)
{
- int32_t idx = _matchFromSet(string, matchSet, TRUE);
+ int32_t idx = _matchFromSet(string, matchSet, true);
if(idx >= 0) {
return (UChar *)string + idx;
} else {
@@ -552,7 +552,7 @@ u_strpbrk(const UChar *string, const UChar *matchSet)
U_CAPI int32_t U_EXPORT2
u_strcspn(const UChar *string, const UChar *matchSet)
{
- int32_t idx = _matchFromSet(string, matchSet, TRUE);
+ int32_t idx = _matchFromSet(string, matchSet, true);
if(idx >= 0) {
return idx;
} else {
@@ -564,7 +564,7 @@ u_strcspn(const UChar *string, const UChar *matchSet)
U_CAPI int32_t U_EXPORT2
u_strspn(const UChar *string, const UChar *matchSet)
{
- int32_t idx = _matchFromSet(string, matchSet, FALSE);
+ int32_t idx = _matchFromSet(string, matchSet, false);
if(idx >= 0) {
return idx;
} else {
@@ -929,13 +929,13 @@ u_strCompare(const UChar *s1, int32_t length1,
if(s1==NULL || length1<-1 || s2==NULL || length2<-1) {
return 0;
}
- return uprv_strCompare(s1, length1, s2, length2, FALSE, codePointOrder);
+ return uprv_strCompare(s1, length1, s2, length2, false, codePointOrder);
}
/* String compare in code point order - u_strcmp() compares in code unit order. */
U_CAPI int32_t U_EXPORT2
u_strcmpCodePointOrder(const UChar *s1, const UChar *s2) {
- return uprv_strCompare(s1, -1, s2, -1, FALSE, TRUE);
+ return uprv_strCompare(s1, -1, s2, -1, false, true);
}
U_CAPI int32_t U_EXPORT2
@@ -960,7 +960,7 @@ u_strncmp(const UChar *s1,
U_CAPI int32_t U_EXPORT2
u_strncmpCodePointOrder(const UChar *s1, const UChar *s2, int32_t n) {
- return uprv_strCompare(s1, n, s2, n, TRUE, TRUE);
+ return uprv_strCompare(s1, n, s2, n, true, true);
}
U_CAPI UChar* U_EXPORT2
@@ -1049,10 +1049,10 @@ U_CAPI UBool U_EXPORT2
u_strHasMoreChar32Than(const UChar *s, int32_t length, int32_t number) {
if(number<0) {
- return TRUE;
+ return true;
}
if(s==NULL || length<-1) {
- return FALSE;
+ return false;
}
if(length==-1) {
@@ -1062,10 +1062,10 @@ u_strHasMoreChar32Than(const UChar *s, int32_t length, int32_t number) {
/* count code points until they exceed */
for(;;) {
if((c=*s++)==0) {
- return FALSE;
+ return false;
}
if(number==0) {
- return TRUE;
+ return true;
}
if(U16_IS_LEAD(c) && U16_IS_TRAIL(*s)) {
++s;
@@ -1079,13 +1079,13 @@ u_strHasMoreChar32Than(const UChar *s, int32_t length, int32_t number) {
/* s contains at least (length+1)/2 code points: <=2 UChars per cp */
if(((length+1)/2)>number) {
- return TRUE;
+ return true;
}
/* check if s does not even contain enough UChars */
maxSupplementary=length-number;
if(maxSupplementary<=0) {
- return FALSE;
+ return false;
}
/* there are maxSupplementary=length-number more UChars than asked-for code points */
@@ -1096,16 +1096,16 @@ u_strHasMoreChar32Than(const UChar *s, int32_t length, int32_t number) {
limit=s+length;
for(;;) {
if(s==limit) {
- return FALSE;
+ return false;
}
if(number==0) {
- return TRUE;
+ return true;
}
if(U16_IS_LEAD(*s++) && s!=limit && U16_IS_TRAIL(*s)) {
++s;
if(--maxSupplementary<=0) {
/* too many pairs - too few code points */
- return FALSE;
+ return false;
}
}
--number;
@@ -1162,7 +1162,7 @@ u_memcmp(const UChar *buf1, const UChar *buf2, int32_t count) {
U_CAPI int32_t U_EXPORT2
u_memcmpCodePointOrder(const UChar *s1, const UChar *s2, int32_t count) {
- return uprv_strCompare(s1, count, s2, count, FALSE, TRUE);
+ return uprv_strCompare(s1, count, s2, count, false, true);
}
/* u_unescape & support fns ------------------------------------------------- */
@@ -1223,7 +1223,7 @@ u_unescapeAt(UNESCAPE_CHAR_AT charAt,
int8_t maxDig = 0;
int8_t bitsPerDigit = 4;
int32_t dig;
- UBool braces = FALSE;
+ UBool braces = false;
/* Check that offset is in range */
if (*offset < 0 || *offset >= length) {
@@ -1245,7 +1245,7 @@ u_unescapeAt(UNESCAPE_CHAR_AT charAt,
minDig = 1;
if (*offset < length && charAt(*offset, context) == u'{') {
++(*offset);
- braces = TRUE;
+ braces = true;
maxDig = 8;
} else {
maxDig = 2;
diff --git a/thirdparty/icu4c/common/ustrtrns.cpp b/thirdparty/icu4c/common/ustrtrns.cpp
index 5dc032c02f..dcb9dc5878 100644
--- a/thirdparty/icu4c/common/ustrtrns.cpp
+++ b/thirdparty/icu4c/common/ustrtrns.cpp
@@ -119,7 +119,7 @@ u_strFromUTF32WithSub(UChar *dest,
} else {
++numSubstitutions;
}
- } while(TRUE);
+ } while(true);
}
reqLength += (int32_t)(pDest - dest);
diff --git a/thirdparty/icu4c/common/utext.cpp b/thirdparty/icu4c/common/utext.cpp
index ec79700ca8..548e6a60f3 100644
--- a/thirdparty/icu4c/common/utext.cpp
+++ b/thirdparty/icu4c/common/utext.cpp
@@ -49,14 +49,14 @@ utext_moveIndex32(UText *ut, int32_t delta) {
UChar32 c;
if (delta > 0) {
do {
- if(ut->chunkOffset>=ut->chunkLength && !utext_access(ut, ut->chunkNativeLimit, TRUE)) {
- return FALSE;
+ if(ut->chunkOffset>=ut->chunkLength && !utext_access(ut, ut->chunkNativeLimit, true)) {
+ return false;
}
c = ut->chunkContents[ut->chunkOffset];
if (U16_IS_SURROGATE(c)) {
c = utext_next32(ut);
if (c == U_SENTINEL) {
- return FALSE;
+ return false;
}
} else {
ut->chunkOffset++;
@@ -65,14 +65,14 @@ utext_moveIndex32(UText *ut, int32_t delta) {
} else if (delta<0) {
do {
- if(ut->chunkOffset<=0 && !utext_access(ut, ut->chunkNativeStart, FALSE)) {
- return FALSE;
+ if(ut->chunkOffset<=0 && !utext_access(ut, ut->chunkNativeStart, false)) {
+ return false;
}
c = ut->chunkContents[ut->chunkOffset-1];
if (U16_IS_SURROGATE(c)) {
c = utext_previous32(ut);
if (c == U_SENTINEL) {
- return FALSE;
+ return false;
}
} else {
ut->chunkOffset--;
@@ -80,7 +80,7 @@ utext_moveIndex32(UText *ut, int32_t delta) {
} while(++delta<0);
}
- return TRUE;
+ return true;
}
@@ -114,7 +114,7 @@ utext_setNativeIndex(UText *ut, int64_t index) {
// Access the new position. Assume a forward iteration from here,
// which will also be optimimum for a single random access.
// Reverse iterations may suffer slightly.
- ut->pFuncs->access(ut, index, TRUE);
+ ut->pFuncs->access(ut, index, true);
} else if((int32_t)(index - ut->chunkNativeStart) <= ut->nativeIndexingLimit) {
// utf-16 indexing.
ut->chunkOffset=(int32_t)(index-ut->chunkNativeStart);
@@ -127,7 +127,7 @@ utext_setNativeIndex(UText *ut, int64_t index) {
UChar c= ut->chunkContents[ut->chunkOffset];
if (U16_IS_TRAIL(c)) {
if (ut->chunkOffset==0) {
- ut->pFuncs->access(ut, ut->chunkNativeStart, FALSE);
+ ut->pFuncs->access(ut, ut->chunkNativeStart, false);
}
if (ut->chunkOffset>0) {
UChar lead = ut->chunkContents[ut->chunkOffset-1];
@@ -152,7 +152,7 @@ utext_getPreviousNativeIndex(UText *ut) {
int64_t result;
if (i >= 0) {
UChar c = ut->chunkContents[i];
- if (U16_IS_TRAIL(c) == FALSE) {
+ if (U16_IS_TRAIL(c) == false) {
if (i <= ut->nativeIndexingLimit) {
result = ut->chunkNativeStart + i;
} else {
@@ -189,14 +189,14 @@ utext_current32(UText *ut) {
UChar32 c;
if (ut->chunkOffset==ut->chunkLength) {
// Current position is just off the end of the chunk.
- if (ut->pFuncs->access(ut, ut->chunkNativeLimit, TRUE) == FALSE) {
+ if (ut->pFuncs->access(ut, ut->chunkNativeLimit, true) == false) {
// Off the end of the text.
return U_SENTINEL;
}
}
c = ut->chunkContents[ut->chunkOffset];
- if (U16_IS_LEAD(c) == FALSE) {
+ if (U16_IS_LEAD(c) == false) {
// Normal, non-supplementary case.
return c;
}
@@ -219,11 +219,11 @@ utext_current32(UText *ut) {
// the original position before the unpaired lead still needs to be restored.
int64_t nativePosition = ut->chunkNativeLimit;
int32_t originalOffset = ut->chunkOffset;
- if (ut->pFuncs->access(ut, nativePosition, TRUE)) {
+ if (ut->pFuncs->access(ut, nativePosition, true)) {
trail = ut->chunkContents[ut->chunkOffset];
}
- UBool r = ut->pFuncs->access(ut, nativePosition, FALSE); // reverse iteration flag loads preceding chunk
- U_ASSERT(r==TRUE);
+ UBool r = ut->pFuncs->access(ut, nativePosition, false); // reverse iteration flag loads preceding chunk
+ U_ASSERT(r==true);
ut->chunkOffset = originalOffset;
if(!r) {
return U_SENTINEL;
@@ -246,7 +246,7 @@ utext_char32At(UText *ut, int64_t nativeIndex) {
if (nativeIndex>=ut->chunkNativeStart && nativeIndex < ut->chunkNativeStart + ut->nativeIndexingLimit) {
ut->chunkOffset = (int32_t)(nativeIndex - ut->chunkNativeStart);
c = ut->chunkContents[ut->chunkOffset];
- if (U16_IS_SURROGATE(c) == FALSE) {
+ if (U16_IS_SURROGATE(c) == false) {
return c;
}
}
@@ -270,13 +270,13 @@ utext_next32(UText *ut) {
UChar32 c;
if (ut->chunkOffset >= ut->chunkLength) {
- if (ut->pFuncs->access(ut, ut->chunkNativeLimit, TRUE) == FALSE) {
+ if (ut->pFuncs->access(ut, ut->chunkNativeLimit, true) == false) {
return U_SENTINEL;
}
}
c = ut->chunkContents[ut->chunkOffset++];
- if (U16_IS_LEAD(c) == FALSE) {
+ if (U16_IS_LEAD(c) == false) {
// Normal case, not supplementary.
// (A trail surrogate seen here is just returned as is, as a surrogate value.
// It cannot be part of a pair.)
@@ -284,14 +284,14 @@ utext_next32(UText *ut) {
}
if (ut->chunkOffset >= ut->chunkLength) {
- if (ut->pFuncs->access(ut, ut->chunkNativeLimit, TRUE) == FALSE) {
+ if (ut->pFuncs->access(ut, ut->chunkNativeLimit, true) == false) {
// c is an unpaired lead surrogate at the end of the text.
// return it as it is.
return c;
}
}
UChar32 trail = ut->chunkContents[ut->chunkOffset];
- if (U16_IS_TRAIL(trail) == FALSE) {
+ if (U16_IS_TRAIL(trail) == false) {
// c was an unpaired lead surrogate, not at the end of the text.
// return it as it is (unpaired). Iteration position is on the
// following character, possibly in the next chunk, where the
@@ -310,13 +310,13 @@ utext_previous32(UText *ut) {
UChar32 c;
if (ut->chunkOffset <= 0) {
- if (ut->pFuncs->access(ut, ut->chunkNativeStart, FALSE) == FALSE) {
+ if (ut->pFuncs->access(ut, ut->chunkNativeStart, false) == false) {
return U_SENTINEL;
}
}
ut->chunkOffset--;
c = ut->chunkContents[ut->chunkOffset];
- if (U16_IS_TRAIL(c) == FALSE) {
+ if (U16_IS_TRAIL(c) == false) {
// Normal case, not supplementary.
// (A lead surrogate seen here is just returned as is, as a surrogate value.
// It cannot be part of a pair.)
@@ -324,7 +324,7 @@ utext_previous32(UText *ut) {
}
if (ut->chunkOffset <= 0) {
- if (ut->pFuncs->access(ut, ut->chunkNativeStart, FALSE) == FALSE) {
+ if (ut->pFuncs->access(ut, ut->chunkNativeStart, false) == false) {
// c is an unpaired trail surrogate at the start of the text.
// return it as it is.
return c;
@@ -332,7 +332,7 @@ utext_previous32(UText *ut) {
}
UChar32 lead = ut->chunkContents[ut->chunkOffset-1];
- if (U16_IS_LEAD(lead) == FALSE) {
+ if (U16_IS_LEAD(lead) == false) {
// c was an unpaired trail surrogate, not at the end of the text.
// return it as it is (unpaired). Iteration position is at c
return c;
@@ -351,7 +351,7 @@ utext_next32From(UText *ut, int64_t index) {
if(index<ut->chunkNativeStart || index>=ut->chunkNativeLimit) {
// Desired position is outside of the current chunk.
- if(!ut->pFuncs->access(ut, index, TRUE)) {
+ if(!ut->pFuncs->access(ut, index, true)) {
// no chunk available here
return U_SENTINEL;
}
@@ -391,7 +391,7 @@ utext_previous32From(UText *ut, int64_t index) {
//
if(index<=ut->chunkNativeStart || index>ut->chunkNativeLimit) {
// Requested native index is outside of the current chunk.
- if(!ut->pFuncs->access(ut, index, FALSE)) {
+ if(!ut->pFuncs->access(ut, index, false)) {
// no chunk available here
return U_SENTINEL;
}
@@ -400,7 +400,7 @@ utext_previous32From(UText *ut, int64_t index) {
ut->chunkOffset = (int32_t)(index - ut->chunkNativeStart);
} else {
ut->chunkOffset=ut->pFuncs->mapNativeIndexToUTF16(ut, index);
- if (ut->chunkOffset==0 && !ut->pFuncs->access(ut, index, FALSE)) {
+ if (ut->chunkOffset==0 && !ut->pFuncs->access(ut, index, false)) {
// no chunk available here
return U_SENTINEL;
}
@@ -438,24 +438,24 @@ utext_equals(const UText *a, const UText *b) {
a->magic != UTEXT_MAGIC ||
b->magic != UTEXT_MAGIC) {
// Null or invalid arguments don't compare equal to anything.
- return FALSE;
+ return false;
}
if (a->pFuncs != b->pFuncs) {
// Different types of text providers.
- return FALSE;
+ return false;
}
if (a->context != b->context) {
// Different sources (different strings)
- return FALSE;
+ return false;
}
if (utext_getNativeIndex(a) != utext_getNativeIndex(b)) {
// Different current position in the string.
- return FALSE;
+ return false;
}
- return TRUE;
+ return true;
}
U_CAPI UBool U_EXPORT2
@@ -987,7 +987,7 @@ utf8TextAccess(UText *ut, int64_t index, UBool forward) {
// Don't swap buffers, but do set the
// current buffer position.
ut->chunkOffset = ut->chunkLength;
- return FALSE;
+ return false;
} else {
// End of current buffer.
// check whether other buffer already has what we need.
@@ -1016,7 +1016,7 @@ utf8TextAccess(UText *ut, int64_t index, UBool forward) {
// Current buffer extends up to the end of the string.
// Leave it as the current buffer.
ut->chunkOffset = ut->chunkLength;
- return FALSE;
+ return false;
}
if (ix == u8b->bufNativeLimit) {
// Alternate buffer extends to the end of string.
@@ -1038,7 +1038,7 @@ utf8TextAccess(UText *ut, int64_t index, UBool forward) {
mapIndex = ix - u8b->toUCharsMapStart;
U_ASSERT(mapIndex < (int32_t)sizeof(UTF8Buf::mapToUChars));
ut->chunkOffset = u8b->mapToUChars[mapIndex] - u8b->bufStartIdx;
- return TRUE;
+ return true;
}
}
@@ -1055,7 +1055,7 @@ utf8TextAccess(UText *ut, int64_t index, UBool forward) {
// Don't swap buffers, but do set the
// current buffer position.
ut->chunkOffset = 0;
- return FALSE;
+ return false;
} else {
// Start of current buffer.
// check whether other buffer already has what we need.
@@ -1108,9 +1108,9 @@ utf8TextAccess(UText *ut, int64_t index, UBool forward) {
// one of the trailing bytes. Because there is no preceding
// character, this access fails. We can't pick up on the
// situation sooner because the requested index is not zero.
- return FALSE;
+ return false;
} else {
- return TRUE;
+ return true;
}
@@ -1139,7 +1139,7 @@ swapBuffers:
U_ASSERT(mapIndex<(int32_t)sizeof(u8b->mapToUChars));
ut->chunkOffset = u8b->mapToUChars[mapIndex] - u8b->bufStartIdx;
- return TRUE;
+ return true;
}
@@ -1170,7 +1170,7 @@ swapBuffers:
ut->chunkOffset = 0;
U_ASSERT(ix == u8b->bufNativeStart);
}
- return FALSE;
+ return false;
makeStubBuffer:
// The user has done a seek/access past the start or end
@@ -1203,10 +1203,10 @@ fillForward:
ut->p = u8b_swap;
int32_t strLen = ut->b;
- UBool nulTerminated = FALSE;
+ UBool nulTerminated = false;
if (strLen < 0) {
strLen = 0x7fffffff;
- nulTerminated = TRUE;
+ nulTerminated = true;
}
UChar *buf = u8b_swap->buf;
@@ -1214,7 +1214,7 @@ fillForward:
uint8_t *mapToUChars = u8b_swap->mapToUChars;
int32_t destIx = 0;
int32_t srcIx = ix;
- UBool seenNonAscii = FALSE;
+ UBool seenNonAscii = false;
UChar32 c = 0;
// Fill the chunk buffer and mapping arrays.
@@ -1230,8 +1230,8 @@ fillForward:
destIx++;
} else {
// General case, handle everything.
- if (seenNonAscii == FALSE) {
- seenNonAscii = TRUE;
+ if (seenNonAscii == false) {
+ seenNonAscii = true;
u8b_swap->bufNILimit = destIx;
}
@@ -1269,7 +1269,7 @@ fillForward:
u8b_swap->bufNativeLimit = srcIx;
u8b_swap->bufStartIdx = 0;
u8b_swap->bufLimitIdx = destIx;
- if (seenNonAscii == FALSE) {
+ if (seenNonAscii == false) {
u8b_swap->bufNILimit = destIx;
}
u8b_swap->toUCharsMapStart = u8b_swap->bufNativeStart;
@@ -1293,7 +1293,7 @@ fillForward:
ut->providerProperties &= ~I32_FLAG(UTEXT_PROVIDER_LENGTH_IS_EXPENSIVE);
}
}
- return TRUE;
+ return true;
}
@@ -1402,7 +1402,7 @@ fillReverse:
ut->chunkNativeStart = u8b_swap->bufNativeStart;
ut->chunkNativeLimit = u8b_swap->bufNativeLimit;
ut->nativeIndexingLimit = u8b_swap->bufNILimit;
- return TRUE;
+ return true;
}
}
@@ -1526,7 +1526,7 @@ utf8TextExtract(UText *ut,
utext_strFromUTF8(dest, destCapacity, &destLength,
(const char *)ut->context+start32, limit32-start32,
pErrorCode);
- utf8TextAccess(ut, limit32, TRUE);
+ utf8TextAccess(ut, limit32, true);
return destLength;
}
@@ -1760,13 +1760,13 @@ repTextAccess(UText *ut, int64_t index, UBool forward) {
if (index32>=ut->chunkNativeStart && index32<ut->chunkNativeLimit) {
// Buffer already contains the requested position.
ut->chunkOffset = (int32_t)(index - ut->chunkNativeStart);
- return TRUE;
+ return true;
}
if (index32>=length && ut->chunkNativeLimit==length) {
// Request for end of string, and buffer already extends up to it.
// Can't get the data, but don't change the buffer.
ut->chunkOffset = length - (int32_t)ut->chunkNativeStart;
- return FALSE;
+ return false;
}
ut->chunkNativeLimit = index + REP_TEXT_CHUNK_SIZE - 1;
@@ -1787,13 +1787,13 @@ repTextAccess(UText *ut, int64_t index, UBool forward) {
if (index32>ut->chunkNativeStart && index32<=ut->chunkNativeLimit) {
// Requested position already in buffer.
ut->chunkOffset = index32 - (int32_t)ut->chunkNativeStart;
- return TRUE;
+ return true;
}
if (index32==0 && ut->chunkNativeStart==0) {
// Request for start, buffer already begins at start.
// No data, but keep the buffer as is.
ut->chunkOffset = 0;
- return FALSE;
+ return false;
}
// Figure out the bounds of the chunk to extract for reverse iteration.
@@ -1849,7 +1849,7 @@ repTextAccess(UText *ut, int64_t index, UBool forward) {
// Use fast indexing for get/setNativeIndex()
ut->nativeIndexingLimit = ut->chunkLength;
- return TRUE;
+ return true;
}
@@ -1892,7 +1892,7 @@ repTextExtract(UText *ut,
}
UnicodeString buffer(dest, 0, destCapacity); // writable alias
rep->extractBetween(start32, limit32, buffer);
- repTextAccess(ut, limit32, TRUE);
+ repTextAccess(ut, limit32, true);
return u_terminateUChars(dest, destCapacity, length, status);
}
@@ -1948,7 +1948,7 @@ repTextReplace(UText *ut,
// set the iteration position to the end of the newly inserted replacement text.
int32_t newIndexPos = limit32 + lengthDelta;
- repTextAccess(ut, newIndexPos, TRUE);
+ repTextAccess(ut, newIndexPos, true);
return lengthDelta;
}
@@ -2012,7 +2012,7 @@ repTextCopy(UText *ut,
}
// Set position, reload chunk if needed.
- repTextAccess(ut, nativeIterIndex, TRUE);
+ repTextAccess(ut, nativeIterIndex, true);
}
static const struct UTextFuncs repFuncs =
@@ -2254,7 +2254,7 @@ unistrTextCopy(UText *ut,
// update chunk description, set iteration position.
ut->chunkContents = us->getBuffer();
- if (move==FALSE) {
+ if (move==false) {
// copy operation, string length grows
ut->chunkLength += limit32-start32;
ut->chunkNativeLimit = ut->chunkLength;
@@ -2525,7 +2525,7 @@ ucstrTextExtract(UText *ut,
// Access the start. Does two things we need:
// Pins 'start' to the length of the string, if it came in out-of-bounds.
// Snaps 'start' to the beginning of a code point.
- ucstrTextAccess(ut, start, TRUE);
+ ucstrTextAccess(ut, start, true);
const UChar *s=ut->chunkContents;
start32 = ut->chunkOffset;
@@ -2579,7 +2579,7 @@ ucstrTextExtract(UText *ut,
if (si <= ut->chunkNativeLimit) {
ut->chunkOffset = si;
} else {
- ucstrTextAccess(ut, si, TRUE);
+ ucstrTextAccess(ut, si, true);
}
// Add a terminating NUL if space in the buffer permits,
@@ -2698,11 +2698,11 @@ charIterTextAccess(UText *ut, int64_t index, UBool forward) {
neededIndex -= neededIndex % CIBufSize;
UChar *buf = NULL;
- UBool needChunkSetup = TRUE;
+ UBool needChunkSetup = true;
int i;
if (ut->chunkNativeStart == neededIndex) {
// The buffer we want is already the current chunk.
- needChunkSetup = FALSE;
+ needChunkSetup = false;
} else if (ut->b == neededIndex) {
// The first buffer (buffer p) has what we need.
buf = (UChar *)ut->p;
@@ -2809,7 +2809,7 @@ charIterTextExtract(UText *ut,
srci += len;
}
- charIterTextAccess(ut, copyLimit, TRUE);
+ charIterTextAccess(ut, copyLimit, true);
u_terminateUChars(dest, destCapacity, desti, status);
return desti;
diff --git a/thirdparty/icu4c/common/utf_impl.cpp b/thirdparty/icu4c/common/utf_impl.cpp
index 9dd241a12b..a1f9c6529a 100644
--- a/thirdparty/icu4c/common/utf_impl.cpp
+++ b/thirdparty/icu4c/common/utf_impl.cpp
@@ -55,7 +55,7 @@
* -finish:
* (BSR: Bit Scan Reverse, scans for a 1-bit, starting from the MSB)
*/
-extern "C" U_EXPORT const uint8_t
+U_CAPI const uint8_t
utf8_countTrailBytes[256]={
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
@@ -117,11 +117,11 @@ errorValue(int32_t count, int8_t strict) {
* Unicode 16-bit strings that are not well-formed UTF-16, that is, they
* contain unpaired surrogates.
* -3: All illegal byte sequences yield U+FFFD.
- * 0 Obsolete "safe" behavior of UTF8_NEXT_CHAR_SAFE(..., FALSE):
+ * 0 Obsolete "safe" behavior of UTF8_NEXT_CHAR_SAFE(..., false):
* All illegal byte sequences yield a positive code point such that this
* result code point would be encoded with the same number of bytes as
* the illegal sequence.
- * >0 Obsolete "strict" behavior of UTF8_NEXT_CHAR_SAFE(..., TRUE):
+ * >0 Obsolete "strict" behavior of UTF8_NEXT_CHAR_SAFE(..., true):
* Same as the obsolete "safe" behavior, but non-characters are also treated
* like illegal sequences.
*
@@ -214,7 +214,7 @@ utf8_appendCharSafeBody(uint8_t *s, int32_t i, int32_t length, UChar32 c, UBool
}
/* c>0x10ffff or not enough space, write an error value */
if(pIsError!=NULL) {
- *pIsError=TRUE;
+ *pIsError=true;
} else {
length-=i;
if(length>0) {
diff --git a/thirdparty/icu4c/common/util.cpp b/thirdparty/icu4c/common/util.cpp
index f342172259..3dcc05578b 100644
--- a/thirdparty/icu4c/common/util.cpp
+++ b/thirdparty/icu4c/common/util.cpp
@@ -214,14 +214,14 @@ int32_t ICU_Utility::skipWhitespace(const UnicodeString& str, int32_t& pos,
*/
UBool ICU_Utility::parseChar(const UnicodeString& id, int32_t& pos, UChar ch) {
int32_t start = pos;
- skipWhitespace(id, pos, TRUE);
+ skipWhitespace(id, pos, true);
if (pos == id.length() ||
id.charAt(pos) != ch) {
pos = start;
- return FALSE;
+ return false;
}
++pos;
- return TRUE;
+ return true;
}
/**
@@ -302,7 +302,7 @@ int32_t ICU_Utility::parseAsciiInteger(const UnicodeString& str, int32_t& pos) {
/**
* Append a character to a rule that is being built up. To flush
- * the quoteBuf to rule, make one final call with isLiteral == TRUE.
+ * the quoteBuf to rule, make one final call with isLiteral == true.
* If there is no final character, pass in (UChar32)-1 as c.
* @param rule the string to append the character to
* @param c the character to append, or (UChar32)-1 if none.
@@ -428,7 +428,7 @@ void ICU_Utility::appendToRule(UnicodeString& rule,
if (matcher != NULL) {
UnicodeString pat;
appendToRule(rule, matcher->toPattern(pat, escapeUnprintable),
- TRUE, escapeUnprintable, quoteBuf);
+ true, escapeUnprintable, quoteBuf);
}
}
diff --git a/thirdparty/icu4c/common/util.h b/thirdparty/icu4c/common/util.h
index 7e6d356899..4a9ae831de 100644
--- a/thirdparty/icu4c/common/util.h
+++ b/thirdparty/icu4c/common/util.h
@@ -258,7 +258,7 @@ class U_COMMON_API ICU_Utility /* not : public UObject because all methods are s
private:
// do not instantiate
- ICU_Utility();
+ ICU_Utility() = delete;
};
U_NAMESPACE_END
diff --git a/thirdparty/icu4c/common/utrace.cpp b/thirdparty/icu4c/common/utrace.cpp
index dfd2062f31..f7b8ade674 100644
--- a/thirdparty/icu4c/common/utrace.cpp
+++ b/thirdparty/icu4c/common/utrace.cpp
@@ -436,7 +436,7 @@ utrace_cleanup() {
pTraceDataFunc = NULL;
utrace_level = UTRACE_OFF;
gTraceContext = NULL;
- return TRUE;
+ return true;
}
diff --git a/thirdparty/icu4c/common/utrie.cpp b/thirdparty/icu4c/common/utrie.cpp
index ecf9b1cba7..96f2397ca1 100644
--- a/thirdparty/icu4c/common/utrie.cpp
+++ b/thirdparty/icu4c/common/utrie.cpp
@@ -72,14 +72,14 @@ utrie_open(UNewTrie *fillIn,
if(aliasData!=NULL) {
trie->data=aliasData;
- trie->isDataAllocated=FALSE;
+ trie->isDataAllocated=false;
} else {
trie->data=(uint32_t *)uprv_malloc(maxDataLength*4);
if(trie->data==NULL) {
uprv_free(trie);
return NULL;
}
- trie->isDataAllocated=TRUE;
+ trie->isDataAllocated=true;
}
/* preallocate and reset the first data block (block index 0) */
@@ -108,7 +108,7 @@ utrie_open(UNewTrie *fillIn,
trie->indexLength=UTRIE_MAX_INDEX_LENGTH;
trie->dataCapacity=maxDataLength;
trie->isLatin1Linear=latin1Linear;
- trie->isCompacted=FALSE;
+ trie->isCompacted=false;
return trie;
}
@@ -124,14 +124,14 @@ utrie_clone(UNewTrie *fillIn, const UNewTrie *other, uint32_t *aliasData, int32_
/* clone data */
if(aliasData!=NULL && aliasDataCapacity>=other->dataCapacity) {
- isDataAllocated=FALSE;
+ isDataAllocated=false;
} else {
aliasDataCapacity=other->dataCapacity;
aliasData=(uint32_t *)uprv_malloc(other->dataCapacity*4);
if(aliasData==NULL) {
return NULL;
}
- isDataAllocated=TRUE;
+ isDataAllocated=true;
}
trie=utrie_open(fillIn, aliasData, aliasDataCapacity,
@@ -216,7 +216,7 @@ utrie_getDataBlock(UNewTrie *trie, UChar32 c) {
}
/**
- * @return TRUE if the value was successfully set
+ * @return true if the value was successfully set
*/
U_CAPI UBool U_EXPORT2
utrie_set32(UNewTrie *trie, UChar32 c, uint32_t value) {
@@ -224,16 +224,16 @@ utrie_set32(UNewTrie *trie, UChar32 c, uint32_t value) {
/* valid, uncompacted trie and valid c? */
if(trie==NULL || trie->isCompacted || (uint32_t)c>0x10ffff) {
- return FALSE;
+ return false;
}
block=utrie_getDataBlock(trie, c);
if(block<0) {
- return FALSE;
+ return false;
}
trie->data[block+(c&UTRIE_MASK)]=value;
- return TRUE;
+ return true;
}
U_CAPI uint32_t U_EXPORT2
@@ -243,7 +243,7 @@ utrie_get32(UNewTrie *trie, UChar32 c, UBool *pInBlockZero) {
/* valid, uncompacted trie and valid c? */
if(trie==NULL || trie->isCompacted || (uint32_t)c>0x10ffff) {
if(pInBlockZero!=NULL) {
- *pInBlockZero=TRUE;
+ *pInBlockZero=true;
}
return 0;
}
@@ -294,10 +294,10 @@ utrie_setRange32(UNewTrie *trie, UChar32 start, UChar32 limit, uint32_t value, U
if( trie==NULL || trie->isCompacted ||
(uint32_t)start>0x10ffff || (uint32_t)limit>0x110000 || start>limit
) {
- return FALSE;
+ return false;
}
if(start==limit) {
- return TRUE; /* nothing to do */
+ return true; /* nothing to do */
}
initialValue=trie->data[0];
@@ -307,7 +307,7 @@ utrie_setRange32(UNewTrie *trie, UChar32 start, UChar32 limit, uint32_t value, U
/* set partial block at [start..following block boundary[ */
block=utrie_getDataBlock(trie, start);
if(block<0) {
- return FALSE;
+ return false;
}
nextStart=(start+UTRIE_DATA_BLOCK_LENGTH)&~UTRIE_MASK;
@@ -318,7 +318,7 @@ utrie_setRange32(UNewTrie *trie, UChar32 start, UChar32 limit, uint32_t value, U
} else {
utrie_fillBlock(trie->data+block, start&UTRIE_MASK, limit&UTRIE_MASK,
value, initialValue, overwrite);
- return TRUE;
+ return true;
}
}
@@ -348,12 +348,12 @@ utrie_setRange32(UNewTrie *trie, UChar32 start, UChar32 limit, uint32_t value, U
/* create and set and fill the repeatBlock */
repeatBlock=utrie_getDataBlock(trie, start);
if(repeatBlock<0) {
- return FALSE;
+ return false;
}
/* set the negative block number to indicate that it is a repeat block */
trie->index[start>>UTRIE_SHIFT]=-repeatBlock;
- utrie_fillBlock(trie->data+repeatBlock, 0, UTRIE_DATA_BLOCK_LENGTH, value, initialValue, TRUE);
+ utrie_fillBlock(trie->data+repeatBlock, 0, UTRIE_DATA_BLOCK_LENGTH, value, initialValue, true);
}
}
@@ -364,13 +364,13 @@ utrie_setRange32(UNewTrie *trie, UChar32 start, UChar32 limit, uint32_t value, U
/* set partial block at [last block boundary..limit[ */
block=utrie_getDataBlock(trie, start);
if(block<0) {
- return FALSE;
+ return false;
}
utrie_fillBlock(trie->data+block, 0, rest, value, initialValue, overwrite);
}
- return TRUE;
+ return true;
}
static int32_t
@@ -437,7 +437,7 @@ utrie_fold(UNewTrie *trie, UNewTrieGetFoldedValue *getFoldedValue, UErrorCode *p
*pErrorCode=U_MEMORY_ALLOCATION_ERROR;
return;
}
- utrie_fillBlock(trie->data+block, 0, UTRIE_DATA_BLOCK_LENGTH, trie->leadUnitValue, trie->data[0], TRUE);
+ utrie_fillBlock(trie->data+block, 0, UTRIE_DATA_BLOCK_LENGTH, trie->leadUnitValue, trie->data[0], true);
block=-block; /* negative block number to indicate that it is a repeat block */
}
for(c=(0xd800>>UTRIE_SHIFT); c<(0xdc00>>UTRIE_SHIFT); ++c) {
@@ -579,7 +579,7 @@ _findSameDataBlock(const uint32_t *data, int32_t dataLength,
*
* The compaction
* - removes blocks that are identical with earlier ones
- * - overlaps adjacent blocks as much as possible (if overlap==TRUE)
+ * - overlaps adjacent blocks as much as possible (if overlap==true)
* - moves blocks in steps of the data granularity
* - moves and overlaps blocks that overlap with multiple values in the overlap region
*
@@ -766,15 +766,15 @@ utrie_serialize(UNewTrie *trie, void *dt, int32_t capacity,
/* fold and compact if necessary, also checks that indexLength is within limits */
if(!trie->isCompacted) {
/* compact once without overlap to improve folding */
- utrie_compact(trie, FALSE, pErrorCode);
+ utrie_compact(trie, false, pErrorCode);
/* fold the supplementary part of the index array */
utrie_fold(trie, getFoldedValue, pErrorCode);
/* compact again with overlap for minimum data array length */
- utrie_compact(trie, TRUE, pErrorCode);
+ utrie_compact(trie, true, pErrorCode);
- trie->isCompacted=TRUE;
+ trie->isCompacted=true;
if(U_FAILURE(*pErrorCode)) {
return 0;
}
@@ -966,7 +966,7 @@ utrie_unserializeDummy(UTrie *trie,
return actualLength;
}
- trie->isLatin1Linear=TRUE;
+ trie->isLatin1Linear=true;
trie->initialValue=initialValue;
/* fill the index and data arrays */
diff --git a/thirdparty/icu4c/common/utrie2.cpp b/thirdparty/icu4c/common/utrie2.cpp
index 24ef5782c9..0fb74ba1c3 100644
--- a/thirdparty/icu4c/common/utrie2.cpp
+++ b/thirdparty/icu4c/common/utrie2.cpp
@@ -66,7 +66,7 @@ utrie2_get32(const UTrie2 *trie, UChar32 c) {
} else if((uint32_t)c>0x10ffff) {
return trie->errorValue;
} else {
- return get32(trie->newTrie, c, TRUE);
+ return get32(trie->newTrie, c, true);
}
}
@@ -80,7 +80,7 @@ utrie2_get32FromLeadSurrogateCodeUnit(const UTrie2 *trie, UChar32 c) {
} else if(trie->data32!=NULL) {
return UTRIE2_GET32_FROM_U16_SINGLE_LEAD(trie, c);
} else {
- return get32(trie->newTrie, c, FALSE);
+ return get32(trie->newTrie, c, false);
}
}
@@ -200,7 +200,7 @@ utrie2_openFromSerialized(UTrie2ValueBits valueBits,
uprv_memcpy(trie, &tempTrie, sizeof(tempTrie));
trie->memory=(uint32_t *)data;
trie->length=actualLength;
- trie->isMemoryOwned=FALSE;
+ trie->isMemoryOwned=false;
#ifdef UTRIE2_DEBUG
trie->name="fromSerialized";
#endif
@@ -279,7 +279,7 @@ utrie2_openDummy(UTrie2ValueBits valueBits,
return 0;
}
trie->length=length;
- trie->isMemoryOwned=TRUE;
+ trie->isMemoryOwned=true;
/* set the UTrie2 fields */
if(valueBits==UTRIE2_16_VALUE_BITS) {
diff --git a/thirdparty/icu4c/common/utrie2.h b/thirdparty/icu4c/common/utrie2.h
index d1e1e15a6e..ace52cce37 100644
--- a/thirdparty/icu4c/common/utrie2.h
+++ b/thirdparty/icu4c/common/utrie2.h
@@ -591,8 +591,8 @@ U_CDECL_END
#ifdef __cplusplus
+#include "unicode/uobject.h"
#include "unicode/utf.h"
-#include "mutex.h"
U_NAMESPACE_BEGIN
diff --git a/thirdparty/icu4c/common/utrie2_builder.cpp b/thirdparty/icu4c/common/utrie2_builder.cpp
index 8de824cc3d..2513332b80 100644
--- a/thirdparty/icu4c/common/utrie2_builder.cpp
+++ b/thirdparty/icu4c/common/utrie2_builder.cpp
@@ -152,7 +152,7 @@ utrie2_open(uint32_t initialValue, uint32_t errorValue, UErrorCode *pErrorCode)
newTrie->errorValue=errorValue;
newTrie->highStart=0x110000;
newTrie->firstFreeBlock=0; /* no free block in the list */
- newTrie->isCompacted=FALSE;
+ newTrie->isCompacted=false;
/*
* preallocate and reset
@@ -317,7 +317,7 @@ utrie2_clone(const UTrie2 *other, UErrorCode *pErrorCode) {
if(other->memory!=NULL) {
trie->memory=uprv_malloc(other->length);
if(trie->memory!=NULL) {
- trie->isMemoryOwned=TRUE;
+ trie->isMemoryOwned=true;
uprv_memcpy(trie->memory, other->memory, other->length);
/* make the clone's pointers point to its own memory */
@@ -357,11 +357,11 @@ copyEnumRange(const void *context, UChar32 start, UChar32 end, uint32_t value) {
if(start==end) {
utrie2_set32(nt->trie, start, value, &nt->errorCode);
} else {
- utrie2_setRange32(nt->trie, start, end, value, TRUE, &nt->errorCode);
+ utrie2_setRange32(nt->trie, start, end, value, true, &nt->errorCode);
}
return U_SUCCESS(nt->errorCode);
} else {
- return TRUE;
+ return true;
}
}
@@ -422,7 +422,7 @@ utrie2_cloneAsThawed(const UTrie2 *other, UErrorCode *pErrorCode) {
if(U_FAILURE(*pErrorCode)) {
return NULL;
}
- context.exclusiveLimit=FALSE;
+ context.exclusiveLimit=false;
context.errorCode=*pErrorCode;
utrie2_enum(other, NULL, copyEnumRange, &context);
*pErrorCode=context.errorCode;
@@ -461,7 +461,7 @@ utrie2_fromUTrie(const UTrie *trie1, uint32_t errorValue, UErrorCode *pErrorCode
if(U_FAILURE(*pErrorCode)) {
return NULL;
}
- context.exclusiveLimit=TRUE;
+ context.exclusiveLimit=true;
context.errorCode=*pErrorCode;
utrie_enum(trie1, NULL, copyEnumRange, &context);
*pErrorCode=context.errorCode;
@@ -649,7 +649,7 @@ getDataBlock(UNewTrie2 *trie, UChar32 c, UBool forLSCP) {
}
/**
- * @return TRUE if the value was successfully set
+ * @return true if the value was successfully set
*/
static void
set32(UNewTrie2 *trie,
@@ -683,7 +683,7 @@ utrie2_set32(UTrie2 *trie, UChar32 c, uint32_t value, UErrorCode *pErrorCode) {
*pErrorCode=U_ILLEGAL_ARGUMENT_ERROR;
return;
}
- set32(trie->newTrie, c, TRUE, value, pErrorCode);
+ set32(trie->newTrie, c, true, value, pErrorCode);
}
U_CAPI void U_EXPORT2
@@ -697,7 +697,7 @@ utrie2_set32ForLeadSurrogateCodeUnit(UTrie2 *trie,
*pErrorCode=U_ILLEGAL_ARGUMENT_ERROR;
return;
}
- set32(trie->newTrie, c, FALSE, value, pErrorCode);
+ set32(trie->newTrie, c, false, value, pErrorCode);
}
static void
@@ -709,7 +709,7 @@ writeBlock(uint32_t *block, uint32_t value) {
}
/**
- * initialValue is ignored if overwrite=TRUE
+ * initialValue is ignored if overwrite=true
* @internal
*/
static void
@@ -771,7 +771,7 @@ utrie2_setRange32(UTrie2 *trie,
UChar32 nextStart;
/* set partial block at [start..following block boundary[ */
- block=getDataBlock(newTrie, start, TRUE);
+ block=getDataBlock(newTrie, start, true);
if(block<0) {
*pErrorCode=U_MEMORY_ALLOCATION_ERROR;
return;
@@ -804,15 +804,15 @@ utrie2_setRange32(UTrie2 *trie,
while(start<limit) {
int32_t i2;
- UBool setRepeatBlock=FALSE;
+ UBool setRepeatBlock=false;
- if(value==newTrie->initialValue && isInNullBlock(newTrie, start, TRUE)) {
+ if(value==newTrie->initialValue && isInNullBlock(newTrie, start, true)) {
start+=UTRIE2_DATA_BLOCK_LENGTH; /* nothing to do */
continue;
}
/* get index value */
- i2=getIndex2Block(newTrie, start, TRUE);
+ i2=getIndex2Block(newTrie, start, true);
if(i2<0) {
*pErrorCode=U_INTERNAL_PROGRAM_ERROR;
return;
@@ -827,7 +827,7 @@ utrie2_setRange32(UTrie2 *trie,
* protected (ASCII-linear or 2-byte UTF-8) block:
* replace with the repeatBlock.
*/
- setRepeatBlock=TRUE;
+ setRepeatBlock=true;
} else {
/* !overwrite, or protected block: just write the values into this block */
fillBlock(newTrie->data+block,
@@ -851,14 +851,14 @@ utrie2_setRange32(UTrie2 *trie,
* and if we overwrite any data or if the data is all initial values
* (which is the same as the block being the null block, see above).
*/
- setRepeatBlock=TRUE;
+ setRepeatBlock=true;
}
if(setRepeatBlock) {
if(repeatBlock>=0) {
setIndex2Entry(newTrie, i2, repeatBlock);
} else {
/* create and set and fill the repeatBlock */
- repeatBlock=getDataBlock(newTrie, start, TRUE);
+ repeatBlock=getDataBlock(newTrie, start, true);
if(repeatBlock<0) {
*pErrorCode=U_MEMORY_ALLOCATION_ERROR;
return;
@@ -872,7 +872,7 @@ utrie2_setRange32(UTrie2 *trie,
if(rest>0) {
/* set partial block at [last block boundary..limit[ */
- block=getDataBlock(newTrie, start, TRUE);
+ block=getDataBlock(newTrie, start, true);
if(block<0) {
*pErrorCode=U_MEMORY_ALLOCATION_ERROR;
return;
@@ -1019,7 +1019,7 @@ findHighStart(UNewTrie2 *trie, uint32_t highValue) {
*
* The compaction
* - removes blocks that are identical with earlier ones
- * - overlaps adjacent blocks as much as possible (if overlap==TRUE)
+ * - overlaps adjacent blocks as much as possible (if overlap==true)
* - moves blocks in steps of the data granularity
* - moves and overlaps blocks that overlap with multiple values in the overlap region
*
@@ -1255,7 +1255,7 @@ compactTrie(UTrie2 *trie, UErrorCode *pErrorCode) {
if(highStart<0x110000) {
/* Blank out [highStart..10ffff] to release associated data blocks. */
suppHighStart= highStart<=0x10000 ? 0x10000 : highStart;
- utrie2_setRange32(trie, suppHighStart, 0x10ffff, trie->initialValue, TRUE, pErrorCode);
+ utrie2_setRange32(trie, suppHighStart, 0x10ffff, trie->initialValue, true, pErrorCode);
if(U_FAILURE(*pErrorCode)) {
return;
}
@@ -1281,7 +1281,7 @@ compactTrie(UTrie2 *trie, UErrorCode *pErrorCode) {
newTrie->data[newTrie->dataLength++]=trie->initialValue;
}
- newTrie->isCompacted=TRUE;
+ newTrie->isCompacted=true;
}
/* serialization ------------------------------------------------------------ */
@@ -1382,7 +1382,7 @@ utrie2_freeze(UTrie2 *trie, UTrie2ValueBits valueBits, UErrorCode *pErrorCode) {
return;
}
trie->length=length;
- trie->isMemoryOwned=TRUE;
+ trie->isMemoryOwned=true;
trie->indexLength=allIndexesLength;
trie->dataLength=newTrie->dataLength;
diff --git a/thirdparty/icu4c/common/utrie_swap.cpp b/thirdparty/icu4c/common/utrie_swap.cpp
index 6e8b138394..b01b94601e 100644
--- a/thirdparty/icu4c/common/utrie_swap.cpp
+++ b/thirdparty/icu4c/common/utrie_swap.cpp
@@ -294,8 +294,8 @@ namespace {
* @param data a pointer to 32-bit-aligned memory containing the serialized form of a trie
* @param length the number of bytes available at data;
* can be more than necessary (see return value)
- * @param anyEndianOk If FALSE, only platform-endian serialized forms are recognized.
- * If TRUE, opposite-endian serialized forms are recognized as well.
+ * @param anyEndianOk If false, only platform-endian serialized forms are recognized.
+ * If true, opposite-endian serialized forms are recognized as well.
* @return the trie version of the serialized form, or 0 if it is not
* recognized as a serialized trie
*/
@@ -334,7 +334,7 @@ utrie_swapAnyVersion(const UDataSwapper *ds,
const void *inData, int32_t length, void *outData,
UErrorCode *pErrorCode) {
if(U_FAILURE(*pErrorCode)) { return 0; }
- switch(getVersion(inData, length, TRUE)) {
+ switch(getVersion(inData, length, true)) {
case 1:
return utrie_swap(ds, inData, length, outData, pErrorCode);
case 2:
diff --git a/thirdparty/icu4c/common/uts46.cpp b/thirdparty/icu4c/common/uts46.cpp
index 6f93070339..10a4f56597 100644
--- a/thirdparty/icu4c/common/uts46.cpp
+++ b/thirdparty/icu4c/common/uts46.cpp
@@ -53,10 +53,10 @@ isASCIIString(const UnicodeString &dest) {
const UChar *limit=s+dest.length();
while(s<limit) {
if(*s++>0x7f) {
- return FALSE;
+ return false;
}
}
- return TRUE;
+ return true;
}
static UBool
@@ -224,19 +224,19 @@ UTS46::~UTS46() {}
UnicodeString &
UTS46::labelToASCII(const UnicodeString &label, UnicodeString &dest,
IDNAInfo &info, UErrorCode &errorCode) const {
- return process(label, TRUE, TRUE, dest, info, errorCode);
+ return process(label, true, true, dest, info, errorCode);
}
UnicodeString &
UTS46::labelToUnicode(const UnicodeString &label, UnicodeString &dest,
IDNAInfo &info, UErrorCode &errorCode) const {
- return process(label, TRUE, FALSE, dest, info, errorCode);
+ return process(label, true, false, dest, info, errorCode);
}
UnicodeString &
UTS46::nameToASCII(const UnicodeString &name, UnicodeString &dest,
IDNAInfo &info, UErrorCode &errorCode) const {
- process(name, FALSE, TRUE, dest, info, errorCode);
+ process(name, false, true, dest, info, errorCode);
if( dest.length()>=254 && (info.errors&UIDNA_ERROR_DOMAIN_NAME_TOO_LONG)==0 &&
isASCIIString(dest) &&
(dest.length()>254 || dest[253]!=0x2e)
@@ -249,31 +249,31 @@ UTS46::nameToASCII(const UnicodeString &name, UnicodeString &dest,
UnicodeString &
UTS46::nameToUnicode(const UnicodeString &name, UnicodeString &dest,
IDNAInfo &info, UErrorCode &errorCode) const {
- return process(name, FALSE, FALSE, dest, info, errorCode);
+ return process(name, false, false, dest, info, errorCode);
}
void
UTS46::labelToASCII_UTF8(StringPiece label, ByteSink &dest,
IDNAInfo &info, UErrorCode &errorCode) const {
- processUTF8(label, TRUE, TRUE, dest, info, errorCode);
+ processUTF8(label, true, true, dest, info, errorCode);
}
void
UTS46::labelToUnicodeUTF8(StringPiece label, ByteSink &dest,
IDNAInfo &info, UErrorCode &errorCode) const {
- processUTF8(label, TRUE, FALSE, dest, info, errorCode);
+ processUTF8(label, true, false, dest, info, errorCode);
}
void
UTS46::nameToASCII_UTF8(StringPiece name, ByteSink &dest,
IDNAInfo &info, UErrorCode &errorCode) const {
- processUTF8(name, FALSE, TRUE, dest, info, errorCode);
+ processUTF8(name, false, true, dest, info, errorCode);
}
void
UTS46::nameToUnicodeUTF8(StringPiece name, ByteSink &dest,
IDNAInfo &info, UErrorCode &errorCode) const {
- processUTF8(name, FALSE, FALSE, dest, info, errorCode);
+ processUTF8(name, false, false, dest, info, errorCode);
}
// UTS #46 data for ASCII characters.
@@ -561,7 +561,7 @@ UTS46::processUnicode(const UnicodeString &src,
} else if(c<0xdf) {
// pass
} else if(c<=0x200d && (c==0xdf || c==0x3c2 || c>=0x200c)) {
- info.isTransDiff=TRUE;
+ info.isTransDiff=true;
if(doMapDevChars) {
destLength=mapDevChars(dest, labelStart, labelLimit, errorCode);
if(U_FAILURE(errorCode)) {
@@ -569,7 +569,7 @@ UTS46::processUnicode(const UnicodeString &src,
}
destArray=dest.getBuffer();
// All deviation characters have been mapped, no need to check for them again.
- doMapDevChars=FALSE;
+ doMapDevChars=false;
// Do not increment labelLimit in case c was removed.
continue;
}
@@ -610,14 +610,14 @@ UTS46::mapDevChars(UnicodeString &dest, int32_t labelStart, int32_t mappingStart
return length;
}
int32_t capacity=dest.getCapacity();
- UBool didMapDevChars=FALSE;
+ UBool didMapDevChars=false;
int32_t readIndex=mappingStart, writeIndex=mappingStart;
do {
UChar c=s[readIndex++];
switch(c) {
case 0xdf:
// Map sharp s to ss.
- didMapDevChars=TRUE;
+ didMapDevChars=true;
s[writeIndex++]=0x73; // Replace sharp s with first s.
// Insert second s and account for possible buffer reallocation.
if(writeIndex==readIndex) {
@@ -637,12 +637,12 @@ UTS46::mapDevChars(UnicodeString &dest, int32_t labelStart, int32_t mappingStart
++length;
break;
case 0x3c2: // Map final sigma to nonfinal sigma.
- didMapDevChars=TRUE;
+ didMapDevChars=true;
s[writeIndex++]=0x3c3;
break;
case 0x200c: // Ignore/remove ZWNJ.
case 0x200d: // Ignore/remove ZWJ.
- didMapDevChars=TRUE;
+ didMapDevChars=true;
--length;
break;
default:
@@ -724,7 +724,7 @@ UTS46::processLabel(UnicodeString &dest,
info.labelErrors|=UIDNA_ERROR_INVALID_ACE_LABEL;
return markBadACELabel(dest, labelStart, labelLength, toASCII, info, errorCode);
}
- wasPunycode=TRUE;
+ wasPunycode=true;
UChar *unicodeBuffer=fromPunycode.getBuffer(-1); // capacity==-1: most labels should fit
if(unicodeBuffer==NULL) {
// Should never occur if we used capacity==-1 which uses the internal buffer.
@@ -772,7 +772,7 @@ UTS46::processLabel(UnicodeString &dest,
labelStart=0;
labelLength=fromPunycode.length();
} else {
- wasPunycode=FALSE;
+ wasPunycode=false;
labelString=&dest;
}
// Validity check
@@ -932,8 +932,8 @@ UTS46::markBadACELabel(UnicodeString &dest,
return 0;
}
UBool disallowNonLDHDot=(options&UIDNA_USE_STD3_RULES)!=0;
- UBool isASCII=TRUE;
- UBool onlyLDH=TRUE;
+ UBool isASCII=true;
+ UBool onlyLDH=true;
const UChar *label=dest.getBuffer()+labelStart;
const UChar *limit=label+labelLength;
// Start after the initial "xn--".
@@ -944,16 +944,16 @@ UTS46::markBadACELabel(UnicodeString &dest,
if(c==0x2e) {
info.labelErrors|=UIDNA_ERROR_LABEL_HAS_DOT;
*s=0xfffd;
- isASCII=onlyLDH=FALSE;
+ isASCII=onlyLDH=false;
} else if(asciiData[c]<0) {
- onlyLDH=FALSE;
+ onlyLDH=false;
if(disallowNonLDHDot) {
*s=0xfffd;
- isASCII=FALSE;
+ isASCII=false;
}
}
} else {
- isASCII=onlyLDH=FALSE;
+ isASCII=onlyLDH=false;
}
}
if(onlyLDH) {
@@ -1008,7 +1008,7 @@ UTS46::checkLabelBiDi(const UChar *label, int32_t labelLength, IDNAInfo &info) c
// or AL. If it has the R or AL property, it is an RTL label; if it
// has the L property, it is an LTR label.
if((firstMask&~L_R_AL_MASK)!=0) {
- info.isOkBiDi=FALSE;
+ info.isOkBiDi=false;
}
// Get the directionality of the last non-NSM character.
uint32_t lastMask;
@@ -1034,7 +1034,7 @@ UTS46::checkLabelBiDi(const UChar *label, int32_t labelLength, IDNAInfo &info) c
(lastMask&~L_EN_MASK)!=0 :
(lastMask&~R_AL_EN_AN_MASK)!=0
) {
- info.isOkBiDi=FALSE;
+ info.isOkBiDi=false;
}
// Add the directionalities of the intervening characters.
uint32_t mask=firstMask|lastMask;
@@ -1046,18 +1046,18 @@ UTS46::checkLabelBiDi(const UChar *label, int32_t labelLength, IDNAInfo &info) c
// 5. In an LTR label, only characters with the BIDI properties L, EN,
// ES, CS, ET, ON, BN and NSM are allowed.
if((mask&~L_EN_ES_CS_ET_ON_BN_NSM_MASK)!=0) {
- info.isOkBiDi=FALSE;
+ info.isOkBiDi=false;
}
} else {
// 2. In an RTL label, only characters with the BIDI properties R, AL,
// AN, EN, ES, CS, ET, ON, BN and NSM are allowed.
if((mask&~R_AL_AN_EN_ES_CS_ET_ON_BN_NSM_MASK)!=0) {
- info.isOkBiDi=FALSE;
+ info.isOkBiDi=false;
}
// 4. In an RTL label, if an EN is present, no AN may be present, and
// vice versa.
if((mask&EN_AN_MASK)==EN_AN_MASK) {
- info.isOkBiDi=FALSE;
+ info.isOkBiDi=false;
}
}
// An RTL label is a label that contains at least one character of type
@@ -1067,7 +1067,7 @@ UTS46::checkLabelBiDi(const UChar *label, int32_t labelLength, IDNAInfo &info) c
// The following rule, consisting of six conditions, applies to labels
// in BIDI domain names.
if((mask&R_AL_AN_MASK)!=0) {
- info.isBiDi=TRUE;
+ info.isBiDi=true;
}
}
@@ -1094,23 +1094,23 @@ isASCIIOkBiDi(const UChar *s, int32_t length) {
c=s[i-1];
if(!(0x61<=c && c<=0x7a) && !(0x30<=c && c<=0x39)) {
// Last character in the label is not an L or EN.
- return FALSE;
+ return false;
}
}
labelStart=i+1;
} else if(i==labelStart) {
if(!(0x61<=c && c<=0x7a)) {
// First character in the label is not an L.
- return FALSE;
+ return false;
}
} else {
if(c<=0x20 && (c>=0x1c || (9<=c && c<=0xd))) {
// Intermediate character in the label is a B, S or WS.
- return FALSE;
+ return false;
}
}
}
- return TRUE;
+ return true;
}
// UTF-8 version, called for source ASCII prefix.
@@ -1126,23 +1126,23 @@ isASCIIOkBiDi(const char *s, int32_t length) {
c=s[i-1];
if(!(0x61<=c && c<=0x7a) && !(0x41<=c && c<=0x5a) && !(0x30<=c && c<=0x39)) {
// Last character in the label is not an L or EN.
- return FALSE;
+ return false;
}
}
labelStart=i+1;
} else if(i==labelStart) {
if(!(0x61<=c && c<=0x7a) && !(0x41<=c && c<=0x5a)) {
// First character in the label is not an L.
- return FALSE;
+ return false;
}
} else {
if(c<=0x20 && (c>=0x1c || (9<=c && c<=0xd))) {
// Intermediate character in the label is a B, S or WS.
- return FALSE;
+ return false;
}
}
}
- return TRUE;
+ return true;
}
UBool
@@ -1158,7 +1158,7 @@ UTS46::isLabelOkContextJ(const UChar *label, int32_t labelLength) const {
// If RegExpMatch((Joining_Type:{L,D})(Joining_Type:T)*\u200C
// (Joining_Type:T)*(Joining_Type:{R,D})) Then True;
if(i==0) {
- return FALSE;
+ return false;
}
UChar32 c;
int32_t j=i;
@@ -1171,19 +1171,19 @@ UTS46::isLabelOkContextJ(const UChar *label, int32_t labelLength) const {
UJoiningType type=ubidi_getJoiningType(c);
if(type==U_JT_TRANSPARENT) {
if(j==0) {
- return FALSE;
+ return false;
}
U16_PREV_UNSAFE(label, j, c);
} else if(type==U_JT_LEFT_JOINING || type==U_JT_DUAL_JOINING) {
break; // precontext fulfilled
} else {
- return FALSE;
+ return false;
}
}
// check postcontext (Joining_Type:T)*(Joining_Type:{R,D})
for(j=i+1;;) {
if(j==labelLength) {
- return FALSE;
+ return false;
}
U16_NEXT_UNSAFE(label, j, c);
UJoiningType type=ubidi_getJoiningType(c);
@@ -1192,7 +1192,7 @@ UTS46::isLabelOkContextJ(const UChar *label, int32_t labelLength) const {
} else if(type==U_JT_RIGHT_JOINING || type==U_JT_DUAL_JOINING) {
break; // postcontext fulfilled
} else {
- return FALSE;
+ return false;
}
}
} else if(label[i]==0x200d) {
@@ -1201,17 +1201,17 @@ UTS46::isLabelOkContextJ(const UChar *label, int32_t labelLength) const {
// False;
// If Canonical_Combining_Class(Before(cp)) .eq. Virama Then True;
if(i==0) {
- return FALSE;
+ return false;
}
UChar32 c;
int32_t j=i;
U16_PREV_UNSAFE(label, j, c);
if(uts46Norm2.getCombiningClass(c)!=9) {
- return FALSE;
+ return false;
}
}
}
- return TRUE;
+ return true;
}
void
@@ -1338,23 +1338,23 @@ checkArgs(const void *label, int32_t length,
void *dest, int32_t capacity,
UIDNAInfo *pInfo, UErrorCode *pErrorCode) {
if(U_FAILURE(*pErrorCode)) {
- return FALSE;
+ return false;
}
// sizeof(UIDNAInfo)=16 in the first API version.
if(pInfo==NULL || pInfo->size<16) {
*pErrorCode=U_ILLEGAL_ARGUMENT_ERROR;
- return FALSE;
+ return false;
}
if( (label==NULL ? length!=0 : length<-1) ||
(dest==NULL ? capacity!=0 : capacity<0) ||
(dest==label && label!=NULL)
) {
*pErrorCode=U_ILLEGAL_ARGUMENT_ERROR;
- return FALSE;
+ return false;
}
// Set all *pInfo bytes to 0 except for the size field itself.
uprv_memset(&pInfo->size+1, 0, pInfo->size-sizeof(pInfo->size));
- return TRUE;
+ return true;
}
static void
diff --git a/thirdparty/icu4c/common/uvector.cpp b/thirdparty/icu4c/common/uvector.cpp
index 844463921e..729314ae95 100644
--- a/thirdparty/icu4c/common/uvector.cpp
+++ b/thirdparty/icu4c/common/uvector.cpp
@@ -193,40 +193,40 @@ int32_t UVector::elementAti(int32_t index) const {
UBool UVector::containsAll(const UVector& other) const {
for (int32_t i=0; i<other.size(); ++i) {
if (indexOf(other.elements[i]) < 0) {
- return FALSE;
+ return false;
}
}
- return TRUE;
+ return true;
}
UBool UVector::containsNone(const UVector& other) const {
for (int32_t i=0; i<other.size(); ++i) {
if (indexOf(other.elements[i]) >= 0) {
- return FALSE;
+ return false;
}
}
- return TRUE;
+ return true;
}
UBool UVector::removeAll(const UVector& other) {
- UBool changed = FALSE;
+ UBool changed = false;
for (int32_t i=0; i<other.size(); ++i) {
int32_t j = indexOf(other.elements[i]);
if (j >= 0) {
removeElementAt(j);
- changed = TRUE;
+ changed = true;
}
}
return changed;
}
UBool UVector::retainAll(const UVector& other) {
- UBool changed = FALSE;
+ UBool changed = false;
for (int32_t j=size()-1; j>=0; --j) {
int32_t i = other.indexOf(elements[j]);
if (i < 0) {
removeElementAt(j);
- changed = TRUE;
+ changed = true;
}
}
return changed;
@@ -243,9 +243,9 @@ UBool UVector::removeElement(void* obj) {
int32_t i = indexOf(obj);
if (i >= 0) {
removeElementAt(i);
- return TRUE;
+ return true;
}
- return FALSE;
+ return false;
}
void UVector::removeAllElements(void) {
@@ -263,12 +263,12 @@ UBool UVector::equals(const UVector &other) const {
int i;
if (this->count != other.count) {
- return FALSE;
+ return false;
}
if (comparer == nullptr) {
for (i=0; i<count; i++) {
if (elements[i].pointer != other.elements[i].pointer) {
- return FALSE;
+ return false;
}
}
} else {
@@ -276,11 +276,11 @@ UBool UVector::equals(const UVector &other) const {
for (i=0; i<count; i++) {
key.pointer = &other.elements[i];
if (!(*comparer)(key, elements[i])) {
- return FALSE;
+ return false;
}
}
}
- return TRUE;
+ return true;
}
@@ -525,7 +525,7 @@ sortiComparator(const void * /*context */, const void *left, const void *right)
void UVector::sorti(UErrorCode &ec) {
if (U_SUCCESS(ec)) {
uprv_sortArray(elements, count, sizeof(UElement),
- sortiComparator, nullptr, FALSE, &ec);
+ sortiComparator, nullptr, false, &ec);
}
}
@@ -547,7 +547,7 @@ void UVector::sorti(UErrorCode &ec) {
void UVector::sort(UElementComparator *compare, UErrorCode &ec) {
if (U_SUCCESS(ec)) {
uprv_sortArray(elements, count, sizeof(UElement),
- sortComparator, &compare, FALSE, &ec);
+ sortComparator, &compare, false, &ec);
}
}
@@ -558,7 +558,7 @@ void UVector::sort(UElementComparator *compare, UErrorCode &ec) {
void UVector::sortWithUComparator(UComparator *compare, const void *context, UErrorCode &ec) {
if (U_SUCCESS(ec)) {
uprv_sortArray(elements, count, sizeof(UElement),
- compare, context, TRUE, &ec);
+ compare, context, true, &ec);
}
}
diff --git a/thirdparty/icu4c/common/uvectr32.cpp b/thirdparty/icu4c/common/uvectr32.cpp
index 2b4d0b8a75..952f51792b 100644
--- a/thirdparty/icu4c/common/uvectr32.cpp
+++ b/thirdparty/icu4c/common/uvectr32.cpp
@@ -117,40 +117,40 @@ void UVector32::insertElementAt(int32_t elem, int32_t index, UErrorCode &status)
UBool UVector32::containsAll(const UVector32& other) const {
for (int32_t i=0; i<other.size(); ++i) {
if (indexOf(other.elements[i]) < 0) {
- return FALSE;
+ return false;
}
}
- return TRUE;
+ return true;
}
UBool UVector32::containsNone(const UVector32& other) const {
for (int32_t i=0; i<other.size(); ++i) {
if (indexOf(other.elements[i]) >= 0) {
- return FALSE;
+ return false;
}
}
- return TRUE;
+ return true;
}
UBool UVector32::removeAll(const UVector32& other) {
- UBool changed = FALSE;
+ UBool changed = false;
for (int32_t i=0; i<other.size(); ++i) {
int32_t j = indexOf(other.elements[i]);
if (j >= 0) {
removeElementAt(j);
- changed = TRUE;
+ changed = true;
}
}
return changed;
}
UBool UVector32::retainAll(const UVector32& other) {
- UBool changed = FALSE;
+ UBool changed = false;
for (int32_t j=size()-1; j>=0; --j) {
int32_t i = other.indexOf(elements[j]);
if (i < 0) {
removeElementAt(j);
- changed = TRUE;
+ changed = true;
}
}
return changed;
@@ -173,14 +173,14 @@ UBool UVector32::equals(const UVector32 &other) const {
int i;
if (this->count != other.count) {
- return FALSE;
+ return false;
}
for (i=0; i<count; i++) {
if (elements[i] != other.elements[i]) {
- return FALSE;
+ return false;
}
}
- return TRUE;
+ return true;
}
@@ -199,22 +199,22 @@ int32_t UVector32::indexOf(int32_t key, int32_t startIndex) const {
UBool UVector32::expandCapacity(int32_t minimumCapacity, UErrorCode &status) {
if (U_FAILURE(status)) {
- return FALSE;
+ return false;
}
if (minimumCapacity < 0) {
status = U_ILLEGAL_ARGUMENT_ERROR;
- return FALSE;
+ return false;
}
if (capacity >= minimumCapacity) {
- return TRUE;
+ return true;
}
if (maxCapacity>0 && minimumCapacity>maxCapacity) {
status = U_BUFFER_OVERFLOW_ERROR;
- return FALSE;
+ return false;
}
if (capacity > (INT32_MAX - 1) / 2) { // integer overflow check
status = U_ILLEGAL_ARGUMENT_ERROR;
- return FALSE;
+ return false;
}
int32_t newCap = capacity * 2;
if (newCap < minimumCapacity) {
@@ -226,17 +226,17 @@ UBool UVector32::expandCapacity(int32_t minimumCapacity, UErrorCode &status) {
if (newCap > (int32_t)(INT32_MAX / sizeof(int32_t))) { // integer overflow check
// We keep the original memory contents on bad minimumCapacity/maxCapacity.
status = U_ILLEGAL_ARGUMENT_ERROR;
- return FALSE;
+ return false;
}
int32_t* newElems = (int32_t *)uprv_realloc(elements, sizeof(int32_t)*newCap);
if (newElems == NULL) {
// We keep the original contents on the memory failure on realloc.
status = U_MEMORY_ALLOCATION_ERROR;
- return FALSE;
+ return false;
}
elements = newElems;
capacity = newCap;
- return TRUE;
+ return true;
}
void UVector32::setMaxCapacity(int32_t limit) {
diff --git a/thirdparty/icu4c/common/uvectr32.h b/thirdparty/icu4c/common/uvectr32.h
index ecefb7af3e..a7fada3833 100644
--- a/thirdparty/icu4c/common/uvectr32.h
+++ b/thirdparty/icu4c/common/uvectr32.h
@@ -187,10 +187,10 @@ private:
void _init(int32_t initialCapacity, UErrorCode &status);
// Disallow
- UVector32(const UVector32&);
+ UVector32(const UVector32&) = delete;
// Disallow
- UVector32& operator=(const UVector32&);
+ UVector32& operator=(const UVector32&) = delete;
// API Functions for Stack operations.
diff --git a/thirdparty/icu4c/common/uvectr64.cpp b/thirdparty/icu4c/common/uvectr64.cpp
index 57315c00ff..8bd5cd7839 100644
--- a/thirdparty/icu4c/common/uvectr64.cpp
+++ b/thirdparty/icu4c/common/uvectr64.cpp
@@ -117,22 +117,22 @@ void UVector64::removeAllElements(void) {
UBool UVector64::expandCapacity(int32_t minimumCapacity, UErrorCode &status) {
if (U_FAILURE(status)) {
- return FALSE;
+ return false;
}
if (minimumCapacity < 0) {
status = U_ILLEGAL_ARGUMENT_ERROR;
- return FALSE;
+ return false;
}
if (capacity >= minimumCapacity) {
- return TRUE;
+ return true;
}
if (maxCapacity>0 && minimumCapacity>maxCapacity) {
status = U_BUFFER_OVERFLOW_ERROR;
- return FALSE;
+ return false;
}
if (capacity > (INT32_MAX - 1) / 2) { // integer overflow check
status = U_ILLEGAL_ARGUMENT_ERROR;
- return FALSE;
+ return false;
}
int32_t newCap = capacity * 2;
if (newCap < minimumCapacity) {
@@ -144,17 +144,17 @@ UBool UVector64::expandCapacity(int32_t minimumCapacity, UErrorCode &status) {
if (newCap > (int32_t)(INT32_MAX / sizeof(int64_t))) { // integer overflow check
// We keep the original memory contents on bad minimumCapacity/maxCapacity.
status = U_ILLEGAL_ARGUMENT_ERROR;
- return FALSE;
+ return false;
}
int64_t* newElems = (int64_t *)uprv_realloc(elements, sizeof(int64_t)*newCap);
if (newElems == NULL) {
// We keep the original contents on the memory failure on realloc.
status = U_MEMORY_ALLOCATION_ERROR;
- return FALSE;
+ return false;
}
elements = newElems;
capacity = newCap;
- return TRUE;
+ return true;
}
void UVector64::setMaxCapacity(int32_t limit) {
diff --git a/thirdparty/icu4c/common/uvectr64.h b/thirdparty/icu4c/common/uvectr64.h
index ba3eca268d..070e2dd67d 100644
--- a/thirdparty/icu4c/common/uvectr64.h
+++ b/thirdparty/icu4c/common/uvectr64.h
@@ -176,10 +176,10 @@ private:
void _init(int32_t initialCapacity, UErrorCode &status);
// Disallow
- UVector64(const UVector64&);
+ UVector64(const UVector64&) = delete;
// Disallow
- UVector64& operator=(const UVector64&);
+ UVector64& operator=(const UVector64&) = delete;
// API Functions for Stack operations.
diff --git a/thirdparty/icu4c/common/wintz.cpp b/thirdparty/icu4c/common/wintz.cpp
index 84a29b8d36..1bc08ae654 100644
--- a/thirdparty/icu4c/common/wintz.cpp
+++ b/thirdparty/icu4c/common/wintz.cpp
@@ -274,7 +274,7 @@ uprv_detectWindowsTimeZone()
CharString winTZ;
UErrorCode status = U_ZERO_ERROR;
- winTZ.appendInvariantChars(UnicodeString(TRUE, windowsTimeZoneName, -1), status);
+ winTZ.appendInvariantChars(UnicodeString(true, windowsTimeZoneName, -1), status);
// Map Windows Timezone name (non-localized) to ICU timezone ID (~ Olson timezone id).
StackUResourceBundle winTZBundle;
diff --git a/thirdparty/icu4c/i18n/scriptset.cpp b/thirdparty/icu4c/i18n/scriptset.cpp
index 6a1db8c01c..236bf9d37f 100644
--- a/thirdparty/icu4c/i18n/scriptset.cpp
+++ b/thirdparty/icu4c/i18n/scriptset.cpp
@@ -55,11 +55,11 @@ bool ScriptSet::operator == (const ScriptSet &other) const {
UBool ScriptSet::test(UScriptCode script, UErrorCode &status) const {
if (U_FAILURE(status)) {
- return FALSE;
+ return false;
}
if (script < 0 || (int32_t)script >= SCRIPT_LIMIT) {
status = U_ILLEGAL_ARGUMENT_ERROR;
- return FALSE;
+ return false;
}
uint32_t index = script / 32;
uint32_t bit = 1 << (script & 31);
@@ -188,19 +188,19 @@ int32_t ScriptSet::nextSetBit(int32_t fromIndex) const {
UBool ScriptSet::isEmpty() const {
for (uint32_t i=0; i<UPRV_LENGTHOF(bits); i++) {
if (bits[i] != 0) {
- return FALSE;
+ return false;
}
}
- return TRUE;
+ return true;
}
UnicodeString &ScriptSet::displayScripts(UnicodeString &dest) const {
- UBool firstTime = TRUE;
+ UBool firstTime = true;
for (int32_t i = nextSetBit(0); i >= 0; i = nextSetBit(i + 1)) {
if (!firstTime) {
dest.append((UChar)0x20);
}
- firstTime = FALSE;
+ firstTime = false;
const char *scriptName = uscript_getShortName((UScriptCode(i)));
dest.append(UnicodeString(scriptName, -1, US_INV));
}
@@ -248,7 +248,7 @@ void ScriptSet::setScriptExtensions(UChar32 codePoint, UErrorCode& status) {
UErrorCode internalStatus = U_ZERO_ERROR;
int32_t script_count = -1;
- while (TRUE) {
+ while (true) {
script_count = uscript_getScriptExtensions(
codePoint, scripts.getAlias(), scripts.getCapacity(), &internalStatus);
if (internalStatus == U_BUFFER_OVERFLOW_ERROR) {
diff --git a/thirdparty/icu4c/i18n/scriptset.h b/thirdparty/icu4c/i18n/scriptset.h
index 51980ab7b3..df5cfdc748 100644
--- a/thirdparty/icu4c/i18n/scriptset.h
+++ b/thirdparty/icu4c/i18n/scriptset.h
@@ -83,4 +83,7 @@ uhash_hashScriptSet(const UElement key);
U_CAPI void U_EXPORT2
uhash_deleteScriptSet(void *obj);
-#endif // __SCRIPTSET_H__
+U_CAPI UBool U_EXPORT2
+uhash_equalsScriptSet(const UElement key1, const UElement key2);
+
+#endif // __SCRIPTSET_H__
diff --git a/thirdparty/icu4c/i18n/ucln_in.cpp b/thirdparty/icu4c/i18n/ucln_in.cpp
index f29cbe41dd..cdbd16a65e 100644
--- a/thirdparty/icu4c/i18n/ucln_in.cpp
+++ b/thirdparty/icu4c/i18n/ucln_in.cpp
@@ -45,7 +45,7 @@ static UBool U_CALLCONV i18n_cleanup(void)
#if !UCLN_NO_AUTO_CLEANUP && (defined(UCLN_AUTO_ATEXIT) || defined(UCLN_AUTO_LOCAL))
ucln_unRegisterAutomaticCleanup();
#endif
- return TRUE;
+ return true;
}
void ucln_i18n_registerCleanup(ECleanupI18NType type,
diff --git a/thirdparty/icu4c/i18n/unicode/uspoof.h b/thirdparty/icu4c/i18n/unicode/uspoof.h
index b674c91b2c..0aa887b706 100644
--- a/thirdparty/icu4c/i18n/unicode/uspoof.h
+++ b/thirdparty/icu4c/i18n/unicode/uspoof.h
@@ -35,7 +35,7 @@
/**
* \file
- * \brief Unicode Security and Spoofing Detection, C API.
+ * \brief C API: Unicode Security and Spoofing Detection
*
* <p>
* This class, based on <a href="http://unicode.org/reports/tr36">Unicode Technical Report #36</a> and
diff --git a/thirdparty/icu4c/i18n/uspoof.cpp b/thirdparty/icu4c/i18n/uspoof.cpp
index dd4618baa7..f894dc44ca 100644
--- a/thirdparty/icu4c/i18n/uspoof.cpp
+++ b/thirdparty/icu4c/i18n/uspoof.cpp
@@ -41,7 +41,7 @@ U_NAMESPACE_USE
static UnicodeSet *gInclusionSet = NULL;
static UnicodeSet *gRecommendedSet = NULL;
static const Normalizer2 *gNfdNormalizer = NULL;
-static UInitOnce gSpoofInitStaticsOnce = U_INITONCE_INITIALIZER;
+static UInitOnce gSpoofInitStaticsOnce {};
namespace {
@@ -53,13 +53,13 @@ uspoof_cleanup(void) {
gRecommendedSet = NULL;
gNfdNormalizer = NULL;
gSpoofInitStaticsOnce.reset();
- return TRUE;
+ return true;
}
void U_CALLCONV initializeStatics(UErrorCode &status) {
static const char16_t *inclusionPat =
- u"['\\-.\\:\\u00B7\\u0375\\u058A\\u05F3\\u05F4\\u06FD\\u06FE\\u0F0B\\u200C"
- u"\\u200D\\u2010\\u2019\\u2027\\u30A0\\u30FB]";
+ u"['\\-.\\:\\u00B7\\u0375\\u058A\\u05F3\\u05F4\\u06FD\\u06FE\\u0F0B\\u2010"
+ u"\\u2019\\u2027\\u30A0\\u30FB]";
gInclusionSet = new UnicodeSet(UnicodeString(inclusionPat), status);
if (gInclusionSet == NULL) {
status = U_MEMORY_ALLOCATION_ERROR;
@@ -103,14 +103,14 @@ void U_CALLCONV initializeStatics(UErrorCode &status) {
u"\\u0C56\\u0C5D\\u0C60\\u0C61\\u0C66-\\u0C6F\\u0C80\\u0C82\\u0C83\\u0C85-"
u"\\u0C8C\\u0C8E-\\u0C90\\u0C92-\\u0CA8\\u0CAA-\\u0CB3\\u0CB5-\\u0CB9\\u0CBC-"
u"\\u0CC4\\u0CC6-\\u0CC8\\u0CCA-\\u0CCD\\u0CD5\\u0CD6\\u0CDD\\u0CE0-\\u0CE3"
- u"\\u0CE6-\\u0CEF\\u0CF1\\u0CF2\\u0D00\\u0D02\\u0D03\\u0D05-\\u0D0C\\u0D0E-"
+ u"\\u0CE6-\\u0CEF\\u0CF1-\\u0CF3\\u0D00\\u0D02\\u0D03\\u0D05-\\u0D0C\\u0D0E-"
u"\\u0D10\\u0D12-\\u0D3A\\u0D3D-\\u0D43\\u0D46-\\u0D48\\u0D4A-\\u0D4E\\u0D54-"
u"\\u0D57\\u0D60\\u0D61\\u0D66-\\u0D6F\\u0D7A-\\u0D7F\\u0D82\\u0D83\\u0D85-"
u"\\u0D8E\\u0D91-\\u0D96\\u0D9A-\\u0DA5\\u0DA7-\\u0DB1\\u0DB3-\\u0DBB\\u0DBD"
u"\\u0DC0-\\u0DC6\\u0DCA\\u0DCF-\\u0DD4\\u0DD6\\u0DD8-\\u0DDE\\u0DF2\\u0E01-"
u"\\u0E32\\u0E34-\\u0E3A\\u0E40-\\u0E4E\\u0E50-\\u0E59\\u0E81\\u0E82\\u0E84"
u"\\u0E86-\\u0E8A\\u0E8C-\\u0EA3\\u0EA5\\u0EA7-\\u0EB2\\u0EB4-\\u0EBD\\u0EC0-"
- u"\\u0EC4\\u0EC6\\u0EC8-\\u0ECD\\u0ED0-\\u0ED9\\u0EDE\\u0EDF\\u0F00\\u0F20-"
+ u"\\u0EC4\\u0EC6\\u0EC8-\\u0ECE\\u0ED0-\\u0ED9\\u0EDE\\u0EDF\\u0F00\\u0F20-"
u"\\u0F29\\u0F35\\u0F37\\u0F3E-\\u0F42\\u0F44-\\u0F47\\u0F49-\\u0F4C\\u0F4E-"
u"\\u0F51\\u0F53-\\u0F56\\u0F58-\\u0F5B\\u0F5D-\\u0F68\\u0F6A-\\u0F6C\\u0F71"
u"\\u0F72\\u0F74\\u0F7A-\\u0F80\\u0F82-\\u0F84\\u0F86-\\u0F92\\u0F94-\\u0F97"
@@ -131,17 +131,18 @@ void U_CALLCONV initializeStatics(UErrorCode &status) {
u"\\u2DBE\\u2DC0-\\u2DC6\\u2DC8-\\u2DCE\\u2DD0-\\u2DD6\\u2DD8-\\u2DDE\\u3005-"
u"\\u3007\\u3041-\\u3096\\u3099\\u309A\\u309D\\u309E\\u30A1-\\u30FA\\u30FC-"
u"\\u30FE\\u3105-\\u312D\\u312F\\u31A0-\\u31BF\\u3400-\\u4DBF\\u4E00-\\u9FFF"
- u"\\uA67F\\uA717-\\uA71F\\uA788\\uA78D\\uA792\\uA793\\uA7AA\\uA7AE\\uA7B8"
- u"\\uA7B9\\uA7C0-\\uA7CA\\uA7D0\\uA7D1\\uA7D3\\uA7D5-\\uA7D9\\uA9E7-\\uA9FE"
- u"\\uAA60-\\uAA76\\uAA7A-\\uAA7F\\uAB01-\\uAB06\\uAB09-\\uAB0E\\uAB11-\\uAB16"
- u"\\uAB20-\\uAB26\\uAB28-\\uAB2E\\uAB66\\uAB67\\uAC00-\\uD7A3\\uFA0E\\uFA0F"
- u"\\uFA11\\uFA13\\uFA14\\uFA1F\\uFA21\\uFA23\\uFA24\\uFA27-\\uFA29\\U00011301"
- u"\\U00011303\\U0001133B\\U0001133C\\U00016FF0\\U00016FF1\\U0001B11F-"
- u"\\U0001B122\\U0001B150-\\U0001B152\\U0001B164-\\U0001B167\\U0001DF00-"
- u"\\U0001DF1E\\U0001E7E0-\\U0001E7E6\\U0001E7E8-\\U0001E7EB\\U0001E7ED"
- u"\\U0001E7EE\\U0001E7F0-\\U0001E7FE\\U00020000-\\U0002A6DF\\U0002A700-"
- u"\\U0002B738\\U0002B740-\\U0002B81D\\U0002B820-\\U0002CEA1\\U0002CEB0-"
- u"\\U0002EBE0\\U00030000-\\U0003134A]";
+ u"\\uA67F\\uA717-\\uA71F\\uA788\\uA78D\\uA792\\uA793\\uA7AA\\uA7C0-\\uA7CA"
+ u"\\uA7D0\\uA7D1\\uA7D3\\uA7D5-\\uA7D9\\uA9E7-\\uA9FE\\uAA60-\\uAA76\\uAA7A-"
+ u"\\uAA7F\\uAB01-\\uAB06\\uAB09-\\uAB0E\\uAB11-\\uAB16\\uAB20-\\uAB26\\uAB28-"
+ u"\\uAB2E\\uAB66\\uAB67\\uAC00-\\uD7A3\\uFA0E\\uFA0F\\uFA11\\uFA13\\uFA14"
+ u"\\uFA1F\\uFA21\\uFA23\\uFA24\\uFA27-\\uFA29\\U00011301\\U00011303"
+ u"\\U0001133B\\U0001133C\\U00016FF0\\U00016FF1\\U0001B11F-\\U0001B122"
+ u"\\U0001B132\\U0001B150-\\U0001B152\\U0001B155\\U0001B164-\\U0001B167"
+ u"\\U0001DF00-\\U0001DF1E\\U0001DF25-\\U0001DF2A\\U0001E08F\\U0001E7E0-"
+ u"\\U0001E7E6\\U0001E7E8-\\U0001E7EB\\U0001E7ED\\U0001E7EE\\U0001E7F0-"
+ u"\\U0001E7FE\\U00020000-\\U0002A6DF\\U0002A700-\\U0002B739\\U0002B740-"
+ u"\\U0002B81D\\U0002B820-\\U0002CEA1\\U0002CEB0-\\U0002EBE0\\U00030000-"
+ u"\\U0003134A\\U00031350-\\U000323AF]";
gRecommendedSet = new UnicodeSet(UnicodeString(recommendedPat), status);
if (gRecommendedSet == NULL) {
@@ -611,7 +612,7 @@ int32_t checkImpl(const SpoofImpl* This, const UnicodeString& id, CheckResult* c
int32_t i;
UChar32 c;
UChar32 firstNonspacingMark = 0;
- UBool haveMultipleMarks = FALSE;
+ UBool haveMultipleMarks = false;
UnicodeSet marksSeenSoFar; // Set of combining marks in a single combining sequence.
for (i=0; i<nfdLength ;) {
@@ -621,7 +622,7 @@ int32_t checkImpl(const SpoofImpl* This, const UnicodeString& id, CheckResult* c
firstNonspacingMark = 0;
if (haveMultipleMarks) {
marksSeenSoFar.clear();
- haveMultipleMarks = FALSE;
+ haveMultipleMarks = false;
}
continue;
}
@@ -631,7 +632,7 @@ int32_t checkImpl(const SpoofImpl* This, const UnicodeString& id, CheckResult* c
}
if (!haveMultipleMarks) {
marksSeenSoFar.add(firstNonspacingMark);
- haveMultipleMarks = TRUE;
+ haveMultipleMarks = true;
}
if (marksSeenSoFar.contains(c)) {
// report the error, and stop scanning.
@@ -656,13 +657,13 @@ uspoof_check2UnicodeString(const USpoofChecker *sc,
UErrorCode *status) {
const SpoofImpl *This = SpoofImpl::validateThis(sc, *status);
if (This == NULL) {
- return FALSE;
+ return false;
}
if (checkResult != NULL) {
CheckResult* ThisCheckResult = CheckResult::validateThis(checkResult, *status);
if (ThisCheckResult == NULL) {
- return FALSE;
+ return false;
}
return checkImpl(This, id, ThisCheckResult, status);
} else {
diff --git a/thirdparty/icu4c/i18n/uspoof_impl.cpp b/thirdparty/icu4c/i18n/uspoof_impl.cpp
index b283d81321..e50344c469 100644
--- a/thirdparty/icu4c/i18n/uspoof_impl.cpp
+++ b/thirdparty/icu4c/i18n/uspoof_impl.cpp
@@ -320,10 +320,10 @@ URestrictionLevel SpoofImpl::getRestrictionLevel(const UnicodeString& input, UEr
// Section 5.2 step 2
// Java use a static UnicodeSet for this test. In C++, avoid the static variable
// and just do a simple for loop.
- UBool allASCII = TRUE;
+ UBool allASCII = true;
for (int32_t i=0, length=input.length(); i<length; i++) {
if (input.charAt(i) > 0x7f) {
- allASCII = FALSE;
+ allASCII = false;
break;
}
}
@@ -495,9 +495,9 @@ UBool SpoofData::validateDataVersion(UErrorCode &status) const {
fRawData->fFormatVersion[2] != 0 ||
fRawData->fFormatVersion[3] != 0) {
status = U_INVALID_FORMAT_ERROR;
- return FALSE;
+ return false;
}
- return TRUE;
+ return true;
}
static UBool U_CALLCONV
@@ -518,9 +518,9 @@ spoofDataIsAcceptable(void *context,
if(version != NULL) {
uprv_memcpy(version, pInfo->dataVersion, 4);
}
- return TRUE;
+ return true;
} else {
- return FALSE;
+ return false;
}
}
@@ -538,7 +538,7 @@ spoofDataIsAcceptable(void *context,
// uspoof_cleanupDefaultData - Called during cleanup.
//
-static UInitOnce gSpoofInitDefaultOnce = U_INITONCE_INITIALIZER;
+static UInitOnce gSpoofInitDefaultOnce {};
static SpoofData* gDefaultSpoofData;
static UBool U_CALLCONV
@@ -549,7 +549,7 @@ uspoof_cleanupDefaultData(void) {
gDefaultSpoofData = nullptr;
gSpoofInitDefaultOnce.reset();
}
- return TRUE;
+ return true;
}
static void U_CALLCONV uspoof_loadDefaultData(UErrorCode& status) {
@@ -655,7 +655,7 @@ SpoofData::SpoofData(UErrorCode &status) {
// Called by constructors to put things in a known initial state.
void SpoofData::reset() {
fRawData = NULL;
- fDataOwned = FALSE;
+ fDataOwned = false;
fUDM = NULL;
fMemLimit = 0;
fRefCount = 1;
@@ -945,7 +945,7 @@ uspoof_swap(const UDataSwapper *ds, const void *inData, int32_t length, void *ou
uint32_t magic = ds->readUInt32(spoofDH->fMagic);
ds->writeUInt32((uint32_t *)&outputDH->fMagic, magic);
- if (outputDH->fFormatVersion != spoofDH->fFormatVersion) {
+ if (inBytes != outBytes) {
uprv_memcpy(outputDH->fFormatVersion, spoofDH->fFormatVersion, sizeof(spoofDH->fFormatVersion));
}
// swap starting at fLength
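Editorial aside on the uspoof_swap() hunk above: the old condition compared the two embedded fFormatVersion arrays, which after array-to-pointer decay only compares their addresses, never their contents, and newer compilers warn about exactly that. Comparing the raw inBytes/outBytes buffer pointers states the intended "only copy when swapping out of place" check directly. A minimal standalone illustration of the decay behaviour (not part of the patch; the struct and field names below are made up for the example):

    #include <stdio.h>

    struct header {
        unsigned char version[4];
    };

    int main(void) {
        struct header in  = {{72, 1, 0, 0}};
        struct header out = {{72, 1, 0, 0}};
        /* Both arrays hold identical bytes, but the comparison below is
           between two distinct addresses, so it is always true here.
           GCC and Clang can warn about comparing arrays like this. */
        if (in.version != out.version) {
            printf("compared addresses, not contents\n");
        }
        return 0;
    }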
diff --git a/thirdparty/icu4c/i18n/uspoof_impl.h b/thirdparty/icu4c/i18n/uspoof_impl.h
index e75ae262bd..68d7bedae1 100644
--- a/thirdparty/icu4c/i18n/uspoof_impl.h
+++ b/thirdparty/icu4c/i18n/uspoof_impl.h
@@ -28,6 +28,7 @@
#ifdef __cplusplus
#include "capi_helper.h"
+#include "umutex.h"
U_NAMESPACE_BEGIN
diff --git a/thirdparty/icu4c/icudt71l.dat b/thirdparty/icu4c/icudt72l.dat
index 3fa3af9c23..69edf4293d 100644
--- a/thirdparty/icu4c/icudt71l.dat
+++ b/thirdparty/icu4c/icudt72l.dat
Binary files differ
diff --git a/thirdparty/libpng/arm/arm_init.c b/thirdparty/libpng/arm/arm_init.c
index ab22525b38..84d05556f8 100644
--- a/thirdparty/libpng/arm/arm_init.c
+++ b/thirdparty/libpng/arm/arm_init.c
@@ -36,7 +36,10 @@
#ifndef PNG_ARM_NEON_FILE
# if defined(__aarch64__) || defined(_M_ARM64)
/* ARM Neon is expected to be unconditionally available on ARM64. */
-# error "PNG_ARM_NEON_CHECK_SUPPORTED must not be defined on this platform"
+# error "PNG_ARM_NEON_CHECK_SUPPORTED must not be defined on ARM64"
+# elif defined(__ARM_NEON__) || defined(__ARM_NEON)
+ /* ARM Neon is expected to be available on the target CPU architecture. */
+# error "PNG_ARM_NEON_CHECK_SUPPORTED must not be defined on this CPU arch"
# elif defined(__linux__)
# define PNG_ARM_NEON_FILE "contrib/arm-neon/linux.c"
# else
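Editorial aside on the arm_init.c hunk above: the added branch turns a contradictory configuration into a hard error, since a runtime NEON probe is pointless on targets where the compiler already advertises NEON. As a rough configuration sketch of the intent (illustrative only; PNG_ARM_NEON_OPT and PNG_ARM_NEON_CHECK_SUPPORTED are libpng's existing build knobs, but this exact fragment is not from the patch):

    /* In a user build configuration or on the compiler command line: */
    #if defined(__aarch64__) || defined(__ARM_NEON__) || defined(__ARM_NEON)
    #  define PNG_ARM_NEON_OPT 2             /* NEON guaranteed: use it unconditionally */
    #else
    #  define PNG_ARM_NEON_CHECK_SUPPORTED   /* NEON optional: probe at run time */
    #endif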
diff --git a/thirdparty/libpng/png.c b/thirdparty/libpng/png.c
index fc09564262..4f3e8bbd31 100644
--- a/thirdparty/libpng/png.c
+++ b/thirdparty/libpng/png.c
@@ -14,7 +14,7 @@
#include "pngpriv.h"
/* Generate a compiler error if there is an old png.h in the search path. */
-typedef png_libpng_version_1_6_38 Your_png_h_is_not_version_1_6_38;
+typedef png_libpng_version_1_6_39 Your_png_h_is_not_version_1_6_39;
#ifdef __GNUC__
/* The version tests may need to be added to, but the problem warning has
@@ -815,7 +815,7 @@ png_get_copyright(png_const_structrp png_ptr)
return PNG_STRING_COPYRIGHT
#else
return PNG_STRING_NEWLINE \
- "libpng version 1.6.38" PNG_STRING_NEWLINE \
+ "libpng version 1.6.39" PNG_STRING_NEWLINE \
"Copyright (c) 2018-2022 Cosmin Truta" PNG_STRING_NEWLINE \
"Copyright (c) 1998-2002,2004,2006-2018 Glenn Randers-Pehrson" \
PNG_STRING_NEWLINE \
@@ -2710,7 +2710,7 @@ png_check_IHDR(png_const_structrp png_ptr,
int /* PRIVATE */
png_check_fp_number(png_const_charp string, size_t size, int *statep,
- png_size_tp whereami)
+ size_t *whereami)
{
int state = *statep;
size_t i = *whereami;
diff --git a/thirdparty/libpng/png.h b/thirdparty/libpng/png.h
index 5fb494fb1a..f109cdf336 100644
--- a/thirdparty/libpng/png.h
+++ b/thirdparty/libpng/png.h
@@ -1,7 +1,7 @@
/* png.h - header file for PNG reference library
*
- * libpng version 1.6.38 - September 14, 2022
+ * libpng version 1.6.39 - November 20, 2022
*
* Copyright (c) 2018-2022 Cosmin Truta
* Copyright (c) 1998-2002,2004,2006-2018 Glenn Randers-Pehrson
@@ -15,7 +15,7 @@
* libpng versions 0.89, June 1996, through 0.96, May 1997: Andreas Dilger
* libpng versions 0.97, January 1998, through 1.6.35, July 2018:
* Glenn Randers-Pehrson
- * libpng versions 1.6.36, December 2018, through 1.6.38, September 2022:
+ * libpng versions 1.6.36, December 2018, through 1.6.39, November 2022:
* Cosmin Truta
* See also "Contributing Authors", below.
*/
@@ -239,7 +239,7 @@
* ...
* 1.5.30 15 10530 15.so.15.30[.0]
* ...
- * 1.6.38 16 10638 16.so.16.38[.0]
+ * 1.6.39 16 10639 16.so.16.39[.0]
*
* Henceforth the source version will match the shared-library major and
* minor numbers; the shared-library major version number will be used for
@@ -278,8 +278,8 @@
*/
/* Version information for png.h - this should match the version in png.c */
-#define PNG_LIBPNG_VER_STRING "1.6.38"
-#define PNG_HEADER_VERSION_STRING " libpng version 1.6.38 - September 14, 2022\n"
+#define PNG_LIBPNG_VER_STRING "1.6.39"
+#define PNG_HEADER_VERSION_STRING " libpng version 1.6.39 - November 20, 2022\n"
#define PNG_LIBPNG_VER_SONUM 16
#define PNG_LIBPNG_VER_DLLNUM 16
@@ -287,7 +287,7 @@
/* These should match the first 3 components of PNG_LIBPNG_VER_STRING: */
#define PNG_LIBPNG_VER_MAJOR 1
#define PNG_LIBPNG_VER_MINOR 6
-#define PNG_LIBPNG_VER_RELEASE 38
+#define PNG_LIBPNG_VER_RELEASE 39
/* This should be zero for a public release, or non-zero for a
* development version. [Deprecated]
@@ -318,7 +318,7 @@
* From version 1.0.1 it is:
* XXYYZZ, where XX=major, YY=minor, ZZ=release
*/
-#define PNG_LIBPNG_VER 10638 /* 1.6.38 */
+#define PNG_LIBPNG_VER 10639 /* 1.6.39 */
/* Library configuration: these options cannot be changed after
* the library has been built.
@@ -428,7 +428,7 @@ extern "C" {
/* This triggers a compiler error in png.c, if png.c and png.h
* do not agree upon the version number.
*/
-typedef char* png_libpng_version_1_6_38;
+typedef char* png_libpng_version_1_6_39;
/* Basic control structions. Read libpng-manual.txt or libpng.3 for more info.
*
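Editorial aside on the png.h version bump above: the release number appears in several redundant encodings, and the packed PNG_LIBPNG_VER follows the XXYYZZ layout documented earlier in the header, so 1.6.39 packs to 10639. A tiny standalone check of that arithmetic (illustrative only, not part of the patch):

    #include <assert.h>

    int main(void) {
        int major = 1, minor = 6, release = 39;
        /* XXYYZZ packing: 1.6.39 -> 10639, matching PNG_LIBPNG_VER above. */
        assert(major * 10000 + minor * 100 + release == 10639);
        return 0;
    }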
diff --git a/thirdparty/libpng/pngconf.h b/thirdparty/libpng/pngconf.h
index 89d28f83a0..fcb4b43069 100644
--- a/thirdparty/libpng/pngconf.h
+++ b/thirdparty/libpng/pngconf.h
@@ -1,7 +1,7 @@
/* pngconf.h - machine-configurable file for libpng
*
- * libpng version 1.6.38
+ * libpng version 1.6.39
*
* Copyright (c) 2018-2022 Cosmin Truta
* Copyright (c) 1998-2002,2004,2006-2016,2018 Glenn Randers-Pehrson
diff --git a/thirdparty/libpng/pnglibconf.h b/thirdparty/libpng/pnglibconf.h
index 89d5b4c8ad..e5948c8ce1 100644
--- a/thirdparty/libpng/pnglibconf.h
+++ b/thirdparty/libpng/pnglibconf.h
@@ -1,6 +1,6 @@
/* pnglibconf.h - library build configuration */
-/* libpng version 1.6.38 */
+/* libpng version 1.6.39 */
/* Copyright (c) 2018-2022 Cosmin Truta */
/* Copyright (c) 1998-2002,2004,2006-2018 Glenn Randers-Pehrson */
diff --git a/thirdparty/libpng/pngpriv.h b/thirdparty/libpng/pngpriv.h
index 2e426cf47d..b8a73b685d 100644
--- a/thirdparty/libpng/pngpriv.h
+++ b/thirdparty/libpng/pngpriv.h
@@ -1946,7 +1946,7 @@ PNG_INTERNAL_FUNCTION(void,png_ascii_from_fixed,(png_const_structrp png_ptr,
* the problem character.) This has not been tested within libpng.
*/
PNG_INTERNAL_FUNCTION(int,png_check_fp_number,(png_const_charp string,
- size_t size, int *statep, png_size_tp whereami),PNG_EMPTY);
+ size_t size, int *statep, size_t *whereami),PNG_EMPTY);
/* This is the same but it checks a complete string and returns true
* only if it just contains a floating point number. As of 1.5.4 this
diff --git a/thirdparty/libpng/pngread.c b/thirdparty/libpng/pngread.c
index 5ab9224038..96996ced5b 100644
--- a/thirdparty/libpng/pngread.c
+++ b/thirdparty/libpng/pngread.c
@@ -3762,13 +3762,13 @@ png_image_read_direct(png_voidp argument)
mode = PNG_ALPHA_PNG;
output_gamma = PNG_DEFAULT_sRGB;
}
-
+
if ((change & PNG_FORMAT_FLAG_ASSOCIATED_ALPHA) != 0)
{
mode = PNG_ALPHA_OPTIMIZED;
change &= ~PNG_FORMAT_FLAG_ASSOCIATED_ALPHA;
}
-
+
/* If 'do_local_background' is set check for the presence of gamma
* correction; this is part of the work-round for the libpng bug
* described above.
diff --git a/thirdparty/libpng/pngrutil.c b/thirdparty/libpng/pngrutil.c
index ca060dd15f..068ab193a3 100644
--- a/thirdparty/libpng/pngrutil.c
+++ b/thirdparty/libpng/pngrutil.c
@@ -3186,7 +3186,7 @@ png_check_chunk_length(png_const_structrp png_ptr, png_uint_32 length)
{
png_debug2(0," length = %lu, limit = %lu",
(unsigned long)length,(unsigned long)limit);
- png_chunk_error(png_ptr, "chunk data is too large");
+ png_benign_error(png_ptr, "chunk data is too large");
}
}
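Editorial aside on the pngrutil.c hunk above: an over-limit chunk length is now reported through png_benign_error() instead of png_chunk_error(), so the read no longer has to abort unconditionally at that point. Whether decoding actually continues depends on how the application and the build configure benign errors. A minimal sketch of an application opting in (illustrative only; assumes a default libpng build with benign-error support, and the helper name is made up):

    #include <png.h>
    #include <stddef.h>

    png_structp create_tolerant_reader(void) {
        png_structp png_ptr = png_create_read_struct(PNG_LIBPNG_VER_STRING,
                                                     NULL, NULL, NULL);
        if (png_ptr != NULL) {
            /* Ask libpng to downgrade benign errors to warnings for this read. */
            png_set_benign_errors(png_ptr, 1 /* allowed */);
        }
        return png_ptr;
    }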
diff --git a/thirdparty/libpng/pngwrite.c b/thirdparty/libpng/pngwrite.c
index 06c45d16ab..4e58d776a9 100644
--- a/thirdparty/libpng/pngwrite.c
+++ b/thirdparty/libpng/pngwrite.c
@@ -75,10 +75,10 @@ write_unknown_chunks(png_structrp png_ptr, png_const_inforp info_ptr,
* library. If you have a new chunk to add, make a function to write it,
* and put it in the correct location here. If you want the chunk written
* after the image data, put it in png_write_end(). I strongly encourage
- * you to supply a PNG_INFO_ flag, and check info_ptr->valid before writing
- * the chunk, as that will keep the code from breaking if you want to just
- * write a plain PNG file. If you have long comments, I suggest writing
- * them in png_write_end(), and compressing them.
+ * you to supply a PNG_INFO_<chunk> flag, and check info_ptr->valid before
+ * writing the chunk, as that will keep the code from breaking if you want
+ * to just write a plain PNG file. If you have long comments, I suggest
+ * writing them in png_write_end(), and compressing them.
*/
void PNGAPI
png_write_info_before_PLTE(png_structrp png_ptr, png_const_inforp info_ptr)
diff --git a/thirdparty/libpng/pngwutil.c b/thirdparty/libpng/pngwutil.c
index 16345e4c0b..01f0607c70 100644
--- a/thirdparty/libpng/pngwutil.c
+++ b/thirdparty/libpng/pngwutil.c
@@ -1,7 +1,7 @@
/* pngwutil.c - utilities to write a PNG file
*
- * Copyright (c) 2018 Cosmin Truta
+ * Copyright (c) 2018-2022 Cosmin Truta
* Copyright (c) 1998-2002,2004,2006-2018 Glenn Randers-Pehrson
* Copyright (c) 1996-1997 Andreas Dilger
* Copyright (c) 1995-1996 Guy Eric Schalnat, Group 42, Inc.
@@ -1747,7 +1747,7 @@ png_write_pCAL(png_structrp png_ptr, png_charp purpose, png_int_32 X0,
{
png_uint_32 purpose_len;
size_t units_len, total_len;
- png_size_tp params_len;
+ size_t *params_len;
png_byte buf[10];
png_byte new_purpose[80];
int i;
@@ -1769,7 +1769,7 @@ png_write_pCAL(png_structrp png_ptr, png_charp purpose, png_int_32 X0,
png_debug1(3, "pCAL units length = %d", (int)units_len);
total_len = purpose_len + units_len + 10;
- params_len = (png_size_tp)png_malloc(png_ptr,
+ params_len = (size_t *)png_malloc(png_ptr,
(png_alloc_size_t)((png_alloc_size_t)nparams * (sizeof (size_t))));
/* Find the length of each parameter, making sure we don't count the
diff --git a/thirdparty/libtheora/LICENSE b/thirdparty/libtheora/LICENSE
index 5e5ec08469..97e8431790 100644
--- a/thirdparty/libtheora/LICENSE
+++ b/thirdparty/libtheora/LICENSE
@@ -4,13 +4,13 @@ In addition to and irrespective of the copyright license associated
with this software, On2 Technologies, Inc. makes the following statement
regarding technology used in this software:
- On2 represents and warrants that it shall not assert any rights
+ On2 represents and warrants that it shall not assert any rights
relating to infringement of On2's registered patents, nor initiate
any litigation asserting such rights, against any person who, or
- entity which utilizes the On2 VP3 Codec Software, including any
- use, distribution, and sale of said Software; which make changes,
+ entity which utilizes the On2 VP3 Codec Software, including any
+ use, distribution, and sale of said Software; which make changes,
modifications, and improvements in said Software; and to use,
- distribute, and sell said changes as well as applications for other
+ distribute, and sell said changes as well as applications for other
fields of use.
This reference implementation is originally derived from the On2 VP3
diff --git a/thirdparty/libtheora/analyze.c b/thirdparty/libtheora/analyze.c
index af01b60dff..19d7612d23 100644
--- a/thirdparty/libtheora/analyze.c
+++ b/thirdparty/libtheora/analyze.c
@@ -18,12 +18,12 @@
#include <string.h>
#include "encint.h"
#include "modedec.h"
+#if defined(OC_COLLECT_METRICS)
+# include "collect.c"
+#endif
-typedef struct oc_fr_state oc_fr_state;
-typedef struct oc_qii_state oc_qii_state;
-typedef struct oc_enc_pipeline_state oc_enc_pipeline_state;
typedef struct oc_rd_metric oc_rd_metric;
typedef struct oc_mode_choice oc_mode_choice;
@@ -42,7 +42,7 @@ typedef struct oc_mode_choice oc_mode_choice;
This is the inverse of the equivalent table OC_MODE_ALPHABETS in the
decoder.*/
static const unsigned char OC_MODE_RANKS[7][OC_NMODES]={
- /*Last MV dominates.*/
+ /*Last MV dominates.*/
/*L P M N I G GM 4*/
{3,4,2,0,1,5,6,7},
/*L P N M I G GM 4*/
@@ -87,6 +87,29 @@ static void oc_mode_scheme_chooser_reset(oc_mode_scheme_chooser *_chooser){
}
}
+/*Return the cost of coding _mb_mode in the specified scheme.*/
+static int oc_mode_scheme_chooser_scheme_mb_cost(
+ const oc_mode_scheme_chooser *_chooser,int _scheme,int _mb_mode){
+ int codebook;
+ int ri;
+ codebook=_scheme+1>>3;
+ /*For any scheme except 0, we can just use the bit cost of the mode's rank
+ in that scheme.*/
+ ri=_chooser->mode_ranks[_scheme][_mb_mode];
+ if(_scheme==0){
+ int mc;
+ /*For scheme 0, incrementing the mode count could potentially change the
+ mode's rank.
+ Find the index where the mode would be moved to in the optimal list,
+ and use its bit cost instead of the one for the mode's current
+ position in the list.*/
+ /*We don't actually reorder the list; this is for computing opportunity
+ cost, not an update.*/
+ mc=_chooser->mode_counts[_mb_mode];
+ while(ri>0&&mc>=_chooser->mode_counts[_chooser->scheme0_list[ri-1]])ri--;
+ }
+ return OC_MODE_BITS[codebook][ri];
+}
/*This is the real purpose of this data structure: not actually selecting a
mode scheme, but estimating the cost of coding a given mode given all the
@@ -108,46 +131,32 @@ static int oc_mode_scheme_chooser_cost(oc_mode_scheme_chooser *_chooser,
int best_bits;
int mode_bits;
int si;
- int scheme_bits;
+ int scheme0_bits;
+ int scheme1_bits;
scheme0=_chooser->scheme_list[0];
scheme1=_chooser->scheme_list[1];
- best_bits=_chooser->scheme_bits[scheme0];
- mode_bits=OC_MODE_BITS[scheme0+1>>3][_chooser->mode_ranks[scheme0][_mb_mode]];
+ scheme0_bits=_chooser->scheme_bits[scheme0];
+ scheme1_bits=_chooser->scheme_bits[scheme1];
+ mode_bits=oc_mode_scheme_chooser_scheme_mb_cost(_chooser,scheme0,_mb_mode);
/*Typical case: If the difference between the best scheme and the next best
is greater than 6 bits, then adding just one mode cannot change which
scheme we use.*/
- if(_chooser->scheme_bits[scheme1]-best_bits>6)return mode_bits;
+ if(scheme1_bits-scheme0_bits>6)return mode_bits;
/*Otherwise, check to see if adding this mode selects a different scheme as
the best.*/
si=1;
- best_bits+=mode_bits;
+ best_bits=scheme0_bits+mode_bits;
do{
- /*For any scheme except 0, we can just use the bit cost of the mode's rank
- in that scheme.*/
- if(scheme1!=0){
- scheme_bits=_chooser->scheme_bits[scheme1]+
- OC_MODE_BITS[scheme1+1>>3][_chooser->mode_ranks[scheme1][_mb_mode]];
- }
- else{
- int ri;
- /*For scheme 0, incrementing the mode count could potentially change the
- mode's rank.
- Find the index where the mode would be moved to in the optimal list,
- and use its bit cost instead of the one for the mode's current
- position in the list.*/
- /*We don't recompute scheme bits; this is computing opportunity cost, not
- an update.*/
- for(ri=_chooser->scheme0_ranks[_mb_mode];ri>0&&
- _chooser->mode_counts[_mb_mode]>=
- _chooser->mode_counts[_chooser->scheme0_list[ri-1]];ri--);
- scheme_bits=_chooser->scheme_bits[0]+OC_MODE_BITS[0][ri];
- }
- if(scheme_bits<best_bits)best_bits=scheme_bits;
+ int cur_bits;
+ cur_bits=scheme1_bits+
+ oc_mode_scheme_chooser_scheme_mb_cost(_chooser,scheme1,_mb_mode);
+ if(cur_bits<best_bits)best_bits=cur_bits;
if(++si>=8)break;
scheme1=_chooser->scheme_list[si];
+ scheme1_bits=_chooser->scheme_bits[scheme1];
}
- while(_chooser->scheme_bits[scheme1]-_chooser->scheme_bits[scheme0]<=6);
- return best_bits-_chooser->scheme_bits[scheme0];
+ while(scheme1_bits-scheme0_bits<=6);
+ return best_bits-scheme0_bits;
}
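Editorial aside on the new oc_mode_scheme_chooser_scheme_mb_cost() helper above: the expression _scheme+1>>3 parses as (_scheme+1)>>3 because addition binds tighter than the shift, so schemes 0 through 6 map to codebook index 0 and scheme 7 maps to codebook index 1. A small sanity check of that arithmetic (illustrative only, not part of the patch):

    #include <assert.h>

    int main(void) {
        int scheme;
        for (scheme = 0; scheme <= 7; scheme++) {
            /* '+' binds tighter than '>>', matching the upstream expression.
               Compilers with -Wparentheses may suggest the explicit form. */
            assert((scheme + 1 >> 3) == ((scheme + 1) >> 3));
        }
        assert((6 + 1 >> 3) == 0); /* schemes 0..6 -> codebook 0 */
        assert((7 + 1 >> 3) == 1); /* scheme 7     -> codebook 1 */
        return 0;
    }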
/*Incrementally update the mode counts and per-scheme bit counts and re-order
@@ -211,22 +220,6 @@ static int oc_block_run_bits(int _run_count){
-/*State to track coded block flags and their bit cost.*/
-struct oc_fr_state{
- ptrdiff_t bits;
- unsigned sb_partial_count:16;
- unsigned sb_full_count:16;
- unsigned b_coded_count_prev:8;
- unsigned b_coded_count:8;
- unsigned b_count:8;
- signed int sb_partial:2;
- signed int sb_full:2;
- signed int b_coded_prev:2;
- signed int b_coded:2;
-};
-
-
-
static void oc_fr_state_init(oc_fr_state *_fr){
_fr->bits=0;
_fr->sb_partial_count=0;
@@ -234,6 +227,8 @@ static void oc_fr_state_init(oc_fr_state *_fr){
_fr->b_coded_count_prev=0;
_fr->b_coded_count=0;
_fr->b_count=0;
+ _fr->sb_prefer_partial=0;
+ _fr->sb_bits=0;
_fr->sb_partial=-1;
_fr->sb_full=-1;
_fr->b_coded_prev=-1;
@@ -241,14 +236,14 @@ static void oc_fr_state_init(oc_fr_state *_fr){
}
-static void oc_fr_state_advance_sb(oc_fr_state *_fr,
+static int oc_fr_state_sb_cost(const oc_fr_state *_fr,
int _sb_partial,int _sb_full){
- ptrdiff_t bits;
- int sb_partial_count;
- int sb_full_count;
- bits=_fr->bits;
+ int bits;
+ int sb_partial_count;
+ int sb_full_count;
+ bits=0;
+ sb_partial_count=_fr->sb_partial_count;
/*Extend the sb_partial run, or start a new one.*/
- sb_partial_count=_fr->sb_partial;
if(_fr->sb_partial==_sb_partial){
if(sb_partial_count>=4129){
bits++;
@@ -257,8 +252,7 @@ static void oc_fr_state_advance_sb(oc_fr_state *_fr,
else bits-=oc_sb_run_bits(sb_partial_count);
}
else sb_partial_count=0;
- sb_partial_count++;
- bits+=oc_sb_run_bits(sb_partial_count);
+ bits+=oc_sb_run_bits(++sb_partial_count);
if(!_sb_partial){
/*Extend the sb_full run, or start a new one.*/
sb_full_count=_fr->sb_full_count;
@@ -270,98 +264,161 @@ static void oc_fr_state_advance_sb(oc_fr_state *_fr,
else bits-=oc_sb_run_bits(sb_full_count);
}
else sb_full_count=0;
+ bits+=oc_sb_run_bits(++sb_full_count);
+ }
+ return bits;
+}
+
+static void oc_fr_state_advance_sb(oc_fr_state *_fr,
+ int _sb_partial,int _sb_full){
+ int sb_partial_count;
+ int sb_full_count;
+ sb_partial_count=_fr->sb_partial_count;
+ if(_fr->sb_partial!=_sb_partial||sb_partial_count>=4129)sb_partial_count=0;
+ sb_partial_count++;
+ if(!_sb_partial){
+ sb_full_count=_fr->sb_full_count;
+ if(_fr->sb_full!=_sb_full||sb_full_count>=4129)sb_full_count=0;
sb_full_count++;
- bits+=oc_sb_run_bits(sb_full_count);
- _fr->sb_full=_sb_full;
_fr->sb_full_count=sb_full_count;
+ _fr->sb_full=_sb_full;
+ /*Roll back the partial block state.*/
+ _fr->b_coded=_fr->b_coded_prev;
+ _fr->b_coded_count=_fr->b_coded_count_prev;
+ }
+ else{
+ /*Commit back the partial block state.*/
+ _fr->b_coded_prev=_fr->b_coded;
+ _fr->b_coded_count_prev=_fr->b_coded_count;
}
- _fr->bits=bits;
- _fr->sb_partial=_sb_partial;
_fr->sb_partial_count=sb_partial_count;
+ _fr->sb_partial=_sb_partial;
+ _fr->b_count=0;
+ _fr->sb_prefer_partial=0;
+ _fr->sb_bits=0;
}
-/*Flush any outstanding block flags for a SB (e.g., one with fewer than 16
- blocks).*/
+/*Commit the state of the current super block and advance to the next.*/
static void oc_fr_state_flush_sb(oc_fr_state *_fr){
- ptrdiff_t bits;
- int sb_partial;
- int sb_full=sb_full;
- int b_coded_count;
- int b_coded;
- int b_count;
+ int sb_partial;
+ int sb_full;
+ int b_coded_count;
+ int b_count;
b_count=_fr->b_count;
- if(b_count>0){
- bits=_fr->bits;
- b_coded=_fr->b_coded;
- b_coded_count=_fr->b_coded_count;
- if(b_coded_count>=b_count){
- /*This SB was fully coded/uncoded; roll back the partial block flags.*/
- bits-=oc_block_run_bits(b_coded_count);
- if(b_coded_count>b_count)bits+=oc_block_run_bits(b_coded_count-b_count);
- sb_partial=0;
- sb_full=b_coded;
- b_coded=_fr->b_coded_prev;
- b_coded_count=_fr->b_coded_count_prev;
- }
- else{
- /*It was partially coded.*/
- sb_partial=1;
- /*sb_full is unused.*/
+ b_coded_count=_fr->b_coded_count;
+ sb_full=_fr->b_coded;
+ sb_partial=b_coded_count<b_count;
+ if(!sb_partial){
+ /*If the super block is fully coded/uncoded...*/
+ if(_fr->sb_prefer_partial){
+ /*So far coding this super block as partial was cheaper anyway.*/
+ if(b_coded_count>15||_fr->b_coded_prev<0){
+ int sb_bits;
+ /*If the block run is too long, this will limit how far it can be
+ extended into the next partial super block.
+ If we need to extend it farther, we don't want to have to roll all
+ the way back here (since there could be many full SBs between now
+ and then), so we disallow this.
+ Similarly, if this is the start of a stripe, we don't know how the
+ length of the outstanding block run from the previous stripe.*/
+ sb_bits=oc_fr_state_sb_cost(_fr,sb_partial,sb_full);
+ _fr->bits+=sb_bits-_fr->sb_bits;
+ _fr->sb_bits=sb_bits;
+ }
+ else sb_partial=1;
}
- _fr->bits=bits;
- _fr->b_coded_count=b_coded_count;
- _fr->b_coded_count_prev=b_coded_count;
- _fr->b_count=0;
- _fr->b_coded=b_coded;
- _fr->b_coded_prev=b_coded;
- oc_fr_state_advance_sb(_fr,sb_partial,sb_full);
}
+ oc_fr_state_advance_sb(_fr,sb_partial,sb_full);
}
static void oc_fr_state_advance_block(oc_fr_state *_fr,int _b_coded){
ptrdiff_t bits;
+ int sb_bits;
int b_coded_count;
int b_count;
- int sb_partial;
- int sb_full=sb_full;
- bits=_fr->bits;
- /*Extend the b_coded run, or start a new one.*/
+ int sb_prefer_partial;
+ sb_bits=_fr->sb_bits;
+ bits=_fr->bits-sb_bits;
+ b_count=_fr->b_count;
b_coded_count=_fr->b_coded_count;
- if(_fr->b_coded==_b_coded)bits-=oc_block_run_bits(b_coded_count);
- else b_coded_count=0;
- b_coded_count++;
- b_count=_fr->b_count+1;
- if(b_count>=16){
- /*We finished a superblock.*/
- if(b_coded_count>=16){
- /*It was fully coded/uncoded; roll back the partial block flags.*/
- if(b_coded_count>16)bits+=oc_block_run_bits(b_coded_count-16);
- sb_partial=0;
- sb_full=_b_coded;
- _b_coded=_fr->b_coded_prev;
- b_coded_count=_fr->b_coded_count_prev;
+ sb_prefer_partial=_fr->sb_prefer_partial;
+ if(b_coded_count>=b_count){
+ int sb_partial_bits;
+ /*This super block is currently fully coded/uncoded.*/
+ if(b_count<=0){
+ /*This is the first block in this SB.*/
+ b_count=1;
+ /*Check to see whether it's cheaper to code it partially or fully.*/
+ if(_fr->b_coded==_b_coded){
+ sb_partial_bits=-oc_block_run_bits(b_coded_count);
+ sb_partial_bits+=oc_block_run_bits(++b_coded_count);
+ }
+ else{
+ b_coded_count=1;
+ sb_partial_bits=2;
+ }
+ sb_partial_bits+=oc_fr_state_sb_cost(_fr,1,_b_coded);
+ sb_bits=oc_fr_state_sb_cost(_fr,0,_b_coded);
+ sb_prefer_partial=sb_partial_bits<sb_bits;
+ sb_bits^=(sb_partial_bits^sb_bits)&-sb_prefer_partial;
+ }
+ else if(_fr->b_coded==_b_coded){
+ b_coded_count++;
+ if(++b_count<16){
+ if(sb_prefer_partial){
+ /*Check to see if it's cheaper to code it fully.*/
+ sb_partial_bits=sb_bits;
+ sb_partial_bits+=oc_block_run_bits(b_coded_count);
+ if(b_coded_count>0){
+ sb_partial_bits-=oc_block_run_bits(b_coded_count-1);
+ }
+ sb_bits=oc_fr_state_sb_cost(_fr,0,_b_coded);
+ sb_prefer_partial=sb_partial_bits<sb_bits;
+ sb_bits^=(sb_partial_bits^sb_bits)&-sb_prefer_partial;
+ }
+ /*There's no need to check the converse (whether it's cheaper to code
+ this SB partially if we were coding it fully), since the cost to
+ code a SB partially can only increase as we add more blocks, whereas
+ the cost to code it fully stays constant.*/
+ }
+ else{
+ /*If we get to the end and this SB is still full, then force it to be
+ coded full.
+ Otherwise we might not be able to extend the block run far enough
+ into the next partial SB.*/
+ if(sb_prefer_partial){
+ sb_prefer_partial=0;
+ sb_bits=oc_fr_state_sb_cost(_fr,0,_b_coded);
+ }
+ }
}
else{
- bits+=oc_block_run_bits(b_coded_count);
- /*It was partially coded.*/
- sb_partial=1;
- /*sb_full is unused.*/
+ /*This SB was full, but now must be made partial.*/
+ if(!sb_prefer_partial){
+ sb_bits=oc_block_run_bits(b_coded_count);
+ if(b_coded_count>b_count){
+ sb_bits-=oc_block_run_bits(b_coded_count-b_count);
+ }
+ sb_bits+=oc_fr_state_sb_cost(_fr,1,_b_coded);
+ }
+ b_count++;
+ b_coded_count=1;
+ sb_prefer_partial=1;
+ sb_bits+=2;
}
- _fr->bits=bits;
- _fr->b_coded_count=b_coded_count;
- _fr->b_coded_count_prev=b_coded_count;
- _fr->b_count=0;
- _fr->b_coded=_b_coded;
- _fr->b_coded_prev=_b_coded;
- oc_fr_state_advance_sb(_fr,sb_partial,sb_full);
}
else{
- bits+=oc_block_run_bits(b_coded_count);
- _fr->bits=bits;
- _fr->b_coded_count=b_coded_count;
- _fr->b_count=b_count;
- _fr->b_coded=_b_coded;
+ b_count++;
+ if(_fr->b_coded==_b_coded)sb_bits-=oc_block_run_bits(b_coded_count);
+ else b_coded_count=0;
+ sb_bits+=oc_block_run_bits(++b_coded_count);
}
+ _fr->bits=bits+sb_bits;
+ _fr->b_coded_count=b_coded_count;
+ _fr->b_coded=_b_coded;
+ _fr->b_count=b_count;
+ _fr->sb_prefer_partial=sb_prefer_partial;
+ _fr->sb_bits=sb_bits;
}
static void oc_fr_skip_block(oc_fr_state *_fr){
@@ -395,16 +452,6 @@ static int oc_fr_cost4(const oc_fr_state *_pre,const oc_fr_state *_post){
-struct oc_qii_state{
- ptrdiff_t bits;
- unsigned qi01_count:14;
- signed int qi01:2;
- unsigned qi12_count:14;
- signed int qi12:2;
-};
-
-
-
static void oc_qii_state_init(oc_qii_state *_qs){
_qs->bits=0;
_qs->qi01_count=0;
@@ -458,49 +505,17 @@ static void oc_qii_state_advance(oc_qii_state *_qd,
-/*Temporary encoder state for the analysis pipeline.*/
-struct oc_enc_pipeline_state{
- int bounding_values[256];
- oc_fr_state fr[3];
- oc_qii_state qs[3];
- /*Condensed dequantization tables.*/
- const ogg_uint16_t *dequant[3][3][2];
- /*Condensed quantization tables.*/
- const oc_iquant *enquant[3][3][2];
- /*Skip SSD storage for the current MCU in each plane.*/
- unsigned *skip_ssd[3];
- /*Coded/uncoded fragment lists for each plane for the current MCU.*/
- ptrdiff_t *coded_fragis[3];
- ptrdiff_t *uncoded_fragis[3];
- ptrdiff_t ncoded_fragis[3];
- ptrdiff_t nuncoded_fragis[3];
- /*The starting fragment for the current MCU in each plane.*/
- ptrdiff_t froffset[3];
- /*The starting row for the current MCU in each plane.*/
- int fragy0[3];
- /*The ending row for the current MCU in each plane.*/
- int fragy_end[3];
- /*The starting superblock for the current MCU in each plane.*/
- unsigned sbi0[3];
- /*The ending superblock for the current MCU in each plane.*/
- unsigned sbi_end[3];
- /*The number of tokens for zzi=1 for each color plane.*/
- int ndct_tokens1[3];
- /*The outstanding eob_run count for zzi=1 for each color plane.*/
- int eob_run1[3];
- /*Whether or not the loop filter is enabled.*/
- int loop_filter;
-};
-
-
static void oc_enc_pipeline_init(oc_enc_ctx *_enc,oc_enc_pipeline_state *_pipe){
ptrdiff_t *coded_fragis;
unsigned mcu_nvsbs;
ptrdiff_t mcu_nfrags;
+ int flimit;
int hdec;
int vdec;
int pli;
+ int nqis;
int qii;
+ int qi0;
int qti;
/*Initialize the per-plane coded block flag trackers.
These are used for bit-estimation purposes only; the real flag bits span
@@ -529,24 +544,36 @@ static void oc_enc_pipeline_init(oc_enc_ctx *_enc,oc_enc_pipeline_state *_pipe){
memset(_pipe->ncoded_fragis,0,sizeof(_pipe->ncoded_fragis));
memset(_pipe->nuncoded_fragis,0,sizeof(_pipe->nuncoded_fragis));
/*Set up condensed quantizer tables.*/
+ qi0=_enc->state.qis[0];
+ nqis=_enc->state.nqis;
for(pli=0;pli<3;pli++){
- for(qii=0;qii<_enc->state.nqis;qii++){
+ for(qii=0;qii<nqis;qii++){
int qi;
qi=_enc->state.qis[qii];
for(qti=0;qti<2;qti++){
- _pipe->dequant[pli][qii][qti]=_enc->state.dequant_tables[qi][pli][qti];
- _pipe->enquant[pli][qii][qti]=_enc->enquant_tables[qi][pli][qti];
+ /*Set the DC coefficient in the dequantization table.*/
+ _enc->state.dequant_tables[qi][pli][qti][0]=
+ _enc->dequant_dc[qi0][pli][qti];
+ _enc->dequant[pli][qii][qti]=_enc->state.dequant_tables[qi][pli][qti];
+ /*Copy over the quantization table.*/
+ memcpy(_enc->enquant[pli][qii][qti],_enc->enquant_tables[qi][pli][qti],
+ _enc->opt_data.enquant_table_size);
}
}
}
+ /*Fix up the DC coefficients in the quantization tables.*/
+ oc_enc_enquant_table_fixup(_enc,_enc->enquant,nqis);
/*Initialize the tokenization state.*/
for(pli=0;pli<3;pli++){
_pipe->ndct_tokens1[pli]=0;
_pipe->eob_run1[pli]=0;
}
/*Initialize the bounding value array for the loop filter.*/
- _pipe->loop_filter=!oc_state_loop_filter_init(&_enc->state,
- _pipe->bounding_values);
+ flimit=_enc->state.loop_filter_limits[_enc->state.qis[0]];
+ _pipe->loop_filter=flimit!=0;
+ if(flimit!=0)oc_loop_filter_init(&_enc->state,_pipe->bounding_values,flimit);
+ /*Clear the temporary DCT scratch space.*/
+ memset(_pipe->dct_data,0,sizeof(_pipe->dct_data));
}
/*Sets the current MCU stripe to super block row _sby.
@@ -585,13 +612,17 @@ static int oc_enc_pipeline_set_stripe(oc_enc_ctx *_enc,
static void oc_enc_pipeline_finish_mcu_plane(oc_enc_ctx *_enc,
oc_enc_pipeline_state *_pipe,int _pli,int _sdelay,int _edelay){
- int refi;
/*Copy over all the uncoded fragments from this plane and advance the uncoded
fragment list.*/
- _pipe->uncoded_fragis[_pli]-=_pipe->nuncoded_fragis[_pli];
- oc_state_frag_copy_list(&_enc->state,_pipe->uncoded_fragis[_pli],
- _pipe->nuncoded_fragis[_pli],OC_FRAME_SELF,OC_FRAME_PREV,_pli);
- _pipe->nuncoded_fragis[_pli]=0;
+ if(_pipe->nuncoded_fragis[_pli]>0){
+ _pipe->uncoded_fragis[_pli]-=_pipe->nuncoded_fragis[_pli];
+ oc_frag_copy_list(&_enc->state,
+ _enc->state.ref_frame_data[OC_FRAME_SELF],
+ _enc->state.ref_frame_data[OC_FRAME_PREV],
+ _enc->state.ref_ystride[_pli],_pipe->uncoded_fragis[_pli],
+ _pipe->nuncoded_fragis[_pli],_enc->state.frag_buf_offs);
+ _pipe->nuncoded_fragis[_pli]=0;
+ }
/*Perform DC prediction.*/
oc_enc_pred_dc_frag_rows(_enc,_pli,
_pipe->fragy0[_pli],_pipe->fragy_end[_pli]);
@@ -606,17 +637,18 @@ static void oc_enc_pipeline_finish_mcu_plane(oc_enc_ctx *_enc,
_pipe->coded_fragis[_pli]+=_pipe->ncoded_fragis[_pli];
_pipe->ncoded_fragis[_pli]=0;
/*Apply the loop filter if necessary.*/
- refi=_enc->state.ref_frame_idx[OC_FRAME_SELF];
if(_pipe->loop_filter){
- oc_state_loop_filter_frag_rows(&_enc->state,_pipe->bounding_values,
- refi,_pli,_pipe->fragy0[_pli]-_sdelay,_pipe->fragy_end[_pli]-_edelay);
+ oc_state_loop_filter_frag_rows(&_enc->state,
+ _pipe->bounding_values,OC_FRAME_SELF,_pli,
+ _pipe->fragy0[_pli]-_sdelay,_pipe->fragy_end[_pli]-_edelay);
}
else _sdelay=_edelay=0;
/*To fill borders, we have an additional two pixel delay, since a fragment
in the next row could filter its top edge, using two pixels from a
fragment in this row.
But there's no reason to delay a full fragment between the two.*/
- oc_state_borders_fill_rows(&_enc->state,refi,_pli,
+ oc_state_borders_fill_rows(&_enc->state,
+ _enc->state.ref_frame_idx[OC_FRAME_SELF],_pli,
(_pipe->fragy0[_pli]-_sdelay<<3)-(_sdelay<<1),
(_pipe->fragy_end[_pli]-_edelay<<3)-(_edelay<<1));
}
@@ -634,62 +666,62 @@ struct oc_rd_metric{
static int oc_enc_block_transform_quantize(oc_enc_ctx *_enc,
- oc_enc_pipeline_state *_pipe,int _pli,ptrdiff_t _fragi,int _overhead_bits,
- oc_rd_metric *_mo,oc_token_checkpoint **_stack){
- OC_ALIGN16(ogg_int16_t dct[64]);
- OC_ALIGN16(ogg_int16_t data[64]);
- ogg_uint16_t dc_dequant;
+ oc_enc_pipeline_state *_pipe,int _pli,ptrdiff_t _fragi,
+ unsigned _rd_scale,unsigned _rd_iscale,oc_rd_metric *_mo,
+ oc_fr_state *_fr,oc_token_checkpoint **_stack){
+ ogg_int16_t *data;
+ ogg_int16_t *dct;
+ ogg_int16_t *idct;
+ oc_qii_state qs;
const ogg_uint16_t *dequant;
- const oc_iquant *enquant;
+ ogg_uint16_t dequant_dc;
ptrdiff_t frag_offs;
int ystride;
const unsigned char *src;
const unsigned char *ref;
unsigned char *dst;
- int frame_type;
int nonzero;
unsigned uncoded_ssd;
unsigned coded_ssd;
- int coded_dc;
oc_token_checkpoint *checkpoint;
oc_fragment *frags;
int mb_mode;
+ int refi;
int mv_offs[2];
int nmv_offs;
int ac_bits;
int borderi;
+ int nqis;
int qti;
int qii;
- int pi;
- int zzi;
- int v;
- int val;
- int d;
- int s;
int dc;
+ nqis=_enc->state.nqis;
frags=_enc->state.frags;
frag_offs=_enc->state.frag_buf_offs[_fragi];
ystride=_enc->state.ref_ystride[_pli];
src=_enc->state.ref_frame_data[OC_FRAME_IO]+frag_offs;
borderi=frags[_fragi].borderi;
qii=frags[_fragi].qii;
+ data=_enc->pipe.dct_data;
+ dct=data+64;
+ idct=data+128;
if(qii&~3){
#if !defined(OC_COLLECT_METRICS)
if(_enc->sp_level>=OC_SP_LEVEL_EARLY_SKIP){
/*Enable early skip detection.*/
frags[_fragi].coded=0;
+ frags[_fragi].refi=OC_FRAME_NONE;
+ oc_fr_skip_block(_fr);
return 0;
}
#endif
/*Try and code this block anyway.*/
qii&=3;
- frags[_fragi].qii=qii;
}
+ refi=frags[_fragi].refi;
mb_mode=frags[_fragi].mb_mode;
- ref=_enc->state.ref_frame_data[
- _enc->state.ref_frame_idx[OC_FRAME_FOR_MODE(mb_mode)]]+frag_offs;
- dst=_enc->state.ref_frame_data[_enc->state.ref_frame_idx[OC_FRAME_SELF]]
- +frag_offs;
+ ref=_enc->state.ref_frame_data[refi]+frag_offs;
+ dst=_enc->state.ref_frame_data[OC_FRAME_SELF]+frag_offs;
/*Motion compensation:*/
switch(mb_mode){
case OC_MODE_INTRA:{
@@ -704,9 +736,9 @@ static int oc_enc_block_transform_quantize(oc_enc_ctx *_enc,
}break;
default:{
const oc_mv *frag_mvs;
- frag_mvs=(const oc_mv *)_enc->state.frag_mvs;
- nmv_offs=oc_state_get_mv_offsets(&_enc->state,mv_offs,_pli,
- frag_mvs[_fragi][0],frag_mvs[_fragi][1]);
+ frag_mvs=_enc->state.frag_mvs;
+ nmv_offs=oc_state_get_mv_offsets(&_enc->state,mv_offs,
+ _pli,frag_mvs[_fragi]);
if(nmv_offs>1){
oc_enc_frag_copy2(_enc,dst,
ref+mv_offs[0],ref+mv_offs[1],ystride);
@@ -717,126 +749,121 @@ static int oc_enc_block_transform_quantize(oc_enc_ctx *_enc,
}
#if defined(OC_COLLECT_METRICS)
{
+ unsigned sad;
unsigned satd;
switch(nmv_offs){
- case 0:satd=oc_enc_frag_intra_satd(_enc,src,ystride);break;
+ case 0:{
+ sad=oc_enc_frag_intra_sad(_enc,src,ystride);
+ satd=oc_enc_frag_intra_satd(_enc,&dc,src,ystride);
+ }break;
case 1:{
- satd=oc_enc_frag_satd_thresh(_enc,src,ref+mv_offs[0],ystride,UINT_MAX);
+ sad=oc_enc_frag_sad_thresh(_enc,src,ref+mv_offs[0],ystride,UINT_MAX);
+ satd=oc_enc_frag_satd(_enc,&dc,src,ref+mv_offs[0],ystride);
+ satd+=abs(dc);
}break;
default:{
- satd=oc_enc_frag_satd_thresh(_enc,src,dst,ystride,UINT_MAX);
- }
+ sad=oc_enc_frag_sad_thresh(_enc,src,dst,ystride,UINT_MAX);
+ satd=oc_enc_frag_satd(_enc,&dc,src,dst,ystride);
+ satd+=abs(dc);
+ }break;
}
+ _enc->frag_sad[_fragi]=sad;
_enc->frag_satd[_fragi]=satd;
}
#endif
/*Transform:*/
oc_enc_fdct8x8(_enc,dct,data);
- /*Quantize the DC coefficient:*/
+ /*Quantize:*/
qti=mb_mode!=OC_MODE_INTRA;
- enquant=_pipe->enquant[_pli][0][qti];
- dc_dequant=_pipe->dequant[_pli][0][qti][0];
- v=dct[0];
- val=v<<1;
- s=OC_SIGNMASK(val);
- val+=dc_dequant+s^s;
- val=((enquant[0].m*(ogg_int32_t)val>>16)+val>>enquant[0].l)-s;
- dc=OC_CLAMPI(-580,val,580);
- nonzero=0;
- /*Quantize the AC coefficients:*/
- dequant=_pipe->dequant[_pli][qii][qti];
- enquant=_pipe->enquant[_pli][qii][qti];
- for(zzi=1;zzi<64;zzi++){
- v=dct[OC_FZIG_ZAG[zzi]];
- d=dequant[zzi];
- val=v<<1;
- v=abs(val);
- if(v>=d){
- s=OC_SIGNMASK(val);
- /*The bias added here rounds ties away from zero, since token
- optimization can only decrease the magnitude of the quantized
- value.*/
- val+=d+s^s;
- /*Note the arithmetic right shift is not guaranteed by ANSI C.
- Hopefully no one still uses ones-complement architectures.*/
- val=((enquant[zzi].m*(ogg_int32_t)val>>16)+val>>enquant[zzi].l)-s;
- data[zzi]=OC_CLAMPI(-580,val,580);
- nonzero=zzi;
- }
- else data[zzi]=0;
- }
+ dequant=_enc->dequant[_pli][qii][qti];
+ nonzero=oc_enc_quantize(_enc,data,dct,dequant,_enc->enquant[_pli][qii][qti]);
+ dc=data[0];
/*Tokenize.*/
checkpoint=*_stack;
- ac_bits=oc_enc_tokenize_ac(_enc,_pli,_fragi,data,dequant,dct,nonzero+1,
- _stack,qti?0:3);
+ if(_enc->sp_level<OC_SP_LEVEL_FAST_ANALYSIS){
+ ac_bits=oc_enc_tokenize_ac(_enc,_pli,_fragi,idct,data,dequant,dct,
+ nonzero+1,_stack,OC_RD_ISCALE(_enc->lambda,_rd_iscale),qti?0:3);
+ }
+ else{
+ ac_bits=oc_enc_tokenize_ac_fast(_enc,_pli,_fragi,idct,data,dequant,dct,
+ nonzero+1,_stack,OC_RD_ISCALE(_enc->lambda,_rd_iscale),qti?0:3);
+ }
/*Reconstruct.
TODO: nonzero may need to be adjusted after tokenization.*/
+ dequant_dc=dequant[0];
if(nonzero==0){
ogg_int16_t p;
int ci;
+ int qi01;
+ int qi12;
/*We round this dequant product (and not any of the others) because there's
no iDCT rounding.*/
- p=(ogg_int16_t)(dc*(ogg_int32_t)dc_dequant+15>>5);
+ p=(ogg_int16_t)(dc*(ogg_int32_t)dequant_dc+15>>5);
/*LOOP VECTORIZES.*/
for(ci=0;ci<64;ci++)data[ci]=p;
+ /*We didn't code any AC coefficients, so don't change the quantizer.*/
+ qi01=_pipe->qs[_pli].qi01;
+ qi12=_pipe->qs[_pli].qi12;
+ if(qi01>0)qii=1+qi12;
+ else if(qi01>=0)qii=0;
}
else{
- data[0]=dc*dc_dequant;
- oc_idct8x8(&_enc->state,data,nonzero+1);
+ idct[0]=dc*dequant_dc;
+ /*Note: This clears idct[] back to zero for the next block.*/
+ oc_idct8x8(&_enc->state,data,idct,nonzero+1);
+ }
+ frags[_fragi].qii=qii;
+ if(nqis>1){
+ oc_qii_state_advance(&qs,_pipe->qs+_pli,qii);
+ ac_bits+=qs.bits-_pipe->qs[_pli].bits;
}
if(!qti)oc_enc_frag_recon_intra(_enc,dst,ystride,data);
else{
oc_enc_frag_recon_inter(_enc,dst,
nmv_offs==1?ref+mv_offs[0]:dst,ystride,data);
}
- frame_type=_enc->state.frame_type;
+ /*If _fr is NULL, then this is an INTRA frame, and we can't skip blocks.*/
#if !defined(OC_COLLECT_METRICS)
- if(frame_type!=OC_INTRA_FRAME)
+ if(_fr!=NULL)
#endif
{
/*In retrospect, should we have skipped this block?*/
- oc_enc_frag_sub(_enc,data,src,dst,ystride);
- coded_ssd=coded_dc=0;
if(borderi<0){
- for(pi=0;pi<64;pi++){
- coded_ssd+=data[pi]*data[pi];
- coded_dc+=data[pi];
- }
+ coded_ssd=oc_enc_frag_ssd(_enc,src,dst,ystride);
}
else{
- ogg_int64_t mask;
- mask=_enc->state.borders[borderi].mask;
- for(pi=0;pi<64;pi++,mask>>=1)if(mask&1){
- coded_ssd+=data[pi]*data[pi];
- coded_dc+=data[pi];
- }
+ coded_ssd=oc_enc_frag_border_ssd(_enc,src,dst,ystride,
+ _enc->state.borders[borderi].mask);
}
/*Scale to match DCT domain.*/
coded_ssd<<=4;
- /*We actually only want the AC contribution to the SSD.*/
- coded_ssd-=coded_dc*coded_dc>>2;
#if defined(OC_COLLECT_METRICS)
_enc->frag_ssd[_fragi]=coded_ssd;
}
- if(frame_type!=OC_INTRA_FRAME){
+ if(_fr!=NULL){
#endif
+ coded_ssd=OC_RD_SCALE(coded_ssd,_rd_scale);
uncoded_ssd=_pipe->skip_ssd[_pli][_fragi-_pipe->froffset[_pli]];
- if(uncoded_ssd<UINT_MAX){
+ if(uncoded_ssd<UINT_MAX&&
+ /*Don't allow luma blocks to be skipped in 4MV mode when VP3 compatibility
+ is enabled.*/
+ (!_enc->vp3_compatible||mb_mode!=OC_MODE_INTER_MV_FOUR||_pli)){
+ int overhead_bits;
+ overhead_bits=oc_fr_cost1(_fr);
/*Although the fragment coding overhead determination is accurate, it is
greedy, using very coarse-grained local information.
Allowing it to mildly discourage coding turns out to be beneficial, but
it's not clear that allowing it to encourage coding through negative
coding overhead deltas is useful.
- For that reason, we disallow negative coding_overheads.*/
- if(_overhead_bits<0)_overhead_bits=0;
- if(uncoded_ssd<=coded_ssd+(_overhead_bits+ac_bits)*_enc->lambda&&
- /*Don't allow luma blocks to be skipped in 4MV mode when VP3
- compatibility is enabled.*/
- (!_enc->vp3_compatible||mb_mode!=OC_MODE_INTER_MV_FOUR||_pli)){
+ For that reason, we disallow negative coding overheads.*/
+ if(overhead_bits<0)overhead_bits=0;
+ if(uncoded_ssd<=coded_ssd+(overhead_bits+ac_bits)*_enc->lambda){
/*Hm, not worth it; roll back.*/
oc_enc_tokenlog_rollback(_enc,checkpoint,(*_stack)-checkpoint);
*_stack=checkpoint;
frags[_fragi].coded=0;
+ frags[_fragi].refi=OC_FRAME_NONE;
+ oc_fr_skip_block(_fr);
return 0;
}
}
@@ -844,15 +871,20 @@ static int oc_enc_block_transform_quantize(oc_enc_ctx *_enc,
_mo->uncoded_ac_ssd+=uncoded_ssd;
_mo->coded_ac_ssd+=coded_ssd;
_mo->ac_bits+=ac_bits;
+ oc_fr_code_block(_fr);
}
- oc_qii_state_advance(_pipe->qs+_pli,_pipe->qs+_pli,qii);
+ /*GCC 4.4.4 generates a warning here because it can't tell that
+ the init code in the nqis check above will run anytime this
+ line runs.*/
+ if(nqis>1)*(_pipe->qs+_pli)=*&qs;
frags[_fragi].dc=dc;
frags[_fragi].coded=1;
return 1;
}
-static int oc_enc_mb_transform_quantize_luma(oc_enc_ctx *_enc,
- oc_enc_pipeline_state *_pipe,unsigned _mbi,int _mode_overhead){
+static int oc_enc_mb_transform_quantize_inter_luma(oc_enc_ctx *_enc,
+ oc_enc_pipeline_state *_pipe,unsigned _mbi,int _mode_overhead,
+ const unsigned _rd_scale[4],const unsigned _rd_iscale[4]){
/*Worst case token stack usage for 4 fragments.*/
oc_token_checkpoint stack[64*4];
oc_token_checkpoint *stackptr;
@@ -867,6 +899,7 @@ static int oc_enc_mb_transform_quantize_luma(oc_enc_ctx *_enc,
oc_fr_state fr_checkpoint;
oc_qii_state qs_checkpoint;
int mb_mode;
+ int refi;
int ncoded;
ptrdiff_t fragi;
int bi;
@@ -880,78 +913,83 @@ static int oc_enc_mb_transform_quantize_luma(oc_enc_ctx *_enc,
uncoded_fragis=_pipe->uncoded_fragis[0];
nuncoded_fragis=_pipe->nuncoded_fragis[0];
mb_mode=mb_modes[_mbi];
+ refi=OC_FRAME_FOR_MODE(mb_mode);
ncoded=0;
stackptr=stack;
memset(&mo,0,sizeof(mo));
for(bi=0;bi<4;bi++){
fragi=sb_maps[_mbi>>2][_mbi&3][bi];
+ frags[fragi].refi=refi;
frags[fragi].mb_mode=mb_mode;
- if(oc_enc_block_transform_quantize(_enc,
- _pipe,0,fragi,oc_fr_cost1(_pipe->fr+0),&mo,&stackptr)){
- oc_fr_code_block(_pipe->fr+0);
+ if(oc_enc_block_transform_quantize(_enc,_pipe,0,fragi,
+ _rd_scale[bi],_rd_iscale[bi],&mo,_pipe->fr+0,&stackptr)){
coded_fragis[ncoded_fragis++]=fragi;
ncoded++;
}
- else{
- *(uncoded_fragis-++nuncoded_fragis)=fragi;
- oc_fr_skip_block(_pipe->fr+0);
- }
+ else *(uncoded_fragis-++nuncoded_fragis)=fragi;
}
- if(_enc->state.frame_type!=OC_INTRA_FRAME){
- if(ncoded>0&&!mo.dc_flag){
- int cost;
- /*Some individual blocks were worth coding.
- See if that's still true when accounting for mode and MV overhead.*/
- cost=mo.coded_ac_ssd+_enc->lambda*(mo.ac_bits
- +oc_fr_cost4(&fr_checkpoint,_pipe->fr+0)+_mode_overhead);
- if(mo.uncoded_ac_ssd<=cost){
- /*Taking macroblock overhead into account, it is not worth coding this
- MB.*/
- oc_enc_tokenlog_rollback(_enc,stack,stackptr-stack);
- *(_pipe->fr+0)=*&fr_checkpoint;
- *(_pipe->qs+0)=*&qs_checkpoint;
- for(bi=0;bi<4;bi++){
- fragi=sb_maps[_mbi>>2][_mbi&3][bi];
- if(frags[fragi].coded){
- *(uncoded_fragis-++nuncoded_fragis)=fragi;
- frags[fragi].coded=0;
- }
- oc_fr_skip_block(_pipe->fr+0);
+ if(ncoded>0&&!mo.dc_flag){
+ int cost;
+ /*Some individual blocks were worth coding.
+ See if that's still true when accounting for mode and MV overhead.*/
+ cost=mo.coded_ac_ssd+_enc->lambda*(mo.ac_bits
+ +oc_fr_cost4(&fr_checkpoint,_pipe->fr+0)+_mode_overhead);
+ if(mo.uncoded_ac_ssd<=cost){
+ /*Taking macroblock overhead into account, it is not worth coding this
+ MB.*/
+ oc_enc_tokenlog_rollback(_enc,stack,stackptr-stack);
+ *(_pipe->fr+0)=*&fr_checkpoint;
+ *(_pipe->qs+0)=*&qs_checkpoint;
+ for(bi=0;bi<4;bi++){
+ fragi=sb_maps[_mbi>>2][_mbi&3][bi];
+ if(frags[fragi].coded){
+ *(uncoded_fragis-++nuncoded_fragis)=fragi;
+ frags[fragi].coded=0;
+ frags[fragi].refi=OC_FRAME_NONE;
}
- ncoded_fragis-=ncoded;
- ncoded=0;
+ oc_fr_skip_block(_pipe->fr+0);
}
- }
- /*If no luma blocks coded, the mode is forced.*/
- if(ncoded==0)mb_modes[_mbi]=OC_MODE_INTER_NOMV;
- /*Assume that a 1MV with a single coded block is always cheaper than a 4MV
- with a single coded block.
- This may not be strictly true: a 4MV computes chroma MVs using (0,0) for
- skipped blocks, while a 1MV does not.*/
- else if(ncoded==1&&mb_mode==OC_MODE_INTER_MV_FOUR){
- mb_modes[_mbi]=OC_MODE_INTER_MV;
+ ncoded_fragis-=ncoded;
+ ncoded=0;
}
}
+ /*If no luma blocks coded, the mode is forced.*/
+ if(ncoded==0)mb_modes[_mbi]=OC_MODE_INTER_NOMV;
+ /*Assume that a 1MV with a single coded block is always cheaper than a 4MV
+ with a single coded block.
+ This may not be strictly true: a 4MV computes chroma MVs using (0,0) for
+ skipped blocks, while a 1MV does not.*/
+ else if(ncoded==1&&mb_mode==OC_MODE_INTER_MV_FOUR){
+ mb_modes[_mbi]=OC_MODE_INTER_MV;
+ }
_pipe->ncoded_fragis[0]=ncoded_fragis;
_pipe->nuncoded_fragis[0]=nuncoded_fragis;
return ncoded;
}
-static void oc_enc_sb_transform_quantize_chroma(oc_enc_ctx *_enc,
+static void oc_enc_sb_transform_quantize_inter_chroma(oc_enc_ctx *_enc,
oc_enc_pipeline_state *_pipe,int _pli,int _sbi_start,int _sbi_end){
- const oc_sb_map *sb_maps;
- oc_sb_flags *sb_flags;
- ptrdiff_t *coded_fragis;
- ptrdiff_t ncoded_fragis;
- ptrdiff_t *uncoded_fragis;
- ptrdiff_t nuncoded_fragis;
- int sbi;
+ const ogg_uint16_t *mcu_rd_scale;
+ const ogg_uint16_t *mcu_rd_iscale;
+ const oc_sb_map *sb_maps;
+ oc_sb_flags *sb_flags;
+ oc_fr_state *fr;
+ ptrdiff_t *coded_fragis;
+ ptrdiff_t ncoded_fragis;
+ ptrdiff_t *uncoded_fragis;
+ ptrdiff_t nuncoded_fragis;
+ ptrdiff_t froffset;
+ int sbi;
+ fr=_pipe->fr+_pli;
+ mcu_rd_scale=(const ogg_uint16_t *)_enc->mcu_rd_scale;
+ mcu_rd_iscale=(const ogg_uint16_t *)_enc->mcu_rd_iscale;
sb_maps=(const oc_sb_map *)_enc->state.sb_maps;
sb_flags=_enc->state.sb_flags;
coded_fragis=_pipe->coded_fragis[_pli];
ncoded_fragis=_pipe->ncoded_fragis[_pli];
uncoded_fragis=_pipe->uncoded_fragis[_pli];
nuncoded_fragis=_pipe->nuncoded_fragis[_pli];
+ froffset=_pipe->froffset[_pli];
for(sbi=_sbi_start;sbi<_sbi_end;sbi++){
/*Worst case token stack usage for 1 fragment.*/
oc_token_checkpoint stack[64];
@@ -964,21 +1002,21 @@ static void oc_enc_sb_transform_quantize_chroma(oc_enc_ctx *_enc,
fragi=sb_maps[sbi][quadi][bi];
if(fragi>=0){
oc_token_checkpoint *stackptr;
+ unsigned rd_scale;
+ unsigned rd_iscale;
+ rd_scale=mcu_rd_scale[fragi-froffset];
+ rd_iscale=mcu_rd_iscale[fragi-froffset];
stackptr=stack;
- if(oc_enc_block_transform_quantize(_enc,
- _pipe,_pli,fragi,oc_fr_cost1(_pipe->fr+_pli),&mo,&stackptr)){
+ if(oc_enc_block_transform_quantize(_enc,_pipe,_pli,fragi,
+ rd_scale,rd_iscale,&mo,fr,&stackptr)){
coded_fragis[ncoded_fragis++]=fragi;
- oc_fr_code_block(_pipe->fr+_pli);
- }
- else{
- *(uncoded_fragis-++nuncoded_fragis)=fragi;
- oc_fr_skip_block(_pipe->fr+_pli);
}
+ else *(uncoded_fragis-++nuncoded_fragis)=fragi;
}
}
- oc_fr_state_flush_sb(_pipe->fr+_pli);
- sb_flags[sbi].coded_fully=_pipe->fr[_pli].sb_full;
- sb_flags[sbi].coded_partially=_pipe->fr[_pli].sb_partial;
+ oc_fr_state_flush_sb(fr);
+ sb_flags[sbi].coded_fully=fr->sb_full;
+ sb_flags[sbi].coded_partially=fr->sb_partial;
}
_pipe->ncoded_fragis[_pli]=ncoded_fragis;
_pipe->nuncoded_fragis[_pli]=nuncoded_fragis;
@@ -1006,8 +1044,8 @@ static void oc_enc_sb_transform_quantize_chroma(oc_enc_ctx *_enc,
The bit counts and SSD measurements are obtained by examining actual encoded
frames, with appropriate lambda values and optimal Huffman codes selected.
EOB bits are assigned to the fragment that started the EOB run (as opposed to
- dividing them among all the blocks in the run; though the latter approach
- seems more theoretically correct, Monty's testing showed a small improvement
+ dividing them among all the blocks in the run; the latter approach seems
+ more theoretically correct, but Monty's testing showed a small improvement
with the former, though that may have been merely statistical noise).
@ARTICLE{Kim03,
@@ -1028,11 +1066,63 @@ static void oc_enc_sb_transform_quantize_chroma(oc_enc_ctx *_enc,
+(((_ssd)&(1<<OC_BIT_SCALE)-1)+((_rate)&(1<<OC_BIT_SCALE)-1)*(_lambda) \
+((1<<OC_BIT_SCALE)>>1)>>OC_BIT_SCALE)
+static void oc_enc_mode_rd_init(oc_enc_ctx *_enc){
+#if !defined(OC_COLLECT_METRICS)
+ const
+#endif
+ oc_mode_rd (*oc_mode_rd_table)[3][2][OC_COMP_BINS]=
+ _enc->sp_level<OC_SP_LEVEL_NOSATD?OC_MODE_RD_SATD:OC_MODE_RD_SAD;
+ int qii;
+#if defined(OC_COLLECT_METRICS)
+ oc_enc_mode_metrics_load(_enc);
+#endif
+ for(qii=0;qii<_enc->state.nqis;qii++){
+ int qi;
+ int pli;
+ qi=_enc->state.qis[qii];
+ for(pli=0;pli<3;pli++){
+ int qti;
+ for(qti=0;qti<2;qti++){
+ int log_plq;
+ int modeline;
+ int bin;
+ int dx;
+ int dq;
+ log_plq=_enc->log_plq[qi][pli][qti];
+ /*Find the pair of rows in the mode table that bracket this quantizer.
+ If it falls outside the range the table covers, then we just use a
+ pair on the edge for linear extrapolation.*/
+ for(modeline=0;modeline<OC_LOGQ_BINS-1&&
+ OC_MODE_LOGQ[modeline+1][pli][qti]>log_plq;modeline++);
+ /*Interpolate a row for this quantizer.*/
+ dx=OC_MODE_LOGQ[modeline][pli][qti]-log_plq;
+ dq=OC_MODE_LOGQ[modeline][pli][qti]-OC_MODE_LOGQ[modeline+1][pli][qti];
+ if(dq==0)dq=1;
+ for(bin=0;bin<OC_COMP_BINS;bin++){
+ int y0;
+ int z0;
+ int dy;
+ int dz;
+ y0=oc_mode_rd_table[modeline][pli][qti][bin].rate;
+ z0=oc_mode_rd_table[modeline][pli][qti][bin].rmse;
+ dy=oc_mode_rd_table[modeline+1][pli][qti][bin].rate-y0;
+ dz=oc_mode_rd_table[modeline+1][pli][qti][bin].rmse-z0;
+ _enc->mode_rd[qii][pli][qti][bin].rate=
+ (ogg_int16_t)OC_CLAMPI(-32768,y0+(dy*dx+(dq>>1))/dq,32767);
+ _enc->mode_rd[qii][pli][qti][bin].rmse=
+ (ogg_int16_t)OC_CLAMPI(-32768,z0+(dz*dx+(dq>>1))/dq,32767);
+ }
+ }
+ }
+ }
+}
+
/*Estimate the R-D cost of the DCT coefficients given the SATD of a block after
prediction.*/
-static unsigned oc_dct_cost2(unsigned *_ssd,
- int _qi,int _pli,int _qti,int _satd){
+static unsigned oc_dct_cost2(oc_enc_ctx *_enc,unsigned *_ssd,
+ int _qii,int _pli,int _qti,int _satd){
unsigned rmse;
+ int shift;
int bin;
int dx;
int y0;
@@ -1042,20 +1132,279 @@ static unsigned oc_dct_cost2(unsigned *_ssd,
/*SATD metrics for chroma planes vary much less than luma, so we scale them
by 4 to distribute them into the mode decision bins more evenly.*/
_satd<<=_pli+1&2;
- bin=OC_MINI(_satd>>OC_SAD_SHIFT,OC_SAD_BINS-2);
- dx=_satd-(bin<<OC_SAD_SHIFT);
- y0=OC_MODE_RD[_qi][_pli][_qti][bin].rate;
- z0=OC_MODE_RD[_qi][_pli][_qti][bin].rmse;
- dy=OC_MODE_RD[_qi][_pli][_qti][bin+1].rate-y0;
- dz=OC_MODE_RD[_qi][_pli][_qti][bin+1].rmse-z0;
- rmse=OC_MAXI(z0+(dz*dx>>OC_SAD_SHIFT),0);
+ shift=_enc->sp_level<OC_SP_LEVEL_NOSATD?OC_SATD_SHIFT:OC_SAD_SHIFT;
+ bin=OC_MINI(_satd>>shift,OC_COMP_BINS-2);
+ dx=_satd-(bin<<shift);
+ y0=_enc->mode_rd[_qii][_pli][_qti][bin].rate;
+ z0=_enc->mode_rd[_qii][_pli][_qti][bin].rmse;
+ dy=_enc->mode_rd[_qii][_pli][_qti][bin+1].rate-y0;
+ dz=_enc->mode_rd[_qii][_pli][_qti][bin+1].rmse-z0;
+ rmse=OC_MAXI(z0+(dz*dx>>shift),0);
*_ssd=rmse*rmse>>2*OC_RMSE_SCALE-OC_BIT_SCALE;
- return OC_MAXI(y0+(dy*dx>>OC_SAD_SHIFT),0);
+ return OC_MAXI(y0+(dy*dx>>shift),0);
+}
+
+/*activity_avg must be positive, or flat regions could get a zero weight, which
+ confounds analysis.
+ We set the minimum to this value so that it also avoids the need for divide
+ by zero checks in oc_mb_masking().*/
+# define OC_ACTIVITY_AVG_MIN (1<<OC_RD_SCALE_BITS)
+
+static unsigned oc_mb_activity(oc_enc_ctx *_enc,unsigned _mbi,
+ unsigned _activity[4]){
+ const unsigned char *src;
+ const ptrdiff_t *frag_buf_offs;
+ const ptrdiff_t *sb_map;
+ unsigned luma;
+ int ystride;
+ ptrdiff_t frag_offs;
+ ptrdiff_t fragi;
+ int bi;
+ frag_buf_offs=_enc->state.frag_buf_offs;
+ sb_map=_enc->state.sb_maps[_mbi>>2][_mbi&3];
+ src=_enc->state.ref_frame_data[OC_FRAME_IO];
+ ystride=_enc->state.ref_ystride[0];
+ luma=0;
+ for(bi=0;bi<4;bi++){
+ const unsigned char *s;
+ unsigned x;
+ unsigned x2;
+ unsigned act;
+ int i;
+ int j;
+ fragi=sb_map[bi];
+ frag_offs=frag_buf_offs[fragi];
+ /*TODO: This could be replaced with SATD^2, since we already have to
+ compute SATD.*/
+ x=x2=0;
+ s=src+frag_offs;
+ for(i=0;i<8;i++){
+ for(j=0;j<8;j++){
+ unsigned c;
+ c=s[j];
+ x+=c;
+ x2+=c*c;
+ }
+ s+=ystride;
+ }
+ luma+=x;
+ act=(x2<<6)-x*x;
+ if(act<8<<12){
+ /*The region is flat.*/
+ act=OC_MINI(act,5<<12);
+ }
+ else{
+ unsigned e1;
+ unsigned e2;
+ unsigned e3;
+ unsigned e4;
+ /*Test for an edge.
+ TODO: There are probably much simpler ways to do this (e.g., it could
+ probably be combined with the SATD calculation).
+ Alternatively, we could split the block around the mean and compute the
+ reduction in variance in each half.
+ For a Gaussian source the reduction should be
+ (1-2/pi) ~= 0.36338022763241865692446494650994.
+ Significantly more reduction is a good indication of a bi-level image.
+ This has the advantage of identifying, in addition to straight edges,
+ small text regions, which would otherwise be classified as "texture".*/
+ e1=e2=e3=e4=0;
+ s=src+frag_offs-1;
+ for(i=0;i<8;i++){
+ for(j=0;j<8;j++){
+ e1+=abs((s[j+2]-s[j]<<1)+(s-ystride)[j+2]-(s-ystride)[j]
+ +(s+ystride)[j+2]-(s+ystride)[j]);
+ e2+=abs(((s+ystride)[j+1]-(s-ystride)[j+1]<<1)
+ +(s+ystride)[j]-(s-ystride)[j]+(s+ystride)[j+2]-(s-ystride)[j+2]);
+ e3+=abs(((s+ystride)[j+2]-(s-ystride)[j]<<1)
+ +(s+ystride)[j+1]-s[j]+s[j+2]-(s-ystride)[j+1]);
+ e4+=abs(((s+ystride)[j]-(s-ystride)[j+2]<<1)
+ +(s+ystride)[j+1]-s[j+2]+s[j]-(s-ystride)[j+1]);
+ }
+ s+=ystride;
+ }
+ /*If the largest component of the edge energy is at least 40% of the
+ total, then classify the block as an edge block.*/
+ if(5*OC_MAXI(OC_MAXI(e1,e2),OC_MAXI(e3,e4))>2*(e1+e2+e3+e4)){
+ /*act=act_th*(act/act_th)**0.7
+ =exp(log(act_th)+0.7*(log(act)-log(act_th))).
+ Here act_th=5.0 and 0x394A=oc_blog32_q10(5<<12).*/
+ act=oc_bexp32_q10(0x394A+(7*(oc_blog32_q10(act)-0x394A+5)/10));
+ }
+ }
+ _activity[bi]=act;
+ }
+ return luma;
+}
+
+static void oc_mb_activity_fast(oc_enc_ctx *_enc,unsigned _mbi,
+ unsigned _activity[4],const unsigned _intra_satd[12]){
+ int bi;
+ for(bi=0;bi<4;bi++){
+ unsigned act;
+ act=(11*_intra_satd[bi]>>8)*_intra_satd[bi];
+ if(act<8<<12){
+ /*The region is flat.*/
+ act=OC_MINI(act,5<<12);
+ }
+ _activity[bi]=act;
+ }
+}
+
+/*Compute the masking scales for the blocks in a macro block.
+ All masking is computed from the luma blocks.
+ We derive scaling factors for the chroma blocks from these, and use the same
+ ones for all chroma blocks, regardless of the subsampling.
+ It's possible for luma to be perfectly flat and yet have high chroma energy,
+ but this is unlikely in non-artificial images, and not a case that has been
+ addressed by any research to my knowledge.
+ The output of the masking process is two scale factors, which are fed into
+ the various R-D optimizations.
+ The first, rd_scale, is applied to D in the equation
+ D*rd_scale+lambda*R.
+ This is the form that must be used to properly combine scores from multiple
+ blocks, and can be interpreted as scaling distortions by their visibility.
+ The inverse, rd_iscale, is applied to lambda in the equation
+ D+rd_iscale*lambda*R.
+ This is equivalent to the first form within a single block, but much faster
+ to use when evaluating many possible distortions (e.g., during actual
+ quantization, where separate distortions are evaluated for every
+ coefficient).
+ The two macros OC_RD_SCALE(rd_scale,d) and OC_RD_ISCALE(rd_iscale,lambda) are
+ used to perform the multiplications with the proper re-scaling for the range
+ of the scaling factors.
+ Many researchers apply masking values directly to the quantizers used, and
+ not to the R-D cost.
+ Since we generally use MSE for D, rd_scale must use the square of their
+ values to generate an equivalent effect.*/
+static unsigned oc_mb_masking(unsigned _rd_scale[5],unsigned _rd_iscale[5],
+ const ogg_uint16_t _chroma_rd_scale[2],const unsigned _activity[4],
+ unsigned _activity_avg,unsigned _luma,unsigned _luma_avg){
+ unsigned activity_sum;
+ unsigned la;
+ unsigned lb;
+ unsigned d;
+ int bi;
+ int bi_min;
+ int bi_min2;
+ /*The ratio lb/la is meant to approximate
+ ((((_luma-16)/219)*(255/128))**0.649**0.4**2), which is the
+ effective luminance masking from~\cite{LKW06} (including the self-masking
+ deflator).
+ The following actually turns out to be a pretty good approximation for
+ _luma>75 or so.
+ For smaller values luminance does not really follow Weber's Law anyway, and
+ this approximation gives a much less aggressive bitrate boost in this
+ region.
+ Though some researchers claim that contrast sensitivity actually decreases
+ for very low luminance values, in my experience excessive brightness on
+ LCDs or buggy color conversions (e.g., treating Y' as full-range instead
+ of the CCIR 601 range) make artifacts in such regions extremely visible.
+ We substitute _luma_avg for 128 to allow the strength of the masking to
+ vary with the actual average image luminance, within certain limits (the
+ caller has clamped _luma_avg to the range [90,160], inclusive).
+ @ARTICLE{LKW06,
+ author="Zhen Liu and Lina J. Karam and Andrew B. Watson",
+ title="{JPEG2000} Encoding With Perceptual Distortion Control",
+ journal="{IEEE} Transactions on Image Processing",
+ volume=15,
+ number=7,
+ pages="1763--1778",
+ month=Jul,
+ year=2006
+ }*/
+#if 0
+ la=_luma+4*_luma_avg;
+ lb=4*_luma+_luma_avg;
+#else
+ /*Disable luminance masking.*/
+ la=lb=1;
+#endif
+ activity_sum=0;
+ for(bi=0;bi<4;bi++){
+ unsigned a;
+ unsigned b;
+ activity_sum+=_activity[bi];
+ /*Apply activity masking.*/
+ a=_activity[bi]+4*_activity_avg;
+ b=4*_activity[bi]+_activity_avg;
+ d=OC_RD_SCALE(b,1);
+ /*And luminance masking.*/
+ d=(a+(d>>1))/d;
+ _rd_scale[bi]=(d*la+(lb>>1))/lb;
+ /*And now the inverse.*/
+ d=OC_MAXI(OC_RD_ISCALE(a,1),1);
+ d=(b+(d>>1))/d;
+ _rd_iscale[bi]=(d*lb+(la>>1))/la;
+ }
+ /*Now compute scaling factors for chroma blocks.
+ We start by finding the two smallest iscales from the luma blocks.*/
+ bi_min=_rd_iscale[1]<_rd_iscale[0];
+ bi_min2=1-bi_min;
+ for(bi=2;bi<4;bi++){
+ if(_rd_iscale[bi]<_rd_iscale[bi_min]){
+ bi_min2=bi_min;
+ bi_min=bi;
+ }
+ else if(_rd_iscale[bi]<_rd_iscale[bi_min2])bi_min2=bi;
+ }
+ /*If the minimum iscale is less than 1.0, use the second smallest instead,
+ and force the value to at least 1.0 (inflating chroma is a waste).*/
+ if(_rd_iscale[bi_min]<(1<<OC_RD_ISCALE_BITS))bi_min=bi_min2;
+ d=OC_MINI(_rd_scale[bi_min],1<<OC_RD_SCALE_BITS);
+ _rd_scale[4]=OC_RD_SCALE(d,_chroma_rd_scale[0]);
+ d=OC_MAXI(_rd_iscale[bi_min],1<<OC_RD_ISCALE_BITS);
+ _rd_iscale[4]=OC_RD_ISCALE(d,_chroma_rd_scale[1]);
+ return activity_sum;
+}
+
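/*A small sketch of the two equivalent cost forms described in the comment
  above oc_mb_masking().  Within a single block, scaling D by rd_scale or
  scaling lambda*R by its inverse orders candidate (D,R) pairs identically,
  since the second form is just the first divided by the positive constant
  rd_scale.  Plain doubles are used here; the encoder itself goes through the
  fixed-point OC_RD_SCALE()/OC_RD_ISCALE() macros.*/
static double rd_cost_scaled(double _d,double _r,double _rd_scale,
 double _lambda){
  return _d*_rd_scale+_lambda*_r;
}

static double rd_cost_iscaled(double _d,double _r,double _rd_iscale,
 double _lambda){
  /*With _rd_iscale==1/_rd_scale this equals rd_cost_scaled()/_rd_scale, so
    the argmin over candidates for a single block is unchanged.*/
  return _d+_rd_iscale*_lambda*_r;
}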
+static int oc_mb_intra_satd(oc_enc_ctx *_enc,unsigned _mbi,
+ unsigned _frag_satd[12]){
+ const unsigned char *src;
+ const ptrdiff_t *frag_buf_offs;
+ const ptrdiff_t *sb_map;
+ const oc_mb_map_plane *mb_map;
+ const unsigned char *map_idxs;
+ int map_nidxs;
+ int mapii;
+ int mapi;
+ int ystride;
+ int pli;
+ int bi;
+ ptrdiff_t fragi;
+ ptrdiff_t frag_offs;
+ unsigned luma;
+ int dc;
+ frag_buf_offs=_enc->state.frag_buf_offs;
+ sb_map=_enc->state.sb_maps[_mbi>>2][_mbi&3];
+ src=_enc->state.ref_frame_data[OC_FRAME_IO];
+ ystride=_enc->state.ref_ystride[0];
+ luma=0;
+ for(bi=0;bi<4;bi++){
+ fragi=sb_map[bi];
+ frag_offs=frag_buf_offs[fragi];
+ _frag_satd[bi]=oc_enc_frag_intra_satd(_enc,&dc,src+frag_offs,ystride);
+ luma+=dc;
+ }
+ mb_map=(const oc_mb_map_plane *)_enc->state.mb_maps[_mbi];
+ map_idxs=OC_MB_MAP_IDXS[_enc->state.info.pixel_fmt];
+ map_nidxs=OC_MB_MAP_NIDXS[_enc->state.info.pixel_fmt];
+ /*Note: This assumes ref_ystride[1]==ref_ystride[2].*/
+ ystride=_enc->state.ref_ystride[1];
+ for(mapii=4;mapii<map_nidxs;mapii++){
+ mapi=map_idxs[mapii];
+ pli=mapi>>2;
+ bi=mapi&3;
+ fragi=mb_map[pli][bi];
+ frag_offs=frag_buf_offs[fragi];
+ _frag_satd[mapii]=oc_enc_frag_intra_satd(_enc,&dc,src+frag_offs,ystride);
+ }
+ return luma;
}
/*Select luma block-level quantizers for a MB in an INTRA frame.*/
static unsigned oc_analyze_intra_mb_luma(oc_enc_ctx *_enc,
- const oc_qii_state *_qs,unsigned _mbi){
+ const oc_qii_state *_qs,unsigned _mbi,const unsigned _rd_scale[4]){
const unsigned char *src;
const ptrdiff_t *frag_buf_offs;
const oc_sb_map *sb_maps;
@@ -1068,6 +1417,7 @@ static unsigned oc_analyze_intra_mb_luma(oc_enc_ctx *_enc,
unsigned rate[4][3];
int prev[3][3];
unsigned satd;
+ int dc;
unsigned best_cost;
unsigned best_ssd;
unsigned best_rate;
@@ -1083,19 +1433,30 @@ static unsigned oc_analyze_intra_mb_luma(oc_enc_ctx *_enc,
ystride=_enc->state.ref_ystride[0];
fragi=sb_maps[_mbi>>2][_mbi&3][0];
frag_offs=frag_buf_offs[fragi];
- satd=oc_enc_frag_intra_satd(_enc,src+frag_offs,ystride);
+ if(_enc->sp_level<OC_SP_LEVEL_NOSATD){
+ satd=oc_enc_frag_intra_satd(_enc,&dc,src+frag_offs,ystride);
+ }
+ else{
+ satd=oc_enc_frag_intra_sad(_enc,src+frag_offs,ystride);
+ }
nqis=_enc->state.nqis;
lambda=_enc->lambda;
for(qii=0;qii<nqis;qii++){
oc_qii_state_advance(qs[0]+qii,_qs,qii);
- rate[0][qii]=oc_dct_cost2(ssd[0]+qii,_enc->state.qis[qii],0,0,satd)
+ rate[0][qii]=oc_dct_cost2(_enc,ssd[0]+qii,qii,0,0,satd)
+(qs[0][qii].bits-_qs->bits<<OC_BIT_SCALE);
+ ssd[0][qii]=OC_RD_SCALE(ssd[0][qii],_rd_scale[0]);
cost[0][qii]=OC_MODE_RD_COST(ssd[0][qii],rate[0][qii],lambda);
}
for(bi=1;bi<4;bi++){
fragi=sb_maps[_mbi>>2][_mbi&3][bi];
frag_offs=frag_buf_offs[fragi];
- satd=oc_enc_frag_intra_satd(_enc,src+frag_offs,ystride);
+ if(_enc->sp_level<OC_SP_LEVEL_NOSATD){
+ satd=oc_enc_frag_intra_satd(_enc,&dc,src+frag_offs,ystride);
+ }
+ else{
+ satd=oc_enc_frag_intra_sad(_enc,src+frag_offs,ystride);
+ }
for(qii=0;qii<nqis;qii++){
oc_qii_state qt[3];
unsigned cur_ssd;
@@ -1103,7 +1464,8 @@ static unsigned oc_analyze_intra_mb_luma(oc_enc_ctx *_enc,
int best_qij;
int qij;
oc_qii_state_advance(qt+0,qs[bi-1]+0,qii);
- cur_rate=oc_dct_cost2(&cur_ssd,_enc->state.qis[qii],0,0,satd);
+ cur_rate=oc_dct_cost2(_enc,&cur_ssd,qii,0,0,satd);
+ cur_ssd=OC_RD_SCALE(cur_ssd,_rd_scale[bi]);
best_ssd=ssd[bi-1][0]+cur_ssd;
best_rate=rate[bi-1][0]+cur_rate
+(qt[0].bits-qs[bi-1][0].bits<<OC_BIT_SCALE);
@@ -1152,13 +1514,14 @@ static unsigned oc_analyze_intra_mb_luma(oc_enc_ctx *_enc,
/*Select a block-level quantizer for a single chroma block in an INTRA frame.*/
static unsigned oc_analyze_intra_chroma_block(oc_enc_ctx *_enc,
- const oc_qii_state *_qs,int _pli,ptrdiff_t _fragi){
+ const oc_qii_state *_qs,int _pli,ptrdiff_t _fragi,unsigned _rd_scale){
const unsigned char *src;
oc_fragment *frags;
ptrdiff_t frag_offs;
oc_qii_state qt[3];
unsigned cost[3];
unsigned satd;
+ int dc;
unsigned best_cost;
int best_qii;
int qii;
@@ -1168,16 +1531,30 @@ static unsigned oc_analyze_intra_chroma_block(oc_enc_ctx *_enc,
src=_enc->state.ref_frame_data[OC_FRAME_IO];
ystride=_enc->state.ref_ystride[_pli];
frag_offs=_enc->state.frag_buf_offs[_fragi];
- satd=oc_enc_frag_intra_satd(_enc,src+frag_offs,ystride);
+ if(_enc->sp_level<OC_SP_LEVEL_NOSATD){
+ satd=oc_enc_frag_intra_satd(_enc,&dc,src+frag_offs,ystride);
+ }
+ else{
+ satd=oc_enc_frag_intra_sad(_enc,src+frag_offs,ystride);
+ }
+ /*Most chroma blocks have no AC coefficients to speak of anyway, so it's not
+ worth spending the bits to change the AC quantizer.
+ TODO: This may be worth revisiting when we separate out DC and AC
+ predictions from SATD.*/
+#if 0
nqis=_enc->state.nqis;
+#else
+ nqis=1;
+#endif
lambda=_enc->lambda;
best_qii=0;
for(qii=0;qii<nqis;qii++){
unsigned cur_rate;
unsigned cur_ssd;
oc_qii_state_advance(qt+qii,_qs,qii);
- cur_rate=oc_dct_cost2(&cur_ssd,_enc->state.qis[qii],_pli,0,satd)
+ cur_rate=oc_dct_cost2(_enc,&cur_ssd,qii,_pli,0,satd)
+(qt[qii].bits-_qs->bits<<OC_BIT_SCALE);
+ cur_ssd=OC_RD_SCALE(cur_ssd,_rd_scale);
cost[qii]=OC_MODE_RD_COST(cur_ssd,cur_rate,lambda);
}
best_cost=cost[0];
@@ -1192,17 +1569,49 @@ static unsigned oc_analyze_intra_chroma_block(oc_enc_ctx *_enc,
return best_cost;
}
+static void oc_enc_mb_transform_quantize_intra_luma(oc_enc_ctx *_enc,
+ oc_enc_pipeline_state *_pipe,unsigned _mbi,
+ const unsigned _rd_scale[4],const unsigned _rd_iscale[4]){
+ /*Worst case token stack usage for 4 fragments.*/
+ oc_token_checkpoint stack[64*4];
+ oc_token_checkpoint *stackptr;
+ const oc_sb_map *sb_maps;
+ oc_fragment *frags;
+ ptrdiff_t *coded_fragis;
+ ptrdiff_t ncoded_fragis;
+ ptrdiff_t fragi;
+ int bi;
+ sb_maps=(const oc_sb_map *)_enc->state.sb_maps;
+ frags=_enc->state.frags;
+ coded_fragis=_pipe->coded_fragis[0];
+ ncoded_fragis=_pipe->ncoded_fragis[0];
+ stackptr=stack;
+ for(bi=0;bi<4;bi++){
+ fragi=sb_maps[_mbi>>2][_mbi&3][bi];
+ frags[fragi].refi=OC_FRAME_SELF;
+ frags[fragi].mb_mode=OC_MODE_INTRA;
+ oc_enc_block_transform_quantize(_enc,_pipe,0,fragi,
+ _rd_scale[bi],_rd_iscale[bi],NULL,NULL,&stackptr);
+ coded_fragis[ncoded_fragis++]=fragi;
+ }
+ _pipe->ncoded_fragis[0]=ncoded_fragis;
+}
+
static void oc_enc_sb_transform_quantize_intra_chroma(oc_enc_ctx *_enc,
oc_enc_pipeline_state *_pipe,int _pli,int _sbi_start,int _sbi_end){
- const oc_sb_map *sb_maps;
- oc_sb_flags *sb_flags;
- ptrdiff_t *coded_fragis;
- ptrdiff_t ncoded_fragis;
- int sbi;
+ const ogg_uint16_t *mcu_rd_scale;
+ const ogg_uint16_t *mcu_rd_iscale;
+ const oc_sb_map *sb_maps;
+ ptrdiff_t *coded_fragis;
+ ptrdiff_t ncoded_fragis;
+ ptrdiff_t froffset;
+ int sbi;
+ mcu_rd_scale=(const ogg_uint16_t *)_enc->mcu_rd_scale;
+ mcu_rd_iscale=(const ogg_uint16_t *)_enc->mcu_rd_iscale;
sb_maps=(const oc_sb_map *)_enc->state.sb_maps;
- sb_flags=_enc->state.sb_flags;
coded_fragis=_pipe->coded_fragis[_pli];
ncoded_fragis=_pipe->ncoded_fragis[_pli];
+ froffset=_pipe->froffset[_pli];
for(sbi=_sbi_start;sbi<_sbi_end;sbi++){
/*Worst case token stack usage for 1 fragment.*/
oc_token_checkpoint stack[64];
@@ -1213,10 +1622,14 @@ static void oc_enc_sb_transform_quantize_intra_chroma(oc_enc_ctx *_enc,
fragi=sb_maps[sbi][quadi][bi];
if(fragi>=0){
oc_token_checkpoint *stackptr;
- oc_analyze_intra_chroma_block(_enc,_pipe->qs+_pli,_pli,fragi);
+ unsigned rd_scale;
+ unsigned rd_iscale;
+ rd_scale=mcu_rd_scale[fragi-froffset];
+ rd_iscale=mcu_rd_iscale[fragi-froffset];
+ oc_analyze_intra_chroma_block(_enc,_pipe->qs+_pli,_pli,fragi,rd_scale);
stackptr=stack;
- oc_enc_block_transform_quantize(_enc,
- _pipe,_pli,fragi,0,NULL,&stackptr);
+ oc_enc_block_transform_quantize(_enc,_pipe,_pli,fragi,
+ rd_scale,rd_iscale,NULL,NULL,&stackptr);
coded_fragis[ncoded_fragis++]=fragi;
}
}
@@ -1226,13 +1639,19 @@ static void oc_enc_sb_transform_quantize_intra_chroma(oc_enc_ctx *_enc,
/*Analysis stage for an INTRA frame.*/
void oc_enc_analyze_intra(oc_enc_ctx *_enc,int _recode){
- oc_enc_pipeline_state pipe;
+ ogg_int64_t activity_sum;
+ ogg_int64_t luma_sum;
+ unsigned activity_avg;
+ unsigned luma_avg;
+ const ogg_uint16_t *chroma_rd_scale;
+ ogg_uint16_t *mcu_rd_scale;
+ ogg_uint16_t *mcu_rd_iscale;
const unsigned char *map_idxs;
int nmap_idxs;
oc_sb_flags *sb_flags;
signed char *mb_modes;
const oc_mb_map *mb_maps;
- oc_mb_enc_info *embs;
+ const oc_sb_map *sb_maps;
oc_fragment *frags;
unsigned stripe_sby;
unsigned mcu_nvsbs;
@@ -1242,7 +1661,14 @@ void oc_enc_analyze_intra(oc_enc_ctx *_enc,int _recode){
int pli;
_enc->state.frame_type=OC_INTRA_FRAME;
oc_enc_tokenize_start(_enc);
- oc_enc_pipeline_init(_enc,&pipe);
+ oc_enc_pipeline_init(_enc,&_enc->pipe);
+ oc_enc_mode_rd_init(_enc);
+ activity_sum=luma_sum=0;
+ activity_avg=_enc->activity_avg;
+ luma_avg=OC_CLAMPI(90<<8,_enc->luma_avg,160<<8);
+ chroma_rd_scale=_enc->chroma_rd_scale[OC_INTRA_FRAME][_enc->state.qis[0]];
+ mcu_rd_scale=_enc->mcu_rd_scale;
+ mcu_rd_iscale=_enc->mcu_rd_iscale;
/*Choose MVs and MB modes and quantize and code luma.
Must be done in Hilbert order.*/
map_idxs=OC_MB_MAP_IDXS[_enc->state.info.pixel_fmt];
@@ -1253,52 +1679,91 @@ void oc_enc_analyze_intra(oc_enc_ctx *_enc,int _recode){
sb_flags=_enc->state.sb_flags;
mb_modes=_enc->state.mb_modes;
mb_maps=(const oc_mb_map *)_enc->state.mb_maps;
- embs=_enc->mb_info;
+ sb_maps=(const oc_sb_map *)_enc->state.sb_maps;
frags=_enc->state.frags;
notstart=0;
notdone=1;
mcu_nvsbs=_enc->mcu_nvsbs;
for(stripe_sby=0;notdone;stripe_sby+=mcu_nvsbs){
- unsigned sbi;
- unsigned sbi_end;
- notdone=oc_enc_pipeline_set_stripe(_enc,&pipe,stripe_sby);
- sbi_end=pipe.sbi_end[0];
- for(sbi=pipe.sbi0[0];sbi<sbi_end;sbi++){
+ ptrdiff_t cfroffset;
+ unsigned sbi;
+ unsigned sbi_end;
+ notdone=oc_enc_pipeline_set_stripe(_enc,&_enc->pipe,stripe_sby);
+ sbi_end=_enc->pipe.sbi_end[0];
+ cfroffset=_enc->pipe.froffset[1];
+ for(sbi=_enc->pipe.sbi0[0];sbi<sbi_end;sbi++){
int quadi;
/*Mode addressing is through Y plane, always 4 MB per SB.*/
for(quadi=0;quadi<4;quadi++)if(sb_flags[sbi].quad_valid&1<<quadi){
+ unsigned activity[4];
+ unsigned rd_scale[5];
+ unsigned rd_iscale[5];
+ unsigned luma;
unsigned mbi;
int mapii;
int mapi;
int bi;
ptrdiff_t fragi;
mbi=sbi<<2|quadi;
+ /*Activity masking.*/
+ if(_enc->sp_level<OC_SP_LEVEL_FAST_ANALYSIS){
+ luma=oc_mb_activity(_enc,mbi,activity);
+ }
+ else{
+ unsigned intra_satd[12];
+ luma=oc_mb_intra_satd(_enc,mbi,intra_satd);
+ oc_mb_activity_fast(_enc,mbi,activity,intra_satd);
+ for(bi=0;bi<4;bi++)frags[sb_maps[mbi>>2][mbi&3][bi]].qii=0;
+ }
+ activity_sum+=oc_mb_masking(rd_scale,rd_iscale,
+ chroma_rd_scale,activity,activity_avg,luma,luma_avg);
+ luma_sum+=luma;
/*Motion estimation:
- We always do a basic 1MV search for all macroblocks, coded or not,
- keyframe or not.*/
- if(!_recode&&_enc->state.curframe_num>0)oc_mcenc_search(_enc,mbi);
- oc_analyze_intra_mb_luma(_enc,pipe.qs+0,mbi);
+ We do a basic 1MV search for all macroblocks, coded or not,
+ keyframe or not, unless we aren't using motion estimation at all.*/
+ if(!_recode&&_enc->state.curframe_num>0&&
+ _enc->sp_level<OC_SP_LEVEL_NOMC&&_enc->keyframe_frequency_force>1){
+ oc_mcenc_search(_enc,mbi);
+ }
+ if(_enc->sp_level<OC_SP_LEVEL_FAST_ANALYSIS){
+ oc_analyze_intra_mb_luma(_enc,_enc->pipe.qs+0,mbi,rd_scale);
+ }
mb_modes[mbi]=OC_MODE_INTRA;
- oc_enc_mb_transform_quantize_luma(_enc,&pipe,mbi,0);
+ oc_enc_mb_transform_quantize_intra_luma(_enc,&_enc->pipe,
+ mbi,rd_scale,rd_iscale);
/*Propagate final MB mode and MVs to the chroma blocks.*/
for(mapii=4;mapii<nmap_idxs;mapii++){
mapi=map_idxs[mapii];
pli=mapi>>2;
bi=mapi&3;
fragi=mb_maps[mbi][pli][bi];
+ frags[fragi].refi=OC_FRAME_SELF;
frags[fragi].mb_mode=OC_MODE_INTRA;
}
+ /*Save masking scale factors for chroma blocks.*/
+ for(mapii=4;mapii<(nmap_idxs-4>>1)+4;mapii++){
+ mapi=map_idxs[mapii];
+ bi=mapi&3;
+ fragi=mb_maps[mbi][1][bi];
+ mcu_rd_scale[fragi-cfroffset]=(ogg_uint16_t)rd_scale[4];
+ mcu_rd_iscale[fragi-cfroffset]=(ogg_uint16_t)rd_iscale[4];
+ }
}
}
- oc_enc_pipeline_finish_mcu_plane(_enc,&pipe,0,notstart,notdone);
+ oc_enc_pipeline_finish_mcu_plane(_enc,&_enc->pipe,0,notstart,notdone);
/*Code chroma planes.*/
for(pli=1;pli<3;pli++){
- oc_enc_sb_transform_quantize_intra_chroma(_enc,&pipe,
- pli,pipe.sbi0[pli],pipe.sbi_end[pli]);
- oc_enc_pipeline_finish_mcu_plane(_enc,&pipe,pli,notstart,notdone);
+ oc_enc_sb_transform_quantize_intra_chroma(_enc,&_enc->pipe,
+ pli,_enc->pipe.sbi0[pli],_enc->pipe.sbi_end[pli]);
+ oc_enc_pipeline_finish_mcu_plane(_enc,&_enc->pipe,pli,notstart,notdone);
}
notstart=1;
}
+ /*Compute the average block activity and MB luma score for the frame.*/
+ _enc->activity_avg=OC_MAXI(OC_ACTIVITY_AVG_MIN,
+ (unsigned)((activity_sum+(_enc->state.fplanes[0].nfrags>>1))/
+ _enc->state.fplanes[0].nfrags));
+ _enc->luma_avg=(unsigned)((luma_sum+(_enc->state.nmbs>>1))/_enc->state.nmbs);
/*Finish filling in the reference frame borders.*/
refi=_enc->state.ref_frame_idx[OC_FRAME_SELF];
for(pli=0;pli<3;pli++)oc_state_borders_fill_caps(&_enc->state,refi,pli);
@@ -1339,27 +1804,21 @@ static const unsigned OC_NOSKIP[12]={
static void oc_analyze_mb_mode_luma(oc_enc_ctx *_enc,
oc_mode_choice *_modec,const oc_fr_state *_fr,const oc_qii_state *_qs,
- const unsigned _frag_satd[12],const unsigned _skip_ssd[12],int _qti){
+ const unsigned _frag_satd[12],const unsigned _skip_ssd[12],
+ const unsigned _rd_scale[4],int _qti){
oc_fr_state fr;
oc_qii_state qs;
unsigned ssd;
unsigned rate;
- int overhead;
unsigned satd;
unsigned best_ssd;
unsigned best_rate;
- int best_overhead;
int best_fri;
int best_qii;
- unsigned cur_cost;
- unsigned cur_ssd;
- unsigned cur_rate;
- int cur_overhead;
int lambda;
int nqis;
int nskipped;
int bi;
- int qii;
lambda=_enc->lambda;
nqis=_enc->state.nqis;
/*We could do a trellis optimization here, but we don't make final skip
@@ -1370,26 +1829,36 @@ static void oc_analyze_mb_mode_luma(oc_enc_ctx *_enc,
code the flags, anyway.*/
*&fr=*_fr;
*&qs=*_qs;
- ssd=rate=overhead=nskipped=0;
+ ssd=rate=nskipped=0;
for(bi=0;bi<4;bi++){
oc_fr_state ft[2];
oc_qii_state qt[3];
unsigned best_cost;
+ unsigned cur_cost;
+ unsigned cur_ssd;
+ unsigned cur_rate;
+ unsigned cur_overhead;
+ int qii;
satd=_frag_satd[bi];
*(ft+0)=*&fr;
oc_fr_code_block(ft+0);
- oc_qii_state_advance(qt+0,&qs,0);
- best_overhead=(ft[0].bits-fr.bits<<OC_BIT_SCALE);
- best_rate=oc_dct_cost2(&best_ssd,_enc->state.qis[0],0,_qti,satd)
- +(qt[0].bits-qs.bits<<OC_BIT_SCALE);
- best_cost=OC_MODE_RD_COST(ssd+best_ssd,rate+best_rate+best_overhead,lambda);
+ cur_overhead=ft[0].bits-fr.bits;
+ best_rate=oc_dct_cost2(_enc,&best_ssd,0,0,_qti,satd)
+ +(cur_overhead<<OC_BIT_SCALE);
+ if(nqis>1){
+ oc_qii_state_advance(qt+0,&qs,0);
+ best_rate+=qt[0].bits-qs.bits<<OC_BIT_SCALE;
+ }
+ best_ssd=OC_RD_SCALE(best_ssd,_rd_scale[bi]);
+ best_cost=OC_MODE_RD_COST(ssd+best_ssd,rate+best_rate,lambda);
best_fri=0;
best_qii=0;
for(qii=1;qii<nqis;qii++){
oc_qii_state_advance(qt+qii,&qs,qii);
- cur_rate=oc_dct_cost2(&cur_ssd,_enc->state.qis[qii],0,_qti,satd)
- +(qt[qii].bits-qs.bits<<OC_BIT_SCALE);
- cur_cost=OC_MODE_RD_COST(ssd+cur_ssd,rate+cur_rate+best_overhead,lambda);
+ cur_rate=oc_dct_cost2(_enc,&cur_ssd,qii,0,_qti,satd)
+ +(cur_overhead+qt[qii].bits-qs.bits<<OC_BIT_SCALE);
+ cur_ssd=OC_RD_SCALE(cur_ssd,_rd_scale[bi]);
+ cur_cost=OC_MODE_RD_COST(ssd+cur_ssd,rate+cur_rate,lambda);
if(cur_cost<best_cost){
best_cost=cur_cost;
best_ssd=cur_ssd;
@@ -1397,7 +1866,7 @@ static void oc_analyze_mb_mode_luma(oc_enc_ctx *_enc,
best_qii=qii;
}
}
- if(_skip_ssd[bi]<UINT_MAX&&nskipped<3){
+ if(_skip_ssd[bi]<(UINT_MAX>>OC_BIT_SCALE+2)&&nskipped<3){
*(ft+1)=*&fr;
oc_fr_skip_block(ft+1);
cur_overhead=ft[1].bits-fr.bits<<OC_BIT_SCALE;
@@ -1405,15 +1874,13 @@ static void oc_analyze_mb_mode_luma(oc_enc_ctx *_enc,
cur_cost=OC_MODE_RD_COST(ssd+cur_ssd,rate+cur_overhead,lambda);
if(cur_cost<=best_cost){
best_ssd=cur_ssd;
- best_rate=0;
- best_overhead=cur_overhead;
+ best_rate=cur_overhead;
best_fri=1;
best_qii+=4;
}
}
rate+=best_rate;
ssd+=best_ssd;
- overhead+=best_overhead;
*&fr=*(ft+best_fri);
if(best_fri==0)*&qs=*(qt+best_qii);
else nskipped++;
@@ -1421,12 +1888,12 @@ static void oc_analyze_mb_mode_luma(oc_enc_ctx *_enc,
}
_modec->ssd=ssd;
_modec->rate=rate;
- _modec->overhead=OC_MAXI(overhead,0);
}
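/*The loop above makes a greedy D+lambda*R choice per block among the
  available block-level quantizers (plus the skip option).  A stripped-down
  floating-point sketch of that selection rule for one block, ignoring the
  coded/skipped flag and qii signalling overheads the real code also
  accumulates:*/
static int pick_block_qii_sketch(const double *_ssd,const double *_rate,
 int _nqis,double _lambda){
  double best_cost;
  int    best_qii;
  int    qii;
  best_qii=0;
  best_cost=_ssd[0]+_lambda*_rate[0];
  for(qii=1;qii<_nqis;qii++){
    double cost;
    cost=_ssd[qii]+_lambda*_rate[qii];
    if(cost<best_cost){
      best_cost=cost;
      best_qii=qii;
    }
  }
  return best_qii;
}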
static void oc_analyze_mb_mode_chroma(oc_enc_ctx *_enc,
oc_mode_choice *_modec,const oc_fr_state *_fr,const oc_qii_state *_qs,
- const unsigned _frag_satd[12],const unsigned _skip_ssd[12],int _qti){
+ const unsigned _frag_satd[12],const unsigned _skip_ssd[12],
+ unsigned _rd_scale,int _qti){
unsigned ssd;
unsigned rate;
unsigned satd;
@@ -1443,7 +1910,15 @@ static void oc_analyze_mb_mode_chroma(oc_enc_ctx *_enc,
int bi;
int qii;
lambda=_enc->lambda;
+ /*Most chroma blocks have no AC coefficients to speak of anyway, so it's not
+ worth spending the bits to change the AC quantizer.
+ TODO: This may be worth revisiting when we separate out DC and AC
+ predictions from SATD.*/
+#if 0
nqis=_enc->state.nqis;
+#else
+ nqis=1;
+#endif
ssd=_modec->ssd;
rate=_modec->rate;
/*Because (except in 4:4:4 mode) we aren't considering chroma blocks in coded
@@ -1455,13 +1930,15 @@ static void oc_analyze_mb_mode_chroma(oc_enc_ctx *_enc,
for(;bi<nblocks;bi++){
unsigned best_cost;
satd=_frag_satd[bi];
- best_rate=oc_dct_cost2(&best_ssd,_enc->state.qis[0],pli,_qti,satd)
+ best_rate=oc_dct_cost2(_enc,&best_ssd,0,pli,_qti,satd)
+OC_CHROMA_QII_RATE;
+ best_ssd=OC_RD_SCALE(best_ssd,_rd_scale);
best_cost=OC_MODE_RD_COST(ssd+best_ssd,rate+best_rate,lambda);
best_qii=0;
for(qii=1;qii<nqis;qii++){
- cur_rate=oc_dct_cost2(&cur_ssd,_enc->state.qis[qii],0,_qti,satd)
+ cur_rate=oc_dct_cost2(_enc,&cur_ssd,qii,pli,_qti,satd)
+OC_CHROMA_QII_RATE;
+ cur_ssd=OC_RD_SCALE(cur_ssd,_rd_scale);
cur_cost=OC_MODE_RD_COST(ssd+cur_ssd,rate+cur_rate,lambda);
if(cur_cost<best_cost){
best_cost=cur_cost;
@@ -1470,7 +1947,7 @@ static void oc_analyze_mb_mode_chroma(oc_enc_ctx *_enc,
best_qii=qii;
}
}
- if(_skip_ssd[bi]<UINT_MAX){
+ if(_skip_ssd[bi]<(UINT_MAX>>OC_BIT_SCALE+2)){
cur_ssd=_skip_ssd[bi]<<OC_BIT_SCALE;
cur_cost=OC_MODE_RD_COST(ssd+cur_ssd,rate,lambda);
if(cur_cost<=best_cost){
@@ -1490,65 +1967,50 @@ static void oc_analyze_mb_mode_chroma(oc_enc_ctx *_enc,
}
static void oc_skip_cost(oc_enc_ctx *_enc,oc_enc_pipeline_state *_pipe,
- unsigned _mbi,unsigned _ssd[12]){
- OC_ALIGN16(ogg_int16_t buffer[64]);
- const unsigned char *src;
- const unsigned char *ref;
- int ystride;
- const oc_fragment *frags;
- const ptrdiff_t *frag_buf_offs;
- const ptrdiff_t *sb_map;
- const oc_mb_map_plane *mb_map;
- const unsigned char *map_idxs;
- int map_nidxs;
- ogg_int64_t mask;
- unsigned uncoded_ssd;
- int uncoded_dc;
- unsigned dc_dequant;
- int dc_flag;
- int mapii;
- int mapi;
- int pli;
- int bi;
- ptrdiff_t fragi;
- ptrdiff_t frag_offs;
- int borderi;
- int pi;
+ unsigned _mbi,const unsigned _rd_scale[4],unsigned _ssd[12]){
+ const unsigned char *src;
+ const unsigned char *ref;
+ int ystride;
+ const oc_fragment *frags;
+ const ptrdiff_t *frag_buf_offs;
+ const ptrdiff_t *sb_map;
+ const oc_mb_map_plane *mb_map;
+ const unsigned char *map_idxs;
+ oc_mv *mvs;
+ int map_nidxs;
+ unsigned uncoded_ssd;
+ int mapii;
+ int mapi;
+ int pli;
+ int bi;
+ ptrdiff_t fragi;
+ ptrdiff_t frag_offs;
+ int borderi;
src=_enc->state.ref_frame_data[OC_FRAME_IO];
- ref=_enc->state.ref_frame_data[_enc->state.ref_frame_idx[OC_FRAME_PREV]];
+ ref=_enc->state.ref_frame_data[OC_FRAME_PREV];
ystride=_enc->state.ref_ystride[0];
frags=_enc->state.frags;
frag_buf_offs=_enc->state.frag_buf_offs;
sb_map=_enc->state.sb_maps[_mbi>>2][_mbi&3];
- dc_dequant=_enc->state.dequant_tables[_enc->state.qis[0]][0][1][0];
+ mvs=_enc->mb_info[_mbi].block_mv;
for(bi=0;bi<4;bi++){
fragi=sb_map[bi];
- frag_offs=frag_buf_offs[fragi];
- oc_enc_frag_sub(_enc,buffer,src+frag_offs,ref+frag_offs,ystride);
borderi=frags[fragi].borderi;
- uncoded_ssd=uncoded_dc=0;
+ frag_offs=frag_buf_offs[fragi];
if(borderi<0){
- for(pi=0;pi<64;pi++){
- uncoded_ssd+=buffer[pi]*buffer[pi];
- uncoded_dc+=buffer[pi];
- }
+ uncoded_ssd=oc_enc_frag_ssd(_enc,src+frag_offs,ref+frag_offs,ystride);
}
else{
- ogg_int64_t mask;
- mask=_enc->state.borders[borderi].mask;
- for(pi=0;pi<64;pi++,mask>>=1)if(mask&1){
- uncoded_ssd+=buffer[pi]*buffer[pi];
- uncoded_dc+=buffer[pi];
- }
+ uncoded_ssd=oc_enc_frag_border_ssd(_enc,
+ src+frag_offs,ref+frag_offs,ystride,_enc->state.borders[borderi].mask);
}
- /*Scale to match DCT domain.*/
- uncoded_ssd<<=4;
- /*We actually only want the AC contribution to the SSD.*/
- uncoded_ssd-=uncoded_dc*uncoded_dc>>2;
- /*DC is a special case; if there's more than a full-quantizer improvement
- in the effective DC component, always force-code the block.*/
- dc_flag=abs(uncoded_dc)>dc_dequant<<1;
- uncoded_ssd|=-dc_flag;
+ /*Scale to match DCT domain and RD.*/
+ uncoded_ssd=OC_RD_SKIP_SCALE(uncoded_ssd,_rd_scale[bi]);
+ /*Motion is a special case; if there is more than a full-pixel motion
+ against the prior frame, penalize skipping.
+ TODO: The factor of two here is a kludge, but it tested out better than a
+ hard limit.*/
+ if(mvs[bi]!=0)uncoded_ssd*=2;
_pipe->skip_ssd[0][fragi-_pipe->froffset[0]]=_ssd[bi]=uncoded_ssd;
}
mb_map=(const oc_mb_map_plane *)_enc->state.mb_maps[_mbi];
@@ -1556,96 +2018,52 @@ static void oc_skip_cost(oc_enc_ctx *_enc,oc_enc_pipeline_state *_pipe,
map_idxs=OC_MB_MAP_IDXS[_enc->state.info.pixel_fmt];
map_nidxs=(map_nidxs-4>>1)+4;
mapii=4;
+ mvs=_enc->mb_info[_mbi].unref_mv;
for(pli=1;pli<3;pli++){
ystride=_enc->state.ref_ystride[pli];
- dc_dequant=_enc->state.dequant_tables[_enc->state.qis[0]][pli][1][0];
for(;mapii<map_nidxs;mapii++){
mapi=map_idxs[mapii];
bi=mapi&3;
fragi=mb_map[pli][bi];
- frag_offs=frag_buf_offs[fragi];
- oc_enc_frag_sub(_enc,buffer,src+frag_offs,ref+frag_offs,ystride);
borderi=frags[fragi].borderi;
- uncoded_ssd=uncoded_dc=0;
+ frag_offs=frag_buf_offs[fragi];
if(borderi<0){
- for(pi=0;pi<64;pi++){
- uncoded_ssd+=buffer[pi]*buffer[pi];
- uncoded_dc+=buffer[pi];
- }
+ uncoded_ssd=oc_enc_frag_ssd(_enc,src+frag_offs,ref+frag_offs,ystride);
}
else{
- mask=_enc->state.borders[borderi].mask;
- for(pi=0;pi<64;pi++,mask>>=1)if(mask&1){
- uncoded_ssd+=buffer[pi]*buffer[pi];
- uncoded_dc+=buffer[pi];
- }
+ uncoded_ssd=oc_enc_frag_border_ssd(_enc,
+ src+frag_offs,ref+frag_offs,ystride,_enc->state.borders[borderi].mask);
}
- /*Scale to match DCT domain.*/
- uncoded_ssd<<=4;
- /*We actually only want the AC contribution to the SSD.*/
- uncoded_ssd-=uncoded_dc*uncoded_dc>>2;
- /*DC is a special case; if there's more than a full-quantizer improvement
- in the effective DC component, always force-code the block.*/
- dc_flag=abs(uncoded_dc)>dc_dequant<<1;
- uncoded_ssd|=-dc_flag;
+ /*Scale to match DCT domain and RD.*/
+ uncoded_ssd=OC_RD_SKIP_SCALE(uncoded_ssd,_rd_scale[4]);
+ /*Motion is a special case; if there is more than a full-pixel motion
+ against the prior frame, penalize skipping.
+ TODO: The factor of two here is a kludge, but it tested out better than
+         a hard limit.*/
+ if(mvs[OC_FRAME_PREV]!=0)uncoded_ssd*=2;
_pipe->skip_ssd[pli][fragi-_pipe->froffset[pli]]=_ssd[mapii]=uncoded_ssd;
}
map_nidxs=(map_nidxs-4<<1)+4;
}
}
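/*A sketch of what the oc_enc_frag_border_ssd() call above computes, based on
  the per-pixel loop it replaces: the sum of squared differences between an
  8x8 source fragment and its reference, restricted to the pixels selected by
  the 64-bit border mask (bit pi covers pixel pi in raster order); the
  unmasked oc_enc_frag_ssd() is the same sum over all 64 pixels.
  The function name and argument order here are illustrative only;
  ogg_int64_t is libogg's 64-bit type, as used elsewhere in this file.*/
static unsigned frag_border_ssd_sketch(const unsigned char *_src,
 const unsigned char *_ref,int _ystride,ogg_int64_t _mask){
  unsigned ssd;
  int      pi;
  ssd=0;
  for(pi=0;pi<64;pi++,_mask>>=1)if(_mask&1){
    int d;
    d=_src[(pi>>3)*_ystride+(pi&7)]-_ref[(pi>>3)*_ystride+(pi&7)];
    ssd+=d*d;
  }
  return ssd;
}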
-static void oc_mb_intra_satd(oc_enc_ctx *_enc,unsigned _mbi,
- unsigned _frag_satd[12]){
- const unsigned char *src;
- const ptrdiff_t *frag_buf_offs;
- const ptrdiff_t *sb_map;
- const oc_mb_map_plane *mb_map;
- const unsigned char *map_idxs;
- int map_nidxs;
- int mapii;
- int mapi;
- int ystride;
- int pli;
- int bi;
- ptrdiff_t fragi;
- ptrdiff_t frag_offs;
- frag_buf_offs=_enc->state.frag_buf_offs;
- sb_map=_enc->state.sb_maps[_mbi>>2][_mbi&3];
- src=_enc->state.ref_frame_data[OC_FRAME_IO];
- ystride=_enc->state.ref_ystride[0];
- for(bi=0;bi<4;bi++){
- fragi=sb_map[bi];
- frag_offs=frag_buf_offs[fragi];
- _frag_satd[bi]=oc_enc_frag_intra_satd(_enc,src+frag_offs,ystride);
- }
- mb_map=(const oc_mb_map_plane *)_enc->state.mb_maps[_mbi];
- map_idxs=OC_MB_MAP_IDXS[_enc->state.info.pixel_fmt];
- map_nidxs=OC_MB_MAP_NIDXS[_enc->state.info.pixel_fmt];
- /*Note: This assumes ref_ystride[1]==ref_ystride[2].*/
- ystride=_enc->state.ref_ystride[1];
- for(mapii=4;mapii<map_nidxs;mapii++){
- mapi=map_idxs[mapii];
- pli=mapi>>2;
- bi=mapi&3;
- fragi=mb_map[pli][bi];
- frag_offs=frag_buf_offs[fragi];
- _frag_satd[mapii]=oc_enc_frag_intra_satd(_enc,src+frag_offs,ystride);
- }
-}
static void oc_cost_intra(oc_enc_ctx *_enc,oc_mode_choice *_modec,
unsigned _mbi,const oc_fr_state *_fr,const oc_qii_state *_qs,
- const unsigned _frag_satd[12],const unsigned _skip_ssd[12]){
- oc_analyze_mb_mode_luma(_enc,_modec,_fr,_qs,_frag_satd,_skip_ssd,0);
- oc_analyze_mb_mode_chroma(_enc,_modec,_fr,_qs,_frag_satd,_skip_ssd,0);
- _modec->overhead+=
+ const unsigned _frag_satd[12],const unsigned _skip_ssd[12],
+ const unsigned _rd_scale[5]){
+ oc_analyze_mb_mode_luma(_enc,_modec,_fr,_qs,_frag_satd,_skip_ssd,_rd_scale,0);
+ oc_analyze_mb_mode_chroma(_enc,_modec,_fr,_qs,
+ _frag_satd,_skip_ssd,_rd_scale[4],0);
+ _modec->overhead=
oc_mode_scheme_chooser_cost(&_enc->chooser,OC_MODE_INTRA)<<OC_BIT_SCALE;
oc_mode_set_cost(_modec,_enc->lambda);
}
static void oc_cost_inter(oc_enc_ctx *_enc,oc_mode_choice *_modec,
- unsigned _mbi,int _mb_mode,const signed char *_mv,
- const oc_fr_state *_fr,const oc_qii_state *_qs,const unsigned _skip_ssd[12]){
+ unsigned _mbi,int _mb_mode,oc_mv _mv,
+ const oc_fr_state *_fr,const oc_qii_state *_qs,
+ const unsigned _skip_ssd[12],const unsigned _rd_scale[5]){
unsigned frag_satd[12];
const unsigned char *src;
const unsigned char *ref;
@@ -1658,35 +2076,45 @@ static void oc_cost_inter(oc_enc_ctx *_enc,oc_mode_choice *_modec,
int mapii;
int mapi;
int mv_offs[2];
- int dx;
- int dy;
int pli;
int bi;
ptrdiff_t fragi;
ptrdiff_t frag_offs;
+ int dc;
src=_enc->state.ref_frame_data[OC_FRAME_IO];
- ref=_enc->state.ref_frame_data[
- _enc->state.ref_frame_idx[OC_FRAME_FOR_MODE(_mb_mode)]];
+ ref=_enc->state.ref_frame_data[OC_FRAME_FOR_MODE(_mb_mode)];
ystride=_enc->state.ref_ystride[0];
frag_buf_offs=_enc->state.frag_buf_offs;
sb_map=_enc->state.sb_maps[_mbi>>2][_mbi&3];
- dx=_mv[0];
- dy=_mv[1];
_modec->rate=_modec->ssd=0;
- if(oc_state_get_mv_offsets(&_enc->state,mv_offs,0,dx,dy)>1){
+ if(oc_state_get_mv_offsets(&_enc->state,mv_offs,0,_mv)>1){
for(bi=0;bi<4;bi++){
fragi=sb_map[bi];
frag_offs=frag_buf_offs[fragi];
- frag_satd[bi]=oc_enc_frag_satd2_thresh(_enc,src+frag_offs,
- ref+frag_offs+mv_offs[0],ref+frag_offs+mv_offs[1],ystride,UINT_MAX);
+ if(_enc->sp_level<OC_SP_LEVEL_NOSATD){
+ frag_satd[bi]=oc_enc_frag_satd2(_enc,&dc,src+frag_offs,
+ ref+frag_offs+mv_offs[0],ref+frag_offs+mv_offs[1],ystride);
+ frag_satd[bi]+=abs(dc);
+ }
+ else{
+ frag_satd[bi]=oc_enc_frag_sad2_thresh(_enc,src+frag_offs,
+ ref+frag_offs+mv_offs[0],ref+frag_offs+mv_offs[1],ystride,UINT_MAX);
+ }
}
}
else{
for(bi=0;bi<4;bi++){
fragi=sb_map[bi];
frag_offs=frag_buf_offs[fragi];
- frag_satd[bi]=oc_enc_frag_satd_thresh(_enc,src+frag_offs,
- ref+frag_offs+mv_offs[0],ystride,UINT_MAX);
+ if(_enc->sp_level<OC_SP_LEVEL_NOSATD){
+ frag_satd[bi]=oc_enc_frag_satd(_enc,&dc,src+frag_offs,
+ ref+frag_offs+mv_offs[0],ystride);
+ frag_satd[bi]+=abs(dc);
+ }
+ else{
+ frag_satd[bi]=oc_enc_frag_sad(_enc,src+frag_offs,
+ ref+frag_offs+mv_offs[0],ystride);
+ }
}
}
mb_map=(const oc_mb_map_plane *)_enc->state.mb_maps[_mbi];
@@ -1694,15 +2122,22 @@ static void oc_cost_inter(oc_enc_ctx *_enc,oc_mode_choice *_modec,
map_nidxs=OC_MB_MAP_NIDXS[_enc->state.info.pixel_fmt];
/*Note: This assumes ref_ystride[1]==ref_ystride[2].*/
ystride=_enc->state.ref_ystride[1];
- if(oc_state_get_mv_offsets(&_enc->state,mv_offs,1,dx,dy)>1){
+ if(oc_state_get_mv_offsets(&_enc->state,mv_offs,1,_mv)>1){
for(mapii=4;mapii<map_nidxs;mapii++){
mapi=map_idxs[mapii];
pli=mapi>>2;
bi=mapi&3;
fragi=mb_map[pli][bi];
frag_offs=frag_buf_offs[fragi];
- frag_satd[mapii]=oc_enc_frag_satd2_thresh(_enc,src+frag_offs,
- ref+frag_offs+mv_offs[0],ref+frag_offs+mv_offs[1],ystride,UINT_MAX);
+ if(_enc->sp_level<OC_SP_LEVEL_NOSATD){
+ frag_satd[mapii]=oc_enc_frag_satd2(_enc,&dc,src+frag_offs,
+ ref+frag_offs+mv_offs[0],ref+frag_offs+mv_offs[1],ystride);
+ frag_satd[mapii]+=abs(dc);
+ }
+ else{
+ frag_satd[mapii]=oc_enc_frag_sad2_thresh(_enc,src+frag_offs,
+ ref+frag_offs+mv_offs[0],ref+frag_offs+mv_offs[1],ystride,UINT_MAX);
+ }
}
}
else{
@@ -1712,30 +2147,38 @@ static void oc_cost_inter(oc_enc_ctx *_enc,oc_mode_choice *_modec,
bi=mapi&3;
fragi=mb_map[pli][bi];
frag_offs=frag_buf_offs[fragi];
- frag_satd[mapii]=oc_enc_frag_satd_thresh(_enc,src+frag_offs,
- ref+frag_offs+mv_offs[0],ystride,UINT_MAX);
+ if(_enc->sp_level<OC_SP_LEVEL_NOSATD){
+ frag_satd[mapii]=oc_enc_frag_satd(_enc,&dc,src+frag_offs,
+ ref+frag_offs+mv_offs[0],ystride);
+ frag_satd[mapii]+=abs(dc);
+ }
+ else{
+ frag_satd[mapii]=oc_enc_frag_sad(_enc,src+frag_offs,
+ ref+frag_offs+mv_offs[0],ystride);
+ }
}
}
- oc_analyze_mb_mode_luma(_enc,_modec,_fr,_qs,frag_satd,_skip_ssd,1);
- oc_analyze_mb_mode_chroma(_enc,_modec,_fr,_qs,frag_satd,_skip_ssd,1);
- _modec->overhead+=
+ oc_analyze_mb_mode_luma(_enc,_modec,_fr,_qs,frag_satd,_skip_ssd,_rd_scale,1);
+ oc_analyze_mb_mode_chroma(_enc,_modec,_fr,_qs,
+ frag_satd,_skip_ssd,_rd_scale[4],1);
+ _modec->overhead=
oc_mode_scheme_chooser_cost(&_enc->chooser,_mb_mode)<<OC_BIT_SCALE;
oc_mode_set_cost(_modec,_enc->lambda);
}
static void oc_cost_inter_nomv(oc_enc_ctx *_enc,oc_mode_choice *_modec,
unsigned _mbi,int _mb_mode,const oc_fr_state *_fr,const oc_qii_state *_qs,
- const unsigned _skip_ssd[12]){
- static const oc_mv OC_MV_ZERO;
- oc_cost_inter(_enc,_modec,_mbi,_mb_mode,OC_MV_ZERO,_fr,_qs,_skip_ssd);
+ const unsigned _skip_ssd[12],const unsigned _rd_scale[4]){
+ oc_cost_inter(_enc,_modec,_mbi,_mb_mode,0,_fr,_qs,_skip_ssd,_rd_scale);
}
static int oc_cost_inter1mv(oc_enc_ctx *_enc,oc_mode_choice *_modec,
- unsigned _mbi,int _mb_mode,const signed char *_mv,
- const oc_fr_state *_fr,const oc_qii_state *_qs,const unsigned _skip_ssd[12]){
+ unsigned _mbi,int _mb_mode,oc_mv _mv,
+ const oc_fr_state *_fr,const oc_qii_state *_qs,const unsigned _skip_ssd[12],
+ const unsigned _rd_scale[4]){
int bits0;
- oc_cost_inter(_enc,_modec,_mbi,_mb_mode,_mv,_fr,_qs,_skip_ssd);
- bits0=OC_MV_BITS[0][_mv[0]+31]+OC_MV_BITS[0][_mv[1]+31];
+ oc_cost_inter(_enc,_modec,_mbi,_mb_mode,_mv,_fr,_qs,_skip_ssd,_rd_scale);
+ bits0=OC_MV_BITS[0][OC_MV_X(_mv)+31]+OC_MV_BITS[0][OC_MV_Y(_mv)+31];
_modec->overhead+=OC_MINI(_enc->mv_bits[0]+bits0,_enc->mv_bits[1]+12)
-OC_MINI(_enc->mv_bits[0],_enc->mv_bits[1])<<OC_BIT_SCALE;
oc_mode_set_cost(_modec,_enc->lambda);
@@ -1749,7 +2192,7 @@ static const unsigned char OC_MB_PHASE[4][4]={
static void oc_cost_inter4mv(oc_enc_ctx *_enc,oc_mode_choice *_modec,
unsigned _mbi,oc_mv _mv[4],const oc_fr_state *_fr,const oc_qii_state *_qs,
- const unsigned _skip_ssd[12]){
+ const unsigned _skip_ssd[12],const unsigned _rd_scale[5]){
unsigned frag_satd[12];
oc_mv lbmvs[4];
oc_mv cbmvs[4];
@@ -1765,8 +2208,6 @@ static void oc_cost_inter4mv(oc_enc_ctx *_enc,oc_mode_choice *_modec,
int mapii;
int mapi;
int mv_offs[2];
- int dx;
- int dy;
int pli;
int bi;
ptrdiff_t fragi;
@@ -1774,8 +2215,9 @@ static void oc_cost_inter4mv(oc_enc_ctx *_enc,oc_mode_choice *_modec,
int bits0;
int bits1;
unsigned satd;
+ int dc;
src=_enc->state.ref_frame_data[OC_FRAME_IO];
- ref=_enc->state.ref_frame_data[_enc->state.ref_frame_idx[OC_FRAME_PREV]];
+ ref=_enc->state.ref_frame_data[OC_FRAME_PREV];
ystride=_enc->state.ref_ystride[0];
frag_buf_offs=_enc->state.frag_buf_offs;
frag_mvs=_enc->state.frag_mvs;
@@ -1783,41 +2225,36 @@ static void oc_cost_inter4mv(oc_enc_ctx *_enc,oc_mode_choice *_modec,
_modec->rate=_modec->ssd=0;
for(bi=0;bi<4;bi++){
fragi=mb_map[0][bi];
- dx=_mv[bi][0];
- dy=_mv[bi][1];
/*Save the block MVs as the current ones while we're here; we'll replace
them if we don't ultimately choose 4MV mode.*/
- frag_mvs[fragi][0]=(signed char)dx;
- frag_mvs[fragi][1]=(signed char)dy;
+ frag_mvs[fragi]=_mv[bi];
frag_offs=frag_buf_offs[fragi];
- if(oc_state_get_mv_offsets(&_enc->state,mv_offs,0,dx,dy)>1){
- satd=oc_enc_frag_satd2_thresh(_enc,src+frag_offs,
- ref+frag_offs+mv_offs[0],ref+frag_offs+mv_offs[1],ystride,UINT_MAX);
+ if(oc_state_get_mv_offsets(&_enc->state,mv_offs,0,_mv[bi])>1){
+ satd=oc_enc_frag_satd2(_enc,&dc,src+frag_offs,
+ ref+frag_offs+mv_offs[0],ref+frag_offs+mv_offs[1],ystride);
}
else{
- satd=oc_enc_frag_satd_thresh(_enc,src+frag_offs,
- ref+frag_offs+mv_offs[0],ystride,UINT_MAX);
+ satd=oc_enc_frag_satd(_enc,&dc,src+frag_offs,
+ ref+frag_offs+mv_offs[0],ystride);
}
- frag_satd[OC_MB_PHASE[_mbi&3][bi]]=satd;
+ frag_satd[OC_MB_PHASE[_mbi&3][bi]]=satd+abs(dc);
}
oc_analyze_mb_mode_luma(_enc,_modec,_fr,_qs,frag_satd,
- _enc->vp3_compatible?OC_NOSKIP:_skip_ssd,1);
+ _enc->vp3_compatible?OC_NOSKIP:_skip_ssd,_rd_scale,1);
/*Figure out which blocks are being skipped and give them (0,0) MVs.*/
bits0=0;
bits1=0;
nqis=_enc->state.nqis;
for(bi=0;bi<4;bi++){
- if(_modec->qii[OC_MB_PHASE[_mbi&3][bi]]>=nqis){
- memset(lbmvs+bi,0,sizeof(*lbmvs));
- }
+ if(_modec->qii[OC_MB_PHASE[_mbi&3][bi]]>=nqis)lbmvs[bi]=0;
else{
- memcpy(lbmvs+bi,_mv+bi,sizeof(*lbmvs));
- bits0+=OC_MV_BITS[0][_mv[bi][0]+31]+OC_MV_BITS[0][_mv[bi][1]+31];
+ lbmvs[bi]=_mv[bi];
+ bits0+=OC_MV_BITS[0][OC_MV_X(_mv[bi])+31]
+ +OC_MV_BITS[0][OC_MV_Y(_mv[bi])+31];
bits1+=12;
}
}
- (*OC_SET_CHROMA_MVS_TABLE[_enc->state.info.pixel_fmt])(cbmvs,
- (const oc_mv *)lbmvs);
+ (*OC_SET_CHROMA_MVS_TABLE[_enc->state.info.pixel_fmt])(cbmvs,lbmvs);
map_idxs=OC_MB_MAP_IDXS[_enc->state.info.pixel_fmt];
map_nidxs=OC_MB_MAP_NIDXS[_enc->state.info.pixel_fmt];
/*Note: This assumes ref_ystride[1]==ref_ystride[2].*/
@@ -1827,23 +2264,22 @@ static void oc_cost_inter4mv(oc_enc_ctx *_enc,oc_mode_choice *_modec,
pli=mapi>>2;
bi=mapi&3;
fragi=mb_map[pli][bi];
- dx=cbmvs[bi][0];
- dy=cbmvs[bi][1];
frag_offs=frag_buf_offs[fragi];
/*TODO: We could save half these calls by re-using the results for the Cb
and Cr planes; is it worth it?*/
- if(oc_state_get_mv_offsets(&_enc->state,mv_offs,pli,dx,dy)>1){
- satd=oc_enc_frag_satd2_thresh(_enc,src+frag_offs,
- ref+frag_offs+mv_offs[0],ref+frag_offs+mv_offs[1],ystride,UINT_MAX);
+ if(oc_state_get_mv_offsets(&_enc->state,mv_offs,pli,cbmvs[bi])>1){
+ satd=oc_enc_frag_satd2(_enc,&dc,src+frag_offs,
+ ref+frag_offs+mv_offs[0],ref+frag_offs+mv_offs[1],ystride);
}
else{
- satd=oc_enc_frag_satd_thresh(_enc,src+frag_offs,
- ref+frag_offs+mv_offs[0],ystride,UINT_MAX);
+ satd=oc_enc_frag_satd(_enc,&dc,src+frag_offs,
+ ref+frag_offs+mv_offs[0],ystride);
}
- frag_satd[mapii]=satd;
+ frag_satd[mapii]=satd+abs(dc);
}
- oc_analyze_mb_mode_chroma(_enc,_modec,_fr,_qs,frag_satd,_skip_ssd,1);
- _modec->overhead+=
+ oc_analyze_mb_mode_chroma(_enc,_modec,_fr,_qs,
+ frag_satd,_skip_ssd,_rd_scale[4],1);
+ _modec->overhead=
oc_mode_scheme_chooser_cost(&_enc->chooser,OC_MODE_INTER_MV_FOUR)
+OC_MINI(_enc->mv_bits[0]+bits0,_enc->mv_bits[1]+bits1)
-OC_MINI(_enc->mv_bits[0],_enc->mv_bits[1])<<OC_BIT_SCALE;
@@ -1852,12 +2288,18 @@ static void oc_cost_inter4mv(oc_enc_ctx *_enc,oc_mode_choice *_modec,
int oc_enc_analyze_inter(oc_enc_ctx *_enc,int _allow_keyframe,int _recode){
oc_set_chroma_mvs_func set_chroma_mvs;
- oc_enc_pipeline_state pipe;
oc_qii_state intra_luma_qs;
oc_mv last_mv;
oc_mv prior_mv;
ogg_int64_t interbits;
ogg_int64_t intrabits;
+ ogg_int64_t activity_sum;
+ ogg_int64_t luma_sum;
+ unsigned activity_avg;
+ unsigned luma_avg;
+ const ogg_uint16_t *chroma_rd_scale;
+ ogg_uint16_t *mcu_rd_scale;
+ ogg_uint16_t *mcu_rd_iscale;
const unsigned char *map_idxs;
int nmap_idxs;
unsigned *coded_mbis;
@@ -1871,30 +2313,36 @@ int oc_enc_analyze_inter(oc_enc_ctx *_enc,int _allow_keyframe,int _recode){
oc_mb_enc_info *embs;
oc_fragment *frags;
oc_mv *frag_mvs;
- int qi;
unsigned stripe_sby;
unsigned mcu_nvsbs;
int notstart;
int notdone;
- int vdec;
unsigned sbi;
unsigned sbi_end;
int refi;
int pli;
+ int sp_level;
+ sp_level=_enc->sp_level;
set_chroma_mvs=OC_SET_CHROMA_MVS_TABLE[_enc->state.info.pixel_fmt];
_enc->state.frame_type=OC_INTER_FRAME;
oc_mode_scheme_chooser_reset(&_enc->chooser);
oc_enc_tokenize_start(_enc);
- oc_enc_pipeline_init(_enc,&pipe);
+ oc_enc_pipeline_init(_enc,&_enc->pipe);
+ oc_enc_mode_rd_init(_enc);
if(_allow_keyframe)oc_qii_state_init(&intra_luma_qs);
_enc->mv_bits[0]=_enc->mv_bits[1]=0;
interbits=intrabits=0;
- last_mv[0]=last_mv[1]=prior_mv[0]=prior_mv[1]=0;
+ activity_sum=luma_sum=0;
+ activity_avg=_enc->activity_avg;
+ luma_avg=OC_CLAMPI(90<<8,_enc->luma_avg,160<<8);
+ chroma_rd_scale=_enc->chroma_rd_scale[OC_INTER_FRAME][_enc->state.qis[0]];
+ mcu_rd_scale=_enc->mcu_rd_scale;
+ mcu_rd_iscale=_enc->mcu_rd_iscale;
+ last_mv=prior_mv=0;
/*Choose MVs and MB modes and quantize and code luma.
Must be done in Hilbert order.*/
map_idxs=OC_MB_MAP_IDXS[_enc->state.info.pixel_fmt];
nmap_idxs=OC_MB_MAP_NIDXS[_enc->state.info.pixel_fmt];
- qi=_enc->state.qis[0];
coded_mbis=_enc->coded_mbis;
uncoded_mbis=coded_mbis+_enc->state.nmbs;
ncoded_mbis=0;
@@ -1909,37 +2357,51 @@ int oc_enc_analyze_inter(oc_enc_ctx *_enc,int _allow_keyframe,int _recode){
embs=_enc->mb_info;
frags=_enc->state.frags;
frag_mvs=_enc->state.frag_mvs;
- vdec=!(_enc->state.info.pixel_fmt&2);
notstart=0;
notdone=1;
mcu_nvsbs=_enc->mcu_nvsbs;
for(stripe_sby=0;notdone;stripe_sby+=mcu_nvsbs){
- notdone=oc_enc_pipeline_set_stripe(_enc,&pipe,stripe_sby);
- sbi_end=pipe.sbi_end[0];
- for(sbi=pipe.sbi0[0];sbi<sbi_end;sbi++){
+ ptrdiff_t cfroffset;
+ notdone=oc_enc_pipeline_set_stripe(_enc,&_enc->pipe,stripe_sby);
+ sbi_end=_enc->pipe.sbi_end[0];
+ cfroffset=_enc->pipe.froffset[1];
+ for(sbi=_enc->pipe.sbi0[0];sbi<sbi_end;sbi++){
int quadi;
/*Mode addressing is through Y plane, always 4 MB per SB.*/
for(quadi=0;quadi<4;quadi++)if(sb_flags[sbi].quad_valid&1<<quadi){
oc_mode_choice modes[8];
+ unsigned activity[4];
+ unsigned rd_scale[5];
+ unsigned rd_iscale[5];
unsigned skip_ssd[12];
unsigned intra_satd[12];
+ unsigned luma;
int mb_mv_bits_0;
int mb_gmv_bits_0;
int inter_mv_pref;
int mb_mode;
- int dx;
- int dy;
+ int refi;
+ int mv;
unsigned mbi;
int mapii;
int mapi;
int bi;
ptrdiff_t fragi;
mbi=sbi<<2|quadi;
+ luma=oc_mb_intra_satd(_enc,mbi,intra_satd);
+ /*Activity masking.*/
+ if(sp_level<OC_SP_LEVEL_FAST_ANALYSIS){
+ oc_mb_activity(_enc,mbi,activity);
+ }
+ else oc_mb_activity_fast(_enc,mbi,activity,intra_satd);
+ luma_sum+=luma;
+ activity_sum+=oc_mb_masking(rd_scale,rd_iscale,
+ chroma_rd_scale,activity,activity_avg,luma,luma_avg);
/*Motion estimation:
We always do a basic 1MV search for all macroblocks, coded or not,
keyframe or not.*/
- if(!_recode&&_enc->sp_level<OC_SP_LEVEL_NOMC)oc_mcenc_search(_enc,mbi);
- dx=dy=0;
+ if(!_recode&&sp_level<OC_SP_LEVEL_NOMC)oc_mcenc_search(_enc,mbi);
+ mv=0;
/*Find the block choice with the lowest estimated coding cost.
If a Cb or Cr block is coded but no Y' block from a macro block then
the mode MUST be OC_MODE_INTER_NOMV.
@@ -1948,15 +2410,16 @@ int oc_enc_analyze_inter(oc_enc_ctx *_enc,int _allow_keyframe,int _recode){
/*Block coding cost is estimated from correlated SATD metrics.*/
/*At this point, all blocks that are in frame are still marked coded.*/
if(!_recode){
- memcpy(embs[mbi].unref_mv,
- embs[mbi].analysis_mv[0],sizeof(embs[mbi].unref_mv));
+ embs[mbi].unref_mv[OC_FRAME_GOLD]=
+ embs[mbi].analysis_mv[0][OC_FRAME_GOLD];
+ embs[mbi].unref_mv[OC_FRAME_PREV]=
+ embs[mbi].analysis_mv[0][OC_FRAME_PREV];
embs[mbi].refined=0;
}
- oc_mb_intra_satd(_enc,mbi,intra_satd);
/*Estimate the cost of coding this MB in a keyframe.*/
if(_allow_keyframe){
oc_cost_intra(_enc,modes+OC_MODE_INTRA,mbi,
- pipe.fr+0,&intra_luma_qs,intra_satd,OC_NOSKIP);
+ _enc->pipe.fr+0,&intra_luma_qs,intra_satd,OC_NOSKIP,rd_scale);
intrabits+=modes[OC_MODE_INTRA].rate;
for(bi=0;bi<4;bi++){
oc_qii_state_advance(&intra_luma_qs,&intra_luma_qs,
@@ -1964,26 +2427,28 @@ int oc_enc_analyze_inter(oc_enc_ctx *_enc,int _allow_keyframe,int _recode){
}
}
/*Estimate the cost in a delta frame for various modes.*/
- oc_skip_cost(_enc,&pipe,mbi,skip_ssd);
- oc_cost_inter_nomv(_enc,modes+OC_MODE_INTER_NOMV,mbi,
- OC_MODE_INTER_NOMV,pipe.fr+0,pipe.qs+0,skip_ssd);
- if(_enc->sp_level<OC_SP_LEVEL_NOMC){
+ oc_skip_cost(_enc,&_enc->pipe,mbi,rd_scale,skip_ssd);
+ if(sp_level<OC_SP_LEVEL_NOMC){
+ oc_cost_inter_nomv(_enc,modes+OC_MODE_INTER_NOMV,mbi,
+ OC_MODE_INTER_NOMV,_enc->pipe.fr+0,_enc->pipe.qs+0,
+ skip_ssd,rd_scale);
oc_cost_intra(_enc,modes+OC_MODE_INTRA,mbi,
- pipe.fr+0,pipe.qs+0,intra_satd,skip_ssd);
+ _enc->pipe.fr+0,_enc->pipe.qs+0,intra_satd,skip_ssd,rd_scale);
mb_mv_bits_0=oc_cost_inter1mv(_enc,modes+OC_MODE_INTER_MV,mbi,
OC_MODE_INTER_MV,embs[mbi].unref_mv[OC_FRAME_PREV],
- pipe.fr+0,pipe.qs+0,skip_ssd);
+ _enc->pipe.fr+0,_enc->pipe.qs+0,skip_ssd,rd_scale);
oc_cost_inter(_enc,modes+OC_MODE_INTER_MV_LAST,mbi,
- OC_MODE_INTER_MV_LAST,last_mv,pipe.fr+0,pipe.qs+0,skip_ssd);
+ OC_MODE_INTER_MV_LAST,last_mv,_enc->pipe.fr+0,_enc->pipe.qs+0,
+ skip_ssd,rd_scale);
oc_cost_inter(_enc,modes+OC_MODE_INTER_MV_LAST2,mbi,
- OC_MODE_INTER_MV_LAST2,prior_mv,pipe.fr+0,pipe.qs+0,skip_ssd);
- oc_cost_inter4mv(_enc,modes+OC_MODE_INTER_MV_FOUR,mbi,
- embs[mbi].block_mv,pipe.fr+0,pipe.qs+0,skip_ssd);
+ OC_MODE_INTER_MV_LAST2,prior_mv,_enc->pipe.fr+0,_enc->pipe.qs+0,
+ skip_ssd,rd_scale);
oc_cost_inter_nomv(_enc,modes+OC_MODE_GOLDEN_NOMV,mbi,
- OC_MODE_GOLDEN_NOMV,pipe.fr+0,pipe.qs+0,skip_ssd);
+ OC_MODE_GOLDEN_NOMV,_enc->pipe.fr+0,_enc->pipe.qs+0,
+ skip_ssd,rd_scale);
mb_gmv_bits_0=oc_cost_inter1mv(_enc,modes+OC_MODE_GOLDEN_MV,mbi,
OC_MODE_GOLDEN_MV,embs[mbi].unref_mv[OC_FRAME_GOLD],
- pipe.fr+0,pipe.qs+0,skip_ssd);
+ _enc->pipe.fr+0,_enc->pipe.qs+0,skip_ssd,rd_scale);
/*The explicit MV modes (2,6,7) have not yet gone through halfpel
refinement.
We choose the explicit MV mode that's already furthest ahead on
@@ -1991,6 +2456,14 @@ int oc_enc_analyze_inter(oc_enc_ctx *_enc,int _allow_keyframe,int _recode){
We have to be careful to remember which ones we've refined so that
we don't refine it again if we re-encode this frame.*/
inter_mv_pref=_enc->lambda*3;
+ if(sp_level<OC_SP_LEVEL_FAST_ANALYSIS){
+ oc_cost_inter4mv(_enc,modes+OC_MODE_INTER_MV_FOUR,mbi,
+ embs[mbi].block_mv,_enc->pipe.fr+0,_enc->pipe.qs+0,
+ skip_ssd,rd_scale);
+ }
+ else{
+ modes[OC_MODE_INTER_MV_FOUR].cost=UINT_MAX;
+ }
if(modes[OC_MODE_INTER_MV_FOUR].cost<modes[OC_MODE_INTER_MV].cost&&
modes[OC_MODE_INTER_MV_FOUR].cost<modes[OC_MODE_GOLDEN_MV].cost){
if(!(embs[mbi].refined&0x80)){
@@ -1998,7 +2471,8 @@ int oc_enc_analyze_inter(oc_enc_ctx *_enc,int _allow_keyframe,int _recode){
embs[mbi].refined|=0x80;
}
oc_cost_inter4mv(_enc,modes+OC_MODE_INTER_MV_FOUR,mbi,
- embs[mbi].ref_mv,pipe.fr+0,pipe.qs+0,skip_ssd);
+ embs[mbi].ref_mv,_enc->pipe.fr+0,_enc->pipe.qs+0,
+ skip_ssd,rd_scale);
}
else if(modes[OC_MODE_GOLDEN_MV].cost+inter_mv_pref<
modes[OC_MODE_INTER_MV].cost){
@@ -2008,7 +2482,7 @@ int oc_enc_analyze_inter(oc_enc_ctx *_enc,int _allow_keyframe,int _recode){
}
mb_gmv_bits_0=oc_cost_inter1mv(_enc,modes+OC_MODE_GOLDEN_MV,mbi,
OC_MODE_GOLDEN_MV,embs[mbi].analysis_mv[0][OC_FRAME_GOLD],
- pipe.fr+0,pipe.qs+0,skip_ssd);
+ _enc->pipe.fr+0,_enc->pipe.qs+0,skip_ssd,rd_scale);
}
if(!(embs[mbi].refined&0x04)){
oc_mcenc_refine1mv(_enc,mbi,OC_FRAME_PREV);
@@ -2016,7 +2490,7 @@ int oc_enc_analyze_inter(oc_enc_ctx *_enc,int _allow_keyframe,int _recode){
}
mb_mv_bits_0=oc_cost_inter1mv(_enc,modes+OC_MODE_INTER_MV,mbi,
OC_MODE_INTER_MV,embs[mbi].analysis_mv[0][OC_FRAME_PREV],
- pipe.fr+0,pipe.qs+0,skip_ssd);
+ _enc->pipe.fr+0,_enc->pipe.qs+0,skip_ssd,rd_scale);
/*Finally, pick the mode with the cheapest estimated R-D cost.*/
mb_mode=OC_MODE_INTER_NOMV;
if(modes[OC_MODE_INTRA].cost<modes[OC_MODE_INTER_NOMV].cost){
@@ -2046,8 +2520,14 @@ int oc_enc_analyze_inter(oc_enc_ctx *_enc,int _allow_keyframe,int _recode){
}
}
else{
+ oc_cost_inter_nomv(_enc,modes+OC_MODE_INTER_NOMV,mbi,
+ OC_MODE_INTER_NOMV,_enc->pipe.fr+0,_enc->pipe.qs+0,
+ skip_ssd,rd_scale);
+ oc_cost_intra(_enc,modes+OC_MODE_INTRA,mbi,
+ _enc->pipe.fr+0,_enc->pipe.qs+0,intra_satd,skip_ssd,rd_scale);
oc_cost_inter_nomv(_enc,modes+OC_MODE_GOLDEN_NOMV,mbi,
- OC_MODE_GOLDEN_NOMV,pipe.fr+0,pipe.qs+0,skip_ssd);
+ OC_MODE_GOLDEN_NOMV,_enc->pipe.fr+0,_enc->pipe.qs+0,
+ skip_ssd,rd_scale);
mb_mode=OC_MODE_INTER_NOMV;
if(modes[OC_MODE_INTRA].cost<modes[OC_MODE_INTER_NOMV].cost){
mb_mode=OC_MODE_INTRA;
@@ -2062,67 +2542,55 @@ int oc_enc_analyze_inter(oc_enc_ctx *_enc,int _allow_keyframe,int _recode){
if(mb_mode!=OC_MODE_INTER_MV_FOUR){
switch(mb_mode){
case OC_MODE_INTER_MV:{
- dx=embs[mbi].analysis_mv[0][OC_FRAME_PREV][0];
- dy=embs[mbi].analysis_mv[0][OC_FRAME_PREV][1];
- }break;
- case OC_MODE_INTER_MV_LAST:{
- dx=last_mv[0];
- dy=last_mv[1];
- }break;
- case OC_MODE_INTER_MV_LAST2:{
- dx=prior_mv[0];
- dy=prior_mv[1];
+ mv=embs[mbi].analysis_mv[0][OC_FRAME_PREV];
}break;
+ case OC_MODE_INTER_MV_LAST:mv=last_mv;break;
+ case OC_MODE_INTER_MV_LAST2:mv=prior_mv;break;
case OC_MODE_GOLDEN_MV:{
- dx=embs[mbi].analysis_mv[0][OC_FRAME_GOLD][0];
- dy=embs[mbi].analysis_mv[0][OC_FRAME_GOLD][1];
+ mv=embs[mbi].analysis_mv[0][OC_FRAME_GOLD];
}break;
}
for(bi=0;bi<4;bi++){
fragi=mb_maps[mbi][0][bi];
- frag_mvs[fragi][0]=(signed char)dx;
- frag_mvs[fragi][1]=(signed char)dy;
+ frag_mvs[fragi]=mv;
}
}
for(bi=0;bi<4;bi++){
fragi=sb_maps[mbi>>2][mbi&3][bi];
frags[fragi].qii=modes[mb_mode].qii[bi];
}
- if(oc_enc_mb_transform_quantize_luma(_enc,&pipe,mbi,
- modes[mb_mode].overhead>>OC_BIT_SCALE)>0){
+ if(oc_enc_mb_transform_quantize_inter_luma(_enc,&_enc->pipe,mbi,
+ modes[mb_mode].overhead>>OC_BIT_SCALE,rd_scale,rd_iscale)>0){
int orig_mb_mode;
orig_mb_mode=mb_mode;
mb_mode=mb_modes[mbi];
+ refi=OC_FRAME_FOR_MODE(mb_mode);
switch(mb_mode){
case OC_MODE_INTER_MV:{
- memcpy(prior_mv,last_mv,sizeof(prior_mv));
+ prior_mv=last_mv;
/*If we're backing out from 4MV, find the MV we're actually
using.*/
if(orig_mb_mode==OC_MODE_INTER_MV_FOUR){
for(bi=0;;bi++){
fragi=mb_maps[mbi][0][bi];
if(frags[fragi].coded){
- memcpy(last_mv,frag_mvs[fragi],sizeof(last_mv));
- dx=frag_mvs[fragi][0];
- dy=frag_mvs[fragi][1];
+ mv=last_mv=frag_mvs[fragi];
break;
}
}
- mb_mv_bits_0=OC_MV_BITS[0][dx+31]+OC_MV_BITS[0][dy+31];
+ mb_mv_bits_0=OC_MV_BITS[0][OC_MV_X(mv)+31]
+ +OC_MV_BITS[0][OC_MV_Y(mv)+31];
}
/*Otherwise we used the original analysis MV.*/
- else{
- memcpy(last_mv,
- embs[mbi].analysis_mv[0][OC_FRAME_PREV],sizeof(last_mv));
- }
+ else last_mv=embs[mbi].analysis_mv[0][OC_FRAME_PREV];
_enc->mv_bits[0]+=mb_mv_bits_0;
_enc->mv_bits[1]+=12;
}break;
case OC_MODE_INTER_MV_LAST2:{
oc_mv tmp_mv;
- memcpy(tmp_mv,prior_mv,sizeof(tmp_mv));
- memcpy(prior_mv,last_mv,sizeof(prior_mv));
- memcpy(last_mv,tmp_mv,sizeof(last_mv));
+ tmp_mv=prior_mv;
+ prior_mv=last_mv;
+ last_mv=tmp_mv;
}break;
case OC_MODE_GOLDEN_MV:{
_enc->mv_bits[0]+=mb_gmv_bits_0;
@@ -2131,28 +2599,28 @@ int oc_enc_analyze_inter(oc_enc_ctx *_enc,int _allow_keyframe,int _recode){
case OC_MODE_INTER_MV_FOUR:{
oc_mv lbmvs[4];
oc_mv cbmvs[4];
- memcpy(prior_mv,last_mv,sizeof(prior_mv));
+ prior_mv=last_mv;
for(bi=0;bi<4;bi++){
fragi=mb_maps[mbi][0][bi];
if(frags[fragi].coded){
- memcpy(last_mv,frag_mvs[fragi],sizeof(last_mv));
- memcpy(lbmvs[bi],frag_mvs[fragi],sizeof(lbmvs[bi]));
- _enc->mv_bits[0]+=OC_MV_BITS[0][frag_mvs[fragi][0]+31]
- +OC_MV_BITS[0][frag_mvs[fragi][1]+31];
+ lbmvs[bi]=last_mv=frag_mvs[fragi];
+ _enc->mv_bits[0]+=OC_MV_BITS[0][OC_MV_X(last_mv)+31]
+ +OC_MV_BITS[0][OC_MV_Y(last_mv)+31];
_enc->mv_bits[1]+=12;
}
/*Replace the block MVs for not-coded blocks with (0,0).*/
- else memset(lbmvs[bi],0,sizeof(lbmvs[bi]));
+ else lbmvs[bi]=0;
}
- (*set_chroma_mvs)(cbmvs,(const oc_mv *)lbmvs);
+ (*set_chroma_mvs)(cbmvs,lbmvs);
for(mapii=4;mapii<nmap_idxs;mapii++){
mapi=map_idxs[mapii];
pli=mapi>>2;
bi=mapi&3;
fragi=mb_maps[mbi][pli][bi];
- frags[fragi].mb_mode=mb_mode;
frags[fragi].qii=modes[OC_MODE_INTER_MV_FOUR].qii[mapii];
- memcpy(frag_mvs[fragi],cbmvs[bi],sizeof(frag_mvs[fragi]));
+ frags[fragi].refi=refi;
+ frags[fragi].mb_mode=mb_mode;
+ frag_mvs[fragi]=cbmvs[bi];
}
}break;
}
@@ -2163,7 +2631,8 @@ int oc_enc_analyze_inter(oc_enc_ctx *_enc,int _allow_keyframe,int _recode){
else{
*(uncoded_mbis-++nuncoded_mbis)=mbi;
mb_mode=OC_MODE_INTER_NOMV;
- dx=dy=0;
+ refi=OC_FRAME_PREV;
+ mv=0;
}
/*Propagate final MB mode and MVs to the chroma blocks.
This has already been done for 4MV mode, since it requires individual
@@ -2174,43 +2643,56 @@ int oc_enc_analyze_inter(oc_enc_ctx *_enc,int _allow_keyframe,int _recode){
pli=mapi>>2;
bi=mapi&3;
fragi=mb_maps[mbi][pli][bi];
- frags[fragi].mb_mode=mb_mode;
/*If we switched from 4MV mode to INTER_MV mode, then the qii
values won't have been chosen with the right MV, but it's
probaby not worth re-estimating them.*/
frags[fragi].qii=modes[mb_mode].qii[mapii];
- frag_mvs[fragi][0]=(signed char)dx;
- frag_mvs[fragi][1]=(signed char)dy;
+ frags[fragi].refi=refi;
+ frags[fragi].mb_mode=mb_mode;
+ frag_mvs[fragi]=mv;
}
}
+ /*Save masking scale factors for chroma blocks.*/
+ for(mapii=4;mapii<(nmap_idxs-4>>1)+4;mapii++){
+ mapi=map_idxs[mapii];
+ bi=mapi&3;
+ fragi=mb_maps[mbi][1][bi];
+ mcu_rd_scale[fragi-cfroffset]=(ogg_uint16_t)rd_scale[4];
+ mcu_rd_iscale[fragi-cfroffset]=(ogg_uint16_t)rd_iscale[4];
+ }
}
- oc_fr_state_flush_sb(pipe.fr+0);
- sb_flags[sbi].coded_fully=pipe.fr[0].sb_full;
- sb_flags[sbi].coded_partially=pipe.fr[0].sb_partial;
+ oc_fr_state_flush_sb(_enc->pipe.fr+0);
+ sb_flags[sbi].coded_fully=_enc->pipe.fr[0].sb_full;
+ sb_flags[sbi].coded_partially=_enc->pipe.fr[0].sb_partial;
}
- oc_enc_pipeline_finish_mcu_plane(_enc,&pipe,0,notstart,notdone);
+ oc_enc_pipeline_finish_mcu_plane(_enc,&_enc->pipe,0,notstart,notdone);
/*Code chroma planes.*/
for(pli=1;pli<3;pli++){
- oc_enc_sb_transform_quantize_chroma(_enc,&pipe,
- pli,pipe.sbi0[pli],pipe.sbi_end[pli]);
- oc_enc_pipeline_finish_mcu_plane(_enc,&pipe,pli,notstart,notdone);
+ oc_enc_sb_transform_quantize_inter_chroma(_enc,&_enc->pipe,
+ pli,_enc->pipe.sbi0[pli],_enc->pipe.sbi_end[pli]);
+ oc_enc_pipeline_finish_mcu_plane(_enc,&_enc->pipe,pli,notstart,notdone);
}
notstart=1;
}
+ /*Update the average block activity and MB luma score for the frame.
+ We could use a Bessel follower here, but fast reaction is probably almost
+ always best.*/
+ _enc->activity_avg=OC_MAXI(OC_ACTIVITY_AVG_MIN,
+ (unsigned)((activity_sum+(_enc->state.fplanes[0].nfrags>>1))/
+ _enc->state.fplanes[0].nfrags));
+ _enc->luma_avg=(unsigned)((luma_sum+(_enc->state.nmbs>>1))/_enc->state.nmbs);
/*Finish filling in the reference frame borders.*/
refi=_enc->state.ref_frame_idx[OC_FRAME_SELF];
for(pli=0;pli<3;pli++)oc_state_borders_fill_caps(&_enc->state,refi,pli);
/*Finish adding flagging overhead costs to inter bit counts to determine if
we should have coded a key frame instead.*/
if(_allow_keyframe){
- if(interbits>intrabits)return 1;
/*Technically the chroma plane counts are over-estimations, because they
don't account for continuing runs from the luma planes, but the
- inaccuracy is small.*/
- for(pli=0;pli<3;pli++)interbits+=pipe.fr[pli].bits<<OC_BIT_SCALE;
- interbits+=OC_MINI(_enc->mv_bits[0],_enc->mv_bits[1])<<OC_BIT_SCALE;
- interbits+=
- _enc->chooser.scheme_bits[_enc->chooser.scheme_list[0]]<<OC_BIT_SCALE;
+ inaccuracy is small.
+ We don't need to add the luma plane coding flag costs, because they are
+ already included in the MB rate estimates.*/
+ for(pli=1;pli<3;pli++)interbits+=_enc->pipe.fr[pli].bits<<OC_BIT_SCALE;
if(interbits>intrabits)return 1;
}
_enc->ncoded_mbis=ncoded_mbis;
@@ -2228,482 +2710,3 @@ int oc_enc_analyze_inter(oc_enc_ctx *_enc,int _allow_keyframe,int _recode){
}
return 0;
}
-
-#if defined(OC_COLLECT_METRICS)
-# include <stdio.h>
-# include <math.h>
-
-/*TODO: It may be helpful (for block-level quantizers especially) to separate
- out the contributions from AC and DC into separate tables.*/
-
-# define OC_ZWEIGHT (0.25)
-
-static void oc_mode_metrics_add(oc_mode_metrics *_metrics,
- double _w,int _satd,int _rate,double _rmse){
- double rate;
- /*Accumulate statistics without the scaling; this lets us change the scale
- factor yet still use old data.*/
- rate=ldexp(_rate,-OC_BIT_SCALE);
- if(_metrics->fragw>0){
- double dsatd;
- double drate;
- double drmse;
- double w;
- dsatd=_satd-_metrics->satd/_metrics->fragw;
- drate=rate-_metrics->rate/_metrics->fragw;
- drmse=_rmse-_metrics->rmse/_metrics->fragw;
- w=_metrics->fragw*_w/(_metrics->fragw+_w);
- _metrics->satd2+=dsatd*dsatd*w;
- _metrics->satdrate+=dsatd*drate*w;
- _metrics->rate2+=drate*drate*w;
- _metrics->satdrmse+=dsatd*drmse*w;
- _metrics->rmse2+=drmse*drmse*w;
- }
- _metrics->fragw+=_w;
- _metrics->satd+=_satd*_w;
- _metrics->rate+=rate*_w;
- _metrics->rmse+=_rmse*_w;
-}
-
-static void oc_mode_metrics_merge(oc_mode_metrics *_dst,
- const oc_mode_metrics *_src,int _n){
- int i;
- /*Find a non-empty set of metrics.*/
- for(i=0;i<_n&&_src[i].fragw<=0;i++);
- if(i>=_n){
- memset(_dst,0,sizeof(*_dst));
- return;
- }
- memcpy(_dst,_src+i,sizeof(*_dst));
- /*And iterate over the remaining non-empty sets of metrics.*/
- for(i++;i<_n;i++)if(_src[i].fragw>0){
- double wa;
- double wb;
- double dsatd;
- double drate;
- double drmse;
- double w;
- wa=_dst->fragw;
- wb=_src[i].fragw;
- dsatd=_src[i].satd/wb-_dst->satd/wa;
- drate=_src[i].rate/wb-_dst->rate/wa;
- drmse=_src[i].rmse/wb-_dst->rmse/wa;
- w=wa*wb/(wa+wb);
- _dst->fragw+=_src[i].fragw;
- _dst->satd+=_src[i].satd;
- _dst->rate+=_src[i].rate;
- _dst->rmse+=_src[i].rmse;
- _dst->satd2+=_src[i].satd2+dsatd*dsatd*w;
- _dst->satdrate+=_src[i].satdrate+dsatd*drate*w;
- _dst->rate2+=_src[i].rate2+drate*drate*w;
- _dst->satdrmse+=_src[i].satdrmse+dsatd*drmse*w;
- _dst->rmse2+=_src[i].rmse2+drmse*drmse*w;
- }
-}
-
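/*The removed helpers above maintain weighted first and second moments with
  the usual parallel-update rule: when a sample of weight w joins a set of
  total weight W, the sum of squared deviations grows by delta^2*W*w/(W+w),
  where delta is the sample's distance from the current mean, and merging two
  sets uses the same cross term with the difference of their means.
  A minimal single-variable sketch of that update (names are illustrative):*/
typedef struct{
  double w;    /*Total weight.*/
  double sum;  /*Weighted sum of values.*/
  double ss;   /*Weighted sum of squared deviations from the mean.*/
}oc_stat_acc_sketch;

static void oc_stat_acc_sketch_add(oc_stat_acc_sketch *_acc,
 double _w,double _x){
  if(_acc->w>0){
    double dx;
    dx=_x-_acc->sum/_acc->w;
    _acc->ss+=dx*dx*_acc->w*_w/(_acc->w+_w);
  }
  _acc->w+=_w;
  _acc->sum+=_x*_w;
}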
-/*Compile collected SATD/rate/RMSE metrics into a form that's immediately
- useful for mode decision.*/
-static void oc_enc_mode_metrics_update(oc_enc_ctx *_enc,int _qi){
- int pli;
- int qti;
- oc_restore_fpu(&_enc->state);
- /*Convert raw collected data into cleaned up sample points.*/
- for(pli=0;pli<3;pli++){
- for(qti=0;qti<2;qti++){
- double fragw;
- int bin0;
- int bin1;
- int bin;
- fragw=0;
- bin0=bin1=0;
- for(bin=0;bin<OC_SAD_BINS;bin++){
- oc_mode_metrics metrics;
- OC_MODE_RD[_qi][pli][qti][bin].rate=0;
- OC_MODE_RD[_qi][pli][qti][bin].rmse=0;
- /*Find some points on either side of the current bin.*/
- while((bin1<bin+1||fragw<OC_ZWEIGHT)&&bin1<OC_SAD_BINS-1){
- fragw+=OC_MODE_METRICS[_qi][pli][qti][bin1++].fragw;
- }
- while(bin0+1<bin&&bin0+1<bin1&&
- fragw-OC_MODE_METRICS[_qi][pli][qti][bin0].fragw>=OC_ZWEIGHT){
- fragw-=OC_MODE_METRICS[_qi][pli][qti][bin0++].fragw;
- }
- /*Merge statistics and fit lines.*/
- oc_mode_metrics_merge(&metrics,
- OC_MODE_METRICS[_qi][pli][qti]+bin0,bin1-bin0);
- if(metrics.fragw>0&&metrics.satd2>0){
- double a;
- double b;
- double msatd;
- double mrate;
- double mrmse;
- double rate;
- double rmse;
- msatd=metrics.satd/metrics.fragw;
- mrate=metrics.rate/metrics.fragw;
- mrmse=metrics.rmse/metrics.fragw;
- /*Compute the points on these lines corresponding to the actual bin
- value.*/
- b=metrics.satdrate/metrics.satd2;
- a=mrate-b*msatd;
- rate=ldexp(a+b*(bin<<OC_SAD_SHIFT),OC_BIT_SCALE);
- OC_MODE_RD[_qi][pli][qti][bin].rate=
- (ogg_int16_t)OC_CLAMPI(-32768,(int)(rate+0.5),32767);
- b=metrics.satdrmse/metrics.satd2;
- a=mrmse-b*msatd;
- rmse=ldexp(a+b*(bin<<OC_SAD_SHIFT),OC_RMSE_SCALE);
- OC_MODE_RD[_qi][pli][qti][bin].rmse=
- (ogg_int16_t)OC_CLAMPI(-32768,(int)(rmse+0.5),32767);
- }
- }
- }
- }
-}
-
-
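/*The "fit lines" step above is a weighted least-squares fit expressed through
  the accumulated moments: slope b=cov(satd,rate)/var(satd) (satdrate and
  satd2 are the weighted sums of products of deviations), intercept
  a=mean(rate)-b*mean(satd), and the rmse line is fit the same way.
  A scalar sketch of that fit (names are illustrative):*/
static void fit_line_sketch(double _sxx,double _sxy,double _mx,double _my,
 double *_a,double *_b){
  double b;
  b=_sxx>0?_sxy/_sxx:0;
  *_b=b;
  *_a=_my-b*_mx;
}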
-
-/*The following token skipping code used to also be used in the decoder (and
- even at one point other places in the encoder).
- However, it was obsoleted by other optimizations, and is now only used here.
- It has been moved here to avoid generating the code when it's not needed.*/
-
-/*Determines the number of blocks or coefficients to be skipped for a given
- token value.
- _token: The token value to skip.
- _extra_bits: The extra bits attached to this token.
- Return: A positive value indicates that number of coefficients are to be
- skipped in the current block.
- Otherwise, the negative of the return value indicates that number of
- blocks are to be ended.*/
-typedef ptrdiff_t (*oc_token_skip_func)(int _token,int _extra_bits);
-
-/*Handles the simple end of block tokens.*/
-static ptrdiff_t oc_token_skip_eob(int _token,int _extra_bits){
- int nblocks_adjust;
- nblocks_adjust=OC_UNIBBLE_TABLE32(0,1,2,3,7,15,0,0,_token)+1;
- return -_extra_bits-nblocks_adjust;
-}
-
-/*The last EOB token has a special case, where an EOB run of size zero ends all
- the remaining blocks in the frame.*/
-static ptrdiff_t oc_token_skip_eob6(int _token,int _extra_bits){
- /*Note: We want to return -PTRDIFF_MAX, but that requires C99, which is not
- yet available everywhere; this should be equivalent.*/
- if(!_extra_bits)return -(~(size_t)0>>1);
- return -_extra_bits;
-}
-
-/*Handles the pure zero run tokens.*/
-static ptrdiff_t oc_token_skip_zrl(int _token,int _extra_bits){
- return _extra_bits+1;
-}
-
-/*Handles a normal coefficient value token.*/
-static ptrdiff_t oc_token_skip_val(void){
- return 1;
-}
-
-/*Handles a category 1A zero run/coefficient value combo token.*/
-static ptrdiff_t oc_token_skip_run_cat1a(int _token){
- return _token-OC_DCT_RUN_CAT1A+2;
-}
-
-/*Handles category 1b, 1c, 2a, and 2b zero run/coefficient value combo tokens.*/
-static ptrdiff_t oc_token_skip_run(int _token,int _extra_bits){
- int run_cati;
- int ncoeffs_mask;
- int ncoeffs_adjust;
- run_cati=_token-OC_DCT_RUN_CAT1B;
- ncoeffs_mask=OC_BYTE_TABLE32(3,7,0,1,run_cati);
- ncoeffs_adjust=OC_BYTE_TABLE32(7,11,2,3,run_cati);
- return (_extra_bits&ncoeffs_mask)+ncoeffs_adjust;
-}
-
-/*A jump table for computing the number of coefficients or blocks to skip for
- a given token value.
- This reduces all the conditional branches, etc., needed to parse these token
- values down to one indirect jump.*/
-static const oc_token_skip_func OC_TOKEN_SKIP_TABLE[TH_NDCT_TOKENS]={
- oc_token_skip_eob,
- oc_token_skip_eob,
- oc_token_skip_eob,
- oc_token_skip_eob,
- oc_token_skip_eob,
- oc_token_skip_eob,
- oc_token_skip_eob6,
- oc_token_skip_zrl,
- oc_token_skip_zrl,
- (oc_token_skip_func)oc_token_skip_val,
- (oc_token_skip_func)oc_token_skip_val,
- (oc_token_skip_func)oc_token_skip_val,
- (oc_token_skip_func)oc_token_skip_val,
- (oc_token_skip_func)oc_token_skip_val,
- (oc_token_skip_func)oc_token_skip_val,
- (oc_token_skip_func)oc_token_skip_val,
- (oc_token_skip_func)oc_token_skip_val,
- (oc_token_skip_func)oc_token_skip_val,
- (oc_token_skip_func)oc_token_skip_val,
- (oc_token_skip_func)oc_token_skip_val,
- (oc_token_skip_func)oc_token_skip_val,
- (oc_token_skip_func)oc_token_skip_val,
- (oc_token_skip_func)oc_token_skip_val,
- (oc_token_skip_func)oc_token_skip_run_cat1a,
- (oc_token_skip_func)oc_token_skip_run_cat1a,
- (oc_token_skip_func)oc_token_skip_run_cat1a,
- (oc_token_skip_func)oc_token_skip_run_cat1a,
- (oc_token_skip_func)oc_token_skip_run_cat1a,
- oc_token_skip_run,
- oc_token_skip_run,
- oc_token_skip_run,
- oc_token_skip_run
-};
-
-/*Determines the number of blocks or coefficients to be skipped for a given
- token value.
- _token: The token value to skip.
- _extra_bits: The extra bits attached to this token.
- Return: A positive value indicates that number of coefficients are to be
- skipped in the current block.
- Otherwise, the negative of the return value indicates that number of
- blocks are to be ended.
- 0 will never be returned, so that at least one coefficient in one
- block will always be decoded for every token.*/
-static ptrdiff_t oc_dct_token_skip(int _token,int _extra_bits){
- return (*OC_TOKEN_SKIP_TABLE[_token])(_token,_extra_bits);
-}
-
-
-
-void oc_enc_mode_metrics_collect(oc_enc_ctx *_enc){
- static const unsigned char OC_ZZI_HUFF_OFFSET[64]={
- 0,16,16,16,16,16,32,32,
- 32,32,32,32,32,32,32,48,
- 48,48,48,48,48,48,48,48,
- 48,48,48,48,64,64,64,64,
- 64,64,64,64,64,64,64,64,
- 64,64,64,64,64,64,64,64,
- 64,64,64,64,64,64,64,64
- };
- const oc_fragment *frags;
- const unsigned *frag_satd;
- const unsigned *frag_ssd;
- const ptrdiff_t *coded_fragis;
- ptrdiff_t ncoded_fragis;
- ptrdiff_t fragii;
- double fragw;
- int qti;
- int qii;
- int qi;
- int pli;
- int zzi;
- int token;
- int eb;
- oc_restore_fpu(&_enc->state);
- /*Load any existing mode metrics if we haven't already.*/
- if(!oc_has_mode_metrics){
- FILE *fmetrics;
- memset(OC_MODE_METRICS,0,sizeof(OC_MODE_METRICS));
- fmetrics=fopen("modedec.stats","rb");
- if(fmetrics!=NULL){
- fread(OC_MODE_METRICS,sizeof(OC_MODE_METRICS),1,fmetrics);
- fclose(fmetrics);
- }
- for(qi=0;qi<64;qi++)oc_enc_mode_metrics_update(_enc,qi);
- oc_has_mode_metrics=1;
- }
- qti=_enc->state.frame_type;
- frags=_enc->state.frags;
- frag_satd=_enc->frag_satd;
- frag_ssd=_enc->frag_ssd;
- coded_fragis=_enc->state.coded_fragis;
- ncoded_fragis=fragii=0;
- /*Weight the fragments by the inverse frame size; this prevents HD content
- from dominating the statistics.*/
- fragw=1.0/_enc->state.nfrags;
- for(pli=0;pli<3;pli++){
- ptrdiff_t ti[64];
- int eob_token[64];
- int eob_run[64];
- /*Set up token indices and eob run counts.
- We don't bother trying to figure out the real cost of the runs that span
- coefficients; instead we use the costs that were available when R-D
- token optimization was done.*/
- for(zzi=0;zzi<64;zzi++){
- ti[zzi]=_enc->dct_token_offs[pli][zzi];
- if(ti[zzi]>0){
- token=_enc->dct_tokens[pli][zzi][0];
- eb=_enc->extra_bits[pli][zzi][0];
- eob_token[zzi]=token;
- eob_run[zzi]=-oc_dct_token_skip(token,eb);
- }
- else{
- eob_token[zzi]=OC_NDCT_EOB_TOKEN_MAX;
- eob_run[zzi]=0;
- }
- }
- /*Scan the list of coded fragments for this plane.*/
- ncoded_fragis+=_enc->state.ncoded_fragis[pli];
- for(;fragii<ncoded_fragis;fragii++){
- ptrdiff_t fragi;
- ogg_uint32_t frag_bits;
- int huffi;
- int skip;
- int mb_mode;
- unsigned satd;
- int bin;
- fragi=coded_fragis[fragii];
- frag_bits=0;
- for(zzi=0;zzi<64;){
- if(eob_run[zzi]>0){
- /*We've reached the end of the block.*/
- eob_run[zzi]--;
- break;
- }
- huffi=_enc->huff_idxs[qti][zzi>0][pli+1>>1]
- +OC_ZZI_HUFF_OFFSET[zzi];
- if(eob_token[zzi]<OC_NDCT_EOB_TOKEN_MAX){
- /*This token caused an EOB run to be flushed.
- Therefore it gets the bits associated with it.*/
- frag_bits+=_enc->huff_codes[huffi][eob_token[zzi]].nbits
- +OC_DCT_TOKEN_EXTRA_BITS[eob_token[zzi]];
- eob_token[zzi]=OC_NDCT_EOB_TOKEN_MAX;
- }
- token=_enc->dct_tokens[pli][zzi][ti[zzi]];
- eb=_enc->extra_bits[pli][zzi][ti[zzi]];
- ti[zzi]++;
- skip=oc_dct_token_skip(token,eb);
- if(skip<0){
- eob_token[zzi]=token;
- eob_run[zzi]=-skip;
- }
- else{
- /*A regular DCT value token; accumulate the bits for it.*/
- frag_bits+=_enc->huff_codes[huffi][token].nbits
- +OC_DCT_TOKEN_EXTRA_BITS[token];
- zzi+=skip;
- }
- }
- mb_mode=frags[fragi].mb_mode;
- qi=_enc->state.qis[frags[fragi].qii];
- satd=frag_satd[fragi]<<(pli+1&2);
- bin=OC_MINI(satd>>OC_SAD_SHIFT,OC_SAD_BINS-1);
- oc_mode_metrics_add(OC_MODE_METRICS[qi][pli][mb_mode!=OC_MODE_INTRA]+bin,
- fragw,satd,frag_bits<<OC_BIT_SCALE,sqrt(frag_ssd[fragi]));
- }
- }
- /*Update global SATD/rate/RMSE estimation matrix.*/
- for(qii=0;qii<_enc->state.nqis;qii++){
- oc_enc_mode_metrics_update(_enc,_enc->state.qis[qii]);
- }
-}
-
-void oc_enc_mode_metrics_dump(oc_enc_ctx *_enc){
- FILE *fmetrics;
- int qi;
- /*Generate sample points for complete list of QI values.*/
- for(qi=0;qi<64;qi++)oc_enc_mode_metrics_update(_enc,qi);
- fmetrics=fopen("modedec.stats","wb");
- if(fmetrics!=NULL){
- fwrite(OC_MODE_METRICS,sizeof(OC_MODE_METRICS),1,fmetrics);
- fclose(fmetrics);
- }
- fprintf(stdout,
- "/*File generated by libtheora with OC_COLLECT_METRICS"
- " defined at compile time.*/\n"
- "#if !defined(_modedec_H)\n"
- "# define _modedec_H (1)\n"
- "\n"
- "\n"
- "\n"
- "# if defined(OC_COLLECT_METRICS)\n"
- "typedef struct oc_mode_metrics oc_mode_metrics;\n"
- "# endif\n"
- "typedef struct oc_mode_rd oc_mode_rd;\n"
- "\n"
- "\n"
- "\n"
- "/*The number of extra bits of precision at which to store rate"
- " metrics.*/\n"
- "# define OC_BIT_SCALE (%i)\n"
- "/*The number of extra bits of precision at which to store RMSE metrics.\n"
- " This must be at least half OC_BIT_SCALE (rounded up).*/\n"
- "# define OC_RMSE_SCALE (%i)\n"
- "/*The number of bins to partition statistics into.*/\n"
- "# define OC_SAD_BINS (%i)\n"
- "/*The number of bits of precision to drop"
- " from SAD scores to assign them to a\n"
- " bin.*/\n"
- "# define OC_SAD_SHIFT (%i)\n"
- "\n"
- "\n"
- "\n"
- "# if defined(OC_COLLECT_METRICS)\n"
- "struct oc_mode_metrics{\n"
- " double fragw;\n"
- " double satd;\n"
- " double rate;\n"
- " double rmse;\n"
- " double satd2;\n"
- " double satdrate;\n"
- " double rate2;\n"
- " double satdrmse;\n"
- " double rmse2;\n"
- "};\n"
- "\n"
- "\n"
- "int oc_has_mode_metrics;\n"
- "oc_mode_metrics OC_MODE_METRICS[64][3][2][OC_SAD_BINS];\n"
- "# endif\n"
- "\n"
- "\n"
- "\n"
- "struct oc_mode_rd{\n"
- " ogg_int16_t rate;\n"
- " ogg_int16_t rmse;\n"
- "};\n"
- "\n"
- "\n"
- "# if !defined(OC_COLLECT_METRICS)\n"
- "static const\n"
- "# endif\n"
- "oc_mode_rd OC_MODE_RD[64][3][2][OC_SAD_BINS]={\n",
- OC_BIT_SCALE,OC_RMSE_SCALE,OC_SAD_BINS,OC_SAD_SHIFT);
- for(qi=0;qi<64;qi++){
- int pli;
- fprintf(stdout," {\n");
- for(pli=0;pli<3;pli++){
- int qti;
- fprintf(stdout," {\n");
- for(qti=0;qti<2;qti++){
- int bin;
- static const char *pl_names[3]={"Y'","Cb","Cr"};
- static const char *qti_names[2]={"INTRA","INTER"};
- fprintf(stdout," /*%s qi=%i %s*/\n",
- pl_names[pli],qi,qti_names[qti]);
- fprintf(stdout," {\n");
- fprintf(stdout," ");
- for(bin=0;bin<OC_SAD_BINS;bin++){
- if(bin&&!(bin&0x3))fprintf(stdout,"\n ");
- fprintf(stdout,"{%5i,%5i}",
- OC_MODE_RD[qi][pli][qti][bin].rate,
- OC_MODE_RD[qi][pli][qti][bin].rmse);
- if(bin+1<OC_SAD_BINS)fprintf(stdout,",");
- }
- fprintf(stdout,"\n }");
- if(qti<1)fprintf(stdout,",");
- fprintf(stdout,"\n");
- }
- fprintf(stdout," }");
- if(pli<2)fprintf(stdout,",");
- fprintf(stdout,"\n");
- }
- fprintf(stdout," }");
- if(qi<63)fprintf(stdout,",");
- fprintf(stdout,"\n");
- }
- fprintf(stdout,
- "};\n"
- "\n"
- "#endif\n");
-}
-#endif
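The removed oc_mode_metrics_add() above (and its replacement in collect.c further down in this patch) accumulates second-order statistics with a weighted incremental update rather than storing individual samples. A minimal sketch of that update, with invented names and only a single moment tracked, shown purely for illustration and not part of the patch:

#include <stdio.h>

/*Running weighted mean and central second moment, updated one sample at a
   time in the same style as oc_mode_metrics_add().*/
typedef struct{
  double w;  /*Total weight.*/
  double s;  /*Weighted sum of samples.*/
  double s2; /*Weighted sum of squared deviations from the mean.*/
}acc;

static void acc_add(acc *_a,double _w,double _s){
  if(_a->w>0){
    double ds;
    double w;
    ds=_s-_a->s/_a->w;      /*Deviation from the current mean.*/
    w=_a->w*_w/(_a->w+_w);  /*Combined weight for the cross term.*/
    _a->s2+=ds*ds*w;
  }
  _a->w+=_w;
  _a->s+=_s*_w;
}

int main(void){
  acc a={0,0,0};
  acc_add(&a,1,3);
  acc_add(&a,1,5);
  /*Prints mean=4 s2=2: the same totals a two-pass computation would give.*/
  printf("mean=%g s2=%g\n",a.s/a.w,a.s2);
  return 0;
}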
diff --git a/thirdparty/libtheora/apiwrapper.c b/thirdparty/libtheora/apiwrapper.c
index dc959b8d13..87b4e939f2 100644
--- a/thirdparty/libtheora/apiwrapper.c
+++ b/thirdparty/libtheora/apiwrapper.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: apiwrapper.c 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
diff --git a/thirdparty/libtheora/apiwrapper.h b/thirdparty/libtheora/apiwrapper.h
index 93454d7bda..ff45e0a4d6 100644
--- a/thirdparty/libtheora/apiwrapper.h
+++ b/thirdparty/libtheora/apiwrapper.h
@@ -21,7 +21,7 @@
# include <theora/theora.h>
# include "theora/theoradec.h"
# include "theora/theoraenc.h"
-# include "internal.h"
+# include "state.h"
typedef struct th_api_wrapper th_api_wrapper;
typedef struct th_api_info th_api_info;
diff --git a/thirdparty/libtheora/bitpack.c b/thirdparty/libtheora/bitpack.c
index 8195003bad..5125dde6b0 100644
--- a/thirdparty/libtheora/bitpack.c
+++ b/thirdparty/libtheora/bitpack.c
@@ -11,7 +11,7 @@
********************************************************************
function: packing variable sized words into an octet stream
- last mod: $Id: bitpack.c 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
#include <string.h>
@@ -32,15 +32,18 @@ static oc_pb_window oc_pack_refill(oc_pack_buf *_b,int _bits){
const unsigned char *stop;
oc_pb_window window;
int available;
+ unsigned shift;
+ stop=_b->stop;
+ ptr=_b->ptr;
window=_b->window;
available=_b->bits;
- ptr=_b->ptr;
- stop=_b->stop;
- while(available<=OC_PB_WINDOW_SIZE-8&&ptr<stop){
- available+=8;
- window|=(oc_pb_window)*ptr++<<OC_PB_WINDOW_SIZE-available;
+ shift=OC_PB_WINDOW_SIZE-available;
+ while(7<shift&&ptr<stop){
+ shift-=8;
+ window|=(oc_pb_window)*ptr++<<shift;
}
_b->ptr=ptr;
+ available=OC_PB_WINDOW_SIZE-shift;
if(_bits>available){
if(ptr>=stop){
_b->eof=1;
@@ -67,7 +70,7 @@ void oc_pack_adv1(oc_pack_buf *_b){
}
/*Here we assume that 0<=_bits&&_bits<=32.*/
-long oc_pack_read(oc_pack_buf *_b,int _bits){
+long oc_pack_read_c(oc_pack_buf *_b,int _bits){
oc_pb_window window;
int available;
long result;
@@ -82,12 +85,12 @@ long oc_pack_read(oc_pack_buf *_b,int _bits){
available-=_bits;
window<<=1;
window<<=_bits-1;
- _b->bits=available;
_b->window=window;
+ _b->bits=available;
return result;
}
-int oc_pack_read1(oc_pack_buf *_b){
+int oc_pack_read1_c(oc_pack_buf *_b){
oc_pb_window window;
int available;
int result;
@@ -100,8 +103,8 @@ int oc_pack_read1(oc_pack_buf *_b){
result=window>>OC_PB_WINDOW_SIZE-1;
available--;
window<<=1;
- _b->bits=available;
_b->window=window;
+ _b->bits=available;
return result;
}
diff --git a/thirdparty/libtheora/bitpack.h b/thirdparty/libtheora/bitpack.h
index a020a292f5..237b584055 100644
--- a/thirdparty/libtheora/bitpack.h
+++ b/thirdparty/libtheora/bitpack.h
@@ -16,15 +16,32 @@
********************************************************************/
#if !defined(_bitpack_H)
# define _bitpack_H (1)
+# include <stddef.h>
# include <limits.h>
+# include "internal.h"
-typedef unsigned long oc_pb_window;
+typedef size_t oc_pb_window;
typedef struct oc_pack_buf oc_pack_buf;
+/*Custom bitpacker implementations.*/
+# if defined(OC_ARM_ASM)
+# include "arm/armbits.h"
+# endif
+
+# if !defined(oc_pack_read)
+# define oc_pack_read oc_pack_read_c
+# endif
+# if !defined(oc_pack_read1)
+# define oc_pack_read1 oc_pack_read1_c
+# endif
+# if !defined(oc_huff_token_decode)
+# define oc_huff_token_decode oc_huff_token_decode_c
+# endif
+
# define OC_PB_WINDOW_SIZE ((int)sizeof(oc_pb_window)*CHAR_BIT)
/*This is meant to be a large, positive constant that can still be efficiently
loaded as an immediate (on platforms like ARM, for example).
@@ -34,9 +51,9 @@ typedef struct oc_pack_buf oc_pack_buf;
struct oc_pack_buf{
- oc_pb_window window;
- const unsigned char *ptr;
const unsigned char *stop;
+ const unsigned char *ptr;
+ oc_pb_window window;
int bits;
int eof;
};
@@ -45,8 +62,8 @@ void oc_pack_readinit(oc_pack_buf *_b,unsigned char *_buf,long _bytes);
int oc_pack_look1(oc_pack_buf *_b);
void oc_pack_adv1(oc_pack_buf *_b);
/*Here we assume 0<=_bits&&_bits<=32.*/
-long oc_pack_read(oc_pack_buf *_b,int _bits);
-int oc_pack_read1(oc_pack_buf *_b);
+long oc_pack_read_c(oc_pack_buf *_b,int _bits);
+int oc_pack_read1_c(oc_pack_buf *_b);
/* returns -1 for read beyond EOF, or the number of whole bytes available */
long oc_pack_bytes_left(oc_pack_buf *_b);
diff --git a/thirdparty/libtheora/collect.c b/thirdparty/libtheora/collect.c
new file mode 100644
index 0000000000..c0d8a2733f
--- /dev/null
+++ b/thirdparty/libtheora/collect.c
@@ -0,0 +1,974 @@
+/********************************************************************
+ * *
+ * THIS FILE IS PART OF THE OggTheora SOFTWARE CODEC SOURCE CODE. *
+ * USE, DISTRIBUTION AND REPRODUCTION OF THIS LIBRARY SOURCE IS *
+ * GOVERNED BY A BSD-STYLE SOURCE LICENSE INCLUDED WITH THIS SOURCE *
+ * IN 'COPYING'. PLEASE READ THESE TERMS BEFORE DISTRIBUTING. *
+ * *
+ * THE Theora SOURCE CODE IS COPYRIGHT (C) 2002-2011 *
+ * by the Xiph.Org Foundation http://www.xiph.org/ *
+ * *
+ ********************************************************************
+
+ function: mode selection code
+ last mod: $Id$
+
+ ********************************************************************/
+#include <stdio.h>
+#include <limits.h>
+#include <math.h>
+#include <string.h>
+#include "collect.h"
+
+#if defined(OC_COLLECT_METRICS)
+
+int OC_HAS_MODE_METRICS;
+double OC_MODE_RD_WEIGHT_SATD[OC_LOGQ_BINS][3][2][OC_COMP_BINS];
+double OC_MODE_RD_WEIGHT_SAD[OC_LOGQ_BINS][3][2][OC_COMP_BINS];
+oc_mode_metrics OC_MODE_METRICS_SATD[OC_LOGQ_BINS-1][3][2][OC_COMP_BINS];
+oc_mode_metrics OC_MODE_METRICS_SAD[OC_LOGQ_BINS-1][3][2][OC_COMP_BINS];
+const char *OC_MODE_METRICS_FILENAME="modedec.stats";
+
+void oc_mode_metrics_add(oc_mode_metrics *_metrics,
+ double _w,int _s,int _q,int _r,double _d){
+ if(_metrics->w>0){
+ double ds;
+ double dq;
+ double dr;
+ double dd;
+ double ds2;
+ double dq2;
+ double s2;
+ double sq;
+ double q2;
+ double sr;
+ double qr;
+ double sd;
+ double qd;
+ double s2q;
+ double sq2;
+ double w;
+ double wa;
+ double rwa;
+ double rwa2;
+ double rwb;
+ double rwb2;
+ double rw2;
+ double rw3;
+ double rw4;
+ wa=_metrics->w;
+ ds=_s-_metrics->s/wa;
+ dq=_q-_metrics->q/wa;
+ dr=_r-_metrics->r/wa;
+ dd=_d-_metrics->d/wa;
+ ds2=ds*ds;
+ dq2=dq*dq;
+ s2=_metrics->s2;
+ sq=_metrics->sq;
+ q2=_metrics->q2;
+ sr=_metrics->sr;
+ qr=_metrics->qr;
+ sd=_metrics->sd;
+ qd=_metrics->qd;
+ s2q=_metrics->s2q;
+ sq2=_metrics->sq2;
+ w=wa+_w;
+ rwa=wa/w;
+ rwb=_w/w;
+ rwa2=rwa*rwa;
+ rwb2=rwb*rwb;
+ rw2=wa*rwb;
+ rw3=rw2*(rwa2-rwb2);
+ rw4=_w*rwa2*rwa2+wa*rwb2*rwb2;
+ _metrics->s2q2+=-2*(ds*sq2+dq*s2q)*rwb
+ +(ds2*q2+4*ds*dq*sq+dq2*s2)*rwb2+ds2*dq2*rw4;
+ _metrics->s2q+=(-2*ds*sq-dq*s2)*rwb+ds2*dq*rw3;
+ _metrics->sq2+=(-ds*q2-2*dq*sq)*rwb+ds*dq2*rw3;
+ _metrics->sqr+=(-ds*qr-dq*sr-dr*sq)*rwb+ds*dq*dr*rw3;
+ _metrics->sqd+=(-ds*qd-dq*sd-dd*sq)*rwb+ds*dq*dd*rw3;
+ _metrics->s2+=ds2*rw2;
+ _metrics->sq+=ds*dq*rw2;
+ _metrics->q2+=dq2*rw2;
+ _metrics->sr+=ds*dr*rw2;
+ _metrics->qr+=dq*dr*rw2;
+ _metrics->r2+=dr*dr*rw2;
+ _metrics->sd+=ds*dd*rw2;
+ _metrics->qd+=dq*dd*rw2;
+ _metrics->d2+=dd*dd*rw2;
+ }
+ _metrics->w+=_w;
+ _metrics->s+=_s*_w;
+ _metrics->q+=_q*_w;
+ _metrics->r+=_r*_w;
+ _metrics->d+=_d*_w;
+}
+
+void oc_mode_metrics_merge(oc_mode_metrics *_dst,
+ const oc_mode_metrics *_src,int _n){
+ int i;
+ /*Find a non-empty set of metrics.*/
+ for(i=0;i<_n&&_src[i].w==0;i++);
+ if(i>=_n){
+ memset(_dst,0,sizeof(*_dst));
+ return;
+ }
+ memcpy(_dst,_src+i,sizeof(*_dst));
+ /*And iterate over the remaining non-empty sets of metrics.*/
+ for(i++;i<_n;i++)if(_src[i].w!=0){
+ double ds;
+ double dq;
+ double dr;
+ double dd;
+ double ds2;
+ double dq2;
+ double s2a;
+ double s2b;
+ double sqa;
+ double sqb;
+ double q2a;
+ double q2b;
+ double sra;
+ double srb;
+ double qra;
+ double qrb;
+ double sda;
+ double sdb;
+ double qda;
+ double qdb;
+ double s2qa;
+ double s2qb;
+ double sq2a;
+ double sq2b;
+ double w;
+ double wa;
+ double wb;
+ double rwa;
+ double rwb;
+ double rwa2;
+ double rwb2;
+ double rw2;
+ double rw3;
+ double rw4;
+ wa=_dst->w;
+ wb=_src[i].w;
+ ds=_src[i].s/wb-_dst->s/wa;
+ dq=_src[i].q/wb-_dst->q/wa;
+ dr=_src[i].r/wb-_dst->r/wa;
+ dd=_src[i].d/wb-_dst->d/wa;
+ ds2=ds*ds;
+ dq2=dq*dq;
+ s2a=_dst->s2;
+ sqa=_dst->sq;
+ q2a=_dst->q2;
+ sra=_dst->sr;
+ qra=_dst->qr;
+ sda=_dst->sd;
+ qda=_dst->qd;
+ s2qa=_dst->s2q;
+ sq2a=_dst->sq2;
+ s2b=_src[i].s2;
+ sqb=_src[i].sq;
+ q2b=_src[i].q2;
+ srb=_src[i].sr;
+ qrb=_src[i].qr;
+ sdb=_src[i].sd;
+ qdb=_src[i].qd;
+ s2qb=_src[i].s2q;
+ sq2b=_src[i].sq2;
+ w=wa+wb;
+ if(w==0)rwa=rwb=0;
+ else{
+ rwa=wa/w;
+ rwb=wb/w;
+ }
+ rwa2=rwa*rwa;
+ rwb2=rwb*rwb;
+ rw2=wa*rwb;
+ rw3=rw2*(rwa2-rwb2);
+ rw4=wb*rwa2*rwa2+wa*rwb2*rwb2;
+ /*
+ (1,1,1) ->
+ (0,0,0)#
+ (1,0,0) C(1,1)*C(1,0)*C(1,0)-> d^{1,0,0}*(rwa*B_{0,1,1}-rwb*A_{0,1,1})
+ (0,1,0) C(1,0)*C(1,1)*C(1,0)-> d^{0,1,0}*(rwa*B_{1,0,1}-rwb*A_{1,0,1})
+ (0,0,1) C(1,0)*C(1,0)*C(1,1)-> d^{0,0,1}*(rwa*B_{1,1,0}-rwb*A_{1,1,0})
+ (1,1,0)*
+ (1,0,1)*
+ (0,1,1)*
+ (1,1,1) C(1,1)*C(1,1)*C(1,1)-> d^{1,1,1}*(rwa^3*wb-rwb^3*wa)
+ (2,1) ->
+ (0,0)#
+ (1,0) C(2,1)*C(1,1)->2*d^{1,0}*(rwa*B_{1,1}-rwb*A_{1,1})
+ (0,1) C(2,0)*C(1,1)-> d^{0,1}*(rwa*B_{2,0}-rwb*A_{2,0})
+ (2,0)*
+ (1,1)*
+ (2,1) C(2,2)*C(1,1)-> d^{2,1}*(rwa^3*wb-rwb^3*wa)
+ (2,2) ->
+ (0,0)#
+ (1,0) C(2,1)*C(2,0)->2*d^{1,0}*(rwa*B_{1,2}-rwb*A_{1,2})
+ (0,1) C(2,0)*C(2,1)->2*d^{0,1}*(rwa*B_{2,1}-rwb*A_{2,1})
+ (2,0) C(2,2)*C(2,0)-> d^{2,0}*(rwa^2*B_{0,2}+rwb^2*A_{0,2})
+ (1,1) C(2,1)*C(2,1)->4*d^{1,1}*(rwa^2*B_{1,1}+rwb^2*A_{1,1})
+ (0,2) C(2,0)*C(2,2)-> d^{0,2}*(rwa^2*B_{2,0}+rwb^2*A_{2,0})
+ (1,2)*
+ (2,1)*
+ (2,2) C(2,2)*C(2,2)*d^{2,2}*(rwa^4*wb+rwb^4*wa)
+ */
+ _dst->s2q2+=_src[i].s2q2+2*(ds*(rwa*sq2b-rwb*sq2a)+dq*(rwa*s2qb-rwb*s2qa))
+ +ds2*(rwa2*q2b+rwb2*q2a)+4*ds*dq*(rwa2*sqb+rwb2*sqa)
+ +dq2*(rwa2*s2b+rwb2*s2a)+ds2*dq2*rw4;
+ _dst->s2q+=_src[i].s2q+2*ds*(rwa*sqb-rwb*sqa)
+ +dq*(rwa*s2b-rwb*s2a)+ds2*dq*rw3;
+ _dst->sq2+=_src[i].sq2+ds*(rwa*q2b-rwb*q2a)
+ +2*dq*(rwa*sqb-rwb*sqa)+ds*dq2*rw3;
+ _dst->sqr+=_src[i].sqr+ds*(rwa*qrb-rwb*qra)+dq*(rwa*srb-rwb*sra)
+ +dr*(rwa*sqb-rwb*sqa)+ds*dq*dr*rw3;
+ _dst->sqd+=_src[i].sqd+ds*(rwa*qdb-rwb*qda)+dq*(rwa*sdb-rwb*sda)
+ +dd*(rwa*sqb-rwb*sqa)+ds*dq*dd*rw3;
+ _dst->s2+=_src[i].s2+ds2*rw2;
+ _dst->sq+=_src[i].sq+ds*dq*rw2;
+ _dst->q2+=_src[i].q2+dq2*rw2;
+ _dst->sr+=_src[i].sr+ds*dr*rw2;
+ _dst->qr+=_src[i].qr+dq*dr*rw2;
+ _dst->r2+=_src[i].r2+dr*dr*rw2;
+ _dst->sd+=_src[i].sd+ds*dd*rw2;
+ _dst->qd+=_src[i].qd+dq*dd*rw2;
+ _dst->d2+=_src[i].d2+dd*dd*rw2;
+ _dst->w+=_src[i].w;
+ _dst->s+=_src[i].s;
+ _dst->q+=_src[i].q;
+ _dst->r+=_src[i].r;
+ _dst->d+=_src[i].d;
+ }
+}
+
+/*Adjust a single corner of a set of metric bins to minimize the squared
+ prediction error of R and D.
+ Each bin is assumed to cover a quad like so:
+ (s0,q0) (s1,q0)
+ A----------B
+ | |
+ | |
+ | |
+ | |
+ C----------Z
+ (s0,q1) (s1,q1)
+ The values A, B, and C are fixed, and Z is the free parameter.
+ Then, for example, R_i is predicted via bilinear interpolation as
+ x_i=(s_i-s0)/(s1-s0)
+ y_i=(q_i-q0)/(q1-q0)
+ dRds1_i=A+(B-A)*x_i
+ dRds2_i=C+(Z-C)*x_i
+ R_i=dRds1_i+(dRds2_i-dRds1_i)*y_i
+ To find the Z that minimizes the squared prediction error over i, this can
+ be rewritten as
+ R_i-(A+(B-A)*x_i+(C-A)*y_i+(A-B-C)*x_i*y_i)=x_i*y_i*Z
+ Letting X={...,x_i*y_i,...}^T and
+ Y={...,R_i-(A+(B-A)*x_i+(C-A)*y_i+(A-B-C)*x_i*y_i),...}^T,
+ the optimal Z is given by Z=(X^T.Y)/(X^T.X).
+ Now, we need to compute these dot products without actually storing data for
+ each sample.
+ Starting with X^T.X, we have
+ X^T.X = sum(x_i^2*y_i^2) = sum((s_i-s0)^2*(q_i-q0)^2)/((s1-s0)^2*(q1-q0)^2).
+ Expanding the interior of the sum in a monomial basis of s_i and q_i gives
+ s0^2*q0^2 *(1)
+ -2*s0*q0^2*(s_i)
+ -2*s0^2*q0*(q_i)
+ +q0^2 *(s_i^2)
+ +4*s0*q0 *(s_i*q_i)
+ +s0^2 *(q_i^2)
+ -2*q0 *(s_i^2*q_i)
+ -2*s0 *(s_i*q_i^2)
+ +1 *(s_i^2*q_i^2).
+ However, computing things directly in this basis leads to gross numerical
+ errors, as most of the terms will have similar size and destructive
+ cancellation results.
+ A much better basis is the central (co-)moment basis:
+ {1,s_i-sbar,q_i-qbar,(s_i-sbar)^2,(s_i-sbar)*(q_i-qbar),(q_i-qbar)^2,
+ (s_i-sbar)^2*(q_i-qbar),(s_i-sbar)*(q_i-qbar)^2,(s_i-sbar)^2*(q_i-qbar)^2},
+ where sbar and qbar are the average s and q values over the bin,
+ respectively.
+ In that basis, letting ds=sbar-s0 and dq=qbar-q0, (s_i-s0)^2*(q_i-q0)^2 is
+ ds^2*dq^2*(1)
+ +dq^2 *((s_i-sbar)^2)
+ +4*ds*dq*((s_i-sbar)*(q_i-qbar))
+ +ds^2 *((q_i-qbar)^2)
+ +2*dq *((s_i-sbar)^2*(q_i-qbar))
+ +2*ds *((s_i-sbar)*(q_i-qbar)^2)
+ +1 *((s_i-sbar)^2*(q_i-qbar)^2).
+ With these expressions in the central (co-)moment bases, all we need to do
+ is compute sums over the (co-)moment terms, which can be done
+ incrementally (see oc_mode_metrics_add() and oc_mode_metrics_merge()),
+ with no need to store the individual samples.
+ Now, for X^T.Y, we have
+ X^T.Y = sum((R_i-A-((B-A)/(s1-s0))*(s_i-s0)-((C-A)/(q1-q0))*(q_i-q0)
+ -((A-B-C)/((s1-s0)*(q1-q0)))*(s_i-s0)*(q_i-q0))*(s_i-s0)*(q_i-q0))/
+ ((s1-s0)*(q1-q0)),
+ or, rewriting the constants to simplify notation,
+ X^T.Y = sum((C0+C1*(s_i-s0)+C2*(q_i-q0)
+ +C3*(s_i-s0)*(q_i-q0)+R_i)*(s_i-s0)*(q_i-q0))/((s1-s0)*(q1-q0)).
+ Again, converting to the central (co-)moment basis, the interior of the
+ above sum is
+ ds*dq*(rbar+C0+C1*ds+C2*dq+C3*ds*dq) *(1)
+ +(C1*dq+C3*dq^2) *((s_i-sbar)^2)
+ +(rbar+C0+2*C1*ds+2*C2*dq+4*C3*ds*dq)*((s_i-sbar)*(q_i-qbar))
+ +(C2*ds+C3*ds^2) *((q_i-qbar)^2)
+ +dq *((s_i-sbar)*(r_i-rbar))
+ +ds *((q_i-qbar)*(r_i-rbar))
+ +(C1+2*C3*dq) *((s_i-sbar)^2*(q_i-qbar))
+ +(C2+2*C3*ds) *((s_i-sbar)*(q_i-qbar)^2)
+ +1 *((s_i-sbar)*(q_i-qbar)*(r_i-rbar))
+ +C3 *((s_i-sbar)^2*(q_i-qbar)^2).
+ You might think it would be easier (if perhaps slightly less robust) to
+ accumulate terms directly around s0 and q0.
+ However, we update each corner of the bins in turn, so we would have to
+ change basis to move the sums from corner to corner anyway.*/
+double oc_mode_metrics_solve(double *_r,double *_d,
+ const oc_mode_metrics *_metrics,const int *_s0,const int *_s1,
+ const int *_q0,const int *_q1,
+ const double *_ra,const double *_rb,const double *_rc,
+ const double *_da,const double *_db,const double *_dc,int _n){
+ double xx;
+ double rxy;
+ double dxy;
+ double wt;
+ int i;
+ xx=rxy=dxy=wt=0;
+ for(i=0;i<_n;i++)if(_metrics[i].w>0){
+ double s10;
+ double q10;
+ double sq10;
+ double ds;
+ double dq;
+ double ds2;
+ double dq2;
+ double r;
+ double d;
+ double s2;
+ double sq;
+ double q2;
+ double sr;
+ double qr;
+ double sd;
+ double qd;
+ double s2q;
+ double sq2;
+ double sqr;
+ double sqd;
+ double s2q2;
+ double c0;
+ double c1;
+ double c2;
+ double c3;
+ double w;
+ w=_metrics[i].w;
+ wt+=w;
+ s10=_s1[i]-_s0[i];
+ q10=_q1[i]-_q0[i];
+ sq10=s10*q10;
+ ds=_metrics[i].s/w-_s0[i];
+ dq=_metrics[i].q/w-_q0[i];
+ ds2=ds*ds;
+ dq2=dq*dq;
+ s2=_metrics[i].s2;
+ sq=_metrics[i].sq;
+ q2=_metrics[i].q2;
+ s2q=_metrics[i].s2q;
+ sq2=_metrics[i].sq2;
+ s2q2=_metrics[i].s2q2;
+ xx+=(dq2*(ds2*w+s2)+4*ds*dq*sq+ds2*q2+2*(dq*s2q+ds*sq2)+s2q2)/(sq10*sq10);
+ r=_metrics[i].r/w;
+ sr=_metrics[i].sr;
+ qr=_metrics[i].qr;
+ sqr=_metrics[i].sqr;
+ c0=-_ra[i];
+ c1=-(_rb[i]-_ra[i])/s10;
+ c2=-(_rc[i]-_ra[i])/q10;
+ c3=-(_ra[i]-_rb[i]-_rc[i])/sq10;
+ rxy+=(ds*dq*(r+c0+c1*ds+c2*dq+c3*ds*dq)*w+(c1*dq+c3*dq2)*s2
+ +(r+c0+2*(c1*ds+(c2+2*c3*ds)*dq))*sq+(c2*ds+c3*ds2)*q2+dq*sr+ds*qr
+ +(c1+2*c3*dq)*s2q+(c2+2*c3*ds)*sq2+sqr+c3*s2q2)/sq10;
+ d=_metrics[i].d/w;
+ sd=_metrics[i].sd;
+ qd=_metrics[i].qd;
+ sqd=_metrics[i].sqd;
+ c0=-_da[i];
+ c1=-(_db[i]-_da[i])/s10;
+ c2=-(_dc[i]-_da[i])/q10;
+ c3=-(_da[i]-_db[i]-_dc[i])/sq10;
+ dxy+=(ds*dq*(d+c0+c1*ds+c2*dq+c3*ds*dq)*w+(c1*dq+c3*dq2)*s2
+ +(d+c0+2*(c1*ds+(c2+2*c3*ds)*dq))*sq+(c2*ds+c3*ds2)*q2+dq*sd+ds*qd
+ +(c1+2*c3*dq)*s2q+(c2+2*c3*ds)*sq2+sqd+c3*s2q2)/sq10;
+ }
+ if(xx>1E-3){
+ *_r=rxy/xx;
+ *_d=dxy/xx;
+ }
+ else{
+ *_r=0;
+ *_d=0;
+ }
+ return wt;
+}
+
+/*Compile collected SATD/logq/rate/RMSE metrics into a form that's immediately
+ useful for mode decision.*/
+void oc_mode_metrics_update(oc_mode_metrics (*_metrics)[3][2][OC_COMP_BINS],
+ int _niters_min,int _reweight,oc_mode_rd (*_table)[3][2][OC_COMP_BINS],
+ int _shift,double (*_weight)[3][2][OC_COMP_BINS]){
+ int niters;
+ int prevdr;
+ int prevdd;
+ int dr;
+ int dd;
+ int pli;
+ int qti;
+ int qi;
+ int si;
+ dd=dr=INT_MAX;
+ niters=0;
+ /*The encoder interpolates rate and RMSE terms bilinearly from an
+ OC_LOGQ_BINS by OC_COMP_BINS grid of sample points in _table.
+ To find the sample values at the grid points that minimize the total
+ squared prediction error actually requires solving a relatively sparse
+ linear system with a number of variables equal to the number of grid
+ points.
+ Instead of writing a general sparse linear system solver, we just use
+    Gauss-Seidel iteration, i.e., we update one grid point at a time until
+ they stop changing.*/
+ do{
+ prevdr=dr;
+ prevdd=dd;
+ dd=dr=0;
+ for(pli=0;pli<3;pli++){
+ for(qti=0;qti<2;qti++){
+ for(qi=0;qi<OC_LOGQ_BINS;qi++){
+ for(si=0;si<OC_COMP_BINS;si++){
+ oc_mode_metrics m[4];
+ int s0[4];
+ int s1[4];
+ int q0[4];
+ int q1[4];
+ double ra[4];
+ double rb[4];
+ double rc[4];
+ double da[4];
+ double db[4];
+ double dc[4];
+ double r;
+ double d;
+ int rate;
+ int rmse;
+ int ds;
+ int n;
+ n=0;
+ /*Collect the statistics for the (up to) four bins grid point
+ (si,qi) touches.*/
+ if(qi>0&&si>0){
+ q0[n]=OC_MODE_LOGQ[qi-1][pli][qti];
+ q1[n]=OC_MODE_LOGQ[qi][pli][qti];
+ s0[n]=si-1<<_shift;
+ s1[n]=si<<_shift;
+ ra[n]=ldexp(_table[qi-1][pli][qti][si-1].rate,-OC_BIT_SCALE);
+ da[n]=ldexp(_table[qi-1][pli][qti][si-1].rmse,-OC_RMSE_SCALE);
+ rb[n]=ldexp(_table[qi-1][pli][qti][si].rate,-OC_BIT_SCALE);
+ db[n]=ldexp(_table[qi-1][pli][qti][si].rmse,-OC_RMSE_SCALE);
+ rc[n]=ldexp(_table[qi][pli][qti][si-1].rate,-OC_BIT_SCALE);
+ dc[n]=ldexp(_table[qi][pli][qti][si-1].rmse,-OC_RMSE_SCALE);
+ *(m+n++)=*(_metrics[qi-1][pli][qti]+si-1);
+ }
+ if(qi>0){
+ ds=si+1<OC_COMP_BINS?1:-1;
+ q0[n]=OC_MODE_LOGQ[qi-1][pli][qti];
+ q1[n]=OC_MODE_LOGQ[qi][pli][qti];
+ s0[n]=si+ds<<_shift;
+ s1[n]=si<<_shift;
+ ra[n]=ldexp(_table[qi-1][pli][qti][si+ds].rate,-OC_BIT_SCALE);
+ da[n]=
+ ldexp(_table[qi-1][pli][qti][si+ds].rmse,-OC_RMSE_SCALE);
+ rb[n]=ldexp(_table[qi-1][pli][qti][si].rate,-OC_BIT_SCALE);
+ db[n]=ldexp(_table[qi-1][pli][qti][si].rmse,-OC_RMSE_SCALE);
+ rc[n]=ldexp(_table[qi][pli][qti][si+ds].rate,-OC_BIT_SCALE);
+ dc[n]=ldexp(_table[qi][pli][qti][si+ds].rmse,-OC_RMSE_SCALE);
+ *(m+n++)=*(_metrics[qi-1][pli][qti]+si);
+ }
+ if(qi+1<OC_LOGQ_BINS&&si>0){
+ q0[n]=OC_MODE_LOGQ[qi+1][pli][qti];
+ q1[n]=OC_MODE_LOGQ[qi][pli][qti];
+ s0[n]=si-1<<_shift;
+ s1[n]=si<<_shift;
+ ra[n]=ldexp(_table[qi+1][pli][qti][si-1].rate,-OC_BIT_SCALE);
+ da[n]=ldexp(_table[qi+1][pli][qti][si-1].rmse,-OC_RMSE_SCALE);
+ rb[n]=ldexp(_table[qi+1][pli][qti][si].rate,-OC_BIT_SCALE);
+ db[n]=ldexp(_table[qi+1][pli][qti][si].rmse,-OC_RMSE_SCALE);
+ rc[n]=ldexp(_table[qi][pli][qti][si-1].rate,-OC_BIT_SCALE);
+ dc[n]=ldexp(_table[qi][pli][qti][si-1].rmse,-OC_RMSE_SCALE);
+ *(m+n++)=*(_metrics[qi][pli][qti]+si-1);
+ }
+ if(qi+1<OC_LOGQ_BINS){
+ ds=si+1<OC_COMP_BINS?1:-1;
+ q0[n]=OC_MODE_LOGQ[qi+1][pli][qti];
+ q1[n]=OC_MODE_LOGQ[qi][pli][qti];
+ s0[n]=si+ds<<_shift;
+ s1[n]=si<<_shift;
+ ra[n]=ldexp(_table[qi+1][pli][qti][si+ds].rate,-OC_BIT_SCALE);
+ da[n]=
+ ldexp(_table[qi+1][pli][qti][si+ds].rmse,-OC_RMSE_SCALE);
+ rb[n]=ldexp(_table[qi+1][pli][qti][si].rate,-OC_BIT_SCALE);
+ db[n]=ldexp(_table[qi+1][pli][qti][si].rmse,-OC_RMSE_SCALE);
+ rc[n]=ldexp(_table[qi][pli][qti][si+ds].rate,-OC_BIT_SCALE);
+ dc[n]=ldexp(_table[qi][pli][qti][si+ds].rmse,-OC_RMSE_SCALE);
+ *(m+n++)=*(_metrics[qi][pli][qti]+si);
+ }
+ /*On the first pass, initialize with a simple weighted average of
+ the neighboring bins.*/
+ if(!OC_HAS_MODE_METRICS&&niters==0){
+ double w;
+ w=r=d=0;
+ while(n-->0){
+ w+=m[n].w;
+ r+=m[n].r;
+ d+=m[n].d;
+ }
+ r=w>1E-3?r/w:0;
+ d=w>1E-3?d/w:0;
+ _weight[qi][pli][qti][si]=w;
+ }
+ else{
+ /*Update the grid point and save the weight for later.*/
+ _weight[qi][pli][qti][si]=
+ oc_mode_metrics_solve(&r,&d,m,s0,s1,q0,q1,ra,rb,rc,da,db,dc,n);
+ }
+ rate=OC_CLAMPI(-32768,(int)(ldexp(r,OC_BIT_SCALE)+0.5),32767);
+ rmse=OC_CLAMPI(-32768,(int)(ldexp(d,OC_RMSE_SCALE)+0.5),32767);
+ dr+=abs(rate-_table[qi][pli][qti][si].rate);
+ dd+=abs(rmse-_table[qi][pli][qti][si].rmse);
+ _table[qi][pli][qti][si].rate=(ogg_int16_t)rate;
+ _table[qi][pli][qti][si].rmse=(ogg_int16_t)rmse;
+ }
+ }
+ }
+ }
+ }
+ /*After a fixed number of initial iterations, only iterate so long as the
+ total change is decreasing.
+ This ensures we don't oscillate forever, which is a danger, as all of our
+ results are rounded fairly coarsely.*/
+ while((dr>0||dd>0)&&(niters++<_niters_min||(dr<prevdr&&dd<prevdd)));
+ if(_reweight){
+ /*Now, reduce the values of the optimal solution until we get enough
+ samples in each bin to overcome the constant OC_ZWEIGHT factor.
+ This encourages sampling under-populated bins and prevents a single large
+ sample early on from discouraging coding in that bin ever again.*/
+ for(pli=0;pli<3;pli++){
+ for(qti=0;qti<2;qti++){
+ for(qi=0;qi<OC_LOGQ_BINS;qi++){
+ for(si=0;si<OC_COMP_BINS;si++){
+ double wt;
+ wt=_weight[qi][pli][qti][si];
+ wt/=OC_ZWEIGHT+wt;
+ _table[qi][pli][qti][si].rate=(ogg_int16_t)
+ (_table[qi][pli][qti][si].rate*wt+0.5);
+ _table[qi][pli][qti][si].rmse=(ogg_int16_t)
+ (_table[qi][pli][qti][si].rmse*wt+0.5);
+ }
+ }
+ }
+ }
+ }
+}
+
+/*Dump the in-memory mode metrics to a file.
+ Note this data format isn't portable between different platforms.*/
+void oc_mode_metrics_dump(void){
+ FILE *fmetrics;
+ fmetrics=fopen(OC_MODE_METRICS_FILENAME,"wb");
+ if(fmetrics!=NULL){
+ (void)fwrite(OC_MODE_LOGQ,sizeof(OC_MODE_LOGQ),1,fmetrics);
+ (void)fwrite(OC_MODE_METRICS_SATD,sizeof(OC_MODE_METRICS_SATD),1,fmetrics);
+ (void)fwrite(OC_MODE_METRICS_SAD,sizeof(OC_MODE_METRICS_SAD),1,fmetrics);
+ fclose(fmetrics);
+ }
+}
+
+void oc_mode_metrics_print_rd(FILE *_fout,const char *_table_name,
+#if !defined(OC_COLLECT_METRICS)
+ const oc_mode_rd (*_mode_rd_table)[3][2][OC_COMP_BINS]){
+#else
+ oc_mode_rd (*_mode_rd_table)[3][2][OC_COMP_BINS]){
+#endif
+ int qii;
+ fprintf(_fout,
+ "# if !defined(OC_COLLECT_METRICS)\n"
+ "static const\n"
+ "# endif\n"
+ "oc_mode_rd %s[OC_LOGQ_BINS][3][2][OC_COMP_BINS]={\n",_table_name);
+ for(qii=0;qii<OC_LOGQ_BINS;qii++){
+ int pli;
+ fprintf(_fout," {\n");
+ for(pli=0;pli<3;pli++){
+ int qti;
+ fprintf(_fout," {\n");
+ for(qti=0;qti<2;qti++){
+ int bin;
+ int qi;
+ static const char *pl_names[3]={"Y'","Cb","Cr"};
+ static const char *qti_names[2]={"INTRA","INTER"};
+ qi=(63*qii+(OC_LOGQ_BINS-1>>1))/(OC_LOGQ_BINS-1);
+ fprintf(_fout," /*%s qi=%i %s*/\n",
+ pl_names[pli],qi,qti_names[qti]);
+ fprintf(_fout," {\n");
+ fprintf(_fout," ");
+ for(bin=0;bin<OC_COMP_BINS;bin++){
+ if(bin&&!(bin&0x3))fprintf(_fout,"\n ");
+ fprintf(_fout,"{%5i,%5i}",
+ _mode_rd_table[qii][pli][qti][bin].rate,
+ _mode_rd_table[qii][pli][qti][bin].rmse);
+ if(bin+1<OC_COMP_BINS)fprintf(_fout,",");
+ }
+ fprintf(_fout,"\n }");
+ if(qti<1)fprintf(_fout,",");
+ fprintf(_fout,"\n");
+ }
+ fprintf(_fout," }");
+ if(pli<2)fprintf(_fout,",");
+ fprintf(_fout,"\n");
+ }
+ fprintf(_fout," }");
+ if(qii+1<OC_LOGQ_BINS)fprintf(_fout,",");
+ fprintf(_fout,"\n");
+ }
+ fprintf(_fout,
+ "};\n"
+ "\n");
+}
+
+void oc_mode_metrics_print(FILE *_fout){
+ int qii;
+ fprintf(_fout,
+ "/*File generated by libtheora with OC_COLLECT_METRICS"
+ " defined at compile time.*/\n"
+ "#if !defined(_modedec_H)\n"
+ "# define _modedec_H (1)\n"
+ "# include \"encint.h\"\n"
+ "\n"
+ "\n"
+ "\n"
+ "/*The log of the average quantizer for each of the OC_MODE_RD table rows\n"
+ " (e.g., for the represented qi's, and each pli and qti), in Q10 format.\n"
+ " The actual statistics used by the encoder will be interpolated from\n"
+ " that table based on log_plq for the actual quantization matrix used.*/\n"
+ "# if !defined(OC_COLLECT_METRICS)\n"
+ "static const\n"
+ "# endif\n"
+ "ogg_int16_t OC_MODE_LOGQ[OC_LOGQ_BINS][3][2]={\n");
+ for(qii=0;qii<OC_LOGQ_BINS;qii++){
+ fprintf(_fout," { {0x%04X,0x%04X},{0x%04X,0x%04X},{0x%04X,0x%04X} }%s\n",
+ OC_MODE_LOGQ[qii][0][0],OC_MODE_LOGQ[qii][0][1],OC_MODE_LOGQ[qii][1][0],
+ OC_MODE_LOGQ[qii][1][1],OC_MODE_LOGQ[qii][2][0],OC_MODE_LOGQ[qii][2][1],
+ qii+1<OC_LOGQ_BINS?",":"");
+ }
+ fprintf(_fout,
+ "};\n"
+ "\n");
+ oc_mode_metrics_print_rd(_fout,"OC_MODE_RD_SATD",OC_MODE_RD_SATD);
+ oc_mode_metrics_print_rd(_fout,"OC_MODE_RD_SAD",OC_MODE_RD_SAD);
+ fprintf(_fout,
+ "#endif\n");
+}
+
+
+# if !defined(OC_COLLECT_NO_ENC_FUNCS)
+void oc_enc_mode_metrics_load(oc_enc_ctx *_enc){
+ oc_restore_fpu(&_enc->state);
+ /*Load any existing mode metrics if we haven't already.*/
+ if(!OC_HAS_MODE_METRICS){
+ FILE *fmetrics;
+ memset(OC_MODE_METRICS_SATD,0,sizeof(OC_MODE_METRICS_SATD));
+ memset(OC_MODE_METRICS_SAD,0,sizeof(OC_MODE_METRICS_SAD));
+ fmetrics=fopen(OC_MODE_METRICS_FILENAME,"rb");
+ if(fmetrics!=NULL){
+      /*Read in the binary structures as written by oc_mode_metrics_dump().
+ Note this format isn't portable between different platforms.*/
+ (void)fread(OC_MODE_LOGQ,sizeof(OC_MODE_LOGQ),1,fmetrics);
+ (void)fread(OC_MODE_METRICS_SATD,sizeof(OC_MODE_METRICS_SATD),1,fmetrics);
+ (void)fread(OC_MODE_METRICS_SAD,sizeof(OC_MODE_METRICS_SAD),1,fmetrics);
+ fclose(fmetrics);
+ }
+ else{
+ int qii;
+ int qi;
+ int pli;
+ int qti;
+ for(qii=0;qii<OC_LOGQ_BINS;qii++){
+ qi=(63*qii+(OC_LOGQ_BINS-1>>1))/(OC_LOGQ_BINS-1);
+ for(pli=0;pli<3;pli++)for(qti=0;qti<2;qti++){
+ OC_MODE_LOGQ[qii][pli][qti]=_enc->log_plq[qi][pli][qti];
+ }
+ }
+ }
+ oc_mode_metrics_update(OC_MODE_METRICS_SATD,100,1,
+ OC_MODE_RD_SATD,OC_SATD_SHIFT,OC_MODE_RD_WEIGHT_SATD);
+ oc_mode_metrics_update(OC_MODE_METRICS_SAD,100,1,
+ OC_MODE_RD_SAD,OC_SAD_SHIFT,OC_MODE_RD_WEIGHT_SAD);
+ OC_HAS_MODE_METRICS=1;
+ }
+}
+
+/*The following token skipping code used to also be used in the decoder (and
+ even at one point other places in the encoder).
+ However, it was obsoleted by other optimizations, and is now only used here.
+ It has been moved here to avoid generating the code when it's not needed.*/
+
+/*Determines the number of blocks or coefficients to be skipped for a given
+ token value.
+ _token: The token value to skip.
+ _extra_bits: The extra bits attached to this token.
+ Return: A positive value indicates that number of coefficients are to be
+ skipped in the current block.
+ Otherwise, the negative of the return value indicates that number of
+ blocks are to be ended.*/
+typedef ptrdiff_t (*oc_token_skip_func)(int _token,int _extra_bits);
+
+/*Handles the simple end of block tokens.*/
+static ptrdiff_t oc_token_skip_eob(int _token,int _extra_bits){
+ int nblocks_adjust;
+ nblocks_adjust=OC_UNIBBLE_TABLE32(0,1,2,3,7,15,0,0,_token)+1;
+ return -_extra_bits-nblocks_adjust;
+}
+
+/*The last EOB token has a special case, where an EOB run of size zero ends all
+ the remaining blocks in the frame.*/
+static ptrdiff_t oc_token_skip_eob6(int _token,int _extra_bits){
+ /*Note: We want to return -PTRDIFF_MAX, but that requires C99, which is not
+ yet available everywhere; this should be equivalent.*/
+ if(!_extra_bits)return -(~(size_t)0>>1);
+ return -_extra_bits;
+}
+
+/*Handles the pure zero run tokens.*/
+static ptrdiff_t oc_token_skip_zrl(int _token,int _extra_bits){
+ return _extra_bits+1;
+}
+
+/*Handles a normal coefficient value token.*/
+static ptrdiff_t oc_token_skip_val(void){
+ return 1;
+}
+
+/*Handles a category 1A zero run/coefficient value combo token.*/
+static ptrdiff_t oc_token_skip_run_cat1a(int _token){
+ return _token-OC_DCT_RUN_CAT1A+2;
+}
+
+/*Handles category 1b, 1c, 2a, and 2b zero run/coefficient value combo tokens.*/
+static ptrdiff_t oc_token_skip_run(int _token,int _extra_bits){
+ int run_cati;
+ int ncoeffs_mask;
+ int ncoeffs_adjust;
+ run_cati=_token-OC_DCT_RUN_CAT1B;
+ ncoeffs_mask=OC_BYTE_TABLE32(3,7,0,1,run_cati);
+ ncoeffs_adjust=OC_BYTE_TABLE32(7,11,2,3,run_cati);
+ return (_extra_bits&ncoeffs_mask)+ncoeffs_adjust;
+}
+
+/*A jump table for computing the number of coefficients or blocks to skip for
+ a given token value.
+ This reduces all the conditional branches, etc., needed to parse these token
+ values down to one indirect jump.*/
+static const oc_token_skip_func OC_TOKEN_SKIP_TABLE[TH_NDCT_TOKENS]={
+ oc_token_skip_eob,
+ oc_token_skip_eob,
+ oc_token_skip_eob,
+ oc_token_skip_eob,
+ oc_token_skip_eob,
+ oc_token_skip_eob,
+ oc_token_skip_eob6,
+ oc_token_skip_zrl,
+ oc_token_skip_zrl,
+ (oc_token_skip_func)oc_token_skip_val,
+ (oc_token_skip_func)oc_token_skip_val,
+ (oc_token_skip_func)oc_token_skip_val,
+ (oc_token_skip_func)oc_token_skip_val,
+ (oc_token_skip_func)oc_token_skip_val,
+ (oc_token_skip_func)oc_token_skip_val,
+ (oc_token_skip_func)oc_token_skip_val,
+ (oc_token_skip_func)oc_token_skip_val,
+ (oc_token_skip_func)oc_token_skip_val,
+ (oc_token_skip_func)oc_token_skip_val,
+ (oc_token_skip_func)oc_token_skip_val,
+ (oc_token_skip_func)oc_token_skip_val,
+ (oc_token_skip_func)oc_token_skip_val,
+ (oc_token_skip_func)oc_token_skip_val,
+ (oc_token_skip_func)oc_token_skip_run_cat1a,
+ (oc_token_skip_func)oc_token_skip_run_cat1a,
+ (oc_token_skip_func)oc_token_skip_run_cat1a,
+ (oc_token_skip_func)oc_token_skip_run_cat1a,
+ (oc_token_skip_func)oc_token_skip_run_cat1a,
+ oc_token_skip_run,
+ oc_token_skip_run,
+ oc_token_skip_run,
+ oc_token_skip_run
+};
+
+/*Determines the number of blocks or coefficients to be skipped for a given
+ token value.
+ _token: The token value to skip.
+ _extra_bits: The extra bits attached to this token.
+ Return: A positive value indicates that number of coefficients are to be
+ skipped in the current block.
+ Otherwise, the negative of the return value indicates that number of
+ blocks are to be ended.
+ 0 will never be returned, so that at least one coefficient in one
+ block will always be decoded for every token.*/
+static ptrdiff_t oc_dct_token_skip(int _token,int _extra_bits){
+ return (*OC_TOKEN_SKIP_TABLE[_token])(_token,_extra_bits);
+}
+
+
+void oc_enc_mode_metrics_collect(oc_enc_ctx *_enc){
+ static const unsigned char OC_ZZI_HUFF_OFFSET[64]={
+ 0,16,16,16,16,16,32,32,
+ 32,32,32,32,32,32,32,48,
+ 48,48,48,48,48,48,48,48,
+ 48,48,48,48,64,64,64,64,
+ 64,64,64,64,64,64,64,64,
+ 64,64,64,64,64,64,64,64,
+ 64,64,64,64,64,64,64,64
+ };
+ const oc_fragment *frags;
+ const unsigned *frag_sad;
+ const unsigned *frag_satd;
+ const unsigned *frag_ssd;
+ const ptrdiff_t *coded_fragis;
+ ptrdiff_t ncoded_fragis;
+ ptrdiff_t fragii;
+ double fragw;
+ int modelines[3][3][2];
+ int qti;
+ int qii;
+ int qi;
+ int pli;
+ int zzi;
+ int token;
+ int eb;
+ oc_restore_fpu(&_enc->state);
+ /*Figure out which metric bins to use for this frame's quantizers.*/
+ for(qii=0;qii<_enc->state.nqis;qii++){
+ for(pli=0;pli<3;pli++){
+ for(qti=0;qti<2;qti++){
+ int log_plq;
+ int modeline;
+ log_plq=_enc->log_plq[_enc->state.qis[qii]][pli][qti];
+ for(modeline=0;modeline<OC_LOGQ_BINS-1&&
+ OC_MODE_LOGQ[modeline+1][pli][qti]>log_plq;modeline++);
+ modelines[qii][pli][qti]=modeline;
+ }
+ }
+ }
+ qti=_enc->state.frame_type;
+ frags=_enc->state.frags;
+ frag_sad=_enc->frag_sad;
+ frag_satd=_enc->frag_satd;
+ frag_ssd=_enc->frag_ssd;
+ coded_fragis=_enc->state.coded_fragis;
+ ncoded_fragis=fragii=0;
+ /*Weight the fragments by the inverse frame size; this prevents HD content
+ from dominating the statistics.*/
+ fragw=1.0/_enc->state.nfrags;
+ for(pli=0;pli<3;pli++){
+ ptrdiff_t ti[64];
+ int eob_token[64];
+ int eob_run[64];
+ /*Set up token indices and eob run counts.
+ We don't bother trying to figure out the real cost of the runs that span
+ coefficients; instead we use the costs that were available when R-D
+ token optimization was done.*/
+ for(zzi=0;zzi<64;zzi++){
+ ti[zzi]=_enc->dct_token_offs[pli][zzi];
+ if(ti[zzi]>0){
+ token=_enc->dct_tokens[pli][zzi][0];
+ eb=_enc->extra_bits[pli][zzi][0];
+ eob_token[zzi]=token;
+ eob_run[zzi]=-oc_dct_token_skip(token,eb);
+ }
+ else{
+ eob_token[zzi]=OC_NDCT_EOB_TOKEN_MAX;
+ eob_run[zzi]=0;
+ }
+ }
+ /*Scan the list of coded fragments for this plane.*/
+ ncoded_fragis+=_enc->state.ncoded_fragis[pli];
+ for(;fragii<ncoded_fragis;fragii++){
+ ptrdiff_t fragi;
+ int frag_bits;
+ int huffi;
+ int skip;
+ int mb_mode;
+ unsigned sad;
+ unsigned satd;
+ double sqrt_ssd;
+ int bin;
+ int qtj;
+ fragi=coded_fragis[fragii];
+ frag_bits=0;
+ for(zzi=0;zzi<64;){
+ if(eob_run[zzi]>0){
+ /*We've reached the end of the block.*/
+ eob_run[zzi]--;
+ break;
+ }
+ huffi=_enc->huff_idxs[qti][zzi>0][pli+1>>1]
+ +OC_ZZI_HUFF_OFFSET[zzi];
+ if(eob_token[zzi]<OC_NDCT_EOB_TOKEN_MAX){
+ /*This token caused an EOB run to be flushed.
+ Therefore it gets the bits associated with it.*/
+ frag_bits+=_enc->huff_codes[huffi][eob_token[zzi]].nbits
+ +OC_DCT_TOKEN_EXTRA_BITS[eob_token[zzi]];
+ eob_token[zzi]=OC_NDCT_EOB_TOKEN_MAX;
+ }
+ token=_enc->dct_tokens[pli][zzi][ti[zzi]];
+ eb=_enc->extra_bits[pli][zzi][ti[zzi]];
+ ti[zzi]++;
+ skip=oc_dct_token_skip(token,eb);
+ if(skip<0){
+ eob_token[zzi]=token;
+ eob_run[zzi]=-skip;
+ }
+ else{
+ /*A regular DCT value token; accumulate the bits for it.*/
+ frag_bits+=_enc->huff_codes[huffi][token].nbits
+ +OC_DCT_TOKEN_EXTRA_BITS[token];
+ zzi+=skip;
+ }
+ }
+ mb_mode=frags[fragi].mb_mode;
+ qii=frags[fragi].qii;
+ qi=_enc->state.qis[qii];
+ sad=frag_sad[fragi]<<(pli+1&2);
+ satd=frag_satd[fragi]<<(pli+1&2);
+ sqrt_ssd=sqrt(frag_ssd[fragi]);
+ qtj=mb_mode!=OC_MODE_INTRA;
+ /*Accumulate statistics.
+ The rate (frag_bits) and RMSE (sqrt(frag_ssd)) are not scaled by
+ OC_BIT_SCALE and OC_RMSE_SCALE; this lets us change the scale factor
+ yet still use old data.*/
+ bin=OC_MINI(satd>>OC_SATD_SHIFT,OC_COMP_BINS-1);
+ oc_mode_metrics_add(
+ OC_MODE_METRICS_SATD[modelines[qii][pli][qtj]][pli][qtj]+bin,
+ fragw,satd,_enc->log_plq[qi][pli][qtj],frag_bits,sqrt_ssd);
+ bin=OC_MINI(sad>>OC_SAD_SHIFT,OC_COMP_BINS-1);
+ oc_mode_metrics_add(
+ OC_MODE_METRICS_SAD[modelines[qii][pli][qtj]][pli][qtj]+bin,
+ fragw,sad,_enc->log_plq[qi][pli][qtj],frag_bits,sqrt_ssd);
+ }
+ }
+ /*Update global SA(T)D/logq/rate/RMSE estimation matrix.*/
+ oc_mode_metrics_update(OC_MODE_METRICS_SATD,4,1,
+ OC_MODE_RD_SATD,OC_SATD_SHIFT,OC_MODE_RD_WEIGHT_SATD);
+ oc_mode_metrics_update(OC_MODE_METRICS_SAD,4,1,
+ OC_MODE_RD_SAD,OC_SAD_SHIFT,OC_MODE_RD_WEIGHT_SAD);
+}
+# endif
+
+#endif
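The comments in oc_mode_metrics_update() above describe the encoder interpolating rate and RMSE bilinearly from an OC_LOGQ_BINS by OC_COMP_BINS grid, using the quad of corner values A, B, C, Z laid out in the collect.c derivation. A minimal sketch of that interpolation; the helper name is invented for illustration and is not part of the patch:

/*Bilinear interpolation over one grid cell: interpolate along s at each of
   the two q rows, then along q between those results.*/
static double oc_bilerp(double _a,double _b,double _c,double _z,
 double _s,double _s0,double _s1,double _q,double _q0,double _q1){
  double x;
  double y;
  double top;
  double bottom;
  x=(_s-_s0)/(_s1-_s0);
  y=(_q-_q0)/(_q1-_q0);
  top=_a+(_b-_a)*x;     /*Row at q0: between A and B.*/
  bottom=_c+(_z-_c)*x;  /*Row at q1: between C and Z.*/
  return top+(bottom-top)*y;
}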
diff --git a/thirdparty/libtheora/collect.h b/thirdparty/libtheora/collect.h
new file mode 100644
index 0000000000..9458b84e3f
--- /dev/null
+++ b/thirdparty/libtheora/collect.h
@@ -0,0 +1,109 @@
+/********************************************************************
+ * *
+ * THIS FILE IS PART OF THE OggTheora SOFTWARE CODEC SOURCE CODE. *
+ * USE, DISTRIBUTION AND REPRODUCTION OF THIS LIBRARY SOURCE IS *
+ * GOVERNED BY A BSD-STYLE SOURCE LICENSE INCLUDED WITH THIS SOURCE *
+ * IN 'COPYING'. PLEASE READ THESE TERMS BEFORE DISTRIBUTING. *
+ * *
+ * THE Theora SOURCE CODE IS COPYRIGHT (C) 2002-2009 *
+ * by the Xiph.Org Foundation http://www.xiph.org/ *
+ * *
+ ********************************************************************
+
+ function: mode selection code
+ last mod: $Id$
+
+ ********************************************************************/
+#if !defined(_collect_H)
+# define _collect_H (1)
+# include "encint.h"
+# if defined(OC_COLLECT_METRICS)
+# include <stdio.h>
+
+
+
+typedef struct oc_mode_metrics oc_mode_metrics;
+
+
+
+/**Sets the file name to load/store mode metrics from/to.
+ * The file name string is stored by reference, and so must be valid for the
+ * lifetime of the encoder.
+ * Mode metric collection uses global tables; do not attempt to perform
+ * multiple collections at once.
+ * \param[in] _buf <tt>char[]</tt> The file name.
+ * \retval TH_EIMPL Not supported by this implementation.*/
+#define TH_ENCCTL_SET_METRICS_FILE (0x8000)
+
+
+
+/*Accumulates various weighted sums of the measurements.
+ w -> weight
+ s -> SATD
+ q -> log quantizer
+ r -> rate (in bits)
+ d -> RMSE
+ All of the single letters correspond to direct, weighted sums, e.g.,
+ w=sum(w_i), s=sum(s_i*w_i), etc.
+ The others correspond to central moments (or co-moments) of the given order,
+ e.g., sq=sum((s_i-s/w)*(q_i-q/w)*w_i).
+ Because we need some moments up to fourth order, we use central moments to
+ minimize the dynamic range and prevent rounding error from dominating the
+ calculations.*/
+struct oc_mode_metrics{
+ double w;
+ double s;
+ double q;
+ double r;
+ double d;
+ double s2;
+ double sq;
+ double q2;
+ double sr;
+ double qr;
+ double r2;
+ double sd;
+ double qd;
+ double d2;
+ double s2q;
+ double sq2;
+ double sqr;
+ double sqd;
+ double s2q2;
+};
+
+
+# define OC_ZWEIGHT (0.25)
+
+/*TODO: It may be helpful (for block-level quantizers especially) to separate
+ out the contributions from AC and DC into separate tables.*/
+
+extern ogg_int16_t OC_MODE_LOGQ[OC_LOGQ_BINS][3][2];
+extern oc_mode_rd OC_MODE_RD_SATD[OC_LOGQ_BINS][3][2][OC_COMP_BINS];
+extern oc_mode_rd OC_MODE_RD_SAD[OC_LOGQ_BINS][3][2][OC_COMP_BINS];
+
+extern int OC_HAS_MODE_METRICS;
+extern oc_mode_metrics OC_MODE_METRICS_SATD[OC_LOGQ_BINS-1][3][2][OC_COMP_BINS];
+extern oc_mode_metrics OC_MODE_METRICS_SAD[OC_LOGQ_BINS-1][3][2][OC_COMP_BINS];
+extern const char *OC_MODE_METRICS_FILENAME;
+
+void oc_mode_metrics_dump();
+void oc_mode_metrics_print(FILE *_fout);
+
+void oc_mode_metrics_add(oc_mode_metrics *_metrics,
+ double _w,int _s,int _q,int _r,double _d);
+void oc_mode_metrics_merge(oc_mode_metrics *_dst,
+ const oc_mode_metrics *_src,int _n);
+double oc_mode_metrics_solve(double *_r,double *_d,
+ const oc_mode_metrics *_metrics,const int *_s0,const int *_s1,
+ const int *_q0,const int *_q1,
+ const double *_ra,const double *_rb,const double *_rc,
+ const double *_da,const double *_db,const double *_dc,int _n);
+void oc_mode_metrics_update(oc_mode_metrics (*_metrics)[3][2][OC_COMP_BINS],
+ int _niters_min,int _reweight,oc_mode_rd (*_table)[3][2][OC_COMP_BINS],
+ int shift,double (*_weight)[3][2][OC_COMP_BINS]);
+void oc_enc_mode_metrics_load(oc_enc_ctx *_enc);
+void oc_enc_mode_metrics_collect(oc_enc_ctx *_enc);
+
+# endif
+#endif
diff --git a/thirdparty/libtheora/dct.h b/thirdparty/libtheora/dct.h
index 24ba6f111a..8052ea6bc1 100644
--- a/thirdparty/libtheora/dct.h
+++ b/thirdparty/libtheora/dct.h
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: dct.h 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
diff --git a/thirdparty/libtheora/decinfo.c b/thirdparty/libtheora/decinfo.c
index 845eb1361c..a91e740b15 100644
--- a/thirdparty/libtheora/decinfo.c
+++ b/thirdparty/libtheora/decinfo.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: decinfo.c 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
@@ -20,6 +20,11 @@
#include <limits.h>
#include "decint.h"
+/*Only used for fuzzing.*/
+#if defined(HAVE_MEMORY_CONSTRAINT)
+static const int MAX_FUZZING_WIDTH = 16384;
+static const int MAX_FUZZING_HEIGHT = 16384;
+#endif
/*Unpacks a series of octets from a given byte array into the pack buffer.
@@ -55,8 +60,8 @@ static int oc_info_unpack(oc_pack_buf *_opb,th_info *_info){
/*verify we can parse this bitstream version.
We accept earlier minors and all subminors, by spec*/
if(_info->version_major>TH_VERSION_MAJOR||
- _info->version_major==TH_VERSION_MAJOR&&
- _info->version_minor>TH_VERSION_MINOR){
+ (_info->version_major==TH_VERSION_MAJOR&&
+ _info->version_minor>TH_VERSION_MINOR)){
return TH_EVERSION;
}
/*Read the encoded frame description.*/
@@ -82,6 +87,11 @@ static int oc_info_unpack(oc_pack_buf *_opb,th_info *_info){
_info->fps_numerator==0||_info->fps_denominator==0){
return TH_EBADHEADER;
}
+#if defined(HAVE_MEMORY_CONSTRAINT)
+ if(_info->frame_width>=MAX_FUZZING_WIDTH&&_info->frame_height>=MAX_FUZZING_HEIGHT){
+ return TH_EBADHEADER;
+ }
+#endif
/*Note: The sense of pic_y is inverted in what we pass back to the
application compared to how it is stored in the bitstream.
This is because the bitstream uses a right-handed coordinate system, while
@@ -128,6 +138,10 @@ static int oc_comment_unpack(oc_pack_buf *_opb,th_comment *_tc){
_tc->comments*sizeof(_tc->comment_lengths[0]));
_tc->user_comments=(char **)_ogg_malloc(
_tc->comments*sizeof(_tc->user_comments[0]));
+ if(_tc->comment_lengths==NULL||_tc->user_comments==NULL){
+ _tc->comments=0;
+ return TH_EFAULT;
+ }
for(i=0;i<_tc->comments;i++){
len=oc_unpack_length(_opb);
if(len<0||len>oc_pack_bytes_left(_opb)){
@@ -168,9 +182,23 @@ static int oc_dec_headerin(oc_pack_buf *_opb,th_info *_info,
int ret;
val=oc_pack_read(_opb,8);
packtype=(int)val;
- /*If we're at a data packet and we have received all three headers, we're
- done.*/
- if(!(packtype&0x80)&&_info->frame_width>0&&_tc->vendor!=NULL&&*_setup!=NULL){
+ /*If we're at a data packet...*/
+ if(!(packtype&0x80)){
+ /*Check to make sure we received all three headers...
+ If we haven't seen any valid headers, assume this is not actually
+ Theora.*/
+ if(_info->frame_width<=0)return TH_ENOTFORMAT;
+ /*Follow our documentation, which says we'll return TH_EFAULT if this
+ are NULL (_info was checked by our caller).*/
+ if(_tc==NULL)return TH_EFAULT;
+ /*And if any other headers were missing, declare this packet "out of
+ sequence" instead.*/
+ if(_tc->vendor==NULL)return TH_EBADHEADER;
+ /*Don't check this until it's needed, since we allow passing NULL for the
+ arguments that we're not expecting the next header to fill in yet.*/
+ if(_setup==NULL)return TH_EFAULT;
+ if(*_setup==NULL)return TH_EBADHEADER;
+ /*If we got everything, we're done.*/
return 0;
}
/*Check the codec string.*/
diff --git a/thirdparty/libtheora/decint.h b/thirdparty/libtheora/decint.h
index 261b67631a..3cea6b1439 100644
--- a/thirdparty/libtheora/decint.h
+++ b/thirdparty/libtheora/decint.h
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: decint.h 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
@@ -19,15 +19,39 @@
#if !defined(_decint_H)
# define _decint_H (1)
# include "theora/theoradec.h"
-# include "internal.h"
+# include "state.h"
# include "bitpack.h"
-
-typedef struct th_setup_info oc_setup_info;
-typedef struct th_dec_ctx oc_dec_ctx;
-
# include "huffdec.h"
# include "dequant.h"
+typedef struct th_setup_info oc_setup_info;
+typedef struct oc_dec_opt_vtable oc_dec_opt_vtable;
+typedef struct oc_dec_pipeline_state oc_dec_pipeline_state;
+typedef struct th_dec_ctx oc_dec_ctx;
+
+
+
+/*Decoder-specific accelerated functions.*/
+# if defined(OC_C64X_ASM)
+# include "c64x/c64xdec.h"
+# endif
+
+# if !defined(oc_dec_accel_init)
+# define oc_dec_accel_init oc_dec_accel_init_c
+# endif
+# if defined(OC_DEC_USE_VTABLE)
+# if !defined(oc_dec_dc_unpredict_mcu_plane)
+# define oc_dec_dc_unpredict_mcu_plane(_dec,_pipe,_pli) \
+ ((*(_dec)->opt_vtable.dc_unpredict_mcu_plane)(_dec,_pipe,_pli))
+# endif
+# else
+# if !defined(oc_dec_dc_unpredict_mcu_plane)
+# define oc_dec_dc_unpredict_mcu_plane oc_dec_dc_unpredict_mcu_plane_c
+# endif
+# endif
+
+
+
/*Constants for the packet-in state machine specific to the decoder.*/
/*Next packet to read: Data packet.*/
@@ -37,71 +61,125 @@ typedef struct th_dec_ctx oc_dec_ctx;
struct th_setup_info{
/*The Huffman codes.*/
- oc_huff_node *huff_tables[TH_NHUFFMAN_TABLES];
+ ogg_int16_t *huff_tables[TH_NHUFFMAN_TABLES];
/*The quantization parameters.*/
th_quant_info qinfo;
};
+/*Decoder specific functions with accelerated variants.*/
+struct oc_dec_opt_vtable{
+ void (*dc_unpredict_mcu_plane)(oc_dec_ctx *_dec,
+ oc_dec_pipeline_state *_pipe,int _pli);
+};
+
+
+
+struct oc_dec_pipeline_state{
+ /*Decoded DCT coefficients.
+ These are placed here instead of on the stack so that they can persist
+ between blocks, which makes clearing them back to zero much faster when
+ only a few non-zero coefficients were decoded.
+ It requires at least 65 elements because the zig-zag index array uses the
+ 65th element as a dumping ground for out-of-range indices to protect us
+ from buffer overflow.
+ We make it fully twice as large so that the second half can serve as the
+ reconstruction buffer, which saves passing another parameter to all the
+     acceleration functions.
+ It also solves problems with 16-byte alignment for NEON on ARM.
+ gcc (as of 4.2.1) only seems to be able to give stack variables 8-byte
+ alignment, and silently produces incorrect results if you ask for 16.
+ Finally, keeping it off the stack means there's less likely to be a data
+     hazard between the NEON co-processor and the regular ARM core, which avoids
+ unnecessary stalls.*/
+ OC_ALIGN16(ogg_int16_t dct_coeffs[128]);
+ OC_ALIGN16(signed char bounding_values[256]);
+ ptrdiff_t ti[3][64];
+ ptrdiff_t ebi[3][64];
+ ptrdiff_t eob_runs[3][64];
+ const ptrdiff_t *coded_fragis[3];
+ const ptrdiff_t *uncoded_fragis[3];
+ ptrdiff_t ncoded_fragis[3];
+ ptrdiff_t nuncoded_fragis[3];
+ const ogg_uint16_t *dequant[3][3][2];
+ int fragy0[3];
+ int fragy_end[3];
+ int pred_last[3][4];
+ int mcu_nvfrags;
+ int loop_filter;
+ int pp_level;
+};
+
+
struct th_dec_ctx{
/*Shared encoder/decoder state.*/
- oc_theora_state state;
+ oc_theora_state state;
/*Whether or not packets are ready to be emitted.
This takes on negative values while there are remaining header packets to
be emitted, reaches 0 when the codec is ready for input, and goes to 1
when a frame has been processed and a data packet is ready.*/
- int packet_state;
+ int packet_state;
/*Buffer in which to assemble packets.*/
- oc_pack_buf opb;
+ oc_pack_buf opb;
/*Huffman decode trees.*/
- oc_huff_node *huff_tables[TH_NHUFFMAN_TABLES];
+ ogg_int16_t *huff_tables[TH_NHUFFMAN_TABLES];
/*The index of the first token in each plane for each coefficient.*/
- ptrdiff_t ti0[3][64];
+ ptrdiff_t ti0[3][64];
/*The number of outstanding EOB runs at the start of each coefficient in each
plane.*/
- ptrdiff_t eob_runs[3][64];
+ ptrdiff_t eob_runs[3][64];
/*The DCT token lists.*/
- unsigned char *dct_tokens;
+ unsigned char *dct_tokens;
/*The extra bits associated with DCT tokens.*/
- unsigned char *extra_bits;
+ unsigned char *extra_bits;
/*The number of dct tokens unpacked so far.*/
- int dct_tokens_count;
+ int dct_tokens_count;
/*The out-of-loop post-processing level.*/
- int pp_level;
+ int pp_level;
/*The DC scale used for out-of-loop deblocking.*/
- int pp_dc_scale[64];
+ int pp_dc_scale[64];
/*The sharpen modifier used for out-of-loop deringing.*/
- int pp_sharp_mod[64];
+ int pp_sharp_mod[64];
/*The DC quantization index of each block.*/
- unsigned char *dc_qis;
+ unsigned char *dc_qis;
/*The variance of each block.*/
- int *variances;
+ int *variances;
/*The storage for the post-processed frame buffer.*/
- unsigned char *pp_frame_data;
+ unsigned char *pp_frame_data;
/*Whether or not the post-processsed frame buffer has space for chroma.*/
- int pp_frame_state;
+ int pp_frame_state;
/*The buffer used for the post-processed frame.
Note that this is _not_ guaranteed to have the same strides and offsets as
the reference frame buffers.*/
- th_ycbcr_buffer pp_frame_buf;
+ th_ycbcr_buffer pp_frame_buf;
/*The striped decode callback function.*/
- th_stripe_callback stripe_cb;
+ th_stripe_callback stripe_cb;
+ oc_dec_pipeline_state pipe;
+# if defined(OC_DEC_USE_VTABLE)
+ /*Table for decoder acceleration functions.*/
+ oc_dec_opt_vtable opt_vtable;
+# endif
# if defined(HAVE_CAIRO)
/*Output metrics for debugging.*/
- int telemetry;
- int telemetry_mbmode;
- int telemetry_mv;
- int telemetry_qi;
- int telemetry_bits;
- int telemetry_frame_bytes;
- int telemetry_coding_bytes;
- int telemetry_mode_bytes;
- int telemetry_mv_bytes;
- int telemetry_qi_bytes;
- int telemetry_dc_bytes;
- unsigned char *telemetry_frame_data;
+ int telemetry_mbmode;
+ int telemetry_mv;
+ int telemetry_qi;
+ int telemetry_bits;
+ int telemetry_frame_bytes;
+ int telemetry_coding_bytes;
+ int telemetry_mode_bytes;
+ int telemetry_mv_bytes;
+ int telemetry_qi_bytes;
+ int telemetry_dc_bytes;
+ unsigned char *telemetry_frame_data;
# endif
};
+/*Default pure-C implementations of decoder-specific accelerated functions.*/
+void oc_dec_accel_init_c(oc_dec_ctx *_dec);
+
+void oc_dec_dc_unpredict_mcu_plane_c(oc_dec_ctx *_dec,
+ oc_dec_pipeline_state *_pipe,int _pli);
+
#endif
diff --git a/thirdparty/libtheora/decode.c b/thirdparty/libtheora/decode.c
index bde967b794..fad26e0927 100644
--- a/thirdparty/libtheora/decode.c
+++ b/thirdparty/libtheora/decode.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: decode.c 16581 2009-09-25 22:56:16Z gmaxwell $
+ last mod: $Id$
********************************************************************/
@@ -118,7 +118,7 @@ static const unsigned char OC_INTERNAL_DCT_TOKEN_EXTRA_BITS[15]={
/*Whether or not an internal token needs any additional extra bits.*/
#define OC_DCT_TOKEN_NEEDS_MORE(token) \
- (token<(sizeof(OC_INTERNAL_DCT_TOKEN_EXTRA_BITS)/ \
+ (token<(int)(sizeof(OC_INTERNAL_DCT_TOKEN_EXTRA_BITS)/ \
sizeof(*OC_INTERNAL_DCT_TOKEN_EXTRA_BITS)))
/*This token (OC_DCT_REPEAT_RUN3_TOKEN) requires more than 8 extra bits.*/
@@ -129,7 +129,7 @@ static const unsigned char OC_INTERNAL_DCT_TOKEN_EXTRA_BITS[15]={
is not yet available everywhere; this should be equivalent.*/
#define OC_DCT_EOB_FINISH (~(size_t)0>>1)
-/*The location of the (6) run legth bits in the code word.
+/*The location of the (6) run length bits in the code word.
These are placed at index 0 and given 8 bits (even though 6 would suffice)
because it may be faster to extract the lower byte on some platforms.*/
#define OC_DCT_CW_RLEN_SHIFT (0)
@@ -297,8 +297,6 @@ static const ogg_int32_t OC_DCT_CODE_WORD[92]={
static int oc_sb_run_unpack(oc_pack_buf *_opb){
- long bits;
- int ret;
/*Coding scheme:
Codeword Run Length
0 1
@@ -308,32 +306,26 @@ static int oc_sb_run_unpack(oc_pack_buf *_opb){
11110xxx 10-17
111110xxxx 18-33
111111xxxxxxxxxxxx 34-4129*/
- bits=oc_pack_read1(_opb);
- if(bits==0)return 1;
- bits=oc_pack_read(_opb,2);
- if((bits&2)==0)return 2+(int)bits;
- else if((bits&1)==0){
- bits=oc_pack_read1(_opb);
- return 4+(int)bits;
- }
- bits=oc_pack_read(_opb,3);
- if((bits&4)==0)return 6+(int)bits;
- else if((bits&2)==0){
- ret=10+((bits&1)<<2);
- bits=oc_pack_read(_opb,2);
- return ret+(int)bits;
- }
- else if((bits&1)==0){
- bits=oc_pack_read(_opb,4);
- return 18+(int)bits;
+ static const ogg_int16_t OC_SB_RUN_TREE[22]={
+ 4,
+ -(1<<8|1),-(1<<8|1),-(1<<8|1),-(1<<8|1),
+ -(1<<8|1),-(1<<8|1),-(1<<8|1),-(1<<8|1),
+ -(3<<8|2),-(3<<8|2),-(3<<8|3),-(3<<8|3),
+ -(4<<8|4),-(4<<8|5),-(4<<8|2<<4|6-6),17,
+ 2,
+ -(2<<8|2<<4|10-6),-(2<<8|2<<4|14-6),-(2<<8|4<<4|18-6),-(2<<8|12<<4|34-6)
+ };
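+  /*A sketch of the packed tree format used by oc_huff_token_decode(): the
+     first element gives the number of bits for the root look-up, negative
+     entries are leaves storing -(bits actually consumed<<8|token), and
+     non-negative entries are indices of sub-tables that begin with their own
+     bit count.
+    Here tokens of 0x10 or larger are escapes: the low five bits hold the
+     run-length base minus 6 and the remaining bits give the number of extra
+     bits to read, which is what the code below unpacks.*/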
+ int ret;
+ ret=oc_huff_token_decode(_opb,OC_SB_RUN_TREE);
+ if(ret>=0x10){
+ int offs;
+ offs=ret&0x1F;
+ ret=6+offs+(int)oc_pack_read(_opb,ret-offs>>4);
}
- bits=oc_pack_read(_opb,12);
- return 34+(int)bits;
+ return ret;
}
static int oc_block_run_unpack(oc_pack_buf *_opb){
- long bits;
- long bits2;
/*Coding scheme:
Codeword Run Length
0x 1-2
@@ -342,26 +334,37 @@ static int oc_block_run_unpack(oc_pack_buf *_opb){
1110xx 7-10
11110xx 11-14
11111xxxx 15-30*/
- bits=oc_pack_read(_opb,2);
- if((bits&2)==0)return 1+(int)bits;
- else if((bits&1)==0){
- bits=oc_pack_read1(_opb);
- return 3+(int)bits;
- }
- bits=oc_pack_read(_opb,2);
- if((bits&2)==0)return 5+(int)bits;
- else if((bits&1)==0){
- bits=oc_pack_read(_opb,2);
- return 7+(int)bits;
- }
- bits=oc_pack_read(_opb,3);
- if((bits&4)==0)return 11+bits;
- bits2=oc_pack_read(_opb,2);
- return 15+((bits&3)<<2)+bits2;
+ static const ogg_int16_t OC_BLOCK_RUN_TREE[61]={
+ 5,
+ -(2<<8|1),-(2<<8|1),-(2<<8|1),-(2<<8|1),
+ -(2<<8|1),-(2<<8|1),-(2<<8|1),-(2<<8|1),
+ -(2<<8|2),-(2<<8|2),-(2<<8|2),-(2<<8|2),
+ -(2<<8|2),-(2<<8|2),-(2<<8|2),-(2<<8|2),
+ -(3<<8|3),-(3<<8|3),-(3<<8|3),-(3<<8|3),
+ -(3<<8|4),-(3<<8|4),-(3<<8|4),-(3<<8|4),
+ -(4<<8|5),-(4<<8|5),-(4<<8|6),-(4<<8|6),
+ 33, 36, 39, 44,
+ 1,-(1<<8|7),-(1<<8|8),
+ 1,-(1<<8|9),-(1<<8|10),
+ 2,-(2<<8|11),-(2<<8|12),-(2<<8|13),-(2<<8|14),
+ 4,
+ -(4<<8|15),-(4<<8|16),-(4<<8|17),-(4<<8|18),
+ -(4<<8|19),-(4<<8|20),-(4<<8|21),-(4<<8|22),
+ -(4<<8|23),-(4<<8|24),-(4<<8|25),-(4<<8|26),
+ -(4<<8|27),-(4<<8|28),-(4<<8|29),-(4<<8|30)
+ };
+ return oc_huff_token_decode(_opb,OC_BLOCK_RUN_TREE);
}
+void oc_dec_accel_init_c(oc_dec_ctx *_dec){
+# if defined(OC_DEC_USE_VTABLE)
+ _dec->opt_vtable.dc_unpredict_mcu_plane=
+ oc_dec_dc_unpredict_mcu_plane_c;
+# endif
+}
+
static int oc_dec_init(oc_dec_ctx *_dec,const th_info *_info,
const th_setup_info *_setup){
int qti;
@@ -371,7 +374,7 @@ static int oc_dec_init(oc_dec_ctx *_dec,const th_info *_info,
ret=oc_state_init(&_dec->state,_info,3);
if(ret<0)return ret;
ret=oc_huff_trees_copy(_dec->huff_tables,
- (const oc_huff_node *const *)_setup->huff_tables);
+ (const ogg_int16_t *const *)_setup->huff_tables);
if(ret<0){
oc_state_clear(&_dec->state);
return ret;
@@ -406,6 +409,7 @@ static int oc_dec_init(oc_dec_ctx *_dec,const th_info *_info,
}
memcpy(_dec->state.loop_filter_limits,_setup->qinfo.loop_filter_limits,
sizeof(_dec->state.loop_filter_limits));
+ oc_dec_accel_init(_dec);
_dec->pp_level=OC_PP_LEVEL_DISABLED;
_dec->dc_qis=NULL;
_dec->variances=NULL;
@@ -413,7 +417,6 @@ static int oc_dec_init(oc_dec_ctx *_dec,const th_info *_info,
_dec->stripe_cb.ctx=NULL;
_dec->stripe_cb.stripe_decoded=NULL;
#if defined(HAVE_CAIRO)
- _dec->telemetry=0;
_dec->telemetry_bits=0;
_dec->telemetry_qi=0;
_dec->telemetry_mbmode=0;
@@ -504,6 +507,7 @@ static void oc_dec_mark_all_intra(oc_dec_ctx *_dec){
fragi=sb_maps[sbi][quadi][bi];
if(fragi>=0){
frags[fragi].coded=1;
+ frags[fragi].refi=OC_FRAME_SELF;
frags[fragi].mb_mode=OC_MODE_INTRA;
coded_fragis[ncoded_fragis++]=fragi;
}
@@ -595,6 +599,7 @@ static void oc_dec_coded_sb_flags_unpack(oc_dec_ctx *_dec){
static void oc_dec_coded_flags_unpack(oc_dec_ctx *_dec){
const oc_sb_map *sb_maps;
const oc_sb_flags *sb_flags;
+ signed char *mb_modes;
oc_fragment *frags;
unsigned nsbs;
unsigned sbi;
@@ -617,6 +622,7 @@ static void oc_dec_coded_flags_unpack(oc_dec_ctx *_dec){
else flag=0;
sb_maps=(const oc_sb_map *)_dec->state.sb_maps;
sb_flags=_dec->state.sb_flags;
+ mb_modes=_dec->state.mb_modes;
frags=_dec->state.frags;
sbi=nsbs=run_count=0;
coded_fragis=_dec->state.coded_fragis;
@@ -627,7 +633,9 @@ static void oc_dec_coded_flags_unpack(oc_dec_ctx *_dec){
for(;sbi<nsbs;sbi++){
int quadi;
for(quadi=0;quadi<4;quadi++)if(sb_flags[sbi].quad_valid&1<<quadi){
+ int quad_coded;
int bi;
+ quad_coded=0;
for(bi=0;bi<4;bi++){
ptrdiff_t fragi;
fragi=sb_maps[sbi][quadi][bi];
@@ -645,9 +653,13 @@ static void oc_dec_coded_flags_unpack(oc_dec_ctx *_dec){
}
if(coded)coded_fragis[ncoded_fragis++]=fragi;
else *(uncoded_fragis-++nuncoded_fragis)=fragi;
+ quad_coded|=coded;
frags[fragi].coded=coded;
+ frags[fragi].refi=OC_FRAME_NONE;
}
}
+ /*Remember if there's a coded luma block in this macro block.*/
+ if(!pli)mb_modes[sbi<<2|quadi]=quad_coded;
}
}
_dec->state.ncoded_fragis[pli]=ncoded_fragis-prev_ncoded_fragis;
@@ -659,33 +671,39 @@ static void oc_dec_coded_flags_unpack(oc_dec_ctx *_dec){
}
+/*Coding scheme:
+ Codeword Mode Index
+ 0 0
+ 10 1
+ 110 2
+ 1110 3
+ 11110 4
+ 111110 5
+ 1111110 6
+ 1111111 7*/
+static const ogg_int16_t OC_VLC_MODE_TREE[26]={
+ 4,
+ -(1<<8|0),-(1<<8|0),-(1<<8|0),-(1<<8|0),
+ -(1<<8|0),-(1<<8|0),-(1<<8|0),-(1<<8|0),
+ -(2<<8|1),-(2<<8|1),-(2<<8|1),-(2<<8|1),
+ -(3<<8|2),-(3<<8|2),-(4<<8|3),17,
+ 3,
+ -(1<<8|4),-(1<<8|4),-(1<<8|4),-(1<<8|4),
+ -(2<<8|5),-(2<<8|5),-(3<<8|6),-(3<<8|7)
+};
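+/*For example, codeword "110" (mode index 2) lands on root slots 12-13 above,
+  both holding -(3<<8|2): three of the four bits read up front are consumed
+  and the fourth is presumably handed back to the bit reader, while the "1111"
+  prefix escapes to the 3-bit sub-table starting at offset 17.*/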
-typedef int (*oc_mode_unpack_func)(oc_pack_buf *_opb);
-
-static int oc_vlc_mode_unpack(oc_pack_buf *_opb){
- long val;
- int i;
- for(i=0;i<7;i++){
- val=oc_pack_read1(_opb);
- if(!val)break;
- }
- return i;
-}
-
-static int oc_clc_mode_unpack(oc_pack_buf *_opb){
- long val;
- val=oc_pack_read(_opb,3);
- return (int)val;
-}
+static const ogg_int16_t OC_CLC_MODE_TREE[9]={
+ 3,
+ -(3<<8|0),-(3<<8|1),-(3<<8|2),-(3<<8|3),
+ -(3<<8|4),-(3<<8|5),-(3<<8|6),-(3<<8|7)
+};
/*Unpacks the list of macro block modes for INTER frames.*/
static void oc_dec_mb_modes_unpack(oc_dec_ctx *_dec){
- const oc_mb_map *mb_maps;
signed char *mb_modes;
- const oc_fragment *frags;
const unsigned char *alphabet;
unsigned char scheme0_alphabet[8];
- oc_mode_unpack_func mode_unpack;
+ const ogg_int16_t *mode_tree;
size_t nmbs;
size_t mbi;
long val;
@@ -707,65 +725,80 @@ static void oc_dec_mb_modes_unpack(oc_dec_ctx *_dec){
alphabet=scheme0_alphabet;
}
else alphabet=OC_MODE_ALPHABETS[mode_scheme-1];
- if(mode_scheme==7)mode_unpack=oc_clc_mode_unpack;
- else mode_unpack=oc_vlc_mode_unpack;
+ mode_tree=mode_scheme==7?OC_CLC_MODE_TREE:OC_VLC_MODE_TREE;
mb_modes=_dec->state.mb_modes;
- mb_maps=(const oc_mb_map *)_dec->state.mb_maps;
nmbs=_dec->state.nmbs;
- frags=_dec->state.frags;
for(mbi=0;mbi<nmbs;mbi++){
- if(mb_modes[mbi]!=OC_MODE_INVALID){
- int bi;
- /*Check for a coded luma block in this macro block.*/
- for(bi=0;bi<4&&!frags[mb_maps[mbi][0][bi]].coded;bi++);
- /*We found one, decode a mode.*/
- if(bi<4)mb_modes[mbi]=alphabet[(*mode_unpack)(&_dec->opb)];
- /*There were none: INTER_NOMV is forced.*/
- else mb_modes[mbi]=OC_MODE_INTER_NOMV;
+ if(mb_modes[mbi]>0){
+ /*We have a coded luma block; decode a mode.*/
+ mb_modes[mbi]=alphabet[oc_huff_token_decode(&_dec->opb,mode_tree)];
}
+ /*For other valid macro blocks, INTER_NOMV is forced, but we rely on the
+ fact that OC_MODE_INTER_NOMV is already 0.*/
}
}
-typedef int (*oc_mv_comp_unpack_func)(oc_pack_buf *_opb);
+static const ogg_int16_t OC_VLC_MV_COMP_TREE[101]={
+ 5,
+ -(3<<8|32+0),-(3<<8|32+0),-(3<<8|32+0),-(3<<8|32+0),
+ -(3<<8|32+1),-(3<<8|32+1),-(3<<8|32+1),-(3<<8|32+1),
+ -(3<<8|32-1),-(3<<8|32-1),-(3<<8|32-1),-(3<<8|32-1),
+ -(4<<8|32+2),-(4<<8|32+2),-(4<<8|32-2),-(4<<8|32-2),
+ -(4<<8|32+3),-(4<<8|32+3),-(4<<8|32-3),-(4<<8|32-3),
+ 33, 36, 39, 42,
+ 45, 50, 55, 60,
+ 65, 74, 83, 92,
+ 1,-(1<<8|32+4),-(1<<8|32-4),
+ 1,-(1<<8|32+5),-(1<<8|32-5),
+ 1,-(1<<8|32+6),-(1<<8|32-6),
+ 1,-(1<<8|32+7),-(1<<8|32-7),
+ 2,-(2<<8|32+8),-(2<<8|32-8),-(2<<8|32+9),-(2<<8|32-9),
+ 2,-(2<<8|32+10),-(2<<8|32-10),-(2<<8|32+11),-(2<<8|32-11),
+ 2,-(2<<8|32+12),-(2<<8|32-12),-(2<<8|32+13),-(2<<8|32-13),
+ 2,-(2<<8|32+14),-(2<<8|32-14),-(2<<8|32+15),-(2<<8|32-15),
+ 3,
+ -(3<<8|32+16),-(3<<8|32-16),-(3<<8|32+17),-(3<<8|32-17),
+ -(3<<8|32+18),-(3<<8|32-18),-(3<<8|32+19),-(3<<8|32-19),
+ 3,
+ -(3<<8|32+20),-(3<<8|32-20),-(3<<8|32+21),-(3<<8|32-21),
+ -(3<<8|32+22),-(3<<8|32-22),-(3<<8|32+23),-(3<<8|32-23),
+ 3,
+ -(3<<8|32+24),-(3<<8|32-24),-(3<<8|32+25),-(3<<8|32-25),
+ -(3<<8|32+26),-(3<<8|32-26),-(3<<8|32+27),-(3<<8|32-27),
+ 3,
+ -(3<<8|32+28),-(3<<8|32-28),-(3<<8|32+29),-(3<<8|32-29),
+ -(3<<8|32+30),-(3<<8|32-30),-(3<<8|32+31),-(3<<8|32-31)
+};
+
+static const ogg_int16_t OC_CLC_MV_COMP_TREE[65]={
+ 6,
+ -(6<<8|32 +0),-(6<<8|32 -0),-(6<<8|32 +1),-(6<<8|32 -1),
+ -(6<<8|32 +2),-(6<<8|32 -2),-(6<<8|32 +3),-(6<<8|32 -3),
+ -(6<<8|32 +4),-(6<<8|32 -4),-(6<<8|32 +5),-(6<<8|32 -5),
+ -(6<<8|32 +6),-(6<<8|32 -6),-(6<<8|32 +7),-(6<<8|32 -7),
+ -(6<<8|32 +8),-(6<<8|32 -8),-(6<<8|32 +9),-(6<<8|32 -9),
+ -(6<<8|32+10),-(6<<8|32-10),-(6<<8|32+11),-(6<<8|32-11),
+ -(6<<8|32+12),-(6<<8|32-12),-(6<<8|32+13),-(6<<8|32-13),
+ -(6<<8|32+14),-(6<<8|32-14),-(6<<8|32+15),-(6<<8|32-15),
+ -(6<<8|32+16),-(6<<8|32-16),-(6<<8|32+17),-(6<<8|32-17),
+ -(6<<8|32+18),-(6<<8|32-18),-(6<<8|32+19),-(6<<8|32-19),
+ -(6<<8|32+20),-(6<<8|32-20),-(6<<8|32+21),-(6<<8|32-21),
+ -(6<<8|32+22),-(6<<8|32-22),-(6<<8|32+23),-(6<<8|32-23),
+ -(6<<8|32+24),-(6<<8|32-24),-(6<<8|32+25),-(6<<8|32-25),
+ -(6<<8|32+26),-(6<<8|32-26),-(6<<8|32+27),-(6<<8|32-27),
+ -(6<<8|32+28),-(6<<8|32-28),-(6<<8|32+29),-(6<<8|32-29),
+ -(6<<8|32+30),-(6<<8|32-30),-(6<<8|32+31),-(6<<8|32-31)
+};
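+/*Both tables emit the component value biased by +32, so a single token in
+  0..63 covers the legal -31..+31 range; oc_mv_unpack() below removes the
+  bias and packs the pair into an oc_mv.*/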
-static int oc_vlc_mv_comp_unpack(oc_pack_buf *_opb){
- long bits;
- int mask;
- int mv;
- bits=oc_pack_read(_opb,3);
- switch(bits){
- case 0:return 0;
- case 1:return 1;
- case 2:return -1;
- case 3:
- case 4:{
- mv=(int)(bits-1);
- bits=oc_pack_read1(_opb);
- }break;
- /*case 5:
- case 6:
- case 7:*/
- default:{
- mv=1<<bits-3;
- bits=oc_pack_read(_opb,bits-2);
- mv+=(int)(bits>>1);
- bits&=1;
- }break;
- }
- mask=-(int)bits;
- return mv+mask^mask;
-}
-static int oc_clc_mv_comp_unpack(oc_pack_buf *_opb){
- long bits;
- int mask;
- int mv;
- bits=oc_pack_read(_opb,6);
- mv=(int)bits>>1;
- mask=-((int)bits&1);
- return mv+mask^mask;
+static oc_mv oc_mv_unpack(oc_pack_buf *_opb,const ogg_int16_t *_tree){
+ int dx;
+ int dy;
+ dx=oc_huff_token_decode(_opb,_tree)-32;
+ dy=oc_huff_token_decode(_opb,_tree)-32;
+ return OC_MV(dx,dy);
}
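+/*oc_mv now appears to be a packed scalar rather than a two-element array:
+  callers assign it directly instead of using memcpy(), and the OC_MV(),
+  OC_MV_X() and OC_MV_Y() helpers, presumably defined in the shared state
+  headers, pack and unpack the two components.*/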
/*Unpacks the list of motion vectors for INTER frames, and propagtes the macro
@@ -774,105 +807,93 @@ static void oc_dec_mv_unpack_and_frag_modes_fill(oc_dec_ctx *_dec){
const oc_mb_map *mb_maps;
const signed char *mb_modes;
oc_set_chroma_mvs_func set_chroma_mvs;
- oc_mv_comp_unpack_func mv_comp_unpack;
+ const ogg_int16_t *mv_comp_tree;
oc_fragment *frags;
oc_mv *frag_mvs;
const unsigned char *map_idxs;
int map_nidxs;
- oc_mv last_mv[2];
+ oc_mv last_mv;
+ oc_mv prior_mv;
oc_mv cbmvs[4];
size_t nmbs;
size_t mbi;
long val;
set_chroma_mvs=OC_SET_CHROMA_MVS_TABLE[_dec->state.info.pixel_fmt];
val=oc_pack_read1(&_dec->opb);
- mv_comp_unpack=val?oc_clc_mv_comp_unpack:oc_vlc_mv_comp_unpack;
+ mv_comp_tree=val?OC_CLC_MV_COMP_TREE:OC_VLC_MV_COMP_TREE;
map_idxs=OC_MB_MAP_IDXS[_dec->state.info.pixel_fmt];
map_nidxs=OC_MB_MAP_NIDXS[_dec->state.info.pixel_fmt];
- memset(last_mv,0,sizeof(last_mv));
+ prior_mv=last_mv=0;
frags=_dec->state.frags;
frag_mvs=_dec->state.frag_mvs;
mb_maps=(const oc_mb_map *)_dec->state.mb_maps;
mb_modes=_dec->state.mb_modes;
nmbs=_dec->state.nmbs;
for(mbi=0;mbi<nmbs;mbi++){
- int mb_mode;
+ int mb_mode;
mb_mode=mb_modes[mbi];
if(mb_mode!=OC_MODE_INVALID){
- oc_mv mbmv;
- ptrdiff_t fragi;
- int coded[13];
- int codedi;
- int ncoded;
- int mapi;
- int mapii;
- /*Search for at least one coded fragment.*/
- ncoded=mapii=0;
- do{
- mapi=map_idxs[mapii];
- fragi=mb_maps[mbi][mapi>>2][mapi&3];
- if(frags[fragi].coded)coded[ncoded++]=mapi;
- }
- while(++mapii<map_nidxs);
- if(ncoded<=0)continue;
- switch(mb_mode){
- case OC_MODE_INTER_MV_FOUR:{
- oc_mv lbmvs[4];
- int bi;
- /*Mark the tail of the list, so we don't accidentally go past it.*/
- coded[ncoded]=-1;
- for(bi=codedi=0;bi<4;bi++){
- if(coded[codedi]==bi){
- codedi++;
- fragi=mb_maps[mbi][0][bi];
- frags[fragi].mb_mode=mb_mode;
- lbmvs[bi][0]=(signed char)(*mv_comp_unpack)(&_dec->opb);
- lbmvs[bi][1]=(signed char)(*mv_comp_unpack)(&_dec->opb);
- memcpy(frag_mvs[fragi],lbmvs[bi],sizeof(lbmvs[bi]));
- }
- else lbmvs[bi][0]=lbmvs[bi][1]=0;
- }
- if(codedi>0){
- memcpy(last_mv[1],last_mv[0],sizeof(last_mv[1]));
- memcpy(last_mv[0],lbmvs[coded[codedi-1]],sizeof(last_mv[0]));
+ oc_mv mbmv;
+ ptrdiff_t fragi;
+ int mapi;
+ int mapii;
+ int refi;
+ if(mb_mode==OC_MODE_INTER_MV_FOUR){
+ oc_mv lbmvs[4];
+ int bi;
+ prior_mv=last_mv;
+ for(bi=0;bi<4;bi++){
+ fragi=mb_maps[mbi][0][bi];
+ if(frags[fragi].coded){
+ frags[fragi].refi=OC_FRAME_PREV;
+ frags[fragi].mb_mode=OC_MODE_INTER_MV_FOUR;
+ lbmvs[bi]=last_mv=oc_mv_unpack(&_dec->opb,mv_comp_tree);
+ frag_mvs[fragi]=lbmvs[bi];
}
- if(codedi<ncoded){
- (*set_chroma_mvs)(cbmvs,(const oc_mv *)lbmvs);
- for(;codedi<ncoded;codedi++){
- mapi=coded[codedi];
- bi=mapi&3;
- fragi=mb_maps[mbi][mapi>>2][bi];
- frags[fragi].mb_mode=mb_mode;
- memcpy(frag_mvs[fragi],cbmvs[bi],sizeof(cbmvs[bi]));
- }
+ else lbmvs[bi]=0;
+ }
+ (*set_chroma_mvs)(cbmvs,lbmvs);
+ for(mapii=4;mapii<map_nidxs;mapii++){
+ mapi=map_idxs[mapii];
+ bi=mapi&3;
+ fragi=mb_maps[mbi][mapi>>2][bi];
+ if(frags[fragi].coded){
+ frags[fragi].refi=OC_FRAME_PREV;
+ frags[fragi].mb_mode=OC_MODE_INTER_MV_FOUR;
+ frag_mvs[fragi]=cbmvs[bi];
}
- }break;
- case OC_MODE_INTER_MV:{
- memcpy(last_mv[1],last_mv[0],sizeof(last_mv[1]));
- mbmv[0]=last_mv[0][0]=(signed char)(*mv_comp_unpack)(&_dec->opb);
- mbmv[1]=last_mv[0][1]=(signed char)(*mv_comp_unpack)(&_dec->opb);
- }break;
- case OC_MODE_INTER_MV_LAST:memcpy(mbmv,last_mv[0],sizeof(mbmv));break;
- case OC_MODE_INTER_MV_LAST2:{
- memcpy(mbmv,last_mv[1],sizeof(mbmv));
- memcpy(last_mv[1],last_mv[0],sizeof(last_mv[1]));
- memcpy(last_mv[0],mbmv,sizeof(last_mv[0]));
- }break;
- case OC_MODE_GOLDEN_MV:{
- mbmv[0]=(signed char)(*mv_comp_unpack)(&_dec->opb);
- mbmv[1]=(signed char)(*mv_comp_unpack)(&_dec->opb);
- }break;
- default:memset(mbmv,0,sizeof(mbmv));break;
+ }
}
- /*4MV mode fills in the fragments itself.
- For all other modes we can use this common code.*/
- if(mb_mode!=OC_MODE_INTER_MV_FOUR){
- for(codedi=0;codedi<ncoded;codedi++){
- mapi=coded[codedi];
+ else{
+ switch(mb_mode){
+ case OC_MODE_INTER_MV:{
+ prior_mv=last_mv;
+ last_mv=mbmv=oc_mv_unpack(&_dec->opb,mv_comp_tree);
+ }break;
+ case OC_MODE_INTER_MV_LAST:mbmv=last_mv;break;
+ case OC_MODE_INTER_MV_LAST2:{
+ mbmv=prior_mv;
+ prior_mv=last_mv;
+ last_mv=mbmv;
+ }break;
+ case OC_MODE_GOLDEN_MV:{
+ mbmv=oc_mv_unpack(&_dec->opb,mv_comp_tree);
+ }break;
+ default:mbmv=0;break;
+ }
+ /*Fill in the MVs for the fragments.*/
+ refi=OC_FRAME_FOR_MODE(mb_mode);
+ mapii=0;
+ do{
+ mapi=map_idxs[mapii];
fragi=mb_maps[mbi][mapi>>2][mapi&3];
- frags[fragi].mb_mode=mb_mode;
- memcpy(frag_mvs[fragi],mbmv,sizeof(mbmv));
+ if(frags[fragi].coded){
+ frags[fragi].refi=refi;
+ frags[fragi].mb_mode=mb_mode;
+ frag_mvs[fragi]=mbmv;
+ }
}
+ while(++mapii<map_nidxs);
}
}
}
@@ -1181,6 +1202,9 @@ static void oc_dec_residual_tokens_unpack(oc_dec_ctx *_dec){
static int oc_dec_postprocess_init(oc_dec_ctx *_dec){
+ /*musl libc malloc()/realloc() calls might use floating point, so make sure
+ we've cleared the MMX state for them.*/
+ oc_restore_fpu(&_dec->state);
/*pp_level 0: disabled; free any memory used and return*/
if(_dec->pp_level<=OC_PP_LEVEL_DISABLED){
if(_dec->dc_qis!=NULL){
@@ -1301,34 +1325,16 @@ static int oc_dec_postprocess_init(oc_dec_ctx *_dec){
}
-
-typedef struct{
- int bounding_values[256];
- ptrdiff_t ti[3][64];
- ptrdiff_t eob_runs[3][64];
- const ptrdiff_t *coded_fragis[3];
- const ptrdiff_t *uncoded_fragis[3];
- ptrdiff_t ncoded_fragis[3];
- ptrdiff_t nuncoded_fragis[3];
- const ogg_uint16_t *dequant[3][3][2];
- int fragy0[3];
- int fragy_end[3];
- int pred_last[3][3];
- int mcu_nvfrags;
- int loop_filter;
- int pp_level;
-}oc_dec_pipeline_state;
-
-
-
/*Initialize the main decoding pipeline.*/
static void oc_dec_pipeline_init(oc_dec_ctx *_dec,
oc_dec_pipeline_state *_pipe){
const ptrdiff_t *coded_fragis;
const ptrdiff_t *uncoded_fragis;
+ int flimit;
int pli;
int qii;
int qti;
+ int zzi;
/*If chroma is sub-sampled in the vertical direction, we have to decode two
super block rows of Y' for each super block row of Cb and Cr.*/
_pipe->mcu_nvfrags=4<<!(_dec->state.info.pixel_fmt&2);
@@ -1360,8 +1366,9 @@ static void oc_dec_pipeline_init(oc_dec_ctx *_dec,
/*Set the previous DC predictor to 0 for all color planes and frame types.*/
memset(_pipe->pred_last,0,sizeof(_pipe->pred_last));
/*Initialize the bounding value array for the loop filter.*/
- _pipe->loop_filter=!oc_state_loop_filter_init(&_dec->state,
- _pipe->bounding_values);
+ flimit=_dec->state.loop_filter_limits[_dec->state.qis[0]];
+ _pipe->loop_filter=flimit!=0;
+ if(flimit!=0)oc_loop_filter_init(&_dec->state,_pipe->bounding_values,flimit);
/*Initialize any buffers needed for post-processing.
We also save the current post-processing level, to guard against the user
changing it from a callback.*/
@@ -1374,13 +1381,15 @@ static void oc_dec_pipeline_init(oc_dec_ctx *_dec,
_dec->state.ref_frame_bufs[_dec->state.ref_frame_idx[OC_FRAME_SELF]],
sizeof(_dec->pp_frame_buf[0])*3);
}
+ /*Clear down the DCT coefficient buffer for the first block.*/
+ for(zzi=0;zzi<64;zzi++)_pipe->dct_coeffs[zzi]=0;
}
/*Undo the DC prediction in a single plane of an MCU (one or two super block
rows).
As a side effect, the number of coded and uncoded fragments in this plane of
the MCU is also computed.*/
-static void oc_dec_dc_unpredict_mcu_plane(oc_dec_ctx *_dec,
+void oc_dec_dc_unpredict_mcu_plane_c(oc_dec_ctx *_dec,
oc_dec_pipeline_state *_pipe,int _pli){
const oc_fragment_plane *fplane;
oc_fragment *frags;
@@ -1408,9 +1417,9 @@ static void oc_dec_dc_unpredict_mcu_plane(oc_dec_ctx *_dec,
predictor for the same reference frame.*/
for(fragx=0;fragx<nhfrags;fragx++,fragi++){
if(frags[fragi].coded){
- int ref;
- ref=OC_FRAME_FOR_MODE(frags[fragi].mb_mode);
- pred_last[ref]=frags[fragi].dc+=pred_last[ref];
+ int refi;
+ refi=frags[fragi].refi;
+ pred_last[refi]=frags[fragi].dc+=pred_last[refi];
ncoded_fragis++;
}
}
@@ -1423,27 +1432,24 @@ static void oc_dec_dc_unpredict_mcu_plane(oc_dec_ctx *_dec,
u_frags=frags-nhfrags;
l_ref=-1;
ul_ref=-1;
- u_ref=u_frags[fragi].coded?OC_FRAME_FOR_MODE(u_frags[fragi].mb_mode):-1;
+ u_ref=u_frags[fragi].refi;
for(fragx=0;fragx<nhfrags;fragx++,fragi++){
int ur_ref;
if(fragx+1>=nhfrags)ur_ref=-1;
- else{
- ur_ref=u_frags[fragi+1].coded?
- OC_FRAME_FOR_MODE(u_frags[fragi+1].mb_mode):-1;
- }
+ else ur_ref=u_frags[fragi+1].refi;
if(frags[fragi].coded){
int pred;
- int ref;
- ref=OC_FRAME_FOR_MODE(frags[fragi].mb_mode);
+ int refi;
+ refi=frags[fragi].refi;
/*We break out a separate case based on which of our neighbors use
the same reference frames.
This is somewhat faster than trying to make a generic case which
handles all of them, since it reduces lots of poorly predicted
jumps to one switch statement, and also lets a number of the
multiplications be optimized out by strength reduction.*/
- switch((l_ref==ref)|(ul_ref==ref)<<1|
- (u_ref==ref)<<2|(ur_ref==ref)<<3){
- default:pred=pred_last[ref];break;
+ switch((l_ref==refi)|(ul_ref==refi)<<1|
+ (u_ref==refi)<<2|(ur_ref==refi)<<3){
+ default:pred=pred_last[refi];break;
case 1:
case 3:pred=frags[fragi-1].dc;break;
case 2:pred=u_frags[fragi-1].dc;break;
@@ -1455,6 +1461,7 @@ static void oc_dec_dc_unpredict_mcu_plane(oc_dec_ctx *_dec,
case 9:
case 11:
case 13:{
+ /*The TI compiler mis-compiles this line.*/
pred=(75*frags[fragi-1].dc+53*u_frags[fragi+1].dc)/128;
}break;
case 10:pred=(u_frags[fragi-1].dc+u_frags[fragi+1].dc)/2;break;
@@ -1476,9 +1483,9 @@ static void oc_dec_dc_unpredict_mcu_plane(oc_dec_ctx *_dec,
else if(abs(pred-p1)>128)pred=p1;
}break;
}
- pred_last[ref]=frags[fragi].dc+=pred;
+ pred_last[refi]=frags[fragi].dc+=pred;
ncoded_fragis++;
- l_ref=ref;
+ l_ref=refi;
}
else l_ref=-1;
ul_ref=u_ref;
@@ -1495,7 +1502,7 @@ static void oc_dec_dc_unpredict_mcu_plane(oc_dec_ctx *_dec,
/*Reconstructs all coded fragments in a single MCU (one or two super block
rows).
This requires that each coded fragment have a proper macro block mode and
- motion vector (if not in INTRA mode), and have it's DC value decoded, with
+ motion vector (if not in INTRA mode), and have its DC value decoded, with
the DC prediction process reversed, and the number of coded and uncoded
fragments in this plane of the MCU be counted.
The token lists for each color plane and coefficient should also be filled
@@ -1522,16 +1529,11 @@ static void oc_dec_frags_recon_mcu_plane(oc_dec_ctx *_dec,
eob_runs=_pipe->eob_runs[_pli];
for(qti=0;qti<2;qti++)dc_quant[qti]=_pipe->dequant[_pli][0][qti][0];
for(fragii=0;fragii<ncoded_fragis;fragii++){
- /*This array is made one element larger because the zig-zag index array
- uses the final element as a dumping ground for out-of-range indices
- to protect us from buffer overflow.*/
- OC_ALIGN8(ogg_int16_t dct_coeffs[65]);
const ogg_uint16_t *ac_quant;
ptrdiff_t fragi;
int last_zzi;
int zzi;
fragi=coded_fragis[fragii];
- for(zzi=0;zzi<64;zzi++)dct_coeffs[zzi]=0;
qti=frags[fragi].mb_mode!=OC_MODE_INTRA;
ac_quant=_pipe->dequant[_pli][frags[fragi].qii][qti];
/*Decode the AC coefficients.*/
@@ -1568,18 +1570,19 @@ static void oc_dec_frags_recon_mcu_plane(oc_dec_ctx *_dec,
eob_runs[zzi]=eob;
ti[zzi]=lti;
zzi+=rlen;
- dct_coeffs[dct_fzig_zag[zzi]]=(ogg_int16_t)(coeff*(int)ac_quant[zzi]);
+ _pipe->dct_coeffs[dct_fzig_zag[zzi]]=
+ (ogg_int16_t)(coeff*(int)ac_quant[zzi]);
zzi+=!eob;
}
}
/*TODO: zzi should be exactly 64 here.
If it's not, we should report some kind of warning.*/
zzi=OC_MINI(zzi,64);
- dct_coeffs[0]=(ogg_int16_t)frags[fragi].dc;
+ _pipe->dct_coeffs[0]=(ogg_int16_t)frags[fragi].dc;
/*last_zzi is always initialized.
If your compiler thinks otherwise, it is dumb.*/
oc_state_frag_recon(&_dec->state,fragi,_pli,
- dct_coeffs,last_zzi,dc_quant[qti]);
+ _pipe->dct_coeffs,last_zzi,dc_quant[qti]);
}
_pipe->coded_fragis[_pli]+=ncoded_fragis;
/*Right now the reconstructed MCU has only the coded blocks in it.*/
@@ -1593,9 +1596,14 @@ static void oc_dec_frags_recon_mcu_plane(oc_dec_ctx *_dec,
code, and the hard case (high bitrate, high resolution) is handled
correctly.*/
/*Copy the uncoded blocks from the previous reference frame.*/
- _pipe->uncoded_fragis[_pli]-=_pipe->nuncoded_fragis[_pli];
- oc_state_frag_copy_list(&_dec->state,_pipe->uncoded_fragis[_pli],
- _pipe->nuncoded_fragis[_pli],OC_FRAME_SELF,OC_FRAME_PREV,_pli);
+ if(_pipe->nuncoded_fragis[_pli]>0){
+ _pipe->uncoded_fragis[_pli]-=_pipe->nuncoded_fragis[_pli];
+ oc_frag_copy_list(&_dec->state,
+ _dec->state.ref_frame_data[OC_FRAME_SELF],
+ _dec->state.ref_frame_data[OC_FRAME_PREV],
+ _dec->state.ref_ystride[_pli],_pipe->uncoded_fragis[_pli],
+ _pipe->nuncoded_fragis[_pli],_dec->state.frag_buf_offs);
+ }
}
/*Filter a horizontal block edge.*/
@@ -1953,9 +1961,9 @@ static void oc_dec_dering_frag_rows(oc_dec_ctx *_dec,th_img_plane *_img,
th_dec_ctx *th_decode_alloc(const th_info *_info,const th_setup_info *_setup){
oc_dec_ctx *dec;
if(_info==NULL||_setup==NULL)return NULL;
- dec=_ogg_malloc(sizeof(*dec));
+ dec=oc_aligned_malloc(sizeof(*dec),16);
if(dec==NULL||oc_dec_init(dec,_info,_setup)<0){
- _ogg_free(dec);
+ oc_aligned_free(dec);
return NULL;
}
dec->state.curframe_num=0;
@@ -1965,7 +1973,7 @@ th_dec_ctx *th_decode_alloc(const th_info *_info,const th_setup_info *_setup){
void th_decode_free(th_dec_ctx *_dec){
if(_dec!=NULL){
oc_dec_clear(_dec);
- _ogg_free(_dec);
+ oc_aligned_free(_dec);
}
}
@@ -2013,28 +2021,24 @@ int th_decode_ctl(th_dec_ctx *_dec,int _req,void *_buf,
case TH_DECCTL_SET_TELEMETRY_MBMODE:{
if(_dec==NULL||_buf==NULL)return TH_EFAULT;
if(_buf_sz!=sizeof(int))return TH_EINVAL;
- _dec->telemetry=1;
_dec->telemetry_mbmode=*(int *)_buf;
return 0;
}break;
case TH_DECCTL_SET_TELEMETRY_MV:{
if(_dec==NULL||_buf==NULL)return TH_EFAULT;
if(_buf_sz!=sizeof(int))return TH_EINVAL;
- _dec->telemetry=1;
_dec->telemetry_mv=*(int *)_buf;
return 0;
}break;
case TH_DECCTL_SET_TELEMETRY_QI:{
if(_dec==NULL||_buf==NULL)return TH_EFAULT;
if(_buf_sz!=sizeof(int))return TH_EINVAL;
- _dec->telemetry=1;
_dec->telemetry_qi=*(int *)_buf;
return 0;
}break;
case TH_DECCTL_SET_TELEMETRY_BITS:{
if(_dec==NULL||_buf==NULL)return TH_EFAULT;
if(_buf_sz!=sizeof(int))return TH_EINVAL;
- _dec->telemetry=1;
_dec->telemetry_bits=*(int *)_buf;
return 0;
}break;
@@ -2047,63 +2051,751 @@ int th_decode_ctl(th_dec_ctx *_dec,int _req,void *_buf,
buffers (i.e., decoding did not start on a key frame).
We initialize them to a solid gray here.*/
static void oc_dec_init_dummy_frame(th_dec_ctx *_dec){
- th_info *info;
- size_t yplane_sz;
- size_t cplane_sz;
- int yhstride;
- int yheight;
- int chstride;
- int cheight;
+ th_info *info;
+ size_t yplane_sz;
+ size_t cplane_sz;
+ ptrdiff_t yoffset;
+ int yhstride;
+ int yheight;
+ int chstride;
+ int cheight;
_dec->state.ref_frame_idx[OC_FRAME_GOLD]=0;
_dec->state.ref_frame_idx[OC_FRAME_PREV]=0;
- _dec->state.ref_frame_idx[OC_FRAME_SELF]=1;
+ _dec->state.ref_frame_idx[OC_FRAME_SELF]=0;
+ _dec->state.ref_frame_data[OC_FRAME_GOLD]=
+ _dec->state.ref_frame_data[OC_FRAME_PREV]=
+ _dec->state.ref_frame_data[OC_FRAME_SELF]=
+ _dec->state.ref_frame_bufs[0][0].data;
+ memcpy(_dec->pp_frame_buf,_dec->state.ref_frame_bufs[0],
+ sizeof(_dec->pp_frame_buf[0])*3);
info=&_dec->state.info;
- yhstride=info->frame_width+2*OC_UMV_PADDING;
+ yhstride=abs(_dec->state.ref_ystride[0]);
yheight=info->frame_height+2*OC_UMV_PADDING;
- chstride=yhstride>>!(info->pixel_fmt&1);
+ chstride=abs(_dec->state.ref_ystride[1]);
cheight=yheight>>!(info->pixel_fmt&2);
- yplane_sz=yhstride*(size_t)yheight;
+ yplane_sz=yhstride*(size_t)yheight+16;
cplane_sz=chstride*(size_t)cheight;
- memset(_dec->state.ref_frame_data[0],0x80,yplane_sz+2*cplane_sz);
+ yoffset=yhstride*(ptrdiff_t)(yheight-OC_UMV_PADDING-1)+OC_UMV_PADDING;
+ memset(_dec->state.ref_frame_data[0]-yoffset,0x80,yplane_sz+2*cplane_sz);
+}
+
+#if defined(HAVE_CAIRO)
+static void oc_render_telemetry(th_dec_ctx *_dec,th_ycbcr_buffer _ycbcr,
+ int _telemetry){
+ /*Stuff the plane into cairo.*/
+ cairo_surface_t *cs;
+ unsigned char *data;
+ unsigned char *y_row;
+ unsigned char *u_row;
+ unsigned char *v_row;
+ unsigned char *rgb_row;
+ int cstride;
+ int w;
+ int h;
+ int x;
+ int y;
+ int hdec;
+ int vdec;
+ w=_ycbcr[0].width;
+ h=_ycbcr[0].height;
+ hdec=!(_dec->state.info.pixel_fmt&1);
+ vdec=!(_dec->state.info.pixel_fmt&2);
+ /*Lazy data buffer init.
+ We could try to re-use the post-processing buffer, which would save
+ memory, but complicate the allocation logic there.
+ I don't think anyone cares about memory usage when using telemetry; it is
+ not meant for embedded devices.*/
+ if(_dec->telemetry_frame_data==NULL){
+ _dec->telemetry_frame_data=_ogg_malloc(
+ (w*h+2*(w>>hdec)*(h>>vdec))*sizeof(*_dec->telemetry_frame_data));
+ if(_dec->telemetry_frame_data==NULL)return;
+ }
+ cs=cairo_image_surface_create(CAIRO_FORMAT_RGB24,w,h);
+ /*Sadly, no YUV support in Cairo (yet); convert into the RGB buffer.*/
+ data=cairo_image_surface_get_data(cs);
+ if(data==NULL){
+ cairo_surface_destroy(cs);
+ return;
+ }
+ cstride=cairo_image_surface_get_stride(cs);
+ y_row=_ycbcr[0].data;
+ u_row=_ycbcr[1].data;
+ v_row=_ycbcr[2].data;
+ rgb_row=data;
+ for(y=0;y<h;y++){
+ for(x=0;x<w;x++){
+ int r;
+ int g;
+ int b;
+ r=(1904000*y_row[x]+2609823*v_row[x>>hdec]-363703744)/1635200;
+ g=(3827562*y_row[x]-1287801*u_row[x>>hdec]
+ -2672387*v_row[x>>hdec]+447306710)/3287200;
+ b=(952000*y_row[x]+1649289*u_row[x>>hdec]-225932192)/817600;
+ rgb_row[4*x+0]=OC_CLAMP255(b);
+ rgb_row[4*x+1]=OC_CLAMP255(g);
+ rgb_row[4*x+2]=OC_CLAMP255(r);
+ }
+ y_row+=_ycbcr[0].stride;
+ u_row+=_ycbcr[1].stride&-((y&1)|!vdec);
+ v_row+=_ycbcr[2].stride&-((y&1)|!vdec);
+ rgb_row+=cstride;
+ }
+ /*Draw coded identifier for each macroblock (stored in Hilbert order).*/
+ {
+ cairo_t *c;
+ const oc_fragment *frags;
+ oc_mv *frag_mvs;
+ const signed char *mb_modes;
+ oc_mb_map *mb_maps;
+ size_t nmbs;
+ size_t mbi;
+ int row2;
+ int col2;
+ int qim[3]={0,0,0};
+ if(_dec->state.nqis==2){
+ int bqi;
+ bqi=_dec->state.qis[0];
+ if(_dec->state.qis[1]>bqi)qim[1]=1;
+ if(_dec->state.qis[1]<bqi)qim[1]=-1;
+ }
+ if(_dec->state.nqis==3){
+ int bqi;
+ int cqi;
+ int dqi;
+ bqi=_dec->state.qis[0];
+ cqi=_dec->state.qis[1];
+ dqi=_dec->state.qis[2];
+ if(cqi>bqi&&dqi>bqi){
+ if(dqi>cqi){
+ qim[1]=1;
+ qim[2]=2;
+ }
+ else{
+ qim[1]=2;
+ qim[2]=1;
+ }
+ }
+ else if(cqi<bqi&&dqi<bqi){
+ if(dqi<cqi){
+ qim[1]=-1;
+ qim[2]=-2;
+ }
+ else{
+ qim[1]=-2;
+ qim[2]=-1;
+ }
+ }
+ else{
+ if(cqi<bqi)qim[1]=-1;
+ else qim[1]=1;
+ if(dqi<bqi)qim[2]=-1;
+ else qim[2]=1;
+ }
+ }
+ c=cairo_create(cs);
+ frags=_dec->state.frags;
+ frag_mvs=_dec->state.frag_mvs;
+ mb_modes=_dec->state.mb_modes;
+ mb_maps=_dec->state.mb_maps;
+ nmbs=_dec->state.nmbs;
+ row2=0;
+ col2=0;
+ for(mbi=0;mbi<nmbs;mbi++){
+ float x;
+ float y;
+ int bi;
+ y=h-(row2+((col2+1>>1)&1))*16-16;
+ x=(col2>>1)*16;
+ cairo_set_line_width(c,1.);
+ /*Keyframe (all intra) red box.*/
+ if(_dec->state.frame_type==OC_INTRA_FRAME){
+ if(_dec->telemetry_mbmode&0x02){
+ cairo_set_source_rgba(c,1.,0,0,.5);
+ cairo_rectangle(c,x+2.5,y+2.5,11,11);
+ cairo_stroke_preserve(c);
+ cairo_set_source_rgba(c,1.,0,0,.25);
+ cairo_fill(c);
+ }
+ }
+ else{
+ ptrdiff_t fragi;
+ int frag_mvx;
+ int frag_mvy;
+ for(bi=0;bi<4;bi++){
+ fragi=mb_maps[mbi][0][bi];
+ if(fragi>=0&&frags[fragi].coded){
+ frag_mvx=OC_MV_X(frag_mvs[fragi]);
+ frag_mvy=OC_MV_Y(frag_mvs[fragi]);
+ break;
+ }
+ }
+ if(bi<4){
+ switch(mb_modes[mbi]){
+ case OC_MODE_INTRA:{
+ if(_dec->telemetry_mbmode&0x02){
+ cairo_set_source_rgba(c,1.,0,0,.5);
+ cairo_rectangle(c,x+2.5,y+2.5,11,11);
+ cairo_stroke_preserve(c);
+ cairo_set_source_rgba(c,1.,0,0,.25);
+ cairo_fill(c);
+ }
+ }break;
+ case OC_MODE_INTER_NOMV:{
+ if(_dec->telemetry_mbmode&0x01){
+ cairo_set_source_rgba(c,0,0,1.,.5);
+ cairo_rectangle(c,x+2.5,y+2.5,11,11);
+ cairo_stroke_preserve(c);
+ cairo_set_source_rgba(c,0,0,1.,.25);
+ cairo_fill(c);
+ }
+ }break;
+ case OC_MODE_INTER_MV:{
+ if(_dec->telemetry_mbmode&0x04){
+ cairo_rectangle(c,x+2.5,y+2.5,11,11);
+ cairo_set_source_rgba(c,0,1.,0,.5);
+ cairo_stroke(c);
+ }
+ if(_dec->telemetry_mv&0x04){
+ cairo_move_to(c,x+8+frag_mvx,y+8-frag_mvy);
+ cairo_set_source_rgba(c,1.,1.,1.,.9);
+ cairo_set_line_width(c,3.);
+ cairo_line_to(c,x+8+frag_mvx*.66,y+8-frag_mvy*.66);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,2.);
+ cairo_line_to(c,x+8+frag_mvx*.33,y+8-frag_mvy*.33);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,1.);
+ cairo_line_to(c,x+8,y+8);
+ cairo_stroke(c);
+ }
+ }break;
+ case OC_MODE_INTER_MV_LAST:{
+ if(_dec->telemetry_mbmode&0x08){
+ cairo_rectangle(c,x+2.5,y+2.5,11,11);
+ cairo_set_source_rgba(c,0,1.,0,.5);
+ cairo_move_to(c,x+13.5,y+2.5);
+ cairo_line_to(c,x+2.5,y+8);
+ cairo_line_to(c,x+13.5,y+13.5);
+ cairo_stroke(c);
+ }
+ if(_dec->telemetry_mv&0x08){
+ cairo_move_to(c,x+8+frag_mvx,y+8-frag_mvy);
+ cairo_set_source_rgba(c,1.,1.,1.,.9);
+ cairo_set_line_width(c,3.);
+ cairo_line_to(c,x+8+frag_mvx*.66,y+8-frag_mvy*.66);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,2.);
+ cairo_line_to(c,x+8+frag_mvx*.33,y+8-frag_mvy*.33);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,1.);
+ cairo_line_to(c,x+8,y+8);
+ cairo_stroke(c);
+ }
+ }break;
+ case OC_MODE_INTER_MV_LAST2:{
+ if(_dec->telemetry_mbmode&0x10){
+ cairo_rectangle(c,x+2.5,y+2.5,11,11);
+ cairo_set_source_rgba(c,0,1.,0,.5);
+ cairo_move_to(c,x+8,y+2.5);
+ cairo_line_to(c,x+2.5,y+8);
+ cairo_line_to(c,x+8,y+13.5);
+ cairo_move_to(c,x+13.5,y+2.5);
+ cairo_line_to(c,x+8,y+8);
+ cairo_line_to(c,x+13.5,y+13.5);
+ cairo_stroke(c);
+ }
+ if(_dec->telemetry_mv&0x10){
+ cairo_move_to(c,x+8+frag_mvx,y+8-frag_mvy);
+ cairo_set_source_rgba(c,1.,1.,1.,.9);
+ cairo_set_line_width(c,3.);
+ cairo_line_to(c,x+8+frag_mvx*.66,y+8-frag_mvy*.66);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,2.);
+ cairo_line_to(c,x+8+frag_mvx*.33,y+8-frag_mvy*.33);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,1.);
+ cairo_line_to(c,x+8,y+8);
+ cairo_stroke(c);
+ }
+ }break;
+ case OC_MODE_GOLDEN_NOMV:{
+ if(_dec->telemetry_mbmode&0x20){
+ cairo_set_source_rgba(c,1.,1.,0,.5);
+ cairo_rectangle(c,x+2.5,y+2.5,11,11);
+ cairo_stroke_preserve(c);
+ cairo_set_source_rgba(c,1.,1.,0,.25);
+ cairo_fill(c);
+ }
+ }break;
+ case OC_MODE_GOLDEN_MV:{
+ if(_dec->telemetry_mbmode&0x40){
+ cairo_rectangle(c,x+2.5,y+2.5,11,11);
+ cairo_set_source_rgba(c,1.,1.,0,.5);
+ cairo_stroke(c);
+ }
+ if(_dec->telemetry_mv&0x40){
+ cairo_move_to(c,x+8+frag_mvx,y+8-frag_mvy);
+ cairo_set_source_rgba(c,1.,1.,1.,.9);
+ cairo_set_line_width(c,3.);
+ cairo_line_to(c,x+8+frag_mvx*.66,y+8-frag_mvy*.66);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,2.);
+ cairo_line_to(c,x+8+frag_mvx*.33,y+8-frag_mvy*.33);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,1.);
+ cairo_line_to(c,x+8,y+8);
+ cairo_stroke(c);
+ }
+ }break;
+ case OC_MODE_INTER_MV_FOUR:{
+ if(_dec->telemetry_mbmode&0x80){
+ cairo_rectangle(c,x+2.5,y+2.5,4,4);
+ cairo_rectangle(c,x+9.5,y+2.5,4,4);
+ cairo_rectangle(c,x+2.5,y+9.5,4,4);
+ cairo_rectangle(c,x+9.5,y+9.5,4,4);
+ cairo_set_source_rgba(c,0,1.,0,.5);
+ cairo_stroke(c);
+ }
+ /*4mv is odd, coded in raster order.*/
+ fragi=mb_maps[mbi][0][0];
+ if(frags[fragi].coded&&_dec->telemetry_mv&0x80){
+ frag_mvx=OC_MV_X(frag_mvs[fragi]);
+                frag_mvy=OC_MV_Y(frag_mvs[fragi]);
+ cairo_move_to(c,x+4+frag_mvx,y+12-frag_mvy);
+ cairo_set_source_rgba(c,1.,1.,1.,.9);
+ cairo_set_line_width(c,3.);
+ cairo_line_to(c,x+4+frag_mvx*.66,y+12-frag_mvy*.66);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,2.);
+ cairo_line_to(c,x+4+frag_mvx*.33,y+12-frag_mvy*.33);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,1.);
+ cairo_line_to(c,x+4,y+12);
+ cairo_stroke(c);
+ }
+ fragi=mb_maps[mbi][0][1];
+ if(frags[fragi].coded&&_dec->telemetry_mv&0x80){
+ frag_mvx=OC_MV_X(frag_mvs[fragi]);
+                frag_mvy=OC_MV_Y(frag_mvs[fragi]);
+ cairo_move_to(c,x+12+frag_mvx,y+12-frag_mvy);
+ cairo_set_source_rgba(c,1.,1.,1.,.9);
+ cairo_set_line_width(c,3.);
+ cairo_line_to(c,x+12+frag_mvx*.66,y+12-frag_mvy*.66);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,2.);
+ cairo_line_to(c,x+12+frag_mvx*.33,y+12-frag_mvy*.33);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,1.);
+ cairo_line_to(c,x+12,y+12);
+ cairo_stroke(c);
+ }
+ fragi=mb_maps[mbi][0][2];
+ if(frags[fragi].coded&&_dec->telemetry_mv&0x80){
+ frag_mvx=OC_MV_X(frag_mvs[fragi]);
+                frag_mvy=OC_MV_Y(frag_mvs[fragi]);
+ cairo_move_to(c,x+4+frag_mvx,y+4-frag_mvy);
+ cairo_set_source_rgba(c,1.,1.,1.,.9);
+ cairo_set_line_width(c,3.);
+ cairo_line_to(c,x+4+frag_mvx*.66,y+4-frag_mvy*.66);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,2.);
+ cairo_line_to(c,x+4+frag_mvx*.33,y+4-frag_mvy*.33);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,1.);
+ cairo_line_to(c,x+4,y+4);
+ cairo_stroke(c);
+ }
+ fragi=mb_maps[mbi][0][3];
+ if(frags[fragi].coded&&_dec->telemetry_mv&0x80){
+ frag_mvx=OC_MV_X(frag_mvs[fragi]);
+                frag_mvy=OC_MV_Y(frag_mvs[fragi]);
+ cairo_move_to(c,x+12+frag_mvx,y+4-frag_mvy);
+ cairo_set_source_rgba(c,1.,1.,1.,.9);
+ cairo_set_line_width(c,3.);
+ cairo_line_to(c,x+12+frag_mvx*.66,y+4-frag_mvy*.66);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,2.);
+ cairo_line_to(c,x+12+frag_mvx*.33,y+4-frag_mvy*.33);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,1.);
+ cairo_line_to(c,x+12,y+4);
+ cairo_stroke(c);
+ }
+ }break;
+ }
+ }
+ }
+ /*qii illustration.*/
+ if(_dec->telemetry_qi&0x2){
+ cairo_set_line_cap(c,CAIRO_LINE_CAP_SQUARE);
+ for(bi=0;bi<4;bi++){
+ ptrdiff_t fragi;
+ int qiv;
+ int xp;
+ int yp;
+ xp=x+(bi&1)*8;
+ yp=y+8-(bi&2)*4;
+ fragi=mb_maps[mbi][0][bi];
+ if(fragi>=0&&frags[fragi].coded){
+ qiv=qim[frags[fragi].qii];
+ cairo_set_line_width(c,3.);
+ cairo_set_source_rgba(c,0.,0.,0.,.5);
+ switch(qiv){
+ /*Double plus:*/
+ case 2:{
+ if((bi&1)^((bi&2)>>1)){
+ cairo_move_to(c,xp+2.5,yp+1.5);
+ cairo_line_to(c,xp+2.5,yp+3.5);
+ cairo_move_to(c,xp+1.5,yp+2.5);
+ cairo_line_to(c,xp+3.5,yp+2.5);
+ cairo_move_to(c,xp+5.5,yp+4.5);
+ cairo_line_to(c,xp+5.5,yp+6.5);
+ cairo_move_to(c,xp+4.5,yp+5.5);
+ cairo_line_to(c,xp+6.5,yp+5.5);
+ cairo_stroke_preserve(c);
+ cairo_set_source_rgba(c,0.,1.,1.,1.);
+ }
+ else{
+ cairo_move_to(c,xp+5.5,yp+1.5);
+ cairo_line_to(c,xp+5.5,yp+3.5);
+ cairo_move_to(c,xp+4.5,yp+2.5);
+ cairo_line_to(c,xp+6.5,yp+2.5);
+ cairo_move_to(c,xp+2.5,yp+4.5);
+ cairo_line_to(c,xp+2.5,yp+6.5);
+ cairo_move_to(c,xp+1.5,yp+5.5);
+ cairo_line_to(c,xp+3.5,yp+5.5);
+ cairo_stroke_preserve(c);
+ cairo_set_source_rgba(c,0.,1.,1.,1.);
+ }
+ }break;
+ /*Double minus:*/
+ case -2:{
+ cairo_move_to(c,xp+2.5,yp+2.5);
+ cairo_line_to(c,xp+5.5,yp+2.5);
+ cairo_move_to(c,xp+2.5,yp+5.5);
+ cairo_line_to(c,xp+5.5,yp+5.5);
+ cairo_stroke_preserve(c);
+ cairo_set_source_rgba(c,1.,1.,1.,1.);
+ }break;
+ /*Plus:*/
+ case 1:{
+ if((bi&2)==0)yp-=2;
+ if((bi&1)==0)xp-=2;
+ cairo_move_to(c,xp+4.5,yp+2.5);
+ cairo_line_to(c,xp+4.5,yp+6.5);
+ cairo_move_to(c,xp+2.5,yp+4.5);
+ cairo_line_to(c,xp+6.5,yp+4.5);
+ cairo_stroke_preserve(c);
+ cairo_set_source_rgba(c,.1,1.,.3,1.);
+ break;
+ }
+ /*Fall through.*/
+ /*Minus:*/
+ case -1:{
+ cairo_move_to(c,xp+2.5,yp+4.5);
+ cairo_line_to(c,xp+6.5,yp+4.5);
+ cairo_stroke_preserve(c);
+ cairo_set_source_rgba(c,1.,.3,.1,1.);
+ }break;
+ default:continue;
+ }
+ cairo_set_line_width(c,1.);
+ cairo_stroke(c);
+ }
+ }
+ }
+ col2++;
+ if((col2>>1)>=_dec->state.nhmbs){
+ col2=0;
+ row2+=2;
+ }
+ }
+ /*Bit usage indicator[s]:*/
+ if(_dec->telemetry_bits){
+ int widths[6];
+ int fpsn;
+ int fpsd;
+ int mult;
+ int fullw;
+ int padw;
+ int i;
+ fpsn=_dec->state.info.fps_numerator;
+ fpsd=_dec->state.info.fps_denominator;
+ mult=(_dec->telemetry_bits>=0xFF?1:_dec->telemetry_bits);
+ fullw=250.f*h*fpsd*mult/fpsn;
+ padw=w-24;
+ /*Header and coded block bits.*/
+ if(_dec->telemetry_frame_bytes<0||
+ _dec->telemetry_frame_bytes==OC_LOTS_OF_BITS){
+ _dec->telemetry_frame_bytes=0;
+ }
+ if(_dec->telemetry_coding_bytes<0||
+ _dec->telemetry_coding_bytes>_dec->telemetry_frame_bytes){
+ _dec->telemetry_coding_bytes=0;
+ }
+ if(_dec->telemetry_mode_bytes<0||
+ _dec->telemetry_mode_bytes>_dec->telemetry_frame_bytes){
+ _dec->telemetry_mode_bytes=0;
+ }
+ if(_dec->telemetry_mv_bytes<0||
+ _dec->telemetry_mv_bytes>_dec->telemetry_frame_bytes){
+ _dec->telemetry_mv_bytes=0;
+ }
+ if(_dec->telemetry_qi_bytes<0||
+ _dec->telemetry_qi_bytes>_dec->telemetry_frame_bytes){
+ _dec->telemetry_qi_bytes=0;
+ }
+ if(_dec->telemetry_dc_bytes<0||
+ _dec->telemetry_dc_bytes>_dec->telemetry_frame_bytes){
+ _dec->telemetry_dc_bytes=0;
+ }
+ widths[0]=padw*
+ (_dec->telemetry_frame_bytes-_dec->telemetry_coding_bytes)/fullw;
+ widths[1]=padw*
+ (_dec->telemetry_coding_bytes-_dec->telemetry_mode_bytes)/fullw;
+ widths[2]=padw*
+ (_dec->telemetry_mode_bytes-_dec->telemetry_mv_bytes)/fullw;
+ widths[3]=padw*(_dec->telemetry_mv_bytes-_dec->telemetry_qi_bytes)/fullw;
+ widths[4]=padw*(_dec->telemetry_qi_bytes-_dec->telemetry_dc_bytes)/fullw;
+ widths[5]=padw*(_dec->telemetry_dc_bytes)/fullw;
+ for(i=0;i<6;i++)if(widths[i]>w)widths[i]=w;
+ cairo_set_source_rgba(c,.0,.0,.0,.6);
+ cairo_rectangle(c,10,h-33,widths[0]+1,5);
+ cairo_rectangle(c,10,h-29,widths[1]+1,5);
+ cairo_rectangle(c,10,h-25,widths[2]+1,5);
+ cairo_rectangle(c,10,h-21,widths[3]+1,5);
+ cairo_rectangle(c,10,h-17,widths[4]+1,5);
+ cairo_rectangle(c,10,h-13,widths[5]+1,5);
+ cairo_fill(c);
+ cairo_set_source_rgb(c,1,0,0);
+ cairo_rectangle(c,10.5,h-32.5,widths[0],4);
+ cairo_fill(c);
+ cairo_set_source_rgb(c,0,1,0);
+ cairo_rectangle(c,10.5,h-28.5,widths[1],4);
+ cairo_fill(c);
+ cairo_set_source_rgb(c,0,0,1);
+ cairo_rectangle(c,10.5,h-24.5,widths[2],4);
+ cairo_fill(c);
+ cairo_set_source_rgb(c,.6,.4,.0);
+ cairo_rectangle(c,10.5,h-20.5,widths[3],4);
+ cairo_fill(c);
+ cairo_set_source_rgb(c,.3,.3,.3);
+ cairo_rectangle(c,10.5,h-16.5,widths[4],4);
+ cairo_fill(c);
+ cairo_set_source_rgb(c,.5,.5,.8);
+ cairo_rectangle(c,10.5,h-12.5,widths[5],4);
+ cairo_fill(c);
+ }
+ /*Master qi indicator[s]:*/
+ if(_dec->telemetry_qi&0x1){
+ cairo_text_extents_t extents;
+ char buffer[10];
+ int p;
+ int y;
+ p=0;
+ y=h-7.5;
+ if(_dec->state.qis[0]>=10)buffer[p++]=48+_dec->state.qis[0]/10;
+ buffer[p++]=48+_dec->state.qis[0]%10;
+ if(_dec->state.nqis>=2){
+ buffer[p++]=' ';
+ if(_dec->state.qis[1]>=10)buffer[p++]=48+_dec->state.qis[1]/10;
+ buffer[p++]=48+_dec->state.qis[1]%10;
+ }
+ if(_dec->state.nqis==3){
+ buffer[p++]=' ';
+ if(_dec->state.qis[2]>=10)buffer[p++]=48+_dec->state.qis[2]/10;
+ buffer[p++]=48+_dec->state.qis[2]%10;
+ }
+ buffer[p++]='\0';
+ cairo_select_font_face(c,"sans",
+ CAIRO_FONT_SLANT_NORMAL,CAIRO_FONT_WEIGHT_BOLD);
+ cairo_set_font_size(c,18);
+ cairo_text_extents(c,buffer,&extents);
+ cairo_set_source_rgb(c,1,1,1);
+ cairo_move_to(c,w-extents.x_advance-10,y);
+ cairo_show_text(c,buffer);
+ cairo_set_source_rgb(c,0,0,0);
+ cairo_move_to(c,w-extents.x_advance-10,y);
+ cairo_text_path(c,buffer);
+ cairo_set_line_width(c,.8);
+ cairo_set_line_join(c,CAIRO_LINE_JOIN_ROUND);
+ cairo_stroke(c);
+ }
+ cairo_destroy(c);
+ }
+ /*Out of the Cairo plane into the telemetry YUV buffer.*/
+ _ycbcr[0].data=_dec->telemetry_frame_data;
+ _ycbcr[0].stride=_ycbcr[0].width;
+ _ycbcr[1].data=_ycbcr[0].data+h*_ycbcr[0].stride;
+ _ycbcr[1].stride=_ycbcr[1].width;
+ _ycbcr[2].data=_ycbcr[1].data+(h>>vdec)*_ycbcr[1].stride;
+ _ycbcr[2].stride=_ycbcr[2].width;
+ y_row=_ycbcr[0].data;
+ u_row=_ycbcr[1].data;
+ v_row=_ycbcr[2].data;
+ rgb_row=data;
+ /*This is one of the few places it's worth handling chroma on a
+ case-by-case basis.*/
+ switch(_dec->state.info.pixel_fmt){
+ case TH_PF_420:{
+ for(y=0;y<h;y+=2){
+ unsigned char *y_row2;
+ unsigned char *rgb_row2;
+ y_row2=y_row+_ycbcr[0].stride;
+ rgb_row2=rgb_row+cstride;
+ for(x=0;x<w;x+=2){
+ int y;
+ int u;
+ int v;
+ y=(65481*rgb_row[4*x+2]+128553*rgb_row[4*x+1]
+ +24966*rgb_row[4*x+0]+4207500)/255000;
+ y_row[x]=OC_CLAMP255(y);
+ y=(65481*rgb_row[4*x+6]+128553*rgb_row[4*x+5]
+ +24966*rgb_row[4*x+4]+4207500)/255000;
+ y_row[x+1]=OC_CLAMP255(y);
+ y=(65481*rgb_row2[4*x+2]+128553*rgb_row2[4*x+1]
+ +24966*rgb_row2[4*x+0]+4207500)/255000;
+ y_row2[x]=OC_CLAMP255(y);
+ y=(65481*rgb_row2[4*x+6]+128553*rgb_row2[4*x+5]
+ +24966*rgb_row2[4*x+4]+4207500)/255000;
+ y_row2[x+1]=OC_CLAMP255(y);
+ u=(-8372*(rgb_row[4*x+2]+rgb_row[4*x+6]
+ +rgb_row2[4*x+2]+rgb_row2[4*x+6])
+ -16436*(rgb_row[4*x+1]+rgb_row[4*x+5]
+ +rgb_row2[4*x+1]+rgb_row2[4*x+5])
+ +24808*(rgb_row[4*x+0]+rgb_row[4*x+4]
+ +rgb_row2[4*x+0]+rgb_row2[4*x+4])+29032005)/225930;
+ v=(39256*(rgb_row[4*x+2]+rgb_row[4*x+6]
+ +rgb_row2[4*x+2]+rgb_row2[4*x+6])
+ -32872*(rgb_row[4*x+1]+rgb_row[4*x+5]
+ +rgb_row2[4*x+1]+rgb_row2[4*x+5])
+ -6384*(rgb_row[4*x+0]+rgb_row[4*x+4]
+ +rgb_row2[4*x+0]+rgb_row2[4*x+4])+45940035)/357510;
+ u_row[x>>1]=OC_CLAMP255(u);
+ v_row[x>>1]=OC_CLAMP255(v);
+ }
+ y_row+=_ycbcr[0].stride<<1;
+ u_row+=_ycbcr[1].stride;
+ v_row+=_ycbcr[2].stride;
+ rgb_row+=cstride<<1;
+ }
+ }break;
+ case TH_PF_422:{
+ for(y=0;y<h;y++){
+ for(x=0;x<w;x+=2){
+ int y;
+ int u;
+ int v;
+ y=(65481*rgb_row[4*x+2]+128553*rgb_row[4*x+1]
+ +24966*rgb_row[4*x+0]+4207500)/255000;
+ y_row[x]=OC_CLAMP255(y);
+ y=(65481*rgb_row[4*x+6]+128553*rgb_row[4*x+5]
+ +24966*rgb_row[4*x+4]+4207500)/255000;
+ y_row[x+1]=OC_CLAMP255(y);
+ u=(-16744*(rgb_row[4*x+2]+rgb_row[4*x+6])
+ -32872*(rgb_row[4*x+1]+rgb_row[4*x+5])
+ +49616*(rgb_row[4*x+0]+rgb_row[4*x+4])+29032005)/225930;
+ v=(78512*(rgb_row[4*x+2]+rgb_row[4*x+6])
+ -65744*(rgb_row[4*x+1]+rgb_row[4*x+5])
+ -12768*(rgb_row[4*x+0]+rgb_row[4*x+4])+45940035)/357510;
+ u_row[x>>1]=OC_CLAMP255(u);
+ v_row[x>>1]=OC_CLAMP255(v);
+ }
+ y_row+=_ycbcr[0].stride;
+ u_row+=_ycbcr[1].stride;
+ v_row+=_ycbcr[2].stride;
+ rgb_row+=cstride;
+ }
+ }break;
+ /*case TH_PF_444:*/
+ default:{
+ for(y=0;y<h;y++){
+ for(x=0;x<w;x++){
+ int y;
+ int u;
+ int v;
+ y=(65481*rgb_row[4*x+2]+128553*rgb_row[4*x+1]
+ +24966*rgb_row[4*x+0]+4207500)/255000;
+ u=(-33488*rgb_row[4*x+2]-65744*rgb_row[4*x+1]
+ +99232*rgb_row[4*x+0]+29032005)/225930;
+ v=(157024*rgb_row[4*x+2]-131488*rgb_row[4*x+1]
+ -25536*rgb_row[4*x+0]+45940035)/357510;
+ y_row[x]=OC_CLAMP255(y);
+ u_row[x]=OC_CLAMP255(u);
+ v_row[x]=OC_CLAMP255(v);
+ }
+ y_row+=_ycbcr[0].stride;
+ u_row+=_ycbcr[1].stride;
+ v_row+=_ycbcr[2].stride;
+ rgb_row+=cstride;
+ }
+ }break;
+ }
+ /*Finished.
+ Destroy the surface.*/
+ cairo_surface_destroy(cs);
}
+#endif
int th_decode_packetin(th_dec_ctx *_dec,const ogg_packet *_op,
ogg_int64_t *_granpos){
int ret;
if(_dec==NULL||_op==NULL)return TH_EFAULT;
/*A completely empty packet indicates a dropped frame and is treated exactly
- like an inter frame with no coded blocks.
- Only proceed if we have a non-empty packet.*/
- if(_op->bytes!=0){
- oc_dec_pipeline_state pipe;
- th_ycbcr_buffer stripe_buf;
- int stripe_fragy;
- int refi;
- int pli;
- int notstart;
- int notdone;
+ like an inter frame with no coded blocks.*/
+ if(_op->bytes==0){
+ _dec->state.frame_type=OC_INTER_FRAME;
+ _dec->state.ntotal_coded_fragis=0;
+ }
+ else{
oc_pack_readinit(&_dec->opb,_op->packet,_op->bytes);
+ ret=oc_dec_frame_header_unpack(_dec);
+ if(ret<0)return ret;
+ if(_dec->state.frame_type==OC_INTRA_FRAME)oc_dec_mark_all_intra(_dec);
+ else oc_dec_coded_flags_unpack(_dec);
+ }
+ /*If there have been no reference frames, and we need one, initialize one.*/
+ if(_dec->state.frame_type!=OC_INTRA_FRAME&&
+ (_dec->state.ref_frame_idx[OC_FRAME_GOLD]<0||
+ _dec->state.ref_frame_idx[OC_FRAME_PREV]<0)){
+ oc_dec_init_dummy_frame(_dec);
+ }
+ /*If this was an inter frame with no coded blocks...*/
+ if(_dec->state.ntotal_coded_fragis<=0){
+ /*Just update the granule position and return.*/
+ _dec->state.granpos=(_dec->state.keyframe_num+_dec->state.granpos_bias<<
+ _dec->state.info.keyframe_granule_shift)
+ +(_dec->state.curframe_num-_dec->state.keyframe_num);
+ _dec->state.curframe_num++;
+ if(_granpos!=NULL)*_granpos=_dec->state.granpos;
+ return TH_DUPFRAME;
+ }
+ else{
+ th_ycbcr_buffer stripe_buf;
+ int stripe_fragy;
+ int refi;
+ int pli;
+ int notstart;
+ int notdone;
+#ifdef HAVE_CAIRO
+ int telemetry;
+ /*Save the current telemetry state.
+ This prevents it from being modified in the middle of decoding this
+ frame, which could cause us to skip calls to the striped decoding
+ callback.*/
+ telemetry=_dec->telemetry_mbmode||_dec->telemetry_mv||
+ _dec->telemetry_qi||_dec->telemetry_bits;
+#endif
+ /*Select a free buffer to use for the reconstructed version of this frame.*/
+ for(refi=0;refi==_dec->state.ref_frame_idx[OC_FRAME_GOLD]||
+ refi==_dec->state.ref_frame_idx[OC_FRAME_PREV];refi++);
+ _dec->state.ref_frame_idx[OC_FRAME_SELF]=refi;
+ _dec->state.ref_frame_data[OC_FRAME_SELF]=
+ _dec->state.ref_frame_bufs[refi][0].data;
#if defined(HAVE_CAIRO)
_dec->telemetry_frame_bytes=_op->bytes;
#endif
- ret=oc_dec_frame_header_unpack(_dec);
- if(ret<0)return ret;
- /*Select a free buffer to use for the reconstructed version of this
- frame.*/
- if(_dec->state.frame_type!=OC_INTRA_FRAME&&
- (_dec->state.ref_frame_idx[OC_FRAME_GOLD]<0||
- _dec->state.ref_frame_idx[OC_FRAME_PREV]<0)){
- /*No reference frames yet!*/
- oc_dec_init_dummy_frame(_dec);
- refi=_dec->state.ref_frame_idx[OC_FRAME_SELF];
- }
- else{
- for(refi=0;refi==_dec->state.ref_frame_idx[OC_FRAME_GOLD]||
- refi==_dec->state.ref_frame_idx[OC_FRAME_PREV];refi++);
- _dec->state.ref_frame_idx[OC_FRAME_SELF]=refi;
- }
if(_dec->state.frame_type==OC_INTRA_FRAME){
- oc_dec_mark_all_intra(_dec);
_dec->state.keyframe_num=_dec->state.curframe_num;
#if defined(HAVE_CAIRO)
_dec->telemetry_coding_bytes=
@@ -2112,7 +2804,6 @@ int th_decode_packetin(th_dec_ctx *_dec,const ogg_packet *_op,
#endif
}
else{
- oc_dec_coded_flags_unpack(_dec);
#if defined(HAVE_CAIRO)
_dec->telemetry_coding_bytes=oc_pack_bytes_left(&_dec->opb);
#endif
@@ -2160,15 +2851,15 @@ int th_decode_packetin(th_dec_ctx *_dec,const ogg_packet *_op,
An application callback allows further application processing (blitting
to video memory, color conversion, etc.) to also use the data while it's
in cache.*/
- oc_dec_pipeline_init(_dec,&pipe);
+ oc_dec_pipeline_init(_dec,&_dec->pipe);
oc_ycbcr_buffer_flip(stripe_buf,_dec->pp_frame_buf);
notstart=0;
notdone=1;
- for(stripe_fragy=0;notdone;stripe_fragy+=pipe.mcu_nvfrags){
+ for(stripe_fragy=0;notdone;stripe_fragy+=_dec->pipe.mcu_nvfrags){
int avail_fragy0;
int avail_fragy_end;
avail_fragy0=avail_fragy_end=_dec->state.fplanes[0].nvfrags;
- notdone=stripe_fragy+pipe.mcu_nvfrags<avail_fragy_end;
+ notdone=stripe_fragy+_dec->pipe.mcu_nvfrags<avail_fragy_end;
for(pli=0;pli<3;pli++){
oc_fragment_plane *fplane;
int frag_shift;
@@ -2179,45 +2870,46 @@ int th_decode_packetin(th_dec_ctx *_dec,const ogg_packet *_op,
/*Compute the first and last fragment row of the current MCU for this
plane.*/
frag_shift=pli!=0&&!(_dec->state.info.pixel_fmt&2);
- pipe.fragy0[pli]=stripe_fragy>>frag_shift;
- pipe.fragy_end[pli]=OC_MINI(fplane->nvfrags,
- pipe.fragy0[pli]+(pipe.mcu_nvfrags>>frag_shift));
- oc_dec_dc_unpredict_mcu_plane(_dec,&pipe,pli);
- oc_dec_frags_recon_mcu_plane(_dec,&pipe,pli);
+ _dec->pipe.fragy0[pli]=stripe_fragy>>frag_shift;
+ _dec->pipe.fragy_end[pli]=OC_MINI(fplane->nvfrags,
+ _dec->pipe.fragy0[pli]+(_dec->pipe.mcu_nvfrags>>frag_shift));
+ oc_dec_dc_unpredict_mcu_plane(_dec,&_dec->pipe,pli);
+ oc_dec_frags_recon_mcu_plane(_dec,&_dec->pipe,pli);
sdelay=edelay=0;
- if(pipe.loop_filter){
+ if(_dec->pipe.loop_filter){
sdelay+=notstart;
edelay+=notdone;
- oc_state_loop_filter_frag_rows(&_dec->state,pipe.bounding_values,
- refi,pli,pipe.fragy0[pli]-sdelay,pipe.fragy_end[pli]-edelay);
+ oc_state_loop_filter_frag_rows(&_dec->state,
+ _dec->pipe.bounding_values,OC_FRAME_SELF,pli,
+ _dec->pipe.fragy0[pli]-sdelay,_dec->pipe.fragy_end[pli]-edelay);
}
/*To fill the borders, we have an additional two pixel delay, since a
fragment in the next row could filter its top edge, using two pixels
from a fragment in this row.
But there's no reason to delay a full fragment between the two.*/
oc_state_borders_fill_rows(&_dec->state,refi,pli,
- (pipe.fragy0[pli]-sdelay<<3)-(sdelay<<1),
- (pipe.fragy_end[pli]-edelay<<3)-(edelay<<1));
+ (_dec->pipe.fragy0[pli]-sdelay<<3)-(sdelay<<1),
+ (_dec->pipe.fragy_end[pli]-edelay<<3)-(edelay<<1));
/*Out-of-loop post-processing.*/
pp_offset=3*(pli!=0);
- if(pipe.pp_level>=OC_PP_LEVEL_DEBLOCKY+pp_offset){
+ if(_dec->pipe.pp_level>=OC_PP_LEVEL_DEBLOCKY+pp_offset){
/*Perform de-blocking in one plane.*/
sdelay+=notstart;
edelay+=notdone;
oc_dec_deblock_frag_rows(_dec,_dec->pp_frame_buf,
_dec->state.ref_frame_bufs[refi],pli,
- pipe.fragy0[pli]-sdelay,pipe.fragy_end[pli]-edelay);
- if(pipe.pp_level>=OC_PP_LEVEL_DERINGY+pp_offset){
+ _dec->pipe.fragy0[pli]-sdelay,_dec->pipe.fragy_end[pli]-edelay);
+ if(_dec->pipe.pp_level>=OC_PP_LEVEL_DERINGY+pp_offset){
/*Perform de-ringing in one plane.*/
sdelay+=notstart;
edelay+=notdone;
oc_dec_dering_frag_rows(_dec,_dec->pp_frame_buf,pli,
- pipe.fragy0[pli]-sdelay,pipe.fragy_end[pli]-edelay);
+ _dec->pipe.fragy0[pli]-sdelay,_dec->pipe.fragy_end[pli]-edelay);
}
}
/*If no post-processing is done, we still need to delay a row for the
loop filter, thanks to the strange filtering order VP3 chose.*/
- else if(pipe.loop_filter){
+ else if(_dec->pipe.loop_filter){
sdelay+=notstart;
edelay+=notdone;
}
@@ -2226,11 +2918,16 @@ int th_decode_packetin(th_dec_ctx *_dec,const ogg_packet *_op,
doubled, but luma might have more post-processing filters enabled
than chroma, so we don't know up front which one is the limiting
factor.*/
- avail_fragy0=OC_MINI(avail_fragy0,pipe.fragy0[pli]-sdelay<<frag_shift);
+ avail_fragy0=OC_MINI(avail_fragy0,
+ _dec->pipe.fragy0[pli]-sdelay<<frag_shift);
avail_fragy_end=OC_MINI(avail_fragy_end,
- pipe.fragy_end[pli]-edelay<<frag_shift);
+ _dec->pipe.fragy_end[pli]-edelay<<frag_shift);
}
+#ifdef HAVE_CAIRO
+ if(_dec->stripe_cb.stripe_decoded!=NULL&&!telemetry){
+#else
if(_dec->stripe_cb.stripe_decoded!=NULL){
+#endif
/*The callback might want to use the FPU, so let's make sure they can.
We violate all kinds of ABI restrictions by not doing this until
now, but none of them actually matter since we don't use floating
@@ -2252,692 +2949,44 @@ int th_decode_packetin(th_dec_ctx *_dec,const ogg_packet *_op,
_dec->state.ref_frame_idx[OC_FRAME_GOLD]=
_dec->state.ref_frame_idx[OC_FRAME_PREV]=
_dec->state.ref_frame_idx[OC_FRAME_SELF];
+ _dec->state.ref_frame_data[OC_FRAME_GOLD]=
+ _dec->state.ref_frame_data[OC_FRAME_PREV]=
+ _dec->state.ref_frame_data[OC_FRAME_SELF];
}
else{
/*Otherwise, just replace the previous reference frame.*/
_dec->state.ref_frame_idx[OC_FRAME_PREV]=
_dec->state.ref_frame_idx[OC_FRAME_SELF];
+ _dec->state.ref_frame_data[OC_FRAME_PREV]=
+ _dec->state.ref_frame_data[OC_FRAME_SELF];
}
/*Restore the FPU before dump_frame, since that _does_ use the FPU (for PNG
gamma values, if nothing else).*/
oc_restore_fpu(&_dec->state);
+#ifdef HAVE_CAIRO
+ /*If telemetry ioctls are active, we need to draw to the output buffer.*/
+ if(telemetry){
+ oc_render_telemetry(_dec,stripe_buf,telemetry);
+ oc_ycbcr_buffer_flip(_dec->pp_frame_buf,stripe_buf);
+ /*If we had a striped decoding callback, we skipped calling it above
+ (because the telemetry wasn't rendered yet).
+ Call it now with the whole frame.*/
+ if(_dec->stripe_cb.stripe_decoded!=NULL){
+ (*_dec->stripe_cb.stripe_decoded)(_dec->stripe_cb.ctx,
+ stripe_buf,0,_dec->state.fplanes[0].nvfrags);
+ }
+ }
+#endif
#if defined(OC_DUMP_IMAGES)
- /*Don't dump images for dropped frames.*/
+ /*We only dump images if there were some coded blocks.*/
oc_state_dump_frame(&_dec->state,OC_FRAME_SELF,"dec");
#endif
return 0;
}
- else{
- if(_dec->state.ref_frame_idx[OC_FRAME_GOLD]<0||
- _dec->state.ref_frame_idx[OC_FRAME_PREV]<0){
- int refi;
- /*No reference frames yet!*/
- oc_dec_init_dummy_frame(_dec);
- refi=_dec->state.ref_frame_idx[OC_FRAME_PREV];
- _dec->state.ref_frame_idx[OC_FRAME_SELF]=refi;
- memcpy(_dec->pp_frame_buf,_dec->state.ref_frame_bufs[refi],
- sizeof(_dec->pp_frame_buf[0])*3);
- }
- /*Just update the granule position and return.*/
- _dec->state.granpos=(_dec->state.keyframe_num+_dec->state.granpos_bias<<
- _dec->state.info.keyframe_granule_shift)
- +(_dec->state.curframe_num-_dec->state.keyframe_num);
- _dec->state.curframe_num++;
- if(_granpos!=NULL)*_granpos=_dec->state.granpos;
- return TH_DUPFRAME;
- }
}
int th_decode_ycbcr_out(th_dec_ctx *_dec,th_ycbcr_buffer _ycbcr){
if(_dec==NULL||_ycbcr==NULL)return TH_EFAULT;
oc_ycbcr_buffer_flip(_ycbcr,_dec->pp_frame_buf);
-#if defined(HAVE_CAIRO)
- /*If telemetry ioctls are active, we need to draw to the output buffer.
- Stuff the plane into cairo.*/
- if(_dec->telemetry){
- cairo_surface_t *cs;
- unsigned char *data;
- unsigned char *y_row;
- unsigned char *u_row;
- unsigned char *v_row;
- unsigned char *rgb_row;
- int cstride;
- int w;
- int h;
- int x;
- int y;
- int hdec;
- int vdec;
- w=_ycbcr[0].width;
- h=_ycbcr[0].height;
- hdec=!(_dec->state.info.pixel_fmt&1);
- vdec=!(_dec->state.info.pixel_fmt&2);
- /*Lazy data buffer init.
- We could try to re-use the post-processing buffer, which would save
- memory, but complicate the allocation logic there.
- I don't think anyone cares about memory usage when using telemetry; it is
- not meant for embedded devices.*/
- if(_dec->telemetry_frame_data==NULL){
- _dec->telemetry_frame_data=_ogg_malloc(
- (w*h+2*(w>>hdec)*(h>>vdec))*sizeof(*_dec->telemetry_frame_data));
- if(_dec->telemetry_frame_data==NULL)return 0;
- }
- cs=cairo_image_surface_create(CAIRO_FORMAT_RGB24,w,h);
- /*Sadly, no YUV support in Cairo (yet); convert into the RGB buffer.*/
- data=cairo_image_surface_get_data(cs);
- if(data==NULL){
- cairo_surface_destroy(cs);
- return 0;
- }
- cstride=cairo_image_surface_get_stride(cs);
- y_row=_ycbcr[0].data;
- u_row=_ycbcr[1].data;
- v_row=_ycbcr[2].data;
- rgb_row=data;
- for(y=0;y<h;y++){
- for(x=0;x<w;x++){
- int r;
- int g;
- int b;
- r=(1904000*y_row[x]+2609823*v_row[x>>hdec]-363703744)/1635200;
- g=(3827562*y_row[x]-1287801*u_row[x>>hdec]
- -2672387*v_row[x>>hdec]+447306710)/3287200;
- b=(952000*y_row[x]+1649289*u_row[x>>hdec]-225932192)/817600;
- rgb_row[4*x+0]=OC_CLAMP255(b);
- rgb_row[4*x+1]=OC_CLAMP255(g);
- rgb_row[4*x+2]=OC_CLAMP255(r);
- }
- y_row+=_ycbcr[0].stride;
- u_row+=_ycbcr[1].stride&-((y&1)|!vdec);
- v_row+=_ycbcr[2].stride&-((y&1)|!vdec);
- rgb_row+=cstride;
- }
- /*Draw coded identifier for each macroblock (stored in Hilbert order).*/
- {
- cairo_t *c;
- const oc_fragment *frags;
- oc_mv *frag_mvs;
- const signed char *mb_modes;
- oc_mb_map *mb_maps;
- size_t nmbs;
- size_t mbi;
- int row2;
- int col2;
- int qim[3]={0,0,0};
- if(_dec->state.nqis==2){
- int bqi;
- bqi=_dec->state.qis[0];
- if(_dec->state.qis[1]>bqi)qim[1]=1;
- if(_dec->state.qis[1]<bqi)qim[1]=-1;
- }
- if(_dec->state.nqis==3){
- int bqi;
- int cqi;
- int dqi;
- bqi=_dec->state.qis[0];
- cqi=_dec->state.qis[1];
- dqi=_dec->state.qis[2];
- if(cqi>bqi&&dqi>bqi){
- if(dqi>cqi){
- qim[1]=1;
- qim[2]=2;
- }
- else{
- qim[1]=2;
- qim[2]=1;
- }
- }
- else if(cqi<bqi&&dqi<bqi){
- if(dqi<cqi){
- qim[1]=-1;
- qim[2]=-2;
- }
- else{
- qim[1]=-2;
- qim[2]=-1;
- }
- }
- else{
- if(cqi<bqi)qim[1]=-1;
- else qim[1]=1;
- if(dqi<bqi)qim[2]=-1;
- else qim[2]=1;
- }
- }
- c=cairo_create(cs);
- frags=_dec->state.frags;
- frag_mvs=_dec->state.frag_mvs;
- mb_modes=_dec->state.mb_modes;
- mb_maps=_dec->state.mb_maps;
- nmbs=_dec->state.nmbs;
- row2=0;
- col2=0;
- for(mbi=0;mbi<nmbs;mbi++){
- float x;
- float y;
- int bi;
- y=h-(row2+((col2+1>>1)&1))*16-16;
- x=(col2>>1)*16;
- cairo_set_line_width(c,1.);
- /*Keyframe (all intra) red box.*/
- if(_dec->state.frame_type==OC_INTRA_FRAME){
- if(_dec->telemetry_mbmode&0x02){
- cairo_set_source_rgba(c,1.,0,0,.5);
- cairo_rectangle(c,x+2.5,y+2.5,11,11);
- cairo_stroke_preserve(c);
- cairo_set_source_rgba(c,1.,0,0,.25);
- cairo_fill(c);
- }
- }
- else{
- const signed char *frag_mv;
- ptrdiff_t fragi;
- for(bi=0;bi<4;bi++){
- fragi=mb_maps[mbi][0][bi];
- if(fragi>=0&&frags[fragi].coded){
- frag_mv=frag_mvs[fragi];
- break;
- }
- }
- if(bi<4){
- switch(mb_modes[mbi]){
- case OC_MODE_INTRA:{
- if(_dec->telemetry_mbmode&0x02){
- cairo_set_source_rgba(c,1.,0,0,.5);
- cairo_rectangle(c,x+2.5,y+2.5,11,11);
- cairo_stroke_preserve(c);
- cairo_set_source_rgba(c,1.,0,0,.25);
- cairo_fill(c);
- }
- }break;
- case OC_MODE_INTER_NOMV:{
- if(_dec->telemetry_mbmode&0x01){
- cairo_set_source_rgba(c,0,0,1.,.5);
- cairo_rectangle(c,x+2.5,y+2.5,11,11);
- cairo_stroke_preserve(c);
- cairo_set_source_rgba(c,0,0,1.,.25);
- cairo_fill(c);
- }
- }break;
- case OC_MODE_INTER_MV:{
- if(_dec->telemetry_mbmode&0x04){
- cairo_rectangle(c,x+2.5,y+2.5,11,11);
- cairo_set_source_rgba(c,0,1.,0,.5);
- cairo_stroke(c);
- }
- if(_dec->telemetry_mv&0x04){
- cairo_move_to(c,x+8+frag_mv[0],y+8-frag_mv[1]);
- cairo_set_source_rgba(c,1.,1.,1.,.9);
- cairo_set_line_width(c,3.);
- cairo_line_to(c,x+8+frag_mv[0]*.66,y+8-frag_mv[1]*.66);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,2.);
- cairo_line_to(c,x+8+frag_mv[0]*.33,y+8-frag_mv[1]*.33);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,1.);
- cairo_line_to(c,x+8,y+8);
- cairo_stroke(c);
- }
- }break;
- case OC_MODE_INTER_MV_LAST:{
- if(_dec->telemetry_mbmode&0x08){
- cairo_rectangle(c,x+2.5,y+2.5,11,11);
- cairo_set_source_rgba(c,0,1.,0,.5);
- cairo_move_to(c,x+13.5,y+2.5);
- cairo_line_to(c,x+2.5,y+8);
- cairo_line_to(c,x+13.5,y+13.5);
- cairo_stroke(c);
- }
- if(_dec->telemetry_mv&0x08){
- cairo_move_to(c,x+8+frag_mv[0],y+8-frag_mv[1]);
- cairo_set_source_rgba(c,1.,1.,1.,.9);
- cairo_set_line_width(c,3.);
- cairo_line_to(c,x+8+frag_mv[0]*.66,y+8-frag_mv[1]*.66);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,2.);
- cairo_line_to(c,x+8+frag_mv[0]*.33,y+8-frag_mv[1]*.33);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,1.);
- cairo_line_to(c,x+8,y+8);
- cairo_stroke(c);
- }
- }break;
- case OC_MODE_INTER_MV_LAST2:{
- if(_dec->telemetry_mbmode&0x10){
- cairo_rectangle(c,x+2.5,y+2.5,11,11);
- cairo_set_source_rgba(c,0,1.,0,.5);
- cairo_move_to(c,x+8,y+2.5);
- cairo_line_to(c,x+2.5,y+8);
- cairo_line_to(c,x+8,y+13.5);
- cairo_move_to(c,x+13.5,y+2.5);
- cairo_line_to(c,x+8,y+8);
- cairo_line_to(c,x+13.5,y+13.5);
- cairo_stroke(c);
- }
- if(_dec->telemetry_mv&0x10){
- cairo_move_to(c,x+8+frag_mv[0],y+8-frag_mv[1]);
- cairo_set_source_rgba(c,1.,1.,1.,.9);
- cairo_set_line_width(c,3.);
- cairo_line_to(c,x+8+frag_mv[0]*.66,y+8-frag_mv[1]*.66);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,2.);
- cairo_line_to(c,x+8+frag_mv[0]*.33,y+8-frag_mv[1]*.33);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,1.);
- cairo_line_to(c,x+8,y+8);
- cairo_stroke(c);
- }
- }break;
- case OC_MODE_GOLDEN_NOMV:{
- if(_dec->telemetry_mbmode&0x20){
- cairo_set_source_rgba(c,1.,1.,0,.5);
- cairo_rectangle(c,x+2.5,y+2.5,11,11);
- cairo_stroke_preserve(c);
- cairo_set_source_rgba(c,1.,1.,0,.25);
- cairo_fill(c);
- }
- }break;
- case OC_MODE_GOLDEN_MV:{
- if(_dec->telemetry_mbmode&0x40){
- cairo_rectangle(c,x+2.5,y+2.5,11,11);
- cairo_set_source_rgba(c,1.,1.,0,.5);
- cairo_stroke(c);
- }
- if(_dec->telemetry_mv&0x40){
- cairo_move_to(c,x+8+frag_mv[0],y+8-frag_mv[1]);
- cairo_set_source_rgba(c,1.,1.,1.,.9);
- cairo_set_line_width(c,3.);
- cairo_line_to(c,x+8+frag_mv[0]*.66,y+8-frag_mv[1]*.66);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,2.);
- cairo_line_to(c,x+8+frag_mv[0]*.33,y+8-frag_mv[1]*.33);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,1.);
- cairo_line_to(c,x+8,y+8);
- cairo_stroke(c);
- }
- }break;
- case OC_MODE_INTER_MV_FOUR:{
- if(_dec->telemetry_mbmode&0x80){
- cairo_rectangle(c,x+2.5,y+2.5,4,4);
- cairo_rectangle(c,x+9.5,y+2.5,4,4);
- cairo_rectangle(c,x+2.5,y+9.5,4,4);
- cairo_rectangle(c,x+9.5,y+9.5,4,4);
- cairo_set_source_rgba(c,0,1.,0,.5);
- cairo_stroke(c);
- }
- /*4mv is odd, coded in raster order.*/
- fragi=mb_maps[mbi][0][0];
- if(frags[fragi].coded&&_dec->telemetry_mv&0x80){
- frag_mv=frag_mvs[fragi];
- cairo_move_to(c,x+4+frag_mv[0],y+12-frag_mv[1]);
- cairo_set_source_rgba(c,1.,1.,1.,.9);
- cairo_set_line_width(c,3.);
- cairo_line_to(c,x+4+frag_mv[0]*.66,y+12-frag_mv[1]*.66);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,2.);
- cairo_line_to(c,x+4+frag_mv[0]*.33,y+12-frag_mv[1]*.33);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,1.);
- cairo_line_to(c,x+4,y+12);
- cairo_stroke(c);
- }
- fragi=mb_maps[mbi][0][1];
- if(frags[fragi].coded&&_dec->telemetry_mv&0x80){
- frag_mv=frag_mvs[fragi];
- cairo_move_to(c,x+12+frag_mv[0],y+12-frag_mv[1]);
- cairo_set_source_rgba(c,1.,1.,1.,.9);
- cairo_set_line_width(c,3.);
- cairo_line_to(c,x+12+frag_mv[0]*.66,y+12-frag_mv[1]*.66);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,2.);
- cairo_line_to(c,x+12+frag_mv[0]*.33,y+12-frag_mv[1]*.33);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,1.);
- cairo_line_to(c,x+12,y+12);
- cairo_stroke(c);
- }
- fragi=mb_maps[mbi][0][2];
- if(frags[fragi].coded&&_dec->telemetry_mv&0x80){
- frag_mv=frag_mvs[fragi];
- cairo_move_to(c,x+4+frag_mv[0],y+4-frag_mv[1]);
- cairo_set_source_rgba(c,1.,1.,1.,.9);
- cairo_set_line_width(c,3.);
- cairo_line_to(c,x+4+frag_mv[0]*.66,y+4-frag_mv[1]*.66);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,2.);
- cairo_line_to(c,x+4+frag_mv[0]*.33,y+4-frag_mv[1]*.33);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,1.);
- cairo_line_to(c,x+4,y+4);
- cairo_stroke(c);
- }
- fragi=mb_maps[mbi][0][3];
- if(frags[fragi].coded&&_dec->telemetry_mv&0x80){
- frag_mv=frag_mvs[fragi];
- cairo_move_to(c,x+12+frag_mv[0],y+4-frag_mv[1]);
- cairo_set_source_rgba(c,1.,1.,1.,.9);
- cairo_set_line_width(c,3.);
- cairo_line_to(c,x+12+frag_mv[0]*.66,y+4-frag_mv[1]*.66);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,2.);
- cairo_line_to(c,x+12+frag_mv[0]*.33,y+4-frag_mv[1]*.33);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,1.);
- cairo_line_to(c,x+12,y+4);
- cairo_stroke(c);
- }
- }break;
- }
- }
- }
- /*qii illustration.*/
- if(_dec->telemetry_qi&0x2){
- cairo_set_line_cap(c,CAIRO_LINE_CAP_SQUARE);
- for(bi=0;bi<4;bi++){
- ptrdiff_t fragi;
- int qiv;
- int xp;
- int yp;
- xp=x+(bi&1)*8;
- yp=y+8-(bi&2)*4;
- fragi=mb_maps[mbi][0][bi];
- if(fragi>=0&&frags[fragi].coded){
- qiv=qim[frags[fragi].qii];
- cairo_set_line_width(c,3.);
- cairo_set_source_rgba(c,0.,0.,0.,.5);
- switch(qiv){
- /*Double plus:*/
- case 2:{
- if((bi&1)^((bi&2)>>1)){
- cairo_move_to(c,xp+2.5,yp+1.5);
- cairo_line_to(c,xp+2.5,yp+3.5);
- cairo_move_to(c,xp+1.5,yp+2.5);
- cairo_line_to(c,xp+3.5,yp+2.5);
- cairo_move_to(c,xp+5.5,yp+4.5);
- cairo_line_to(c,xp+5.5,yp+6.5);
- cairo_move_to(c,xp+4.5,yp+5.5);
- cairo_line_to(c,xp+6.5,yp+5.5);
- cairo_stroke_preserve(c);
- cairo_set_source_rgba(c,0.,1.,1.,1.);
- }
- else{
- cairo_move_to(c,xp+5.5,yp+1.5);
- cairo_line_to(c,xp+5.5,yp+3.5);
- cairo_move_to(c,xp+4.5,yp+2.5);
- cairo_line_to(c,xp+6.5,yp+2.5);
- cairo_move_to(c,xp+2.5,yp+4.5);
- cairo_line_to(c,xp+2.5,yp+6.5);
- cairo_move_to(c,xp+1.5,yp+5.5);
- cairo_line_to(c,xp+3.5,yp+5.5);
- cairo_stroke_preserve(c);
- cairo_set_source_rgba(c,0.,1.,1.,1.);
- }
- }break;
- /*Double minus:*/
- case -2:{
- cairo_move_to(c,xp+2.5,yp+2.5);
- cairo_line_to(c,xp+5.5,yp+2.5);
- cairo_move_to(c,xp+2.5,yp+5.5);
- cairo_line_to(c,xp+5.5,yp+5.5);
- cairo_stroke_preserve(c);
- cairo_set_source_rgba(c,1.,1.,1.,1.);
- }break;
- /*Plus:*/
- case 1:{
- if(bi&2==0)yp-=2;
- if(bi&1==0)xp-=2;
- cairo_move_to(c,xp+4.5,yp+2.5);
- cairo_line_to(c,xp+4.5,yp+6.5);
- cairo_move_to(c,xp+2.5,yp+4.5);
- cairo_line_to(c,xp+6.5,yp+4.5);
- cairo_stroke_preserve(c);
- cairo_set_source_rgba(c,.1,1.,.3,1.);
- break;
- }
- /*Fall through.*/
- /*Minus:*/
- case -1:{
- cairo_move_to(c,xp+2.5,yp+4.5);
- cairo_line_to(c,xp+6.5,yp+4.5);
- cairo_stroke_preserve(c);
- cairo_set_source_rgba(c,1.,.3,.1,1.);
- }break;
- default:continue;
- }
- cairo_set_line_width(c,1.);
- cairo_stroke(c);
- }
- }
- }
- col2++;
- if((col2>>1)>=_dec->state.nhmbs){
- col2=0;
- row2+=2;
- }
- }
- /*Bit usage indicator[s]:*/
- if(_dec->telemetry_bits){
- int widths[6];
- int fpsn;
- int fpsd;
- int mult;
- int fullw;
- int padw;
- int i;
- fpsn=_dec->state.info.fps_numerator;
- fpsd=_dec->state.info.fps_denominator;
- mult=(_dec->telemetry_bits>=0xFF?1:_dec->telemetry_bits);
- fullw=250.f*h*fpsd*mult/fpsn;
- padw=w-24;
- /*Header and coded block bits.*/
- if(_dec->telemetry_frame_bytes<0||
- _dec->telemetry_frame_bytes==OC_LOTS_OF_BITS){
- _dec->telemetry_frame_bytes=0;
- }
- if(_dec->telemetry_coding_bytes<0||
- _dec->telemetry_coding_bytes>_dec->telemetry_frame_bytes){
- _dec->telemetry_coding_bytes=0;
- }
- if(_dec->telemetry_mode_bytes<0||
- _dec->telemetry_mode_bytes>_dec->telemetry_frame_bytes){
- _dec->telemetry_mode_bytes=0;
- }
- if(_dec->telemetry_mv_bytes<0||
- _dec->telemetry_mv_bytes>_dec->telemetry_frame_bytes){
- _dec->telemetry_mv_bytes=0;
- }
- if(_dec->telemetry_qi_bytes<0||
- _dec->telemetry_qi_bytes>_dec->telemetry_frame_bytes){
- _dec->telemetry_qi_bytes=0;
- }
- if(_dec->telemetry_dc_bytes<0||
- _dec->telemetry_dc_bytes>_dec->telemetry_frame_bytes){
- _dec->telemetry_dc_bytes=0;
- }
- widths[0]=padw*(_dec->telemetry_frame_bytes-_dec->telemetry_coding_bytes)/fullw;
- widths[1]=padw*(_dec->telemetry_coding_bytes-_dec->telemetry_mode_bytes)/fullw;
- widths[2]=padw*(_dec->telemetry_mode_bytes-_dec->telemetry_mv_bytes)/fullw;
- widths[3]=padw*(_dec->telemetry_mv_bytes-_dec->telemetry_qi_bytes)/fullw;
- widths[4]=padw*(_dec->telemetry_qi_bytes-_dec->telemetry_dc_bytes)/fullw;
- widths[5]=padw*(_dec->telemetry_dc_bytes)/fullw;
- for(i=0;i<6;i++)if(widths[i]>w)widths[i]=w;
- cairo_set_source_rgba(c,.0,.0,.0,.6);
- cairo_rectangle(c,10,h-33,widths[0]+1,5);
- cairo_rectangle(c,10,h-29,widths[1]+1,5);
- cairo_rectangle(c,10,h-25,widths[2]+1,5);
- cairo_rectangle(c,10,h-21,widths[3]+1,5);
- cairo_rectangle(c,10,h-17,widths[4]+1,5);
- cairo_rectangle(c,10,h-13,widths[5]+1,5);
- cairo_fill(c);
- cairo_set_source_rgb(c,1,0,0);
- cairo_rectangle(c,10.5,h-32.5,widths[0],4);
- cairo_fill(c);
- cairo_set_source_rgb(c,0,1,0);
- cairo_rectangle(c,10.5,h-28.5,widths[1],4);
- cairo_fill(c);
- cairo_set_source_rgb(c,0,0,1);
- cairo_rectangle(c,10.5,h-24.5,widths[2],4);
- cairo_fill(c);
- cairo_set_source_rgb(c,.6,.4,.0);
- cairo_rectangle(c,10.5,h-20.5,widths[3],4);
- cairo_fill(c);
- cairo_set_source_rgb(c,.3,.3,.3);
- cairo_rectangle(c,10.5,h-16.5,widths[4],4);
- cairo_fill(c);
- cairo_set_source_rgb(c,.5,.5,.8);
- cairo_rectangle(c,10.5,h-12.5,widths[5],4);
- cairo_fill(c);
- }
- /*Master qi indicator[s]:*/
- if(_dec->telemetry_qi&0x1){
- cairo_text_extents_t extents;
- char buffer[10];
- int p;
- int y;
- p=0;
- y=h-7.5;
- if(_dec->state.qis[0]>=10)buffer[p++]=48+_dec->state.qis[0]/10;
- buffer[p++]=48+_dec->state.qis[0]%10;
- if(_dec->state.nqis>=2){
- buffer[p++]=' ';
- if(_dec->state.qis[1]>=10)buffer[p++]=48+_dec->state.qis[1]/10;
- buffer[p++]=48+_dec->state.qis[1]%10;
- }
- if(_dec->state.nqis==3){
- buffer[p++]=' ';
- if(_dec->state.qis[2]>=10)buffer[p++]=48+_dec->state.qis[2]/10;
- buffer[p++]=48+_dec->state.qis[2]%10;
- }
- buffer[p++]='\0';
- cairo_select_font_face(c,"sans",
- CAIRO_FONT_SLANT_NORMAL,CAIRO_FONT_WEIGHT_BOLD);
- cairo_set_font_size(c,18);
- cairo_text_extents(c,buffer,&extents);
- cairo_set_source_rgb(c,1,1,1);
- cairo_move_to(c,w-extents.x_advance-10,y);
- cairo_show_text(c,buffer);
- cairo_set_source_rgb(c,0,0,0);
- cairo_move_to(c,w-extents.x_advance-10,y);
- cairo_text_path(c,buffer);
- cairo_set_line_width(c,.8);
- cairo_set_line_join(c,CAIRO_LINE_JOIN_ROUND);
- cairo_stroke(c);
- }
- cairo_destroy(c);
- }
- /*Out of the Cairo plane into the telemetry YUV buffer.*/
- _ycbcr[0].data=_dec->telemetry_frame_data;
- _ycbcr[0].stride=_ycbcr[0].width;
- _ycbcr[1].data=_ycbcr[0].data+h*_ycbcr[0].stride;
- _ycbcr[1].stride=_ycbcr[1].width;
- _ycbcr[2].data=_ycbcr[1].data+(h>>vdec)*_ycbcr[1].stride;
- _ycbcr[2].stride=_ycbcr[2].width;
- y_row=_ycbcr[0].data;
- u_row=_ycbcr[1].data;
- v_row=_ycbcr[2].data;
- rgb_row=data;
- /*This is one of the few places it's worth handling chroma on a
- case-by-case basis.*/
- switch(_dec->state.info.pixel_fmt){
- case TH_PF_420:{
- for(y=0;y<h;y+=2){
- unsigned char *y_row2;
- unsigned char *rgb_row2;
- y_row2=y_row+_ycbcr[0].stride;
- rgb_row2=rgb_row+cstride;
- for(x=0;x<w;x+=2){
- int y;
- int u;
- int v;
- y=(65481*rgb_row[4*x+2]+128553*rgb_row[4*x+1]
- +24966*rgb_row[4*x+0]+4207500)/255000;
- y_row[x]=OC_CLAMP255(y);
- y=(65481*rgb_row[4*x+6]+128553*rgb_row[4*x+5]
- +24966*rgb_row[4*x+4]+4207500)/255000;
- y_row[x+1]=OC_CLAMP255(y);
- y=(65481*rgb_row2[4*x+2]+128553*rgb_row2[4*x+1]
- +24966*rgb_row2[4*x+0]+4207500)/255000;
- y_row2[x]=OC_CLAMP255(y);
- y=(65481*rgb_row2[4*x+6]+128553*rgb_row2[4*x+5]
- +24966*rgb_row2[4*x+4]+4207500)/255000;
- y_row2[x+1]=OC_CLAMP255(y);
- u=(-8372*(rgb_row[4*x+2]+rgb_row[4*x+6]
- +rgb_row2[4*x+2]+rgb_row2[4*x+6])
- -16436*(rgb_row[4*x+1]+rgb_row[4*x+5]
- +rgb_row2[4*x+1]+rgb_row2[4*x+5])
- +24808*(rgb_row[4*x+0]+rgb_row[4*x+4]
- +rgb_row2[4*x+0]+rgb_row2[4*x+4])+29032005)/225930;
- v=(39256*(rgb_row[4*x+2]+rgb_row[4*x+6]
- +rgb_row2[4*x+2]+rgb_row2[4*x+6])
- -32872*(rgb_row[4*x+1]+rgb_row[4*x+5]
- +rgb_row2[4*x+1]+rgb_row2[4*x+5])
- -6384*(rgb_row[4*x+0]+rgb_row[4*x+4]
- +rgb_row2[4*x+0]+rgb_row2[4*x+4])+45940035)/357510;
- u_row[x>>1]=OC_CLAMP255(u);
- v_row[x>>1]=OC_CLAMP255(v);
- }
- y_row+=_ycbcr[0].stride<<1;
- u_row+=_ycbcr[1].stride;
- v_row+=_ycbcr[2].stride;
- rgb_row+=cstride<<1;
- }
- }break;
- case TH_PF_422:{
- for(y=0;y<h;y++){
- for(x=0;x<w;x+=2){
- int y;
- int u;
- int v;
- y=(65481*rgb_row[4*x+2]+128553*rgb_row[4*x+1]
- +24966*rgb_row[4*x+0]+4207500)/255000;
- y_row[x]=OC_CLAMP255(y);
- y=(65481*rgb_row[4*x+6]+128553*rgb_row[4*x+5]
- +24966*rgb_row[4*x+4]+4207500)/255000;
- y_row[x+1]=OC_CLAMP255(y);
- u=(-16744*(rgb_row[4*x+2]+rgb_row[4*x+6])
- -32872*(rgb_row[4*x+1]+rgb_row[4*x+5])
- +49616*(rgb_row[4*x+0]+rgb_row[4*x+4])+29032005)/225930;
- v=(78512*(rgb_row[4*x+2]+rgb_row[4*x+6])
- -65744*(rgb_row[4*x+1]+rgb_row[4*x+5])
- -12768*(rgb_row[4*x+0]+rgb_row[4*x+4])+45940035)/357510;
- u_row[x>>1]=OC_CLAMP255(u);
- v_row[x>>1]=OC_CLAMP255(v);
- }
- y_row+=_ycbcr[0].stride;
- u_row+=_ycbcr[1].stride;
- v_row+=_ycbcr[2].stride;
- rgb_row+=cstride;
- }
- }break;
- /*case TH_PF_444:*/
- default:{
- for(y=0;y<h;y++){
- for(x=0;x<w;x++){
- int y;
- int u;
- int v;
- y=(65481*rgb_row[4*x+2]+128553*rgb_row[4*x+1]
- +24966*rgb_row[4*x+0]+4207500)/255000;
- u=(-33488*rgb_row[4*x+2]-65744*rgb_row[4*x+1]
- +99232*rgb_row[4*x+0]+29032005)/225930;
- v=(157024*rgb_row[4*x+2]-131488*rgb_row[4*x+1]
- -25536*rgb_row[4*x+0]+45940035)/357510;
- y_row[x]=OC_CLAMP255(y);
- u_row[x]=OC_CLAMP255(u);
- v_row[x]=OC_CLAMP255(v);
- }
- y_row+=_ycbcr[0].stride;
- u_row+=_ycbcr[1].stride;
- v_row+=_ycbcr[2].stride;
- rgb_row+=cstride;
- }
- }break;
- }
- /*Finished.
- Destroy the surface.*/
- cairo_surface_destroy(cs);
- }
-#endif
return 0;
}
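The telemetry overlay removed above converts each decoded pixel to RGB with integer arithmetic before drawing with Cairo. Below is a minimal standalone sketch of that per-pixel step; the constants are copied from the deleted loop, and the local clamp255() helper merely stands in for the library's OC_CLAMP255 macro.

/*Illustration only: the fixed-point YCbCr->RGB conversion used by the
   removed telemetry path.*/
static unsigned char clamp255(int _v){
  return (unsigned char)(_v<0?0:_v>255?255:_v);
}

static void ycbcr_to_rgb_pixel(unsigned char _rgb[3],int _y,int _cb,int _cr){
  int r;
  int g;
  int b;
  r=(1904000*_y+2609823*_cr-363703744)/1635200;
  g=(3827562*_y-1287801*_cb-2672387*_cr+447306710)/3287200;
  b=(952000*_y+1649289*_cb-225932192)/817600;
  _rgb[0]=clamp255(r);
  _rgb[1]=clamp255(g);
  _rgb[2]=clamp255(b);
}

In the original loop the chroma samples are shared between neighboring luma samples according to the pixel format (the x>>hdec indexing), so only the per-pixel arithmetic is shown here.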
diff --git a/thirdparty/libtheora/dequant.c b/thirdparty/libtheora/dequant.c
index e554872d4e..860536f72d 100644
--- a/thirdparty/libtheora/dequant.c
+++ b/thirdparty/libtheora/dequant.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: dequant.c 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
diff --git a/thirdparty/libtheora/dequant.h b/thirdparty/libtheora/dequant.h
index ef25838e35..9d6cd6be56 100644
--- a/thirdparty/libtheora/dequant.h
+++ b/thirdparty/libtheora/dequant.h
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: dequant.h 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
diff --git a/thirdparty/libtheora/encfrag.c b/thirdparty/libtheora/encfrag.c
index bb814c8e4a..0e18111ac7 100644
--- a/thirdparty/libtheora/encfrag.c
+++ b/thirdparty/libtheora/encfrag.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: encfrag.c 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
#include <stdlib.h>
@@ -19,11 +19,6 @@
#include "encint.h"
-void oc_enc_frag_sub(const oc_enc_ctx *_enc,ogg_int16_t _diff[64],
- const unsigned char *_src,const unsigned char *_ref,int _ystride){
- (*_enc->opt_vtable.frag_sub)(_diff,_src,_ref,_ystride);
-}
-
void oc_enc_frag_sub_c(ogg_int16_t _diff[64],const unsigned char *_src,
const unsigned char *_ref,int _ystride){
int i;
@@ -35,11 +30,6 @@ void oc_enc_frag_sub_c(ogg_int16_t _diff[64],const unsigned char *_src,
}
}
-void oc_enc_frag_sub_128(const oc_enc_ctx *_enc,ogg_int16_t _diff[64],
- const unsigned char *_src,int _ystride){
- (*_enc->opt_vtable.frag_sub_128)(_diff,_src,_ystride);
-}
-
void oc_enc_frag_sub_128_c(ogg_int16_t *_diff,
const unsigned char *_src,int _ystride){
int i;
@@ -50,11 +40,6 @@ void oc_enc_frag_sub_128_c(ogg_int16_t *_diff,
}
}
-unsigned oc_enc_frag_sad(const oc_enc_ctx *_enc,const unsigned char *_x,
- const unsigned char *_y,int _ystride){
- return (*_enc->opt_vtable.frag_sad)(_x,_y,_ystride);
-}
-
unsigned oc_enc_frag_sad_c(const unsigned char *_src,
const unsigned char *_ref,int _ystride){
unsigned sad;
@@ -69,12 +54,6 @@ unsigned oc_enc_frag_sad_c(const unsigned char *_src,
return sad;
}
-unsigned oc_enc_frag_sad_thresh(const oc_enc_ctx *_enc,
- const unsigned char *_src,const unsigned char *_ref,int _ystride,
- unsigned _thresh){
- return (*_enc->opt_vtable.frag_sad_thresh)(_src,_ref,_ystride,_thresh);
-}
-
unsigned oc_enc_frag_sad_thresh_c(const unsigned char *_src,
const unsigned char *_ref,int _ystride,unsigned _thresh){
unsigned sad;
@@ -90,13 +69,6 @@ unsigned oc_enc_frag_sad_thresh_c(const unsigned char *_src,
return sad;
}
-unsigned oc_enc_frag_sad2_thresh(const oc_enc_ctx *_enc,
- const unsigned char *_src,const unsigned char *_ref1,
- const unsigned char *_ref2,int _ystride,unsigned _thresh){
- return (*_enc->opt_vtable.frag_sad2_thresh)(_src,_ref1,_ref2,_ystride,
- _thresh);
-}
-
unsigned oc_enc_frag_sad2_thresh_c(const unsigned char *_src,
const unsigned char *_ref1,const unsigned char *_ref2,int _ystride,
unsigned _thresh){
@@ -114,6 +86,27 @@ unsigned oc_enc_frag_sad2_thresh_c(const unsigned char *_src,
return sad;
}
+unsigned oc_enc_frag_intra_sad_c(const unsigned char *_src, int _ystride){
+ const unsigned char *src = _src;
+ unsigned dc;
+ unsigned sad;
+ int i;
+ dc=0;
+ for(i=8;i-->0;){
+ int j;
+ for(j=0;j<8;j++)dc+=src[j];
+ src+=_ystride;
+ }
+ dc=dc+32>>6;
+ sad=0;
+ for(i=8;i-->0;){
+ int j;
+ for(j=0;j<8;j++)sad+=abs(_src[j]-dc);
+ _src+=_ystride;
+ }
+ return sad;
+}
+
static void oc_diff_hadamard(ogg_int16_t _buf[64],const unsigned char *_src,
const unsigned char *_ref,int _ystride){
int i;
@@ -269,19 +262,20 @@ static void oc_intra_hadamard(ogg_int16_t _buf[64],const unsigned char *_src,
}
}
-unsigned oc_hadamard_sad_thresh(const ogg_int16_t _buf[64],unsigned _thresh){
- unsigned sad;
- int t0;
- int t1;
- int t2;
- int t3;
- int t4;
- int t5;
- int t6;
- int t7;
- int r;
- int i;
- sad=0;
+unsigned oc_hadamard_sad(int *_dc,const ogg_int16_t _buf[64]){
+ unsigned sad;
+ int dc;
+ int t0;
+ int t1;
+ int t2;
+ int t3;
+ int t4;
+ int t5;
+ int t6;
+ int t7;
+ int r;
+ int i;
+ sad=dc=0;
for(i=0;i<8;i++){
/*Hadamard stage 1:*/
t0=_buf[i*8+0]+_buf[i*8+4];
@@ -306,7 +300,7 @@ unsigned oc_hadamard_sad_thresh(const ogg_int16_t _buf[64],unsigned _thresh){
t5+=t7;
t7=r-t7;
/*Hadamard stage 3:*/
- r=abs(t0+t1);
+ r=abs(t0+t1)&-(i>0);
r+=abs(t0-t1);
r+=abs(t2+t3);
r+=abs(t2-t3);
@@ -315,54 +309,61 @@ unsigned oc_hadamard_sad_thresh(const ogg_int16_t _buf[64],unsigned _thresh){
r+=abs(t6+t7);
r+=abs(t6-t7);
sad+=r;
- if(sad>_thresh)break;
}
+ dc=_buf[0]+_buf[1]+_buf[2]+_buf[3]+_buf[4]+_buf[5]+_buf[6]+_buf[7];
+ *_dc=dc;
return sad;
}
-unsigned oc_enc_frag_satd_thresh(const oc_enc_ctx *_enc,
- const unsigned char *_src,const unsigned char *_ref,int _ystride,
- unsigned _thresh){
- return (*_enc->opt_vtable.frag_satd_thresh)(_src,_ref,_ystride,_thresh);
-}
-
-unsigned oc_enc_frag_satd_thresh_c(const unsigned char *_src,
- const unsigned char *_ref,int _ystride,unsigned _thresh){
+unsigned oc_enc_frag_satd_c(int *_dc,const unsigned char *_src,
+ const unsigned char *_ref,int _ystride){
ogg_int16_t buf[64];
oc_diff_hadamard(buf,_src,_ref,_ystride);
- return oc_hadamard_sad_thresh(buf,_thresh);
-}
-
-unsigned oc_enc_frag_satd2_thresh(const oc_enc_ctx *_enc,
- const unsigned char *_src,const unsigned char *_ref1,
- const unsigned char *_ref2,int _ystride,unsigned _thresh){
- return (*_enc->opt_vtable.frag_satd2_thresh)(_src,_ref1,_ref2,_ystride,
- _thresh);
+ return oc_hadamard_sad(_dc,buf);
}
-unsigned oc_enc_frag_satd2_thresh_c(const unsigned char *_src,
- const unsigned char *_ref1,const unsigned char *_ref2,int _ystride,
- unsigned _thresh){
+unsigned oc_enc_frag_satd2_c(int *_dc,const unsigned char *_src,
+ const unsigned char *_ref1,const unsigned char *_ref2,int _ystride){
ogg_int16_t buf[64];
oc_diff_hadamard2(buf,_src,_ref1,_ref2,_ystride);
- return oc_hadamard_sad_thresh(buf,_thresh);
+ return oc_hadamard_sad(_dc,buf);
}
-unsigned oc_enc_frag_intra_satd(const oc_enc_ctx *_enc,
+unsigned oc_enc_frag_intra_satd_c(int *_dc,
const unsigned char *_src,int _ystride){
- return (*_enc->opt_vtable.frag_intra_satd)(_src,_ystride);
-}
-
-unsigned oc_enc_frag_intra_satd_c(const unsigned char *_src,int _ystride){
ogg_int16_t buf[64];
oc_intra_hadamard(buf,_src,_ystride);
- return oc_hadamard_sad_thresh(buf,UINT_MAX)
- -abs(buf[0]+buf[1]+buf[2]+buf[3]+buf[4]+buf[5]+buf[6]+buf[7]);
+ return oc_hadamard_sad(_dc,buf);
}
-void oc_enc_frag_copy2(const oc_enc_ctx *_enc,unsigned char *_dst,
- const unsigned char *_src1,const unsigned char *_src2,int _ystride){
- (*_enc->opt_vtable.frag_copy2)(_dst,_src1,_src2,_ystride);
+unsigned oc_enc_frag_ssd_c(const unsigned char *_src,
+ const unsigned char *_ref,int _ystride){
+ unsigned ret;
+ int y;
+ int x;
+ ret=0;
+ for(y=0;y<8;y++){
+ for(x=0;x<8;x++)ret+=(_src[x]-_ref[x])*(_src[x]-_ref[x]);
+ _src+=_ystride;
+ _ref+=_ystride;
+ }
+ return ret;
+}
+
+unsigned oc_enc_frag_border_ssd_c(const unsigned char *_src,
+ const unsigned char *_ref,int _ystride,ogg_int64_t _mask){
+ unsigned ret;
+ int y;
+ int x;
+ ret=0;
+ for(y=0;y<8;y++){
+ for(x=0;x<8;x++,_mask>>=1){
+ if(_mask&1)ret+=(_src[x]-_ref[x])*(_src[x]-_ref[x]);
+ }
+ _src+=_ystride;
+ _ref+=_ystride;
+ }
+ return ret;
}
void oc_enc_frag_copy2_c(unsigned char *_dst,
@@ -376,13 +377,3 @@ void oc_enc_frag_copy2_c(unsigned char *_dst,
_src2+=_ystride;
}
}
-
-void oc_enc_frag_recon_intra(const oc_enc_ctx *_enc,
- unsigned char *_dst,int _ystride,const ogg_int16_t _residue[64]){
- (*_enc->opt_vtable.frag_recon_intra)(_dst,_ystride,_residue);
-}
-
-void oc_enc_frag_recon_inter(const oc_enc_ctx *_enc,unsigned char *_dst,
- const unsigned char *_src,int _ystride,const ogg_int16_t _residue[64]){
- (*_enc->opt_vtable.frag_recon_inter)(_dst,_src,_ystride,_residue);
-}
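The new oc_enc_frag_border_ssd_c() above consumes its 64-bit _mask one bit per pixel in raster order, so bit y*8+x selects pixel (x,y) of the 8x8 fragment. The helper below, an illustration rather than library code, builds such a mask for a fragment whose two leftmost columns lie inside the picture; uint64_t simply stands in for ogg_int64_t.

/*Illustration only: build a border mask covering the two leftmost columns,
   using the bit order consumed by oc_enc_frag_border_ssd_c().*/
#include <stdint.h>

static uint64_t left_two_columns_mask(void){
  uint64_t mask;
  int x;
  int y;
  mask=0;
  for(y=0;y<8;y++){
    for(x=0;x<2;x++)mask|=(uint64_t)1<<(y*8+x);
  }
  return mask;
}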
diff --git a/thirdparty/libtheora/encinfo.c b/thirdparty/libtheora/encinfo.c
index 83be1dae72..41db6bad45 100644
--- a/thirdparty/libtheora/encinfo.c
+++ b/thirdparty/libtheora/encinfo.c
@@ -1,6 +1,6 @@
#include <stdlib.h>
#include <string.h>
-#include "internal.h"
+#include "state.h"
#include "enquant.h"
#include "huffenc.h"
diff --git a/thirdparty/libtheora/encint.h b/thirdparty/libtheora/encint.h
index 97897d5a04..d25de4b8f6 100644
--- a/thirdparty/libtheora/encint.h
+++ b/thirdparty/libtheora/encint.h
@@ -11,17 +11,13 @@
********************************************************************
function:
- last mod: $Id: encint.h 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
#if !defined(_encint_H)
# define _encint_H (1)
-# if defined(HAVE_CONFIG_H)
-# include "config.h"
-# endif
# include "theora/theoraenc.h"
-# include "internal.h"
-# include "ocintrin.h"
+# include "state.h"
# include "mathops.h"
# include "enquant.h"
# include "huffenc.h"
@@ -32,8 +28,13 @@
typedef oc_mv oc_mv2[2];
typedef struct oc_enc_opt_vtable oc_enc_opt_vtable;
+typedef struct oc_enc_opt_data oc_enc_opt_data;
typedef struct oc_mb_enc_info oc_mb_enc_info;
typedef struct oc_mode_scheme_chooser oc_mode_scheme_chooser;
+typedef struct oc_fr_state oc_fr_state;
+typedef struct oc_qii_state oc_qii_state;
+typedef struct oc_enc_pipeline_state oc_enc_pipeline_state;
+typedef struct oc_mode_rd oc_mode_rd;
typedef struct oc_iir_filter oc_iir_filter;
typedef struct oc_frame_metrics oc_frame_metrics;
typedef struct oc_rc_state oc_rc_state;
@@ -42,6 +43,170 @@ typedef struct oc_token_checkpoint oc_token_checkpoint;
+/*Encoder-specific accelerated functions.*/
+# if defined(OC_X86_ASM)
+# if defined(_MSC_VER)
+# include "x86_vc/x86enc.h"
+# else
+# include "x86/x86enc.h"
+# endif
+# endif
+# if defined(OC_ARM_ASM)
+# include "arm/armenc.h"
+# endif
+
+# if !defined(oc_enc_accel_init)
+# define oc_enc_accel_init oc_enc_accel_init_c
+# endif
+# if defined(OC_ENC_USE_VTABLE)
+# if !defined(oc_enc_frag_sub)
+# define oc_enc_frag_sub(_enc,_diff,_src,_ref,_ystride) \
+ ((*(_enc)->opt_vtable.frag_sub)(_diff,_src,_ref,_ystride))
+# endif
+# if !defined(oc_enc_frag_sub_128)
+# define oc_enc_frag_sub_128(_enc,_diff,_src,_ystride) \
+ ((*(_enc)->opt_vtable.frag_sub_128)(_diff,_src,_ystride))
+# endif
+# if !defined(oc_enc_frag_sad)
+# define oc_enc_frag_sad(_enc,_src,_ref,_ystride) \
+ ((*(_enc)->opt_vtable.frag_sad)(_src,_ref,_ystride))
+# endif
+# if !defined(oc_enc_frag_sad_thresh)
+# define oc_enc_frag_sad_thresh(_enc,_src,_ref,_ystride,_thresh) \
+ ((*(_enc)->opt_vtable.frag_sad_thresh)(_src,_ref,_ystride,_thresh))
+# endif
+# if !defined(oc_enc_frag_sad2_thresh)
+# define oc_enc_frag_sad2_thresh(_enc,_src,_ref1,_ref2,_ystride,_thresh) \
+ ((*(_enc)->opt_vtable.frag_sad2_thresh)(_src,_ref1,_ref2,_ystride,_thresh))
+# endif
+# if !defined(oc_enc_frag_intra_sad)
+# define oc_enc_frag_intra_sad(_enc,_src,_ystride) \
+ ((*(_enc)->opt_vtable.frag_intra_sad)(_src,_ystride))
+# endif
+# if !defined(oc_enc_frag_satd)
+# define oc_enc_frag_satd(_enc,_dc,_src,_ref,_ystride) \
+ ((*(_enc)->opt_vtable.frag_satd)(_dc,_src,_ref,_ystride))
+# endif
+# if !defined(oc_enc_frag_satd2)
+# define oc_enc_frag_satd2(_enc,_dc,_src,_ref1,_ref2,_ystride) \
+ ((*(_enc)->opt_vtable.frag_satd2)(_dc,_src,_ref1,_ref2,_ystride))
+# endif
+# if !defined(oc_enc_frag_intra_satd)
+# define oc_enc_frag_intra_satd(_enc,_dc,_src,_ystride) \
+ ((*(_enc)->opt_vtable.frag_intra_satd)(_dc,_src,_ystride))
+# endif
+# if !defined(oc_enc_frag_ssd)
+# define oc_enc_frag_ssd(_enc,_src,_ref,_ystride) \
+ ((*(_enc)->opt_vtable.frag_ssd)(_src,_ref,_ystride))
+# endif
+# if !defined(oc_enc_frag_border_ssd)
+# define oc_enc_frag_border_ssd(_enc,_src,_ref,_ystride,_mask) \
+ ((*(_enc)->opt_vtable.frag_border_ssd)(_src,_ref,_ystride,_mask))
+# endif
+# if !defined(oc_enc_frag_copy2)
+# define oc_enc_frag_copy2(_enc,_dst,_src1,_src2,_ystride) \
+ ((*(_enc)->opt_vtable.frag_copy2)(_dst,_src1,_src2,_ystride))
+# endif
+# if !defined(oc_enc_enquant_table_init)
+# define oc_enc_enquant_table_init(_enc,_enquant,_dequant) \
+ ((*(_enc)->opt_vtable.enquant_table_init)(_enquant,_dequant))
+# endif
+# if !defined(oc_enc_enquant_table_fixup)
+# define oc_enc_enquant_table_fixup(_enc,_enquant,_nqis) \
+ ((*(_enc)->opt_vtable.enquant_table_fixup)(_enquant,_nqis))
+# endif
+# if !defined(oc_enc_quantize)
+# define oc_enc_quantize(_enc,_qdct,_dct,_dequant,_enquant) \
+ ((*(_enc)->opt_vtable.quantize)(_qdct,_dct,_dequant,_enquant))
+# endif
+# if !defined(oc_enc_frag_recon_intra)
+# define oc_enc_frag_recon_intra(_enc,_dst,_ystride,_residue) \
+ ((*(_enc)->opt_vtable.frag_recon_intra)(_dst,_ystride,_residue))
+# endif
+# if !defined(oc_enc_frag_recon_inter)
+# define oc_enc_frag_recon_inter(_enc,_dst,_src,_ystride,_residue) \
+ ((*(_enc)->opt_vtable.frag_recon_inter)(_dst,_src,_ystride,_residue))
+# endif
+# if !defined(oc_enc_fdct8x8)
+# define oc_enc_fdct8x8(_enc,_y,_x) \
+ ((*(_enc)->opt_vtable.fdct8x8)(_y,_x))
+# endif
+# else
+# if !defined(oc_enc_frag_sub)
+# define oc_enc_frag_sub(_enc,_diff,_src,_ref,_ystride) \
+ oc_enc_frag_sub_c(_diff,_src,_ref,_ystride)
+# endif
+# if !defined(oc_enc_frag_sub_128)
+# define oc_enc_frag_sub_128(_enc,_diff,_src,_ystride) \
+ oc_enc_frag_sub_128_c(_diff,_src,_ystride)
+# endif
+# if !defined(oc_enc_frag_sad)
+# define oc_enc_frag_sad(_enc,_src,_ref,_ystride) \
+ oc_enc_frag_sad_c(_src,_ref,_ystride)
+# endif
+# if !defined(oc_enc_frag_sad_thresh)
+# define oc_enc_frag_sad_thresh(_enc,_src,_ref,_ystride,_thresh) \
+ oc_enc_frag_sad_thresh_c(_src,_ref,_ystride,_thresh)
+# endif
+# if !defined(oc_enc_frag_sad2_thresh)
+# define oc_enc_frag_sad2_thresh(_enc,_src,_ref1,_ref2,_ystride,_thresh) \
+ oc_enc_frag_sad2_thresh_c(_src,_ref1,_ref2,_ystride,_thresh)
+# endif
+# if !defined(oc_enc_frag_intra_sad)
+# define oc_enc_frag_intra_sad(_enc,_src,_ystride) \
+ oc_enc_frag_intra_sad_c(_src,_ystride)
+# endif
+# if !defined(oc_enc_frag_satd)
+# define oc_enc_frag_satd(_enc,_dc,_src,_ref,_ystride) \
+ oc_enc_frag_satd_c(_dc,_src,_ref,_ystride)
+# endif
+# if !defined(oc_enc_frag_satd2)
+# define oc_enc_frag_satd2(_enc,_dc,_src,_ref1,_ref2,_ystride) \
+ oc_enc_frag_satd2_c(_dc,_src,_ref1,_ref2,_ystride)
+# endif
+# if !defined(oc_enc_frag_intra_satd)
+# define oc_enc_frag_intra_satd(_enc,_dc,_src,_ystride) \
+ oc_enc_frag_intra_satd_c(_dc,_src,_ystride)
+# endif
+# if !defined(oc_enc_frag_ssd)
+# define oc_enc_frag_ssd(_enc,_src,_ref,_ystride) \
+ oc_enc_frag_ssd_c(_src,_ref,_ystride)
+# endif
+# if !defined(oc_enc_frag_border_ssd)
+# define oc_enc_frag_border_ssd(_enc,_src,_ref,_ystride,_mask) \
+ oc_enc_frag_border_ssd_c(_src,_ref,_ystride,_mask)
+# endif
+# if !defined(oc_enc_frag_copy2)
+# define oc_enc_frag_copy2(_enc,_dst,_src1,_src2,_ystride) \
+ oc_enc_frag_copy2_c(_dst,_src1,_src2,_ystride)
+# endif
+# if !defined(oc_enc_enquant_table_init)
+# define oc_enc_enquant_table_init(_enc,_enquant,_dequant) \
+ oc_enc_enquant_table_init_c(_enquant,_dequant)
+# endif
+# if !defined(oc_enc_enquant_table_fixup)
+# define oc_enc_enquant_table_fixup(_enc,_enquant,_nqis) \
+ oc_enc_enquant_table_fixup_c(_enquant,_nqis)
+# endif
+# if !defined(oc_enc_quantize)
+# define oc_enc_quantize(_enc,_qdct,_dct,_dequant,_enquant) \
+ oc_enc_quantize_c(_qdct,_dct,_dequant,_enquant)
+# endif
+# if !defined(oc_enc_frag_recon_intra)
+# define oc_enc_frag_recon_intra(_enc,_dst,_ystride,_residue) \
+ oc_frag_recon_intra_c(_dst,_ystride,_residue)
+# endif
+# if !defined(oc_enc_frag_recon_inter)
+# define oc_enc_frag_recon_inter(_enc,_dst,_src,_ystride,_residue) \
+ oc_frag_recon_inter_c(_dst,_src,_ystride,_residue)
+# endif
+# if !defined(oc_enc_fdct8x8)
+# define oc_enc_fdct8x8(_enc,_y,_x) oc_enc_fdct8x8_c(_y,_x)
+# endif
+# endif
+
+
+
/*Constants for the packet-out state machine specific to the encoder.*/
/*Next packet to emit: Data packet, but none are ready yet.*/
@@ -50,13 +215,61 @@ typedef struct oc_token_checkpoint oc_token_checkpoint;
#define OC_PACKET_READY (1)
/*All features enabled.*/
-#define OC_SP_LEVEL_SLOW (0)
+#define OC_SP_LEVEL_SLOW (0)
/*Enable early skip.*/
-#define OC_SP_LEVEL_EARLY_SKIP (1)
+#define OC_SP_LEVEL_EARLY_SKIP (1)
+/*Use analysis shortcuts, single quantizer, and faster tokenization.*/
+#define OC_SP_LEVEL_FAST_ANALYSIS (2)
+/*Use SAD instead of SATD*/
+#define OC_SP_LEVEL_NOSATD (3)
/*Disable motion compensation.*/
-#define OC_SP_LEVEL_NOMC (2)
+#define OC_SP_LEVEL_NOMC (4)
/*Maximum valid speed level.*/
-#define OC_SP_LEVEL_MAX (2)
+#define OC_SP_LEVEL_MAX (4)
+
+
+/*The number of extra bits of precision at which to store rate metrics.*/
+# define OC_BIT_SCALE (6)
+/*The number of extra bits of precision at which to store RMSE metrics.
+ This must be at least half OC_BIT_SCALE (rounded up).*/
+# define OC_RMSE_SCALE (5)
+/*The number of quantizer bins to partition statistics into.*/
+# define OC_LOGQ_BINS (8)
+/*The number of SAD/SATD bins to partition statistics into.*/
+# define OC_COMP_BINS (24)
+/*The number of bits of precision to drop from SAD and SATD scores
+ to assign them to a bin.*/
+# define OC_SAD_SHIFT (6)
+# define OC_SATD_SHIFT (9)
+
+/*Masking is applied by scaling the D used in R-D optimization (via rd_scale)
+ or the lambda parameter (via rd_iscale).
+ These are only equivalent within a single block; when more than one block is
+ being considered, the former is the interpretation used.*/
+
+/*This must be at least 4 for OC_RD_SKIP_SCALE() to work below.*/
+# define OC_RD_SCALE_BITS (12-OC_BIT_SCALE)
+# define OC_RD_ISCALE_BITS (11)
+
+/*This macro is applied to _ssd values with just 4 bits of headroom
+ ((15-OC_RMSE_SCALE)*2+OC_BIT_SCALE+2); since we want to allow rd_scales as
+ large as 16, and need additional fractional bits, our only recourse that
+ doesn't lose precision on blocks with very small SSDs is to use a wider
+ multiply.*/
+# if LONG_MAX>2147483647
+# define OC_RD_SCALE(_ssd,_rd_scale) \
+ ((unsigned)((unsigned long)(_ssd)*(_rd_scale) \
+ +((1<<OC_RD_SCALE_BITS)>>1)>>OC_RD_SCALE_BITS))
+# else
+# define OC_RD_SCALE(_ssd,_rd_scale) \
+ (((_ssd)>>OC_RD_SCALE_BITS)*(_rd_scale) \
+ +(((_ssd)&(1<<OC_RD_SCALE_BITS)-1)*(_rd_scale) \
+ +((1<<OC_RD_SCALE_BITS)>>1)>>OC_RD_SCALE_BITS))
+# endif
+# define OC_RD_SKIP_SCALE(_ssd,_rd_scale) \
+ ((_ssd)*(_rd_scale)+((1<<OC_RD_SCALE_BITS-4)>>1)>>OC_RD_SCALE_BITS-4)
+# define OC_RD_ISCALE(_lambda,_rd_iscale) \
+ ((_lambda)*(_rd_iscale)+((1<<OC_RD_ISCALE_BITS)>>1)>>OC_RD_ISCALE_BITS)
/*The bits used for each of the MB mode codebooks.*/
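Both branches of the OC_RD_SCALE() macro above compute the same value, the product _ssd*_rd_scale shifted down by OC_RD_SCALE_BITS with round-to-nearest; the split form only exists to avoid overflowing a 32-bit long. A tiny standalone check of that equivalence, using OC_RD_SCALE_BITS==6 (as implied by OC_BIT_SCALE==6 above) and arbitrary example inputs:

/*Illustration only: the wide-multiply and split forms of OC_RD_SCALE()
   agree (both print 313 for these inputs).*/
#include <stdio.h>

int main(void){
  unsigned long ssd=1000;
  unsigned long rd_scale=20;
  unsigned wide;
  unsigned split;
  wide=(unsigned)(ssd*rd_scale+(64>>1)>>6);
  split=(unsigned)((ssd>>6)*rd_scale+((ssd&63)*rd_scale+(64>>1)>>6));
  printf("wide=%u split=%u\n",wide,split);
  return 0;
}

OC_RD_SKIP_SCALE() and OC_RD_ISCALE() follow the same add-half-then-shift rounding pattern, just with different shift counts.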
@@ -78,6 +291,10 @@ extern const unsigned char OC_BLOCK_RUN_CODE_NBITS[30];
/*Encoder specific functions with accelerated variants.*/
struct oc_enc_opt_vtable{
+ void (*frag_sub)(ogg_int16_t _diff[64],const unsigned char *_src,
+ const unsigned char *_ref,int _ystride);
+ void (*frag_sub_128)(ogg_int16_t _diff[64],
+ const unsigned char *_src,int _ystride);
unsigned (*frag_sad)(const unsigned char *_src,
const unsigned char *_ref,int _ystride);
unsigned (*frag_sad_thresh)(const unsigned char *_src,
@@ -85,18 +302,23 @@ struct oc_enc_opt_vtable{
unsigned (*frag_sad2_thresh)(const unsigned char *_src,
const unsigned char *_ref1,const unsigned char *_ref2,int _ystride,
unsigned _thresh);
- unsigned (*frag_satd_thresh)(const unsigned char *_src,
- const unsigned char *_ref,int _ystride,unsigned _thresh);
- unsigned (*frag_satd2_thresh)(const unsigned char *_src,
- const unsigned char *_ref1,const unsigned char *_ref2,int _ystride,
- unsigned _thresh);
- unsigned (*frag_intra_satd)(const unsigned char *_src,int _ystride);
- void (*frag_sub)(ogg_int16_t _diff[64],const unsigned char *_src,
+ unsigned (*frag_intra_sad)(const unsigned char *_src,int _ystride);
+ unsigned (*frag_satd)(int *_dc,const unsigned char *_src,
const unsigned char *_ref,int _ystride);
- void (*frag_sub_128)(ogg_int16_t _diff[64],
- const unsigned char *_src,int _ystride);
+ unsigned (*frag_satd2)(int *_dc,const unsigned char *_src,
+ const unsigned char *_ref1,const unsigned char *_ref2,int _ystride);
+ unsigned (*frag_intra_satd)(int *_dc,const unsigned char *_src,int _ystride);
+ unsigned (*frag_ssd)(const unsigned char *_src,
+ const unsigned char *_ref,int _ystride);
+ unsigned (*frag_border_ssd)(const unsigned char *_src,
+ const unsigned char *_ref,int _ystride,ogg_int64_t _mask);
void (*frag_copy2)(unsigned char *_dst,
const unsigned char *_src1,const unsigned char *_src2,int _ystride);
+ void (*enquant_table_init)(void *_enquant,
+ const ogg_uint16_t _dequant[64]);
+ void (*enquant_table_fixup)(void *_enquant[3][3][2],int _nqis);
+ int (*quantize)(ogg_int16_t _qdct[64],const ogg_int16_t _dct[64],
+ const ogg_uint16_t _dequant[64],const void *_enquant);
void (*frag_recon_intra)(unsigned char *_dst,int _ystride,
const ogg_int16_t _residue[64]);
void (*frag_recon_inter)(unsigned char *_dst,
@@ -105,7 +327,19 @@ struct oc_enc_opt_vtable{
};
-void oc_enc_vtable_init(oc_enc_ctx *_enc);
+/*Encoder specific data that varies according to which variants of the above
+ functions are used.*/
+struct oc_enc_opt_data{
+ /*The size of a single quantizer table.
+ This must be a multiple of enquant_table_alignment.*/
+ size_t enquant_table_size;
+ /*The alignment required for the quantizer tables.
+ This must be a positive power of two.*/
+ int enquant_table_alignment;
+};
+
+
+void oc_enc_accel_init(oc_enc_ctx *_enc);
@@ -158,7 +392,7 @@ struct oc_mode_scheme_chooser{
corresponds to the ranks above.*/
unsigned char scheme0_list[OC_NMODES];
/*The number of times each mode has been chosen so far.*/
- int mode_counts[OC_NMODES];
+ unsigned mode_counts[OC_NMODES];
/*The list of mode coding schemes, sorted in ascending order of bit cost.*/
unsigned char scheme_list[8];
/*The number of bits used by each mode coding scheme.*/
@@ -170,6 +404,106 @@ void oc_mode_scheme_chooser_init(oc_mode_scheme_chooser *_chooser);
+/*State to track coded block flags and their bit cost.
+ We use opportunity cost to measure the bits required to code or skip the next
+ block, using the cheaper of the cost to code it fully or partially, so long
+ as both are possible.*/
+struct oc_fr_state{
+ /*The number of bits required for the coded block flags so far this frame.*/
+ ptrdiff_t bits;
+ /*The length of the current run for the partial super block flag, not
+ including the current super block.*/
+ unsigned sb_partial_count:16;
+ /*The length of the current run for the full super block flag, not
+ including the current super block.*/
+ unsigned sb_full_count:16;
+ /*The length of the coded block flag run when the current super block
+ started.*/
+ unsigned b_coded_count_prev:6;
+ /*The coded block flag when the current super block started.*/
+ signed int b_coded_prev:2;
+ /*The length of the current coded block flag run.*/
+ unsigned b_coded_count:6;
+ /*The current coded block flag.*/
+ signed int b_coded:2;
+ /*The number of blocks processed in the current super block.*/
+ unsigned b_count:5;
+ /*Whether or not it is cheaper to code the current super block partially,
+ even if it could still be coded fully.*/
+ unsigned sb_prefer_partial:1;
+ /*Whether the last super block was coded partially.*/
+ signed int sb_partial:2;
+ /*The number of bits required for the flags for the current super block.*/
+ unsigned sb_bits:6;
+ /*Whether the last non-partial super block was coded fully.*/
+ signed int sb_full:2;
+};
+
+
+
+struct oc_qii_state{
+ ptrdiff_t bits;
+ unsigned qi01_count:14;
+ signed int qi01:2;
+ unsigned qi12_count:14;
+ signed int qi12:2;
+};
+
+
+
+/*Temporary encoder state for the analysis pipeline.*/
+struct oc_enc_pipeline_state{
+ /*DCT coefficient storage.
+ This is kept off the stack because a) gcc can't align things on the stack
+ reliably on ARM, and b) it avoids (unintentional) data hazards between
+ ARM and NEON code.*/
+ OC_ALIGN16(ogg_int16_t dct_data[64*3]);
+ OC_ALIGN16(signed char bounding_values[256]);
+ oc_fr_state fr[3];
+ oc_qii_state qs[3];
+ /*Skip SSD storage for the current MCU in each plane.*/
+ unsigned *skip_ssd[3];
+ /*Coded/uncoded fragment lists for each plane for the current MCU.*/
+ ptrdiff_t *coded_fragis[3];
+ ptrdiff_t *uncoded_fragis[3];
+ ptrdiff_t ncoded_fragis[3];
+ ptrdiff_t nuncoded_fragis[3];
+ /*The starting fragment for the current MCU in each plane.*/
+ ptrdiff_t froffset[3];
+ /*The starting row for the current MCU in each plane.*/
+ int fragy0[3];
+ /*The ending row for the current MCU in each plane.*/
+ int fragy_end[3];
+ /*The starting superblock for the current MCU in each plane.*/
+ unsigned sbi0[3];
+ /*The ending superblock for the current MCU in each plane.*/
+ unsigned sbi_end[3];
+ /*The number of tokens for zzi=1 for each color plane.*/
+ int ndct_tokens1[3];
+ /*The outstanding eob_run count for zzi=1 for each color plane.*/
+ int eob_run1[3];
+ /*Whether or not the loop filter is enabled.*/
+ int loop_filter;
+};
+
+
+
+/*Statistics used to estimate R-D cost of a block in a given coding mode.
+ See modedec.h for more details.*/
+struct oc_mode_rd{
+ /*The expected bits used by the DCT tokens, shifted by OC_BIT_SCALE.*/
+ ogg_int16_t rate;
+ /*The expected square root of the sum of squared errors, shifted by
+ OC_RMSE_SCALE.*/
+ ogg_int16_t rmse;
+};
+
+# if defined(OC_COLLECT_METRICS)
+# include "collect.h"
+# endif
+
+
+
/*A 2nd order low-pass Bessel follower.
We use this for rate control because it has fast reaction time, but is
critically damped.*/
@@ -190,6 +524,8 @@ struct oc_frame_metrics{
unsigned dup_count:31;
/*The frame type from pass 1.*/
unsigned frame_type:1;
+ /*The frame activity average from pass 1.*/
+ unsigned activity_avg;
};
@@ -335,10 +671,15 @@ struct th_enc_ctx{
size_t mv_bits[2];
/*The mode scheme chooser for estimating mode coding costs.*/
oc_mode_scheme_chooser chooser;
+ /*Temporary encoder state for the analysis pipeline.*/
+ oc_enc_pipeline_state pipe;
/*The number of vertical super blocks in an MCU.*/
int mcu_nvsbs;
/*The SSD error for skipping each fragment in the current MCU.*/
unsigned *mcu_skip_ssd;
+ /*The masking scale factors for chroma blocks in the current MCU.*/
+ ogg_uint16_t *mcu_rd_scale;
+ ogg_uint16_t *mcu_rd_iscale;
/*The DCT token lists for each coefficient and each plane.*/
unsigned char **dct_tokens[3];
/*The extra bits associated with each DCT token.*/
@@ -350,8 +691,10 @@ struct th_enc_ctx{
/*The offset of the first DCT token for each coefficient for each plane.*/
unsigned char dct_token_offs[3][64];
/*The last DC coefficient for each plane and reference frame.*/
- int dc_pred_last[3][3];
+ int dc_pred_last[3][4];
#if defined(OC_COLLECT_METRICS)
+ /*Fragment SAD statistics for MB mode estimation metrics.*/
+ unsigned *frag_sad;
/*Fragment SATD statistics for MB mode estimation metrics.*/
unsigned *frag_satd;
/*Fragment SSD statistics for MB mode estimation metrics.*/
@@ -359,32 +702,56 @@ struct th_enc_ctx{
#endif
/*The R-D optimization parameter.*/
int lambda;
+ /*The average block "activity" of the previous frame.*/
+ unsigned activity_avg;
+ /*The average MB luma of the previous frame.*/
+ unsigned luma_avg;
/*The huffman tables in use.*/
th_huff_code huff_codes[TH_NHUFFMAN_TABLES][TH_NDCT_TOKENS];
/*The quantization parameters in use.*/
th_quant_info qinfo;
- oc_iquant *enquant_tables[64][3][2];
- oc_iquant_table enquant_table_data[64][3][2];
- /*An "average" quantizer for each quantizer type (INTRA or INTER) and qi
- value.
- This is used to paramterize the rate control decisions.
+  /*The original DC coefficients saved off from the dequantization tables.*/
+ ogg_uint16_t dequant_dc[64][3][2];
+ /*Condensed dequantization tables.*/
+ const ogg_uint16_t *dequant[3][3][2];
+ /*Condensed quantization tables.*/
+ void *enquant[3][3][2];
+ /*The full set of quantization tables.*/
+ void *enquant_tables[64][3][2];
+ /*Storage for the quantization tables.*/
+ unsigned char *enquant_table_data;
+ /*An "average" quantizer for each frame type (INTRA or INTER) and qi value.
+ This is used to parameterize the rate control decisions.
They are kept in the log domain to simplify later processing.
- Keep in mind these are DCT domain quantizers, and so are scaled by an
- additional factor of 4 from the pixel domain.*/
+ These are DCT domain quantizers, and so are scaled by an additional factor
+ of 4 from the pixel domain.*/
ogg_int64_t log_qavg[2][64];
+  /*The "average" quantizer further partitioned by color plane.
+ This is used to parameterize mode decision.
+ These are DCT domain quantizers, and so are scaled by an additional factor
+ of 4 from the pixel domain.*/
+ ogg_int16_t log_plq[64][3][2];
+ /*The R-D scale factors to apply to chroma blocks for a given frame type
+ (INTRA or INTER) and qi value.
+ The first is the "D" modifier (rd_scale), while the second is the "lambda"
+ modifier (rd_iscale).*/
+ ogg_uint16_t chroma_rd_scale[2][64][2];
+ /*The interpolated mode decision R-D lookup tables for the current
+ quantizers, color plane, and quantization type.*/
+ oc_mode_rd mode_rd[3][3][2][OC_COMP_BINS];
/*The buffer state used to drive rate control.*/
oc_rc_state rc;
+# if defined(OC_ENC_USE_VTABLE)
/*Table for encoder acceleration functions.*/
oc_enc_opt_vtable opt_vtable;
+# endif
+ /*Table for encoder data used by accelerated functions.*/
+ oc_enc_opt_data opt_data;
};
void oc_enc_analyze_intra(oc_enc_ctx *_enc,int _recode);
int oc_enc_analyze_inter(oc_enc_ctx *_enc,int _allow_keyframe,int _recode);
-#if defined(OC_COLLECT_METRICS)
-void oc_enc_mode_metrics_collect(oc_enc_ctx *_enc);
-void oc_enc_mode_metrics_dump(oc_enc_ctx *_enc);
-#endif
@@ -415,8 +782,13 @@ struct oc_token_checkpoint{
void oc_enc_tokenize_start(oc_enc_ctx *_enc);
int oc_enc_tokenize_ac(oc_enc_ctx *_enc,int _pli,ptrdiff_t _fragi,
- ogg_int16_t *_qdct,const ogg_uint16_t *_dequant,const ogg_int16_t *_dct,
- int _zzi,oc_token_checkpoint **_stack,int _acmin);
+ ogg_int16_t *_qdct_out,const ogg_int16_t *_qdct_in,
+ const ogg_uint16_t *_dequant,const ogg_int16_t *_dct,
+ int _zzi,oc_token_checkpoint **_stack,int _lambda,int _acmin);
+int oc_enc_tokenize_ac_fast(oc_enc_ctx *_enc,int _pli,ptrdiff_t _fragi,
+ ogg_int16_t *_qdct_out,const ogg_int16_t *_qdct_in,
+ const ogg_uint16_t *_dequant,const ogg_int16_t *_dct,
+ int _zzi,oc_token_checkpoint **_stack,int _lambda,int _acmin);
void oc_enc_tokenlog_rollback(oc_enc_ctx *_enc,
const oc_token_checkpoint *_stack,int _n);
void oc_enc_pred_dc_frag_rows(oc_enc_ctx *_enc,
@@ -436,45 +808,13 @@ int oc_state_flushheader(oc_theora_state *_state,int *_packet_state,
-/*Encoder-specific accelerated functions.*/
-void oc_enc_frag_sub(const oc_enc_ctx *_enc,ogg_int16_t _diff[64],
- const unsigned char *_src,const unsigned char *_ref,int _ystride);
-void oc_enc_frag_sub_128(const oc_enc_ctx *_enc,ogg_int16_t _diff[64],
- const unsigned char *_src,int _ystride);
-unsigned oc_enc_frag_sad(const oc_enc_ctx *_enc,const unsigned char *_src,
- const unsigned char *_ref,int _ystride);
-unsigned oc_enc_frag_sad_thresh(const oc_enc_ctx *_enc,
- const unsigned char *_src,const unsigned char *_ref,int _ystride,
- unsigned _thresh);
-unsigned oc_enc_frag_sad2_thresh(const oc_enc_ctx *_enc,
- const unsigned char *_src,const unsigned char *_ref1,
- const unsigned char *_ref2,int _ystride,unsigned _thresh);
-unsigned oc_enc_frag_satd_thresh(const oc_enc_ctx *_enc,
- const unsigned char *_src,const unsigned char *_ref,int _ystride,
- unsigned _thresh);
-unsigned oc_enc_frag_satd2_thresh(const oc_enc_ctx *_enc,
- const unsigned char *_src,const unsigned char *_ref1,
- const unsigned char *_ref2,int _ystride,unsigned _thresh);
-unsigned oc_enc_frag_intra_satd(const oc_enc_ctx *_enc,
- const unsigned char *_src,int _ystride);
-void oc_enc_frag_copy2(const oc_enc_ctx *_enc,unsigned char *_dst,
- const unsigned char *_src1,const unsigned char *_src2,int _ystride);
-void oc_enc_frag_recon_intra(const oc_enc_ctx *_enc,
- unsigned char *_dst,int _ystride,const ogg_int16_t _residue[64]);
-void oc_enc_frag_recon_inter(const oc_enc_ctx *_enc,unsigned char *_dst,
- const unsigned char *_src,int _ystride,const ogg_int16_t _residue[64]);
-void oc_enc_fdct8x8(const oc_enc_ctx *_enc,ogg_int16_t _y[64],
- const ogg_int16_t _x[64]);
-
-/*Default pure-C implementations.*/
-void oc_enc_vtable_init_c(oc_enc_ctx *_enc);
+/*Default pure-C implementations of encoder-specific accelerated functions.*/
+void oc_enc_accel_init_c(oc_enc_ctx *_enc);
void oc_enc_frag_sub_c(ogg_int16_t _diff[64],
const unsigned char *_src,const unsigned char *_ref,int _ystride);
void oc_enc_frag_sub_128_c(ogg_int16_t _diff[64],
const unsigned char *_src,int _ystride);
-void oc_enc_frag_copy2_c(unsigned char *_dst,
- const unsigned char *_src1,const unsigned char *_src2,int _ystride);
unsigned oc_enc_frag_sad_c(const unsigned char *_src,
const unsigned char *_ref,int _ystride);
unsigned oc_enc_frag_sad_thresh_c(const unsigned char *_src,
@@ -482,12 +822,24 @@ unsigned oc_enc_frag_sad_thresh_c(const unsigned char *_src,
unsigned oc_enc_frag_sad2_thresh_c(const unsigned char *_src,
const unsigned char *_ref1,const unsigned char *_ref2,int _ystride,
unsigned _thresh);
-unsigned oc_enc_frag_satd_thresh_c(const unsigned char *_src,
- const unsigned char *_ref,int _ystride,unsigned _thresh);
-unsigned oc_enc_frag_satd2_thresh_c(const unsigned char *_src,
- const unsigned char *_ref1,const unsigned char *_ref2,int _ystride,
- unsigned _thresh);
-unsigned oc_enc_frag_intra_satd_c(const unsigned char *_src,int _ystride);
+unsigned oc_enc_frag_intra_sad_c(const unsigned char *_src, int _ystride);
+unsigned oc_enc_frag_satd_c(int *_dc,const unsigned char *_src,
+ const unsigned char *_ref,int _ystride);
+unsigned oc_enc_frag_satd2_c(int *_dc,const unsigned char *_src,
+ const unsigned char *_ref1,const unsigned char *_ref2,int _ystride);
+unsigned oc_enc_frag_intra_satd_c(int *_dc,
+ const unsigned char *_src,int _ystride);
+unsigned oc_enc_frag_ssd_c(const unsigned char *_src,
+ const unsigned char *_ref,int _ystride);
+unsigned oc_enc_frag_border_ssd_c(const unsigned char *_src,
+ const unsigned char *_ref,int _ystride,ogg_int64_t _mask);
+void oc_enc_frag_copy2_c(unsigned char *_dst,
+ const unsigned char *_src1,const unsigned char *_src2,int _ystride);
+void oc_enc_enquant_table_init_c(void *_enquant,
+ const ogg_uint16_t _dequant[64]);
+void oc_enc_enquant_table_fixup_c(void *_enquant[3][3][2],int _nqis);
+int oc_enc_quantize_c(ogg_int16_t _qdct[64],const ogg_int16_t _dct[64],
+ const ogg_uint16_t _dequant[64],const void *_enquant);
void oc_enc_fdct8x8_c(ogg_int16_t _y[64],const ogg_int16_t _x[64]);
#endif
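The OC_ENC_USE_VTABLE machinery above boils down to one dispatch pattern: each accelerated entry point is a macro that either goes through a per-encoder function pointer (so platform init code can substitute an optimized routine at runtime) or collapses to a direct call to the single known C implementation. A reduced sketch of that pattern, with purely hypothetical names:

/*Hypothetical, reduced version of the dispatch pattern used above.*/
typedef struct demo_enc demo_enc;

unsigned demo_frag_sad_c(const unsigned char *_src,
 const unsigned char *_ref,int _ystride);

struct demo_enc{
# if defined(DEMO_USE_VTABLE)
  /*Filled in by platform-specific init code at runtime.*/
  unsigned (*frag_sad)(const unsigned char *_src,
   const unsigned char *_ref,int _ystride);
# endif
  int placeholder;
};

# if defined(DEMO_USE_VTABLE)
#  define demo_frag_sad(_enc,_src,_ref,_ystride) \
   ((*(_enc)->frag_sad)(_src,_ref,_ystride))
# else
/*No runtime choice to make: call the C version directly so it can be
   inlined and the function pointer storage disappears.*/
#  define demo_frag_sad(_enc,_src,_ref,_ystride) \
   demo_frag_sad_c(_src,_ref,_ystride)
# endif

Each real macro above is additionally wrapped in #if !defined(...) so that a platform header such as x86enc.h, included earlier, can pre-define the name and the generic definition steps aside.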
diff --git a/thirdparty/libtheora/encode.c b/thirdparty/libtheora/encode.c
index 0c5ea6a172..3309f97c03 100644
--- a/thirdparty/libtheora/encode.c
+++ b/thirdparty/libtheora/encode.c
@@ -11,15 +11,13 @@
********************************************************************
function:
- last mod: $Id: encode.c 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
#include <stdlib.h>
#include <string.h>
#include "encint.h"
-#if defined(OC_X86_ASM)
-# include "x86/x86enc.h"
-#endif
+#include "dequant.h"
@@ -288,12 +286,12 @@ const th_quant_info TH_DEF_QUANT_INFO={
28, 25, 24, 22, 20, 17, 14, 10
},
{
- 30,25,20,20,15,15,14,14,
- 13,13,12,12,11,11,10,10,
- 9, 9, 8, 8, 7, 7, 7, 7,
- 6, 6, 6, 6, 5, 5, 5, 5,
- 4, 4, 4, 4, 3, 3, 3, 3,
+ 15,12, 9, 8, 6, 6, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5,
+ 4, 4, 4, 4, 4, 4, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3,
2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0
},
@@ -623,11 +621,15 @@ static void oc_enc_mb_modes_pack(oc_enc_ctx *_enc){
}
}
-static void oc_enc_mv_pack(oc_enc_ctx *_enc,int _mv_scheme,int _dx,int _dy){
+static void oc_enc_mv_pack(oc_enc_ctx *_enc,int _mv_scheme,oc_mv _mv){
+ int dx;
+ int dy;
+ dx=OC_MV_X(_mv);
+ dy=OC_MV_Y(_mv);
oggpackB_write(&_enc->opb,
- OC_MV_CODES[_mv_scheme][_dx+31],OC_MV_BITS[_mv_scheme][_dx+31]);
+ OC_MV_CODES[_mv_scheme][dx+31],OC_MV_BITS[_mv_scheme][dx+31]);
oggpackB_write(&_enc->opb,
- OC_MV_CODES[_mv_scheme][_dy+31],OC_MV_BITS[_mv_scheme][_dy+31]);
+ OC_MV_CODES[_mv_scheme][dy+31],OC_MV_BITS[_mv_scheme][dy+31]);
}
static void oc_enc_mvs_pack(oc_enc_ctx *_enc){
@@ -650,7 +652,7 @@ static void oc_enc_mvs_pack(oc_enc_ctx *_enc){
mb_modes=_enc->state.mb_modes;
mb_maps=(const oc_mb_map *)_enc->state.mb_maps;
frags=_enc->state.frags;
- frag_mvs=(const oc_mv *)_enc->state.frag_mvs;
+ frag_mvs=_enc->state.frag_mvs;
for(mbii=0;mbii<ncoded_mbis;mbii++){
ptrdiff_t fragi;
unsigned mbi;
@@ -662,8 +664,7 @@ static void oc_enc_mvs_pack(oc_enc_ctx *_enc){
for(bi=0;;bi++){
fragi=mb_maps[mbi][0][bi];
if(frags[fragi].coded){
- oc_enc_mv_pack(_enc,mv_scheme,
- frag_mvs[fragi][0],frag_mvs[fragi][1]);
+ oc_enc_mv_pack(_enc,mv_scheme,frag_mvs[fragi]);
/*Only code a single MV for this macro block.*/
break;
}
@@ -673,8 +674,7 @@ static void oc_enc_mvs_pack(oc_enc_ctx *_enc){
for(bi=0;bi<4;bi++){
fragi=mb_maps[mbi][0][bi];
if(frags[fragi].coded){
- oc_enc_mv_pack(_enc,mv_scheme,
- frag_mvs[fragi][0],frag_mvs[fragi][1]);
+ oc_enc_mv_pack(_enc,mv_scheme,frag_mvs[fragi]);
/*Keep coding all the MVs for this macro block.*/
}
}
@@ -863,11 +863,55 @@ static void oc_enc_residual_tokens_pack(oc_enc_ctx *_enc){
}
}
+/*Packs an explicit drop frame, instead of using the more efficient 0-byte
+ packet.
+ This is only enabled in VP3-compatibility mode, even though it is not
+ strictly required for VP3 compatibility (VP3 could be encoded in AVI, which
+ also supports dropping frames by inserting 0 byte packets).
+ However, almost every _Theora_ player used to get this wrong (and many still
+ do), and it wasn't until we started shipping a post-VP3 encoder that
+ actually used non-VP3 features that this began to be discovered and fixed,
+ despite being in the standard since 2004.
+ The pack buffer must be reset before calling this function.*/
+static void oc_enc_drop_frame_pack(oc_enc_ctx *_enc){
+ unsigned nsbs;
+ /*Mark this as a data packet.*/
+ oggpackB_write(&_enc->opb,0,1);
+ /*Output the frame type (key frame or delta frame).*/
+ oggpackB_write(&_enc->opb,OC_INTER_FRAME,1);
+ /*Write out the current qi list.
+ We always use just 1 qi, to avoid wasting bits on the others.*/
+ oggpackB_write(&_enc->opb,_enc->state.qis[0],6);
+ oggpackB_write(&_enc->opb,0,1);
+ /*Coded block flags: everything is uncoded.*/
+ nsbs=_enc->state.nsbs;
+ /*No partially coded SBs.*/
+ oggpackB_write(&_enc->opb,0,1);
+ oc_sb_run_pack(&_enc->opb,nsbs,0,1);
+ /*No fully coded SBs.*/
+ oggpackB_write(&_enc->opb,0,1);
+ oc_sb_run_pack(&_enc->opb,nsbs,0,1);
+ /*MB modes: just need to write which scheme to use.
+ Since we have no coded MBs, we can pick any of them except 0, which would
+ require writing out an additional mode list.*/
+ oggpackB_write(&_enc->opb,7,3);
+ /*MVs: just need to write which scheme to use.
+ We can pick either one, since we have no MVs.*/
+ oggpackB_write(&_enc->opb,1,1);
+ /*Write the chosen DC token tables.*/
+ oggpackB_write(&_enc->opb,_enc->huff_idxs[OC_INTER_FRAME][0][0],4);
+ oggpackB_write(&_enc->opb,_enc->huff_idxs[OC_INTER_FRAME][0][1],4);
+ /*Write the chosen AC token tables.*/
+ oggpackB_write(&_enc->opb,_enc->huff_idxs[OC_INTER_FRAME][1][0],4);
+ oggpackB_write(&_enc->opb,_enc->huff_idxs[OC_INTER_FRAME][1][1],4);
+}
+
static void oc_enc_frame_pack(oc_enc_ctx *_enc){
+ /*musl libc malloc()/realloc() calls might use floating point, so make sure
+ we've cleared the MMX state for them.*/
+ oc_restore_fpu(&_enc->state);
oggpackB_reset(&_enc->opb);
- /*Only proceed if we have some coded blocks.
- If there are no coded blocks, we can drop this frame simply by emitting a
- 0 byte packet.*/
+ /*Only proceed if we have some coded blocks.*/
if(_enc->state.ntotal_coded_fragis>0){
oc_enc_frame_header_pack(_enc);
if(_enc->state.frame_type==OC_INTER_FRAME){
@@ -880,6 +924,10 @@ static void oc_enc_frame_pack(oc_enc_ctx *_enc){
oc_enc_tokenize_finish(_enc);
oc_enc_residual_tokens_pack(_enc);
}
+ /*If there are no coded blocks, we can drop this frame simply by emitting a
+ 0 byte packet.
+ We emit an inter frame with no coded blocks in VP3-compatibility mode.*/
+ else if(_enc->vp3_compatible)oc_enc_drop_frame_pack(_enc);
/*Success: Mark the packet as ready to be flushed.*/
_enc->packet_state=OC_PACKET_READY;
#if defined(OC_COLLECT_METRICS)
@@ -888,21 +936,31 @@ static void oc_enc_frame_pack(oc_enc_ctx *_enc){
}
-void oc_enc_vtable_init_c(oc_enc_ctx *_enc){
+void oc_enc_accel_init_c(oc_enc_ctx *_enc){
/*The implementations prefixed with oc_enc_ are encoder-specific.
The rest we re-use from the decoder.*/
+# if defined(OC_ENC_USE_VTABLE)
+ _enc->opt_vtable.frag_sub=oc_enc_frag_sub_c;
+ _enc->opt_vtable.frag_sub_128=oc_enc_frag_sub_128_c;
_enc->opt_vtable.frag_sad=oc_enc_frag_sad_c;
_enc->opt_vtable.frag_sad_thresh=oc_enc_frag_sad_thresh_c;
_enc->opt_vtable.frag_sad2_thresh=oc_enc_frag_sad2_thresh_c;
- _enc->opt_vtable.frag_satd_thresh=oc_enc_frag_satd_thresh_c;
- _enc->opt_vtable.frag_satd2_thresh=oc_enc_frag_satd2_thresh_c;
+ _enc->opt_vtable.frag_intra_sad=oc_enc_frag_intra_sad_c;
+ _enc->opt_vtable.frag_satd=oc_enc_frag_satd_c;
+ _enc->opt_vtable.frag_satd2=oc_enc_frag_satd2_c;
_enc->opt_vtable.frag_intra_satd=oc_enc_frag_intra_satd_c;
- _enc->opt_vtable.frag_sub=oc_enc_frag_sub_c;
- _enc->opt_vtable.frag_sub_128=oc_enc_frag_sub_128_c;
+ _enc->opt_vtable.frag_ssd=oc_enc_frag_ssd_c;
+ _enc->opt_vtable.frag_border_ssd=oc_enc_frag_border_ssd_c;
_enc->opt_vtable.frag_copy2=oc_enc_frag_copy2_c;
+ _enc->opt_vtable.enquant_table_init=oc_enc_enquant_table_init_c;
+ _enc->opt_vtable.enquant_table_fixup=oc_enc_enquant_table_fixup_c;
+ _enc->opt_vtable.quantize=oc_enc_quantize_c;
_enc->opt_vtable.frag_recon_intra=oc_frag_recon_intra_c;
_enc->opt_vtable.frag_recon_inter=oc_frag_recon_inter_c;
_enc->opt_vtable.fdct8x8=oc_enc_fdct8x8_c;
+# endif
+ _enc->opt_data.enquant_table_size=64*sizeof(oc_iquant);
+ _enc->opt_data.enquant_table_alignment=16;
}
/*Initialize the macro block neighbor lists for MC analysis.
@@ -1003,6 +1061,55 @@ static int oc_enc_set_huffman_codes(oc_enc_ctx *_enc,
return 0;
}
+static void oc_enc_enquant_tables_init(oc_enc_ctx *_enc,
+ const th_quant_info *_qinfo){
+ unsigned char *etd;
+ size_t ets;
+ int align;
+ int qii;
+ int qi;
+ int pli;
+ int qti;
+ for(qi=0;qi<64;qi++)for(pli=0;pli<3;pli++)for(qti=0;qti<2;qti++){
+ _enc->state.dequant_tables[qi][pli][qti]=
+ _enc->state.dequant_table_data[qi][pli][qti];
+ }
+ /*Initialize the dequantization tables.*/
+ oc_dequant_tables_init(_enc->state.dequant_tables,NULL,_qinfo);
+ /*And save off the DC values.*/
+ for(qi=0;qi<64;qi++)for(pli=0;pli<3;pli++)for(qti=0;qti<2;qti++){
+ _enc->dequant_dc[qi][pli][qti]=_enc->state.dequant_tables[qi][pli][qti][0];
+ }
+ /*Set up storage for the quantization tables.*/
+ etd=_enc->enquant_table_data;
+ ets=_enc->opt_data.enquant_table_size;
+ align=-(etd-(unsigned char *)0)&_enc->opt_data.enquant_table_alignment-1;
+ etd+=align;
+ /*Set up the main tables.*/
+ for(qi=0;qi<64;qi++)for(pli=0;pli<3;pli++)for(qti=0;qti<2;qti++){
+ _enc->enquant_tables[qi][pli][qti]=etd;
+ oc_enc_enquant_table_init(_enc,etd,
+ _enc->state.dequant_tables[qi][pli][qti]);
+ etd+=ets;
+ }
+ /*Set up storage for the local copies we modify for each frame.*/
+ for(pli=0;pli<3;pli++)for(qii=0;qii<3;qii++)for(qti=0;qti<2;qti++){
+ _enc->enquant[pli][qii][qti]=etd;
+ etd+=ets;
+ }
+}
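Aside (not part of the patch): a note on the alignment arithmetic above.
Because binary `-` binds tighter than `&` in C, the expression masks with
`enquant_table_alignment-1`, and negating the pointer's address modulo a
power-of-two alignment yields exactly the number of padding bytes needed to
reach the next aligned address.  A minimal standalone sketch of the same
idiom, using a hypothetical helper name:

  #include <stddef.h>
  #include <stdint.h>

  /*Bytes of padding needed to advance _ptr to the next _align-byte boundary.
    _align must be a power of two (the code above uses an alignment of 16 for
    the oc_iquant tables).*/
  static size_t oc_align_pad_sketch(const unsigned char *_ptr,size_t _align){
    return (size_t)(-(uintptr_t)_ptr)&(_align-1);
  }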
+
+/*Updates the encoder state after the quantization parameters have been
+ changed.*/
+static void oc_enc_quant_params_updated(oc_enc_ctx *_enc,
+ const th_quant_info *_qinfo){
+ oc_enc_enquant_tables_init(_enc,_qinfo);
+ memcpy(_enc->state.loop_filter_limits,_qinfo->loop_filter_limits,
+ sizeof(_enc->state.loop_filter_limits));
+ oc_enquant_qavg_init(_enc->log_qavg,_enc->log_plq,_enc->chroma_rd_scale,
+ _enc->state.dequant_tables,_enc->state.info.pixel_fmt);
+}
+
/*Sets the quantization parameters to use.
This may only be called before the setup header is written.
If it is called multiple times, only the last call has any effect.
@@ -1012,25 +1119,20 @@ static int oc_enc_set_huffman_codes(oc_enc_ctx *_enc,
will be used.*/
static int oc_enc_set_quant_params(oc_enc_ctx *_enc,
const th_quant_info *_qinfo){
- int qi;
- int pli;
- int qti;
+ th_quant_info old_qinfo;
+ int ret;
if(_enc==NULL)return TH_EFAULT;
if(_enc->packet_state>OC_PACKET_SETUP_HDR)return TH_EINVAL;
if(_qinfo==NULL)_qinfo=&TH_DEF_QUANT_INFO;
- /*TODO: Analyze for packing purposes instead of just doing a shallow copy.*/
- memcpy(&_enc->qinfo,_qinfo,sizeof(_enc->qinfo));
- for(qi=0;qi<64;qi++)for(pli=0;pli<3;pli++)for(qti=0;qti<2;qti++){
- _enc->state.dequant_tables[qi][pli][qti]=
- _enc->state.dequant_table_data[qi][pli][qti];
- _enc->enquant_tables[qi][pli][qti]=_enc->enquant_table_data[qi][pli][qti];
+ memcpy(&old_qinfo,&_enc->qinfo,sizeof(old_qinfo));
+ ret=oc_quant_params_clone(&_enc->qinfo,_qinfo);
+ if(ret<0){
+ oc_quant_params_clear(&_enc->qinfo);
+ memcpy(&_enc->qinfo,&old_qinfo,sizeof(old_qinfo));
+ return ret;
}
- oc_enquant_tables_init(_enc->state.dequant_tables,
- _enc->enquant_tables,_qinfo);
- memcpy(_enc->state.loop_filter_limits,_qinfo->loop_filter_limits,
- sizeof(_enc->state.loop_filter_limits));
- oc_enquant_qavg_init(_enc->log_qavg,_enc->state.dequant_tables,
- _enc->state.info.pixel_fmt);
+ else oc_quant_params_clear(&old_qinfo);
+ oc_enc_quant_params_updated(_enc,_qinfo);
return 0;
}
@@ -1039,6 +1141,7 @@ static void oc_enc_clear(oc_enc_ctx *_enc);
static int oc_enc_init(oc_enc_ctx *_enc,const th_info *_info){
th_info info;
size_t mcu_nmbs;
+ ptrdiff_t mcu_ncfrags;
ptrdiff_t mcu_nfrags;
int hdec;
int vdec;
@@ -1053,8 +1156,9 @@ static int oc_enc_init(oc_enc_ctx *_enc,const th_info *_info){
if(info.quality<0)info.quality=32;
if(info.target_bitrate<0)info.target_bitrate=0;
/*Initialize the shared encoder/decoder state.*/
- ret=oc_state_init(&_enc->state,&info,4);
+ ret=oc_state_init(&_enc->state,&info,6);
if(ret<0)return ret;
+ oc_enc_accel_init(_enc);
_enc->mb_info=_ogg_calloc(_enc->state.nmbs,sizeof(*_enc->mb_info));
_enc->frag_dc=_ogg_calloc(_enc->state.nfrags,sizeof(*_enc->frag_dc));
_enc->coded_mbis=
@@ -1065,9 +1169,14 @@ static int oc_enc_init(oc_enc_ctx *_enc,const th_info *_info){
super block rows of Y' for each super block row of Cb and Cr.*/
_enc->mcu_nvsbs=1<<vdec;
mcu_nmbs=_enc->mcu_nvsbs*_enc->state.fplanes[0].nhsbs*(size_t)4;
- mcu_nfrags=4*mcu_nmbs+(8*mcu_nmbs>>hdec+vdec);
+ mcu_ncfrags=mcu_nmbs<<3-(hdec+vdec);
+ mcu_nfrags=4*mcu_nmbs+mcu_ncfrags;
_enc->mcu_skip_ssd=(unsigned *)_ogg_malloc(
mcu_nfrags*sizeof(*_enc->mcu_skip_ssd));
+ _enc->mcu_rd_scale=(ogg_uint16_t *)_ogg_malloc(
+ (mcu_ncfrags>>1)*sizeof(*_enc->mcu_rd_scale));
+ _enc->mcu_rd_iscale=(ogg_uint16_t *)_ogg_malloc(
+ (mcu_ncfrags>>1)*sizeof(*_enc->mcu_rd_iscale));
for(pli=0;pli<3;pli++){
_enc->dct_tokens[pli]=(unsigned char **)oc_malloc_2d(64,
_enc->state.fplanes[pli].nfrags,sizeof(**_enc->dct_tokens));
@@ -1075,34 +1184,22 @@ static int oc_enc_init(oc_enc_ctx *_enc,const th_info *_info){
_enc->state.fplanes[pli].nfrags,sizeof(**_enc->extra_bits));
}
#if defined(OC_COLLECT_METRICS)
+ _enc->frag_sad=_ogg_calloc(_enc->state.nfrags,sizeof(*_enc->frag_sad));
_enc->frag_satd=_ogg_calloc(_enc->state.nfrags,sizeof(*_enc->frag_satd));
_enc->frag_ssd=_ogg_calloc(_enc->state.nfrags,sizeof(*_enc->frag_ssd));
#endif
-#if defined(OC_X86_ASM)
- oc_enc_vtable_init_x86(_enc);
-#else
- oc_enc_vtable_init_c(_enc);
-#endif
+ _enc->enquant_table_data=(unsigned char *)_ogg_malloc(
+ (64+3)*3*2*_enc->opt_data.enquant_table_size
+ +_enc->opt_data.enquant_table_alignment-1);
_enc->keyframe_frequency_force=1<<_enc->state.info.keyframe_granule_shift;
_enc->state.qis[0]=_enc->state.info.quality;
_enc->state.nqis=1;
+ _enc->activity_avg=90<<12;
+ _enc->luma_avg=128<<8;
oc_rc_state_init(&_enc->rc,_enc);
oggpackB_writeinit(&_enc->opb);
- if(_enc->mb_info==NULL||_enc->frag_dc==NULL||_enc->coded_mbis==NULL||
- _enc->mcu_skip_ssd==NULL||_enc->dct_tokens[0]==NULL||
- _enc->dct_tokens[1]==NULL||_enc->dct_tokens[2]==NULL||
- _enc->extra_bits[0]==NULL||_enc->extra_bits[1]==NULL||
- _enc->extra_bits[2]==NULL
-#if defined(OC_COLLECT_METRICS)
- ||_enc->frag_satd==NULL||_enc->frag_ssd==NULL
-#endif
- ){
- oc_enc_clear(_enc);
- return TH_EFAULT;
- }
- oc_mode_scheme_chooser_init(&_enc->chooser);
- oc_enc_mb_info_init(_enc);
- memset(_enc->huff_idxs,0,sizeof(_enc->huff_idxs));
+ memcpy(_enc->huff_codes,TH_VP31_HUFF_CODES,sizeof(_enc->huff_codes));
+ memset(_enc->qinfo.qi_ranges,0,sizeof(_enc->qinfo.qi_ranges));
/*Reset the packet-out state machine.*/
_enc->packet_state=OC_PACKET_INFO_HDR;
_enc->dup_count=0;
@@ -1114,26 +1211,45 @@ static int oc_enc_init(oc_enc_ctx *_enc,const th_info *_info){
_enc->vp3_compatible=0;
/*No INTER frames coded yet.*/
_enc->coded_inter_frame=0;
- memcpy(_enc->huff_codes,TH_VP31_HUFF_CODES,sizeof(_enc->huff_codes));
- oc_enc_set_quant_params(_enc,NULL);
+ if(_enc->mb_info==NULL||_enc->frag_dc==NULL||_enc->coded_mbis==NULL
+ ||_enc->mcu_skip_ssd==NULL||_enc->dct_tokens[0]==NULL
+ ||_enc->dct_tokens[1]==NULL||_enc->dct_tokens[2]==NULL
+ ||_enc->extra_bits[0]==NULL||_enc->extra_bits[1]==NULL
+ ||_enc->extra_bits[2]==NULL
+#if defined(OC_COLLECT_METRICS)
+ ||_enc->frag_sad==NULL||_enc->frag_satd==NULL||_enc->frag_ssd==NULL
+#endif
+ ||oc_enc_set_quant_params(_enc,NULL)<0){
+ oc_enc_clear(_enc);
+ return TH_EFAULT;
+ }
+ oc_mode_scheme_chooser_init(&_enc->chooser);
+ oc_enc_mb_info_init(_enc);
+ memset(_enc->huff_idxs,0,sizeof(_enc->huff_idxs));
return 0;
}
static void oc_enc_clear(oc_enc_ctx *_enc){
int pli;
oc_rc_state_clear(&_enc->rc);
-#if defined(OC_COLLECT_METRICS)
- oc_enc_mode_metrics_dump(_enc);
-#endif
oggpackB_writeclear(&_enc->opb);
+ oc_quant_params_clear(&_enc->qinfo);
+ _ogg_free(_enc->enquant_table_data);
#if defined(OC_COLLECT_METRICS)
+ /*Save the collected metrics from this run.
+ Use tools/process_modedec_stats to actually generate modedec.h from the
+ resulting file.*/
+ oc_mode_metrics_dump();
_ogg_free(_enc->frag_ssd);
_ogg_free(_enc->frag_satd);
+ _ogg_free(_enc->frag_sad);
#endif
for(pli=3;pli-->0;){
oc_free_2d(_enc->extra_bits[pli]);
oc_free_2d(_enc->dct_tokens[pli]);
}
+ _ogg_free(_enc->mcu_rd_iscale);
+ _ogg_free(_enc->mcu_rd_scale);
_ogg_free(_enc->mcu_skip_ssd);
_ogg_free(_enc->coded_mbis);
_ogg_free(_enc->frag_dc);
@@ -1145,10 +1261,14 @@ static void oc_enc_drop_frame(th_enc_ctx *_enc){
/*Use the previous frame's reconstruction.*/
_enc->state.ref_frame_idx[OC_FRAME_SELF]=
_enc->state.ref_frame_idx[OC_FRAME_PREV];
+ _enc->state.ref_frame_data[OC_FRAME_SELF]=
+ _enc->state.ref_frame_data[OC_FRAME_PREV];
/*Flag motion vector analysis about the frame drop.*/
_enc->prevframe_dropped=1;
/*Zero the packet.*/
oggpackB_reset(&_enc->opb);
+ /*Emit an inter frame with no coded blocks in VP3-compatibility mode.*/
+ if(_enc->vp3_compatible)oc_enc_drop_frame_pack(_enc);
}
static void oc_enc_compress_keyframe(oc_enc_ctx *_enc,int _recode){
@@ -1222,9 +1342,9 @@ static void oc_enc_set_granpos(oc_enc_ctx *_enc){
th_enc_ctx *th_encode_alloc(const th_info *_info){
oc_enc_ctx *enc;
if(_info==NULL)return NULL;
- enc=_ogg_malloc(sizeof(*enc));
+ enc=oc_aligned_malloc(sizeof(*enc),16);
if(enc==NULL||oc_enc_init(enc,_info)<0){
- _ogg_free(enc);
+ oc_aligned_free(enc);
return NULL;
}
return enc;
@@ -1233,7 +1353,7 @@ th_enc_ctx *th_encode_alloc(const th_info *_info){
void th_encode_free(th_enc_ctx *_enc){
if(_enc!=NULL){
oc_enc_clear(_enc);
- _ogg_free(_enc);
+ oc_aligned_free(_enc);
}
}
@@ -1272,12 +1392,17 @@ int th_encode_ctl(th_enc_ctx *_enc,int _req,void *_buf,size_t _buf_sz){
}break;
case TH_ENCCTL_SET_VP3_COMPATIBLE:{
int vp3_compatible;
+ int ret;
if(_enc==NULL||_buf==NULL)return TH_EFAULT;
if(_buf_sz!=sizeof(vp3_compatible))return TH_EINVAL;
+ /*Try this before we change anything else, because it can fail.*/
+ ret=oc_enc_set_quant_params(_enc,&TH_VP31_QUANT_INFO);
+ /*If we can't allocate enough memory, don't change any of the state.*/
+ if(ret==TH_EFAULT)return ret;
vp3_compatible=*(int *)_buf;
_enc->vp3_compatible=vp3_compatible;
if(oc_enc_set_huffman_codes(_enc,TH_VP31_HUFF_CODES)<0)vp3_compatible=0;
- if(oc_enc_set_quant_params(_enc,&TH_VP31_QUANT_INFO)<0)vp3_compatible=0;
+ if(ret<0)vp3_compatible=0;
if(_enc->state.info.pixel_fmt!=TH_PF_420||
_enc->state.info.pic_width<_enc->state.info.frame_width||
_enc->state.info.pic_height<_enc->state.info.frame_height||
@@ -1386,6 +1511,44 @@ int th_encode_ctl(th_enc_ctx *_enc,int _req,void *_buf,size_t _buf_sz){
}
return oc_enc_rc_2pass_in(_enc,_buf,_buf_sz);
}break;
+ case TH_ENCCTL_SET_COMPAT_CONFIG:{
+ unsigned char buf[7];
+ oc_pack_buf opb;
+ th_quant_info qinfo;
+ th_huff_code huff_codes[TH_NHUFFMAN_TABLES][TH_NDCT_TOKENS];
+ int ret;
+ int i;
+ if(_enc==NULL||_buf==NULL)return TH_EFAULT;
+ if(_enc->packet_state>OC_PACKET_SETUP_HDR)return TH_EINVAL;
+ oc_pack_readinit(&opb,_buf,_buf_sz);
+ /*Validate the setup packet header.*/
+ for(i=0;i<7;i++)buf[i]=(unsigned char)oc_pack_read(&opb,8);
+ if(!(buf[0]&0x80)||memcmp(buf+1,"theora",6)!=0)return TH_ENOTFORMAT;
+ if(buf[0]!=0x82)return TH_EBADHEADER;
+ /*Read its contents.*/
+ ret=oc_quant_params_unpack(&opb,&qinfo);
+ if(ret<0){
+ oc_quant_params_clear(&qinfo);
+ return ret;
+ }
+ ret=oc_huff_codes_unpack(&opb,huff_codes);
+ if(ret<0){
+ oc_quant_params_clear(&qinfo);
+ return ret;
+ }
+ /*Install the new state.*/
+ oc_quant_params_clear(&_enc->qinfo);
+ memcpy(&_enc->qinfo,&qinfo,sizeof(qinfo));
+ oc_enc_quant_params_updated(_enc,&qinfo);
+ memcpy(_enc->huff_codes,huff_codes,sizeof(_enc->huff_codes));
+ return 0;
+ }
+#if defined(OC_COLLECT_METRICS)
+ case TH_ENCCTL_SET_METRICS_FILE:{
+ OC_MODE_METRICS_FILENAME=(const char *)_buf;
+ return 0;
+ }
+#endif
default:return TH_EIMPL;
}
}
@@ -1477,6 +1640,12 @@ static void oc_img_plane_copy_pad(th_img_plane *_dst,th_img_plane *_src,
int th_encode_ycbcr_in(th_enc_ctx *_enc,th_ycbcr_buffer _img){
th_ycbcr_buffer img;
+ int frame_width;
+ int frame_height;
+ int pic_width;
+ int pic_height;
+ int pic_x;
+ int pic_y;
int cframe_width;
int cframe_height;
int cpic_width;
@@ -1492,53 +1661,94 @@ int th_encode_ycbcr_in(th_enc_ctx *_enc,th_ycbcr_buffer _img){
if(_enc==NULL||_img==NULL)return TH_EFAULT;
if(_enc->packet_state==OC_PACKET_DONE)return TH_EINVAL;
if(_enc->rc.twopass&&_enc->rc.twopass_buffer_bytes==0)return TH_EINVAL;
- if((ogg_uint32_t)_img[0].width!=_enc->state.info.frame_width||
- (ogg_uint32_t)_img[0].height!=_enc->state.info.frame_height){
- return TH_EINVAL;
- }
hdec=!(_enc->state.info.pixel_fmt&1);
vdec=!(_enc->state.info.pixel_fmt&2);
- cframe_width=_enc->state.info.frame_width>>hdec;
- cframe_height=_enc->state.info.frame_height>>vdec;
- if(_img[1].width!=cframe_width||_img[2].width!=cframe_width||
- _img[1].height!=cframe_height||_img[2].height!=cframe_height){
- return TH_EINVAL;
- }
- /*Step 2: Copy the input to our internal buffer.
- This lets us add padding, if necessary, so we don't have to worry about
- dereferencing possibly invalid addresses, and allows us to use the same
- strides and fragment offsets for both the input frame and the reference
- frames.*/
+ frame_width=_enc->state.info.frame_width;
+ frame_height=_enc->state.info.frame_height;
+ pic_x=_enc->state.info.pic_x;
+ pic_y=_enc->state.info.pic_y;
+ pic_width=_enc->state.info.pic_width;
+ pic_height=_enc->state.info.pic_height;
+ cframe_width=frame_width>>hdec;
+ cframe_height=frame_height>>vdec;
+ cpic_x=pic_x>>hdec;
+ cpic_y=pic_y>>vdec;
+ cpic_width=(pic_x+pic_width+hdec>>hdec)-cpic_x;
+ cpic_height=(pic_y+pic_height+vdec>>vdec)-cpic_y;
/*Flip the input buffer upside down.*/
oc_ycbcr_buffer_flip(img,_img);
- oc_img_plane_copy_pad(_enc->state.ref_frame_bufs[OC_FRAME_IO]+0,img+0,
- _enc->state.info.pic_x,_enc->state.info.pic_y,
- _enc->state.info.pic_width,_enc->state.info.pic_height);
- cpic_x=_enc->state.info.pic_x>>hdec;
- cpic_y=_enc->state.info.pic_y>>vdec;
- cpic_width=(_enc->state.info.pic_x+_enc->state.info.pic_width+hdec>>hdec)
- -cpic_x;
- cpic_height=(_enc->state.info.pic_y+_enc->state.info.pic_height+vdec>>vdec)
- -cpic_y;
- for(pli=1;pli<3;pli++){
- oc_img_plane_copy_pad(_enc->state.ref_frame_bufs[OC_FRAME_IO]+pli,img+pli,
- cpic_x,cpic_y,cpic_width,cpic_height);
+ if(img[0].width!=frame_width||img[0].height!=frame_height||
+ img[1].width!=cframe_width||img[2].width!=cframe_width||
+ img[1].height!=cframe_height||img[2].height!=cframe_height){
+ /*The buffer does not match the frame size.
+ Check to see if it matches the picture size.*/
+ if(img[0].width!=pic_width||img[0].height!=pic_height||
+ img[1].width!=cpic_width||img[2].width!=cpic_width||
+ img[1].height!=cpic_height||img[2].height!=cpic_height){
+ /*It doesn't; we don't know how to handle it.*/
+ return TH_EINVAL;
+ }
+ /*Adjust the pointers to address a full frame.
+ We still only use the picture region, however.*/
+ img[0].data-=pic_y*(ptrdiff_t)img[0].stride+pic_x;
+ img[1].data-=cpic_y*(ptrdiff_t)img[1].stride+cpic_x;
+ img[2].data-=cpic_y*(ptrdiff_t)img[2].stride+cpic_x;
}
- /*Step 3: Update the buffer state.*/
+ /*Step 2: Update the buffer state.*/
if(_enc->state.ref_frame_idx[OC_FRAME_SELF]>=0){
_enc->state.ref_frame_idx[OC_FRAME_PREV]=
_enc->state.ref_frame_idx[OC_FRAME_SELF];
+ _enc->state.ref_frame_data[OC_FRAME_PREV]=
+ _enc->state.ref_frame_data[OC_FRAME_SELF];
if(_enc->state.frame_type==OC_INTRA_FRAME){
/*The new frame becomes both the previous and gold reference frames.*/
_enc->state.keyframe_num=_enc->state.curframe_num;
_enc->state.ref_frame_idx[OC_FRAME_GOLD]=
_enc->state.ref_frame_idx[OC_FRAME_SELF];
+ _enc->state.ref_frame_data[OC_FRAME_GOLD]=
+ _enc->state.ref_frame_data[OC_FRAME_SELF];
+ }
+ }
+ if(_enc->state.ref_frame_idx[OC_FRAME_IO]>=0&&_enc->prevframe_dropped==0){
+ _enc->state.ref_frame_idx[OC_FRAME_PREV_ORIG]=
+ _enc->state.ref_frame_idx[OC_FRAME_IO];
+ _enc->state.ref_frame_data[OC_FRAME_PREV_ORIG]=
+ _enc->state.ref_frame_data[OC_FRAME_IO];
+ if(_enc->state.frame_type==OC_INTRA_FRAME){
+ /*The new input frame becomes both the previous and gold
+ original-reference frames.*/
+ _enc->state.ref_frame_idx[OC_FRAME_GOLD_ORIG]=
+ _enc->state.ref_frame_idx[OC_FRAME_IO];
+ _enc->state.ref_frame_data[OC_FRAME_GOLD_ORIG]=
+ _enc->state.ref_frame_data[OC_FRAME_IO];
}
}
+ /*Select a free buffer to use for the incoming frame.*/
+ for(refi=3;refi==_enc->state.ref_frame_idx[OC_FRAME_GOLD_ORIG]||
+ refi==_enc->state.ref_frame_idx[OC_FRAME_PREV_ORIG];refi++);
+ _enc->state.ref_frame_idx[OC_FRAME_IO]=refi;
+ _enc->state.ref_frame_data[OC_FRAME_IO]=
+ _enc->state.ref_frame_bufs[refi][0].data;
+ /*Step 3: Copy the input to our internal buffer.
+ This lets us add padding, so we don't have to worry about dereferencing
+ possibly invalid addresses, and allows us to use the same strides and
+ fragment offsets for both the input frame and the reference frames.*/
+ oc_img_plane_copy_pad(_enc->state.ref_frame_bufs[refi]+0,img+0,
+ pic_x,pic_y,pic_width,pic_height);
+ oc_state_borders_fill_rows(&_enc->state,refi,0,0,frame_height);
+ oc_state_borders_fill_caps(&_enc->state,refi,0);
+ for(pli=1;pli<3;pli++){
+ oc_img_plane_copy_pad(_enc->state.ref_frame_bufs[refi]+pli,img+pli,
+ cpic_x,cpic_y,cpic_width,cpic_height);
+ oc_state_borders_fill_rows(&_enc->state,refi,pli,0,cframe_height);
+ oc_state_borders_fill_caps(&_enc->state,refi,pli);
+ }
/*Select a free buffer to use for the reconstructed version of this frame.*/
for(refi=0;refi==_enc->state.ref_frame_idx[OC_FRAME_GOLD]||
refi==_enc->state.ref_frame_idx[OC_FRAME_PREV];refi++);
_enc->state.ref_frame_idx[OC_FRAME_SELF]=refi;
+ _enc->state.ref_frame_data[OC_FRAME_SELF]=
+ _enc->state.ref_frame_bufs[refi][0].data;
_enc->state.curframe_num+=_enc->prev_dup_count+1;
/*Step 4: Compress the frame.*/
/*Start with a keyframe, and don't allow the generation of invalid files that
@@ -1575,11 +1785,11 @@ int th_encode_ycbcr_in(th_enc_ctx *_enc,th_ycbcr_buffer _img){
}
int th_encode_packetout(th_enc_ctx *_enc,int _last_p,ogg_packet *_op){
+ unsigned char *packet;
if(_enc==NULL||_op==NULL)return TH_EFAULT;
if(_enc->packet_state==OC_PACKET_READY){
_enc->packet_state=OC_PACKET_EMPTY;
if(_enc->rc.twopass!=1){
- unsigned char *packet;
packet=oggpackB_get_buffer(&_enc->opb);
/*If there's no packet, malloc failed while writing; it's lost forever.*/
if(packet==NULL)return TH_EFAULT;
@@ -1595,8 +1805,22 @@ int th_encode_packetout(th_enc_ctx *_enc,int _last_p,ogg_packet *_op){
else if(_enc->packet_state==OC_PACKET_EMPTY){
if(_enc->nqueued_dups>0){
_enc->nqueued_dups--;
- _op->packet=NULL;
- _op->bytes=0;
+ /*Emit an inter frame with no coded blocks in VP3-compatibility mode.*/
+ if(_enc->vp3_compatible){
+ oggpackB_reset(&_enc->opb);
+ oc_enc_drop_frame_pack(_enc);
+ packet=oggpackB_get_buffer(&_enc->opb);
+ /*If there's no packet, malloc failed while writing; it's lost
+ forever.*/
+ if(packet==NULL)return TH_EFAULT;
+ _op->packet=packet;
+ _op->bytes=oggpackB_bytes(&_enc->opb);
+ }
+ /*Otherwise emit a 0-byte packet.*/
+ else{
+ _op->packet=NULL;
+ _op->bytes=0;
+ }
}
else{
if(_last_p)_enc->packet_state=OC_PACKET_DONE;
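Aside (not part of the patch): the explicit drop-frame packets added above,
both for skipped frames and for queued duplicate frames, are only emitted when
VP3 compatibility has been requested through the public API.  A rough usage
sketch, assuming only the standard theoraenc.h interface and eliding error
handling:

  #include <theora/theoraenc.h>

  /*Request VP3-compatible output on a freshly allocated encoder.
    The encoder may still clear the flag if full compatibility is impossible
    (e.g. a non-4:2:0 pixel format), as the TH_ENCCTL_SET_VP3_COMPATIBLE
    handler above shows.*/
  static int enable_vp3_compat(th_enc_ctx *_enc){
    int vp3_compatible=1;
    return th_encode_ctl(_enc,TH_ENCCTL_SET_VP3_COMPATIBLE,
     &vp3_compatible,sizeof(vp3_compatible));
  }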
diff --git a/thirdparty/libtheora/encoder_disabled.c b/thirdparty/libtheora/encoder_disabled.c
index 0cbf6645ac..ba6d995505 100644
--- a/thirdparty/libtheora/encoder_disabled.c
+++ b/thirdparty/libtheora/encoder_disabled.c
@@ -11,12 +11,15 @@
********************************************************************
function:
- last mod: $Id: encoder_disabled.c 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
#include "apiwrapper.h"
#include "encint.h"
+const th_quant_info TH_VP31_QUANT_INFO = {};
+const th_huff_code TH_VP31_HUFF_CODES[TH_NHUFFMAN_TABLES][TH_NDCT_TOKENS];
+
th_enc_ctx *th_encode_alloc(const th_info *_info){
return NULL;
}
diff --git a/thirdparty/libtheora/enquant.c b/thirdparty/libtheora/enquant.c
index 3372fed221..8fd220edd7 100644
--- a/thirdparty/libtheora/enquant.c
+++ b/thirdparty/libtheora/enquant.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: enquant.c 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
#include <stdlib.h>
@@ -20,6 +20,69 @@
+int oc_quant_params_clone(th_quant_info *_dst,const th_quant_info *_src){
+ int i;
+ memcpy(_dst,_src,sizeof(*_dst));
+ memset(_dst->qi_ranges,0,sizeof(_dst->qi_ranges));
+ for(i=0;i<6;i++){
+ int nranges;
+ int qti;
+ int pli;
+ int qtj;
+ int plj;
+ int pdup;
+ int qdup;
+ qti=i/3;
+ pli=i%3;
+ qtj=(i-1)/3;
+ plj=(i-1)%3;
+ nranges=_src->qi_ranges[qti][pli].nranges;
+ /*Check for those duplicates that can be cleanly handled by
+ oc_quant_params_clear().*/
+ pdup=i>0&&nranges<=_src->qi_ranges[qtj][plj].nranges;
+ qdup=qti>0&&nranges<=_src->qi_ranges[0][pli].nranges;
+ _dst->qi_ranges[qti][pli].nranges=nranges;
+ if(pdup&&_src->qi_ranges[qti][pli].sizes==_src->qi_ranges[qtj][plj].sizes){
+ _dst->qi_ranges[qti][pli].sizes=_dst->qi_ranges[qtj][plj].sizes;
+ }
+ else if(qdup&&_src->qi_ranges[1][pli].sizes==_src->qi_ranges[0][pli].sizes){
+ _dst->qi_ranges[1][pli].sizes=_dst->qi_ranges[0][pli].sizes;
+ }
+ else{
+ int *sizes;
+ sizes=(int *)_ogg_malloc(nranges*sizeof(*sizes));
+ /*Note: The caller is responsible for cleaning up any partially
+ constructed qinfo.*/
+ if(sizes==NULL)return TH_EFAULT;
+ memcpy(sizes,_src->qi_ranges[qti][pli].sizes,nranges*sizeof(*sizes));
+ _dst->qi_ranges[qti][pli].sizes=sizes;
+ }
+ if(pdup&&_src->qi_ranges[qti][pli].base_matrices==
+ _src->qi_ranges[qtj][plj].base_matrices){
+ _dst->qi_ranges[qti][pli].base_matrices=
+ _dst->qi_ranges[qtj][plj].base_matrices;
+ }
+ else if(qdup&&_src->qi_ranges[1][pli].base_matrices==
+ _src->qi_ranges[0][pli].base_matrices){
+ _dst->qi_ranges[1][pli].base_matrices=
+ _dst->qi_ranges[0][pli].base_matrices;
+ }
+ else{
+ th_quant_base *base_matrices;
+ base_matrices=(th_quant_base *)_ogg_malloc(
+ (nranges+1)*sizeof(*base_matrices));
+ /*Note: The caller is responsible for cleaning up any partially
+ constructed qinfo.*/
+ if(base_matrices==NULL)return TH_EFAULT;
+ memcpy(base_matrices,_src->qi_ranges[qti][pli].base_matrices,
+ (nranges+1)*sizeof(*base_matrices));
+ _dst->qi_ranges[qti][pli].base_matrices=
+ (const th_quant_base *)base_matrices;
+ }
+ }
+ return 0;
+}
+
void oc_quant_params_pack(oggpack_buffer *_opb,const th_quant_info *_qinfo){
const th_quant_ranges *qranges;
const th_quant_base *base_mats[2*3*64];
@@ -119,7 +182,7 @@ void oc_quant_params_pack(oggpack_buffer *_opb,const th_quant_info *_qinfo){
}
}
-static void oc_iquant_init(oc_iquant *_this,ogg_uint16_t _d){
+void oc_iquant_init(oc_iquant *_this,ogg_uint16_t _d){
ogg_uint32_t t;
int l;
_d<<=1;
@@ -129,48 +192,61 @@ static void oc_iquant_init(oc_iquant *_this,ogg_uint16_t _d){
_this->l=l;
}
-/*See comments at oc_dequant_tables_init() for how the quantization tables'
- storage should be initialized.*/
-void oc_enquant_tables_init(ogg_uint16_t *_dequant[64][3][2],
- oc_iquant *_enquant[64][3][2],const th_quant_info *_qinfo){
- int qi;
+void oc_enc_enquant_table_init_c(void *_enquant,
+ const ogg_uint16_t _dequant[64]){
+ oc_iquant *enquant;
+ int zzi;
+ /*In the original VP3.2 code, the rounding offset and the size of the
+ dead zone around 0 were controlled by a "sharpness" parameter.
+ We now R-D optimize the tokens for each block after quantization,
+ so the rounding offset should always be 1/2, and an explicit dead
+ zone is unnecessary.
+ Hence, all of that VP3.2 code is gone from here, and the remaining
+ floating point code has been implemented as equivalent integer
+ code with exact precision.*/
+ enquant=(oc_iquant *)_enquant;
+ for(zzi=0;zzi<64;zzi++)oc_iquant_init(enquant+zzi,_dequant[zzi]);
+}
+
+void oc_enc_enquant_table_fixup_c(void *_enquant[3][3][2],int _nqis){
int pli;
+ int qii;
int qti;
- /*Initialize the dequantization tables first.*/
- oc_dequant_tables_init(_dequant,NULL,_qinfo);
- /*Derive the quantization tables directly from the dequantization tables.*/
- for(qi=0;qi<64;qi++)for(qti=0;qti<2;qti++)for(pli=0;pli<3;pli++){
- int zzi;
- int plj;
- int qtj;
- int dupe;
- dupe=0;
- for(qtj=0;qtj<=qti;qtj++){
- for(plj=0;plj<(qtj<qti?3:pli);plj++){
- if(_dequant[qi][pli][qti]==_dequant[qi][plj][qtj]){
- dupe=1;
- break;
- }
- }
- if(dupe)break;
- }
- if(dupe){
- _enquant[qi][pli][qti]=_enquant[qi][plj][qtj];
- continue;
- }
- /*In the original VP3.2 code, the rounding offset and the size of the
- dead zone around 0 were controlled by a "sharpness" parameter.
- We now R-D optimize the tokens for each block after quantization,
- so the rounding offset should always be 1/2, and an explicit dead
- zone is unnecessary.
- Hence, all of that VP3.2 code is gone from here, and the remaining
- floating point code has been implemented as equivalent integer
- code with exact precision.*/
- for(zzi=0;zzi<64;zzi++){
- oc_iquant_init(_enquant[qi][pli][qti]+zzi,
- _dequant[qi][pli][qti][zzi]);
+ for(pli=0;pli<3;pli++)for(qii=1;qii<_nqis;qii++)for(qti=0;qti<2;qti++){
+ *((oc_iquant *)_enquant[pli][qii][qti])=
+ *((oc_iquant *)_enquant[pli][0][qti]);
+ }
+}
+
+int oc_enc_quantize_c(ogg_int16_t _qdct[64],const ogg_int16_t _dct[64],
+ const ogg_uint16_t _dequant[64],const void *_enquant){
+ const oc_iquant *enquant;
+ int nonzero;
+ int zzi;
+ int val;
+ int d;
+ int s;
+ enquant=(const oc_iquant *)_enquant;
+ nonzero=0;
+ for(zzi=0;zzi<64;zzi++){
+ val=_dct[zzi];
+ d=_dequant[zzi];
+ val=val<<1;
+ if(abs(val)>=d){
+ s=OC_SIGNMASK(val);
+ /*The bias added here rounds ties away from zero, since token
+ optimization can only decrease the magnitude of the quantized
+ value.*/
+ val+=d+s^s;
+ /*Note the arithmetic right shift is not guaranteed by ANSI C.
+ Hopefully no one still uses ones-complement architectures.*/
+ val=((enquant[zzi].m*(ogg_int32_t)val>>16)+val>>enquant[zzi].l)-s;
+ _qdct[zzi]=(ogg_int16_t)val;
+ nonzero=zzi;
}
+ else _qdct[zzi]=0;
}
+ return nonzero;
}
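Aside (not part of the patch): two expressions in oc_enc_quantize_c() above
are easy to misread because of C precedence.  `val+=d+s^s` parses as
`val+=(d+s)^s`, which adds d with the sign of val (the rounding bias described
in the comment), and the last arithmetic line parses as
`((m*val>>16)+val)>>l`, the usual multiply-and-shift stand-in for dividing by
the doubled quantizer set up in oc_iquant_init().  A small sketch of both
idioms, assuming OC_SIGNMASK(x) is -1 for negative x and 0 otherwise, and
relying on arithmetic right shifts for negative operands as the comment above
already notes:

  #include <stdint.h>

  /*-1 if _x is negative, 0 otherwise (mirrors OC_SIGNMASK).*/
  static int oc_sign_mask_sketch(int _x){
    return -(_x<0);
  }

  /*Add _d to _v, matching _v's sign: (_d+s)^s is _d when s==0, -_d when s==-1.*/
  static int oc_add_toward_sign_sketch(int _v,int _d){
    int s;
    s=oc_sign_mask_sketch(_v);
    return _v+((_d+s)^s);
  }

  /*Approximate _v/_d using a precomputed (_m,_l) reciprocal pair of the kind
    oc_iquant_init() builds: q ~= ((_m*_v>>16)+_v)>>_l.*/
  static int oc_recip_div_sketch(int32_t _v,int32_t _m,int _l){
    return (int)(((_m*_v>>16)+_v)>>_l);
  }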
@@ -226,7 +302,7 @@ static const ogg_uint16_t OC_RPSD[2][64]={
relative to the total, scaled by 2**16, for each pixel format.
These values were measured after motion-compensated prediction, before
quantization, over a large set of test video encoded at all possible rates.
- TODO: These values are only from INTER frames; it should be re-measured for
+ TODO: These values are only from INTER frames; they should be re-measured for
INTRA frames.*/
static const ogg_uint16_t OC_PCD[4][3]={
{59926, 3038, 2572},
@@ -236,38 +312,58 @@ static const ogg_uint16_t OC_PCD[4][3]={
};
-/*Compute an "average" quantizer for each qi level.
- We do one for INTER and one for INTRA, since their behavior is very
- different, but average across chroma channels.
+/*Compute "average" quantizers for each qi level to use for rate control.
+ We do one for each color channel, as well as an average across color
+ channels, separately for INTER and INTRA, since their behavior is very
+ different.
The basic approach is to compute a harmonic average of the squared quantizer,
weighted by the expected squared magnitude of the DCT coefficients.
Under the (not quite true) assumption that DCT coefficients are
Laplacian-distributed, this preserves the product Q*lambda, where
lambda=sqrt(2/sigma**2) is the Laplacian distribution parameter (not to be
confused with the lambda used in R-D optimization throughout most of the
- rest of the code).
- The value Q*lambda completely determines the entropy of the coefficients.*/
+ rest of the code), when the distributions from multiple coefficients are
+ pooled.
+ The value Q*lambda completely determines the entropy of coefficients drawn
+ from a Laplacian distribution, and thus the expected bitrate.*/
void oc_enquant_qavg_init(ogg_int64_t _log_qavg[2][64],
+ ogg_int16_t _log_plq[64][3][2],ogg_uint16_t _chroma_rd_scale[2][64][2],
ogg_uint16_t *_dequant[64][3][2],int _pixel_fmt){
int qi;
int pli;
int qti;
int ci;
for(qti=0;qti<2;qti++)for(qi=0;qi<64;qi++){
- ogg_int64_t q2;
+ ogg_int64_t q2;
+ ogg_uint32_t qp[3];
+ ogg_uint32_t cqp;
+ ogg_uint32_t d;
q2=0;
for(pli=0;pli<3;pli++){
- ogg_uint32_t qp;
- qp=0;
+ qp[pli]=0;
for(ci=0;ci<64;ci++){
unsigned rq;
unsigned qd;
qd=_dequant[qi][pli][qti][OC_IZIG_ZAG[ci]];
rq=(OC_RPSD[qti][ci]+(qd>>1))/qd;
- qp+=rq*(ogg_uint32_t)rq;
+ qp[pli]+=rq*(ogg_uint32_t)rq;
}
- q2+=OC_PCD[_pixel_fmt][pli]*(ogg_int64_t)qp;
+ q2+=OC_PCD[_pixel_fmt][pli]*(ogg_int64_t)qp[pli];
+ /*plq=1.0/sqrt(qp)*/
+ _log_plq[qi][pli][qti]=
+ (ogg_int16_t)(OC_Q10(32)-oc_blog32_q10(qp[pli])>>1);
}
+ d=OC_PCD[_pixel_fmt][1]+OC_PCD[_pixel_fmt][2];
+ cqp=(ogg_uint32_t)((OC_PCD[_pixel_fmt][1]*(ogg_int64_t)qp[1]+
+ OC_PCD[_pixel_fmt][2]*(ogg_int64_t)qp[2]+(d>>1))/d);
+ /*chroma_rd_scale=clamp(0.25,cqp/qp[0],4)*/
+ d=OC_MAXI(qp[0]+(1<<OC_RD_SCALE_BITS-1)>>OC_RD_SCALE_BITS,1);
+ d=OC_CLAMPI(1<<OC_RD_SCALE_BITS-2,(cqp+(d>>1))/d,4<<OC_RD_SCALE_BITS);
+ _chroma_rd_scale[qti][qi][0]=(ogg_int16_t)d;
+ /*chroma_rd_iscale=clamp(0.25,qp[0]/cqp,4)*/
+ d=OC_MAXI(OC_RD_ISCALE(cqp,1),1);
+ d=OC_CLAMPI(1<<OC_RD_ISCALE_BITS-2,(qp[0]+(d>>1))/d,4<<OC_RD_ISCALE_BITS);
+ _chroma_rd_scale[qti][qi][1]=(ogg_int16_t)d;
/*qavg=1.0/sqrt(q2).*/
_log_qavg[qti][qi]=OC_Q57(48)-oc_blog64(q2)>>1;
}
diff --git a/thirdparty/libtheora/enquant.h b/thirdparty/libtheora/enquant.h
index d62df10d1a..e5f78144cc 100644
--- a/thirdparty/libtheora/enquant.h
+++ b/thirdparty/libtheora/enquant.h
@@ -14,14 +14,13 @@ struct oc_iquant{
ogg_int16_t l;
};
-typedef oc_iquant oc_iquant_table[64];
-
+int oc_quant_params_clone(th_quant_info *_dst,const th_quant_info *_src);
void oc_quant_params_pack(oggpack_buffer *_opb,const th_quant_info *_qinfo);
-void oc_enquant_tables_init(ogg_uint16_t *_dequant[64][3][2],
- oc_iquant *_enquant[64][3][2],const th_quant_info *_qinfo);
+void oc_iquant_init(oc_iquant *_this,ogg_uint16_t _d);
void oc_enquant_qavg_init(ogg_int64_t _log_qavg[2][64],
+ ogg_int16_t _log_plq[64][3][2],ogg_uint16_t _pl_rd_scale[2][64][2],
ogg_uint16_t *_dequant[64][3][2],int _pixel_fmt);
#endif
diff --git a/thirdparty/libtheora/fdct.c b/thirdparty/libtheora/fdct.c
index dc3a66f245..9c2f8b0446 100644
--- a/thirdparty/libtheora/fdct.c
+++ b/thirdparty/libtheora/fdct.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: fdct.c 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
#include "encint.h"
@@ -120,11 +120,6 @@ static void oc_fdct8(ogg_int16_t _y[8],const ogg_int16_t *_x){
_y[7]=v;
}
-void oc_enc_fdct8x8(const oc_enc_ctx *_enc,ogg_int16_t _y[64],
- const ogg_int16_t _x[64]){
- (*_enc->opt_vtable.fdct8x8)(_y,_x);
-}
-
/*Performs a forward 8x8 Type-II DCT transform.
The output is scaled by a factor of 4 relative to the orthonormal version
of the transform.
@@ -152,7 +147,7 @@ void oc_enc_fdct8x8_c(ogg_int16_t _y[64],const ogg_int16_t _x[64]){
/*Round the result back to the external working precision (which is still
scaled by four relative to the orthogonal result).
TODO: We should just update the external working precision.*/
- for(i=0;i<64;i++)_y[i]=w[i]+2>>2;
+ for(i=0;i<64;i++)_y[i]=w[OC_FZIG_ZAG[i]]+2>>2;
}
diff --git a/thirdparty/libtheora/fragment.c b/thirdparty/libtheora/fragment.c
index 15372e9d9f..14c38be507 100644
--- a/thirdparty/libtheora/fragment.c
+++ b/thirdparty/libtheora/fragment.c
@@ -11,17 +11,12 @@
********************************************************************
function:
- last mod: $Id: fragment.c 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
#include <string.h>
#include "internal.h"
-void oc_frag_copy(const oc_theora_state *_state,unsigned char *_dst,
- const unsigned char *_src,int _ystride){
- (*_state->opt_vtable.frag_copy)(_dst,_src,_ystride);
-}
-
void oc_frag_copy_c(unsigned char *_dst,const unsigned char *_src,int _ystride){
int i;
for(i=8;i-->0;){
@@ -31,9 +26,24 @@ void oc_frag_copy_c(unsigned char *_dst,const unsigned char *_src,int _ystride){
}
}
-void oc_frag_recon_intra(const oc_theora_state *_state,unsigned char *_dst,
- int _ystride,const ogg_int16_t _residue[64]){
- _state->opt_vtable.frag_recon_intra(_dst,_ystride,_residue);
+/*Copies the fragments specified by the lists of fragment indices from one
+ frame to another.
+ _dst_frame: The reference frame to copy to.
+ _src_frame: The reference frame to copy from.
+ _ystride: The row stride of the reference frames.
+ _fragis: A pointer to a list of fragment indices.
+ _nfragis: The number of fragment indices to copy.
+ _frag_buf_offs: The offsets of fragments in the reference frames.*/
+void oc_frag_copy_list_c(unsigned char *_dst_frame,
+ const unsigned char *_src_frame,int _ystride,
+ const ptrdiff_t *_fragis,ptrdiff_t _nfragis,const ptrdiff_t *_frag_buf_offs){
+ ptrdiff_t fragii;
+ for(fragii=0;fragii<_nfragis;fragii++){
+ ptrdiff_t frag_buf_off;
+ frag_buf_off=_frag_buf_offs[_fragis[fragii]];
+ oc_frag_copy_c(_dst_frame+frag_buf_off,
+ _src_frame+frag_buf_off,_ystride);
+ }
}
void oc_frag_recon_intra_c(unsigned char *_dst,int _ystride,
@@ -46,11 +56,6 @@ void oc_frag_recon_intra_c(unsigned char *_dst,int _ystride,
}
}
-void oc_frag_recon_inter(const oc_theora_state *_state,unsigned char *_dst,
- const unsigned char *_src,int _ystride,const ogg_int16_t _residue[64]){
- _state->opt_vtable.frag_recon_inter(_dst,_src,_ystride,_residue);
-}
-
void oc_frag_recon_inter_c(unsigned char *_dst,
const unsigned char *_src,int _ystride,const ogg_int16_t _residue[64]){
int i;
@@ -62,12 +67,6 @@ void oc_frag_recon_inter_c(unsigned char *_dst,
}
}
-void oc_frag_recon_inter2(const oc_theora_state *_state,unsigned char *_dst,
- const unsigned char *_src1,const unsigned char *_src2,int _ystride,
- const ogg_int16_t _residue[64]){
- _state->opt_vtable.frag_recon_inter2(_dst,_src1,_src2,_ystride,_residue);
-}
-
void oc_frag_recon_inter2_c(unsigned char *_dst,const unsigned char *_src1,
const unsigned char *_src2,int _ystride,const ogg_int16_t _residue[64]){
int i;
@@ -80,8 +79,4 @@ void oc_frag_recon_inter2_c(unsigned char *_dst,const unsigned char *_src1,
}
}
-void oc_restore_fpu(const oc_theora_state *_state){
- _state->opt_vtable.restore_fpu();
-}
-
void oc_restore_fpu_c(void){}
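Aside (not part of the patch): the wrappers deleted above used to forward every
call through _state->opt_vtable at run time.  In this revision the call sites
bind to an implementation at compile time unless run-time CPU dispatch is
actually needed, in the same way the encoder now guards its table with
OC_ENC_USE_VTABLE.  A hypothetical illustration of that pattern (the macro and
guard names below are made up for the example, not the library's actual ones):

  /*With no run-time dispatch, calls bind directly to the C implementation;
    otherwise they go through the per-state vtable as before.*/
  #if defined(USE_STATE_VTABLE_EXAMPLE)
  # define frag_copy_dispatch(_state,_dst,_src,_ystride) \
     ((*(_state)->opt_vtable.frag_copy)(_dst,_src,_ystride))
  #else
  # define frag_copy_dispatch(_state,_dst,_src,_ystride) \
     oc_frag_copy_c(_dst,_src,_ystride)
  #endif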
diff --git a/thirdparty/libtheora/huffdec.c b/thirdparty/libtheora/huffdec.c
index 8cf27f0341..5a83c5f150 100644
--- a/thirdparty/libtheora/huffdec.c
+++ b/thirdparty/libtheora/huffdec.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: huffdec.c 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
@@ -22,14 +22,60 @@
#include "decint.h"
-/*The ANSI offsetof macro is broken on some platforms (e.g., older DECs).*/
-#define _ogg_offsetof(_type,_field)\
- ((size_t)((char *)&((_type *)0)->_field-(char *)0))
-/*The number of internal tokens associated with each of the spec tokens.*/
-static const unsigned char OC_DCT_TOKEN_MAP_ENTRIES[TH_NDCT_TOKENS]={
- 1,1,1,4,8,1,1,8,1,1,1,1,1,2,2,2,2,4,8,2,2,2,4,2,2,2,2,2,8,2,4,8
-};
+/*Instead of storing every branching in the tree, subtrees can be collapsed
+ into one node, with a table of size 1<<nbits pointing directly to its
+ descendants nbits levels down.
+ This allows more than one bit to be read at a time, and avoids following all
+ the intermediate branches with next to no increased code complexity once
+ the collapsed tree has been built.
+ We do _not_ require that a subtree be complete to be collapsed, but instead
+ store duplicate pointers in the table, and record the actual depth of the
+ node below its parent.
+ This tells us the number of bits to advance the stream after reaching it.
+
+ This turns out to be equivalent to the method described in \cite{Hash95},
+ without the requirement that codewords be sorted by length.
+ If the codewords were sorted by length (so-called ``canonical-codes''), they
+ could be decoded much faster via either Lindell and Moffat's approach or
+ Hashemian's Condensed Huffman Code approach, the latter of which has an
+ extremely small memory footprint.
+ We can't use Choueka et al.'s finite state machine approach, which is
+ extremely fast, because we can't allow multiple symbols to be output at a
+ time; the codebook can and does change between symbols.
+ It also has very large memory requirements, which impairs cache coherency.
+
+ We store the tree packed in an array of 16-bit integers (words).
+ Each node consists of a single word, followed consecutively by two or more
+ indices of its children.
+ Let n be the value of this first word.
+ This is the number of bits that need to be read to traverse the node, and
+ must be positive.
+ 1<<n entries follow in the array, each an index to a child node.
+ If the child is positive, then it is the index of another internal node in
+ the table.
+ If the child is negative or zero, then it is a leaf node.
+ These are stored directly in the child pointer to save space, since they only
+ require a single word.
+ If a leaf node would have been encountered before reading n bits, then it is
+ duplicated the necessary number of times in this table.
+ Leaf nodes pack both a token value and their actual depth in the tree.
+ The token in the leaf node is (-leaf&255).
+ The number of bits that need to be consumed to reach the leaf, starting from
+ the current node, is (-leaf>>8).
+
+ @ARTICLE{Hash95,
+ author="Reza Hashemian",
+ title="Memory Efficient and High-Speed Search {Huffman} Coding",
+ journal="{IEEE} Transactions on Communications",
+ volume=43,
+ number=10,
+ pages="2576--2581",
+ month=Oct,
+ year=1995
+ }*/
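Aside (not part of the patch): a sketch of the table walk implied by the layout
described above.  It is not the library's actual decoder; bit_reader,
look_bits() and skip_bits() are hypothetical stand-ins for a bit-reader that
can peek at and then consume up to n bits.

  #include <stdint.h>

  typedef struct bit_reader bit_reader;     /*hypothetical bit-reader state*/
  long look_bits(bit_reader *_br,int _n);   /*peek at the next _n bits*/
  void skip_bits(bit_reader *_br,int _n);   /*consume _n bits*/

  /*Walk a collapsed tree stored as described above and return one token.
    _tree[node] holds the look-ahead width n for that node, and
    _tree[node+1+bits] the child selected by the next n bits: positive for
    another internal node, <=0 for a leaf packing the token (-leaf&255) and
    its depth below the current node (-leaf>>8).*/
  static int oc_huff_token_decode_sketch(bit_reader *_br,
   const int16_t *_tree){
    long node;
    node=0;
    for(;;){
      int  n;
      long child;
      n=_tree[node];
      child=_tree[node+1+look_bits(_br,n)];
      if(child<=0){
        /*Leaf: consume only the bits actually needed to reach it.*/
        skip_bits(_br,(int)(-child>>8));
        return (int)(-child&255);
      }
      /*Internal node: consume all n look-ahead bits and descend.*/
      skip_bits(_br,n);
      node=child;
    }
  }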
+
+
/*The map from external spec-defined tokens to internal tokens.
This is constructed so that any extra bits read with the original token value
@@ -99,391 +145,371 @@ static const unsigned char OC_DCT_TOKEN_MAP[TH_NDCT_TOKENS]={
40
};
-/*These three functions are really part of the bitpack.c module, but
- they are only used here.
- Declaring local static versions so they can be inlined saves considerable
- function call overhead.*/
-
-static oc_pb_window oc_pack_refill(oc_pack_buf *_b,int _bits){
- const unsigned char *ptr;
- const unsigned char *stop;
- oc_pb_window window;
- int available;
- window=_b->window;
- available=_b->bits;
- ptr=_b->ptr;
- stop=_b->stop;
- /*This version of _refill() doesn't bother setting eof because we won't
- check for it after we've started decoding DCT tokens.*/
- if(ptr>=stop)available=OC_LOTS_OF_BITS;
- while(available<=OC_PB_WINDOW_SIZE-8){
- available+=8;
- window|=(oc_pb_window)*ptr++<<OC_PB_WINDOW_SIZE-available;
- if(ptr>=stop)available=OC_LOTS_OF_BITS;
- }
- _b->ptr=ptr;
- if(_bits>available)window|=*ptr>>(available&7);
- _b->bits=available;
- return window;
-}
-
-
-/*Read in bits without advancing the bit pointer.
- Here we assume 0<=_bits&&_bits<=32.*/
-static long oc_pack_look(oc_pack_buf *_b,int _bits){
- oc_pb_window window;
- int available;
- long result;
- window=_b->window;
- available=_b->bits;
- if(_bits==0)return 0;
- if(_bits>available)_b->window=window=oc_pack_refill(_b,_bits);
- result=window>>OC_PB_WINDOW_SIZE-_bits;
- return result;
-}
-
-/*Advance the bit pointer.*/
-static void oc_pack_adv(oc_pack_buf *_b,int _bits){
- /*We ignore the special cases for _bits==0 and _bits==32 here, since they are
- never used actually used.
- OC_HUFF_SLUSH (defined below) would have to be at least 27 to actually read
- 32 bits in a single go, and would require a 32 GB lookup table (assuming
- 8 byte pointers, since 4 byte pointers couldn't fit such a table).*/
- _b->window<<=_bits;
- _b->bits-=_bits;
-}
+/*The log base 2 of number of internal tokens associated with each of the spec
+ tokens (i.e., how many of the extra bits are folded into the token value).
+ Increasing the maximum value beyond 3 will enlarge the amount of stack
+ required for tree construction.*/
+static const unsigned char OC_DCT_TOKEN_MAP_LOG_NENTRIES[TH_NDCT_TOKENS]={
+ 0,0,0,2,3,0,0,3,0,0,0,0,0,1,1,1,1,2,3,1,1,1,2,1,1,1,1,1,3,1,2,3
+};
-/*The log_2 of the size of a lookup table is allowed to grow to relative to
- the number of unique nodes it contains.
- E.g., if OC_HUFF_SLUSH is 2, then at most 75% of the space in the tree is
- wasted (each node will have an amortized cost of at most 20 bytes when using
- 4-byte pointers).
+/*The size a lookup table is allowed to grow to relative to the number of
+ unique nodes it contains.
+ E.g., if OC_HUFF_SLUSH is 4, then at most 75% of the space in the tree is
+ wasted (1/4 of the space must be used).
Larger numbers can decode tokens with fewer read operations, while smaller
- numbers may save more space (requiring as little as 8 bytes amortized per
- node, though there will be more nodes).
+ numbers may save more space.
With a sample file:
32233473 read calls are required when no tree collapsing is done (100.0%).
- 19269269 read calls are required when OC_HUFF_SLUSH is 0 (59.8%).
- 11144969 read calls are required when OC_HUFF_SLUSH is 1 (34.6%).
- 10538563 read calls are required when OC_HUFF_SLUSH is 2 (32.7%).
- 10192578 read calls are required when OC_HUFF_SLUSH is 3 (31.6%).
- Since a value of 1 gets us the vast majority of the speed-up with only a
- small amount of wasted memory, this is what we use.*/
-#define OC_HUFF_SLUSH (1)
-
-
-/*Determines the size in bytes of a Huffman tree node that represents a
- subtree of depth _nbits.
- _nbits: The depth of the subtree.
- If this is 0, the node is a leaf node.
- Otherwise 1<<_nbits pointers are allocated for children.
- Return: The number of bytes required to store the node.*/
-static size_t oc_huff_node_size(int _nbits){
- size_t size;
- size=_ogg_offsetof(oc_huff_node,nodes);
- if(_nbits>0)size+=sizeof(oc_huff_node *)*(1<<_nbits);
- return size;
-}
-
-static oc_huff_node *oc_huff_node_init(char **_storage,size_t _size,int _nbits){
- oc_huff_node *ret;
- ret=(oc_huff_node *)*_storage;
- ret->nbits=(unsigned char)_nbits;
- (*_storage)+=_size;
- return ret;
-}
-
-
-/*Determines the size in bytes of a Huffman tree.
- _nbits: The depth of the subtree.
- If this is 0, the node is a leaf node.
- Otherwise storage for 1<<_nbits pointers are added for children.
- Return: The number of bytes required to store the tree.*/
-static size_t oc_huff_tree_size(const oc_huff_node *_node){
- size_t size;
- size=oc_huff_node_size(_node->nbits);
- if(_node->nbits){
- int nchildren;
- int i;
- nchildren=1<<_node->nbits;
- for(i=0;i<nchildren;i+=1<<_node->nbits-_node->nodes[i]->depth){
- size+=oc_huff_tree_size(_node->nodes[i]);
- }
- }
- return size;
-}
-
-
-/*Unpacks a sub-tree from the given buffer.
- _opb: The buffer to unpack from.
- _binodes: The nodes to store the sub-tree in.
- _nbinodes: The number of nodes available for the sub-tree.
- Return: 0 on success, or a negative value on error.*/
-static int oc_huff_tree_unpack(oc_pack_buf *_opb,
- oc_huff_node *_binodes,int _nbinodes){
- oc_huff_node *binode;
- long bits;
- int nused;
- if(_nbinodes<1)return TH_EBADHEADER;
- binode=_binodes;
- nused=0;
- bits=oc_pack_read1(_opb);
- if(oc_pack_bytes_left(_opb)<0)return TH_EBADHEADER;
- /*Read an internal node:*/
- if(!bits){
- int ret;
- nused++;
- binode->nbits=1;
- binode->depth=1;
- binode->nodes[0]=_binodes+nused;
- ret=oc_huff_tree_unpack(_opb,_binodes+nused,_nbinodes-nused);
- if(ret>=0){
- nused+=ret;
- binode->nodes[1]=_binodes+nused;
- ret=oc_huff_tree_unpack(_opb,_binodes+nused,_nbinodes-nused);
- }
- if(ret<0)return ret;
- nused+=ret;
- }
- /*Read a leaf node:*/
- else{
- int ntokens;
- int token;
- int i;
- bits=oc_pack_read(_opb,OC_NDCT_TOKEN_BITS);
+ 19269269 read calls are required when OC_HUFF_SLUSH is 1 (59.8%).
+ 11144969 read calls are required when OC_HUFF_SLUSH is 2 (34.6%).
+ 10538563 read calls are required when OC_HUFF_SLUSH is 4 (32.7%).
+ 10192578 read calls are required when OC_HUFF_SLUSH is 8 (31.6%).
+ Since a value of 2 gets us the vast majority of the speed-up with only a
+ small amount of wasted memory, this is what we use.
+ This value must be less than 128, or you could create a tree with more than
+ 32767 entries, which would overflow the 16-bit words used to index it.*/
+#define OC_HUFF_SLUSH (2)
+/*The root of the tree is on the fast path, and a larger value here is more
+ beneficial than elsewhere in the tree.
+ 7 appears to give the best performance, trading off between increased use of
+ the single-read fast path and cache footprint for the tables, though
+ obviously this will depend on your cache size.
+ Using 7 here, the VP3 tables are about twice as large compared to using 2.*/
+#define OC_ROOT_HUFF_SLUSH (7)
+
+
+
+/*Unpacks a Huffman codebook.
+ _opb: The buffer to unpack from.
+ _tokens: Stores a list of internal tokens, in the order they were found in
+ the codebook, and the lengths of their corresponding codewords.
+ This is enough to completely define the codebook, while minimizing
+ stack usage and avoiding temporary allocations (for platforms
+ where free() is a no-op).
+ Return: The number of internal tokens in the codebook, or a negative value
+ on error.*/
+int oc_huff_tree_unpack(oc_pack_buf *_opb,unsigned char _tokens[256][2]){
+ ogg_uint32_t code;
+ int len;
+ int ntokens;
+ int nleaves;
+ code=0;
+ len=ntokens=nleaves=0;
+ for(;;){
+ long bits;
+ bits=oc_pack_read1(_opb);
+ /*Only process nodes so long as there's more bits in the buffer.*/
if(oc_pack_bytes_left(_opb)<0)return TH_EBADHEADER;
- /*Find out how many internal tokens we translate this external token into.*/
- ntokens=OC_DCT_TOKEN_MAP_ENTRIES[bits];
- if(_nbinodes<2*ntokens-1)return TH_EBADHEADER;
- /*Fill in a complete binary tree pointing to the internal tokens.*/
- for(i=1;i<ntokens;i<<=1){
- int j;
- binode=_binodes+nused;
- nused+=i;
- for(j=0;j<i;j++){
- binode[j].nbits=1;
- binode[j].depth=1;
- binode[j].nodes[0]=_binodes+nused+2*j;
- binode[j].nodes[1]=_binodes+nused+2*j+1;
- }
+ /*Read an internal node:*/
+ if(!bits){
+ len++;
+ /*Don't allow codewords longer than 32 bits.*/
+ if(len>32)return TH_EBADHEADER;
}
- /*And now the leaf nodes with those tokens.*/
- token=OC_DCT_TOKEN_MAP[bits];
- for(i=0;i<ntokens;i++){
- binode=_binodes+nused++;
- binode->nbits=0;
- binode->depth=1;
- binode->token=token+i;
+ /*Read a leaf node:*/
+ else{
+ ogg_uint32_t code_bit;
+ int neb;
+ int nentries;
+ int token;
+ /*Don't allow more than 32 spec-tokens per codebook.*/
+ if(++nleaves>32)return TH_EBADHEADER;
+ bits=oc_pack_read(_opb,OC_NDCT_TOKEN_BITS);
+ neb=OC_DCT_TOKEN_MAP_LOG_NENTRIES[bits];
+ token=OC_DCT_TOKEN_MAP[bits];
+ nentries=1<<neb;
+ while(nentries-->0){
+ _tokens[ntokens][0]=(unsigned char)token++;
+ _tokens[ntokens][1]=(unsigned char)(len+neb);
+ ntokens++;
+ }
+ code_bit=0x80000000U>>len-1;
+ while(len>0&&(code&code_bit)){
+ code^=code_bit;
+ code_bit<<=1;
+ len--;
+ }
+ if(len<=0)break;
+ code|=code_bit;
}
}
- return nused;
-}
-
-/*Finds the depth of shortest branch of the given sub-tree.
- The tree must be binary.
- _binode: The root of the given sub-tree.
- _binode->nbits must be 0 or 1.
- Return: The smallest depth of a leaf node in this sub-tree.
- 0 indicates this sub-tree is a leaf node.*/
-static int oc_huff_tree_mindepth(oc_huff_node *_binode){
- int depth0;
- int depth1;
- if(_binode->nbits==0)return 0;
- depth0=oc_huff_tree_mindepth(_binode->nodes[0]);
- depth1=oc_huff_tree_mindepth(_binode->nodes[1]);
- return OC_MINI(depth0,depth1)+1;
-}
-
-/*Finds the number of internal nodes at a given depth, plus the number of
- leaves at that depth or shallower.
- The tree must be binary.
- _binode: The root of the given sub-tree.
- _binode->nbits must be 0 or 1.
- Return: The number of entries that would be contained in a jump table of the
- given depth.*/
-static int oc_huff_tree_occupancy(oc_huff_node *_binode,int _depth){
- if(_binode->nbits==0||_depth<=0)return 1;
- else{
- return oc_huff_tree_occupancy(_binode->nodes[0],_depth-1)+
- oc_huff_tree_occupancy(_binode->nodes[1],_depth-1);
- }
+ return ntokens;
}
-/*Makes a copy of the given Huffman tree.
- _node: The Huffman tree to copy.
- Return: The copy of the Huffman tree.*/
-static oc_huff_node *oc_huff_tree_copy(const oc_huff_node *_node,
- char **_storage){
- oc_huff_node *ret;
- ret=oc_huff_node_init(_storage,oc_huff_node_size(_node->nbits),_node->nbits);
- ret->depth=_node->depth;
- if(_node->nbits){
- int nchildren;
- int i;
- int inext;
- nchildren=1<<_node->nbits;
- for(i=0;i<nchildren;){
- ret->nodes[i]=oc_huff_tree_copy(_node->nodes[i],_storage);
- inext=i+(1<<_node->nbits-ret->nodes[i]->depth);
- while(++i<inext)ret->nodes[i]=ret->nodes[i-1];
+/*Count how many tokens would be required to fill a subtree at depth _depth.
+ _tokens: A list of internal tokens, in the order they are found in the
+ codebook, and the lengths of their corresponding codewords.
+ _depth: The depth of the desired node in the corresponding tree structure.
+ Return: The number of tokens that belong to that subtree.*/
+static int oc_huff_subtree_tokens(unsigned char _tokens[][2],int _depth){
+ ogg_uint32_t code;
+ int ti;
+ code=0;
+ ti=0;
+ do{
+ if(_tokens[ti][1]-_depth<32)code+=0x80000000U>>_tokens[ti++][1]-_depth;
+ else{
+ /*Because of the expanded internal tokens, we can have codewords as long
+ as 35 bits.
+ A single recursion here is enough to advance past them.*/
+ code++;
+ ti+=oc_huff_subtree_tokens(_tokens+ti,_depth+31);
}
}
- else ret->token=_node->token;
- return ret;
+ while(code<0x80000000U);
+ return ti;
}
-static size_t oc_huff_tree_collapse_size(oc_huff_node *_binode,int _depth){
- size_t size;
- int mindepth;
- int depth;
- int loccupancy;
- int occupancy;
- if(_binode->nbits!=0&&_depth>0){
- return oc_huff_tree_collapse_size(_binode->nodes[0],_depth-1)+
- oc_huff_tree_collapse_size(_binode->nodes[1],_depth-1);
- }
- depth=mindepth=oc_huff_tree_mindepth(_binode);
- occupancy=1<<mindepth;
+/*Compute the number of bits to use for a collapsed tree node at the given
+ depth.
+ _tokens: A list of internal tokens, in the order they are found in the
+ codebook, and the lengths of their corresponding codewords.
+ _ntokens: The number of tokens corresponding to this tree node.
+ _depth: The depth of this tree node.
+ Return: The number of bits to use for a collapsed tree node rooted here.
+ This is always at least one, even if this was a leaf node.*/
+static int oc_huff_tree_collapse_depth(unsigned char _tokens[][2],
+ int _ntokens,int _depth){
+ int got_leaves;
+ int loccupancy;
+ int occupancy;
+ int slush;
+ int nbits;
+ int best_nbits;
+ slush=_depth>0?OC_HUFF_SLUSH:OC_ROOT_HUFF_SLUSH;
+ /*It's legal to have a tree with just a single node, which requires no bits
+ to decode and always returns the same token.
+ However, no encoder actually does this (yet).
+ To avoid a special case in oc_huff_token_decode(), we force the number of
+ lookahead bits to be at least one.
+ This will produce a tree that looks ahead one bit and then advances the
+ stream zero bits.*/
+ nbits=1;
+ occupancy=2;
+ got_leaves=1;
do{
+ int ti;
+ if(got_leaves)best_nbits=nbits;
+ nbits++;
+ got_leaves=0;
loccupancy=occupancy;
- occupancy=oc_huff_tree_occupancy(_binode,++depth);
- }
- while(occupancy>loccupancy&&occupancy>=1<<OC_MAXI(depth-OC_HUFF_SLUSH,0));
- depth--;
- size=oc_huff_node_size(depth);
- if(depth>0){
- size+=oc_huff_tree_collapse_size(_binode->nodes[0],depth-1);
- size+=oc_huff_tree_collapse_size(_binode->nodes[1],depth-1);
+ for(occupancy=ti=0;ti<_ntokens;occupancy++){
+ if(_tokens[ti][1]<_depth+nbits)ti++;
+ else if(_tokens[ti][1]==_depth+nbits){
+ got_leaves=1;
+ ti++;
+ }
+ else ti+=oc_huff_subtree_tokens(_tokens+ti,_depth+nbits);
+ }
}
- return size;
+ while(occupancy>loccupancy&&occupancy*slush>=1<<nbits);
+ return best_nbits;
}
-static oc_huff_node *oc_huff_tree_collapse(oc_huff_node *_binode,
- char **_storage);
-
-/*Fills the given nodes table with all the children in the sub-tree at the
- given depth.
- The nodes in the sub-tree with a depth less than that stored in the table
- are freed.
- The sub-tree must be binary and complete up until the given depth.
- _nodes: The nodes table to fill.
- _binode: The root of the sub-tree to fill it with.
- _binode->nbits must be 0 or 1.
- _level: The current level in the table.
- 0 indicates that the current node should be stored, regardless of
- whether it is a leaf node or an internal node.
- _depth: The depth of the nodes to fill the table with, relative to their
- parent.*/
-static void oc_huff_node_fill(oc_huff_node **_nodes,
- oc_huff_node *_binode,int _level,int _depth,char **_storage){
- if(_level<=0||_binode->nbits==0){
- int i;
- _binode->depth=(unsigned char)(_depth-_level);
- _nodes[0]=oc_huff_tree_collapse(_binode,_storage);
- for(i=1;i<1<<_level;i++)_nodes[i]=_nodes[0];
- }
- else{
- _level--;
- oc_huff_node_fill(_nodes,_binode->nodes[0],_level,_depth,_storage);
- _nodes+=1<<_level;
- oc_huff_node_fill(_nodes,_binode->nodes[1],_level,_depth,_storage);
- }
+/*Determines the size in words of a Huffman tree node that represents a
+ subtree of depth _nbits.
+ _nbits: The depth of the subtree.
+ This must be greater than zero.
+ Return: The number of words required to store the node.*/
+static size_t oc_huff_node_size(int _nbits){
+ return 1+(1<<_nbits);
}
-/*Finds the largest complete sub-tree rooted at the current node and collapses
- it into a single node.
- This procedure is then applied recursively to all the children of that node.
- _binode: The root of the sub-tree to collapse.
- _binode->nbits must be 0 or 1.
- Return: The new root of the collapsed sub-tree.*/
-static oc_huff_node *oc_huff_tree_collapse(oc_huff_node *_binode,
- char **_storage){
- oc_huff_node *root;
- size_t size;
- int mindepth;
- int depth;
- int loccupancy;
- int occupancy;
- depth=mindepth=oc_huff_tree_mindepth(_binode);
- occupancy=1<<mindepth;
+/*Produces a collapsed-tree representation of the given token list.
+ _tree: The storage for the collapsed Huffman tree.
+ This may be NULL to compute the required storage size instead of
+ constructing the tree.
+ _tokens: A list of internal tokens, in the order they are found in the
+ codebook, and the lengths of their corresponding codewords.
+ _ntokens: The number of tokens corresponding to this tree node.
+ Return: The number of words required to store the tree.*/
+static size_t oc_huff_tree_collapse(ogg_int16_t *_tree,
+ unsigned char _tokens[][2],int _ntokens){
+ ogg_int16_t node[34];
+ unsigned char depth[34];
+ unsigned char last[34];
+ size_t ntree;
+ int ti;
+ int l;
+ depth[0]=0;
+ last[0]=(unsigned char)(_ntokens-1);
+ ntree=0;
+ ti=0;
+ l=0;
do{
- loccupancy=occupancy;
- occupancy=oc_huff_tree_occupancy(_binode,++depth);
+ int nbits;
+ nbits=oc_huff_tree_collapse_depth(_tokens+ti,last[l]+1-ti,depth[l]);
+ node[l]=(ogg_int16_t)ntree;
+ ntree+=oc_huff_node_size(nbits);
+ if(_tree!=NULL)_tree[node[l]++]=(ogg_int16_t)nbits;
+ do{
+ while(ti<=last[l]&&_tokens[ti][1]<=depth[l]+nbits){
+ if(_tree!=NULL){
+ ogg_int16_t leaf;
+ int nentries;
+ nentries=1<<depth[l]+nbits-_tokens[ti][1];
+ leaf=(ogg_int16_t)-(_tokens[ti][1]-depth[l]<<8|_tokens[ti][0]);
+ while(nentries-->0)_tree[node[l]++]=leaf;
+ }
+ ti++;
+ }
+ if(ti<=last[l]){
+ /*We need to recurse*/
+ depth[l+1]=(unsigned char)(depth[l]+nbits);
+ if(_tree!=NULL)_tree[node[l]++]=(ogg_int16_t)ntree;
+ l++;
+ last[l]=
+ (unsigned char)(ti+oc_huff_subtree_tokens(_tokens+ti,depth[l])-1);
+ break;
+ }
+ /*Pop back up a level of recursion.*/
+ else if(l-->0)nbits=depth[l+1]-depth[l];
+ }
+ while(l>=0);
}
- while(occupancy>loccupancy&&occupancy>=1<<OC_MAXI(depth-OC_HUFF_SLUSH,0));
- depth--;
- if(depth<=1)return oc_huff_tree_copy(_binode,_storage);
- size=oc_huff_node_size(depth);
- root=oc_huff_node_init(_storage,size,depth);
- root->depth=_binode->depth;
- oc_huff_node_fill(root->nodes,_binode,depth,depth,_storage);
- return root;
+ while(l>=0);
+ return ntree;
}
/*Unpacks a set of Huffman trees, and reduces them to a collapsed
representation.
_opb: The buffer to unpack the trees from.
_nodes: The table to fill with the Huffman trees.
- Return: 0 on success, or a negative value on error.*/
+ Return: 0 on success, or a negative value on error.
+ The caller is responsible for cleaning up any partially initialized
+ _nodes on failure.*/
int oc_huff_trees_unpack(oc_pack_buf *_opb,
- oc_huff_node *_nodes[TH_NHUFFMAN_TABLES]){
+ ogg_int16_t *_nodes[TH_NHUFFMAN_TABLES]){
int i;
for(i=0;i<TH_NHUFFMAN_TABLES;i++){
- oc_huff_node nodes[511];
- char *storage;
- size_t size;
- int ret;
+ unsigned char tokens[256][2];
+ int ntokens;
+ ogg_int16_t *tree;
+ size_t size;
/*Unpack the full tree into a temporary buffer.*/
- ret=oc_huff_tree_unpack(_opb,nodes,sizeof(nodes)/sizeof(*nodes));
- if(ret<0)return ret;
- /*Figure out how big the collapsed tree will be.*/
- size=oc_huff_tree_collapse_size(nodes,0);
- storage=(char *)_ogg_calloc(1,size);
- if(storage==NULL)return TH_EFAULT;
- /*And collapse it.*/
- _nodes[i]=oc_huff_tree_collapse(nodes,&storage);
+ ntokens=oc_huff_tree_unpack(_opb,tokens);
+ if(ntokens<0)return ntokens;
+ /*Figure out how big the collapsed tree will be and allocate space for it.*/
+ size=oc_huff_tree_collapse(NULL,tokens,ntokens);
+ /*This should never happen; if it does it means you set OC_HUFF_SLUSH or
+ OC_ROOT_HUFF_SLUSH too large.*/
+ if(size>32767)return TH_EIMPL;
+ tree=(ogg_int16_t *)_ogg_malloc(size*sizeof(*tree));
+ if(tree==NULL)return TH_EFAULT;
+    /*Construct the collapsed tree.*/
+ oc_huff_tree_collapse(tree,tokens,ntokens);
+ _nodes[i]=tree;
}
return 0;
}
+/*Determines the size in words of a Huffman subtree.
+ _tree: The complete Huffman tree.
+ _node: The index of the root of the desired subtree.
+ Return: The number of words required to store the tree.*/
+static size_t oc_huff_tree_size(const ogg_int16_t *_tree,int _node){
+ size_t size;
+ int nchildren;
+ int n;
+ int i;
+ n=_tree[_node];
+ size=oc_huff_node_size(n);
+ nchildren=1<<n;
+ i=0;
+ do{
+ int child;
+ child=_tree[_node+i+1];
+ if(child<=0)i+=1<<n-(-child>>8);
+ else{
+ size+=oc_huff_tree_size(_tree,child);
+ i++;
+ }
+ }
+ while(i<nchildren);
+ return size;
+}
+
/*Makes a copy of the given set of Huffman trees.
_dst: The array to store the copy in.
_src: The array of trees to copy.*/
-int oc_huff_trees_copy(oc_huff_node *_dst[TH_NHUFFMAN_TABLES],
- const oc_huff_node *const _src[TH_NHUFFMAN_TABLES]){
+int oc_huff_trees_copy(ogg_int16_t *_dst[TH_NHUFFMAN_TABLES],
+ const ogg_int16_t *const _src[TH_NHUFFMAN_TABLES]){
+ int total;
int i;
+ total=0;
for(i=0;i<TH_NHUFFMAN_TABLES;i++){
- size_t size;
- char *storage;
- size=oc_huff_tree_size(_src[i]);
- storage=(char *)_ogg_calloc(1,size);
- if(storage==NULL){
+ size_t size;
+ size=oc_huff_tree_size(_src[i],0);
+ total+=size;
+ _dst[i]=(ogg_int16_t *)_ogg_malloc(size*sizeof(*_dst[i]));
+ if(_dst[i]==NULL){
while(i-->0)_ogg_free(_dst[i]);
return TH_EFAULT;
}
- _dst[i]=oc_huff_tree_copy(_src[i],&storage);
+ memcpy(_dst[i],_src[i],size*sizeof(*_dst[i]));
}
return 0;
}
/*Frees the memory used by a set of Huffman trees.
_nodes: The array of trees to free.*/
-void oc_huff_trees_clear(oc_huff_node *_nodes[TH_NHUFFMAN_TABLES]){
+void oc_huff_trees_clear(ogg_int16_t *_nodes[TH_NHUFFMAN_TABLES]){
int i;
for(i=0;i<TH_NHUFFMAN_TABLES;i++)_ogg_free(_nodes[i]);
}
+
/*Unpacks a single token using the given Huffman tree.
_opb: The buffer to unpack the token from.
_node: The tree to unpack the token with.
Return: The token value.*/
-int oc_huff_token_decode(oc_pack_buf *_opb,const oc_huff_node *_node){
- long bits;
- while(_node->nbits!=0){
- bits=oc_pack_look(_opb,_node->nbits);
- _node=_node->nodes[bits];
- oc_pack_adv(_opb,_node->depth);
+int oc_huff_token_decode_c(oc_pack_buf *_opb,const ogg_int16_t *_tree){
+ const unsigned char *ptr;
+ const unsigned char *stop;
+ oc_pb_window window;
+ int available;
+ long bits;
+ int node;
+ int n;
+ ptr=_opb->ptr;
+ window=_opb->window;
+ stop=_opb->stop;
+ available=_opb->bits;
+ node=0;
+ for(;;){
+ n=_tree[node];
+ if(n>available){
+ unsigned shift;
+ shift=OC_PB_WINDOW_SIZE-available;
+ do{
+ /*We don't bother setting eof because we won't check for it after we've
+ started decoding DCT tokens.*/
+ if(ptr>=stop){
+ shift=(unsigned)-OC_LOTS_OF_BITS;
+ break;
+ }
+ shift-=8;
+ window|=(oc_pb_window)*ptr++<<shift;
+ }
+ while(shift>=8);
+ /*Note: We never request more than 24 bits, so there's no need to fill in
+ the last partial byte here.*/
+ available=OC_PB_WINDOW_SIZE-shift;
+ }
+ bits=window>>OC_PB_WINDOW_SIZE-n;
+ node=_tree[node+1+bits];
+ if(node<=0)break;
+ window<<=n;
+ available-=n;
}
- return _node->token;
+ node=-node;
+ n=node>>8;
+ window<<=n;
+ available-=n;
+ _opb->ptr=ptr;
+ _opb->window=window;
+ _opb->bits=available;
+ return node&255;
}
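
The collapsed layout that oc_huff_tree_collapse() writes and oc_huff_token_decode_c() walks is easiest to see on a tree small enough to build by hand: each node stores its lookahead width followed by 1<<width entries, where a positive entry is the word index of a child node and a non-positive entry packs -(bits_used<<8|token). The sketch below is ours; it decodes from a plain array of bits instead of the real oc_pack_buf window, so the refill logic is not reproduced, but the walk is the same.

/* Hand-built collapsed tree for the codebook 0 -> token 0, 10 -> token 1,
   11 -> token 2: a single root node with a 2-bit lookahead. */
#include <stdio.h>

static const short tree[5]={
  2,                    /* Root looks ahead 2 bits. */
  -(1<<8|0),-(1<<8|0),  /* Entries 00 and 01: token 0, advance only 1 bit. */
  -(2<<8|1),            /* Entry 10: token 1, advance 2 bits. */
  -(2<<8|2)             /* Entry 11: token 2, advance 2 bits. */
};

static int decode(const int *bits,int nbits,int *pos){
  int node=0;
  for(;;){
    int n=tree[node];
    int look=0;
    int i;
    /* Peek n bits without consuming them; bits past the end read as 0. */
    for(i=0;i<n;i++)look=look<<1|(*pos+i<nbits?bits[*pos+i]:0);
    node=tree[node+1+look];
    if(node<=0)break;
    *pos+=n;
  }
  node=-node;
  *pos+=node>>8;  /* Advance by the bits the leaf actually used. */
  return node&255;
}

int main(void){
  static const int bits[5]={1,0,0,1,1};  /* "10", "0", "11" */
  int pos=0;
  while(pos<5)printf("token %d\n",decode(bits,5,&pos));
  return 0;
}
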
diff --git a/thirdparty/libtheora/huffdec.h b/thirdparty/libtheora/huffdec.h
index d7ffa0e99b..03d25dcd1e 100644
--- a/thirdparty/libtheora/huffdec.h
+++ b/thirdparty/libtheora/huffdec.h
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: huffdec.h 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
@@ -22,71 +22,11 @@
-typedef struct oc_huff_node oc_huff_node;
-
-/*A node in the Huffman tree.
- Instead of storing every branching in the tree, subtrees can be collapsed
- into one node, with a table of size 1<<nbits pointing directly to its
- descedents nbits levels down.
- This allows more than one bit to be read at a time, and avoids following all
- the intermediate branches with next to no increased code complexity once
- the collapsed tree has been built.
- We do _not_ require that a subtree be complete to be collapsed, but instead
- store duplicate pointers in the table, and record the actual depth of the
- node below its parent.
- This tells us the number of bits to advance the stream after reaching it.
-
- This turns out to be equivalent to the method described in \cite{Hash95},
- without the requirement that codewords be sorted by length.
- If the codewords were sorted by length (so-called ``canonical-codes''), they
- could be decoded much faster via either Lindell and Moffat's approach or
- Hashemian's Condensed Huffman Code approach, the latter of which has an
- extremely small memory footprint.
- We can't use Choueka et al.'s finite state machine approach, which is
- extremely fast, because we can't allow multiple symbols to be output at a
- time; the codebook can and does change between symbols.
- It also has very large memory requirements, which impairs cache coherency.
-
- @ARTICLE{Hash95,
- author="Reza Hashemian",
- title="Memory Efficient and High-Speed Search {Huffman} Coding",
- journal="{IEEE} Transactions on Communications",
- volume=43,
- number=10,
- pages="2576--2581",
- month=Oct,
- year=1995
- }*/
-struct oc_huff_node{
- /*The number of bits of the code needed to descend through this node.
- 0 indicates a leaf node.
- Otherwise there are 1<<nbits nodes in the nodes table, which can be
- indexed by reading nbits bits from the stream.*/
- unsigned char nbits;
- /*The value of a token stored in a leaf node.
- The value in non-leaf nodes is undefined.*/
- unsigned char token;
- /*The depth of the current node, relative to its parent in the collapsed
- tree.
- This can be less than its parent's nbits value, in which case there are
- 1<<nbits-depth copies of this node in the table, and the bitstream should
- only be advanced depth bits after reaching this node.*/
- unsigned char depth;
- /*The table of child nodes.
- The ACTUAL size of this array is 1<<nbits, despite what the declaration
- below claims.
- The exception is that for leaf nodes the size is 0.*/
- oc_huff_node *nodes[2];
-};
-
-
-
int oc_huff_trees_unpack(oc_pack_buf *_opb,
- oc_huff_node *_nodes[TH_NHUFFMAN_TABLES]);
-int oc_huff_trees_copy(oc_huff_node *_dst[TH_NHUFFMAN_TABLES],
- const oc_huff_node *const _src[TH_NHUFFMAN_TABLES]);
-void oc_huff_trees_clear(oc_huff_node *_nodes[TH_NHUFFMAN_TABLES]);
-int oc_huff_token_decode(oc_pack_buf *_opb,const oc_huff_node *_node);
-
+ ogg_int16_t *_nodes[TH_NHUFFMAN_TABLES]);
+int oc_huff_trees_copy(ogg_int16_t *_dst[TH_NHUFFMAN_TABLES],
+ const ogg_int16_t *const _src[TH_NHUFFMAN_TABLES]);
+void oc_huff_trees_clear(ogg_int16_t *_nodes[TH_NHUFFMAN_TABLES]);
+int oc_huff_token_decode_c(oc_pack_buf *_opb,const ogg_int16_t *_node);
#endif
diff --git a/thirdparty/libtheora/huffenc.c b/thirdparty/libtheora/huffenc.c
index bf624e0523..77ab584a19 100644
--- a/thirdparty/libtheora/huffenc.c
+++ b/thirdparty/libtheora/huffenc.c
@@ -859,9 +859,10 @@ int oc_huff_codes_pack(oggpack_buffer *_opb,
/*First, find the maximum code length so we can align all the bit
patterns.*/
maxlen=_codes[i][0].nbits;
- for(j=1;j<TH_NDCT_TOKENS;j++){
- maxlen=OC_MAXI(_codes[i][j].nbits,maxlen);
- }
+ for(j=1;j<TH_NDCT_TOKENS;j++)maxlen=OC_MAXI(_codes[i][j].nbits,maxlen);
+ /*It's improbable that a code with more than 32 bits could pass the
+ validation below, but abort early in any case.*/
+ if(maxlen>32)return TH_EINVAL;
mask=(1<<(maxlen>>1)<<(maxlen+1>>1))-1;
/*Copy over the codes into our temporary workspace.
The bit patterns are aligned, and the original entry each code is from
@@ -877,34 +878,89 @@ int oc_huff_codes_pack(oggpack_buffer *_opb,
/*For each leaf of the tree:*/
bpos=maxlen;
for(j=0;j<TH_NDCT_TOKENS;j++){
- int bit;
- /*If this code has any bits at all.*/
- if(entries[j].shift<maxlen){
- /*Descend into the tree, writing a bit for each branch.*/
- for(;bpos>entries[j].shift;bpos--)oggpackB_write(_opb,0,1);
- /*Mark this as a leaf node, and write its value.*/
- oggpackB_write(_opb,1,1);
- oggpackB_write(_opb,entries[j].token,5);
- /*For each 1 branch we've descended, back up the tree until we reach a
- 0 branch.*/
- bit=1<<bpos;
- for(;entries[j].pattern&bit;bpos++)bit<<=1;
- /*Validate the code.*/
- if(j+1<TH_NDCT_TOKENS){
- mask=~(bit-1)<<1;
- /*The next entry should have a 1 bit where we had a 0, and should
- match our code above that bit.
- This verifies both fullness and prefix-freeness simultaneously.*/
- if(!(entries[j+1].pattern&bit)||
- (entries[j].pattern&mask)!=(entries[j+1].pattern&mask)){
- return TH_EINVAL;
- }
+ ogg_uint32_t bit;
+ /*Fail if this code has no bits at all.
+ Technically a codebook with a single 0-bit entry is legal, but the
+ encoder currently does not support codebooks which do not contain all
+ the tokens.*/
+ if(entries[j].shift>=maxlen)return TH_EINVAL;
+ /*Descend into the tree, writing a bit for each branch.*/
+ for(;bpos>entries[j].shift;bpos--)oggpackB_write(_opb,0,1);
+ /*Mark this as a leaf node, and write its value.*/
+ oggpackB_write(_opb,1,1);
+ oggpackB_write(_opb,entries[j].token,5);
+ /*For each 1 branch we've descended, back up the tree until we reach a
+ 0 branch.*/
+ bit=(ogg_uint32_t)1<<bpos;
+ for(;entries[j].pattern&bit;bpos++)bit<<=1;
+ /*Validate the code.*/
+ if(j+1<TH_NDCT_TOKENS){
+ mask=~(bit-1)<<1;
+ /*The next entry should have a 1 bit where we had a 0, and should
+ match our code above that bit.
+ This verifies both fullness and prefix-freeness simultaneously.*/
+ if(!(entries[j+1].pattern&bit)||
+ (entries[j].pattern&mask)!=(entries[j+1].pattern&mask)){
+ return TH_EINVAL;
+ }
+ }
+ /*If there are no more codes, we should have ascended back to the top
+ of the tree.*/
+ else if(bpos<maxlen)return TH_EINVAL;
+ }
+ }
+ return 0;
+}
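
One detail worth calling out from the preceding hunk: the mask computation splits its shift in two, 1<<(maxlen>>1)<<(maxlen+1>>1), presumably so the shift count never reaches 32 when maxlen is 32. A standalone check of that idiom, written here with an unsigned type so the 32-bit case is fully defined (the helper name is ours):

/* Build an all-ones mask of nbits bits, 1 <= nbits <= 32, without ever
   shifting a 32-bit value by 32. */
#include <stdio.h>
#include <stdint.h>

static uint32_t low_mask(int nbits){
  return ((uint32_t)1<<(nbits>>1)<<((nbits+1)>>1))-1;
}

int main(void){
  printf("0x%08X\n",(unsigned)low_mask(5));    /* 0x0000001F */
  printf("0x%08X\n",(unsigned)low_mask(32));   /* 0xFFFFFFFF */
  return 0;
}
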
+
+/*This is used to copy the configuration of an existing setup header for use by
+ the encoder.
+ The decoder uses a completely different data structure for the Huffman
+ codebooks.*/
+int oc_huff_codes_unpack(oc_pack_buf *_opb,
+ th_huff_code _codes[TH_NHUFFMAN_TABLES][TH_NDCT_TOKENS]){
+ int i;
+ for(i=0;i<TH_NHUFFMAN_TABLES;i++){
+ ogg_uint32_t code;
+ int len;
+ int nleaves;
+ code=0;
+ len=nleaves=0;
+ memset(_codes[i],0,TH_NDCT_TOKENS*sizeof(*_codes[i]));
+ for(;;){
+ long bits;
+ bits=oc_pack_read1(_opb);
+      /*Only process nodes so long as there are more bits in the buffer.*/
+ if(oc_pack_bytes_left(_opb)<0)return TH_EBADHEADER;
+ /*Read an internal node:*/
+ if(!bits){
+ len++;
+ /*Don't allow codewords longer than 32 bits.*/
+ if(len>32)return TH_EBADHEADER;
+ }
+ /*Read a leaf node:*/
+ else{
+ ogg_uint32_t code_bit;
+ /*Don't allow more than 32 tokens per codebook.*/
+ if(++nleaves>32)return TH_EBADHEADER;
+ bits=oc_pack_read(_opb,OC_NDCT_TOKEN_BITS);
+ /*The current encoder does not support codebooks that do not contain
+ all of the tokens.*/
+ if(_codes[i][bits].nbits>0)return TH_EINVAL;
+ _codes[i][bits].pattern=code>>32-len;
+ _codes[i][bits].nbits=len;
+ code_bit=0x80000000U>>len-1;
+ while(len>0&&(code&code_bit)){
+ code^=code_bit;
+ code_bit<<=1;
+ len--;
}
- /*If there are no more codes, we should have ascended back to the top
- of the tree.*/
- else if(bpos<maxlen)return TH_EINVAL;
+ if(len<=0)break;
+ code|=code_bit;
}
}
+ /*The current encoder does not support codebooks that do not contain all of
+ the tokens.*/
+ if(nleaves<32)return TH_EINVAL;
}
return 0;
}
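
The code/len/code_bit bookkeeping above recovers canonical codewords straight from the header's pre-order description: a 0 bit descends one level, a 1 bit emits a leaf at the current code, and clearing the trailing run of 1 bits in code pops back up to the next unvisited branch. A toy standalone trace for the three-codeword tree "0", "10", "11" (token numbering is ours):

/* Pre-order bits: 0 (descend), 1 (leaf), 0 (descend), 1 (leaf), 1 (leaf). */
#include <stdio.h>
#include <stdint.h>

int main(void){
  static const int bits[5]={0,1,0,1,1};
  uint32_t code=0;
  int len=0;
  int token=0;
  int i;
  for(i=0;i<5;i++){
    if(!bits[i]){len++;continue;}
    printf("token %d: pattern %u, %d bit(s)\n",token++,
     (unsigned)(code>>(32-len)),len);
    /* Ascend while we sit on a 1 branch, then step onto the sibling. */
    {
      uint32_t code_bit=0x80000000U>>(len-1);
      while(len>0&&(code&code_bit)){
        code^=code_bit;
        code_bit<<=1;
        len--;
      }
      if(len<=0)break;
      code|=code_bit;
    }
  }
  return 0;
}
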
diff --git a/thirdparty/libtheora/huffenc.h b/thirdparty/libtheora/huffenc.h
index c5a3956f1f..0554cc4060 100644
--- a/thirdparty/libtheora/huffenc.h
+++ b/thirdparty/libtheora/huffenc.h
@@ -1,6 +1,7 @@
#if !defined(_huffenc_H)
# define _huffenc_H (1)
# include "huffman.h"
+# include "bitpack.h"
@@ -15,5 +16,7 @@ extern const th_huff_code
int oc_huff_codes_pack(oggpack_buffer *_opb,
const th_huff_code _codes[TH_NHUFFMAN_TABLES][TH_NDCT_TOKENS]);
+int oc_huff_codes_unpack(oc_pack_buf *_opb,
+ th_huff_code _codes[TH_NHUFFMAN_TABLES][TH_NDCT_TOKENS]);
#endif
diff --git a/thirdparty/libtheora/huffman.h b/thirdparty/libtheora/huffman.h
index 36cf7572e5..eb805866b9 100644
--- a/thirdparty/libtheora/huffman.h
+++ b/thirdparty/libtheora/huffman.h
@@ -11,12 +11,12 @@
********************************************************************
function:
- last mod: $Id: huffman.h 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
#if !defined(_huffman_H)
-# define _hufffman_H (1)
+# define _huffman_H (1)
# include "theora/codec.h"
# include "ocintrin.h"
diff --git a/thirdparty/libtheora/idct.c b/thirdparty/libtheora/idct.c
index 0e68ac7658..838e3ad8ca 100644
--- a/thirdparty/libtheora/idct.c
+++ b/thirdparty/libtheora/idct.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: idct.c 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
@@ -231,18 +231,18 @@ static void idct8_1(ogg_int16_t *_y,const ogg_int16_t _x[1]){
_y: The buffer to store the result in.
This may be the same as _x.
_x: The input coefficients.*/
-static void oc_idct8x8_3(ogg_int16_t _y[64],const ogg_int16_t _x[64]){
- const ogg_int16_t *in;
- ogg_int16_t *end;
- ogg_int16_t *out;
- ogg_int16_t w[64];
+static void oc_idct8x8_3(ogg_int16_t _y[64],ogg_int16_t _x[64]){
+ ogg_int16_t w[64];
+ int i;
/*Transform rows of x into columns of w.*/
idct8_2(w,_x);
idct8_1(w+1,_x+8);
/*Transform rows of w into columns of y.*/
- for(in=w,out=_y,end=out+8;out<end;in+=8,out++)idct8_2(out,in);
+ for(i=0;i<8;i++)idct8_2(_y+i,w+i*8);
/*Adjust for the scale factor.*/
- for(out=_y,end=out+64;out<end;out++)*out=(ogg_int16_t)(*out+8>>4);
+ for(i=0;i<64;i++)_y[i]=(ogg_int16_t)(_y[i]+8>>4);
+ /*Clear input data for next block.*/
+ _x[0]=_x[1]=_x[8]=0;
}
/*Performs an inverse 8x8 Type-II DCT transform.
@@ -260,20 +260,20 @@ static void oc_idct8x8_3(ogg_int16_t _y[64],const ogg_int16_t _x[64]){
_y: The buffer to store the result in.
This may be the same as _x.
_x: The input coefficients.*/
-static void oc_idct8x8_10(ogg_int16_t _y[64],const ogg_int16_t _x[64]){
- const ogg_int16_t *in;
- ogg_int16_t *end;
- ogg_int16_t *out;
- ogg_int16_t w[64];
+static void oc_idct8x8_10(ogg_int16_t _y[64],ogg_int16_t _x[64]){
+ ogg_int16_t w[64];
+ int i;
/*Transform rows of x into columns of w.*/
idct8_4(w,_x);
idct8_3(w+1,_x+8);
idct8_2(w+2,_x+16);
idct8_1(w+3,_x+24);
/*Transform rows of w into columns of y.*/
- for(in=w,out=_y,end=out+8;out<end;in+=8,out++)idct8_4(out,in);
+ for(i=0;i<8;i++)idct8_4(_y+i,w+i*8);
/*Adjust for the scale factor.*/
- for(out=_y,end=out+64;out<end;out++)*out=(ogg_int16_t)(*out+8>>4);
+ for(i=0;i<64;i++)_y[i]=(ogg_int16_t)(_y[i]+8>>4);
+ /*Clear input data for next block.*/
+ _x[0]=_x[1]=_x[2]=_x[3]=_x[8]=_x[9]=_x[10]=_x[16]=_x[17]=_x[24]=0;
}
/*Performs an inverse 8x8 Type-II DCT transform.
@@ -282,28 +282,23 @@ static void oc_idct8x8_10(ogg_int16_t _y[64],const ogg_int16_t _x[64]){
_y: The buffer to store the result in.
This may be the same as _x.
_x: The input coefficients.*/
-static void oc_idct8x8_slow(ogg_int16_t _y[64],const ogg_int16_t _x[64]){
- const ogg_int16_t *in;
- ogg_int16_t *end;
- ogg_int16_t *out;
- ogg_int16_t w[64];
+static void oc_idct8x8_slow(ogg_int16_t _y[64],ogg_int16_t _x[64]){
+ ogg_int16_t w[64];
+ int i;
/*Transform rows of x into columns of w.*/
- for(in=_x,out=w,end=out+8;out<end;in+=8,out++)idct8(out,in);
+ for(i=0;i<8;i++)idct8(w+i,_x+i*8);
/*Transform rows of w into columns of y.*/
- for(in=w,out=_y,end=out+8;out<end;in+=8,out++)idct8(out,in);
+ for(i=0;i<8;i++)idct8(_y+i,w+i*8);
/*Adjust for the scale factor.*/
- for(out=_y,end=out+64;out<end;out++)*out=(ogg_int16_t)(*out+8>>4);
-}
-
-void oc_idct8x8(const oc_theora_state *_state,ogg_int16_t _y[64],
- int _last_zzi){
- (*_state->opt_vtable.idct8x8)(_y,_last_zzi);
+ for(i=0;i<64;i++)_y[i]=(ogg_int16_t)(_y[i]+8>>4);
+ /*Clear input data for next block.*/
+ for(i=0;i<64;i++)_x[i]=0;
}
/*Performs an inverse 8x8 Type-II DCT transform.
The input is assumed to be scaled by a factor of 4 relative to orthonormal
version of the transform.*/
-void oc_idct8x8_c(ogg_int16_t _y[64],int _last_zzi){
+void oc_idct8x8_c(ogg_int16_t _y[64],ogg_int16_t _x[64],int _last_zzi){
/*_last_zzi is subtly different from an actual count of the number of
coefficients we decoded for this block.
It contains the value of zzi BEFORE the final token in the block was
@@ -329,7 +324,7 @@ void oc_idct8x8_c(ogg_int16_t _y[64],int _last_zzi){
gets.
Needless to say we inherited this approach from VP3.*/
/*Then perform the iDCT.*/
- if(_last_zzi<3)oc_idct8x8_3(_y,_y);
- else if(_last_zzi<10)oc_idct8x8_10(_y,_y);
- else oc_idct8x8_slow(_y,_y);
+ if(_last_zzi<=3)oc_idct8x8_3(_y,_x);
+ else if(_last_zzi<=10)oc_idct8x8_10(_y,_x);
+ else oc_idct8x8_slow(_y,_x);
}
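
The reworked entry points take the coefficient block as a separate argument and zero exactly the inputs each sparse path could have read, so the caller's coefficient buffer starts clean for the next block without a full 64-entry memset. A stripped-down sketch of that dispatch-and-clear pattern follows; the transform bodies are stubbed out and only the final rounding of the DC term is kept, so apart from the threshold test and the clears, nothing here is the library's code.

#include <stdio.h>
#include <string.h>

typedef short coef;

static void transform_sparse(coef *y,coef *x){
  /* A real version would run the 3-coefficient iDCT here; we keep only the
     final scale of the DC term, then clear the inputs this path reads. */
  y[0]=(coef)(x[0]+8>>4);
  x[0]=x[1]=x[8]=0;
}

static void transform_full(coef *y,coef *x){
  y[0]=(coef)(x[0]+8>>4);
  memset(x,0,64*sizeof(*x));  /* Dense block: clearing everything is fine. */
}

static void idct_dispatch(coef y[64],coef x[64],int last_zzi){
  if(last_zzi<=3)transform_sparse(y,x);
  else transform_full(y,x);
}

int main(void){
  coef x[64]={0},y[64]={0};
  x[0]=100;x[1]=-3;
  idct_dispatch(y,x,2);
  printf("dc=%d, inputs cleared: %d\n",y[0],x[0]==0&&x[1]==0&&x[8]==0);
  return 0;
}
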
diff --git a/thirdparty/libtheora/info.c b/thirdparty/libtheora/info.c
index 6b9762978b..e5cecd2de5 100644
--- a/thirdparty/libtheora/info.c
+++ b/thirdparty/libtheora/info.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: info.c 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
@@ -54,7 +54,7 @@ void th_comment_init(th_comment *_tc){
memset(_tc,0,sizeof(*_tc));
}
-void th_comment_add(th_comment *_tc,char *_comment){
+void th_comment_add(th_comment *_tc,const char *_comment){
char **user_comments;
int *comment_lengths;
int comment_len;
@@ -75,7 +75,7 @@ void th_comment_add(th_comment *_tc,char *_comment){
_tc->user_comments[_tc->comments]=NULL;
}
-void th_comment_add_tag(th_comment *_tc,char *_tag,char *_val){
+void th_comment_add_tag(th_comment *_tc,const char *_tag,const char *_val){
char *comment;
int tag_len;
int val_len;
@@ -91,7 +91,7 @@ void th_comment_add_tag(th_comment *_tc,char *_tag,char *_val){
_ogg_free(comment);
}
-char *th_comment_query(th_comment *_tc,char *_tag,int _count){
+char *th_comment_query(th_comment *_tc,const char *_tag,int _count){
long i;
int found;
int tag_len;
@@ -107,7 +107,7 @@ char *th_comment_query(th_comment *_tc,char *_tag,int _count){
return NULL;
}
-int th_comment_query_count(th_comment *_tc,char *_tag){
+int th_comment_query_count(th_comment *_tc,const char *_tag){
long i;
int tag_len;
int count;
diff --git a/thirdparty/libtheora/internal.c b/thirdparty/libtheora/internal.c
index 0fe4f63e72..afbb6efae7 100644
--- a/thirdparty/libtheora/internal.c
+++ b/thirdparty/libtheora/internal.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: internal.c 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
@@ -97,79 +97,29 @@ int oc_ilog(unsigned _v){
-/*The function used to fill in the chroma plane motion vectors for a macro
- block when 4 different motion vectors are specified in the luma plane.
- This version is for use with chroma decimated in the X and Y directions
- (4:2:0).
- _cbmvs: The chroma block-level motion vectors to fill in.
- _lbmvs: The luma block-level motion vectors.*/
-static void oc_set_chroma_mvs00(oc_mv _cbmvs[4],const oc_mv _lbmvs[4]){
- int dx;
- int dy;
- dx=_lbmvs[0][0]+_lbmvs[1][0]+_lbmvs[2][0]+_lbmvs[3][0];
- dy=_lbmvs[0][1]+_lbmvs[1][1]+_lbmvs[2][1]+_lbmvs[3][1];
- _cbmvs[0][0]=(signed char)OC_DIV_ROUND_POW2(dx,2,2);
- _cbmvs[0][1]=(signed char)OC_DIV_ROUND_POW2(dy,2,2);
-}
-
-/*The function used to fill in the chroma plane motion vectors for a macro
- block when 4 different motion vectors are specified in the luma plane.
- This version is for use with chroma decimated in the Y direction.
- _cbmvs: The chroma block-level motion vectors to fill in.
- _lbmvs: The luma block-level motion vectors.*/
-static void oc_set_chroma_mvs01(oc_mv _cbmvs[4],const oc_mv _lbmvs[4]){
- int dx;
- int dy;
- dx=_lbmvs[0][0]+_lbmvs[2][0];
- dy=_lbmvs[0][1]+_lbmvs[2][1];
- _cbmvs[0][0]=(signed char)OC_DIV_ROUND_POW2(dx,1,1);
- _cbmvs[0][1]=(signed char)OC_DIV_ROUND_POW2(dy,1,1);
- dx=_lbmvs[1][0]+_lbmvs[3][0];
- dy=_lbmvs[1][1]+_lbmvs[3][1];
- _cbmvs[1][0]=(signed char)OC_DIV_ROUND_POW2(dx,1,1);
- _cbmvs[1][1]=(signed char)OC_DIV_ROUND_POW2(dy,1,1);
-}
-
-/*The function used to fill in the chroma plane motion vectors for a macro
- block when 4 different motion vectors are specified in the luma plane.
- This version is for use with chroma decimated in the X direction (4:2:2).
- _cbmvs: The chroma block-level motion vectors to fill in.
- _lbmvs: The luma block-level motion vectors.*/
-static void oc_set_chroma_mvs10(oc_mv _cbmvs[4],const oc_mv _lbmvs[4]){
- int dx;
- int dy;
- dx=_lbmvs[0][0]+_lbmvs[1][0];
- dy=_lbmvs[0][1]+_lbmvs[1][1];
- _cbmvs[0][0]=(signed char)OC_DIV_ROUND_POW2(dx,1,1);
- _cbmvs[0][1]=(signed char)OC_DIV_ROUND_POW2(dy,1,1);
- dx=_lbmvs[2][0]+_lbmvs[3][0];
- dy=_lbmvs[2][1]+_lbmvs[3][1];
- _cbmvs[2][0]=(signed char)OC_DIV_ROUND_POW2(dx,1,1);
- _cbmvs[2][1]=(signed char)OC_DIV_ROUND_POW2(dy,1,1);
+void *oc_aligned_malloc(size_t _sz,size_t _align){
+ unsigned char *p;
+ if(_align-1>UCHAR_MAX||(_align&_align-1)||_sz>~(size_t)0-_align)return NULL;
+ p=(unsigned char *)_ogg_malloc(_sz+_align);
+ if(p!=NULL){
+ int offs;
+ offs=((p-(unsigned char *)0)-1&_align-1);
+ p[offs]=offs;
+ p+=offs+1;
+ }
+ return p;
}
-/*The function used to fill in the chroma plane motion vectors for a macro
- block when 4 different motion vectors are specified in the luma plane.
- This version is for use with no chroma decimation (4:4:4).
- _cbmvs: The chroma block-level motion vectors to fill in.
- _lmbmv: The luma macro-block level motion vector to fill in for use in
- prediction.
- _lbmvs: The luma block-level motion vectors.*/
-static void oc_set_chroma_mvs11(oc_mv _cbmvs[4],const oc_mv _lbmvs[4]){
- memcpy(_cbmvs,_lbmvs,4*sizeof(_lbmvs[0]));
+void oc_aligned_free(void *_ptr){
+ unsigned char *p;
+ p=(unsigned char *)_ptr;
+ if(p!=NULL){
+ int offs;
+ offs=*--p;
+ _ogg_free(p-offs);
+ }
}
-/*A table of functions used to fill in the chroma plane motion vectors for a
- macro block when 4 different motion vectors are specified in the luma
- plane.*/
-const oc_set_chroma_mvs_func OC_SET_CHROMA_MVS_TABLE[TH_PF_NFORMATS]={
- (oc_set_chroma_mvs_func)oc_set_chroma_mvs00,
- (oc_set_chroma_mvs_func)oc_set_chroma_mvs01,
- (oc_set_chroma_mvs_func)oc_set_chroma_mvs10,
- (oc_set_chroma_mvs_func)oc_set_chroma_mvs11
-};
-
-
void **oc_malloc_2d(size_t _height,size_t _width,size_t _sz){
size_t rowsz;
@@ -181,7 +131,6 @@ void **oc_malloc_2d(size_t _height,size_t _width,size_t _sz){
datsz=rowsz*_height;
/*Alloc array and row pointers.*/
ret=(char *)_ogg_malloc(datsz+colsz);
- if(ret==NULL)return NULL;
/*Initialize the array.*/
if(ret!=NULL){
size_t i;
@@ -204,7 +153,6 @@ void **oc_calloc_2d(size_t _height,size_t _width,size_t _sz){
datsz=rowsz*_height;
/*Alloc array and row pointers.*/
ret=(char *)_ogg_calloc(datsz+colsz,1);
- if(ret==NULL)return NULL;
/*Initialize the array.*/
if(ret!=NULL){
size_t i;
diff --git a/thirdparty/libtheora/internal.h b/thirdparty/libtheora/internal.h
index d81263e13e..53c77b88be 100644
--- a/thirdparty/libtheora/internal.h
+++ b/thirdparty/libtheora/internal.h
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: internal.h 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
#if !defined(_internal_H)
@@ -19,10 +19,20 @@
# include <stdlib.h>
# include <limits.h>
# if defined(HAVE_CONFIG_H)
-# include <config.h>
+# include "config.h"
# endif
# include "theora/codec.h"
# include "theora/theora.h"
+# include "ocintrin.h"
+
+# if !defined(__GNUC_PREREQ)
+# if defined(__GNUC__)&&defined(__GNUC_MINOR__)
+# define __GNUC_PREREQ(_maj,_min) \
+ ((__GNUC__<<16)+__GNUC_MINOR__>=((_maj)<<16)+(_min))
+# else
+# define __GNUC_PREREQ(_maj,_min) 0
+# endif
+# endif
# if defined(_MSC_VER)
/*Disable missing EMMS warnings.*/
@@ -31,24 +41,25 @@
# pragma warning(disable:4554)
# endif
/*You, too, gcc.*/
-# if defined(__GNUC_PREREQ)
-# if __GNUC_PREREQ(4,2)
-# pragma GCC diagnostic ignored "-Wparentheses"
-# endif
+# if __GNUC_PREREQ(4,2)
+# pragma GCC diagnostic ignored "-Wparentheses"
# endif
-# include "ocintrin.h"
-# include "huffman.h"
-# include "quant.h"
-
-/*Some assembly constructs require aligned operands.*/
-# if defined(OC_X86_ASM)
+/*Some assembly constructs require aligned operands.
+ The following macros are _only_ intended for structure member declarations.
+ Although they will sometimes work on stack variables, gcc will often silently
+ ignore them.
+ A separate set of macros could be made for manual stack alignment, but we
+ don't actually require it anywhere.*/
+# if defined(OC_X86_ASM)||defined(OC_ARM_ASM)
# if defined(__GNUC__)
# define OC_ALIGN8(expr) expr __attribute__((aligned(8)))
# define OC_ALIGN16(expr) expr __attribute__((aligned(16)))
# elif defined(_MSC_VER)
# define OC_ALIGN8(expr) __declspec (align(8)) expr
# define OC_ALIGN16(expr) __declspec (align(16)) expr
+# else
+# error "Alignment macros required for this platform."
# endif
# endif
# if !defined(OC_ALIGN8)
@@ -60,19 +71,8 @@
-typedef struct oc_sb_flags oc_sb_flags;
-typedef struct oc_border_info oc_border_info;
-typedef struct oc_fragment oc_fragment;
-typedef struct oc_fragment_plane oc_fragment_plane;
-typedef struct oc_base_opt_vtable oc_base_opt_vtable;
-typedef struct oc_base_opt_data oc_base_opt_data;
-typedef struct oc_state_dispatch_vtable oc_state_dispatch_vtable;
-typedef struct oc_theora_state oc_theora_state;
-
-
-
/*This library's version.*/
-# define OC_VENDOR_STRING "Xiph.Org libtheora 1.1 20090822 (Thusnelda)"
+# define OC_VENDOR_STRING "Xiph.Org libtheora 1.2.0alpha 20100924 (Ptalarbvorm)"
/*Theora bitstream version.*/
# define TH_VERSION_MAJOR (3)
@@ -83,315 +83,6 @@ typedef struct oc_theora_state oc_theora_state;
((_info)->version_minor>(_min)||(_info)->version_minor==(_min)&& \
(_info)->version_subminor>=(_sub)))
-/*A keyframe.*/
-#define OC_INTRA_FRAME (0)
-/*A predicted frame.*/
-#define OC_INTER_FRAME (1)
-/*A frame of unknown type (frame type decision has not yet been made).*/
-#define OC_UNKWN_FRAME (-1)
-
-/*The amount of padding to add to the reconstructed frame buffers on all
- sides.
- This is used to allow unrestricted motion vectors without special casing.
- This must be a multiple of 2.*/
-#define OC_UMV_PADDING (16)
-
-/*Frame classification indices.*/
-/*The previous golden frame.*/
-#define OC_FRAME_GOLD (0)
-/*The previous frame.*/
-#define OC_FRAME_PREV (1)
-/*The current frame.*/
-#define OC_FRAME_SELF (2)
-
-/*The input or output buffer.*/
-#define OC_FRAME_IO (3)
-
-/*Macroblock modes.*/
-/*Macro block is invalid: It is never coded.*/
-#define OC_MODE_INVALID (-1)
-/*Encoded difference from the same macro block in the previous frame.*/
-#define OC_MODE_INTER_NOMV (0)
-/*Encoded with no motion compensated prediction.*/
-#define OC_MODE_INTRA (1)
-/*Encoded difference from the previous frame offset by the given motion
- vector.*/
-#define OC_MODE_INTER_MV (2)
-/*Encoded difference from the previous frame offset by the last coded motion
- vector.*/
-#define OC_MODE_INTER_MV_LAST (3)
-/*Encoded difference from the previous frame offset by the second to last
- coded motion vector.*/
-#define OC_MODE_INTER_MV_LAST2 (4)
-/*Encoded difference from the same macro block in the previous golden
- frame.*/
-#define OC_MODE_GOLDEN_NOMV (5)
-/*Encoded difference from the previous golden frame offset by the given motion
- vector.*/
-#define OC_MODE_GOLDEN_MV (6)
-/*Encoded difference from the previous frame offset by the individual motion
- vectors given for each block.*/
-#define OC_MODE_INTER_MV_FOUR (7)
-/*The number of (coded) modes.*/
-#define OC_NMODES (8)
-
-/*Determines the reference frame used for a given MB mode.*/
-#define OC_FRAME_FOR_MODE(_x) \
- OC_UNIBBLE_TABLE32(OC_FRAME_PREV,OC_FRAME_SELF,OC_FRAME_PREV,OC_FRAME_PREV, \
- OC_FRAME_PREV,OC_FRAME_GOLD,OC_FRAME_GOLD,OC_FRAME_PREV,(_x))
-
-/*Constants for the packet state machine common between encoder and decoder.*/
-
-/*Next packet to emit/read: Codec info header.*/
-#define OC_PACKET_INFO_HDR (-3)
-/*Next packet to emit/read: Comment header.*/
-#define OC_PACKET_COMMENT_HDR (-2)
-/*Next packet to emit/read: Codec setup header.*/
-#define OC_PACKET_SETUP_HDR (-1)
-/*No more packets to emit/read.*/
-#define OC_PACKET_DONE (INT_MAX)
-
-
-
-/*Super blocks are 32x32 segments of pixels in a single color plane indexed
- in image order.
- Internally, super blocks are broken up into four quadrants, each of which
- contains a 2x2 pattern of blocks, each of which is an 8x8 block of pixels.
- Quadrants, and the blocks within them, are indexed in a special order called
- a "Hilbert curve" within the super block.
-
- In order to differentiate between the Hilbert-curve indexing strategy and
- the regular image order indexing strategy, blocks indexed in image order
- are called "fragments".
- Fragments are indexed in image order, left to right, then bottom to top,
- from Y' plane to Cb plane to Cr plane.
-
- The co-located fragments in all image planes corresponding to the location
- of a single quadrant of a luma plane super block form a macro block.
- Thus there is only a single set of macro blocks for all planes, each of which
- contains between 6 and 12 fragments, depending on the pixel format.
- Therefore macro block information is kept in a separate set of arrays from
- super blocks to avoid unused space in the other planes.
- The lists are indexed in super block order.
- That is, the macro block corresponding to the macro block mbi in (luma plane)
- super block sbi is at index (sbi<<2|mbi).
- Thus the number of macro blocks in each dimension is always twice the number
- of super blocks, even when only an odd number fall inside the coded frame.
- These "extra" macro blocks are just an artifact of our internal data layout,
- and not part of the coded stream; they are flagged with a negative MB mode.*/
-
-
-
-/*A single quadrant of the map from a super block to fragment numbers.*/
-typedef ptrdiff_t oc_sb_map_quad[4];
-/*A map from a super block to fragment numbers.*/
-typedef oc_sb_map_quad oc_sb_map[4];
-/*A single plane of the map from a macro block to fragment numbers.*/
-typedef ptrdiff_t oc_mb_map_plane[4];
-/*A map from a macro block to fragment numbers.*/
-typedef oc_mb_map_plane oc_mb_map[3];
-/*A motion vector.*/
-typedef signed char oc_mv[2];
-
-
-
-/*Super block information.*/
-struct oc_sb_flags{
- unsigned char coded_fully:1;
- unsigned char coded_partially:1;
- unsigned char quad_valid:4;
-};
-
-
-
-/*Information about a fragment which intersects the border of the displayable
- region.
- This marks which pixels belong to the displayable region.*/
-struct oc_border_info{
- /*A bit mask marking which pixels are in the displayable region.
- Pixel (x,y) corresponds to bit (y<<3|x).*/
- ogg_int64_t mask;
- /*The number of pixels in the displayable region.
- This is always positive, and always less than 64.*/
- int npixels;
-};
-
-
-
-/*Fragment information.*/
-struct oc_fragment{
- /*A flag indicating whether or not this fragment is coded.*/
- unsigned coded:1;
- /*A flag indicating that this entire fragment lies outside the displayable
- region of the frame.
- Note the contrast with an invalid macro block, which is outside the coded
- frame, not just the displayable one.
- There are no fragments outside the coded frame by construction.*/
- unsigned invalid:1;
- /*The index of the quality index used for this fragment's AC coefficients.*/
- unsigned qii:6;
- /*The mode of the macroblock this fragment belongs to.*/
- unsigned mb_mode:3;
- /*The index of the associated border information for fragments which lie
- partially outside the displayable region.
- For fragments completely inside or outside this region, this is -1.
- Note that the C standard requires an explicit signed keyword for bitfield
- types, since some compilers may treat them as unsigned without it.*/
- signed int borderi:5;
- /*The prediction-corrected DC component.
- Note that the C standard requires an explicit signed keyword for bitfield
- types, since some compilers may treat them as unsigned without it.*/
- signed int dc:16;
-};
-
-
-
-/*A description of each fragment plane.*/
-struct oc_fragment_plane{
- /*The number of fragments in the horizontal direction.*/
- int nhfrags;
- /*The number of fragments in the vertical direction.*/
- int nvfrags;
- /*The offset of the first fragment in the plane.*/
- ptrdiff_t froffset;
- /*The total number of fragments in the plane.*/
- ptrdiff_t nfrags;
- /*The number of super blocks in the horizontal direction.*/
- unsigned nhsbs;
- /*The number of super blocks in the vertical direction.*/
- unsigned nvsbs;
- /*The offset of the first super block in the plane.*/
- unsigned sboffset;
- /*The total number of super blocks in the plane.*/
- unsigned nsbs;
-};
-
-
-
-/*The shared (encoder and decoder) functions that have accelerated variants.*/
-struct oc_base_opt_vtable{
- void (*frag_copy)(unsigned char *_dst,
- const unsigned char *_src,int _ystride);
- void (*frag_recon_intra)(unsigned char *_dst,int _ystride,
- const ogg_int16_t _residue[64]);
- void (*frag_recon_inter)(unsigned char *_dst,
- const unsigned char *_src,int _ystride,const ogg_int16_t _residue[64]);
- void (*frag_recon_inter2)(unsigned char *_dst,const unsigned char *_src1,
- const unsigned char *_src2,int _ystride,const ogg_int16_t _residue[64]);
- void (*idct8x8)(ogg_int16_t _y[64],int _last_zzi);
- void (*state_frag_recon)(const oc_theora_state *_state,ptrdiff_t _fragi,
- int _pli,ogg_int16_t _dct_coeffs[64],int _last_zzi,ogg_uint16_t _dc_quant);
- void (*state_frag_copy_list)(const oc_theora_state *_state,
- const ptrdiff_t *_fragis,ptrdiff_t _nfragis,
- int _dst_frame,int _src_frame,int _pli);
- void (*state_loop_filter_frag_rows)(const oc_theora_state *_state,
- int _bv[256],int _refi,int _pli,int _fragy0,int _fragy_end);
- void (*restore_fpu)(void);
-};
-
-/*The shared (encoder and decoder) tables that vary according to which variants
- of the above functions are used.*/
-struct oc_base_opt_data{
- const unsigned char *dct_fzig_zag;
-};
-
-
-/*State information common to both the encoder and decoder.*/
-struct oc_theora_state{
- /*The stream information.*/
- th_info info;
- /*Table for shared accelerated functions.*/
- oc_base_opt_vtable opt_vtable;
- /*Table for shared data used by accelerated functions.*/
- oc_base_opt_data opt_data;
- /*CPU flags to detect the presence of extended instruction sets.*/
- ogg_uint32_t cpu_flags;
- /*The fragment plane descriptions.*/
- oc_fragment_plane fplanes[3];
- /*The list of fragments, indexed in image order.*/
- oc_fragment *frags;
- /*The the offset into the reference frame buffer to the upper-left pixel of
- each fragment.*/
- ptrdiff_t *frag_buf_offs;
- /*The motion vector for each fragment.*/
- oc_mv *frag_mvs;
- /*The total number of fragments in a single frame.*/
- ptrdiff_t nfrags;
- /*The list of super block maps, indexed in image order.*/
- oc_sb_map *sb_maps;
- /*The list of super block flags, indexed in image order.*/
- oc_sb_flags *sb_flags;
- /*The total number of super blocks in a single frame.*/
- unsigned nsbs;
- /*The fragments from each color plane that belong to each macro block.
- Fragments are stored in image order (left to right then top to bottom).
- When chroma components are decimated, the extra fragments have an index of
- -1.*/
- oc_mb_map *mb_maps;
- /*The list of macro block modes.
- A negative number indicates the macro block lies entirely outside the
- coded frame.*/
- signed char *mb_modes;
- /*The number of macro blocks in the X direction.*/
- unsigned nhmbs;
- /*The number of macro blocks in the Y direction.*/
- unsigned nvmbs;
- /*The total number of macro blocks.*/
- size_t nmbs;
- /*The list of coded fragments, in coded order.
- Uncoded fragments are stored in reverse order from the end of the list.*/
- ptrdiff_t *coded_fragis;
- /*The number of coded fragments in each plane.*/
- ptrdiff_t ncoded_fragis[3];
- /*The total number of coded fragments.*/
- ptrdiff_t ntotal_coded_fragis;
- /*The index of the buffers being used for each OC_FRAME_* reference frame.*/
- int ref_frame_idx[4];
- /*The actual buffers used for the previously decoded frames.*/
- th_ycbcr_buffer ref_frame_bufs[4];
- /*The storage for the reference frame buffers.*/
- unsigned char *ref_frame_data[4];
- /*The strides for each plane in the reference frames.*/
- int ref_ystride[3];
- /*The number of unique border patterns.*/
- int nborders;
- /*The unique border patterns for all border fragments.
- The borderi field of fragments which straddle the border indexes this
- list.*/
- oc_border_info borders[16];
- /*The frame number of the last keyframe.*/
- ogg_int64_t keyframe_num;
- /*The frame number of the current frame.*/
- ogg_int64_t curframe_num;
- /*The granpos of the current frame.*/
- ogg_int64_t granpos;
- /*The type of the current frame.*/
- unsigned char frame_type;
- /*The bias to add to the frame count when computing granule positions.*/
- unsigned char granpos_bias;
- /*The number of quality indices used in the current frame.*/
- unsigned char nqis;
- /*The quality indices of the current frame.*/
- unsigned char qis[3];
- /*The dequantization tables, stored in zig-zag order, and indexed by
- qi, pli, qti, and zzi.*/
- ogg_uint16_t *dequant_tables[64][3][2];
- OC_ALIGN16(oc_quant_table dequant_table_data[64][3][2]);
- /*Loop filter strength parameters.*/
- unsigned char loop_filter_limits[64];
-};
-
-
-
-/*The function type used to fill in the chroma plane motion vectors for a
- macro block when 4 different motion vectors are specified in the luma
- plane.
- _cbmvs: The chroma block-level motion vectors to fill in.
- _lmbmv: The luma macro-block level motion vector to fill in for use in
- prediction.
- _lbmvs: The luma block-level motion vectors.*/
-typedef void (*oc_set_chroma_mvs_func)(oc_mv _cbmvs[4],const oc_mv _lbmvs[4]);
-
/*A map from the index in the zig zag scan to the coefficient number in a
@@ -409,14 +100,12 @@ extern const unsigned char OC_MB_MAP_IDXS[TH_PF_NFORMATS][12];
/*The number of indices in the oc_mb_map array that can be valid for each of
the various chroma decimation types.*/
extern const unsigned char OC_MB_MAP_NIDXS[TH_PF_NFORMATS];
-/*A table of functions used to fill in the Cb,Cr plane motion vectors for a
- macro block when 4 different motion vectors are specified in the luma
- plane.*/
-extern const oc_set_chroma_mvs_func OC_SET_CHROMA_MVS_TABLE[TH_PF_NFORMATS];
int oc_ilog(unsigned _v);
+void *oc_aligned_malloc(size_t _sz,size_t _align);
+void oc_aligned_free(void *_ptr);
void **oc_malloc_2d(size_t _height,size_t _width,size_t _sz);
void **oc_calloc_2d(size_t _height,size_t _width,size_t _sz);
void oc_free_2d(void *_ptr);
@@ -424,86 +113,4 @@ void oc_free_2d(void *_ptr);
void oc_ycbcr_buffer_flip(th_ycbcr_buffer _dst,
const th_ycbcr_buffer _src);
-int oc_state_init(oc_theora_state *_state,const th_info *_info,int _nrefs);
-void oc_state_clear(oc_theora_state *_state);
-void oc_state_vtable_init_c(oc_theora_state *_state);
-void oc_state_borders_fill_rows(oc_theora_state *_state,int _refi,int _pli,
- int _y0,int _yend);
-void oc_state_borders_fill_caps(oc_theora_state *_state,int _refi,int _pli);
-void oc_state_borders_fill(oc_theora_state *_state,int _refi);
-void oc_state_fill_buffer_ptrs(oc_theora_state *_state,int _buf_idx,
- th_ycbcr_buffer _img);
-int oc_state_mbi_for_pos(oc_theora_state *_state,int _mbx,int _mby);
-int oc_state_get_mv_offsets(const oc_theora_state *_state,int _offsets[2],
- int _pli,int _dx,int _dy);
-
-int oc_state_loop_filter_init(oc_theora_state *_state,int *_bv);
-void oc_state_loop_filter(oc_theora_state *_state,int _frame);
-#if defined(OC_DUMP_IMAGES)
-int oc_state_dump_frame(const oc_theora_state *_state,int _frame,
- const char *_suf);
-#endif
-
-/*Shared accelerated functions.*/
-void oc_frag_copy(const oc_theora_state *_state,unsigned char *_dst,
- const unsigned char *_src,int _ystride);
-void oc_frag_recon_intra(const oc_theora_state *_state,
- unsigned char *_dst,int _dst_ystride,const ogg_int16_t _residue[64]);
-void oc_frag_recon_inter(const oc_theora_state *_state,unsigned char *_dst,
- const unsigned char *_src,int _ystride,const ogg_int16_t _residue[64]);
-void oc_frag_recon_inter2(const oc_theora_state *_state,
- unsigned char *_dst,const unsigned char *_src1,const unsigned char *_src2,
- int _ystride,const ogg_int16_t _residue[64]);
-void oc_idct8x8(const oc_theora_state *_state,ogg_int16_t _y[64],int _last_zzi);
-void oc_state_frag_recon(const oc_theora_state *_state,ptrdiff_t _fragi,
- int _pli,ogg_int16_t _dct_coeffs[64],int _last_zzi,ogg_uint16_t _dc_quant);
-void oc_state_frag_copy_list(const oc_theora_state *_state,
- const ptrdiff_t *_fragis,ptrdiff_t _nfragis,
- int _dst_frame,int _src_frame,int _pli);
-void oc_state_loop_filter_frag_rows(const oc_theora_state *_state,
- int _bv[256],int _refi,int _pli,int _fragy0,int _fragy_end);
-void oc_restore_fpu(const oc_theora_state *_state);
-
-/*Default pure-C implementations.*/
-void oc_frag_copy_c(unsigned char *_dst,
- const unsigned char *_src,int _src_ystride);
-void oc_frag_recon_intra_c(unsigned char *_dst,int _dst_ystride,
- const ogg_int16_t _residue[64]);
-void oc_frag_recon_inter_c(unsigned char *_dst,
- const unsigned char *_src,int _ystride,const ogg_int16_t _residue[64]);
-void oc_frag_recon_inter2_c(unsigned char *_dst,const unsigned char *_src1,
- const unsigned char *_src2,int _ystride,const ogg_int16_t _residue[64]);
-void oc_idct8x8_c(ogg_int16_t _y[64],int _last_zzi);
-void oc_state_frag_recon_c(const oc_theora_state *_state,ptrdiff_t _fragi,
- int _pli,ogg_int16_t _dct_coeffs[64],int _last_zzi,ogg_uint16_t _dc_quant);
-void oc_state_frag_copy_list_c(const oc_theora_state *_state,
- const ptrdiff_t *_fragis,ptrdiff_t _nfragis,
- int _dst_frame,int _src_frame,int _pli);
-void oc_state_loop_filter_frag_rows_c(const oc_theora_state *_state,
- int _bv[256],int _refi,int _pli,int _fragy0,int _fragy_end);
-void oc_restore_fpu_c(void);
-
-/*We need a way to call a few encoder functions without introducing a link-time
- dependency into the decoder, while still allowing the old alpha API which
- does not distinguish between encoder and decoder objects to be used.
- We do this by placing a function table at the start of the encoder object
- which can dispatch into the encoder library.
- We do a similar thing for the decoder in case we ever decide to split off a
- common base library.*/
-typedef void (*oc_state_clear_func)(theora_state *_th);
-typedef int (*oc_state_control_func)(theora_state *th,int _req,
- void *_buf,size_t _buf_sz);
-typedef ogg_int64_t (*oc_state_granule_frame_func)(theora_state *_th,
- ogg_int64_t _granulepos);
-typedef double (*oc_state_granule_time_func)(theora_state *_th,
- ogg_int64_t _granulepos);
-
-
-struct oc_state_dispatch_vtable{
- oc_state_clear_func clear;
- oc_state_control_func control;
- oc_state_granule_frame_func granule_frame;
- oc_state_granule_time_func granule_time;
-};
-
#endif
diff --git a/thirdparty/libtheora/mathops.c b/thirdparty/libtheora/mathops.c
index d3fb909194..23c8f6e1ba 100644
--- a/thirdparty/libtheora/mathops.c
+++ b/thirdparty/libtheora/mathops.c
@@ -1,10 +1,8 @@
+#include "internal.h"
#include "mathops.h"
-#include <limits.h>
/*The fastest fallback strategy for platforms with fast multiplication appears
to be based on de Bruijn sequences~\cite{LP98}.
- Tests confirmed this to be true even on an ARM11, where it is actually faster
- than using the native clz instruction.
Define OC_ILOG_NODEBRUIJN to use a simpler fallback on platforms where
multiplication or table lookups are too expensive.
@@ -15,8 +13,7 @@
year=1998,
note="\url{http://supertech.csail.mit.edu/papers/debruijn.pdf}"
}*/
-#if !defined(OC_ILOG_NODEBRUIJN)&& \
- !defined(OC_CLZ32)||!defined(OC_CLZ64)&&LONG_MAX<9223372036854775807LL
+#if !defined(OC_ILOG_NODEBRUIJN)&&!defined(OC_CLZ32)
static const unsigned char OC_DEBRUIJN_IDX32[32]={
0, 1,28, 2,29,14,24, 3,30,22,20,15,25,17, 4, 8,
31,27,13,23,21,19,16, 7,26,12,18, 6,11, 5,10, 9
@@ -25,7 +22,7 @@ static const unsigned char OC_DEBRUIJN_IDX32[32]={
int oc_ilog32(ogg_uint32_t _v){
#if defined(OC_CLZ32)
- return (OC_CLZ32_OFFS-OC_CLZ32(_v))&-!!_v;
+ return OC_CLZ32_OFFS-OC_CLZ32(_v)&-!!_v;
#else
/*On a Pentium M, this branchless version tested as the fastest version without
multiplications on 1,000,000,000 random 32-bit integers, edging out a
@@ -51,12 +48,12 @@ int oc_ilog32(ogg_uint32_t _v){
/*This de Bruijn sequence version is faster if you have a fast multiplier.*/
# else
int ret;
- ret=_v>0;
_v|=_v>>1;
_v|=_v>>2;
_v|=_v>>4;
_v|=_v>>8;
_v|=_v>>16;
+ ret=_v&1;
_v=(_v>>1)+1;
ret+=OC_DEBRUIJN_IDX32[_v*0x77CB531U>>27&0x1F];
return ret;
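
For reference, the de Bruijn fallback above can be lifted out and verified in isolation. The lookup table and the 0x77CB531U multiplier are the ones from this hunk; the brute-force cross-check is ours.

#include <stdio.h>
#include <stdint.h>

static const unsigned char DEBRUIJN_IDX32[32]={
   0, 1,28, 2,29,14,24, 3,30,22,20,15,25,17, 4, 8,
  31,27,13,23,21,19,16, 7,26,12,18, 6,11, 5,10, 9
};

/* Number of bits needed to represent v: ilog(0)=0, ilog(1)=1, ilog(2)=2... */
static int ilog32(uint32_t v){
  int ret;
  v|=v>>1;
  v|=v>>2;
  v|=v>>4;
  v|=v>>8;
  v|=v>>16;    /* v is now all ones up to the highest set bit of the input. */
  ret=v&1;     /* 1 iff the input was nonzero. */
  v=(v>>1)+1;  /* Isolate the highest set bit. */
  return ret+DEBRUIJN_IDX32[v*0x77CB531U>>27&0x1F];
}

static int ilog32_slow(uint32_t v){
  int n=0;
  while(v){n++;v>>=1;}
  return n;
}

int main(void){
  uint32_t v;
  for(v=0;v<1000000;v++)if(ilog32(v)!=ilog32_slow(v)){
    printf("mismatch at %u\n",(unsigned)v);
    return 1;
  }
  printf("ok\n");
  return 0;
}
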
@@ -66,16 +63,21 @@ int oc_ilog32(ogg_uint32_t _v){
int oc_ilog64(ogg_int64_t _v){
#if defined(OC_CLZ64)
- return (OC_CLZ64_OFFS-OC_CLZ64(_v))&-!!_v;
+ return OC_CLZ64_OFFS-OC_CLZ64(_v)&-!!_v;
#else
-# if defined(OC_ILOG_NODEBRUIJN)
+/*If we don't have a fast 64-bit word implementation, split it into two 32-bit
+ halves.*/
+# if defined(OC_ILOG_NODEBRUIJN)|| \
+ defined(OC_CLZ32)||LONG_MAX<9223372036854775807LL
ogg_uint32_t v;
int ret;
int m;
- ret=_v>0;
m=(_v>0xFFFFFFFFU)<<5;
v=(ogg_uint32_t)(_v>>m);
- ret|=m;
+# if defined(OC_CLZ32)
+ ret=m+OC_CLZ32_OFFS-OC_CLZ32(v)&-!!v;
+# elif defined(OC_ILOG_NODEBRUIJN)
+ ret=v>0|m;
m=(v>0xFFFFU)<<4;
v>>=m;
ret|=m;
@@ -90,26 +92,19 @@ int oc_ilog64(ogg_int64_t _v){
ret|=m;
ret+=v>1;
return ret;
-# else
-/*If we don't have a 64-bit word, split it into two 32-bit halves.*/
-# if LONG_MAX<9223372036854775807LL
- ogg_uint32_t v;
- int ret;
- int m;
- ret=_v>0;
- m=(_v>0xFFFFFFFFU)<<5;
- v=(ogg_uint32_t)(_v>>m);
- ret|=m;
+# else
v|=v>>1;
v|=v>>2;
v|=v>>4;
v|=v>>8;
v|=v>>16;
+ ret=v&1|m;
v=(v>>1)+1;
ret+=OC_DEBRUIJN_IDX32[v*0x77CB531U>>27&0x1F];
+# endif
return ret;
-/*Otherwise do it in one 64-bit operation.*/
-# else
+/*Otherwise do it in one 64-bit multiply.*/
+# else
static const unsigned char OC_DEBRUIJN_IDX64[64]={
0, 1, 2, 7, 3,13, 8,19, 4,25,14,28, 9,34,20,40,
5,17,26,38,15,46,29,48,10,31,35,54,21,50,41,57,
@@ -117,17 +112,16 @@ int oc_ilog64(ogg_int64_t _v){
62,11,23,32,36,44,52,55,61,22,43,51,60,42,59,58
};
int ret;
- ret=_v>0;
_v|=_v>>1;
_v|=_v>>2;
_v|=_v>>4;
_v|=_v>>8;
_v|=_v>>16;
_v|=_v>>32;
+ ret=(int)_v&1;
_v=(_v>>1)+1;
ret+=OC_DEBRUIJN_IDX64[_v*0x218A392CD3D5DBF>>58&0x3F];
return ret;
-# endif
# endif
#endif
}
@@ -294,3 +288,27 @@ ogg_int64_t oc_blog64(ogg_int64_t _w){
}
return OC_Q57(ipart)+z;
}
+
+/*Polynomial approximation of a binary exponential.
+ Q10 input, Q0 output.*/
+ogg_uint32_t oc_bexp32_q10(int _z){
+ unsigned n;
+ int ipart;
+ ipart=_z>>10;
+ n=(_z&(1<<10)-1)<<4;
+ n=(n*((n*((n*((n*3548>>15)+6817)>>15)+15823)>>15)+22708)>>15)+16384;
+ return 14-ipart>0?n+(1<<13-ipart)>>14-ipart:n<<ipart-14;
+}
+
+/*Polynomial approximation of a binary logarithm.
+ Q0 input, Q10 output.*/
+int oc_blog32_q10(ogg_uint32_t _w){
+ int n;
+ int ipart;
+ int fpart;
+ if(_w<=0)return -1;
+ ipart=OC_ILOGNZ_32(_w);
+ n=(ipart-16>0?_w>>ipart-16:_w<<16-ipart)-32768-16384;
+ fpart=(n*((n*((n*((n*-1402>>15)+2546)>>15)-5216)>>15)+15745)>>15)-6793;
+ return (ipart<<10)+(fpart>>4);
+}
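
The new Q10 helpers follow the usual fixed-point convention of scaling by 1024: oc_bexp32_q10() takes a Q10 exponent and returns a plain integer, and oc_blog32_q10() goes the other way. The floating-point reference versions below (our names, using libm rather than the polynomial approximations above) make those conventions concrete.

#include <stdio.h>
#include <stdint.h>
#include <math.h>

/* Q10 means "value times 1024". */
static uint32_t bexp32_q10_ref(int z){
  return (uint32_t)lrint(exp2(z/1024.0));
}

static int blog32_q10_ref(uint32_t w){
  return w>0?(int)lrint(log2((double)w)*1024.0):-1;
}

int main(void){
  printf("%u\n",(unsigned)bexp32_q10_ref(3*1024));  /* 2^3 -> 8 */
  printf("%d\n",blog32_q10_ref(1u<<20));            /* 20 in Q10 -> 20480 */
  return 0;
}
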
diff --git a/thirdparty/libtheora/mathops.h b/thirdparty/libtheora/mathops.h
index efbc5377b0..a1a4f9df0e 100644
--- a/thirdparty/libtheora/mathops.h
+++ b/thirdparty/libtheora/mathops.h
@@ -2,29 +2,27 @@
# define _mathops_H (1)
# include <ogg/ogg.h>
-# ifdef __GNUC_PREREQ
-# if __GNUC_PREREQ(3,4)
-# include <limits.h>
+# if __GNUC_PREREQ(3,4)
+# include <limits.h>
/*Note the casts to (int) below: this prevents OC_CLZ{32|64}_OFFS from
"upgrading" the type of an entire expression to an (unsigned) size_t.*/
-# if INT_MAX>=2147483647
-# define OC_CLZ32_OFFS ((int)sizeof(unsigned)*CHAR_BIT)
-# define OC_CLZ32(_x) (__builtin_clz(_x))
-# elif LONG_MAX>=2147483647L
-# define OC_CLZ32_OFFS ((int)sizeof(unsigned long)*CHAR_BIT)
-# define OC_CLZ32(_x) (__builtin_clzl(_x))
-# endif
-# if INT_MAX>=9223372036854775807LL
-# define OC_CLZ64_OFFS ((int)sizeof(unsigned)*CHAR_BIT)
-# define OC_CLZ64(_x) (__builtin_clz(_x))
-# elif LONG_MAX>=9223372036854775807LL
-# define OC_CLZ64_OFFS ((int)sizeof(unsigned long)*CHAR_BIT)
-# define OC_CLZ64(_x) (__builtin_clzl(_x))
-# elif LLONG_MAX>=9223372036854775807LL|| \
- __LONG_LONG_MAX__>=9223372036854775807LL
-# define OC_CLZ64_OFFS ((int)sizeof(unsigned long long)*CHAR_BIT)
-# define OC_CLZ64(_x) (__builtin_clzll(_x))
-# endif
+# if INT_MAX>=2147483647
+# define OC_CLZ32_OFFS ((int)sizeof(unsigned)*CHAR_BIT)
+# define OC_CLZ32(_x) (__builtin_clz(_x))
+# elif LONG_MAX>=2147483647L
+# define OC_CLZ32_OFFS ((int)sizeof(unsigned long)*CHAR_BIT)
+# define OC_CLZ32(_x) (__builtin_clzl(_x))
+# endif
+# if INT_MAX>=9223372036854775807LL
+# define OC_CLZ64_OFFS ((int)sizeof(unsigned)*CHAR_BIT)
+# define OC_CLZ64(_x) (__builtin_clz(_x))
+# elif LONG_MAX>=9223372036854775807LL
+# define OC_CLZ64_OFFS ((int)sizeof(unsigned long)*CHAR_BIT)
+# define OC_CLZ64(_x) (__builtin_clzl(_x))
+# elif LLONG_MAX>=9223372036854775807LL|| \
+ __LONG_LONG_MAX__>=9223372036854775807LL
+# define OC_CLZ64_OFFS ((int)sizeof(unsigned long long)*CHAR_BIT)
+# define OC_CLZ64(_x) (__builtin_clzll(_x))
# endif
# endif
@@ -134,8 +132,12 @@ int oc_ilog64(ogg_int64_t _v);
# define OC_STATIC_ILOG_64(_v) (OC_STATIC_ILOG6((ogg_int64_t)(_v)))
#define OC_Q57(_v) ((ogg_int64_t)(_v)<<57)
+#define OC_Q10(_v) ((_v)<<10)
ogg_int64_t oc_bexp64(ogg_int64_t _z);
ogg_int64_t oc_blog64(ogg_int64_t _w);
+ogg_uint32_t oc_bexp32_q10(int _z);
+int oc_blog32_q10(ogg_uint32_t _w);
+
#endif
diff --git a/thirdparty/libtheora/mcenc.c b/thirdparty/libtheora/mcenc.c
index 797e81f4f9..82eb824a80 100644
--- a/thirdparty/libtheora/mcenc.c
+++ b/thirdparty/libtheora/mcenc.c
@@ -88,9 +88,11 @@ static const int OC_SQUARE_SITES[11][8]={
};
-static void oc_mcenc_find_candidates(oc_enc_ctx *_enc,oc_mcenc_ctx *_mcenc,
- int _accum[2],int _mbi,int _frame){
+static void oc_mcenc_find_candidates_a(oc_enc_ctx *_enc,oc_mcenc_ctx *_mcenc,
+ oc_mv _accum,int _mbi,int _frame){
oc_mb_enc_info *embs;
+ int accum_x;
+ int accum_y;
int a[3][2];
int ncandidates;
unsigned nmbi;
@@ -102,20 +104,24 @@ static void oc_mcenc_find_candidates(oc_enc_ctx *_enc,oc_mcenc_ctx *_mcenc,
/*Fill in the first part of set A: the vectors from adjacent blocks.*/
for(i=0;i<embs[_mbi].ncneighbors;i++){
nmbi=embs[_mbi].cneighbors[i];
- _mcenc->candidates[ncandidates][0]=embs[nmbi].analysis_mv[0][_frame][0];
- _mcenc->candidates[ncandidates][1]=embs[nmbi].analysis_mv[0][_frame][1];
+ _mcenc->candidates[ncandidates][0]=
+ OC_MV_X(embs[nmbi].analysis_mv[0][_frame]);
+ _mcenc->candidates[ncandidates][1]=
+ OC_MV_Y(embs[nmbi].analysis_mv[0][_frame]);
ncandidates++;
}
}
+ accum_x=OC_MV_X(_accum);
+ accum_y=OC_MV_Y(_accum);
/*Add a few additional vectors to set A: the vectors used in the previous
frames and the (0,0) vector.*/
- _mcenc->candidates[ncandidates][0]=OC_CLAMPI(-31,_accum[0],31);
- _mcenc->candidates[ncandidates][1]=OC_CLAMPI(-31,_accum[1],31);
+ _mcenc->candidates[ncandidates][0]=accum_x;
+ _mcenc->candidates[ncandidates][1]=accum_y;
ncandidates++;
_mcenc->candidates[ncandidates][0]=OC_CLAMPI(-31,
- embs[_mbi].analysis_mv[1][_frame][0]+_accum[0],31);
+ OC_MV_X(embs[_mbi].analysis_mv[1][_frame])+accum_x,31);
_mcenc->candidates[ncandidates][1]=OC_CLAMPI(-31,
- embs[_mbi].analysis_mv[1][_frame][1]+_accum[1],31);
+ OC_MV_Y(embs[_mbi].analysis_mv[1][_frame])+accum_y,31);
ncandidates++;
_mcenc->candidates[ncandidates][0]=0;
_mcenc->candidates[ncandidates][1]=0;
@@ -131,30 +137,33 @@ static void oc_mcenc_find_candidates(oc_enc_ctx *_enc,oc_mcenc_ctx *_mcenc,
OC_SORT2I(a[0][1],a[1][1]);
_mcenc->candidates[0][0]=a[1][0];
_mcenc->candidates[0][1]=a[1][1];
- /*Fill in set B: accelerated predictors for this and adjacent macro blocks.*/
_mcenc->setb0=ncandidates;
- /*The first time through the loop use the current macro block.*/
- nmbi=_mbi;
- for(i=0;;i++){
- _mcenc->candidates[ncandidates][0]=OC_CLAMPI(-31,
- 2*embs[_mbi].analysis_mv[1][_frame][0]
- -embs[_mbi].analysis_mv[2][_frame][0]+_accum[0],31);
- _mcenc->candidates[ncandidates][1]=OC_CLAMPI(-31,
- 2*embs[_mbi].analysis_mv[1][_frame][1]
- -embs[_mbi].analysis_mv[2][_frame][1]+_accum[1],31);
- ncandidates++;
- if(i>=embs[_mbi].npneighbors)break;
- nmbi=embs[_mbi].pneighbors[i];
- }
- /*Truncate to full-pel positions.*/
- for(i=0;i<ncandidates;i++){
- _mcenc->candidates[i][0]=OC_DIV2(_mcenc->candidates[i][0]);
- _mcenc->candidates[i][1]=OC_DIV2(_mcenc->candidates[i][1]);
- }
+}
+
+static void oc_mcenc_find_candidates_b(oc_enc_ctx *_enc,oc_mcenc_ctx *_mcenc,
+ oc_mv _accum,int _mbi,int _frame){
+ oc_mb_enc_info *embs;
+ int accum_x;
+ int accum_y;
+ int ncandidates;
+ embs=_enc->mb_info;
+ accum_x=OC_MV_X(_accum);
+ accum_y=OC_MV_Y(_accum);
+ /*Fill in set B: accelerated predictors for this and adjacent macro blocks.*/
+ ncandidates=_mcenc->setb0;
+ /*Use only the current block. Using more did not appear to be helpful
+ with the current selection logic due to escaping the local search too
+ quickly.*/
+ _mcenc->candidates[ncandidates][0]=OC_CLAMPI(-31,
+ 2*OC_MV_X(embs[_mbi].analysis_mv[1][_frame])
+ -OC_MV_X(embs[_mbi].analysis_mv[2][_frame])+accum_x,31);
+ _mcenc->candidates[ncandidates][1]=OC_CLAMPI(-31,
+ 2*OC_MV_Y(embs[_mbi].analysis_mv[1][_frame])
+ -OC_MV_Y(embs[_mbi].analysis_mv[2][_frame])+accum_y,31);
+ ncandidates++;
_mcenc->ncandidates=ncandidates;
}
-#if 0
static unsigned oc_sad16_halfpel(const oc_enc_ctx *_enc,
const ptrdiff_t *_frag_buf_offs,const ptrdiff_t _fragis[4],
int _mvoffset0,int _mvoffset1,const unsigned char *_src,
@@ -170,20 +179,21 @@ static unsigned oc_sad16_halfpel(const oc_enc_ctx *_enc,
}
return err;
}
-#endif
static unsigned oc_satd16_halfpel(const oc_enc_ctx *_enc,
const ptrdiff_t *_frag_buf_offs,const ptrdiff_t _fragis[4],
int _mvoffset0,int _mvoffset1,const unsigned char *_src,
const unsigned char *_ref,int _ystride,unsigned _best_err){
unsigned err;
+ int dc;
int bi;
err=0;
for(bi=0;bi<4;bi++){
ptrdiff_t frag_offs;
frag_offs=_frag_buf_offs[_fragis[bi]];
- err+=oc_enc_frag_satd2_thresh(_enc,_src+frag_offs,_ref+frag_offs+_mvoffset0,
- _ref+frag_offs+_mvoffset1,_ystride,_best_err-err);
+ err+=oc_enc_frag_satd2(_enc,&dc,_src+frag_offs,
+ _ref+frag_offs+_mvoffset0,_ref+frag_offs+_mvoffset1,_ystride);
+ err+=abs(dc);
}
return err;
}
@@ -219,9 +229,17 @@ static int oc_mcenc_ysatd_check_mbcandidate_fullpel(const oc_enc_ctx *_enc,
err=0;
for(bi=0;bi<4;bi++){
ptrdiff_t frag_offs;
+ int dc;
frag_offs=_frag_buf_offs[_fragis[bi]];
- err+=oc_enc_frag_satd_thresh(_enc,
- _src+frag_offs,_ref+frag_offs+mvoffset,_ystride,UINT_MAX);
+ if(_enc->sp_level<OC_SP_LEVEL_NOSATD){
+ err+=oc_enc_frag_satd(_enc,&dc,
+ _src+frag_offs,_ref+frag_offs+mvoffset,_ystride);
+ err+=abs(dc);
+ }
+ else{
+ err+=oc_enc_frag_sad(_enc,
+ _src+frag_offs,_ref+frag_offs+mvoffset,_ystride);
+ }
}
return err;
}
@@ -229,8 +247,11 @@
static unsigned oc_mcenc_ysatd_check_bcandidate_fullpel(const oc_enc_ctx *_enc,
ptrdiff_t _frag_offs,int _dx,int _dy,
const unsigned char *_src,const unsigned char *_ref,int _ystride){
- return oc_enc_frag_satd_thresh(_enc,
- _src+_frag_offs,_ref+_frag_offs+_dx+_dy*_ystride,_ystride,UINT_MAX);
+ unsigned err;
+ int dc;
+ err=oc_enc_frag_satd(_enc,&dc,
+ _src+_frag_offs,_ref+_frag_offs+_dx+_dy*_ystride,_ystride);
+ return err+abs(dc);
}
/*Perform a motion vector search for this macro block against a single
@@ -239,11 +260,14 @@ static unsigned oc_mcenc_ysatd_check_bcandidate_fullpel(const oc_enc_ctx *_enc,
the work can be shared.
The actual motion vector is stored in the appropriate place in the
oc_mb_enc_info structure.
- _mcenc: The motion compensation context.
- _accum: Drop frame/golden MV accumulators.
- _mbi: The macro block index.
- _frame: The frame to search, either OC_FRAME_PREV or OC_FRAME_GOLD.*/
-void oc_mcenc_search_frame(oc_enc_ctx *_enc,int _accum[2],int _mbi,int _frame){
+ _accum: Drop frame/golden MV accumulators.
+ _mbi: The macro block index.
+ _frame: The frame to use for SATD calculations and refinement,
+ either OC_FRAME_PREV or OC_FRAME_GOLD.
+ _frame_full: The frame to perform the 1px search on, one of OC_FRAME_PREV,
+ OC_FRAME_GOLD, OC_FRAME_PREV_ORIG, or OC_FRAME_GOLD_ORIG.*/
+void oc_mcenc_search_frame(oc_enc_ctx *_enc,oc_mv _accum,int _mbi,int _frame,
+ int _frame_full){
/*Note: Traditionally this search is done using a rate-distortion objective
function of the form D+lambda*R.
    However, xiphmont tested this and found it produced a small degradation,
@@ -264,6 +288,7 @@ void oc_mcenc_search_frame(oc_enc_ctx *_enc,int _accum[2],int _mbi,int _frame){
const ptrdiff_t *fragis;
const unsigned char *src;
const unsigned char *ref;
+ const unsigned char *satd_ref;
int ystride;
oc_mb_enc_info *embs;
ogg_int32_t hit_cache[31];
@@ -278,17 +303,18 @@ void oc_mcenc_search_frame(oc_enc_ctx *_enc,int _accum[2],int _mbi,int _frame){
int bi;
embs=_enc->mb_info;
/*Find some candidate motion vectors.*/
- oc_mcenc_find_candidates(_enc,&mcenc,_accum,_mbi,_frame);
+ oc_mcenc_find_candidates_a(_enc,&mcenc,_accum,_mbi,_frame);
/*Clear the cache of locations we've examined.*/
memset(hit_cache,0,sizeof(hit_cache));
/*Start with the median predictor.*/
- candx=mcenc.candidates[0][0];
- candy=mcenc.candidates[0][1];
+ candx=OC_DIV2(mcenc.candidates[0][0]);
+ candy=OC_DIV2(mcenc.candidates[0][1]);
hit_cache[candy+15]|=(ogg_int32_t)1<<candx+15;
frag_buf_offs=_enc->state.frag_buf_offs;
fragis=_enc->state.mb_maps[_mbi][0];
src=_enc->state.ref_frame_data[OC_FRAME_IO];
- ref=_enc->state.ref_frame_data[_enc->state.ref_frame_idx[_frame]];
+ ref=_enc->state.ref_frame_data[_frame_full];
+ satd_ref=_enc->state.ref_frame_data[_frame];
ystride=_enc->state.ref_ystride[0];
/*TODO: customize error function for speed/(quality+size) tradeoff.*/
best_err=oc_mcenc_ysad_check_mbcandidate_fullpel(_enc,
@@ -317,8 +343,8 @@ void oc_mcenc_search_frame(oc_enc_ctx *_enc,int _accum[2],int _mbi,int _frame){
t2+=(t2>>OC_YSAD_THRESH2_SCALE_BITS)+OC_YSAD_THRESH2_OFFSET;
/*Examine the candidates in set A.*/
for(ci=1;ci<mcenc.setb0;ci++){
- candx=mcenc.candidates[ci][0];
- candy=mcenc.candidates[ci][1];
+ candx=OC_DIV2(mcenc.candidates[ci][0]);
+ candy=OC_DIV2(mcenc.candidates[ci][1]);
/*If we've already examined this vector, then we would be using it if it
was better than what we are using.*/
hitbit=(ogg_int32_t)1<<candx+15;
@@ -340,10 +366,11 @@ void oc_mcenc_search_frame(oc_enc_ctx *_enc,int _accum[2],int _mbi,int _frame){
}
}
if(best_err>t2){
+ oc_mcenc_find_candidates_b(_enc,&mcenc,_accum,_mbi,_frame);
/*Examine the candidates in set B.*/
for(;ci<mcenc.ncandidates;ci++){
- candx=mcenc.candidates[ci][0];
- candy=mcenc.candidates[ci][1];
+ candx=OC_DIV2(mcenc.candidates[ci][0]);
+ candy=OC_DIV2(mcenc.candidates[ci][1]);
hitbit=(ogg_int32_t)1<<candx+15;
if(hit_cache[candy+15]&hitbit)continue;
hit_cache[candy+15]|=hitbit;
@@ -475,58 +502,50 @@ void oc_mcenc_search_frame(oc_enc_ctx *_enc,int _accum[2],int _mbi,int _frame){
candx=best_vec[0];
candy=best_vec[1];
embs[_mbi].satd[_frame]=oc_mcenc_ysatd_check_mbcandidate_fullpel(_enc,
- frag_buf_offs,fragis,candx,candy,src,ref,ystride);
- embs[_mbi].analysis_mv[0][_frame][0]=(signed char)(candx<<1);
- embs[_mbi].analysis_mv[0][_frame][1]=(signed char)(candy<<1);
- if(_frame==OC_FRAME_PREV){
+ frag_buf_offs,fragis,candx,candy,src,satd_ref,ystride);
+ embs[_mbi].analysis_mv[0][_frame]=OC_MV(candx<<1,candy<<1);
+ if(_frame==OC_FRAME_PREV&&_enc->sp_level<OC_SP_LEVEL_FAST_ANALYSIS){
for(bi=0;bi<4;bi++){
candx=best_block_vec[bi][0];
candy=best_block_vec[bi][1];
embs[_mbi].block_satd[bi]=oc_mcenc_ysatd_check_bcandidate_fullpel(_enc,
- frag_buf_offs[fragis[bi]],candx,candy,src,ref,ystride);
- embs[_mbi].block_mv[bi][0]=(signed char)(candx<<1);
- embs[_mbi].block_mv[bi][1]=(signed char)(candy<<1);
+ frag_buf_offs[fragis[bi]],candx,candy,src,satd_ref,ystride);
+ embs[_mbi].block_mv[bi]=OC_MV(candx<<1,candy<<1);
}
}
}
void oc_mcenc_search(oc_enc_ctx *_enc,int _mbi){
- oc_mv2 *mvs;
- int accum_p[2];
- int accum_g[2];
+ oc_mv2 *mvs;
+ oc_mv accum_p;
+ oc_mv accum_g;
+ oc_mv mv2_p;
mvs=_enc->mb_info[_mbi].analysis_mv;
- if(_enc->prevframe_dropped){
- accum_p[0]=mvs[0][OC_FRAME_PREV][0];
- accum_p[1]=mvs[0][OC_FRAME_PREV][1];
- }
- else accum_p[1]=accum_p[0]=0;
- accum_g[0]=mvs[2][OC_FRAME_GOLD][0];
- accum_g[1]=mvs[2][OC_FRAME_GOLD][1];
- mvs[0][OC_FRAME_PREV][0]-=mvs[2][OC_FRAME_PREV][0];
- mvs[0][OC_FRAME_PREV][1]-=mvs[2][OC_FRAME_PREV][1];
+ if(_enc->prevframe_dropped)accum_p=mvs[0][OC_FRAME_PREV];
+ else accum_p=0;
+ accum_g=mvs[2][OC_FRAME_GOLD];
/*Move the motion vector predictors back a frame.*/
- memmove(mvs+1,mvs,2*sizeof(*mvs));
+ mv2_p=mvs[2][OC_FRAME_PREV];
+ mvs[2][OC_FRAME_GOLD]=mvs[1][OC_FRAME_GOLD];
+ mvs[2][OC_FRAME_PREV]=mvs[1][OC_FRAME_PREV];
+ mvs[1][OC_FRAME_GOLD]=mvs[0][OC_FRAME_GOLD];
+ mvs[1][OC_FRAME_PREV]=OC_MV_SUB(mvs[0][OC_FRAME_PREV],mv2_p);
/*Search the last frame.*/
- oc_mcenc_search_frame(_enc,accum_p,_mbi,OC_FRAME_PREV);
- mvs[2][OC_FRAME_PREV][0]=accum_p[0];
- mvs[2][OC_FRAME_PREV][1]=accum_p[1];
+ oc_mcenc_search_frame(_enc,accum_p,_mbi,OC_FRAME_PREV,OC_FRAME_PREV_ORIG);
+ mvs[2][OC_FRAME_PREV]=accum_p;
/*GOLDEN MVs are different from PREV MVs in that they're each absolute
offsets from some frame in the past rather than relative offsets from the
frame before.
For predictor calculation to make sense, we need them to be in the same
form as PREV MVs.*/
- mvs[1][OC_FRAME_GOLD][0]-=mvs[2][OC_FRAME_GOLD][0];
- mvs[1][OC_FRAME_GOLD][1]-=mvs[2][OC_FRAME_GOLD][1];
- mvs[2][OC_FRAME_GOLD][0]-=accum_g[0];
- mvs[2][OC_FRAME_GOLD][1]-=accum_g[1];
+ mvs[1][OC_FRAME_GOLD]=OC_MV_SUB(mvs[1][OC_FRAME_GOLD],mvs[2][OC_FRAME_GOLD]);
+ mvs[2][OC_FRAME_GOLD]=OC_MV_SUB(mvs[2][OC_FRAME_GOLD],accum_g);
/*Search the golden frame.*/
- oc_mcenc_search_frame(_enc,accum_g,_mbi,OC_FRAME_GOLD);
+ oc_mcenc_search_frame(_enc,accum_g,_mbi,OC_FRAME_GOLD,OC_FRAME_GOLD_ORIG);
/*Put GOLDEN MVs back into absolute offset form.
The newest MV is already an absolute offset.*/
- mvs[2][OC_FRAME_GOLD][0]+=accum_g[0];
- mvs[2][OC_FRAME_GOLD][1]+=accum_g[1];
- mvs[1][OC_FRAME_GOLD][0]+=mvs[2][OC_FRAME_GOLD][0];
- mvs[1][OC_FRAME_GOLD][1]+=mvs[2][OC_FRAME_GOLD][1];
+ mvs[2][OC_FRAME_GOLD]=OC_MV_ADD(mvs[2][OC_FRAME_GOLD],accum_g);
+ mvs[1][OC_FRAME_GOLD]=OC_MV_ADD(mvs[1][OC_FRAME_GOLD],mvs[2][OC_FRAME_GOLD]);
}
#if 0
@@ -543,7 +562,7 @@ static int oc_mcenc_ysad_halfpel_mbrefine(const oc_enc_ctx *_enc,int _mbi,
int sitei;
int err;
src=_enc->state.ref_frame_data[OC_FRAME_IO];
- ref=_enc->state.ref_frame_data[_enc->state.ref_frame_idx[_framei]];
+ ref=_enc->state.ref_frame_data[_framei];
frag_buf_offs=_enc->state.frag_buf_offs;
fragis=_enc->state.mb_maps[_mbi][0];
ystride=_enc->state.ref_ystride[0];
@@ -598,7 +617,7 @@ static unsigned oc_mcenc_ysatd_halfpel_mbrefine(const oc_enc_ctx *_enc,
int sitei;
int err;
src=_enc->state.ref_frame_data[OC_FRAME_IO];
- ref=_enc->state.ref_frame_data[_enc->state.ref_frame_idx[_frame]];
+ ref=_enc->state.ref_frame_data[_frame];
frag_buf_offs=_enc->state.frag_buf_offs;
fragis=_enc->state.mb_maps[_mbi][0];
ystride=_enc->state.ref_ystride[0];
@@ -627,8 +646,14 @@ static unsigned oc_mcenc_ysatd_halfpel_mbrefine(const oc_enc_ctx *_enc,
ymask=OC_SIGNMASK(((_vec[1]<<1)+dy)^dy);
mvoffset0=mvoffset_base+(dx&xmask)+(offset_y[site]&ymask);
mvoffset1=mvoffset_base+(dx&~xmask)+(offset_y[site]&~ymask);
- err=oc_satd16_halfpel(_enc,frag_buf_offs,fragis,
- mvoffset0,mvoffset1,src,ref,ystride,_best_err);
+ if(_enc->sp_level<OC_SP_LEVEL_NOSATD){
+ err=oc_satd16_halfpel(_enc,frag_buf_offs,fragis,
+ mvoffset0,mvoffset1,src,ref,ystride,_best_err);
+ }
+ else{
+ err=oc_sad16_halfpel(_enc,frag_buf_offs,fragis,
+ mvoffset0,mvoffset1,src,ref,ystride,_best_err);
+ }
if(err<_best_err){
_best_err=err;
best_site=site;
@@ -643,12 +668,11 @@ void oc_mcenc_refine1mv(oc_enc_ctx *_enc,int _mbi,int _frame){
oc_mb_enc_info *embs;
int vec[2];
embs=_enc->mb_info;
- vec[0]=OC_DIV2(embs[_mbi].analysis_mv[0][_frame][0]);
- vec[1]=OC_DIV2(embs[_mbi].analysis_mv[0][_frame][1]);
+ vec[0]=OC_DIV2(OC_MV_X(embs[_mbi].analysis_mv[0][_frame]));
+ vec[1]=OC_DIV2(OC_MV_Y(embs[_mbi].analysis_mv[0][_frame]));
embs[_mbi].satd[_frame]=oc_mcenc_ysatd_halfpel_mbrefine(_enc,
_mbi,vec,embs[_mbi].satd[_frame],_frame);
- embs[_mbi].analysis_mv[0][_frame][0]=(signed char)vec[0];
- embs[_mbi].analysis_mv[0][_frame][1]=(signed char)vec[1];
+ embs[_mbi].analysis_mv[0][_frame]=OC_MV(vec[0],vec[1]);
}
#if 0
@@ -704,6 +728,7 @@ static unsigned oc_mcenc_ysatd_halfpel_brefine(const oc_enc_ctx *_enc,
best_site=4;
for(sitei=0;sitei<8;sitei++){
unsigned err;
+ int dc;
int site;
int xmask;
int ymask;
@@ -723,8 +748,9 @@ static unsigned oc_mcenc_ysatd_halfpel_brefine(const oc_enc_ctx *_enc,
ymask=OC_SIGNMASK(((_vec[1]<<1)+dy)^dy);
mvoffset0=mvoffset_base+(dx&xmask)+(_offset_y[site]&ymask);
mvoffset1=mvoffset_base+(dx&~xmask)+(_offset_y[site]&~ymask);
- err=oc_enc_frag_satd2_thresh(_enc,_src,
- _ref+mvoffset0,_ref+mvoffset1,_ystride,_best_err);
+ err=oc_enc_frag_satd2(_enc,&dc,_src,
+ _ref+mvoffset0,_ref+mvoffset1,_ystride);
+ err+=abs(dc);
if(err<_best_err){
_best_err=err;
best_site=site;
@@ -748,7 +774,7 @@ void oc_mcenc_refine4mv(oc_enc_ctx *_enc,int _mbi){
frag_buf_offs=_enc->state.frag_buf_offs;
fragis=_enc->state.mb_maps[_mbi][0];
src=_enc->state.ref_frame_data[OC_FRAME_IO];
- ref=_enc->state.ref_frame_data[_enc->state.ref_frame_idx[OC_FRAME_PREV]];
+ ref=_enc->state.ref_frame_data[OC_FRAME_PREV];
offset_y[0]=offset_y[1]=offset_y[2]=-ystride;
offset_y[3]=offset_y[5]=0;
offset_y[6]=offset_y[7]=offset_y[8]=ystride;
@@ -757,11 +783,10 @@ void oc_mcenc_refine4mv(oc_enc_ctx *_enc,int _mbi){
ptrdiff_t frag_offs;
int vec[2];
frag_offs=frag_buf_offs[fragis[bi]];
- vec[0]=OC_DIV2(embs[_mbi].block_mv[bi][0]);
- vec[1]=OC_DIV2(embs[_mbi].block_mv[bi][1]);
+ vec[0]=OC_DIV2(OC_MV_X(embs[_mbi].block_mv[bi]));
+ vec[1]=OC_DIV2(OC_MV_Y(embs[_mbi].block_mv[bi]));
embs[_mbi].block_satd[bi]=oc_mcenc_ysatd_halfpel_brefine(_enc,vec,
src+frag_offs,ref+frag_offs,ystride,offset_y,embs[_mbi].block_satd[bi]);
- embs[_mbi].ref_mv[bi][0]=(signed char)vec[0];
- embs[_mbi].ref_mv[bi][1]=(signed char)vec[1];
+ embs[_mbi].ref_mv[bi]=OC_MV(vec[0],vec[1]);
}
}
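Throughout the mcenc.c hunks above, the old two-element signed char motion vectors (e.g. analysis_mv[0][_frame][0]/[1]) become a single packed oc_mv value accessed through OC_MV(), OC_MV_X(), OC_MV_Y(), OC_MV_ADD() and OC_MV_SUB(). Those macros are defined elsewhere in the libtheora headers and are not part of this diff; the sketch below is a hypothetical but representative packing, shown only so the accessor calls in the hunks are easy to follow:

/*Hypothetical packing, for illustration only; the real oc_mv type and
   OC_MV_* macros may differ.  x occupies the low byte and y the high byte of
   a 16-bit value.*/
typedef short mv_sketch;

#define MV_PACK(_x,_y) ((mv_sketch)((((_y)&0xFF)<<8)|((_x)&0xFF)))
#define MV_X(_mv)      ((signed char)((_mv)&0xFF))
#define MV_Y(_mv)      ((signed char)(((_mv)&0xFFFF)>>8))
/*Component-wise add/subtract, the operations oc_mcenc_search() needs for its
   accumulator bookkeeping.*/
#define MV_ADD(_a,_b)  MV_PACK(MV_X(_a)+MV_X(_b),MV_Y(_a)+MV_Y(_b))
#define MV_SUB(_a,_b)  MV_PACK(MV_X(_a)-MV_X(_b),MV_Y(_a)-MV_Y(_b))

With a packing like this, OC_MV_SUB(mvs[0][OC_FRAME_PREV],mv2_p) in oc_mcenc_search() is exactly the per-component subtraction the removed [0]/[1] arithmetic used to spell out.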
diff --git a/thirdparty/libtheora/modedec.h b/thirdparty/libtheora/modedec.h
index ea12c64afd..efe640e263 100644
--- a/thirdparty/libtheora/modedec.h
+++ b/thirdparty/libtheora/modedec.h
@@ -1,614 +1,91 @@
/*File generated by libtheora with OC_COLLECT_METRICS defined at compile time.*/
#if !defined(_modedec_H)
# define _modedec_H (1)
+# include "encint.h"
-# if defined(OC_COLLECT_METRICS)
-typedef struct oc_mode_metrics oc_mode_metrics;
+/*The log of the average quantizer for each of the OC_MODE_RD table rows
+ (e.g., for the represented qi's, and each pli and qti), in Q10 format.
+ The actual statistics used by the encoder will be interpolated from
+ that table based on log_plq for the actual quantization matrix used.*/
+# if !defined(OC_COLLECT_METRICS)
+static const
# endif
-typedef struct oc_mode_rd oc_mode_rd;
-
-
-
-/*The number of extra bits of precision at which to store rate metrics.*/
-# define OC_BIT_SCALE (6)
-/*The number of extra bits of precision at which to store RMSE metrics.
- This must be at least half OC_BIT_SCALE (rounded up).*/
-# define OC_RMSE_SCALE (5)
-/*The number of bins to partition statistics into.*/
-# define OC_SAD_BINS (24)
-/*The number of bits of precision to drop from SAD scores to assign them to a
- bin.*/
-# define OC_SAD_SHIFT (9)
-
-
-
-# if defined(OC_COLLECT_METRICS)
-struct oc_mode_metrics{
- double fragw;
- double satd;
- double rate;
- double rmse;
- double satd2;
- double satdrate;
- double rate2;
- double satdrmse;
- double rmse2;
+ogg_int16_t OC_MODE_LOGQ[OC_LOGQ_BINS][3][2]={
+ { {0x1F05,0x2101},{0x206E,0x2101},{0x206E,0x2101} },
+ { {0x1C9A,0x1EAC},{0x1E0E,0x1EAC},{0x1E0E,0x1EAC} },
+ { {0x1A31,0x1C48},{0x1B6F,0x1C48},{0x1B6F,0x1C48} },
+ { {0x17B0,0x19E7},{0x1938,0x19E7},{0x1938,0x19E7} },
+ { {0x152F,0x178F},{0x16AB,0x178F},{0x16AB,0x178F} },
+ { {0x12F1,0x1534},{0x145D,0x1534},{0x145D,0x1534} },
+ { {0x0FF3,0x1321},{0x11BE,0x1321},{0x11BE,0x1321} },
+ { {0x0E1F,0x1073},{0x0E93,0x1073},{0x0E93,0x1073} }
};
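The comment above explains that these eight rows are only sample points: the encoder brackets the actual log_plq between two rows and interpolates the corresponding statistics. A minimal sketch of that bracketing-and-lerp step, assuming plain linear interpolation in Q10 log-quantizer space over one column of per-row statistics; the function name and calling convention are illustrative, not the encoder's actual lookup (which lives in the analysis code, not in this header):

/*Sketch only: pick the pair of OC_MODE_LOGQ rows (stored in decreasing
   order, so adjacent rows always differ) that bracket _log_plq and linearly
   interpolate a per-row statistic; values outside the covered range
   extrapolate from the edge pair.*/
static int oc_mode_stat_interp_sketch(int _log_plq,int _pli,int _qti,
 const ogg_int16_t _stat[OC_LOGQ_BINS]){
  int qi0;
  int q0;
  int q1;
  for(qi0=0;qi0<OC_LOGQ_BINS-2&&OC_MODE_LOGQ[qi0+1][_pli][_qti]>_log_plq;qi0++);
  q0=OC_MODE_LOGQ[qi0][_pli][_qti];
  q1=OC_MODE_LOGQ[qi0+1][_pli][_qti];
  return _stat[qi0]+(int)((ogg_int32_t)(_stat[qi0+1]-_stat[qi0])
   *(q0-_log_plq)/(q0-q1));
}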
-
-int oc_has_mode_metrics;
-oc_mode_metrics OC_MODE_METRICS[64][3][2][OC_SAD_BINS];
-# endif
-
-
-
-struct oc_mode_rd{
- ogg_int16_t rate;
- ogg_int16_t rmse;
-};
-
-
# if !defined(OC_COLLECT_METRICS)
static const
# endif
-oc_mode_rd OC_MODE_RD[64][3][2][OC_SAD_BINS]={
+oc_mode_rd OC_MODE_RD_SATD[OC_LOGQ_BINS][3][2][OC_COMP_BINS]={
{
{
/*Y' qi=0 INTRA*/
{
- { 87, -66},{ 132, 1611},{ 197, 3474},{ 285, 5130},
- { 376, 6419},{ 450, 7545},{ 521, 8587},{ 600, 9587},
- { 689,10498},{ 790,11348},{ 899,12158},{ 1030,12855},
- { 1166,13459},{ 1276,14052},{ 1353,14732},{ 1444,15425},
- { 1535,16101},{ 1609,16856},{ 1697,17532},{ 1823,17995},
- { 1962,18426},{ 2085,18919},{ 2201,19503},{ 2304,20307}
+ { 57, 1550},{ 121, 2460},{ 185, 3901},{ 336, 5189},
+ { 406, 6243},{ 501, 7329},{ 565, 8292},{ 674, 9257},
+ { 746,10219},{ 843,11056},{ 961,11822},{ 1120,12512},
+ { 1208,13233},{ 1394,13600},{ 1409,14381},{ 1492,15129},
+ { 1593,15804},{ 1639,16573},{ 1731,17161},{ 1844,17707},
+ { 1949,18300},{ 2073,18654},{ 2140,19465},{ 2278,19794}
},
/*Y' qi=0 INTER*/
{
- { 32, -105},{ 40, 1268},{ 54, 2919},{ 91, 4559},
- { 118, 6244},{ 132, 7932},{ 142, 9514},{ 149,10989},
- { 155,12375},{ 161,13679},{ 168,14958},{ 176,16215},
- { 187,17431},{ 196,18623},{ 207,19790},{ 218,20941},
- { 230,22083},{ 246,23213},{ 265,24333},{ 292,25439},
- { 328,26512},{ 372,27538},{ 427,28522},{ 494,29479}
+ { -18, 1274},{ 23, 2505},{ 32, 3612},{ 57, 5153},
+ { 79, 6636},{ 97, 8082},{ 109, 9505},{ 122,10924},
+ { 134,12293},{ 145,13634},{ 158,14942},{ 172,16212},
+ { 186,17422},{ 198,18604},{ 209,19757},{ 218,20875},
+ { 235,21980},{ 253,23056},{ 276,24121},{ 305,25184},
+ { 342,26202},{ 393,27140},{ 439,28140},{ 556,28659}
}
},
{
/*Cb qi=0 INTRA*/
{
- { 1, 6},{ 27, 368},{ 52, 738},{ 67, 1171},
- { 80, 1642},{ 99, 2134},{ 110, 2642},{ 112, 3144},
- { 126, 3578},{ 154, 3967},{ 167, 4387},{ 172, 4839},
- { 191, 5278},{ 208, 5666},{ 220, 6036},{ 223, 6398},
- { 227, 6814},{ 253, 7157},{ 284, 7403},{ 292, 7699},
- { 314, 7983},{ 339, 8203},{ 363, 8460},{ 399, 8919}
+ { 32, 1763},{ 56, 2150},{ 78, 2336},{ 88, 2608},
+ { 105, 2975},{ 121, 3297},{ 113, 3460},{ 126, 3993},
+ { 142, 4432},{ 177, 4733},{ 185, 5058},{ 194, 5447},
+ { 220, 5812},{ 227, 6202},{ 246, 6415},{ 269, 6821},
+ { 279, 7026},{ 313, 7313},{ 321, 7708},{ 316, 8021},
+ { 370, 8203},{ 389, 8573},{ 410, 8607},{ 431, 8816}
},
/*Cb qi=0 INTER*/
{
- { 68, -55},{ 63, 275},{ 58, 602},{ 53, 936},
- { 50, 1290},{ 54, 1691},{ 58, 2116},{ 62, 2553},
- { 67, 2992},{ 72, 3422},{ 78, 3843},{ 84, 4253},
- { 89, 4658},{ 94, 5062},{ 98, 5455},{ 100, 5848},
- { 102, 6231},{ 104, 6604},{ 104, 6982},{ 105, 7359},
- { 105, 7733},{ 104, 8104},{ 105, 8465},{ 111, 8828}
+ { 3, 282},{ 3, 1200},{ 3, 1605},{ 6, 2190},
+ { 15, 2519},{ 18, 2798},{ 21, 3115},{ 25, 3460},
+ { 33, 3839},{ 40, 4217},{ 47, 4592},{ 51, 4958},
+ { 56, 5326},{ 59, 5710},{ 63, 6066},{ 65, 6412},
+ { 67, 6762},{ 68, 7104},{ 70, 7461},{ 72, 7829},
+ { 77, 8200},{ 80, 8566},{ 86, 8906},{ 90, 9203}
}
},
{
/*Cr qi=0 INTRA*/
{
- { 1, 8},{ 23, 375},{ 47, 759},{ 63, 1220},
- { 71, 1693},{ 82, 2171},{ 94, 2652},{ 109, 3103},
- { 125, 3567},{ 133, 3995},{ 151, 4375},{ 168, 4819},
- { 174, 5244},{ 190, 5635},{ 215, 6005},{ 242, 6347},
- { 257, 6758},{ 280, 7068},{ 311, 7336},{ 326, 7652},
- { 346, 7968},{ 372, 8213},{ 388, 8515},{ 408, 9060}
+ { 27, 1720},{ 44, 1920},{ 66, 2255},{ 73, 2429},
+ { 95, 2988},{ 103, 3279},{ 123, 3691},{ 129, 4012},
+ { 151, 4415},{ 150, 4760},{ 183, 5008},{ 193, 5351},
+ { 211, 5788},{ 235, 6134},{ 263, 6400},{ 276, 6711},
+ { 291, 7100},{ 346, 7285},{ 329, 7616},{ 387, 7827},
+ { 361, 8214},{ 430, 8534},{ 429, 8608},{ 450, 8823}
},
/*Cr qi=0 INTER*/
{
- { 69, 0},{ 60, 314},{ 49, 624},{ 45, 943},
- { 45, 1285},{ 49, 1691},{ 55, 2130},{ 62, 2560},
- { 71, 2973},{ 79, 3385},{ 85, 3800},{ 89, 4207},
- { 92, 4620},{ 95, 5037},{ 96, 5436},{ 97, 5839},
- { 98, 6252},{ 99, 6653},{ 99, 7038},{ 103, 7426},
- { 107, 7810},{ 108, 8178},{ 107, 8539},{ 106, 8937}
- }
- }
- },
- {
- {
- /*Y' qi=1 INTRA*/
- {
- { 81, -71},{ 133, 1610},{ 203, 3460},{ 296, 5083},
- { 392, 6342},{ 467, 7454},{ 541, 8486},{ 625, 9466},
- { 716,10352},{ 823,11181},{ 940,11961},{ 1074,12643},
- { 1211,13233},{ 1324,13807},{ 1408,14489},{ 1504,15167},
- { 1598,15824},{ 1679,16544},{ 1788,17161},{ 1928,17579},
- { 2070,17991},{ 2202,18456},{ 2324,19021},{ 2425,19894}
- },
- /*Y' qi=1 INTER*/
- {
- { 34, 4},{ 40, 1307},{ 55, 2914},{ 93, 4555},
- { 120, 6243},{ 134, 7912},{ 144, 9468},{ 152,10918},
- { 158,12275},{ 164,13569},{ 171,14846},{ 180,16098},
- { 191,17310},{ 204,18484},{ 216,19636},{ 228,20779},
- { 242,21912},{ 261,23036},{ 286,24146},{ 320,25221},
- { 363,26265},{ 418,27261},{ 485,28203},{ 551,29148}
- }
- },
- {
- /*Cb qi=1 INTRA*/
- {
- { 1, 6},{ 28, 367},{ 52, 738},{ 68, 1172},
- { 86, 1644},{ 106, 2135},{ 115, 2642},{ 119, 3141},
- { 132, 3569},{ 157, 3951},{ 172, 4366},{ 177, 4819},
- { 194, 5258},{ 211, 5638},{ 224, 6006},{ 233, 6367},
- { 236, 6784},{ 258, 7121},{ 299, 7357},{ 319, 7637},
- { 337, 7921},{ 358, 8141},{ 381, 8367},{ 401, 8768}
- },
- /*Cb qi=1 INTER*/
- {
- { 95, -31},{ 81, 295},{ 67, 614},{ 53, 953},
- { 48, 1305},{ 51, 1700},{ 56, 2125},{ 61, 2563},
- { 67, 3008},{ 73, 3435},{ 79, 3844},{ 85, 4251},
- { 90, 4663},{ 95, 5073},{ 98, 5458},{ 100, 5844},
- { 101, 6231},{ 102, 6606},{ 102, 6980},{ 103, 7347},
- { 104, 7726},{ 105, 8096},{ 105, 8453},{ 105, 8789}
- }
- },
- {
- /*Cr qi=1 INTRA*/
- {
- { 1, 8},{ 25, 375},{ 50, 759},{ 65, 1221},
- { 74, 1695},{ 86, 2172},{ 101, 2651},{ 117, 3101},
- { 129, 3561},{ 135, 3985},{ 153, 4368},{ 171, 4807},
- { 182, 5223},{ 202, 5608},{ 225, 5964},{ 251, 6300},
- { 271, 6697},{ 295, 6978},{ 324, 7235},{ 348, 7558},
- { 367, 7877},{ 394, 8101},{ 413, 8386},{ 409, 8945}
- },
- /*Cr qi=1 INTER*/
- {
- { 66, 11},{ 59, 323},{ 51, 631},{ 44, 949},
- { 44, 1292},{ 49, 1703},{ 56, 2140},{ 62, 2566},
- { 69, 2991},{ 77, 3397},{ 84, 3799},{ 89, 4211},
- { 93, 4634},{ 94, 5049},{ 95, 5444},{ 96, 5854},
- { 94, 6260},{ 95, 6640},{ 96, 7032},{ 101, 7423},
- { 104, 7790},{ 105, 8158},{ 109, 8527},{ 108, 8872}
- }
- }
- },
- {
- {
- /*Y' qi=2 INTRA*/
- {
- { 87, -72},{ 139, 1607},{ 213, 3426},{ 315, 4992},
- { 416, 6217},{ 495, 7315},{ 574, 8317},{ 666, 9265},
- { 763,10124},{ 875,10906},{ 1001,11654},{ 1147,12305},
- { 1289,12865},{ 1407,13424},{ 1503,14076},{ 1610,14724},
- { 1720,15342},{ 1815,16020},{ 1937,16579},{ 2084,16981},
- { 2236,17371},{ 2385,17779},{ 2536,18250},{ 2689,18931}
- },
- /*Y' qi=2 INTER*/
- {
- { 30, -2},{ 40, 1308},{ 57, 2921},{ 96, 4567},
- { 122, 6260},{ 136, 7902},{ 148, 9418},{ 156,10826},
- { 162,12157},{ 169,13448},{ 177,14709},{ 188,15938},
- { 200,17133},{ 213,18295},{ 228,19433},{ 245,20564},
- { 264,21685},{ 289,22790},{ 323,23876},{ 368,24916},
- { 427,25906},{ 499,26837},{ 585,27700},{ 680,28514}
- }
- },
- {
- /*Cb qi=2 INTRA*/
- {
- { 1, 6},{ 30, 367},{ 58, 738},{ 77, 1172},
- { 93, 1645},{ 111, 2137},{ 123, 2642},{ 126, 3133},
- { 136, 3553},{ 162, 3934},{ 178, 4352},{ 183, 4803},
- { 199, 5231},{ 220, 5596},{ 235, 5957},{ 245, 6314},
- { 256, 6718},{ 286, 7048},{ 320, 7285},{ 336, 7568},
- { 366, 7829},{ 387, 8045},{ 405, 8261},{ 445, 8550}
- },
- /*Cb qi=2 INTER*/
- {
- { 115, -61},{ 93, 277},{ 71, 609},{ 54, 963},
- { 49, 1329},{ 53, 1715},{ 58, 2138},{ 63, 2583},
- { 69, 3017},{ 75, 3442},{ 81, 3857},{ 88, 4263},
- { 93, 4667},{ 96, 5065},{ 101, 5451},{ 101, 5832},
- { 102, 6213},{ 103, 6593},{ 103, 6968},{ 104, 7336},
- { 104, 7710},{ 105, 8076},{ 106, 8440},{ 106, 8822}
- }
- },
- {
- /*Cr qi=2 INTRA*/
- {
- { 1, 8},{ 27, 375},{ 54, 759},{ 70, 1222},
- { 79, 1696},{ 89, 2173},{ 106, 2652},{ 123, 3098},
- { 135, 3553},{ 143, 3972},{ 161, 4348},{ 181, 4782},
- { 194, 5189},{ 213, 5565},{ 235, 5907},{ 266, 6229},
- { 286, 6618},{ 311, 6897},{ 339, 7152},{ 362, 7454},
- { 392, 7721},{ 416, 7946},{ 429, 8227},{ 458, 8540}
- },
- /*Cr qi=2 INTER*/
- {
- { 74, 20},{ 63, 330},{ 51, 635},{ 44, 942},
- { 47, 1287},{ 54, 1710},{ 59, 2147},{ 65, 2571},
- { 72, 2996},{ 79, 3413},{ 86, 3820},{ 91, 4230},
- { 93, 4642},{ 95, 5046},{ 95, 5442},{ 95, 5839},
- { 96, 6243},{ 97, 6641},{ 99, 7021},{ 101, 7396},
- { 103, 7764},{ 106, 8138},{ 109, 8507},{ 114, 8851}
- }
- }
- },
- {
- {
- /*Y' qi=3 INTRA*/
- {
- { 91, -67},{ 141, 1606},{ 219, 3405},{ 328, 4929},
- { 433, 6122},{ 515, 7209},{ 598, 8204},{ 693, 9145},
- { 796, 9986},{ 912,10756},{ 1045,11471},{ 1200,12079},
- { 1345,12640},{ 1471,13179},{ 1571,13809},{ 1678,14450},
- { 1798,15047},{ 1905,15701},{ 2043,16205},{ 2202,16569},
- { 2351,16971},{ 2501,17393},{ 2660,17851},{ 2825,18455}
- },
- /*Y' qi=3 INTER*/
- {
- { 53, -164},{ 38, 1314},{ 59, 2917},{ 99, 4563},
- { 124, 6253},{ 139, 7882},{ 150, 9375},{ 159,10749},
- { 166,12059},{ 173,13349},{ 183,14608},{ 194,15826},
- { 208,17003},{ 223,18150},{ 240,19287},{ 259,20411},
- { 284,21508},{ 317,22593},{ 359,23656},{ 414,24671},
- { 483,25634},{ 569,26519},{ 670,27332},{ 786,28072}
- }
- },
- {
- /*Cb qi=3 INTRA*/
- {
- { 1, 5},{ 31, 367},{ 58, 739},{ 78, 1173},
- { 96, 1645},{ 113, 2134},{ 125, 2638},{ 133, 3127},
- { 148, 3542},{ 171, 3915},{ 184, 4328},{ 192, 4776},
- { 209, 5197},{ 230, 5556},{ 245, 5909},{ 252, 6261},
- { 272, 6641},{ 304, 6942},{ 330, 7184},{ 342, 7477},
- { 380, 7736},{ 404, 7962},{ 428, 8151},{ 469, 8430}
- },
- /*Cb qi=3 INTER*/
- {
- { 86, -29},{ 72, 296},{ 58, 618},{ 46, 964},
- { 47, 1338},{ 51, 1743},{ 56, 2158},{ 63, 2594},
- { 69, 3035},{ 77, 3455},{ 84, 3859},{ 89, 4266},
- { 94, 4673},{ 98, 5074},{ 101, 5460},{ 101, 5842},
- { 101, 6217},{ 101, 6593},{ 102, 6964},{ 104, 7325},
- { 103, 7696},{ 103, 8056},{ 104, 8430},{ 103, 8792}
- }
- },
- {
- /*Cr qi=3 INTRA*/
- {
- { 1, 8},{ 27, 374},{ 56, 759},{ 74, 1221},
- { 83, 1696},{ 96, 2173},{ 113, 2650},{ 127, 3091},
- { 140, 3542},{ 151, 3960},{ 164, 4334},{ 188, 4764},
- { 208, 5144},{ 224, 5493},{ 250, 5841},{ 278, 6162},
- { 298, 6548},{ 334, 6816},{ 365, 7045},{ 388, 7343},
- { 419, 7613},{ 443, 7836},{ 455, 8105},{ 484, 8445}
- },
- /*Cr qi=3 INTER*/
- {
- { 76, 26},{ 65, 332},{ 53, 638},{ 45, 945},
- { 45, 1304},{ 53, 1725},{ 60, 2153},{ 68, 2584},
- { 74, 3007},{ 81, 3425},{ 87, 3844},{ 91, 4253},
- { 94, 4657},{ 95, 5061},{ 94, 5462},{ 94, 5856},
- { 95, 6250},{ 96, 6635},{ 97, 7014},{ 101, 7393},
- { 104, 7761},{ 106, 8137},{ 109, 8506},{ 111, 8823}
- }
- }
- },
- {
- {
- /*Y' qi=4 INTRA*/
- {
- { 80, -67},{ 143, 1603},{ 227, 3378},{ 344, 4861},
- { 454, 6026},{ 537, 7104},{ 626, 8089},{ 725, 9006},
- { 830, 9827},{ 950,10581},{ 1089,11270},{ 1257,11826},
- { 1409,12366},{ 1535,12912},{ 1640,13528},{ 1753,14173},
- { 1884,14756},{ 2007,15368},{ 2148,15852},{ 2307,16212},
- { 2464,16591},{ 2614,17019},{ 2785,17455},{ 2970,17963}
- },
- /*Y' qi=4 INTER*/
- {
- { 50, -145},{ 38, 1324},{ 61, 2921},{ 102, 4566},
- { 127, 6248},{ 142, 7845},{ 154, 9300},{ 163,10656},
- { 169,11965},{ 177,13246},{ 188,14495},{ 202,15702},
- { 218,16864},{ 236,18003},{ 256,19124},{ 278,20233},
- { 307,21330},{ 347,22398},{ 398,23437},{ 463,24429},
- { 546,25343},{ 649,26170},{ 767,26935},{ 888,27674}
- }
- },
- {
- /*Cb qi=4 INTRA*/
- {
- { 1, 5},{ 33, 367},{ 61, 739},{ 80, 1173},
- { 98, 1646},{ 114, 2136},{ 126, 2639},{ 137, 3124},
- { 152, 3535},{ 176, 3903},{ 194, 4307},{ 206, 4753},
- { 222, 5165},{ 242, 5508},{ 260, 5857},{ 272, 6205},
- { 294, 6559},{ 332, 6848},{ 356, 7104},{ 364, 7389},
- { 396, 7637},{ 415, 7878},{ 446, 8064},{ 506, 8294}
- },
- /*Cb qi=4 INTER*/
- {
- { 86, -15},{ 73, 308},{ 60, 627},{ 46, 967},
- { 47, 1343},{ 51, 1754},{ 56, 2183},{ 63, 2615},
- { 70, 3044},{ 79, 3459},{ 85, 3866},{ 90, 4276},
- { 94, 4686},{ 97, 5088},{ 100, 5467},{ 102, 5837},
- { 102, 6205},{ 101, 6569},{ 103, 6939},{ 104, 7317},
- { 105, 7690},{ 107, 8043},{ 107, 8394},{ 111, 8736}
- }
- },
- {
- /*Cr qi=4 INTRA*/
- {
- { 1, 7},{ 28, 375},{ 57, 759},{ 79, 1221},
- { 92, 1697},{ 105, 2174},{ 122, 2648},{ 135, 3085},
- { 146, 3530},{ 157, 3947},{ 171, 4316},{ 195, 4737},
- { 218, 5117},{ 239, 5445},{ 268, 5767},{ 295, 6074},
- { 315, 6460},{ 355, 6735},{ 392, 6933},{ 418, 7218},
- { 448, 7495},{ 471, 7688},{ 481, 7954},{ 504, 8313}
- },
- /*Cr qi=4 INTER*/
- {
- { 68, 28},{ 57, 334},{ 47, 639},{ 43, 953},
- { 48, 1314},{ 54, 1736},{ 59, 2169},{ 69, 2592},
- { 78, 3017},{ 84, 3434},{ 88, 3850},{ 92, 4260},
- { 95, 4663},{ 96, 5068},{ 95, 5455},{ 95, 5839},
- { 96, 6243},{ 97, 6626},{ 98, 7006},{ 101, 7390},
- { 104, 7755},{ 108, 8115},{ 111, 8471},{ 110, 8825}
- }
- }
- },
- {
- {
- /*Y' qi=5 INTRA*/
- {
- { 84, -69},{ 147, 1599},{ 237, 3350},{ 360, 4796},
- { 475, 5934},{ 562, 6992},{ 657, 7953},{ 765, 8837},
- { 874, 9641},{ 998,10384},{ 1146,11047},{ 1322,11572},
- { 1484,12076},{ 1617,12609},{ 1731,13203},{ 1856,13806},
- { 1995,14367},{ 2132,14936},{ 2289,15386},{ 2460,15721},
- { 2635,16066},{ 2802,16442},{ 2980,16805},{ 3177,17272}
- },
- /*Y' qi=5 INTER*/
- {
- { 38, -86},{ 37, 1349},{ 64, 2920},{ 105, 4563},
- { 129, 6236},{ 145, 7809},{ 158, 9236},{ 167,10572},
- { 174,11871},{ 182,13141},{ 195,14368},{ 212,15558},
- { 230,16706},{ 250,17828},{ 274,18944},{ 303,20041},
- { 342,21116},{ 394,22152},{ 460,23144},{ 543,24073},
- { 648,24919},{ 773,25673},{ 922,26323},{ 1084,26924}
- }
- },
- {
- /*Cb qi=5 INTRA*/
- {
- { 1, 5},{ 34, 367},{ 63, 739},{ 82, 1174},
- { 102, 1647},{ 119, 2137},{ 134, 2639},{ 145, 3121},
- { 161, 3529},{ 189, 3891},{ 207, 4290},{ 216, 4721},
- { 232, 5113},{ 258, 5455},{ 277, 5798},{ 294, 6124},
- { 322, 6427},{ 352, 6697},{ 370, 6982},{ 384, 7283},
- { 423, 7529},{ 448, 7766},{ 478, 7943},{ 527, 8151}
- },
- /*Cb qi=5 INTER*/
- {
- { 83, -49},{ 69, 284},{ 55, 611},{ 48, 961},
- { 49, 1355},{ 52, 1769},{ 58, 2191},{ 65, 2616},
- { 73, 3041},{ 80, 3460},{ 87, 3868},{ 92, 4276},
- { 95, 4682},{ 98, 5077},{ 100, 5459},{ 102, 5827},
- { 102, 6200},{ 102, 6568},{ 103, 6930},{ 103, 7303},
- { 104, 7672},{ 106, 8032},{ 106, 8391},{ 106, 8727}
- }
- },
- {
- /*Cr qi=5 INTRA*/
- {
- { 1, 8},{ 28, 375},{ 57, 760},{ 81, 1222},
- { 99, 1696},{ 111, 2175},{ 125, 2648},{ 140, 3079},
- { 152, 3520},{ 162, 3927},{ 179, 4294},{ 203, 4714},
- { 225, 5080},{ 254, 5389},{ 286, 5703},{ 318, 5997},
- { 342, 6364},{ 380, 6640},{ 416, 6837},{ 445, 7103},
- { 473, 7370},{ 497, 7562},{ 514, 7811},{ 549, 8148}
- },
- /*Cr qi=5 INTER*/
- {
- { 60, 6},{ 54, 323},{ 46, 638},{ 43, 958},
- { 45, 1329},{ 54, 1749},{ 61, 2175},{ 70, 2600},
- { 79, 3021},{ 85, 3437},{ 89, 3847},{ 93, 4254},
- { 95, 4660},{ 96, 5065},{ 95, 5456},{ 95, 5849},
- { 96, 6243},{ 96, 6621},{ 97, 6996},{ 101, 7366},
- { 104, 7722},{ 107, 8088},{ 111, 8448},{ 119, 8816}
- }
- }
- },
- {
- {
- /*Y' qi=6 INTRA*/
- {
- { 88, -69},{ 151, 1593},{ 251, 3294},{ 387, 4681},
- { 507, 5790},{ 601, 6837},{ 702, 7787},{ 813, 8648},
- { 927, 9427},{ 1059,10152},{ 1213,10787},{ 1399,11284},
- { 1568,11781},{ 1705,12312},{ 1823,12890},{ 1957,13482},
- { 2106,14036},{ 2249,14600},{ 2411,15042},{ 2588,15359},
- { 2772,15699},{ 2947,16062},{ 3127,16429},{ 3320,16849}
- },
- /*Y' qi=6 INTER*/
- {
- { 44, -80},{ 36, 1346},{ 69, 2919},{ 111, 4563},
- { 136, 6216},{ 154, 7746},{ 168, 9139},{ 178,10461},
- { 185,11747},{ 195,13007},{ 211,14229},{ 230,15408},
- { 250,16547},{ 274,17663},{ 302,18769},{ 339,19851},
- { 386,20907},{ 446,21933},{ 527,22884},{ 631,23746},
- { 760,24512},{ 914,25178},{ 1087,25758},{ 1278,26262}
- }
- },
- {
- /*Cb qi=6 INTRA*/
- {
- { 1, 4},{ 36, 367},{ 66, 739},{ 84, 1174},
- { 105, 1648},{ 126, 2139},{ 140, 2639},{ 149, 3116},
- { 164, 3523},{ 194, 3880},{ 217, 4271},{ 226, 4694},
- { 243, 5077},{ 270, 5407},{ 291, 5742},{ 310, 6061},
- { 340, 6340},{ 373, 6609},{ 394, 6890},{ 409, 7189},
- { 444, 7434},{ 469, 7652},{ 499, 7853},{ 559, 8135}
- },
- /*Cb qi=6 INTER*/
- {
- { 68, -46},{ 60, 291},{ 50, 623},{ 49, 971},
- { 50, 1357},{ 55, 1781},{ 61, 2211},{ 69, 2634},
- { 78, 3052},{ 86, 3466},{ 91, 3882},{ 95, 4292},
- { 98, 4691},{ 101, 5080},{ 102, 5458},{ 103, 5830},
- { 103, 6192},{ 104, 6554},{ 104, 6916},{ 106, 7278},
- { 108, 7641},{ 110, 8004},{ 112, 8371},{ 112, 8758}
- }
- },
- {
- /*Cr qi=6 INTRA*/
- {
- { 1, 8},{ 29, 375},{ 59, 760},{ 84, 1223},
- { 99, 1698},{ 112, 2176},{ 129, 2647},{ 143, 3076},
- { 156, 3510},{ 168, 3906},{ 189, 4269},{ 220, 4682},
- { 241, 5047},{ 266, 5342},{ 299, 5649},{ 331, 5954},
- { 357, 6309},{ 393, 6579},{ 431, 6765},{ 467, 6997},
- { 501, 7276},{ 520, 7488},{ 525, 7749},{ 548, 8146}
- },
- /*Cr qi=6 INTER*/
- {
- { 94, 31},{ 69, 335},{ 47, 641},{ 43, 967},
- { 50, 1350},{ 57, 1772},{ 65, 2197},{ 74, 2625},
- { 83, 3043},{ 90, 3454},{ 94, 3867},{ 97, 4273},
- { 98, 4671},{ 99, 5068},{ 99, 5461},{ 98, 5857},
- { 98, 6245},{ 99, 6610},{ 103, 6975},{ 105, 7345},
- { 108, 7712},{ 111, 8073},{ 113, 8415},{ 119, 8768}
- }
- }
- },
- {
- {
- /*Y' qi=7 INTRA*/
- {
- { 92, -70},{ 156, 1590},{ 261, 3267},{ 403, 4618},
- { 529, 5704},{ 628, 6730},{ 736, 7657},{ 856, 8491},
- { 978, 9246},{ 1118, 9943},{ 1281,10550},{ 1472,11028},
- { 1645,11507},{ 1793,12008},{ 1924,12565},{ 2067,13130},
- { 2229,13638},{ 2388,14160},{ 2558,14584},{ 2744,14886},
- { 2932,15194},{ 3116,15531},{ 3311,15858},{ 3538,16197}
- },
- /*Y' qi=7 INTER*/
- {
- { 43, -8},{ 36, 1351},{ 71, 2923},{ 112, 4568},
- { 138, 6201},{ 157, 7705},{ 171, 9083},{ 181,10390},
- { 189,11664},{ 202,12910},{ 220,14121},{ 241,15281},
- { 266,16401},{ 295,17507},{ 328,18608},{ 371,19677},
- { 430,20701},{ 508,21676},{ 604,22588},{ 727,23397},
- { 878,24093},{ 1055,24690},{ 1263,25151},{ 1496,25504}
- }
- },
- {
- /*Cb qi=7 INTRA*/
- {
- { 1, 5},{ 40, 367},{ 72, 740},{ 89, 1175},
- { 108, 1649},{ 129, 2140},{ 143, 2637},{ 154, 3110},
- { 169, 3507},{ 198, 3860},{ 224, 4237},{ 235, 4652},
- { 253, 5037},{ 282, 5358},{ 307, 5674},{ 329, 5986},
- { 361, 6273},{ 393, 6527},{ 419, 6777},{ 435, 7078},
- { 467, 7342},{ 495, 7554},{ 529, 7757},{ 591, 8053}
- },
- /*Cb qi=7 INTER*/
- {
- { 79, -33},{ 68, 299},{ 56, 627},{ 50, 978},
- { 51, 1366},{ 55, 1786},{ 61, 2213},{ 70, 2642},
- { 80, 3062},{ 87, 3474},{ 92, 3886},{ 96, 4292},
- { 99, 4684},{ 102, 5072},{ 103, 5450},{ 104, 5814},
- { 104, 6176},{ 104, 6538},{ 107, 6905},{ 110, 7270},
- { 110, 7625},{ 110, 7978},{ 111, 8340},{ 117, 8674}
- }
- },
- {
- /*Cr qi=7 INTRA*/
- {
- { 2, 7},{ 31, 375},{ 62, 760},{ 87, 1223},
- { 103, 1698},{ 115, 2175},{ 131, 2644},{ 147, 3066},
- { 161, 3494},{ 175, 3889},{ 199, 4250},{ 229, 4653},
- { 250, 5001},{ 279, 5275},{ 311, 5577},{ 343, 5889},
- { 376, 6227},{ 417, 6486},{ 457, 6689},{ 484, 6925},
- { 518, 7174},{ 544, 7393},{ 549, 7662},{ 577, 8050}
- },
- /*Cr qi=7 INTER*/
- {
- { 89, 22},{ 62, 332},{ 45, 641},{ 47, 976},
- { 52, 1363},{ 59, 1779},{ 67, 2203},{ 76, 2628},
- { 84, 3046},{ 90, 3460},{ 94, 3875},{ 98, 4272},
- { 99, 4666},{ 98, 5063},{ 98, 5459},{ 98, 5849},
- { 99, 6226},{ 101, 6594},{ 104, 6957},{ 109, 7324},
- { 109, 7686},{ 111, 8042},{ 115, 8379},{ 119, 8699}
- }
- }
- },
- {
- {
- /*Y' qi=8 INTRA*/
- {
- { 91, -69},{ 160, 1585},{ 274, 3226},{ 423, 4538},
- { 557, 5596},{ 664, 6595},{ 778, 7506},{ 905, 8319},
- { 1038, 9035},{ 1186, 9701},{ 1355,10292},{ 1554,10754},
- { 1739,11196},{ 1904,11639},{ 2047,12184},{ 2194,12763},
- { 2361,13256},{ 2529,13753},{ 2709,14155},{ 2902,14433},
- { 3100,14723},{ 3292,15026},{ 3489,15327},{ 3714,15705}
- },
- /*Y' qi=8 INTER*/
- {
- { 32, -157},{ 33, 1346},{ 74, 2914},{ 116, 4554},
- { 142, 6172},{ 162, 7648},{ 177, 9004},{ 186,10300},
- { 196,11570},{ 210,12808},{ 231,14001},{ 256,15150},
- { 285,16259},{ 319,17352},{ 359,18435},{ 415,19475},
- { 489,20470},{ 584,21400},{ 703,22246},{ 852,22968},
- { 1038,23556},{ 1253,24032},{ 1503,24367},{ 1778,24628}
- }
- },
- {
- /*Cb qi=8 INTRA*/
- {
- { 1, 4},{ 42, 367},{ 75, 740},{ 93, 1176},
- { 111, 1649},{ 128, 2139},{ 144, 2635},{ 157, 3103},
- { 174, 3494},{ 206, 3844},{ 233, 4207},{ 251, 4605},
- { 277, 4980},{ 304, 5284},{ 335, 5584},{ 359, 5888},
- { 393, 6152},{ 432, 6398},{ 455, 6656},{ 471, 6956},
- { 502, 7193},{ 528, 7405},{ 562, 7630},{ 603, 7922}
- },
- /*Cb qi=8 INTER*/
- {
- { 77, -37},{ 68, 299},{ 58, 632},{ 50, 991},
- { 50, 1382},{ 55, 1799},{ 62, 2226},{ 73, 2647},
- { 82, 3066},{ 90, 3480},{ 94, 3891},{ 96, 4296},
- { 98, 4687},{ 101, 5073},{ 103, 5456},{ 104, 5817},
- { 105, 6170},{ 106, 6523},{ 107, 6886},{ 108, 7250},
- { 109, 7600},{ 110, 7955},{ 111, 8305},{ 112, 8641}
- }
- },
- {
- /*Cr qi=8 INTRA*/
- {
- { 2, 7},{ 33, 375},{ 64, 760},{ 92, 1224},
- { 111, 1700},{ 122, 2173},{ 137, 2637},{ 156, 3055},
- { 172, 3476},{ 186, 3856},{ 211, 4211},{ 242, 4597},
- { 263, 4939},{ 292, 5214},{ 335, 5489},{ 376, 5772},
- { 406, 6099},{ 440, 6378},{ 483, 6578},{ 517, 6797},
- { 550, 7049},{ 571, 7283},{ 583, 7560},{ 618, 7967}
- },
- /*Cr qi=8 INTER*/
- {
- { 74, 25},{ 58, 328},{ 43, 637},{ 45, 980},
- { 51, 1371},{ 59, 1788},{ 69, 2207},{ 79, 2630},
- { 86, 3051},{ 91, 3470},{ 95, 3880},{ 97, 4280},
- { 98, 4680},{ 97, 5074},{ 96, 5456},{ 97, 5839},
- { 99, 6219},{ 101, 6583},{ 103, 6945},{ 106, 7312},
- { 110, 7671},{ 114, 8009},{ 115, 8345},{ 117, 8686}
+ { 4, 439},{ 2, 1131},{ 3, 1593},{ 6, 2130},
+ { 14, 2535},{ 17, 2786},{ 21, 3128},{ 27, 3494},
+ { 35, 3875},{ 42, 4256},{ 48, 4637},{ 53, 5019},
+ { 57, 5395},{ 61, 5777},{ 64, 6156},{ 66, 6512},
+ { 68, 6853},{ 71, 7183},{ 77, 7511},{ 81, 7841},
+ { 83, 8192},{ 88, 8510},{ 93, 8834},{ 98, 9138}
}
}
},
@@ -616,557 +93,61 @@ oc_mode_rd OC_MODE_RD[64][3][2][OC_SAD_BINS]={
{
/*Y' qi=9 INTRA*/
{
- { 104, -68},{ 164, 1580},{ 288, 3173},{ 448, 4439},
- { 587, 5485},{ 702, 6465},{ 824, 7351},{ 958, 8148},
- { 1096, 8845},{ 1253, 9480},{ 1432,10047},{ 1640,10494},
- { 1835,10926},{ 2015,11350},{ 2166,11871},{ 2321,12428},
- { 2508,12876},{ 2684,13345},{ 2866,13741},{ 3069,13991},
- { 3281,14243},{ 3487,14518},{ 3689,14813},{ 3911,15175}
+ { 76, 777},{ 178, 1995},{ 340, 3162},{ 591, 4097},
+ { 746, 4973},{ 916, 5847},{ 1047, 6687},{ 1218, 7430},
+ { 1385, 8079},{ 1566, 8685},{ 1755, 9167},{ 1992, 9572},
+ { 2164,10023},{ 2395,10270},{ 2536,10755},{ 2694,11285},
+ { 2895,11580},{ 3029,12143},{ 3182,12543},{ 3377,12800},
+ { 3525,13228},{ 3718,13463},{ 3878,13852},{ 4077,14001}
},
/*Y' qi=9 INTER*/
{
- { 47, -140},{ 34, 1348},{ 77, 2915},{ 119, 4552},
- { 145, 6150},{ 166, 7600},{ 182, 8936},{ 192,10221},
- { 203,11482},{ 220,12711},{ 244,13886},{ 274,15012},
- { 308,16111},{ 349,17190},{ 401,18244},{ 470,19257},
- { 561,20209},{ 680,21069},{ 830,21822},{ 1010,22463},
- { 1227,22971},{ 1482,23328},{ 1769,23544},{ 2077,23655}
+ { 10, 770},{ 45, 1845},{ 59, 3227},{ 99, 4708},
+ { 135, 6092},{ 164, 7425},{ 190, 8729},{ 218, 9991},
+ { 246,11234},{ 281,12427},{ 315,13573},{ 354,14678},
+ { 402,15734},{ 467,16728},{ 543,17709},{ 639,18610},
+ { 736,19503},{ 855,20312},{ 995,21033},{ 1151,21656},
+ { 1341,22130},{ 1525,22582},{ 1735,22922},{ 1922,23102}
}
},
{
/*Cb qi=9 INTRA*/
{
- { 1, 5},{ 43, 367},{ 76, 740},{ 95, 1176},
- { 114, 1649},{ 135, 2138},{ 153, 2629},{ 165, 3091},
- { 184, 3481},{ 217, 3831},{ 244, 4187},{ 260, 4572},
- { 290, 4930},{ 320, 5231},{ 351, 5521},{ 379, 5812},
- { 414, 6055},{ 452, 6307},{ 483, 6564},{ 502, 6848},
- { 525, 7115},{ 554, 7321},{ 589, 7533},{ 626, 7833}
+ { 41, 1227},{ 70, 1452},{ 102, 1697},{ 110, 1967},
+ { 134, 2326},{ 153, 2695},{ 160, 3007},{ 196, 3393},
+ { 232, 3769},{ 266, 4067},{ 297, 4376},{ 326, 4728},
+ { 351, 5040},{ 390, 5299},{ 398, 5538},{ 443, 5900},
+ { 448, 6107},{ 506, 6370},{ 519, 6636},{ 525, 6953},
+ { 567, 7177},{ 625, 7386},{ 622, 7613},{ 654, 7764}
},
/*Cb qi=9 INTER*/
{
- { 101, -43},{ 81, 298},{ 62, 637},{ 49, 989},
- { 51, 1381},{ 56, 1806},{ 65, 2231},{ 74, 2653},
- { 84, 3071},{ 91, 3482},{ 95, 3892},{ 97, 4293},
- { 99, 4684},{ 101, 5066},{ 103, 5437},{ 103, 5793},
- { 103, 6148},{ 104, 6511},{ 105, 6867},{ 107, 7221},
- { 110, 7572},{ 111, 7926},{ 112, 8283},{ 116, 8625}
+ { 7, 377},{ 2, 1102},{ 7, 1262},{ 19, 1693},
+ { 22, 1957},{ 27, 2302},{ 35, 2654},{ 43, 3034},
+ { 52, 3431},{ 58, 3826},{ 63, 4207},{ 67, 4570},
+ { 71, 4927},{ 75, 5283},{ 79, 5624},{ 82, 5944},
+ { 85, 6279},{ 88, 6616},{ 94, 6955},{ 102, 7284},
+ { 108, 7622},{ 116, 7944},{ 124, 8293},{ 133, 8568}
}
},
{
/*Cr qi=9 INTRA*/
{
- { 2, 7},{ 35, 375},{ 66, 761},{ 93, 1224},
- { 112, 1700},{ 126, 2173},{ 144, 2633},{ 165, 3047},
- { 183, 3458},{ 199, 3835},{ 224, 4191},{ 257, 4558},
- { 283, 4887},{ 309, 5176},{ 351, 5446},{ 397, 5713},
- { 433, 6017},{ 469, 6283},{ 508, 6480},{ 546, 6687},
- { 579, 6945},{ 600, 7182},{ 610, 7434},{ 623, 7793}
+ { 38, 1217},{ 61, 1473},{ 88, 1650},{ 100, 1908},
+ { 137, 2400},{ 147, 2777},{ 176, 3149},{ 205, 3433},
+ { 227, 3772},{ 249, 4092},{ 286, 4370},{ 313, 4746},
+ { 342, 5053},{ 368, 5261},{ 411, 5530},{ 442, 5859},
+ { 494, 6061},{ 526, 6340},{ 532, 6646},{ 580, 6799},
+ { 567, 7203},{ 649, 7357},{ 625, 7559},{ 660, 7709}
},
/*Cr qi=9 INTER*/
{
- { 77, 15},{ 57, 330},{ 45, 640},{ 48, 980},
- { 54, 1380},{ 61, 1802},{ 70, 2220},{ 80, 2639},
- { 87, 3057},{ 92, 3474},{ 94, 3882},{ 98, 4282},
- { 98, 4675},{ 97, 5062},{ 97, 5450},{ 98, 5829},
- { 100, 6197},{ 101, 6561},{ 104, 6927},{ 107, 7289},
- { 113, 7638},{ 117, 7978},{ 119, 8311},{ 117, 8629}
- }
- }
- },
- {
- {
- /*Y' qi=10 INTRA*/
- {
- { 101, -69},{ 168, 1574},{ 299, 3143},{ 465, 4386},
- { 610, 5410},{ 736, 6353},{ 866, 7207},{ 1006, 7982},
- { 1153, 8655},{ 1319, 9261},{ 1504, 9812},{ 1719,10248},
- { 1928,10653},{ 2116,11056},{ 2282,11550},{ 2458,12070},
- { 2654,12492},{ 2846,12923},{ 3043,13291},{ 3249,13537},
- { 3466,13764},{ 3682,13999},{ 3896,14268},{ 4145,14548}
- },
- /*Y' qi=10 INTER*/
- {
- { 48, -94},{ 34, 1355},{ 81, 2920},{ 124, 4545},
- { 151, 6113},{ 174, 7532},{ 190, 8850},{ 201,10125},
- { 214,11379},{ 235,12591},{ 264,13745},{ 299,14859},
- { 338,15948},{ 388,17008},{ 456,18029},{ 546,18988},
- { 661,19877},{ 808,20666},{ 993,21321},{ 1218,21835},
- { 1481,22203},{ 1783,22420},{ 2117,22504},{ 2469,22481}
- }
- },
- {
- /*Cb qi=10 INTRA*/
- {
- { 2, 4},{ 44, 367},{ 79, 740},{ 99, 1178},
- { 117, 1652},{ 137, 2141},{ 156, 2630},{ 170, 3089},
- { 192, 3474},{ 227, 3813},{ 259, 4157},{ 282, 4526},
- { 310, 4860},{ 342, 5140},{ 377, 5425},{ 400, 5714},
- { 436, 5952},{ 475, 6194},{ 496, 6468},{ 522, 6748},
- { 559, 6996},{ 587, 7216},{ 617, 7433},{ 673, 7678}
- },
- /*Cb qi=10 INTER*/
- {
- { 87, -37},{ 72, 301},{ 58, 636},{ 49, 995},
- { 51, 1394},{ 57, 1819},{ 66, 2241},{ 78, 2660},
- { 87, 3074},{ 93, 3482},{ 97, 3891},{ 99, 4294},
- { 101, 4678},{ 103, 5050},{ 105, 5414},{ 106, 5773},
- { 107, 6134},{ 108, 6485},{ 110, 6832},{ 113, 7187},
- { 113, 7547},{ 114, 7887},{ 117, 8230},{ 112, 8590}
- }
- },
- {
- /*Cr qi=10 INTRA*/
- {
- { 2, 7},{ 38, 375},{ 69, 761},{ 96, 1224},
- { 116, 1701},{ 131, 2175},{ 148, 2634},{ 168, 3041},
- { 190, 3439},{ 211, 3802},{ 238, 4151},{ 271, 4506},
- { 297, 4824},{ 331, 5103},{ 373, 5360},{ 415, 5632},
- { 459, 5928},{ 500, 6176},{ 535, 6386},{ 573, 6586},
- { 608, 6834},{ 629, 7079},{ 642, 7337},{ 686, 7680}
- },
- /*Cr qi=10 INTER*/
- {
- { 81, 34},{ 63, 333},{ 50, 633},{ 48, 987},
- { 53, 1397},{ 61, 1820},{ 71, 2237},{ 83, 2651},
- { 91, 3065},{ 95, 3479},{ 98, 3882},{ 100, 4279},
- { 101, 4673},{ 101, 5054},{ 100, 5429},{ 101, 5801},
- { 102, 6173},{ 104, 6541},{ 108, 6904},{ 110, 7264},
- { 114, 7609},{ 119, 7945},{ 123, 8275},{ 128, 8615}
- }
- }
- },
- {
- {
- /*Y' qi=11 INTRA*/
- {
- { 110, -66},{ 176, 1564},{ 316, 3087},{ 492, 4296},
- { 645, 5299},{ 781, 6217},{ 924, 7039},{ 1075, 7776},
- { 1232, 8421},{ 1410, 9005},{ 1607, 9532},{ 1834, 9929},
- { 2053,10300},{ 2249,10697},{ 2427,11184},{ 2619,11682},
- { 2826,12083},{ 3019,12508},{ 3225,12869},{ 3452,13064},
- { 3670,13280},{ 3890,13519},{ 4123,13750},{ 4367,14059}
- },
- /*Y' qi=11 INTER*/
- {
- { 72, -115},{ 32, 1354},{ 83, 2911},{ 126, 4534},
- { 154, 6080},{ 178, 7475},{ 194, 8779},{ 205,10047},
- { 222,11290},{ 246,12488},{ 281,13621},{ 322,14714},
- { 372,15786},{ 436,16821},{ 519,17813},{ 628,18728},
- { 770,19549},{ 950,20254},{ 1175,20800},{ 1443,21197},
- { 1752,21446},{ 2095,21555},{ 2457,21553},{ 2808,21544}
- }
- },
- {
- /*Cb qi=11 INTRA*/
- {
- { 2, 4},{ 45, 367},{ 81, 740},{ 101, 1177},
- { 121, 1650},{ 142, 2136},{ 159, 2621},{ 174, 3075},
- { 199, 3451},{ 234, 3778},{ 265, 4117},{ 297, 4473},
- { 333, 4789},{ 367, 5054},{ 402, 5319},{ 427, 5613},
- { 462, 5871},{ 503, 6107},{ 532, 6336},{ 560, 6584},
- { 601, 6842},{ 631, 7092},{ 662, 7292},{ 721, 7497}
- },
- /*Cb qi=11 INTER*/
- {
- { 117, -24},{ 93, 308},{ 69, 638},{ 52, 993},
- { 52, 1395},{ 58, 1822},{ 68, 2246},{ 80, 2665},
- { 89, 3082},{ 94, 3492},{ 96, 3900},{ 98, 4299},
- { 101, 4679},{ 103, 5047},{ 104, 5405},{ 106, 5763},
- { 106, 6120},{ 107, 6474},{ 109, 6823},{ 112, 7163},
- { 115, 7516},{ 117, 7868},{ 118, 8213},{ 119, 8561}
- }
- },
- {
- /*Cr qi=11 INTRA*/
- {
- { 2, 7},{ 40, 375},{ 75, 761},{ 100, 1224},
- { 119, 1700},{ 137, 2169},{ 154, 2622},{ 178, 3025},
- { 198, 3416},{ 220, 3770},{ 255, 4114},{ 294, 4459},
- { 323, 4756},{ 359, 5028},{ 399, 5292},{ 438, 5556},
- { 483, 5827},{ 518, 6073},{ 551, 6298},{ 598, 6501},
- { 634, 6754},{ 652, 6997},{ 670, 7211},{ 689, 7560}
- },
- /*Cr qi=11 INTER*/
- {
- { 75, 30},{ 61, 334},{ 51, 639},{ 49, 995},
- { 53, 1403},{ 62, 1821},{ 73, 2237},{ 84, 2654},
- { 91, 3070},{ 95, 3485},{ 96, 3890},{ 98, 4287},
- { 98, 4672},{ 99, 5050},{ 99, 5427},{ 100, 5798},
- { 103, 6169},{ 105, 6528},{ 107, 6881},{ 113, 7233},
- { 118, 7580},{ 121, 7916},{ 125, 8240},{ 130, 8551}
- }
- }
- },
- {
- {
- /*Y' qi=12 INTRA*/
- {
- { 104, -69},{ 182, 1557},{ 335, 3040},{ 521, 4205},
- { 684, 5178},{ 831, 6068},{ 986, 6854},{ 1151, 7559},
- { 1323, 8169},{ 1523, 8704},{ 1736, 9192},{ 1978, 9558},
- { 2213, 9908},{ 2421,10298},{ 2613,10757},{ 2822,11208},
- { 3042,11585},{ 3250,11991},{ 3474,12308},{ 3710,12480},
- { 3939,12687},{ 4174,12902},{ 4416,13102},{ 4672,13369}
- },
- /*Y' qi=12 INTER*/
- {
- { 52, -91},{ 34, 1355},{ 86, 2911},{ 129, 4518},
- { 159, 6037},{ 184, 7405},{ 200, 8694},{ 213, 9955},
- { 232,11185},{ 263,12360},{ 304,13479},{ 354,14555},
- { 415,15601},{ 495,16608},{ 601,17549},{ 738,18400},
- { 915,19136},{ 1139,19724},{ 1414,20150},{ 1731,20412},
- { 2090,20520},{ 2473,20509},{ 2851,20442},{ 3227,20328}
- }
- },
- {
- /*Cb qi=12 INTRA*/
- {
- { 1, 4},{ 46, 367},{ 85, 740},{ 109, 1178},
- { 126, 1650},{ 145, 2134},{ 165, 2617},{ 182, 3061},
- { 209, 3428},{ 245, 3749},{ 281, 4077},{ 316, 4417},
- { 354, 4718},{ 392, 4970},{ 430, 5217},{ 456, 5501},
- { 490, 5771},{ 534, 5996},{ 571, 6207},{ 600, 6458},
- { 644, 6697},{ 675, 6942},{ 707, 7151},{ 766, 7342}
- },
- /*Cb qi=12 INTER*/
- {
- { 84, -24},{ 73, 311},{ 60, 644},{ 52, 998},
- { 53, 1398},{ 60, 1825},{ 71, 2249},{ 83, 2665},
- { 90, 3081},{ 94, 3490},{ 97, 3893},{ 99, 4286},
- { 102, 4663},{ 104, 5032},{ 105, 5393},{ 106, 5751},
- { 107, 6102},{ 108, 6445},{ 111, 6788},{ 113, 7136},
- { 114, 7483},{ 117, 7828},{ 121, 8163},{ 122, 8496}
- }
- },
- {
- /*Cr qi=12 INTRA*/
- {
- { 3, 7},{ 41, 375},{ 78, 761},{ 106, 1225},
- { 124, 1700},{ 140, 2167},{ 163, 2616},{ 188, 3010},
- { 213, 3385},{ 240, 3718},{ 271, 4062},{ 309, 4406},
- { 345, 4691},{ 387, 4956},{ 430, 5212},{ 469, 5467},
- { 513, 5729},{ 554, 5970},{ 587, 6176},{ 633, 6395},
- { 673, 6659},{ 692, 6868},{ 712, 7061},{ 758, 7259}
- },
- /*Cr qi=12 INTER*/
- {
- { 73, 31},{ 59, 335},{ 48, 638},{ 50, 998},
- { 56, 1410},{ 65, 1827},{ 75, 2240},{ 85, 2657},
- { 92, 3073},{ 95, 3485},{ 97, 3888},{ 99, 4279},
- { 98, 4663},{ 99, 5042},{ 101, 5412},{ 102, 5779},
- { 105, 6142},{ 107, 6498},{ 108, 6848},{ 113, 7198},
- { 118, 7540},{ 121, 7867},{ 127, 8188},{ 132, 8508}
- }
- }
- },
- {
- {
- /*Y' qi=13 INTRA*/
- {
- { 109, -68},{ 187, 1551},{ 347, 3010},{ 541, 4153},
- { 709, 5107},{ 864, 5975},{ 1026, 6745},{ 1194, 7433},
- { 1375, 8021},{ 1581, 8550},{ 1803, 9026},{ 2054, 9371},
- { 2301, 9713},{ 2522,10082},{ 2728,10515},{ 2949,10956},
- { 3184,11297},{ 3408,11653},{ 3643,11946},{ 3886,12100},
- { 4124,12277},{ 4377,12459},{ 4632,12635},{ 4898,12861}
- },
- /*Y' qi=13 INTER*/
- {
- { 48, -78},{ 35, 1357},{ 89, 2914},{ 133, 4512},
- { 164, 6004},{ 190, 7348},{ 207, 8627},{ 222, 9881},
- { 247,11096},{ 284,12251},{ 333,13350},{ 392,14407},
- { 466,15426},{ 565,16391},{ 696,17279},{ 865,18058},
- { 1085,18689},{ 1358,19156},{ 1684,19456},{ 2050,19605},
- { 2447,19614},{ 2855,19524},{ 3243,19398},{ 3611,19201}
- }
- },
- {
- /*Cb qi=13 INTRA*/
- {
- { 2, 4},{ 47, 367},{ 86, 741},{ 108, 1179},
- { 127, 1651},{ 150, 2133},{ 173, 2611},{ 194, 3050},
- { 222, 3417},{ 262, 3733},{ 303, 4048},{ 337, 4375},
- { 378, 4657},{ 420, 4897},{ 456, 5148},{ 486, 5422},
- { 518, 5682},{ 558, 5903},{ 592, 6113},{ 623, 6372},
- { 662, 6628},{ 700, 6833},{ 751, 6989},{ 805, 7147}
- },
- /*Cb qi=13 INTER*/
- {
- { 94, -34},{ 78, 303},{ 60, 638},{ 51, 994},
- { 54, 1406},{ 61, 1836},{ 73, 2253},{ 84, 2668},
- { 92, 3082},{ 96, 3492},{ 99, 3894},{ 101, 4284},
- { 103, 4659},{ 105, 5023},{ 106, 5376},{ 108, 5726},
- { 109, 6070},{ 110, 6418},{ 113, 6765},{ 117, 7105},
- { 119, 7448},{ 122, 7784},{ 126, 8119},{ 131, 8463}
- }
- },
- {
- /*Cr qi=13 INTRA*/
- {
- { 3, 7},{ 43, 375},{ 80, 762},{ 110, 1226},
- { 131, 1701},{ 149, 2166},{ 172, 2610},{ 196, 2999},
- { 221, 3359},{ 254, 3679},{ 292, 4005},{ 332, 4329},
- { 369, 4612},{ 408, 4880},{ 456, 5139},{ 500, 5388},
- { 544, 5631},{ 581, 5877},{ 615, 6101},{ 660, 6316},
- { 692, 6594},{ 714, 6795},{ 736, 6997},{ 789, 7290}
- },
- /*Cr qi=13 INTER*/
- {
- { 73, 28},{ 61, 336},{ 46, 642},{ 50, 1003},
- { 58, 1414},{ 67, 1832},{ 79, 2245},{ 87, 2660},
- { 93, 3075},{ 97, 3484},{ 99, 3888},{ 100, 4277},
- { 100, 4651},{ 100, 5027},{ 101, 5403},{ 102, 5765},
- { 105, 6116},{ 109, 6470},{ 113, 6825},{ 119, 7163},
- { 124, 7497},{ 127, 7827},{ 131, 8137},{ 135, 8437}
- }
- }
- },
- {
- {
- /*Y' qi=14 INTRA*/
- {
- { 113, -68},{ 191, 1545},{ 358, 2981},{ 559, 4104},
- { 733, 5044},{ 896, 5890},{ 1066, 6636},{ 1241, 7304},
- { 1428, 7886},{ 1642, 8402},{ 1872, 8871},{ 2128, 9219},
- { 2380, 9547},{ 2609, 9908},{ 2825,10321},{ 3055,10728},
- { 3294,11076},{ 3523,11425},{ 3766,11689},{ 4013,11845},
- { 4254,12022},{ 4506,12209},{ 4759,12383},{ 5013,12637}
- },
- /*Y' qi=14 INTER*/
- {
- { 58, -82},{ 38, 1362},{ 93, 2914},{ 138, 4492},
- { 171, 5962},{ 198, 7289},{ 216, 8559},{ 234, 9804},
- { 263,11005},{ 306,12143},{ 363,13222},{ 434,14259},
- { 523,15255},{ 639,16188},{ 794,17021},{ 1000,17717},
- { 1262,18260},{ 1575,18645},{ 1943,18841},{ 2356,18872},
- { 2782,18802},{ 3194,18682},{ 3576,18559},{ 3923,18447}
- }
- },
- {
- /*Cb qi=14 INTRA*/
- {
- { 2, 3},{ 50, 367},{ 91, 741},{ 114, 1180},
- { 134, 1651},{ 157, 2131},{ 181, 2601},{ 208, 3028},
- { 239, 3391},{ 279, 3706},{ 322, 4000},{ 361, 4309},
- { 406, 4587},{ 445, 4822},{ 482, 5067},{ 515, 5344},
- { 546, 5612},{ 589, 5821},{ 626, 6020},{ 655, 6276},
- { 701, 6523},{ 748, 6717},{ 796, 6876},{ 815, 7151}
- },
- /*Cb qi=14 INTER*/
- {
- { 80, -43},{ 68, 301},{ 56, 644},{ 50, 1004},
- { 54, 1412},{ 63, 1836},{ 75, 2253},{ 87, 2670},
- { 94, 3083},{ 98, 3487},{ 101, 3885},{ 103, 4271},
- { 106, 4645},{ 107, 5004},{ 108, 5358},{ 109, 5705},
- { 112, 6047},{ 115, 6388},{ 118, 6731},{ 121, 7081},
- { 126, 7421},{ 129, 7747},{ 132, 8076},{ 137, 8419}
- }
- },
- {
- /*Cr qi=14 INTRA*/
- {
- { 3, 6},{ 45, 375},{ 85, 762},{ 116, 1226},
- { 138, 1700},{ 158, 2163},{ 180, 2602},{ 206, 2985},
- { 236, 3333},{ 270, 3639},{ 310, 3956},{ 359, 4258},
- { 397, 4524},{ 430, 4802},{ 478, 5068},{ 527, 5316},
- { 572, 5560},{ 613, 5802},{ 654, 6012},{ 699, 6216},
- { 734, 6489},{ 755, 6707},{ 775, 6898},{ 841, 7111}
- },
- /*Cr qi=14 INTER*/
- {
- { 78, 0},{ 59, 322},{ 46, 649},{ 51, 1016},
- { 58, 1422},{ 68, 1839},{ 81, 2253},{ 90, 2666},
- { 95, 3080},{ 98, 3486},{ 101, 3881},{ 102, 4268},
- { 102, 4644},{ 103, 5017},{ 105, 5382},{ 106, 5743},
- { 108, 6093},{ 112, 6442},{ 118, 6791},{ 124, 7130},
- { 127, 7463},{ 133, 7784},{ 138, 8085},{ 142, 8395}
- }
- }
- },
- {
- {
- /*Y' qi=15 INTRA*/
- {
- { 111, -66},{ 197, 1538},{ 370, 2949},{ 579, 4050},
- { 762, 4968},{ 933, 5798},{ 1112, 6520},{ 1299, 7161},
- { 1497, 7725},{ 1723, 8219},{ 1967, 8654},{ 2234, 8990},
- { 2499, 9302},{ 2740, 9637},{ 2968,10039},{ 3215,10414},
- { 3473,10709},{ 3721,11015},{ 3971,11270},{ 4228,11402},
- { 4487,11543},{ 4752,11707},{ 5011,11871},{ 5290,12099}
- },
- /*Y' qi=15 INTER*/
- {
- { 59, -113},{ 37, 1349},{ 95, 2904},{ 139, 4478},
- { 174, 5929},{ 201, 7244},{ 220, 8505},{ 241, 9736},
- { 275,10922},{ 327,12040},{ 395,13097},{ 477,14114},
- { 585,15071},{ 730,15947},{ 917,16714},{ 1162,17326},
- { 1468,17770},{ 1833,18029},{ 2251,18111},{ 2694,18068},
- { 3125,17968},{ 3529,17845},{ 3908,17713},{ 4260,17587}
- }
- },
- {
- /*Cb qi=15 INTRA*/
- {
- { 2, 3},{ 51, 367},{ 94, 741},{ 120, 1180},
- { 140, 1651},{ 160, 2129},{ 184, 2591},{ 213, 3010},
- { 246, 3371},{ 289, 3680},{ 335, 3969},{ 374, 4274},
- { 418, 4546},{ 460, 4783},{ 498, 5019},{ 532, 5280},
- { 565, 5553},{ 608, 5765},{ 647, 5958},{ 683, 6193},
- { 732, 6433},{ 782, 6620},{ 832, 6769},{ 848, 7027}
- },
- /*Cb qi=15 INTER*/
- {
- { 71, -52},{ 63, 296},{ 54, 644},{ 50, 1010},
- { 53, 1417},{ 64, 1837},{ 77, 2253},{ 88, 2666},
- { 95, 3079},{ 98, 3487},{ 100, 3882},{ 103, 4264},
- { 106, 4633},{ 108, 4991},{ 109, 5343},{ 109, 5693},
- { 112, 6038},{ 114, 6371},{ 119, 6709},{ 123, 7051},
- { 125, 7385},{ 130, 7716},{ 135, 8050},{ 140, 8374}
- }
- },
- {
- /*Cr qi=15 INTRA*/
- {
- { 2, 6},{ 47, 375},{ 87, 763},{ 119, 1225},
- { 143, 1699},{ 162, 2158},{ 185, 2595},{ 213, 2971},
- { 246, 3315},{ 279, 3618},{ 320, 3920},{ 372, 4210},
- { 409, 4480},{ 446, 4756},{ 496, 5017},{ 542, 5263},
- { 590, 5487},{ 639, 5721},{ 687, 5923},{ 724, 6132},
- { 753, 6417},{ 781, 6622},{ 805, 6806},{ 856, 6977}
- },
- /*Cr qi=15 INTER*/
- {
- { 71, 3},{ 61, 326},{ 52, 651},{ 50, 1017},
- { 58, 1422},{ 69, 1837},{ 82, 2251},{ 90, 2668},
- { 95, 3080},{ 98, 3484},{ 101, 3877},{ 102, 4257},
- { 102, 4632},{ 101, 5005},{ 103, 5370},{ 106, 5733},
- { 110, 6082},{ 116, 6424},{ 120, 6774},{ 124, 7106},
- { 130, 7427},{ 135, 7748},{ 141, 8052},{ 147, 8333}
- }
- }
- },
- {
- {
- /*Y' qi=16 INTRA*/
- {
- { 114, -63},{ 206, 1525},{ 396, 2887},{ 618, 3945},
- { 816, 4832},{ 1002, 5626},{ 1196, 6319},{ 1401, 6923},
- { 1616, 7458},{ 1857, 7928},{ 2121, 8334},{ 2405, 8645},
- { 2685, 8934},{ 2938, 9255},{ 3175, 9638},{ 3433, 9990},
- { 3707,10263},{ 3958,10577},{ 4218,10807},{ 4488,10906},
- { 4760,11028},{ 5037,11148},{ 5306,11286},{ 5625,11463}
- },
- /*Y' qi=16 INTER*/
- {
- { 69, -153},{ 39, 1348},{ 98, 2894},{ 144, 4448},
- { 181, 5872},{ 209, 7167},{ 228, 8422},{ 254, 9644},
- { 297,10810},{ 359,11908},{ 438,12944},{ 539,13930},
- { 672,14842},{ 850,15650},{ 1085,16318},{ 1391,16793},
- { 1769,17082},{ 2200,17198},{ 2659,17174},{ 3116,17072},
- { 3547,16948},{ 3943,16819},{ 4299,16701},{ 4611,16644}
- }
- },
- {
- /*Cb qi=16 INTRA*/
- {
- { 3, 4},{ 54, 367},{ 97, 742},{ 122, 1181},
- { 143, 1651},{ 168, 2123},{ 197, 2575},{ 226, 2985},
- { 263, 3338},{ 314, 3631},{ 367, 3903},{ 409, 4200},
- { 453, 4468},{ 491, 4703},{ 528, 4932},{ 566, 5188},
- { 601, 5459},{ 647, 5672},{ 693, 5844},{ 734, 6058},
- { 784, 6305},{ 836, 6460},{ 882, 6602},{ 905, 6891}
- },
- /*Cb qi=16 INTER*/
- {
- { 75, -64},{ 67, 292},{ 56, 645},{ 51, 1016},
- { 54, 1421},{ 66, 1842},{ 79, 2257},{ 89, 2670},
- { 95, 3082},{ 98, 3488},{ 101, 3879},{ 104, 4258},
- { 106, 4623},{ 108, 4974},{ 109, 5321},{ 113, 5664},
- { 116, 6001},{ 117, 6341},{ 123, 6677},{ 128, 7004},
- { 130, 7336},{ 136, 7671},{ 143, 7996},{ 148, 8310}
- }
- },
- {
- /*Cr qi=16 INTRA*/
- {
- { 4, 7},{ 50, 375},{ 90, 763},{ 124, 1225},
- { 148, 1698},{ 168, 2154},{ 195, 2582},{ 227, 2948},
- { 263, 3279},{ 302, 3575},{ 343, 3865},{ 394, 4137},
- { 439, 4402},{ 482, 4672},{ 533, 4925},{ 579, 5165},
- { 626, 5382},{ 675, 5616},{ 725, 5812},{ 769, 5991},
- { 810, 6242},{ 848, 6430},{ 868, 6615},{ 944, 6732}
- },
- /*Cr qi=16 INTER*/
- {
- { 78, 11},{ 62, 327},{ 49, 650},{ 50, 1025},
- { 59, 1431},{ 72, 1841},{ 83, 2253},{ 90, 2671},
- { 95, 3084},{ 98, 3487},{ 100, 3879},{ 101, 4254},
- { 102, 4625},{ 103, 4994},{ 106, 5355},{ 108, 5708},
- { 111, 6058},{ 115, 6400},{ 121, 6733},{ 128, 7058},
- { 134, 7374},{ 140, 7691},{ 146, 7993},{ 146, 8317}
- }
- }
- },
- {
- {
- /*Y' qi=17 INTRA*/
- {
- { 112, -59},{ 210, 1515},{ 409, 2850},{ 640, 3882},
- { 844, 4748},{ 1038, 5529},{ 1240, 6206},{ 1452, 6803},
- { 1676, 7330},{ 1925, 7792},{ 2194, 8201},{ 2483, 8512},
- { 2766, 8801},{ 3027, 9121},{ 3279, 9482},{ 3548, 9810},
- { 3825,10069},{ 4088,10345},{ 4362,10544},{ 4638,10644},
- { 4915,10744},{ 5196,10850},{ 5471,10981},{ 5802,11136}
- },
- /*Y' qi=17 INTER*/
- {
- { 70, -147},{ 45, 1349},{ 106, 2894},{ 155, 4425},
- { 195, 5818},{ 225, 7099},{ 247, 8348},{ 278, 9565},
- { 328,10717},{ 399,11794},{ 491,12807},{ 609,13760},
- { 766,14623},{ 984,15349},{ 1274,15902},{ 1642,16256},
- { 2082,16411},{ 2563,16409},{ 3048,16315},{ 3508,16194},
- { 3924,16064},{ 4306,15938},{ 4656,15828},{ 4966,15733}
- }
- },
- {
- /*Cb qi=17 INTRA*/
- {
- { 3, 4},{ 57, 367},{ 101, 742},{ 126, 1182},
- { 148, 1650},{ 175, 2118},{ 207, 2565},{ 241, 2966},
- { 279, 3307},{ 331, 3588},{ 389, 3845},{ 435, 4132},
- { 474, 4408},{ 517, 4641},{ 560, 4869},{ 602, 5122},
- { 638, 5389},{ 672, 5610},{ 716, 5787},{ 758, 6002},
- { 817, 6226},{ 869, 6393},{ 916, 6530},{ 950, 6799}
- },
- /*Cb qi=17 INTER*/
- {
- { 105, -65},{ 86, 288},{ 66, 638},{ 54, 1014},
- { 59, 1427},{ 71, 1844},{ 86, 2257},{ 95, 2668},
- { 100, 3075},{ 103, 3476},{ 106, 3867},{ 110, 4241},
- { 112, 4598},{ 114, 4948},{ 117, 5294},{ 121, 5633},
- { 123, 5968},{ 126, 6301},{ 131, 6637},{ 136, 6968},
- { 144, 7287},{ 152, 7606},{ 158, 7931},{ 162, 8262}
- }
- },
- {
- /*Cr qi=17 INTRA*/
- {
- { 4, 6},{ 55, 376},{ 97, 765},{ 128, 1226},
- { 152, 1696},{ 175, 2144},{ 204, 2568},{ 241, 2928},
- { 282, 3250},{ 323, 3530},{ 368, 3811},{ 420, 4089},
- { 463, 4347},{ 505, 4609},{ 562, 4860},{ 609, 5094},
- { 655, 5303},{ 709, 5535},{ 759, 5740},{ 803, 5913},
- { 844, 6153},{ 879, 6350},{ 905, 6527},{ 972, 6637}
- },
- /*Cr qi=17 INTER*/
- {
- { 88, 8},{ 68, 330},{ 51, 653},{ 54, 1028},
- { 65, 1433},{ 77, 1845},{ 89, 2257},{ 96, 2669},
- { 100, 3081},{ 102, 3481},{ 105, 3867},{ 106, 4245},
- { 108, 4613},{ 110, 4971},{ 112, 5328},{ 115, 5679},
- { 120, 6019},{ 127, 6355},{ 133, 6686},{ 140, 7007},
- { 149, 7316},{ 158, 7618},{ 166, 7924},{ 170, 8232}
+ { 5, 408},{ 3, 1197},{ 7, 1275},{ 16, 1695},
+ { 22, 1979},{ 30, 2324},{ 38, 2691},{ 47, 3071},
+ { 53, 3462},{ 59, 3857},{ 64, 4255},{ 69, 4612},
+ { 74, 4975},{ 76, 5347},{ 81, 5694},{ 86, 6020},
+ { 91, 6357},{ 96, 6687},{ 102, 7020},{ 108, 7351},
+ { 115, 7663},{ 122, 7979},{ 125, 8298},{ 136, 8576}
}
}
},
@@ -1174,557 +155,61 @@ oc_mode_rd OC_MODE_RD[64][3][2][OC_SAD_BINS]={
{
/*Y' qi=18 INTRA*/
{
- { 122, -58},{ 216, 1506},{ 425, 2815},{ 665, 3822},
- { 882, 4666},{ 1088, 5425},{ 1301, 6084},{ 1529, 6653},
- { 1766, 7162},{ 2026, 7611},{ 2312, 7987},{ 2612, 8278},
- { 2913, 8551},{ 3196, 8840},{ 3454, 9184},{ 3734, 9490},
- { 4030, 9725},{ 4305, 9973},{ 4585,10162},{ 4864,10251},
- { 5150,10324},{ 5443,10420},{ 5727,10536},{ 6053,10682}
+ { 83, 534},{ 261, 1697},{ 507, 2691},{ 852, 3418},
+ { 1127, 4094},{ 1378, 4775},{ 1626, 5442},{ 1905, 5975},
+ { 2164, 6468},{ 2445, 6913},{ 2704, 7301},{ 3001, 7631},
+ { 3285, 7934},{ 3536, 8217},{ 3837, 8489},{ 4076, 8814},
+ { 4325, 9046},{ 4590, 9313},{ 4794, 9546},{ 5062, 9751},
+ { 5285, 9963},{ 5578,10079},{ 5777,10302},{ 6054,10296}
},
/*Y' qi=18 INTER*/
{
- { 66, -143},{ 47, 1351},{ 108, 2886},{ 158, 4401},
- { 200, 5775},{ 232, 7044},{ 256, 8288},{ 292, 9493},
- { 351,10625},{ 434,11679},{ 541,12665},{ 681,13578},
- { 875,14379},{ 1136,15025},{ 1483,15475},{ 1914,15709},
- { 2399,15767},{ 2907,15699},{ 3400,15579},{ 3852,15453},
- { 4259,15332},{ 4630,15221},{ 4976,15121},{ 5294,15061}
+ { 33, 490},{ 62, 1599},{ 96, 3015},{ 164, 4378},
+ { 225, 5633},{ 285, 6831},{ 351, 7999},{ 427, 9133},
+ { 526,10181},{ 652,11141},{ 829,11991},{ 1049,12732},
+ { 1310,13367},{ 1592,13896},{ 1881,14350},{ 2207,14667},
+ { 2529,14877},{ 2873,14980},{ 3231,14949},{ 3571,14926},
+ { 3922,14816},{ 4246,14715},{ 4559,14579},{ 4778,14590}
}
},
{
/*Cb qi=18 INTRA*/
{
- { 2, 3},{ 61, 367},{ 107, 743},{ 131, 1182},
- { 155, 1648},{ 183, 2110},{ 220, 2542},{ 260, 2927},
- { 303, 3265},{ 359, 3540},{ 416, 3785},{ 462, 4063},
- { 506, 4334},{ 553, 4567},{ 595, 4797},{ 636, 5049},
- { 676, 5304},{ 717, 5516},{ 759, 5698},{ 801, 5904},
- { 861, 6133},{ 911, 6311},{ 962, 6443},{ 1021, 6645}
+ { 55, 825},{ 95, 1021},{ 131, 1276},{ 150, 1618},
+ { 180, 1958},{ 220, 2306},{ 256, 2608},{ 322, 2939},
+ { 385, 3239},{ 436, 3530},{ 475, 3771},{ 518, 4078},
+ { 557, 4348},{ 604, 4592},{ 620, 4851},{ 676, 5083},
+ { 704, 5363},{ 739, 5582},{ 788, 5782},{ 819, 6000},
+ { 893, 6158},{ 940, 6418},{ 984, 6499},{ 1035, 6596}
},
/*Cb qi=18 INTER*/
{
- { 126, 5},{ 95, 326},{ 66, 643},{ 55, 1015},
- { 60, 1427},{ 73, 1843},{ 87, 2256},{ 96, 2667},
- { 101, 3073},{ 104, 3470},{ 108, 3853},{ 111, 4226},
- { 114, 4584},{ 117, 4928},{ 119, 5274},{ 122, 5612},
- { 126, 5942},{ 130, 6271},{ 136, 6606},{ 141, 6931},
- { 148, 7247},{ 156, 7568},{ 164, 7891},{ 173, 8211}
+ { -2, 642},{ 12, 771},{ 20, 1054},{ 29, 1394},
+ { 35, 1721},{ 45, 2080},{ 53, 2450},{ 63, 2835},
+ { 73, 3225},{ 81, 3596},{ 87, 3952},{ 95, 4300},
+ { 102, 4634},{ 109, 4959},{ 115, 5283},{ 120, 5608},
+ { 130, 5931},{ 139, 6254},{ 152, 6571},{ 163, 6887},
+ { 179, 7204},{ 191, 7508},{ 198, 7834},{ 224, 8066}
}
},
{
/*Cr qi=18 INTRA*/
{
- { 4, 6},{ 59, 376},{ 104, 765},{ 133, 1226},
- { 156, 1692},{ 184, 2136},{ 218, 2548},{ 260, 2893},
- { 308, 3204},{ 348, 3481},{ 397, 3751},{ 448, 4024},
- { 490, 4281},{ 541, 4523},{ 593, 4776},{ 634, 5022},
- { 685, 5236},{ 748, 5455},{ 812, 5638},{ 856, 5818},
- { 891, 6048},{ 928, 6230},{ 961, 6405},{ 1055, 6449}
+ { 49, 780},{ 86, 986},{ 120, 1261},{ 137, 1588},
+ { 183, 1998},{ 228, 2339},{ 291, 2670},{ 334, 2938},
+ { 376, 3239},{ 412, 3522},{ 459, 3783},{ 490, 4113},
+ { 547, 4321},{ 593, 4571},{ 640, 4828},{ 675, 5137},
+ { 730, 5254},{ 774, 5524},{ 821, 5754},{ 859, 5911},
+ { 887, 6178},{ 982, 6266},{ 941, 6536},{ 996, 6630}
},
/*Cr qi=18 INTER*/
{
- { 81, 34},{ 68, 342},{ 57, 652},{ 59, 1027},
- { 67, 1439},{ 80, 1848},{ 91, 2257},{ 97, 2670},
- { 100, 3076},{ 103, 3473},{ 106, 3857},{ 108, 4231},
- { 109, 4599},{ 110, 4958},{ 113, 5307},{ 119, 5650},
- { 125, 5991},{ 130, 6325},{ 138, 6651},{ 147, 6971},
- { 153, 7278},{ 162, 7578},{ 172, 7874},{ 177, 8156}
- }
- }
- },
- {
- {
- /*Y' qi=19 INTRA*/
- {
- { 128, -55},{ 228, 1495},{ 448, 2775},{ 699, 3758},
- { 931, 4571},{ 1154, 5296},{ 1386, 5914},{ 1636, 6450},
- { 1894, 6930},{ 2177, 7342},{ 2479, 7698},{ 2792, 7976},
- { 3099, 8235},{ 3392, 8517},{ 3658, 8853},{ 3938, 9155},
- { 4242, 9371},{ 4527, 9605},{ 4810, 9781},{ 5089, 9853},
- { 5378, 9920},{ 5674,10009},{ 5972,10110},{ 6336,10196}
- },
- /*Y' qi=19 INTER*/
- {
- { 69, -147},{ 49, 1353},{ 111, 2883},{ 162, 4381},
- { 205, 5737},{ 237, 6996},{ 264, 8232},{ 307, 9421},
- { 376,10534},{ 472,11567},{ 596,12525},{ 761,13395},
- { 990,14130},{ 1298,14694},{ 1695,15053},{ 2172,15195},
- { 2696,15173},{ 3213,15075},{ 3696,14948},{ 4141,14829},
- { 4541,14721},{ 4910,14609},{ 5245,14506},{ 5536,14399}
- }
- },
- {
- /*Cb qi=19 INTRA*/
- {
- { 3, 3},{ 61, 367},{ 109, 743},{ 135, 1182},
- { 161, 1646},{ 191, 2101},{ 229, 2524},{ 273, 2898},
- { 318, 3221},{ 376, 3490},{ 436, 3731},{ 487, 3994},
- { 539, 4251},{ 584, 4485},{ 621, 4721},{ 664, 4967},
- { 709, 5225},{ 752, 5431},{ 801, 5595},{ 846, 5796},
- { 912, 6011},{ 959, 6193},{ 1015, 6321},{ 1121, 6504}
- },
- /*Cb qi=19 INTER*/
- {
- { 126, 4},{ 97, 329},{ 69, 649},{ 56, 1017},
- { 61, 1432},{ 74, 1846},{ 88, 2255},{ 98, 2663},
- { 103, 3065},{ 106, 3460},{ 110, 3844},{ 114, 4211},
- { 117, 4564},{ 120, 4911},{ 122, 5253},{ 125, 5588},
- { 129, 5916},{ 135, 6241},{ 142, 6567},{ 149, 6885},
- { 155, 7206},{ 163, 7527},{ 174, 7843},{ 188, 8145}
- }
- },
- {
- /*Cr qi=19 INTRA*/
- {
- { 5, 6},{ 61, 376},{ 106, 765},{ 135, 1225},
- { 160, 1689},{ 192, 2126},{ 229, 2531},{ 271, 2869},
- { 321, 3168},{ 370, 3433},{ 421, 3704},{ 476, 3965},
- { 520, 4212},{ 572, 4452},{ 629, 4691},{ 671, 4939},
- { 724, 5152},{ 792, 5347},{ 858, 5510},{ 895, 5696},
- { 939, 5905},{ 991, 6056},{ 1027, 6244},{ 1127, 6333}
- },
- /*Cr qi=19 INTER*/
- {
- { 80, 45},{ 66, 344},{ 55, 654},{ 56, 1030},
- { 66, 1440},{ 80, 1850},{ 91, 2259},{ 98, 2668},
- { 102, 3072},{ 104, 3466},{ 107, 3845},{ 109, 4215},
- { 110, 4578},{ 112, 4933},{ 116, 5283},{ 122, 5625},
- { 129, 5963},{ 136, 6287},{ 143, 6611},{ 151, 6927},
- { 160, 7229},{ 170, 7528},{ 181, 7818},{ 191, 8092}
- }
- }
- },
- {
- {
- /*Y' qi=20 INTRA*/
- {
- { 129, -50},{ 238, 1481},{ 469, 2728},{ 730, 3684},
- { 974, 4473},{ 1213, 5171},{ 1463, 5763},{ 1729, 6281},
- { 2002, 6744},{ 2299, 7146},{ 2613, 7492},{ 2940, 7746},
- { 3265, 7978},{ 3571, 8228},{ 3853, 8543},{ 4156, 8815},
- { 4476, 9001},{ 4775, 9218},{ 5070, 9373},{ 5352, 9446},
- { 5649, 9510},{ 5956, 9580},{ 6268, 9660},{ 6647, 9705}
- },
- /*Y' qi=20 INTER*/
- {
- { 64, -93},{ 52, 1340},{ 116, 2862},{ 170, 4344},
- { 216, 5678},{ 249, 6928},{ 281, 8155},{ 333, 9326},
- { 418,10410},{ 533,11411},{ 683,12329},{ 890,13127},
- { 1183,13750},{ 1579,14162},{ 2066,14357},{ 2611,14370},
- { 3159,14284},{ 3675,14167},{ 4142,14053},{ 4568,13953},
- { 4961,13852},{ 5320,13755},{ 5649,13675},{ 5933,13610}
- }
- },
- {
- /*Cb qi=20 INTRA*/
- {
- { 3, 3},{ 62, 367},{ 112, 743},{ 140, 1183},
- { 165, 1646},{ 196, 2099},{ 235, 2517},{ 284, 2883},
- { 334, 3198},{ 393, 3460},{ 457, 3690},{ 509, 3945},
- { 560, 4198},{ 605, 4435},{ 647, 4658},{ 699, 4888},
- { 742, 5155},{ 788, 5350},{ 835, 5517},{ 880, 5730},
- { 956, 5914},{ 1007, 6060},{ 1053, 6199},{ 1158, 6358}
- },
- /*Cb qi=20 INTER*/
- {
- { 128, -6},{ 96, 322},{ 66, 653},{ 54, 1025},
- { 63, 1431},{ 79, 1844},{ 91, 2256},{ 99, 2665},
- { 104, 3065},{ 107, 3455},{ 111, 3831},{ 115, 4189},
- { 120, 4539},{ 123, 4885},{ 126, 5219},{ 130, 5548},
- { 135, 5876},{ 141, 6199},{ 149, 6519},{ 156, 6837},
- { 166, 7153},{ 179, 7468},{ 189, 7784},{ 194, 8102}
- }
- },
- {
- /*Cr qi=20 INTRA*/
- {
- { 4, 6},{ 63, 376},{ 109, 765},{ 139, 1225},
- { 165, 1689},{ 199, 2124},{ 239, 2523},{ 285, 2852},
- { 340, 3140},{ 388, 3398},{ 438, 3662},{ 499, 3914},
- { 547, 4155},{ 596, 4392},{ 652, 4634},{ 699, 4877},
- { 759, 5074},{ 824, 5257},{ 883, 5428},{ 936, 5589},
- { 986, 5790},{ 1030, 5960},{ 1074, 6119},{ 1172, 6191}
- },
- /*Cr qi=20 INTER*/
- {
- { 92, 40},{ 70, 345},{ 55, 658},{ 57, 1034},
- { 69, 1441},{ 84, 1852},{ 94, 2261},{ 98, 2669},
- { 102, 3074},{ 105, 3465},{ 107, 3841},{ 110, 4206},
- { 112, 4562},{ 116, 4915},{ 121, 5260},{ 127, 5591},
- { 134, 5920},{ 142, 6246},{ 153, 6562},{ 163, 6870},
- { 173, 7170},{ 186, 7463},{ 198, 7746},{ 199, 8030}
- }
- }
- },
- {
- {
- /*Y' qi=21 INTRA*/
- {
- { 130, -51},{ 244, 1476},{ 483, 2705},{ 756, 3635},
- { 1013, 4396},{ 1266, 5070},{ 1530, 5647},{ 1806, 6153},
- { 2093, 6600},{ 2411, 6976},{ 2739, 7299},{ 3079, 7534},
- { 3422, 7744},{ 3738, 7987},{ 4032, 8274},{ 4348, 8533},
- { 4675, 8721},{ 4989, 8909},{ 5291, 9051},{ 5577, 9111},
- { 5879, 9163},{ 6190, 9228},{ 6506, 9286},{ 6899, 9295}
- },
- /*Y' qi=21 INTER*/
- {
- { 64, -56},{ 55, 1341},{ 119, 2859},{ 174, 4324},
- { 223, 5640},{ 258, 6880},{ 295, 8096},{ 359, 9246},
- { 460,10302},{ 595,11268},{ 778,12131},{ 1032,12857},
- { 1387,13385},{ 1850,13683},{ 2399,13774},{ 2976,13729},
- { 3527,13619},{ 4034,13504},{ 4492,13401},{ 4912,13291},
- { 5298,13209},{ 5648,13137},{ 5974,13046},{ 6308,12977}
- }
- },
- {
- /*Cb qi=21 INTRA*/
- {
- { 4, 3},{ 64, 367},{ 114, 743},{ 141, 1183},
- { 166, 1645},{ 201, 2092},{ 247, 2502},{ 299, 2856},
- { 352, 3158},{ 413, 3412},{ 480, 3642},{ 536, 3893},
- { 588, 4137},{ 637, 4367},{ 678, 4598},{ 725, 4834},
- { 774, 5083},{ 827, 5269},{ 883, 5420},{ 930, 5633},
- { 999, 5829},{ 1057, 5959},{ 1113, 6082},{ 1200, 6265}
- },
- /*Cb qi=21 INTER*/
- {
- { 109, -8},{ 84, 321},{ 62, 654},{ 54, 1028},
- { 64, 1434},{ 80, 1847},{ 92, 2259},{ 100, 2664},
- { 105, 3060},{ 109, 3445},{ 114, 3815},{ 118, 4172},
- { 122, 4519},{ 126, 4861},{ 128, 5194},{ 133, 5520},
- { 139, 5847},{ 146, 6169},{ 155, 6487},{ 166, 6801},
- { 177, 7114},{ 189, 7423},{ 201, 7729},{ 208, 8035}
- }
- },
- {
- /*Cr qi=21 INTRA*/
- {
- { 4, 6},{ 64, 377},{ 111, 766},{ 144, 1225},
- { 174, 1683},{ 206, 2114},{ 248, 2506},{ 302, 2824},
- { 357, 3099},{ 404, 3357},{ 455, 3622},{ 519, 3867},
- { 573, 4098},{ 625, 4331},{ 683, 4571},{ 733, 4802},
- { 793, 4994},{ 863, 5173},{ 926, 5337},{ 978, 5492},
- { 1030, 5685},{ 1079, 5856},{ 1126, 6027},{ 1217, 6159}
- },
- /*Cr qi=21 INTER*/
- {
- { 82, 29},{ 67, 341},{ 55, 660},{ 58, 1038},
- { 71, 1443},{ 85, 1851},{ 95, 2258},{ 99, 2666},
- { 103, 3069},{ 107, 3456},{ 110, 3826},{ 112, 4188},
- { 114, 4544},{ 118, 4891},{ 124, 5231},{ 132, 5567},
- { 139, 5894},{ 148, 6210},{ 159, 6520},{ 171, 6822},
- { 185, 7111},{ 196, 7403},{ 209, 7691},{ 225, 7945}
- }
- }
- },
- {
- {
- /*Y' qi=22 INTRA*/
- {
- { 128, -45},{ 254, 1463},{ 507, 2662},{ 794, 3562},
- { 1070, 4292},{ 1340, 4941},{ 1622, 5492},{ 1920, 5968},
- { 2229, 6387},{ 2565, 6742},{ 2911, 7047},{ 3263, 7264},
- { 3615, 7464},{ 3944, 7689},{ 4258, 7950},{ 4591, 8183},
- { 4934, 8347},{ 5259, 8517},{ 5573, 8634},{ 5870, 8683},
- { 6186, 8723},{ 6508, 8762},{ 6831, 8801},{ 7232, 8830}
- },
- /*Y' qi=22 INTER*/
- {
- { 77, -48},{ 57, 1343},{ 122, 2853},{ 180, 4299},
- { 231, 5597},{ 269, 6826},{ 314, 8025},{ 393, 9150},
- { 512,10179},{ 673,11103},{ 894,11908},{ 1207,12542},
- { 1635,12956},{ 2166,13148},{ 2755,13167},{ 3345,13088},
- { 3895,12966},{ 4386,12848},{ 4832,12746},{ 5252,12647},
- { 5634,12563},{ 5978,12497},{ 6299,12412},{ 6633,12338}
- }
- },
- {
- /*Cb qi=22 INTRA*/
- {
- { 4, 3},{ 66, 367},{ 122, 744},{ 153, 1182},
- { 177, 1640},{ 213, 2080},{ 263, 2475},{ 323, 2811},
- { 382, 3103},{ 451, 3346},{ 522, 3568},{ 581, 3814},
- { 633, 4054},{ 674, 4288},{ 719, 4523},{ 768, 4756},
- { 823, 4979},{ 883, 5162},{ 937, 5325},{ 996, 5510},
- { 1070, 5687},{ 1129, 5807},{ 1193, 5929},{ 1311, 6099}
- },
- /*Cb qi=22 INTER*/
- {
- { 107, -5},{ 83, 322},{ 61, 653},{ 55, 1030},
- { 66, 1436},{ 81, 1845},{ 94, 2253},{ 102, 2656},
- { 107, 3050},{ 111, 3435},{ 115, 3804},{ 119, 4158},
- { 124, 4501},{ 128, 4835},{ 132, 5164},{ 138, 5490},
- { 146, 5812},{ 154, 6128},{ 163, 6442},{ 174, 6754},
- { 188, 7060},{ 205, 7361},{ 219, 7662},{ 233, 7953}
- }
- },
- {
- /*Cr qi=22 INTRA*/
- {
- { 4, 6},{ 67, 378},{ 118, 767},{ 151, 1222},
- { 182, 1675},{ 221, 2097},{ 269, 2476},{ 329, 2774},
- { 389, 3039},{ 444, 3292},{ 500, 3545},{ 560, 3788},
- { 615, 4020},{ 671, 4251},{ 734, 4484},{ 781, 4712},
- { 850, 4887},{ 925, 5060},{ 981, 5229},{ 1031, 5369},
- { 1092, 5549},{ 1148, 5715},{ 1200, 5861},{ 1291, 5943}
- },
- /*Cr qi=22 INTER*/
- {
- { 88, 34},{ 69, 340},{ 57, 657},{ 60, 1039},
- { 73, 1445},{ 87, 1851},{ 96, 2257},{ 100, 2662},
- { 103, 3058},{ 107, 3442},{ 111, 3812},{ 115, 4172},
- { 118, 4524},{ 123, 4864},{ 129, 5199},{ 136, 5531},
- { 145, 5855},{ 156, 6168},{ 170, 6468},{ 184, 6765},
- { 193, 7066},{ 207, 7353},{ 222, 7628},{ 230, 7900}
- }
- }
- },
- {
- {
- /*Y' qi=23 INTRA*/
- {
- { 126, -40},{ 257, 1458},{ 521, 2636},{ 825, 3501},
- { 1111, 4207},{ 1391, 4842},{ 1684, 5385},{ 1992, 5858},
- { 2311, 6277},{ 2653, 6626},{ 3005, 6929},{ 3366, 7134},
- { 3729, 7311},{ 4071, 7526},{ 4396, 7770},{ 4734, 7986},
- { 5086, 8131},{ 5421, 8286},{ 5735, 8404},{ 6033, 8456},
- { 6357, 8486},{ 6682, 8525},{ 7003, 8573},{ 7387, 8604}
- },
- /*Y' qi=23 INTER*/
- {
- { 64, -57},{ 60, 1345},{ 124, 2853},{ 185, 4284},
- { 239, 5565},{ 282, 6783},{ 336, 7967},{ 429, 9069},
- { 568,10063},{ 758,10943},{ 1028,11679},{ 1407,12216},
- { 1909,12520},{ 2502,12616},{ 3126,12573},{ 3722,12461},
- { 4258,12344},{ 4742,12236},{ 5185,12136},{ 5590,12052},
- { 5970,11980},{ 6315,11901},{ 6631,11826},{ 6954,11769}
- }
- },
- {
- /*Cb qi=23 INTRA*/
- {
- { 3, 3},{ 70, 367},{ 124, 744},{ 151, 1182},
- { 181, 1637},{ 222, 2071},{ 276, 2460},{ 343, 2785},
- { 403, 3072},{ 468, 3317},{ 542, 3534},{ 605, 3773},
- { 659, 4009},{ 703, 4243},{ 747, 4479},{ 795, 4707},
- { 852, 4923},{ 908, 5105},{ 972, 5254},{ 1043, 5423},
- { 1118, 5594},{ 1172, 5731},{ 1240, 5853},{ 1365, 6005}
- },
- /*Cb qi=23 INTER*/
- {
- { 109, -10},{ 87, 325},{ 63, 650},{ 57, 1031},
- { 67, 1439},{ 83, 1847},{ 96, 2253},{ 103, 2652},
- { 109, 3041},{ 114, 3421},{ 117, 3789},{ 122, 4141},
- { 128, 4480},{ 134, 4811},{ 139, 5138},{ 144, 5463},
- { 152, 5781},{ 161, 6096},{ 174, 6404},{ 185, 6714},
- { 198, 7023},{ 216, 7320},{ 233, 7621},{ 245, 7935}
- }
- },
- {
- /*Cr qi=23 INTRA*/
- {
- { 5, 6},{ 70, 379},{ 122, 768},{ 155, 1222},
- { 187, 1671},{ 231, 2088},{ 283, 2459},{ 346, 2750},
- { 411, 3009},{ 465, 3261},{ 523, 3509},{ 585, 3746},
- { 639, 3980},{ 695, 4219},{ 754, 4449},{ 803, 4671},
- { 873, 4840},{ 953, 5001},{ 1015, 5156},{ 1071, 5286},
- { 1137, 5464},{ 1191, 5629},{ 1249, 5782},{ 1359, 5885}
- },
- /*Cr qi=23 INTER*/
- {
- { 84, 29},{ 69, 343},{ 58, 660},{ 62, 1041},
- { 75, 1448},{ 88, 1853},{ 97, 2258},{ 102, 2659},
- { 105, 3050},{ 108, 3430},{ 113, 3799},{ 116, 4155},
- { 121, 4505},{ 126, 4845},{ 132, 5176},{ 142, 5504},
- { 153, 5826},{ 165, 6133},{ 180, 6432},{ 197, 6722},
- { 212, 7005},{ 226, 7287},{ 244, 7555},{ 258, 7828}
- }
- }
- },
- {
- {
- /*Y' qi=24 INTRA*/
- {
- { 125, -34},{ 268, 1444},{ 547, 2590},{ 866, 3422},
- { 1172, 4098},{ 1476, 4702},{ 1790, 5222},{ 2117, 5678},
- { 2453, 6080},{ 2811, 6418},{ 3178, 6700},{ 3552, 6895},
- { 3928, 7055},{ 4286, 7243},{ 4627, 7477},{ 4981, 7674},
- { 5344, 7802},{ 5683, 7944},{ 6009, 8043},{ 6313, 8082},
- { 6633, 8111},{ 6959, 8151},{ 7280, 8197},{ 7660, 8221}
- },
- /*Y' qi=24 INTER*/
- {
- { 62, -63},{ 68, 1345},{ 134, 2840},{ 199, 4245},
- { 256, 5508},{ 304, 6715},{ 371, 7880},{ 484, 8950},
- { 652, 9899},{ 892,10709},{ 1238,11334},{ 1722,11722},
- { 2326,11875},{ 2983,11864},{ 3616,11783},{ 4189,11678},
- { 4707,11570},{ 5178,11476},{ 5617,11395},{ 6017,11319},
- { 6380,11252},{ 6720,11185},{ 7044,11126},{ 7377,11118}
- }
- },
- {
- /*Cb qi=24 INTRA*/
- {
- { 4, 3},{ 75, 367},{ 132, 745},{ 159, 1182},
- { 187, 1634},{ 230, 2061},{ 289, 2439},{ 361, 2753},
- { 425, 3034},{ 492, 3278},{ 566, 3490},{ 630, 3720},
- { 686, 3956},{ 732, 4190},{ 777, 4420},{ 829, 4637},
- { 894, 4840},{ 958, 5012},{ 1023, 5155},{ 1090, 5326},
- { 1165, 5502},{ 1226, 5622},{ 1299, 5717},{ 1408, 5887}
- },
- /*Cb qi=24 INTER*/
- {
- { 110, 35},{ 92, 337},{ 70, 651},{ 63, 1033},
- { 74, 1440},{ 91, 1846},{ 102, 2248},{ 109, 2644},
- { 114, 3031},{ 120, 3404},{ 127, 3762},{ 133, 4109},
- { 138, 4445},{ 144, 4772},{ 151, 5094},{ 159, 5411},
- { 168, 5728},{ 180, 6037},{ 195, 6338},{ 210, 6640},
- { 227, 6944},{ 249, 7236},{ 272, 7528},{ 299, 7809}
- }
- },
- {
- /*Cr qi=24 INTRA*/
- {
- { 5, 6},{ 72, 380},{ 124, 770},{ 158, 1222},
- { 195, 1668},{ 240, 2079},{ 297, 2438},{ 367, 2715},
- { 433, 2966},{ 488, 3218},{ 549, 3467},{ 609, 3701},
- { 664, 3935},{ 728, 4165},{ 792, 4379},{ 845, 4586},
- { 917, 4744},{ 995, 4898},{ 1063, 5049},{ 1120, 5187},
- { 1190, 5359},{ 1249, 5522},{ 1304, 5672},{ 1397, 5806}
- },
- /*Cr qi=24 INTER*/
- {
- { 91, 56},{ 73, 353},{ 61, 664},{ 66, 1045},
- { 80, 1449},{ 95, 1851},{ 103, 2250},{ 107, 2648},
- { 111, 3038},{ 116, 3413},{ 120, 3774},{ 124, 4128},
- { 130, 4471},{ 138, 4802},{ 145, 5130},{ 156, 5453},
- { 171, 5764},{ 187, 6061},{ 204, 6355},{ 220, 6643},
- { 238, 6923},{ 254, 7204},{ 275, 7475},{ 289, 7752}
- }
- }
- },
- {
- {
- /*Y' qi=25 INTRA*/
- {
- { 125, -28},{ 285, 1426},{ 582, 2540},{ 917, 3351},
- { 1244, 3997},{ 1569, 4570},{ 1903, 5071},{ 2258, 5498},
- { 2626, 5866},{ 3002, 6182},{ 3382, 6448},{ 3770, 6623},
- { 4162, 6760},{ 4528, 6934},{ 4882, 7144},{ 5249, 7328},
- { 5610, 7453},{ 5958, 7578},{ 6291, 7672},{ 6597, 7708},
- { 6928, 7715},{ 7258, 7737},{ 7575, 7781},{ 7950, 7829}
- },
- /*Y' qi=25 INTER*/
- {
- { 64, -16},{ 72, 1348},{ 139, 2832},{ 206, 4218},
- { 268, 5465},{ 322, 6659},{ 403, 7803},{ 540, 8838},
- { 747, 9734},{ 1044,10465},{ 1473,10981},{ 2048,11249},
- { 2717,11311},{ 3397,11257},{ 4025,11161},{ 4589,11052},
- { 5099,10947},{ 5560,10859},{ 5989,10786},{ 6389,10717},
- { 6753,10652},{ 7078,10592},{ 7389,10535},{ 7697,10460}
- }
- },
- {
- /*Cb qi=25 INTRA*/
- {
- { 3, 3},{ 78, 368},{ 133, 745},{ 159, 1180},
- { 193, 1627},{ 242, 2046},{ 304, 2411},{ 381, 2714},
- { 456, 2983},{ 527, 3224},{ 598, 3437},{ 667, 3655},
- { 726, 3888},{ 776, 4117},{ 826, 4333},{ 883, 4543},
- { 954, 4727},{ 1019, 4878},{ 1095, 5014},{ 1171, 5187},
- { 1255, 5342},{ 1319, 5458},{ 1396, 5546},{ 1536, 5678}
- },
- /*Cb qi=25 INTER*/
- {
- { 117, 32},{ 89, 342},{ 67, 660},{ 64, 1037},
- { 77, 1441},{ 93, 1845},{ 105, 2243},{ 113, 2633},
- { 120, 3016},{ 125, 3387},{ 131, 3739},{ 137, 4080},
- { 144, 4416},{ 152, 4741},{ 160, 5057},{ 169, 5369},
- { 180, 5680},{ 193, 5990},{ 209, 6294},{ 227, 6594},
- { 249, 6888},{ 269, 7180},{ 294, 7467},{ 317, 7768}
- }
- },
- {
- /*Cr qi=25 INTRA*/
- {
- { 6, 6},{ 74, 380},{ 129, 770},{ 165, 1220},
- { 201, 1658},{ 253, 2061},{ 315, 2410},{ 388, 2676},
- { 462, 2920},{ 523, 3166},{ 584, 3404},{ 647, 3637},
- { 701, 3870},{ 769, 4086},{ 838, 4296},{ 898, 4491},
- { 980, 4627},{ 1065, 4759},{ 1126, 4920},{ 1187, 5058},
- { 1283, 5180},{ 1347, 5332},{ 1404, 5475},{ 1527, 5534}
- },
- /*Cr qi=25 INTER*/
- {
- { 92, 41},{ 75, 347},{ 64, 664},{ 70, 1045},
- { 85, 1448},{ 98, 1849},{ 105, 2245},{ 110, 2637},
- { 115, 3023},{ 120, 3395},{ 126, 3753},{ 131, 4102},
- { 136, 4439},{ 145, 4768},{ 156, 5094},{ 168, 5410},
- { 184, 5717},{ 203, 6010},{ 221, 6300},{ 239, 6577},
- { 262, 6847},{ 282, 7123},{ 303, 7390},{ 322, 7665}
- }
- }
- },
- {
- {
- /*Y' qi=26 INTRA*/
- {
- { 130, -24},{ 292, 1423},{ 594, 2525},{ 943, 3307},
- { 1289, 3921},{ 1633, 4467},{ 1991, 4943},{ 2368, 5348},
- { 2753, 5696},{ 3148, 5991},{ 3545, 6247},{ 3942, 6415},
- { 4342, 6535},{ 4726, 6690},{ 5093, 6883},{ 5466, 7047},
- { 5840, 7159},{ 6202, 7274},{ 6545, 7351},{ 6855, 7375},
- { 7186, 7384},{ 7517, 7416},{ 7840, 7447},{ 8238, 7450}
- },
- /*Y' qi=26 INTER*/
- {
- { 52, 16},{ 75, 1336},{ 143, 2815},{ 213, 4191},
- { 278, 5427},{ 339, 6611},{ 436, 7734},{ 600, 8732},
- { 843, 9579},{ 1195,10243},{ 1702,10660},{ 2355,10825},
- { 3070,10820},{ 3755,10743},{ 4372,10643},{ 4925,10538},
- { 5426,10440},{ 5882,10354},{ 6296,10290},{ 6686,10224},
- { 7049,10163},{ 7380,10113},{ 7672,10062},{ 7937,10021}
- }
- },
- {
- /*Cb qi=26 INTRA*/
- {
- { 4, 3},{ 79, 368},{ 138, 745},{ 167, 1180},
- { 200, 1623},{ 252, 2034},{ 322, 2389},{ 403, 2682},
- { 480, 2941},{ 558, 3176},{ 631, 3393},{ 700, 3608},
- { 766, 3825},{ 819, 4046},{ 868, 4265},{ 926, 4472},
- { 1002, 4645},{ 1070, 4800},{ 1151, 4924},{ 1242, 5063},
- { 1325, 5221},{ 1393, 5338},{ 1464, 5431},{ 1595, 5559}
- },
- /*Cb qi=26 INTER*/
- {
- { 98, 33},{ 83, 343},{ 65, 662},{ 65, 1037},
- { 80, 1437},{ 96, 1839},{ 107, 2238},{ 115, 2628},
- { 122, 3007},{ 128, 3373},{ 134, 3722},{ 142, 4060},
- { 149, 4390},{ 158, 4713},{ 167, 5029},{ 178, 5341},
- { 191, 5647},{ 208, 5948},{ 227, 6244},{ 247, 6539},
- { 269, 6833},{ 295, 7114},{ 328, 7388},{ 369, 7658}
- }
- },
- {
- /*Cr qi=26 INTRA*/
- {
- { 5, 6},{ 75, 380},{ 133, 769},{ 172, 1217},
- { 212, 1652},{ 266, 2048},{ 333, 2384},{ 412, 2643},
- { 490, 2880},{ 552, 3124},{ 616, 3365},{ 681, 3594},
- { 739, 3816},{ 810, 4024},{ 880, 4224},{ 945, 4405},
- { 1029, 4538},{ 1114, 4674},{ 1183, 4822},{ 1254, 4946},
- { 1346, 5063},{ 1417, 5201},{ 1478, 5345},{ 1597, 5411}
- },
- /*Cr qi=26 INTER*/
- {
- { 97, 29},{ 75, 342},{ 62, 667},{ 70, 1047},
- { 87, 1447},{ 100, 1846},{ 107, 2242},{ 113, 2633},
- { 118, 3016},{ 123, 3382},{ 128, 3737},{ 135, 4082},
- { 142, 4417},{ 151, 4746},{ 162, 5066},{ 176, 5377},
- { 194, 5679},{ 217, 5963},{ 239, 6244},{ 260, 6522},
- { 284, 6789},{ 309, 7052},{ 335, 7313},{ 355, 7582}
+ { 0, 741},{ 9, 743},{ 16, 1034},{ 26, 1385},
+ { 39, 1741},{ 48, 2090},{ 56, 2459},{ 64, 2850},
+ { 72, 3242},{ 81, 3622},{ 89, 3980},{ 98, 4323},
+ { 104, 4667},{ 110, 5005},{ 118, 5337},{ 126, 5675},
+ { 137, 5998},{ 146, 6311},{ 156, 6621},{ 170, 6914},
+ { 181, 7205},{ 196, 7490},{ 203, 7779},{ 232, 8012}
}
}
},
@@ -1732,557 +217,61 @@ oc_mode_rd OC_MODE_RD[64][3][2][OC_SAD_BINS]={
{
/*Y' qi=27 INTRA*/
{
- { 118, -10},{ 308, 1404},{ 630, 2473},{ 997, 3227},
- { 1360, 3819},{ 1719, 4354},{ 2086, 4829},{ 2470, 5233},
- { 2863, 5576},{ 3267, 5870},{ 3677, 6117},{ 4085, 6268},
- { 4499, 6376},{ 4888, 6521},{ 5257, 6705},{ 5638, 6865},
- { 6020, 6962},{ 6394, 7056},{ 6744, 7130},{ 7051, 7158},
- { 7386, 7164},{ 7717, 7185},{ 8042, 7209},{ 8444, 7206}
+ { 121, 378},{ 379, 1464},{ 810, 2335},{ 1447, 2725},
+ { 1851, 3194},{ 2311, 3655},{ 2747, 4081},{ 3211, 4393},
+ { 3640, 4672},{ 4056, 4933},{ 4427, 5150},{ 4842, 5259},
+ { 5220, 5381},{ 5584, 5443},{ 5925, 5648},{ 6233, 5783},
+ { 6547, 5944},{ 6905, 6056},{ 7203, 6181},{ 7526, 6207},
+ { 7800, 6330},{ 8175, 6312},{ 8415, 6437},{ 8705, 6459}
},
/*Y' qi=27 INTER*/
{
- { 54, 19},{ 77, 1333},{ 147, 2806},{ 221, 4166},
- { 290, 5390},{ 360, 6564},{ 474, 7665},{ 664, 8630},
- { 949, 9423},{ 1370,10002},{ 1958,10323},{ 2670,10414},
- { 3406,10375},{ 4086,10285},{ 4691,10182},{ 5233,10085},
- { 5724, 9994},{ 6169, 9918},{ 6582, 9863},{ 6962, 9813},
- { 7316, 9759},{ 7645, 9707},{ 7948, 9660},{ 8262, 9623}
+ { 48, 199},{ 90, 1458},{ 167, 2824},{ 291, 4050},
+ { 434, 5144},{ 638, 6133},{ 901, 7011},{ 1249, 7743},
+ { 1726, 8280},{ 2317, 8616},{ 2957, 8789},{ 3561, 8896},
+ { 4126, 8936},{ 4646, 8933},{ 5115, 8931},{ 5579, 8890},
+ { 6008, 8804},{ 6411, 8744},{ 6774, 8646},{ 7153, 8549},
+ { 7475, 8462},{ 7790, 8372},{ 8069, 8280},{ 8299, 8278}
}
},
{
/*Cb qi=27 INTRA*/
{
- { 4, 3},{ 79, 368},{ 137, 745},{ 166, 1180},
- { 200, 1622},{ 253, 2030},{ 324, 2381},{ 407, 2671},
- { 487, 2925},{ 567, 3156},{ 640, 3372},{ 712, 3580},
- { 782, 3792},{ 833, 4015},{ 887, 4227},{ 954, 4422},
- { 1031, 4592},{ 1103, 4738},{ 1187, 4856},{ 1280, 4990},
- { 1371, 5135},{ 1442, 5244},{ 1520, 5321},{ 1684, 5398}
+ { 75, 612},{ 117, 751},{ 160, 1068},{ 195, 1406},
+ { 240, 1741},{ 305, 2066},{ 364, 2359},{ 454, 2639},
+ { 538, 2899},{ 609, 3149},{ 664, 3384},{ 730, 3625},
+ { 785, 3860},{ 836, 4094},{ 872, 4312},{ 948, 4507},
+ { 1023, 4677},{ 1081, 4843},{ 1165, 4985},{ 1238, 5092},
+ { 1316, 5235},{ 1418, 5345},{ 1430, 5478},{ 1505, 5538}
},
/*Cb qi=27 INTER*/
{
- { 113, 20},{ 90, 338},{ 66, 661},{ 67, 1034},
- { 82, 1438},{ 97, 1842},{ 108, 2238},{ 115, 2624},
- { 123, 3000},{ 130, 3361},{ 138, 3708},{ 146, 4040},
- { 155, 4367},{ 164, 4688},{ 174, 4999},{ 186, 5306},
- { 203, 5609},{ 222, 5908},{ 243, 6202},{ 268, 6494},
- { 295, 6781},{ 326, 7058},{ 367, 7319},{ 420, 7551}
+ { 16, 637},{ 13, 634},{ 32, 869},{ 46, 1230},
+ { 55, 1583},{ 67, 1950},{ 79, 2320},{ 93, 2690},
+ { 107, 3052},{ 120, 3399},{ 133, 3733},{ 146, 4054},
+ { 162, 4367},{ 175, 4679},{ 191, 4984},{ 211, 5285},
+ { 232, 5581},{ 252, 5875},{ 276, 6155},{ 305, 6433},
+ { 333, 6706},{ 364, 6967},{ 398, 7244},{ 474, 7394}
}
},
{
/*Cr qi=27 INTRA*/
{
- { 5, 6},{ 75, 380},{ 133, 770},{ 173, 1217},
- { 214, 1650},{ 268, 2040},{ 337, 2375},{ 418, 2631},
- { 496, 2862},{ 558, 3104},{ 625, 3346},{ 692, 3571},
- { 753, 3786},{ 825, 3989},{ 896, 4182},{ 969, 4352},
- { 1059, 4479},{ 1144, 4614},{ 1212, 4757},{ 1284, 4871},
- { 1380, 4982},{ 1457, 5125},{ 1528, 5267},{ 1651, 5346}
+ { 64, 632},{ 107, 763},{ 147, 1054},{ 176, 1411},
+ { 255, 1770},{ 324, 2079},{ 411, 2359},{ 475, 2621},
+ { 545, 2880},{ 590, 3158},{ 647, 3425},{ 709, 3648},
+ { 766, 3878},{ 831, 4082},{ 911, 4260},{ 960, 4493},
+ { 1042, 4558},{ 1115, 4760},{ 1200, 4852},{ 1280, 4950},
+ { 1327, 5186},{ 1445, 5157},{ 1443, 5431},{ 1518, 5493}
},
/*Cr qi=27 INTER*/
{
- { 92, 24},{ 74, 341},{ 61, 669},{ 71, 1049},
- { 88, 1448},{ 100, 1849},{ 107, 2243},{ 113, 2631},
- { 119, 3010},{ 125, 3373},{ 131, 3723},{ 137, 4064},
- { 146, 4396},{ 159, 4720},{ 172, 5033},{ 189, 5340},
- { 210, 5636},{ 233, 5920},{ 256, 6197},{ 282, 6465},
- { 310, 6730},{ 332, 7000},{ 359, 7259},{ 385, 7515}
- }
- }
- },
- {
- {
- /*Y' qi=28 INTRA*/
- {
- { 116, -8},{ 314, 1400},{ 640, 2458},{ 1013, 3197},
- { 1386, 3768},{ 1762, 4279},{ 2151, 4733},{ 2558, 5117},
- { 2970, 5442},{ 3393, 5714},{ 3820, 5935},{ 4243, 6069},
- { 4671, 6161},{ 5074, 6289},{ 5456, 6457},{ 5849, 6598},
- { 6244, 6689},{ 6632, 6777},{ 6984, 6833},{ 7294, 6855},
- { 7625, 6862},{ 7961, 6875},{ 8302, 6890},{ 8720, 6883}
- },
- /*Y' qi=28 INTER*/
- {
- { 54, 8},{ 81, 1333},{ 154, 2793},{ 231, 4138},
- { 304, 5352},{ 384, 6512},{ 519, 7585},{ 743, 8508},
- { 1082, 9236},{ 1587, 9717},{ 2267, 9928},{ 3034, 9944},
- { 3775, 9878},{ 4438, 9786},{ 5031, 9686},{ 5563, 9601},
- { 6042, 9523},{ 6481, 9456},{ 6890, 9405},{ 7266, 9356},
- { 7614, 9313},{ 7933, 9265},{ 8238, 9220},{ 8545, 9193}
- }
- },
- {
- /*Cb qi=28 INTRA*/
- {
- { 3, 3},{ 80, 368},{ 138, 746},{ 168, 1179},
- { 208, 1615},{ 268, 2014},{ 345, 2354},{ 432, 2637},
- { 515, 2884},{ 595, 3108},{ 669, 3323},{ 745, 3533},
- { 818, 3740},{ 876, 3953},{ 932, 4160},{ 1003, 4349},
- { 1088, 4501},{ 1154, 4648},{ 1241, 4768},{ 1349, 4889},
- { 1441, 5023},{ 1524, 5113},{ 1611, 5187},{ 1783, 5283}
- },
- /*Cb qi=28 INTER*/
- {
- { 117, 29},{ 91, 341},{ 65, 663},{ 68, 1038},
- { 85, 1440},{ 100, 1841},{ 110, 2234},{ 119, 2616},
- { 127, 2985},{ 135, 3342},{ 142, 3685},{ 151, 4015},
- { 162, 4337},{ 174, 4652},{ 186, 4960},{ 201, 5264},
- { 218, 5567},{ 239, 5863},{ 266, 6149},{ 295, 6434},
- { 328, 6715},{ 371, 6976},{ 409, 7239},{ 460, 7477}
- }
- },
- {
- /*Cr qi=28 INTRA*/
- {
- { 6, 7},{ 79, 381},{ 138, 771},{ 178, 1215},
- { 222, 1644},{ 285, 2026},{ 359, 2347},{ 441, 2597},
- { 521, 2827},{ 588, 3066},{ 655, 3303},{ 725, 3523},
- { 791, 3728},{ 870, 3920},{ 950, 4103},{ 1030, 4265},
- { 1121, 4388},{ 1198, 4520},{ 1266, 4659},{ 1356, 4759},
- { 1461, 4865},{ 1540, 4993},{ 1619, 5115},{ 1786, 5160}
- },
- /*Cr qi=28 INTER*/
- {
- { 96, 18},{ 78, 340},{ 66, 672},{ 74, 1051},
- { 90, 1450},{ 103, 1845},{ 110, 2235},{ 116, 2619},
- { 122, 2995},{ 129, 3356},{ 137, 3702},{ 146, 4038},
- { 156, 4365},{ 168, 4684},{ 182, 4995},{ 203, 5297},
- { 227, 5588},{ 253, 5866},{ 282, 6131},{ 311, 6394},
- { 339, 6664},{ 366, 6918},{ 400, 7171},{ 424, 7450}
- }
- }
- },
- {
- {
- /*Y' qi=29 INTRA*/
- {
- { 112, 7},{ 334, 1382},{ 681, 2410},{ 1081, 3112},
- { 1484, 3650},{ 1894, 4128},{ 2316, 4547},{ 2749, 4905},
- { 3188, 5208},{ 3634, 5458},{ 4079, 5666},{ 4517, 5791},
- { 4952, 5870},{ 5359, 5983},{ 5754, 6137},{ 6165, 6268},
- { 6568, 6351},{ 6958, 6423},{ 7320, 6471},{ 7638, 6490},
- { 7979, 6490},{ 8313, 6499},{ 8651, 6517},{ 9085, 6499}
- },
- /*Y' qi=29 INTER*/
- {
- { 55, 15},{ 85, 1336},{ 160, 2780},{ 242, 4104},
- { 323, 5302},{ 418, 6443},{ 586, 7480},{ 859, 8342},
- { 1278, 8982},{ 1888, 9347},{ 2658, 9457},{ 3457, 9425},
- { 4192, 9343},{ 4842, 9247},{ 5417, 9162},{ 5935, 9086},
- { 6404, 9011},{ 6841, 8952},{ 7241, 8907},{ 7609, 8867},
- { 7953, 8832},{ 8267, 8792},{ 8562, 8740},{ 8836, 8701}
- }
- },
- {
- /*Cb qi=29 INTRA*/
- {
- { 5, 3},{ 84, 368},{ 144, 746},{ 176, 1175},
- { 219, 1604},{ 285, 1991},{ 372, 2318},{ 462, 2591},
- { 546, 2833},{ 628, 3058},{ 704, 3274},{ 788, 3473},
- { 870, 3664},{ 935, 3865},{ 995, 4059},{ 1072, 4239},
- { 1167, 4388},{ 1248, 4518},{ 1334, 4634},{ 1429, 4765},
- { 1536, 4884},{ 1628, 4964},{ 1716, 5038},{ 1885, 5128}
- },
- /*Cb qi=29 INTER*/
- {
- { 126, 25},{ 95, 340},{ 69, 662},{ 71, 1039},
- { 88, 1440},{ 102, 1839},{ 113, 2227},{ 122, 2604},
- { 132, 2969},{ 141, 3320},{ 151, 3659},{ 161, 3985},
- { 172, 4301},{ 186, 4612},{ 200, 4917},{ 219, 5213},
- { 241, 5509},{ 265, 5800},{ 296, 6081},{ 329, 6360},
- { 369, 6633},{ 414, 6899},{ 465, 7148},{ 520, 7387}
- }
- },
- {
- /*Cr qi=29 INTRA*/
- {
- { 6, 7},{ 82, 382},{ 142, 772},{ 185, 1211},
- { 233, 1632},{ 303, 2000},{ 388, 2306},{ 475, 2550},
- { 556, 2779},{ 627, 3007},{ 707, 3237},{ 778, 3459},
- { 843, 3654},{ 927, 3834},{ 1012, 4012},{ 1101, 4152},
- { 1197, 4262},{ 1275, 4399},{ 1359, 4511},{ 1455, 4596},
- { 1562, 4708},{ 1644, 4833},{ 1719, 4954},{ 1888, 4988}
- },
- /*Cr qi=29 INTER*/
- {
- { 101, 28},{ 81, 343},{ 67, 673},{ 75, 1053},
- { 93, 1450},{ 106, 1844},{ 113, 2230},{ 119, 2610},
- { 127, 2980},{ 135, 3334},{ 143, 3676},{ 153, 4007},
- { 165, 4330},{ 180, 4645},{ 201, 4951},{ 224, 5243},
- { 253, 5522},{ 284, 5794},{ 314, 6060},{ 345, 6322},
- { 381, 6578},{ 419, 6828},{ 455, 7073},{ 495, 7316}
- }
- }
- },
- {
- {
- /*Y' qi=30 INTRA*/
- {
- { 112, 8},{ 335, 1380},{ 682, 2401},{ 1083, 3093},
- { 1489, 3619},{ 1902, 4092},{ 2332, 4511},{ 2777, 4865},
- { 3231, 5156},{ 3693, 5394},{ 4153, 5585},{ 4605, 5689},
- { 5049, 5764},{ 5468, 5871},{ 5875, 6004},{ 6295, 6120},
- { 6706, 6201},{ 7099, 6273},{ 7461, 6311},{ 7785, 6320},
- { 8128, 6322},{ 8469, 6331},{ 8806, 6342},{ 9220, 6338}
- },
- /*Y' qi=30 INTER*/
- {
- { 58, 8},{ 90, 1340},{ 169, 2771},{ 257, 4079},
- { 345, 5266},{ 459, 6387},{ 660, 7383},{ 990, 8180},
- { 1496, 8726},{ 2203, 8992},{ 3029, 9038},{ 3833, 8984},
- { 4549, 8900},{ 5183, 8813},{ 5745, 8735},{ 6250, 8674},
- { 6715, 8619},{ 7138, 8565},{ 7529, 8528},{ 7899, 8495},
- { 8234, 8465},{ 8550, 8429},{ 8856, 8395},{ 9160, 8374}
- }
- },
- {
- /*Cb qi=30 INTRA*/
- {
- { 7, 3},{ 88, 369},{ 149, 747},{ 185, 1175},
- { 232, 1599},{ 304, 1976},{ 392, 2293},{ 486, 2557},
- { 573, 2797},{ 656, 3027},{ 735, 3243},{ 819, 3442},
- { 903, 3629},{ 966, 3828},{ 1025, 4027},{ 1105, 4204},
- { 1201, 4343},{ 1282, 4469},{ 1379, 4575},{ 1486, 4689},
- { 1588, 4813},{ 1678, 4900},{ 1767, 4969},{ 1911, 5080}
- },
- /*Cb qi=30 INTER*/
- {
- { 120, 23},{ 96, 336},{ 72, 661},{ 75, 1043},
- { 91, 1441},{ 105, 1837},{ 117, 2221},{ 127, 2592},
- { 137, 2953},{ 148, 3301},{ 159, 3635},{ 170, 3959},
- { 184, 4271},{ 199, 4578},{ 216, 4879},{ 238, 5175},
- { 262, 5466},{ 294, 5750},{ 332, 6027},{ 373, 6298},
- { 421, 6559},{ 473, 6805},{ 526, 7053},{ 587, 7298}
- }
- },
- {
- /*Cr qi=30 INTRA*/
- {
- { 10, 7},{ 89, 384},{ 147, 773},{ 192, 1211},
- { 245, 1627},{ 322, 1984},{ 412, 2280},{ 501, 2520},
- { 583, 2750},{ 654, 2982},{ 736, 3207},{ 810, 3419},
- { 873, 3614},{ 957, 3794},{ 1048, 3965},{ 1139, 4102},
- { 1237, 4208},{ 1327, 4328},{ 1408, 4448},{ 1496, 4545},
- { 1604, 4652},{ 1699, 4760},{ 1780, 4877},{ 1937, 4942}
- },
- /*Cr qi=30 INTER*/
- {
- { 115, 26},{ 89, 342},{ 70, 672},{ 79, 1055},
- { 96, 1451},{ 108, 1841},{ 116, 2222},{ 124, 2599},
- { 132, 2965},{ 141, 3316},{ 151, 3655},{ 163, 3984},
- { 178, 4301},{ 197, 4609},{ 219, 4909},{ 247, 5195},
- { 280, 5469},{ 317, 5734},{ 351, 5991},{ 383, 6248},
- { 423, 6500},{ 467, 6744},{ 502, 6995},{ 558, 7226}
- }
- }
- },
- {
- {
- /*Y' qi=31 INTRA*/
- {
- { 116, 20},{ 359, 1361},{ 732, 2350},{ 1162, 3010},
- { 1597, 3507},{ 2042, 3950},{ 2503, 4339},{ 2974, 4670},
- { 3446, 4951},{ 3922, 5179},{ 4394, 5357},{ 4858, 5454},
- { 5313, 5519},{ 5734, 5626},{ 6154, 5755},{ 6585, 5859},
- { 7004, 5928},{ 7408, 5998},{ 7775, 6039},{ 8102, 6048},
- { 8442, 6051},{ 8790, 6054},{ 9136, 6057},{ 9554, 6041}
- },
- /*Y' qi=31 INTER*/
- {
- { 53, 12},{ 90, 1340},{ 169, 2765},{ 259, 4062},
- { 353, 5236},{ 483, 6340},{ 713, 7305},{ 1086, 8059},
- { 1651, 8548},{ 2423, 8751},{ 3288, 8754},{ 4106, 8674},
- { 4827, 8572},{ 5451, 8482},{ 6007, 8407},{ 6514, 8344},
- { 6970, 8282},{ 7397, 8225},{ 7795, 8193},{ 8159, 8161},
- { 8498, 8120},{ 8814, 8093},{ 9127, 8066},{ 9432, 8040}
- }
- },
- {
- /*Cb qi=31 INTRA*/
- {
- { 7, 3},{ 88, 369},{ 149, 746},{ 185, 1173},
- { 234, 1595},{ 308, 1967},{ 399, 2278},{ 494, 2537},
- { 583, 2774},{ 669, 2997},{ 755, 3204},{ 847, 3390},
- { 936, 3569},{ 1008, 3759},{ 1078, 3942},{ 1162, 4104},
- { 1262, 4238},{ 1352, 4364},{ 1442, 4470},{ 1557, 4567},
- { 1676, 4674},{ 1759, 4781},{ 1850, 4853},{ 2043, 4897}
- },
- /*Cb qi=31 INTER*/
- {
- { 121, 23},{ 96, 335},{ 72, 660},{ 74, 1043},
- { 90, 1440},{ 105, 1834},{ 116, 2217},{ 127, 2586},
- { 138, 2945},{ 148, 3293},{ 159, 3626},{ 172, 3945},
- { 185, 4256},{ 202, 4559},{ 223, 4856},{ 245, 5150},
- { 272, 5440},{ 306, 5719},{ 346, 5989},{ 391, 6253},
- { 443, 6511},{ 510, 6743},{ 583, 6965},{ 651, 7182}
- }
- },
- {
- /*Cr qi=31 INTRA*/
- {
- { 10, 7},{ 88, 384},{ 147, 773},{ 192, 1209},
- { 247, 1622},{ 326, 1974},{ 417, 2262},{ 509, 2500},
- { 596, 2726},{ 670, 2949},{ 754, 3170},{ 836, 3370},
- { 912, 3548},{ 999, 3724},{ 1093, 3888},{ 1198, 4000},
- { 1304, 4095},{ 1384, 4230},{ 1470, 4347},{ 1577, 4422},
- { 1696, 4513},{ 1798, 4620},{ 1869, 4746},{ 1991, 4798}
- },
- /*Cr qi=31 INTER*/
- {
- { 113, 32},{ 88, 345},{ 69, 674},{ 79, 1055},
- { 96, 1451},{ 108, 1839},{ 115, 2218},{ 123, 2592},
- { 132, 2957},{ 141, 3308},{ 151, 3643},{ 163, 3968},
- { 179, 4285},{ 200, 4590},{ 225, 4886},{ 254, 5169},
- { 291, 5436},{ 330, 5696},{ 368, 5951},{ 409, 6200},
- { 452, 6448},{ 493, 6695},{ 536, 6940},{ 571, 7204}
- }
- }
- },
- {
- {
- /*Y' qi=32 INTRA*/
- {
- { 123, 26},{ 370, 1356},{ 756, 2321},{ 1211, 2944},
- { 1674, 3408},{ 2148, 3826},{ 2639, 4193},{ 3138, 4504},
- { 3634, 4765},{ 4133, 4973},{ 4625, 5137},{ 5101, 5225},
- { 5567, 5274},{ 6002, 5363},{ 6437, 5482},{ 6885, 5566},
- { 7312, 5625},{ 7723, 5686},{ 8101, 5721},{ 8429, 5732},
- { 8769, 5728},{ 9120, 5726},{ 9472, 5723},{ 9918, 5700}
- },
- /*Y' qi=32 INTER*/
- {
- { 54, -3},{ 95, 1343},{ 179, 2750},{ 276, 4027},
- { 382, 5185},{ 543, 6256},{ 830, 7161},{ 1301, 7815},
- { 2003, 8172},{ 2883, 8266},{ 3779, 8217},{ 4578, 8127},
- { 5274, 8035},{ 5886, 7952},{ 6430, 7887},{ 6929, 7835},
- { 7380, 7779},{ 7796, 7737},{ 8190, 7705},{ 8552, 7672},
- { 8896, 7640},{ 9210, 7612},{ 9510, 7589},{ 9746, 7552}
- }
- },
- {
- /*Cb qi=32 INTRA*/
- {
- { 6, 3},{ 89, 369},{ 153, 746},{ 193, 1167},
- { 247, 1577},{ 330, 1935},{ 429, 2236},{ 528, 2494},
- { 620, 2732},{ 712, 2948},{ 801, 3146},{ 898, 3325},
- { 999, 3489},{ 1078, 3664},{ 1155, 3832},{ 1251, 3985},
- { 1360, 4115},{ 1451, 4236},{ 1549, 4338},{ 1667, 4433},
- { 1797, 4522},{ 1891, 4613},{ 1989, 4687},{ 2162, 4776}
- },
- /*Cb qi=32 INTER*/
- {
- { 116, -1},{ 98, 321},{ 80, 656},{ 80, 1042},
- { 96, 1438},{ 110, 1827},{ 122, 2205},{ 133, 2570},
- { 144, 2925},{ 157, 3268},{ 170, 3597},{ 185, 3911},
- { 202, 4216},{ 221, 4516},{ 244, 4809},{ 273, 5096},
- { 308, 5376},{ 350, 5644},{ 401, 5907},{ 459, 6160},
- { 520, 6401},{ 592, 6630},{ 676, 6837},{ 758, 7050}
- }
- },
- {
- /*Cr qi=32 INTRA*/
- {
- { 12, 7},{ 91, 386},{ 152, 773},{ 201, 1202},
- { 261, 1603},{ 347, 1942},{ 447, 2223},{ 540, 2460},
- { 626, 2684},{ 711, 2901},{ 801, 3115},{ 887, 3312},
- { 969, 3480},{ 1068, 3633},{ 1176, 3779},{ 1283, 3885},
- { 1392, 3969},{ 1485, 4090},{ 1573, 4206},{ 1686, 4274},
- { 1813, 4354},{ 1911, 4459},{ 2004, 4563},{ 2162, 4590}
- },
- /*Cr qi=32 INTER*/
- {
- { 129, 5},{ 98, 334},{ 75, 673},{ 84, 1055},
- { 101, 1448},{ 113, 1832},{ 121, 2206},{ 129, 2577},
- { 140, 2937},{ 151, 3282},{ 163, 3614},{ 179, 3932},
- { 198, 4240},{ 221, 4542},{ 252, 4830},{ 290, 5102},
- { 329, 5364},{ 373, 5618},{ 420, 5864},{ 468, 6105},
- { 513, 6351},{ 564, 6587},{ 624, 6810},{ 697, 7017}
- }
- }
- },
- {
- {
- /*Y' qi=33 INTRA*/
- {
- { 115, 36},{ 388, 1338},{ 791, 2289},{ 1258, 2899},
- { 1732, 3352},{ 2220, 3760},{ 2730, 4117},{ 3244, 4415},
- { 3751, 4662},{ 4261, 4858},{ 4766, 5012},{ 5249, 5094},
- { 5719, 5141},{ 6159, 5225},{ 6597, 5333},{ 7044, 5416},
- { 7474, 5472},{ 7893, 5531},{ 8268, 5570},{ 8591, 5580},
- { 8931, 5578},{ 9283, 5579},{ 9634, 5582},{10067, 5560}
- },
- /*Y' qi=33 INTER*/
- {
- { 65, -14},{ 102, 1345},{ 190, 2736},{ 294, 3999},
- { 411, 5146},{ 597, 6192},{ 934, 7045},{ 1488, 7622},
- { 2281, 7895},{ 3213, 7937},{ 4108, 7871},{ 4883, 7784},
- { 5556, 7709},{ 6150, 7643},{ 6685, 7585},{ 7176, 7539},
- { 7620, 7502},{ 8034, 7466},{ 8427, 7435},{ 8793, 7409},
- { 9136, 7386},{ 9446, 7364},{ 9743, 7339},{10025, 7303}
- }
- },
- {
- /*Cb qi=33 INTRA*/
- {
- { 5, 3},{ 92, 369},{ 159, 746},{ 203, 1163},
- { 263, 1564},{ 353, 1911},{ 458, 2204},{ 557, 2460},
- { 650, 2697},{ 744, 2913},{ 836, 3110},{ 934, 3292},
- { 1036, 3454},{ 1125, 3616},{ 1204, 3781},{ 1298, 3932},
- { 1410, 4058},{ 1507, 4170},{ 1606, 4265},{ 1725, 4358},
- { 1853, 4445},{ 1955, 4535},{ 2067, 4597},{ 2258, 4663}
- },
- /*Cb qi=33 INTER*/
- {
- { 109, 37},{ 94, 343},{ 81, 662},{ 85, 1042},
- { 102, 1436},{ 116, 1823},{ 128, 2195},{ 141, 2554},
- { 154, 2906},{ 167, 3246},{ 183, 3570},{ 202, 3881},
- { 220, 4185},{ 241, 4482},{ 268, 4772},{ 302, 5053},
- { 341, 5328},{ 388, 5592},{ 446, 5846},{ 507, 6096},
- { 581, 6328},{ 670, 6534},{ 762, 6731},{ 842, 6922}
- }
- },
- {
- /*Cr qi=33 INTRA*/
- {
- { 11, 7},{ 93, 387},{ 158, 774},{ 211, 1197},
- { 278, 1589},{ 372, 1917},{ 475, 2191},{ 569, 2429},
- { 658, 2655},{ 744, 2868},{ 835, 3083},{ 926, 3271},
- { 1010, 3430},{ 1110, 3586},{ 1224, 3724},{ 1336, 3826},
- { 1449, 3908},{ 1547, 4021},{ 1636, 4136},{ 1751, 4200},
- { 1886, 4277},{ 1977, 4384},{ 2070, 4474},{ 2232, 4510}
- },
- /*Cr qi=33 INTER*/
- {
- { 77, 9},{ 90, 347},{ 80, 674},{ 91, 1053},
- { 107, 1444},{ 119, 1825},{ 127, 2196},{ 137, 2563},
- { 149, 2919},{ 161, 3259},{ 176, 3588},{ 194, 3905},
- { 217, 4209},{ 246, 4504},{ 280, 4786},{ 320, 5055},
- { 364, 5316},{ 409, 5565},{ 460, 5804},{ 517, 6039},
- { 578, 6264},{ 640, 6489},{ 701, 6721},{ 772, 6948}
- }
- }
- },
- {
- {
- /*Y' qi=34 INTRA*/
- {
- { 124, 40},{ 401, 1333},{ 823, 2262},{ 1318, 2842},
- { 1823, 3265},{ 2339, 3650},{ 2872, 3991},{ 3405, 4274},
- { 3926, 4513},{ 4448, 4704},{ 4961, 4845},{ 5450, 4921},
- { 5925, 4971},{ 6372, 5053},{ 6813, 5160},{ 7264, 5242},
- { 7704, 5291},{ 8124, 5346},{ 8500, 5382},{ 8831, 5384},
- { 9178, 5380},{ 9525, 5387},{ 9869, 5389},{10310, 5356}
- },
- /*Y' qi=34 INTER*/
- {
- { 64, -17},{ 101, 1344},{ 190, 2730},{ 299, 3981},
- { 430, 5110},{ 648, 6127},{ 1036, 6933},{ 1664, 7445},
- { 2535, 7652},{ 3504, 7653},{ 4402, 7572},{ 5173, 7479},
- { 5843, 7400},{ 6441, 7334},{ 6976, 7280},{ 7464, 7231},
- { 7910, 7189},{ 8332, 7157},{ 8730, 7125},{ 9091, 7103},
- { 9422, 7086},{ 9753, 7061},{10067, 7036},{10316, 7029}
- }
- },
- {
- /*Cb qi=34 INTRA*/
- {
- { 5, 3},{ 91, 369},{ 158, 746},{ 204, 1162},
- { 266, 1561},{ 358, 1903},{ 466, 2189},{ 570, 2439},
- { 665, 2671},{ 765, 2880},{ 864, 3069},{ 970, 3238},
- { 1079, 3392},{ 1174, 3545},{ 1265, 3693},{ 1360, 3841},
- { 1471, 3968},{ 1572, 4083},{ 1675, 4181},{ 1804, 4255},
- { 1939, 4332},{ 2048, 4411},{ 2155, 4484},{ 2339, 4584}
- },
- /*Cb qi=34 INTER*/
- {
- { 99, 44},{ 92, 345},{ 82, 661},{ 86, 1043},
- { 101, 1436},{ 116, 1821},{ 128, 2191},{ 140, 2549},
- { 154, 2898},{ 168, 3235},{ 185, 3556},{ 203, 3865},
- { 224, 4166},{ 248, 4457},{ 278, 4741},{ 315, 5021},
- { 361, 5289},{ 416, 5546},{ 483, 5792},{ 559, 6025},
- { 651, 6237},{ 752, 6432},{ 849, 6626},{ 967, 6790}
- }
- },
- {
- /*Cr qi=34 INTRA*/
- {
- { 11, 7},{ 93, 387},{ 158, 773},{ 212, 1195},
- { 282, 1584},{ 378, 1909},{ 483, 2179},{ 578, 2414},
- { 671, 2633},{ 766, 2837},{ 866, 3038},{ 960, 3223},
- { 1049, 3376},{ 1158, 3520},{ 1285, 3644},{ 1400, 3740},
- { 1505, 3828},{ 1616, 3928},{ 1713, 4030},{ 1820, 4104},
- { 1957, 4185},{ 2063, 4280},{ 2160, 4355},{ 2320, 4341}
- },
- /*Cr qi=34 INTER*/
- {
- { 78, 11},{ 89, 347},{ 79, 674},{ 90, 1053},
- { 106, 1444},{ 117, 1823},{ 127, 2192},{ 137, 2558},
- { 149, 2912},{ 163, 3249},{ 178, 3574},{ 197, 3888},
- { 222, 4189},{ 252, 4481},{ 293, 4755},{ 341, 5013},
- { 386, 5268},{ 436, 5512},{ 498, 5743},{ 563, 5970},
- { 622, 6200},{ 694, 6415},{ 776, 6622},{ 871, 6818}
- }
- }
- },
- {
- {
- /*Y' qi=35 INTRA*/
- {
- { 116, 51},{ 433, 1312},{ 881, 2221},{ 1406, 2771},
- { 1948, 3156},{ 2511, 3501},{ 3085, 3811},{ 3654, 4066},
- { 4212, 4273},{ 4763, 4444},{ 5298, 4572},{ 5799, 4638},
- { 6285, 4678},{ 6747, 4746},{ 7203, 4838},{ 7673, 4905},
- { 8124, 4950},{ 8552, 5003},{ 8938, 5027},{ 9275, 5026},
- { 9628, 5019},{ 9981, 5024},{10331, 5030},{10795, 5000}
- },
- /*Y' qi=35 INTER*/
- {
- { 71, -10},{ 108, 1348},{ 203, 2710},{ 325, 3938},
- { 485, 5040},{ 766, 6000},{ 1267, 6706},{ 2048, 7089},
- { 3037, 7191},{ 4032, 7146},{ 4903, 7061},{ 5648, 6977},
- { 6301, 6912},{ 6884, 6857},{ 7413, 6812},{ 7898, 6775},
- { 8342, 6739},{ 8764, 6710},{ 9160, 6688},{ 9519, 6668},
- { 9859, 6646},{10190, 6625},{10492, 6612},{10755, 6595}
- }
- },
- {
- /*Cb qi=35 INTRA*/
- {
- { 6, 3},{ 95, 369},{ 164, 746},{ 214, 1156},
- { 287, 1542},{ 390, 1869},{ 504, 2143},{ 611, 2388},
- { 712, 2613},{ 822, 2811},{ 937, 2987},{ 1055, 3147},
- { 1174, 3285},{ 1286, 3420},{ 1386, 3560},{ 1488, 3698},
- { 1604, 3814},{ 1714, 3916},{ 1825, 4008},{ 1958, 4088},
- { 2101, 4159},{ 2224, 4226},{ 2339, 4292},{ 2538, 4383}
- },
- /*Cb qi=35 INTER*/
- {
- { 98, 41},{ 90, 348},{ 86, 665},{ 92, 1042},
- { 108, 1432},{ 122, 1812},{ 136, 2175},{ 151, 2528},
- { 165, 2872},{ 182, 3202},{ 202, 3516},{ 225, 3819},
- { 251, 4112},{ 281, 4398},{ 320, 4675},{ 367, 4944},
- { 421, 5204},{ 493, 5450},{ 579, 5679},{ 672, 5892},
- { 785, 6082},{ 906, 6258},{ 1026, 6432},{ 1153, 6592}
- }
- },
- {
- /*Cr qi=35 INTRA*/
- {
- { 12, 7},{ 98, 388},{ 166, 773},{ 226, 1187},
- { 306, 1563},{ 411, 1874},{ 524, 2134},{ 622, 2365},
- { 721, 2577},{ 826, 2768},{ 947, 2946},{ 1066, 3106},
- { 1163, 3250},{ 1274, 3395},{ 1417, 3508},{ 1539, 3590},
- { 1639, 3671},{ 1754, 3765},{ 1865, 3855},{ 1979, 3921},
- { 2127, 3998},{ 2249, 4085},{ 2346, 4172},{ 2473, 4210}
- },
- /*Cr qi=35 INTER*/
- {
- { 86, 12},{ 94, 354},{ 85, 677},{ 96, 1052},
- { 113, 1439},{ 125, 1811},{ 135, 2177},{ 147, 2537},
- { 160, 2884},{ 177, 3215},{ 195, 3535},{ 219, 3842},
- { 252, 4133},{ 292, 4413},{ 339, 4680},{ 396, 4928},
- { 455, 5169},{ 514, 5408},{ 588, 5626},{ 672, 5835},
- { 750, 6051},{ 837, 6257},{ 943, 6442},{ 1073, 6595}
+ { 12, 688},{ 11, 660},{ 28, 869},{ 46, 1227},
+ { 60, 1598},{ 68, 1954},{ 79, 2318},{ 93, 2693},
+ { 108, 3054},{ 123, 3406},{ 138, 3748},{ 151, 4078},
+ { 165, 4400},{ 180, 4716},{ 197, 5024},{ 217, 5314},
+ { 243, 5599},{ 275, 5866},{ 301, 6128},{ 327, 6394},
+ { 352, 6644},{ 375, 6894},{ 376, 7180},{ 458, 7334}
}
}
},
@@ -2290,557 +279,61 @@ oc_mode_rd OC_MODE_RD[64][3][2][OC_SAD_BINS]={
{
/*Y' qi=36 INTRA*/
{
- { 116, 52},{ 432, 1312},{ 881, 2215},{ 1407, 2759},
- { 1948, 3140},{ 2511, 3484},{ 3090, 3789},{ 3672, 4036},
- { 4243, 4236},{ 4803, 4397},{ 5346, 4517},{ 5856, 4581},
- { 6350, 4614},{ 6821, 4675},{ 7286, 4763},{ 7754, 4832},
- { 8201, 4875},{ 8631, 4922},{ 9015, 4948},{ 9351, 4945},
- { 9706, 4941},{10061, 4948},{10408, 4949},{10878, 4923}
+ { 156, 263},{ 484, 1370},{ 1174, 2110},{ 1914, 2456},
+ { 2601, 2695},{ 3221, 2984},{ 3865, 3284},{ 4450, 3530},
+ { 4979, 3739},{ 5470, 3928},{ 5905, 4080},{ 6375, 4200},
+ { 6761, 4373},{ 7175, 4429},{ 7615, 4616},{ 8069, 4687},
+ { 8417, 4820},{ 8813, 4908},{ 9211, 5001},{ 9508, 5073},
+ { 9888, 5133},{10209, 5140},{10529, 5196},{10830, 5173}
},
/*Y' qi=36 INTER*/
{
- { 63, -16},{ 114, 1332},{ 216, 2690},{ 343, 3914},
- { 515, 5009},{ 829, 5939},{ 1399, 6586},{ 2263, 6901},
- { 3290, 6967},{ 4272, 6920},{ 5115, 6847},{ 5839, 6779},
- { 6478, 6726},{ 7051, 6685},{ 7571, 6649},{ 8050, 6614},
- { 8495, 6587},{ 8908, 6567},{ 9298, 6550},{ 9673, 6530},
- {10005, 6512},{10324, 6499},{10640, 6483},{10936, 6487}
+ { 68, 151},{ 107, 1413},{ 262, 2665},{ 542, 3715},
+ { 946, 4584},{ 1508, 5279},{ 2167, 5829},{ 2968, 6179},
+ { 3758, 6392},{ 4481, 6517},{ 5139, 6577},{ 5706, 6636},
+ { 6271, 6612},{ 6746, 6585},{ 7216, 6533},{ 7622, 6496},
+ { 8045, 6403},{ 8393, 6389},{ 8799, 6272},{ 9062, 6281},
+ { 9436, 6184},{ 9637, 6238},{ 9864, 6215},{10147, 6215}
}
},
{
/*Cb qi=36 INTRA*/
{
- { 6, 3},{ 98, 370},{ 170, 746},{ 225, 1150},
- { 306, 1527},{ 416, 1845},{ 534, 2116},{ 642, 2363},
- { 743, 2591},{ 851, 2794},{ 964, 2972},{ 1081, 3133},
- { 1198, 3275},{ 1311, 3410},{ 1411, 3547},{ 1519, 3680},
- { 1642, 3789},{ 1750, 3892},{ 1860, 3982},{ 1998, 4054},
- { 2141, 4129},{ 2256, 4204},{ 2372, 4278},{ 2567, 4356}
+ { 91, 385},{ 138, 613},{ 205, 932},{ 265, 1239},
+ { 353, 1549},{ 443, 1839},{ 518, 2104},{ 655, 2341},
+ { 764, 2559},{ 876, 2756},{ 967, 2950},{ 1088, 3107},
+ { 1184, 3266},{ 1295, 3396},{ 1375, 3548},{ 1502, 3664},
+ { 1610, 3764},{ 1731, 3844},{ 1839, 3938},{ 1954, 4016},
+ { 2069, 4100},{ 2207, 4167},{ 2274, 4253},{ 2374, 4289}
},
/*Cb qi=36 INTER*/
{
- { 107, 30},{ 96, 346},{ 88, 667},{ 100, 1039},
- { 115, 1426},{ 128, 1804},{ 142, 2164},{ 158, 2512},
- { 176, 2851},{ 195, 3178},{ 218, 3491},{ 243, 3791},
- { 270, 4084},{ 307, 4365},{ 348, 4638},{ 397, 4908},
- { 464, 5157},{ 545, 5392},{ 635, 5620},{ 734, 5831},
- { 854, 6015},{ 993, 6170},{ 1124, 6327},{ 1234, 6502}
+ { 59, 18},{ 56, 463},{ 50, 790},{ 76, 1155},
+ { 90, 1515},{ 108, 1877},{ 125, 2226},{ 150, 2562},
+ { 177, 2890},{ 203, 3203},{ 231, 3501},{ 259, 3789},
+ { 289, 4074},{ 325, 4348},{ 367, 4608},{ 418, 4857},
+ { 486, 5093},{ 574, 5307},{ 677, 5494},{ 784, 5688},
+ { 914, 5844},{ 1033, 6004},{ 1142, 6179},{ 1307, 6220}
}
},
{
/*Cr qi=36 INTRA*/
{
- { 12, 7},{ 102, 388},{ 172, 773},{ 239, 1182},
- { 328, 1546},{ 439, 1848},{ 554, 2106},{ 651, 2341},
- { 747, 2561},{ 850, 2757},{ 972, 2934},{ 1086, 3097},
- { 1182, 3245},{ 1302, 3382},{ 1447, 3491},{ 1572, 3567},
- { 1677, 3641},{ 1793, 3733},{ 1899, 3828},{ 2013, 3894},
- { 2163, 3967},{ 2283, 4059},{ 2387, 4142},{ 2559, 4145}
+ { 87, 376},{ 132, 616},{ 190, 931},{ 268, 1260},
+ { 358, 1550},{ 457, 1833},{ 592, 2082},{ 685, 2318},
+ { 781, 2548},{ 867, 2757},{ 968, 2953},{ 1080, 3124},
+ { 1173, 3255},{ 1282, 3390},{ 1410, 3477},{ 1528, 3593},
+ { 1645, 3612},{ 1766, 3739},{ 1885, 3789},{ 1954, 3892},
+ { 2115, 3987},{ 2202, 4052},{ 2280, 4172},{ 2379, 4213}
},
/*Cr qi=36 INTER*/
{
- { 98, -10},{ 96, 347},{ 89, 676},{ 102, 1048},
- { 118, 1433},{ 130, 1804},{ 141, 2167},{ 154, 2523},
- { 171, 2866},{ 190, 3194},{ 212, 3508},{ 240, 3809},
- { 276, 4099},{ 320, 4377},{ 372, 4638},{ 428, 4887},
- { 492, 5122},{ 560, 5353},{ 638, 5572},{ 725, 5779},
- { 814, 5985},{ 902, 6192},{ 1013, 6377},{ 1155, 6527}
- }
- }
- },
- {
- {
- /*Y' qi=37 INTRA*/
- {
- { 109, 58},{ 445, 1302},{ 927, 2177},{ 1489, 2689},
- { 2053, 3052},{ 2632, 3387},{ 3230, 3683},{ 3830, 3922},
- { 4417, 4114},{ 4992, 4266},{ 5546, 4375},{ 6067, 4430},
- { 6571, 4459},{ 7046, 4516},{ 7513, 4599},{ 7991, 4663},
- { 8445, 4706},{ 8883, 4749},{ 9273, 4771},{ 9612, 4770},
- { 9970, 4765},{10325, 4773},{10672, 4778},{11106, 4758}
- },
- /*Y' qi=37 INTER*/
- {
- { 56, -14},{ 114, 1333},{ 218, 2683},{ 354, 3894},
- { 550, 4966},{ 916, 5854},{ 1569, 6437},{ 2520, 6685},
- { 3596, 6704},{ 4585, 6635},{ 5424, 6556},{ 6147, 6489},
- { 6787, 6437},{ 7358, 6395},{ 7876, 6358},{ 8361, 6325},
- { 8807, 6294},{ 9229, 6271},{ 9631, 6253},{10002, 6238},
- {10356, 6228},{10678, 6212},{10975, 6197},{11274, 6185}
- }
- },
- {
- /*Cb qi=37 INTRA*/
- {
- { 6, 3},{ 99, 370},{ 171, 746},{ 227, 1149},
- { 309, 1522},{ 421, 1836},{ 541, 2104},{ 652, 2347},
- { 757, 2572},{ 871, 2768},{ 989, 2936},{ 1111, 3087},
- { 1238, 3223},{ 1357, 3352},{ 1465, 3486},{ 1576, 3612},
- { 1709, 3705},{ 1828, 3801},{ 1937, 3895},{ 2076, 3967},
- { 2220, 4035},{ 2345, 4104},{ 2466, 4173},{ 2680, 4265}
- },
- /*Cb qi=37 INTER*/
- {
- { 111, 27},{ 97, 344},{ 87, 667},{ 99, 1038},
- { 115, 1425},{ 128, 1802},{ 143, 2160},{ 159, 2506},
- { 176, 2843},{ 198, 3167},{ 220, 3477},{ 247, 3774},
- { 280, 4061},{ 321, 4338},{ 368, 4608},{ 427, 4867},
- { 501, 5109},{ 595, 5332},{ 701, 5544},{ 818, 5738},
- { 956, 5905},{ 1105, 6066},{ 1248, 6217},{ 1381, 6353}
- }
- },
- {
- /*Cr qi=37 INTRA*/
- {
- { 12, 7},{ 102, 388},{ 173, 773},{ 242, 1180},
- { 331, 1541},{ 444, 1839},{ 562, 2095},{ 662, 2326},
- { 763, 2540},{ 871, 2728},{ 1003, 2892},{ 1130, 3045},
- { 1230, 3188},{ 1350, 3321},{ 1503, 3418},{ 1634, 3492},
- { 1737, 3568},{ 1856, 3653},{ 1970, 3744},{ 2091, 3802},
- { 2247, 3871},{ 2371, 3962},{ 2477, 4041},{ 2655, 4052}
- },
- /*Cr qi=37 INTER*/
- {
- { 89, -9},{ 97, 347},{ 88, 677},{ 102, 1048},
- { 118, 1432},{ 130, 1802},{ 141, 2163},{ 154, 2517},
- { 172, 2857},{ 192, 3181},{ 216, 3494},{ 246, 3793},
- { 286, 4074},{ 337, 4343},{ 395, 4600},{ 464, 4837},
- { 534, 5066},{ 608, 5289},{ 694, 5501},{ 788, 5704},
- { 893, 5901},{ 1010, 6088},{ 1151, 6249},{ 1331, 6374}
- }
- }
- },
- {
- {
- /*Y' qi=38 INTRA*/
- {
- { 107, 65},{ 476, 1286},{ 968, 2148},{ 1548, 2641},
- { 2141, 2979},{ 2757, 3289},{ 3390, 3564},{ 4020, 3784},
- { 4632, 3957},{ 5224, 4097},{ 5794, 4201},{ 6326, 4250},
- { 6828, 4274},{ 7309, 4322},{ 7790, 4401},{ 8271, 4463},
- { 8729, 4498},{ 9165, 4540},{ 9552, 4566},{ 9901, 4560},
- {10266, 4552},{10617, 4563},{10964, 4572},{11393, 4567}
- },
- /*Y' qi=38 INTER*/
- {
- { 57, -13},{ 118, 1332},{ 233, 2665},{ 386, 3856},
- { 620, 4899},{ 1070, 5722},{ 1849, 6211},{ 2898, 6384},
- { 3989, 6376},{ 4947, 6311},{ 5754, 6249},{ 6454, 6199},
- { 7077, 6161},{ 7640, 6132},{ 8159, 6101},{ 8639, 6076},
- { 9081, 6054},{ 9502, 6037},{ 9900, 6027},{10274, 6012},
- {10621, 5999},{10938, 5991},{11237, 5977},{11557, 5966}
- }
- },
- {
- /*Cb qi=38 INTRA*/
- {
- { 8, 3},{ 104, 370},{ 179, 744},{ 243, 1139},
- { 338, 1498},{ 458, 1801},{ 584, 2060},{ 700, 2297},
- { 812, 2514},{ 935, 2699},{ 1061, 2858},{ 1189, 3007},
- { 1321, 3141},{ 1446, 3266},{ 1563, 3388},{ 1684, 3512},
- { 1816, 3614},{ 1942, 3702},{ 2055, 3793},{ 2201, 3857},
- { 2357, 3923},{ 2477, 3994},{ 2593, 4061},{ 2768, 4178}
- },
- /*Cb qi=38 INTER*/
- {
- { 118, 24},{ 102, 342},{ 91, 663},{ 101, 1040},
- { 116, 1427},{ 131, 1799},{ 147, 2152},{ 168, 2491},
- { 191, 2822},{ 215, 3139},{ 244, 3441},{ 276, 3731},
- { 316, 4013},{ 363, 4286},{ 423, 4546},{ 495, 4795},
- { 584, 5028},{ 691, 5242},{ 814, 5439},{ 959, 5608},
- { 1119, 5759},{ 1277, 5906},{ 1449, 6035},{ 1655, 6144}
- }
- },
- {
- /*Cr qi=38 INTRA*/
- {
- { 12, 6},{ 106, 387},{ 182, 771},{ 261, 1168},
- { 364, 1514},{ 483, 1802},{ 603, 2053},{ 707, 2282},
- { 817, 2489},{ 933, 2670},{ 1074, 2825},{ 1210, 2967},
- { 1320, 3104},{ 1444, 3229},{ 1599, 3324},{ 1735, 3396},
- { 1846, 3464},{ 1971, 3547},{ 2086, 3646},{ 2206, 3711},
- { 2366, 3773},{ 2499, 3859},{ 2603, 3945},{ 2766, 3952}
- },
- /*Cr qi=38 INTER*/
- {
- { 86, -9},{ 91, 352},{ 85, 680},{ 102, 1053},
- { 119, 1435},{ 132, 1799},{ 146, 2153},{ 162, 2501},
- { 183, 2835},{ 209, 3154},{ 240, 3458},{ 278, 3751},
- { 327, 4025},{ 388, 4284},{ 455, 4532},{ 529, 4766},
- { 616, 4980},{ 711, 5188},{ 815, 5386},{ 920, 5583},
- { 1042, 5770},{ 1186, 5936},{ 1348, 6080},{ 1542, 6196}
- }
- }
- },
- {
- {
- /*Y' qi=39 INTRA*/
- {
- { 103, 66},{ 479, 1283},{ 998, 2125},{ 1610, 2591},
- { 2223, 2913},{ 2855, 3214},{ 3501, 3482},{ 4146, 3698},
- { 4772, 3868},{ 5376, 3999},{ 5956, 4095},{ 6496, 4140},
- { 7008, 4162},{ 7499, 4209},{ 7987, 4282},{ 8478, 4338},
- { 8947, 4374},{ 9385, 4417},{ 9783, 4437},{10143, 4433},
- {10504, 4424},{10866, 4435},{11225, 4444},{11665, 4430}
- },
- /*Y' qi=39 INTER*/
- {
- { 56, 2},{ 118, 1332},{ 235, 2660},{ 395, 3843},
- { 653, 4867},{ 1153, 5652},{ 2003, 6089},{ 3113, 6214},
- { 4228, 6178},{ 5189, 6102},{ 6002, 6031},{ 6707, 5976},
- { 7336, 5936},{ 7901, 5900},{ 8424, 5870},{ 8915, 5844},
- { 9361, 5822},{ 9784, 5807},{10187, 5794},{10571, 5778},
- {10931, 5763},{11264, 5751},{11582, 5742},{11916, 5730}
- }
- },
- {
- /*Cb qi=39 INTRA*/
- {
- { 8, 3},{ 104, 370},{ 179, 744},{ 244, 1138},
- { 340, 1496},{ 461, 1796},{ 588, 2053},{ 705, 2288},
- { 820, 2503},{ 945, 2684},{ 1073, 2840},{ 1210, 2981},
- { 1352, 3106},{ 1480, 3225},{ 1603, 3342},{ 1728, 3464},
- { 1865, 3559},{ 1990, 3645},{ 2106, 3734},{ 2258, 3796},
- { 2413, 3856},{ 2540, 3920},{ 2667, 3986},{ 2887, 4060}
- },
- /*Cb qi=39 INTER*/
- {
- { 119, 19},{ 103, 340},{ 90, 664},{ 100, 1040},
- { 115, 1426},{ 131, 1797},{ 148, 2148},{ 169, 2486},
- { 192, 2816},{ 217, 3131},{ 247, 3432},{ 282, 3721},
- { 324, 3999},{ 374, 4268},{ 435, 4526},{ 520, 4766},
- { 621, 4990},{ 738, 5194},{ 878, 5376},{ 1035, 5543},
- { 1202, 5686},{ 1374, 5819},{ 1545, 5950},{ 1729, 6064}
- }
- },
- {
- /*Cr qi=39 INTRA*/
- {
- { 12, 6},{ 106, 387},{ 182, 771},{ 262, 1167},
- { 365, 1512},{ 486, 1798},{ 608, 2047},{ 713, 2274},
- { 824, 2479},{ 945, 2655},{ 1091, 2804},{ 1231, 2941},
- { 1346, 3073},{ 1475, 3194},{ 1633, 3282},{ 1778, 3345},
- { 1891, 3414},{ 2013, 3501},{ 2138, 3584},{ 2266, 3640},
- { 2428, 3701},{ 2568, 3782},{ 2674, 3863},{ 2816, 3894}
- },
- /*Cr qi=39 INTER*/
- {
- { 88, -7},{ 92, 352},{ 85, 680},{ 102, 1053},
- { 119, 1434},{ 132, 1797},{ 146, 2151},{ 163, 2498},
- { 185, 2830},{ 211, 3147},{ 243, 3451},{ 285, 3735},
- { 337, 4005},{ 401, 4260},{ 477, 4499},{ 565, 4721},
- { 655, 4937},{ 749, 5148},{ 858, 5344},{ 979, 5529},
- { 1110, 5710},{ 1264, 5871},{ 1460, 5990},{ 1677, 6086}
- }
- }
- },
- {
- {
- /*Y' qi=40 INTRA*/
- {
- { 98, 71},{ 491, 1274},{ 1023, 2103},{ 1641, 2559},
- { 2257, 2877},{ 2898, 3171},{ 3566, 3429},{ 4233, 3629},
- { 4881, 3784},{ 5499, 3906},{ 6088, 3997},{ 6631, 4040},
- { 7145, 4060},{ 7640, 4107},{ 8128, 4178},{ 8618, 4233},
- { 9077, 4267},{ 9514, 4304},{ 9919, 4324},{10277, 4317},
- {10635, 4312},{10985, 4324},{11338, 4331},{11792, 4334}
- },
- /*Y' qi=40 INTER*/
- {
- { 63, -26},{ 125, 1331},{ 256, 2640},{ 439, 3801},
- { 757, 4782},{ 1391, 5474},{ 2399, 5805},{ 3582, 5870},
- { 4678, 5824},{ 5600, 5763},{ 6386, 5710},{ 7076, 5667},
- { 7693, 5637},{ 8252, 5610},{ 8775, 5586},{ 9255, 5571},
- { 9694, 5556},{10115, 5541},{10530, 5530},{10903, 5522},
- {11242, 5515},{11596, 5501},{11904, 5482},{12205, 5475}
- }
- },
- {
- /*Cb qi=40 INTRA*/
- {
- { 8, 3},{ 108, 371},{ 189, 743},{ 265, 1128},
- { 371, 1475},{ 499, 1767},{ 628, 2022},{ 746, 2256},
- { 864, 2467},{ 991, 2647},{ 1124, 2801},{ 1270, 2933},
- { 1412, 3054},{ 1547, 3165},{ 1677, 3277},{ 1804, 3393},
- { 1946, 3483},{ 2078, 3569},{ 2201, 3651},{ 2352, 3711},
- { 2513, 3766},{ 2643, 3826},{ 2775, 3880},{ 3025, 3919}
- },
- /*Cb qi=40 INTER*/
- {
- { 114, 35},{ 104, 349},{ 96, 667},{ 106, 1040},
- { 121, 1423},{ 138, 1789},{ 158, 2132},{ 184, 2464},
- { 212, 2787},{ 242, 3095},{ 279, 3389},{ 321, 3671},
- { 374, 3941},{ 438, 4199},{ 517, 4446},{ 617, 4673},
- { 740, 4881},{ 891, 5064},{ 1058, 5225},{ 1239, 5372},
- { 1441, 5499},{ 1638, 5610},{ 1840, 5719},{ 2076, 5814}
- }
- },
- {
- /*Cr qi=40 INTRA*/
- {
- { 14, 7},{ 114, 389},{ 193, 771},{ 283, 1156},
- { 399, 1488},{ 523, 1768},{ 643, 2018},{ 752, 2245},
- { 865, 2450},{ 984, 2626},{ 1139, 2763},{ 1290, 2887},
- { 1413, 3014},{ 1550, 3128},{ 1711, 3211},{ 1865, 3268},
- { 1981, 3334},{ 2103, 3415},{ 2237, 3486},{ 2365, 3543},
- { 2529, 3610},{ 2666, 3700},{ 2775, 3779},{ 2929, 3803}
- },
- /*Cr qi=40 INTER*/
- {
- { 89, -8},{ 95, 353},{ 90, 681},{ 107, 1053},
- { 124, 1430},{ 139, 1787},{ 156, 2136},{ 177, 2477},
- { 203, 2803},{ 237, 3112},{ 276, 3406},{ 329, 3683},
- { 395, 3942},{ 475, 4182},{ 567, 4407},{ 665, 4624},
- { 767, 4834},{ 879, 5032},{ 1011, 5213},{ 1169, 5375},
- { 1348, 5525},{ 1547, 5654},{ 1785, 5743},{ 2066, 5787}
- }
- }
- },
- {
- {
- /*Y' qi=41 INTRA*/
- {
- { 98, 71},{ 495, 1272},{ 1040, 2090},{ 1675, 2533},
- { 2302, 2842},{ 2953, 3132},{ 3631, 3381},{ 4309, 3574},
- { 4966, 3726},{ 5593, 3846},{ 6189, 3934},{ 6738, 3972},
- { 7256, 3991},{ 7754, 4036},{ 8250, 4099},{ 8747, 4150},
- { 9207, 4185},{ 9650, 4222},{10057, 4242},{10411, 4237},
- {10771, 4230},{11127, 4244},{11486, 4254},{11933, 4252}
- },
- /*Y' qi=41 INTER*/
- {
- { 65, -25},{ 125, 1331},{ 260, 2633},{ 457, 3782},
- { 807, 4740},{ 1499, 5397},{ 2562, 5693},{ 3766, 5743},
- { 4859, 5695},{ 5776, 5638},{ 6556, 5590},{ 7243, 5554},
- { 7859, 5529},{ 8417, 5506},{ 8935, 5486},{ 9419, 5473},
- { 9869, 5460},{10296, 5446},{10711, 5436},{11089, 5430},
- {11445, 5421},{11802, 5412},{12129, 5404},{12465, 5393}
- }
- },
- {
- /*Cb qi=41 INTRA*/
- {
- { 8, 3},{ 108, 371},{ 189, 743},{ 267, 1126},
- { 374, 1471},{ 504, 1760},{ 635, 2011},{ 758, 2241},
- { 881, 2447},{ 1013, 2621},{ 1147, 2773},{ 1293, 2906},
- { 1441, 3023},{ 1580, 3131},{ 1712, 3243},{ 1844, 3360},
- { 1985, 3451},{ 2114, 3532},{ 2240, 3613},{ 2390, 3680},
- { 2550, 3740},{ 2687, 3800},{ 2825, 3862},{ 3052, 3944}
- },
- /*Cb qi=41 INTER*/
- {
- { 104, 39},{ 100, 350},{ 95, 667},{ 105, 1040},
- { 121, 1422},{ 137, 1787},{ 159, 2129},{ 185, 2459},
- { 216, 2778},{ 249, 3083},{ 287, 3374},{ 335, 3653},
- { 393, 3920},{ 462, 4175},{ 549, 4414},{ 660, 4636},
- { 791, 4839},{ 952, 5014},{ 1135, 5166},{ 1337, 5297},
- { 1552, 5411},{ 1752, 5530},{ 1972, 5634},{ 2224, 5724}
- }
- },
- {
- /*Cr qi=41 INTRA*/
- {
- { 15, 7},{ 115, 389},{ 193, 770},{ 284, 1154},
- { 401, 1484},{ 528, 1761},{ 652, 2005},{ 764, 2228},
- { 882, 2427},{ 1008, 2599},{ 1167, 2734},{ 1320, 2859},
- { 1443, 2990},{ 1580, 3103},{ 1743, 3181},{ 1894, 3241},
- { 2012, 3309},{ 2141, 3385},{ 2272, 3459},{ 2398, 3519},
- { 2566, 3584},{ 2707, 3680},{ 2816, 3762},{ 2991, 3770}
- },
- /*Cr qi=41 INTER*/
- {
- { 92, -9},{ 98, 354},{ 90, 682},{ 107, 1052},
- { 124, 1429},{ 139, 1786},{ 156, 2132},{ 178, 2471},
- { 207, 2794},{ 241, 3100},{ 285, 3391},{ 345, 3662},
- { 417, 3915},{ 503, 4151},{ 600, 4375},{ 703, 4589},
- { 815, 4791},{ 942, 4981},{ 1088, 5155},{ 1250, 5316},
- { 1432, 5462},{ 1653, 5575},{ 1930, 5639},{ 2250, 5655}
- }
- }
- },
- {
- {
- /*Y' qi=42 INTRA*/
- {
- { 109, 75},{ 534, 1257},{ 1114, 2047},{ 1793, 2456},
- { 2461, 2735},{ 3157, 2994},{ 3879, 3221},{ 4595, 3396},
- { 5282, 3531},{ 5931, 3638},{ 6546, 3714},{ 7105, 3749},
- { 7633, 3766},{ 8147, 3803},{ 8652, 3865},{ 9148, 3915},
- { 9613, 3946},{10075, 3976},{10489, 3997},{10835, 3994},
- {11195, 3985},{11553, 3997},{11909, 4004},{12369, 3990}
- },
- /*Y' qi=42 INTER*/
- {
- { 69, -23},{ 134, 1332},{ 287, 2611},{ 521, 3730},
- { 970, 4624},{ 1827, 5176},{ 3028, 5382},{ 4262, 5389},
- { 5325, 5338},{ 6214, 5291},{ 6976, 5255},{ 7651, 5228},
- { 8260, 5206},{ 8821, 5190},{ 9343, 5177},{ 9823, 5165},
- {10273, 5152},{10709, 5143},{11121, 5136},{11502, 5129},
- {11857, 5125},{12193, 5115},{12520, 5107},{12802, 5097}
- }
- },
- {
- /*Cb qi=42 INTRA*/
- {
- { 9, 3},{ 113, 371},{ 199, 743},{ 279, 1123},
- { 390, 1462},{ 525, 1743},{ 662, 1986},{ 789, 2208},
- { 916, 2406},{ 1057, 2571},{ 1204, 2712},{ 1362, 2835},
- { 1524, 2943},{ 1676, 3040},{ 1815, 3145},{ 1959, 3249},
- { 2117, 3325},{ 2249, 3406},{ 2377, 3488},{ 2537, 3547},
- { 2706, 3597},{ 2854, 3646},{ 2999, 3705},{ 3236, 3759}
- },
- /*Cb qi=42 INTER*/
- {
- { 114, 44},{ 107, 353},{ 101, 670},{ 111, 1041},
- { 129, 1418},{ 148, 1775},{ 174, 2110},{ 208, 2432},
- { 244, 2746},{ 283, 3046},{ 330, 3330},{ 388, 3602},
- { 460, 3858},{ 546, 4101},{ 655, 4326},{ 793, 4530},
- { 966, 4703},{ 1165, 4851},{ 1388, 4980},{ 1630, 5088},
- { 1869, 5189},{ 2122, 5268},{ 2403, 5328},{ 2667, 5417}
- }
- },
- {
- /*Cr qi=42 INTRA*/
- {
- { 15, 7},{ 120, 390},{ 202, 771},{ 298, 1150},
- { 421, 1473},{ 553, 1743},{ 681, 1982},{ 796, 2199},
- { 923, 2388},{ 1062, 2547},{ 1225, 2678},{ 1392, 2792},
- { 1531, 2907},{ 1682, 3007},{ 1856, 3074},{ 2009, 3134},
- { 2138, 3192},{ 2274, 3257},{ 2407, 3333},{ 2536, 3393},
- { 2711, 3455},{ 2875, 3531},{ 3000, 3598},{ 3186, 3599}
- },
- /*Cr qi=42 INTER*/
- {
- { 87, -4},{ 95, 358},{ 97, 683},{ 113, 1052},
- { 131, 1423},{ 148, 1774},{ 170, 2116},{ 198, 2448},
- { 234, 2762},{ 276, 3062},{ 331, 3343},{ 404, 3603},
- { 494, 3844},{ 598, 4067},{ 715, 4276},{ 842, 4471},
- { 977, 4661},{ 1128, 4840},{ 1311, 4991},{ 1516, 5127},
- { 1759, 5233},{ 2050, 5300},{ 2377, 5323},{ 2710, 5304}
- }
- }
- },
- {
- {
- /*Y' qi=43 INTRA*/
- {
- { 99, 79},{ 557, 1244},{ 1175, 2016},{ 1882, 2408},
- { 2570, 2677},{ 3288, 2926},{ 4030, 3141},{ 4760, 3307},
- { 5458, 3435},{ 6115, 3537},{ 6743, 3608},{ 7312, 3636},
- { 7841, 3652},{ 8357, 3687},{ 8870, 3742},{ 9376, 3788},
- { 9850, 3821},{10315, 3853},{10734, 3873},{11084, 3870},
- {11442, 3862},{11800, 3874},{12160, 3879},{12618, 3876}
- },
- /*Y' qi=43 INTER*/
- {
- { 69, -22},{ 134, 1331},{ 294, 2601},{ 551, 3703},
- { 1056, 4563},{ 2003, 5061},{ 3276, 5215},{ 4534, 5194},
- { 5599, 5133},{ 6488, 5083},{ 7257, 5044},{ 7938, 5014},
- { 8556, 4992},{ 9124, 4975},{ 9648, 4960},{10138, 4948},
- {10594, 4939},{11039, 4926},{11462, 4919},{11847, 4912},
- {12216, 4904},{12570, 4896},{12883, 4889},{13189, 4879}
- }
- },
- {
- /*Cb qi=43 INTRA*/
- {
- { 9, 3},{ 114, 371},{ 202, 740},{ 294, 1110},
- { 417, 1440},{ 558, 1716},{ 700, 1956},{ 833, 2172},
- { 966, 2365},{ 1116, 2524},{ 1269, 2661},{ 1431, 2781},
- { 1599, 2885},{ 1756, 2980},{ 1902, 3082},{ 2051, 3185},
- { 2209, 3261},{ 2337, 3342},{ 2464, 3420},{ 2633, 3475},
- { 2809, 3525},{ 2948, 3579},{ 3094, 3633},{ 3347, 3678}
- },
- /*Cb qi=43 INTER*/
- {
- { 111, 44},{ 106, 353},{ 102, 670},{ 112, 1040},
- { 128, 1416},{ 148, 1771},{ 176, 2104},{ 211, 2424},
- { 250, 2734},{ 293, 3030},{ 347, 3309},{ 411, 3575},
- { 490, 3828},{ 589, 4064},{ 716, 4278},{ 869, 4472},
- { 1050, 4640},{ 1264, 4781},{ 1512, 4895},{ 1775, 4991},
- { 2042, 5069},{ 2310, 5141},{ 2593, 5207},{ 2912, 5239}
- }
- },
- {
- /*Cr qi=43 INTRA*/
- {
- { 15, 7},{ 121, 390},{ 208, 767},{ 315, 1135},
- { 449, 1449},{ 586, 1715},{ 718, 1950},{ 843, 2158},
- { 977, 2342},{ 1120, 2501},{ 1290, 2632},{ 1466, 2739},
- { 1613, 2845},{ 1763, 2945},{ 1937, 3015},{ 2093, 3070},
- { 2225, 3126},{ 2366, 3194},{ 2501, 3267},{ 2634, 3324},
- { 2815, 3385},{ 2964, 3466},{ 3087, 3538},{ 3263, 3555}
- },
- /*Cr qi=43 INTER*/
- {
- { 84, -4},{ 93, 358},{ 95, 683},{ 113, 1052},
- { 131, 1421},{ 148, 1770},{ 171, 2110},{ 201, 2439},
- { 240, 2750},{ 287, 3046},{ 348, 3322},{ 429, 3576},
- { 527, 3811},{ 641, 4029},{ 767, 4230},{ 904, 4422},
- { 1053, 4603},{ 1225, 4765},{ 1433, 4903},{ 1661, 5030},
- { 1928, 5121},{ 2252, 5160},{ 2604, 5164},{ 2979, 5125}
- }
- }
- },
- {
- {
- /*Y' qi=44 INTRA*/
- {
- { 103, 80},{ 560, 1244},{ 1183, 2009},{ 1891, 2391},
- { 2586, 2649},{ 3324, 2884},{ 4093, 3089},{ 4850, 3243},
- { 5575, 3358},{ 6252, 3452},{ 6886, 3518},{ 7459, 3546},
- { 7993, 3562},{ 8515, 3594},{ 9030, 3645},{ 9534, 3691},
- {10004, 3723},{10469, 3750},{10887, 3765},{11236, 3766},
- {11596, 3762},{11960, 3775},{12317, 3784},{12766, 3789}
- },
- /*Y' qi=44 INTER*/
- {
- { 77, -24},{ 145, 1332},{ 332, 2580},{ 642, 3649},
- { 1270, 4438},{ 2360, 4860},{ 3685, 4982},{ 4910, 4966},
- { 5929, 4928},{ 6785, 4900},{ 7529, 4880},{ 8198, 4863},
- { 8804, 4850},{ 9361, 4842},{ 9882, 4836},{10371, 4830},
- {10827, 4822},{11262, 4816},{11672, 4811},{12052, 4807},
- {12431, 4806},{12780, 4798},{13095, 4792},{13401, 4791}
- }
- },
- {
- /*Cb qi=44 INTRA*/
- {
- { 9, 2},{ 122, 371},{ 214, 741},{ 307, 1109},
- { 433, 1432},{ 576, 1704},{ 718, 1939},{ 855, 2152},
- { 991, 2340},{ 1141, 2497},{ 1298, 2632},{ 1463, 2749},
- { 1636, 2851},{ 1796, 2944},{ 1947, 3041},{ 2101, 3140},
- { 2260, 3219},{ 2392, 3297},{ 2527, 3366},{ 2693, 3424},
- { 2872, 3477},{ 3025, 3525},{ 3175, 3584},{ 3451, 3626}
- },
- /*Cb qi=44 INTER*/
- {
- { 111, 14},{ 110, 339},{ 109, 671},{ 120, 1040},
- { 139, 1410},{ 162, 1758},{ 197, 2084},{ 243, 2397},
- { 291, 2702},{ 342, 2992},{ 405, 3265},{ 484, 3521},
- { 584, 3760},{ 705, 3983},{ 855, 4185},{ 1048, 4356},
- { 1274, 4500},{ 1531, 4617},{ 1816, 4707},{ 2111, 4783},
- { 2409, 4846},{ 2720, 4901},{ 3044, 4957},{ 3391, 4985}
- }
- },
- {
- /*Cr qi=44 INTRA*/
- {
- { 17, 7},{ 128, 392},{ 219, 770},{ 329, 1135},
- { 465, 1442},{ 601, 1703},{ 734, 1935},{ 862, 2142},
- { 998, 2325},{ 1147, 2482},{ 1321, 2606},{ 1496, 2710},
- { 1649, 2813},{ 1809, 2908},{ 1984, 2977},{ 2143, 3032},
- { 2279, 3087},{ 2423, 3152},{ 2559, 3225},{ 2684, 3288},
- { 2866, 3351},{ 3025, 3426},{ 3161, 3492},{ 3372, 3500}
- },
- /*Cr qi=44 INTER*/
- {
- { 89, 0},{ 101, 352},{ 104, 683},{ 121, 1051},
- { 141, 1414},{ 163, 1757},{ 192, 2092},{ 231, 2415},
- { 278, 2720},{ 336, 3007},{ 412, 3273},{ 510, 3516},
- { 633, 3733},{ 769, 3936},{ 914, 4130},{ 1076, 4307},
- { 1256, 4472},{ 1469, 4617},{ 1723, 4732},{ 2012, 4822},
- { 2347, 4871},{ 2716, 4875},{ 3082, 4866},{ 3422, 4826}
+ { 53, 45},{ 50, 467},{ 45, 789},{ 76, 1150},
+ { 92, 1531},{ 107, 1877},{ 125, 2219},{ 147, 2561},
+ { 176, 2893},{ 206, 3209},{ 231, 3514},{ 260, 3808},
+ { 298, 4085},{ 350, 4344},{ 411, 4587},{ 475, 4814},
+ { 532, 5037},{ 587, 5261},{ 647, 5480},{ 707, 5694},
+ { 793, 5900},{ 891, 6093},{ 1017, 6292},{ 1205, 6307}
}
}
},
@@ -2848,557 +341,61 @@ oc_mode_rd OC_MODE_RD[64][3][2][OC_SAD_BINS]={
{
/*Y' qi=45 INTRA*/
{
- { 119, 78},{ 610, 1226},{ 1271, 1965},{ 2026, 2319},
- { 2768, 2550},{ 3556, 2757},{ 4369, 2938},{ 5157, 3076},
- { 5901, 3182},{ 6598, 3268},{ 7253, 3326},{ 7844, 3343},
- { 8392, 3356},{ 8922, 3386},{ 9453, 3433},{ 9973, 3474},
- {10457, 3503},{10929, 3530},{11351, 3543},{11709, 3541},
- {12068, 3537},{12434, 3547},{12805, 3555},{13268, 3563}
+ { 47, 170},{ 955, 1217},{ 1713, 2014},{ 3050, 2094},
+ { 3954, 2179},{ 4801, 2357},{ 5629, 2494},{ 6313, 2614},
+ { 6962, 2716},{ 7566, 2820},{ 8138, 2886},{ 8613, 2949},
+ { 9097, 3031},{ 9574, 3044},{10053, 3142},{10514, 3134},
+ {10897, 3241},{11397, 3275},{11775, 3297},{12200, 3350},
+ {12527, 3350},{12959, 3393},{13246, 3401},{13573, 3397}
},
/*Y' qi=45 INTER*/
{
- { 77, -20},{ 146, 1330},{ 342, 2566},{ 699, 3604},
- { 1439, 4332},{ 2669, 4672},{ 4075, 4727},{ 5318, 4679},
- { 6345, 4630},{ 7209, 4595},{ 7963, 4570},{ 8644, 4551},
- { 9262, 4535},{ 9831, 4525},{10370, 4515},{10872, 4506},
- {11334, 4500},{11783, 4492},{12219, 4489},{12617, 4483},
- {12995, 4477},{13350, 4472},{13674, 4466},{13968, 4468}
+ { 53, 73},{ 175, 1343},{ 649, 2439},{ 1339, 3250},
+ { 2297, 3837},{ 3395, 4203},{ 4438, 4400},{ 5401, 4529},
+ { 6222, 4588},{ 7018, 4564},{ 7713, 4532},{ 8378, 4464},
+ { 8959, 4414},{ 9464, 4364},{ 9980, 4315},{10401, 4291},
+ {10805, 4260},{11172, 4260},{11501, 4231},{11798, 4248},
+ {12082, 4254},{12381, 4262},{12572, 4285},{12877, 4289}
}
},
{
/*Cb qi=45 INTRA*/
{
- { 9, 2},{ 122, 370},{ 219, 735},{ 324, 1096},
- { 465, 1414},{ 619, 1679},{ 771, 1905},{ 920, 2103},
- { 1070, 2276},{ 1236, 2419},{ 1410, 2539},{ 1595, 2644},
- { 1784, 2736},{ 1949, 2831},{ 2104, 2931},{ 2275, 3021},
- { 2443, 3092},{ 2586, 3166},{ 2735, 3234},{ 2904, 3288},
- { 3093, 3338},{ 3262, 3382},{ 3419, 3427},{ 3708, 3456}
+ { 112, -14},{ 173, 495},{ 260, 827},{ 355, 1122},
+ { 451, 1420},{ 579, 1695},{ 697, 1934},{ 917, 2101},
+ { 1104, 2244},{ 1266, 2381},{ 1417, 2520},{ 1609, 2611},
+ { 1801, 2689},{ 1973, 2764},{ 2108, 2864},{ 2298, 2948},
+ { 2452, 3008},{ 2588, 3080},{ 2732, 3161},{ 2888, 3203},
+ { 3052, 3266},{ 3240, 3294},{ 3342, 3351},{ 3467, 3373}
},
/*Cb qi=45 INTER*/
{
- { 103, 0},{ 109, 339},{ 109, 670},{ 119, 1039},
- { 137, 1408},{ 162, 1754},{ 199, 2076},{ 248, 2386},
- { 301, 2684},{ 360, 2967},{ 433, 3234},{ 525, 3481},
- { 640, 3713},{ 780, 3924},{ 956, 4110},{ 1176, 4266},
- { 1438, 4390},{ 1736, 4481},{ 2057, 4553},{ 2385, 4613},
- { 2718, 4656},{ 3056, 4698},{ 3416, 4733},{ 3799, 4755}
+ { 41, -49},{ 52, 385},{ 87, 743},{ 110, 1102},
+ { 135, 1453},{ 162, 1788},{ 207, 2096},{ 272, 2391},
+ { 330, 2677},{ 392, 2950},{ 464, 3205},{ 556, 3442},
+ { 674, 3656},{ 827, 3847},{ 1030, 4006},{ 1275, 4132},
+ { 1544, 4234},{ 1809, 4317},{ 2089, 4408},{ 2377, 4456},
+ { 2647, 4532},{ 2919, 4595},{ 3256, 4659},{ 3465, 4657}
}
},
{
/*Cr qi=45 INTRA*/
{
- { 16, 7},{ 128, 391},{ 225, 763},{ 350, 1120},
- { 500, 1420},{ 649, 1673},{ 792, 1893},{ 929, 2089},
- { 1084, 2257},{ 1250, 2401},{ 1440, 2518},{ 1633, 2614},
- { 1799, 2708},{ 1968, 2798},{ 2151, 2863},{ 2314, 2914},
- { 2453, 2968},{ 2611, 3025},{ 2759, 3095},{ 2887, 3160},
- { 3082, 3210},{ 3259, 3278},{ 3403, 3342},{ 3593, 3354}
+ { 99, -14},{ 164, 493},{ 247, 832},{ 358, 1123},
+ { 468, 1416},{ 599, 1680},{ 795, 1886},{ 958, 2063},
+ { 1133, 2211},{ 1300, 2345},{ 1480, 2461},{ 1664, 2554},
+ { 1807, 2656},{ 1995, 2742},{ 2146, 2799},{ 2331, 2856},
+ { 2440, 2894},{ 2592, 2996},{ 2751, 3033},{ 2865, 3112},
+ { 3073, 3162},{ 3210, 3208},{ 3330, 3306},{ 3454, 3332}
},
/*Cr qi=45 INTER*/
{
- { 92, 0},{ 101, 352},{ 103, 682},{ 120, 1049},
- { 140, 1412},{ 163, 1752},{ 193, 2083},{ 234, 2402},
- { 287, 2702},{ 353, 2983},{ 442, 3240},{ 557, 3471},
- { 694, 3680},{ 846, 3873},{ 1014, 4056},{ 1200, 4224},
- { 1414, 4369},{ 1664, 4495},{ 1946, 4595},{ 2278, 4654},
- { 2654, 4673},{ 3047, 4658},{ 3438, 4627},{ 3825, 4585}
- }
- }
- },
- {
- {
- /*Y' qi=46 INTRA*/
- {
- { 119, 78},{ 610, 1227},{ 1277, 1960},{ 2043, 2309},
- { 2805, 2529},{ 3618, 2719},{ 4452, 2887},{ 5257, 3016},
- { 6017, 3115},{ 6727, 3195},{ 7392, 3248},{ 7984, 3267},
- { 8528, 3281},{ 9059, 3310},{ 9593, 3354},{10119, 3395},
- {10599, 3425},{11064, 3450},{11493, 3464},{11850, 3466},
- {12207, 3462},{12578, 3471},{12948, 3480},{13407, 3487}
- },
- /*Y' qi=46 INTER*/
- {
- { 74, -14},{ 149, 1326},{ 382, 2538},{ 807, 3541},
- { 1670, 4211},{ 3000, 4499},{ 4416, 4533},{ 5628, 4490},
- { 6628, 4453},{ 7479, 4425},{ 8228, 4406},{ 8902, 4393},
- { 9521, 4380},{10090, 4371},{10623, 4364},{11124, 4356},
- {11586, 4351},{12043, 4344},{12476, 4341},{12863, 4340},
- {13244, 4337},{13610, 4329},{13936, 4324},{14246, 4329}
- }
- },
- {
- /*Cb qi=46 INTRA*/
- {
- { 11, 2},{ 132, 371},{ 234, 737},{ 340, 1094},
- { 481, 1405},{ 637, 1667},{ 791, 1891},{ 944, 2084},
- { 1099, 2253},{ 1268, 2392},{ 1444, 2507},{ 1633, 2610},
- { 1825, 2700},{ 1990, 2794},{ 2147, 2895},{ 2321, 2984},
- { 2493, 3053},{ 2640, 3126},{ 2787, 3198},{ 2954, 3253},
- { 3146, 3297},{ 3313, 3344},{ 3473, 3393},{ 3757, 3434}
- },
- /*Cb qi=46 INTER*/
- {
- { 97, 0},{ 109, 339},{ 108, 669},{ 120, 1035},
- { 142, 1398},{ 173, 1737},{ 221, 2052},{ 281, 2353},
- { 345, 2646},{ 415, 2924},{ 504, 3183},{ 616, 3421},
- { 749, 3643},{ 914, 3842},{ 1123, 4012},{ 1379, 4150},
- { 1685, 4250},{ 2014, 4327},{ 2366, 4382},{ 2731, 4426},
- { 3083, 4470},{ 3445, 4490},{ 3805, 4511},{ 4146, 4539}
- }
- },
- {
- /*Cr qi=46 INTRA*/
- {
- { 19, 7},{ 137, 393},{ 237, 765},{ 364, 1116},
- { 516, 1411},{ 665, 1662},{ 809, 1880},{ 951, 2072},
- { 1109, 2236},{ 1278, 2378},{ 1474, 2491},{ 1669, 2584},
- { 1835, 2678},{ 2014, 2766},{ 2203, 2828},{ 2366, 2880},
- { 2506, 2933},{ 2661, 2988},{ 2810, 3053},{ 2941, 3116},
- { 3131, 3175},{ 3310, 3243},{ 3461, 3303},{ 3656, 3321}
- },
- /*Cr qi=46 INTER*/
- {
- { 91, 1},{ 103, 351},{ 104, 681},{ 121, 1046},
- { 144, 1401},{ 173, 1736},{ 213, 2060},{ 265, 2373},
- { 330, 2666},{ 410, 2938},{ 517, 3185},{ 655, 3404},
- { 815, 3601},{ 989, 3784},{ 1183, 3951},{ 1400, 4104},
- { 1649, 4241},{ 1933, 4352},{ 2261, 4427},{ 2646, 4458},
- { 3057, 4446},{ 3453, 4418},{ 3820, 4385},{ 4171, 4352}
- }
- }
- },
- {
- {
- /*Y' qi=47 INTRA*/
- {
- { 117, 83},{ 670, 1205},{ 1408, 1904},{ 2239, 2219},
- { 3049, 2414},{ 3905, 2584},{ 4775, 2734},{ 5610, 2852},
- { 6393, 2944},{ 7121, 3017},{ 7804, 3066},{ 8407, 3081},
- { 8957, 3093},{ 9498, 3119},{10043, 3160},{10582, 3199},
- {11083, 3226},{11561, 3250},{11993, 3263},{12352, 3264},
- {12711, 3259},{13092, 3266},{13463, 3271},{13918, 3275}
- },
- /*Y' qi=47 INTER*/
- {
- { 74, -11},{ 148, 1325},{ 404, 2518},{ 910, 3478},
- { 1916, 4080},{ 3369, 4298},{ 4823, 4292},{ 6035, 4238},
- { 7037, 4197},{ 7894, 4168},{ 8650, 4146},{ 9337, 4129},
- { 9968, 4116},{10549, 4105},{11096, 4096},{11605, 4089},
- {12081, 4083},{12547, 4076},{12990, 4070},{13399, 4070},
- {13776, 4065},{14133, 4059},{14486, 4057},{14842, 4053}
- }
- },
- {
- /*Cb qi=47 INTRA*/
- {
- { 11, 2},{ 133, 370},{ 242, 731},{ 367, 1077},
- { 524, 1378},{ 692, 1630},{ 860, 1844},{ 1028, 2024},
- { 1203, 2178},{ 1393, 2305},{ 1582, 2413},{ 1787, 2507},
- { 1992, 2590},{ 2175, 2676},{ 2351, 2767},{ 2534, 2851},
- { 2707, 2923},{ 2862, 2994},{ 3021, 3060},{ 3193, 3111},
- { 3396, 3147},{ 3573, 3184},{ 3752, 3220},{ 4038, 3255}
- },
- /*Cb qi=47 INTER*/
- {
- { 101, 0},{ 107, 339},{ 108, 667},{ 120, 1033},
- { 142, 1394},{ 175, 1729},{ 227, 2040},{ 295, 2335},
- { 369, 2619},{ 452, 2888},{ 556, 3138},{ 686, 3368},
- { 850, 3574},{ 1050, 3758},{ 1299, 3910},{ 1605, 4024},
- { 1950, 4104},{ 2317, 4163},{ 2689, 4210},{ 3077, 4239},
- { 3466, 4258},{ 3840, 4278},{ 4205, 4298},{ 4515, 4340}
- }
- },
- {
- /*Cr qi=47 INTRA*/
- {
- { 19, 7},{ 138, 392},{ 248, 758},{ 396, 1094},
- { 563, 1378},{ 723, 1621},{ 881, 1829},{ 1037, 2011},
- { 1214, 2165},{ 1410, 2290},{ 1623, 2393},{ 1834, 2480},
- { 2016, 2564},{ 2203, 2647},{ 2405, 2707},{ 2569, 2757},
- { 2709, 2810},{ 2871, 2860},{ 3027, 2924},{ 3178, 2980},
- { 3375, 3034},{ 3563, 3097},{ 3724, 3151},{ 3952, 3153}
- },
- /*Cr qi=47 INTER*/
- {
- { 91, 1},{ 100, 351},{ 102, 681},{ 120, 1043},
- { 144, 1397},{ 175, 1729},{ 219, 2049},{ 277, 2356},
- { 353, 2640},{ 451, 2902},{ 579, 3136},{ 739, 3342},
- { 926, 3525},{ 1125, 3698},{ 1343, 3859},{ 1595, 3998},
- { 1881, 4113},{ 2208, 4205},{ 2589, 4253},{ 3014, 4250},
- { 3444, 4220},{ 3838, 4183},{ 4196, 4147},{ 4521, 4116}
- }
- }
- },
- {
- {
- /*Y' qi=48 INTRA*/
- {
- { 107, 87},{ 681, 1200},{ 1456, 1883},{ 2306, 2193},
- { 3122, 2386},{ 3984, 2548},{ 4862, 2693},{ 5704, 2808},
- { 6495, 2899},{ 7232, 2970},{ 7915, 3018},{ 8524, 3034},
- { 9085, 3043},{ 9635, 3068},{10192, 3108},{10735, 3145},
- {11237, 3171},{11719, 3194},{12153, 3207},{12516, 3206},
- {12888, 3202},{13266, 3210},{13637, 3218},{14101, 3219}
- },
- /*Y' qi=48 INTER*/
- {
- { 83, -18},{ 147, 1328},{ 398, 2519},{ 923, 3468},
- { 1979, 4047},{ 3472, 4246},{ 4936, 4232},{ 6148, 4178},
- { 7150, 4139},{ 8007, 4111},{ 8765, 4091},{ 9458, 4076},
- {10090, 4063},{10676, 4054},{11226, 4045},{11742, 4038},
- {12223, 4033},{12686, 4029},{13127, 4022},{13527, 4015},
- {13915, 4012},{14277, 4007},{14619, 4004},{14966, 4001}
- }
- },
- {
- /*Cb qi=48 INTRA*/
- {
- { 11, 2},{ 134, 369},{ 245, 730},{ 373, 1075},
- { 531, 1374},{ 698, 1625},{ 865, 1839},{ 1033, 2019},
- { 1207, 2173},{ 1397, 2300},{ 1588, 2408},{ 1795, 2501},
- { 2003, 2581},{ 2187, 2666},{ 2362, 2757},{ 2548, 2841},
- { 2719, 2912},{ 2876, 2983},{ 3034, 3047},{ 3209, 3097},
- { 3409, 3137},{ 3589, 3178},{ 3762, 3216},{ 4004, 3252}
- },
- /*Cb qi=48 INTER*/
- {
- { 113, 26},{ 112, 344},{ 111, 668},{ 120, 1032},
- { 141, 1392},{ 173, 1727},{ 224, 2036},{ 290, 2330},
- { 363, 2612},{ 447, 2880},{ 551, 3130},{ 685, 3358},
- { 852, 3563},{ 1061, 3742},{ 1332, 3884},{ 1654, 3993},
- { 2011, 4068},{ 2394, 4120},{ 2782, 4160},{ 3172, 4186},
- { 3557, 4209},{ 3932, 4228},{ 4306, 4237},{ 4675, 4236}
- }
- },
- {
- /*Cr qi=48 INTRA*/
- {
- { 18, 7},{ 139, 389},{ 252, 755},{ 404, 1090},
- { 573, 1372},{ 732, 1615},{ 889, 1823},{ 1045, 2005},
- { 1222, 2159},{ 1417, 2285},{ 1631, 2387},{ 1843, 2474},
- { 2027, 2558},{ 2212, 2639},{ 2413, 2697},{ 2578, 2746},
- { 2720, 2798},{ 2887, 2852},{ 3040, 2913},{ 3181, 2970},
- { 3381, 3024},{ 3581, 3081},{ 3743, 3130},{ 3948, 3133}
- },
- /*Cr qi=48 INTER*/
- {
- { 89, 0},{ 106, 352},{ 105, 682},{ 120, 1044},
- { 144, 1395},{ 174, 1724},{ 215, 2044},{ 270, 2350},
- { 343, 2635},{ 441, 2895},{ 571, 3129},{ 735, 3334},
- { 926, 3518},{ 1139, 3684},{ 1371, 3836},{ 1628, 3977},
- { 1933, 4089},{ 2279, 4164},{ 2672, 4204},{ 3105, 4205},
- { 3533, 4176},{ 3931, 4135},{ 4290, 4089},{ 4624, 4057}
- }
- }
- },
- {
- {
- /*Y' qi=49 INTRA*/
- {
- { 120, 85},{ 706, 1194},{ 1485, 1875},{ 2348, 2187},
- { 3190, 2372},{ 4076, 2521},{ 4967, 2658},{ 5819, 2771},
- { 6611, 2861},{ 7345, 2936},{ 8026, 2990},{ 8626, 3013},
- { 9182, 3030},{ 9723, 3059},{10266, 3100},{10802, 3143},
- {11293, 3179},{11768, 3206},{12201, 3221},{12556, 3225},
- {12914, 3226},{13281, 3237},{13639, 3247},{14089, 3257}
- },
- /*Y' qi=49 INTER*/
- {
- { 72, -11},{ 155, 1320},{ 458, 2485},{ 1090, 3386},
- { 2284, 3907},{ 3835, 4075},{ 5272, 4064},{ 6449, 4026},
- { 7426, 4003},{ 8267, 3987},{ 9017, 3976},{ 9698, 3967},
- {10328, 3962},{10913, 3959},{11452, 3954},{11961, 3950},
- {12442, 3947},{12904, 3946},{13347, 3945},{13749, 3943},
- {14123, 3941},{14490, 3941},{14826, 3939},{15153, 3937}
- }
- },
- {
- /*Cb qi=49 INTRA*/
- {
- { 11, 2},{ 145, 369},{ 262, 729},{ 393, 1070},
- { 557, 1363},{ 731, 1607},{ 907, 1811},{ 1085, 1983},
- { 1268, 2130},{ 1465, 2251},{ 1658, 2359},{ 1868, 2454},
- { 2079, 2534},{ 2264, 2621},{ 2440, 2717},{ 2625, 2802},
- { 2792, 2878},{ 2945, 2954},{ 3106, 3021},{ 3277, 3075},
- { 3466, 3119},{ 3638, 3170},{ 3824, 3213},{ 4100, 3243}
- },
- /*Cb qi=49 INTER*/
- {
- { 98, -6},{ 113, 343},{ 110, 669},{ 122, 1029},
- { 149, 1380},{ 192, 1706},{ 258, 2007},{ 340, 2293},
- { 426, 2569},{ 525, 2831},{ 653, 3071},{ 814, 3287},
- { 1013, 3478},{ 1262, 3637},{ 1575, 3761},{ 1936, 3851},
- { 2328, 3910},{ 2741, 3949},{ 3163, 3970},{ 3559, 3994},
- { 3936, 4025},{ 4300, 4050},{ 4655, 4060},{ 4962, 4062}
- }
- },
- {
- /*Cr qi=49 INTRA*/
- {
- { 19, 7},{ 151, 389},{ 270, 753},{ 427, 1084},
- { 602, 1360},{ 767, 1595},{ 933, 1794},{ 1098, 1968},
- { 1285, 2115},{ 1489, 2237},{ 1699, 2342},{ 1912, 2435},
- { 2101, 2519},{ 2288, 2601},{ 2486, 2663},{ 2651, 2715},
- { 2799, 2769},{ 2958, 2825},{ 3106, 2890},{ 3257, 2948},
- { 3452, 3007},{ 3634, 3075},{ 3786, 3136},{ 3959, 3164}
- },
- /*Cr qi=49 INTER*/
- {
- { 85, 1},{ 103, 352},{ 104, 681},{ 121, 1039},
- { 152, 1382},{ 195, 1702},{ 248, 2015},{ 316, 2316},
- { 403, 2595},{ 520, 2847},{ 676, 3068},{ 870, 3258},
- { 1091, 3429},{ 1329, 3585},{ 1597, 3725},{ 1894, 3849},
- { 2242, 3940},{ 2656, 3984},{ 3098, 3992},{ 3531, 3981},
- { 3936, 3950},{ 4304, 3915},{ 4646, 3879},{ 4915, 3861}
- }
- }
- },
- {
- {
- /*Y' qi=50 INTRA*/
- {
- { 122, 89},{ 798, 1170},{ 1682, 1812},{ 2613, 2096},
- { 3501, 2260},{ 4430, 2388},{ 5352, 2510},{ 6228, 2613},
- { 7043, 2698},{ 7793, 2770},{ 8486, 2823},{ 9092, 2846},
- { 9652, 2865},{10210, 2895},{10773, 2936},{11315, 2979},
- {11817, 3014},{12297, 3041},{12734, 3057},{13097, 3064},
- {13443, 3067},{13813, 3078},{14190, 3088},{14646, 3103}
- },
- /*Y' qi=50 INTER*/
- {
- { 73, -11},{ 154, 1318},{ 501, 2457},{ 1281, 3291},
- { 2685, 3719},{ 4356, 3810},{ 5811, 3769},{ 6988, 3726},
- { 7976, 3700},{ 8835, 3682},{ 9606, 3669},{10307, 3659},
- {10953, 3652},{11556, 3645},{12115, 3643},{12641, 3640},
- {13138, 3636},{13613, 3634},{14068, 3629},{14488, 3627},
- {14876, 3625},{15237, 3621},{15585, 3623},{15922, 3629}
- }
- },
- {
- /*Cb qi=50 INTRA*/
- {
- { 11, 2},{ 148, 368},{ 278, 724},{ 431, 1052},
- { 613, 1334},{ 806, 1567},{ 1004, 1756},{ 1203, 1915},
- { 1405, 2051},{ 1621, 2163},{ 1833, 2262},{ 2059, 2347},
- { 2280, 2424},{ 2476, 2512},{ 2670, 2598},{ 2864, 2679},
- { 3037, 2754},{ 3201, 2826},{ 3376, 2887},{ 3562, 2936},
- { 3756, 2976},{ 3932, 3022},{ 4117, 3065},{ 4385, 3094}
- },
- /*Cb qi=50 INTER*/
- {
- { 92, -3},{ 112, 343},{ 109, 669},{ 121, 1027},
- { 149, 1375},{ 196, 1697},{ 270, 1992},{ 366, 2267},
- { 471, 2532},{ 594, 2782},{ 747, 3011},{ 942, 3212},
- { 1189, 3384},{ 1497, 3521},{ 1875, 3613},{ 2297, 3673},
- { 2739, 3710},{ 3195, 3725},{ 3644, 3737},{ 4057, 3751},
- { 4445, 3763},{ 4841, 3769},{ 5211, 3779},{ 5568, 3769}
- }
- },
- {
- /*Cr qi=50 INTRA*/
- {
- { 19, 7},{ 155, 388},{ 290, 744},{ 474, 1060},
- { 666, 1324},{ 847, 1549},{ 1033, 1737},{ 1219, 1898},
- { 1428, 2034},{ 1653, 2147},{ 1885, 2245},{ 2115, 2329},
- { 2316, 2410},{ 2517, 2486},{ 2730, 2539},{ 2901, 2586},
- { 3042, 2638},{ 3199, 2693},{ 3366, 2755},{ 3534, 2805},
- { 3738, 2858},{ 3934, 2916},{ 4079, 2975},{ 4257, 2992}
- },
- /*Cr qi=50 INTER*/
- {
- { 87, 1},{ 102, 353},{ 103, 680},{ 121, 1036},
- { 153, 1377},{ 199, 1694},{ 260, 1999},{ 339, 2291},
- { 446, 2559},{ 590, 2797},{ 780, 3003},{ 1010, 3176},
- { 1267, 3331},{ 1547, 3474},{ 1874, 3594},{ 2245, 3688},
- { 2666, 3742},{ 3130, 3758},{ 3594, 3748},{ 4028, 3711},
- { 4415, 3674},{ 4771, 3641},{ 5122, 3605},{ 5482, 3569}
- }
- }
- },
- {
- {
- /*Y' qi=51 INTRA*/
- {
- { 115, 93},{ 819, 1164},{ 1739, 1806},{ 2695, 2101},
- { 3612, 2257},{ 4552, 2374},{ 5479, 2490},{ 6352, 2593},
- { 7158, 2683},{ 7898, 2761},{ 8580, 2823},{ 9177, 2854},
- { 9728, 2880},{10268, 2917},{10816, 2966},{11350, 3016},
- {11834, 3058},{12311, 3089},{12741, 3109},{13092, 3119},
- {13434, 3126},{13791, 3142},{14156, 3155},{14590, 3171}
- },
- /*Y' qi=51 INTER*/
- {
- { 58, 0},{ 171, 1307},{ 610, 2407},{ 1563, 3175},
- { 3116, 3545},{ 4789, 3624},{ 6185, 3602},{ 7320, 3583},
- { 8282, 3574},{ 9124, 3569},{ 9878, 3567},{10569, 3565},
- {11207, 3563},{11801, 3564},{12359, 3566},{12884, 3567},
- {13373, 3568},{13841, 3567},{14289, 3566},{14699, 3568},
- {15086, 3568},{15446, 3566},{15788, 3564},{16103, 3568}
- }
- },
- {
- /*Cb qi=51 INTRA*/
- {
- { 14, 3},{ 161, 369},{ 297, 722},{ 454, 1047},
- { 639, 1325},{ 833, 1554},{ 1033, 1742},{ 1236, 1897},
- { 1440, 2032},{ 1653, 2148},{ 1860, 2253},{ 2077, 2347},
- { 2288, 2432},{ 2476, 2525},{ 2661, 2621},{ 2841, 2714},
- { 3010, 2797},{ 3170, 2876},{ 3333, 2945},{ 3510, 3000},
- { 3696, 3054},{ 3865, 3114},{ 4046, 3164},{ 4317, 3200}
- },
- /*Cb qi=51 INTER*/
- {
- { 88, -11},{ 109, 341},{ 109, 668},{ 126, 1019},
- { 168, 1358},{ 233, 1670},{ 329, 1955},{ 451, 2219},
- { 584, 2472},{ 736, 2711},{ 931, 2923},{ 1179, 3104},
- { 1480, 3254},{ 1846, 3368},{ 2265, 3448},{ 2714, 3501},
- { 3180, 3524},{ 3638, 3529},{ 4074, 3543},{ 4485, 3560},
- { 4868, 3571},{ 5238, 3581},{ 5597, 3594},{ 5953, 3591}
- }
- },
- {
- /*Cr qi=51 INTRA*/
- {
- { 24, 7},{ 168, 388},{ 309, 742},{ 496, 1054},
- { 688, 1316},{ 873, 1538},{ 1063, 1723},{ 1252, 1882},
- { 1460, 2018},{ 1682, 2134},{ 1907, 2238},{ 2125, 2332},
- { 2317, 2422},{ 2507, 2510},{ 2705, 2575},{ 2869, 2630},
- { 3015, 2684},{ 3178, 2744},{ 3329, 2815},{ 3477, 2878},
- { 3667, 2945},{ 3848, 3016},{ 3997, 3082},{ 4174, 3121}
- },
- /*Cr qi=51 INTER*/
- {
- { 83, -2},{ 102, 351},{ 102, 680},{ 126, 1029},
- { 172, 1359},{ 238, 1665},{ 321, 1962},{ 422, 2246},
- { 552, 2505},{ 733, 2728},{ 970, 2912},{ 1247, 3069},
- { 1552, 3209},{ 1876, 3338},{ 2251, 3440},{ 2692, 3502},
- { 3161, 3529},{ 3637, 3525},{ 4084, 3509},{ 4487, 3479},
- { 4850, 3444},{ 5181, 3419},{ 5507, 3406},{ 5786, 3398}
- }
- }
- },
- {
- {
- /*Y' qi=52 INTRA*/
- {
- { 117, 93},{ 814, 1168},{ 1729, 1822},{ 2706, 2119},
- { 3655, 2262},{ 4604, 2374},{ 5528, 2490},{ 6394, 2596},
- { 7189, 2691},{ 7921, 2777},{ 8596, 2846},{ 9184, 2885},
- { 9728, 2918},{10260, 2961},{10796, 3014},{11316, 3069},
- {11793, 3115},{12267, 3150},{12692, 3172},{13037, 3185},
- {13367, 3196},{13717, 3214},{14087, 3227},{14521, 3249}
- },
- /*Y' qi=52 INTER*/
- {
- { 52, 0},{ 169, 1308},{ 668, 2382},{ 1735, 3112},
- { 3384, 3451},{ 5077, 3519},{ 6461, 3506},{ 7587, 3496},
- { 8545, 3494},{ 9384, 3494},{10142, 3498},{10838, 3501},
- {11475, 3503},{12078, 3508},{12640, 3511},{13162, 3513},
- {13654, 3517},{14130, 3521},{14576, 3522},{14980, 3523},
- {15369, 3523},{15737, 3522},{16071, 3521},{16382, 3516}
- }
- },
- {
- /*Cb qi=52 INTRA*/
- {
- { 14, 3},{ 163, 369},{ 299, 722},{ 457, 1044},
- { 645, 1319},{ 843, 1545},{ 1050, 1728},{ 1261, 1879},
- { 1468, 2013},{ 1678, 2132},{ 1883, 2240},{ 2093, 2338},
- { 2301, 2428},{ 2488, 2523},{ 2667, 2619},{ 2843, 2718},
- { 3010, 2805},{ 3163, 2887},{ 3323, 2963},{ 3490, 3028},
- { 3665, 3087},{ 3841, 3145},{ 4011, 3197},{ 4289, 3230}
- },
- /*Cb qi=52 INTER*/
- {
- { 98, -7},{ 109, 342},{ 109, 668},{ 126, 1018},
- { 170, 1355},{ 242, 1663},{ 352, 1941},{ 490, 2195},
- { 642, 2439},{ 823, 2666},{ 1052, 2868},{ 1333, 3039},
- { 1670, 3178},{ 2074, 3280},{ 2524, 3348},{ 2996, 3390},
- { 3469, 3410},{ 3923, 3420},{ 4355, 3434},{ 4771, 3451},
- { 5166, 3468},{ 5532, 3483},{ 5885, 3499},{ 6263, 3501}
- }
- },
- {
- /*Cr qi=52 INTRA*/
- {
- { 25, 7},{ 170, 388},{ 312, 741},{ 500, 1051},
- { 694, 1310},{ 883, 1529},{ 1082, 1709},{ 1280, 1864},
- { 1491, 1998},{ 1710, 2117},{ 1932, 2225},{ 2143, 2324},
- { 2328, 2418},{ 2516, 2506},{ 2708, 2578},{ 2870, 2637},
- { 3017, 2693},{ 3170, 2758},{ 3312, 2835},{ 3455, 2901},
- { 3644, 2972},{ 3827, 3049},{ 3968, 3121},{ 4115, 3166}
- },
- /*Cr qi=52 INTER*/
- {
- { 86, -2},{ 101, 352},{ 100, 680},{ 126, 1028},
- { 175, 1356},{ 247, 1657},{ 341, 1948},{ 458, 2224},
- { 615, 2471},{ 828, 2681},{ 1091, 2857},{ 1395, 3008},
- { 1732, 3140},{ 2095, 3257},{ 2502, 3348},{ 2968, 3402},
- { 3457, 3420},{ 3926, 3413},{ 4360, 3388},{ 4759, 3357},
- { 5128, 3329},{ 5449, 3306},{ 5741, 3295},{ 6071, 3296}
- }
- }
- },
- {
- {
- /*Y' qi=53 INTRA*/
- {
- { 138, 93},{ 850, 1161},{ 1773, 1810},{ 2763, 2103},
- { 3722, 2245},{ 4675, 2360},{ 5600, 2483},{ 6464, 2597},
- { 7255, 2700},{ 7982, 2792},{ 8652, 2867},{ 9237, 2913},
- { 9775, 2950},{10302, 2998},{10834, 3058},{11347, 3121},
- {11826, 3169},{12299, 3207},{12713, 3235},{13054, 3250},
- {13387, 3265},{13744, 3286},{14110, 3302},{14515, 3323}
- },
- /*Y' qi=53 INTER*/
- {
- { 52, 2},{ 169, 1308},{ 680, 2377},{ 1763, 3103},
- { 3410, 3450},{ 5094, 3531},{ 6469, 3526},{ 7590, 3525},
- { 8547, 3530},{ 9385, 3534},{10139, 3540},{10835, 3548},
- {11479, 3553},{12075, 3559},{12634, 3565},{13159, 3570},
- {13650, 3573},{14124, 3576},{14575, 3580},{14993, 3583},
- {15375, 3584},{15744, 3584},{16091, 3583},{16421, 3586}
- }
- },
- {
- /*Cb qi=53 INTRA*/
- {
- { 14, 3},{ 167, 367},{ 317, 717},{ 492, 1033},
- { 687, 1306},{ 887, 1531},{ 1095, 1715},{ 1309, 1866},
- { 1517, 2000},{ 1729, 2119},{ 1932, 2227},{ 2146, 2325},
- { 2358, 2414},{ 2544, 2511},{ 2724, 2611},{ 2902, 2711},
- { 3070, 2800},{ 3227, 2878},{ 3381, 2954},{ 3548, 3021},
- { 3724, 3077},{ 3888, 3140},{ 4065, 3196},{ 4359, 3225}
- },
- /*Cb qi=53 INTER*/
- {
- { 93, -8},{ 110, 342},{ 108, 668},{ 125, 1018},
- { 170, 1355},{ 242, 1663},{ 353, 1939},{ 494, 2192},
- { 651, 2433},{ 838, 2658},{ 1076, 2856},{ 1368, 3022},
- { 1716, 3158},{ 2123, 3260},{ 2575, 3330},{ 3042, 3373},
- { 3507, 3396},{ 3962, 3413},{ 4394, 3430},{ 4797, 3452},
- { 5169, 3476},{ 5547, 3496},{ 5914, 3510},{ 6235, 3525}
- }
- },
- {
- /*Cr qi=53 INTRA*/
- {
- { 25, 7},{ 175, 386},{ 335, 734},{ 541, 1037},
- { 737, 1296},{ 926, 1516},{ 1125, 1696},{ 1324, 1851},
- { 1540, 1984},{ 1763, 2102},{ 1989, 2210},{ 2202, 2310},
- { 2386, 2404},{ 2572, 2495},{ 2768, 2569},{ 2929, 2627},
- { 3071, 2684},{ 3231, 2749},{ 3374, 2825},{ 3514, 2894},
- { 3703, 2963},{ 3882, 3040},{ 4024, 3111},{ 4190, 3150}
- },
- /*Cr qi=53 INTER*/
- {
- { 87, -1},{ 99, 352},{ 100, 680},{ 125, 1027},
- { 175, 1355},{ 249, 1657},{ 343, 1946},{ 462, 2220},
- { 624, 2465},{ 844, 2671},{ 1122, 2841},{ 1435, 2989},
- { 1768, 3125},{ 2134, 3243},{ 2545, 3334},{ 3002, 3393},
- { 3490, 3412},{ 3965, 3405},{ 4401, 3384},{ 4797, 3359},
- { 5156, 3328},{ 5482, 3297},{ 5800, 3292},{ 6135, 3293}
+ { 39, -33},{ 48, 403},{ 86, 744},{ 110, 1101},
+ { 134, 1461},{ 165, 1779},{ 205, 2095},{ 259, 2401},
+ { 318, 2686},{ 386, 2958},{ 481, 3204},{ 610, 3415},
+ { 753, 3603},{ 908, 3780},{ 1055, 3959},{ 1220, 4132},
+ { 1422, 4281},{ 1656, 4419},{ 1939, 4512},{ 2259, 4574},
+ { 2593, 4593},{ 2950, 4569},{ 3339, 4505},{ 3542, 4497}
}
}
},
@@ -3406,557 +403,563 @@ oc_mode_rd OC_MODE_RD[64][3][2][OC_SAD_BINS]={
{
/*Y' qi=54 INTRA*/
{
- { 184, 94},{ 902, 1151},{ 1876, 1776},{ 2881, 2057},
- { 3832, 2200},{ 4785, 2315},{ 5709, 2442},{ 6570, 2562},
- { 7362, 2672},{ 8092, 2771},{ 8760, 2852},{ 9337, 2901},
- { 9874, 2943},{10402, 2995},{10928, 3059},{11443, 3126},
- {11926, 3178},{12396, 3220},{12805, 3251},{13139, 3266},
- {13466, 3280},{13822, 3304},{14184, 3322},{14585, 3342}
+ { 339, 30},{ 785, 1251},{ 2395, 1971},{ 4075, 2063},
+ { 4924, 2135},{ 5806, 2270},{ 6604, 2372},{ 7224, 2497},
+ { 7879, 2608},{ 8400, 2729},{ 8951, 2829},{ 9379, 2864},
+ { 9782, 2955},{10230, 3020},{10704, 3132},{11264, 3272},
+ {11618, 3284},{12034, 3394},{12500, 3482},{12767, 3484},
+ {13162, 3580},{13552, 3565},{13997, 3732},{14320, 3715}
},
/*Y' qi=54 INTER*/
{
- { 60, 5},{ 169, 1308},{ 683, 2375},{ 1791, 3090},
- { 3478, 3412},{ 5184, 3470},{ 6568, 3455},{ 7697, 3446},
- { 8659, 3446},{ 9503, 3447},{10266, 3450},{10971, 3454},
- {11619, 3458},{12223, 3462},{12789, 3467},{13315, 3471},
- {13811, 3475},{14291, 3479},{14743, 3479},{15148, 3481},
- {15535, 3483},{15913, 3481},{16252, 3479},{16569, 3472}
+ { 65, 95},{ 269, 1312},{ 1152, 2242},{ 2336, 2863},
+ { 3728, 3239},{ 4944, 3439},{ 6034, 3543},{ 7064, 3580},
+ { 7991, 3586},{ 8849, 3568},{ 9605, 3561},{10306, 3550},
+ {10919, 3544},{11466, 3530},{11972, 3528},{12401, 3536},
+ {12818, 3511},{13185, 3522},{13523, 3505},{13827, 3505},
+ {14114, 3522},{14395, 3521},{14625, 3533},{14909, 3532}
}
},
{
/*Cb qi=54 INTRA*/
{
- { 13, 2},{ 165, 367},{ 318, 715},{ 498, 1030},
- { 698, 1301},{ 906, 1523},{ 1121, 1703},{ 1336, 1853},
- { 1549, 1984},{ 1765, 2100},{ 1974, 2207},{ 2192, 2306},
- { 2402, 2396},{ 2587, 2493},{ 2773, 2591},{ 2953, 2691},
- { 3119, 2778},{ 3277, 2858},{ 3430, 2940},{ 3603, 3004},
- { 3788, 3059},{ 3950, 3121},{ 4128, 3173},{ 4398, 3215}
+ { 148, -3},{ 218, 480},{ 351, 787},{ 437, 1069},
+ { 550, 1350},{ 730, 1592},{ 931, 1784},{ 1243, 1884},
+ { 1499, 1984},{ 1680, 2115},{ 1864, 2244},{ 2062, 2334},
+ { 2278, 2407},{ 2442, 2496},{ 2602, 2603},{ 2783, 2686},
+ { 2928, 2771},{ 3073, 2856},{ 3207, 2938},{ 3368, 2998},
+ { 3516, 3077},{ 3699, 3122},{ 3818, 3202},{ 3939, 3230}
},
/*Cb qi=54 INTER*/
{
- { 100, -3},{ 109, 343},{ 107, 668},{ 125, 1018},
- { 169, 1354},{ 241, 1662},{ 353, 1938},{ 496, 2190},
- { 655, 2431},{ 843, 2655},{ 1082, 2851},{ 1381, 3015},
- { 1739, 3146},{ 2154, 3243},{ 2610, 3310},{ 3094, 3344},
- { 3581, 3358},{ 4034, 3371},{ 4457, 3384},{ 4867, 3399},
- { 5255, 3413},{ 5630, 3425},{ 6003, 3440},{ 6346, 3440}
+ { 48, -11},{ 54, 407},{ 86, 743},{ 122, 1083},
+ { 176, 1400},{ 241, 1699},{ 347, 1968},{ 496, 2208},
+ { 664, 2431},{ 863, 2637},{ 1120, 2816},{ 1442, 2961},
+ { 1835, 3066},{ 2261, 3140},{ 2676, 3203},{ 3092, 3245},
+ { 3480, 3266},{ 3862, 3286},{ 4254, 3305},{ 4604, 3316},
+ { 4989, 3335},{ 5306, 3351},{ 5654, 3339},{ 5855, 3345}
}
},
{
/*Cr qi=54 INTRA*/
{
- { 23, 7},{ 174, 386},{ 338, 732},{ 549, 1034},
- { 751, 1289},{ 947, 1506},{ 1150, 1685},{ 1353, 1837},
- { 1572, 1969},{ 1800, 2087},{ 2031, 2192},{ 2248, 2291},
- { 2434, 2387},{ 2622, 2477},{ 2815, 2549},{ 2976, 2607},
- { 3126, 2663},{ 3286, 2727},{ 3427, 2807},{ 3569, 2877},
- { 3761, 2941},{ 3942, 3016},{ 4084, 3093},{ 4226, 3131}
+ { 137, 10},{ 212, 492},{ 315, 795},{ 470, 1061},
+ { 612, 1333},{ 821, 1539},{ 1105, 1680},{ 1335, 1811},
+ { 1566, 1927},{ 1773, 2038},{ 1973, 2153},{ 2148, 2259},
+ { 2311, 2352},{ 2474, 2460},{ 2647, 2516},{ 2810, 2607},
+ { 2928, 2638},{ 3085, 2742},{ 3232, 2815},{ 3348, 2899},
+ { 3533, 2993},{ 3679, 3029},{ 3803, 3138},{ 3925, 3170}
},
/*Cr qi=54 INTER*/
{
- { 88, -2},{ 99, 351},{ 100, 680},{ 125, 1027},
- { 175, 1354},{ 248, 1656},{ 343, 1945},{ 463, 2219},
- { 626, 2463},{ 850, 2668},{ 1128, 2837},{ 1445, 2983},
- { 1791, 3111},{ 2168, 3224},{ 2597, 3309},{ 3075, 3351},
- { 3560, 3364},{ 4029, 3356},{ 4464, 3335},{ 4858, 3307},
- { 5218, 3275},{ 5547, 3256},{ 5850, 3247},{ 6171, 3214}
+ { 46, 2},{ 47, 419},{ 87, 746},{ 125, 1083},
+ { 177, 1401},{ 249, 1687},{ 342, 1964},{ 453, 2226},
+ { 627, 2454},{ 869, 2641},{ 1152, 2800},{ 1455, 2942},
+ { 1776, 3077},{ 2135, 3187},{ 2524, 3287},{ 2984, 3325},
+ { 3425, 3344},{ 3881, 3328},{ 4313, 3274},{ 4701, 3218},
+ { 5027, 3171},{ 5299, 3130},{ 5597, 3107},{ 5791, 3120}
}
}
},
{
{
- /*Y' qi=55 INTRA*/
- {
- { 178, 95},{ 968, 1137},{ 2000, 1747},{ 3013, 2027},
- { 3966, 2173},{ 4920, 2294},{ 5842, 2427},{ 6702, 2553},
- { 7489, 2668},{ 8213, 2773},{ 8875, 2858},{ 9452, 2913},
- { 9986, 2959},{10504, 3016},{11023, 3085},{11530, 3157},
- {12011, 3213},{12480, 3257},{12882, 3291},{13214, 3310},
- {13542, 3325},{13890, 3350},{14248, 3371},{14671, 3398}
+ /*Y' qi=63 INTRA*/
+ {
+ { -86, 167},{ 2070, 1104},{ 5138, 1428},{ 7014, 1535},
+ { 8430, 1629},{ 9663, 1690},{10576, 1745},{11277, 1809},
+ {12003, 1869},{12663, 1925},{13258, 1983},{13701, 2016},
+ {14228, 2073},{14756, 2088},{15203, 2164},{15993, 2175},
+ {16378, 2256},{16917, 2240},{17361, 2332},{17782, 2312},
+ {18376, 2381},{18728, 2362},{19224, 2408},{19705, 2392}
},
- /*Y' qi=55 INTER*/
- {
- { 59, 5},{ 170, 1307},{ 725, 2358},{ 1886, 3058},
- { 3589, 3385},{ 5284, 3459},{ 6654, 3458},{ 7771, 3461},
- { 8727, 3470},{ 9564, 3478},{10322, 3488},{11019, 3497},
- {11658, 3505},{12258, 3513},{12819, 3520},{13344, 3527},
- {13840, 3533},{14314, 3537},{14755, 3541},{15161, 3544},
- {15552, 3548},{15916, 3548},{16257, 3548},{16576, 3540}
+ /*Y' qi=63 INTER*/
+ {
+ { -529, 154},{ 967, 1233},{ 4201, 1610},{ 6285, 1800},
+ { 8058, 1908},{ 9439, 1968},{10737, 1987},{11999, 1979},
+ {13003, 1972},{13854, 1963},{14584, 1965},{15217, 1955},
+ {15773, 1956},{16229, 1949},{16735, 1952},{17085, 1956},
+ {17508, 1956},{17821, 1961},{18191, 1961},{18465, 1982},
+ {18792, 1975},{19158, 1995},{19378, 2010},{19817, 2021}
}
},
{
- /*Cb qi=55 INTRA*/
- {
- { 13, 2},{ 167, 366},{ 322, 714},{ 508, 1026},
- { 716, 1292},{ 930, 1511},{ 1148, 1690},{ 1366, 1839},
- { 1578, 1972},{ 1793, 2090},{ 2001, 2199},{ 2217, 2300},
- { 2427, 2393},{ 2609, 2495},{ 2784, 2600},{ 2961, 2704},
- { 3121, 2797},{ 3268, 2884},{ 3423, 2965},{ 3590, 3032},
- { 3764, 3096},{ 3926, 3165},{ 4101, 3223},{ 4405, 3258}
+ /*Cb qi=63 INTRA*/
+ {
+ { 136, 4},{ 338, 438},{ 593, 730},{ 835, 974},
+ { 1168, 1188},{ 1602, 1345},{ 2004, 1467},{ 2465, 1505},
+ { 2799, 1574},{ 3091, 1669},{ 3384, 1758},{ 3673, 1817},
+ { 3950, 1861},{ 4190, 1924},{ 4444, 1993},{ 4701, 2051},
+ { 4915, 2123},{ 5119, 2166},{ 5329, 2231},{ 5576, 2259},
+ { 5793, 2310},{ 6001, 2334},{ 6198, 2384},{ 6344, 2401}
},
- /*Cb qi=55 INTER*/
- {
- { 90, -4},{ 109, 344},{ 107, 668},{ 126, 1017},
- { 172, 1351},{ 249, 1657},{ 370, 1928},{ 527, 2174},
- { 702, 2407},{ 909, 2624},{ 1170, 2814},{ 1493, 2970},
- { 1869, 3097},{ 2292, 3192},{ 2752, 3258},{ 3232, 3295},
- { 3709, 3314},{ 4156, 3335},{ 4592, 3355},{ 5004, 3373},
- { 5377, 3389},{ 5737, 3411},{ 6092, 3432},{ 6473, 3423}
+ /*Cb qi=63 INTER*/
+ {
+ { 49, 4},{ 51, 403},{ 98, 729},{ 185, 1034},
+ { 352, 1304},{ 622, 1533},{ 1068, 1696},{ 1604, 1821},
+ { 2203, 1924},{ 2890, 1988},{ 3622, 2017},{ 4359, 2019},
+ { 5025, 2005},{ 5586, 2002},{ 6090, 1989},{ 6519, 1977},
+ { 6927, 1977},{ 7305, 1968},{ 7730, 1984},{ 8087, 1981},
+ { 8435, 1991},{ 8822, 1987},{ 9155, 2008},{ 9392, 2011}
}
},
{
- /*Cr qi=55 INTRA*/
- {
- { 23, 7},{ 175, 385},{ 342, 730},{ 561, 1028},
- { 771, 1279},{ 973, 1493},{ 1181, 1669},{ 1384, 1822},
- { 1602, 1956},{ 1830, 2076},{ 2057, 2184},{ 2270, 2288},
- { 2452, 2389},{ 2637, 2484},{ 2823, 2559},{ 2983, 2621},
- { 3129, 2682},{ 3280, 2753},{ 3417, 2833},{ 3554, 2904},
- { 3743, 2977},{ 3921, 3060},{ 4055, 3137},{ 4185, 3186}
+ /*Cr qi=63 INTRA*/
+ {
+ { 131, 11},{ 334, 448},{ 569, 739},{ 929, 946},
+ { 1285, 1145},{ 1718, 1274},{ 2176, 1343},{ 2531, 1424},
+ { 2866, 1504},{ 3176, 1580},{ 3475, 1657},{ 3736, 1728},
+ { 3962, 1807},{ 4232, 1872},{ 4425, 1921},{ 4657, 1976},
+ { 4817, 2009},{ 5063, 2082},{ 5281, 2129},{ 5480, 2199},
+ { 5743, 2258},{ 5887, 2283},{ 6124, 2358},{ 6273, 2378}
},
- /*Cr qi=55 INTER*/
- {
- { 85, 0},{ 99, 352},{ 100, 679},{ 126, 1025},
- { 178, 1351},{ 256, 1650},{ 359, 1935},{ 493, 2202},
- { 675, 2439},{ 921, 2636},{ 1220, 2799},{ 1552, 2941},
- { 1910, 3068},{ 2303, 3177},{ 2735, 3262},{ 3206, 3311},
- { 3689, 3333},{ 4152, 3327},{ 4588, 3299},{ 4978, 3272},
- { 5325, 3243},{ 5651, 3221},{ 5969, 3210},{ 6218, 3185}
+ /*Cr qi=63 INTER*/
+ {
+ { 47, 15},{ 40, 405},{ 100, 730},{ 189, 1037},
+ { 351, 1303},{ 625, 1526},{ 984, 1719},{ 1512, 1862},
+ { 2189, 1947},{ 2895, 2003},{ 3576, 2046},{ 4249, 2072},
+ { 4901, 2068},{ 5514, 2043},{ 6079, 2009},{ 6528, 1977},
+ { 6927, 1940},{ 7274, 1915},{ 7580, 1894},{ 7910, 1910},
+ { 8211, 1902},{ 8472, 1920},{ 8742, 1926},{ 8981, 1930}
}
}
- },
+ }
+};
+
+# if !defined(OC_COLLECT_METRICS)
+static const
+# endif
+oc_mode_rd OC_MODE_RD_SAD[OC_LOGQ_BINS][3][2][OC_COMP_BINS]={
{
{
- /*Y' qi=56 INTRA*/
- {
- { 137, 104},{ 1048, 1128},{ 2147, 1760},{ 3261, 2029},
- { 4319, 2131},{ 5310, 2234},{ 6245, 2351},{ 7101, 2464},
- { 7886, 2572},{ 8610, 2675},{ 9270, 2762},{ 9840, 2818},
- {10365, 2869},{10875, 2928},{11393, 2997},{11900, 3071},
- {12371, 3128},{12834, 3172},{13233, 3208},{13562, 3228},
- {13878, 3245},{14221, 3271},{14584, 3292},{15008, 3320}
+ /*Y' qi=0 INTRA*/
+ {
+ { 33, 122},{ 57, 1297},{ 13, 2226},{ 157, 3890},
+ { 227, 3682},{ 169, 3084},{ 197, 2700},{ 227, 3238},
+ { 290, 4294},{ 354, 5230},{ 406, 5615},{ 417, 5322},
+ { 452, 5462},{ 455, 5683},{ 493, 5938},{ 553, 6374},
+ { 558, 6464},{ 606, 6493},{ 616, 6417},{ 643, 6557},
+ { 641, 6664},{ 716, 7285},{ 748, 7518},{ 747, 7502}
},
- /*Y' qi=56 INTER*/
- {
- { 19, 21},{ 207, 1292},{ 1031, 2252},{ 2553, 2846},
- { 4463, 3085},{ 6137, 3131},{ 7441, 3151},{ 8526, 3172},
- { 9468, 3193},{10301, 3209},{11059, 3224},{11760, 3237},
- {12405, 3249},{13008, 3261},{13570, 3270},{14100, 3278},
- {14597, 3284},{15074, 3289},{15524, 3297},{15929, 3302},
- {16314, 3306},{16675, 3307},{17004, 3305},{17288, 3301}
+ /*Y' qi=0 INTER*/
+ {
+ { 16, 205},{ 5, 1338},{ 16, 2554},{ 6, 3809},
+ { 9, 5188},{ 58, 6446},{ 76, 7561},{ 95, 8648},
+ { 124, 9713},{ 158,10787},{ 193,11887},{ 233,12991},
+ { 270,14116},{ 307,15236},{ 341,16346},{ 372,17426},
+ { 398,18499},{ 422,19594},{ 448,20669},{ 479,21732},
+ { 526,22720},{ 583,23572},{ 655,24516},{ 758,24647}
}
},
{
- /*Cb qi=56 INTRA*/
- {
- { 16, 3},{ 188, 367},{ 353, 712},{ 546, 1017},
- { 765, 1275},{ 989, 1484},{ 1221, 1653},{ 1459, 1791},
- { 1681, 1920},{ 1893, 2046},{ 2102, 2160},{ 2323, 2257},
- { 2534, 2347},{ 2720, 2447},{ 2902, 2549},{ 3075, 2654},
- { 3239, 2749},{ 3392, 2835},{ 3544, 2920},{ 3712, 2988},
- { 3882, 3052},{ 4052, 3123},{ 4227, 3181},{ 4483, 3213}
+ /*Cb qi=0 INTRA*/
+ {
+ { 26, 40},{ 23, 589},{ 27, 784},{ 27, 1079},
+ { 24, 1186},{ 25, 1641},{ 25, 1915},{ 29, 2207},
+ { 39, 2361},{ 39, 2746},{ 32, 3020},{ 16, 3387},
+ { 31, 3604},{ 36, 4076},{ 69, 4426},{ 102, 4724},
+ { 139, 4923},{ 196, 5061},{ 211, 5103},{ 214, 5063},
+ { 161, 4466},{ 208, 4793},{ 218, 4537},{ 219, 4539}
},
- /*Cb qi=56 INTER*/
- {
- { 92, -1},{ 111, 343},{ 114, 665},{ 148, 1003},
- { 224, 1321},{ 345, 1609},{ 526, 1858},{ 754, 2077},
- { 1009, 2281},{ 1319, 2464},{ 1702, 2614},{ 2145, 2732},
- { 2625, 2824},{ 3123, 2890},{ 3634, 2933},{ 4137, 2954},
- { 4614, 2965},{ 5052, 2988},{ 5468, 3015},{ 5852, 3035},
- { 6213, 3060},{ 6557, 3081},{ 6906, 3094},{ 7243, 3112}
+ /*Cb qi=0 INTER*/
+ {
+ { 3, 164},{ 1, 535},{ 1, 779},{ 2, 1048},
+ { 3, 1267},{ 1, 1625},{ 2, 1921},{ 5, 2224},
+ { 8, 2481},{ 8, 2813},{ 4, 3089},{ -2, 3386},
+ { -9, 3642},{ -14, 3993},{ -11, 4300},{ -6, 4628},
+ { 4, 4929},{ 25, 5299},{ 44, 5623},{ 83, 5915},
+ { 93, 6186},{ 91, 6483},{ 90, 6775},{ 95, 6952}
}
},
{
- /*Cr qi=56 INTRA*/
- {
- { 28, 8},{ 195, 385},{ 373, 727},{ 598, 1019},
- { 816, 1263},{ 1033, 1465},{ 1260, 1630},{ 1482, 1773},
- { 1717, 1900},{ 1949, 2018},{ 2178, 2128},{ 2393, 2233},
- { 2570, 2338},{ 2749, 2435},{ 2937, 2514},{ 3097, 2577},
- { 3240, 2638},{ 3398, 2709},{ 3540, 2791},{ 3673, 2865},
- { 3869, 2938},{ 4049, 3019},{ 4179, 3095},{ 4330, 3137}
+ /*Cr qi=0 INTRA*/
+ {
+ { 22, 49},{ 26, 579},{ 23, 762},{ 15, 1050},
+ { 20, 1191},{ 24, 1608},{ 26, 1875},{ 35, 2173},
+ { 39, 2359},{ 30, 2736},{ 16, 2987},{ 0, 3334},
+ { 14, 3625},{ 11, 4095},{ 57, 4512},{ 95, 4793},
+ { 141, 4949},{ 206, 5242},{ 230, 5191},{ 242, 5177},
+ { 178, 4775},{ 237, 5010},{ 223, 4656},{ 224, 4657}
},
- /*Cr qi=56 INTER*/
- {
- { 83, 0},{ 99, 353},{ 103, 676},{ 146, 1010},
- { 232, 1320},{ 355, 1601},{ 512, 1866},{ 713, 2109},
- { 988, 2312},{ 1344, 2471},{ 1750, 2602},{ 2180, 2719},
- { 2642, 2819},{ 3141, 2892},{ 3653, 2939},{ 4159, 2961},
- { 4636, 2961},{ 5072, 2945},{ 5464, 2917},{ 5813, 2895},
- { 6134, 2890},{ 6458, 2883},{ 6735, 2881},{ 6953, 2902}
+ /*Cr qi=0 INTER*/
+ {
+ { 3, 163},{ 1, 536},{ 1, 773},{ 3, 1023},
+ { 2, 1225},{ 1, 1607},{ 1, 1900},{ 5, 2204},
+ { 9, 2453},{ 8, 2781},{ 3, 3049},{ -5, 3338},
+ { -13, 3570},{ -17, 3950},{ -13, 4255},{ -6, 4596},
+ { 7, 4893},{ 33, 5300},{ 53, 5632},{ 97, 5942},
+ { 103, 6216},{ 96, 6522},{ 91, 6849},{ 98, 6995}
}
}
},
{
{
- /*Y' qi=57 INTRA*/
- {
- { 170, 106},{ 1106, 1120},{ 2246, 1740},{ 3399, 1993},
- { 4482, 2077},{ 5492, 2167},{ 6446, 2273},{ 7324, 2379},
- { 8130, 2482},{ 8866, 2578},{ 9537, 2661},{10119, 2715},
- {10646, 2762},{11161, 2820},{11694, 2886},{12214, 2957},
- {12693, 3013},{13166, 3053},{13569, 3087},{13897, 3106},
- {14224, 3122},{14568, 3148},{14931, 3167},{15390, 3192}
+ /*Y' qi=9 INTRA*/
+ {
+ { 47, 152},{ 50, 1213},{ 144, 2543},{ 242, 2332},
+ { 210, 1894},{ 250, 2386},{ 328, 3094},{ 407, 3419},
+ { 464, 3507},{ 522, 3770},{ 613, 4194},{ 657, 4618},
+ { 753, 5137},{ 796, 5248},{ 842, 5110},{ 927, 5330},
+ { 994, 5487},{ 1008, 5463},{ 1101, 5794},{ 1169, 5966},
+ { 1208, 6121},{ 1331, 6447},{ 1445, 6618},{ 1449, 6616}
},
- /*Y' qi=57 INTER*/
- {
- { 19, 20},{ 205, 1292},{ 1096, 2229},{ 2775, 2766},
- { 4811, 2943},{ 6512, 2964},{ 7832, 2976},{ 8940, 2990},
- { 9903, 3004},{10755, 3017},{11532, 3029},{12243, 3039},
- {12891, 3047},{13502, 3058},{14073, 3065},{14603, 3071},
- {15097, 3078},{15581, 3083},{16036, 3086},{16452, 3090},
- {16855, 3093},{17222, 3094},{17552, 3092},{17851, 3098}
+ /*Y' qi=9 INTER*/
+ {
+ { 4, 218},{ 16, 1314},{ 4, 2563},{ 37, 3882},
+ { 83, 5058},{ 109, 6184},{ 161, 7292},{ 224, 8389},
+ { 287, 9485},{ 349,10565},{ 411,11608},{ 464,12648},
+ { 518,13664},{ 575,14650},{ 649,15585},{ 742,16451},
+ { 862,17214},{ 1003,17860},{ 1179,18325},{ 1372,18648},
+ { 1576,18878},{ 1795,18903},{ 2040,18880},{ 2116,18759}
}
},
{
- /*Cb qi=57 INTRA*/
- {
- { 16, 3},{ 197, 365},{ 384, 704},{ 603, 1001},
- { 837, 1252},{ 1077, 1455},{ 1326, 1618},{ 1581, 1748},
- { 1819, 1871},{ 2042, 1993},{ 2264, 2104},{ 2500, 2196},
- { 2722, 2280},{ 2916, 2375},{ 3103, 2473},{ 3290, 2575},
- { 3456, 2667},{ 3612, 2748},{ 3775, 2829},{ 3958, 2896},
- { 4145, 2947},{ 4307, 3012},{ 4476, 3070},{ 4733, 3110}
+ /*Cb qi=9 INTRA*/
+ {
+ { 27, 42},{ 23, 587},{ 34, 782},{ 37, 1079},
+ { 34, 1204},{ 42, 1630},{ 37, 1887},{ 25, 2210},
+ { 40, 2455},{ 71, 2880},{ 112, 3193},{ 156, 3427},
+ { 168, 3403},{ 217, 3488},{ 203, 3335},{ 224, 3200},
+ { 191, 2742},{ 195, 2810},{ 207, 2665},{ 201, 2661},
+ { 169, 2078},{ 211, 2720},{ 226, 2813},{ 228, 2824}
},
- /*Cb qi=57 INTER*/
- {
- { 94, -1},{ 111, 344},{ 112, 665},{ 147, 1002},
- { 227, 1319},{ 353, 1604},{ 543, 1849},{ 785, 2062},
- { 1066, 2257},{ 1408, 2430},{ 1827, 2568},{ 2320, 2670},
- { 2848, 2743},{ 3386, 2791},{ 3934, 2812},{ 4453, 2820},
- { 4929, 2830},{ 5368, 2842},{ 5787, 2856},{ 6190, 2875},
- { 6554, 2896},{ 6895, 2913},{ 7229, 2927},{ 7572, 2932}
+ /*Cb qi=9 INTER*/
+ {
+ { 4, 158},{ 2, 537},{ 3, 779},{ 2, 1045},
+ { 3, 1284},{ 7, 1629},{ 7, 1917},{ 1, 2218},
+ { -4, 2497},{ -3, 2845},{ 6, 3162},{ 23, 3482},
+ { 42, 3788},{ 62, 4116},{ 76, 4416},{ 84, 4700},
+ { 91, 4975},{ 95, 5259},{ 97, 5518},{ 94, 5790},
+ { 99, 6052},{ 111, 6311},{ 126, 6601},{ 136, 6719}
}
},
{
- /*Cr qi=57 INTRA*/
- {
- { 28, 8},{ 207, 383},{ 413, 716},{ 661, 999},
- { 889, 1237},{ 1123, 1433},{ 1365, 1592},{ 1603, 1731},
- { 1853, 1852},{ 2103, 1965},{ 2345, 2072},{ 2571, 2173},
- { 2763, 2271},{ 2949, 2364},{ 3146, 2438},{ 3315, 2497},
- { 3459, 2552},{ 3618, 2616},{ 3767, 2697},{ 3906, 2773},
- { 4099, 2841},{ 4281, 2916},{ 4429, 2987},{ 4569, 3030}
+ /*Cr qi=9 INTRA*/
+ {
+ { 25, 50},{ 32, 576},{ 32, 762},{ 21, 1049},
+ { 28, 1207},{ 41, 1603},{ 36, 1839},{ 26, 2170},
+ { 34, 2462},{ 59, 2872},{ 109, 3176},{ 157, 3364},
+ { 188, 3397},{ 231, 3418},{ 250, 3341},{ 261, 3228},
+ { 222, 2814},{ 258, 3091},{ 234, 2915},{ 228, 3042},
+ { 210, 2610},{ 273, 3210},{ 274, 3231},{ 276, 3239}
},
- /*Cr qi=57 INTER*/
- {
- { 85, 0},{ 99, 352},{ 102, 675},{ 147, 1008},
- { 235, 1317},{ 363, 1597},{ 529, 1858},{ 748, 2094},
- { 1050, 2287},{ 1439, 2436},{ 1877, 2557},{ 2352, 2660},
- { 2869, 2740},{ 3413, 2791},{ 3962, 2815},{ 4485, 2819},
- { 4955, 2816},{ 5382, 2800},{ 5769, 2772},{ 6107, 2748},
- { 6443, 2740},{ 6754, 2739},{ 7029, 2737},{ 7284, 2745}
+ /*Cr qi=9 INTER*/
+ {
+ { 4, 156},{ 2, 538},{ 3, 772},{ 2, 1028},
+ { 3, 1254},{ 7, 1613},{ 7, 1893},{ 0, 2191},
+ { -8, 2454},{ -4, 2811},{ 7, 3121},{ 27, 3442},
+ { 48, 3749},{ 72, 4101},{ 88, 4410},{ 91, 4698},
+ { 99, 4988},{ 99, 5279},{ 101, 5542},{ 95, 5813},
+ { 99, 6088},{ 114, 6367},{ 125, 6683},{ 137, 6761}
}
}
},
{
{
- /*Y' qi=58 INTRA*/
- {
- { 164, 109},{ 1198, 1111},{ 2396, 1737},{ 3606, 1978},
- { 4727, 2048},{ 5749, 2138},{ 6708, 2243},{ 7584, 2347},
- { 8388, 2449},{ 9122, 2549},{ 9784, 2635},{10354, 2691},
- {10876, 2740},{11385, 2800},{11912, 2869},{12429, 2941},
- {12902, 2997},{13375, 3040},{13779, 3075},{14103, 3096},
- {14435, 3112},{14783, 3140},{15141, 3160},{15599, 3186}
+ /*Y' qi=18 INTRA*/
+ {
+ { 51, 88},{ 88, 1344},{ 258, 1643},{ 228, 1325},
+ { 372, 2208},{ 443, 2371},{ 520, 2382},{ 584, 2477},
+ { 739, 2906},{ 859, 3348},{ 1008, 3697},{ 1131, 3884},
+ { 1278, 4110},{ 1349, 4229},{ 1431, 4329},{ 1544, 4395},
+ { 1602, 4439},{ 1669, 4535},{ 1814, 4656},{ 1883, 4716},
+ { 1957, 4940},{ 2101, 5019},{ 2259, 5249},{ 2265, 5246}
},
- /*Y' qi=58 INTER*/
- {
- { 14, 23},{ 210, 1290},{ 1277, 2178},{ 3118, 2677},
- { 5207, 2834},{ 6902, 2857},{ 8218, 2878},{ 9323, 2900},
- {10285, 2919},{11132, 2934},{11899, 2949},{12599, 2961},
- {13235, 2971},{13835, 2982},{14394, 2991},{14917, 2997},
- {15412, 3005},{15882, 3009},{16325, 3013},{16735, 3016},
- {17131, 3018},{17501, 3021},{17824, 3021},{18125, 3016}
+ /*Y' qi=18 INTER*/
+ {
+ { 26, 195},{ 1, 1317},{ 45, 2595},{ 103, 3750},
+ { 168, 4903},{ 281, 6007},{ 397, 7062},{ 513, 8064},
+ { 630, 9010},{ 758, 9902},{ 906,10732},{ 1095,11463},
+ { 1338,12060},{ 1629,12490},{ 1969,12724},{ 2313,12842},
+ { 2666,12828},{ 2993,12747},{ 3294,12670},{ 3558,12553},
+ { 3813,12440},{ 3990,12379},{ 4177,12291},{ 4226,12265}
}
},
{
- /*Cb qi=58 INTRA*/
- {
- { 17, 3},{ 200, 365},{ 389, 703},{ 613, 996},
- { 853, 1243},{ 1095, 1445},{ 1349, 1604},{ 1613, 1731},
- { 1853, 1853},{ 2074, 1978},{ 2292, 2091},{ 2526, 2184},
- { 2750, 2266},{ 2945, 2360},{ 3134, 2458},{ 3320, 2561},
- { 3482, 2654},{ 3641, 2737},{ 3804, 2818},{ 3985, 2881},
- { 4168, 2935},{ 4331, 3003},{ 4499, 3060},{ 4751, 3100}
+ /*Cb qi=18 INTRA*/
+ {
+ { 31, 43},{ 33, 585},{ 40, 781},{ 58, 1077},
+ { 45, 1189},{ 58, 1655},{ 66, 1983},{ 123, 2221},
+ { 168, 2193},{ 227, 2321},{ 241, 2246},{ 250, 2208},
+ { 221, 1786},{ 250, 2087},{ 247, 2036},{ 250, 2164},
+ { 241, 2054},{ 287, 2453},{ 302, 2551},{ 335, 2758},
+ { 279, 2511},{ 379, 2973},{ 404, 3028},{ 406, 3029}
},
- /*Cb qi=58 INTER*/
- {
- { 94, -1},{ 112, 345},{ 112, 665},{ 152, 998},
- { 247, 1307},{ 406, 1580},{ 644, 1810},{ 938, 2007},
- { 1271, 2189},{ 1668, 2348},{ 2151, 2470},{ 2691, 2558},
- { 3249, 2619},{ 3798, 2659},{ 4334, 2682},{ 4849, 2692},
- { 5314, 2700},{ 5747, 2721},{ 6167, 2742},{ 6547, 2765},
- { 6902, 2790},{ 7251, 2804},{ 7583, 2819},{ 7924, 2833}
+ /*Cb qi=18 INTER*/
+ {
+ { 7, 153},{ 4, 537},{ 3, 777},{ 9, 1034},
+ { 6, 1282},{ 0, 1630},{ 0, 1943},{ 21, 2252},
+ { 48, 2567},{ 67, 2881},{ 83, 3178},{ 89, 3463},
+ { 92, 3738},{ 99, 4024},{ 114, 4289},{ 131, 4552},
+ { 153, 4814},{ 179, 5081},{ 207, 5333},{ 241, 5581},
+ { 273, 5822},{ 303, 6068},{ 335, 6368},{ 353, 6432}
}
},
{
- /*Cr qi=58 INTRA*/
- {
- { 29, 8},{ 210, 382},{ 419, 714},{ 671, 993},
- { 903, 1229},{ 1141, 1422},{ 1390, 1578},{ 1635, 1713},
- { 1889, 1833},{ 2140, 1946},{ 2379, 2055},{ 2604, 2157},
- { 2794, 2256},{ 2977, 2349},{ 3174, 2422},{ 3339, 2482},
- { 3483, 2537},{ 3643, 2604},{ 3790, 2684},{ 3927, 2757},
- { 4112, 2826},{ 4294, 2900},{ 4451, 2975},{ 4600, 3011}
+ /*Cr qi=18 INTRA*/
+ {
+ { 31, 49},{ 42, 575},{ 42, 763},{ 38, 1045},
+ { 41, 1184},{ 56, 1631},{ 87, 1968},{ 163, 2177},
+ { 191, 2188},{ 236, 2264},{ 240, 2101},{ 234, 2047},
+ { 206, 1651},{ 222, 1966},{ 238, 2013},{ 240, 2176},
+ { 229, 2098},{ 321, 2592},{ 341, 2748},{ 378, 3025},
+ { 367, 2849},{ 442, 3283},{ 453, 3315},{ 455, 3313}
},
- /*Cr qi=58 INTER*/
- {
- { 86, 0},{ 99, 352},{ 103, 675},{ 151, 1004},
- { 256, 1306},{ 417, 1573},{ 628, 1819},{ 901, 2040},
- { 1262, 2217},{ 1705, 2353},{ 2191, 2466},{ 2713, 2556},
- { 3268, 2622},{ 3831, 2664},{ 4374, 2682},{ 4881, 2686},
- { 5339, 2685},{ 5747, 2668},{ 6123, 2646},{ 6465, 2630},
- { 6783, 2618},{ 7082, 2623},{ 7366, 2632},{ 7673, 2654}
+ /*Cr qi=18 INTER*/
+ {
+ { 6, 151},{ 3, 539},{ 3, 775},{ 8, 1027},
+ { 6, 1260},{ -3, 1619},{ 0, 1927},{ 24, 2238},
+ { 58, 2558},{ 76, 2871},{ 92, 3173},{ 96, 3461},
+ { 98, 3742},{ 104, 4032},{ 116, 4306},{ 136, 4578},
+ { 158, 4839},{ 185, 5123},{ 217, 5383},{ 250, 5642},
+ { 279, 5910},{ 306, 6169},{ 333, 6502},{ 350, 6522}
}
}
},
{
{
- /*Y' qi=59 INTRA*/
- {
- { 142, 112},{ 1259, 1100},{ 2552, 1711},{ 3815, 1933},
- { 4955, 1987},{ 5983, 2068},{ 6949, 2165},{ 7832, 2263},
- { 8645, 2359},{ 9392, 2454},{10066, 2536},{10643, 2589},
- {11174, 2636},{11696, 2693},{12230, 2758},{12752, 2826},
- {13239, 2883},{13721, 2926},{14139, 2959},{14479, 2978},
- {14811, 2993},{15166, 3020},{15532, 3039},{16000, 3062}
+ /*Y' qi=27 INTRA*/
+ {
+ { 10, 85},{ 280, 1349},{ 278, 815},{ 497, 1699},
+ { 600, 1569},{ 744, 1944},{ 894, 2114},{ 1040, 2292},
+ { 1216, 2484},{ 1485, 2816},{ 1778, 3065},{ 1990, 3243},
+ { 2199, 3381},{ 2326, 3515},{ 2370, 3422},{ 2512, 3581},
+ { 2548, 3526},{ 2656, 3615},{ 2803, 3679},{ 2946, 3766},
+ { 3023, 3824},{ 3179, 3908},{ 3374, 4035},{ 3377, 4030}
},
- /*Y' qi=59 INTER*/
- {
- { 8, 25},{ 211, 1289},{ 1394, 2144},{ 3421, 2580},
- { 5611, 2689},{ 7316, 2701},{ 8643, 2717},{ 9762, 2734},
- {10735, 2750},{11587, 2763},{12353, 2775},{13056, 2785},
- {13693, 2793},{14288, 2805},{14843, 2814},{15361, 2821},
- {15857, 2827},{16328, 2831},{16763, 2834},{17171, 2838},
- {17568, 2840},{17941, 2842},{18285, 2843},{18586, 2839}
+ /*Y' qi=27 INTER*/
+ {
+ { -2, 172},{ 31, 1347},{ 117, 2488},{ 245, 3651},
+ { 448, 4719},{ 668, 5679},{ 918, 6524},{ 1204, 7255},
+ { 1557, 7848},{ 1998, 8281},{ 2511, 8531},{ 3055, 8642},
+ { 3582, 8648},{ 4062, 8611},{ 4482, 8582},{ 4845, 8560},
+ { 5140, 8560},{ 5423, 8581},{ 5645, 8596},{ 5855, 8586},
+ { 6061, 8608},{ 6211, 8558},{ 6402, 8583},{ 6472, 8575}
}
},
{
- /*Cb qi=59 INTRA*/
- {
- { 17, 3},{ 224, 363},{ 441, 696},{ 689, 982},
- { 945, 1222},{ 1204, 1416},{ 1474, 1571},{ 1751, 1695},
- { 2001, 1816},{ 2228, 1941},{ 2453, 2055},{ 2693, 2147},
- { 2924, 2227},{ 3125, 2321},{ 3321, 2416},{ 3510, 2520},
- { 3676, 2616},{ 3839, 2699},{ 4008, 2778},{ 4193, 2842},
- { 4371, 2898},{ 4535, 2965},{ 4710, 3023},{ 4921, 3068}
+ /*Cb qi=27 INTRA*/
+ {
+ { 47, 49},{ 35, 580},{ 64, 778},{ 69, 1071},
+ { 98, 1289},{ 186, 1556},{ 177, 1654},{ 197, 1736},
+ { 211, 1373},{ 284, 1742},{ 321, 1840},{ 344, 2024},
+ { 321, 1969},{ 386, 2254},{ 397, 2281},{ 425, 2320},
+ { 396, 2088},{ 448, 2284},{ 462, 2213},{ 482, 2274},
+ { 410, 1894},{ 513, 2310},{ 546, 2332},{ 549, 2334}
},
- /*Cb qi=59 INTER*/
- {
- { 95, -5},{ 111, 343},{ 112, 664},{ 157, 995},
- { 258, 1302},{ 429, 1569},{ 691, 1790},{ 1017, 1977},
- { 1387, 2148},{ 1832, 2294},{ 2368, 2401},{ 2961, 2472},
- { 3553, 2518},{ 4133, 2545},{ 4688, 2557},{ 5198, 2563},
- { 5663, 2574},{ 6100, 2590},{ 6511, 2608},{ 6898, 2621},
- { 7274, 2634},{ 7631, 2655},{ 7984, 2669},{ 8361, 2669}
+ /*Cb qi=27 INTER*/
+ {
+ { 11, 145},{ 5, 539},{ 11, 771},{ 0, 1033},
+ { 9, 1334},{ 44, 1644},{ 70, 1934},{ 87, 2227},
+ { 96, 2508},{ 113, 2812},{ 139, 3085},{ 174, 3352},
+ { 216, 3614},{ 261, 3873},{ 305, 4123},{ 349, 4372},
+ { 396, 4611},{ 442, 4853},{ 493, 5088},{ 543, 5313},
+ { 600, 5537},{ 662, 5752},{ 737, 6018},{ 775, 6037}
}
},
{
- /*Cr qi=59 INTRA*/
- {
- { 31, 8},{ 240, 379},{ 480, 706},{ 748, 978},
- { 993, 1208},{ 1250, 1394},{ 1519, 1543},{ 1779, 1674},
- { 2047, 1792},{ 2307, 1904},{ 2552, 2013},{ 2780, 2116},
- { 2973, 2216},{ 3165, 2309},{ 3362, 2383},{ 3528, 2444},
- { 3677, 2499},{ 3841, 2566},{ 3995, 2646},{ 4139, 2720},
- { 4324, 2793},{ 4504, 2867},{ 4658, 2939},{ 4806, 2975}
+ /*Cr qi=27 INTRA*/
+ {
+ { 49, 52},{ 57, 570},{ 61, 762},{ 44, 1048},
+ { 80, 1291},{ 196, 1513},{ 224, 1522},{ 242, 1532},
+ { 213, 1293},{ 260, 1639},{ 253, 1691},{ 291, 1915},
+ { 294, 1897},{ 367, 2178},{ 395, 2258},{ 432, 2310},
+ { 407, 2105},{ 503, 2369},{ 492, 2293},{ 552, 2421},
+ { 496, 2099},{ 598, 2549},{ 624, 2531},{ 627, 2532}
},
- /*Cr qi=59 INTER*/
- {
- { 89, -3},{ 98, 352},{ 103, 674},{ 156, 1002},
- { 268, 1300},{ 441, 1562},{ 673, 1801},{ 980, 2010},
- { 1385, 2175},{ 1868, 2301},{ 2401, 2402},{ 2984, 2474},
- { 3591, 2520},{ 4179, 2545},{ 4729, 2555},{ 5232, 2553},
- { 5679, 2545},{ 6081, 2530},{ 6447, 2510},{ 6791, 2496},
- { 7101, 2487},{ 7393, 2489},{ 7684, 2499},{ 7950, 2501}
+ /*Cr qi=27 INTER*/
+ {
+ { 10, 147},{ 4, 538},{ 11, 769},{ 0, 1022},
+ { 9, 1318},{ 51, 1635},{ 80, 1925},{ 97, 2214},
+ { 101, 2493},{ 115, 2805},{ 143, 3083},{ 182, 3361},
+ { 226, 3625},{ 270, 3898},{ 319, 4157},{ 366, 4405},
+ { 418, 4649},{ 467, 4904},{ 509, 5157},{ 548, 5412},
+ { 589, 5659},{ 636, 5909},{ 683, 6208},{ 710, 6190}
}
}
},
{
{
- /*Y' qi=60 INTRA*/
- {
- { 92, 116},{ 1361, 1085},{ 2746, 1686},{ 4050, 1895},
- { 5209, 1939},{ 6244, 2012},{ 7213, 2103},{ 8105, 2197},
- { 8928, 2290},{ 9685, 2381},{10371, 2460},{10952, 2511},
- {11487, 2556},{12026, 2611},{12574, 2674},{13102, 2739},
- {13597, 2793},{14092, 2831},{14523, 2862},{14862, 2881},
- {15198, 2897},{15568, 2923},{15949, 2941},{16416, 2964}
+ /*Y' qi=36 INTRA*/
+ {
+ { 86, 252},{ 345, 662},{ 476, 1143},{ 698, 1169},
+ { 894, 1457},{ 1218, 1728},{ 1465, 1849},{ 1731, 2019},
+ { 2183, 2298},{ 2666, 2511},{ 3116, 2731},{ 3371, 2813},
+ { 3621, 2923},{ 3675, 2949},{ 3710, 2921},{ 3740, 2896},
+ { 3746, 2895},{ 3886, 2978},{ 4069, 2991},{ 4229, 3016},
+ { 4338, 3102},{ 4530, 3124},{ 4751, 3248},{ 4753, 3244}
},
- /*Y' qi=60 INTER*/
- {
- { 4, 30},{ 215, 1287},{ 1547, 2104},{ 3729, 2491},
- { 5973, 2568},{ 7672, 2577},{ 9001, 2591},{10123, 2606},
- {11094, 2620},{11943, 2632},{12709, 2643},{13409, 2652},
- {14044, 2660},{14641, 2669},{15193, 2677},{15709, 2684},
- {16201, 2689},{16675, 2693},{17118, 2696},{17522, 2701},
- {17920, 2704},{18293, 2706},{18620, 2702},{18923, 2700}
+ /*Y' qi=36 INTER*/
+ {
+ { 0, 208},{ 73, 1293},{ 248, 2449},{ 616, 3461},
+ { 1061, 4329},{ 1601, 4986},{ 2189, 5447},{ 2875, 5723},
+ { 3620, 5844},{ 4328, 5879},{ 4954, 5880},{ 5490, 5890},
+ { 5934, 5901},{ 6353, 5926},{ 6706, 5924},{ 7036, 5930},
+ { 7338, 5938},{ 7600, 5930},{ 7870, 5939},{ 8065, 5921},
+ { 8318, 5914},{ 8451, 5912},{ 8648, 5923},{ 8734, 5926}
}
},
{
- /*Cb qi=60 INTRA*/
- {
- { 18, 3},{ 227, 362},{ 447, 694},{ 708, 974},
- { 981, 1207},{ 1252, 1397},{ 1532, 1547},{ 1822, 1663},
- { 2082, 1780},{ 2316, 1903},{ 2548, 2013},{ 2794, 2101},
- { 3029, 2178},{ 3242, 2266},{ 3445, 2360},{ 3638, 2459},
- { 3816, 2547},{ 3980, 2628},{ 4146, 2708},{ 4344, 2766},
- { 4546, 2812},{ 4725, 2872},{ 4880, 2930},{ 5054, 2966}
+ /*Cb qi=36 INTRA*/
+ {
+ { 52, 54},{ 52, 575},{ 103, 776},{ 185, 1072},
+ { 172, 1069},{ 211, 1302},{ 217, 1413},{ 285, 1586},
+ { 330, 1463},{ 453, 1694},{ 500, 1741},{ 545, 1852},
+ { 501, 1650},{ 584, 1874},{ 587, 1856},{ 638, 1919},
+ { 581, 1742},{ 670, 1953},{ 688, 1934},{ 731, 2030},
+ { 637, 1794},{ 806, 2123},{ 840, 2091},{ 843, 2091}
},
- /*Cb qi=60 INTER*/
- {
- { 97, -4},{ 112, 343},{ 114, 664},{ 162, 993},
- { 273, 1294},{ 472, 1553},{ 774, 1762},{ 1138, 1939},
- { 1543, 2102},{ 2034, 2236},{ 2620, 2329},{ 3244, 2389},
- { 3860, 2423},{ 4443, 2440},{ 4997, 2449},{ 5502, 2455},
- { 5962, 2458},{ 6413, 2466},{ 6836, 2485},{ 7217, 2506},
- { 7592, 2518},{ 7957, 2533},{ 8291, 2543},{ 8574, 2545}
+ /*Cb qi=36 INTER*/
+ {
+ { 19, 142},{ 17, 534},{ 6, 772},{ 44, 1023},
+ { 82, 1296},{ 94, 1614},{ 117, 1903},{ 158, 2187},
+ { 218, 2450},{ 285, 2703},{ 352, 2943},{ 421, 3181},
+ { 489, 3415},{ 564, 3644},{ 647, 3861},{ 748, 4060},
+ { 861, 4246},{ 993, 4419},{ 1132, 4576},{ 1282, 4744},
+ { 1445, 4894},{ 1600, 5034},{ 1782, 5211},{ 1837, 5200}
}
},
{
- /*Cr qi=60 INTRA*/
- {
- { 32, 8},{ 243, 379},{ 488, 702},{ 771, 968},
- { 1030, 1192},{ 1300, 1373},{ 1581, 1517},{ 1854, 1643},
- { 2127, 1757},{ 2393, 1864},{ 2645, 1968},{ 2879, 2068},
- { 3078, 2166},{ 3277, 2256},{ 3484, 2325},{ 3660, 2381},
- { 3808, 2433},{ 3970, 2496},{ 4138, 2571},{ 4288, 2643},
- { 4475, 2710},{ 4655, 2778},{ 4810, 2843},{ 4959, 2879}
+ /*Cr qi=36 INTRA*/
+ {
+ { 62, 55},{ 90, 561},{ 56, 767},{ 148, 1014},
+ { 207, 981},{ 258, 1216},{ 273, 1253},{ 326, 1392},
+ { 338, 1383},{ 417, 1613},{ 443, 1629},{ 497, 1734},
+ { 466, 1525},{ 561, 1778},{ 577, 1787},{ 631, 1892},
+ { 591, 1706},{ 715, 1980},{ 730, 1958},{ 822, 2113},
+ { 755, 1935},{ 928, 2228},{ 935, 2205},{ 938, 2205}
},
- /*Cr qi=60 INTER*/
- {
- { 86, -2},{ 99, 352},{ 103, 673},{ 160, 998},
- { 284, 1292},{ 484, 1546},{ 753, 1774},{ 1100, 1973},
- { 1546, 2129},{ 2072, 2246},{ 2652, 2334},{ 3279, 2392},
- { 3911, 2425},{ 4504, 2440},{ 5044, 2443},{ 5536, 2440},
- { 5979, 2430},{ 6381, 2413},{ 6735, 2397},{ 7062, 2382},
- { 7383, 2376},{ 7680, 2375},{ 7962, 2373},{ 8203, 2379}
+ /*Cr qi=36 INTER*/
+ {
+ { 14, 145},{ 16, 535},{ 5, 772},{ 44, 1017},
+ { 91, 1296},{ 100, 1605},{ 122, 1891},{ 163, 2174},
+ { 225, 2443},{ 294, 2707},{ 362, 2962},{ 436, 3210},
+ { 518, 3437},{ 607, 3664},{ 702, 3876},{ 795, 4094},
+ { 886, 4310},{ 980, 4538},{ 1089, 4749},{ 1216, 4927},
+ { 1357, 5116},{ 1506, 5247},{ 1758, 5338},{ 1787, 5306}
}
}
},
{
{
- /*Y' qi=61 INTRA*/
- {
- { 54, 121},{ 1477, 1069},{ 3061, 1638},{ 4465, 1808},
- { 5649, 1827},{ 6710, 1884},{ 7716, 1958},{ 8648, 2037},
- { 9514, 2116},{10311, 2192},{11033, 2261},{11641, 2305},
- {12202, 2342},{12771, 2387},{13356, 2440},{13924, 2493},
- {14444, 2541},{14951, 2576},{15409, 2600},{15779, 2615},
- {16131, 2626},{16521, 2648},{16921, 2663},{17409, 2694}
+ /*Y' qi=45 INTRA*/
+ {
+ { 185, 246},{ 513, 647},{ 883, 891},{ 1313, 1142},
+ { 1760, 1351},{ 2368, 1595},{ 2828, 1718},{ 3097, 1780},
+ { 3762, 1951},{ 4454, 2121},{ 4986, 2227},{ 5281, 2281},
+ { 5477, 2299},{ 5431, 2288},{ 5425, 2283},{ 5439, 2290},
+ { 5324, 2249},{ 5509, 2279},{ 5703, 2321},{ 5896, 2348},
+ { 6049, 2370},{ 6253, 2425},{ 6415, 2432},{ 6419, 2430}
},
- /*Y' qi=61 INTER*/
- {
- { -1, 32},{ 216, 1286},{ 1806, 2036},{ 4279, 2327},
- { 6629, 2352},{ 8347, 2352},{ 9707, 2357},{10860, 2364},
- {11857, 2372},{12726, 2377},{13508, 2382},{14225, 2387},
- {14877, 2392},{15484, 2398},{16048, 2401},{16581, 2405},
- {17092, 2409},{17573, 2409},{18016, 2410},{18427, 2413},
- {18829, 2415},{19221, 2415},{19578, 2415},{19980, 2413}
+ /*Y' qi=45 INTER*/
+ {
+ { 6, 215},{ 152, 1261},{ 691, 2314},{ 1538, 3095},
+ { 2505, 3632},{ 3475, 3935},{ 4355, 4084},{ 5209, 4139},
+ { 5985, 4162},{ 6644, 4185},{ 7235, 4190},{ 7768, 4196},
+ { 8266, 4200},{ 8736, 4210},{ 9143, 4207},{ 9511, 4215},
+ { 9828, 4209},{10112, 4224},{10374, 4226},{10642, 4232},
+ {10842, 4219},{10971, 4208},{11200, 4211},{11299, 4216}
}
},
{
- /*Cb qi=61 INTRA*/
- {
- { 19, 3},{ 231, 362},{ 456, 693},{ 733, 965},
- { 1032, 1188},{ 1330, 1369},{ 1637, 1508},{ 1956, 1612},
- { 2241, 1718},{ 2496, 1832},{ 2750, 1932},{ 3019, 2007},
- { 3274, 2074},{ 3505, 2154},{ 3725, 2236},{ 3943, 2323},
- { 4138, 2403},{ 4323, 2476},{ 4505, 2543},{ 4706, 2592},
- { 4909, 2630},{ 5109, 2675},{ 5292, 2724},{ 5495, 2768}
+ /*Cb qi=45 INTRA*/
+ {
+ { 58, 71},{ 66, 548},{ 155, 762},{ 213, 944},
+ { 192, 731},{ 324, 1147},{ 401, 1366},{ 481, 1480},
+ { 508, 1238},{ 657, 1522},{ 727, 1563},{ 794, 1611},
+ { 761, 1470},{ 885, 1710},{ 893, 1700},{ 958, 1760},
+ { 893, 1543},{ 985, 1719},{ 1014, 1732},{ 1082, 1784},
+ { 963, 1519},{ 1152, 1800},{ 1221, 1830},{ 1226, 1830}
},
- /*Cb qi=61 INTER*/
- {
- { 91, -2},{ 111, 344},{ 114, 663},{ 166, 989},
- { 291, 1285},{ 522, 1534},{ 875, 1729},{ 1302, 1889},
- { 1786, 2031},{ 2368, 2141},{ 3042, 2207},{ 3734, 2243},
- { 4388, 2259},{ 4982, 2264},{ 5533, 2265},{ 6043, 2262},
- { 6524, 2264},{ 6982, 2274},{ 7422, 2283},{ 7831, 2295},
- { 8198, 2308},{ 8593, 2319},{ 8965, 2329},{ 9258, 2340}
+ /*Cb qi=45 INTER*/
+ {
+ { 35, 135},{ 12, 532},{ 54, 769},{ 106, 1007},
+ { 127, 1258},{ 198, 1565},{ 289, 1832},{ 398, 2082},
+ { 520, 2302},{ 653, 2511},{ 800, 2705},{ 956, 2897},
+ { 1143, 3064},{ 1358, 3220},{ 1623, 3335},{ 1913, 3444},
+ { 2198, 3534},{ 2502, 3626},{ 2787, 3711},{ 3114, 3783},
+ { 3454, 3831},{ 3711, 3871},{ 4163, 3901},{ 4221, 3890}
}
},
{
- /*Cr qi=61 INTRA*/
- {
- { 33, 9},{ 245, 378},{ 497, 699},{ 801, 958},
- { 1087, 1171},{ 1384, 1342},{ 1692, 1474},{ 1992, 1589},
- { 2290, 1692},{ 2576, 1789},{ 2852, 1884},{ 3109, 1973},
- { 3324, 2061},{ 3544, 2142},{ 3763, 2199},{ 3945, 2244},
- { 4103, 2292},{ 4283, 2349},{ 4469, 2413},{ 4635, 2476},
- { 4836, 2534},{ 5038, 2592},{ 5210, 2649},{ 5358, 2682}
+ /*Cr qi=45 INTRA*/
+ {
+ { 93, 68},{ 72, 541},{ 154, 769},{ 239, 848},
+ { 214, 623},{ 377, 1060},{ 437, 1200},{ 514, 1280},
+ { 512, 1160},{ 625, 1453},{ 657, 1470},{ 718, 1516},
+ { 692, 1331},{ 831, 1617},{ 875, 1609},{ 944, 1678},
+ { 886, 1469},{ 1061, 1699},{ 1082, 1714},{ 1226, 1823},
+ { 1113, 1581},{ 1324, 1872},{ 1370, 1925},{ 1374, 1924}
},
- /*Cr qi=61 INTER*/
- {
- { 82, 0},{ 97, 353},{ 104, 672},{ 165, 995},
- { 303, 1284},{ 532, 1529},{ 852, 1742},{ 1273, 1921},
- { 1798, 2057},{ 2409, 2154},{ 3090, 2212},{ 3794, 2240},
- { 4460, 2251},{ 5057, 2249},{ 5596, 2249},{ 6085, 2245},
- { 6519, 2234},{ 6908, 2220},{ 7269, 2203},{ 7618, 2196},
- { 7949, 2198},{ 8269, 2195},{ 8554, 2196},{ 8928, 2217}
+ /*Cr qi=45 INTER*/
+ {
+ { 31, 140},{ 13, 533},{ 52, 770},{ 109, 1000},
+ { 134, 1253},{ 201, 1555},{ 298, 1821},{ 411, 2076},
+ { 525, 2314},{ 659, 2545},{ 828, 2747},{ 1019, 2918},
+ { 1205, 3082},{ 1405, 3266},{ 1609, 3443},{ 1847, 3606},
+ { 2085, 3730},{ 2404, 3835},{ 2709, 3876},{ 3049, 3886},
+ { 3381, 3821},{ 3708, 3780},{ 4026, 3663},{ 4043, 3646}
}
}
},
{
{
- /*Y' qi=62 INTRA*/
- {
- { 29, 124},{ 1527, 1067},{ 3221, 1618},{ 4703, 1751},
- { 5909, 1744},{ 7001, 1779},{ 8057, 1829},{ 9049, 1885},
- { 9968, 1943},{10813, 1999},{11572, 2050},{12206, 2082},
- {12801, 2107},{13402, 2140},{14020, 2180},{14625, 2223},
- {15179, 2260},{15718, 2288},{16196, 2305},{16581, 2313},
- {16963, 2324},{17382, 2341},{17800, 2351},{18318, 2376}
+ /*Y' qi=54 INTRA*/
+ {
+ { 316, 203},{ 720, 585},{ 1596, 1077},{ 2316, 1289},
+ { 2687, 1439},{ 3133, 1593},{ 3495, 1706},{ 3836, 1775},
+ { 4249, 1892},{ 4804, 2031},{ 5320, 2139},{ 5617, 2203},
+ { 5726, 2199},{ 5726, 2176},{ 5682, 2146},{ 5677, 2127},
+ { 5717, 2124},{ 5707, 2129},{ 5853, 2148},{ 6110, 2180},
+ { 6454, 2247},{ 6714, 2287},{ 6845, 2304},{ 6854, 2303}
},
- /*Y' qi=62 INTER*/
- {
- { -8, 36},{ 218, 1284},{ 2073, 1965},{ 4814, 2159},
- { 7237, 2138},{ 8979, 2124},{10378, 2115},{11570, 2109},
- {12601, 2106},{13503, 2103},{14320, 2103},{15064, 2103},
- {15746, 2103},{16384, 2104},{16975, 2105},{17534, 2105},
- {18062, 2106},{18564, 2107},{19035, 2106},{19471, 2107},
- {19890, 2107},{20288, 2107},{20651, 2107},{21012, 2108}
+ /*Y' qi=54 INTER*/
+ {
+ { -48, 217},{ 314, 1261},{ 1450, 2126},{ 2761, 2728},
+ { 4275, 3012},{ 5408, 3167},{ 6305, 3245},{ 7165, 3290},
+ { 7966, 3325},{ 8698, 3359},{ 9352, 3377},{ 9907, 3391},
+ {10389, 3390},{10856, 3395},{11170, 3385},{11530, 3385},
+ {11780, 3362},{12018, 3362},{12266, 3361},{12443, 3339},
+ {12683, 3342},{12713, 3317},{12967, 3325},{13082, 3332}
}
},
{
- /*Cb qi=62 INTRA*/
- {
- { 21, 3},{ 283, 360},{ 565, 683},{ 907, 938},
- { 1269, 1143},{ 1611, 1311},{ 1949, 1441},{ 2290, 1535},
- { 2596, 1632},{ 2877, 1738},{ 3162, 1828},{ 3458, 1893},
- { 3745, 1948},{ 4011, 2016},{ 4253, 2089},{ 4506, 2164},
- { 4734, 2233},{ 4943, 2294},{ 5162, 2353},{ 5381, 2393},
- { 5593, 2420},{ 5807, 2454},{ 6003, 2496},{ 6210, 2543}
+ /*Cb qi=54 INTRA*/
+ {
+ { 94, 73},{ 83, 557},{ 152, 818},{ 304, 919},
+ { 341, 819},{ 506, 1128},{ 593, 1281},{ 700, 1389},
+ { 714, 1225},{ 907, 1502},{ 981, 1549},{ 1062, 1641},
+ { 1032, 1523},{ 1170, 1710},{ 1217, 1727},{ 1258, 1714},
+ { 1216, 1575},{ 1309, 1682},{ 1331, 1656},{ 1393, 1712},
+ { 1247, 1456},{ 1469, 1728},{ 1530, 1711},{ 1532, 1711}
},
- /*Cb qi=62 INTER*/
- {
- { 91, -1},{ 110, 344},{ 113, 663},{ 169, 987},
- { 306, 1279},{ 562, 1519},{ 961, 1701},{ 1450, 1845},
- { 2013, 1967},{ 2686, 2053},{ 3437, 2095},{ 4171, 2109},
- { 4841, 2109},{ 5441, 2105},{ 6002, 2097},{ 6542, 2089},
- { 7028, 2087},{ 7491, 2088},{ 7949, 2090},{ 8377, 2089},
- { 8789, 2095},{ 9195, 2103},{ 9569, 2104},{ 9937, 2102}
+ /*Cb qi=54 INTER*/
+ {
+ { 33, 133},{ 12, 532},{ 70, 770},{ 171, 996},
+ { 279, 1233},{ 427, 1503},{ 600, 1736},{ 824, 1939},
+ { 1101, 2097},{ 1411, 2237},{ 1735, 2374},{ 2097, 2493},
+ { 2486, 2606},{ 2916, 2691},{ 3297, 2771},{ 3715, 2826},
+ { 4088, 2855},{ 4460, 2886},{ 4849, 2911},{ 5198, 2932},
+ { 5489, 2940},{ 5875, 2981},{ 6208, 3017},{ 6270, 3012}
}
},
{
- /*Cr qi=62 INTRA*/
- {
- { 38, 8},{ 308, 374},{ 619, 685},{ 984, 925},
- { 1326, 1126},{ 1662, 1285},{ 1999, 1407},{ 2328, 1512},
- { 2659, 1604},{ 2976, 1691},{ 3285, 1774},{ 3570, 1853},
- { 3815, 1931},{ 4068, 1998},{ 4304, 2044},{ 4491, 2082},
- { 4666, 2124},{ 4870, 2174},{ 5078, 2231},{ 5262, 2285},
- { 5480, 2335},{ 5703, 2378},{ 5905, 2423},{ 6075, 2454}
+ /*Cr qi=54 INTRA*/
+ {
+ { 103, 63},{ 83, 580},{ 258, 796},{ 301, 802},
+ { 361, 675},{ 538, 1001},{ 625, 1097},{ 713, 1171},
+ { 699, 1103},{ 868, 1380},{ 915, 1400},{ 970, 1491},
+ { 923, 1365},{ 1070, 1603},{ 1154, 1655},{ 1206, 1677},
+ { 1157, 1541},{ 1366, 1736},{ 1391, 1723},{ 1506, 1797},
+ { 1388, 1556},{ 1616, 1828},{ 1655, 1797},{ 1658, 1796}
},
- /*Cr qi=62 INTER*/
- {
- { 79, 1},{ 95, 353},{ 102, 671},{ 169, 992},
- { 318, 1277},{ 569, 1515},{ 936, 1716},{ 1428, 1876},
- { 2034, 1993},{ 2738, 2067},{ 3511, 2095},{ 4268, 2094},
- { 4943, 2087},{ 5543, 2079},{ 6074, 2074},{ 6552, 2069},
- { 6985, 2057},{ 7366, 2043},{ 7728, 2030},{ 8086, 2021},
- { 8423, 2017},{ 8752, 2016},{ 9057, 2014},{ 9376, 2008}
+ /*Cr qi=54 INTER*/
+ {
+ { 30, 138},{ 14, 532},{ 63, 771},{ 176, 990},
+ { 299, 1226},{ 438, 1496},{ 606, 1735},{ 814, 1950},
+ { 1089, 2127},{ 1417, 2281},{ 1761, 2421},{ 2104, 2571},
+ { 2467, 2701},{ 2881, 2827},{ 3303, 2900},{ 3735, 2917},
+ { 4183, 2913},{ 4529, 2882},{ 4915, 2844},{ 5168, 2796},
+ { 5410, 2763},{ 5562, 2753},{ 5815, 2764},{ 5832, 2755}
}
}
},
@@ -3964,61 +967,61 @@ oc_mode_rd OC_MODE_RD[64][3][2][OC_SAD_BINS]={
{
/*Y' qi=63 INTRA*/
{
- { -59, 134},{ 1734, 1036},{ 3743, 1521},{ 5309, 1618},
- { 6520, 1597},{ 7664, 1609},{ 8809, 1630},{ 9894, 1657},
- {10907, 1687},{11838, 1717},{12673, 1744},{13379, 1758},
- {14038, 1767},{14698, 1784},{15379, 1806},{16062, 1831},
- {16694, 1852},{17300, 1867},{17827, 1878},{18250, 1881},
- {18702, 1884},{19199, 1892},{19665, 1896},{20273, 1908}
+ { 421, 194},{ 1272, 564},{ 3016, 943},{ 3831, 1079},
+ { 4282, 1174},{ 4799, 1290},{ 5166, 1348},{ 5259, 1350},
+ { 5720, 1426},{ 6501, 1539},{ 7048, 1606},{ 7328, 1642},
+ { 7374, 1622},{ 7349, 1612},{ 7192, 1578},{ 7207, 1571},
+ { 7161, 1555},{ 7259, 1573},{ 7432, 1592},{ 7710, 1613},
+ { 8167, 1672},{ 8425, 1697},{ 8597, 1710},{ 8602, 1710}
},
/*Y' qi=63 INTER*/
{
- { -7, 33},{ 209, 1285},{ 2309, 1904},{ 5274, 2025},
- { 7801, 1966},{ 9637, 1924},{11126, 1892},{12403, 1868},
- {13515, 1849},{14491, 1834},{15380, 1822},{16197, 1814},
- {16944, 1806},{17645, 1799},{18303, 1794},{18916, 1789},
- {19494, 1785},{20056, 1782},{20568, 1779},{21047, 1776},
- {21508, 1775},{21925, 1772},{22327, 1770},{22678, 1771}
+ { -584, 286},{ 1231, 1186},{ 3939, 1663},{ 6096, 1865},
+ { 7849, 1929},{ 8934, 1995},{ 9962, 2039},{11038, 2078},
+ {12016, 2092},{12889, 2100},{13617, 2096},{14221, 2089},
+ {14743, 2083},{15240, 2081},{15619, 2074},{15992, 2065},
+ {16314, 2065},{16529, 2059},{16822, 2056},{17041, 2049},
+ {17321, 2052},{17408, 2043},{17670, 2051},{17801, 2053}
}
},
{
/*Cb qi=63 INTRA*/
{
- { 20, 3},{ 294, 357},{ 608, 673},{ 1047, 908},
- { 1501, 1090},{ 1898, 1240},{ 2275, 1353},{ 2654, 1427},
- { 3014, 1502},{ 3366, 1579},{ 3726, 1637},{ 4084, 1674},
- { 4425, 1703},{ 4752, 1743},{ 5058, 1791},{ 5377, 1838},
- { 5676, 1877},{ 5946, 1912},{ 6213, 1945},{ 6458, 1969},
- { 6704, 1982},{ 6969, 1997},{ 7210, 2017},{ 7439, 2037}
+ { 154, 55},{ 280, 582},{ 507, 731},{ 788, 853},
+ { 763, 738},{ 1141, 1008},{ 1323, 1090},{ 1540, 1220},
+ { 1487, 1089},{ 1861, 1322},{ 1983, 1347},{ 2145, 1425},
+ { 2047, 1317},{ 2334, 1475},{ 2352, 1413},{ 2458, 1467},
+ { 2243, 1270},{ 2464, 1413},{ 2423, 1335},{ 2506, 1385},
+ { 2182, 1180},{ 2565, 1376},{ 2555, 1321},{ 2557, 1321}
},
/*Cb qi=63 INTER*/
{
- { 86, 1},{ 108, 345},{ 111, 663},{ 168, 985},
- { 307, 1276},{ 577, 1513},{ 1007, 1688},{ 1550, 1819},
- { 2189, 1921},{ 2938, 1981},{ 3744, 2002},{ 4512, 2002},
- { 5199, 1996},{ 5824, 1986},{ 6419, 1971},{ 6978, 1954},
- { 7507, 1940},{ 8015, 1932},{ 8502, 1928},{ 8978, 1920},
- { 9410, 1915},{ 9842, 1910},{10262, 1901},{10634, 1896}
+ { 34, 133},{ 6, 531},{ 139, 767},{ 344, 975},
+ { 608, 1180},{ 1048, 1367},{ 1651, 1495},{ 2376, 1572},
+ { 3103, 1609},{ 3752, 1646},{ 4373, 1680},{ 4980, 1718},
+ { 5540, 1744},{ 6023, 1764},{ 6431, 1766},{ 6800, 1769},
+ { 7149, 1775},{ 7529, 1777},{ 7920, 1817},{ 8198, 1808},
+ { 8691, 1848},{ 8965, 1845},{ 9372, 1865},{ 9459, 1863}
}
},
{
/*Cr qi=63 INTRA*/
{
- { 38, 7},{ 324, 367},{ 677, 670},{ 1136, 892},
- { 1562, 1070},{ 1951, 1209},{ 2326, 1313},{ 2694, 1399},
- { 3074, 1471},{ 3460, 1531},{ 3850, 1575},{ 4214, 1622},
- { 4522, 1679},{ 4819, 1723},{ 5089, 1749},{ 5315, 1769},
- { 5530, 1792},{ 5756, 1825},{ 6006, 1860},{ 6244, 1889},
- { 6514, 1924},{ 6792, 1946},{ 7026, 1962},{ 7191, 1971}
+ { 121, 59},{ 392, 570},{ 609, 654},{ 800, 760},
+ { 720, 598},{ 1192, 892},{ 1298, 897},{ 1470, 1027},
+ { 1411, 962},{ 1761, 1184},{ 1826, 1197},{ 1981, 1308},
+ { 1854, 1198},{ 2229, 1427},{ 2269, 1365},{ 2428, 1453},
+ { 2217, 1265},{ 2558, 1435},{ 2541, 1356},{ 2660, 1417},
+ { 2337, 1199},{ 2688, 1382},{ 2603, 1301},{ 2605, 1300}
},
/*Cr qi=63 INTER*/
{
- { 80, 2},{ 95, 354},{ 101, 671},{ 167, 990},
- { 321, 1274},{ 585, 1509},{ 984, 1702},{ 1534, 1849},
- { 2217, 1947},{ 3005, 1995},{ 3839, 1999},{ 4619, 1986},
- { 5310, 1973},{ 5933, 1961},{ 6486, 1952},{ 6988, 1942},
- { 7435, 1927},{ 7817, 1911},{ 8198, 1900},{ 8552, 1895},
- { 8881, 1890},{ 9253, 1883},{ 9598, 1876},{ 9923, 1859}
+ { 31, 137},{ 10, 531},{ 136, 768},{ 360, 971},
+ { 638, 1166},{ 1029, 1373},{ 1604, 1519},{ 2351, 1595},
+ { 3129, 1640},{ 3861, 1691},{ 4491, 1751},{ 5101, 1783},
+ { 5635, 1784},{ 6136, 1779},{ 6550, 1763},{ 6905, 1746},
+ { 7172, 1726},{ 7495, 1732},{ 7738, 1735},{ 7949, 1735},
+ { 8211, 1744},{ 8424, 1740},{ 8779, 1764},{ 8812, 1760}
}
}
}
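The table data above is indexed as OC_MODE_RD[qi][plane][intra|inter][SAD bin] per the hunk header, with the comment labels (e.g. "Y' qi=63 INTER") naming each inner block. Below is a minimal lookup sketch; the two-field struct and its rate/RMSE field names are an assumption about the pairs printed above, not something taken from this diff.

#include <stdio.h>

/* Assumed two-field layout for the pairs printed above; illustrative only. */
typedef struct{short rate;short rmse;}mode_rd_sketch;

int main(void){
  /* Stand-in 1x1x2x2 table holding the first two "Y' qi=63" entries above:
     row 0 = INTRA, row 1 = INTER, matching the order of the comment labels. */
  mode_rd_sketch tbl[1][1][2][2]={
    {
      {
        {{ 421, 194},{1272, 564}},  /* INTRA */
        {{-584, 286},{1231,1186}}   /* INTER */
      }
    }
  };
  int qti=1;  /* 0=INTRA, 1=INTER */
  int bin=0;
  printf("rate=%d rmse=%d\n",tbl[0][0][qti][bin].rate,tbl[0][0][qti][bin].rmse);
  return 0;
}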
diff --git a/thirdparty/libtheora/ocintrin.h b/thirdparty/libtheora/ocintrin.h
index d49ebb2159..b200ceafce 100644
--- a/thirdparty/libtheora/ocintrin.h
+++ b/thirdparty/libtheora/ocintrin.h
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: ocintrin.h 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
diff --git a/thirdparty/libtheora/patches/theora.git-0ae66d565e6bead8604d312bc1a4e9dccf245c88.patch b/thirdparty/libtheora/patches/theora.git-0ae66d565e6bead8604d312bc1a4e9dccf245c88.patch
deleted file mode 100644
index 1b9c8e20be..0000000000
--- a/thirdparty/libtheora/patches/theora.git-0ae66d565e6bead8604d312bc1a4e9dccf245c88.patch
+++ /dev/null
@@ -1,38 +0,0 @@
-From 0ae66d565e6bead8604d312bc1a4e9dccf245c88 Mon Sep 17 00:00:00 2001
-From: Tim Terriberry <tterribe@xiph.org>
-Date: Tue, 8 May 2012 02:51:57 +0000
-Subject: [PATCH] Fix pp_sharp_mod calculation.
-
-This was broken when the dequant_tables indexing changed in commit
- r16102, but it only affected post-processing quality, so we never
- noticed.
-With gcc 4.8.0, this can now trigger a segfault during decoder
- initialization.
-
-svn path=/trunk/theora/; revision=18268
----
- decode.c | 8 ++++----
- 1 file changed, 4 insertions(+), 4 deletions(-)
-
-diff --git a/decode.c b/decode.c
-index b803505..9f2516a 100644
---- a/decode.c
-+++ b/decode.c
-@@ -400,10 +400,10 @@ static int oc_dec_init(oc_dec_ctx *_dec,const th_info *_info,
- int qsum;
- qsum=0;
- for(qti=0;qti<2;qti++)for(pli=0;pli<3;pli++){
-- qsum+=_dec->state.dequant_tables[qti][pli][qi][12]+
-- _dec->state.dequant_tables[qti][pli][qi][17]+
-- _dec->state.dequant_tables[qti][pli][qi][18]+
-- _dec->state.dequant_tables[qti][pli][qi][24]<<(pli==0);
-+ qsum+=_dec->state.dequant_tables[qi][pli][qti][12]+
-+ _dec->state.dequant_tables[qi][pli][qti][17]+
-+ _dec->state.dequant_tables[qi][pli][qti][18]+
-+ _dec->state.dequant_tables[qi][pli][qti][24]<<(pli==0);
- }
- _dec->pp_sharp_mod[qi]=-(qsum>>11);
- }
---
-2.11.0
-
diff --git a/thirdparty/libtheora/quant.c b/thirdparty/libtheora/quant.c
index 8359f5abea..e206202844 100644
--- a/thirdparty/libtheora/quant.c
+++ b/thirdparty/libtheora/quant.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: quant.c 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
@@ -21,6 +21,14 @@
#include "quant.h"
#include "decint.h"
+/*The maximum output of the DCT with +/- 255 inputs is +/- 8157.
+ These minimum quantizers ensure the result after quantization (and after
+ prediction for DC) will be no more than +/- 510.
+ The tokenization system can handle values up to +/- 580, so there is no need
+ to do any coefficient clamping.
+ I would rather have allowed smaller quantizers and had to clamp, but these
+ minimums were required when constructing the original VP3 matrices and have
+ been formalized in the spec.*/
static const unsigned OC_DC_QUANT_MIN[2]={4<<2,8<<2};
static const unsigned OC_AC_QUANT_MIN[2]={2<<2,4<<2};
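The arithmetic behind these minimums is easy to sanity-check for the DC case: the comment above gives a worst-case DCT output of +/-8157, and the smallest DC quantizer allowed here is 4<<2 = 16, so the quantized DC magnitude stays at roughly 510, within the +/-580 the tokenizer can represent. A small standalone sketch of that arithmetic (not the library's quantization code):

#include <stdio.h>

int main(void){
  int max_dct_out=8157;   /* worst-case DCT output for +/-255 inputs, per the comment above */
  int dc_quant_min=4<<2;  /* OC_DC_QUANT_MIN[0] */
  /* Truncating division; the real quantizer may round, but either way the
     magnitude stays at or below 510. */
  printf("max quantized DC magnitude: about %d\n",max_dct_out/dc_quant_min);
  return 0;
}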
diff --git a/thirdparty/libtheora/quant.h b/thirdparty/libtheora/quant.h
index 49ce13a65c..247210eaae 100644
--- a/thirdparty/libtheora/quant.h
+++ b/thirdparty/libtheora/quant.h
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: quant.h 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
diff --git a/thirdparty/libtheora/rate.c b/thirdparty/libtheora/rate.c
index 4f43bb2e5f..bf2b1396a1 100644
--- a/thirdparty/libtheora/rate.c
+++ b/thirdparty/libtheora/rate.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: rate.c 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
#include <stdlib.h>
@@ -190,7 +190,8 @@ void oc_enc_calc_lambda(oc_enc_ctx *_enc,int _qti){
This may need to be revised if the R-D cost estimation or qii flag
optimization strategies change.*/
nqis=1;
- if(lq<(OC_Q57(56)>>3)&&!_enc->vp3_compatible){
+ if(lq<(OC_Q57(56)>>3)&&!_enc->vp3_compatible&&
+ _enc->sp_level<OC_SP_LEVEL_FAST_ANALYSIS){
qi1=oc_enc_find_qi_for_target(_enc,_qti,OC_MAXI(qi-1,0),0,
lq+(OC_Q57(7)+5)/10);
if(qi1!=qi)_enc->state.qis[nqis++]=qi1;
@@ -761,6 +762,7 @@ int oc_enc_update_rc_state(oc_enc_ctx *_enc,
_enc->rc.cur_metrics.log_scale=oc_q57_to_q24(log_scale);
_enc->rc.cur_metrics.dup_count=_enc->dup_count;
_enc->rc.cur_metrics.frame_type=_enc->state.frame_type;
+ _enc->rc.cur_metrics.activity_avg=_enc->activity_avg;
_enc->rc.twopass_buffer_bytes=0;
}break;
case 2:{
@@ -863,9 +865,9 @@ int oc_enc_update_rc_state(oc_enc_ctx *_enc,
return dropped;
}
-#define OC_RC_2PASS_VERSION (1)
+#define OC_RC_2PASS_VERSION (2)
#define OC_RC_2PASS_HDR_SZ (38)
-#define OC_RC_2PASS_PACKET_SZ (8)
+#define OC_RC_2PASS_PACKET_SZ (12)
static void oc_rc_buffer_val(oc_rc_state *_rc,ogg_int64_t _val,int _bytes){
while(_bytes-->0){
@@ -900,6 +902,7 @@ int oc_enc_rc_2pass_out(oc_enc_ctx *_enc,unsigned char **_buf){
oc_rc_buffer_val(&_enc->rc,
_enc->rc.cur_metrics.dup_count|_enc->rc.cur_metrics.frame_type<<31,4);
oc_rc_buffer_val(&_enc->rc,_enc->rc.cur_metrics.log_scale,4);
+ oc_rc_buffer_val(&_enc->rc,_enc->rc.cur_metrics.activity_avg,4);
}
}
else if(_enc->packet_state==OC_PACKET_DONE&&
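The bump of OC_RC_2PASS_PACKET_SZ from 8 to 12 above matches the extra 4-byte activity_avg written here: each per-frame metrics packet now carries three 32-bit fields instead of two. A rough sketch of that record, with descriptive field names that are not the library's own:

#include <stdio.h>

/* Sketch of the 12-byte per-frame record implied by the three 4-byte
   oc_rc_buffer_val() writes above; names are descriptive only. */
typedef struct{
  unsigned dup_count_and_type; /* dup_count in bits 0-30, frame_type in bit 31 */
  int      log_scale_q24;      /* log_scale, stored via oc_q57_to_q24() */
  unsigned activity_avg;       /* the new field behind the 8 -> 12 size bump */
}twopass_frame_record_sketch;

int main(void){
  /* 12 on the usual targets with 4-byte int, matching OC_RC_2PASS_PACKET_SZ. */
  printf("%u\n",(unsigned)sizeof(twopass_frame_record_sketch));
  return 0;
}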
@@ -1050,16 +1053,19 @@ int oc_enc_rc_2pass_in(oc_enc_ctx *_enc,unsigned char *_buf,size_t _bytes){
if(_enc->rc.twopass_buffer_fill>=OC_RC_2PASS_PACKET_SZ){
ogg_uint32_t dup_count;
ogg_int32_t log_scale;
+ unsigned activity;
int qti;
int arg;
/*Read the metrics for the next frame.*/
dup_count=oc_rc_unbuffer_val(&_enc->rc,4);
log_scale=oc_rc_unbuffer_val(&_enc->rc,4);
+ activity=oc_rc_unbuffer_val(&_enc->rc,4);
_enc->rc.cur_metrics.log_scale=log_scale;
qti=(dup_count&0x80000000)>>31;
_enc->rc.cur_metrics.dup_count=dup_count&0x7FFFFFFF;
_enc->rc.cur_metrics.frame_type=qti;
_enc->rc.twopass_force_kf=qti==OC_INTRA_FRAME;
+ _enc->activity_avg=_enc->rc.cur_metrics.activity_avg=activity;
/*"Helpfully" set the dup count back to what it was in pass 1.*/
arg=_enc->rc.cur_metrics.dup_count;
th_encode_ctl(_enc,TH_ENCCTL_SET_DUP_COUNT,&arg,sizeof(arg));
@@ -1070,8 +1076,8 @@ int oc_enc_rc_2pass_in(oc_enc_ctx *_enc,unsigned char *_buf,size_t _bytes){
else{
int frames_needed;
/*We're using a finite buffer:*/
- frames_needed=OC_CLAMPI(0,_enc->rc.buf_delay
- -(_enc->rc.scale_window_end-_enc->rc.scale_window0),
+ frames_needed=OC_MINI(_enc->rc.buf_delay-OC_MINI(_enc->rc.buf_delay,
+ _enc->rc.scale_window_end-_enc->rc.scale_window0),
_enc->rc.frames_left[0]+_enc->rc.frames_left[1]
-_enc->rc.nframes[0]-_enc->rc.nframes[1]);
while(frames_needed>0){
@@ -1087,9 +1093,11 @@ int oc_enc_rc_2pass_in(oc_enc_ctx *_enc,unsigned char *_buf,size_t _bytes){
ogg_uint32_t dup_count;
ogg_int32_t log_scale;
int qti;
+ unsigned activity;
/*Read the metrics for the next frame.*/
dup_count=oc_rc_unbuffer_val(&_enc->rc,4);
log_scale=oc_rc_unbuffer_val(&_enc->rc,4);
+ activity=oc_rc_unbuffer_val(&_enc->rc,4);
+ /*Add the metrics to the circular buffer.*/
fmi=_enc->rc.frame_metrics_head+_enc->rc.nframe_metrics++;
if(fmi>=_enc->rc.cframe_metrics)fmi-=_enc->rc.cframe_metrics;
@@ -1098,6 +1106,7 @@ int oc_enc_rc_2pass_in(oc_enc_ctx *_enc,unsigned char *_buf,size_t _bytes){
qti=(dup_count&0x80000000)>>31;
m->dup_count=dup_count&0x7FFFFFFF;
m->frame_type=qti;
+ m->activity_avg=activity;
/*And accumulate the statistics over the window.*/
_enc->rc.nframes[qti]++;
_enc->rc.nframes[2]+=m->dup_count;
@@ -1105,8 +1114,8 @@ int oc_enc_rc_2pass_in(oc_enc_ctx *_enc,unsigned char *_buf,size_t _bytes){
_enc->rc.scale_window_end+=m->dup_count+1;
/*Compute an upper bound on the number of remaining packets needed
for the current window.*/
- frames_needed=OC_CLAMPI(0,_enc->rc.buf_delay
- -(_enc->rc.scale_window_end-_enc->rc.scale_window0),
+ frames_needed=OC_MINI(_enc->rc.buf_delay-OC_MINI(_enc->rc.buf_delay,
+ _enc->rc.scale_window_end-_enc->rc.scale_window0),
_enc->rc.frames_left[0]+_enc->rc.frames_left[1]
-_enc->rc.nframes[0]-_enc->rc.nframes[1]);
/*Clear the buffer for the next frame.*/
@@ -1124,6 +1133,7 @@ int oc_enc_rc_2pass_in(oc_enc_ctx *_enc,unsigned char *_buf,size_t _bytes){
*(_enc->rc.frame_metrics+_enc->rc.frame_metrics_head);
_enc->rc.twopass_force_kf=
_enc->rc.cur_metrics.frame_type==OC_INTRA_FRAME;
+ _enc->activity_avg=_enc->rc.cur_metrics.activity_avg;
/*"Helpfully" set the dup count back to what it was in pass 1.*/
arg=_enc->rc.cur_metrics.dup_count;
th_encode_ctl(_enc,TH_ENCCTL_SET_DUP_COUNT,&arg,sizeof(arg));
diff --git a/thirdparty/libtheora/state.c b/thirdparty/libtheora/state.c
index 42ed33a9a3..f4c6240387 100644
--- a/thirdparty/libtheora/state.c
+++ b/thirdparty/libtheora/state.c
@@ -11,25 +11,93 @@
********************************************************************
function:
- last mod: $Id: state.c 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
#include <stdlib.h>
#include <string.h>
-#include "internal.h"
-#if defined(OC_X86_ASM)
-#if defined(_MSC_VER)
-# include "x86_vc/x86int.h"
-#else
-# include "x86/x86int.h"
-#endif
-#endif
+#include "state.h"
#if defined(OC_DUMP_IMAGES)
# include <stdio.h>
# include "png.h"
+# include "zlib.h"
#endif
+/*The function used to fill in the chroma plane motion vectors for a macro
+ block when 4 different motion vectors are specified in the luma plane.
+ This version is for use with chroma decimated in the X and Y directions
+ (4:2:0).
+ _cbmvs: The chroma block-level motion vectors to fill in.
+ _lbmvs: The luma block-level motion vectors.*/
+static void oc_set_chroma_mvs00(oc_mv _cbmvs[4],const oc_mv _lbmvs[4]){
+ int dx;
+ int dy;
+ dx=OC_MV_X(_lbmvs[0])+OC_MV_X(_lbmvs[1])
+ +OC_MV_X(_lbmvs[2])+OC_MV_X(_lbmvs[3]);
+ dy=OC_MV_Y(_lbmvs[0])+OC_MV_Y(_lbmvs[1])
+ +OC_MV_Y(_lbmvs[2])+OC_MV_Y(_lbmvs[3]);
+ _cbmvs[0]=OC_MV(OC_DIV_ROUND_POW2(dx,2,2),OC_DIV_ROUND_POW2(dy,2,2));
+}
+
+/*The function used to fill in the chroma plane motion vectors for a macro
+ block when 4 different motion vectors are specified in the luma plane.
+ This version is for use with chroma decimated in the Y direction.
+ _cbmvs: The chroma block-level motion vectors to fill in.
+ _lbmvs: The luma block-level motion vectors.*/
+static void oc_set_chroma_mvs01(oc_mv _cbmvs[4],const oc_mv _lbmvs[4]){
+ int dx;
+ int dy;
+ dx=OC_MV_X(_lbmvs[0])+OC_MV_X(_lbmvs[2]);
+ dy=OC_MV_Y(_lbmvs[0])+OC_MV_Y(_lbmvs[2]);
+ _cbmvs[0]=OC_MV(OC_DIV_ROUND_POW2(dx,1,1),OC_DIV_ROUND_POW2(dy,1,1));
+ dx=OC_MV_X(_lbmvs[1])+OC_MV_X(_lbmvs[3]);
+ dy=OC_MV_Y(_lbmvs[1])+OC_MV_Y(_lbmvs[3]);
+ _cbmvs[1]=OC_MV(OC_DIV_ROUND_POW2(dx,1,1),OC_DIV_ROUND_POW2(dy,1,1));
+}
+
+/*The function used to fill in the chroma plane motion vectors for a macro
+ block when 4 different motion vectors are specified in the luma plane.
+ This version is for use with chroma decimated in the X direction (4:2:2).
+ _cbmvs: The chroma block-level motion vectors to fill in.
+ _lbmvs: The luma block-level motion vectors.*/
+static void oc_set_chroma_mvs10(oc_mv _cbmvs[4],const oc_mv _lbmvs[4]){
+ int dx;
+ int dy;
+ dx=OC_MV_X(_lbmvs[0])+OC_MV_X(_lbmvs[1]);
+ dy=OC_MV_Y(_lbmvs[0])+OC_MV_Y(_lbmvs[1]);
+ _cbmvs[0]=OC_MV(OC_DIV_ROUND_POW2(dx,1,1),OC_DIV_ROUND_POW2(dy,1,1));
+ dx=OC_MV_X(_lbmvs[2])+OC_MV_X(_lbmvs[3]);
+ dy=OC_MV_Y(_lbmvs[2])+OC_MV_Y(_lbmvs[3]);
+ _cbmvs[2]=OC_MV(OC_DIV_ROUND_POW2(dx,1,1),OC_DIV_ROUND_POW2(dy,1,1));
+}
+
+/*The function used to fill in the chroma plane motion vectors for a macro
+ block when 4 different motion vectors are specified in the luma plane.
+ This version is for use with no chroma decimation (4:4:4).
+ _cbmvs: The chroma block-level motion vectors to fill in.
+ _lbmvs: The luma block-level motion vectors.*/
+static void oc_set_chroma_mvs11(oc_mv _cbmvs[4],const oc_mv _lbmvs[4]){
+ _cbmvs[0]=_lbmvs[0];
+ _cbmvs[1]=_lbmvs[1];
+ _cbmvs[2]=_lbmvs[2];
+ _cbmvs[3]=_lbmvs[3];
+}
+
+/*A table of functions used to fill in the chroma plane motion vectors for a
+ macro block when 4 different motion vectors are specified in the luma
+ plane.*/
+const oc_set_chroma_mvs_func OC_SET_CHROMA_MVS_TABLE[TH_PF_NFORMATS]={
+ (oc_set_chroma_mvs_func)oc_set_chroma_mvs00,
+ (oc_set_chroma_mvs_func)oc_set_chroma_mvs01,
+ (oc_set_chroma_mvs_func)oc_set_chroma_mvs10,
+ (oc_set_chroma_mvs_func)oc_set_chroma_mvs11
+};
+
+
+
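Each helper above sums the relevant luma block vectors and divides by their count with rounding, so e.g. the single 4:2:0 chroma vector is the rounded mean of all four luma vectors. A standalone illustration of that idea using ordinary rounded division (the library's OC_DIV_ROUND_POW2 macro may break ties differently):

#include <stdio.h>

/* Round-to-nearest division by 4, ties away from zero; a stand-in for the
   macro used by oc_set_chroma_mvs00() above, not its exact definition. */
static int round_div4(int v){
  return v>=0?(v+2)/4:-((-v+2)/4);
}

int main(void){
  /* Four hypothetical luma block MVs for one macro block. */
  int lx[4]={1,2,1,2};
  int ly[4]={0,0,1,1};
  int dx=lx[0]+lx[1]+lx[2]+lx[3]; /* 6 */
  int dy=ly[0]+ly[1]+ly[2]+ly[3]; /* 2 */
  printf("4:2:0 chroma mv: (%d,%d)\n",round_div4(dx),round_div4(dy)); /* (2,1) */
  return 0;
}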
/*Returns the fragment index of the top-left block in a macro block.
This can be used to test whether or not the whole macro block is valid.
_sb_map: The super block map.
@@ -92,7 +160,7 @@ static void oc_sb_create_plane_mapping(oc_sb_map _sb_maps[],
if(jmax>4)jmax=4;
else if(jmax<=0)break;
/*By default, set all fragment indices to -1.*/
- memset(_sb_maps[sbi][0],0xFF,sizeof(_sb_maps[sbi]));
+ memset(_sb_maps[sbi],0xFF,sizeof(_sb_maps[sbi]));
/*Fill in the fragment map for this super block.*/
xfrag=yfrag+x;
for(i=0;i<imax;i++){
@@ -186,10 +254,14 @@ static void oc_mb_fill_cmapping10(oc_mb_map_plane _mb_map[3],
This version is for use with no chroma decimation (4:4:4).
This uses the already filled-in luma plane values.
_mb_map: The macro block map to fill.
- _fplanes: The descriptions of the fragment planes.*/
+ _fplanes: The descriptions of the fragment planes.
+ _xfrag0: The X location of the upper-left hand fragment in the luma plane.
+ _yfrag0: The Y location of the upper-left hand fragment in the luma plane.*/
static void oc_mb_fill_cmapping11(oc_mb_map_plane _mb_map[3],
- const oc_fragment_plane _fplanes[3]){
+ const oc_fragment_plane _fplanes[3],int _xfrag0,int _yfrag0){
int k;
+ (void)_xfrag0;
+ (void)_yfrag0;
for(k=0;k<4;k++){
_mb_map[1][k]=_mb_map[0][k]+_fplanes[1].froffset;
_mb_map[2][k]=_mb_map[0][k]+_fplanes[2].froffset;
@@ -211,7 +283,7 @@ static const oc_mb_fill_cmapping_func OC_MB_FILL_CMAPPING_TABLE[4]={
oc_mb_fill_cmapping00,
oc_mb_fill_cmapping01,
oc_mb_fill_cmapping10,
- (oc_mb_fill_cmapping_func)oc_mb_fill_cmapping11
+ oc_mb_fill_cmapping11
};
/*Fills in the mapping from macro blocks to their corresponding fragment
@@ -469,7 +541,7 @@ static void oc_state_frarray_clear(oc_theora_state *_state){
unrestricted motion vectors without special casing the boundary.
If chroma is decimated in either direction, the padding is reduced by a
factor of 2 on the appropriate sides.
- _nrefs: The number of reference buffers to init; must be 3 or 4.*/
+ _nrefs: The number of reference buffers to init; must be in the range 3...6.*/
static int oc_state_ref_bufs_init(oc_theora_state *_state,int _nrefs){
th_info *info;
unsigned char *ref_frame_data;
@@ -481,6 +553,7 @@ static int oc_state_ref_bufs_init(oc_theora_state *_state,int _nrefs){
int yheight;
int chstride;
int cheight;
+ ptrdiff_t align;
ptrdiff_t yoffset;
ptrdiff_t coffset;
ptrdiff_t *frag_buf_offs;
@@ -489,33 +562,38 @@ static int oc_state_ref_bufs_init(oc_theora_state *_state,int _nrefs){
int vdec;
int rfi;
int pli;
- if(_nrefs<3||_nrefs>4)return TH_EINVAL;
+ if(_nrefs<3||_nrefs>6)return TH_EINVAL;
info=&_state->info;
/*Compute the image buffer parameters for each plane.*/
hdec=!(info->pixel_fmt&1);
vdec=!(info->pixel_fmt&2);
yhstride=info->frame_width+2*OC_UMV_PADDING;
yheight=info->frame_height+2*OC_UMV_PADDING;
- chstride=yhstride>>hdec;
+ /*Require 16-byte aligned rows in the chroma planes.*/
+ chstride=(yhstride>>hdec)+15&~15;
cheight=yheight>>vdec;
yplane_sz=yhstride*(size_t)yheight;
cplane_sz=chstride*(size_t)cheight;
yoffset=OC_UMV_PADDING+OC_UMV_PADDING*(ptrdiff_t)yhstride;
coffset=(OC_UMV_PADDING>>hdec)+(OC_UMV_PADDING>>vdec)*(ptrdiff_t)chstride;
- ref_frame_sz=yplane_sz+2*cplane_sz;
+ /*Although we guarantee the rows of the chroma planes are a multiple of 16
+ bytes, the initial padding on the first row may only be 8 bytes.
+ Compute the offset needed to align the actual image data to a multiple of 16.*/
+ align=-coffset&15;
+ ref_frame_sz=yplane_sz+2*cplane_sz+16;
ref_frame_data_sz=_nrefs*ref_frame_sz;
/*Check for overflow.
The same caveats apply as for oc_state_frarray_init().*/
- if(yplane_sz/yhstride!=yheight||2*cplane_sz<cplane_sz||
+ if(yplane_sz/yhstride!=(size_t)yheight||2*cplane_sz+16<cplane_sz||
ref_frame_sz<yplane_sz||ref_frame_data_sz/_nrefs!=ref_frame_sz){
return TH_EIMPL;
}
- ref_frame_data=_ogg_malloc(ref_frame_data_sz);
+ ref_frame_data=oc_aligned_malloc(ref_frame_data_sz,16);
frag_buf_offs=_state->frag_buf_offs=
_ogg_malloc(_state->nfrags*sizeof(*frag_buf_offs));
if(ref_frame_data==NULL||frag_buf_offs==NULL){
_ogg_free(frag_buf_offs);
- _ogg_free(ref_frame_data);
+ oc_aligned_free(ref_frame_data);
return TH_EFAULT;
}
/*Set up the width, height and stride for the image buffers.*/
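The new stride expression rounds the halved luma stride up to the next multiple of 16, and the `align` term then shifts the chroma planes so the pointer actually used for image data (base plus `coffset`) also lands on a 16-byte boundary. A worked example of the stride rounding for a hypothetical 336-pixel-wide 4:2:0 frame (OC_UMV_PADDING is 16, per state.h later in this diff):

#include <stdio.h>

int main(void){
  int frame_width=336;              /* hypothetical; frame widths are multiples of 16 */
  int hdec=1;                       /* 4:2:0: chroma decimated horizontally */
  int yhstride=frame_width+2*16;    /* 368 */
  int chstride=((yhstride>>hdec)+15)&~15; /* 184 rounded up to 192 */
  printf("yhstride=%d chstride=%d\n",yhstride,chstride);
  return 0;
}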
@@ -532,15 +610,15 @@ static int oc_state_ref_bufs_init(oc_theora_state *_state,int _nrefs){
memcpy(_state->ref_frame_bufs[rfi],_state->ref_frame_bufs[0],
sizeof(_state->ref_frame_bufs[0]));
}
+ _state->ref_frame_handle=ref_frame_data;
/*Set up the data pointers for the image buffers.*/
for(rfi=0;rfi<_nrefs;rfi++){
- _state->ref_frame_data[rfi]=ref_frame_data;
_state->ref_frame_bufs[rfi][0].data=ref_frame_data+yoffset;
- ref_frame_data+=yplane_sz;
+ ref_frame_data+=yplane_sz+align;
_state->ref_frame_bufs[rfi][1].data=ref_frame_data+coffset;
ref_frame_data+=cplane_sz;
_state->ref_frame_bufs[rfi][2].data=ref_frame_data+coffset;
- ref_frame_data+=cplane_sz;
+ ref_frame_data+=cplane_sz+(16-align);
/*Flip the buffer upside down.
This allows us to decode Theora's bottom-up frames in their natural
order, yet return a top-down buffer with a positive stride to the user.*/
@@ -550,7 +628,7 @@ static int oc_state_ref_bufs_init(oc_theora_state *_state,int _nrefs){
_state->ref_ystride[0]=-yhstride;
_state->ref_ystride[1]=_state->ref_ystride[2]=-chstride;
/*Initialize the fragment buffer offsets.*/
- ref_frame_data=_state->ref_frame_data[0];
+ ref_frame_data=_state->ref_frame_bufs[0][0].data;
fragi=0;
for(pli=0;pli<3;pli++){
th_img_plane *iplane;
@@ -576,41 +654,44 @@ static int oc_state_ref_bufs_init(oc_theora_state *_state,int _nrefs){
vpix+=stride<<3;
}
}
- /*Initialize the reference frame indices.*/
+ /*Initialize the reference frame pointers and indices.*/
_state->ref_frame_idx[OC_FRAME_GOLD]=
_state->ref_frame_idx[OC_FRAME_PREV]=
- _state->ref_frame_idx[OC_FRAME_SELF]=-1;
- _state->ref_frame_idx[OC_FRAME_IO]=_nrefs>3?3:-1;
+ _state->ref_frame_idx[OC_FRAME_GOLD_ORIG]=
+ _state->ref_frame_idx[OC_FRAME_PREV_ORIG]=
+ _state->ref_frame_idx[OC_FRAME_SELF]=
+ _state->ref_frame_idx[OC_FRAME_IO]=-1;
+ _state->ref_frame_data[OC_FRAME_GOLD]=
+ _state->ref_frame_data[OC_FRAME_PREV]=
+ _state->ref_frame_data[OC_FRAME_GOLD_ORIG]=
+ _state->ref_frame_data[OC_FRAME_PREV_ORIG]=
+ _state->ref_frame_data[OC_FRAME_SELF]=
+ _state->ref_frame_data[OC_FRAME_IO]=NULL;
return 0;
}
static void oc_state_ref_bufs_clear(oc_theora_state *_state){
_ogg_free(_state->frag_buf_offs);
- _ogg_free(_state->ref_frame_data[0]);
+ oc_aligned_free(_state->ref_frame_handle);
}
-void oc_state_vtable_init_c(oc_theora_state *_state){
+void oc_state_accel_init_c(oc_theora_state *_state){
+ _state->cpu_flags=0;
+#if defined(OC_STATE_USE_VTABLE)
_state->opt_vtable.frag_copy=oc_frag_copy_c;
+ _state->opt_vtable.frag_copy_list=oc_frag_copy_list_c;
_state->opt_vtable.frag_recon_intra=oc_frag_recon_intra_c;
_state->opt_vtable.frag_recon_inter=oc_frag_recon_inter_c;
_state->opt_vtable.frag_recon_inter2=oc_frag_recon_inter2_c;
_state->opt_vtable.idct8x8=oc_idct8x8_c;
_state->opt_vtable.state_frag_recon=oc_state_frag_recon_c;
- _state->opt_vtable.state_frag_copy_list=oc_state_frag_copy_list_c;
+ _state->opt_vtable.loop_filter_init=oc_loop_filter_init_c;
_state->opt_vtable.state_loop_filter_frag_rows=
oc_state_loop_filter_frag_rows_c;
_state->opt_vtable.restore_fpu=oc_restore_fpu_c;
- _state->opt_data.dct_fzig_zag=OC_FZIG_ZAG;
-}
-
-/*Initialize the accelerated function pointers.*/
-void oc_state_vtable_init(oc_theora_state *_state){
-#if defined(OC_X86_ASM)
- oc_state_vtable_init_x86(_state);
-#else
- oc_state_vtable_init_c(_state);
#endif
+ _state->opt_data.dct_fzig_zag=OC_FZIG_ZAG;
}
@@ -626,7 +707,8 @@ int oc_state_init(oc_theora_state *_state,const th_info *_info,int _nrefs){
how it is specified in the bitstream, because the Y axis is flipped in
the bitstream.
The displayable frame must fit inside the encoded frame.
- The color space must be one known by the encoder.*/
+ The color space must be one known by the encoder.
+ The framerate ratio must not contain a zero value.*/
if((_info->frame_width&0xF)||(_info->frame_height&0xF)||
_info->frame_width<=0||_info->frame_width>=0x100000||
_info->frame_height<=0||_info->frame_height>=0x100000||
@@ -639,7 +721,8 @@ int oc_state_init(oc_theora_state *_state,const th_info *_info,int _nrefs){
but there are a number of compilers which will mis-optimize this.
It's better to live with the spurious warnings.*/
_info->colorspace<0||_info->colorspace>=TH_CS_NSPACES||
- _info->pixel_fmt<0||_info->pixel_fmt>=TH_PF_NFORMATS){
+ _info->pixel_fmt<0||_info->pixel_fmt>=TH_PF_NFORMATS||
+ _info->fps_numerator<1||_info->fps_denominator<1){
return TH_EINVAL;
}
memset(_state,0,sizeof(*_state));
@@ -648,7 +731,7 @@ int oc_state_init(oc_theora_state *_state,const th_info *_info,int _nrefs){
system.*/
_state->info.pic_y=_info->frame_height-_info->pic_height-_info->pic_y;
_state->frame_type=OC_UNKWN_FRAME;
- oc_state_vtable_init(_state);
+ oc_state_accel_init(_state);
ret=oc_state_frarray_init(_state);
if(ret>=0)ret=oc_state_ref_bufs_init(_state,_nrefs);
if(ret<0){
@@ -758,11 +841,10 @@ void oc_state_borders_fill(oc_theora_state *_state,int _refi){
_offsets[1] is set if the motion vector has non-zero fractional
components.
_pli: The color plane index.
- _dx: The X component of the motion vector.
- _dy: The Y component of the motion vector.
+ _mv: The motion vector.
Return: The number of offsets returned: 1 or 2.*/
int oc_state_get_mv_offsets(const oc_theora_state *_state,int _offsets[2],
- int _pli,int _dx,int _dy){
+ int _pli,oc_mv _mv){
/*Here is a brief description of how Theora handles motion vectors:
Motion vector components are specified to half-pixel accuracy in
undecimated directions of each plane, and quarter-pixel accuracy in
@@ -785,21 +867,25 @@ int oc_state_get_mv_offsets(const oc_theora_state *_state,int _offsets[2],
int xfrac;
int yfrac;
int offs;
+ int dx;
+ int dy;
ystride=_state->ref_ystride[_pli];
/*These two variables decide whether we are in half- or quarter-pixel
precision in each component.*/
xprec=1+(_pli!=0&&!(_state->info.pixel_fmt&1));
yprec=1+(_pli!=0&&!(_state->info.pixel_fmt&2));
+ dx=OC_MV_X(_mv);
+ dy=OC_MV_Y(_mv);
/*These two variables are either 0 if all the fractional bits are zero or -1
if any of them are non-zero.*/
- xfrac=OC_SIGNMASK(-(_dx&(xprec|1)));
- yfrac=OC_SIGNMASK(-(_dy&(yprec|1)));
- offs=(_dx>>xprec)+(_dy>>yprec)*ystride;
+ xfrac=OC_SIGNMASK(-(dx&(xprec|1)));
+ yfrac=OC_SIGNMASK(-(dy&(yprec|1)));
+ offs=(dx>>xprec)+(dy>>yprec)*ystride;
if(xfrac||yfrac){
int xmask;
int ymask;
- xmask=OC_SIGNMASK(_dx);
- ymask=OC_SIGNMASK(_dy);
+ xmask=OC_SIGNMASK(dx);
+ ymask=OC_SIGNMASK(dy);
yfrac&=ystride;
_offsets[0]=offs-(xfrac&xmask)+(yfrac&ymask);
_offsets[1]=offs-(xfrac&~xmask)+(yfrac&~ymask);
@@ -848,13 +934,17 @@ int oc_state_get_mv_offsets(const oc_theora_state *_state,int _offsets[2],
int mx2;
int my2;
int offs;
+ int dx;
+ int dy;
ystride=_state->ref_ystride[_pli];
qpy=_pli!=0&&!(_state->info.pixel_fmt&2);
- my=OC_MVMAP[qpy][_dy+31];
- my2=OC_MVMAP2[qpy][_dy+31];
+ dx=OC_MV_X(_mv);
+ dy=OC_MV_Y(_mv);
+ my=OC_MVMAP[qpy][dy+31];
+ my2=OC_MVMAP2[qpy][dy+31];
qpx=_pli!=0&&!(_state->info.pixel_fmt&1);
- mx=OC_MVMAP[qpx][_dx+31];
- mx2=OC_MVMAP2[qpx][_dx+31];
+ mx=OC_MVMAP[qpx][dx+31];
+ mx2=OC_MVMAP2[qpx][dx+31];
offs=my*ystride+mx;
if(mx2||my2){
_offsets[1]=offs+my2*ystride+mx2;
@@ -866,18 +956,12 @@ int oc_state_get_mv_offsets(const oc_theora_state *_state,int _offsets[2],
#endif
}
-void oc_state_frag_recon(const oc_theora_state *_state,ptrdiff_t _fragi,
- int _pli,ogg_int16_t _dct_coeffs[64],int _last_zzi,ogg_uint16_t _dc_quant){
- _state->opt_vtable.state_frag_recon(_state,_fragi,_pli,_dct_coeffs,
- _last_zzi,_dc_quant);
-}
-
void oc_state_frag_recon_c(const oc_theora_state *_state,ptrdiff_t _fragi,
- int _pli,ogg_int16_t _dct_coeffs[64],int _last_zzi,ogg_uint16_t _dc_quant){
+ int _pli,ogg_int16_t _dct_coeffs[128],int _last_zzi,ogg_uint16_t _dc_quant){
unsigned char *dst;
ptrdiff_t frag_buf_off;
int ystride;
- int mb_mode;
+ int refi;
/*Apply the inverse transform.*/
/*Special case only having a DC component.*/
if(_last_zzi<2){
@@ -887,69 +971,35 @@ void oc_state_frag_recon_c(const oc_theora_state *_state,ptrdiff_t _fragi,
no iDCT rounding.*/
p=(ogg_int16_t)(_dct_coeffs[0]*(ogg_int32_t)_dc_quant+15>>5);
/*LOOP VECTORIZES.*/
- for(ci=0;ci<64;ci++)_dct_coeffs[ci]=p;
+ for(ci=0;ci<64;ci++)_dct_coeffs[64+ci]=p;
}
else{
/*First, dequantize the DC coefficient.*/
_dct_coeffs[0]=(ogg_int16_t)(_dct_coeffs[0]*(int)_dc_quant);
- oc_idct8x8(_state,_dct_coeffs,_last_zzi);
+ oc_idct8x8(_state,_dct_coeffs+64,_dct_coeffs,_last_zzi);
}
/*Fill in the target buffer.*/
frag_buf_off=_state->frag_buf_offs[_fragi];
- mb_mode=_state->frags[_fragi].mb_mode;
+ refi=_state->frags[_fragi].refi;
ystride=_state->ref_ystride[_pli];
- dst=_state->ref_frame_data[_state->ref_frame_idx[OC_FRAME_SELF]]+frag_buf_off;
- if(mb_mode==OC_MODE_INTRA)oc_frag_recon_intra(_state,dst,ystride,_dct_coeffs);
+ dst=_state->ref_frame_data[OC_FRAME_SELF]+frag_buf_off;
+ if(refi==OC_FRAME_SELF)oc_frag_recon_intra(_state,dst,ystride,_dct_coeffs+64);
else{
const unsigned char *ref;
int mvoffsets[2];
- ref=
- _state->ref_frame_data[_state->ref_frame_idx[OC_FRAME_FOR_MODE(mb_mode)]]
- +frag_buf_off;
+ ref=_state->ref_frame_data[refi]+frag_buf_off;
if(oc_state_get_mv_offsets(_state,mvoffsets,_pli,
- _state->frag_mvs[_fragi][0],_state->frag_mvs[_fragi][1])>1){
+ _state->frag_mvs[_fragi])>1){
oc_frag_recon_inter2(_state,
- dst,ref+mvoffsets[0],ref+mvoffsets[1],ystride,_dct_coeffs);
+ dst,ref+mvoffsets[0],ref+mvoffsets[1],ystride,_dct_coeffs+64);
+ }
+ else{
+ oc_frag_recon_inter(_state,dst,ref+mvoffsets[0],ystride,_dct_coeffs+64);
}
- else oc_frag_recon_inter(_state,dst,ref+mvoffsets[0],ystride,_dct_coeffs);
- }
-}
-
-/*Copies the fragments specified by the lists of fragment indices from one
- frame to another.
- _fragis: A pointer to a list of fragment indices.
- _nfragis: The number of fragment indices to copy.
- _dst_frame: The reference frame to copy to.
- _src_frame: The reference frame to copy from.
- _pli: The color plane the fragments lie in.*/
-void oc_state_frag_copy_list(const oc_theora_state *_state,
- const ptrdiff_t *_fragis,ptrdiff_t _nfragis,
- int _dst_frame,int _src_frame,int _pli){
- _state->opt_vtable.state_frag_copy_list(_state,_fragis,_nfragis,_dst_frame,
- _src_frame,_pli);
-}
-
-void oc_state_frag_copy_list_c(const oc_theora_state *_state,
- const ptrdiff_t *_fragis,ptrdiff_t _nfragis,
- int _dst_frame,int _src_frame,int _pli){
- const ptrdiff_t *frag_buf_offs;
- const unsigned char *src_frame_data;
- unsigned char *dst_frame_data;
- ptrdiff_t fragii;
- int ystride;
- dst_frame_data=_state->ref_frame_data[_state->ref_frame_idx[_dst_frame]];
- src_frame_data=_state->ref_frame_data[_state->ref_frame_idx[_src_frame]];
- ystride=_state->ref_ystride[_pli];
- frag_buf_offs=_state->frag_buf_offs;
- for(fragii=0;fragii<_nfragis;fragii++){
- ptrdiff_t frag_buf_off;
- frag_buf_off=frag_buf_offs[_fragis[fragii]];
- oc_frag_copy(_state,dst_frame_data+frag_buf_off,
- src_frame_data+frag_buf_off,ystride);
}
}
-static void loop_filter_h(unsigned char *_pix,int _ystride,int *_bv){
+static void loop_filter_h(unsigned char *_pix,int _ystride,signed char *_bv){
int y;
_pix-=2;
for(y=0;y<8;y++){
@@ -965,7 +1015,7 @@ static void loop_filter_h(unsigned char *_pix,int _ystride,int *_bv){
}
}
-static void loop_filter_v(unsigned char *_pix,int _ystride,int *_bv){
+static void loop_filter_v(unsigned char *_pix,int _ystride,signed char *_bv){
int x;
_pix-=_ystride*2;
for(x=0;x<8;x++){
@@ -982,20 +1032,16 @@ static void loop_filter_v(unsigned char *_pix,int _ystride,int *_bv){
/*Initialize the bounding values array used by the loop filter.
_bv: Storage for the array.
- Return: 0 on success, or a non-zero value if no filtering need be applied.*/
-int oc_state_loop_filter_init(oc_theora_state *_state,int _bv[256]){
- int flimit;
+ _flimit: The filter limit as defined in Section 7.10 of the spec.*/
+void oc_loop_filter_init_c(signed char _bv[256],int _flimit){
int i;
- flimit=_state->loop_filter_limits[_state->qis[0]];
- if(flimit==0)return 1;
memset(_bv,0,sizeof(_bv[0])*256);
- for(i=0;i<flimit;i++){
- if(127-i-flimit>=0)_bv[127-i-flimit]=i-flimit;
- _bv[127-i]=-i;
- _bv[127+i]=i;
- if(127+i+flimit<256)_bv[127+i+flimit]=flimit-i;
+ for(i=0;i<_flimit;i++){
+ if(127-i-_flimit>=0)_bv[127-i-_flimit]=(signed char)(i-_flimit);
+ _bv[127-i]=(signed char)(-i);
+ _bv[127+i]=(signed char)(i);
+ if(127+i+_flimit<256)_bv[127+i+_flimit]=(signed char)(_flimit-i);
}
- return 0;
}
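The rewritten initializer builds the familiar bounding-value ramp, now as signed char with the filter limit passed in explicitly: corrections grow linearly up to +/-_flimit and then fall back to zero, so differences of 2*_flimit or more are left untouched. A standalone run of the same loop for the arbitrary limit 2 (not a value taken from this diff), showing the non-zero entries around the center index 127:

#include <stdio.h>
#include <string.h>

int main(void){
  signed char bv[256];
  int flimit=2;  /* arbitrary illustrative limit */
  int i;
  memset(bv,0,sizeof(bv));
  /* Same construction as oc_loop_filter_init_c() above. */
  for(i=0;i<flimit;i++){
    if(127-i-flimit>=0)bv[127-i-flimit]=(signed char)(i-flimit);
    bv[127-i]=(signed char)(-i);
    bv[127+i]=(signed char)i;
    if(127+i+flimit<256)bv[127+i+flimit]=(signed char)(flimit-i);
  }
  /* Prints: bv[124]=-1 bv[125]=-2 bv[126]=-1 bv[127]=0 bv[128]=1 bv[129]=2 bv[130]=1 */
  for(i=124;i<=130;i++)printf("bv[%d]=%d\n",i,bv[i]);
  return 0;
}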
/*Apply the loop filter to a given set of fragment rows in the given plane.
@@ -1006,14 +1052,8 @@ int oc_state_loop_filter_init(oc_theora_state *_state,int _bv[256]){
_pli: The color plane to filter.
_fragy0: The Y coordinate of the first fragment row to filter.
_fragy_end: The Y coordinate of the fragment row to stop filtering at.*/
-void oc_state_loop_filter_frag_rows(const oc_theora_state *_state,int _bv[256],
- int _refi,int _pli,int _fragy0,int _fragy_end){
- _state->opt_vtable.state_loop_filter_frag_rows(_state,_bv,_refi,_pli,
- _fragy0,_fragy_end);
-}
-
-void oc_state_loop_filter_frag_rows_c(const oc_theora_state *_state,int *_bv,
- int _refi,int _pli,int _fragy0,int _fragy_end){
+void oc_state_loop_filter_frag_rows_c(const oc_theora_state *_state,
+ signed char *_bv,int _refi,int _pli,int _fragy0,int _fragy_end){
const oc_fragment_plane *fplane;
const oc_fragment *frags;
const ptrdiff_t *frag_buf_offs;
@@ -1030,7 +1070,7 @@ void oc_state_loop_filter_frag_rows_c(const oc_theora_state *_state,int *_bv,
fragi_top=fplane->froffset;
fragi_bot=fragi_top+fplane->nfrags;
fragi0=fragi_top+_fragy0*(ptrdiff_t)nhfrags;
- fragi0_end=fragi0+(_fragy_end-_fragy0)*(ptrdiff_t)nhfrags;
+ fragi0_end=fragi_top+_fragy_end*(ptrdiff_t)nhfrags;
ystride=_state->ref_ystride[_pli];
frags=_state->frags;
frag_buf_offs=_state->frag_buf_offs;
diff --git a/thirdparty/libtheora/state.h b/thirdparty/libtheora/state.h
new file mode 100644
index 0000000000..f176a53ce9
--- /dev/null
+++ b/thirdparty/libtheora/state.h
@@ -0,0 +1,552 @@
+/********************************************************************
+ * *
+ * THIS FILE IS PART OF THE OggTheora SOFTWARE CODEC SOURCE CODE. *
+ * USE, DISTRIBUTION AND REPRODUCTION OF THIS LIBRARY SOURCE IS *
+ * GOVERNED BY A BSD-STYLE SOURCE LICENSE INCLUDED WITH THIS SOURCE *
+ * IN 'COPYING'. PLEASE READ THESE TERMS BEFORE DISTRIBUTING. *
+ * *
+ * THE Theora SOURCE CODE IS COPYRIGHT (C) 2002-2009 *
+ * by the Xiph.Org Foundation and contributors http://www.xiph.org/ *
+ * *
+ ********************************************************************
+
+ function:
+ last mod: $Id: internal.h 17337 2010-07-19 16:08:54Z tterribe $
+
+ ********************************************************************/
+#if !defined(_state_H)
+# define _state_H (1)
+# include "internal.h"
+# include "huffman.h"
+# include "quant.h"
+
+
+
+/*A single quadrant of the map from a super block to fragment numbers.*/
+typedef ptrdiff_t oc_sb_map_quad[4];
+/*A map from a super block to fragment numbers.*/
+typedef oc_sb_map_quad oc_sb_map[4];
+/*A single plane of the map from a macro block to fragment numbers.*/
+typedef ptrdiff_t oc_mb_map_plane[4];
+/*A map from a macro block to fragment numbers.*/
+typedef oc_mb_map_plane oc_mb_map[3];
+/*A motion vector.*/
+typedef ogg_int16_t oc_mv;
+
+typedef struct oc_sb_flags oc_sb_flags;
+typedef struct oc_border_info oc_border_info;
+typedef struct oc_fragment oc_fragment;
+typedef struct oc_fragment_plane oc_fragment_plane;
+typedef struct oc_base_opt_vtable oc_base_opt_vtable;
+typedef struct oc_base_opt_data oc_base_opt_data;
+typedef struct oc_state_dispatch_vtable oc_state_dispatch_vtable;
+typedef struct oc_theora_state oc_theora_state;
+
+
+
+/*Shared accelerated functions.*/
+# if defined(OC_X86_ASM)
+# if defined(_MSC_VER)
+# include "x86_vc/x86int.h"
+# else
+# include "x86/x86int.h"
+# endif
+# endif
+# if defined(OC_ARM_ASM)
+# include "arm/armint.h"
+# endif
+# if defined(OC_C64X_ASM)
+# include "c64x/c64xint.h"
+# endif
+
+# if !defined(oc_state_accel_init)
+# define oc_state_accel_init oc_state_accel_init_c
+# endif
+# if defined(OC_STATE_USE_VTABLE)
+# if !defined(oc_frag_copy)
+# define oc_frag_copy(_state,_dst,_src,_ystride) \
+ ((*(_state)->opt_vtable.frag_copy)(_dst,_src,_ystride))
+# endif
+# if !defined(oc_frag_copy_list)
+# define oc_frag_copy_list(_state,_dst_frame,_src_frame,_ystride, \
+ _fragis,_nfragis,_frag_buf_offs) \
+ ((*(_state)->opt_vtable.frag_copy_list)(_dst_frame,_src_frame,_ystride, \
+ _fragis,_nfragis,_frag_buf_offs))
+# endif
+# if !defined(oc_frag_recon_intra)
+# define oc_frag_recon_intra(_state,_dst,_dst_ystride,_residue) \
+ ((*(_state)->opt_vtable.frag_recon_intra)(_dst,_dst_ystride,_residue))
+# endif
+# if !defined(oc_frag_recon_inter)
+# define oc_frag_recon_inter(_state,_dst,_src,_ystride,_residue) \
+ ((*(_state)->opt_vtable.frag_recon_inter)(_dst,_src,_ystride,_residue))
+# endif
+# if !defined(oc_frag_recon_inter2)
+# define oc_frag_recon_inter2(_state,_dst,_src1,_src2,_ystride,_residue) \
+ ((*(_state)->opt_vtable.frag_recon_inter2)(_dst, \
+ _src1,_src2,_ystride,_residue))
+# endif
+# if !defined(oc_idct8x8)
+# define oc_idct8x8(_state,_y,_x,_last_zzi) \
+ ((*(_state)->opt_vtable.idct8x8)(_y,_x,_last_zzi))
+# endif
+# if !defined(oc_state_frag_recon)
+# define oc_state_frag_recon(_state,_fragi, \
+ _pli,_dct_coeffs,_last_zzi,_dc_quant) \
+ ((*(_state)->opt_vtable.state_frag_recon)(_state,_fragi, \
+ _pli,_dct_coeffs,_last_zzi,_dc_quant))
+# endif
+# if !defined(oc_loop_filter_init)
+# define oc_loop_filter_init(_state,_bv,_flimit) \
+ ((*(_state)->opt_vtable.loop_filter_init)(_bv,_flimit))
+# endif
+# if !defined(oc_state_loop_filter_frag_rows)
+# define oc_state_loop_filter_frag_rows(_state, \
+ _bv,_refi,_pli,_fragy0,_fragy_end) \
+ ((*(_state)->opt_vtable.state_loop_filter_frag_rows)(_state, \
+ _bv,_refi,_pli,_fragy0,_fragy_end))
+# endif
+# if !defined(oc_restore_fpu)
+# define oc_restore_fpu(_state) \
+ ((*(_state)->opt_vtable.restore_fpu)())
+# endif
+# else
+# if !defined(oc_frag_copy)
+# define oc_frag_copy(_state,_dst,_src,_ystride) \
+ oc_frag_copy_c(_dst,_src,_ystride)
+# endif
+# if !defined(oc_frag_copy_list)
+# define oc_frag_copy_list(_state,_dst_frame,_src_frame,_ystride, \
+ _fragis,_nfragis,_frag_buf_offs) \
+ oc_frag_copy_list_c(_dst_frame,_src_frame,_ystride, \
+ _fragis,_nfragis,_frag_buf_offs)
+# endif
+# if !defined(oc_frag_recon_intra)
+# define oc_frag_recon_intra(_state,_dst,_dst_ystride,_residue) \
+ oc_frag_recon_intra_c(_dst,_dst_ystride,_residue)
+# endif
+# if !defined(oc_frag_recon_inter)
+# define oc_frag_recon_inter(_state,_dst,_src,_ystride,_residue) \
+ oc_frag_recon_inter_c(_dst,_src,_ystride,_residue)
+# endif
+# if !defined(oc_frag_recon_inter2)
+# define oc_frag_recon_inter2(_state,_dst,_src1,_src2,_ystride,_residue) \
+ oc_frag_recon_inter2_c(_dst,_src1,_src2,_ystride,_residue)
+# endif
+# if !defined(oc_idct8x8)
+# define oc_idct8x8(_state,_y,_x,_last_zzi) oc_idct8x8_c(_y,_x,_last_zzi)
+# endif
+# if !defined(oc_state_frag_recon)
+# define oc_state_frag_recon oc_state_frag_recon_c
+# endif
+# if !defined(oc_loop_filter_init)
+# define oc_loop_filter_init(_state,_bv,_flimit) \
+ oc_loop_filter_init_c(_bv,_flimit)
+# endif
+# if !defined(oc_state_loop_filter_frag_rows)
+# define oc_state_loop_filter_frag_rows oc_state_loop_filter_frag_rows_c
+# endif
+# if !defined(oc_restore_fpu)
+# define oc_restore_fpu(_state) do{}while(0)
+# endif
+# endif
+
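+/*Illustrative note (not in the upstream header): when OC_STATE_USE_VTABLE is
+   defined, a call such as
+     oc_frag_copy(state,dst,src,ystride);
+   expands to an indirect call through state->opt_vtable.frag_copy, which
+   oc_state_accel_init() fills in with the fastest variant detected at run
+   time.
+   Without it, the same call expands directly to oc_frag_copy_c() or to an
+   ISA-specific override #defined above, so the dispatch cost disappears when
+   the target is fixed at compile time.*/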
+
+
+/*A keyframe.*/
+# define OC_INTRA_FRAME (0)
+/*A predicted frame.*/
+# define OC_INTER_FRAME (1)
+/*A frame of unknown type (frame type decision has not yet been made).*/
+# define OC_UNKWN_FRAME (-1)
+
+/*The amount of padding to add to the reconstructed frame buffers on all
+ sides.
+ This is used to allow unrestricted motion vectors without special casing.
+ This must be a multiple of 2.*/
+# define OC_UMV_PADDING (16)
+
+/*Frame classification indices.*/
+/*The previous golden frame.*/
+# define OC_FRAME_GOLD (0)
+/*The previous frame.*/
+# define OC_FRAME_PREV (1)
+/*The current frame.*/
+# define OC_FRAME_SELF (2)
+/*Used to mark uncoded fragments (for DC prediction).*/
+# define OC_FRAME_NONE (3)
+
+/*The input or output buffer.*/
+# define OC_FRAME_IO (3)
+/*Uncompressed prev golden frame.*/
+# define OC_FRAME_GOLD_ORIG (4)
+/*Uncompressed previous frame.*/
+# define OC_FRAME_PREV_ORIG (5)
+
+/*Macroblock modes.*/
+/*Macro block is invalid: It is never coded.*/
+# define OC_MODE_INVALID (-1)
+/*Encoded difference from the same macro block in the previous frame.*/
+# define OC_MODE_INTER_NOMV (0)
+/*Encoded with no motion compensated prediction.*/
+# define OC_MODE_INTRA (1)
+/*Encoded difference from the previous frame offset by the given motion
+ vector.*/
+# define OC_MODE_INTER_MV (2)
+/*Encoded difference from the previous frame offset by the last coded motion
+ vector.*/
+# define OC_MODE_INTER_MV_LAST (3)
+/*Encoded difference from the previous frame offset by the second to last
+ coded motion vector.*/
+# define OC_MODE_INTER_MV_LAST2 (4)
+/*Encoded difference from the same macro block in the previous golden
+ frame.*/
+# define OC_MODE_GOLDEN_NOMV (5)
+/*Encoded difference from the previous golden frame offset by the given motion
+ vector.*/
+# define OC_MODE_GOLDEN_MV (6)
+/*Encoded difference from the previous frame offset by the individual motion
+ vectors given for each block.*/
+# define OC_MODE_INTER_MV_FOUR (7)
+/*The number of (coded) modes.*/
+# define OC_NMODES (8)
+
+/*Determines the reference frame used for a given MB mode.*/
+# define OC_FRAME_FOR_MODE(_x) \
+ OC_UNIBBLE_TABLE32(OC_FRAME_PREV,OC_FRAME_SELF,OC_FRAME_PREV,OC_FRAME_PREV, \
+ OC_FRAME_PREV,OC_FRAME_GOLD,OC_FRAME_GOLD,OC_FRAME_PREV,(_x))
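+/*For illustration (not in the upstream header), the nibble table above maps
+   each coded mode to the frame it predicts from, e.g.
+     OC_FRAME_FOR_MODE(OC_MODE_INTRA)     ==OC_FRAME_SELF
+     OC_FRAME_FOR_MODE(OC_MODE_GOLDEN_MV) ==OC_FRAME_GOLD
+     OC_FRAME_FOR_MODE(OC_MODE_INTER_MV)  ==OC_FRAME_PREV
+   Only the eight coded modes (0...7) are valid arguments; OC_MODE_INVALID is
+   not covered by the table.*/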
+
+/*Constants for the packet state machine common between encoder and decoder.*/
+
+/*Next packet to emit/read: Codec info header.*/
+# define OC_PACKET_INFO_HDR (-3)
+/*Next packet to emit/read: Comment header.*/
+# define OC_PACKET_COMMENT_HDR (-2)
+/*Next packet to emit/read: Codec setup header.*/
+# define OC_PACKET_SETUP_HDR (-1)
+/*No more packets to emit/read.*/
+# define OC_PACKET_DONE (INT_MAX)
+
+
+
+#define OC_MV(_x,_y) ((oc_mv)((_x)&0xFF|(_y)<<8))
+#define OC_MV_X(_mv) ((signed char)(_mv))
+#define OC_MV_Y(_mv) ((_mv)>>8)
+#define OC_MV_ADD(_mv1,_mv2) \
+ OC_MV(OC_MV_X(_mv1)+OC_MV_X(_mv2), \
+ OC_MV_Y(_mv1)+OC_MV_Y(_mv2))
+#define OC_MV_SUB(_mv1,_mv2) \
+ OC_MV(OC_MV_X(_mv1)-OC_MV_X(_mv2), \
+ OC_MV_Y(_mv1)-OC_MV_Y(_mv2))
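+/*Worked example (not in the upstream header): a motion vector packs the
+   signed 8-bit X component in the low byte and the signed 8-bit Y component
+   in the high byte of a 16-bit value.
+   For instance, OC_MV(3,-4) stores 0x03 in the low byte and 0xFC in the high
+   byte; OC_MV_X() recovers 3 via the signed char cast and OC_MV_Y() recovers
+   -4 via the arithmetic right shift.*/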
+
+
+
+/*Super blocks are 32x32 segments of pixels in a single color plane indexed
+ in image order.
+ Internally, super blocks are broken up into four quadrants, each of which
+ contains a 2x2 pattern of blocks, each of which is an 8x8 block of pixels.
+ Quadrants, and the blocks within them, are indexed in a special order called
+ a "Hilbert curve" within the super block.
+
+ In order to differentiate between the Hilbert-curve indexing strategy and
+ the regular image order indexing strategy, blocks indexed in image order
+ are called "fragments".
+ Fragments are indexed in image order, left to right, then bottom to top,
+ from Y' plane to Cb plane to Cr plane.
+
+ The co-located fragments in all image planes corresponding to the location
+ of a single quadrant of a luma plane super block form a macro block.
+ Thus there is only a single set of macro blocks for all planes, each of which
+ contains between 6 and 12 fragments, depending on the pixel format.
+ Therefore macro block information is kept in a separate set of arrays from
+ super blocks to avoid unused space in the other planes.
+ The lists are indexed in super block order.
+ That is, the macro block corresponding to the macro block mbi in (luma plane)
+ super block sbi is at index (sbi<<2|mbi).
+ Thus the number of macro blocks in each dimension is always twice the number
+ of super blocks, even when only an odd number fall inside the coded frame.
+ These "extra" macro blocks are just an artifact of our internal data layout,
+ and not part of the coded stream; they are flagged with a negative MB mode.*/
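+/*Worked example (not in the upstream header): the macro block occupying
+   quadrant 2 of luma-plane super block 5 has index (5<<2|2)==22 in the
+   mb_maps and mb_modes arrays, whether or not all of its fragments fall
+   inside the coded frame.*/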
+
+
+
+/*Super block information.*/
+struct oc_sb_flags{
+ unsigned char coded_fully:1;
+ unsigned char coded_partially:1;
+ unsigned char quad_valid:4;
+};
+
+
+
+/*Information about a fragment which intersects the border of the displayable
+ region.
+ This marks which pixels belong to the displayable region.*/
+struct oc_border_info{
+ /*A bit mask marking which pixels are in the displayable region.
+ Pixel (x,y) corresponds to bit (y<<3|x).*/
+ ogg_int64_t mask;
+ /*The number of pixels in the displayable region.
+ This is always positive, and always less than 64.*/
+ int npixels;
+};
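+/*For illustration (not in the upstream header): pixel (2,1) of a fragment
+   maps to bit (1<<3|2)==10, so a border fragment whose displayable region
+   contained only that pixel would have mask==(ogg_int64_t)1<<10 and
+   npixels==1.*/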
+
+
+
+/*Fragment information.*/
+struct oc_fragment{
+ /*A flag indicating whether or not this fragment is coded.*/
+ unsigned coded:1;
+ /*A flag indicating that this entire fragment lies outside the displayable
+ region of the frame.
+ Note the contrast with an invalid macro block, which is outside the coded
+ frame, not just the displayable one.
+ There are no fragments outside the coded frame by construction.*/
+ unsigned invalid:1;
+ /*The index of the quality index used for this fragment's AC coefficients.*/
+ unsigned qii:4;
+ /*The index of the reference frame this fragment is predicted from.*/
+ unsigned refi:2;
+ /*The mode of the macroblock this fragment belongs to.*/
+ unsigned mb_mode:3;
+ /*The index of the associated border information for fragments which lie
+ partially outside the displayable region.
+ For fragments completely inside or outside this region, this is -1.
+ Note that the C standard requires an explicit signed keyword for bitfield
+ types, since some compilers may treat them as unsigned without it.*/
+ signed int borderi:5;
+ /*The prediction-corrected DC component.
+ Note that the C standard requires an explicit signed keyword for bitfield
+ types, since some compilers may treat them as unsigned without it.*/
+ signed int dc:16;
+};
+
+
+
+/*A description of each fragment plane.*/
+struct oc_fragment_plane{
+ /*The number of fragments in the horizontal direction.*/
+ int nhfrags;
+ /*The number of fragments in the vertical direction.*/
+ int nvfrags;
+ /*The offset of the first fragment in the plane.*/
+ ptrdiff_t froffset;
+ /*The total number of fragments in the plane.*/
+ ptrdiff_t nfrags;
+ /*The number of super blocks in the horizontal direction.*/
+ unsigned nhsbs;
+ /*The number of super blocks in the vertical direction.*/
+ unsigned nvsbs;
+ /*The offset of the first super block in the plane.*/
+ unsigned sboffset;
+ /*The total number of super blocks in the plane.*/
+ unsigned nsbs;
+};
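+/*For illustration (a sketch of the layout, assuming the per-plane fragment
+   lists are stored back to back in the global fragment array):
+   fplanes[1].froffset is then fplanes[0].nfrags, and fplanes[2].froffset is
+   fplanes[0].nfrags+fplanes[1].nfrags; sboffset behaves the same way for
+   super blocks.*/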
+
+
+typedef void (*oc_state_loop_filter_frag_rows_func)(
+ const oc_theora_state *_state,signed char _bv[256],int _refi,int _pli,
+ int _fragy0,int _fragy_end);
+
+/*The shared (encoder and decoder) functions that have accelerated variants.*/
+struct oc_base_opt_vtable{
+ void (*frag_copy)(unsigned char *_dst,
+ const unsigned char *_src,int _ystride);
+ void (*frag_copy_list)(unsigned char *_dst_frame,
+ const unsigned char *_src_frame,int _ystride,
+ const ptrdiff_t *_fragis,ptrdiff_t _nfragis,const ptrdiff_t *_frag_buf_offs);
+ void (*frag_recon_intra)(unsigned char *_dst,int _ystride,
+ const ogg_int16_t _residue[64]);
+ void (*frag_recon_inter)(unsigned char *_dst,
+ const unsigned char *_src,int _ystride,const ogg_int16_t _residue[64]);
+ void (*frag_recon_inter2)(unsigned char *_dst,const unsigned char *_src1,
+ const unsigned char *_src2,int _ystride,const ogg_int16_t _residue[64]);
+ void (*idct8x8)(ogg_int16_t _y[64],ogg_int16_t _x[64],int _last_zzi);
+ void (*state_frag_recon)(const oc_theora_state *_state,ptrdiff_t _fragi,
+ int _pli,ogg_int16_t _dct_coeffs[128],int _last_zzi,ogg_uint16_t _dc_quant);
+ void (*loop_filter_init)(signed char _bv[256],int _flimit);
+ oc_state_loop_filter_frag_rows_func state_loop_filter_frag_rows;
+ void (*restore_fpu)(void);
+};
+
+/*The shared (encoder and decoder) tables that vary according to which variants
+ of the above functions are used.*/
+struct oc_base_opt_data{
+ const unsigned char *dct_fzig_zag;
+};
+
+
+/*State information common to both the encoder and decoder.*/
+struct oc_theora_state{
+ /*The stream information.*/
+ th_info info;
+# if defined(OC_STATE_USE_VTABLE)
+ /*Table for shared accelerated functions.*/
+ oc_base_opt_vtable opt_vtable;
+# endif
+ /*Table for shared data used by accelerated functions.*/
+ oc_base_opt_data opt_data;
+ /*CPU flags to detect the presence of extended instruction sets.*/
+ ogg_uint32_t cpu_flags;
+ /*The fragment plane descriptions.*/
+ oc_fragment_plane fplanes[3];
+ /*The list of fragments, indexed in image order.*/
+ oc_fragment *frags;
+  /*The offset into the reference frame buffer to the upper-left pixel of
+ each fragment.*/
+ ptrdiff_t *frag_buf_offs;
+ /*The motion vector for each fragment.*/
+ oc_mv *frag_mvs;
+ /*The total number of fragments in a single frame.*/
+ ptrdiff_t nfrags;
+ /*The list of super block maps, indexed in image order.*/
+ oc_sb_map *sb_maps;
+ /*The list of super block flags, indexed in image order.*/
+ oc_sb_flags *sb_flags;
+ /*The total number of super blocks in a single frame.*/
+ unsigned nsbs;
+ /*The fragments from each color plane that belong to each macro block.
+ Fragments are stored in image order (left to right then top to bottom).
+ When chroma components are decimated, the extra fragments have an index of
+ -1.*/
+ oc_mb_map *mb_maps;
+ /*The list of macro block modes.
+ A negative number indicates the macro block lies entirely outside the
+ coded frame.*/
+ signed char *mb_modes;
+ /*The number of macro blocks in the X direction.*/
+ unsigned nhmbs;
+ /*The number of macro blocks in the Y direction.*/
+ unsigned nvmbs;
+ /*The total number of macro blocks.*/
+ size_t nmbs;
+ /*The list of coded fragments, in coded order.
+ Uncoded fragments are stored in reverse order from the end of the list.*/
+ ptrdiff_t *coded_fragis;
+ /*The number of coded fragments in each plane.*/
+ ptrdiff_t ncoded_fragis[3];
+ /*The total number of coded fragments.*/
+ ptrdiff_t ntotal_coded_fragis;
+ /*The actual buffers used for the reference frames.*/
+ th_ycbcr_buffer ref_frame_bufs[6];
+ /*The index of the buffers being used for each OC_FRAME_* reference frame.*/
+ int ref_frame_idx[6];
+ /*The storage for the reference frame buffers.
+ This is just ref_frame_bufs[ref_frame_idx[i]][0].data, but is cached here
+ for faster look-up.*/
+ unsigned char *ref_frame_data[6];
+ /*The handle used to allocate the reference frame buffers.*/
+ unsigned char *ref_frame_handle;
+ /*The strides for each plane in the reference frames.*/
+ int ref_ystride[3];
+ /*The number of unique border patterns.*/
+ int nborders;
+ /*The unique border patterns for all border fragments.
+ The borderi field of fragments which straddle the border indexes this
+ list.*/
+ oc_border_info borders[16];
+ /*The frame number of the last keyframe.*/
+ ogg_int64_t keyframe_num;
+ /*The frame number of the current frame.*/
+ ogg_int64_t curframe_num;
+ /*The granpos of the current frame.*/
+ ogg_int64_t granpos;
+ /*The type of the current frame.*/
+ signed char frame_type;
+ /*The bias to add to the frame count when computing granule positions.*/
+ unsigned char granpos_bias;
+ /*The number of quality indices used in the current frame.*/
+ unsigned char nqis;
+ /*The quality indices of the current frame.*/
+ unsigned char qis[3];
+ /*The dequantization tables, stored in zig-zag order, and indexed by
+ qi, pli, qti, and zzi.*/
+ ogg_uint16_t *dequant_tables[64][3][2];
+ OC_ALIGN16(oc_quant_table dequant_table_data[64][3][2]);
+ /*Loop filter strength parameters.*/
+ unsigned char loop_filter_limits[64];
+};
+
+
+
+/*The function type used to fill in the chroma plane motion vectors for a
+ macro block when 4 different motion vectors are specified in the luma
+ plane.
+ _cbmvs: The chroma block-level motion vectors to fill in.
+  _lbmvs: The luma block-level motion vectors.*/
+typedef void (*oc_set_chroma_mvs_func)(oc_mv _cbmvs[4],const oc_mv _lbmvs[4]);
+
+
+
+/*A table of functions used to fill in the Cb,Cr plane motion vectors for a
+ macro block when 4 different motion vectors are specified in the luma
+ plane.*/
+extern const oc_set_chroma_mvs_func OC_SET_CHROMA_MVS_TABLE[TH_PF_NFORMATS];
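+/*Illustrative usage (not in the upstream header): for an
+   OC_MODE_INTER_MV_FOUR macro block, the chroma vectors can be derived from
+   the four luma block vectors with a call such as
+     (*OC_SET_CHROMA_MVS_TABLE[state->info.pixel_fmt])(cbmvs,lbmvs);
+   where cbmvs and lbmvs are oc_mv[4] arrays.*/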
+
+
+
+int oc_state_init(oc_theora_state *_state,const th_info *_info,int _nrefs);
+void oc_state_clear(oc_theora_state *_state);
+void oc_state_accel_init_c(oc_theora_state *_state);
+void oc_state_borders_fill_rows(oc_theora_state *_state,int _refi,int _pli,
+ int _y0,int _yend);
+void oc_state_borders_fill_caps(oc_theora_state *_state,int _refi,int _pli);
+void oc_state_borders_fill(oc_theora_state *_state,int _refi);
+void oc_state_fill_buffer_ptrs(oc_theora_state *_state,int _buf_idx,
+ th_ycbcr_buffer _img);
+int oc_state_mbi_for_pos(oc_theora_state *_state,int _mbx,int _mby);
+int oc_state_get_mv_offsets(const oc_theora_state *_state,int _offsets[2],
+ int _pli,oc_mv _mv);
+
+void oc_loop_filter_init_c(signed char _bv[256],int _flimit);
+void oc_state_loop_filter(oc_theora_state *_state,int _frame);
+# if defined(OC_DUMP_IMAGES)
+int oc_state_dump_frame(const oc_theora_state *_state,int _frame,
+ const char *_suf);
+# endif
+
+/*Default pure-C implementations of shared accelerated functions.*/
+void oc_frag_copy_c(unsigned char *_dst,
+ const unsigned char *_src,int _src_ystride);
+void oc_frag_copy_list_c(unsigned char *_dst_frame,
+ const unsigned char *_src_frame,int _ystride,
+ const ptrdiff_t *_fragis,ptrdiff_t _nfragis,const ptrdiff_t *_frag_buf_offs);
+void oc_frag_recon_intra_c(unsigned char *_dst,int _dst_ystride,
+ const ogg_int16_t _residue[64]);
+void oc_frag_recon_inter_c(unsigned char *_dst,
+ const unsigned char *_src,int _ystride,const ogg_int16_t _residue[64]);
+void oc_frag_recon_inter2_c(unsigned char *_dst,const unsigned char *_src1,
+ const unsigned char *_src2,int _ystride,const ogg_int16_t _residue[64]);
+void oc_idct8x8_c(ogg_int16_t _y[64],ogg_int16_t _x[64],int _last_zzi);
+void oc_state_frag_recon_c(const oc_theora_state *_state,ptrdiff_t _fragi,
+ int _pli,ogg_int16_t _dct_coeffs[128],int _last_zzi,ogg_uint16_t _dc_quant);
+void oc_state_loop_filter_frag_rows_c(const oc_theora_state *_state,
+ signed char _bv[256],int _refi,int _pli,int _fragy0,int _fragy_end);
+void oc_restore_fpu_c(void);
+
+/*We need a way to call a few encoder functions without introducing a link-time
+ dependency into the decoder, while still allowing the old alpha API which
+ does not distinguish between encoder and decoder objects to be used.
+ We do this by placing a function table at the start of the encoder object
+ which can dispatch into the encoder library.
+ We do a similar thing for the decoder in case we ever decide to split off a
+ common base library.*/
+typedef void (*oc_state_clear_func)(theora_state *_th);
+typedef int (*oc_state_control_func)(theora_state *th,int _req,
+ void *_buf,size_t _buf_sz);
+typedef ogg_int64_t (*oc_state_granule_frame_func)(theora_state *_th,
+ ogg_int64_t _granulepos);
+typedef double (*oc_state_granule_time_func)(theora_state *_th,
+ ogg_int64_t _granulepos);
+
+
+struct oc_state_dispatch_vtable{
+ oc_state_clear_func clear;
+ oc_state_control_func control;
+ oc_state_granule_frame_func granule_frame;
+ oc_state_granule_time_func granule_time;
+};
+
+#endif
diff --git a/thirdparty/libtheora/theora/codec.h b/thirdparty/libtheora/theora/codec.h
index 5c2669630c..29b8602325 100644
--- a/thirdparty/libtheora/theora/codec.h
+++ b/thirdparty/libtheora/theora/codec.h
@@ -16,11 +16,12 @@
********************************************************************/
/**\mainpage
- *
+ *
* \section intro Introduction
*
- * This is the documentation for <tt>libtheora</tt> C API.
- * The current reference
+ * This is the documentation for the <tt>libtheora</tt> C API.
+ *
+ * The \c libtheora package is the current reference
* implementation for <a href="http://www.theora.org/">Theora</a>, a free,
* patent-unencumbered video codec.
* Theora is derived from On2's VP3 codec with additional features and
@@ -30,29 +31,31 @@
* <a href="http://www.theora.org/doc/Theora.pdf">the Theora
* specification</a>.
*
- * \subsection Organization
+ * \section Organization
*
- * The functions documented here are actually subdivided into three
+ * The functions documented here are divided between two
* separate libraries:
- * - <tt>libtheoraenc</tt> contains the encoder interface,
+ * - \c libtheoraenc contains the encoder interface,
* described in \ref encfuncs.
- * - <tt>libtheoradec</tt> contains the decoder interface and
- * routines shared with the encoder.
- * You must also link to this if you link to <tt>libtheoraenc</tt>.
- * The routines in this library are described in \ref decfuncs and
- * \ref basefuncs.
- * - <tt>libtheora</tt> contains the \ref oldfuncs.
+ * - \c libtheoradec contains the decoder interface,
+ * described in \ref decfuncs, \n
+ * and additional \ref basefuncs.
+ *
+ * New code should link to \c libtheoradec. If using encoder
+ * features, it must also link to \c libtheoraenc.
*
- * New code should link to <tt>libtheoradec</tt> and, if using encoder
- * features, <tt>libtheoraenc</tt>. Together these two export both
- * the standard and the legacy API, so this is all that is needed by
- * any code. The older <tt>libtheora</tt> library is provided just for
- * compatibility with older build configurations.
+ * During initial development, prior to the 1.0 release,
+ * \c libtheora exported a different \ref oldfuncs which
+ * combined both encode and decode functions.
+ * In general, legacy API symbols can be identified
+ * by their \c theora_ or \c OC_ namespace prefixes.
+ * The current API uses \c th_ or \c TH_ instead.
*
- * In general the recommended 1.x API symbols can be distinguished
- * by their <tt>th_</tt> or <tt>TH_</tt> namespace prefix.
- * The older, legacy API uses <tt>theora_</tt> or <tt>OC_</tt>
- * prefixes instead.
+ * While deprecated, the legacy API is still exported by \c libtheoraenc
+ * and \c libtheoradec alongside the API documented above.
+ * Likewise, the legacy \c libtheora included with this package
+ * exports the new 1.x API. Older code and build scripts can therefore
+ * be updated independently to the current scheme.
*/
/**\file
@@ -168,7 +171,7 @@ typedef struct{
typedef th_img_plane th_ycbcr_buffer[3];
/**Theora bitstream information.
- * This contains the basic playback parameters for a stream, and corresponds to
+ * This contains the basic playback parameters for a stream, and corresponds to
* the initial 'info' header packet.
* To initialize an encoder, the application fills in this structure and
* passes it to th_encode_alloc().
@@ -317,7 +320,7 @@ typedef struct{
* In filling in this structure, th_decode_headerin() will null-terminate
* the user_comment strings for safety.
* However, the bitstream format itself treats them as 8-bit clean vectors,
- * possibly containing null characters, and so the length array should be
+ * possibly containing null characters, so the length array should be
* treated as their authoritative length.
*/
typedef struct th_comment{
@@ -448,7 +451,13 @@ typedef struct{
/**\defgroup basefuncs Functions Shared by Encode and Decode*/
/*@{*/
-/**\name Basic shared functions*/
+/**\name Basic shared functions
+ * These functions return information about the library itself,
+ * or provide high-level information about codec state
+ * and packet type.
+ *
+ * You must link to \c libtheoradec if you use any of the
+ * functions in this section.*/
/*@{*/
/**Retrieves a human-readable string to identify the library vendor and
* version.
@@ -510,7 +519,12 @@ extern int th_packet_iskeyframe(ogg_packet *_op);
/*@}*/
-/**\name Functions for manipulating header data*/
+/**\name Functions for manipulating header data
+ * These functions manipulate the #th_info and #th_comment structures
+ * which describe video parameters and key-value metadata, respectively.
+ *
+ * You must link to \c libtheoradec if you use any of the
+ * functions in this section.*/
/*@{*/
/**Initializes a th_info structure.
* This should be called on a freshly allocated #th_info structure before
@@ -537,7 +551,7 @@ extern void th_comment_init(th_comment *_tc);
* \param _tc The #th_comment struct to add the comment to.
* \param _comment Must be a null-terminated UTF-8 string containing the
* comment in "TAG=the value" form.*/
-extern void th_comment_add(th_comment *_tc, char *_comment);
+extern void th_comment_add(th_comment *_tc,const char *_comment);
/**Add a comment to an initialized #th_comment structure.
* \note Neither th_comment_add() nor th_comment_add_tag() support
* comments containing null values, although the bitstream format does
@@ -545,10 +559,11 @@ extern void th_comment_add(th_comment *_tc, char *_comment);
* To add such comments you will need to manipulate the #th_comment
* structure directly.
* \param _tc The #th_comment struct to add the comment to.
- * \param _tag A null-terminated string containing the tag associated with
+ * \param _tag A null-terminated string containing the tag associated with
* the comment.
* \param _val The corresponding value as a null-terminated string.*/
-extern void th_comment_add_tag(th_comment *_tc,char *_tag,char *_val);
+extern void th_comment_add_tag(th_comment *_tc,const char *_tag,
+ const char *_val);
/**Look up a comment value by its tag.
* \param _tc An initialized #th_comment structure.
* \param _tag The tag to look up.
@@ -564,15 +579,15 @@ extern void th_comment_add_tag(th_comment *_tc,char *_tag,char *_val);
* It should not be modified or freed by the application, and
* modifications to the structure may invalidate the pointer.
* \retval NULL If no matching tag is found.*/
-extern char *th_comment_query(th_comment *_tc,char *_tag,int _count);
+extern char *th_comment_query(th_comment *_tc,const char *_tag,int _count);
/**Look up the number of instances of a tag.
* Call this first when querying for a specific tag and then iterate over the
* number of instances with separate calls to th_comment_query() to
* retrieve all the values for that tag in order.
* \param _tc An initialized #th_comment structure.
* \param _tag The tag to look up.
- * \return The number on instances of this particular tag.*/
-extern int th_comment_query_count(th_comment *_tc,char *_tag);
+ * \return The number of instances of this particular tag.*/
+extern int th_comment_query_count(th_comment *_tc,const char *_tag);
/**Clears a #th_comment structure.
* This should be called on a #th_comment structure after it is no longer
* needed.
diff --git a/thirdparty/libtheora/theora/theora.h b/thirdparty/libtheora/theora/theora.h
index af6eb6f380..a729a76890 100644
--- a/thirdparty/libtheora/theora/theora.h
+++ b/thirdparty/libtheora/theora/theora.h
@@ -34,41 +34,41 @@ extern "C"
*
* \section intro Introduction
*
- * This is the documentation for the libtheora legacy C API, declared in
+ * This is the documentation for the libtheora legacy C API, declared in
* the theora.h header, which describes the old interface used before
* the 1.0 release. This API was widely deployed for several years and
- * remains supported, but for new code we recommend the cleaner API
+ * remains supported, but for new code we recommend the cleaner API
* declared in theoradec.h and theoraenc.h.
*
* libtheora is the reference implementation for
* <a href="http://www.theora.org/">Theora</a>, a free video codec.
* Theora is derived from On2's VP3 codec with improved integration with
* Ogg multimedia formats by <a href="http://www.xiph.org/">Xiph.Org</a>.
- *
+ *
* \section overview Overview
*
- * This library will both decode and encode theora packets to/from raw YUV
+ * This library will both decode and encode theora packets to/from raw YUV
* frames. In either case, the packets will most likely either come from or
- * need to be embedded in an Ogg stream. Use
- * <a href="http://xiph.org/ogg/">libogg</a> or
+ * need to be embedded in an Ogg stream. Use
+ * <a href="http://xiph.org/ogg/">libogg</a> or
* <a href="http://www.annodex.net/software/liboggz/index.html">liboggz</a>
* to extract/package these packets.
*
* \section decoding Decoding Process
*
* Decoding can be separated into the following steps:
- * -# initialise theora_info and theora_comment structures using
+ * -# initialise theora_info and theora_comment structures using
* theora_info_init() and theora_comment_init():
\verbatim
theora_info info;
theora_comment comment;
-
+
theora_info_init(&info);
theora_comment_init(&comment);
\endverbatim
- * -# retrieve header packets from Ogg stream (there should be 3) and decode
- * into theora_info and theora_comment structures using
- * theora_decode_header(). See \ref identification for more information on
+ * -# retrieve header packets from Ogg stream (there should be 3) and decode
+ * into theora_info and theora_comment structures using
+ * theora_decode_header(). See \ref identification for more information on
* identifying which packets are theora packets.
\verbatim
int i;
@@ -79,14 +79,14 @@ extern "C"
}
\endverbatim
* -# initialise the decoder based on the information retrieved into the
- * theora_info struct by theora_decode_header(). You will need a
+ * theora_info struct by theora_decode_header(). You will need a
* theora_state struct.
\verbatim
theora_state state;
-
+
theora_decode_init(&state, &info);
\endverbatim
- * -# pass in packets and retrieve decoded frames! See the yuv_buffer
+ * -# pass in packets and retrieve decoded frames! See the yuv_buffer
* documentation for information on how to retrieve raw YUV data.
\verbatim
yuf_buffer buffer;
@@ -96,20 +96,20 @@ extern "C"
theora_decode_YUVout(&state, &buffer);
}
\endverbatim
- *
+ *
*
* \subsection identification Identifying Theora Packets
*
- * All streams inside an Ogg file have a unique serial_no attached to the
- * stream. Typically, you will want to
- * - retrieve the serial_no for each b_o_s (beginning of stream) page
- * encountered within the Ogg file;
- * - test the first (only) packet on that page to determine if it is a theora
+ * All streams inside an Ogg file have a unique serial_no attached to the
+ * stream. Typically, you will want to
+ * - retrieve the serial_no for each b_o_s (beginning of stream) page
+ * encountered within the Ogg file;
+ * - test the first (only) packet on that page to determine if it is a theora
* packet;
- * - once you have found a theora b_o_s page then use the retrieved serial_no
+ * - once you have found a theora b_o_s page then use the retrieved serial_no
* to identify future packets belonging to the same theora stream.
- *
- * Note that you \e cannot use theora_packet_isheader() to determine if a
+ *
+ * Note that you \e cannot use theora_packet_isheader() to determine if a
* packet is a theora packet or not, as this function does not perform any
* checking beyond whether a header bit is present. Instead, use the
* theora_decode_header() function and check the return value; or examine the
@@ -124,9 +124,9 @@ extern "C"
* A YUV buffer for passing uncompressed frames to and from the codec.
* This holds a Y'CbCr frame in planar format. The CbCr planes can be
* subsampled and have their own separate dimensions and row stride
- * offsets. Note that the strides may be negative in some
+ * offsets. Note that the strides may be negative in some
* configurations. For theora the width and height of the largest plane
- * must be a multiple of 16. The actual meaningful picture size and
+ * must be a multiple of 16. The actual meaningful picture size and
* offset are stored in the theora_info structure; frames returned by
* the decoder may need to be cropped for display.
*
@@ -135,8 +135,8 @@ extern "C"
* are ordered from left to right.
*
* During decode, the yuv_buffer struct is allocated by the user, but all
- * fields (including luma and chroma pointers) are filled by the library.
- * These pointers address library-internal memory and their contents should
+ * fields (including luma and chroma pointers) are filled by the library.
+ * These pointers address library-internal memory and their contents should
* not be modified.
*
* Conversely, during encode the user allocates the struct and fills out all
@@ -179,14 +179,14 @@ typedef enum {
OC_PF_420, /**< Chroma subsampling by 2 in each direction (4:2:0) */
OC_PF_RSVD, /**< Reserved value */
OC_PF_422, /**< Horizonatal chroma subsampling by 2 (4:2:2) */
- OC_PF_444, /**< No chroma subsampling at all (4:4:4) */
+ OC_PF_444 /**< No chroma subsampling at all (4:4:4) */
} theora_pixelformat;
/**
* Theora bitstream info.
* Contains the basic playback parameters for a stream,
* corresponding to the initial 'info' header packet.
- *
+ *
* Encoded theora frames must be a multiple of 16 in width and height.
* To handle other frame sizes, a crop rectangle is specified in
* frame_height and frame_width, offset_x and * offset_y. The offset
@@ -198,10 +198,10 @@ typedef enum {
* fraction. Aspect ratio is also stored as a rational fraction, and
* refers to the aspect ratio of the frame pixels, not of the
* overall frame itself.
- *
+ *
* See <a href="http://svn.xiph.org/trunk/theora/examples/encoder_example.c">
* examples/encoder_example.c</a> for usage examples of the
- * other paramters and good default settings for the encoder parameters.
+ * other parameters and good default settings for the encoder parameters.
*/
typedef struct {
ogg_uint32_t width; /**< encoded frame width */
@@ -253,14 +253,14 @@ typedef struct{
} theora_state;
-/**
+/**
* Comment header metadata.
*
* This structure holds the in-stream metadata corresponding to
* the 'comment' header packet.
*
* Meta data is stored as a series of (tag, value) pairs, in
- * length-encoded string vectors. The first occurence of the
+ * length-encoded string vectors. The first occurrence of the
* '=' character delimits the tag and value. A particular tag
* may occur more than once. The character set encoding for
* the strings is always UTF-8, but the tag names are limited
@@ -285,7 +285,7 @@ typedef struct theora_comment{
/* \anchor decctlcodes_old
* These are the available request codes for theora_control()
* when called with a decoder instance.
- * By convention decoder control codes are odd, to distinguish
+ * By convention decoder control codes are odd, to distinguish
* them from \ref encctlcodes_old "encoder control codes" which
* are even.
*
@@ -306,7 +306,7 @@ typedef struct theora_comment{
#define TH_DECCTL_GET_PPLEVEL_MAX (1)
/**Set the post-processing level.
- * Sets the level of post-processing to use when decoding the
+ * Sets the level of post-processing to use when decoding the
* compressed stream. This must be a value between zero (off)
* and the maximum returned by TH_DECCTL_GET_PPLEVEL_MAX.
*/
@@ -345,9 +345,9 @@ typedef struct theora_comment{
* \param[in] buf #th_quant_info
* \retval OC_FAULT \a theora_state is <tt>NULL</tt>.
* \retval OC_EINVAL Encoding has already begun, the quantization parameters
- * are not acceptable to this version of the encoder,
- * \a buf is <tt>NULL</tt> and \a buf_sz is not zero,
- * or \a buf is non-<tt>NULL</tt> and \a buf_sz is
+ * are not acceptable to this version of the encoder,
+ * \a buf is <tt>NULL</tt> and \a buf_sz is not zero,
+ * or \a buf is non-<tt>NULL</tt> and \a buf_sz is
* not <tt>sizeof(#th_quant_info)</tt>.
* \retval OC_IMPL Not supported by this implementation.*/
#define TH_ENCCTL_SET_QUANT_PARAMS (2)
@@ -424,7 +424,7 @@ typedef struct theora_comment{
#define OC_NEWPACKET -25 /**< Packet is an (ignorable) unhandled extension */
#define OC_DUPFRAME 1 /**< Packet is a dropped frame */
-/**
+/**
* Retrieve a human-readable string to identify the encoder vendor and version.
* \returns A version string.
*/
@@ -462,7 +462,7 @@ extern int theora_encode_init(theora_state *th, theora_info *ti);
extern int theora_encode_YUVin(theora_state *t, yuv_buffer *yuv);
/**
- * Request the next packet of encoded video.
+ * Request the next packet of encoded video.
* The encoded data is placed in a user-provided ogg_packet structure.
* \param t A theora_state handle previously initialized for encoding.
* \param last_p whether this is the last packet the encoder should produce.
@@ -496,7 +496,11 @@ extern int theora_encode_header(theora_state *t, ogg_packet *op);
* \param op An ogg_packet structure to fill. libtheora will set all
* elements of this structure, including a pointer to the encoded
* comment data. The memory for the comment data is owned by
- * libtheora.
+ * the application, and must be freed by it using _ogg_free().
+ * On some systems (such as Windows when using dynamic linking), this
+ * may mean the free is executed in a different module from the
+ * malloc, which will crash; there is no way to free this memory on
+ * such systems.
* \retval 0 Success
*/
extern int theora_encode_comment(theora_comment *tc, ogg_packet *op);
@@ -581,8 +585,8 @@ extern int theora_decode_packetin(theora_state *th,ogg_packet *op);
* \param th A theora_state handle previously initialized for decoding.
* \param yuv A yuv_buffer in which libtheora should place the decoded data.
* Note that the buffer struct itself is allocated by the user, but
- * that the luma and chroma pointers will be filled in by the
- * library. Also note that these luma and chroma regions should be
+ * that the luma and chroma pointers will be filled in by the
+ * library. Also note that these luma and chroma regions should be
* considered read-only by the user.
* \retval 0 Success
*/
@@ -617,22 +621,22 @@ extern int theora_packet_iskeyframe(ogg_packet *op);
/**
* Report the granulepos shift radix
*
- * When embedded in Ogg, Theora uses a two-part granulepos,
+ * When embedded in Ogg, Theora uses a two-part granulepos,
* splitting the 64-bit field into two pieces. The more-significant
* section represents the frame count at the last keyframe,
* and the less-significant section represents the count of
* frames since the last keyframe. In this way the overall
* field is still non-decreasing with time, but usefully encodes
* a pointer to the last keyframe, which is necessary for
- * correctly restarting decode after a seek.
+ * correctly restarting decode after a seek.
*
* This function reports the number of bits used to represent
* the distance to the last keyframe, and thus how the granulepos
* field must be shifted or masked to obtain the two parts.
- *
+ *
* Since libtheora returns compressed data in an ogg_packet
* structure, this may be generally useful even if the Theora
- * packets are not being used in an Ogg container.
+ * packets are not being used in an Ogg container.
*
* \param ti A previously initialized theora_info struct
* \returns The bit shift dividing the two granulepos fields
@@ -644,7 +648,7 @@ int theora_granule_shift(theora_info *ti);
/**
* Convert a granulepos to an absolute frame index, starting at 0.
* The granulepos is interpreted in the context of a given theora_state handle.
- *
+ *
* Note that while the granulepos encodes the frame count (i.e. starting
* from 1) this call returns the frame index, starting from zero. Thus
* One can calculate the presentation time by multiplying the index by
@@ -670,9 +674,7 @@ extern ogg_int64_t theora_granule_frame(theora_state *th,ogg_int64_t granulepos)
* This is the "end time" for the frame, or the latest time it should
* be displayed.
* It is not the presentation time.
- * \retval -1. The given granulepos is undefined (i.e. negative), or
- * \retval -1. The function has been disabled because floating
- * point support is not available.
+ * \retval -1. The given granulepos is undefined (i.e. negative).
*/
extern double theora_granule_time(theora_state *th,ogg_int64_t granulepos);
@@ -699,7 +701,7 @@ extern void theora_clear(theora_state *t);
/**
* Initialize an allocated theora_comment structure
- * \param tc An allocated theora_comment structure
+ * \param tc An allocated theora_comment structure
**/
extern void theora_comment_init(theora_comment *tc);
@@ -720,7 +722,7 @@ extern void theora_comment_add(theora_comment *tc, char *comment);
/**
* Add a comment to an initialized theora_comment structure.
* \param tc A previously initialized theora comment structure
- * \param tag A null-terminated string containing the tag
+ * \param tag A null-terminated string containing the tag
* associated with the comment.
* \param value The corresponding value as a null-terminated string
*
@@ -752,9 +754,9 @@ extern char *theora_comment_query(theora_comment *tc, char *tag, int count);
* \param tc An initialized theora_comment structure
* \param tag The tag to look up
* \returns The number on instances of a particular tag.
- *
+ *
* Call this first when querying for a specific tag and then interate
- * over the number of instances with separate calls to
+ * over the number of instances with separate calls to
* theora_comment_query() to retrieve all instances in order.
**/
extern int theora_comment_query_count(theora_comment *tc, char *tag);
@@ -769,7 +771,7 @@ extern void theora_comment_clear(theora_comment *tc);
* This is used to provide advanced control the encoding process.
* \param th A #theora_state handle.
* \param req The control code to process.
- * See \ref encctlcodes_old "the list of available
+ * See \ref encctlcodes_old "the list of available
* control codes" for details.
* \param buf The parameters for this control code.
* \param buf_sz The size of the parameter buffer.*/
diff --git a/thirdparty/libtheora/theora/theoradec.h b/thirdparty/libtheora/theora/theoradec.h
index b20f0e3a64..77bef81909 100644
--- a/thirdparty/libtheora/theora/theoradec.h
+++ b/thirdparty/libtheora/theora/theoradec.h
@@ -92,13 +92,17 @@ extern "C" {
* <tt>sizeof(th_stripe_callback)</tt>.*/
#define TH_DECCTL_SET_STRIPE_CB (7)
-/**Enables telemetry and sets the macroblock display mode */
+/**Sets the macroblock display mode. Set to 0 to disable displaying
+ * macroblocks.*/
#define TH_DECCTL_SET_TELEMETRY_MBMODE (9)
-/**Enables telemetry and sets the motion vector display mode */
+/**Sets the motion vector display mode. Set to 0 to disable displaying motion
+ * vectors.*/
#define TH_DECCTL_SET_TELEMETRY_MV (11)
-/**Enables telemetry and sets the adaptive quantization display mode */
+/**Sets the adaptive quantization display mode. Set to 0 to disable displaying
+ * adaptive quantization. */
#define TH_DECCTL_SET_TELEMETRY_QI (13)
-/**Enables telemetry and sets the bitstream breakdown visualization mode */
+/**Sets the bitstream breakdown visualization mode. Set to 0 to disable
+ * displaying bitstream breakdown.*/
#define TH_DECCTL_SET_TELEMETRY_BITS (15)
/*@}*/
@@ -171,7 +175,7 @@ typedef struct th_setup_info th_setup_info;
/**\defgroup decfuncs Functions for Decoding*/
/*@{*/
/**\name Functions for decoding
- * You must link to <tt>libtheoradec</tt> if you use any of the
+ * You must link to <tt>libtheoradec</tt> if you use any of the
* functions in this section.
*
* The functions are listed in the order they are used in a typical decode.
@@ -267,7 +271,10 @@ extern void th_setup_free(th_setup_info *_setup);
* See \ref decctlcodes "the list of available control codes"
* for details.
* \param _buf The parameters for this control code.
- * \param _buf_sz The size of the parameter buffer.*/
+ * \param _buf_sz The size of the parameter buffer.
+ * \return Possible return values depend on the control code used.
+ * See \ref decctlcodes "the list of control codes" for
+ * specific values. Generally 0 indicates success.*/
extern int th_decode_ctl(th_dec_ctx *_dec,int _req,void *_buf,
size_t _buf_sz);
/**Submits a packet containing encoded video data to the decoder.
@@ -283,7 +290,8 @@ extern int th_decode_ctl(th_dec_ctx *_dec,int _req,void *_buf,
* \retval 0 Success.
* A new decoded frame can be retrieved by calling
* th_decode_ycbcr_out().
- * \retval TH_DUPFRAME The packet represented a dropped (0-byte) frame.
+ * \retval TH_DUPFRAME The packet represented a dropped frame (either a
+ * 0-byte frame or an INTER frame with no coded blocks).
* The player can skip the call to th_decode_ycbcr_out(),
* as the contents of the decoded frame buffer have not
* changed.
diff --git a/thirdparty/libtheora/theora/theoraenc.h b/thirdparty/libtheora/theora/theoraenc.h
index fdf2ab21e2..79b1c2b880 100644
--- a/thirdparty/libtheora/theora/theoraenc.h
+++ b/thirdparty/libtheora/theora/theoraenc.h
@@ -43,7 +43,7 @@ extern "C" {
* <tt>NULL</tt> may be specified to revert to the default tables.
*
* \param[in] _buf <tt>#th_huff_code[#TH_NHUFFMAN_TABLES][#TH_NDCT_TOKENS]</tt>
- * \retval TH_EFAULT \a _enc_ctx is <tt>NULL</tt>.
+ * \retval TH_EFAULT \a _enc is <tt>NULL</tt>.
* \retval TH_EINVAL Encoding has already begun or one or more of the given
* tables is not full or prefix-free, \a _buf is
* <tt>NULL</tt> and \a _buf_sz is not zero, or \a _buf is
@@ -57,8 +57,8 @@ extern "C" {
* <tt>NULL</tt> may be specified to revert to the default parameters.
*
* \param[in] _buf #th_quant_info
- * \retval TH_EFAULT \a _enc_ctx is <tt>NULL</tt>.
- * \retval TH_EINVAL Encoding has already begun, \a _buf is
+ * \retval TH_EFAULT \a _enc is <tt>NULL</tt>.
+ * \retval TH_EINVAL Encoding has already begun, \a _buf is
* <tt>NULL</tt> and \a _buf_sz is not zero,
* or \a _buf is non-<tt>NULL</tt> and
* \a _buf_sz is not <tt>sizeof(#th_quant_info)</tt>.
@@ -73,7 +73,7 @@ extern "C" {
* \param[in] _buf <tt>ogg_uint32_t</tt>: The maximum distance between key
* frames.
* \param[out] _buf <tt>ogg_uint32_t</tt>: The actual maximum distance set.
- * \retval TH_EFAULT \a _enc_ctx or \a _buf is <tt>NULL</tt>.
+ * \retval TH_EFAULT \a _enc or \a _buf is <tt>NULL</tt>.
* \retval TH_EINVAL \a _buf_sz is not <tt>sizeof(ogg_uint32_t)</tt>.
* \retval TH_EIMPL Not supported by this implementation.*/
#define TH_ENCCTL_SET_KEYFRAME_FREQUENCY_FORCE (4)
@@ -101,7 +101,7 @@ extern "C" {
* 4:2:0, the picture region is smaller than the full frame,
* or if encoding has begun, preventing the quantization
* tables and codebooks from being set.
- * \retval TH_EFAULT \a _enc_ctx or \a _buf is <tt>NULL</tt>.
+ * \retval TH_EFAULT \a _enc or \a _buf is <tt>NULL</tt>.
* \retval TH_EINVAL \a _buf_sz is not <tt>sizeof(int)</tt>.
* \retval TH_EIMPL Not supported by this implementation.*/
#define TH_ENCCTL_SET_VP3_COMPATIBLE (10)
@@ -114,7 +114,7 @@ extern "C" {
* the current encoding mode (VBR vs. constant quality, etc.).
*
* \param[out] _buf <tt>int</tt>: The maximum encoding speed level.
- * \retval TH_EFAULT \a _enc_ctx or \a _buf is <tt>NULL</tt>.
+ * \retval TH_EFAULT \a _enc or \a _buf is <tt>NULL</tt>.
* \retval TH_EINVAL \a _buf_sz is not <tt>sizeof(int)</tt>.
* \retval TH_EIMPL Not supported by this implementation in the current
* encoding mode.*/
@@ -124,7 +124,7 @@ extern "C" {
*
* \param[in] _buf <tt>int</tt>: The new encoding speed level.
* 0 is slowest, larger values use less CPU.
- * \retval TH_EFAULT \a _enc_ctx or \a _buf is <tt>NULL</tt>.
+ * \retval TH_EFAULT \a _enc or \a _buf is <tt>NULL</tt>.
* \retval TH_EINVAL \a _buf_sz is not <tt>sizeof(int)</tt>, or the
* encoding speed level is out of bounds.
* The maximum encoding speed level may be
@@ -142,7 +142,7 @@ extern "C" {
*
* \param[out] _buf <tt>int</tt>: The current encoding speed level.
* 0 is slowest, larger values use less CPU.
- * \retval TH_EFAULT \a _enc_ctx or \a _buf is <tt>NULL</tt>.
+ * \retval TH_EFAULT \a _enc or \a _buf is <tt>NULL</tt>.
* \retval TH_EINVAL \a _buf_sz is not <tt>sizeof(int)</tt>.
* \retval TH_EIMPL Not supported by this implementation in the current
* encoding mode.*/
@@ -162,7 +162,7 @@ extern "C" {
*
* \param[in] _buf <tt>int</tt>: The number of duplicates to produce.
* If this is negative or zero, no duplicates will be produced.
- * \retval TH_EFAULT \a _enc_ctx or \a _buf is <tt>NULL</tt>.
+ * \retval TH_EFAULT \a _enc or \a _buf is <tt>NULL</tt>.
* \retval TH_EINVAL \a _buf_sz is not <tt>sizeof(int)</tt>, or the
* number of duplicates is greater than or equal to the
* maximum keyframe interval.
@@ -187,7 +187,7 @@ extern "C" {
* use.
* - #TH_RATECTL_CAP_UNDERFLOW: Don't try to make up shortfalls
* later.
- * \retval TH_EFAULT \a _enc_ctx or \a _buf is <tt>NULL</tt>.
+ * \retval TH_EFAULT \a _enc or \a _buf is <tt>NULL</tt>.
* \retval TH_EINVAL \a _buf_sz is not <tt>sizeof(int)</tt> or rate control
* is not enabled.
* \retval TH_EIMPL Not supported by this implementation in the current
@@ -211,7 +211,7 @@ extern "C" {
* \param[in] _buf <tt>int</tt>: Requested size of the reservoir measured in
* frames.
* \param[out] _buf <tt>int</tt>: The actual size of the reservoir set.
- * \retval TH_EFAULT \a _enc_ctx or \a _buf is <tt>NULL</tt>.
+ * \retval TH_EFAULT \a _enc or \a _buf is <tt>NULL</tt>.
* \retval TH_EINVAL \a _buf_sz is not <tt>sizeof(int)</tt>, or rate control
* is not enabled. The buffer has an implementation
* defined minimum and maximum size and the value in _buf
@@ -243,7 +243,7 @@ extern "C" {
* application.
* \retval >=0 The number of bytes of metric data available in the
* returned buffer.
- * \retval TH_EFAULT \a _enc_ctx or \a _buf is <tt>NULL</tt>.
+ * \retval TH_EFAULT \a _enc or \a _buf is <tt>NULL</tt>.
* \retval TH_EINVAL \a _buf_sz is not <tt>sizeof(char *)</tt>, no target
* bitrate has been set, or the first call was made after
* the first frame was submitted for encoding.
@@ -283,7 +283,7 @@ extern "C" {
* of bytes consumed.
* \retval >0 The number of bytes of metric data required/consumed.
* \retval 0 No more data is required before the next frame.
- * \retval TH_EFAULT \a _enc_ctx is <tt>NULL</tt>.
+ * \retval TH_EFAULT \a _enc is <tt>NULL</tt>.
* \retval TH_EINVAL No target bitrate has been set, or the first call was
* made after the first frame was submitted for
* encoding.
@@ -306,7 +306,7 @@ extern "C" {
* \param[in] _buf <tt>int</tt>: The new target quality, in the range 0...63,
* inclusive.
* \retval 0 Success.
- * \retval TH_EFAULT \a _enc_ctx or \a _buf is <tt>NULL</tt>.
+ * \retval TH_EFAULT \a _enc or \a _buf is <tt>NULL</tt>.
* \retval TH_EINVAL A target bitrate has already been specified, or the
* quality index was not in the range 0...63.
* \retval TH_EIMPL Not supported by this implementation.*/
@@ -328,10 +328,54 @@ extern "C" {
*
* \param[in] _buf <tt>long</tt>: The new target bitrate, in bits per second.
* \retval 0 Success.
- * \retval TH_EFAULT \a _enc_ctx or \a _buf is <tt>NULL</tt>.
+ * \retval TH_EFAULT \a _enc or \a _buf is <tt>NULL</tt>.
* \retval TH_EINVAL The target bitrate was not positive.
- * \retval TH_EIMPL Not supported by this implementation.*/
+ * A future version of this library may allow passing 0
+ * to disable rate-controlled mode and return to a
+ * quality-based mode, in which case this function will
+ * not return an error for that value.
+ * \retval TH_EIMPL Not supported by this implementation.*/
#define TH_ENCCTL_SET_BITRATE (30)
+/**Sets the configuration to be compatible with that from the given setup
+ * header.
+ * This sets the Huffman codebooks and quantization parameters to match those
+ * found in the given setup header.
+ * This guarantees that packets encoded by this encoder will be decodable using
+ * a decoder configured with the passed-in setup header.
+ * It does <em>not</em> guarantee that th_encode_flushheader() will produce a
+ * bit-identical setup header, only that they will be compatible.
+ * If you need a bit-identical setup header, then use the one you passed into
+ * this command, and not the one returned by th_encode_flushheader().
+ *
+ * This also does <em>not</em> enable or disable VP3 compatibility; that is not
+ * signaled in the setup header (or anywhere else in the encoded stream), and
+ * is controlled independently by the #TH_ENCCTL_SET_VP3_COMPATIBLE function.
+ * If you wish to enable VP3 compatibility mode <em>and</em> want the codebooks
+ * and quantization parameters to match the given setup header, you should
+ * enable VP3 compatibility before invoking this command, otherwise the
+ * codebooks and quantization parameters will be reset to the VP3 defaults.
+ *
+ * The current encoder does not support Huffman codebooks which do not contain
+ * codewords for all 32 tokens.
+ * Such codebooks are legal, according to the specification, but cannot be
+ * configured with this function.
+ *
+ * \param[in] _buf <tt>unsigned char[]</tt>: The encoded setup header to copy
+ * the configuration from.
+ * This should be the original,
+ * undecoded setup header packet,
+ * and <em>not</em> a #th_setup_info
+ * structure filled in by
+ * th_decode_headerin().
+ * \retval TH_EFAULT \a _enc or \a _buf is <tt>NULL</tt>.
+ * \retval TH_EINVAL Encoding has already begun, so the codebooks and
+ * quantization parameters cannot be changed, or the
+ * data in the setup header was not supported by this
+ * encoder.
+ * \retval TH_EBADHEADER \a _buf did not contain a valid setup header packet.
+ * \retval TH_ENOTFORMAT \a _buf did not contain a Theora header at all.
+ * \retval TH_EIMPL Not supported by this implementation.*/
+#define TH_ENCCTL_SET_COMPAT_CONFIG (32)
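+/*Illustrative usage (not in the upstream header): after obtaining another
+ * stream's setup header, the raw packet can be passed through unchanged,
+ * e.g.
+ *   th_encode_ctl(enc,TH_ENCCTL_SET_COMPAT_CONFIG,
+ *    setup_op.packet,setup_op.bytes);
+ * where setup_op is the undecoded ogg_packet holding that setup header.*/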
/*@}*/
@@ -342,7 +386,8 @@ extern "C" {
/*@{*/
/**Drop frames to keep within bitrate buffer constraints.
* This can have a severe impact on quality, but is the only way to ensure that
- * bitrate targets are met at low rates during sudden bursts of activity.*/
+ * bitrate targets are met at low rates during sudden bursts of activity.
+ * It is enabled by default.*/
#define TH_RATECTL_DROP_FRAMES (0x1)
/**Ignore bitrate buffer overflows.
* If the encoder uses so few bits that the reservoir of available bits
@@ -350,14 +395,14 @@ extern "C" {
* The encoder will not try to use these extra bits in future frames.
* At high rates this may cause the result to be undersized, but allows a
* client to play the stream using a finite buffer; it should normally be
- * enabled.*/
+ * enabled, which is the default.*/
#define TH_RATECTL_CAP_OVERFLOW (0x2)
/**Ignore bitrate buffer underflows.
* If the encoder uses so many bits that the reservoir of available bits
* underflows, ignore the deficit.
* The encoder will not try to make up these extra bits in future frames.
* At low rates this may cause the result to be oversized; it should normally
- * be disabled.*/
+ * be disabled, which is the default.*/
#define TH_RATECTL_CAP_UNDERFLOW (0x4)
/*@}*/
@@ -401,8 +446,8 @@ typedef struct th_enc_ctx th_enc_ctx;
* packets.
* - For each uncompressed frame:
* - Submit the uncompressed frame via th_encode_ycbcr_in()
- * - Repeatedly call th_encode_packetout() to retrieve any video data packets
- * that are ready.
+ * - Repeatedly call th_encode_packetout() to retrieve any video
+ * data packets that are ready.
* - Call th_encode_free() to release all encoder memory.*/
/*@{*/
/**Allocates an encoder instance.
@@ -417,7 +462,10 @@ extern th_enc_ctx *th_encode_alloc(const th_info *_info);
* See \ref encctlcodes "the list of available control codes"
* for details.
* \param _buf The parameters for this control code.
- * \param _buf_sz The size of the parameter buffer.*/
+ * \param _buf_sz The size of the parameter buffer.
+ * \return Possible return values depend on the control code used.
+ * See \ref encctlcodes "the list of control codes" for
+ * specific values. Generally 0 indicates success.*/
extern int th_encode_ctl(th_enc_ctx *_enc,int _req,void *_buf,size_t _buf_sz);
/**Outputs the next header packet.
* This should be called repeatedly after encoder initialization until it
@@ -441,11 +489,25 @@ extern int th_encode_flushheader(th_enc_ctx *_enc,
/**Submits an uncompressed frame to the encoder.
* \param _enc A #th_enc_ctx handle.
* \param _ycbcr A buffer of Y'CbCr data to encode.
+ * If the width and height of the buffer matches the frame size
+ * the encoder was initialized with, the encoder will only
+ * reference the portion inside the picture region.
+ * Any data outside this region will be ignored, and need not map
+ * to a valid address.
+ * Alternatively, you can pass a buffer equal to the size of the
+ * picture region, if this is less than the full frame size.
+ * When using subsampled chroma planes, odd picture sizes or odd
+ * picture offsets may require an unexpected chroma plane size,
+ * and their use is generally discouraged, as they will not be
+ * well-supported by players and other media frameworks.
+ * See Section 4.4 of
+ * <a href="http://www.theora.org/doc/Theora.pdf">the Theora
+ * specification</a> for details if you wish to use them anyway.
* \retval 0 Success.
* \retval TH_EFAULT \a _enc or \a _ycbcr is <tt>NULL</tt>.
- * \retval TH_EINVAL The buffer size does not match the frame size the encoder
- * was initialized with, or encoding has already
- * completed.*/
+ * \retval TH_EINVAL The buffer size matches neither the frame size nor the
+ * picture size the encoder was initialized with, or
+ * encoding has already completed.*/
extern int th_encode_ycbcr_in(th_enc_ctx *_enc,th_ycbcr_buffer _ycbcr);
/**Retrieves encoded video data packets.
* This should be called repeatedly after each frame is submitted to flush any
diff --git a/thirdparty/libtheora/tokenize.c b/thirdparty/libtheora/tokenize.c
index 60574c3594..57b7aa8da9 100644
--- a/thirdparty/libtheora/tokenize.c
+++ b/thirdparty/libtheora/tokenize.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: tokenize.c 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
#include <stdlib.h>
@@ -20,27 +20,26 @@
+static unsigned char OC_DCT_EOB_TOKEN[31]={
+ 0,1,2,3,3,3,3,4,4,4,4,4,4,4,4,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5
+};
+
static int oc_make_eob_token(int _run_count){
- if(_run_count<4)return OC_DCT_EOB1_TOKEN+_run_count-1;
- else{
- int cat;
- cat=OC_ILOGNZ_32(_run_count)-3;
- cat=OC_MINI(cat,3);
- return OC_DCT_REPEAT_RUN0_TOKEN+cat;
- }
+ return _run_count<32?OC_DCT_EOB_TOKEN[_run_count-1]:OC_DCT_REPEAT_RUN3_TOKEN;
}
+static unsigned char OC_DCT_EOB_EB[31]={
+ 0,0,0,0,1,2,3,0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
+};
+
static int oc_make_eob_token_full(int _run_count,int *_eb){
- if(_run_count<4){
- *_eb=0;
- return OC_DCT_EOB1_TOKEN+_run_count-1;
+ if(_run_count<32){
+ *_eb=OC_DCT_EOB_EB[_run_count-1];
+ return OC_DCT_EOB_TOKEN[_run_count-1];
}
else{
- int cat;
- cat=OC_ILOGNZ_32(_run_count)-3;
- cat=OC_MINI(cat,3);
- *_eb=_run_count-OC_BYTE_TABLE32(4,8,16,0,cat);
- return OC_DCT_REPEAT_RUN0_TOKEN+cat;
+ *_eb=_run_count;
+ return OC_DCT_REPEAT_RUN3_TOKEN;
}
}
@@ -49,86 +48,330 @@ static int oc_decode_eob_token(int _token,int _eb){
return (0x20820C41U>>_token*5&0x1F)+_eb;
}
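+/*Illustrative note (not in the upstream source): these tables round-trip with
+  oc_decode_eob_token(): e.g., oc_make_eob_token_full(7,&eb) returns token 3
+  with eb set to 3, and oc_decode_eob_token(3,3) recovers the run length 7.*/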
-/*TODO: This is now only used during DCT tokenization, and never for runs; it
- should be simplified.*/
-static int oc_make_dct_token_full(int _zzi,int _zzj,int _val,int *_eb){
- int neg;
- int zero_run;
- int token;
- int eb;
- neg=_val<0;
- _val=abs(_val);
- zero_run=_zzj-_zzi;
- if(zero_run>0){
- int adj;
- /*Implement a minor restriction on stack 1 so that we know during DC fixups
- that extending a dctrun token from stack 1 will never overflow.*/
- adj=_zzi!=1;
- if(_val<2&&zero_run<17+adj){
- if(zero_run<6){
- token=OC_DCT_RUN_CAT1A+zero_run-1;
- eb=neg;
- }
- else if(zero_run<10){
- token=OC_DCT_RUN_CAT1B;
- eb=zero_run-6+(neg<<2);
- }
- else{
- token=OC_DCT_RUN_CAT1C;
- eb=zero_run-10+(neg<<3);
- }
- }
- else if(_val<4&&zero_run<3+adj){
- if(zero_run<2){
- token=OC_DCT_RUN_CAT2A;
- eb=_val-2+(neg<<1);
- }
- else{
- token=OC_DCT_RUN_CAT2B;
- eb=zero_run-2+(_val-2<<1)+(neg<<2);
- }
- }
- else{
- if(zero_run<9)token=OC_DCT_SHORT_ZRL_TOKEN;
- else token=OC_DCT_ZRL_TOKEN;
- eb=zero_run-1;
- }
- }
- else if(_val<3){
- token=OC_ONE_TOKEN+(_val-1<<1)+neg;
- eb=0;
- }
- else if(_val<7){
- token=OC_DCT_VAL_CAT2+_val-3;
- eb=neg;
- }
- else if(_val<9){
- token=OC_DCT_VAL_CAT3;
- eb=_val-7+(neg<<1);
- }
- else if(_val<13){
- token=OC_DCT_VAL_CAT4;
- eb=_val-9+(neg<<2);
- }
- else if(_val<21){
- token=OC_DCT_VAL_CAT5;
- eb=_val-13+(neg<<3);
- }
- else if(_val<37){
- token=OC_DCT_VAL_CAT6;
- eb=_val-21+(neg<<4);
- }
- else if(_val<69){
- token=OC_DCT_VAL_CAT7;
- eb=_val-37+(neg<<5);
- }
- else{
- token=OC_DCT_VAL_CAT8;
- eb=_val-69+(neg<<9);
- }
- *_eb=eb;
- return token;
-}
+/*Some tables for fast construction of value tokens.*/
+
+static const unsigned char OC_DCT_VALUE_TOKEN[1161]={
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,21,21,21,21,21,21,21,21,
+ 21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,
+ 21,21,21,21,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,
+ 19,19,19,19,19,19,19,19,18,18,18,18,17,17,16,15,14,13,12,10,
+ 7,
+ 9,11,13,14,15,16,17,17,18,18,18,18,19,19,19,19,19,19,19,19,
+ 20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,21,21,21,21,
+ 21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,
+ 21,21,21,21,21,21,21,21,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,
+ 22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22,22
+};
+
+static const ogg_uint16_t OC_DCT_VALUE_EB[1161]={
+ 1023,1022,1021,1020,1019,1018,1017,1016,1015,1014,
+ 1013,1012,1011,1010,1009,1008,1007,1006,1005,1004,
+ 1003,1002,1001,1000, 999, 998, 997, 996, 995, 994,
+ 993, 992, 991, 990, 989, 988, 987, 986, 985, 984,
+ 983, 982, 981, 980, 979, 978, 977, 976, 975, 974,
+ 973, 972, 971, 970, 969, 968, 967, 966, 965, 964,
+ 963, 962, 961, 960, 959, 958, 957, 956, 955, 954,
+ 953, 952, 951, 950, 949, 948, 947, 946, 945, 944,
+ 943, 942, 941, 940, 939, 938, 937, 936, 935, 934,
+ 933, 932, 931, 930, 929, 928, 927, 926, 925, 924,
+ 923, 922, 921, 920, 919, 918, 917, 916, 915, 914,
+ 913, 912, 911, 910, 909, 908, 907, 906, 905, 904,
+ 903, 902, 901, 900, 899, 898, 897, 896, 895, 894,
+ 893, 892, 891, 890, 889, 888, 887, 886, 885, 884,
+ 883, 882, 881, 880, 879, 878, 877, 876, 875, 874,
+ 873, 872, 871, 870, 869, 868, 867, 866, 865, 864,
+ 863, 862, 861, 860, 859, 858, 857, 856, 855, 854,
+ 853, 852, 851, 850, 849, 848, 847, 846, 845, 844,
+ 843, 842, 841, 840, 839, 838, 837, 836, 835, 834,
+ 833, 832, 831, 830, 829, 828, 827, 826, 825, 824,
+ 823, 822, 821, 820, 819, 818, 817, 816, 815, 814,
+ 813, 812, 811, 810, 809, 808, 807, 806, 805, 804,
+ 803, 802, 801, 800, 799, 798, 797, 796, 795, 794,
+ 793, 792, 791, 790, 789, 788, 787, 786, 785, 784,
+ 783, 782, 781, 780, 779, 778, 777, 776, 775, 774,
+ 773, 772, 771, 770, 769, 768, 767, 766, 765, 764,
+ 763, 762, 761, 760, 759, 758, 757, 756, 755, 754,
+ 753, 752, 751, 750, 749, 748, 747, 746, 745, 744,
+ 743, 742, 741, 740, 739, 738, 737, 736, 735, 734,
+ 733, 732, 731, 730, 729, 728, 727, 726, 725, 724,
+ 723, 722, 721, 720, 719, 718, 717, 716, 715, 714,
+ 713, 712, 711, 710, 709, 708, 707, 706, 705, 704,
+ 703, 702, 701, 700, 699, 698, 697, 696, 695, 694,
+ 693, 692, 691, 690, 689, 688, 687, 686, 685, 684,
+ 683, 682, 681, 680, 679, 678, 677, 676, 675, 674,
+ 673, 672, 671, 670, 669, 668, 667, 666, 665, 664,
+ 663, 662, 661, 660, 659, 658, 657, 656, 655, 654,
+ 653, 652, 651, 650, 649, 648, 647, 646, 645, 644,
+ 643, 642, 641, 640, 639, 638, 637, 636, 635, 634,
+ 633, 632, 631, 630, 629, 628, 627, 626, 625, 624,
+ 623, 622, 621, 620, 619, 618, 617, 616, 615, 614,
+ 613, 612, 611, 610, 609, 608, 607, 606, 605, 604,
+ 603, 602, 601, 600, 599, 598, 597, 596, 595, 594,
+ 593, 592, 591, 590, 589, 588, 587, 586, 585, 584,
+ 583, 582, 581, 580, 579, 578, 577, 576, 575, 574,
+ 573, 572, 571, 570, 569, 568, 567, 566, 565, 564,
+ 563, 562, 561, 560, 559, 558, 557, 556, 555, 554,
+ 553, 552, 551, 550, 549, 548, 547, 546, 545, 544,
+ 543, 542, 541, 540, 539, 538, 537, 536, 535, 534,
+ 533, 532, 531, 530, 529, 528, 527, 526, 525, 524,
+ 523, 522, 521, 520, 519, 518, 517, 516, 515, 514,
+ 513, 512, 63, 62, 61, 60, 59, 58, 57, 56,
+ 55, 54, 53, 52, 51, 50, 49, 48, 47, 46,
+ 45, 44, 43, 42, 41, 40, 39, 38, 37, 36,
+ 35, 34, 33, 32, 31, 30, 29, 28, 27, 26,
+ 25, 24, 23, 22, 21, 20, 19, 18, 17, 16,
+ 15, 14, 13, 12, 11, 10, 9, 8, 7, 6,
+ 5, 4, 3, 2, 1, 1, 1, 1, 0, 0,
+ 0,
+ 0, 0, 0, 0, 0, 0, 0, 1, 0, 1,
+ 2, 3, 0, 1, 2, 3, 4, 5, 6, 7,
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
+ 10, 11, 12, 13, 14, 15, 0, 1, 2, 3,
+ 4, 5, 6, 7, 8, 9, 10, 11, 12, 13,
+ 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
+ 24, 25, 26, 27, 28, 29, 30, 31, 0, 1,
+ 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
+ 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
+ 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
+ 32, 33, 34, 35, 36, 37, 38, 39, 40, 41,
+ 42, 43, 44, 45, 46, 47, 48, 49, 50, 51,
+ 52, 53, 54, 55, 56, 57, 58, 59, 60, 61,
+ 62, 63, 64, 65, 66, 67, 68, 69, 70, 71,
+ 72, 73, 74, 75, 76, 77, 78, 79, 80, 81,
+ 82, 83, 84, 85, 86, 87, 88, 89, 90, 91,
+ 92, 93, 94, 95, 96, 97, 98, 99, 100, 101,
+ 102, 103, 104, 105, 106, 107, 108, 109, 110, 111,
+ 112, 113, 114, 115, 116, 117, 118, 119, 120, 121,
+ 122, 123, 124, 125, 126, 127, 128, 129, 130, 131,
+ 132, 133, 134, 135, 136, 137, 138, 139, 140, 141,
+ 142, 143, 144, 145, 146, 147, 148, 149, 150, 151,
+ 152, 153, 154, 155, 156, 157, 158, 159, 160, 161,
+ 162, 163, 164, 165, 166, 167, 168, 169, 170, 171,
+ 172, 173, 174, 175, 176, 177, 178, 179, 180, 181,
+ 182, 183, 184, 185, 186, 187, 188, 189, 190, 191,
+ 192, 193, 194, 195, 196, 197, 198, 199, 200, 201,
+ 202, 203, 204, 205, 206, 207, 208, 209, 210, 211,
+ 212, 213, 214, 215, 216, 217, 218, 219, 220, 221,
+ 222, 223, 224, 225, 226, 227, 228, 229, 230, 231,
+ 232, 233, 234, 235, 236, 237, 238, 239, 240, 241,
+ 242, 243, 244, 245, 246, 247, 248, 249, 250, 251,
+ 252, 253, 254, 255, 256, 257, 258, 259, 260, 261,
+ 262, 263, 264, 265, 266, 267, 268, 269, 270, 271,
+ 272, 273, 274, 275, 276, 277, 278, 279, 280, 281,
+ 282, 283, 284, 285, 286, 287, 288, 289, 290, 291,
+ 292, 293, 294, 295, 296, 297, 298, 299, 300, 301,
+ 302, 303, 304, 305, 306, 307, 308, 309, 310, 311,
+ 312, 313, 314, 315, 316, 317, 318, 319, 320, 321,
+ 322, 323, 324, 325, 326, 327, 328, 329, 330, 331,
+ 332, 333, 334, 335, 336, 337, 338, 339, 340, 341,
+ 342, 343, 344, 345, 346, 347, 348, 349, 350, 351,
+ 352, 353, 354, 355, 356, 357, 358, 359, 360, 361,
+ 362, 363, 364, 365, 366, 367, 368, 369, 370, 371,
+ 372, 373, 374, 375, 376, 377, 378, 379, 380, 381,
+ 382, 383, 384, 385, 386, 387, 388, 389, 390, 391,
+ 392, 393, 394, 395, 396, 397, 398, 399, 400, 401,
+ 402, 403, 404, 405, 406, 407, 408, 409, 410, 411,
+ 412, 413, 414, 415, 416, 417, 418, 419, 420, 421,
+ 422, 423, 424, 425, 426, 427, 428, 429, 430, 431,
+ 432, 433, 434, 435, 436, 437, 438, 439, 440, 441,
+ 442, 443, 444, 445, 446, 447, 448, 449, 450, 451,
+ 452, 453, 454, 455, 456, 457, 458, 459, 460, 461,
+ 462, 463, 464, 465, 466, 467, 468, 469, 470, 471,
+ 472, 473, 474, 475, 476, 477, 478, 479, 480, 481,
+ 482, 483, 484, 485, 486, 487, 488, 489, 490, 491,
+ 492, 493, 494, 495, 496, 497, 498, 499, 500, 501,
+ 502, 503, 504, 505, 506, 507, 508, 509, 510, 511
+};
+
+/*The first DCT coefficient that both has a smaller magnitude and gets coded
+ with a different token.*/
+static const ogg_int16_t OC_DCT_TRELLIS_ALT_VALUE[1161]={
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -68, -68, -68, -68, -68, -68, -68, -68,
+ -68, -68, -36, -36, -36, -36, -36, -36, -36, -36,
+ -36, -36, -36, -36, -36, -36, -36, -36, -36, -36,
+ -36, -36, -36, -36, -36, -36, -36, -36, -36, -36,
+ -36, -36, -36, -36, -20, -20, -20, -20, -20, -20,
+ -20, -20, -20, -20, -20, -20, -20, -20, -20, -20,
+ -12, -12, -12, -12, -12, -12, -12, -12, -8, -8,
+ -8, -8, -6, -6, -5, -4, -3, -2, -1, 0,
+ 0,
+ 0, 1, 2, 3, 4, 5, 6, 6, 8, 8,
+ 8, 8, 12, 12, 12, 12, 12, 12, 12, 12,
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
+ 20, 20, 20, 20, 20, 20, 36, 36, 36, 36,
+ 36, 36, 36, 36, 36, 36, 36, 36, 36, 36,
+ 36, 36, 36, 36, 36, 36, 36, 36, 36, 36,
+ 36, 36, 36, 36, 36, 36, 36, 36, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68, 68, 68
+};
+
+#define OC_DCT_VALUE_TOKEN_PTR (OC_DCT_VALUE_TOKEN+580)
+#define OC_DCT_VALUE_EB_PTR (OC_DCT_VALUE_EB+580)
+#define OC_DCT_TRELLIS_ALT_VALUE_PTR (OC_DCT_TRELLIS_ALT_VALUE+580)
+
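A hedged illustration of how these biased pointers are used later in the patch: a signed quantized coefficient in [-580,580] indexes all three tables directly, replacing the per-category arithmetic of the removed oc_make_dct_token_full() for the case with no preceding zero run. The helper name is illustrative only.

static void oc_value_token_lookup(int qc,int *_token,int *_eb,int *_alt_qc){
  /*Huffman token and sign/magnitude extra bits for the value qc.*/
  *_token=*(OC_DCT_VALUE_TOKEN_PTR+qc);
  *_eb=*(OC_DCT_VALUE_EB_PTR+qc);
  /*The nearest smaller-magnitude value that codes with a different token;
     used below as the alternate trellis candidate.*/
  *_alt_qc=*(OC_DCT_TRELLIS_ALT_VALUE_PTR+qc);
}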
+/*Some tables for fast construction of combo tokens.*/
+
+static const unsigned char OC_DCT_RUN_CAT1_TOKEN[17]={
+ 23,24,25,26,27,28,28,28,28,29,29,29,29,29,29,29,29
+};
+
+static const unsigned char OC_DCT_RUN_CAT1_EB[17][2]={
+ {0,1},{0,1},{0, 1},{0, 1},{0, 1},{0, 4},{1, 5},{2, 6},{3,7},
+ {0,8},{1,9},{2,10},{3,11},{4,12},{5,13},{6,14},{7,15}
+};
+
+static const unsigned char OC_DCT_RUN_CAT2_EB[3][2][2]={
+ { {0,1},{2,3} },{ {0,2},{4,6} },{ {1,3},{5,7} }
+};
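These combo tables replace the run-category arithmetic of the old tokenizer. A minimal sketch of their intended use, mirroring the trellis code below (the wrapper name is hypothetical; neg is 1 for a negative coefficient, nzeros the zero-run length):

static void oc_combo_token_sketch(int nzeros,int val,int neg,
 int *_token,int *_eb){
  if(val==1){
    /*A run of 1..17 zeros followed by +/-1.*/
    *_token=OC_DCT_RUN_CAT1_TOKEN[nzeros-1];
    *_eb=OC_DCT_RUN_CAT1_EB[nzeros-1][neg];
  }
  else{
    /*A run of 1..3 zeros followed by +/-2 or +/-3.*/
    *_token=OC_DCT_RUN_CAT2A+(nzeros>>1);
    *_eb=OC_DCT_RUN_CAT2_EB[nzeros-1][neg][val-2];
  }
}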
/*Token logging to allow a few fragments of efficient rollback.
Late SKIP analysis is tied up in the tokenization process, so we need to be
@@ -211,10 +454,11 @@ struct oc_quant_token{
/*Tokenizes the AC coefficients, possibly adjusting the quantization, and then
dequantizes and de-zig-zags the result.
- The DC coefficient is not preserved; it should be restored by the caller.*/
+ The AC coefficients of _idct must be pre-initialized to zero.*/
int oc_enc_tokenize_ac(oc_enc_ctx *_enc,int _pli,ptrdiff_t _fragi,
- ogg_int16_t *_qdct,const ogg_uint16_t *_dequant,const ogg_int16_t *_dct,
- int _zzi,oc_token_checkpoint **_stack,int _acmin){
+ ogg_int16_t *_idct,const ogg_int16_t *_qdct,
+ const ogg_uint16_t *_dequant,const ogg_int16_t *_dct,
+ int _zzi,oc_token_checkpoint **_stack,int _lambda,int _acmin){
oc_token_checkpoint *stack;
ogg_int64_t zflags;
ogg_int64_t nzflags;
@@ -242,31 +486,29 @@ int oc_enc_tokenize_ac(oc_enc_ctx *_enc,int _pli,ptrdiff_t _fragi,
d2_accum[0]=0;
zzj=64;
for(zzi=OC_MINI(_zzi,63);zzi>0;zzi--){
- ogg_int32_t lambda;
ogg_uint32_t best_cost;
int best_bits=best_bits;
int best_next=best_next;
int best_token=best_token;
int best_eb=best_eb;
int best_qc=best_qc;
- int flush_bits;
ogg_uint32_t d2;
int dq;
+ int qc_m;
int e;
int c;
int s;
int tj;
- lambda=_enc->lambda;
qc=_qdct[zzi];
s=-(qc<0);
- qc=qc+s^s;
- c=_dct[OC_FZIG_ZAG[zzi]];
- if(qc<=1){
+ qc_m=qc+s^s;
+ c=_dct[zzi];
+ /*The hard case: try a zero run.*/
+ if(qc_m<=1){
ogg_uint32_t sum_d2;
int nzeros;
int dc_reserve;
- /*The hard case: try a zero run.*/
- if(!qc){
+ if(!qc_m){
/*Skip runs that are already quantized to zeros.
If we considered each zero coefficient in turn, we might
theoretically find a better way to partition long zero runs (e.g.,
@@ -281,15 +523,14 @@ int oc_enc_tokenize_ac(oc_enc_ctx *_enc,int _pli,ptrdiff_t _fragi,
d2=0;
}
else{
- c=c+s^s;
d2=c*(ogg_int32_t)c;
+ c=c+s^s;
}
eob=eob_run[zzi];
nzeros=zzj-zzi;
zzj&=63;
sum_d2=d2+d2_accum[zzj];
d2_accum[zzi]=sum_d2;
- flush_bits=eob>0?oc_token_bits(_enc,huffi,zzi,oc_make_eob_token(eob)):0;
/*We reserve 1 spot for combo run tokens that start in the 1st AC stack
to ensure they can be extended to include the DC coefficient if
necessary; this greatly simplifies stack-rewriting later on.*/
@@ -297,7 +538,6 @@ int oc_enc_tokenize_ac(oc_enc_ctx *_enc,int _pli,ptrdiff_t _fragi,
best_cost=0xFFFFFFFF;
for(;;){
if(nzflags>>zzj&1){
- int cat;
int val;
int val_s;
int zzk;
@@ -306,11 +546,10 @@ int oc_enc_tokenize_ac(oc_enc_ctx *_enc,int _pli,ptrdiff_t _fragi,
tk=next&1;
zzk=next>>1;
/*Try a pure zero run to this point.*/
- cat=nzeros+55>>6;
- token=OC_DCT_SHORT_ZRL_TOKEN+cat;
- bits=flush_bits+oc_token_bits(_enc,huffi,zzi,token);
+ token=OC_DCT_SHORT_ZRL_TOKEN+(nzeros+55>>6);
+ bits=oc_token_bits(_enc,huffi,zzi,token);
d2=sum_d2-d2_accum[zzj];
- cost=d2+lambda*bits+tokens[zzj][1].cost;
+ cost=d2+_lambda*bits+tokens[zzj][1].cost;
if(cost<=best_cost){
best_next=(zzj<<1)+1;
best_token=token;
@@ -319,25 +558,18 @@ int oc_enc_tokenize_ac(oc_enc_ctx *_enc,int _pli,ptrdiff_t _fragi,
best_bits=bits+tokens[zzj][1].bits;
best_qc=0;
}
- if(nzeros<16+dc_reserve){
+ if(nzeros<17+dc_reserve){
val=_qdct[zzj];
val_s=-(val<0);
val=val+val_s^val_s;
if(val<=2){
/*Try a +/- 1 combo token.*/
- if(nzeros<6){
- token=OC_DCT_RUN_CAT1A+nzeros-1;
- eb=-val_s;
- }
- else{
- cat=nzeros+54>>6;
- token=OC_DCT_RUN_CAT1B+cat;
- eb=(-val_s<<cat+2)+nzeros-6-(cat<<2);
- }
- e=(_dct[OC_FZIG_ZAG[zzj]]+val_s^val_s)-_dequant[zzj];
+ token=OC_DCT_RUN_CAT1_TOKEN[nzeros-1];
+ eb=OC_DCT_RUN_CAT1_EB[nzeros-1][-val_s];
+ e=_dct[zzj]-(_dequant[zzj]+val_s^val_s);
d2=e*(ogg_int32_t)e+sum_d2-d2_accum[zzj];
- bits=flush_bits+oc_token_bits(_enc,huffi,zzi,token);
- cost=d2+lambda*bits+tokens[zzk][tk].cost;
+ bits=oc_token_bits(_enc,huffi,zzi,token);
+ cost=d2+_lambda*bits+tokens[zzk][tk].cost;
if(cost<=best_cost){
best_next=next;
best_token=token;
@@ -347,22 +579,23 @@ int oc_enc_tokenize_ac(oc_enc_ctx *_enc,int _pli,ptrdiff_t _fragi,
best_qc=1+val_s^val_s;
}
}
- if(nzeros<2+dc_reserve&&2<=val&&val<=4){
+ if(nzeros<3+dc_reserve&&2<=val&&val<=4){
+ int sval;
/*Try a +/- 2/3 combo token.*/
- cat=nzeros>>1;
- token=OC_DCT_RUN_CAT2A+cat;
- bits=flush_bits+oc_token_bits(_enc,huffi,zzi,token);
- val=2+((val+val_s^val_s)>2);
- e=(_dct[OC_FZIG_ZAG[zzj]]+val_s^val_s)-_dequant[zzj]*val;
+ token=OC_DCT_RUN_CAT2A+(nzeros>>1);
+ bits=oc_token_bits(_enc,huffi,zzi,token);
+ val=2+(val>2);
+ sval=val+val_s^val_s;
+ e=_dct[zzj]-_dequant[zzj]*sval;
d2=e*(ogg_int32_t)e+sum_d2-d2_accum[zzj];
- cost=d2+lambda*bits+tokens[zzk][tk].cost;
+ cost=d2+_lambda*bits+tokens[zzk][tk].cost;
if(cost<=best_cost){
best_cost=cost;
best_bits=bits+tokens[zzk][tk].bits;
best_next=next;
best_token=token;
- best_eb=(-val_s<<1+cat)+(val-2<<cat)+(nzeros-1>>1);
- best_qc=val+val_s^val_s;
+ best_eb=OC_DCT_RUN_CAT2_EB[nzeros-1][-val_s][val-2];
+ best_qc=sval;
}
}
}
@@ -378,10 +611,10 @@ int oc_enc_tokenize_ac(oc_enc_ctx *_enc,int _pli,ptrdiff_t _fragi,
/*We made it all the way to the end of the block; try an EOB token.*/
if(eob<4095){
bits=oc_token_bits(_enc,huffi,zzi,oc_make_eob_token(eob+1))
- -flush_bits;
+ -(eob>0?oc_token_bits(_enc,huffi,zzi,oc_make_eob_token(eob)):0);
}
else bits=oc_token_bits(_enc,huffi,zzi,OC_DCT_EOB1_TOKEN);
- cost=sum_d2+bits*lambda;
+ cost=sum_d2+bits*_lambda;
/*If the best route so far is still a pure zero run to the end of the
block, force coding it as an EOB.
Even if it's not optimal for this block, it has a good chance of
@@ -408,20 +641,20 @@ int oc_enc_tokenize_ac(oc_enc_ctx *_enc,int _pli,ptrdiff_t _fragi,
tokens[zzi][0].bits=best_bits;
tokens[zzi][0].qc=best_qc;
zflags|=(ogg_int64_t)1<<zzi;
- if(qc){
+ if(qc_m){
dq=_dequant[zzi];
- if(zzi<_acmin)lambda=0;
+ if(zzi<_acmin)_lambda=0;
e=dq-c;
d2=e*(ogg_int32_t)e;
token=OC_ONE_TOKEN-s;
- bits=flush_bits+oc_token_bits(_enc,huffi,zzi,token);
+ bits=oc_token_bits(_enc,huffi,zzi,token);
zzj=zzi+1&63;
tj=best_flags>>zzj&1;
next=(zzj<<1)+tj;
tokens[zzi][1].next=(unsigned char)next;
tokens[zzi][1].token=(signed char)token;
tokens[zzi][1].eb=0;
- tokens[zzi][1].cost=d2+lambda*bits+tokens[zzj][tj].cost;
+ tokens[zzi][1].cost=d2+_lambda*bits+tokens[zzj][tj].cost;
tokens[zzi][1].bits=bits+tokens[zzj][tj].bits;
tokens[zzi][1].qc=1+s^s;
nzflags|=(ogg_int64_t)1<<zzi;
@@ -430,200 +663,38 @@ int oc_enc_tokenize_ac(oc_enc_ctx *_enc,int _pli,ptrdiff_t _fragi,
}
}
else{
+ int alt_qc;
eob=eob_run[zzi];
- if(zzi<_acmin)lambda=0;
- c=c+s^s;
+ if(zzi<_acmin)_lambda=0;
dq=_dequant[zzi];
/*No zero run can extend past this point.*/
d2_accum[zzi]=0;
- flush_bits=eob>0?oc_token_bits(_enc,huffi,zzi,oc_make_eob_token(eob)):0;
- if(qc<=2){
- e=2*dq-c;
- d2=e*(ogg_int32_t)e;
- best_token=OC_TWO_TOKEN-s;
- best_bits=flush_bits+oc_token_bits(_enc,huffi,zzi,best_token);
- best_cost=d2+lambda*best_bits;
- e-=dq;
- d2=e*(ogg_int32_t)e;
- token=OC_ONE_TOKEN-s;
- bits=flush_bits+oc_token_bits(_enc,huffi,zzi,token);
- cost=d2+lambda*bits;
- if(cost<=best_cost){
- best_token=token;
- best_bits=bits;
- best_cost=cost;
- qc--;
- }
- best_eb=0;
- }
- else if(qc<=3){
- e=3*dq-c;
- d2=e*(ogg_int32_t)e;
- best_token=OC_DCT_VAL_CAT2;
- best_eb=-s;
- best_bits=flush_bits+oc_token_bits(_enc,huffi,zzi,best_token);
- best_cost=d2+lambda*best_bits;
- e-=dq;
- d2=e*(ogg_int32_t)e;
- token=OC_TWO_TOKEN-s;
- bits=flush_bits+oc_token_bits(_enc,huffi,zzi,token);
- cost=d2+lambda*bits;
- if(cost<=best_cost){
- best_token=token;
- best_eb=0;
- best_bits=bits;
- best_cost=cost;
- qc--;
- }
- }
- else if(qc<=6){
- e=qc*dq-c;
- d2=e*(ogg_int32_t)e;
- best_token=OC_DCT_VAL_CAT2+qc-3;
- best_eb=-s;
- best_bits=flush_bits+oc_token_bits(_enc,huffi,zzi,best_token);
- best_cost=d2+lambda*best_bits;
- e-=dq;
- d2=e*(ogg_int32_t)e;
- token=best_token-1;
- bits=flush_bits+oc_token_bits(_enc,huffi,zzi,token);
- cost=d2+lambda*bits;
- if(cost<=best_cost){
- best_token=token;
- best_bits=bits;
- best_cost=cost;
- qc--;
- }
- }
- else if(qc<=8){
- e=qc*dq-c;
- d2=e*(ogg_int32_t)e;
- best_token=OC_DCT_VAL_CAT3;
- best_eb=(-s<<1)+qc-7;
- best_bits=flush_bits+oc_token_bits(_enc,huffi,zzi,best_token);
- best_cost=d2+lambda*best_bits;
- e=6*dq-c;
- d2=e*(ogg_int32_t)e;
- token=OC_DCT_VAL_CAT2+3;
- bits=flush_bits+oc_token_bits(_enc,huffi,zzi,token);
- cost=d2+lambda*bits;
- if(cost<=best_cost){
- best_token=token;
- best_eb=-s;
- best_bits=bits;
- best_cost=cost;
- qc=6;
- }
- }
- else if(qc<=12){
- e=qc*dq-c;
- d2=e*(ogg_int32_t)e;
- best_token=OC_DCT_VAL_CAT4;
- best_eb=(-s<<2)+qc-9;
- best_bits=flush_bits+oc_token_bits(_enc,huffi,zzi,best_token);
- best_cost=d2+lambda*best_bits;
- e=8*dq-c;
- d2=e*(ogg_int32_t)e;
- token=best_token-1;
- bits=flush_bits+oc_token_bits(_enc,huffi,zzi,token);
- cost=d2+lambda*bits;
- if(cost<=best_cost){
- best_token=token;
- best_eb=(-s<<1)+1;
- best_bits=bits;
- best_cost=cost;
- qc=8;
- }
- }
- else if(qc<=20){
- e=qc*dq-c;
- d2=e*(ogg_int32_t)e;
- best_token=OC_DCT_VAL_CAT5;
- best_eb=(-s<<3)+qc-13;
- best_bits=flush_bits+oc_token_bits(_enc,huffi,zzi,best_token);
- best_cost=d2+lambda*best_bits;
- e=12*dq-c;
- d2=e*(ogg_int32_t)e;
- token=best_token-1;
- bits=flush_bits+oc_token_bits(_enc,huffi,zzi,token);
- cost=d2+lambda*bits;
- if(cost<=best_cost){
- best_token=token;
- best_eb=(-s<<2)+3;
- best_bits=bits;
- best_cost=cost;
- qc=12;
- }
- }
- else if(qc<=36){
- e=qc*dq-c;
- d2=e*(ogg_int32_t)e;
- best_token=OC_DCT_VAL_CAT6;
- best_eb=(-s<<4)+qc-21;
- best_bits=flush_bits+oc_token_bits(_enc,huffi,zzi,best_token);
- best_cost=d2+lambda*best_bits;
- e=20*dq-c;
- d2=e*(ogg_int32_t)e;
- token=best_token-1;
- bits=flush_bits+oc_token_bits(_enc,huffi,zzi,token);
- cost=d2+lambda*bits;
- if(cost<=best_cost){
- best_token=token;
- best_eb=(-s<<3)+7;
- best_bits=bits;
- best_cost=cost;
- qc=20;
- }
- }
- else if(qc<=68){
- e=qc*dq-c;
- d2=e*(ogg_int32_t)e;
- best_token=OC_DCT_VAL_CAT7;
- best_eb=(-s<<5)+qc-37;
- best_bits=flush_bits+oc_token_bits(_enc,huffi,zzi,best_token);
- best_cost=d2+lambda*best_bits;
- e=36*dq-c;
- d2=e*(ogg_int32_t)e;
- token=best_token-1;
- bits=flush_bits+oc_token_bits(_enc,huffi,zzi,token);
- cost=d2+lambda*bits;
- if(cost<best_cost){
- best_token=token;
- best_eb=(-s<<4)+15;
- best_bits=bits;
- best_cost=cost;
- qc=36;
- }
- }
- else{
- e=qc*dq-c;
- d2=e*(ogg_int32_t)e;
- best_token=OC_DCT_VAL_CAT8;
- best_eb=(-s<<9)+qc-69;
- best_bits=flush_bits+oc_token_bits(_enc,huffi,zzi,best_token);
- best_cost=d2+lambda*best_bits;
- e=68*dq-c;
- d2=e*(ogg_int32_t)e;
- token=best_token-1;
- bits=flush_bits+oc_token_bits(_enc,huffi,zzi,token);
- cost=d2+lambda*bits;
- if(cost<best_cost){
- best_token=token;
- best_eb=(-s<<5)+31;
- best_bits=bits;
- best_cost=cost;
- qc=68;
- }
+ e=qc*dq-c;
+ d2=e*(ogg_int32_t)e;
+ best_token=*(OC_DCT_VALUE_TOKEN_PTR+qc);
+ best_bits=oc_token_bits(_enc,huffi,zzi,best_token);
+ best_cost=d2+_lambda*best_bits;
+ alt_qc=*(OC_DCT_TRELLIS_ALT_VALUE_PTR+qc);
+ e=alt_qc*dq-c;
+ d2=e*(ogg_int32_t)e;
+ token=*(OC_DCT_VALUE_TOKEN_PTR+alt_qc);
+ bits=oc_token_bits(_enc,huffi,zzi,token);
+ cost=d2+_lambda*bits;
+ if(cost<best_cost){
+ best_token=token;
+ best_bits=bits;
+ best_cost=cost;
+ qc=alt_qc;
}
zzj=zzi+1&63;
tj=best_flags>>zzj&1;
next=(zzj<<1)+tj;
tokens[zzi][1].next=(unsigned char)next;
tokens[zzi][1].token=(signed char)best_token;
- tokens[zzi][1].eb=best_eb;
+ tokens[zzi][1].eb=*(OC_DCT_VALUE_EB_PTR+qc);
tokens[zzi][1].cost=best_cost+tokens[zzj][tj].cost;
tokens[zzi][1].bits=best_bits+tokens[zzj][tj].bits;
- tokens[zzi][1].qc=qc+s^s;
+ tokens[zzi][1].qc=qc;
nzflags|=(ogg_int64_t)1<<zzi;
best_flags|=(ogg_int64_t)1<<zzi;
}
@@ -631,9 +702,6 @@ int oc_enc_tokenize_ac(oc_enc_ctx *_enc,int _pli,ptrdiff_t _fragi,
}
/*Emit the tokens from the best path through the trellis.*/
stack=*_stack;
- /*We blow away the first entry here so that things vectorize better.
- The DC coefficient is not actually stored in the array yet.*/
- for(zzi=0;zzi<64;zzi++)_qdct[zzi]=0;
dct_fzig_zag=_enc->state.opt_data.dct_fzig_zag;
zzi=1;
ti=best_flags>>1&1;
@@ -643,12 +711,15 @@ int oc_enc_tokenize_ac(oc_enc_ctx *_enc,int _pli,ptrdiff_t _fragi,
eob=eob_run[zzi];
if(tokens[zzi][ti].token<OC_NDCT_EOB_TOKEN_MAX){
if(++eob>=4095){
- oc_enc_eob_log(_enc,_pli,zzi,eob);
+ oc_enc_token_log(_enc,_pli,zzi,OC_DCT_REPEAT_RUN3_TOKEN,eob);
eob=0;
}
eob_run[zzi]=eob;
/*We don't include the actual EOB cost for this block in the return value.
- It will be paid for by the fragment that terminates the EOB run.*/
+ It is very likely to eventually be spread over several blocks, and
+ including it more harshly penalizes the first few blocks in a long EOB
+ run.
+ Omitting it here gives a small PSNR and SSIM gain.*/
bits-=tokens[zzi][ti].bits;
zzi=_zzi;
break;
@@ -664,7 +735,7 @@ int oc_enc_tokenize_ac(oc_enc_ctx *_enc,int _pli,ptrdiff_t _fragi,
zzj=(next>>1)-1&63;
/*TODO: It may be worth saving the dequantized coefficient in the trellis
above; we had to compute it to measure the error anyway.*/
- _qdct[dct_fzig_zag[zzj]]=(ogg_int16_t)(qc*(int)_dequant[zzj]);
+ _idct[dct_fzig_zag[zzj]]=(ogg_int16_t)(qc*(int)_dequant[zzj]);
zzi=next>>1;
ti=next&1;
}
@@ -673,6 +744,237 @@ int oc_enc_tokenize_ac(oc_enc_ctx *_enc,int _pli,ptrdiff_t _fragi,
return bits;
}
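The decision rule the trellis applies at each nonzero coefficient reduces to a distortion-plus-rate comparison between the nominal quantized value and the table-provided alternative. A sketch under the same naming as above (oc_token_bits() supplies the Huffman rate; _lambda weights rate against squared error; the helper itself is illustrative only):

static int oc_rd_pick_sketch(int qc,int alt_qc,int dq,int c,
 int bits_qc,int bits_alt,int _lambda){
  ogg_uint32_t cost_qc;
  ogg_uint32_t cost_alt;
  int e;
  /*Squared reconstruction error plus lambda-weighted token cost.*/
  e=qc*dq-c;
  cost_qc=e*(ogg_int32_t)e+_lambda*bits_qc;
  e=alt_qc*dq-c;
  cost_alt=e*(ogg_int32_t)e+_lambda*bits_alt;
  return cost_alt<cost_qc?alt_qc:qc;
}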
+/*Simplistic R/D tokenizer.
+ The AC coefficients of _idct must be pre-initialized to zero.
+ This could be made more accurate by using more sophisticated
+ rate predictions for zeros.
+ It could be made faster by switching from R/D decisions to static
+ lambda-derived rounding biases.*/
+int oc_enc_tokenize_ac_fast(oc_enc_ctx *_enc,int _pli,ptrdiff_t _fragi,
+ ogg_int16_t *_idct,const ogg_int16_t *_qdct,
+ const ogg_uint16_t *_dequant,const ogg_int16_t *_dct,
+ int _zzi,oc_token_checkpoint **_stack,int _lambda,int _acmin){
+ const unsigned char *dct_fzig_zag;
+ ogg_uint16_t *eob_run;
+ oc_token_checkpoint *stack;
+ int huffi;
+ int zzi;
+ int zzj;
+ int zzk;
+ int total_bits;
+ int zr[4];
+ stack=*_stack;
+ total_bits=0;
+ /*The apparent bit-cost of coding a zero from observing the trellis
+ quantizer is pre-combined with lambda.
+ Four predictive cases are considered: the last optimized value is zero (+2)
+ or non-zero and the non-optimized value is zero (+1) or non-zero.*/
+ zr[0]=3*_lambda>>1;
+ zr[1]=_lambda;
+ zr[2]=4*_lambda;
+ zr[3]=7*_lambda>>1;
+ eob_run=_enc->eob_run[_pli];
+ dct_fzig_zag=_enc->state.opt_data.dct_fzig_zag;
+ huffi=_enc->huff_idxs[_enc->state.frame_type][1][_pli+1>>1];
+ for(zzj=zzi=1;zzj<_zzi&&!_qdct[zzj];zzj++);
+ while(zzj<_zzi){
+ int v;
+ int d0;
+ int d1;
+ int sign;
+ int k;
+ int eob;
+ int dq0;
+ int dq1;
+ int dd0;
+ int dd1;
+ int next_zero;
+ int eob_bits;
+ int dct_fzig_zzj;
+ dct_fzig_zzj=dct_fzig_zag[zzj];
+ v=_dct[zzj];
+ d0=_qdct[zzj];
+ eob=eob_run[zzi];
+ for(zzk=zzj+1;zzk<_zzi&&!_qdct[zzk];zzk++);
+ next_zero=zzk-zzj+62>>6;
+ dq0=d0*_dequant[zzj];
+ dd0=dq0-v;
+ dd0*=dd0;
+ sign=-(d0<0);
+ k=d0+sign^sign;
+ d1=(k-(zzj>_acmin))+sign^sign;
+ dq1=d1*_dequant[zzj];
+ dd1=dq1-v;
+ dd1*=dd1;
+ /*The cost of ending an eob run is included when the alternative is to
+ extend this eob run.
+ A per qi/zzi weight would probably be useful.
+ Including it in the overall tokenization cost was not helpful.
+ The same is true at the far end of the zero run plus token case.*/
+ if(eob>0&&d1==0&&zzk==_zzi){
+ eob_bits=oc_token_bits(_enc,huffi,zzi,OC_DCT_EOB1_TOKEN);
+ }
+ else eob_bits=0;
+ if(zzj==zzi){
+ /*No active zero run.*/
+ int best_token;
+ int best_eb;
+ int token;
+ int best_bits;
+ int bits;
+ int cost;
+ best_token=*(OC_DCT_VALUE_TOKEN_PTR+d0);
+ best_bits=oc_token_bits(_enc,huffi,zzi,best_token);
+ if(d1!=0){
+ token=*(OC_DCT_VALUE_TOKEN_PTR+d1);
+ bits=oc_token_bits(_enc,huffi,zzi,token);
+ cost=dd1+(bits+eob_bits)*_lambda;
+ }
+ else{
+ token=bits=0;
+ cost=dd1+zr[next_zero];
+ }
+ if((dd0+(best_bits+eob_bits)*_lambda)>cost){
+ _idct[dct_fzig_zzj]=dq1;
+ if(d1==0){
+ zzj=zzk;
+ continue;
+ }
+ best_bits=bits;
+ best_token=token;
+ best_eb=*(OC_DCT_VALUE_EB_PTR+d1);
+ }
+ else{
+ best_eb=*(OC_DCT_VALUE_EB_PTR+d0);
+ _idct[dct_fzig_zzj]=dq0;
+ }
+ oc_enc_tokenlog_checkpoint(_enc,stack++,_pli,zzi);
+ if(eob>0){
+ oc_enc_eob_log(_enc,_pli,zzi,eob);
+ eob_run[zzi]=0;
+ }
+ oc_enc_token_log(_enc,_pli,zzi,best_token,best_eb);
+ total_bits+=best_bits;
+ }
+ else{
+ int d;
+ int dc_reserve;
+ int best_token;
+ int best_eb;
+ int best_bits;
+ int best_cost;
+ int best_bits1;
+ int best_token1;
+ int best_eb1;
+ int zr_bits;
+ int eob2;
+ int eob_bits2;
+ int bits;
+ int token;
+ int nzeros;
+ nzeros=zzj-zzi;
+ dc_reserve=zzi+62>>6;
+ /*A zero run, followed by the value alone.*/
+ best_token=best_token1=OC_DCT_SHORT_ZRL_TOKEN+(nzeros+55>>6);
+ best_eb=best_eb1=nzeros-1;
+ eob2=eob_run[zzj];
+ eob_bits2=eob2>0?oc_token_bits(_enc,huffi,zzj,OC_DCT_EOB1_TOKEN):0;
+ zr_bits=oc_token_bits(_enc,huffi,zzi,best_token)+eob_bits2;
+ best_bits=zr_bits
+ +oc_token_bits(_enc,huffi,zzj,*(OC_DCT_VALUE_TOKEN_PTR+d0));
+ d=d0;
+ best_bits1=0;
+ if(d1!=0){
+ best_bits1=zr_bits
+ +oc_token_bits(_enc,huffi,zzj,*(OC_DCT_VALUE_TOKEN_PTR+d1));
+ }
+ if(nzeros<17+dc_reserve){
+ if(k<=2){
+ /*+/- 1 combo token.*/
+ token=OC_DCT_RUN_CAT1_TOKEN[nzeros-1];
+ bits=oc_token_bits(_enc,huffi,zzi,token);
+ if(k==2&&bits<=best_bits1){
+ best_bits1=bits;
+ best_token1=token;
+ best_eb1=OC_DCT_RUN_CAT1_EB[nzeros-1][-sign];
+ }
+ if(k==1&&bits<=best_bits){
+ best_bits=bits;
+ best_token=token;
+ best_eb=OC_DCT_RUN_CAT1_EB[nzeros-1][-sign];
+ }
+ }
+ if(nzeros<3+dc_reserve&&2<=k&&k<=4){
+ /*+/- 2/3 combo token.*/
+ token=OC_DCT_RUN_CAT2A+(nzeros>>1);
+ bits=oc_token_bits(_enc,huffi,zzi,token);
+ if(k==4&&bits<=best_bits1){
+ best_bits1=bits;
+ best_token1=token;
+ best_eb1=OC_DCT_RUN_CAT2_EB[nzeros-1][-sign][1];
+ }
+ if(k!=4&&bits<=best_bits){
+ best_bits=bits;
+ best_token=token;
+ best_eb=OC_DCT_RUN_CAT2_EB[nzeros-1][-sign][k-2];
+ }
+ }
+ }
+ best_cost=dd0+(best_bits+eob_bits)*_lambda;
+ if(d1==0&&(dd1+zr[2+next_zero])<=best_cost){
+ zzj=zzk;
+ continue;
+ }
+ if(d1!=0&&dd1+(best_bits1+eob_bits)*_lambda<best_cost){
+ best_bits=best_bits1;
+ best_token=best_token1;
+ best_eb=best_eb1;
+ d=d1;
+ _idct[dct_fzig_zzj]=dq1;
+ }
+ else _idct[dct_fzig_zzj]=dq0;
+ oc_enc_tokenlog_checkpoint(_enc,stack++,_pli,zzi);
+ if(eob){
+ oc_enc_eob_log(_enc,_pli,zzi,eob);
+ eob_run[zzi]=0;
+ }
+ oc_enc_token_log(_enc,_pli,zzi,best_token,best_eb);
+ /*If a zero run won vs. the combo token we still need to code this
+ value.*/
+ if(best_token<=OC_DCT_ZRL_TOKEN){
+ oc_enc_tokenlog_checkpoint(_enc,stack++,_pli,zzj);
+ if(eob2){
+ oc_enc_eob_log(_enc,_pli,zzj,eob2);
+ /*The cost of any EOB run we disrupted is ignored because doing so
+ improved PSNR/SSIM by a small amount.*/
+ best_bits-=eob_bits2;
+ eob_run[zzj]=0;
+ }
+ oc_enc_token_log(_enc,_pli,zzj,
+ *(OC_DCT_VALUE_TOKEN_PTR+d),*(OC_DCT_VALUE_EB_PTR+d));
+ }
+ total_bits+=best_bits;
+ }
+ zzi=zzj+1;
+ zzj=zzk;
+ }
+ /*Code an EOB run to complete this block.
+    The cost of the EOB run is not included in the total as explained in
+    a comment in the trellis tokenizer above.*/
+ if(zzi<64){
+ int eob;
+ eob=eob_run[zzi]+1;
+ oc_enc_tokenlog_checkpoint(_enc,stack++,_pli,zzi);
+ if(eob>=4095){
+ oc_enc_token_log(_enc,_pli,zzi,OC_DCT_REPEAT_RUN3_TOKEN,eob);
+ eob=0;
+ }
+ eob_run[zzi]=eob;
+ }
+ *_stack=stack;
+ return total_bits;
+}
+
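The branchless absolute-value idiom that appears throughout both tokenizers ((x+s)^s with s=-(x<0)) is also what generates the fast tokenizer's alternate candidate: fold the sign off, step the magnitude one closer to zero, and fold the sign back on. A small sketch of that step (function name illustrative only):

static int oc_step_toward_zero(int d0,int protect){
  int sign;
  int k;
  /*sign is all ones if d0 is negative, else zero.*/
  sign=-(d0<0);
  /*k=|d0|: adding the sign and xoring by it is a branchless abs().*/
  k=d0+sign^sign;
  /*Reduce the magnitude by one unless this coefficient is protected
     (zzj<=_acmin in the code above), then restore the sign.*/
  return (k-!protect)+sign^sign;
}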
void oc_enc_pred_dc_frag_rows(oc_enc_ctx *_enc,
int _pli,int _fragy0,int _frag_yend){
const oc_fragment_plane *fplane;
@@ -695,10 +997,10 @@ void oc_enc_pred_dc_frag_rows(oc_enc_ctx *_enc,
predictor for the same reference frame.*/
for(fragx=0;fragx<nhfrags;fragx++,fragi++){
if(frags[fragi].coded){
- int ref;
- ref=OC_FRAME_FOR_MODE(frags[fragi].mb_mode);
- frag_dc[fragi]=(ogg_int16_t)(frags[fragi].dc-pred_last[ref]);
- pred_last[ref]=frags[fragi].dc;
+ int refi;
+ refi=frags[fragi].refi;
+ frag_dc[fragi]=(ogg_int16_t)(frags[fragi].dc-pred_last[refi]);
+ pred_last[refi]=frags[fragi].dc;
}
}
}
@@ -710,27 +1012,24 @@ void oc_enc_pred_dc_frag_rows(oc_enc_ctx *_enc,
u_frags=frags-nhfrags;
l_ref=-1;
ul_ref=-1;
- u_ref=u_frags[fragi].coded?OC_FRAME_FOR_MODE(u_frags[fragi].mb_mode):-1;
+ u_ref=u_frags[fragi].refi;
for(fragx=0;fragx<nhfrags;fragx++,fragi++){
int ur_ref;
if(fragx+1>=nhfrags)ur_ref=-1;
- else{
- ur_ref=u_frags[fragi+1].coded?
- OC_FRAME_FOR_MODE(u_frags[fragi+1].mb_mode):-1;
- }
+ else ur_ref=u_frags[fragi+1].refi;
if(frags[fragi].coded){
int pred;
- int ref;
- ref=OC_FRAME_FOR_MODE(frags[fragi].mb_mode);
+ int refi;
+ refi=frags[fragi].refi;
/*We break out a separate case based on which of our neighbors use
the same reference frames.
This is somewhat faster than trying to make a generic case which
handles all of them, since it reduces lots of poorly predicted
jumps to one switch statement, and also lets a number of the
multiplications be optimized out by strength reduction.*/
- switch((l_ref==ref)|(ul_ref==ref)<<1|
- (u_ref==ref)<<2|(ur_ref==ref)<<3){
- default:pred=pred_last[ref];break;
+ switch((l_ref==refi)|(ul_ref==refi)<<1|
+ (u_ref==refi)<<2|(ur_ref==refi)<<3){
+ default:pred=pred_last[refi];break;
case 1:
case 3:pred=frags[fragi-1].dc;break;
case 2:pred=u_frags[fragi-1].dc;break;
@@ -764,8 +1063,8 @@ void oc_enc_pred_dc_frag_rows(oc_enc_ctx *_enc,
}break;
}
frag_dc[fragi]=(ogg_int16_t)(frags[fragi].dc-pred);
- pred_last[ref]=frags[fragi].dc;
- l_ref=ref;
+ pred_last[refi]=frags[fragi].dc;
+ l_ref=refi;
}
else l_ref=-1;
ul_ref=u_ref;
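The refi-based predictor selection introduced here can be boiled down to a small helper. This is a sketch only: it keeps the simple cases visible in the context above and elides the fixed-weight combinations the real switch uses for the remaining neighbor patterns.

static int oc_dc_pred_sketch(int refi,int l_ref,int ul_ref,int u_ref,
 int ur_ref,int l_dc,int ul_dc,int last_dc){
  /*One bit per neighbor coded against the same reference frame.*/
  switch((l_ref==refi)|(ul_ref==refi)<<1|(u_ref==refi)<<2|(ur_ref==refi)<<3){
    case 1:
    case 3:return l_dc;     /*Left matches: reuse its DC value.*/
    case 2:return ul_dc;    /*Only up-left matches.*/
    /*...the full function adds weighted combinations of the up, up-right,
        and left values for the other masks...*/
    default:return last_dc; /*No usable neighbor: last DC coded for refi.*/
  }
}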
@@ -850,9 +1149,8 @@ void oc_enc_tokenize_dc_frag_list(oc_enc_ctx *_enc,int _pli,
ti0++;
eob_run0=0;
}
- token=oc_make_dct_token_full(0,0,val,&eb);
- dct_tokens0[ti0]=(unsigned char)token;
- extra_bits0[ti0]=(ogg_uint16_t)eb;
+ dct_tokens0[ti0]=*(OC_DCT_VALUE_TOKEN_PTR+val);
+ extra_bits0[ti0]=*(OC_DCT_VALUE_EB_PTR+val);
ti0++;
}
else{
@@ -863,9 +1161,8 @@ void oc_enc_tokenize_dc_frag_list(oc_enc_ctx *_enc,int _pli,
/*We're in the middle of an active EOB run in stack 1.
Move it to stack 0.*/
if(++eob_run0>=4095){
- token=oc_make_eob_token_full(eob_run0,&eb);
- dct_tokens0[ti0]=(unsigned char)token;
- extra_bits0[ti0]=(ogg_uint16_t)eb;
+ dct_tokens0[ti0]=OC_DCT_REPEAT_RUN3_TOKEN;
+ extra_bits0[ti0]=eob_run0;
ti0++;
eob_run0=0;
}
@@ -996,9 +1293,8 @@ void oc_enc_tokenize_dc_frag_list(oc_enc_ctx *_enc,int _pli,
neobs1--;
/*If we have more than 4095 EOBs outstanding in stack1, flush the run.*/
if(eob_run1-neobs1>=4095){
- token=oc_make_eob_token_full(4095,&eb);
- dct_tokens1[ti1w]=(unsigned char)token;
- extra_bits1[ti1w]=(ogg_uint16_t)eb;
+ dct_tokens1[ti1w]=OC_DCT_REPEAT_RUN3_TOKEN;
+ extra_bits1[ti1w]=4095;
ti1w++;
eob_run1-=4095;
}
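Several hunks above share the same pattern for capping EOB runs; the sketch below restates that step with the stack-0 names from this function (the helper itself is illustrative only): extend the outstanding run by one fragment and flush it as a single OC_DCT_REPEAT_RUN3_TOKEN once it reaches the 4095-fragment cap.

static void oc_eob_run_extend(unsigned char *_dct_tokens,
 ogg_uint16_t *_extra_bits,int *_ti,int *_eob_run){
  if(++*_eob_run>=4095){
    /*The full run length is carried in the extra bits of a single token.*/
    _dct_tokens[*_ti]=OC_DCT_REPEAT_RUN3_TOKEN;
    _extra_bits[*_ti]=(ogg_uint16_t)*_eob_run;
    (*_ti)++;
    *_eob_run=0;
  }
}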
diff --git a/thirdparty/libtheora/x86/mmxencfrag.c b/thirdparty/libtheora/x86/mmxencfrag.c
index c79ff01fcc..cc9be8d867 100644
--- a/thirdparty/libtheora/x86/mmxencfrag.c
+++ b/thirdparty/libtheora/x86/mmxencfrag.c
@@ -65,7 +65,7 @@ unsigned oc_enc_frag_sad_mmxext(const unsigned char *_src,
"paddw %%mm6,%%mm0\n\t"
"paddw %%mm2,%%mm0\n\t"
"movd %%mm0,%[ret]\n\t"
- :[ret]"=a"(ret),[src]"+%r"(_src),[ref]"+r"(_ref),[ystride3]"=&r"(ystride3)
+ :[ret]"=a"(ret),[src]"+r"(_src),[ref]"+r"(_ref),[ystride3]"=&r"(ystride3)
:[ystride]"r"((ptrdiff_t)_ystride)
);
return (unsigned)ret;
@@ -87,7 +87,9 @@ unsigned oc_enc_frag_sad_thresh_mmxext(const unsigned char *_src,
The latter is exactly 1 too large when the low bit of two corresponding \
bytes is only set in one of them. \
Therefore we pxor the operands, pand to mask out the low bits, and psubb to \
- correct the output of pavgb.*/ \
+ correct the output of pavgb. \
+ TODO: This should be rewritten to compute ~pavgb(~a,~b) instead, which \
+ schedules better; currently, however, this function is unused.*/ \
"movq %%mm0,%%mm6\n\t" \
"lea (%[ref1],%[ystride],2),%[ref1]\n\t" \
"pxor %%mm1,%%mm0\n\t" \
@@ -153,7 +155,7 @@ unsigned oc_enc_frag_sad2_thresh_mmxext(const unsigned char *_src,
OC_SAD2_LOOP
OC_SAD2_LOOP
OC_SAD2_TAIL
- :[ret]"=&a"(ret),[src]"+r"(_src),[ref1]"+%r"(_ref1),[ref2]"+r"(_ref2)
+ :[ret]"=&a"(ret),[src]"+r"(_src),[ref1]"+r"(_ref1),[ref2]"+r"(_ref2)
:[ystride]"r"((ptrdiff_t)_ystride)
);
return (unsigned)ret;
@@ -163,54 +165,54 @@ unsigned oc_enc_frag_sad2_thresh_mmxext(const unsigned char *_src,
16-bit difference in %%mm0...%%mm7.*/
#define OC_LOAD_SUB_8x4(_off) \
"#OC_LOAD_SUB_8x4\n\t" \
- "movd "_off"(%[src]),%%mm0\n\t" \
- "movd "_off"(%[ref]),%%mm4\n\t" \
- "movd "_off"(%[src],%[src_ystride]),%%mm1\n\t" \
+ "movd "#_off"(%[src]),%%mm0\n\t" \
+ "movd "#_off"(%[ref]),%%mm4\n\t" \
+ "movd "#_off"(%[src],%[src_ystride]),%%mm1\n\t" \
"lea (%[src],%[src_ystride],2),%[src]\n\t" \
- "movd "_off"(%[ref],%[ref_ystride]),%%mm5\n\t" \
+ "movd "#_off"(%[ref],%[ref_ystride]),%%mm5\n\t" \
"lea (%[ref],%[ref_ystride],2),%[ref]\n\t" \
- "movd "_off"(%[src]),%%mm2\n\t" \
- "movd "_off"(%[ref]),%%mm7\n\t" \
- "movd "_off"(%[src],%[src_ystride]),%%mm3\n\t" \
- "movd "_off"(%[ref],%[ref_ystride]),%%mm6\n\t" \
+ "movd "#_off"(%[src]),%%mm2\n\t" \
+ "movd "#_off"(%[ref]),%%mm7\n\t" \
+ "movd "#_off"(%[src],%[src_ystride]),%%mm3\n\t" \
+ "movd "#_off"(%[ref],%[ref_ystride]),%%mm6\n\t" \
"punpcklbw %%mm4,%%mm0\n\t" \
"lea (%[src],%[src_ystride],2),%[src]\n\t" \
"punpcklbw %%mm4,%%mm4\n\t" \
"lea (%[ref],%[ref_ystride],2),%[ref]\n\t" \
"psubw %%mm4,%%mm0\n\t" \
- "movd "_off"(%[src]),%%mm4\n\t" \
- "movq %%mm0,"_off"*2(%[buf])\n\t" \
- "movd "_off"(%[ref]),%%mm0\n\t" \
+ "movd "#_off"(%[src]),%%mm4\n\t" \
+ "movq %%mm0,"OC_MEM_OFFS(_off*2,buf)"\n\t" \
+ "movd "#_off"(%[ref]),%%mm0\n\t" \
"punpcklbw %%mm5,%%mm1\n\t" \
"punpcklbw %%mm5,%%mm5\n\t" \
"psubw %%mm5,%%mm1\n\t" \
- "movd "_off"(%[src],%[src_ystride]),%%mm5\n\t" \
+ "movd "#_off"(%[src],%[src_ystride]),%%mm5\n\t" \
"punpcklbw %%mm7,%%mm2\n\t" \
"punpcklbw %%mm7,%%mm7\n\t" \
"psubw %%mm7,%%mm2\n\t" \
- "movd "_off"(%[ref],%[ref_ystride]),%%mm7\n\t" \
+ "movd "#_off"(%[ref],%[ref_ystride]),%%mm7\n\t" \
"punpcklbw %%mm6,%%mm3\n\t" \
"lea (%[src],%[src_ystride],2),%[src]\n\t" \
"punpcklbw %%mm6,%%mm6\n\t" \
"psubw %%mm6,%%mm3\n\t" \
- "movd "_off"(%[src]),%%mm6\n\t" \
+ "movd "#_off"(%[src]),%%mm6\n\t" \
"punpcklbw %%mm0,%%mm4\n\t" \
"lea (%[ref],%[ref_ystride],2),%[ref]\n\t" \
"punpcklbw %%mm0,%%mm0\n\t" \
"lea (%[src],%[src_ystride],2),%[src]\n\t" \
"psubw %%mm0,%%mm4\n\t" \
- "movd "_off"(%[ref]),%%mm0\n\t" \
+ "movd "#_off"(%[ref]),%%mm0\n\t" \
"punpcklbw %%mm7,%%mm5\n\t" \
"neg %[src_ystride]\n\t" \
"punpcklbw %%mm7,%%mm7\n\t" \
"psubw %%mm7,%%mm5\n\t" \
- "movd "_off"(%[src],%[src_ystride]),%%mm7\n\t" \
+ "movd "#_off"(%[src],%[src_ystride]),%%mm7\n\t" \
"punpcklbw %%mm0,%%mm6\n\t" \
"lea (%[ref],%[ref_ystride],2),%[ref]\n\t" \
"punpcklbw %%mm0,%%mm0\n\t" \
"neg %[ref_ystride]\n\t" \
"psubw %%mm0,%%mm6\n\t" \
- "movd "_off"(%[ref],%[ref_ystride]),%%mm0\n\t" \
+ "movd "#_off"(%[ref],%[ref_ystride]),%%mm0\n\t" \
"lea (%[src],%[src_ystride],8),%[src]\n\t" \
"punpcklbw %%mm0,%%mm7\n\t" \
"neg %[src_ystride]\n\t" \
@@ -218,24 +220,24 @@ unsigned oc_enc_frag_sad2_thresh_mmxext(const unsigned char *_src,
"lea (%[ref],%[ref_ystride],8),%[ref]\n\t" \
"psubw %%mm0,%%mm7\n\t" \
"neg %[ref_ystride]\n\t" \
- "movq "_off"*2(%[buf]),%%mm0\n\t" \
+ "movq "OC_MEM_OFFS(_off*2,buf)",%%mm0\n\t" \
/*Load an 8x4 array of pixel values from %[src] into %%mm0...%%mm7.*/
#define OC_LOAD_8x4(_off) \
"#OC_LOAD_8x4\n\t" \
- "movd "_off"(%[src]),%%mm0\n\t" \
- "movd "_off"(%[src],%[ystride]),%%mm1\n\t" \
- "movd "_off"(%[src],%[ystride],2),%%mm2\n\t" \
+ "movd "#_off"(%[src]),%%mm0\n\t" \
+ "movd "#_off"(%[src],%[ystride]),%%mm1\n\t" \
+ "movd "#_off"(%[src],%[ystride],2),%%mm2\n\t" \
"pxor %%mm7,%%mm7\n\t" \
- "movd "_off"(%[src],%[ystride3]),%%mm3\n\t" \
+ "movd "#_off"(%[src],%[ystride3]),%%mm3\n\t" \
"punpcklbw %%mm7,%%mm0\n\t" \
- "movd "_off"(%[src4]),%%mm4\n\t" \
+ "movd "#_off"(%[src4]),%%mm4\n\t" \
"punpcklbw %%mm7,%%mm1\n\t" \
- "movd "_off"(%[src4],%[ystride]),%%mm5\n\t" \
+ "movd "#_off"(%[src4],%[ystride]),%%mm5\n\t" \
"punpcklbw %%mm7,%%mm2\n\t" \
- "movd "_off"(%[src4],%[ystride],2),%%mm6\n\t" \
+ "movd "#_off"(%[src4],%[ystride],2),%%mm6\n\t" \
"punpcklbw %%mm7,%%mm3\n\t" \
- "movd "_off"(%[src4],%[ystride3]),%%mm7\n\t" \
+ "movd "#_off"(%[src4],%[ystride3]),%%mm7\n\t" \
"punpcklbw %%mm4,%%mm4\n\t" \
"punpcklbw %%mm5,%%mm5\n\t" \
"psrlw $8,%%mm4\n\t" \
@@ -248,7 +250,7 @@ unsigned oc_enc_frag_sad2_thresh_mmxext(const unsigned char *_src,
/*Performs the first two stages of an 8-point 1-D Hadamard transform.
The transform is performed in place, except that outputs 0-3 are swapped with
outputs 4-7.
- Outputs 2, 3, 6 and 7 from the second stage are negated (which allows us to
+ Outputs 2, 3, 6, and 7 from the second stage are negated (which allows us to
perform this stage in place with no temporary registers).*/
#define OC_HADAMARD_AB_8x4 \
"#OC_HADAMARD_AB_8x4\n\t" \
@@ -281,7 +283,7 @@ unsigned oc_enc_frag_sad2_thresh_mmxext(const unsigned char *_src,
"psubw %%mm5,%%mm7\n\t" \
/*Performs the last stage of an 8-point 1-D Hadamard transform in place.
- Ouputs 1, 3, 5, and 7 are negated (which allows us to perform this stage in
+ Outputs 1, 3, 5, and 7 are negated (which allows us to perform this stage in
place with no temporary registers).*/
#define OC_HADAMARD_C_8x4 \
"#OC_HADAMARD_C_8x4\n\t" \
@@ -324,8 +326,8 @@ unsigned oc_enc_frag_sad2_thresh_mmxext(const unsigned char *_src,
Even with pabsw, it would be (3+1)*8+7=39 instructions (with no spills). \
This implementation is only 26 (+4 for spilling registers).*/ \
"#OC_HADAMARD_C_ABS_ACCUM_A_8x4\n\t" \
- "movq %%mm7,"_r7"(%[buf])\n\t" \
- "movq %%mm6,"_r6"(%[buf])\n\t" \
+ "movq %%mm7,"OC_MEM_OFFS(_r7,buf)"\n\t" \
+ "movq %%mm6,"OC_MEM_OFFS(_r6,buf)"\n\t" \
/*mm7={0x7FFF}x4 \
mm0=max(abs(mm0),abs(mm1))-0x7FFF*/ \
"pcmpeqb %%mm7,%%mm7\n\t" \
@@ -343,14 +345,14 @@ unsigned oc_enc_frag_sad2_thresh_mmxext(const unsigned char *_src,
"pmaxsw %%mm5,%%mm4\n\t" \
"paddw %%mm3,%%mm6\n\t" \
"paddw %%mm5,%%mm1\n\t" \
- "movq "_r7"(%[buf]),%%mm3\n\t" \
+ "movq "OC_MEM_OFFS(_r7,buf)",%%mm3\n\t" \
/*Performs the second part of the final stage of the Hadamard transform and
summing of absolute values.*/
#define OC_HADAMARD_C_ABS_ACCUM_B_8x4(_r6,_r7) \
"#OC_HADAMARD_C_ABS_ACCUM_B_8x4\n\t" \
"paddsw %%mm7,%%mm6\n\t" \
- "movq "_r6"(%[buf]),%%mm5\n\t" \
+ "movq "OC_MEM_OFFS(_r6,buf)",%%mm5\n\t" \
"paddsw %%mm7,%%mm1\n\t" \
"psubw %%mm6,%%mm2\n\t" \
"psubw %%mm1,%%mm4\n\t" \
@@ -391,7 +393,7 @@ unsigned oc_enc_frag_sad2_thresh_mmxext(const unsigned char *_src,
#define OC_TRANSPOSE_4x4x2(_off) \
"#OC_TRANSPOSE_4x4x2\n\t" \
/*First 4x4 transpose:*/ \
- "movq %%mm5,0x10+"_off"(%[buf])\n\t" \
+ "movq %%mm5,"OC_MEM_OFFS(0x10+(_off),buf)"\n\t" \
/*mm0 = e3 e2 e1 e0 \
mm1 = f3 f2 f1 f0 \
mm2 = g3 g2 g1 g0 \
@@ -411,13 +413,13 @@ unsigned oc_enc_frag_sad2_thresh_mmxext(const unsigned char *_src,
"punpckhdq %%mm2,%%mm1\n\t" \
"movq %%mm3,%%mm2\n\t" \
"punpckhdq %%mm5,%%mm3\n\t" \
- "movq %%mm0,0x40+"_off"(%[buf])\n\t" \
+ "movq %%mm0,"OC_MEM_OFFS(0x40+(_off),buf)"\n\t" \
"punpckldq %%mm5,%%mm2\n\t" \
/*mm0 = h0 g0 f0 e0 \
mm1 = h1 g1 f1 e1 \
mm2 = h2 g2 f2 e2 \
mm3 = h3 g3 f3 e3*/ \
- "movq 0x10+"_off"(%[buf]),%%mm5\n\t" \
+ "movq "OC_MEM_OFFS(0x10+(_off),buf)",%%mm5\n\t" \
/*Second 4x4 transpose:*/ \
/*mm4 = a3 a2 a1 a0 \
mm5 = b3 b2 b1 b0 \
@@ -425,11 +427,11 @@ unsigned oc_enc_frag_sad2_thresh_mmxext(const unsigned char *_src,
mm7 = d3 d2 d1 d0*/ \
"movq %%mm6,%%mm0\n\t" \
"punpcklwd %%mm7,%%mm6\n\t" \
- "movq %%mm1,0x50+"_off"(%[buf])\n\t" \
+ "movq %%mm1,"OC_MEM_OFFS(0x50+(_off),buf)"\n\t" \
"punpckhwd %%mm7,%%mm0\n\t" \
"movq %%mm4,%%mm7\n\t" \
"punpcklwd %%mm5,%%mm4\n\t" \
- "movq %%mm2,0x60+"_off"(%[buf])\n\t" \
+ "movq %%mm2,"OC_MEM_OFFS(0x60+(_off),buf)"\n\t" \
"punpckhwd %%mm5,%%mm7\n\t" \
/*mm4 = b1 a1 b0 a0 \
mm7 = b3 a3 b2 a2 \
@@ -437,7 +439,7 @@ unsigned oc_enc_frag_sad2_thresh_mmxext(const unsigned char *_src,
mm0 = d3 c3 d2 c2*/ \
"movq %%mm4,%%mm5\n\t" \
"punpckldq %%mm6,%%mm4\n\t" \
- "movq %%mm3,0x70+"_off"(%[buf])\n\t" \
+ "movq %%mm3,"OC_MEM_OFFS(0x70+(_off),buf)"\n\t" \
"punpckhdq %%mm6,%%mm5\n\t" \
"movq %%mm7,%%mm6\n\t" \
"punpckhdq %%mm0,%%mm7\n\t" \
@@ -447,100 +449,102 @@ unsigned oc_enc_frag_sad2_thresh_mmxext(const unsigned char *_src,
mm6 = d2 c2 b2 a2 \
mm7 = d3 c3 b3 a3*/ \
-static unsigned oc_int_frag_satd_thresh_mmxext(const unsigned char *_src,
- int _src_ystride,const unsigned char *_ref,int _ref_ystride,unsigned _thresh){
- OC_ALIGN8(ogg_int16_t buf[64]);
- ogg_int16_t *bufp;
- unsigned ret;
- unsigned ret2;
- bufp=buf;
+static unsigned oc_int_frag_satd_mmxext(int *_dc,
+ const unsigned char *_src,int _src_ystride,
+ const unsigned char *_ref,int _ref_ystride){
+ OC_ALIGN8(ogg_int16_t buf[64]);
+ unsigned ret;
+ unsigned ret2;
+ int dc;
__asm__ __volatile__(
- OC_LOAD_SUB_8x4("0x00")
+ OC_LOAD_SUB_8x4(0x00)
OC_HADAMARD_8x4
- OC_TRANSPOSE_4x4x2("0x00")
+ OC_TRANSPOSE_4x4x2(0x00)
/*Finish swapping out this 8x4 block to make room for the next one.
mm0...mm3 have been swapped out already.*/
- "movq %%mm4,0x00(%[buf])\n\t"
- "movq %%mm5,0x10(%[buf])\n\t"
- "movq %%mm6,0x20(%[buf])\n\t"
- "movq %%mm7,0x30(%[buf])\n\t"
- OC_LOAD_SUB_8x4("0x04")
+ "movq %%mm4,"OC_MEM_OFFS(0x00,buf)"\n\t"
+ "movq %%mm5,"OC_MEM_OFFS(0x10,buf)"\n\t"
+ "movq %%mm6,"OC_MEM_OFFS(0x20,buf)"\n\t"
+ "movq %%mm7,"OC_MEM_OFFS(0x30,buf)"\n\t"
+ OC_LOAD_SUB_8x4(0x04)
OC_HADAMARD_8x4
- OC_TRANSPOSE_4x4x2("0x08")
+ OC_TRANSPOSE_4x4x2(0x08)
/*Here the first 4x4 block of output from the last transpose is the second
4x4 block of input for the next transform.
We have cleverly arranged that it already be in the appropriate place, so
we only have to do half the loads.*/
- "movq 0x10(%[buf]),%%mm1\n\t"
- "movq 0x20(%[buf]),%%mm2\n\t"
- "movq 0x30(%[buf]),%%mm3\n\t"
- "movq 0x00(%[buf]),%%mm0\n\t"
- OC_HADAMARD_ABS_ACCUM_8x4("0x28","0x38")
+ "movq "OC_MEM_OFFS(0x10,buf)",%%mm1\n\t"
+ "movq "OC_MEM_OFFS(0x20,buf)",%%mm2\n\t"
+ "movq "OC_MEM_OFFS(0x30,buf)",%%mm3\n\t"
+ "movq "OC_MEM_OFFS(0x00,buf)",%%mm0\n\t"
+ /*We split out the stages here so we can save the DC coefficient in the
+ middle.*/
+ OC_HADAMARD_AB_8x4
+ OC_HADAMARD_C_ABS_ACCUM_A_8x4(0x28,0x38)
+ "movd %%mm1,%[dc]\n\t"
+ OC_HADAMARD_C_ABS_ACCUM_B_8x4(0x28,0x38)
/*Up to this point, everything fit in 16 bits (8 input + 1 for the
difference + 2*3 for the two 8-point 1-D Hadamards - 1 for the abs - 1
for the factor of two we dropped + 3 for the vertical accumulation).
Now we finally have to promote things to dwords.
We break this part out of OC_HADAMARD_ABS_ACCUM_8x4 to hide the long
latency of pmaddwd by starting the next series of loads now.*/
- "mov %[thresh],%[ret2]\n\t"
"pmaddwd %%mm7,%%mm0\n\t"
- "movq 0x50(%[buf]),%%mm1\n\t"
- "movq 0x58(%[buf]),%%mm5\n\t"
+ "movq "OC_MEM_OFFS(0x50,buf)",%%mm1\n\t"
+ "movq "OC_MEM_OFFS(0x58,buf)",%%mm5\n\t"
"movq %%mm0,%%mm4\n\t"
- "movq 0x60(%[buf]),%%mm2\n\t"
+ "movq "OC_MEM_OFFS(0x60,buf)",%%mm2\n\t"
"punpckhdq %%mm0,%%mm0\n\t"
- "movq 0x68(%[buf]),%%mm6\n\t"
+ "movq "OC_MEM_OFFS(0x68,buf)",%%mm6\n\t"
"paddd %%mm0,%%mm4\n\t"
- "movq 0x70(%[buf]),%%mm3\n\t"
- "movd %%mm4,%[ret]\n\t"
- "movq 0x78(%[buf]),%%mm7\n\t"
- /*The sums produced by OC_HADAMARD_ABS_ACCUM_8x4 each have an extra 4
- added to them, and a factor of two removed; correct the final sum here.*/
- "lea -32(%[ret],%[ret]),%[ret]\n\t"
- "movq 0x40(%[buf]),%%mm0\n\t"
- "cmp %[ret2],%[ret]\n\t"
- "movq 0x48(%[buf]),%%mm4\n\t"
- "jae 1f\n\t"
- OC_HADAMARD_ABS_ACCUM_8x4("0x68","0x78")
+ "movq "OC_MEM_OFFS(0x70,buf)",%%mm3\n\t"
+ "movd %%mm4,%[ret2]\n\t"
+ "movq "OC_MEM_OFFS(0x78,buf)",%%mm7\n\t"
+ "movq "OC_MEM_OFFS(0x40,buf)",%%mm0\n\t"
+ "movq "OC_MEM_OFFS(0x48,buf)",%%mm4\n\t"
+ OC_HADAMARD_ABS_ACCUM_8x4(0x68,0x78)
"pmaddwd %%mm7,%%mm0\n\t"
- /*There isn't much to stick in here to hide the latency this time, but the
- alternative to pmaddwd is movq->punpcklwd->punpckhwd->paddd, whose
- latency is even worse.*/
- "sub $32,%[ret]\n\t"
+ /*Subtract abs(dc) from 2*ret2.*/
+ "movsx %w[dc],%[dc]\n\t"
+ "cdq\n\t"
+ "lea (%[ret],%[ret2],2),%[ret2]\n\t"
"movq %%mm0,%%mm4\n\t"
"punpckhdq %%mm0,%%mm0\n\t"
+ "xor %[dc],%[ret]\n\t"
"paddd %%mm0,%%mm4\n\t"
- "movd %%mm4,%[ret2]\n\t"
- "lea (%[ret],%[ret2],2),%[ret]\n\t"
- ".p2align 4,,15\n\t"
- "1:\n\t"
- /*Although it looks like we're using 7 registers here, gcc can alias %[ret]
+ /*The sums produced by OC_HADAMARD_ABS_ACCUM_8x4 each have an extra 4
+ added to them, a factor of two removed, and the DC value included;
+ correct the final sum here.*/
+ "sub %[ret],%[ret2]\n\t"
+ "movd %%mm4,%[ret]\n\t"
+ "lea -64(%[ret2],%[ret],2),%[ret]\n\t"
+ /*Although it looks like we're using 8 registers here, gcc can alias %[ret]
and %[ret2] with some of the inputs, since for once we don't write to
- them until after we're done using everything but %[buf] (which is also
- listed as an output to ensure gcc _doesn't_ alias them against it).*/
+ them until after we're done using everything but %[buf].*/
/*Note that _src_ystride and _ref_ystride must be given non-overlapping
     constraints, otherwise if gcc can prove they're equal it will allocate
them to the same register (which is bad); _src and _ref face a similar
problem, though those are never actually the same.*/
- :[ret]"=a"(ret),[ret2]"=r"(ret2),[buf]"+r"(bufp)
+ :[ret]"=d"(ret),[ret2]"=r"(ret2),[dc]"=a"(dc),
+ [buf]"=m"(OC_ARRAY_OPERAND(ogg_int16_t,buf,64))
:[src]"r"(_src),[src_ystride]"c"((ptrdiff_t)_src_ystride),
- [ref]"r"(_ref),[ref_ystride]"d"((ptrdiff_t)_ref_ystride),
- [thresh]"m"(_thresh)
+ [ref]"r"(_ref),[ref_ystride]"d"((ptrdiff_t)_ref_ystride)
/*We have to use neg, so we actually clobber the condition codes for once
(not to mention cmp, sub, and add).*/
:"cc"
);
+ *_dc=dc;
return ret;
}
-unsigned oc_enc_frag_satd_thresh_mmxext(const unsigned char *_src,
- const unsigned char *_ref,int _ystride,unsigned _thresh){
- return oc_int_frag_satd_thresh_mmxext(_src,_ystride,_ref,_ystride,_thresh);
+unsigned oc_enc_frag_satd_mmxext(int *_dc,const unsigned char *_src,
+ const unsigned char *_ref,int _ystride){
+ return oc_int_frag_satd_mmxext(_dc,_src,_ystride,_ref,_ystride);
}
/*Our internal implementation of frag_copy2 takes an extra stride parameter so
- we can share code with oc_enc_frag_satd2_thresh_mmxext().*/
-static void oc_int_frag_copy2_mmxext(unsigned char *_dst,int _dst_ystride,
+ we can share code with oc_enc_frag_satd2_mmxext().*/
+void oc_int_frag_copy2_mmxext(unsigned char *_dst,int _dst_ystride,
const unsigned char *_src1,const unsigned char *_src2,int _src_ystride){
__asm__ __volatile__(
/*Load the first 3 rows.*/
@@ -649,55 +653,53 @@ static void oc_int_frag_copy2_mmxext(unsigned char *_dst,int _dst_ystride,
"psubb %%mm4,%%mm2\n\t"
/*%%mm2 (row 7) is done, write it out.*/
"movq %%mm2,(%[dst],%[dst_ystride])\n\t"
- :[dst]"+r"(_dst),[src1]"+%r"(_src1),[src2]"+r"(_src2)
+ :[dst]"+r"(_dst),[src1]"+r"(_src1),[src2]"+r"(_src2)
:[dst_ystride]"r"((ptrdiff_t)_dst_ystride),
[src_ystride]"r"((ptrdiff_t)_src_ystride)
:"memory"
);
}
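For reference, the pavgb/pxor/pand/psubb sequence above computes the plain truncating average of the two reference blocks, eight bytes at a time. A scalar sketch of the same 8x8 operation (function name illustrative only):

static void oc_frag_copy2_c_sketch(unsigned char *_dst,int _dst_ystride,
 const unsigned char *_src1,const unsigned char *_src2,int _src_ystride){
  int i;
  int j;
  for(i=0;i<8;i++){
    /*dst=(src1+src2)>>1 for each pixel, i.e. the average rounded down.*/
    for(j=0;j<8;j++)_dst[j]=(unsigned char)(_src1[j]+_src2[j]>>1);
    _dst+=_dst_ystride;
    _src1+=_src_ystride;
    _src2+=_src_ystride;
  }
}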
-unsigned oc_enc_frag_satd2_thresh_mmxext(const unsigned char *_src,
- const unsigned char *_ref1,const unsigned char *_ref2,int _ystride,
- unsigned _thresh){
+unsigned oc_enc_frag_satd2_mmxext(int *_dc,const unsigned char *_src,
+ const unsigned char *_ref1,const unsigned char *_ref2,int _ystride){
OC_ALIGN8(unsigned char ref[64]);
oc_int_frag_copy2_mmxext(ref,8,_ref1,_ref2,_ystride);
- return oc_int_frag_satd_thresh_mmxext(_src,_ystride,ref,8,_thresh);
+ return oc_int_frag_satd_mmxext(_dc,_src,_ystride,ref,8);
}
-unsigned oc_enc_frag_intra_satd_mmxext(const unsigned char *_src,
- int _ystride){
- OC_ALIGN8(ogg_int16_t buf[64]);
- ogg_int16_t *bufp;
- unsigned ret;
- unsigned ret2;
- bufp=buf;
+unsigned oc_enc_frag_intra_satd_mmxext(int *_dc,
+ const unsigned char *_src,int _ystride){
+ OC_ALIGN8(ogg_int16_t buf[64]);
+ unsigned ret;
+ unsigned ret2;
+ int dc;
__asm__ __volatile__(
- OC_LOAD_8x4("0x00")
+ OC_LOAD_8x4(0x00)
OC_HADAMARD_8x4
- OC_TRANSPOSE_4x4x2("0x00")
+ OC_TRANSPOSE_4x4x2(0x00)
/*Finish swapping out this 8x4 block to make room for the next one.
mm0...mm3 have been swapped out already.*/
- "movq %%mm4,0x00(%[buf])\n\t"
- "movq %%mm5,0x10(%[buf])\n\t"
- "movq %%mm6,0x20(%[buf])\n\t"
- "movq %%mm7,0x30(%[buf])\n\t"
- OC_LOAD_8x4("0x04")
+ "movq %%mm4,"OC_MEM_OFFS(0x00,buf)"\n\t"
+ "movq %%mm5,"OC_MEM_OFFS(0x10,buf)"\n\t"
+ "movq %%mm6,"OC_MEM_OFFS(0x20,buf)"\n\t"
+ "movq %%mm7,"OC_MEM_OFFS(0x30,buf)"\n\t"
+ OC_LOAD_8x4(0x04)
OC_HADAMARD_8x4
- OC_TRANSPOSE_4x4x2("0x08")
+ OC_TRANSPOSE_4x4x2(0x08)
/*Here the first 4x4 block of output from the last transpose is the second
4x4 block of input for the next transform.
We have cleverly arranged that it already be in the appropriate place, so
we only have to do half the loads.*/
- "movq 0x10(%[buf]),%%mm1\n\t"
- "movq 0x20(%[buf]),%%mm2\n\t"
- "movq 0x30(%[buf]),%%mm3\n\t"
- "movq 0x00(%[buf]),%%mm0\n\t"
+ "movq "OC_MEM_OFFS(0x10,buf)",%%mm1\n\t"
+ "movq "OC_MEM_OFFS(0x20,buf)",%%mm2\n\t"
+ "movq "OC_MEM_OFFS(0x30,buf)",%%mm3\n\t"
+ "movq "OC_MEM_OFFS(0x00,buf)",%%mm0\n\t"
/*We split out the stages here so we can save the DC coefficient in the
middle.*/
OC_HADAMARD_AB_8x4
- OC_HADAMARD_C_ABS_ACCUM_A_8x4("0x28","0x38")
- "movd %%mm1,%[ret]\n\t"
- OC_HADAMARD_C_ABS_ACCUM_B_8x4("0x28","0x38")
+ OC_HADAMARD_C_ABS_ACCUM_A_8x4(0x28,0x38)
+ "movd %%mm1,%[dc]\n\t"
+ OC_HADAMARD_C_ABS_ACCUM_B_8x4(0x28,0x38)
/*Up to this point, everything fit in 16 bits (8 input + 1 for the
difference + 2*3 for the two 8-point 1-D Hadamards - 1 for the abs - 1
for the factor of two we dropped + 3 for the vertical accumulation).
@@ -705,41 +707,43 @@ unsigned oc_enc_frag_intra_satd_mmxext(const unsigned char *_src,
We break this part out of OC_HADAMARD_ABS_ACCUM_8x4 to hide the long
latency of pmaddwd by starting the next series of loads now.*/
"pmaddwd %%mm7,%%mm0\n\t"
- "movq 0x50(%[buf]),%%mm1\n\t"
- "movq 0x58(%[buf]),%%mm5\n\t"
- "movq 0x60(%[buf]),%%mm2\n\t"
+ "movq "OC_MEM_OFFS(0x50,buf)",%%mm1\n\t"
+ "movq "OC_MEM_OFFS(0x58,buf)",%%mm5\n\t"
+ "movq "OC_MEM_OFFS(0x60,buf)",%%mm2\n\t"
"movq %%mm0,%%mm4\n\t"
- "movq 0x68(%[buf]),%%mm6\n\t"
+ "movq "OC_MEM_OFFS(0x68,buf)",%%mm6\n\t"
"punpckhdq %%mm0,%%mm0\n\t"
- "movq 0x70(%[buf]),%%mm3\n\t"
+ "movq "OC_MEM_OFFS(0x70,buf)",%%mm3\n\t"
"paddd %%mm0,%%mm4\n\t"
- "movq 0x78(%[buf]),%%mm7\n\t"
- "movd %%mm4,%[ret2]\n\t"
- "movq 0x40(%[buf]),%%mm0\n\t"
- "movq 0x48(%[buf]),%%mm4\n\t"
- OC_HADAMARD_ABS_ACCUM_8x4("0x68","0x78")
+ "movq "OC_MEM_OFFS(0x78,buf)",%%mm7\n\t"
+ "movd %%mm4,%[ret]\n\t"
+ "movq "OC_MEM_OFFS(0x40,buf)",%%mm0\n\t"
+ "movq "OC_MEM_OFFS(0x48,buf)",%%mm4\n\t"
+ OC_HADAMARD_ABS_ACCUM_8x4(0x68,0x78)
"pmaddwd %%mm7,%%mm0\n\t"
/*We assume that the DC coefficient is always positive (which is true,
because the input to the INTRA transform was not a difference).*/
- "movzx %w[ret],%[ret]\n\t"
- "add %[ret2],%[ret2]\n\t"
- "sub %[ret],%[ret2]\n\t"
+ "movzx %w[dc],%[dc]\n\t"
+ "add %[ret],%[ret]\n\t"
+ "sub %[dc],%[ret]\n\t"
"movq %%mm0,%%mm4\n\t"
"punpckhdq %%mm0,%%mm0\n\t"
"paddd %%mm0,%%mm4\n\t"
- "movd %%mm4,%[ret]\n\t"
- "lea -64(%[ret2],%[ret],2),%[ret]\n\t"
- /*Although it looks like we're using 7 registers here, gcc can alias %[ret]
+ "movd %%mm4,%[ret2]\n\t"
+ "lea -64(%[ret],%[ret2],2),%[ret]\n\t"
+ /*Although it looks like we're using 8 registers here, gcc can alias %[ret]
and %[ret2] with some of the inputs, since for once we don't write to
them until after we're done using everything but %[buf] (which is also
listed as an output to ensure gcc _doesn't_ alias them against it).*/
- :[ret]"=a"(ret),[ret2]"=r"(ret2),[buf]"+r"(bufp)
+ :[ret]"=a"(ret),[ret2]"=r"(ret2),[dc]"=r"(dc),
+ [buf]"=m"(OC_ARRAY_OPERAND(ogg_int16_t,buf,64))
:[src]"r"(_src),[src4]"r"(_src+4*_ystride),
[ystride]"r"((ptrdiff_t)_ystride),[ystride3]"r"((ptrdiff_t)3*_ystride)
/*We have to use sub, so we actually clobber the condition codes for once
(not to mention add).*/
:"cc"
);
+ *_dc=dc;
return ret;
}
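The working-precision comment inside the block above can be checked with a quick tally: 8 bits of input, +1 for forming a difference, +2*3 for the two 8-point 1-D Hadamards, -1 for taking absolute values, -1 for the dropped factor of two, and +3 for the vertical accumulation gives 8+1+6-1-1+3 = 16, so every intermediate still fits in a 16-bit word and only the final pmaddwd accumulation needs to widen to 32 bits.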
diff --git a/thirdparty/libtheora/x86/mmxfdct.c b/thirdparty/libtheora/x86/mmxfdct.c
index 211875255e..17668358b8 100644
--- a/thirdparty/libtheora/x86/mmxfdct.c
+++ b/thirdparty/libtheora/x86/mmxfdct.c
@@ -12,6 +12,7 @@
/*MMX fDCT implementation for x86_32*/
/*$Id: fdct_ses2.c 14579 2008-03-12 06:42:40Z xiphmont $*/
#include "x86enc.h"
+#include "x86zigzag.h"
#if defined(OC_X86_ASM)
@@ -462,8 +463,9 @@
mm7 = d3 c3 b3 a3*/ \
/*MMX implementation of the fDCT.*/
-void oc_enc_fdct8x8_mmx(ogg_int16_t _y[64],const ogg_int16_t _x[64]){
- ptrdiff_t a;
+void oc_enc_fdct8x8_mmxext(ogg_int16_t _y[64],const ogg_int16_t _x[64]){
+ OC_ALIGN8(ogg_int16_t buf[64]);
+ ptrdiff_t a;
__asm__ __volatile__(
/*Add two extra bits of working precision to improve accuracy; any more and
we could overflow.*/
@@ -586,77 +588,88 @@ void oc_enc_fdct8x8_mmx(ogg_int16_t _y[64],const ogg_int16_t _x[64]){
"movq 0x30(%[y]),%%mm3\n\t"
OC_FDCT_STAGE1_8x4
OC_FDCT8x4("0x00","0x10","0x20","0x30","0x08","0x18","0x28","0x38")
- OC_TRANSPOSE8x4("0x00","0x10","0x20","0x30","0x08","0x18","0x28","0x38")
- /*mm0={-2}x4*/
- "pcmpeqw %%mm0,%%mm0\n\t"
- "paddw %%mm0,%%mm0\n\t"
- /*Round the results.*/
- "psubw %%mm0,%%mm1\n\t"
- "psubw %%mm0,%%mm2\n\t"
- "psraw $2,%%mm1\n\t"
- "psubw %%mm0,%%mm3\n\t"
- "movq %%mm1,0x18(%[y])\n\t"
- "psraw $2,%%mm2\n\t"
- "psubw %%mm0,%%mm4\n\t"
- "movq 0x08(%[y]),%%mm1\n\t"
- "psraw $2,%%mm3\n\t"
- "psubw %%mm0,%%mm5\n\t"
+ /*mm2={-2}x4*/
+ "pcmpeqw %%mm2,%%mm2\n\t"
+ "paddw %%mm2,%%mm2\n\t"
+ /*Round and store the results (no transpose).*/
+ "movq 0x10(%[y]),%%mm7\n\t"
+ "psubw %%mm2,%%mm4\n\t"
+ "psubw %%mm2,%%mm6\n\t"
"psraw $2,%%mm4\n\t"
- "psubw %%mm0,%%mm6\n\t"
- "psraw $2,%%mm5\n\t"
- "psubw %%mm0,%%mm7\n\t"
+ "psubw %%mm2,%%mm0\n\t"
+ "movq %%mm4,"OC_MEM_OFFS(0x00,buf)"\n\t"
+ "movq 0x30(%[y]),%%mm4\n\t"
"psraw $2,%%mm6\n\t"
- "psubw %%mm0,%%mm1\n\t"
+ "psubw %%mm2,%%mm5\n\t"
+ "movq %%mm6,"OC_MEM_OFFS(0x20,buf)"\n\t"
+ "psraw $2,%%mm0\n\t"
+ "psubw %%mm2,%%mm3\n\t"
+ "movq %%mm0,"OC_MEM_OFFS(0x40,buf)"\n\t"
+ "psraw $2,%%mm5\n\t"
+ "psubw %%mm2,%%mm1\n\t"
+ "movq %%mm5,"OC_MEM_OFFS(0x50,buf)"\n\t"
+ "psraw $2,%%mm3\n\t"
+ "psubw %%mm2,%%mm7\n\t"
+ "movq %%mm3,"OC_MEM_OFFS(0x60,buf)"\n\t"
+ "psraw $2,%%mm1\n\t"
+ "psubw %%mm2,%%mm4\n\t"
+ "movq %%mm1,"OC_MEM_OFFS(0x70,buf)"\n\t"
"psraw $2,%%mm7\n\t"
+ "movq %%mm7,"OC_MEM_OFFS(0x10,buf)"\n\t"
+ "psraw $2,%%mm4\n\t"
+ "movq %%mm4,"OC_MEM_OFFS(0x30,buf)"\n\t"
+ /*Load the next block.*/
"movq 0x40(%[y]),%%mm0\n\t"
- "psraw $2,%%mm1\n\t"
- "movq %%mm7,0x30(%[y])\n\t"
"movq 0x78(%[y]),%%mm7\n\t"
- "movq %%mm1,0x08(%[y])\n\t"
"movq 0x50(%[y]),%%mm1\n\t"
- "movq %%mm6,0x20(%[y])\n\t"
"movq 0x68(%[y]),%%mm6\n\t"
- "movq %%mm2,0x28(%[y])\n\t"
"movq 0x60(%[y]),%%mm2\n\t"
- "movq %%mm5,0x10(%[y])\n\t"
"movq 0x58(%[y]),%%mm5\n\t"
- "movq %%mm3,0x38(%[y])\n\t"
"movq 0x70(%[y]),%%mm3\n\t"
- "movq %%mm4,0x00(%[y])\n\t"
"movq 0x48(%[y]),%%mm4\n\t"
OC_FDCT_STAGE1_8x4
OC_FDCT8x4("0x40","0x50","0x60","0x70","0x48","0x58","0x68","0x78")
- OC_TRANSPOSE8x4("0x40","0x50","0x60","0x70","0x48","0x58","0x68","0x78")
- /*mm0={-2}x4*/
- "pcmpeqw %%mm0,%%mm0\n\t"
- "paddw %%mm0,%%mm0\n\t"
- /*Round the results.*/
- "psubw %%mm0,%%mm1\n\t"
- "psubw %%mm0,%%mm2\n\t"
- "psraw $2,%%mm1\n\t"
- "psubw %%mm0,%%mm3\n\t"
- "movq %%mm1,0x58(%[y])\n\t"
- "psraw $2,%%mm2\n\t"
- "psubw %%mm0,%%mm4\n\t"
- "movq 0x48(%[y]),%%mm1\n\t"
- "psraw $2,%%mm3\n\t"
- "psubw %%mm0,%%mm5\n\t"
- "movq %%mm2,0x68(%[y])\n\t"
+ /*mm2={-2}x4*/
+ "pcmpeqw %%mm2,%%mm2\n\t"
+ "paddw %%mm2,%%mm2\n\t"
+ /*Round and store the results (no transpose).*/
+ "movq 0x50(%[y]),%%mm7\n\t"
+ "psubw %%mm2,%%mm4\n\t"
+ "psubw %%mm2,%%mm6\n\t"
"psraw $2,%%mm4\n\t"
- "psubw %%mm0,%%mm6\n\t"
- "movq %%mm3,0x78(%[y])\n\t"
- "psraw $2,%%mm5\n\t"
- "psubw %%mm0,%%mm7\n\t"
- "movq %%mm4,0x40(%[y])\n\t"
+ "psubw %%mm2,%%mm0\n\t"
+ "movq %%mm4,"OC_MEM_OFFS(0x08,buf)"\n\t"
+ "movq 0x70(%[y]),%%mm4\n\t"
"psraw $2,%%mm6\n\t"
- "psubw %%mm0,%%mm1\n\t"
- "movq %%mm5,0x50(%[y])\n\t"
- "psraw $2,%%mm7\n\t"
- "movq %%mm6,0x60(%[y])\n\t"
+ "psubw %%mm2,%%mm5\n\t"
+ "movq %%mm6,"OC_MEM_OFFS(0x28,buf)"\n\t"
+ "psraw $2,%%mm0\n\t"
+ "psubw %%mm2,%%mm3\n\t"
+ "movq %%mm0,"OC_MEM_OFFS(0x48,buf)"\n\t"
+ "psraw $2,%%mm5\n\t"
+ "psubw %%mm2,%%mm1\n\t"
+ "movq %%mm5,"OC_MEM_OFFS(0x58,buf)"\n\t"
+ "psraw $2,%%mm3\n\t"
+ "psubw %%mm2,%%mm7\n\t"
+ "movq %%mm3,"OC_MEM_OFFS(0x68,buf)"\n\t"
"psraw $2,%%mm1\n\t"
- "movq %%mm7,0x70(%[y])\n\t"
- "movq %%mm1,0x48(%[y])\n\t"
- :[a]"=&r"(a)
+ "psubw %%mm2,%%mm4\n\t"
+ "movq %%mm1,"OC_MEM_OFFS(0x78,buf)"\n\t"
+ "psraw $2,%%mm7\n\t"
+ "movq %%mm7,"OC_MEM_OFFS(0x18,buf)"\n\t"
+ "psraw $2,%%mm4\n\t"
+ "movq %%mm4,"OC_MEM_OFFS(0x38,buf)"\n\t"
+ /*Final transpose and zig-zag.*/
+#define OC_ZZ_LOAD_ROW_LO(_row,_reg) \
+ "movq "OC_MEM_OFFS(16*_row,buf)","_reg"\n\t" \
+
+#define OC_ZZ_LOAD_ROW_HI(_row,_reg) \
+ "movq "OC_MEM_OFFS(16*_row+8,buf)","_reg"\n\t" \
+
+ OC_TRANSPOSE_ZIG_ZAG_MMXEXT
+#undef OC_ZZ_LOAD_ROW_LO
+#undef OC_ZZ_LOAD_ROW_HI
+ :[a]"=&r"(a),[buf]"=m"(OC_ARRAY_OPERAND(ogg_int16_t,buf,64))
:[y]"r"(_y),[x]"r"(_x)
:"memory"
);
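In the new store sequence above, the {-2}x4 constant built with pcmpeqw/paddw and the following psubw/psraw $2 pairs drop the two extra bits of working precision with rounding: subtracting -2 adds a bias of 2 before the arithmetic shift. A scalar equivalent of that step (ogg_int16_t is the 16-bit type used throughout the surrounding code):

/*Scalar equivalent of the "psubw {-2}x4, psraw $2" rounding used above.*/
static ogg_int16_t oc_fdct_round_ref(ogg_int16_t _v){
  return (ogg_int16_t)((_v+2)>>2);
}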
diff --git a/thirdparty/libtheora/x86/mmxfrag.c b/thirdparty/libtheora/x86/mmxfrag.c
index 2c732939c3..b3ec508956 100644
--- a/thirdparty/libtheora/x86/mmxfrag.c
+++ b/thirdparty/libtheora/x86/mmxfrag.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: mmxfrag.c 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
@@ -22,17 +22,92 @@
The iteration each instruction belongs to is marked in the comments as #i.*/
#include <stddef.h>
#include "x86int.h"
-#include "mmxfrag.h"
#if defined(OC_X86_ASM)
/*Copies an 8x8 block of pixels from _src to _dst, assuming _ystride bytes
between rows.*/
+# define OC_FRAG_COPY_MMX(_dst,_src,_ystride) \
+ do{ \
+ const unsigned char *src; \
+ unsigned char *dst; \
+ ptrdiff_t ystride3; \
+ src=(_src); \
+ dst=(_dst); \
+ __asm__ __volatile__( \
+ /*src+0*ystride*/ \
+ "movq (%[src]),%%mm0\n\t" \
+ /*src+1*ystride*/ \
+ "movq (%[src],%[ystride]),%%mm1\n\t" \
+ /*ystride3=ystride*3*/ \
+ "lea (%[ystride],%[ystride],2),%[ystride3]\n\t" \
+ /*src+2*ystride*/ \
+ "movq (%[src],%[ystride],2),%%mm2\n\t" \
+ /*src+3*ystride*/ \
+ "movq (%[src],%[ystride3]),%%mm3\n\t" \
+ /*dst+0*ystride*/ \
+ "movq %%mm0,(%[dst])\n\t" \
+ /*dst+1*ystride*/ \
+ "movq %%mm1,(%[dst],%[ystride])\n\t" \
+ /*Pointer to next 4.*/ \
+ "lea (%[src],%[ystride],4),%[src]\n\t" \
+ /*dst+2*ystride*/ \
+ "movq %%mm2,(%[dst],%[ystride],2)\n\t" \
+ /*dst+3*ystride*/ \
+ "movq %%mm3,(%[dst],%[ystride3])\n\t" \
+ /*Pointer to next 4.*/ \
+ "lea (%[dst],%[ystride],4),%[dst]\n\t" \
+ /*src+0*ystride*/ \
+ "movq (%[src]),%%mm0\n\t" \
+ /*src+1*ystride*/ \
+ "movq (%[src],%[ystride]),%%mm1\n\t" \
+ /*src+2*ystride*/ \
+ "movq (%[src],%[ystride],2),%%mm2\n\t" \
+ /*src+3*ystride*/ \
+ "movq (%[src],%[ystride3]),%%mm3\n\t" \
+ /*dst+0*ystride*/ \
+ "movq %%mm0,(%[dst])\n\t" \
+ /*dst+1*ystride*/ \
+ "movq %%mm1,(%[dst],%[ystride])\n\t" \
+ /*dst+2*ystride*/ \
+ "movq %%mm2,(%[dst],%[ystride],2)\n\t" \
+ /*dst+3*ystride*/ \
+ "movq %%mm3,(%[dst],%[ystride3])\n\t" \
+ :[dst]"+r"(dst),[src]"+r"(src),[ystride3]"=&r"(ystride3) \
+ :[ystride]"r"((ptrdiff_t)(_ystride)) \
+ :"memory" \
+ ); \
+ } \
+ while(0)
+
+/*Copies an 8x8 block of pixels from _src to _dst, assuming _ystride bytes
+ between rows.*/
void oc_frag_copy_mmx(unsigned char *_dst,
const unsigned char *_src,int _ystride){
OC_FRAG_COPY_MMX(_dst,_src,_ystride);
}
+/*Copies the fragments specified by the lists of fragment indices from one
+ frame to another.
+ _dst_frame: The reference frame to copy to.
+ _src_frame: The reference frame to copy from.
+ _ystride: The row stride of the reference frames.
+ _fragis: A pointer to a list of fragment indices.
+ _nfragis: The number of fragment indices to copy.
+ _frag_buf_offs: The offsets of fragments in the reference frames.*/
+void oc_frag_copy_list_mmx(unsigned char *_dst_frame,
+ const unsigned char *_src_frame,int _ystride,
+ const ptrdiff_t *_fragis,ptrdiff_t _nfragis,const ptrdiff_t *_frag_buf_offs){
+ ptrdiff_t fragii;
+ for(fragii=0;fragii<_nfragis;fragii++){
+ ptrdiff_t frag_buf_off;
+ frag_buf_off=_frag_buf_offs[_fragis[fragii]];
+ OC_FRAG_COPY_MMX(_dst_frame+frag_buf_off,
+ _src_frame+frag_buf_off,_ystride);
+ }
+}
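oc_frag_copy_list_mmx() walks the fragment index list and copies each 8x8 fragment with the inlined MMX kernel. A plain-C reference of the same loop, with memcpy standing in for OC_FRAG_COPY_MMX (a sketch for checking the offset arithmetic, not the shipped code):

#include <stddef.h>
#include <string.h>
/*Scalar reference: copy each listed 8x8 fragment one row at a time.*/
static void oc_frag_copy_list_ref(unsigned char *_dst_frame,
 const unsigned char *_src_frame,int _ystride,
 const ptrdiff_t *_fragis,ptrdiff_t _nfragis,const ptrdiff_t *_frag_buf_offs){
  ptrdiff_t fragii;
  for(fragii=0;fragii<_nfragis;fragii++){
    ptrdiff_t frag_buf_off;
    int       i;
    frag_buf_off=_frag_buf_offs[_fragis[fragii]];
    for(i=0;i<8;i++){
      memcpy(_dst_frame+frag_buf_off+i*_ystride,
       _src_frame+frag_buf_off+i*_ystride,8);
    }
  }
}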
+
+
void oc_frag_recon_intra_mmx(unsigned char *_dst,int _ystride,
const ogg_int16_t *_residue){
__asm__ __volatile__(
@@ -280,7 +355,7 @@ void oc_frag_recon_inter2_mmx(unsigned char *_dst,const unsigned char *_src1,
/*Advance dest ptr.*/
"lea (%[dst],%[ystride],2),%[dst]\n\t"
:[dst]"+r"(_dst),[residue]"+r"(_residue),
- [src1]"+%r"(_src1),[src2]"+r"(_src2)
+ [src1]"+r"(_src1),[src2]"+r"(_src2)
:[ystride]"r"((ptrdiff_t)_ystride)
:"memory"
);
diff --git a/thirdparty/libtheora/x86/mmxfrag.h b/thirdparty/libtheora/x86/mmxfrag.h
deleted file mode 100644
index a398427629..0000000000
--- a/thirdparty/libtheora/x86/mmxfrag.h
+++ /dev/null
@@ -1,64 +0,0 @@
-#if !defined(_x86_mmxfrag_H)
-# define _x86_mmxfrag_H (1)
-# include <stddef.h>
-# include "x86int.h"
-
-#if defined(OC_X86_ASM)
-
-/*Copies an 8x8 block of pixels from _src to _dst, assuming _ystride bytes
- between rows.*/
-#define OC_FRAG_COPY_MMX(_dst,_src,_ystride) \
- do{ \
- const unsigned char *src; \
- unsigned char *dst; \
- ptrdiff_t ystride3; \
- src=(_src); \
- dst=(_dst); \
- __asm__ __volatile__( \
- /*src+0*ystride*/ \
- "movq (%[src]),%%mm0\n\t" \
- /*src+1*ystride*/ \
- "movq (%[src],%[ystride]),%%mm1\n\t" \
- /*ystride3=ystride*3*/ \
- "lea (%[ystride],%[ystride],2),%[ystride3]\n\t" \
- /*src+2*ystride*/ \
- "movq (%[src],%[ystride],2),%%mm2\n\t" \
- /*src+3*ystride*/ \
- "movq (%[src],%[ystride3]),%%mm3\n\t" \
- /*dst+0*ystride*/ \
- "movq %%mm0,(%[dst])\n\t" \
- /*dst+1*ystride*/ \
- "movq %%mm1,(%[dst],%[ystride])\n\t" \
- /*Pointer to next 4.*/ \
- "lea (%[src],%[ystride],4),%[src]\n\t" \
- /*dst+2*ystride*/ \
- "movq %%mm2,(%[dst],%[ystride],2)\n\t" \
- /*dst+3*ystride*/ \
- "movq %%mm3,(%[dst],%[ystride3])\n\t" \
- /*Pointer to next 4.*/ \
- "lea (%[dst],%[ystride],4),%[dst]\n\t" \
- /*src+0*ystride*/ \
- "movq (%[src]),%%mm0\n\t" \
- /*src+1*ystride*/ \
- "movq (%[src],%[ystride]),%%mm1\n\t" \
- /*src+2*ystride*/ \
- "movq (%[src],%[ystride],2),%%mm2\n\t" \
- /*src+3*ystride*/ \
- "movq (%[src],%[ystride3]),%%mm3\n\t" \
- /*dst+0*ystride*/ \
- "movq %%mm0,(%[dst])\n\t" \
- /*dst+1*ystride*/ \
- "movq %%mm1,(%[dst],%[ystride])\n\t" \
- /*dst+2*ystride*/ \
- "movq %%mm2,(%[dst],%[ystride],2)\n\t" \
- /*dst+3*ystride*/ \
- "movq %%mm3,(%[dst],%[ystride3])\n\t" \
- :[dst]"+r"(dst),[src]"+r"(src),[ystride3]"=&r"(ystride3) \
- :[ystride]"r"((ptrdiff_t)(_ystride)) \
- :"memory" \
- ); \
- } \
- while(0)
-
-# endif
-#endif
diff --git a/thirdparty/libtheora/x86/mmxidct.c b/thirdparty/libtheora/x86/mmxidct.c
index 76424e6364..b8e3077066 100644
--- a/thirdparty/libtheora/x86/mmxidct.c
+++ b/thirdparty/libtheora/x86/mmxidct.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: mmxidct.c 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
@@ -30,89 +30,66 @@
-/*A table of constants used by the MMX routines.*/
-static const ogg_uint16_t __attribute__((aligned(8),used))
- OC_IDCT_CONSTS[(7+1)*4]={
- (ogg_uint16_t)OC_C1S7,(ogg_uint16_t)OC_C1S7,
- (ogg_uint16_t)OC_C1S7,(ogg_uint16_t)OC_C1S7,
- (ogg_uint16_t)OC_C2S6,(ogg_uint16_t)OC_C2S6,
- (ogg_uint16_t)OC_C2S6,(ogg_uint16_t)OC_C2S6,
- (ogg_uint16_t)OC_C3S5,(ogg_uint16_t)OC_C3S5,
- (ogg_uint16_t)OC_C3S5,(ogg_uint16_t)OC_C3S5,
- (ogg_uint16_t)OC_C4S4,(ogg_uint16_t)OC_C4S4,
- (ogg_uint16_t)OC_C4S4,(ogg_uint16_t)OC_C4S4,
- (ogg_uint16_t)OC_C5S3,(ogg_uint16_t)OC_C5S3,
- (ogg_uint16_t)OC_C5S3,(ogg_uint16_t)OC_C5S3,
- (ogg_uint16_t)OC_C6S2,(ogg_uint16_t)OC_C6S2,
- (ogg_uint16_t)OC_C6S2,(ogg_uint16_t)OC_C6S2,
- (ogg_uint16_t)OC_C7S1,(ogg_uint16_t)OC_C7S1,
- (ogg_uint16_t)OC_C7S1,(ogg_uint16_t)OC_C7S1,
- 8, 8, 8, 8
-};
-
-/*Converts the expression in the argument to a string.*/
-#define OC_M2STR(_s) #_s
-
/*38 cycles*/
-#define OC_IDCT_BEGIN \
+#define OC_IDCT_BEGIN(_y,_x) \
"#OC_IDCT_BEGIN\n\t" \
- "movq "OC_I(3)",%%mm2\n\t" \
- "movq "OC_C(3)",%%mm6\n\t" \
+ "movq "OC_I(3,_x)",%%mm2\n\t" \
+ "movq "OC_MEM_OFFS(0x30,c)",%%mm6\n\t" \
"movq %%mm2,%%mm4\n\t" \
- "movq "OC_J(5)",%%mm7\n\t" \
+ "movq "OC_J(5,_x)",%%mm7\n\t" \
"pmulhw %%mm6,%%mm4\n\t" \
- "movq "OC_C(5)",%%mm1\n\t" \
+ "movq "OC_MEM_OFFS(0x50,c)",%%mm1\n\t" \
"pmulhw %%mm7,%%mm6\n\t" \
"movq %%mm1,%%mm5\n\t" \
"pmulhw %%mm2,%%mm1\n\t" \
- "movq "OC_I(1)",%%mm3\n\t" \
+ "movq "OC_I(1,_x)",%%mm3\n\t" \
"pmulhw %%mm7,%%mm5\n\t" \
- "movq "OC_C(1)",%%mm0\n\t" \
+ "movq "OC_MEM_OFFS(0x10,c)",%%mm0\n\t" \
"paddw %%mm2,%%mm4\n\t" \
"paddw %%mm7,%%mm6\n\t" \
"paddw %%mm1,%%mm2\n\t" \
- "movq "OC_J(7)",%%mm1\n\t" \
+ "movq "OC_J(7,_x)",%%mm1\n\t" \
"paddw %%mm5,%%mm7\n\t" \
"movq %%mm0,%%mm5\n\t" \
"pmulhw %%mm3,%%mm0\n\t" \
"paddw %%mm7,%%mm4\n\t" \
"pmulhw %%mm1,%%mm5\n\t" \
- "movq "OC_C(7)",%%mm7\n\t" \
+ "movq "OC_MEM_OFFS(0x70,c)",%%mm7\n\t" \
"psubw %%mm2,%%mm6\n\t" \
"paddw %%mm3,%%mm0\n\t" \
"pmulhw %%mm7,%%mm3\n\t" \
- "movq "OC_I(2)",%%mm2\n\t" \
+ "movq "OC_I(2,_x)",%%mm2\n\t" \
"pmulhw %%mm1,%%mm7\n\t" \
"paddw %%mm1,%%mm5\n\t" \
"movq %%mm2,%%mm1\n\t" \
- "pmulhw "OC_C(2)",%%mm2\n\t" \
+ "pmulhw "OC_MEM_OFFS(0x20,c)",%%mm2\n\t" \
"psubw %%mm5,%%mm3\n\t" \
- "movq "OC_J(6)",%%mm5\n\t" \
+ "movq "OC_J(6,_x)",%%mm5\n\t" \
"paddw %%mm7,%%mm0\n\t" \
"movq %%mm5,%%mm7\n\t" \
"psubw %%mm4,%%mm0\n\t" \
- "pmulhw "OC_C(2)",%%mm5\n\t" \
+ "pmulhw "OC_MEM_OFFS(0x20,c)",%%mm5\n\t" \
"paddw %%mm1,%%mm2\n\t" \
- "pmulhw "OC_C(6)",%%mm1\n\t" \
+ "pmulhw "OC_MEM_OFFS(0x60,c)",%%mm1\n\t" \
"paddw %%mm4,%%mm4\n\t" \
"paddw %%mm0,%%mm4\n\t" \
"psubw %%mm6,%%mm3\n\t" \
"paddw %%mm7,%%mm5\n\t" \
"paddw %%mm6,%%mm6\n\t" \
- "pmulhw "OC_C(6)",%%mm7\n\t" \
+ "pmulhw "OC_MEM_OFFS(0x60,c)",%%mm7\n\t" \
"paddw %%mm3,%%mm6\n\t" \
- "movq %%mm4,"OC_I(1)"\n\t" \
+ "movq %%mm4,"OC_I(1,_y)"\n\t" \
"psubw %%mm5,%%mm1\n\t" \
- "movq "OC_C(4)",%%mm4\n\t" \
+ "movq "OC_MEM_OFFS(0x40,c)",%%mm4\n\t" \
"movq %%mm3,%%mm5\n\t" \
"pmulhw %%mm4,%%mm3\n\t" \
"paddw %%mm2,%%mm7\n\t" \
- "movq %%mm6,"OC_I(2)"\n\t" \
+ "movq %%mm6,"OC_I(2,_y)"\n\t" \
"movq %%mm0,%%mm2\n\t" \
- "movq "OC_I(0)",%%mm6\n\t" \
+ "movq "OC_I(0,_x)",%%mm6\n\t" \
"pmulhw %%mm4,%%mm0\n\t" \
"paddw %%mm3,%%mm5\n\t" \
- "movq "OC_J(4)",%%mm3\n\t" \
+ "movq "OC_J(4,_x)",%%mm3\n\t" \
"psubw %%mm1,%%mm5\n\t" \
"paddw %%mm0,%%mm2\n\t" \
"psubw %%mm3,%%mm6\n\t" \
@@ -126,18 +103,18 @@ static const ogg_uint16_t __attribute__((aligned(8),used))
"paddw %%mm0,%%mm6\n\t" \
"psubw %%mm2,%%mm6\n\t" \
"paddw %%mm2,%%mm2\n\t" \
- "movq "OC_I(1)",%%mm0\n\t" \
+ "movq "OC_I(1,_y)",%%mm0\n\t" \
"paddw %%mm6,%%mm2\n\t" \
"paddw %%mm3,%%mm4\n\t" \
"psubw %%mm1,%%mm2\n\t" \
"#end OC_IDCT_BEGIN\n\t" \
/*38+8=46 cycles.*/
-#define OC_ROW_IDCT \
+#define OC_ROW_IDCT(_y,_x) \
"#OC_ROW_IDCT\n" \
- OC_IDCT_BEGIN \
+ OC_IDCT_BEGIN(_y,_x) \
/*r3=D'*/ \
- "movq "OC_I(2)",%%mm3\n\t" \
+ "movq "OC_I(2,_y)",%%mm3\n\t" \
/*r4=E'=E-G*/ \
"psubw %%mm7,%%mm4\n\t" \
/*r1=H'+H'*/ \
@@ -162,7 +139,7 @@ static const ogg_uint16_t __attribute__((aligned(8),used))
"psubw %%mm0,%%mm7\n\t" \
"paddw %%mm0,%%mm0\n\t" \
/*Save R1.*/ \
- "movq %%mm1,"OC_I(1)"\n\t" \
+ "movq %%mm1,"OC_I(1,_y)"\n\t" \
/*r0=R0=G.+C.*/ \
"paddw %%mm7,%%mm0\n\t" \
"#end OC_ROW_IDCT\n\t" \
@@ -195,11 +172,11 @@ static const ogg_uint16_t __attribute__((aligned(8),used))
Since r1 is free at entry, we calculate the Js first.*/
/*19 cycles.*/
-#define OC_TRANSPOSE \
+#define OC_TRANSPOSE(_y) \
"#OC_TRANSPOSE\n\t" \
"movq %%mm4,%%mm1\n\t" \
"punpcklwd %%mm5,%%mm4\n\t" \
- "movq %%mm0,"OC_I(0)"\n\t" \
+ "movq %%mm0,"OC_I(0,_y)"\n\t" \
"punpckhwd %%mm5,%%mm1\n\t" \
"movq %%mm6,%%mm0\n\t" \
"punpcklwd %%mm7,%%mm6\n\t" \
@@ -207,17 +184,17 @@ static const ogg_uint16_t __attribute__((aligned(8),used))
"punpckldq %%mm6,%%mm4\n\t" \
"punpckhdq %%mm6,%%mm5\n\t" \
"movq %%mm1,%%mm6\n\t" \
- "movq %%mm4,"OC_J(4)"\n\t" \
+ "movq %%mm4,"OC_J(4,_y)"\n\t" \
"punpckhwd %%mm7,%%mm0\n\t" \
- "movq %%mm5,"OC_J(5)"\n\t" \
+ "movq %%mm5,"OC_J(5,_y)"\n\t" \
"punpckhdq %%mm0,%%mm6\n\t" \
- "movq "OC_I(0)",%%mm4\n\t" \
+ "movq "OC_I(0,_y)",%%mm4\n\t" \
"punpckldq %%mm0,%%mm1\n\t" \
- "movq "OC_I(1)",%%mm5\n\t" \
+ "movq "OC_I(1,_y)",%%mm5\n\t" \
"movq %%mm4,%%mm0\n\t" \
- "movq %%mm6,"OC_J(7)"\n\t" \
+ "movq %%mm6,"OC_J(7,_y)"\n\t" \
"punpcklwd %%mm5,%%mm0\n\t" \
- "movq %%mm1,"OC_J(6)"\n\t" \
+ "movq %%mm1,"OC_J(6,_y)"\n\t" \
"punpckhwd %%mm5,%%mm4\n\t" \
"movq %%mm2,%%mm5\n\t" \
"punpcklwd %%mm3,%%mm2\n\t" \
@@ -225,20 +202,20 @@ static const ogg_uint16_t __attribute__((aligned(8),used))
"punpckldq %%mm2,%%mm0\n\t" \
"punpckhdq %%mm2,%%mm1\n\t" \
"movq %%mm4,%%mm2\n\t" \
- "movq %%mm0,"OC_I(0)"\n\t" \
+ "movq %%mm0,"OC_I(0,_y)"\n\t" \
"punpckhwd %%mm3,%%mm5\n\t" \
- "movq %%mm1,"OC_I(1)"\n\t" \
+ "movq %%mm1,"OC_I(1,_y)"\n\t" \
"punpckhdq %%mm5,%%mm4\n\t" \
"punpckldq %%mm5,%%mm2\n\t" \
- "movq %%mm4,"OC_I(3)"\n\t" \
- "movq %%mm2,"OC_I(2)"\n\t" \
+ "movq %%mm4,"OC_I(3,_y)"\n\t" \
+ "movq %%mm2,"OC_I(2,_y)"\n\t" \
"#end OC_TRANSPOSE\n\t" \
/*38+19=57 cycles.*/
-#define OC_COLUMN_IDCT \
+#define OC_COLUMN_IDCT(_y) \
"#OC_COLUMN_IDCT\n" \
- OC_IDCT_BEGIN \
- "paddw "OC_8",%%mm2\n\t" \
+ OC_IDCT_BEGIN(_y,_y) \
+ "paddw "OC_MEM_OFFS(0x00,c)",%%mm2\n\t" \
/*r1=H'+H'*/ \
"paddw %%mm1,%%mm1\n\t" \
/*r1=R1=A''+H'*/ \
@@ -250,18 +227,18 @@ static const ogg_uint16_t __attribute__((aligned(8),used))
/*r1=NR1*/ \
"psraw $4,%%mm1\n\t" \
/*r3=D'*/ \
- "movq "OC_I(2)",%%mm3\n\t" \
+ "movq "OC_I(2,_y)",%%mm3\n\t" \
/*r7=G+G*/ \
"paddw %%mm7,%%mm7\n\t" \
/*Store NR2 at I(2).*/ \
- "movq %%mm2,"OC_I(2)"\n\t" \
+ "movq %%mm2,"OC_I(2,_y)"\n\t" \
/*r7=G'=E+G*/ \
"paddw %%mm4,%%mm7\n\t" \
/*Store NR1 at I(1).*/ \
- "movq %%mm1,"OC_I(1)"\n\t" \
+ "movq %%mm1,"OC_I(1,_y)"\n\t" \
/*r4=R4=E'-D'*/ \
"psubw %%mm3,%%mm4\n\t" \
- "paddw "OC_8",%%mm4\n\t" \
+ "paddw "OC_MEM_OFFS(0x00,c)",%%mm4\n\t" \
/*r3=D'+D'*/ \
"paddw %%mm3,%%mm3\n\t" \
/*r3=R3=E'+D'*/ \
@@ -272,7 +249,7 @@ static const ogg_uint16_t __attribute__((aligned(8),used))
"psubw %%mm5,%%mm6\n\t" \
/*r3=NR3*/ \
"psraw $4,%%mm3\n\t" \
- "paddw "OC_8",%%mm6\n\t" \
+ "paddw "OC_MEM_OFFS(0x00,c)",%%mm6\n\t" \
/*r5=B''+B''*/ \
"paddw %%mm5,%%mm5\n\t" \
/*r5=R5=F'+B''*/ \
@@ -280,14 +257,14 @@ static const ogg_uint16_t __attribute__((aligned(8),used))
/*r6=NR6*/ \
"psraw $4,%%mm6\n\t" \
/*Store NR4 at J(4).*/ \
- "movq %%mm4,"OC_J(4)"\n\t" \
+ "movq %%mm4,"OC_J(4,_y)"\n\t" \
/*r5=NR5*/ \
"psraw $4,%%mm5\n\t" \
/*Store NR3 at I(3).*/ \
- "movq %%mm3,"OC_I(3)"\n\t" \
+ "movq %%mm3,"OC_I(3,_y)"\n\t" \
/*r7=R7=G'-C'*/ \
"psubw %%mm0,%%mm7\n\t" \
- "paddw "OC_8",%%mm7\n\t" \
+ "paddw "OC_MEM_OFFS(0x00,c)",%%mm7\n\t" \
/*r0=C'+C'*/ \
"paddw %%mm0,%%mm0\n\t" \
/*r0=R0=G'+C'*/ \
@@ -295,113 +272,121 @@ static const ogg_uint16_t __attribute__((aligned(8),used))
/*r7=NR7*/ \
"psraw $4,%%mm7\n\t" \
/*Store NR6 at J(6).*/ \
- "movq %%mm6,"OC_J(6)"\n\t" \
+ "movq %%mm6,"OC_J(6,_y)"\n\t" \
/*r0=NR0*/ \
"psraw $4,%%mm0\n\t" \
/*Store NR5 at J(5).*/ \
- "movq %%mm5,"OC_J(5)"\n\t" \
+ "movq %%mm5,"OC_J(5,_y)"\n\t" \
/*Store NR7 at J(7).*/ \
- "movq %%mm7,"OC_J(7)"\n\t" \
+ "movq %%mm7,"OC_J(7,_y)"\n\t" \
/*Store NR0 at I(0).*/ \
- "movq %%mm0,"OC_I(0)"\n\t" \
+ "movq %%mm0,"OC_I(0,_y)"\n\t" \
"#end OC_COLUMN_IDCT\n\t" \
-#define OC_MID(_m,_i) OC_M2STR(_m+(_i)*8)"(%[c])"
-#define OC_C(_i) OC_MID(OC_COSINE_OFFSET,_i-1)
-#define OC_8 OC_MID(OC_EIGHT_OFFSET,0)
-
-static void oc_idct8x8_slow(ogg_int16_t _y[64]){
+static void oc_idct8x8_slow_mmx(ogg_int16_t _y[64],ogg_int16_t _x[64]){
+ int i;
/*This routine accepts an 8x8 matrix, but in partially transposed form.
Every 4x4 block is transposed.*/
__asm__ __volatile__(
-#define OC_I(_k) OC_M2STR((_k*16))"(%[y])"
-#define OC_J(_k) OC_M2STR(((_k-4)*16)+8)"(%[y])"
- OC_ROW_IDCT
- OC_TRANSPOSE
+#define OC_I(_k,_y) OC_MEM_OFFS((_k)*16,_y)
+#define OC_J(_k,_y) OC_MEM_OFFS(((_k)-4)*16+8,_y)
+ OC_ROW_IDCT(y,x)
+ OC_TRANSPOSE(y)
#undef OC_I
#undef OC_J
-#define OC_I(_k) OC_M2STR((_k*16)+64)"(%[y])"
-#define OC_J(_k) OC_M2STR(((_k-4)*16)+72)"(%[y])"
- OC_ROW_IDCT
- OC_TRANSPOSE
+#define OC_I(_k,_y) OC_MEM_OFFS((_k)*16+64,_y)
+#define OC_J(_k,_y) OC_MEM_OFFS(((_k)-4)*16+72,_y)
+ OC_ROW_IDCT(y,x)
+ OC_TRANSPOSE(y)
#undef OC_I
#undef OC_J
-#define OC_I(_k) OC_M2STR((_k*16))"(%[y])"
-#define OC_J(_k) OC_I(_k)
- OC_COLUMN_IDCT
+#define OC_I(_k,_y) OC_MEM_OFFS((_k)*16,_y)
+#define OC_J(_k,_y) OC_I(_k,_y)
+ OC_COLUMN_IDCT(y)
#undef OC_I
#undef OC_J
-#define OC_I(_k) OC_M2STR((_k*16)+8)"(%[y])"
-#define OC_J(_k) OC_I(_k)
- OC_COLUMN_IDCT
+#define OC_I(_k,_y) OC_MEM_OFFS((_k)*16+8,_y)
+#define OC_J(_k,_y) OC_I(_k,_y)
+ OC_COLUMN_IDCT(y)
#undef OC_I
#undef OC_J
- :
- :[y]"r"(_y),[c]"r"(OC_IDCT_CONSTS)
+ :[y]"=m"OC_ARRAY_OPERAND(ogg_int16_t,_y,64)
+ :[x]"m"OC_CONST_ARRAY_OPERAND(ogg_int16_t,_x,64),
+ [c]"m"OC_CONST_ARRAY_OPERAND(ogg_int16_t,OC_IDCT_CONSTS,128)
);
+ __asm__ __volatile__("pxor %%mm0,%%mm0\n\t"::);
+ for(i=0;i<4;i++){
+ __asm__ __volatile__(
+ "movq %%mm0,"OC_MEM_OFFS(0x00,x)"\n\t"
+ "movq %%mm0,"OC_MEM_OFFS(0x08,x)"\n\t"
+ "movq %%mm0,"OC_MEM_OFFS(0x10,x)"\n\t"
+ "movq %%mm0,"OC_MEM_OFFS(0x18,x)"\n\t"
+ :[x]"=m"OC_ARRAY_OPERAND(ogg_int16_t,_x+16*i,16)
+ );
+ }
}
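Note the new contract of oc_idct8x8_slow_mmx(): it reads the coefficients from _x, writes the result to _y, and then clears _x in four 16-word chunks so the caller gets back a zeroed coefficient block. A scalar equivalent of that final clearing step (assuming the same 64-coefficient layout):

#include <string.h>
/*Scalar equivalent of the MMX clearing loop above.*/
static void oc_idct_clear_input_ref(ogg_int16_t _x[64]){
  memset(_x,0,64*sizeof(*_x));
}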
/*25 cycles.*/
-#define OC_IDCT_BEGIN_10 \
+#define OC_IDCT_BEGIN_10(_y,_x) \
"#OC_IDCT_BEGIN_10\n\t" \
- "movq "OC_I(3)",%%mm2\n\t" \
+ "movq "OC_I(3,_x)",%%mm2\n\t" \
"nop\n\t" \
- "movq "OC_C(3)",%%mm6\n\t" \
+ "movq "OC_MEM_OFFS(0x30,c)",%%mm6\n\t" \
"movq %%mm2,%%mm4\n\t" \
- "movq "OC_C(5)",%%mm1\n\t" \
+ "movq "OC_MEM_OFFS(0x50,c)",%%mm1\n\t" \
"pmulhw %%mm6,%%mm4\n\t" \
- "movq "OC_I(1)",%%mm3\n\t" \
+ "movq "OC_I(1,_x)",%%mm3\n\t" \
"pmulhw %%mm2,%%mm1\n\t" \
- "movq "OC_C(1)",%%mm0\n\t" \
+ "movq "OC_MEM_OFFS(0x10,c)",%%mm0\n\t" \
"paddw %%mm2,%%mm4\n\t" \
"pxor %%mm6,%%mm6\n\t" \
"paddw %%mm1,%%mm2\n\t" \
- "movq "OC_I(2)",%%mm5\n\t" \
+ "movq "OC_I(2,_x)",%%mm5\n\t" \
"pmulhw %%mm3,%%mm0\n\t" \
"movq %%mm5,%%mm1\n\t" \
"paddw %%mm3,%%mm0\n\t" \
- "pmulhw "OC_C(7)",%%mm3\n\t" \
+ "pmulhw "OC_MEM_OFFS(0x70,c)",%%mm3\n\t" \
"psubw %%mm2,%%mm6\n\t" \
- "pmulhw "OC_C(2)",%%mm5\n\t" \
+ "pmulhw "OC_MEM_OFFS(0x20,c)",%%mm5\n\t" \
"psubw %%mm4,%%mm0\n\t" \
- "movq "OC_I(2)",%%mm7\n\t" \
+ "movq "OC_I(2,_x)",%%mm7\n\t" \
"paddw %%mm4,%%mm4\n\t" \
"paddw %%mm5,%%mm7\n\t" \
"paddw %%mm0,%%mm4\n\t" \
- "pmulhw "OC_C(6)",%%mm1\n\t" \
+ "pmulhw "OC_MEM_OFFS(0x60,c)",%%mm1\n\t" \
"psubw %%mm6,%%mm3\n\t" \
- "movq %%mm4,"OC_I(1)"\n\t" \
+ "movq %%mm4,"OC_I(1,_y)"\n\t" \
"paddw %%mm6,%%mm6\n\t" \
- "movq "OC_C(4)",%%mm4\n\t" \
+ "movq "OC_MEM_OFFS(0x40,c)",%%mm4\n\t" \
"paddw %%mm3,%%mm6\n\t" \
"movq %%mm3,%%mm5\n\t" \
"pmulhw %%mm4,%%mm3\n\t" \
- "movq %%mm6,"OC_I(2)"\n\t" \
+ "movq %%mm6,"OC_I(2,_y)"\n\t" \
"movq %%mm0,%%mm2\n\t" \
- "movq "OC_I(0)",%%mm6\n\t" \
+ "movq "OC_I(0,_x)",%%mm6\n\t" \
"pmulhw %%mm4,%%mm0\n\t" \
"paddw %%mm3,%%mm5\n\t" \
"paddw %%mm0,%%mm2\n\t" \
"psubw %%mm1,%%mm5\n\t" \
"pmulhw %%mm4,%%mm6\n\t" \
- "paddw "OC_I(0)",%%mm6\n\t" \
+ "paddw "OC_I(0,_x)",%%mm6\n\t" \
"paddw %%mm1,%%mm1\n\t" \
"movq %%mm6,%%mm4\n\t" \
"paddw %%mm5,%%mm1\n\t" \
"psubw %%mm2,%%mm6\n\t" \
"paddw %%mm2,%%mm2\n\t" \
- "movq "OC_I(1)",%%mm0\n\t" \
+ "movq "OC_I(1,_y)",%%mm0\n\t" \
"paddw %%mm6,%%mm2\n\t" \
"psubw %%mm1,%%mm2\n\t" \
"nop\n\t" \
"#end OC_IDCT_BEGIN_10\n\t" \
/*25+8=33 cycles.*/
-#define OC_ROW_IDCT_10 \
+#define OC_ROW_IDCT_10(_y,_x) \
"#OC_ROW_IDCT_10\n\t" \
- OC_IDCT_BEGIN_10 \
+ OC_IDCT_BEGIN_10(_y,_x) \
/*r3=D'*/ \
- "movq "OC_I(2)",%%mm3\n\t" \
+ "movq "OC_I(2,_y)",%%mm3\n\t" \
/*r4=E'=E-G*/ \
"psubw %%mm7,%%mm4\n\t" \
/*r1=H'+H'*/ \
@@ -426,16 +411,16 @@ static void oc_idct8x8_slow(ogg_int16_t _y[64]){
"psubw %%mm0,%%mm7\n\t" \
"paddw %%mm0,%%mm0\n\t" \
/*Save R1.*/ \
- "movq %%mm1,"OC_I(1)"\n\t" \
+ "movq %%mm1,"OC_I(1,_y)"\n\t" \
/*r0=R0=G'+C'*/ \
"paddw %%mm7,%%mm0\n\t" \
"#end OC_ROW_IDCT_10\n\t" \
/*25+19=44 cycles'*/
-#define OC_COLUMN_IDCT_10 \
+#define OC_COLUMN_IDCT_10(_y) \
"#OC_COLUMN_IDCT_10\n\t" \
- OC_IDCT_BEGIN_10 \
- "paddw "OC_8",%%mm2\n\t" \
+ OC_IDCT_BEGIN_10(_y,_y) \
+ "paddw "OC_MEM_OFFS(0x00,c)",%%mm2\n\t" \
/*r1=H'+H'*/ \
"paddw %%mm1,%%mm1\n\t" \
/*r1=R1=A''+H'*/ \
@@ -447,18 +432,18 @@ static void oc_idct8x8_slow(ogg_int16_t _y[64]){
/*r1=NR1*/ \
"psraw $4,%%mm1\n\t" \
/*r3=D'*/ \
- "movq "OC_I(2)",%%mm3\n\t" \
+ "movq "OC_I(2,_y)",%%mm3\n\t" \
/*r7=G+G*/ \
"paddw %%mm7,%%mm7\n\t" \
/*Store NR2 at I(2).*/ \
- "movq %%mm2,"OC_I(2)"\n\t" \
+ "movq %%mm2,"OC_I(2,_y)"\n\t" \
/*r7=G'=E+G*/ \
"paddw %%mm4,%%mm7\n\t" \
/*Store NR1 at I(1).*/ \
- "movq %%mm1,"OC_I(1)"\n\t" \
+ "movq %%mm1,"OC_I(1,_y)"\n\t" \
/*r4=R4=E'-D'*/ \
"psubw %%mm3,%%mm4\n\t" \
- "paddw "OC_8",%%mm4\n\t" \
+ "paddw "OC_MEM_OFFS(0x00,c)",%%mm4\n\t" \
/*r3=D'+D'*/ \
"paddw %%mm3,%%mm3\n\t" \
/*r3=R3=E'+D'*/ \
@@ -469,7 +454,7 @@ static void oc_idct8x8_slow(ogg_int16_t _y[64]){
"psubw %%mm5,%%mm6\n\t" \
/*r3=NR3*/ \
"psraw $4,%%mm3\n\t" \
- "paddw "OC_8",%%mm6\n\t" \
+ "paddw "OC_MEM_OFFS(0x00,c)",%%mm6\n\t" \
/*r5=B''+B''*/ \
"paddw %%mm5,%%mm5\n\t" \
/*r5=R5=F'+B''*/ \
@@ -477,14 +462,14 @@ static void oc_idct8x8_slow(ogg_int16_t _y[64]){
/*r6=NR6*/ \
"psraw $4,%%mm6\n\t" \
/*Store NR4 at J(4).*/ \
- "movq %%mm4,"OC_J(4)"\n\t" \
+ "movq %%mm4,"OC_J(4,_y)"\n\t" \
/*r5=NR5*/ \
"psraw $4,%%mm5\n\t" \
/*Store NR3 at I(3).*/ \
- "movq %%mm3,"OC_I(3)"\n\t" \
+ "movq %%mm3,"OC_I(3,_y)"\n\t" \
/*r7=R7=G'-C'*/ \
"psubw %%mm0,%%mm7\n\t" \
- "paddw "OC_8",%%mm7\n\t" \
+ "paddw "OC_MEM_OFFS(0x00,c)",%%mm7\n\t" \
/*r0=C'+C'*/ \
"paddw %%mm0,%%mm0\n\t" \
/*r0=R0=G'+C'*/ \
@@ -492,46 +477,55 @@ static void oc_idct8x8_slow(ogg_int16_t _y[64]){
/*r7=NR7*/ \
"psraw $4,%%mm7\n\t" \
/*Store NR6 at J(6).*/ \
- "movq %%mm6,"OC_J(6)"\n\t" \
+ "movq %%mm6,"OC_J(6,_y)"\n\t" \
/*r0=NR0*/ \
"psraw $4,%%mm0\n\t" \
/*Store NR5 at J(5).*/ \
- "movq %%mm5,"OC_J(5)"\n\t" \
+ "movq %%mm5,"OC_J(5,_y)"\n\t" \
/*Store NR7 at J(7).*/ \
- "movq %%mm7,"OC_J(7)"\n\t" \
+ "movq %%mm7,"OC_J(7,_y)"\n\t" \
/*Store NR0 at I(0).*/ \
- "movq %%mm0,"OC_I(0)"\n\t" \
+ "movq %%mm0,"OC_I(0,_y)"\n\t" \
"#end OC_COLUMN_IDCT_10\n\t" \
-static void oc_idct8x8_10(ogg_int16_t _y[64]){
+static void oc_idct8x8_10_mmx(ogg_int16_t _y[64],ogg_int16_t _x[64]){
__asm__ __volatile__(
-#define OC_I(_k) OC_M2STR((_k*16))"(%[y])"
-#define OC_J(_k) OC_M2STR(((_k-4)*16)+8)"(%[y])"
+#define OC_I(_k,_y) OC_MEM_OFFS((_k)*16,_y)
+#define OC_J(_k,_y) OC_MEM_OFFS(((_k)-4)*16+8,_y)
/*Done with dequant, descramble, and partial transpose.
Now do the iDCT itself.*/
- OC_ROW_IDCT_10
- OC_TRANSPOSE
+ OC_ROW_IDCT_10(y,x)
+ OC_TRANSPOSE(y)
#undef OC_I
#undef OC_J
-#define OC_I(_k) OC_M2STR((_k*16))"(%[y])"
-#define OC_J(_k) OC_I(_k)
- OC_COLUMN_IDCT_10
+#define OC_I(_k,_y) OC_MEM_OFFS((_k)*16,_y)
+#define OC_J(_k,_y) OC_I(_k,_y)
+ OC_COLUMN_IDCT_10(y)
#undef OC_I
#undef OC_J
-#define OC_I(_k) OC_M2STR((_k*16)+8)"(%[y])"
-#define OC_J(_k) OC_I(_k)
- OC_COLUMN_IDCT_10
+#define OC_I(_k,_y) OC_MEM_OFFS((_k)*16+8,_y)
+#define OC_J(_k,_y) OC_I(_k,_y)
+ OC_COLUMN_IDCT_10(y)
#undef OC_I
#undef OC_J
- :
- :[y]"r"(_y),[c]"r"(OC_IDCT_CONSTS)
+ :[y]"=m"OC_ARRAY_OPERAND(ogg_int16_t,_y,64)
+ :[x]"m"OC_CONST_ARRAY_OPERAND(ogg_int16_t,_x,64),
+ [c]"m"OC_CONST_ARRAY_OPERAND(ogg_int16_t,OC_IDCT_CONSTS,128)
+ );
+ __asm__ __volatile__(
+ "pxor %%mm0,%%mm0\n\t"
+ "movq %%mm0,"OC_MEM_OFFS(0x00,x)"\n\t"
+ "movq %%mm0,"OC_MEM_OFFS(0x10,x)"\n\t"
+ "movq %%mm0,"OC_MEM_OFFS(0x20,x)"\n\t"
+ "movq %%mm0,"OC_MEM_OFFS(0x30,x)"\n\t"
+ :[x]"+m"OC_ARRAY_OPERAND(ogg_int16_t,_x,28)
);
}
/*Performs an inverse 8x8 Type-II DCT transform.
The input is assumed to be scaled by a factor of 4 relative to orthonormal
version of the transform.*/
-void oc_idct8x8_mmx(ogg_int16_t _y[64],int _last_zzi){
+void oc_idct8x8_mmx(ogg_int16_t _y[64],ogg_int16_t _x[64],int _last_zzi){
/*_last_zzi is subtly different from an actual count of the number of
coefficients we decoded for this block.
It contains the value of zzi BEFORE the final token in the block was
@@ -557,8 +551,8 @@ void oc_idct8x8_mmx(ogg_int16_t _y[64],int _last_zzi){
gets.
Needless to say we inherited this approach from VP3.*/
/*Then perform the iDCT.*/
- if(_last_zzi<10)oc_idct8x8_10(_y);
- else oc_idct8x8_slow(_y);
+ if(_last_zzi<=10)oc_idct8x8_10_mmx(_y,_x);
+ else oc_idct8x8_slow_mmx(_y,_x);
}
#endif
diff --git a/thirdparty/libtheora/x86/mmxloop.h b/thirdparty/libtheora/x86/mmxloop.h
index 2e870c795d..1f6090b567 100644
--- a/thirdparty/libtheora/x86/mmxloop.h
+++ b/thirdparty/libtheora/x86/mmxloop.h
@@ -9,88 +9,191 @@
On exit, mm1={b0+lflim(R_0,L),...,b7+lflim(R_7,L)} and
mm2={c0-lflim(R_0,L),...,c7-lflim(R_7,L)}; mm0 and mm3 are clobbered.*/
#define OC_LOOP_FILTER8_MMX \
- "#OC_LOOP_FILTER8_MMX\n\t" \
- /*mm7=0*/ \
- "pxor %%mm7,%%mm7\n\t" \
- /*mm6:mm0={a0,...,a7}*/ \
- "movq %%mm0,%%mm6\n\t" \
- "punpcklbw %%mm7,%%mm0\n\t" \
- "punpckhbw %%mm7,%%mm6\n\t" \
- /*mm3:mm5={d0,...,d7}*/ \
- "movq %%mm3,%%mm5\n\t" \
- "punpcklbw %%mm7,%%mm3\n\t" \
- "punpckhbw %%mm7,%%mm5\n\t" \
- /*mm6:mm0={a0-d0,...,a7-d7}*/ \
- "psubw %%mm3,%%mm0\n\t" \
- "psubw %%mm5,%%mm6\n\t" \
- /*mm3:mm1={b0,...,b7}*/ \
- "movq %%mm1,%%mm3\n\t" \
- "punpcklbw %%mm7,%%mm1\n\t" \
- "movq %%mm2,%%mm4\n\t" \
- "punpckhbw %%mm7,%%mm3\n\t" \
- /*mm5:mm4={c0,...,c7}*/ \
- "movq %%mm2,%%mm5\n\t" \
- "punpcklbw %%mm7,%%mm4\n\t" \
- "punpckhbw %%mm7,%%mm5\n\t" \
- /*mm7={3}x4 \
- mm5:mm4={c0-b0,...,c7-b7}*/ \
- "pcmpeqw %%mm7,%%mm7\n\t" \
- "psubw %%mm1,%%mm4\n\t" \
- "psrlw $14,%%mm7\n\t" \
- "psubw %%mm3,%%mm5\n\t" \
- /*Scale by 3.*/ \
- "pmullw %%mm7,%%mm4\n\t" \
- "pmullw %%mm7,%%mm5\n\t" \
- /*mm7={4}x4 \
- mm5:mm4=f={a0-d0+3*(c0-b0),...,a7-d7+3*(c7-b7)}*/ \
- "psrlw $1,%%mm7\n\t" \
- "paddw %%mm0,%%mm4\n\t" \
- "psllw $2,%%mm7\n\t" \
- "movq (%[ll]),%%mm0\n\t" \
- "paddw %%mm6,%%mm5\n\t" \
- /*R_i has the range [-127,128], so we compute -R_i instead. \
- mm4=-R_i=-(f+4>>3)=0xFF^(f-4>>3)*/ \
- "psubw %%mm7,%%mm4\n\t" \
- "psubw %%mm7,%%mm5\n\t" \
- "psraw $3,%%mm4\n\t" \
- "psraw $3,%%mm5\n\t" \
- "pcmpeqb %%mm7,%%mm7\n\t" \
- "packsswb %%mm5,%%mm4\n\t" \
- "pxor %%mm6,%%mm6\n\t" \
- "pxor %%mm7,%%mm4\n\t" \
- "packuswb %%mm3,%%mm1\n\t" \
- /*Now compute lflim of -mm4 cf. Section 7.10 of the sepc.*/ \
- /*There's no unsigned byte+signed byte with unsigned saturation op code, so \
- we have to split things by sign (the other option is to work in 16 bits, \
- but working in 8 bits gives much better parallelism). \
- We compute abs(R_i), but save a mask of which terms were negative in mm6. \
- Then we compute mm4=abs(lflim(R_i,L))=min(abs(R_i),max(2*L-abs(R_i),0)). \
- Finally, we split mm4 into positive and negative pieces using the mask in \
- mm6, and add and subtract them as appropriate.*/ \
- /*mm4=abs(-R_i)*/ \
- /*mm7=255-2*L*/ \
- "pcmpgtb %%mm4,%%mm6\n\t" \
- "psubb %%mm0,%%mm7\n\t" \
- "pxor %%mm6,%%mm4\n\t" \
- "psubb %%mm0,%%mm7\n\t" \
- "psubb %%mm6,%%mm4\n\t" \
- /*mm7=255-max(2*L-abs(R_i),0)*/ \
- "paddusb %%mm4,%%mm7\n\t" \
- /*mm4=min(abs(R_i),max(2*L-abs(R_i),0))*/ \
- "paddusb %%mm7,%%mm4\n\t" \
- "psubusb %%mm7,%%mm4\n\t" \
- /*Now split mm4 by the original sign of -R_i.*/ \
- "movq %%mm4,%%mm5\n\t" \
- "pand %%mm6,%%mm4\n\t" \
- "pandn %%mm5,%%mm6\n\t" \
- /*mm1={b0+lflim(R_0,L),...,b7+lflim(R_7,L)}*/ \
- /*mm2={c0-lflim(R_0,L),...,c7-lflim(R_7,L)}*/ \
- "paddusb %%mm4,%%mm1\n\t" \
- "psubusb %%mm4,%%mm2\n\t" \
- "psubusb %%mm6,%%mm1\n\t" \
- "paddusb %%mm6,%%mm2\n\t" \
+ "#OC_LOOP_FILTER8_MMX\n\t" \
+ /*mm7=0*/ \
+ "pxor %%mm7,%%mm7\n\t" \
+ /*mm6:mm0={a0,...,a7}*/ \
+ "movq %%mm0,%%mm6\n\t" \
+ "punpcklbw %%mm7,%%mm0\n\t" \
+ "punpckhbw %%mm7,%%mm6\n\t" \
+ /*mm3:mm5={d0,...,d7}*/ \
+ "movq %%mm3,%%mm5\n\t" \
+ "punpcklbw %%mm7,%%mm3\n\t" \
+ "punpckhbw %%mm7,%%mm5\n\t" \
+ /*mm6:mm0={a0-d0,...,a7-d7}*/ \
+ "psubw %%mm3,%%mm0\n\t" \
+ "psubw %%mm5,%%mm6\n\t" \
+ /*mm3:mm1={b0,...,b7}*/ \
+ "movq %%mm1,%%mm3\n\t" \
+ "punpcklbw %%mm7,%%mm1\n\t" \
+ "movq %%mm2,%%mm4\n\t" \
+ "punpckhbw %%mm7,%%mm3\n\t" \
+ /*mm5:mm4={c0,...,c7}*/ \
+ "movq %%mm2,%%mm5\n\t" \
+ "punpcklbw %%mm7,%%mm4\n\t" \
+ "punpckhbw %%mm7,%%mm5\n\t" \
+ /*mm7={3}x4 \
+ mm5:mm4={c0-b0,...,c7-b7}*/ \
+ "pcmpeqw %%mm7,%%mm7\n\t" \
+ "psubw %%mm1,%%mm4\n\t" \
+ "psrlw $14,%%mm7\n\t" \
+ "psubw %%mm3,%%mm5\n\t" \
+ /*Scale by 3.*/ \
+ "pmullw %%mm7,%%mm4\n\t" \
+ "pmullw %%mm7,%%mm5\n\t" \
+ /*mm7={4}x4 \
+ mm5:mm4=f={a0-d0+3*(c0-b0),...,a7-d7+3*(c7-b7)}*/ \
+ "psrlw $1,%%mm7\n\t" \
+ "paddw %%mm0,%%mm4\n\t" \
+ "psllw $2,%%mm7\n\t" \
+ "movq (%[ll]),%%mm0\n\t" \
+ "paddw %%mm6,%%mm5\n\t" \
+ /*R_i has the range [-127,128], so we compute -R_i instead. \
+ mm4=-R_i=-(f+4>>3)=0xFF^(f-4>>3)*/ \
+ "psubw %%mm7,%%mm4\n\t" \
+ "psubw %%mm7,%%mm5\n\t" \
+ "psraw $3,%%mm4\n\t" \
+ "psraw $3,%%mm5\n\t" \
+ "pcmpeqb %%mm7,%%mm7\n\t" \
+ "packsswb %%mm5,%%mm4\n\t" \
+ "pxor %%mm6,%%mm6\n\t" \
+ "pxor %%mm7,%%mm4\n\t" \
+ "packuswb %%mm3,%%mm1\n\t" \
+  /*Now compute lflim of -mm4 cf. Section 7.10 of the spec.*/ \
+ /*There's no unsigned byte+signed byte with unsigned saturation op code, so \
+ we have to split things by sign (the other option is to work in 16 bits, \
+ but working in 8 bits gives much better parallelism). \
+ We compute abs(R_i), but save a mask of which terms were negative in mm6. \
+ Then we compute mm4=abs(lflim(R_i,L))=min(abs(R_i),max(2*L-abs(R_i),0)). \
+ Finally, we split mm4 into positive and negative pieces using the mask in \
+ mm6, and add and subtract them as appropriate.*/ \
+ /*mm4=abs(-R_i)*/ \
+ /*mm7=255-2*L*/ \
+ "pcmpgtb %%mm4,%%mm6\n\t" \
+ "psubb %%mm0,%%mm7\n\t" \
+ "pxor %%mm6,%%mm4\n\t" \
+ "psubb %%mm0,%%mm7\n\t" \
+ "psubb %%mm6,%%mm4\n\t" \
+ /*mm7=255-max(2*L-abs(R_i),0)*/ \
+ "paddusb %%mm4,%%mm7\n\t" \
+ /*mm4=min(abs(R_i),max(2*L-abs(R_i),0))*/ \
+ "paddusb %%mm7,%%mm4\n\t" \
+ "psubusb %%mm7,%%mm4\n\t" \
+ /*Now split mm4 by the original sign of -R_i.*/ \
+ "movq %%mm4,%%mm5\n\t" \
+ "pand %%mm6,%%mm4\n\t" \
+ "pandn %%mm5,%%mm6\n\t" \
+ /*mm1={b0+lflim(R_0,L),...,b7+lflim(R_7,L)}*/ \
+ /*mm2={c0-lflim(R_0,L),...,c7-lflim(R_7,L)}*/ \
+ "paddusb %%mm4,%%mm1\n\t" \
+ "psubusb %%mm4,%%mm2\n\t" \
+ "psubusb %%mm6,%%mm1\n\t" \
+ "paddusb %%mm6,%%mm2\n\t" \
-#define OC_LOOP_FILTER_V_MMX(_pix,_ystride,_ll) \
+/*On entry, mm0={a0,...,a7}, mm1={b0,...,b7}, mm2={c0,...,c7}, mm3={d0,...d7}.
+ On exit, mm1={b0+lflim(R_0,L),...,b7+lflim(R_7,L)} and
+ mm2={c0-lflim(R_0,L),...,c7-lflim(R_7,L)}.
+ All other MMX registers are clobbered.*/
+#define OC_LOOP_FILTER8_MMXEXT \
+ "#OC_LOOP_FILTER8_MMXEXT\n\t" \
+ /*R_i=(a_i-3*b_i+3*c_i-d_i+4>>3) has the range [-127,128], so we compute \
+ -R_i=(-a_i+3*b_i-3*c_i+d_i+3>>3) instead.*/ \
+ /*This first part is based on the transformation \
+ f = -(3*(c-b)+a-d+4>>3) \
+ = -(3*(c+255-b)+(a+255-d)+4-1020>>3) \
+ = -(3*(c+~b)+(a+~d)-1016>>3) \
+ = 127-(3*(c+~b)+(a+~d)>>3) \
+ = 128+~(3*(c+~b)+(a+~d)>>3) (mod 256). \
+ Although pavgb(a,b) = (a+b+1>>1) (biased up), we rely heavily on the \
+ fact that ~pavgb(~a,~b) = (a+b>>1) (biased down). \
+ Using this, the last expression above can be computed in 8 bits of working \
+ precision via: \
+ u = ~pavgb(~b,c); \
+ v = pavgb(b,~c); \
+ This mask is 0 or 0xFF, and controls whether t is biased up or down: \
+ m = u-v; \
+ t = m^pavgb(m^~a,m^d); \
+ f = 128+pavgb(pavgb(t,u),v); \
+ This required some careful analysis to ensure that carries are propagated \
+ correctly in all cases, but has been checked exhaustively.*/ \
+ /*input (a, b, c, d, ., ., ., .)*/ \
+ /*ff=0xFF; \
+ u=b; \
+ v=c; \
+ ll=255-2*L;*/ \
+ "pcmpeqb %%mm7,%%mm7\n\t" \
+ "movq %%mm1,%%mm4\n\t" \
+ "movq %%mm2,%%mm5\n\t" \
+ "movq (%[ll]),%%mm6\n\t" \
+ /*allocated u, v, ll, ff: (a, b, c, d, u, v, ll, ff)*/ \
+ /*u^=ff; \
+ v^=ff;*/ \
+ "pxor %%mm7,%%mm4\n\t" \
+ "pxor %%mm7,%%mm5\n\t" \
+ /*allocated ll: (a, b, c, d, u, v, ll, ff)*/ \
+ /*u=pavgb(u,c); \
+ v=pavgb(v,b);*/ \
+ "pavgb %%mm2,%%mm4\n\t" \
+ "pavgb %%mm1,%%mm5\n\t" \
+ /*u^=ff; \
+ a^=ff;*/ \
+ "pxor %%mm7,%%mm4\n\t" \
+ "pxor %%mm7,%%mm0\n\t" \
+ /*m=u-v;*/ \
+ "psubb %%mm5,%%mm4\n\t" \
+ /*freed u, allocated m: (a, b, c, d, m, v, ll, ff)*/ \
+ /*a^=m; \
+ d^=m;*/ \
+ "pxor %%mm4,%%mm0\n\t" \
+ "pxor %%mm4,%%mm3\n\t" \
+ /*t=pavgb(a,d);*/ \
+ "pavgb %%mm3,%%mm0\n\t" \
+ "psllw $7,%%mm7\n\t" \
+ /*freed a, d, ff, allocated t, of: (t, b, c, ., m, v, ll, of)*/ \
+ /*t^=m; \
+ u=m+v;*/ \
+ "pxor %%mm4,%%mm0\n\t" \
+ "paddb %%mm5,%%mm4\n\t" \
+ /*freed t, m, allocated f, u: (f, b, c, ., u, v, ll, of)*/ \
+ /*f=pavgb(f,u); \
+ of=128;*/ \
+ "pavgb %%mm4,%%mm0\n\t" \
+ "packsswb %%mm7,%%mm7\n\t" \
+ /*freed u, ff, allocated ll: (f, b, c, ., ll, v, ll, of)*/ \
+ /*f=pavgb(f,v);*/ \
+ "pavgb %%mm5,%%mm0\n\t" \
+ "movq %%mm7,%%mm3\n\t" \
+ "movq %%mm6,%%mm4\n\t" \
+ /*freed v, allocated of: (f, b, c, of, ll, ., ll, of)*/ \
+  /*Now compute lflim of R_i=-(128+mm0) cf. Section 7.10 of the spec.*/ \
+ /*There's no unsigned byte+signed byte with unsigned saturation op code, so \
+ we have to split things by sign (the other option is to work in 16 bits, \
+ but staying in 8 bits gives much better parallelism).*/ \
+ /*Instead of adding the offset of 128 in mm3, we use it to split mm0. \
+ This is the same number of instructions as computing a mask and splitting \
+ after the lflim computation, but has shorter dependency chains.*/ \
+ /*mm0=R_i<0?-R_i:0 (denoted abs(R_i<0))\
+ mm3=R_i>0?R_i:0* (denoted abs(R_i>0))*/ \
+ "psubusb %%mm0,%%mm3\n\t" \
+ "psubusb %%mm7,%%mm0\n\t" \
+ /*mm6=255-max(2*L-abs(R_i<0),0) \
+ mm4=255-max(2*L-abs(R_i>0),0)*/ \
+ "paddusb %%mm3,%%mm4\n\t" \
+ "paddusb %%mm0,%%mm6\n\t" \
+ /*mm0=min(abs(R_i<0),max(2*L-abs(R_i<0),0)) \
+ mm3=min(abs(R_i>0),max(2*L-abs(R_i>0),0))*/ \
+ "paddusb %%mm4,%%mm3\n\t" \
+ "paddusb %%mm6,%%mm0\n\t" \
+ "psubusb %%mm4,%%mm3\n\t" \
+ "psubusb %%mm6,%%mm0\n\t" \
+ /*mm1={b0+lflim(R_0,L),...,b7+lflim(R_7,L)}*/ \
+ /*mm2={c0-lflim(R_0,L),...,c7-lflim(R_7,L)}*/ \
+ "paddusb %%mm3,%%mm1\n\t" \
+ "psubusb %%mm3,%%mm2\n\t" \
+ "psubusb %%mm0,%%mm1\n\t" \
+ "paddusb %%mm0,%%mm2\n\t" \
+
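The MMXEXT variant above relies on the identity stated in its comments: pavgb(a,b) = (a+b+1)>>1 rounds up, and ~pavgb(~a,~b) = (a+b)>>1 rounds down, which is what lets the whole filter response be evaluated in 8 bits of working precision. The identity is cheap to verify exhaustively; a standalone check (pavgb_ref() is a hypothetical scalar model of the instruction):

#include <assert.h>
/*pavgb(a,b)=(a+b+1)>>1; check that ~pavgb(~a,~b)=(a+b)>>1 for all bytes.*/
static unsigned char pavgb_ref(unsigned char _a,unsigned char _b){
  return (unsigned char)((_a+_b+1)>>1);
}
static void check_pavgb_identity(void){
  int a;
  int b;
  for(a=0;a<256;a++)for(b=0;b<256;b++){
    unsigned char lhs;
    lhs=(unsigned char)~pavgb_ref((unsigned char)~a,(unsigned char)~b);
    assert(lhs==(unsigned char)((a+b)>>1));
  }
}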
+#define OC_LOOP_FILTER_V(_filter,_pix,_ystride,_ll) \
do{ \
ptrdiff_t ystride3__; \
__asm__ __volatile__( \
@@ -104,7 +207,7 @@
"movq (%[pix],%[ystride]),%%mm1\n\t" \
/*mm2={c0,...,c7}*/ \
"movq (%[pix],%[ystride],2),%%mm2\n\t" \
- OC_LOOP_FILTER8_MMX \
+ _filter \
/*Write it back out.*/ \
"movq %%mm1,(%[pix],%[ystride])\n\t" \
"movq %%mm2,(%[pix],%[ystride],2)\n\t" \
@@ -116,7 +219,7 @@
} \
while(0)
-#define OC_LOOP_FILTER_H_MMX(_pix,_ystride,_ll) \
+#define OC_LOOP_FILTER_H(_filter,_pix,_ystride,_ll) \
do{ \
unsigned char *pix__; \
ptrdiff_t ystride3__; \
@@ -174,7 +277,7 @@
"punpckldq %%mm5,%%mm2\n\t" \
/*mm3=d7 d6 d5 d4 d3 d2 d1 d0*/ \
"punpckhdq %%mm5,%%mm3\n\t" \
- OC_LOOP_FILTER8_MMX \
+ _filter \
/*mm2={b0+R_0'',...,b7+R_7''}*/ \
"movq %%mm1,%%mm0\n\t" \
/*mm1={b0+R_0'',c0-R_0'',...,b3+R_3'',c3-R_3''}*/ \
diff --git a/thirdparty/libtheora/x86/mmxstate.c b/thirdparty/libtheora/x86/mmxstate.c
index 808b0a789b..eebea14fba 100644
--- a/thirdparty/libtheora/x86/mmxstate.c
+++ b/thirdparty/libtheora/x86/mmxstate.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: mmxstate.c 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
@@ -19,23 +19,23 @@
Originally written by Rudolf Marek.*/
#include <string.h>
#include "x86int.h"
-#include "mmxfrag.h"
#include "mmxloop.h"
#if defined(OC_X86_ASM)
void oc_state_frag_recon_mmx(const oc_theora_state *_state,ptrdiff_t _fragi,
- int _pli,ogg_int16_t _dct_coeffs[64],int _last_zzi,ogg_uint16_t _dc_quant){
+ int _pli,ogg_int16_t _dct_coeffs[128],int _last_zzi,ogg_uint16_t _dc_quant){
unsigned char *dst;
ptrdiff_t frag_buf_off;
int ystride;
- int mb_mode;
+ int refi;
/*Apply the inverse transform.*/
/*Special case only having a DC component.*/
if(_last_zzi<2){
/*Note that this value must be unsigned, to keep the __asm__ block from
sign-extending it when it puts it in a register.*/
ogg_uint16_t p;
+ int i;
/*We round this dequant product (and not any of the others) because there's
no iDCT rounding.*/
p=(ogg_int16_t)(_dct_coeffs[0]*(ogg_int32_t)_dc_quant+15>>5);
@@ -47,81 +47,48 @@ void oc_state_frag_recon_mmx(const oc_theora_state *_state,ptrdiff_t _fragi,
"punpcklwd %%mm0,%%mm0\n\t"
/*mm0=AAAA AAAA AAAA AAAA*/
"punpckldq %%mm0,%%mm0\n\t"
- "movq %%mm0,(%[y])\n\t"
- "movq %%mm0,8(%[y])\n\t"
- "movq %%mm0,16(%[y])\n\t"
- "movq %%mm0,24(%[y])\n\t"
- "movq %%mm0,32(%[y])\n\t"
- "movq %%mm0,40(%[y])\n\t"
- "movq %%mm0,48(%[y])\n\t"
- "movq %%mm0,56(%[y])\n\t"
- "movq %%mm0,64(%[y])\n\t"
- "movq %%mm0,72(%[y])\n\t"
- "movq %%mm0,80(%[y])\n\t"
- "movq %%mm0,88(%[y])\n\t"
- "movq %%mm0,96(%[y])\n\t"
- "movq %%mm0,104(%[y])\n\t"
- "movq %%mm0,112(%[y])\n\t"
- "movq %%mm0,120(%[y])\n\t"
:
- :[y]"r"(_dct_coeffs),[p]"r"((unsigned)p)
- :"memory"
+ :[p]"r"((unsigned)p)
);
+ for(i=0;i<4;i++){
+ __asm__ __volatile__(
+ "movq %%mm0,"OC_MEM_OFFS(0x00,y)"\n\t"
+ "movq %%mm0,"OC_MEM_OFFS(0x08,y)"\n\t"
+ "movq %%mm0,"OC_MEM_OFFS(0x10,y)"\n\t"
+ "movq %%mm0,"OC_MEM_OFFS(0x18,y)"\n\t"
+ :[y]"=m"OC_ARRAY_OPERAND(ogg_int16_t,_dct_coeffs+64+16*i,16)
+ );
+ }
}
else{
/*Dequantize the DC coefficient.*/
_dct_coeffs[0]=(ogg_int16_t)(_dct_coeffs[0]*(int)_dc_quant);
- oc_idct8x8_mmx(_dct_coeffs,_last_zzi);
+ oc_idct8x8(_state,_dct_coeffs+64,_dct_coeffs,_last_zzi);
}
/*Fill in the target buffer.*/
frag_buf_off=_state->frag_buf_offs[_fragi];
- mb_mode=_state->frags[_fragi].mb_mode;
+ refi=_state->frags[_fragi].refi;
ystride=_state->ref_ystride[_pli];
- dst=_state->ref_frame_data[_state->ref_frame_idx[OC_FRAME_SELF]]+frag_buf_off;
- if(mb_mode==OC_MODE_INTRA)oc_frag_recon_intra_mmx(dst,ystride,_dct_coeffs);
+ dst=_state->ref_frame_data[OC_FRAME_SELF]+frag_buf_off;
+ if(refi==OC_FRAME_SELF)oc_frag_recon_intra_mmx(dst,ystride,_dct_coeffs+64);
else{
const unsigned char *ref;
int mvoffsets[2];
- ref=
- _state->ref_frame_data[_state->ref_frame_idx[OC_FRAME_FOR_MODE(mb_mode)]]
- +frag_buf_off;
+ ref=_state->ref_frame_data[refi]+frag_buf_off;
if(oc_state_get_mv_offsets(_state,mvoffsets,_pli,
- _state->frag_mvs[_fragi][0],_state->frag_mvs[_fragi][1])>1){
+ _state->frag_mvs[_fragi])>1){
oc_frag_recon_inter2_mmx(dst,ref+mvoffsets[0],ref+mvoffsets[1],ystride,
- _dct_coeffs);
+ _dct_coeffs+64);
}
- else oc_frag_recon_inter_mmx(dst,ref+mvoffsets[0],ystride,_dct_coeffs);
+ else oc_frag_recon_inter_mmx(dst,ref+mvoffsets[0],ystride,_dct_coeffs+64);
}
}
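For the DC-only path above (_last_zzi<2), reconstruction reduces to one rounded dequantization followed by broadcasting the result into all 64 coefficients of the output half of the buffer. A scalar sketch of that path (ogg_int16_t/ogg_uint16_t/ogg_int32_t are the fixed-width types used by the surrounding code; the helper name is illustrative):

/*Scalar sketch of the DC-only reconstruction: dequantize DC with rounding
  (there is no iDCT rounding on this path) and replicate it 64 times.*/
static void oc_frag_recon_dc_only_ref(ogg_int16_t _out[64],
 ogg_int16_t _dc_coeff,ogg_uint16_t _dc_quant){
  ogg_uint16_t p;
  int          i;
  p=(ogg_uint16_t)(_dc_coeff*(ogg_int32_t)_dc_quant+15>>5);
  for(i=0;i<64;i++)_out[i]=(ogg_int16_t)p;
}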
 /*We copy these entire functions to inline the actual MMX routines so that we
use only a single indirect call.*/
-/*Copies the fragments specified by the lists of fragment indices from one
- frame to another.
- _fragis: A pointer to a list of fragment indices.
- _nfragis: The number of fragment indices to copy.
- _dst_frame: The reference frame to copy to.
- _src_frame: The reference frame to copy from.
- _pli: The color plane the fragments lie in.*/
-void oc_state_frag_copy_list_mmx(const oc_theora_state *_state,
- const ptrdiff_t *_fragis,ptrdiff_t _nfragis,
- int _dst_frame,int _src_frame,int _pli){
- const ptrdiff_t *frag_buf_offs;
- const unsigned char *src_frame_data;
- unsigned char *dst_frame_data;
- ptrdiff_t fragii;
- int ystride;
- dst_frame_data=_state->ref_frame_data[_state->ref_frame_idx[_dst_frame]];
- src_frame_data=_state->ref_frame_data[_state->ref_frame_idx[_src_frame]];
- ystride=_state->ref_ystride[_pli];
- frag_buf_offs=_state->frag_buf_offs;
- for(fragii=0;fragii<_nfragis;fragii++){
- ptrdiff_t frag_buf_off;
- frag_buf_off=frag_buf_offs[_fragis[fragii]];
- OC_FRAG_COPY_MMX(dst_frame_data+frag_buf_off,
- src_frame_data+frag_buf_off,ystride);
- }
+void oc_loop_filter_init_mmx(signed char _bv[256],int _flimit){
+ memset(_bv,_flimit,8);
}
/*Apply the loop filter to a given set of fragment rows in the given plane.
@@ -133,7 +100,7 @@ void oc_state_frag_copy_list_mmx(const oc_theora_state *_state,
_fragy0: The Y coordinate of the first fragment row to filter.
_fragy_end: The Y coordinate of the fragment row to stop filtering at.*/
void oc_state_loop_filter_frag_rows_mmx(const oc_theora_state *_state,
- int _bv[256],int _refi,int _pli,int _fragy0,int _fragy_end){
+ signed char _bv[256],int _refi,int _pli,int _fragy0,int _fragy_end){
OC_ALIGN8(unsigned char ll[8]);
const oc_fragment_plane *fplane;
const oc_fragment *frags;
@@ -170,13 +137,84 @@ void oc_state_loop_filter_frag_rows_mmx(const oc_theora_state *_state,
if(frags[fragi].coded){
unsigned char *ref;
ref=ref_frame_data+frag_buf_offs[fragi];
- if(fragi>fragi0)OC_LOOP_FILTER_H_MMX(ref,ystride,ll);
- if(fragi0>fragi_top)OC_LOOP_FILTER_V_MMX(ref,ystride,ll);
+ if(fragi>fragi0){
+ OC_LOOP_FILTER_H(OC_LOOP_FILTER8_MMX,ref,ystride,ll);
+ }
+ if(fragi0>fragi_top){
+ OC_LOOP_FILTER_V(OC_LOOP_FILTER8_MMX,ref,ystride,ll);
+ }
+ if(fragi+1<fragi_end&&!frags[fragi+1].coded){
+ OC_LOOP_FILTER_H(OC_LOOP_FILTER8_MMX,ref+8,ystride,ll);
+ }
+ if(fragi+nhfrags<fragi_bot&&!frags[fragi+nhfrags].coded){
+ OC_LOOP_FILTER_V(OC_LOOP_FILTER8_MMX,ref+(ystride<<3),ystride,ll);
+ }
+ }
+ fragi++;
+ }
+ fragi0+=nhfrags;
+ }
+}
+
+void oc_loop_filter_init_mmxext(signed char _bv[256],int _flimit){
+ memset(_bv,~(_flimit<<1),8);
+}
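The two init routines store different encodings of the filter limit: oc_loop_filter_init_mmx() keeps L itself in each byte (the MMX filter then derives 255-2*L with its two psubb instructions), while oc_loop_filter_init_mmxext() stores ~(2*L), which for byte values is already 255-2*L, the form OC_LOOP_FILTER8_MMXEXT reads from (%[ll]). A quick check of that byte identity:

#include <assert.h>
/*For byte arithmetic, ~(2*L) == 255-2*L, the encoding the MMXEXT filter expects.*/
static void check_loop_filter_init_encoding(void){
  int l;
  for(l=0;l<128;l++)assert((unsigned char)~(l<<1)==(unsigned char)(255-2*l));
}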
+
+/*Apply the loop filter to a given set of fragment rows in the given plane.
+ The filter may be run on the bottom edge, affecting pixels in the next row of
+ fragments, so this row also needs to be available.
+ _bv: The bounding values array.
+ _refi: The index of the frame buffer to filter.
+ _pli: The color plane to filter.
+ _fragy0: The Y coordinate of the first fragment row to filter.
+ _fragy_end: The Y coordinate of the fragment row to stop filtering at.*/
+void oc_state_loop_filter_frag_rows_mmxext(const oc_theora_state *_state,
+ signed char _bv[256],int _refi,int _pli,int _fragy0,int _fragy_end){
+ const oc_fragment_plane *fplane;
+ const oc_fragment *frags;
+ const ptrdiff_t *frag_buf_offs;
+ unsigned char *ref_frame_data;
+ ptrdiff_t fragi_top;
+ ptrdiff_t fragi_bot;
+ ptrdiff_t fragi0;
+ ptrdiff_t fragi0_end;
+ int ystride;
+ int nhfrags;
+ fplane=_state->fplanes+_pli;
+ nhfrags=fplane->nhfrags;
+ fragi_top=fplane->froffset;
+ fragi_bot=fragi_top+fplane->nfrags;
+ fragi0=fragi_top+_fragy0*(ptrdiff_t)nhfrags;
+ fragi0_end=fragi_top+_fragy_end*(ptrdiff_t)nhfrags;
+ ystride=_state->ref_ystride[_pli];
+ frags=_state->frags;
+ frag_buf_offs=_state->frag_buf_offs;
+ ref_frame_data=_state->ref_frame_data[_refi];
+ /*The following loops are constructed somewhat non-intuitively on purpose.
+ The main idea is: if a block boundary has at least one coded fragment on
+ it, the filter is applied to it.
+ However, the order that the filters are applied in matters, and VP3 chose
+ the somewhat strange ordering used below.*/
+ while(fragi0<fragi0_end){
+ ptrdiff_t fragi;
+ ptrdiff_t fragi_end;
+ fragi=fragi0;
+ fragi_end=fragi+nhfrags;
+ while(fragi<fragi_end){
+ if(frags[fragi].coded){
+ unsigned char *ref;
+ ref=ref_frame_data+frag_buf_offs[fragi];
+ if(fragi>fragi0){
+ OC_LOOP_FILTER_H(OC_LOOP_FILTER8_MMXEXT,ref,ystride,_bv);
+ }
+ if(fragi0>fragi_top){
+ OC_LOOP_FILTER_V(OC_LOOP_FILTER8_MMXEXT,ref,ystride,_bv);
+ }
if(fragi+1<fragi_end&&!frags[fragi+1].coded){
- OC_LOOP_FILTER_H_MMX(ref+8,ystride,ll);
+ OC_LOOP_FILTER_H(OC_LOOP_FILTER8_MMXEXT,ref+8,ystride,_bv);
}
if(fragi+nhfrags<fragi_bot&&!frags[fragi+nhfrags].coded){
- OC_LOOP_FILTER_V_MMX(ref+(ystride<<3),ystride,ll);
+ OC_LOOP_FILTER_V(OC_LOOP_FILTER8_MMXEXT,ref+(ystride<<3),ystride,_bv);
}
}
fragi++;
diff --git a/thirdparty/libtheora/x86/sse2encfrag.c b/thirdparty/libtheora/x86/sse2encfrag.c
new file mode 100644
index 0000000000..43aeb17711
--- /dev/null
+++ b/thirdparty/libtheora/x86/sse2encfrag.c
@@ -0,0 +1,501 @@
+/********************************************************************
+ * *
+ * THIS FILE IS PART OF THE OggTheora SOFTWARE CODEC SOURCE CODE. *
+ * USE, DISTRIBUTION AND REPRODUCTION OF THIS LIBRARY SOURCE IS *
+ * GOVERNED BY A BSD-STYLE SOURCE LICENSE INCLUDED WITH THIS SOURCE *
+ * IN 'COPYING'. PLEASE READ THESE TERMS BEFORE DISTRIBUTING. *
+ * *
+ * THE Theora SOURCE CODE IS COPYRIGHT (C) 2002-2009 *
+ * by the Xiph.Org Foundation http://www.xiph.org/ *
+ * *
+ ********************************************************************
+
+ function:
+ last mod: $Id: dsp_mmx.c 14579 2008-03-12 06:42:40Z xiphmont $
+
+ ********************************************************************/
+#include <stddef.h>
+#include "x86enc.h"
+#include "sse2trans.h"
+
+#if defined(OC_X86_ASM)
+
+/*Load a 4x8 array of pixel values from %[src] and %[ref] and compute their
+ 16-bit differences.
+ On output, these are stored in _m0, xmm1, xmm2, and xmm3.
+ xmm4 and xmm5 are clobbered.*/
+#define OC_LOAD_SUB_4x8(_m0) \
+ "#OC_LOAD_SUB_4x8\n\t" \
+ /*Load the first three rows.*/ \
+ "movq (%[src]),"_m0"\n\t" \
+ "movq (%[ref]),%%xmm4\n\t" \
+ "movq (%[src],%[ystride]),%%xmm1\n\t" \
+ "movq (%[ref],%[ystride]),%%xmm3\n\t" \
+ "movq (%[src],%[ystride],2),%%xmm2\n\t" \
+ "movq (%[ref],%[ystride],2),%%xmm5\n\t" \
+ /*Unpack and subtract.*/ \
+ "punpcklbw %%xmm4,"_m0"\n\t" \
+ "punpcklbw %%xmm4,%%xmm4\n\t" \
+ "punpcklbw %%xmm3,%%xmm1\n\t" \
+ "punpcklbw %%xmm3,%%xmm3\n\t" \
+ "psubw %%xmm4,"_m0"\n\t" \
+ "psubw %%xmm3,%%xmm1\n\t" \
+ /*Load the last row.*/ \
+ "movq (%[src],%[ystride3]),%%xmm3\n\t" \
+ "movq (%[ref],%[ystride3]),%%xmm4\n\t" \
+ /*Unpack, subtract, and advance the pointers.*/ \
+ "punpcklbw %%xmm5,%%xmm2\n\t" \
+ "punpcklbw %%xmm5,%%xmm5\n\t" \
+ "lea (%[src],%[ystride],4),%[src]\n\t" \
+ "psubw %%xmm5,%%xmm2\n\t" \
+ "punpcklbw %%xmm4,%%xmm3\n\t" \
+ "punpcklbw %%xmm4,%%xmm4\n\t" \
+ "lea (%[ref],%[ystride],4),%[ref]\n\t" \
+ "psubw %%xmm4,%%xmm3\n\t" \
+
+/*Square and accumulate four rows of differences in _m0, xmm1, xmm2, and xmm3.
+ On output, xmm0 contains the sum of two of the rows, and the other two are
+ added to xmm7.*/
+#define OC_SSD_4x8(_m0) \
+ "pmaddwd "_m0","_m0"\n\t" \
+ "pmaddwd %%xmm1,%%xmm1\n\t" \
+ "pmaddwd %%xmm2,%%xmm2\n\t" \
+ "pmaddwd %%xmm3,%%xmm3\n\t" \
+ "paddd %%xmm1,"_m0"\n\t" \
+ "paddd %%xmm3,%%xmm2\n\t" \
+ "paddd %%xmm2,%%xmm7\n\t" \
+
+unsigned oc_enc_frag_ssd_sse2(const unsigned char *_src,
+ const unsigned char *_ref,int _ystride){
+ unsigned ret;
+ __asm__ __volatile__(
+ OC_LOAD_SUB_4x8("%%xmm7")
+ OC_SSD_4x8("%%xmm7")
+ OC_LOAD_SUB_4x8("%%xmm0")
+ OC_SSD_4x8("%%xmm0")
+ "paddd %%xmm0,%%xmm7\n\t"
+ "movdqa %%xmm7,%%xmm6\n\t"
+ "punpckhqdq %%xmm7,%%xmm7\n\t"
+ "paddd %%xmm6,%%xmm7\n\t"
+ "pshufd $1,%%xmm7,%%xmm6\n\t"
+ "paddd %%xmm6,%%xmm7\n\t"
+ "movd %%xmm7,%[ret]\n\t"
+ :[ret]"=a"(ret)
+ :[src]"r"(_src),[ref]"r"(_ref),[ystride]"r"((ptrdiff_t)_ystride),
+ [ystride3]"r"((ptrdiff_t)_ystride*3)
+ );
+ return ret;
+}
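oc_enc_frag_ssd_sse2() forms 16-bit differences, squares and accumulates them with pmaddwd, and finally folds the four 32-bit partial sums held in xmm7. A scalar reference for the same 8x8 sum of squared differences:

/*Scalar reference for the 8x8 SSD computed above.*/
static unsigned oc_enc_frag_ssd_ref(const unsigned char *_src,
 const unsigned char *_ref,int _ystride){
  unsigned ssd;
  int      i,j;
  ssd=0;
  for(i=0;i<8;i++)for(j=0;j<8;j++){
    int d;
    d=_src[i*_ystride+j]-_ref[i*_ystride+j];
    ssd+=(unsigned)(d*d);
  }
  return ssd;
}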
+
+static const unsigned char __attribute__((aligned(16))) OC_MASK_CONSTS[8]={
+ 0x01,0x02,0x04,0x08,0x10,0x20,0x40,0x80
+};
+
+/*Load a 2x8 array of pixel values from %[src] and %[ref] and compute their
+ horizontal sums as well as their 16-bit differences subject to a mask.
+ %%xmm5 must contain OC_MASK_CONSTS[0...7] and %%xmm6 must contain 0.*/
+#define OC_LOAD_SUB_MASK_2x8 \
+ "#OC_LOAD_SUB_MASK_2x8\n\t" \
+ /*Start the loads and expand the next 8 bits of the mask.*/ \
+ "shl $8,%[m]\n\t" \
+ "movq (%[src]),%%xmm0\n\t" \
+ "mov %h[m],%b[m]\n\t" \
+ "movq (%[ref]),%%xmm2\n\t" \
+ "movd %[m],%%xmm4\n\t" \
+ "shr $8,%[m]\n\t" \
+ "pshuflw $0x00,%%xmm4,%%xmm4\n\t" \
+ "mov %h[m],%b[m]\n\t" \
+ "pand %%xmm6,%%xmm4\n\t" \
+ "pcmpeqb %%xmm6,%%xmm4\n\t" \
+ /*Perform the masking.*/ \
+ "pand %%xmm4,%%xmm0\n\t" \
+ "pand %%xmm4,%%xmm2\n\t" \
+ /*Finish the loads while unpacking the first set of rows, and expand the next
+ 8 bits of the mask.*/ \
+ "movd %[m],%%xmm4\n\t" \
+ "movq (%[src],%[ystride]),%%xmm1\n\t" \
+ "pshuflw $0x00,%%xmm4,%%xmm4\n\t" \
+ "movq (%[ref],%[ystride]),%%xmm3\n\t" \
+ "pand %%xmm6,%%xmm4\n\t" \
+ "punpcklbw %%xmm2,%%xmm0\n\t" \
+ "pcmpeqb %%xmm6,%%xmm4\n\t" \
+ "punpcklbw %%xmm2,%%xmm2\n\t" \
+ /*Mask and unpack the second set of rows.*/ \
+ "pand %%xmm4,%%xmm1\n\t" \
+ "pand %%xmm4,%%xmm3\n\t" \
+ "punpcklbw %%xmm3,%%xmm1\n\t" \
+ "punpcklbw %%xmm3,%%xmm3\n\t" \
+ "psubw %%xmm2,%%xmm0\n\t" \
+ "psubw %%xmm3,%%xmm1\n\t" \
+
+unsigned oc_enc_frag_border_ssd_sse2(const unsigned char *_src,
+ const unsigned char *_ref,int _ystride,ogg_int64_t _mask){
+ ptrdiff_t ystride;
+ unsigned ret;
+ int i;
+ ystride=_ystride;
+ __asm__ __volatile__(
+ "pxor %%xmm7,%%xmm7\n\t"
+ "movq %[c],%%xmm6\n\t"
+ :
+ :[c]"m"(OC_CONST_ARRAY_OPERAND(unsigned char,OC_MASK_CONSTS,8))
+ );
+ for(i=0;i<4;i++){
+ unsigned m;
+ m=_mask&0xFFFF;
+ _mask>>=16;
+ if(m){
+ __asm__ __volatile__(
+ OC_LOAD_SUB_MASK_2x8
+ "pmaddwd %%xmm0,%%xmm0\n\t"
+ "pmaddwd %%xmm1,%%xmm1\n\t"
+ "paddd %%xmm0,%%xmm7\n\t"
+ "paddd %%xmm1,%%xmm7\n\t"
+ :[src]"+r"(_src),[ref]"+r"(_ref),[ystride]"+r"(ystride),[m]"+Q"(m)
+ );
+ }
+ _src+=2*ystride;
+ _ref+=2*ystride;
+ }
+ __asm__ __volatile__(
+ "movdqa %%xmm7,%%xmm6\n\t"
+ "punpckhqdq %%xmm7,%%xmm7\n\t"
+ "paddd %%xmm6,%%xmm7\n\t"
+ "pshufd $1,%%xmm7,%%xmm6\n\t"
+ "paddd %%xmm6,%%xmm7\n\t"
+ "movd %%xmm7,%[ret]\n\t"
+ :[ret]"=a"(ret)
+ );
+ return ret;
+}
+
+
+/*Load an 8x8 array of pixel values from %[src] and %[ref] and compute their
+ 16-bit difference in %%xmm0...%%xmm7.*/
+#define OC_LOAD_SUB_8x8 \
+ "#OC_LOAD_SUB_8x8\n\t" \
+ "movq (%[src]),%%xmm0\n\t" \
+ "movq (%[ref]),%%xmm4\n\t" \
+ "movq (%[src],%[src_ystride]),%%xmm1\n\t" \
+ "lea (%[src],%[src_ystride],2),%[src]\n\t" \
+ "movq (%[ref],%[ref_ystride]),%%xmm5\n\t" \
+ "lea (%[ref],%[ref_ystride],2),%[ref]\n\t" \
+ "movq (%[src]),%%xmm2\n\t" \
+ "movq (%[ref]),%%xmm7\n\t" \
+ "movq (%[src],%[src_ystride]),%%xmm3\n\t" \
+ "movq (%[ref],%[ref_ystride]),%%xmm6\n\t" \
+ "punpcklbw %%xmm4,%%xmm0\n\t" \
+ "lea (%[src],%[src_ystride],2),%[src]\n\t" \
+ "punpcklbw %%xmm4,%%xmm4\n\t" \
+ "lea (%[ref],%[ref_ystride],2),%[ref]\n\t" \
+ "psubw %%xmm4,%%xmm0\n\t" \
+ "movq (%[src]),%%xmm4\n\t" \
+ "movdqa %%xmm0,"OC_MEM_OFFS(0x00,buf)"\n\t" \
+ "movq (%[ref]),%%xmm0\n\t" \
+ "punpcklbw %%xmm5,%%xmm1\n\t" \
+ "punpcklbw %%xmm5,%%xmm5\n\t" \
+ "psubw %%xmm5,%%xmm1\n\t" \
+ "movq (%[src],%[src_ystride]),%%xmm5\n\t" \
+ "punpcklbw %%xmm7,%%xmm2\n\t" \
+ "punpcklbw %%xmm7,%%xmm7\n\t" \
+ "psubw %%xmm7,%%xmm2\n\t" \
+ "movq (%[ref],%[ref_ystride]),%%xmm7\n\t" \
+ "punpcklbw %%xmm6,%%xmm3\n\t" \
+ "lea (%[src],%[src_ystride],2),%[src]\n\t" \
+ "punpcklbw %%xmm6,%%xmm6\n\t" \
+ "psubw %%xmm6,%%xmm3\n\t" \
+ "movq (%[src]),%%xmm6\n\t" \
+ "punpcklbw %%xmm0,%%xmm4\n\t" \
+ "lea (%[ref],%[ref_ystride],2),%[ref]\n\t" \
+ "punpcklbw %%xmm0,%%xmm0\n\t" \
+ "lea (%[src],%[src_ystride],2),%[src]\n\t" \
+ "psubw %%xmm0,%%xmm4\n\t" \
+ "movq (%[ref]),%%xmm0\n\t" \
+ "punpcklbw %%xmm7,%%xmm5\n\t" \
+ "neg %[src_ystride]\n\t" \
+ "punpcklbw %%xmm7,%%xmm7\n\t" \
+ "psubw %%xmm7,%%xmm5\n\t" \
+ "movq (%[src],%[src_ystride]),%%xmm7\n\t" \
+ "punpcklbw %%xmm0,%%xmm6\n\t" \
+ "lea (%[ref],%[ref_ystride],2),%[ref]\n\t" \
+ "punpcklbw %%xmm0,%%xmm0\n\t" \
+ "neg %[ref_ystride]\n\t" \
+ "psubw %%xmm0,%%xmm6\n\t" \
+ "movq (%[ref],%[ref_ystride]),%%xmm0\n\t" \
+ "punpcklbw %%xmm0,%%xmm7\n\t" \
+ "punpcklbw %%xmm0,%%xmm0\n\t" \
+ "psubw %%xmm0,%%xmm7\n\t" \
+ "movdqa "OC_MEM_OFFS(0x00,buf)",%%xmm0\n\t" \
+
+/*Load an 8x8 array of pixel values from %[src] into %%xmm0...%%xmm7.*/
+#define OC_LOAD_8x8 \
+ "#OC_LOAD_8x8\n\t" \
+ "movq (%[src]),%%xmm0\n\t" \
+ "movq (%[src],%[ystride]),%%xmm1\n\t" \
+ "movq (%[src],%[ystride],2),%%xmm2\n\t" \
+ "pxor %%xmm7,%%xmm7\n\t" \
+ "movq (%[src],%[ystride3]),%%xmm3\n\t" \
+ "punpcklbw %%xmm7,%%xmm0\n\t" \
+ "movq (%[src4]),%%xmm4\n\t" \
+ "punpcklbw %%xmm7,%%xmm1\n\t" \
+ "movq (%[src4],%[ystride]),%%xmm5\n\t" \
+ "punpcklbw %%xmm7,%%xmm2\n\t" \
+ "movq (%[src4],%[ystride],2),%%xmm6\n\t" \
+ "punpcklbw %%xmm7,%%xmm3\n\t" \
+ "movq (%[src4],%[ystride3]),%%xmm7\n\t" \
+ "punpcklbw %%xmm4,%%xmm4\n\t" \
+ "punpcklbw %%xmm5,%%xmm5\n\t" \
+ "psrlw $8,%%xmm4\n\t" \
+ "psrlw $8,%%xmm5\n\t" \
+ "punpcklbw %%xmm6,%%xmm6\n\t" \
+ "punpcklbw %%xmm7,%%xmm7\n\t" \
+ "psrlw $8,%%xmm6\n\t" \
+ "psrlw $8,%%xmm7\n\t" \
+
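+/*Note that the last four rows above are widened to 16 bits without a zero
+  register (xmm7 already holds row 7 by then): punpcklbw of a register with
+  itself duplicates each byte into a word, and psrlw $8 then leaves the
+  zero-extended value, e.g. 0xAB -> 0xABAB -> 0x00AB.*/
+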
+/*Performs the first two stages of an 8-point 1-D Hadamard transform in place.
+ Outputs 1, 3, 4, and 5 from the second stage are negated (which allows us to
+ perform this stage in place with no temporary registers).*/
+#define OC_HADAMARD_AB_8x8 \
+ "#OC_HADAMARD_AB_8x8\n\t" \
+ /*Stage A:*/ \
+ "paddw %%xmm5,%%xmm1\n\t" \
+ "paddw %%xmm6,%%xmm2\n\t" \
+ "paddw %%xmm5,%%xmm5\n\t" \
+ "paddw %%xmm6,%%xmm6\n\t" \
+ "psubw %%xmm1,%%xmm5\n\t" \
+ "psubw %%xmm2,%%xmm6\n\t" \
+ "paddw %%xmm7,%%xmm3\n\t" \
+ "paddw %%xmm4,%%xmm0\n\t" \
+ "paddw %%xmm7,%%xmm7\n\t" \
+ "paddw %%xmm4,%%xmm4\n\t" \
+ "psubw %%xmm3,%%xmm7\n\t" \
+ "psubw %%xmm0,%%xmm4\n\t" \
+ /*Stage B:*/ \
+ "paddw %%xmm2,%%xmm0\n\t" \
+ "paddw %%xmm3,%%xmm1\n\t" \
+ "paddw %%xmm6,%%xmm4\n\t" \
+ "paddw %%xmm7,%%xmm5\n\t" \
+ "paddw %%xmm2,%%xmm2\n\t" \
+ "paddw %%xmm3,%%xmm3\n\t" \
+ "paddw %%xmm6,%%xmm6\n\t" \
+ "paddw %%xmm7,%%xmm7\n\t" \
+ "psubw %%xmm0,%%xmm2\n\t" \
+ "psubw %%xmm1,%%xmm3\n\t" \
+ "psubw %%xmm4,%%xmm6\n\t" \
+ "psubw %%xmm5,%%xmm7\n\t" \
+
+/*Performs the last stage of an 8-point 1-D Hadamard transform in place.
+ Outputs 1, 3, 5, and 7 are negated (which allows us to perform this stage in
+ place with no temporary registers).*/
+#define OC_HADAMARD_C_8x8 \
+ "#OC_HADAMARD_C_8x8\n\t" \
+ /*Stage C:*/ \
+ "paddw %%xmm1,%%xmm0\n\t" \
+ "paddw %%xmm3,%%xmm2\n\t" \
+ "paddw %%xmm5,%%xmm4\n\t" \
+ "paddw %%xmm7,%%xmm6\n\t" \
+ "paddw %%xmm1,%%xmm1\n\t" \
+ "paddw %%xmm3,%%xmm3\n\t" \
+ "paddw %%xmm5,%%xmm5\n\t" \
+ "paddw %%xmm7,%%xmm7\n\t" \
+ "psubw %%xmm0,%%xmm1\n\t" \
+ "psubw %%xmm2,%%xmm3\n\t" \
+ "psubw %%xmm4,%%xmm5\n\t" \
+ "psubw %%xmm6,%%xmm7\n\t" \
+
+/*Performs an 8-point 1-D Hadamard transform in place.
+ Outputs 1, 2, 4, and 7 are negated (which allows us to perform the transform
+ in place with no temporary registers).*/
+#define OC_HADAMARD_8x8 \
+ OC_HADAMARD_AB_8x8 \
+ OC_HADAMARD_C_8x8 \
+
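+/*For reference, one scalar formulation of the unnormalized 8-point Hadamard
+  transform of t[0...7] is (an illustrative sketch; the stage order and some
+  output signs differ from the macros above, which is harmless for the SATD
+  use here since only absolute values are summed afterwards):
+    for(len=1;len<8;len<<=1){
+      for(i=0;i<8;i+=len<<1)for(j=i;j<i+len;j++){
+        a=t[j];
+        b=t[j+len];
+        t[j]=a+b;
+        t[j+len]=a-b;
+      }
+    }
+  The macros above apply the same butterflies to eight columns at once, one
+  16-bit lane per column.*/
+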
+/*Performs the first part of the final stage of the Hadamard transform and
+ summing of absolute values.
+ At the end of this part, %%xmm1 will contain the DC coefficient of the
+ transform.*/
+#define OC_HADAMARD_C_ABS_ACCUM_A_8x8 \
+ /*We use the fact that \
+ (abs(a+b)+abs(a-b))/2=max(abs(a),abs(b)) \
+ to merge the final butterfly with the abs and the first stage of \
+ accumulation. \
+ Thus we can avoid using pabsw, which is not available until SSSE3. \
+ Emulating pabsw takes 3 instructions, so the straightforward SSE2 \
+ implementation would be (3+3)*8+7=55 instructions (+4 for spilling \
+ registers). \
+ Even with pabsw, it would be (3+1)*8+7=39 instructions (with no spills). \
+ This implementation is only 26 (+4 for spilling registers).*/ \
+ "#OC_HADAMARD_C_ABS_ACCUM_A_8x8\n\t" \
+ "movdqa %%xmm7,"OC_MEM_OFFS(0x10,buf)"\n\t" \
+ "movdqa %%xmm6,"OC_MEM_OFFS(0x00,buf)"\n\t" \
+  /*xmm7={0x7FFF}x8 \
+ xmm4=max(abs(xmm4),abs(xmm5))-0x7FFF*/ \
+ "pcmpeqb %%xmm7,%%xmm7\n\t" \
+ "movdqa %%xmm4,%%xmm6\n\t" \
+ "psrlw $1,%%xmm7\n\t" \
+ "paddw %%xmm5,%%xmm6\n\t" \
+ "pmaxsw %%xmm5,%%xmm4\n\t" \
+ "paddsw %%xmm7,%%xmm6\n\t" \
+ "psubw %%xmm6,%%xmm4\n\t" \
+ /*xmm2=max(abs(xmm2),abs(xmm3))-0x7FFF \
+ xmm0=max(abs(xmm0),abs(xmm1))-0x7FFF*/ \
+ "movdqa %%xmm2,%%xmm6\n\t" \
+ "movdqa %%xmm0,%%xmm5\n\t" \
+ "pmaxsw %%xmm3,%%xmm2\n\t" \
+ "pmaxsw %%xmm1,%%xmm0\n\t" \
+ "paddw %%xmm3,%%xmm6\n\t" \
+ "movdqa "OC_MEM_OFFS(0x10,buf)",%%xmm3\n\t" \
+ "paddw %%xmm5,%%xmm1\n\t" \
+ "movdqa "OC_MEM_OFFS(0x00,buf)",%%xmm5\n\t" \
+
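+/*A quick numeric check of the identity above: with a==5 and b==-9,
+  abs(a+b)==4, abs(a-b)==14, and (4+14)/2==9==max(abs(a),abs(b)).
+  The saturating paddsw leaves each max biased by -0x7FFF; the four such
+  biases per lane wrap around to the "extra 4 added to each column" noted
+  below and are cancelled by the -64 term in the final sums.*/
+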
+/*Performs the second part of the final stage of the Hadamard transform and
+ summing of absolute values.*/
+#define OC_HADAMARD_C_ABS_ACCUM_B_8x8 \
+ "#OC_HADAMARD_C_ABS_ACCUM_B_8x8\n\t" \
+ "paddsw %%xmm7,%%xmm6\n\t" \
+ "paddsw %%xmm7,%%xmm1\n\t" \
+ "psubw %%xmm6,%%xmm2\n\t" \
+ "psubw %%xmm1,%%xmm0\n\t" \
+  /*xmm7={1}x8 (needed for the horizontal add that follows) \
+ xmm0+=xmm2+xmm4+max(abs(xmm3),abs(xmm5))-0x7FFF*/ \
+ "movdqa %%xmm3,%%xmm6\n\t" \
+ "pmaxsw %%xmm5,%%xmm3\n\t" \
+ "paddw %%xmm2,%%xmm0\n\t" \
+ "paddw %%xmm5,%%xmm6\n\t" \
+ "paddw %%xmm4,%%xmm0\n\t" \
+ "paddsw %%xmm7,%%xmm6\n\t" \
+ "paddw %%xmm3,%%xmm0\n\t" \
+ "psrlw $14,%%xmm7\n\t" \
+ "psubw %%xmm6,%%xmm0\n\t" \
+
+/*Performs the last stage of an 8-point 1-D Hadamard transform, takes the
+ absolute value of each component, and accumulates everything into xmm0.*/
+#define OC_HADAMARD_C_ABS_ACCUM_8x8 \
+ OC_HADAMARD_C_ABS_ACCUM_A_8x8 \
+ OC_HADAMARD_C_ABS_ACCUM_B_8x8 \
+
+/*Performs an 8-point 1-D Hadamard transform, takes the absolute value of each
+ component, and accumulates everything into xmm0.
+ Note that xmm0 will have an extra 4 added to each column, and that after
+ removing this value, the remainder will be half the conventional value.*/
+#define OC_HADAMARD_ABS_ACCUM_8x8 \
+ OC_HADAMARD_AB_8x8 \
+ OC_HADAMARD_C_ABS_ACCUM_8x8
+
+static unsigned oc_int_frag_satd_sse2(int *_dc,
+ const unsigned char *_src,int _src_ystride,
+ const unsigned char *_ref,int _ref_ystride){
+ OC_ALIGN16(ogg_int16_t buf[16]);
+ unsigned ret;
+ unsigned ret2;
+ int dc;
+ __asm__ __volatile__(
+ OC_LOAD_SUB_8x8
+ OC_HADAMARD_8x8
+ OC_TRANSPOSE_8x8
+ /*We split out the stages here so we can save the DC coefficient in the
+ middle.*/
+ OC_HADAMARD_AB_8x8
+ OC_HADAMARD_C_ABS_ACCUM_A_8x8
+ "movd %%xmm1,%[dc]\n\t"
+ OC_HADAMARD_C_ABS_ACCUM_B_8x8
+ /*Up to this point, everything fit in 16 bits (8 input + 1 for the
+ difference + 2*3 for the two 8-point 1-D Hadamards - 1 for the abs - 1
+ for the factor of two we dropped + 3 for the vertical accumulation).
+ Now we finally have to promote things to dwords.
+ We break this part out of OC_HADAMARD_ABS_ACCUM_8x8 to hide the long
+ latency of pmaddwd by starting to compute abs(dc) here.*/
+ "pmaddwd %%xmm7,%%xmm0\n\t"
+ "movsx %w[dc],%[dc]\n\t"
+ "cdq\n\t"
+ "movdqa %%xmm0,%%xmm1\n\t"
+ "punpckhqdq %%xmm0,%%xmm0\n\t"
+ "paddd %%xmm1,%%xmm0\n\t"
+ "pshuflw $0xE,%%xmm0,%%xmm1\n\t"
+ "paddd %%xmm1,%%xmm0\n\t"
+ "movd %%xmm0,%[ret]\n\t"
+ /*The sums produced by OC_HADAMARD_ABS_ACCUM_8x8 each have an extra 4
+ added to them, a factor of two removed, and the DC value included;
+ correct the final sum here.*/
+ "lea -64(%[ret2],%[ret],2),%[ret]\n\t"
+ "xor %[dc],%[ret2]\n\t"
+ "sub %[ret2],%[ret]\n\t"
+ /*Although it looks like we're using 7 registers here, gcc can alias %[ret]
+ and %[dc] with some of the inputs, since for once we don't write to
+ them until after we're done using everything but %[buf].*/
+ /*Note that _src_ystride and _ref_ystride must be given non-overlapping
+      constraints, otherwise if gcc can prove they're equal it will allocate
+ them to the same register (which is bad); _src and _ref face a similar
+ problem.
+ All four are destructively modified, but if we list them as output
+ constraints, gcc can't alias them with other outputs.*/
+ :[ret]"=r"(ret),[ret2]"=d"(ret2),[dc]"=a"(dc),
+ [buf]"=m"(OC_ARRAY_OPERAND(ogg_int16_t,buf,16))
+ :[src]"S"(_src),[src_ystride]"c"((ptrdiff_t)_src_ystride),
+ [ref]"a"(_ref),[ref_ystride]"d"((ptrdiff_t)_ref_ystride)
+ /*We have to use neg, so we actually clobber the condition codes for once
+ (not to mention sub, and add).*/
+ :"cc"
+ );
+ *_dc=dc;
+ return ret;
+}
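+
+/*The movsx/cdq/xor/sub sequence above is the usual branchless absolute
+  value: with s==dc>>31 (0 or -1), abs(dc)==(dc^s)-s.  In scalar terms the
+  final correction is (a sketch, with accum the folded dword sum from xmm0):
+    sign=dc>>31;
+    satd=2*accum-64-((dc^sign)-sign);
+  i.e. restore the dropped factor of two, remove the extra 4 per column
+  (8 columns, doubled, gives the 64), and subtract abs(dc) so the DC
+  coefficient is not counted.*/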
+
+unsigned oc_enc_frag_satd_sse2(int *_dc,const unsigned char *_src,
+ const unsigned char *_ref,int _ystride){
+ return oc_int_frag_satd_sse2(_dc,_src,_ystride,_ref,_ystride);
+}
+
+unsigned oc_enc_frag_satd2_sse2(int *_dc,const unsigned char *_src,
+ const unsigned char *_ref1,const unsigned char *_ref2,int _ystride){
+ OC_ALIGN8(unsigned char ref[64]);
+ oc_int_frag_copy2_mmxext(ref,8,_ref1,_ref2,_ystride);
+ return oc_int_frag_satd_sse2(_dc,_src,_ystride,ref,8);
+}
+
+unsigned oc_enc_frag_intra_satd_sse2(int *_dc,
+ const unsigned char *_src,int _ystride){
+ OC_ALIGN16(ogg_int16_t buf[16]);
+ unsigned ret;
+ int dc;
+ __asm__ __volatile__(
+ OC_LOAD_8x8
+ OC_HADAMARD_8x8
+ OC_TRANSPOSE_8x8
+ /*We split out the stages here so we can save the DC coefficient in the
+ middle.*/
+ OC_HADAMARD_AB_8x8
+ OC_HADAMARD_C_ABS_ACCUM_A_8x8
+ "movd %%xmm1,%[dc]\n\t"
+ OC_HADAMARD_C_ABS_ACCUM_B_8x8
+ /*Up to this point, everything fit in 16 bits (8 input + 1 for the
+ difference + 2*3 for the two 8-point 1-D Hadamards - 1 for the abs - 1
+ for the factor of two we dropped + 3 for the vertical accumulation).
+ Now we finally have to promote things to dwords.*/
+ "pmaddwd %%xmm7,%%xmm0\n\t"
+ /*We assume that the DC coefficient is always positive (which is true,
+ because the input to the INTRA transform was not a difference).*/
+ "movzx %w[dc],%[dc]\n\t"
+ "movdqa %%xmm0,%%xmm1\n\t"
+ "punpckhqdq %%xmm0,%%xmm0\n\t"
+ "paddd %%xmm1,%%xmm0\n\t"
+ "pshuflw $0xE,%%xmm0,%%xmm1\n\t"
+ "paddd %%xmm1,%%xmm0\n\t"
+ "movd %%xmm0,%[ret]\n\t"
+ "lea -64(%[ret],%[ret]),%[ret]\n\t"
+ "sub %[dc],%[ret]\n\t"
+ /*Although it looks like we're using 7 registers here, gcc can alias %[ret]
+ and %[dc] with some of the inputs, since for once we don't write to
+ them until after we're done using everything but %[buf].*/
+ :[ret]"=a"(ret),[dc]"=r"(dc),
+ [buf]"=m"(OC_ARRAY_OPERAND(ogg_int16_t,buf,16))
+ :[src]"r"(_src),[src4]"r"(_src+4*_ystride),
+ [ystride]"r"((ptrdiff_t)_ystride),[ystride3]"r"((ptrdiff_t)3*_ystride)
+ /*We have to use sub, so we actually clobber the condition codes for once.*/
+ :"cc"
+ );
+ *_dc=dc;
+ return ret;
+}
+
+#endif
diff --git a/thirdparty/libtheora/x86/sse2fdct.c b/thirdparty/libtheora/x86/sse2fdct.c
index 86c17d68b1..64c1d27372 100644
--- a/thirdparty/libtheora/x86/sse2fdct.c
+++ b/thirdparty/libtheora/x86/sse2fdct.c
@@ -13,12 +13,14 @@
/*$Id: fdct_ses2.c 14579 2008-03-12 06:42:40Z xiphmont $*/
#include <stddef.h>
#include "x86enc.h"
+#include "x86zigzag.h"
+#include "sse2trans.h"
#if defined(OC_X86_64_ASM)
-# define OC_FDCT8x8 \
+# define OC_FDCT_8x8 \
/*Note: xmm15={0}x8 and xmm14={-1}x8.*/ \
- "#OC_FDCT8x8\n\t" \
+ "#OC_FDCT_8x8\n\t" \
/*Stage 1:*/ \
"movdqa %%xmm0,%%xmm11\n\t" \
"movdqa %%xmm1,%%xmm10\n\t" \
@@ -349,81 +351,6 @@
"psubw %%xmm14,%%xmm10\n\t" \
"paddw %%xmm10,%%xmm7\n\t " \
-# define OC_TRANSPOSE8x8 \
- "#OC_TRANSPOSE8x8\n\t" \
- "movdqa %%xmm4,%%xmm8\n\t" \
- /*xmm4 = f3 e3 f2 e2 f1 e1 f0 e0*/ \
- "punpcklwd %%xmm5,%%xmm4\n\t" \
- /*xmm8 = f7 e7 f6 e6 f5 e5 f4 e4*/ \
- "punpckhwd %%xmm5,%%xmm8\n\t" \
- /*xmm5 is free.*/ \
- "movdqa %%xmm0,%%xmm5\n\t" \
- /*xmm0 = b3 a3 b2 a2 b1 a1 b0 a0*/ \
- "punpcklwd %%xmm1,%%xmm0\n\t" \
- /*xmm5 = b7 a7 b6 a6 b5 a5 b4 a4*/ \
- "punpckhwd %%xmm1,%%xmm5\n\t" \
- /*xmm1 is free.*/ \
- "movdqa %%xmm6,%%xmm1\n\t" \
- /*xmm6 = h3 g3 h2 g2 h1 g1 h0 g0*/ \
- "punpcklwd %%xmm7,%%xmm6\n\t" \
- /*xmm1 = h7 g7 h6 g6 h5 g5 h4 g4*/ \
- "punpckhwd %%xmm7,%%xmm1\n\t" \
- /*xmm7 is free.*/ \
- "movdqa %%xmm2,%%xmm7\n\t" \
- /*xmm7 = d3 c3 d2 c2 d1 c1 d0 c0*/ \
- "punpcklwd %%xmm3,%%xmm7\n\t" \
- /*xmm2 = d7 c7 d6 c6 d5 c5 d4 c4*/ \
- "punpckhwd %%xmm3,%%xmm2\n\t" \
- /*xmm3 is free.*/ \
- "movdqa %%xmm0,%%xmm3\n\t" \
- /*xmm0 = d1 c1 b1 a1 d0 c0 b0 a0*/ \
- "punpckldq %%xmm7,%%xmm0\n\t" \
- /*xmm3 = d3 c3 b3 a3 d2 c2 b2 a2*/ \
- "punpckhdq %%xmm7,%%xmm3\n\t" \
- /*xmm7 is free.*/ \
- "movdqa %%xmm5,%%xmm7\n\t" \
- /*xmm5 = d5 c5 b5 a5 d4 c4 b4 a4*/ \
- "punpckldq %%xmm2,%%xmm5\n\t" \
- /*xmm7 = d7 c7 b7 a7 d6 c6 b6 a6*/ \
- "punpckhdq %%xmm2,%%xmm7\n\t" \
- /*xmm2 is free.*/ \
- "movdqa %%xmm4,%%xmm2\n\t" \
- /*xmm2 = h1 g1 f1 e1 h0 g0 f0 e0*/ \
- "punpckldq %%xmm6,%%xmm2\n\t" \
- /*xmm4 = h3 g3 f3 e3 h2 g2 f2 e2*/ \
- "punpckhdq %%xmm6,%%xmm4\n\t" \
- /*xmm6 is free.*/ \
- "movdqa %%xmm8,%%xmm6\n\t" \
- /*xmm6 = h5 g5 f5 e5 h4 g4 f4 e4*/ \
- "punpckldq %%xmm1,%%xmm6\n\t" \
- /*xmm8 = h7 g7 f7 e7 h6 g6 f6 e6*/ \
- "punpckhdq %%xmm1,%%xmm8\n\t" \
- /*xmm1 is free.*/ \
- "movdqa %%xmm0,%%xmm1\n\t" \
- /*xmm0 = h0 g0 f0 e0 d0 c0 b0 a0*/ \
- "punpcklqdq %%xmm2,%%xmm0\n\t" \
- /*xmm1 = h1 g1 f1 e1 d1 c1 b1 a1*/ \
- "punpckhqdq %%xmm2,%%xmm1\n\t" \
- /*xmm2 is free.*/ \
- "movdqa %%xmm3,%%xmm2\n\t" \
- /*xmm2 = h2 g2 f2 e2 d2 c2 b2 a2*/ \
- "punpcklqdq %%xmm4,%%xmm2\n\t" \
- /*xmm3 = h3 g3 f3 e3 d3 c3 b3 a3*/ \
- "punpckhqdq %%xmm4,%%xmm3\n\t" \
- /*xmm4 is free.*/ \
- "movdqa %%xmm5,%%xmm4\n\t" \
- /*xmm4 = h4 g4 f4 e4 d4 c4 b4 a4*/ \
- "punpcklqdq %%xmm6,%%xmm4\n\t" \
- /*xmm5 = h5 g5 f5 e5 d5 c5 b5 a5*/ \
- "punpckhqdq %%xmm6,%%xmm5\n\t" \
- /*xmm6 is free.*/ \
- "movdqa %%xmm7,%%xmm6\n\t" \
- /*xmm6 = h6 g6 f6 e6 d6 c6 b6 a6*/ \
- "punpcklqdq %%xmm8,%%xmm6\n\t" \
- /*xmm7 = h7 g7 f7 e7 d7 c7 b7 a7*/ \
- "punpckhqdq %%xmm8,%%xmm7\n\t" \
- /*xmm8 is free.*/ \
-
/*SSE2 implementation of the fDCT for x86-64 only.
Because of the 8 extra XMM registers on x86-64, this version can operate
without any temporary stack access at all.*/
@@ -482,12 +409,10 @@ void oc_enc_fdct8x8_x86_64sse2(ogg_int16_t _y[64],const ogg_int16_t _x[64]){
/*xmm1=_x[15...8]-{0,0,0,0,0,0,0,1}*/
"psubw %%xmm9,%%xmm1\n\t"
/*Transform columns.*/
- OC_FDCT8x8
+ OC_FDCT_8x8
/*Transform rows.*/
- OC_TRANSPOSE8x8
- OC_FDCT8x8
- /*TODO: zig-zag ordering?*/
- OC_TRANSPOSE8x8
+ OC_TRANSPOSE_8x8
+ OC_FDCT_8x8
/*xmm14={-2,-2,-2,-2,-2,-2,-2,-2}*/
"paddw %%xmm14,%%xmm14\n\t"
"psubw %%xmm14,%%xmm0\n\t"
@@ -506,15 +431,19 @@ void oc_enc_fdct8x8_x86_64sse2(ogg_int16_t _y[64],const ogg_int16_t _x[64]){
"psubw %%xmm14,%%xmm7\n\t"
"psraw $2,%%xmm6\n\t"
"psraw $2,%%xmm7\n\t"
- /*Store the result.*/
- "movdqa %%xmm0,0x00(%[y])\n\t"
- "movdqa %%xmm1,0x10(%[y])\n\t"
- "movdqa %%xmm2,0x20(%[y])\n\t"
- "movdqa %%xmm3,0x30(%[y])\n\t"
- "movdqa %%xmm4,0x40(%[y])\n\t"
- "movdqa %%xmm5,0x50(%[y])\n\t"
- "movdqa %%xmm6,0x60(%[y])\n\t"
- "movdqa %%xmm7,0x70(%[y])\n\t"
+ /*Transpose, zig-zag, and store the result.*/
+  /*We could probably do better using SSSE3's palignr, but re-using the
+    MMXEXT version will do for now.*/
+#define OC_ZZ_LOAD_ROW_LO(_row,_reg) \
+ "movdq2q %%xmm"#_row","_reg"\n\t" \
+
+#define OC_ZZ_LOAD_ROW_HI(_row,_reg) \
+ "punpckhqdq %%xmm"#_row",%%xmm"#_row"\n\t" \
+ "movdq2q %%xmm"#_row","_reg"\n\t" \
+
+ OC_TRANSPOSE_ZIG_ZAG_MMXEXT
+#undef OC_ZZ_LOAD_ROW_LO
+#undef OC_ZZ_LOAD_ROW_HI
:[a]"=&r"(a)
:[y]"r"(_y),[x]"r"(_x)
:"memory"
diff --git a/thirdparty/libtheora/x86/sse2idct.c b/thirdparty/libtheora/x86/sse2idct.c
new file mode 100644
index 0000000000..4597ab074f
--- /dev/null
+++ b/thirdparty/libtheora/x86/sse2idct.c
@@ -0,0 +1,456 @@
+/********************************************************************
+ * *
+ * THIS FILE IS PART OF THE OggTheora SOFTWARE CODEC SOURCE CODE. *
+ * USE, DISTRIBUTION AND REPRODUCTION OF THIS LIBRARY SOURCE IS *
+ * GOVERNED BY A BSD-STYLE SOURCE LICENSE INCLUDED WITH THIS SOURCE *
+ * IN 'COPYING'. PLEASE READ THESE TERMS BEFORE DISTRIBUTING. *
+ * *
+ * THE Theora SOURCE CODE IS COPYRIGHT (C) 2002-2009 *
+ * by the Xiph.Org Foundation and contributors http://www.xiph.org/ *
+ * *
+ ********************************************************************
+
+ function:
+ last mod: $Id: mmxidct.c 16503 2009-08-22 18:14:02Z giles $
+
+ ********************************************************************/
+
+/*SSE2 acceleration of Theora's iDCT.*/
+#include "x86int.h"
+#include "sse2trans.h"
+#include "../dct.h"
+
+#if defined(OC_X86_ASM)
+
+/*A table of constants used by the iDCT routines.*/
+const unsigned short __attribute__((aligned(16),used)) OC_IDCT_CONSTS[64]={
+ 8, 8, 8, 8, 8, 8, 8, 8,
+ OC_C1S7,OC_C1S7,OC_C1S7,OC_C1S7,OC_C1S7,OC_C1S7,OC_C1S7,OC_C1S7,
+ OC_C2S6,OC_C2S6,OC_C2S6,OC_C2S6,OC_C2S6,OC_C2S6,OC_C2S6,OC_C2S6,
+ OC_C3S5,OC_C3S5,OC_C3S5,OC_C3S5,OC_C3S5,OC_C3S5,OC_C3S5,OC_C3S5,
+ OC_C4S4,OC_C4S4,OC_C4S4,OC_C4S4,OC_C4S4,OC_C4S4,OC_C4S4,OC_C4S4,
+ OC_C5S3,OC_C5S3,OC_C5S3,OC_C5S3,OC_C5S3,OC_C5S3,OC_C5S3,OC_C5S3,
+ OC_C6S2,OC_C6S2,OC_C6S2,OC_C6S2,OC_C6S2,OC_C6S2,OC_C6S2,OC_C6S2,
+ OC_C7S1,OC_C7S1,OC_C7S1,OC_C7S1,OC_C7S1,OC_C7S1,OC_C7S1,OC_C7S1
+};
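+
+/*Each OC_CkSj constant above is cos(k*pi/16)==sin(j*pi/16) in Q16 fixed
+  point (e.g. OC_C4S4==46341~=0.70711*65536), so a pmulhw against one of
+  these rows is a fixed-point multiply by that cosine (with a paddw of the
+  original value added back where the constant exceeds 0x7FFF).  The leading
+  row of 8's is the rounding bias added before the final psraw by 4 in
+  OC_IDCT_8x8_D_STORE.  (Illustrative note; the constants themselves come
+  from dct.h.)*/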
+
+
+/*Performs the first three stages of the iDCT.
+ xmm2, xmm6, xmm3, and xmm5 must contain the corresponding rows of the input
+ (accessed in that order).
+ The remaining rows must be in _x at their corresponding locations.
+ On output, xmm7 down to xmm4 contain rows 0 through 3, and xmm0 up to xmm3
+ contain rows 4 through 7.*/
+#define OC_IDCT_8x8_ABC(_x) \
+ "#OC_IDCT_8x8_ABC\n\t" \
+ /*Stage 1:*/ \
+ /*2-3 rotation by 6pi/16. \
+ xmm4=xmm7=C6, xmm0=xmm1=C2, xmm2=X2, xmm6=X6.*/ \
+ "movdqa "OC_MEM_OFFS(0x20,c)",%%xmm1\n\t" \
+ "movdqa "OC_MEM_OFFS(0x60,c)",%%xmm4\n\t" \
+ "movdqa %%xmm1,%%xmm0\n\t" \
+ "pmulhw %%xmm2,%%xmm1\n\t" \
+ "movdqa %%xmm4,%%xmm7\n\t" \
+ "pmulhw %%xmm6,%%xmm0\n\t" \
+ "pmulhw %%xmm2,%%xmm7\n\t" \
+ "pmulhw %%xmm6,%%xmm4\n\t" \
+ "paddw %%xmm6,%%xmm0\n\t" \
+ "movdqa "OC_MEM_OFFS(0x30,c)",%%xmm6\n\t" \
+ "paddw %%xmm1,%%xmm2\n\t" \
+ "psubw %%xmm0,%%xmm7\n\t" \
+ "movdqa %%xmm7,"OC_MEM_OFFS(0x00,buf)"\n\t" \
+ "paddw %%xmm4,%%xmm2\n\t" \
+ "movdqa "OC_MEM_OFFS(0x50,c)",%%xmm4\n\t" \
+ "movdqa %%xmm2,"OC_MEM_OFFS(0x10,buf)"\n\t" \
+ /*5-6 rotation by 3pi/16. \
+ xmm4=xmm2=C5, xmm1=xmm6=C3, xmm3=X3, xmm5=X5.*/ \
+ "movdqa %%xmm4,%%xmm2\n\t" \
+ "movdqa %%xmm6,%%xmm1\n\t" \
+ "pmulhw %%xmm3,%%xmm4\n\t" \
+ "pmulhw %%xmm5,%%xmm1\n\t" \
+ "pmulhw %%xmm3,%%xmm6\n\t" \
+ "pmulhw %%xmm5,%%xmm2\n\t" \
+ "paddw %%xmm3,%%xmm4\n\t" \
+ "paddw %%xmm5,%%xmm3\n\t" \
+ "paddw %%xmm6,%%xmm3\n\t" \
+ "movdqa "OC_MEM_OFFS(0x70,_x)",%%xmm6\n\t" \
+ "paddw %%xmm5,%%xmm1\n\t" \
+ "movdqa "OC_MEM_OFFS(0x10,_x)",%%xmm5\n\t" \
+ "paddw %%xmm3,%%xmm2\n\t" \
+ "movdqa "OC_MEM_OFFS(0x70,c)",%%xmm3\n\t" \
+ "psubw %%xmm4,%%xmm1\n\t" \
+ "movdqa "OC_MEM_OFFS(0x10,c)",%%xmm4\n\t" \
+ /*4-7 rotation by 7pi/16. \
+ xmm4=xmm7=C1, xmm3=xmm0=C7, xmm5=X1, xmm6=X7.*/ \
+ "movdqa %%xmm3,%%xmm0\n\t" \
+ "movdqa %%xmm4,%%xmm7\n\t" \
+ "pmulhw %%xmm5,%%xmm3\n\t" \
+ "pmulhw %%xmm5,%%xmm7\n\t" \
+ "pmulhw %%xmm6,%%xmm4\n\t" \
+ "pmulhw %%xmm6,%%xmm0\n\t" \
+ "paddw %%xmm6,%%xmm4\n\t" \
+ "movdqa "OC_MEM_OFFS(0x40,_x)",%%xmm6\n\t" \
+ "paddw %%xmm5,%%xmm7\n\t" \
+ "psubw %%xmm4,%%xmm3\n\t" \
+ "movdqa "OC_MEM_OFFS(0x40,c)",%%xmm4\n\t" \
+ "paddw %%xmm7,%%xmm0\n\t" \
+ "movdqa "OC_MEM_OFFS(0x00,_x)",%%xmm7\n\t" \
+ /*0-1 butterfly. \
+ xmm4=xmm5=C4, xmm7=X0, xmm6=X4.*/ \
+ "paddw %%xmm7,%%xmm6\n\t" \
+ "movdqa %%xmm4,%%xmm5\n\t" \
+ "pmulhw %%xmm6,%%xmm4\n\t" \
+ "paddw %%xmm7,%%xmm7\n\t" \
+ "psubw %%xmm6,%%xmm7\n\t" \
+ "paddw %%xmm6,%%xmm4\n\t" \
+ /*Stage 2:*/ \
+ /*4-5 butterfly: xmm3=t[4], xmm1=t[5] \
+ 7-6 butterfly: xmm2=t[6], xmm0=t[7]*/ \
+ "movdqa %%xmm3,%%xmm6\n\t" \
+ "paddw %%xmm1,%%xmm3\n\t" \
+ "psubw %%xmm1,%%xmm6\n\t" \
+ "movdqa %%xmm5,%%xmm1\n\t" \
+ "pmulhw %%xmm7,%%xmm5\n\t" \
+ "paddw %%xmm7,%%xmm5\n\t" \
+ "movdqa %%xmm0,%%xmm7\n\t" \
+ "paddw %%xmm2,%%xmm0\n\t" \
+ "psubw %%xmm2,%%xmm7\n\t" \
+ "movdqa %%xmm1,%%xmm2\n\t" \
+ "pmulhw %%xmm6,%%xmm1\n\t" \
+ "pmulhw %%xmm7,%%xmm2\n\t" \
+ "paddw %%xmm6,%%xmm1\n\t" \
+ "movdqa "OC_MEM_OFFS(0x00,buf)",%%xmm6\n\t" \
+ "paddw %%xmm7,%%xmm2\n\t" \
+ "movdqa "OC_MEM_OFFS(0x10,buf)",%%xmm7\n\t" \
+ /*Stage 3: \
+ 6-5 butterfly: xmm1=t[5], xmm2=t[6] -> xmm1=t[6]+t[5], xmm2=t[6]-t[5] \
+ 0-3 butterfly: xmm4=t[0], xmm7=t[3] -> xmm7=t[0]+t[3], xmm4=t[0]-t[3] \
+ 1-2 butterfly: xmm5=t[1], xmm6=t[2] -> xmm6=t[1]+t[2], xmm5=t[1]-t[2]*/ \
+ "paddw %%xmm2,%%xmm1\n\t" \
+ "paddw %%xmm5,%%xmm6\n\t" \
+ "paddw %%xmm4,%%xmm7\n\t" \
+ "paddw %%xmm2,%%xmm2\n\t" \
+ "paddw %%xmm4,%%xmm4\n\t" \
+ "paddw %%xmm5,%%xmm5\n\t" \
+ "psubw %%xmm1,%%xmm2\n\t" \
+ "psubw %%xmm7,%%xmm4\n\t" \
+ "psubw %%xmm6,%%xmm5\n\t" \
+
+/*Performs the last stage of the iDCT.
+ On input, xmm7 down to xmm4 contain rows 0 through 3, and xmm0 up to xmm3
+ contain rows 4 through 7.
+ On output, xmm0 through xmm7 contain the corresponding rows.*/
+#define OC_IDCT_8x8_D \
+ "#OC_IDCT_8x8_D\n\t" \
+ /*Stage 4: \
+ 0-7 butterfly: xmm7=t[0], xmm0=t[7] -> xmm0=t[0]+t[7], xmm7=t[0]-t[7] \
+ 1-6 butterfly: xmm6=t[1], xmm1=t[6] -> xmm1=t[1]+t[6], xmm6=t[1]-t[6] \
+ 2-5 butterfly: xmm5=t[2], xmm2=t[5] -> xmm2=t[2]+t[5], xmm5=t[2]-t[5] \
+ 3-4 butterfly: xmm4=t[3], xmm3=t[4] -> xmm3=t[3]+t[4], xmm4=t[3]-t[4]*/ \
+ "psubw %%xmm0,%%xmm7\n\t" \
+ "psubw %%xmm1,%%xmm6\n\t" \
+ "psubw %%xmm2,%%xmm5\n\t" \
+ "psubw %%xmm3,%%xmm4\n\t" \
+ "paddw %%xmm0,%%xmm0\n\t" \
+ "paddw %%xmm1,%%xmm1\n\t" \
+ "paddw %%xmm2,%%xmm2\n\t" \
+ "paddw %%xmm3,%%xmm3\n\t" \
+ "paddw %%xmm7,%%xmm0\n\t" \
+ "paddw %%xmm6,%%xmm1\n\t" \
+ "paddw %%xmm5,%%xmm2\n\t" \
+ "paddw %%xmm4,%%xmm3\n\t" \
+
+/*Performs the last stage of the iDCT.
+ On input, xmm7 down to xmm4 contain rows 0 through 3, and xmm0 up to xmm3
+ contain rows 4 through 7.
+ On output, xmm0 through xmm7 contain the corresponding rows.*/
+#define OC_IDCT_8x8_D_STORE \
+ "#OC_IDCT_8x8_D_STORE\n\t" \
+ /*Stage 4: \
+ 0-7 butterfly: xmm7=t[0], xmm0=t[7] -> xmm0=t[0]+t[7], xmm7=t[0]-t[7] \
+ 1-6 butterfly: xmm6=t[1], xmm1=t[6] -> xmm1=t[1]+t[6], xmm6=t[1]-t[6] \
+ 2-5 butterfly: xmm5=t[2], xmm2=t[5] -> xmm2=t[2]+t[5], xmm5=t[2]-t[5] \
+ 3-4 butterfly: xmm4=t[3], xmm3=t[4] -> xmm3=t[3]+t[4], xmm4=t[3]-t[4]*/ \
+ "psubw %%xmm3,%%xmm4\n\t" \
+ "movdqa %%xmm4,"OC_MEM_OFFS(0x40,y)"\n\t" \
+ "movdqa "OC_MEM_OFFS(0x00,c)",%%xmm4\n\t" \
+ "psubw %%xmm0,%%xmm7\n\t" \
+ "psubw %%xmm1,%%xmm6\n\t" \
+ "psubw %%xmm2,%%xmm5\n\t" \
+ "paddw %%xmm4,%%xmm7\n\t" \
+ "paddw %%xmm4,%%xmm6\n\t" \
+ "paddw %%xmm4,%%xmm5\n\t" \
+ "paddw "OC_MEM_OFFS(0x40,y)",%%xmm4\n\t" \
+ "paddw %%xmm0,%%xmm0\n\t" \
+ "paddw %%xmm1,%%xmm1\n\t" \
+ "paddw %%xmm2,%%xmm2\n\t" \
+ "paddw %%xmm3,%%xmm3\n\t" \
+ "paddw %%xmm7,%%xmm0\n\t" \
+ "paddw %%xmm6,%%xmm1\n\t" \
+ "psraw $4,%%xmm0\n\t" \
+ "paddw %%xmm5,%%xmm2\n\t" \
+ "movdqa %%xmm0,"OC_MEM_OFFS(0x00,y)"\n\t" \
+ "psraw $4,%%xmm1\n\t" \
+ "paddw %%xmm4,%%xmm3\n\t" \
+ "movdqa %%xmm1,"OC_MEM_OFFS(0x10,y)"\n\t" \
+ "psraw $4,%%xmm2\n\t" \
+ "movdqa %%xmm2,"OC_MEM_OFFS(0x20,y)"\n\t" \
+ "psraw $4,%%xmm3\n\t" \
+ "movdqa %%xmm3,"OC_MEM_OFFS(0x30,y)"\n\t" \
+ "psraw $4,%%xmm4\n\t" \
+ "movdqa %%xmm4,"OC_MEM_OFFS(0x40,y)"\n\t" \
+ "psraw $4,%%xmm5\n\t" \
+ "movdqa %%xmm5,"OC_MEM_OFFS(0x50,y)"\n\t" \
+ "psraw $4,%%xmm6\n\t" \
+ "movdqa %%xmm6,"OC_MEM_OFFS(0x60,y)"\n\t" \
+ "psraw $4,%%xmm7\n\t" \
+ "movdqa %%xmm7,"OC_MEM_OFFS(0x70,y)"\n\t" \
+
+static void oc_idct8x8_slow_sse2(ogg_int16_t _y[64],ogg_int16_t _x[64]){
+ OC_ALIGN16(ogg_int16_t buf[16]);
+ int i;
+ /*This routine accepts an 8x8 matrix pre-transposed.*/
+ __asm__ __volatile__(
+ /*Load rows 2, 3, 5, and 6 for the first stage of the iDCT.*/
+ "movdqa "OC_MEM_OFFS(0x20,x)",%%xmm2\n\t"
+ "movdqa "OC_MEM_OFFS(0x60,x)",%%xmm6\n\t"
+ "movdqa "OC_MEM_OFFS(0x30,x)",%%xmm3\n\t"
+ "movdqa "OC_MEM_OFFS(0x50,x)",%%xmm5\n\t"
+ OC_IDCT_8x8_ABC(x)
+ OC_IDCT_8x8_D
+ OC_TRANSPOSE_8x8
+ /*Clear out rows 0, 1, 4, and 7 for the first stage of the iDCT.*/
+ "movdqa %%xmm7,"OC_MEM_OFFS(0x70,y)"\n\t"
+ "movdqa %%xmm4,"OC_MEM_OFFS(0x40,y)"\n\t"
+ "movdqa %%xmm1,"OC_MEM_OFFS(0x10,y)"\n\t"
+ "movdqa %%xmm0,"OC_MEM_OFFS(0x00,y)"\n\t"
+ OC_IDCT_8x8_ABC(y)
+ OC_IDCT_8x8_D_STORE
+ :[buf]"=m"(OC_ARRAY_OPERAND(ogg_int16_t,buf,16)),
+ [y]"=m"(OC_ARRAY_OPERAND(ogg_int16_t,_y,64))
+ :[x]"m"(OC_CONST_ARRAY_OPERAND(ogg_int16_t,_x,64)),
+ [c]"m"(OC_CONST_ARRAY_OPERAND(ogg_int16_t,OC_IDCT_CONSTS,128))
+ );
+ __asm__ __volatile__("pxor %%xmm0,%%xmm0\n\t"::);
+ /*Clear input data for next block (decoder only).*/
+ for(i=0;i<2;i++){
+ __asm__ __volatile__(
+ "movdqa %%xmm0,"OC_MEM_OFFS(0x00,x)"\n\t"
+ "movdqa %%xmm0,"OC_MEM_OFFS(0x10,x)"\n\t"
+ "movdqa %%xmm0,"OC_MEM_OFFS(0x20,x)"\n\t"
+ "movdqa %%xmm0,"OC_MEM_OFFS(0x30,x)"\n\t"
+ :[x]"=m"(OC_ARRAY_OPERAND(ogg_int16_t,_x+i*32,32))
+ );
+ }
+}
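+
+/*As a sketch of the overall structure: the 2-D iDCT is separable, so the
+  routine above runs the 1-D transform once per direction with
+  OC_TRANSPOSE_8x8 in between.  Rows 0, 1, 4, and 7 of the intermediate
+  result are stored to _y after the transpose so that the second
+  OC_IDCT_8x8_ABC pass can reload them through its memory operand instead of
+  tying up registers.*/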
+
+/*For the first step of the 10-coefficient version of the 8x8 iDCT, we only
+ need to work with four columns at a time.
+ Doing this in MMX is faster on processors with a 64-bit data path.*/
+#define OC_IDCT_8x8_10_MMX \
+ "#OC_IDCT_8x8_10_MMX\n\t" \
+ /*Stage 1:*/ \
+ /*2-3 rotation by 6pi/16. \
+ mm7=C6, mm6=C2, mm2=X2, X6=0.*/ \
+ "movq "OC_MEM_OFFS(0x60,c)",%%mm7\n\t" \
+ "movq "OC_MEM_OFFS(0x20,c)",%%mm6\n\t" \
+ "pmulhw %%mm2,%%mm6\n\t" \
+ "pmulhw %%mm2,%%mm7\n\t" \
+ "movq "OC_MEM_OFFS(0x50,c)",%%mm5\n\t" \
+ "paddw %%mm6,%%mm2\n\t" \
+ "movq %%mm2,"OC_MEM_OFFS(0x10,buf)"\n\t" \
+ "movq "OC_MEM_OFFS(0x30,c)",%%mm2\n\t" \
+ "movq %%mm7,"OC_MEM_OFFS(0x00,buf)"\n\t" \
+ /*5-6 rotation by 3pi/16. \
+ mm5=C5, mm2=C3, mm3=X3, X5=0.*/ \
+ "pmulhw %%mm3,%%mm5\n\t" \
+ "pmulhw %%mm3,%%mm2\n\t" \
+ "movq "OC_MEM_OFFS(0x10,c)",%%mm7\n\t" \
+ "paddw %%mm3,%%mm5\n\t" \
+ "paddw %%mm3,%%mm2\n\t" \
+ "movq "OC_MEM_OFFS(0x70,c)",%%mm3\n\t" \
+ /*4-7 rotation by 7pi/16. \
+ mm7=C1, mm3=C7, mm1=X1, X7=0.*/ \
+ "pmulhw %%mm1,%%mm3\n\t" \
+ "pmulhw %%mm1,%%mm7\n\t" \
+ "movq "OC_MEM_OFFS(0x40,c)",%%mm4\n\t" \
+ "movq %%mm3,%%mm6\n\t" \
+ "paddw %%mm1,%%mm7\n\t" \
+ /*0-1 butterfly. \
+ mm4=C4, mm0=X0, X4=0.*/ \
+ /*Stage 2:*/ \
+ /*4-5 butterfly: mm3=t[4], mm5=t[5] \
+ 7-6 butterfly: mm2=t[6], mm7=t[7]*/ \
+ "psubw %%mm5,%%mm3\n\t" \
+ "paddw %%mm5,%%mm6\n\t" \
+ "movq %%mm4,%%mm1\n\t" \
+ "pmulhw %%mm0,%%mm4\n\t" \
+ "paddw %%mm0,%%mm4\n\t" \
+ "movq %%mm7,%%mm0\n\t" \
+ "movq %%mm4,%%mm5\n\t" \
+ "paddw %%mm2,%%mm0\n\t" \
+ "psubw %%mm2,%%mm7\n\t" \
+ "movq %%mm1,%%mm2\n\t" \
+ "pmulhw %%mm6,%%mm1\n\t" \
+ "pmulhw %%mm7,%%mm2\n\t" \
+ "paddw %%mm6,%%mm1\n\t" \
+ "movq "OC_MEM_OFFS(0x00,buf)",%%mm6\n\t" \
+ "paddw %%mm7,%%mm2\n\t" \
+ "movq "OC_MEM_OFFS(0x10,buf)",%%mm7\n\t" \
+ /*Stage 3: \
+ 6-5 butterfly: mm1=t[5], mm2=t[6] -> mm1=t[6]+t[5], mm2=t[6]-t[5] \
+ 0-3 butterfly: mm4=t[0], mm7=t[3] -> mm7=t[0]+t[3], mm4=t[0]-t[3] \
+ 1-2 butterfly: mm5=t[1], mm6=t[2] -> mm6=t[1]+t[2], mm5=t[1]-t[2]*/ \
+ "paddw %%mm2,%%mm1\n\t" \
+ "paddw %%mm5,%%mm6\n\t" \
+ "paddw %%mm4,%%mm7\n\t" \
+ "paddw %%mm2,%%mm2\n\t" \
+ "paddw %%mm4,%%mm4\n\t" \
+ "paddw %%mm5,%%mm5\n\t" \
+ "psubw %%mm1,%%mm2\n\t" \
+ "psubw %%mm7,%%mm4\n\t" \
+ "psubw %%mm6,%%mm5\n\t" \
+ /*Stage 4: \
+ 0-7 butterfly: mm7=t[0], mm0=t[7] -> mm0=t[0]+t[7], mm7=t[0]-t[7] \
+ 1-6 butterfly: mm6=t[1], mm1=t[6] -> mm1=t[1]+t[6], mm6=t[1]-t[6] \
+ 2-5 butterfly: mm5=t[2], mm2=t[5] -> mm2=t[2]+t[5], mm5=t[2]-t[5] \
+ 3-4 butterfly: mm4=t[3], mm3=t[4] -> mm3=t[3]+t[4], mm4=t[3]-t[4]*/ \
+ "psubw %%mm0,%%mm7\n\t" \
+ "psubw %%mm1,%%mm6\n\t" \
+ "psubw %%mm2,%%mm5\n\t" \
+ "psubw %%mm3,%%mm4\n\t" \
+ "paddw %%mm0,%%mm0\n\t" \
+ "paddw %%mm1,%%mm1\n\t" \
+ "paddw %%mm2,%%mm2\n\t" \
+ "paddw %%mm3,%%mm3\n\t" \
+ "paddw %%mm7,%%mm0\n\t" \
+ "paddw %%mm6,%%mm1\n\t" \
+ "paddw %%mm5,%%mm2\n\t" \
+ "paddw %%mm4,%%mm3\n\t" \
+
+#define OC_IDCT_8x8_10_ABC \
+ "#OC_IDCT_8x8_10_ABC\n\t" \
+ /*Stage 1:*/ \
+ /*2-3 rotation by 6pi/16. \
+ xmm7=C6, xmm6=C2, xmm2=X2, X6=0.*/ \
+ "movdqa "OC_MEM_OFFS(0x60,c)",%%xmm7\n\t" \
+ "movdqa "OC_MEM_OFFS(0x20,c)",%%xmm6\n\t" \
+ "pmulhw %%xmm2,%%xmm6\n\t" \
+ "pmulhw %%xmm2,%%xmm7\n\t" \
+ "movdqa "OC_MEM_OFFS(0x50,c)",%%xmm5\n\t" \
+ "paddw %%xmm6,%%xmm2\n\t" \
+ "movdqa %%xmm2,"OC_MEM_OFFS(0x10,buf)"\n\t" \
+ "movdqa "OC_MEM_OFFS(0x30,c)",%%xmm2\n\t" \
+ "movdqa %%xmm7,"OC_MEM_OFFS(0x00,buf)"\n\t" \
+ /*5-6 rotation by 3pi/16. \
+ xmm5=C5, xmm2=C3, xmm3=X3, X5=0.*/ \
+ "pmulhw %%xmm3,%%xmm5\n\t" \
+ "pmulhw %%xmm3,%%xmm2\n\t" \
+ "movdqa "OC_MEM_OFFS(0x10,c)",%%xmm7\n\t" \
+ "paddw %%xmm3,%%xmm5\n\t" \
+ "paddw %%xmm3,%%xmm2\n\t" \
+ "movdqa "OC_MEM_OFFS(0x70,c)",%%xmm3\n\t" \
+ /*4-7 rotation by 7pi/16. \
+ xmm7=C1, xmm3=C7, xmm1=X1, X7=0.*/ \
+ "pmulhw %%xmm1,%%xmm3\n\t" \
+ "pmulhw %%xmm1,%%xmm7\n\t" \
+ "movdqa "OC_MEM_OFFS(0x40,c)",%%xmm4\n\t" \
+ "movdqa %%xmm3,%%xmm6\n\t" \
+ "paddw %%xmm1,%%xmm7\n\t" \
+ /*0-1 butterfly. \
+ xmm4=C4, xmm0=X0, X4=0.*/ \
+ /*Stage 2:*/ \
+ /*4-5 butterfly: xmm3=t[4], xmm5=t[5] \
+ 7-6 butterfly: xmm2=t[6], xmm7=t[7]*/ \
+ "psubw %%xmm5,%%xmm3\n\t" \
+ "paddw %%xmm5,%%xmm6\n\t" \
+ "movdqa %%xmm4,%%xmm1\n\t" \
+ "pmulhw %%xmm0,%%xmm4\n\t" \
+ "paddw %%xmm0,%%xmm4\n\t" \
+ "movdqa %%xmm7,%%xmm0\n\t" \
+ "movdqa %%xmm4,%%xmm5\n\t" \
+ "paddw %%xmm2,%%xmm0\n\t" \
+ "psubw %%xmm2,%%xmm7\n\t" \
+ "movdqa %%xmm1,%%xmm2\n\t" \
+ "pmulhw %%xmm6,%%xmm1\n\t" \
+ "pmulhw %%xmm7,%%xmm2\n\t" \
+ "paddw %%xmm6,%%xmm1\n\t" \
+ "movdqa "OC_MEM_OFFS(0x00,buf)",%%xmm6\n\t" \
+ "paddw %%xmm7,%%xmm2\n\t" \
+ "movdqa "OC_MEM_OFFS(0x10,buf)",%%xmm7\n\t" \
+ /*Stage 3: \
+ 6-5 butterfly: xmm1=t[5], xmm2=t[6] -> xmm1=t[6]+t[5], xmm2=t[6]-t[5] \
+ 0-3 butterfly: xmm4=t[0], xmm7=t[3] -> xmm7=t[0]+t[3], xmm4=t[0]-t[3] \
+ 1-2 butterfly: xmm5=t[1], xmm6=t[2] -> xmm6=t[1]+t[2], xmm5=t[1]-t[2]*/ \
+ "paddw %%xmm2,%%xmm1\n\t" \
+ "paddw %%xmm5,%%xmm6\n\t" \
+ "paddw %%xmm4,%%xmm7\n\t" \
+ "paddw %%xmm2,%%xmm2\n\t" \
+ "paddw %%xmm4,%%xmm4\n\t" \
+ "paddw %%xmm5,%%xmm5\n\t" \
+ "psubw %%xmm1,%%xmm2\n\t" \
+ "psubw %%xmm7,%%xmm4\n\t" \
+ "psubw %%xmm6,%%xmm5\n\t" \
+
+static void oc_idct8x8_10_sse2(ogg_int16_t _y[64],ogg_int16_t _x[64]){
+ OC_ALIGN16(ogg_int16_t buf[16]);
+ /*This routine accepts an 8x8 matrix pre-transposed.*/
+ __asm__ __volatile__(
+ "movq "OC_MEM_OFFS(0x20,x)",%%mm2\n\t"
+ "movq "OC_MEM_OFFS(0x30,x)",%%mm3\n\t"
+ "movq "OC_MEM_OFFS(0x10,x)",%%mm1\n\t"
+ "movq "OC_MEM_OFFS(0x00,x)",%%mm0\n\t"
+ OC_IDCT_8x8_10_MMX
+ OC_TRANSPOSE_8x4_MMX2SSE
+ OC_IDCT_8x8_10_ABC
+ OC_IDCT_8x8_D_STORE
+ :[buf]"=m"(OC_ARRAY_OPERAND(short,buf,16)),
+ [y]"=m"(OC_ARRAY_OPERAND(ogg_int16_t,_y,64))
+ :[x]"m"OC_CONST_ARRAY_OPERAND(ogg_int16_t,_x,64),
+ [c]"m"(OC_CONST_ARRAY_OPERAND(ogg_int16_t,OC_IDCT_CONSTS,128))
+ );
+ /*Clear input data for next block (decoder only).*/
+ __asm__ __volatile__(
+ "pxor %%mm0,%%mm0\n\t"
+ "movq %%mm0,"OC_MEM_OFFS(0x00,x)"\n\t"
+ "movq %%mm0,"OC_MEM_OFFS(0x10,x)"\n\t"
+ "movq %%mm0,"OC_MEM_OFFS(0x20,x)"\n\t"
+ "movq %%mm0,"OC_MEM_OFFS(0x30,x)"\n\t"
+ :[x]"+m"(OC_ARRAY_OPERAND(ogg_int16_t,_x,28))
+ );
+}
+
+/*Performs an inverse 8x8 Type-II DCT transform.
+  The input is assumed to be scaled by a factor of 4 relative to the
+  orthonormal version of the transform.*/
+void oc_idct8x8_sse2(ogg_int16_t _y[64],ogg_int16_t _x[64],int _last_zzi){
+ /*_last_zzi is subtly different from an actual count of the number of
+ coefficients we decoded for this block.
+ It contains the value of zzi BEFORE the final token in the block was
+ decoded.
+ In most cases this is an EOB token (the continuation of an EOB run from a
+ previous block counts), and so this is the same as the coefficient count.
+ However, in the case that the last token was NOT an EOB token, but filled
+ the block up with exactly 64 coefficients, _last_zzi will be less than 64.
+ Provided the last token was not a pure zero run, the minimum value it can
+ be is 46, and so that doesn't affect any of the cases in this routine.
+ However, if the last token WAS a pure zero run of length 63, then _last_zzi
+ will be 1 while the number of coefficients decoded is 64.
+ Thus, we will trigger the following special case, where the real
+ coefficient count would not.
+ Note also that a zero run of length 64 will give _last_zzi a value of 0,
+ but we still process the DC coefficient, which might have a non-zero value
+ due to DC prediction.
+ Although convoluted, this is arguably the correct behavior: it allows us to
+ use a smaller transform when the block ends with a long zero run instead
+ of a normal EOB token.
+ It could be smarter... multiple separate zero runs at the end of a block
+ will fool it, but an encoder that generates these really deserves what it
+ gets.
+ Needless to say we inherited this approach from VP3.*/
+ /*Then perform the iDCT.*/
+ if(_last_zzi<=10)oc_idct8x8_10_sse2(_y,_x);
+ else oc_idct8x8_slow_sse2(_y,_x);
+}
+
+#endif
diff --git a/thirdparty/libtheora/x86/sse2trans.h b/thirdparty/libtheora/x86/sse2trans.h
new file mode 100644
index 0000000000..e76da5140b
--- /dev/null
+++ b/thirdparty/libtheora/x86/sse2trans.h
@@ -0,0 +1,242 @@
+/********************************************************************
+ * *
+ * THIS FILE IS PART OF THE OggTheora SOFTWARE CODEC SOURCE CODE. *
+ * USE, DISTRIBUTION AND REPRODUCTION OF THIS LIBRARY SOURCE IS *
+ * GOVERNED BY A BSD-STYLE SOURCE LICENSE INCLUDED WITH THIS SOURCE *
+ * IN 'COPYING'. PLEASE READ THESE TERMS BEFORE DISTRIBUTING. *
+ * *
+ * THE Theora SOURCE CODE IS COPYRIGHT (C) 2002-2009 *
+ * by the Xiph.Org Foundation and contributors http://www.xiph.org/ *
+ * *
+ ********************************************************************
+
+ function:
+ last mod: $Id: sse2trans.h 15675 2009-02-06 09:43:27Z tterribe $
+
+ ********************************************************************/
+
+#if !defined(_x86_sse2trans_H)
+# define _x86_sse2trans_H (1)
+# include "x86int.h"
+
+# if defined(OC_X86_64_ASM)
+/*On x86-64 we can transpose in-place without spilling registers.
+ By clever choices of the order to apply the butterflies and the order of
+ their outputs, we can take the rows in order and output the columns in order
+ without any extra operations and using just one temporary register.*/
+# define OC_TRANSPOSE_8x8 \
+ "#OC_TRANSPOSE_8x8\n\t" \
+ "movdqa %%xmm4,%%xmm8\n\t" \
+ /*xmm4 = f3 e3 f2 e2 f1 e1 f0 e0*/ \
+ "punpcklwd %%xmm5,%%xmm4\n\t" \
+ /*xmm8 = f7 e7 f6 e6 f5 e5 f4 e4*/ \
+ "punpckhwd %%xmm5,%%xmm8\n\t" \
+ /*xmm5 is free.*/ \
+ "movdqa %%xmm0,%%xmm5\n\t" \
+ /*xmm0 = b3 a3 b2 a2 b1 a1 b0 a0*/ \
+ "punpcklwd %%xmm1,%%xmm0\n\t" \
+ /*xmm5 = b7 a7 b6 a6 b5 a5 b4 a4*/ \
+ "punpckhwd %%xmm1,%%xmm5\n\t" \
+ /*xmm1 is free.*/ \
+ "movdqa %%xmm6,%%xmm1\n\t" \
+ /*xmm6 = h3 g3 h2 g2 h1 g1 h0 g0*/ \
+ "punpcklwd %%xmm7,%%xmm6\n\t" \
+ /*xmm1 = h7 g7 h6 g6 h5 g5 h4 g4*/ \
+ "punpckhwd %%xmm7,%%xmm1\n\t" \
+ /*xmm7 is free.*/ \
+ "movdqa %%xmm2,%%xmm7\n\t" \
+ /*xmm2 = d7 c7 d6 c6 d5 c5 d4 c4*/ \
+ "punpckhwd %%xmm3,%%xmm2\n\t" \
+ /*xmm7 = d3 c3 d2 c2 d1 c1 d0 c0*/ \
+ "punpcklwd %%xmm3,%%xmm7\n\t" \
+ /*xmm3 is free.*/ \
+ "movdqa %%xmm0,%%xmm3\n\t" \
+ /*xmm0 = d1 c1 b1 a1 d0 c0 b0 a0*/ \
+ "punpckldq %%xmm7,%%xmm0\n\t" \
+ /*xmm3 = d3 c3 b3 a3 d2 c2 b2 a2*/ \
+ "punpckhdq %%xmm7,%%xmm3\n\t" \
+ /*xmm7 is free.*/ \
+ "movdqa %%xmm5,%%xmm7\n\t" \
+ /*xmm5 = d5 c5 b5 a5 d4 c4 b4 a4*/ \
+ "punpckldq %%xmm2,%%xmm5\n\t" \
+ /*xmm7 = d7 c7 b7 a7 d6 c6 b6 a6*/ \
+ "punpckhdq %%xmm2,%%xmm7\n\t" \
+ /*xmm2 is free.*/ \
+ "movdqa %%xmm4,%%xmm2\n\t" \
+ /*xmm4 = h3 g3 f3 e3 h2 g2 f2 e2*/ \
+ "punpckhdq %%xmm6,%%xmm4\n\t" \
+ /*xmm2 = h1 g1 f1 e1 h0 g0 f0 e0*/ \
+ "punpckldq %%xmm6,%%xmm2\n\t" \
+ /*xmm6 is free.*/ \
+ "movdqa %%xmm8,%%xmm6\n\t" \
+ /*xmm6 = h5 g5 f5 e5 h4 g4 f4 e4*/ \
+ "punpckldq %%xmm1,%%xmm6\n\t" \
+ /*xmm8 = h7 g7 f7 e7 h6 g6 f6 e6*/ \
+ "punpckhdq %%xmm1,%%xmm8\n\t" \
+ /*xmm1 is free.*/ \
+ "movdqa %%xmm0,%%xmm1\n\t" \
+ /*xmm0 = h0 g0 f0 e0 d0 c0 b0 a0*/ \
+ "punpcklqdq %%xmm2,%%xmm0\n\t" \
+ /*xmm1 = h1 g1 f1 e1 d1 c1 b1 a1*/ \
+ "punpckhqdq %%xmm2,%%xmm1\n\t" \
+ /*xmm2 is free.*/ \
+ "movdqa %%xmm3,%%xmm2\n\t" \
+ /*xmm3 = h3 g3 f3 e3 d3 c3 b3 a3*/ \
+ "punpckhqdq %%xmm4,%%xmm3\n\t" \
+ /*xmm2 = h2 g2 f2 e2 d2 c2 b2 a2*/ \
+ "punpcklqdq %%xmm4,%%xmm2\n\t" \
+ /*xmm4 is free.*/ \
+ "movdqa %%xmm5,%%xmm4\n\t" \
+ /*xmm5 = h5 g5 f5 e5 d5 c5 b5 a5*/ \
+ "punpckhqdq %%xmm6,%%xmm5\n\t" \
+ /*xmm4 = h4 g4 f4 e4 d4 c4 b4 a4*/ \
+ "punpcklqdq %%xmm6,%%xmm4\n\t" \
+ /*xmm6 is free.*/ \
+ "movdqa %%xmm7,%%xmm6\n\t" \
+ /*xmm7 = h7 g7 f7 e7 d7 c7 b7 a7*/ \
+ "punpckhqdq %%xmm8,%%xmm7\n\t" \
+ /*xmm6 = h6 g6 f6 e6 d6 c6 b6 a6*/ \
+ "punpcklqdq %%xmm8,%%xmm6\n\t" \
+ /*xmm8 is free.*/ \
+
+# else
+/*Otherwise, we need to spill some values to %[buf] temporarily.
+ Again, the butterflies are carefully arranged to get the columns to come out
+ in order, minimizing register spills and maximizing the delay between a load
+ and when the value loaded is actually used.*/
+# define OC_TRANSPOSE_8x8 \
+ "#OC_TRANSPOSE_8x8\n\t" \
+ /*buf[0] = a7 a6 a5 a4 a3 a2 a1 a0*/ \
+ "movdqa %%xmm0,"OC_MEM_OFFS(0x00,buf)"\n\t" \
+ /*xmm0 is free.*/ \
+ "movdqa %%xmm2,%%xmm0\n\t" \
+ /*xmm2 = d7 c7 d6 c6 d5 c5 d4 c4*/ \
+ "punpckhwd %%xmm3,%%xmm2\n\t" \
+ /*xmm0 = d3 c3 d2 c2 d1 c1 d0 c0*/ \
+ "punpcklwd %%xmm3,%%xmm0\n\t" \
+ /*xmm3 = a7 a6 a5 a4 a3 a2 a1 a0*/ \
+ "movdqa "OC_MEM_OFFS(0x00,buf)",%%xmm3\n\t" \
+ /*buf[1] = d7 c7 d6 c6 d5 c5 d4 c4*/ \
+ "movdqa %%xmm2,"OC_MEM_OFFS(0x10,buf)"\n\t" \
+ /*xmm2 is free.*/ \
+ "movdqa %%xmm6,%%xmm2\n\t" \
+ /*xmm6 = h3 g3 h2 g2 h1 g1 h0 g0*/ \
+ "punpcklwd %%xmm7,%%xmm6\n\t" \
+ /*xmm2 = h7 g7 h6 g6 h5 g5 h4 g4*/ \
+ "punpckhwd %%xmm7,%%xmm2\n\t" \
+ /*xmm7 is free.*/ \
+ "movdqa %%xmm4,%%xmm7\n\t" \
+ /*xmm4 = f3 e3 f2 e2 f1 e1 f0 e0*/ \
+ "punpcklwd %%xmm5,%%xmm4\n\t" \
+ /*xmm7 = f7 e7 f6 e6 f5 e5 f4 e4*/ \
+ "punpckhwd %%xmm5,%%xmm7\n\t" \
+ /*xmm5 is free.*/ \
+ "movdqa %%xmm3,%%xmm5\n\t" \
+ /*xmm3 = b3 a3 b2 a2 b1 a1 b0 a0*/ \
+ "punpcklwd %%xmm1,%%xmm3\n\t" \
+ /*xmm5 = b7 a7 b6 a6 b5 a5 b4 a4*/ \
+ "punpckhwd %%xmm1,%%xmm5\n\t" \
+ /*xmm1 is free.*/ \
+ "movdqa %%xmm7,%%xmm1\n\t" \
+ /*xmm7 = h5 g5 f5 e5 h4 g4 f4 e4*/ \
+ "punpckldq %%xmm2,%%xmm7\n\t" \
+ /*xmm1 = h7 g7 f7 e7 h6 g6 f6 e6*/ \
+ "punpckhdq %%xmm2,%%xmm1\n\t" \
+ /*xmm2 = d7 c7 d6 c6 d5 c5 d4 c4*/ \
+ "movdqa "OC_MEM_OFFS(0x10,buf)",%%xmm2\n\t" \
+ /*buf[0] = h7 g7 f7 e7 h6 g6 f6 e6*/ \
+ "movdqa %%xmm1,"OC_MEM_OFFS(0x00,buf)"\n\t" \
+ /*xmm1 is free.*/ \
+ "movdqa %%xmm3,%%xmm1\n\t" \
+ /*xmm3 = d3 c3 b3 a3 d2 c2 b2 a2*/ \
+ "punpckhdq %%xmm0,%%xmm3\n\t" \
+ /*xmm1 = d1 c1 b1 a1 d0 c0 b0 a0*/ \
+ "punpckldq %%xmm0,%%xmm1\n\t" \
+ /*xmm0 is free.*/ \
+ "movdqa %%xmm4,%%xmm0\n\t" \
+ /*xmm4 = h3 g3 f3 e3 h2 g2 f2 e2*/ \
+ "punpckhdq %%xmm6,%%xmm4\n\t" \
+ /*xmm0 = h1 g1 f1 e1 h0 g0 f0 e0*/ \
+ "punpckldq %%xmm6,%%xmm0\n\t" \
+ /*xmm6 is free.*/ \
+ "movdqa %%xmm5,%%xmm6\n\t" \
+ /*xmm5 = d5 c5 b5 a5 d4 c4 b4 a4*/ \
+ "punpckldq %%xmm2,%%xmm5\n\t" \
+ /*xmm6 = d7 c7 b7 a7 d6 c6 b6 a6*/ \
+ "punpckhdq %%xmm2,%%xmm6\n\t" \
+ /*xmm2 is free.*/ \
+ "movdqa %%xmm1,%%xmm2\n\t" \
+ /*xmm1 = h1 g1 f1 e1 d1 c1 b1 a1*/ \
+ "punpckhqdq %%xmm0,%%xmm1\n\t" \
+ /*xmm2 = h0 g0 f0 e0 d0 c0 b0 a0*/ \
+ "punpcklqdq %%xmm0,%%xmm2\n\t" \
+ /*xmm0 = h7 g7 f7 e7 h6 g6 f6 e6*/ \
+ "movdqa "OC_MEM_OFFS(0x00,buf)",%%xmm0\n\t" \
+ /*buf[1] = h0 g0 f0 e0 d0 c0 b0 a0*/ \
+ "movdqa %%xmm2,"OC_MEM_OFFS(0x10,buf)"\n\t" \
+ /*xmm2 is free.*/ \
+ "movdqa %%xmm3,%%xmm2\n\t" \
+ /*xmm3 = h3 g3 f3 e3 d3 c3 b3 a3*/ \
+ "punpckhqdq %%xmm4,%%xmm3\n\t" \
+ /*xmm2 = h2 g2 f2 e2 d2 c2 b2 a2*/ \
+ "punpcklqdq %%xmm4,%%xmm2\n\t" \
+ /*xmm4 is free.*/ \
+ "movdqa %%xmm5,%%xmm4\n\t" \
+ /*xmm5 = h5 g5 f5 e5 d5 c5 b5 a5*/ \
+ "punpckhqdq %%xmm7,%%xmm5\n\t" \
+ /*xmm4 = h4 g4 f4 e4 d4 c4 b4 a4*/ \
+ "punpcklqdq %%xmm7,%%xmm4\n\t" \
+ /*xmm7 is free.*/ \
+ "movdqa %%xmm6,%%xmm7\n\t" \
+ /*xmm6 = h6 g6 f6 e6 d6 c6 b6 a6*/ \
+ "punpcklqdq %%xmm0,%%xmm6\n\t" \
+ /*xmm7 = h7 g7 f7 e7 d7 c7 b7 a7*/ \
+ "punpckhqdq %%xmm0,%%xmm7\n\t" \
+ /*xmm0 = h0 g0 f0 e0 d0 c0 b0 a0*/ \
+ "movdqa "OC_MEM_OFFS(0x10,buf)",%%xmm0\n\t" \
+
+# endif
+
+/*Transpose 4 values in each of 8 MMX registers into 8 values in the first
+ four SSE registers.
+ No need to be clever here; we have plenty of room.*/
+# define OC_TRANSPOSE_8x4_MMX2SSE \
+ "#OC_TRANSPOSE_8x4_MMX2SSE\n\t" \
+ "movq2dq %%mm0,%%xmm0\n\t" \
+ "movq2dq %%mm1,%%xmm1\n\t" \
+ /*xmmA = b3 a3 b2 a2 b1 a1 b0 a0*/ \
+ "punpcklwd %%xmm1,%%xmm0\n\t" \
+ "movq2dq %%mm2,%%xmm3\n\t" \
+ "movq2dq %%mm3,%%xmm2\n\t" \
+ /*xmmC = d3 c3 d2 c2 d1 c1 d0 c0*/ \
+ "punpcklwd %%xmm2,%%xmm3\n\t" \
+ "movq2dq %%mm4,%%xmm4\n\t" \
+ "movq2dq %%mm5,%%xmm5\n\t" \
+ /*xmmE = f3 e3 f2 e2 f1 e1 f0 e0*/ \
+ "punpcklwd %%xmm5,%%xmm4\n\t" \
+ "movq2dq %%mm6,%%xmm7\n\t" \
+ "movq2dq %%mm7,%%xmm6\n\t" \
+ /*xmmG = h3 g3 h2 g2 h1 g1 h0 g0*/ \
+ "punpcklwd %%xmm6,%%xmm7\n\t" \
+ "movdqa %%xmm0,%%xmm2\n\t" \
+ /*xmm0 = d1 c1 b1 a1 d0 c0 b0 a0*/ \
+ "punpckldq %%xmm3,%%xmm0\n\t" \
+ /*xmm2 = d3 c3 b3 a3 d2 c2 b2 a2*/ \
+ "punpckhdq %%xmm3,%%xmm2\n\t" \
+ "movdqa %%xmm4,%%xmm5\n\t" \
+ /*xmm4 = h1 g1 f1 e1 h0 g0 f0 e0*/ \
+ "punpckldq %%xmm7,%%xmm4\n\t" \
+  /*xmm5 = h3 g3 f3 e3 h2 g2 f2 e2*/ \
+ "punpckhdq %%xmm7,%%xmm5\n\t" \
+ "movdqa %%xmm0,%%xmm1\n\t" \
+ /*xmm0 = h0 g0 f0 e0 d0 c0 b0 a0*/ \
+ "punpcklqdq %%xmm4,%%xmm0\n\t" \
+ /*xmm1 = h1 g1 f1 e1 d1 c1 b1 a1*/ \
+ "punpckhqdq %%xmm4,%%xmm1\n\t" \
+ "movdqa %%xmm2,%%xmm3\n\t" \
+ /*xmm2 = h2 g2 f2 e2 d2 c2 b2 a2*/ \
+ "punpcklqdq %%xmm5,%%xmm2\n\t" \
+ /*xmm3 = h3 g3 f3 e3 d3 c3 b3 a3*/ \
+ "punpckhqdq %%xmm5,%%xmm3\n\t" \
+
+#endif
diff --git a/thirdparty/libtheora/x86/x86cpu.c b/thirdparty/libtheora/x86/x86cpu.c
new file mode 100644
index 0000000000..49fd76d0ac
--- /dev/null
+++ b/thirdparty/libtheora/x86/x86cpu.c
@@ -0,0 +1,182 @@
+/********************************************************************
+ * *
+ * THIS FILE IS PART OF THE OggTheora SOFTWARE CODEC SOURCE CODE. *
+ * USE, DISTRIBUTION AND REPRODUCTION OF THIS LIBRARY SOURCE IS *
+ * GOVERNED BY A BSD-STYLE SOURCE LICENSE INCLUDED WITH THIS SOURCE *
+ * IN 'COPYING'. PLEASE READ THESE TERMS BEFORE DISTRIBUTING. *
+ * *
+ * THE Theora SOURCE CODE IS COPYRIGHT (C) 2002-2009 *
+ * by the Xiph.Org Foundation and contributors http://www.xiph.org/ *
+ * *
+ ********************************************************************
+
+ CPU capability detection for x86 processors.
+ Originally written by Rudolf Marek.
+
+ function:
+ last mod: $Id$
+
+ ********************************************************************/
+
+#include "x86cpu.h"
+
+#if !defined(OC_X86_ASM)
+ogg_uint32_t oc_cpu_flags_get(void){
+ return 0;
+}
+#else
+# if defined(__amd64__)||defined(__x86_64__)
+/*On x86-64, gcc seems to be able to figure out how to save %rbx for us when
+ compiling with -fPIC.*/
+# define cpuid(_op,_eax,_ebx,_ecx,_edx) \
+ __asm__ __volatile__( \
+ "cpuid\n\t" \
+ :[eax]"=a"(_eax),[ebx]"=b"(_ebx),[ecx]"=c"(_ecx),[edx]"=d"(_edx) \
+ :"a"(_op) \
+ :"cc" \
+ )
+# else
+/*On x86-32, not so much.*/
+# define cpuid(_op,_eax,_ebx,_ecx,_edx) \
+ __asm__ __volatile__( \
+ "xchgl %%ebx,%[ebx]\n\t" \
+ "cpuid\n\t" \
+ "xchgl %%ebx,%[ebx]\n\t" \
+ :[eax]"=a"(_eax),[ebx]"=r"(_ebx),[ecx]"=c"(_ecx),[edx]"=d"(_edx) \
+ :"a"(_op) \
+ :"cc" \
+ )
+# endif
+
+static ogg_uint32_t oc_parse_intel_flags(ogg_uint32_t _edx,ogg_uint32_t _ecx){
+ ogg_uint32_t flags;
+ /*If there isn't even MMX, give up.*/
+ if(!(_edx&0x00800000))return 0;
+ flags=OC_CPU_X86_MMX;
+ if(_edx&0x02000000)flags|=OC_CPU_X86_MMXEXT|OC_CPU_X86_SSE;
+ if(_edx&0x04000000)flags|=OC_CPU_X86_SSE2;
+ if(_ecx&0x00000001)flags|=OC_CPU_X86_PNI;
+ if(_ecx&0x00000100)flags|=OC_CPU_X86_SSSE3;
+ if(_ecx&0x00080000)flags|=OC_CPU_X86_SSE4_1;
+ if(_ecx&0x00100000)flags|=OC_CPU_X86_SSE4_2;
+ return flags;
+}
+
+static ogg_uint32_t oc_parse_amd_flags(ogg_uint32_t _edx,ogg_uint32_t _ecx){
+ ogg_uint32_t flags;
+ /*If there isn't even MMX, give up.*/
+ if(!(_edx&0x00800000))return 0;
+ flags=OC_CPU_X86_MMX;
+ if(_edx&0x00400000)flags|=OC_CPU_X86_MMXEXT;
+ if(_edx&0x80000000)flags|=OC_CPU_X86_3DNOW;
+ if(_edx&0x40000000)flags|=OC_CPU_X86_3DNOWEXT;
+ if(_ecx&0x00000040)flags|=OC_CPU_X86_SSE4A;
+ if(_ecx&0x00000800)flags|=OC_CPU_X86_SSE5;
+ return flags;
+}
+
+ogg_uint32_t oc_cpu_flags_get(void){
+ ogg_uint32_t flags;
+ ogg_uint32_t eax;
+ ogg_uint32_t ebx;
+ ogg_uint32_t ecx;
+ ogg_uint32_t edx;
+# if !defined(__amd64__)&&!defined(__x86_64__)
+ /*Not all x86-32 chips support cpuid, so we have to check.*/
+ __asm__ __volatile__(
+ "pushfl\n\t"
+ "pushfl\n\t"
+ "popl %[a]\n\t"
+ "movl %[a],%[b]\n\t"
+ "xorl $0x200000,%[a]\n\t"
+ "pushl %[a]\n\t"
+ "popfl\n\t"
+ "pushfl\n\t"
+ "popl %[a]\n\t"
+ "popfl\n\t"
+ :[a]"=r"(eax),[b]"=r"(ebx)
+ :
+ :"cc"
+ );
+ /*No cpuid.*/
+ if(eax==ebx)return 0;
+# endif
+ cpuid(0,eax,ebx,ecx,edx);
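+  /*cpuid(0) returns the 12-byte vendor string in ebx:edx:ecx; each constant
+    compared below is four ASCII characters in little-endian byte order
+    (e.g. 0x756E6547 is "Genu"), so the tests spell "GenuineIntel", etc.*/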
+ /* l e t n I e n i u n e G*/
+ if(ecx==0x6C65746E&&edx==0x49656E69&&ebx==0x756E6547||
+ /* 6 8 x M T e n i u n e G*/
+ ecx==0x3638784D&&edx==0x54656E69&&ebx==0x756E6547){
+ int family;
+ int model;
+ /*Intel, Transmeta (tested with Crusoe TM5800):*/
+ cpuid(1,eax,ebx,ecx,edx);
+ flags=oc_parse_intel_flags(edx,ecx);
+ family=(eax>>8)&0xF;
+ model=(eax>>4)&0xF;
+ /*The SSE unit on the Pentium M and Core Duo is much slower than the MMX
+ unit, so don't use it.*/
+ if(family==6&&(model==9||model==13||model==14)){
+ flags&=~(OC_CPU_X86_SSE2|OC_CPU_X86_PNI);
+ }
+ }
+ /* D M A c i t n e h t u A*/
+ else if(ecx==0x444D4163&&edx==0x69746E65&&ebx==0x68747541||
+ /* C S N y b e d o e G*/
+ ecx==0x43534e20&&edx==0x79622065&&ebx==0x646f6547){
+ /*AMD, Geode:*/
+ cpuid(0x80000000,eax,ebx,ecx,edx);
+ if(eax<0x80000001)flags=0;
+ else{
+ cpuid(0x80000001,eax,ebx,ecx,edx);
+ flags=oc_parse_amd_flags(edx,ecx);
+ }
+ /*Also check for SSE.*/
+ cpuid(1,eax,ebx,ecx,edx);
+ flags|=oc_parse_intel_flags(edx,ecx);
+ }
+ /*Technically some VIA chips can be configured in the BIOS to return any
+ string here the user wants.
+ There is a special detection method that can be used to identify such
+ processors, but in my opinion, if the user really wants to change it, they
+ deserve what they get.*/
+ /* s l u a H r u a t n e C*/
+ else if(ecx==0x736C7561&&edx==0x48727561&&ebx==0x746E6543){
+ /*VIA:*/
+ /*I only have documentation for the C7 (Esther) and Isaiah (forthcoming)
+ chips (thanks to the engineers from Centaur Technology who provided it).
+ These chips support Intel-like cpuid info.
+ The C3-2 (Nehemiah) cores appear to, as well.*/
+ cpuid(1,eax,ebx,ecx,edx);
+ flags=oc_parse_intel_flags(edx,ecx);
+ if(eax>=0x80000001){
+ /*The (non-Nehemiah) C3 processors support AMD-like cpuid info.
+ We need to check this even if the Intel test succeeds to pick up 3DNow!
+ support on these processors.
+ Unlike actual AMD processors, we cannot _rely_ on this info, since
+ some cores (e.g., the 693 stepping of the Nehemiah) claim to support
+ this function, yet return edx=0, despite the Intel test indicating
+ MMX support.
+ Therefore the features detected here are strictly added to those
+ detected by the Intel test.*/
+ /*TODO: How about earlier chips?*/
+ cpuid(0x80000001,eax,ebx,ecx,edx);
+ /*Note: As of the C7, this function returns Intel-style extended feature
+ flags, not AMD-style.
+ Currently, this only defines bits 11, 20, and 29 (0x20100800), which
+ do not conflict with any of the AMD flags we inspect.
+ For the remaining bits, Intel tells us, "Do not count on their value",
+ but VIA assures us that they will all be zero (at least on the C7 and
+ Isaiah chips).
+ In the (unlikely) event a future processor uses bits 18, 19, 30, or 31
+ (0xC0C00000) for something else, we will have to add code to detect
+ the model to decide when it is appropriate to inspect them.*/
+ flags|=oc_parse_amd_flags(edx,ecx);
+ }
+ }
+ else{
+ /*Implement me.*/
+ flags=0;
+ }
+ return flags;
+}
+#endif
diff --git a/thirdparty/libtheora/cpu.h b/thirdparty/libtheora/x86/x86cpu.h
index a43c957a39..e0192d52e2 100644
--- a/thirdparty/libtheora/cpu.h
+++ b/thirdparty/libtheora/x86/x86cpu.h
@@ -10,13 +10,13 @@
* *
********************************************************************
function:
- last mod: $Id: cpu.h 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
-#if !defined(_x86_cpu_H)
-# define _x86_cpu_H (1)
-#include "internal.h"
+#if !defined(_x86_x86cpu_H)
+# define _x86_x86cpu_H (1)
+#include "../internal.h"
#define OC_CPU_X86_MMX (1<<0)
#define OC_CPU_X86_3DNOW (1<<1)
@@ -31,4 +31,6 @@
#define OC_CPU_X86_SSE4A (1<<10)
#define OC_CPU_X86_SSE5 (1<<11)
+ogg_uint32_t oc_cpu_flags_get(void);
+
#endif
diff --git a/thirdparty/libtheora/x86/x86enc.c b/thirdparty/libtheora/x86/x86enc.c
index 43b7be3ea3..ffa9c14a42 100644
--- a/thirdparty/libtheora/x86/x86enc.c
+++ b/thirdparty/libtheora/x86/x86enc.c
@@ -18,32 +18,46 @@
#if defined(OC_X86_ASM)
-#include "../cpu.c"
-
-void oc_enc_vtable_init_x86(oc_enc_ctx *_enc){
+void oc_enc_accel_init_x86(oc_enc_ctx *_enc){
ogg_uint32_t cpu_flags;
- cpu_flags=oc_cpu_flags_get();
- oc_enc_vtable_init_c(_enc);
+ cpu_flags=_enc->state.cpu_flags;
+ oc_enc_accel_init_c(_enc);
+# if defined(OC_ENC_USE_VTABLE)
if(cpu_flags&OC_CPU_X86_MMX){
_enc->opt_vtable.frag_sub=oc_enc_frag_sub_mmx;
_enc->opt_vtable.frag_sub_128=oc_enc_frag_sub_128_mmx;
_enc->opt_vtable.frag_recon_intra=oc_frag_recon_intra_mmx;
_enc->opt_vtable.frag_recon_inter=oc_frag_recon_inter_mmx;
- _enc->opt_vtable.fdct8x8=oc_enc_fdct8x8_mmx;
}
if(cpu_flags&OC_CPU_X86_MMXEXT){
_enc->opt_vtable.frag_sad=oc_enc_frag_sad_mmxext;
_enc->opt_vtable.frag_sad_thresh=oc_enc_frag_sad_thresh_mmxext;
_enc->opt_vtable.frag_sad2_thresh=oc_enc_frag_sad2_thresh_mmxext;
- _enc->opt_vtable.frag_satd_thresh=oc_enc_frag_satd_thresh_mmxext;
- _enc->opt_vtable.frag_satd2_thresh=oc_enc_frag_satd2_thresh_mmxext;
+ _enc->opt_vtable.frag_satd=oc_enc_frag_satd_mmxext;
+ _enc->opt_vtable.frag_satd2=oc_enc_frag_satd2_mmxext;
_enc->opt_vtable.frag_intra_satd=oc_enc_frag_intra_satd_mmxext;
_enc->opt_vtable.frag_copy2=oc_enc_frag_copy2_mmxext;
+ _enc->opt_vtable.fdct8x8=oc_enc_fdct8x8_mmxext;
}
if(cpu_flags&OC_CPU_X86_SSE2){
-# if defined(OC_X86_64_ASM)
- /*_enc->opt_vtable.fdct8x8=oc_enc_fdct8x8_x86_64sse2;*/
+# if defined(OC_X86_64_ASM)
+ _enc->opt_vtable.fdct8x8=oc_enc_fdct8x8_x86_64sse2;
+# endif
+ _enc->opt_vtable.frag_ssd=oc_enc_frag_ssd_sse2;
+ _enc->opt_vtable.frag_border_ssd=oc_enc_frag_border_ssd_sse2;
+ _enc->opt_vtable.frag_satd=oc_enc_frag_satd_sse2;
+ _enc->opt_vtable.frag_satd2=oc_enc_frag_satd2_sse2;
+ _enc->opt_vtable.frag_intra_satd=oc_enc_frag_intra_satd_sse2;
+ _enc->opt_vtable.enquant_table_init=oc_enc_enquant_table_init_x86;
+ _enc->opt_vtable.enquant_table_fixup=oc_enc_enquant_table_fixup_x86;
+ _enc->opt_vtable.quantize=oc_enc_quantize_sse2;
+# else
+ (void) cpu_flags;
# endif
+ _enc->opt_data.enquant_table_size=128*sizeof(ogg_uint16_t);
+ _enc->opt_data.enquant_table_alignment=16;
+# if defined(OC_ENC_USE_VTABLE)
}
+# endif
}
#endif
diff --git a/thirdparty/libtheora/x86/x86enc.h b/thirdparty/libtheora/x86/x86enc.h
index 06c3908bcd..c258247d67 100644
--- a/thirdparty/libtheora/x86/x86enc.h
+++ b/thirdparty/libtheora/x86/x86enc.h
@@ -17,11 +17,62 @@
#if !defined(_x86_x86enc_H)
# define _x86_x86enc_H (1)
-# include "../encint.h"
# include "x86int.h"
-void oc_enc_vtable_init_x86(oc_enc_ctx *_enc);
+# if defined(OC_X86_ASM)
+# define oc_enc_accel_init oc_enc_accel_init_x86
+# if defined(OC_X86_64_ASM)
+/*x86-64 guarantees SIMD support up through at least SSE2.
+ If the best routine we have available only needs SSE2 (which at the moment
+ covers all of them), then we can avoid runtime detection and the indirect
+ call.*/
+# define oc_enc_frag_sub(_enc,_diff,_x,_y,_stride) \
+ oc_enc_frag_sub_mmx(_diff,_x,_y,_stride)
+# define oc_enc_frag_sub_128(_enc,_diff,_x,_stride) \
+ oc_enc_frag_sub_128_mmx(_diff,_x,_stride)
+# define oc_enc_frag_sad(_enc,_src,_ref,_ystride) \
+ oc_enc_frag_sad_mmxext(_src,_ref,_ystride)
+# define oc_enc_frag_sad_thresh(_enc,_src,_ref,_ystride,_thresh) \
+ oc_enc_frag_sad_thresh_mmxext(_src,_ref,_ystride,_thresh)
+# define oc_enc_frag_sad2_thresh(_enc,_src,_ref1,_ref2,_ystride,_thresh) \
+ oc_enc_frag_sad2_thresh_mmxext(_src,_ref1,_ref2,_ystride,_thresh)
+# define oc_enc_frag_satd(_enc,_dc,_src,_ref,_ystride) \
+ oc_enc_frag_satd_sse2(_dc,_src,_ref,_ystride)
+# define oc_enc_frag_satd2(_enc,_dc,_src,_ref1,_ref2,_ystride) \
+ oc_enc_frag_satd2_sse2(_dc,_src,_ref1,_ref2,_ystride)
+# define oc_enc_frag_intra_satd(_enc,_dc,_src,_ystride) \
+ oc_enc_frag_intra_satd_sse2(_dc,_src,_ystride)
+# define oc_enc_frag_ssd(_enc,_src,_ref,_ystride) \
+ oc_enc_frag_ssd_sse2(_src,_ref,_ystride)
+# define oc_enc_frag_border_ssd(_enc,_src,_ref,_ystride,_mask) \
+ oc_enc_frag_border_ssd_sse2(_src,_ref,_ystride,_mask)
+# define oc_enc_frag_copy2(_enc,_dst,_src1,_src2,_ystride) \
+ oc_int_frag_copy2_mmxext(_dst,_ystride,_src1,_src2,_ystride)
+# define oc_enc_enquant_table_init(_enc,_enquant,_dequant) \
+ oc_enc_enquant_table_init_x86(_enquant,_dequant)
+# define oc_enc_enquant_table_fixup(_enc,_enquant,_nqis) \
+ oc_enc_enquant_table_fixup_x86(_enquant,_nqis)
+# define oc_enc_quantize(_enc,_qdct,_dct,_dequant,_enquant) \
+ oc_enc_quantize_sse2(_qdct,_dct,_dequant,_enquant)
+# define oc_enc_frag_recon_intra(_enc,_dst,_ystride,_residue) \
+ oc_frag_recon_intra_mmx(_dst,_ystride,_residue)
+# define oc_enc_frag_recon_inter(_enc,_dst,_src,_ystride,_residue) \
+ oc_frag_recon_inter_mmx(_dst,_src,_ystride,_residue)
+# define oc_enc_fdct8x8(_enc,_y,_x) \
+ oc_enc_fdct8x8_x86_64sse2(_y,_x)
+# else
+# define OC_ENC_USE_VTABLE (1)
+# endif
+# endif
+
+# include "../encint.h"
+void oc_enc_accel_init_x86(oc_enc_ctx *_enc);
+
+void oc_enc_frag_sub_mmx(ogg_int16_t _diff[64],
+ const unsigned char *_x,const unsigned char *_y,int _stride);
+void oc_enc_frag_sub_128_mmx(ogg_int16_t _diff[64],
+ const unsigned char *_x,int _stride);
unsigned oc_enc_frag_sad_mmxext(const unsigned char *_src,
const unsigned char *_ref,int _ystride);
unsigned oc_enc_frag_sad_thresh_mmxext(const unsigned char *_src,
@@ -29,19 +80,35 @@ unsigned oc_enc_frag_sad_thresh_mmxext(const unsigned char *_src,
unsigned oc_enc_frag_sad2_thresh_mmxext(const unsigned char *_src,
const unsigned char *_ref1,const unsigned char *_ref2,int _ystride,
unsigned _thresh);
-unsigned oc_enc_frag_satd_thresh_mmxext(const unsigned char *_src,
- const unsigned char *_ref,int _ystride,unsigned _thresh);
-unsigned oc_enc_frag_satd2_thresh_mmxext(const unsigned char *_src,
- const unsigned char *_ref1,const unsigned char *_ref2,int _ystride,
- unsigned _thresh);
-unsigned oc_enc_frag_intra_satd_mmxext(const unsigned char *_src,int _ystride);
-void oc_enc_frag_sub_mmx(ogg_int16_t _diff[64],
- const unsigned char *_x,const unsigned char *_y,int _stride);
-void oc_enc_frag_sub_128_mmx(ogg_int16_t _diff[64],
- const unsigned char *_x,int _stride);
+unsigned oc_enc_frag_satd_mmxext(int *_dc,const unsigned char *_src,
+ const unsigned char *_ref,int _ystride);
+unsigned oc_enc_frag_satd_sse2(int *_dc,const unsigned char *_src,
+ const unsigned char *_ref,int _ystride);
+unsigned oc_enc_frag_satd2_mmxext(int *_dc,const unsigned char *_src,
+ const unsigned char *_ref1,const unsigned char *_ref2,int _ystride);
+unsigned oc_enc_frag_satd2_sse2(int *_dc,const unsigned char *_src,
+ const unsigned char *_ref1,const unsigned char *_ref2,int _ystride);
+unsigned oc_enc_frag_intra_satd_mmxext(int *_dc,
+ const unsigned char *_src,int _ystride);
+unsigned oc_enc_frag_intra_satd_sse2(int *_dc,
+ const unsigned char *_src,int _ystride);
+unsigned oc_enc_frag_ssd_sse2(const unsigned char *_src,
+ const unsigned char *_ref,int _ystride);
+unsigned oc_enc_frag_border_ssd_sse2(const unsigned char *_src,
+ const unsigned char *_ref,int _ystride,ogg_int64_t _mask);
+void oc_int_frag_copy2_mmxext(unsigned char *_dst,int _dst_ystride,
+ const unsigned char *_src1,const unsigned char *_src2,int _src_ystride);
void oc_enc_frag_copy2_mmxext(unsigned char *_dst,
const unsigned char *_src1,const unsigned char *_src2,int _ystride);
-void oc_enc_fdct8x8_mmx(ogg_int16_t _y[64],const ogg_int16_t _x[64]);
+void oc_enc_enquant_table_init_x86(void *_enquant,
+ const ogg_uint16_t _dequant[64]);
+void oc_enc_enquant_table_fixup_x86(void *_enquant[3][3][2],int _nqis);
+int oc_enc_quantize_sse2(ogg_int16_t _qdct[64],const ogg_int16_t _dct[64],
+ const ogg_uint16_t _dequant[64],const void *_enquant);
+void oc_enc_fdct8x8_mmxext(ogg_int16_t _y[64],const ogg_int16_t _x[64]);
+
+# if defined(OC_X86_64_ASM)
void oc_enc_fdct8x8_x86_64sse2(ogg_int16_t _y[64],const ogg_int16_t _x[64]);
+# endif
#endif
diff --git a/thirdparty/libtheora/x86/x86enquant.c b/thirdparty/libtheora/x86/x86enquant.c
new file mode 100644
index 0000000000..39477ecc21
--- /dev/null
+++ b/thirdparty/libtheora/x86/x86enquant.c
@@ -0,0 +1,149 @@
+/********************************************************************
+ * *
+ * THIS FILE IS PART OF THE OggTheora SOFTWARE CODEC SOURCE CODE. *
+ * USE, DISTRIBUTION AND REPRODUCTION OF THIS LIBRARY SOURCE IS *
+ * GOVERNED BY A BSD-STYLE SOURCE LICENSE INCLUDED WITH THIS SOURCE *
+ * IN 'COPYING'. PLEASE READ THESE TERMS BEFORE DISTRIBUTING. *
+ * *
+ * THE Theora SOURCE CODE IS COPYRIGHT (C) 2002-2009 *
+ * by the Xiph.Org Foundation and contributors http://www.xiph.org/ *
+ * *
+ ********************************************************************
+
+ function:
+ last mod: $Id: mmxstate.c 17247 2010-05-28 05:35:32Z tterribe $
+
+ ********************************************************************/
+
+#include "x86enc.h"
+
+#if defined(OC_X86_ASM)
+
+
+
+/*The default enquant table is not quite suitable for SIMD purposes.
+ First, the m and l parameters need to be separated so that an entire row full
+ of m's or l's can be loaded at a time.
+ Second, x86 SIMD has no element-wise arithmetic right-shift, so we have to
+ emulate one with a multiply.
+ Therefore we translate the shift count into a scale factor.*/
+void oc_enc_enquant_table_init_x86(void *_enquant,
+ const ogg_uint16_t _dequant[64]){
+ ogg_int16_t *m;
+ ogg_int16_t *l;
+ int zzi;
+ m=(ogg_int16_t *)_enquant;
+ l=m+64;
+ for(zzi=0;zzi<64;zzi++){
+ oc_iquant q;
+ oc_iquant_init(&q,_dequant[zzi]);
+ m[zzi]=q.m;
+ /*q.l must be at least 2 for this to work; fortunately, once all the scale
+ factors are baked in, the minimum quantizer is much larger than that.*/
+ l[zzi]=1<<16-q.l;
+ }
+}
+
+void oc_enc_enquant_table_fixup_x86(void *_enquant[3][3][2],int _nqis){
+ int pli;
+ int qii;
+ int qti;
+ for(pli=0;pli<3;pli++)for(qii=1;qii<_nqis;qii++)for(qti=0;qti<2;qti++){
+ ((ogg_int16_t *)_enquant[pli][qii][qti])[0]=
+ ((ogg_int16_t *)_enquant[pli][0][qti])[0];
+ ((ogg_int16_t *)_enquant[pli][qii][qti])[64]=
+ ((ogg_int16_t *)_enquant[pli][0][qti])[64];
+ }
+}
+
+int oc_enc_quantize_sse2(ogg_int16_t _qdct[64],const ogg_int16_t _dct[64],
+ const ogg_uint16_t _dequant[64],const void *_enquant){
+ ptrdiff_t r;
+ __asm__ __volatile__(
+ "xor %[r],%[r]\n\t"
+ /*Loop through two rows at a time.*/
+ ".p2align 4\n\t"
+ "0:\n\t"
+ /*Load the first two rows of the data and the quant matrices.*/
+ "movdqa 0x00(%[dct],%[r]),%%xmm0\n\t"
+ "movdqa 0x10(%[dct],%[r]),%%xmm1\n\t"
+ "movdqa 0x00(%[dq],%[r]),%%xmm2\n\t"
+ "movdqa 0x10(%[dq],%[r]),%%xmm3\n\t"
+ "movdqa 0x00(%[q],%[r]),%%xmm4\n\t"
+ "movdqa 0x10(%[q],%[r]),%%xmm5\n\t"
+ /*Double the input and propagate its sign to the rounding factor.
+ Using SSSE3's psignw would help here, but we need the mask later anyway.*/
+ "movdqa %%xmm0,%%xmm6\n\t"
+ "psraw $15,%%xmm0\n\t"
+ "movdqa %%xmm1,%%xmm7\n\t"
+ "paddw %%xmm6,%%xmm6\n\t"
+ "psraw $15,%%xmm1\n\t"
+ "paddw %%xmm7,%%xmm7\n\t"
+ "paddw %%xmm0,%%xmm2\n\t"
+ "paddw %%xmm1,%%xmm3\n\t"
+ "pxor %%xmm0,%%xmm2\n\t"
+ "pxor %%xmm1,%%xmm3\n\t"
+ /*Add the rounding factor and perform the first multiply.*/
+ "paddw %%xmm2,%%xmm6\n\t"
+ "paddw %%xmm3,%%xmm7\n\t"
+ "pmulhw %%xmm6,%%xmm4\n\t"
+ "pmulhw %%xmm7,%%xmm5\n\t"
+ "movdqa 0x80(%[q],%[r]),%%xmm2\n\t"
+ "movdqa 0x90(%[q],%[r]),%%xmm3\n\t"
+ "paddw %%xmm4,%%xmm6\n\t"
+ "paddw %%xmm5,%%xmm7\n\t"
+ /*Emulate an element-wise right-shift via a second multiply.*/
+ "pmulhw %%xmm2,%%xmm6\n\t"
+ "pmulhw %%xmm3,%%xmm7\n\t"
+ "add $32,%[r]\n\t"
+ "cmp $96,%[r]\n\t"
+ /*Correct for the sign.*/
+ "psubw %%xmm0,%%xmm6\n\t"
+ "psubw %%xmm1,%%xmm7\n\t"
+ /*Save the result.*/
+ "movdqa %%xmm6,-0x20(%[qdct],%[r])\n\t"
+ "movdqa %%xmm7,-0x10(%[qdct],%[r])\n\t"
+ "jle 0b\n\t"
+ /*Now find the location of the last non-zero value.*/
+ "movdqa 0x50(%[qdct]),%%xmm5\n\t"
+ "movdqa 0x40(%[qdct]),%%xmm4\n\t"
+ "packsswb %%xmm7,%%xmm6\n\t"
+ "packsswb %%xmm5,%%xmm4\n\t"
+ "pxor %%xmm0,%%xmm0\n\t"
+ "mov $-1,%k[dq]\n\t"
+ "pcmpeqb %%xmm0,%%xmm6\n\t"
+ "pcmpeqb %%xmm0,%%xmm4\n\t"
+ "pmovmskb %%xmm6,%k[q]\n\t"
+ "pmovmskb %%xmm4,%k[r]\n\t"
+ "shl $16,%k[q]\n\t"
+ "or %k[r],%k[q]\n\t"
+ "mov $32,%[r]\n\t"
+ /*We have to use xor here instead of not in order to set the flags.*/
+ "xor %k[dq],%k[q]\n\t"
+ "jnz 1f\n\t"
+ "movdqa 0x30(%[qdct]),%%xmm7\n\t"
+ "movdqa 0x20(%[qdct]),%%xmm6\n\t"
+ "movdqa 0x10(%[qdct]),%%xmm5\n\t"
+ "movdqa 0x00(%[qdct]),%%xmm4\n\t"
+ "packsswb %%xmm7,%%xmm6\n\t"
+ "packsswb %%xmm5,%%xmm4\n\t"
+ "pcmpeqb %%xmm0,%%xmm6\n\t"
+ "pcmpeqb %%xmm0,%%xmm4\n\t"
+ "pmovmskb %%xmm6,%k[q]\n\t"
+ "pmovmskb %%xmm4,%k[r]\n\t"
+ "shl $16,%k[q]\n\t"
+ "or %k[r],%k[q]\n\t"
+ "xor %[r],%[r]\n\t"
+ "not %k[q]\n\t"
+ "or $1,%k[q]\n\t"
+ "1:\n\t"
+ "bsr %k[q],%k[q]\n\t"
+ "add %k[q],%k[r]\n\t"
+ :[r]"=&a"(r),[q]"+r"(_enquant),[dq]"+r"(_dequant)
+ :[dct]"r"(_dct),[qdct]"r"(_qdct)
+ :"cc","memory"
+ );
+ return (int)r;
+}
+
+#endif
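
For reference, a minimal scalar model of what oc_enc_quantize_sse2() above computes, assuming the m/l split produced by oc_enc_enquant_table_init_x86() but taking m and l as two separate arrays rather than the packed _enquant layout. This is illustrative only and not part of the patch: the name quantize_scalar is hypothetical, and the 16-bit lane wrap/saturation behavior of the SSE2 code is not modeled.

/*Illustrative scalar equivalent: quantize 64 coefficients with the
  two-multiply scheme and return the index of the last non-zero value (the
  asm finds that index with packsswb/pcmpeqb/pmovmskb/bsr).*/
static int quantize_scalar(short _qdct[64],const short _dct[64],
 const unsigned short _dequant[64],const short _m[64],const short _l[64]){
  int last_nz;
  int zzi;
  last_nz=0;
  for(zzi=0;zzi<64;zzi++){
    int x;
    int s;
    int y;
    x=_dct[zzi];
    /*Sign mask: 0 for non-negative x, -1 for negative x (psraw $15).*/
    s=x>>15;
    /*Double the input and add a rounding bias of +/-dequant[zzi], with the
      sign of x folded in (the paddw/pxor/paddw sequence).*/
    x=2*x+((_dequant[zzi]+s)^s);
    /*First multiply: the oc_iquant reciprocal step (pmulhw+paddw).*/
    y=x+(x*_m[zzi]>>16);
    /*Second multiply emulates the arithmetic right shift by q.l, since
      l[zzi]==1<<(16-q.l), so (y*l[zzi])>>16==y>>q.l.*/
    y=y*_l[zzi]>>16;
    /*Undo the sign folding (the final psubw of the sign mask).*/
    y-=s;
    _qdct[zzi]=(short)y;
    if(y!=0)last_nz=zzi;
  }
  return last_nz;
}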
diff --git a/thirdparty/libtheora/x86/x86int.h b/thirdparty/libtheora/x86/x86int.h
index ede724f5aa..ceb2dbb0ec 100644
--- a/thirdparty/libtheora/x86/x86int.h
+++ b/thirdparty/libtheora/x86/x86int.h
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: x86int.h 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
@@ -19,24 +19,104 @@
# define _x86_x86int_H (1)
# include "../internal.h"
-void oc_state_vtable_init_x86(oc_theora_state *_state);
+# if defined(OC_X86_ASM)
+# define oc_state_accel_init oc_state_accel_init_x86
+# if defined(OC_X86_64_ASM)
+/*x86-64 guarantees SIMD support up through at least SSE2.
+ If the best routine we have available only needs SSE2 (which at the moment
+ covers all of them), then we can avoid runtime detection and the indirect
+ call.*/
+# define oc_frag_copy(_state,_dst,_src,_ystride) \
+ oc_frag_copy_mmx(_dst,_src,_ystride)
+# define oc_frag_copy_list(_state,_dst_frame,_src_frame,_ystride, \
+ _fragis,_nfragis,_frag_buf_offs) \
+ oc_frag_copy_list_mmx(_dst_frame,_src_frame,_ystride, \
+ _fragis,_nfragis,_frag_buf_offs)
+# define oc_frag_recon_intra(_state,_dst,_ystride,_residue) \
+ oc_frag_recon_intra_mmx(_dst,_ystride,_residue)
+# define oc_frag_recon_inter(_state,_dst,_src,_ystride,_residue) \
+ oc_frag_recon_inter_mmx(_dst,_src,_ystride,_residue)
+# define oc_frag_recon_inter2(_state,_dst,_src1,_src2,_ystride,_residue) \
+ oc_frag_recon_inter2_mmx(_dst,_src1,_src2,_ystride,_residue)
+# define oc_idct8x8(_state,_y,_x,_last_zzi) \
+ oc_idct8x8_sse2(_y,_x,_last_zzi)
+# define oc_state_frag_recon oc_state_frag_recon_mmx
+# define oc_loop_filter_init(_state,_bv,_flimit) \
+ oc_loop_filter_init_mmxext(_bv,_flimit)
+# define oc_state_loop_filter_frag_rows oc_state_loop_filter_frag_rows_mmxext
+# define oc_restore_fpu(_state) \
+ oc_restore_fpu_mmx()
+# else
+# define OC_STATE_USE_VTABLE (1)
+# endif
+# endif
+
+# include "../state.h"
+# include "x86cpu.h"
+
+/*Converts the expression in the argument to a string.*/
+#define OC_M2STR(_s) #_s
+
+/*Memory operands do not always include an offset.
+ To avoid warnings, we force an offset with %H (which adds 8).*/
+# if __GNUC_PREREQ(4,0)
+# define OC_MEM_OFFS(_offs,_name) \
+ OC_M2STR(_offs-8+%H[_name])
+# endif
+/*If your gcc version doesn't support %H, then you get to suffer the warnings.
+ Note that Apple's gas breaks on things like _offs+(%esp): it throws away the
+ whole offset, instead of substituting in 0 for the missing operand to +.*/
+# if !defined(OC_MEM_OFFS)
+# define OC_MEM_OFFS(_offs,_name) \
+ OC_M2STR(_offs+%[_name])
+# endif
+
+/*Declare an array operand with an exact size.
+ This tells gcc we're going to clobber this memory region, without having to
+ clobber all of "memory" and lets us access local buffers directly using the
+ stack pointer, without allocating a separate register to point to them.*/
+#define OC_ARRAY_OPERAND(_type,_ptr,_size) \
+ (*({ \
+ struct{_type array_value__[(_size)];} *array_addr__=(void *)(_ptr); \
+ array_addr__; \
+ }))
+
+/*Declare an array operand with an exact size.
+ This tells gcc we're going to clobber this memory region, without having to
+ clobber all of "memory" and lets us access local buffers directly using the
+ stack pointer, without allocating a separate register to point to them.*/
+#define OC_CONST_ARRAY_OPERAND(_type,_ptr,_size) \
+ (*({ \
+ const struct{_type array_value__[(_size)];} *array_addr__= \
+ (const void *)(_ptr); \
+ array_addr__; \
+ }))
+
+extern const unsigned short __attribute__((aligned(16))) OC_IDCT_CONSTS[64];
+
+void oc_state_accel_init_x86(oc_theora_state *_state);
void oc_frag_copy_mmx(unsigned char *_dst,
const unsigned char *_src,int _ystride);
+void oc_frag_copy_list_mmx(unsigned char *_dst_frame,
+ const unsigned char *_src_frame,int _ystride,
+ const ptrdiff_t *_fragis,ptrdiff_t _nfragis,const ptrdiff_t *_frag_buf_offs);
void oc_frag_recon_intra_mmx(unsigned char *_dst,int _ystride,
const ogg_int16_t *_residue);
void oc_frag_recon_inter_mmx(unsigned char *_dst,
const unsigned char *_src,int _ystride,const ogg_int16_t *_residue);
void oc_frag_recon_inter2_mmx(unsigned char *_dst,const unsigned char *_src1,
const unsigned char *_src2,int _ystride,const ogg_int16_t *_residue);
-void oc_idct8x8_mmx(ogg_int16_t _y[64],int _last_zzi);
+void oc_idct8x8_mmx(ogg_int16_t _y[64],ogg_int16_t _x[64],int _last_zzi);
+void oc_idct8x8_sse2(ogg_int16_t _y[64],ogg_int16_t _x[64],int _last_zzi);
void oc_state_frag_recon_mmx(const oc_theora_state *_state,ptrdiff_t _fragi,
- int _pli,ogg_int16_t _dct_coeffs[64],int _last_zzi,ogg_uint16_t _dc_quant);
-void oc_state_frag_copy_list_mmx(const oc_theora_state *_state,
- const ptrdiff_t *_fragis,ptrdiff_t _nfragis,
- int _dst_frame,int _src_frame,int _pli);
+ int _pli,ogg_int16_t _dct_coeffs[128],int _last_zzi,ogg_uint16_t _dc_quant);
+void oc_loop_filter_init_mmx(signed char _bv[256],int _flimit);
+void oc_loop_filter_init_mmxext(signed char _bv[256],int _flimit);
void oc_state_loop_filter_frag_rows_mmx(const oc_theora_state *_state,
- int _bv[256],int _refi,int _pli,int _fragy0,int _fragy_end);
+ signed char _bv[256],int _refi,int _pli,int _fragy0,int _fragy_end);
+void oc_state_loop_filter_frag_rows_mmxext(const oc_theora_state *_state,
+ signed char _bv[256],int _refi,int _pli,int _fragy0,int _fragy_end);
void oc_restore_fpu_mmx(void);
#endif
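
For reference, a minimal usage sketch of the OC_MEM_OFFS and OC_ARRAY_OPERAND macros added above. This is illustrative only and not part of the patch: the function name and buffer are hypothetical; the point is that only the 64 elements of _buf are declared as written, so gcc does not have to treat the asm as clobbering all of "memory".

#include "x86int.h"

/*Hypothetical example: clear the first two 16-bit coefficients of a
  64-element buffer from inline asm.*/
static void clear_first_two(ogg_int16_t _buf[64]){
  __asm__ __volatile__(
    "movl $0,"OC_MEM_OFFS(0x00,buf)"\n\t"
    /*Only _buf[0..63] is listed as an output, not all of "memory".*/
    :[buf]"=m"(OC_ARRAY_OPERAND(ogg_int16_t,_buf,64))
  );
}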
diff --git a/thirdparty/libtheora/x86/x86state.c b/thirdparty/libtheora/x86/x86state.c
index a786bec284..9f8bceb534 100644
--- a/thirdparty/libtheora/x86/x86state.c
+++ b/thirdparty/libtheora/x86/x86state.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: x86state.c 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
@@ -19,8 +19,7 @@
#if defined(OC_X86_ASM)
-#include "../cpu.c"
-
+#if defined(OC_STATE_USE_VTABLE)
/*This table has been modified from OC_FZIG_ZAG by baking a 4x4 transpose into
each quadrant of the destination.*/
static const unsigned char OC_FZIG_ZAG_MMX[128]={
@@ -39,24 +38,60 @@ static const unsigned char OC_FZIG_ZAG_MMX[128]={
64,64,64,64,64,64,64,64,
64,64,64,64,64,64,64,64,
64,64,64,64,64,64,64,64,
+ 64,64,64,64,64,64,64,64
+};
+#endif
+
+/*This table has been modified from OC_FZIG_ZAG by baking an 8x8 transpose into
+ the destination.*/
+static const unsigned char OC_FZIG_ZAG_SSE2[128]={
+ 0, 8, 1, 2, 9,16,24,17,
+ 10, 3, 4,11,18,25,32,40,
+ 33,26,19,12, 5, 6,13,20,
+ 27,34,41,48,56,49,42,35,
+ 28,21,14, 7,15,22,29,36,
+ 43,50,57,58,51,44,37,30,
+ 23,31,38,45,52,59,60,53,
+ 46,39,47,54,61,62,55,63,
+ 64,64,64,64,64,64,64,64,
+ 64,64,64,64,64,64,64,64,
64,64,64,64,64,64,64,64,
+ 64,64,64,64,64,64,64,64,
+ 64,64,64,64,64,64,64,64,
+ 64,64,64,64,64,64,64,64,
+ 64,64,64,64,64,64,64,64,
+ 64,64,64,64,64,64,64,64
};
-void oc_state_vtable_init_x86(oc_theora_state *_state){
+void oc_state_accel_init_x86(oc_theora_state *_state){
+ oc_state_accel_init_c(_state);
_state->cpu_flags=oc_cpu_flags_get();
+# if defined(OC_STATE_USE_VTABLE)
if(_state->cpu_flags&OC_CPU_X86_MMX){
_state->opt_vtable.frag_copy=oc_frag_copy_mmx;
+ _state->opt_vtable.frag_copy_list=oc_frag_copy_list_mmx;
_state->opt_vtable.frag_recon_intra=oc_frag_recon_intra_mmx;
_state->opt_vtable.frag_recon_inter=oc_frag_recon_inter_mmx;
_state->opt_vtable.frag_recon_inter2=oc_frag_recon_inter2_mmx;
_state->opt_vtable.idct8x8=oc_idct8x8_mmx;
_state->opt_vtable.state_frag_recon=oc_state_frag_recon_mmx;
- _state->opt_vtable.state_frag_copy_list=oc_state_frag_copy_list_mmx;
+ _state->opt_vtable.loop_filter_init=oc_loop_filter_init_mmx;
_state->opt_vtable.state_loop_filter_frag_rows=
oc_state_loop_filter_frag_rows_mmx;
_state->opt_vtable.restore_fpu=oc_restore_fpu_mmx;
_state->opt_data.dct_fzig_zag=OC_FZIG_ZAG_MMX;
}
- else oc_state_vtable_init_c(_state);
+ if(_state->cpu_flags&OC_CPU_X86_MMXEXT){
+ _state->opt_vtable.loop_filter_init=oc_loop_filter_init_mmxext;
+ _state->opt_vtable.state_loop_filter_frag_rows=
+ oc_state_loop_filter_frag_rows_mmxext;
+ }
+ if(_state->cpu_flags&OC_CPU_X86_SSE2){
+ _state->opt_vtable.idct8x8=oc_idct8x8_sse2;
+# endif
+ _state->opt_data.dct_fzig_zag=OC_FZIG_ZAG_SSE2;
+# if defined(OC_STATE_USE_VTABLE)
+ }
+# endif
}
#endif
diff --git a/thirdparty/libtheora/x86/x86zigzag.h b/thirdparty/libtheora/x86/x86zigzag.h
new file mode 100644
index 0000000000..fb21e0bb43
--- /dev/null
+++ b/thirdparty/libtheora/x86/x86zigzag.h
@@ -0,0 +1,244 @@
+/********************************************************************
+ * *
+ * THIS FILE IS PART OF THE OggTheora SOFTWARE CODEC SOURCE CODE. *
+ * USE, DISTRIBUTION AND REPRODUCTION OF THIS LIBRARY SOURCE IS *
+ * GOVERNED BY A BSD-STYLE SOURCE LICENSE INCLUDED WITH THIS SOURCE *
+ * IN 'COPYING'. PLEASE READ THESE TERMS BEFORE DISTRIBUTING. *
+ * *
+ * THE Theora SOURCE CODE IS COPYRIGHT (C) 2002-2009 *
+ * by the Xiph.Org Foundation and contributors http://www.xiph.org/ *
+ * *
+ ********************************************************************
+
+ function:
+ last mod: $Id: sse2trans.h 15675 2009-02-06 09:43:27Z tterribe $
+
+ ********************************************************************/
+
+#if !defined(_x86_x86zigzag_H)
+# define _x86_x86zigzag_H (1)
+# include "x86enc.h"
+
+
+/*Converts DCT coefficients from transposed order into zig-zag scan order and
+ stores them in %[y].
+ This relies on two macros to load the contents of each row:
+ OC_ZZ_LOAD_ROW_LO(row,"reg") and OC_ZZ_LOAD_ROW_HI(row,"reg"), which load
+ the first four and second four entries of each row into the specified
+ register, respectively.
+ OC_ZZ_LOAD_ROW_LO must be called before OC_ZZ_LOAD_ROW_HI for the same row
+ (because when the rows are already in SSE2 registers, loading the high half
+ destructively modifies the register).
+ The index of each output element in the original 64-element array should wind
+ up in the following 8x8 matrix (the letters indicate the order we compute
+ each 4-tuple below):
+ A 0 8 1 2 9 16 24 17 B
+ C 10 3 4 11 18 25 32 40 E
+ F 33 26 19 12 5 6 13 20 D
+ G 27 34 41 48 56 49 42 35 I
+ L 28 21 14 7 15 22 29 36 M
+ H 43 50 57 58 51 44 37 30 O
+ N 23 31 38 45 52 59 60 53 J
+ P 46 39 47 54 61 62 55 63 K
+ The order of the coefficients within each tuple is reversed in the comments
+ below to reflect the usual MSB to LSB notation.*/
+#define OC_TRANSPOSE_ZIG_ZAG_MMXEXT \
+ OC_ZZ_LOAD_ROW_LO(0,"%%mm0") /*mm0=03 02 01 00*/ \
+ OC_ZZ_LOAD_ROW_LO(1,"%%mm1") /*mm1=11 10 09 08*/ \
+ OC_ZZ_LOAD_ROW_LO(2,"%%mm2") /*mm2=19 18 17 16*/ \
+ OC_ZZ_LOAD_ROW_LO(3,"%%mm3") /*mm3=27 26 25 24*/ \
+ OC_ZZ_LOAD_ROW_HI(0,"%%mm4") /*mm4=07 06 05 04*/ \
+ OC_ZZ_LOAD_ROW_HI(1,"%%mm5") /*mm5=15 14 13 12*/ \
+ OC_ZZ_LOAD_ROW_HI(2,"%%mm6") /*mm6=23 22 21 20*/ \
+ "movq %%mm0,%%mm7\n\t" /*mm7=03 02 01 00*/ \
+ "punpckhdq %%mm1,%%mm0\n\t" /*mm0=11 10 03 02*/ \
+ "pshufw $0x39,%%mm4,%%mm4\n\t" /*mm4=04 07 06 05*/ \
+ "punpcklwd %%mm0,%%mm1\n\t" /*mm1=03 09 02 08*/ \
+ "pshufw $0x39,%%mm5,%%mm5\n\t" /*mm5=12 15 14 13*/ \
+ "punpcklwd %%mm1,%%mm7\n\t" /*mm7=02 01 08 00 *A*/ \
+ "movq %%mm7,0x00(%[y])\n\t" \
+ "punpckhwd %%mm4,%%mm1\n\t" /*mm1=04 03 07 09*/ \
+ "movq %%mm2,%%mm7\n\t" /*mm7=19 18 17 16*/ \
+ "punpckhdq %%mm1,%%mm0\n\t" /*mm0=04 03 11 10*/ \
+ "punpckhwd %%mm5,%%mm7\n\t" /*mm7=12 19 15 18*/ \
+ "punpcklwd %%mm3,%%mm1\n\t" /*mm1=25 07 24 09*/ \
+ "punpcklwd %%mm6,%%mm5\n\t" /*mm5=21 14 20 13*/ \
+ "punpcklwd %%mm2,%%mm1\n\t" /*mm1=17 24 16 09 *B*/ \
+ OC_ZZ_LOAD_ROW_LO(4,"%%mm2") /*mm2=35 34 33 32*/ \
+ "movq %%mm1,0x08(%[y])\n\t" \
+ OC_ZZ_LOAD_ROW_LO(5,"%%mm1") /*mm1=43 42 41 40*/ \
+ "pshufw $0x78,%%mm0,%%mm0\n\t" /*mm0=11 04 03 10 *C*/ \
+ "movq %%mm0,0x10(%[y])\n\t" \
+ "punpckhdq %%mm4,%%mm6\n\t" /*mm6=?? 07 23 22*/ \
+ "punpckldq %%mm5,%%mm4\n\t" /*mm4=20 13 06 05 *D*/ \
+ "movq %%mm4,0x28(%[y])\n\t" \
+ "psrlq $16,%%mm3\n\t" /*mm3=.. 27 26 25*/ \
+ "pshufw $0x0E,%%mm2,%%mm0\n\t" /*mm0=?? ?? 35 34*/ \
+ "movq %%mm7,%%mm4\n\t" /*mm4=12 19 15 18*/ \
+ "punpcklwd %%mm3,%%mm2\n\t" /*mm2=26 33 25 32*/ \
+ "punpcklwd %%mm1,%%mm4\n\t" /*mm4=41 15 40 18*/ \
+ "punpckhwd %%mm1,%%mm3\n\t" /*mm3=43 .. 42 27*/ \
+ "punpckldq %%mm2,%%mm4\n\t" /*mm4=25 32 40 18*/ \
+ "punpcklwd %%mm0,%%mm3\n\t" /*mm3=35 42 34 27*/ \
+ OC_ZZ_LOAD_ROW_LO(6,"%%mm0") /*mm0=51 50 49 48*/ \
+ "pshufw $0x6C,%%mm4,%%mm4\n\t" /*mm4=40 32 25 18 *E*/ \
+ "movq %%mm4,0x18(%[y])\n\t" \
+ OC_ZZ_LOAD_ROW_LO(7,"%%mm4") /*mm4=59 58 57 56*/ \
+ "punpckhdq %%mm7,%%mm2\n\t" /*mm2=12 19 26 33 *F*/ \
+ "movq %%mm2,0x20(%[y])\n\t" \
+ "pshufw $0xD0,%%mm1,%%mm1\n\t" /*mm1=43 41 ?? ??*/ \
+ "pshufw $0x87,%%mm0,%%mm0\n\t" /*mm0=50 48 49 51*/ \
+ "movq %%mm3,%%mm2\n\t" /*mm2=35 42 34 27*/ \
+ "punpckhwd %%mm0,%%mm1\n\t" /*mm1=50 43 48 41*/ \
+ "pshufw $0x93,%%mm4,%%mm4\n\t" /*mm4=58 57 56 59*/ \
+ "punpckldq %%mm1,%%mm3\n\t" /*mm3=48 41 34 27 *G*/ \
+ "movq %%mm3,0x30(%[y])\n\t" \
+ "punpckhdq %%mm4,%%mm1\n\t" /*mm1=58 57 50 43 *H*/ \
+ "movq %%mm1,0x50(%[y])\n\t" \
+ OC_ZZ_LOAD_ROW_HI(7,"%%mm1") /*mm1=63 62 61 60*/ \
+ "punpcklwd %%mm0,%%mm4\n\t" /*mm4=49 56 51 59*/ \
+ OC_ZZ_LOAD_ROW_HI(6,"%%mm0") /*mm0=55 54 53 52*/ \
+ "psllq $16,%%mm6\n\t" /*mm6=07 23 22 ..*/ \
+ "movq %%mm4,%%mm3\n\t" /*mm3=49 56 51 59*/ \
+ "punpckhdq %%mm2,%%mm4\n\t" /*mm4=35 42 49 56 *I*/ \
+ OC_ZZ_LOAD_ROW_HI(3,"%%mm2") /*mm2=31 30 29 28*/ \
+ "movq %%mm4,0x38(%[y])\n\t" \
+ "punpcklwd %%mm1,%%mm3\n\t" /*mm3=61 51 60 59*/ \
+ "punpcklwd %%mm6,%%mm7\n\t" /*mm7=22 15 .. ??*/ \
+ "movq %%mm3,%%mm4\n\t" /*mm4=61 51 60 59*/ \
+ "punpcklwd %%mm0,%%mm3\n\t" /*mm3=53 60 52 59*/ \
+ "punpckhwd %%mm0,%%mm4\n\t" /*mm4=55 61 54 51*/ \
+ OC_ZZ_LOAD_ROW_HI(4,"%%mm0") /*mm0=39 38 37 36*/ \
+ "pshufw $0xE1,%%mm3,%%mm3\n\t" /*mm3=53 60 59 52 *J*/ \
+ "movq %%mm3,0x68(%[y])\n\t" \
+ "movq %%mm4,%%mm3\n\t" /*mm3=?? ?? 54 51*/ \
+ "pshufw $0x39,%%mm2,%%mm2\n\t" /*mm2=28 31 30 29*/ \
+ "punpckhwd %%mm1,%%mm4\n\t" /*mm4=63 55 62 61 *K*/ \
+ OC_ZZ_LOAD_ROW_HI(5,"%%mm1") /*mm1=47 46 45 44*/ \
+ "movq %%mm4,0x78(%[y])\n\t" \
+ "punpckhwd %%mm2,%%mm6\n\t" /*mm6=28 07 31 23*/ \
+ "punpcklwd %%mm0,%%mm2\n\t" /*mm2=37 30 36 29*/ \
+ "punpckhdq %%mm6,%%mm5\n\t" /*mm5=28 07 21 14*/ \
+ "pshufw $0x4B,%%mm2,%%mm2\n\t" /*mm2=36 29 30 37*/ \
+ "pshufw $0x87,%%mm5,%%mm5\n\t" /*mm5=07 14 21 28 *L*/ \
+ "movq %%mm5,0x40(%[y])\n\t" \
+ "punpckhdq %%mm2,%%mm7\n\t" /*mm7=36 29 22 15 *M*/ \
+ "movq %%mm7,0x48(%[y])\n\t" \
+ "pshufw $0x9C,%%mm1,%%mm1\n\t" /*mm1=46 45 47 44*/ \
+ "punpckhwd %%mm1,%%mm0\n\t" /*mm0=46 39 45 38*/ \
+ "punpcklwd %%mm1,%%mm3\n\t" /*mm3=47 54 44 51*/ \
+ "punpckldq %%mm0,%%mm6\n\t" /*mm6=45 38 31 23 *N*/ \
+ "movq %%mm6,0x60(%[y])\n\t" \
+ "punpckhdq %%mm3,%%mm0\n\t" /*mm0=47 54 46 39*/ \
+ "punpckldq %%mm2,%%mm3\n\t" /*mm3=30 37 44 51 *O*/ \
+ "movq %%mm3,0x58(%[y])\n\t" \
+ "pshufw $0xB1,%%mm0,%%mm0\n\t" /*mm0=54 47 39 46 *P*/ \
+ "movq %%mm0,0x70(%[y])\n\t" \
+
+/*Converts DCT coefficients in %[dct] from natural order into zig-zag scan
+ order and stores them in %[qdct].
+ The index of each output element in the original 64-element array should wind
+ up in the following 8x8 matrix (the letters indicate the order we compute
+ each 4-tuple below):
+ A 0 1 8 16 9 2 3 10 B
+ C 17 24 32 25 18 11 4 5 D
+ E 12 19 26 33 40 48 41 34 I
+ H 27 20 13 6 7 14 21 28 G
+ K 35 42 49 56 57 50 43 36 J
+ F 29 22 15 23 30 37 44 51 M
+ P 58 59 52 45 38 31 39 46 L
+ N 53 60 61 54 47 55 62 63 O
+ The order of the coefficients within each tuple is reversed in the comments
+ below to reflect the usual MSB to LSB notation.*/
+#define OC_ZIG_ZAG_MMXEXT \
+ "movq 0x00(%[dct]),%%mm0\n\t" /*mm0=03 02 01 00*/ \
+ "movq 0x08(%[dct]),%%mm1\n\t" /*mm1=07 06 05 04*/ \
+ "movq 0x10(%[dct]),%%mm2\n\t" /*mm2=11 10 09 08*/ \
+ "movq 0x20(%[dct]),%%mm3\n\t" /*mm3=19 18 17 16*/ \
+ "movq 0x30(%[dct]),%%mm4\n\t" /*mm4=27 26 25 24*/ \
+ "movq 0x40(%[dct]),%%mm5\n\t" /*mm5=35 34 33 32*/ \
+ "movq %%mm2,%%mm7\n\t" /*mm7=11 10 09 08*/ \
+ "punpcklwd %%mm3,%%mm2\n\t" /*mm2=17 09 16 08*/ \
+ "movq %%mm0,%%mm6\n\t" /*mm6=03 02 01 00*/ \
+ "punpckldq %%mm2,%%mm0\n\t" /*mm0=16 08 01 00 *A*/ \
+ "movq %%mm0,0x00(%[qdct])\n\t" \
+ "movq 0x18(%[dct]),%%mm0\n\t" /*mm0=15 14 13 12*/ \
+ "punpckhdq %%mm6,%%mm6\n\t" /*mm6=03 02 03 02*/ \
+ "psrlq $16,%%mm7\n\t" /*mm7=.. 11 10 09*/ \
+ "punpckldq %%mm7,%%mm6\n\t" /*mm6=10 09 03 02*/ \
+ "punpckhwd %%mm7,%%mm3\n\t" /*mm3=.. 19 11 18*/ \
+ "pshufw $0xD2,%%mm6,%%mm6\n\t" /*mm6=10 03 02 09 *B*/ \
+ "movq %%mm6,0x08(%[qdct])\n\t" \
+ "psrlq $48,%%mm2\n\t" /*mm2=.. .. .. 17*/ \
+ "movq %%mm1,%%mm6\n\t" /*mm6=07 06 05 04*/ \
+ "punpcklwd %%mm5,%%mm2\n\t" /*mm2=33 .. 32 17*/ \
+ "movq %%mm3,%%mm7\n\t" /*mm7=.. 19 11 18*/ \
+ "punpckldq %%mm1,%%mm3\n\t" /*mm3=05 04 11 18 *C*/ \
+ "por %%mm2,%%mm7\n\t" /*mm7=33 19 ?? ??*/ \
+ "punpcklwd %%mm4,%%mm2\n\t" /*mm2=25 32 24 17 *D**/ \
+ "movq %%mm2,0x10(%[qdct])\n\t" \
+ "movq %%mm3,0x18(%[qdct])\n\t" \
+ "movq 0x28(%[dct]),%%mm2\n\t" /*mm2=23 22 21 20*/ \
+ "movq 0x38(%[dct]),%%mm1\n\t" /*mm1=31 30 29 28*/ \
+ "pshufw $0x9C,%%mm0,%%mm3\n\t" /*mm3=14 13 15 12*/ \
+ "punpckhdq %%mm7,%%mm7\n\t" /*mm7=33 19 33 19*/ \
+ "punpckhwd %%mm3,%%mm6\n\t" /*mm6=14 07 13 06*/ \
+ "punpckldq %%mm0,%%mm0\n\t" /*mm0=13 12 13 12*/ \
+ "punpcklwd %%mm1,%%mm3\n\t" /*mm3=29 15 28 12*/ \
+ "punpckhwd %%mm4,%%mm0\n\t" /*mm0=27 13 26 12*/ \
+ "pshufw $0xB4,%%mm3,%%mm3\n\t" /*mm3=15 29 28 12*/ \
+ "psrlq $48,%%mm4\n\t" /*mm4=.. .. .. 27*/ \
+ "punpcklwd %%mm7,%%mm0\n\t" /*mm0=33 26 19 12 *E*/ \
+ "punpcklwd %%mm1,%%mm4\n\t" /*mm4=29 .. 28 27*/ \
+ "punpckhwd %%mm2,%%mm3\n\t" /*mm3=23 15 22 29 *F*/ \
+ "movq %%mm0,0x20(%[qdct])\n\t" \
+ "movq %%mm3,0x50(%[qdct])\n\t" \
+ "movq 0x60(%[dct]),%%mm3\n\t" /*mm3=51 50 49 48*/ \
+ "movq 0x70(%[dct]),%%mm7\n\t" /*mm7=59 58 57 56*/ \
+ "movq 0x50(%[dct]),%%mm0\n\t" /*mm0=43 42 41 40*/ \
+ "punpcklwd %%mm4,%%mm2\n\t" /*mm2=28 21 27 20*/ \
+ "psrlq $32,%%mm5\n\t" /*mm5=.. .. 35 34*/ \
+ "movq %%mm2,%%mm4\n\t" /*mm4=28 21 27 20*/ \
+ "punpckldq %%mm6,%%mm2\n\t" /*mm2=13 06 27 20*/ \
+ "punpckhdq %%mm4,%%mm6\n\t" /*mm6=28 21 14 07 *G*/ \
+ "movq %%mm3,%%mm4\n\t" /*mm4=51 50 49 48*/ \
+ "pshufw $0xB1,%%mm2,%%mm2\n\t" /*mm2=06 13 20 27 *H*/ \
+ "movq %%mm2,0x30(%[qdct])\n\t" \
+ "movq %%mm6,0x38(%[qdct])\n\t" \
+ "movq 0x48(%[dct]),%%mm2\n\t" /*mm2=39 38 37 36*/ \
+ "punpcklwd %%mm5,%%mm4\n\t" /*mm4=35 49 34 48*/ \
+ "movq 0x58(%[dct]),%%mm5\n\t" /*mm5=47 46 45 44*/ \
+ "punpckldq %%mm7,%%mm6\n\t" /*mm6=57 56 14 07*/ \
+ "psrlq $32,%%mm3\n\t" /*mm3=.. .. 51 50*/ \
+ "punpckhwd %%mm0,%%mm6\n\t" /*mm6=43 57 42 56*/ \
+ "punpcklwd %%mm4,%%mm0\n\t" /*mm0=34 41 48 40 *I*/ \
+ "pshufw $0x4E,%%mm6,%%mm6\n\t" /*mm6=42 56 43 57*/ \
+ "movq %%mm0,0x28(%[qdct])\n\t" \
+ "punpcklwd %%mm2,%%mm3\n\t" /*mm3=37 51 36 50*/ \
+ "punpckhwd %%mm6,%%mm4\n\t" /*mm4=42 35 56 49*/ \
+ "punpcklwd %%mm3,%%mm6\n\t" /*mm6=36 43 50 57 *J*/ \
+ "pshufw $0x4E,%%mm4,%%mm4\n\t" /*mm4=56 49 42 35 *K*/ \
+ "movq %%mm4,0x40(%[qdct])\n\t" \
+ "movq %%mm6,0x48(%[qdct])\n\t" \
+ "movq 0x68(%[dct]),%%mm6\n\t" /*mm6=55 54 53 52*/ \
+ "movq 0x78(%[dct]),%%mm0\n\t" /*mm0=63 62 61 60*/ \
+ "psrlq $32,%%mm1\n\t" /*mm1=.. .. 31 30*/ \
+ "pshufw $0xD8,%%mm5,%%mm5\n\t" /*mm5=47 45 46 44*/ \
+ "pshufw $0x0B,%%mm3,%%mm3\n\t" /*mm3=50 50 51 37*/ \
+ "punpcklwd %%mm5,%%mm1\n\t" /*mm1=46 31 44 30*/ \
+ "pshufw $0xC9,%%mm6,%%mm6\n\t" /*mm6=55 52 54 53*/ \
+ "punpckhwd %%mm1,%%mm2\n\t" /*mm2=46 39 31 38 *L*/ \
+ "punpcklwd %%mm3,%%mm1\n\t" /*mm1=51 44 37 30 *M*/ \
+ "movq %%mm2,0x68(%[qdct])\n\t" \
+ "movq %%mm1,0x58(%[qdct])\n\t" \
+ "punpckhwd %%mm6,%%mm5\n\t" /*mm5=55 47 52 45*/ \
+ "punpckldq %%mm0,%%mm6\n\t" /*mm6=61 60 54 53*/ \
+ "pshufw $0x10,%%mm5,%%mm4\n\t" /*mm4=45 52 45 45*/ \
+ "pshufw $0x78,%%mm6,%%mm6\n\t" /*mm6=53 60 61 54 *N*/ \
+ "punpckhdq %%mm0,%%mm5\n\t" /*mm5=63 62 55 47 *O*/ \
+ "punpckhdq %%mm4,%%mm7\n\t" /*mm7=45 52 59 58 *P*/ \
+ "movq %%mm6,0x70(%[qdct])\n\t" \
+ "movq %%mm5,0x78(%[qdct])\n\t" \
+ "movq %%mm7,0x60(%[qdct])\n\t" \
+
+#endif
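
For reference, a scalar equivalent of the OC_ZIG_ZAG_MMXEXT permutation above; the lookup table is simply the index matrix documented in the comment, read row by row. This is illustrative only and not part of the patch (oc_zig_zag_scalar is a hypothetical name). OC_TRANSPOSE_ZIG_ZAG_MMXEXT performs the analogous permutation for the transposed intermediate layout the MMX fDCT produces.

#include <ogg/os_types.h>

/*Standard 8x8 zig-zag scan order, matching the index matrix documented above
  OC_ZIG_ZAG_MMXEXT, read row by row.*/
static const unsigned char OC_ZIG_ZAG_SCAN[64]={
   0, 1, 8,16, 9, 2, 3,10,
  17,24,32,25,18,11, 4, 5,
  12,19,26,33,40,48,41,34,
  27,20,13, 6, 7,14,21,28,
  35,42,49,56,57,50,43,36,
  29,22,15,23,30,37,44,51,
  58,59,52,45,38,31,39,46,
  53,60,61,54,47,55,62,63
};

/*Illustrative scalar equivalent of OC_ZIG_ZAG_MMXEXT.*/
static void oc_zig_zag_scalar(ogg_int16_t _qdct[64],const ogg_int16_t _dct[64]){
  int i;
  for(i=0;i<64;i++)_qdct[i]=_dct[OC_ZIG_ZAG_SCAN[i]];
}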
diff --git a/thirdparty/libtheora/x86_vc/mmxencfrag.c b/thirdparty/libtheora/x86_vc/mmxencfrag.c
index 94f1d06513..a6be819135 100644
--- a/thirdparty/libtheora/x86_vc/mmxencfrag.c
+++ b/thirdparty/libtheora/x86_vc/mmxencfrag.c
@@ -266,7 +266,7 @@ unsigned oc_enc_frag_sad2_thresh_mmxext(const unsigned char *_src,
/*Performs the first two stages of an 8-point 1-D Hadamard transform.
The transform is performed in place, except that outputs 0-3 are swapped with
outputs 4-7.
- Outputs 2, 3, 6 and 7 from the second stage are negated (which allows us to
+ Outputs 2, 3, 6, and 7 from the second stage are negated (which allows us to
perform this stage in place with no temporary registers).*/
#define OC_HADAMARD_AB_8x4 __asm{ \
/*Stage A: \
@@ -299,7 +299,7 @@ unsigned oc_enc_frag_sad2_thresh_mmxext(const unsigned char *_src,
}
/*Performs the last stage of an 8-point 1-D Hadamard transform in place.
- Ouputs 1, 3, 5, and 7 are negated (which allows us to perform this stage in
+ Outputs 1, 3, 5, and 7 are negated (which allows us to perform this stage in
place with no temporary registers).*/
#define OC_HADAMARD_C_8x4 __asm{ \
/*Stage C:*/ \
@@ -468,12 +468,14 @@ unsigned oc_enc_frag_sad2_thresh_mmxext(const unsigned char *_src,
mm7 = d3 c3 b3 a3*/ \
}
-static unsigned oc_int_frag_satd_thresh_mmxext(const unsigned char *_src,
- int _src_ystride,const unsigned char *_ref,int _ref_ystride,unsigned _thresh){
- OC_ALIGN8(ogg_int16_t buf[64]);
- ogg_int16_t *bufp;
- unsigned ret1;
- unsigned ret2;
+static unsigned oc_int_frag_satd_mmxext(int *_dc,
+ const unsigned char *_src,int _src_ystride,
+ const unsigned char *_ref,int _ref_ystride){
+ OC_ALIGN8(ogg_int16_t buf[64]);
+ ogg_int16_t *bufp;
+ unsigned ret;
+ unsigned ret2;
+ int dc;
bufp=buf;
__asm{
#define SRC esi
@@ -481,8 +483,10 @@ static unsigned oc_int_frag_satd_thresh_mmxext(const unsigned char *_src,
#define SRC_YSTRIDE ecx
#define REF_YSTRIDE edx
#define BUF edi
-#define RET eax
-#define RET2 edx
+#define RET edx
+#define RET2 ecx
+#define DC eax
+#define DC_WORD ax
mov SRC,_src
mov SRC_YSTRIDE,_src_ystride
mov REF,_ref
@@ -508,14 +512,18 @@ static unsigned oc_int_frag_satd_thresh_mmxext(const unsigned char *_src,
movq mm2,[0x20+BUF]
movq mm3,[0x30+BUF]
movq mm0,[0x00+BUF]
- OC_HADAMARD_ABS_ACCUM_8x4(0x28,0x38)
+ /*We split out the stages here so we can save the DC coefficient in the
+ middle.*/
+ OC_HADAMARD_AB_8x4
+ OC_HADAMARD_C_ABS_ACCUM_A_8x4(0x28,0x38)
+ movd DC,mm1
+ OC_HADAMARD_C_ABS_ACCUM_B_8x4(0x28,0x38)
/*Up to this point, everything fit in 16 bits (8 input + 1 for the
difference + 2*3 for the two 8-point 1-D Hadamards - 1 for the abs - 1
for the factor of two we dropped + 3 for the vertical accumulation).
Now we finally have to promote things to dwords.
We break this part out of OC_HADAMARD_ABS_ACCUM_8x4 to hide the long
latency of pmaddwd by starting the next series of loads now.*/
- mov RET2,_thresh
pmaddwd mm0,mm7
movq mm1,[0x50+BUF]
movq mm5,[0x58+BUF]
@@ -525,29 +533,28 @@ static unsigned oc_int_frag_satd_thresh_mmxext(const unsigned char *_src,
movq mm6,[0x68+BUF]
paddd mm4,mm0
movq mm3,[0x70+BUF]
- movd RET,mm4
+ movd RET2,mm4
movq mm7,[0x78+BUF]
- /*The sums produced by OC_HADAMARD_ABS_ACCUM_8x4 each have an extra 4
- added to them, and a factor of two removed; correct the final sum here.*/
- lea RET,[RET+RET-32]
movq mm0,[0x40+BUF]
- cmp RET,RET2
movq mm4,[0x48+BUF]
- jae at_end
OC_HADAMARD_ABS_ACCUM_8x4(0x68,0x78)
pmaddwd mm0,mm7
- /*There isn't much to stick in here to hide the latency this time, but the
- alternative to pmaddwd is movq->punpcklwd->punpckhwd->paddd, whose
- latency is even worse.*/
- sub RET,32
+ /*Subtract abs(dc) from 2*ret2.*/
+ movsx DC,DC_WORD
+ cdq
+ lea RET2,[RET+RET2*2]
movq mm4,mm0
punpckhdq mm0,mm0
+ xor RET,DC
paddd mm4,mm0
- movd RET2,mm4
- lea RET,[RET+RET2*2]
- align 16
-at_end:
- mov ret1,RET
+ /*The sums produced by OC_HADAMARD_ABS_ACCUM_8x4 each have an extra 4
+ added to them, a factor of two removed, and the DC value included;
+ correct the final sum here.*/
+ sub RET2,RET
+ movd RET,mm4
+ lea RET,[RET2+RET*2-64]
+ mov ret,RET
+ mov dc,DC
#undef SRC
#undef REF
#undef SRC_YSTRIDE
@@ -555,18 +562,21 @@ at_end:
#undef BUF
#undef RET
#undef RET2
+#undef DC
+#undef DC_WORD
}
- return ret1;
+ *_dc=dc;
+ return ret;
}
-unsigned oc_enc_frag_satd_thresh_mmxext(const unsigned char *_src,
- const unsigned char *_ref,int _ystride,unsigned _thresh){
- return oc_int_frag_satd_thresh_mmxext(_src,_ystride,_ref,_ystride,_thresh);
+unsigned oc_enc_frag_satd_mmxext(int *_dc,const unsigned char *_src,
+ const unsigned char *_ref,int _ystride){
+ return oc_int_frag_satd_mmxext(_dc,_src,_ystride,_ref,_ystride);
}
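
For reference, a scalar model of what oc_int_frag_satd_mmxext() now computes: the sum of absolute values of the 2-D Hadamard transform of the 8x8 difference block, with the DC coefficient excluded from the sum and returned separately through *_dc. This is illustrative only and not part of the patch: hadamard8 and frag_satd_scalar are hypothetical names, and the constant scale and rounding factors the asm folds out along the way (and its 16-bit arithmetic) are not reproduced.

#include <stdlib.h>
#include <string.h>

/*8-point 1-D Walsh-Hadamard transform over _x[0],_x[_stride],...,_x[7*_stride].*/
static void hadamard8(int *_x,int _stride){
  int t[8];
  int u[8];
  int step;
  int i;
  for(i=0;i<8;i++)t[i]=_x[i*_stride];
  for(step=1;step<8;step<<=1){
    /*Butterfly elements whose indices differ only in the 'step' bit.*/
    for(i=0;i<8;i++)u[i]=i&step?t[i-step]-t[i]:t[i]+t[i+step];
    memcpy(t,u,sizeof(t));
  }
  for(i=0;i<8;i++)_x[i*_stride]=t[i];
}

/*Illustrative scalar model of oc_int_frag_satd_mmxext().*/
static unsigned frag_satd_scalar(int *_dc,const unsigned char *_src,
 int _src_ystride,const unsigned char *_ref,int _ref_ystride){
  int      buf[64];
  unsigned satd;
  int      i;
  int      j;
  /*8x8 difference block.*/
  for(i=0;i<8;i++)for(j=0;j<8;j++){
    buf[i*8+j]=_src[i*_src_ystride+j]-_ref[i*_ref_ystride+j];
  }
  /*2-D Hadamard: transform the rows, then the columns.*/
  for(i=0;i<8;i++)hadamard8(buf+i*8,1);
  for(j=0;j<8;j++)hadamard8(buf+j,8);
  /*Sum the absolute transformed coefficients; the DC term (the plain sum of
    the differences) is reported separately instead of being included.*/
  satd=0;
  for(i=0;i<64;i++)satd+=abs(buf[i]);
  *_dc=buf[0];
  return satd-abs(buf[0]);
}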
/*Our internal implementation of frag_copy2 takes an extra stride parameter so
- we can share code with oc_enc_frag_satd2_thresh_mmxext().*/
+ we can share code with oc_enc_frag_satd2_mmxext().*/
static void oc_int_frag_copy2_mmxext(unsigned char *_dst,int _dst_ystride,
const unsigned char *_src1,const unsigned char *_src2,int _src_ystride){
__asm{
@@ -694,30 +704,31 @@ static void oc_int_frag_copy2_mmxext(unsigned char *_dst,int _dst_ystride,
}
}
-unsigned oc_enc_frag_satd2_thresh_mmxext(const unsigned char *_src,
- const unsigned char *_ref1,const unsigned char *_ref2,int _ystride,
- unsigned _thresh){
+unsigned oc_enc_frag_satd2_mmxext(int *_dc,const unsigned char *_src,
+ const unsigned char *_ref1,const unsigned char *_ref2,int _ystride){
OC_ALIGN8(unsigned char ref[64]);
oc_int_frag_copy2_mmxext(ref,8,_ref1,_ref2,_ystride);
- return oc_int_frag_satd_thresh_mmxext(_src,_ystride,ref,8,_thresh);
+ return oc_int_frag_satd_mmxext(_dc,_src,_ystride,ref,8);
}
-unsigned oc_enc_frag_intra_satd_mmxext(const unsigned char *_src,
+unsigned oc_enc_frag_intra_satd_mmxext(int *_dc,const unsigned char *_src,
int _ystride){
- OC_ALIGN8(ogg_int16_t buf[64]);
- ogg_int16_t *bufp;
- unsigned ret1;
- unsigned ret2;
+ OC_ALIGN8(ogg_int16_t buf[64]);
+ ogg_int16_t *bufp;
+ unsigned ret1;
+ unsigned ret2;
+ int dc;
bufp=buf;
__asm{
#define SRC eax
#define SRC4 esi
#define BUF edi
-#define RET eax
-#define RET_WORD ax
-#define RET2 ecx
#define YSTRIDE edx
#define YSTRIDE3 ecx
+#define RET eax
+#define RET2 ecx
+#define DC edx
+#define DC_WORD dx
mov SRC,_src
mov BUF,bufp
mov YSTRIDE,_ystride
@@ -749,7 +760,7 @@ unsigned oc_enc_frag_intra_satd_mmxext(const unsigned char *_src,
middle.*/
OC_HADAMARD_AB_8x4
OC_HADAMARD_C_ABS_ACCUM_A_8x4(0x28,0x38)
- movd RET,mm1
+ movd DC,mm1
OC_HADAMARD_C_ABS_ACCUM_B_8x4(0x28,0x38)
/*Up to this point, everything fit in 16 bits (8 input + 1 for the
difference + 2*3 for the two 8-point 1-D Hadamards - 1 for the abs - 1
@@ -767,31 +778,34 @@ unsigned oc_enc_frag_intra_satd_mmxext(const unsigned char *_src,
movq mm3,[0x70+BUF]
paddd mm4,mm0
movq mm7,[0x78+BUF]
- movd RET2,mm4
+ movd RET,mm4
movq mm0,[0x40+BUF]
movq mm4,[0x48+BUF]
OC_HADAMARD_ABS_ACCUM_8x4(0x68,0x78)
pmaddwd mm0,mm7
/*We assume that the DC coefficient is always positive (which is true,
because the input to the INTRA transform was not a difference).*/
- movzx RET,RET_WORD
- add RET2,RET2
- sub RET2,RET
+ movzx DC,DC_WORD
+ add RET,RET
+ sub RET,DC
movq mm4,mm0
punpckhdq mm0,mm0
paddd mm4,mm0
- movd RET,mm4
- lea RET,[-64+RET2+RET*2]
+ movd RET2,mm4
+ lea RET,[-64+RET+RET2*2]
+ mov [dc],DC
mov [ret1],RET
#undef SRC
#undef SRC4
#undef BUF
-#undef RET
-#undef RET_WORD
-#undef RET2
#undef YSTRIDE
#undef YSTRIDE3
+#undef RET
+#undef RET2
+#undef DC
+#undef DC_WORD
}
+ *_dc=dc;
return ret1;
}
diff --git a/thirdparty/libtheora/x86_vc/mmxfdct.c b/thirdparty/libtheora/x86_vc/mmxfdct.c
index d908ce2413..c9ee530ea2 100644
--- a/thirdparty/libtheora/x86_vc/mmxfdct.c
+++ b/thirdparty/libtheora/x86_vc/mmxfdct.c
@@ -12,6 +12,7 @@
/*MMX fDCT implementation for x86_32*/
/*$Id: fdct_ses2.c 14579 2008-03-12 06:42:40Z xiphmont $*/
#include "x86enc.h"
+#include "x86zigzag.h"
#if defined(OC_X86_ASM)
@@ -462,18 +463,22 @@
}
/*MMX implementation of the fDCT.*/
-void oc_enc_fdct8x8_mmx(ogg_int16_t _y[64],const ogg_int16_t _x[64]){
- ptrdiff_t a;
+void oc_enc_fdct8x8_mmxext(ogg_int16_t _y[64],const ogg_int16_t _x[64]){
+ OC_ALIGN8(ogg_int16_t buf[64]);
+ ogg_int16_t *bufp;
+ bufp=buf;
__asm{
+#define X edx
#define Y eax
#define A ecx
-#define X edx
+#define BUF esi
/*Add two extra bits of working precision to improve accuracy; any more and
we could overflow.*/
/*We also add biases to correct for some systematic error that remains in
the full fDCT->iDCT round trip.*/
mov X, _x
mov Y, _y
+ mov BUF, bufp
movq mm0,[0x00+X]
movq mm1,[0x10+X]
movq mm2,[0x20+X]
@@ -591,79 +596,90 @@ void oc_enc_fdct8x8_mmx(ogg_int16_t _y[64],const ogg_int16_t _x[64]){
movq mm3,[0x30+Y]
OC_FDCT_STAGE1_8x4
OC_FDCT8x4(0x00,0x10,0x20,0x30,0x08,0x18,0x28,0x38)
- OC_TRANSPOSE8x4(0x00,0x10,0x20,0x30,0x08,0x18,0x28,0x38)
/*mm0={-2}x4*/
- pcmpeqw mm0,mm0
- paddw mm0,mm0
- /*Round the results.*/
- psubw mm1,mm0
- psubw mm2,mm0
- psraw mm1,2
- psubw mm3,mm0
- movq [0x18+Y],mm1
- psraw mm2,2
- psubw mm4,mm0
- movq mm1,[0x08+Y]
- psraw mm3,2
- psubw mm5,mm0
+ pcmpeqw mm2,mm2
+ paddw mm2,mm2
+ /*Round and store the results (no transpose).*/
+ movq mm7,[Y+0x10]
+ psubw mm4,mm2
+ psubw mm6,mm2
psraw mm4,2
- psubw mm6,mm0
- psraw mm5,2
- psubw mm7,mm0
+ psubw mm0,mm2
+ movq [BUF+0x00],mm4
+ movq mm4,[Y+0x30]
psraw mm6,2
- psubw mm1,mm0
+ psubw mm5,mm2
+ movq [BUF+0x20],mm6
+ psraw mm0,2
+ psubw mm3,mm2
+ movq [BUF+0x40],mm0
+ psraw mm5,2
+ psubw mm1,mm2
+ movq [BUF+0x50],mm5
+ psraw mm3,2
+ psubw mm7,mm2
+ movq [BUF+0x60],mm3
+ psraw mm1,2
+ psubw mm4,mm2
+ movq [BUF+0x70],mm1
psraw mm7,2
+ movq [BUF+0x10],mm7
+ psraw mm4,2
+ movq [BUF+0x30],mm4
+ /*Load the next block.*/
movq mm0,[0x40+Y]
- psraw mm1,2
- movq [0x30+Y],mm7
movq mm7,[0x78+Y]
- movq [0x08+Y],mm1
movq mm1,[0x50+Y]
- movq [0x20+Y],mm6
movq mm6,[0x68+Y]
- movq [0x28+Y],mm2
movq mm2,[0x60+Y]
- movq [0x10+Y],mm5
movq mm5,[0x58+Y]
- movq [0x38+Y],mm3
movq mm3,[0x70+Y]
- movq [0x00+Y],mm4
movq mm4,[0x48+Y]
OC_FDCT_STAGE1_8x4
OC_FDCT8x4(0x40,0x50,0x60,0x70,0x48,0x58,0x68,0x78)
- OC_TRANSPOSE8x4(0x40,0x50,0x60,0x70,0x48,0x58,0x68,0x78)
/*mm0={-2}x4*/
- pcmpeqw mm0,mm0
- paddw mm0,mm0
- /*Round the results.*/
- psubw mm1,mm0
- psubw mm2,mm0
- psraw mm1,2
- psubw mm3,mm0
- movq [0x58+Y],mm1
- psraw mm2,2
- psubw mm4,mm0
- movq mm1,[0x48+Y]
- psraw mm3,2
- psubw mm5,mm0
- movq [0x68+Y],mm2
+ pcmpeqw mm2,mm2
+ paddw mm2,mm2
+ /*Round and store the results (no transpose).*/
+ movq mm7,[Y+0x50]
+ psubw mm4,mm2
+ psubw mm6,mm2
psraw mm4,2
- psubw mm6,mm0
- movq [0x78+Y],mm3
- psraw mm5,2
- psubw mm7,mm0
- movq [0x40+Y],mm4
+ psubw mm0,mm2
+ movq [BUF+0x08],mm4
+ movq mm4,[Y+0x70]
psraw mm6,2
- psubw mm1,mm0
- movq [0x50+Y],mm5
- psraw mm7,2
- movq [0x60+Y],mm6
+ psubw mm5,mm2
+ movq [BUF+0x28],mm6
+ psraw mm0,2
+ psubw mm3,mm2
+ movq [BUF+0x48],mm0
+ psraw mm5,2
+ psubw mm1,mm2
+ movq [BUF+0x58],mm5
+ psraw mm3,2
+ psubw mm7,mm2
+ movq [BUF+0x68],mm3
psraw mm1,2
- movq [0x70+Y],mm7
- movq [0x48+Y],mm1
+ psubw mm4,mm2
+ movq [BUF+0x78],mm1
+ psraw mm7,2
+ movq [BUF+0x18],mm7
+ psraw mm4,2
+ movq [BUF+0x38],mm4
+#define OC_ZZ_LOAD_ROW_LO(_row,_reg) \
+ __asm movq _reg,[BUF+16*(_row)] \
+
+#define OC_ZZ_LOAD_ROW_HI(_row,_reg) \
+ __asm movq _reg,[BUF+16*(_row)+8] \
+
+ OC_TRANSPOSE_ZIG_ZAG_MMXEXT
+#undef OC_ZZ_LOAD_ROW_LO
+#undef OC_ZZ_LOAD_ROW_HI
+#undef X
#undef Y
#undef A
-#undef X
+#undef BUF
}
}
diff --git a/thirdparty/libtheora/x86_vc/mmxfrag.c b/thirdparty/libtheora/x86_vc/mmxfrag.c
index 4eb2084dc6..248312ff90 100644
--- a/thirdparty/libtheora/x86_vc/mmxfrag.c
+++ b/thirdparty/libtheora/x86_vc/mmxfrag.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: mmxfrag.c 16578 2009-09-25 19:50:48Z cristianadam $
+ last mod: $Id$
********************************************************************/
@@ -22,12 +22,63 @@
The iteration each instruction belongs to is marked in the comments as #i.*/
#include <stddef.h>
#include "x86int.h"
-#include "mmxfrag.h"
#if defined(OC_X86_ASM)
/*Copies an 8x8 block of pixels from _src to _dst, assuming _ystride bytes
between rows.*/
+# define OC_FRAG_COPY_MMX(_dst,_src,_ystride) \
+ do{ \
+ const unsigned char *src; \
+ unsigned char *dst; \
+ src=(_src); \
+ dst=(_dst); \
+ __asm mov SRC,src \
+ __asm mov DST,dst \
+ __asm mov YSTRIDE,_ystride \
+ /*src+0*ystride*/ \
+ __asm movq mm0,[SRC] \
+ /*src+1*ystride*/ \
+ __asm movq mm1,[SRC+YSTRIDE] \
+ /*ystride3=ystride*3*/ \
+ __asm lea YSTRIDE3,[YSTRIDE+YSTRIDE*2] \
+ /*src+2*ystride*/ \
+ __asm movq mm2,[SRC+YSTRIDE*2] \
+ /*src+3*ystride*/ \
+ __asm movq mm3,[SRC+YSTRIDE3] \
+ /*dst+0*ystride*/ \
+ __asm movq [DST],mm0 \
+ /*dst+1*ystride*/ \
+ __asm movq [DST+YSTRIDE],mm1 \
+ /*Pointer to next 4.*/ \
+ __asm lea SRC,[SRC+YSTRIDE*4] \
+ /*dst+2*ystride*/ \
+ __asm movq [DST+YSTRIDE*2],mm2 \
+ /*dst+3*ystride*/ \
+ __asm movq [DST+YSTRIDE3],mm3 \
+ /*Pointer to next 4.*/ \
+ __asm lea DST,[DST+YSTRIDE*4] \
+ /*src+0*ystride*/ \
+ __asm movq mm0,[SRC] \
+ /*src+1*ystride*/ \
+ __asm movq mm1,[SRC+YSTRIDE] \
+ /*src+2*ystride*/ \
+ __asm movq mm2,[SRC+YSTRIDE*2] \
+ /*src+3*ystride*/ \
+ __asm movq mm3,[SRC+YSTRIDE3] \
+ /*dst+0*ystride*/ \
+ __asm movq [DST],mm0 \
+ /*dst+1*ystride*/ \
+ __asm movq [DST+YSTRIDE],mm1 \
+ /*dst+2*ystride*/ \
+ __asm movq [DST+YSTRIDE*2],mm2 \
+ /*dst+3*ystride*/ \
+ __asm movq [DST+YSTRIDE3],mm3 \
+ } \
+ while(0)
+
+/*Copies an 8x8 block of pixels from _src to _dst, assuming _ystride bytes
+ between rows.*/
void oc_frag_copy_mmx(unsigned char *_dst,
const unsigned char *_src,int _ystride){
#define SRC edx
@@ -41,6 +92,34 @@ void oc_frag_copy_mmx(unsigned char *_dst,
#undef YSTRIDE3
}
+/*Copies the fragments specified by the lists of fragment indices from one
+ frame to another.
+ _dst_frame: The reference frame to copy to.
+ _src_frame: The reference frame to copy from.
+ _ystride: The row stride of the reference frames.
+ _fragis: A pointer to a list of fragment indices.
+ _nfragis: The number of fragment indices to copy.
+ _frag_buf_offs: The offsets of fragments in the reference frames.*/
+void oc_frag_copy_list_mmx(unsigned char *_dst_frame,
+ const unsigned char *_src_frame,int _ystride,
+ const ptrdiff_t *_fragis,ptrdiff_t _nfragis,const ptrdiff_t *_frag_buf_offs){
+ ptrdiff_t fragii;
+ for(fragii=0;fragii<_nfragis;fragii++){
+ ptrdiff_t frag_buf_off;
+ frag_buf_off=_frag_buf_offs[_fragis[fragii]];
+#define SRC edx
+#define DST eax
+#define YSTRIDE ecx
+#define YSTRIDE3 edi
+ OC_FRAG_COPY_MMX(_dst_frame+frag_buf_off,
+ _src_frame+frag_buf_off,_ystride);
+#undef SRC
+#undef DST
+#undef YSTRIDE
+#undef YSTRIDE3
+ }
+}
+
void oc_frag_recon_intra_mmx(unsigned char *_dst,int _ystride,
const ogg_int16_t *_residue){
__asm{
diff --git a/thirdparty/libtheora/x86_vc/mmxfrag.h b/thirdparty/libtheora/x86_vc/mmxfrag.h
deleted file mode 100644
index 45ee93e777..0000000000
--- a/thirdparty/libtheora/x86_vc/mmxfrag.h
+++ /dev/null
@@ -1,61 +0,0 @@
-#if !defined(_x86_vc_mmxfrag_H)
-# define _x86_vc_mmxfrag_H (1)
-# include <stddef.h>
-# include "x86int.h"
-
-#if defined(OC_X86_ASM)
-
-/*Copies an 8x8 block of pixels from _src to _dst, assuming _ystride bytes
- between rows.*/
-#define OC_FRAG_COPY_MMX(_dst,_src,_ystride) \
- do{ \
- const unsigned char *src; \
- unsigned char *dst; \
- src=(_src); \
- dst=(_dst); \
- __asm mov SRC,src \
- __asm mov DST,dst \
- __asm mov YSTRIDE,_ystride \
- /*src+0*ystride*/ \
- __asm movq mm0,[SRC] \
- /*src+1*ystride*/ \
- __asm movq mm1,[SRC+YSTRIDE] \
- /*ystride3=ystride*3*/ \
- __asm lea YSTRIDE3,[YSTRIDE+YSTRIDE*2] \
- /*src+2*ystride*/ \
- __asm movq mm2,[SRC+YSTRIDE*2] \
- /*src+3*ystride*/ \
- __asm movq mm3,[SRC+YSTRIDE3] \
- /*dst+0*ystride*/ \
- __asm movq [DST],mm0 \
- /*dst+1*ystride*/ \
- __asm movq [DST+YSTRIDE],mm1 \
- /*Pointer to next 4.*/ \
- __asm lea SRC,[SRC+YSTRIDE*4] \
- /*dst+2*ystride*/ \
- __asm movq [DST+YSTRIDE*2],mm2 \
- /*dst+3*ystride*/ \
- __asm movq [DST+YSTRIDE3],mm3 \
- /*Pointer to next 4.*/ \
- __asm lea DST,[DST+YSTRIDE*4] \
- /*src+0*ystride*/ \
- __asm movq mm0,[SRC] \
- /*src+1*ystride*/ \
- __asm movq mm1,[SRC+YSTRIDE] \
- /*src+2*ystride*/ \
- __asm movq mm2,[SRC+YSTRIDE*2] \
- /*src+3*ystride*/ \
- __asm movq mm3,[SRC+YSTRIDE3] \
- /*dst+0*ystride*/ \
- __asm movq [DST],mm0 \
- /*dst+1*ystride*/ \
- __asm movq [DST+YSTRIDE],mm1 \
- /*dst+2*ystride*/ \
- __asm movq [DST+YSTRIDE*2],mm2 \
- /*dst+3*ystride*/ \
- __asm movq [DST+YSTRIDE3],mm3 \
- } \
- while(0)
-
-# endif
-#endif
diff --git a/thirdparty/libtheora/x86_vc/mmxidct.c b/thirdparty/libtheora/x86_vc/mmxidct.c
index 8f5ff6803c..55e00aedcf 100644
--- a/thirdparty/libtheora/x86_vc/mmxidct.c
+++ b/thirdparty/libtheora/x86_vc/mmxidct.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: mmxidct.c 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
@@ -24,15 +24,15 @@
/*These are offsets into the table of constants below.*/
/*7 rows of cosines, in order: pi/16 * (1 ... 7).*/
-#define OC_COSINE_OFFSET (0)
+#define OC_COSINE_OFFSET (8)
/*A row of 8's.*/
-#define OC_EIGHT_OFFSET (56)
+#define OC_EIGHT_OFFSET (0)
/*A table of constants used by the MMX routines.*/
-static const __declspec(align(16))ogg_uint16_t
- OC_IDCT_CONSTS[(7+1)*4]={
+static const OC_ALIGN16(ogg_uint16_t) OC_IDCT_CONSTS[(1+7)*4]={
+ 8, 8, 8, 8,
(ogg_uint16_t)OC_C1S7,(ogg_uint16_t)OC_C1S7,
(ogg_uint16_t)OC_C1S7,(ogg_uint16_t)OC_C1S7,
(ogg_uint16_t)OC_C2S6,(ogg_uint16_t)OC_C2S6,
@@ -46,28 +46,27 @@ static const __declspec(align(16))ogg_uint16_t
(ogg_uint16_t)OC_C6S2,(ogg_uint16_t)OC_C6S2,
(ogg_uint16_t)OC_C6S2,(ogg_uint16_t)OC_C6S2,
(ogg_uint16_t)OC_C7S1,(ogg_uint16_t)OC_C7S1,
- (ogg_uint16_t)OC_C7S1,(ogg_uint16_t)OC_C7S1,
- 8, 8, 8, 8
+ (ogg_uint16_t)OC_C7S1,(ogg_uint16_t)OC_C7S1
};
/*38 cycles*/
-#define OC_IDCT_BEGIN __asm{ \
- __asm movq mm2,OC_I(3) \
+#define OC_IDCT_BEGIN(_y,_x) __asm{ \
+ __asm movq mm2,OC_I(3,_x) \
__asm movq mm6,OC_C(3) \
__asm movq mm4,mm2 \
- __asm movq mm7,OC_J(5) \
+ __asm movq mm7,OC_J(5,_x) \
__asm pmulhw mm4,mm6 \
__asm movq mm1,OC_C(5) \
__asm pmulhw mm6,mm7 \
__asm movq mm5,mm1 \
__asm pmulhw mm1,mm2 \
- __asm movq mm3,OC_I(1) \
+ __asm movq mm3,OC_I(1,_x) \
__asm pmulhw mm5,mm7 \
__asm movq mm0,OC_C(1) \
__asm paddw mm4,mm2 \
__asm paddw mm6,mm7 \
__asm paddw mm2,mm1 \
- __asm movq mm1,OC_J(7) \
+ __asm movq mm1,OC_J(7,_x) \
__asm paddw mm7,mm5 \
__asm movq mm5,mm0 \
__asm pmulhw mm0,mm3 \
@@ -77,13 +76,13 @@ static const __declspec(align(16))ogg_uint16_t
__asm psubw mm6,mm2 \
__asm paddw mm0,mm3 \
__asm pmulhw mm3,mm7 \
- __asm movq mm2,OC_I(2) \
+ __asm movq mm2,OC_I(2,_x) \
__asm pmulhw mm7,mm1 \
__asm paddw mm5,mm1 \
__asm movq mm1,mm2 \
__asm pmulhw mm2,OC_C(2) \
__asm psubw mm3,mm5 \
- __asm movq mm5,OC_J(6) \
+ __asm movq mm5,OC_J(6,_x) \
__asm paddw mm0,mm7 \
__asm movq mm7,mm5 \
__asm psubw mm0,mm4 \
@@ -97,18 +96,18 @@ static const __declspec(align(16))ogg_uint16_t
__asm paddw mm6,mm6 \
__asm pmulhw mm7,OC_C(6) \
__asm paddw mm6,mm3 \
- __asm movq OC_I(1),mm4 \
+ __asm movq OC_I(1,_y),mm4 \
__asm psubw mm1,mm5 \
__asm movq mm4,OC_C(4) \
__asm movq mm5,mm3 \
__asm pmulhw mm3,mm4 \
__asm paddw mm7,mm2 \
- __asm movq OC_I(2),mm6 \
+ __asm movq OC_I(2,_y),mm6 \
__asm movq mm2,mm0 \
- __asm movq mm6,OC_I(0) \
+ __asm movq mm6,OC_I(0,_x) \
__asm pmulhw mm0,mm4 \
__asm paddw mm5,mm3 \
- __asm movq mm3,OC_J(4) \
+ __asm movq mm3,OC_J(4,_x) \
__asm psubw mm5,mm1 \
__asm paddw mm2,mm0 \
__asm psubw mm6,mm3 \
@@ -122,17 +121,17 @@ static const __declspec(align(16))ogg_uint16_t
__asm paddw mm6,mm0 \
__asm psubw mm6,mm2 \
__asm paddw mm2,mm2 \
- __asm movq mm0,OC_I(1) \
+ __asm movq mm0,OC_I(1,_y) \
__asm paddw mm2,mm6 \
__asm paddw mm4,mm3 \
__asm psubw mm2,mm1 \
}
/*38+8=46 cycles.*/
-#define OC_ROW_IDCT __asm{ \
- OC_IDCT_BEGIN \
+#define OC_ROW_IDCT(_y,_x) __asm{ \
+ OC_IDCT_BEGIN(_y,_x) \
/*r3=D'*/ \
- __asm movq mm3,OC_I(2) \
+ __asm movq mm3,OC_I(2,_y) \
/*r4=E'=E-G*/ \
__asm psubw mm4,mm7 \
/*r1=H'+H'*/ \
@@ -157,7 +156,7 @@ static const __declspec(align(16))ogg_uint16_t
__asm psubw mm7,mm0 \
__asm paddw mm0,mm0 \
/*Save R1.*/ \
- __asm movq OC_I(1),mm1 \
+ __asm movq OC_I(1,_y),mm1 \
/*r0=R0=G.+C.*/ \
__asm paddw mm0,mm7 \
}
@@ -190,10 +189,10 @@ static const __declspec(align(16))ogg_uint16_t
Since r1 is free at entry, we calculate the Js first.*/
/*19 cycles.*/
-#define OC_TRANSPOSE __asm{ \
+#define OC_TRANSPOSE(_y) __asm{ \
__asm movq mm1,mm4 \
__asm punpcklwd mm4,mm5 \
- __asm movq OC_I(0),mm0 \
+ __asm movq OC_I(0,_y),mm0 \
__asm punpckhwd mm1,mm5 \
__asm movq mm0,mm6 \
__asm punpcklwd mm6,mm7 \
@@ -201,17 +200,17 @@ static const __declspec(align(16))ogg_uint16_t
__asm punpckldq mm4,mm6 \
__asm punpckhdq mm5,mm6 \
__asm movq mm6,mm1 \
- __asm movq OC_J(4),mm4 \
+ __asm movq OC_J(4,_y),mm4 \
__asm punpckhwd mm0,mm7 \
- __asm movq OC_J(5),mm5 \
+ __asm movq OC_J(5,_y),mm5 \
__asm punpckhdq mm6,mm0 \
- __asm movq mm4,OC_I(0) \
+ __asm movq mm4,OC_I(0,_y) \
__asm punpckldq mm1,mm0 \
- __asm movq mm5,OC_I(1) \
+ __asm movq mm5,OC_I(1,_y) \
__asm movq mm0,mm4 \
- __asm movq OC_J(7),mm6 \
+ __asm movq OC_J(7,_y),mm6 \
__asm punpcklwd mm0,mm5 \
- __asm movq OC_J(6),mm1 \
+ __asm movq OC_J(6,_y),mm1 \
__asm punpckhwd mm4,mm5 \
__asm movq mm5,mm2 \
__asm punpcklwd mm2,mm3 \
@@ -219,18 +218,18 @@ static const __declspec(align(16))ogg_uint16_t
__asm punpckldq mm0,mm2 \
__asm punpckhdq mm1,mm2 \
__asm movq mm2,mm4 \
- __asm movq OC_I(0),mm0 \
+ __asm movq OC_I(0,_y),mm0 \
__asm punpckhwd mm5,mm3 \
- __asm movq OC_I(1),mm1 \
+ __asm movq OC_I(1,_y),mm1 \
__asm punpckhdq mm4,mm5 \
__asm punpckldq mm2,mm5 \
- __asm movq OC_I(3),mm4 \
- __asm movq OC_I(2),mm2 \
+ __asm movq OC_I(3,_y),mm4 \
+ __asm movq OC_I(2,_y),mm2 \
}
/*38+19=57 cycles.*/
-#define OC_COLUMN_IDCT __asm{ \
- OC_IDCT_BEGIN \
+#define OC_COLUMN_IDCT(_y) __asm{ \
+ OC_IDCT_BEGIN(_y,_y) \
__asm paddw mm2,OC_8 \
/*r1=H'+H'*/ \
__asm paddw mm1,mm1 \
@@ -243,15 +242,15 @@ static const __declspec(align(16))ogg_uint16_t
/*r1=NR1*/ \
__asm psraw mm1,4 \
/*r3=D'*/ \
- __asm movq mm3,OC_I(2) \
+ __asm movq mm3,OC_I(2,_y) \
/*r7=G+G*/ \
__asm paddw mm7,mm7 \
/*Store NR2 at I(2).*/ \
- __asm movq OC_I(2),mm2 \
+ __asm movq OC_I(2,_y),mm2 \
/*r7=G'=E+G*/ \
__asm paddw mm7,mm4 \
/*Store NR1 at I(1).*/ \
- __asm movq OC_I(1),mm1 \
+ __asm movq OC_I(1,_y),mm1 \
/*r4=R4=E'-D'*/ \
__asm psubw mm4,mm3 \
__asm paddw mm4,OC_8 \
@@ -273,11 +272,11 @@ static const __declspec(align(16))ogg_uint16_t
/*r6=NR6*/ \
__asm psraw mm6,4 \
/*Store NR4 at J(4).*/ \
- __asm movq OC_J(4),mm4 \
+ __asm movq OC_J(4,_y),mm4 \
/*r5=NR5*/ \
__asm psraw mm5,4 \
/*Store NR3 at I(3).*/ \
- __asm movq OC_I(3),mm3 \
+ __asm movq OC_I(3,_y),mm3 \
/*r7=R7=G'-C'*/ \
__asm psubw mm7,mm0 \
__asm paddw mm7,OC_8 \
@@ -288,71 +287,89 @@ static const __declspec(align(16))ogg_uint16_t
/*r7=NR7*/ \
__asm psraw mm7,4 \
/*Store NR6 at J(6).*/ \
- __asm movq OC_J(6),mm6 \
+ __asm movq OC_J(6,_y),mm6 \
/*r0=NR0*/ \
__asm psraw mm0,4 \
/*Store NR5 at J(5).*/ \
- __asm movq OC_J(5),mm5 \
+ __asm movq OC_J(5,_y),mm5 \
/*Store NR7 at J(7).*/ \
- __asm movq OC_J(7),mm7 \
+ __asm movq OC_J(7,_y),mm7 \
/*Store NR0 at I(0).*/ \
- __asm movq OC_I(0),mm0 \
+ __asm movq OC_I(0,_y),mm0 \
}
#define OC_MID(_m,_i) [CONSTS+_m+(_i)*8]
#define OC_C(_i) OC_MID(OC_COSINE_OFFSET,_i-1)
#define OC_8 OC_MID(OC_EIGHT_OFFSET,0)
-static void oc_idct8x8_slow(ogg_int16_t _y[64]){
+static void oc_idct8x8_slow(ogg_int16_t _y[64],ogg_int16_t _x[64]){
+ int i;
/*This routine accepts an 8x8 matrix, but in partially transposed form.
Every 4x4 block is transposed.*/
__asm{
#define CONSTS eax
#define Y edx
+#define X ecx
mov CONSTS,offset OC_IDCT_CONSTS
mov Y,_y
-#define OC_I(_k) [Y+_k*16]
-#define OC_J(_k) [Y+(_k-4)*16+8]
- OC_ROW_IDCT
- OC_TRANSPOSE
+ mov X,_x
+#define OC_I(_k,_y) [(_y)+(_k)*16]
+#define OC_J(_k,_y) [(_y)+((_k)-4)*16+8]
+ OC_ROW_IDCT(Y,X)
+ OC_TRANSPOSE(Y)
#undef OC_I
#undef OC_J
-#define OC_I(_k) [Y+(_k*16)+64]
-#define OC_J(_k) [Y+(_k-4)*16+72]
- OC_ROW_IDCT
- OC_TRANSPOSE
+#define OC_I(_k,_y) [(_y)+(_k)*16+64]
+#define OC_J(_k,_y) [(_y)+((_k)-4)*16+72]
+ OC_ROW_IDCT(Y,X)
+ OC_TRANSPOSE(Y)
#undef OC_I
#undef OC_J
-#define OC_I(_k) [Y+_k*16]
-#define OC_J(_k) OC_I(_k)
- OC_COLUMN_IDCT
+#define OC_I(_k,_y) [(_y)+(_k)*16]
+#define OC_J(_k,_y) OC_I(_k,_y)
+ OC_COLUMN_IDCT(Y)
#undef OC_I
#undef OC_J
-#define OC_I(_k) [Y+_k*16+8]
-#define OC_J(_k) OC_I(_k)
- OC_COLUMN_IDCT
+#define OC_I(_k,_y) [(_y)+(_k)*16+8]
+#define OC_J(_k,_y) OC_I(_k,_y)
+ OC_COLUMN_IDCT(Y)
#undef OC_I
#undef OC_J
#undef CONSTS
#undef Y
+#undef X
+ }
+ __asm pxor mm0,mm0;
+ for(i=0;i<4;i++){
+ ogg_int16_t *x;
+ x=_x+16*i;
+#define X ecx
+ __asm{
+ mov X,x
+ movq [X+0x00],mm0
+ movq [X+0x08],mm0
+ movq [X+0x10],mm0
+ movq [X+0x18],mm0
+ }
+#undef X
}
}
/*25 cycles.*/
-#define OC_IDCT_BEGIN_10 __asm{ \
- __asm movq mm2,OC_I(3) \
+#define OC_IDCT_BEGIN_10(_y,_x) __asm{ \
+ __asm movq mm2,OC_I(3,_x) \
__asm nop \
__asm movq mm6,OC_C(3) \
__asm movq mm4,mm2 \
__asm movq mm1,OC_C(5) \
__asm pmulhw mm4,mm6 \
- __asm movq mm3,OC_I(1) \
+ __asm movq mm3,OC_I(1,_x) \
__asm pmulhw mm1,mm2 \
__asm movq mm0,OC_C(1) \
__asm paddw mm4,mm2 \
__asm pxor mm6,mm6 \
__asm paddw mm2,mm1 \
- __asm movq mm5,OC_I(2) \
+ __asm movq mm5,OC_I(2,_x) \
__asm pmulhw mm0,mm3 \
__asm movq mm1,mm5 \
__asm paddw mm0,mm3 \
@@ -360,43 +377,43 @@ static void oc_idct8x8_slow(ogg_int16_t _y[64]){
__asm psubw mm6,mm2 \
__asm pmulhw mm5,OC_C(2) \
__asm psubw mm0,mm4 \
- __asm movq mm7,OC_I(2) \
+ __asm movq mm7,OC_I(2,_x) \
__asm paddw mm4,mm4 \
__asm paddw mm7,mm5 \
__asm paddw mm4,mm0 \
__asm pmulhw mm1,OC_C(6) \
__asm psubw mm3,mm6 \
- __asm movq OC_I(1),mm4 \
+ __asm movq OC_I(1,_y),mm4 \
__asm paddw mm6,mm6 \
__asm movq mm4,OC_C(4) \
__asm paddw mm6,mm3 \
__asm movq mm5,mm3 \
__asm pmulhw mm3,mm4 \
- __asm movq OC_I(2),mm6 \
+ __asm movq OC_I(2,_y),mm6 \
__asm movq mm2,mm0 \
- __asm movq mm6,OC_I(0) \
+ __asm movq mm6,OC_I(0,_x) \
__asm pmulhw mm0,mm4 \
__asm paddw mm5,mm3 \
__asm paddw mm2,mm0 \
__asm psubw mm5,mm1 \
__asm pmulhw mm6,mm4 \
- __asm paddw mm6,OC_I(0) \
+ __asm paddw mm6,OC_I(0,_x) \
__asm paddw mm1,mm1 \
__asm movq mm4,mm6 \
__asm paddw mm1,mm5 \
__asm psubw mm6,mm2 \
__asm paddw mm2,mm2 \
- __asm movq mm0,OC_I(1) \
+ __asm movq mm0,OC_I(1,_y) \
__asm paddw mm2,mm6 \
__asm psubw mm2,mm1 \
__asm nop \
}
/*25+8=33 cycles.*/
-#define OC_ROW_IDCT_10 __asm{ \
- OC_IDCT_BEGIN_10 \
+#define OC_ROW_IDCT_10(_y,_x) __asm{ \
+ OC_IDCT_BEGIN_10(_y,_x) \
/*r3=D'*/ \
- __asm movq mm3,OC_I(2) \
+ __asm movq mm3,OC_I(2,_y) \
/*r4=E'=E-G*/ \
__asm psubw mm4,mm7 \
/*r1=H'+H'*/ \
@@ -421,14 +438,14 @@ static void oc_idct8x8_slow(ogg_int16_t _y[64]){
__asm psubw mm7,mm0 \
__asm paddw mm0,mm0 \
/*Save R1.*/ \
- __asm movq OC_I(1),mm1 \
+ __asm movq OC_I(1,_y),mm1 \
/*r0=R0=G'+C'*/ \
__asm paddw mm0,mm7 \
}
 /*25+19=44 cycles.*/
-#define OC_COLUMN_IDCT_10 __asm{ \
- OC_IDCT_BEGIN_10 \
+#define OC_COLUMN_IDCT_10(_y) __asm{ \
+ OC_IDCT_BEGIN_10(_y,_y) \
__asm paddw mm2,OC_8 \
/*r1=H'+H'*/ \
__asm paddw mm1,mm1 \
@@ -441,15 +458,15 @@ static void oc_idct8x8_slow(ogg_int16_t _y[64]){
/*r1=NR1*/ \
__asm psraw mm1,4 \
/*r3=D'*/ \
- __asm movq mm3,OC_I(2) \
+ __asm movq mm3,OC_I(2,_y) \
/*r7=G+G*/ \
__asm paddw mm7,mm7 \
/*Store NR2 at I(2).*/ \
- __asm movq OC_I(2),mm2 \
+ __asm movq OC_I(2,_y),mm2 \
/*r7=G'=E+G*/ \
__asm paddw mm7,mm4 \
/*Store NR1 at I(1).*/ \
- __asm movq OC_I(1),mm1 \
+ __asm movq OC_I(1,_y),mm1 \
/*r4=R4=E'-D'*/ \
__asm psubw mm4,mm3 \
__asm paddw mm4,OC_8 \
@@ -471,11 +488,11 @@ static void oc_idct8x8_slow(ogg_int16_t _y[64]){
/*r6=NR6*/ \
__asm psraw mm6,4 \
/*Store NR4 at J(4).*/ \
- __asm movq OC_J(4),mm4 \
+ __asm movq OC_J(4,_y),mm4 \
/*r5=NR5*/ \
__asm psraw mm5,4 \
/*Store NR3 at I(3).*/ \
- __asm movq OC_I(3),mm3 \
+ __asm movq OC_I(3,_y),mm3 \
/*r7=R7=G'-C'*/ \
__asm psubw mm7,mm0 \
__asm paddw mm7,OC_8 \
@@ -486,50 +503,63 @@ static void oc_idct8x8_slow(ogg_int16_t _y[64]){
/*r7=NR7*/ \
__asm psraw mm7,4 \
/*Store NR6 at J(6).*/ \
- __asm movq OC_J(6),mm6 \
+ __asm movq OC_J(6,_y),mm6 \
/*r0=NR0*/ \
__asm psraw mm0,4 \
/*Store NR5 at J(5).*/ \
- __asm movq OC_J(5),mm5 \
+ __asm movq OC_J(5,_y),mm5 \
/*Store NR7 at J(7).*/ \
- __asm movq OC_J(7),mm7 \
+ __asm movq OC_J(7,_y),mm7 \
/*Store NR0 at I(0).*/ \
- __asm movq OC_I(0),mm0 \
+ __asm movq OC_I(0,_y),mm0 \
}
-static void oc_idct8x8_10(ogg_int16_t _y[64]){
+static void oc_idct8x8_10(ogg_int16_t _y[64],ogg_int16_t _x[64]){
__asm{
#define CONSTS eax
#define Y edx
+#define X ecx
mov CONSTS,offset OC_IDCT_CONSTS
mov Y,_y
-#define OC_I(_k) [Y+_k*16]
-#define OC_J(_k) [Y+(_k-4)*16+8]
+ mov X,_x
+#define OC_I(_k,_y) [(_y)+(_k)*16]
+#define OC_J(_k,_y) [(_y)+((_k)-4)*16+8]
/*Done with dequant, descramble, and partial transpose.
Now do the iDCT itself.*/
- OC_ROW_IDCT_10
- OC_TRANSPOSE
+ OC_ROW_IDCT_10(Y,X)
+ OC_TRANSPOSE(Y)
#undef OC_I
#undef OC_J
-#define OC_I(_k) [Y+_k*16]
-#define OC_J(_k) OC_I(_k)
- OC_COLUMN_IDCT_10
+#define OC_I(_k,_y) [(_y)+(_k)*16]
+#define OC_J(_k,_y) OC_I(_k,_y)
+ OC_COLUMN_IDCT_10(Y)
#undef OC_I
#undef OC_J
-#define OC_I(_k) [Y+_k*16+8]
-#define OC_J(_k) OC_I(_k)
- OC_COLUMN_IDCT_10
+#define OC_I(_k,_y) [(_y)+(_k)*16+8]
+#define OC_J(_k,_y) OC_I(_k,_y)
+ OC_COLUMN_IDCT_10(Y)
#undef OC_I
#undef OC_J
#undef CONSTS
#undef Y
+#undef X
+ }
+#define X ecx
+ __asm{
+ pxor mm0,mm0;
+ mov X,_x
+ movq [X+0x00],mm0
+ movq [X+0x10],mm0
+ movq [X+0x20],mm0
+ movq [X+0x30],mm0
}
+#undef X
}
/*Performs an inverse 8x8 Type-II DCT transform.
 The input is assumed to be scaled by a factor of 4 relative to the orthonormal
version of the transform.*/
-void oc_idct8x8_mmx(ogg_int16_t _y[64],int _last_zzi){
+void oc_idct8x8_mmx(ogg_int16_t _y[64],ogg_int16_t _x[64],int _last_zzi){
/*_last_zzi is subtly different from an actual count of the number of
coefficients we decoded for this block.
It contains the value of zzi BEFORE the final token in the block was
@@ -555,8 +585,8 @@ void oc_idct8x8_mmx(ogg_int16_t _y[64],int _last_zzi){
gets.
Needless to say we inherited this approach from VP3.*/
/*Perform the iDCT.*/
- if(_last_zzi<10)oc_idct8x8_10(_y);
- else oc_idct8x8_slow(_y);
+ if(_last_zzi<=10)oc_idct8x8_10(_y,_x);
+ else oc_idct8x8_slow(_y,_x);
}
#endif
diff --git a/thirdparty/libtheora/x86_vc/mmxstate.c b/thirdparty/libtheora/x86_vc/mmxstate.c
index 73bd1981cf..f532ee1b6f 100644
--- a/thirdparty/libtheora/x86_vc/mmxstate.c
+++ b/thirdparty/libtheora/x86_vc/mmxstate.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: mmxstate.c 16584 2009-09-26 19:35:55Z tterribe $
+ last mod: $Id$
********************************************************************/
@@ -19,17 +19,16 @@
Originally written by Rudolf Marek.*/
#include <string.h>
#include "x86int.h"
-#include "mmxfrag.h"
#include "mmxloop.h"
#if defined(OC_X86_ASM)
void oc_state_frag_recon_mmx(const oc_theora_state *_state,ptrdiff_t _fragi,
- int _pli,ogg_int16_t _dct_coeffs[64],int _last_zzi,ogg_uint16_t _dc_quant){
+ int _pli,ogg_int16_t _dct_coeffs[128],int _last_zzi,ogg_uint16_t _dc_quant){
unsigned char *dst;
ptrdiff_t frag_buf_off;
int ystride;
- int mb_mode;
+ int refi;
/*Apply the inverse transform.*/
/*Special case only having a DC component.*/
if(_last_zzi<2){
@@ -45,6 +44,7 @@ void oc_state_frag_recon_mmx(const oc_theora_state *_state,ptrdiff_t _fragi,
#define P ecx
mov Y,_dct_coeffs
movzx P,p
+ lea Y,[Y+128]
/*mm0=0000 0000 0000 AAAA*/
movd mm0,P
/*mm0=0000 0000 AAAA AAAA*/
@@ -74,65 +74,32 @@ void oc_state_frag_recon_mmx(const oc_theora_state *_state,ptrdiff_t _fragi,
else{
/*Dequantize the DC coefficient.*/
_dct_coeffs[0]=(ogg_int16_t)(_dct_coeffs[0]*(int)_dc_quant);
- oc_idct8x8_mmx(_dct_coeffs,_last_zzi);
+ oc_idct8x8_mmx(_dct_coeffs+64,_dct_coeffs,_last_zzi);
}
/*Fill in the target buffer.*/
frag_buf_off=_state->frag_buf_offs[_fragi];
- mb_mode=_state->frags[_fragi].mb_mode;
+ refi=_state->frags[_fragi].refi;
ystride=_state->ref_ystride[_pli];
- dst=_state->ref_frame_data[_state->ref_frame_idx[OC_FRAME_SELF]]+frag_buf_off;
- if(mb_mode==OC_MODE_INTRA)oc_frag_recon_intra_mmx(dst,ystride,_dct_coeffs);
+ dst=_state->ref_frame_data[OC_FRAME_SELF]+frag_buf_off;
+ if(refi==OC_FRAME_SELF)oc_frag_recon_intra_mmx(dst,ystride,_dct_coeffs+64);
else{
const unsigned char *ref;
int mvoffsets[2];
- ref=
- _state->ref_frame_data[_state->ref_frame_idx[OC_FRAME_FOR_MODE(mb_mode)]]
- +frag_buf_off;
+ ref=_state->ref_frame_data[refi]+frag_buf_off;
if(oc_state_get_mv_offsets(_state,mvoffsets,_pli,
- _state->frag_mvs[_fragi][0],_state->frag_mvs[_fragi][1])>1){
+ _state->frag_mvs[_fragi])>1){
oc_frag_recon_inter2_mmx(dst,ref+mvoffsets[0],ref+mvoffsets[1],ystride,
- _dct_coeffs);
+ _dct_coeffs+64);
}
- else oc_frag_recon_inter_mmx(dst,ref+mvoffsets[0],ystride,_dct_coeffs);
+ else oc_frag_recon_inter_mmx(dst,ref+mvoffsets[0],ystride,_dct_coeffs+64);
}
}
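The reworked calls above rely on the coefficient buffer now being 128 entries long: the dequantized input occupies the first 64 entries and the iDCT writes the spatial-domain residue to the second 64, which the recon helpers then consume. A minimal sketch of that convention (recon_block_sketch is a hypothetical wrapper, not part of the patch):

/* Sketch: input coefficients in _dct_coeffs[0..63], residue written to
   _dct_coeffs[64..127]; the sparse iDCT path shown earlier also clears the
   part of the input region it read. */
static void recon_block_sketch(ogg_int16_t _dct_coeffs[128],int _last_zzi){
  oc_idct8x8_mmx(_dct_coeffs+64,_dct_coeffs,_last_zzi);
}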
/*We copy these entire function to inline the actual MMX routines so that we
use only a single indirect call.*/
-/*Copies the fragments specified by the lists of fragment indices from one
- frame to another.
- _fragis: A pointer to a list of fragment indices.
- _nfragis: The number of fragment indices to copy.
- _dst_frame: The reference frame to copy to.
- _src_frame: The reference frame to copy from.
- _pli: The color plane the fragments lie in.*/
-void oc_state_frag_copy_list_mmx(const oc_theora_state *_state,
- const ptrdiff_t *_fragis,ptrdiff_t _nfragis,
- int _dst_frame,int _src_frame,int _pli){
- const ptrdiff_t *frag_buf_offs;
- const unsigned char *src_frame_data;
- unsigned char *dst_frame_data;
- ptrdiff_t fragii;
- int ystride;
- dst_frame_data=_state->ref_frame_data[_state->ref_frame_idx[_dst_frame]];
- src_frame_data=_state->ref_frame_data[_state->ref_frame_idx[_src_frame]];
- ystride=_state->ref_ystride[_pli];
- frag_buf_offs=_state->frag_buf_offs;
- for(fragii=0;fragii<_nfragis;fragii++){
- ptrdiff_t frag_buf_off;
- frag_buf_off=frag_buf_offs[_fragis[fragii]];
-#define SRC edx
-#define DST eax
-#define YSTRIDE ecx
-#define YSTRIDE3 edi
- OC_FRAG_COPY_MMX(dst_frame_data+frag_buf_off,
- src_frame_data+frag_buf_off,ystride);
-#undef SRC
-#undef DST
-#undef YSTRIDE
-#undef YSTRIDE3
- }
+void oc_loop_filter_init_mmx(signed char _bv[256],int _flimit){
+ memset(_bv,~(_flimit<<1),8);
}
/*Apply the loop filter to a given set of fragment rows in the given plane.
@@ -144,8 +111,7 @@ void oc_state_frag_copy_list_mmx(const oc_theora_state *_state,
_fragy0: The Y coordinate of the first fragment row to filter.
_fragy_end: The Y coordinate of the fragment row to stop filtering at.*/
void oc_state_loop_filter_frag_rows_mmx(const oc_theora_state *_state,
- int _bv[256],int _refi,int _pli,int _fragy0,int _fragy_end){
- OC_ALIGN8(unsigned char ll[8]);
+ signed char _bv[256],int _refi,int _pli,int _fragy0,int _fragy_end){
const oc_fragment_plane *fplane;
const oc_fragment *frags;
const ptrdiff_t *frag_buf_offs;
@@ -156,13 +122,12 @@ void oc_state_loop_filter_frag_rows_mmx(const oc_theora_state *_state,
ptrdiff_t fragi0_end;
int ystride;
int nhfrags;
- memset(ll,_state->loop_filter_limits[_state->qis[0]],sizeof(ll));
fplane=_state->fplanes+_pli;
nhfrags=fplane->nhfrags;
fragi_top=fplane->froffset;
fragi_bot=fragi_top+fplane->nfrags;
fragi0=fragi_top+_fragy0*(ptrdiff_t)nhfrags;
- fragi0_end=fragi0+(_fragy_end-_fragy0)*(ptrdiff_t)nhfrags;
+ fragi0_end=fragi_top+_fragy_end*(ptrdiff_t)nhfrags;
ystride=_state->ref_ystride[_pli];
frags=_state->frags;
frag_buf_offs=_state->frag_buf_offs;
@@ -187,13 +152,13 @@ void oc_state_loop_filter_frag_rows_mmx(const oc_theora_state *_state,
#define LL edx
#define D esi
#define D_WORD si
- if(fragi>fragi0)OC_LOOP_FILTER_H_MMX(ref,ystride,ll);
- if(fragi0>fragi_top)OC_LOOP_FILTER_V_MMX(ref,ystride,ll);
+ if(fragi>fragi0)OC_LOOP_FILTER_H_MMX(ref,ystride,_bv);
+ if(fragi0>fragi_top)OC_LOOP_FILTER_V_MMX(ref,ystride,_bv);
if(fragi+1<fragi_end&&!frags[fragi+1].coded){
- OC_LOOP_FILTER_H_MMX(ref+8,ystride,ll);
+ OC_LOOP_FILTER_H_MMX(ref+8,ystride,_bv);
}
if(fragi+nhfrags<fragi_bot&&!frags[fragi+nhfrags].coded){
- OC_LOOP_FILTER_V_MMX(ref+(ystride<<3),ystride,ll);
+ OC_LOOP_FILTER_V_MMX(ref+(ystride<<3),ystride,_bv);
}
#undef PIX
#undef YSTRIDE3
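The new end-of-range computation above is equivalent to the old one, since fragi0 is itself fragi_top+_fragy0*nhfrags; a standalone check (hypothetical, for illustration only) makes the algebra explicit:

#include <assert.h>
#include <stddef.h>

/* Sketch: fragi0 + (y_end - y0)*nhfrags == fragi_top + y_end*nhfrags. */
static void check_frag_range(ptrdiff_t fragi_top,int nhfrags,int y0,int y_end){
  ptrdiff_t fragi0=fragi_top+y0*(ptrdiff_t)nhfrags;
  assert(fragi0+(y_end-y0)*(ptrdiff_t)nhfrags
   ==fragi_top+y_end*(ptrdiff_t)nhfrags);
}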
diff --git a/thirdparty/libtheora/cpu.c b/thirdparty/libtheora/x86_vc/x86cpu.c
index a863aad7f3..6a1d8d850c 100644
--- a/thirdparty/libtheora/cpu.c
+++ b/thirdparty/libtheora/x86_vc/x86cpu.c
@@ -14,41 +14,17 @@
Originally written by Rudolf Marek.
function:
- last mod: $Id: cpu.c 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
-#include "cpu.h"
+#include "x86cpu.h"
#if !defined(OC_X86_ASM)
-static ogg_uint32_t oc_cpu_flags_get(void){
+ogg_uint32_t oc_cpu_flags_get(void){
return 0;
}
#else
-# if !defined(_MSC_VER)
-# if defined(__amd64__)||defined(__x86_64__)
-/*On x86-64, gcc seems to be able to figure out how to save %rbx for us when
- compiling with -fPIC.*/
-# define cpuid(_op,_eax,_ebx,_ecx,_edx) \
- __asm__ __volatile__( \
- "cpuid\n\t" \
- :[eax]"=a"(_eax),[ebx]"=b"(_ebx),[ecx]"=c"(_ecx),[edx]"=d"(_edx) \
- :"a"(_op) \
- :"cc" \
- )
-# else
-/*On x86-32, not so much.*/
-# define cpuid(_op,_eax,_ebx,_ecx,_edx) \
- __asm__ __volatile__( \
- "xchgl %%ebx,%[ebx]\n\t" \
- "cpuid\n\t" \
- "xchgl %%ebx,%[ebx]\n\t" \
- :[eax]"=a"(_eax),[ebx]"=r"(_ebx),[ecx]"=c"(_ecx),[edx]"=d"(_edx) \
- :"a"(_op) \
- :"cc" \
- )
-# endif
-# else
/*Why does MSVC need this complicated rigamarole?
At this point I honestly do not care.*/
@@ -95,7 +71,6 @@ static void oc_detect_cpuid_helper(ogg_uint32_t *_eax,ogg_uint32_t *_ebx){
mov [ecx],ebx
}
}
-# endif
static ogg_uint32_t oc_parse_intel_flags(ogg_uint32_t _edx,ogg_uint32_t _ecx){
ogg_uint32_t flags;
@@ -124,7 +99,7 @@ static ogg_uint32_t oc_parse_amd_flags(ogg_uint32_t _edx,ogg_uint32_t _ecx){
return flags;
}
-static ogg_uint32_t oc_cpu_flags_get(void){
+ogg_uint32_t oc_cpu_flags_get(void){
ogg_uint32_t flags;
ogg_uint32_t eax;
ogg_uint32_t ebx;
@@ -132,25 +107,7 @@ static ogg_uint32_t oc_cpu_flags_get(void){
ogg_uint32_t edx;
# if !defined(__amd64__)&&!defined(__x86_64__)
/*Not all x86-32 chips support cpuid, so we have to check.*/
-# if !defined(_MSC_VER)
- __asm__ __volatile__(
- "pushfl\n\t"
- "pushfl\n\t"
- "popl %[a]\n\t"
- "movl %[a],%[b]\n\t"
- "xorl $0x200000,%[a]\n\t"
- "pushl %[a]\n\t"
- "popfl\n\t"
- "pushfl\n\t"
- "popl %[a]\n\t"
- "popfl\n\t"
- :[a]"=r"(eax),[b]"=r"(ebx)
- :
- :"cc"
- );
-# else
oc_detect_cpuid_helper(&eax,&ebx);
-# endif
/*No cpuid.*/
if(eax==ebx)return 0;
# endif
@@ -159,9 +116,18 @@ static ogg_uint32_t oc_cpu_flags_get(void){
if(ecx==0x6C65746E&&edx==0x49656E69&&ebx==0x756E6547||
/* 6 8 x M T e n i u n e G*/
ecx==0x3638784D&&edx==0x54656E69&&ebx==0x756E6547){
+ int family;
+ int model;
/*Intel, Transmeta (tested with Crusoe TM5800):*/
cpuid(1,eax,ebx,ecx,edx);
flags=oc_parse_intel_flags(edx,ecx);
+ family=(eax>>8)&0xF;
+ model=(eax>>4)&0xF;
+ /*The SSE unit on the Pentium M and Core Duo is much slower than the MMX
+ unit, so don't use it.*/
+ if(family==6&&(model==9||model==13||model==14)){
+ flags&=~(OC_CPU_X86_SSE2|OC_CPU_X86_PNI);
+ }
}
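As a worked example of the family/model decode added above: a Pentium M (Dothan) reports a CPUID leaf-1 signature of roughly 0x06D8 (the exact stepping digit varies; the value is an assumption for this example, not taken from the patch), which decodes to family 6, model 13 and therefore triggers the SSE2/PNI mask:

/* Sketch: decode family/model from a CPUID leaf-1 eax value. */
static int is_slow_sse_part(unsigned eax){
  int family=(eax>>8)&0xF;
  int model=(eax>>4)&0xF;
  return family==6&&(model==9||model==13||model==14);
}
/* is_slow_sse_part(0x06D8) -> 1 (family 6, model 13). */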
/* D M A c i t n e h t u A*/
else if(ecx==0x444D4163&&edx==0x69746E65&&ebx==0x68747541||
diff --git a/thirdparty/libtheora/x86_vc/x86cpu.h b/thirdparty/libtheora/x86_vc/x86cpu.h
new file mode 100644
index 0000000000..eea261d448
--- /dev/null
+++ b/thirdparty/libtheora/x86_vc/x86cpu.h
@@ -0,0 +1,36 @@
+/********************************************************************
+ * *
+ * THIS FILE IS PART OF THE OggTheora SOFTWARE CODEC SOURCE CODE. *
+ * USE, DISTRIBUTION AND REPRODUCTION OF THIS LIBRARY SOURCE IS *
+ * GOVERNED BY A BSD-STYLE SOURCE LICENSE INCLUDED WITH THIS SOURCE *
+ * IN 'COPYING'. PLEASE READ THESE TERMS BEFORE DISTRIBUTING. *
+ * *
+ * THE Theora SOURCE CODE IS COPYRIGHT (C) 2002-2009 *
+ * by the Xiph.Org Foundation and contributors http://www.xiph.org/ *
+ * *
+ ********************************************************************
+ function:
+ last mod: $Id$
+
+ ********************************************************************/
+
+#if !defined(_x86_vc_x86cpu_H)
+# define _x86_vc_x86cpu_H (1)
+#include "../internal.h"
+
+#define OC_CPU_X86_MMX (1<<0)
+#define OC_CPU_X86_3DNOW (1<<1)
+#define OC_CPU_X86_3DNOWEXT (1<<2)
+#define OC_CPU_X86_MMXEXT (1<<3)
+#define OC_CPU_X86_SSE (1<<4)
+#define OC_CPU_X86_SSE2 (1<<5)
+#define OC_CPU_X86_PNI (1<<6)
+#define OC_CPU_X86_SSSE3 (1<<7)
+#define OC_CPU_X86_SSE4_1 (1<<8)
+#define OC_CPU_X86_SSE4_2 (1<<9)
+#define OC_CPU_X86_SSE4A (1<<10)
+#define OC_CPU_X86_SSE5 (1<<11)
+
+ogg_uint32_t oc_cpu_flags_get(void);
+
+#endif
diff --git a/thirdparty/libtheora/x86_vc/x86enc.c b/thirdparty/libtheora/x86_vc/x86enc.c
index e1960e1f0b..e9d59e85e3 100644
--- a/thirdparty/libtheora/x86_vc/x86enc.c
+++ b/thirdparty/libtheora/x86_vc/x86enc.c
@@ -18,27 +18,25 @@
#if defined(OC_X86_ASM)
-#include "../cpu.c"
-
-void oc_enc_vtable_init_x86(oc_enc_ctx *_enc){
+void oc_enc_accel_init_x86(oc_enc_ctx *_enc){
ogg_uint32_t cpu_flags;
- cpu_flags=oc_cpu_flags_get();
- oc_enc_vtable_init_c(_enc);
+ cpu_flags=_enc->state.cpu_flags;
+ oc_enc_accel_init_c(_enc);
if(cpu_flags&OC_CPU_X86_MMX){
_enc->opt_vtable.frag_sub=oc_enc_frag_sub_mmx;
_enc->opt_vtable.frag_sub_128=oc_enc_frag_sub_128_mmx;
_enc->opt_vtable.frag_recon_intra=oc_frag_recon_intra_mmx;
_enc->opt_vtable.frag_recon_inter=oc_frag_recon_inter_mmx;
- _enc->opt_vtable.fdct8x8=oc_enc_fdct8x8_mmx;
}
if(cpu_flags&OC_CPU_X86_MMXEXT){
_enc->opt_vtable.frag_sad=oc_enc_frag_sad_mmxext;
_enc->opt_vtable.frag_sad_thresh=oc_enc_frag_sad_thresh_mmxext;
_enc->opt_vtable.frag_sad2_thresh=oc_enc_frag_sad2_thresh_mmxext;
- _enc->opt_vtable.frag_satd_thresh=oc_enc_frag_satd_thresh_mmxext;
- _enc->opt_vtable.frag_satd2_thresh=oc_enc_frag_satd2_thresh_mmxext;
+ _enc->opt_vtable.frag_satd=oc_enc_frag_satd_mmxext;
+ _enc->opt_vtable.frag_satd2=oc_enc_frag_satd2_mmxext;
_enc->opt_vtable.frag_intra_satd=oc_enc_frag_intra_satd_mmxext;
_enc->opt_vtable.frag_copy2=oc_enc_frag_copy2_mmxext;
+ _enc->opt_vtable.fdct8x8=oc_enc_fdct8x8_mmxext;
}
if(cpu_flags&OC_CPU_X86_SSE2){
# if defined(OC_X86_64_ASM)
diff --git a/thirdparty/libtheora/x86_vc/x86enc.h b/thirdparty/libtheora/x86_vc/x86enc.h
index 581484641f..885406a54d 100644
--- a/thirdparty/libtheora/x86_vc/x86enc.h
+++ b/thirdparty/libtheora/x86_vc/x86enc.h
@@ -17,10 +17,14 @@
#if !defined(_x86_vc_x86enc_H)
# define _x86_vc_x86enc_H (1)
-# include "../encint.h"
# include "x86int.h"
+# if defined(OC_X86_ASM)
+# define oc_enc_accel_init oc_enc_accel_init_x86
+# define OC_ENC_USE_VTABLE (1)
+# endif
+# include "../encint.h"
-void oc_enc_vtable_init_x86(oc_enc_ctx *_enc);
+void oc_enc_accel_init_x86(oc_enc_ctx *_enc);
unsigned oc_enc_frag_sad_mmxext(const unsigned char *_src,
const unsigned char *_ref,int _ystride);
@@ -29,19 +33,19 @@ unsigned oc_enc_frag_sad_thresh_mmxext(const unsigned char *_src,
unsigned oc_enc_frag_sad2_thresh_mmxext(const unsigned char *_src,
const unsigned char *_ref1,const unsigned char *_ref2,int _ystride,
unsigned _thresh);
-unsigned oc_enc_frag_satd_thresh_mmxext(const unsigned char *_src,
- const unsigned char *_ref,int _ystride,unsigned _thresh);
-unsigned oc_enc_frag_satd2_thresh_mmxext(const unsigned char *_src,
- const unsigned char *_ref1,const unsigned char *_ref2,int _ystride,
- unsigned _thresh);
-unsigned oc_enc_frag_intra_satd_mmxext(const unsigned char *_src,int _ystride);
+unsigned oc_enc_frag_satd_mmxext(unsigned *_dc,const unsigned char *_src,
+ const unsigned char *_ref,int _ystride);
+unsigned oc_enc_frag_satd2_mmxext(unsigned *_dc,const unsigned char *_src,
+ const unsigned char *_ref1,const unsigned char *_ref2,int _ystride);
+unsigned oc_enc_frag_intra_satd_mmxext(unsigned *_dc,
+ const unsigned char *_src,int _ystride);
void oc_enc_frag_sub_mmx(ogg_int16_t _diff[64],
const unsigned char *_x,const unsigned char *_y,int _stride);
void oc_enc_frag_sub_128_mmx(ogg_int16_t _diff[64],
const unsigned char *_x,int _stride);
void oc_enc_frag_copy2_mmxext(unsigned char *_dst,
const unsigned char *_src1,const unsigned char *_src2,int _ystride);
-void oc_enc_fdct8x8_mmx(ogg_int16_t _y[64],const ogg_int16_t _x[64]);
+void oc_enc_fdct8x8_mmxext(ogg_int16_t _y[64],const ogg_int16_t _x[64]);
void oc_enc_fdct8x8_x86_64sse2(ogg_int16_t _y[64],const ogg_int16_t _x[64]);
#endif
diff --git a/thirdparty/libtheora/x86_vc/x86int.h b/thirdparty/libtheora/x86_vc/x86int.h
index 4cca485311..318a09dca0 100644
--- a/thirdparty/libtheora/x86_vc/x86int.h
+++ b/thirdparty/libtheora/x86_vc/x86int.h
@@ -11,32 +11,39 @@
********************************************************************
function:
- last mod: $Id: x86int.h 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
#if !defined(_x86_vc_x86int_H)
# define _x86_vc_x86int_H (1)
# include "../internal.h"
+# if defined(OC_X86_ASM)
+# define oc_state_accel_init oc_state_accel_init_x86
+# define OC_STATE_USE_VTABLE (1)
+# endif
+# include "../state.h"
+# include "x86cpu.h"
-void oc_state_vtable_init_x86(oc_theora_state *_state);
+void oc_state_accel_init_x86(oc_theora_state *_state);
void oc_frag_copy_mmx(unsigned char *_dst,
const unsigned char *_src,int _ystride);
+void oc_frag_copy_list_mmx(unsigned char *_dst_frame,
+ const unsigned char *_src_frame,int _ystride,
+ const ptrdiff_t *_fragis,ptrdiff_t _nfragis,const ptrdiff_t *_frag_buf_offs);
void oc_frag_recon_intra_mmx(unsigned char *_dst,int _ystride,
const ogg_int16_t *_residue);
void oc_frag_recon_inter_mmx(unsigned char *_dst,
const unsigned char *_src,int _ystride,const ogg_int16_t *_residue);
void oc_frag_recon_inter2_mmx(unsigned char *_dst,const unsigned char *_src1,
const unsigned char *_src2,int _ystride,const ogg_int16_t *_residue);
-void oc_idct8x8_mmx(ogg_int16_t _y[64],int _last_zzi);
+void oc_idct8x8_mmx(ogg_int16_t _y[64],ogg_int16_t _x[64],int _last_zzi);
void oc_state_frag_recon_mmx(const oc_theora_state *_state,ptrdiff_t _fragi,
- int _pli,ogg_int16_t _dct_coeffs[64],int _last_zzi,ogg_uint16_t _dc_quant);
-void oc_state_frag_copy_list_mmx(const oc_theora_state *_state,
- const ptrdiff_t *_fragis,ptrdiff_t _nfragis,
- int _dst_frame,int _src_frame,int _pli);
+ int _pli,ogg_int16_t _dct_coeffs[128],int _last_zzi,ogg_uint16_t _dc_quant);
+void oc_loop_filter_init_mmx(signed char _bv[256],int _flimit);
void oc_state_loop_filter_frag_rows_mmx(const oc_theora_state *_state,
- int _bv[256],int _refi,int _pli,int _fragy0,int _fragy_end);
+ signed char _bv[256],int _refi,int _pli,int _fragy0,int _fragy_end);
void oc_restore_fpu_mmx(void);
#endif
diff --git a/thirdparty/libtheora/x86_vc/x86state.c b/thirdparty/libtheora/x86_vc/x86state.c
index a786bec284..fa3a0d42fc 100644
--- a/thirdparty/libtheora/x86_vc/x86state.c
+++ b/thirdparty/libtheora/x86_vc/x86state.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: x86state.c 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
@@ -19,8 +19,6 @@
#if defined(OC_X86_ASM)
-#include "../cpu.c"
-
/*This table has been modified from OC_FZIG_ZAG by baking a 4x4 transpose into
each quadrant of the destination.*/
static const unsigned char OC_FZIG_ZAG_MMX[128]={
@@ -42,21 +40,22 @@ static const unsigned char OC_FZIG_ZAG_MMX[128]={
64,64,64,64,64,64,64,64,
};
-void oc_state_vtable_init_x86(oc_theora_state *_state){
+void oc_state_accel_init_x86(oc_theora_state *_state){
_state->cpu_flags=oc_cpu_flags_get();
if(_state->cpu_flags&OC_CPU_X86_MMX){
_state->opt_vtable.frag_copy=oc_frag_copy_mmx;
+ _state->opt_vtable.frag_copy_list=oc_frag_copy_list_mmx;
_state->opt_vtable.frag_recon_intra=oc_frag_recon_intra_mmx;
_state->opt_vtable.frag_recon_inter=oc_frag_recon_inter_mmx;
_state->opt_vtable.frag_recon_inter2=oc_frag_recon_inter2_mmx;
_state->opt_vtable.idct8x8=oc_idct8x8_mmx;
_state->opt_vtable.state_frag_recon=oc_state_frag_recon_mmx;
- _state->opt_vtable.state_frag_copy_list=oc_state_frag_copy_list_mmx;
+ _state->opt_vtable.loop_filter_init=oc_loop_filter_init_mmx;
_state->opt_vtable.state_loop_filter_frag_rows=
oc_state_loop_filter_frag_rows_mmx;
_state->opt_vtable.restore_fpu=oc_restore_fpu_mmx;
_state->opt_data.dct_fzig_zag=OC_FZIG_ZAG_MMX;
}
- else oc_state_vtable_init_c(_state);
+ else oc_state_accel_init_c(_state);
}
#endif
diff --git a/thirdparty/libtheora/x86_vc/x86zigzag.h b/thirdparty/libtheora/x86_vc/x86zigzag.h
new file mode 100644
index 0000000000..26f5ed2ea5
--- /dev/null
+++ b/thirdparty/libtheora/x86_vc/x86zigzag.h
@@ -0,0 +1,244 @@
+/********************************************************************
+ * *
+ * THIS FILE IS PART OF THE OggTheora SOFTWARE CODEC SOURCE CODE. *
+ * USE, DISTRIBUTION AND REPRODUCTION OF THIS LIBRARY SOURCE IS *
+ * GOVERNED BY A BSD-STYLE SOURCE LICENSE INCLUDED WITH THIS SOURCE *
+ * IN 'COPYING'. PLEASE READ THESE TERMS BEFORE DISTRIBUTING. *
+ * *
+ * THE Theora SOURCE CODE IS COPYRIGHT (C) 2002-2009 *
+ * by the Xiph.Org Foundation and contributors http://www.xiph.org/ *
+ * *
+ ********************************************************************
+
+ function:
+ last mod: $Id: sse2trans.h 15675 2009-02-06 09:43:27Z tterribe $
+
+ ********************************************************************/
+
+#if !defined(_x86_vc_x86zigzag_H)
+# define _x86_vc_x86zigzag_H (1)
+# include "x86enc.h"
+
+
+/*Converts DCT coefficients from transposed order into zig-zag scan order and
+ stores them in Y.
+ This relies on two macros to load the contents of each row:
+ OC_ZZ_LOAD_ROW_LO(row,reg) and OC_ZZ_LOAD_ROW_HI(row,reg), which load the
+ first four and second four entries of each row into the specified register,
+ respectively.
+ OC_ZZ_LOAD_ROW_LO must be called before OC_ZZ_LOAD_ROW_HI for the same row
+ (because when the rows are already in SSE2 registers, loading the high half
+ destructively modifies the register).
+ The index of each output element in the original 64-element array should wind
+ up in the following 8x8 matrix (the letters indicate the order we compute
+ each 4-tuple below):
+ A 0 8 1 2 9 16 24 17 B
+ C 10 3 4 11 18 25 32 40 E
+ F 33 26 19 12 5 6 13 20 D
+ G 27 34 41 48 56 49 42 35 I
+ L 28 21 14 7 15 22 29 36 M
+ H 43 50 57 58 51 44 37 30 O
+ N 23 31 38 45 52 59 60 53 J
+ P 46 39 47 54 61 62 55 63 K
+ The order of the coefficients within each tuple is reversed in the comments
+ below to reflect the usual MSB to LSB notation.*/
+#define OC_TRANSPOSE_ZIG_ZAG_MMXEXT \
+ OC_ZZ_LOAD_ROW_LO(0,mm0) /*mm0=03 02 01 00*/ \
+ OC_ZZ_LOAD_ROW_LO(1,mm1) /*mm1=11 10 09 08*/ \
+ OC_ZZ_LOAD_ROW_LO(2,mm2) /*mm2=19 18 17 16*/ \
+ OC_ZZ_LOAD_ROW_LO(3,mm3) /*mm3=27 26 25 24*/ \
+ OC_ZZ_LOAD_ROW_HI(0,mm4) /*mm4=07 06 05 04*/ \
+ OC_ZZ_LOAD_ROW_HI(1,mm5) /*mm5=15 14 13 12*/ \
+ OC_ZZ_LOAD_ROW_HI(2,mm6) /*mm6=23 22 21 20*/ \
+ __asm movq mm7,mm0 /*mm7=03 02 01 00*/ \
+ __asm punpckhdq mm0,mm1 /*mm0=11 10 03 02*/ \
+ __asm pshufw mm4,mm4,0x39 /*mm4=04 07 06 05*/ \
+ __asm punpcklwd mm1,mm0 /*mm1=03 09 02 08*/ \
+ __asm pshufw mm5,mm5,0x39 /*mm5=12 15 14 13*/ \
+ __asm punpcklwd mm7,mm1 /*mm7=02 01 08 00 *A*/ \
+ __asm movq [Y+0x00],mm7 \
+ __asm punpckhwd mm1,mm4 /*mm1=04 03 07 09*/ \
+ __asm movq mm7,mm2 /*mm7=19 18 17 16*/ \
+ __asm punpckhdq mm0,mm1 /*mm0=04 03 11 10*/ \
+ __asm punpckhwd mm7,mm5 /*mm7=12 19 15 18*/ \
+ __asm punpcklwd mm1,mm3 /*mm1=25 07 24 09*/ \
+ __asm punpcklwd mm5,mm6 /*mm5=21 14 20 13*/ \
+ __asm punpcklwd mm1,mm2 /*mm1=17 24 16 09 *B*/ \
+ OC_ZZ_LOAD_ROW_LO(4,mm2) /*mm2=35 34 33 32*/ \
+ __asm movq [Y+0x08],mm1 \
+ OC_ZZ_LOAD_ROW_LO(5,mm1) /*mm1=43 42 41 40*/ \
+ __asm pshufw mm0,mm0,0x78 /*mm0=11 04 03 10 *C*/ \
+ __asm movq [Y+0x10],mm0 \
+ __asm punpckhdq mm6,mm4 /*mm6=?? 07 23 22*/ \
+ __asm punpckldq mm4,mm5 /*mm4=20 13 06 05 *D*/ \
+ __asm movq [Y+0x28],mm4 \
+ __asm psrlq mm3,16 /*mm3=.. 27 26 25*/ \
+ __asm pshufw mm0,mm2,0x0E /*mm0=?? ?? 35 34*/ \
+ __asm movq mm4,mm7 /*mm4=12 19 15 18*/ \
+ __asm punpcklwd mm2,mm3 /*mm2=26 33 25 32*/ \
+ __asm punpcklwd mm4,mm1 /*mm4=41 15 40 18*/ \
+ __asm punpckhwd mm3,mm1 /*mm3=43 .. 42 27*/ \
+ __asm punpckldq mm4,mm2 /*mm4=25 32 40 18*/ \
+ __asm punpcklwd mm3,mm0 /*mm3=35 42 34 27*/ \
+ OC_ZZ_LOAD_ROW_LO(6,mm0) /*mm0=51 50 49 48*/ \
+ __asm pshufw mm4,mm4,0x6C /*mm4=40 32 25 18 *E*/ \
+ __asm movq [Y+0x18],mm4 \
+ OC_ZZ_LOAD_ROW_LO(7,mm4) /*mm4=59 58 57 56*/ \
+ __asm punpckhdq mm2,mm7 /*mm2=12 19 26 33 *F*/ \
+ __asm movq [Y+0x20],mm2 \
+ __asm pshufw mm1,mm1,0xD0 /*mm1=43 41 ?? ??*/ \
+ __asm pshufw mm0,mm0,0x87 /*mm0=50 48 49 51*/ \
+ __asm movq mm2,mm3 /*mm2=35 42 34 27*/ \
+ __asm punpckhwd mm1,mm0 /*mm1=50 43 48 41*/ \
+ __asm pshufw mm4,mm4,0x93 /*mm4=58 57 56 59*/ \
+ __asm punpckldq mm3,mm1 /*mm3=48 41 34 27 *G*/ \
+ __asm movq [Y+0x30],mm3 \
+ __asm punpckhdq mm1,mm4 /*mm1=58 57 50 43 *H*/ \
+ __asm movq [Y+0x50],mm1 \
+ OC_ZZ_LOAD_ROW_HI(7,mm1) /*mm1=63 62 61 60*/ \
+ __asm punpcklwd mm4,mm0 /*mm4=49 56 51 59*/ \
+ OC_ZZ_LOAD_ROW_HI(6,mm0) /*mm0=55 54 53 52*/ \
+ __asm psllq mm6,16 /*mm6=07 23 22 ..*/ \
+ __asm movq mm3,mm4 /*mm3=49 56 51 59*/ \
+ __asm punpckhdq mm4,mm2 /*mm4=35 42 49 56 *I*/ \
+ OC_ZZ_LOAD_ROW_HI(3,mm2) /*mm2=31 30 29 28*/ \
+ __asm movq [Y+0x38],mm4 \
+ __asm punpcklwd mm3,mm1 /*mm3=61 51 60 59*/ \
+ __asm punpcklwd mm7,mm6 /*mm7=22 15 .. ??*/ \
+ __asm movq mm4,mm3 /*mm4=61 51 60 59*/ \
+ __asm punpcklwd mm3,mm0 /*mm3=53 60 52 59*/ \
+ __asm punpckhwd mm4,mm0 /*mm4=55 61 54 51*/ \
+ OC_ZZ_LOAD_ROW_HI(4,mm0) /*mm0=39 38 37 36*/ \
+ __asm pshufw mm3,mm3,0xE1 /*mm3=53 60 59 52 *J*/ \
+ __asm movq [Y+0x68],mm3 \
+ __asm movq mm3,mm4 /*mm3=?? ?? 54 51*/ \
+ __asm pshufw mm2,mm2,0x39 /*mm2=28 31 30 29*/ \
+ __asm punpckhwd mm4,mm1 /*mm4=63 55 62 61 *K*/ \
+ OC_ZZ_LOAD_ROW_HI(5,mm1) /*mm1=47 46 45 44*/ \
+ __asm movq [Y+0x78],mm4 \
+ __asm punpckhwd mm6,mm2 /*mm6=28 07 31 23*/ \
+ __asm punpcklwd mm2,mm0 /*mm2=37 30 36 29*/ \
+ __asm punpckhdq mm5,mm6 /*mm5=28 07 21 14*/ \
+ __asm pshufw mm2,mm2,0x4B /*mm2=36 29 30 37*/ \
+ __asm pshufw mm5,mm5,0x87 /*mm5=07 14 21 28 *L*/ \
+ __asm movq [Y+0x40],mm5 \
+ __asm punpckhdq mm7,mm2 /*mm7=36 29 22 15 *M*/ \
+ __asm movq [Y+0x48],mm7 \
+ __asm pshufw mm1,mm1,0x9C /*mm1=46 45 47 44*/ \
+ __asm punpckhwd mm0,mm1 /*mm0=46 39 45 38*/ \
+ __asm punpcklwd mm3,mm1 /*mm3=47 54 44 51*/ \
+ __asm punpckldq mm6,mm0 /*mm6=45 38 31 23 *N*/ \
+ __asm movq [Y+0x60],mm6 \
+ __asm punpckhdq mm0,mm3 /*mm0=47 54 46 39*/ \
+ __asm punpckldq mm3,mm2 /*mm3=30 37 44 51 *O*/ \
+ __asm movq [Y+0x58],mm3 \
+ __asm pshufw mm0,mm0,0xB1 /*mm0=54 47 39 46 *P*/ \
+ __asm movq [Y+0x70],mm0 \
+
+/*Converts DCT coefficients in %[dct] from natural order into zig-zag scan
+ order and stores them in %[qdct].
+ The index of each output element in the original 64-element array should wind
+ up in the following 8x8 matrix (the letters indicate the order we compute
+ each 4-tuple below):
+ A 0 1 8 16 9 2 3 10 B
+ C 17 24 32 25 18 11 4 5 D
+ E 12 19 26 33 40 48 41 34 I
+ H 27 20 13 6 7 14 21 28 G
+ K 35 42 49 56 57 50 43 36 J
+ F 29 22 15 23 30 37 44 51 M
+ P 58 59 52 45 38 31 39 46 L
+ N 53 60 61 54 47 55 62 63 O
+ The order of the coefficients within each tuple is reversed in the comments
+ below to reflect the usual MSB to LSB notation.*/
+#define OC_ZIG_ZAG_MMXEXT \
+ "movq 0x00(%[dct]),%%mm0\n\t" /*mm0=03 02 01 00*/ \
+ "movq 0x08(%[dct]),%%mm1\n\t" /*mm1=07 06 05 04*/ \
+ "movq 0x10(%[dct]),%%mm2\n\t" /*mm2=11 10 09 08*/ \
+ "movq 0x20(%[dct]),%%mm3\n\t" /*mm3=19 18 17 16*/ \
+ "movq 0x30(%[dct]),%%mm4\n\t" /*mm4=27 26 25 24*/ \
+ "movq 0x40(%[dct]),%%mm5\n\t" /*mm5=35 34 33 32*/ \
+ "movq %%mm2,%%mm7\n\t" /*mm7=11 10 09 08*/ \
+ "punpcklwd %%mm3,%%mm2\n\t" /*mm2=17 09 16 08*/ \
+ "movq %%mm0,%%mm6\n\t" /*mm6=03 02 01 00*/ \
+ "punpckldq %%mm2,%%mm0\n\t" /*mm0=16 08 01 00 *A*/ \
+ "movq %%mm0,0x00(%[qdct])\n\t" \
+ "movq 0x18(%[dct]),%%mm0\n\t" /*mm0=15 14 13 12*/ \
+ "punpckhdq %%mm6,%%mm6\n\t" /*mm6=03 02 03 02*/ \
+ "psrlq $16,%%mm7\n\t" /*mm7=.. 11 10 09*/ \
+ "punpckldq %%mm7,%%mm6\n\t" /*mm6=10 09 03 02*/ \
+ "punpckhwd %%mm7,%%mm3\n\t" /*mm3=.. 19 11 18*/ \
+ "pshufw $0xD2,%%mm6,%%mm6\n\t" /*mm6=10 03 02 09 *B*/ \
+ "movq %%mm6,0x08(%[qdct])\n\t" \
+ "psrlq $48,%%mm2\n\t" /*mm2=.. .. .. 17*/ \
+ "movq %%mm1,%%mm6\n\t" /*mm6=07 06 05 04*/ \
+ "punpcklwd %%mm5,%%mm2\n\t" /*mm2=33 .. 32 17*/ \
+ "movq %%mm3,%%mm7\n\t" /*mm7=.. 19 11 18*/ \
+ "punpckldq %%mm1,%%mm3\n\t" /*mm3=05 04 11 18 *C*/ \
+ "por %%mm2,%%mm7\n\t" /*mm7=33 19 ?? ??*/ \
+ "punpcklwd %%mm4,%%mm2\n\t" /*mm2=25 32 24 17 *D**/ \
+ "movq %%mm2,0x10(%[qdct])\n\t" \
+ "movq %%mm3,0x18(%[qdct])\n\t" \
+ "movq 0x28(%[dct]),%%mm2\n\t" /*mm2=23 22 21 20*/ \
+ "movq 0x38(%[dct]),%%mm1\n\t" /*mm1=31 30 29 28*/ \
+ "pshufw $0x9C,%%mm0,%%mm3\n\t" /*mm3=14 13 15 12*/ \
+ "punpckhdq %%mm7,%%mm7\n\t" /*mm7=33 19 33 19*/ \
+ "punpckhwd %%mm3,%%mm6\n\t" /*mm6=14 07 13 06*/ \
+ "punpckldq %%mm0,%%mm0\n\t" /*mm0=13 12 13 12*/ \
+ "punpcklwd %%mm1,%%mm3\n\t" /*mm3=29 15 28 12*/ \
+ "punpckhwd %%mm4,%%mm0\n\t" /*mm0=27 13 26 12*/ \
+ "pshufw $0xB4,%%mm3,%%mm3\n\t" /*mm3=15 29 28 12*/ \
+ "psrlq $48,%%mm4\n\t" /*mm4=.. .. .. 27*/ \
+ "punpcklwd %%mm7,%%mm0\n\t" /*mm0=33 26 19 12 *E*/ \
+ "punpcklwd %%mm1,%%mm4\n\t" /*mm4=29 .. 28 27*/ \
+ "punpckhwd %%mm2,%%mm3\n\t" /*mm3=23 15 22 29 *F*/ \
+ "movq %%mm0,0x20(%[qdct])\n\t" \
+ "movq %%mm3,0x50(%[qdct])\n\t" \
+ "movq 0x60(%[dct]),%%mm3\n\t" /*mm3=51 50 49 48*/ \
+ "movq 0x70(%[dct]),%%mm7\n\t" /*mm7=59 58 57 56*/ \
+ "movq 0x50(%[dct]),%%mm0\n\t" /*mm0=43 42 41 40*/ \
+ "punpcklwd %%mm4,%%mm2\n\t" /*mm2=28 21 27 20*/ \
+ "psrlq $32,%%mm5\n\t" /*mm5=.. .. 35 34*/ \
+ "movq %%mm2,%%mm4\n\t" /*mm4=28 21 27 20*/ \
+ "punpckldq %%mm6,%%mm2\n\t" /*mm2=13 06 27 20*/ \
+ "punpckhdq %%mm4,%%mm6\n\t" /*mm6=28 21 14 07 *G*/ \
+ "movq %%mm3,%%mm4\n\t" /*mm4=51 50 49 48*/ \
+ "pshufw $0xB1,%%mm2,%%mm2\n\t" /*mm2=06 13 20 27 *H*/ \
+ "movq %%mm2,0x30(%[qdct])\n\t" \
+ "movq %%mm6,0x38(%[qdct])\n\t" \
+ "movq 0x48(%[dct]),%%mm2\n\t" /*mm2=39 38 37 36*/ \
+ "punpcklwd %%mm5,%%mm4\n\t" /*mm4=35 49 34 48*/ \
+ "movq 0x58(%[dct]),%%mm5\n\t" /*mm5=47 46 45 44*/ \
+ "punpckldq %%mm7,%%mm6\n\t" /*mm6=57 56 14 07*/ \
+ "psrlq $32,%%mm3\n\t" /*mm3=.. .. 51 50*/ \
+ "punpckhwd %%mm0,%%mm6\n\t" /*mm6=43 57 42 56*/ \
+ "punpcklwd %%mm4,%%mm0\n\t" /*mm0=34 41 48 40 *I*/ \
+ "pshufw $0x4E,%%mm6,%%mm6\n\t" /*mm6=42 56 43 57*/ \
+ "movq %%mm0,0x28(%[qdct])\n\t" \
+ "punpcklwd %%mm2,%%mm3\n\t" /*mm3=37 51 36 50*/ \
+ "punpckhwd %%mm6,%%mm4\n\t" /*mm4=42 35 56 49*/ \
+ "punpcklwd %%mm3,%%mm6\n\t" /*mm6=36 43 50 57 *J*/ \
+ "pshufw $0x4E,%%mm4,%%mm4\n\t" /*mm4=56 49 42 35 *K*/ \
+ "movq %%mm4,0x40(%[qdct])\n\t" \
+ "movq %%mm6,0x48(%[qdct])\n\t" \
+ "movq 0x68(%[dct]),%%mm6\n\t" /*mm6=55 54 53 52*/ \
+ "movq 0x78(%[dct]),%%mm0\n\t" /*mm0=63 62 61 60*/ \
+ "psrlq $32,%%mm1\n\t" /*mm1=.. .. 31 30*/ \
+ "pshufw $0xD8,%%mm5,%%mm5\n\t" /*mm5=47 45 46 44*/ \
+ "pshufw $0x0B,%%mm3,%%mm3\n\t" /*mm3=50 50 51 37*/ \
+ "punpcklwd %%mm5,%%mm1\n\t" /*mm1=46 31 44 30*/ \
+ "pshufw $0xC9,%%mm6,%%mm6\n\t" /*mm6=55 52 54 53*/ \
+ "punpckhwd %%mm1,%%mm2\n\t" /*mm2=46 39 31 38 *L*/ \
+ "punpcklwd %%mm3,%%mm1\n\t" /*mm1=51 44 37 30 *M*/ \
+ "movq %%mm2,0x68(%[qdct])\n\t" \
+ "movq %%mm1,0x58(%[qdct])\n\t" \
+ "punpckhwd %%mm6,%%mm5\n\t" /*mm5=55 47 52 45*/ \
+ "punpckldq %%mm0,%%mm6\n\t" /*mm6=61 60 54 53*/ \
+ "pshufw $0x10,%%mm5,%%mm4\n\t" /*mm4=45 52 45 45*/ \
+ "pshufw $0x78,%%mm6,%%mm6\n\t" /*mm6=53 60 61 54 *N*/ \
+ "punpckhdq %%mm0,%%mm5\n\t" /*mm5=63 62 55 47 *O*/ \
+ "punpckhdq %%mm4,%%mm7\n\t" /*mm7=45 52 59 58 *P*/ \
+ "movq %%mm6,0x70(%[qdct])\n\t" \
+ "movq %%mm5,0x78(%[qdct])\n\t" \
+ "movq %%mm7,0x60(%[qdct])\n\t" \
+
+#endif
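For reference, the second macro above is the SIMD form of a plain permutation by the zig-zag index table (the first folds a transpose in as well). A minimal C sketch of that permutation, with the table written out from the 8x8 matrix in the second comment block (names and typedef source are illustrative only, assuming the libogg ogg_int16_t typedef):

#include <ogg/os_types.h>  /* ogg_int16_t */

/* Sketch: qdct[i] = dct[OC_ZZ_SKETCH[i]], i.e. natural order -> zig-zag scan.
   The SIMD macro produces the same output four coefficients at a time. */
static const unsigned char OC_ZZ_SKETCH[64]={
   0, 1, 8,16, 9, 2, 3,10,
  17,24,32,25,18,11, 4, 5,
  12,19,26,33,40,48,41,34,
  27,20,13, 6, 7,14,21,28,
  35,42,49,56,57,50,43,36,
  29,22,15,23,30,37,44,51,
  58,59,52,45,38,31,39,46,
  53,60,61,54,47,55,62,63
};
static void oc_zig_zag_c(ogg_int16_t _qdct[64],const ogg_int16_t _dct[64]){
  int i;
  for(i=0;i<64;i++)_qdct[i]=_dct[OC_ZZ_SKETCH[i]];
}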
diff --git a/thirdparty/mbedtls/include/mbedtls/asn1write.h b/thirdparty/mbedtls/include/mbedtls/asn1write.h
index 44afae0e56..5da7654bb4 100644
--- a/thirdparty/mbedtls/include/mbedtls/asn1write.h
+++ b/thirdparty/mbedtls/include/mbedtls/asn1write.h
@@ -90,7 +90,7 @@ int mbedtls_asn1_write_raw_buffer( unsigned char **p, unsigned char *start,
#if defined(MBEDTLS_BIGNUM_C)
/**
- * \brief Write a arbitrary-precision number (#MBEDTLS_ASN1_INTEGER)
+ * \brief Write an arbitrary-precision number (#MBEDTLS_ASN1_INTEGER)
* in ASN.1 format.
*
* \note This function works backwards in data buffer.
diff --git a/thirdparty/mbedtls/include/mbedtls/bignum.h b/thirdparty/mbedtls/include/mbedtls/bignum.h
index dd594c512d..c61db82c6d 100644
--- a/thirdparty/mbedtls/include/mbedtls/bignum.h
+++ b/thirdparty/mbedtls/include/mbedtls/bignum.h
@@ -182,6 +182,20 @@
#endif /* !MBEDTLS_NO_UDBL_DIVISION */
#endif /* !MBEDTLS_HAVE_INT64 */
+/** \typedef mbedtls_mpi_uint
+ * \brief The type of machine digits in a bignum, called _limbs_.
+ *
+ * This is always an unsigned integer type with no padding bits. The size
+ * is platform-dependent.
+ */
+
+/** \typedef mbedtls_mpi_sint
+ * \brief The signed type corresponding to #mbedtls_mpi_uint.
+ *
+ * This is always a signed integer type with no padding bits. The size
+ * is platform-dependent.
+ */
+
#ifdef __cplusplus
extern "C" {
#endif
@@ -191,9 +205,27 @@ extern "C" {
*/
typedef struct mbedtls_mpi
{
- int s; /*!< Sign: -1 if the mpi is negative, 1 otherwise */
- size_t n; /*!< total # of limbs */
- mbedtls_mpi_uint *p; /*!< pointer to limbs */
+ /** Sign: -1 if the mpi is negative, 1 otherwise.
+ *
+ * The number 0 must be represented with `s = +1`. Although many library
+ * functions treat all-limbs-zero as equivalent to a valid representation
+ * of 0 regardless of the sign bit, there are exceptions, so bignum
+ * functions and external callers must always set \c s to +1 for the
+ * number zero.
+ *
+ * Note that this implies that calloc() or `... = {0}` does not create
+ * a valid MPI representation. You must call mbedtls_mpi_init().
+ */
+ int s;
+
+ /** Total number of limbs in \c p. */
+ size_t n;
+
+ /** Pointer to limbs.
+ *
+ * This may be \c NULL if \c n is 0.
+ */
+ mbedtls_mpi_uint *p;
}
mbedtls_mpi;
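A short usage sketch of the initialization rule spelled out above (error checking omitted for brevity):

#include "mbedtls/bignum.h"

/* Sketch: a zeroed struct is not a valid MPI; always start from
   mbedtls_mpi_init(), which sets s = 1, n = 0, p = NULL. */
void mpi_lifecycle_example( void )
{
    mbedtls_mpi X;
    mbedtls_mpi_init( &X );             /* canonical representation of 0 */
    (void) mbedtls_mpi_lset( &X, 42 );  /* allocates limbs and stores 42 */
    mbedtls_mpi_free( &X );
}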
@@ -280,7 +312,7 @@ void mbedtls_mpi_swap( mbedtls_mpi *X, mbedtls_mpi *Y );
* \param Y The MPI to be assigned from. This must point to an
* initialized MPI.
* \param assign The condition deciding whether to perform the
- * assignment or not. Possible values:
+ * assignment or not. Must be either 0 or 1:
* * \c 1: Perform the assignment `X = Y`.
* * \c 0: Keep the original value of \p X.
*
@@ -291,6 +323,10 @@ void mbedtls_mpi_swap( mbedtls_mpi *X, mbedtls_mpi *Y );
* information through branch prediction and/or memory access
* patterns analysis).
*
+ * \warning If \p assign is neither 0 nor 1, the result of this function
+ * is indeterminate, and the resulting value in \p X might be
+ * neither its original value nor the value in \p Y.
+ *
* \return \c 0 if successful.
* \return #MBEDTLS_ERR_MPI_ALLOC_FAILED if memory allocation failed.
* \return Another negative error code on other kinds of failure.
@@ -303,24 +339,28 @@ int mbedtls_mpi_safe_cond_assign( mbedtls_mpi *X, const mbedtls_mpi *Y, unsigned
*
* \param X The first MPI. This must be initialized.
* \param Y The second MPI. This must be initialized.
- * \param assign The condition deciding whether to perform
- * the swap or not. Possible values:
+ * \param swap The condition deciding whether to perform
+ * the swap or not. Must be either 0 or 1:
* * \c 1: Swap the values of \p X and \p Y.
* * \c 0: Keep the original values of \p X and \p Y.
*
* \note This function is equivalent to
- * if( assign ) mbedtls_mpi_swap( X, Y );
+ * if( swap ) mbedtls_mpi_swap( X, Y );
* except that it avoids leaking any information about whether
- * the assignment was done or not (the above code may leak
+ * the swap was done or not (the above code may leak
* information through branch prediction and/or memory access
* patterns analysis).
*
+ * \warning If \p swap is neither 0 nor 1, the result of this function
+ * is indeterminate, and both \p X and \p Y might end up with
+ * values different to either of the original ones.
+ *
* \return \c 0 if successful.
* \return #MBEDTLS_ERR_MPI_ALLOC_FAILED if memory allocation failed.
* \return Another negative error code on other kinds of failure.
*
*/
-int mbedtls_mpi_safe_cond_swap( mbedtls_mpi *X, mbedtls_mpi *Y, unsigned char assign );
+int mbedtls_mpi_safe_cond_swap( mbedtls_mpi *X, mbedtls_mpi *Y, unsigned char swap );
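A usage sketch for the constant-time conditional assignment documented above (select_mpi is a hypothetical wrapper; producing an exactly-0-or-1 condition is the caller's job):

#include "mbedtls/bignum.h"

/* Sketch: X = bit ? Y : X without a secret-dependent branch. */
int select_mpi( mbedtls_mpi *X, const mbedtls_mpi *Y, int flag )
{
    unsigned char bit = ( flag != 0 );  /* must be exactly 0 or 1 */
    return( mbedtls_mpi_safe_cond_assign( X, Y, bit ) );
}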
/**
* \brief Store integer value in MPI.
@@ -753,11 +793,11 @@ int mbedtls_mpi_mul_int( mbedtls_mpi *X, const mbedtls_mpi *A,
*
* \param Q The destination MPI for the quotient.
* This may be \c NULL if the value of the
- * quotient is not needed.
+ * quotient is not needed. This must not alias A or B.
* \param R The destination MPI for the remainder value.
* This may be \c NULL if the value of the
- * remainder is not needed.
- * \param A The dividend. This must point to an initialized MPi.
+ * remainder is not needed. This must not alias A or B.
+ * \param A The dividend. This must point to an initialized MPI.
* \param B The divisor. This must point to an initialized MPI.
*
* \return \c 0 if successful.
@@ -774,10 +814,10 @@ int mbedtls_mpi_div_mpi( mbedtls_mpi *Q, mbedtls_mpi *R, const mbedtls_mpi *A,
*
* \param Q The destination MPI for the quotient.
* This may be \c NULL if the value of the
- * quotient is not needed.
+ * quotient is not needed. This must not alias A.
* \param R The destination MPI for the remainder value.
* This may be \c NULL if the value of the
- * remainder is not needed.
+ * remainder is not needed. This must not alias A.
* \param A The dividend. This must point to an initialized MPi.
* \param b The divisor.
*
@@ -832,6 +872,7 @@ int mbedtls_mpi_mod_int( mbedtls_mpi_uint *r, const mbedtls_mpi *A,
* \brief Perform a sliding-window exponentiation: X = A^E mod N
*
* \param X The destination MPI. This must point to an initialized MPI.
+ * This must not alias E or N.
* \param A The base of the exponentiation.
* This must point to an initialized MPI.
* \param E The exponent MPI. This must point to an initialized MPI.
diff --git a/thirdparty/mbedtls/include/mbedtls/bn_mul.h b/thirdparty/mbedtls/include/mbedtls/bn_mul.h
index 31137cd4c2..a3fc363815 100644
--- a/thirdparty/mbedtls/include/mbedtls/bn_mul.h
+++ b/thirdparty/mbedtls/include/mbedtls/bn_mul.h
@@ -95,12 +95,28 @@
( !defined(__ARMCC_VERSION) || __ARMCC_VERSION >= 6000000 )
/*
+ * GCC < 5.0 treated the x86 ebx (which is used for the GOT) as a
+ * fixed reserved register when building as PIC, leading to errors
+ * like: bn_mul.h:46:13: error: PIC register clobbered by 'ebx' in 'asm'
+ *
+ * This is fixed by an improved register allocator in GCC 5+. From the
+ * release notes:
+ * Register allocation improvements: Reuse of the PIC hard register,
+ * instead of using a fixed register, was implemented on x86/x86-64
+ * targets. This improves generated PIC code performance as more hard
+ * registers can be used.
+ */
+#if defined(__GNUC__) && __GNUC__ < 5 && defined(__PIC__)
+#define MULADDC_CANNOT_USE_EBX
+#endif
+
+/*
* Disable use of the i386 assembly code below if option -O0, to disable all
* compiler optimisations, is passed, detected with __OPTIMIZE__
* This is done as the number of registers used in the assembly code doesn't
* work with the -O0 option.
*/
-#if defined(__i386__) && defined(__OPTIMIZE__)
+#if defined(__i386__) && defined(__OPTIMIZE__) && !defined(MULADDC_CANNOT_USE_EBX)
#define MULADDC_INIT \
asm( \
@@ -563,10 +579,20 @@
"andi r7, r6, 0xffff \n\t" \
"bsrli r6, r6, 16 \n\t"
-#define MULADDC_CORE \
+#if(__BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)
+#define MULADDC_LHUI \
+ "lhui r9, r3, 0 \n\t" \
+ "addi r3, r3, 2 \n\t" \
+ "lhui r8, r3, 0 \n\t"
+#else
+#define MULADDC_LHUI \
"lhui r8, r3, 0 \n\t" \
"addi r3, r3, 2 \n\t" \
- "lhui r9, r3, 0 \n\t" \
+ "lhui r9, r3, 0 \n\t"
+#endif
+
+#define MULADDC_CORE \
+ MULADDC_LHUI \
"addi r3, r3, 2 \n\t" \
"mul r10, r9, r6 \n\t" \
"mul r11, r8, r7 \n\t" \
diff --git a/thirdparty/mbedtls/include/mbedtls/check_config.h b/thirdparty/mbedtls/include/mbedtls/check_config.h
index be5c548e56..7ae1ff94db 100644
--- a/thirdparty/mbedtls/include/mbedtls/check_config.h
+++ b/thirdparty/mbedtls/include/mbedtls/check_config.h
@@ -525,6 +525,20 @@
#error "MBEDTLS_PLATFORM_SNPRINTF_MACRO and MBEDTLS_PLATFORM_STD_SNPRINTF/MBEDTLS_PLATFORM_SNPRINTF_ALT cannot be defined simultaneously"
#endif
+#if defined(MBEDTLS_PLATFORM_VSNPRINTF_ALT) && !defined(MBEDTLS_PLATFORM_C)
+#error "MBEDTLS_PLATFORM_VSNPRINTF_ALT defined, but not all prerequisites"
+#endif
+
+#if defined(MBEDTLS_PLATFORM_VSNPRINTF_MACRO) && !defined(MBEDTLS_PLATFORM_C)
+#error "MBEDTLS_PLATFORM_VSNPRINTF_MACRO defined, but not all prerequisites"
+#endif
+
+#if defined(MBEDTLS_PLATFORM_VSNPRINTF_MACRO) &&\
+ ( defined(MBEDTLS_PLATFORM_STD_VSNPRINTF) ||\
+ defined(MBEDTLS_PLATFORM_VSNPRINTF_ALT) )
+#error "MBEDTLS_PLATFORM_VSNPRINTF_MACRO and MBEDTLS_PLATFORM_STD_VSNPRINTF/MBEDTLS_PLATFORM_VSNPRINTF_ALT cannot be defined simultaneously"
+#endif
+
#if defined(MBEDTLS_PLATFORM_STD_MEM_HDR) &&\
!defined(MBEDTLS_PLATFORM_NO_STD_FUNCTIONS)
#error "MBEDTLS_PLATFORM_STD_MEM_HDR defined, but not all prerequisites"
@@ -650,10 +664,9 @@
MBEDTLS_ECDSA_C requires MBEDTLS_PK_WRITE_C to be defined."
#endif
-#if defined(MBEDTLS_RSA_C) && defined(MBEDTLS_PKCS1_V15) && \
- !defined(MBEDTLS_PK_WRITE_C) && defined(MBEDTLS_PSA_CRYPTO_C)
-#error "MBEDTLS_PSA_CRYPTO_C, MBEDTLS_RSA_C and MBEDTLS_PKCS1_V15 defined, \
- but not all prerequisites"
+#if defined(MBEDTLS_PSA_CRYPTO_C) && defined(MBEDTLS_RSA_C) && \
+ !( defined(MBEDTLS_PK_PARSE_C) && defined(MBEDTLS_PK_WRITE_C) )
+#error "MBEDTLS_PSA_CRYPTO_C with MBEDTLS_RSA_C requires MBEDTLS_PK_PARSE_C and MBEDTLS_PK_WRITE_C"
#endif
#if defined(MBEDTLS_RSA_C) && ( !defined(MBEDTLS_BIGNUM_C) || \
@@ -812,6 +825,11 @@
#error "MBEDTLS_SSL_TICKET_C defined, but not all prerequisites"
#endif
+#if defined(MBEDTLS_SSL_TICKET_C) && \
+ !( defined(MBEDTLS_GCM_C) || defined(MBEDTLS_CCM_C) || defined(MBEDTLS_CHACHAPOLY_C) )
+#error "MBEDTLS_SSL_TICKET_C defined, but not all prerequisites"
+#endif
+
#if defined(MBEDTLS_SSL_CBC_RECORD_SPLITTING) && \
!defined(MBEDTLS_SSL_PROTO_SSL3) && !defined(MBEDTLS_SSL_PROTO_TLS1)
#error "MBEDTLS_SSL_CBC_RECORD_SPLITTING defined, but not all prerequisites"
@@ -926,6 +944,10 @@
#error "MBEDTLS_SSL_VARIABLE_BUFFER_LENGTH defined, but not all prerequisites"
#endif
+#if defined(MBEDTLS_SSL_CONTEXT_SERIALIZATION) && !( defined(MBEDTLS_GCM_C) || defined(MBEDTLS_CCM_C) || defined(MBEDTLS_CHACHAPOLY_C) )
+#error "MBEDTLS_SSL_CONTEXT_SERIALIZATION defined, but not all prerequisites"
+#endif
+
/*
* Avoid warning from -pedantic. This is a convenient place for this
* workaround since this is included by every single file before the
diff --git a/thirdparty/mbedtls/include/mbedtls/config.h b/thirdparty/mbedtls/include/mbedtls/config.h
index 1cd6eb6634..61db79362f 100644
--- a/thirdparty/mbedtls/include/mbedtls/config.h
+++ b/thirdparty/mbedtls/include/mbedtls/config.h
@@ -1329,7 +1329,7 @@
* Include backtrace information with each allocated block.
*
* Requires: MBEDTLS_MEMORY_BUFFER_ALLOC_C
- * GLIBC-compatible backtrace() an backtrace_symbols() support
+ * GLIBC-compatible backtrace() and backtrace_symbols() support
*
* Uncomment this macro to include backtrace information
*/
@@ -1620,6 +1620,8 @@
* saved after the handshake to allow for more efficient serialization, so if
* you don't need this feature you'll save RAM by disabling it.
*
+ * Requires: MBEDTLS_GCM_C or MBEDTLS_CCM_C or MBEDTLS_CHACHAPOLY_C
+ *
* Comment to disable the context serialization APIs.
*/
#define MBEDTLS_SSL_CONTEXT_SERIALIZATION
@@ -2425,7 +2427,7 @@
* MBEDTLS_TLS_PSK_WITH_RC4_128_SHA
*
* \warning ARC4 is considered a weak cipher and its use constitutes a
- * security risk. If possible, we recommend avoidng dependencies on
+ * security risk. If possible, we recommend avoiding dependencies on
* it, and considering stronger ciphers instead.
*
*/
@@ -3030,7 +3032,7 @@
*
* \note See also our Knowledge Base article about porting to a new
* environment:
- * https://tls.mbed.org/kb/how-to/how-do-i-port-mbed-tls-to-a-new-environment-OS
+ * https://mbed-tls.readthedocs.io/en/latest/kb/how-to/how-do-i-port-mbed-tls-to-a-new-environment-OS
*
* Module: library/net_sockets.c
*
@@ -3400,7 +3402,8 @@
* Module: library/ssl_ticket.c
* Caller:
*
- * Requires: MBEDTLS_CIPHER_C
+ * Requires: MBEDTLS_CIPHER_C &&
+ * ( MBEDTLS_GCM_C || MBEDTLS_CCM_C || MBEDTLS_CHACHAPOLY_C )
*/
#define MBEDTLS_SSL_TICKET_C
@@ -3456,7 +3459,7 @@
* contexts are not shared between threads. If you do intend to use contexts
* between threads, you will need to enable this layer to prevent race
* conditions. See also our Knowledge Base article about threading:
- * https://tls.mbed.org/kb/development/thread-safety-and-multi-threading
+ * https://mbed-tls.readthedocs.io/en/latest/kb/development/thread-safety-and-multi-threading
*
* Module: library/threading.c
*
@@ -3488,7 +3491,7 @@
*
* \note See also our Knowledge Base article about porting to a new
* environment:
- * https://tls.mbed.org/kb/how-to/how-do-i-port-mbed-tls-to-a-new-environment-OS
+ * https://mbed-tls.readthedocs.io/en/latest/kb/how-to/how-do-i-port-mbed-tls-to-a-new-environment-OS
*
* Module: library/timing.c
* Caller: library/havege.c
diff --git a/thirdparty/mbedtls/include/mbedtls/ecdsa.h b/thirdparty/mbedtls/include/mbedtls/ecdsa.h
index 264a638bb5..118f7cedb1 100644
--- a/thirdparty/mbedtls/include/mbedtls/ecdsa.h
+++ b/thirdparty/mbedtls/include/mbedtls/ecdsa.h
@@ -309,10 +309,8 @@ int mbedtls_ecdsa_sign_det_ext( mbedtls_ecp_group *grp, mbedtls_mpi *r,
* This must be initialized.
*
* \return \c 0 on success.
- * \return #MBEDTLS_ERR_ECP_BAD_INPUT_DATA if the signature
- * is invalid.
* \return An \c MBEDTLS_ERR_ECP_XXX or \c MBEDTLS_MPI_XXX
- * error code on failure for any other reason.
+ * error code on failure.
*/
int mbedtls_ecdsa_verify( mbedtls_ecp_group *grp,
const unsigned char *buf, size_t blen,
diff --git a/thirdparty/mbedtls/include/mbedtls/md.h b/thirdparty/mbedtls/include/mbedtls/md.h
index 84fafd2ac7..9cea40a89c 100644
--- a/thirdparty/mbedtls/include/mbedtls/md.h
+++ b/thirdparty/mbedtls/include/mbedtls/md.h
@@ -215,7 +215,7 @@ MBEDTLS_CHECK_RETURN_TYPICAL
int mbedtls_md_setup( mbedtls_md_context_t *ctx, const mbedtls_md_info_t *md_info, int hmac );
/**
- * \brief This function clones the state of an message-digest
+ * \brief This function clones the state of a message-digest
* context.
*
* \note You must call mbedtls_md_setup() on \c dst before calling
diff --git a/thirdparty/mbedtls/include/mbedtls/platform.h b/thirdparty/mbedtls/include/mbedtls/platform.h
index 06dd192eab..eaf5122bec 100644
--- a/thirdparty/mbedtls/include/mbedtls/platform.h
+++ b/thirdparty/mbedtls/include/mbedtls/platform.h
@@ -11,6 +11,13 @@
* implementations of these functions, or implementations specific to
* their platform, which can be statically linked to the library or
* dynamically configured at runtime.
+ *
+ * When all compilation options related to platform abstraction are
+ * disabled, this header just defines `mbedtls_xxx` function names
+ * as aliases to the standard `xxx` function.
+ *
+ * Most modules in the library and example programs are expected to
+ * include this header.
*/
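In the all-defaults case this boils down to straight aliases; a sketch of the effective mapping (the same fallbacks that were previously repeated per-file and are removed later in this patch):

/* Sketch of the effective default mapping when no platform options are set: */
#define mbedtls_printf   printf
#define mbedtls_calloc   calloc
#define mbedtls_free     free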
/*
* Copyright The Mbed TLS Contributors
diff --git a/thirdparty/mbedtls/include/mbedtls/ripemd160.h b/thirdparty/mbedtls/include/mbedtls/ripemd160.h
index 63270d1239..f890aefaee 100644
--- a/thirdparty/mbedtls/include/mbedtls/ripemd160.h
+++ b/thirdparty/mbedtls/include/mbedtls/ripemd160.h
@@ -74,7 +74,7 @@ void mbedtls_ripemd160_init( mbedtls_ripemd160_context *ctx );
void mbedtls_ripemd160_free( mbedtls_ripemd160_context *ctx );
/**
- * \brief Clone (the state of) an RIPEMD-160 context
+ * \brief Clone (the state of) a RIPEMD-160 context
*
* \param dst The destination context
* \param src The context to be cloned
diff --git a/thirdparty/mbedtls/include/mbedtls/rsa.h b/thirdparty/mbedtls/include/mbedtls/rsa.h
index 062df73aa0..8559f67bb9 100644
--- a/thirdparty/mbedtls/include/mbedtls/rsa.h
+++ b/thirdparty/mbedtls/include/mbedtls/rsa.h
@@ -491,7 +491,7 @@ int mbedtls_rsa_check_pubkey( const mbedtls_rsa_context *ctx );
* the current function does not have access to them,
* and therefore cannot check them. See mbedtls_rsa_complete().
* If you want to check the consistency of the entire
- * content of an PKCS1-encoded RSA private key, for example, you
+ * content of a PKCS1-encoded RSA private key, for example, you
* should use mbedtls_rsa_validate_params() before setting
* up the RSA context.
* Additionally, if the implementation performs empirical checks,
diff --git a/thirdparty/mbedtls/include/mbedtls/ssl.h b/thirdparty/mbedtls/include/mbedtls/ssl.h
index 5064ec5689..aecac93f33 100644
--- a/thirdparty/mbedtls/include/mbedtls/ssl.h
+++ b/thirdparty/mbedtls/include/mbedtls/ssl.h
@@ -624,7 +624,7 @@ typedef int mbedtls_ssl_recv_t( void *ctx,
* \param ctx Context for the receive callback (typically a file descriptor)
* \param buf Buffer to write the received data to
* \param len Length of the receive buffer
- * \param timeout Maximum nomber of millisecondes to wait for data
+ * \param timeout Maximum number of milliseconds to wait for data
* 0 means no timeout (potentially waiting forever)
*
* \return The callback must return the number of bytes received,
@@ -652,7 +652,7 @@ typedef int mbedtls_ssl_recv_timeout_t( void *ctx,
* for the associated \c mbedtls_ssl_get_timer_t callback to
* return correct information.
*
- * \note If using a event-driven style of programming, an event must
+ * \note If using an event-driven style of programming, an event must
* be generated when the final delay is passed. The event must
* cause a call to \c mbedtls_ssl_handshake() with the proper
* SSL context to be scheduled. Care must be taken to ensure
@@ -2000,7 +2000,7 @@ int mbedtls_ssl_check_record( mbedtls_ssl_context const *ssl,
* here, except if using an event-driven style.
*
* \note See also the "DTLS tutorial" article in our knowledge base.
- * https://tls.mbed.org/kb/how-to/dtls-tutorial
+ * https://mbed-tls.readthedocs.io/en/latest/kb/how-to/dtls-tutorial
*/
void mbedtls_ssl_set_timer_cb( mbedtls_ssl_context *ssl,
void *p_timer,
diff --git a/thirdparty/mbedtls/include/mbedtls/ssl_internal.h b/thirdparty/mbedtls/include/mbedtls/ssl_internal.h
index 46ade67b9c..77ad755477 100644
--- a/thirdparty/mbedtls/include/mbedtls/ssl_internal.h
+++ b/thirdparty/mbedtls/include/mbedtls/ssl_internal.h
@@ -782,7 +782,7 @@ struct mbedtls_ssl_transform
#if defined(MBEDTLS_SSL_DTLS_CONNECTION_ID)
uint8_t in_cid_len;
uint8_t out_cid_len;
- unsigned char in_cid [ MBEDTLS_SSL_CID_OUT_LEN_MAX ];
+ unsigned char in_cid [ MBEDTLS_SSL_CID_IN_LEN_MAX ];
unsigned char out_cid[ MBEDTLS_SSL_CID_OUT_LEN_MAX ];
#endif /* MBEDTLS_SSL_DTLS_CONNECTION_ID */
diff --git a/thirdparty/mbedtls/include/mbedtls/version.h b/thirdparty/mbedtls/include/mbedtls/version.h
index 44adcbfe03..b3b441d46b 100644
--- a/thirdparty/mbedtls/include/mbedtls/version.h
+++ b/thirdparty/mbedtls/include/mbedtls/version.h
@@ -38,16 +38,16 @@
*/
#define MBEDTLS_VERSION_MAJOR 2
#define MBEDTLS_VERSION_MINOR 28
-#define MBEDTLS_VERSION_PATCH 1
+#define MBEDTLS_VERSION_PATCH 2
/**
* The single version number has the following structure:
* MMNNPP00
* Major version | Minor version | Patch version
*/
-#define MBEDTLS_VERSION_NUMBER 0x021C0100
-#define MBEDTLS_VERSION_STRING "2.28.1"
-#define MBEDTLS_VERSION_STRING_FULL "mbed TLS 2.28.1"
+#define MBEDTLS_VERSION_NUMBER 0x021C0200
+#define MBEDTLS_VERSION_STRING "2.28.2"
+#define MBEDTLS_VERSION_STRING_FULL "mbed TLS 2.28.2"
#if defined(MBEDTLS_VERSION_C)
diff --git a/thirdparty/mbedtls/library/aes.c b/thirdparty/mbedtls/library/aes.c
index 03d8b7ea61..74ea2672b0 100644
--- a/thirdparty/mbedtls/library/aes.c
+++ b/thirdparty/mbedtls/library/aes.c
@@ -40,14 +40,7 @@
#include "mbedtls/aesni.h"
#endif
-#if defined(MBEDTLS_SELF_TEST)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#define mbedtls_printf printf
-#endif /* MBEDTLS_PLATFORM_C */
-#endif /* MBEDTLS_SELF_TEST */
#if !defined(MBEDTLS_AES_ALT)
@@ -1170,7 +1163,7 @@ int mbedtls_aes_crypt_xts( mbedtls_aes_xts_context *ctx,
{
/* We are on the last block in a decrypt operation that has
* leftover bytes, so we need to use the next tweak for this block,
- * and this tweak for the lefover bytes. Save the current tweak for
+ * and this tweak for the leftover bytes. Save the current tweak for
* the leftovers and then update the current tweak for use on this,
* the last full block. */
memcpy( prev_tweak, tweak, sizeof( tweak ) );
@@ -1770,7 +1763,8 @@ int mbedtls_aes_self_test( int verbose )
unsigned char key[32];
unsigned char buf[64];
const unsigned char *aes_tests;
-#if defined(MBEDTLS_CIPHER_MODE_CBC) || defined(MBEDTLS_CIPHER_MODE_CFB)
+#if defined(MBEDTLS_CIPHER_MODE_CBC) || defined(MBEDTLS_CIPHER_MODE_CFB) || \
+ defined(MBEDTLS_CIPHER_MODE_OFB)
unsigned char iv[16];
#endif
#if defined(MBEDTLS_CIPHER_MODE_CBC)
diff --git a/thirdparty/mbedtls/library/arc4.c b/thirdparty/mbedtls/library/arc4.c
index b34dc5e754..dcc13d539a 100644
--- a/thirdparty/mbedtls/library/arc4.c
+++ b/thirdparty/mbedtls/library/arc4.c
@@ -31,14 +31,7 @@
#include <string.h>
-#if defined(MBEDTLS_SELF_TEST)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#define mbedtls_printf printf
-#endif /* MBEDTLS_PLATFORM_C */
-#endif /* MBEDTLS_SELF_TEST */
#if !defined(MBEDTLS_ARC4_ALT)
diff --git a/thirdparty/mbedtls/library/aria.c b/thirdparty/mbedtls/library/aria.c
index bc05c4a319..5e52eea91e 100644
--- a/thirdparty/mbedtls/library/aria.c
+++ b/thirdparty/mbedtls/library/aria.c
@@ -31,24 +31,12 @@
#include <string.h>
-#if defined(MBEDTLS_SELF_TEST)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#define mbedtls_printf printf
-#endif /* MBEDTLS_PLATFORM_C */
-#endif /* MBEDTLS_SELF_TEST */
#if !defined(MBEDTLS_ARIA_ALT)
#include "mbedtls/platform_util.h"
-#if ( defined(__ARMCC_VERSION) || defined(_MSC_VER) ) && \
- !defined(inline) && !defined(__cplusplus)
-#define inline __inline
-#endif
-
/* Parameter validation macros */
#define ARIA_VALIDATE_RET( cond ) \
MBEDTLS_INTERNAL_VALIDATE_RET( cond, MBEDTLS_ERR_ARIA_BAD_INPUT_DATA )
@@ -895,15 +883,17 @@ static const uint8_t aria_test2_ctr_ct[3][48] = // CTR ciphertext
};
#endif /* MBEDTLS_CIPHER_MODE_CFB */
-#define ARIA_SELF_TEST_IF_FAIL \
- { \
- if( verbose ) \
- mbedtls_printf( "failed\n" ); \
- goto exit; \
- } else { \
- if( verbose ) \
- mbedtls_printf( "passed\n" ); \
- }
+#define ARIA_SELF_TEST_ASSERT( cond ) \
+ do { \
+ if( cond ) { \
+ if( verbose ) \
+ mbedtls_printf( "failed\n" ); \
+ goto exit; \
+ } else { \
+ if( verbose ) \
+ mbedtls_printf( "passed\n" ); \
+ } \
+ } while( 0 )
/*
* Checkup routine
@@ -937,16 +927,18 @@ int mbedtls_aria_self_test( int verbose )
mbedtls_printf( " ARIA-ECB-%d (enc): ", 128 + 64 * i );
mbedtls_aria_setkey_enc( &ctx, aria_test1_ecb_key, 128 + 64 * i );
mbedtls_aria_crypt_ecb( &ctx, aria_test1_ecb_pt, blk );
- if( memcmp( blk, aria_test1_ecb_ct[i], MBEDTLS_ARIA_BLOCKSIZE ) != 0 )
- ARIA_SELF_TEST_IF_FAIL;
+ ARIA_SELF_TEST_ASSERT(
+ memcmp( blk, aria_test1_ecb_ct[i], MBEDTLS_ARIA_BLOCKSIZE )
+ != 0 );
/* test ECB decryption */
if( verbose )
mbedtls_printf( " ARIA-ECB-%d (dec): ", 128 + 64 * i );
mbedtls_aria_setkey_dec( &ctx, aria_test1_ecb_key, 128 + 64 * i );
mbedtls_aria_crypt_ecb( &ctx, aria_test1_ecb_ct[i], blk );
- if( memcmp( blk, aria_test1_ecb_pt, MBEDTLS_ARIA_BLOCKSIZE ) != 0 )
- ARIA_SELF_TEST_IF_FAIL;
+ ARIA_SELF_TEST_ASSERT(
+ memcmp( blk, aria_test1_ecb_pt, MBEDTLS_ARIA_BLOCKSIZE )
+ != 0 );
}
if( verbose )
mbedtls_printf( "\n" );
@@ -965,8 +957,8 @@ int mbedtls_aria_self_test( int verbose )
memset( buf, 0x55, sizeof( buf ) );
mbedtls_aria_crypt_cbc( &ctx, MBEDTLS_ARIA_ENCRYPT, 48, iv,
aria_test2_pt, buf );
- if( memcmp( buf, aria_test2_cbc_ct[i], 48 ) != 0 )
- ARIA_SELF_TEST_IF_FAIL;
+ ARIA_SELF_TEST_ASSERT( memcmp( buf, aria_test2_cbc_ct[i], 48 )
+ != 0 );
/* Test CBC decryption */
if( verbose )
@@ -976,8 +968,7 @@ int mbedtls_aria_self_test( int verbose )
memset( buf, 0xAA, sizeof( buf ) );
mbedtls_aria_crypt_cbc( &ctx, MBEDTLS_ARIA_DECRYPT, 48, iv,
aria_test2_cbc_ct[i], buf );
- if( memcmp( buf, aria_test2_pt, 48 ) != 0 )
- ARIA_SELF_TEST_IF_FAIL;
+ ARIA_SELF_TEST_ASSERT( memcmp( buf, aria_test2_pt, 48 ) != 0 );
}
if( verbose )
mbedtls_printf( "\n" );
@@ -996,8 +987,7 @@ int mbedtls_aria_self_test( int verbose )
j = 0;
mbedtls_aria_crypt_cfb128( &ctx, MBEDTLS_ARIA_ENCRYPT, 48, &j, iv,
aria_test2_pt, buf );
- if( memcmp( buf, aria_test2_cfb_ct[i], 48 ) != 0 )
- ARIA_SELF_TEST_IF_FAIL;
+ ARIA_SELF_TEST_ASSERT( memcmp( buf, aria_test2_cfb_ct[i], 48 ) != 0 );
/* Test CFB decryption */
if( verbose )
@@ -1008,8 +998,7 @@ int mbedtls_aria_self_test( int verbose )
j = 0;
mbedtls_aria_crypt_cfb128( &ctx, MBEDTLS_ARIA_DECRYPT, 48, &j,
iv, aria_test2_cfb_ct[i], buf );
- if( memcmp( buf, aria_test2_pt, 48 ) != 0 )
- ARIA_SELF_TEST_IF_FAIL;
+ ARIA_SELF_TEST_ASSERT( memcmp( buf, aria_test2_pt, 48 ) != 0 );
}
if( verbose )
mbedtls_printf( "\n" );
@@ -1027,8 +1016,7 @@ int mbedtls_aria_self_test( int verbose )
j = 0;
mbedtls_aria_crypt_ctr( &ctx, 48, &j, iv, blk,
aria_test2_pt, buf );
- if( memcmp( buf, aria_test2_ctr_ct[i], 48 ) != 0 )
- ARIA_SELF_TEST_IF_FAIL;
+ ARIA_SELF_TEST_ASSERT( memcmp( buf, aria_test2_ctr_ct[i], 48 ) != 0 );
/* Test CTR decryption */
if( verbose )
@@ -1039,8 +1027,7 @@ int mbedtls_aria_self_test( int verbose )
j = 0;
mbedtls_aria_crypt_ctr( &ctx, 48, &j, iv, blk,
aria_test2_ctr_ct[i], buf );
- if( memcmp( buf, aria_test2_pt, 48 ) != 0 )
- ARIA_SELF_TEST_IF_FAIL;
+ ARIA_SELF_TEST_ASSERT( memcmp( buf, aria_test2_pt, 48 ) != 0 );
}
if( verbose )
mbedtls_printf( "\n" );
diff --git a/thirdparty/mbedtls/library/asn1parse.c b/thirdparty/mbedtls/library/asn1parse.c
index 22747d3ba4..bf97585289 100644
--- a/thirdparty/mbedtls/library/asn1parse.c
+++ b/thirdparty/mbedtls/library/asn1parse.c
@@ -31,13 +31,7 @@
#include "mbedtls/bignum.h"
#endif
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdlib.h>
-#define mbedtls_calloc calloc
-#define mbedtls_free free
-#endif
/*
* ASN.1 DER decoding routines
diff --git a/thirdparty/mbedtls/library/asn1write.c b/thirdparty/mbedtls/library/asn1write.c
index afa26a6be9..4b59927cbb 100644
--- a/thirdparty/mbedtls/library/asn1write.c
+++ b/thirdparty/mbedtls/library/asn1write.c
@@ -26,13 +26,7 @@
#include <string.h>
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdlib.h>
-#define mbedtls_calloc calloc
-#define mbedtls_free free
-#endif
int mbedtls_asn1_write_len( unsigned char **p, unsigned char *start, size_t len )
{
@@ -78,9 +72,11 @@ int mbedtls_asn1_write_len( unsigned char **p, unsigned char *start, size_t len
return( 4 );
}
+ int len_is_valid = 1;
#if SIZE_MAX > 0xFFFFFFFF
- if( len <= 0xFFFFFFFF )
+ len_is_valid = ( len <= 0xFFFFFFFF );
#endif
+ if( len_is_valid )
{
if( *p - start < 5 )
return( MBEDTLS_ERR_ASN1_BUF_TOO_SMALL );
@@ -93,9 +89,7 @@ int mbedtls_asn1_write_len( unsigned char **p, unsigned char *start, size_t len
return( 5 );
}
-#if SIZE_MAX > 0xFFFFFFFF
return( MBEDTLS_ERR_ASN1_INVALID_LENGTH );
-#endif
}
int mbedtls_asn1_write_tag( unsigned char **p, unsigned char *start, unsigned char tag )
diff --git a/thirdparty/mbedtls/library/base64.c b/thirdparty/mbedtls/library/base64.c
index 83daa0bcc6..9021a041bb 100644
--- a/thirdparty/mbedtls/library/base64.c
+++ b/thirdparty/mbedtls/library/base64.c
@@ -28,12 +28,7 @@
#if defined(MBEDTLS_SELF_TEST)
#include <string.h>
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#define mbedtls_printf printf
-#endif /* MBEDTLS_PLATFORM_C */
#endif /* MBEDTLS_SELF_TEST */
#define BASE64_SIZE_T_MAX ( (size_t) -1 ) /* SIZE_T_MAX is not standard */
diff --git a/thirdparty/mbedtls/library/bignum.c b/thirdparty/mbedtls/library/bignum.c
index 32578e2c68..37193f55a8 100644
--- a/thirdparty/mbedtls/library/bignum.c
+++ b/thirdparty/mbedtls/library/bignum.c
@@ -46,15 +46,7 @@
#include <limits.h>
#include <string.h>
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#include <stdlib.h>
-#define mbedtls_printf printf
-#define mbedtls_calloc calloc
-#define mbedtls_free free
-#endif
#define MPI_VALIDATE_RET( cond ) \
MBEDTLS_INTERNAL_VALIDATE_RET( cond, MBEDTLS_ERR_MPI_BAD_INPUT_DATA )
@@ -270,6 +262,17 @@ void mbedtls_mpi_swap( mbedtls_mpi *X, mbedtls_mpi *Y )
memcpy( Y, &T, sizeof( mbedtls_mpi ) );
}
+static inline mbedtls_mpi_uint mpi_sint_abs( mbedtls_mpi_sint z )
+{
+ if( z >= 0 )
+ return( z );
+ /* Take care to handle the most negative value (-2^(biL-1)) correctly.
+ * A naive -z would have undefined behavior.
+ * Write this in a way that makes popular compilers happy (GCC, Clang,
+ * MSVC). */
+ return( (mbedtls_mpi_uint) 0 - (mbedtls_mpi_uint) z );
+}
+
/*
* Set value from integer
*/
@@ -281,7 +284,7 @@ int mbedtls_mpi_lset( mbedtls_mpi *X, mbedtls_mpi_sint z )
MBEDTLS_MPI_CHK( mbedtls_mpi_grow( X, 1 ) );
memset( X->p, 0, X->n * ciL );
- X->p[0] = ( z < 0 ) ? -z : z;
+ X->p[0] = mpi_sint_abs( z );
X->s = ( z < 0 ) ? -1 : 1;
cleanup:
@@ -1101,7 +1104,7 @@ int mbedtls_mpi_cmp_int( const mbedtls_mpi *X, mbedtls_mpi_sint z )
mbedtls_mpi_uint p[1];
MPI_VALIDATE_RET( X != NULL );
- *p = ( z < 0 ) ? -z : z;
+ *p = mpi_sint_abs( z );
Y.s = ( z < 0 ) ? -1 : 1;
Y.n = 1;
Y.p = p;
@@ -1138,6 +1141,11 @@ int mbedtls_mpi_add_abs( mbedtls_mpi *X, const mbedtls_mpi *A, const mbedtls_mpi
if( B->p[j - 1] != 0 )
break;
+ /* Exit early to avoid undefined behavior on NULL+0 when X->n == 0
+ * and B is 0 (of any size). */
+ if( j == 0 )
+ return( 0 );
+
MBEDTLS_MPI_CHK( mbedtls_mpi_grow( X, j ) );
o = B->p; p = X->p; c = 0;
@@ -1257,10 +1265,12 @@ cleanup:
return( ret );
}
-/*
- * Signed addition: X = A + B
+/* Common function for signed addition and subtraction.
+ * Calculate A + B * flip_B where flip_B is 1 or -1.
*/
-int mbedtls_mpi_add_mpi( mbedtls_mpi *X, const mbedtls_mpi *A, const mbedtls_mpi *B )
+static int add_sub_mpi( mbedtls_mpi *X,
+ const mbedtls_mpi *A, const mbedtls_mpi *B,
+ int flip_B )
{
int ret, s;
MPI_VALIDATE_RET( X != NULL );
@@ -1268,16 +1278,21 @@ int mbedtls_mpi_add_mpi( mbedtls_mpi *X, const mbedtls_mpi *A, const mbedtls_mpi
MPI_VALIDATE_RET( B != NULL );
s = A->s;
- if( A->s * B->s < 0 )
+ if( A->s * B->s * flip_B < 0 )
{
- if( mbedtls_mpi_cmp_abs( A, B ) >= 0 )
+ int cmp = mbedtls_mpi_cmp_abs( A, B );
+ if( cmp >= 0 )
{
MBEDTLS_MPI_CHK( mbedtls_mpi_sub_abs( X, A, B ) );
- X->s = s;
+ /* If |A| = |B|, the result is 0 and we must set the sign bit
+ * to +1 regardless of which of A or B was negative. Otherwise,
+ * since |A| > |B|, the sign is the sign of A. */
+ X->s = cmp == 0 ? 1 : s;
}
else
{
MBEDTLS_MPI_CHK( mbedtls_mpi_sub_abs( X, B, A ) );
+ /* Since |A| < |B|, the sign is the opposite of A. */
X->s = -s;
}
}
@@ -1293,38 +1308,19 @@ cleanup:
}
/*
+ * Signed addition: X = A + B
+ */
+int mbedtls_mpi_add_mpi( mbedtls_mpi *X, const mbedtls_mpi *A, const mbedtls_mpi *B )
+{
+ return( add_sub_mpi( X, A, B, 1 ) );
+}
+
+/*
* Signed subtraction: X = A - B
*/
int mbedtls_mpi_sub_mpi( mbedtls_mpi *X, const mbedtls_mpi *A, const mbedtls_mpi *B )
{
- int ret, s;
- MPI_VALIDATE_RET( X != NULL );
- MPI_VALIDATE_RET( A != NULL );
- MPI_VALIDATE_RET( B != NULL );
-
- s = A->s;
- if( A->s * B->s > 0 )
- {
- if( mbedtls_mpi_cmp_abs( A, B ) >= 0 )
- {
- MBEDTLS_MPI_CHK( mbedtls_mpi_sub_abs( X, A, B ) );
- X->s = s;
- }
- else
- {
- MBEDTLS_MPI_CHK( mbedtls_mpi_sub_abs( X, B, A ) );
- X->s = -s;
- }
- }
- else
- {
- MBEDTLS_MPI_CHK( mbedtls_mpi_add_abs( X, A, B ) );
- X->s = s;
- }
-
-cleanup:
-
- return( ret );
+ return( add_sub_mpi( X, A, B, -1 ) );
}
/*
@@ -1337,7 +1333,7 @@ int mbedtls_mpi_add_int( mbedtls_mpi *X, const mbedtls_mpi *A, mbedtls_mpi_sint
MPI_VALIDATE_RET( X != NULL );
MPI_VALIDATE_RET( A != NULL );
- p[0] = ( b < 0 ) ? -b : b;
+ p[0] = mpi_sint_abs( b );
B.s = ( b < 0 ) ? -1 : 1;
B.n = 1;
B.p = p;
@@ -1355,7 +1351,7 @@ int mbedtls_mpi_sub_int( mbedtls_mpi *X, const mbedtls_mpi *A, mbedtls_mpi_sint
MPI_VALIDATE_RET( X != NULL );
MPI_VALIDATE_RET( A != NULL );
- p[0] = ( b < 0 ) ? -b : b;
+ p[0] = mpi_sint_abs( b );
B.s = ( b < 0 ) ? -1 : 1;
B.n = 1;
B.p = p;
@@ -1776,7 +1772,7 @@ int mbedtls_mpi_div_int( mbedtls_mpi *Q, mbedtls_mpi *R,
mbedtls_mpi_uint p[1];
MPI_VALIDATE_RET( A != NULL );
- p[0] = ( b < 0 ) ? -b : b;
+ p[0] = mpi_sint_abs( b );
B.s = ( b < 0 ) ? -1 : 1;
B.n = 1;
B.p = p;
@@ -2009,11 +2005,11 @@ int mbedtls_mpi_exp_mod( mbedtls_mpi *X, const mbedtls_mpi *A,
mbedtls_mpi *prec_RR )
{
int ret = MBEDTLS_ERR_ERROR_CORRUPTION_DETECTED;
- size_t wbits, wsize, one = 1;
+ size_t window_bitsize;
size_t i, j, nblimbs;
size_t bufsize, nbits;
mbedtls_mpi_uint ei, mm, state;
- mbedtls_mpi RR, T, W[ 1 << MBEDTLS_MPI_WINDOW_SIZE ], WW, Apos;
+ mbedtls_mpi RR, T, W[ (size_t) 1 << MBEDTLS_MPI_WINDOW_SIZE ], WW, Apos;
int neg;
MPI_VALIDATE_RET( X != NULL );
@@ -2042,21 +2038,59 @@ int mbedtls_mpi_exp_mod( mbedtls_mpi *X, const mbedtls_mpi *A,
i = mbedtls_mpi_bitlen( E );
- wsize = ( i > 671 ) ? 6 : ( i > 239 ) ? 5 :
+ window_bitsize = ( i > 671 ) ? 6 : ( i > 239 ) ? 5 :
( i > 79 ) ? 4 : ( i > 23 ) ? 3 : 1;
#if( MBEDTLS_MPI_WINDOW_SIZE < 6 )
- if( wsize > MBEDTLS_MPI_WINDOW_SIZE )
- wsize = MBEDTLS_MPI_WINDOW_SIZE;
+ if( window_bitsize > MBEDTLS_MPI_WINDOW_SIZE )
+ window_bitsize = MBEDTLS_MPI_WINDOW_SIZE;
#endif
+ const size_t w_table_used_size = (size_t) 1 << window_bitsize;
+
+ /*
+ * This function is not constant-trace: its memory accesses depend on the
+ * exponent value. To defend against timing attacks, callers (such as RSA
+ * and DHM) should use exponent blinding. However this is not enough if the
+ * adversary can find the exponent in a single trace, so this function
+ * takes extra precautions against adversaries who can observe memory
+ * access patterns.
+ *
+ * This function performs a series of multiplications by table elements and
+ * squarings, and we want to prevent the adversary from finding out which
+ * table element was used, and from distinguishing between multiplications
+ * and squarings. Firstly, when multiplying by an element of the window
+ * W[i], we do a constant-trace table lookup to obfuscate i. This leaves
+ * squarings as having a different memory access pattern from other
+ * multiplications. So secondly, we put the accumulator X in the table as
+ * well, and also do a constant-trace table lookup to multiply by X.
+ *
+ * This way, all multiplications take the form of a lookup-and-multiply.
+ * The number of lookup-and-multiply operations inside each iteration of
+ * the main loop still depends on the bits of the exponent, but since the
+ * other operations in the loop don't have an easily recognizable memory
+ * trace, an adversary is unlikely to be able to observe the exact
+ * patterns.
+ *
+ * An adversary may still be able to recover the exponent if they can
+ * observe both memory accesses and branches. However, branch prediction
+ * exploitation typically requires many traces of execution over the same
+ * data, which is defeated by randomized blinding.
+ *
+ * To achieve this, we make a copy of X and we use the table entry in each
+ * calculation from this point on.
+ */
+ const size_t x_index = 0;
+ mbedtls_mpi_init( &W[x_index] );
+ mbedtls_mpi_copy( &W[x_index], X );
+
j = N->n + 1;
/* All W[i] and X must have at least N->n limbs for the mpi_montmul()
* and mpi_montred() calls later. Here we ensure that W[1] and X are
* large enough, and later we'll grow other W[i] to the same length.
* They must not be shrunk midway through this function!
*/
- MBEDTLS_MPI_CHK( mbedtls_mpi_grow( X, j ) );
+ MBEDTLS_MPI_CHK( mbedtls_mpi_grow( &W[x_index], j ) );
MBEDTLS_MPI_CHK( mbedtls_mpi_grow( &W[1], j ) );
MBEDTLS_MPI_CHK( mbedtls_mpi_grow( &T, j * 2 ) );
@@ -2105,28 +2139,36 @@ int mbedtls_mpi_exp_mod( mbedtls_mpi *X, const mbedtls_mpi *A,
mpi_montmul( &W[1], &RR, N, mm, &T );
/*
- * X = R^2 * R^-1 mod N = R mod N
+ * W[x_index] = R^2 * R^-1 mod N = R mod N
*/
- MBEDTLS_MPI_CHK( mbedtls_mpi_copy( X, &RR ) );
- mpi_montred( X, N, mm, &T );
+ MBEDTLS_MPI_CHK( mbedtls_mpi_copy( &W[x_index], &RR ) );
+ mpi_montred( &W[x_index], N, mm, &T );
+
- if( wsize > 1 )
+ if( window_bitsize > 1 )
{
/*
- * W[1 << (wsize - 1)] = W[1] ^ (wsize - 1)
+ * W[i] = W[1] ^ i
+ *
+ * The first bit of the sliding window is always 1 and therefore we
+ * only need to store the second half of the table.
+ *
+ * (There are two special elements in the table: W[0] for the
+ * accumulator/result and W[1] for A in Montgomery form. Both of these
+ * are already set at this point.)
*/
- j = one << ( wsize - 1 );
+ j = w_table_used_size / 2;
MBEDTLS_MPI_CHK( mbedtls_mpi_grow( &W[j], N->n + 1 ) );
MBEDTLS_MPI_CHK( mbedtls_mpi_copy( &W[j], &W[1] ) );
- for( i = 0; i < wsize - 1; i++ )
+ for( i = 0; i < window_bitsize - 1; i++ )
mpi_montmul( &W[j], &W[j], N, mm, &T );
/*
* W[i] = W[i - 1] * W[1]
*/
- for( i = j + 1; i < ( one << wsize ); i++ )
+ for( i = j + 1; i < w_table_used_size; i++ )
{
MBEDTLS_MPI_CHK( mbedtls_mpi_grow( &W[i], N->n + 1 ) );
MBEDTLS_MPI_CHK( mbedtls_mpi_copy( &W[i], &W[i - 1] ) );
@@ -2138,7 +2180,7 @@ int mbedtls_mpi_exp_mod( mbedtls_mpi *X, const mbedtls_mpi *A,
nblimbs = E->n;
bufsize = 0;
nbits = 0;
- wbits = 0;
+ size_t exponent_bits_in_window = 0;
state = 0;
while( 1 )
@@ -2166,9 +2208,10 @@ int mbedtls_mpi_exp_mod( mbedtls_mpi *X, const mbedtls_mpi *A,
if( ei == 0 && state == 1 )
{
/*
- * out of window, square X
+ * out of window, square W[x_index]
*/
- mpi_montmul( X, X, N, mm, &T );
+ MBEDTLS_MPI_CHK( mpi_select( &WW, W, w_table_used_size, x_index ) );
+ mpi_montmul( &W[x_index], &WW, N, mm, &T );
continue;
}
@@ -2178,25 +2221,30 @@ int mbedtls_mpi_exp_mod( mbedtls_mpi *X, const mbedtls_mpi *A,
state = 2;
nbits++;
- wbits |= ( ei << ( wsize - nbits ) );
+ exponent_bits_in_window |= ( ei << ( window_bitsize - nbits ) );
- if( nbits == wsize )
+ if( nbits == window_bitsize )
{
/*
- * X = X^wsize R^-1 mod N
+ * W[x_index] = W[x_index]^window_bitsize R^-1 mod N
*/
- for( i = 0; i < wsize; i++ )
- mpi_montmul( X, X, N, mm, &T );
+ for( i = 0; i < window_bitsize; i++ )
+ {
+ MBEDTLS_MPI_CHK( mpi_select( &WW, W, w_table_used_size,
+ x_index ) );
+ mpi_montmul( &W[x_index], &WW, N, mm, &T );
+ }
/*
- * X = X * W[wbits] R^-1 mod N
+ * W[x_index] = W[x_index] * W[exponent_bits_in_window] R^-1 mod N
*/
- MBEDTLS_MPI_CHK( mpi_select( &WW, W, (size_t) 1 << wsize, wbits ) );
- mpi_montmul( X, &WW, N, mm, &T );
+ MBEDTLS_MPI_CHK( mpi_select( &WW, W, w_table_used_size,
+ exponent_bits_in_window ) );
+ mpi_montmul( &W[x_index], &WW, N, mm, &T );
state--;
nbits = 0;
- wbits = 0;
+ exponent_bits_in_window = 0;
}
}
@@ -2205,31 +2253,45 @@ int mbedtls_mpi_exp_mod( mbedtls_mpi *X, const mbedtls_mpi *A,
*/
for( i = 0; i < nbits; i++ )
{
- mpi_montmul( X, X, N, mm, &T );
+ MBEDTLS_MPI_CHK( mpi_select( &WW, W, w_table_used_size, x_index ) );
+ mpi_montmul( &W[x_index], &WW, N, mm, &T );
- wbits <<= 1;
+ exponent_bits_in_window <<= 1;
- if( ( wbits & ( one << wsize ) ) != 0 )
- mpi_montmul( X, &W[1], N, mm, &T );
+ if( ( exponent_bits_in_window & ( (size_t) 1 << window_bitsize ) ) != 0 )
+ {
+ MBEDTLS_MPI_CHK( mpi_select( &WW, W, w_table_used_size, 1 ) );
+ mpi_montmul( &W[x_index], &WW, N, mm, &T );
+ }
}
/*
- * X = A^E * R * R^-1 mod N = A^E mod N
+ * W[x_index] = A^E * R * R^-1 mod N = A^E mod N
*/
- mpi_montred( X, N, mm, &T );
+ mpi_montred( &W[x_index], N, mm, &T );
if( neg && E->n != 0 && ( E->p[0] & 1 ) != 0 )
{
- X->s = -1;
- MBEDTLS_MPI_CHK( mbedtls_mpi_add_mpi( X, N, X ) );
+ W[x_index].s = -1;
+ MBEDTLS_MPI_CHK( mbedtls_mpi_add_mpi( &W[x_index], N, &W[x_index] ) );
}
+ /*
+ * Load the result in the output variable.
+ */
+ mbedtls_mpi_copy( X, &W[x_index] );
+
cleanup:
- for( i = ( one << ( wsize - 1 ) ); i < ( one << wsize ); i++ )
+ /* The first bit of the sliding window is always 1 and therefore the first
+ * half of the table was unused. */
+ for( i = w_table_used_size/2; i < w_table_used_size; i++ )
mbedtls_mpi_free( &W[i] );
- mbedtls_mpi_free( &W[1] ); mbedtls_mpi_free( &T ); mbedtls_mpi_free( &Apos );
+ mbedtls_mpi_free( &W[x_index] );
+ mbedtls_mpi_free( &W[1] );
+ mbedtls_mpi_free( &T );
+ mbedtls_mpi_free( &Apos );
mbedtls_mpi_free( &WW );
if( prec_RR == NULL || prec_RR->p == NULL )
@@ -2862,7 +2924,7 @@ int mbedtls_mpi_gen_prime( mbedtls_mpi *X, size_t nbits, int flags,
else
{
/*
- * An necessary condition for Y and X = 2Y + 1 to be prime
+ * A necessary condition for Y and X = 2Y + 1 to be prime
* is X = 2 mod 3 (which is equivalent to Y = 2 mod 3).
* Make sure it is satisfied, while keeping X = 3 mod 4
*/
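The long comment added to mbedtls_mpi_exp_mod() above explains why the accumulator is kept in the window table W[] and read back through a constant-trace lookup. As a rough illustration of what such a lookup looks like, here is a standalone sketch over fixed-width words rather than mbedtls_mpi values (ct_select is a made-up name; mbedtls does this through mpi_select() and its constant-time helpers):

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Constant-trace table lookup: touch every entry of the table and
     * keep only the one at the secret index, so the memory access
     * pattern does not depend on the index. */
    static void ct_select(uint32_t *out, const uint32_t *table,
                          size_t count, size_t secret_index)
    {
        uint32_t result = 0;
        for (size_t i = 0; i < count; i++) {
            /* mask is all-ones when i == secret_index, all-zeros otherwise */
            uint32_t mask = (uint32_t) 0 - (uint32_t) (i == secret_index);
            result |= table[i] & mask;
        }
        *out = result;
    }

    int main(void)
    {
        uint32_t w[4] = { 11, 22, 33, 44 };
        uint32_t x;
        ct_select(&x, w, 4, 2);
        printf("%u\n", (unsigned) x); /* prints 33 */
        return 0;
    }

A production implementation also evaluates the `i == secret_index` comparison without branches; this sketch relies on the compiler emitting a branchless set instruction, which is not guaranteed.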
diff --git a/thirdparty/mbedtls/library/camellia.c b/thirdparty/mbedtls/library/camellia.c
index 29d730ab53..e90cd7f134 100644
--- a/thirdparty/mbedtls/library/camellia.c
+++ b/thirdparty/mbedtls/library/camellia.c
@@ -32,14 +32,7 @@
#include <string.h>
-#if defined(MBEDTLS_SELF_TEST)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#define mbedtls_printf printf
-#endif /* MBEDTLS_PLATFORM_C */
-#endif /* MBEDTLS_SELF_TEST */
#if !defined(MBEDTLS_CAMELLIA_ALT)
diff --git a/thirdparty/mbedtls/library/ccm.c b/thirdparty/mbedtls/library/ccm.c
index a21a37f55f..e0d43334f9 100644
--- a/thirdparty/mbedtls/library/ccm.c
+++ b/thirdparty/mbedtls/library/ccm.c
@@ -36,14 +36,7 @@
#include <string.h>
-#if defined(MBEDTLS_SELF_TEST) && defined(MBEDTLS_AES_C)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#define mbedtls_printf printf
-#endif /* MBEDTLS_PLATFORM_C */
-#endif /* MBEDTLS_SELF_TEST && MBEDTLS_AES_C */
#if !defined(MBEDTLS_CCM_ALT)
diff --git a/thirdparty/mbedtls/library/chacha20.c b/thirdparty/mbedtls/library/chacha20.c
index 658f046901..bd0701482c 100644
--- a/thirdparty/mbedtls/library/chacha20.c
+++ b/thirdparty/mbedtls/library/chacha20.c
@@ -32,22 +32,10 @@
#include <stddef.h>
#include <string.h>
-#if defined(MBEDTLS_SELF_TEST)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#define mbedtls_printf printf
-#endif /* MBEDTLS_PLATFORM_C */
-#endif /* MBEDTLS_SELF_TEST */
#if !defined(MBEDTLS_CHACHA20_ALT)
-#if ( defined(__ARMCC_VERSION) || defined(_MSC_VER) ) && \
- !defined(inline) && !defined(__cplusplus)
-#define inline __inline
-#endif
-
/* Parameter validation macros */
#define CHACHA20_VALIDATE_RET( cond ) \
MBEDTLS_INTERNAL_VALIDATE_RET( cond, MBEDTLS_ERR_CHACHA20_BAD_INPUT_DATA )
diff --git a/thirdparty/mbedtls/library/chachapoly.c b/thirdparty/mbedtls/library/chachapoly.c
index dc75b2030a..4adf846857 100644
--- a/thirdparty/mbedtls/library/chachapoly.c
+++ b/thirdparty/mbedtls/library/chachapoly.c
@@ -28,14 +28,7 @@
#include <string.h>
-#if defined(MBEDTLS_SELF_TEST)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#define mbedtls_printf printf
-#endif /* MBEDTLS_PLATFORM_C */
-#endif /* MBEDTLS_SELF_TEST */
#if !defined(MBEDTLS_CHACHAPOLY_ALT)
diff --git a/thirdparty/mbedtls/library/cipher.c b/thirdparty/mbedtls/library/cipher.c
index f3b4bd29ce..67e3274587 100644
--- a/thirdparty/mbedtls/library/cipher.c
+++ b/thirdparty/mbedtls/library/cipher.c
@@ -63,12 +63,7 @@
#include "mbedtls/nist_kw.h"
#endif
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#define mbedtls_calloc calloc
-#define mbedtls_free free
-#endif
#define CIPHER_VALIDATE_RET( cond ) \
MBEDTLS_INTERNAL_VALIDATE_RET( cond, MBEDTLS_ERR_CIPHER_BAD_INPUT_DATA )
diff --git a/thirdparty/mbedtls/library/cipher_wrap.c b/thirdparty/mbedtls/library/cipher_wrap.c
index 57eb3cb67f..c76bdcc0f8 100644
--- a/thirdparty/mbedtls/library/cipher_wrap.c
+++ b/thirdparty/mbedtls/library/cipher_wrap.c
@@ -76,13 +76,7 @@
#include <string.h>
#endif
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdlib.h>
-#define mbedtls_calloc calloc
-#define mbedtls_free free
-#endif
#if defined(MBEDTLS_GCM_C)
/* shared by all GCM ciphers */
diff --git a/thirdparty/mbedtls/library/common.h b/thirdparty/mbedtls/library/common.h
index c06472418d..1663d50226 100644
--- a/thirdparty/mbedtls/library/common.h
+++ b/thirdparty/mbedtls/library/common.h
@@ -29,8 +29,15 @@
#include "mbedtls/config.h"
#endif
+#include <stddef.h>
#include <stdint.h>
+/* Define `inline` on some non-C99-compliant compilers. */
+#if ( defined(__ARMCC_VERSION) || defined(_MSC_VER) ) && \
+ !defined(inline) && !defined(__cplusplus)
+#define inline __inline
+#endif
+
/** Helper to define a function as static except when building invasive tests.
*
* If a function is only used inside its own source file and should be
@@ -52,6 +59,44 @@
#define MBEDTLS_STATIC_TESTABLE static
#endif
+/** Return an offset into a buffer.
+ *
+ * This is just the addition of an offset to a pointer, except that this
+ * function also accepts an offset of 0 into a buffer whose pointer is null.
+ * (`p + n` has undefined behavior when `p` is null, even when `n == 0`.
+ * A null pointer is a valid buffer pointer when the size is 0, for example
+ * as the result of `malloc(0)` on some platforms.)
+ *
+ * \param p Pointer to a buffer of at least n bytes.
+ * This may be \p NULL if \p n is zero.
+ * \param n An offset in bytes.
+ * \return Pointer to offset \p n in the buffer \p p.
+ * Note that this is only a valid pointer if the size of the
+ * buffer is at least \p n + 1.
+ */
+static inline unsigned char *mbedtls_buffer_offset(
+ unsigned char *p, size_t n )
+{
+ return( p == NULL ? NULL : p + n );
+}
+
+/** Return an offset into a read-only buffer.
+ *
+ * Similar to mbedtls_buffer_offset(), but for const pointers.
+ *
+ * \param p Pointer to a buffer of at least n bytes.
+ * This may be \p NULL if \p n is zero.
+ * \param n An offset in bytes.
+ * \return Pointer to offset \p n in the buffer \p p.
+ * Note that this is only a valid pointer if the size of the
+ * buffer is at least \p n + 1.
+ */
+static inline const unsigned char *mbedtls_buffer_offset_const(
+ const unsigned char *p, size_t n )
+{
+ return( p == NULL ? NULL : p + n );
+}
+
/** Byte Reading Macros
*
* Given a multi-byte integer \p x, MBEDTLS_BYTE_n retrieves the n-th
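The mbedtls_buffer_offset() helpers added to common.h exist because `p + n` is undefined behavior when `p` is null, even for `n == 0`, while a null pointer is a perfectly legitimate zero-length buffer. A minimal sketch of the same guard applied to a copy routine, under hypothetical names (copy_at_offset is not an mbedtls API):

    #include <stddef.h>
    #include <string.h>

    static unsigned char *buffer_offset(unsigned char *p, size_t n)
    {
        /* Never evaluate p + n when p is NULL; NULL is only ever
         * paired with n == 0 by the callers. */
        return p == NULL ? NULL : p + n;
    }

    /* Copy len bytes of src into dst starting at dst_offset.  Either
     * buffer may be NULL when the corresponding length is zero. */
    static void copy_at_offset(unsigned char *dst, size_t dst_offset,
                               const unsigned char *src, size_t len)
    {
        if (len == 0)
            return; /* memcpy with NULL operands is also undefined */
        memcpy(buffer_offset(dst, dst_offset), src, len);
    }

    int main(void)
    {
        copy_at_offset(NULL, 0, NULL, 0); /* well-defined no-op */
        return 0;
    }

The same reasoning applies to memcpy itself, whose pointer arguments must be valid even when the length is zero.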
diff --git a/thirdparty/mbedtls/library/constant_time.c b/thirdparty/mbedtls/library/constant_time.c
index e276d23ca0..2401b0434a 100644
--- a/thirdparty/mbedtls/library/constant_time.c
+++ b/thirdparty/mbedtls/library/constant_time.c
@@ -81,7 +81,7 @@ unsigned mbedtls_ct_uint_mask( unsigned value )
#endif
}
-#if defined(MBEDTLS_SSL_SOME_SUITES_USE_TLS_CBC)
+#if defined(MBEDTLS_SSL_SOME_MODES_USE_MAC)
size_t mbedtls_ct_size_mask( size_t value )
{
@@ -97,7 +97,7 @@ size_t mbedtls_ct_size_mask( size_t value )
#endif
}
-#endif /* MBEDTLS_SSL_SOME_SUITES_USE_TLS_CBC */
+#endif /* MBEDTLS_SSL_SOME_MODES_USE_MAC */
#if defined(MBEDTLS_BIGNUM_C)
@@ -272,7 +272,7 @@ unsigned mbedtls_ct_uint_if( unsigned condition,
* \note if1 and if0 must be either 1 or -1, otherwise the result
* is undefined.
*
- * \param condition Condition to test.
+ * \param condition Condition to test; must be either 0 or 1.
* \param if1 The first sign; must be either +1 or -1.
* \param if0 The second sign; must be either +1 or -1.
*
@@ -404,8 +404,7 @@ static void mbedtls_ct_mem_move_to_left( void *start,
#endif /* MBEDTLS_PKCS1_V15 && MBEDTLS_RSA_C && ! MBEDTLS_RSA_ALT */
-#if defined(MBEDTLS_SSL_SOME_SUITES_USE_TLS_CBC)
-
+#if defined(MBEDTLS_SSL_SOME_MODES_USE_MAC)
void mbedtls_ct_memcpy_if_eq( unsigned char *dest,
const unsigned char *src,
size_t len,
@@ -527,7 +526,7 @@ cleanup:
return( ret );
}
-#endif /* MBEDTLS_SSL_SOME_SUITES_USE_TLS_CBC */
+#endif /* MBEDTLS_SSL_SOME_MODES_USE_MAC */
#if defined(MBEDTLS_BIGNUM_C)
diff --git a/thirdparty/mbedtls/library/constant_time_internal.h b/thirdparty/mbedtls/library/constant_time_internal.h
index a550b38fa5..ff2d0ff92c 100644
--- a/thirdparty/mbedtls/library/constant_time_internal.h
+++ b/thirdparty/mbedtls/library/constant_time_internal.h
@@ -32,7 +32,6 @@
#include <stddef.h>
-
/** Turn a value into a mask:
* - if \p value == 0, return the all-bits 0 mask, aka 0
* - otherwise, return the all-bits 1 mask, aka (unsigned) -1
@@ -46,7 +45,7 @@
*/
unsigned mbedtls_ct_uint_mask( unsigned value );
-#if defined(MBEDTLS_SSL_SOME_SUITES_USE_TLS_CBC)
+#if defined(MBEDTLS_SSL_SOME_MODES_USE_MAC)
/** Turn a value into a mask:
* - if \p value == 0, return the all-bits 0 mask, aka 0
@@ -61,7 +60,7 @@ unsigned mbedtls_ct_uint_mask( unsigned value );
*/
size_t mbedtls_ct_size_mask( size_t value );
-#endif /* MBEDTLS_SSL_SOME_SUITES_USE_TLS_CBC */
+#endif /* MBEDTLS_SSL_SOME_MODES_USE_MAC */
#if defined(MBEDTLS_BIGNUM_C)
@@ -196,7 +195,7 @@ signed char mbedtls_ct_base64_dec_value( unsigned char c );
#endif /* MBEDTLS_BASE64_C */
-#if defined(MBEDTLS_SSL_SOME_SUITES_USE_TLS_CBC)
+#if defined(MBEDTLS_SSL_SOME_MODES_USE_MAC)
/** Conditional memcpy without branches.
*
@@ -292,7 +291,7 @@ int mbedtls_ct_hmac( mbedtls_md_context_t *ctx,
size_t max_data_len,
unsigned char *output );
-#endif /* MBEDTLS_SSL_SOME_SUITES_USE_TLS_CBC */
+#endif /* MBEDTLS_SSL_SOME_MODES_USE_MAC */
#if defined(MBEDTLS_PKCS1_V15) && defined(MBEDTLS_RSA_C) && !defined(MBEDTLS_RSA_ALT)
diff --git a/thirdparty/mbedtls/library/ctr_drbg.c b/thirdparty/mbedtls/library/ctr_drbg.c
index a00d66ce87..ed31576a7b 100644
--- a/thirdparty/mbedtls/library/ctr_drbg.c
+++ b/thirdparty/mbedtls/library/ctr_drbg.c
@@ -36,14 +36,7 @@
#include <stdio.h>
#endif
-#if defined(MBEDTLS_SELF_TEST)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#define mbedtls_printf printf
-#endif /* MBEDTLS_PLATFORM_C */
-#endif /* MBEDTLS_SELF_TEST */
/*
* CTR_DRBG context initialization
diff --git a/thirdparty/mbedtls/library/debug.c b/thirdparty/mbedtls/library/debug.c
index e1086008af..353b4bf07b 100644
--- a/thirdparty/mbedtls/library/debug.c
+++ b/thirdparty/mbedtls/library/debug.c
@@ -21,16 +21,7 @@
#if defined(MBEDTLS_DEBUG_C)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdlib.h>
-#define mbedtls_calloc calloc
-#define mbedtls_free free
-#define mbedtls_time_t time_t
-#define mbedtls_snprintf snprintf
-#define mbedtls_vsnprintf vsnprintf
-#endif
#include "mbedtls/debug.h"
#include "mbedtls/error.h"
@@ -39,11 +30,6 @@
#include <stdio.h>
#include <string.h>
-#if ( defined(__ARMCC_VERSION) || defined(_MSC_VER) ) && \
- !defined(inline) && !defined(__cplusplus)
-#define inline __inline
-#endif
-
#define DEBUG_BUF_SIZE 512
static int debug_threshold = 0;
diff --git a/thirdparty/mbedtls/library/des.c b/thirdparty/mbedtls/library/des.c
index 91d22b5d90..65f5681cf1 100644
--- a/thirdparty/mbedtls/library/des.c
+++ b/thirdparty/mbedtls/library/des.c
@@ -33,14 +33,7 @@
#include <string.h>
-#if defined(MBEDTLS_SELF_TEST)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#define mbedtls_printf printf
-#endif /* MBEDTLS_PLATFORM_C */
-#endif /* MBEDTLS_SELF_TEST */
#if !defined(MBEDTLS_DES_ALT)
diff --git a/thirdparty/mbedtls/library/dhm.c b/thirdparty/mbedtls/library/dhm.c
index 88e148bb80..4d2e33e689 100644
--- a/thirdparty/mbedtls/library/dhm.c
+++ b/thirdparty/mbedtls/library/dhm.c
@@ -43,15 +43,7 @@
#include "mbedtls/asn1.h"
#endif
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdlib.h>
-#include <stdio.h>
-#define mbedtls_printf printf
-#define mbedtls_calloc calloc
-#define mbedtls_free free
-#endif
#if !defined(MBEDTLS_DHM_ALT)
diff --git a/thirdparty/mbedtls/library/ecdh.c b/thirdparty/mbedtls/library/ecdh.c
index 60c6e429de..724c938a71 100644
--- a/thirdparty/mbedtls/library/ecdh.c
+++ b/thirdparty/mbedtls/library/ecdh.c
@@ -77,10 +77,12 @@ static int ecdh_gen_public_restartable( mbedtls_ecp_group *grp,
{
int ret = MBEDTLS_ERR_ERROR_CORRUPTION_DETECTED;
- /* If multiplication is in progress, we already generated a privkey */
+ int restarting = 0;
#if defined(MBEDTLS_ECP_RESTARTABLE)
- if( rs_ctx == NULL || rs_ctx->rsm == NULL )
+ restarting = ( rs_ctx != NULL && rs_ctx->rsm != NULL );
#endif
+ /* If multiplication is in progress, we already generated a privkey */
+ if( !restarting )
MBEDTLS_MPI_CHK( mbedtls_ecp_gen_privkey( grp, d, f_rng, p_rng ) );
MBEDTLS_MPI_CHK( mbedtls_ecp_mul_restartable( grp, Q, d, &grp->G,
diff --git a/thirdparty/mbedtls/library/ecdsa.c b/thirdparty/mbedtls/library/ecdsa.c
index 640eb24a26..4bae6a93fd 100644
--- a/thirdparty/mbedtls/library/ecdsa.c
+++ b/thirdparty/mbedtls/library/ecdsa.c
@@ -36,13 +36,7 @@
#include "mbedtls/hmac_drbg.h"
#endif
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdlib.h>
-#define mbedtls_calloc calloc
-#define mbedtls_free free
-#endif
#include "mbedtls/platform_util.h"
#include "mbedtls/error.h"
diff --git a/thirdparty/mbedtls/library/ecjpake.c b/thirdparty/mbedtls/library/ecjpake.c
index 0b9bffb93e..fe0b9d93e9 100644
--- a/thirdparty/mbedtls/library/ecjpake.c
+++ b/thirdparty/mbedtls/library/ecjpake.c
@@ -794,12 +794,7 @@ cleanup:
#if defined(MBEDTLS_SELF_TEST)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#define mbedtls_printf printf
-#endif
#if !defined(MBEDTLS_ECP_DP_SECP256R1_ENABLED) || \
!defined(MBEDTLS_SHA256_C)
diff --git a/thirdparty/mbedtls/library/ecp.c b/thirdparty/mbedtls/library/ecp.c
index 890f364a08..ad19e05fb2 100644
--- a/thirdparty/mbedtls/library/ecp.c
+++ b/thirdparty/mbedtls/library/ecp.c
@@ -90,15 +90,7 @@
#define ECP_VALIDATE( cond ) \
MBEDTLS_INTERNAL_VALIDATE( cond )
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdlib.h>
-#include <stdio.h>
-#define mbedtls_printf printf
-#define mbedtls_calloc calloc
-#define mbedtls_free free
-#endif
#include "mbedtls/ecp_internal.h"
@@ -112,11 +104,6 @@
#endif
#endif /* MBEDTLS_ECP_NO_INTERNAL_RNG */
-#if ( defined(__ARMCC_VERSION) || defined(_MSC_VER) ) && \
- !defined(inline) && !defined(__cplusplus)
-#define inline __inline
-#endif
-
#if defined(MBEDTLS_SELF_TEST)
/*
* Counts of point addition and doubling, and field multiplications.
@@ -2056,9 +2043,13 @@ static int ecp_mul_comb_core( const mbedtls_ecp_group *grp, mbedtls_ecp_point *R
i = d;
MBEDTLS_MPI_CHK( ecp_select_comb( grp, R, T, T_size, x[i] ) );
MBEDTLS_MPI_CHK( mbedtls_mpi_lset( &R->Z, 1 ) );
+
+ int have_rng = 1;
#if defined(MBEDTLS_ECP_NO_INTERNAL_RNG)
- if( f_rng != 0 )
+ if( f_rng == NULL )
+ have_rng = 0;
#endif
+ if( have_rng )
MBEDTLS_MPI_CHK( ecp_randomize_jac( grp, R, f_rng, p_rng ) );
}
@@ -2192,9 +2183,12 @@ final_norm:
*
* Avoid the leak by randomizing coordinates before we normalize them.
*/
+ int have_rng = 1;
#if defined(MBEDTLS_ECP_NO_INTERNAL_RNG)
- if( f_rng != 0 )
+ if( f_rng == NULL )
+ have_rng = 0;
#endif
+ if( have_rng )
MBEDTLS_MPI_CHK( ecp_randomize_jac( grp, RR, f_rng, p_rng ) );
MBEDTLS_MPI_CHK( ecp_normalize_jac( grp, RR ) );
@@ -2403,12 +2397,14 @@ cleanup:
mbedtls_free( T );
}
- /* don't free R while in progress in case R == P */
+ /* prevent caller from using invalid value */
+ int should_free_R = ( ret != 0 );
#if defined(MBEDTLS_ECP_RESTARTABLE)
- if( ret != MBEDTLS_ERR_ECP_IN_PROGRESS )
+ /* don't free R while in progress in case R == P */
+ if( ret == MBEDTLS_ERR_ECP_IN_PROGRESS )
+ should_free_R = 0;
#endif
- /* prevent caller from using invalid value */
- if( ret != 0 )
+ if( should_free_R )
mbedtls_ecp_point_free( R );
ECP_RS_LEAVE( rsm );
@@ -2596,13 +2592,16 @@ static int ecp_mul_mxz( mbedtls_ecp_group *grp, mbedtls_ecp_point *R,
MOD_ADD( RP.X );
/* Randomize coordinates of the starting point */
+ int have_rng = 1;
#if defined(MBEDTLS_ECP_NO_INTERNAL_RNG)
- if( f_rng != NULL )
+ if( f_rng == NULL )
+ have_rng = 0;
#endif
+ if( have_rng )
MBEDTLS_MPI_CHK( ecp_randomize_mxz( grp, &RP, f_rng, p_rng ) );
/* Loop invariant: R = result so far, RP = R + P */
- i = mbedtls_mpi_bitlen( m ); /* one past the (zero-based) most significant bit */
+ i = grp->nbits + 1; /* one past the (zero-based) required msb for private keys */
while( i-- > 0 )
{
b = mbedtls_mpi_get_bit( m, i );
@@ -2631,9 +2630,12 @@ static int ecp_mul_mxz( mbedtls_ecp_group *grp, mbedtls_ecp_point *R,
*
* Avoid the leak by randomizing coordinates before we normalize them.
*/
+ have_rng = 1;
#if defined(MBEDTLS_ECP_NO_INTERNAL_RNG)
- if( f_rng != NULL )
+ if( f_rng == NULL )
+ have_rng = 0;
#endif
+ if( have_rng )
MBEDTLS_MPI_CHK( ecp_randomize_mxz( grp, R, f_rng, p_rng ) );
MBEDTLS_MPI_CHK( ecp_normalize_mxz( grp, R ) );
@@ -2680,10 +2682,12 @@ int mbedtls_ecp_mul_restartable( mbedtls_ecp_group *grp, mbedtls_ecp_point *R,
MBEDTLS_MPI_CHK( mbedtls_internal_ecp_init( grp ) );
#endif /* MBEDTLS_ECP_INTERNAL_ALT */
+ int restarting = 0;
#if defined(MBEDTLS_ECP_RESTARTABLE)
- /* skip argument check when restarting */
- if( rs_ctx == NULL || rs_ctx->rsm == NULL )
+ restarting = ( rs_ctx != NULL && rs_ctx->rsm != NULL );
#endif
+ /* skip argument check when restarting */
+ if( !restarting )
{
/* check_privkey is free */
MBEDTLS_ECP_BUDGET( MBEDTLS_ECP_OPS_CHK );
@@ -2797,14 +2801,17 @@ static int mbedtls_ecp_mul_shortcuts( mbedtls_ecp_group *grp,
if( mbedtls_mpi_cmp_int( m, 0 ) == 0 )
{
+ MBEDTLS_MPI_CHK( mbedtls_ecp_check_pubkey( grp, P ) );
MBEDTLS_MPI_CHK( mbedtls_ecp_set_zero( R ) );
}
else if( mbedtls_mpi_cmp_int( m, 1 ) == 0 )
{
+ MBEDTLS_MPI_CHK( mbedtls_ecp_check_pubkey( grp, P ) );
MBEDTLS_MPI_CHK( mbedtls_ecp_copy( R, P ) );
}
else if( mbedtls_mpi_cmp_int( m, -1 ) == 0 )
{
+ MBEDTLS_MPI_CHK( mbedtls_ecp_check_pubkey( grp, P ) );
MBEDTLS_MPI_CHK( mbedtls_ecp_copy( R, P ) );
if( mbedtls_mpi_cmp_int( &R->Y, 0 ) != 0 )
MBEDTLS_MPI_CHK( mbedtls_mpi_sub_mpi( &R->Y, &grp->P, &R->Y ) );
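Several ecdh.c and ecp.c hunks above replace the fragile pattern of an `#if`-guarded `if` that governs the following statement with a flag that is always defined, so the guarded call is compiled in every configuration. A minimal standalone sketch of the shape, using a hypothetical FEATURE_NO_INTERNAL_RNG macro in place of the mbedtls option:

    #include <stdio.h>

    /* Hypothetical build option standing in for
     * MBEDTLS_ECP_NO_INTERNAL_RNG in this sketch. */
    /* #define FEATURE_NO_INTERNAL_RNG */

    static void randomize(void) { puts("randomized"); }

    static void run(int (*f_rng)(void))
    {
        /* The flag always exists, so the call below is always visible
         * to the compiler and there is no dangling-if hazard when the
         * #if block is compiled out. */
        int have_rng = 1;
    #if defined(FEATURE_NO_INTERNAL_RNG)
        if (f_rng == NULL)
            have_rng = 0;
    #else
        (void) f_rng;
    #endif
        if (have_rng)
            randomize();
    }

    int main(void)
    {
        run(NULL);
        return 0;
    }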
diff --git a/thirdparty/mbedtls/library/ecp_curves.c b/thirdparty/mbedtls/library/ecp_curves.c
index 2199be6461..47761eef4e 100644
--- a/thirdparty/mbedtls/library/ecp_curves.c
+++ b/thirdparty/mbedtls/library/ecp_curves.c
@@ -38,11 +38,6 @@
#define ECP_VALIDATE( cond ) \
MBEDTLS_INTERNAL_VALIDATE( cond )
-#if ( defined(__ARMCC_VERSION) || defined(_MSC_VER) ) && \
- !defined(inline) && !defined(__cplusplus)
-#define inline __inline
-#endif
-
#define ECP_MPI_INIT(s, n, p) {s, (n), (mbedtls_mpi_uint *)(p)}
#define ECP_MPI_INIT_ARRAY(x) \
diff --git a/thirdparty/mbedtls/library/entropy.c b/thirdparty/mbedtls/library/entropy.c
index 12fd3b9b5f..e3b337ff35 100644
--- a/thirdparty/mbedtls/library/entropy.c
+++ b/thirdparty/mbedtls/library/entropy.c
@@ -38,18 +38,9 @@
#include <stdio.h>
#endif
-#if defined(MBEDTLS_ENTROPY_NV_SEED)
#include "mbedtls/platform.h"
-#endif
-#if defined(MBEDTLS_SELF_TEST)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#define mbedtls_printf printf
-#endif /* MBEDTLS_PLATFORM_C */
-#endif /* MBEDTLS_SELF_TEST */
#if defined(MBEDTLS_HAVEGE_C)
#include "mbedtls/havege.h"
diff --git a/thirdparty/mbedtls/library/entropy_poll.c b/thirdparty/mbedtls/library/entropy_poll.c
index 40f23fd2a6..69ac29e4f7 100644
--- a/thirdparty/mbedtls/library/entropy_poll.c
+++ b/thirdparty/mbedtls/library/entropy_poll.c
@@ -38,9 +38,7 @@
#if defined(MBEDTLS_HAVEGE_C)
#include "mbedtls/havege.h"
#endif
-#if defined(MBEDTLS_ENTROPY_NV_SEED)
#include "mbedtls/platform.h"
-#endif
#if !defined(MBEDTLS_NO_PLATFORM_ENTROPY)
diff --git a/thirdparty/mbedtls/library/error.c b/thirdparty/mbedtls/library/error.c
index afad38904f..8573369a32 100644
--- a/thirdparty/mbedtls/library/error.c
+++ b/thirdparty/mbedtls/library/error.c
@@ -25,11 +25,7 @@
#if defined(MBEDTLS_ERROR_C)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#define mbedtls_snprintf snprintf
-#endif
#include <stdio.h>
#include <string.h>
@@ -961,7 +957,7 @@ void mbedtls_strerror( int ret, char *buf, size_t buflen )
#else /* MBEDTLS_ERROR_C */
/*
- * Provide an non-function in case MBEDTLS_ERROR_C is not defined
+ * Provide a dummy implementation when MBEDTLS_ERROR_C is not defined
*/
void mbedtls_strerror( int ret, char *buf, size_t buflen )
{
diff --git a/thirdparty/mbedtls/library/gcm.c b/thirdparty/mbedtls/library/gcm.c
index 43a5e1bec6..d0b73379a6 100644
--- a/thirdparty/mbedtls/library/gcm.c
+++ b/thirdparty/mbedtls/library/gcm.c
@@ -32,6 +32,7 @@
#if defined(MBEDTLS_GCM_C)
#include "mbedtls/gcm.h"
+#include "mbedtls/platform.h"
#include "mbedtls/platform_util.h"
#include "mbedtls/error.h"
@@ -41,15 +42,6 @@
#include "mbedtls/aesni.h"
#endif
-#if defined(MBEDTLS_SELF_TEST) && defined(MBEDTLS_AES_C)
-#include "mbedtls/aes.h"
-#include "mbedtls/platform.h"
-#if !defined(MBEDTLS_PLATFORM_C)
-#include <stdio.h>
-#define mbedtls_printf printf
-#endif /* MBEDTLS_PLATFORM_C */
-#endif /* MBEDTLS_SELF_TEST && MBEDTLS_AES_C */
-
#if !defined(MBEDTLS_GCM_ALT)
/* Parameter validation macros */
diff --git a/thirdparty/mbedtls/library/hmac_drbg.c b/thirdparty/mbedtls/library/hmac_drbg.c
index de9706885c..69272fa73d 100644
--- a/thirdparty/mbedtls/library/hmac_drbg.c
+++ b/thirdparty/mbedtls/library/hmac_drbg.c
@@ -37,14 +37,7 @@
#include <stdio.h>
#endif
-#if defined(MBEDTLS_SELF_TEST)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#define mbedtls_printf printf
-#endif /* MBEDTLS_SELF_TEST */
-#endif /* MBEDTLS_PLATFORM_C */
/*
* HMAC_DRBG context initialization
diff --git a/thirdparty/mbedtls/library/md.c b/thirdparty/mbedtls/library/md.c
index a10a835634..53a84b01c6 100644
--- a/thirdparty/mbedtls/library/md.c
+++ b/thirdparty/mbedtls/library/md.c
@@ -38,13 +38,7 @@
#include "mbedtls/sha256.h"
#include "mbedtls/sha512.h"
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdlib.h>
-#define mbedtls_calloc calloc
-#define mbedtls_free free
-#endif
#include <string.h>
diff --git a/thirdparty/mbedtls/library/md2.c b/thirdparty/mbedtls/library/md2.c
index 7264e30313..f8293a1df3 100644
--- a/thirdparty/mbedtls/library/md2.c
+++ b/thirdparty/mbedtls/library/md2.c
@@ -33,14 +33,7 @@
#include <string.h>
-#if defined(MBEDTLS_SELF_TEST)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#define mbedtls_printf printf
-#endif /* MBEDTLS_PLATFORM_C */
-#endif /* MBEDTLS_SELF_TEST */
#if !defined(MBEDTLS_MD2_ALT)
diff --git a/thirdparty/mbedtls/library/md4.c b/thirdparty/mbedtls/library/md4.c
index eaa679a0a6..a412213480 100644
--- a/thirdparty/mbedtls/library/md4.c
+++ b/thirdparty/mbedtls/library/md4.c
@@ -33,14 +33,7 @@
#include <string.h>
-#if defined(MBEDTLS_SELF_TEST)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#define mbedtls_printf printf
-#endif /* MBEDTLS_PLATFORM_C */
-#endif /* MBEDTLS_SELF_TEST */
#if !defined(MBEDTLS_MD4_ALT)
diff --git a/thirdparty/mbedtls/library/md5.c b/thirdparty/mbedtls/library/md5.c
index 4b53fcf367..e53bfe6306 100644
--- a/thirdparty/mbedtls/library/md5.c
+++ b/thirdparty/mbedtls/library/md5.c
@@ -32,14 +32,7 @@
#include <string.h>
-#if defined(MBEDTLS_SELF_TEST)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#define mbedtls_printf printf
-#endif /* MBEDTLS_PLATFORM_C */
-#endif /* MBEDTLS_SELF_TEST */
#if !defined(MBEDTLS_MD5_ALT)
diff --git a/thirdparty/mbedtls/library/mps_reader.c b/thirdparty/mbedtls/library/mps_reader.c
index 9af5073cc9..0c30a75d05 100644
--- a/thirdparty/mbedtls/library/mps_reader.c
+++ b/thirdparty/mbedtls/library/mps_reader.c
@@ -29,11 +29,6 @@
#include <string.h>
-#if ( defined(__ARMCC_VERSION) || defined(_MSC_VER) ) && \
- !defined(inline) && !defined(__cplusplus)
-#define inline __inline
-#endif
-
#if defined(MBEDTLS_MPS_ENABLE_TRACE)
static int mbedtls_mps_trace_id = MBEDTLS_MPS_TRACE_BIT_READER;
#endif /* MBEDTLS_MPS_ENABLE_TRACE */
@@ -535,7 +530,7 @@ int mbedtls_mps_reader_reclaim( mbedtls_mps_reader *rd,
* of the accumulator. */
memmove( acc, acc + acc_backup_offset, acc_backup_len );
- /* Copy uncmmitted parts of the current fragment to the
+ /* Copy uncommitted parts of the current fragment to the
* accumulator. */
memcpy( acc + acc_backup_len,
frag + frag_backup_offset, frag_backup_len );
diff --git a/thirdparty/mbedtls/library/mps_trace.h b/thirdparty/mbedtls/library/mps_trace.h
index 7c2360118a..820a1b66c2 100644
--- a/thirdparty/mbedtls/library/mps_trace.h
+++ b/thirdparty/mbedtls/library/mps_trace.h
@@ -30,13 +30,7 @@
#include "mps_common.h"
#include "mps_trace.h"
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#define mbedtls_printf printf
-#define mbedtls_vsnprintf vsnprintf
-#endif /* MBEDTLS_PLATFORM_C */
#if defined(MBEDTLS_MPS_ENABLE_TRACE)
diff --git a/thirdparty/mbedtls/library/net_sockets.c b/thirdparty/mbedtls/library/net_sockets.c
index 8c765e1c8c..b2cab503f2 100644
--- a/thirdparty/mbedtls/library/net_sockets.c
+++ b/thirdparty/mbedtls/library/net_sockets.c
@@ -37,11 +37,7 @@
#error "This module only works on Unix and Windows, see MBEDTLS_NET_C in config.h"
#endif
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdlib.h>
-#endif
#include "mbedtls/net_sockets.h"
#include "mbedtls/error.h"
diff --git a/thirdparty/mbedtls/library/nist_kw.c b/thirdparty/mbedtls/library/nist_kw.c
index 1aea0b6345..495c23d06a 100644
--- a/thirdparty/mbedtls/library/nist_kw.c
+++ b/thirdparty/mbedtls/library/nist_kw.c
@@ -39,14 +39,7 @@
#include <stdint.h>
#include <string.h>
-#if defined(MBEDTLS_SELF_TEST) && defined(MBEDTLS_AES_C)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#define mbedtls_printf printf
-#endif /* MBEDTLS_PLATFORM_C */
-#endif /* MBEDTLS_SELF_TEST && MBEDTLS_AES_C */
#if !defined(MBEDTLS_NIST_KW_ALT)
diff --git a/thirdparty/mbedtls/library/oid.c b/thirdparty/mbedtls/library/oid.c
index 19c8ac207c..53e5350eb3 100644
--- a/thirdparty/mbedtls/library/oid.c
+++ b/thirdparty/mbedtls/library/oid.c
@@ -30,11 +30,7 @@
#include <stdio.h>
#include <string.h>
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#define mbedtls_snprintf snprintf
-#endif
/*
* Macro to automatically add the size of #define'd OIDs
diff --git a/thirdparty/mbedtls/library/pem.c b/thirdparty/mbedtls/library/pem.c
index fcfde94799..cb1c82b3e4 100644
--- a/thirdparty/mbedtls/library/pem.c
+++ b/thirdparty/mbedtls/library/pem.c
@@ -32,13 +32,7 @@
#include <string.h>
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdlib.h>
-#define mbedtls_calloc calloc
-#define mbedtls_free free
-#endif
#if defined(MBEDTLS_PEM_PARSE_C)
void mbedtls_pem_init( mbedtls_pem_context *ctx )
diff --git a/thirdparty/mbedtls/library/pk_wrap.c b/thirdparty/mbedtls/library/pk_wrap.c
index 107e912ace..f35abf21a4 100644
--- a/thirdparty/mbedtls/library/pk_wrap.c
+++ b/thirdparty/mbedtls/library/pk_wrap.c
@@ -50,13 +50,7 @@
#include "mbedtls/asn1.h"
#endif
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdlib.h>
-#define mbedtls_calloc calloc
-#define mbedtls_free free
-#endif
#include <limits.h>
#include <stdint.h>
@@ -872,7 +866,7 @@ static void *pk_opaque_alloc_wrap( void )
{
void *ctx = mbedtls_calloc( 1, sizeof( psa_key_id_t ) );
- /* no _init() function to call, an calloc() already zeroized */
+ /* no _init() function to call, as calloc() already zeroized */
return( ctx );
}
diff --git a/thirdparty/mbedtls/library/pkcs11.c b/thirdparty/mbedtls/library/pkcs11.c
index 4deccf3f60..a7207cfc93 100644
--- a/thirdparty/mbedtls/library/pkcs11.c
+++ b/thirdparty/mbedtls/library/pkcs11.c
@@ -29,13 +29,7 @@
#include "mbedtls/oid.h"
#include "mbedtls/x509_crt.h"
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdlib.h>
-#define mbedtls_calloc calloc
-#define mbedtls_free free
-#endif
#include <string.h>
diff --git a/thirdparty/mbedtls/library/pkcs5.c b/thirdparty/mbedtls/library/pkcs5.c
index 2b014d91c8..f9d01371a7 100644
--- a/thirdparty/mbedtls/library/pkcs5.c
+++ b/thirdparty/mbedtls/library/pkcs5.c
@@ -42,12 +42,7 @@
#include <string.h>
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#define mbedtls_printf printf
-#endif
#if defined(MBEDTLS_ASN1_PARSE_C)
static int pkcs5_parse_pbkdf2_params( const mbedtls_asn1_buf *params,
diff --git a/thirdparty/mbedtls/library/pkparse.c b/thirdparty/mbedtls/library/pkparse.c
index ea5c6b69cb..6170d6d012 100644
--- a/thirdparty/mbedtls/library/pkparse.c
+++ b/thirdparty/mbedtls/library/pkparse.c
@@ -48,13 +48,7 @@
#include "mbedtls/pkcs12.h"
#endif
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdlib.h>
-#define mbedtls_calloc calloc
-#define mbedtls_free free
-#endif
/* Parameter validation macros based on platform_util.h */
#define PK_VALIDATE_RET( cond ) \
diff --git a/thirdparty/mbedtls/library/pkwrite.c b/thirdparty/mbedtls/library/pkwrite.c
index 566153dd93..c1ce0e3f02 100644
--- a/thirdparty/mbedtls/library/pkwrite.c
+++ b/thirdparty/mbedtls/library/pkwrite.c
@@ -48,13 +48,7 @@
#include "psa/crypto.h"
#include "mbedtls/psa_util.h"
#endif
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdlib.h>
-#define mbedtls_calloc calloc
-#define mbedtls_free free
-#endif
/* Parameter validation macros based on platform_util.h */
#define PK_VALIDATE_RET( cond ) \
diff --git a/thirdparty/mbedtls/library/poly1305.c b/thirdparty/mbedtls/library/poly1305.c
index 7375a0c572..a1c5b19d8e 100644
--- a/thirdparty/mbedtls/library/poly1305.c
+++ b/thirdparty/mbedtls/library/poly1305.c
@@ -28,22 +28,10 @@
#include <string.h>
-#if defined(MBEDTLS_SELF_TEST)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#define mbedtls_printf printf
-#endif /* MBEDTLS_PLATFORM_C */
-#endif /* MBEDTLS_SELF_TEST */
#if !defined(MBEDTLS_POLY1305_ALT)
-#if ( defined(__ARMCC_VERSION) || defined(_MSC_VER) ) && \
- !defined(inline) && !defined(__cplusplus)
-#define inline __inline
-#endif
-
/* Parameter validation macros */
#define POLY1305_VALIDATE_RET( cond ) \
MBEDTLS_INTERNAL_VALIDATE_RET( cond, MBEDTLS_ERR_POLY1305_BAD_INPUT_DATA )
diff --git a/thirdparty/mbedtls/library/ripemd160.c b/thirdparty/mbedtls/library/ripemd160.c
index aed7322cff..55e259e192 100644
--- a/thirdparty/mbedtls/library/ripemd160.c
+++ b/thirdparty/mbedtls/library/ripemd160.c
@@ -33,14 +33,7 @@
#include <string.h>
-#if defined(MBEDTLS_SELF_TEST)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#define mbedtls_printf printf
-#endif /* MBEDTLS_PLATFORM_C */
-#endif /* MBEDTLS_SELF_TEST */
#if !defined(MBEDTLS_RIPEMD160_ALT)
diff --git a/thirdparty/mbedtls/library/rsa.c b/thirdparty/mbedtls/library/rsa.c
index d1f6ddb177..9c39fa5d91 100644
--- a/thirdparty/mbedtls/library/rsa.c
+++ b/thirdparty/mbedtls/library/rsa.c
@@ -57,14 +57,7 @@
#include <stdlib.h>
#endif
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#define mbedtls_printf printf
-#define mbedtls_calloc calloc
-#define mbedtls_free free
-#endif
#if !defined(MBEDTLS_RSA_ALT)
diff --git a/thirdparty/mbedtls/library/sha1.c b/thirdparty/mbedtls/library/sha1.c
index 0a5edafaff..7f0c8757db 100644
--- a/thirdparty/mbedtls/library/sha1.c
+++ b/thirdparty/mbedtls/library/sha1.c
@@ -32,14 +32,7 @@
#include <string.h>
-#if defined(MBEDTLS_SELF_TEST)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#define mbedtls_printf printf
-#endif /* MBEDTLS_PLATFORM_C */
-#endif /* MBEDTLS_SELF_TEST */
#define SHA1_VALIDATE_RET(cond) \
MBEDTLS_INTERNAL_VALIDATE_RET( cond, MBEDTLS_ERR_SHA1_BAD_INPUT_DATA )
diff --git a/thirdparty/mbedtls/library/sha256.c b/thirdparty/mbedtls/library/sha256.c
index db675efd1b..6f1306ee66 100644
--- a/thirdparty/mbedtls/library/sha256.c
+++ b/thirdparty/mbedtls/library/sha256.c
@@ -32,17 +32,7 @@
#include <string.h>
-#if defined(MBEDTLS_SELF_TEST)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#include <stdlib.h>
-#define mbedtls_printf printf
-#define mbedtls_calloc calloc
-#define mbedtls_free free
-#endif /* MBEDTLS_PLATFORM_C */
-#endif /* MBEDTLS_SELF_TEST */
#define SHA256_VALIDATE_RET(cond) \
MBEDTLS_INTERNAL_VALIDATE_RET( cond, MBEDTLS_ERR_SHA256_BAD_INPUT_DATA )
diff --git a/thirdparty/mbedtls/library/sha512.c b/thirdparty/mbedtls/library/sha512.c
index 02a135ca92..1a6872c8aa 100644
--- a/thirdparty/mbedtls/library/sha512.c
+++ b/thirdparty/mbedtls/library/sha512.c
@@ -38,17 +38,7 @@
#include <string.h>
-#if defined(MBEDTLS_SELF_TEST)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#include <stdlib.h>
-#define mbedtls_printf printf
-#define mbedtls_calloc calloc
-#define mbedtls_free free
-#endif /* MBEDTLS_PLATFORM_C */
-#endif /* MBEDTLS_SELF_TEST */
#define SHA512_VALIDATE_RET(cond) \
MBEDTLS_INTERNAL_VALIDATE_RET( cond, MBEDTLS_ERR_SHA512_BAD_INPUT_DATA )
@@ -428,9 +418,11 @@ int mbedtls_sha512_finish_ret( mbedtls_sha512_context *ctx,
sha512_put_uint64_be( ctx->state[4], output, 32 );
sha512_put_uint64_be( ctx->state[5], output, 40 );
+ int truncated = 0;
#if !defined(MBEDTLS_SHA512_NO_SHA384)
- if( ctx->is384 == 0 )
+ truncated = ctx->is384;
#endif
+ if( !truncated )
{
sha512_put_uint64_be( ctx->state[6], output, 48 );
sha512_put_uint64_be( ctx->state[7], output, 56 );
diff --git a/thirdparty/mbedtls/library/ssl_cache.c b/thirdparty/mbedtls/library/ssl_cache.c
index 32188cf3f6..7a600cad18 100644
--- a/thirdparty/mbedtls/library/ssl_cache.c
+++ b/thirdparty/mbedtls/library/ssl_cache.c
@@ -25,13 +25,7 @@
#if defined(MBEDTLS_SSL_CACHE_C)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdlib.h>
-#define mbedtls_calloc calloc
-#define mbedtls_free free
-#endif
#include "mbedtls/ssl_cache.h"
#include "mbedtls/ssl_internal.h"
diff --git a/thirdparty/mbedtls/library/ssl_ciphersuites.c b/thirdparty/mbedtls/library/ssl_ciphersuites.c
index ceec77efb0..2bc8a9bba0 100644
--- a/thirdparty/mbedtls/library/ssl_ciphersuites.c
+++ b/thirdparty/mbedtls/library/ssl_ciphersuites.c
@@ -23,11 +23,7 @@
#if defined(MBEDTLS_SSL_TLS_C)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdlib.h>
-#endif
#include "mbedtls/ssl_ciphersuites.h"
#include "mbedtls/ssl.h"
diff --git a/thirdparty/mbedtls/library/ssl_cli.c b/thirdparty/mbedtls/library/ssl_cli.c
index 72351c9757..b40ddb70b4 100644
--- a/thirdparty/mbedtls/library/ssl_cli.c
+++ b/thirdparty/mbedtls/library/ssl_cli.c
@@ -21,13 +21,7 @@
#if defined(MBEDTLS_SSL_CLI_C)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdlib.h>
-#define mbedtls_calloc calloc
-#define mbedtls_free free
-#endif
#include "mbedtls/ssl.h"
#include "mbedtls/ssl_internal.h"
@@ -174,7 +168,7 @@ static int ssl_write_renegotiation_ext( mbedtls_ssl_context *ssl,
*olen = 0;
- /* We're always including an TLS_EMPTY_RENEGOTIATION_INFO_SCSV in the
+ /* We're always including a TLS_EMPTY_RENEGOTIATION_INFO_SCSV in the
* initial ClientHello, in which case also adding the renegotiation
* info extension is NOT RECOMMENDED as per RFC 5746 Section 3.4. */
if( ssl->renego_status != MBEDTLS_SSL_RENEGOTIATION_IN_PROGRESS )
@@ -1004,9 +998,12 @@ static int ssl_write_client_hello( mbedtls_ssl_context *ssl )
return( MBEDTLS_ERR_SSL_NO_RNG );
}
+ int renegotiating = 0;
#if defined(MBEDTLS_SSL_RENEGOTIATION)
- if( ssl->renego_status == MBEDTLS_SSL_INITIAL_HANDSHAKE )
+ if( ssl->renego_status != MBEDTLS_SSL_INITIAL_HANDSHAKE )
+ renegotiating = 1;
#endif
+ if( !renegotiating )
{
ssl->major_ver = ssl->conf->min_major_ver;
ssl->minor_ver = ssl->conf->min_minor_ver;
@@ -1092,9 +1089,7 @@ static int ssl_write_client_hello( mbedtls_ssl_context *ssl )
* RFC 5077 section 3.4: "When presenting a ticket, the client MAY
* generate and include a Session ID in the TLS ClientHello."
*/
-#if defined(MBEDTLS_SSL_RENEGOTIATION)
- if( ssl->renego_status == MBEDTLS_SSL_INITIAL_HANDSHAKE )
-#endif
+ if( !renegotiating )
{
if( ssl->session_negotiate->ticket != NULL &&
ssl->session_negotiate->ticket_len != 0 )
@@ -1209,9 +1204,7 @@ static int ssl_write_client_hello( mbedtls_ssl_context *ssl )
/*
* Add TLS_EMPTY_RENEGOTIATION_INFO_SCSV
*/
-#if defined(MBEDTLS_SSL_RENEGOTIATION)
- if( ssl->renego_status == MBEDTLS_SSL_INITIAL_HANDSHAKE )
-#endif
+ if( !renegotiating )
{
MBEDTLS_SSL_DEBUG_MSG( 3, ( "adding EMPTY_RENEGOTIATION_INFO_SCSV" ) );
MBEDTLS_SSL_CHK_BUF_PTR( p, end, 2 );
@@ -2062,6 +2055,30 @@ static int ssl_parse_hello_verify_request( mbedtls_ssl_context *ssl )
}
#endif /* MBEDTLS_SSL_PROTO_DTLS */
+static int is_compression_bad( mbedtls_ssl_context *ssl, unsigned char comp )
+{
+ int bad_comp = 0;
+
+ /* Suppress warnings in some configurations */
+ (void) ssl;
+#if defined(MBEDTLS_ZLIB_SUPPORT)
+ /* See comments in ssl_write_client_hello() */
+#if defined(MBEDTLS_SSL_PROTO_DTLS)
+ if( ssl->conf->transport == MBEDTLS_SSL_TRANSPORT_DATAGRAM &&
+ comp != MBEDTLS_SSL_COMPRESS_NULL )
+ bad_comp = 1;
+#endif
+
+ if( comp != MBEDTLS_SSL_COMPRESS_NULL &&
+ comp != MBEDTLS_SSL_COMPRESS_DEFLATE )
+ bad_comp = 1;
+#else /* MBEDTLS_ZLIB_SUPPORT */
+ if( comp != MBEDTLS_SSL_COMPRESS_NULL )
+ bad_comp = 1;
+#endif/* MBEDTLS_ZLIB_SUPPORT */
+ return bad_comp;
+}
+
MBEDTLS_CHECK_RETURN_CRITICAL
static int ssl_parse_server_hello( mbedtls_ssl_context *ssl )
{
@@ -2070,9 +2087,6 @@ static int ssl_parse_server_hello( mbedtls_ssl_context *ssl )
size_t ext_len;
unsigned char *buf, *ext;
unsigned char comp;
-#if defined(MBEDTLS_ZLIB_SUPPORT)
- int accept_comp;
-#endif
#if defined(MBEDTLS_SSL_RENEGOTIATION)
int renegotiation_info_seen = 0;
#endif
@@ -2241,20 +2255,7 @@ static int ssl_parse_server_hello( mbedtls_ssl_context *ssl )
*/
comp = buf[37 + n];
-#if defined(MBEDTLS_ZLIB_SUPPORT)
- /* See comments in ssl_write_client_hello() */
-#if defined(MBEDTLS_SSL_PROTO_DTLS)
- if( ssl->conf->transport == MBEDTLS_SSL_TRANSPORT_DATAGRAM )
- accept_comp = 0;
- else
-#endif
- accept_comp = 1;
-
- if( comp != MBEDTLS_SSL_COMPRESS_NULL &&
- ( comp != MBEDTLS_SSL_COMPRESS_DEFLATE || accept_comp == 0 ) )
-#else /* MBEDTLS_ZLIB_SUPPORT */
- if( comp != MBEDTLS_SSL_COMPRESS_NULL )
-#endif/* MBEDTLS_ZLIB_SUPPORT */
+ if( is_compression_bad( ssl, comp ) )
{
MBEDTLS_SSL_DEBUG_MSG( 1,
( "server hello, bad compression: %d", comp ) );
@@ -2687,7 +2688,7 @@ static int ssl_check_server_ecdh_params( const mbedtls_ssl_context *ssl )
grp_id = ssl->handshake->ecdh_ctx.grp.id;
#else
grp_id = ssl->handshake->ecdh_ctx.grp_id;
-#endif
+#endif /* MBEDTLS_ECDH_LEGACY_CONTEXT */
curve_info = mbedtls_ecp_curve_info_from_grp_id( grp_id );
if( curve_info == NULL )
@@ -2700,11 +2701,12 @@ static int ssl_check_server_ecdh_params( const mbedtls_ssl_context *ssl )
#if defined(MBEDTLS_ECP_C)
if( mbedtls_ssl_check_curve( ssl, grp_id ) != 0 )
+ return( -1 );
#else
if( ssl->handshake->ecdh_ctx.grp.nbits < 163 ||
ssl->handshake->ecdh_ctx.grp.nbits > 521 )
-#endif
return( -1 );
+#endif /* MBEDTLS_ECP_C */
MBEDTLS_SSL_DEBUG_ECDH( 3, &ssl->handshake->ecdh_ctx,
MBEDTLS_DEBUG_ECDH_QP );
@@ -2858,8 +2860,8 @@ static int ssl_parse_server_psk_hint( mbedtls_ssl_context *ssl,
}
/*
- * Note: we currently ignore the PKS identity hint, as we only allow one
- * PSK to be provisionned on the client. This could be changed later if
+ * Note: we currently ignore the PSK identity hint, as we only allow one
+ * PSK to be provisioned on the client. This could be changed later if
* someone needs that feature.
*/
*p += len;
@@ -3452,23 +3454,23 @@ start_processing:
#if defined(MBEDTLS_SSL_ECP_RESTARTABLE_ENABLED)
if( ssl->handshake->ecrs_enabled )
rs_ctx = &ssl->handshake->ecrs_ctx.pk;
-#endif
+#endif /* MBEDTLS_SSL_ECP_RESTARTABLE_ENABLED */
if( ( ret = mbedtls_pk_verify_restartable( peer_pk,
md_alg, hash, hashlen, p, sig_len, rs_ctx ) ) != 0 )
{
#if defined(MBEDTLS_SSL_ECP_RESTARTABLE_ENABLED)
- if( ret != MBEDTLS_ERR_ECP_IN_PROGRESS )
-#endif
- mbedtls_ssl_send_alert_message(
- ssl,
- MBEDTLS_SSL_ALERT_LEVEL_FATAL,
- MBEDTLS_SSL_ALERT_MSG_DECRYPT_ERROR );
- MBEDTLS_SSL_DEBUG_RET( 1, "mbedtls_pk_verify", ret );
-#if defined(MBEDTLS_SSL_ECP_RESTARTABLE_ENABLED)
if( ret == MBEDTLS_ERR_ECP_IN_PROGRESS )
- ret = MBEDTLS_ERR_SSL_CRYPTO_IN_PROGRESS;
-#endif
+ {
+ MBEDTLS_SSL_DEBUG_RET( 1, "mbedtls_pk_verify", ret );
+ return( MBEDTLS_ERR_SSL_CRYPTO_IN_PROGRESS );
+ }
+#endif /* MBEDTLS_SSL_ECP_RESTARTABLE_ENABLED */
+ mbedtls_ssl_send_alert_message(
+ ssl,
+ MBEDTLS_SSL_ALERT_LEVEL_FATAL,
+ MBEDTLS_SSL_ALERT_MSG_DECRYPT_ERROR );
+ MBEDTLS_SSL_DEBUG_RET( 1, "mbedtls_pk_verify", ret );
return( ret );
}
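The new is_compression_bad() helper above folds the nested compile-time cases into a single predicate so the caller keeps one unconditional `if`. A standalone sketch of the same shape (ZLIB_SUPPORT and the compression codes below are placeholders for the mbedtls macros, not real configuration symbols):

    #include <stdio.h>

    #define COMPRESS_NULL    0
    #define COMPRESS_DEFLATE 1
    /* #define ZLIB_SUPPORT */ /* placeholder for MBEDTLS_ZLIB_SUPPORT */

    static int is_compression_bad(unsigned char comp)
    {
        int bad = 0;
    #if defined(ZLIB_SUPPORT)
        if (comp != COMPRESS_NULL && comp != COMPRESS_DEFLATE)
            bad = 1;
    #else
        if (comp != COMPRESS_NULL)
            bad = 1;
    #endif
        return bad;
    }

    int main(void)
    {
        /* The caller stays a single unconditional check. */
        if (is_compression_bad(COMPRESS_DEFLATE))
            puts("bad compression");
        return 0;
    }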
diff --git a/thirdparty/mbedtls/library/ssl_cookie.c b/thirdparty/mbedtls/library/ssl_cookie.c
index 3781796b72..f12f28e849 100644
--- a/thirdparty/mbedtls/library/ssl_cookie.c
+++ b/thirdparty/mbedtls/library/ssl_cookie.c
@@ -25,12 +25,7 @@
#if defined(MBEDTLS_SSL_COOKIE_C)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#define mbedtls_calloc calloc
-#define mbedtls_free free
-#endif
#include "mbedtls/ssl_cookie.h"
#include "mbedtls/ssl_internal.h"
diff --git a/thirdparty/mbedtls/library/ssl_msg.c b/thirdparty/mbedtls/library/ssl_msg.c
index e47c538888..d7bebe04d6 100644
--- a/thirdparty/mbedtls/library/ssl_msg.c
+++ b/thirdparty/mbedtls/library/ssl_msg.c
@@ -30,13 +30,7 @@
#if defined(MBEDTLS_SSL_TLS_C)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdlib.h>
-#define mbedtls_calloc calloc
-#define mbedtls_free free
-#endif
#include "mbedtls/ssl.h"
#include "mbedtls/ssl_internal.h"
@@ -441,9 +435,12 @@ static void ssl_extract_add_data_from_record( unsigned char* add_data,
unsigned char *cur = add_data;
+ int is_tls13 = 0;
#if defined(MBEDTLS_SSL_PROTO_TLS1_3_EXPERIMENTAL)
- if( minor_ver != MBEDTLS_SSL_MINOR_VERSION_4 )
+ if( minor_ver == MBEDTLS_SSL_MINOR_VERSION_4 )
+ is_tls13 = 1;
#endif /* MBEDTLS_SSL_PROTO_TLS1_3_EXPERIMENTAL */
+ if( !is_tls13 )
{
((void) minor_ver);
memcpy( cur, rec->ctr, sizeof( rec->ctr ) );
@@ -1887,8 +1884,7 @@ int mbedtls_ssl_fetch_input( mbedtls_ssl_context *ssl, size_t nb_want )
if( ssl->f_recv == NULL && ssl->f_recv_timeout == NULL )
{
- MBEDTLS_SSL_DEBUG_MSG( 1, ( "Bad usage of mbedtls_ssl_set_bio() "
- "or mbedtls_ssl_set_bio()" ) );
+ MBEDTLS_SSL_DEBUG_MSG( 1, ( "Bad usage of mbedtls_ssl_set_bio() " ) );
return( MBEDTLS_ERR_SSL_BAD_INPUT_DATA );
}
@@ -2103,8 +2099,7 @@ int mbedtls_ssl_flush_output( mbedtls_ssl_context *ssl )
if( ssl->f_send == NULL )
{
- MBEDTLS_SSL_DEBUG_MSG( 1, ( "Bad usage of mbedtls_ssl_set_bio() "
- "or mbedtls_ssl_set_bio()" ) );
+ MBEDTLS_SSL_DEBUG_MSG( 1, ( "Bad usage of mbedtls_ssl_set_bio() " ) );
return( MBEDTLS_ERR_SSL_BAD_INPUT_DATA );
}
@@ -3950,8 +3945,8 @@ int mbedtls_ssl_read_record( mbedtls_ssl_context *ssl,
if( ssl_record_is_in_progress( ssl ) == 0 )
{
+ int dtls_have_buffered = 0;
#if defined(MBEDTLS_SSL_PROTO_DTLS)
- int have_buffered = 0;
/* We only check for buffered messages if the
* current datagram is fully consumed. */
@@ -3959,11 +3954,11 @@ int mbedtls_ssl_read_record( mbedtls_ssl_context *ssl,
ssl_next_record_is_in_datagram( ssl ) == 0 )
{
if( ssl_load_buffered_message( ssl ) == 0 )
- have_buffered = 1;
+ dtls_have_buffered = 1;
}
- if( have_buffered == 0 )
#endif /* MBEDTLS_SSL_PROTO_DTLS */
+ if( dtls_have_buffered == 0 )
{
ret = ssl_get_next_record( ssl );
if( ret == MBEDTLS_ERR_SSL_CONTINUE_PROCESSING )
@@ -4037,7 +4032,7 @@ static int ssl_load_buffered_message( mbedtls_ssl_context *ssl )
if( hs == NULL )
return( -1 );
- MBEDTLS_SSL_DEBUG_MSG( 2, ( "=> ssl_load_buffered_messsage" ) );
+ MBEDTLS_SSL_DEBUG_MSG( 2, ( "=> ssl_load_buffered_message" ) );
if( ssl->state == MBEDTLS_SSL_CLIENT_CHANGE_CIPHER_SPEC ||
ssl->state == MBEDTLS_SSL_SERVER_CHANGE_CIPHER_SPEC )
diff --git a/thirdparty/mbedtls/library/ssl_srv.c b/thirdparty/mbedtls/library/ssl_srv.c
index 2efb13cc33..0563c0b590 100644
--- a/thirdparty/mbedtls/library/ssl_srv.c
+++ b/thirdparty/mbedtls/library/ssl_srv.c
@@ -21,13 +21,7 @@
#if defined(MBEDTLS_SSL_SRV_C)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdlib.h>
-#define mbedtls_calloc calloc
-#define mbedtls_free free
-#endif
#include "mbedtls/ssl.h"
#include "mbedtls/ssl_internal.h"
@@ -1460,6 +1454,7 @@ static int ssl_parse_client_hello( mbedtls_ssl_context *ssl )
MBEDTLS_SSL_DEBUG_MSG( 2, ( "=> parse client hello" ) );
+ int renegotiating = 0;
#if defined(MBEDTLS_SSL_DTLS_ANTI_REPLAY)
read_record_header:
#endif
@@ -1469,8 +1464,10 @@ read_record_header:
* ClientHello, which doesn't use the same record layer format.
*/
#if defined(MBEDTLS_SSL_RENEGOTIATION)
- if( ssl->renego_status == MBEDTLS_SSL_INITIAL_HANDSHAKE )
+ if( ssl->renego_status != MBEDTLS_SSL_INITIAL_HANDSHAKE )
+ renegotiating = 1;
#endif
+ if( !renegotiating )
{
if( ( ret = mbedtls_ssl_fetch_input( ssl, 5 ) ) != 0 )
{
@@ -1483,9 +1480,12 @@ read_record_header:
buf = ssl->in_hdr;
#if defined(MBEDTLS_SSL_SRV_SUPPORT_SSLV2_CLIENT_HELLO)
+ int is_dtls = 0;
#if defined(MBEDTLS_SSL_PROTO_DTLS)
- if( ssl->conf->transport == MBEDTLS_SSL_TRANSPORT_STREAM )
+ if( ssl->conf->transport == MBEDTLS_SSL_TRANSPORT_DATAGRAM )
+ is_dtls = 1;
#endif
+ if( !is_dtls )
if( ( buf[0] & 0x80 ) != 0 )
return( ssl_parse_client_hello_v2( ssl ) );
#endif
@@ -3903,8 +3903,14 @@ static int ssl_decrypt_encrypted_pms( mbedtls_ssl_context *ssl,
size_t peer_pmssize )
{
int ret = MBEDTLS_ERR_ERROR_CORRUPTION_DETECTED;
+
+ mbedtls_x509_crt *own_cert = mbedtls_ssl_own_cert( ssl );
+ if( own_cert == NULL ) {
+ MBEDTLS_SSL_DEBUG_MSG( 1, ( "got no local certificate" ) );
+ return( MBEDTLS_ERR_SSL_NO_CLIENT_CERTIFICATE );
+ }
+ mbedtls_pk_context *public_key = &own_cert->pk;
mbedtls_pk_context *private_key = mbedtls_ssl_own_key( ssl );
- mbedtls_pk_context *public_key = &mbedtls_ssl_own_cert( ssl )->pk;
size_t len = mbedtls_pk_get_len( public_key );
#if defined(MBEDTLS_SSL_ASYNC_PRIVATE)
diff --git a/thirdparty/mbedtls/library/ssl_ticket.c b/thirdparty/mbedtls/library/ssl_ticket.c
index e0126cc9d1..8a57789f10 100644
--- a/thirdparty/mbedtls/library/ssl_ticket.c
+++ b/thirdparty/mbedtls/library/ssl_ticket.c
@@ -21,13 +21,7 @@
#if defined(MBEDTLS_SSL_TICKET_C)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdlib.h>
-#define mbedtls_calloc calloc
-#define mbedtls_free free
-#endif
#include "mbedtls/ssl_internal.h"
#include "mbedtls/ssl_ticket.h"
@@ -152,27 +146,45 @@ int mbedtls_ssl_ticket_setup( mbedtls_ssl_ticket_context *ctx,
if( cipher_info->key_bitlen > 8 * MAX_KEY_BYTES )
return( MBEDTLS_ERR_SSL_BAD_INPUT_DATA );
+ int do_mbedtls_cipher_setup = 1;
#if defined(MBEDTLS_USE_PSA_CRYPTO)
ret = mbedtls_cipher_setup_psa( &ctx->keys[0].ctx,
cipher_info, TICKET_AUTH_TAG_BYTES );
- if( ret != 0 && ret != MBEDTLS_ERR_CIPHER_FEATURE_UNAVAILABLE )
- return( ret );
- /* We don't yet expect to support all ciphers through PSA,
- * so allow fallback to ordinary mbedtls_cipher_setup(). */
- if( ret == MBEDTLS_ERR_CIPHER_FEATURE_UNAVAILABLE )
+
+ switch( ret )
+ {
+ case 0:
+ do_mbedtls_cipher_setup = 0;
+ break;
+ case MBEDTLS_ERR_CIPHER_FEATURE_UNAVAILABLE:
+ /* We don't yet expect to support all ciphers through PSA,
+ * so allow fallback to ordinary mbedtls_cipher_setup(). */
+ do_mbedtls_cipher_setup = 1;
+ break;
+ default:
+ return( ret );
+ }
#endif /* MBEDTLS_USE_PSA_CRYPTO */
- if( ( ret = mbedtls_cipher_setup( &ctx->keys[0].ctx, cipher_info ) ) != 0 )
- return( ret );
+ if( do_mbedtls_cipher_setup )
+ if( ( ret = mbedtls_cipher_setup( &ctx->keys[0].ctx, cipher_info ) )
+ != 0 )
+ return( ret );
+ do_mbedtls_cipher_setup = 1;
#if defined(MBEDTLS_USE_PSA_CRYPTO)
+ do_mbedtls_cipher_setup = 0;
+
ret = mbedtls_cipher_setup_psa( &ctx->keys[1].ctx,
cipher_info, TICKET_AUTH_TAG_BYTES );
if( ret != 0 && ret != MBEDTLS_ERR_CIPHER_FEATURE_UNAVAILABLE )
return( ret );
if( ret == MBEDTLS_ERR_CIPHER_FEATURE_UNAVAILABLE )
+ do_mbedtls_cipher_setup = 1;
#endif /* MBEDTLS_USE_PSA_CRYPTO */
- if( ( ret = mbedtls_cipher_setup( &ctx->keys[1].ctx, cipher_info ) ) != 0 )
- return( ret );
+ if( do_mbedtls_cipher_setup )
+ if( ( ret = mbedtls_cipher_setup( &ctx->keys[1].ctx, cipher_info ) )
+ != 0 )
+ return( ret );
if( ( ret = ssl_ticket_gen_key( ctx, 0 ) ) != 0 ||
( ret = ssl_ticket_gen_key( ctx, 1 ) ) != 0 )
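The ssl_ticket.c hunk above rewrites the PSA-first cipher setup as an explicit do_mbedtls_cipher_setup flag: try the PSA path, fall back to the classic setup only on "feature unavailable", and propagate any other error. A self-contained sketch of that control flow, with hypothetical setup_psa()/setup_generic() stand-ins rather than the real mbedTLS calls:

#include <stdio.h>

#define ERR_FEATURE_UNAVAILABLE (-1)

static int setup_psa(void)     { return ERR_FEATURE_UNAVAILABLE; } /* pretend PSA lacks this cipher */
static int setup_generic(void) { return 0; }

static int setup_cipher(void)
{
    int ret;
    int do_generic_setup = 1;

    ret = setup_psa();
    switch (ret) {
    case 0:                        /* preferred path succeeded: skip fallback */
        do_generic_setup = 0;
        break;
    case ERR_FEATURE_UNAVAILABLE:  /* not supported there: fall back */
        do_generic_setup = 1;
        break;
    default:                       /* hard error: propagate */
        return ret;
    }

    if (do_generic_setup && (ret = setup_generic()) != 0)
        return ret;
    return 0;
}

int main(void)
{
    printf("setup_cipher() = %d\n", setup_cipher());
    return 0;
}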
diff --git a/thirdparty/mbedtls/library/ssl_tls.c b/thirdparty/mbedtls/library/ssl_tls.c
index 7badec51ae..70196a4861 100644
--- a/thirdparty/mbedtls/library/ssl_tls.c
+++ b/thirdparty/mbedtls/library/ssl_tls.c
@@ -29,13 +29,7 @@
#if defined(MBEDTLS_SSL_TLS_C)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdlib.h>
-#define mbedtls_calloc calloc
-#define mbedtls_free free
-#endif
#include "mbedtls/ssl.h"
#include "mbedtls/ssl_internal.h"
@@ -766,7 +760,9 @@ static int tls_prf_generic( mbedtls_md_type_t md_type,
exit:
mbedtls_md_free( &md_ctx );
- mbedtls_platform_zeroize( tmp, tmp_len );
+ if ( tmp != NULL )
+ mbedtls_platform_zeroize( tmp, tmp_len );
+
mbedtls_platform_zeroize( h_i, sizeof( h_i ) );
mbedtls_free( tmp );
@@ -985,6 +981,7 @@ static int ssl_populate_transform( mbedtls_ssl_transform *transform,
#if defined(MBEDTLS_USE_PSA_CRYPTO)
int psa_fallthrough;
#endif /* MBEDTLS_USE_PSA_CRYPTO */
+ int do_mbedtls_cipher_setup;
unsigned char keyblk[256];
unsigned char *key1;
unsigned char *key2;
@@ -1363,6 +1360,7 @@ static int ssl_populate_transform( mbedtls_ssl_transform *transform,
}
#endif
+ do_mbedtls_cipher_setup = 1;
#if defined(MBEDTLS_USE_PSA_CRYPTO)
/* Only use PSA-based ciphers for TLS-1.2.
@@ -1398,15 +1396,18 @@ static int ssl_populate_transform( mbedtls_ssl_transform *transform,
psa_fallthrough = 1;
#endif /* MBEDTLS_SSL_PROTO_TLS1_2 */
- if( psa_fallthrough == 1 )
+ if( psa_fallthrough == 0 )
+ do_mbedtls_cipher_setup = 0;
#endif /* MBEDTLS_USE_PSA_CRYPTO */
- if( ( ret = mbedtls_cipher_setup( &transform->cipher_ctx_enc,
- cipher_info ) ) != 0 )
+ if( do_mbedtls_cipher_setup &&
+ ( ret = mbedtls_cipher_setup( &transform->cipher_ctx_enc,
+ cipher_info ) ) != 0 )
{
MBEDTLS_SSL_DEBUG_RET( 1, "mbedtls_cipher_setup", ret );
goto end;
}
+ do_mbedtls_cipher_setup = 1;
#if defined(MBEDTLS_USE_PSA_CRYPTO)
/* Only use PSA-based ciphers for TLS-1.2.
* That's relevant at least for TLS-1.0, where
@@ -1441,10 +1442,12 @@ static int ssl_populate_transform( mbedtls_ssl_transform *transform,
psa_fallthrough = 1;
#endif /* MBEDTLS_SSL_PROTO_TLS1_2 */
- if( psa_fallthrough == 1 )
+ if( psa_fallthrough == 0 )
+ do_mbedtls_cipher_setup = 0;
#endif /* MBEDTLS_USE_PSA_CRYPTO */
- if( ( ret = mbedtls_cipher_setup( &transform->cipher_ctx_dec,
- cipher_info ) ) != 0 )
+ if( do_mbedtls_cipher_setup &&
+ ( ret = mbedtls_cipher_setup( &transform->cipher_ctx_dec,
+ cipher_info ) ) != 0 )
{
MBEDTLS_SSL_DEBUG_RET( 1, "mbedtls_cipher_setup", ret );
goto end;
@@ -3411,7 +3414,7 @@ static void ssl_calc_finished_tls_sha384(
sha512.state, sizeof( sha512.state ) );
#endif
/* mbedtls_sha512_finish_ret's output parameter is declared as a
- * 64-byte buffer, but sice we're using SHA-384, we know that the
+ * 64-byte buffer, but since we're using SHA-384, we know that the
* output fits in 48 bytes. This is correct C, but GCC 11.1 warns
* about it.
*/
@@ -4089,9 +4092,12 @@ int mbedtls_ssl_session_reset_int( mbedtls_ssl_context *ssl, int partial )
memset( ssl->out_buf, 0, out_buf_len );
+ int clear_in_buf = 1;
#if defined(MBEDTLS_SSL_DTLS_CLIENT_PORT_REUSE) && defined(MBEDTLS_SSL_SRV_C)
- if( partial == 0 )
+ if( partial != 0 )
+ clear_in_buf = 0;
#endif /* MBEDTLS_SSL_DTLS_CLIENT_PORT_REUSE && MBEDTLS_SSL_SRV_C */
+ if( clear_in_buf )
{
ssl->in_left = 0;
memset( ssl->in_buf, 0, in_buf_len );
@@ -4128,9 +4134,12 @@ int mbedtls_ssl_session_reset_int( mbedtls_ssl_context *ssl, int partial )
#endif
#if defined(MBEDTLS_SSL_DTLS_HELLO_VERIFY) && defined(MBEDTLS_SSL_SRV_C)
+ int free_cli_id = 1;
#if defined(MBEDTLS_SSL_DTLS_CLIENT_PORT_REUSE)
- if( partial == 0 )
+ if( partial != 0 )
+ free_cli_id = 0;
#endif
+ if( free_cli_id )
{
mbedtls_free( ssl->cli_id );
ssl->cli_id = NULL;
@@ -4471,7 +4480,7 @@ static void ssl_conf_remove_psk( mbedtls_ssl_config *conf )
conf->psk_opaque = MBEDTLS_SVC_KEY_ID_INIT;
}
/* This and the following branch should never
- * be taken simultaenously as we maintain the
+ * be taken simultaneously as we maintain the
* invariant that raw and opaque PSKs are never
* configured simultaneously. As a safeguard,
* though, `else` is omitted here. */
@@ -6335,7 +6344,7 @@ int mbedtls_ssl_context_save( mbedtls_ssl_context *ssl,
MBEDTLS_SSL_DEBUG_MSG( 1, ( "There is pending outgoing data" ) );
return( MBEDTLS_ERR_SSL_BAD_INPUT_DATA );
}
- /* Protocol must be DLTS, not TLS */
+ /* Protocol must be DTLS, not TLS */
if( ssl->conf->transport != MBEDTLS_SSL_TRANSPORT_DATAGRAM )
{
MBEDTLS_SSL_DEBUG_MSG( 1, ( "Only DTLS is supported" ) );
@@ -6510,24 +6519,41 @@ int mbedtls_ssl_context_save( mbedtls_ssl_context *ssl,
* Helper to get TLS 1.2 PRF from ciphersuite
* (Duplicates bits of logic from ssl_set_handshake_prfs().)
*/
+#if defined(MBEDTLS_SHA256_C) || \
+ (defined(MBEDTLS_SHA512_C) && !defined(MBEDTLS_SHA512_NO_SHA384))
typedef int (*tls_prf_fn)( const unsigned char *secret, size_t slen,
const char *label,
const unsigned char *random, size_t rlen,
unsigned char *dstbuf, size_t dlen );
static tls_prf_fn ssl_tls12prf_from_cs( int ciphersuite_id )
{
-#if defined(MBEDTLS_SHA512_C) && !defined(MBEDTLS_SHA512_NO_SHA384)
const mbedtls_ssl_ciphersuite_t * const ciphersuite_info =
mbedtls_ssl_ciphersuite_from_id( ciphersuite_id );
+ if( ciphersuite_info == NULL )
+ return( NULL );
+
+#if defined(MBEDTLS_SHA512_C) && !defined(MBEDTLS_SHA512_NO_SHA384)
if( ciphersuite_info->mac == MBEDTLS_MD_SHA384 )
return( tls_prf_sha384 );
-#else
- (void) ciphersuite_id;
+ else
+#endif
+#if defined(MBEDTLS_SHA256_C)
+ {
+ if( ciphersuite_info->mac == MBEDTLS_MD_SHA256 )
+ return( tls_prf_sha256 );
+ }
#endif
- return( tls_prf_sha256 );
+#if !defined(MBEDTLS_SHA256_C) && \
+ (!defined(MBEDTLS_SHA512_C) || defined(MBEDTLS_SHA512_NO_SHA384))
+ (void) ciphersuite_info;
+#endif
+ return( NULL );
}
+#endif /* MBEDTLS_SHA256_C ||
+ (MBEDTLS_SHA512_C && !MBEDTLS_SHA512_NO_SHA384) */
+
/*
* Deserialize context, see mbedtls_ssl_context_save() for format.
*
@@ -6543,6 +6569,7 @@ static int ssl_context_load( mbedtls_ssl_context *ssl,
const unsigned char * const end = buf + len;
size_t session_len;
int ret = MBEDTLS_ERR_ERROR_CORRUPTION_DETECTED;
+ tls_prf_fn prf_func = NULL;
/*
* The context should have been freshly setup or reset.
@@ -6630,6 +6657,10 @@ static int ssl_context_load( mbedtls_ssl_context *ssl,
ssl->transform_out = ssl->transform;
ssl->transform_negotiate = NULL;
+ prf_func = ssl_tls12prf_from_cs( ssl->session->ciphersuite );
+ if( prf_func == NULL )
+ return( MBEDTLS_ERR_SSL_BAD_INPUT_DATA );
+
/* Read random bytes and populate structure */
if( (size_t)( end - p ) < sizeof( ssl->transform->randbytes ) )
return( MBEDTLS_ERR_SSL_BAD_INPUT_DATA );
@@ -6648,7 +6679,7 @@ static int ssl_context_load( mbedtls_ssl_context *ssl,
#if defined(MBEDTLS_ZLIB_SUPPORT)
ssl->session->compression,
#endif
- ssl_tls12prf_from_cs( ssl->session->ciphersuite ),
+ prf_func,
p, /* currently pointing to randbytes */
MBEDTLS_SSL_MINOR_VERSION_3, /* (D)TLS 1.2 is forced */
ssl->conf->endpoint,
@@ -6921,7 +6952,7 @@ void mbedtls_ssl_free( mbedtls_ssl_context *ssl )
}
/*
- * Initialze mbedtls_ssl_config
+ * Initialize mbedtls_ssl_config
*/
void mbedtls_ssl_config_init( mbedtls_ssl_config *conf )
{
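Among the ssl_tls.c hunks above, ssl_tls12prf_from_cs() now returns NULL when the ciphersuite is unknown or when the matching hash (SHA-256 or SHA-384) is not compiled in, and ssl_context_load() rejects the serialized context in that case instead of defaulting to SHA-256. A small sketch of that "select a function pointer or fail" pattern, with illustrative names only:

#include <stddef.h>
#include <stdio.h>

typedef int (*prf_fn)(const unsigned char *secret, size_t slen,
                      unsigned char *dst, size_t dlen);

static int prf_sha256(const unsigned char *s, size_t sl, unsigned char *d, size_t dl)
{ (void)s; (void)sl; (void)d; (void)dl; return 0; }

enum { MAC_SHA256 = 1, MAC_SHA384 = 2 };

/* NULL means "not supported in this build": the caller must check. */
static prf_fn prf_from_mac(int mac)
{
    if (mac == MAC_SHA256)
        return prf_sha256;
    return NULL;   /* e.g. a SHA-384 ciphersuite in a build without SHA-384 */
}

int main(void)
{
    prf_fn prf = prf_from_mac(MAC_SHA384);
    if (prf == NULL) {
        printf("unsupported ciphersuite: reject the saved context\n");
        return 1;
    }
    return 0;
}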
diff --git a/thirdparty/mbedtls/library/ssl_tls13_keys.c b/thirdparty/mbedtls/library/ssl_tls13_keys.c
index 3de6f03fb8..cc68773d3a 100644
--- a/thirdparty/mbedtls/library/ssl_tls13_keys.c
+++ b/thirdparty/mbedtls/library/ssl_tls13_keys.c
@@ -24,6 +24,7 @@
#include "mbedtls/hkdf.h"
#include "mbedtls/ssl_internal.h"
#include "ssl_tls13_keys.h"
+#include "psa/crypto_sizes.h"
#include <stdint.h>
#include <string.h>
@@ -31,6 +32,9 @@
#define MBEDTLS_SSL_TLS1_3_LABEL( name, string ) \
.name = string,
+#define TLS1_3_EVOLVE_INPUT_SIZE ( PSA_HASH_MAX_SIZE > PSA_RAW_KEY_AGREEMENT_OUTPUT_MAX_SIZE ) ? \
+ PSA_HASH_MAX_SIZE : PSA_RAW_KEY_AGREEMENT_OUTPUT_MAX_SIZE
+
struct mbedtls_ssl_tls1_3_labels_struct const mbedtls_ssl_tls1_3_labels =
{
/* This seems to work in C, despite the string literal being one
@@ -292,8 +296,8 @@ int mbedtls_ssl_tls1_3_evolve_secret(
{
int ret = MBEDTLS_ERR_SSL_INTERNAL_ERROR;
size_t hlen, ilen;
- unsigned char tmp_secret[ MBEDTLS_MD_MAX_SIZE ] = { 0 };
- unsigned char tmp_input [ MBEDTLS_MD_MAX_SIZE ] = { 0 };
+ unsigned char tmp_secret[ PSA_MAC_MAX_SIZE ] = { 0 };
+ unsigned char tmp_input [ TLS1_3_EVOLVE_INPUT_SIZE ] = { 0 };
const mbedtls_md_info_t *md;
md = mbedtls_md_info_from_type( hash_alg );
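The ssl_tls13_keys.c change above sizes tmp_input with TLS1_3_EVOLVE_INPUT_SIZE, presumably because the secret-evolution input can be either a hash-sized value or a raw key-agreement (ECDHE) output, whichever is larger under PSA. A tiny illustration of that compile-time max, using placeholder constants instead of the real PSA macros:

#include <stdio.h>

#define HASH_MAX          64   /* placeholder for PSA_HASH_MAX_SIZE */
#define KEY_AGREE_MAX     66   /* placeholder for PSA_RAW_KEY_AGREEMENT_OUTPUT_MAX_SIZE */
#define EVOLVE_INPUT_SIZE ( (HASH_MAX > KEY_AGREE_MAX) ? HASH_MAX : KEY_AGREE_MAX )

int main(void)
{
    unsigned char tmp_input[EVOLVE_INPUT_SIZE] = { 0 };
    printf("tmp_input holds %zu bytes\n", sizeof(tmp_input));
    return 0;
}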
diff --git a/thirdparty/mbedtls/library/timing.c b/thirdparty/mbedtls/library/timing.c
index 78bfa10cfb..151292e325 100644
--- a/thirdparty/mbedtls/library/timing.c
+++ b/thirdparty/mbedtls/library/timing.c
@@ -19,12 +19,7 @@
#include "common.h"
-#if defined(MBEDTLS_SELF_TEST) && defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#define mbedtls_printf printf
-#endif
#if defined(MBEDTLS_TIMING_C)
@@ -269,7 +264,7 @@ static void TimerProc( void *TimerContext )
Sleep( alarmMs );
mbedtls_timing_alarmed = 1;
/* _endthread will be called implicitly on return
- * That ensures execution of thread funcition's epilogue */
+ * That ensures execution of thread function's epilogue */
}
void mbedtls_set_alarm( int seconds )
diff --git a/thirdparty/mbedtls/library/x509.c b/thirdparty/mbedtls/library/x509.c
index 3997ebd1f3..54c8666d23 100644
--- a/thirdparty/mbedtls/library/x509.c
+++ b/thirdparty/mbedtls/library/x509.c
@@ -43,16 +43,7 @@
#include "mbedtls/pem.h"
#endif
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#include <stdlib.h>
-#define mbedtls_free free
-#define mbedtls_calloc calloc
-#define mbedtls_printf printf
-#define mbedtls_snprintf snprintf
-#endif
#if defined(MBEDTLS_HAVE_TIME)
#include "mbedtls/platform_time.h"
@@ -198,7 +189,7 @@ static int x509_get_hash_alg( const mbedtls_x509_buf *alg, mbedtls_md_type_t *md
*
* RFC 4055 (which defines use of RSASSA-PSS in PKIX) states that the value
* of trailerField MUST be 1, and PKCS#1 v2.2 doesn't even define any other
- * option. Enfore this at parsing time.
+ * option. Enforce this at parsing time.
*/
int mbedtls_x509_get_rsassa_pss_params( const mbedtls_x509_buf *params,
mbedtls_md_type_t *md_alg, mbedtls_md_type_t *mgf_md,
@@ -424,6 +415,11 @@ static int x509_get_attr_type_value( unsigned char **p,
* For the general case we still use a flat list, but we mark elements of the
* same set so that they are "merged" together in the functions that consume
* this list, eg mbedtls_x509_dn_gets().
+ *
+ * On success, this function may allocate a linked list starting at cur->next
+ * that must later be free'd by the caller using mbedtls_free(). In error
+ * cases, this function frees all allocated memory internally and the caller
+ * has no freeing responsibilities.
*/
int mbedtls_x509_get_name( unsigned char **p, const unsigned char *end,
mbedtls_x509_name *cur )
@@ -431,6 +427,8 @@ int mbedtls_x509_get_name( unsigned char **p, const unsigned char *end,
int ret = MBEDTLS_ERR_ERROR_CORRUPTION_DETECTED;
size_t set_len;
const unsigned char *end_set;
+ mbedtls_x509_name *head = cur;
+ mbedtls_x509_name *prev, *allocated;
/* don't use recursion, we'd risk stack overflow if not optimized */
while( 1 )
@@ -440,14 +438,17 @@ int mbedtls_x509_get_name( unsigned char **p, const unsigned char *end,
*/
if( ( ret = mbedtls_asn1_get_tag( p, end, &set_len,
MBEDTLS_ASN1_CONSTRUCTED | MBEDTLS_ASN1_SET ) ) != 0 )
- return( MBEDTLS_ERROR_ADD( MBEDTLS_ERR_X509_INVALID_NAME, ret ) );
+ {
+ ret = MBEDTLS_ERROR_ADD( MBEDTLS_ERR_X509_INVALID_NAME, ret );
+ goto error;
+ }
end_set = *p + set_len;
while( 1 )
{
if( ( ret = x509_get_attr_type_value( p, end_set, cur ) ) != 0 )
- return( ret );
+ goto error;
if( *p == end_set )
break;
@@ -458,7 +459,10 @@ int mbedtls_x509_get_name( unsigned char **p, const unsigned char *end,
cur->next = mbedtls_calloc( 1, sizeof( mbedtls_x509_name ) );
if( cur->next == NULL )
- return( MBEDTLS_ERR_X509_ALLOC_FAILED );
+ {
+ ret = MBEDTLS_ERR_X509_ALLOC_FAILED;
+ goto error;
+ }
cur = cur->next;
}
@@ -472,10 +476,30 @@ int mbedtls_x509_get_name( unsigned char **p, const unsigned char *end,
cur->next = mbedtls_calloc( 1, sizeof( mbedtls_x509_name ) );
if( cur->next == NULL )
- return( MBEDTLS_ERR_X509_ALLOC_FAILED );
+ {
+ ret = MBEDTLS_ERR_X509_ALLOC_FAILED;
+ goto error;
+ }
cur = cur->next;
}
+
+error:
+ /* Skip the first element as we did not allocate it */
+ allocated = head->next;
+
+ while( allocated != NULL )
+ {
+ prev = allocated;
+ allocated = allocated->next;
+
+ mbedtls_platform_zeroize( prev, sizeof( *prev ) );
+ mbedtls_free( prev );
+ }
+
+ mbedtls_platform_zeroize( head, sizeof( *head ) );
+
+ return( ret );
}
static int x509_parse_int( unsigned char **p, size_t n, int *res )
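The x509.c hunks above give mbedtls_x509_get_name() a single error path that walks the nodes it allocated after the caller-owned head, zeroizes and frees each one, then wipes the head, so the caller has nothing to clean up on failure. A generic sketch of that cleanup-on-error pattern for a linked-list builder (plain calloc/free, not mbedTLS API):

#include <stdlib.h>
#include <string.h>

struct node { int value; struct node *next; };

static int build_list(struct node *head, const int *values, size_t count)
{
    struct node *cur = head;
    struct node *allocated, *prev;
    int ret = 0;
    size_t i;

    for (i = 0; i < count; i++) {
        cur->value = values[i];
        if (i + 1 == count)
            return 0;                                  /* done: list is valid */
        cur->next = calloc(1, sizeof(*cur->next));
        if (cur->next == NULL) {
            ret = -1;
            goto error;
        }
        cur = cur->next;
    }
    return 0;

error:
    allocated = head->next;            /* skip the head: the caller owns it */
    while (allocated != NULL) {
        prev = allocated;
        allocated = allocated->next;
        memset(prev, 0, sizeof(*prev));
        free(prev);
    }
    memset(head, 0, sizeof(*head));
    return ret;
}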
diff --git a/thirdparty/mbedtls/library/x509_crl.c b/thirdparty/mbedtls/library/x509_crl.c
index d2d8042029..b943a8d6da 100644
--- a/thirdparty/mbedtls/library/x509_crl.c
+++ b/thirdparty/mbedtls/library/x509_crl.c
@@ -1,5 +1,5 @@
/*
- * X.509 Certidicate Revocation List (CRL) parsing
+ * X.509 Certificate Revocation List (CRL) parsing
*
* Copyright The Mbed TLS Contributors
* SPDX-License-Identifier: Apache-2.0
@@ -42,15 +42,7 @@
#include "mbedtls/pem.h"
#endif
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdlib.h>
-#include <stdio.h>
-#define mbedtls_free free
-#define mbedtls_calloc calloc
-#define mbedtls_snprintf snprintf
-#endif
#if defined(MBEDTLS_HAVE_TIME)
#if defined(_WIN32) && !defined(EFIX64) && !defined(EFI32)
diff --git a/thirdparty/mbedtls/library/x509_crt.c b/thirdparty/mbedtls/library/x509_crt.c
index 96477e4c9d..4361f43ed0 100644
--- a/thirdparty/mbedtls/library/x509_crt.c
+++ b/thirdparty/mbedtls/library/x509_crt.c
@@ -49,15 +49,7 @@
#include "mbedtls/psa_util.h"
#endif
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#include <stdlib.h>
-#define mbedtls_free free
-#define mbedtls_calloc calloc
-#define mbedtls_snprintf snprintf
-#endif
#if defined(MBEDTLS_THREADING_C)
#include "mbedtls/threading.h"
@@ -90,6 +82,7 @@
#include <sys/types.h>
#include <sys/stat.h>
#include <dirent.h>
+#include <errno.h>
#endif /* !_WIN32 || EFIX64 || EFI32 */
#endif
@@ -1278,9 +1271,12 @@ static int x509_crt_parse_der_core( mbedtls_x509_crt *crt,
}
}
+ int extensions_allowed = 1;
#if !defined(MBEDTLS_X509_ALLOW_EXTENSIONS_NON_V3)
- if( crt->version == 3 )
+ if( crt->version != 3 )
+ extensions_allowed = 0;
#endif
+ if( extensions_allowed )
{
ret = x509_get_crt_ext( &p, end, crt, cb, p_ctx );
if( ret != 0 )
@@ -1668,8 +1664,22 @@ cleanup:
}
else if( stat( entry_name, &sb ) == -1 )
{
- ret = MBEDTLS_ERR_X509_FILE_IO_ERROR;
- goto cleanup;
+ if( errno == ENOENT )
+ {
+ /* Broken symbolic link - ignore this entry.
+ stat(2) will return this error for either (a) a dangling
+ symlink or (b) a missing file.
+ Given that we have just obtained the filename from readdir,
+ assume that it does exist and therefore treat this as a
+ dangling symlink. */
+ continue;
+ }
+ else
+ {
+ /* Some other file error; report the error. */
+ ret = MBEDTLS_ERR_X509_FILE_IO_ERROR;
+ goto cleanup;
+ }
}
if( !S_ISREG( sb.st_mode ) )
@@ -1798,6 +1808,7 @@ static int x509_info_subject_alt_name( char **buf, size_t *size,
const char *prefix )
{
int ret = MBEDTLS_ERR_ERROR_CORRUPTION_DETECTED;
+ size_t i;
size_t n = *size;
char *p = *buf;
const mbedtls_x509_sequence *cur = subject_alt_name;
@@ -1850,18 +1861,11 @@ static int x509_info_subject_alt_name( char **buf, size_t *size,
ret = mbedtls_snprintf( p, n, "\n%s hardware serial number : ", prefix );
MBEDTLS_X509_SAFE_SNPRINTF;
- if( other_name->value.hardware_module_name.val.len >= n )
+ for( i = 0; i < other_name->value.hardware_module_name.val.len; i++ )
{
- *p = '\0';
- return( MBEDTLS_ERR_X509_BUFFER_TOO_SMALL );
+ ret = mbedtls_snprintf( p, n, "%02X", other_name->value.hardware_module_name.val.p[i] );
+ MBEDTLS_X509_SAFE_SNPRINTF;
}
-
- memcpy( p, other_name->value.hardware_module_name.val.p,
- other_name->value.hardware_module_name.val.len );
- p += other_name->value.hardware_module_name.val.len;
-
- n -= other_name->value.hardware_module_name.val.len;
-
}/* MBEDTLS_OID_ON_HW_MODULE_NAME */
}
break;
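The x509_crt.c directory-loading hunk above stops treating every stat() failure as an I/O error: ENOENT is assumed to be a dangling symlink (the name was just read from the directory, so it existed moments ago) and the entry is skipped. A simplified, POSIX-only sketch of that scan loop, separate from the mbedTLS code:

#include <dirent.h>
#include <errno.h>
#include <stdio.h>
#include <sys/stat.h>

static int scan_dir(const char *path)
{
    DIR *dir = opendir(path);
    struct dirent *entry;
    struct stat sb;
    char name[1024];
    int ret = 0;

    if (dir == NULL)
        return -1;

    while ((entry = readdir(dir)) != NULL) {
        snprintf(name, sizeof(name), "%s/%s", path, entry->d_name);
        if (stat(name, &sb) == -1) {
            if (errno == ENOENT)
                continue;          /* dangling symlink: ignore this entry */
            ret = -1;              /* some other file error: report it */
            break;
        }
        if (!S_ISREG(sb.st_mode))
            continue;              /* only regular files are candidates */
        printf("would parse: %s\n", name);
    }

    closedir(dir);
    return ret;
}

int main(void)
{
    return scan_dir(".") == 0 ? 0 : 1;
}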
diff --git a/thirdparty/mbedtls/library/x509_csr.c b/thirdparty/mbedtls/library/x509_csr.c
index e259410d07..1a22b77086 100644
--- a/thirdparty/mbedtls/library/x509_csr.c
+++ b/thirdparty/mbedtls/library/x509_csr.c
@@ -42,15 +42,7 @@
#include "mbedtls/pem.h"
#endif
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdlib.h>
-#include <stdio.h>
-#define mbedtls_free free
-#define mbedtls_calloc calloc
-#define mbedtls_snprintf snprintf
-#endif
#if defined(MBEDTLS_FS_IO) || defined(EFIX64) || defined(EFI32)
#include <stdio.h>
diff --git a/thirdparty/mbedtls/library/x509write_csr.c b/thirdparty/mbedtls/library/x509write_csr.c
index afda950341..707dd001f0 100644
--- a/thirdparty/mbedtls/library/x509write_csr.c
+++ b/thirdparty/mbedtls/library/x509write_csr.c
@@ -44,13 +44,7 @@
#include "mbedtls/pem.h"
#endif
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdlib.h>
-#define mbedtls_calloc calloc
-#define mbedtls_free free
-#endif
void mbedtls_x509write_csr_init( mbedtls_x509write_csr *ctx )
{
diff --git a/thirdparty/mbedtls/library/xtea.c b/thirdparty/mbedtls/library/xtea.c
index 77f6cb6f67..28e6972aa8 100644
--- a/thirdparty/mbedtls/library/xtea.c
+++ b/thirdparty/mbedtls/library/xtea.c
@@ -1,5 +1,5 @@
/*
- * An 32-bit implementation of the XTEA algorithm
+ * A 32-bit implementation of the XTEA algorithm
*
* Copyright The Mbed TLS Contributors
* SPDX-License-Identifier: Apache-2.0
@@ -26,14 +26,7 @@
#include <string.h>
-#if defined(MBEDTLS_SELF_TEST)
-#if defined(MBEDTLS_PLATFORM_C)
#include "mbedtls/platform.h"
-#else
-#include <stdio.h>
-#define mbedtls_printf printf
-#endif /* MBEDTLS_PLATFORM_C */
-#endif /* MBEDTLS_SELF_TEST */
#if !defined(MBEDTLS_XTEA_ALT)
diff --git a/thirdparty/mbedtls/patches/1453.diff b/thirdparty/mbedtls/patches/1453.diff
index b1c9c43ed2..4a7ca1570b 100644
--- a/thirdparty/mbedtls/patches/1453.diff
+++ b/thirdparty/mbedtls/patches/1453.diff
@@ -1,8 +1,8 @@
-diff --git a/library/entropy_poll.c b/library/entropy_poll.c
-index 4556f88a5..ba56b70f7 100644
---- a/library/entropy_poll.c
-+++ b/library/entropy_poll.c
-@@ -61,28 +61,43 @@
+diff --git a/thirdparty/mbedtls/library/entropy_poll.c b/thirdparty/mbedtls/library/entropy_poll.c
+index a858c1892b..69ac29e4f7 100644
+--- a/thirdparty/mbedtls/library/entropy_poll.c
++++ b/thirdparty/mbedtls/library/entropy_poll.c
+@@ -54,28 +54,43 @@
#define _WIN32_WINNT 0x0400
#endif
#include <windows.h>
@@ -53,12 +53,12 @@ index 4556f88a5..ba56b70f7 100644
*olen = len;
return( 0 );
-diff --git a/library/x509_crt.c b/library/x509_crt.c
-index 76558342e..35a134950 100644
---- a/library/x509_crt.c
-+++ b/library/x509_crt.c
-@@ -65,6 +65,19 @@
-
+diff --git a/thirdparty/mbedtls/library/x509_crt.c b/thirdparty/mbedtls/library/x509_crt.c
+index def1414eca..4361f43ed0 100644
+--- a/thirdparty/mbedtls/library/x509_crt.c
++++ b/thirdparty/mbedtls/library/x509_crt.c
+@@ -58,6 +58,19 @@
+ #if defined(MBEDTLS_HAVE_TIME)
#if defined(_WIN32) && !defined(EFIX64) && !defined(EFI32)
#include <windows.h>
+#if defined(_MSC_VER) && _MSC_VER <= 1600
@@ -77,7 +77,7 @@ index 76558342e..35a134950 100644
#else
#include <time.h>
#endif
-@@ -1278,6 +1291,7 @@ int mbedtls_x509_crt_parse_path( mbedtls_x509_crt *chain, const char *path )
+@@ -1549,6 +1562,7 @@ int mbedtls_x509_crt_parse_path( mbedtls_x509_crt *chain, const char *path )
char filename[MAX_PATH];
char *p;
size_t len = strlen( path );
@@ -85,7 +85,7 @@ index 76558342e..35a134950 100644
WIN32_FIND_DATAW file_data;
HANDLE hFind;
-@@ -1292,7 +1306,18 @@ int mbedtls_x509_crt_parse_path( mbedtls_x509_crt *chain, const char *path )
+@@ -1563,7 +1577,18 @@ int mbedtls_x509_crt_parse_path( mbedtls_x509_crt *chain, const char *path )
p = filename + len;
filename[len++] = '*';
@@ -105,7 +105,7 @@ index 76558342e..35a134950 100644
MAX_PATH - 3 );
if( w_ret == 0 )
return( MBEDTLS_ERR_X509_BAD_INPUT_DATA );
-@@ -1309,8 +1334,11 @@ int mbedtls_x509_crt_parse_path( mbedtls_x509_crt *chain, const char *path )
+@@ -1580,8 +1605,11 @@ int mbedtls_x509_crt_parse_path( mbedtls_x509_crt *chain, const char *path )
if( file_data.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY )
continue;
diff --git a/thirdparty/mbedtls/patches/windows-arm64-hardclock.diff b/thirdparty/mbedtls/patches/windows-arm64-hardclock.diff
new file mode 100644
index 0000000000..c7c5a7e282
--- /dev/null
+++ b/thirdparty/mbedtls/patches/windows-arm64-hardclock.diff
@@ -0,0 +1,16 @@
+diff --git a/thirdparty/mbedtls/library/timing.c b/thirdparty/mbedtls/library/timing.c
+index 6c14a4fd01..151292e325 100644
+--- a/thirdparty/mbedtls/library/timing.c
++++ b/thirdparty/mbedtls/library/timing.c
+@@ -190,8 +190,10 @@ unsigned long mbedtls_timing_hardclock( void )
+ #endif /* !HAVE_HARDCLOCK && MBEDTLS_HAVE_ASM &&
+ __GNUC__ && __ia64__ */
+
+-#if !defined(HAVE_HARDCLOCK) && defined(_MSC_VER) && \
++// -- GODOT start --
++#if !defined(HAVE_HARDCLOCK) && defined(_WIN32) && \
+ !defined(EFIX64) && !defined(EFI32)
++// -- GODOT end --
+
+ #define HAVE_HARDCLOCK
+
diff --git a/thirdparty/miniupnpc/LICENSE b/thirdparty/miniupnpc/LICENSE
index fe9118c07e..67ff3bb627 100644
--- a/thirdparty/miniupnpc/LICENSE
+++ b/thirdparty/miniupnpc/LICENSE
@@ -1,26 +1,29 @@
-MiniUPnP Project
-Copyright (c) 2005-2020, Thomas BERNARD
+BSD 3-Clause License
+
+Copyright (c) 2005-2022, Thomas BERNARD
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- * Redistributions of source code must retain the above copyright notice,
- this list of conditions and the following disclaimer.
- * Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
- * The name of the author may not be used to endorse or promote products
- derived from this software without specific prior written permission.
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
-LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
-CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-POSSIBILITY OF SUCH DAMAGE.
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/thirdparty/miniupnpc/include/miniupnpc.h b/thirdparty/miniupnpc/include/miniupnpc.h
index a10bd950a8..75fb8b702d 100644
--- a/thirdparty/miniupnpc/include/miniupnpc.h
+++ b/thirdparty/miniupnpc/include/miniupnpc.h
@@ -1,9 +1,9 @@
-/* $Id: miniupnpc.h,v 1.59 2021/09/28 21:39:17 nanard Exp $ */
+/* $Id: miniupnpc.h,v 1.61 2022/10/21 21:15:02 nanard Exp $ */
/* vim: tabstop=4 shiftwidth=4 noexpandtab
* Project: miniupnp
- * http://miniupnp.free.fr/
+ * http://miniupnp.free.fr/ or https://miniupnp.tuxfamily.org/
* Author: Thomas Bernard
- * Copyright (c) 2005-2021 Thomas Bernard
+ * Copyright (c) 2005-2022 Thomas Bernard
* This software is subjects to the conditions detailed
* in the LICENCE file provided within this distribution */
#ifndef MINIUPNPC_H_INCLUDED
@@ -20,7 +20,7 @@
#define UPNPDISCOVER_MEMORY_ERROR (-102)
/* versions : */
-#define MINIUPNPC_VERSION "2.2.3"
+#define MINIUPNPC_VERSION "2.2.4"
#define MINIUPNPC_API_VERSION 17
/* Source port:
diff --git a/thirdparty/miniupnpc/src/miniupnpcstrings.h b/thirdparty/miniupnpc/src/miniupnpcstrings.h
index eefbc8dbdd..25abc1016e 100644
--- a/thirdparty/miniupnpc/src/miniupnpcstrings.h
+++ b/thirdparty/miniupnpc/src/miniupnpcstrings.h
@@ -4,7 +4,7 @@
#include "core/version.h"
#define OS_STRING VERSION_NAME "/1.0"
-#define MINIUPNPC_VERSION_STRING "2.2.3"
+#define MINIUPNPC_VERSION_STRING "2.2.4"
#if 0
/* according to "UPnP Device Architecture 1.0" */
diff --git a/thirdparty/miniupnpc/src/minixml.c b/thirdparty/miniupnpc/src/minixml.c
index ed2d3c759c..935ec443e7 100644
--- a/thirdparty/miniupnpc/src/minixml.c
+++ b/thirdparty/miniupnpc/src/minixml.c
@@ -1,4 +1,4 @@
-/* $Id: minixml.c,v 1.10 2012/03/05 19:42:47 nanard Exp $ */
+/* $Id: minixml.c,v 1.12 2017/12/12 11:17:40 nanard Exp $ */
/* vim: tabstop=4 shiftwidth=4 noexpandtab
* minixml.c : the minimum size a xml parser can be ! */
/* Project : miniupnp
diff --git a/thirdparty/minizip/MiniZip_info.txt b/thirdparty/minizip/MiniZip64_info.txt
index 57d7152420..57d7152420 100644
--- a/thirdparty/minizip/MiniZip_info.txt
+++ b/thirdparty/minizip/MiniZip64_info.txt
diff --git a/thirdparty/minizip/crypt.h b/thirdparty/minizip/crypt.h
index 9da15373d8..1cc41f19d7 100644
--- a/thirdparty/minizip/crypt.h
+++ b/thirdparty/minizip/crypt.h
@@ -85,7 +85,7 @@ static void init_keys(const char* passwd,unsigned long* pkeys,const z_crc_t* pcr
#define RAND_HEAD_LEN 12
/* "last resort" source for second part of crypt seed pattern */
# ifndef ZCR_SEED2
-# define ZCR_SEED2 3141592654L /* use PI as default pattern */
+# define ZCR_SEED2 3141592654UL /* use PI as default pattern */
# endif
static unsigned crypthead(const char* passwd, /* password string */
diff --git a/thirdparty/minizip/ioapi.c b/thirdparty/minizip/ioapi.c
index db4c33b4b9..b50db35ac1 100644
--- a/thirdparty/minizip/ioapi.c
+++ b/thirdparty/minizip/ioapi.c
@@ -101,9 +101,9 @@ static int ZCALLBACK ferror_file_func OF((voidpf opaque, voidpf stream));
static voidpf ZCALLBACK fopen_file_func (voidpf opaque, const char* filename, int mode)
{
- (void)opaque;
FILE* file = NULL;
const char* mode_fopen = NULL;
+ (void)opaque;
if ((mode & ZLIB_FILEFUNC_MODE_READWRITEFILTER)==ZLIB_FILEFUNC_MODE_READ)
mode_fopen = "rb";
else
@@ -120,9 +120,9 @@ static voidpf ZCALLBACK fopen_file_func (voidpf opaque, const char* filename, in
static voidpf ZCALLBACK fopen64_file_func (voidpf opaque, const void* filename, int mode)
{
- (void)opaque;
FILE* file = NULL;
const char* mode_fopen = NULL;
+ (void)opaque;
if ((mode & ZLIB_FILEFUNC_MODE_READWRITEFILTER)==ZLIB_FILEFUNC_MODE_READ)
mode_fopen = "rb";
else
@@ -140,24 +140,24 @@ static voidpf ZCALLBACK fopen64_file_func (voidpf opaque, const void* filename,
static uLong ZCALLBACK fread_file_func (voidpf opaque, voidpf stream, void* buf, uLong size)
{
- (void)opaque;
uLong ret;
+ (void)opaque;
ret = (uLong)fread(buf, 1, (size_t)size, (FILE *)stream);
return ret;
}
static uLong ZCALLBACK fwrite_file_func (voidpf opaque, voidpf stream, const void* buf, uLong size)
{
- (void)opaque;
uLong ret;
+ (void)opaque;
ret = (uLong)fwrite(buf, 1, (size_t)size, (FILE *)stream);
return ret;
}
static long ZCALLBACK ftell_file_func (voidpf opaque, voidpf stream)
{
- (void)opaque;
long ret;
+ (void)opaque;
ret = ftell((FILE *)stream);
return ret;
}
@@ -165,17 +165,17 @@ static long ZCALLBACK ftell_file_func (voidpf opaque, voidpf stream)
static ZPOS64_T ZCALLBACK ftell64_file_func (voidpf opaque, voidpf stream)
{
- (void)opaque;
ZPOS64_T ret;
+ (void)opaque;
ret = (ZPOS64_T)FTELLO_FUNC((FILE *)stream);
return ret;
}
static long ZCALLBACK fseek_file_func (voidpf opaque, voidpf stream, uLong offset, int origin)
{
- (void)opaque;
int fseek_origin=0;
long ret;
+ (void)opaque;
switch (origin)
{
case ZLIB_FILEFUNC_SEEK_CUR :
@@ -197,9 +197,9 @@ static long ZCALLBACK fseek_file_func (voidpf opaque, voidpf stream, uLong offs
static long ZCALLBACK fseek64_file_func (voidpf opaque, voidpf stream, ZPOS64_T offset, int origin)
{
- (void)opaque;
int fseek_origin=0;
long ret;
+ (void)opaque;
switch (origin)
{
case ZLIB_FILEFUNC_SEEK_CUR :
@@ -215,7 +215,7 @@ static long ZCALLBACK fseek64_file_func (voidpf opaque, voidpf stream, ZPOS64_T
}
ret = 0;
- if(FSEEKO_FUNC((FILE *)stream, (long)offset, fseek_origin) != 0)
+ if(FSEEKO_FUNC((FILE *)stream, (z_off_t)offset, fseek_origin) != 0)
ret = -1;
return ret;
@@ -224,16 +224,16 @@ static long ZCALLBACK fseek64_file_func (voidpf opaque, voidpf stream, ZPOS64_T
static int ZCALLBACK fclose_file_func (voidpf opaque, voidpf stream)
{
- (void)opaque;
int ret;
+ (void)opaque;
ret = fclose((FILE *)stream);
return ret;
}
static int ZCALLBACK ferror_file_func (voidpf opaque, voidpf stream)
{
- (void)opaque;
int ret;
+ (void)opaque;
ret = ferror((FILE *)stream);
return ret;
}
diff --git a/thirdparty/minizip/ioapi.h b/thirdparty/minizip/ioapi.h
index e9e5899852..083062ffe6 100644
--- a/thirdparty/minizip/ioapi.h
+++ b/thirdparty/minizip/ioapi.h
@@ -66,7 +66,7 @@
#define ftello64 ftell
#define fseeko64 fseek
#else
-#ifdef __FreeBSD__
+#if defined(__FreeBSD__) || defined(__OpenBSD__) || defined(__NetBSD__)
#define fopen64 fopen
#define ftello64 ftello
#define fseeko64 fseeko
diff --git a/thirdparty/minizip/patches/godot-seek.patch b/thirdparty/minizip/patches/godot-seek.patch
index 24838c252a..279124f3f0 100644
--- a/thirdparty/minizip/patches/godot-seek.patch
+++ b/thirdparty/minizip/patches/godot-seek.patch
@@ -1,5 +1,5 @@
diff --git a/thirdparty/minizip/ioapi.c b/thirdparty/minizip/ioapi.c
-index d666e5a228..db4c33b4b9 100644
+index 814a6fd38c..b50db35ac1 100644
--- a/thirdparty/minizip/ioapi.c
+++ b/thirdparty/minizip/ioapi.c
@@ -80,8 +80,15 @@ void fill_zlib_filefunc64_32_def_from_filefunc32(zlib_filefunc64_32_def* p_filef
@@ -26,7 +26,7 @@ index d666e5a228..db4c33b4b9 100644
+*/
+/* GODOT end */
diff --git a/thirdparty/minizip/ioapi.h b/thirdparty/minizip/ioapi.h
-index 114bfab762..2f24a5b6a0 100644
+index ae9ca7e833..6c73fc4ec3 100644
--- a/thirdparty/minizip/ioapi.h
+++ b/thirdparty/minizip/ioapi.h
@@ -155,6 +155,10 @@ typedef struct zlib_filefunc_def_s
@@ -52,7 +52,7 @@ index 114bfab762..2f24a5b6a0 100644
void fill_fopen64_filefunc OF((zlib_filefunc64_def* pzlib_filefunc_def));
diff --git a/thirdparty/minizip/unzip.c b/thirdparty/minizip/unzip.c
-index 5e12e47474..3b191e827c 100644
+index 3036b470b7..e83aff2773 100644
--- a/thirdparty/minizip/unzip.c
+++ b/thirdparty/minizip/unzip.c
@@ -157,6 +157,9 @@ typedef struct
@@ -122,7 +122,7 @@ index 5e12e47474..3b191e827c 100644
}
while(acc < file_info.size_file_extra)
-@@ -1575,8 +1604,10 @@ extern int ZEXPORT unzOpenCurrentFile3 (unzFile file, int* method,
+@@ -1576,8 +1605,10 @@ extern int ZEXPORT unzOpenCurrentFile3 (unzFile file, int* method,
}
else if ((s->cur_file_info.compression_method==Z_DEFLATED) && (!raw))
{
@@ -135,7 +135,7 @@ index 5e12e47474..3b191e827c 100644
pfile_in_zip_read_info->stream.opaque = (voidpf)0;
pfile_in_zip_read_info->stream.next_in = 0;
pfile_in_zip_read_info->stream.avail_in = 0;
-@@ -1608,6 +1639,9 @@ extern int ZEXPORT unzOpenCurrentFile3 (unzFile file, int* method,
+@@ -1610,6 +1641,9 @@ extern int ZEXPORT unzOpenCurrentFile3 (unzFile file, int* method,
iSizeVar;
pfile_in_zip_read_info->stream.avail_in = (uInt)0;
@@ -145,7 +145,7 @@ index 5e12e47474..3b191e827c 100644
s->pfile_in_zip_read = pfile_in_zip_read_info;
s->encrypted = 0;
-@@ -1638,6 +1672,85 @@ extern int ZEXPORT unzOpenCurrentFile3 (unzFile file, int* method,
+@@ -1640,6 +1674,85 @@ extern int ZEXPORT unzOpenCurrentFile3 (unzFile file, int* method,
return UNZ_OK;
}
@@ -261,7 +261,7 @@ index 6f95e94d75..71a7d89692 100644
extern ZPOS64_T ZEXPORT unztell64 OF((unzFile file));
diff --git a/thirdparty/minizip/zip.c b/thirdparty/minizip/zip.c
-index 4e611e1163..6d1c26d9f8 100644
+index 66d693f85a..ddcc14132b 100644
--- a/thirdparty/minizip/zip.c
+++ b/thirdparty/minizip/zip.c
@@ -854,9 +854,11 @@ extern zipFile ZEXPORT zipOpen3 (const void *pathname, int append, zipcharpc* gl
diff --git a/thirdparty/minizip/patches/unbreak-gentoo.patch b/thirdparty/minizip/patches/unbreak-gentoo.patch
index 9292e32ac6..65230cd4c7 100644
--- a/thirdparty/minizip/patches/unbreak-gentoo.patch
+++ b/thirdparty/minizip/patches/unbreak-gentoo.patch
@@ -1,8 +1,8 @@
diff --git a/thirdparty/minizip/ioapi.h b/thirdparty/minizip/ioapi.h
-index f25ab6464..6043d34ce 100644
+index 6c73fc4ec3..083062ffe6 100644
--- a/thirdparty/minizip/ioapi.h
+++ b/thirdparty/minizip/ioapi.h
-@@ -44,6 +44,22 @@
+@@ -45,6 +45,22 @@
#include <stdlib.h>
#include "zlib.h"
diff --git a/thirdparty/minizip/unzip.c b/thirdparty/minizip/unzip.c
index 3b191e827c..e83aff2773 100644
--- a/thirdparty/minizip/unzip.c
+++ b/thirdparty/minizip/unzip.c
@@ -112,7 +112,7 @@
# define ALLOC(size) (malloc(size))
#endif
#ifndef TRYFREE
-# define TRYFREE(p) {if (p) free(p);}
+# define TRYFREE(p) { free(p);}
#endif
#define SIZECENTRALDIRITEM (0x2e)
@@ -1595,6 +1595,7 @@ extern int ZEXPORT unzOpenCurrentFile3 (unzFile file, int* method,
pfile_in_zip_read_info->stream_initialised=Z_BZIP2ED;
else
{
+ TRYFREE(pfile_in_zip_read_info->read_buffer);
TRYFREE(pfile_in_zip_read_info);
return err;
}
@@ -1617,6 +1618,7 @@ extern int ZEXPORT unzOpenCurrentFile3 (unzFile file, int* method,
pfile_in_zip_read_info->stream_initialised=Z_DEFLATED;
else
{
+ TRYFREE(pfile_in_zip_read_info->read_buffer);
TRYFREE(pfile_in_zip_read_info);
return err;
}
diff --git a/thirdparty/minizip/zip.c b/thirdparty/minizip/zip.c
index 6d1c26d9f8..ddcc14132b 100644
--- a/thirdparty/minizip/zip.c
+++ b/thirdparty/minizip/zip.c
@@ -1475,11 +1475,6 @@ extern int ZEXPORT zipWriteInFileInZip (zipFile file,const void* buf,unsigned in
{
uLong uTotalOutBefore = zi->ci.stream.total_out;
err=deflate(&zi->ci.stream, Z_NO_FLUSH);
- if(uTotalOutBefore > zi->ci.stream.total_out)
- {
- int bBreak = 0;
- bBreak++;
- }
zi->ci.pos_in_buffered_data += (uInt)(zi->ci.stream.total_out - uTotalOutBefore) ;
}
@@ -1963,7 +1958,7 @@ extern int ZEXPORT zipRemoveExtraInfoBlock (char* pData, int* dataLen, short sHe
int retVal = ZIP_OK;
- if(pData == NULL || *dataLen < 4)
+ if(pData == NULL || dataLen == NULL || *dataLen < 4)
return ZIP_PARAMERROR;
pNewHeader = (char*)ALLOC((unsigned)*dataLen);
diff --git a/thirdparty/openxr/include/openxr/openxr.h b/thirdparty/openxr/include/openxr/openxr.h
index 6f9b71aa68..3663f9f14d 100644
--- a/thirdparty/openxr/include/openxr/openxr.h
+++ b/thirdparty/openxr/include/openxr/openxr.h
@@ -25,7 +25,7 @@ extern "C" {
((((major) & 0xffffULL) << 48) | (((minor) & 0xffffULL) << 32) | ((patch) & 0xffffffffULL))
// OpenXR current version number.
-#define XR_CURRENT_API_VERSION XR_MAKE_VERSION(1, 0, 25)
+#define XR_CURRENT_API_VERSION XR_MAKE_VERSION(1, 0, 26)
#define XR_VERSION_MAJOR(version) (uint16_t)(((uint64_t)(version) >> 48)& 0xffffULL)
#define XR_VERSION_MINOR(version) (uint16_t)(((uint64_t)(version) >> 32) & 0xffffULL)
@@ -450,6 +450,15 @@ typedef enum XrStructureType {
XR_TYPE_VULKAN_SWAPCHAIN_CREATE_INFO_META = 1000227000,
XR_TYPE_PERFORMANCE_METRICS_STATE_META = 1000232001,
XR_TYPE_PERFORMANCE_METRICS_COUNTER_META = 1000232002,
+ XR_TYPE_SYSTEM_HEADSET_ID_PROPERTIES_META = 1000245000,
+ XR_TYPE_PASSTHROUGH_CREATE_INFO_HTC = 1000317001,
+ XR_TYPE_PASSTHROUGH_COLOR_HTC = 1000317002,
+ XR_TYPE_PASSTHROUGH_MESH_TRANSFORM_INFO_HTC = 1000317003,
+ XR_TYPE_COMPOSITION_LAYER_PASSTHROUGH_HTC = 1000317004,
+ XR_TYPE_FOVEATION_APPLY_INFO_HTC = 1000318000,
+ XR_TYPE_FOVEATION_DYNAMIC_MODE_INFO_HTC = 1000318001,
+ XR_TYPE_FOVEATION_CUSTOM_MODE_INFO_HTC = 1000318002,
+ XR_TYPE_ACTIVE_ACTION_SET_PRIORITIES_EXT = 1000373000,
XR_TYPE_GRAPHICS_BINDING_VULKAN2_KHR = XR_TYPE_GRAPHICS_BINDING_VULKAN_KHR,
XR_TYPE_SWAPCHAIN_IMAGE_VULKAN2_KHR = XR_TYPE_SWAPCHAIN_IMAGE_VULKAN_KHR,
XR_TYPE_GRAPHICS_REQUIREMENTS_VULKAN2_KHR = XR_TYPE_GRAPHICS_REQUIREMENTS_VULKAN_KHR,
@@ -536,6 +545,7 @@ typedef enum XrObjectType {
XR_OBJECT_TYPE_PASSTHROUGH_LAYER_FB = 1000118002,
XR_OBJECT_TYPE_GEOMETRY_INSTANCE_FB = 1000118004,
XR_OBJECT_TYPE_SPATIAL_ANCHOR_STORE_CONNECTION_MSFT = 1000142000,
+ XR_OBJECT_TYPE_PASSTHROUGH_HTC = 1000317000,
XR_OBJECT_TYPE_MAX_ENUM = 0x7FFFFFFF
} XrObjectType;
typedef XrFlags64 XrInstanceCreateFlags;
@@ -1631,7 +1641,7 @@ typedef struct XrBindingModificationsKHR {
#define XR_EXT_performance_settings 1
-#define XR_EXT_performance_settings_SPEC_VERSION 3
+#define XR_EXT_performance_settings_SPEC_VERSION 4
#define XR_EXT_PERFORMANCE_SETTINGS_EXTENSION_NAME "XR_EXT_performance_settings"
typedef enum XrPerfSettingsDomainEXT {
@@ -3007,7 +3017,7 @@ XRAPI_ATTR XrResult XRAPI_CALL xrRequestDisplayRefreshRateFB(
#define XR_HTCX_vive_tracker_interaction 1
-#define XR_HTCX_vive_tracker_interaction_SPEC_VERSION 1
+#define XR_HTCX_vive_tracker_interaction_SPEC_VERSION 2
#define XR_HTCX_VIVE_TRACKER_INTERACTION_EXTENSION_NAME "XR_HTCX_vive_tracker_interaction"
typedef struct XrViveTrackerPathsHTCX {
XrStructureType type;
@@ -3043,7 +3053,7 @@ XRAPI_ATTR XrResult XRAPI_CALL xrEnumerateViveTrackerPathsHTCX(
#define XR_FACIAL_EXPRESSION_LIP_COUNT_HTC 37
XR_DEFINE_HANDLE(XrFacialTrackerHTC)
-#define XR_HTC_facial_tracking_SPEC_VERSION 1
+#define XR_HTC_facial_tracking_SPEC_VERSION 2
#define XR_HTC_FACIAL_TRACKING_EXTENSION_NAME "XR_HTC_facial_tracking"
typedef enum XrEyeExpressionHTC {
@@ -4508,7 +4518,7 @@ typedef struct XrCompositionLayerSettingsFB {
#define XR_META_performance_metrics 1
-#define XR_META_performance_metrics_SPEC_VERSION 1
+#define XR_META_performance_metrics_SPEC_VERSION 2
#define XR_META_PERFORMANCE_METRICS_EXTENSION_NAME "XR_META_performance_metrics"
typedef enum XrPerformanceMetricsCounterUnitMETA {
@@ -4570,10 +4580,167 @@ XRAPI_ATTR XrResult XRAPI_CALL xrQueryPerformanceMetricsCounterMETA(
#endif /* !XR_NO_PROTOTYPES */
+#define XR_META_headset_id 1
+#define XR_META_headset_id_SPEC_VERSION 1
+#define XR_META_HEADSET_ID_EXTENSION_NAME "XR_META_headset_id"
+// XrSystemHeadsetIdPropertiesMETA extends XrSystemProperties
+typedef struct XrSystemHeadsetIdPropertiesMETA {
+ XrStructureType type;
+ void* XR_MAY_ALIAS next;
+ XrUuidEXT id;
+} XrSystemHeadsetIdPropertiesMETA;
+
+
+
#define XR_EXT_uuid 1
#define XR_EXT_uuid_SPEC_VERSION 1
#define XR_EXT_UUID_EXTENSION_NAME "XR_EXT_uuid"
+
+#define XR_HTC_passthrough 1
+XR_DEFINE_HANDLE(XrPassthroughHTC)
+#define XR_HTC_passthrough_SPEC_VERSION 1
+#define XR_HTC_PASSTHROUGH_EXTENSION_NAME "XR_HTC_passthrough"
+
+typedef enum XrPassthroughFormHTC {
+ XR_PASSTHROUGH_FORM_PLANAR_HTC = 0,
+ XR_PASSTHROUGH_FORM_PROJECTED_HTC = 1,
+ XR_PASSTHROUGH_FORM_MAX_ENUM_HTC = 0x7FFFFFFF
+} XrPassthroughFormHTC;
+typedef struct XrPassthroughCreateInfoHTC {
+ XrStructureType type;
+ const void* XR_MAY_ALIAS next;
+ XrPassthroughFormHTC form;
+} XrPassthroughCreateInfoHTC;
+
+typedef struct XrPassthroughColorHTC {
+ XrStructureType type;
+ const void* XR_MAY_ALIAS next;
+ float alpha;
+} XrPassthroughColorHTC;
+
+// XrPassthroughMeshTransformInfoHTC extends XrCompositionLayerPassthroughHTC
+typedef struct XrPassthroughMeshTransformInfoHTC {
+ XrStructureType type;
+ const void* XR_MAY_ALIAS next;
+ uint32_t vertexCount;
+ const XrVector3f* vertices;
+ uint32_t indexCount;
+ const uint32_t* indices;
+ XrSpace baseSpace;
+ XrTime time;
+ XrPosef pose;
+ XrVector3f scale;
+} XrPassthroughMeshTransformInfoHTC;
+
+typedef struct XrCompositionLayerPassthroughHTC {
+ XrStructureType type;
+ const void* XR_MAY_ALIAS next;
+ XrCompositionLayerFlags layerFlags;
+ XrSpace space;
+ XrPassthroughHTC passthrough;
+ XrPassthroughColorHTC color;
+} XrCompositionLayerPassthroughHTC;
+
+typedef XrResult (XRAPI_PTR *PFN_xrCreatePassthroughHTC)(XrSession session, const XrPassthroughCreateInfoHTC* createInfo, XrPassthroughHTC* passthrough);
+typedef XrResult (XRAPI_PTR *PFN_xrDestroyPassthroughHTC)(XrPassthroughHTC passthrough);
+
+#ifndef XR_NO_PROTOTYPES
+#ifdef XR_EXTENSION_PROTOTYPES
+XRAPI_ATTR XrResult XRAPI_CALL xrCreatePassthroughHTC(
+ XrSession session,
+ const XrPassthroughCreateInfoHTC* createInfo,
+ XrPassthroughHTC* passthrough);
+
+XRAPI_ATTR XrResult XRAPI_CALL xrDestroyPassthroughHTC(
+ XrPassthroughHTC passthrough);
+#endif /* XR_EXTENSION_PROTOTYPES */
+#endif /* !XR_NO_PROTOTYPES */
+
+
+#define XR_HTC_foveation 1
+#define XR_HTC_foveation_SPEC_VERSION 1
+#define XR_HTC_FOVEATION_EXTENSION_NAME "XR_HTC_foveation"
+
+typedef enum XrFoveationModeHTC {
+ XR_FOVEATION_MODE_DISABLE_HTC = 0,
+ XR_FOVEATION_MODE_FIXED_HTC = 1,
+ XR_FOVEATION_MODE_DYNAMIC_HTC = 2,
+ XR_FOVEATION_MODE_CUSTOM_HTC = 3,
+ XR_FOVEATION_MODE_MAX_ENUM_HTC = 0x7FFFFFFF
+} XrFoveationModeHTC;
+
+typedef enum XrFoveationLevelHTC {
+ XR_FOVEATION_LEVEL_NONE_HTC = 0,
+ XR_FOVEATION_LEVEL_LOW_HTC = 1,
+ XR_FOVEATION_LEVEL_MEDIUM_HTC = 2,
+ XR_FOVEATION_LEVEL_HIGH_HTC = 3,
+ XR_FOVEATION_LEVEL_MAX_ENUM_HTC = 0x7FFFFFFF
+} XrFoveationLevelHTC;
+typedef XrFlags64 XrFoveationDynamicFlagsHTC;
+
+// Flag bits for XrFoveationDynamicFlagsHTC
+static const XrFoveationDynamicFlagsHTC XR_FOVEATION_DYNAMIC_LEVEL_ENABLED_BIT_HTC = 0x00000001;
+static const XrFoveationDynamicFlagsHTC XR_FOVEATION_DYNAMIC_CLEAR_FOV_ENABLED_BIT_HTC = 0x00000002;
+static const XrFoveationDynamicFlagsHTC XR_FOVEATION_DYNAMIC_FOCAL_CENTER_OFFSET_ENABLED_BIT_HTC = 0x00000004;
+
+typedef struct XrFoveationApplyInfoHTC {
+ XrStructureType type;
+ const void* XR_MAY_ALIAS next;
+ XrFoveationModeHTC mode;
+ uint32_t subImageCount;
+ XrSwapchainSubImage* subImages;
+} XrFoveationApplyInfoHTC;
+
+typedef struct XrFoveationConfigurationHTC {
+ XrFoveationLevelHTC level;
+ float clearFovDegree;
+ XrVector2f focalCenterOffset;
+} XrFoveationConfigurationHTC;
+
+// XrFoveationDynamicModeInfoHTC extends XrFoveationApplyInfoHTC
+typedef struct XrFoveationDynamicModeInfoHTC {
+ XrStructureType type;
+ const void* XR_MAY_ALIAS next;
+ XrFoveationDynamicFlagsHTC dynamicFlags;
+} XrFoveationDynamicModeInfoHTC;
+
+// XrFoveationCustomModeInfoHTC extends XrFoveationApplyInfoHTC
+typedef struct XrFoveationCustomModeInfoHTC {
+ XrStructureType type;
+ const void* XR_MAY_ALIAS next;
+ uint32_t configCount;
+ const XrFoveationConfigurationHTC* configs;
+} XrFoveationCustomModeInfoHTC;
+
+typedef XrResult (XRAPI_PTR *PFN_xrApplyFoveationHTC)(XrSession session, const XrFoveationApplyInfoHTC* applyInfo);
+
+#ifndef XR_NO_PROTOTYPES
+#ifdef XR_EXTENSION_PROTOTYPES
+XRAPI_ATTR XrResult XRAPI_CALL xrApplyFoveationHTC(
+ XrSession session,
+ const XrFoveationApplyInfoHTC* applyInfo);
+#endif /* XR_EXTENSION_PROTOTYPES */
+#endif /* !XR_NO_PROTOTYPES */
+
+
+#define XR_EXT_active_action_set_priority 1
+#define XR_EXT_active_action_set_priority_SPEC_VERSION 1
+#define XR_EXT_ACTIVE_ACTION_SET_PRIORITY_EXTENSION_NAME "XR_EXT_active_action_set_priority"
+typedef struct XrActiveActionSetPriorityEXT {
+ XrActionSet actionSet;
+ uint32_t priorityOverride;
+} XrActiveActionSetPriorityEXT;
+
+// XrActiveActionSetPrioritiesEXT extends XrActionsSyncInfo
+typedef struct XrActiveActionSetPrioritiesEXT {
+ XrStructureType type;
+ const void* XR_MAY_ALIAS next;
+ uint32_t actionSetPriorityCount;
+ const XrActiveActionSetPriorityEXT* actionSetPriorities;
+} XrActiveActionSetPrioritiesEXT;
+
+
#ifdef __cplusplus
}
#endif
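The openxr.h additions above declare the XR_HTC_passthrough extension: a handle type, create info, a per-layer color struct, and PFN_ typedefs (the prototypes are only emitted under XR_EXTENSION_PROTOTYPES). A hedged usage sketch showing how such an extension is typically reached through xrGetInstanceProcAddr; enable_htc_passthrough and the surrounding instance/session setup are assumptions, not part of this header, and error handling is minimal:

#include <openxr/openxr.h>

static XrCompositionLayerPassthroughHTC g_passthrough_layer;

XrResult enable_htc_passthrough(XrInstance instance, XrSession session)
{
    PFN_xrCreatePassthroughHTC xrCreatePassthroughHTC = NULL;
    XrPassthroughHTC passthrough = XR_NULL_HANDLE;
    XrResult result;

    /* Extension entry points are resolved at runtime. */
    result = xrGetInstanceProcAddr(instance, "xrCreatePassthroughHTC",
                                   (PFN_xrVoidFunction *)&xrCreatePassthroughHTC);
    if (XR_FAILED(result) || xrCreatePassthroughHTC == NULL)
        return result;

    XrPassthroughCreateInfoHTC create_info = {
        .type = XR_TYPE_PASSTHROUGH_CREATE_INFO_HTC,
        .form = XR_PASSTHROUGH_FORM_PLANAR_HTC,
    };
    result = xrCreatePassthroughHTC(session, &create_info, &passthrough);
    if (XR_FAILED(result))
        return result;

    /* Layer that would later be submitted via XrFrameEndInfo::layers;
     * space is left XR_NULL_HANDLE here and alpha is fully opaque. */
    g_passthrough_layer = (XrCompositionLayerPassthroughHTC){
        .type = XR_TYPE_COMPOSITION_LAYER_PASSTHROUGH_HTC,
        .space = XR_NULL_HANDLE,
        .passthrough = passthrough,
        .color = { .type = XR_TYPE_PASSTHROUGH_COLOR_HTC, .alpha = 1.0f },
    };
    return XR_SUCCESS;
}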
diff --git a/thirdparty/openxr/include/openxr/openxr_reflection.h b/thirdparty/openxr/include/openxr/openxr_reflection.h
index ac6f452377..1a873c1770 100644
--- a/thirdparty/openxr/include/openxr/openxr_reflection.h
+++ b/thirdparty/openxr/include/openxr/openxr_reflection.h
@@ -351,6 +351,15 @@ XR_ENUM_STR(XrResult);
_(XR_TYPE_VULKAN_SWAPCHAIN_CREATE_INFO_META, 1000227000) \
_(XR_TYPE_PERFORMANCE_METRICS_STATE_META, 1000232001) \
_(XR_TYPE_PERFORMANCE_METRICS_COUNTER_META, 1000232002) \
+ _(XR_TYPE_SYSTEM_HEADSET_ID_PROPERTIES_META, 1000245000) \
+ _(XR_TYPE_PASSTHROUGH_CREATE_INFO_HTC, 1000317001) \
+ _(XR_TYPE_PASSTHROUGH_COLOR_HTC, 1000317002) \
+ _(XR_TYPE_PASSTHROUGH_MESH_TRANSFORM_INFO_HTC, 1000317003) \
+ _(XR_TYPE_COMPOSITION_LAYER_PASSTHROUGH_HTC, 1000317004) \
+ _(XR_TYPE_FOVEATION_APPLY_INFO_HTC, 1000318000) \
+ _(XR_TYPE_FOVEATION_DYNAMIC_MODE_INFO_HTC, 1000318001) \
+ _(XR_TYPE_FOVEATION_CUSTOM_MODE_INFO_HTC, 1000318002) \
+ _(XR_TYPE_ACTIVE_ACTION_SET_PRIORITIES_EXT, 1000373000) \
_(XR_STRUCTURE_TYPE_MAX_ENUM, 0x7FFFFFFF)
#define XR_LIST_ENUM_XrFormFactor(_) \
@@ -426,6 +435,7 @@ XR_ENUM_STR(XrResult);
_(XR_OBJECT_TYPE_PASSTHROUGH_LAYER_FB, 1000118002) \
_(XR_OBJECT_TYPE_GEOMETRY_INSTANCE_FB, 1000118004) \
_(XR_OBJECT_TYPE_SPATIAL_ANCHOR_STORE_CONNECTION_MSFT, 1000142000) \
+ _(XR_OBJECT_TYPE_PASSTHROUGH_HTC, 1000317000) \
_(XR_OBJECT_TYPE_MAX_ENUM, 0x7FFFFFFF)
#define XR_LIST_ENUM_XrAndroidThreadTypeKHR(_) \
@@ -748,6 +758,25 @@ XR_ENUM_STR(XrResult);
_(XR_PERFORMANCE_METRICS_COUNTER_UNIT_HERTZ_META, 4) \
_(XR_PERFORMANCE_METRICS_COUNTER_UNIT_MAX_ENUM_META, 0x7FFFFFFF)
+#define XR_LIST_ENUM_XrPassthroughFormHTC(_) \
+ _(XR_PASSTHROUGH_FORM_PLANAR_HTC, 0) \
+ _(XR_PASSTHROUGH_FORM_PROJECTED_HTC, 1) \
+ _(XR_PASSTHROUGH_FORM_MAX_ENUM_HTC, 0x7FFFFFFF)
+
+#define XR_LIST_ENUM_XrFoveationModeHTC(_) \
+ _(XR_FOVEATION_MODE_DISABLE_HTC, 0) \
+ _(XR_FOVEATION_MODE_FIXED_HTC, 1) \
+ _(XR_FOVEATION_MODE_DYNAMIC_HTC, 2) \
+ _(XR_FOVEATION_MODE_CUSTOM_HTC, 3) \
+ _(XR_FOVEATION_MODE_MAX_ENUM_HTC, 0x7FFFFFFF)
+
+#define XR_LIST_ENUM_XrFoveationLevelHTC(_) \
+ _(XR_FOVEATION_LEVEL_NONE_HTC, 0) \
+ _(XR_FOVEATION_LEVEL_LOW_HTC, 1) \
+ _(XR_FOVEATION_LEVEL_MEDIUM_HTC, 2) \
+ _(XR_FOVEATION_LEVEL_HIGH_HTC, 3) \
+ _(XR_FOVEATION_LEVEL_MAX_ENUM_HTC, 0x7FFFFFFF)
+
#define XR_LIST_BITS_XrInstanceCreateFlags(_)
#define XR_LIST_BITS_XrSessionCreateFlags(_)
@@ -891,6 +920,12 @@ XR_ENUM_STR(XrResult);
_(XR_PERFORMANCE_METRICS_COUNTER_UINT_VALUE_VALID_BIT_META, 0x00000002) \
_(XR_PERFORMANCE_METRICS_COUNTER_FLOAT_VALUE_VALID_BIT_META, 0x00000004) \
+#define XR_LIST_BITS_XrFoveationDynamicFlagsHTC(_) \
+ _(XR_FOVEATION_DYNAMIC_LEVEL_ENABLED_BIT_HTC, 0x00000001) \
+ _(XR_FOVEATION_DYNAMIC_CLEAR_FOV_ENABLED_BIT_HTC, 0x00000002) \
+ _(XR_FOVEATION_DYNAMIC_FOCAL_CENTER_OFFSET_ENABLED_BIT_HTC, 0x00000004) \
+
+/// Calls your macro with the name of each member of XrApiLayerProperties, in order.
#define XR_LIST_STRUCT_XrApiLayerProperties(_) \
_(type) \
_(next) \
@@ -899,12 +934,14 @@ XR_ENUM_STR(XrResult);
_(layerVersion) \
_(description) \
+/// Calls your macro with the name of each member of XrExtensionProperties, in order.
#define XR_LIST_STRUCT_XrExtensionProperties(_) \
_(type) \
_(next) \
_(extensionName) \
_(extensionVersion) \
+/// Calls your macro with the name of each member of XrApplicationInfo, in order.
#define XR_LIST_STRUCT_XrApplicationInfo(_) \
_(applicationName) \
_(applicationVersion) \
@@ -912,6 +949,7 @@ XR_ENUM_STR(XrResult);
_(engineVersion) \
_(apiVersion) \
+/// Calls your macro with the name of each member of XrInstanceCreateInfo, in order.
#define XR_LIST_STRUCT_XrInstanceCreateInfo(_) \
_(type) \
_(next) \
@@ -922,31 +960,37 @@ XR_ENUM_STR(XrResult);
_(enabledExtensionCount) \
_(enabledExtensionNames) \
+/// Calls your macro with the name of each member of XrInstanceProperties, in order.
#define XR_LIST_STRUCT_XrInstanceProperties(_) \
_(type) \
_(next) \
_(runtimeVersion) \
_(runtimeName) \
+/// Calls your macro with the name of each member of XrEventDataBuffer, in order.
#define XR_LIST_STRUCT_XrEventDataBuffer(_) \
_(type) \
_(next) \
_(varying) \
+/// Calls your macro with the name of each member of XrSystemGetInfo, in order.
#define XR_LIST_STRUCT_XrSystemGetInfo(_) \
_(type) \
_(next) \
_(formFactor) \
+/// Calls your macro with the name of each member of XrSystemGraphicsProperties, in order.
#define XR_LIST_STRUCT_XrSystemGraphicsProperties(_) \
_(maxSwapchainImageHeight) \
_(maxSwapchainImageWidth) \
_(maxLayerCount) \
+/// Calls your macro with the name of each member of XrSystemTrackingProperties, in order.
#define XR_LIST_STRUCT_XrSystemTrackingProperties(_) \
_(orientationTracking) \
_(positionTracking) \
+/// Calls your macro with the name of each member of XrSystemProperties, in order.
#define XR_LIST_STRUCT_XrSystemProperties(_) \
_(type) \
_(next) \
@@ -956,17 +1000,20 @@ XR_ENUM_STR(XrResult);
_(graphicsProperties) \
_(trackingProperties) \
+/// Calls your macro with the name of each member of XrSessionCreateInfo, in order.
#define XR_LIST_STRUCT_XrSessionCreateInfo(_) \
_(type) \
_(next) \
_(createFlags) \
_(systemId) \
+/// Calls your macro with the name of each member of XrVector3f, in order.
#define XR_LIST_STRUCT_XrVector3f(_) \
_(x) \
_(y) \
_(z) \
+/// Calls your macro with the name of each member of XrSpaceVelocity, in order.
#define XR_LIST_STRUCT_XrSpaceVelocity(_) \
_(type) \
_(next) \
@@ -974,26 +1021,31 @@ XR_ENUM_STR(XrResult);
_(linearVelocity) \
_(angularVelocity) \
+/// Calls your macro with the name of each member of XrQuaternionf, in order.
#define XR_LIST_STRUCT_XrQuaternionf(_) \
_(x) \
_(y) \
_(z) \
_(w) \
+/// Calls your macro with the name of each member of XrPosef, in order.
#define XR_LIST_STRUCT_XrPosef(_) \
_(orientation) \
_(position) \
+/// Calls your macro with the name of each member of XrReferenceSpaceCreateInfo, in order.
#define XR_LIST_STRUCT_XrReferenceSpaceCreateInfo(_) \
_(type) \
_(next) \
_(referenceSpaceType) \
_(poseInReferenceSpace) \
+/// Calls your macro with the name of each member of XrExtent2Df, in order.
#define XR_LIST_STRUCT_XrExtent2Df(_) \
_(width) \
_(height) \
+/// Calls your macro with the name of each member of XrActionSpaceCreateInfo, in order.
#define XR_LIST_STRUCT_XrActionSpaceCreateInfo(_) \
_(type) \
_(next) \
@@ -1001,18 +1053,21 @@ XR_ENUM_STR(XrResult);
_(subactionPath) \
_(poseInActionSpace) \
+/// Calls your macro with the name of each member of XrSpaceLocation, in order.
#define XR_LIST_STRUCT_XrSpaceLocation(_) \
_(type) \
_(next) \
_(locationFlags) \
_(pose) \
+/// Calls your macro with the name of each member of XrViewConfigurationProperties, in order.
#define XR_LIST_STRUCT_XrViewConfigurationProperties(_) \
_(type) \
_(next) \
_(viewConfigurationType) \
_(fovMutable) \
+/// Calls your macro with the name of each member of XrViewConfigurationView, in order.
#define XR_LIST_STRUCT_XrViewConfigurationView(_) \
_(type) \
_(next) \
@@ -1023,6 +1078,7 @@ XR_ENUM_STR(XrResult);
_(recommendedSwapchainSampleCount) \
_(maxSwapchainSampleCount) \
+/// Calls your macro with the name of each member of XrSwapchainCreateInfo, in order.
#define XR_LIST_STRUCT_XrSwapchainCreateInfo(_) \
_(type) \
_(next) \
@@ -1036,32 +1092,39 @@ XR_ENUM_STR(XrResult);
_(arraySize) \
_(mipCount) \
+/// Calls your macro with the name of each member of XrSwapchainImageBaseHeader, in order.
#define XR_LIST_STRUCT_XrSwapchainImageBaseHeader(_) \
_(type) \
_(next) \
+/// Calls your macro with the name of each member of XrSwapchainImageAcquireInfo, in order.
#define XR_LIST_STRUCT_XrSwapchainImageAcquireInfo(_) \
_(type) \
_(next) \
+/// Calls your macro with the name of each member of XrSwapchainImageWaitInfo, in order.
#define XR_LIST_STRUCT_XrSwapchainImageWaitInfo(_) \
_(type) \
_(next) \
_(timeout) \
+/// Calls your macro with the name of each member of XrSwapchainImageReleaseInfo, in order.
#define XR_LIST_STRUCT_XrSwapchainImageReleaseInfo(_) \
_(type) \
_(next) \
+/// Calls your macro with the name of each member of XrSessionBeginInfo, in order.
#define XR_LIST_STRUCT_XrSessionBeginInfo(_) \
_(type) \
_(next) \
_(primaryViewConfigurationType) \
+/// Calls your macro with the name of each member of XrFrameWaitInfo, in order.
#define XR_LIST_STRUCT_XrFrameWaitInfo(_) \
_(type) \
_(next) \
+/// Calls your macro with the name of each member of XrFrameState, in order.
#define XR_LIST_STRUCT_XrFrameState(_) \
_(type) \
_(next) \
@@ -1069,16 +1132,19 @@ XR_ENUM_STR(XrResult);
_(predictedDisplayPeriod) \
_(shouldRender) \
+/// Calls your macro with the name of each member of XrFrameBeginInfo, in order.
#define XR_LIST_STRUCT_XrFrameBeginInfo(_) \
_(type) \
_(next) \
+/// Calls your macro with the name of each member of XrCompositionLayerBaseHeader, in order.
#define XR_LIST_STRUCT_XrCompositionLayerBaseHeader(_) \
_(type) \
_(next) \
_(layerFlags) \
_(space) \
+/// Calls your macro with the name of each member of XrFrameEndInfo, in order.
#define XR_LIST_STRUCT_XrFrameEndInfo(_) \
_(type) \
_(next) \
@@ -1087,6 +1153,7 @@ XR_ENUM_STR(XrResult);
_(layerCount) \
_(layers) \
+/// Calls your macro with the name of each member of XrViewLocateInfo, in order.
#define XR_LIST_STRUCT_XrViewLocateInfo(_) \
_(type) \
_(next) \
@@ -1094,23 +1161,27 @@ XR_ENUM_STR(XrResult);
_(displayTime) \
_(space) \
+/// Calls your macro with the name of each member of XrViewState, in order.
#define XR_LIST_STRUCT_XrViewState(_) \
_(type) \
_(next) \
_(viewStateFlags) \
+/// Calls your macro with the name of each member of XrFovf, in order.
#define XR_LIST_STRUCT_XrFovf(_) \
_(angleLeft) \
_(angleRight) \
_(angleUp) \
_(angleDown) \
+/// Calls your macro with the name of each member of XrView, in order.
#define XR_LIST_STRUCT_XrView(_) \
_(type) \
_(next) \
_(pose) \
_(fov) \
+/// Calls your macro with the name of each member of XrActionSetCreateInfo, in order.
#define XR_LIST_STRUCT_XrActionSetCreateInfo(_) \
_(type) \
_(next) \
@@ -1118,6 +1189,7 @@ XR_ENUM_STR(XrResult);
_(localizedActionSetName) \
_(priority) \
+/// Calls your macro with the name of each member of XrActionCreateInfo, in order.
#define XR_LIST_STRUCT_XrActionCreateInfo(_) \
_(type) \
_(next) \
@@ -1127,10 +1199,12 @@ XR_ENUM_STR(XrResult);
_(subactionPaths) \
_(localizedActionName) \
+/// Calls your macro with the name of each member of XrActionSuggestedBinding, in order.
#define XR_LIST_STRUCT_XrActionSuggestedBinding(_) \
_(action) \
_(binding) \
+/// Calls your macro with the name of each member of XrInteractionProfileSuggestedBinding, in order.
#define XR_LIST_STRUCT_XrInteractionProfileSuggestedBinding(_) \
_(type) \
_(next) \
@@ -1138,23 +1212,27 @@ XR_ENUM_STR(XrResult);
_(countSuggestedBindings) \
_(suggestedBindings) \
+/// Calls your macro with the name of each member of XrSessionActionSetsAttachInfo, in order.
#define XR_LIST_STRUCT_XrSessionActionSetsAttachInfo(_) \
_(type) \
_(next) \
_(countActionSets) \
_(actionSets) \
+/// Calls your macro with the name of each member of XrInteractionProfileState, in order.
#define XR_LIST_STRUCT_XrInteractionProfileState(_) \
_(type) \
_(next) \
_(interactionProfile) \
+/// Calls your macro with the name of each member of XrActionStateGetInfo, in order.
#define XR_LIST_STRUCT_XrActionStateGetInfo(_) \
_(type) \
_(next) \
_(action) \
_(subactionPath) \
+/// Calls your macro with the name of each member of XrActionStateBoolean, in order.
#define XR_LIST_STRUCT_XrActionStateBoolean(_) \
_(type) \
_(next) \
@@ -1163,6 +1241,7 @@ XR_ENUM_STR(XrResult);
_(lastChangeTime) \
_(isActive) \
+/// Calls your macro with the name of each member of XrActionStateFloat, in order.
#define XR_LIST_STRUCT_XrActionStateFloat(_) \
_(type) \
_(next) \
@@ -1171,10 +1250,12 @@ XR_ENUM_STR(XrResult);
_(lastChangeTime) \
_(isActive) \
+/// Calls your macro with the name of each member of XrVector2f, in order.
#define XR_LIST_STRUCT_XrVector2f(_) \
_(x) \
_(y) \
+/// Calls your macro with the name of each member of XrActionStateVector2f, in order.
#define XR_LIST_STRUCT_XrActionStateVector2f(_) \
_(type) \
_(next) \
@@ -1183,67 +1264,81 @@ XR_ENUM_STR(XrResult);
_(lastChangeTime) \
_(isActive) \
+/// Calls your macro with the name of each member of XrActionStatePose, in order.
#define XR_LIST_STRUCT_XrActionStatePose(_) \
_(type) \
_(next) \
_(isActive) \
+/// Calls your macro with the name of each member of XrActiveActionSet, in order.
#define XR_LIST_STRUCT_XrActiveActionSet(_) \
_(actionSet) \
_(subactionPath) \
+/// Calls your macro with the name of each member of XrActionsSyncInfo, in order.
#define XR_LIST_STRUCT_XrActionsSyncInfo(_) \
_(type) \
_(next) \
_(countActiveActionSets) \
_(activeActionSets) \
+/// Calls your macro with the name of each member of XrBoundSourcesForActionEnumerateInfo, in order.
#define XR_LIST_STRUCT_XrBoundSourcesForActionEnumerateInfo(_) \
_(type) \
_(next) \
_(action) \
+/// Calls your macro with the name of each member of XrInputSourceLocalizedNameGetInfo, in order.
#define XR_LIST_STRUCT_XrInputSourceLocalizedNameGetInfo(_) \
_(type) \
_(next) \
_(sourcePath) \
_(whichComponents) \
+/// Calls your macro with the name of each member of XrHapticActionInfo, in order.
#define XR_LIST_STRUCT_XrHapticActionInfo(_) \
_(type) \
_(next) \
_(action) \
_(subactionPath) \
+/// Calls your macro with the name of each member of XrHapticBaseHeader, in order.
#define XR_LIST_STRUCT_XrHapticBaseHeader(_) \
_(type) \
_(next) \
+/// Calls your macro with the name of each member of XrBaseInStructure, in order.
#define XR_LIST_STRUCT_XrBaseInStructure(_) \
_(type) \
_(next) \
+/// Calls your macro with the name of each member of XrBaseOutStructure, in order.
#define XR_LIST_STRUCT_XrBaseOutStructure(_) \
_(type) \
_(next) \
+/// Calls your macro with the name of each member of XrOffset2Di, in order.
#define XR_LIST_STRUCT_XrOffset2Di(_) \
_(x) \
_(y) \
+/// Calls your macro with the name of each member of XrExtent2Di, in order.
#define XR_LIST_STRUCT_XrExtent2Di(_) \
_(width) \
_(height) \
+/// Calls your macro with the name of each member of XrRect2Di, in order.
#define XR_LIST_STRUCT_XrRect2Di(_) \
_(offset) \
_(extent) \
+/// Calls your macro with the name of each member of XrSwapchainSubImage, in order.
#define XR_LIST_STRUCT_XrSwapchainSubImage(_) \
_(swapchain) \
_(imageRect) \
_(imageArrayIndex) \
+/// Calls your macro with the name of each member of XrCompositionLayerProjectionView, in order.
#define XR_LIST_STRUCT_XrCompositionLayerProjectionView(_) \
_(type) \
_(next) \
@@ -1251,6 +1346,7 @@ XR_ENUM_STR(XrResult);
_(fov) \
_(subImage) \
+/// Calls your macro with the name of each member of XrCompositionLayerProjection, in order.
#define XR_LIST_STRUCT_XrCompositionLayerProjection(_) \
_(type) \
_(next) \
@@ -1259,6 +1355,7 @@ XR_ENUM_STR(XrResult);
_(viewCount) \
_(views) \
+/// Calls your macro with the name of each member of XrCompositionLayerQuad, in order.
#define XR_LIST_STRUCT_XrCompositionLayerQuad(_) \
_(type) \
_(next) \
@@ -1269,20 +1366,24 @@ XR_ENUM_STR(XrResult);
_(pose) \
_(size) \
+/// Calls your macro with the name of each member of XrEventDataBaseHeader, in order.
#define XR_LIST_STRUCT_XrEventDataBaseHeader(_) \
_(type) \
_(next) \
+/// Calls your macro with the name of each member of XrEventDataEventsLost, in order.
#define XR_LIST_STRUCT_XrEventDataEventsLost(_) \
_(type) \
_(next) \
_(lostEventCount) \
+/// Calls your macro with the name of each member of XrEventDataInstanceLossPending, in order.
#define XR_LIST_STRUCT_XrEventDataInstanceLossPending(_) \
_(type) \
_(next) \
_(lossTime) \
+/// Calls your macro with the name of each member of XrEventDataSessionStateChanged, in order.
#define XR_LIST_STRUCT_XrEventDataSessionStateChanged(_) \
_(type) \
_(next) \
@@ -1290,6 +1391,7 @@ XR_ENUM_STR(XrResult);
_(state) \
_(time) \
+/// Calls your macro with the name of each member of XrEventDataReferenceSpaceChangePending, in order.
#define XR_LIST_STRUCT_XrEventDataReferenceSpaceChangePending(_) \
_(type) \
_(next) \
@@ -1299,11 +1401,13 @@ XR_ENUM_STR(XrResult);
_(poseValid) \
_(poseInPreviousSpace) \
+/// Calls your macro with the name of each member of XrEventDataInteractionProfileChanged, in order.
#define XR_LIST_STRUCT_XrEventDataInteractionProfileChanged(_) \
_(type) \
_(next) \
_(session) \
+/// Calls your macro with the name of each member of XrHapticVibration, in order.
#define XR_LIST_STRUCT_XrHapticVibration(_) \
_(type) \
_(next) \
@@ -1311,26 +1415,31 @@ XR_ENUM_STR(XrResult);
_(frequency) \
_(amplitude) \
+/// Calls your macro with the name of each member of XrOffset2Df, in order.
#define XR_LIST_STRUCT_XrOffset2Df(_) \
_(x) \
_(y) \
+/// Calls your macro with the name of each member of XrRect2Df, in order.
#define XR_LIST_STRUCT_XrRect2Df(_) \
_(offset) \
_(extent) \
+/// Calls your macro with the name of each member of XrVector4f, in order.
#define XR_LIST_STRUCT_XrVector4f(_) \
_(x) \
_(y) \
_(z) \
_(w) \
+/// Calls your macro with the name of each member of XrColor4f, in order.
#define XR_LIST_STRUCT_XrColor4f(_) \
_(r) \
_(g) \
_(b) \
_(a) \
+/// Calls your macro with the name of each member of XrCompositionLayerCubeKHR, in order.
#define XR_LIST_STRUCT_XrCompositionLayerCubeKHR(_) \
_(type) \
_(next) \
@@ -1341,12 +1450,14 @@ XR_ENUM_STR(XrResult);
_(imageArrayIndex) \
_(orientation) \
+/// Calls your macro with the name of each member of XrInstanceCreateInfoAndroidKHR, in order.
#define XR_LIST_STRUCT_XrInstanceCreateInfoAndroidKHR(_) \
_(type) \
_(next) \
_(applicationVM) \
_(applicationActivity) \
+/// Calls your macro with the name of each member of XrCompositionLayerDepthInfoKHR, in order.
#define XR_LIST_STRUCT_XrCompositionLayerDepthInfoKHR(_) \
_(type) \
_(next) \
@@ -1356,12 +1467,14 @@ XR_ENUM_STR(XrResult);
_(nearZ) \
_(farZ) \
+/// Calls your macro with the name of each member of XrVulkanSwapchainFormatListCreateInfoKHR, in order.
#define XR_LIST_STRUCT_XrVulkanSwapchainFormatListCreateInfoKHR(_) \
_(type) \
_(next) \
_(viewFormatCount) \
_(viewFormats) \
+/// Calls your macro with the name of each member of XrCompositionLayerCylinderKHR, in order.
#define XR_LIST_STRUCT_XrCompositionLayerCylinderKHR(_) \
_(type) \
_(next) \
@@ -1374,6 +1487,7 @@ XR_ENUM_STR(XrResult);
_(centralAngle) \
_(aspectRatio) \
+/// Calls your macro with the name of each member of XrCompositionLayerEquirectKHR, in order.
#define XR_LIST_STRUCT_XrCompositionLayerEquirectKHR(_) \
_(type) \
_(next) \
@@ -1386,12 +1500,14 @@ XR_ENUM_STR(XrResult);
_(scale) \
_(bias) \
+/// Calls your macro with the name of each member of XrGraphicsBindingOpenGLWin32KHR, in order.
#define XR_LIST_STRUCT_XrGraphicsBindingOpenGLWin32KHR(_) \
_(type) \
_(next) \
_(hDC) \
_(hGLRC) \
+/// Calls your macro with the name of each member of XrGraphicsBindingOpenGLXlibKHR, in order.
#define XR_LIST_STRUCT_XrGraphicsBindingOpenGLXlibKHR(_) \
_(type) \
_(next) \
@@ -1401,6 +1517,7 @@ XR_ENUM_STR(XrResult);
_(glxDrawable) \
_(glxContext) \
+/// Calls your macro with the name of each member of XrGraphicsBindingOpenGLXcbKHR, in order.
#define XR_LIST_STRUCT_XrGraphicsBindingOpenGLXcbKHR(_) \
_(type) \
_(next) \
@@ -1411,22 +1528,26 @@ XR_ENUM_STR(XrResult);
_(glxDrawable) \
_(glxContext) \
+/// Calls your macro with the name of each member of XrGraphicsBindingOpenGLWaylandKHR, in order.
#define XR_LIST_STRUCT_XrGraphicsBindingOpenGLWaylandKHR(_) \
_(type) \
_(next) \
_(display) \
+/// Calls your macro with the name of each member of XrSwapchainImageOpenGLKHR, in order.
#define XR_LIST_STRUCT_XrSwapchainImageOpenGLKHR(_) \
_(type) \
_(next) \
_(image) \
+/// Calls your macro with the name of each member of XrGraphicsRequirementsOpenGLKHR, in order.
#define XR_LIST_STRUCT_XrGraphicsRequirementsOpenGLKHR(_) \
_(type) \
_(next) \
_(minApiVersionSupported) \
_(maxApiVersionSupported) \
+/// Calls your macro with the name of each member of XrGraphicsBindingOpenGLESAndroidKHR, in order.
#define XR_LIST_STRUCT_XrGraphicsBindingOpenGLESAndroidKHR(_) \
_(type) \
_(next) \
@@ -1434,17 +1555,20 @@ XR_ENUM_STR(XrResult);
_(config) \
_(context) \
+/// Calls your macro with the name of each member of XrSwapchainImageOpenGLESKHR, in order.
#define XR_LIST_STRUCT_XrSwapchainImageOpenGLESKHR(_) \
_(type) \
_(next) \
_(image) \
+/// Calls your macro with the name of each member of XrGraphicsRequirementsOpenGLESKHR, in order.
#define XR_LIST_STRUCT_XrGraphicsRequirementsOpenGLESKHR(_) \
_(type) \
_(next) \
_(minApiVersionSupported) \
_(maxApiVersionSupported) \
+/// Calls your macro with the name of each member of XrGraphicsBindingVulkanKHR, in order.
#define XR_LIST_STRUCT_XrGraphicsBindingVulkanKHR(_) \
_(type) \
_(next) \
@@ -1454,50 +1578,59 @@ XR_ENUM_STR(XrResult);
_(queueFamilyIndex) \
_(queueIndex) \
+/// Calls your macro with the name of each member of XrSwapchainImageVulkanKHR, in order.
#define XR_LIST_STRUCT_XrSwapchainImageVulkanKHR(_) \
_(type) \
_(next) \
_(image) \
+/// Calls your macro with the name of each member of XrGraphicsRequirementsVulkanKHR, in order.
#define XR_LIST_STRUCT_XrGraphicsRequirementsVulkanKHR(_) \
_(type) \
_(next) \
_(minApiVersionSupported) \
_(maxApiVersionSupported) \
+/// Calls your macro with the name of each member of XrGraphicsBindingD3D11KHR, in order.
#define XR_LIST_STRUCT_XrGraphicsBindingD3D11KHR(_) \
_(type) \
_(next) \
_(device) \
+/// Calls your macro with the name of each member of XrSwapchainImageD3D11KHR, in order.
#define XR_LIST_STRUCT_XrSwapchainImageD3D11KHR(_) \
_(type) \
_(next) \
_(texture) \
+/// Calls your macro with the name of each member of XrGraphicsRequirementsD3D11KHR, in order.
#define XR_LIST_STRUCT_XrGraphicsRequirementsD3D11KHR(_) \
_(type) \
_(next) \
_(adapterLuid) \
_(minFeatureLevel) \
+/// Calls your macro with the name of each member of XrGraphicsBindingD3D12KHR, in order.
#define XR_LIST_STRUCT_XrGraphicsBindingD3D12KHR(_) \
_(type) \
_(next) \
_(device) \
_(queue) \
+/// Calls your macro with the name of each member of XrSwapchainImageD3D12KHR, in order.
#define XR_LIST_STRUCT_XrSwapchainImageD3D12KHR(_) \
_(type) \
_(next) \
_(texture) \
+/// Calls your macro with the name of each member of XrGraphicsRequirementsD3D12KHR, in order.
#define XR_LIST_STRUCT_XrGraphicsRequirementsD3D12KHR(_) \
_(type) \
_(next) \
_(adapterLuid) \
_(minFeatureLevel) \
+/// Calls your macro with the name of each member of XrVisibilityMaskKHR, in order.
#define XR_LIST_STRUCT_XrVisibilityMaskKHR(_) \
_(type) \
_(next) \
@@ -1508,6 +1641,7 @@ XR_ENUM_STR(XrResult);
_(indexCountOutput) \
_(indices) \
+/// Calls your macro with the name of each member of XrEventDataVisibilityMaskChangedKHR, in order.
#define XR_LIST_STRUCT_XrEventDataVisibilityMaskChangedKHR(_) \
_(type) \
_(next) \
@@ -1515,22 +1649,26 @@ XR_ENUM_STR(XrResult);
_(viewConfigurationType) \
_(viewIndex) \
+/// Calls your macro with the name of each member of XrCompositionLayerColorScaleBiasKHR, in order.
#define XR_LIST_STRUCT_XrCompositionLayerColorScaleBiasKHR(_) \
_(type) \
_(next) \
_(colorScale) \
_(colorBias) \
+/// Calls your macro with the name of each member of XrLoaderInitInfoBaseHeaderKHR, in order.
#define XR_LIST_STRUCT_XrLoaderInitInfoBaseHeaderKHR(_) \
_(type) \
_(next) \
+/// Calls your macro with the name of each member of XrLoaderInitInfoAndroidKHR, in order.
#define XR_LIST_STRUCT_XrLoaderInitInfoAndroidKHR(_) \
_(type) \
_(next) \
_(applicationVM) \
_(applicationContext) \
+/// Calls your macro with the name of each member of XrVulkanInstanceCreateInfoKHR, in order.
#define XR_LIST_STRUCT_XrVulkanInstanceCreateInfoKHR(_) \
_(type) \
_(next) \
@@ -1540,6 +1678,7 @@ XR_ENUM_STR(XrResult);
_(vulkanCreateInfo) \
_(vulkanAllocator) \
+/// Calls your macro with the name of each member of XrVulkanDeviceCreateInfoKHR, in order.
#define XR_LIST_STRUCT_XrVulkanDeviceCreateInfoKHR(_) \
_(type) \
_(next) \
@@ -1550,12 +1689,14 @@ XR_ENUM_STR(XrResult);
_(vulkanCreateInfo) \
_(vulkanAllocator) \
+/// Calls your macro with the name of each member of XrVulkanGraphicsDeviceGetInfoKHR, in order.
#define XR_LIST_STRUCT_XrVulkanGraphicsDeviceGetInfoKHR(_) \
_(type) \
_(next) \
_(systemId) \
_(vulkanInstance) \
+/// Calls your macro with the name of each member of XrCompositionLayerEquirect2KHR, in order.
#define XR_LIST_STRUCT_XrCompositionLayerEquirect2KHR(_) \
_(type) \
_(next) \
@@ -1569,16 +1710,19 @@ XR_ENUM_STR(XrResult);
_(upperVerticalAngle) \
_(lowerVerticalAngle) \
+/// Calls your macro with the name of each member of XrBindingModificationBaseHeaderKHR, in order.
#define XR_LIST_STRUCT_XrBindingModificationBaseHeaderKHR(_) \
_(type) \
_(next) \
+/// Calls your macro with the name of each member of XrBindingModificationsKHR, in order.
#define XR_LIST_STRUCT_XrBindingModificationsKHR(_) \
_(type) \
_(next) \
_(bindingModificationCount) \
_(bindingModifications) \
+/// Calls your macro with the name of each member of XrEventDataPerfSettingsEXT, in order.
#define XR_LIST_STRUCT_XrEventDataPerfSettingsEXT(_) \
_(type) \
_(next) \
@@ -1587,6 +1731,7 @@ XR_ENUM_STR(XrResult);
_(fromLevel) \
_(toLevel) \
+/// Calls your macro with the name of each member of XrDebugUtilsObjectNameInfoEXT, in order.
#define XR_LIST_STRUCT_XrDebugUtilsObjectNameInfoEXT(_) \
_(type) \
_(next) \
@@ -1594,11 +1739,13 @@ XR_ENUM_STR(XrResult);
_(objectHandle) \
_(objectName) \
+/// Calls your macro with the name of each member of XrDebugUtilsLabelEXT, in order.
#define XR_LIST_STRUCT_XrDebugUtilsLabelEXT(_) \
_(type) \
_(next) \
_(labelName) \
+/// Calls your macro with the name of each member of XrDebugUtilsMessengerCallbackDataEXT, in order.
#define XR_LIST_STRUCT_XrDebugUtilsMessengerCallbackDataEXT(_) \
_(type) \
_(next) \
@@ -1610,6 +1757,7 @@ XR_ENUM_STR(XrResult);
_(sessionLabelCount) \
_(sessionLabels) \
+/// Calls your macro with the name of each member of XrDebugUtilsMessengerCreateInfoEXT, in order.
#define XR_LIST_STRUCT_XrDebugUtilsMessengerCreateInfoEXT(_) \
_(type) \
_(next) \
@@ -1618,28 +1766,33 @@ XR_ENUM_STR(XrResult);
_(userCallback) \
_(userData) \
+/// Calls your macro with the name of each member of XrSystemEyeGazeInteractionPropertiesEXT, in order.
#define XR_LIST_STRUCT_XrSystemEyeGazeInteractionPropertiesEXT(_) \
_(type) \
_(next) \
_(supportsEyeGazeInteraction) \
+/// Calls your macro with the name of each member of XrEyeGazeSampleTimeEXT, in order.
#define XR_LIST_STRUCT_XrEyeGazeSampleTimeEXT(_) \
_(type) \
_(next) \
_(time) \
+/// Calls your macro with the name of each member of XrSessionCreateInfoOverlayEXTX, in order.
#define XR_LIST_STRUCT_XrSessionCreateInfoOverlayEXTX(_) \
_(type) \
_(next) \
_(createFlags) \
_(sessionLayersPlacement) \
+/// Calls your macro with the name of each member of XrEventDataMainSessionVisibilityChangedEXTX, in order.
#define XR_LIST_STRUCT_XrEventDataMainSessionVisibilityChangedEXTX(_) \
_(type) \
_(next) \
_(visible) \
_(flags) \
+/// Calls your macro with the name of each member of XrSpatialAnchorCreateInfoMSFT, in order.
#define XR_LIST_STRUCT_XrSpatialAnchorCreateInfoMSFT(_) \
_(type) \
_(next) \
@@ -1647,17 +1800,20 @@ XR_ENUM_STR(XrResult);
_(pose) \
_(time) \
+/// Calls your macro with the name of each member of XrSpatialAnchorSpaceCreateInfoMSFT, in order.
#define XR_LIST_STRUCT_XrSpatialAnchorSpaceCreateInfoMSFT(_) \
_(type) \
_(next) \
_(anchor) \
_(poseInAnchorSpace) \
+/// Calls your macro with the name of each member of XrCompositionLayerImageLayoutFB, in order.
#define XR_LIST_STRUCT_XrCompositionLayerImageLayoutFB(_) \
_(type) \
_(next) \
_(flags) \
+/// Calls your macro with the name of each member of XrCompositionLayerAlphaBlendFB, in order.
#define XR_LIST_STRUCT_XrCompositionLayerAlphaBlendFB(_) \
_(type) \
_(next) \
@@ -1666,6 +1822,7 @@ XR_ENUM_STR(XrResult);
_(srcFactorAlpha) \
_(dstFactorAlpha) \
+/// Calls your macro with the name of each member of XrViewConfigurationDepthRangeEXT, in order.
#define XR_LIST_STRUCT_XrViewConfigurationDepthRangeEXT(_) \
_(type) \
_(next) \
@@ -1674,6 +1831,7 @@ XR_ENUM_STR(XrResult);
_(recommendedFarZ) \
_(maxFarZ) \
+/// Calls your macro with the name of each member of XrGraphicsBindingEGLMNDX, in order.
#define XR_LIST_STRUCT_XrGraphicsBindingEGLMNDX(_) \
_(type) \
_(next) \
@@ -1682,6 +1840,7 @@ XR_ENUM_STR(XrResult);
_(config) \
_(context) \
+/// Calls your macro with the name of each member of XrSpatialGraphNodeSpaceCreateInfoMSFT, in order.
#define XR_LIST_STRUCT_XrSpatialGraphNodeSpaceCreateInfoMSFT(_) \
_(type) \
_(next) \
@@ -1689,6 +1848,7 @@ XR_ENUM_STR(XrResult);
_(nodeId) \
_(pose) \
+/// Calls your macro with the name of each member of XrSpatialGraphStaticNodeBindingCreateInfoMSFT, in order.
#define XR_LIST_STRUCT_XrSpatialGraphStaticNodeBindingCreateInfoMSFT(_) \
_(type) \
_(next) \
@@ -1696,43 +1856,51 @@ XR_ENUM_STR(XrResult);
_(poseInSpace) \
_(time) \
+/// Calls your macro with the name of each member of XrSpatialGraphNodeBindingPropertiesGetInfoMSFT, in order.
#define XR_LIST_STRUCT_XrSpatialGraphNodeBindingPropertiesGetInfoMSFT(_) \
_(type) \
_(next) \
+/// Calls your macro with the name of each member of XrSpatialGraphNodeBindingPropertiesMSFT, in order.
#define XR_LIST_STRUCT_XrSpatialGraphNodeBindingPropertiesMSFT(_) \
_(type) \
_(next) \
_(nodeId) \
_(poseInNodeSpace) \
+/// Calls your macro with the name of each member of XrSystemHandTrackingPropertiesEXT, in order.
#define XR_LIST_STRUCT_XrSystemHandTrackingPropertiesEXT(_) \
_(type) \
_(next) \
_(supportsHandTracking) \
+/// Calls your macro with the name of each member of XrHandTrackerCreateInfoEXT, in order.
#define XR_LIST_STRUCT_XrHandTrackerCreateInfoEXT(_) \
_(type) \
_(next) \
_(hand) \
_(handJointSet) \
+/// Calls your macro with the name of each member of XrHandJointsLocateInfoEXT, in order.
#define XR_LIST_STRUCT_XrHandJointsLocateInfoEXT(_) \
_(type) \
_(next) \
_(baseSpace) \
_(time) \
+/// Calls your macro with the name of each member of XrHandJointLocationEXT, in order.
#define XR_LIST_STRUCT_XrHandJointLocationEXT(_) \
_(locationFlags) \
_(pose) \
_(radius) \
+/// Calls your macro with the name of each member of XrHandJointVelocityEXT, in order.
#define XR_LIST_STRUCT_XrHandJointVelocityEXT(_) \
_(velocityFlags) \
_(linearVelocity) \
_(angularVelocity) \
+/// Calls your macro with the name of each member of XrHandJointLocationsEXT, in order.
#define XR_LIST_STRUCT_XrHandJointLocationsEXT(_) \
_(type) \
_(next) \
@@ -1740,12 +1908,14 @@ XR_ENUM_STR(XrResult);
_(jointCount) \
_(jointLocations) \
+/// Calls your macro with the name of each member of XrHandJointVelocitiesEXT, in order.
#define XR_LIST_STRUCT_XrHandJointVelocitiesEXT(_) \
_(type) \
_(next) \
_(jointCount) \
_(jointVelocities) \
+/// Calls your macro with the name of each member of XrSystemHandTrackingMeshPropertiesMSFT, in order.
#define XR_LIST_STRUCT_XrSystemHandTrackingMeshPropertiesMSFT(_) \
_(type) \
_(next) \
@@ -1753,34 +1923,40 @@ XR_ENUM_STR(XrResult);
_(maxHandMeshIndexCount) \
_(maxHandMeshVertexCount) \
+/// Calls your macro with the name of each member of XrHandMeshSpaceCreateInfoMSFT, in order.
#define XR_LIST_STRUCT_XrHandMeshSpaceCreateInfoMSFT(_) \
_(type) \
_(next) \
_(handPoseType) \
_(poseInHandMeshSpace) \
+/// Calls your macro with the name of each member of XrHandMeshUpdateInfoMSFT, in order.
#define XR_LIST_STRUCT_XrHandMeshUpdateInfoMSFT(_) \
_(type) \
_(next) \
_(time) \
_(handPoseType) \
+/// Calls your macro with the name of each member of XrHandMeshIndexBufferMSFT, in order.
#define XR_LIST_STRUCT_XrHandMeshIndexBufferMSFT(_) \
_(indexBufferKey) \
_(indexCapacityInput) \
_(indexCountOutput) \
_(indices) \
+/// Calls your macro with the name of each member of XrHandMeshVertexMSFT, in order.
#define XR_LIST_STRUCT_XrHandMeshVertexMSFT(_) \
_(position) \
_(normal) \
+/// Calls your macro with the name of each member of XrHandMeshVertexBufferMSFT, in order.
#define XR_LIST_STRUCT_XrHandMeshVertexBufferMSFT(_) \
_(vertexUpdateTime) \
_(vertexCapacityInput) \
_(vertexCountOutput) \
_(vertices) \
+/// Calls your macro with the name of each member of XrHandMeshMSFT, in order.
#define XR_LIST_STRUCT_XrHandMeshMSFT(_) \
_(type) \
_(next) \
@@ -1790,29 +1966,34 @@ XR_ENUM_STR(XrResult);
_(indexBuffer) \
_(vertexBuffer) \
+/// Calls your macro with the name of each member of XrHandPoseTypeInfoMSFT, in order.
#define XR_LIST_STRUCT_XrHandPoseTypeInfoMSFT(_) \
_(type) \
_(next) \
_(handPoseType) \
+/// Calls your macro with the name of each member of XrSecondaryViewConfigurationSessionBeginInfoMSFT, in order.
#define XR_LIST_STRUCT_XrSecondaryViewConfigurationSessionBeginInfoMSFT(_) \
_(type) \
_(next) \
_(viewConfigurationCount) \
_(enabledViewConfigurationTypes) \
+/// Calls your macro with the name of each member of XrSecondaryViewConfigurationStateMSFT, in order.
#define XR_LIST_STRUCT_XrSecondaryViewConfigurationStateMSFT(_) \
_(type) \
_(next) \
_(viewConfigurationType) \
_(active) \
+/// Calls your macro with the name of each member of XrSecondaryViewConfigurationFrameStateMSFT, in order.
#define XR_LIST_STRUCT_XrSecondaryViewConfigurationFrameStateMSFT(_) \
_(type) \
_(next) \
_(viewConfigurationCount) \
_(viewConfigurationStates) \
+/// Calls your macro with the name of each member of XrSecondaryViewConfigurationLayerInfoMSFT, in order.
#define XR_LIST_STRUCT_XrSecondaryViewConfigurationLayerInfoMSFT(_) \
_(type) \
_(next) \
@@ -1821,28 +2002,33 @@ XR_ENUM_STR(XrResult);
_(layerCount) \
_(layers) \
+/// Calls your macro with the name of each member of XrSecondaryViewConfigurationFrameEndInfoMSFT, in order.
#define XR_LIST_STRUCT_XrSecondaryViewConfigurationFrameEndInfoMSFT(_) \
_(type) \
_(next) \
_(viewConfigurationCount) \
_(viewConfigurationLayersInfo) \
+/// Calls your macro with the name of each member of XrSecondaryViewConfigurationSwapchainCreateInfoMSFT, in order.
#define XR_LIST_STRUCT_XrSecondaryViewConfigurationSwapchainCreateInfoMSFT(_) \
_(type) \
_(next) \
_(viewConfigurationType) \
+/// Calls your macro with the name of each member of XrControllerModelKeyStateMSFT, in order.
#define XR_LIST_STRUCT_XrControllerModelKeyStateMSFT(_) \
_(type) \
_(next) \
_(modelKey) \
+/// Calls your macro with the name of each member of XrControllerModelNodePropertiesMSFT, in order.
#define XR_LIST_STRUCT_XrControllerModelNodePropertiesMSFT(_) \
_(type) \
_(next) \
_(parentNodeName) \
_(nodeName) \
+/// Calls your macro with the name of each member of XrControllerModelPropertiesMSFT, in order.
#define XR_LIST_STRUCT_XrControllerModelPropertiesMSFT(_) \
_(type) \
_(next) \
@@ -1850,11 +2036,13 @@ XR_ENUM_STR(XrResult);
_(nodeCountOutput) \
_(nodeProperties) \
+/// Calls your macro with the name of each member of XrControllerModelNodeStateMSFT, in order.
#define XR_LIST_STRUCT_XrControllerModelNodeStateMSFT(_) \
_(type) \
_(next) \
_(nodePose) \
+/// Calls your macro with the name of each member of XrControllerModelStateMSFT, in order.
#define XR_LIST_STRUCT_XrControllerModelStateMSFT(_) \
_(type) \
_(next) \
@@ -1862,23 +2050,27 @@ XR_ENUM_STR(XrResult);
_(nodeCountOutput) \
_(nodeStates) \
+/// Calls your macro with the name of each member of XrViewConfigurationViewFovEPIC, in order.
#define XR_LIST_STRUCT_XrViewConfigurationViewFovEPIC(_) \
_(type) \
_(next) \
_(recommendedFov) \
_(maxMutableFov) \
+/// Calls your macro with the name of each member of XrHolographicWindowAttachmentMSFT, in order.
#define XR_LIST_STRUCT_XrHolographicWindowAttachmentMSFT(_) \
_(type) \
_(next) \
_(holographicSpace) \
_(coreWindow) \
+/// Calls your macro with the name of each member of XrCompositionLayerReprojectionInfoMSFT, in order.
#define XR_LIST_STRUCT_XrCompositionLayerReprojectionInfoMSFT(_) \
_(type) \
_(next) \
_(reprojectionMode) \
+/// Calls your macro with the name of each member of XrCompositionLayerReprojectionPlaneOverrideMSFT, in order.
#define XR_LIST_STRUCT_XrCompositionLayerReprojectionPlaneOverrideMSFT(_) \
_(type) \
_(next) \
@@ -1886,20 +2078,24 @@ XR_ENUM_STR(XrResult);
_(normal) \
_(velocity) \
+/// Calls your macro with the name of each member of XrAndroidSurfaceSwapchainCreateInfoFB, in order.
#define XR_LIST_STRUCT_XrAndroidSurfaceSwapchainCreateInfoFB(_) \
_(type) \
_(next) \
_(createFlags) \
+/// Calls your macro with the name of each member of XrSwapchainStateBaseHeaderFB, in order.
#define XR_LIST_STRUCT_XrSwapchainStateBaseHeaderFB(_) \
_(type) \
_(next) \
+/// Calls your macro with the name of each member of XrCompositionLayerSecureContentFB, in order.
#define XR_LIST_STRUCT_XrCompositionLayerSecureContentFB(_) \
_(type) \
_(next) \
_(flags) \
+/// Calls your macro with the name of each member of XrInteractionProfileDpadBindingEXT, in order.
#define XR_LIST_STRUCT_XrInteractionProfileDpadBindingEXT(_) \
_(type) \
_(next) \
@@ -1913,6 +2109,7 @@ XR_ENUM_STR(XrResult);
_(onHaptic) \
_(offHaptic) \
+/// Calls your macro with the name of each member of XrInteractionProfileAnalogThresholdVALVE, in order.
#define XR_LIST_STRUCT_XrInteractionProfileAnalogThresholdVALVE(_) \
_(type) \
_(next) \
@@ -1923,35 +2120,43 @@ XR_ENUM_STR(XrResult);
_(onHaptic) \
_(offHaptic) \
+/// Calls your macro with the name of each member of XrHandJointsMotionRangeInfoEXT, in order.
#define XR_LIST_STRUCT_XrHandJointsMotionRangeInfoEXT(_) \
_(type) \
_(next) \
_(handJointsMotionRange) \
+/// Calls your macro with the name of each member of XrUuidMSFT, in order.
#define XR_LIST_STRUCT_XrUuidMSFT(_) \
_(bytes) \
+/// Calls your macro with the name of each member of XrSceneObserverCreateInfoMSFT, in order.
#define XR_LIST_STRUCT_XrSceneObserverCreateInfoMSFT(_) \
_(type) \
_(next) \
+/// Calls your macro with the name of each member of XrSceneCreateInfoMSFT, in order.
#define XR_LIST_STRUCT_XrSceneCreateInfoMSFT(_) \
_(type) \
_(next) \
+/// Calls your macro with the name of each member of XrSceneSphereBoundMSFT, in order.
#define XR_LIST_STRUCT_XrSceneSphereBoundMSFT(_) \
_(center) \
_(radius) \
+/// Calls your macro with the name of each member of XrSceneOrientedBoxBoundMSFT, in order.
#define XR_LIST_STRUCT_XrSceneOrientedBoxBoundMSFT(_) \
_(pose) \
_(extents) \
+/// Calls your macro with the name of each member of XrSceneFrustumBoundMSFT, in order.
#define XR_LIST_STRUCT_XrSceneFrustumBoundMSFT(_) \
_(pose) \
_(fov) \
_(farDistance) \
+/// Calls your macro with the name of each member of XrSceneBoundsMSFT, in order.
#define XR_LIST_STRUCT_XrSceneBoundsMSFT(_) \
_(space) \
_(time) \
@@ -1962,6 +2167,7 @@ XR_ENUM_STR(XrResult);
_(frustumCount) \
_(frustums) \
+/// Calls your macro with the name of each member of XrNewSceneComputeInfoMSFT, in order.
#define XR_LIST_STRUCT_XrNewSceneComputeInfoMSFT(_) \
_(type) \
_(next) \
@@ -1970,17 +2176,20 @@ XR_ENUM_STR(XrResult);
_(consistency) \
_(bounds) \
+/// Calls your macro with the name of each member of XrVisualMeshComputeLodInfoMSFT, in order.
#define XR_LIST_STRUCT_XrVisualMeshComputeLodInfoMSFT(_) \
_(type) \
_(next) \
_(lod) \
+/// Calls your macro with the name of each member of XrSceneComponentMSFT, in order.
#define XR_LIST_STRUCT_XrSceneComponentMSFT(_) \
_(componentType) \
_(id) \
_(parentId) \
_(updateTime) \
+/// Calls your macro with the name of each member of XrSceneComponentsMSFT, in order.
#define XR_LIST_STRUCT_XrSceneComponentsMSFT(_) \
_(type) \
_(next) \
@@ -1988,21 +2197,25 @@ XR_ENUM_STR(XrResult);
_(componentCountOutput) \
_(components) \
+/// Calls your macro with the name of each member of XrSceneComponentsGetInfoMSFT, in order.
#define XR_LIST_STRUCT_XrSceneComponentsGetInfoMSFT(_) \
_(type) \
_(next) \
_(componentType) \
+/// Calls your macro with the name of each member of XrSceneComponentLocationMSFT, in order.
#define XR_LIST_STRUCT_XrSceneComponentLocationMSFT(_) \
_(flags) \
_(pose) \
+/// Calls your macro with the name of each member of XrSceneComponentLocationsMSFT, in order.
#define XR_LIST_STRUCT_XrSceneComponentLocationsMSFT(_) \
_(type) \
_(next) \
_(locationCount) \
_(locations) \
+/// Calls your macro with the name of each member of XrSceneComponentsLocateInfoMSFT, in order.
#define XR_LIST_STRUCT_XrSceneComponentsLocateInfoMSFT(_) \
_(type) \
_(next) \
@@ -2011,63 +2224,75 @@ XR_ENUM_STR(XrResult);
_(componentIdCount) \
_(componentIds) \
+/// Calls your macro with the name of each member of XrSceneObjectMSFT, in order.
#define XR_LIST_STRUCT_XrSceneObjectMSFT(_) \
_(objectType) \
+/// Calls your macro with the name of each member of XrSceneObjectsMSFT, in order.
#define XR_LIST_STRUCT_XrSceneObjectsMSFT(_) \
_(type) \
_(next) \
_(sceneObjectCount) \
_(sceneObjects) \
+/// Calls your macro with the name of each member of XrSceneComponentParentFilterInfoMSFT, in order.
#define XR_LIST_STRUCT_XrSceneComponentParentFilterInfoMSFT(_) \
_(type) \
_(next) \
_(parentId) \
+/// Calls your macro with the name of each member of XrSceneObjectTypesFilterInfoMSFT, in order.
#define XR_LIST_STRUCT_XrSceneObjectTypesFilterInfoMSFT(_) \
_(type) \
_(next) \
_(objectTypeCount) \
_(objectTypes) \
+/// Calls your macro with the name of each member of XrScenePlaneMSFT, in order.
#define XR_LIST_STRUCT_XrScenePlaneMSFT(_) \
_(alignment) \
_(size) \
_(meshBufferId) \
_(supportsIndicesUint16) \
+/// Calls your macro with the name of each member of XrScenePlanesMSFT, in order.
#define XR_LIST_STRUCT_XrScenePlanesMSFT(_) \
_(type) \
_(next) \
_(scenePlaneCount) \
_(scenePlanes) \
+/// Calls your macro with the name of each member of XrScenePlaneAlignmentFilterInfoMSFT, in order.
#define XR_LIST_STRUCT_XrScenePlaneAlignmentFilterInfoMSFT(_) \
_(type) \
_(next) \
_(alignmentCount) \
_(alignments) \
+/// Calls your macro with the name of each member of XrSceneMeshMSFT, in order.
#define XR_LIST_STRUCT_XrSceneMeshMSFT(_) \
_(meshBufferId) \
_(supportsIndicesUint16) \
+/// Calls your macro with the name of each member of XrSceneMeshesMSFT, in order.
#define XR_LIST_STRUCT_XrSceneMeshesMSFT(_) \
_(type) \
_(next) \
_(sceneMeshCount) \
_(sceneMeshes) \
+/// Calls your macro with the name of each member of XrSceneMeshBuffersGetInfoMSFT, in order.
#define XR_LIST_STRUCT_XrSceneMeshBuffersGetInfoMSFT(_) \
_(type) \
_(next) \
_(meshBufferId) \
+/// Calls your macro with the name of each member of XrSceneMeshBuffersMSFT, in order.
#define XR_LIST_STRUCT_XrSceneMeshBuffersMSFT(_) \
_(type) \
_(next) \
+/// Calls your macro with the name of each member of XrSceneMeshVertexBufferMSFT, in order.
#define XR_LIST_STRUCT_XrSceneMeshVertexBufferMSFT(_) \
_(type) \
_(next) \
@@ -2075,6 +2300,7 @@ XR_ENUM_STR(XrResult);
_(vertexCountOutput) \
_(vertices) \
+/// Calls your macro with the name of each member of XrSceneMeshIndicesUint32MSFT, in order.
#define XR_LIST_STRUCT_XrSceneMeshIndicesUint32MSFT(_) \
_(type) \
_(next) \
@@ -2082,6 +2308,7 @@ XR_ENUM_STR(XrResult);
_(indexCountOutput) \
_(indices) \
+/// Calls your macro with the name of each member of XrSceneMeshIndicesUint16MSFT, in order.
#define XR_LIST_STRUCT_XrSceneMeshIndicesUint16MSFT(_) \
_(type) \
_(next) \
@@ -2089,44 +2316,52 @@ XR_ENUM_STR(XrResult);
_(indexCountOutput) \
_(indices) \
+/// Calls your macro with the name of each member of XrSerializedSceneFragmentDataGetInfoMSFT, in order.
#define XR_LIST_STRUCT_XrSerializedSceneFragmentDataGetInfoMSFT(_) \
_(type) \
_(next) \
_(sceneFragmentId) \
+/// Calls your macro with the name of each member of XrDeserializeSceneFragmentMSFT, in order.
#define XR_LIST_STRUCT_XrDeserializeSceneFragmentMSFT(_) \
_(bufferSize) \
_(buffer) \
+/// Calls your macro with the name of each member of XrSceneDeserializeInfoMSFT, in order.
#define XR_LIST_STRUCT_XrSceneDeserializeInfoMSFT(_) \
_(type) \
_(next) \
_(fragmentCount) \
_(fragments) \
+/// Calls your macro with the name of each member of XrEventDataDisplayRefreshRateChangedFB, in order.
#define XR_LIST_STRUCT_XrEventDataDisplayRefreshRateChangedFB(_) \
_(type) \
_(next) \
_(fromDisplayRefreshRate) \
_(toDisplayRefreshRate) \
+/// Calls your macro with the name of each member of XrViveTrackerPathsHTCX, in order.
#define XR_LIST_STRUCT_XrViveTrackerPathsHTCX(_) \
_(type) \
_(next) \
_(persistentPath) \
_(rolePath) \
+/// Calls your macro with the name of each member of XrEventDataViveTrackerConnectedHTCX, in order.
#define XR_LIST_STRUCT_XrEventDataViveTrackerConnectedHTCX(_) \
_(type) \
_(next) \
_(paths) \
+/// Calls your macro with the name of each member of XrSystemFacialTrackingPropertiesHTC, in order.
#define XR_LIST_STRUCT_XrSystemFacialTrackingPropertiesHTC(_) \
_(type) \
_(next) \
_(supportEyeFacialTracking) \
_(supportLipFacialTracking) \
+/// Calls your macro with the name of each member of XrFacialExpressionsHTC, in order.
#define XR_LIST_STRUCT_XrFacialExpressionsHTC(_) \
_(type) \
_(next) \
@@ -2135,22 +2370,26 @@ XR_ENUM_STR(XrResult);
_(expressionCount) \
_(expressionWeightings) \
+/// Calls your macro with the name of each member of XrFacialTrackerCreateInfoHTC, in order.
#define XR_LIST_STRUCT_XrFacialTrackerCreateInfoHTC(_) \
_(type) \
_(next) \
_(facialTrackingType) \
+/// Calls your macro with the name of each member of XrSystemColorSpacePropertiesFB, in order.
#define XR_LIST_STRUCT_XrSystemColorSpacePropertiesFB(_) \
_(type) \
_(next) \
_(colorSpace) \
+/// Calls your macro with the name of each member of XrVector4sFB, in order.
#define XR_LIST_STRUCT_XrVector4sFB(_) \
_(x) \
_(y) \
_(z) \
_(w) \
+/// Calls your macro with the name of each member of XrHandTrackingMeshFB, in order.
#define XR_LIST_STRUCT_XrHandTrackingMeshFB(_) \
_(type) \
_(next) \
@@ -2170,6 +2409,7 @@ XR_ENUM_STR(XrResult);
_(indexCountOutput) \
_(indices) \
+/// Calls your macro with the name of each member of XrHandTrackingScaleFB, in order.
#define XR_LIST_STRUCT_XrHandTrackingScaleFB(_) \
_(type) \
_(next) \
@@ -2178,6 +2418,7 @@ XR_ENUM_STR(XrResult);
_(overrideHandScale) \
_(overrideValueInput) \
+/// Calls your macro with the name of each member of XrHandTrackingAimStateFB, in order.
#define XR_LIST_STRUCT_XrHandTrackingAimStateFB(_) \
_(type) \
_(next) \
@@ -2188,21 +2429,25 @@ XR_ENUM_STR(XrResult);
_(pinchStrengthRing) \
_(pinchStrengthLittle) \
+/// Calls your macro with the name of each member of XrHandCapsuleFB, in order.
#define XR_LIST_STRUCT_XrHandCapsuleFB(_) \
_(points) \
_(radius) \
_(joint) \
+/// Calls your macro with the name of each member of XrHandTrackingCapsulesStateFB, in order.
#define XR_LIST_STRUCT_XrHandTrackingCapsulesStateFB(_) \
_(type) \
_(next) \
_(capsules) \
+/// Calls your macro with the name of each member of XrSystemSpatialEntityPropertiesFB, in order.
#define XR_LIST_STRUCT_XrSystemSpatialEntityPropertiesFB(_) \
_(type) \
_(next) \
_(supportsSpatialEntity) \
+/// Calls your macro with the name of each member of XrSpatialAnchorCreateInfoFB, in order.
#define XR_LIST_STRUCT_XrSpatialAnchorCreateInfoFB(_) \
_(type) \
_(next) \
@@ -2210,6 +2455,7 @@ XR_ENUM_STR(XrResult);
_(poseInSpace) \
_(time) \
+/// Calls your macro with the name of each member of XrSpaceComponentStatusSetInfoFB, in order.
#define XR_LIST_STRUCT_XrSpaceComponentStatusSetInfoFB(_) \
_(type) \
_(next) \
@@ -2217,15 +2463,18 @@ XR_ENUM_STR(XrResult);
_(enabled) \
_(timeout) \
+/// Calls your macro with the name of each member of XrSpaceComponentStatusFB, in order.
#define XR_LIST_STRUCT_XrSpaceComponentStatusFB(_) \
_(type) \
_(next) \
_(enabled) \
_(changePending) \
+/// Calls your macro with the name of each member of XrUuidEXT, in order.
#define XR_LIST_STRUCT_XrUuidEXT(_) \
_(data) \
+/// Calls your macro with the name of each member of XrEventDataSpatialAnchorCreateCompleteFB, in order.
#define XR_LIST_STRUCT_XrEventDataSpatialAnchorCreateCompleteFB(_) \
_(type) \
_(next) \
@@ -2234,6 +2483,7 @@ XR_ENUM_STR(XrResult);
_(space) \
_(uuid) \
+/// Calls your macro with the name of each member of XrEventDataSpaceSetStatusCompleteFB, in order.
#define XR_LIST_STRUCT_XrEventDataSpaceSetStatusCompleteFB(_) \
_(type) \
_(next) \
@@ -2244,21 +2494,25 @@ XR_ENUM_STR(XrResult);
_(componentType) \
_(enabled) \
+/// Calls your macro with the name of each member of XrFoveationProfileCreateInfoFB, in order.
#define XR_LIST_STRUCT_XrFoveationProfileCreateInfoFB(_) \
_(type) \
_(next) \
+/// Calls your macro with the name of each member of XrSwapchainCreateInfoFoveationFB, in order.
#define XR_LIST_STRUCT_XrSwapchainCreateInfoFoveationFB(_) \
_(type) \
_(next) \
_(flags) \
+/// Calls your macro with the name of each member of XrSwapchainStateFoveationFB, in order.
#define XR_LIST_STRUCT_XrSwapchainStateFoveationFB(_) \
_(type) \
_(next) \
_(flags) \
_(profile) \
+/// Calls your macro with the name of each member of XrFoveationLevelProfileCreateInfoFB, in order.
#define XR_LIST_STRUCT_XrFoveationLevelProfileCreateInfoFB(_) \
_(type) \
_(next) \
@@ -2266,27 +2520,32 @@ XR_ENUM_STR(XrResult);
_(verticalOffset) \
_(dynamic) \
+/// Calls your macro with the name of each member of XrSystemKeyboardTrackingPropertiesFB, in order.
#define XR_LIST_STRUCT_XrSystemKeyboardTrackingPropertiesFB(_) \
_(type) \
_(next) \
_(supportsKeyboardTracking) \
+/// Calls your macro with the name of each member of XrKeyboardTrackingDescriptionFB, in order.
#define XR_LIST_STRUCT_XrKeyboardTrackingDescriptionFB(_) \
_(trackedKeyboardId) \
_(size) \
_(flags) \
_(name) \
+/// Calls your macro with the name of each member of XrKeyboardSpaceCreateInfoFB, in order.
#define XR_LIST_STRUCT_XrKeyboardSpaceCreateInfoFB(_) \
_(type) \
_(next) \
_(trackedKeyboardId) \
+/// Calls your macro with the name of each member of XrKeyboardTrackingQueryFB, in order.
#define XR_LIST_STRUCT_XrKeyboardTrackingQueryFB(_) \
_(type) \
_(next) \
_(flags) \
+/// Calls your macro with the name of each member of XrTriangleMeshCreateInfoFB, in order.
#define XR_LIST_STRUCT_XrTriangleMeshCreateInfoFB(_) \
_(type) \
_(next) \
@@ -2297,21 +2556,25 @@ XR_ENUM_STR(XrResult);
_(triangleCount) \
_(indexBuffer) \
+/// Calls your macro with the name of each member of XrSystemPassthroughPropertiesFB, in order.
#define XR_LIST_STRUCT_XrSystemPassthroughPropertiesFB(_) \
_(type) \
_(next) \
_(supportsPassthrough) \
+/// Calls your macro with the name of each member of XrSystemPassthroughProperties2FB, in order.
#define XR_LIST_STRUCT_XrSystemPassthroughProperties2FB(_) \
_(type) \
_(next) \
_(capabilities) \
+/// Calls your macro with the name of each member of XrPassthroughCreateInfoFB, in order.
#define XR_LIST_STRUCT_XrPassthroughCreateInfoFB(_) \
_(type) \
_(next) \
_(flags) \
+/// Calls your macro with the name of each member of XrPassthroughLayerCreateInfoFB, in order.
#define XR_LIST_STRUCT_XrPassthroughLayerCreateInfoFB(_) \
_(type) \
_(next) \
@@ -2319,6 +2582,7 @@ XR_ENUM_STR(XrResult);
_(flags) \
_(purpose) \
+/// Calls your macro with the name of each member of XrCompositionLayerPassthroughFB, in order.
#define XR_LIST_STRUCT_XrCompositionLayerPassthroughFB(_) \
_(type) \
_(next) \
@@ -2326,6 +2590,7 @@ XR_ENUM_STR(XrResult);
_(space) \
_(layerHandle) \
+/// Calls your macro with the name of each member of XrGeometryInstanceCreateInfoFB, in order.
#define XR_LIST_STRUCT_XrGeometryInstanceCreateInfoFB(_) \
_(type) \
_(next) \
@@ -2335,6 +2600,7 @@ XR_ENUM_STR(XrResult);
_(pose) \
_(scale) \
+/// Calls your macro with the name of each member of XrGeometryInstanceTransformFB, in order.
#define XR_LIST_STRUCT_XrGeometryInstanceTransformFB(_) \
_(type) \
_(next) \
@@ -2343,22 +2609,26 @@ XR_ENUM_STR(XrResult);
_(pose) \
_(scale) \
+/// Calls your macro with the name of each member of XrPassthroughStyleFB, in order.
#define XR_LIST_STRUCT_XrPassthroughStyleFB(_) \
_(type) \
_(next) \
_(textureOpacityFactor) \
_(edgeColor) \
+/// Calls your macro with the name of each member of XrPassthroughColorMapMonoToRgbaFB, in order.
#define XR_LIST_STRUCT_XrPassthroughColorMapMonoToRgbaFB(_) \
_(type) \
_(next) \
_(textureColorMap) \
+/// Calls your macro with the name of each member of XrPassthroughColorMapMonoToMonoFB, in order.
#define XR_LIST_STRUCT_XrPassthroughColorMapMonoToMonoFB(_) \
_(type) \
_(next) \
_(textureColorMap) \
+/// Calls your macro with the name of each member of XrPassthroughBrightnessContrastSaturationFB, in order.
#define XR_LIST_STRUCT_XrPassthroughBrightnessContrastSaturationFB(_) \
_(type) \
_(next) \
@@ -2366,16 +2636,19 @@ XR_ENUM_STR(XrResult);
_(contrast) \
_(saturation) \
+/// Calls your macro with the name of each member of XrEventDataPassthroughStateChangedFB, in order.
#define XR_LIST_STRUCT_XrEventDataPassthroughStateChangedFB(_) \
_(type) \
_(next) \
_(flags) \
+/// Calls your macro with the name of each member of XrRenderModelPathInfoFB, in order.
#define XR_LIST_STRUCT_XrRenderModelPathInfoFB(_) \
_(type) \
_(next) \
_(path) \
+/// Calls your macro with the name of each member of XrRenderModelPropertiesFB, in order.
#define XR_LIST_STRUCT_XrRenderModelPropertiesFB(_) \
_(type) \
_(next) \
@@ -2385,6 +2658,7 @@ XR_ENUM_STR(XrResult);
_(modelVersion) \
_(flags) \
+/// Calls your macro with the name of each member of XrRenderModelBufferFB, in order.
#define XR_LIST_STRUCT_XrRenderModelBufferFB(_) \
_(type) \
_(next) \
@@ -2392,47 +2666,56 @@ XR_ENUM_STR(XrResult);
_(bufferCountOutput) \
_(buffer) \
+/// Calls your macro with the name of each member of XrRenderModelLoadInfoFB, in order.
#define XR_LIST_STRUCT_XrRenderModelLoadInfoFB(_) \
_(type) \
_(next) \
_(modelKey) \
+/// Calls your macro with the name of each member of XrSystemRenderModelPropertiesFB, in order.
#define XR_LIST_STRUCT_XrSystemRenderModelPropertiesFB(_) \
_(type) \
_(next) \
_(supportsRenderModelLoading) \
+/// Calls your macro with the name of each member of XrRenderModelCapabilitiesRequestFB, in order.
#define XR_LIST_STRUCT_XrRenderModelCapabilitiesRequestFB(_) \
_(type) \
_(next) \
_(flags) \
+/// Calls your macro with the name of each member of XrViewLocateFoveatedRenderingVARJO, in order.
#define XR_LIST_STRUCT_XrViewLocateFoveatedRenderingVARJO(_) \
_(type) \
_(next) \
_(foveatedRenderingActive) \
+/// Calls your macro with the name of each member of XrFoveatedViewConfigurationViewVARJO, in order.
#define XR_LIST_STRUCT_XrFoveatedViewConfigurationViewVARJO(_) \
_(type) \
_(next) \
_(foveatedRenderingActive) \
+/// Calls your macro with the name of each member of XrSystemFoveatedRenderingPropertiesVARJO, in order.
#define XR_LIST_STRUCT_XrSystemFoveatedRenderingPropertiesVARJO(_) \
_(type) \
_(next) \
_(supportsFoveatedRendering) \
+/// Calls your macro with the name of each member of XrCompositionLayerDepthTestVARJO, in order.
#define XR_LIST_STRUCT_XrCompositionLayerDepthTestVARJO(_) \
_(type) \
_(next) \
_(depthTestRangeNearZ) \
_(depthTestRangeFarZ) \
+/// Calls your macro with the name of each member of XrSystemMarkerTrackingPropertiesVARJO, in order.
#define XR_LIST_STRUCT_XrSystemMarkerTrackingPropertiesVARJO(_) \
_(type) \
_(next) \
_(supportsMarkerTracking) \
+/// Calls your macro with the name of each member of XrEventDataMarkerTrackingUpdateVARJO, in order.
#define XR_LIST_STRUCT_XrEventDataMarkerTrackingUpdateVARJO(_) \
_(type) \
_(next) \
@@ -2441,35 +2724,42 @@ XR_ENUM_STR(XrResult);
_(isPredicted) \
_(time) \
+/// Calls your macro with the name of each member of XrMarkerSpaceCreateInfoVARJO, in order.
#define XR_LIST_STRUCT_XrMarkerSpaceCreateInfoVARJO(_) \
_(type) \
_(next) \
_(markerId) \
_(poseInMarkerSpace) \
+/// Calls your macro with the name of each member of XrSpatialAnchorPersistenceNameMSFT, in order.
#define XR_LIST_STRUCT_XrSpatialAnchorPersistenceNameMSFT(_) \
_(name) \
+/// Calls your macro with the name of each member of XrSpatialAnchorPersistenceInfoMSFT, in order.
#define XR_LIST_STRUCT_XrSpatialAnchorPersistenceInfoMSFT(_) \
_(type) \
_(next) \
_(spatialAnchorPersistenceName) \
_(spatialAnchor) \
+/// Calls your macro with the name of each member of XrSpatialAnchorFromPersistedAnchorCreateInfoMSFT, in order.
#define XR_LIST_STRUCT_XrSpatialAnchorFromPersistedAnchorCreateInfoMSFT(_) \
_(type) \
_(next) \
_(spatialAnchorStore) \
_(spatialAnchorPersistenceName) \
+/// Calls your macro with the name of each member of XrSpaceQueryInfoBaseHeaderFB, in order.
#define XR_LIST_STRUCT_XrSpaceQueryInfoBaseHeaderFB(_) \
_(type) \
_(next) \
+/// Calls your macro with the name of each member of XrSpaceFilterInfoBaseHeaderFB, in order.
#define XR_LIST_STRUCT_XrSpaceFilterInfoBaseHeaderFB(_) \
_(type) \
_(next) \
+/// Calls your macro with the name of each member of XrSpaceQueryInfoFB, in order.
#define XR_LIST_STRUCT_XrSpaceQueryInfoFB(_) \
_(type) \
_(next) \
@@ -2479,26 +2769,31 @@ XR_ENUM_STR(XrResult);
_(filter) \
_(excludeFilter) \
+/// Calls your macro with the name of each member of XrSpaceStorageLocationFilterInfoFB, in order.
#define XR_LIST_STRUCT_XrSpaceStorageLocationFilterInfoFB(_) \
_(type) \
_(next) \
_(location) \
+/// Calls your macro with the name of each member of XrSpaceUuidFilterInfoFB, in order.
#define XR_LIST_STRUCT_XrSpaceUuidFilterInfoFB(_) \
_(type) \
_(next) \
_(uuidCount) \
_(uuids) \
+/// Calls your macro with the name of each member of XrSpaceComponentFilterInfoFB, in order.
#define XR_LIST_STRUCT_XrSpaceComponentFilterInfoFB(_) \
_(type) \
_(next) \
_(componentType) \
+/// Calls your macro with the name of each member of XrSpaceQueryResultFB, in order.
#define XR_LIST_STRUCT_XrSpaceQueryResultFB(_) \
_(space) \
_(uuid) \
+/// Calls your macro with the name of each member of XrSpaceQueryResultsFB, in order.
#define XR_LIST_STRUCT_XrSpaceQueryResultsFB(_) \
_(type) \
_(next) \
@@ -2506,17 +2801,20 @@ XR_ENUM_STR(XrResult);
_(resultCountOutput) \
_(results) \
+/// Calls your macro with the name of each member of XrEventDataSpaceQueryResultsAvailableFB, in order.
#define XR_LIST_STRUCT_XrEventDataSpaceQueryResultsAvailableFB(_) \
_(type) \
_(next) \
_(requestId) \
+/// Calls your macro with the name of each member of XrEventDataSpaceQueryCompleteFB, in order.
#define XR_LIST_STRUCT_XrEventDataSpaceQueryCompleteFB(_) \
_(type) \
_(next) \
_(requestId) \
_(result) \
+/// Calls your macro with the name of each member of XrSpaceSaveInfoFB, in order.
#define XR_LIST_STRUCT_XrSpaceSaveInfoFB(_) \
_(type) \
_(next) \
@@ -2524,12 +2822,14 @@ XR_ENUM_STR(XrResult);
_(location) \
_(persistenceMode) \
+/// Calls your macro with the name of each member of XrSpaceEraseInfoFB, in order.
#define XR_LIST_STRUCT_XrSpaceEraseInfoFB(_) \
_(type) \
_(next) \
_(space) \
_(location) \
+/// Calls your macro with the name of each member of XrEventDataSpaceSaveCompleteFB, in order.
#define XR_LIST_STRUCT_XrEventDataSpaceSaveCompleteFB(_) \
_(type) \
_(next) \
@@ -2539,6 +2839,7 @@ XR_ENUM_STR(XrResult);
_(uuid) \
_(location) \
+/// Calls your macro with the name of each member of XrEventDataSpaceEraseCompleteFB, in order.
#define XR_LIST_STRUCT_XrEventDataSpaceEraseCompleteFB(_) \
_(type) \
_(next) \
@@ -2548,6 +2849,7 @@ XR_ENUM_STR(XrResult);
_(uuid) \
_(location) \
+/// Calls your macro with the name of each member of XrSwapchainImageFoveationVulkanFB, in order.
#define XR_LIST_STRUCT_XrSwapchainImageFoveationVulkanFB(_) \
_(type) \
_(next) \
@@ -2555,12 +2857,14 @@ XR_ENUM_STR(XrResult);
_(width) \
_(height) \
+/// Calls your macro with the name of each member of XrSwapchainStateAndroidSurfaceDimensionsFB, in order.
#define XR_LIST_STRUCT_XrSwapchainStateAndroidSurfaceDimensionsFB(_) \
_(type) \
_(next) \
_(width) \
_(height) \
+/// Calls your macro with the name of each member of XrSwapchainStateSamplerOpenGLESFB, in order.
#define XR_LIST_STRUCT_XrSwapchainStateSamplerOpenGLESFB(_) \
_(type) \
_(next) \
@@ -2575,6 +2879,7 @@ XR_ENUM_STR(XrResult);
_(maxAnisotropy) \
_(borderColor) \
+/// Calls your macro with the name of each member of XrSwapchainStateSamplerVulkanFB, in order.
#define XR_LIST_STRUCT_XrSwapchainStateSamplerVulkanFB(_) \
_(type) \
_(next) \
@@ -2590,6 +2895,7 @@ XR_ENUM_STR(XrResult);
_(maxAnisotropy) \
_(borderColor) \
+/// Calls your macro with the name of each member of XrCompositionLayerSpaceWarpInfoFB, in order.
#define XR_LIST_STRUCT_XrCompositionLayerSpaceWarpInfoFB(_) \
_(type) \
_(next) \
@@ -2602,26 +2908,31 @@ XR_ENUM_STR(XrResult);
_(nearZ) \
_(farZ) \
+/// Calls your macro with the name of each member of XrSystemSpaceWarpPropertiesFB, in order.
#define XR_LIST_STRUCT_XrSystemSpaceWarpPropertiesFB(_) \
_(type) \
_(next) \
_(recommendedMotionVectorImageRectWidth) \
_(recommendedMotionVectorImageRectHeight) \
+/// Calls your macro with the name of each member of XrExtent3DfFB, in order.
#define XR_LIST_STRUCT_XrExtent3DfFB(_) \
_(width) \
_(height) \
_(depth) \
+/// Calls your macro with the name of each member of XrOffset3DfFB, in order.
#define XR_LIST_STRUCT_XrOffset3DfFB(_) \
_(x) \
_(y) \
_(z) \
+/// Calls your macro with the name of each member of XrRect3DfFB, in order.
#define XR_LIST_STRUCT_XrRect3DfFB(_) \
_(offset) \
_(extent) \
+/// Calls your macro with the name of each member of XrSemanticLabelsFB, in order.
#define XR_LIST_STRUCT_XrSemanticLabelsFB(_) \
_(type) \
_(next) \
@@ -2629,6 +2940,7 @@ XR_ENUM_STR(XrResult);
_(bufferCountOutput) \
_(buffer) \
+/// Calls your macro with the name of each member of XrRoomLayoutFB, in order.
#define XR_LIST_STRUCT_XrRoomLayoutFB(_) \
_(type) \
_(next) \
@@ -2638,6 +2950,7 @@ XR_ENUM_STR(XrResult);
_(wallUuidCountOutput) \
_(wallUuids) \
+/// Calls your macro with the name of each member of XrBoundary2DFB, in order.
#define XR_LIST_STRUCT_XrBoundary2DFB(_) \
_(type) \
_(next) \
@@ -2645,11 +2958,13 @@ XR_ENUM_STR(XrResult);
_(vertexCountOutput) \
_(vertices) \
+/// Calls your macro with the name of each member of XrDigitalLensControlALMALENCE, in order.
#define XR_LIST_STRUCT_XrDigitalLensControlALMALENCE(_) \
_(type) \
_(next) \
_(flags) \
+/// Calls your macro with the name of each member of XrSpaceContainerFB, in order.
#define XR_LIST_STRUCT_XrSpaceContainerFB(_) \
_(type) \
_(next) \
@@ -2657,28 +2972,33 @@ XR_ENUM_STR(XrResult);
_(uuidCountOutput) \
_(uuids) \
+/// Calls your macro with the name of each member of XrPassthroughKeyboardHandsIntensityFB, in order.
#define XR_LIST_STRUCT_XrPassthroughKeyboardHandsIntensityFB(_) \
_(type) \
_(next) \
_(leftHandIntensity) \
_(rightHandIntensity) \
+/// Calls your macro with the name of each member of XrCompositionLayerSettingsFB, in order.
#define XR_LIST_STRUCT_XrCompositionLayerSettingsFB(_) \
_(type) \
_(next) \
_(layerFlags) \
+/// Calls your macro with the name of each member of XrVulkanSwapchainCreateInfoMETA, in order.
#define XR_LIST_STRUCT_XrVulkanSwapchainCreateInfoMETA(_) \
_(type) \
_(next) \
_(additionalCreateFlags) \
_(additionalUsageFlags) \
+/// Calls your macro with the name of each member of XrPerformanceMetricsStateMETA, in order.
#define XR_LIST_STRUCT_XrPerformanceMetricsStateMETA(_) \
_(type) \
_(next) \
_(enabled) \
+/// Calls your macro with the name of each member of XrPerformanceMetricsCounterMETA, in order.
#define XR_LIST_STRUCT_XrPerformanceMetricsCounterMETA(_) \
_(type) \
_(next) \
@@ -2687,8 +3007,107 @@ XR_ENUM_STR(XrResult);
_(uintValue) \
_(floatValue) \
+/// Calls your macro with the name of each member of XrSystemHeadsetIdPropertiesMETA, in order.
+#define XR_LIST_STRUCT_XrSystemHeadsetIdPropertiesMETA(_) \
+ _(type) \
+ _(next) \
+ _(id) \
+
+/// Calls your macro with the name of each member of XrPassthroughCreateInfoHTC, in order.
+#define XR_LIST_STRUCT_XrPassthroughCreateInfoHTC(_) \
+ _(type) \
+ _(next) \
+ _(form) \
+
+/// Calls your macro with the name of each member of XrPassthroughColorHTC, in order.
+#define XR_LIST_STRUCT_XrPassthroughColorHTC(_) \
+ _(type) \
+ _(next) \
+ _(alpha) \
+
+/// Calls your macro with the name of each member of XrPassthroughMeshTransformInfoHTC, in order.
+#define XR_LIST_STRUCT_XrPassthroughMeshTransformInfoHTC(_) \
+ _(type) \
+ _(next) \
+ _(vertexCount) \
+ _(vertices) \
+ _(indexCount) \
+ _(indices) \
+ _(baseSpace) \
+ _(time) \
+ _(pose) \
+ _(scale) \
+
+/// Calls your macro with the name of each member of XrCompositionLayerPassthroughHTC, in order.
+#define XR_LIST_STRUCT_XrCompositionLayerPassthroughHTC(_) \
+ _(type) \
+ _(next) \
+ _(layerFlags) \
+ _(space) \
+ _(passthrough) \
+ _(color) \
+
+/// Calls your macro with the name of each member of XrFoveationApplyInfoHTC, in order.
+#define XR_LIST_STRUCT_XrFoveationApplyInfoHTC(_) \
+ _(type) \
+ _(next) \
+ _(mode) \
+ _(subImageCount) \
+ _(subImages) \
+
+/// Calls your macro with the name of each member of XrFoveationConfigurationHTC, in order.
+#define XR_LIST_STRUCT_XrFoveationConfigurationHTC(_) \
+ _(level) \
+ _(clearFovDegree) \
+ _(focalCenterOffset) \
+/// Calls your macro with the name of each member of XrFoveationDynamicModeInfoHTC, in order.
+#define XR_LIST_STRUCT_XrFoveationDynamicModeInfoHTC(_) \
+ _(type) \
+ _(next) \
+ _(dynamicFlags) \
+/// Calls your macro with the name of each member of XrFoveationCustomModeInfoHTC, in order.
+#define XR_LIST_STRUCT_XrFoveationCustomModeInfoHTC(_) \
+ _(type) \
+ _(next) \
+ _(configCount) \
+ _(configs) \
+
+/// Calls your macro with the name of each member of XrActiveActionSetPriorityEXT, in order.
+#define XR_LIST_STRUCT_XrActiveActionSetPriorityEXT(_) \
+ _(actionSet) \
+ _(priorityOverride) \
+
+/// Calls your macro with the name of each member of XrActiveActionSetPrioritiesEXT, in order.
+#define XR_LIST_STRUCT_XrActiveActionSetPrioritiesEXT(_) \
+ _(type) \
+ _(next) \
+ _(actionSetPriorityCount) \
+ _(actionSetPriorities) \
+
+
+
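Each XR_LIST_STRUCT_* list above invokes the caller-supplied macro once per member, in declaration order, so per-member code can be generated instead of hand-written. A minimal sketch of consuming one of these lists follows; DUMP_MEMBER and dump_XrPosef_members are illustrative names, not part of the OpenXR SDK:

#include <stdio.h>
#include <openxr/openxr.h>
#include <openxr/openxr_reflection.h>

/* Expands to one printf per member of XrPosef: orientation, then position. */
#define DUMP_MEMBER(name) printf("  " #name "\n");

static void dump_XrPosef_members(void)
{
    printf("XrPosef members:\n");
    XR_LIST_STRUCT_XrPosef(DUMP_MEMBER)
}

#undef DUMP_MEMBER

The same pattern works for any of the lists above, since each one simply forwards the member names to whatever macro is passed in.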
+/// Calls your macro with the structure type name and the XrStructureType constant for
+/// each known/available structure type, excluding those unavailable due to preprocessor definitions.
+#define XR_LIST_STRUCTURE_TYPES(_) \
+ XR_LIST_STRUCTURE_TYPES_CORE(_) \
+ XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_D3D11(_) \
+ XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_D3D12(_) \
+ XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL(_) \
+ XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_XR_USE_PLATFORM_WAYLAND(_) \
+ XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_XR_USE_PLATFORM_WIN32(_) \
+ XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_XR_USE_PLATFORM_XCB(_) \
+ XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_XR_USE_PLATFORM_XLIB(_) \
+ XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_ES(_) \
+ XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_ES_XR_USE_PLATFORM_ANDROID(_) \
+ XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_VULKAN(_) \
+ XR_LIST_STRUCTURE_TYPES_XR_USE_PLATFORM_ANDROID(_) \
+ XR_LIST_STRUCTURE_TYPES_XR_USE_PLATFORM_EGL(_) \
+ XR_LIST_STRUCTURE_TYPES_XR_USE_PLATFORM_WIN32(_) \
+
+
+/// Implementation detail of XR_LIST_STRUCTURE_TYPES() - structure types available without any preprocessor definitions
#define XR_LIST_STRUCTURE_TYPES_CORE(_) \
_(XrApiLayerProperties, XR_TYPE_API_LAYER_PROPERTIES) \
_(XrExtensionProperties, XR_TYPE_EXTENSION_PROPERTIES) \
@@ -2889,99 +3308,117 @@ XR_ENUM_STR(XrResult);
_(XrCompositionLayerSettingsFB, XR_TYPE_COMPOSITION_LAYER_SETTINGS_FB) \
_(XrPerformanceMetricsStateMETA, XR_TYPE_PERFORMANCE_METRICS_STATE_META) \
_(XrPerformanceMetricsCounterMETA, XR_TYPE_PERFORMANCE_METRICS_COUNTER_META) \
-
-
+ _(XrSystemHeadsetIdPropertiesMETA, XR_TYPE_SYSTEM_HEADSET_ID_PROPERTIES_META) \
+ _(XrPassthroughCreateInfoHTC, XR_TYPE_PASSTHROUGH_CREATE_INFO_HTC) \
+ _(XrPassthroughColorHTC, XR_TYPE_PASSTHROUGH_COLOR_HTC) \
+ _(XrPassthroughMeshTransformInfoHTC, XR_TYPE_PASSTHROUGH_MESH_TRANSFORM_INFO_HTC) \
+ _(XrCompositionLayerPassthroughHTC, XR_TYPE_COMPOSITION_LAYER_PASSTHROUGH_HTC) \
+ _(XrFoveationApplyInfoHTC, XR_TYPE_FOVEATION_APPLY_INFO_HTC) \
+ _(XrFoveationDynamicModeInfoHTC, XR_TYPE_FOVEATION_DYNAMIC_MODE_INFO_HTC) \
+ _(XrFoveationCustomModeInfoHTC, XR_TYPE_FOVEATION_CUSTOM_MODE_INFO_HTC) \
+ _(XrActiveActionSetPrioritiesEXT, XR_TYPE_ACTIVE_ACTION_SET_PRIORITIES_EXT) \
#if defined(XR_USE_GRAPHICS_API_D3D11)
+/// Implementation detail of XR_LIST_STRUCTURE_TYPES()
+/// Structure types available only when XR_USE_GRAPHICS_API_D3D11 is defined
#define XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_D3D11(_) \
_(XrGraphicsBindingD3D11KHR, XR_TYPE_GRAPHICS_BINDING_D3D11_KHR) \
_(XrSwapchainImageD3D11KHR, XR_TYPE_SWAPCHAIN_IMAGE_D3D11_KHR) \
_(XrGraphicsRequirementsD3D11KHR, XR_TYPE_GRAPHICS_REQUIREMENTS_D3D11_KHR) \
-
#else
#define XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_D3D11(_)
#endif
#if defined(XR_USE_GRAPHICS_API_D3D12)
+/// Implementation detail of XR_LIST_STRUCTURE_TYPES()
+/// Structure types available only when XR_USE_GRAPHICS_API_D3D12 is defined
#define XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_D3D12(_) \
_(XrGraphicsBindingD3D12KHR, XR_TYPE_GRAPHICS_BINDING_D3D12_KHR) \
_(XrSwapchainImageD3D12KHR, XR_TYPE_SWAPCHAIN_IMAGE_D3D12_KHR) \
_(XrGraphicsRequirementsD3D12KHR, XR_TYPE_GRAPHICS_REQUIREMENTS_D3D12_KHR) \
-
#else
#define XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_D3D12(_)
#endif
#if defined(XR_USE_GRAPHICS_API_OPENGL)
+/// Implementation detail of XR_LIST_STRUCTURE_TYPES()
+/// Structure types available only when XR_USE_GRAPHICS_API_OPENGL is defined
#define XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL(_) \
_(XrSwapchainImageOpenGLKHR, XR_TYPE_SWAPCHAIN_IMAGE_OPENGL_KHR) \
_(XrGraphicsRequirementsOpenGLKHR, XR_TYPE_GRAPHICS_REQUIREMENTS_OPENGL_KHR) \
-
#else
#define XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL(_)
#endif
#if defined(XR_USE_GRAPHICS_API_OPENGL) && defined(XR_USE_PLATFORM_WAYLAND)
+/// Implementation detail of XR_LIST_STRUCTURE_TYPES()
+/// Structure types available only when XR_USE_GRAPHICS_API_OPENGL and XR_USE_PLATFORM_WAYLAND are defined
#define XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_XR_USE_PLATFORM_WAYLAND(_) \
_(XrGraphicsBindingOpenGLWaylandKHR, XR_TYPE_GRAPHICS_BINDING_OPENGL_WAYLAND_KHR) \
-
#else
#define XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_XR_USE_PLATFORM_WAYLAND(_)
#endif
#if defined(XR_USE_GRAPHICS_API_OPENGL) && defined(XR_USE_PLATFORM_WIN32)
+/// Implementation detail of XR_LIST_STRUCTURE_TYPES()
+/// Structure types available only when XR_USE_GRAPHICS_API_OPENGL and XR_USE_PLATFORM_WIN32 are defined
#define XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_XR_USE_PLATFORM_WIN32(_) \
_(XrGraphicsBindingOpenGLWin32KHR, XR_TYPE_GRAPHICS_BINDING_OPENGL_WIN32_KHR) \
-
#else
#define XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_XR_USE_PLATFORM_WIN32(_)
#endif
#if defined(XR_USE_GRAPHICS_API_OPENGL) && defined(XR_USE_PLATFORM_XCB)
+/// Implementation detail of XR_LIST_STRUCTURE_TYPES()
+/// Structure types available only when XR_USE_GRAPHICS_API_OPENGL and XR_USE_PLATFORM_XCB are defined
#define XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_XR_USE_PLATFORM_XCB(_) \
_(XrGraphicsBindingOpenGLXcbKHR, XR_TYPE_GRAPHICS_BINDING_OPENGL_XCB_KHR) \
-
#else
#define XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_XR_USE_PLATFORM_XCB(_)
#endif
#if defined(XR_USE_GRAPHICS_API_OPENGL) && defined(XR_USE_PLATFORM_XLIB)
+/// Implementation detail of XR_LIST_STRUCTURE_TYPES()
+/// Structure types available only when XR_USE_GRAPHICS_API_OPENGL and XR_USE_PLATFORM_XLIB are defined
#define XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_XR_USE_PLATFORM_XLIB(_) \
_(XrGraphicsBindingOpenGLXlibKHR, XR_TYPE_GRAPHICS_BINDING_OPENGL_XLIB_KHR) \
-
#else
#define XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_XR_USE_PLATFORM_XLIB(_)
#endif
#if defined(XR_USE_GRAPHICS_API_OPENGL_ES)
+/// Implementation detail of XR_LIST_STRUCTURE_TYPES()
+/// Structure types available only when XR_USE_GRAPHICS_API_OPENGL_ES is defined
#define XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_ES(_) \
_(XrSwapchainImageOpenGLESKHR, XR_TYPE_SWAPCHAIN_IMAGE_OPENGL_ES_KHR) \
_(XrGraphicsRequirementsOpenGLESKHR, XR_TYPE_GRAPHICS_REQUIREMENTS_OPENGL_ES_KHR) \
_(XrSwapchainStateSamplerOpenGLESFB, XR_TYPE_SWAPCHAIN_STATE_SAMPLER_OPENGL_ES_FB) \
-
#else
#define XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_ES(_)
#endif
#if defined(XR_USE_GRAPHICS_API_OPENGL_ES) && defined(XR_USE_PLATFORM_ANDROID)
+/// Implementation detail of XR_LIST_STRUCTURE_TYPES()
+/// Structure types available only when XR_USE_GRAPHICS_API_OPENGL_ES and XR_USE_PLATFORM_ANDROID are defined
#define XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_ES_XR_USE_PLATFORM_ANDROID(_) \
_(XrGraphicsBindingOpenGLESAndroidKHR, XR_TYPE_GRAPHICS_BINDING_OPENGL_ES_ANDROID_KHR) \
-
#else
#define XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_ES_XR_USE_PLATFORM_ANDROID(_)
#endif
#if defined(XR_USE_GRAPHICS_API_VULKAN)
+/// Implementation detail of XR_LIST_STRUCTURE_TYPES()
+/// Structure types available only when XR_USE_GRAPHICS_API_VULKAN is defined
#define XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_VULKAN(_) \
_(XrVulkanSwapchainFormatListCreateInfoKHR, XR_TYPE_VULKAN_SWAPCHAIN_FORMAT_LIST_CREATE_INFO_KHR) \
_(XrGraphicsBindingVulkanKHR, XR_TYPE_GRAPHICS_BINDING_VULKAN_KHR) \
@@ -2994,58 +3431,47 @@ XR_ENUM_STR(XrResult);
_(XrSwapchainStateSamplerVulkanFB, XR_TYPE_SWAPCHAIN_STATE_SAMPLER_VULKAN_FB) \
_(XrVulkanSwapchainCreateInfoMETA, XR_TYPE_VULKAN_SWAPCHAIN_CREATE_INFO_META) \
-
#else
#define XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_VULKAN(_)
#endif
#if defined(XR_USE_PLATFORM_ANDROID)
+/// Implementation detail of XR_LIST_STRUCTURE_TYPES()
+/// Structure types available only when XR_USE_PLATFORM_ANDROID is defined
#define XR_LIST_STRUCTURE_TYPES_XR_USE_PLATFORM_ANDROID(_) \
_(XrInstanceCreateInfoAndroidKHR, XR_TYPE_INSTANCE_CREATE_INFO_ANDROID_KHR) \
_(XrLoaderInitInfoAndroidKHR, XR_TYPE_LOADER_INIT_INFO_ANDROID_KHR) \
_(XrAndroidSurfaceSwapchainCreateInfoFB, XR_TYPE_ANDROID_SURFACE_SWAPCHAIN_CREATE_INFO_FB) \
_(XrSwapchainStateAndroidSurfaceDimensionsFB, XR_TYPE_SWAPCHAIN_STATE_ANDROID_SURFACE_DIMENSIONS_FB) \
-
#else
#define XR_LIST_STRUCTURE_TYPES_XR_USE_PLATFORM_ANDROID(_)
#endif
#if defined(XR_USE_PLATFORM_EGL)
+/// Implementation detail of XR_LIST_STRUCTURE_TYPES()
+/// Structure types available only when XR_USE_PLATFORM_EGL is defined
#define XR_LIST_STRUCTURE_TYPES_XR_USE_PLATFORM_EGL(_) \
_(XrGraphicsBindingEGLMNDX, XR_TYPE_GRAPHICS_BINDING_EGL_MNDX) \
-
#else
#define XR_LIST_STRUCTURE_TYPES_XR_USE_PLATFORM_EGL(_)
#endif
#if defined(XR_USE_PLATFORM_WIN32)
+/// Implementation detail of XR_LIST_STRUCTURE_TYPES()
+/// Structure types available only when XR_USE_PLATFORM_WIN32 is defined
#define XR_LIST_STRUCTURE_TYPES_XR_USE_PLATFORM_WIN32(_) \
_(XrHolographicWindowAttachmentMSFT, XR_TYPE_HOLOGRAPHIC_WINDOW_ATTACHMENT_MSFT) \
-
#else
#define XR_LIST_STRUCTURE_TYPES_XR_USE_PLATFORM_WIN32(_)
#endif
-#define XR_LIST_STRUCTURE_TYPES(_) \
- XR_LIST_STRUCTURE_TYPES_CORE(_) \
- XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_D3D11(_) \
- XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_D3D12(_) \
- XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL(_) \
- XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_XR_USE_PLATFORM_WAYLAND(_) \
- XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_XR_USE_PLATFORM_WIN32(_) \
- XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_XR_USE_PLATFORM_XCB(_) \
- XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_XR_USE_PLATFORM_XLIB(_) \
- XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_ES(_) \
- XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_ES_XR_USE_PLATFORM_ANDROID(_) \
- XR_LIST_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_VULKAN(_) \
- XR_LIST_STRUCTURE_TYPES_XR_USE_PLATFORM_ANDROID(_) \
- XR_LIST_STRUCTURE_TYPES_XR_USE_PLATFORM_EGL(_) \
- XR_LIST_STRUCTURE_TYPES_XR_USE_PLATFORM_WIN32(_) \
+/// Calls your macro with the name and extension number of all known
+/// extensions in this version of the spec.
#define XR_LIST_EXTENSIONS(_) \
_(XR_KHR_android_thread_settings, 4) \
_(XR_KHR_android_surface_swapchain, 5) \
@@ -3151,7 +3577,11 @@ XR_ENUM_STR(XrResult);
_(XR_FB_composition_layer_settings, 205) \
_(XR_META_vulkan_swapchain_create_info, 228) \
_(XR_META_performance_metrics, 233) \
+ _(XR_META_headset_id, 246) \
_(XR_EXT_uuid, 300) \
+ _(XR_HTC_passthrough, 318) \
+ _(XR_HTC_foveation, 319) \
+ _(XR_EXT_active_action_set_priority, 374) \
#endif
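For orientation, a hedged sketch of one common way XR_LIST_EXTENSIONS is consumed, building a name/number lookup table; the struct and table names are illustrative assumptions:

#include <openxr/openxr_reflection.h>

struct ExtensionEntry { const char* name; int number; };

// One row per extension known to this header. Stringizing the first argument
// yields the extension name token itself, before the XR_* define is expanded.
static const ExtensionEntry kKnownExtensions[] = {
#define EXTENSION_ROW(name, number) { #name, number },
    XR_LIST_EXTENSIONS(EXTENSION_ROW)
#undef EXTENSION_ROW
};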
diff --git a/thirdparty/openxr/include/openxr/openxr_reflection_parent_structs.h b/thirdparty/openxr/include/openxr/openxr_reflection_parent_structs.h
new file mode 100644
index 0000000000..19b0e1c3f6
--- /dev/null
+++ b/thirdparty/openxr/include/openxr/openxr_reflection_parent_structs.h
@@ -0,0 +1,261 @@
+#ifndef OPENXR_REFLECTION_PARENT_STRUCTS_H_
+#define OPENXR_REFLECTION_PARENT_STRUCTS_H_ 1
+
+/*
+** Copyright (c) 2017-2022, The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0 OR MIT
+*/
+
+/*
+** This header is generated from the Khronos OpenXR XML API Registry.
+**
+*/
+
+#include "openxr.h"
+
+/*
+This file contains expansion macros (X Macros) for OpenXR structures that have a parent type.
+*/
+
+
+/// Like XR_LIST_ALL_STRUCTURE_TYPES, but only includes types whose parent struct type is XrCompositionLayerBaseHeader
+#define XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrCompositionLayerBaseHeader(_avail, _unavail) \
+ _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrCompositionLayerBaseHeader_CORE(_avail, _unavail) \
+
+
+// Implementation detail of XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrCompositionLayerBaseHeader()
+#define _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrCompositionLayerBaseHeader_CORE(_avail, _unavail) \
+ _avail(XrCompositionLayerProjection, XR_TYPE_COMPOSITION_LAYER_PROJECTION) \
+ _avail(XrCompositionLayerQuad, XR_TYPE_COMPOSITION_LAYER_QUAD) \
+ _avail(XrCompositionLayerCubeKHR, XR_TYPE_COMPOSITION_LAYER_CUBE_KHR) \
+ _avail(XrCompositionLayerCylinderKHR, XR_TYPE_COMPOSITION_LAYER_CYLINDER_KHR) \
+ _avail(XrCompositionLayerEquirectKHR, XR_TYPE_COMPOSITION_LAYER_EQUIRECT_KHR) \
+ _avail(XrCompositionLayerEquirect2KHR, XR_TYPE_COMPOSITION_LAYER_EQUIRECT2_KHR) \
+ _avail(XrCompositionLayerPassthroughHTC, XR_TYPE_COMPOSITION_LAYER_PASSTHROUGH_HTC) \
+
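A hedged usage sketch for the two-callback form of these child-type lists, mapping a composition layer's type enum to its struct name; the function name below is an illustrative assumption:

#include <openxr/openxr_reflection_parent_structs.h>

// Returns the struct name for a composition layer type, or nullptr when the
// type is known to the registry but compiled out by the current defines.
inline const char* compositionLayerTypeName(XrStructureType type) {
#define AVAIL_CASE(structName, enumValue) case enumValue: return #structName;
#define UNAVAIL_CASE(structName, enumValue)
    switch (type) {
        XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrCompositionLayerBaseHeader(AVAIL_CASE, UNAVAIL_CASE)
        default: return nullptr;
    }
#undef AVAIL_CASE
#undef UNAVAIL_CASE
}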
+
+
+
+
+/// Like XR_LIST_ALL_STRUCTURE_TYPES, but only includes types whose parent struct type is XrEventDataBaseHeader
+#define XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrEventDataBaseHeader(_avail, _unavail) \
+ _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrEventDataBaseHeader_CORE(_avail, _unavail) \
+
+
+// Implementation detail of XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrEventDataBaseHeader()
+#define _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrEventDataBaseHeader_CORE(_avail, _unavail) \
+ _avail(XrEventDataEventsLost, XR_TYPE_EVENT_DATA_EVENTS_LOST) \
+ _avail(XrEventDataInstanceLossPending, XR_TYPE_EVENT_DATA_INSTANCE_LOSS_PENDING) \
+ _avail(XrEventDataSessionStateChanged, XR_TYPE_EVENT_DATA_SESSION_STATE_CHANGED) \
+ _avail(XrEventDataReferenceSpaceChangePending, XR_TYPE_EVENT_DATA_REFERENCE_SPACE_CHANGE_PENDING) \
+ _avail(XrEventDataInteractionProfileChanged, XR_TYPE_EVENT_DATA_INTERACTION_PROFILE_CHANGED) \
+ _avail(XrEventDataVisibilityMaskChangedKHR, XR_TYPE_EVENT_DATA_VISIBILITY_MASK_CHANGED_KHR) \
+ _avail(XrEventDataPerfSettingsEXT, XR_TYPE_EVENT_DATA_PERF_SETTINGS_EXT) \
+ _avail(XrEventDataMainSessionVisibilityChangedEXTX, XR_TYPE_EVENT_DATA_MAIN_SESSION_VISIBILITY_CHANGED_EXTX) \
+ _avail(XrEventDataDisplayRefreshRateChangedFB, XR_TYPE_EVENT_DATA_DISPLAY_REFRESH_RATE_CHANGED_FB) \
+ _avail(XrEventDataViveTrackerConnectedHTCX, XR_TYPE_EVENT_DATA_VIVE_TRACKER_CONNECTED_HTCX) \
+ _avail(XrEventDataSpatialAnchorCreateCompleteFB, XR_TYPE_EVENT_DATA_SPATIAL_ANCHOR_CREATE_COMPLETE_FB) \
+ _avail(XrEventDataSpaceSetStatusCompleteFB, XR_TYPE_EVENT_DATA_SPACE_SET_STATUS_COMPLETE_FB) \
+ _avail(XrEventDataMarkerTrackingUpdateVARJO, XR_TYPE_EVENT_DATA_MARKER_TRACKING_UPDATE_VARJO) \
+ _avail(XrEventDataSpaceQueryResultsAvailableFB, XR_TYPE_EVENT_DATA_SPACE_QUERY_RESULTS_AVAILABLE_FB) \
+ _avail(XrEventDataSpaceQueryCompleteFB, XR_TYPE_EVENT_DATA_SPACE_QUERY_COMPLETE_FB) \
+ _avail(XrEventDataSpaceSaveCompleteFB, XR_TYPE_EVENT_DATA_SPACE_SAVE_COMPLETE_FB) \
+ _avail(XrEventDataSpaceEraseCompleteFB, XR_TYPE_EVENT_DATA_SPACE_ERASE_COMPLETE_FB) \
+
+
+
+
+
+/// Like XR_LIST_ALL_STRUCTURE_TYPES, but only includes types whose parent struct type is XrHapticBaseHeader
+#define XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrHapticBaseHeader(_avail, _unavail) \
+ _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrHapticBaseHeader_CORE(_avail, _unavail) \
+
+
+// Implementation detail of XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrHapticBaseHeader()
+#define _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrHapticBaseHeader_CORE(_avail, _unavail) \
+ _avail(XrHapticVibration, XR_TYPE_HAPTIC_VIBRATION) \
+
+
+
+
+
+/// Like XR_LIST_ALL_STRUCTURE_TYPES, but only includes types whose parent struct type is XrSwapchainImageBaseHeader
+#define XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainImageBaseHeader(_avail, _unavail) \
+ _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainImageBaseHeader_CORE(_avail, _unavail) \
+ _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainImageBaseHeader_XR_USE_GRAPHICS_API_D3D11(_avail, _unavail) \
+ _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainImageBaseHeader_XR_USE_GRAPHICS_API_D3D12(_avail, _unavail) \
+ _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainImageBaseHeader_XR_USE_GRAPHICS_API_OPENGL(_avail, _unavail) \
+ _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainImageBaseHeader_XR_USE_GRAPHICS_API_OPENGL_ES(_avail, _unavail) \
+ _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainImageBaseHeader_XR_USE_GRAPHICS_API_VULKAN(_avail, _unavail) \
+
+
+// Implementation detail of XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainImageBaseHeader()
+#define _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainImageBaseHeader_CORE(_avail, _unavail) \
+
+
+#if defined(XR_USE_GRAPHICS_API_D3D11)
+#define _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainImageBaseHeader_XR_USE_GRAPHICS_API_D3D11(_avail, _unavail) \
+ _avail(XrSwapchainImageD3D11KHR, XR_TYPE_SWAPCHAIN_IMAGE_D3D11_KHR) \
+
+#else
+#define _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainImageBaseHeader_XR_USE_GRAPHICS_API_D3D11(_avail, _unavail) \
+ _unavail(XrSwapchainImageD3D11KHR, XR_TYPE_SWAPCHAIN_IMAGE_D3D11_KHR) \
+
+#endif
+
+#if defined(XR_USE_GRAPHICS_API_D3D12)
+#define _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainImageBaseHeader_XR_USE_GRAPHICS_API_D3D12(_avail, _unavail) \
+ _avail(XrSwapchainImageD3D12KHR, XR_TYPE_SWAPCHAIN_IMAGE_D3D12_KHR) \
+
+#else
+#define _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainImageBaseHeader_XR_USE_GRAPHICS_API_D3D12(_avail, _unavail) \
+ _unavail(XrSwapchainImageD3D12KHR, XR_TYPE_SWAPCHAIN_IMAGE_D3D12_KHR) \
+
+#endif
+
+#if defined(XR_USE_GRAPHICS_API_OPENGL)
+#define _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainImageBaseHeader_XR_USE_GRAPHICS_API_OPENGL(_avail, _unavail) \
+ _avail(XrSwapchainImageOpenGLKHR, XR_TYPE_SWAPCHAIN_IMAGE_OPENGL_KHR) \
+
+#else
+#define _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainImageBaseHeader_XR_USE_GRAPHICS_API_OPENGL(_avail, _unavail) \
+ _unavail(XrSwapchainImageOpenGLKHR, XR_TYPE_SWAPCHAIN_IMAGE_OPENGL_KHR) \
+
+#endif
+
+#if defined(XR_USE_GRAPHICS_API_OPENGL_ES)
+#define _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainImageBaseHeader_XR_USE_GRAPHICS_API_OPENGL_ES(_avail, _unavail) \
+ _avail(XrSwapchainImageOpenGLESKHR, XR_TYPE_SWAPCHAIN_IMAGE_OPENGL_ES_KHR) \
+
+#else
+#define _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainImageBaseHeader_XR_USE_GRAPHICS_API_OPENGL_ES(_avail, _unavail) \
+ _unavail(XrSwapchainImageOpenGLESKHR, XR_TYPE_SWAPCHAIN_IMAGE_OPENGL_ES_KHR) \
+
+#endif
+
+#if defined(XR_USE_GRAPHICS_API_VULKAN)
+#define _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainImageBaseHeader_XR_USE_GRAPHICS_API_VULKAN(_avail, _unavail) \
+ _avail(XrSwapchainImageVulkanKHR, XR_TYPE_SWAPCHAIN_IMAGE_VULKAN_KHR) \
+
+#else
+#define _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainImageBaseHeader_XR_USE_GRAPHICS_API_VULKAN(_avail, _unavail) \
+ _unavail(XrSwapchainImageVulkanKHR, XR_TYPE_SWAPCHAIN_IMAGE_VULKAN_KHR) \
+
+#endif
+
+
+
+
+/// Like XR_LIST_ALL_STRUCTURE_TYPES, but only includes types whose parent struct type is XrLoaderInitInfoBaseHeaderKHR
+#define XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrLoaderInitInfoBaseHeaderKHR(_avail, _unavail) \
+ _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrLoaderInitInfoBaseHeaderKHR_CORE(_avail, _unavail) \
+ _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrLoaderInitInfoBaseHeaderKHR_XR_USE_PLATFORM_ANDROID(_avail, _unavail) \
+
+
+// Implementation detail of XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrLoaderInitInfoBaseHeaderKHR()
+#define _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrLoaderInitInfoBaseHeaderKHR_CORE(_avail, _unavail) \
+
+
+#if defined(XR_USE_PLATFORM_ANDROID)
+#define _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrLoaderInitInfoBaseHeaderKHR_XR_USE_PLATFORM_ANDROID(_avail, _unavail) \
+ _avail(XrLoaderInitInfoAndroidKHR, XR_TYPE_LOADER_INIT_INFO_ANDROID_KHR) \
+
+#else
+#define _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrLoaderInitInfoBaseHeaderKHR_XR_USE_PLATFORM_ANDROID(_avail, _unavail) \
+ _unavail(XrLoaderInitInfoAndroidKHR, XR_TYPE_LOADER_INIT_INFO_ANDROID_KHR) \
+
+#endif
+
+
+
+
+/// Like XR_LIST_ALL_STRUCTURE_TYPES, but only includes types whose parent struct type is XrBindingModificationBaseHeaderKHR
+#define XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrBindingModificationBaseHeaderKHR(_avail, _unavail) \
+ _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrBindingModificationBaseHeaderKHR_CORE(_avail, _unavail) \
+
+
+// Implementation detail of XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrBindingModificationBaseHeaderKHR()
+#define _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrBindingModificationBaseHeaderKHR_CORE(_avail, _unavail) \
+ _avail(XrInteractionProfileDpadBindingEXT, XR_TYPE_INTERACTION_PROFILE_DPAD_BINDING_EXT) \
+ _avail(XrInteractionProfileAnalogThresholdVALVE, XR_TYPE_INTERACTION_PROFILE_ANALOG_THRESHOLD_VALVE) \
+
+
+
+
+
+/// Like XR_LIST_ALL_STRUCTURE_TYPES, but only includes types whose parent struct type is XrSwapchainStateBaseHeaderFB
+#define XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainStateBaseHeaderFB(_avail, _unavail) \
+ _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainStateBaseHeaderFB_CORE(_avail, _unavail) \
+ _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainStateBaseHeaderFB_XR_USE_GRAPHICS_API_OPENGL_ES(_avail, _unavail) \
+ _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainStateBaseHeaderFB_XR_USE_GRAPHICS_API_VULKAN(_avail, _unavail) \
+ _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainStateBaseHeaderFB_XR_USE_PLATFORM_ANDROID(_avail, _unavail) \
+
+
+// Implementation detail of XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainStateBaseHeaderFB()
+#define _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainStateBaseHeaderFB_CORE(_avail, _unavail) \
+ _avail(XrSwapchainStateFoveationFB, XR_TYPE_SWAPCHAIN_STATE_FOVEATION_FB) \
+
+
+#if defined(XR_USE_GRAPHICS_API_OPENGL_ES)
+#define _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainStateBaseHeaderFB_XR_USE_GRAPHICS_API_OPENGL_ES(_avail, _unavail) \
+ _avail(XrSwapchainStateSamplerOpenGLESFB, XR_TYPE_SWAPCHAIN_STATE_SAMPLER_OPENGL_ES_FB) \
+
+#else
+#define _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainStateBaseHeaderFB_XR_USE_GRAPHICS_API_OPENGL_ES(_avail, _unavail) \
+ _unavail(XrSwapchainStateSamplerOpenGLESFB, XR_TYPE_SWAPCHAIN_STATE_SAMPLER_OPENGL_ES_FB) \
+
+#endif
+
+#if defined(XR_USE_GRAPHICS_API_VULKAN)
+#define _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainStateBaseHeaderFB_XR_USE_GRAPHICS_API_VULKAN(_avail, _unavail) \
+ _avail(XrSwapchainStateSamplerVulkanFB, XR_TYPE_SWAPCHAIN_STATE_SAMPLER_VULKAN_FB) \
+
+#else
+#define _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainStateBaseHeaderFB_XR_USE_GRAPHICS_API_VULKAN(_avail, _unavail) \
+ _unavail(XrSwapchainStateSamplerVulkanFB, XR_TYPE_SWAPCHAIN_STATE_SAMPLER_VULKAN_FB) \
+
+#endif
+
+#if defined(XR_USE_PLATFORM_ANDROID)
+#define _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainStateBaseHeaderFB_XR_USE_PLATFORM_ANDROID(_avail, _unavail) \
+ _avail(XrSwapchainStateAndroidSurfaceDimensionsFB, XR_TYPE_SWAPCHAIN_STATE_ANDROID_SURFACE_DIMENSIONS_FB) \
+
+#else
+#define _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSwapchainStateBaseHeaderFB_XR_USE_PLATFORM_ANDROID(_avail, _unavail) \
+ _unavail(XrSwapchainStateAndroidSurfaceDimensionsFB, XR_TYPE_SWAPCHAIN_STATE_ANDROID_SURFACE_DIMENSIONS_FB) \
+
+#endif
+
+
+
+
+/// Like XR_LIST_ALL_STRUCTURE_TYPES, but only includes types whose parent struct type is XrSpaceQueryInfoBaseHeaderFB
+#define XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSpaceQueryInfoBaseHeaderFB(_avail, _unavail) \
+ _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSpaceQueryInfoBaseHeaderFB_CORE(_avail, _unavail) \
+
+
+// Implementation detail of XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSpaceQueryInfoBaseHeaderFB()
+#define _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSpaceQueryInfoBaseHeaderFB_CORE(_avail, _unavail) \
+ _avail(XrSpaceQueryInfoFB, XR_TYPE_SPACE_QUERY_INFO_FB) \
+
+
+
+
+
+/// Like XR_LIST_ALL_STRUCTURE_TYPES, but only includes types whose parent struct type is XrSpaceFilterInfoBaseHeaderFB
+#define XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSpaceFilterInfoBaseHeaderFB(_avail, _unavail) \
+ _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSpaceFilterInfoBaseHeaderFB_CORE(_avail, _unavail) \
+
+
+// Implementation detail of XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSpaceFilterInfoBaseHeaderFB()
+#define _impl_XR_LIST_ALL_CHILD_STRUCTURE_TYPES_XrSpaceFilterInfoBaseHeaderFB_CORE(_avail, _unavail) \
+ _avail(XrSpaceUuidFilterInfoFB, XR_TYPE_SPACE_UUID_FILTER_INFO_FB) \
+ _avail(XrSpaceComponentFilterInfoFB, XR_TYPE_SPACE_COMPONENT_FILTER_INFO_FB) \
+
+
+
+
+
+#endif
+
diff --git a/thirdparty/openxr/include/openxr/openxr_reflection_structs.h b/thirdparty/openxr/include/openxr/openxr_reflection_structs.h
new file mode 100644
index 0000000000..300bbbad6d
--- /dev/null
+++ b/thirdparty/openxr/include/openxr/openxr_reflection_structs.h
@@ -0,0 +1,427 @@
+#ifndef OPENXR_REFLECTION_STRUCTS_H_
+#define OPENXR_REFLECTION_STRUCTS_H_ 1
+
+/*
+** Copyright (c) 2017-2022, The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0 OR MIT
+*/
+
+/*
+** This header is generated from the Khronos OpenXR XML API Registry.
+**
+*/
+
+#include "openxr.h"
+
+/*
+This file contains expansion macros (X Macros) for OpenXR structures.
+*/
+
+
+
+/// Calls one of your macros with the structure type name and the XrStructureType constant for
+/// each known structure type. The first macro (_avail) is called for those that are available,
+/// while the second macro (_unavail) is called for those unavailable due to preprocessor definitions.
+#define XR_LIST_ALL_STRUCTURE_TYPES(_avail, _unavail) \
+ _impl_XR_LIST_ALL_STRUCTURE_TYPES_CORE(_avail, _unavail) \
+ _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_D3D11(_avail, _unavail) \
+ _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_D3D12(_avail, _unavail) \
+ _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL(_avail, _unavail) \
+ _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_XR_USE_PLATFORM_WAYLAND(_avail, _unavail) \
+ _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_XR_USE_PLATFORM_WIN32(_avail, _unavail) \
+ _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_XR_USE_PLATFORM_XCB(_avail, _unavail) \
+ _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_XR_USE_PLATFORM_XLIB(_avail, _unavail) \
+ _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_ES(_avail, _unavail) \
+ _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_ES_XR_USE_PLATFORM_ANDROID(_avail, _unavail) \
+ _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_VULKAN(_avail, _unavail) \
+ _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_PLATFORM_ANDROID(_avail, _unavail) \
+ _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_PLATFORM_EGL(_avail, _unavail) \
+ _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_PLATFORM_WIN32(_avail, _unavail) \
+
+
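A small hedged example of using both callbacks at once to count what the current preprocessor configuration exposes; the constant names are illustrative assumptions:

#include <openxr/openxr_reflection_structs.h>

#define COUNT_ONE(structName, enumValue) + 1
#define COUNT_NONE(structName, enumValue)
// Structure types usable in this build versus known-but-compiled-out.
constexpr int kAvailableStructTypeCount   = 0 XR_LIST_ALL_STRUCTURE_TYPES(COUNT_ONE, COUNT_NONE);
constexpr int kUnavailableStructTypeCount = 0 XR_LIST_ALL_STRUCTURE_TYPES(COUNT_NONE, COUNT_ONE);
#undef COUNT_ONE
#undef COUNT_NONE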
+// Implementation detail of XR_LIST_ALL_STRUCTURE_TYPES()
+#define _impl_XR_LIST_ALL_STRUCTURE_TYPES_CORE(_avail, _unavail) \
+ _avail(XrApiLayerProperties, XR_TYPE_API_LAYER_PROPERTIES) \
+ _avail(XrExtensionProperties, XR_TYPE_EXTENSION_PROPERTIES) \
+ _avail(XrInstanceCreateInfo, XR_TYPE_INSTANCE_CREATE_INFO) \
+ _avail(XrInstanceProperties, XR_TYPE_INSTANCE_PROPERTIES) \
+ _avail(XrEventDataBuffer, XR_TYPE_EVENT_DATA_BUFFER) \
+ _avail(XrSystemGetInfo, XR_TYPE_SYSTEM_GET_INFO) \
+ _avail(XrSystemProperties, XR_TYPE_SYSTEM_PROPERTIES) \
+ _avail(XrSessionCreateInfo, XR_TYPE_SESSION_CREATE_INFO) \
+ _avail(XrSpaceVelocity, XR_TYPE_SPACE_VELOCITY) \
+ _avail(XrReferenceSpaceCreateInfo, XR_TYPE_REFERENCE_SPACE_CREATE_INFO) \
+ _avail(XrActionSpaceCreateInfo, XR_TYPE_ACTION_SPACE_CREATE_INFO) \
+ _avail(XrSpaceLocation, XR_TYPE_SPACE_LOCATION) \
+ _avail(XrViewConfigurationProperties, XR_TYPE_VIEW_CONFIGURATION_PROPERTIES) \
+ _avail(XrViewConfigurationView, XR_TYPE_VIEW_CONFIGURATION_VIEW) \
+ _avail(XrSwapchainCreateInfo, XR_TYPE_SWAPCHAIN_CREATE_INFO) \
+ _avail(XrSwapchainImageAcquireInfo, XR_TYPE_SWAPCHAIN_IMAGE_ACQUIRE_INFO) \
+ _avail(XrSwapchainImageWaitInfo, XR_TYPE_SWAPCHAIN_IMAGE_WAIT_INFO) \
+ _avail(XrSwapchainImageReleaseInfo, XR_TYPE_SWAPCHAIN_IMAGE_RELEASE_INFO) \
+ _avail(XrSessionBeginInfo, XR_TYPE_SESSION_BEGIN_INFO) \
+ _avail(XrFrameWaitInfo, XR_TYPE_FRAME_WAIT_INFO) \
+ _avail(XrFrameState, XR_TYPE_FRAME_STATE) \
+ _avail(XrFrameBeginInfo, XR_TYPE_FRAME_BEGIN_INFO) \
+ _avail(XrFrameEndInfo, XR_TYPE_FRAME_END_INFO) \
+ _avail(XrViewLocateInfo, XR_TYPE_VIEW_LOCATE_INFO) \
+ _avail(XrViewState, XR_TYPE_VIEW_STATE) \
+ _avail(XrView, XR_TYPE_VIEW) \
+ _avail(XrActionSetCreateInfo, XR_TYPE_ACTION_SET_CREATE_INFO) \
+ _avail(XrActionCreateInfo, XR_TYPE_ACTION_CREATE_INFO) \
+ _avail(XrInteractionProfileSuggestedBinding, XR_TYPE_INTERACTION_PROFILE_SUGGESTED_BINDING) \
+ _avail(XrSessionActionSetsAttachInfo, XR_TYPE_SESSION_ACTION_SETS_ATTACH_INFO) \
+ _avail(XrInteractionProfileState, XR_TYPE_INTERACTION_PROFILE_STATE) \
+ _avail(XrActionStateGetInfo, XR_TYPE_ACTION_STATE_GET_INFO) \
+ _avail(XrActionStateBoolean, XR_TYPE_ACTION_STATE_BOOLEAN) \
+ _avail(XrActionStateFloat, XR_TYPE_ACTION_STATE_FLOAT) \
+ _avail(XrActionStateVector2f, XR_TYPE_ACTION_STATE_VECTOR2F) \
+ _avail(XrActionStatePose, XR_TYPE_ACTION_STATE_POSE) \
+ _avail(XrActionsSyncInfo, XR_TYPE_ACTIONS_SYNC_INFO) \
+ _avail(XrBoundSourcesForActionEnumerateInfo, XR_TYPE_BOUND_SOURCES_FOR_ACTION_ENUMERATE_INFO) \
+ _avail(XrInputSourceLocalizedNameGetInfo, XR_TYPE_INPUT_SOURCE_LOCALIZED_NAME_GET_INFO) \
+ _avail(XrHapticActionInfo, XR_TYPE_HAPTIC_ACTION_INFO) \
+ _avail(XrCompositionLayerProjectionView, XR_TYPE_COMPOSITION_LAYER_PROJECTION_VIEW) \
+ _avail(XrCompositionLayerProjection, XR_TYPE_COMPOSITION_LAYER_PROJECTION) \
+ _avail(XrCompositionLayerQuad, XR_TYPE_COMPOSITION_LAYER_QUAD) \
+ _avail(XrEventDataEventsLost, XR_TYPE_EVENT_DATA_EVENTS_LOST) \
+ _avail(XrEventDataInstanceLossPending, XR_TYPE_EVENT_DATA_INSTANCE_LOSS_PENDING) \
+ _avail(XrEventDataSessionStateChanged, XR_TYPE_EVENT_DATA_SESSION_STATE_CHANGED) \
+ _avail(XrEventDataReferenceSpaceChangePending, XR_TYPE_EVENT_DATA_REFERENCE_SPACE_CHANGE_PENDING) \
+ _avail(XrEventDataInteractionProfileChanged, XR_TYPE_EVENT_DATA_INTERACTION_PROFILE_CHANGED) \
+ _avail(XrHapticVibration, XR_TYPE_HAPTIC_VIBRATION) \
+ _avail(XrCompositionLayerCubeKHR, XR_TYPE_COMPOSITION_LAYER_CUBE_KHR) \
+ _avail(XrCompositionLayerDepthInfoKHR, XR_TYPE_COMPOSITION_LAYER_DEPTH_INFO_KHR) \
+ _avail(XrCompositionLayerCylinderKHR, XR_TYPE_COMPOSITION_LAYER_CYLINDER_KHR) \
+ _avail(XrCompositionLayerEquirectKHR, XR_TYPE_COMPOSITION_LAYER_EQUIRECT_KHR) \
+ _avail(XrVisibilityMaskKHR, XR_TYPE_VISIBILITY_MASK_KHR) \
+ _avail(XrEventDataVisibilityMaskChangedKHR, XR_TYPE_EVENT_DATA_VISIBILITY_MASK_CHANGED_KHR) \
+ _avail(XrCompositionLayerColorScaleBiasKHR, XR_TYPE_COMPOSITION_LAYER_COLOR_SCALE_BIAS_KHR) \
+ _avail(XrCompositionLayerEquirect2KHR, XR_TYPE_COMPOSITION_LAYER_EQUIRECT2_KHR) \
+ _avail(XrBindingModificationsKHR, XR_TYPE_BINDING_MODIFICATIONS_KHR) \
+ _avail(XrEventDataPerfSettingsEXT, XR_TYPE_EVENT_DATA_PERF_SETTINGS_EXT) \
+ _avail(XrDebugUtilsObjectNameInfoEXT, XR_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT) \
+ _avail(XrDebugUtilsLabelEXT, XR_TYPE_DEBUG_UTILS_LABEL_EXT) \
+ _avail(XrDebugUtilsMessengerCallbackDataEXT, XR_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT) \
+ _avail(XrDebugUtilsMessengerCreateInfoEXT, XR_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT) \
+ _avail(XrSystemEyeGazeInteractionPropertiesEXT, XR_TYPE_SYSTEM_EYE_GAZE_INTERACTION_PROPERTIES_EXT) \
+ _avail(XrEyeGazeSampleTimeEXT, XR_TYPE_EYE_GAZE_SAMPLE_TIME_EXT) \
+ _avail(XrSessionCreateInfoOverlayEXTX, XR_TYPE_SESSION_CREATE_INFO_OVERLAY_EXTX) \
+ _avail(XrEventDataMainSessionVisibilityChangedEXTX, XR_TYPE_EVENT_DATA_MAIN_SESSION_VISIBILITY_CHANGED_EXTX) \
+ _avail(XrSpatialAnchorCreateInfoMSFT, XR_TYPE_SPATIAL_ANCHOR_CREATE_INFO_MSFT) \
+ _avail(XrSpatialAnchorSpaceCreateInfoMSFT, XR_TYPE_SPATIAL_ANCHOR_SPACE_CREATE_INFO_MSFT) \
+ _avail(XrCompositionLayerImageLayoutFB, XR_TYPE_COMPOSITION_LAYER_IMAGE_LAYOUT_FB) \
+ _avail(XrCompositionLayerAlphaBlendFB, XR_TYPE_COMPOSITION_LAYER_ALPHA_BLEND_FB) \
+ _avail(XrViewConfigurationDepthRangeEXT, XR_TYPE_VIEW_CONFIGURATION_DEPTH_RANGE_EXT) \
+ _avail(XrSpatialGraphNodeSpaceCreateInfoMSFT, XR_TYPE_SPATIAL_GRAPH_NODE_SPACE_CREATE_INFO_MSFT) \
+ _avail(XrSpatialGraphStaticNodeBindingCreateInfoMSFT, XR_TYPE_SPATIAL_GRAPH_STATIC_NODE_BINDING_CREATE_INFO_MSFT) \
+ _avail(XrSpatialGraphNodeBindingPropertiesGetInfoMSFT, XR_TYPE_SPATIAL_GRAPH_NODE_BINDING_PROPERTIES_GET_INFO_MSFT) \
+ _avail(XrSpatialGraphNodeBindingPropertiesMSFT, XR_TYPE_SPATIAL_GRAPH_NODE_BINDING_PROPERTIES_MSFT) \
+ _avail(XrSystemHandTrackingPropertiesEXT, XR_TYPE_SYSTEM_HAND_TRACKING_PROPERTIES_EXT) \
+ _avail(XrHandTrackerCreateInfoEXT, XR_TYPE_HAND_TRACKER_CREATE_INFO_EXT) \
+ _avail(XrHandJointsLocateInfoEXT, XR_TYPE_HAND_JOINTS_LOCATE_INFO_EXT) \
+ _avail(XrHandJointLocationsEXT, XR_TYPE_HAND_JOINT_LOCATIONS_EXT) \
+ _avail(XrHandJointVelocitiesEXT, XR_TYPE_HAND_JOINT_VELOCITIES_EXT) \
+ _avail(XrSystemHandTrackingMeshPropertiesMSFT, XR_TYPE_SYSTEM_HAND_TRACKING_MESH_PROPERTIES_MSFT) \
+ _avail(XrHandMeshSpaceCreateInfoMSFT, XR_TYPE_HAND_MESH_SPACE_CREATE_INFO_MSFT) \
+ _avail(XrHandMeshUpdateInfoMSFT, XR_TYPE_HAND_MESH_UPDATE_INFO_MSFT) \
+ _avail(XrHandMeshMSFT, XR_TYPE_HAND_MESH_MSFT) \
+ _avail(XrHandPoseTypeInfoMSFT, XR_TYPE_HAND_POSE_TYPE_INFO_MSFT) \
+ _avail(XrSecondaryViewConfigurationSessionBeginInfoMSFT, XR_TYPE_SECONDARY_VIEW_CONFIGURATION_SESSION_BEGIN_INFO_MSFT) \
+ _avail(XrSecondaryViewConfigurationStateMSFT, XR_TYPE_SECONDARY_VIEW_CONFIGURATION_STATE_MSFT) \
+ _avail(XrSecondaryViewConfigurationFrameStateMSFT, XR_TYPE_SECONDARY_VIEW_CONFIGURATION_FRAME_STATE_MSFT) \
+ _avail(XrSecondaryViewConfigurationLayerInfoMSFT, XR_TYPE_SECONDARY_VIEW_CONFIGURATION_LAYER_INFO_MSFT) \
+ _avail(XrSecondaryViewConfigurationFrameEndInfoMSFT, XR_TYPE_SECONDARY_VIEW_CONFIGURATION_FRAME_END_INFO_MSFT) \
+ _avail(XrSecondaryViewConfigurationSwapchainCreateInfoMSFT, XR_TYPE_SECONDARY_VIEW_CONFIGURATION_SWAPCHAIN_CREATE_INFO_MSFT) \
+ _avail(XrControllerModelKeyStateMSFT, XR_TYPE_CONTROLLER_MODEL_KEY_STATE_MSFT) \
+ _avail(XrControllerModelNodePropertiesMSFT, XR_TYPE_CONTROLLER_MODEL_NODE_PROPERTIES_MSFT) \
+ _avail(XrControllerModelPropertiesMSFT, XR_TYPE_CONTROLLER_MODEL_PROPERTIES_MSFT) \
+ _avail(XrControllerModelNodeStateMSFT, XR_TYPE_CONTROLLER_MODEL_NODE_STATE_MSFT) \
+ _avail(XrControllerModelStateMSFT, XR_TYPE_CONTROLLER_MODEL_STATE_MSFT) \
+ _avail(XrViewConfigurationViewFovEPIC, XR_TYPE_VIEW_CONFIGURATION_VIEW_FOV_EPIC) \
+ _avail(XrCompositionLayerReprojectionInfoMSFT, XR_TYPE_COMPOSITION_LAYER_REPROJECTION_INFO_MSFT) \
+ _avail(XrCompositionLayerReprojectionPlaneOverrideMSFT, XR_TYPE_COMPOSITION_LAYER_REPROJECTION_PLANE_OVERRIDE_MSFT) \
+ _avail(XrCompositionLayerSecureContentFB, XR_TYPE_COMPOSITION_LAYER_SECURE_CONTENT_FB) \
+ _avail(XrInteractionProfileDpadBindingEXT, XR_TYPE_INTERACTION_PROFILE_DPAD_BINDING_EXT) \
+ _avail(XrInteractionProfileAnalogThresholdVALVE, XR_TYPE_INTERACTION_PROFILE_ANALOG_THRESHOLD_VALVE) \
+ _avail(XrHandJointsMotionRangeInfoEXT, XR_TYPE_HAND_JOINTS_MOTION_RANGE_INFO_EXT) \
+ _avail(XrSceneObserverCreateInfoMSFT, XR_TYPE_SCENE_OBSERVER_CREATE_INFO_MSFT) \
+ _avail(XrSceneCreateInfoMSFT, XR_TYPE_SCENE_CREATE_INFO_MSFT) \
+ _avail(XrNewSceneComputeInfoMSFT, XR_TYPE_NEW_SCENE_COMPUTE_INFO_MSFT) \
+ _avail(XrVisualMeshComputeLodInfoMSFT, XR_TYPE_VISUAL_MESH_COMPUTE_LOD_INFO_MSFT) \
+ _avail(XrSceneComponentsMSFT, XR_TYPE_SCENE_COMPONENTS_MSFT) \
+ _avail(XrSceneComponentsGetInfoMSFT, XR_TYPE_SCENE_COMPONENTS_GET_INFO_MSFT) \
+ _avail(XrSceneComponentLocationsMSFT, XR_TYPE_SCENE_COMPONENT_LOCATIONS_MSFT) \
+ _avail(XrSceneComponentsLocateInfoMSFT, XR_TYPE_SCENE_COMPONENTS_LOCATE_INFO_MSFT) \
+ _avail(XrSceneObjectsMSFT, XR_TYPE_SCENE_OBJECTS_MSFT) \
+ _avail(XrSceneComponentParentFilterInfoMSFT, XR_TYPE_SCENE_COMPONENT_PARENT_FILTER_INFO_MSFT) \
+ _avail(XrSceneObjectTypesFilterInfoMSFT, XR_TYPE_SCENE_OBJECT_TYPES_FILTER_INFO_MSFT) \
+ _avail(XrScenePlanesMSFT, XR_TYPE_SCENE_PLANES_MSFT) \
+ _avail(XrScenePlaneAlignmentFilterInfoMSFT, XR_TYPE_SCENE_PLANE_ALIGNMENT_FILTER_INFO_MSFT) \
+ _avail(XrSceneMeshesMSFT, XR_TYPE_SCENE_MESHES_MSFT) \
+ _avail(XrSceneMeshBuffersGetInfoMSFT, XR_TYPE_SCENE_MESH_BUFFERS_GET_INFO_MSFT) \
+ _avail(XrSceneMeshBuffersMSFT, XR_TYPE_SCENE_MESH_BUFFERS_MSFT) \
+ _avail(XrSceneMeshVertexBufferMSFT, XR_TYPE_SCENE_MESH_VERTEX_BUFFER_MSFT) \
+ _avail(XrSceneMeshIndicesUint32MSFT, XR_TYPE_SCENE_MESH_INDICES_UINT32_MSFT) \
+ _avail(XrSceneMeshIndicesUint16MSFT, XR_TYPE_SCENE_MESH_INDICES_UINT16_MSFT) \
+ _avail(XrSerializedSceneFragmentDataGetInfoMSFT, XR_TYPE_SERIALIZED_SCENE_FRAGMENT_DATA_GET_INFO_MSFT) \
+ _avail(XrSceneDeserializeInfoMSFT, XR_TYPE_SCENE_DESERIALIZE_INFO_MSFT) \
+ _avail(XrEventDataDisplayRefreshRateChangedFB, XR_TYPE_EVENT_DATA_DISPLAY_REFRESH_RATE_CHANGED_FB) \
+ _avail(XrViveTrackerPathsHTCX, XR_TYPE_VIVE_TRACKER_PATHS_HTCX) \
+ _avail(XrEventDataViveTrackerConnectedHTCX, XR_TYPE_EVENT_DATA_VIVE_TRACKER_CONNECTED_HTCX) \
+ _avail(XrSystemFacialTrackingPropertiesHTC, XR_TYPE_SYSTEM_FACIAL_TRACKING_PROPERTIES_HTC) \
+ _avail(XrFacialExpressionsHTC, XR_TYPE_FACIAL_EXPRESSIONS_HTC) \
+ _avail(XrFacialTrackerCreateInfoHTC, XR_TYPE_FACIAL_TRACKER_CREATE_INFO_HTC) \
+ _avail(XrSystemColorSpacePropertiesFB, XR_TYPE_SYSTEM_COLOR_SPACE_PROPERTIES_FB) \
+ _avail(XrHandTrackingMeshFB, XR_TYPE_HAND_TRACKING_MESH_FB) \
+ _avail(XrHandTrackingScaleFB, XR_TYPE_HAND_TRACKING_SCALE_FB) \
+ _avail(XrHandTrackingAimStateFB, XR_TYPE_HAND_TRACKING_AIM_STATE_FB) \
+ _avail(XrHandTrackingCapsulesStateFB, XR_TYPE_HAND_TRACKING_CAPSULES_STATE_FB) \
+ _avail(XrSystemSpatialEntityPropertiesFB, XR_TYPE_SYSTEM_SPATIAL_ENTITY_PROPERTIES_FB) \
+ _avail(XrSpatialAnchorCreateInfoFB, XR_TYPE_SPATIAL_ANCHOR_CREATE_INFO_FB) \
+ _avail(XrSpaceComponentStatusSetInfoFB, XR_TYPE_SPACE_COMPONENT_STATUS_SET_INFO_FB) \
+ _avail(XrSpaceComponentStatusFB, XR_TYPE_SPACE_COMPONENT_STATUS_FB) \
+ _avail(XrEventDataSpatialAnchorCreateCompleteFB, XR_TYPE_EVENT_DATA_SPATIAL_ANCHOR_CREATE_COMPLETE_FB) \
+ _avail(XrEventDataSpaceSetStatusCompleteFB, XR_TYPE_EVENT_DATA_SPACE_SET_STATUS_COMPLETE_FB) \
+ _avail(XrFoveationProfileCreateInfoFB, XR_TYPE_FOVEATION_PROFILE_CREATE_INFO_FB) \
+ _avail(XrSwapchainCreateInfoFoveationFB, XR_TYPE_SWAPCHAIN_CREATE_INFO_FOVEATION_FB) \
+ _avail(XrSwapchainStateFoveationFB, XR_TYPE_SWAPCHAIN_STATE_FOVEATION_FB) \
+ _avail(XrFoveationLevelProfileCreateInfoFB, XR_TYPE_FOVEATION_LEVEL_PROFILE_CREATE_INFO_FB) \
+ _avail(XrSystemKeyboardTrackingPropertiesFB, XR_TYPE_SYSTEM_KEYBOARD_TRACKING_PROPERTIES_FB) \
+ _avail(XrKeyboardSpaceCreateInfoFB, XR_TYPE_KEYBOARD_SPACE_CREATE_INFO_FB) \
+ _avail(XrKeyboardTrackingQueryFB, XR_TYPE_KEYBOARD_TRACKING_QUERY_FB) \
+ _avail(XrTriangleMeshCreateInfoFB, XR_TYPE_TRIANGLE_MESH_CREATE_INFO_FB) \
+ _avail(XrSystemPassthroughPropertiesFB, XR_TYPE_SYSTEM_PASSTHROUGH_PROPERTIES_FB) \
+ _avail(XrSystemPassthroughProperties2FB, XR_TYPE_SYSTEM_PASSTHROUGH_PROPERTIES2_FB) \
+ _avail(XrPassthroughCreateInfoFB, XR_TYPE_PASSTHROUGH_CREATE_INFO_FB) \
+ _avail(XrPassthroughLayerCreateInfoFB, XR_TYPE_PASSTHROUGH_LAYER_CREATE_INFO_FB) \
+ _avail(XrCompositionLayerPassthroughFB, XR_TYPE_COMPOSITION_LAYER_PASSTHROUGH_FB) \
+ _avail(XrGeometryInstanceCreateInfoFB, XR_TYPE_GEOMETRY_INSTANCE_CREATE_INFO_FB) \
+ _avail(XrGeometryInstanceTransformFB, XR_TYPE_GEOMETRY_INSTANCE_TRANSFORM_FB) \
+ _avail(XrPassthroughStyleFB, XR_TYPE_PASSTHROUGH_STYLE_FB) \
+ _avail(XrPassthroughColorMapMonoToRgbaFB, XR_TYPE_PASSTHROUGH_COLOR_MAP_MONO_TO_RGBA_FB) \
+ _avail(XrPassthroughColorMapMonoToMonoFB, XR_TYPE_PASSTHROUGH_COLOR_MAP_MONO_TO_MONO_FB) \
+ _avail(XrPassthroughBrightnessContrastSaturationFB, XR_TYPE_PASSTHROUGH_BRIGHTNESS_CONTRAST_SATURATION_FB) \
+ _avail(XrEventDataPassthroughStateChangedFB, XR_TYPE_EVENT_DATA_PASSTHROUGH_STATE_CHANGED_FB) \
+ _avail(XrRenderModelPathInfoFB, XR_TYPE_RENDER_MODEL_PATH_INFO_FB) \
+ _avail(XrRenderModelPropertiesFB, XR_TYPE_RENDER_MODEL_PROPERTIES_FB) \
+ _avail(XrRenderModelBufferFB, XR_TYPE_RENDER_MODEL_BUFFER_FB) \
+ _avail(XrRenderModelLoadInfoFB, XR_TYPE_RENDER_MODEL_LOAD_INFO_FB) \
+ _avail(XrSystemRenderModelPropertiesFB, XR_TYPE_SYSTEM_RENDER_MODEL_PROPERTIES_FB) \
+ _avail(XrRenderModelCapabilitiesRequestFB, XR_TYPE_RENDER_MODEL_CAPABILITIES_REQUEST_FB) \
+ _avail(XrViewLocateFoveatedRenderingVARJO, XR_TYPE_VIEW_LOCATE_FOVEATED_RENDERING_VARJO) \
+ _avail(XrFoveatedViewConfigurationViewVARJO, XR_TYPE_FOVEATED_VIEW_CONFIGURATION_VIEW_VARJO) \
+ _avail(XrSystemFoveatedRenderingPropertiesVARJO, XR_TYPE_SYSTEM_FOVEATED_RENDERING_PROPERTIES_VARJO) \
+ _avail(XrCompositionLayerDepthTestVARJO, XR_TYPE_COMPOSITION_LAYER_DEPTH_TEST_VARJO) \
+ _avail(XrSystemMarkerTrackingPropertiesVARJO, XR_TYPE_SYSTEM_MARKER_TRACKING_PROPERTIES_VARJO) \
+ _avail(XrEventDataMarkerTrackingUpdateVARJO, XR_TYPE_EVENT_DATA_MARKER_TRACKING_UPDATE_VARJO) \
+ _avail(XrMarkerSpaceCreateInfoVARJO, XR_TYPE_MARKER_SPACE_CREATE_INFO_VARJO) \
+ _avail(XrSpatialAnchorPersistenceInfoMSFT, XR_TYPE_SPATIAL_ANCHOR_PERSISTENCE_INFO_MSFT) \
+ _avail(XrSpatialAnchorFromPersistedAnchorCreateInfoMSFT, XR_TYPE_SPATIAL_ANCHOR_FROM_PERSISTED_ANCHOR_CREATE_INFO_MSFT) \
+ _avail(XrSpaceQueryInfoFB, XR_TYPE_SPACE_QUERY_INFO_FB) \
+ _avail(XrSpaceStorageLocationFilterInfoFB, XR_TYPE_SPACE_STORAGE_LOCATION_FILTER_INFO_FB) \
+ _avail(XrSpaceUuidFilterInfoFB, XR_TYPE_SPACE_UUID_FILTER_INFO_FB) \
+ _avail(XrSpaceComponentFilterInfoFB, XR_TYPE_SPACE_COMPONENT_FILTER_INFO_FB) \
+ _avail(XrSpaceQueryResultsFB, XR_TYPE_SPACE_QUERY_RESULTS_FB) \
+ _avail(XrEventDataSpaceQueryResultsAvailableFB, XR_TYPE_EVENT_DATA_SPACE_QUERY_RESULTS_AVAILABLE_FB) \
+ _avail(XrEventDataSpaceQueryCompleteFB, XR_TYPE_EVENT_DATA_SPACE_QUERY_COMPLETE_FB) \
+ _avail(XrSpaceSaveInfoFB, XR_TYPE_SPACE_SAVE_INFO_FB) \
+ _avail(XrSpaceEraseInfoFB, XR_TYPE_SPACE_ERASE_INFO_FB) \
+ _avail(XrEventDataSpaceSaveCompleteFB, XR_TYPE_EVENT_DATA_SPACE_SAVE_COMPLETE_FB) \
+ _avail(XrEventDataSpaceEraseCompleteFB, XR_TYPE_EVENT_DATA_SPACE_ERASE_COMPLETE_FB) \
+ _avail(XrCompositionLayerSpaceWarpInfoFB, XR_TYPE_COMPOSITION_LAYER_SPACE_WARP_INFO_FB) \
+ _avail(XrSystemSpaceWarpPropertiesFB, XR_TYPE_SYSTEM_SPACE_WARP_PROPERTIES_FB) \
+ _avail(XrSemanticLabelsFB, XR_TYPE_SEMANTIC_LABELS_FB) \
+ _avail(XrRoomLayoutFB, XR_TYPE_ROOM_LAYOUT_FB) \
+ _avail(XrBoundary2DFB, XR_TYPE_BOUNDARY_2D_FB) \
+ _avail(XrDigitalLensControlALMALENCE, XR_TYPE_DIGITAL_LENS_CONTROL_ALMALENCE) \
+ _avail(XrSpaceContainerFB, XR_TYPE_SPACE_CONTAINER_FB) \
+ _avail(XrPassthroughKeyboardHandsIntensityFB, XR_TYPE_PASSTHROUGH_KEYBOARD_HANDS_INTENSITY_FB) \
+ _avail(XrCompositionLayerSettingsFB, XR_TYPE_COMPOSITION_LAYER_SETTINGS_FB) \
+ _avail(XrPerformanceMetricsStateMETA, XR_TYPE_PERFORMANCE_METRICS_STATE_META) \
+ _avail(XrPerformanceMetricsCounterMETA, XR_TYPE_PERFORMANCE_METRICS_COUNTER_META) \
+ _avail(XrSystemHeadsetIdPropertiesMETA, XR_TYPE_SYSTEM_HEADSET_ID_PROPERTIES_META) \
+ _avail(XrPassthroughCreateInfoHTC, XR_TYPE_PASSTHROUGH_CREATE_INFO_HTC) \
+ _avail(XrPassthroughColorHTC, XR_TYPE_PASSTHROUGH_COLOR_HTC) \
+ _avail(XrPassthroughMeshTransformInfoHTC, XR_TYPE_PASSTHROUGH_MESH_TRANSFORM_INFO_HTC) \
+ _avail(XrCompositionLayerPassthroughHTC, XR_TYPE_COMPOSITION_LAYER_PASSTHROUGH_HTC) \
+ _avail(XrFoveationApplyInfoHTC, XR_TYPE_FOVEATION_APPLY_INFO_HTC) \
+ _avail(XrFoveationDynamicModeInfoHTC, XR_TYPE_FOVEATION_DYNAMIC_MODE_INFO_HTC) \
+ _avail(XrFoveationCustomModeInfoHTC, XR_TYPE_FOVEATION_CUSTOM_MODE_INFO_HTC) \
+ _avail(XrActiveActionSetPrioritiesEXT, XR_TYPE_ACTIVE_ACTION_SET_PRIORITIES_EXT) \
+
+
+#if defined(XR_USE_GRAPHICS_API_D3D11)
+#define _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_D3D11(_avail, _unavail) \
+ _avail(XrGraphicsBindingD3D11KHR, XR_TYPE_GRAPHICS_BINDING_D3D11_KHR) \
+ _avail(XrSwapchainImageD3D11KHR, XR_TYPE_SWAPCHAIN_IMAGE_D3D11_KHR) \
+ _avail(XrGraphicsRequirementsD3D11KHR, XR_TYPE_GRAPHICS_REQUIREMENTS_D3D11_KHR) \
+
+#else
+#define _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_D3D11(_avail, _unavail) \
+ _unavail(XrGraphicsBindingD3D11KHR, XR_TYPE_GRAPHICS_BINDING_D3D11_KHR) \
+ _unavail(XrSwapchainImageD3D11KHR, XR_TYPE_SWAPCHAIN_IMAGE_D3D11_KHR) \
+ _unavail(XrGraphicsRequirementsD3D11KHR, XR_TYPE_GRAPHICS_REQUIREMENTS_D3D11_KHR) \
+
+#endif
+
+#if defined(XR_USE_GRAPHICS_API_D3D12)
+#define _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_D3D12(_avail, _unavail) \
+ _avail(XrGraphicsBindingD3D12KHR, XR_TYPE_GRAPHICS_BINDING_D3D12_KHR) \
+ _avail(XrSwapchainImageD3D12KHR, XR_TYPE_SWAPCHAIN_IMAGE_D3D12_KHR) \
+ _avail(XrGraphicsRequirementsD3D12KHR, XR_TYPE_GRAPHICS_REQUIREMENTS_D3D12_KHR) \
+
+#else
+#define _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_D3D12(_avail, _unavail) \
+ _unavail(XrGraphicsBindingD3D12KHR, XR_TYPE_GRAPHICS_BINDING_D3D12_KHR) \
+ _unavail(XrSwapchainImageD3D12KHR, XR_TYPE_SWAPCHAIN_IMAGE_D3D12_KHR) \
+ _unavail(XrGraphicsRequirementsD3D12KHR, XR_TYPE_GRAPHICS_REQUIREMENTS_D3D12_KHR) \
+
+#endif
+
+#if defined(XR_USE_GRAPHICS_API_OPENGL)
+#define _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL(_avail, _unavail) \
+ _avail(XrSwapchainImageOpenGLKHR, XR_TYPE_SWAPCHAIN_IMAGE_OPENGL_KHR) \
+ _avail(XrGraphicsRequirementsOpenGLKHR, XR_TYPE_GRAPHICS_REQUIREMENTS_OPENGL_KHR) \
+
+#else
+#define _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL(_avail, _unavail) \
+ _unavail(XrSwapchainImageOpenGLKHR, XR_TYPE_SWAPCHAIN_IMAGE_OPENGL_KHR) \
+ _unavail(XrGraphicsRequirementsOpenGLKHR, XR_TYPE_GRAPHICS_REQUIREMENTS_OPENGL_KHR) \
+
+#endif
+
+#if defined(XR_USE_GRAPHICS_API_OPENGL) && defined(XR_USE_PLATFORM_WAYLAND)
+#define _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_XR_USE_PLATFORM_WAYLAND(_avail, _unavail) \
+ _avail(XrGraphicsBindingOpenGLWaylandKHR, XR_TYPE_GRAPHICS_BINDING_OPENGL_WAYLAND_KHR) \
+
+#else
+#define _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_XR_USE_PLATFORM_WAYLAND(_avail, _unavail) \
+ _unavail(XrGraphicsBindingOpenGLWaylandKHR, XR_TYPE_GRAPHICS_BINDING_OPENGL_WAYLAND_KHR) \
+
+#endif
+
+#if defined(XR_USE_GRAPHICS_API_OPENGL) && defined(XR_USE_PLATFORM_WIN32)
+#define _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_XR_USE_PLATFORM_WIN32(_avail, _unavail) \
+ _avail(XrGraphicsBindingOpenGLWin32KHR, XR_TYPE_GRAPHICS_BINDING_OPENGL_WIN32_KHR) \
+
+#else
+#define _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_XR_USE_PLATFORM_WIN32(_avail, _unavail) \
+ _unavail(XrGraphicsBindingOpenGLWin32KHR, XR_TYPE_GRAPHICS_BINDING_OPENGL_WIN32_KHR) \
+
+#endif
+
+#if defined(XR_USE_GRAPHICS_API_OPENGL) && defined(XR_USE_PLATFORM_XCB)
+#define _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_XR_USE_PLATFORM_XCB(_avail, _unavail) \
+ _avail(XrGraphicsBindingOpenGLXcbKHR, XR_TYPE_GRAPHICS_BINDING_OPENGL_XCB_KHR) \
+
+#else
+#define _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_XR_USE_PLATFORM_XCB(_avail, _unavail) \
+ _unavail(XrGraphicsBindingOpenGLXcbKHR, XR_TYPE_GRAPHICS_BINDING_OPENGL_XCB_KHR) \
+
+#endif
+
+#if defined(XR_USE_GRAPHICS_API_OPENGL) && defined(XR_USE_PLATFORM_XLIB)
+#define _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_XR_USE_PLATFORM_XLIB(_avail, _unavail) \
+ _avail(XrGraphicsBindingOpenGLXlibKHR, XR_TYPE_GRAPHICS_BINDING_OPENGL_XLIB_KHR) \
+
+#else
+#define _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_XR_USE_PLATFORM_XLIB(_avail, _unavail) \
+ _unavail(XrGraphicsBindingOpenGLXlibKHR, XR_TYPE_GRAPHICS_BINDING_OPENGL_XLIB_KHR) \
+
+#endif
+
+#if defined(XR_USE_GRAPHICS_API_OPENGL_ES)
+#define _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_ES(_avail, _unavail) \
+ _avail(XrSwapchainImageOpenGLESKHR, XR_TYPE_SWAPCHAIN_IMAGE_OPENGL_ES_KHR) \
+ _avail(XrGraphicsRequirementsOpenGLESKHR, XR_TYPE_GRAPHICS_REQUIREMENTS_OPENGL_ES_KHR) \
+ _avail(XrSwapchainStateSamplerOpenGLESFB, XR_TYPE_SWAPCHAIN_STATE_SAMPLER_OPENGL_ES_FB) \
+
+#else
+#define _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_ES(_avail, _unavail) \
+ _unavail(XrSwapchainImageOpenGLESKHR, XR_TYPE_SWAPCHAIN_IMAGE_OPENGL_ES_KHR) \
+ _unavail(XrGraphicsRequirementsOpenGLESKHR, XR_TYPE_GRAPHICS_REQUIREMENTS_OPENGL_ES_KHR) \
+ _unavail(XrSwapchainStateSamplerOpenGLESFB, XR_TYPE_SWAPCHAIN_STATE_SAMPLER_OPENGL_ES_FB) \
+
+#endif
+
+#if defined(XR_USE_GRAPHICS_API_OPENGL_ES) && defined(XR_USE_PLATFORM_ANDROID)
+#define _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_ES_XR_USE_PLATFORM_ANDROID(_avail, _unavail) \
+ _avail(XrGraphicsBindingOpenGLESAndroidKHR, XR_TYPE_GRAPHICS_BINDING_OPENGL_ES_ANDROID_KHR) \
+
+#else
+#define _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_OPENGL_ES_XR_USE_PLATFORM_ANDROID(_avail, _unavail) \
+ _unavail(XrGraphicsBindingOpenGLESAndroidKHR, XR_TYPE_GRAPHICS_BINDING_OPENGL_ES_ANDROID_KHR) \
+
+#endif
+
+#if defined(XR_USE_GRAPHICS_API_VULKAN)
+#define _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_VULKAN(_avail, _unavail) \
+ _avail(XrVulkanSwapchainFormatListCreateInfoKHR, XR_TYPE_VULKAN_SWAPCHAIN_FORMAT_LIST_CREATE_INFO_KHR) \
+ _avail(XrGraphicsBindingVulkanKHR, XR_TYPE_GRAPHICS_BINDING_VULKAN_KHR) \
+ _avail(XrSwapchainImageVulkanKHR, XR_TYPE_SWAPCHAIN_IMAGE_VULKAN_KHR) \
+ _avail(XrGraphicsRequirementsVulkanKHR, XR_TYPE_GRAPHICS_REQUIREMENTS_VULKAN_KHR) \
+ _avail(XrVulkanInstanceCreateInfoKHR, XR_TYPE_VULKAN_INSTANCE_CREATE_INFO_KHR) \
+ _avail(XrVulkanDeviceCreateInfoKHR, XR_TYPE_VULKAN_DEVICE_CREATE_INFO_KHR) \
+ _avail(XrVulkanGraphicsDeviceGetInfoKHR, XR_TYPE_VULKAN_GRAPHICS_DEVICE_GET_INFO_KHR) \
+ _avail(XrSwapchainImageFoveationVulkanFB, XR_TYPE_SWAPCHAIN_IMAGE_FOVEATION_VULKAN_FB) \
+ _avail(XrSwapchainStateSamplerVulkanFB, XR_TYPE_SWAPCHAIN_STATE_SAMPLER_VULKAN_FB) \
+ _avail(XrVulkanSwapchainCreateInfoMETA, XR_TYPE_VULKAN_SWAPCHAIN_CREATE_INFO_META) \
+
+#else
+#define _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_GRAPHICS_API_VULKAN(_avail, _unavail) \
+ _unavail(XrVulkanSwapchainFormatListCreateInfoKHR, XR_TYPE_VULKAN_SWAPCHAIN_FORMAT_LIST_CREATE_INFO_KHR) \
+ _unavail(XrGraphicsBindingVulkanKHR, XR_TYPE_GRAPHICS_BINDING_VULKAN_KHR) \
+ _unavail(XrSwapchainImageVulkanKHR, XR_TYPE_SWAPCHAIN_IMAGE_VULKAN_KHR) \
+ _unavail(XrGraphicsRequirementsVulkanKHR, XR_TYPE_GRAPHICS_REQUIREMENTS_VULKAN_KHR) \
+ _unavail(XrVulkanInstanceCreateInfoKHR, XR_TYPE_VULKAN_INSTANCE_CREATE_INFO_KHR) \
+ _unavail(XrVulkanDeviceCreateInfoKHR, XR_TYPE_VULKAN_DEVICE_CREATE_INFO_KHR) \
+ _unavail(XrVulkanGraphicsDeviceGetInfoKHR, XR_TYPE_VULKAN_GRAPHICS_DEVICE_GET_INFO_KHR) \
+ _unavail(XrSwapchainImageFoveationVulkanFB, XR_TYPE_SWAPCHAIN_IMAGE_FOVEATION_VULKAN_FB) \
+ _unavail(XrSwapchainStateSamplerVulkanFB, XR_TYPE_SWAPCHAIN_STATE_SAMPLER_VULKAN_FB) \
+ _unavail(XrVulkanSwapchainCreateInfoMETA, XR_TYPE_VULKAN_SWAPCHAIN_CREATE_INFO_META) \
+
+#endif
+
+#if defined(XR_USE_PLATFORM_ANDROID)
+#define _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_PLATFORM_ANDROID(_avail, _unavail) \
+ _avail(XrInstanceCreateInfoAndroidKHR, XR_TYPE_INSTANCE_CREATE_INFO_ANDROID_KHR) \
+ _avail(XrLoaderInitInfoAndroidKHR, XR_TYPE_LOADER_INIT_INFO_ANDROID_KHR) \
+ _avail(XrAndroidSurfaceSwapchainCreateInfoFB, XR_TYPE_ANDROID_SURFACE_SWAPCHAIN_CREATE_INFO_FB) \
+ _avail(XrSwapchainStateAndroidSurfaceDimensionsFB, XR_TYPE_SWAPCHAIN_STATE_ANDROID_SURFACE_DIMENSIONS_FB) \
+
+#else
+#define _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_PLATFORM_ANDROID(_avail, _unavail) \
+ _unavail(XrInstanceCreateInfoAndroidKHR, XR_TYPE_INSTANCE_CREATE_INFO_ANDROID_KHR) \
+ _unavail(XrLoaderInitInfoAndroidKHR, XR_TYPE_LOADER_INIT_INFO_ANDROID_KHR) \
+ _unavail(XrAndroidSurfaceSwapchainCreateInfoFB, XR_TYPE_ANDROID_SURFACE_SWAPCHAIN_CREATE_INFO_FB) \
+ _unavail(XrSwapchainStateAndroidSurfaceDimensionsFB, XR_TYPE_SWAPCHAIN_STATE_ANDROID_SURFACE_DIMENSIONS_FB) \
+
+#endif
+
+#if defined(XR_USE_PLATFORM_EGL)
+#define _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_PLATFORM_EGL(_avail, _unavail) \
+ _avail(XrGraphicsBindingEGLMNDX, XR_TYPE_GRAPHICS_BINDING_EGL_MNDX) \
+
+#else
+#define _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_PLATFORM_EGL(_avail, _unavail) \
+ _unavail(XrGraphicsBindingEGLMNDX, XR_TYPE_GRAPHICS_BINDING_EGL_MNDX) \
+
+#endif
+
+#if defined(XR_USE_PLATFORM_WIN32)
+#define _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_PLATFORM_WIN32(_avail, _unavail) \
+ _avail(XrHolographicWindowAttachmentMSFT, XR_TYPE_HOLOGRAPHIC_WINDOW_ATTACHMENT_MSFT) \
+
+#else
+#define _impl_XR_LIST_ALL_STRUCTURE_TYPES_XR_USE_PLATFORM_WIN32(_avail, _unavail) \
+ _unavail(XrHolographicWindowAttachmentMSFT, XR_TYPE_HOLOGRAPHIC_WINDOW_ATTACHMENT_MSFT) \
+
+#endif
+
+
+
+
+#endif
+
diff --git a/thirdparty/openxr/src/loader/loader_core.cpp b/thirdparty/openxr/src/loader/loader_core.cpp
index a8bbfb4de2..f2bc87d1fa 100644
--- a/thirdparty/openxr/src/loader/loader_core.cpp
+++ b/thirdparty/openxr/src/loader/loader_core.cpp
@@ -35,7 +35,7 @@
// Global loader lock to:
// 1. Ensure ActiveLoaderInstance get and set operations are done atomically.
// 2. Ensure RuntimeInterface isn't used to unload the runtime while the runtime is in use.
-std::mutex &GetGlobalLoaderMutex() {
+static std::mutex &GetGlobalLoaderMutex() {
static std::mutex loader_mutex;
return loader_mutex;
}
@@ -58,6 +58,8 @@ static XRAPI_ATTR XrResult XRAPI_CALL LoaderXrTermDestroyDebugUtilsMessengerEXT(
static XRAPI_ATTR XrResult XRAPI_CALL LoaderXrTermSubmitDebugUtilsMessageEXT(
XrInstance instance, XrDebugUtilsMessageSeverityFlagsEXT messageSeverity, XrDebugUtilsMessageTypeFlagsEXT messageTypes,
const XrDebugUtilsMessengerCallbackDataEXT *callbackData);
+static XRAPI_ATTR XrResult XRAPI_CALL LoaderXrGetInstanceProcAddr(XrInstance instance, const char *name,
+ PFN_xrVoidFunction *function);
// Utility template function meant to validate if a fixed size string contains
// a null-terminator.
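For context, the function-local static above is the usual C++11 "initialize on first use" pattern, now given internal linkage. A hedged sketch of how callers inside this translation unit take the lock (the function below is illustrative, not part of the loader):

#include <mutex>

static void exampleLockedLoaderOperation() {
    // Construction of the static mutex is thread-safe since C++11; the guard
    // releases the lock automatically at the end of the scope.
    std::unique_lock<std::mutex> loader_lock(GetGlobalLoaderMutex());
    // ... read or modify loader-global state here ...
}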
diff --git a/thirdparty/openxr/src/loader/manifest_file.cpp b/thirdparty/openxr/src/loader/manifest_file.cpp
index df99d51f8f..1b0ef07848 100644
--- a/thirdparty/openxr/src/loader/manifest_file.cpp
+++ b/thirdparty/openxr/src/loader/manifest_file.cpp
@@ -348,14 +348,20 @@ static void ReadRuntimeDataFilesInRegistry(const std::string &runtime_registry_l
if (ERROR_SUCCESS != open_value) {
LoaderLogger::LogWarningMessage("",
"ReadRuntimeDataFilesInRegistry - failed to open registry key " + full_registry_location);
- } else if (ERROR_SUCCESS != RegGetValueW(hkey, nullptr, default_runtime_value_name_w.c_str(),
- RRF_RT_REG_SZ | REG_EXPAND_SZ | RRF_ZEROONFAILURE, NULL,
- reinterpret_cast<LPBYTE>(&value_w), &value_size_w)) {
+
+ return;
+ }
+
+ if (ERROR_SUCCESS != RegGetValueW(hkey, nullptr, default_runtime_value_name_w.c_str(),
+ RRF_RT_REG_SZ | REG_EXPAND_SZ | RRF_ZEROONFAILURE, NULL, reinterpret_cast<LPBYTE>(&value_w),
+ &value_size_w)) {
LoaderLogger::LogWarningMessage(
"", "ReadRuntimeDataFilesInRegistry - failed to read registry value " + default_runtime_value_name);
} else {
AddFilesInPath(wide_to_utf8(value_w), false, manifest_files);
}
+
+ RegCloseKey(hkey);
}
// Look for layer data files in the provided paths, but first check the environment override to determine
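The reshaped flow above makes the cleanup explicit: the early return covers the failed-open case, and RegCloseKey() now runs on both the read-failure and success paths. An equivalent RAII-style alternative, sketched here purely for illustration (the guard type is not part of the loader):

#include <windows.h>

struct ScopedHKey {                        // closes the key when it leaves scope
    HKEY key = nullptr;
    ~ScopedHKey() { if (key != nullptr) RegCloseKey(key); }
};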
diff --git a/thirdparty/openxr/src/xr_generated_dispatch_table.c b/thirdparty/openxr/src/xr_generated_dispatch_table.c
index 91fa0c3ca0..094f9fbbda 100644
--- a/thirdparty/openxr/src/xr_generated_dispatch_table.c
+++ b/thirdparty/openxr/src/xr_generated_dispatch_table.c
@@ -354,6 +354,13 @@ void GeneratedXrPopulateDispatchTable(struct XrGeneratedDispatchTable *table,
(get_inst_proc_addr(instance, "xrGetAudioInputDeviceGuidOculus", (PFN_xrVoidFunction*)&table->GetAudioInputDeviceGuidOculus));
#endif // defined(XR_USE_PLATFORM_WIN32)
+ // ---- XR_FB_scene extension commands
+ (get_inst_proc_addr(instance, "xrGetSpaceBoundingBox2DFB", (PFN_xrVoidFunction*)&table->GetSpaceBoundingBox2DFB));
+ (get_inst_proc_addr(instance, "xrGetSpaceBoundingBox3DFB", (PFN_xrVoidFunction*)&table->GetSpaceBoundingBox3DFB));
+ (get_inst_proc_addr(instance, "xrGetSpaceSemanticLabelsFB", (PFN_xrVoidFunction*)&table->GetSpaceSemanticLabelsFB));
+ (get_inst_proc_addr(instance, "xrGetSpaceBoundary2DFB", (PFN_xrVoidFunction*)&table->GetSpaceBoundary2DFB));
+ (get_inst_proc_addr(instance, "xrGetSpaceRoomLayoutFB", (PFN_xrVoidFunction*)&table->GetSpaceRoomLayoutFB));
+
// ---- XR_ALMALENCE_digital_lens_control extension commands
(get_inst_proc_addr(instance, "xrSetDigitalLensControlALMALENCE", (PFN_xrVoidFunction*)&table->SetDigitalLensControlALMALENCE));
@@ -368,6 +375,13 @@ void GeneratedXrPopulateDispatchTable(struct XrGeneratedDispatchTable *table,
(get_inst_proc_addr(instance, "xrSetPerformanceMetricsStateMETA", (PFN_xrVoidFunction*)&table->SetPerformanceMetricsStateMETA));
(get_inst_proc_addr(instance, "xrGetPerformanceMetricsStateMETA", (PFN_xrVoidFunction*)&table->GetPerformanceMetricsStateMETA));
(get_inst_proc_addr(instance, "xrQueryPerformanceMetricsCounterMETA", (PFN_xrVoidFunction*)&table->QueryPerformanceMetricsCounterMETA));
+
+ // ---- XR_HTC_passthrough extension commands
+ (get_inst_proc_addr(instance, "xrCreatePassthroughHTC", (PFN_xrVoidFunction*)&table->CreatePassthroughHTC));
+ (get_inst_proc_addr(instance, "xrDestroyPassthroughHTC", (PFN_xrVoidFunction*)&table->DestroyPassthroughHTC));
+
+ // ---- XR_HTC_foveation extension commands
+ (get_inst_proc_addr(instance, "xrApplyFoveationHTC", (PFN_xrVoidFunction*)&table->ApplyFoveationHTC));
}
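For orientation, a hedged sketch of how the generated table is typically populated by a loader or API layer; the wrapper function and variable names are illustrative assumptions:

#include "xr_generated_dispatch_table.h"

// Resolves every command, including the newly added XR_FB_scene,
// XR_HTC_passthrough and XR_HTC_foveation entry points, through the same
// xrGetInstanceProcAddr path as the core commands.
static void populate_table(XrInstance instance, PFN_xrGetInstanceProcAddr gipa,
                           struct XrGeneratedDispatchTable *table) {
    GeneratedXrPopulateDispatchTable(table, instance, gipa);
}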
diff --git a/thirdparty/openxr/src/xr_generated_dispatch_table.h b/thirdparty/openxr/src/xr_generated_dispatch_table.h
index 51d48bef43..93d07a149e 100644
--- a/thirdparty/openxr/src/xr_generated_dispatch_table.h
+++ b/thirdparty/openxr/src/xr_generated_dispatch_table.h
@@ -357,6 +357,13 @@ struct XrGeneratedDispatchTable {
PFN_xrGetAudioInputDeviceGuidOculus GetAudioInputDeviceGuidOculus;
#endif // defined(XR_USE_PLATFORM_WIN32)
+ // ---- XR_FB_scene extension commands
+ PFN_xrGetSpaceBoundingBox2DFB GetSpaceBoundingBox2DFB;
+ PFN_xrGetSpaceBoundingBox3DFB GetSpaceBoundingBox3DFB;
+ PFN_xrGetSpaceSemanticLabelsFB GetSpaceSemanticLabelsFB;
+ PFN_xrGetSpaceBoundary2DFB GetSpaceBoundary2DFB;
+ PFN_xrGetSpaceRoomLayoutFB GetSpaceRoomLayoutFB;
+
// ---- XR_ALMALENCE_digital_lens_control extension commands
PFN_xrSetDigitalLensControlALMALENCE SetDigitalLensControlALMALENCE;
@@ -371,6 +378,13 @@ struct XrGeneratedDispatchTable {
PFN_xrSetPerformanceMetricsStateMETA SetPerformanceMetricsStateMETA;
PFN_xrGetPerformanceMetricsStateMETA GetPerformanceMetricsStateMETA;
PFN_xrQueryPerformanceMetricsCounterMETA QueryPerformanceMetricsCounterMETA;
+
+ // ---- XR_HTC_passthrough extension commands
+ PFN_xrCreatePassthroughHTC CreatePassthroughHTC;
+ PFN_xrDestroyPassthroughHTC DestroyPassthroughHTC;
+
+ // ---- XR_HTC_foveation extension commands
+ PFN_xrApplyFoveationHTC ApplyFoveationHTC;
};
diff --git a/thirdparty/recastnavigation/Recast/Include/Recast.h b/thirdparty/recastnavigation/Recast/Include/Recast.h
index 4d557389b5..246376bbee 100644
--- a/thirdparty/recastnavigation/Recast/Include/Recast.h
+++ b/thirdparty/recastnavigation/Recast/Include/Recast.h
@@ -22,13 +22,16 @@
/// The value of PI used by Recast.
static const float RC_PI = 3.14159265f;
+/// Used to ignore unused function parameters and silence any compiler warnings.
+template<class T> void rcIgnoreUnused(const T&) { }
+
/// Recast log categories.
/// @see rcContext
enum rcLogCategory
{
RC_LOG_PROGRESS = 1, ///< A progress log entry.
RC_LOG_WARNING, ///< A warning log entry.
- RC_LOG_ERROR, ///< An error log entry.
+ RC_LOG_ERROR ///< An error log entry.
};
/// Recast performance timer categories.
@@ -101,7 +104,6 @@ enum rcTimerLabel
class rcContext
{
public:
-
/// Contructor.
/// @param[in] state TRUE if the logging and performance timers should be enabled. [Default: true]
inline rcContext(bool state = true) : m_logEnabled(state), m_timerEnabled(state) {}
@@ -140,31 +142,30 @@ public:
inline int getAccumulatedTime(const rcTimerLabel label) const { return m_timerEnabled ? doGetAccumulatedTime(label) : -1; }
protected:
-
/// Clears all log entries.
- virtual void doResetLog() {}
+ virtual void doResetLog();
/// Logs a message.
/// @param[in] category The category of the message.
/// @param[in] msg The formatted message.
/// @param[in] len The length of the formatted message.
- virtual void doLog(const rcLogCategory /*category*/, const char* /*msg*/, const int /*len*/) {}
+ virtual void doLog(const rcLogCategory category, const char* msg, const int len) { rcIgnoreUnused(category); rcIgnoreUnused(msg); rcIgnoreUnused(len); }
/// Clears all timers. (Resets all to unused.)
virtual void doResetTimers() {}
/// Starts the specified performance timer.
/// @param[in] label The category of timer.
- virtual void doStartTimer(const rcTimerLabel /*label*/) {}
+ virtual void doStartTimer(const rcTimerLabel label) { rcIgnoreUnused(label); }
/// Stops the specified performance timer.
/// @param[in] label The category of the timer.
- virtual void doStopTimer(const rcTimerLabel /*label*/) {}
+ virtual void doStopTimer(const rcTimerLabel label) { rcIgnoreUnused(label); }
/// Returns the total accumulated time of the specified performance timer.
/// @param[in] label The category of the timer.
/// @return The accumulated time of the timer, or -1 if timers are disabled or the timer has never been started.
- virtual int doGetAccumulatedTime(const rcTimerLabel /*label*/) const { return -1; }
+ virtual int doGetAccumulatedTime(const rcTimerLabel label) const { rcIgnoreUnused(label); return -1; }
/// True if logging is enabled.
bool m_logEnabled;
@@ -564,7 +565,7 @@ static const int RC_AREA_BORDER = 0x20000;
enum rcBuildContoursFlags
{
RC_CONTOUR_TESS_WALL_EDGES = 0x01, ///< Tessellate solid (impassable) edges during contour simplification.
- RC_CONTOUR_TESS_AREA_EDGES = 0x02, ///< Tessellate edges between areas during contour simplification.
+ RC_CONTOUR_TESS_AREA_EDGES = 0x02 ///< Tessellate edges between areas during contour simplification.
};
/// Applied to the region id field of contour vertices in order to extract the region id.
@@ -595,11 +596,6 @@ static const int RC_NOT_CONNECTED = 0x3f;
/// @name General helper functions
/// @{
-/// Used to ignore a function parameter. VS complains about unused parameters
-/// and this silences the warning.
-/// @param [in] _ Unused parameter
-template<class T> void rcIgnoreUnused(const T&) { }
-
/// Swaps the values of the two parameters.
/// @param[in,out] a Value A
/// @param[in,out] b Value B
@@ -996,6 +992,7 @@ void rcMarkConvexPolyArea(rcContext* ctx, const float* verts, const int nverts,
/// @ingroup recast
/// @param[in] verts The vertices of the polygon [Form: (x, y, z) * @p nverts]
/// @param[in] nverts The number of vertices in the polygon.
+/// @param[in] offset How much to offset the polygon by. [Units: wu]
/// @param[out] outVerts The offset vertices (should hold up to 2 * @p nverts) [Form: (x, y, z) * return value]
/// @param[in] maxOutVerts The max number of vertices that can be stored to @p outVerts.
/// @returns Number of vertices in the offset polygon or 0 if too few vertices in @p outVerts.
diff --git a/thirdparty/recastnavigation/Recast/Include/RecastAlloc.h b/thirdparty/recastnavigation/Recast/Include/RecastAlloc.h
index 071278d659..8b166d736d 100644
--- a/thirdparty/recastnavigation/Recast/Include/RecastAlloc.h
+++ b/thirdparty/recastnavigation/Recast/Include/RecastAlloc.h
@@ -112,7 +112,7 @@ class rcVectorBase {
typedef rcSizeType size_type;
typedef T value_type;
- rcVectorBase() : m_size(0), m_cap(0), m_data(0) {};
+ rcVectorBase() : m_size(0), m_cap(0), m_data(0) {}
rcVectorBase(const rcVectorBase<T, H>& other) : m_size(0), m_cap(0), m_data(0) { assign(other.begin(), other.end()); }
explicit rcVectorBase(rcSizeType count) : m_size(0), m_cap(0), m_data(0) { resize(count); }
rcVectorBase(rcSizeType count, const T& value) : m_size(0), m_cap(0), m_data(0) { resize(count, value); }
@@ -142,8 +142,8 @@ class rcVectorBase {
const T& front() const { rcAssert(m_size); return m_data[0]; }
T& front() { rcAssert(m_size); return m_data[0]; }
- const T& back() const { rcAssert(m_size); return m_data[m_size - 1]; };
- T& back() { rcAssert(m_size); return m_data[m_size - 1]; };
+ const T& back() const { rcAssert(m_size); return m_data[m_size - 1]; }
+ T& back() { rcAssert(m_size); return m_data[m_size - 1]; }
const T* data() const { return m_data; }
T* data() { return m_data; }
diff --git a/thirdparty/recastnavigation/Recast/Source/Recast.cpp b/thirdparty/recastnavigation/Recast/Source/Recast.cpp
index 1b71710cdc..4cf145c981 100644
--- a/thirdparty/recastnavigation/Recast/Source/Recast.cpp
+++ b/thirdparty/recastnavigation/Recast/Source/Recast.cpp
@@ -94,6 +94,11 @@ void rcContext::log(const rcLogCategory category, const char* format, ...)
doLog(category, msg, len);
}
+void rcContext::doResetLog()
+{
+ // Defined out of line to fix the weak v-tables warning
+}
+
rcHeightfield* rcAllocHeightfield()
{
return rcNew<rcHeightfield>(RC_ALLOC_PERM);
diff --git a/thirdparty/recastnavigation/Recast/Source/RecastMesh.cpp b/thirdparty/recastnavigation/Recast/Source/RecastMesh.cpp
index e99eaebb79..ea09ee1de0 100644
--- a/thirdparty/recastnavigation/Recast/Source/RecastMesh.cpp
+++ b/thirdparty/recastnavigation/Recast/Source/RecastMesh.cpp
@@ -566,7 +566,6 @@ static bool canRemoveVertex(rcContext* ctx, rcPolyMesh& mesh, const unsigned sho
const int nvp = mesh.nvp;
// Count number of polygons to remove.
- int numRemovedVerts = 0;
int numTouchedVerts = 0;
int numRemainingEdges = 0;
for (int i = 0; i < mesh.npolys; ++i)
@@ -586,7 +585,6 @@ static bool canRemoveVertex(rcContext* ctx, rcPolyMesh& mesh, const unsigned sho
}
if (numRemoved)
{
- numRemovedVerts += numRemoved;
numRemainingEdges += numVerts-(numRemoved+1);
}
}
diff --git a/thirdparty/recastnavigation/Recast/Source/RecastMeshDetail.cpp b/thirdparty/recastnavigation/Recast/Source/RecastMeshDetail.cpp
index 1999200c1a..40bfc9b4bc 100644
--- a/thirdparty/recastnavigation/Recast/Source/RecastMeshDetail.cpp
+++ b/thirdparty/recastnavigation/Recast/Source/RecastMeshDetail.cpp
@@ -284,7 +284,7 @@ static unsigned short getHeight(const float fx, const float fy, const float fz,
enum EdgeValues
{
EV_UNDEF = -1,
- EV_HULL = -2,
+ EV_HULL = -2
};
static int findEdge(const int* edges, int nedges, int s, int t)
diff --git a/thirdparty/recastnavigation/Recast/Source/RecastRasterization.cpp b/thirdparty/recastnavigation/Recast/Source/RecastRasterization.cpp
index a4cef74909..673550e79e 100644
--- a/thirdparty/recastnavigation/Recast/Source/RecastRasterization.cpp
+++ b/thirdparty/recastnavigation/Recast/Source/RecastRasterization.cpp
@@ -264,7 +264,8 @@ static bool rasterizeTri(const float* v0, const float* v1, const float* v2,
// Calculate the footprint of the triangle on the grid's y-axis
int y0 = (int)((tmin[2] - bmin[2])*ics);
int y1 = (int)((tmax[2] - bmin[2])*ics);
- y0 = rcClamp(y0, 0, h-1);
+ // use -1 rather than 0 to cut the polygon properly at the start of the tile
+ y0 = rcClamp(y0, -1, h-1);
y1 = rcClamp(y1, 0, h-1);
// Clip the triangle into all grid cells it touches.
@@ -283,7 +284,7 @@ static bool rasterizeTri(const float* v0, const float* v1, const float* v2,
dividePoly(in, nvIn, inrow, &nvrow, p1, &nvIn, cz+cs, 2);
rcSwap(in, p1);
if (nvrow < 3) continue;
-
+ if (y < 0) continue;
// find the horizontal bounds in the row
float minX = inrow[0], maxX = inrow[0];
for (int i=1; i<nvrow; ++i)
@@ -293,7 +294,10 @@ static bool rasterizeTri(const float* v0, const float* v1, const float* v2,
}
int x0 = (int)((minX - bmin[0])*ics);
int x1 = (int)((maxX - bmin[0])*ics);
- x0 = rcClamp(x0, 0, w-1);
+ if (x1 < 0 || x0 >= w) {
+ continue;
+ }
+ x0 = rcClamp(x0, -1, w-1);
x1 = rcClamp(x1, 0, w-1);
int nv, nv2 = nvrow;
@@ -305,7 +309,7 @@ static bool rasterizeTri(const float* v0, const float* v1, const float* v2,
dividePoly(inrow, nv2, p1, &nv, p2, &nv2, cx+cs, 0);
rcSwap(inrow, p2);
if (nv < 3) continue;
-
+ if (x < 0) continue;
// Calculate min and max of the span.
float smin = p1[1], smax = p1[1];
for (int i = 1; i < nv; ++i)
diff --git a/thirdparty/spirv-reflect/patches/specialization-constants.patch b/thirdparty/spirv-reflect/patches/specialization-constants.patch
index 99815c9162..b755950423 100644
--- a/thirdparty/spirv-reflect/patches/specialization-constants.patch
+++ b/thirdparty/spirv-reflect/patches/specialization-constants.patch
@@ -1,5 +1,5 @@
diff --git a/thirdparty/spirv-reflect/spirv_reflect.c b/thirdparty/spirv-reflect/spirv_reflect.c
-index cdcf3ca663..c174ae1900 100644
+index 8c70ebecfb..c5ed7ab07d 100644
--- a/thirdparty/spirv-reflect/spirv_reflect.c
+++ b/thirdparty/spirv-reflect/spirv_reflect.c
@@ -126,6 +126,9 @@ typedef struct SpvReflectPrvDecorations {
@@ -12,7 +12,7 @@ index cdcf3ca663..c174ae1900 100644
SpvReflectPrvStringDecoration semantic;
uint32_t array_stride;
uint32_t matrix_stride;
-@@ -639,6 +642,9 @@ static SpvReflectResult ParseNodes(SpvReflectPrvParser* p_parser)
+@@ -641,6 +644,9 @@ static SpvReflectResult ParseNodes(SpvReflectPrvParser* p_parser)
p_parser->nodes[i].decorations.offset.value = (uint32_t)INVALID_VALUE;
p_parser->nodes[i].decorations.uav_counter_buffer.value = (uint32_t)INVALID_VALUE;
p_parser->nodes[i].decorations.built_in = (SpvBuiltIn)INVALID_VALUE;
@@ -22,7 +22,7 @@ index cdcf3ca663..c174ae1900 100644
}
// Mark source file id node
p_parser->source_file_id = (uint32_t)INVALID_VALUE;
-@@ -829,10 +835,16 @@ static SpvReflectResult ParseNodes(SpvReflectPrvParser* p_parser)
+@@ -837,10 +843,16 @@ static SpvReflectResult ParseNodes(SpvReflectPrvParser* p_parser)
CHECKED_READU32(p_parser, p_node->word_offset + 2, p_node->result_id);
}
break;
@@ -41,7 +41,7 @@ index cdcf3ca663..c174ae1900 100644
case SpvOpSpecConstantComposite:
case SpvOpSpecConstantOp: {
CHECKED_READU32(p_parser, p_node->word_offset + 1, p_node->result_type_id);
-@@ -864,7 +876,7 @@ static SpvReflectResult ParseNodes(SpvReflectPrvParser* p_parser)
+@@ -872,7 +884,7 @@ static SpvReflectResult ParseNodes(SpvReflectPrvParser* p_parser)
CHECKED_READU32(p_parser, p_node->word_offset + 3, p_access_chain->base_id);
//
// SPIRV_ACCESS_CHAIN_INDEX_OFFSET (4) is the number of words up until the first index:
@@ -50,7 +50,7 @@ index cdcf3ca663..c174ae1900 100644
//
p_access_chain->index_count = (node_word_count - SPIRV_ACCESS_CHAIN_INDEX_OFFSET);
if (p_access_chain->index_count > 0) {
-@@ -1346,6 +1358,9 @@ static SpvReflectResult ParseDecorations(SpvReflectPrvParser* p_parser)
+@@ -1354,6 +1366,9 @@ static SpvReflectResult ParseDecorations(SpvReflectPrvParser* p_parser)
skip = true;
}
break;
@@ -60,7 +60,7 @@ index cdcf3ca663..c174ae1900 100644
case SpvDecorationRelaxedPrecision:
case SpvDecorationBlock:
case SpvDecorationBufferBlock:
-@@ -1495,7 +1510,14 @@ static SpvReflectResult ParseDecorations(SpvReflectPrvParser* p_parser)
+@@ -1503,7 +1518,14 @@ static SpvReflectResult ParseDecorations(SpvReflectPrvParser* p_parser)
p_target_decorations->input_attachment_index.word_offset = word_offset;
}
break;
@@ -76,7 +76,7 @@ index cdcf3ca663..c174ae1900 100644
case SpvReflectDecorationHlslCounterBufferGOOGLE: {
uint32_t word_offset = p_node->word_offset + member_offset+ 3;
CHECKED_READU32(p_parser, word_offset, p_target_decorations->uav_counter_buffer.value);
-@@ -1803,6 +1825,13 @@ static SpvReflectResult ParseType(
+@@ -1811,6 +1833,13 @@ static SpvReflectResult ParseType(
p_type->type_flags |= SPV_REFLECT_TYPE_FLAG_EXTERNAL_ACCELERATION_STRUCTURE;
}
break;
@@ -90,7 +90,7 @@ index cdcf3ca663..c174ae1900 100644
}
if (result == SPV_REFLECT_RESULT_SUCCESS) {
-@@ -3332,6 +3361,69 @@ static SpvReflectResult ParseExecutionModes(
+@@ -3378,6 +3407,69 @@ static SpvReflectResult ParseExecutionModes(
return SPV_REFLECT_RESULT_SUCCESS;
}
@@ -160,7 +160,7 @@ index cdcf3ca663..c174ae1900 100644
static SpvReflectResult ParsePushConstantBlocks(
SpvReflectPrvParser* p_parser,
SpvReflectShaderModule* p_module)
-@@ -3717,6 +3809,12 @@ static SpvReflectResult CreateShaderModule(
+@@ -3763,6 +3855,12 @@ static SpvReflectResult CreateShaderModule(
result = ParsePushConstantBlocks(&parser, p_module);
SPV_REFLECT_ASSERT(result == SPV_REFLECT_RESULT_SUCCESS);
}
@@ -173,9 +173,9 @@ index cdcf3ca663..c174ae1900 100644
if (result == SPV_REFLECT_RESULT_SUCCESS) {
result = ParseEntryPoints(&parser, p_module);
SPV_REFLECT_ASSERT(result == SPV_REFLECT_RESULT_SUCCESS);
-@@ -3875,6 +3973,9 @@ void spvReflectDestroyShaderModule(SpvReflectShaderModule* p_module)
- SafeFree(p_entry->execution_modes);
+@@ -3926,6 +4024,9 @@ void spvReflectDestroyShaderModule(SpvReflectShaderModule* p_module)
}
+ SafeFree(p_module->capabilities);
SafeFree(p_module->entry_points);
+// -- GODOT begin --
+ SafeFree(p_module->specialization_constants);
@@ -183,7 +183,7 @@ index cdcf3ca663..c174ae1900 100644
// Push constants
for (size_t i = 0; i < p_module->push_constant_block_count; ++i) {
-@@ -4145,6 +4246,38 @@ SpvReflectResult spvReflectEnumerateEntryPointInterfaceVariables(
+@@ -4196,6 +4297,38 @@ SpvReflectResult spvReflectEnumerateEntryPointInterfaceVariables(
return SPV_REFLECT_RESULT_SUCCESS;
}
@@ -223,7 +223,7 @@ index cdcf3ca663..c174ae1900 100644
const SpvReflectShaderModule* p_module,
uint32_t* p_count,
diff --git a/thirdparty/spirv-reflect/spirv_reflect.h b/thirdparty/spirv-reflect/spirv_reflect.h
-index 02b81613a1..02850f8811 100644
+index 690ae6c105..1ea99d8266 100644
--- a/thirdparty/spirv-reflect/spirv_reflect.h
+++ b/thirdparty/spirv-reflect/spirv_reflect.h
@@ -329,6 +329,28 @@ typedef struct SpvReflectTypeDescription {
@@ -255,7 +255,7 @@ index 02b81613a1..02850f8811 100644
/*! @struct SpvReflectInterfaceVariable
-@@ -483,6 +505,10 @@ typedef struct SpvReflectShaderModule {
+@@ -493,6 +515,10 @@ typedef struct SpvReflectShaderModule {
SpvReflectInterfaceVariable* interface_variables; // Uses value(s) from first entry point
uint32_t push_constant_block_count; // Uses value(s) from first entry point
SpvReflectBlockVariable* push_constant_blocks; // Uses value(s) from first entry point
@@ -266,7 +266,7 @@ index 02b81613a1..02850f8811 100644
struct Internal {
SpvReflectModuleFlags module_flags;
-@@ -755,6 +781,33 @@ SpvReflectResult spvReflectEnumerateInputVariables(
+@@ -765,6 +791,33 @@ SpvReflectResult spvReflectEnumerateInputVariables(
SpvReflectInterfaceVariable** pp_variables
);
diff --git a/thirdparty/spirv-reflect/patches/zero-calloc.patch b/thirdparty/spirv-reflect/patches/zero-calloc.patch
index 796fe1266a..3ce2d08936 100644
--- a/thirdparty/spirv-reflect/patches/zero-calloc.patch
+++ b/thirdparty/spirv-reflect/patches/zero-calloc.patch
@@ -1,8 +1,8 @@
diff --git a/thirdparty/spirv-reflect/spirv_reflect.c b/thirdparty/spirv-reflect/spirv_reflect.c
-index c174ae1900..11ccbdee3a 100644
+index c5ed7ab07d..f2be1f8cae 100644
--- a/thirdparty/spirv-reflect/spirv_reflect.c
+++ b/thirdparty/spirv-reflect/spirv_reflect.c
-@@ -3322,12 +3322,18 @@ static SpvReflectResult ParseExecutionModes(
+@@ -3368,12 +3368,18 @@ static SpvReflectResult ParseExecutionModes(
}
for (size_t entry_point_idx = 0; entry_point_idx < p_module->entry_point_count; ++entry_point_idx) {
SpvReflectEntryPoint* p_entry_point = &p_module->entry_points[entry_point_idx];
diff --git a/thirdparty/spirv-reflect/spirv_reflect.c b/thirdparty/spirv-reflect/spirv_reflect.c
index 11ccbdee3a..f2be1f8cae 100644
--- a/thirdparty/spirv-reflect/spirv_reflect.c
+++ b/thirdparty/spirv-reflect/spirv_reflect.c
@@ -142,6 +142,7 @@ typedef struct SpvReflectPrvNode {
SpvOp op;
uint32_t result_type_id;
uint32_t type_id;
+ SpvCapability capability;
SpvStorageClass storage_class;
uint32_t word_offset;
uint32_t word_count;
@@ -208,6 +209,7 @@ typedef struct SpvReflectPrvParser {
size_t node_count;
SpvReflectPrvNode* nodes;
uint32_t entry_point_count;
+ uint32_t capability_count;
uint32_t function_count;
SpvReflectPrvFunction* functions;
uint32_t access_chain_count;
@@ -739,6 +741,12 @@ static SpvReflectResult ParseNodes(SpvReflectPrvParser* p_parser)
}
break;
+ case SpvOpCapability: {
+ CHECKED_READU32(p_parser, p_node->word_offset + 1, p_node->capability);
+ ++(p_parser->capability_count);
+ }
+ break;
+
case SpvOpName:
case SpvOpMemberName:
{
@@ -1886,6 +1894,44 @@ static SpvReflectResult ParseTypes(
return SPV_REFLECT_RESULT_SUCCESS;
}
+static SpvReflectResult ParseCapabilities(
+ SpvReflectPrvParser* p_parser,
+ SpvReflectShaderModule* p_module)
+{
+ if (p_parser->capability_count == 0) {
+ return SPV_REFLECT_RESULT_SUCCESS;
+ }
+
+ p_module->capability_count = p_parser->capability_count;
+ p_module->capabilities = (SpvReflectCapability*)calloc(p_module->capability_count,
+ sizeof(*(p_module->capabilities)));
+ if (IsNull(p_module->capabilities)) {
+ return SPV_REFLECT_RESULT_ERROR_ALLOC_FAILED;
+ }
+
+ // Mark all types with an invalid state
+ for (size_t i = 0; i < p_module->capability_count; ++i) {
+ SpvReflectCapability* p_cap = &(p_module->capabilities[i]);
+ p_cap->value = SpvCapabilityMax;
+ p_cap->word_offset = (uint32_t)INVALID_VALUE;
+ }
+
+ size_t capability_index = 0;
+ for (size_t i = 0; i < p_parser->node_count; ++i) {
+ SpvReflectPrvNode* p_node = &(p_parser->nodes[i]);
+ if (SpvOpCapability != p_node->op) {
+ continue;
+ }
+
+ SpvReflectCapability* p_cap = &(p_module->capabilities[capability_index]);
+ p_cap->value = p_node->capability;
+ p_cap->word_offset = p_node->word_offset + 1;
+ ++capability_index;
+ }
+
+ return SPV_REFLECT_RESULT_SUCCESS;
+}
+
static int SortCompareDescriptorBinding(const void* a, const void* b)
{
const SpvReflectDescriptorBinding* p_elem_a = (const SpvReflectDescriptorBinding*)a;
@@ -3825,6 +3871,10 @@ static SpvReflectResult CreateShaderModule(
result = ParseEntryPoints(&parser, p_module);
SPV_REFLECT_ASSERT(result == SPV_REFLECT_RESULT_SUCCESS);
}
+ if (result == SPV_REFLECT_RESULT_SUCCESS) {
+ result = ParseCapabilities(&parser, p_module);
+ SPV_REFLECT_ASSERT(result == SPV_REFLECT_RESULT_SUCCESS);
+ }
if (result == SPV_REFLECT_RESULT_SUCCESS && p_module->entry_point_count > 0) {
SpvReflectEntryPoint* p_entry = &(p_module->entry_points[0]);
p_module->entry_point_name = p_entry->name;
@@ -3978,6 +4028,7 @@ void spvReflectDestroyShaderModule(SpvReflectShaderModule* p_module)
SafeFree(p_entry->used_push_constants);
SafeFree(p_entry->execution_modes);
}
+ SafeFree(p_module->capabilities);
SafeFree(p_module->entry_points);
// -- GODOT begin --
SafeFree(p_module->specialization_constants);
diff --git a/thirdparty/spirv-reflect/spirv_reflect.h b/thirdparty/spirv-reflect/spirv_reflect.h
index 02850f8811..1ea99d8266 100644
--- a/thirdparty/spirv-reflect/spirv_reflect.h
+++ b/thirdparty/spirv-reflect/spirv_reflect.h
@@ -478,6 +478,14 @@ typedef struct SpvReflectEntryPoint {
uint32_t output_vertices; // valid for geometry, tesselation
} SpvReflectEntryPoint;
+/*! @struct SpvReflectCapability
+
+*/
+typedef struct SpvReflectCapability {
+ SpvCapability value;
+ uint32_t word_offset;
+} SpvReflectCapability;
+
/*! @struct SpvReflectShaderModule
*/
@@ -491,6 +499,8 @@ typedef struct SpvReflectShaderModule {
uint32_t source_language_version;
const char* source_file;
const char* source_source;
+ uint32_t capability_count;
+ SpvReflectCapability* capabilities;
SpvExecutionModel spirv_execution_model; // Uses value(s) from first entry point
SpvReflectShaderStageFlagBits shader_stage; // Uses value(s) from first entry point
uint32_t descriptor_binding_count; // Uses value(s) from first entry point
diff --git a/thirdparty/thorvg/inc/config.h b/thirdparty/thorvg/inc/config.h
index 879b70442b..eda302aec0 100644
--- a/thirdparty/thorvg/inc/config.h
+++ b/thirdparty/thorvg/inc/config.h
@@ -13,5 +13,5 @@
#define THORVG_JPG_LOADER_SUPPORT 1
-#define THORVG_VERSION_STRING "0.8.1"
+#define THORVG_VERSION_STRING "0.8.3"
#endif
diff --git a/thirdparty/thorvg/inc/thorvg.h b/thirdparty/thorvg/inc/thorvg.h
index b08356d9d5..7e8988a65e 100644
--- a/thirdparty/thorvg/inc/thorvg.h
+++ b/thirdparty/thorvg/inc/thorvg.h
@@ -18,7 +18,7 @@
#include <string>
#ifdef TVG_BUILD
- #if defined(_MSC_VER) && !defined(__clang__)
+ #if defined(_WIN32) && !defined(__clang__)
#define TVG_EXPORT __declspec(dllexport)
#define TVG_DEPRECATED __declspec(deprecated)
#else
@@ -74,7 +74,7 @@ class Accessor;
/**
* @brief Enumeration specifying the result from the APIs.
*/
-enum class TVG_EXPORT Result
+enum class Result
{
Success = 0, ///< The value returned in case of a correct request execution.
InvalidArguments, ///< The value returned in the event of a problem with the arguments given to the API - e.g. empty paths or null pointers.
@@ -91,7 +91,7 @@ enum class TVG_EXPORT Result
* Not to be confused with the path commands from the svg path element (like M, L, Q, H and many others).
* TVG interprets all of them and translates to the ones from the PathCommand values.
*/
-enum class TVG_EXPORT PathCommand
+enum class PathCommand
{
Close = 0, ///< Ends the current sub-path and connects it with its initial point. This command doesn't expect any points.
MoveTo, ///< Sets a new initial point of the sub-path and a new current point. This command expects 1 point: the starting position.
@@ -102,7 +102,7 @@ enum class TVG_EXPORT PathCommand
/**
* @brief Enumeration determining the ending type of a stroke in the open sub-paths.
*/
-enum class TVG_EXPORT StrokeCap
+enum class StrokeCap
{
Square = 0, ///< The stroke is extended in both end-points of a sub-path by a rectangle, with the width equal to the stroke width and the length equal to the half of the stroke width. For zero length sub-paths the square is rendered with the size of the stroke width.
Round, ///< The stroke is extended in both end-points of a sub-path by a half circle, with a radius equal to the half of a stroke width. For zero length sub-paths a full circle is rendered.
@@ -112,7 +112,7 @@ enum class TVG_EXPORT StrokeCap
/**
* @brief Enumeration determining the style used at the corners of joined stroked path segments.
*/
-enum class TVG_EXPORT StrokeJoin
+enum class StrokeJoin
{
Bevel = 0, ///< The outer corner of the joined path segments is bevelled at the join point. The triangular region of the corner is enclosed by a straight line between the outer corners of each stroke.
Round, ///< The outer corner of the joined path segments is rounded. The circular region is centered at the join point.
@@ -122,7 +122,7 @@ enum class TVG_EXPORT StrokeJoin
/**
* @brief Enumeration specifying how to fill the area outside the gradient bounds.
*/
-enum class TVG_EXPORT FillSpread
+enum class FillSpread
{
Pad = 0, ///< The remaining area is filled with the closest stop color.
Reflect, ///< The gradient pattern is reflected outside the gradient area until the expected region is filled.
@@ -132,7 +132,7 @@ enum class TVG_EXPORT FillSpread
/**
* @brief Enumeration specifying the algorithm used to establish which parts of the shape are treated as the inside of the shape.
*/
-enum class TVG_EXPORT FillRule
+enum class FillRule
{
Winding = 0, ///< A line from the point to a location outside the shape is drawn. The intersections of the line with the path segment of the shape are counted. Starting from zero, if the path segment of the shape crosses the line clockwise, one is added, otherwise one is subtracted. If the resulting sum is non zero, the point is inside the shape.
EvenOdd ///< A line from the point to a location outside the shape is drawn and its intersections with the path segments of the shape are counted. If the number of intersections is an odd number, the point is inside the shape.
@@ -141,7 +141,7 @@ enum class TVG_EXPORT FillRule
/**
* @brief Enumeration indicating the method used in the composition of two objects - the target and the source.
*/
-enum class TVG_EXPORT CompositeMethod
+enum class CompositeMethod
{
None = 0, ///< No composition is applied.
ClipPath, ///< The intersection of the source and the target is determined and only the resulting pixels from the source are rendered.
@@ -153,7 +153,7 @@ enum class TVG_EXPORT CompositeMethod
/**
* @brief Enumeration specifying the engine type used for the graphics backend. For multiple backends bitwise operation is allowed.
*/
-enum class TVG_EXPORT CanvasEngine
+enum class CanvasEngine
{
Sw = (1 << 1), ///< CPU rasterizer.
Gl = (1 << 2) ///< OpenGL rasterizer.
diff --git a/thirdparty/thorvg/src/lib/sw_engine/tvgSwCommon.h b/thirdparty/thorvg/src/lib/sw_engine/tvgSwCommon.h
index 47b0cb83f5..157fdb8f82 100644
--- a/thirdparty/thorvg/src/lib/sw_engine/tvgSwCommon.h
+++ b/thirdparty/thorvg/src/lib/sw_engine/tvgSwCommon.h
@@ -1,4 +1,4 @@
-/*
+/*
* Copyright (c) 2020 - 2022 Samsung Electronics Co., Ltd. All rights reserved.
* Permission is hereby granted, free of charge, to any person obtaining a copy
diff --git a/thirdparty/thorvg/src/lib/sw_engine/tvgSwFill.cpp b/thirdparty/thorvg/src/lib/sw_engine/tvgSwFill.cpp
index bba6f26a0b..04014a9ec3 100644
--- a/thirdparty/thorvg/src/lib/sw_engine/tvgSwFill.cpp
+++ b/thirdparty/thorvg/src/lib/sw_engine/tvgSwFill.cpp
@@ -1,4 +1,4 @@
-/*
+/*
* Copyright (c) 2020 - 2022 Samsung Electronics Co., Ltd. All rights reserved.
* Permission is hereby granted, free of charge, to any person obtaining a copy
diff --git a/thirdparty/thorvg/src/lib/sw_engine/tvgSwMath.cpp b/thirdparty/thorvg/src/lib/sw_engine/tvgSwMath.cpp
index ced66ae35c..1027bb1f79 100644
--- a/thirdparty/thorvg/src/lib/sw_engine/tvgSwMath.cpp
+++ b/thirdparty/thorvg/src/lib/sw_engine/tvgSwMath.cpp
@@ -1,4 +1,4 @@
-/*
+/*
* Copyright (c) 2020 - 2022 Samsung Electronics Co., Ltd. All rights reserved.
* Permission is hereby granted, free of charge, to any person obtaining a copy
diff --git a/thirdparty/thorvg/src/lib/sw_engine/tvgSwRaster.cpp b/thirdparty/thorvg/src/lib/sw_engine/tvgSwRaster.cpp
index 810df8d435..ffd74bdd47 100644
--- a/thirdparty/thorvg/src/lib/sw_engine/tvgSwRaster.cpp
+++ b/thirdparty/thorvg/src/lib/sw_engine/tvgSwRaster.cpp
@@ -1,4 +1,4 @@
-/*
+/*
* Copyright (c) 2020 - 2022 Samsung Electronics Co., Ltd. All rights reserved.
* Permission is hereby granted, free of charge, to any person obtaining a copy
@@ -22,10 +22,10 @@
#ifdef _WIN32
#include <malloc.h>
-#elif defined(__FreeBSD__)
- #include <stdlib.h>
-#else
+#elif defined(__linux__)
#include <alloca.h>
+#else
+ #include <stdlib.h>
#endif
#include "tvgMath.h"
diff --git a/thirdparty/thorvg/src/lib/sw_engine/tvgSwRasterTexmapInternal.h b/thirdparty/thorvg/src/lib/sw_engine/tvgSwRasterTexmapInternal.h
index f31ea1eb97..50536299b1 100644
--- a/thirdparty/thorvg/src/lib/sw_engine/tvgSwRasterTexmapInternal.h
+++ b/thirdparty/thorvg/src/lib/sw_engine/tvgSwRasterTexmapInternal.h
@@ -30,7 +30,7 @@
int32_t dw = surface->stride;
int32_t x1, x2, x, y, ar, ab, iru, irv, px, ay;
int32_t vv = 0, uu = 0;
- int32_t minx, maxx;
+ int32_t minx = INT32_MAX, maxx = INT32_MIN;
float dx, u, v, iptr;
uint32_t* buf;
SwSpan* span = nullptr; //used only when rle based.
diff --git a/thirdparty/thorvg/src/lib/sw_engine/tvgSwStroke.cpp b/thirdparty/thorvg/src/lib/sw_engine/tvgSwStroke.cpp
index be4392740e..fa213cc5d3 100644
--- a/thirdparty/thorvg/src/lib/sw_engine/tvgSwStroke.cpp
+++ b/thirdparty/thorvg/src/lib/sw_engine/tvgSwStroke.cpp
@@ -1,4 +1,4 @@
-/*
+/*
* Copyright (c) 2020 - 2022 Samsung Electronics Co., Ltd. All rights reserved.
* Permission is hereby granted, free of charge, to any person obtaining a copy
diff --git a/thirdparty/thorvg/src/lib/tvgLoadModule.h b/thirdparty/thorvg/src/lib/tvgLoadModule.h
index bfcc165f31..004983152b 100644
--- a/thirdparty/thorvg/src/lib/tvgLoadModule.h
+++ b/thirdparty/thorvg/src/lib/tvgLoadModule.h
@@ -36,7 +36,6 @@ public:
float vw = 0;
float vh = 0;
float w = 0, h = 0; //default image size
- bool preserveAspect = true; //keep aspect ratio by default.
virtual ~LoadModule() {}
diff --git a/thirdparty/thorvg/src/loaders/external_jpg/tvgJpgLoader.cpp b/thirdparty/thorvg/src/loaders/external_jpg/tvgJpgLoader.cpp
deleted file mode 100644
index 522c3c71a6..0000000000
--- a/thirdparty/thorvg/src/loaders/external_jpg/tvgJpgLoader.cpp
+++ /dev/null
@@ -1,164 +0,0 @@
-/*
- * Copyright (c) 2021 - 2022 Samsung Electronics Co., Ltd. All rights reserved.
-
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
-
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
-
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-#include <memory.h>
-#include <turbojpeg.h>
-#include "tvgLoader.h"
-#include "tvgJpgLoader.h"
-
-/************************************************************************/
-/* Internal Class Implementation */
-/************************************************************************/
-
-void JpgLoader::clear()
-{
- if (freeData) free(data);
- data = nullptr;
- size = 0;
- freeData = false;
-}
-
-/************************************************************************/
-/* External Class Implementation */
-/************************************************************************/
-
-JpgLoader::JpgLoader()
-{
- jpegDecompressor = tjInitDecompress();
-}
-
-
-JpgLoader::~JpgLoader()
-{
- if (freeData) free(data);
- tjDestroy(jpegDecompressor);
-
- //This image is shared with raster engine.
- tjFree(image);
-}
-
-
-bool JpgLoader::open(const string& path)
-{
- clear();
-
- auto jpegFile = fopen(path.c_str(), "rb");
- if (!jpegFile) return false;
-
- auto ret = false;
-
- //determine size
- if (fseek(jpegFile, 0, SEEK_END) < 0) goto finalize;
- if (((size = ftell(jpegFile)) < 1)) goto finalize;
- if (fseek(jpegFile, 0, SEEK_SET)) goto finalize;
-
- data = (unsigned char *) malloc(size);
- if (!data) goto finalize;
-
- freeData = true;
-
- if (fread(data, size, 1, jpegFile) < 1) goto failure;
-
- int width, height, subSample, colorSpace;
- if (tjDecompressHeader3(jpegDecompressor, data, size, &width, &height, &subSample, &colorSpace) < 0) {
- TVGERR("JPG LOADER", "%s", tjGetErrorStr());
- goto failure;
- }
-
- w = static_cast<float>(width);
- h = static_cast<float>(height);
- ret = true;
-
- goto finalize;
-
-failure:
- clear();
-
-finalize:
- fclose(jpegFile);
- return ret;
-}
-
-
-bool JpgLoader::open(const char* data, uint32_t size, bool copy)
-{
- clear();
-
- int width, height, subSample, colorSpace;
- if (tjDecompressHeader3(jpegDecompressor, (unsigned char *) data, size, &width, &height, &subSample, &colorSpace) < 0) return false;
-
- if (copy) {
- this->data = (unsigned char *) malloc(size);
- if (!this->data) return false;
- memcpy((unsigned char *)this->data, data, size);
- freeData = true;
- } else {
- this->data = (unsigned char *) data;
- freeData = false;
- }
-
- w = static_cast<float>(width);
- h = static_cast<float>(height);
- this->size = size;
-
- return true;
-}
-
-
-bool JpgLoader::read()
-{
- if (image) tjFree(image);
- image = (unsigned char *)tjAlloc(static_cast<int>(w) * static_cast<int>(h) * tjPixelSize[TJPF_BGRX]);
- if (!image) return false;
-
- //decompress jpg image
- if (tjDecompress2(jpegDecompressor, data, size, image, static_cast<int>(w), 0, static_cast<int>(h), TJPF_BGRX, 0) < 0) {
- TVGERR("JPG LOADER", "%s", tjGetErrorStr());
- tjFree(image);
- image = nullptr;
- return false;
- }
-
- return true;
-}
-
-
-bool JpgLoader::close()
-{
- clear();
- return true;
-}
-
-
-unique_ptr<Surface> JpgLoader::bitmap()
-{
- if (!image) return nullptr;
-
- auto surface = static_cast<Surface*>(malloc(sizeof(Surface)));
- surface->buffer = (uint32_t*)(image);
- surface->stride = w;
- surface->w = w;
- surface->h = h;
- surface->cs = SwCanvas::ARGB8888;
-
- return unique_ptr<Surface>(surface);
-}
diff --git a/thirdparty/thorvg/src/loaders/external_jpg/tvgJpgLoader.h b/thirdparty/thorvg/src/loaders/external_jpg/tvgJpgLoader.h
deleted file mode 100644
index 3f82af8003..0000000000
--- a/thirdparty/thorvg/src/loaders/external_jpg/tvgJpgLoader.h
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright (c) 2021 - 2022 Samsung Electronics Co., Ltd. All rights reserved.
-
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
-
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
-
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-#ifndef _TVG_JPG_LOADER_H_
-#define _TVG_JPG_LOADER_H_
-
-using tjhandle = void*;
-
-//TODO: Use Task?
-class JpgLoader : public LoadModule
-{
-public:
- JpgLoader();
- ~JpgLoader();
-
- using LoadModule::open;
- bool open(const string& path) override;
- bool open(const char* data, uint32_t size, bool copy) override;
- bool read() override;
- bool close() override;
-
- unique_ptr<Surface> bitmap() override;
-
-private:
- void clear();
-
- tjhandle jpegDecompressor;
- unsigned char* data = nullptr;
- unsigned char *image = nullptr;
- unsigned long size = 0;
- bool freeData = false;
-};
-
-#endif //_TVG_JPG_LOADER_H_
diff --git a/thirdparty/thorvg/src/loaders/jpg/tvgJpgLoader.cpp b/thirdparty/thorvg/src/loaders/jpg/tvgJpgLoader.cpp
index ffdef3004c..d11dfc1e7c 100644
--- a/thirdparty/thorvg/src/loaders/jpg/tvgJpgLoader.cpp
+++ b/thirdparty/thorvg/src/loaders/jpg/tvgJpgLoader.cpp
@@ -118,9 +118,9 @@ unique_ptr<Surface> JpgLoader::bitmap()
auto surface = static_cast<Surface*>(malloc(sizeof(Surface)));
surface->buffer = (uint32_t*)(image);
- surface->stride = w;
- surface->w = w;
- surface->h = h;
+ surface->stride = static_cast<uint32_t>(w);
+ surface->w = static_cast<uint32_t>(w);
+ surface->h = static_cast<uint32_t>(h);
surface->cs = SwCanvas::ARGB8888;
return unique_ptr<Surface>(surface);
diff --git a/thirdparty/thorvg/src/loaders/jpg/tvgJpgd.cpp b/thirdparty/thorvg/src/loaders/jpg/tvgJpgd.cpp
index dacd45ed03..56b40acf0b 100644
--- a/thirdparty/thorvg/src/loaders/jpg/tvgJpgd.cpp
+++ b/thirdparty/thorvg/src/loaders/jpg/tvgJpgd.cpp
@@ -808,7 +808,7 @@ inline int jpeg_decoder::huff_decode(huff_tables *pH, int& extra_bits)
// Tables and macro used to fully decode the DPCM differences.
static const int s_extend_test[16] = { 0, 0x0001, 0x0002, 0x0004, 0x0008, 0x0010, 0x0020, 0x0040, 0x0080, 0x0100, 0x0200, 0x0400, 0x0800, 0x1000, 0x2000, 0x4000 };
-static const unsigned int s_extend_offset[16] = { 0, ((-1u)<<1) + 1, ((-1u)<<2) + 1, ((-1u)<<3) + 1, ((-1u)<<4) + 1, ((-1u)<<5) + 1, ((-1u)<<6) + 1, ((-1u)<<7) + 1, ((-1u)<<8) + 1, ((-1u)<<9) + 1, ((-1u)<<10) + 1, ((-1u)<<11) + 1, ((-1u)<<12) + 1, ((-1u)<<13) + 1, ((-1u)<<14) + 1, ((-1u)<<15) + 1 };
+static const unsigned int s_extend_offset[16] = { 0, ((~0u)<<1) + 1, ((~0u)<<2) + 1, ((~0u)<<3) + 1, ((~0u)<<4) + 1, ((~0u)<<5) + 1, ((~0u)<<6) + 1, ((~0u)<<7) + 1, ((~0u)<<8) + 1, ((~0u)<<9) + 1, ((~0u)<<10) + 1, ((~0u)<<11) + 1, ((~0u)<<12) + 1, ((~0u)<<13) + 1, ((~0u)<<14) + 1, ((~0u)<<15) + 1 };
// The logical AND's in this macro are to shut up static code analysis (aren't really necessary - couldn't find another way to do this)
#define JPGD_HUFF_EXTEND(x, s) (((x) < s_extend_test[s & 15]) ? ((x) + s_extend_offset[s & 15]) : (x))
diff --git a/thirdparty/thorvg/src/loaders/png/tvgLodePng.cpp b/thirdparty/thorvg/src/loaders/png/tvgLodePng.cpp
deleted file mode 100644
index eaed025c54..0000000000
--- a/thirdparty/thorvg/src/loaders/png/tvgLodePng.cpp
+++ /dev/null
@@ -1,2647 +0,0 @@
-/*
- * Copyright (c) 2020 - 2022 Samsung Electronics Co., Ltd. All rights reserved.
-
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
-
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
-
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-/*
- LodePNG version 20200306
-
- Copyright (c) 2005-2020 Lode Vandevenne
-
- This software is provided 'as-is', without any express or implied
- warranty. In no event will the authors be held liable for any damages
- arising from the use of this software.
-
- Permission is granted to anyone to use this software for any purpose,
- including commercial applications, and to alter it and redistribute it
- freely, subject to the following restrictions:
-
- 1. The origin of this software must not be misrepresented; you must not
- claim that you wrote the original software. If you use this software
- in a product, an acknowledgment in the product documentation would be
- appreciated but is not required.
-
- 2. Altered source versions must be plainly marked as such, and must not be
- misrepresented as being the original software.
-
- 3. This notice may not be removed or altered from any sourcedistribution.
-*/
-
-#include <cstdlib>
-#include "tvgLodePng.h"
-
-
-/************************************************************************/
-/* Internal Class Implementation */
-/************************************************************************/
-
-#if defined(_MSC_VER) && (_MSC_VER >= 1310) /*Visual Studio: A few warning types are not desired here.*/
- #pragma warning( disable : 4244 ) /*implicit conversions: not warned by gcc -Wall -Wextra and requires too much casts*/
- #pragma warning( disable : 4996 ) /*VS does not like fopen, but fopen_s is not standard C so unusable here*/
-#endif /*_MSC_VER */
-
-
-/* convince the compiler to inline a function, for use when this measurably improves performance */
-/* inline is not available in C90, but use it when supported by the compiler */
-#if (defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L)) || (defined(__cplusplus) && (__cplusplus >= 199711L))
- #define LODEPNG_INLINE inline
-#else
- #define LODEPNG_INLINE /* not available */
-#endif
-
-/* restrict is not available in C90, but use it when supported by the compiler */
-#if (defined(__GNUC__) && (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 1))) ||\
- (defined(_MSC_VER) && (_MSC_VER >= 1400)) || \
- (defined(__WATCOMC__) && (__WATCOMC__ >= 1250) && !defined(__cplusplus))
- #define LODEPNG_RESTRICT __restrict
-#else
- #define LODEPNG_RESTRICT /* not available */
-#endif
-
-#define LODEPNG_MAX(a, b) (((a) > (b)) ? (a) : (b))
-#define LODEPNG_MIN(a, b) (((a) < (b)) ? (a) : (b))
-#define LODEPNG_ABS(x) ((x) < 0 ? -(x) : (x))
-
-
-/* Replacements for C library functions such as memcpy and strlen, to support platforms
-where a full C library is not available. The compiler can recognize them and compile
-to something as fast. */
-
-static void lodepng_memcpy(void* LODEPNG_RESTRICT dst, const void* LODEPNG_RESTRICT src, size_t size)
-{
- size_t i;
- for (i = 0; i < size; i++) ((char*)dst)[i] = ((const char*)src)[i];
-}
-
-
-static void lodepng_memset(void* LODEPNG_RESTRICT dst, int value, size_t num)
-{
- size_t i;
- for (i = 0; i < num; i++) ((char*)dst)[i] = (char)value;
-}
-
-
-/* does not check memory out of bounds, do not use on untrusted data */
-static size_t lodepng_strlen(const char* a)
-{
- const char* orig = a;
- /* avoid warning about unused function in case of disabled COMPILE... macros */
- (void)(&lodepng_strlen);
- while (*a) a++;
- return (size_t)(a - orig);
-}
-
-
-/* Safely check if adding two integers will overflow (no undefined
-behavior, compiler removing the code, etc...) and output result. */
-static int lodepng_addofl(size_t a, size_t b, size_t* result)
-{
- *result = a + b; /* Unsigned addition is well defined and safe in C90 */
- return *result < a;
-}
-
-
-/* Safely check if multiplying two integers will overflow (no undefined
-behavior, compiler removing the code, etc...) and output result. */
-static int lodepng_mulofl(size_t a, size_t b, size_t* result)
-{
- *result = a * b; /* Unsigned multiplication is well defined and safe in C90 */
- return (a != 0 && *result / a != b);
-}
-
-
-/* Safely check if a + b > c, even if overflow could happen. */
-static int lodepng_gtofl(size_t a, size_t b, size_t c)
-{
- size_t d;
- if (lodepng_addofl(a, b, &d)) return 1;
- return d > c;
-}
-
-
-/*
- Often in case of an error a value is assigned to a variable and then it breaks
- out of a loop (to go to the cleanup phase of a function). This macro does that.
- It makes the error handling code shorter and more readable.
-
- Example: if(!uivector_resize(&lz77_encoded, datasize)) ERROR_BREAK(83);
-*/
-#define CERROR_BREAK(errorvar, code){\
- errorvar = code;\
- break;\
-}
-
-/* version of CERROR_BREAK that assumes the common case where the error variable is named "error" */
-#define ERROR_BREAK(code) CERROR_BREAK(error, code)
-
-/* Set error var to the error code, and return it.*/
-#define CERROR_RETURN_ERROR(errorvar, code){\
- errorvar = code;\
- return code;\
-}
-
-/* Try the code, if it returns error, also return the error. */
-#define CERROR_TRY_RETURN(call){\
- unsigned error = call;\
- if(error) return error;\
-}
-
-/* Set error var to the error code, and return from the void function. */
-#define CERROR_RETURN(errorvar, code){\
- errorvar = code;\
- return;\
-}
-
-
-/* dynamic vector of unsigned chars */
-struct ucvector
-{
- unsigned char* data;
- size_t size; /*used size*/
- size_t allocsize; /*allocated size*/
-};
-
-
-/* returns 1 if success, 0 if failure ==> nothing done */
-static unsigned ucvector_resize(ucvector* p, size_t size)
-{
- if (size > p->allocsize) {
- size_t newsize = size + (p->allocsize >> 1u);
- void* data = realloc(p->data, newsize);
- if(data) {
- p->allocsize = newsize;
- p->data = (unsigned char*)data;
- }
- else return 0; /*error: not enough memory*/
- }
- p->size = size;
- return 1; /*success*/
-}
-
-
-static ucvector ucvector_init(unsigned char* buffer, size_t size)
-{
- ucvector v;
- v.data = buffer;
- v.allocsize = v.size = size;
- return v;
-}
-
-
-static unsigned lodepng_read32bitInt(const unsigned char* buffer)
-{
- return (((unsigned)buffer[0] << 24u) | ((unsigned)buffer[1] << 16u) | ((unsigned)buffer[2] << 8u) | (unsigned)buffer[3]);
-}
-
-
-/* ////////////////////////////////////////////////////////////////////////// */
-/* ////////////////////////////////////////////////////////////////////////// */
-/* // End of common code and tools. Begin of Zlib related code. // */
-/* ////////////////////////////////////////////////////////////////////////// */
-/* ////////////////////////////////////////////////////////////////////////// */
-
-struct LodePNGBitReader
-{
- const unsigned char* data;
- size_t size; /*size of data in bytes*/
- size_t bitsize; /*size of data in bits, end of valid bp values, should be 8*size*/
- size_t bp;
- unsigned buffer; /*buffer for reading bits. NOTE: 'unsigned' must support at least 32 bits*/
-};
-
-
-/* data size argument is in bytes. Returns error if size too large causing overflow */
-static unsigned LodePNGBitReader_init(LodePNGBitReader* reader, const unsigned char* data, size_t size)
-{
- size_t temp;
- reader->data = data;
- reader->size = size;
- /* size in bits, return error if overflow (if size_t is 32 bit this supports up to 500MB) */
- if (lodepng_mulofl(size, 8u, &reader->bitsize)) return 105;
- /*ensure incremented bp can be compared to bitsize without overflow even when it would be incremented 32 too much and
- trying to ensure 32 more bits*/
- if (lodepng_addofl(reader->bitsize, 64u, &temp)) return 105;
- reader->bp = 0;
- reader->buffer = 0;
- return 0; /*ok*/
- }
-
-/*
- ensureBits functions:
- Ensures the reader can at least read nbits bits in one or more readBits calls,
- safely even if not enough bits are available.
- Returns 1 if there are enough bits available, 0 if not.
-*/
-
-/*See ensureBits documentation above. This one ensures exactly 1 bit */
-/*static unsigned ensureBits1(LodePNGBitReader* reader) {
- if(reader->bp >= reader->bitsize) return 0;
- reader->buffer = (unsigned)reader->data[reader->bp >> 3u] >> (reader->bp & 7u);
- return 1;
-}*/
-
-/*See ensureBits documentation above. This one ensures up to 9 bits */
-static unsigned ensureBits9(LodePNGBitReader* reader, size_t nbits)
-{
- size_t start = reader->bp >> 3u;
- size_t size = reader->size;
- if (start + 1u < size) {
- reader->buffer = (unsigned)reader->data[start + 0] | ((unsigned)reader->data[start + 1] << 8u);
- reader->buffer >>= (reader->bp & 7u);
- return 1;
- } else {
- reader->buffer = 0;
- if (start + 0u < size) reader->buffer |= reader->data[start + 0];
- reader->buffer >>= (reader->bp & 7u);
- return reader->bp + nbits <= reader->bitsize;
- }
-}
-
-
-/*See ensureBits documentation above. This one ensures up to 17 bits */
-static unsigned ensureBits17(LodePNGBitReader* reader, size_t nbits)
-{
- size_t start = reader->bp >> 3u;
- size_t size = reader->size;
- if (start + 2u < size) {
- reader->buffer = (unsigned)reader->data[start + 0] | ((unsigned)reader->data[start + 1] << 8u) | ((unsigned)reader->data[start + 2] << 16u);
- reader->buffer >>= (reader->bp & 7u);
- return 1;
- } else {
- reader->buffer = 0;
- if (start + 0u < size) reader->buffer |= reader->data[start + 0];
- if (start + 1u < size) reader->buffer |= ((unsigned)reader->data[start + 1] << 8u);
- reader->buffer >>= (reader->bp & 7u);
- return reader->bp + nbits <= reader->bitsize;
- }
-}
-
-
-/*See ensureBits documentation above. This one ensures up to 25 bits */
-static LODEPNG_INLINE unsigned ensureBits25(LodePNGBitReader* reader, size_t nbits)
-{
- size_t start = reader->bp >> 3u;
- size_t size = reader->size;
- if (start + 3u < size) {
- reader->buffer = (unsigned)reader->data[start + 0] | ((unsigned)reader->data[start + 1] << 8u) | ((unsigned)reader->data[start + 2] << 16u) | ((unsigned)reader->data[start + 3] << 24u);
- reader->buffer >>= (reader->bp & 7u);
- return 1;
- } else {
- reader->buffer = 0;
- if (start + 0u < size) reader->buffer |= reader->data[start + 0];
- if (start + 1u < size) reader->buffer |= ((unsigned)reader->data[start + 1] << 8u);
- if (start + 2u < size) reader->buffer |= ((unsigned)reader->data[start + 2] << 16u);
- reader->buffer >>= (reader->bp & 7u);
- return reader->bp + nbits <= reader->bitsize;
- }
-}
-
-
-/*See ensureBits documentation above. This one ensures up to 32 bits */
-static LODEPNG_INLINE unsigned ensureBits32(LodePNGBitReader* reader, size_t nbits)
-{
- size_t start = reader->bp >> 3u;
- size_t size = reader->size;
- if(start + 4u < size) {
- reader->buffer = (unsigned)reader->data[start + 0] | ((unsigned)reader->data[start + 1] << 8u) | ((unsigned)reader->data[start + 2] << 16u) | ((unsigned)reader->data[start + 3] << 24u);
- reader->buffer >>= (reader->bp & 7u);
- reader->buffer |= (((unsigned)reader->data[start + 4] << 24u) << (8u - (reader->bp & 7u)));
- return 1;
- } else {
- reader->buffer = 0;
- if (start + 0u < size) reader->buffer |= reader->data[start + 0];
- if (start + 1u < size) reader->buffer |= ((unsigned)reader->data[start + 1] << 8u);
- if (start + 2u < size) reader->buffer |= ((unsigned)reader->data[start + 2] << 16u);
- if (start + 3u < size) reader->buffer |= ((unsigned)reader->data[start + 3] << 24u);
- reader->buffer >>= (reader->bp & 7u);
- return reader->bp + nbits <= reader->bitsize;
- }
-}
-
-
-/* Get bits without advancing the bit pointer. Must have enough bits available with ensureBits. Max nbits is 31. */
-static unsigned peekBits(LodePNGBitReader* reader, size_t nbits)
-{
- /* The shift allows nbits to be only up to 31. */
- return reader->buffer & ((1u << nbits) - 1u);
-}
-
-
-/* Must have enough bits available with ensureBits */
-static void advanceBits(LodePNGBitReader* reader, size_t nbits)
-{
- reader->buffer >>= nbits;
- reader->bp += nbits;
-}
-
-
-/* Must have enough bits available with ensureBits */
-static unsigned readBits(LodePNGBitReader* reader, size_t nbits)
-{
- unsigned result = peekBits(reader, nbits);
- advanceBits(reader, nbits);
- return result;
-}
-
-
-/* Public for testing only. steps and result must have numsteps values. */
-unsigned lode_png_test_bitreader(const unsigned char* data, size_t size, size_t numsteps, const size_t* steps, unsigned* result)
-{
- size_t i;
- LodePNGBitReader reader;
- unsigned error = LodePNGBitReader_init(&reader, data, size);
- if (error) return 0;
- for (i = 0; i < numsteps; i++) {
- size_t step = steps[i];
- unsigned ok;
- if (step > 25) ok = ensureBits32(&reader, step);
- else if (step > 17) ok = ensureBits25(&reader, step);
- else if (step > 9) ok = ensureBits17(&reader, step);
- else ok = ensureBits9(&reader, step);
- if (!ok) return 0;
- result[i] = readBits(&reader, step);
- }
- return 1;
-}
-
-
-static unsigned reverseBits(unsigned bits, unsigned num)
-{
- /*TODO: implement faster lookup table based version when needed*/
- unsigned i, result = 0;
- for (i = 0; i < num; i++) result |= ((bits >> (num - i - 1u)) & 1u) << i;
- return result;
-}
-
-/* ////////////////////////////////////////////////////////////////////////// */
-/* / Deflate - Huffman / */
-/* ////////////////////////////////////////////////////////////////////////// */
-
-#define FIRST_LENGTH_CODE_INDEX 257
-#define LAST_LENGTH_CODE_INDEX 285
-/*256 literals, the end code, some length codes, and 2 unused codes*/
-#define NUM_DEFLATE_CODE_SYMBOLS 288
-/*the distance codes have their own symbols, 30 used, 2 unused*/
-#define NUM_DISTANCE_SYMBOLS 32
-/*the code length codes. 0-15: code lengths, 16: copy previous 3-6 times, 17: 3-10 zeros, 18: 11-138 zeros*/
-#define NUM_CODE_LENGTH_CODES 19
-
-/*the base lengths represented by codes 257-285*/
-static const unsigned LENGTHBASE[29]
- = {3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31, 35, 43, 51, 59,
- 67, 83, 99, 115, 131, 163, 195, 227, 258};
-
-/*the extra bits used by codes 257-285 (added to base length)*/
-static const unsigned LENGTHEXTRA[29]
- = {0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3,
- 4, 4, 4, 4, 5, 5, 5, 5, 0};
-
-/*the base backwards distances (the bits of distance codes appear after length codes and use their own huffman tree)*/
-static const unsigned DISTANCEBASE[30]
- = {1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193, 257, 385, 513,
- 769, 1025, 1537, 2049, 3073, 4097, 6145, 8193, 12289, 16385, 24577};
-
-/*the extra bits of backwards distances (added to base)*/
-static const unsigned DISTANCEEXTRA[30]
- = {0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8,
- 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13};
-
-/*the order in which "code length alphabet code lengths" are stored as specified by deflate, out of this the huffman
-tree of the dynamic huffman tree lengths is generated*/
-static const unsigned CLCL_ORDER[NUM_CODE_LENGTH_CODES]
- = {16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15};
-
-/* ////////////////////////////////////////////////////////////////////////// */
-
-/*
-Huffman tree struct, containing multiple representations of the tree
-*/
-struct HuffmanTree
-{
- unsigned* codes; /*the huffman codes (bit patterns representing the symbols)*/
- unsigned* lengths; /*the lengths of the huffman codes*/
- unsigned maxbitlen; /*maximum number of bits a single code can get*/
- unsigned numcodes; /*number of symbols in the alphabet = number of codes*/
- /* for reading only */
- unsigned char* table_len; /*length of symbol from lookup table, or max length if secondary lookup needed*/
- unsigned short* table_value; /*value of symbol from lookup table, or pointer to secondary table if needed*/
-};
-
-
-static void HuffmanTree_init(HuffmanTree* tree)
-{
- tree->codes = 0;
- tree->lengths = 0;
- tree->table_len = 0;
- tree->table_value = 0;
-}
-
-
-static void HuffmanTree_cleanup(HuffmanTree* tree)
-{
- free(tree->codes);
- free(tree->lengths);
- free(tree->table_len);
- free(tree->table_value);
-}
-
-
-/* amount of bits for first huffman table lookup (aka root bits), see HuffmanTree_makeTable and huffmanDecodeSymbol.*/
-/* values 8u and 9u work the fastest */
-#define FIRSTBITS 9u
-
-/* a symbol value too big to represent any valid symbol, to indicate reading disallowed huffman bits combination,
-which is possible in case of only 0 or 1 present symbols. */
-#define INVALIDSYMBOL 65535u
-
-/* make table for huffman decoding */
-static unsigned HuffmanTree_makeTable(HuffmanTree* tree)
-{
- static const unsigned headsize = 1u << FIRSTBITS; /*size of the first table*/
- static const unsigned mask = (1u << FIRSTBITS) /*headsize*/ - 1u;
- size_t i, numpresent, pointer, size; /*total table size*/
- unsigned* maxlens = (unsigned*)malloc(headsize * sizeof(unsigned));
- if (!maxlens) return 83; /*alloc fail*/
-
- /* compute maxlens: max total bit length of symbols sharing prefix in the first table*/
- lodepng_memset(maxlens, 0, headsize * sizeof(*maxlens));
- for (i = 0; i < tree->numcodes; i++) {
- unsigned symbol = tree->codes[i];
- unsigned l = tree->lengths[i];
- unsigned index;
- if(l <= FIRSTBITS) continue; /*symbols that fit in first table don't increase secondary table size*/
- /*get the FIRSTBITS MSBs, the MSBs of the symbol are encoded first. See later comment about the reversing*/
- index = reverseBits(symbol >> (l - FIRSTBITS), FIRSTBITS);
- maxlens[index] = LODEPNG_MAX(maxlens[index], l);
- }
- /* compute total table size: size of first table plus all secondary tables for symbols longer than FIRSTBITS */
- size = headsize;
- for (i = 0; i < headsize; ++i) {
- unsigned l = maxlens[i];
- if (l > FIRSTBITS) size += (1u << (l - FIRSTBITS));
- }
- tree->table_len = (unsigned char*)malloc(size * sizeof(*tree->table_len));
- tree->table_value = (unsigned short*)malloc(size * sizeof(*tree->table_value));
- if (!tree->table_len || !tree->table_value) {
- free(maxlens);
- /* freeing tree->table values is done at a higher scope */
- return 83; /*alloc fail*/
- }
- /*initialize with an invalid length to indicate unused entries*/
- for (i = 0; i < size; ++i) tree->table_len[i] = 16;
-
- /*fill in the first table for long symbols: max prefix size and pointer to secondary tables*/
- pointer = headsize;
- for (i = 0; i < headsize; ++i) {
- unsigned l = maxlens[i];
- if(l <= FIRSTBITS) continue;
- tree->table_len[i] = l;
- tree->table_value[i] = pointer;
- pointer += (1u << (l - FIRSTBITS));
- }
- free(maxlens);
-
- /*fill in the first table for short symbols, or secondary table for long symbols*/
- numpresent = 0;
- for (i = 0; i < tree->numcodes; ++i) {
- unsigned l = tree->lengths[i];
- unsigned symbol = tree->codes[i]; /*the huffman bit pattern. i itself is the value.*/
- /*reverse bits, because the huffman bits are given in MSB first order but the bit reader reads LSB first*/
- unsigned reverse = reverseBits(symbol, l);
- if (l == 0) continue;
- numpresent++;
-
- if (l <= FIRSTBITS) {
- /*short symbol, fully in first table, replicated num times if l < FIRSTBITS*/
- unsigned num = 1u << (FIRSTBITS - l);
- unsigned j;
- for (j = 0; j < num; ++j) {
- /*bit reader will read the l bits of symbol first, the remaining FIRSTBITS - l bits go to the MSB's*/
- unsigned index = reverse | (j << l);
- if(tree->table_len[index] != 16) return 55; /*invalid tree: long symbol shares prefix with short symbol*/
- tree->table_len[index] = l;
- tree->table_value[index] = i;
- }
- } else {
- /*long symbol, shares prefix with other long symbols in first lookup table, needs second lookup*/
- /*the FIRSTBITS MSBs of the symbol are the first table index*/
- unsigned index = reverse & mask;
- unsigned maxlen = tree->table_len[index];
- /*log2 of secondary table length, should be >= l - FIRSTBITS*/
- unsigned tablelen = maxlen - FIRSTBITS;
- unsigned start = tree->table_value[index]; /*starting index in secondary table*/
- unsigned num = 1u << (tablelen - (l - FIRSTBITS)); /*amount of entries of this symbol in secondary table*/
- unsigned j;
- if (maxlen < l) return 55; /*invalid tree: long symbol shares prefix with short symbol*/
- for (j = 0; j < num; ++j) {
- unsigned reverse2 = reverse >> FIRSTBITS; /* l - FIRSTBITS bits */
- unsigned index2 = start + (reverse2 | (j << (l - FIRSTBITS)));
- tree->table_len[index2] = l;
- tree->table_value[index2] = i;
- }
- }
- }
-
- if (numpresent < 2) {
- /* In case of exactly 1 symbol, in theory the huffman symbol needs 0 bits,
- but deflate uses 1 bit instead. In case of 0 symbols, no symbols can
- appear at all, but such a huffman tree could still exist (e.g. if distance
- codes are never used). In both cases, not all entries of the table will have been
- filled in. Fill them in with an invalid symbol value so that returning them from
- huffmanDecodeSymbol will cause an error. */
- for (i = 0; i < size; ++i) {
- if (tree->table_len[i] == 16) {
- /* As length, use a value smaller than FIRSTBITS for the head table,
- and a value larger than FIRSTBITS for the secondary table, to ensure
- valid behavior for advanceBits when reading this symbol. */
- tree->table_len[i] = (i < headsize) ? 1 : (FIRSTBITS + 1);
- tree->table_value[i] = INVALIDSYMBOL;
- }
- }
- } else {
- /* A good huffman tree has N * 2 - 1 nodes, of which N - 1 are internal nodes.
- If that is not the case (due to too long length codes), the table will not
- have been fully used, and this is an error (not all bit combinations can be
- decoded): an oversubscribed huffman tree, indicated by error 55. */
- for (i = 0; i < size; ++i) {
- if (tree->table_len[i] == 16) return 55;
- }
- }
- return 0;
-}
-
-
-/*
- Second step for the ...makeFromLengths and ...makeFromFrequencies functions.
- numcodes, lengths and maxbitlen must already be filled in correctly. return
- value is error.
-*/
-static unsigned HuffmanTree_makeFromLengths2(HuffmanTree* tree)
-{
- unsigned* blcount;
- unsigned* nextcode;
- unsigned error = 0;
- unsigned bits, n;
-
- tree->codes = (unsigned*)malloc(tree->numcodes * sizeof(unsigned));
- blcount = (unsigned*)malloc((tree->maxbitlen + 1) * sizeof(unsigned));
- nextcode = (unsigned*)malloc((tree->maxbitlen + 1) * sizeof(unsigned));
- if (!tree->codes || !blcount || !nextcode) error = 83; /*alloc fail*/
-
- if (!error) {
- for (n = 0; n != tree->maxbitlen + 1; n++) blcount[n] = nextcode[n] = 0;
- /*step 1: count number of instances of each code length*/
- for (bits = 0; bits != tree->numcodes; ++bits) ++blcount[tree->lengths[bits]];
- /*step 2: generate the nextcode values*/
- for(bits = 1; bits <= tree->maxbitlen; ++bits) {
- nextcode[bits] = (nextcode[bits - 1] + blcount[bits - 1]) << 1u;
- }
- /*step 3: generate all the codes*/
- for (n = 0; n != tree->numcodes; ++n) {
- if (tree->lengths[n] != 0) {
- tree->codes[n] = nextcode[tree->lengths[n]]++;
- /*remove superfluous bits from the code*/
- tree->codes[n] &= ((1u << tree->lengths[n]) - 1u);
- }
- }
- }
-
- free(blcount);
- free(nextcode);
-
- if (!error) error = HuffmanTree_makeTable(tree);
- return error;
-}
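-
-/* Illustrative example of the canonical code assignment above, assuming an alphabet of
-4 symbols with lengths {2, 1, 3, 3}: blcount becomes {0, 1, 1, 2}, nextcode becomes
-{0, 0, 2, 6}, and the generated codes are 10, 0, 110 and 111 (MSB-first), exactly the
-prefix-free assignment deflate requires. */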
-
-
-/*
- given the code lengths (as stored in the PNG file), generate the tree as defined
- by Deflate. maxbitlen is the maximum bits that a code in the tree can have.
- return value is error.
-*/
-static unsigned HuffmanTree_makeFromLengths(HuffmanTree* tree, const unsigned* bitlen, size_t numcodes, unsigned maxbitlen)
-{
- unsigned i;
- tree->lengths = (unsigned*)malloc(numcodes * sizeof(unsigned));
- if (!tree->lengths) return 83; /*alloc fail*/
- for (i = 0; i != numcodes; ++i) tree->lengths[i] = bitlen[i];
- tree->numcodes = (unsigned)numcodes; /*number of symbols*/
- tree->maxbitlen = maxbitlen;
- return HuffmanTree_makeFromLengths2(tree);
-}
-
-
-/*get the literal and length code tree of a deflated block with fixed tree, as per the deflate specification*/
-static unsigned generateFixedLitLenTree(HuffmanTree* tree)
-{
- unsigned i, error = 0;
- unsigned* bitlen = (unsigned*)malloc(NUM_DEFLATE_CODE_SYMBOLS * sizeof(unsigned));
- if (!bitlen) return 83; /*alloc fail*/
-
- /*288 possible codes: 0-255=literals, 256=endcode, 257-285=lengthcodes, 286-287=unused*/
- for (i = 0; i <= 143; ++i) bitlen[i] = 8;
- for (i = 144; i <= 255; ++i) bitlen[i] = 9;
- for (i = 256; i <= 279; ++i) bitlen[i] = 7;
- for (i = 280; i <= 287; ++i) bitlen[i] = 8;
-
- error = HuffmanTree_makeFromLengths(tree, bitlen, NUM_DEFLATE_CODE_SYMBOLS, 15);
-
- free(bitlen);
- return error;
-}
-
-
-/*get the distance code tree of a deflated block with fixed tree, as specified in the deflate specification*/
-static unsigned generateFixedDistanceTree(HuffmanTree* tree)
-{
- unsigned i, error = 0;
- unsigned* bitlen = (unsigned*)malloc(NUM_DISTANCE_SYMBOLS * sizeof(unsigned));
- if (!bitlen) return 83; /*alloc fail*/
-
- /*there are 32 distance codes, but 30-31 are unused*/
- for (i = 0; i != NUM_DISTANCE_SYMBOLS; ++i) bitlen[i] = 5;
- error = HuffmanTree_makeFromLengths(tree, bitlen, NUM_DISTANCE_SYMBOLS, 15);
-
- free(bitlen);
- return error;
-}
-
-
-/*
- Returns the decoded symbol. At least 15 bits must already have been ensured in the bit reader.
-*/
-static unsigned huffmanDecodeSymbol(LodePNGBitReader* reader, const HuffmanTree* codetree)
-{
- unsigned short code = peekBits(reader, FIRSTBITS);
- unsigned short l = codetree->table_len[code];
- unsigned short value = codetree->table_value[code];
- if (l <= FIRSTBITS) {
- advanceBits(reader, l);
- return value;
- } else {
- unsigned index2;
- advanceBits(reader, FIRSTBITS);
- index2 = value + peekBits(reader, l - FIRSTBITS);
- advanceBits(reader, codetree->table_len[index2] - FIRSTBITS);
- return codetree->table_value[index2];
- }
-}
-
-
-/* ////////////////////////////////////////////////////////////////////////// */
-/* / Inflator (Decompressor) / */
-/* ////////////////////////////////////////////////////////////////////////// */
-
-/*get the tree of a deflated block with fixed tree, as specified in the deflate specification
-Returns error code.*/
-static unsigned getTreeInflateFixed(HuffmanTree* tree_ll, HuffmanTree* tree_d)
-{
- unsigned error = generateFixedLitLenTree(tree_ll);
- if (error) return error;
- return generateFixedDistanceTree(tree_d);
-}
-
-
-/*get the tree of a deflated block with dynamic tree, the tree itself is also Huffman compressed with a known tree*/
-static unsigned getTreeInflateDynamic(HuffmanTree* tree_ll, HuffmanTree* tree_d, LodePNGBitReader* reader)
-{
- /*make sure that length values that aren't filled in will be 0, or a wrong tree will be generated*/
- unsigned error = 0;
- unsigned n, HLIT, HDIST, HCLEN, i;
-
- /*see comments in deflateDynamic for explanation of the context and these variables, it is analogous*/
- unsigned* bitlen_ll = 0; /*lit,len code lengths*/
- unsigned* bitlen_d = 0; /*dist code lengths*/
- /*code length code lengths ("clcl"), the bit lengths of the huffman tree used to compress bitlen_ll and bitlen_d*/
- unsigned* bitlen_cl = 0;
- HuffmanTree tree_cl; /*the code tree for code length codes (the huffman tree for compressed huffman trees)*/
-
- if (!ensureBits17(reader, 14)) return 49; /*error: the bit pointer is or will go past the memory*/
-
- /*number of literal/length codes + 257. Unlike the spec, the value 257 is added to it here already*/
- HLIT = readBits(reader, 5) + 257;
- /*number of distance codes. Unlike the spec, the value 1 is added to it here already*/
- HDIST = readBits(reader, 5) + 1;
- /*number of code length codes. Unlike the spec, the value 4 is added to it here already*/
- HCLEN = readBits(reader, 4) + 4;
-
- bitlen_cl = (unsigned*)malloc(NUM_CODE_LENGTH_CODES * sizeof(unsigned));
- if(!bitlen_cl) return 83 /*alloc fail*/;
-
- HuffmanTree_init(&tree_cl);
-
- while (!error) {
- /*read the code length codes out of 3 * (amount of code length codes) bits*/
- if (lodepng_gtofl(reader->bp, HCLEN * 3, reader->bitsize)) {
- ERROR_BREAK(50); /*error: the bit pointer is or will go past the memory*/
- }
- for (i = 0; i != HCLEN; ++i) {
- ensureBits9(reader, 3); /*out of bounds already checked above */
- bitlen_cl[CLCL_ORDER[i]] = readBits(reader, 3);
- }
- for (i = HCLEN; i != NUM_CODE_LENGTH_CODES; ++i) {
- bitlen_cl[CLCL_ORDER[i]] = 0;
- }
-
- error = HuffmanTree_makeFromLengths(&tree_cl, bitlen_cl, NUM_CODE_LENGTH_CODES, 7);
- if(error) break;
-
- /*now we can use this tree to read the lengths for the tree that this function will return*/
- bitlen_ll = (unsigned*)malloc(NUM_DEFLATE_CODE_SYMBOLS * sizeof(unsigned));
- bitlen_d = (unsigned*)malloc(NUM_DISTANCE_SYMBOLS * sizeof(unsigned));
- if (!bitlen_ll || !bitlen_d) ERROR_BREAK(83 /*alloc fail*/);
- lodepng_memset(bitlen_ll, 0, NUM_DEFLATE_CODE_SYMBOLS * sizeof(*bitlen_ll));
- lodepng_memset(bitlen_d, 0, NUM_DISTANCE_SYMBOLS * sizeof(*bitlen_d));
-
- /*i is the current symbol we're reading in the part that contains the code lengths of lit/len and dist codes*/
- i = 0;
- while (i < HLIT + HDIST) {
- unsigned code;
- ensureBits25(reader, 22); /* up to 15 bits for huffman code, up to 7 extra bits below*/
- code = huffmanDecodeSymbol(reader, &tree_cl);
- if (code <= 15) /*a length code*/ {
- if (i < HLIT) bitlen_ll[i] = code;
- else bitlen_d[i - HLIT] = code;
- ++i;
- } else if (code == 16) /*repeat previous*/ {
- unsigned replength = 3; /*read in the 2 bits that indicate repeat length (3-6)*/
- unsigned value; /*set value to the previous code*/
-
- if (i == 0) ERROR_BREAK(54); /*can't repeat previous if i is 0*/
-
- replength += readBits(reader, 2);
-
- if (i < HLIT + 1) value = bitlen_ll[i - 1];
- else value = bitlen_d[i - HLIT - 1];
- /*repeat this value in the next lengths*/
- for (n = 0; n < replength; ++n) {
- if (i >= HLIT + HDIST) ERROR_BREAK(13); /*error: i is larger than the amount of codes*/
- if (i < HLIT) bitlen_ll[i] = value;
- else bitlen_d[i - HLIT] = value;
- ++i;
- }
- } else if(code == 17) /*repeat "0" 3-10 times*/ {
- unsigned replength = 3; /*read in the bits that indicate repeat length*/
- replength += readBits(reader, 3);
-
- /*repeat this value in the next lengths*/
- for (n = 0; n < replength; ++n) {
- if (i >= HLIT + HDIST) ERROR_BREAK(14); /*error: i is larger than the amount of codes*/
-
- if (i < HLIT) bitlen_ll[i] = 0;
- else bitlen_d[i - HLIT] = 0;
- ++i;
- }
- } else if(code == 18) /*repeat "0" 11-138 times*/ {
- unsigned replength = 11; /*read in the bits that indicate repeat length*/
- replength += readBits(reader, 7);
-
- /*repeat this value in the next lengths*/
- for (n = 0; n < replength; ++n) {
- if(i >= HLIT + HDIST) ERROR_BREAK(15); /*error: i is larger than the amount of codes*/
-
- if(i < HLIT) bitlen_ll[i] = 0;
- else bitlen_d[i - HLIT] = 0;
- ++i;
- }
- } else /*if(code == INVALIDSYMBOL)*/ {
- ERROR_BREAK(16); /*error: tried to read disallowed huffman symbol*/
- }
- /*check if any of the ensureBits above went out of bounds*/
- if (reader->bp > reader->bitsize) {
- /*return error code 10 or 11 depending on the situation that happened in huffmanDecodeSymbol
- (10=no endcode, 11=wrong jump outside of tree)*/
- /* TODO: revise error codes 10,11,50: the above comment is no longer valid */
- ERROR_BREAK(50); /*error, bit pointer jumps past memory*/
- }
- }
- if (error) break;
-
- if (bitlen_ll[256] == 0) ERROR_BREAK(64); /*the length of the end code 256 must be larger than 0*/
-
- /*now we've finally got HLIT and HDIST, so generate the code trees, and the function is done*/
- error = HuffmanTree_makeFromLengths(tree_ll, bitlen_ll, NUM_DEFLATE_CODE_SYMBOLS, 15);
- if (error) break;
- error = HuffmanTree_makeFromLengths(tree_d, bitlen_d, NUM_DISTANCE_SYMBOLS, 15);
-
- break; /*end of error-while*/
- }
-
- free(bitlen_cl);
- free(bitlen_ll);
- free(bitlen_d);
- HuffmanTree_cleanup(&tree_cl);
-
- return error;
-}
-
-
-/*inflate a block with a dynamic or fixed Huffman tree. btype must be 1 or 2.*/
-static unsigned inflateHuffmanBlock(ucvector* out, LodePNGBitReader* reader, unsigned btype)
-{
- unsigned error = 0;
- HuffmanTree tree_ll; /*the huffman tree for literal and length codes*/
- HuffmanTree tree_d; /*the huffman tree for distance codes*/
-
- HuffmanTree_init(&tree_ll);
- HuffmanTree_init(&tree_d);
-
- if (btype == 1) error = getTreeInflateFixed(&tree_ll, &tree_d);
- else /*if(btype == 2)*/ error = getTreeInflateDynamic(&tree_ll, &tree_d, reader);
-
- while (!error) /*decode all symbols until end reached, breaks at end code*/ {
- /*code_ll is literal, length or end code*/
- unsigned code_ll;
- ensureBits25(reader, 20); /* up to 15 for the huffman symbol, up to 5 for the length extra bits */
- code_ll = huffmanDecodeSymbol(reader, &tree_ll);
- if (code_ll <= 255) /*literal symbol*/ {
- if (!ucvector_resize(out, out->size + 1)) ERROR_BREAK(83 /*alloc fail*/);
- out->data[out->size - 1] = (unsigned char)code_ll;
- } else if (code_ll >= FIRST_LENGTH_CODE_INDEX && code_ll <= LAST_LENGTH_CODE_INDEX) /*length code*/ {
- unsigned code_d, distance;
- unsigned numextrabits_l, numextrabits_d; /*extra bits for length and distance*/
- size_t start, backward, length;
-
- /*part 1: get length base*/
- length = LENGTHBASE[code_ll - FIRST_LENGTH_CODE_INDEX];
-
- /*part 2: get extra bits and add the value of that to length*/
- numextrabits_l = LENGTHEXTRA[code_ll - FIRST_LENGTH_CODE_INDEX];
- if (numextrabits_l != 0) {
- /* bits already ensured above */
- length += readBits(reader, numextrabits_l);
- }
-
- /*part 3: get distance code*/
- ensureBits32(reader, 28); /* up to 15 for the huffman symbol, up to 13 for the extra bits */
- code_d = huffmanDecodeSymbol(reader, &tree_d);
- if (code_d > 29) {
- if (code_d <= 31) {
- ERROR_BREAK(18); /*error: invalid distance code (30-31 are never used)*/
- } else /* if(code_d == INVALIDSYMBOL) */{
- ERROR_BREAK(16); /*error: tried to read disallowed huffman symbol*/
- }
- }
- distance = DISTANCEBASE[code_d];
-
- /*part 4: get extra bits from distance*/
- numextrabits_d = DISTANCEEXTRA[code_d];
- if (numextrabits_d != 0) {
- /* bits already ensured above */
- distance += readBits(reader, numextrabits_d);
- }
-
- /*part 5: fill in all the out[n] values based on the length and dist*/
- start = out->size;
- if (distance > start) ERROR_BREAK(52); /*too long backward distance*/
- backward = start - distance;
-
- if (!ucvector_resize(out, out->size + length)) ERROR_BREAK(83 /*alloc fail*/);
- if (distance < length) {
- size_t forward;
- lodepng_memcpy(out->data + start, out->data + backward, distance);
- start += distance;
- for (forward = distance; forward < length; ++forward) {
- out->data[start++] = out->data[backward++];
- }
- } else {
- lodepng_memcpy(out->data + start, out->data + backward, length);
- }
- } else if (code_ll == 256) {
- break; /*end code, break the loop*/
- } else /*if(code_ll == INVALIDSYMBOL)*/ {
- ERROR_BREAK(16); /*error: tried to read disallowed huffman symbol*/
- }
- /*check if any of the ensureBits above went out of bounds*/
- if (reader->bp > reader->bitsize) {
- /*return error code 10 or 11 depending on the situation that happened in huffmanDecodeSymbol
- (10=no endcode, 11=wrong jump outside of tree)*/
- /* TODO: revise error codes 10,11,50: the above comment is no longer valid */
- ERROR_BREAK(51); /*error, bit pointer jumps past memory*/
- }
- }
-
- HuffmanTree_cleanup(&tree_ll);
- HuffmanTree_cleanup(&tree_d);
-
- return error;
-}
-
-
-static unsigned inflateNoCompression(ucvector* out, LodePNGBitReader* reader, const LodePNGDecompressSettings* settings)
-{
- size_t bytepos;
- size_t size = reader->size;
- unsigned LEN, NLEN, error = 0;
-
- /*go to first boundary of byte*/
- bytepos = (reader->bp + 7u) >> 3u;
-
- /*read LEN (2 bytes) and NLEN (2 bytes)*/
- if (bytepos + 4 >= size) return 52; /*error, bit pointer will jump past memory*/
- LEN = (unsigned)reader->data[bytepos] + ((unsigned)reader->data[bytepos + 1] << 8u); bytepos += 2;
- NLEN = (unsigned)reader->data[bytepos] + ((unsigned)reader->data[bytepos + 1] << 8u); bytepos += 2;
-
- /*check if 16-bit NLEN is really the one's complement of LEN*/
- if (!settings->ignore_nlen && LEN + NLEN != 65535) {
- return 21; /*error: NLEN is not one's complement of LEN*/
- }
-
- if (!ucvector_resize(out, out->size + LEN)) return 83; /*alloc fail*/
-
- /*read the literal data: LEN bytes are now stored in the out buffer*/
- if (bytepos + LEN > size) return 23; /*error: reading outside of in buffer*/
-
- lodepng_memcpy(out->data + out->size - LEN, reader->data + bytepos, LEN);
- bytepos += LEN;
-
- reader->bp = bytepos << 3u;
-
- return error;
-}
-
-
-static unsigned lodepng_inflatev(ucvector* out, const unsigned char* in, size_t insize, const LodePNGDecompressSettings* settings)
-{
- unsigned BFINAL = 0;
- LodePNGBitReader reader;
- unsigned error = LodePNGBitReader_init(&reader, in, insize);
-
- if (error) return error;
-
- while (!BFINAL) {
- unsigned BTYPE;
- if (!ensureBits9(&reader, 3)) return 52; /*error, bit pointer will jump past memory*/
- BFINAL = readBits(&reader, 1);
- BTYPE = readBits(&reader, 2);
-
- if (BTYPE == 3) return 20; /*error: invalid BTYPE*/
- else if (BTYPE == 0) error = inflateNoCompression(out, &reader, settings); /*no compression*/
- else error = inflateHuffmanBlock(out, &reader, BTYPE); /*compression, BTYPE 01 or 10*/
-
- if (error) return error;
- }
-
- return error;
-}
-
-
-static unsigned inflatev(ucvector* out, const unsigned char* in, size_t insize, const LodePNGDecompressSettings* settings)
-{
- if (settings->custom_inflate) {
- unsigned error = settings->custom_inflate(&out->data, &out->size, in, insize, settings);
- out->allocsize = out->size;
- return error;
- } else {
- return lodepng_inflatev(out, in, insize, settings);
- }
-}
-
-
-/* ////////////////////////////////////////////////////////////////////////// */
-/* / Adler32 / */
-/* ////////////////////////////////////////////////////////////////////////// */
-
-static unsigned update_adler32(unsigned adler, const unsigned char* data, unsigned len)
-{
- unsigned s1 = adler & 0xffffu;
- unsigned s2 = (adler >> 16u) & 0xffffu;
-
- while (len != 0u) {
- unsigned i;
- /*at least 5552 sums can be done before the sums overflow, saving a lot of modulo divisions*/
- unsigned amount = len > 5552u ? 5552u : len;
- len -= amount;
- for (i = 0; i != amount; ++i) {
- s1 += (*data++);
- s2 += s1;
- }
- s1 %= 65521u;
- s2 %= 65521u;
- }
-
- return (s2 << 16u) | s1;
-}
-
-/*Return the adler32 of the bytes data[0..len-1]*/
-static unsigned adler32(const unsigned char* data, unsigned len)
-{
- return update_adler32(1u, data, len);
-}
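-
-/* Small worked example: for the three bytes 'a','b','c' (97, 98, 99), s1 = 1 + 97 + 98 + 99 = 295
-and s2 = 98 + 196 + 295 = 589, so the returned checksum is (589 << 16) | 295 = 0x024D0127. */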
-
-/* ////////////////////////////////////////////////////////////////////////// */
-/* / Zlib / */
-/* ////////////////////////////////////////////////////////////////////////// */
-
-static unsigned lodepng_zlib_decompressv(ucvector* out, const unsigned char* in, size_t insize, const LodePNGDecompressSettings* settings)
-{
- unsigned error = 0;
- unsigned CM, CINFO, FDICT;
-
- if (insize < 2) return 53; /*error, size of zlib data too small*/
- /*read information from zlib header*/
- if ((in[0] * 256 + in[1]) % 31 != 0) {
- /*error: 256 * in[0] + in[1] must be a multiple of 31, the FCHECK value is supposed to be made that way*/
- return 24;
- }
-
- CM = in[0] & 15;
- CINFO = (in[0] >> 4) & 15;
- /*FCHECK = in[1] & 31;*/ /*FCHECK is already tested above*/
- FDICT = (in[1] >> 5) & 1;
- /*FLEVEL = (in[1] >> 6) & 3;*/ /*FLEVEL is not used here*/
-
- if (CM != 8 || CINFO > 7) {
- /*error: only compression method 8: inflate with sliding window of 32k is supported by the PNG spec*/
- return 25;
- }
- if (FDICT != 0) {
- /*error: the specification of PNG says about the zlib stream:
- "The additional flags shall not specify a preset dictionary."*/
- return 26;
- }
-
- error = inflatev(out, in + 2, insize - 2, settings);
- if (error) return error;
-
- if (!settings->ignore_adler32) {
- unsigned ADLER32 = lodepng_read32bitInt(&in[insize - 4]);
- unsigned checksum = adler32(out->data, (unsigned)(out->size));
- if(checksum != ADLER32) return 58; /*error, adler checksum not correct, data must be corrupted*/
- }
-
- return 0; /*no error*/
-}
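-
-/* For illustration, the common zlib header bytes 0x78 0x9C parse as CM = 8 (deflate),
-CINFO = 7 (32K window), FDICT = 0 and FLEVEL = 2; since 0x789C = 30876 = 31 * 996,
-the FCHECK multiple-of-31 test above passes. */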
-
-
-/*expected_size is expected output size, to avoid intermediate allocations. Set to 0 if not known. */
-static unsigned zlib_decompress(unsigned char** out, size_t* outsize, size_t expected_size, const unsigned char* in, size_t insize, const LodePNGDecompressSettings* settings)
-{
- if(settings->custom_zlib) {
- return settings->custom_zlib(out, outsize, in, insize, settings);
- } else {
- unsigned error;
- ucvector v = ucvector_init(*out, *outsize);
- if (expected_size) {
- /*reserve the memory to avoid intermediate reallocations*/
- ucvector_resize(&v, *outsize + expected_size);
- v.size = *outsize;
- }
- error = lodepng_zlib_decompressv(&v, in, insize, settings);
- *out = v.data;
- *outsize = v.size;
- return error;
- }
-}
-
-
-static void lodepng_decompress_settings_init(LodePNGDecompressSettings* settings)
-{
- settings->ignore_adler32 = 0;
- settings->ignore_nlen = 0;
- settings->custom_zlib = 0;
- settings->custom_inflate = 0;
- settings->custom_context = 0;
-}
-
-
-/* ////////////////////////////////////////////////////////////////////////// */
-/* ////////////////////////////////////////////////////////////////////////// */
-/* // End of Zlib related code. Begin of PNG related code. // */
-/* ////////////////////////////////////////////////////////////////////////// */
-/* ////////////////////////////////////////////////////////////////////////// */
-
-
-#if 0 //thorvg don't use crc
-/* CRC polynomial: 0xedb88320 */
-static unsigned lodepng_crc32_table[256] = {
- 0u, 1996959894u, 3993919788u, 2567524794u, 124634137u, 1886057615u, 3915621685u, 2657392035u,
- 249268274u, 2044508324u, 3772115230u, 2547177864u, 162941995u, 2125561021u, 3887607047u, 2428444049u,
- 498536548u, 1789927666u, 4089016648u, 2227061214u, 450548861u, 1843258603u, 4107580753u, 2211677639u,
- 325883990u, 1684777152u, 4251122042u, 2321926636u, 335633487u, 1661365465u, 4195302755u, 2366115317u,
- 997073096u, 1281953886u, 3579855332u, 2724688242u, 1006888145u, 1258607687u, 3524101629u, 2768942443u,
- 901097722u, 1119000684u, 3686517206u, 2898065728u, 853044451u, 1172266101u, 3705015759u, 2882616665u,
- 651767980u, 1373503546u, 3369554304u, 3218104598u, 565507253u, 1454621731u, 3485111705u, 3099436303u,
- 671266974u, 1594198024u, 3322730930u, 2970347812u, 795835527u, 1483230225u, 3244367275u, 3060149565u,
- 1994146192u, 31158534u, 2563907772u, 4023717930u, 1907459465u, 112637215u, 2680153253u, 3904427059u,
- 2013776290u, 251722036u, 2517215374u, 3775830040u, 2137656763u, 141376813u, 2439277719u, 3865271297u,
- 1802195444u, 476864866u, 2238001368u, 4066508878u, 1812370925u, 453092731u, 2181625025u, 4111451223u,
- 1706088902u, 314042704u, 2344532202u, 4240017532u, 1658658271u, 366619977u, 2362670323u, 4224994405u,
- 1303535960u, 984961486u, 2747007092u, 3569037538u, 1256170817u, 1037604311u, 2765210733u, 3554079995u,
- 1131014506u, 879679996u, 2909243462u, 3663771856u, 1141124467u, 855842277u, 2852801631u, 3708648649u,
- 1342533948u, 654459306u, 3188396048u, 3373015174u, 1466479909u, 544179635u, 3110523913u, 3462522015u,
- 1591671054u, 702138776u, 2966460450u, 3352799412u, 1504918807u, 783551873u, 3082640443u, 3233442989u,
- 3988292384u, 2596254646u, 62317068u, 1957810842u, 3939845945u, 2647816111u, 81470997u, 1943803523u,
- 3814918930u, 2489596804u, 225274430u, 2053790376u, 3826175755u, 2466906013u, 167816743u, 2097651377u,
- 4027552580u, 2265490386u, 503444072u, 1762050814u, 4150417245u, 2154129355u, 426522225u, 1852507879u,
- 4275313526u, 2312317920u, 282753626u, 1742555852u, 4189708143u, 2394877945u, 397917763u, 1622183637u,
- 3604390888u, 2714866558u, 953729732u, 1340076626u, 3518719985u, 2797360999u, 1068828381u, 1219638859u,
- 3624741850u, 2936675148u, 906185462u, 1090812512u, 3747672003u, 2825379669u, 829329135u, 1181335161u,
- 3412177804u, 3160834842u, 628085408u, 1382605366u, 3423369109u, 3138078467u, 570562233u, 1426400815u,
- 3317316542u, 2998733608u, 733239954u, 1555261956u, 3268935591u, 3050360625u, 752459403u, 1541320221u,
- 2607071920u, 3965973030u, 1969922972u, 40735498u, 2617837225u, 3943577151u, 1913087877u, 83908371u,
- 2512341634u, 3803740692u, 2075208622u, 213261112u, 2463272603u, 3855990285u, 2094854071u, 198958881u,
- 2262029012u, 4057260610u, 1759359992u, 534414190u, 2176718541u, 4139329115u, 1873836001u, 414664567u,
- 2282248934u, 4279200368u, 1711684554u, 285281116u, 2405801727u, 4167216745u, 1634467795u, 376229701u,
- 2685067896u, 3608007406u, 1308918612u, 956543938u, 2808555105u, 3495958263u, 1231636301u, 1047427035u,
- 2932959818u, 3654703836u, 1088359270u, 936918000u, 2847714899u, 3736837829u, 1202900863u, 817233897u,
- 3183342108u, 3401237130u, 1404277552u, 615818150u, 3134207493u, 3453421203u, 1423857449u, 601450431u,
- 3009837614u, 3294710456u, 1567103746u, 711928724u, 3020668471u, 3272380065u, 1510334235u, 755167117u
-};
-
-
-/* Calculate CRC32 of buffer
- Return the CRC of the bytes buf[0..len-1]. */
-static unsigned lodepng_crc32(const unsigned char* data, size_t length)
-{
- unsigned r = 0xffffffffu;
- size_t i;
- for (i = 0; i < length; ++i) {
- r = lodepng_crc32_table[(r ^ data[i]) & 0xffu] ^ (r >> 8u);
- }
- return r ^ 0xffffffffu;
-}
-#endif
-
-/* ////////////////////////////////////////////////////////////////////////// */
-/* / Reading and writing PNG color channel bits / */
-/* ////////////////////////////////////////////////////////////////////////// */
-
-/* The color channel bits of less-than-8-bit pixels are read with the MSB of bytes first,
-so LodePNGBitWriter and LodePNGBitReader can't be used for those. */
-
-static unsigned char readBitFromReversedStream(size_t* bitpointer, const unsigned char* bitstream)
-{
- unsigned char result = (unsigned char)((bitstream[(*bitpointer) >> 3] >> (7 - ((*bitpointer) & 0x7))) & 1);
- ++(*bitpointer);
- return result;
-}
-
-
-/* TODO: make this faster */
-static unsigned readBitsFromReversedStream(size_t* bitpointer, const unsigned char* bitstream, size_t nbits)
-{
- unsigned result = 0;
- size_t i;
- for (i = 0 ; i < nbits; ++i) {
- result <<= 1u;
- result |= (unsigned)readBitFromReversedStream(bitpointer, bitstream);
- }
- return result;
-}
-
-
-static void setBitOfReversedStream(size_t* bitpointer, unsigned char* bitstream, unsigned char bit)
-{
- /*the current bit in bitstream may be 0 or 1 for this to work*/
- if (bit == 0) bitstream[(*bitpointer) >> 3u] &= (unsigned char)(~(1u << (7u - ((*bitpointer) & 7u))));
- else bitstream[(*bitpointer) >> 3u] |= (1u << (7u - ((*bitpointer) & 7u)));
- ++(*bitpointer);
-}
-
-/* ////////////////////////////////////////////////////////////////////////// */
-/* / PNG chunks / */
-/* ////////////////////////////////////////////////////////////////////////// */
-
-/*
- The lodepng_chunk functions are normally not needed, except to traverse the
- unknown chunks stored in the LodePNGInfo struct, or add new ones to it.
- They also allow traversing the chunks of an encoded PNG file yourself.
-
- The chunk pointer always points to the beginning of the chunk itself, that is
- the first byte of the 4 length bytes.
-
- In the PNG file format, chunks have the following format:
- -4 bytes length: length of the data of the chunk in bytes (chunk itself is 12 bytes longer)
- -4 bytes chunk type (ASCII a-z,A-Z only, see below)
- -length bytes of data (may be 0 bytes if length was 0)
- -4 bytes of CRC, computed on chunk name + data
-
- The first chunk starts at the 8th byte of the PNG file; the entire rest of the file
- consists of concatenated chunks in the above format.
-
- PNG standard chunk ASCII naming conventions:
- -First byte: uppercase = critical, lowercase = ancillary
- -Second byte: uppercase = public, lowercase = private
- -Third byte: must be uppercase
- -Fourth byte: uppercase = unsafe to copy, lowercase = safe to copy
-*/
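-
-/*
- As a concrete example of the layout above, the IEND chunk is always the 12 bytes
- 00 00 00 00 49 45 4E 44 AE 42 60 82: a zero data length, the type "IEND", no data,
- and the CRC computed over the 4 type bytes.
-*/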
-
-
-/*
- Gets the length of the data of the chunk. Total chunk length has 12 bytes more.
- There must be at least 4 bytes to read from. If the result value is too large,
- it may be corrupt data.
-*/
-static unsigned lodepng_chunk_length(const unsigned char* chunk)
-{
- return lodepng_read32bitInt(&chunk[0]);
-}
-
-
-/* check if the type is the given type */
-static unsigned char lodepng_chunk_type_equals(const unsigned char* chunk, const char* type)
-{
- if (lodepng_strlen(type) != 4) return 0;
- return (chunk[4] == type[0] && chunk[5] == type[1] && chunk[6] == type[2] && chunk[7] == type[3]);
-}
-
-
-/* 0: it's one of the critical chunk types, 1: it's an ancillary chunk (see PNG standard) */
-static unsigned char lodepng_chunk_ancillary(const unsigned char* chunk)
-{
- return ((chunk[4] & 32) != 0);
-}
-
-
-static const unsigned char* lodepng_chunk_data_const(const unsigned char* chunk)
-{
- return &chunk[8];
-}
-
-#if 0 //thorvg don't use crc
-/* returns 0 if the crc is correct, 1 if it's incorrect (0 for OK as usual!) */
-static unsigned lodepng_chunk_check_crc(const unsigned char* chunk)
-{
- unsigned length = lodepng_chunk_length(chunk);
- unsigned CRC = lodepng_read32bitInt(&chunk[length + 8]);
- /*the CRC is taken of the data and the 4 chunk type letters, not the length*/
- unsigned checksum = lodepng_crc32(&chunk[4], length + 4);
- if (CRC != checksum) return 1;
- else return 0;
-}
-#endif
-
-static const unsigned char* lodepng_chunk_next_const(const unsigned char* chunk, const unsigned char* end)
-{
- if (chunk >= end || end - chunk < 12) return end; /*too small to contain a chunk*/
- if (chunk[0] == 0x89 && chunk[1] == 0x50 && chunk[2] == 0x4e && chunk[3] == 0x47
- && chunk[4] == 0x0d && chunk[5] == 0x0a && chunk[6] == 0x1a && chunk[7] == 0x0a) {
- /* Is PNG magic header at start of PNG file. Jump to first actual chunk. */
- return chunk + 8;
- } else {
- size_t total_chunk_length;
- const unsigned char* result;
- if (lodepng_addofl(lodepng_chunk_length(chunk), 12, &total_chunk_length)) return end;
- result = chunk + total_chunk_length;
- if (result < chunk) return end; /*pointer overflow*/
- return result;
- }
-}
-
-
-/* ////////////////////////////////////////////////////////////////////////// */
-/* / Color types, channels, bits / */
-/* ////////////////////////////////////////////////////////////////////////// */
-
-/*checks if the colortype is valid and the bitdepth bd is allowed for this colortype.
-Return value is a LodePNG error code.*/
-static unsigned checkColorValidity(LodePNGColorType colortype, unsigned bd)
-{
- switch(colortype) {
- case LCT_GREY: if(!(bd == 1 || bd == 2 || bd == 4 || bd == 8 || bd == 16)) return 37; break;
- case LCT_RGB: if(!( bd == 8 || bd == 16)) return 37; break;
- case LCT_PALETTE: if(!(bd == 1 || bd == 2 || bd == 4 || bd == 8 )) return 37; break;
- case LCT_GREY_ALPHA: if(!( bd == 8 || bd == 16)) return 37; break;
- case LCT_RGBA: if(!( bd == 8 || bd == 16)) return 37; break;
- case LCT_MAX_OCTET_VALUE: return 31; /* invalid color type */
- default: return 31; /* invalid color type */
- }
- return 0; /*allowed color type / bits combination*/
-}
-
-
-static unsigned getNumColorChannels(LodePNGColorType colortype)
-{
- switch(colortype) {
- case LCT_GREY: return 1;
- case LCT_RGB: return 3;
- case LCT_PALETTE: return 1;
- case LCT_GREY_ALPHA: return 2;
- case LCT_RGBA: return 4;
- case LCT_MAX_OCTET_VALUE: return 0; /* invalid color type */
- default: return 0; /*invalid color type*/
- }
-}
-
-
-static unsigned lodepng_get_bpp_lct(LodePNGColorType colortype, unsigned bitdepth)
-{
- /*bits per pixel is amount of channels * bits per channel*/
- return getNumColorChannels(colortype) * bitdepth;
-}
-
-
-static void lodepng_color_mode_init(LodePNGColorMode* info)
-{
- info->key_defined = 0;
- info->key_r = info->key_g = info->key_b = 0;
- info->colortype = LCT_RGBA;
- info->bitdepth = 8;
- info->palette = 0;
- info->palettesize = 0;
-}
-
-
-/*allocates palette memory if needed, and initializes all colors to black*/
-static void lodepng_color_mode_alloc_palette(LodePNGColorMode* info)
-{
- size_t i;
- /*if the palette is already allocated, it will have size 1024 so no reallocation needed in that case*/
- /*the palette must have room for up to 256 colors with 4 bytes each.*/
- if (!info->palette) info->palette = (unsigned char*)malloc(1024);
- if (!info->palette) return; /*alloc fail*/
- for (i = 0; i != 256; ++i) {
- /*Initialize all unused colors with black, the value used for invalid palette indices.
- This is an error according to the PNG spec, but common PNG decoders make it black instead.
- That makes color conversion slightly faster due to no error handling needed.*/
- info->palette[i * 4 + 0] = 0;
- info->palette[i * 4 + 1] = 0;
- info->palette[i * 4 + 2] = 0;
- info->palette[i * 4 + 3] = 255;
- }
-}
-
-static void lodepng_palette_clear(LodePNGColorMode* info)
-{
- if (info->palette) free(info->palette);
- info->palette = 0;
- info->palettesize = 0;
-}
-
-
-static void lodepng_color_mode_cleanup(LodePNGColorMode* info)
-{
- lodepng_palette_clear(info);
-}
-
-
-/*return value is error code (0 means no error)*/
-static unsigned lodepng_color_mode_copy(LodePNGColorMode* dest, const LodePNGColorMode* source)
-{
- lodepng_color_mode_cleanup(dest);
- lodepng_memcpy(dest, source, sizeof(LodePNGColorMode));
- if (source->palette) {
- dest->palette = (unsigned char*)malloc(1024);
- if (!dest->palette && source->palettesize) return 83; /*alloc fail*/
- lodepng_memcpy(dest->palette, source->palette, source->palettesize * 4);
- }
- return 0;
-}
-
-
-static int lodepng_color_mode_equal(const LodePNGColorMode* a, const LodePNGColorMode* b)
-{
- size_t i;
- if (a->colortype != b->colortype) return 0;
- if (a->bitdepth != b->bitdepth) return 0;
- if (a->key_defined != b->key_defined) return 0;
- if (a->key_defined) {
- if(a->key_r != b->key_r) return 0;
- if(a->key_g != b->key_g) return 0;
- if(a->key_b != b->key_b) return 0;
- }
- if (a->palettesize != b->palettesize) return 0;
- for (i = 0; i != a->palettesize * 4; ++i) {
- if (a->palette[i] != b->palette[i]) return 0;
- }
- return 1;
-}
-
-
-static size_t lodepng_get_raw_size_lct(unsigned w, unsigned h, LodePNGColorType colortype, unsigned bitdepth)
-{
- size_t bpp = lodepng_get_bpp_lct(colortype, bitdepth);
- size_t n = (size_t)w * (size_t)h;
- return ((n / 8u) * bpp) + ((n & 7u) * bpp + 7u) / 8u;
-}
-
-
-/* Returns the byte size of a raw image buffer with given width, height and color mode */
-static size_t lodepng_get_raw_size(unsigned w, unsigned h, const LodePNGColorMode* color)
-{
- return lodepng_get_raw_size_lct(w, h, color->colortype, color->bitdepth);
-}
-
-
-/*in an idat chunk, each scanline is a multiple of 8 bits, unlike the lodepng output buffer,
-and in addition has one extra byte per line: the filter byte. So this gives a larger
-result than lodepng_get_raw_size. Set h to 1 to get the size of 1 row including filter byte. */
-static size_t lodepng_get_raw_size_idat(unsigned w, unsigned h, unsigned bpp)
-{
- /* + 1 for the filter byte, and possibly plus padding bits per line. */
- /* Ignoring casts, the expression is equal to (w * bpp + 7) / 8 + 1, but avoids overflow of w * bpp */
- size_t line = ((size_t)(w / 8u) * bpp) + 1u + ((w & 7u) * bpp + 7u) / 8u;
- return (size_t)h * line;
-}
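-
-/* Example of the per-line size computed above: for w = 5 and bpp = 4,
-line = (5 / 8) * 4 + 1 + ((5 & 7) * 4 + 7) / 8 = 0 + 1 + 3 = 4 bytes,
-i.e. 1 filter byte plus 3 data bytes holding 20 bits of pixel data. */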
-
-
-/* Safely checks whether size_t overflow can be caused due to amount of pixels.
- This check is overcautious rather than precise. If this check indicates no overflow,
- you can safely compute in a size_t (but not an unsigned):
- -(size_t)w * (size_t)h * 8
- -amount of bytes in IDAT (including filter, padding and Adam7 bytes)
- -amount of bytes in raw color model
- Returns 1 if overflow possible, 0 if not. */
-static int lodepng_pixel_overflow(unsigned w, unsigned h, const LodePNGColorMode* pngcolor, const LodePNGColorMode* rawcolor)
-{
- size_t bpp = LODEPNG_MAX(lodepng_get_bpp_lct(pngcolor->colortype, pngcolor->bitdepth), lodepng_get_bpp_lct(rawcolor->colortype, rawcolor->bitdepth));
- size_t numpixels, total;
- size_t line; /* bytes per line in worst case */
-
- if (lodepng_mulofl((size_t)w, (size_t)h, &numpixels)) return 1;
- if (lodepng_mulofl(numpixels, 8, &total)) return 1; /* bit pointer with 8-bit color, or 8 bytes per channel color */
-
- /* Bytes per scanline with the expression "(w / 8u) * bpp) + ((w & 7u) * bpp + 7u) / 8u" */
- if (lodepng_mulofl((size_t)(w / 8u), bpp, &line)) return 1;
- if (lodepng_addofl(line, ((w & 7u) * bpp + 7u) / 8u, &line)) return 1;
-
- if (lodepng_addofl(line, 5, &line)) return 1; /* 5 bytes overhead per line: 1 filterbyte, 4 for Adam7 worst case */
- if (lodepng_mulofl(line, h, &total)) return 1; /* Total bytes in worst case */
-
- return 0; /* no overflow */
-}
-
-
-static void lodepng_info_init(LodePNGInfo* info)
-{
- lodepng_color_mode_init(&info->color);
- info->interlace_method = 0;
- info->compression_method = 0;
- info->filter_method = 0;
-}
-
-
-static void lodepng_info_cleanup(LodePNGInfo* info)
-{
- lodepng_color_mode_cleanup(&info->color);
-}
-
-
-/* index: bitgroup index, bits: bitgroup size (1, 2 or 4), in: bitgroup value, out: octet array to add bits to */
-static void addColorBits(unsigned char* out, size_t index, unsigned bits, unsigned in)
-{
- unsigned m = bits == 1 ? 7 : bits == 2 ? 3 : 1; /*8 / bits - 1*/
- /*p = the partial index in the byte, e.g. with 4 palettebits it is 0 for first half or 1 for second half*/
- unsigned p = index & m;
- in &= (1u << bits) - 1u; /*filter out any other bits of the input value*/
- in = in << (bits * (m - p));
- if(p == 0) out[index * bits / 8u] = in;
- else out[index * bits / 8u] |= in;
-}
-
-/*
- One node of a color tree
- This is the data structure used to count the number of unique colors and to get a palette
- index for a color. It's like an octree, but because the alpha channel is used too, each
- node has 16 instead of 8 children.
-*/
-struct ColorTree
-{
- ColorTree* children[16]; /* up to 16 pointers to ColorTree of next level */
- int index; /* the payload. Only has a meaningful value if this is in the last level */
-};
-
-static void color_tree_init(ColorTree* tree)
-{
- lodepng_memset(tree->children, 0, 16 * sizeof(*tree->children));
- tree->index = -1;
-}
-
-static void color_tree_cleanup(ColorTree* tree)
-{
- int i;
- for (i = 0; i != 16; ++i) {
- if(tree->children[i]) {
- color_tree_cleanup(tree->children[i]);
- free(tree->children[i]);
- }
- }
-}
-
-
-/* returns -1 if color not present, its index otherwise */
-static int color_tree_get(ColorTree* tree, unsigned char r, unsigned char g, unsigned char b, unsigned char a)
-{
- int bit = 0;
- for (bit = 0; bit < 8; ++bit) {
- int i = 8 * ((r >> bit) & 1) + 4 * ((g >> bit) & 1) + 2 * ((b >> bit) & 1) + 1 * ((a >> bit) & 1);
- if (!tree->children[i]) return -1;
- else tree = tree->children[i];
- }
- return tree ? tree->index : -1;
-}
-
-
-/* color is not allowed to already exist.
- Index should be >= 0 (it's signed to be compatible with using -1 for "doesn't exist")
- Returns error code, or 0 if ok */
-static unsigned color_tree_add(ColorTree* tree, unsigned char r, unsigned char g, unsigned char b, unsigned char a, unsigned index)
-{
- int bit;
- for (bit = 0; bit < 8; ++bit) {
- int i = 8 * ((r >> bit) & 1) + 4 * ((g >> bit) & 1) + 2 * ((b >> bit) & 1) + 1 * ((a >> bit) & 1);
- if (!tree->children[i]) {
- tree->children[i] = (ColorTree*)malloc(sizeof(ColorTree));
- if (!tree->children[i]) return 83; /*alloc fail*/
- color_tree_init(tree->children[i]);
- }
- tree = tree->children[i];
- }
- tree->index = (int)index;
- return 0;
-}
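-
-/* For illustration: opaque red (r, g, b, a) = (255, 0, 0, 255) yields the same child index
-8*1 + 4*0 + 2*0 + 1*1 = 9 at every bit level, so its path in the tree is children[9]
-taken 8 times, with the palette index stored in the final node. */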
-
-/* put a pixel, given its RGBA color, into image of any color type */
-static unsigned rgba8ToPixel(unsigned char* out, size_t i, const LodePNGColorMode* mode, ColorTree* tree /*for palette*/, unsigned char r, unsigned char g, unsigned char b, unsigned char a)
-{
- if (mode->colortype == LCT_GREY) {
- unsigned char gray = r; /*((unsigned short)r + g + b) / 3u;*/
- if (mode->bitdepth == 8) out[i] = gray;
- else if (mode->bitdepth == 16) out[i * 2 + 0] = out[i * 2 + 1] = gray;
- else {
- /*take the most significant bits of gray*/
- gray = ((unsigned)gray >> (8u - mode->bitdepth)) & ((1u << mode->bitdepth) - 1u);
- addColorBits(out, i, mode->bitdepth, gray);
- }
- } else if (mode->colortype == LCT_RGB) {
- if (mode->bitdepth == 8) {
- out[i * 3 + 0] = r;
- out[i * 3 + 1] = g;
- out[i * 3 + 2] = b;
- } else {
- out[i * 6 + 0] = out[i * 6 + 1] = r;
- out[i * 6 + 2] = out[i * 6 + 3] = g;
- out[i * 6 + 4] = out[i * 6 + 5] = b;
- }
- } else if(mode->colortype == LCT_PALETTE) {
- int index = color_tree_get(tree, r, g, b, a);
- if (index < 0) return 82; /*color not in palette*/
- if (mode->bitdepth == 8) out[i] = index;
- else addColorBits(out, i, mode->bitdepth, (unsigned)index);
- } else if (mode->colortype == LCT_GREY_ALPHA) {
- unsigned char gray = r; /*((unsigned short)r + g + b) / 3u;*/
- if (mode->bitdepth == 8) {
- out[i * 2 + 0] = gray;
- out[i * 2 + 1] = a;
- } else if (mode->bitdepth == 16) {
- out[i * 4 + 0] = out[i * 4 + 1] = gray;
- out[i * 4 + 2] = out[i * 4 + 3] = a;
- }
- } else if (mode->colortype == LCT_RGBA) {
- if (mode->bitdepth == 8) {
- out[i * 4 + 0] = r;
- out[i * 4 + 1] = g;
- out[i * 4 + 2] = b;
- out[i * 4 + 3] = a;
- } else {
- out[i * 8 + 0] = out[i * 8 + 1] = r;
- out[i * 8 + 2] = out[i * 8 + 3] = g;
- out[i * 8 + 4] = out[i * 8 + 5] = b;
- out[i * 8 + 6] = out[i * 8 + 7] = a;
- }
- }
- return 0; /*no error*/
-}
-
-
-/* put a pixel, given its RGBA16 color, into image of any color 16-bitdepth type */
-static void rgba16ToPixel(unsigned char* out, size_t i, const LodePNGColorMode* mode, unsigned short r, unsigned short g, unsigned short b, unsigned short a)
-{
- if (mode->colortype == LCT_GREY) {
- unsigned short gray = r; /*((unsigned)r + g + b) / 3u;*/
- out[i * 2 + 0] = (gray >> 8) & 255;
- out[i * 2 + 1] = gray & 255;
- } else if (mode->colortype == LCT_RGB) {
- out[i * 6 + 0] = (r >> 8) & 255;
- out[i * 6 + 1] = r & 255;
- out[i * 6 + 2] = (g >> 8) & 255;
- out[i * 6 + 3] = g & 255;
- out[i * 6 + 4] = (b >> 8) & 255;
- out[i * 6 + 5] = b & 255;
- } else if (mode->colortype == LCT_GREY_ALPHA) {
- unsigned short gray = r; /*((unsigned)r + g + b) / 3u;*/
- out[i * 4 + 0] = (gray >> 8) & 255;
- out[i * 4 + 1] = gray & 255;
- out[i * 4 + 2] = (a >> 8) & 255;
- out[i * 4 + 3] = a & 255;
- } else if (mode->colortype == LCT_RGBA) {
- out[i * 8 + 0] = (r >> 8) & 255;
- out[i * 8 + 1] = r & 255;
- out[i * 8 + 2] = (g >> 8) & 255;
- out[i * 8 + 3] = g & 255;
- out[i * 8 + 4] = (b >> 8) & 255;
- out[i * 8 + 5] = b & 255;
- out[i * 8 + 6] = (a >> 8) & 255;
- out[i * 8 + 7] = a & 255;
- }
-}
-
-
-/* Get RGBA8 color of pixel with index i (y * width + x) from the raw image with given color type. */
-static void getPixelColorRGBA8(unsigned char* r, unsigned char* g, unsigned char* b, unsigned char* a, const unsigned char* in, size_t i, const LodePNGColorMode* mode)
-{
- if (mode->colortype == LCT_GREY) {
- if (mode->bitdepth == 8) {
- *r = *g = *b = in[i];
- if (mode->key_defined && *r == mode->key_r) *a = 0;
- else *a = 255;
- } else if (mode->bitdepth == 16) {
- *r = *g = *b = in[i * 2 + 0];
- if (mode->key_defined && 256U * in[i * 2 + 0] + in[i * 2 + 1] == mode->key_r) *a = 0;
- else *a = 255;
- } else {
- unsigned highest = ((1U << mode->bitdepth) - 1U); /* highest possible value for this bit depth */
- size_t j = i * mode->bitdepth;
- unsigned value = readBitsFromReversedStream(&j, in, mode->bitdepth);
- *r = *g = *b = (value * 255) / highest;
- if (mode->key_defined && value == mode->key_r) *a = 0;
- else *a = 255;
- }
- } else if (mode->colortype == LCT_RGB) {
- if (mode->bitdepth == 8) {
- *r = in[i * 3 + 0]; *g = in[i * 3 + 1]; *b = in[i * 3 + 2];
- if (mode->key_defined && *r == mode->key_r && *g == mode->key_g && *b == mode->key_b) *a = 0;
- else *a = 255;
- } else {
- *r = in[i * 6 + 0];
- *g = in[i * 6 + 2];
- *b = in[i * 6 + 4];
- if (mode->key_defined && 256U * in[i * 6 + 0] + in[i * 6 + 1] == mode->key_r
- && 256U * in[i * 6 + 2] + in[i * 6 + 3] == mode->key_g
- && 256U * in[i * 6 + 4] + in[i * 6 + 5] == mode->key_b) *a = 0;
- else *a = 255;
- }
- } else if (mode->colortype == LCT_PALETTE) {
- unsigned index;
- if (mode->bitdepth == 8) index = in[i];
- else {
- size_t j = i * mode->bitdepth;
- index = readBitsFromReversedStream(&j, in, mode->bitdepth);
- }
- /* out of bounds of palette not checked: see lodepng_color_mode_alloc_palette. */
- *r = mode->palette[index * 4 + 0];
- *g = mode->palette[index * 4 + 1];
- *b = mode->palette[index * 4 + 2];
- *a = mode->palette[index * 4 + 3];
- } else if (mode->colortype == LCT_GREY_ALPHA) {
- if (mode->bitdepth == 8) {
- *r = *g = *b = in[i * 2 + 0];
- *a = in[i * 2 + 1];
- } else {
- *r = *g = *b = in[i * 4 + 0];
- *a = in[i * 4 + 2];
- }
- } else if (mode->colortype == LCT_RGBA) {
- if (mode->bitdepth == 8) {
- *r = in[i * 4 + 0];
- *g = in[i * 4 + 1];
- *b = in[i * 4 + 2];
- *a = in[i * 4 + 3];
- } else {
- *r = in[i * 8 + 0];
- *g = in[i * 8 + 2];
- *b = in[i * 8 + 4];
- *a = in[i * 8 + 6];
- }
- }
-}
-
-
-/* Similar to getPixelColorRGBA8, but with the for loops inside the color
- mode test cases, so conversion to the common case of RGBA with 8 bits per
- channel is much faster. buffer must be RGBA with enough memory.*/
-static void getPixelColorsRGBA8(unsigned char* LODEPNG_RESTRICT buffer, size_t numpixels, const unsigned char* LODEPNG_RESTRICT in, const LodePNGColorMode* mode)
-{
- unsigned num_channels = 4;
- size_t i;
- if (mode->colortype == LCT_GREY) {
- if (mode->bitdepth == 8) {
- for (i = 0; i != numpixels; ++i, buffer += num_channels) {
- buffer[0] = buffer[1] = buffer[2] = in[i];
- buffer[3] = 255;
- }
- if (mode->key_defined) {
- buffer -= numpixels * num_channels;
- for (i = 0; i != numpixels; ++i, buffer += num_channels) {
- if(buffer[0] == mode->key_r) buffer[3] = 0;
- }
- }
- } else if (mode->bitdepth == 16) {
- for (i = 0; i != numpixels; ++i, buffer += num_channels) {
- buffer[0] = buffer[1] = buffer[2] = in[i * 2];
- buffer[3] = mode->key_defined && 256U * in[i * 2 + 0] + in[i * 2 + 1] == mode->key_r ? 0 : 255;
- }
- } else {
- unsigned highest = ((1U << mode->bitdepth) - 1U); /* highest possible value for this bit depth */
- size_t j = 0;
- for (i = 0; i != numpixels; ++i, buffer += num_channels) {
- unsigned value = readBitsFromReversedStream(&j, in, mode->bitdepth);
- buffer[0] = buffer[1] = buffer[2] = (value * 255) / highest;
- buffer[3] = mode->key_defined && value == mode->key_r ? 0 : 255;
- }
- }
- } else if (mode->colortype == LCT_RGB) {
- if (mode->bitdepth == 8) {
- for (i = 0; i != numpixels; ++i, buffer += num_channels) {
- //lodepng_memcpy(buffer, &in[i * 3], 3);
- //Convert colortype to LCT_BGR?
- buffer[0] = in[i * 3 + 2];
- buffer[1] = in[i * 3 + 1];
- buffer[2] = in[i * 3 + 0];
- buffer[3] = 255;
- }
- if (mode->key_defined) {
- buffer -= numpixels * num_channels;
- for (i = 0; i != numpixels; ++i, buffer += num_channels) {
- if (buffer[0] == mode->key_r && buffer[1]== mode->key_g && buffer[2] == mode->key_b) buffer[3] = 0;
- }
- }
- } else {
- for (i = 0; i != numpixels; ++i, buffer += num_channels) {
- buffer[0] = in[i * 6 + 0];
- buffer[1] = in[i * 6 + 2];
- buffer[2] = in[i * 6 + 4];
- buffer[3] = mode->key_defined
- && 256U * in[i * 6 + 0] + in[i * 6 + 1] == mode->key_r
- && 256U * in[i * 6 + 2] + in[i * 6 + 3] == mode->key_g
- && 256U * in[i * 6 + 4] + in[i * 6 + 5] == mode->key_b ? 0 : 255;
- }
- }
- } else if (mode->colortype == LCT_PALETTE) {
- if (mode->bitdepth == 8) {
- for (i = 0; i != numpixels; ++i, buffer += num_channels) {
- unsigned index = in[i];
- /* out of bounds of palette not checked: see lodepng_color_mode_alloc_palette. */
- lodepng_memcpy(buffer, &mode->palette[index * 4], 4);
- }
- } else {
- size_t j = 0;
- for (i = 0; i != numpixels; ++i, buffer += num_channels) {
- unsigned index = readBitsFromReversedStream(&j, in, mode->bitdepth);
- /* out of bounds of palette not checked: see lodepng_color_mode_alloc_palette. */
- lodepng_memcpy(buffer, &mode->palette[index * 4], 4);
- }
- }
- } else if (mode->colortype == LCT_GREY_ALPHA) {
- if (mode->bitdepth == 8) {
- for (i = 0; i != numpixels; ++i, buffer += num_channels) {
- buffer[0] = buffer[1] = buffer[2] = in[i * 2 + 0];
- buffer[3] = in[i * 2 + 1];
- }
- } else {
- for (i = 0; i != numpixels; ++i, buffer += num_channels) {
- buffer[0] = buffer[1] = buffer[2] = in[i * 4 + 0];
- buffer[3] = in[i * 4 + 2];
- }
- }
- } else if (mode->colortype == LCT_RGBA) {
- if (mode->bitdepth == 8) {
- lodepng_memcpy(buffer, in, numpixels * 4);
- } else {
- for (i = 0; i != numpixels; ++i, buffer += num_channels) {
- buffer[0] = in[i * 8 + 0];
- buffer[1] = in[i * 8 + 2];
- buffer[2] = in[i * 8 + 4];
- buffer[3] = in[i * 8 + 6];
- }
- }
- }
-}
-
-
-/* Similar to getPixelColorsRGBA8, but with 3-channel RGB output. */
-static void getPixelColorsRGB8(unsigned char* LODEPNG_RESTRICT buffer, size_t numpixels, const unsigned char* LODEPNG_RESTRICT in, const LodePNGColorMode* mode)
-{
- const unsigned num_channels = 3;
- size_t i;
- if (mode->colortype == LCT_GREY) {
- if (mode->bitdepth == 8) {
- for (i = 0; i != numpixels; ++i, buffer += num_channels) {
- buffer[0] = buffer[1] = buffer[2] = in[i];
- }
- } else if (mode->bitdepth == 16) {
- for (i = 0; i != numpixels; ++i, buffer += num_channels) {
- buffer[0] = buffer[1] = buffer[2] = in[i * 2];
- }
- } else {
- unsigned highest = ((1U << mode->bitdepth) - 1U); /* highest possible value for this bit depth */
- size_t j = 0;
- for (i = 0; i != numpixels; ++i, buffer += num_channels) {
- unsigned value = readBitsFromReversedStream(&j, in, mode->bitdepth);
- buffer[0] = buffer[1] = buffer[2] = (value * 255) / highest;
- }
- }
- } else if (mode->colortype == LCT_RGB) {
- if (mode->bitdepth == 8) {
- lodepng_memcpy(buffer, in, numpixels * 3);
- } else {
- for(i = 0; i != numpixels; ++i, buffer += num_channels) {
- buffer[0] = in[i * 6 + 0];
- buffer[1] = in[i * 6 + 2];
- buffer[2] = in[i * 6 + 4];
- }
- }
- } else if (mode->colortype == LCT_PALETTE) {
- if (mode->bitdepth == 8) {
- for (i = 0; i != numpixels; ++i, buffer += num_channels) {
- unsigned index = in[i];
- /* out of bounds of palette not checked: see lodepng_color_mode_alloc_palette. */
- lodepng_memcpy(buffer, &mode->palette[index * 4], 3);
- }
- } else {
- size_t j = 0;
- for (i = 0; i != numpixels; ++i, buffer += num_channels) {
- unsigned index = readBitsFromReversedStream(&j, in, mode->bitdepth);
- /* out of bounds of palette not checked: see lodepng_color_mode_alloc_palette. */
- lodepng_memcpy(buffer, &mode->palette[index * 4], 3);
- }
- }
- } else if (mode->colortype == LCT_GREY_ALPHA) {
- if (mode->bitdepth == 8) {
- for (i = 0; i != numpixels; ++i, buffer += num_channels) {
- buffer[0] = buffer[1] = buffer[2] = in[i * 2 + 0];
- }
- } else {
- for (i = 0; i != numpixels; ++i, buffer += num_channels) {
- buffer[0] = buffer[1] = buffer[2] = in[i * 4 + 0];
- }
- }
- } else if (mode->colortype == LCT_RGBA) {
- if (mode->bitdepth == 8) {
- for(i = 0; i != numpixels; ++i, buffer += num_channels) {
- lodepng_memcpy(buffer, &in[i * 4], 3);
- }
- } else {
- for (i = 0; i != numpixels; ++i, buffer += num_channels) {
- buffer[0] = in[i * 8 + 0];
- buffer[1] = in[i * 8 + 2];
- buffer[2] = in[i * 8 + 4];
- }
- }
- }
-}
-
-
-/* Get RGBA16 color of pixel with index i (y * width + x) from the raw image with
- given color type, but the given color type must be 16-bit itself. */
-static void getPixelColorRGBA16(unsigned short* r, unsigned short* g, unsigned short* b, unsigned short* a, const unsigned char* in, size_t i, const LodePNGColorMode* mode)
-{
- if (mode->colortype == LCT_GREY) {
- *r = *g = *b = 256 * in[i * 2 + 0] + in[i * 2 + 1];
- if (mode->key_defined && 256U * in[i * 2 + 0] + in[i * 2 + 1] == mode->key_r) *a = 0;
- else *a = 65535;
- } else if (mode->colortype == LCT_RGB) {
- *r = 256u * in[i * 6 + 0] + in[i * 6 + 1];
- *g = 256u * in[i * 6 + 2] + in[i * 6 + 3];
- *b = 256u * in[i * 6 + 4] + in[i * 6 + 5];
- if (mode->key_defined
- && 256u * in[i * 6 + 0] + in[i * 6 + 1] == mode->key_r
- && 256u * in[i * 6 + 2] + in[i * 6 + 3] == mode->key_g
- && 256u * in[i * 6 + 4] + in[i * 6 + 5] == mode->key_b) *a = 0;
- else *a = 65535;
- } else if (mode->colortype == LCT_GREY_ALPHA) {
- *r = *g = *b = 256u * in[i * 4 + 0] + in[i * 4 + 1];
- *a = 256u * in[i * 4 + 2] + in[i * 4 + 3];
- } else if (mode->colortype == LCT_RGBA) {
- *r = 256u * in[i * 8 + 0] + in[i * 8 + 1];
- *g = 256u * in[i * 8 + 2] + in[i * 8 + 3];
- *b = 256u * in[i * 8 + 4] + in[i * 8 + 5];
- *a = 256u * in[i * 8 + 6] + in[i * 8 + 7];
- }
-}
-
-/*
- Converts a raw buffer from one color type to another, based on
- LodePNGColorMode structs that describe the input and output color type.
- See the LodePNG documentation for which color conversions are supported.
- The out buffer must have size (w * h * bpp + 7) / 8, where bpp is the bits per pixel
- of the output color type (lodepng_get_bpp).
- For < 8 bpp images, there should not be padding bits at the end of scanlines.
- For 16-bit per channel colors, big endian format is used, like PNG does.
- Return value is a LodePNG error code (0 if all went ok, an error if the conversion isn't supported).
-*/
-static unsigned lodepng_convert(unsigned char* out, const unsigned char* in, const LodePNGColorMode* mode_out, const LodePNGColorMode* mode_in, unsigned w, unsigned h)
-{
- size_t i;
- ColorTree tree;
- size_t numpixels = (size_t)w * (size_t)h;
- unsigned error = 0;
-
- if (mode_in->colortype == LCT_PALETTE && !mode_in->palette) {
- return 107; /* error: must provide palette if input mode is palette */
- }
-
- if (lodepng_color_mode_equal(mode_out, mode_in)) {
- size_t numbytes = lodepng_get_raw_size(w, h, mode_in);
- lodepng_memcpy(out, in, numbytes);
- return 0;
- }
-
- if (mode_out->colortype == LCT_PALETTE) {
- size_t palettesize = mode_out->palettesize;
- const unsigned char* palette = mode_out->palette;
- size_t palsize = (size_t)1u << mode_out->bitdepth;
- /* if the user specified output palette but did not give the values, assume
- they want the values of the input color type (assuming that one is palette).
- Note that we never create a new palette ourselves.*/
- if (palettesize == 0) {
- palettesize = mode_in->palettesize;
- palette = mode_in->palette;
- /* if the input was also palette with same bitdepth, then the color types are also
- equal, so copy literally. This to preserve the exact indices that were in the PNG
- even in case there are duplicate colors in the palette.*/
- if (mode_in->colortype == LCT_PALETTE && mode_in->bitdepth == mode_out->bitdepth) {
- size_t numbytes = lodepng_get_raw_size(w, h, mode_in);
- lodepng_memcpy(out, in, numbytes);
- return 0;
- }
- }
- if (palettesize < palsize) palsize = palettesize;
- color_tree_init(&tree);
- for (i = 0; i != palsize; ++i) {
- const unsigned char* p = &palette[i * 4];
- error = color_tree_add(&tree, p[0], p[1], p[2], p[3], (unsigned)i);
- if (error) break;
- }
- }
-
- if (!error) {
- if (mode_in->bitdepth == 16 && mode_out->bitdepth == 16) {
- for (i = 0; i != numpixels; ++i) {
- unsigned short r = 0, g = 0, b = 0, a = 0;
- getPixelColorRGBA16(&r, &g, &b, &a, in, i, mode_in);
- rgba16ToPixel(out, i, mode_out, r, g, b, a);
- }
- } else if (mode_out->bitdepth == 8 && mode_out->colortype == LCT_RGBA) {
- getPixelColorsRGBA8(out, numpixels, in, mode_in);
- } else if(mode_out->bitdepth == 8 && mode_out->colortype == LCT_RGB) {
- getPixelColorsRGB8(out, numpixels, in, mode_in);
- } else {
- unsigned char r = 0, g = 0, b = 0, a = 0;
- for (i = 0; i != numpixels; ++i) {
- getPixelColorRGBA8(&r, &g, &b, &a, in, i, mode_in);
- error = rgba8ToPixel(out, i, mode_out, &tree, r, g, b, a);
- if (error) break;
- }
- }
- }
-
- if (mode_out->colortype == LCT_PALETTE) {
- color_tree_cleanup(&tree);
- }
-
- return error;
-}
-
-
-/* Paeth predictor, used by PNG filter type 4.
- The parameters are of type short, but should come from unsigned chars; the shorts
- are only needed to make the Paeth calculation correct.
-*/
-static unsigned char paethPredictor(short a, short b, short c)
-{
- short pa = LODEPNG_ABS(b - c);
- short pb = LODEPNG_ABS(a - c);
- short pc = LODEPNG_ABS(a + b - c - c);
- /* return input value associated with smallest of pa, pb, pc (with certain priority if equal) */
- if (pb < pa) { a = b; pa = pb; }
- return (pc < pa) ? c : a;
-}
-
-
-/*shared values used by multiple Adam7 related functions*/
-static const unsigned ADAM7_IX[7] = { 0, 4, 0, 2, 0, 1, 0 }; /*x start values*/
-static const unsigned ADAM7_IY[7] = { 0, 0, 4, 0, 2, 0, 1 }; /*y start values*/
-static const unsigned ADAM7_DX[7] = { 8, 8, 4, 4, 2, 2, 1 }; /*x delta values*/
-static const unsigned ADAM7_DY[7] = { 8, 8, 8, 4, 4, 2, 2 }; /*y delta values*/
-
-/* Outputs various dimensions and positions in the image related to the Adam7 reduced images.
- passw: output containing the width of the 7 passes
- passh: output containing the height of the 7 passes
- filter_passstart: output containing the index of the start and end of each
- reduced image with filter bytes
- padded_passstart: output containing the index of the start and end of each
- reduced image without filter bytes but with padded scanlines
- passstart: output containing the index of the start and end of each reduced
- image without padding between scanlines, but still padding between the images
- w, h: width and height of non-interlaced image
- bpp: bits per pixel
- "padded" is only relevant if bpp is less than 8 and a scanline or image does not
- end at a full byte */
-static void Adam7_getpassvalues(unsigned passw[7], unsigned passh[7], size_t filter_passstart[8], size_t padded_passstart[8], size_t passstart[8], unsigned w, unsigned h, unsigned bpp)
-{
- /* the passstart values have 8 values: the 8th one indicates the byte after the end of the 7th (= last) pass */
- unsigned i;
-
- /* calculate width and height in pixels of each pass */
- for (i = 0; i != 7; ++i) {
- passw[i] = (w + ADAM7_DX[i] - ADAM7_IX[i] - 1) / ADAM7_DX[i];
- passh[i] = (h + ADAM7_DY[i] - ADAM7_IY[i] - 1) / ADAM7_DY[i];
- if(passw[i] == 0) passh[i] = 0;
- if(passh[i] == 0) passw[i] = 0;
- }
-
- filter_passstart[0] = padded_passstart[0] = passstart[0] = 0;
- for (i = 0; i != 7; ++i) {
- /* if passw[i] is 0, it's 0 bytes, not 1 (no filtertype-byte) */
- filter_passstart[i + 1] = filter_passstart[i]
- + ((passw[i] && passh[i]) ? passh[i] * (1u + (passw[i] * bpp + 7u) / 8u) : 0);
- /* bits padded if needed to fill full byte at end of each scanline */
- padded_passstart[i + 1] = padded_passstart[i] + passh[i] * ((passw[i] * bpp + 7u) / 8u);
- /* only padded at end of reduced image */
- passstart[i + 1] = passstart[i] + (passh[i] * passw[i] * bpp + 7u) / 8u;
- }
-}
-
-
-/* ////////////////////////////////////////////////////////////////////////// */
-/* / PNG Decoder / */
-/* ////////////////////////////////////////////////////////////////////////// */
-
-static unsigned unfilterScanline(unsigned char* recon, const unsigned char* scanline, const unsigned char* precon, size_t bytewidth, unsigned char filterType, size_t length)
-{
- /* For PNG filter method 0
- unfilters a PNG image scanline by scanline. When the pixels are smaller than 1 byte,
- the filter works byte by byte (bytewidth = 1)
- precon is the previous unfiltered scanline, recon the result, scanline the current one
- the incoming scanlines do NOT include the filtertype byte, that one is given in the parameter filterType instead
- recon and scanline MAY be the same memory address! precon must be disjoint. */
-
- size_t i;
- switch (filterType) {
- case 0: {
- if (bytewidth == 4) {
- for (i = 0; i < length; i += 4) {
- //RGBA -> BGRA
- recon[i + 0] = scanline[i + 2];
- recon[i + 1] = scanline[i + 1];
- recon[i + 2] = scanline[i + 0];
- recon[i + 3] = scanline[i + 3];
- }
- } else {
- for (i = 0; i != length; ++i) recon[i] = scanline[i];
- }
- break;
- }
- case 1: {
- for (i = 0; i != bytewidth; ++i) recon[i] = scanline[i];
- for (i = bytewidth; i < length; ++i) recon[i] = scanline[i] + recon[i - bytewidth];
- break;
- }
- case 2: {
- if (precon) {
- for (i = 0; i != length; ++i) recon[i] = scanline[i] + precon[i];
- } else {
- for (i = 0; i != length; ++i) recon[i] = scanline[i];
- }
- break;
- }
- case 3: {
- if (precon) {
- for (i = 0; i != bytewidth; ++i) recon[i] = scanline[i] + (precon[i] >> 1u);
- for (i = bytewidth; i < length; ++i) recon[i] = scanline[i] + ((recon[i - bytewidth] + precon[i]) >> 1u);
- } else {
- for (i = 0; i != bytewidth; ++i) recon[i] = scanline[i];
- for (i = bytewidth; i < length; ++i) recon[i] = scanline[i] + (recon[i - bytewidth] >> 1u);
- }
- break;
- }
- case 4: {
- if (precon) {
- for (i = 0; i != bytewidth; ++i) {
- recon[i] = (scanline[i] + precon[i]); /*paethPredictor(0, precon[i], 0) is always precon[i]*/
- }
-
- /* Unroll independent paths of the paeth predictor. A 6x and 8x version would also be possible but that
- adds too much code. Whether this actually speeds anything up at all depends on compiler and settings. */
- if (bytewidth >= 4) {
- for (; i + 3 < length; i += 4) {
- size_t j = i - bytewidth;
- unsigned char s0 = scanline[i + 0], s1 = scanline[i + 1], s2 = scanline[i + 2], s3 = scanline[i + 3];
- unsigned char r0 = recon[j + 0], r1 = recon[j + 1], r2 = recon[j + 2], r3 = recon[j + 3];
- unsigned char p0 = precon[i + 0], p1 = precon[i + 1], p2 = precon[i + 2], p3 = precon[i + 3];
- unsigned char q0 = precon[j + 0], q1 = precon[j + 1], q2 = precon[j + 2], q3 = precon[j + 3];
- recon[i + 0] = s0 + paethPredictor(r0, p0, q0);
- recon[i + 1] = s1 + paethPredictor(r1, p1, q1);
- recon[i + 2] = s2 + paethPredictor(r2, p2, q2);
- recon[i + 3] = s3 + paethPredictor(r3, p3, q3);
- }
- } else if (bytewidth >= 3) {
- for (; i + 2 < length; i += 3) {
- size_t j = i - bytewidth;
- unsigned char s0 = scanline[i + 0], s1 = scanline[i + 1], s2 = scanline[i + 2];
- unsigned char r0 = recon[j + 0], r1 = recon[j + 1], r2 = recon[j + 2];
- unsigned char p0 = precon[i + 0], p1 = precon[i + 1], p2 = precon[i + 2];
- unsigned char q0 = precon[j + 0], q1 = precon[j + 1], q2 = precon[j + 2];
- recon[i + 0] = s0 + paethPredictor(r0, p0, q0);
- recon[i + 1] = s1 + paethPredictor(r1, p1, q1);
- recon[i + 2] = s2 + paethPredictor(r2, p2, q2);
- }
- } else if (bytewidth >= 2) {
- for (; i + 1 < length; i += 2) {
- size_t j = i - bytewidth;
- unsigned char s0 = scanline[i + 0], s1 = scanline[i + 1];
- unsigned char r0 = recon[j + 0], r1 = recon[j + 1];
- unsigned char p0 = precon[i + 0], p1 = precon[i + 1];
- unsigned char q0 = precon[j + 0], q1 = precon[j + 1];
- recon[i + 0] = s0 + paethPredictor(r0, p0, q0);
- recon[i + 1] = s1 + paethPredictor(r1, p1, q1);
- }
- }
-
- for (; i != length; ++i) {
- recon[i] = (scanline[i] + paethPredictor(recon[i - bytewidth], precon[i], precon[i - bytewidth]));
- }
- } else {
- for (i = 0; i != bytewidth; ++i) {
- recon[i] = scanline[i];
- }
- for (i = bytewidth; i < length; ++i) {
- /* paethPredictor(recon[i - bytewidth], 0, 0) is always recon[i - bytewidth] */
- recon[i] = (scanline[i] + recon[i - bytewidth]);
- }
- }
- break;
- }
- default: return 36; /* error: invalid filter type given */
- }
- return 0;
-}
-
-
-static unsigned unfilter(unsigned char* out, const unsigned char* in, unsigned w, unsigned h, unsigned bpp)
-{
- /* For PNG filter method 0
- this function unfilters a single image (e.g. without interlacing this is called once, with Adam7 seven times)
- out must have enough bytes allocated already, in must have the scanlines + 1 filtertype byte per scanline
- w and h are image dimensions or dimensions of reduced image, bpp is bits per pixel
- in and out are allowed to be the same memory address (but aren't the same size since in has the extra filter bytes) */
-
- unsigned y;
- unsigned char* prevline = 0;
-
- /* bytewidth is used for filtering; it is 1 when bpp < 8, and the number of bytes per pixel otherwise */
- size_t bytewidth = (bpp + 7u) / 8u;
- /* the width of a scanline in bytes, not including the filter type */
- size_t linebytes = lodepng_get_raw_size_idat(w, 1, bpp) - 1u;
-
- for (y = 0; y < h; ++y) {
- size_t outindex = linebytes * y;
- size_t inindex = (1 + linebytes) * y; /* the extra filterbyte added to each row */
- unsigned char filterType = in[inindex];
- CERROR_TRY_RETURN(unfilterScanline(&out[outindex], &in[inindex + 1], prevline, bytewidth, filterType, linebytes));
- prevline = &out[outindex];
- }
-
- return 0;
-}
-
-/* in: Adam7 interlaced image, with no padding bits between scanlines, but between
- reduced images so that each reduced image starts at a byte.
- out: the same pixels, but re-ordered so that they're now a non-interlaced image with size w*h
- bpp: bits per pixel
- out has the following size in bits: w * h * bpp.
- in is possibly bigger due to padding bits between reduced images.
- out must be big enough AND, in the current implementation, must be 0 everywhere if bpp < 8
- (because that's likely a little bit faster)
- NOTE: comments about padding bits are only relevant if bpp < 8 */
-static void Adam7_deinterlace(unsigned char* out, const unsigned char* in, unsigned w, unsigned h, unsigned bpp)
-{
- unsigned passw[7], passh[7];
- size_t filter_passstart[8], padded_passstart[8], passstart[8];
- unsigned i;
-
- Adam7_getpassvalues(passw, passh, filter_passstart, padded_passstart, passstart, w, h, bpp);
-
- if (bpp >= 8) {
- for(i = 0; i != 7; ++i) {
- unsigned x, y, b;
- size_t bytewidth = bpp / 8u;
- for (y = 0; y < passh[i]; ++y)
- for (x = 0; x < passw[i]; ++x) {
- size_t pixelinstart = passstart[i] + (y * passw[i] + x) * bytewidth;
- size_t pixeloutstart = ((ADAM7_IY[i] + (size_t)y * ADAM7_DY[i]) * (size_t)w + ADAM7_IX[i] + (size_t)x * ADAM7_DX[i]) * bytewidth;
- for (b = 0; b < bytewidth; ++b) {
- out[pixeloutstart + b] = in[pixelinstart + b];
- }
- }
- }
- } else /* bpp < 8: Adam7 with pixels < 8 bit is a bit trickier: with bit pointers */ {
- for (i = 0; i != 7; ++i) {
- unsigned x, y, b;
- unsigned ilinebits = bpp * passw[i];
- unsigned olinebits = bpp * w;
- size_t obp, ibp; /* bit pointers (for out and in buffer) */
- for (y = 0; y < passh[i]; ++y)
- for (x = 0; x < passw[i]; ++x) {
- ibp = (8 * passstart[i]) + (y * ilinebits + x * bpp);
- obp = (ADAM7_IY[i] + (size_t)y * ADAM7_DY[i]) * olinebits + (ADAM7_IX[i] + (size_t)x * ADAM7_DX[i]) * bpp;
- for (b = 0; b < bpp; ++b) {
- unsigned char bit = readBitFromReversedStream(&ibp, in);
- setBitOfReversedStream(&obp, out, bit);
- }
- }
- }
- }
-}
-
-
-static void removePaddingBits(unsigned char* out, const unsigned char* in, size_t olinebits, size_t ilinebits, unsigned h)
-{
- /* After filtering there are still padding bits if a scanline's bit length is not a multiple of 8. They need
- to be removed (except at the last scanline of the (Adam7-reduced) image) before working with pure image buffers
- for the Adam7 code, the color convert code and the output to the user.
- in and out are allowed to be the same buffer; in may also point higher but still overlap. in must
- have >= ilinebits*h bits, out must have >= olinebits*h bits, and olinebits must be <= ilinebits
- also used to move bits after earlier such operations happened, e.g. in a sequence of reduced images from Adam7
- only useful if (ilinebits - olinebits) is a value in the range 1..7 */
- unsigned y;
- size_t diff = ilinebits - olinebits;
- size_t ibp = 0, obp = 0; /*input and output bit pointers*/
- for (y = 0; y < h; ++y) {
- size_t x;
- for (x = 0; x < olinebits; ++x) {
- unsigned char bit = readBitFromReversedStream(&ibp, in);
- setBitOfReversedStream(&obp, out, bit);
- }
- ibp += diff;
- }
-}
-
-
-/* out must be a buffer big enough to contain the full image, and in must contain the full decompressed data from
- the IDAT chunks (with filter index bytes and possible padding bits)
- return value is error */
-static unsigned postProcessScanlines(unsigned char* out, unsigned char* in, unsigned w, unsigned h, const LodePNGInfo* info_png)
-{
- /* This function converts the filtered-padded-interlaced data into pure 2D image buffer with the PNG's colortype.
- Steps:
- *) if no Adam7: 1) unfilter 2) remove padding bits (= possible extra bits per scanline if bpp < 8)
- *) if adam7: 1) 7x unfilter 2) 7x remove padding bits 3) Adam7_deinterlace
- NOTE: the in buffer will be overwritten with intermediate data! */
- unsigned bpp = lodepng_get_bpp_lct(info_png->color.colortype, info_png->color.bitdepth);
- if (bpp == 0) return 31; /* error: invalid colortype */
-
- if (info_png->interlace_method == 0) {
- if (bpp < 8 && w * bpp != ((w * bpp + 7u) / 8u) * 8u) {
- CERROR_TRY_RETURN(unfilter(in, in, w, h, bpp));
- removePaddingBits(out, in, w * bpp, ((w * bpp + 7u) / 8u) * 8u, h);
- }
- /* we can immediately unfilter into the out buffer, no other steps needed */
- else CERROR_TRY_RETURN(unfilter(out, in, w, h, bpp));
- } else /* interlace_method is 1 (Adam7) */ {
- unsigned passw[7], passh[7]; size_t filter_passstart[8], padded_passstart[8], passstart[8];
- unsigned i;
-
- Adam7_getpassvalues(passw, passh, filter_passstart, padded_passstart, passstart, w, h, bpp);
-
- for (i = 0; i != 7; ++i) {
- CERROR_TRY_RETURN(unfilter(&in[padded_passstart[i]], &in[filter_passstart[i]], passw[i], passh[i], bpp));
- /* TODO: possible efficiency improvement: if in this reduced image the bits fit nicely in 1 scanline,
- move bytes instead of bits or move not at all */
- if (bpp < 8) {
- /* remove padding bits in scanlines; after this there still may be padding
- bits between the different reduced images: each reduced image still starts nicely at a byte */
- removePaddingBits(&in[passstart[i]], &in[padded_passstart[i]], passw[i] * bpp, ((passw[i] * bpp + 7u) / 8u) * 8u, passh[i]);
- }
- }
- Adam7_deinterlace(out, in, w, h, bpp);
- }
- return 0;
-}
-
-
-static unsigned readChunk_PLTE(LodePNGColorMode* color, const unsigned char* data, size_t chunkLength)
-{
- unsigned pos = 0, i;
- color->palettesize = chunkLength / 3u;
- if (color->palettesize == 0 || color->palettesize > 256) return 38; /* error: palette too small or big */
- lodepng_color_mode_alloc_palette(color);
- if (!color->palette && color->palettesize) {
- color->palettesize = 0;
- return 83; /* alloc fail */
- }
-
- for (i = 0; i != color->palettesize; ++i) {
- color->palette[4 * i + 0] = data[pos++]; /*R*/
- color->palette[4 * i + 1] = data[pos++]; /*G*/
- color->palette[4 * i + 2] = data[pos++]; /*B*/
- color->palette[4 * i + 3] = 255; /*alpha*/
- }
-
- return 0; /* OK */
-}
-
-
-static unsigned readChunk_tRNS(LodePNGColorMode* color, const unsigned char* data, size_t chunkLength)
-{
- unsigned i;
- if (color->colortype == LCT_PALETTE) {
- /* error: more alpha values given than there are palette entries */
- if (chunkLength > color->palettesize) return 39;
-
- for (i = 0; i != chunkLength; ++i) color->palette[4 * i + 3] = data[i];
- } else if (color->colortype == LCT_GREY) {
- /* error: this chunk must be 2 bytes for grayscale image */
- if (chunkLength != 2) return 30;
-
- color->key_defined = 1;
- color->key_r = color->key_g = color->key_b = 256u * data[0] + data[1];
- } else if (color->colortype == LCT_RGB) {
- /* error: this chunk must be 6 bytes for RGB image */
- if (chunkLength != 6) return 41;
-
- color->key_defined = 1;
- color->key_r = 256u * data[0] + data[1];
- color->key_g = 256u * data[2] + data[3];
- color->key_b = 256u * data[4] + data[5];
- }
- else return 42; /* error: tRNS chunk not allowed for other color models */
-
- return 0; /* OK */
-}
-
-
-/* read a PNG, the result will be in the same color type as the PNG (hence "generic") */
-static void decodeGeneric(unsigned char** out, unsigned* w, unsigned* h, LodePNGState* state, const unsigned char* in, size_t insize)
-{
- unsigned char IEND = 0;
- const unsigned char* chunk;
- unsigned char* idat; /*the data from idat chunks, zlib compressed*/
- size_t idatsize = 0;
- unsigned char* scanlines = 0;
- size_t scanlines_size = 0, expected_size = 0;
- size_t outsize = 0;
-
- /* set safe output values in case an error happens */
- *out = 0;
- *w = *h = 0;
-
- state->error = lodepng_inspect(w, h, state, in, insize); /*reads header and resets other parameters in state->info_png*/
- if (state->error) return;
-
- if (lodepng_pixel_overflow(*w, *h, &state->info_png.color, &state->info_raw)) {
- CERROR_RETURN(state->error, 92); /*overflow possible due to amount of pixels*/
- }
-
- /*the input filesize is a safe upper bound for the sum of idat chunks size*/
- idat = (unsigned char*)malloc(insize);
- if (!idat) CERROR_RETURN(state->error, 83); /*alloc fail*/
-
- chunk = &in[33]; /*first byte of the first chunk after the header*/
-
- /*loop through the chunks, ignoring unknown chunks and stopping at IEND chunk.
- IDAT data is accumulated in the idat buffer*/
- while (!IEND && !state->error) {
- unsigned chunkLength;
- const unsigned char* data; /*the data in the chunk*/
-
- /*error: size of the in buffer too small to contain next chunk*/
- if ((size_t)((chunk - in) + 12) > insize || chunk < in) {
- if (state->decoder.ignore_end) break; /*other errors may still happen though*/
- CERROR_BREAK(state->error, 30);
- }
-
- /*length of the data of the chunk, excluding the length bytes, chunk type and CRC bytes*/
- chunkLength = lodepng_chunk_length(chunk);
- /*error: chunk length larger than the max PNG chunk size*/
- if (chunkLength > 2147483647) {
- if (state->decoder.ignore_end) break; /*other errors may still happen though*/
- CERROR_BREAK(state->error, 63);
- }
-
- if ((size_t)((chunk - in) + chunkLength + 12) > insize || (chunk + chunkLength + 12) < in) {
- CERROR_BREAK(state->error, 64); /*error: size of the in buffer too small to contain next chunk*/
- }
-
- data = lodepng_chunk_data_const(chunk);
-
- /*for unknown chunk order*/
- //unsigned unknown = 0;
-
- /*IDAT chunk, containing compressed image data*/
- if (lodepng_chunk_type_equals(chunk, "IDAT")) {
- size_t newsize;
- if (lodepng_addofl(idatsize, chunkLength, &newsize)) CERROR_BREAK(state->error, 95);
- if (newsize > insize) CERROR_BREAK(state->error, 95);
- lodepng_memcpy(idat + idatsize, data, chunkLength);
- idatsize += chunkLength;
- } else if (lodepng_chunk_type_equals(chunk, "IEND")) {
- /*IEND chunk*/
- IEND = 1;
- } else if (lodepng_chunk_type_equals(chunk, "PLTE")) {
- /*palette chunk (PLTE)*/
- state->error = readChunk_PLTE(&state->info_png.color, data, chunkLength);
- if (state->error) break;
- } else if (lodepng_chunk_type_equals(chunk, "tRNS")) {
- /*palette transparency chunk (tRNS). Even though this one is an ancillary chunk, it is still compiled
- in without 'LODEPNG_COMPILE_ANCILLARY_CHUNKS' because it contains essential color information that
- affects the alpha channel of pixels. */
- state->error = readChunk_tRNS(&state->info_png.color, data, chunkLength);
- if (state->error) break;
- } else /*it's not an implemented chunk type, so ignore it: skip over the data*/ {
- /*error: unknown critical chunk (5th bit of first byte of chunk type is 0)*/
- if (!state->decoder.ignore_critical && !lodepng_chunk_ancillary(chunk)) {
- CERROR_BREAK(state->error, 69);
- }
- //unknown = 1;
- }
-
-#if 0 //We don't use CRC
- if (!state->decoder.ignore_crc && !unknown) /*check CRC if wanted, only on known chunk types*/ {
- if (lodepng_chunk_check_crc(chunk)) CERROR_BREAK(state->error, 57); /*invalid CRC*/
- }
-#endif
- if (!IEND) chunk = lodepng_chunk_next_const(chunk, in + insize);
- }
-
- if (state->info_png.color.colortype == LCT_PALETTE && !state->info_png.color.palette) {
- state->error = 106; /* error: PNG file must have PLTE chunk if color type is palette */
- }
-
- if (!state->error) {
- /*predict output size, to allocate exact size for output buffer to avoid more dynamic allocation.
- If the decompressed size does not match the prediction, the image must be corrupt.*/
- if (state->info_png.interlace_method == 0) {
- size_t bpp = lodepng_get_bpp_lct(state->info_png.color.colortype, state->info_png.color.bitdepth);
- expected_size = lodepng_get_raw_size_idat(*w, *h, bpp);
- } else {
- size_t bpp = lodepng_get_bpp_lct(state->info_png.color.colortype, state->info_png.color.bitdepth);
- /*Adam-7 interlaced: expected size is the sum of the 7 sub-images sizes*/
- expected_size = 0;
- expected_size += lodepng_get_raw_size_idat((*w + 7) >> 3, (*h + 7) >> 3, bpp);
- if (*w > 4) expected_size += lodepng_get_raw_size_idat((*w + 3) >> 3, (*h + 7) >> 3, bpp);
- expected_size += lodepng_get_raw_size_idat((*w + 3) >> 2, (*h + 3) >> 3, bpp);
- if (*w > 2) expected_size += lodepng_get_raw_size_idat((*w + 1) >> 2, (*h + 3) >> 2, bpp);
- expected_size += lodepng_get_raw_size_idat((*w + 1) >> 1, (*h + 1) >> 2, bpp);
- if (*w > 1) expected_size += lodepng_get_raw_size_idat((*w + 0) >> 1, (*h + 1) >> 1, bpp);
- expected_size += lodepng_get_raw_size_idat((*w + 0), (*h + 0) >> 1, bpp);
- }
- state->error = zlib_decompress(&scanlines, &scanlines_size, expected_size, idat, idatsize, &state->decoder.zlibsettings);
- }
-
- if (!state->error && scanlines_size != expected_size) state->error = 91; /*decompressed size doesn't match prediction*/
- free(idat);
-
- if (!state->error) {
- outsize = lodepng_get_raw_size(*w, *h, &state->info_png.color);
- *out = (unsigned char*)malloc(outsize);
- if (!*out) state->error = 83; /*alloc fail*/
- }
- if (!state->error) {
- lodepng_memset(*out, 0, outsize);
- state->error = postProcessScanlines(*out, scanlines, *w, *h, &state->info_png);
- }
- free(scanlines);
-}
-
-
-static void lodepng_decoder_settings_init(LodePNGDecoderSettings* settings)
-{
- settings->color_convert = 1;
- settings->ignore_crc = 0;
- settings->ignore_critical = 0;
- settings->ignore_end = 0;
- lodepng_decompress_settings_init(&settings->zlibsettings);
-}
-
-
-/************************************************************************/
-/* External Class Implementation */
-/************************************************************************/
-
-/*read the information from the header and store it in the LodePNGInfo. return value is error*/
-unsigned lodepng_inspect(unsigned* w, unsigned* h, LodePNGState* state, const unsigned char* in, size_t insize)
-{
- unsigned width, height;
- LodePNGInfo* info = &state->info_png;
- if (insize == 0 || in == 0) {
- CERROR_RETURN_ERROR(state->error, 48); /*error: the given data is empty*/
- }
- if (insize < 33) {
- CERROR_RETURN_ERROR(state->error, 27); /*error: the data length is smaller than the length of a PNG header*/
- }
-
- /* when decoding a new PNG image, make sure all parameters created after previous decoding are reset */
- /* TODO: remove this. One should use a new LodePNGState for new sessions */
- lodepng_info_cleanup(info);
- lodepng_info_init(info);
-
- if (in[0] != 137 || in[1] != 80 || in[2] != 78 || in[3] != 71 || in[4] != 13 || in[5] != 10 || in[6] != 26 || in[7] != 10) {
- CERROR_RETURN_ERROR(state->error, 28); /*error: the first 8 bytes are not the correct PNG signature*/
- }
- if (lodepng_chunk_length(in + 8) != 13) {
- CERROR_RETURN_ERROR(state->error, 94); /*error: header size must be 13 bytes*/
- }
- if (!lodepng_chunk_type_equals(in + 8, "IHDR")) {
- CERROR_RETURN_ERROR(state->error, 29); /*error: it doesn't start with an IHDR chunk!*/
- }
-
- /*read the values given in the header*/
- width = lodepng_read32bitInt(&in[16]);
- height = lodepng_read32bitInt(&in[20]);
- /*TODO: remove the undocumented feature that allows giving null pointers for width or height*/
- if (w) *w = width;
- if (h) *h = height;
- info->color.bitdepth = in[24];
- info->color.colortype = (LodePNGColorType)in[25];
- info->compression_method = in[26];
- info->filter_method = in[27];
- info->interlace_method = in[28];
-
- /*errors returned only after the parsing so other values are still output*/
-
- /*error: invalid image size*/
- if (width == 0 || height == 0) CERROR_RETURN_ERROR(state->error, 93);
- /*error: invalid colortype or bitdepth combination*/
- state->error = checkColorValidity(info->color.colortype, info->color.bitdepth);
- if (state->error) return state->error;
- /*error: only compression method 0 is allowed in the specification*/
- if (info->compression_method != 0) CERROR_RETURN_ERROR(state->error, 32);
- /*error: only filter method 0 is allowed in the specification*/
- if (info->filter_method != 0) CERROR_RETURN_ERROR(state->error, 33);
- /*error: only interlace methods 0 and 1 exist in the specification*/
- if (info->interlace_method > 1) CERROR_RETURN_ERROR(state->error, 34);
-
-#if 0 //thorvg doesn't use CRC
- if (!state->decoder.ignore_crc) {
- unsigned CRC = lodepng_read32bitInt(&in[29]);
- unsigned checksum = lodepng_crc32(&in[12], 17);
- if (CRC != checksum) {
- CERROR_RETURN_ERROR(state->error, 57); /*invalid CRC*/
- }
- }
-#endif
- return state->error;
-}
-
-
-unsigned lodepng_decode(unsigned char** out, unsigned* w, unsigned* h, LodePNGState* state, const unsigned char* in, size_t insize)
-{
- *out = 0;
- decodeGeneric(out, w, h, state, in, insize);
- if (state->error) return state->error;
- if (!state->decoder.color_convert || lodepng_color_mode_equal(&state->info_raw, &state->info_png.color)) {
- /*same color type, no copying or converting of data needed*/
- /*store the info_png color settings in info_raw so that, for the end user, info_raw still reflects
- what colortype the raw image has*/
- if (!state->decoder.color_convert) {
- state->error = lodepng_color_mode_copy(&state->info_raw, &state->info_png.color);
- if (state->error) return state->error;
- }
- } else { /*color conversion needed*/
- unsigned char* data = *out;
- size_t outsize;
-
- /*TODO: check if this works according to the statement in the documentation: "The converter can convert
- from grayscale input color type, to 8-bit grayscale or grayscale with alpha"*/
- if (!(state->info_raw.colortype == LCT_RGB || state->info_raw.colortype == LCT_RGBA) && !(state->info_raw.bitdepth == 8)) {
- return 56; /*unsupported color mode conversion*/
- }
-
- outsize = lodepng_get_raw_size(*w, *h, &state->info_raw);
- *out = (unsigned char*)malloc(outsize);
- if (!(*out)) {
- state->error = 83; /*alloc fail*/
- }
- else state->error = lodepng_convert(*out, data, &state->info_raw, &state->info_png.color, *w, *h);
- free(data);
- }
- return state->error;
-}
-
-
-void lodepng_state_init(LodePNGState* state)
-{
- lodepng_decoder_settings_init(&state->decoder);
- lodepng_color_mode_init(&state->info_raw);
- lodepng_info_init(&state->info_png);
- state->error = 1;
-}
-
-
-void lodepng_state_cleanup(LodePNGState* state)
-{
- lodepng_color_mode_cleanup(&state->info_raw);
- lodepng_info_cleanup(&state->info_png);
-}
diff --git a/thirdparty/thorvg/src/loaders/png/tvgLodePng.h b/thirdparty/thorvg/src/loaders/png/tvgLodePng.h
deleted file mode 100644
index 0cdac7cbea..0000000000
--- a/thirdparty/thorvg/src/loaders/png/tvgLodePng.h
+++ /dev/null
@@ -1,174 +0,0 @@
-/*
- * Copyright (c) 2020 - 2022 Samsung Electronics Co., Ltd. All rights reserved.
-
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
-
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
-
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-/*
- LodePNG version 20200306
-
- Copyright (c) 2005-2020 Lode Vandevenne
-
- This software is provided 'as-is', without any express or implied
- warranty. In no event will the authors be held liable for any damages
- arising from the use of this software.
-
- Permission is granted to anyone to use this software for any purpose,
- including commercial applications, and to alter it and redistribute it
- freely, subject to the following restrictions:
-
- 1. The origin of this software must not be misrepresented; you must not
- claim that you wrote the original software. If you use this software
- in a product, an acknowledgment in the product documentation would be
- appreciated but is not required.
-
- 2. Altered source versions must be plainly marked as such, and must not be
- misrepresented as being the original software.
-
- 3. This notice may not be removed or altered from any source
- distribution.
-*/
-
-#ifndef _TVG_LODEPNG_H_
-#define _TVG_LODEPNG_H_
-
-#include <stddef.h>
-
-/*The PNG color types (also used for raw image).*/
-enum LodePNGColorType
-{
- LCT_GREY = 0, /*grayscale: 1,2,4,8,16 bit*/
- LCT_RGB = 2, /*RGB: 8,16 bit*/
- LCT_PALETTE = 3, /*palette: 1,2,4,8 bit*/
- LCT_GREY_ALPHA = 4, /*grayscale with alpha: 8,16 bit*/
- LCT_RGBA = 6, /*RGB with alpha: 8,16 bit*/
- /*LCT_MAX_OCTET_VALUE lets the compiler allow this enum to represent any invalid
- byte value from 0 to 255 that could be present in an invalid PNG file header. Do
- not use, compare with or set the name LCT_MAX_OCTET_VALUE, instead either use
- the valid color type names above, or numeric values like 1 or 7 when checking for
- particular disallowed color type byte values, or cast to integer to print it.*/
- LCT_MAX_OCTET_VALUE = 255
-};
-
-/*Settings for zlib decompression*/
-struct LodePNGDecompressSettings
-{
- /* Check LodePNGDecoderSettings for more ignorable errors such as ignore_crc */
- unsigned ignore_adler32; /*if 1, continue and don't give an error message if the Adler32 checksum is corrupted*/
- unsigned ignore_nlen; /*ignore complement of len checksum in uncompressed blocks*/
-
- /*use custom zlib decoder instead of built in one (default: null)*/
- unsigned (*custom_zlib)(unsigned char**, size_t*, const unsigned char*, size_t, const LodePNGDecompressSettings*);
- /*use custom deflate decoder instead of built in one (default: null) if custom_zlib is not null, custom_inflate is ignored (the zlib format uses deflate)*/
- unsigned (*custom_inflate)(unsigned char**, size_t*, const unsigned char*, size_t, const LodePNGDecompressSettings*);
-
- const void* custom_context; /*optional custom settings for custom functions*/
-};
-
-/*
- Color mode of an image. Contains all information required to decode the pixel
- bits to RGBA colors. This information is the same as used in the PNG file
- format, and is used both for PNG and raw image data in LodePNG.
-*/
-struct LodePNGColorMode
-{
- /*header (IHDR)*/
- LodePNGColorType colortype; /*color type, see PNG standard or documentation further in this header file*/
- unsigned bitdepth; /*bits per sample, see PNG standard or documentation further in this header file*/
-
- /*
- palette (PLTE and tRNS)
-
- Dynamically allocated with the colors of the palette, including alpha.
- This field may not be allocated directly, use lodepng_color_mode_init first,
- then lodepng_palette_add per color to correctly initialize it (to ensure size
- of exactly 1024 bytes).
-
- The alpha channels must be set as well, set them to 255 for opaque images.
-
- When decoding, by default you can ignore this palette, since LodePNG already
- fills the palette colors in the pixels of the raw RGBA output.
-
- The palette is only supported for color type 3.
- */
- unsigned char* palette; /*palette in RGBARGBA... order. Must be either 0, or when allocated must have 1024 bytes*/
- size_t palettesize; /*palette size in number of colors (amount of used bytes is 4 * palettesize)*/
-
- /*
- transparent color key (tRNS)
-
- This color uses the same bit depth as the bitdepth value in this struct, which can be 1-bit to 16-bit.
- For grayscale PNGs, r, g and b will all three be set to the same value.
-
- When decoding, by default you can ignore this information, since LodePNG sets
- pixels with this key to transparent already in the raw RGBA output.
-
- The color key is only supported for color types 0 and 2.
- */
- unsigned key_defined; /*is a transparent color key given? 0 = false, 1 = true*/
- unsigned key_r; /*red/grayscale component of color key*/
- unsigned key_g; /*green component of color key*/
- unsigned key_b; /*blue component of color key*/
-};
-
-/*Information about the PNG image, except pixels, width and height.*/
-struct LodePNGInfo
-{
- /*header (IHDR), palette (PLTE) and transparency (tRNS) chunks*/
- unsigned compression_method;/*compression method of the original file. Always 0.*/
- unsigned filter_method; /*filter method of the original file*/
- unsigned interlace_method; /*interlace method of the original file: 0=none, 1=Adam7*/
- LodePNGColorMode color; /*color type and bits, palette and transparency of the PNG file*/
-};
-
-/*
- Settings for the decoder. This contains settings for the PNG and the Zlib
- decoder, but not the Info settings from the Info structs.
-*/
-struct LodePNGDecoderSettings
-{
- LodePNGDecompressSettings zlibsettings; /*in here is the setting to ignore Adler32 checksums*/
-
- /* Check LodePNGDecompressSettings for more ignorable errors such as ignore_adler32 */
- unsigned ignore_crc; /*ignore CRC checksums*/
- unsigned ignore_critical; /*ignore unknown critical chunks*/
- unsigned ignore_end; /*ignore issues at end of file if possible (missing IEND chunk, too large chunk, ...)*/
- /* TODO: make a system involving warnings with levels and a strict mode instead. Other potentially recoverable
- errors: srgb rendering intent value, size of content of ancillary chunks, more than 79 characters for some
- strings, placement/combination rules for ancillary chunks, crc of unknown chunks, allowed characters
- in string keys, etc... */
-
- unsigned color_convert; /*whether to convert the PNG to the color type you want. Default: yes*/
-};
-
-/*The settings, state and information for extended encoding and decoding.*/
-struct LodePNGState
-{
- LodePNGDecoderSettings decoder; /*the decoding settings*/
- LodePNGColorMode info_raw; /*specifies the format in which you would like to get the raw pixel buffer*/
- LodePNGInfo info_png; /*info of the PNG image obtained after decoding*/
- unsigned error;
-};
-
-void lodepng_state_init(LodePNGState* state);
-void lodepng_state_cleanup(LodePNGState* state);
-unsigned lodepng_decode(unsigned char** out, unsigned* w, unsigned* h, LodePNGState* state, const unsigned char* in, size_t insize);
-unsigned lodepng_inspect(unsigned* w, unsigned* h, LodePNGState* state, const unsigned char* in, size_t insize);
-
-#endif //_TVG_LODEPNG_H_
diff --git a/thirdparty/thorvg/src/loaders/png/tvgPngLoader.cpp b/thirdparty/thorvg/src/loaders/png/tvgPngLoader.cpp
deleted file mode 100644
index 3e293176b7..0000000000
--- a/thirdparty/thorvg/src/loaders/png/tvgPngLoader.cpp
+++ /dev/null
@@ -1,194 +0,0 @@
-/*
- * Copyright (c) 2021 - 2022 Samsung Electronics Co., Ltd. All rights reserved.
-
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
-
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
-
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-#include <memory.h>
-#include "tvgLoader.h"
-#include "tvgPngLoader.h"
-
-
-/************************************************************************/
-/* Internal Class Implementation */
-/************************************************************************/
-
-
-static inline uint32_t PREMULTIPLY(uint32_t c)
-{
- auto a = (c >> 24);
- return (c & 0xff000000) + ((((c >> 8) & 0xff) * a) & 0xff00) + ((((c & 0x00ff00ff) * a) >> 8) & 0x00ff00ff);
-}
-
-
-static void _premultiply(uint32_t* data, uint32_t w, uint32_t h)
-{
- auto buffer = data;
- for (uint32_t y = 0; y < h; ++y, buffer += w) {
- auto src = buffer;
- for (uint32_t x = 0; x < w; ++x, ++src) {
- *src = PREMULTIPLY(*src);
- }
- }
-}
-
-
-void PngLoader::clear()
-{
- lodepng_state_cleanup(&state);
-
- if (freeData) free(data);
- data = nullptr;
- size = 0;
- freeData = false;
-}
-
-
-/************************************************************************/
-/* External Class Implementation */
-/************************************************************************/
-
-PngLoader::PngLoader()
-{
- lodepng_state_init(&state);
-}
-
-
-PngLoader::~PngLoader()
-{
- if (freeData) free(data);
- free(image);
-}
-
-
-bool PngLoader::open(const string& path)
-{
- clear();
-
- auto pngFile = fopen(path.c_str(), "rb");
- if (!pngFile) return false;
-
- auto ret = false;
-
- //determine size
- if (fseek(pngFile, 0, SEEK_END) < 0) goto finalize;
- if (((size = ftell(pngFile)) < 1)) goto finalize;
- if (fseek(pngFile, 0, SEEK_SET)) goto finalize;
-
- data = (unsigned char *) malloc(size);
- if (!data) goto finalize;
-
- freeData = true;
-
- if (fread(data, size, 1, pngFile) < 1) goto failure;
-
- lodepng_state_init(&state);
-
- unsigned int width, height;
- if (lodepng_inspect(&width, &height, &state, data, size) > 0) goto failure;
-
- w = static_cast<float>(width);
- h = static_cast<float>(height);
- ret = true;
-
- goto finalize;
-
-failure:
- clear();
-
-finalize:
- fclose(pngFile);
- return ret;
-}
-
-
-bool PngLoader::open(const char* data, uint32_t size, bool copy)
-{
- clear();
-
- lodepng_state_init(&state);
-
- unsigned int width, height;
- if (lodepng_inspect(&width, &height, &state, (unsigned char*)(data), size) > 0) return false;
-
- if (copy) {
- this->data = (unsigned char *) malloc(size);
- if (!this->data) return false;
- memcpy((unsigned char *)this->data, data, size);
- freeData = true;
- } else {
- this->data = (unsigned char *) data;
- freeData = false;
- }
-
- w = static_cast<float>(width);
- h = static_cast<float>(height);
- this->size = size;
-
- return true;
-}
-
-
-bool PngLoader::read()
-{
- if (!data || w <= 0 || h <= 0) return false;
-
- TaskScheduler::request(this);
-
- return true;
-}
-
-
-bool PngLoader::close()
-{
- this->done();
- clear();
- return true;
-}
-
-
-unique_ptr<Surface> PngLoader::bitmap()
-{
- this->done();
-
- if (!image) return nullptr;
-
- auto surface = static_cast<Surface*>(malloc(sizeof(Surface)));
- surface->buffer = (uint32_t*)(image);
- surface->stride = w;
- surface->w = w;
- surface->h = h;
- surface->cs = SwCanvas::ARGB8888;
-
- return unique_ptr<Surface>(surface);
-}
-
-
-void PngLoader::run(unsigned tid)
-{
- if (image) {
- free(image);
- image = nullptr;
- }
- auto width = static_cast<unsigned>(w);
- auto height = static_cast<unsigned>(h);
-
- lodepng_decode(&image, &width, &height, &state, data, size);
-
- _premultiply((uint32_t*)(image), width, height);
-}
diff --git a/thirdparty/thorvg/src/loaders/png/tvgPngLoader.h b/thirdparty/thorvg/src/loaders/png/tvgPngLoader.h
deleted file mode 100644
index 8f07f6418f..0000000000
--- a/thirdparty/thorvg/src/loaders/png/tvgPngLoader.h
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright (c) 2021 - 2022 Samsung Electronics Co., Ltd. All rights reserved.
-
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
-
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
-
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-#ifndef _TVG_PNG_LOADER_H_
-#define _TVG_PNG_LOADER_H_
-
-#include "tvgLodePng.h"
-#include "tvgTaskScheduler.h"
-
-
-class PngLoader : public LoadModule, public Task
-{
-private:
- LodePNGState state;
- unsigned char* data = nullptr;
- unsigned char *image = nullptr;
- unsigned long size = 0;
- bool freeData = false;
-
- void clear();
-
-public:
- PngLoader();
- ~PngLoader();
-
- using LoadModule::open;
- bool open(const string& path) override;
- bool open(const char* data, uint32_t size, bool copy) override;
- bool read() override;
- bool close() override;
-
- unique_ptr<Surface> bitmap() override;
- void run(unsigned tid) override;
-};
-
-#endif //_TVG_PNG_LOADER_H_
diff --git a/thirdparty/thorvg/src/loaders/raw/tvgRawLoader.cpp b/thirdparty/thorvg/src/loaders/raw/tvgRawLoader.cpp
index 2da399d8c3..889f130ce9 100644
--- a/thirdparty/thorvg/src/loaders/raw/tvgRawLoader.cpp
+++ b/thirdparty/thorvg/src/loaders/raw/tvgRawLoader.cpp
@@ -78,9 +78,9 @@ unique_ptr<Surface> RawLoader::bitmap()
auto surface = static_cast<Surface*>(malloc(sizeof(Surface)));
surface->buffer = (uint32_t*)(content);
- surface->stride = w;
- surface->w = w;
- surface->h = h;
+ surface->stride = (uint32_t)w;
+ surface->w = (uint32_t)w;
+ surface->h = (uint32_t)h;
surface->cs = SwCanvas::ARGB8888;
return unique_ptr<Surface>(surface);
diff --git a/thirdparty/thorvg/src/loaders/svg/tvgSvgCssStyle.cpp b/thirdparty/thorvg/src/loaders/svg/tvgSvgCssStyle.cpp
index 8f46b62ce9..478ba5d3d1 100644
--- a/thirdparty/thorvg/src/loaders/svg/tvgSvgCssStyle.cpp
+++ b/thirdparty/thorvg/src/loaders/svg/tvgSvgCssStyle.cpp
@@ -42,7 +42,10 @@ static void _copyStyle(SvgStyleProperty* to, const SvgStyleProperty* from)
to->fill.paint.color = from->fill.paint.color;
to->fill.paint.none = from->fill.paint.none;
to->fill.paint.curColor = from->fill.paint.curColor;
- if (from->fill.paint.url) to->fill.paint.url = strdup(from->fill.paint.url);
+ if (from->fill.paint.url) {
+ if (to->fill.paint.url) free(to->fill.paint.url);
+ to->fill.paint.url = strdup(from->fill.paint.url);
+ }
to->fill.flags = (SvgFillFlags)((int)to->fill.flags | (int)SvgFillFlags::Paint);
to->flags = (SvgStyleFlags)((int)to->flags | (int)SvgStyleFlags::Fill);
}
@@ -61,7 +64,10 @@ static void _copyStyle(SvgStyleProperty* to, const SvgStyleProperty* from)
to->stroke.paint.color = from->stroke.paint.color;
to->stroke.paint.none = from->stroke.paint.none;
to->stroke.paint.curColor = from->stroke.paint.curColor;
- if (from->stroke.paint.url) to->stroke.paint.url = strdup(from->stroke.paint.url);
+ if (from->stroke.paint.url) {
+ if (to->stroke.paint.url) free(to->stroke.paint.url);
+ to->stroke.paint.url = strdup(from->stroke.paint.url);
+ }
to->stroke.flags = (SvgStrokeFlags)((int)to->stroke.flags | (int)SvgStrokeFlags::Paint);
to->flags = (SvgStyleFlags)((int)to->flags | (int)SvgStyleFlags::Stroke);
}
@@ -122,8 +128,14 @@ void cssCopyStyleAttr(SvgNode* to, const SvgNode* from)
//Copy style attribute
_copyStyle(to->style, from->style);
- if (from->style->clipPath.url) to->style->clipPath.url = strdup(from->style->clipPath.url);
- if (from->style->mask.url) to->style->mask.url = strdup(from->style->mask.url);
+ if (from->style->clipPath.url) {
+ if (to->style->clipPath.url) free(to->style->clipPath.url);
+ to->style->clipPath.url = strdup(from->style->clipPath.url);
+ }
+ if (from->style->mask.url) {
+ if (to->style->mask.url) free(to->style->mask.url);
+ to->style->mask.url = strdup(from->style->mask.url);
+ }
}
diff --git a/thirdparty/thorvg/src/loaders/svg/tvgSvgLoader.cpp b/thirdparty/thorvg/src/loaders/svg/tvgSvgLoader.cpp
index 42bfd4de70..737fd96455 100644
--- a/thirdparty/thorvg/src/loaders/svg/tvgSvgLoader.cpp
+++ b/thirdparty/thorvg/src/loaders/svg/tvgSvgLoader.cpp
@@ -115,9 +115,54 @@ static bool _parseNumber(const char** content, float* number)
if ((*content) == end) return false;
//Skip comma if any
*content = _skipComma(end);
+
return true;
}
+
+static constexpr struct
+{
+ AspectRatioAlign align;
+ const char* tag;
+} alignTags[] = {
+ { AspectRatioAlign::XMinYMin, "xMinYMin" },
+ { AspectRatioAlign::XMidYMin, "xMidYMin" },
+ { AspectRatioAlign::XMaxYMin, "xMaxYMin" },
+ { AspectRatioAlign::XMinYMid, "xMinYMid" },
+ { AspectRatioAlign::XMidYMid, "xMidYMid" },
+ { AspectRatioAlign::XMaxYMid, "xMaxYMid" },
+ { AspectRatioAlign::XMinYMax, "xMinYMax" },
+ { AspectRatioAlign::XMidYMax, "xMidYMax" },
+ { AspectRatioAlign::XMaxYMax, "xMaxYMax" },
+};
+
+
+static bool _parseAspectRatio(const char** content, AspectRatioAlign* align, AspectRatioMeetOrSlice* meetOrSlice)
+{
+ if (!strcmp(*content, "none")) {
+ *align = AspectRatioAlign::None;
+ return true;
+ }
+
+ for (unsigned int i = 0; i < sizeof(alignTags) / sizeof(alignTags[0]); i++) {
+ if (!strncmp(*content, alignTags[i].tag, 8)) {
+ *align = alignTags[i].align;
+ *content += 8;
+ *content = _skipSpace(*content, nullptr);
+ break;
+ }
+ }
+
+ if (!strcmp(*content, "meet")) {
+ *meetOrSlice = AspectRatioMeetOrSlice::Meet;
+ } else if (!strcmp(*content, "slice")) {
+ *meetOrSlice = AspectRatioMeetOrSlice::Slice;
+ }
+
+ return true;
+}
+
+
/**
* According to https://www.w3.org/TR/SVG/coords.html#Units
*/
@@ -554,6 +599,7 @@ static void _toColor(const char* str, uint8_t* r, uint8_t* g, uint8_t* b, char**
}
}
} else if (ref && len >= 3 && !strncmp(str, "url", 3)) {
+ if (*ref) free(*ref);
*ref = _idFromUrl((const char*)(str + 3));
} else {
//Handle named color
@@ -802,7 +848,7 @@ static bool _attrParseSvgNode(void* data, const char* key, const char* value)
}
loader->svgParse->global.x = (int)doc->vx;
} else if (!strcmp(key, "preserveAspectRatio")) {
- if (!strcmp(value, "none")) doc->preserveAspect = false;
+ _parseAspectRatio(&value, &doc->align, &doc->meetOrSlice);
} else if (!strcmp(key, "style")) {
return simpleXmlParseW3CAttribute(value, strlen(value), _parseStyleAttr, loader);
#ifdef THORVG_LOG_ENABLED
@@ -1156,7 +1202,7 @@ static bool _attrParseSymbolNode(void* data, const char* key, const char* value)
symbol->h = _toFloat(loader->svgParse, value, SvgParserLengthType::Vertical);
symbol->hasHeight = true;
} else if (!strcmp(key, "preserveAspectRatio")) {
- if (!strcmp(value, "none")) symbol->preserveAspect = false;
+ _parseAspectRatio(&value, &symbol->align, &symbol->meetOrSlice);
} else if (!strcmp(key, "overflow")) {
if (!strcmp(value, "visible")) symbol->overflowVisible = true;
} else {
@@ -1248,7 +1294,8 @@ static SvgNode* _createSvgNode(SvgLoaderData* loader, SvgNode* parent, const cha
loader->svgParse->global.w = 0;
loader->svgParse->global.h = 0;
- doc->preserveAspect = true;
+ doc->align = AspectRatioAlign::XMidYMid;
+ doc->meetOrSlice = AspectRatioMeetOrSlice::Meet;
func(buf, bufLength, _attrParseSvgNode, loader);
if (loader->svgParse->global.w == 0) {
@@ -1309,7 +1356,8 @@ static SvgNode* _createSymbolNode(SvgLoaderData* loader, SvgNode* parent, const
if (!loader->svgParse->node) return nullptr;
loader->svgParse->node->display = false;
- loader->svgParse->node->node.symbol.preserveAspect = true;
+ loader->svgParse->node->node.symbol.align = AspectRatioAlign::XMidYMid;
+ loader->svgParse->node->node.symbol.meetOrSlice = AspectRatioMeetOrSlice::Meet;
loader->svgParse->node->node.symbol.overflowVisible = false;
loader->svgParse->node->node.symbol.hasViewBox = false;
@@ -1331,6 +1379,7 @@ static bool _attrParsePathNode(void* data, const char* key, const char* value)
SvgPathNode* path = &(node->node.path);
if (!strcmp(key, "d")) {
+ if (path->path) free(path->path);
//Temporary: need to copy
path->path = _copyId(value);
} else if (!strcmp(key, "style")) {
@@ -1801,19 +1850,10 @@ static SvgNode* _getDefsNode(SvgNode* node)
}
-static SvgNode* _findChildById(const SvgNode* node, const char* id)
+static SvgNode* _findNodeById(SvgNode *node, const char* id)
{
if (!node) return nullptr;
- auto child = node->child.data;
- for (uint32_t i = 0; i < node->child.count; ++i, ++child) {
- if (((*child)->id) && !strcmp((*child)->id, id)) return (*child);
- }
- return nullptr;
-}
-
-static SvgNode* _findNodeById(SvgNode *node, const char* id)
-{
SvgNode* result = nullptr;
if (node->id && !strcmp(node->id, id)) return node;
@@ -1827,6 +1867,7 @@ static SvgNode* _findNodeById(SvgNode *node, const char* id)
return result;
}
+
static void _cloneGradStops(Array<Fill::ColorStop>& dst, const Array<Fill::ColorStop>& src)
{
for (uint32_t i = 0; i < src.count; ++i) {
@@ -1889,7 +1930,10 @@ static void _styleInherit(SvgStyleProperty* child, const SvgStyleProperty* paren
child->fill.paint.color = parent->fill.paint.color;
child->fill.paint.none = parent->fill.paint.none;
child->fill.paint.curColor = parent->fill.paint.curColor;
- if (parent->fill.paint.url) child->fill.paint.url = _copyId(parent->fill.paint.url);
+ if (parent->fill.paint.url) {
+ if (child->fill.paint.url) free(child->fill.paint.url);
+ child->fill.paint.url = _copyId(parent->fill.paint.url);
+ }
}
if (!((int)child->fill.flags & (int)SvgFillFlags::Opacity)) {
child->fill.opacity = parent->fill.opacity;
@@ -1902,7 +1946,12 @@ static void _styleInherit(SvgStyleProperty* child, const SvgStyleProperty* paren
child->stroke.paint.color = parent->stroke.paint.color;
child->stroke.paint.none = parent->stroke.paint.none;
child->stroke.paint.curColor = parent->stroke.paint.curColor;
- child->stroke.paint.url = parent->stroke.paint.url ? _copyId(parent->stroke.paint.url) : nullptr;
+ if (parent->stroke.paint.url) {
+ if (child->stroke.paint.url) free(child->stroke.paint.url);
+ child->stroke.paint.url = _copyId(parent->stroke.paint.url);
+ } else {
+ child->stroke.paint.url = nullptr;
+ }
}
if (!((int)child->stroke.flags & (int)SvgStrokeFlags::Opacity)) {
child->stroke.opacity = parent->stroke.opacity;
@@ -1942,7 +1991,10 @@ static void _styleCopy(SvgStyleProperty* to, const SvgStyleProperty* from)
to->fill.paint.color = from->fill.paint.color;
to->fill.paint.none = from->fill.paint.none;
to->fill.paint.curColor = from->fill.paint.curColor;
- if (from->fill.paint.url) to->fill.paint.url = _copyId(from->fill.paint.url);
+ if (from->fill.paint.url) {
+ if (to->fill.paint.url) free(to->fill.paint.url);
+ to->fill.paint.url = _copyId(from->fill.paint.url);
+ }
}
if (((int)from->fill.flags & (int)SvgFillFlags::Opacity)) {
to->fill.opacity = from->fill.opacity;
@@ -1956,7 +2008,12 @@ static void _styleCopy(SvgStyleProperty* to, const SvgStyleProperty* from)
to->stroke.paint.color = from->stroke.paint.color;
to->stroke.paint.none = from->stroke.paint.none;
to->stroke.paint.curColor = from->stroke.paint.curColor;
- to->stroke.paint.url = from->stroke.paint.url ? _copyId(from->stroke.paint.url) : nullptr;
+ if (from->stroke.paint.url) {
+ if (to->stroke.paint.url) free(to->stroke.paint.url);
+ to->stroke.paint.url = _copyId(from->stroke.paint.url);
+ } else {
+ to->stroke.paint.url = nullptr;
+ }
}
if (((int)from->stroke.flags & (int)SvgStrokeFlags::Opacity)) {
to->stroke.opacity = from->stroke.opacity;
@@ -1992,10 +2049,14 @@ static void _copyAttr(SvgNode* to, const SvgNode* from)
//Copy style attribute
_styleCopy(to->style, from->style);
to->style->flags = (SvgStyleFlags)((int)to->style->flags | (int)from->style->flags);
- if (from->style->fill.paint.url) to->style->fill.paint.url = strdup(from->style->fill.paint.url);
- if (from->style->stroke.paint.url) to->style->stroke.paint.url = strdup(from->style->stroke.paint.url);
- if (from->style->clipPath.url) to->style->clipPath.url = strdup(from->style->clipPath.url);
- if (from->style->mask.url) to->style->mask.url = strdup(from->style->mask.url);
+ if (from->style->clipPath.url) {
+ if (to->style->clipPath.url) free(to->style->clipPath.url);
+ to->style->clipPath.url = strdup(from->style->clipPath.url);
+ }
+ if (from->style->mask.url) {
+ if (to->style->mask.url) free(to->style->mask.url);
+ to->style->mask.url = strdup(from->style->mask.url);
+ }
//Copy node attribute
switch (from->type) {
@@ -2031,7 +2092,10 @@ static void _copyAttr(SvgNode* to, const SvgNode* from)
break;
}
case SvgNodeType::Path: {
- if (from->node.path.path) to->node.path.path = strdup(from->node.path.path);
+ if (from->node.path.path) {
+ if (to->node.path.path) free(to->node.path.path);
+ to->node.path.path = strdup(from->node.path.path);
+ }
break;
}
case SvgNodeType::Polygon: {
@@ -2053,7 +2117,10 @@ static void _copyAttr(SvgNode* to, const SvgNode* from)
to->node.image.y = from->node.image.y;
to->node.image.w = from->node.image.w;
to->node.image.h = from->node.image.h;
- if (from->node.image.href) to->node.image.href = strdup(from->node.image.href);
+ if (from->node.image.href) {
+ if (to->node.image.href) free(to->node.image.href);
+ to->node.image.href = strdup(from->node.image.href);
+ }
break;
}
default: {
@@ -2093,8 +2160,8 @@ static void _clonePostponedNodes(Array<SvgNodeIdPair>* cloneNodes, SvgNode* doc)
for (uint32_t i = 0; i < cloneNodes->count; ++i) {
auto nodeIdPair = cloneNodes->data[i];
auto defs = _getDefsNode(nodeIdPair.node);
- auto nodeFrom = _findChildById(defs, nodeIdPair.id);
- if (!nodeFrom) nodeFrom = _findChildById(doc, nodeIdPair.id);
+ auto nodeFrom = _findNodeById(defs, nodeIdPair.id);
+ if (!nodeFrom) nodeFrom = _findNodeById(doc, nodeIdPair.id);
_cloneNode(nodeFrom, nodeIdPair.node, 0);
if (nodeFrom && nodeFrom->type == SvgNodeType::Symbol && nodeIdPair.node->type == SvgNodeType::Use) {
nodeIdPair.node->node.use.symbol = nodeFrom;
@@ -2141,7 +2208,7 @@ static bool _attrParseUseNode(void* data, const char* key, const char* value)
if (!strcmp(key, "href") || !strcmp(key, "xlink:href")) {
id = _idFromHref(value);
defs = _getDefsNode(node);
- nodeFrom = _findChildById(defs, id);
+ nodeFrom = _findNodeById(defs, id);
if (nodeFrom) {
_cloneNode(nodeFrom, node, 0);
if (nodeFrom->type == SvgNodeType::Symbol) use->symbol = nodeFrom;
@@ -2695,10 +2762,14 @@ static void _svgLoaderParserXmlOpen(SvgLoaderData* loader, const char* content,
if (loader->stack.count > 0) parent = loader->stack.data[loader->stack.count - 1];
else parent = loader->doc;
if (!strcmp(tagName, "style")) {
- node = method(loader, nullptr, attrs, attrsLength, simpleXmlParseAttributes);
- loader->cssStyle = node;
- loader->doc->node.doc.style = node;
- loader->style = true;
+ // TODO: For now only the first style node is saved. After the CSS id selector
+ // is introduced this if condition shouldn't be necessary anymore
+ if (!loader->cssStyle) {
+ node = method(loader, nullptr, attrs, attrsLength, simpleXmlParseAttributes);
+ loader->cssStyle = node;
+ loader->doc->node.doc.style = node;
+ loader->style = true;
+ }
} else {
node = method(loader, parent, attrs, attrsLength, simpleXmlParseAttributes);
}
@@ -3127,7 +3198,7 @@ void SvgLoader::run(unsigned tid)
_updateStyle(loaderData.doc, nullptr);
}
- root = svgSceneBuild(loaderData.doc, vx, vy, vw, vh, w, h, preserveAspect, svgPath);
+ root = svgSceneBuild(loaderData.doc, vx, vy, vw, vh, w, h, align, meetOrSlice, svgPath);
}
@@ -3160,7 +3231,8 @@ bool SvgLoader::header()
if (vh < FLT_EPSILON) vh = h;
}
- preserveAspect = loaderData.doc->node.doc.preserveAspect;
+ align = loaderData.doc->node.doc.align;
+ meetOrSlice = loaderData.doc->node.doc.meetOrSlice;
} else {
TVGLOG("SVG", "No SVG File. There is no <svg/>");
return false;
@@ -3215,31 +3287,9 @@ bool SvgLoader::resize(Paint* paint, float w, float h)
auto sx = w / this->w;
auto sy = h / this->h;
+ Matrix m = {sx, 0, 0, 0, sy, 0, 0, 0, 1};
+ paint->transform(m);
- if (preserveAspect) {
- //Scale
- auto scale = sx < sy ? sx : sy;
- paint->scale(scale);
- //Align
- auto tx = 0.0f;
- auto ty = 0.0f;
- auto tw = this->w * scale;
- auto th = this->h * scale;
- if (tw > th) ty -= (h - th) * 0.5f;
- else tx -= (w - tw) * 0.5f;
- paint->translate(-tx, -ty);
- } else {
- //Align
- auto tx = 0.0f;
- auto ty = 0.0f;
- auto tw = this->w * sx;
- auto th = this->h * sy;
- if (tw > th) ty -= (h - th) * 0.5f;
- else tx -= (w - tw) * 0.5f;
-
- Matrix m = {sx, 0, -tx, 0, sy, -ty, 0, 0, 1};
- paint->transform(m);
- }
return true;
}
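With the preserveAspectRatio handling moved into svgSceneBuild(), SvgLoader::resize() above now always applies a plain non-uniform scale. A minimal standalone sketch of the resulting math, assuming ThorVG's row-major 3x3 Matrix layout {e11, e12, e13, e21, e22, e23, e31, e32, e33} (this is not ThorVG API):

    struct Mat3 { float e11, e12, e13, e21, e22, e23, e31, e32, e33; };

    // Stretch the document to the requested size; aspect-ratio correction is no
    // longer done here but in svgSceneBuild().
    static Mat3 resizeMatrix(float docW, float docH, float targetW, float targetH)
    {
        const float sx = targetW / docW;
        const float sy = targetH / docH;
        return {sx, 0, 0, 0, sy, 0, 0, 0, 1};
    }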
diff --git a/thirdparty/thorvg/src/loaders/svg/tvgSvgLoader.h b/thirdparty/thorvg/src/loaders/svg/tvgSvgLoader.h
index 093fb671b3..f224d1a4ac 100644
--- a/thirdparty/thorvg/src/loaders/svg/tvgSvgLoader.h
+++ b/thirdparty/thorvg/src/loaders/svg/tvgSvgLoader.h
@@ -50,6 +50,9 @@ public:
unique_ptr<Paint> paint() override;
private:
+ AspectRatioAlign align = AspectRatioAlign::XMidYMid;
+ AspectRatioMeetOrSlice meetOrSlice = AspectRatioMeetOrSlice::Meet;
+
bool header();
void clear();
void run(unsigned tid) override;
diff --git a/thirdparty/thorvg/src/loaders/svg/tvgSvgLoaderCommon.h b/thirdparty/thorvg/src/loaders/svg/tvgSvgLoaderCommon.h
index dc9ed558c3..c657c0e21a 100644
--- a/thirdparty/thorvg/src/loaders/svg/tvgSvgLoaderCommon.h
+++ b/thirdparty/thorvg/src/loaders/svg/tvgSvgLoaderCommon.h
@@ -145,6 +145,26 @@ enum class SvgParserLengthType
Other
};
+enum class AspectRatioAlign
+{
+ None,
+ XMinYMin,
+ XMidYMin,
+ XMaxYMin,
+ XMinYMid,
+ XMidYMid,
+ XMaxYMid,
+ XMinYMax,
+ XMidYMax,
+ XMaxYMax
+};
+
+enum class AspectRatioMeetOrSlice
+{
+ Meet,
+ Slice
+};
+
struct SvgDocNode
{
float w;
@@ -155,7 +175,8 @@ struct SvgDocNode
float vh;
SvgNode* defs;
SvgNode* style;
- bool preserveAspect;
+ AspectRatioAlign align;
+ AspectRatioMeetOrSlice meetOrSlice;
};
struct SvgGNode
@@ -171,7 +192,8 @@ struct SvgSymbolNode
{
float w, h;
float vx, vy, vw, vh;
- bool preserveAspect;
+ AspectRatioAlign align;
+ AspectRatioMeetOrSlice meetOrSlice;
bool overflowVisible;
bool hasViewBox;
bool hasWidth;
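The two enums added above model SVG's preserveAspectRatio attribute (an alignment keyword plus meet/slice). A hypothetical sketch of how such a value string could be mapped onto them; the loader's real parser is not part of this diff:

    #include <cstring>

    // Hypothetical mapping of a preserveAspectRatio string onto the new enums.
    static void parsePreserveAspectRatio(const char* value, AspectRatioAlign* align, AspectRatioMeetOrSlice* meetOrSlice)
    {
        *align = AspectRatioAlign::XMidYMid;          // SVG default
        *meetOrSlice = AspectRatioMeetOrSlice::Meet;  // SVG default

        if (!strncmp(value, "none", 4)) *align = AspectRatioAlign::None;
        else if (!strncmp(value, "xMinYMin", 8)) *align = AspectRatioAlign::XMinYMin;
        else if (!strncmp(value, "xMaxYMax", 8)) *align = AspectRatioAlign::XMaxYMax;
        // ...remaining alignment keywords handled the same way

        if (strstr(value, "slice")) *meetOrSlice = AspectRatioMeetOrSlice::Slice;
    }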
diff --git a/thirdparty/thorvg/src/loaders/svg/tvgSvgSceneBuilder.cpp b/thirdparty/thorvg/src/loaders/svg/tvgSvgSceneBuilder.cpp
index a3f34fd46b..4cb4ffdaeb 100644
--- a/thirdparty/thorvg/src/loaders/svg/tvgSvgSceneBuilder.cpp
+++ b/thirdparty/thorvg/src/loaders/svg/tvgSvgSceneBuilder.cpp
@@ -49,9 +49,9 @@
*/
+#include "tvgMath.h" /* to include math.h before cstring */
#include <cstring>
#include <string>
-#include "tvgMath.h"
#include "tvgSvgLoaderCommon.h"
#include "tvgSvgSceneBuilder.h"
#include "tvgSvgPath.h"
@@ -68,7 +68,7 @@ struct Box
static bool _appendShape(SvgNode* node, Shape* shape, const Box& vBox, const string& svgPath);
-static unique_ptr<Scene> _sceneBuildHelper(const SvgNode* node, const Box& vBox, const string& svgPath, bool mask, bool* isMaskWhite = nullptr);
+static unique_ptr<Scene> _sceneBuildHelper(const SvgNode* node, const Box& vBox, const string& svgPath, bool mask, int depth, bool* isMaskWhite = nullptr);
static inline bool _isGroupType(SvgNodeType type)
@@ -282,7 +282,7 @@ static void _applyComposition(Paint* paint, const SvgNode* node, const Box& vBox
node->style->mask.applying = true;
bool isMaskWhite = true;
- auto comp = _sceneBuildHelper(compNode, vBox, svgPath, true, &isMaskWhite);
+ auto comp = _sceneBuildHelper(compNode, vBox, svgPath, true, 0, &isMaskWhite);
if (comp) {
if (node->transform) comp->transform(*node->transform);
@@ -560,10 +560,84 @@ static unique_ptr<Picture> _imageBuildHelper(SvgNode* node, const Box& vBox, con
}
-static unique_ptr<Scene> _useBuildHelper(const SvgNode* node, const Box& vBox, const string& svgPath, bool* isMaskWhite)
+static Matrix _calculateAspectRatioMatrix(AspectRatioAlign align, AspectRatioMeetOrSlice meetOrSlice, float width, float height, const Box& box)
+{
+ auto sx = width / box.w;
+ auto sy = height / box.h;
+ auto tvx = box.x * sx;
+ auto tvy = box.y * sy;
+
+ if (align == AspectRatioAlign::None)
+ return {sx, 0, -tvx, 0, sy, -tvy, 0, 0, 1};
+
+ //Scale
+ if (meetOrSlice == AspectRatioMeetOrSlice::Meet) {
+ if (sx < sy) sy = sx;
+ else sx = sy;
+ } else {
+ if (sx < sy) sx = sy;
+ else sy = sx;
+ }
+
+ //Align
+ tvx = box.x * sx;
+ tvy = box.y * sy;
+ auto tvw = box.w * sx;
+ auto tvh = box.h * sy;
+
+ switch (align) {
+ case AspectRatioAlign::XMinYMin: {
+ break;
+ }
+ case AspectRatioAlign::XMidYMin: {
+ tvx -= (width - tvw) * 0.5f;
+ break;
+ }
+ case AspectRatioAlign::XMaxYMin: {
+ tvx -= width - tvw;
+ break;
+ }
+ case AspectRatioAlign::XMinYMid: {
+ tvy -= (height - tvh) * 0.5f;
+ break;
+ }
+ case AspectRatioAlign::XMidYMid: {
+ tvx -= (width - tvw) * 0.5f;
+ tvy -= (height - tvh) * 0.5f;
+ break;
+ }
+ case AspectRatioAlign::XMaxYMid: {
+ tvx -= width - tvw;
+ tvy -= (height - tvh) * 0.5f;
+ break;
+ }
+ case AspectRatioAlign::XMinYMax: {
+ tvy -= height - tvh;
+ break;
+ }
+ case AspectRatioAlign::XMidYMax: {
+ tvx -= (width - tvw) * 0.5f;
+ tvy -= height - tvh;
+ break;
+ }
+ case AspectRatioAlign::XMaxYMax: {
+ tvx -= width - tvw;
+ tvy -= height - tvh;
+ break;
+ }
+ default: {
+ break;
+ }
+ }
+
+ return {sx, 0, -tvx, 0, sy, -tvy, 0, 0, 1};
+}
+
+
+static unique_ptr<Scene> _useBuildHelper(const SvgNode* node, const Box& vBox, const string& svgPath, int depth, bool* isMaskWhite)
{
unique_ptr<Scene> finalScene;
- auto scene = _sceneBuildHelper(node, vBox, svgPath, false, isMaskWhite);
+ auto scene = _sceneBuildHelper(node, vBox, svgPath, false, depth + 1, isMaskWhite);
// mUseTransform = mUseTransform * mTranslate
Matrix mUseTransform = {1, 0, 0, 0, 1, 0, 0, 0, 1};
@@ -585,20 +659,8 @@ static unique_ptr<Scene> _useBuildHelper(const SvgNode* node, const Box& vBox, c
Matrix mViewBox = {1, 0, 0, 0, 1, 0, 0, 0, 1};
if ((!mathEqual(width, vw) || !mathEqual(height, vh)) && vw > 0 && vh > 0) {
- auto sx = width / vw;
- auto sy = height / vh;
- if (symbol.preserveAspect) {
- if (sx < sy) sy = sx;
- else sx = sy;
- }
-
- auto tvx = symbol.vx * sx;
- auto tvy = symbol.vy * sy;
- auto tvw = vw * sx;
- auto tvh = vh * sy;
- tvy -= (symbol.h - tvh) * 0.5f;
- tvx -= (symbol.w - tvw) * 0.5f;
- mViewBox = {sx, 0, -tvx, 0, sy, -tvy, 0, 0, 1};
+ Box box = {symbol.vx, symbol.vy, vw, vh};
+ mViewBox = _calculateAspectRatioMatrix(symbol.align, symbol.meetOrSlice, width, height, box);
} else if (!mathZero(symbol.vx) || !mathZero(symbol.vy)) {
mViewBox = {1, 0, -symbol.vx, 0, 1, -symbol.vy, 0, 0, 1};
}
@@ -642,8 +704,15 @@ static unique_ptr<Scene> _useBuildHelper(const SvgNode* node, const Box& vBox, c
}
-static unique_ptr<Scene> _sceneBuildHelper(const SvgNode* node, const Box& vBox, const string& svgPath, bool mask, bool* isMaskWhite)
+static unique_ptr<Scene> _sceneBuildHelper(const SvgNode* node, const Box& vBox, const string& svgPath, bool mask, int depth, bool* isMaskWhite)
{
+ /* Exception handling: prevent runaway recursion on invalid SVG data.
+ The depth limit is an arbitrary value; an experimentally tuned limit may be needed. */
+ if (depth > 2192) {
+ TVGERR("SVG", "Infinite recursive call - stopped after %d calls! Svg file may be incorrectly formatted.", depth);
+ return nullptr;
+ }
+
if (_isGroupType(node->type) || mask) {
auto scene = Scene::gen();
// For a Symbol node, the viewBox transformation has to be applied first - see _useBuildHelper()
@@ -654,12 +723,15 @@ static unique_ptr<Scene> _sceneBuildHelper(const SvgNode* node, const Box& vBox,
for (uint32_t i = 0; i < node->child.count; ++i, ++child) {
if (_isGroupType((*child)->type)) {
if ((*child)->type == SvgNodeType::Use)
- scene->push(_useBuildHelper(*child, vBox, svgPath, isMaskWhite));
+ scene->push(_useBuildHelper(*child, vBox, svgPath, depth + 1, isMaskWhite));
else
- scene->push(_sceneBuildHelper(*child, vBox, svgPath, false, isMaskWhite));
+ scene->push(_sceneBuildHelper(*child, vBox, svgPath, false, depth + 1, isMaskWhite));
} else if ((*child)->type == SvgNodeType::Image) {
auto image = _imageBuildHelper(*child, vBox, svgPath);
- if (image) scene->push(move(image));
+ if (image) {
+ scene->push(move(image));
+ if (isMaskWhite) *isMaskWhite = false;
+ }
} else if ((*child)->type != SvgNodeType::Mask) {
auto shape = _shapeBuildHelper(*child, vBox, svgPath);
if (shape) {
@@ -688,36 +760,18 @@ static unique_ptr<Scene> _sceneBuildHelper(const SvgNode* node, const Box& vBox,
/* External Class Implementation */
/************************************************************************/
-unique_ptr<Scene> svgSceneBuild(SvgNode* node, float vx, float vy, float vw, float vh, float w, float h, bool preserveAspect, const string& svgPath)
+unique_ptr<Scene> svgSceneBuild(SvgNode* node, float vx, float vy, float vw, float vh, float w, float h, AspectRatioAlign align, AspectRatioMeetOrSlice meetOrSlice, const string& svgPath)
{
+ //TODO: aspect ratio is valid only if a viewBox was set
+
if (!node || (node->type != SvgNodeType::Doc)) return nullptr;
Box vBox = {vx, vy, vw, vh};
- auto docNode = _sceneBuildHelper(node, vBox, svgPath, false);
+ auto docNode = _sceneBuildHelper(node, vBox, svgPath, false, 0);
if (!mathEqual(w, vw) || !mathEqual(h, vh)) {
- auto sx = w / vw;
- auto sy = h / vh;
-
- if (preserveAspect) {
- //Scale
- auto scale = sx < sy ? sx : sy;
- docNode->scale(scale);
- //Align
- auto tvx = vx * scale;
- auto tvy = vy * scale;
- auto tvw = vw * scale;
- auto tvh = vh * scale;
- tvx -= (w - tvw) * 0.5f;
- tvy -= (h - tvh) * 0.5f;
- docNode->translate(-tvx, -tvy);
- } else {
- //Align
- auto tvx = vx * sx;
- auto tvy = vy * sy;
- Matrix m = {sx, 0, -tvx, 0, sy, -tvy, 0, 0, 1};
- docNode->transform(m);
- }
+ Matrix m = _calculateAspectRatioMatrix(align, meetOrSlice, w, h, vBox);
+ docNode->transform(m);
} else if (!mathZero(vx) || !mathZero(vy)) {
docNode->translate(-vx, -vy);
}
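A hand-worked check of the xMidYMid/meet branch of _calculateAspectRatioMatrix() (not library code), for viewBox = {0, 0, 100, 50} rendered into a 200 x 200 viewport:

    sx = 200/100 = 2,  sy = 200/50 = 4   -> meet keeps the smaller scale, so sx = sy = 2
    tvx = 0, tvy = 0, tvw = 100*2 = 200, tvh = 50*2 = 100
    xMidYMid: tvx -= (200 - 200) * 0.5 = 0;  tvy -= (200 - 100) * 0.5, so tvy = -50
    result: {2, 0, 0, 0, 2, 50, 0, 0, 1}

i.e. the content is scaled uniformly by 2 and shifted down 50 pixels to centre it vertically.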
diff --git a/thirdparty/thorvg/src/loaders/svg/tvgSvgSceneBuilder.h b/thirdparty/thorvg/src/loaders/svg/tvgSvgSceneBuilder.h
index cecbbf02a8..311f3c80e6 100644
--- a/thirdparty/thorvg/src/loaders/svg/tvgSvgSceneBuilder.h
+++ b/thirdparty/thorvg/src/loaders/svg/tvgSvgSceneBuilder.h
@@ -25,6 +25,6 @@
#include "tvgCommon.h"
-unique_ptr<Scene> svgSceneBuild(SvgNode* node, float vx, float vy, float vw, float vh, float w, float h, bool preserveAspect, const string& svgPath);
+unique_ptr<Scene> svgSceneBuild(SvgNode* node, float vx, float vy, float vw, float vh, float w, float h, AspectRatioAlign align, AspectRatioMeetOrSlice meetOrSlice, const string& svgPath);
#endif //_TVG_SVG_SCENE_BUILDER_H_
diff --git a/thirdparty/thorvg/src/loaders/svg/tvgXmlParser.cpp b/thirdparty/thorvg/src/loaders/svg/tvgXmlParser.cpp
index d7c51bdc30..231badd27d 100644
--- a/thirdparty/thorvg/src/loaders/svg/tvgXmlParser.cpp
+++ b/thirdparty/thorvg/src/loaders/svg/tvgXmlParser.cpp
@@ -26,10 +26,10 @@
#ifdef _WIN32
#include <malloc.h>
-#elif defined(__FreeBSD__)
- #include <stdlib.h>
-#else
+#elif defined(__linux__)
#include <alloca.h>
+#else
+ #include <stdlib.h>
#endif
#include "tvgXmlParser.h"
@@ -390,7 +390,7 @@ bool simpleXmlParse(const char* buf, unsigned bufLength, bool strip, simpleXMLCb
if (p) {
//Invalid case: '<' nested
- if (*p == '<') return false;
+ if (*p == '<' && type != SimpleXMLType::Doctype) return false;
const char *start, *end;
start = itr + 1 + toff;
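The parser change just above stops rejecting a nested '<' inside a DOCTYPE declaration; a '<' legitimately occurs there when the DOCTYPE carries an internal subset, for example:

    <!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
      "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd" [
      <!ENTITY logoTitle "Example">
    ]>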
diff --git a/thirdparty/thorvg/src/loaders/tvg/tvgTvgBinInterpreter.cpp b/thirdparty/thorvg/src/loaders/tvg/tvgTvgBinInterpreter.cpp
index 66de860aaa..01a39b6e17 100644
--- a/thirdparty/thorvg/src/loaders/tvg/tvgTvgBinInterpreter.cpp
+++ b/thirdparty/thorvg/src/loaders/tvg/tvgTvgBinInterpreter.cpp
@@ -23,10 +23,10 @@
#ifdef _WIN32
#include <malloc.h>
-#elif defined(__FreeBSD__)
- #include <stdlib.h>
-#else
+#elif defined(__linux__)
#include <alloca.h>
+#else
+ #include <stdlib.h>
#endif
#include "tvgTvgCommon.h"
diff --git a/thirdparty/thorvg/src/savers/tvg/tvgTvgSaver.cpp b/thirdparty/thorvg/src/savers/tvg/tvgTvgSaver.cpp
index fca313b430..57a21dcce1 100644
--- a/thirdparty/thorvg/src/savers/tvg/tvgTvgSaver.cpp
+++ b/thirdparty/thorvg/src/savers/tvg/tvgTvgSaver.cpp
@@ -28,10 +28,10 @@
#ifdef _WIN32
#include <malloc.h>
-#elif defined(__FreeBSD__)
- #include <stdlib.h>
-#else
+#elif defined(__linux__)
#include <alloca.h>
+#else
+ #include <stdlib.h>
#endif
static FILE* _fopen(const char* filename, const char* mode)
diff --git a/thirdparty/thorvg/update-thorvg.sh b/thirdparty/thorvg/update-thorvg.sh
index a3b73a7135..8cccc947ce 100755
--- a/thirdparty/thorvg/update-thorvg.sh
+++ b/thirdparty/thorvg/update-thorvg.sh
@@ -1,12 +1,12 @@
-VERSION=0.8.1
+VERSION=0.8.3
rm -rf AUTHORS inc LICENSE src *.zip
-curl -L -O https://github.com/Samsung/thorvg/archive/$VERSION.zip
+curl -L -O https://github.com/Samsung/thorvg/archive/v$VERSION.zip
bsdtar --strip-components=1 -xvf *.zip
rm *.zip
-rm -rf .github docs pc res test tools .git* *.md *.txt wasm_build.sh
+rm -rf .github docs pc res test tools tvgcompat .git* *.md *.txt wasm_build.sh
find . -type f -name 'meson.build' -delete
rm -rf src/bin src/bindings src/examples src/wasm
-rm -rf src/lib/gl_engine tvgcompat
+rm -rf src/lib/gl_engine src/loaders/external_jpg src/loaders/png
cat << EOF > inc/config.h
#ifndef THORVG_CONFIG_H
#define THORVG_CONFIG_H
@@ -26,3 +26,6 @@ cat << EOF > inc/config.h
#define THORVG_VERSION_STRING "$VERSION"
#endif
EOF
+for source in $(find ./ -type f \( -iname \*.h -o -iname \*.cpp \)); do
+ sed -i -e '$a\' $source
+done
diff --git a/thirdparty/volk/LICENSE.md b/thirdparty/volk/LICENSE.md
index da3caa7cda..e5e847ad25 100644
--- a/thirdparty/volk/LICENSE.md
+++ b/thirdparty/volk/LICENSE.md
@@ -1,4 +1,4 @@
-Copyright (c) 2018-2019 Arseny Kapoulkine
+Copyright (c) 2018-2022 Arseny Kapoulkine
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
diff --git a/thirdparty/volk/volk.c b/thirdparty/volk/volk.c
index c3383ee41d..7e6d3b07e5 100644
--- a/thirdparty/volk/volk.c
+++ b/thirdparty/volk/volk.c
@@ -338,6 +338,9 @@ static void volkGenLoadInstance(void* context, PFN_vkVoidFunction (*load)(void*,
#if defined(VK_NV_external_memory_capabilities)
vkGetPhysicalDeviceExternalImageFormatPropertiesNV = (PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV)load(context, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV");
#endif /* defined(VK_NV_external_memory_capabilities) */
+#if defined(VK_NV_optical_flow)
+ vkGetPhysicalDeviceOpticalFlowImageFormatsNV = (PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV)load(context, "vkGetPhysicalDeviceOpticalFlowImageFormatsNV");
+#endif /* defined(VK_NV_optical_flow) */
#if defined(VK_QNX_screen_surface)
vkCreateScreenSurfaceQNX = (PFN_vkCreateScreenSurfaceQNX)load(context, "vkCreateScreenSurfaceQNX");
vkGetPhysicalDeviceScreenPresentationSupportQNX = (PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX)load(context, "vkGetPhysicalDeviceScreenPresentationSupportQNX");
@@ -581,6 +584,9 @@ static void volkGenLoadDevice(void* context, PFN_vkVoidFunction (*load)(void*, c
vkDebugMarkerSetObjectNameEXT = (PFN_vkDebugMarkerSetObjectNameEXT)load(context, "vkDebugMarkerSetObjectNameEXT");
vkDebugMarkerSetObjectTagEXT = (PFN_vkDebugMarkerSetObjectTagEXT)load(context, "vkDebugMarkerSetObjectTagEXT");
#endif /* defined(VK_EXT_debug_marker) */
+#if defined(VK_EXT_device_fault)
+ vkGetDeviceFaultInfoEXT = (PFN_vkGetDeviceFaultInfoEXT)load(context, "vkGetDeviceFaultInfoEXT");
+#endif /* defined(VK_EXT_device_fault) */
#if defined(VK_EXT_discard_rectangles)
vkCmdSetDiscardRectangleEXT = (PFN_vkCmdSetDiscardRectangleEXT)load(context, "vkCmdSetDiscardRectangleEXT");
#endif /* defined(VK_EXT_discard_rectangles) */
@@ -611,6 +617,39 @@ static void volkGenLoadDevice(void* context, PFN_vkVoidFunction (*load)(void*, c
vkCmdSetPrimitiveRestartEnableEXT = (PFN_vkCmdSetPrimitiveRestartEnableEXT)load(context, "vkCmdSetPrimitiveRestartEnableEXT");
vkCmdSetRasterizerDiscardEnableEXT = (PFN_vkCmdSetRasterizerDiscardEnableEXT)load(context, "vkCmdSetRasterizerDiscardEnableEXT");
#endif /* defined(VK_EXT_extended_dynamic_state2) */
+#if defined(VK_EXT_extended_dynamic_state3)
+ vkCmdSetAlphaToCoverageEnableEXT = (PFN_vkCmdSetAlphaToCoverageEnableEXT)load(context, "vkCmdSetAlphaToCoverageEnableEXT");
+ vkCmdSetAlphaToOneEnableEXT = (PFN_vkCmdSetAlphaToOneEnableEXT)load(context, "vkCmdSetAlphaToOneEnableEXT");
+ vkCmdSetColorBlendAdvancedEXT = (PFN_vkCmdSetColorBlendAdvancedEXT)load(context, "vkCmdSetColorBlendAdvancedEXT");
+ vkCmdSetColorBlendEnableEXT = (PFN_vkCmdSetColorBlendEnableEXT)load(context, "vkCmdSetColorBlendEnableEXT");
+ vkCmdSetColorBlendEquationEXT = (PFN_vkCmdSetColorBlendEquationEXT)load(context, "vkCmdSetColorBlendEquationEXT");
+ vkCmdSetColorWriteMaskEXT = (PFN_vkCmdSetColorWriteMaskEXT)load(context, "vkCmdSetColorWriteMaskEXT");
+ vkCmdSetConservativeRasterizationModeEXT = (PFN_vkCmdSetConservativeRasterizationModeEXT)load(context, "vkCmdSetConservativeRasterizationModeEXT");
+ vkCmdSetCoverageModulationModeNV = (PFN_vkCmdSetCoverageModulationModeNV)load(context, "vkCmdSetCoverageModulationModeNV");
+ vkCmdSetCoverageModulationTableEnableNV = (PFN_vkCmdSetCoverageModulationTableEnableNV)load(context, "vkCmdSetCoverageModulationTableEnableNV");
+ vkCmdSetCoverageModulationTableNV = (PFN_vkCmdSetCoverageModulationTableNV)load(context, "vkCmdSetCoverageModulationTableNV");
+ vkCmdSetCoverageReductionModeNV = (PFN_vkCmdSetCoverageReductionModeNV)load(context, "vkCmdSetCoverageReductionModeNV");
+ vkCmdSetCoverageToColorEnableNV = (PFN_vkCmdSetCoverageToColorEnableNV)load(context, "vkCmdSetCoverageToColorEnableNV");
+ vkCmdSetCoverageToColorLocationNV = (PFN_vkCmdSetCoverageToColorLocationNV)load(context, "vkCmdSetCoverageToColorLocationNV");
+ vkCmdSetDepthClampEnableEXT = (PFN_vkCmdSetDepthClampEnableEXT)load(context, "vkCmdSetDepthClampEnableEXT");
+ vkCmdSetDepthClipEnableEXT = (PFN_vkCmdSetDepthClipEnableEXT)load(context, "vkCmdSetDepthClipEnableEXT");
+ vkCmdSetDepthClipNegativeOneToOneEXT = (PFN_vkCmdSetDepthClipNegativeOneToOneEXT)load(context, "vkCmdSetDepthClipNegativeOneToOneEXT");
+ vkCmdSetExtraPrimitiveOverestimationSizeEXT = (PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT)load(context, "vkCmdSetExtraPrimitiveOverestimationSizeEXT");
+ vkCmdSetLineRasterizationModeEXT = (PFN_vkCmdSetLineRasterizationModeEXT)load(context, "vkCmdSetLineRasterizationModeEXT");
+ vkCmdSetLineStippleEnableEXT = (PFN_vkCmdSetLineStippleEnableEXT)load(context, "vkCmdSetLineStippleEnableEXT");
+ vkCmdSetLogicOpEnableEXT = (PFN_vkCmdSetLogicOpEnableEXT)load(context, "vkCmdSetLogicOpEnableEXT");
+ vkCmdSetPolygonModeEXT = (PFN_vkCmdSetPolygonModeEXT)load(context, "vkCmdSetPolygonModeEXT");
+ vkCmdSetProvokingVertexModeEXT = (PFN_vkCmdSetProvokingVertexModeEXT)load(context, "vkCmdSetProvokingVertexModeEXT");
+ vkCmdSetRasterizationSamplesEXT = (PFN_vkCmdSetRasterizationSamplesEXT)load(context, "vkCmdSetRasterizationSamplesEXT");
+ vkCmdSetRasterizationStreamEXT = (PFN_vkCmdSetRasterizationStreamEXT)load(context, "vkCmdSetRasterizationStreamEXT");
+ vkCmdSetRepresentativeFragmentTestEnableNV = (PFN_vkCmdSetRepresentativeFragmentTestEnableNV)load(context, "vkCmdSetRepresentativeFragmentTestEnableNV");
+ vkCmdSetSampleLocationsEnableEXT = (PFN_vkCmdSetSampleLocationsEnableEXT)load(context, "vkCmdSetSampleLocationsEnableEXT");
+ vkCmdSetSampleMaskEXT = (PFN_vkCmdSetSampleMaskEXT)load(context, "vkCmdSetSampleMaskEXT");
+ vkCmdSetShadingRateImageEnableNV = (PFN_vkCmdSetShadingRateImageEnableNV)load(context, "vkCmdSetShadingRateImageEnableNV");
+ vkCmdSetTessellationDomainOriginEXT = (PFN_vkCmdSetTessellationDomainOriginEXT)load(context, "vkCmdSetTessellationDomainOriginEXT");
+ vkCmdSetViewportSwizzleNV = (PFN_vkCmdSetViewportSwizzleNV)load(context, "vkCmdSetViewportSwizzleNV");
+ vkCmdSetViewportWScalingEnableNV = (PFN_vkCmdSetViewportWScalingEnableNV)load(context, "vkCmdSetViewportWScalingEnableNV");
+#endif /* defined(VK_EXT_extended_dynamic_state3) */
#if defined(VK_EXT_external_memory_host)
vkGetMemoryHostPointerPropertiesEXT = (PFN_vkGetMemoryHostPointerPropertiesEXT)load(context, "vkGetMemoryHostPointerPropertiesEXT");
#endif /* defined(VK_EXT_external_memory_host) */
@@ -624,19 +663,49 @@ static void volkGenLoadDevice(void* context, PFN_vkVoidFunction (*load)(void*, c
#if defined(VK_EXT_host_query_reset)
vkResetQueryPoolEXT = (PFN_vkResetQueryPoolEXT)load(context, "vkResetQueryPoolEXT");
#endif /* defined(VK_EXT_host_query_reset) */
+#if defined(VK_EXT_image_compression_control)
+ vkGetImageSubresourceLayout2EXT = (PFN_vkGetImageSubresourceLayout2EXT)load(context, "vkGetImageSubresourceLayout2EXT");
+#endif /* defined(VK_EXT_image_compression_control) */
#if defined(VK_EXT_image_drm_format_modifier)
vkGetImageDrmFormatModifierPropertiesEXT = (PFN_vkGetImageDrmFormatModifierPropertiesEXT)load(context, "vkGetImageDrmFormatModifierPropertiesEXT");
#endif /* defined(VK_EXT_image_drm_format_modifier) */
#if defined(VK_EXT_line_rasterization)
vkCmdSetLineStippleEXT = (PFN_vkCmdSetLineStippleEXT)load(context, "vkCmdSetLineStippleEXT");
#endif /* defined(VK_EXT_line_rasterization) */
+#if defined(VK_EXT_mesh_shader)
+ vkCmdDrawMeshTasksEXT = (PFN_vkCmdDrawMeshTasksEXT)load(context, "vkCmdDrawMeshTasksEXT");
+ vkCmdDrawMeshTasksIndirectCountEXT = (PFN_vkCmdDrawMeshTasksIndirectCountEXT)load(context, "vkCmdDrawMeshTasksIndirectCountEXT");
+ vkCmdDrawMeshTasksIndirectEXT = (PFN_vkCmdDrawMeshTasksIndirectEXT)load(context, "vkCmdDrawMeshTasksIndirectEXT");
+#endif /* defined(VK_EXT_mesh_shader) */
+#if defined(VK_EXT_metal_objects)
+ vkExportMetalObjectsEXT = (PFN_vkExportMetalObjectsEXT)load(context, "vkExportMetalObjectsEXT");
+#endif /* defined(VK_EXT_metal_objects) */
#if defined(VK_EXT_multi_draw)
vkCmdDrawMultiEXT = (PFN_vkCmdDrawMultiEXT)load(context, "vkCmdDrawMultiEXT");
vkCmdDrawMultiIndexedEXT = (PFN_vkCmdDrawMultiIndexedEXT)load(context, "vkCmdDrawMultiIndexedEXT");
#endif /* defined(VK_EXT_multi_draw) */
+#if defined(VK_EXT_opacity_micromap)
+ vkBuildMicromapsEXT = (PFN_vkBuildMicromapsEXT)load(context, "vkBuildMicromapsEXT");
+ vkCmdBuildMicromapsEXT = (PFN_vkCmdBuildMicromapsEXT)load(context, "vkCmdBuildMicromapsEXT");
+ vkCmdCopyMemoryToMicromapEXT = (PFN_vkCmdCopyMemoryToMicromapEXT)load(context, "vkCmdCopyMemoryToMicromapEXT");
+ vkCmdCopyMicromapEXT = (PFN_vkCmdCopyMicromapEXT)load(context, "vkCmdCopyMicromapEXT");
+ vkCmdCopyMicromapToMemoryEXT = (PFN_vkCmdCopyMicromapToMemoryEXT)load(context, "vkCmdCopyMicromapToMemoryEXT");
+ vkCmdWriteMicromapsPropertiesEXT = (PFN_vkCmdWriteMicromapsPropertiesEXT)load(context, "vkCmdWriteMicromapsPropertiesEXT");
+ vkCopyMemoryToMicromapEXT = (PFN_vkCopyMemoryToMicromapEXT)load(context, "vkCopyMemoryToMicromapEXT");
+ vkCopyMicromapEXT = (PFN_vkCopyMicromapEXT)load(context, "vkCopyMicromapEXT");
+ vkCopyMicromapToMemoryEXT = (PFN_vkCopyMicromapToMemoryEXT)load(context, "vkCopyMicromapToMemoryEXT");
+ vkCreateMicromapEXT = (PFN_vkCreateMicromapEXT)load(context, "vkCreateMicromapEXT");
+ vkDestroyMicromapEXT = (PFN_vkDestroyMicromapEXT)load(context, "vkDestroyMicromapEXT");
+ vkGetDeviceMicromapCompatibilityEXT = (PFN_vkGetDeviceMicromapCompatibilityEXT)load(context, "vkGetDeviceMicromapCompatibilityEXT");
+ vkGetMicromapBuildSizesEXT = (PFN_vkGetMicromapBuildSizesEXT)load(context, "vkGetMicromapBuildSizesEXT");
+ vkWriteMicromapsPropertiesEXT = (PFN_vkWriteMicromapsPropertiesEXT)load(context, "vkWriteMicromapsPropertiesEXT");
+#endif /* defined(VK_EXT_opacity_micromap) */
#if defined(VK_EXT_pageable_device_local_memory)
vkSetDeviceMemoryPriorityEXT = (PFN_vkSetDeviceMemoryPriorityEXT)load(context, "vkSetDeviceMemoryPriorityEXT");
#endif /* defined(VK_EXT_pageable_device_local_memory) */
+#if defined(VK_EXT_pipeline_properties)
+ vkGetPipelinePropertiesEXT = (PFN_vkGetPipelinePropertiesEXT)load(context, "vkGetPipelinePropertiesEXT");
+#endif /* defined(VK_EXT_pipeline_properties) */
#if defined(VK_EXT_private_data)
vkCreatePrivateDataSlotEXT = (PFN_vkCreatePrivateDataSlotEXT)load(context, "vkCreatePrivateDataSlotEXT");
vkDestroyPrivateDataSlotEXT = (PFN_vkDestroyPrivateDataSlotEXT)load(context, "vkDestroyPrivateDataSlotEXT");
@@ -646,6 +715,10 @@ static void volkGenLoadDevice(void* context, PFN_vkVoidFunction (*load)(void*, c
#if defined(VK_EXT_sample_locations)
vkCmdSetSampleLocationsEXT = (PFN_vkCmdSetSampleLocationsEXT)load(context, "vkCmdSetSampleLocationsEXT");
#endif /* defined(VK_EXT_sample_locations) */
+#if defined(VK_EXT_shader_module_identifier)
+ vkGetShaderModuleCreateInfoIdentifierEXT = (PFN_vkGetShaderModuleCreateInfoIdentifierEXT)load(context, "vkGetShaderModuleCreateInfoIdentifierEXT");
+ vkGetShaderModuleIdentifierEXT = (PFN_vkGetShaderModuleIdentifierEXT)load(context, "vkGetShaderModuleIdentifierEXT");
+#endif /* defined(VK_EXT_shader_module_identifier) */
#if defined(VK_EXT_transform_feedback)
vkCmdBeginQueryIndexedEXT = (PFN_vkCmdBeginQueryIndexedEXT)load(context, "vkCmdBeginQueryIndexedEXT");
vkCmdBeginTransformFeedbackEXT = (PFN_vkCmdBeginTransformFeedbackEXT)load(context, "vkCmdBeginTransformFeedbackEXT");
@@ -827,6 +900,9 @@ static void volkGenLoadDevice(void* context, PFN_vkVoidFunction (*load)(void*, c
#if defined(VK_KHR_push_descriptor)
vkCmdPushDescriptorSetKHR = (PFN_vkCmdPushDescriptorSetKHR)load(context, "vkCmdPushDescriptorSetKHR");
#endif /* defined(VK_KHR_push_descriptor) */
+#if defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline)
+ vkCmdTraceRaysIndirect2KHR = (PFN_vkCmdTraceRaysIndirect2KHR)load(context, "vkCmdTraceRaysIndirect2KHR");
+#endif /* defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline) */
#if defined(VK_KHR_ray_tracing_pipeline)
vkCmdSetRayTracingPipelineStackSizeKHR = (PFN_vkCmdSetRayTracingPipelineStackSizeKHR)load(context, "vkCmdSetRayTracingPipelineStackSizeKHR");
vkCmdTraceRaysIndirectKHR = (PFN_vkCmdTraceRaysIndirectKHR)load(context, "vkCmdTraceRaysIndirectKHR");
@@ -927,6 +1003,12 @@ static void volkGenLoadDevice(void* context, PFN_vkVoidFunction (*load)(void*, c
vkCmdDrawMeshTasksIndirectNV = (PFN_vkCmdDrawMeshTasksIndirectNV)load(context, "vkCmdDrawMeshTasksIndirectNV");
vkCmdDrawMeshTasksNV = (PFN_vkCmdDrawMeshTasksNV)load(context, "vkCmdDrawMeshTasksNV");
#endif /* defined(VK_NV_mesh_shader) */
+#if defined(VK_NV_optical_flow)
+ vkBindOpticalFlowSessionImageNV = (PFN_vkBindOpticalFlowSessionImageNV)load(context, "vkBindOpticalFlowSessionImageNV");
+ vkCmdOpticalFlowExecuteNV = (PFN_vkCmdOpticalFlowExecuteNV)load(context, "vkCmdOpticalFlowExecuteNV");
+ vkCreateOpticalFlowSessionNV = (PFN_vkCreateOpticalFlowSessionNV)load(context, "vkCreateOpticalFlowSessionNV");
+ vkDestroyOpticalFlowSessionNV = (PFN_vkDestroyOpticalFlowSessionNV)load(context, "vkDestroyOpticalFlowSessionNV");
+#endif /* defined(VK_NV_optical_flow) */
#if defined(VK_NV_ray_tracing)
vkBindAccelerationStructureMemoryNV = (PFN_vkBindAccelerationStructureMemoryNV)load(context, "vkBindAccelerationStructureMemoryNV");
vkCmdBuildAccelerationStructureNV = (PFN_vkCmdBuildAccelerationStructureNV)load(context, "vkCmdBuildAccelerationStructureNV");
@@ -949,6 +1031,14 @@ static void volkGenLoadDevice(void* context, PFN_vkVoidFunction (*load)(void*, c
vkCmdSetCoarseSampleOrderNV = (PFN_vkCmdSetCoarseSampleOrderNV)load(context, "vkCmdSetCoarseSampleOrderNV");
vkCmdSetViewportShadingRatePaletteNV = (PFN_vkCmdSetViewportShadingRatePaletteNV)load(context, "vkCmdSetViewportShadingRatePaletteNV");
#endif /* defined(VK_NV_shading_rate_image) */
+#if defined(VK_QCOM_tile_properties)
+ vkGetDynamicRenderingTilePropertiesQCOM = (PFN_vkGetDynamicRenderingTilePropertiesQCOM)load(context, "vkGetDynamicRenderingTilePropertiesQCOM");
+ vkGetFramebufferTilePropertiesQCOM = (PFN_vkGetFramebufferTilePropertiesQCOM)load(context, "vkGetFramebufferTilePropertiesQCOM");
+#endif /* defined(VK_QCOM_tile_properties) */
+#if defined(VK_VALVE_descriptor_set_host_mapping)
+ vkGetDescriptorSetHostMappingVALVE = (PFN_vkGetDescriptorSetHostMappingVALVE)load(context, "vkGetDescriptorSetHostMappingVALVE");
+ vkGetDescriptorSetLayoutHostMappingInfoVALVE = (PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE)load(context, "vkGetDescriptorSetLayoutHostMappingInfoVALVE");
+#endif /* defined(VK_VALVE_descriptor_set_host_mapping) */
#if (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1))
vkGetDeviceGroupSurfacePresentModes2EXT = (PFN_vkGetDeviceGroupSurfacePresentModes2EXT)load(context, "vkGetDeviceGroupSurfacePresentModes2EXT");
#endif /* (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1)) */
@@ -1198,6 +1288,9 @@ static void volkGenLoadDeviceTable(struct VolkDeviceTable* table, void* context,
table->vkDebugMarkerSetObjectNameEXT = (PFN_vkDebugMarkerSetObjectNameEXT)load(context, "vkDebugMarkerSetObjectNameEXT");
table->vkDebugMarkerSetObjectTagEXT = (PFN_vkDebugMarkerSetObjectTagEXT)load(context, "vkDebugMarkerSetObjectTagEXT");
#endif /* defined(VK_EXT_debug_marker) */
+#if defined(VK_EXT_device_fault)
+ table->vkGetDeviceFaultInfoEXT = (PFN_vkGetDeviceFaultInfoEXT)load(context, "vkGetDeviceFaultInfoEXT");
+#endif /* defined(VK_EXT_device_fault) */
#if defined(VK_EXT_discard_rectangles)
table->vkCmdSetDiscardRectangleEXT = (PFN_vkCmdSetDiscardRectangleEXT)load(context, "vkCmdSetDiscardRectangleEXT");
#endif /* defined(VK_EXT_discard_rectangles) */
@@ -1228,6 +1321,39 @@ static void volkGenLoadDeviceTable(struct VolkDeviceTable* table, void* context,
table->vkCmdSetPrimitiveRestartEnableEXT = (PFN_vkCmdSetPrimitiveRestartEnableEXT)load(context, "vkCmdSetPrimitiveRestartEnableEXT");
table->vkCmdSetRasterizerDiscardEnableEXT = (PFN_vkCmdSetRasterizerDiscardEnableEXT)load(context, "vkCmdSetRasterizerDiscardEnableEXT");
#endif /* defined(VK_EXT_extended_dynamic_state2) */
+#if defined(VK_EXT_extended_dynamic_state3)
+ table->vkCmdSetAlphaToCoverageEnableEXT = (PFN_vkCmdSetAlphaToCoverageEnableEXT)load(context, "vkCmdSetAlphaToCoverageEnableEXT");
+ table->vkCmdSetAlphaToOneEnableEXT = (PFN_vkCmdSetAlphaToOneEnableEXT)load(context, "vkCmdSetAlphaToOneEnableEXT");
+ table->vkCmdSetColorBlendAdvancedEXT = (PFN_vkCmdSetColorBlendAdvancedEXT)load(context, "vkCmdSetColorBlendAdvancedEXT");
+ table->vkCmdSetColorBlendEnableEXT = (PFN_vkCmdSetColorBlendEnableEXT)load(context, "vkCmdSetColorBlendEnableEXT");
+ table->vkCmdSetColorBlendEquationEXT = (PFN_vkCmdSetColorBlendEquationEXT)load(context, "vkCmdSetColorBlendEquationEXT");
+ table->vkCmdSetColorWriteMaskEXT = (PFN_vkCmdSetColorWriteMaskEXT)load(context, "vkCmdSetColorWriteMaskEXT");
+ table->vkCmdSetConservativeRasterizationModeEXT = (PFN_vkCmdSetConservativeRasterizationModeEXT)load(context, "vkCmdSetConservativeRasterizationModeEXT");
+ table->vkCmdSetCoverageModulationModeNV = (PFN_vkCmdSetCoverageModulationModeNV)load(context, "vkCmdSetCoverageModulationModeNV");
+ table->vkCmdSetCoverageModulationTableEnableNV = (PFN_vkCmdSetCoverageModulationTableEnableNV)load(context, "vkCmdSetCoverageModulationTableEnableNV");
+ table->vkCmdSetCoverageModulationTableNV = (PFN_vkCmdSetCoverageModulationTableNV)load(context, "vkCmdSetCoverageModulationTableNV");
+ table->vkCmdSetCoverageReductionModeNV = (PFN_vkCmdSetCoverageReductionModeNV)load(context, "vkCmdSetCoverageReductionModeNV");
+ table->vkCmdSetCoverageToColorEnableNV = (PFN_vkCmdSetCoverageToColorEnableNV)load(context, "vkCmdSetCoverageToColorEnableNV");
+ table->vkCmdSetCoverageToColorLocationNV = (PFN_vkCmdSetCoverageToColorLocationNV)load(context, "vkCmdSetCoverageToColorLocationNV");
+ table->vkCmdSetDepthClampEnableEXT = (PFN_vkCmdSetDepthClampEnableEXT)load(context, "vkCmdSetDepthClampEnableEXT");
+ table->vkCmdSetDepthClipEnableEXT = (PFN_vkCmdSetDepthClipEnableEXT)load(context, "vkCmdSetDepthClipEnableEXT");
+ table->vkCmdSetDepthClipNegativeOneToOneEXT = (PFN_vkCmdSetDepthClipNegativeOneToOneEXT)load(context, "vkCmdSetDepthClipNegativeOneToOneEXT");
+ table->vkCmdSetExtraPrimitiveOverestimationSizeEXT = (PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT)load(context, "vkCmdSetExtraPrimitiveOverestimationSizeEXT");
+ table->vkCmdSetLineRasterizationModeEXT = (PFN_vkCmdSetLineRasterizationModeEXT)load(context, "vkCmdSetLineRasterizationModeEXT");
+ table->vkCmdSetLineStippleEnableEXT = (PFN_vkCmdSetLineStippleEnableEXT)load(context, "vkCmdSetLineStippleEnableEXT");
+ table->vkCmdSetLogicOpEnableEXT = (PFN_vkCmdSetLogicOpEnableEXT)load(context, "vkCmdSetLogicOpEnableEXT");
+ table->vkCmdSetPolygonModeEXT = (PFN_vkCmdSetPolygonModeEXT)load(context, "vkCmdSetPolygonModeEXT");
+ table->vkCmdSetProvokingVertexModeEXT = (PFN_vkCmdSetProvokingVertexModeEXT)load(context, "vkCmdSetProvokingVertexModeEXT");
+ table->vkCmdSetRasterizationSamplesEXT = (PFN_vkCmdSetRasterizationSamplesEXT)load(context, "vkCmdSetRasterizationSamplesEXT");
+ table->vkCmdSetRasterizationStreamEXT = (PFN_vkCmdSetRasterizationStreamEXT)load(context, "vkCmdSetRasterizationStreamEXT");
+ table->vkCmdSetRepresentativeFragmentTestEnableNV = (PFN_vkCmdSetRepresentativeFragmentTestEnableNV)load(context, "vkCmdSetRepresentativeFragmentTestEnableNV");
+ table->vkCmdSetSampleLocationsEnableEXT = (PFN_vkCmdSetSampleLocationsEnableEXT)load(context, "vkCmdSetSampleLocationsEnableEXT");
+ table->vkCmdSetSampleMaskEXT = (PFN_vkCmdSetSampleMaskEXT)load(context, "vkCmdSetSampleMaskEXT");
+ table->vkCmdSetShadingRateImageEnableNV = (PFN_vkCmdSetShadingRateImageEnableNV)load(context, "vkCmdSetShadingRateImageEnableNV");
+ table->vkCmdSetTessellationDomainOriginEXT = (PFN_vkCmdSetTessellationDomainOriginEXT)load(context, "vkCmdSetTessellationDomainOriginEXT");
+ table->vkCmdSetViewportSwizzleNV = (PFN_vkCmdSetViewportSwizzleNV)load(context, "vkCmdSetViewportSwizzleNV");
+ table->vkCmdSetViewportWScalingEnableNV = (PFN_vkCmdSetViewportWScalingEnableNV)load(context, "vkCmdSetViewportWScalingEnableNV");
+#endif /* defined(VK_EXT_extended_dynamic_state3) */
#if defined(VK_EXT_external_memory_host)
table->vkGetMemoryHostPointerPropertiesEXT = (PFN_vkGetMemoryHostPointerPropertiesEXT)load(context, "vkGetMemoryHostPointerPropertiesEXT");
#endif /* defined(VK_EXT_external_memory_host) */
@@ -1241,19 +1367,49 @@ static void volkGenLoadDeviceTable(struct VolkDeviceTable* table, void* context,
#if defined(VK_EXT_host_query_reset)
table->vkResetQueryPoolEXT = (PFN_vkResetQueryPoolEXT)load(context, "vkResetQueryPoolEXT");
#endif /* defined(VK_EXT_host_query_reset) */
+#if defined(VK_EXT_image_compression_control)
+ table->vkGetImageSubresourceLayout2EXT = (PFN_vkGetImageSubresourceLayout2EXT)load(context, "vkGetImageSubresourceLayout2EXT");
+#endif /* defined(VK_EXT_image_compression_control) */
#if defined(VK_EXT_image_drm_format_modifier)
table->vkGetImageDrmFormatModifierPropertiesEXT = (PFN_vkGetImageDrmFormatModifierPropertiesEXT)load(context, "vkGetImageDrmFormatModifierPropertiesEXT");
#endif /* defined(VK_EXT_image_drm_format_modifier) */
#if defined(VK_EXT_line_rasterization)
table->vkCmdSetLineStippleEXT = (PFN_vkCmdSetLineStippleEXT)load(context, "vkCmdSetLineStippleEXT");
#endif /* defined(VK_EXT_line_rasterization) */
+#if defined(VK_EXT_mesh_shader)
+ table->vkCmdDrawMeshTasksEXT = (PFN_vkCmdDrawMeshTasksEXT)load(context, "vkCmdDrawMeshTasksEXT");
+ table->vkCmdDrawMeshTasksIndirectCountEXT = (PFN_vkCmdDrawMeshTasksIndirectCountEXT)load(context, "vkCmdDrawMeshTasksIndirectCountEXT");
+ table->vkCmdDrawMeshTasksIndirectEXT = (PFN_vkCmdDrawMeshTasksIndirectEXT)load(context, "vkCmdDrawMeshTasksIndirectEXT");
+#endif /* defined(VK_EXT_mesh_shader) */
+#if defined(VK_EXT_metal_objects)
+ table->vkExportMetalObjectsEXT = (PFN_vkExportMetalObjectsEXT)load(context, "vkExportMetalObjectsEXT");
+#endif /* defined(VK_EXT_metal_objects) */
#if defined(VK_EXT_multi_draw)
table->vkCmdDrawMultiEXT = (PFN_vkCmdDrawMultiEXT)load(context, "vkCmdDrawMultiEXT");
table->vkCmdDrawMultiIndexedEXT = (PFN_vkCmdDrawMultiIndexedEXT)load(context, "vkCmdDrawMultiIndexedEXT");
#endif /* defined(VK_EXT_multi_draw) */
+#if defined(VK_EXT_opacity_micromap)
+ table->vkBuildMicromapsEXT = (PFN_vkBuildMicromapsEXT)load(context, "vkBuildMicromapsEXT");
+ table->vkCmdBuildMicromapsEXT = (PFN_vkCmdBuildMicromapsEXT)load(context, "vkCmdBuildMicromapsEXT");
+ table->vkCmdCopyMemoryToMicromapEXT = (PFN_vkCmdCopyMemoryToMicromapEXT)load(context, "vkCmdCopyMemoryToMicromapEXT");
+ table->vkCmdCopyMicromapEXT = (PFN_vkCmdCopyMicromapEXT)load(context, "vkCmdCopyMicromapEXT");
+ table->vkCmdCopyMicromapToMemoryEXT = (PFN_vkCmdCopyMicromapToMemoryEXT)load(context, "vkCmdCopyMicromapToMemoryEXT");
+ table->vkCmdWriteMicromapsPropertiesEXT = (PFN_vkCmdWriteMicromapsPropertiesEXT)load(context, "vkCmdWriteMicromapsPropertiesEXT");
+ table->vkCopyMemoryToMicromapEXT = (PFN_vkCopyMemoryToMicromapEXT)load(context, "vkCopyMemoryToMicromapEXT");
+ table->vkCopyMicromapEXT = (PFN_vkCopyMicromapEXT)load(context, "vkCopyMicromapEXT");
+ table->vkCopyMicromapToMemoryEXT = (PFN_vkCopyMicromapToMemoryEXT)load(context, "vkCopyMicromapToMemoryEXT");
+ table->vkCreateMicromapEXT = (PFN_vkCreateMicromapEXT)load(context, "vkCreateMicromapEXT");
+ table->vkDestroyMicromapEXT = (PFN_vkDestroyMicromapEXT)load(context, "vkDestroyMicromapEXT");
+ table->vkGetDeviceMicromapCompatibilityEXT = (PFN_vkGetDeviceMicromapCompatibilityEXT)load(context, "vkGetDeviceMicromapCompatibilityEXT");
+ table->vkGetMicromapBuildSizesEXT = (PFN_vkGetMicromapBuildSizesEXT)load(context, "vkGetMicromapBuildSizesEXT");
+ table->vkWriteMicromapsPropertiesEXT = (PFN_vkWriteMicromapsPropertiesEXT)load(context, "vkWriteMicromapsPropertiesEXT");
+#endif /* defined(VK_EXT_opacity_micromap) */
#if defined(VK_EXT_pageable_device_local_memory)
table->vkSetDeviceMemoryPriorityEXT = (PFN_vkSetDeviceMemoryPriorityEXT)load(context, "vkSetDeviceMemoryPriorityEXT");
#endif /* defined(VK_EXT_pageable_device_local_memory) */
+#if defined(VK_EXT_pipeline_properties)
+ table->vkGetPipelinePropertiesEXT = (PFN_vkGetPipelinePropertiesEXT)load(context, "vkGetPipelinePropertiesEXT");
+#endif /* defined(VK_EXT_pipeline_properties) */
#if defined(VK_EXT_private_data)
table->vkCreatePrivateDataSlotEXT = (PFN_vkCreatePrivateDataSlotEXT)load(context, "vkCreatePrivateDataSlotEXT");
table->vkDestroyPrivateDataSlotEXT = (PFN_vkDestroyPrivateDataSlotEXT)load(context, "vkDestroyPrivateDataSlotEXT");
@@ -1263,6 +1419,10 @@ static void volkGenLoadDeviceTable(struct VolkDeviceTable* table, void* context,
#if defined(VK_EXT_sample_locations)
table->vkCmdSetSampleLocationsEXT = (PFN_vkCmdSetSampleLocationsEXT)load(context, "vkCmdSetSampleLocationsEXT");
#endif /* defined(VK_EXT_sample_locations) */
+#if defined(VK_EXT_shader_module_identifier)
+ table->vkGetShaderModuleCreateInfoIdentifierEXT = (PFN_vkGetShaderModuleCreateInfoIdentifierEXT)load(context, "vkGetShaderModuleCreateInfoIdentifierEXT");
+ table->vkGetShaderModuleIdentifierEXT = (PFN_vkGetShaderModuleIdentifierEXT)load(context, "vkGetShaderModuleIdentifierEXT");
+#endif /* defined(VK_EXT_shader_module_identifier) */
#if defined(VK_EXT_transform_feedback)
table->vkCmdBeginQueryIndexedEXT = (PFN_vkCmdBeginQueryIndexedEXT)load(context, "vkCmdBeginQueryIndexedEXT");
table->vkCmdBeginTransformFeedbackEXT = (PFN_vkCmdBeginTransformFeedbackEXT)load(context, "vkCmdBeginTransformFeedbackEXT");
@@ -1444,6 +1604,9 @@ static void volkGenLoadDeviceTable(struct VolkDeviceTable* table, void* context,
#if defined(VK_KHR_push_descriptor)
table->vkCmdPushDescriptorSetKHR = (PFN_vkCmdPushDescriptorSetKHR)load(context, "vkCmdPushDescriptorSetKHR");
#endif /* defined(VK_KHR_push_descriptor) */
+#if defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline)
+ table->vkCmdTraceRaysIndirect2KHR = (PFN_vkCmdTraceRaysIndirect2KHR)load(context, "vkCmdTraceRaysIndirect2KHR");
+#endif /* defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline) */
#if defined(VK_KHR_ray_tracing_pipeline)
table->vkCmdSetRayTracingPipelineStackSizeKHR = (PFN_vkCmdSetRayTracingPipelineStackSizeKHR)load(context, "vkCmdSetRayTracingPipelineStackSizeKHR");
table->vkCmdTraceRaysIndirectKHR = (PFN_vkCmdTraceRaysIndirectKHR)load(context, "vkCmdTraceRaysIndirectKHR");
@@ -1544,6 +1707,12 @@ static void volkGenLoadDeviceTable(struct VolkDeviceTable* table, void* context,
table->vkCmdDrawMeshTasksIndirectNV = (PFN_vkCmdDrawMeshTasksIndirectNV)load(context, "vkCmdDrawMeshTasksIndirectNV");
table->vkCmdDrawMeshTasksNV = (PFN_vkCmdDrawMeshTasksNV)load(context, "vkCmdDrawMeshTasksNV");
#endif /* defined(VK_NV_mesh_shader) */
+#if defined(VK_NV_optical_flow)
+ table->vkBindOpticalFlowSessionImageNV = (PFN_vkBindOpticalFlowSessionImageNV)load(context, "vkBindOpticalFlowSessionImageNV");
+ table->vkCmdOpticalFlowExecuteNV = (PFN_vkCmdOpticalFlowExecuteNV)load(context, "vkCmdOpticalFlowExecuteNV");
+ table->vkCreateOpticalFlowSessionNV = (PFN_vkCreateOpticalFlowSessionNV)load(context, "vkCreateOpticalFlowSessionNV");
+ table->vkDestroyOpticalFlowSessionNV = (PFN_vkDestroyOpticalFlowSessionNV)load(context, "vkDestroyOpticalFlowSessionNV");
+#endif /* defined(VK_NV_optical_flow) */
#if defined(VK_NV_ray_tracing)
table->vkBindAccelerationStructureMemoryNV = (PFN_vkBindAccelerationStructureMemoryNV)load(context, "vkBindAccelerationStructureMemoryNV");
table->vkCmdBuildAccelerationStructureNV = (PFN_vkCmdBuildAccelerationStructureNV)load(context, "vkCmdBuildAccelerationStructureNV");
@@ -1566,6 +1735,14 @@ static void volkGenLoadDeviceTable(struct VolkDeviceTable* table, void* context,
table->vkCmdSetCoarseSampleOrderNV = (PFN_vkCmdSetCoarseSampleOrderNV)load(context, "vkCmdSetCoarseSampleOrderNV");
table->vkCmdSetViewportShadingRatePaletteNV = (PFN_vkCmdSetViewportShadingRatePaletteNV)load(context, "vkCmdSetViewportShadingRatePaletteNV");
#endif /* defined(VK_NV_shading_rate_image) */
+#if defined(VK_QCOM_tile_properties)
+ table->vkGetDynamicRenderingTilePropertiesQCOM = (PFN_vkGetDynamicRenderingTilePropertiesQCOM)load(context, "vkGetDynamicRenderingTilePropertiesQCOM");
+ table->vkGetFramebufferTilePropertiesQCOM = (PFN_vkGetFramebufferTilePropertiesQCOM)load(context, "vkGetFramebufferTilePropertiesQCOM");
+#endif /* defined(VK_QCOM_tile_properties) */
+#if defined(VK_VALVE_descriptor_set_host_mapping)
+ table->vkGetDescriptorSetHostMappingVALVE = (PFN_vkGetDescriptorSetHostMappingVALVE)load(context, "vkGetDescriptorSetHostMappingVALVE");
+ table->vkGetDescriptorSetLayoutHostMappingInfoVALVE = (PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE)load(context, "vkGetDescriptorSetLayoutHostMappingInfoVALVE");
+#endif /* defined(VK_VALVE_descriptor_set_host_mapping) */
#if (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1))
table->vkGetDeviceGroupSurfacePresentModes2EXT = (PFN_vkGetDeviceGroupSurfacePresentModes2EXT)load(context, "vkGetDeviceGroupSurfacePresentModes2EXT");
#endif /* (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1)) */
@@ -1878,6 +2055,9 @@ PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT;
PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT;
PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT;
#endif /* defined(VK_EXT_debug_utils) */
+#if defined(VK_EXT_device_fault)
+PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT;
+#endif /* defined(VK_EXT_device_fault) */
#if defined(VK_EXT_direct_mode_display)
PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT;
#endif /* defined(VK_EXT_direct_mode_display) */
@@ -1918,6 +2098,39 @@ PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT;
PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT;
PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT;
#endif /* defined(VK_EXT_extended_dynamic_state2) */
+#if defined(VK_EXT_extended_dynamic_state3)
+PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT;
+PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT;
+PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT;
+PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT;
+PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT;
+PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT;
+PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT;
+PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV;
+PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV;
+PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV;
+PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV;
+PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV;
+PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV;
+PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT;
+PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT;
+PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT;
+PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT;
+PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT;
+PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT;
+PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT;
+PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT;
+PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT;
+PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT;
+PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT;
+PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV;
+PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT;
+PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT;
+PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV;
+PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT;
+PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV;
+PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV;
+#endif /* defined(VK_EXT_extended_dynamic_state3) */
#if defined(VK_EXT_external_memory_host)
PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT;
#endif /* defined(VK_EXT_external_memory_host) */
@@ -1935,12 +2148,23 @@ PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT;
#if defined(VK_EXT_host_query_reset)
PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT;
#endif /* defined(VK_EXT_host_query_reset) */
+#if defined(VK_EXT_image_compression_control)
+PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT;
+#endif /* defined(VK_EXT_image_compression_control) */
#if defined(VK_EXT_image_drm_format_modifier)
PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT;
#endif /* defined(VK_EXT_image_drm_format_modifier) */
#if defined(VK_EXT_line_rasterization)
PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT;
#endif /* defined(VK_EXT_line_rasterization) */
+#if defined(VK_EXT_mesh_shader)
+PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT;
+PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT;
+PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT;
+#endif /* defined(VK_EXT_mesh_shader) */
+#if defined(VK_EXT_metal_objects)
+PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT;
+#endif /* defined(VK_EXT_metal_objects) */
#if defined(VK_EXT_metal_surface)
PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT;
#endif /* defined(VK_EXT_metal_surface) */
@@ -1948,9 +2172,28 @@ PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT;
PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT;
PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT;
#endif /* defined(VK_EXT_multi_draw) */
+#if defined(VK_EXT_opacity_micromap)
+PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT;
+PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT;
+PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT;
+PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT;
+PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT;
+PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT;
+PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT;
+PFN_vkCopyMicromapEXT vkCopyMicromapEXT;
+PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT;
+PFN_vkCreateMicromapEXT vkCreateMicromapEXT;
+PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT;
+PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT;
+PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT;
+PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT;
+#endif /* defined(VK_EXT_opacity_micromap) */
#if defined(VK_EXT_pageable_device_local_memory)
PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT;
#endif /* defined(VK_EXT_pageable_device_local_memory) */
+#if defined(VK_EXT_pipeline_properties)
+PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT;
+#endif /* defined(VK_EXT_pipeline_properties) */
#if defined(VK_EXT_private_data)
PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT;
PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT;
@@ -1961,6 +2204,10 @@ PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT;
PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT;
PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT;
#endif /* defined(VK_EXT_sample_locations) */
+#if defined(VK_EXT_shader_module_identifier)
+PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT;
+PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT;
+#endif /* defined(VK_EXT_shader_module_identifier) */
#if defined(VK_EXT_tooling_info)
PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT;
#endif /* defined(VK_EXT_tooling_info) */
@@ -2197,6 +2444,9 @@ PFN_vkWaitForPresentKHR vkWaitForPresentKHR;
#if defined(VK_KHR_push_descriptor)
PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR;
#endif /* defined(VK_KHR_push_descriptor) */
+#if defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline)
+PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR;
+#endif /* defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline) */
#if defined(VK_KHR_ray_tracing_pipeline)
PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR;
PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR;
@@ -2344,6 +2594,13 @@ PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV;
PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV;
PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV;
#endif /* defined(VK_NV_mesh_shader) */
+#if defined(VK_NV_optical_flow)
+PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV;
+PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV;
+PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV;
+PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV;
+PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV;
+#endif /* defined(VK_NV_optical_flow) */
#if defined(VK_NV_ray_tracing)
PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV;
PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV;
@@ -2366,10 +2623,18 @@ PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV;
PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV;
PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV;
#endif /* defined(VK_NV_shading_rate_image) */
+#if defined(VK_QCOM_tile_properties)
+PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM;
+PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM;
+#endif /* defined(VK_QCOM_tile_properties) */
#if defined(VK_QNX_screen_surface)
PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX;
PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX;
#endif /* defined(VK_QNX_screen_surface) */
+#if defined(VK_VALVE_descriptor_set_host_mapping)
+PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE;
+PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE;
+#endif /* defined(VK_VALVE_descriptor_set_host_mapping) */
#if (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1))
PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT;
#endif /* (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1)) */
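The volk additions above only register additional entry points; volk stores whatever vkGetDeviceProcAddr() returns, which can be null when the driver or device does not expose the extension. A minimal usage sketch (not from volk), assuming the device was created with VK_EXT_mesh_shader enabled and volkLoadDevice() has already run:

    #include <volk.h>

    void drawMeshTasks(VkCommandBuffer cmd)
    {
    #if defined(VK_EXT_mesh_shader)
        if (vkCmdDrawMeshTasksEXT) {               // null if the extension is unavailable
            vkCmdDrawMeshTasksEXT(cmd, 1, 1, 1);   // groupCountX, groupCountY, groupCountZ
        }
    #endif
    }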
diff --git a/thirdparty/volk/volk.h b/thirdparty/volk/volk.h
index cdeedfc5ff..daf5550e51 100644
--- a/thirdparty/volk/volk.h
+++ b/thirdparty/volk/volk.h
@@ -1,7 +1,7 @@
/**
* volk
*
- * Copyright (C) 2018-2019, by Arseny Kapoulkine (arseny.kapoulkine@gmail.com)
+ * Copyright (C) 2018-2022, by Arseny Kapoulkine (arseny.kapoulkine@gmail.com)
* Report bugs and download new versions at https://github.com/zeux/volk
*
* This library is distributed under the MIT License. See notice at the end of this file.
@@ -15,7 +15,7 @@
#endif
/* VOLK_GENERATE_VERSION_DEFINE */
-#define VOLK_HEADER_VERSION 204
+#define VOLK_HEADER_VERSION 231
/* VOLK_GENERATE_VERSION_DEFINE */
#ifndef VK_NO_PROTOTYPES
@@ -360,6 +360,9 @@ struct VolkDeviceTable
PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT;
PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT;
#endif /* defined(VK_EXT_debug_marker) */
+#if defined(VK_EXT_device_fault)
+ PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT;
+#endif /* defined(VK_EXT_device_fault) */
#if defined(VK_EXT_discard_rectangles)
PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT;
#endif /* defined(VK_EXT_discard_rectangles) */
@@ -390,6 +393,39 @@ struct VolkDeviceTable
PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT;
PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT;
#endif /* defined(VK_EXT_extended_dynamic_state2) */
+#if defined(VK_EXT_extended_dynamic_state3)
+ PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT;
+ PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT;
+ PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT;
+ PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT;
+ PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT;
+ PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT;
+ PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT;
+ PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV;
+ PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV;
+ PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV;
+ PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV;
+ PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV;
+ PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV;
+ PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT;
+ PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT;
+ PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT;
+ PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT;
+ PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT;
+ PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT;
+ PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT;
+ PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT;
+ PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT;
+ PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT;
+ PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT;
+ PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV;
+ PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT;
+ PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT;
+ PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV;
+ PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT;
+ PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV;
+ PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV;
+#endif /* defined(VK_EXT_extended_dynamic_state3) */
#if defined(VK_EXT_external_memory_host)
PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT;
#endif /* defined(VK_EXT_external_memory_host) */
@@ -403,19 +439,49 @@ struct VolkDeviceTable
#if defined(VK_EXT_host_query_reset)
PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT;
#endif /* defined(VK_EXT_host_query_reset) */
+#if defined(VK_EXT_image_compression_control)
+ PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT;
+#endif /* defined(VK_EXT_image_compression_control) */
#if defined(VK_EXT_image_drm_format_modifier)
PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT;
#endif /* defined(VK_EXT_image_drm_format_modifier) */
#if defined(VK_EXT_line_rasterization)
PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT;
#endif /* defined(VK_EXT_line_rasterization) */
+#if defined(VK_EXT_mesh_shader)
+ PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT;
+ PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT;
+ PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT;
+#endif /* defined(VK_EXT_mesh_shader) */
+#if defined(VK_EXT_metal_objects)
+ PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT;
+#endif /* defined(VK_EXT_metal_objects) */
#if defined(VK_EXT_multi_draw)
PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT;
PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT;
#endif /* defined(VK_EXT_multi_draw) */
+#if defined(VK_EXT_opacity_micromap)
+ PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT;
+ PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT;
+ PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT;
+ PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT;
+ PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT;
+ PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT;
+ PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT;
+ PFN_vkCopyMicromapEXT vkCopyMicromapEXT;
+ PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT;
+ PFN_vkCreateMicromapEXT vkCreateMicromapEXT;
+ PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT;
+ PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT;
+ PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT;
+ PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT;
+#endif /* defined(VK_EXT_opacity_micromap) */
#if defined(VK_EXT_pageable_device_local_memory)
PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT;
#endif /* defined(VK_EXT_pageable_device_local_memory) */
+#if defined(VK_EXT_pipeline_properties)
+ PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT;
+#endif /* defined(VK_EXT_pipeline_properties) */
#if defined(VK_EXT_private_data)
PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT;
PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT;
@@ -425,6 +491,10 @@ struct VolkDeviceTable
#if defined(VK_EXT_sample_locations)
PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT;
#endif /* defined(VK_EXT_sample_locations) */
+#if defined(VK_EXT_shader_module_identifier)
+ PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT;
+ PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT;
+#endif /* defined(VK_EXT_shader_module_identifier) */
#if defined(VK_EXT_transform_feedback)
PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT;
PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT;
@@ -606,6 +676,9 @@ struct VolkDeviceTable
#if defined(VK_KHR_push_descriptor)
PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR;
#endif /* defined(VK_KHR_push_descriptor) */
+#if defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline)
+ PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR;
+#endif /* defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline) */
#if defined(VK_KHR_ray_tracing_pipeline)
PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR;
PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR;
@@ -706,6 +779,12 @@ struct VolkDeviceTable
PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV;
PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV;
#endif /* defined(VK_NV_mesh_shader) */
+#if defined(VK_NV_optical_flow)
+ PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV;
+ PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV;
+ PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV;
+ PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV;
+#endif /* defined(VK_NV_optical_flow) */
#if defined(VK_NV_ray_tracing)
PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV;
PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV;
@@ -728,6 +807,14 @@ struct VolkDeviceTable
PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV;
PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV;
#endif /* defined(VK_NV_shading_rate_image) */
+#if defined(VK_QCOM_tile_properties)
+ PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM;
+ PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM;
+#endif /* defined(VK_QCOM_tile_properties) */
+#if defined(VK_VALVE_descriptor_set_host_mapping)
+ PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE;
+ PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE;
+#endif /* defined(VK_VALVE_descriptor_set_host_mapping) */
#if (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1))
PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT;
#endif /* (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1)) */
@@ -1032,6 +1119,9 @@ extern PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT;
extern PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT;
extern PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT;
#endif /* defined(VK_EXT_debug_utils) */
+#if defined(VK_EXT_device_fault)
+extern PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT;
+#endif /* defined(VK_EXT_device_fault) */
#if defined(VK_EXT_direct_mode_display)
extern PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT;
#endif /* defined(VK_EXT_direct_mode_display) */
@@ -1072,6 +1162,39 @@ extern PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT;
extern PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT;
extern PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT;
#endif /* defined(VK_EXT_extended_dynamic_state2) */
+#if defined(VK_EXT_extended_dynamic_state3)
+extern PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT;
+extern PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT;
+extern PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT;
+extern PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT;
+extern PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT;
+extern PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT;
+extern PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT;
+extern PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV;
+extern PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV;
+extern PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV;
+extern PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV;
+extern PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV;
+extern PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV;
+extern PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT;
+extern PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT;
+extern PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT;
+extern PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT;
+extern PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT;
+extern PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT;
+extern PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT;
+extern PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT;
+extern PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT;
+extern PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT;
+extern PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT;
+extern PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV;
+extern PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT;
+extern PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT;
+extern PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV;
+extern PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT;
+extern PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV;
+extern PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV;
+#endif /* defined(VK_EXT_extended_dynamic_state3) */
#if defined(VK_EXT_external_memory_host)
extern PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT;
#endif /* defined(VK_EXT_external_memory_host) */
@@ -1089,12 +1212,23 @@ extern PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT;
#if defined(VK_EXT_host_query_reset)
extern PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT;
#endif /* defined(VK_EXT_host_query_reset) */
+#if defined(VK_EXT_image_compression_control)
+extern PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT;
+#endif /* defined(VK_EXT_image_compression_control) */
#if defined(VK_EXT_image_drm_format_modifier)
extern PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT;
#endif /* defined(VK_EXT_image_drm_format_modifier) */
#if defined(VK_EXT_line_rasterization)
extern PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT;
#endif /* defined(VK_EXT_line_rasterization) */
+#if defined(VK_EXT_mesh_shader)
+extern PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT;
+extern PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT;
+extern PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT;
+#endif /* defined(VK_EXT_mesh_shader) */
+#if defined(VK_EXT_metal_objects)
+extern PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT;
+#endif /* defined(VK_EXT_metal_objects) */
#if defined(VK_EXT_metal_surface)
extern PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT;
#endif /* defined(VK_EXT_metal_surface) */
@@ -1102,9 +1236,28 @@ extern PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT;
extern PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT;
extern PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT;
#endif /* defined(VK_EXT_multi_draw) */
+#if defined(VK_EXT_opacity_micromap)
+extern PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT;
+extern PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT;
+extern PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT;
+extern PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT;
+extern PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT;
+extern PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT;
+extern PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT;
+extern PFN_vkCopyMicromapEXT vkCopyMicromapEXT;
+extern PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT;
+extern PFN_vkCreateMicromapEXT vkCreateMicromapEXT;
+extern PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT;
+extern PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT;
+extern PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT;
+extern PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT;
+#endif /* defined(VK_EXT_opacity_micromap) */
#if defined(VK_EXT_pageable_device_local_memory)
extern PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT;
#endif /* defined(VK_EXT_pageable_device_local_memory) */
+#if defined(VK_EXT_pipeline_properties)
+extern PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT;
+#endif /* defined(VK_EXT_pipeline_properties) */
#if defined(VK_EXT_private_data)
extern PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT;
extern PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT;
@@ -1115,6 +1268,10 @@ extern PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT;
extern PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT;
extern PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT;
#endif /* defined(VK_EXT_sample_locations) */
+#if defined(VK_EXT_shader_module_identifier)
+extern PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT;
+extern PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT;
+#endif /* defined(VK_EXT_shader_module_identifier) */
#if defined(VK_EXT_tooling_info)
extern PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT;
#endif /* defined(VK_EXT_tooling_info) */
@@ -1351,6 +1508,9 @@ extern PFN_vkWaitForPresentKHR vkWaitForPresentKHR;
#if defined(VK_KHR_push_descriptor)
extern PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR;
#endif /* defined(VK_KHR_push_descriptor) */
+#if defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline)
+extern PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR;
+#endif /* defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline) */
#if defined(VK_KHR_ray_tracing_pipeline)
extern PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR;
extern PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR;
@@ -1498,6 +1658,13 @@ extern PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV;
extern PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV;
extern PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV;
#endif /* defined(VK_NV_mesh_shader) */
+#if defined(VK_NV_optical_flow)
+extern PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV;
+extern PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV;
+extern PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV;
+extern PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV;
+extern PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV;
+#endif /* defined(VK_NV_optical_flow) */
#if defined(VK_NV_ray_tracing)
extern PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV;
extern PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV;
@@ -1520,10 +1687,18 @@ extern PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV;
extern PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV;
extern PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV;
#endif /* defined(VK_NV_shading_rate_image) */
+#if defined(VK_QCOM_tile_properties)
+extern PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM;
+extern PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM;
+#endif /* defined(VK_QCOM_tile_properties) */
#if defined(VK_QNX_screen_surface)
extern PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX;
extern PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX;
#endif /* defined(VK_QNX_screen_surface) */
+#if defined(VK_VALVE_descriptor_set_host_mapping)
+extern PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE;
+extern PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE;
+#endif /* defined(VK_VALVE_descriptor_set_host_mapping) */
#if (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1))
extern PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT;
#endif /* (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1)) */
@@ -1555,7 +1730,7 @@ extern PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR;
#endif
/**
- * Copyright (c) 2018-2019 Arseny Kapoulkine
+ * Copyright (c) 2018-2022 Arseny Kapoulkine
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
diff --git a/thirdparty/vulkan/include/vk_video/vulkan_video_codec_h264std.h b/thirdparty/vulkan/include/vk_video/vulkan_video_codec_h264std.h
index 05956989b4..d3ebec6a14 100644
--- a/thirdparty/vulkan/include/vk_video/vulkan_video_codec_h264std.h
+++ b/thirdparty/vulkan/include/vk_video/vulkan_video_codec_h264std.h
@@ -21,16 +21,13 @@ extern "C" {
#define vulkan_video_codec_h264std 1
#include <stdint.h>
-// Vulkan 0.9 provisional Vulkan video H.264 encode and decode std specification version number
-#define VK_STD_VULKAN_VIDEO_CODEC_H264_API_VERSION_0_9_5 VK_MAKE_VIDEO_STD_VERSION(0, 9, 5) // Patch version should always be set to 0
-
#define STD_VIDEO_H264_CPB_CNT_LIST_SIZE 32
#define STD_VIDEO_H264_SCALING_LIST_4X4_NUM_LISTS 6
#define STD_VIDEO_H264_SCALING_LIST_4X4_NUM_ELEMENTS 16
-#define STD_VIDEO_H264_SCALING_LIST_8X8_NUM_LISTS 2
+#define STD_VIDEO_H264_SCALING_LIST_8X8_NUM_LISTS 6
#define STD_VIDEO_H264_SCALING_LIST_8X8_NUM_ELEMENTS 64
-#define VK_STD_VULKAN_VIDEO_CODEC_H264_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H264_API_VERSION_0_9_5
-#define VK_STD_VULKAN_VIDEO_CODEC_H264_EXTENSION_NAME "VK_STD_vulkan_video_codec_h264"
+#define STD_VIDEO_H264_MAX_NUM_LIST_REF 32
+#define STD_VIDEO_H264_MAX_CHROMA_PLANES 2
typedef enum StdVideoH264ChromaFormatIdc {
STD_VIDEO_H264_CHROMA_FORMAT_IDC_MONOCHROME = 0,
@@ -50,29 +47,29 @@ typedef enum StdVideoH264ProfileIdc {
STD_VIDEO_H264_PROFILE_IDC_MAX_ENUM = 0x7FFFFFFF
} StdVideoH264ProfileIdc;
-typedef enum StdVideoH264Level {
- STD_VIDEO_H264_LEVEL_1_0 = 0,
- STD_VIDEO_H264_LEVEL_1_1 = 1,
- STD_VIDEO_H264_LEVEL_1_2 = 2,
- STD_VIDEO_H264_LEVEL_1_3 = 3,
- STD_VIDEO_H264_LEVEL_2_0 = 4,
- STD_VIDEO_H264_LEVEL_2_1 = 5,
- STD_VIDEO_H264_LEVEL_2_2 = 6,
- STD_VIDEO_H264_LEVEL_3_0 = 7,
- STD_VIDEO_H264_LEVEL_3_1 = 8,
- STD_VIDEO_H264_LEVEL_3_2 = 9,
- STD_VIDEO_H264_LEVEL_4_0 = 10,
- STD_VIDEO_H264_LEVEL_4_1 = 11,
- STD_VIDEO_H264_LEVEL_4_2 = 12,
- STD_VIDEO_H264_LEVEL_5_0 = 13,
- STD_VIDEO_H264_LEVEL_5_1 = 14,
- STD_VIDEO_H264_LEVEL_5_2 = 15,
- STD_VIDEO_H264_LEVEL_6_0 = 16,
- STD_VIDEO_H264_LEVEL_6_1 = 17,
- STD_VIDEO_H264_LEVEL_6_2 = 18,
- STD_VIDEO_H264_LEVEL_INVALID = 0x7FFFFFFF,
- STD_VIDEO_H264_LEVEL_MAX_ENUM = 0x7FFFFFFF
-} StdVideoH264Level;
+typedef enum StdVideoH264LevelIdc {
+ STD_VIDEO_H264_LEVEL_IDC_1_0 = 0,
+ STD_VIDEO_H264_LEVEL_IDC_1_1 = 1,
+ STD_VIDEO_H264_LEVEL_IDC_1_2 = 2,
+ STD_VIDEO_H264_LEVEL_IDC_1_3 = 3,
+ STD_VIDEO_H264_LEVEL_IDC_2_0 = 4,
+ STD_VIDEO_H264_LEVEL_IDC_2_1 = 5,
+ STD_VIDEO_H264_LEVEL_IDC_2_2 = 6,
+ STD_VIDEO_H264_LEVEL_IDC_3_0 = 7,
+ STD_VIDEO_H264_LEVEL_IDC_3_1 = 8,
+ STD_VIDEO_H264_LEVEL_IDC_3_2 = 9,
+ STD_VIDEO_H264_LEVEL_IDC_4_0 = 10,
+ STD_VIDEO_H264_LEVEL_IDC_4_1 = 11,
+ STD_VIDEO_H264_LEVEL_IDC_4_2 = 12,
+ STD_VIDEO_H264_LEVEL_IDC_5_0 = 13,
+ STD_VIDEO_H264_LEVEL_IDC_5_1 = 14,
+ STD_VIDEO_H264_LEVEL_IDC_5_2 = 15,
+ STD_VIDEO_H264_LEVEL_IDC_6_0 = 16,
+ STD_VIDEO_H264_LEVEL_IDC_6_1 = 17,
+ STD_VIDEO_H264_LEVEL_IDC_6_2 = 18,
+ STD_VIDEO_H264_LEVEL_IDC_INVALID = 0x7FFFFFFF,
+ STD_VIDEO_H264_LEVEL_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264LevelIdc;
typedef enum StdVideoH264PocType {
STD_VIDEO_H264_POC_TYPE_0 = 0,
@@ -197,6 +194,7 @@ typedef struct StdVideoH264HrdParameters {
uint8_t cpb_cnt_minus1;
uint8_t bit_rate_scale;
uint8_t cpb_size_scale;
+ uint8_t reserved1;
uint32_t bit_rate_value_minus1[STD_VIDEO_H264_CPB_CNT_LIST_SIZE];
uint32_t cpb_size_value_minus1[STD_VIDEO_H264_CPB_CNT_LIST_SIZE];
uint8_t cbr_flag[STD_VIDEO_H264_CPB_CNT_LIST_SIZE];
@@ -207,19 +205,22 @@ typedef struct StdVideoH264HrdParameters {
} StdVideoH264HrdParameters;
typedef struct StdVideoH264SequenceParameterSetVui {
- StdVideoH264AspectRatioIdc aspect_ratio_idc;
- uint16_t sar_width;
- uint16_t sar_height;
- uint8_t video_format;
- uint8_t color_primaries;
- uint8_t transfer_characteristics;
- uint8_t matrix_coefficients;
- uint32_t num_units_in_tick;
- uint32_t time_scale;
- StdVideoH264HrdParameters* pHrdParameters;
- uint8_t max_num_reorder_frames;
- uint8_t max_dec_frame_buffering;
- StdVideoH264SpsVuiFlags flags;
+ StdVideoH264SpsVuiFlags flags;
+ StdVideoH264AspectRatioIdc aspect_ratio_idc;
+ uint16_t sar_width;
+ uint16_t sar_height;
+ uint8_t video_format;
+ uint8_t colour_primaries;
+ uint8_t transfer_characteristics;
+ uint8_t matrix_coefficients;
+ uint32_t num_units_in_tick;
+ uint32_t time_scale;
+ uint8_t max_num_reorder_frames;
+ uint8_t max_dec_frame_buffering;
+ uint8_t chroma_sample_loc_type_top_field;
+ uint8_t chroma_sample_loc_type_bottom_field;
+ uint32_t reserved1;
+ const StdVideoH264HrdParameters* pHrdParameters;
} StdVideoH264SequenceParameterSetVui;
typedef struct StdVideoH264SpsFlags {
@@ -242,36 +243,38 @@ typedef struct StdVideoH264SpsFlags {
} StdVideoH264SpsFlags;
typedef struct StdVideoH264ScalingLists {
- uint8_t scaling_list_present_mask;
- uint8_t use_default_scaling_matrix_mask;
- uint8_t ScalingList4x4[STD_VIDEO_H264_SCALING_LIST_4X4_NUM_LISTS][STD_VIDEO_H264_SCALING_LIST_4X4_NUM_ELEMENTS];
- uint8_t ScalingList8x8[STD_VIDEO_H264_SCALING_LIST_8X8_NUM_LISTS][STD_VIDEO_H264_SCALING_LIST_8X8_NUM_ELEMENTS];
+ uint16_t scaling_list_present_mask;
+ uint16_t use_default_scaling_matrix_mask;
+ uint8_t ScalingList4x4[STD_VIDEO_H264_SCALING_LIST_4X4_NUM_LISTS][STD_VIDEO_H264_SCALING_LIST_4X4_NUM_ELEMENTS];
+ uint8_t ScalingList8x8[STD_VIDEO_H264_SCALING_LIST_8X8_NUM_LISTS][STD_VIDEO_H264_SCALING_LIST_8X8_NUM_ELEMENTS];
} StdVideoH264ScalingLists;
typedef struct StdVideoH264SequenceParameterSet {
- StdVideoH264ProfileIdc profile_idc;
- StdVideoH264Level level_idc;
- uint8_t seq_parameter_set_id;
- StdVideoH264ChromaFormatIdc chroma_format_idc;
- uint8_t bit_depth_luma_minus8;
- uint8_t bit_depth_chroma_minus8;
- uint8_t log2_max_frame_num_minus4;
- StdVideoH264PocType pic_order_cnt_type;
- uint8_t log2_max_pic_order_cnt_lsb_minus4;
- int32_t offset_for_non_ref_pic;
- int32_t offset_for_top_to_bottom_field;
- uint8_t num_ref_frames_in_pic_order_cnt_cycle;
- uint8_t max_num_ref_frames;
- uint32_t pic_width_in_mbs_minus1;
- uint32_t pic_height_in_map_units_minus1;
- uint32_t frame_crop_left_offset;
- uint32_t frame_crop_right_offset;
- uint32_t frame_crop_top_offset;
- uint32_t frame_crop_bottom_offset;
- StdVideoH264SpsFlags flags;
- int32_t* pOffsetForRefFrame;
- StdVideoH264ScalingLists* pScalingLists;
- StdVideoH264SequenceParameterSetVui* pSequenceParameterSetVui;
+ StdVideoH264SpsFlags flags;
+ StdVideoH264ProfileIdc profile_idc;
+ StdVideoH264LevelIdc level_idc;
+ StdVideoH264ChromaFormatIdc chroma_format_idc;
+ uint8_t seq_parameter_set_id;
+ uint8_t bit_depth_luma_minus8;
+ uint8_t bit_depth_chroma_minus8;
+ uint8_t log2_max_frame_num_minus4;
+ StdVideoH264PocType pic_order_cnt_type;
+ int32_t offset_for_non_ref_pic;
+ int32_t offset_for_top_to_bottom_field;
+ uint8_t log2_max_pic_order_cnt_lsb_minus4;
+ uint8_t num_ref_frames_in_pic_order_cnt_cycle;
+ uint8_t max_num_ref_frames;
+ uint8_t reserved1;
+ uint32_t pic_width_in_mbs_minus1;
+ uint32_t pic_height_in_map_units_minus1;
+ uint32_t frame_crop_left_offset;
+ uint32_t frame_crop_right_offset;
+ uint32_t frame_crop_top_offset;
+ uint32_t frame_crop_bottom_offset;
+ uint32_t reserved2;
+ const int32_t* pOffsetForRefFrame;
+ const StdVideoH264ScalingLists* pScalingLists;
+ const StdVideoH264SequenceParameterSetVui* pSequenceParameterSetVui;
} StdVideoH264SequenceParameterSet;
typedef struct StdVideoH264PpsFlags {
@@ -279,25 +282,24 @@ typedef struct StdVideoH264PpsFlags {
uint32_t redundant_pic_cnt_present_flag : 1;
uint32_t constrained_intra_pred_flag : 1;
uint32_t deblocking_filter_control_present_flag : 1;
- uint32_t weighted_bipred_idc_flag : 1;
uint32_t weighted_pred_flag : 1;
- uint32_t pic_order_present_flag : 1;
+ uint32_t bottom_field_pic_order_in_frame_present_flag : 1;
uint32_t entropy_coding_mode_flag : 1;
uint32_t pic_scaling_matrix_present_flag : 1;
} StdVideoH264PpsFlags;
typedef struct StdVideoH264PictureParameterSet {
- uint8_t seq_parameter_set_id;
- uint8_t pic_parameter_set_id;
- uint8_t num_ref_idx_l0_default_active_minus1;
- uint8_t num_ref_idx_l1_default_active_minus1;
- StdVideoH264WeightedBipredIdc weighted_bipred_idc;
- int8_t pic_init_qp_minus26;
- int8_t pic_init_qs_minus26;
- int8_t chroma_qp_index_offset;
- int8_t second_chroma_qp_index_offset;
- StdVideoH264PpsFlags flags;
- StdVideoH264ScalingLists* pScalingLists;
+ StdVideoH264PpsFlags flags;
+ uint8_t seq_parameter_set_id;
+ uint8_t pic_parameter_set_id;
+ uint8_t num_ref_idx_l0_default_active_minus1;
+ uint8_t num_ref_idx_l1_default_active_minus1;
+ StdVideoH264WeightedBipredIdc weighted_bipred_idc;
+ int8_t pic_init_qp_minus26;
+ int8_t pic_init_qs_minus26;
+ int8_t chroma_qp_index_offset;
+ int8_t second_chroma_qp_index_offset;
+ const StdVideoH264ScalingLists* pScalingLists;
} StdVideoH264PictureParameterSet;
diff --git a/thirdparty/vulkan/include/vk_video/vulkan_video_codec_h264std_decode.h b/thirdparty/vulkan/include/vk_video/vulkan_video_codec_h264std_decode.h
index 8e3b05c02e..98744f67ba 100644
--- a/thirdparty/vulkan/include/vk_video/vulkan_video_codec_h264std_decode.h
+++ b/thirdparty/vulkan/include/vk_video/vulkan_video_codec_h264std_decode.h
@@ -20,8 +20,12 @@ extern "C" {
#define vulkan_video_codec_h264std_decode 1
+// Vulkan 0.9 provisional Vulkan video H.264 decode std specification version number
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_API_VERSION_0_9_8 VK_MAKE_VIDEO_STD_VERSION(0, 9, 8) // Patch version should always be set to 0
+
#define STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE 2
-#define STD_VIDEO_DECODE_H264_MVC_REF_LIST_SIZE 15
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_API_VERSION_0_9_8
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h264_decode"
typedef enum StdVideoDecodeH264FieldOrderCount {
STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_TOP = 0,
@@ -39,57 +43,30 @@ typedef struct StdVideoDecodeH264PictureInfoFlags {
} StdVideoDecodeH264PictureInfoFlags;
typedef struct StdVideoDecodeH264PictureInfo {
+ StdVideoDecodeH264PictureInfoFlags flags;
uint8_t seq_parameter_set_id;
uint8_t pic_parameter_set_id;
- uint16_t reserved;
+ uint8_t reserved1;
+ uint8_t reserved2;
uint16_t frame_num;
uint16_t idr_pic_id;
int32_t PicOrderCnt[STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE];
- StdVideoDecodeH264PictureInfoFlags flags;
} StdVideoDecodeH264PictureInfo;
typedef struct StdVideoDecodeH264ReferenceInfoFlags {
uint32_t top_field_flag : 1;
uint32_t bottom_field_flag : 1;
- uint32_t is_long_term : 1;
+ uint32_t used_for_long_term_reference : 1;
uint32_t is_non_existing : 1;
} StdVideoDecodeH264ReferenceInfoFlags;
typedef struct StdVideoDecodeH264ReferenceInfo {
+ StdVideoDecodeH264ReferenceInfoFlags flags;
uint16_t FrameNum;
uint16_t reserved;
- int32_t PicOrderCnt[2];
- StdVideoDecodeH264ReferenceInfoFlags flags;
+ int32_t PicOrderCnt[STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE];
} StdVideoDecodeH264ReferenceInfo;
-typedef struct StdVideoDecodeH264MvcElementFlags {
- uint32_t non_idr : 1;
- uint32_t anchor_pic : 1;
- uint32_t inter_view : 1;
-} StdVideoDecodeH264MvcElementFlags;
-
-typedef struct StdVideoDecodeH264MvcElement {
- StdVideoDecodeH264MvcElementFlags flags;
- uint16_t viewOrderIndex;
- uint16_t viewId;
- uint16_t temporalId;
- uint16_t priorityId;
- uint16_t numOfAnchorRefsInL0;
- uint16_t viewIdOfAnchorRefsInL0[STD_VIDEO_DECODE_H264_MVC_REF_LIST_SIZE];
- uint16_t numOfAnchorRefsInL1;
- uint16_t viewIdOfAnchorRefsInL1[STD_VIDEO_DECODE_H264_MVC_REF_LIST_SIZE];
- uint16_t numOfNonAnchorRefsInL0;
- uint16_t viewIdOfNonAnchorRefsInL0[STD_VIDEO_DECODE_H264_MVC_REF_LIST_SIZE];
- uint16_t numOfNonAnchorRefsInL1;
- uint16_t viewIdOfNonAnchorRefsInL1[STD_VIDEO_DECODE_H264_MVC_REF_LIST_SIZE];
-} StdVideoDecodeH264MvcElement;
-
-typedef struct StdVideoDecodeH264Mvc {
- uint32_t viewId0;
- uint32_t mvcElementCount;
- StdVideoDecodeH264MvcElement* pMvcElements;
-} StdVideoDecodeH264Mvc;
-
#ifdef __cplusplus
}
diff --git a/thirdparty/vulkan/include/vk_video/vulkan_video_codec_h264std_encode.h b/thirdparty/vulkan/include/vk_video/vulkan_video_codec_h264std_encode.h
index 7079aeddfb..76f03eb789 100644
--- a/thirdparty/vulkan/include/vk_video/vulkan_video_codec_h264std_encode.h
+++ b/thirdparty/vulkan/include/vk_video/vulkan_video_codec_h264std_encode.h
@@ -20,12 +20,36 @@ extern "C" {
#define vulkan_video_codec_h264std_encode 1
+// Vulkan 0.9 provisional Vulkan video H.264 encode std specification version number
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_API_VERSION_0_9_8 VK_MAKE_VIDEO_STD_VERSION(0, 9, 8) // Patch version should always be set to 0
+
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_API_VERSION_0_9_8
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h264_encode"
+typedef struct StdVideoEncodeH264WeightTableFlags {
+ uint32_t luma_weight_l0_flag;
+ uint32_t chroma_weight_l0_flag;
+ uint32_t luma_weight_l1_flag;
+ uint32_t chroma_weight_l1_flag;
+} StdVideoEncodeH264WeightTableFlags;
+
+typedef struct StdVideoEncodeH264WeightTable {
+ StdVideoEncodeH264WeightTableFlags flags;
+ uint8_t luma_log2_weight_denom;
+ uint8_t chroma_log2_weight_denom;
+ int8_t luma_weight_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF];
+ int8_t luma_offset_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF];
+ int8_t chroma_weight_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES];
+ int8_t chroma_offset_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES];
+ int8_t luma_weight_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF];
+ int8_t luma_offset_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF];
+ int8_t chroma_weight_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES];
+ int8_t chroma_offset_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES];
+} StdVideoEncodeH264WeightTable;
+
typedef struct StdVideoEncodeH264SliceHeaderFlags {
- uint32_t idr_flag : 1;
- uint32_t is_reference_flag : 1;
+ uint32_t direct_spatial_mv_pred_flag : 1;
uint32_t num_ref_idx_active_override_flag : 1;
uint32_t no_output_of_prior_pics_flag : 1;
- uint32_t long_term_reference_flag : 1;
uint32_t adaptive_ref_pic_marking_mode_flag : 1;
uint32_t no_prior_references_available_flag : 1;
} StdVideoEncodeH264SliceHeaderFlags;
@@ -33,9 +57,13 @@ typedef struct StdVideoEncodeH264SliceHeaderFlags {
typedef struct StdVideoEncodeH264PictureInfoFlags {
uint32_t idr_flag : 1;
uint32_t is_reference_flag : 1;
- uint32_t long_term_reference_flag : 1;
+ uint32_t used_for_long_term_reference : 1;
} StdVideoEncodeH264PictureInfoFlags;
+typedef struct StdVideoEncodeH264ReferenceInfoFlags {
+ uint32_t used_for_long_term_reference : 1;
+} StdVideoEncodeH264ReferenceInfoFlags;
+
typedef struct StdVideoEncodeH264RefMgmtFlags {
uint32_t ref_pic_list_modification_l0_flag : 1;
uint32_t ref_pic_list_modification_l1_flag : 1;
@@ -56,37 +84,44 @@ typedef struct StdVideoEncodeH264RefPicMarkingEntry {
} StdVideoEncodeH264RefPicMarkingEntry;
typedef struct StdVideoEncodeH264RefMemMgmtCtrlOperations {
- StdVideoEncodeH264RefMgmtFlags flags;
- uint8_t refList0ModOpCount;
- StdVideoEncodeH264RefListModEntry* pRefList0ModOperations;
- uint8_t refList1ModOpCount;
- StdVideoEncodeH264RefListModEntry* pRefList1ModOperations;
- uint8_t refPicMarkingOpCount;
- StdVideoEncodeH264RefPicMarkingEntry* pRefPicMarkingOperations;
+ StdVideoEncodeH264RefMgmtFlags flags;
+ uint8_t refList0ModOpCount;
+ const StdVideoEncodeH264RefListModEntry* pRefList0ModOperations;
+ uint8_t refList1ModOpCount;
+ const StdVideoEncodeH264RefListModEntry* pRefList1ModOperations;
+ uint8_t refPicMarkingOpCount;
+ const StdVideoEncodeH264RefPicMarkingEntry* pRefPicMarkingOperations;
} StdVideoEncodeH264RefMemMgmtCtrlOperations;
typedef struct StdVideoEncodeH264PictureInfo {
StdVideoEncodeH264PictureInfoFlags flags;
+ uint8_t seq_parameter_set_id;
+ uint8_t pic_parameter_set_id;
StdVideoH264PictureType pictureType;
- uint32_t frameNum;
- uint32_t pictureOrderCount;
- uint16_t long_term_pic_num;
- uint16_t long_term_frame_idx;
+ uint32_t frame_num;
+ int32_t PicOrderCnt;
} StdVideoEncodeH264PictureInfo;
+typedef struct StdVideoEncodeH264ReferenceInfo {
+ StdVideoEncodeH264ReferenceInfoFlags flags;
+ uint32_t FrameNum;
+ int32_t PicOrderCnt;
+ uint16_t long_term_pic_num;
+ uint16_t long_term_frame_idx;
+} StdVideoEncodeH264ReferenceInfo;
+
typedef struct StdVideoEncodeH264SliceHeader {
- StdVideoEncodeH264SliceHeaderFlags flags;
- StdVideoH264SliceType slice_type;
- uint8_t seq_parameter_set_id;
- uint8_t pic_parameter_set_id;
- uint16_t idr_pic_id;
- uint8_t num_ref_idx_l0_active_minus1;
- uint8_t num_ref_idx_l1_active_minus1;
- StdVideoH264CabacInitIdc cabac_init_idc;
- StdVideoH264DisableDeblockingFilterIdc disable_deblocking_filter_idc;
- int8_t slice_alpha_c0_offset_div2;
- int8_t slice_beta_offset_div2;
- StdVideoEncodeH264RefMemMgmtCtrlOperations* pMemMgmtCtrlOperations;
+ StdVideoEncodeH264SliceHeaderFlags flags;
+ uint32_t first_mb_in_slice;
+ StdVideoH264SliceType slice_type;
+ uint16_t idr_pic_id;
+ uint8_t num_ref_idx_l0_active_minus1;
+ uint8_t num_ref_idx_l1_active_minus1;
+ StdVideoH264CabacInitIdc cabac_init_idc;
+ StdVideoH264DisableDeblockingFilterIdc disable_deblocking_filter_idc;
+ int8_t slice_alpha_c0_offset_div2;
+ int8_t slice_beta_offset_div2;
+ const StdVideoEncodeH264WeightTable* pWeightTable;
} StdVideoEncodeH264SliceHeader;
diff --git a/thirdparty/vulkan/include/vk_video/vulkan_video_codec_h265std.h b/thirdparty/vulkan/include/vk_video/vulkan_video_codec_h265std.h
index 009133abfd..862f8817f6 100644
--- a/thirdparty/vulkan/include/vk_video/vulkan_video_codec_h265std.h
+++ b/thirdparty/vulkan/include/vk_video/vulkan_video_codec_h265std.h
@@ -20,10 +20,7 @@ extern "C" {
#define vulkan_video_codec_h265std 1
-// Vulkan 0.5 version number WIP
-#define VK_STD_VULKAN_VIDEO_CODEC_H265_API_VERSION_0_9_5 VK_MAKE_VIDEO_STD_VERSION(0, 9, 5) // Patch version should always be set to 0
-
-#define STD_VIDEO_H265_SUBLAYERS_MINUS1_LIST_SIZE 7
+#define STD_VIDEO_H265_SUBLAYERS_LIST_SIZE 7
#define STD_VIDEO_H265_CPB_CNT_LIST_SIZE 32
#define STD_VIDEO_H265_SCALING_LIST_4X4_NUM_LISTS 6
#define STD_VIDEO_H265_SCALING_LIST_4X4_NUM_ELEMENTS 16
@@ -35,11 +32,16 @@ extern "C" {
#define STD_VIDEO_H265_SCALING_LIST_32X32_NUM_ELEMENTS 64
#define STD_VIDEO_H265_PREDICTOR_PALETTE_COMPONENTS_LIST_SIZE 3
#define STD_VIDEO_H265_PREDICTOR_PALETTE_COMP_ENTRIES_LIST_SIZE 128
+#define STD_VIDEO_H265_MAX_DPB_SIZE 16
+#define STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS 32
+#define STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE 6
#define STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_COLS_LIST_SIZE 19
#define STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_ROWS_LIST_SIZE 21
-#define STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE 6
-#define VK_STD_VULKAN_VIDEO_CODEC_H265_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H265_API_VERSION_0_9_5
-#define VK_STD_VULKAN_VIDEO_CODEC_H265_EXTENSION_NAME "VK_STD_vulkan_video_codec_h265"
+#define STD_VIDEO_H265_MAX_NUM_LIST_REF 15
+#define STD_VIDEO_H265_MAX_CHROMA_PLANES 2
+#define STD_VIDEO_H265_MAX_SHORT_TERM_REF_PIC_SETS 64
+#define STD_VIDEO_H265_MAX_LONG_TERM_PICS 16
+#define STD_VIDEO_H265_MAX_DELTA_POC 48
typedef enum StdVideoH265ChromaFormatIdc {
STD_VIDEO_H265_CHROMA_FORMAT_IDC_MONOCHROME = 0,
@@ -60,23 +62,23 @@ typedef enum StdVideoH265ProfileIdc {
STD_VIDEO_H265_PROFILE_IDC_MAX_ENUM = 0x7FFFFFFF
} StdVideoH265ProfileIdc;
-typedef enum StdVideoH265Level {
- STD_VIDEO_H265_LEVEL_1_0 = 0,
- STD_VIDEO_H265_LEVEL_2_0 = 1,
- STD_VIDEO_H265_LEVEL_2_1 = 2,
- STD_VIDEO_H265_LEVEL_3_0 = 3,
- STD_VIDEO_H265_LEVEL_3_1 = 4,
- STD_VIDEO_H265_LEVEL_4_0 = 5,
- STD_VIDEO_H265_LEVEL_4_1 = 6,
- STD_VIDEO_H265_LEVEL_5_0 = 7,
- STD_VIDEO_H265_LEVEL_5_1 = 8,
- STD_VIDEO_H265_LEVEL_5_2 = 9,
- STD_VIDEO_H265_LEVEL_6_0 = 10,
- STD_VIDEO_H265_LEVEL_6_1 = 11,
- STD_VIDEO_H265_LEVEL_6_2 = 12,
- STD_VIDEO_H265_LEVEL_INVALID = 0x7FFFFFFF,
- STD_VIDEO_H265_LEVEL_MAX_ENUM = 0x7FFFFFFF
-} StdVideoH265Level;
+typedef enum StdVideoH265LevelIdc {
+ STD_VIDEO_H265_LEVEL_IDC_1_0 = 0,
+ STD_VIDEO_H265_LEVEL_IDC_2_0 = 1,
+ STD_VIDEO_H265_LEVEL_IDC_2_1 = 2,
+ STD_VIDEO_H265_LEVEL_IDC_3_0 = 3,
+ STD_VIDEO_H265_LEVEL_IDC_3_1 = 4,
+ STD_VIDEO_H265_LEVEL_IDC_4_0 = 5,
+ STD_VIDEO_H265_LEVEL_IDC_4_1 = 6,
+ STD_VIDEO_H265_LEVEL_IDC_5_0 = 7,
+ STD_VIDEO_H265_LEVEL_IDC_5_1 = 8,
+ STD_VIDEO_H265_LEVEL_IDC_5_2 = 9,
+ STD_VIDEO_H265_LEVEL_IDC_6_0 = 10,
+ STD_VIDEO_H265_LEVEL_IDC_6_1 = 11,
+ STD_VIDEO_H265_LEVEL_IDC_6_2 = 12,
+ STD_VIDEO_H265_LEVEL_IDC_INVALID = 0x7FFFFFFF,
+ STD_VIDEO_H265_LEVEL_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH265LevelIdc;
typedef enum StdVideoH265SliceType {
STD_VIDEO_H265_SLICE_TYPE_B = 0,
@@ -94,10 +96,33 @@ typedef enum StdVideoH265PictureType {
STD_VIDEO_H265_PICTURE_TYPE_INVALID = 0x7FFFFFFF,
STD_VIDEO_H265_PICTURE_TYPE_MAX_ENUM = 0x7FFFFFFF
} StdVideoH265PictureType;
+
+typedef enum StdVideoH265AspectRatioIdc {
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_UNSPECIFIED = 0,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_SQUARE = 1,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_12_11 = 2,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_10_11 = 3,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_16_11 = 4,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_40_33 = 5,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_24_11 = 6,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_20_11 = 7,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_32_11 = 8,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_80_33 = 9,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_18_11 = 10,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_15_11 = 11,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_64_33 = 12,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_160_99 = 13,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_4_3 = 14,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_3_2 = 15,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_2_1 = 16,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_EXTENDED_SAR = 255,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_INVALID = 0x7FFFFFFF,
+ STD_VIDEO_H265_ASPECT_RATIO_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH265AspectRatioIdc;
typedef struct StdVideoH265DecPicBufMgr {
- uint32_t max_latency_increase_plus1[STD_VIDEO_H265_SUBLAYERS_MINUS1_LIST_SIZE];
- uint8_t max_dec_pic_buffering_minus1[STD_VIDEO_H265_SUBLAYERS_MINUS1_LIST_SIZE];
- uint8_t max_num_reorder_pics[STD_VIDEO_H265_SUBLAYERS_MINUS1_LIST_SIZE];
+ uint32_t max_latency_increase_plus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE];
+ uint8_t max_dec_pic_buffering_minus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE];
+ uint8_t max_num_reorder_pics[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE];
} StdVideoH265DecPicBufMgr;
typedef struct StdVideoH265SubLayerHrdParameters {
@@ -119,20 +144,21 @@ typedef struct StdVideoH265HrdFlags {
} StdVideoH265HrdFlags;
typedef struct StdVideoH265HrdParameters {
- uint8_t tick_divisor_minus2;
- uint8_t du_cpb_removal_delay_increment_length_minus1;
- uint8_t dpb_output_delay_du_length_minus1;
- uint8_t bit_rate_scale;
- uint8_t cpb_size_scale;
- uint8_t cpb_size_du_scale;
- uint8_t initial_cpb_removal_delay_length_minus1;
- uint8_t au_cpb_removal_delay_length_minus1;
- uint8_t dpb_output_delay_length_minus1;
- uint8_t cpb_cnt_minus1[STD_VIDEO_H265_SUBLAYERS_MINUS1_LIST_SIZE];
- uint16_t elemental_duration_in_tc_minus1[STD_VIDEO_H265_SUBLAYERS_MINUS1_LIST_SIZE];
- StdVideoH265SubLayerHrdParameters* pSubLayerHrdParametersNal[STD_VIDEO_H265_SUBLAYERS_MINUS1_LIST_SIZE];
- StdVideoH265SubLayerHrdParameters* pSubLayerHrdParametersVcl[STD_VIDEO_H265_SUBLAYERS_MINUS1_LIST_SIZE];
- StdVideoH265HrdFlags flags;
+ StdVideoH265HrdFlags flags;
+ uint8_t tick_divisor_minus2;
+ uint8_t du_cpb_removal_delay_increment_length_minus1;
+ uint8_t dpb_output_delay_du_length_minus1;
+ uint8_t bit_rate_scale;
+ uint8_t cpb_size_scale;
+ uint8_t cpb_size_du_scale;
+ uint8_t initial_cpb_removal_delay_length_minus1;
+ uint8_t au_cpb_removal_delay_length_minus1;
+ uint8_t dpb_output_delay_length_minus1;
+ uint8_t cpb_cnt_minus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE];
+ uint16_t elemental_duration_in_tc_minus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE];
+ uint16_t reserved[3];
+ const StdVideoH265SubLayerHrdParameters* pSubLayerHrdParametersNal;
+ const StdVideoH265SubLayerHrdParameters* pSubLayerHrdParametersVcl;
} StdVideoH265HrdParameters;
typedef struct StdVideoH265VpsFlags {
@@ -142,15 +168,33 @@ typedef struct StdVideoH265VpsFlags {
uint32_t vps_poc_proportional_to_timing_flag : 1;
} StdVideoH265VpsFlags;
+typedef struct StdVideoH265ProfileTierLevelFlags {
+ uint32_t general_tier_flag : 1;
+ uint32_t general_progressive_source_flag : 1;
+ uint32_t general_interlaced_source_flag : 1;
+ uint32_t general_non_packed_constraint_flag : 1;
+ uint32_t general_frame_only_constraint_flag : 1;
+} StdVideoH265ProfileTierLevelFlags;
+
+typedef struct StdVideoH265ProfileTierLevel {
+ StdVideoH265ProfileTierLevelFlags flags;
+ StdVideoH265ProfileIdc general_profile_idc;
+ StdVideoH265LevelIdc general_level_idc;
+} StdVideoH265ProfileTierLevel;
+
typedef struct StdVideoH265VideoParameterSet {
- uint8_t vps_video_parameter_set_id;
- uint8_t vps_max_sub_layers_minus1;
- uint32_t vps_num_units_in_tick;
- uint32_t vps_time_scale;
- uint32_t vps_num_ticks_poc_diff_one_minus1;
- StdVideoH265DecPicBufMgr* pDecPicBufMgr;
- StdVideoH265HrdParameters* pHrdParameters;
- StdVideoH265VpsFlags flags;
+ StdVideoH265VpsFlags flags;
+ uint8_t vps_video_parameter_set_id;
+ uint8_t vps_max_sub_layers_minus1;
+ uint8_t reserved1;
+ uint8_t reserved2;
+ uint32_t vps_num_units_in_tick;
+ uint32_t vps_time_scale;
+ uint32_t vps_num_ticks_poc_diff_one_minus1;
+ uint32_t reserved3;
+ const StdVideoH265DecPicBufMgr* pDecPicBufMgr;
+ const StdVideoH265HrdParameters* pHrdParameters;
+ const StdVideoH265ProfileTierLevel* pProfileTierLevel;
} StdVideoH265VideoParameterSet;
typedef struct StdVideoH265ScalingLists {
@@ -184,29 +228,32 @@ typedef struct StdVideoH265SpsVuiFlags {
} StdVideoH265SpsVuiFlags;
typedef struct StdVideoH265SequenceParameterSetVui {
- uint8_t aspect_ratio_idc;
- uint16_t sar_width;
- uint16_t sar_height;
- uint8_t video_format;
- uint8_t colour_primaries;
- uint8_t transfer_characteristics;
- uint8_t matrix_coeffs;
- uint8_t chroma_sample_loc_type_top_field;
- uint8_t chroma_sample_loc_type_bottom_field;
- uint16_t def_disp_win_left_offset;
- uint16_t def_disp_win_right_offset;
- uint16_t def_disp_win_top_offset;
- uint16_t def_disp_win_bottom_offset;
- uint32_t vui_num_units_in_tick;
- uint32_t vui_time_scale;
- uint32_t vui_num_ticks_poc_diff_one_minus1;
- StdVideoH265HrdParameters* pHrdParameters;
- uint16_t min_spatial_segmentation_idc;
- uint8_t max_bytes_per_pic_denom;
- uint8_t max_bits_per_min_cu_denom;
- uint8_t log2_max_mv_length_horizontal;
- uint8_t log2_max_mv_length_vertical;
- StdVideoH265SpsVuiFlags flags;
+ StdVideoH265SpsVuiFlags flags;
+ StdVideoH265AspectRatioIdc aspect_ratio_idc;
+ uint16_t sar_width;
+ uint16_t sar_height;
+ uint8_t video_format;
+ uint8_t colour_primaries;
+ uint8_t transfer_characteristics;
+ uint8_t matrix_coeffs;
+ uint8_t chroma_sample_loc_type_top_field;
+ uint8_t chroma_sample_loc_type_bottom_field;
+ uint8_t reserved1;
+ uint8_t reserved2;
+ uint16_t def_disp_win_left_offset;
+ uint16_t def_disp_win_right_offset;
+ uint16_t def_disp_win_top_offset;
+ uint16_t def_disp_win_bottom_offset;
+ uint32_t vui_num_units_in_tick;
+ uint32_t vui_time_scale;
+ uint32_t vui_num_ticks_poc_diff_one_minus1;
+ uint16_t min_spatial_segmentation_idc;
+ uint16_t reserved3;
+ uint8_t max_bytes_per_pic_denom;
+ uint8_t max_bits_per_min_cu_denom;
+ uint8_t log2_max_mv_length_horizontal;
+ uint8_t log2_max_mv_length_vertical;
+ const StdVideoH265HrdParameters* pHrdParameters;
} StdVideoH265SequenceParameterSetVui;
typedef struct StdVideoH265PredictorPaletteEntries {
@@ -216,6 +263,8 @@ typedef struct StdVideoH265PredictorPaletteEntries {
typedef struct StdVideoH265SpsFlags {
uint32_t sps_temporal_id_nesting_flag : 1;
uint32_t separate_colour_plane_flag : 1;
+ uint32_t conformance_window_flag : 1;
+ uint32_t sps_sub_layer_ordering_info_present_flag : 1;
uint32_t scaling_list_enabled_flag : 1;
uint32_t sps_scaling_list_data_present_flag : 1;
uint32_t amp_enabled_flag : 1;
@@ -237,50 +286,80 @@ typedef struct StdVideoH265SpsFlags {
uint32_t high_precision_offsets_enabled_flag : 1;
uint32_t persistent_rice_adaptation_enabled_flag : 1;
uint32_t cabac_bypass_alignment_enabled_flag : 1;
+ uint32_t sps_scc_extension_flag : 1;
uint32_t sps_curr_pic_ref_enabled_flag : 1;
uint32_t palette_mode_enabled_flag : 1;
- uint32_t sps_palette_predictor_initializer_present_flag : 1;
+ uint32_t sps_palette_predictor_initializers_present_flag : 1;
uint32_t intra_boundary_filtering_disabled_flag : 1;
} StdVideoH265SpsFlags;
+typedef struct StdVideoH265ShortTermRefPicSetFlags {
+ uint32_t inter_ref_pic_set_prediction_flag : 1;
+ uint32_t delta_rps_sign : 1;
+} StdVideoH265ShortTermRefPicSetFlags;
+
+typedef struct StdVideoH265ShortTermRefPicSet {
+ StdVideoH265ShortTermRefPicSetFlags flags;
+ uint32_t delta_idx_minus1;
+ uint16_t use_delta_flag;
+ uint16_t abs_delta_rps_minus1;
+ uint16_t used_by_curr_pic_flag;
+ uint16_t used_by_curr_pic_s0_flag;
+ uint16_t used_by_curr_pic_s1_flag;
+ uint16_t reserved1;
+ uint8_t reserved2;
+ uint8_t reserved3;
+ uint8_t num_negative_pics;
+ uint8_t num_positive_pics;
+ uint16_t delta_poc_s0_minus1[STD_VIDEO_H265_MAX_DPB_SIZE];
+ uint16_t delta_poc_s1_minus1[STD_VIDEO_H265_MAX_DPB_SIZE];
+} StdVideoH265ShortTermRefPicSet;
+
+typedef struct StdVideoH265LongTermRefPicsSps {
+ uint32_t used_by_curr_pic_lt_sps_flag;
+ uint32_t lt_ref_pic_poc_lsb_sps[STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS];
+} StdVideoH265LongTermRefPicsSps;
+
typedef struct StdVideoH265SequenceParameterSet {
- StdVideoH265ProfileIdc profile_idc;
- StdVideoH265Level level_idc;
- uint32_t pic_width_in_luma_samples;
- uint32_t pic_height_in_luma_samples;
- uint8_t sps_video_parameter_set_id;
- uint8_t sps_max_sub_layers_minus1;
- uint8_t sps_seq_parameter_set_id;
- uint8_t chroma_format_idc;
- uint8_t bit_depth_luma_minus8;
- uint8_t bit_depth_chroma_minus8;
- uint8_t log2_max_pic_order_cnt_lsb_minus4;
- uint8_t sps_max_dec_pic_buffering_minus1;
- uint8_t log2_min_luma_coding_block_size_minus3;
- uint8_t log2_diff_max_min_luma_coding_block_size;
- uint8_t log2_min_luma_transform_block_size_minus2;
- uint8_t log2_diff_max_min_luma_transform_block_size;
- uint8_t max_transform_hierarchy_depth_inter;
- uint8_t max_transform_hierarchy_depth_intra;
- uint8_t num_short_term_ref_pic_sets;
- uint8_t num_long_term_ref_pics_sps;
- uint8_t pcm_sample_bit_depth_luma_minus1;
- uint8_t pcm_sample_bit_depth_chroma_minus1;
- uint8_t log2_min_pcm_luma_coding_block_size_minus3;
- uint8_t log2_diff_max_min_pcm_luma_coding_block_size;
- uint32_t conf_win_left_offset;
- uint32_t conf_win_right_offset;
- uint32_t conf_win_top_offset;
- uint32_t conf_win_bottom_offset;
- StdVideoH265DecPicBufMgr* pDecPicBufMgr;
- StdVideoH265SpsFlags flags;
- StdVideoH265ScalingLists* pScalingLists;
- StdVideoH265SequenceParameterSetVui* pSequenceParameterSetVui;
- uint8_t palette_max_size;
- uint8_t delta_palette_max_predictor_size;
- uint8_t motion_vector_resolution_control_idc;
- uint8_t sps_num_palette_predictor_initializer_minus1;
- StdVideoH265PredictorPaletteEntries* pPredictorPaletteEntries;
+ StdVideoH265SpsFlags flags;
+ StdVideoH265ChromaFormatIdc chroma_format_idc;
+ uint32_t pic_width_in_luma_samples;
+ uint32_t pic_height_in_luma_samples;
+ uint8_t sps_video_parameter_set_id;
+ uint8_t sps_max_sub_layers_minus1;
+ uint8_t sps_seq_parameter_set_id;
+ uint8_t bit_depth_luma_minus8;
+ uint8_t bit_depth_chroma_minus8;
+ uint8_t log2_max_pic_order_cnt_lsb_minus4;
+ uint8_t log2_min_luma_coding_block_size_minus3;
+ uint8_t log2_diff_max_min_luma_coding_block_size;
+ uint8_t log2_min_luma_transform_block_size_minus2;
+ uint8_t log2_diff_max_min_luma_transform_block_size;
+ uint8_t max_transform_hierarchy_depth_inter;
+ uint8_t max_transform_hierarchy_depth_intra;
+ uint8_t num_short_term_ref_pic_sets;
+ uint8_t num_long_term_ref_pics_sps;
+ uint8_t pcm_sample_bit_depth_luma_minus1;
+ uint8_t pcm_sample_bit_depth_chroma_minus1;
+ uint8_t log2_min_pcm_luma_coding_block_size_minus3;
+ uint8_t log2_diff_max_min_pcm_luma_coding_block_size;
+ uint8_t reserved1;
+ uint8_t reserved2;
+ uint8_t palette_max_size;
+ uint8_t delta_palette_max_predictor_size;
+ uint8_t motion_vector_resolution_control_idc;
+ uint8_t sps_num_palette_predictor_initializers_minus1;
+ uint32_t conf_win_left_offset;
+ uint32_t conf_win_right_offset;
+ uint32_t conf_win_top_offset;
+ uint32_t conf_win_bottom_offset;
+ const StdVideoH265ProfileTierLevel* pProfileTierLevel;
+ const StdVideoH265DecPicBufMgr* pDecPicBufMgr;
+ const StdVideoH265ScalingLists* pScalingLists;
+ const StdVideoH265ShortTermRefPicSet* pShortTermRefPicSet;
+ const StdVideoH265LongTermRefPicsSps* pLongTermRefPicsSps;
+ const StdVideoH265SequenceParameterSetVui* pSequenceParameterSetVui;
+ const StdVideoH265PredictorPaletteEntries* pPredictorPaletteEntries;
} StdVideoH265SequenceParameterSet;
typedef struct StdVideoH265PpsFlags {
@@ -312,44 +391,48 @@ typedef struct StdVideoH265PpsFlags {
uint32_t pps_curr_pic_ref_enabled_flag : 1;
uint32_t residual_adaptive_colour_transform_enabled_flag : 1;
uint32_t pps_slice_act_qp_offsets_present_flag : 1;
- uint32_t pps_palette_predictor_initializer_present_flag : 1;
+ uint32_t pps_palette_predictor_initializers_present_flag : 1;
uint32_t monochrome_palette_flag : 1;
uint32_t pps_range_extension_flag : 1;
} StdVideoH265PpsFlags;
typedef struct StdVideoH265PictureParameterSet {
- uint8_t pps_pic_parameter_set_id;
- uint8_t pps_seq_parameter_set_id;
- uint8_t num_extra_slice_header_bits;
- uint8_t num_ref_idx_l0_default_active_minus1;
- uint8_t num_ref_idx_l1_default_active_minus1;
- int8_t init_qp_minus26;
- uint8_t diff_cu_qp_delta_depth;
- int8_t pps_cb_qp_offset;
- int8_t pps_cr_qp_offset;
- uint8_t num_tile_columns_minus1;
- uint8_t num_tile_rows_minus1;
- uint16_t column_width_minus1[STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_COLS_LIST_SIZE];
- uint16_t row_height_minus1[STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_ROWS_LIST_SIZE];
- int8_t pps_beta_offset_div2;
- int8_t pps_tc_offset_div2;
- uint8_t log2_parallel_merge_level_minus2;
- StdVideoH265PpsFlags flags;
- StdVideoH265ScalingLists* pScalingLists;
- uint8_t log2_max_transform_skip_block_size_minus2;
- uint8_t diff_cu_chroma_qp_offset_depth;
- uint8_t chroma_qp_offset_list_len_minus1;
- int8_t cb_qp_offset_list[STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE];
- int8_t cr_qp_offset_list[STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE];
- uint8_t log2_sao_offset_scale_luma;
- uint8_t log2_sao_offset_scale_chroma;
- int8_t pps_act_y_qp_offset_plus5;
- int8_t pps_act_cb_qp_offset_plus5;
- int8_t pps_act_cr_qp_offset_plus5;
- uint8_t pps_num_palette_predictor_initializer;
- uint8_t luma_bit_depth_entry_minus8;
- uint8_t chroma_bit_depth_entry_minus8;
- StdVideoH265PredictorPaletteEntries* pPredictorPaletteEntries;
+ StdVideoH265PpsFlags flags;
+ uint8_t pps_pic_parameter_set_id;
+ uint8_t pps_seq_parameter_set_id;
+ uint8_t sps_video_parameter_set_id;
+ uint8_t num_extra_slice_header_bits;
+ uint8_t num_ref_idx_l0_default_active_minus1;
+ uint8_t num_ref_idx_l1_default_active_minus1;
+ int8_t init_qp_minus26;
+ uint8_t diff_cu_qp_delta_depth;
+ int8_t pps_cb_qp_offset;
+ int8_t pps_cr_qp_offset;
+ int8_t pps_beta_offset_div2;
+ int8_t pps_tc_offset_div2;
+ uint8_t log2_parallel_merge_level_minus2;
+ uint8_t log2_max_transform_skip_block_size_minus2;
+ uint8_t diff_cu_chroma_qp_offset_depth;
+ uint8_t chroma_qp_offset_list_len_minus1;
+ int8_t cb_qp_offset_list[STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE];
+ int8_t cr_qp_offset_list[STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE];
+ uint8_t log2_sao_offset_scale_luma;
+ uint8_t log2_sao_offset_scale_chroma;
+ int8_t pps_act_y_qp_offset_plus5;
+ int8_t pps_act_cb_qp_offset_plus5;
+ int8_t pps_act_cr_qp_offset_plus3;
+ uint8_t pps_num_palette_predictor_initializers;
+ uint8_t luma_bit_depth_entry_minus8;
+ uint8_t chroma_bit_depth_entry_minus8;
+ uint8_t num_tile_columns_minus1;
+ uint8_t num_tile_rows_minus1;
+ uint8_t reserved1;
+ uint8_t reserved2;
+ uint16_t column_width_minus1[STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_COLS_LIST_SIZE];
+ uint16_t row_height_minus1[STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_ROWS_LIST_SIZE];
+ uint32_t reserved3;
+ const StdVideoH265ScalingLists* pScalingLists;
+ const StdVideoH265PredictorPaletteEntries* pPredictorPaletteEntries;
} StdVideoH265PictureParameterSet;
diff --git a/thirdparty/vulkan/include/vk_video/vulkan_video_codec_h265std_decode.h b/thirdparty/vulkan/include/vk_video/vulkan_video_codec_h265std_decode.h
index 0867952f31..831c41bc52 100644
--- a/thirdparty/vulkan/include/vk_video/vulkan_video_codec_h265std_decode.h
+++ b/thirdparty/vulkan/include/vk_video/vulkan_video_codec_h265std_decode.h
@@ -20,7 +20,12 @@ extern "C" {
#define vulkan_video_codec_h265std_decode 1
+// Vulkan 0.9 provisional Vulkan video H.265 decode std specification version number
+#define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_API_VERSION_0_9_9 VK_MAKE_VIDEO_STD_VERSION(0, 9, 9) // Patch version should always be set to 0
+
#define STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE 8
+#define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_API_VERSION_0_9_9
+#define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h265_decode"
typedef struct StdVideoDecodeH265PictureInfoFlags {
uint32_t IrapPicFlag : 1;
uint32_t IdrPicFlag : 1;
@@ -29,27 +34,27 @@ typedef struct StdVideoDecodeH265PictureInfoFlags {
} StdVideoDecodeH265PictureInfoFlags;
typedef struct StdVideoDecodeH265PictureInfo {
- uint8_t vps_video_parameter_set_id;
- uint8_t sps_seq_parameter_set_id;
+ StdVideoDecodeH265PictureInfoFlags flags;
+ uint8_t sps_video_parameter_set_id;
+ uint8_t pps_seq_parameter_set_id;
uint8_t pps_pic_parameter_set_id;
- uint8_t num_short_term_ref_pic_sets;
+ uint8_t NumDeltaPocsOfRefRpsIdx;
int32_t PicOrderCntVal;
uint16_t NumBitsForSTRefPicSetInSlice;
- uint8_t NumDeltaPocsOfRefRpsIdx;
+ uint16_t reserved;
uint8_t RefPicSetStCurrBefore[STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE];
uint8_t RefPicSetStCurrAfter[STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE];
uint8_t RefPicSetLtCurr[STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE];
- StdVideoDecodeH265PictureInfoFlags flags;
} StdVideoDecodeH265PictureInfo;
typedef struct StdVideoDecodeH265ReferenceInfoFlags {
- uint32_t is_long_term : 1;
- uint32_t is_non_existing : 1;
+ uint32_t used_for_long_term_reference : 1;
+ uint32_t unused_for_reference : 1;
} StdVideoDecodeH265ReferenceInfoFlags;
typedef struct StdVideoDecodeH265ReferenceInfo {
- int32_t PicOrderCntVal;
StdVideoDecodeH265ReferenceInfoFlags flags;
+ int32_t PicOrderCntVal;
} StdVideoDecodeH265ReferenceInfo;
diff --git a/thirdparty/vulkan/include/vk_video/vulkan_video_codec_h265std_encode.h b/thirdparty/vulkan/include/vk_video/vulkan_video_codec_h265std_encode.h
index 2a1fed1e56..84e34e54a1 100644
--- a/thirdparty/vulkan/include/vk_video/vulkan_video_codec_h265std_encode.h
+++ b/thirdparty/vulkan/include/vk_video/vulkan_video_codec_h265std_encode.h
@@ -20,13 +20,37 @@ extern "C" {
#define vulkan_video_codec_h265std_encode 1
-#define STD_VIDEO_ENCODE_H265_LUMA_LIST_SIZE 15
-#define STD_VIDEO_ENCODE_H265_CHROMA_LIST_SIZE 15
-#define STD_VIDEO_ENCODE_H265_CHROMA_LISTS_NUM 2
+// Vulkan 0.9 provisional Vulkan video H.265 encode std specification version number
+#define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_API_VERSION_0_9_9 VK_MAKE_VIDEO_STD_VERSION(0, 9, 9) // Patch version should always be set to 0
+
+#define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_API_VERSION_0_9_9
+#define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h265_encode"
+typedef struct StdVideoEncodeH265WeightTableFlags {
+ uint16_t luma_weight_l0_flag;
+ uint16_t chroma_weight_l0_flag;
+ uint16_t luma_weight_l1_flag;
+ uint16_t chroma_weight_l1_flag;
+} StdVideoEncodeH265WeightTableFlags;
+
+typedef struct StdVideoEncodeH265WeightTable {
+ StdVideoEncodeH265WeightTableFlags flags;
+ uint8_t luma_log2_weight_denom;
+ int8_t delta_chroma_log2_weight_denom;
+ int8_t delta_luma_weight_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF];
+ int8_t luma_offset_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF];
+ int8_t delta_chroma_weight_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES];
+ int8_t delta_chroma_offset_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES];
+ int8_t delta_luma_weight_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF];
+ int8_t luma_offset_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF];
+ int8_t delta_chroma_weight_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES];
+ int8_t delta_chroma_offset_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES];
+} StdVideoEncodeH265WeightTable;
+
typedef struct StdVideoEncodeH265SliceSegmentHeaderFlags {
uint32_t first_slice_segment_in_pic_flag : 1;
uint32_t no_output_of_prior_pics_flag : 1;
uint32_t dependent_slice_segment_flag : 1;
+ uint32_t pic_output_flag : 1;
uint32_t short_term_ref_pic_set_sps_flag : 1;
uint32_t slice_temporal_mvp_enable_flag : 1;
uint32_t slice_sao_luma_flag : 1;
@@ -34,48 +58,42 @@ typedef struct StdVideoEncodeH265SliceSegmentHeaderFlags {
uint32_t num_ref_idx_active_override_flag : 1;
uint32_t mvd_l1_zero_flag : 1;
uint32_t cabac_init_flag : 1;
- uint32_t slice_deblocking_filter_disable_flag : 1;
+ uint32_t cu_chroma_qp_offset_enabled_flag : 1;
+ uint32_t deblocking_filter_override_flag : 1;
+ uint32_t slice_deblocking_filter_disabled_flag : 1;
uint32_t collocated_from_l0_flag : 1;
uint32_t slice_loop_filter_across_slices_enabled_flag : 1;
- uint32_t bLastSliceInPic : 1;
- uint32_t reservedBits : 18;
- uint16_t luma_weight_l0_flag;
- uint16_t chroma_weight_l0_flag;
- uint16_t luma_weight_l1_flag;
- uint16_t chroma_weight_l1_flag;
} StdVideoEncodeH265SliceSegmentHeaderFlags;
+typedef struct StdVideoEncodeH265SliceSegmentLongTermRefPics {
+ uint8_t num_long_term_sps;
+ uint8_t num_long_term_pics;
+ uint8_t lt_idx_sps[STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS];
+ uint8_t poc_lsb_lt[STD_VIDEO_H265_MAX_LONG_TERM_PICS];
+ uint16_t used_by_curr_pic_lt_flag;
+ uint8_t delta_poc_msb_present_flag[STD_VIDEO_H265_MAX_DELTA_POC];
+ uint8_t delta_poc_msb_cycle_lt[STD_VIDEO_H265_MAX_DELTA_POC];
+} StdVideoEncodeH265SliceSegmentLongTermRefPics;
+
typedef struct StdVideoEncodeH265SliceSegmentHeader {
- StdVideoH265SliceType slice_type;
- uint8_t slice_pic_parameter_set_id;
- uint8_t num_short_term_ref_pic_sets;
- uint32_t slice_segment_address;
- uint8_t short_term_ref_pic_set_idx;
- uint8_t num_long_term_sps;
- uint8_t num_long_term_pics;
- uint8_t collocated_ref_idx;
- uint8_t num_ref_idx_l0_active_minus1;
- uint8_t num_ref_idx_l1_active_minus1;
- uint8_t luma_log2_weight_denom;
- int8_t delta_chroma_log2_weight_denom;
- int8_t delta_luma_weight_l0[STD_VIDEO_ENCODE_H265_LUMA_LIST_SIZE];
- int8_t luma_offset_l0[STD_VIDEO_ENCODE_H265_LUMA_LIST_SIZE];
- int8_t delta_chroma_weight_l0[STD_VIDEO_ENCODE_H265_CHROMA_LIST_SIZE][STD_VIDEO_ENCODE_H265_CHROMA_LISTS_NUM];
- int8_t delta_chroma_offset_l0[STD_VIDEO_ENCODE_H265_CHROMA_LIST_SIZE][STD_VIDEO_ENCODE_H265_CHROMA_LISTS_NUM];
- int8_t delta_luma_weight_l1[STD_VIDEO_ENCODE_H265_LUMA_LIST_SIZE];
- int8_t luma_offset_l1[STD_VIDEO_ENCODE_H265_LUMA_LIST_SIZE];
- int8_t delta_chroma_weight_l1[STD_VIDEO_ENCODE_H265_CHROMA_LIST_SIZE][STD_VIDEO_ENCODE_H265_CHROMA_LISTS_NUM];
- int8_t delta_chroma_offset_l1[STD_VIDEO_ENCODE_H265_CHROMA_LIST_SIZE][STD_VIDEO_ENCODE_H265_CHROMA_LISTS_NUM];
- uint8_t MaxNumMergeCand;
- int8_t slice_qp_delta;
- int8_t slice_cb_qp_offset;
- int8_t slice_cr_qp_offset;
- int8_t slice_beta_offset_div2;
- int8_t slice_tc_offset_div2;
- int8_t slice_act_y_qp_offset;
- int8_t slice_act_cb_qp_offset;
- int8_t slice_act_cr_qp_offset;
- StdVideoEncodeH265SliceSegmentHeaderFlags flags;
+ StdVideoEncodeH265SliceSegmentHeaderFlags flags;
+ StdVideoH265SliceType slice_type;
+ uint32_t slice_segment_address;
+ uint8_t short_term_ref_pic_set_idx;
+ uint8_t collocated_ref_idx;
+ uint8_t num_ref_idx_l0_active_minus1;
+ uint8_t num_ref_idx_l1_active_minus1;
+ uint8_t MaxNumMergeCand;
+ int8_t slice_cb_qp_offset;
+ int8_t slice_cr_qp_offset;
+ int8_t slice_beta_offset_div2;
+ int8_t slice_tc_offset_div2;
+ int8_t slice_act_y_qp_offset;
+ int8_t slice_act_cb_qp_offset;
+ int8_t slice_act_cr_qp_offset;
+ const StdVideoH265ShortTermRefPicSet* pShortTermRefPicSet;
+ const StdVideoEncodeH265SliceSegmentLongTermRefPics* pLongTermRefPics;
+ const StdVideoEncodeH265WeightTable* pWeightTable;
} StdVideoEncodeH265SliceSegmentHeader;
typedef struct StdVideoEncodeH265ReferenceModificationFlags {
@@ -86,35 +104,38 @@ typedef struct StdVideoEncodeH265ReferenceModificationFlags {
typedef struct StdVideoEncodeH265ReferenceModifications {
StdVideoEncodeH265ReferenceModificationFlags flags;
uint8_t referenceList0ModificationsCount;
- uint8_t* pReferenceList0Modifications;
+ const uint8_t* pReferenceList0Modifications;
uint8_t referenceList1ModificationsCount;
- uint8_t* pReferenceList1Modifications;
+ const uint8_t* pReferenceList1Modifications;
} StdVideoEncodeH265ReferenceModifications;
typedef struct StdVideoEncodeH265PictureInfoFlags {
uint32_t is_reference_flag : 1;
uint32_t IrapPicFlag : 1;
uint32_t long_term_flag : 1;
+ uint32_t discardable_flag : 1;
+ uint32_t cross_layer_bla_flag : 1;
} StdVideoEncodeH265PictureInfoFlags;
typedef struct StdVideoEncodeH265PictureInfo {
+ StdVideoEncodeH265PictureInfoFlags flags;
StdVideoH265PictureType PictureType;
uint8_t sps_video_parameter_set_id;
uint8_t pps_seq_parameter_set_id;
+ uint8_t pps_pic_parameter_set_id;
int32_t PicOrderCntVal;
uint8_t TemporalId;
- StdVideoEncodeH265PictureInfoFlags flags;
} StdVideoEncodeH265PictureInfo;
typedef struct StdVideoEncodeH265ReferenceInfoFlags {
- uint32_t is_long_term : 1;
- uint32_t isUsedFlag : 1;
+ uint32_t used_for_long_term_reference : 1;
+ uint32_t unused_for_reference : 1;
} StdVideoEncodeH265ReferenceInfoFlags;
typedef struct StdVideoEncodeH265ReferenceInfo {
+ StdVideoEncodeH265ReferenceInfoFlags flags;
int32_t PicOrderCntVal;
uint8_t TemporalId;
- StdVideoEncodeH265ReferenceInfoFlags flags;
} StdVideoEncodeH265ReferenceInfo;
diff --git a/thirdparty/vulkan/include/vulkan/vulkan.h b/thirdparty/vulkan/include/vulkan/vulkan.h
index 004fa70952..3510ac912b 100644
--- a/thirdparty/vulkan/include/vulkan/vulkan.h
+++ b/thirdparty/vulkan/include/vulkan/vulkan.h
@@ -38,7 +38,6 @@
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-#include <wayland-client.h>
#include "vulkan_wayland.h"
#endif
diff --git a/thirdparty/vulkan/include/vulkan/vulkan.hpp b/thirdparty/vulkan/include/vulkan/vulkan.hpp
index c67ba81d1c..41f71cdf8f 100644
--- a/thirdparty/vulkan/include/vulkan/vulkan.hpp
+++ b/thirdparty/vulkan/include/vulkan/vulkan.hpp
@@ -27,20 +27,11 @@
#endif
#include <algorithm>
-#include <array>
-#include <cstddef>
-#include <cstdint>
-#include <cstring>
-#include <functional>
-#include <initializer_list>
-#include <sstream>
-#include <string>
-#include <system_error>
-#include <tuple>
-#include <type_traits>
+#include <array> // ArrayWrapperND
+#include <string> // std::string
#include <vulkan/vulkan.h>
#if 17 <= VULKAN_HPP_CPP_VERSION
-# include <string_view>
+# include <string_view> // std::string_view
#endif
#if defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
@@ -48,8 +39,12 @@
# define VULKAN_HPP_NO_SMART_HANDLE
# endif
#else
-# include <memory>
-# include <vector>
+# include <tuple> // std::tie
+# include <vector> // std::vector
+#endif
+
+#if !defined( VULKAN_HPP_NO_EXCEPTIONS )
+# include <system_error> // std::is_error_code_enum
#endif
#if defined( VULKAN_HPP_NO_CONSTRUCTORS )
@@ -107,7 +102,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h
# define __has_include( x ) false
#endif
-#if ( 201711 <= __cpp_impl_three_way_comparison ) && __has_include( <compare> ) && !defined( VULKAN_HPP_NO_SPACESHIP_OPERATOR )
+#if ( 201907 <= __cpp_lib_three_way_comparison ) && __has_include( <compare> ) && !defined( VULKAN_HPP_NO_SPACESHIP_OPERATOR )
# define VULKAN_HPP_HAS_SPACESHIP_OPERATOR
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
@@ -119,9 +114,9 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h
# include <span>
#endif
-static_assert( VK_HEADER_VERSION == 205, "Wrong VK_HEADER_VERSION!" );
+static_assert( VK_HEADER_VERSION == 231, "Wrong VK_HEADER_VERSION!" );
-// 32-bit vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default.
+// 32-bit vulkan is not typesafe for non-dispatchable handles, so don't allow copy constructors on this platform by default.
// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION
#if ( VK_USE_64_BIT_PTR_DEFINES == 1 )
# if !defined( VULKAN_HPP_TYPESAFE_CONVERSION )
@@ -240,273 +235,13 @@ static_assert( VK_HEADER_VERSION == 205, "Wrong VK_HEADER_VERSION!" );
namespace VULKAN_HPP_NAMESPACE
{
-#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- template <typename T>
- class ArrayProxy
- {
- public:
- VULKAN_HPP_CONSTEXPR ArrayProxy() VULKAN_HPP_NOEXCEPT
- : m_count( 0 )
- , m_ptr( nullptr )
- {}
-
- VULKAN_HPP_CONSTEXPR ArrayProxy( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_count( 0 )
- , m_ptr( nullptr )
- {}
-
- ArrayProxy( T & value ) VULKAN_HPP_NOEXCEPT
- : m_count( 1 )
- , m_ptr( &value )
- {}
-
- template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
- ArrayProxy( typename std::remove_const<T>::type & value ) VULKAN_HPP_NOEXCEPT
- : m_count( 1 )
- , m_ptr( &value )
- {}
-
- ArrayProxy( uint32_t count, T * ptr ) VULKAN_HPP_NOEXCEPT
- : m_count( count )
- , m_ptr( ptr )
- {}
-
- template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
- ArrayProxy( uint32_t count, typename std::remove_const<T>::type * ptr ) VULKAN_HPP_NOEXCEPT
- : m_count( count )
- , m_ptr( ptr )
- {}
-
-# if __GNUC__ >= 9
-# pragma GCC diagnostic push
-# pragma GCC diagnostic ignored "-Winit-list-lifetime"
-# endif
-
- ArrayProxy( std::initializer_list<T> const & list ) VULKAN_HPP_NOEXCEPT
- : m_count( static_cast<uint32_t>( list.size() ) )
- , m_ptr( list.begin() )
- {}
-
- template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
- ArrayProxy( std::initializer_list<typename std::remove_const<T>::type> const & list ) VULKAN_HPP_NOEXCEPT
- : m_count( static_cast<uint32_t>( list.size() ) )
- , m_ptr( list.begin() )
- {}
-
- ArrayProxy( std::initializer_list<T> & list ) VULKAN_HPP_NOEXCEPT
- : m_count( static_cast<uint32_t>( list.size() ) )
- , m_ptr( list.begin() )
- {}
-
- template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
- ArrayProxy( std::initializer_list<typename std::remove_const<T>::type> & list ) VULKAN_HPP_NOEXCEPT
- : m_count( static_cast<uint32_t>( list.size() ) )
- , m_ptr( list.begin() )
- {}
-
-# if __GNUC__ >= 9
-# pragma GCC diagnostic pop
-# endif
-
- // Any type with a .data() return type implicitly convertible to T*, and a .size() return type implicitly
- // convertible to size_t. The const version can capture temporaries, with lifetime ending at end of statement.
- template <typename V,
- typename std::enable_if<
- std::is_convertible<decltype( std::declval<V>().data() ), T *>::value &&
- std::is_convertible<decltype( std::declval<V>().size() ), std::size_t>::value>::type * = nullptr>
- ArrayProxy( V const & v ) VULKAN_HPP_NOEXCEPT
- : m_count( static_cast<uint32_t>( v.size() ) )
- , m_ptr( v.data() )
- {}
-
- template <typename V,
- typename std::enable_if<
- std::is_convertible<decltype( std::declval<V>().data() ), T *>::value &&
- std::is_convertible<decltype( std::declval<V>().size() ), std::size_t>::value>::type * = nullptr>
- ArrayProxy( V & v ) VULKAN_HPP_NOEXCEPT
- : m_count( static_cast<uint32_t>( v.size() ) )
- , m_ptr( v.data() )
- {}
-
- const T * begin() const VULKAN_HPP_NOEXCEPT
- {
- return m_ptr;
- }
-
- const T * end() const VULKAN_HPP_NOEXCEPT
- {
- return m_ptr + m_count;
- }
-
- const T & front() const VULKAN_HPP_NOEXCEPT
- {
- VULKAN_HPP_ASSERT( m_count && m_ptr );
- return *m_ptr;
- }
-
- const T & back() const VULKAN_HPP_NOEXCEPT
- {
- VULKAN_HPP_ASSERT( m_count && m_ptr );
- return *( m_ptr + m_count - 1 );
- }
-
- bool empty() const VULKAN_HPP_NOEXCEPT
- {
- return ( m_count == 0 );
- }
-
- uint32_t size() const VULKAN_HPP_NOEXCEPT
- {
- return m_count;
- }
-
- T * data() const VULKAN_HPP_NOEXCEPT
- {
- return m_ptr;
- }
-
- private:
- uint32_t m_count;
- T * m_ptr;
- };
-
- template <typename T>
- class ArrayProxyNoTemporaries
- {
- public:
- VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries() VULKAN_HPP_NOEXCEPT
- : m_count( 0 )
- , m_ptr( nullptr )
- {}
-
- VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
- : m_count( 0 )
- , m_ptr( nullptr )
- {}
-
- ArrayProxyNoTemporaries( T & value ) VULKAN_HPP_NOEXCEPT
- : m_count( 1 )
- , m_ptr( &value )
- {}
-
- template <typename V>
- ArrayProxyNoTemporaries( V && value ) = delete;
-
- template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
- ArrayProxyNoTemporaries( typename std::remove_const<T>::type & value ) VULKAN_HPP_NOEXCEPT
- : m_count( 1 )
- , m_ptr( &value )
- {}
-
- template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
- ArrayProxyNoTemporaries( typename std::remove_const<T>::type && value ) = delete;
-
- ArrayProxyNoTemporaries( uint32_t count, T * ptr ) VULKAN_HPP_NOEXCEPT
- : m_count( count )
- , m_ptr( ptr )
- {}
-
- template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
- ArrayProxyNoTemporaries( uint32_t count, typename std::remove_const<T>::type * ptr ) VULKAN_HPP_NOEXCEPT
- : m_count( count )
- , m_ptr( ptr )
- {}
-
- ArrayProxyNoTemporaries( std::initializer_list<T> const & list ) VULKAN_HPP_NOEXCEPT
- : m_count( static_cast<uint32_t>( list.size() ) )
- , m_ptr( list.begin() )
- {}
-
- ArrayProxyNoTemporaries( std::initializer_list<T> const && list ) = delete;
-
- template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
- ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> const & list )
- VULKAN_HPP_NOEXCEPT
- : m_count( static_cast<uint32_t>( list.size() ) )
- , m_ptr( list.begin() )
- {}
-
- template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
- ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> const && list ) = delete;
-
- ArrayProxyNoTemporaries( std::initializer_list<T> & list ) VULKAN_HPP_NOEXCEPT
- : m_count( static_cast<uint32_t>( list.size() ) )
- , m_ptr( list.begin() )
- {}
-
- ArrayProxyNoTemporaries( std::initializer_list<T> && list ) = delete;
-
- template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
- ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> & list ) VULKAN_HPP_NOEXCEPT
- : m_count( static_cast<uint32_t>( list.size() ) )
- , m_ptr( list.begin() )
- {}
-
- template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
- ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> && list ) = delete;
-
- // Any type with a .data() return type implicitly convertible to T*, and a // .size() return type implicitly
- // convertible to size_t.
- template <typename V,
- typename std::enable_if<
- std::is_convertible<decltype( std::declval<V>().data() ), T *>::value &&
- std::is_convertible<decltype( std::declval<V>().size() ), std::size_t>::value>::type * = nullptr>
- ArrayProxyNoTemporaries( V & v ) VULKAN_HPP_NOEXCEPT
- : m_count( static_cast<uint32_t>( v.size() ) )
- , m_ptr( v.data() )
- {}
-
- const T * begin() const VULKAN_HPP_NOEXCEPT
- {
- return m_ptr;
- }
-
- const T * end() const VULKAN_HPP_NOEXCEPT
- {
- return m_ptr + m_count;
- }
-
- const T & front() const VULKAN_HPP_NOEXCEPT
- {
- VULKAN_HPP_ASSERT( m_count && m_ptr );
- return *m_ptr;
- }
-
- const T & back() const VULKAN_HPP_NOEXCEPT
- {
- VULKAN_HPP_ASSERT( m_count && m_ptr );
- return *( m_ptr + m_count - 1 );
- }
-
- bool empty() const VULKAN_HPP_NOEXCEPT
- {
- return ( m_count == 0 );
- }
-
- uint32_t size() const VULKAN_HPP_NOEXCEPT
- {
- return m_count;
- }
-
- T * data() const VULKAN_HPP_NOEXCEPT
- {
- return m_ptr;
- }
-
- private:
- uint32_t m_count;
- T * m_ptr;
- };
-#endif
-
template <typename T, size_t N>
class ArrayWrapper1D : public std::array<T, N>
{
public:
VULKAN_HPP_CONSTEXPR ArrayWrapper1D() VULKAN_HPP_NOEXCEPT : std::array<T, N>() {}
- VULKAN_HPP_CONSTEXPR ArrayWrapper1D( std::array<T, N> const & data ) VULKAN_HPP_NOEXCEPT : std::array<T, N>( data )
- {}
+ VULKAN_HPP_CONSTEXPR ArrayWrapper1D( std::array<T, N> const & data ) VULKAN_HPP_NOEXCEPT : std::array<T, N>( data ) {}
#if ( VK_USE_64_BIT_PTR_DEFINES == 0 )
// on 32 bit compiles, needs overloads on index type int to resolve ambiguities
@@ -635,16 +370,14 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_CONSTEXPR ArrayWrapper2D( std::array<std::array<T, M>, N> const & data ) VULKAN_HPP_NOEXCEPT
: std::array<ArrayWrapper1D<T, M>, N>( *reinterpret_cast<std::array<ArrayWrapper1D<T, M>, N> const *>( &data ) )
- {}
+ {
+ }
};
template <typename FlagBitsType>
struct FlagTraits
{
- enum
- {
- allFlags = 0
- };
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = false;
};
template <typename BitType>
@@ -822,6 +555,340 @@ namespace VULKAN_HPP_NAMESPACE
return flags.operator^( bit );
}
+ // bitwise operators on BitType
+ template <typename BitType, typename std::enable_if<FlagTraits<BitType>::isBitmask, bool>::type = true>
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR Flags<BitType> operator&( BitType lhs, BitType rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ return Flags<BitType>( lhs ) & rhs;
+ }
+
+ template <typename BitType, typename std::enable_if<FlagTraits<BitType>::isBitmask, bool>::type = true>
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR Flags<BitType> operator|( BitType lhs, BitType rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ return Flags<BitType>( lhs ) | rhs;
+ }
+
+ template <typename BitType, typename std::enable_if<FlagTraits<BitType>::isBitmask, bool>::type = true>
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR Flags<BitType> operator^( BitType lhs, BitType rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ return Flags<BitType>( lhs ) ^ rhs;
+ }
+
+ template <typename BitType, typename std::enable_if<FlagTraits<BitType>::isBitmask, bool>::type = true>
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR Flags<BitType> operator~( BitType bit ) VULKAN_HPP_NOEXCEPT
+ {
+ return ~( Flags<BitType>( bit ) );
+ }
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ template <typename T>
+ class ArrayProxy
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR ArrayProxy() VULKAN_HPP_NOEXCEPT
+ : m_count( 0 )
+ , m_ptr( nullptr )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR ArrayProxy( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_count( 0 )
+ , m_ptr( nullptr )
+ {
+ }
+
+ ArrayProxy( T const & value ) VULKAN_HPP_NOEXCEPT
+ : m_count( 1 )
+ , m_ptr( &value )
+ {
+ }
+
+ ArrayProxy( uint32_t count, T const * ptr ) VULKAN_HPP_NOEXCEPT
+ : m_count( count )
+ , m_ptr( ptr )
+ {
+ }
+
+ template <std::size_t C>
+ ArrayProxy( T const ( &ptr )[C] ) VULKAN_HPP_NOEXCEPT
+ : m_count( C )
+ , m_ptr( ptr )
+ {
+ }
+
+# if __GNUC__ >= 9
+# pragma GCC diagnostic push
+# pragma GCC diagnostic ignored "-Winit-list-lifetime"
+# endif
+
+ ArrayProxy( std::initializer_list<T> const & list ) VULKAN_HPP_NOEXCEPT
+ : m_count( static_cast<uint32_t>( list.size() ) )
+ , m_ptr( list.begin() )
+ {
+ }
+
+ template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+ ArrayProxy( std::initializer_list<typename std::remove_const<T>::type> const & list ) VULKAN_HPP_NOEXCEPT
+ : m_count( static_cast<uint32_t>( list.size() ) )
+ , m_ptr( list.begin() )
+ {
+ }
+
+# if __GNUC__ >= 9
+# pragma GCC diagnostic pop
+# endif
+
+ // Any type with a .data() return type implicitly convertible to T*, and a .size() return type implicitly
+ // convertible to size_t. The const version can capture temporaries, with lifetime ending at end of statement.
+ template <typename V,
+ typename std::enable_if<std::is_convertible<decltype( std::declval<V>().data() ), T *>::value &&
+ std::is_convertible<decltype( std::declval<V>().size() ), std::size_t>::value>::type * = nullptr>
+ ArrayProxy( V const & v ) VULKAN_HPP_NOEXCEPT
+ : m_count( static_cast<uint32_t>( v.size() ) )
+ , m_ptr( v.data() )
+ {
+ }
+
+ const T * begin() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_ptr;
+ }
+
+ const T * end() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_ptr + m_count;
+ }
+
+ const T & front() const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( m_count && m_ptr );
+ return *m_ptr;
+ }
+
+ const T & back() const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( m_count && m_ptr );
+ return *( m_ptr + m_count - 1 );
+ }
+
+ bool empty() const VULKAN_HPP_NOEXCEPT
+ {
+ return ( m_count == 0 );
+ }
+
+ uint32_t size() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_count;
+ }
+
+ T const * data() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_ptr;
+ }
+
+ private:
+ uint32_t m_count;
+ T const * m_ptr;
+ };
+
+ template <typename T>
+ class ArrayProxyNoTemporaries
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries() VULKAN_HPP_NOEXCEPT
+ : m_count( 0 )
+ , m_ptr( nullptr )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ : m_count( 0 )
+ , m_ptr( nullptr )
+ {
+ }
+
+ ArrayProxyNoTemporaries( T & value ) VULKAN_HPP_NOEXCEPT
+ : m_count( 1 )
+ , m_ptr( &value )
+ {
+ }
+
+ template <typename V>
+ ArrayProxyNoTemporaries( V && value ) = delete;
+
+ template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+ ArrayProxyNoTemporaries( typename std::remove_const<T>::type & value ) VULKAN_HPP_NOEXCEPT
+ : m_count( 1 )
+ , m_ptr( &value )
+ {
+ }
+
+ template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+ ArrayProxyNoTemporaries( typename std::remove_const<T>::type && value ) = delete;
+
+ ArrayProxyNoTemporaries( uint32_t count, T * ptr ) VULKAN_HPP_NOEXCEPT
+ : m_count( count )
+ , m_ptr( ptr )
+ {
+ }
+
+ template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+ ArrayProxyNoTemporaries( uint32_t count, typename std::remove_const<T>::type * ptr ) VULKAN_HPP_NOEXCEPT
+ : m_count( count )
+ , m_ptr( ptr )
+ {
+ }
+
+ template <std::size_t C>
+ ArrayProxyNoTemporaries( T ( &ptr )[C] ) VULKAN_HPP_NOEXCEPT
+ : m_count( C )
+ , m_ptr( ptr )
+ {
+ }
+
+ template <std::size_t C>
+ ArrayProxyNoTemporaries( T( &&ptr )[C] ) = delete;
+
+ template <std::size_t C, typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+ ArrayProxyNoTemporaries( typename std::remove_const<T>::type ( &ptr )[C] ) VULKAN_HPP_NOEXCEPT
+ : m_count( C )
+ , m_ptr( ptr )
+ {
+ }
+
+ template <std::size_t C, typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+ ArrayProxyNoTemporaries( typename std::remove_const<T>::type( &&ptr )[C] ) = delete;
+
+ ArrayProxyNoTemporaries( std::initializer_list<T> const & list ) VULKAN_HPP_NOEXCEPT
+ : m_count( static_cast<uint32_t>( list.size() ) )
+ , m_ptr( list.begin() )
+ {
+ }
+
+ ArrayProxyNoTemporaries( std::initializer_list<T> const && list ) = delete;
+
+ template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+ ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> const & list ) VULKAN_HPP_NOEXCEPT
+ : m_count( static_cast<uint32_t>( list.size() ) )
+ , m_ptr( list.begin() )
+ {
+ }
+
+ template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+ ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> const && list ) = delete;
+
+ ArrayProxyNoTemporaries( std::initializer_list<T> & list ) VULKAN_HPP_NOEXCEPT
+ : m_count( static_cast<uint32_t>( list.size() ) )
+ , m_ptr( list.begin() )
+ {
+ }
+
+ ArrayProxyNoTemporaries( std::initializer_list<T> && list ) = delete;
+
+ template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+ ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> & list ) VULKAN_HPP_NOEXCEPT
+ : m_count( static_cast<uint32_t>( list.size() ) )
+ , m_ptr( list.begin() )
+ {
+ }
+
+ template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+ ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> && list ) = delete;
+
+ // Any type with a .data() return type implicitly convertible to T*, and a // .size() return type implicitly
+ // convertible to size_t.
+ template <typename V,
+ typename std::enable_if<std::is_convertible<decltype( std::declval<V>().data() ), T *>::value &&
+ std::is_convertible<decltype( std::declval<V>().size() ), std::size_t>::value>::type * = nullptr>
+ ArrayProxyNoTemporaries( V & v ) VULKAN_HPP_NOEXCEPT
+ : m_count( static_cast<uint32_t>( v.size() ) )
+ , m_ptr( v.data() )
+ {
+ }
+
+ const T * begin() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_ptr;
+ }
+
+ const T * end() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_ptr + m_count;
+ }
+
+ const T & front() const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( m_count && m_ptr );
+ return *m_ptr;
+ }
+
+ const T & back() const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( m_count && m_ptr );
+ return *( m_ptr + m_count - 1 );
+ }
+
+ bool empty() const VULKAN_HPP_NOEXCEPT
+ {
+ return ( m_count == 0 );
+ }
+
+ uint32_t size() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_count;
+ }
+
+ T * data() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_ptr;
+ }
+
+ private:
+ uint32_t m_count;
+ T * m_ptr;
+ };
+
+ template <typename T>
+ class StridedArrayProxy : protected ArrayProxy<T>
+ {
+ public:
+ using ArrayProxy<T>::ArrayProxy;
+
+ StridedArrayProxy( uint32_t count, T const * ptr, uint32_t stride ) VULKAN_HPP_NOEXCEPT
+ : ArrayProxy<T>( count, ptr )
+ , m_stride( stride )
+ {
+ VULKAN_HPP_ASSERT( sizeof( T ) <= stride );
+ }
+
+ using ArrayProxy<T>::begin;
+
+ const T * end() const VULKAN_HPP_NOEXCEPT
+ {
+ return reinterpret_cast<T const *>( static_cast<uint8_t const *>( begin() ) + size() * m_stride );
+ }
+
+ using ArrayProxy<T>::front;
+
+ const T & back() const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( begin() && size() );
+ return *reinterpret_cast<T const *>( static_cast<uint8_t const *>( begin() ) + ( size() - 1 ) * m_stride );
+ }
+
+ using ArrayProxy<T>::empty;
+ using ArrayProxy<T>::size;
+ using ArrayProxy<T>::data;
+
+ uint32_t stride() const
+ {
+ return m_stride;
+ }
+
+ private:
+ uint32_t m_stride = sizeof( T );
+ };
+
template <typename RefType>
class Optional
{
@@ -880,26 +947,23 @@ namespace VULKAN_HPP_NAMESPACE
template <size_t Index, typename T, typename... ChainElements>
struct StructureChainContains
{
- static const bool value =
- std::is_same<T, typename std::tuple_element<Index, std::tuple<ChainElements...>>::type>::value ||
- StructureChainContains<Index - 1, T, ChainElements...>::value;
+ static const bool value = std::is_same<T, typename std::tuple_element<Index, std::tuple<ChainElements...>>::type>::value ||
+ StructureChainContains<Index - 1, T, ChainElements...>::value;
};
template <typename T, typename... ChainElements>
struct StructureChainContains<0, T, ChainElements...>
{
- static const bool value =
- std::is_same<T, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value;
+ static const bool value = std::is_same<T, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value;
};
template <size_t Index, typename... ChainElements>
struct StructureChainValidation
{
- using TestType = typename std::tuple_element<Index, std::tuple<ChainElements...>>::type;
- static const bool valid =
- StructExtends<TestType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value &&
- ( TestType::allowDuplicate || !StructureChainContains<Index - 1, TestType, ChainElements...>::value ) &&
- StructureChainValidation<Index - 1, ChainElements...>::valid;
+ using TestType = typename std::tuple_element<Index, std::tuple<ChainElements...>>::type;
+ static const bool valid = StructExtends<TestType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value &&
+ ( TestType::allowDuplicate || !StructureChainContains<Index - 1, TestType, ChainElements...>::value ) &&
+ StructureChainValidation<Index - 1, ChainElements...>::valid;
};
template <typename... ChainElements>
@@ -914,26 +978,22 @@ namespace VULKAN_HPP_NAMESPACE
public:
StructureChain() VULKAN_HPP_NOEXCEPT
{
- static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid,
- "The structure chain is not valid!" );
+ static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid, "The structure chain is not valid!" );
link<sizeof...( ChainElements ) - 1>();
}
StructureChain( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT : std::tuple<ChainElements...>( rhs )
{
- static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid,
- "The structure chain is not valid!" );
+ static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid, "The structure chain is not valid!" );
link( &std::get<0>( *this ),
&std::get<0>( rhs ),
reinterpret_cast<VkBaseOutStructure *>( &std::get<0>( *this ) ),
reinterpret_cast<VkBaseInStructure const *>( &std::get<0>( rhs ) ) );
}
- StructureChain( StructureChain && rhs ) VULKAN_HPP_NOEXCEPT
- : std::tuple<ChainElements...>( std::forward<std::tuple<ChainElements...>>( rhs ) )
+ StructureChain( StructureChain && rhs ) VULKAN_HPP_NOEXCEPT : std::tuple<ChainElements...>( std::forward<std::tuple<ChainElements...>>( rhs ) )
{
- static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid,
- "The structure chain is not valid!" );
+ static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid, "The structure chain is not valid!" );
link( &std::get<0>( *this ),
&std::get<0>( rhs ),
reinterpret_cast<VkBaseOutStructure *>( &std::get<0>( *this ) ),
@@ -942,8 +1002,7 @@ namespace VULKAN_HPP_NAMESPACE
StructureChain( ChainElements const &... elems ) VULKAN_HPP_NOEXCEPT : std::tuple<ChainElements...>( elems... )
{
- static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid,
- "The structure chain is not valid!" );
+ static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid, "The structure chain is not valid!" );
link<sizeof...( ChainElements ) - 1>();
}
@@ -962,15 +1021,13 @@ namespace VULKAN_HPP_NAMESPACE
template <typename T = typename std::tuple_element<0, std::tuple<ChainElements...>>::type, size_t Which = 0>
T & get() VULKAN_HPP_NOEXCEPT
{
- return std::get<ChainElementIndex<0, T, Which, void, ChainElements...>::value>(
- static_cast<std::tuple<ChainElements...> &>( *this ) );
+ return std::get<ChainElementIndex<0, T, Which, void, ChainElements...>::value>( static_cast<std::tuple<ChainElements...> &>( *this ) );
}
template <typename T = typename std::tuple_element<0, std::tuple<ChainElements...>>::type, size_t Which = 0>
T const & get() const VULKAN_HPP_NOEXCEPT
{
- return std::get<ChainElementIndex<0, T, Which, void, ChainElements...>::value>(
- static_cast<std::tuple<ChainElements...> const &>( *this ) );
+ return std::get<ChainElementIndex<0, T, Which, void, ChainElements...>::value>( static_cast<std::tuple<ChainElements...> const &>( *this ) );
}
template <typename T0, typename T1, typename... Ts>
@@ -986,36 +1043,25 @@ namespace VULKAN_HPP_NAMESPACE
}
template <typename ClassType, size_t Which = 0>
- typename std::enable_if<
- std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value &&
- ( Which == 0 ),
- bool>::type
+ typename std::enable_if<std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value && ( Which == 0 ), bool>::type
isLinked() const VULKAN_HPP_NOEXCEPT
{
return true;
}
template <typename ClassType, size_t Which = 0>
- typename std::enable_if<
- !std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value ||
- ( Which != 0 ),
- bool>::type
+ typename std::enable_if<!std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value || ( Which != 0 ), bool>::type
isLinked() const VULKAN_HPP_NOEXCEPT
{
- static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid,
- "Can't unlink Structure that's not part of this StructureChain!" );
+ static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid, "Can't unlink Structure that's not part of this StructureChain!" );
return isLinked( reinterpret_cast<VkBaseInStructure const *>( &get<ClassType, Which>() ) );
}
template <typename ClassType, size_t Which = 0>
- typename std::enable_if<
- !std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value ||
- ( Which != 0 ),
- void>::type
+ typename std::enable_if<!std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value || ( Which != 0 ), void>::type
relink() VULKAN_HPP_NOEXCEPT
{
- static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid,
- "Can't relink Structure that's not part of this StructureChain!" );
+ static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid, "Can't relink Structure that's not part of this StructureChain!" );
auto pNext = reinterpret_cast<VkBaseInStructure *>( &get<ClassType, Which>() );
VULKAN_HPP_ASSERT( !isLinked( pNext ) );
auto & headElement = std::get<0>( static_cast<std::tuple<ChainElements...> &>( *this ) );
@@ -1024,53 +1070,41 @@ namespace VULKAN_HPP_NAMESPACE
}
template <typename ClassType, size_t Which = 0>
- typename std::enable_if<
- !std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value ||
- ( Which != 0 ),
- void>::type
+ typename std::enable_if<!std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value || ( Which != 0 ), void>::type
unlink() VULKAN_HPP_NOEXCEPT
{
- static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid,
- "Can't unlink Structure that's not part of this StructureChain!" );
+ static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid, "Can't unlink Structure that's not part of this StructureChain!" );
unlink( reinterpret_cast<VkBaseOutStructure const *>( &get<ClassType, Which>() ) );
}
private:
template <int Index, typename T, int Which, typename, class First, class... Types>
struct ChainElementIndex : ChainElementIndex<Index + 1, T, Which, void, Types...>
- {};
+ {
+ };
template <int Index, typename T, int Which, class First, class... Types>
- struct ChainElementIndex<Index,
- T,
- Which,
- typename std::enable_if<!std::is_same<T, First>::value, void>::type,
- First,
- Types...> : ChainElementIndex<Index + 1, T, Which, void, Types...>
- {};
+ struct ChainElementIndex<Index, T, Which, typename std::enable_if<!std::is_same<T, First>::value, void>::type, First, Types...>
+ : ChainElementIndex<Index + 1, T, Which, void, Types...>
+ {
+ };
template <int Index, typename T, int Which, class First, class... Types>
- struct ChainElementIndex<Index,
- T,
- Which,
- typename std::enable_if<std::is_same<T, First>::value, void>::type,
- First,
- Types...> : ChainElementIndex<Index + 1, T, Which - 1, void, Types...>
- {};
+ struct ChainElementIndex<Index, T, Which, typename std::enable_if<std::is_same<T, First>::value, void>::type, First, Types...>
+ : ChainElementIndex<Index + 1, T, Which - 1, void, Types...>
+ {
+ };
template <int Index, typename T, class First, class... Types>
- struct ChainElementIndex<Index,
- T,
- 0,
- typename std::enable_if<std::is_same<T, First>::value, void>::type,
- First,
- Types...> : std::integral_constant<int, Index>
- {};
+ struct ChainElementIndex<Index, T, 0, typename std::enable_if<std::is_same<T, First>::value, void>::type, First, Types...>
+ : std::integral_constant<int, Index>
+ {
+ };
bool isLinked( VkBaseInStructure const * pNext ) const VULKAN_HPP_NOEXCEPT
{
- VkBaseInStructure const * elementPtr = reinterpret_cast<VkBaseInStructure const *>(
- &std::get<0>( static_cast<std::tuple<ChainElements...> const &>( *this ) ) );
+ VkBaseInStructure const * elementPtr =
+ reinterpret_cast<VkBaseInStructure const *>( &std::get<0>( static_cast<std::tuple<ChainElements...> const &>( *this ) ) );
while ( elementPtr )
{
if ( elementPtr->pNext == pNext )
@@ -1092,25 +1126,24 @@ namespace VULKAN_HPP_NAMESPACE
template <size_t Index>
typename std::enable_if<Index == 0, void>::type link() VULKAN_HPP_NOEXCEPT
- {}
+ {
+ }
void link( void * dstBase, void const * srcBase, VkBaseOutStructure * dst, VkBaseInStructure const * src )
{
while ( src->pNext )
{
- std::ptrdiff_t offset =
- reinterpret_cast<char const *>( src->pNext ) - reinterpret_cast<char const *>( srcBase );
- dst->pNext = reinterpret_cast<VkBaseOutStructure *>( reinterpret_cast<char *>( dstBase ) + offset );
- dst = dst->pNext;
- src = src->pNext;
+ std::ptrdiff_t offset = reinterpret_cast<char const *>( src->pNext ) - reinterpret_cast<char const *>( srcBase );
+ dst->pNext = reinterpret_cast<VkBaseOutStructure *>( reinterpret_cast<char *>( dstBase ) + offset );
+ dst = dst->pNext;
+ src = src->pNext;
}
dst->pNext = nullptr;
}
void unlink( VkBaseOutStructure const * pNext ) VULKAN_HPP_NOEXCEPT
{
- VkBaseOutStructure * elementPtr =
- reinterpret_cast<VkBaseOutStructure *>( &std::get<0>( static_cast<std::tuple<ChainElements...> &>( *this ) ) );
+ VkBaseOutStructure * elementPtr = reinterpret_cast<VkBaseOutStructure *>( &std::get<0>( static_cast<std::tuple<ChainElements...> &>( *this ) ) );
while ( elementPtr && ( elementPtr->pNext != pNext ) )
{
elementPtr = elementPtr->pNext;
@@ -1126,7 +1159,7 @@ namespace VULKAN_HPP_NAMESPACE
}
};
-#if !defined( VULKAN_HPP_NO_SMART_HANDLE )
+# if !defined( VULKAN_HPP_NO_SMART_HANDLE )
template <typename Type, typename Dispatch>
class UniqueHandleTraits;
@@ -1144,14 +1177,16 @@ namespace VULKAN_HPP_NAMESPACE
explicit UniqueHandle( Type const & value, Deleter const & deleter = Deleter() ) VULKAN_HPP_NOEXCEPT
: Deleter( deleter )
, m_value( value )
- {}
+ {
+ }
UniqueHandle( UniqueHandle const & ) = delete;
UniqueHandle( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT
: Deleter( std::move( static_cast<Deleter &>( other ) ) )
, m_value( other.release() )
- {}
+ {
+ }
~UniqueHandle() VULKAN_HPP_NOEXCEPT
{
@@ -1235,22 +1270,20 @@ namespace VULKAN_HPP_NAMESPACE
};
template <typename UniqueType>
- VULKAN_HPP_INLINE std::vector<typename UniqueType::element_type>
- uniqueToRaw( std::vector<UniqueType> const & handles )
+ VULKAN_HPP_INLINE std::vector<typename UniqueType::element_type> uniqueToRaw( std::vector<UniqueType> const & handles )
{
std::vector<typename UniqueType::element_type> newBuffer( handles.size() );
- std::transform(
- handles.begin(), handles.end(), newBuffer.begin(), []( UniqueType const & handle ) { return handle.get(); } );
+ std::transform( handles.begin(), handles.end(), newBuffer.begin(), []( UniqueType const & handle ) { return handle.get(); } );
return newBuffer;
}
template <typename Type, typename Dispatch>
- VULKAN_HPP_INLINE void swap( UniqueHandle<Type, Dispatch> & lhs,
- UniqueHandle<Type, Dispatch> & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void swap( UniqueHandle<Type, Dispatch> & lhs, UniqueHandle<Type, Dispatch> & rhs ) VULKAN_HPP_NOEXCEPT
{
lhs.swap( rhs );
}
-#endif
+# endif
+#endif // VULKAN_HPP_DISABLE_ENHANCED_MODE
class DispatchLoaderBase
{
@@ -1260,7 +1293,8 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined( NDEBUG )
: m_valid( false )
#endif
- {}
+ {
+ }
#if !defined( NDEBUG )
size_t getVkHeaderVersion() const
@@ -1281,9 +1315,8 @@ namespace VULKAN_HPP_NAMESPACE
public:
//=== VK_VERSION_1_0 ===
- VkResult vkCreateInstance( const VkInstanceCreateInfo * pCreateInfo,
- const VkAllocationCallbacks * pAllocator,
- VkInstance * pInstance ) const VULKAN_HPP_NOEXCEPT
+ VkResult
+ vkCreateInstance( const VkInstanceCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkInstance * pInstance ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateInstance( pCreateInfo, pAllocator, pInstance );
}
@@ -1293,22 +1326,18 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkDestroyInstance( instance, pAllocator );
}
- VkResult vkEnumeratePhysicalDevices( VkInstance instance,
- uint32_t * pPhysicalDeviceCount,
- VkPhysicalDevice * pPhysicalDevices ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkEnumeratePhysicalDevices( VkInstance instance, uint32_t * pPhysicalDeviceCount, VkPhysicalDevice * pPhysicalDevices ) const VULKAN_HPP_NOEXCEPT
{
return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices );
}
- void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures * pFeatures ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures * pFeatures ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures );
}
- void vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties * pFormatProperties ) const VULKAN_HPP_NOEXCEPT
+ void
+ vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties * pFormatProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties );
}
@@ -1319,30 +1348,24 @@ namespace VULKAN_HPP_NAMESPACE
VkImageTiling tiling,
VkImageUsageFlags usage,
VkImageCreateFlags flags,
- VkImageFormatProperties * pImageFormatProperties ) const
- VULKAN_HPP_NOEXCEPT
+ VkImageFormatProperties * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceImageFormatProperties(
- physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties );
+ return ::vkGetPhysicalDeviceImageFormatProperties( physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties );
}
- void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties * pProperties ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties * pProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties );
}
void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice,
uint32_t * pQueueFamilyPropertyCount,
- VkQueueFamilyProperties * pQueueFamilyProperties ) const
- VULKAN_HPP_NOEXCEPT
+ VkQueueFamilyProperties * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceQueueFamilyProperties(
- physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
+ return ::vkGetPhysicalDeviceQueueFamilyProperties( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
}
- void vkGetPhysicalDeviceMemoryProperties(
- VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceMemoryProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties );
}
@@ -1385,31 +1408,23 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties );
}
- VkResult vkEnumerateInstanceLayerProperties( uint32_t * pPropertyCount,
- VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkEnumerateInstanceLayerProperties( uint32_t * pPropertyCount, VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties );
}
- VkResult vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice,
- uint32_t * pPropertyCount,
- VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult
+ vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties );
}
- void vkGetDeviceQueue( VkDevice device,
- uint32_t queueFamilyIndex,
- uint32_t queueIndex,
- VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT
+ void vkGetDeviceQueue( VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDeviceQueue( device, queueFamilyIndex, queueIndex, pQueue );
}
- VkResult vkQueueSubmit( VkQueue queue,
- uint32_t submitCount,
- const VkSubmitInfo * pSubmits,
- VkFence fence ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkQueueSubmit( VkQueue queue, uint32_t submitCount, const VkSubmitInfo * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT
{
return ::vkQueueSubmit( queue, submitCount, pSubmits, fence );
}
@@ -1432,19 +1447,13 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory );
}
- void vkFreeMemory( VkDevice device,
- VkDeviceMemory memory,
- const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void vkFreeMemory( VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkFreeMemory( device, memory, pAllocator );
}
- VkResult vkMapMemory( VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize offset,
- VkDeviceSize size,
- VkMemoryMapFlags flags,
- void ** ppData ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void ** ppData ) const
+ VULKAN_HPP_NOEXCEPT
{
return ::vkMapMemory( device, memory, offset, size, flags, ppData );
}
@@ -1454,53 +1463,37 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkUnmapMemory( device, memory );
}
- VkResult vkFlushMappedMemoryRanges( VkDevice device,
- uint32_t memoryRangeCount,
- const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkFlushMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT
{
return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges );
}
- VkResult vkInvalidateMappedMemoryRanges( VkDevice device,
- uint32_t memoryRangeCount,
- const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT
{
return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges );
}
- void vkGetDeviceMemoryCommitment( VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize * pCommittedMemoryInBytes ) const VULKAN_HPP_NOEXCEPT
+ void vkGetDeviceMemoryCommitment( VkDevice device, VkDeviceMemory memory, VkDeviceSize * pCommittedMemoryInBytes ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDeviceMemoryCommitment( device, memory, pCommittedMemoryInBytes );
}
- VkResult vkBindBufferMemory( VkDevice device,
- VkBuffer buffer,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT
{
return ::vkBindBufferMemory( device, buffer, memory, memoryOffset );
}
- VkResult vkBindImageMemory( VkDevice device,
- VkImage image,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT
{
return ::vkBindImageMemory( device, image, memory, memoryOffset );
}
- void vkGetBufferMemoryRequirements( VkDevice device,
- VkBuffer buffer,
- VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+ void vkGetBufferMemoryRequirements( VkDevice device, VkBuffer buffer, VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetBufferMemoryRequirements( device, buffer, pMemoryRequirements );
}
- void vkGetImageMemoryRequirements( VkDevice device,
- VkImage image,
- VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+ void vkGetImageMemoryRequirements( VkDevice device, VkImage image, VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetImageMemoryRequirements( device, image, pMemoryRequirements );
}
@@ -1508,11 +1501,9 @@ namespace VULKAN_HPP_NAMESPACE
void vkGetImageSparseMemoryRequirements( VkDevice device,
VkImage image,
uint32_t * pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements * pSparseMemoryRequirements ) const
- VULKAN_HPP_NOEXCEPT
+ VkSparseImageMemoryRequirements * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetImageSparseMemoryRequirements(
- device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
+ return ::vkGetImageSparseMemoryRequirements( device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
}
void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice,
@@ -1522,17 +1513,12 @@ namespace VULKAN_HPP_NAMESPACE
VkImageUsageFlags usage,
VkImageTiling tiling,
uint32_t * pPropertyCount,
- VkSparseImageFormatProperties * pProperties ) const
- VULKAN_HPP_NOEXCEPT
+ VkSparseImageFormatProperties * pProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceSparseImageFormatProperties(
- physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties );
+ return ::vkGetPhysicalDeviceSparseImageFormatProperties( physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties );
}
- VkResult vkQueueBindSparse( VkQueue queue,
- uint32_t bindInfoCount,
- const VkBindSparseInfo * pBindInfo,
- VkFence fence ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkQueueBindSparse( VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo * pBindInfo, VkFence fence ) const VULKAN_HPP_NOEXCEPT
{
return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence );
}
@@ -1545,9 +1531,7 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence );
}
- void vkDestroyFence( VkDevice device,
- VkFence fence,
- const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void vkDestroyFence( VkDevice device, VkFence fence, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyFence( device, fence, pAllocator );
}
@@ -1562,11 +1546,7 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkGetFenceStatus( device, fence );
}
- VkResult vkWaitForFences( VkDevice device,
- uint32_t fenceCount,
- const VkFence * pFences,
- VkBool32 waitAll,
- uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence * pFences, VkBool32 waitAll, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
{
return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout );
}
@@ -1579,9 +1559,7 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCreateSemaphore( device, pCreateInfo, pAllocator, pSemaphore );
}
- void vkDestroySemaphore( VkDevice device,
- VkSemaphore semaphore,
- const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void vkDestroySemaphore( VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroySemaphore( device, semaphore, pAllocator );
}
@@ -1594,9 +1572,7 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent );
}
- void vkDestroyEvent( VkDevice device,
- VkEvent event,
- const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void vkDestroyEvent( VkDevice device, VkEvent event, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyEvent( device, event, pAllocator );
}
@@ -1624,9 +1600,7 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool );
}
- void vkDestroyQueryPool( VkDevice device,
- VkQueryPool queryPool,
- const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void vkDestroyQueryPool( VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyQueryPool( device, queryPool, pAllocator );
}
@@ -1651,9 +1625,7 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer );
}
- void vkDestroyBuffer( VkDevice device,
- VkBuffer buffer,
- const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void vkDestroyBuffer( VkDevice device, VkBuffer buffer, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyBuffer( device, buffer, pAllocator );
}
@@ -1666,9 +1638,7 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCreateBufferView( device, pCreateInfo, pAllocator, pView );
}
- void vkDestroyBufferView( VkDevice device,
- VkBufferView bufferView,
- const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void vkDestroyBufferView( VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyBufferView( device, bufferView, pAllocator );
}
@@ -1681,9 +1651,7 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage );
}
- void vkDestroyImage( VkDevice device,
- VkImage image,
- const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void vkDestroyImage( VkDevice device, VkImage image, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyImage( device, image, pAllocator );
}
@@ -1704,9 +1672,7 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView );
}
- void vkDestroyImageView( VkDevice device,
- VkImageView imageView,
- const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void vkDestroyImageView( VkDevice device, VkImageView imageView, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyImageView( device, imageView, pAllocator );
}
@@ -1719,9 +1685,7 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCreateShaderModule( device, pCreateInfo, pAllocator, pShaderModule );
}
- void vkDestroyShaderModule( VkDevice device,
- VkShaderModule shaderModule,
- const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void vkDestroyShaderModule( VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyShaderModule( device, shaderModule, pAllocator );
}
@@ -1734,25 +1698,18 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache );
}
- void vkDestroyPipelineCache( VkDevice device,
- VkPipelineCache pipelineCache,
- const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void vkDestroyPipelineCache( VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator );
}
- VkResult vkGetPipelineCacheData( VkDevice device,
- VkPipelineCache pipelineCache,
- size_t * pDataSize,
- void * pData ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData );
}
- VkResult vkMergePipelineCaches( VkDevice device,
- VkPipelineCache dstCache,
- uint32_t srcCacheCount,
- const VkPipelineCache * pSrcCaches ) const VULKAN_HPP_NOEXCEPT
+ VkResult
+ vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache * pSrcCaches ) const VULKAN_HPP_NOEXCEPT
{
return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches );
}
@@ -1764,8 +1721,7 @@ namespace VULKAN_HPP_NAMESPACE
const VkAllocationCallbacks * pAllocator,
VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateGraphicsPipelines(
- device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
+ return ::vkCreateGraphicsPipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
}
VkResult vkCreateComputePipelines( VkDevice device,
@@ -1778,9 +1734,7 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
}
- void vkDestroyPipeline( VkDevice device,
- VkPipeline pipeline,
- const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyPipeline( device, pipeline, pAllocator );
}
@@ -1793,9 +1747,7 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout );
}
- void vkDestroyPipelineLayout( VkDevice device,
- VkPipelineLayout pipelineLayout,
- const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void vkDestroyPipelineLayout( VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator );
}
@@ -1808,9 +1760,7 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler );
}
- void vkDestroySampler( VkDevice device,
- VkSampler sampler,
- const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void vkDestroySampler( VkDevice device, VkSampler sampler, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroySampler( device, sampler, pAllocator );
}
@@ -1838,16 +1788,12 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool );
}
- void vkDestroyDescriptorPool( VkDevice device,
- VkDescriptorPool descriptorPool,
- const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator );
}
- VkResult vkResetDescriptorPool( VkDevice device,
- VkDescriptorPool descriptorPool,
- VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
{
return ::vkResetDescriptorPool( device, descriptorPool, flags );
}
@@ -1873,8 +1819,7 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t descriptorCopyCount,
const VkCopyDescriptorSet * pDescriptorCopies ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkUpdateDescriptorSets(
- device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies );
+ return ::vkUpdateDescriptorSets( device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies );
}
VkResult vkCreateFramebuffer( VkDevice device,
@@ -1885,9 +1830,7 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer );
}
- void vkDestroyFramebuffer( VkDevice device,
- VkFramebuffer framebuffer,
- const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void vkDestroyFramebuffer( VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyFramebuffer( device, framebuffer, pAllocator );
}
@@ -1900,16 +1843,12 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass );
}
- void vkDestroyRenderPass( VkDevice device,
- VkRenderPass renderPass,
- const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void vkDestroyRenderPass( VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyRenderPass( device, renderPass, pAllocator );
}
- void vkGetRenderAreaGranularity( VkDevice device,
- VkRenderPass renderPass,
- VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT
+ void vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity );
}
@@ -1922,16 +1861,12 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool );
}
- void vkDestroyCommandPool( VkDevice device,
- VkCommandPool commandPool,
- const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void vkDestroyCommandPool( VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyCommandPool( device, commandPool, pAllocator );
}
- VkResult vkResetCommandPool( VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
{
return ::vkResetCommandPool( device, commandPool, flags );
}
@@ -1951,8 +1886,7 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkFreeCommandBuffers( device, commandPool, commandBufferCount, pCommandBuffers );
}
- VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer,
- const VkCommandBufferBeginInfo * pBeginInfo ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo * pBeginInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo );
}
@@ -1962,31 +1896,23 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkEndCommandBuffer( commandBuffer );
}
- VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer,
- VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT
{
return ::vkResetCommandBuffer( commandBuffer, flags );
}
- void vkCmdBindPipeline( VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdBindPipeline( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline );
}
- void vkCmdSetViewport( VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT
+ void
+ vkCmdSetViewport( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetViewport( commandBuffer, firstViewport, viewportCount, pViewports );
}
- void vkCmdSetScissor( VkCommandBuffer commandBuffer,
- uint32_t firstScissor,
- uint32_t scissorCount,
- const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetScissor( VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors );
}
@@ -2004,36 +1930,27 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
}
- void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer,
- const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetBlendConstants( commandBuffer, blendConstants );
}
- void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer,
- float minDepthBounds,
- float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds );
}
- void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask );
}
- void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask );
}
- void vkCmdSetStencilReference( VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t reference ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference );
}
@@ -2047,20 +1964,11 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t dynamicOffsetCount,
const uint32_t * pDynamicOffsets ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdBindDescriptorSets( commandBuffer,
- pipelineBindPoint,
- layout,
- firstSet,
- descriptorSetCount,
- pDescriptorSets,
- dynamicOffsetCount,
- pDynamicOffsets );
+ return ::vkCmdBindDescriptorSets(
+ commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets );
}
- void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkIndexType indexType ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType );
}
@@ -2074,11 +1982,8 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets );
}
- void vkCmdDraw( VkCommandBuffer commandBuffer,
- uint32_t vertexCount,
- uint32_t instanceCount,
- uint32_t firstVertex,
- uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdDraw( VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const
+ VULKAN_HPP_NOEXCEPT
{
return ::vkCmdDraw( commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
}
@@ -2093,44 +1998,29 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdDrawIndexed( commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
}
- void vkCmdDrawIndirect( VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdDrawIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride );
}
- void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const
+ VULKAN_HPP_NOEXCEPT
{
return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride );
}
- void vkCmdDispatch( VkCommandBuffer commandBuffer,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdDispatch( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ );
}
- void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset );
}
- void vkCmdCopyBuffer( VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferCopy * pRegions ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy * pRegions ) const
+ VULKAN_HPP_NOEXCEPT
{
return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions );
}
@@ -2143,8 +2033,7 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t regionCount,
const VkImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdCopyImage(
- commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
+ return ::vkCmdCopyImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
}
void vkCmdBlitImage( VkCommandBuffer commandBuffer,
@@ -2156,8 +2045,7 @@ namespace VULKAN_HPP_NAMESPACE
const VkImageBlit * pRegions,
VkFilter filter ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdBlitImage(
- commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter );
+ return ::vkCmdBlitImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter );
}
void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer,
@@ -2180,20 +2068,14 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions );
}
- void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize dataSize,
- const void * pData ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void * pData ) const
+ VULKAN_HPP_NOEXCEPT
{
return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData );
}
- void vkCmdFillBuffer( VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize size,
- uint32_t data ) const VULKAN_HPP_NOEXCEPT
+ void
+ vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data );
}
@@ -2235,20 +2117,15 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t regionCount,
const VkImageResolve * pRegions ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdResolveImage(
- commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
+ return ::vkCmdResolveImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
}
- void vkCmdSetEvent( VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetEvent( commandBuffer, event, stageMask );
}
- void vkCmdResetEvent( VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdResetEvent( commandBuffer, event, stageMask );
}
@@ -2301,10 +2178,7 @@ namespace VULKAN_HPP_NAMESPACE
pImageMemoryBarriers );
}
- void vkCmdBeginQuery( VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- VkQueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags );
}
@@ -2314,10 +2188,7 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdEndQuery( commandBuffer, queryPool, query );
}
- void vkCmdResetQueryPool( VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount );
}
@@ -2339,8 +2210,7 @@ namespace VULKAN_HPP_NAMESPACE
VkDeviceSize stride,
VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdCopyQueryPoolResults(
- commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags );
+ return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags );
}
void vkCmdPushConstants( VkCommandBuffer commandBuffer,
@@ -2370,9 +2240,7 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdEndRenderPass( commandBuffer );
}
- void vkCmdExecuteCommands( VkCommandBuffer commandBuffer,
- uint32_t commandBufferCount,
- const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers );
}
@@ -2384,16 +2252,12 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkEnumerateInstanceVersion( pApiVersion );
}
- VkResult vkBindBufferMemory2( VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT
{
return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos );
}
- VkResult vkBindImageMemory2( VkDevice device,
- uint32_t bindInfoCount,
- const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT
{
return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos );
}
@@ -2404,8 +2268,7 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t remoteDeviceIndex,
VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetDeviceGroupPeerMemoryFeatures(
- device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures );
+ return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures );
}
void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT
@@ -2421,50 +2284,44 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t groupCountY,
uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdDispatchBase(
- commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+ return ::vkCmdDispatchBase( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
}
VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance,
uint32_t * pPhysicalDeviceGroupCount,
- VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const
- VULKAN_HPP_NOEXCEPT
+ VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties );
}
void vkGetImageMemoryRequirements2( VkDevice device,
const VkImageMemoryRequirementsInfo2 * pInfo,
- VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+ VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements );
}
void vkGetBufferMemoryRequirements2( VkDevice device,
const VkBufferMemoryRequirementsInfo2 * pInfo,
- VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+ VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements );
}
void vkGetImageSparseMemoryRequirements2( VkDevice device,
const VkImageSparseMemoryRequirementsInfo2 * pInfo,
- uint32_t * pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const
- VULKAN_HPP_NOEXCEPT
+ uint32_t * pSparseMemoryRequirementCount,
+ VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetImageSparseMemoryRequirements2(
- device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
+ return ::vkGetImageSparseMemoryRequirements2( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
}
- void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures );
}
- void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties );
}
@@ -2478,23 +2335,20 @@ namespace VULKAN_HPP_NAMESPACE
VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
- VkImageFormatProperties2 * pImageFormatProperties ) const
- VULKAN_HPP_NOEXCEPT
+ VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties );
}
void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice,
uint32_t * pQueueFamilyPropertyCount,
- VkQueueFamilyProperties2 * pQueueFamilyProperties ) const
- VULKAN_HPP_NOEXCEPT
+ VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceQueueFamilyProperties2(
- physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
+ return ::vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
}
- void vkGetPhysicalDeviceMemoryProperties2(
- VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice,
+ VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties );
}
@@ -2502,23 +2356,17 @@ namespace VULKAN_HPP_NAMESPACE
void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
uint32_t * pPropertyCount,
- VkSparseImageFormatProperties2 * pProperties ) const
- VULKAN_HPP_NOEXCEPT
+ VkSparseImageFormatProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceSparseImageFormatProperties2(
- physicalDevice, pFormatInfo, pPropertyCount, pProperties );
+ return ::vkGetPhysicalDeviceSparseImageFormatProperties2( physicalDevice, pFormatInfo, pPropertyCount, pProperties );
}
- void vkTrimCommandPool( VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
+ void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
{
return ::vkTrimCommandPool( device, commandPool, flags );
}
- void vkGetDeviceQueue2( VkDevice device,
- const VkDeviceQueueInfo2 * pQueueInfo,
- VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT
+ void vkGetDeviceQueue2( VkDevice device, const VkDeviceQueueInfo2 * pQueueInfo, VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue );
}
@@ -2526,7 +2374,7 @@ namespace VULKAN_HPP_NAMESPACE
VkResult vkCreateSamplerYcbcrConversion( VkDevice device,
const VkSamplerYcbcrConversionCreateInfo * pCreateInfo,
const VkAllocationCallbacks * pAllocator,
- VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT
+ VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion );
}
@@ -2541,8 +2389,7 @@ namespace VULKAN_HPP_NAMESPACE
VkResult vkCreateDescriptorUpdateTemplate( VkDevice device,
const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo,
const VkAllocationCallbacks * pAllocator,
- VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const
- VULKAN_HPP_NOEXCEPT
+ VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate );
}
@@ -2564,29 +2411,23 @@ namespace VULKAN_HPP_NAMESPACE
void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
- VkExternalBufferProperties * pExternalBufferProperties ) const
- VULKAN_HPP_NOEXCEPT
+ VkExternalBufferProperties * pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceExternalBufferProperties(
- physicalDevice, pExternalBufferInfo, pExternalBufferProperties );
+ return ::vkGetPhysicalDeviceExternalBufferProperties( physicalDevice, pExternalBufferInfo, pExternalBufferProperties );
}
void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
- VkExternalFenceProperties * pExternalFenceProperties ) const
- VULKAN_HPP_NOEXCEPT
+ VkExternalFenceProperties * pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceExternalFenceProperties(
- physicalDevice, pExternalFenceInfo, pExternalFenceProperties );
+ return ::vkGetPhysicalDeviceExternalFenceProperties( physicalDevice, pExternalFenceInfo, pExternalFenceProperties );
}
- void vkGetPhysicalDeviceExternalSemaphoreProperties(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
- VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice,
+ const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
+ VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceExternalSemaphoreProperties(
- physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties );
+ return ::vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties );
}
void vkGetDescriptorSetLayoutSupport( VkDevice device,
@@ -2606,8 +2447,7 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t maxDrawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdDrawIndirectCount(
- commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+ return ::vkCmdDrawIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
}
void vkCmdDrawIndexedIndirectCount( VkCommandBuffer commandBuffer,
@@ -2618,8 +2458,7 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t maxDrawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdDrawIndexedIndirectCount(
- commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+ return ::vkCmdDrawIndexedIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
}
VkResult vkCreateRenderPass2( VkDevice device,
@@ -2644,29 +2483,22 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdNextSubpass2( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo );
}
- void vkCmdEndRenderPass2( VkCommandBuffer commandBuffer,
- const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdEndRenderPass2( VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdEndRenderPass2( commandBuffer, pSubpassEndInfo );
}
- void vkResetQueryPool( VkDevice device,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
+ void vkResetQueryPool( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
{
return ::vkResetQueryPool( device, queryPool, firstQuery, queryCount );
}
- VkResult
- vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetSemaphoreCounterValue( device, semaphore, pValue );
}
- VkResult vkWaitSemaphores( VkDevice device,
- const VkSemaphoreWaitInfo * pWaitInfo,
- uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkWaitSemaphores( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
{
return ::vkWaitSemaphores( device, pWaitInfo, timeout );
}
@@ -2676,30 +2508,26 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkSignalSemaphore( device, pSignalInfo );
}
- VkDeviceAddress vkGetBufferDeviceAddress( VkDevice device,
- const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
+ VkDeviceAddress vkGetBufferDeviceAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetBufferDeviceAddress( device, pInfo );
}
- uint64_t vkGetBufferOpaqueCaptureAddress( VkDevice device,
- const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
+ uint64_t vkGetBufferOpaqueCaptureAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetBufferOpaqueCaptureAddress( device, pInfo );
}
- uint64_t vkGetDeviceMemoryOpaqueCaptureAddress(
- VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
+ uint64_t vkGetDeviceMemoryOpaqueCaptureAddress( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDeviceMemoryOpaqueCaptureAddress( device, pInfo );
}
//=== VK_VERSION_1_3 ===
- VkResult
- vkGetPhysicalDeviceToolProperties( VkPhysicalDevice physicalDevice,
- uint32_t * pToolCount,
- VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPhysicalDeviceToolProperties( VkPhysicalDevice physicalDevice,
+ uint32_t * pToolCount,
+ VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceToolProperties( physicalDevice, pToolCount, pToolProperties );
}
@@ -2712,41 +2540,29 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCreatePrivateDataSlot( device, pCreateInfo, pAllocator, pPrivateDataSlot );
}
- void vkDestroyPrivateDataSlot( VkDevice device,
- VkPrivateDataSlot privateDataSlot,
- const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void vkDestroyPrivateDataSlot( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyPrivateDataSlot( device, privateDataSlot, pAllocator );
}
- VkResult vkSetPrivateData( VkDevice device,
- VkObjectType objectType,
- uint64_t objectHandle,
- VkPrivateDataSlot privateDataSlot,
- uint64_t data ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkSetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data ) const
+ VULKAN_HPP_NOEXCEPT
{
return ::vkSetPrivateData( device, objectType, objectHandle, privateDataSlot, data );
}
- void vkGetPrivateData( VkDevice device,
- VkObjectType objectType,
- uint64_t objectHandle,
- VkPrivateDataSlot privateDataSlot,
- uint64_t * pData ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t * pData ) const
+ VULKAN_HPP_NOEXCEPT
{
return ::vkGetPrivateData( device, objectType, objectHandle, privateDataSlot, pData );
}
- void vkCmdSetEvent2( VkCommandBuffer commandBuffer,
- VkEvent event,
- const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetEvent2( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetEvent2( commandBuffer, event, pDependencyInfo );
}
- void vkCmdResetEvent2( VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdResetEvent2( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdResetEvent2( commandBuffer, event, stageMask );
}
@@ -2759,66 +2575,52 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdWaitEvents2( commandBuffer, eventCount, pEvents, pDependencyInfos );
}
- void vkCmdPipelineBarrier2( VkCommandBuffer commandBuffer,
- const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdPipelineBarrier2( VkCommandBuffer commandBuffer, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdPipelineBarrier2( commandBuffer, pDependencyInfo );
}
- void vkCmdWriteTimestamp2( VkCommandBuffer commandBuffer,
- VkPipelineStageFlags2 stage,
- VkQueryPool queryPool,
- uint32_t query ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdWriteTimestamp2( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdWriteTimestamp2( commandBuffer, stage, queryPool, query );
}
- VkResult vkQueueSubmit2( VkQueue queue,
- uint32_t submitCount,
- const VkSubmitInfo2 * pSubmits,
- VkFence fence ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkQueueSubmit2( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT
{
return ::vkQueueSubmit2( queue, submitCount, pSubmits, fence );
}
- void vkCmdCopyBuffer2( VkCommandBuffer commandBuffer,
- const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdCopyBuffer2( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdCopyBuffer2( commandBuffer, pCopyBufferInfo );
}
- void vkCmdCopyImage2( VkCommandBuffer commandBuffer,
- const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdCopyImage2( VkCommandBuffer commandBuffer, const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdCopyImage2( commandBuffer, pCopyImageInfo );
}
- void vkCmdCopyBufferToImage2( VkCommandBuffer commandBuffer,
- const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdCopyBufferToImage2( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdCopyBufferToImage2( commandBuffer, pCopyBufferToImageInfo );
}
- void vkCmdCopyImageToBuffer2( VkCommandBuffer commandBuffer,
- const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdCopyImageToBuffer2( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdCopyImageToBuffer2( commandBuffer, pCopyImageToBufferInfo );
}
- void vkCmdBlitImage2( VkCommandBuffer commandBuffer,
- const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdBlitImage2( VkCommandBuffer commandBuffer, const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBlitImage2( commandBuffer, pBlitImageInfo );
}
- void vkCmdResolveImage2( VkCommandBuffer commandBuffer,
- const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdResolveImage2( VkCommandBuffer commandBuffer, const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdResolveImage2( commandBuffer, pResolveImageInfo );
}
- void vkCmdBeginRendering( VkCommandBuffer commandBuffer,
- const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdBeginRendering( VkCommandBuffer commandBuffer, const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBeginRendering( commandBuffer, pRenderingInfo );
}
@@ -2838,22 +2640,17 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdSetFrontFace( commandBuffer, frontFace );
}
- void vkCmdSetPrimitiveTopology( VkCommandBuffer commandBuffer,
- VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetPrimitiveTopology( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetPrimitiveTopology( commandBuffer, primitiveTopology );
}
- void vkCmdSetViewportWithCount( VkCommandBuffer commandBuffer,
- uint32_t viewportCount,
- const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetViewportWithCount( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetViewportWithCount( commandBuffer, viewportCount, pViewports );
}
- void vkCmdSetScissorWithCount( VkCommandBuffer commandBuffer,
- uint32_t scissorCount,
- const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetScissorWithCount( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetScissorWithCount( commandBuffer, scissorCount, pScissors );
}
@@ -2866,8 +2663,7 @@ namespace VULKAN_HPP_NAMESPACE
const VkDeviceSize * pSizes,
const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdBindVertexBuffers2(
- commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides );
+ return ::vkCmdBindVertexBuffers2( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides );
}
void vkCmdSetDepthTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
@@ -2885,14 +2681,12 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdSetDepthCompareOp( commandBuffer, depthCompareOp );
}
- void vkCmdSetDepthBoundsTestEnable( VkCommandBuffer commandBuffer,
- VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetDepthBoundsTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetDepthBoundsTestEnable( commandBuffer, depthBoundsTestEnable );
}
- void vkCmdSetStencilTestEnable( VkCommandBuffer commandBuffer,
- VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetStencilTestEnable( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetStencilTestEnable( commandBuffer, stencilTestEnable );
}
@@ -2907,8 +2701,7 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdSetStencilOp( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp );
}
- void vkCmdSetRasterizerDiscardEnable( VkCommandBuffer commandBuffer,
- VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetRasterizerDiscardEnable( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetRasterizerDiscardEnable( commandBuffer, rasterizerDiscardEnable );
}
@@ -2918,41 +2711,36 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdSetDepthBiasEnable( commandBuffer, depthBiasEnable );
}
- void vkCmdSetPrimitiveRestartEnable( VkCommandBuffer commandBuffer,
- VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetPrimitiveRestartEnable( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetPrimitiveRestartEnable( commandBuffer, primitiveRestartEnable );
}
void vkGetDeviceBufferMemoryRequirements( VkDevice device,
const VkDeviceBufferMemoryRequirements * pInfo,
- VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+ VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDeviceBufferMemoryRequirements( device, pInfo, pMemoryRequirements );
}
void vkGetDeviceImageMemoryRequirements( VkDevice device,
const VkDeviceImageMemoryRequirements * pInfo,
- VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+ VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDeviceImageMemoryRequirements( device, pInfo, pMemoryRequirements );
}
void vkGetDeviceImageSparseMemoryRequirements( VkDevice device,
const VkDeviceImageMemoryRequirements * pInfo,
- uint32_t * pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const
- VULKAN_HPP_NOEXCEPT
+ uint32_t * pSparseMemoryRequirementCount,
+ VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetDeviceImageSparseMemoryRequirements(
- device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
+ return ::vkGetDeviceImageSparseMemoryRequirements( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
}
//=== VK_KHR_surface ===
- void vkDestroySurfaceKHR( VkInstance instance,
- VkSurfaceKHR surface,
- const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroySurfaceKHR( instance, surface, pAllocator );
}
@@ -2967,8 +2755,7 @@ namespace VULKAN_HPP_NAMESPACE
VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice,
VkSurfaceKHR surface,
- VkSurfaceCapabilitiesKHR * pSurfaceCapabilities ) const
- VULKAN_HPP_NOEXCEPT
+ VkSurfaceCapabilitiesKHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities );
}
@@ -2999,9 +2786,7 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain );
}
- void vkDestroySwapchainKHR( VkDevice device,
- VkSwapchainKHR swapchain,
- const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroySwapchainKHR( device, swapchain, pAllocator );
}
@@ -3014,12 +2799,8 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages );
}
- VkResult vkAcquireNextImageKHR( VkDevice device,
- VkSwapchainKHR swapchain,
- uint64_t timeout,
- VkSemaphore semaphore,
- VkFence fence,
- uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkAcquireNextImageKHR(
+ VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT
{
return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex );
}
@@ -3029,14 +2810,14 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkQueuePresentKHR( queue, pPresentInfo );
}
- VkResult vkGetDeviceGroupPresentCapabilitiesKHR(
- VkDevice device, VkDeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetDeviceGroupPresentCapabilitiesKHR( VkDevice device,
+ VkDeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities );
}
- VkResult vkGetDeviceGroupSurfacePresentModesKHR(
- VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT
+ VkResult
+ vkGetDeviceGroupSurfacePresentModesKHR( VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes );
}
@@ -3049,9 +2830,7 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects );
}
- VkResult vkAcquireNextImage2KHR( VkDevice device,
- const VkAcquireNextImageInfoKHR * pAcquireInfo,
- uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR * pAcquireInfo, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT
{
return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex );
}
@@ -3067,8 +2846,7 @@ namespace VULKAN_HPP_NAMESPACE
VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice,
uint32_t * pPropertyCount,
- VkDisplayPlanePropertiesKHR * pProperties ) const
- VULKAN_HPP_NOEXCEPT
+ VkDisplayPlanePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties );
}
@@ -3207,8 +2985,7 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
}
- VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT
+ VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex );
}
@@ -3219,7 +2996,7 @@ namespace VULKAN_HPP_NAMESPACE
VkResult vkCreateDebugReportCallbackEXT( VkInstance instance,
const VkDebugReportCallbackCreateInfoEXT * pCreateInfo,
const VkAllocationCallbacks * pAllocator,
- VkDebugReportCallbackEXT * pCallback ) const VULKAN_HPP_NOEXCEPT
+ VkDebugReportCallbackEXT * pCallback ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback );
}
@@ -3240,26 +3017,22 @@ namespace VULKAN_HPP_NAMESPACE
const char * pLayerPrefix,
const char * pMessage ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkDebugReportMessageEXT(
- instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage );
+ return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage );
}
//=== VK_EXT_debug_marker ===
- VkResult vkDebugMarkerSetObjectTagEXT( VkDevice device,
- const VkDebugMarkerObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkDebugMarkerSetObjectTagEXT( VkDevice device, const VkDebugMarkerObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo );
}
- VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device,
- const VkDebugMarkerObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device, const VkDebugMarkerObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo );
}
- void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer,
- const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdDebugMarkerBeginEXT( commandBuffer, pMarkerInfo );
}
@@ -3269,8 +3042,7 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdDebugMarkerEndEXT( commandBuffer );
}
- void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer,
- const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdDebugMarkerInsertEXT( commandBuffer, pMarkerInfo );
}
@@ -3278,21 +3050,19 @@ namespace VULKAN_HPP_NAMESPACE
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_video_queue ===
- VkResult vkGetPhysicalDeviceVideoCapabilitiesKHR( VkPhysicalDevice physicalDevice,
- const VkVideoProfileKHR * pVideoProfile,
- VkVideoCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPhysicalDeviceVideoCapabilitiesKHR( VkPhysicalDevice physicalDevice,
+ const VkVideoProfileInfoKHR * pVideoProfile,
+ VkVideoCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceVideoCapabilitiesKHR( physicalDevice, pVideoProfile, pCapabilities );
}
VkResult vkGetPhysicalDeviceVideoFormatPropertiesKHR( VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo,
- uint32_t * pVideoFormatPropertyCount,
- VkVideoFormatPropertiesKHR * pVideoFormatProperties ) const
- VULKAN_HPP_NOEXCEPT
+ uint32_t * pVideoFormatPropertyCount,
+ VkVideoFormatPropertiesKHR * pVideoFormatProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceVideoFormatPropertiesKHR(
- physicalDevice, pVideoFormatInfo, pVideoFormatPropertyCount, pVideoFormatProperties );
+ return ::vkGetPhysicalDeviceVideoFormatPropertiesKHR( physicalDevice, pVideoFormatInfo, pVideoFormatPropertyCount, pVideoFormatProperties );
}
VkResult vkCreateVideoSessionKHR( VkDevice device,
@@ -3303,46 +3073,38 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCreateVideoSessionKHR( device, pCreateInfo, pAllocator, pVideoSession );
}
- void vkDestroyVideoSessionKHR( VkDevice device,
- VkVideoSessionKHR videoSession,
- const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void vkDestroyVideoSessionKHR( VkDevice device, VkVideoSessionKHR videoSession, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyVideoSessionKHR( device, videoSession, pAllocator );
}
- VkResult vkGetVideoSessionMemoryRequirementsKHR(
- VkDevice device,
- VkVideoSessionKHR videoSession,
- uint32_t * pVideoSessionMemoryRequirementsCount,
- VkVideoGetMemoryPropertiesKHR * pVideoSessionMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetVideoSessionMemoryRequirementsKHR( VkDevice device,
+ VkVideoSessionKHR videoSession,
+ uint32_t * pMemoryRequirementsCount,
+ VkVideoSessionMemoryRequirementsKHR * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetVideoSessionMemoryRequirementsKHR(
- device, videoSession, pVideoSessionMemoryRequirementsCount, pVideoSessionMemoryRequirements );
+ return ::vkGetVideoSessionMemoryRequirementsKHR( device, videoSession, pMemoryRequirementsCount, pMemoryRequirements );
}
- VkResult
- vkBindVideoSessionMemoryKHR( VkDevice device,
- VkVideoSessionKHR videoSession,
- uint32_t videoSessionBindMemoryCount,
- const VkVideoBindMemoryKHR * pVideoSessionBindMemories ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkBindVideoSessionMemoryKHR( VkDevice device,
+ VkVideoSessionKHR videoSession,
+ uint32_t bindSessionMemoryInfoCount,
+ const VkBindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkBindVideoSessionMemoryKHR(
- device, videoSession, videoSessionBindMemoryCount, pVideoSessionBindMemories );
+ return ::vkBindVideoSessionMemoryKHR( device, videoSession, bindSessionMemoryInfoCount, pBindSessionMemoryInfos );
}
VkResult vkCreateVideoSessionParametersKHR( VkDevice device,
const VkVideoSessionParametersCreateInfoKHR * pCreateInfo,
const VkAllocationCallbacks * pAllocator,
- VkVideoSessionParametersKHR * pVideoSessionParameters ) const
- VULKAN_HPP_NOEXCEPT
+ VkVideoSessionParametersKHR * pVideoSessionParameters ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateVideoSessionParametersKHR( device, pCreateInfo, pAllocator, pVideoSessionParameters );
}
VkResult vkUpdateVideoSessionParametersKHR( VkDevice device,
VkVideoSessionParametersKHR videoSessionParameters,
- const VkVideoSessionParametersUpdateInfoKHR * pUpdateInfo ) const
- VULKAN_HPP_NOEXCEPT
+ const VkVideoSessionParametersUpdateInfoKHR * pUpdateInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkUpdateVideoSessionParametersKHR( device, videoSessionParameters, pUpdateInfo );
}
@@ -3354,20 +3116,17 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkDestroyVideoSessionParametersKHR( device, videoSessionParameters, pAllocator );
}
- void vkCmdBeginVideoCodingKHR( VkCommandBuffer commandBuffer,
- const VkVideoBeginCodingInfoKHR * pBeginInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdBeginVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoBeginCodingInfoKHR * pBeginInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBeginVideoCodingKHR( commandBuffer, pBeginInfo );
}
- void vkCmdEndVideoCodingKHR( VkCommandBuffer commandBuffer,
- const VkVideoEndCodingInfoKHR * pEndCodingInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdEndVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoEndCodingInfoKHR * pEndCodingInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdEndVideoCodingKHR( commandBuffer, pEndCodingInfo );
}
- void vkCmdControlVideoCodingKHR( VkCommandBuffer commandBuffer,
- const VkVideoCodingControlInfoKHR * pCodingControlInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdControlVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoCodingControlInfoKHR * pCodingControlInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdControlVideoCodingKHR( commandBuffer, pCodingControlInfo );
}
@@ -3376,10 +3135,9 @@ namespace VULKAN_HPP_NAMESPACE
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_video_decode_queue ===
- void vkCmdDecodeVideoKHR( VkCommandBuffer commandBuffer,
- const VkVideoDecodeInfoKHR * pFrameInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdDecodeVideoKHR( VkCommandBuffer commandBuffer, const VkVideoDecodeInfoKHR * pDecodeInfo ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdDecodeVideoKHR( commandBuffer, pFrameInfo );
+ return ::vkCmdDecodeVideoKHR( commandBuffer, pDecodeInfo );
}
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
@@ -3392,8 +3150,7 @@ namespace VULKAN_HPP_NAMESPACE
const VkDeviceSize * pOffsets,
const VkDeviceSize * pSizes ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdBindTransformFeedbackBuffersEXT(
- commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes );
+ return ::vkCmdBindTransformFeedbackBuffersEXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes );
}
void vkCmdBeginTransformFeedbackEXT( VkCommandBuffer commandBuffer,
@@ -3402,8 +3159,7 @@ namespace VULKAN_HPP_NAMESPACE
const VkBuffer * pCounterBuffers,
const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdBeginTransformFeedbackEXT(
- commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets );
+ return ::vkCmdBeginTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets );
}
void vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer,
@@ -3412,23 +3168,16 @@ namespace VULKAN_HPP_NAMESPACE
const VkBuffer * pCounterBuffers,
const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdEndTransformFeedbackEXT(
- commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets );
+ return ::vkCmdEndTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets );
}
- void vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- VkQueryControlFlags flags,
- uint32_t index ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index ) const
+ VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBeginQueryIndexedEXT( commandBuffer, queryPool, query, flags, index );
}
- void vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- uint32_t index ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdEndQueryIndexedEXT( commandBuffer, queryPool, query, index );
}
@@ -3441,8 +3190,7 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t counterOffset,
uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdDrawIndirectByteCountEXT(
- commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride );
+ return ::vkCmdDrawIndirectByteCountEXT( commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride );
}
//=== VK_NVX_binary_import ===
@@ -3463,37 +3211,29 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCreateCuFunctionNVX( device, pCreateInfo, pAllocator, pFunction );
}
- void vkDestroyCuModuleNVX( VkDevice device,
- VkCuModuleNVX module,
- const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void vkDestroyCuModuleNVX( VkDevice device, VkCuModuleNVX module, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyCuModuleNVX( device, module, pAllocator );
}
- void vkDestroyCuFunctionNVX( VkDevice device,
- VkCuFunctionNVX function,
- const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void vkDestroyCuFunctionNVX( VkDevice device, VkCuFunctionNVX function, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyCuFunctionNVX( device, function, pAllocator );
}
- void vkCmdCuLaunchKernelNVX( VkCommandBuffer commandBuffer,
- const VkCuLaunchInfoNVX * pLaunchInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdCuLaunchKernelNVX( VkCommandBuffer commandBuffer, const VkCuLaunchInfoNVX * pLaunchInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdCuLaunchKernelNVX( commandBuffer, pLaunchInfo );
}
//=== VK_NVX_image_view_handle ===
- uint32_t vkGetImageViewHandleNVX( VkDevice device,
- const VkImageViewHandleInfoNVX * pInfo ) const VULKAN_HPP_NOEXCEPT
+ uint32_t vkGetImageViewHandleNVX( VkDevice device, const VkImageViewHandleInfoNVX * pInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetImageViewHandleNVX( device, pInfo );
}
- VkResult vkGetImageViewAddressNVX( VkDevice device,
- VkImageView imageView,
- VkImageViewAddressPropertiesNVX * pProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetImageViewAddressNVX( VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX * pProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetImageViewAddressNVX( device, imageView, pProperties );
}
@@ -3508,8 +3248,7 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t maxDrawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdDrawIndirectCountAMD(
- commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+ return ::vkCmdDrawIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
}
void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer,
@@ -3520,8 +3259,7 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t maxDrawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdDrawIndexedIndirectCountAMD(
- commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+ return ::vkCmdDrawIndexedIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
}
//=== VK_AMD_shader_info ===
@@ -3538,8 +3276,7 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_dynamic_rendering ===
- void vkCmdBeginRenderingKHR( VkCommandBuffer commandBuffer,
- const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdBeginRenderingKHR( VkCommandBuffer commandBuffer, const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBeginRenderingKHR( commandBuffer, pRenderingInfo );
}
@@ -3555,7 +3292,7 @@ namespace VULKAN_HPP_NAMESPACE
VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance,
const VkStreamDescriptorSurfaceCreateInfoGGP * pCreateInfo,
const VkAllocationCallbacks * pAllocator,
- VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+ VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateStreamDescriptorSurfaceGGP( instance, pCreateInfo, pAllocator, pSurface );
}
@@ -3563,15 +3300,14 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_external_memory_capabilities ===
- VkResult vkGetPhysicalDeviceExternalImageFormatPropertiesNV(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkImageTiling tiling,
- VkImageUsageFlags usage,
- VkImageCreateFlags flags,
- VkExternalMemoryHandleTypeFlagsNV externalHandleType,
- VkExternalImageFormatPropertiesNV * pExternalImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPhysicalDeviceExternalImageFormatPropertiesNV( VkPhysicalDevice physicalDevice,
+ VkFormat format,
+ VkImageType type,
+ VkImageTiling tiling,
+ VkImageUsageFlags usage,
+ VkImageCreateFlags flags,
+ VkExternalMemoryHandleTypeFlagsNV externalHandleType,
+ VkExternalImageFormatPropertiesNV * pExternalImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV(
physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties );
@@ -3591,14 +3327,12 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_get_physical_device_properties2 ===
- void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures );
}
- void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties );
}
@@ -3612,48 +3346,41 @@ namespace VULKAN_HPP_NAMESPACE
VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
- VkImageFormatProperties2 * pImageFormatProperties ) const
- VULKAN_HPP_NOEXCEPT
+ VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties );
}
void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice,
uint32_t * pQueueFamilyPropertyCount,
- VkQueueFamilyProperties2 * pQueueFamilyProperties ) const
- VULKAN_HPP_NOEXCEPT
+ VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR(
- physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
+ return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
}
- void vkGetPhysicalDeviceMemoryProperties2KHR(
- VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice,
+ VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties );
}
- void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice,
+ void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
- uint32_t * pPropertyCount,
- VkSparseImageFormatProperties2 * pProperties ) const
- VULKAN_HPP_NOEXCEPT
+ uint32_t * pPropertyCount,
+ VkSparseImageFormatProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
- physicalDevice, pFormatInfo, pPropertyCount, pProperties );
+ return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( physicalDevice, pFormatInfo, pPropertyCount, pProperties );
}
//=== VK_KHR_device_group ===
- void
- vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device,
- uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT
+ void vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device,
+ uint32_t heapIndex,
+ uint32_t localDeviceIndex,
+ uint32_t remoteDeviceIndex,
+ VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetDeviceGroupPeerMemoryFeaturesKHR(
- device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures );
+ return ::vkGetDeviceGroupPeerMemoryFeaturesKHR( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures );
}
void vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT
@@ -3669,8 +3396,7 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t groupCountY,
uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdDispatchBaseKHR(
- commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+ return ::vkCmdDispatchBaseKHR( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
}
# if defined( VK_USE_PLATFORM_VI_NN )
@@ -3687,50 +3413,41 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_maintenance1 ===
- void vkTrimCommandPoolKHR( VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
+ void vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
{
return ::vkTrimCommandPoolKHR( device, commandPool, flags );
}
//=== VK_KHR_device_group_creation ===
- VkResult vkEnumeratePhysicalDeviceGroupsKHR(
- VkInstance instance,
- uint32_t * pPhysicalDeviceGroupCount,
- VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance,
+ uint32_t * pPhysicalDeviceGroupCount,
+ VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkEnumeratePhysicalDeviceGroupsKHR(
- instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties );
+ return ::vkEnumeratePhysicalDeviceGroupsKHR( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties );
}
//=== VK_KHR_external_memory_capabilities ===
void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
- VkExternalBufferProperties * pExternalBufferProperties ) const
- VULKAN_HPP_NOEXCEPT
+ VkExternalBufferProperties * pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR(
- physicalDevice, pExternalBufferInfo, pExternalBufferProperties );
+ return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( physicalDevice, pExternalBufferInfo, pExternalBufferProperties );
}
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_memory_win32 ===
- VkResult vkGetMemoryWin32HandleKHR( VkDevice device,
- const VkMemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo,
- HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetMemoryWin32HandleKHR( VkDevice device, const VkMemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetMemoryWin32HandleKHR( device, pGetWin32HandleInfo, pHandle );
}
- VkResult vkGetMemoryWin32HandlePropertiesKHR(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- HANDLE handle,
- VkMemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetMemoryWin32HandlePropertiesKHR( VkDevice device,
+ VkExternalMemoryHandleTypeFlagBits handleType,
+ HANDLE handle,
+ VkMemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetMemoryWin32HandlePropertiesKHR( device, handleType, handle, pMemoryWin32HandleProperties );
}
@@ -3738,8 +3455,7 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_external_memory_fd ===
- VkResult
- vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetMemoryFdKHR( device, pGetFdInfo, pFd );
}
@@ -3747,35 +3463,31 @@ namespace VULKAN_HPP_NAMESPACE
VkResult vkGetMemoryFdPropertiesKHR( VkDevice device,
VkExternalMemoryHandleTypeFlagBits handleType,
int fd,
- VkMemoryFdPropertiesKHR * pMemoryFdProperties ) const VULKAN_HPP_NOEXCEPT
+ VkMemoryFdPropertiesKHR * pMemoryFdProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetMemoryFdPropertiesKHR( device, handleType, fd, pMemoryFdProperties );
}
//=== VK_KHR_external_semaphore_capabilities ===
- void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
- VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice,
+ const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
+ VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(
- physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties );
+ return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties );
}
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_semaphore_win32 ===
- VkResult vkImportSemaphoreWin32HandleKHR(
- VkDevice device,
- const VkImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkImportSemaphoreWin32HandleKHR( VkDevice device,
+ const VkImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo );
}
- VkResult vkGetSemaphoreWin32HandleKHR( VkDevice device,
- const VkSemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo,
- HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT
+ VkResult
+ vkGetSemaphoreWin32HandleKHR( VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle );
}
@@ -3783,16 +3495,12 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_external_semaphore_fd ===
- VkResult
- vkImportSemaphoreFdKHR( VkDevice device,
- const VkImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo );
}
- VkResult vkGetSemaphoreFdKHR( VkDevice device,
- const VkSemaphoreGetFdInfoKHR * pGetFdInfo,
- int * pFd ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd );
}
@@ -3806,8 +3514,7 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t descriptorWriteCount,
const VkWriteDescriptorSet * pDescriptorWrites ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdPushDescriptorSetKHR(
- commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites );
+ return ::vkCmdPushDescriptorSetKHR( commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites );
}
void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer,
@@ -3821,9 +3528,8 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_conditional_rendering ===
- void vkCmdBeginConditionalRenderingEXT(
- VkCommandBuffer commandBuffer,
- const VkConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdBeginConditionalRenderingEXT( VkCommandBuffer commandBuffer,
+ const VkConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBeginConditionalRenderingEXT( commandBuffer, pConditionalRenderingBegin );
}
@@ -3838,8 +3544,7 @@ namespace VULKAN_HPP_NAMESPACE
VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device,
const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo,
const VkAllocationCallbacks * pAllocator,
- VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const
- VULKAN_HPP_NOEXCEPT
+ VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate );
}
@@ -3879,17 +3584,12 @@ namespace VULKAN_HPP_NAMESPACE
# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
//=== VK_EXT_acquire_xlib_display ===
- VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice,
- Display * dpy,
- VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, Display * dpy, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT
{
return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display );
}
- VkResult vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice,
- Display * dpy,
- RROutput rrOutput,
- VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, Display * dpy, RROutput rrOutput, VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay );
}
@@ -3899,17 +3599,14 @@ namespace VULKAN_HPP_NAMESPACE
VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice,
VkSurfaceKHR surface,
- VkSurfaceCapabilities2EXT * pSurfaceCapabilities ) const
- VULKAN_HPP_NOEXCEPT
+ VkSurfaceCapabilities2EXT * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities );
}
//=== VK_EXT_display_control ===
- VkResult vkDisplayPowerControlEXT( VkDevice device,
- VkDisplayKHR display,
- const VkDisplayPowerInfoEXT * pDisplayPowerInfo ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkDisplayPowerControlEXT( VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT * pDisplayPowerInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo );
}
@@ -3943,8 +3640,7 @@ namespace VULKAN_HPP_NAMESPACE
VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device,
VkSwapchainKHR swapchain,
- VkRefreshCycleDurationGOOGLE * pDisplayTimingProperties ) const
- VULKAN_HPP_NOEXCEPT
+ VkRefreshCycleDurationGOOGLE * pDisplayTimingProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties );
}
@@ -3952,8 +3648,7 @@ namespace VULKAN_HPP_NAMESPACE
VkResult vkGetPastPresentationTimingGOOGLE( VkDevice device,
VkSwapchainKHR swapchain,
uint32_t * pPresentationTimingCount,
- VkPastPresentationTimingGOOGLE * pPresentationTimings ) const
- VULKAN_HPP_NOEXCEPT
+ VkPastPresentationTimingGOOGLE * pPresentationTimings ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPastPresentationTimingGOOGLE( device, swapchain, pPresentationTimingCount, pPresentationTimings );
}
@@ -3965,8 +3660,7 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t discardRectangleCount,
const VkRect2D * pDiscardRectangles ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdSetDiscardRectangleEXT(
- commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles );
+ return ::vkCmdSetDiscardRectangleEXT( commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles );
}
//=== VK_EXT_hdr_metadata ===
@@ -4003,8 +3697,7 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdNextSubpass2KHR( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo );
}
- void vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer,
- const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdEndRenderPass2KHR( commandBuffer, pSubpassEndInfo );
}
@@ -4020,25 +3713,20 @@ namespace VULKAN_HPP_NAMESPACE
void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
- VkExternalFenceProperties * pExternalFenceProperties ) const
- VULKAN_HPP_NOEXCEPT
+ VkExternalFenceProperties * pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceExternalFencePropertiesKHR(
- physicalDevice, pExternalFenceInfo, pExternalFenceProperties );
+ return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( physicalDevice, pExternalFenceInfo, pExternalFenceProperties );
}
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_fence_win32 ===
- VkResult vkImportFenceWin32HandleKHR(
- VkDevice device, const VkImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkImportFenceWin32HandleKHR( VkDevice device, const VkImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo );
}
- VkResult vkGetFenceWin32HandleKHR( VkDevice device,
- const VkFenceGetWin32HandleInfoKHR * pGetWin32HandleInfo,
- HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetFenceWin32HandleKHR( VkDevice device, const VkFenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetFenceWin32HandleKHR( device, pGetWin32HandleInfo, pHandle );
}
@@ -4046,42 +3734,37 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_external_fence_fd ===
- VkResult vkImportFenceFdKHR( VkDevice device,
- const VkImportFenceFdInfoKHR * pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR * pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkImportFenceFdKHR( device, pImportFenceFdInfo );
}
- VkResult
- vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetFenceFdKHR( device, pGetFdInfo, pFd );
}
//=== VK_KHR_performance_query ===
- VkResult vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- uint32_t * pCounterCount,
- VkPerformanceCounterKHR * pCounters,
- VkPerformanceCounterDescriptionKHR * pCounterDescriptions ) const VULKAN_HPP_NOEXCEPT
+ VkResult
+ vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( VkPhysicalDevice physicalDevice,
+ uint32_t queueFamilyIndex,
+ uint32_t * pCounterCount,
+ VkPerformanceCounterKHR * pCounters,
+ VkPerformanceCounterDescriptionKHR * pCounterDescriptions ) const VULKAN_HPP_NOEXCEPT
{
return ::vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions );
}
- void vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
- VkPhysicalDevice physicalDevice,
- const VkQueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo,
- uint32_t * pNumPasses ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( VkPhysicalDevice physicalDevice,
+ const VkQueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo,
+ uint32_t * pNumPasses ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
- physicalDevice, pPerformanceQueryCreateInfo, pNumPasses );
+ return ::vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( physicalDevice, pPerformanceQueryCreateInfo, pNumPasses );
}
- VkResult vkAcquireProfilingLockKHR( VkDevice device,
- const VkAcquireProfilingLockInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkAcquireProfilingLockKHR( VkDevice device, const VkAcquireProfilingLockInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkAcquireProfilingLockKHR( device, pInfo );
}
@@ -4095,8 +3778,7 @@ namespace VULKAN_HPP_NAMESPACE
VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
- VkSurfaceCapabilities2KHR * pSurfaceCapabilities ) const
- VULKAN_HPP_NOEXCEPT
+ VkSurfaceCapabilities2KHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities );
}
@@ -4104,10 +3786,9 @@ namespace VULKAN_HPP_NAMESPACE
VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
uint32_t * pSurfaceFormatCount,
- VkSurfaceFormat2KHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT
+ VkSurfaceFormat2KHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceSurfaceFormats2KHR(
- physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats );
+ return ::vkGetPhysicalDeviceSurfaceFormats2KHR( physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats );
}
//=== VK_KHR_get_display_properties2 ===
@@ -4121,8 +3802,7 @@ namespace VULKAN_HPP_NAMESPACE
VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice,
uint32_t * pPropertyCount,
- VkDisplayPlaneProperties2KHR * pProperties ) const
- VULKAN_HPP_NOEXCEPT
+ VkDisplayPlaneProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties );
}
@@ -4135,10 +3815,9 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties );
}
- VkResult
- vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice,
- const VkDisplayPlaneInfo2KHR * pDisplayPlaneInfo,
- VkDisplayPlaneCapabilities2KHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice,
+ const VkDisplayPlaneInfo2KHR * pDisplayPlaneInfo,
+ VkDisplayPlaneCapabilities2KHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities );
}
@@ -4169,20 +3848,17 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_debug_utils ===
- VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device,
- const VkDebugUtilsObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, const VkDebugUtilsObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo );
}
- VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device,
- const VkDebugUtilsObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, const VkDebugUtilsObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo );
}
- void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue,
- const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkQueueBeginDebugUtilsLabelEXT( queue, pLabelInfo );
}
@@ -4192,14 +3868,12 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkQueueEndDebugUtilsLabelEXT( queue );
}
- void vkQueueInsertDebugUtilsLabelEXT( VkQueue queue,
- const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkQueueInsertDebugUtilsLabelEXT( queue, pLabelInfo );
}
- void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer,
- const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBeginDebugUtilsLabelEXT( commandBuffer, pLabelInfo );
}
@@ -4209,8 +3883,7 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdEndDebugUtilsLabelEXT( commandBuffer );
}
- void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer,
- const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdInsertDebugUtilsLabelEXT( commandBuffer, pLabelInfo );
}
@@ -4218,7 +3891,7 @@ namespace VULKAN_HPP_NAMESPACE
VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance,
const VkDebugUtilsMessengerCreateInfoEXT * pCreateInfo,
const VkAllocationCallbacks * pAllocator,
- VkDebugUtilsMessengerEXT * pMessenger ) const VULKAN_HPP_NOEXCEPT
+ VkDebugUtilsMessengerEXT * pMessenger ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger );
}
@@ -4233,8 +3906,7 @@ namespace VULKAN_HPP_NAMESPACE
void vkSubmitDebugUtilsMessageEXT( VkInstance instance,
VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
VkDebugUtilsMessageTypeFlagsEXT messageTypes,
- const VkDebugUtilsMessengerCallbackDataEXT * pCallbackData ) const
- VULKAN_HPP_NOEXCEPT
+ const VkDebugUtilsMessengerCallbackDataEXT * pCallbackData ) const VULKAN_HPP_NOEXCEPT
{
return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData );
}
@@ -4244,15 +3916,14 @@ namespace VULKAN_HPP_NAMESPACE
VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice device,
const struct AHardwareBuffer * buffer,
- VkAndroidHardwareBufferPropertiesANDROID * pProperties ) const
- VULKAN_HPP_NOEXCEPT
+ VkAndroidHardwareBufferPropertiesANDROID * pProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetAndroidHardwareBufferPropertiesANDROID( device, buffer, pProperties );
}
VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice device,
const VkMemoryGetAndroidHardwareBufferInfoANDROID * pInfo,
- struct AHardwareBuffer ** pBuffer ) const VULKAN_HPP_NOEXCEPT
+ struct AHardwareBuffer ** pBuffer ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetMemoryAndroidHardwareBufferANDROID( device, pInfo, pBuffer );
}
@@ -4260,16 +3931,14 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_sample_locations ===
- void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer,
- const VkSampleLocationsInfoEXT * pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT * pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo );
}
void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice,
VkSampleCountFlagBits samples,
- VkMultisamplePropertiesEXT * pMultisampleProperties ) const
- VULKAN_HPP_NOEXCEPT
+ VkMultisamplePropertiesEXT * pMultisampleProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties );
}
@@ -4278,35 +3947,32 @@ namespace VULKAN_HPP_NAMESPACE
void vkGetImageMemoryRequirements2KHR( VkDevice device,
const VkImageMemoryRequirementsInfo2 * pInfo,
- VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+ VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetImageMemoryRequirements2KHR( device, pInfo, pMemoryRequirements );
}
void vkGetBufferMemoryRequirements2KHR( VkDevice device,
const VkBufferMemoryRequirementsInfo2 * pInfo,
- VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+ VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements );
}
void vkGetImageSparseMemoryRequirements2KHR( VkDevice device,
const VkImageSparseMemoryRequirementsInfo2 * pInfo,
- uint32_t * pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const
- VULKAN_HPP_NOEXCEPT
+ uint32_t * pSparseMemoryRequirementCount,
+ VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetImageSparseMemoryRequirements2KHR(
- device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
+ return ::vkGetImageSparseMemoryRequirements2KHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
}
//=== VK_KHR_acceleration_structure ===
- VkResult
- vkCreateAccelerationStructureKHR( VkDevice device,
- const VkAccelerationStructureCreateInfoKHR * pCreateInfo,
- const VkAllocationCallbacks * pAllocator,
- VkAccelerationStructureKHR * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCreateAccelerationStructureKHR( VkDevice device,
+ const VkAccelerationStructureCreateInfoKHR * pCreateInfo,
+ const VkAllocationCallbacks * pAllocator,
+ VkAccelerationStructureKHR * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateAccelerationStructureKHR( device, pCreateInfo, pAllocator, pAccelerationStructure );
}
@@ -4318,11 +3984,10 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkDestroyAccelerationStructureKHR( device, accelerationStructure, pAllocator );
}
- void vkCmdBuildAccelerationStructuresKHR(
- VkCommandBuffer commandBuffer,
- uint32_t infoCount,
- const VkAccelerationStructureBuildGeometryInfoKHR * pInfos,
- const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdBuildAccelerationStructuresKHR( VkCommandBuffer commandBuffer,
+ uint32_t infoCount,
+ const VkAccelerationStructureBuildGeometryInfoKHR * pInfos,
+ const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBuildAccelerationStructuresKHR( commandBuffer, infoCount, pInfos, ppBuildRangeInfos );
}
@@ -4330,45 +3995,40 @@ namespace VULKAN_HPP_NAMESPACE
void vkCmdBuildAccelerationStructuresIndirectKHR( VkCommandBuffer commandBuffer,
uint32_t infoCount,
const VkAccelerationStructureBuildGeometryInfoKHR * pInfos,
- const VkDeviceAddress * pIndirectDeviceAddresses,
- const uint32_t * pIndirectStrides,
- const uint32_t * const * ppMaxPrimitiveCounts ) const
- VULKAN_HPP_NOEXCEPT
+ const VkDeviceAddress * pIndirectDeviceAddresses,
+ const uint32_t * pIndirectStrides,
+ const uint32_t * const * ppMaxPrimitiveCounts ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBuildAccelerationStructuresIndirectKHR(
commandBuffer, infoCount, pInfos, pIndirectDeviceAddresses, pIndirectStrides, ppMaxPrimitiveCounts );
}
- VkResult vkBuildAccelerationStructuresKHR(
- VkDevice device,
- VkDeferredOperationKHR deferredOperation,
- uint32_t infoCount,
- const VkAccelerationStructureBuildGeometryInfoKHR * pInfos,
- const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkBuildAccelerationStructuresKHR( VkDevice device,
+ VkDeferredOperationKHR deferredOperation,
+ uint32_t infoCount,
+ const VkAccelerationStructureBuildGeometryInfoKHR * pInfos,
+ const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT
{
return ::vkBuildAccelerationStructuresKHR( device, deferredOperation, infoCount, pInfos, ppBuildRangeInfos );
}
- VkResult
- vkCopyAccelerationStructureKHR( VkDevice device,
- VkDeferredOperationKHR deferredOperation,
- const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCopyAccelerationStructureKHR( VkDevice device,
+ VkDeferredOperationKHR deferredOperation,
+ const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCopyAccelerationStructureKHR( device, deferredOperation, pInfo );
}
- VkResult vkCopyAccelerationStructureToMemoryKHR( VkDevice device,
- VkDeferredOperationKHR deferredOperation,
- const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const
- VULKAN_HPP_NOEXCEPT
+ VkResult vkCopyAccelerationStructureToMemoryKHR( VkDevice device,
+ VkDeferredOperationKHR deferredOperation,
+ const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCopyAccelerationStructureToMemoryKHR( device, deferredOperation, pInfo );
}
- VkResult vkCopyMemoryToAccelerationStructureKHR( VkDevice device,
- VkDeferredOperationKHR deferredOperation,
- const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const
- VULKAN_HPP_NOEXCEPT
+ VkResult vkCopyMemoryToAccelerationStructureKHR( VkDevice device,
+ VkDeferredOperationKHR deferredOperation,
+ const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCopyMemoryToAccelerationStructureKHR( device, deferredOperation, pInfo );
}
@@ -4379,34 +4039,30 @@ namespace VULKAN_HPP_NAMESPACE
VkQueryType queryType,
size_t dataSize,
void * pData,
- size_t stride ) const VULKAN_HPP_NOEXCEPT
+ size_t stride ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkWriteAccelerationStructuresPropertiesKHR(
- device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride );
+ return ::vkWriteAccelerationStructuresPropertiesKHR( device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride );
}
- void vkCmdCopyAccelerationStructureKHR( VkCommandBuffer commandBuffer,
- const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdCopyAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdCopyAccelerationStructureKHR( commandBuffer, pInfo );
}
void vkCmdCopyAccelerationStructureToMemoryKHR( VkCommandBuffer commandBuffer,
- const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const
- VULKAN_HPP_NOEXCEPT
+ const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdCopyAccelerationStructureToMemoryKHR( commandBuffer, pInfo );
}
void vkCmdCopyMemoryToAccelerationStructureKHR( VkCommandBuffer commandBuffer,
- const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const
- VULKAN_HPP_NOEXCEPT
+ const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdCopyMemoryToAccelerationStructureKHR( commandBuffer, pInfo );
}
- VkDeviceAddress vkGetAccelerationStructureDeviceAddressKHR(
- VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
+ VkDeviceAddress vkGetAccelerationStructureDeviceAddressKHR( VkDevice device,
+ const VkAccelerationStructureDeviceAddressInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetAccelerationStructureDeviceAddressKHR( device, pInfo );
}
@@ -4416,16 +4072,15 @@ namespace VULKAN_HPP_NAMESPACE
const VkAccelerationStructureKHR * pAccelerationStructures,
VkQueryType queryType,
VkQueryPool queryPool,
- uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
+ uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdWriteAccelerationStructuresPropertiesKHR(
commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery );
}
- void vkGetDeviceAccelerationStructureCompatibilityKHR(
- VkDevice device,
- const VkAccelerationStructureVersionInfoKHR * pVersionInfo,
- VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT
+ void vkGetDeviceAccelerationStructureCompatibilityKHR( VkDevice device,
+ const VkAccelerationStructureVersionInfoKHR * pVersionInfo,
+ VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDeviceAccelerationStructureCompatibilityKHR( device, pVersionInfo, pCompatibility );
}
@@ -4433,9 +4088,8 @@ namespace VULKAN_HPP_NAMESPACE
void vkGetAccelerationStructureBuildSizesKHR( VkDevice device,
VkAccelerationStructureBuildTypeKHR buildType,
const VkAccelerationStructureBuildGeometryInfoKHR * pBuildInfo,
- const uint32_t * pMaxPrimitiveCounts,
- VkAccelerationStructureBuildSizesInfoKHR * pSizeInfo ) const
- VULKAN_HPP_NOEXCEPT
+ const uint32_t * pMaxPrimitiveCounts,
+ VkAccelerationStructureBuildSizesInfoKHR * pSizeInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetAccelerationStructureBuildSizesKHR( device, buildType, pBuildInfo, pMaxPrimitiveCounts, pSizeInfo );
}
@@ -4445,7 +4099,7 @@ namespace VULKAN_HPP_NAMESPACE
VkResult vkCreateSamplerYcbcrConversionKHR( VkDevice device,
const VkSamplerYcbcrConversionCreateInfo * pCreateInfo,
const VkAllocationCallbacks * pAllocator,
- VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT
+ VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateSamplerYcbcrConversionKHR( device, pCreateInfo, pAllocator, pYcbcrConversion );
}
@@ -4459,24 +4113,20 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_bind_memory2 ===
- VkResult vkBindBufferMemory2KHR( VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT
{
return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos );
}
- VkResult vkBindImageMemory2KHR( VkDevice device,
- uint32_t bindInfoCount,
- const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT
{
return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos );
}
//=== VK_EXT_image_drm_format_modifier ===
- VkResult vkGetImageDrmFormatModifierPropertiesEXT(
- VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT * pProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult
+ vkGetImageDrmFormatModifierPropertiesEXT( VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT * pProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetImageDrmFormatModifierPropertiesEXT( device, image, pProperties );
}
@@ -4486,14 +4136,13 @@ namespace VULKAN_HPP_NAMESPACE
VkResult vkCreateValidationCacheEXT( VkDevice device,
const VkValidationCacheCreateInfoEXT * pCreateInfo,
const VkAllocationCallbacks * pAllocator,
- VkValidationCacheEXT * pValidationCache ) const VULKAN_HPP_NOEXCEPT
+ VkValidationCacheEXT * pValidationCache ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache );
}
- void vkDestroyValidationCacheEXT( VkDevice device,
- VkValidationCacheEXT validationCache,
- const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void
+ vkDestroyValidationCacheEXT( VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyValidationCacheEXT( device, validationCache, pAllocator );
}
@@ -4506,19 +4155,14 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches );
}
- VkResult vkGetValidationCacheDataEXT( VkDevice device,
- VkValidationCacheEXT validationCache,
- size_t * pDataSize,
- void * pData ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData );
}
//=== VK_NV_shading_rate_image ===
- void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer,
- VkImageView imageView,
- VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBindShadingRateImageNV( commandBuffer, imageView, imageLayout );
}
@@ -4526,30 +4170,25 @@ namespace VULKAN_HPP_NAMESPACE
void vkCmdSetViewportShadingRatePaletteNV( VkCommandBuffer commandBuffer,
uint32_t firstViewport,
uint32_t viewportCount,
- const VkShadingRatePaletteNV * pShadingRatePalettes ) const
- VULKAN_HPP_NOEXCEPT
+ const VkShadingRatePaletteNV * pShadingRatePalettes ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdSetViewportShadingRatePaletteNV(
- commandBuffer, firstViewport, viewportCount, pShadingRatePalettes );
+ return ::vkCmdSetViewportShadingRatePaletteNV( commandBuffer, firstViewport, viewportCount, pShadingRatePalettes );
}
- void
- vkCmdSetCoarseSampleOrderNV( VkCommandBuffer commandBuffer,
- VkCoarseSampleOrderTypeNV sampleOrderType,
- uint32_t customSampleOrderCount,
- const VkCoarseSampleOrderCustomNV * pCustomSampleOrders ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetCoarseSampleOrderNV( VkCommandBuffer commandBuffer,
+ VkCoarseSampleOrderTypeNV sampleOrderType,
+ uint32_t customSampleOrderCount,
+ const VkCoarseSampleOrderCustomNV * pCustomSampleOrders ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdSetCoarseSampleOrderNV(
- commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders );
+ return ::vkCmdSetCoarseSampleOrderNV( commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders );
}
//=== VK_NV_ray_tracing ===
- VkResult
- vkCreateAccelerationStructureNV( VkDevice device,
- const VkAccelerationStructureCreateInfoNV * pCreateInfo,
- const VkAllocationCallbacks * pAllocator,
- VkAccelerationStructureNV * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCreateAccelerationStructureNV( VkDevice device,
+ const VkAccelerationStructureCreateInfoNV * pCreateInfo,
+ const VkAllocationCallbacks * pAllocator,
+ VkAccelerationStructureNV * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure );
}
@@ -4563,16 +4202,14 @@ namespace VULKAN_HPP_NAMESPACE
void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice device,
const VkAccelerationStructureMemoryRequirementsInfoNV * pInfo,
- VkMemoryRequirements2KHR * pMemoryRequirements ) const
- VULKAN_HPP_NOEXCEPT
+ VkMemoryRequirements2KHR * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements );
}
VkResult vkBindAccelerationStructureMemoryNV( VkDevice device,
uint32_t bindInfoCount,
- const VkBindAccelerationStructureMemoryInfoNV * pBindInfos ) const
- VULKAN_HPP_NOEXCEPT
+ const VkBindAccelerationStructureMemoryInfoNV * pBindInfos ) const VULKAN_HPP_NOEXCEPT
{
return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos );
}
@@ -4585,10 +4222,9 @@ namespace VULKAN_HPP_NAMESPACE
VkAccelerationStructureNV dst,
VkAccelerationStructureNV src,
VkBuffer scratch,
- VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT
+ VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdBuildAccelerationStructureNV(
- commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset );
+ return ::vkCmdBuildAccelerationStructureNV( commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset );
}
void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer,
@@ -4637,18 +4273,13 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t createInfoCount,
const VkRayTracingPipelineCreateInfoNV * pCreateInfos,
const VkAllocationCallbacks * pAllocator,
- VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT
+ VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateRayTracingPipelinesNV(
- device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
+ return ::vkCreateRayTracingPipelinesNV( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
}
- VkResult vkGetRayTracingShaderGroupHandlesNV( VkDevice device,
- VkPipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- void * pData ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetRayTracingShaderGroupHandlesNV(
+ VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData );
}
@@ -4666,7 +4297,7 @@ namespace VULKAN_HPP_NAMESPACE
const VkAccelerationStructureNV * pAccelerationStructures,
VkQueryType queryType,
VkQueryPool queryPool,
- uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
+ uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdWriteAccelerationStructuresPropertiesNV(
commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery );
@@ -4681,7 +4312,7 @@ namespace VULKAN_HPP_NAMESPACE
void vkGetDescriptorSetLayoutSupportKHR( VkDevice device,
const VkDescriptorSetLayoutCreateInfo * pCreateInfo,
- VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT
+ VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport );
}
@@ -4696,8 +4327,7 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t maxDrawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdDrawIndirectCountKHR(
- commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+ return ::vkCmdDrawIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
}
void vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer,
@@ -4708,17 +4338,15 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t maxDrawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdDrawIndexedIndirectCountKHR(
- commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+ return ::vkCmdDrawIndexedIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
}
//=== VK_EXT_external_memory_host ===
- VkResult vkGetMemoryHostPointerPropertiesEXT(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- const void * pHostPointer,
- VkMemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetMemoryHostPointerPropertiesEXT( VkDevice device,
+ VkExternalMemoryHandleTypeFlagBits handleType,
+ const void * pHostPointer,
+ VkMemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetMemoryHostPointerPropertiesEXT( device, handleType, pHostPointer, pMemoryHostPointerProperties );
}
@@ -4747,25 +4375,20 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t timestampCount,
const VkCalibratedTimestampInfoEXT * pTimestampInfos,
uint64_t * pTimestamps,
- uint64_t * pMaxDeviation ) const VULKAN_HPP_NOEXCEPT
+ uint64_t * pMaxDeviation ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation );
}
//=== VK_NV_mesh_shader ===
- void vkCmdDrawMeshTasksNV( VkCommandBuffer commandBuffer,
- uint32_t taskCount,
- uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdDrawMeshTasksNV( VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdDrawMeshTasksNV( commandBuffer, taskCount, firstTask );
}
- void vkCmdDrawMeshTasksIndirectNV( VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdDrawMeshTasksIndirectNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const
+    VULKAN_HPP_NOEXCEPT
{
return ::vkCmdDrawMeshTasksIndirectNV( commandBuffer, buffer, offset, drawCount, stride );
}
@@ -4778,8 +4401,7 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t maxDrawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdDrawMeshTasksIndirectCountNV(
- commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+ return ::vkCmdDrawMeshTasksIndirectCountNV( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
}
//=== VK_NV_scissor_exclusive ===
@@ -4789,8 +4411,7 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t exclusiveScissorCount,
const VkRect2D * pExclusiveScissors ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdSetExclusiveScissorNV(
- commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors );
+ return ::vkCmdSetExclusiveScissorNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors );
}
//=== VK_NV_device_diagnostic_checkpoints ===
@@ -4800,39 +4421,31 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdSetCheckpointNV( commandBuffer, pCheckpointMarker );
}
- void vkGetQueueCheckpointDataNV( VkQueue queue,
- uint32_t * pCheckpointDataCount,
- VkCheckpointDataNV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT
+ void vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t * pCheckpointDataCount, VkCheckpointDataNV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData );
}
//=== VK_KHR_timeline_semaphore ===
- VkResult vkGetSemaphoreCounterValueKHR( VkDevice device,
- VkSemaphore semaphore,
- uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetSemaphoreCounterValueKHR( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetSemaphoreCounterValueKHR( device, semaphore, pValue );
}
- VkResult vkWaitSemaphoresKHR( VkDevice device,
- const VkSemaphoreWaitInfo * pWaitInfo,
- uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkWaitSemaphoresKHR( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
{
return ::vkWaitSemaphoresKHR( device, pWaitInfo, timeout );
}
- VkResult vkSignalSemaphoreKHR( VkDevice device,
- const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkSignalSemaphoreKHR( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkSignalSemaphoreKHR( device, pSignalInfo );
}
//=== VK_INTEL_performance_query ===
- VkResult vkInitializePerformanceApiINTEL(
- VkDevice device, const VkInitializePerformanceApiInfoINTEL * pInitializeInfo ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkInitializePerformanceApiINTEL( VkDevice device, const VkInitializePerformanceApiInfoINTEL * pInitializeInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkInitializePerformanceApiINTEL( device, pInitializeInfo );
}
@@ -4842,60 +4455,48 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkUninitializePerformanceApiINTEL( device );
}
- VkResult
- vkCmdSetPerformanceMarkerINTEL( VkCommandBuffer commandBuffer,
- const VkPerformanceMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCmdSetPerformanceMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetPerformanceMarkerINTEL( commandBuffer, pMarkerInfo );
}
- VkResult vkCmdSetPerformanceStreamMarkerINTEL(
- VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCmdSetPerformanceStreamMarkerINTEL( VkCommandBuffer commandBuffer,
+ const VkPerformanceStreamMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetPerformanceStreamMarkerINTEL( commandBuffer, pMarkerInfo );
}
- VkResult
- vkCmdSetPerformanceOverrideINTEL( VkCommandBuffer commandBuffer,
- const VkPerformanceOverrideInfoINTEL * pOverrideInfo ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCmdSetPerformanceOverrideINTEL( VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL * pOverrideInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetPerformanceOverrideINTEL( commandBuffer, pOverrideInfo );
}
VkResult vkAcquirePerformanceConfigurationINTEL( VkDevice device,
const VkPerformanceConfigurationAcquireInfoINTEL * pAcquireInfo,
- VkPerformanceConfigurationINTEL * pConfiguration ) const
- VULKAN_HPP_NOEXCEPT
+ VkPerformanceConfigurationINTEL * pConfiguration ) const VULKAN_HPP_NOEXCEPT
{
return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration );
}
- VkResult
- vkReleasePerformanceConfigurationINTEL( VkDevice device,
- VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkReleasePerformanceConfigurationINTEL( VkDevice device, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT
{
return ::vkReleasePerformanceConfigurationINTEL( device, configuration );
}
- VkResult
- vkQueueSetPerformanceConfigurationINTEL( VkQueue queue,
- VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkQueueSetPerformanceConfigurationINTEL( VkQueue queue, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT
{
return ::vkQueueSetPerformanceConfigurationINTEL( queue, configuration );
}
- VkResult vkGetPerformanceParameterINTEL( VkDevice device,
- VkPerformanceParameterTypeINTEL parameter,
- VkPerformanceValueINTEL * pValue ) const VULKAN_HPP_NOEXCEPT
+ VkResult
+ vkGetPerformanceParameterINTEL( VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL * pValue ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPerformanceParameterINTEL( device, parameter, pValue );
}
//=== VK_AMD_display_native_hdr ===
- void vkSetLocalDimmingAMD( VkDevice device,
- VkSwapchainKHR swapChain,
- VkBool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT
+ void vkSetLocalDimmingAMD( VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT
{
return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable );
}
@@ -4906,7 +4507,7 @@ namespace VULKAN_HPP_NAMESPACE
VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance,
const VkImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo,
const VkAllocationCallbacks * pAllocator,
- VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+ VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface );
}
@@ -4926,47 +4527,39 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_fragment_shading_rate ===
- VkResult vkGetPhysicalDeviceFragmentShadingRatesKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t * pFragmentShadingRateCount,
- VkPhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPhysicalDeviceFragmentShadingRatesKHR( VkPhysicalDevice physicalDevice,
+ uint32_t * pFragmentShadingRateCount,
+ VkPhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceFragmentShadingRatesKHR(
- physicalDevice, pFragmentShadingRateCount, pFragmentShadingRates );
+ return ::vkGetPhysicalDeviceFragmentShadingRatesKHR( physicalDevice, pFragmentShadingRateCount, pFragmentShadingRates );
}
void vkCmdSetFragmentShadingRateKHR( VkCommandBuffer commandBuffer,
const VkExtent2D * pFragmentSize,
- const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const
- VULKAN_HPP_NOEXCEPT
+ const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetFragmentShadingRateKHR( commandBuffer, pFragmentSize, combinerOps );
}
//=== VK_EXT_buffer_device_address ===
- VkDeviceAddress vkGetBufferDeviceAddressEXT( VkDevice device,
- const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
+ VkDeviceAddress vkGetBufferDeviceAddressEXT( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetBufferDeviceAddressEXT( device, pInfo );
}
//=== VK_EXT_tooling_info ===
- VkResult
- vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice physicalDevice,
- uint32_t * pToolCount,
- VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice physicalDevice,
+ uint32_t * pToolCount,
+ VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceToolPropertiesEXT( physicalDevice, pToolCount, pToolProperties );
}
//=== VK_KHR_present_wait ===
- VkResult vkWaitForPresentKHR( VkDevice device,
- VkSwapchainKHR swapchain,
- uint64_t presentId,
- uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkWaitForPresentKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t presentId, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
{
return ::vkWaitForPresentKHR( device, swapchain, presentId, timeout );
}
@@ -4975,8 +4568,7 @@ namespace VULKAN_HPP_NAMESPACE
VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDevice physicalDevice,
uint32_t * pPropertyCount,
- VkCooperativeMatrixPropertiesNV * pProperties ) const
- VULKAN_HPP_NOEXCEPT
+ VkCooperativeMatrixPropertiesNV * pProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, pPropertyCount, pProperties );
}
@@ -4984,12 +4576,9 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_coverage_reduction_mode ===
VkResult vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
- VkPhysicalDevice physicalDevice,
- uint32_t * pCombinationCount,
- VkFramebufferMixedSamplesCombinationNV * pCombinations ) const VULKAN_HPP_NOEXCEPT
+ VkPhysicalDevice physicalDevice, uint32_t * pCombinationCount, VkFramebufferMixedSamplesCombinationNV * pCombinations ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
- physicalDevice, pCombinationCount, pCombinations );
+ return ::vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( physicalDevice, pCombinationCount, pCombinations );
}
# if defined( VK_USE_PLATFORM_WIN32_KHR )
@@ -4998,10 +4587,9 @@ namespace VULKAN_HPP_NAMESPACE
VkResult vkGetPhysicalDeviceSurfacePresentModes2EXT( VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
uint32_t * pPresentModeCount,
- VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT
+ VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPhysicalDeviceSurfacePresentModes2EXT(
- physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes );
+ return ::vkGetPhysicalDeviceSurfacePresentModes2EXT( physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes );
}
VkResult vkAcquireFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT
@@ -5014,10 +4602,9 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkReleaseFullScreenExclusiveModeEXT( device, swapchain );
}
- VkResult
- vkGetDeviceGroupSurfacePresentModes2EXT( VkDevice device,
- const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
- VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetDeviceGroupSurfacePresentModes2EXT( VkDevice device,
+ const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
+ VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDeviceGroupSurfacePresentModes2EXT( device, pSurfaceInfo, pModes );
}
@@ -5035,39 +4622,31 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_buffer_device_address ===
- VkDeviceAddress vkGetBufferDeviceAddressKHR( VkDevice device,
- const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
+ VkDeviceAddress vkGetBufferDeviceAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetBufferDeviceAddressKHR( device, pInfo );
}
- uint64_t vkGetBufferOpaqueCaptureAddressKHR( VkDevice device,
- const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
+ uint64_t vkGetBufferOpaqueCaptureAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetBufferOpaqueCaptureAddressKHR( device, pInfo );
}
- uint64_t vkGetDeviceMemoryOpaqueCaptureAddressKHR(
- VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
+ uint64_t vkGetDeviceMemoryOpaqueCaptureAddressKHR( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDeviceMemoryOpaqueCaptureAddressKHR( device, pInfo );
}
//=== VK_EXT_line_rasterization ===
- void vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer,
- uint32_t lineStippleFactor,
- uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetLineStippleEXT( commandBuffer, lineStippleFactor, lineStipplePattern );
}
//=== VK_EXT_host_query_reset ===
- void vkResetQueryPoolEXT( VkDevice device,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
+ void vkResetQueryPoolEXT( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
{
return ::vkResetQueryPoolEXT( device, queryPool, firstQuery, queryCount );
}
@@ -5084,22 +4663,17 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdSetFrontFaceEXT( commandBuffer, frontFace );
}
- void vkCmdSetPrimitiveTopologyEXT( VkCommandBuffer commandBuffer,
- VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetPrimitiveTopologyEXT( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetPrimitiveTopologyEXT( commandBuffer, primitiveTopology );
}
- void vkCmdSetViewportWithCountEXT( VkCommandBuffer commandBuffer,
- uint32_t viewportCount,
- const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetViewportWithCountEXT( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetViewportWithCountEXT( commandBuffer, viewportCount, pViewports );
}
- void vkCmdSetScissorWithCountEXT( VkCommandBuffer commandBuffer,
- uint32_t scissorCount,
- const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetScissorWithCountEXT( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetScissorWithCountEXT( commandBuffer, scissorCount, pScissors );
}
@@ -5112,8 +4686,7 @@ namespace VULKAN_HPP_NAMESPACE
const VkDeviceSize * pSizes,
const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdBindVertexBuffers2EXT(
- commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides );
+ return ::vkCmdBindVertexBuffers2EXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides );
}
void vkCmdSetDepthTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
@@ -5121,26 +4694,22 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdSetDepthTestEnableEXT( commandBuffer, depthTestEnable );
}
- void vkCmdSetDepthWriteEnableEXT( VkCommandBuffer commandBuffer,
- VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetDepthWriteEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetDepthWriteEnableEXT( commandBuffer, depthWriteEnable );
}
- void vkCmdSetDepthCompareOpEXT( VkCommandBuffer commandBuffer,
- VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetDepthCompareOpEXT( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetDepthCompareOpEXT( commandBuffer, depthCompareOp );
}
- void vkCmdSetDepthBoundsTestEnableEXT( VkCommandBuffer commandBuffer,
- VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetDepthBoundsTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetDepthBoundsTestEnableEXT( commandBuffer, depthBoundsTestEnable );
}
- void vkCmdSetStencilTestEnableEXT( VkCommandBuffer commandBuffer,
- VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetStencilTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetStencilTestEnableEXT( commandBuffer, stencilTestEnable );
}
@@ -5164,21 +4733,17 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCreateDeferredOperationKHR( device, pAllocator, pDeferredOperation );
}
- void vkDestroyDeferredOperationKHR( VkDevice device,
- VkDeferredOperationKHR operation,
- const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void vkDestroyDeferredOperationKHR( VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyDeferredOperationKHR( device, operation, pAllocator );
}
- uint32_t vkGetDeferredOperationMaxConcurrencyKHR( VkDevice device,
- VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT
+ uint32_t vkGetDeferredOperationMaxConcurrencyKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDeferredOperationMaxConcurrencyKHR( device, operation );
}
- VkResult vkGetDeferredOperationResultKHR( VkDevice device,
- VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetDeferredOperationResultKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDeferredOperationResultKHR( device, operation );
}
@@ -5190,55 +4755,48 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_pipeline_executable_properties ===
- VkResult
- vkGetPipelineExecutablePropertiesKHR( VkDevice device,
- const VkPipelineInfoKHR * pPipelineInfo,
- uint32_t * pExecutableCount,
- VkPipelineExecutablePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPipelineExecutablePropertiesKHR( VkDevice device,
+ const VkPipelineInfoKHR * pPipelineInfo,
+ uint32_t * pExecutableCount,
+ VkPipelineExecutablePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPipelineExecutablePropertiesKHR( device, pPipelineInfo, pExecutableCount, pProperties );
}
- VkResult
- vkGetPipelineExecutableStatisticsKHR( VkDevice device,
- const VkPipelineExecutableInfoKHR * pExecutableInfo,
- uint32_t * pStatisticCount,
- VkPipelineExecutableStatisticKHR * pStatistics ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetPipelineExecutableStatisticsKHR( VkDevice device,
+ const VkPipelineExecutableInfoKHR * pExecutableInfo,
+ uint32_t * pStatisticCount,
+ VkPipelineExecutableStatisticKHR * pStatistics ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPipelineExecutableStatisticsKHR( device, pExecutableInfo, pStatisticCount, pStatistics );
}
- VkResult vkGetPipelineExecutableInternalRepresentationsKHR(
- VkDevice device,
- const VkPipelineExecutableInfoKHR * pExecutableInfo,
- uint32_t * pInternalRepresentationCount,
- VkPipelineExecutableInternalRepresentationKHR * pInternalRepresentations ) const VULKAN_HPP_NOEXCEPT
+ VkResult
+ vkGetPipelineExecutableInternalRepresentationsKHR( VkDevice device,
+ const VkPipelineExecutableInfoKHR * pExecutableInfo,
+ uint32_t * pInternalRepresentationCount,
+ VkPipelineExecutableInternalRepresentationKHR * pInternalRepresentations ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetPipelineExecutableInternalRepresentationsKHR(
- device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations );
+ return ::vkGetPipelineExecutableInternalRepresentationsKHR( device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations );
}
//=== VK_NV_device_generated_commands ===
void vkGetGeneratedCommandsMemoryRequirementsNV( VkDevice device,
const VkGeneratedCommandsMemoryRequirementsInfoNV * pInfo,
- VkMemoryRequirements2 * pMemoryRequirements ) const
- VULKAN_HPP_NOEXCEPT
+ VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetGeneratedCommandsMemoryRequirementsNV( device, pInfo, pMemoryRequirements );
}
- void vkCmdPreprocessGeneratedCommandsNV( VkCommandBuffer commandBuffer,
- const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const
- VULKAN_HPP_NOEXCEPT
+ void vkCmdPreprocessGeneratedCommandsNV( VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdPreprocessGeneratedCommandsNV( commandBuffer, pGeneratedCommandsInfo );
}
void vkCmdExecuteGeneratedCommandsNV( VkCommandBuffer commandBuffer,
VkBool32 isPreprocessed,
- const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const
- VULKAN_HPP_NOEXCEPT
+ const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdExecuteGeneratedCommandsNV( commandBuffer, isPreprocessed, pGeneratedCommandsInfo );
}
@@ -5251,11 +4809,10 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdBindPipelineShaderGroupNV( commandBuffer, pipelineBindPoint, pipeline, groupIndex );
}
- VkResult
- vkCreateIndirectCommandsLayoutNV( VkDevice device,
- const VkIndirectCommandsLayoutCreateInfoNV * pCreateInfo,
- const VkAllocationCallbacks * pAllocator,
- VkIndirectCommandsLayoutNV * pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCreateIndirectCommandsLayoutNV( VkDevice device,
+ const VkIndirectCommandsLayoutCreateInfoNV * pCreateInfo,
+ const VkAllocationCallbacks * pAllocator,
+ VkIndirectCommandsLayoutNV * pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateIndirectCommandsLayoutNV( device, pCreateInfo, pAllocator, pIndirectCommandsLayout );
}
@@ -5269,17 +4826,12 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_acquire_drm_display ===
- VkResult vkAcquireDrmDisplayEXT( VkPhysicalDevice physicalDevice,
- int32_t drmFd,
- VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkAcquireDrmDisplayEXT( VkPhysicalDevice physicalDevice, int32_t drmFd, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT
{
return ::vkAcquireDrmDisplayEXT( physicalDevice, drmFd, display );
}
- VkResult vkGetDrmDisplayEXT( VkPhysicalDevice physicalDevice,
- int32_t drmFd,
- uint32_t connectorId,
- VkDisplayKHR * display ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetDrmDisplayEXT( VkPhysicalDevice physicalDevice, int32_t drmFd, uint32_t connectorId, VkDisplayKHR * display ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDrmDisplayEXT( physicalDevice, drmFd, connectorId, display );
}
@@ -5289,32 +4841,24 @@ namespace VULKAN_HPP_NAMESPACE
VkResult vkCreatePrivateDataSlotEXT( VkDevice device,
const VkPrivateDataSlotCreateInfo * pCreateInfo,
const VkAllocationCallbacks * pAllocator,
- VkPrivateDataSlot * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT
+ VkPrivateDataSlot * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreatePrivateDataSlotEXT( device, pCreateInfo, pAllocator, pPrivateDataSlot );
}
- void vkDestroyPrivateDataSlotEXT( VkDevice device,
- VkPrivateDataSlot privateDataSlot,
- const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ void vkDestroyPrivateDataSlotEXT( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyPrivateDataSlotEXT( device, privateDataSlot, pAllocator );
}
- VkResult vkSetPrivateDataEXT( VkDevice device,
- VkObjectType objectType,
- uint64_t objectHandle,
- VkPrivateDataSlot privateDataSlot,
- uint64_t data ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkSetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data ) const
+ VULKAN_HPP_NOEXCEPT
{
return ::vkSetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, data );
}
- void vkGetPrivateDataEXT( VkDevice device,
- VkObjectType objectType,
- uint64_t objectHandle,
- VkPrivateDataSlot privateDataSlot,
- uint64_t * pData ) const VULKAN_HPP_NOEXCEPT
+ void vkGetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t * pData ) const
+ VULKAN_HPP_NOEXCEPT
{
return ::vkGetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, pData );
}
@@ -5322,25 +4866,29 @@ namespace VULKAN_HPP_NAMESPACE
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_video_encode_queue ===
- void vkCmdEncodeVideoKHR( VkCommandBuffer commandBuffer,
- const VkVideoEncodeInfoKHR * pEncodeInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdEncodeVideoKHR( VkCommandBuffer commandBuffer, const VkVideoEncodeInfoKHR * pEncodeInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdEncodeVideoKHR( commandBuffer, pEncodeInfo );
}
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+# if defined( VK_USE_PLATFORM_METAL_EXT )
+ //=== VK_EXT_metal_objects ===
+
+ void vkExportMetalObjectsEXT( VkDevice device, VkExportMetalObjectsInfoEXT * pMetalObjectsInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkExportMetalObjectsEXT( device, pMetalObjectsInfo );
+ }
+# endif /*VK_USE_PLATFORM_METAL_EXT*/
+
//=== VK_KHR_synchronization2 ===
- void vkCmdSetEvent2KHR( VkCommandBuffer commandBuffer,
- VkEvent event,
- const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetEvent2KHR( commandBuffer, event, pDependencyInfo );
}
- void vkCmdResetEvent2KHR( VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdResetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdResetEvent2KHR( commandBuffer, event, stageMask );
}
@@ -5353,40 +4901,28 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdWaitEvents2KHR( commandBuffer, eventCount, pEvents, pDependencyInfos );
}
- void vkCmdPipelineBarrier2KHR( VkCommandBuffer commandBuffer,
- const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdPipelineBarrier2KHR( VkCommandBuffer commandBuffer, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdPipelineBarrier2KHR( commandBuffer, pDependencyInfo );
}
- void vkCmdWriteTimestamp2KHR( VkCommandBuffer commandBuffer,
- VkPipelineStageFlags2 stage,
- VkQueryPool queryPool,
- uint32_t query ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdWriteTimestamp2KHR( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdWriteTimestamp2KHR( commandBuffer, stage, queryPool, query );
}
- VkResult vkQueueSubmit2KHR( VkQueue queue,
- uint32_t submitCount,
- const VkSubmitInfo2 * pSubmits,
- VkFence fence ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkQueueSubmit2KHR( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT
{
return ::vkQueueSubmit2KHR( queue, submitCount, pSubmits, fence );
}
- void vkCmdWriteBufferMarker2AMD( VkCommandBuffer commandBuffer,
- VkPipelineStageFlags2 stage,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- uint32_t marker ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdWriteBufferMarker2AMD(
+ VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdWriteBufferMarker2AMD( commandBuffer, stage, dstBuffer, dstOffset, marker );
}
- void vkGetQueueCheckpointData2NV( VkQueue queue,
- uint32_t * pCheckpointDataCount,
- VkCheckpointData2NV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT
+ void vkGetQueueCheckpointData2NV( VkQueue queue, uint32_t * pCheckpointDataCount, VkCheckpointData2NV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetQueueCheckpointData2NV( queue, pCheckpointDataCount, pCheckpointData );
}
@@ -5395,50 +4931,84 @@ namespace VULKAN_HPP_NAMESPACE
void vkCmdSetFragmentShadingRateEnumNV( VkCommandBuffer commandBuffer,
VkFragmentShadingRateNV shadingRate,
- const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const
- VULKAN_HPP_NOEXCEPT
+ const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetFragmentShadingRateEnumNV( commandBuffer, shadingRate, combinerOps );
}
+ //=== VK_EXT_mesh_shader ===
+
+ void vkCmdDrawMeshTasksEXT( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdDrawMeshTasksEXT( commandBuffer, groupCountX, groupCountY, groupCountZ );
+ }
+
+ void vkCmdDrawMeshTasksIndirectEXT( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const
+ VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdDrawMeshTasksIndirectEXT( commandBuffer, buffer, offset, drawCount, stride );
+ }
+
+ void vkCmdDrawMeshTasksIndirectCountEXT( VkCommandBuffer commandBuffer,
+ VkBuffer buffer,
+ VkDeviceSize offset,
+ VkBuffer countBuffer,
+ VkDeviceSize countBufferOffset,
+ uint32_t maxDrawCount,
+ uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdDrawMeshTasksIndirectCountEXT( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+ }
+
//=== VK_KHR_copy_commands2 ===
- void vkCmdCopyBuffer2KHR( VkCommandBuffer commandBuffer,
- const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdCopyBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdCopyBuffer2KHR( commandBuffer, pCopyBufferInfo );
}
- void vkCmdCopyImage2KHR( VkCommandBuffer commandBuffer,
- const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdCopyImage2KHR( VkCommandBuffer commandBuffer, const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdCopyImage2KHR( commandBuffer, pCopyImageInfo );
}
- void vkCmdCopyBufferToImage2KHR( VkCommandBuffer commandBuffer,
- const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdCopyBufferToImage2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdCopyBufferToImage2KHR( commandBuffer, pCopyBufferToImageInfo );
}
- void vkCmdCopyImageToBuffer2KHR( VkCommandBuffer commandBuffer,
- const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdCopyImageToBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdCopyImageToBuffer2KHR( commandBuffer, pCopyImageToBufferInfo );
}
- void vkCmdBlitImage2KHR( VkCommandBuffer commandBuffer,
- const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdBlitImage2KHR( VkCommandBuffer commandBuffer, const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBlitImage2KHR( commandBuffer, pBlitImageInfo );
}
- void vkCmdResolveImage2KHR( VkCommandBuffer commandBuffer,
- const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdResolveImage2KHR( VkCommandBuffer commandBuffer, const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdResolveImage2KHR( commandBuffer, pResolveImageInfo );
}
+ //=== VK_EXT_image_compression_control ===
+
+ void vkGetImageSubresourceLayout2EXT( VkDevice device,
+ VkImage image,
+ const VkImageSubresource2EXT * pSubresource,
+ VkSubresourceLayout2EXT * pLayout ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetImageSubresourceLayout2EXT( device, image, pSubresource, pLayout );
+ }
+
+ //=== VK_EXT_device_fault ===
+
+ VkResult vkGetDeviceFaultInfoEXT( VkDevice device, VkDeviceFaultCountsEXT * pFaultCounts, VkDeviceFaultInfoEXT * pFaultInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetDeviceFaultInfoEXT( device, pFaultCounts, pFaultInfo );
+ }
+
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_NV_acquire_winrt_display ===
@@ -5447,9 +5017,7 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkAcquireWinrtDisplayNV( physicalDevice, display );
}
- VkResult vkGetWinrtDisplayNV( VkPhysicalDevice physicalDevice,
- uint32_t deviceRelativeId,
- VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetWinrtDisplayNV( VkPhysicalDevice physicalDevice, uint32_t deviceRelativeId, VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetWinrtDisplayNV( physicalDevice, deviceRelativeId, pDisplay );
}
@@ -5466,9 +5034,8 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCreateDirectFBSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface );
}
- VkBool32 vkGetPhysicalDeviceDirectFBPresentationSupportEXT( VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- IDirectFB * dfb ) const VULKAN_HPP_NOEXCEPT
+ VkBool32
+ vkGetPhysicalDeviceDirectFBPresentationSupportEXT( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, IDirectFB * dfb ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceDirectFBPresentationSupportEXT( physicalDevice, queueFamilyIndex, dfb );
}
@@ -5485,14 +5052,8 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t height,
uint32_t depth ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdTraceRaysKHR( commandBuffer,
- pRaygenShaderBindingTable,
- pMissShaderBindingTable,
- pHitShaderBindingTable,
- pCallableShaderBindingTable,
- width,
- height,
- depth );
+ return ::vkCmdTraceRaysKHR(
+ commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, width, height, depth );
}
VkResult vkCreateRayTracingPipelinesKHR( VkDevice device,
@@ -5501,31 +5062,21 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t createInfoCount,
const VkRayTracingPipelineCreateInfoKHR * pCreateInfos,
const VkAllocationCallbacks * pAllocator,
- VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT
+ VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCreateRayTracingPipelinesKHR(
- device, deferredOperation, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
+ return ::vkCreateRayTracingPipelinesKHR( device, deferredOperation, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
}
- VkResult vkGetRayTracingShaderGroupHandlesKHR( VkDevice device,
- VkPipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- void * pData ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetRayTracingShaderGroupHandlesKHR(
+ VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetRayTracingShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData );
}
- VkResult vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( VkDevice device,
- VkPipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- void * pData ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
+ VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
- device, pipeline, firstGroup, groupCount, dataSize, pData );
+ return ::vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData );
}
void vkCmdTraceRaysIndirectKHR( VkCommandBuffer commandBuffer,
@@ -5533,14 +5084,10 @@ namespace VULKAN_HPP_NAMESPACE
const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable,
const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable,
const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
- VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT
+ VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdTraceRaysIndirectKHR( commandBuffer,
- pRaygenShaderBindingTable,
- pMissShaderBindingTable,
- pHitShaderBindingTable,
- pCallableShaderBindingTable,
- indirectDeviceAddress );
+ return ::vkCmdTraceRaysIndirectKHR(
+ commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, indirectDeviceAddress );
}
VkDeviceSize vkGetRayTracingShaderGroupStackSizeKHR( VkDevice device,
@@ -5551,8 +5098,7 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkGetRayTracingShaderGroupStackSizeKHR( device, pipeline, group, groupShader );
}
- void vkCmdSetRayTracingPipelineStackSizeKHR( VkCommandBuffer commandBuffer,
- uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetRayTracingPipelineStackSizeKHR( VkCommandBuffer commandBuffer, uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetRayTracingPipelineStackSizeKHR( commandBuffer, pipelineStackSize );
}
@@ -5563,14 +5109,10 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t vertexBindingDescriptionCount,
const VkVertexInputBindingDescription2EXT * pVertexBindingDescriptions,
uint32_t vertexAttributeDescriptionCount,
- const VkVertexInputAttributeDescription2EXT * pVertexAttributeDescriptions ) const
- VULKAN_HPP_NOEXCEPT
+ const VkVertexInputAttributeDescription2EXT * pVertexAttributeDescriptions ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdSetVertexInputEXT( commandBuffer,
- vertexBindingDescriptionCount,
- pVertexBindingDescriptions,
- vertexAttributeDescriptionCount,
- pVertexAttributeDescriptions );
+ return ::vkCmdSetVertexInputEXT(
+ commandBuffer, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions );
}
# if defined( VK_USE_PLATFORM_FUCHSIA )
@@ -5578,35 +5120,32 @@ namespace VULKAN_HPP_NAMESPACE
VkResult vkGetMemoryZirconHandleFUCHSIA( VkDevice device,
const VkMemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
- zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT
+ zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetMemoryZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle );
}
- VkResult vkGetMemoryZirconHandlePropertiesFUCHSIA(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- zx_handle_t zirconHandle,
- VkMemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkGetMemoryZirconHandlePropertiesFUCHSIA( VkDevice device,
+ VkExternalMemoryHandleTypeFlagBits handleType,
+ zx_handle_t zirconHandle,
+ VkMemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetMemoryZirconHandlePropertiesFUCHSIA(
- device, handleType, zirconHandle, pMemoryZirconHandleProperties );
+ return ::vkGetMemoryZirconHandlePropertiesFUCHSIA( device, handleType, zirconHandle, pMemoryZirconHandleProperties );
}
# endif /*VK_USE_PLATFORM_FUCHSIA*/
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_external_semaphore ===
- VkResult vkImportSemaphoreZirconHandleFUCHSIA(
- VkDevice device,
- const VkImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkImportSemaphoreZirconHandleFUCHSIA( VkDevice device,
+ const VkImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkImportSemaphoreZirconHandleFUCHSIA( device, pImportSemaphoreZirconHandleInfo );
}
VkResult vkGetSemaphoreZirconHandleFUCHSIA( VkDevice device,
const VkSemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
- zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT
+ zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetSemaphoreZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle );
}
@@ -5618,23 +5157,21 @@ namespace VULKAN_HPP_NAMESPACE
VkResult vkCreateBufferCollectionFUCHSIA( VkDevice device,
const VkBufferCollectionCreateInfoFUCHSIA * pCreateInfo,
const VkAllocationCallbacks * pAllocator,
- VkBufferCollectionFUCHSIA * pCollection ) const VULKAN_HPP_NOEXCEPT
+ VkBufferCollectionFUCHSIA * pCollection ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateBufferCollectionFUCHSIA( device, pCreateInfo, pAllocator, pCollection );
}
- VkResult vkSetBufferCollectionImageConstraintsFUCHSIA(
- VkDevice device,
- VkBufferCollectionFUCHSIA collection,
- const VkImageConstraintsInfoFUCHSIA * pImageConstraintsInfo ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkSetBufferCollectionImageConstraintsFUCHSIA( VkDevice device,
+ VkBufferCollectionFUCHSIA collection,
+ const VkImageConstraintsInfoFUCHSIA * pImageConstraintsInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkSetBufferCollectionImageConstraintsFUCHSIA( device, collection, pImageConstraintsInfo );
}
- VkResult vkSetBufferCollectionBufferConstraintsFUCHSIA(
- VkDevice device,
- VkBufferCollectionFUCHSIA collection,
- const VkBufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkSetBufferCollectionBufferConstraintsFUCHSIA( VkDevice device,
+ VkBufferCollectionFUCHSIA collection,
+ const VkBufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkSetBufferCollectionBufferConstraintsFUCHSIA( device, collection, pBufferConstraintsInfo );
}
@@ -5648,8 +5185,7 @@ namespace VULKAN_HPP_NAMESPACE
VkResult vkGetBufferCollectionPropertiesFUCHSIA( VkDevice device,
VkBufferCollectionFUCHSIA collection,
- VkBufferCollectionPropertiesFUCHSIA * pProperties ) const
- VULKAN_HPP_NOEXCEPT
+ VkBufferCollectionPropertiesFUCHSIA * pProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetBufferCollectionPropertiesFUCHSIA( device, collection, pProperties );
}
@@ -5657,9 +5193,8 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_HUAWEI_subpass_shading ===
- VkResult vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( VkDevice device,
- VkRenderPass renderpass,
- VkExtent2D * pMaxWorkgroupSize ) const VULKAN_HPP_NOEXCEPT
+ VkResult
+ vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( VkDevice device, VkRenderPass renderpass, VkExtent2D * pMaxWorkgroupSize ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( device, renderpass, pMaxWorkgroupSize );
}
@@ -5671,9 +5206,7 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_HUAWEI_invocation_mask ===
- void vkCmdBindInvocationMaskHUAWEI( VkCommandBuffer commandBuffer,
- VkImageView imageView,
- VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdBindInvocationMaskHUAWEI( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBindInvocationMaskHUAWEI( commandBuffer, imageView, imageLayout );
}
@@ -5687,16 +5220,22 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkGetMemoryRemoteAddressNV( device, pMemoryGetRemoteAddressInfo, pAddress );
}
+ //=== VK_EXT_pipeline_properties ===
+
+ VkResult
+ vkGetPipelinePropertiesEXT( VkDevice device, const VkPipelineInfoEXT * pPipelineInfo, VkBaseOutStructure * pPipelineProperties ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetPipelinePropertiesEXT( device, pPipelineInfo, pPipelineProperties );
+ }
+
//=== VK_EXT_extended_dynamic_state2 ===
- void vkCmdSetPatchControlPointsEXT( VkCommandBuffer commandBuffer,
- uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetPatchControlPointsEXT( VkCommandBuffer commandBuffer, uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetPatchControlPointsEXT( commandBuffer, patchControlPoints );
}
- void vkCmdSetRasterizerDiscardEnableEXT( VkCommandBuffer commandBuffer,
- VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetRasterizerDiscardEnableEXT( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetRasterizerDiscardEnableEXT( commandBuffer, rasterizerDiscardEnable );
}
@@ -5711,8 +5250,7 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdSetLogicOpEXT( commandBuffer, logicOp );
}
- void vkCmdSetPrimitiveRestartEnableEXT( VkCommandBuffer commandBuffer,
- VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetPrimitiveRestartEnableEXT( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetPrimitiveRestartEnableEXT( commandBuffer, primitiveRestartEnable );
}
@@ -5738,13 +5276,18 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_color_write_enable ===
- void vkCmdSetColorWriteEnableEXT( VkCommandBuffer commandBuffer,
- uint32_t attachmentCount,
- const VkBool32 * pColorWriteEnables ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdSetColorWriteEnableEXT( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkBool32 * pColorWriteEnables ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetColorWriteEnableEXT( commandBuffer, attachmentCount, pColorWriteEnables );
}
+ //=== VK_KHR_ray_tracing_maintenance1 ===
+
+ void vkCmdTraceRaysIndirect2KHR( VkCommandBuffer commandBuffer, VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdTraceRaysIndirect2KHR( commandBuffer, indirectDeviceAddress );
+ }
+
//=== VK_EXT_multi_draw ===
void vkCmdDrawMultiEXT( VkCommandBuffer commandBuffer,
@@ -5765,14 +5308,110 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t stride,
const int32_t * pVertexOffset ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkCmdDrawMultiIndexedEXT(
- commandBuffer, drawCount, pIndexInfo, instanceCount, firstInstance, stride, pVertexOffset );
+ return ::vkCmdDrawMultiIndexedEXT( commandBuffer, drawCount, pIndexInfo, instanceCount, firstInstance, stride, pVertexOffset );
+ }
+
+ //=== VK_EXT_opacity_micromap ===
+
+ VkResult vkCreateMicromapEXT( VkDevice device,
+ const VkMicromapCreateInfoEXT * pCreateInfo,
+ const VkAllocationCallbacks * pAllocator,
+ VkMicromapEXT * pMicromap ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCreateMicromapEXT( device, pCreateInfo, pAllocator, pMicromap );
+ }
+
+ void vkDestroyMicromapEXT( VkDevice device, VkMicromapEXT micromap, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkDestroyMicromapEXT( device, micromap, pAllocator );
+ }
+
+ void vkCmdBuildMicromapsEXT( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkMicromapBuildInfoEXT * pInfos ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdBuildMicromapsEXT( commandBuffer, infoCount, pInfos );
+ }
+
+ VkResult vkBuildMicromapsEXT( VkDevice device,
+ VkDeferredOperationKHR deferredOperation,
+ uint32_t infoCount,
+ const VkMicromapBuildInfoEXT * pInfos ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkBuildMicromapsEXT( device, deferredOperation, infoCount, pInfos );
+ }
+
+ VkResult vkCopyMicromapEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCopyMicromapEXT( device, deferredOperation, pInfo );
+ }
+
+ VkResult vkCopyMicromapToMemoryEXT( VkDevice device,
+ VkDeferredOperationKHR deferredOperation,
+ const VkCopyMicromapToMemoryInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCopyMicromapToMemoryEXT( device, deferredOperation, pInfo );
+ }
+
+ VkResult vkCopyMemoryToMicromapEXT( VkDevice device,
+ VkDeferredOperationKHR deferredOperation,
+ const VkCopyMemoryToMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCopyMemoryToMicromapEXT( device, deferredOperation, pInfo );
+ }
+
+ VkResult vkWriteMicromapsPropertiesEXT( VkDevice device,
+ uint32_t micromapCount,
+ const VkMicromapEXT * pMicromaps,
+ VkQueryType queryType,
+ size_t dataSize,
+ void * pData,
+ size_t stride ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkWriteMicromapsPropertiesEXT( device, micromapCount, pMicromaps, queryType, dataSize, pData, stride );
+ }
+
+ void vkCmdCopyMicromapEXT( VkCommandBuffer commandBuffer, const VkCopyMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdCopyMicromapEXT( commandBuffer, pInfo );
+ }
+
+ void vkCmdCopyMicromapToMemoryEXT( VkCommandBuffer commandBuffer, const VkCopyMicromapToMemoryInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdCopyMicromapToMemoryEXT( commandBuffer, pInfo );
+ }
+
+ void vkCmdCopyMemoryToMicromapEXT( VkCommandBuffer commandBuffer, const VkCopyMemoryToMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdCopyMemoryToMicromapEXT( commandBuffer, pInfo );
+ }
+
+ void vkCmdWriteMicromapsPropertiesEXT( VkCommandBuffer commandBuffer,
+ uint32_t micromapCount,
+ const VkMicromapEXT * pMicromaps,
+ VkQueryType queryType,
+ VkQueryPool queryPool,
+ uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdWriteMicromapsPropertiesEXT( commandBuffer, micromapCount, pMicromaps, queryType, queryPool, firstQuery );
+ }
+
+ void vkGetDeviceMicromapCompatibilityEXT( VkDevice device,
+ const VkMicromapVersionInfoEXT * pVersionInfo,
+ VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetDeviceMicromapCompatibilityEXT( device, pVersionInfo, pCompatibility );
+ }
+
+ void vkGetMicromapBuildSizesEXT( VkDevice device,
+ VkAccelerationStructureBuildTypeKHR buildType,
+ const VkMicromapBuildInfoEXT * pBuildInfo,
+ VkMicromapBuildSizesInfoEXT * pSizeInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetMicromapBuildSizesEXT( device, buildType, pBuildInfo, pSizeInfo );
}
//=== VK_EXT_pageable_device_local_memory ===
- void
- vkSetDeviceMemoryPriorityEXT( VkDevice device, VkDeviceMemory memory, float priority ) const VULKAN_HPP_NOEXCEPT
+ void vkSetDeviceMemoryPriorityEXT( VkDevice device, VkDeviceMemory memory, float priority ) const VULKAN_HPP_NOEXCEPT
{
return ::vkSetDeviceMemoryPriorityEXT( device, memory, priority );
}
@@ -5781,26 +5420,283 @@ namespace VULKAN_HPP_NAMESPACE
void vkGetDeviceBufferMemoryRequirementsKHR( VkDevice device,
const VkDeviceBufferMemoryRequirements * pInfo,
- VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+ VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDeviceBufferMemoryRequirementsKHR( device, pInfo, pMemoryRequirements );
}
void vkGetDeviceImageMemoryRequirementsKHR( VkDevice device,
const VkDeviceImageMemoryRequirements * pInfo,
- VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+ VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDeviceImageMemoryRequirementsKHR( device, pInfo, pMemoryRequirements );
}
- void vkGetDeviceImageSparseMemoryRequirementsKHR(
- VkDevice device,
- const VkDeviceImageMemoryRequirements * pInfo,
- uint32_t * pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+ void vkGetDeviceImageSparseMemoryRequirementsKHR( VkDevice device,
+ const VkDeviceImageMemoryRequirements * pInfo,
+ uint32_t * pSparseMemoryRequirementCount,
+ VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetDeviceImageSparseMemoryRequirementsKHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
+ }
+
+ //=== VK_VALVE_descriptor_set_host_mapping ===
+
+ void vkGetDescriptorSetLayoutHostMappingInfoVALVE( VkDevice device,
+ const VkDescriptorSetBindingReferenceVALVE * pBindingReference,
+ VkDescriptorSetLayoutHostMappingInfoVALVE * pHostMapping ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetDescriptorSetLayoutHostMappingInfoVALVE( device, pBindingReference, pHostMapping );
+ }
+
+ void vkGetDescriptorSetHostMappingVALVE( VkDevice device, VkDescriptorSet descriptorSet, void ** ppData ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetDescriptorSetHostMappingVALVE( device, descriptorSet, ppData );
+ }
+
+ //=== VK_EXT_extended_dynamic_state3 ===
+
+ void vkCmdSetTessellationDomainOriginEXT( VkCommandBuffer commandBuffer, VkTessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetTessellationDomainOriginEXT( commandBuffer, domainOrigin );
+ }
+
+ void vkCmdSetDepthClampEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetDepthClampEnableEXT( commandBuffer, depthClampEnable );
+ }
+
+ void vkCmdSetPolygonModeEXT( VkCommandBuffer commandBuffer, VkPolygonMode polygonMode ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetPolygonModeEXT( commandBuffer, polygonMode );
+ }
+
+ void vkCmdSetRasterizationSamplesEXT( VkCommandBuffer commandBuffer, VkSampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetRasterizationSamplesEXT( commandBuffer, rasterizationSamples );
+ }
+
+ void vkCmdSetSampleMaskEXT( VkCommandBuffer commandBuffer, VkSampleCountFlagBits samples, const VkSampleMask * pSampleMask ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetSampleMaskEXT( commandBuffer, samples, pSampleMask );
+ }
+
+ void vkCmdSetAlphaToCoverageEnableEXT( VkCommandBuffer commandBuffer, VkBool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetAlphaToCoverageEnableEXT( commandBuffer, alphaToCoverageEnable );
+ }
+
+ void vkCmdSetAlphaToOneEnableEXT( VkCommandBuffer commandBuffer, VkBool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetAlphaToOneEnableEXT( commandBuffer, alphaToOneEnable );
+ }
+
+ void vkCmdSetLogicOpEnableEXT( VkCommandBuffer commandBuffer, VkBool32 logicOpEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetLogicOpEnableEXT( commandBuffer, logicOpEnable );
+ }
+
+ void vkCmdSetColorBlendEnableEXT( VkCommandBuffer commandBuffer,
+ uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VkBool32 * pColorBlendEnables ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetColorBlendEnableEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendEnables );
+ }
+
+ void vkCmdSetColorBlendEquationEXT( VkCommandBuffer commandBuffer,
+ uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VkColorBlendEquationEXT * pColorBlendEquations ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetColorBlendEquationEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendEquations );
+ }
+
+ void vkCmdSetColorWriteMaskEXT( VkCommandBuffer commandBuffer,
+ uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VkColorComponentFlags * pColorWriteMasks ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetColorWriteMaskEXT( commandBuffer, firstAttachment, attachmentCount, pColorWriteMasks );
+ }
+
+ void vkCmdSetRasterizationStreamEXT( VkCommandBuffer commandBuffer, uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetRasterizationStreamEXT( commandBuffer, rasterizationStream );
+ }
+
+ void vkCmdSetConservativeRasterizationModeEXT( VkCommandBuffer commandBuffer,
+ VkConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetConservativeRasterizationModeEXT( commandBuffer, conservativeRasterizationMode );
+ }
+
+ void vkCmdSetExtraPrimitiveOverestimationSizeEXT( VkCommandBuffer commandBuffer, float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetDeviceImageSparseMemoryRequirementsKHR(
- device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
+ return ::vkCmdSetExtraPrimitiveOverestimationSizeEXT( commandBuffer, extraPrimitiveOverestimationSize );
+ }
+
+ void vkCmdSetDepthClipEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetDepthClipEnableEXT( commandBuffer, depthClipEnable );
+ }
+
+ void vkCmdSetSampleLocationsEnableEXT( VkCommandBuffer commandBuffer, VkBool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetSampleLocationsEnableEXT( commandBuffer, sampleLocationsEnable );
+ }
+
+ void vkCmdSetColorBlendAdvancedEXT( VkCommandBuffer commandBuffer,
+ uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VkColorBlendAdvancedEXT * pColorBlendAdvanced ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetColorBlendAdvancedEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendAdvanced );
+ }
+
+ void vkCmdSetProvokingVertexModeEXT( VkCommandBuffer commandBuffer, VkProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetProvokingVertexModeEXT( commandBuffer, provokingVertexMode );
+ }
+
+ void vkCmdSetLineRasterizationModeEXT( VkCommandBuffer commandBuffer, VkLineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetLineRasterizationModeEXT( commandBuffer, lineRasterizationMode );
+ }
+
+ void vkCmdSetLineStippleEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetLineStippleEnableEXT( commandBuffer, stippledLineEnable );
+ }
+
+ void vkCmdSetDepthClipNegativeOneToOneEXT( VkCommandBuffer commandBuffer, VkBool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetDepthClipNegativeOneToOneEXT( commandBuffer, negativeOneToOne );
+ }
+
+ void vkCmdSetViewportWScalingEnableNV( VkCommandBuffer commandBuffer, VkBool32 viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetViewportWScalingEnableNV( commandBuffer, viewportWScalingEnable );
+ }
+
+ void vkCmdSetViewportSwizzleNV( VkCommandBuffer commandBuffer,
+ uint32_t firstViewport,
+ uint32_t viewportCount,
+ const VkViewportSwizzleNV * pViewportSwizzles ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetViewportSwizzleNV( commandBuffer, firstViewport, viewportCount, pViewportSwizzles );
+ }
+
+ void vkCmdSetCoverageToColorEnableNV( VkCommandBuffer commandBuffer, VkBool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetCoverageToColorEnableNV( commandBuffer, coverageToColorEnable );
+ }
+
+ void vkCmdSetCoverageToColorLocationNV( VkCommandBuffer commandBuffer, uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetCoverageToColorLocationNV( commandBuffer, coverageToColorLocation );
+ }
+
+ void vkCmdSetCoverageModulationModeNV( VkCommandBuffer commandBuffer, VkCoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetCoverageModulationModeNV( commandBuffer, coverageModulationMode );
+ }
+
+ void vkCmdSetCoverageModulationTableEnableNV( VkCommandBuffer commandBuffer, VkBool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetCoverageModulationTableEnableNV( commandBuffer, coverageModulationTableEnable );
+ }
+
+ void vkCmdSetCoverageModulationTableNV( VkCommandBuffer commandBuffer,
+ uint32_t coverageModulationTableCount,
+ const float * pCoverageModulationTable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetCoverageModulationTableNV( commandBuffer, coverageModulationTableCount, pCoverageModulationTable );
+ }
+
+ void vkCmdSetShadingRateImageEnableNV( VkCommandBuffer commandBuffer, VkBool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetShadingRateImageEnableNV( commandBuffer, shadingRateImageEnable );
+ }
+
+ void vkCmdSetRepresentativeFragmentTestEnableNV( VkCommandBuffer commandBuffer, VkBool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetRepresentativeFragmentTestEnableNV( commandBuffer, representativeFragmentTestEnable );
+ }
+
+ void vkCmdSetCoverageReductionModeNV( VkCommandBuffer commandBuffer, VkCoverageReductionModeNV coverageReductionMode ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetCoverageReductionModeNV( commandBuffer, coverageReductionMode );
+ }
+
+ //=== VK_EXT_shader_module_identifier ===
+
+ void vkGetShaderModuleIdentifierEXT( VkDevice device, VkShaderModule shaderModule, VkShaderModuleIdentifierEXT * pIdentifier ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetShaderModuleIdentifierEXT( device, shaderModule, pIdentifier );
+ }
+
+ void vkGetShaderModuleCreateInfoIdentifierEXT( VkDevice device,
+ const VkShaderModuleCreateInfo * pCreateInfo,
+ VkShaderModuleIdentifierEXT * pIdentifier ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetShaderModuleCreateInfoIdentifierEXT( device, pCreateInfo, pIdentifier );
+ }
+
+ //=== VK_NV_optical_flow ===
+
+ VkResult vkGetPhysicalDeviceOpticalFlowImageFormatsNV( VkPhysicalDevice physicalDevice,
+ const VkOpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo,
+ uint32_t * pFormatCount,
+ VkOpticalFlowImageFormatPropertiesNV * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetPhysicalDeviceOpticalFlowImageFormatsNV( physicalDevice, pOpticalFlowImageFormatInfo, pFormatCount, pImageFormatProperties );
+ }
+
+ VkResult vkCreateOpticalFlowSessionNV( VkDevice device,
+ const VkOpticalFlowSessionCreateInfoNV * pCreateInfo,
+ const VkAllocationCallbacks * pAllocator,
+ VkOpticalFlowSessionNV * pSession ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCreateOpticalFlowSessionNV( device, pCreateInfo, pAllocator, pSession );
+ }
+
+ void vkDestroyOpticalFlowSessionNV( VkDevice device, VkOpticalFlowSessionNV session, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkDestroyOpticalFlowSessionNV( device, session, pAllocator );
+ }
+
+ VkResult vkBindOpticalFlowSessionImageNV( VkDevice device,
+ VkOpticalFlowSessionNV session,
+ VkOpticalFlowSessionBindingPointNV bindingPoint,
+ VkImageView view,
+ VkImageLayout layout ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkBindOpticalFlowSessionImageNV( device, session, bindingPoint, view, layout );
+ }
+
+ void vkCmdOpticalFlowExecuteNV( VkCommandBuffer commandBuffer,
+ VkOpticalFlowSessionNV session,
+ const VkOpticalFlowExecuteInfoNV * pExecuteInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdOpticalFlowExecuteNV( commandBuffer, session, pExecuteInfo );
+ }
+
+ //=== VK_QCOM_tile_properties ===
+
+ VkResult vkGetFramebufferTilePropertiesQCOM( VkDevice device,
+ VkFramebuffer framebuffer,
+ uint32_t * pPropertiesCount,
+ VkTilePropertiesQCOM * pProperties ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetFramebufferTilePropertiesQCOM( device, framebuffer, pPropertiesCount, pProperties );
+ }
+
+ VkResult vkGetDynamicRenderingTilePropertiesQCOM( VkDevice device,
+ const VkRenderingInfo * pRenderingInfo,
+ VkTilePropertiesQCOM * pProperties ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetDynamicRenderingTilePropertiesQCOM( device, pRenderingInfo, pProperties );
}
};
#endif
@@ -5875,6 +5771,7 @@ namespace VULKAN_HPP_NAMESPACE
# define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT = VULKAN_HPP_DEFAULT_DISPATCHER
#endif
+#if !defined( VULKAN_HPP_NO_SMART_HANDLE )
struct AllocationCallbacks;
template <typename OwnerType, typename Dispatch>
@@ -5883,14 +5780,14 @@ namespace VULKAN_HPP_NAMESPACE
public:
ObjectDestroy() = default;
- ObjectDestroy( OwnerType owner,
- Optional<const AllocationCallbacks> allocationCallbacks
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+ ObjectDestroy( OwnerType owner,
+ Optional<const AllocationCallbacks> allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
: m_owner( owner )
, m_allocationCallbacks( allocationCallbacks )
, m_dispatch( &dispatch )
- {}
+ {
+ }
OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
{
@@ -5927,7 +5824,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
: m_allocationCallbacks( allocationCallbacks )
, m_dispatch( &dispatch )
- {}
+ {
+ }
Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT
{
@@ -5955,11 +5853,12 @@ namespace VULKAN_HPP_NAMESPACE
ObjectFree( OwnerType owner,
Optional<const AllocationCallbacks> allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+ Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
: m_owner( owner )
, m_allocationCallbacks( allocationCallbacks )
, m_dispatch( &dispatch )
- {}
+ {
+ }
OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
{
@@ -5976,7 +5875,7 @@ namespace VULKAN_HPP_NAMESPACE
void destroy( T t ) VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( m_owner && m_dispatch );
- m_owner.free( t, m_allocationCallbacks, *m_dispatch );
+ ( m_owner.free )( t, m_allocationCallbacks, *m_dispatch );
}
private:
@@ -5991,11 +5890,11 @@ namespace VULKAN_HPP_NAMESPACE
public:
ObjectRelease() = default;
- ObjectRelease( OwnerType owner,
- Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+ ObjectRelease( OwnerType owner, Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
: m_owner( owner )
, m_dispatch( &dispatch )
- {}
+ {
+ }
OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
{
@@ -6021,13 +5920,12 @@ namespace VULKAN_HPP_NAMESPACE
public:
PoolFree() = default;
- PoolFree( OwnerType owner,
- PoolType pool,
- Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+ PoolFree( OwnerType owner, PoolType pool, Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
: m_owner( owner )
, m_pool( pool )
, m_dispatch( &dispatch )
- {}
+ {
+ }
OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
{
@@ -6042,7 +5940,7 @@ namespace VULKAN_HPP_NAMESPACE
template <typename T>
void destroy( T t ) VULKAN_HPP_NOEXCEPT
{
- m_owner.free( m_pool, t, *m_dispatch );
+ ( m_owner.free )( m_pool, t, *m_dispatch );
}
private:
@@ -6051,6 +5949,8 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const * m_dispatch = nullptr;
};
+#endif // !VULKAN_HPP_NO_SMART_HANDLE
+
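ObjectDestroy, ObjectFree, ObjectRelease and PoolFree are the deleters behind UniqueHandle, which is why the whole block can now be compiled out under VULKAN_HPP_NO_SMART_HANDLE. A minimal sketch of the usage they support, assuming smart handles and exceptions are enabled and a valid vk::Device is at hand (the helper name is illustrative, not part of the header):

#include <vulkan/vulkan.hpp>

// Illustrative only: createBufferUnique() hands back UniqueHandle<Buffer, Dispatch>,
// whose deleter is an ObjectDestroy<Device, Dispatch> capturing the owning device,
// the (null) allocation callbacks and the dispatcher.
vk::UniqueBuffer makeStagingBuffer( vk::Device device, vk::DeviceSize size )
{
  vk::BufferCreateInfo info( {}, size, vk::BufferUsageFlagBits::eTransferSrc );
  return device.createBufferUnique( info );
  // When the returned handle goes out of scope, ObjectDestroy::destroy() runs
  // device.destroy( buffer, allocationCallbacks, dispatch ) on our behalf.
}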
//==================
//=== BASE TYPEs ===
//==================
@@ -6064,20 +5964,23 @@ namespace VULKAN_HPP_NAMESPACE
} // namespace VULKAN_HPP_NAMESPACE
#include <vulkan/vulkan_enums.hpp>
+#if !defined( VULKAN_HPP_NO_TO_STRING )
+# include <vulkan/vulkan_to_string.hpp>
+#endif
#ifndef VULKAN_HPP_NO_EXCEPTIONS
namespace std
{
template <>
struct is_error_code_enum<VULKAN_HPP_NAMESPACE::Result> : public true_type
- {};
+ {
+ };
} // namespace std
#endif
namespace VULKAN_HPP_NAMESPACE
{
#ifndef VULKAN_HPP_NO_EXCEPTIONS
-
class ErrorCategoryImpl : public std::error_category
{
public:
@@ -6087,7 +5990,11 @@ namespace VULKAN_HPP_NAMESPACE
}
virtual std::string message( int ev ) const override
{
- return to_string( static_cast<Result>( ev ) );
+# if defined( VULKAN_HPP_NO_TO_STRING )
+ return std::to_string( ev );
+# else
+ return VULKAN_HPP_NAMESPACE::to_string( static_cast<VULKAN_HPP_NAMESPACE::Result>( ev ) );
+# endif
}
};
@@ -6124,12 +6031,8 @@ namespace VULKAN_HPP_NAMESPACE
SystemError( std::error_code ec, std::string const & what ) : Error(), std::system_error( ec, what ) {}
SystemError( std::error_code ec, char const * what ) : Error(), std::system_error( ec, what ) {}
SystemError( int ev, std::error_category const & ecat ) : Error(), std::system_error( ev, ecat ) {}
- SystemError( int ev, std::error_category const & ecat, std::string const & what )
- : Error(), std::system_error( ev, ecat, what )
- {}
- SystemError( int ev, std::error_category const & ecat, char const * what )
- : Error(), std::system_error( ev, ecat, what )
- {}
+ SystemError( int ev, std::error_category const & ecat, std::string const & what ) : Error(), std::system_error( ev, ecat, what ) {}
+ SystemError( int ev, std::error_category const & ecat, char const * what ) : Error(), std::system_error( ev, ecat, what ) {}
virtual const char * what() const VULKAN_HPP_NOEXCEPT
{
@@ -6156,130 +6059,85 @@ namespace VULKAN_HPP_NAMESPACE
class OutOfHostMemoryError : public SystemError
{
public:
- OutOfHostMemoryError( std::string const & message )
- : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message )
- {}
- OutOfHostMemoryError( char const * message )
- : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message )
- {}
+ OutOfHostMemoryError( std::string const & message ) : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {}
+ OutOfHostMemoryError( char const * message ) : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {}
};
class OutOfDeviceMemoryError : public SystemError
{
public:
- OutOfDeviceMemoryError( std::string const & message )
- : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message )
- {}
- OutOfDeviceMemoryError( char const * message )
- : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message )
- {}
+ OutOfDeviceMemoryError( std::string const & message ) : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {}
+ OutOfDeviceMemoryError( char const * message ) : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {}
};
class InitializationFailedError : public SystemError
{
public:
- InitializationFailedError( std::string const & message )
- : SystemError( make_error_code( Result::eErrorInitializationFailed ), message )
- {}
- InitializationFailedError( char const * message )
- : SystemError( make_error_code( Result::eErrorInitializationFailed ), message )
- {}
+ InitializationFailedError( std::string const & message ) : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {}
+ InitializationFailedError( char const * message ) : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {}
};
class DeviceLostError : public SystemError
{
public:
- DeviceLostError( std::string const & message ) : SystemError( make_error_code( Result::eErrorDeviceLost ), message )
- {}
+ DeviceLostError( std::string const & message ) : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {}
DeviceLostError( char const * message ) : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {}
};
class MemoryMapFailedError : public SystemError
{
public:
- MemoryMapFailedError( std::string const & message )
- : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message )
- {}
- MemoryMapFailedError( char const * message )
- : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message )
- {}
+ MemoryMapFailedError( std::string const & message ) : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {}
+ MemoryMapFailedError( char const * message ) : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {}
};
class LayerNotPresentError : public SystemError
{
public:
- LayerNotPresentError( std::string const & message )
- : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message )
- {}
- LayerNotPresentError( char const * message )
- : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message )
- {}
+ LayerNotPresentError( std::string const & message ) : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {}
+ LayerNotPresentError( char const * message ) : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {}
};
class ExtensionNotPresentError : public SystemError
{
public:
- ExtensionNotPresentError( std::string const & message )
- : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message )
- {}
- ExtensionNotPresentError( char const * message )
- : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message )
- {}
+ ExtensionNotPresentError( std::string const & message ) : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {}
+ ExtensionNotPresentError( char const * message ) : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {}
};
class FeatureNotPresentError : public SystemError
{
public:
- FeatureNotPresentError( std::string const & message )
- : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message )
- {}
- FeatureNotPresentError( char const * message )
- : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message )
- {}
+ FeatureNotPresentError( std::string const & message ) : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {}
+ FeatureNotPresentError( char const * message ) : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {}
};
class IncompatibleDriverError : public SystemError
{
public:
- IncompatibleDriverError( std::string const & message )
- : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message )
- {}
- IncompatibleDriverError( char const * message )
- : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message )
- {}
+ IncompatibleDriverError( std::string const & message ) : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {}
+ IncompatibleDriverError( char const * message ) : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {}
};
class TooManyObjectsError : public SystemError
{
public:
- TooManyObjectsError( std::string const & message )
- : SystemError( make_error_code( Result::eErrorTooManyObjects ), message )
- {}
- TooManyObjectsError( char const * message )
- : SystemError( make_error_code( Result::eErrorTooManyObjects ), message )
- {}
+ TooManyObjectsError( std::string const & message ) : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {}
+ TooManyObjectsError( char const * message ) : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {}
};
class FormatNotSupportedError : public SystemError
{
public:
- FormatNotSupportedError( std::string const & message )
- : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message )
- {}
- FormatNotSupportedError( char const * message )
- : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message )
- {}
+ FormatNotSupportedError( std::string const & message ) : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {}
+ FormatNotSupportedError( char const * message ) : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {}
};
class FragmentedPoolError : public SystemError
{
public:
- FragmentedPoolError( std::string const & message )
- : SystemError( make_error_code( Result::eErrorFragmentedPool ), message )
- {}
- FragmentedPoolError( char const * message )
- : SystemError( make_error_code( Result::eErrorFragmentedPool ), message )
- {}
+ FragmentedPoolError( std::string const & message ) : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {}
+ FragmentedPoolError( char const * message ) : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {}
};
class UnknownError : public SystemError
@@ -6292,145 +6150,186 @@ namespace VULKAN_HPP_NAMESPACE
class OutOfPoolMemoryError : public SystemError
{
public:
- OutOfPoolMemoryError( std::string const & message )
- : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message )
- {}
- OutOfPoolMemoryError( char const * message )
- : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message )
- {}
+ OutOfPoolMemoryError( std::string const & message ) : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {}
+ OutOfPoolMemoryError( char const * message ) : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {}
};
class InvalidExternalHandleError : public SystemError
{
public:
- InvalidExternalHandleError( std::string const & message )
- : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message )
- {}
- InvalidExternalHandleError( char const * message )
- : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message )
- {}
+ InvalidExternalHandleError( std::string const & message ) : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {}
+ InvalidExternalHandleError( char const * message ) : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {}
};
class FragmentationError : public SystemError
{
public:
- FragmentationError( std::string const & message )
- : SystemError( make_error_code( Result::eErrorFragmentation ), message )
- {}
- FragmentationError( char const * message ) : SystemError( make_error_code( Result::eErrorFragmentation ), message )
- {}
+ FragmentationError( std::string const & message ) : SystemError( make_error_code( Result::eErrorFragmentation ), message ) {}
+ FragmentationError( char const * message ) : SystemError( make_error_code( Result::eErrorFragmentation ), message ) {}
};
class InvalidOpaqueCaptureAddressError : public SystemError
{
public:
- InvalidOpaqueCaptureAddressError( std::string const & message )
- : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message )
- {}
- InvalidOpaqueCaptureAddressError( char const * message )
- : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message )
- {}
+ InvalidOpaqueCaptureAddressError( std::string const & message ) : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) {}
+ InvalidOpaqueCaptureAddressError( char const * message ) : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) {}
};
class SurfaceLostKHRError : public SystemError
{
public:
- SurfaceLostKHRError( std::string const & message )
- : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message )
- {}
- SurfaceLostKHRError( char const * message )
- : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message )
- {}
+ SurfaceLostKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {}
+ SurfaceLostKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {}
};
class NativeWindowInUseKHRError : public SystemError
{
public:
- NativeWindowInUseKHRError( std::string const & message )
- : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message )
- {}
- NativeWindowInUseKHRError( char const * message )
- : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message )
- {}
+ NativeWindowInUseKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {}
+ NativeWindowInUseKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {}
};
class OutOfDateKHRError : public SystemError
{
public:
- OutOfDateKHRError( std::string const & message )
- : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message )
- {}
+ OutOfDateKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {}
OutOfDateKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {}
};
class IncompatibleDisplayKHRError : public SystemError
{
public:
- IncompatibleDisplayKHRError( std::string const & message )
- : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message )
- {}
- IncompatibleDisplayKHRError( char const * message )
- : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message )
- {}
+ IncompatibleDisplayKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {}
+ IncompatibleDisplayKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {}
};
class ValidationFailedEXTError : public SystemError
{
public:
- ValidationFailedEXTError( std::string const & message )
- : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message )
- {}
- ValidationFailedEXTError( char const * message )
- : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message )
- {}
+ ValidationFailedEXTError( std::string const & message ) : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {}
+ ValidationFailedEXTError( char const * message ) : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {}
};
class InvalidShaderNVError : public SystemError
{
public:
- InvalidShaderNVError( std::string const & message )
- : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message )
- {}
- InvalidShaderNVError( char const * message )
- : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message )
- {}
+ InvalidShaderNVError( std::string const & message ) : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {}
+ InvalidShaderNVError( char const * message ) : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {}
+ };
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ class ImageUsageNotSupportedKHRError : public SystemError
+ {
+ public:
+ ImageUsageNotSupportedKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorImageUsageNotSupportedKHR ), message ) {}
+ ImageUsageNotSupportedKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorImageUsageNotSupportedKHR ), message ) {}
};
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ class VideoPictureLayoutNotSupportedKHRError : public SystemError
+ {
+ public:
+ VideoPictureLayoutNotSupportedKHRError( std::string const & message )
+ : SystemError( make_error_code( Result::eErrorVideoPictureLayoutNotSupportedKHR ), message )
+ {
+ }
+ VideoPictureLayoutNotSupportedKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorVideoPictureLayoutNotSupportedKHR ), message )
+ {
+ }
+ };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ class VideoProfileOperationNotSupportedKHRError : public SystemError
+ {
+ public:
+ VideoProfileOperationNotSupportedKHRError( std::string const & message )
+ : SystemError( make_error_code( Result::eErrorVideoProfileOperationNotSupportedKHR ), message )
+ {
+ }
+ VideoProfileOperationNotSupportedKHRError( char const * message )
+ : SystemError( make_error_code( Result::eErrorVideoProfileOperationNotSupportedKHR ), message )
+ {
+ }
+ };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ class VideoProfileFormatNotSupportedKHRError : public SystemError
+ {
+ public:
+ VideoProfileFormatNotSupportedKHRError( std::string const & message )
+ : SystemError( make_error_code( Result::eErrorVideoProfileFormatNotSupportedKHR ), message )
+ {
+ }
+ VideoProfileFormatNotSupportedKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorVideoProfileFormatNotSupportedKHR ), message )
+ {
+ }
+ };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ class VideoProfileCodecNotSupportedKHRError : public SystemError
+ {
+ public:
+ VideoProfileCodecNotSupportedKHRError( std::string const & message )
+ : SystemError( make_error_code( Result::eErrorVideoProfileCodecNotSupportedKHR ), message )
+ {
+ }
+ VideoProfileCodecNotSupportedKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorVideoProfileCodecNotSupportedKHR ), message ) {}
+ };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ class VideoStdVersionNotSupportedKHRError : public SystemError
+ {
+ public:
+ VideoStdVersionNotSupportedKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorVideoStdVersionNotSupportedKHR ), message )
+ {
+ }
+ VideoStdVersionNotSupportedKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorVideoStdVersionNotSupportedKHR ), message ) {}
+ };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
class InvalidDrmFormatModifierPlaneLayoutEXTError : public SystemError
{
public:
InvalidDrmFormatModifierPlaneLayoutEXTError( std::string const & message )
: SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message )
- {}
+ {
+ }
InvalidDrmFormatModifierPlaneLayoutEXTError( char const * message )
: SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message )
- {}
+ {
+ }
};
class NotPermittedKHRError : public SystemError
{
public:
- NotPermittedKHRError( std::string const & message )
- : SystemError( make_error_code( Result::eErrorNotPermittedKHR ), message )
- {}
- NotPermittedKHRError( char const * message )
- : SystemError( make_error_code( Result::eErrorNotPermittedKHR ), message )
- {}
+ NotPermittedKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorNotPermittedKHR ), message ) {}
+ NotPermittedKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorNotPermittedKHR ), message ) {}
};
# if defined( VK_USE_PLATFORM_WIN32_KHR )
class FullScreenExclusiveModeLostEXTError : public SystemError
{
public:
- FullScreenExclusiveModeLostEXTError( std::string const & message )
- : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message )
- {}
- FullScreenExclusiveModeLostEXTError( char const * message )
- : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message )
- {}
+ FullScreenExclusiveModeLostEXTError( std::string const & message ) : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message )
+ {
+ }
+ FullScreenExclusiveModeLostEXTError( char const * message ) : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) {}
};
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ class CompressionExhaustedEXTError : public SystemError
+ {
+ public:
+ CompressionExhaustedEXTError( std::string const & message ) : SystemError( make_error_code( Result::eErrorCompressionExhaustedEXT ), message ) {}
+ CompressionExhaustedEXTError( char const * message ) : SystemError( make_error_code( Result::eErrorCompressionExhaustedEXT ), message ) {}
+ };
+
namespace
{
[[noreturn]] void throwResultException( Result result, char const * message )
@@ -6460,12 +6359,30 @@ namespace VULKAN_HPP_NAMESPACE
case Result::eErrorIncompatibleDisplayKHR: throw IncompatibleDisplayKHRError( message );
case Result::eErrorValidationFailedEXT: throw ValidationFailedEXTError( message );
case Result::eErrorInvalidShaderNV: throw InvalidShaderNVError( message );
- case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT:
- throw InvalidDrmFormatModifierPlaneLayoutEXTError( message );
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case Result::eErrorImageUsageNotSupportedKHR: throw ImageUsageNotSupportedKHRError( message );
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case Result::eErrorVideoPictureLayoutNotSupportedKHR: throw VideoPictureLayoutNotSupportedKHRError( message );
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case Result::eErrorVideoProfileOperationNotSupportedKHR: throw VideoProfileOperationNotSupportedKHRError( message );
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case Result::eErrorVideoProfileFormatNotSupportedKHR: throw VideoProfileFormatNotSupportedKHRError( message );
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case Result::eErrorVideoProfileCodecNotSupportedKHR: throw VideoProfileCodecNotSupportedKHRError( message );
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case Result::eErrorVideoStdVersionNotSupportedKHR: throw VideoStdVersionNotSupportedKHRError( message );
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: throw InvalidDrmFormatModifierPlaneLayoutEXTError( message );
case Result::eErrorNotPermittedKHR: throw NotPermittedKHRError( message );
# if defined( VK_USE_PLATFORM_WIN32_KHR )
case Result::eErrorFullScreenExclusiveModeLostEXT: throw FullScreenExclusiveModeLostEXTError( message );
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ case Result::eErrorCompressionExhaustedEXT: throw CompressionExhaustedEXTError( message );
default: throw SystemError( make_error_code( result ) );
}
}
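throwResultException is the single funnel that turns a failing VkResult into one of the typed exceptions above; the new branches only extend that mapping to the beta video codes and eErrorCompressionExhaustedEXT. A hedged sketch of what consuming these exceptions can look like on the application side (the helper and the handle names are placeholders, not part of the header):

#include <cstdint>
#include <iostream>
#include <vulkan/vulkan.hpp>

// Illustrative only: acquire a swapchain image and translate the typed errors
// back into control flow. 'swapchain' and 'imageAvailable' are owned by the caller.
uint32_t acquireOrSignalRebuild( vk::Device device, vk::SwapchainKHR swapchain, vk::Semaphore imageAvailable )
{
  try
  {
    return device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailable, nullptr ).value;
  }
  catch ( vk::OutOfDateKHRError const & )
  {
    // VK_ERROR_OUT_OF_DATE_KHR surfaces as its own exception type; tell the caller to rebuild.
    return UINT32_MAX;
  }
  catch ( vk::SystemError const & e )
  {
    // Every other error code falls through throwResultException's default case.
    std::cerr << e.what() << '\n';
    throw;
  }
}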
@@ -6474,7 +6391,8 @@ namespace VULKAN_HPP_NAMESPACE
template <typename T>
void ignore( T const & ) VULKAN_HPP_NOEXCEPT
- {}
+ {
+ }
template <typename T>
struct ResultValue
@@ -6485,7 +6403,8 @@ namespace VULKAN_HPP_NAMESPACE
ResultValue( Result r, T & v )
#endif
: result( r ), value( v )
- {}
+ {
+ }
#ifdef VULKAN_HPP_HAS_NOEXCEPT
ResultValue( Result r, T && v ) VULKAN_HPP_NOEXCEPT( VULKAN_HPP_NOEXCEPT( T( std::move( v ) ) ) )
@@ -6493,7 +6412,8 @@ namespace VULKAN_HPP_NAMESPACE
ResultValue( Result r, T && v )
#endif
: result( r ), value( std::move( v ) )
- {}
+ {
+ }
Result result;
T value;
@@ -6502,36 +6422,6 @@ namespace VULKAN_HPP_NAMESPACE
{
return std::tuple<Result &, T &>( result, value );
}
-
-#if !defined( VULKAN_HPP_DISABLE_IMPLICIT_RESULT_VALUE_CAST )
- VULKAN_HPP_DEPRECATED(
- "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." )
- operator T const &() const & VULKAN_HPP_NOEXCEPT
- {
- return value;
- }
-
- VULKAN_HPP_DEPRECATED(
- "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." )
- operator T &() & VULKAN_HPP_NOEXCEPT
- {
- return value;
- }
-
- VULKAN_HPP_DEPRECATED(
- "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." )
- operator T const &&() const && VULKAN_HPP_NOEXCEPT
- {
- return std::move( value );
- }
-
- VULKAN_HPP_DEPRECATED(
- "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." )
- operator T &&() && VULKAN_HPP_NOEXCEPT
- {
- return std::move( value );
- }
-#endif
};
#if !defined( VULKAN_HPP_NO_SMART_HANDLE )
@@ -6545,28 +6435,13 @@ namespace VULKAN_HPP_NAMESPACE
# endif
: result( r )
, value( std::move( v ) )
- {}
-
- std::tuple<Result, UniqueHandle<Type, Dispatch>> asTuple()
{
- return std::make_tuple( result, std::move( value ) );
}
-# if !defined( VULKAN_HPP_DISABLE_IMPLICIT_RESULT_VALUE_CAST )
- VULKAN_HPP_DEPRECATED(
- "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." )
- operator UniqueHandle<Type, Dispatch> &() & VULKAN_HPP_NOEXCEPT
- {
- return value;
- }
-
- VULKAN_HPP_DEPRECATED(
- "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." )
- operator UniqueHandle<Type, Dispatch>() VULKAN_HPP_NOEXCEPT
+ std::tuple<Result, UniqueHandle<Type, Dispatch>> asTuple()
{
- return std::move( value );
+ return std::make_tuple( result, std::move( value ) );
}
-# endif
Result result;
UniqueHandle<Type, Dispatch> value;
@@ -6582,7 +6457,8 @@ namespace VULKAN_HPP_NAMESPACE
# endif
: result( r )
, value( std::move( v ) )
- {}
+ {
+ }
std::tuple<Result, std::vector<UniqueHandle<Type, Dispatch>>> asTuple()
{
@@ -6591,15 +6467,6 @@ namespace VULKAN_HPP_NAMESPACE
Result result;
std::vector<UniqueHandle<Type, Dispatch>> value;
-
-# if !defined( VULKAN_HPP_DISABLE_IMPLICIT_RESULT_VALUE_CAST )
- VULKAN_HPP_DEPRECATED(
- "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." )
- operator std::tuple<Result &, std::vector<UniqueHandle<Type, Dispatch>> &>() VULKAN_HPP_NOEXCEPT
- {
- return std::tuple<Result &, std::vector<UniqueHandle<Type, Dispatch>> &>( result, value );
- }
-# endif
};
#endif
@@ -6623,146 +6490,65 @@ namespace VULKAN_HPP_NAMESPACE
#endif
};
- VULKAN_HPP_INLINE ResultValueType<void>::type createResultValue( Result result, char const * message )
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type createResultValueType( Result result )
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
- ignore( message );
- VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess );
return result;
#else
- if ( result != Result::eSuccess )
- {
- throwResultException( result, message );
- }
+ ignore( result );
#endif
}
template <typename T>
- VULKAN_HPP_INLINE typename ResultValueType<T>::type createResultValue( Result result, T & data, char const * message )
+ VULKAN_HPP_INLINE typename ResultValueType<T>::type createResultValueType( Result result, T & data )
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
- ignore( message );
- VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess );
- return ResultValue<T>( result, std::move( data ) );
+ return ResultValue<T>( result, data );
#else
- if ( result != Result::eSuccess )
- {
- throwResultException( result, message );
- }
- return std::move( data );
+ ignore( result );
+ return data;
#endif
}
- VULKAN_HPP_INLINE Result createResultValue( Result result,
- char const * message,
- std::initializer_list<Result> successCodes )
- {
-#ifdef VULKAN_HPP_NO_EXCEPTIONS
- ignore( message );
- ignore( successCodes ); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
- VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
-#else
- if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
- {
- throwResultException( result, message );
- }
-#endif
- return result;
- }
-
template <typename T>
- VULKAN_HPP_INLINE ResultValue<T>
- createResultValue( Result result, T & data, char const * message, std::initializer_list<Result> successCodes )
+ VULKAN_HPP_INLINE typename ResultValueType<T>::type createResultValueType( Result result, T && data )
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
- ignore( message );
- ignore( successCodes ); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
- VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
+ return ResultValue<T>( result, std::move( data ) );
#else
- if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
- {
- throwResultException( result, message );
- }
+ ignore( result );
+ return std::move( data );
#endif
- return ResultValue<T>( result, std::move( data ) );
- }
-
-#ifndef VULKAN_HPP_NO_SMART_HANDLE
- template <typename T, typename D>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<T, D>>::type createResultValue(
- Result result, T & data, char const * message, typename UniqueHandleTraits<T, D>::deleter const & deleter )
- {
-# ifdef VULKAN_HPP_NO_EXCEPTIONS
- ignore( message );
- VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess );
- return ResultValue<UniqueHandle<T, D>>( result, UniqueHandle<T, D>( data, deleter ) );
-# else
- if ( result != Result::eSuccess )
- {
- throwResultException( result, message );
- }
- return UniqueHandle<T, D>( data, deleter );
-# endif
}
- template <typename T, typename D>
- VULKAN_HPP_INLINE ResultValue<UniqueHandle<T, D>>
- createResultValue( Result result,
- T & data,
- char const * message,
- std::initializer_list<Result> successCodes,
- typename UniqueHandleTraits<T, D>::deleter const & deleter )
+ VULKAN_HPP_INLINE void resultCheck( Result result, char const * message )
{
-# ifdef VULKAN_HPP_NO_EXCEPTIONS
- ignore( message );
- ignore( successCodes ); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
- VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
-# else
- if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
- {
- throwResultException( result, message );
- }
-# endif
- return ResultValue<UniqueHandle<T, D>>( result, UniqueHandle<T, D>( data, deleter ) );
- }
-
- template <typename T, typename D>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<T, D>>>::type
- createResultValue( Result result, std::vector<UniqueHandle<T, D>> && data, char const * message )
- {
-# ifdef VULKAN_HPP_NO_EXCEPTIONS
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ ignore( result ); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
ignore( message );
VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess );
- return ResultValue<std::vector<UniqueHandle<T, D>>>( result, std::move( data ) );
-# else
+#else
if ( result != Result::eSuccess )
{
throwResultException( result, message );
}
- return std::move( data );
-# endif
+#endif
}
- template <typename T, typename D>
- VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<T, D>>>
- createResultValue( Result result,
- std::vector<UniqueHandle<T, D>> && data,
- char const * message,
- std::initializer_list<Result> successCodes )
+ VULKAN_HPP_INLINE void resultCheck( Result result, char const * message, std::initializer_list<Result> successCodes )
{
-# ifdef VULKAN_HPP_NO_EXCEPTIONS
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ ignore( result ); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
ignore( message );
ignore( successCodes ); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
-# else
+#else
if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
{
throwResultException( result, message );
}
-# endif
- return ResultValue<std::vector<UniqueHandle<T, D>>>( result, std::move( data ) );
- }
#endif
+ }
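The old createResultValue overloads, which combined checking and value-wrapping, are replaced by the split resultCheck / createResultValueType pair defined above. A rough sketch of that calling convention, mirroring the pattern the generated wrappers follow, with illustrative names only:

// Illustrative only: a hand-written wrapper mimicking the generated pattern.
template <typename Dispatch>
typename vk::ResultValueType<vk::Semaphore>::type
  createSemaphoreChecked( vk::Device device, vk::SemaphoreCreateInfo const & createInfo, Dispatch const & d )
{
  vk::Semaphore semaphore;
  VkResult      result = d.vkCreateSemaphore( static_cast<VkDevice>( device ),
                                              reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ),
                                              nullptr,
                                              reinterpret_cast<VkSemaphore *>( &semaphore ) );
  // resultCheck() throws via throwResultException (or asserts when VULKAN_HPP_NO_EXCEPTIONS is set)...
  vk::resultCheck( static_cast<vk::Result>( result ), "createSemaphoreChecked" );
  // ...while createResultValueType() returns either the bare value or a ResultValue<T>,
  // matching ResultValueType<T>::type for the active configuration.
  return vk::createResultValueType( static_cast<vk::Result>( result ), semaphore );
}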
} // namespace VULKAN_HPP_NAMESPACE
// clang-format off
@@ -6773,10 +6559,22 @@ namespace VULKAN_HPP_NAMESPACE
namespace VULKAN_HPP_NAMESPACE
{
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+
//=======================
//=== STRUCTS EXTENDS ===
//=======================
+ //=== VK_VERSION_1_0 ===
+ template <>
+ struct StructExtends<ShaderModuleCreateInfo, PipelineShaderStageCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
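StructExtends is the compile-time whitelist that StructureChain consults before letting one struct be chained into another's pNext, so the new VK_VERSION_1_0 specialization allows a ShaderModuleCreateInfo to ride directly on a PipelineShaderStageCreateInfo. A hedged sketch under those assumptions (the SPIR-V vector is a placeholder, and drivers only honor the inlined module when the matching extension/feature, e.g. graphics pipeline library, is enabled):

#include <cstdint>
#include <vector>
#include <vulkan/vulkan.hpp>

// Illustrative only: build a shader stage whose pNext chain carries the module
// create info instead of referencing a separately created VkShaderModule.
vk::StructureChain<vk::PipelineShaderStageCreateInfo, vk::ShaderModuleCreateInfo>
  makeVertexStage( std::vector<uint32_t> const & spirv )
{
  vk::StructureChain<vk::PipelineShaderStageCreateInfo, vk::ShaderModuleCreateInfo> chain(
    vk::PipelineShaderStageCreateInfo( {}, vk::ShaderStageFlagBits::eVertex, nullptr, "main" ),
    vk::ShaderModuleCreateInfo( {}, spirv.size() * sizeof( uint32_t ), spirv.data() ) );
  // chain.get<vk::PipelineShaderStageCreateInfo>() is the struct handed to
  // GraphicsPipelineCreateInfo::pStages; StructExtends is what makes this pairing compile.
  return chain;
}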
//=== VK_VERSION_1_1 ===
template <>
struct StructExtends<PhysicalDeviceSubgroupProperties, PhysicalDeviceProperties2>
@@ -8055,10 +7853,10 @@ namespace VULKAN_HPP_NAMESPACE
};
};
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_video_queue ===
template <>
- struct StructExtends<QueueFamilyQueryResultStatusProperties2KHR, QueueFamilyProperties2>
+ struct StructExtends<QueueFamilyQueryResultStatusPropertiesKHR, QueueFamilyProperties2>
{
enum
{
@@ -8066,7 +7864,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoQueueFamilyProperties2KHR, QueueFamilyProperties2>
+ struct StructExtends<QueueFamilyVideoPropertiesKHR, QueueFamilyProperties2>
{
enum
{
@@ -8074,7 +7872,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoProfileKHR, QueryPoolCreateInfo>
+ struct StructExtends<VideoProfileInfoKHR, QueryPoolCreateInfo>
{
enum
{
@@ -8082,7 +7880,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoProfileKHR, FormatProperties2>
+ struct StructExtends<VideoProfileListInfoKHR, PhysicalDeviceImageFormatInfo2>
{
enum
{
@@ -8090,7 +7888,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoProfileKHR, ImageCreateInfo>
+ struct StructExtends<VideoProfileListInfoKHR, PhysicalDeviceVideoFormatInfoKHR>
{
enum
{
@@ -8098,7 +7896,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoProfileKHR, ImageViewCreateInfo>
+ struct StructExtends<VideoProfileListInfoKHR, ImageCreateInfo>
{
enum
{
@@ -8106,23 +7904,19 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoProfileKHR, BufferCreateInfo>
- {
- enum
- {
- value = true
- };
- };
- template <>
- struct StructExtends<VideoProfilesKHR, FormatProperties2>
+ struct StructExtends<VideoProfileListInfoKHR, BufferCreateInfo>
{
enum
{
value = true
};
};
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_KHR_video_decode_queue ===
template <>
- struct StructExtends<VideoProfilesKHR, ImageCreateInfo>
+ struct StructExtends<VideoDecodeCapabilitiesKHR, VideoCapabilitiesKHR>
{
enum
{
@@ -8130,7 +7924,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoProfilesKHR, ImageViewCreateInfo>
+ struct StructExtends<VideoDecodeUsageInfoKHR, VideoProfileInfoKHR>
{
enum
{
@@ -8138,14 +7932,14 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoProfilesKHR, BufferCreateInfo>
+ struct StructExtends<VideoDecodeUsageInfoKHR, QueryPoolCreateInfo>
{
enum
{
value = true
};
};
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_NV_dedicated_allocation ===
template <>
@@ -8207,7 +8001,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_EXT_video_encode_h264 ===
template <>
struct StructExtends<VideoEncodeH264CapabilitiesEXT, VideoCapabilitiesKHR>
@@ -8218,14 +8012,6 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoEncodeH264SessionCreateInfoEXT, VideoSessionCreateInfoKHR>
- {
- enum
- {
- value = true
- };
- };
- template <>
struct StructExtends<VideoEncodeH264SessionParametersCreateInfoEXT, VideoSessionParametersCreateInfoKHR>
{
enum
@@ -8250,23 +8036,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoEncodeH264EmitPictureParametersEXT, VideoEncodeInfoKHR>
- {
- enum
- {
- value = true
- };
- };
- template <>
- struct StructExtends<VideoEncodeH264ProfileEXT, VideoProfileKHR>
- {
- enum
- {
- value = true
- };
- };
- template <>
- struct StructExtends<VideoEncodeH264ProfileEXT, QueryPoolCreateInfo>
+ struct StructExtends<VideoEncodeH264EmitPictureParametersInfoEXT, VideoEncodeInfoKHR>
{
enum
{
@@ -8274,7 +8044,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoEncodeH264ProfileEXT, FormatProperties2>
+ struct StructExtends<VideoEncodeH264ProfileInfoEXT, VideoProfileInfoKHR>
{
enum
{
@@ -8282,7 +8052,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoEncodeH264ProfileEXT, ImageCreateInfo>
+ struct StructExtends<VideoEncodeH264ProfileInfoEXT, QueryPoolCreateInfo>
{
enum
{
@@ -8290,7 +8060,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoEncodeH264ProfileEXT, ImageViewCreateInfo>
+ struct StructExtends<VideoEncodeH264RateControlInfoEXT, VideoCodingControlInfoKHR>
{
enum
{
@@ -8298,15 +8068,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoEncodeH264ProfileEXT, BufferCreateInfo>
- {
- enum
- {
- value = true
- };
- };
- template <>
- struct StructExtends<VideoEncodeH264RateControlInfoEXT, VideoEncodeRateControlInfoKHR>
+ struct StructExtends<VideoEncodeH264RateControlLayerInfoEXT, VideoCodingControlInfoKHR>
{
enum
{
@@ -8321,9 +8083,9 @@ namespace VULKAN_HPP_NAMESPACE
value = true
};
};
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_EXT_video_encode_h265 ===
template <>
struct StructExtends<VideoEncodeH265CapabilitiesEXT, VideoCapabilitiesKHR>
@@ -8334,14 +8096,6 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoEncodeH265SessionCreateInfoEXT, VideoSessionCreateInfoKHR>
- {
- enum
- {
- value = true
- };
- };
- template <>
struct StructExtends<VideoEncodeH265SessionParametersCreateInfoEXT, VideoSessionParametersCreateInfoKHR>
{
enum
@@ -8366,7 +8120,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoEncodeH265EmitPictureParametersEXT, VideoEncodeInfoKHR>
+ struct StructExtends<VideoEncodeH265EmitPictureParametersInfoEXT, VideoEncodeInfoKHR>
{
enum
{
@@ -8374,7 +8128,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoEncodeH265ProfileEXT, VideoProfileKHR>
+ struct StructExtends<VideoEncodeH265ProfileInfoEXT, VideoProfileInfoKHR>
{
enum
{
@@ -8382,7 +8136,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoEncodeH265ProfileEXT, QueryPoolCreateInfo>
+ struct StructExtends<VideoEncodeH265ProfileInfoEXT, QueryPoolCreateInfo>
{
enum
{
@@ -8390,7 +8144,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoEncodeH265ProfileEXT, FormatProperties2>
+ struct StructExtends<VideoEncodeH265RateControlInfoEXT, VideoCodingControlInfoKHR>
{
enum
{
@@ -8398,7 +8152,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoEncodeH265ProfileEXT, ImageCreateInfo>
+ struct StructExtends<VideoEncodeH265RateControlLayerInfoEXT, VideoCodingControlInfoKHR>
{
enum
{
@@ -8406,15 +8160,19 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoEncodeH265ProfileEXT, ImageViewCreateInfo>
+ struct StructExtends<VideoEncodeH265RateControlLayerInfoEXT, VideoEncodeRateControlLayerInfoKHR>
{
enum
{
value = true
};
};
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_EXT_video_decode_h264 ===
template <>
- struct StructExtends<VideoEncodeH265ProfileEXT, BufferCreateInfo>
+ struct StructExtends<VideoDecodeH264ProfileInfoEXT, VideoProfileInfoKHR>
{
enum
{
@@ -8422,7 +8180,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoEncodeH265RateControlInfoEXT, VideoEncodeRateControlInfoKHR>
+ struct StructExtends<VideoDecodeH264ProfileInfoEXT, QueryPoolCreateInfo>
{
enum
{
@@ -8430,19 +8188,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoEncodeH265RateControlLayerInfoEXT, VideoEncodeRateControlLayerInfoKHR>
- {
- enum
- {
- value = true
- };
- };
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- //=== VK_EXT_video_decode_h264 ===
- template <>
- struct StructExtends<VideoDecodeH264ProfileEXT, VideoProfileKHR>
+ struct StructExtends<VideoDecodeH264CapabilitiesEXT, VideoCapabilitiesKHR>
{
enum
{
@@ -8450,7 +8196,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoDecodeH264ProfileEXT, QueryPoolCreateInfo>
+ struct StructExtends<VideoDecodeH264SessionParametersCreateInfoEXT, VideoSessionParametersCreateInfoKHR>
{
enum
{
@@ -8458,7 +8204,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoDecodeH264ProfileEXT, FormatProperties2>
+ struct StructExtends<VideoDecodeH264SessionParametersAddInfoEXT, VideoSessionParametersUpdateInfoKHR>
{
enum
{
@@ -8466,7 +8212,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoDecodeH264ProfileEXT, ImageCreateInfo>
+ struct StructExtends<VideoDecodeH264PictureInfoEXT, VideoDecodeInfoKHR>
{
enum
{
@@ -8474,23 +8220,28 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoDecodeH264ProfileEXT, ImageViewCreateInfo>
+ struct StructExtends<VideoDecodeH264DpbSlotInfoEXT, VideoReferenceSlotInfoKHR>
{
enum
{
value = true
};
};
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ //=== VK_AMD_texture_gather_bias_lod ===
template <>
- struct StructExtends<VideoDecodeH264ProfileEXT, BufferCreateInfo>
+ struct StructExtends<TextureLODGatherFormatPropertiesAMD, ImageFormatProperties2>
{
enum
{
value = true
};
};
+
+ //=== VK_KHR_dynamic_rendering ===
template <>
- struct StructExtends<VideoDecodeH264CapabilitiesEXT, VideoCapabilitiesKHR>
+ struct StructExtends<RenderingFragmentShadingRateAttachmentInfoKHR, RenderingInfo>
{
enum
{
@@ -8498,7 +8249,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoDecodeH264SessionCreateInfoEXT, VideoSessionCreateInfoKHR>
+ struct StructExtends<RenderingFragmentDensityMapAttachmentInfoEXT, RenderingInfo>
{
enum
{
@@ -8506,7 +8257,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoDecodeH264SessionParametersCreateInfoEXT, VideoSessionParametersCreateInfoKHR>
+ struct StructExtends<AttachmentSampleCountInfoAMD, CommandBufferInheritanceInfo>
{
enum
{
@@ -8514,7 +8265,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoDecodeH264SessionParametersAddInfoEXT, VideoSessionParametersUpdateInfoKHR>
+ struct StructExtends<AttachmentSampleCountInfoAMD, GraphicsPipelineCreateInfo>
{
enum
{
@@ -8522,7 +8273,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoDecodeH264PictureInfoEXT, VideoDecodeInfoKHR>
+ struct StructExtends<MultiviewPerViewAttributesInfoNVX, CommandBufferInheritanceInfo>
{
enum
{
@@ -8530,7 +8281,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoDecodeH264MvcEXT, VideoDecodeH264PictureInfoEXT>
+ struct StructExtends<MultiviewPerViewAttributesInfoNVX, GraphicsPipelineCreateInfo>
{
enum
{
@@ -8538,36 +8289,35 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoDecodeH264DpbSlotInfoEXT, VideoReferenceSlotKHR>
+ struct StructExtends<MultiviewPerViewAttributesInfoNVX, RenderingInfo>
{
enum
{
value = true
};
};
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- //=== VK_AMD_texture_gather_bias_lod ===
+ //=== VK_NV_corner_sampled_image ===
template <>
- struct StructExtends<TextureLODGatherFormatPropertiesAMD, ImageFormatProperties2>
+ struct StructExtends<PhysicalDeviceCornerSampledImageFeaturesNV, PhysicalDeviceFeatures2>
{
enum
{
value = true
};
};
-
- //=== VK_KHR_dynamic_rendering ===
template <>
- struct StructExtends<RenderingFragmentShadingRateAttachmentInfoKHR, RenderingInfo>
+ struct StructExtends<PhysicalDeviceCornerSampledImageFeaturesNV, DeviceCreateInfo>
{
enum
{
value = true
};
};
+
+ //=== VK_NV_external_memory ===
template <>
- struct StructExtends<RenderingFragmentDensityMapAttachmentInfoEXT, RenderingInfo>
+ struct StructExtends<ExternalMemoryImageCreateInfoNV, ImageCreateInfo>
{
enum
{
@@ -8575,15 +8325,18 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<AttachmentSampleCountInfoAMD, CommandBufferInheritanceInfo>
+ struct StructExtends<ExportMemoryAllocateInfoNV, MemoryAllocateInfo>
{
enum
{
value = true
};
};
+
+# if defined( VK_USE_PLATFORM_WIN32_KHR )
+ //=== VK_NV_external_memory_win32 ===
template <>
- struct StructExtends<AttachmentSampleCountInfoAMD, GraphicsPipelineCreateInfo>
+ struct StructExtends<ImportMemoryWin32HandleInfoNV, MemoryAllocateInfo>
{
enum
{
@@ -8591,15 +8344,19 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<MultiviewPerViewAttributesInfoNVX, CommandBufferInheritanceInfo>
+ struct StructExtends<ExportMemoryWin32HandleInfoNV, MemoryAllocateInfo>
{
enum
{
value = true
};
};
+# endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+# if defined( VK_USE_PLATFORM_WIN32_KHR )
+ //=== VK_NV_win32_keyed_mutex ===
template <>
- struct StructExtends<MultiviewPerViewAttributesInfoNVX, GraphicsPipelineCreateInfo>
+ struct StructExtends<Win32KeyedMutexAcquireReleaseInfoNV, SubmitInfo>
{
enum
{
@@ -8607,35 +8364,36 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<MultiviewPerViewAttributesInfoNVX, RenderingInfo>
+ struct StructExtends<Win32KeyedMutexAcquireReleaseInfoNV, SubmitInfo2>
{
enum
{
value = true
};
};
+# endif /*VK_USE_PLATFORM_WIN32_KHR*/
- //=== VK_NV_corner_sampled_image ===
+ //=== VK_EXT_validation_flags ===
template <>
- struct StructExtends<PhysicalDeviceCornerSampledImageFeaturesNV, PhysicalDeviceFeatures2>
+ struct StructExtends<ValidationFlagsEXT, InstanceCreateInfo>
{
enum
{
value = true
};
};
+
+ //=== VK_EXT_astc_decode_mode ===
template <>
- struct StructExtends<PhysicalDeviceCornerSampledImageFeaturesNV, DeviceCreateInfo>
+ struct StructExtends<ImageViewASTCDecodeModeEXT, ImageViewCreateInfo>
{
enum
{
value = true
};
};
-
- //=== VK_NV_external_memory ===
template <>
- struct StructExtends<ExternalMemoryImageCreateInfoNV, ImageCreateInfo>
+ struct StructExtends<PhysicalDeviceASTCDecodeFeaturesEXT, PhysicalDeviceFeatures2>
{
enum
{
@@ -8643,7 +8401,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<ExportMemoryAllocateInfoNV, MemoryAllocateInfo>
+ struct StructExtends<PhysicalDeviceASTCDecodeFeaturesEXT, DeviceCreateInfo>
{
enum
{
@@ -8651,10 +8409,9 @@ namespace VULKAN_HPP_NAMESPACE
};
};
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
- //=== VK_NV_external_memory_win32 ===
+ //=== VK_EXT_pipeline_robustness ===
template <>
- struct StructExtends<ImportMemoryWin32HandleInfoNV, MemoryAllocateInfo>
+ struct StructExtends<PhysicalDevicePipelineRobustnessFeaturesEXT, PhysicalDeviceFeatures2>
{
enum
{
@@ -8662,19 +8419,15 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<ExportMemoryWin32HandleInfoNV, MemoryAllocateInfo>
+ struct StructExtends<PhysicalDevicePipelineRobustnessFeaturesEXT, DeviceCreateInfo>
{
enum
{
value = true
};
};
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
- //=== VK_NV_win32_keyed_mutex ===
template <>
- struct StructExtends<Win32KeyedMutexAcquireReleaseInfoNV, SubmitInfo>
+ struct StructExtends<PhysicalDevicePipelineRobustnessPropertiesEXT, PhysicalDeviceProperties2>
{
enum
{
@@ -8682,28 +8435,23 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<Win32KeyedMutexAcquireReleaseInfoNV, SubmitInfo2>
+ struct StructExtends<PipelineRobustnessCreateInfoEXT, GraphicsPipelineCreateInfo>
{
enum
{
value = true
};
};
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
- //=== VK_EXT_validation_flags ===
template <>
- struct StructExtends<ValidationFlagsEXT, InstanceCreateInfo>
+ struct StructExtends<PipelineRobustnessCreateInfoEXT, ComputePipelineCreateInfo>
{
enum
{
value = true
};
};
-
- //=== VK_EXT_astc_decode_mode ===
template <>
- struct StructExtends<ImageViewASTCDecodeModeEXT, ImageViewCreateInfo>
+ struct StructExtends<PipelineRobustnessCreateInfoEXT, PipelineShaderStageCreateInfo>
{
enum
{
@@ -8711,15 +8459,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<PhysicalDeviceASTCDecodeFeaturesEXT, PhysicalDeviceFeatures2>
- {
- enum
- {
- value = true
- };
- };
- template <>
- struct StructExtends<PhysicalDeviceASTCDecodeFeaturesEXT, DeviceCreateInfo>
+ struct StructExtends<PipelineRobustnessCreateInfoEXT, RayTracingPipelineCreateInfoKHR>
{
enum
{
@@ -8727,7 +8467,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
+# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_memory_win32 ===
template <>
struct StructExtends<ImportMemoryWin32HandleInfoKHR, MemoryAllocateInfo>
@@ -8745,7 +8485,7 @@ namespace VULKAN_HPP_NAMESPACE
value = true
};
};
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_memory_fd ===
template <>
@@ -8757,7 +8497,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
+# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_win32_keyed_mutex ===
template <>
struct StructExtends<Win32KeyedMutexAcquireReleaseInfoKHR, SubmitInfo>
@@ -8775,9 +8515,9 @@ namespace VULKAN_HPP_NAMESPACE
value = true
};
};
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+# endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
+# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_semaphore_win32 ===
template <>
struct StructExtends<ExportSemaphoreWin32HandleInfoKHR, SemaphoreCreateInfo>
@@ -8795,7 +8535,7 @@ namespace VULKAN_HPP_NAMESPACE
value = true
};
};
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_push_descriptor ===
template <>
@@ -8965,7 +8705,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
+# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_fence_win32 ===
template <>
struct StructExtends<ExportFenceWin32HandleInfoKHR, FenceCreateInfo>
@@ -8975,7 +8715,7 @@ namespace VULKAN_HPP_NAMESPACE
value = true
};
};
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_performance_query ===
template <>
@@ -9036,8 +8776,16 @@ namespace VULKAN_HPP_NAMESPACE
value = true
};
};
+ template <>
+ struct StructExtends<DebugUtilsObjectNameInfoEXT, PipelineShaderStageCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
-#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+# if defined( VK_USE_PLATFORM_ANDROID_KHR )
//=== VK_ANDROID_external_memory_android_hardware_buffer ===
template <>
struct StructExtends<AndroidHardwareBufferUsageANDROID, ImageFormatProperties2>
@@ -9087,7 +8835,7 @@ namespace VULKAN_HPP_NAMESPACE
value = true
};
};
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
//=== VK_EXT_sample_locations ===
template <>
@@ -9296,8 +9044,16 @@ namespace VULKAN_HPP_NAMESPACE
value = true
};
};
+ template <>
+ struct StructExtends<ShaderModuleValidationCacheCreateInfoEXT, PipelineShaderStageCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_portability_subset ===
template <>
struct StructExtends<PhysicalDevicePortabilitySubsetFeaturesKHR, PhysicalDeviceFeatures2>
@@ -9323,7 +9079,7 @@ namespace VULKAN_HPP_NAMESPACE
value = true
};
};
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_NV_shading_rate_image ===
template <>
@@ -9493,42 +9249,10 @@ namespace VULKAN_HPP_NAMESPACE
};
};
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_EXT_video_decode_h265 ===
template <>
- struct StructExtends<VideoDecodeH265ProfileEXT, VideoProfileKHR>
- {
- enum
- {
- value = true
- };
- };
- template <>
- struct StructExtends<VideoDecodeH265ProfileEXT, QueryPoolCreateInfo>
- {
- enum
- {
- value = true
- };
- };
- template <>
- struct StructExtends<VideoDecodeH265ProfileEXT, FormatProperties2>
- {
- enum
- {
- value = true
- };
- };
- template <>
- struct StructExtends<VideoDecodeH265ProfileEXT, ImageCreateInfo>
- {
- enum
- {
- value = true
- };
- };
- template <>
- struct StructExtends<VideoDecodeH265ProfileEXT, ImageViewCreateInfo>
+ struct StructExtends<VideoDecodeH265ProfileInfoEXT, VideoProfileInfoKHR>
{
enum
{
@@ -9536,7 +9260,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoDecodeH265ProfileEXT, BufferCreateInfo>
+ struct StructExtends<VideoDecodeH265ProfileInfoEXT, QueryPoolCreateInfo>
{
enum
{
@@ -9552,14 +9276,6 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoDecodeH265SessionCreateInfoEXT, VideoSessionCreateInfoKHR>
- {
- enum
- {
- value = true
- };
- };
- template <>
struct StructExtends<VideoDecodeH265SessionParametersCreateInfoEXT, VideoSessionParametersCreateInfoKHR>
{
enum
@@ -9584,14 +9300,14 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<VideoDecodeH265DpbSlotInfoEXT, VideoReferenceSlotKHR>
+ struct StructExtends<VideoDecodeH265DpbSlotInfoEXT, VideoReferenceSlotInfoKHR>
{
enum
{
value = true
};
};
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_KHR_global_priority ===
template <>
@@ -9671,7 +9387,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
-#if defined( VK_USE_PLATFORM_GGP )
+# if defined( VK_USE_PLATFORM_GGP )
//=== VK_GGP_frame_token ===
template <>
struct StructExtends<PresentFrameTokenGGP, PresentInfoKHR>
@@ -9681,7 +9397,7 @@ namespace VULKAN_HPP_NAMESPACE
value = true
};
};
-#endif /*VK_USE_PLATFORM_GGP*/
+# endif /*VK_USE_PLATFORM_GGP*/
//=== VK_NV_compute_shader_derivatives ===
template <>
@@ -9727,24 +9443,6 @@ namespace VULKAN_HPP_NAMESPACE
};
};
- //=== VK_NV_fragment_shader_barycentric ===
- template <>
- struct StructExtends<PhysicalDeviceFragmentShaderBarycentricFeaturesNV, PhysicalDeviceFeatures2>
- {
- enum
- {
- value = true
- };
- };
- template <>
- struct StructExtends<PhysicalDeviceFragmentShaderBarycentricFeaturesNV, DeviceCreateInfo>
- {
- enum
- {
- value = true
- };
- };
-
//=== VK_NV_shader_image_footprint ===
template <>
struct StructExtends<PhysicalDeviceShaderImageFootprintFeaturesNV, PhysicalDeviceFeatures2>
@@ -10225,7 +9923,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
+# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_EXT_full_screen_exclusive ===
template <>
struct StructExtends<SurfaceFullScreenExclusiveInfoEXT, PhysicalDeviceSurfaceInfo2KHR>
@@ -10267,7 +9965,7 @@ namespace VULKAN_HPP_NAMESPACE
value = true
};
};
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_EXT_line_rasterization ===
template <>
@@ -10575,6 +10273,50 @@ namespace VULKAN_HPP_NAMESPACE
};
};
+ //=== VK_KHR_pipeline_library ===
+ template <>
+ struct StructExtends<PipelineLibraryCreateInfoKHR, GraphicsPipelineCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
+ //=== VK_NV_present_barrier ===
+ template <>
+ struct StructExtends<PhysicalDevicePresentBarrierFeaturesNV, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDevicePresentBarrierFeaturesNV, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<SurfaceCapabilitiesPresentBarrierNV, SurfaceCapabilities2KHR>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<SwapchainPresentBarrierCreateInfoNV, SwapchainCreateInfoKHR>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
//=== VK_KHR_present_id ===
template <>
struct StructExtends<PresentIdKHR, PresentInfoKHR>
@@ -10601,9 +10343,33 @@ namespace VULKAN_HPP_NAMESPACE
};
};
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_video_encode_queue ===
template <>
+ struct StructExtends<VideoEncodeCapabilitiesKHR, VideoCapabilitiesKHR>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<VideoEncodeUsageInfoKHR, VideoProfileInfoKHR>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<VideoEncodeUsageInfoKHR, QueryPoolCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
struct StructExtends<VideoEncodeRateControlInfoKHR, VideoCodingControlInfoKHR>
{
enum
@@ -10619,7 +10385,7 @@ namespace VULKAN_HPP_NAMESPACE
value = true
};
};
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_NV_device_diagnostics_config ===
template <>
@@ -10647,6 +10413,154 @@ namespace VULKAN_HPP_NAMESPACE
};
};
+# if defined( VK_USE_PLATFORM_METAL_EXT )
+ //=== VK_EXT_metal_objects ===
+ template <>
+ struct StructExtends<ExportMetalObjectCreateInfoEXT, InstanceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<ExportMetalObjectCreateInfoEXT, MemoryAllocateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<ExportMetalObjectCreateInfoEXT, ImageCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<ExportMetalObjectCreateInfoEXT, ImageViewCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<ExportMetalObjectCreateInfoEXT, BufferViewCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<ExportMetalObjectCreateInfoEXT, SemaphoreCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<ExportMetalObjectCreateInfoEXT, EventCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<ExportMetalDeviceInfoEXT, ExportMetalObjectsInfoEXT>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<ExportMetalCommandQueueInfoEXT, ExportMetalObjectsInfoEXT>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<ExportMetalBufferInfoEXT, ExportMetalObjectsInfoEXT>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<ImportMetalBufferInfoEXT, MemoryAllocateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<ExportMetalTextureInfoEXT, ExportMetalObjectsInfoEXT>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<ImportMetalTextureInfoEXT, ImageCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<ExportMetalIOSurfaceInfoEXT, ExportMetalObjectsInfoEXT>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<ImportMetalIOSurfaceInfoEXT, ImageCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<ExportMetalSharedEventInfoEXT, ExportMetalObjectsInfoEXT>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<ImportMetalSharedEventInfoEXT, SemaphoreCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<ImportMetalSharedEventInfoEXT, EventCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+# endif /*VK_USE_PLATFORM_METAL_EXT*/
+
//=== VK_KHR_synchronization2 ===
template <>
struct StructExtends<QueueFamilyCheckpointProperties2NV, QueueFamilyProperties2>
@@ -10657,6 +10571,84 @@ namespace VULKAN_HPP_NAMESPACE
};
};
+ //=== VK_EXT_graphics_pipeline_library ===
+ template <>
+ struct StructExtends<PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT, PhysicalDeviceProperties2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<GraphicsPipelineLibraryCreateInfoEXT, GraphicsPipelineCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
+ //=== VK_AMD_shader_early_and_late_fragment_tests ===
+ template <>
+ struct StructExtends<PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
+ //=== VK_KHR_fragment_shader_barycentric ===
+ template <>
+ struct StructExtends<PhysicalDeviceFragmentShaderBarycentricFeaturesKHR, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceFragmentShaderBarycentricFeaturesKHR, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceFragmentShaderBarycentricPropertiesKHR, PhysicalDeviceProperties2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
//=== VK_KHR_shader_subgroup_uniform_control_flow ===
template <>
struct StructExtends<PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR, PhysicalDeviceFeatures2>
@@ -10711,8 +10703,7 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_ray_tracing_motion_blur ===
template <>
- struct StructExtends<AccelerationStructureGeometryMotionTrianglesDataNV,
- AccelerationStructureGeometryTrianglesDataKHR>
+ struct StructExtends<AccelerationStructureGeometryMotionTrianglesDataNV, AccelerationStructureGeometryTrianglesDataKHR>
{
enum
{
@@ -10744,6 +10735,32 @@ namespace VULKAN_HPP_NAMESPACE
};
};
+ //=== VK_EXT_mesh_shader ===
+ template <>
+ struct StructExtends<PhysicalDeviceMeshShaderFeaturesEXT, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceMeshShaderFeaturesEXT, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceMeshShaderPropertiesEXT, PhysicalDeviceProperties2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
//=== VK_EXT_ycbcr_2plane_444_formats ===
template <>
struct StructExtends<PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT, PhysicalDeviceFeatures2>
@@ -10824,6 +10841,90 @@ namespace VULKAN_HPP_NAMESPACE
};
};
+ //=== VK_EXT_image_compression_control ===
+ template <>
+ struct StructExtends<PhysicalDeviceImageCompressionControlFeaturesEXT, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceImageCompressionControlFeaturesEXT, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<ImageCompressionControlEXT, ImageCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<ImageCompressionControlEXT, SwapchainCreateInfoKHR>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<ImageCompressionControlEXT, PhysicalDeviceImageFormatInfo2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<ImageCompressionPropertiesEXT, ImageFormatProperties2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<ImageCompressionPropertiesEXT, SurfaceFormat2KHR>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<ImageCompressionPropertiesEXT, SubresourceLayout2EXT>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
+ //=== VK_EXT_attachment_feedback_loop_layout ===
+ template <>
+ struct StructExtends<PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
//=== VK_EXT_4444_formats ===
template <>
struct StructExtends<PhysicalDevice4444FormatsFeaturesEXT, PhysicalDeviceFeatures2>
@@ -10842,9 +10943,9 @@ namespace VULKAN_HPP_NAMESPACE
};
};
- //=== VK_ARM_rasterization_order_attachment_access ===
+ //=== VK_EXT_device_fault ===
template <>
- struct StructExtends<PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM, PhysicalDeviceFeatures2>
+ struct StructExtends<PhysicalDeviceFaultFeaturesEXT, PhysicalDeviceFeatures2>
{
enum
{
@@ -10852,7 +10953,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM, DeviceCreateInfo>
+ struct StructExtends<PhysicalDeviceFaultFeaturesEXT, DeviceCreateInfo>
{
enum
{
@@ -10922,17 +11023,9 @@ namespace VULKAN_HPP_NAMESPACE
};
};
- //=== VK_VALVE_mutable_descriptor_type ===
- template <>
- struct StructExtends<PhysicalDeviceMutableDescriptorTypeFeaturesVALVE, PhysicalDeviceFeatures2>
- {
- enum
- {
- value = true
- };
- };
+ //=== VK_EXT_vertex_input_dynamic_state ===
template <>
- struct StructExtends<PhysicalDeviceMutableDescriptorTypeFeaturesVALVE, DeviceCreateInfo>
+ struct StructExtends<PhysicalDeviceVertexInputDynamicStateFeaturesEXT, PhysicalDeviceFeatures2>
{
enum
{
@@ -10940,15 +11033,17 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<MutableDescriptorTypeCreateInfoVALVE, DescriptorSetLayoutCreateInfo>
+ struct StructExtends<PhysicalDeviceVertexInputDynamicStateFeaturesEXT, DeviceCreateInfo>
{
enum
{
value = true
};
};
+
+ //=== VK_EXT_physical_device_drm ===
template <>
- struct StructExtends<MutableDescriptorTypeCreateInfoVALVE, DescriptorPoolCreateInfo>
+ struct StructExtends<PhysicalDeviceDrmPropertiesEXT, PhysicalDeviceProperties2>
{
enum
{
@@ -10956,9 +11051,9 @@ namespace VULKAN_HPP_NAMESPACE
};
};
- //=== VK_EXT_vertex_input_dynamic_state ===
+ //=== VK_EXT_device_address_binding_report ===
template <>
- struct StructExtends<PhysicalDeviceVertexInputDynamicStateFeaturesEXT, PhysicalDeviceFeatures2>
+ struct StructExtends<PhysicalDeviceAddressBindingReportFeaturesEXT, PhysicalDeviceFeatures2>
{
enum
{
@@ -10966,17 +11061,15 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
- struct StructExtends<PhysicalDeviceVertexInputDynamicStateFeaturesEXT, DeviceCreateInfo>
+ struct StructExtends<PhysicalDeviceAddressBindingReportFeaturesEXT, DeviceCreateInfo>
{
enum
{
value = true
};
};
-
- //=== VK_EXT_physical_device_drm ===
template <>
- struct StructExtends<PhysicalDeviceDrmPropertiesEXT, PhysicalDeviceProperties2>
+ struct StructExtends<DeviceAddressBindingCallbackDataEXT, DebugUtilsMessengerCallbackDataEXT>
{
enum
{
@@ -11028,7 +11121,7 @@ namespace VULKAN_HPP_NAMESPACE
};
};
-#if defined( VK_USE_PLATFORM_FUCHSIA )
+# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_external_memory ===
template <>
struct StructExtends<ImportMemoryZirconHandleInfoFUCHSIA, MemoryAllocateInfo>
@@ -11038,9 +11131,9 @@ namespace VULKAN_HPP_NAMESPACE
value = true
};
};
-#endif /*VK_USE_PLATFORM_FUCHSIA*/
+# endif /*VK_USE_PLATFORM_FUCHSIA*/
-#if defined( VK_USE_PLATFORM_FUCHSIA )
+# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
template <>
struct StructExtends<ImportMemoryBufferCollectionFUCHSIA, MemoryAllocateInfo>
@@ -11066,7 +11159,7 @@ namespace VULKAN_HPP_NAMESPACE
value = true
};
};
-#endif /*VK_USE_PLATFORM_FUCHSIA*/
+# endif /*VK_USE_PLATFORM_FUCHSIA*/
//=== VK_HUAWEI_subpass_shading ===
template <>
@@ -11138,6 +11231,66 @@ namespace VULKAN_HPP_NAMESPACE
};
};
+ //=== VK_EXT_pipeline_properties ===
+ template <>
+ struct StructExtends<PhysicalDevicePipelinePropertiesFeaturesEXT, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDevicePipelinePropertiesFeaturesEXT, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
+ //=== VK_EXT_multisampled_render_to_single_sampled ===
+ template <>
+ struct StructExtends<PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<SubpassResolvePerformanceQueryEXT, FormatProperties2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<MultisampledRenderToSingleSampledInfoEXT, SubpassDescription2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<MultisampledRenderToSingleSampledInfoEXT, RenderingInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
//=== VK_EXT_extended_dynamic_state2 ===
template <>
struct StructExtends<PhysicalDeviceExtendedDynamicState2FeaturesEXT, PhysicalDeviceFeatures2>
@@ -11182,6 +11335,42 @@ namespace VULKAN_HPP_NAMESPACE
};
};
+ //=== VK_EXT_primitives_generated_query ===
+ template <>
+ struct StructExtends<PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
+ //=== VK_KHR_ray_tracing_maintenance1 ===
+ template <>
+ struct StructExtends<PhysicalDeviceRayTracingMaintenance1FeaturesKHR, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceRayTracingMaintenance1FeaturesKHR, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
//=== VK_EXT_image_view_min_lod ===
template <>
struct StructExtends<PhysicalDeviceImageViewMinLodFeaturesEXT, PhysicalDeviceFeatures2>
@@ -11234,6 +11423,58 @@ namespace VULKAN_HPP_NAMESPACE
};
};
+ //=== VK_EXT_image_2d_view_of_3d ===
+ template <>
+ struct StructExtends<PhysicalDeviceImage2DViewOf3DFeaturesEXT, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceImage2DViewOf3DFeaturesEXT, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
+ //=== VK_EXT_opacity_micromap ===
+ template <>
+ struct StructExtends<PhysicalDeviceOpacityMicromapFeaturesEXT, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceOpacityMicromapFeaturesEXT, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceOpacityMicromapPropertiesEXT, PhysicalDeviceProperties2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<AccelerationStructureTrianglesOpacityMicromapEXT, AccelerationStructureGeometryTrianglesDataKHR>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
//=== VK_EXT_border_color_swizzle ===
template <>
struct StructExtends<PhysicalDeviceBorderColorSwizzleFeaturesEXT, PhysicalDeviceFeatures2>
@@ -11278,6 +11519,60 @@ namespace VULKAN_HPP_NAMESPACE
};
};
+ //=== VK_VALVE_descriptor_set_host_mapping ===
+ template <>
+ struct StructExtends<PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
+ //=== VK_EXT_depth_clamp_zero_one ===
+ template <>
+ struct StructExtends<PhysicalDeviceDepthClampZeroOneFeaturesEXT, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceDepthClampZeroOneFeaturesEXT, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
+ //=== VK_EXT_non_seamless_cube_map ===
+ template <>
+ struct StructExtends<PhysicalDeviceNonSeamlessCubeMapFeaturesEXT, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceNonSeamlessCubeMapFeaturesEXT, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
//=== VK_QCOM_fragment_density_map_offset ===
template <>
struct StructExtends<PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM, PhysicalDeviceFeatures2>
@@ -11330,6 +11625,378 @@ namespace VULKAN_HPP_NAMESPACE
};
};
+ //=== VK_EXT_image_compression_control_swapchain ===
+ template <>
+ struct StructExtends<PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
+ //=== VK_QCOM_image_processing ===
+ template <>
+ struct StructExtends<ImageViewSampleWeightCreateInfoQCOM, ImageViewCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceImageProcessingFeaturesQCOM, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceImageProcessingFeaturesQCOM, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceImageProcessingPropertiesQCOM, PhysicalDeviceProperties2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
+ //=== VK_EXT_extended_dynamic_state3 ===
+ template <>
+ struct StructExtends<PhysicalDeviceExtendedDynamicState3FeaturesEXT, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceExtendedDynamicState3FeaturesEXT, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceExtendedDynamicState3PropertiesEXT, PhysicalDeviceProperties2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
+ //=== VK_EXT_subpass_merge_feedback ===
+ template <>
+ struct StructExtends<PhysicalDeviceSubpassMergeFeedbackFeaturesEXT, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceSubpassMergeFeedbackFeaturesEXT, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<RenderPassCreationControlEXT, RenderPassCreateInfo2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<RenderPassCreationControlEXT, SubpassDescription2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<RenderPassCreationFeedbackCreateInfoEXT, RenderPassCreateInfo2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<RenderPassSubpassFeedbackCreateInfoEXT, SubpassDescription2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
+ //=== VK_EXT_shader_module_identifier ===
+ template <>
+ struct StructExtends<PhysicalDeviceShaderModuleIdentifierFeaturesEXT, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceShaderModuleIdentifierFeaturesEXT, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceShaderModuleIdentifierPropertiesEXT, PhysicalDeviceProperties2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PipelineShaderStageModuleIdentifierCreateInfoEXT, PipelineShaderStageCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
+ //=== VK_EXT_rasterization_order_attachment_access ===
+ template <>
+ struct StructExtends<PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
+ //=== VK_NV_optical_flow ===
+ template <>
+ struct StructExtends<PhysicalDeviceOpticalFlowFeaturesNV, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceOpticalFlowFeaturesNV, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceOpticalFlowPropertiesNV, PhysicalDeviceProperties2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<OpticalFlowImageFormatInfoNV, PhysicalDeviceImageFormatInfo2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<OpticalFlowImageFormatInfoNV, ImageCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<OpticalFlowSessionCreatePrivateDataInfoNV, OpticalFlowSessionCreateInfoNV>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
+ //=== VK_EXT_legacy_dithering ===
+ template <>
+ struct StructExtends<PhysicalDeviceLegacyDitheringFeaturesEXT, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceLegacyDitheringFeaturesEXT, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
+ //=== VK_EXT_pipeline_protected_access ===
+ template <>
+ struct StructExtends<PhysicalDevicePipelineProtectedAccessFeaturesEXT, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDevicePipelineProtectedAccessFeaturesEXT, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
+ //=== VK_QCOM_tile_properties ===
+ template <>
+ struct StructExtends<PhysicalDeviceTilePropertiesFeaturesQCOM, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceTilePropertiesFeaturesQCOM, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
+ //=== VK_SEC_amigo_profiling ===
+ template <>
+ struct StructExtends<PhysicalDeviceAmigoProfilingFeaturesSEC, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceAmigoProfilingFeaturesSEC, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<AmigoProfilingSubmitInfoSEC, SubmitInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
+ //=== VK_EXT_mutable_descriptor_type ===
+ template <>
+ struct StructExtends<PhysicalDeviceMutableDescriptorTypeFeaturesEXT, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceMutableDescriptorTypeFeaturesEXT, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<MutableDescriptorTypeCreateInfoEXT, DescriptorSetLayoutCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<MutableDescriptorTypeCreateInfoEXT, DescriptorPoolCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
+ //=== VK_ARM_shader_core_builtins ===
+ template <>
+ struct StructExtends<PhysicalDeviceShaderCoreBuiltinsFeaturesARM, PhysicalDeviceFeatures2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceShaderCoreBuiltinsFeaturesARM, DeviceCreateInfo>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+ template <>
+ struct StructExtends<PhysicalDeviceShaderCoreBuiltinsPropertiesARM, PhysicalDeviceProperties2>
+ {
+ enum
+ {
+ value = true
+ };
+ };
+
+#endif // VULKAN_HPP_DISABLE_ENHANCED_MODE
+
#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
class DynamicLoader
{
@@ -11370,8 +12037,7 @@ namespace VULKAN_HPP_NAMESPACE
# ifndef VULKAN_HPP_NO_EXCEPTIONS
if ( m_library == nullptr )
{
- // NOTE there should be an InitializationFailedError, but msvc insists on the symbol does not exist within the
- // scope of this function.
+ // NOTE there should be an InitializationFailedError, but msvc insists that the symbol does not exist within the scope of this function.
throw std::runtime_error( "Failed to load vulkan library!" );
}
# endif
@@ -11962,12 +12628,10 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0;
//=== VK_KHR_performance_query ===
- PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR
- vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0;
- PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR
- vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0;
- PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0;
- PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0;
+ PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0;
+ PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0;
+ PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0;
+ PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0;
//=== VK_KHR_get_surface_capabilities2 ===
PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0;
@@ -12157,8 +12821,7 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0;
//=== VK_NV_coverage_reduction_mode ===
- PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV
- vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0;
+ PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0;
#if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_EXT_full_screen_exclusive ===
@@ -12238,6 +12901,13 @@ namespace VULKAN_HPP_NAMESPACE
PFN_dummy vkCmdEncodeVideoKHR_placeholder = 0;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ //=== VK_EXT_metal_objects ===
+ PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT = 0;
+#else
+ PFN_dummy vkExportMetalObjectsEXT_placeholder = 0;
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
//=== VK_KHR_synchronization2 ===
PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0;
PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0;
@@ -12251,6 +12921,11 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_fragment_shading_rate_enums ===
PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0;
+ //=== VK_EXT_mesh_shader ===
+ PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT = 0;
+ PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT = 0;
+ PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT = 0;
+
//=== VK_KHR_copy_commands2 ===
PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0;
PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0;
@@ -12259,6 +12934,12 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0;
PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0;
+ //=== VK_EXT_image_compression_control ===
+ PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0;
+
+ //=== VK_EXT_device_fault ===
+ PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT = 0;
+
#if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_NV_acquire_winrt_display ===
PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0;
@@ -12332,6 +13013,9 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_external_memory_rdma ===
PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV = 0;
+ //=== VK_EXT_pipeline_properties ===
+ PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT = 0;
+
//=== VK_EXT_extended_dynamic_state2 ===
PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT = 0;
PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = 0;
@@ -12351,10 +13035,29 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_color_write_enable ===
PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT = 0;
+ //=== VK_KHR_ray_tracing_maintenance1 ===
+ PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR = 0;
+
//=== VK_EXT_multi_draw ===
PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT = 0;
PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT = 0;
+ //=== VK_EXT_opacity_micromap ===
+ PFN_vkCreateMicromapEXT vkCreateMicromapEXT = 0;
+ PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT = 0;
+ PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT = 0;
+ PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT = 0;
+ PFN_vkCopyMicromapEXT vkCopyMicromapEXT = 0;
+ PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT = 0;
+ PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT = 0;
+ PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT = 0;
+ PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT = 0;
+ PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT = 0;
+ PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT = 0;
+ PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT = 0;
+ PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT = 0;
+ PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT = 0;
+
//=== VK_EXT_pageable_device_local_memory ===
PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT = 0;
@@ -12363,37 +13066,79 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR = 0;
PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR = 0;
+ //=== VK_VALVE_descriptor_set_host_mapping ===
+ PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE = 0;
+ PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE = 0;
+
+ //=== VK_EXT_extended_dynamic_state3 ===
+ PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT = 0;
+ PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0;
+ PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT = 0;
+ PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT = 0;
+ PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT = 0;
+ PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT = 0;
+ PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT = 0;
+ PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT = 0;
+ PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT = 0;
+ PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT = 0;
+ PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT = 0;
+ PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT = 0;
+ PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT = 0;
+ PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT = 0;
+ PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT = 0;
+ PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT = 0;
+ PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT = 0;
+ PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT = 0;
+ PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT = 0;
+ PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT = 0;
+ PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT = 0;
+ PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV = 0;
+ PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV = 0;
+ PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV = 0;
+ PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV = 0;
+ PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV = 0;
+ PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV = 0;
+ PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV = 0;
+ PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV = 0;
+ PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV = 0;
+ PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV = 0;
+
+ //=== VK_EXT_shader_module_identifier ===
+ PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT = 0;
+ PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT = 0;
+
+ //=== VK_NV_optical_flow ===
+ PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV = 0;
+ PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV = 0;
+ PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV = 0;
+ PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0;
+ PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0;
+
+ //=== VK_QCOM_tile_properties ===
+ PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0;
+ PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0;
+
public:
DispatchLoaderDynamic() VULKAN_HPP_NOEXCEPT = default;
DispatchLoaderDynamic( DispatchLoaderDynamic const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#if !defined( VK_NO_PROTOTYPES )
- // This interface is designed to be used for per-device function pointers in combination with a linked vulkan
- // library.
+ // This interface is designed to be used for per-device function pointers in combination with a linked vulkan library.
template <typename DynamicLoader>
- void init( VULKAN_HPP_NAMESPACE::Instance const & instance,
- VULKAN_HPP_NAMESPACE::Device const & device,
- DynamicLoader const & dl ) VULKAN_HPP_NOEXCEPT
- {
- PFN_vkGetInstanceProcAddr getInstanceProcAddr =
- dl.template getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );
- PFN_vkGetDeviceProcAddr getDeviceProcAddr =
- dl.template getProcAddress<PFN_vkGetDeviceProcAddr>( "vkGetDeviceProcAddr" );
- init( static_cast<VkInstance>( instance ),
- getInstanceProcAddr,
- static_cast<VkDevice>( device ),
- device ? getDeviceProcAddr : nullptr );
- }
-
- // This interface is designed to be used for per-device function pointers in combination with a linked vulkan
- // library.
+ void init( VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device, DynamicLoader const & dl ) VULKAN_HPP_NOEXCEPT
+ {
+ PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.template getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );
+ PFN_vkGetDeviceProcAddr getDeviceProcAddr = dl.template getProcAddress<PFN_vkGetDeviceProcAddr>( "vkGetDeviceProcAddr" );
+ init( static_cast<VkInstance>( instance ), getInstanceProcAddr, static_cast<VkDevice>( device ), device ? getDeviceProcAddr : nullptr );
+ }
+
+ // This interface is designed to be used for per-device function pointers in combination with a linked vulkan library.
template <typename DynamicLoader
# if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
= VULKAN_HPP_NAMESPACE::DynamicLoader
# endif
>
- void init( VULKAN_HPP_NAMESPACE::Instance const & instance,
- VULKAN_HPP_NAMESPACE::Device const & device ) VULKAN_HPP_NOEXCEPT
+ void init( VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device ) VULKAN_HPP_NOEXCEPT
{
static DynamicLoader dl;
init( instance, device, dl );
@@ -12413,14 +13158,12 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_VERSION_1_0 ===
vkCreateInstance = PFN_vkCreateInstance( vkGetInstanceProcAddr( NULL, "vkCreateInstance" ) );
- vkEnumerateInstanceExtensionProperties = PFN_vkEnumerateInstanceExtensionProperties(
- vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) );
- vkEnumerateInstanceLayerProperties =
- PFN_vkEnumerateInstanceLayerProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) );
+ vkEnumerateInstanceExtensionProperties =
+ PFN_vkEnumerateInstanceExtensionProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) );
+ vkEnumerateInstanceLayerProperties = PFN_vkEnumerateInstanceLayerProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) );
//=== VK_VERSION_1_1 ===
- vkEnumerateInstanceVersion =
- PFN_vkEnumerateInstanceVersion( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceVersion" ) );
+ vkEnumerateInstanceVersion = PFN_vkEnumerateInstanceVersion( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceVersion" ) );
}
// This interface does not require a linked vulkan library.
@@ -12452,442 +13195,339 @@ namespace VULKAN_HPP_NAMESPACE
VkInstance instance = static_cast<VkInstance>( instanceCpp );
//=== VK_VERSION_1_0 ===
- vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) );
- vkEnumeratePhysicalDevices =
- PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) );
- vkGetPhysicalDeviceFeatures =
- PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) );
- vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) );
- vkGetPhysicalDeviceImageFormatProperties = PFN_vkGetPhysicalDeviceImageFormatProperties(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) );
- vkGetPhysicalDeviceProperties =
- PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) );
- vkGetPhysicalDeviceQueueFamilyProperties = PFN_vkGetPhysicalDeviceQueueFamilyProperties(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) );
- vkGetPhysicalDeviceMemoryProperties = PFN_vkGetPhysicalDeviceMemoryProperties(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) );
- vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) );
- vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) );
- vkDestroyDevice = PFN_vkDestroyDevice( vkGetInstanceProcAddr( instance, "vkDestroyDevice" ) );
- vkEnumerateDeviceExtensionProperties = PFN_vkEnumerateDeviceExtensionProperties(
- vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) );
- vkEnumerateDeviceLayerProperties =
- PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) );
- vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue" ) );
- vkQueueSubmit = PFN_vkQueueSubmit( vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) );
- vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) );
- vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetInstanceProcAddr( instance, "vkDeviceWaitIdle" ) );
- vkAllocateMemory = PFN_vkAllocateMemory( vkGetInstanceProcAddr( instance, "vkAllocateMemory" ) );
- vkFreeMemory = PFN_vkFreeMemory( vkGetInstanceProcAddr( instance, "vkFreeMemory" ) );
- vkMapMemory = PFN_vkMapMemory( vkGetInstanceProcAddr( instance, "vkMapMemory" ) );
- vkUnmapMemory = PFN_vkUnmapMemory( vkGetInstanceProcAddr( instance, "vkUnmapMemory" ) );
- vkFlushMappedMemoryRanges =
- PFN_vkFlushMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkFlushMappedMemoryRanges" ) );
- vkInvalidateMappedMemoryRanges =
- PFN_vkInvalidateMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkInvalidateMappedMemoryRanges" ) );
- vkGetDeviceMemoryCommitment =
- PFN_vkGetDeviceMemoryCommitment( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryCommitment" ) );
- vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetInstanceProcAddr( instance, "vkBindBufferMemory" ) );
- vkBindImageMemory = PFN_vkBindImageMemory( vkGetInstanceProcAddr( instance, "vkBindImageMemory" ) );
- vkGetBufferMemoryRequirements =
- PFN_vkGetBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements" ) );
- vkGetImageMemoryRequirements =
- PFN_vkGetImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements" ) );
- vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements(
- vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements" ) );
- vkGetPhysicalDeviceSparseImageFormatProperties = PFN_vkGetPhysicalDeviceSparseImageFormatProperties(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) );
- vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetInstanceProcAddr( instance, "vkQueueBindSparse" ) );
- vkCreateFence = PFN_vkCreateFence( vkGetInstanceProcAddr( instance, "vkCreateFence" ) );
- vkDestroyFence = PFN_vkDestroyFence( vkGetInstanceProcAddr( instance, "vkDestroyFence" ) );
- vkResetFences = PFN_vkResetFences( vkGetInstanceProcAddr( instance, "vkResetFences" ) );
- vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetInstanceProcAddr( instance, "vkGetFenceStatus" ) );
- vkWaitForFences = PFN_vkWaitForFences( vkGetInstanceProcAddr( instance, "vkWaitForFences" ) );
- vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetInstanceProcAddr( instance, "vkCreateSemaphore" ) );
- vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetInstanceProcAddr( instance, "vkDestroySemaphore" ) );
- vkCreateEvent = PFN_vkCreateEvent( vkGetInstanceProcAddr( instance, "vkCreateEvent" ) );
- vkDestroyEvent = PFN_vkDestroyEvent( vkGetInstanceProcAddr( instance, "vkDestroyEvent" ) );
- vkGetEventStatus = PFN_vkGetEventStatus( vkGetInstanceProcAddr( instance, "vkGetEventStatus" ) );
- vkSetEvent = PFN_vkSetEvent( vkGetInstanceProcAddr( instance, "vkSetEvent" ) );
- vkResetEvent = PFN_vkResetEvent( vkGetInstanceProcAddr( instance, "vkResetEvent" ) );
- vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetInstanceProcAddr( instance, "vkCreateQueryPool" ) );
- vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetInstanceProcAddr( instance, "vkDestroyQueryPool" ) );
- vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetInstanceProcAddr( instance, "vkGetQueryPoolResults" ) );
- vkCreateBuffer = PFN_vkCreateBuffer( vkGetInstanceProcAddr( instance, "vkCreateBuffer" ) );
- vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetInstanceProcAddr( instance, "vkDestroyBuffer" ) );
- vkCreateBufferView = PFN_vkCreateBufferView( vkGetInstanceProcAddr( instance, "vkCreateBufferView" ) );
- vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetInstanceProcAddr( instance, "vkDestroyBufferView" ) );
- vkCreateImage = PFN_vkCreateImage( vkGetInstanceProcAddr( instance, "vkCreateImage" ) );
- vkDestroyImage = PFN_vkDestroyImage( vkGetInstanceProcAddr( instance, "vkDestroyImage" ) );
- vkGetImageSubresourceLayout =
- PFN_vkGetImageSubresourceLayout( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout" ) );
- vkCreateImageView = PFN_vkCreateImageView( vkGetInstanceProcAddr( instance, "vkCreateImageView" ) );
- vkDestroyImageView = PFN_vkDestroyImageView( vkGetInstanceProcAddr( instance, "vkDestroyImageView" ) );
- vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetInstanceProcAddr( instance, "vkCreateShaderModule" ) );
- vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetInstanceProcAddr( instance, "vkDestroyShaderModule" ) );
- vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetInstanceProcAddr( instance, "vkCreatePipelineCache" ) );
- vkDestroyPipelineCache =
- PFN_vkDestroyPipelineCache( vkGetInstanceProcAddr( instance, "vkDestroyPipelineCache" ) );
- vkGetPipelineCacheData =
- PFN_vkGetPipelineCacheData( vkGetInstanceProcAddr( instance, "vkGetPipelineCacheData" ) );
- vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetInstanceProcAddr( instance, "vkMergePipelineCaches" ) );
- vkCreateGraphicsPipelines =
- PFN_vkCreateGraphicsPipelines( vkGetInstanceProcAddr( instance, "vkCreateGraphicsPipelines" ) );
- vkCreateComputePipelines =
- PFN_vkCreateComputePipelines( vkGetInstanceProcAddr( instance, "vkCreateComputePipelines" ) );
- vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetInstanceProcAddr( instance, "vkDestroyPipeline" ) );
- vkCreatePipelineLayout =
- PFN_vkCreatePipelineLayout( vkGetInstanceProcAddr( instance, "vkCreatePipelineLayout" ) );
- vkDestroyPipelineLayout =
- PFN_vkDestroyPipelineLayout( vkGetInstanceProcAddr( instance, "vkDestroyPipelineLayout" ) );
- vkCreateSampler = PFN_vkCreateSampler( vkGetInstanceProcAddr( instance, "vkCreateSampler" ) );
- vkDestroySampler = PFN_vkDestroySampler( vkGetInstanceProcAddr( instance, "vkDestroySampler" ) );
- vkCreateDescriptorSetLayout =
- PFN_vkCreateDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkCreateDescriptorSetLayout" ) );
- vkDestroyDescriptorSetLayout =
- PFN_vkDestroyDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorSetLayout" ) );
- vkCreateDescriptorPool =
- PFN_vkCreateDescriptorPool( vkGetInstanceProcAddr( instance, "vkCreateDescriptorPool" ) );
- vkDestroyDescriptorPool =
- PFN_vkDestroyDescriptorPool( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorPool" ) );
- vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetInstanceProcAddr( instance, "vkResetDescriptorPool" ) );
- vkAllocateDescriptorSets =
- PFN_vkAllocateDescriptorSets( vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) );
- vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetInstanceProcAddr( instance, "vkFreeDescriptorSets" ) );
- vkUpdateDescriptorSets =
- PFN_vkUpdateDescriptorSets( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) );
- vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetInstanceProcAddr( instance, "vkCreateFramebuffer" ) );
- vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetInstanceProcAddr( instance, "vkDestroyFramebuffer" ) );
- vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetInstanceProcAddr( instance, "vkCreateRenderPass" ) );
- vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetInstanceProcAddr( instance, "vkDestroyRenderPass" ) );
- vkGetRenderAreaGranularity =
- PFN_vkGetRenderAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderAreaGranularity" ) );
- vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetInstanceProcAddr( instance, "vkCreateCommandPool" ) );
- vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetInstanceProcAddr( instance, "vkDestroyCommandPool" ) );
- vkResetCommandPool = PFN_vkResetCommandPool( vkGetInstanceProcAddr( instance, "vkResetCommandPool" ) );
- vkAllocateCommandBuffers =
- PFN_vkAllocateCommandBuffers( vkGetInstanceProcAddr( instance, "vkAllocateCommandBuffers" ) );
- vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetInstanceProcAddr( instance, "vkFreeCommandBuffers" ) );
- vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetInstanceProcAddr( instance, "vkBeginCommandBuffer" ) );
- vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetInstanceProcAddr( instance, "vkEndCommandBuffer" ) );
- vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetInstanceProcAddr( instance, "vkResetCommandBuffer" ) );
- vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetInstanceProcAddr( instance, "vkCmdBindPipeline" ) );
- vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetInstanceProcAddr( instance, "vkCmdSetViewport" ) );
- vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetInstanceProcAddr( instance, "vkCmdSetScissor" ) );
- vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetInstanceProcAddr( instance, "vkCmdSetLineWidth" ) );
- vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias" ) );
- vkCmdSetBlendConstants =
- PFN_vkCmdSetBlendConstants( vkGetInstanceProcAddr( instance, "vkCmdSetBlendConstants" ) );
- vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBounds" ) );
- vkCmdSetStencilCompareMask =
- PFN_vkCmdSetStencilCompareMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilCompareMask" ) );
- vkCmdSetStencilWriteMask =
- PFN_vkCmdSetStencilWriteMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilWriteMask" ) );
- vkCmdSetStencilReference =
- PFN_vkCmdSetStencilReference( vkGetInstanceProcAddr( instance, "vkCmdSetStencilReference" ) );
- vkCmdBindDescriptorSets =
- PFN_vkCmdBindDescriptorSets( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) );
- vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer" ) );
- vkCmdBindVertexBuffers =
- PFN_vkCmdBindVertexBuffers( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers" ) );
- vkCmdDraw = PFN_vkCmdDraw( vkGetInstanceProcAddr( instance, "vkCmdDraw" ) );
- vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexed" ) );
- vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirect" ) );
- vkCmdDrawIndexedIndirect =
- PFN_vkCmdDrawIndexedIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirect" ) );
- vkCmdDispatch = PFN_vkCmdDispatch( vkGetInstanceProcAddr( instance, "vkCmdDispatch" ) );
- vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetInstanceProcAddr( instance, "vkCmdDispatchIndirect" ) );
- vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer" ) );
- vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetInstanceProcAddr( instance, "vkCmdCopyImage" ) );
- vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetInstanceProcAddr( instance, "vkCmdBlitImage" ) );
- vkCmdCopyBufferToImage =
- PFN_vkCmdCopyBufferToImage( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage" ) );
- vkCmdCopyImageToBuffer =
- PFN_vkCmdCopyImageToBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer" ) );
- vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdateBuffer" ) );
- vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetInstanceProcAddr( instance, "vkCmdFillBuffer" ) );
- vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetInstanceProcAddr( instance, "vkCmdClearColorImage" ) );
- vkCmdClearDepthStencilImage =
- PFN_vkCmdClearDepthStencilImage( vkGetInstanceProcAddr( instance, "vkCmdClearDepthStencilImage" ) );
- vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetInstanceProcAddr( instance, "vkCmdClearAttachments" ) );
- vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) );
- vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetInstanceProcAddr( instance, "vkCmdSetEvent" ) );
- vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, "vkCmdResetEvent" ) );
- vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) );
- vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) );
- vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetInstanceProcAddr( instance, "vkCmdBeginQuery" ) );
- vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetInstanceProcAddr( instance, "vkCmdEndQuery" ) );
- vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) );
- vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) );
- vkCmdCopyQueryPoolResults =
- PFN_vkCmdCopyQueryPoolResults( vkGetInstanceProcAddr( instance, "vkCmdCopyQueryPoolResults" ) );
- vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) );
- vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass" ) );
- vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) );
- vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) );
- vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) );
+ vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) );
+ vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) );
+ vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) );
+ vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) );
+ vkGetPhysicalDeviceImageFormatProperties =
+ PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) );
+ vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) );
+ vkGetPhysicalDeviceQueueFamilyProperties =
+ PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) );
+ vkGetPhysicalDeviceMemoryProperties = PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) );
+ vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) );
+ vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) );
+ vkDestroyDevice = PFN_vkDestroyDevice( vkGetInstanceProcAddr( instance, "vkDestroyDevice" ) );
+ vkEnumerateDeviceExtensionProperties =
+ PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) );
+ vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) );
+ vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue" ) );
+ vkQueueSubmit = PFN_vkQueueSubmit( vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) );
+ vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) );
+ vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetInstanceProcAddr( instance, "vkDeviceWaitIdle" ) );
+ vkAllocateMemory = PFN_vkAllocateMemory( vkGetInstanceProcAddr( instance, "vkAllocateMemory" ) );
+ vkFreeMemory = PFN_vkFreeMemory( vkGetInstanceProcAddr( instance, "vkFreeMemory" ) );
+ vkMapMemory = PFN_vkMapMemory( vkGetInstanceProcAddr( instance, "vkMapMemory" ) );
+ vkUnmapMemory = PFN_vkUnmapMemory( vkGetInstanceProcAddr( instance, "vkUnmapMemory" ) );
+ vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkFlushMappedMemoryRanges" ) );
+ vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkInvalidateMappedMemoryRanges" ) );
+ vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryCommitment" ) );
+ vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetInstanceProcAddr( instance, "vkBindBufferMemory" ) );
+ vkBindImageMemory = PFN_vkBindImageMemory( vkGetInstanceProcAddr( instance, "vkBindImageMemory" ) );
+ vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements" ) );
+ vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements" ) );
+ vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements" ) );
+ vkGetPhysicalDeviceSparseImageFormatProperties =
+ PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) );
+ vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetInstanceProcAddr( instance, "vkQueueBindSparse" ) );
+ vkCreateFence = PFN_vkCreateFence( vkGetInstanceProcAddr( instance, "vkCreateFence" ) );
+ vkDestroyFence = PFN_vkDestroyFence( vkGetInstanceProcAddr( instance, "vkDestroyFence" ) );
+ vkResetFences = PFN_vkResetFences( vkGetInstanceProcAddr( instance, "vkResetFences" ) );
+ vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetInstanceProcAddr( instance, "vkGetFenceStatus" ) );
+ vkWaitForFences = PFN_vkWaitForFences( vkGetInstanceProcAddr( instance, "vkWaitForFences" ) );
+ vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetInstanceProcAddr( instance, "vkCreateSemaphore" ) );
+ vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetInstanceProcAddr( instance, "vkDestroySemaphore" ) );
+ vkCreateEvent = PFN_vkCreateEvent( vkGetInstanceProcAddr( instance, "vkCreateEvent" ) );
+ vkDestroyEvent = PFN_vkDestroyEvent( vkGetInstanceProcAddr( instance, "vkDestroyEvent" ) );
+ vkGetEventStatus = PFN_vkGetEventStatus( vkGetInstanceProcAddr( instance, "vkGetEventStatus" ) );
+ vkSetEvent = PFN_vkSetEvent( vkGetInstanceProcAddr( instance, "vkSetEvent" ) );
+ vkResetEvent = PFN_vkResetEvent( vkGetInstanceProcAddr( instance, "vkResetEvent" ) );
+ vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetInstanceProcAddr( instance, "vkCreateQueryPool" ) );
+ vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetInstanceProcAddr( instance, "vkDestroyQueryPool" ) );
+ vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetInstanceProcAddr( instance, "vkGetQueryPoolResults" ) );
+ vkCreateBuffer = PFN_vkCreateBuffer( vkGetInstanceProcAddr( instance, "vkCreateBuffer" ) );
+ vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetInstanceProcAddr( instance, "vkDestroyBuffer" ) );
+ vkCreateBufferView = PFN_vkCreateBufferView( vkGetInstanceProcAddr( instance, "vkCreateBufferView" ) );
+ vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetInstanceProcAddr( instance, "vkDestroyBufferView" ) );
+ vkCreateImage = PFN_vkCreateImage( vkGetInstanceProcAddr( instance, "vkCreateImage" ) );
+ vkDestroyImage = PFN_vkDestroyImage( vkGetInstanceProcAddr( instance, "vkDestroyImage" ) );
+ vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout" ) );
+ vkCreateImageView = PFN_vkCreateImageView( vkGetInstanceProcAddr( instance, "vkCreateImageView" ) );
+ vkDestroyImageView = PFN_vkDestroyImageView( vkGetInstanceProcAddr( instance, "vkDestroyImageView" ) );
+ vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetInstanceProcAddr( instance, "vkCreateShaderModule" ) );
+ vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetInstanceProcAddr( instance, "vkDestroyShaderModule" ) );
+ vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetInstanceProcAddr( instance, "vkCreatePipelineCache" ) );
+ vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetInstanceProcAddr( instance, "vkDestroyPipelineCache" ) );
+ vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetInstanceProcAddr( instance, "vkGetPipelineCacheData" ) );
+ vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetInstanceProcAddr( instance, "vkMergePipelineCaches" ) );
+ vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetInstanceProcAddr( instance, "vkCreateGraphicsPipelines" ) );
+ vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetInstanceProcAddr( instance, "vkCreateComputePipelines" ) );
+ vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetInstanceProcAddr( instance, "vkDestroyPipeline" ) );
+ vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetInstanceProcAddr( instance, "vkCreatePipelineLayout" ) );
+ vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetInstanceProcAddr( instance, "vkDestroyPipelineLayout" ) );
+ vkCreateSampler = PFN_vkCreateSampler( vkGetInstanceProcAddr( instance, "vkCreateSampler" ) );
+ vkDestroySampler = PFN_vkDestroySampler( vkGetInstanceProcAddr( instance, "vkDestroySampler" ) );
+ vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkCreateDescriptorSetLayout" ) );
+ vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorSetLayout" ) );
+ vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetInstanceProcAddr( instance, "vkCreateDescriptorPool" ) );
+ vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorPool" ) );
+ vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetInstanceProcAddr( instance, "vkResetDescriptorPool" ) );
+ vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) );
+ vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetInstanceProcAddr( instance, "vkFreeDescriptorSets" ) );
+ vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) );
+ vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetInstanceProcAddr( instance, "vkCreateFramebuffer" ) );
+ vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetInstanceProcAddr( instance, "vkDestroyFramebuffer" ) );
+ vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetInstanceProcAddr( instance, "vkCreateRenderPass" ) );
+ vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetInstanceProcAddr( instance, "vkDestroyRenderPass" ) );
+ vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderAreaGranularity" ) );
+ vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetInstanceProcAddr( instance, "vkCreateCommandPool" ) );
+ vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetInstanceProcAddr( instance, "vkDestroyCommandPool" ) );
+ vkResetCommandPool = PFN_vkResetCommandPool( vkGetInstanceProcAddr( instance, "vkResetCommandPool" ) );
+ vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetInstanceProcAddr( instance, "vkAllocateCommandBuffers" ) );
+ vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetInstanceProcAddr( instance, "vkFreeCommandBuffers" ) );
+ vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetInstanceProcAddr( instance, "vkBeginCommandBuffer" ) );
+ vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetInstanceProcAddr( instance, "vkEndCommandBuffer" ) );
+ vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetInstanceProcAddr( instance, "vkResetCommandBuffer" ) );
+ vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetInstanceProcAddr( instance, "vkCmdBindPipeline" ) );
+ vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetInstanceProcAddr( instance, "vkCmdSetViewport" ) );
+ vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetInstanceProcAddr( instance, "vkCmdSetScissor" ) );
+ vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetInstanceProcAddr( instance, "vkCmdSetLineWidth" ) );
+ vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias" ) );
+ vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetInstanceProcAddr( instance, "vkCmdSetBlendConstants" ) );
+ vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBounds" ) );
+ vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilCompareMask" ) );
+ vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilWriteMask" ) );
+ vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetInstanceProcAddr( instance, "vkCmdSetStencilReference" ) );
+ vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) );
+ vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer" ) );
+ vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers" ) );
+ vkCmdDraw = PFN_vkCmdDraw( vkGetInstanceProcAddr( instance, "vkCmdDraw" ) );
+ vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexed" ) );
+ vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirect" ) );
+ vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirect" ) );
+ vkCmdDispatch = PFN_vkCmdDispatch( vkGetInstanceProcAddr( instance, "vkCmdDispatch" ) );
+ vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetInstanceProcAddr( instance, "vkCmdDispatchIndirect" ) );
+ vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer" ) );
+ vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetInstanceProcAddr( instance, "vkCmdCopyImage" ) );
+ vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetInstanceProcAddr( instance, "vkCmdBlitImage" ) );
+ vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage" ) );
+ vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer" ) );
+ vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdateBuffer" ) );
+ vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetInstanceProcAddr( instance, "vkCmdFillBuffer" ) );
+ vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetInstanceProcAddr( instance, "vkCmdClearColorImage" ) );
+ vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetInstanceProcAddr( instance, "vkCmdClearDepthStencilImage" ) );
+ vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetInstanceProcAddr( instance, "vkCmdClearAttachments" ) );
+ vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) );
+ vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetInstanceProcAddr( instance, "vkCmdSetEvent" ) );
+ vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, "vkCmdResetEvent" ) );
+ vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) );
+ vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) );
+ vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetInstanceProcAddr( instance, "vkCmdBeginQuery" ) );
+ vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetInstanceProcAddr( instance, "vkCmdEndQuery" ) );
+ vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) );
+ vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) );
+ vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetInstanceProcAddr( instance, "vkCmdCopyQueryPoolResults" ) );
+ vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) );
+ vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass" ) );
+ vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) );
+ vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) );
+ vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) );
//=== VK_VERSION_1_1 ===
- vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2" ) );
- vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) );
- vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures(
- vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) );
- vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMask" ) );
- vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetInstanceProcAddr( instance, "vkCmdDispatchBase" ) );
- vkEnumeratePhysicalDeviceGroups =
- PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) );
- vkGetImageMemoryRequirements2 =
- PFN_vkGetImageMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) );
- vkGetBufferMemoryRequirements2 =
- PFN_vkGetBufferMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) );
- vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2(
- vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2" ) );
- vkGetPhysicalDeviceFeatures2 =
- PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) );
- vkGetPhysicalDeviceProperties2 =
- PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) );
- vkGetPhysicalDeviceFormatProperties2 = PFN_vkGetPhysicalDeviceFormatProperties2(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) );
- vkGetPhysicalDeviceImageFormatProperties2 = PFN_vkGetPhysicalDeviceImageFormatProperties2(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) );
- vkGetPhysicalDeviceQueueFamilyProperties2 = PFN_vkGetPhysicalDeviceQueueFamilyProperties2(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) );
- vkGetPhysicalDeviceMemoryProperties2 = PFN_vkGetPhysicalDeviceMemoryProperties2(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) );
- vkGetPhysicalDeviceSparseImageFormatProperties2 = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) );
- vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) );
- vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue2" ) );
- vkCreateSamplerYcbcrConversion =
- PFN_vkCreateSamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) );
- vkDestroySamplerYcbcrConversion =
- PFN_vkDestroySamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversion" ) );
- vkCreateDescriptorUpdateTemplate =
- PFN_vkCreateDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) );
- vkDestroyDescriptorUpdateTemplate =
- PFN_vkDestroyDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) );
- vkUpdateDescriptorSetWithTemplate =
- PFN_vkUpdateDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) );
- vkGetPhysicalDeviceExternalBufferProperties = PFN_vkGetPhysicalDeviceExternalBufferProperties(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) );
- vkGetPhysicalDeviceExternalFenceProperties = PFN_vkGetPhysicalDeviceExternalFenceProperties(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) );
- vkGetPhysicalDeviceExternalSemaphoreProperties = PFN_vkGetPhysicalDeviceExternalSemaphoreProperties(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) );
- vkGetDescriptorSetLayoutSupport =
- PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) );
+ vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2" ) );
+ vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) );
+ vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) );
+ vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMask" ) );
+ vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetInstanceProcAddr( instance, "vkCmdDispatchBase" ) );
+ vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) );
+ vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) );
+ vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) );
+ vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2" ) );
+ vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) );
+ vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) );
+ vkGetPhysicalDeviceFormatProperties2 =
+ PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) );
+ vkGetPhysicalDeviceImageFormatProperties2 =
+ PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) );
+ vkGetPhysicalDeviceQueueFamilyProperties2 =
+ PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) );
+ vkGetPhysicalDeviceMemoryProperties2 =
+ PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) );
+ vkGetPhysicalDeviceSparseImageFormatProperties2 =
+ PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) );
+ vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) );
+ vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue2" ) );
+ vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) );
+ vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversion" ) );
+ vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) );
+ vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) );
+ vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) );
+ vkGetPhysicalDeviceExternalBufferProperties =
+ PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) );
+ vkGetPhysicalDeviceExternalFenceProperties =
+ PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) );
+ vkGetPhysicalDeviceExternalSemaphoreProperties =
+ PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) );
+ vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) );
//=== VK_VERSION_1_2 ===
- vkCmdDrawIndirectCount =
- PFN_vkCmdDrawIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCount" ) );
- vkCmdDrawIndexedIndirectCount =
- PFN_vkCmdDrawIndexedIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCount" ) );
- vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2" ) );
- vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2" ) );
- vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) );
- vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2" ) );
- vkResetQueryPool = PFN_vkResetQueryPool( vkGetInstanceProcAddr( instance, "vkResetQueryPool" ) );
- vkGetSemaphoreCounterValue =
- PFN_vkGetSemaphoreCounterValue( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValue" ) );
- vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetInstanceProcAddr( instance, "vkWaitSemaphores" ) );
- vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetInstanceProcAddr( instance, "vkSignalSemaphore" ) );
- vkGetBufferDeviceAddress =
- PFN_vkGetBufferDeviceAddress( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddress" ) );
- vkGetBufferOpaqueCaptureAddress =
- PFN_vkGetBufferOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddress" ) );
- vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress(
- vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
+ vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCount" ) );
+ vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCount" ) );
+ vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2" ) );
+ vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2" ) );
+ vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) );
+ vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2" ) );
+ vkResetQueryPool = PFN_vkResetQueryPool( vkGetInstanceProcAddr( instance, "vkResetQueryPool" ) );
+ vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValue" ) );
+ vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetInstanceProcAddr( instance, "vkWaitSemaphores" ) );
+ vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetInstanceProcAddr( instance, "vkSignalSemaphore" ) );
+ vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddress" ) );
+ vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddress" ) );
+ vkGetDeviceMemoryOpaqueCaptureAddress =
+ PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
//=== VK_VERSION_1_3 ===
- vkGetPhysicalDeviceToolProperties =
- PFN_vkGetPhysicalDeviceToolProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolProperties" ) );
- vkCreatePrivateDataSlot =
- PFN_vkCreatePrivateDataSlot( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlot" ) );
- vkDestroyPrivateDataSlot =
- PFN_vkDestroyPrivateDataSlot( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlot" ) );
- vkSetPrivateData = PFN_vkSetPrivateData( vkGetInstanceProcAddr( instance, "vkSetPrivateData" ) );
- vkGetPrivateData = PFN_vkGetPrivateData( vkGetInstanceProcAddr( instance, "vkGetPrivateData" ) );
- vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2" ) );
- vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2" ) );
- vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2" ) );
- vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2" ) );
- vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2" ) );
- vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetInstanceProcAddr( instance, "vkQueueSubmit2" ) );
- vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2" ) );
- vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2" ) );
- vkCmdCopyBufferToImage2 =
- PFN_vkCmdCopyBufferToImage2( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2" ) );
- vkCmdCopyImageToBuffer2 =
- PFN_vkCmdCopyImageToBuffer2( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2" ) );
- vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2" ) );
- vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2" ) );
- vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetInstanceProcAddr( instance, "vkCmdBeginRendering" ) );
- vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetInstanceProcAddr( instance, "vkCmdEndRendering" ) );
- vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetInstanceProcAddr( instance, "vkCmdSetCullMode" ) );
- vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFace" ) );
- vkCmdSetPrimitiveTopology =
- PFN_vkCmdSetPrimitiveTopology( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopology" ) );
- vkCmdSetViewportWithCount =
- PFN_vkCmdSetViewportWithCount( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCount" ) );
- vkCmdSetScissorWithCount =
- PFN_vkCmdSetScissorWithCount( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCount" ) );
- vkCmdBindVertexBuffers2 =
- PFN_vkCmdBindVertexBuffers2( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2" ) );
- vkCmdSetDepthTestEnable =
- PFN_vkCmdSetDepthTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnable" ) );
- vkCmdSetDepthWriteEnable =
- PFN_vkCmdSetDepthWriteEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnable" ) );
- vkCmdSetDepthCompareOp =
- PFN_vkCmdSetDepthCompareOp( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOp" ) );
- vkCmdSetDepthBoundsTestEnable =
- PFN_vkCmdSetDepthBoundsTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnable" ) );
- vkCmdSetStencilTestEnable =
- PFN_vkCmdSetStencilTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnable" ) );
- vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOp" ) );
- vkCmdSetRasterizerDiscardEnable =
- PFN_vkCmdSetRasterizerDiscardEnable( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnable" ) );
- vkCmdSetDepthBiasEnable =
- PFN_vkCmdSetDepthBiasEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnable" ) );
- vkCmdSetPrimitiveRestartEnable =
- PFN_vkCmdSetPrimitiveRestartEnable( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnable" ) );
- vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements(
- vkGetInstanceProcAddr( instance, "vkGetDeviceBufferMemoryRequirements" ) );
- vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements(
- vkGetInstanceProcAddr( instance, "vkGetDeviceImageMemoryRequirements" ) );
- vkGetDeviceImageSparseMemoryRequirements = PFN_vkGetDeviceImageSparseMemoryRequirements(
- vkGetInstanceProcAddr( instance, "vkGetDeviceImageSparseMemoryRequirements" ) );
+ vkGetPhysicalDeviceToolProperties = PFN_vkGetPhysicalDeviceToolProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolProperties" ) );
+ vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlot" ) );
+ vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlot" ) );
+ vkSetPrivateData = PFN_vkSetPrivateData( vkGetInstanceProcAddr( instance, "vkSetPrivateData" ) );
+ vkGetPrivateData = PFN_vkGetPrivateData( vkGetInstanceProcAddr( instance, "vkGetPrivateData" ) );
+ vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2" ) );
+ vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2" ) );
+ vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2" ) );
+ vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2" ) );
+ vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2" ) );
+ vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetInstanceProcAddr( instance, "vkQueueSubmit2" ) );
+ vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2" ) );
+ vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2" ) );
+ vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2" ) );
+ vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2" ) );
+ vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2" ) );
+ vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2" ) );
+ vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetInstanceProcAddr( instance, "vkCmdBeginRendering" ) );
+ vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetInstanceProcAddr( instance, "vkCmdEndRendering" ) );
+ vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetInstanceProcAddr( instance, "vkCmdSetCullMode" ) );
+ vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFace" ) );
+ vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopology" ) );
+ vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCount" ) );
+ vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCount" ) );
+ vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2" ) );
+ vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnable" ) );
+ vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnable" ) );
+ vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOp" ) );
+ vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnable" ) );
+ vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnable" ) );
+ vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOp" ) );
+ vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnable" ) );
+ vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnable" ) );
+ vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnable" ) );
+ vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceBufferMemoryRequirements" ) );
+ vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceImageMemoryRequirements" ) );
+ vkGetDeviceImageSparseMemoryRequirements =
+ PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSparseMemoryRequirements" ) );
//=== VK_KHR_surface ===
vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) );
- vkGetPhysicalDeviceSurfaceSupportKHR = PFN_vkGetPhysicalDeviceSurfaceSupportKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) );
- vkGetPhysicalDeviceSurfaceCapabilitiesKHR = PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) );
- vkGetPhysicalDeviceSurfaceFormatsKHR = PFN_vkGetPhysicalDeviceSurfaceFormatsKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) );
- vkGetPhysicalDeviceSurfacePresentModesKHR = PFN_vkGetPhysicalDeviceSurfacePresentModesKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) );
+ vkGetPhysicalDeviceSurfaceSupportKHR =
+ PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) );
+ vkGetPhysicalDeviceSurfaceCapabilitiesKHR =
+ PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) );
+ vkGetPhysicalDeviceSurfaceFormatsKHR =
+ PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) );
+ vkGetPhysicalDeviceSurfacePresentModesKHR =
+ PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) );
//=== VK_KHR_swapchain ===
- vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetInstanceProcAddr( instance, "vkCreateSwapchainKHR" ) );
- vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetInstanceProcAddr( instance, "vkDestroySwapchainKHR" ) );
- vkGetSwapchainImagesKHR =
- PFN_vkGetSwapchainImagesKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainImagesKHR" ) );
- vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImageKHR" ) );
- vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetInstanceProcAddr( instance, "vkQueuePresentKHR" ) );
- vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR(
- vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
- vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR(
- vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
- vkGetPhysicalDevicePresentRectanglesKHR = PFN_vkGetPhysicalDevicePresentRectanglesKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) );
- vkAcquireNextImage2KHR =
- PFN_vkAcquireNextImage2KHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImage2KHR" ) );
+ vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetInstanceProcAddr( instance, "vkCreateSwapchainKHR" ) );
+ vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetInstanceProcAddr( instance, "vkDestroySwapchainKHR" ) );
+ vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainImagesKHR" ) );
+ vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImageKHR" ) );
+ vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetInstanceProcAddr( instance, "vkQueuePresentKHR" ) );
+ vkGetDeviceGroupPresentCapabilitiesKHR =
+ PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
+ vkGetDeviceGroupSurfacePresentModesKHR =
+ PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
+ vkGetPhysicalDevicePresentRectanglesKHR =
+ PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) );
+ vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImage2KHR" ) );
//=== VK_KHR_display ===
- vkGetPhysicalDeviceDisplayPropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPropertiesKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) );
- vkGetPhysicalDeviceDisplayPlanePropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) );
- vkGetDisplayPlaneSupportedDisplaysKHR = PFN_vkGetDisplayPlaneSupportedDisplaysKHR(
- vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) );
- vkGetDisplayModePropertiesKHR =
- PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) );
- vkCreateDisplayModeKHR =
- PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) );
- vkGetDisplayPlaneCapabilitiesKHR =
- PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) );
- vkCreateDisplayPlaneSurfaceKHR =
- PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) );
+ vkGetPhysicalDeviceDisplayPropertiesKHR =
+ PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) );
+ vkGetPhysicalDeviceDisplayPlanePropertiesKHR =
+ PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) );
+ vkGetDisplayPlaneSupportedDisplaysKHR =
+ PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) );
+ vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) );
+ vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) );
+ vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) );
+ vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) );
//=== VK_KHR_display_swapchain ===
- vkCreateSharedSwapchainsKHR =
- PFN_vkCreateSharedSwapchainsKHR( vkGetInstanceProcAddr( instance, "vkCreateSharedSwapchainsKHR" ) );
+ vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetInstanceProcAddr( instance, "vkCreateSharedSwapchainsKHR" ) );
#if defined( VK_USE_PLATFORM_XLIB_KHR )
//=== VK_KHR_xlib_surface ===
- vkCreateXlibSurfaceKHR =
- PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) );
- vkGetPhysicalDeviceXlibPresentationSupportKHR = PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) );
+ vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) );
+ vkGetPhysicalDeviceXlibPresentationSupportKHR =
+ PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) );
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
#if defined( VK_USE_PLATFORM_XCB_KHR )
//=== VK_KHR_xcb_surface ===
vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) );
- vkGetPhysicalDeviceXcbPresentationSupportKHR = PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) );
+ vkGetPhysicalDeviceXcbPresentationSupportKHR =
+ PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) );
#endif /*VK_USE_PLATFORM_XCB_KHR*/
#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
//=== VK_KHR_wayland_surface ===
- vkCreateWaylandSurfaceKHR =
- PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) );
- vkGetPhysicalDeviceWaylandPresentationSupportKHR = PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) );
+ vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) );
+ vkGetPhysicalDeviceWaylandPresentationSupportKHR =
+ PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) );
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
//=== VK_KHR_android_surface ===
- vkCreateAndroidSurfaceKHR =
- PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) );
+ vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) );
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_win32_surface ===
- vkCreateWin32SurfaceKHR =
- PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) );
- vkGetPhysicalDeviceWin32PresentationSupportKHR = PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) );
+ vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) );
+ vkGetPhysicalDeviceWin32PresentationSupportKHR =
+ PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_EXT_debug_report ===
- vkCreateDebugReportCallbackEXT =
- PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) );
- vkDestroyDebugReportCallbackEXT =
- PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) );
- vkDebugReportMessageEXT =
- PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) );
+ vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) );
+ vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) );
+ vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) );
//=== VK_EXT_debug_marker ===
- vkDebugMarkerSetObjectTagEXT =
- PFN_vkDebugMarkerSetObjectTagEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectTagEXT" ) );
- vkDebugMarkerSetObjectNameEXT =
- PFN_vkDebugMarkerSetObjectNameEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectNameEXT" ) );
- vkCmdDebugMarkerBeginEXT =
- PFN_vkCmdDebugMarkerBeginEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerBeginEXT" ) );
- vkCmdDebugMarkerEndEXT =
- PFN_vkCmdDebugMarkerEndEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) );
- vkCmdDebugMarkerInsertEXT =
- PFN_vkCmdDebugMarkerInsertEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerInsertEXT" ) );
+ vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectTagEXT" ) );
+ vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectNameEXT" ) );
+ vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerBeginEXT" ) );
+ vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) );
+ vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerInsertEXT" ) );
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_video_queue ===
- vkGetPhysicalDeviceVideoCapabilitiesKHR = PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) );
- vkGetPhysicalDeviceVideoFormatPropertiesKHR = PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) );
- vkCreateVideoSessionKHR =
- PFN_vkCreateVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionKHR" ) );
- vkDestroyVideoSessionKHR =
- PFN_vkDestroyVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionKHR" ) );
- vkGetVideoSessionMemoryRequirementsKHR = PFN_vkGetVideoSessionMemoryRequirementsKHR(
- vkGetInstanceProcAddr( instance, "vkGetVideoSessionMemoryRequirementsKHR" ) );
- vkBindVideoSessionMemoryKHR =
- PFN_vkBindVideoSessionMemoryKHR( vkGetInstanceProcAddr( instance, "vkBindVideoSessionMemoryKHR" ) );
- vkCreateVideoSessionParametersKHR =
- PFN_vkCreateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionParametersKHR" ) );
- vkUpdateVideoSessionParametersKHR =
- PFN_vkUpdateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkUpdateVideoSessionParametersKHR" ) );
- vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR(
- vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionParametersKHR" ) );
- vkCmdBeginVideoCodingKHR =
- PFN_vkCmdBeginVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginVideoCodingKHR" ) );
- vkCmdEndVideoCodingKHR =
- PFN_vkCmdEndVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndVideoCodingKHR" ) );
- vkCmdControlVideoCodingKHR =
- PFN_vkCmdControlVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdControlVideoCodingKHR" ) );
+ vkGetPhysicalDeviceVideoCapabilitiesKHR =
+ PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) );
+ vkGetPhysicalDeviceVideoFormatPropertiesKHR =
+ PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) );
+ vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionKHR" ) );
+ vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionKHR" ) );
+ vkGetVideoSessionMemoryRequirementsKHR =
+ PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetVideoSessionMemoryRequirementsKHR" ) );
+ vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetInstanceProcAddr( instance, "vkBindVideoSessionMemoryKHR" ) );
+ vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionParametersKHR" ) );
+ vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkUpdateVideoSessionParametersKHR" ) );
+ vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionParametersKHR" ) );
+ vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginVideoCodingKHR" ) );
+ vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndVideoCodingKHR" ) );
+ vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdControlVideoCodingKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
@@ -12896,41 +13536,30 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_EXT_transform_feedback ===
- vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT(
- vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) );
- vkCmdBeginTransformFeedbackEXT =
- PFN_vkCmdBeginTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginTransformFeedbackEXT" ) );
- vkCmdEndTransformFeedbackEXT =
- PFN_vkCmdEndTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) );
- vkCmdBeginQueryIndexedEXT =
- PFN_vkCmdBeginQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginQueryIndexedEXT" ) );
- vkCmdEndQueryIndexedEXT =
- PFN_vkCmdEndQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) );
- vkCmdDrawIndirectByteCountEXT =
- PFN_vkCmdDrawIndirectByteCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) );
+ vkCmdBindTransformFeedbackBuffersEXT =
+ PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) );
+ vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginTransformFeedbackEXT" ) );
+ vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) );
+ vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginQueryIndexedEXT" ) );
+ vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) );
+ vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) );
//=== VK_NVX_binary_import ===
- vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetInstanceProcAddr( instance, "vkCreateCuModuleNVX" ) );
- vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkCreateCuFunctionNVX" ) );
- vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuModuleNVX" ) );
- vkDestroyCuFunctionNVX =
- PFN_vkDestroyCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuFunctionNVX" ) );
- vkCmdCuLaunchKernelNVX =
- PFN_vkCmdCuLaunchKernelNVX( vkGetInstanceProcAddr( instance, "vkCmdCuLaunchKernelNVX" ) );
+ vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetInstanceProcAddr( instance, "vkCreateCuModuleNVX" ) );
+ vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkCreateCuFunctionNVX" ) );
+ vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuModuleNVX" ) );
+ vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuFunctionNVX" ) );
+ vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetInstanceProcAddr( instance, "vkCmdCuLaunchKernelNVX" ) );
//=== VK_NVX_image_view_handle ===
- vkGetImageViewHandleNVX =
- PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) );
- vkGetImageViewAddressNVX =
- PFN_vkGetImageViewAddressNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewAddressNVX" ) );
+ vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) );
+ vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewAddressNVX" ) );
//=== VK_AMD_draw_indirect_count ===
- vkCmdDrawIndirectCountAMD =
- PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) );
+ vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) );
if ( !vkCmdDrawIndirectCount )
vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD;
- vkCmdDrawIndexedIndirectCountAMD =
- PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountAMD" ) );
+ vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountAMD" ) );
if ( !vkCmdDrawIndexedIndirectCount )
vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD;
@@ -12938,8 +13567,7 @@ namespace VULKAN_HPP_NAMESPACE
vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetInstanceProcAddr( instance, "vkGetShaderInfoAMD" ) );
//=== VK_KHR_dynamic_rendering ===
- vkCmdBeginRenderingKHR =
- PFN_vkCmdBeginRenderingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderingKHR" ) );
+ vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderingKHR" ) );
if ( !vkCmdBeginRendering )
vkCmdBeginRendering = vkCmdBeginRenderingKHR;
vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderingKHR" ) );
@@ -12948,53 +13576,49 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VK_USE_PLATFORM_GGP )
//=== VK_GGP_stream_descriptor_surface ===
- vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP(
- vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) );
+ vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) );
#endif /*VK_USE_PLATFORM_GGP*/
//=== VK_NV_external_memory_capabilities ===
- vkGetPhysicalDeviceExternalImageFormatPropertiesNV = PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) );
+ vkGetPhysicalDeviceExternalImageFormatPropertiesNV =
+ PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) );
#if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_NV_external_memory_win32 ===
- vkGetMemoryWin32HandleNV =
- PFN_vkGetMemoryWin32HandleNV( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleNV" ) );
+ vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleNV" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_get_physical_device_properties2 ===
- vkGetPhysicalDeviceFeatures2KHR =
- PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) );
+ vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) );
if ( !vkGetPhysicalDeviceFeatures2 )
vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR;
- vkGetPhysicalDeviceProperties2KHR =
- PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) );
+ vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) );
if ( !vkGetPhysicalDeviceProperties2 )
vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR;
- vkGetPhysicalDeviceFormatProperties2KHR = PFN_vkGetPhysicalDeviceFormatProperties2KHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) );
+ vkGetPhysicalDeviceFormatProperties2KHR =
+ PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) );
if ( !vkGetPhysicalDeviceFormatProperties2 )
vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR;
- vkGetPhysicalDeviceImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceImageFormatProperties2KHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) );
+ vkGetPhysicalDeviceImageFormatProperties2KHR =
+ PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) );
if ( !vkGetPhysicalDeviceImageFormatProperties2 )
vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR;
- vkGetPhysicalDeviceQueueFamilyProperties2KHR = PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) );
+ vkGetPhysicalDeviceQueueFamilyProperties2KHR =
+ PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) );
if ( !vkGetPhysicalDeviceQueueFamilyProperties2 )
vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR;
- vkGetPhysicalDeviceMemoryProperties2KHR = PFN_vkGetPhysicalDeviceMemoryProperties2KHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) );
+ vkGetPhysicalDeviceMemoryProperties2KHR =
+ PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) );
if ( !vkGetPhysicalDeviceMemoryProperties2 )
vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR;
- vkGetPhysicalDeviceSparseImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) );
+ vkGetPhysicalDeviceSparseImageFormatProperties2KHR =
+ PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) );
if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 )
vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR;
//=== VK_KHR_device_group ===
- vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR(
- vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) );
+ vkGetDeviceGroupPeerMemoryFeaturesKHR =
+ PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) );
if ( !vkGetDeviceGroupPeerMemoryFeatures )
vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR;
vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMaskKHR" ) );
@@ -13015,150 +13639,123 @@ namespace VULKAN_HPP_NAMESPACE
vkTrimCommandPool = vkTrimCommandPoolKHR;
//=== VK_KHR_device_group_creation ===
- vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR(
- vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) );
+ vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) );
if ( !vkEnumeratePhysicalDeviceGroups )
vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR;
//=== VK_KHR_external_memory_capabilities ===
- vkGetPhysicalDeviceExternalBufferPropertiesKHR = PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) );
+ vkGetPhysicalDeviceExternalBufferPropertiesKHR =
+ PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) );
if ( !vkGetPhysicalDeviceExternalBufferProperties )
vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR;
#if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_memory_win32 ===
- vkGetMemoryWin32HandleKHR =
- PFN_vkGetMemoryWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleKHR" ) );
- vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR(
- vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandlePropertiesKHR" ) );
+ vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleKHR" ) );
+ vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandlePropertiesKHR" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_memory_fd ===
- vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdKHR" ) );
- vkGetMemoryFdPropertiesKHR =
- PFN_vkGetMemoryFdPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdPropertiesKHR" ) );
+ vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdKHR" ) );
+ vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdPropertiesKHR" ) );
//=== VK_KHR_external_semaphore_capabilities ===
- vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) );
+ vkGetPhysicalDeviceExternalSemaphorePropertiesKHR =
+ PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) );
if ( !vkGetPhysicalDeviceExternalSemaphoreProperties )
vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR;
#if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_semaphore_win32 ===
- vkImportSemaphoreWin32HandleKHR =
- PFN_vkImportSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreWin32HandleKHR" ) );
- vkGetSemaphoreWin32HandleKHR =
- PFN_vkGetSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreWin32HandleKHR" ) );
+ vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreWin32HandleKHR" ) );
+ vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreWin32HandleKHR" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_semaphore_fd ===
- vkImportSemaphoreFdKHR =
- PFN_vkImportSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreFdKHR" ) );
- vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreFdKHR" ) );
+ vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreFdKHR" ) );
+ vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreFdKHR" ) );
//=== VK_KHR_push_descriptor ===
- vkCmdPushDescriptorSetKHR =
- PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) );
- vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR(
- vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) );
+ vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) );
+ vkCmdPushDescriptorSetWithTemplateKHR =
+ PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) );
//=== VK_EXT_conditional_rendering ===
- vkCmdBeginConditionalRenderingEXT =
- PFN_vkCmdBeginConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginConditionalRenderingEXT" ) );
- vkCmdEndConditionalRenderingEXT =
- PFN_vkCmdEndConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdEndConditionalRenderingEXT" ) );
+ vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginConditionalRenderingEXT" ) );
+ vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdEndConditionalRenderingEXT" ) );
//=== VK_KHR_descriptor_update_template ===
- vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR(
- vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplateKHR" ) );
+ vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplateKHR" ) );
if ( !vkCreateDescriptorUpdateTemplate )
vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR;
- vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR(
- vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplateKHR" ) );
+ vkDestroyDescriptorUpdateTemplateKHR =
+ PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplateKHR" ) );
if ( !vkDestroyDescriptorUpdateTemplate )
vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR;
- vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR(
- vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) );
+ vkUpdateDescriptorSetWithTemplateKHR =
+ PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) );
if ( !vkUpdateDescriptorSetWithTemplate )
vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR;
//=== VK_NV_clip_space_w_scaling ===
- vkCmdSetViewportWScalingNV =
- PFN_vkCmdSetViewportWScalingNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingNV" ) );
+ vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingNV" ) );
//=== VK_EXT_direct_mode_display ===
vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) );
#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
//=== VK_EXT_acquire_xlib_display ===
- vkAcquireXlibDisplayEXT =
- PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) );
- vkGetRandROutputDisplayEXT =
- PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) );
+ vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) );
+ vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) );
#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
//=== VK_EXT_display_surface_counter ===
- vkGetPhysicalDeviceSurfaceCapabilities2EXT = PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) );
+ vkGetPhysicalDeviceSurfaceCapabilities2EXT =
+ PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) );
//=== VK_EXT_display_control ===
- vkDisplayPowerControlEXT =
- PFN_vkDisplayPowerControlEXT( vkGetInstanceProcAddr( instance, "vkDisplayPowerControlEXT" ) );
- vkRegisterDeviceEventEXT =
- PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) );
- vkRegisterDisplayEventEXT =
- PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) );
- vkGetSwapchainCounterEXT =
- PFN_vkGetSwapchainCounterEXT( vkGetInstanceProcAddr( instance, "vkGetSwapchainCounterEXT" ) );
+ vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetInstanceProcAddr( instance, "vkDisplayPowerControlEXT" ) );
+ vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) );
+ vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) );
+ vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetInstanceProcAddr( instance, "vkGetSwapchainCounterEXT" ) );
//=== VK_GOOGLE_display_timing ===
- vkGetRefreshCycleDurationGOOGLE =
- PFN_vkGetRefreshCycleDurationGOOGLE( vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) );
- vkGetPastPresentationTimingGOOGLE =
- PFN_vkGetPastPresentationTimingGOOGLE( vkGetInstanceProcAddr( instance, "vkGetPastPresentationTimingGOOGLE" ) );
+ vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) );
+ vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetInstanceProcAddr( instance, "vkGetPastPresentationTimingGOOGLE" ) );
//=== VK_EXT_discard_rectangles ===
- vkCmdSetDiscardRectangleEXT =
- PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) );
+ vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) );
//=== VK_EXT_hdr_metadata ===
vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetInstanceProcAddr( instance, "vkSetHdrMetadataEXT" ) );
//=== VK_KHR_create_renderpass2 ===
- vkCreateRenderPass2KHR =
- PFN_vkCreateRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2KHR" ) );
+ vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2KHR" ) );
if ( !vkCreateRenderPass2 )
vkCreateRenderPass2 = vkCreateRenderPass2KHR;
- vkCmdBeginRenderPass2KHR =
- PFN_vkCmdBeginRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2KHR" ) );
+ vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2KHR" ) );
if ( !vkCmdBeginRenderPass2 )
vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR;
vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2KHR" ) );
if ( !vkCmdNextSubpass2 )
vkCmdNextSubpass2 = vkCmdNextSubpass2KHR;
- vkCmdEndRenderPass2KHR =
- PFN_vkCmdEndRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) );
+ vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) );
if ( !vkCmdEndRenderPass2 )
vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR;
//=== VK_KHR_shared_presentable_image ===
- vkGetSwapchainStatusKHR =
- PFN_vkGetSwapchainStatusKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainStatusKHR" ) );
+ vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainStatusKHR" ) );
//=== VK_KHR_external_fence_capabilities ===
- vkGetPhysicalDeviceExternalFencePropertiesKHR = PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) );
+ vkGetPhysicalDeviceExternalFencePropertiesKHR =
+ PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) );
if ( !vkGetPhysicalDeviceExternalFenceProperties )
vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR;
#if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_fence_win32 ===
- vkImportFenceWin32HandleKHR =
- PFN_vkImportFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportFenceWin32HandleKHR" ) );
- vkGetFenceWin32HandleKHR =
- PFN_vkGetFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetFenceWin32HandleKHR" ) );
+ vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportFenceWin32HandleKHR" ) );
+ vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetFenceWin32HandleKHR" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_fence_fd ===
@@ -13166,32 +13763,26 @@ namespace VULKAN_HPP_NAMESPACE
vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetInstanceProcAddr( instance, "vkGetFenceFdKHR" ) );
//=== VK_KHR_performance_query ===
- vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR =
- PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
- vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) );
- vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR =
- PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) );
- vkAcquireProfilingLockKHR =
- PFN_vkAcquireProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkAcquireProfilingLockKHR" ) );
- vkReleaseProfilingLockKHR =
- PFN_vkReleaseProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkReleaseProfilingLockKHR" ) );
+ vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+ vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) );
+ vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
+ vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) );
+ vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkAcquireProfilingLockKHR" ) );
+ vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkReleaseProfilingLockKHR" ) );
//=== VK_KHR_get_surface_capabilities2 ===
- vkGetPhysicalDeviceSurfaceCapabilities2KHR = PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) );
- vkGetPhysicalDeviceSurfaceFormats2KHR = PFN_vkGetPhysicalDeviceSurfaceFormats2KHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) );
+ vkGetPhysicalDeviceSurfaceCapabilities2KHR =
+ PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) );
+ vkGetPhysicalDeviceSurfaceFormats2KHR =
+ PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) );
//=== VK_KHR_get_display_properties2 ===
- vkGetPhysicalDeviceDisplayProperties2KHR = PFN_vkGetPhysicalDeviceDisplayProperties2KHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) );
- vkGetPhysicalDeviceDisplayPlaneProperties2KHR = PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) );
- vkGetDisplayModeProperties2KHR =
- PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) );
- vkGetDisplayPlaneCapabilities2KHR =
- PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) );
+ vkGetPhysicalDeviceDisplayProperties2KHR =
+ PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) );
+ vkGetPhysicalDeviceDisplayPlaneProperties2KHR =
+ PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) );
+ vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) );
+ vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) );
#if defined( VK_USE_PLATFORM_IOS_MVK )
//=== VK_MVK_ios_surface ===
@@ -13200,109 +13791,85 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VK_USE_PLATFORM_MACOS_MVK )
//=== VK_MVK_macos_surface ===
- vkCreateMacOSSurfaceMVK =
- PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) );
+ vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) );
#endif /*VK_USE_PLATFORM_MACOS_MVK*/
//=== VK_EXT_debug_utils ===
- vkSetDebugUtilsObjectNameEXT =
- PFN_vkSetDebugUtilsObjectNameEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectNameEXT" ) );
- vkSetDebugUtilsObjectTagEXT =
- PFN_vkSetDebugUtilsObjectTagEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectTagEXT" ) );
- vkQueueBeginDebugUtilsLabelEXT =
- PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) );
- vkQueueEndDebugUtilsLabelEXT =
- PFN_vkQueueEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueEndDebugUtilsLabelEXT" ) );
- vkQueueInsertDebugUtilsLabelEXT =
- PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueInsertDebugUtilsLabelEXT" ) );
- vkCmdBeginDebugUtilsLabelEXT =
- PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginDebugUtilsLabelEXT" ) );
- vkCmdEndDebugUtilsLabelEXT =
- PFN_vkCmdEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdEndDebugUtilsLabelEXT" ) );
- vkCmdInsertDebugUtilsLabelEXT =
- PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) );
- vkCreateDebugUtilsMessengerEXT =
- PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) );
- vkDestroyDebugUtilsMessengerEXT =
- PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) );
- vkSubmitDebugUtilsMessageEXT =
- PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) );
+ vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectNameEXT" ) );
+ vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectTagEXT" ) );
+ vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) );
+ vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueEndDebugUtilsLabelEXT" ) );
+ vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueInsertDebugUtilsLabelEXT" ) );
+ vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginDebugUtilsLabelEXT" ) );
+ vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdEndDebugUtilsLabelEXT" ) );
+ vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) );
+ vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) );
+ vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) );
+ vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) );
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
//=== VK_ANDROID_external_memory_android_hardware_buffer ===
- vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID(
- vkGetInstanceProcAddr( instance, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
- vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID(
- vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
+ vkGetAndroidHardwareBufferPropertiesANDROID =
+ PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetInstanceProcAddr( instance, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
+ vkGetMemoryAndroidHardwareBufferANDROID =
+ PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
//=== VK_EXT_sample_locations ===
- vkCmdSetSampleLocationsEXT =
- PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) );
- vkGetPhysicalDeviceMultisamplePropertiesEXT = PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) );
+ vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) );
+ vkGetPhysicalDeviceMultisamplePropertiesEXT =
+ PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) );
//=== VK_KHR_get_memory_requirements2 ===
- vkGetImageMemoryRequirements2KHR =
- PFN_vkGetImageMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2KHR" ) );
+ vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2KHR" ) );
if ( !vkGetImageMemoryRequirements2 )
vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR;
- vkGetBufferMemoryRequirements2KHR =
- PFN_vkGetBufferMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2KHR" ) );
+ vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2KHR" ) );
if ( !vkGetBufferMemoryRequirements2 )
vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR;
- vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR(
- vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2KHR" ) );
+ vkGetImageSparseMemoryRequirements2KHR =
+ PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2KHR" ) );
if ( !vkGetImageSparseMemoryRequirements2 )
vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR;
//=== VK_KHR_acceleration_structure ===
- vkCreateAccelerationStructureKHR =
- PFN_vkCreateAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureKHR" ) );
- vkDestroyAccelerationStructureKHR =
- PFN_vkDestroyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureKHR" ) );
- vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR(
- vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresKHR" ) );
- vkCmdBuildAccelerationStructuresIndirectKHR = PFN_vkCmdBuildAccelerationStructuresIndirectKHR(
- vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresIndirectKHR" ) );
- vkBuildAccelerationStructuresKHR =
- PFN_vkBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkBuildAccelerationStructuresKHR" ) );
- vkCopyAccelerationStructureKHR =
- PFN_vkCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureKHR" ) );
- vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR(
- vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureToMemoryKHR" ) );
- vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR(
- vkGetInstanceProcAddr( instance, "vkCopyMemoryToAccelerationStructureKHR" ) );
- vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR(
- vkGetInstanceProcAddr( instance, "vkWriteAccelerationStructuresPropertiesKHR" ) );
- vkCmdCopyAccelerationStructureKHR =
- PFN_vkCmdCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureKHR" ) );
- vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR(
- vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureToMemoryKHR" ) );
- vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR(
- vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToAccelerationStructureKHR" ) );
- vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR(
- vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureDeviceAddressKHR" ) );
- vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR(
- vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) );
- vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR(
- vkGetInstanceProcAddr( instance, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) );
- vkGetAccelerationStructureBuildSizesKHR = PFN_vkGetAccelerationStructureBuildSizesKHR(
- vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureBuildSizesKHR" ) );
+ vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureKHR" ) );
+ vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureKHR" ) );
+ vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresKHR" ) );
+ vkCmdBuildAccelerationStructuresIndirectKHR =
+ PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresIndirectKHR" ) );
+ vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkBuildAccelerationStructuresKHR" ) );
+ vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureKHR" ) );
+ vkCopyAccelerationStructureToMemoryKHR =
+ PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureToMemoryKHR" ) );
+ vkCopyMemoryToAccelerationStructureKHR =
+ PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyMemoryToAccelerationStructureKHR" ) );
+ vkWriteAccelerationStructuresPropertiesKHR =
+ PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkWriteAccelerationStructuresPropertiesKHR" ) );
+ vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureKHR" ) );
+ vkCmdCopyAccelerationStructureToMemoryKHR =
+ PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureToMemoryKHR" ) );
+ vkCmdCopyMemoryToAccelerationStructureKHR =
+ PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToAccelerationStructureKHR" ) );
+ vkGetAccelerationStructureDeviceAddressKHR =
+ PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureDeviceAddressKHR" ) );
+ vkCmdWriteAccelerationStructuresPropertiesKHR =
+ PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) );
+ vkGetDeviceAccelerationStructureCompatibilityKHR =
+ PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) );
+ vkGetAccelerationStructureBuildSizesKHR =
+ PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureBuildSizesKHR" ) );
//=== VK_KHR_sampler_ycbcr_conversion ===
- vkCreateSamplerYcbcrConversionKHR =
- PFN_vkCreateSamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversionKHR" ) );
+ vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversionKHR" ) );
if ( !vkCreateSamplerYcbcrConversion )
vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR;
- vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR(
- vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversionKHR" ) );
+ vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversionKHR" ) );
if ( !vkDestroySamplerYcbcrConversion )
vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR;
//=== VK_KHR_bind_memory2 ===
- vkBindBufferMemory2KHR =
- PFN_vkBindBufferMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2KHR" ) );
+ vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2KHR" ) );
if ( !vkBindBufferMemory2 )
vkBindBufferMemory2 = vkBindBufferMemory2KHR;
vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindImageMemory2KHR" ) );
@@ -13310,102 +13877,77 @@ namespace VULKAN_HPP_NAMESPACE
vkBindImageMemory2 = vkBindImageMemory2KHR;
//=== VK_EXT_image_drm_format_modifier ===
- vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT(
- vkGetInstanceProcAddr( instance, "vkGetImageDrmFormatModifierPropertiesEXT" ) );
+ vkGetImageDrmFormatModifierPropertiesEXT =
+ PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetImageDrmFormatModifierPropertiesEXT" ) );
//=== VK_EXT_validation_cache ===
- vkCreateValidationCacheEXT =
- PFN_vkCreateValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkCreateValidationCacheEXT" ) );
- vkDestroyValidationCacheEXT =
- PFN_vkDestroyValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkDestroyValidationCacheEXT" ) );
- vkMergeValidationCachesEXT =
- PFN_vkMergeValidationCachesEXT( vkGetInstanceProcAddr( instance, "vkMergeValidationCachesEXT" ) );
- vkGetValidationCacheDataEXT =
- PFN_vkGetValidationCacheDataEXT( vkGetInstanceProcAddr( instance, "vkGetValidationCacheDataEXT" ) );
+ vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkCreateValidationCacheEXT" ) );
+ vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkDestroyValidationCacheEXT" ) );
+ vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetInstanceProcAddr( instance, "vkMergeValidationCachesEXT" ) );
+ vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetInstanceProcAddr( instance, "vkGetValidationCacheDataEXT" ) );
//=== VK_NV_shading_rate_image ===
- vkCmdBindShadingRateImageNV =
- PFN_vkCmdBindShadingRateImageNV( vkGetInstanceProcAddr( instance, "vkCmdBindShadingRateImageNV" ) );
- vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV(
- vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) );
- vkCmdSetCoarseSampleOrderNV =
- PFN_vkCmdSetCoarseSampleOrderNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoarseSampleOrderNV" ) );
+ vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetInstanceProcAddr( instance, "vkCmdBindShadingRateImageNV" ) );
+ vkCmdSetViewportShadingRatePaletteNV =
+ PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) );
+ vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoarseSampleOrderNV" ) );
//=== VK_NV_ray_tracing ===
- vkCreateAccelerationStructureNV =
- PFN_vkCreateAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureNV" ) );
- vkDestroyAccelerationStructureNV =
- PFN_vkDestroyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureNV" ) );
- vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV(
- vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsNV" ) );
- vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV(
- vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryNV" ) );
- vkCmdBuildAccelerationStructureNV =
- PFN_vkCmdBuildAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureNV" ) );
- vkCmdCopyAccelerationStructureNV =
- PFN_vkCmdCopyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureNV" ) );
- vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) );
- vkCreateRayTracingPipelinesNV =
- PFN_vkCreateRayTracingPipelinesNV( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesNV" ) );
- vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV(
- vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) );
+ vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureNV" ) );
+ vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureNV" ) );
+ vkGetAccelerationStructureMemoryRequirementsNV =
+ PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsNV" ) );
+ vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryNV" ) );
+ vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureNV" ) );
+ vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureNV" ) );
+ vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) );
+ vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesNV" ) );
+ vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) );
if ( !vkGetRayTracingShaderGroupHandlesKHR )
vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV;
- vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV(
- vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureHandleNV" ) );
- vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV(
- vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
+ vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureHandleNV" ) );
+ vkCmdWriteAccelerationStructuresPropertiesNV =
+ PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetInstanceProcAddr( instance, "vkCompileDeferredNV" ) );
//=== VK_KHR_maintenance3 ===
- vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR(
- vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) );
+ vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) );
if ( !vkGetDescriptorSetLayoutSupport )
vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR;
//=== VK_KHR_draw_indirect_count ===
- vkCmdDrawIndirectCountKHR =
- PFN_vkCmdDrawIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountKHR" ) );
+ vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountKHR" ) );
if ( !vkCmdDrawIndirectCount )
vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR;
- vkCmdDrawIndexedIndirectCountKHR =
- PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) );
+ vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) );
if ( !vkCmdDrawIndexedIndirectCount )
vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR;
//=== VK_EXT_external_memory_host ===
- vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT(
- vkGetInstanceProcAddr( instance, "vkGetMemoryHostPointerPropertiesEXT" ) );
+ vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetMemoryHostPointerPropertiesEXT" ) );
//=== VK_AMD_buffer_marker ===
- vkCmdWriteBufferMarkerAMD =
- PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) );
+ vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) );
//=== VK_EXT_calibrated_timestamps ===
- vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) );
- vkGetCalibratedTimestampsEXT =
- PFN_vkGetCalibratedTimestampsEXT( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsEXT" ) );
+ vkGetPhysicalDeviceCalibrateableTimeDomainsEXT =
+ PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) );
+ vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsEXT" ) );
//=== VK_NV_mesh_shader ===
- vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksNV" ) );
- vkCmdDrawMeshTasksIndirectNV =
- PFN_vkCmdDrawMeshTasksIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectNV" ) );
- vkCmdDrawMeshTasksIndirectCountNV =
- PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) );
+ vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksNV" ) );
+ vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectNV" ) );
+ vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) );
//=== VK_NV_scissor_exclusive ===
- vkCmdSetExclusiveScissorNV =
- PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) );
+ vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) );
//=== VK_NV_device_diagnostic_checkpoints ===
- vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) );
- vkGetQueueCheckpointDataNV =
- PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) );
+ vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) );
+ vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) );
//=== VK_KHR_timeline_semaphore ===
- vkGetSemaphoreCounterValueKHR =
- PFN_vkGetSemaphoreCounterValueKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValueKHR" ) );
+ vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValueKHR" ) );
if ( !vkGetSemaphoreCounterValue )
vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR;
vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetInstanceProcAddr( instance, "vkWaitSemaphoresKHR" ) );
@@ -13416,55 +13958,46 @@ namespace VULKAN_HPP_NAMESPACE
vkSignalSemaphore = vkSignalSemaphoreKHR;
//=== VK_INTEL_performance_query ===
- vkInitializePerformanceApiINTEL =
- PFN_vkInitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkInitializePerformanceApiINTEL" ) );
- vkUninitializePerformanceApiINTEL =
- PFN_vkUninitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkUninitializePerformanceApiINTEL" ) );
- vkCmdSetPerformanceMarkerINTEL =
- PFN_vkCmdSetPerformanceMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceMarkerINTEL" ) );
- vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL(
- vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceStreamMarkerINTEL" ) );
- vkCmdSetPerformanceOverrideINTEL =
- PFN_vkCmdSetPerformanceOverrideINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceOverrideINTEL" ) );
- vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL(
- vkGetInstanceProcAddr( instance, "vkAcquirePerformanceConfigurationINTEL" ) );
- vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL(
- vkGetInstanceProcAddr( instance, "vkReleasePerformanceConfigurationINTEL" ) );
- vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL(
- vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) );
- vkGetPerformanceParameterINTEL =
- PFN_vkGetPerformanceParameterINTEL( vkGetInstanceProcAddr( instance, "vkGetPerformanceParameterINTEL" ) );
+ vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkInitializePerformanceApiINTEL" ) );
+ vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkUninitializePerformanceApiINTEL" ) );
+ vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceMarkerINTEL" ) );
+ vkCmdSetPerformanceStreamMarkerINTEL =
+ PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceStreamMarkerINTEL" ) );
+ vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceOverrideINTEL" ) );
+ vkAcquirePerformanceConfigurationINTEL =
+ PFN_vkAcquirePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkAcquirePerformanceConfigurationINTEL" ) );
+ vkReleasePerformanceConfigurationINTEL =
+ PFN_vkReleasePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkReleasePerformanceConfigurationINTEL" ) );
+ vkQueueSetPerformanceConfigurationINTEL =
+ PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) );
+ vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetInstanceProcAddr( instance, "vkGetPerformanceParameterINTEL" ) );
//=== VK_AMD_display_native_hdr ===
vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetInstanceProcAddr( instance, "vkSetLocalDimmingAMD" ) );
#if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_imagepipe_surface ===
- vkCreateImagePipeSurfaceFUCHSIA =
- PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) );
+ vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) );
#endif /*VK_USE_PLATFORM_FUCHSIA*/
#if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_surface ===
- vkCreateMetalSurfaceEXT =
- PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) );
+ vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) );
#endif /*VK_USE_PLATFORM_METAL_EXT*/
//=== VK_KHR_fragment_shading_rate ===
- vkGetPhysicalDeviceFragmentShadingRatesKHR = PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) );
- vkCmdSetFragmentShadingRateKHR =
- PFN_vkCmdSetFragmentShadingRateKHR( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateKHR" ) );
+ vkGetPhysicalDeviceFragmentShadingRatesKHR =
+ PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) );
+ vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateKHR" ) );
//=== VK_EXT_buffer_device_address ===
- vkGetBufferDeviceAddressEXT =
- PFN_vkGetBufferDeviceAddressEXT( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressEXT" ) );
+ vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressEXT" ) );
if ( !vkGetBufferDeviceAddress )
vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT;
//=== VK_EXT_tooling_info ===
- vkGetPhysicalDeviceToolPropertiesEXT = PFN_vkGetPhysicalDeviceToolPropertiesEXT(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) );
+ vkGetPhysicalDeviceToolPropertiesEXT =
+ PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) );
if ( !vkGetPhysicalDeviceToolProperties )
vkGetPhysicalDeviceToolProperties = vkGetPhysicalDeviceToolPropertiesEXT;
@@ -13472,47 +14005,40 @@ namespace VULKAN_HPP_NAMESPACE
vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetInstanceProcAddr( instance, "vkWaitForPresentKHR" ) );
//=== VK_NV_cooperative_matrix ===
- vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) );
+ vkGetPhysicalDeviceCooperativeMatrixPropertiesNV =
+ PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) );
//=== VK_NV_coverage_reduction_mode ===
- vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV =
- PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) );
+ vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+ vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) );
#if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_EXT_full_screen_exclusive ===
- vkGetPhysicalDeviceSurfacePresentModes2EXT = PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) );
- vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT(
- vkGetInstanceProcAddr( instance, "vkAcquireFullScreenExclusiveModeEXT" ) );
- vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT(
- vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) );
- vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT(
- vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModes2EXT" ) );
+ vkGetPhysicalDeviceSurfacePresentModes2EXT =
+ PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) );
+ vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkAcquireFullScreenExclusiveModeEXT" ) );
+ vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) );
+ vkGetDeviceGroupSurfacePresentModes2EXT =
+ PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModes2EXT" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_EXT_headless_surface ===
- vkCreateHeadlessSurfaceEXT =
- PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) );
+ vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) );
//=== VK_KHR_buffer_device_address ===
- vkGetBufferDeviceAddressKHR =
- PFN_vkGetBufferDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressKHR" ) );
+ vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressKHR" ) );
if ( !vkGetBufferDeviceAddress )
vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR;
- vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR(
- vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddressKHR" ) );
+ vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddressKHR" ) );
if ( !vkGetBufferOpaqueCaptureAddress )
vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR;
- vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR(
- vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
+ vkGetDeviceMemoryOpaqueCaptureAddressKHR =
+ PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
if ( !vkGetDeviceMemoryOpaqueCaptureAddress )
vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR;
//=== VK_EXT_line_rasterization ===
- vkCmdSetLineStippleEXT =
- PFN_vkCmdSetLineStippleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEXT" ) );
+ vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEXT" ) );
//=== VK_EXT_host_query_reset ===
vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetInstanceProcAddr( instance, "vkResetQueryPoolEXT" ) );
@@ -13526,40 +14052,31 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFaceEXT" ) );
if ( !vkCmdSetFrontFace )
vkCmdSetFrontFace = vkCmdSetFrontFaceEXT;
- vkCmdSetPrimitiveTopologyEXT =
- PFN_vkCmdSetPrimitiveTopologyEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopologyEXT" ) );
+ vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopologyEXT" ) );
if ( !vkCmdSetPrimitiveTopology )
vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT;
- vkCmdSetViewportWithCountEXT =
- PFN_vkCmdSetViewportWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCountEXT" ) );
+ vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCountEXT" ) );
if ( !vkCmdSetViewportWithCount )
vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT;
- vkCmdSetScissorWithCountEXT =
- PFN_vkCmdSetScissorWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCountEXT" ) );
+ vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCountEXT" ) );
if ( !vkCmdSetScissorWithCount )
vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT;
- vkCmdBindVertexBuffers2EXT =
- PFN_vkCmdBindVertexBuffers2EXT( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2EXT" ) );
+ vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2EXT" ) );
if ( !vkCmdBindVertexBuffers2 )
vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT;
- vkCmdSetDepthTestEnableEXT =
- PFN_vkCmdSetDepthTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnableEXT" ) );
+ vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnableEXT" ) );
if ( !vkCmdSetDepthTestEnable )
vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT;
- vkCmdSetDepthWriteEnableEXT =
- PFN_vkCmdSetDepthWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnableEXT" ) );
+ vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnableEXT" ) );
if ( !vkCmdSetDepthWriteEnable )
vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT;
- vkCmdSetDepthCompareOpEXT =
- PFN_vkCmdSetDepthCompareOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOpEXT" ) );
+ vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOpEXT" ) );
if ( !vkCmdSetDepthCompareOp )
vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT;
- vkCmdSetDepthBoundsTestEnableEXT =
- PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnableEXT" ) );
+ vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnableEXT" ) );
if ( !vkCmdSetDepthBoundsTestEnable )
vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT;
- vkCmdSetStencilTestEnableEXT =
- PFN_vkCmdSetStencilTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnableEXT" ) );
+ vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnableEXT" ) );
if ( !vkCmdSetStencilTestEnable )
vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT;
vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOpEXT" ) );
@@ -13567,51 +14084,39 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdSetStencilOp = vkCmdSetStencilOpEXT;
//=== VK_KHR_deferred_host_operations ===
- vkCreateDeferredOperationKHR =
- PFN_vkCreateDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkCreateDeferredOperationKHR" ) );
- vkDestroyDeferredOperationKHR =
- PFN_vkDestroyDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkDestroyDeferredOperationKHR" ) );
- vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR(
- vkGetInstanceProcAddr( instance, "vkGetDeferredOperationMaxConcurrencyKHR" ) );
- vkGetDeferredOperationResultKHR =
- PFN_vkGetDeferredOperationResultKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationResultKHR" ) );
- vkDeferredOperationJoinKHR =
- PFN_vkDeferredOperationJoinKHR( vkGetInstanceProcAddr( instance, "vkDeferredOperationJoinKHR" ) );
+ vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkCreateDeferredOperationKHR" ) );
+ vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkDestroyDeferredOperationKHR" ) );
+ vkGetDeferredOperationMaxConcurrencyKHR =
+ PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationMaxConcurrencyKHR" ) );
+ vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationResultKHR" ) );
+ vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetInstanceProcAddr( instance, "vkDeferredOperationJoinKHR" ) );
//=== VK_KHR_pipeline_executable_properties ===
- vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR(
- vkGetInstanceProcAddr( instance, "vkGetPipelineExecutablePropertiesKHR" ) );
- vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR(
- vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) );
- vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR(
- vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
+ vkGetPipelineExecutablePropertiesKHR =
+ PFN_vkGetPipelineExecutablePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutablePropertiesKHR" ) );
+ vkGetPipelineExecutableStatisticsKHR =
+ PFN_vkGetPipelineExecutableStatisticsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) );
+ vkGetPipelineExecutableInternalRepresentationsKHR =
+ PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
//=== VK_NV_device_generated_commands ===
- vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV(
- vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsNV" ) );
- vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV(
- vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsNV" ) );
- vkCmdExecuteGeneratedCommandsNV =
- PFN_vkCmdExecuteGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdExecuteGeneratedCommandsNV" ) );
- vkCmdBindPipelineShaderGroupNV =
- PFN_vkCmdBindPipelineShaderGroupNV( vkGetInstanceProcAddr( instance, "vkCmdBindPipelineShaderGroupNV" ) );
- vkCreateIndirectCommandsLayoutNV =
- PFN_vkCreateIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNV" ) );
- vkDestroyIndirectCommandsLayoutNV =
- PFN_vkDestroyIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNV" ) );
+ vkGetGeneratedCommandsMemoryRequirementsNV =
+ PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsNV" ) );
+ vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsNV" ) );
+ vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdExecuteGeneratedCommandsNV" ) );
+ vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetInstanceProcAddr( instance, "vkCmdBindPipelineShaderGroupNV" ) );
+ vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNV" ) );
+ vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNV" ) );
//=== VK_EXT_acquire_drm_display ===
- vkAcquireDrmDisplayEXT =
- PFN_vkAcquireDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireDrmDisplayEXT" ) );
- vkGetDrmDisplayEXT = PFN_vkGetDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetDrmDisplayEXT" ) );
+ vkAcquireDrmDisplayEXT = PFN_vkAcquireDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireDrmDisplayEXT" ) );
+ vkGetDrmDisplayEXT = PFN_vkGetDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetDrmDisplayEXT" ) );
//=== VK_EXT_private_data ===
- vkCreatePrivateDataSlotEXT =
- PFN_vkCreatePrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlotEXT" ) );
+ vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlotEXT" ) );
if ( !vkCreatePrivateDataSlot )
vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT;
- vkDestroyPrivateDataSlotEXT =
- PFN_vkDestroyPrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlotEXT" ) );
+ vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlotEXT" ) );
if ( !vkDestroyPrivateDataSlot )
vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT;
vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkSetPrivateDataEXT" ) );
@@ -13626,6 +14131,11 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdEncodeVideoKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ //=== VK_EXT_metal_objects ===
+ vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetInstanceProcAddr( instance, "vkExportMetalObjectsEXT" ) );
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
//=== VK_KHR_synchronization2 ===
vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2KHR" ) );
if ( !vkCmdSetEvent2 )
@@ -13636,25 +14146,25 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2KHR" ) );
if ( !vkCmdWaitEvents2 )
vkCmdWaitEvents2 = vkCmdWaitEvents2KHR;
- vkCmdPipelineBarrier2KHR =
- PFN_vkCmdPipelineBarrier2KHR( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2KHR" ) );
+ vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2KHR" ) );
if ( !vkCmdPipelineBarrier2 )
vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR;
- vkCmdWriteTimestamp2KHR =
- PFN_vkCmdWriteTimestamp2KHR( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2KHR" ) );
+ vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2KHR" ) );
if ( !vkCmdWriteTimestamp2 )
vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR;
vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetInstanceProcAddr( instance, "vkQueueSubmit2KHR" ) );
if ( !vkQueueSubmit2 )
vkQueueSubmit2 = vkQueueSubmit2KHR;
- vkCmdWriteBufferMarker2AMD =
- PFN_vkCmdWriteBufferMarker2AMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarker2AMD" ) );
- vkGetQueueCheckpointData2NV =
- PFN_vkGetQueueCheckpointData2NV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointData2NV" ) );
+ vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarker2AMD" ) );
+ vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointData2NV" ) );
//=== VK_NV_fragment_shading_rate_enums ===
- vkCmdSetFragmentShadingRateEnumNV =
- PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateEnumNV" ) );
+ vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateEnumNV" ) );
+
+ //=== VK_EXT_mesh_shader ===
+ vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksEXT" ) );
+ vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectEXT" ) );
+ vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountEXT" ) );
//=== VK_KHR_copy_commands2 ===
vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2KHR" ) );
@@ -13663,12 +14173,10 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2KHR" ) );
if ( !vkCmdCopyImage2 )
vkCmdCopyImage2 = vkCmdCopyImage2KHR;
- vkCmdCopyBufferToImage2KHR =
- PFN_vkCmdCopyBufferToImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2KHR" ) );
+ vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2KHR" ) );
if ( !vkCmdCopyBufferToImage2 )
vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR;
- vkCmdCopyImageToBuffer2KHR =
- PFN_vkCmdCopyImageToBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2KHR" ) );
+ vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2KHR" ) );
if ( !vkCmdCopyImageToBuffer2 )
vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR;
vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2KHR" ) );
@@ -13678,135 +14186,204 @@ namespace VULKAN_HPP_NAMESPACE
if ( !vkCmdResolveImage2 )
vkCmdResolveImage2 = vkCmdResolveImage2KHR;
+ //=== VK_EXT_image_compression_control ===
+ vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2EXT" ) );
+
+ //=== VK_EXT_device_fault ===
+ vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetInstanceProcAddr( instance, "vkGetDeviceFaultInfoEXT" ) );
+
#if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_NV_acquire_winrt_display ===
- vkAcquireWinrtDisplayNV =
- PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) );
- vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) );
+ vkAcquireWinrtDisplayNV = PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) );
+ vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
//=== VK_EXT_directfb_surface ===
- vkCreateDirectFBSurfaceEXT =
- PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) );
- vkGetPhysicalDeviceDirectFBPresentationSupportEXT = PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) );
+ vkCreateDirectFBSurfaceEXT = PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) );
+ vkGetPhysicalDeviceDirectFBPresentationSupportEXT =
+ PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) );
#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
//=== VK_KHR_ray_tracing_pipeline ===
- vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysKHR" ) );
- vkCreateRayTracingPipelinesKHR =
- PFN_vkCreateRayTracingPipelinesKHR( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesKHR" ) );
- vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR(
- vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesKHR" ) );
- vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
- vkGetInstanceProcAddr( instance, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) );
- vkCmdTraceRaysIndirectKHR =
- PFN_vkCmdTraceRaysIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirectKHR" ) );
- vkGetRayTracingShaderGroupStackSizeKHR = PFN_vkGetRayTracingShaderGroupStackSizeKHR(
- vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupStackSizeKHR" ) );
- vkCmdSetRayTracingPipelineStackSizeKHR = PFN_vkCmdSetRayTracingPipelineStackSizeKHR(
- vkGetInstanceProcAddr( instance, "vkCmdSetRayTracingPipelineStackSizeKHR" ) );
+ vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysKHR" ) );
+ vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesKHR" ) );
+ vkGetRayTracingShaderGroupHandlesKHR =
+ PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesKHR" ) );
+ vkGetRayTracingCaptureReplayShaderGroupHandlesKHR =
+ PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) );
+ vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirectKHR" ) );
+ vkGetRayTracingShaderGroupStackSizeKHR =
+ PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupStackSizeKHR" ) );
+ vkCmdSetRayTracingPipelineStackSizeKHR =
+ PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetInstanceProcAddr( instance, "vkCmdSetRayTracingPipelineStackSizeKHR" ) );
//=== VK_EXT_vertex_input_dynamic_state ===
- vkCmdSetVertexInputEXT =
- PFN_vkCmdSetVertexInputEXT( vkGetInstanceProcAddr( instance, "vkCmdSetVertexInputEXT" ) );
+ vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetInstanceProcAddr( instance, "vkCmdSetVertexInputEXT" ) );
#if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_external_memory ===
- vkGetMemoryZirconHandleFUCHSIA =
- PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandleFUCHSIA" ) );
- vkGetMemoryZirconHandlePropertiesFUCHSIA = PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA(
- vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) );
+ vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandleFUCHSIA" ) );
+ vkGetMemoryZirconHandlePropertiesFUCHSIA =
+ PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) );
#endif /*VK_USE_PLATFORM_FUCHSIA*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_external_semaphore ===
- vkImportSemaphoreZirconHandleFUCHSIA = PFN_vkImportSemaphoreZirconHandleFUCHSIA(
- vkGetInstanceProcAddr( instance, "vkImportSemaphoreZirconHandleFUCHSIA" ) );
- vkGetSemaphoreZirconHandleFUCHSIA =
- PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetSemaphoreZirconHandleFUCHSIA" ) );
+ vkImportSemaphoreZirconHandleFUCHSIA =
+ PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkImportSemaphoreZirconHandleFUCHSIA" ) );
+ vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetSemaphoreZirconHandleFUCHSIA" ) );
#endif /*VK_USE_PLATFORM_FUCHSIA*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
- vkCreateBufferCollectionFUCHSIA =
- PFN_vkCreateBufferCollectionFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateBufferCollectionFUCHSIA" ) );
- vkSetBufferCollectionImageConstraintsFUCHSIA = PFN_vkSetBufferCollectionImageConstraintsFUCHSIA(
- vkGetInstanceProcAddr( instance, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) );
- vkSetBufferCollectionBufferConstraintsFUCHSIA = PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA(
- vkGetInstanceProcAddr( instance, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) );
- vkDestroyBufferCollectionFUCHSIA =
- PFN_vkDestroyBufferCollectionFUCHSIA( vkGetInstanceProcAddr( instance, "vkDestroyBufferCollectionFUCHSIA" ) );
- vkGetBufferCollectionPropertiesFUCHSIA = PFN_vkGetBufferCollectionPropertiesFUCHSIA(
- vkGetInstanceProcAddr( instance, "vkGetBufferCollectionPropertiesFUCHSIA" ) );
+ vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateBufferCollectionFUCHSIA" ) );
+ vkSetBufferCollectionImageConstraintsFUCHSIA =
+ PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetInstanceProcAddr( instance, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) );
+ vkSetBufferCollectionBufferConstraintsFUCHSIA =
+ PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetInstanceProcAddr( instance, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) );
+ vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetInstanceProcAddr( instance, "vkDestroyBufferCollectionFUCHSIA" ) );
+ vkGetBufferCollectionPropertiesFUCHSIA =
+ PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetBufferCollectionPropertiesFUCHSIA" ) );
#endif /*VK_USE_PLATFORM_FUCHSIA*/
//=== VK_HUAWEI_subpass_shading ===
- vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
- vkGetInstanceProcAddr( instance, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) );
- vkCmdSubpassShadingHUAWEI =
- PFN_vkCmdSubpassShadingHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdSubpassShadingHUAWEI" ) );
+ vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI =
+ PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetInstanceProcAddr( instance, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) );
+ vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdSubpassShadingHUAWEI" ) );
//=== VK_HUAWEI_invocation_mask ===
- vkCmdBindInvocationMaskHUAWEI =
- PFN_vkCmdBindInvocationMaskHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdBindInvocationMaskHUAWEI" ) );
+ vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdBindInvocationMaskHUAWEI" ) );
//=== VK_NV_external_memory_rdma ===
- vkGetMemoryRemoteAddressNV =
- PFN_vkGetMemoryRemoteAddressNV( vkGetInstanceProcAddr( instance, "vkGetMemoryRemoteAddressNV" ) );
+ vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetInstanceProcAddr( instance, "vkGetMemoryRemoteAddressNV" ) );
+
+ //=== VK_EXT_pipeline_properties ===
+ vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPipelinePropertiesEXT" ) );
//=== VK_EXT_extended_dynamic_state2 ===
- vkCmdSetPatchControlPointsEXT =
- PFN_vkCmdSetPatchControlPointsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPatchControlPointsEXT" ) );
- vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT(
- vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnableEXT" ) );
+ vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPatchControlPointsEXT" ) );
+ vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnableEXT" ) );
if ( !vkCmdSetRasterizerDiscardEnable )
vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT;
- vkCmdSetDepthBiasEnableEXT =
- PFN_vkCmdSetDepthBiasEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnableEXT" ) );
+ vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnableEXT" ) );
if ( !vkCmdSetDepthBiasEnable )
vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT;
- vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLogicOpEXT" ) );
- vkCmdSetPrimitiveRestartEnableEXT =
- PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnableEXT" ) );
+ vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLogicOpEXT" ) );
+ vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnableEXT" ) );
if ( !vkCmdSetPrimitiveRestartEnable )
vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT;
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
//=== VK_QNX_screen_surface ===
- vkCreateScreenSurfaceQNX =
- PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) );
- vkGetPhysicalDeviceScreenPresentationSupportQNX = PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) );
+ vkCreateScreenSurfaceQNX = PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) );
+ vkGetPhysicalDeviceScreenPresentationSupportQNX =
+ PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) );
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
//=== VK_EXT_color_write_enable ===
- vkCmdSetColorWriteEnableEXT =
- PFN_vkCmdSetColorWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorWriteEnableEXT" ) );
+ vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorWriteEnableEXT" ) );
+
+ //=== VK_KHR_ray_tracing_maintenance1 ===
+ vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirect2KHR" ) );
//=== VK_EXT_multi_draw ===
- vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiEXT" ) );
- vkCmdDrawMultiIndexedEXT =
- PFN_vkCmdDrawMultiIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiIndexedEXT" ) );
+ vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiEXT" ) );
+ vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiIndexedEXT" ) );
+
+ //=== VK_EXT_opacity_micromap ===
+ vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetInstanceProcAddr( instance, "vkCreateMicromapEXT" ) );
+ vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetInstanceProcAddr( instance, "vkDestroyMicromapEXT" ) );
+ vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetInstanceProcAddr( instance, "vkCmdBuildMicromapsEXT" ) );
+ vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetInstanceProcAddr( instance, "vkBuildMicromapsEXT" ) );
+ vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetInstanceProcAddr( instance, "vkCopyMicromapEXT" ) );
+ vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCopyMicromapToMemoryEXT" ) );
+ vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetInstanceProcAddr( instance, "vkCopyMemoryToMicromapEXT" ) );
+ vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetInstanceProcAddr( instance, "vkWriteMicromapsPropertiesEXT" ) );
+ vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMicromapEXT" ) );
+ vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMicromapToMemoryEXT" ) );
+ vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToMicromapEXT" ) );
+ vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetInstanceProcAddr( instance, "vkCmdWriteMicromapsPropertiesEXT" ) );
+ vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetInstanceProcAddr( instance, "vkGetDeviceMicromapCompatibilityEXT" ) );
+ vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetInstanceProcAddr( instance, "vkGetMicromapBuildSizesEXT" ) );
//=== VK_EXT_pageable_device_local_memory ===
- vkSetDeviceMemoryPriorityEXT =
- PFN_vkSetDeviceMemoryPriorityEXT( vkGetInstanceProcAddr( instance, "vkSetDeviceMemoryPriorityEXT" ) );
+ vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetInstanceProcAddr( instance, "vkSetDeviceMemoryPriorityEXT" ) );
//=== VK_KHR_maintenance4 ===
- vkGetDeviceBufferMemoryRequirementsKHR = PFN_vkGetDeviceBufferMemoryRequirementsKHR(
- vkGetInstanceProcAddr( instance, "vkGetDeviceBufferMemoryRequirementsKHR" ) );
+ vkGetDeviceBufferMemoryRequirementsKHR =
+ PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceBufferMemoryRequirementsKHR" ) );
if ( !vkGetDeviceBufferMemoryRequirements )
vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR;
- vkGetDeviceImageMemoryRequirementsKHR = PFN_vkGetDeviceImageMemoryRequirementsKHR(
- vkGetInstanceProcAddr( instance, "vkGetDeviceImageMemoryRequirementsKHR" ) );
+ vkGetDeviceImageMemoryRequirementsKHR =
+ PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageMemoryRequirementsKHR" ) );
if ( !vkGetDeviceImageMemoryRequirements )
vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR;
- vkGetDeviceImageSparseMemoryRequirementsKHR = PFN_vkGetDeviceImageSparseMemoryRequirementsKHR(
- vkGetInstanceProcAddr( instance, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) );
+ vkGetDeviceImageSparseMemoryRequirementsKHR =
+ PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) );
if ( !vkGetDeviceImageSparseMemoryRequirements )
vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR;
+
+ //=== VK_VALVE_descriptor_set_host_mapping ===
+ vkGetDescriptorSetLayoutHostMappingInfoVALVE =
+ PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) );
+ vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetHostMappingVALVE" ) );
+
+ //=== VK_EXT_extended_dynamic_state3 ===
+ vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetInstanceProcAddr( instance, "vkCmdSetTessellationDomainOriginEXT" ) );
+ vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClampEnableEXT" ) );
+ vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPolygonModeEXT" ) );
+ vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizationSamplesEXT" ) );
+ vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleMaskEXT" ) );
+ vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetAlphaToCoverageEnableEXT" ) );
+ vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetAlphaToOneEnableEXT" ) );
+ vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLogicOpEnableEXT" ) );
+ vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendEnableEXT" ) );
+ vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendEquationEXT" ) );
+ vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorWriteMaskEXT" ) );
+ vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizationStreamEXT" ) );
+ vkCmdSetConservativeRasterizationModeEXT =
+ PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetConservativeRasterizationModeEXT" ) );
+ vkCmdSetExtraPrimitiveOverestimationSizeEXT =
+ PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) );
+ vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClipEnableEXT" ) );
+ vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEnableEXT" ) );
+ vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendAdvancedEXT" ) );
+ vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetProvokingVertexModeEXT" ) );
+ vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineRasterizationModeEXT" ) );
+ vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEnableEXT" ) );
+ vkCmdSetDepthClipNegativeOneToOneEXT =
+ PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClipNegativeOneToOneEXT" ) );
+ vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingEnableNV" ) );
+ vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportSwizzleNV" ) );
+ vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageToColorEnableNV" ) );
+ vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageToColorLocationNV" ) );
+ vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageModulationModeNV" ) );
+ vkCmdSetCoverageModulationTableEnableNV =
+ PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageModulationTableEnableNV" ) );
+ vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageModulationTableNV" ) );
+ vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetShadingRateImageEnableNV" ) );
+ vkCmdSetRepresentativeFragmentTestEnableNV =
+ PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetRepresentativeFragmentTestEnableNV" ) );
+ vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageReductionModeNV" ) );
+
+ //=== VK_EXT_shader_module_identifier ===
+ vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetInstanceProcAddr( instance, "vkGetShaderModuleIdentifierEXT" ) );
+ vkGetShaderModuleCreateInfoIdentifierEXT =
+ PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetInstanceProcAddr( instance, "vkGetShaderModuleCreateInfoIdentifierEXT" ) );
+
+ //=== VK_NV_optical_flow ===
+ vkGetPhysicalDeviceOpticalFlowImageFormatsNV =
+ PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceOpticalFlowImageFormatsNV" ) );
+ vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetInstanceProcAddr( instance, "vkCreateOpticalFlowSessionNV" ) );
+ vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetInstanceProcAddr( instance, "vkDestroyOpticalFlowSessionNV" ) );
+ vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetInstanceProcAddr( instance, "vkBindOpticalFlowSessionImageNV" ) );
+ vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetInstanceProcAddr( instance, "vkCmdOpticalFlowExecuteNV" ) );
+
+ //=== VK_QCOM_tile_properties ===
+ vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetInstanceProcAddr( instance, "vkGetFramebufferTilePropertiesQCOM" ) );
+ vkGetDynamicRenderingTilePropertiesQCOM =
+ PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetInstanceProcAddr( instance, "vkGetDynamicRenderingTilePropertiesQCOM" ) );
}
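
For context, the reformatted assignments above all belong to vk::DispatchLoaderDynamic, whose two-stage init( Instance ) / init( Device ) flow this patch only re-wraps. As a hedged usage sketch — following the upstream Vulkan-Hpp README rather than anything introduced by this patch, and assuming the application compiles with VULKAN_HPP_DISPATCH_LOADER_DYNAMIC defined to 1 — the default dispatcher is typically driven like this:

    #include <vulkan/vulkan.hpp>

    // Storage for the default dispatcher; exactly one translation unit may define this.
    VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE

    void initDispatcher()
    {
      // Stage 1: bootstrap from the Vulkan loader library.
      vk::DynamicLoader dl;
      PFN_vkGetInstanceProcAddr getInstanceProcAddr =
        dl.getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );
      VULKAN_HPP_DEFAULT_DISPATCHER.init( getInstanceProcAddr );

      // Stage 2: resolve instance-level entry points (the init( Instance ) shown above).
      vk::Instance instance = vk::createInstance( vk::InstanceCreateInfo{} );
      VULKAN_HPP_DEFAULT_DISPATCHER.init( instance );

      // Stage 3 (optional): after creating a vk::Device, calling
      //   VULKAN_HPP_DEFAULT_DISPATCHER.init( device );
      // switches to device-local function pointers via the init( Device ) that follows.
    }
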
void init( VULKAN_HPP_NAMESPACE::Device deviceCpp ) VULKAN_HPP_NOEXCEPT
@@ -13814,245 +14391,200 @@ namespace VULKAN_HPP_NAMESPACE
VkDevice device = static_cast<VkDevice>( deviceCpp );
//=== VK_VERSION_1_0 ===
- vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) );
- vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) );
- vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) );
- vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) );
- vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) );
- vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) );
- vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) );
- vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) );
- vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) );
- vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) );
- vkFlushMappedMemoryRanges =
- PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) );
- vkInvalidateMappedMemoryRanges =
- PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) );
- vkGetDeviceMemoryCommitment =
- PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) );
- vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) );
- vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) );
- vkGetBufferMemoryRequirements =
- PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) );
- vkGetImageMemoryRequirements =
- PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) );
- vkGetImageSparseMemoryRequirements =
- PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) );
- vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) );
- vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) );
- vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) );
- vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) );
- vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) );
- vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) );
- vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) );
- vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) );
- vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) );
- vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) );
- vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) );
- vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) );
- vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) );
- vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) );
- vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) );
- vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) );
- vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) );
- vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) );
- vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) );
- vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) );
- vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) );
- vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) );
- vkGetImageSubresourceLayout =
- PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) );
- vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) );
- vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) );
- vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) );
- vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) );
- vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) );
- vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) );
- vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) );
- vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) );
- vkCreateGraphicsPipelines =
- PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) );
- vkCreateComputePipelines =
- PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) );
- vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) );
- vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) );
- vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) );
- vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) );
- vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) );
- vkCreateDescriptorSetLayout =
- PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) );
- vkDestroyDescriptorSetLayout =
- PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) );
- vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) );
- vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) );
- vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) );
- vkAllocateDescriptorSets =
- PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) );
- vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) );
- vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) );
- vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) );
- vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) );
- vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) );
- vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) );
- vkGetRenderAreaGranularity =
- PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) );
- vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) );
- vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) );
- vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) );
- vkAllocateCommandBuffers =
- PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) );
- vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) );
- vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) );
- vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) );
- vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) );
- vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) );
- vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) );
- vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) );
- vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) );
- vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) );
- vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) );
- vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) );
- vkCmdSetStencilCompareMask =
- PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) );
- vkCmdSetStencilWriteMask =
- PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) );
- vkCmdSetStencilReference =
- PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) );
- vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) );
- vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) );
- vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) );
- vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) );
- vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) );
- vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) );
- vkCmdDrawIndexedIndirect =
- PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) );
- vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) );
- vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) );
- vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) );
- vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) );
- vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) );
- vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) );
- vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) );
- vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) );
- vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) );
- vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) );
- vkCmdClearDepthStencilImage =
- PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) );
- vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) );
- vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) );
- vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) );
- vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) );
- vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) );
- vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) );
- vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) );
- vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) );
- vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) );
- vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) );
- vkCmdCopyQueryPoolResults =
- PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) );
- vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) );
- vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) );
- vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) );
- vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) );
- vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) );
+ vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) );
+ vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) );
+ vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) );
+ vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) );
+ vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) );
+ vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) );
+ vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) );
+ vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) );
+ vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) );
+ vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) );
+ vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) );
+ vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) );
+ vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) );
+ vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) );
+ vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) );
+ vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) );
+ vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) );
+ vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) );
+ vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) );
+ vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) );
+ vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) );
+ vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) );
+ vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) );
+ vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) );
+ vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) );
+ vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) );
+ vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) );
+ vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) );
+ vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) );
+ vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) );
+ vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) );
+ vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) );
+ vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) );
+ vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) );
+ vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) );
+ vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) );
+ vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) );
+ vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) );
+ vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) );
+ vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) );
+ vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) );
+ vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) );
+ vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) );
+ vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) );
+ vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) );
+ vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) );
+ vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) );
+ vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) );
+ vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) );
+ vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) );
+ vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) );
+ vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) );
+ vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) );
+ vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) );
+ vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) );
+ vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) );
+ vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) );
+ vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) );
+ vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) );
+ vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) );
+ vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) );
+ vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) );
+ vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) );
+ vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) );
+ vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) );
+ vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) );
+ vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) );
+ vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) );
+ vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) );
+ vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) );
+ vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) );
+ vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) );
+ vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) );
+ vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) );
+ vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) );
+ vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) );
+ vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) );
+ vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) );
+ vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) );
+ vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) );
+ vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) );
+ vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) );
+ vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) );
+ vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) );
+ vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) );
+ vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) );
+ vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) );
+ vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) );
+ vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) );
+ vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) );
+ vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) );
+ vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) );
+ vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) );
+ vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) );
+ vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) );
+ vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) );
+ vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) );
+ vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) );
+ vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) );
+ vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) );
+ vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) );
+ vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) );
+ vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) );
+ vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) );
+ vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) );
+ vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) );
+ vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) );
+ vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) );
+ vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) );
+ vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) );
+ vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) );
+ vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) );
+ vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) );
+ vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) );
+ vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) );
+ vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) );
+ vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) );
+ vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) );
+ vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) );
+ vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) );
+ vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) );
//=== VK_VERSION_1_1 ===
- vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) );
- vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) );
- vkGetDeviceGroupPeerMemoryFeatures =
- PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) );
- vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) );
- vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) );
- vkGetImageMemoryRequirements2 =
- PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) );
- vkGetBufferMemoryRequirements2 =
- PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) );
- vkGetImageSparseMemoryRequirements2 =
- PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) );
- vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) );
- vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) );
- vkCreateSamplerYcbcrConversion =
- PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) );
- vkDestroySamplerYcbcrConversion =
- PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) );
- vkCreateDescriptorUpdateTemplate =
- PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) );
- vkDestroyDescriptorUpdateTemplate =
- PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) );
- vkUpdateDescriptorSetWithTemplate =
- PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) );
- vkGetDescriptorSetLayoutSupport =
- PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) );
+ vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) );
+ vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) );
+ vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) );
+ vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) );
+ vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) );
+ vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) );
+ vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) );
+ vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) );
+ vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) );
+ vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) );
+ vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) );
+ vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) );
+ vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) );
+ vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) );
+ vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) );
+ vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) );
//=== VK_VERSION_1_2 ===
- vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) );
- vkCmdDrawIndexedIndirectCount =
- PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) );
- vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) );
- vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) );
- vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) );
- vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) );
- vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) );
- vkGetSemaphoreCounterValue =
- PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) );
- vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) );
- vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) );
- vkGetBufferDeviceAddress =
- PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) );
- vkGetBufferOpaqueCaptureAddress =
- PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) );
- vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress(
- vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
+ vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) );
+ vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) );
+ vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) );
+ vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) );
+ vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) );
+ vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) );
+ vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) );
+ vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) );
+ vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) );
+ vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) );
+ vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) );
+ vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) );
+ vkGetDeviceMemoryOpaqueCaptureAddress =
+ PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
//=== VK_VERSION_1_3 ===
- vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlot" ) );
- vkDestroyPrivateDataSlot =
- PFN_vkDestroyPrivateDataSlot( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlot" ) );
- vkSetPrivateData = PFN_vkSetPrivateData( vkGetDeviceProcAddr( device, "vkSetPrivateData" ) );
- vkGetPrivateData = PFN_vkGetPrivateData( vkGetDeviceProcAddr( device, "vkGetPrivateData" ) );
- vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetDeviceProcAddr( device, "vkCmdSetEvent2" ) );
- vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetDeviceProcAddr( device, "vkCmdResetEvent2" ) );
- vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2" ) );
- vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2" ) );
- vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2" ) );
- vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetDeviceProcAddr( device, "vkQueueSubmit2" ) );
- vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2" ) );
- vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetDeviceProcAddr( device, "vkCmdCopyImage2" ) );
- vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2" ) );
- vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2" ) );
- vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetDeviceProcAddr( device, "vkCmdBlitImage2" ) );
- vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetDeviceProcAddr( device, "vkCmdResolveImage2" ) );
- vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetDeviceProcAddr( device, "vkCmdBeginRendering" ) );
- vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetDeviceProcAddr( device, "vkCmdEndRendering" ) );
- vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetDeviceProcAddr( device, "vkCmdSetCullMode" ) );
- vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetDeviceProcAddr( device, "vkCmdSetFrontFace" ) );
- vkCmdSetPrimitiveTopology =
- PFN_vkCmdSetPrimitiveTopology( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopology" ) );
- vkCmdSetViewportWithCount =
- PFN_vkCmdSetViewportWithCount( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCount" ) );
- vkCmdSetScissorWithCount =
- PFN_vkCmdSetScissorWithCount( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCount" ) );
- vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2" ) );
- vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnable" ) );
- vkCmdSetDepthWriteEnable =
- PFN_vkCmdSetDepthWriteEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnable" ) );
- vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOp" ) );
- vkCmdSetDepthBoundsTestEnable =
- PFN_vkCmdSetDepthBoundsTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnable" ) );
- vkCmdSetStencilTestEnable =
- PFN_vkCmdSetStencilTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnable" ) );
- vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetDeviceProcAddr( device, "vkCmdSetStencilOp" ) );
- vkCmdSetRasterizerDiscardEnable =
- PFN_vkCmdSetRasterizerDiscardEnable( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnable" ) );
- vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnable" ) );
- vkCmdSetPrimitiveRestartEnable =
- PFN_vkCmdSetPrimitiveRestartEnable( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnable" ) );
- vkGetDeviceBufferMemoryRequirements =
- PFN_vkGetDeviceBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirements" ) );
- vkGetDeviceImageMemoryRequirements =
- PFN_vkGetDeviceImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirements" ) );
- vkGetDeviceImageSparseMemoryRequirements = PFN_vkGetDeviceImageSparseMemoryRequirements(
- vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) );
+ vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlot" ) );
+ vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlot" ) );
+ vkSetPrivateData = PFN_vkSetPrivateData( vkGetDeviceProcAddr( device, "vkSetPrivateData" ) );
+ vkGetPrivateData = PFN_vkGetPrivateData( vkGetDeviceProcAddr( device, "vkGetPrivateData" ) );
+ vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetDeviceProcAddr( device, "vkCmdSetEvent2" ) );
+ vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetDeviceProcAddr( device, "vkCmdResetEvent2" ) );
+ vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2" ) );
+ vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2" ) );
+ vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2" ) );
+ vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetDeviceProcAddr( device, "vkQueueSubmit2" ) );
+ vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2" ) );
+ vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetDeviceProcAddr( device, "vkCmdCopyImage2" ) );
+ vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2" ) );
+ vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2" ) );
+ vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetDeviceProcAddr( device, "vkCmdBlitImage2" ) );
+ vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetDeviceProcAddr( device, "vkCmdResolveImage2" ) );
+ vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetDeviceProcAddr( device, "vkCmdBeginRendering" ) );
+ vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetDeviceProcAddr( device, "vkCmdEndRendering" ) );
+ vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetDeviceProcAddr( device, "vkCmdSetCullMode" ) );
+ vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetDeviceProcAddr( device, "vkCmdSetFrontFace" ) );
+ vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopology" ) );
+ vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCount" ) );
+ vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCount" ) );
+ vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2" ) );
+ vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnable" ) );
+ vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnable" ) );
+ vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOp" ) );
+ vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnable" ) );
+ vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnable" ) );
+ vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetDeviceProcAddr( device, "vkCmdSetStencilOp" ) );
+ vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnable" ) );
+ vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnable" ) );
+ vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnable" ) );
+ vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirements" ) );
+ vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirements" ) );
+ vkGetDeviceImageSparseMemoryRequirements =
+ PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) );
//=== VK_KHR_swapchain ===
vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) );
@@ -14060,47 +14592,35 @@ namespace VULKAN_HPP_NAMESPACE
vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) );
vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) );
vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) );
- vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR(
- vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
- vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR(
- vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
+ vkGetDeviceGroupPresentCapabilitiesKHR =
+ PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
+ vkGetDeviceGroupSurfacePresentModesKHR =
+ PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) );
//=== VK_KHR_display_swapchain ===
- vkCreateSharedSwapchainsKHR =
- PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) );
+ vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) );
//=== VK_EXT_debug_marker ===
- vkDebugMarkerSetObjectTagEXT =
- PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) );
- vkDebugMarkerSetObjectNameEXT =
- PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) );
- vkCmdDebugMarkerBeginEXT =
- PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) );
- vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) );
- vkCmdDebugMarkerInsertEXT =
- PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) );
+ vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) );
+ vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) );
+ vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) );
+ vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) );
+ vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) );
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_video_queue ===
- vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) );
- vkDestroyVideoSessionKHR =
- PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) );
- vkGetVideoSessionMemoryRequirementsKHR = PFN_vkGetVideoSessionMemoryRequirementsKHR(
- vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) );
- vkBindVideoSessionMemoryKHR =
- PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) );
- vkCreateVideoSessionParametersKHR =
- PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) );
- vkUpdateVideoSessionParametersKHR =
- PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) );
- vkDestroyVideoSessionParametersKHR =
- PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) );
- vkCmdBeginVideoCodingKHR =
- PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) );
- vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) );
- vkCmdControlVideoCodingKHR =
- PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) );
+ vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) );
+ vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) );
+ vkGetVideoSessionMemoryRequirementsKHR =
+ PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) );
+ vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) );
+ vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) );
+ vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) );
+ vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) );
+ vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) );
+ vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) );
+ vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
@@ -14109,17 +14629,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_EXT_transform_feedback ===
- vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT(
- vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) );
- vkCmdBeginTransformFeedbackEXT =
- PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) );
- vkCmdEndTransformFeedbackEXT =
- PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) );
- vkCmdBeginQueryIndexedEXT =
- PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) );
- vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) );
- vkCmdDrawIndirectByteCountEXT =
- PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) );
+ vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) );
+ vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) );
+ vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) );
+ vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) );
+ vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) );
+ vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) );
//=== VK_NVX_binary_import ===
vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetDeviceProcAddr( device, "vkCreateCuModuleNVX" ) );
@@ -14129,17 +14644,14 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) );
//=== VK_NVX_image_view_handle ===
- vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) );
- vkGetImageViewAddressNVX =
- PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) );
+ vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) );
+ vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) );
//=== VK_AMD_draw_indirect_count ===
- vkCmdDrawIndirectCountAMD =
- PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) );
+ vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) );
if ( !vkCmdDrawIndirectCount )
vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD;
- vkCmdDrawIndexedIndirectCountAMD =
- PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) );
+ vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) );
if ( !vkCmdDrawIndexedIndirectCount )
vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD;
@@ -14156,13 +14668,12 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_NV_external_memory_win32 ===
- vkGetMemoryWin32HandleNV =
- PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) );
+ vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_device_group ===
- vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR(
- vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) );
+ vkGetDeviceGroupPeerMemoryFeaturesKHR =
+ PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) );
if ( !vkGetDeviceGroupPeerMemoryFeatures )
vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR;
vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) );
@@ -14179,23 +14690,18 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_memory_win32 ===
- vkGetMemoryWin32HandleKHR =
- PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) );
- vkGetMemoryWin32HandlePropertiesKHR =
- PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) );
+ vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) );
+ vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_memory_fd ===
- vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) );
- vkGetMemoryFdPropertiesKHR =
- PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) );
+ vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) );
+ vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) );
#if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_semaphore_win32 ===
- vkImportSemaphoreWin32HandleKHR =
- PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) );
- vkGetSemaphoreWin32HandleKHR =
- PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) );
+ vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) );
+ vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_semaphore_fd ===
@@ -14203,54 +14709,40 @@ namespace VULKAN_HPP_NAMESPACE
vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) );
//=== VK_KHR_push_descriptor ===
- vkCmdPushDescriptorSetKHR =
- PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) );
- vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR(
- vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) );
+ vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) );
+ vkCmdPushDescriptorSetWithTemplateKHR =
+ PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) );
//=== VK_EXT_conditional_rendering ===
- vkCmdBeginConditionalRenderingEXT =
- PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) );
- vkCmdEndConditionalRenderingEXT =
- PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) );
+ vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) );
+ vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) );
//=== VK_KHR_descriptor_update_template ===
- vkCreateDescriptorUpdateTemplateKHR =
- PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) );
+ vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) );
if ( !vkCreateDescriptorUpdateTemplate )
vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR;
- vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR(
- vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) );
+ vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) );
if ( !vkDestroyDescriptorUpdateTemplate )
vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR;
- vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR(
- vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) );
+ vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) );
if ( !vkUpdateDescriptorSetWithTemplate )
vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR;
//=== VK_NV_clip_space_w_scaling ===
- vkCmdSetViewportWScalingNV =
- PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) );
+ vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) );
//=== VK_EXT_display_control ===
- vkDisplayPowerControlEXT =
- PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) );
- vkRegisterDeviceEventEXT =
- PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) );
- vkRegisterDisplayEventEXT =
- PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) );
- vkGetSwapchainCounterEXT =
- PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) );
+ vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) );
+ vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) );
+ vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) );
+ vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) );
//=== VK_GOOGLE_display_timing ===
- vkGetRefreshCycleDurationGOOGLE =
- PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) );
- vkGetPastPresentationTimingGOOGLE =
- PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) );
+ vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) );
+ vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) );
//=== VK_EXT_discard_rectangles ===
- vkCmdSetDiscardRectangleEXT =
- PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) );
+ vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) );
//=== VK_EXT_hdr_metadata ===
vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) );
@@ -14259,8 +14751,7 @@ namespace VULKAN_HPP_NAMESPACE
vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) );
if ( !vkCreateRenderPass2 )
vkCreateRenderPass2 = vkCreateRenderPass2KHR;
- vkCmdBeginRenderPass2KHR =
- PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) );
+ vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) );
if ( !vkCmdBeginRenderPass2 )
vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR;
vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) );
@@ -14275,10 +14766,8 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_fence_win32 ===
- vkImportFenceWin32HandleKHR =
- PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) );
- vkGetFenceWin32HandleKHR =
- PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) );
+ vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) );
+ vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_fence_fd ===
@@ -14286,96 +14775,75 @@ namespace VULKAN_HPP_NAMESPACE
vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) );
//=== VK_KHR_performance_query ===
- vkAcquireProfilingLockKHR =
- PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) );
- vkReleaseProfilingLockKHR =
- PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) );
+ vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) );
+ vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) );
//=== VK_EXT_debug_utils ===
- vkSetDebugUtilsObjectNameEXT =
- PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) );
- vkSetDebugUtilsObjectTagEXT =
- PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) );
- vkQueueBeginDebugUtilsLabelEXT =
- PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) );
- vkQueueEndDebugUtilsLabelEXT =
- PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) );
- vkQueueInsertDebugUtilsLabelEXT =
- PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) );
- vkCmdBeginDebugUtilsLabelEXT =
- PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) );
- vkCmdEndDebugUtilsLabelEXT =
- PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) );
- vkCmdInsertDebugUtilsLabelEXT =
- PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) );
+ vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) );
+ vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) );
+ vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) );
+ vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) );
+ vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) );
+ vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) );
+ vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) );
+ vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) );
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
//=== VK_ANDROID_external_memory_android_hardware_buffer ===
- vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID(
- vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
- vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID(
- vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
+ vkGetAndroidHardwareBufferPropertiesANDROID =
+ PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
+ vkGetMemoryAndroidHardwareBufferANDROID =
+ PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
//=== VK_EXT_sample_locations ===
- vkCmdSetSampleLocationsEXT =
- PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) );
+ vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) );
//=== VK_KHR_get_memory_requirements2 ===
- vkGetImageMemoryRequirements2KHR =
- PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) );
+ vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) );
if ( !vkGetImageMemoryRequirements2 )
vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR;
- vkGetBufferMemoryRequirements2KHR =
- PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) );
+ vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) );
if ( !vkGetBufferMemoryRequirements2 )
vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR;
- vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR(
- vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) );
+ vkGetImageSparseMemoryRequirements2KHR =
+ PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) );
if ( !vkGetImageSparseMemoryRequirements2 )
vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR;
//=== VK_KHR_acceleration_structure ===
- vkCreateAccelerationStructureKHR =
- PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) );
- vkDestroyAccelerationStructureKHR =
- PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) );
- vkCmdBuildAccelerationStructuresKHR =
- PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) );
- vkCmdBuildAccelerationStructuresIndirectKHR = PFN_vkCmdBuildAccelerationStructuresIndirectKHR(
- vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) );
- vkBuildAccelerationStructuresKHR =
- PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) );
- vkCopyAccelerationStructureKHR =
- PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) );
- vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR(
- vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) );
- vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR(
- vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) );
- vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR(
- vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) );
- vkCmdCopyAccelerationStructureKHR =
- PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) );
- vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR(
- vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) );
- vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR(
- vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) );
- vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR(
- vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) );
- vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR(
- vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) );
- vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR(
- vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) );
- vkGetAccelerationStructureBuildSizesKHR = PFN_vkGetAccelerationStructureBuildSizesKHR(
- vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) );
+ vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) );
+ vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) );
+ vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) );
+ vkCmdBuildAccelerationStructuresIndirectKHR =
+ PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) );
+ vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) );
+ vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) );
+ vkCopyAccelerationStructureToMemoryKHR =
+ PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) );
+ vkCopyMemoryToAccelerationStructureKHR =
+ PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) );
+ vkWriteAccelerationStructuresPropertiesKHR =
+ PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) );
+ vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) );
+ vkCmdCopyAccelerationStructureToMemoryKHR =
+ PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) );
+ vkCmdCopyMemoryToAccelerationStructureKHR =
+ PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) );
+ vkGetAccelerationStructureDeviceAddressKHR =
+ PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) );
+ vkCmdWriteAccelerationStructuresPropertiesKHR =
+ PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) );
+ vkGetDeviceAccelerationStructureCompatibilityKHR =
+ PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) );
+ vkGetAccelerationStructureBuildSizesKHR =
+ PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) );
//=== VK_KHR_sampler_ycbcr_conversion ===
- vkCreateSamplerYcbcrConversionKHR =
- PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) );
+ vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) );
if ( !vkCreateSamplerYcbcrConversion )
vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR;
- vkDestroySamplerYcbcrConversionKHR =
- PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) );
+ vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) );
if ( !vkDestroySamplerYcbcrConversion )
vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR;
@@ -14388,100 +14856,74 @@ namespace VULKAN_HPP_NAMESPACE
vkBindImageMemory2 = vkBindImageMemory2KHR;
//=== VK_EXT_image_drm_format_modifier ===
- vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT(
- vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) );
+ vkGetImageDrmFormatModifierPropertiesEXT =
+ PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) );
//=== VK_EXT_validation_cache ===
- vkCreateValidationCacheEXT =
- PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) );
- vkDestroyValidationCacheEXT =
- PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) );
- vkMergeValidationCachesEXT =
- PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) );
- vkGetValidationCacheDataEXT =
- PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) );
+ vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) );
+ vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) );
+ vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) );
+ vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) );
//=== VK_NV_shading_rate_image ===
- vkCmdBindShadingRateImageNV =
- PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) );
- vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV(
- vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) );
- vkCmdSetCoarseSampleOrderNV =
- PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) );
+ vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) );
+ vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) );
+ vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) );
//=== VK_NV_ray_tracing ===
- vkCreateAccelerationStructureNV =
- PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) );
- vkDestroyAccelerationStructureNV =
- PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) );
- vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV(
- vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) );
- vkBindAccelerationStructureMemoryNV =
- PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) );
- vkCmdBuildAccelerationStructureNV =
- PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) );
- vkCmdCopyAccelerationStructureNV =
- PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) );
- vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) );
- vkCreateRayTracingPipelinesNV =
- PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) );
- vkGetRayTracingShaderGroupHandlesNV =
- PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) );
+ vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) );
+ vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) );
+ vkGetAccelerationStructureMemoryRequirementsNV =
+ PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) );
+ vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) );
+ vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) );
+ vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) );
+ vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) );
+ vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) );
+ vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) );
if ( !vkGetRayTracingShaderGroupHandlesKHR )
vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV;
- vkGetAccelerationStructureHandleNV =
- PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) );
- vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV(
- vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
+ vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) );
+ vkCmdWriteAccelerationStructuresPropertiesNV =
+ PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) );
//=== VK_KHR_maintenance3 ===
- vkGetDescriptorSetLayoutSupportKHR =
- PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) );
+ vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) );
if ( !vkGetDescriptorSetLayoutSupport )
vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR;
//=== VK_KHR_draw_indirect_count ===
- vkCmdDrawIndirectCountKHR =
- PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) );
+ vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) );
if ( !vkCmdDrawIndirectCount )
vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR;
- vkCmdDrawIndexedIndirectCountKHR =
- PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) );
+ vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) );
if ( !vkCmdDrawIndexedIndirectCount )
vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR;
//=== VK_EXT_external_memory_host ===
- vkGetMemoryHostPointerPropertiesEXT =
- PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) );
+ vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) );
//=== VK_AMD_buffer_marker ===
- vkCmdWriteBufferMarkerAMD =
- PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) );
+ vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) );
//=== VK_EXT_calibrated_timestamps ===
- vkGetCalibratedTimestampsEXT =
- PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) );
+ vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) );
//=== VK_NV_mesh_shader ===
- vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) );
- vkCmdDrawMeshTasksIndirectNV =
- PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) );
- vkCmdDrawMeshTasksIndirectCountNV =
- PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) );
+ vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) );
+ vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) );
+ vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) );
//=== VK_NV_scissor_exclusive ===
- vkCmdSetExclusiveScissorNV =
- PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) );
+ vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) );
//=== VK_NV_device_diagnostic_checkpoints ===
- vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) );
- vkGetQueueCheckpointDataNV =
- PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) );
+ vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) );
+ vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) );
//=== VK_KHR_timeline_semaphore ===
- vkGetSemaphoreCounterValueKHR =
- PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) );
+ vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) );
if ( !vkGetSemaphoreCounterValue )
vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR;
vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) );
@@ -14492,35 +14934,27 @@ namespace VULKAN_HPP_NAMESPACE
vkSignalSemaphore = vkSignalSemaphoreKHR;
//=== VK_INTEL_performance_query ===
- vkInitializePerformanceApiINTEL =
- PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) );
- vkUninitializePerformanceApiINTEL =
- PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) );
- vkCmdSetPerformanceMarkerINTEL =
- PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) );
- vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL(
- vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) );
- vkCmdSetPerformanceOverrideINTEL =
- PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) );
- vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL(
- vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) );
- vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL(
- vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) );
- vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL(
- vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) );
- vkGetPerformanceParameterINTEL =
- PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) );
+ vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) );
+ vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) );
+ vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) );
+ vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) );
+ vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) );
+ vkAcquirePerformanceConfigurationINTEL =
+ PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) );
+ vkReleasePerformanceConfigurationINTEL =
+ PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) );
+ vkQueueSetPerformanceConfigurationINTEL =
+ PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) );
+ vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) );
//=== VK_AMD_display_native_hdr ===
vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) );
//=== VK_KHR_fragment_shading_rate ===
- vkCmdSetFragmentShadingRateKHR =
- PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) );
+ vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) );
//=== VK_EXT_buffer_device_address ===
- vkGetBufferDeviceAddressEXT =
- PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) );
+ vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) );
if ( !vkGetBufferDeviceAddress )
vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT;
@@ -14529,25 +14963,21 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_EXT_full_screen_exclusive ===
- vkAcquireFullScreenExclusiveModeEXT =
- PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) );
- vkReleaseFullScreenExclusiveModeEXT =
- PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) );
- vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT(
- vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) );
+ vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) );
+ vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) );
+ vkGetDeviceGroupSurfacePresentModes2EXT =
+ PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_buffer_device_address ===
- vkGetBufferDeviceAddressKHR =
- PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) );
+ vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) );
if ( !vkGetBufferDeviceAddress )
vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR;
- vkGetBufferOpaqueCaptureAddressKHR =
- PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) );
+ vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) );
if ( !vkGetBufferOpaqueCaptureAddress )
vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR;
- vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR(
- vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
+ vkGetDeviceMemoryOpaqueCaptureAddressKHR =
+ PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
if ( !vkGetDeviceMemoryOpaqueCaptureAddress )
vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR;
@@ -14566,40 +14996,31 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) );
if ( !vkCmdSetFrontFace )
vkCmdSetFrontFace = vkCmdSetFrontFaceEXT;
- vkCmdSetPrimitiveTopologyEXT =
- PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) );
+ vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) );
if ( !vkCmdSetPrimitiveTopology )
vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT;
- vkCmdSetViewportWithCountEXT =
- PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) );
+ vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) );
if ( !vkCmdSetViewportWithCount )
vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT;
- vkCmdSetScissorWithCountEXT =
- PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) );
+ vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) );
if ( !vkCmdSetScissorWithCount )
vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT;
- vkCmdBindVertexBuffers2EXT =
- PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) );
+ vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) );
if ( !vkCmdBindVertexBuffers2 )
vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT;
- vkCmdSetDepthTestEnableEXT =
- PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) );
+ vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) );
if ( !vkCmdSetDepthTestEnable )
vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT;
- vkCmdSetDepthWriteEnableEXT =
- PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) );
+ vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) );
if ( !vkCmdSetDepthWriteEnable )
vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT;
- vkCmdSetDepthCompareOpEXT =
- PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) );
+ vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) );
if ( !vkCmdSetDepthCompareOp )
vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT;
- vkCmdSetDepthBoundsTestEnableEXT =
- PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) );
+ vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) );
if ( !vkCmdSetDepthBoundsTestEnable )
vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT;
- vkCmdSetStencilTestEnableEXT =
- PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) );
+ vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) );
if ( !vkCmdSetStencilTestEnable )
vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT;
vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) );
@@ -14607,46 +15028,33 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdSetStencilOp = vkCmdSetStencilOpEXT;
//=== VK_KHR_deferred_host_operations ===
- vkCreateDeferredOperationKHR =
- PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) );
- vkDestroyDeferredOperationKHR =
- PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) );
- vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR(
- vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) );
- vkGetDeferredOperationResultKHR =
- PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) );
- vkDeferredOperationJoinKHR =
- PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) );
+ vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) );
+ vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) );
+ vkGetDeferredOperationMaxConcurrencyKHR =
+ PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) );
+ vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) );
+ vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) );
//=== VK_KHR_pipeline_executable_properties ===
- vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR(
- vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) );
- vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR(
- vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) );
- vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR(
- vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
+ vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) );
+ vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) );
+ vkGetPipelineExecutableInternalRepresentationsKHR =
+ PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
//=== VK_NV_device_generated_commands ===
- vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV(
- vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) );
- vkCmdPreprocessGeneratedCommandsNV =
- PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) );
- vkCmdExecuteGeneratedCommandsNV =
- PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) );
- vkCmdBindPipelineShaderGroupNV =
- PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) );
- vkCreateIndirectCommandsLayoutNV =
- PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) );
- vkDestroyIndirectCommandsLayoutNV =
- PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) );
+ vkGetGeneratedCommandsMemoryRequirementsNV =
+ PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) );
+ vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) );
+ vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) );
+ vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) );
+ vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) );
+ vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) );
//=== VK_EXT_private_data ===
- vkCreatePrivateDataSlotEXT =
- PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) );
+ vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) );
if ( !vkCreatePrivateDataSlot )
vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT;
- vkDestroyPrivateDataSlotEXT =
- PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) );
+ vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) );
if ( !vkDestroyPrivateDataSlot )
vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT;
vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) );
@@ -14661,6 +15069,11 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ //=== VK_EXT_metal_objects ===
+ vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetDeviceProcAddr( device, "vkExportMetalObjectsEXT" ) );
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
//=== VK_KHR_synchronization2 ===
vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) );
if ( !vkCmdSetEvent2 )
@@ -14671,8 +15084,7 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) );
if ( !vkCmdWaitEvents2 )
vkCmdWaitEvents2 = vkCmdWaitEvents2KHR;
- vkCmdPipelineBarrier2KHR =
- PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) );
+ vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) );
if ( !vkCmdPipelineBarrier2 )
vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR;
vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) );
@@ -14681,14 +15093,16 @@ namespace VULKAN_HPP_NAMESPACE
vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) );
if ( !vkQueueSubmit2 )
vkQueueSubmit2 = vkQueueSubmit2KHR;
- vkCmdWriteBufferMarker2AMD =
- PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) );
- vkGetQueueCheckpointData2NV =
- PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) );
+ vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) );
+ vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) );
//=== VK_NV_fragment_shading_rate_enums ===
- vkCmdSetFragmentShadingRateEnumNV =
- PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) );
+ vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) );
+
+ //=== VK_EXT_mesh_shader ===
+ vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksEXT" ) );
+ vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectEXT" ) );
+ vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountEXT" ) );
//=== VK_KHR_copy_commands2 ===
vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) );
@@ -14697,12 +15111,10 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) );
if ( !vkCmdCopyImage2 )
vkCmdCopyImage2 = vkCmdCopyImage2KHR;
- vkCmdCopyBufferToImage2KHR =
- PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) );
+ vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) );
if ( !vkCmdCopyBufferToImage2 )
vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR;
- vkCmdCopyImageToBuffer2KHR =
- PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) );
+ vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) );
if ( !vkCmdCopyImageToBuffer2 )
vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR;
vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) );
@@ -14712,111 +15124,179 @@ namespace VULKAN_HPP_NAMESPACE
if ( !vkCmdResolveImage2 )
vkCmdResolveImage2 = vkCmdResolveImage2KHR;
+ //=== VK_EXT_image_compression_control ===
+ vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) );
+
+ //=== VK_EXT_device_fault ===
+ vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) );
+
//=== VK_KHR_ray_tracing_pipeline ===
- vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) );
- vkCreateRayTracingPipelinesKHR =
- PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) );
- vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR(
- vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) );
- vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
- vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) );
- vkCmdTraceRaysIndirectKHR =
- PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) );
- vkGetRayTracingShaderGroupStackSizeKHR = PFN_vkGetRayTracingShaderGroupStackSizeKHR(
- vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) );
- vkCmdSetRayTracingPipelineStackSizeKHR = PFN_vkCmdSetRayTracingPipelineStackSizeKHR(
- vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) );
+ vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) );
+ vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) );
+ vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) );
+ vkGetRayTracingCaptureReplayShaderGroupHandlesKHR =
+ PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) );
+ vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) );
+ vkGetRayTracingShaderGroupStackSizeKHR =
+ PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) );
+ vkCmdSetRayTracingPipelineStackSizeKHR =
+ PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) );
//=== VK_EXT_vertex_input_dynamic_state ===
vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) );
#if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_external_memory ===
- vkGetMemoryZirconHandleFUCHSIA =
- PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) );
- vkGetMemoryZirconHandlePropertiesFUCHSIA = PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA(
- vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) );
+ vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) );
+ vkGetMemoryZirconHandlePropertiesFUCHSIA =
+ PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) );
#endif /*VK_USE_PLATFORM_FUCHSIA*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_external_semaphore ===
- vkImportSemaphoreZirconHandleFUCHSIA = PFN_vkImportSemaphoreZirconHandleFUCHSIA(
- vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) );
- vkGetSemaphoreZirconHandleFUCHSIA =
- PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) );
+ vkImportSemaphoreZirconHandleFUCHSIA = PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) );
+ vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) );
#endif /*VK_USE_PLATFORM_FUCHSIA*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
- vkCreateBufferCollectionFUCHSIA =
- PFN_vkCreateBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkCreateBufferCollectionFUCHSIA" ) );
- vkSetBufferCollectionImageConstraintsFUCHSIA = PFN_vkSetBufferCollectionImageConstraintsFUCHSIA(
- vkGetDeviceProcAddr( device, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) );
- vkSetBufferCollectionBufferConstraintsFUCHSIA = PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA(
- vkGetDeviceProcAddr( device, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) );
- vkDestroyBufferCollectionFUCHSIA =
- PFN_vkDestroyBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkDestroyBufferCollectionFUCHSIA" ) );
- vkGetBufferCollectionPropertiesFUCHSIA = PFN_vkGetBufferCollectionPropertiesFUCHSIA(
- vkGetDeviceProcAddr( device, "vkGetBufferCollectionPropertiesFUCHSIA" ) );
+ vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkCreateBufferCollectionFUCHSIA" ) );
+ vkSetBufferCollectionImageConstraintsFUCHSIA =
+ PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) );
+ vkSetBufferCollectionBufferConstraintsFUCHSIA =
+ PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) );
+ vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkDestroyBufferCollectionFUCHSIA" ) );
+ vkGetBufferCollectionPropertiesFUCHSIA =
+ PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetBufferCollectionPropertiesFUCHSIA" ) );
#endif /*VK_USE_PLATFORM_FUCHSIA*/
//=== VK_HUAWEI_subpass_shading ===
- vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
- vkGetDeviceProcAddr( device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) );
- vkCmdSubpassShadingHUAWEI =
- PFN_vkCmdSubpassShadingHUAWEI( vkGetDeviceProcAddr( device, "vkCmdSubpassShadingHUAWEI" ) );
+ vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI =
+ PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetDeviceProcAddr( device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) );
+ vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetDeviceProcAddr( device, "vkCmdSubpassShadingHUAWEI" ) );
//=== VK_HUAWEI_invocation_mask ===
- vkCmdBindInvocationMaskHUAWEI =
- PFN_vkCmdBindInvocationMaskHUAWEI( vkGetDeviceProcAddr( device, "vkCmdBindInvocationMaskHUAWEI" ) );
+ vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetDeviceProcAddr( device, "vkCmdBindInvocationMaskHUAWEI" ) );
//=== VK_NV_external_memory_rdma ===
- vkGetMemoryRemoteAddressNV =
- PFN_vkGetMemoryRemoteAddressNV( vkGetDeviceProcAddr( device, "vkGetMemoryRemoteAddressNV" ) );
+ vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetDeviceProcAddr( device, "vkGetMemoryRemoteAddressNV" ) );
+
+ //=== VK_EXT_pipeline_properties ===
+ vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetDeviceProcAddr( device, "vkGetPipelinePropertiesEXT" ) );
//=== VK_EXT_extended_dynamic_state2 ===
- vkCmdSetPatchControlPointsEXT =
- PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) );
- vkCmdSetRasterizerDiscardEnableEXT =
- PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) );
+ vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) );
+ vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) );
if ( !vkCmdSetRasterizerDiscardEnable )
vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT;
- vkCmdSetDepthBiasEnableEXT =
- PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) );
+ vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) );
if ( !vkCmdSetDepthBiasEnable )
vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT;
- vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) );
- vkCmdSetPrimitiveRestartEnableEXT =
- PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) );
+ vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) );
+ vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) );
if ( !vkCmdSetPrimitiveRestartEnable )
vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT;
//=== VK_EXT_color_write_enable ===
- vkCmdSetColorWriteEnableEXT =
- PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) );
+ vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) );
+
+ //=== VK_KHR_ray_tracing_maintenance1 ===
+ vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirect2KHR" ) );
//=== VK_EXT_multi_draw ===
- vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) );
- vkCmdDrawMultiIndexedEXT =
- PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) );
+ vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) );
+ vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) );
+
+ //=== VK_EXT_opacity_micromap ===
+ vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetDeviceProcAddr( device, "vkCreateMicromapEXT" ) );
+ vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetDeviceProcAddr( device, "vkDestroyMicromapEXT" ) );
+ vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkCmdBuildMicromapsEXT" ) );
+ vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkBuildMicromapsEXT" ) );
+ vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapEXT" ) );
+ vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapToMemoryEXT" ) );
+ vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToMicromapEXT" ) );
+ vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkWriteMicromapsPropertiesEXT" ) );
+ vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapEXT" ) );
+ vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapToMemoryEXT" ) );
+ vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToMicromapEXT" ) );
+ vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkCmdWriteMicromapsPropertiesEXT" ) );
+ vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetDeviceProcAddr( device, "vkGetDeviceMicromapCompatibilityEXT" ) );
+ vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetDeviceProcAddr( device, "vkGetMicromapBuildSizesEXT" ) );
//=== VK_EXT_pageable_device_local_memory ===
- vkSetDeviceMemoryPriorityEXT =
- PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) );
+ vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) );
//=== VK_KHR_maintenance4 ===
- vkGetDeviceBufferMemoryRequirementsKHR = PFN_vkGetDeviceBufferMemoryRequirementsKHR(
- vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirementsKHR" ) );
+ vkGetDeviceBufferMemoryRequirementsKHR =
+ PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirementsKHR" ) );
if ( !vkGetDeviceBufferMemoryRequirements )
vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR;
- vkGetDeviceImageMemoryRequirementsKHR = PFN_vkGetDeviceImageMemoryRequirementsKHR(
- vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirementsKHR" ) );
+ vkGetDeviceImageMemoryRequirementsKHR =
+ PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirementsKHR" ) );
if ( !vkGetDeviceImageMemoryRequirements )
vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR;
- vkGetDeviceImageSparseMemoryRequirementsKHR = PFN_vkGetDeviceImageSparseMemoryRequirementsKHR(
- vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) );
+ vkGetDeviceImageSparseMemoryRequirementsKHR =
+ PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) );
if ( !vkGetDeviceImageSparseMemoryRequirements )
vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR;
+
+ //=== VK_VALVE_descriptor_set_host_mapping ===
+ vkGetDescriptorSetLayoutHostMappingInfoVALVE =
+ PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) );
+ vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetHostMappingVALVE" ) );
+
+ //=== VK_EXT_extended_dynamic_state3 ===
+ vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetDeviceProcAddr( device, "vkCmdSetTessellationDomainOriginEXT" ) );
+ vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampEnableEXT" ) );
+ vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetPolygonModeEXT" ) );
+ vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationSamplesEXT" ) );
+ vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleMaskEXT" ) );
+ vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToCoverageEnableEXT" ) );
+ vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToOneEnableEXT" ) );
+ vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEnableEXT" ) );
+ vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEnableEXT" ) );
+ vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEquationEXT" ) );
+ vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteMaskEXT" ) );
+ vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationStreamEXT" ) );
+ vkCmdSetConservativeRasterizationModeEXT =
+ PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetConservativeRasterizationModeEXT" ) );
+ vkCmdSetExtraPrimitiveOverestimationSizeEXT =
+ PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetDeviceProcAddr( device, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) );
+ vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipEnableEXT" ) );
+ vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEnableEXT" ) );
+ vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendAdvancedEXT" ) );
+ vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetProvokingVertexModeEXT" ) );
+ vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineRasterizationModeEXT" ) );
+ vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEnableEXT" ) );
+ vkCmdSetDepthClipNegativeOneToOneEXT = PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipNegativeOneToOneEXT" ) );
+ vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingEnableNV" ) );
+ vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportSwizzleNV" ) );
+ vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorEnableNV" ) );
+ vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorLocationNV" ) );
+ vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationModeNV" ) );
+ vkCmdSetCoverageModulationTableEnableNV =
+ PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableEnableNV" ) );
+ vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableNV" ) );
+ vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetShadingRateImageEnableNV" ) );
+ vkCmdSetRepresentativeFragmentTestEnableNV =
+ PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetRepresentativeFragmentTestEnableNV" ) );
+ vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageReductionModeNV" ) );
+
+ //=== VK_EXT_shader_module_identifier ===
+ vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleIdentifierEXT" ) );
+ vkGetShaderModuleCreateInfoIdentifierEXT =
+ PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleCreateInfoIdentifierEXT" ) );
+
+ //=== VK_NV_optical_flow ===
+ vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkCreateOpticalFlowSessionNV" ) );
+ vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkDestroyOpticalFlowSessionNV" ) );
+ vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) );
+ vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) );
+
+ //=== VK_QCOM_tile_properties ===
+ vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) );
+ vkGetDynamicRenderingTilePropertiesQCOM =
+ PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetDynamicRenderingTilePropertiesQCOM" ) );
}
};
} // namespace VULKAN_HPP_NAMESPACE
diff --git a/thirdparty/vulkan/include/vulkan/vulkan_android.h b/thirdparty/vulkan/include/vulkan/vulkan_android.h
index de79d382f2..11f5397966 100644
--- a/thirdparty/vulkan/include/vulkan/vulkan_android.h
+++ b/thirdparty/vulkan/include/vulkan/vulkan_android.h
@@ -44,7 +44,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCreateAndroidSurfaceKHR(
#define VK_ANDROID_external_memory_android_hardware_buffer 1
struct AHardwareBuffer;
-#define VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION 4
+#define VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION 5
#define VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME "VK_ANDROID_external_memory_android_hardware_buffer"
typedef struct VkAndroidHardwareBufferUsageANDROID {
VkStructureType sType;
diff --git a/thirdparty/vulkan/include/vulkan/vulkan_beta.h b/thirdparty/vulkan/include/vulkan/vulkan_beta.h
index 53294f8b90..db511024fb 100644
--- a/thirdparty/vulkan/include/vulkan/vulkan_beta.h
+++ b/thirdparty/vulkan/include/vulkan/vulkan_beta.h
@@ -22,7 +22,7 @@ extern "C" {
#define VK_KHR_video_queue 1
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkVideoSessionKHR)
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkVideoSessionParametersKHR)
-#define VK_KHR_VIDEO_QUEUE_SPEC_VERSION 2
+#define VK_KHR_VIDEO_QUEUE_SPEC_VERSION 7
#define VK_KHR_VIDEO_QUEUE_EXTENSION_NAME "VK_KHR_video_queue"
typedef enum VkQueryResultStatusKHR {
@@ -33,7 +33,7 @@ typedef enum VkQueryResultStatusKHR {
} VkQueryResultStatusKHR;
typedef enum VkVideoCodecOperationFlagBitsKHR {
- VK_VIDEO_CODEC_OPERATION_INVALID_BIT_KHR = 0,
+ VK_VIDEO_CODEC_OPERATION_NONE_KHR = 0,
#ifdef VK_ENABLE_BETA_EXTENSIONS
VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_EXT = 0x00010000,
#endif
@@ -51,7 +51,7 @@ typedef enum VkVideoCodecOperationFlagBitsKHR {
typedef VkFlags VkVideoCodecOperationFlagsKHR;
typedef enum VkVideoChromaSubsamplingFlagBitsKHR {
- VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_BIT_KHR = 0,
+ VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_KHR = 0,
VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR = 0x00000001,
VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR = 0x00000002,
VK_VIDEO_CHROMA_SUBSAMPLING_422_BIT_KHR = 0x00000004,
@@ -77,132 +77,136 @@ typedef enum VkVideoCapabilityFlagBitsKHR {
typedef VkFlags VkVideoCapabilityFlagsKHR;
typedef enum VkVideoSessionCreateFlagBitsKHR {
- VK_VIDEO_SESSION_CREATE_DEFAULT_KHR = 0,
VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR = 0x00000001,
VK_VIDEO_SESSION_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
} VkVideoSessionCreateFlagBitsKHR;
typedef VkFlags VkVideoSessionCreateFlagsKHR;
+typedef VkFlags VkVideoSessionParametersCreateFlagsKHR;
typedef VkFlags VkVideoBeginCodingFlagsKHR;
typedef VkFlags VkVideoEndCodingFlagsKHR;
typedef enum VkVideoCodingControlFlagBitsKHR {
- VK_VIDEO_CODING_CONTROL_DEFAULT_KHR = 0,
VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR = 0x00000001,
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_BIT_KHR = 0x00000002,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_LAYER_BIT_KHR = 0x00000004,
+#endif
VK_VIDEO_CODING_CONTROL_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
} VkVideoCodingControlFlagBitsKHR;
typedef VkFlags VkVideoCodingControlFlagsKHR;
-
-typedef enum VkVideoCodingQualityPresetFlagBitsKHR {
- VK_VIDEO_CODING_QUALITY_PRESET_NORMAL_BIT_KHR = 0x00000001,
- VK_VIDEO_CODING_QUALITY_PRESET_POWER_BIT_KHR = 0x00000002,
- VK_VIDEO_CODING_QUALITY_PRESET_QUALITY_BIT_KHR = 0x00000004,
- VK_VIDEO_CODING_QUALITY_PRESET_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
-} VkVideoCodingQualityPresetFlagBitsKHR;
-typedef VkFlags VkVideoCodingQualityPresetFlagsKHR;
-typedef struct VkQueueFamilyQueryResultStatusProperties2KHR {
+typedef struct VkQueueFamilyQueryResultStatusPropertiesKHR {
VkStructureType sType;
void* pNext;
- VkBool32 supported;
-} VkQueueFamilyQueryResultStatusProperties2KHR;
+ VkBool32 queryResultStatusSupport;
+} VkQueueFamilyQueryResultStatusPropertiesKHR;
-typedef struct VkVideoQueueFamilyProperties2KHR {
+typedef struct VkQueueFamilyVideoPropertiesKHR {
VkStructureType sType;
void* pNext;
VkVideoCodecOperationFlagsKHR videoCodecOperations;
-} VkVideoQueueFamilyProperties2KHR;
+} VkQueueFamilyVideoPropertiesKHR;
-typedef struct VkVideoProfileKHR {
+typedef struct VkVideoProfileInfoKHR {
VkStructureType sType;
- void* pNext;
+ const void* pNext;
VkVideoCodecOperationFlagBitsKHR videoCodecOperation;
VkVideoChromaSubsamplingFlagsKHR chromaSubsampling;
VkVideoComponentBitDepthFlagsKHR lumaBitDepth;
VkVideoComponentBitDepthFlagsKHR chromaBitDepth;
-} VkVideoProfileKHR;
+} VkVideoProfileInfoKHR;
-typedef struct VkVideoProfilesKHR {
- VkStructureType sType;
- void* pNext;
- uint32_t profileCount;
- const VkVideoProfileKHR* pProfiles;
-} VkVideoProfilesKHR;
+typedef struct VkVideoProfileListInfoKHR {
+ VkStructureType sType;
+ const void* pNext;
+ uint32_t profileCount;
+ const VkVideoProfileInfoKHR* pProfiles;
+} VkVideoProfileListInfoKHR;
typedef struct VkVideoCapabilitiesKHR {
VkStructureType sType;
void* pNext;
- VkVideoCapabilityFlagsKHR capabilityFlags;
+ VkVideoCapabilityFlagsKHR flags;
VkDeviceSize minBitstreamBufferOffsetAlignment;
VkDeviceSize minBitstreamBufferSizeAlignment;
- VkExtent2D videoPictureExtentGranularity;
- VkExtent2D minExtent;
- VkExtent2D maxExtent;
- uint32_t maxReferencePicturesSlotsCount;
- uint32_t maxReferencePicturesActiveCount;
+ VkExtent2D pictureAccessGranularity;
+ VkExtent2D minCodedExtent;
+ VkExtent2D maxCodedExtent;
+ uint32_t maxDpbSlots;
+ uint32_t maxActiveReferencePictures;
+ VkExtensionProperties stdHeaderVersion;
} VkVideoCapabilitiesKHR;
typedef struct VkPhysicalDeviceVideoFormatInfoKHR {
- VkStructureType sType;
- void* pNext;
- VkImageUsageFlags imageUsage;
- const VkVideoProfilesKHR* pVideoProfiles;
+ VkStructureType sType;
+ const void* pNext;
+ VkImageUsageFlags imageUsage;
} VkPhysicalDeviceVideoFormatInfoKHR;
typedef struct VkVideoFormatPropertiesKHR {
- VkStructureType sType;
- void* pNext;
- VkFormat format;
+ VkStructureType sType;
+ void* pNext;
+ VkFormat format;
+ VkComponentMapping componentMapping;
+ VkImageCreateFlags imageCreateFlags;
+ VkImageType imageType;
+ VkImageTiling imageTiling;
+ VkImageUsageFlags imageUsageFlags;
} VkVideoFormatPropertiesKHR;
-typedef struct VkVideoPictureResourceKHR {
+typedef struct VkVideoPictureResourceInfoKHR {
VkStructureType sType;
const void* pNext;
VkOffset2D codedOffset;
VkExtent2D codedExtent;
uint32_t baseArrayLayer;
VkImageView imageViewBinding;
-} VkVideoPictureResourceKHR;
-
-typedef struct VkVideoReferenceSlotKHR {
- VkStructureType sType;
- const void* pNext;
- int8_t slotIndex;
- const VkVideoPictureResourceKHR* pPictureResource;
-} VkVideoReferenceSlotKHR;
+} VkVideoPictureResourceInfoKHR;
-typedef struct VkVideoGetMemoryPropertiesKHR {
- VkStructureType sType;
- const void* pNext;
- uint32_t memoryBindIndex;
- VkMemoryRequirements2* pMemoryRequirements;
-} VkVideoGetMemoryPropertiesKHR;
-
-typedef struct VkVideoBindMemoryKHR {
+typedef struct VkVideoReferenceSlotInfoKHR {
+ VkStructureType sType;
+ const void* pNext;
+ int32_t slotIndex;
+ const VkVideoPictureResourceInfoKHR* pPictureResource;
+} VkVideoReferenceSlotInfoKHR;
+
+typedef struct VkVideoSessionMemoryRequirementsKHR {
+ VkStructureType sType;
+ void* pNext;
+ uint32_t memoryBindIndex;
+ VkMemoryRequirements memoryRequirements;
+} VkVideoSessionMemoryRequirementsKHR;
+
+typedef struct VkBindVideoSessionMemoryInfoKHR {
VkStructureType sType;
const void* pNext;
uint32_t memoryBindIndex;
VkDeviceMemory memory;
VkDeviceSize memoryOffset;
VkDeviceSize memorySize;
-} VkVideoBindMemoryKHR;
+} VkBindVideoSessionMemoryInfoKHR;
typedef struct VkVideoSessionCreateInfoKHR {
VkStructureType sType;
const void* pNext;
uint32_t queueFamilyIndex;
VkVideoSessionCreateFlagsKHR flags;
- const VkVideoProfileKHR* pVideoProfile;
+ const VkVideoProfileInfoKHR* pVideoProfile;
VkFormat pictureFormat;
VkExtent2D maxCodedExtent;
- VkFormat referencePicturesFormat;
- uint32_t maxReferencePicturesSlotsCount;
- uint32_t maxReferencePicturesActiveCount;
+ VkFormat referencePictureFormat;
+ uint32_t maxDpbSlots;
+ uint32_t maxActiveReferencePictures;
+ const VkExtensionProperties* pStdHeaderVersion;
} VkVideoSessionCreateInfoKHR;
typedef struct VkVideoSessionParametersCreateInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkVideoSessionParametersKHR videoSessionParametersTemplate;
- VkVideoSessionKHR videoSession;
+ VkStructureType sType;
+ const void* pNext;
+ VkVideoSessionParametersCreateFlagsKHR flags;
+ VkVideoSessionParametersKHR videoSessionParametersTemplate;
+ VkVideoSessionKHR videoSession;
} VkVideoSessionParametersCreateInfoKHR;
typedef struct VkVideoSessionParametersUpdateInfoKHR {
@@ -215,11 +219,10 @@ typedef struct VkVideoBeginCodingInfoKHR {
VkStructureType sType;
const void* pNext;
VkVideoBeginCodingFlagsKHR flags;
- VkVideoCodingQualityPresetFlagsKHR codecQualityPreset;
VkVideoSessionKHR videoSession;
VkVideoSessionParametersKHR videoSessionParameters;
uint32_t referenceSlotCount;
- const VkVideoReferenceSlotKHR* pReferenceSlots;
+ const VkVideoReferenceSlotInfoKHR* pReferenceSlots;
} VkVideoBeginCodingInfoKHR;
typedef struct VkVideoEndCodingInfoKHR {
@@ -234,12 +237,12 @@ typedef struct VkVideoCodingControlInfoKHR {
VkVideoCodingControlFlagsKHR flags;
} VkVideoCodingControlInfoKHR;
-typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR)(VkPhysicalDevice physicalDevice, const VkVideoProfileKHR* pVideoProfile, VkVideoCapabilitiesKHR* pCapabilities);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR)(VkPhysicalDevice physicalDevice, const VkVideoProfileInfoKHR* pVideoProfile, VkVideoCapabilitiesKHR* pCapabilities);
typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceVideoFormatInfoKHR* pVideoFormatInfo, uint32_t* pVideoFormatPropertyCount, VkVideoFormatPropertiesKHR* pVideoFormatProperties);
typedef VkResult (VKAPI_PTR *PFN_vkCreateVideoSessionKHR)(VkDevice device, const VkVideoSessionCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkVideoSessionKHR* pVideoSession);
typedef void (VKAPI_PTR *PFN_vkDestroyVideoSessionKHR)(VkDevice device, VkVideoSessionKHR videoSession, const VkAllocationCallbacks* pAllocator);
-typedef VkResult (VKAPI_PTR *PFN_vkGetVideoSessionMemoryRequirementsKHR)(VkDevice device, VkVideoSessionKHR videoSession, uint32_t* pVideoSessionMemoryRequirementsCount, VkVideoGetMemoryPropertiesKHR* pVideoSessionMemoryRequirements);
-typedef VkResult (VKAPI_PTR *PFN_vkBindVideoSessionMemoryKHR)(VkDevice device, VkVideoSessionKHR videoSession, uint32_t videoSessionBindMemoryCount, const VkVideoBindMemoryKHR* pVideoSessionBindMemories);
+typedef VkResult (VKAPI_PTR *PFN_vkGetVideoSessionMemoryRequirementsKHR)(VkDevice device, VkVideoSessionKHR videoSession, uint32_t* pMemoryRequirementsCount, VkVideoSessionMemoryRequirementsKHR* pMemoryRequirements);
+typedef VkResult (VKAPI_PTR *PFN_vkBindVideoSessionMemoryKHR)(VkDevice device, VkVideoSessionKHR videoSession, uint32_t bindSessionMemoryInfoCount, const VkBindVideoSessionMemoryInfoKHR* pBindSessionMemoryInfos);
typedef VkResult (VKAPI_PTR *PFN_vkCreateVideoSessionParametersKHR)(VkDevice device, const VkVideoSessionParametersCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkVideoSessionParametersKHR* pVideoSessionParameters);
typedef VkResult (VKAPI_PTR *PFN_vkUpdateVideoSessionParametersKHR)(VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkVideoSessionParametersUpdateInfoKHR* pUpdateInfo);
typedef void (VKAPI_PTR *PFN_vkDestroyVideoSessionParametersKHR)(VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkAllocationCallbacks* pAllocator);
@@ -250,7 +253,7 @@ typedef void (VKAPI_PTR *PFN_vkCmdControlVideoCodingKHR)(VkCommandBuffer command
#ifndef VK_NO_PROTOTYPES
VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceVideoCapabilitiesKHR(
VkPhysicalDevice physicalDevice,
- const VkVideoProfileKHR* pVideoProfile,
+ const VkVideoProfileInfoKHR* pVideoProfile,
VkVideoCapabilitiesKHR* pCapabilities);
VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceVideoFormatPropertiesKHR(
@@ -273,14 +276,14 @@ VKAPI_ATTR void VKAPI_CALL vkDestroyVideoSessionKHR(
VKAPI_ATTR VkResult VKAPI_CALL vkGetVideoSessionMemoryRequirementsKHR(
VkDevice device,
VkVideoSessionKHR videoSession,
- uint32_t* pVideoSessionMemoryRequirementsCount,
- VkVideoGetMemoryPropertiesKHR* pVideoSessionMemoryRequirements);
+ uint32_t* pMemoryRequirementsCount,
+ VkVideoSessionMemoryRequirementsKHR* pMemoryRequirements);
VKAPI_ATTR VkResult VKAPI_CALL vkBindVideoSessionMemoryKHR(
VkDevice device,
VkVideoSessionKHR videoSession,
- uint32_t videoSessionBindMemoryCount,
- const VkVideoBindMemoryKHR* pVideoSessionBindMemories);
+ uint32_t bindSessionMemoryInfoCount,
+ const VkBindVideoSessionMemoryInfoKHR* pBindSessionMemoryInfos);
VKAPI_ATTR VkResult VKAPI_CALL vkCreateVideoSessionParametersKHR(
VkDevice device,
@@ -313,36 +316,56 @@ VKAPI_ATTR void VKAPI_CALL vkCmdControlVideoCodingKHR(
#define VK_KHR_video_decode_queue 1
-#define VK_KHR_VIDEO_DECODE_QUEUE_SPEC_VERSION 2
+#define VK_KHR_VIDEO_DECODE_QUEUE_SPEC_VERSION 6
#define VK_KHR_VIDEO_DECODE_QUEUE_EXTENSION_NAME "VK_KHR_video_decode_queue"
-typedef enum VkVideoDecodeFlagBitsKHR {
- VK_VIDEO_DECODE_DEFAULT_KHR = 0,
- VK_VIDEO_DECODE_RESERVED_0_BIT_KHR = 0x00000001,
- VK_VIDEO_DECODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
-} VkVideoDecodeFlagBitsKHR;
+typedef enum VkVideoDecodeCapabilityFlagBitsKHR {
+ VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_COINCIDE_BIT_KHR = 0x00000001,
+ VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_DISTINCT_BIT_KHR = 0x00000002,
+ VK_VIDEO_DECODE_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoDecodeCapabilityFlagBitsKHR;
+typedef VkFlags VkVideoDecodeCapabilityFlagsKHR;
+
+typedef enum VkVideoDecodeUsageFlagBitsKHR {
+ VK_VIDEO_DECODE_USAGE_DEFAULT_KHR = 0,
+ VK_VIDEO_DECODE_USAGE_TRANSCODING_BIT_KHR = 0x00000001,
+ VK_VIDEO_DECODE_USAGE_OFFLINE_BIT_KHR = 0x00000002,
+ VK_VIDEO_DECODE_USAGE_STREAMING_BIT_KHR = 0x00000004,
+ VK_VIDEO_DECODE_USAGE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoDecodeUsageFlagBitsKHR;
+typedef VkFlags VkVideoDecodeUsageFlagsKHR;
typedef VkFlags VkVideoDecodeFlagsKHR;
+typedef struct VkVideoDecodeCapabilitiesKHR {
+ VkStructureType sType;
+ void* pNext;
+ VkVideoDecodeCapabilityFlagsKHR flags;
+} VkVideoDecodeCapabilitiesKHR;
+
+typedef struct VkVideoDecodeUsageInfoKHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkVideoDecodeUsageFlagsKHR videoUsageHints;
+} VkVideoDecodeUsageInfoKHR;
+
typedef struct VkVideoDecodeInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkVideoDecodeFlagsKHR flags;
- VkOffset2D codedOffset;
- VkExtent2D codedExtent;
- VkBuffer srcBuffer;
- VkDeviceSize srcBufferOffset;
- VkDeviceSize srcBufferRange;
- VkVideoPictureResourceKHR dstPictureResource;
- const VkVideoReferenceSlotKHR* pSetupReferenceSlot;
- uint32_t referenceSlotCount;
- const VkVideoReferenceSlotKHR* pReferenceSlots;
+ VkStructureType sType;
+ const void* pNext;
+ VkVideoDecodeFlagsKHR flags;
+ VkBuffer srcBuffer;
+ VkDeviceSize srcBufferOffset;
+ VkDeviceSize srcBufferRange;
+ VkVideoPictureResourceInfoKHR dstPictureResource;
+ const VkVideoReferenceSlotInfoKHR* pSetupReferenceSlot;
+ uint32_t referenceSlotCount;
+ const VkVideoReferenceSlotInfoKHR* pReferenceSlots;
} VkVideoDecodeInfoKHR;
-typedef void (VKAPI_PTR *PFN_vkCmdDecodeVideoKHR)(VkCommandBuffer commandBuffer, const VkVideoDecodeInfoKHR* pFrameInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdDecodeVideoKHR)(VkCommandBuffer commandBuffer, const VkVideoDecodeInfoKHR* pDecodeInfo);
#ifndef VK_NO_PROTOTYPES
VKAPI_ATTR void VKAPI_CALL vkCmdDecodeVideoKHR(
VkCommandBuffer commandBuffer,
- const VkVideoDecodeInfoKHR* pFrameInfo);
+ const VkVideoDecodeInfoKHR* pDecodeInfo);
#endif
@@ -378,22 +401,24 @@ typedef struct VkPhysicalDevicePortabilitySubsetPropertiesKHR {
#define VK_KHR_video_encode_queue 1
-#define VK_KHR_VIDEO_ENCODE_QUEUE_SPEC_VERSION 3
+#define VK_KHR_VIDEO_ENCODE_QUEUE_SPEC_VERSION 7
#define VK_KHR_VIDEO_ENCODE_QUEUE_EXTENSION_NAME "VK_KHR_video_encode_queue"
-typedef enum VkVideoEncodeFlagBitsKHR {
- VK_VIDEO_ENCODE_DEFAULT_KHR = 0,
- VK_VIDEO_ENCODE_RESERVED_0_BIT_KHR = 0x00000001,
- VK_VIDEO_ENCODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
-} VkVideoEncodeFlagBitsKHR;
+typedef enum VkVideoEncodeTuningModeKHR {
+ VK_VIDEO_ENCODE_TUNING_MODE_DEFAULT_KHR = 0,
+ VK_VIDEO_ENCODE_TUNING_MODE_HIGH_QUALITY_KHR = 1,
+ VK_VIDEO_ENCODE_TUNING_MODE_LOW_LATENCY_KHR = 2,
+ VK_VIDEO_ENCODE_TUNING_MODE_ULTRA_LOW_LATENCY_KHR = 3,
+ VK_VIDEO_ENCODE_TUNING_MODE_LOSSLESS_KHR = 4,
+ VK_VIDEO_ENCODE_TUNING_MODE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoEncodeTuningModeKHR;
typedef VkFlags VkVideoEncodeFlagsKHR;
-typedef enum VkVideoEncodeRateControlFlagBitsKHR {
- VK_VIDEO_ENCODE_RATE_CONTROL_DEFAULT_KHR = 0,
- VK_VIDEO_ENCODE_RATE_CONTROL_RESERVED_0_BIT_KHR = 0x00000001,
- VK_VIDEO_ENCODE_RATE_CONTROL_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
-} VkVideoEncodeRateControlFlagBitsKHR;
-typedef VkFlags VkVideoEncodeRateControlFlagsKHR;
+typedef enum VkVideoEncodeCapabilityFlagBitsKHR {
+ VK_VIDEO_ENCODE_CAPABILITY_PRECEDING_EXTERNALLY_ENCODED_BYTES_BIT_KHR = 0x00000001,
+ VK_VIDEO_ENCODE_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoEncodeCapabilityFlagBitsKHR;
+typedef VkFlags VkVideoEncodeCapabilityFlagsKHR;
typedef enum VkVideoEncodeRateControlModeFlagBitsKHR {
VK_VIDEO_ENCODE_RATE_CONTROL_MODE_NONE_BIT_KHR = 0,
@@ -402,22 +427,59 @@ typedef enum VkVideoEncodeRateControlModeFlagBitsKHR {
VK_VIDEO_ENCODE_RATE_CONTROL_MODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
} VkVideoEncodeRateControlModeFlagBitsKHR;
typedef VkFlags VkVideoEncodeRateControlModeFlagsKHR;
+
+typedef enum VkVideoEncodeUsageFlagBitsKHR {
+ VK_VIDEO_ENCODE_USAGE_DEFAULT_KHR = 0,
+ VK_VIDEO_ENCODE_USAGE_TRANSCODING_BIT_KHR = 0x00000001,
+ VK_VIDEO_ENCODE_USAGE_STREAMING_BIT_KHR = 0x00000002,
+ VK_VIDEO_ENCODE_USAGE_RECORDING_BIT_KHR = 0x00000004,
+ VK_VIDEO_ENCODE_USAGE_CONFERENCING_BIT_KHR = 0x00000008,
+ VK_VIDEO_ENCODE_USAGE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoEncodeUsageFlagBitsKHR;
+typedef VkFlags VkVideoEncodeUsageFlagsKHR;
+
+typedef enum VkVideoEncodeContentFlagBitsKHR {
+ VK_VIDEO_ENCODE_CONTENT_DEFAULT_KHR = 0,
+ VK_VIDEO_ENCODE_CONTENT_CAMERA_BIT_KHR = 0x00000001,
+ VK_VIDEO_ENCODE_CONTENT_DESKTOP_BIT_KHR = 0x00000002,
+ VK_VIDEO_ENCODE_CONTENT_RENDERED_BIT_KHR = 0x00000004,
+ VK_VIDEO_ENCODE_CONTENT_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoEncodeContentFlagBitsKHR;
+typedef VkFlags VkVideoEncodeContentFlagsKHR;
+typedef VkFlags VkVideoEncodeRateControlFlagsKHR;
typedef struct VkVideoEncodeInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkVideoEncodeFlagsKHR flags;
- uint32_t qualityLevel;
- VkExtent2D codedExtent;
- VkBuffer dstBitstreamBuffer;
- VkDeviceSize dstBitstreamBufferOffset;
- VkDeviceSize dstBitstreamBufferMaxRange;
- VkVideoPictureResourceKHR srcPictureResource;
- const VkVideoReferenceSlotKHR* pSetupReferenceSlot;
- uint32_t referenceSlotCount;
- const VkVideoReferenceSlotKHR* pReferenceSlots;
- uint32_t precedingExternallyEncodedBytes;
+ VkStructureType sType;
+ const void* pNext;
+ VkVideoEncodeFlagsKHR flags;
+ uint32_t qualityLevel;
+ VkBuffer dstBitstreamBuffer;
+ VkDeviceSize dstBitstreamBufferOffset;
+ VkDeviceSize dstBitstreamBufferMaxRange;
+ VkVideoPictureResourceInfoKHR srcPictureResource;
+ const VkVideoReferenceSlotInfoKHR* pSetupReferenceSlot;
+ uint32_t referenceSlotCount;
+ const VkVideoReferenceSlotInfoKHR* pReferenceSlots;
+ uint32_t precedingExternallyEncodedBytes;
} VkVideoEncodeInfoKHR;
+typedef struct VkVideoEncodeCapabilitiesKHR {
+ VkStructureType sType;
+ void* pNext;
+ VkVideoEncodeCapabilityFlagsKHR flags;
+ VkVideoEncodeRateControlModeFlagsKHR rateControlModes;
+ uint8_t rateControlLayerCount;
+ uint8_t qualityLevelCount;
+ VkExtent2D inputImageDataFillAlignment;
+} VkVideoEncodeCapabilitiesKHR;
+
+typedef struct VkVideoEncodeUsageInfoKHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkVideoEncodeUsageFlagsKHR videoUsageHints;
+ VkVideoEncodeContentFlagsKHR videoContentHints;
+ VkVideoEncodeTuningModeKHR tuningMode;
+} VkVideoEncodeUsageInfoKHR;
+
typedef struct VkVideoEncodeRateControlLayerInfoKHR {
VkStructureType sType;
const void* pNext;
@@ -450,21 +512,42 @@ VKAPI_ATTR void VKAPI_CALL vkCmdEncodeVideoKHR(
#define VK_EXT_video_encode_h264 1
#include "vk_video/vulkan_video_codec_h264std.h"
#include "vk_video/vulkan_video_codec_h264std_encode.h"
-#define VK_EXT_VIDEO_ENCODE_H264_SPEC_VERSION 3
+#define VK_EXT_VIDEO_ENCODE_H264_SPEC_VERSION 9
#define VK_EXT_VIDEO_ENCODE_H264_EXTENSION_NAME "VK_EXT_video_encode_h264"
+typedef enum VkVideoEncodeH264RateControlStructureEXT {
+ VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT = 0,
+ VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_FLAT_EXT = 1,
+ VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_DYADIC_EXT = 2,
+ VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkVideoEncodeH264RateControlStructureEXT;
+
typedef enum VkVideoEncodeH264CapabilityFlagBitsEXT {
- VK_VIDEO_ENCODE_H264_CAPABILITY_CABAC_BIT_EXT = 0x00000001,
- VK_VIDEO_ENCODE_H264_CAPABILITY_CAVLC_BIT_EXT = 0x00000002,
- VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BI_PRED_IMPLICIT_BIT_EXT = 0x00000004,
- VK_VIDEO_ENCODE_H264_CAPABILITY_TRANSFORM_8X8_BIT_EXT = 0x00000008,
- VK_VIDEO_ENCODE_H264_CAPABILITY_CHROMA_QP_OFFSET_BIT_EXT = 0x00000010,
- VK_VIDEO_ENCODE_H264_CAPABILITY_SECOND_CHROMA_QP_OFFSET_BIT_EXT = 0x00000020,
- VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_DISABLED_BIT_EXT = 0x00000040,
- VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_ENABLED_BIT_EXT = 0x00000080,
- VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_PARTIAL_BIT_EXT = 0x00000100,
- VK_VIDEO_ENCODE_H264_CAPABILITY_MULTIPLE_SLICE_PER_FRAME_BIT_EXT = 0x00000200,
- VK_VIDEO_ENCODE_H264_CAPABILITY_EVENLY_DISTRIBUTED_SLICE_SIZE_BIT_EXT = 0x00000400,
+ VK_VIDEO_ENCODE_H264_CAPABILITY_DIRECT_8X8_INFERENCE_ENABLED_BIT_EXT = 0x00000001,
+ VK_VIDEO_ENCODE_H264_CAPABILITY_DIRECT_8X8_INFERENCE_DISABLED_BIT_EXT = 0x00000002,
+ VK_VIDEO_ENCODE_H264_CAPABILITY_SEPARATE_COLOUR_PLANE_BIT_EXT = 0x00000004,
+ VK_VIDEO_ENCODE_H264_CAPABILITY_QPPRIME_Y_ZERO_TRANSFORM_BYPASS_BIT_EXT = 0x00000008,
+ VK_VIDEO_ENCODE_H264_CAPABILITY_SCALING_LISTS_BIT_EXT = 0x00000010,
+ VK_VIDEO_ENCODE_H264_CAPABILITY_HRD_COMPLIANCE_BIT_EXT = 0x00000020,
+ VK_VIDEO_ENCODE_H264_CAPABILITY_CHROMA_QP_OFFSET_BIT_EXT = 0x00000040,
+ VK_VIDEO_ENCODE_H264_CAPABILITY_SECOND_CHROMA_QP_OFFSET_BIT_EXT = 0x00000080,
+ VK_VIDEO_ENCODE_H264_CAPABILITY_PIC_INIT_QP_MINUS26_BIT_EXT = 0x00000100,
+ VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_PRED_BIT_EXT = 0x00000200,
+ VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BIPRED_EXPLICIT_BIT_EXT = 0x00000400,
+ VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BIPRED_IMPLICIT_BIT_EXT = 0x00000800,
+ VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_PRED_NO_TABLE_BIT_EXT = 0x00001000,
+ VK_VIDEO_ENCODE_H264_CAPABILITY_TRANSFORM_8X8_BIT_EXT = 0x00002000,
+ VK_VIDEO_ENCODE_H264_CAPABILITY_CABAC_BIT_EXT = 0x00004000,
+ VK_VIDEO_ENCODE_H264_CAPABILITY_CAVLC_BIT_EXT = 0x00008000,
+ VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_DISABLED_BIT_EXT = 0x00010000,
+ VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_ENABLED_BIT_EXT = 0x00020000,
+ VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_PARTIAL_BIT_EXT = 0x00040000,
+ VK_VIDEO_ENCODE_H264_CAPABILITY_DISABLE_DIRECT_SPATIAL_MV_PRED_BIT_EXT = 0x00080000,
+ VK_VIDEO_ENCODE_H264_CAPABILITY_MULTIPLE_SLICE_PER_FRAME_BIT_EXT = 0x00100000,
+ VK_VIDEO_ENCODE_H264_CAPABILITY_SLICE_MB_COUNT_BIT_EXT = 0x00200000,
+ VK_VIDEO_ENCODE_H264_CAPABILITY_ROW_UNALIGNED_SLICE_BIT_EXT = 0x00400000,
+ VK_VIDEO_ENCODE_H264_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_EXT = 0x00800000,
+ VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_EXT = 0x01000000,
VK_VIDEO_ENCODE_H264_CAPABILITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
} VkVideoEncodeH264CapabilityFlagBitsEXT;
typedef VkFlags VkVideoEncodeH264CapabilityFlagsEXT;
@@ -484,115 +567,96 @@ typedef enum VkVideoEncodeH264OutputModeFlagBitsEXT {
VK_VIDEO_ENCODE_H264_OUTPUT_MODE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
} VkVideoEncodeH264OutputModeFlagBitsEXT;
typedef VkFlags VkVideoEncodeH264OutputModeFlagsEXT;
-
-typedef enum VkVideoEncodeH264CreateFlagBitsEXT {
- VK_VIDEO_ENCODE_H264_CREATE_DEFAULT_EXT = 0,
- VK_VIDEO_ENCODE_H264_CREATE_RESERVED_0_BIT_EXT = 0x00000001,
- VK_VIDEO_ENCODE_H264_CREATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
-} VkVideoEncodeH264CreateFlagBitsEXT;
-typedef VkFlags VkVideoEncodeH264CreateFlagsEXT;
-
-typedef enum VkVideoEncodeH264RateControlStructureFlagBitsEXT {
- VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT = 0,
- VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_FLAT_BIT_EXT = 0x00000001,
- VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_DYADIC_BIT_EXT = 0x00000002,
- VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
-} VkVideoEncodeH264RateControlStructureFlagBitsEXT;
-typedef VkFlags VkVideoEncodeH264RateControlStructureFlagsEXT;
typedef struct VkVideoEncodeH264CapabilitiesEXT {
VkStructureType sType;
- const void* pNext;
+ void* pNext;
VkVideoEncodeH264CapabilityFlagsEXT flags;
VkVideoEncodeH264InputModeFlagsEXT inputModeFlags;
VkVideoEncodeH264OutputModeFlagsEXT outputModeFlags;
- VkExtent2D minPictureSizeInMbs;
- VkExtent2D maxPictureSizeInMbs;
- VkExtent2D inputImageDataAlignment;
- uint8_t maxNumL0ReferenceForP;
- uint8_t maxNumL0ReferenceForB;
- uint8_t maxNumL1Reference;
- uint8_t qualityLevelCount;
- VkExtensionProperties stdExtensionVersion;
+ uint8_t maxPPictureL0ReferenceCount;
+ uint8_t maxBPictureL0ReferenceCount;
+ uint8_t maxL1ReferenceCount;
+ VkBool32 motionVectorsOverPicBoundariesFlag;
+ uint32_t maxBytesPerPicDenom;
+ uint32_t maxBitsPerMbDenom;
+ uint32_t log2MaxMvLengthHorizontal;
+ uint32_t log2MaxMvLengthVertical;
} VkVideoEncodeH264CapabilitiesEXT;
-typedef struct VkVideoEncodeH264SessionCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkVideoEncodeH264CreateFlagsEXT flags;
- VkExtent2D maxPictureSizeInMbs;
- const VkExtensionProperties* pStdExtensionVersion;
-} VkVideoEncodeH264SessionCreateInfoEXT;
-
typedef struct VkVideoEncodeH264SessionParametersAddInfoEXT {
VkStructureType sType;
const void* pNext;
- uint32_t spsStdCount;
- const StdVideoH264SequenceParameterSet* pSpsStd;
- uint32_t ppsStdCount;
- const StdVideoH264PictureParameterSet* pPpsStd;
+ uint32_t stdSPSCount;
+ const StdVideoH264SequenceParameterSet* pStdSPSs;
+ uint32_t stdPPSCount;
+ const StdVideoH264PictureParameterSet* pStdPPSs;
} VkVideoEncodeH264SessionParametersAddInfoEXT;
typedef struct VkVideoEncodeH264SessionParametersCreateInfoEXT {
VkStructureType sType;
const void* pNext;
- uint32_t maxSpsStdCount;
- uint32_t maxPpsStdCount;
+ uint32_t maxStdSPSCount;
+ uint32_t maxStdPPSCount;
const VkVideoEncodeH264SessionParametersAddInfoEXT* pParametersAddInfo;
} VkVideoEncodeH264SessionParametersCreateInfoEXT;
typedef struct VkVideoEncodeH264DpbSlotInfoEXT {
- VkStructureType sType;
- const void* pNext;
- int8_t slotIndex;
- const StdVideoEncodeH264PictureInfo* pStdPictureInfo;
-} VkVideoEncodeH264DpbSlotInfoEXT;
-
-typedef struct VkVideoEncodeH264NaluSliceEXT {
VkStructureType sType;
const void* pNext;
- const StdVideoEncodeH264SliceHeader* pSliceHeaderStd;
- uint32_t mbCount;
- uint8_t refFinalList0EntryCount;
- const VkVideoEncodeH264DpbSlotInfoEXT* pRefFinalList0Entries;
- uint8_t refFinalList1EntryCount;
- const VkVideoEncodeH264DpbSlotInfoEXT* pRefFinalList1Entries;
-} VkVideoEncodeH264NaluSliceEXT;
+ int8_t slotIndex;
+ const StdVideoEncodeH264ReferenceInfo* pStdReferenceInfo;
+} VkVideoEncodeH264DpbSlotInfoEXT;
+
+typedef struct VkVideoEncodeH264ReferenceListsInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ uint8_t referenceList0EntryCount;
+ const VkVideoEncodeH264DpbSlotInfoEXT* pReferenceList0Entries;
+ uint8_t referenceList1EntryCount;
+ const VkVideoEncodeH264DpbSlotInfoEXT* pReferenceList1Entries;
+ const StdVideoEncodeH264RefMemMgmtCtrlOperations* pMemMgmtCtrlOperations;
+} VkVideoEncodeH264ReferenceListsInfoEXT;
+
+typedef struct VkVideoEncodeH264NaluSliceInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ uint32_t mbCount;
+ const VkVideoEncodeH264ReferenceListsInfoEXT* pReferenceFinalLists;
+ const StdVideoEncodeH264SliceHeader* pSliceHeaderStd;
+} VkVideoEncodeH264NaluSliceInfoEXT;
typedef struct VkVideoEncodeH264VclFrameInfoEXT {
- VkStructureType sType;
- const void* pNext;
- uint8_t refDefaultFinalList0EntryCount;
- const VkVideoEncodeH264DpbSlotInfoEXT* pRefDefaultFinalList0Entries;
- uint8_t refDefaultFinalList1EntryCount;
- const VkVideoEncodeH264DpbSlotInfoEXT* pRefDefaultFinalList1Entries;
- uint32_t naluSliceEntryCount;
- const VkVideoEncodeH264NaluSliceEXT* pNaluSliceEntries;
- const VkVideoEncodeH264DpbSlotInfoEXT* pCurrentPictureInfo;
+ VkStructureType sType;
+ const void* pNext;
+ const VkVideoEncodeH264ReferenceListsInfoEXT* pReferenceFinalLists;
+ uint32_t naluSliceEntryCount;
+ const VkVideoEncodeH264NaluSliceInfoEXT* pNaluSliceEntries;
+ const StdVideoEncodeH264PictureInfo* pCurrentPictureInfo;
} VkVideoEncodeH264VclFrameInfoEXT;
-typedef struct VkVideoEncodeH264EmitPictureParametersEXT {
+typedef struct VkVideoEncodeH264EmitPictureParametersInfoEXT {
VkStructureType sType;
const void* pNext;
uint8_t spsId;
VkBool32 emitSpsEnable;
uint32_t ppsIdEntryCount;
const uint8_t* ppsIdEntries;
-} VkVideoEncodeH264EmitPictureParametersEXT;
+} VkVideoEncodeH264EmitPictureParametersInfoEXT;
-typedef struct VkVideoEncodeH264ProfileEXT {
+typedef struct VkVideoEncodeH264ProfileInfoEXT {
VkStructureType sType;
const void* pNext;
StdVideoH264ProfileIdc stdProfileIdc;
-} VkVideoEncodeH264ProfileEXT;
+} VkVideoEncodeH264ProfileInfoEXT;
typedef struct VkVideoEncodeH264RateControlInfoEXT {
- VkStructureType sType;
- const void* pNext;
- uint32_t gopFrameCount;
- uint32_t idrPeriod;
- uint32_t consecutiveBFrameCount;
- VkVideoEncodeH264RateControlStructureFlagBitsEXT rateControlStructure;
- uint8_t temporalLayerCount;
+ VkStructureType sType;
+ const void* pNext;
+ uint32_t gopFrameCount;
+ uint32_t idrPeriod;
+ uint32_t consecutiveBFrameCount;
+ VkVideoEncodeH264RateControlStructureEXT rateControlStructure;
+ uint8_t temporalLayerCount;
} VkVideoEncodeH264RateControlInfoEXT;
typedef struct VkVideoEncodeH264QpEXT {
@@ -626,8 +690,45 @@ typedef struct VkVideoEncodeH264RateControlLayerInfoEXT {
#define VK_EXT_video_encode_h265 1
#include "vk_video/vulkan_video_codec_h265std.h"
#include "vk_video/vulkan_video_codec_h265std_encode.h"
-#define VK_EXT_VIDEO_ENCODE_H265_SPEC_VERSION 4
+#define VK_EXT_VIDEO_ENCODE_H265_SPEC_VERSION 9
#define VK_EXT_VIDEO_ENCODE_H265_EXTENSION_NAME "VK_EXT_video_encode_h265"
+
+typedef enum VkVideoEncodeH265RateControlStructureEXT {
+ VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT = 0,
+ VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_FLAT_EXT = 1,
+ VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_DYADIC_EXT = 2,
+ VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkVideoEncodeH265RateControlStructureEXT;
+
+typedef enum VkVideoEncodeH265CapabilityFlagBitsEXT {
+ VK_VIDEO_ENCODE_H265_CAPABILITY_SEPARATE_COLOUR_PLANE_BIT_EXT = 0x00000001,
+ VK_VIDEO_ENCODE_H265_CAPABILITY_SCALING_LISTS_BIT_EXT = 0x00000002,
+ VK_VIDEO_ENCODE_H265_CAPABILITY_SAMPLE_ADAPTIVE_OFFSET_ENABLED_BIT_EXT = 0x00000004,
+ VK_VIDEO_ENCODE_H265_CAPABILITY_PCM_ENABLE_BIT_EXT = 0x00000008,
+ VK_VIDEO_ENCODE_H265_CAPABILITY_SPS_TEMPORAL_MVP_ENABLED_BIT_EXT = 0x00000010,
+ VK_VIDEO_ENCODE_H265_CAPABILITY_HRD_COMPLIANCE_BIT_EXT = 0x00000020,
+ VK_VIDEO_ENCODE_H265_CAPABILITY_INIT_QP_MINUS26_BIT_EXT = 0x00000040,
+ VK_VIDEO_ENCODE_H265_CAPABILITY_LOG2_PARALLEL_MERGE_LEVEL_MINUS2_BIT_EXT = 0x00000080,
+ VK_VIDEO_ENCODE_H265_CAPABILITY_SIGN_DATA_HIDING_ENABLED_BIT_EXT = 0x00000100,
+ VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSFORM_SKIP_ENABLED_BIT_EXT = 0x00000200,
+ VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSFORM_SKIP_DISABLED_BIT_EXT = 0x00000400,
+ VK_VIDEO_ENCODE_H265_CAPABILITY_PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT_BIT_EXT = 0x00000800,
+ VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_PRED_BIT_EXT = 0x00001000,
+ VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_BIPRED_BIT_EXT = 0x00002000,
+ VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_PRED_NO_TABLE_BIT_EXT = 0x00004000,
+ VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSQUANT_BYPASS_ENABLED_BIT_EXT = 0x00008000,
+ VK_VIDEO_ENCODE_H265_CAPABILITY_ENTROPY_CODING_SYNC_ENABLED_BIT_EXT = 0x00010000,
+ VK_VIDEO_ENCODE_H265_CAPABILITY_DEBLOCKING_FILTER_OVERRIDE_ENABLED_BIT_EXT = 0x00020000,
+ VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILE_PER_FRAME_BIT_EXT = 0x00040000,
+ VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_SLICE_PER_TILE_BIT_EXT = 0x00080000,
+ VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILE_PER_SLICE_BIT_EXT = 0x00100000,
+ VK_VIDEO_ENCODE_H265_CAPABILITY_SLICE_SEGMENT_CTB_COUNT_BIT_EXT = 0x00200000,
+ VK_VIDEO_ENCODE_H265_CAPABILITY_ROW_UNALIGNED_SLICE_SEGMENT_BIT_EXT = 0x00400000,
+ VK_VIDEO_ENCODE_H265_CAPABILITY_DEPENDENT_SLICE_SEGMENT_BIT_EXT = 0x00800000,
+ VK_VIDEO_ENCODE_H265_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_EXT = 0x01000000,
+ VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_EXT = 0x02000000,
+ VK_VIDEO_ENCODE_H265_CAPABILITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkVideoEncodeH265CapabilityFlagBitsEXT;
typedef VkFlags VkVideoEncodeH265CapabilityFlagsEXT;
typedef enum VkVideoEncodeH265InputModeFlagBitsEXT {
@@ -645,64 +746,65 @@ typedef enum VkVideoEncodeH265OutputModeFlagBitsEXT {
VK_VIDEO_ENCODE_H265_OUTPUT_MODE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
} VkVideoEncodeH265OutputModeFlagBitsEXT;
typedef VkFlags VkVideoEncodeH265OutputModeFlagsEXT;
-typedef VkFlags VkVideoEncodeH265CreateFlagsEXT;
typedef enum VkVideoEncodeH265CtbSizeFlagBitsEXT {
- VK_VIDEO_ENCODE_H265_CTB_SIZE_8_BIT_EXT = 0x00000001,
- VK_VIDEO_ENCODE_H265_CTB_SIZE_16_BIT_EXT = 0x00000002,
- VK_VIDEO_ENCODE_H265_CTB_SIZE_32_BIT_EXT = 0x00000004,
- VK_VIDEO_ENCODE_H265_CTB_SIZE_64_BIT_EXT = 0x00000008,
+ VK_VIDEO_ENCODE_H265_CTB_SIZE_16_BIT_EXT = 0x00000001,
+ VK_VIDEO_ENCODE_H265_CTB_SIZE_32_BIT_EXT = 0x00000002,
+ VK_VIDEO_ENCODE_H265_CTB_SIZE_64_BIT_EXT = 0x00000004,
VK_VIDEO_ENCODE_H265_CTB_SIZE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
} VkVideoEncodeH265CtbSizeFlagBitsEXT;
typedef VkFlags VkVideoEncodeH265CtbSizeFlagsEXT;
-typedef enum VkVideoEncodeH265RateControlStructureFlagBitsEXT {
- VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT = 0,
- VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_FLAT_BIT_EXT = 0x00000001,
- VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_DYADIC_BIT_EXT = 0x00000002,
- VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
-} VkVideoEncodeH265RateControlStructureFlagBitsEXT;
-typedef VkFlags VkVideoEncodeH265RateControlStructureFlagsEXT;
+typedef enum VkVideoEncodeH265TransformBlockSizeFlagBitsEXT {
+ VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_4_BIT_EXT = 0x00000001,
+ VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_8_BIT_EXT = 0x00000002,
+ VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_16_BIT_EXT = 0x00000004,
+ VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_32_BIT_EXT = 0x00000008,
+ VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkVideoEncodeH265TransformBlockSizeFlagBitsEXT;
+typedef VkFlags VkVideoEncodeH265TransformBlockSizeFlagsEXT;
typedef struct VkVideoEncodeH265CapabilitiesEXT {
- VkStructureType sType;
- const void* pNext;
- VkVideoEncodeH265CapabilityFlagsEXT flags;
- VkVideoEncodeH265InputModeFlagsEXT inputModeFlags;
- VkVideoEncodeH265OutputModeFlagsEXT outputModeFlags;
- VkVideoEncodeH265CtbSizeFlagsEXT ctbSizes;
- VkExtent2D inputImageDataAlignment;
- uint8_t maxNumL0ReferenceForP;
- uint8_t maxNumL0ReferenceForB;
- uint8_t maxNumL1Reference;
- uint8_t maxNumSubLayers;
- uint8_t qualityLevelCount;
- VkExtensionProperties stdExtensionVersion;
+ VkStructureType sType;
+ void* pNext;
+ VkVideoEncodeH265CapabilityFlagsEXT flags;
+ VkVideoEncodeH265InputModeFlagsEXT inputModeFlags;
+ VkVideoEncodeH265OutputModeFlagsEXT outputModeFlags;
+ VkVideoEncodeH265CtbSizeFlagsEXT ctbSizes;
+ VkVideoEncodeH265TransformBlockSizeFlagsEXT transformBlockSizes;
+ uint8_t maxPPictureL0ReferenceCount;
+ uint8_t maxBPictureL0ReferenceCount;
+ uint8_t maxL1ReferenceCount;
+ uint8_t maxSubLayersCount;
+ uint8_t minLog2MinLumaCodingBlockSizeMinus3;
+ uint8_t maxLog2MinLumaCodingBlockSizeMinus3;
+ uint8_t minLog2MinLumaTransformBlockSizeMinus2;
+ uint8_t maxLog2MinLumaTransformBlockSizeMinus2;
+ uint8_t minMaxTransformHierarchyDepthInter;
+ uint8_t maxMaxTransformHierarchyDepthInter;
+ uint8_t minMaxTransformHierarchyDepthIntra;
+ uint8_t maxMaxTransformHierarchyDepthIntra;
+ uint8_t maxDiffCuQpDeltaDepth;
+ uint8_t minMaxNumMergeCand;
+ uint8_t maxMaxNumMergeCand;
} VkVideoEncodeH265CapabilitiesEXT;
-typedef struct VkVideoEncodeH265SessionCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkVideoEncodeH265CreateFlagsEXT flags;
- const VkExtensionProperties* pStdExtensionVersion;
-} VkVideoEncodeH265SessionCreateInfoEXT;
-
typedef struct VkVideoEncodeH265SessionParametersAddInfoEXT {
VkStructureType sType;
const void* pNext;
- uint32_t vpsStdCount;
- const StdVideoH265VideoParameterSet* pVpsStd;
- uint32_t spsStdCount;
- const StdVideoH265SequenceParameterSet* pSpsStd;
- uint32_t ppsStdCount;
- const StdVideoH265PictureParameterSet* pPpsStd;
+ uint32_t stdVPSCount;
+ const StdVideoH265VideoParameterSet* pStdVPSs;
+ uint32_t stdSPSCount;
+ const StdVideoH265SequenceParameterSet* pStdSPSs;
+ uint32_t stdPPSCount;
+ const StdVideoH265PictureParameterSet* pStdPPSs;
} VkVideoEncodeH265SessionParametersAddInfoEXT;
typedef struct VkVideoEncodeH265SessionParametersCreateInfoEXT {
VkStructureType sType;
const void* pNext;
- uint32_t maxVpsStdCount;
- uint32_t maxSpsStdCount;
- uint32_t maxPpsStdCount;
+ uint32_t maxStdVPSCount;
+ uint32_t maxStdSPSCount;
+ uint32_t maxStdPPSCount;
const VkVideoEncodeH265SessionParametersAddInfoEXT* pParametersAddInfo;
} VkVideoEncodeH265SessionParametersCreateInfoEXT;
@@ -713,7 +815,7 @@ typedef struct VkVideoEncodeH265DpbSlotInfoEXT {
const StdVideoEncodeH265ReferenceInfo* pStdReferenceInfo;
} VkVideoEncodeH265DpbSlotInfoEXT;
-typedef struct VkVideoEncodeH265ReferenceListsEXT {
+typedef struct VkVideoEncodeH265ReferenceListsInfoEXT {
VkStructureType sType;
const void* pNext;
uint8_t referenceList0EntryCount;
@@ -721,26 +823,26 @@ typedef struct VkVideoEncodeH265ReferenceListsEXT {
uint8_t referenceList1EntryCount;
const VkVideoEncodeH265DpbSlotInfoEXT* pReferenceList1Entries;
const StdVideoEncodeH265ReferenceModifications* pReferenceModifications;
-} VkVideoEncodeH265ReferenceListsEXT;
+} VkVideoEncodeH265ReferenceListsInfoEXT;
-typedef struct VkVideoEncodeH265NaluSliceSegmentEXT {
- VkStructureType sType;
- const void* pNext;
- uint32_t ctbCount;
- const VkVideoEncodeH265ReferenceListsEXT* pReferenceFinalLists;
- const StdVideoEncodeH265SliceSegmentHeader* pSliceSegmentHeaderStd;
-} VkVideoEncodeH265NaluSliceSegmentEXT;
+typedef struct VkVideoEncodeH265NaluSliceSegmentInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ uint32_t ctbCount;
+ const VkVideoEncodeH265ReferenceListsInfoEXT* pReferenceFinalLists;
+ const StdVideoEncodeH265SliceSegmentHeader* pSliceSegmentHeaderStd;
+} VkVideoEncodeH265NaluSliceSegmentInfoEXT;
typedef struct VkVideoEncodeH265VclFrameInfoEXT {
- VkStructureType sType;
- const void* pNext;
- const VkVideoEncodeH265ReferenceListsEXT* pReferenceFinalLists;
- uint32_t naluSliceSegmentEntryCount;
- const VkVideoEncodeH265NaluSliceSegmentEXT* pNaluSliceSegmentEntries;
- const StdVideoEncodeH265PictureInfo* pCurrentPictureInfo;
+ VkStructureType sType;
+ const void* pNext;
+ const VkVideoEncodeH265ReferenceListsInfoEXT* pReferenceFinalLists;
+ uint32_t naluSliceSegmentEntryCount;
+ const VkVideoEncodeH265NaluSliceSegmentInfoEXT* pNaluSliceSegmentEntries;
+ const StdVideoEncodeH265PictureInfo* pCurrentPictureInfo;
} VkVideoEncodeH265VclFrameInfoEXT;
-typedef struct VkVideoEncodeH265EmitPictureParametersEXT {
+typedef struct VkVideoEncodeH265EmitPictureParametersInfoEXT {
VkStructureType sType;
const void* pNext;
uint8_t vpsId;
@@ -749,22 +851,22 @@ typedef struct VkVideoEncodeH265EmitPictureParametersEXT {
VkBool32 emitSpsEnable;
uint32_t ppsIdEntryCount;
const uint8_t* ppsIdEntries;
-} VkVideoEncodeH265EmitPictureParametersEXT;
+} VkVideoEncodeH265EmitPictureParametersInfoEXT;
-typedef struct VkVideoEncodeH265ProfileEXT {
+typedef struct VkVideoEncodeH265ProfileInfoEXT {
VkStructureType sType;
const void* pNext;
StdVideoH265ProfileIdc stdProfileIdc;
-} VkVideoEncodeH265ProfileEXT;
+} VkVideoEncodeH265ProfileInfoEXT;
typedef struct VkVideoEncodeH265RateControlInfoEXT {
- VkStructureType sType;
- const void* pNext;
- uint32_t gopFrameCount;
- uint32_t idrPeriod;
- uint32_t consecutiveBFrameCount;
- VkVideoEncodeH265RateControlStructureFlagBitsEXT rateControlStructure;
- uint8_t subLayerCount;
+ VkStructureType sType;
+ const void* pNext;
+ uint32_t gopFrameCount;
+ uint32_t idrPeriod;
+ uint32_t consecutiveBFrameCount;
+ VkVideoEncodeH265RateControlStructureEXT rateControlStructure;
+ uint8_t subLayerCount;
} VkVideoEncodeH265RateControlInfoEXT;
typedef struct VkVideoEncodeH265QpEXT {
@@ -797,7 +899,7 @@ typedef struct VkVideoEncodeH265RateControlLayerInfoEXT {
#define VK_EXT_video_decode_h264 1
#include "vk_video/vulkan_video_codec_h264std_decode.h"
-#define VK_EXT_VIDEO_DECODE_H264_SPEC_VERSION 3
+#define VK_EXT_VIDEO_DECODE_H264_SPEC_VERSION 7
#define VK_EXT_VIDEO_DECODE_H264_EXTENSION_NAME "VK_EXT_video_decode_h264"
typedef enum VkVideoDecodeH264PictureLayoutFlagBitsEXT {
@@ -807,43 +909,34 @@ typedef enum VkVideoDecodeH264PictureLayoutFlagBitsEXT {
VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
} VkVideoDecodeH264PictureLayoutFlagBitsEXT;
typedef VkFlags VkVideoDecodeH264PictureLayoutFlagsEXT;
-typedef VkFlags VkVideoDecodeH264CreateFlagsEXT;
-typedef struct VkVideoDecodeH264ProfileEXT {
- VkStructureType sType;
- const void* pNext;
- StdVideoH264ProfileIdc stdProfileIdc;
- VkVideoDecodeH264PictureLayoutFlagsEXT pictureLayout;
-} VkVideoDecodeH264ProfileEXT;
+typedef struct VkVideoDecodeH264ProfileInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ StdVideoH264ProfileIdc stdProfileIdc;
+ VkVideoDecodeH264PictureLayoutFlagBitsEXT pictureLayout;
+} VkVideoDecodeH264ProfileInfoEXT;
typedef struct VkVideoDecodeH264CapabilitiesEXT {
- VkStructureType sType;
- void* pNext;
- uint32_t maxLevel;
- VkOffset2D fieldOffsetGranularity;
- VkExtensionProperties stdExtensionVersion;
+ VkStructureType sType;
+ void* pNext;
+ StdVideoH264LevelIdc maxLevelIdc;
+ VkOffset2D fieldOffsetGranularity;
} VkVideoDecodeH264CapabilitiesEXT;
-typedef struct VkVideoDecodeH264SessionCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkVideoDecodeH264CreateFlagsEXT flags;
- const VkExtensionProperties* pStdExtensionVersion;
-} VkVideoDecodeH264SessionCreateInfoEXT;
-
typedef struct VkVideoDecodeH264SessionParametersAddInfoEXT {
VkStructureType sType;
const void* pNext;
- uint32_t spsStdCount;
- const StdVideoH264SequenceParameterSet* pSpsStd;
- uint32_t ppsStdCount;
- const StdVideoH264PictureParameterSet* pPpsStd;
+ uint32_t stdSPSCount;
+ const StdVideoH264SequenceParameterSet* pStdSPSs;
+ uint32_t stdPPSCount;
+ const StdVideoH264PictureParameterSet* pStdPPSs;
} VkVideoDecodeH264SessionParametersAddInfoEXT;
typedef struct VkVideoDecodeH264SessionParametersCreateInfoEXT {
VkStructureType sType;
const void* pNext;
- uint32_t maxSpsStdCount;
- uint32_t maxPpsStdCount;
+ uint32_t maxStdSPSCount;
+ uint32_t maxStdPPSCount;
const VkVideoDecodeH264SessionParametersAddInfoEXT* pParametersAddInfo;
} VkVideoDecodeH264SessionParametersCreateInfoEXT;
@@ -851,16 +944,10 @@ typedef struct VkVideoDecodeH264PictureInfoEXT {
VkStructureType sType;
const void* pNext;
const StdVideoDecodeH264PictureInfo* pStdPictureInfo;
- uint32_t slicesCount;
- const uint32_t* pSlicesDataOffsets;
+ uint32_t sliceCount;
+ const uint32_t* pSliceOffsets;
} VkVideoDecodeH264PictureInfoEXT;
-typedef struct VkVideoDecodeH264MvcEXT {
- VkStructureType sType;
- const void* pNext;
- const StdVideoDecodeH264Mvc* pStdMvc;
-} VkVideoDecodeH264MvcEXT;
-
typedef struct VkVideoDecodeH264DpbSlotInfoEXT {
VkStructureType sType;
const void* pNext;
@@ -871,43 +958,37 @@ typedef struct VkVideoDecodeH264DpbSlotInfoEXT {
#define VK_EXT_video_decode_h265 1
#include "vk_video/vulkan_video_codec_h265std_decode.h"
-#define VK_EXT_VIDEO_DECODE_H265_SPEC_VERSION 1
+#define VK_EXT_VIDEO_DECODE_H265_SPEC_VERSION 5
#define VK_EXT_VIDEO_DECODE_H265_EXTENSION_NAME "VK_EXT_video_decode_h265"
-typedef VkFlags VkVideoDecodeH265CreateFlagsEXT;
-typedef struct VkVideoDecodeH265ProfileEXT {
+typedef struct VkVideoDecodeH265ProfileInfoEXT {
VkStructureType sType;
const void* pNext;
StdVideoH265ProfileIdc stdProfileIdc;
-} VkVideoDecodeH265ProfileEXT;
+} VkVideoDecodeH265ProfileInfoEXT;
typedef struct VkVideoDecodeH265CapabilitiesEXT {
- VkStructureType sType;
- void* pNext;
- uint32_t maxLevel;
- VkExtensionProperties stdExtensionVersion;
+ VkStructureType sType;
+ void* pNext;
+ StdVideoH265LevelIdc maxLevelIdc;
} VkVideoDecodeH265CapabilitiesEXT;
-typedef struct VkVideoDecodeH265SessionCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkVideoDecodeH265CreateFlagsEXT flags;
- const VkExtensionProperties* pStdExtensionVersion;
-} VkVideoDecodeH265SessionCreateInfoEXT;
-
typedef struct VkVideoDecodeH265SessionParametersAddInfoEXT {
VkStructureType sType;
const void* pNext;
- uint32_t spsStdCount;
- const StdVideoH265SequenceParameterSet* pSpsStd;
- uint32_t ppsStdCount;
- const StdVideoH265PictureParameterSet* pPpsStd;
+ uint32_t stdVPSCount;
+ const StdVideoH265VideoParameterSet* pStdVPSs;
+ uint32_t stdSPSCount;
+ const StdVideoH265SequenceParameterSet* pStdSPSs;
+ uint32_t stdPPSCount;
+ const StdVideoH265PictureParameterSet* pStdPPSs;
} VkVideoDecodeH265SessionParametersAddInfoEXT;
typedef struct VkVideoDecodeH265SessionParametersCreateInfoEXT {
VkStructureType sType;
const void* pNext;
- uint32_t maxSpsStdCount;
- uint32_t maxPpsStdCount;
+ uint32_t maxStdVPSCount;
+ uint32_t maxStdSPSCount;
+ uint32_t maxStdPPSCount;
const VkVideoDecodeH265SessionParametersAddInfoEXT* pParametersAddInfo;
} VkVideoDecodeH265SessionParametersCreateInfoEXT;
@@ -915,8 +996,8 @@ typedef struct VkVideoDecodeH265PictureInfoEXT {
VkStructureType sType;
const void* pNext;
StdVideoDecodeH265PictureInfo* pStdPictureInfo;
- uint32_t slicesCount;
- const uint32_t* pSlicesDataOffsets;
+ uint32_t sliceCount;
+ const uint32_t* pSliceOffsets;
} VkVideoDecodeH265PictureInfoEXT;
typedef struct VkVideoDecodeH265DpbSlotInfoEXT {
diff --git a/thirdparty/vulkan/include/vulkan/vulkan_core.h b/thirdparty/vulkan/include/vulkan/vulkan_core.h
index 2771094554..b7fddd1906 100644
--- a/thirdparty/vulkan/include/vulkan/vulkan_core.h
+++ b/thirdparty/vulkan/include/vulkan/vulkan_core.h
@@ -72,7 +72,7 @@ extern "C" {
#define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0
// Version of this file
-#define VK_HEADER_VERSION 205
+#define VK_HEADER_VERSION 231
// Complete version of this file
#define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION)
@@ -120,7 +120,6 @@ VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSet)
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorPool)
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFramebuffer)
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCommandPool)
-#define VK_UUID_SIZE 16U
#define VK_ATTACHMENT_UNUSED (~0U)
#define VK_FALSE 0U
#define VK_LOD_CLAMP_NONE 1000.0F
@@ -131,10 +130,11 @@ VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCommandPool)
#define VK_TRUE 1U
#define VK_WHOLE_SIZE (~0ULL)
#define VK_MAX_MEMORY_TYPES 32U
-#define VK_MAX_MEMORY_HEAPS 16U
#define VK_MAX_PHYSICAL_DEVICE_NAME_SIZE 256U
+#define VK_UUID_SIZE 16U
#define VK_MAX_EXTENSION_NAME_SIZE 256U
#define VK_MAX_DESCRIPTION_SIZE 256U
+#define VK_MAX_MEMORY_HEAPS 16U
typedef enum VkResult {
VK_SUCCESS = 0,
@@ -168,6 +168,24 @@ typedef enum VkResult {
VK_ERROR_INCOMPATIBLE_DISPLAY_KHR = -1000003001,
VK_ERROR_VALIDATION_FAILED_EXT = -1000011001,
VK_ERROR_INVALID_SHADER_NV = -1000012000,
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR = -1000023000,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR = -1000023001,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR = -1000023002,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR = -1000023003,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR = -1000023004,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR = -1000023005,
+#endif
VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT = -1000158000,
VK_ERROR_NOT_PERMITTED_KHR = -1000174001,
VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT = -1000255000,
@@ -175,6 +193,7 @@ typedef enum VkResult {
VK_THREAD_DONE_KHR = 1000268001,
VK_OPERATION_DEFERRED_KHR = 1000268002,
VK_OPERATION_NOT_DEFERRED_KHR = 1000268003,
+ VK_ERROR_COMPRESSION_EXHAUSTED_EXT = -1000338000,
VK_ERROR_OUT_OF_POOL_MEMORY_KHR = VK_ERROR_OUT_OF_POOL_MEMORY,
VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR = VK_ERROR_INVALID_EXTERNAL_HANDLE,
VK_ERROR_FRAGMENTATION_EXT = VK_ERROR_FRAGMENTATION,
@@ -425,19 +444,19 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT = 1000022001,
VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT = 1000022002,
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_PROFILE_KHR = 1000023000,
+ VK_STRUCTURE_TYPE_VIDEO_PROFILE_INFO_KHR = 1000023000,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR = 1000023001,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_KHR = 1000023002,
+ VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_INFO_KHR = 1000023002,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_GET_MEMORY_PROPERTIES_KHR = 1000023003,
+ VK_STRUCTURE_TYPE_VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR = 1000023003,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_BIND_MEMORY_KHR = 1000023004,
+ VK_STRUCTURE_TYPE_BIND_VIDEO_SESSION_MEMORY_INFO_KHR = 1000023004,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR = 1000023005,
@@ -458,13 +477,13 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_VIDEO_CODING_CONTROL_INFO_KHR = 1000023010,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_KHR = 1000023011,
+ VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_INFO_KHR = 1000023011,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_QUEUE_FAMILY_PROPERTIES_2_KHR = 1000023012,
+ VK_STRUCTURE_TYPE_QUEUE_FAMILY_VIDEO_PROPERTIES_KHR = 1000023012,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_PROFILES_KHR = 1000023013,
+ VK_STRUCTURE_TYPE_VIDEO_PROFILE_LIST_INFO_KHR = 1000023013,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR = 1000023014,
@@ -473,11 +492,17 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_VIDEO_FORMAT_PROPERTIES_KHR = 1000023015,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_2_KHR = 1000023016,
+ VK_STRUCTURE_TYPE_QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR = 1000023016,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
VK_STRUCTURE_TYPE_VIDEO_DECODE_INFO_KHR = 1000024000,
#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VK_STRUCTURE_TYPE_VIDEO_DECODE_CAPABILITIES_KHR = 1000024001,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VK_STRUCTURE_TYPE_VIDEO_DECODE_USAGE_INFO_KHR = 1000024002,
+#endif
VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV = 1000026000,
VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV = 1000026001,
VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV = 1000026002,
@@ -493,94 +518,85 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_EXT = 1000038000,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_CREATE_INFO_EXT = 1000038001,
+ VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT = 1000038001,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT = 1000038002,
+ VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT = 1000038002,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT = 1000038003,
+ VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_VCL_FRAME_INFO_EXT = 1000038003,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_VCL_FRAME_INFO_EXT = 1000038004,
+ VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_EXT = 1000038004,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_EXT = 1000038005,
+ VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_INFO_EXT = 1000038005,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_EXT = 1000038006,
+ VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_EMIT_PICTURE_PARAMETERS_INFO_EXT = 1000038006,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_EMIT_PICTURE_PARAMETERS_EXT = 1000038007,
+ VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_INFO_EXT = 1000038007,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_EXT = 1000038008,
+ VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_INFO_EXT = 1000038008,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_INFO_EXT = 1000038009,
+ VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_EXT = 1000038009,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_EXT = 1000038010,
+ VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_REFERENCE_LISTS_INFO_EXT = 1000038010,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_CAPABILITIES_EXT = 1000039000,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_CREATE_INFO_EXT = 1000039001,
-#endif
-#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT = 1000039002,
+ VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT = 1000039001,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT = 1000039003,
+ VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT = 1000039002,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_VCL_FRAME_INFO_EXT = 1000039004,
+ VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_VCL_FRAME_INFO_EXT = 1000039003,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_DPB_SLOT_INFO_EXT = 1000039005,
+ VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_DPB_SLOT_INFO_EXT = 1000039004,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_EXT = 1000039006,
+ VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_EXT = 1000039005,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_EMIT_PICTURE_PARAMETERS_EXT = 1000039007,
+ VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_EMIT_PICTURE_PARAMETERS_INFO_EXT = 1000039006,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PROFILE_EXT = 1000039008,
+ VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PROFILE_INFO_EXT = 1000039007,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_REFERENCE_LISTS_EXT = 1000039009,
+ VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_REFERENCE_LISTS_INFO_EXT = 1000039008,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_INFO_EXT = 1000039010,
+ VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_INFO_EXT = 1000039009,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_EXT = 1000039011,
+ VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_EXT = 1000039010,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_EXT = 1000040000,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_CREATE_INFO_EXT = 1000040001,
-#endif
-#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_EXT = 1000040002,
+ VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_EXT = 1000040001,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_MVC_EXT = 1000040003,
+ VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_INFO_EXT = 1000040003,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_EXT = 1000040004,
+ VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT = 1000040004,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT = 1000040005,
+ VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT = 1000040005,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT = 1000040006,
-#endif
-#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_EXT = 1000040007,
+ VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_EXT = 1000040006,
#endif
VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD = 1000041000,
VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR = 1000044006,
@@ -598,6 +614,9 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN = 1000062000,
VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT = 1000067000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT = 1000067001,
+ VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO_EXT = 1000068000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT = 1000068001,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT = 1000068002,
VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073000,
VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073001,
VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR = 1000073002,
@@ -745,22 +764,19 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_EXT = 1000187000,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_CREATE_INFO_EXT = 1000187001,
-#endif
-#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT = 1000187002,
+ VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT = 1000187001,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT = 1000187003,
+ VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT = 1000187002,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_EXT = 1000187004,
+ VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_INFO_EXT = 1000187003,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_EXT = 1000187005,
+ VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_EXT = 1000187004,
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
- VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_EXT = 1000187006,
+ VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_EXT = 1000187005,
#endif
VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR = 1000174000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR = 1000388000,
@@ -773,7 +789,6 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV = 1000201000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV = 1000202000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV = 1000202001,
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV = 1000203000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV = 1000204000,
VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV = 1000205000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV = 1000205002,
@@ -861,6 +876,9 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT = 1000287001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT = 1000287002,
VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR = 1000290000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_BARRIER_FEATURES_NV = 1000292000,
+ VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_PRESENT_BARRIER_NV = 1000292001,
+ VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV = 1000292002,
VK_STRUCTURE_TYPE_PRESENT_ID_KHR = 1000294000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR = 1000294001,
#ifdef VK_ENABLE_BETA_EXTENSIONS
@@ -872,10 +890,34 @@ typedef enum VkStructureType {
#ifdef VK_ENABLE_BETA_EXTENSIONS
VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_LAYER_INFO_KHR = 1000299002,
#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VK_STRUCTURE_TYPE_VIDEO_ENCODE_CAPABILITIES_KHR = 1000299003,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VK_STRUCTURE_TYPE_VIDEO_ENCODE_USAGE_INFO_KHR = 1000299004,
+#endif
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV = 1000300000,
VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV = 1000300001,
+ VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECT_CREATE_INFO_EXT = 1000311000,
+ VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECTS_INFO_EXT = 1000311001,
+ VK_STRUCTURE_TYPE_EXPORT_METAL_DEVICE_INFO_EXT = 1000311002,
+ VK_STRUCTURE_TYPE_EXPORT_METAL_COMMAND_QUEUE_INFO_EXT = 1000311003,
+ VK_STRUCTURE_TYPE_EXPORT_METAL_BUFFER_INFO_EXT = 1000311004,
+ VK_STRUCTURE_TYPE_IMPORT_METAL_BUFFER_INFO_EXT = 1000311005,
+ VK_STRUCTURE_TYPE_EXPORT_METAL_TEXTURE_INFO_EXT = 1000311006,
+ VK_STRUCTURE_TYPE_IMPORT_METAL_TEXTURE_INFO_EXT = 1000311007,
+ VK_STRUCTURE_TYPE_EXPORT_METAL_IO_SURFACE_INFO_EXT = 1000311008,
+ VK_STRUCTURE_TYPE_IMPORT_METAL_IO_SURFACE_INFO_EXT = 1000311009,
+ VK_STRUCTURE_TYPE_EXPORT_METAL_SHARED_EVENT_INFO_EXT = 1000311010,
+ VK_STRUCTURE_TYPE_IMPORT_METAL_SHARED_EVENT_INFO_EXT = 1000311011,
VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV = 1000314008,
VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV = 1000314009,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT = 1000320000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT = 1000320001,
+ VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT = 1000320002,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_FEATURES_AMD = 1000321000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR = 1000203000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR = 1000322000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR = 1000323000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV = 1000326000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV = 1000326001,
@@ -883,21 +925,31 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_MOTION_TRIANGLES_DATA_NV = 1000327000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV = 1000327001,
VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MOTION_INFO_NV = 1000327002,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT = 1000328000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_EXT = 1000328001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT = 1000330000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT = 1000332000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT = 1000332001,
VK_STRUCTURE_TYPE_COPY_COMMAND_TRANSFORM_INFO_QCOM = 1000333000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR = 1000336000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT = 1000338000,
+ VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT = 1000338001,
+ VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_EXT = 1000338002,
+ VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_EXT = 1000338003,
+ VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT = 1000338004,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT = 1000339000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT = 1000340000,
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM = 1000342000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FAULT_FEATURES_EXT = 1000341000,
+ VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT = 1000341001,
+ VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT = 1000341002,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT = 1000344000,
VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT = 1000346000,
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE = 1000351000,
- VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE = 1000351002,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT = 1000352000,
VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT = 1000352001,
VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT = 1000352002,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT = 1000353000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ADDRESS_BINDING_REPORT_FEATURES_EXT = 1000354000,
+ VK_STRUCTURE_TYPE_DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT = 1000354001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT = 1000355000,
VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT = 1000355001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT = 1000356000,
@@ -922,21 +974,76 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INVOCATION_MASK_FEATURES_HUAWEI = 1000370000,
VK_STRUCTURE_TYPE_MEMORY_GET_REMOTE_ADDRESS_INFO_NV = 1000371000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_RDMA_FEATURES_NV = 1000371001,
+ VK_STRUCTURE_TYPE_PIPELINE_PROPERTIES_IDENTIFIER_EXT = 1000372000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROPERTIES_FEATURES_EXT = 1000372001,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_EXT = 1000376000,
+ VK_STRUCTURE_TYPE_SUBPASS_RESOLVE_PERFORMANCE_QUERY_EXT = 1000376001,
+ VK_STRUCTURE_TYPE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_INFO_EXT = 1000376002,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT = 1000377000,
VK_STRUCTURE_TYPE_SCREEN_SURFACE_CREATE_INFO_QNX = 1000378000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT = 1000381000,
VK_STRUCTURE_TYPE_PIPELINE_COLOR_WRITE_CREATE_INFO_EXT = 1000381001,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT = 1000382000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MAINTENANCE_1_FEATURES_KHR = 1000386000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_MIN_LOD_FEATURES_EXT = 1000391000,
VK_STRUCTURE_TYPE_IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT = 1000391001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT = 1000392000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT = 1000392001,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT = 1000393000,
+ VK_STRUCTURE_TYPE_MICROMAP_BUILD_INFO_EXT = 1000396000,
+ VK_STRUCTURE_TYPE_MICROMAP_VERSION_INFO_EXT = 1000396001,
+ VK_STRUCTURE_TYPE_COPY_MICROMAP_INFO_EXT = 1000396002,
+ VK_STRUCTURE_TYPE_COPY_MICROMAP_TO_MEMORY_INFO_EXT = 1000396003,
+ VK_STRUCTURE_TYPE_COPY_MEMORY_TO_MICROMAP_INFO_EXT = 1000396004,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_FEATURES_EXT = 1000396005,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_PROPERTIES_EXT = 1000396006,
+ VK_STRUCTURE_TYPE_MICROMAP_CREATE_INFO_EXT = 1000396007,
+ VK_STRUCTURE_TYPE_MICROMAP_BUILD_SIZES_INFO_EXT = 1000396008,
+ VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_OPACITY_MICROMAP_EXT = 1000396009,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT = 1000411000,
VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT = 1000411001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT = 1000412000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE = 1000420000,
+ VK_STRUCTURE_TYPE_DESCRIPTOR_SET_BINDING_REFERENCE_VALVE = 1000420001,
+ VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE = 1000420002,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT = 1000421000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT = 1000422000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM = 1000425000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM = 1000425001,
VK_STRUCTURE_TYPE_SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM = 1000425002,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV = 1000430000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT = 1000437000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM = 1000440000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM = 1000440001,
+ VK_STRUCTURE_TYPE_IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM = 1000440002,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT = 1000455000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT = 1000455001,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT = 1000458000,
+ VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_CONTROL_EXT = 1000458001,
+ VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT = 1000458002,
+ VK_STRUCTURE_TYPE_RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT = 1000458003,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT = 1000462000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT = 1000462001,
+ VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT = 1000462002,
+ VK_STRUCTURE_TYPE_SHADER_MODULE_IDENTIFIER_EXT = 1000462003,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT = 1000342000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV = 1000464000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV = 1000464001,
+ VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV = 1000464002,
+ VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV = 1000464003,
+ VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_INFO_NV = 1000464004,
+ VK_STRUCTURE_TYPE_OPTICAL_FLOW_EXECUTE_INFO_NV = 1000464005,
+ VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV = 1000464010,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT = 1000465000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT = 1000466000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM = 1000484000,
+ VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM = 1000484001,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC = 1000485000,
+ VK_STRUCTURE_TYPE_AMIGO_PROFILING_SUBMIT_INFO_SEC = 1000485001,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT = 1000351000,
+ VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT = 1000351002,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM = 1000497000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM = 1000497001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES,
VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT,
@@ -1042,6 +1149,7 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES,
VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES,
VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO,
@@ -1098,7 +1206,11 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR = VK_STRUCTURE_TYPE_IMAGE_BLIT_2,
VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2,
VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT,
+ VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT,
VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3_KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3,
+ VK_STRUCTURE_TYPE_PIPELINE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR,
VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES,
@@ -1108,6 +1220,11 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_MAX_ENUM = 0x7FFFFFFF
} VkStructureType;
+typedef enum VkPipelineCacheHeaderVersion {
+ VK_PIPELINE_CACHE_HEADER_VERSION_ONE = 1,
+ VK_PIPELINE_CACHE_HEADER_VERSION_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineCacheHeaderVersion;
+
typedef enum VkImageLayout {
VK_IMAGE_LAYOUT_UNDEFINED = 0,
VK_IMAGE_LAYOUT_GENERAL = 1,
@@ -1148,6 +1265,7 @@ typedef enum VkImageLayout {
#ifdef VK_ENABLE_BETA_EXTENSIONS
VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR = 1000299002,
#endif
+ VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT = 1000339000,
VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL,
VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL,
VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV = VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR,
@@ -1211,17 +1329,14 @@ typedef enum VkObjectType {
VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR = 1000268000,
VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV = 1000277000,
VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA = 1000366000,
+ VK_OBJECT_TYPE_MICROMAP_EXT = 1000396000,
+ VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV = 1000464000,
VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE,
VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,
VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT,
VK_OBJECT_TYPE_MAX_ENUM = 0x7FFFFFFF
} VkObjectType;
-typedef enum VkPipelineCacheHeaderVersion {
- VK_PIPELINE_CACHE_HEADER_VERSION_ONE = 1,
- VK_PIPELINE_CACHE_HEADER_VERSION_MAX_ENUM = 0x7FFFFFFF
-} VkPipelineCacheHeaderVersion;
-
typedef enum VkVendorId {
VK_VENDOR_ID_VIV = 0x10001,
VK_VENDOR_ID_VSI = 0x10002,
@@ -1494,6 +1609,7 @@ typedef enum VkFormat {
VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG = 1000054005,
VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG = 1000054006,
VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG = 1000054007,
+ VK_FORMAT_R16G16_S10_5_NV = 1000464000,
VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK,
VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK,
VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK,
@@ -1590,6 +1706,12 @@ typedef enum VkQueryType {
#ifdef VK_ENABLE_BETA_EXTENSIONS
VK_QUERY_TYPE_VIDEO_ENCODE_BITSTREAM_BUFFER_RANGE_KHR = 1000299000,
#endif
+ VK_QUERY_TYPE_MESH_PRIMITIVES_GENERATED_EXT = 1000328000,
+ VK_QUERY_TYPE_PRIMITIVES_GENERATED_EXT = 1000382000,
+ VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_BOTTOM_LEVEL_POINTERS_KHR = 1000386000,
+ VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SIZE_KHR = 1000386001,
+ VK_QUERY_TYPE_MICROMAP_SERIALIZATION_SIZE_EXT = 1000396000,
+ VK_QUERY_TYPE_MICROMAP_COMPACTED_SIZE_EXT = 1000396001,
VK_QUERY_TYPE_MAX_ENUM = 0x7FFFFFFF
} VkQueryType;
@@ -1749,6 +1871,37 @@ typedef enum VkDynamicState {
VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT = 1000377000,
VK_DYNAMIC_STATE_LOGIC_OP_EXT = 1000377003,
VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT = 1000381000,
+ VK_DYNAMIC_STATE_TESSELLATION_DOMAIN_ORIGIN_EXT = 1000455002,
+ VK_DYNAMIC_STATE_DEPTH_CLAMP_ENABLE_EXT = 1000455003,
+ VK_DYNAMIC_STATE_POLYGON_MODE_EXT = 1000455004,
+ VK_DYNAMIC_STATE_RASTERIZATION_SAMPLES_EXT = 1000455005,
+ VK_DYNAMIC_STATE_SAMPLE_MASK_EXT = 1000455006,
+ VK_DYNAMIC_STATE_ALPHA_TO_COVERAGE_ENABLE_EXT = 1000455007,
+ VK_DYNAMIC_STATE_ALPHA_TO_ONE_ENABLE_EXT = 1000455008,
+ VK_DYNAMIC_STATE_LOGIC_OP_ENABLE_EXT = 1000455009,
+ VK_DYNAMIC_STATE_COLOR_BLEND_ENABLE_EXT = 1000455010,
+ VK_DYNAMIC_STATE_COLOR_BLEND_EQUATION_EXT = 1000455011,
+ VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT = 1000455012,
+ VK_DYNAMIC_STATE_RASTERIZATION_STREAM_EXT = 1000455013,
+ VK_DYNAMIC_STATE_CONSERVATIVE_RASTERIZATION_MODE_EXT = 1000455014,
+ VK_DYNAMIC_STATE_EXTRA_PRIMITIVE_OVERESTIMATION_SIZE_EXT = 1000455015,
+ VK_DYNAMIC_STATE_DEPTH_CLIP_ENABLE_EXT = 1000455016,
+ VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_ENABLE_EXT = 1000455017,
+ VK_DYNAMIC_STATE_COLOR_BLEND_ADVANCED_EXT = 1000455018,
+ VK_DYNAMIC_STATE_PROVOKING_VERTEX_MODE_EXT = 1000455019,
+ VK_DYNAMIC_STATE_LINE_RASTERIZATION_MODE_EXT = 1000455020,
+ VK_DYNAMIC_STATE_LINE_STIPPLE_ENABLE_EXT = 1000455021,
+ VK_DYNAMIC_STATE_DEPTH_CLIP_NEGATIVE_ONE_TO_ONE_EXT = 1000455022,
+ VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_ENABLE_NV = 1000455023,
+ VK_DYNAMIC_STATE_VIEWPORT_SWIZZLE_NV = 1000455024,
+ VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_ENABLE_NV = 1000455025,
+ VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_LOCATION_NV = 1000455026,
+ VK_DYNAMIC_STATE_COVERAGE_MODULATION_MODE_NV = 1000455027,
+ VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_ENABLE_NV = 1000455028,
+ VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_NV = 1000455029,
+ VK_DYNAMIC_STATE_SHADING_RATE_IMAGE_ENABLE_NV = 1000455030,
+ VK_DYNAMIC_STATE_REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV = 1000455031,
+ VK_DYNAMIC_STATE_COVERAGE_REDUCTION_MODE_NV = 1000455032,
VK_DYNAMIC_STATE_CULL_MODE_EXT = VK_DYNAMIC_STATE_CULL_MODE,
VK_DYNAMIC_STATE_FRONT_FACE_EXT = VK_DYNAMIC_STATE_FRONT_FACE,
VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY,
@@ -1849,8 +2002,8 @@ typedef enum VkBorderColor {
typedef enum VkFilter {
VK_FILTER_NEAREST = 0,
VK_FILTER_LINEAR = 1,
- VK_FILTER_CUBIC_IMG = 1000015000,
- VK_FILTER_CUBIC_EXT = VK_FILTER_CUBIC_IMG,
+ VK_FILTER_CUBIC_EXT = 1000015000,
+ VK_FILTER_CUBIC_IMG = VK_FILTER_CUBIC_EXT,
VK_FILTER_MAX_ENUM = 0x7FFFFFFF
} VkFilter;
@@ -1885,8 +2038,11 @@ typedef enum VkDescriptorType {
VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK = 1000138000,
VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR = 1000150000,
VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV = 1000165000,
- VK_DESCRIPTOR_TYPE_MUTABLE_VALVE = 1000351000,
+ VK_DESCRIPTOR_TYPE_SAMPLE_WEIGHT_IMAGE_QCOM = 1000440000,
+ VK_DESCRIPTOR_TYPE_BLOCK_MATCH_IMAGE_QCOM = 1000440001,
+ VK_DESCRIPTOR_TYPE_MUTABLE_EXT = 1000351000,
VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK,
+ VK_DESCRIPTOR_TYPE_MUTABLE_VALVE = VK_DESCRIPTOR_TYPE_MUTABLE_EXT,
VK_DESCRIPTOR_TYPE_MAX_ENUM = 0x7FFFFFFF
} VkDescriptorType;
@@ -2021,7 +2177,6 @@ typedef enum VkFormatFeatureFlagBits {
VK_FORMAT_FEATURE_DISJOINT_BIT = 0x00400000,
VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT = 0x00800000,
VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT = 0x00010000,
- VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG = 0x00002000,
#ifdef VK_ENABLE_BETA_EXTENSIONS
VK_FORMAT_FEATURE_VIDEO_DECODE_OUTPUT_BIT_KHR = 0x02000000,
#endif
@@ -2029,6 +2184,7 @@ typedef enum VkFormatFeatureFlagBits {
VK_FORMAT_FEATURE_VIDEO_DECODE_DPB_BIT_KHR = 0x04000000,
#endif
VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR = 0x20000000,
+ VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT = 0x00002000,
VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x01000000,
VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x40000000,
#ifdef VK_ENABLE_BETA_EXTENSIONS
@@ -2037,6 +2193,7 @@ typedef enum VkFormatFeatureFlagBits {
#ifdef VK_ENABLE_BETA_EXTENSIONS
VK_FORMAT_FEATURE_VIDEO_ENCODE_DPB_BIT_KHR = 0x10000000,
#endif
+ VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT,
VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT,
VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT,
@@ -2047,7 +2204,6 @@ typedef enum VkFormatFeatureFlagBits {
VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT,
VK_FORMAT_FEATURE_DISJOINT_BIT_KHR = VK_FORMAT_FEATURE_DISJOINT_BIT,
VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT,
- VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG,
VK_FORMAT_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkFormatFeatureFlagBits;
typedef VkFlags VkFormatFeatureFlags;
@@ -2068,6 +2224,8 @@ typedef enum VkImageCreateFlagBits {
VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV = 0x00002000,
VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT = 0x00001000,
VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT = 0x00004000,
+ VK_IMAGE_CREATE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_BIT_EXT = 0x00040000,
+ VK_IMAGE_CREATE_2D_VIEW_COMPATIBLE_BIT_EXT = 0x00020000,
VK_IMAGE_CREATE_FRAGMENT_DENSITY_MAP_OFFSET_BIT_QCOM = 0x00008000,
VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT,
VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT,
@@ -2120,11 +2278,19 @@ typedef enum VkImageUsageFlagBits {
#ifdef VK_ENABLE_BETA_EXTENSIONS
VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR = 0x00008000,
#endif
+ VK_IMAGE_USAGE_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x00080000,
VK_IMAGE_USAGE_INVOCATION_MASK_BIT_HUAWEI = 0x00040000,
+ VK_IMAGE_USAGE_SAMPLE_WEIGHT_BIT_QCOM = 0x00100000,
+ VK_IMAGE_USAGE_SAMPLE_BLOCK_MATCH_BIT_QCOM = 0x00200000,
VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
VK_IMAGE_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkImageUsageFlagBits;
typedef VkFlags VkImageUsageFlags;
+
+typedef enum VkInstanceCreateFlagBits {
+ VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR = 0x00000001,
+ VK_INSTANCE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkInstanceCreateFlagBits;
typedef VkFlags VkInstanceCreateFlags;
typedef enum VkMemoryHeapFlagBits {
@@ -2161,6 +2327,7 @@ typedef enum VkQueueFlagBits {
#ifdef VK_ENABLE_BETA_EXTENSIONS
VK_QUEUE_VIDEO_ENCODE_BIT_KHR = 0x00000040,
#endif
+ VK_QUEUE_OPTICAL_FLOW_BIT_NV = 0x00000100,
VK_QUEUE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkQueueFlagBits;
typedef VkFlags VkQueueFlags;
@@ -2195,14 +2362,16 @@ typedef enum VkPipelineStageFlagBits {
VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00040000,
VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR = 0x02000000,
VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR = 0x00200000,
- VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV = 0x00080000,
- VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV = 0x00100000,
VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT = 0x00800000,
VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00400000,
VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV = 0x00020000,
+ VK_PIPELINE_STAGE_TASK_SHADER_BIT_EXT = 0x00080000,
+ VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT = 0x00100000,
VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV = VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR,
VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR,
+ VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_EXT,
+ VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT,
VK_PIPELINE_STAGE_NONE_KHR = VK_PIPELINE_STAGE_NONE,
VK_PIPELINE_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkPipelineStageFlagBits;
@@ -2249,6 +2418,8 @@ typedef enum VkQueryPipelineStatisticFlagBits {
VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT = 0x00000100,
VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT = 0x00000200,
VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT = 0x00000400,
+ VK_QUERY_PIPELINE_STATISTIC_TASK_SHADER_INVOCATIONS_BIT_EXT = 0x00000800,
+ VK_QUERY_PIPELINE_STATISTIC_MESH_SHADER_INVOCATIONS_BIT_EXT = 0x00001000,
VK_QUERY_PIPELINE_STATISTIC_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkQueryPipelineStatisticFlagBits;
typedef VkFlags VkQueryPipelineStatisticFlags;
@@ -2307,6 +2478,8 @@ typedef enum VkBufferUsageFlagBits {
#ifdef VK_ENABLE_BETA_EXTENSIONS
VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR = 0x00010000,
#endif
+ VK_BUFFER_USAGE_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT = 0x00800000,
+ VK_BUFFER_USAGE_MICROMAP_STORAGE_BIT_EXT = 0x01000000,
VK_BUFFER_USAGE_RAY_TRACING_BIT_NV = VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR,
VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT,
VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT,
@@ -2361,7 +2534,14 @@ typedef enum VkPipelineCreateFlagBits {
VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR = 0x00000080,
VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV = 0x00040000,
VK_PIPELINE_CREATE_LIBRARY_BIT_KHR = 0x00000800,
+ VK_PIPELINE_CREATE_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT = 0x00800000,
+ VK_PIPELINE_CREATE_LINK_TIME_OPTIMIZATION_BIT_EXT = 0x00000400,
VK_PIPELINE_CREATE_RAY_TRACING_ALLOW_MOTION_BIT_NV = 0x00100000,
+ VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x02000000,
+ VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x04000000,
+ VK_PIPELINE_CREATE_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT = 0x01000000,
+ VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT_EXT = 0x08000000,
+ VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT_EXT = 0x40000000,
VK_PIPELINE_CREATE_DISPATCH_BASE = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT,
VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT,
@@ -2397,8 +2577,8 @@ typedef enum VkShaderStageFlagBits {
VK_SHADER_STAGE_MISS_BIT_KHR = 0x00000800,
VK_SHADER_STAGE_INTERSECTION_BIT_KHR = 0x00001000,
VK_SHADER_STAGE_CALLABLE_BIT_KHR = 0x00002000,
- VK_SHADER_STAGE_TASK_BIT_NV = 0x00000040,
- VK_SHADER_STAGE_MESH_BIT_NV = 0x00000080,
+ VK_SHADER_STAGE_TASK_BIT_EXT = 0x00000040,
+ VK_SHADER_STAGE_MESH_BIT_EXT = 0x00000080,
VK_SHADER_STAGE_SUBPASS_SHADING_BIT_HUAWEI = 0x00004000,
VK_SHADER_STAGE_RAYGEN_BIT_NV = VK_SHADER_STAGE_RAYGEN_BIT_KHR,
VK_SHADER_STAGE_ANY_HIT_BIT_NV = VK_SHADER_STAGE_ANY_HIT_BIT_KHR,
@@ -2406,6 +2586,8 @@ typedef enum VkShaderStageFlagBits {
VK_SHADER_STAGE_MISS_BIT_NV = VK_SHADER_STAGE_MISS_BIT_KHR,
VK_SHADER_STAGE_INTERSECTION_BIT_NV = VK_SHADER_STAGE_INTERSECTION_BIT_KHR,
VK_SHADER_STAGE_CALLABLE_BIT_NV = VK_SHADER_STAGE_CALLABLE_BIT_KHR,
+ VK_SHADER_STAGE_TASK_BIT_NV = VK_SHADER_STAGE_TASK_BIT_EXT,
+ VK_SHADER_STAGE_MESH_BIT_NV = VK_SHADER_STAGE_MESH_BIT_EXT,
VK_SHADER_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkShaderStageFlagBits;
@@ -2425,24 +2607,34 @@ typedef VkFlags VkPipelineRasterizationStateCreateFlags;
typedef VkFlags VkPipelineMultisampleStateCreateFlags;
typedef enum VkPipelineDepthStencilStateCreateFlagBits {
- VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM = 0x00000001,
- VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM = 0x00000002,
+ VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT = 0x00000001,
+ VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT = 0x00000002,
+ VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT,
+ VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT,
VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkPipelineDepthStencilStateCreateFlagBits;
typedef VkFlags VkPipelineDepthStencilStateCreateFlags;
typedef enum VkPipelineColorBlendStateCreateFlagBits {
- VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_ARM = 0x00000001,
+ VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_EXT = 0x00000001,
+ VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_ARM = VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_EXT,
VK_PIPELINE_COLOR_BLEND_STATE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkPipelineColorBlendStateCreateFlagBits;
typedef VkFlags VkPipelineColorBlendStateCreateFlags;
typedef VkFlags VkPipelineDynamicStateCreateFlags;
+
+typedef enum VkPipelineLayoutCreateFlagBits {
+ VK_PIPELINE_LAYOUT_CREATE_INDEPENDENT_SETS_BIT_EXT = 0x00000002,
+ VK_PIPELINE_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineLayoutCreateFlagBits;
typedef VkFlags VkPipelineLayoutCreateFlags;
typedef VkFlags VkShaderStageFlags;
typedef enum VkSamplerCreateFlagBits {
VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT = 0x00000001,
VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT = 0x00000002,
+ VK_SAMPLER_CREATE_NON_SEAMLESS_CUBE_MAP_BIT_EXT = 0x00000004,
+ VK_SAMPLER_CREATE_IMAGE_PROCESSING_BIT_QCOM = 0x00000010,
VK_SAMPLER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkSamplerCreateFlagBits;
typedef VkFlags VkSamplerCreateFlags;
@@ -2450,8 +2642,9 @@ typedef VkFlags VkSamplerCreateFlags;
typedef enum VkDescriptorPoolCreateFlagBits {
VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT = 0x00000001,
VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT = 0x00000002,
- VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE = 0x00000004,
+ VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT = 0x00000004,
VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT,
+ VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE = VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT,
VK_DESCRIPTOR_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkDescriptorPoolCreateFlagBits;
typedef VkFlags VkDescriptorPoolCreateFlags;
@@ -2460,8 +2653,9 @@ typedef VkFlags VkDescriptorPoolResetFlags;
typedef enum VkDescriptorSetLayoutCreateFlagBits {
VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT = 0x00000002,
VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR = 0x00000001,
- VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE = 0x00000004,
+ VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT = 0x00000004,
VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT,
+ VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT,
VK_DESCRIPTOR_SET_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkDescriptorSetLayoutCreateFlagBits;
typedef VkFlags VkDescriptorSetLayoutCreateFlags;
@@ -2476,6 +2670,7 @@ typedef enum VkDependencyFlagBits {
VK_DEPENDENCY_BY_REGION_BIT = 0x00000001,
VK_DEPENDENCY_DEVICE_GROUP_BIT = 0x00000004,
VK_DEPENDENCY_VIEW_LOCAL_BIT = 0x00000002,
+ VK_DEPENDENCY_FEEDBACK_LOOP_BIT_EXT = 0x00000008,
VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR = VK_DEPENDENCY_VIEW_LOCAL_BIT,
VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR = VK_DEPENDENCY_DEVICE_GROUP_BIT,
VK_DEPENDENCY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
@@ -2500,9 +2695,13 @@ typedef enum VkSubpassDescriptionFlagBits {
VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX = 0x00000002,
VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM = 0x00000004,
VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM = 0x00000008,
- VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_ARM = 0x00000010,
- VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM = 0x00000020,
- VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM = 0x00000040,
+ VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_EXT = 0x00000010,
+ VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT = 0x00000020,
+ VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT = 0x00000040,
+ VK_SUBPASS_DESCRIPTION_ENABLE_LEGACY_DITHERING_BIT_EXT = 0x00000080,
+ VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_ARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_EXT,
+ VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT,
+ VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT,
VK_SUBPASS_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkSubpassDescriptionFlagBits;
typedef VkFlags VkSubpassDescriptionFlags;
@@ -5505,6 +5704,7 @@ typedef enum VkDriverId {
VK_DRIVER_ID_MESA_PANVK = 20,
VK_DRIVER_ID_SAMSUNG_PROPRIETARY = 21,
VK_DRIVER_ID_MESA_VENUS = 22,
+ VK_DRIVER_ID_MESA_DOZEN = 23,
VK_DRIVER_ID_AMD_PROPRIETARY_KHR = VK_DRIVER_ID_AMD_PROPRIETARY,
VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR = VK_DRIVER_ID_AMD_OPEN_SOURCE,
VK_DRIVER_ID_MESA_RADV_KHR = VK_DRIVER_ID_MESA_RADV,
@@ -6260,10 +6460,6 @@ typedef enum VkToolPurposeFlagBits {
VK_TOOL_PURPOSE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkToolPurposeFlagBits;
typedef VkFlags VkToolPurposeFlags;
-
-typedef enum VkPrivateDataSlotCreateFlagBits {
- VK_PRIVATE_DATA_SLOT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
-} VkPrivateDataSlotCreateFlagBits;
typedef VkFlags VkPrivateDataSlotCreateFlags;
typedef VkFlags64 VkPipelineStageFlags2;
@@ -6339,8 +6535,13 @@ static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE
static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT = 0x00800000ULL;
static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV = 0x00080000ULL;
static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV = 0x00100000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_EXT = 0x00080000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_EXT = 0x00100000ULL;
static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI = 0x8000000000ULL;
static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INVOCATION_MASK_BIT_HUAWEI = 0x10000000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_COPY_BIT_KHR = 0x10000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_MICROMAP_BUILD_BIT_EXT = 0x40000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_OPTICAL_FLOW_BIT_NV = 0x20000000ULL;
typedef VkFlags64 VkAccessFlags2;
@@ -6415,6 +6616,11 @@ static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_NV =
static const VkAccessFlagBits2 VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0x01000000ULL;
static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000ULL;
static const VkAccessFlagBits2 VK_ACCESS_2_INVOCATION_MASK_READ_BIT_HUAWEI = 0x8000000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_BINDING_TABLE_READ_BIT_KHR = 0x10000000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_MICROMAP_READ_BIT_EXT = 0x100000000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_MICROMAP_WRITE_BIT_EXT = 0x200000000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_OPTICAL_FLOW_READ_BIT_NV = 0x40000000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_OPTICAL_FLOW_WRITE_BIT_NV = 0x80000000000ULL;
typedef enum VkSubmitFlagBits {
@@ -6428,6 +6634,7 @@ typedef enum VkRenderingFlagBits {
VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT = 0x00000001,
VK_RENDERING_SUSPENDING_BIT = 0x00000002,
VK_RENDERING_RESUMING_BIT = 0x00000004,
+ VK_RENDERING_ENABLE_LEGACY_DITHERING_BIT_EXT = 0x00000008,
VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT_KHR = VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT,
VK_RENDERING_SUSPENDING_BIT_KHR = VK_RENDERING_SUSPENDING_BIT,
VK_RENDERING_RESUMING_BIT_KHR = VK_RENDERING_RESUMING_BIT,
@@ -6508,6 +6715,13 @@ static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT
static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_ENCODE_DPB_BIT_KHR = 0x10000000ULL;
#endif
static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_LINEAR_COLOR_ATTACHMENT_BIT_NV = 0x4000000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_WEIGHT_IMAGE_BIT_QCOM = 0x400000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM = 0x800000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM = 0x1000000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM = 0x2000000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_IMAGE_BIT_NV = 0x10000000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_VECTOR_BIT_NV = 0x20000000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_COST_BIT_NV = 0x40000000000ULL;
typedef struct VkPhysicalDeviceVulkan13Features {
VkStructureType sType;
@@ -9367,6 +9581,23 @@ VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointData2NV(
#endif
+#define VK_KHR_fragment_shader_barycentric 1
+#define VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION 1
+#define VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME "VK_KHR_fragment_shader_barycentric"
+typedef struct VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 fragmentShaderBarycentric;
+} VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR;
+
+typedef struct VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 triStripVertexOrderIndependentOfProvokingVertex;
+} VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR;
+
+
+
#define VK_KHR_shader_subgroup_uniform_control_flow 1
#define VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_SPEC_VERSION 1
#define VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_EXTENSION_NAME "VK_KHR_shader_subgroup_uniform_control_flow"
@@ -9459,7 +9690,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage2KHR(
#define VK_KHR_format_feature_flags2 1
-#define VK_KHR_FORMAT_FEATURE_FLAGS_2_SPEC_VERSION 1
+#define VK_KHR_FORMAT_FEATURE_FLAGS_2_SPEC_VERSION 2
#define VK_KHR_FORMAT_FEATURE_FLAGS_2_EXTENSION_NAME "VK_KHR_format_feature_flags2"
typedef VkFormatFeatureFlags2 VkFormatFeatureFlags2KHR;
@@ -9469,6 +9700,47 @@ typedef VkFormatProperties3 VkFormatProperties3KHR;
+#define VK_KHR_ray_tracing_maintenance1 1
+#define VK_KHR_RAY_TRACING_MAINTENANCE_1_SPEC_VERSION 1
+#define VK_KHR_RAY_TRACING_MAINTENANCE_1_EXTENSION_NAME "VK_KHR_ray_tracing_maintenance1"
+typedef struct VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 rayTracingMaintenance1;
+ VkBool32 rayTracingPipelineTraceRaysIndirect2;
+} VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR;
+
+typedef struct VkTraceRaysIndirectCommand2KHR {
+ VkDeviceAddress raygenShaderRecordAddress;
+ VkDeviceSize raygenShaderRecordSize;
+ VkDeviceAddress missShaderBindingTableAddress;
+ VkDeviceSize missShaderBindingTableSize;
+ VkDeviceSize missShaderBindingTableStride;
+ VkDeviceAddress hitShaderBindingTableAddress;
+ VkDeviceSize hitShaderBindingTableSize;
+ VkDeviceSize hitShaderBindingTableStride;
+ VkDeviceAddress callableShaderBindingTableAddress;
+ VkDeviceSize callableShaderBindingTableSize;
+ VkDeviceSize callableShaderBindingTableStride;
+ uint32_t width;
+ uint32_t height;
+ uint32_t depth;
+} VkTraceRaysIndirectCommand2KHR;
+
+typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysIndirect2KHR)(VkCommandBuffer commandBuffer, VkDeviceAddress indirectDeviceAddress);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysIndirect2KHR(
+ VkCommandBuffer commandBuffer,
+ VkDeviceAddress indirectDeviceAddress);
+#endif
+
+
+#define VK_KHR_portability_enumeration 1
+#define VK_KHR_PORTABILITY_ENUMERATION_SPEC_VERSION 1
+#define VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME "VK_KHR_portability_enumeration"
+
+
#define VK_KHR_maintenance4 1
#define VK_KHR_MAINTENANCE_4_SPEC_VERSION 2
#define VK_KHR_MAINTENANCE_4_EXTENSION_NAME "VK_KHR_maintenance4"
@@ -10154,6 +10426,51 @@ typedef struct VkPhysicalDeviceASTCDecodeFeaturesEXT {
+#define VK_EXT_pipeline_robustness 1
+#define VK_EXT_PIPELINE_ROBUSTNESS_SPEC_VERSION 1
+#define VK_EXT_PIPELINE_ROBUSTNESS_EXTENSION_NAME "VK_EXT_pipeline_robustness"
+
+typedef enum VkPipelineRobustnessBufferBehaviorEXT {
+ VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT_EXT = 0,
+ VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED_EXT = 1,
+ VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT = 2,
+ VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2_EXT = 3,
+ VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkPipelineRobustnessBufferBehaviorEXT;
+
+typedef enum VkPipelineRobustnessImageBehaviorEXT {
+ VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT_EXT = 0,
+ VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED_EXT = 1,
+ VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_EXT = 2,
+ VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2_EXT = 3,
+ VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkPipelineRobustnessImageBehaviorEXT;
+typedef struct VkPhysicalDevicePipelineRobustnessFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 pipelineRobustness;
+} VkPhysicalDevicePipelineRobustnessFeaturesEXT;
+
+typedef struct VkPhysicalDevicePipelineRobustnessPropertiesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkPipelineRobustnessBufferBehaviorEXT defaultRobustnessStorageBuffers;
+ VkPipelineRobustnessBufferBehaviorEXT defaultRobustnessUniformBuffers;
+ VkPipelineRobustnessBufferBehaviorEXT defaultRobustnessVertexInputs;
+ VkPipelineRobustnessImageBehaviorEXT defaultRobustnessImages;
+} VkPhysicalDevicePipelineRobustnessPropertiesEXT;
+
+typedef struct VkPipelineRobustnessCreateInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkPipelineRobustnessBufferBehaviorEXT storageBuffers;
+ VkPipelineRobustnessBufferBehaviorEXT uniformBuffers;
+ VkPipelineRobustnessBufferBehaviorEXT vertexInputs;
+ VkPipelineRobustnessImageBehaviorEXT images;
+} VkPipelineRobustnessCreateInfoEXT;
+
+
+
#define VK_EXT_conditional_rendering 1
#define VK_EXT_CONDITIONAL_RENDERING_SPEC_VERSION 2
#define VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME "VK_EXT_conditional_rendering"
@@ -10609,6 +10926,7 @@ typedef enum VkDebugUtilsMessageTypeFlagBitsEXT {
VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT = 0x00000001,
VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT = 0x00000002,
VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT = 0x00000004,
+ VK_DEBUG_UTILS_MESSAGE_TYPE_DEVICE_ADDRESS_BINDING_BIT_EXT = 0x00000008,
VK_DEBUG_UTILS_MESSAGE_TYPE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
} VkDebugUtilsMessageTypeFlagBitsEXT;
typedef VkFlags VkDebugUtilsMessageTypeFlagsEXT;
@@ -11279,6 +11597,8 @@ typedef enum VkGeometryInstanceFlagBitsKHR {
VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR = 0x00000002,
VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR = 0x00000004,
VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR = 0x00000008,
+ VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_EXT = 0x00000010,
+ VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_EXT = 0x00000020,
VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR = VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR,
VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR,
VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR,
@@ -11299,6 +11619,9 @@ typedef enum VkBuildAccelerationStructureFlagBitsKHR {
VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR = 0x00000008,
VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR = 0x00000010,
VK_BUILD_ACCELERATION_STRUCTURE_MOTION_BIT_NV = 0x00000020,
+ VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_EXT = 0x00000040,
+ VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_EXT = 0x00000080,
+ VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT = 0x00000100,
VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR,
VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR,
VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR,
@@ -11884,11 +12207,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectCountNV(
#define VK_NV_fragment_shader_barycentric 1
#define VK_NV_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION 1
#define VK_NV_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME "VK_NV_fragment_shader_barycentric"
-typedef struct VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV {
- VkStructureType sType;
- void* pNext;
- VkBool32 fragmentShaderBarycentric;
-} VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV;
+typedef VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV;
@@ -12780,6 +13099,7 @@ typedef enum VkIndirectCommandsTokenTypeNV {
VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV = 5,
VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV = 6,
VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV = 7,
+ VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV = 1000328000,
VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_NV = 0x7FFFFFFF
} VkIndirectCommandsTokenTypeNV;
@@ -12989,7 +13309,7 @@ typedef VkPhysicalDeviceTexelBufferAlignmentProperties VkPhysicalDeviceTexelBuff
#define VK_QCOM_render_pass_transform 1
-#define VK_QCOM_RENDER_PASS_TRANSFORM_SPEC_VERSION 2
+#define VK_QCOM_RENDER_PASS_TRANSFORM_SPEC_VERSION 3
#define VK_QCOM_RENDER_PASS_TRANSFORM_EXTENSION_NAME "VK_QCOM_render_pass_transform"
typedef struct VkRenderPassTransformBeginInfoQCOM {
VkStructureType sType;
@@ -13121,6 +13441,29 @@ typedef struct VkPhysicalDeviceCustomBorderColorFeaturesEXT {
#define VK_GOOGLE_USER_TYPE_EXTENSION_NAME "VK_GOOGLE_user_type"
+#define VK_NV_present_barrier 1
+#define VK_NV_PRESENT_BARRIER_SPEC_VERSION 1
+#define VK_NV_PRESENT_BARRIER_EXTENSION_NAME "VK_NV_present_barrier"
+typedef struct VkPhysicalDevicePresentBarrierFeaturesNV {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 presentBarrier;
+} VkPhysicalDevicePresentBarrierFeaturesNV;
+
+typedef struct VkSurfaceCapabilitiesPresentBarrierNV {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 presentBarrierSupported;
+} VkSurfaceCapabilitiesPresentBarrierNV;
+
+typedef struct VkSwapchainPresentBarrierCreateInfoNV {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 presentBarrierEnable;
+} VkSwapchainPresentBarrierCreateInfoNV;
+
+
+
#define VK_EXT_private_data 1
typedef VkPrivateDataSlot VkPrivateDataSlotEXT;
@@ -13128,8 +13471,6 @@ typedef VkPrivateDataSlot VkPrivateDataSlotEXT;
#define VK_EXT_PRIVATE_DATA_EXTENSION_NAME "VK_EXT_private_data"
typedef VkPrivateDataSlotCreateFlags VkPrivateDataSlotCreateFlagsEXT;
-typedef VkPrivateDataSlotCreateFlagBits VkPrivateDataSlotCreateFlagBitsEXT;
-
typedef VkPhysicalDevicePrivateDataFeatures VkPhysicalDevicePrivateDataFeaturesEXT;
typedef VkDevicePrivateDataCreateInfo VkDevicePrivateDataCreateInfoEXT;
@@ -13177,13 +13518,14 @@ typedef VkPhysicalDevicePipelineCreationCacheControlFeatures VkPhysicalDevicePip
#define VK_NV_device_diagnostics_config 1
-#define VK_NV_DEVICE_DIAGNOSTICS_CONFIG_SPEC_VERSION 1
+#define VK_NV_DEVICE_DIAGNOSTICS_CONFIG_SPEC_VERSION 2
#define VK_NV_DEVICE_DIAGNOSTICS_CONFIG_EXTENSION_NAME "VK_NV_device_diagnostics_config"
typedef enum VkDeviceDiagnosticsConfigFlagBitsNV {
VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV = 0x00000001,
VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV = 0x00000002,
VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV = 0x00000004,
+ VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_ERROR_REPORTING_BIT_NV = 0x00000008,
VK_DEVICE_DIAGNOSTICS_CONFIG_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
} VkDeviceDiagnosticsConfigFlagBitsNV;
typedef VkFlags VkDeviceDiagnosticsConfigFlagsNV;
@@ -13206,6 +13548,50 @@ typedef struct VkDeviceDiagnosticsConfigCreateInfoNV {
#define VK_QCOM_RENDER_PASS_STORE_OPS_EXTENSION_NAME "VK_QCOM_render_pass_store_ops"
+#define VK_EXT_graphics_pipeline_library 1
+#define VK_EXT_GRAPHICS_PIPELINE_LIBRARY_SPEC_VERSION 1
+#define VK_EXT_GRAPHICS_PIPELINE_LIBRARY_EXTENSION_NAME "VK_EXT_graphics_pipeline_library"
+
+typedef enum VkGraphicsPipelineLibraryFlagBitsEXT {
+ VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT = 0x00000001,
+ VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT = 0x00000002,
+ VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT = 0x00000004,
+ VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT = 0x00000008,
+ VK_GRAPHICS_PIPELINE_LIBRARY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkGraphicsPipelineLibraryFlagBitsEXT;
+typedef VkFlags VkGraphicsPipelineLibraryFlagsEXT;
+typedef struct VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 graphicsPipelineLibrary;
+} VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT;
+
+typedef struct VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 graphicsPipelineLibraryFastLinking;
+ VkBool32 graphicsPipelineLibraryIndependentInterpolationDecoration;
+} VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT;
+
+typedef struct VkGraphicsPipelineLibraryCreateInfoEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkGraphicsPipelineLibraryFlagsEXT flags;
+} VkGraphicsPipelineLibraryCreateInfoEXT;
+
+
+
+#define VK_AMD_shader_early_and_late_fragment_tests 1
+#define VK_AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_SPEC_VERSION 1
+#define VK_AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_EXTENSION_NAME "VK_AMD_shader_early_and_late_fragment_tests"
+typedef struct VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 shaderEarlyAndLateFragmentTests;
+} VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD;
+
+
+
#define VK_NV_fragment_shading_rate_enums 1
#define VK_NV_FRAGMENT_SHADING_RATE_ENUMS_SPEC_VERSION 1
#define VK_NV_FRAGMENT_SHADING_RATE_ENUMS_EXTENSION_NAME "VK_NV_fragment_shading_rate_enums"
@@ -13402,6 +13788,103 @@ typedef VkPhysicalDeviceImageRobustnessFeatures VkPhysicalDeviceImageRobustnessF
+#define VK_EXT_image_compression_control 1
+#define VK_EXT_IMAGE_COMPRESSION_CONTROL_SPEC_VERSION 1
+#define VK_EXT_IMAGE_COMPRESSION_CONTROL_EXTENSION_NAME "VK_EXT_image_compression_control"
+
+typedef enum VkImageCompressionFlagBitsEXT {
+ VK_IMAGE_COMPRESSION_DEFAULT_EXT = 0,
+ VK_IMAGE_COMPRESSION_FIXED_RATE_DEFAULT_EXT = 0x00000001,
+ VK_IMAGE_COMPRESSION_FIXED_RATE_EXPLICIT_EXT = 0x00000002,
+ VK_IMAGE_COMPRESSION_DISABLED_EXT = 0x00000004,
+ VK_IMAGE_COMPRESSION_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkImageCompressionFlagBitsEXT;
+typedef VkFlags VkImageCompressionFlagsEXT;
+
+typedef enum VkImageCompressionFixedRateFlagBitsEXT {
+ VK_IMAGE_COMPRESSION_FIXED_RATE_NONE_EXT = 0,
+ VK_IMAGE_COMPRESSION_FIXED_RATE_1BPC_BIT_EXT = 0x00000001,
+ VK_IMAGE_COMPRESSION_FIXED_RATE_2BPC_BIT_EXT = 0x00000002,
+ VK_IMAGE_COMPRESSION_FIXED_RATE_3BPC_BIT_EXT = 0x00000004,
+ VK_IMAGE_COMPRESSION_FIXED_RATE_4BPC_BIT_EXT = 0x00000008,
+ VK_IMAGE_COMPRESSION_FIXED_RATE_5BPC_BIT_EXT = 0x00000010,
+ VK_IMAGE_COMPRESSION_FIXED_RATE_6BPC_BIT_EXT = 0x00000020,
+ VK_IMAGE_COMPRESSION_FIXED_RATE_7BPC_BIT_EXT = 0x00000040,
+ VK_IMAGE_COMPRESSION_FIXED_RATE_8BPC_BIT_EXT = 0x00000080,
+ VK_IMAGE_COMPRESSION_FIXED_RATE_9BPC_BIT_EXT = 0x00000100,
+ VK_IMAGE_COMPRESSION_FIXED_RATE_10BPC_BIT_EXT = 0x00000200,
+ VK_IMAGE_COMPRESSION_FIXED_RATE_11BPC_BIT_EXT = 0x00000400,
+ VK_IMAGE_COMPRESSION_FIXED_RATE_12BPC_BIT_EXT = 0x00000800,
+ VK_IMAGE_COMPRESSION_FIXED_RATE_13BPC_BIT_EXT = 0x00001000,
+ VK_IMAGE_COMPRESSION_FIXED_RATE_14BPC_BIT_EXT = 0x00002000,
+ VK_IMAGE_COMPRESSION_FIXED_RATE_15BPC_BIT_EXT = 0x00004000,
+ VK_IMAGE_COMPRESSION_FIXED_RATE_16BPC_BIT_EXT = 0x00008000,
+ VK_IMAGE_COMPRESSION_FIXED_RATE_17BPC_BIT_EXT = 0x00010000,
+ VK_IMAGE_COMPRESSION_FIXED_RATE_18BPC_BIT_EXT = 0x00020000,
+ VK_IMAGE_COMPRESSION_FIXED_RATE_19BPC_BIT_EXT = 0x00040000,
+ VK_IMAGE_COMPRESSION_FIXED_RATE_20BPC_BIT_EXT = 0x00080000,
+ VK_IMAGE_COMPRESSION_FIXED_RATE_21BPC_BIT_EXT = 0x00100000,
+ VK_IMAGE_COMPRESSION_FIXED_RATE_22BPC_BIT_EXT = 0x00200000,
+ VK_IMAGE_COMPRESSION_FIXED_RATE_23BPC_BIT_EXT = 0x00400000,
+ VK_IMAGE_COMPRESSION_FIXED_RATE_24BPC_BIT_EXT = 0x00800000,
+ VK_IMAGE_COMPRESSION_FIXED_RATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkImageCompressionFixedRateFlagBitsEXT;
+typedef VkFlags VkImageCompressionFixedRateFlagsEXT;
+typedef struct VkPhysicalDeviceImageCompressionControlFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 imageCompressionControl;
+} VkPhysicalDeviceImageCompressionControlFeaturesEXT;
+
+typedef struct VkImageCompressionControlEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkImageCompressionFlagsEXT flags;
+ uint32_t compressionControlPlaneCount;
+ VkImageCompressionFixedRateFlagsEXT* pFixedRateFlags;
+} VkImageCompressionControlEXT;
+
+typedef struct VkSubresourceLayout2EXT {
+ VkStructureType sType;
+ void* pNext;
+ VkSubresourceLayout subresourceLayout;
+} VkSubresourceLayout2EXT;
+
+typedef struct VkImageSubresource2EXT {
+ VkStructureType sType;
+ void* pNext;
+ VkImageSubresource imageSubresource;
+} VkImageSubresource2EXT;
+
+typedef struct VkImageCompressionPropertiesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkImageCompressionFlagsEXT imageCompressionFlags;
+ VkImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags;
+} VkImageCompressionPropertiesEXT;
+
+typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout2EXT)(VkDevice device, VkImage image, const VkImageSubresource2EXT* pSubresource, VkSubresourceLayout2EXT* pLayout);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout2EXT(
+ VkDevice device,
+ VkImage image,
+ const VkImageSubresource2EXT* pSubresource,
+ VkSubresourceLayout2EXT* pLayout);
+#endif
+
+
+#define VK_EXT_attachment_feedback_loop_layout 1
+#define VK_EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_SPEC_VERSION 2
+#define VK_EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_EXTENSION_NAME "VK_EXT_attachment_feedback_loop_layout"
+typedef struct VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 attachmentFeedbackLoopLayout;
+} VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;
+
+
+
#define VK_EXT_4444_formats 1
#define VK_EXT_4444_FORMATS_SPEC_VERSION 1
#define VK_EXT_4444_FORMATS_EXTENSION_NAME "VK_EXT_4444_formats"
@@ -13414,16 +13897,95 @@ typedef struct VkPhysicalDevice4444FormatsFeaturesEXT {
+#define VK_EXT_device_fault 1
+#define VK_EXT_DEVICE_FAULT_SPEC_VERSION 1
+#define VK_EXT_DEVICE_FAULT_EXTENSION_NAME "VK_EXT_device_fault"
+
+typedef enum VkDeviceFaultAddressTypeEXT {
+ VK_DEVICE_FAULT_ADDRESS_TYPE_NONE_EXT = 0,
+ VK_DEVICE_FAULT_ADDRESS_TYPE_READ_INVALID_EXT = 1,
+ VK_DEVICE_FAULT_ADDRESS_TYPE_WRITE_INVALID_EXT = 2,
+ VK_DEVICE_FAULT_ADDRESS_TYPE_EXECUTE_INVALID_EXT = 3,
+ VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_UNKNOWN_EXT = 4,
+ VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_INVALID_EXT = 5,
+ VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_FAULT_EXT = 6,
+ VK_DEVICE_FAULT_ADDRESS_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDeviceFaultAddressTypeEXT;
+
+typedef enum VkDeviceFaultVendorBinaryHeaderVersionEXT {
+ VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_ONE_EXT = 1,
+ VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDeviceFaultVendorBinaryHeaderVersionEXT;
+typedef struct VkPhysicalDeviceFaultFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 deviceFault;
+ VkBool32 deviceFaultVendorBinary;
+} VkPhysicalDeviceFaultFeaturesEXT;
+
+typedef struct VkDeviceFaultCountsEXT {
+ VkStructureType sType;
+ void* pNext;
+ uint32_t addressInfoCount;
+ uint32_t vendorInfoCount;
+ VkDeviceSize vendorBinarySize;
+} VkDeviceFaultCountsEXT;
+
+typedef struct VkDeviceFaultAddressInfoEXT {
+ VkDeviceFaultAddressTypeEXT addressType;
+ VkDeviceAddress reportedAddress;
+ VkDeviceSize addressPrecision;
+} VkDeviceFaultAddressInfoEXT;
+
+typedef struct VkDeviceFaultVendorInfoEXT {
+ char description[VK_MAX_DESCRIPTION_SIZE];
+ uint64_t vendorFaultCode;
+ uint64_t vendorFaultData;
+} VkDeviceFaultVendorInfoEXT;
+
+typedef struct VkDeviceFaultInfoEXT {
+ VkStructureType sType;
+ void* pNext;
+ char description[VK_MAX_DESCRIPTION_SIZE];
+ VkDeviceFaultAddressInfoEXT* pAddressInfos;
+ VkDeviceFaultVendorInfoEXT* pVendorInfos;
+ void* pVendorBinaryData;
+} VkDeviceFaultInfoEXT;
+
+typedef struct VkDeviceFaultVendorBinaryHeaderVersionOneEXT {
+ uint32_t headerSize;
+ VkDeviceFaultVendorBinaryHeaderVersionEXT headerVersion;
+ uint32_t vendorID;
+ uint32_t deviceID;
+ uint32_t driverVersion;
+ uint8_t pipelineCacheUUID[VK_UUID_SIZE];
+ uint32_t applicationNameOffset;
+ uint32_t applicationVersion;
+ uint32_t engineNameOffset;
+} VkDeviceFaultVendorBinaryHeaderVersionOneEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceFaultInfoEXT)(VkDevice device, VkDeviceFaultCountsEXT* pFaultCounts, VkDeviceFaultInfoEXT* pFaultInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceFaultInfoEXT(
+ VkDevice device,
+ VkDeviceFaultCountsEXT* pFaultCounts,
+ VkDeviceFaultInfoEXT* pFaultInfo);
+#endif
+
+
#define VK_ARM_rasterization_order_attachment_access 1
#define VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION 1
#define VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME "VK_ARM_rasterization_order_attachment_access"
-typedef struct VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM {
+typedef struct VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT {
VkStructureType sType;
void* pNext;
VkBool32 rasterizationOrderColorAttachmentAccess;
VkBool32 rasterizationOrderDepthAttachmentAccess;
VkBool32 rasterizationOrderStencilAttachmentAccess;
-} VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM;
+} VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
+
+typedef VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM;
@@ -13459,23 +14021,29 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetWinrtDisplayNV(
#define VK_VALVE_mutable_descriptor_type 1
#define VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION 1
#define VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME "VK_VALVE_mutable_descriptor_type"
-typedef struct VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE {
+typedef struct VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT {
VkStructureType sType;
void* pNext;
VkBool32 mutableDescriptorType;
-} VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE;
+} VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT;
-typedef struct VkMutableDescriptorTypeListVALVE {
+typedef VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE;
+
+typedef struct VkMutableDescriptorTypeListEXT {
uint32_t descriptorTypeCount;
const VkDescriptorType* pDescriptorTypes;
-} VkMutableDescriptorTypeListVALVE;
+} VkMutableDescriptorTypeListEXT;
-typedef struct VkMutableDescriptorTypeCreateInfoVALVE {
- VkStructureType sType;
- const void* pNext;
- uint32_t mutableDescriptorTypeListCount;
- const VkMutableDescriptorTypeListVALVE* pMutableDescriptorTypeLists;
-} VkMutableDescriptorTypeCreateInfoVALVE;
+typedef VkMutableDescriptorTypeListEXT VkMutableDescriptorTypeListVALVE;
+
+typedef struct VkMutableDescriptorTypeCreateInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ uint32_t mutableDescriptorTypeListCount;
+ const VkMutableDescriptorTypeListEXT* pMutableDescriptorTypeLists;
+} VkMutableDescriptorTypeCreateInfoEXT;
+
+typedef VkMutableDescriptorTypeCreateInfoEXT VkMutableDescriptorTypeCreateInfoVALVE;
@@ -13534,6 +14102,38 @@ typedef struct VkPhysicalDeviceDrmPropertiesEXT {
+#define VK_EXT_device_address_binding_report 1
+#define VK_EXT_DEVICE_ADDRESS_BINDING_REPORT_SPEC_VERSION 1
+#define VK_EXT_DEVICE_ADDRESS_BINDING_REPORT_EXTENSION_NAME "VK_EXT_device_address_binding_report"
+
+typedef enum VkDeviceAddressBindingTypeEXT {
+ VK_DEVICE_ADDRESS_BINDING_TYPE_BIND_EXT = 0,
+ VK_DEVICE_ADDRESS_BINDING_TYPE_UNBIND_EXT = 1,
+ VK_DEVICE_ADDRESS_BINDING_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDeviceAddressBindingTypeEXT;
+
+typedef enum VkDeviceAddressBindingFlagBitsEXT {
+ VK_DEVICE_ADDRESS_BINDING_INTERNAL_OBJECT_BIT_EXT = 0x00000001,
+ VK_DEVICE_ADDRESS_BINDING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDeviceAddressBindingFlagBitsEXT;
+typedef VkFlags VkDeviceAddressBindingFlagsEXT;
+typedef struct VkPhysicalDeviceAddressBindingReportFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 reportAddressBinding;
+} VkPhysicalDeviceAddressBindingReportFeaturesEXT;
+
+typedef struct VkDeviceAddressBindingCallbackDataEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkDeviceAddressBindingFlagsEXT flags;
+ VkDeviceAddress baseAddress;
+ VkDeviceSize size;
+ VkDeviceAddressBindingTypeEXT bindingType;
+} VkDeviceAddressBindingCallbackDataEXT;
+
+
+
#define VK_EXT_depth_clip_control 1
#define VK_EXT_DEPTH_CLIP_CONTROL_SPEC_VERSION 1
#define VK_EXT_DEPTH_CLIP_CONTROL_EXTENSION_NAME "VK_EXT_depth_clip_control"
@@ -13645,6 +14245,57 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryRemoteAddressNV(
#endif
+#define VK_EXT_pipeline_properties 1
+#define VK_EXT_PIPELINE_PROPERTIES_SPEC_VERSION 1
+#define VK_EXT_PIPELINE_PROPERTIES_EXTENSION_NAME "VK_EXT_pipeline_properties"
+typedef VkPipelineInfoKHR VkPipelineInfoEXT;
+
+typedef struct VkPipelinePropertiesIdentifierEXT {
+ VkStructureType sType;
+ void* pNext;
+ uint8_t pipelineIdentifier[VK_UUID_SIZE];
+} VkPipelinePropertiesIdentifierEXT;
+
+typedef struct VkPhysicalDevicePipelinePropertiesFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 pipelinePropertiesIdentifier;
+} VkPhysicalDevicePipelinePropertiesFeaturesEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPipelinePropertiesEXT)(VkDevice device, const VkPipelineInfoEXT* pPipelineInfo, VkBaseOutStructure* pPipelineProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelinePropertiesEXT(
+ VkDevice device,
+ const VkPipelineInfoEXT* pPipelineInfo,
+ VkBaseOutStructure* pPipelineProperties);
+#endif
+
+
+#define VK_EXT_multisampled_render_to_single_sampled 1
+#define VK_EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_SPEC_VERSION 1
+#define VK_EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_EXTENSION_NAME "VK_EXT_multisampled_render_to_single_sampled"
+typedef struct VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 multisampledRenderToSingleSampled;
+} VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT;
+
+typedef struct VkSubpassResolvePerformanceQueryEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 optimal;
+} VkSubpassResolvePerformanceQueryEXT;
+
+typedef struct VkMultisampledRenderToSingleSampledInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkBool32 multisampledRenderToSingleSampledEnable;
+ VkSampleCountFlagBits rasterizationSamples;
+} VkMultisampledRenderToSingleSampledInfoEXT;
+
+
+
#define VK_EXT_extended_dynamic_state2 1
#define VK_EXT_EXTENDED_DYNAMIC_STATE_2_SPEC_VERSION 1
#define VK_EXT_EXTENDED_DYNAMIC_STATE_2_EXTENSION_NAME "VK_EXT_extended_dynamic_state2"
@@ -13711,6 +14362,19 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetColorWrite
#endif
+#define VK_EXT_primitives_generated_query 1
+#define VK_EXT_PRIMITIVES_GENERATED_QUERY_SPEC_VERSION 1
+#define VK_EXT_PRIMITIVES_GENERATED_QUERY_EXTENSION_NAME "VK_EXT_primitives_generated_query"
+typedef struct VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 primitivesGeneratedQuery;
+ VkBool32 primitivesGeneratedQueryWithRasterizerDiscard;
+ VkBool32 primitivesGeneratedQueryWithNonZeroStreams;
+} VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT;
+
+
+
#define VK_EXT_global_priority_query 1
#define VK_EXT_GLOBAL_PRIORITY_QUERY_SPEC_VERSION 1
#define VK_EXT_GLOBAL_PRIORITY_QUERY_EXTENSION_NAME "VK_EXT_global_priority_query"
@@ -13787,6 +14451,286 @@ VKAPI_ATTR void VKAPI_CALL vkCmdDrawMultiIndexedEXT(
#endif
+#define VK_EXT_image_2d_view_of_3d 1
+#define VK_EXT_IMAGE_2D_VIEW_OF_3D_SPEC_VERSION 1
+#define VK_EXT_IMAGE_2D_VIEW_OF_3D_EXTENSION_NAME "VK_EXT_image_2d_view_of_3d"
+typedef struct VkPhysicalDeviceImage2DViewOf3DFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 image2DViewOf3D;
+ VkBool32 sampler2DViewOf3D;
+} VkPhysicalDeviceImage2DViewOf3DFeaturesEXT;
+
+
+
+#define VK_EXT_opacity_micromap 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkMicromapEXT)
+#define VK_EXT_OPACITY_MICROMAP_SPEC_VERSION 2
+#define VK_EXT_OPACITY_MICROMAP_EXTENSION_NAME "VK_EXT_opacity_micromap"
+
+typedef enum VkMicromapTypeEXT {
+ VK_MICROMAP_TYPE_OPACITY_MICROMAP_EXT = 0,
+ VK_MICROMAP_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkMicromapTypeEXT;
+
+typedef enum VkBuildMicromapModeEXT {
+ VK_BUILD_MICROMAP_MODE_BUILD_EXT = 0,
+ VK_BUILD_MICROMAP_MODE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkBuildMicromapModeEXT;
+
+typedef enum VkCopyMicromapModeEXT {
+ VK_COPY_MICROMAP_MODE_CLONE_EXT = 0,
+ VK_COPY_MICROMAP_MODE_SERIALIZE_EXT = 1,
+ VK_COPY_MICROMAP_MODE_DESERIALIZE_EXT = 2,
+ VK_COPY_MICROMAP_MODE_COMPACT_EXT = 3,
+ VK_COPY_MICROMAP_MODE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkCopyMicromapModeEXT;
+
+typedef enum VkOpacityMicromapFormatEXT {
+ VK_OPACITY_MICROMAP_FORMAT_2_STATE_EXT = 1,
+ VK_OPACITY_MICROMAP_FORMAT_4_STATE_EXT = 2,
+ VK_OPACITY_MICROMAP_FORMAT_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkOpacityMicromapFormatEXT;
+
+typedef enum VkOpacityMicromapSpecialIndexEXT {
+ VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_TRANSPARENT_EXT = -1,
+ VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_OPAQUE_EXT = -2,
+ VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_TRANSPARENT_EXT = -3,
+ VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_OPAQUE_EXT = -4,
+ VK_OPACITY_MICROMAP_SPECIAL_INDEX_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkOpacityMicromapSpecialIndexEXT;
+
+typedef enum VkAccelerationStructureCompatibilityKHR {
+ VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR = 0,
+ VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR = 1,
+ VK_ACCELERATION_STRUCTURE_COMPATIBILITY_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkAccelerationStructureCompatibilityKHR;
+
+typedef enum VkAccelerationStructureBuildTypeKHR {
+ VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR = 0,
+ VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR = 1,
+ VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR = 2,
+ VK_ACCELERATION_STRUCTURE_BUILD_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkAccelerationStructureBuildTypeKHR;
+
+typedef enum VkBuildMicromapFlagBitsEXT {
+ VK_BUILD_MICROMAP_PREFER_FAST_TRACE_BIT_EXT = 0x00000001,
+ VK_BUILD_MICROMAP_PREFER_FAST_BUILD_BIT_EXT = 0x00000002,
+ VK_BUILD_MICROMAP_ALLOW_COMPACTION_BIT_EXT = 0x00000004,
+ VK_BUILD_MICROMAP_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkBuildMicromapFlagBitsEXT;
+typedef VkFlags VkBuildMicromapFlagsEXT;
+
+typedef enum VkMicromapCreateFlagBitsEXT {
+ VK_MICROMAP_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT = 0x00000001,
+ VK_MICROMAP_CREATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkMicromapCreateFlagBitsEXT;
+typedef VkFlags VkMicromapCreateFlagsEXT;
+typedef struct VkMicromapUsageEXT {
+ uint32_t count;
+ uint32_t subdivisionLevel;
+ uint32_t format;
+} VkMicromapUsageEXT;
+
+typedef union VkDeviceOrHostAddressKHR {
+ VkDeviceAddress deviceAddress;
+ void* hostAddress;
+} VkDeviceOrHostAddressKHR;
+
+typedef struct VkMicromapBuildInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkMicromapTypeEXT type;
+ VkBuildMicromapFlagsEXT flags;
+ VkBuildMicromapModeEXT mode;
+ VkMicromapEXT dstMicromap;
+ uint32_t usageCountsCount;
+ const VkMicromapUsageEXT* pUsageCounts;
+ const VkMicromapUsageEXT* const* ppUsageCounts;
+ VkDeviceOrHostAddressConstKHR data;
+ VkDeviceOrHostAddressKHR scratchData;
+ VkDeviceOrHostAddressConstKHR triangleArray;
+ VkDeviceSize triangleArrayStride;
+} VkMicromapBuildInfoEXT;
+
+typedef struct VkMicromapCreateInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkMicromapCreateFlagsEXT createFlags;
+ VkBuffer buffer;
+ VkDeviceSize offset;
+ VkDeviceSize size;
+ VkMicromapTypeEXT type;
+ VkDeviceAddress deviceAddress;
+} VkMicromapCreateInfoEXT;
+
+typedef struct VkPhysicalDeviceOpacityMicromapFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 micromap;
+ VkBool32 micromapCaptureReplay;
+ VkBool32 micromapHostCommands;
+} VkPhysicalDeviceOpacityMicromapFeaturesEXT;
+
+typedef struct VkPhysicalDeviceOpacityMicromapPropertiesEXT {
+ VkStructureType sType;
+ void* pNext;
+ uint32_t maxOpacity2StateSubdivisionLevel;
+ uint32_t maxOpacity4StateSubdivisionLevel;
+} VkPhysicalDeviceOpacityMicromapPropertiesEXT;
+
+typedef struct VkMicromapVersionInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ const uint8_t* pVersionData;
+} VkMicromapVersionInfoEXT;
+
+typedef struct VkCopyMicromapToMemoryInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkMicromapEXT src;
+ VkDeviceOrHostAddressKHR dst;
+ VkCopyMicromapModeEXT mode;
+} VkCopyMicromapToMemoryInfoEXT;
+
+typedef struct VkCopyMemoryToMicromapInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkDeviceOrHostAddressConstKHR src;
+ VkMicromapEXT dst;
+ VkCopyMicromapModeEXT mode;
+} VkCopyMemoryToMicromapInfoEXT;
+
+typedef struct VkCopyMicromapInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkMicromapEXT src;
+ VkMicromapEXT dst;
+ VkCopyMicromapModeEXT mode;
+} VkCopyMicromapInfoEXT;
+
+typedef struct VkMicromapBuildSizesInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkDeviceSize micromapSize;
+ VkDeviceSize buildScratchSize;
+ VkBool32 discardable;
+} VkMicromapBuildSizesInfoEXT;
+
+typedef struct VkAccelerationStructureTrianglesOpacityMicromapEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkIndexType indexType;
+ VkDeviceOrHostAddressConstKHR indexBuffer;
+ VkDeviceSize indexStride;
+ uint32_t baseTriangle;
+ uint32_t usageCountsCount;
+ const VkMicromapUsageEXT* pUsageCounts;
+ const VkMicromapUsageEXT* const* ppUsageCounts;
+ VkMicromapEXT micromap;
+} VkAccelerationStructureTrianglesOpacityMicromapEXT;
+
+typedef struct VkMicromapTriangleEXT {
+ uint32_t dataOffset;
+ uint16_t subdivisionLevel;
+ uint16_t format;
+} VkMicromapTriangleEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateMicromapEXT)(VkDevice device, const VkMicromapCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkMicromapEXT* pMicromap);
+typedef void (VKAPI_PTR *PFN_vkDestroyMicromapEXT)(VkDevice device, VkMicromapEXT micromap, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkCmdBuildMicromapsEXT)(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkMicromapBuildInfoEXT* pInfos);
+typedef VkResult (VKAPI_PTR *PFN_vkBuildMicromapsEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, const VkMicromapBuildInfoEXT* pInfos);
+typedef VkResult (VKAPI_PTR *PFN_vkCopyMicromapEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapInfoEXT* pInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkCopyMicromapToMemoryEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapToMemoryInfoEXT* pInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToMicromapEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToMicromapInfoEXT* pInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkWriteMicromapsPropertiesEXT)(VkDevice device, uint32_t micromapCount, const VkMicromapEXT* pMicromaps, VkQueryType queryType, size_t dataSize, void* pData, size_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyMicromapEXT)(VkCommandBuffer commandBuffer, const VkCopyMicromapInfoEXT* pInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyMicromapToMemoryEXT)(VkCommandBuffer commandBuffer, const VkCopyMicromapToMemoryInfoEXT* pInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryToMicromapEXT)(VkCommandBuffer commandBuffer, const VkCopyMemoryToMicromapInfoEXT* pInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdWriteMicromapsPropertiesEXT)(VkCommandBuffer commandBuffer, uint32_t micromapCount, const VkMicromapEXT* pMicromaps, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceMicromapCompatibilityEXT)(VkDevice device, const VkMicromapVersionInfoEXT* pVersionInfo, VkAccelerationStructureCompatibilityKHR* pCompatibility);
+typedef void (VKAPI_PTR *PFN_vkGetMicromapBuildSizesEXT)(VkDevice device, VkAccelerationStructureBuildTypeKHR buildType, const VkMicromapBuildInfoEXT* pBuildInfo, VkMicromapBuildSizesInfoEXT* pSizeInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateMicromapEXT(
+ VkDevice device,
+ const VkMicromapCreateInfoEXT* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkMicromapEXT* pMicromap);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyMicromapEXT(
+ VkDevice device,
+ VkMicromapEXT micromap,
+ const VkAllocationCallbacks* pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBuildMicromapsEXT(
+ VkCommandBuffer commandBuffer,
+ uint32_t infoCount,
+ const VkMicromapBuildInfoEXT* pInfos);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBuildMicromapsEXT(
+ VkDevice device,
+ VkDeferredOperationKHR deferredOperation,
+ uint32_t infoCount,
+ const VkMicromapBuildInfoEXT* pInfos);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCopyMicromapEXT(
+ VkDevice device,
+ VkDeferredOperationKHR deferredOperation,
+ const VkCopyMicromapInfoEXT* pInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCopyMicromapToMemoryEXT(
+ VkDevice device,
+ VkDeferredOperationKHR deferredOperation,
+ const VkCopyMicromapToMemoryInfoEXT* pInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToMicromapEXT(
+ VkDevice device,
+ VkDeferredOperationKHR deferredOperation,
+ const VkCopyMemoryToMicromapInfoEXT* pInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkWriteMicromapsPropertiesEXT(
+ VkDevice device,
+ uint32_t micromapCount,
+ const VkMicromapEXT* pMicromaps,
+ VkQueryType queryType,
+ size_t dataSize,
+ void* pData,
+ size_t stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyMicromapEXT(
+ VkCommandBuffer commandBuffer,
+ const VkCopyMicromapInfoEXT* pInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyMicromapToMemoryEXT(
+ VkCommandBuffer commandBuffer,
+ const VkCopyMicromapToMemoryInfoEXT* pInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryToMicromapEXT(
+ VkCommandBuffer commandBuffer,
+ const VkCopyMemoryToMicromapInfoEXT* pInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWriteMicromapsPropertiesEXT(
+ VkCommandBuffer commandBuffer,
+ uint32_t micromapCount,
+ const VkMicromapEXT* pMicromaps,
+ VkQueryType queryType,
+ VkQueryPool queryPool,
+ uint32_t firstQuery);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceMicromapCompatibilityEXT(
+ VkDevice device,
+ const VkMicromapVersionInfoEXT* pVersionInfo,
+ VkAccelerationStructureCompatibilityKHR* pCompatibility);
+
+VKAPI_ATTR void VKAPI_CALL vkGetMicromapBuildSizesEXT(
+ VkDevice device,
+ VkAccelerationStructureBuildTypeKHR buildType,
+ const VkMicromapBuildInfoEXT* pBuildInfo,
+ VkMicromapBuildSizesInfoEXT* pSizeInfo);
+#endif
+
+
#define VK_EXT_load_store_op_none 1
#define VK_EXT_LOAD_STORE_OP_NONE_SPEC_VERSION 1
#define VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME "VK_EXT_load_store_op_none"
@@ -13830,6 +14774,67 @@ VKAPI_ATTR void VKAPI_CALL vkSetDeviceMemoryPriorityEXT(
#endif
+#define VK_VALVE_descriptor_set_host_mapping 1
+#define VK_VALVE_DESCRIPTOR_SET_HOST_MAPPING_SPEC_VERSION 1
+#define VK_VALVE_DESCRIPTOR_SET_HOST_MAPPING_EXTENSION_NAME "VK_VALVE_descriptor_set_host_mapping"
+typedef struct VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 descriptorSetHostMapping;
+} VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
+
+typedef struct VkDescriptorSetBindingReferenceVALVE {
+ VkStructureType sType;
+ const void* pNext;
+ VkDescriptorSetLayout descriptorSetLayout;
+ uint32_t binding;
+} VkDescriptorSetBindingReferenceVALVE;
+
+typedef struct VkDescriptorSetLayoutHostMappingInfoVALVE {
+ VkStructureType sType;
+ void* pNext;
+ size_t descriptorOffset;
+ uint32_t descriptorSize;
+} VkDescriptorSetLayoutHostMappingInfoVALVE;
+
+typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE)(VkDevice device, const VkDescriptorSetBindingReferenceVALVE* pBindingReference, VkDescriptorSetLayoutHostMappingInfoVALVE* pHostMapping);
+typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetHostMappingVALVE)(VkDevice device, VkDescriptorSet descriptorSet, void** ppData);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutHostMappingInfoVALVE(
+ VkDevice device,
+ const VkDescriptorSetBindingReferenceVALVE* pBindingReference,
+ VkDescriptorSetLayoutHostMappingInfoVALVE* pHostMapping);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetHostMappingVALVE(
+ VkDevice device,
+ VkDescriptorSet descriptorSet,
+ void** ppData);
+#endif
+
+
+#define VK_EXT_depth_clamp_zero_one 1
+#define VK_EXT_DEPTH_CLAMP_ZERO_ONE_SPEC_VERSION 1
+#define VK_EXT_DEPTH_CLAMP_ZERO_ONE_EXTENSION_NAME "VK_EXT_depth_clamp_zero_one"
+typedef struct VkPhysicalDeviceDepthClampZeroOneFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 depthClampZeroOne;
+} VkPhysicalDeviceDepthClampZeroOneFeaturesEXT;
+
+
+
+#define VK_EXT_non_seamless_cube_map 1
+#define VK_EXT_NON_SEAMLESS_CUBE_MAP_SPEC_VERSION 1
+#define VK_EXT_NON_SEAMLESS_CUBE_MAP_EXTENSION_NAME "VK_EXT_non_seamless_cube_map"
+typedef struct VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 nonSeamlessCubeMap;
+} VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT;
+
+
+
#define VK_QCOM_fragment_density_map_offset 1
#define VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_SPEC_VERSION 1
#define VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_EXTENSION_NAME "VK_QCOM_fragment_density_map_offset"
@@ -13866,10 +14871,653 @@ typedef struct VkPhysicalDeviceLinearColorAttachmentFeaturesNV {
#define VK_GOOGLE_surfaceless_query 1
-#define VK_GOOGLE_SURFACELESS_QUERY_SPEC_VERSION 1
+#define VK_GOOGLE_SURFACELESS_QUERY_SPEC_VERSION 2
#define VK_GOOGLE_SURFACELESS_QUERY_EXTENSION_NAME "VK_GOOGLE_surfaceless_query"
+#define VK_EXT_image_compression_control_swapchain 1
+#define VK_EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_SPEC_VERSION 1
+#define VK_EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_EXTENSION_NAME "VK_EXT_image_compression_control_swapchain"
+typedef struct VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 imageCompressionControlSwapchain;
+} VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT;
+
+
+
+#define VK_QCOM_image_processing 1
+#define VK_QCOM_IMAGE_PROCESSING_SPEC_VERSION 1
+#define VK_QCOM_IMAGE_PROCESSING_EXTENSION_NAME "VK_QCOM_image_processing"
+typedef struct VkImageViewSampleWeightCreateInfoQCOM {
+ VkStructureType sType;
+ const void* pNext;
+ VkOffset2D filterCenter;
+ VkExtent2D filterSize;
+ uint32_t numPhases;
+} VkImageViewSampleWeightCreateInfoQCOM;
+
+typedef struct VkPhysicalDeviceImageProcessingFeaturesQCOM {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 textureSampleWeighted;
+ VkBool32 textureBoxFilter;
+ VkBool32 textureBlockMatch;
+} VkPhysicalDeviceImageProcessingFeaturesQCOM;
+
+typedef struct VkPhysicalDeviceImageProcessingPropertiesQCOM {
+ VkStructureType sType;
+ void* pNext;
+ uint32_t maxWeightFilterPhases;
+ VkExtent2D maxWeightFilterDimension;
+ VkExtent2D maxBlockMatchRegion;
+ VkExtent2D maxBoxFilterBlockSize;
+} VkPhysicalDeviceImageProcessingPropertiesQCOM;
+
+
+
+#define VK_EXT_extended_dynamic_state3 1
+#define VK_EXT_EXTENDED_DYNAMIC_STATE_3_SPEC_VERSION 2
+#define VK_EXT_EXTENDED_DYNAMIC_STATE_3_EXTENSION_NAME "VK_EXT_extended_dynamic_state3"
+typedef struct VkPhysicalDeviceExtendedDynamicState3FeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 extendedDynamicState3TessellationDomainOrigin;
+ VkBool32 extendedDynamicState3DepthClampEnable;
+ VkBool32 extendedDynamicState3PolygonMode;
+ VkBool32 extendedDynamicState3RasterizationSamples;
+ VkBool32 extendedDynamicState3SampleMask;
+ VkBool32 extendedDynamicState3AlphaToCoverageEnable;
+ VkBool32 extendedDynamicState3AlphaToOneEnable;
+ VkBool32 extendedDynamicState3LogicOpEnable;
+ VkBool32 extendedDynamicState3ColorBlendEnable;
+ VkBool32 extendedDynamicState3ColorBlendEquation;
+ VkBool32 extendedDynamicState3ColorWriteMask;
+ VkBool32 extendedDynamicState3RasterizationStream;
+ VkBool32 extendedDynamicState3ConservativeRasterizationMode;
+ VkBool32 extendedDynamicState3ExtraPrimitiveOverestimationSize;
+ VkBool32 extendedDynamicState3DepthClipEnable;
+ VkBool32 extendedDynamicState3SampleLocationsEnable;
+ VkBool32 extendedDynamicState3ColorBlendAdvanced;
+ VkBool32 extendedDynamicState3ProvokingVertexMode;
+ VkBool32 extendedDynamicState3LineRasterizationMode;
+ VkBool32 extendedDynamicState3LineStippleEnable;
+ VkBool32 extendedDynamicState3DepthClipNegativeOneToOne;
+ VkBool32 extendedDynamicState3ViewportWScalingEnable;
+ VkBool32 extendedDynamicState3ViewportSwizzle;
+ VkBool32 extendedDynamicState3CoverageToColorEnable;
+ VkBool32 extendedDynamicState3CoverageToColorLocation;
+ VkBool32 extendedDynamicState3CoverageModulationMode;
+ VkBool32 extendedDynamicState3CoverageModulationTableEnable;
+ VkBool32 extendedDynamicState3CoverageModulationTable;
+ VkBool32 extendedDynamicState3CoverageReductionMode;
+ VkBool32 extendedDynamicState3RepresentativeFragmentTestEnable;
+ VkBool32 extendedDynamicState3ShadingRateImageEnable;
+} VkPhysicalDeviceExtendedDynamicState3FeaturesEXT;
+
+typedef struct VkPhysicalDeviceExtendedDynamicState3PropertiesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 dynamicPrimitiveTopologyUnrestricted;
+} VkPhysicalDeviceExtendedDynamicState3PropertiesEXT;
+
+typedef struct VkColorBlendEquationEXT {
+ VkBlendFactor srcColorBlendFactor;
+ VkBlendFactor dstColorBlendFactor;
+ VkBlendOp colorBlendOp;
+ VkBlendFactor srcAlphaBlendFactor;
+ VkBlendFactor dstAlphaBlendFactor;
+ VkBlendOp alphaBlendOp;
+} VkColorBlendEquationEXT;
+
+typedef struct VkColorBlendAdvancedEXT {
+ VkBlendOp advancedBlendOp;
+ VkBool32 srcPremultiplied;
+ VkBool32 dstPremultiplied;
+ VkBlendOverlapEXT blendOverlap;
+ VkBool32 clampResults;
+} VkColorBlendAdvancedEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetTessellationDomainOriginEXT)(VkCommandBuffer commandBuffer, VkTessellationDomainOrigin domainOrigin);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClampEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthClampEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetPolygonModeEXT)(VkCommandBuffer commandBuffer, VkPolygonMode polygonMode);
+typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizationSamplesEXT)(VkCommandBuffer commandBuffer, VkSampleCountFlagBits rasterizationSamples);
+typedef void (VKAPI_PTR *PFN_vkCmdSetSampleMaskEXT)(VkCommandBuffer commandBuffer, VkSampleCountFlagBits samples, const VkSampleMask* pSampleMask);
+typedef void (VKAPI_PTR *PFN_vkCmdSetAlphaToCoverageEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 alphaToCoverageEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetAlphaToOneEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 alphaToOneEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetLogicOpEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 logicOpEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetColorBlendEnableEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkBool32* pColorBlendEnables);
+typedef void (VKAPI_PTR *PFN_vkCmdSetColorBlendEquationEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorBlendEquationEXT* pColorBlendEquations);
+typedef void (VKAPI_PTR *PFN_vkCmdSetColorWriteMaskEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorComponentFlags* pColorWriteMasks);
+typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizationStreamEXT)(VkCommandBuffer commandBuffer, uint32_t rasterizationStream);
+typedef void (VKAPI_PTR *PFN_vkCmdSetConservativeRasterizationModeEXT)(VkCommandBuffer commandBuffer, VkConservativeRasterizationModeEXT conservativeRasterizationMode);
+typedef void (VKAPI_PTR *PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT)(VkCommandBuffer commandBuffer, float extraPrimitiveOverestimationSize);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClipEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthClipEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetSampleLocationsEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 sampleLocationsEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetColorBlendAdvancedEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorBlendAdvancedEXT* pColorBlendAdvanced);
+typedef void (VKAPI_PTR *PFN_vkCmdSetProvokingVertexModeEXT)(VkCommandBuffer commandBuffer, VkProvokingVertexModeEXT provokingVertexMode);
+typedef void (VKAPI_PTR *PFN_vkCmdSetLineRasterizationModeEXT)(VkCommandBuffer commandBuffer, VkLineRasterizationModeEXT lineRasterizationMode);
+typedef void (VKAPI_PTR *PFN_vkCmdSetLineStippleEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 stippledLineEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClipNegativeOneToOneEXT)(VkCommandBuffer commandBuffer, VkBool32 negativeOneToOne);
+typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWScalingEnableNV)(VkCommandBuffer commandBuffer, VkBool32 viewportWScalingEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetViewportSwizzleNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportSwizzleNV* pViewportSwizzles);
+typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageToColorEnableNV)(VkCommandBuffer commandBuffer, VkBool32 coverageToColorEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageToColorLocationNV)(VkCommandBuffer commandBuffer, uint32_t coverageToColorLocation);
+typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageModulationModeNV)(VkCommandBuffer commandBuffer, VkCoverageModulationModeNV coverageModulationMode);
+typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageModulationTableEnableNV)(VkCommandBuffer commandBuffer, VkBool32 coverageModulationTableEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageModulationTableNV)(VkCommandBuffer commandBuffer, uint32_t coverageModulationTableCount, const float* pCoverageModulationTable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetShadingRateImageEnableNV)(VkCommandBuffer commandBuffer, VkBool32 shadingRateImageEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetRepresentativeFragmentTestEnableNV)(VkCommandBuffer commandBuffer, VkBool32 representativeFragmentTestEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageReductionModeNV)(VkCommandBuffer commandBuffer, VkCoverageReductionModeNV coverageReductionMode);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetTessellationDomainOriginEXT(
+ VkCommandBuffer commandBuffer,
+ VkTessellationDomainOrigin domainOrigin);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClampEnableEXT(
+ VkCommandBuffer commandBuffer,
+ VkBool32 depthClampEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetPolygonModeEXT(
+ VkCommandBuffer commandBuffer,
+ VkPolygonMode polygonMode);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizationSamplesEXT(
+ VkCommandBuffer commandBuffer,
+ VkSampleCountFlagBits rasterizationSamples);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleMaskEXT(
+ VkCommandBuffer commandBuffer,
+ VkSampleCountFlagBits samples,
+ const VkSampleMask* pSampleMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetAlphaToCoverageEnableEXT(
+ VkCommandBuffer commandBuffer,
+ VkBool32 alphaToCoverageEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetAlphaToOneEnableEXT(
+ VkCommandBuffer commandBuffer,
+ VkBool32 alphaToOneEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetLogicOpEnableEXT(
+ VkCommandBuffer commandBuffer,
+ VkBool32 logicOpEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetColorBlendEnableEXT(
+ VkCommandBuffer commandBuffer,
+ uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VkBool32* pColorBlendEnables);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetColorBlendEquationEXT(
+ VkCommandBuffer commandBuffer,
+ uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VkColorBlendEquationEXT* pColorBlendEquations);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetColorWriteMaskEXT(
+ VkCommandBuffer commandBuffer,
+ uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VkColorComponentFlags* pColorWriteMasks);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizationStreamEXT(
+ VkCommandBuffer commandBuffer,
+ uint32_t rasterizationStream);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetConservativeRasterizationModeEXT(
+ VkCommandBuffer commandBuffer,
+ VkConservativeRasterizationModeEXT conservativeRasterizationMode);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetExtraPrimitiveOverestimationSizeEXT(
+ VkCommandBuffer commandBuffer,
+ float extraPrimitiveOverestimationSize);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClipEnableEXT(
+ VkCommandBuffer commandBuffer,
+ VkBool32 depthClipEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleLocationsEnableEXT(
+ VkCommandBuffer commandBuffer,
+ VkBool32 sampleLocationsEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetColorBlendAdvancedEXT(
+ VkCommandBuffer commandBuffer,
+ uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VkColorBlendAdvancedEXT* pColorBlendAdvanced);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetProvokingVertexModeEXT(
+ VkCommandBuffer commandBuffer,
+ VkProvokingVertexModeEXT provokingVertexMode);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetLineRasterizationModeEXT(
+ VkCommandBuffer commandBuffer,
+ VkLineRasterizationModeEXT lineRasterizationMode);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEnableEXT(
+ VkCommandBuffer commandBuffer,
+ VkBool32 stippledLineEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClipNegativeOneToOneEXT(
+ VkCommandBuffer commandBuffer,
+ VkBool32 negativeOneToOne);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWScalingEnableNV(
+ VkCommandBuffer commandBuffer,
+ VkBool32 viewportWScalingEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportSwizzleNV(
+ VkCommandBuffer commandBuffer,
+ uint32_t firstViewport,
+ uint32_t viewportCount,
+ const VkViewportSwizzleNV* pViewportSwizzles);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageToColorEnableNV(
+ VkCommandBuffer commandBuffer,
+ VkBool32 coverageToColorEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageToColorLocationNV(
+ VkCommandBuffer commandBuffer,
+ uint32_t coverageToColorLocation);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageModulationModeNV(
+ VkCommandBuffer commandBuffer,
+ VkCoverageModulationModeNV coverageModulationMode);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageModulationTableEnableNV(
+ VkCommandBuffer commandBuffer,
+ VkBool32 coverageModulationTableEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageModulationTableNV(
+ VkCommandBuffer commandBuffer,
+ uint32_t coverageModulationTableCount,
+ const float* pCoverageModulationTable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetShadingRateImageEnableNV(
+ VkCommandBuffer commandBuffer,
+ VkBool32 shadingRateImageEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetRepresentativeFragmentTestEnableNV(
+ VkCommandBuffer commandBuffer,
+ VkBool32 representativeFragmentTestEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageReductionModeNV(
+ VkCommandBuffer commandBuffer,
+ VkCoverageReductionModeNV coverageReductionMode);
+#endif
+
+
+#define VK_EXT_subpass_merge_feedback 1
+#define VK_EXT_SUBPASS_MERGE_FEEDBACK_SPEC_VERSION 2
+#define VK_EXT_SUBPASS_MERGE_FEEDBACK_EXTENSION_NAME "VK_EXT_subpass_merge_feedback"
+
+typedef enum VkSubpassMergeStatusEXT {
+ VK_SUBPASS_MERGE_STATUS_MERGED_EXT = 0,
+ VK_SUBPASS_MERGE_STATUS_DISALLOWED_EXT = 1,
+ VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SIDE_EFFECTS_EXT = 2,
+ VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SAMPLES_MISMATCH_EXT = 3,
+ VK_SUBPASS_MERGE_STATUS_NOT_MERGED_VIEWS_MISMATCH_EXT = 4,
+ VK_SUBPASS_MERGE_STATUS_NOT_MERGED_ALIASING_EXT = 5,
+ VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPENDENCIES_EXT = 6,
+ VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INCOMPATIBLE_INPUT_ATTACHMENT_EXT = 7,
+ VK_SUBPASS_MERGE_STATUS_NOT_MERGED_TOO_MANY_ATTACHMENTS_EXT = 8,
+ VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INSUFFICIENT_STORAGE_EXT = 9,
+ VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPTH_STENCIL_COUNT_EXT = 10,
+ VK_SUBPASS_MERGE_STATUS_NOT_MERGED_RESOLVE_ATTACHMENT_REUSE_EXT = 11,
+ VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SINGLE_SUBPASS_EXT = 12,
+ VK_SUBPASS_MERGE_STATUS_NOT_MERGED_UNSPECIFIED_EXT = 13,
+ VK_SUBPASS_MERGE_STATUS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkSubpassMergeStatusEXT;
+typedef struct VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 subpassMergeFeedback;
+} VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT;
+
+typedef struct VkRenderPassCreationControlEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkBool32 disallowMerging;
+} VkRenderPassCreationControlEXT;
+
+typedef struct VkRenderPassCreationFeedbackInfoEXT {
+ uint32_t postMergeSubpassCount;
+} VkRenderPassCreationFeedbackInfoEXT;
+
+typedef struct VkRenderPassCreationFeedbackCreateInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkRenderPassCreationFeedbackInfoEXT* pRenderPassFeedback;
+} VkRenderPassCreationFeedbackCreateInfoEXT;
+
+typedef struct VkRenderPassSubpassFeedbackInfoEXT {
+ VkSubpassMergeStatusEXT subpassMergeStatus;
+ char description[VK_MAX_DESCRIPTION_SIZE];
+ uint32_t postMergeIndex;
+} VkRenderPassSubpassFeedbackInfoEXT;
+
+typedef struct VkRenderPassSubpassFeedbackCreateInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkRenderPassSubpassFeedbackInfoEXT* pSubpassFeedback;
+} VkRenderPassSubpassFeedbackCreateInfoEXT;
+
+
+
+#define VK_EXT_shader_module_identifier 1
+#define VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT 32U
+#define VK_EXT_SHADER_MODULE_IDENTIFIER_SPEC_VERSION 1
+#define VK_EXT_SHADER_MODULE_IDENTIFIER_EXTENSION_NAME "VK_EXT_shader_module_identifier"
+typedef struct VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 shaderModuleIdentifier;
+} VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT;
+
+typedef struct VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT {
+ VkStructureType sType;
+ void* pNext;
+ uint8_t shaderModuleIdentifierAlgorithmUUID[VK_UUID_SIZE];
+} VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT;
+
+typedef struct VkPipelineShaderStageModuleIdentifierCreateInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ uint32_t identifierSize;
+ const uint8_t* pIdentifier;
+} VkPipelineShaderStageModuleIdentifierCreateInfoEXT;
+
+typedef struct VkShaderModuleIdentifierEXT {
+ VkStructureType sType;
+ void* pNext;
+ uint32_t identifierSize;
+ uint8_t identifier[VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT];
+} VkShaderModuleIdentifierEXT;
+
+typedef void (VKAPI_PTR *PFN_vkGetShaderModuleIdentifierEXT)(VkDevice device, VkShaderModule shaderModule, VkShaderModuleIdentifierEXT* pIdentifier);
+typedef void (VKAPI_PTR *PFN_vkGetShaderModuleCreateInfoIdentifierEXT)(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, VkShaderModuleIdentifierEXT* pIdentifier);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetShaderModuleIdentifierEXT(
+ VkDevice device,
+ VkShaderModule shaderModule,
+ VkShaderModuleIdentifierEXT* pIdentifier);
+
+VKAPI_ATTR void VKAPI_CALL vkGetShaderModuleCreateInfoIdentifierEXT(
+ VkDevice device,
+ const VkShaderModuleCreateInfo* pCreateInfo,
+ VkShaderModuleIdentifierEXT* pIdentifier);
+#endif
+
+
+#define VK_EXT_rasterization_order_attachment_access 1
+#define VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION 1
+#define VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME "VK_EXT_rasterization_order_attachment_access"
+
+
+#define VK_NV_optical_flow 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkOpticalFlowSessionNV)
+#define VK_NV_OPTICAL_FLOW_SPEC_VERSION 1
+#define VK_NV_OPTICAL_FLOW_EXTENSION_NAME "VK_NV_optical_flow"
+
+typedef enum VkOpticalFlowPerformanceLevelNV {
+ VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_UNKNOWN_NV = 0,
+ VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_SLOW_NV = 1,
+ VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_MEDIUM_NV = 2,
+ VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_FAST_NV = 3,
+ VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_MAX_ENUM_NV = 0x7FFFFFFF
+} VkOpticalFlowPerformanceLevelNV;
+
+typedef enum VkOpticalFlowSessionBindingPointNV {
+ VK_OPTICAL_FLOW_SESSION_BINDING_POINT_UNKNOWN_NV = 0,
+ VK_OPTICAL_FLOW_SESSION_BINDING_POINT_INPUT_NV = 1,
+ VK_OPTICAL_FLOW_SESSION_BINDING_POINT_REFERENCE_NV = 2,
+ VK_OPTICAL_FLOW_SESSION_BINDING_POINT_HINT_NV = 3,
+ VK_OPTICAL_FLOW_SESSION_BINDING_POINT_FLOW_VECTOR_NV = 4,
+ VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_FLOW_VECTOR_NV = 5,
+ VK_OPTICAL_FLOW_SESSION_BINDING_POINT_COST_NV = 6,
+ VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_COST_NV = 7,
+ VK_OPTICAL_FLOW_SESSION_BINDING_POINT_GLOBAL_FLOW_NV = 8,
+ VK_OPTICAL_FLOW_SESSION_BINDING_POINT_MAX_ENUM_NV = 0x7FFFFFFF
+} VkOpticalFlowSessionBindingPointNV;
+
+typedef enum VkOpticalFlowGridSizeFlagBitsNV {
+ VK_OPTICAL_FLOW_GRID_SIZE_UNKNOWN_NV = 0,
+ VK_OPTICAL_FLOW_GRID_SIZE_1X1_BIT_NV = 0x00000001,
+ VK_OPTICAL_FLOW_GRID_SIZE_2X2_BIT_NV = 0x00000002,
+ VK_OPTICAL_FLOW_GRID_SIZE_4X4_BIT_NV = 0x00000004,
+ VK_OPTICAL_FLOW_GRID_SIZE_8X8_BIT_NV = 0x00000008,
+ VK_OPTICAL_FLOW_GRID_SIZE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkOpticalFlowGridSizeFlagBitsNV;
+typedef VkFlags VkOpticalFlowGridSizeFlagsNV;
+
+typedef enum VkOpticalFlowUsageFlagBitsNV {
+ VK_OPTICAL_FLOW_USAGE_UNKNOWN_NV = 0,
+ VK_OPTICAL_FLOW_USAGE_INPUT_BIT_NV = 0x00000001,
+ VK_OPTICAL_FLOW_USAGE_OUTPUT_BIT_NV = 0x00000002,
+ VK_OPTICAL_FLOW_USAGE_HINT_BIT_NV = 0x00000004,
+ VK_OPTICAL_FLOW_USAGE_COST_BIT_NV = 0x00000008,
+ VK_OPTICAL_FLOW_USAGE_GLOBAL_FLOW_BIT_NV = 0x00000010,
+ VK_OPTICAL_FLOW_USAGE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkOpticalFlowUsageFlagBitsNV;
+typedef VkFlags VkOpticalFlowUsageFlagsNV;
+
+typedef enum VkOpticalFlowSessionCreateFlagBitsNV {
+ VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_HINT_BIT_NV = 0x00000001,
+ VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_COST_BIT_NV = 0x00000002,
+ VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_GLOBAL_FLOW_BIT_NV = 0x00000004,
+ VK_OPTICAL_FLOW_SESSION_CREATE_ALLOW_REGIONS_BIT_NV = 0x00000008,
+ VK_OPTICAL_FLOW_SESSION_CREATE_BOTH_DIRECTIONS_BIT_NV = 0x00000010,
+ VK_OPTICAL_FLOW_SESSION_CREATE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkOpticalFlowSessionCreateFlagBitsNV;
+typedef VkFlags VkOpticalFlowSessionCreateFlagsNV;
+
+typedef enum VkOpticalFlowExecuteFlagBitsNV {
+ VK_OPTICAL_FLOW_EXECUTE_DISABLE_TEMPORAL_HINTS_BIT_NV = 0x00000001,
+ VK_OPTICAL_FLOW_EXECUTE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkOpticalFlowExecuteFlagBitsNV;
+typedef VkFlags VkOpticalFlowExecuteFlagsNV;
+typedef struct VkPhysicalDeviceOpticalFlowFeaturesNV {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 opticalFlow;
+} VkPhysicalDeviceOpticalFlowFeaturesNV;
+
+typedef struct VkPhysicalDeviceOpticalFlowPropertiesNV {
+ VkStructureType sType;
+ void* pNext;
+ VkOpticalFlowGridSizeFlagsNV supportedOutputGridSizes;
+ VkOpticalFlowGridSizeFlagsNV supportedHintGridSizes;
+ VkBool32 hintSupported;
+ VkBool32 costSupported;
+ VkBool32 bidirectionalFlowSupported;
+ VkBool32 globalFlowSupported;
+ uint32_t minWidth;
+ uint32_t minHeight;
+ uint32_t maxWidth;
+ uint32_t maxHeight;
+ uint32_t maxNumRegionsOfInterest;
+} VkPhysicalDeviceOpticalFlowPropertiesNV;
+
+typedef struct VkOpticalFlowImageFormatInfoNV {
+ VkStructureType sType;
+ const void* pNext;
+ VkOpticalFlowUsageFlagsNV usage;
+} VkOpticalFlowImageFormatInfoNV;
+
+typedef struct VkOpticalFlowImageFormatPropertiesNV {
+ VkStructureType sType;
+ const void* pNext;
+ VkFormat format;
+} VkOpticalFlowImageFormatPropertiesNV;
+
+typedef struct VkOpticalFlowSessionCreateInfoNV {
+ VkStructureType sType;
+ void* pNext;
+ uint32_t width;
+ uint32_t height;
+ VkFormat imageFormat;
+ VkFormat flowVectorFormat;
+ VkFormat costFormat;
+ VkOpticalFlowGridSizeFlagsNV outputGridSize;
+ VkOpticalFlowGridSizeFlagsNV hintGridSize;
+ VkOpticalFlowPerformanceLevelNV performanceLevel;
+ VkOpticalFlowSessionCreateFlagsNV flags;
+} VkOpticalFlowSessionCreateInfoNV;
+
+typedef struct VkOpticalFlowSessionCreatePrivateDataInfoNV {
+ VkStructureType sType;
+ void* pNext;
+ uint32_t id;
+ uint32_t size;
+ const void* pPrivateData;
+} VkOpticalFlowSessionCreatePrivateDataInfoNV;
+
+typedef struct VkOpticalFlowExecuteInfoNV {
+ VkStructureType sType;
+ void* pNext;
+ VkOpticalFlowExecuteFlagsNV flags;
+ uint32_t regionCount;
+ const VkRect2D* pRegions;
+} VkOpticalFlowExecuteInfoNV;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV)(VkPhysicalDevice physicalDevice, const VkOpticalFlowImageFormatInfoNV* pOpticalFlowImageFormatInfo, uint32_t* pFormatCount, VkOpticalFlowImageFormatPropertiesNV* pImageFormatProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateOpticalFlowSessionNV)(VkDevice device, const VkOpticalFlowSessionCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkOpticalFlowSessionNV* pSession);
+typedef void (VKAPI_PTR *PFN_vkDestroyOpticalFlowSessionNV)(VkDevice device, VkOpticalFlowSessionNV session, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkBindOpticalFlowSessionImageNV)(VkDevice device, VkOpticalFlowSessionNV session, VkOpticalFlowSessionBindingPointNV bindingPoint, VkImageView view, VkImageLayout layout);
+typedef void (VKAPI_PTR *PFN_vkCmdOpticalFlowExecuteNV)(VkCommandBuffer commandBuffer, VkOpticalFlowSessionNV session, const VkOpticalFlowExecuteInfoNV* pExecuteInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceOpticalFlowImageFormatsNV(
+ VkPhysicalDevice physicalDevice,
+ const VkOpticalFlowImageFormatInfoNV* pOpticalFlowImageFormatInfo,
+ uint32_t* pFormatCount,
+ VkOpticalFlowImageFormatPropertiesNV* pImageFormatProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateOpticalFlowSessionNV(
+ VkDevice device,
+ const VkOpticalFlowSessionCreateInfoNV* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkOpticalFlowSessionNV* pSession);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyOpticalFlowSessionNV(
+ VkDevice device,
+ VkOpticalFlowSessionNV session,
+ const VkAllocationCallbacks* pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindOpticalFlowSessionImageNV(
+ VkDevice device,
+ VkOpticalFlowSessionNV session,
+ VkOpticalFlowSessionBindingPointNV bindingPoint,
+ VkImageView view,
+ VkImageLayout layout);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdOpticalFlowExecuteNV(
+ VkCommandBuffer commandBuffer,
+ VkOpticalFlowSessionNV session,
+ const VkOpticalFlowExecuteInfoNV* pExecuteInfo);
+#endif
+
+
+#define VK_EXT_legacy_dithering 1
+#define VK_EXT_LEGACY_DITHERING_SPEC_VERSION 1
+#define VK_EXT_LEGACY_DITHERING_EXTENSION_NAME "VK_EXT_legacy_dithering"
+typedef struct VkPhysicalDeviceLegacyDitheringFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 legacyDithering;
+} VkPhysicalDeviceLegacyDitheringFeaturesEXT;
+
+
+
+#define VK_EXT_pipeline_protected_access 1
+#define VK_EXT_PIPELINE_PROTECTED_ACCESS_SPEC_VERSION 1
+#define VK_EXT_PIPELINE_PROTECTED_ACCESS_EXTENSION_NAME "VK_EXT_pipeline_protected_access"
+typedef struct VkPhysicalDevicePipelineProtectedAccessFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 pipelineProtectedAccess;
+} VkPhysicalDevicePipelineProtectedAccessFeaturesEXT;
+
+
+
+#define VK_QCOM_tile_properties 1
+#define VK_QCOM_TILE_PROPERTIES_SPEC_VERSION 1
+#define VK_QCOM_TILE_PROPERTIES_EXTENSION_NAME "VK_QCOM_tile_properties"
+typedef struct VkPhysicalDeviceTilePropertiesFeaturesQCOM {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 tileProperties;
+} VkPhysicalDeviceTilePropertiesFeaturesQCOM;
+
+typedef struct VkTilePropertiesQCOM {
+ VkStructureType sType;
+ void* pNext;
+ VkExtent3D tileSize;
+ VkExtent2D apronSize;
+ VkOffset2D origin;
+} VkTilePropertiesQCOM;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetFramebufferTilePropertiesQCOM)(VkDevice device, VkFramebuffer framebuffer, uint32_t* pPropertiesCount, VkTilePropertiesQCOM* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDynamicRenderingTilePropertiesQCOM)(VkDevice device, const VkRenderingInfo* pRenderingInfo, VkTilePropertiesQCOM* pProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetFramebufferTilePropertiesQCOM(
+ VkDevice device,
+ VkFramebuffer framebuffer,
+ uint32_t* pPropertiesCount,
+ VkTilePropertiesQCOM* pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDynamicRenderingTilePropertiesQCOM(
+ VkDevice device,
+ const VkRenderingInfo* pRenderingInfo,
+ VkTilePropertiesQCOM* pProperties);
+#endif
+
+
+#define VK_SEC_amigo_profiling 1
+#define VK_SEC_AMIGO_PROFILING_SPEC_VERSION 1
+#define VK_SEC_AMIGO_PROFILING_EXTENSION_NAME "VK_SEC_amigo_profiling"
+typedef struct VkPhysicalDeviceAmigoProfilingFeaturesSEC {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 amigoProfiling;
+} VkPhysicalDeviceAmigoProfilingFeaturesSEC;
+
+typedef struct VkAmigoProfilingSubmitInfoSEC {
+ VkStructureType sType;
+ const void* pNext;
+ uint64_t firstDrawTimestamp;
+ uint64_t swapBufferTimestamp;
+} VkAmigoProfilingSubmitInfoSEC;
+
+
+
+#define VK_EXT_mutable_descriptor_type 1
+#define VK_EXT_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION 1
+#define VK_EXT_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME "VK_EXT_mutable_descriptor_type"
+
+
+#define VK_ARM_shader_core_builtins 1
+#define VK_ARM_SHADER_CORE_BUILTINS_SPEC_VERSION 1
+#define VK_ARM_SHADER_CORE_BUILTINS_EXTENSION_NAME "VK_ARM_shader_core_builtins"
+typedef struct VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 shaderCoreBuiltins;
+} VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM;
+
+typedef struct VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM {
+ VkStructureType sType;
+ void* pNext;
+ uint32_t shaderCoreCount;
+ uint32_t shaderWarpsPerCore;
+} VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM;
+
+
+
#define VK_KHR_acceleration_structure 1
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureKHR)
#define VK_KHR_ACCELERATION_STRUCTURE_SPEC_VERSION 13
@@ -13881,30 +15529,12 @@ typedef enum VkBuildAccelerationStructureModeKHR {
VK_BUILD_ACCELERATION_STRUCTURE_MODE_MAX_ENUM_KHR = 0x7FFFFFFF
} VkBuildAccelerationStructureModeKHR;
-typedef enum VkAccelerationStructureBuildTypeKHR {
- VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR = 0,
- VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR = 1,
- VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR = 2,
- VK_ACCELERATION_STRUCTURE_BUILD_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF
-} VkAccelerationStructureBuildTypeKHR;
-
-typedef enum VkAccelerationStructureCompatibilityKHR {
- VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR = 0,
- VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR = 1,
- VK_ACCELERATION_STRUCTURE_COMPATIBILITY_MAX_ENUM_KHR = 0x7FFFFFFF
-} VkAccelerationStructureCompatibilityKHR;
-
typedef enum VkAccelerationStructureCreateFlagBitsKHR {
VK_ACCELERATION_STRUCTURE_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = 0x00000001,
VK_ACCELERATION_STRUCTURE_CREATE_MOTION_BIT_NV = 0x00000004,
VK_ACCELERATION_STRUCTURE_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
} VkAccelerationStructureCreateFlagBitsKHR;
typedef VkFlags VkAccelerationStructureCreateFlagsKHR;
-typedef union VkDeviceOrHostAddressKHR {
- VkDeviceAddress deviceAddress;
- void* hostAddress;
-} VkDeviceOrHostAddressKHR;
-
typedef struct VkAccelerationStructureBuildRangeInfoKHR {
uint32_t primitiveCount;
uint32_t primitiveOffset;
@@ -14309,6 +15939,87 @@ typedef struct VkPhysicalDeviceRayQueryFeaturesKHR {
} VkPhysicalDeviceRayQueryFeaturesKHR;
+
+#define VK_EXT_mesh_shader 1
+#define VK_EXT_MESH_SHADER_SPEC_VERSION 1
+#define VK_EXT_MESH_SHADER_EXTENSION_NAME "VK_EXT_mesh_shader"
+typedef struct VkPhysicalDeviceMeshShaderFeaturesEXT {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 taskShader;
+ VkBool32 meshShader;
+ VkBool32 multiviewMeshShader;
+ VkBool32 primitiveFragmentShadingRateMeshShader;
+ VkBool32 meshShaderQueries;
+} VkPhysicalDeviceMeshShaderFeaturesEXT;
+
+typedef struct VkPhysicalDeviceMeshShaderPropertiesEXT {
+ VkStructureType sType;
+ void* pNext;
+ uint32_t maxTaskWorkGroupTotalCount;
+ uint32_t maxTaskWorkGroupCount[3];
+ uint32_t maxTaskWorkGroupInvocations;
+ uint32_t maxTaskWorkGroupSize[3];
+ uint32_t maxTaskPayloadSize;
+ uint32_t maxTaskSharedMemorySize;
+ uint32_t maxTaskPayloadAndSharedMemorySize;
+ uint32_t maxMeshWorkGroupTotalCount;
+ uint32_t maxMeshWorkGroupCount[3];
+ uint32_t maxMeshWorkGroupInvocations;
+ uint32_t maxMeshWorkGroupSize[3];
+ uint32_t maxMeshSharedMemorySize;
+ uint32_t maxMeshPayloadAndSharedMemorySize;
+ uint32_t maxMeshOutputMemorySize;
+ uint32_t maxMeshPayloadAndOutputMemorySize;
+ uint32_t maxMeshOutputComponents;
+ uint32_t maxMeshOutputVertices;
+ uint32_t maxMeshOutputPrimitives;
+ uint32_t maxMeshOutputLayers;
+ uint32_t maxMeshMultiviewViewCount;
+ uint32_t meshOutputPerVertexGranularity;
+ uint32_t meshOutputPerPrimitiveGranularity;
+ uint32_t maxPreferredTaskWorkGroupInvocations;
+ uint32_t maxPreferredMeshWorkGroupInvocations;
+ VkBool32 prefersLocalInvocationVertexOutput;
+ VkBool32 prefersLocalInvocationPrimitiveOutput;
+ VkBool32 prefersCompactVertexOutput;
+ VkBool32 prefersCompactPrimitiveOutput;
+} VkPhysicalDeviceMeshShaderPropertiesEXT;
+
+typedef struct VkDrawMeshTasksIndirectCommandEXT {
+ uint32_t groupCountX;
+ uint32_t groupCountY;
+ uint32_t groupCountZ;
+} VkDrawMeshTasksIndirectCommandEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksEXT)(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectEXT)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectCountEXT)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksEXT(
+ VkCommandBuffer commandBuffer,
+ uint32_t groupCountX,
+ uint32_t groupCountY,
+ uint32_t groupCountZ);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectEXT(
+ VkCommandBuffer commandBuffer,
+ VkBuffer buffer,
+ VkDeviceSize offset,
+ uint32_t drawCount,
+ uint32_t stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectCountEXT(
+ VkCommandBuffer commandBuffer,
+ VkBuffer buffer,
+ VkDeviceSize offset,
+ VkBuffer countBuffer,
+ VkDeviceSize countBufferOffset,
+ uint32_t maxDrawCount,
+ uint32_t stride);
+#endif
+
#ifdef __cplusplus
}
#endif
diff --git a/thirdparty/vulkan/include/vulkan/vulkan_enums.hpp b/thirdparty/vulkan/include/vulkan/vulkan_enums.hpp
index 9066688702..8801c6562b 100644
--- a/thirdparty/vulkan/include/vulkan/vulkan_enums.hpp
+++ b/thirdparty/vulkan/include/vulkan/vulkan_enums.hpp
@@ -12,21 +12,9 @@ namespace VULKAN_HPP_NAMESPACE
{
template <typename EnumType, EnumType value>
struct CppType
- {};
-
- template <typename Type>
- struct isVulkanHandleType
{
- static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = false;
};
- VULKAN_HPP_INLINE std::string toHexString( uint32_t value )
- {
- std::stringstream stream;
- stream << std::hex << value;
- return stream.str();
- }
-
//=============
//=== ENUMs ===
//=============
@@ -35,37 +23,45 @@ namespace VULKAN_HPP_NAMESPACE
enum class Result
{
- eSuccess = VK_SUCCESS,
- eNotReady = VK_NOT_READY,
- eTimeout = VK_TIMEOUT,
- eEventSet = VK_EVENT_SET,
- eEventReset = VK_EVENT_RESET,
- eIncomplete = VK_INCOMPLETE,
- eErrorOutOfHostMemory = VK_ERROR_OUT_OF_HOST_MEMORY,
- eErrorOutOfDeviceMemory = VK_ERROR_OUT_OF_DEVICE_MEMORY,
- eErrorInitializationFailed = VK_ERROR_INITIALIZATION_FAILED,
- eErrorDeviceLost = VK_ERROR_DEVICE_LOST,
- eErrorMemoryMapFailed = VK_ERROR_MEMORY_MAP_FAILED,
- eErrorLayerNotPresent = VK_ERROR_LAYER_NOT_PRESENT,
- eErrorExtensionNotPresent = VK_ERROR_EXTENSION_NOT_PRESENT,
- eErrorFeatureNotPresent = VK_ERROR_FEATURE_NOT_PRESENT,
- eErrorIncompatibleDriver = VK_ERROR_INCOMPATIBLE_DRIVER,
- eErrorTooManyObjects = VK_ERROR_TOO_MANY_OBJECTS,
- eErrorFormatNotSupported = VK_ERROR_FORMAT_NOT_SUPPORTED,
- eErrorFragmentedPool = VK_ERROR_FRAGMENTED_POOL,
- eErrorUnknown = VK_ERROR_UNKNOWN,
- eErrorOutOfPoolMemory = VK_ERROR_OUT_OF_POOL_MEMORY,
- eErrorInvalidExternalHandle = VK_ERROR_INVALID_EXTERNAL_HANDLE,
- eErrorFragmentation = VK_ERROR_FRAGMENTATION,
- eErrorInvalidOpaqueCaptureAddress = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS,
- ePipelineCompileRequired = VK_PIPELINE_COMPILE_REQUIRED,
- eErrorSurfaceLostKHR = VK_ERROR_SURFACE_LOST_KHR,
- eErrorNativeWindowInUseKHR = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR,
- eSuboptimalKHR = VK_SUBOPTIMAL_KHR,
- eErrorOutOfDateKHR = VK_ERROR_OUT_OF_DATE_KHR,
- eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR,
- eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT,
- eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV,
+ eSuccess = VK_SUCCESS,
+ eNotReady = VK_NOT_READY,
+ eTimeout = VK_TIMEOUT,
+ eEventSet = VK_EVENT_SET,
+ eEventReset = VK_EVENT_RESET,
+ eIncomplete = VK_INCOMPLETE,
+ eErrorOutOfHostMemory = VK_ERROR_OUT_OF_HOST_MEMORY,
+ eErrorOutOfDeviceMemory = VK_ERROR_OUT_OF_DEVICE_MEMORY,
+ eErrorInitializationFailed = VK_ERROR_INITIALIZATION_FAILED,
+ eErrorDeviceLost = VK_ERROR_DEVICE_LOST,
+ eErrorMemoryMapFailed = VK_ERROR_MEMORY_MAP_FAILED,
+ eErrorLayerNotPresent = VK_ERROR_LAYER_NOT_PRESENT,
+ eErrorExtensionNotPresent = VK_ERROR_EXTENSION_NOT_PRESENT,
+ eErrorFeatureNotPresent = VK_ERROR_FEATURE_NOT_PRESENT,
+ eErrorIncompatibleDriver = VK_ERROR_INCOMPATIBLE_DRIVER,
+ eErrorTooManyObjects = VK_ERROR_TOO_MANY_OBJECTS,
+ eErrorFormatNotSupported = VK_ERROR_FORMAT_NOT_SUPPORTED,
+ eErrorFragmentedPool = VK_ERROR_FRAGMENTED_POOL,
+ eErrorUnknown = VK_ERROR_UNKNOWN,
+ eErrorOutOfPoolMemory = VK_ERROR_OUT_OF_POOL_MEMORY,
+ eErrorInvalidExternalHandle = VK_ERROR_INVALID_EXTERNAL_HANDLE,
+ eErrorFragmentation = VK_ERROR_FRAGMENTATION,
+ eErrorInvalidOpaqueCaptureAddress = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS,
+ ePipelineCompileRequired = VK_PIPELINE_COMPILE_REQUIRED,
+ eErrorSurfaceLostKHR = VK_ERROR_SURFACE_LOST_KHR,
+ eErrorNativeWindowInUseKHR = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR,
+ eSuboptimalKHR = VK_SUBOPTIMAL_KHR,
+ eErrorOutOfDateKHR = VK_ERROR_OUT_OF_DATE_KHR,
+ eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR,
+ eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT,
+ eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ eErrorImageUsageNotSupportedKHR = VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR,
+ eErrorVideoPictureLayoutNotSupportedKHR = VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR,
+ eErrorVideoProfileOperationNotSupportedKHR = VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR,
+ eErrorVideoProfileFormatNotSupportedKHR = VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR,
+ eErrorVideoProfileCodecNotSupportedKHR = VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR,
+ eErrorVideoStdVersionNotSupportedKHR = VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
eErrorInvalidDrmFormatModifierPlaneLayoutEXT = VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT,
eErrorNotPermittedKHR = VK_ERROR_NOT_PERMITTED_KHR,
#if defined( VK_USE_PLATFORM_WIN32_KHR )
@@ -75,6 +71,7 @@ namespace VULKAN_HPP_NAMESPACE
eThreadDoneKHR = VK_THREAD_DONE_KHR,
eOperationDeferredKHR = VK_OPERATION_DEFERRED_KHR,
eOperationNotDeferredKHR = VK_OPERATION_NOT_DEFERRED_KHR,
+ eErrorCompressionExhaustedEXT = VK_ERROR_COMPRESSION_EXHAUSTED_EXT,
eErrorFragmentationEXT = VK_ERROR_FRAGMENTATION_EXT,
eErrorInvalidDeviceAddressEXT = VK_ERROR_INVALID_DEVICE_ADDRESS_EXT,
eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR,
@@ -85,297 +82,235 @@ namespace VULKAN_HPP_NAMESPACE
ePipelineCompileRequiredEXT = VK_PIPELINE_COMPILE_REQUIRED_EXT
};
- VULKAN_HPP_INLINE std::string to_string( Result value )
- {
- switch ( value )
- {
- case Result::eSuccess: return "Success";
- case Result::eNotReady: return "NotReady";
- case Result::eTimeout: return "Timeout";
- case Result::eEventSet: return "EventSet";
- case Result::eEventReset: return "EventReset";
- case Result::eIncomplete: return "Incomplete";
- case Result::eErrorOutOfHostMemory: return "ErrorOutOfHostMemory";
- case Result::eErrorOutOfDeviceMemory: return "ErrorOutOfDeviceMemory";
- case Result::eErrorInitializationFailed: return "ErrorInitializationFailed";
- case Result::eErrorDeviceLost: return "ErrorDeviceLost";
- case Result::eErrorMemoryMapFailed: return "ErrorMemoryMapFailed";
- case Result::eErrorLayerNotPresent: return "ErrorLayerNotPresent";
- case Result::eErrorExtensionNotPresent: return "ErrorExtensionNotPresent";
- case Result::eErrorFeatureNotPresent: return "ErrorFeatureNotPresent";
- case Result::eErrorIncompatibleDriver: return "ErrorIncompatibleDriver";
- case Result::eErrorTooManyObjects: return "ErrorTooManyObjects";
- case Result::eErrorFormatNotSupported: return "ErrorFormatNotSupported";
- case Result::eErrorFragmentedPool: return "ErrorFragmentedPool";
- case Result::eErrorUnknown: return "ErrorUnknown";
- case Result::eErrorOutOfPoolMemory: return "ErrorOutOfPoolMemory";
- case Result::eErrorInvalidExternalHandle: return "ErrorInvalidExternalHandle";
- case Result::eErrorFragmentation: return "ErrorFragmentation";
- case Result::eErrorInvalidOpaqueCaptureAddress: return "ErrorInvalidOpaqueCaptureAddress";
- case Result::ePipelineCompileRequired: return "PipelineCompileRequired";
- case Result::eErrorSurfaceLostKHR: return "ErrorSurfaceLostKHR";
- case Result::eErrorNativeWindowInUseKHR: return "ErrorNativeWindowInUseKHR";
- case Result::eSuboptimalKHR: return "SuboptimalKHR";
- case Result::eErrorOutOfDateKHR: return "ErrorOutOfDateKHR";
- case Result::eErrorIncompatibleDisplayKHR: return "ErrorIncompatibleDisplayKHR";
- case Result::eErrorValidationFailedEXT: return "ErrorValidationFailedEXT";
- case Result::eErrorInvalidShaderNV: return "ErrorInvalidShaderNV";
- case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: return "ErrorInvalidDrmFormatModifierPlaneLayoutEXT";
- case Result::eErrorNotPermittedKHR: return "ErrorNotPermittedKHR";
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
- case Result::eErrorFullScreenExclusiveModeLostEXT: return "ErrorFullScreenExclusiveModeLostEXT";
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- case Result::eThreadIdleKHR: return "ThreadIdleKHR";
- case Result::eThreadDoneKHR: return "ThreadDoneKHR";
- case Result::eOperationDeferredKHR: return "OperationDeferredKHR";
- case Result::eOperationNotDeferredKHR: return "OperationNotDeferredKHR";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class StructureType
{
- eApplicationInfo = VK_STRUCTURE_TYPE_APPLICATION_INFO,
- eInstanceCreateInfo = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
- eDeviceQueueCreateInfo = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
- eDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
- eSubmitInfo = VK_STRUCTURE_TYPE_SUBMIT_INFO,
- eMemoryAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
- eMappedMemoryRange = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
- eBindSparseInfo = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO,
- eFenceCreateInfo = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
- eSemaphoreCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
- eEventCreateInfo = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
- eQueryPoolCreateInfo = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO,
- eBufferCreateInfo = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
- eBufferViewCreateInfo = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
- eImageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
- eImageViewCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
- eShaderModuleCreateInfo = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
- ePipelineCacheCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO,
- ePipelineShaderStageCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
- ePipelineVertexInputStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
- ePipelineInputAssemblyStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
- ePipelineTessellationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,
- ePipelineViewportStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
- ePipelineRasterizationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
- ePipelineMultisampleStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
- ePipelineDepthStencilStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
- ePipelineColorBlendStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
- ePipelineDynamicStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
- eGraphicsPipelineCreateInfo = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
- eComputePipelineCreateInfo = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
- ePipelineLayoutCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
- eSamplerCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
- eDescriptorSetLayoutCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
- eDescriptorPoolCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
- eDescriptorSetAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
- eWriteDescriptorSet = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
- eCopyDescriptorSet = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET,
- eFramebufferCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
- eRenderPassCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
- eCommandPoolCreateInfo = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
- eCommandBufferAllocateInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
- eCommandBufferInheritanceInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
- eCommandBufferBeginInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
- eRenderPassBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
- eBufferMemoryBarrier = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
- eImageMemoryBarrier = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
- eMemoryBarrier = VK_STRUCTURE_TYPE_MEMORY_BARRIER,
- eLoaderInstanceCreateInfo = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO,
- eLoaderDeviceCreateInfo = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO,
- ePhysicalDeviceSubgroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES,
- eBindBufferMemoryInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO,
- eBindImageMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
- ePhysicalDevice16BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES,
- eMemoryDedicatedRequirements = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
- eMemoryDedicatedAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
- eMemoryAllocateFlagsInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO,
- eDeviceGroupRenderPassBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO,
- eDeviceGroupCommandBufferBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO,
- eDeviceGroupSubmitInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO,
- eDeviceGroupBindSparseInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO,
- eBindBufferMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO,
- eBindImageMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO,
- ePhysicalDeviceGroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES,
- eDeviceGroupDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO,
- eBufferMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2,
- eImageMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
- eImageSparseMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2,
- eMemoryRequirements2 = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
- eSparseImageMemoryRequirements2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2,
- ePhysicalDeviceFeatures2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
- ePhysicalDeviceProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
- eFormatProperties2 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
- eImageFormatProperties2 = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
- ePhysicalDeviceImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
- eQueueFamilyProperties2 = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2,
- ePhysicalDeviceMemoryProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2,
- eSparseImageFormatProperties2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2,
- ePhysicalDeviceSparseImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2,
- ePhysicalDevicePointClippingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES,
- eRenderPassInputAttachmentAspectCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO,
- eImageViewUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO,
- ePipelineTessellationDomainOriginStateCreateInfo =
- VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO,
- eRenderPassMultiviewCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO,
- ePhysicalDeviceMultiviewFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES,
- ePhysicalDeviceMultiviewProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES,
- ePhysicalDeviceVariablePointersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES,
- eProtectedSubmitInfo = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO,
- ePhysicalDeviceProtectedMemoryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES,
- ePhysicalDeviceProtectedMemoryProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES,
- eDeviceQueueInfo2 = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2,
- eSamplerYcbcrConversionCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO,
- eSamplerYcbcrConversionInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO,
- eBindImagePlaneMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO,
- eImagePlaneMemoryRequirementsInfo = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO,
- ePhysicalDeviceSamplerYcbcrConversionFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES,
- eSamplerYcbcrConversionImageFormatProperties = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES,
- eDescriptorUpdateTemplateCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO,
- ePhysicalDeviceExternalImageFormatInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
- eExternalImageFormatProperties = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES,
- ePhysicalDeviceExternalBufferInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO,
- eExternalBufferProperties = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES,
- ePhysicalDeviceIdProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES,
- eExternalMemoryBufferCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO,
- eExternalMemoryImageCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
- eExportMemoryAllocateInfo = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO,
- ePhysicalDeviceExternalFenceInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO,
- eExternalFenceProperties = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES,
- eExportFenceCreateInfo = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO,
- eExportSemaphoreCreateInfo = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO,
- ePhysicalDeviceExternalSemaphoreInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO,
- eExternalSemaphoreProperties = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES,
- ePhysicalDeviceMaintenance3Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES,
- eDescriptorSetLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT,
- ePhysicalDeviceShaderDrawParametersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES,
- ePhysicalDeviceVulkan11Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES,
- ePhysicalDeviceVulkan11Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES,
- ePhysicalDeviceVulkan12Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES,
- ePhysicalDeviceVulkan12Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES,
- eImageFormatListCreateInfo = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO,
- eAttachmentDescription2 = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2,
- eAttachmentReference2 = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2,
- eSubpassDescription2 = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2,
- eSubpassDependency2 = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
- eRenderPassCreateInfo2 = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2,
- eSubpassBeginInfo = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO,
- eSubpassEndInfo = VK_STRUCTURE_TYPE_SUBPASS_END_INFO,
- ePhysicalDevice8BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES,
- ePhysicalDeviceDriverProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES,
- ePhysicalDeviceShaderAtomicInt64Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES,
- ePhysicalDeviceShaderFloat16Int8Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES,
- ePhysicalDeviceFloatControlsProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES,
- eDescriptorSetLayoutBindingFlagsCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO,
- ePhysicalDeviceDescriptorIndexingFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES,
- ePhysicalDeviceDescriptorIndexingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES,
- eDescriptorSetVariableDescriptorCountAllocateInfo =
- VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO,
- eDescriptorSetVariableDescriptorCountLayoutSupport =
- VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT,
- ePhysicalDeviceDepthStencilResolveProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES,
- eSubpassDescriptionDepthStencilResolve = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE,
- ePhysicalDeviceScalarBlockLayoutFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES,
- eImageStencilUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO,
- ePhysicalDeviceSamplerFilterMinmaxProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES,
- eSamplerReductionModeCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO,
- ePhysicalDeviceVulkanMemoryModelFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES,
- ePhysicalDeviceImagelessFramebufferFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES,
- eFramebufferAttachmentsCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO,
- eFramebufferAttachmentImageInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO,
- eRenderPassAttachmentBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO,
- ePhysicalDeviceUniformBufferStandardLayoutFeatures =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES,
- ePhysicalDeviceShaderSubgroupExtendedTypesFeatures =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES,
- ePhysicalDeviceSeparateDepthStencilLayoutsFeatures =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES,
- eAttachmentReferenceStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT,
- eAttachmentDescriptionStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT,
- ePhysicalDeviceHostQueryResetFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES,
- ePhysicalDeviceTimelineSemaphoreFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES,
- ePhysicalDeviceTimelineSemaphoreProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES,
- eSemaphoreTypeCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO,
- eTimelineSemaphoreSubmitInfo = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO,
- eSemaphoreWaitInfo = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
- eSemaphoreSignalInfo = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO,
- ePhysicalDeviceBufferDeviceAddressFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES,
- eBufferDeviceAddressInfo = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO,
- eBufferOpaqueCaptureAddressCreateInfo = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO,
- eMemoryOpaqueCaptureAddressAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO,
- eDeviceMemoryOpaqueCaptureAddressInfo = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO,
- ePhysicalDeviceVulkan13Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES,
- ePhysicalDeviceVulkan13Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES,
- ePipelineCreationFeedbackCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO,
- ePhysicalDeviceShaderTerminateInvocationFeatures =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES,
- ePhysicalDeviceToolProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES,
- ePhysicalDeviceShaderDemoteToHelperInvocationFeatures =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES,
- ePhysicalDevicePrivateDataFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES,
- eDevicePrivateDataCreateInfo = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO,
- ePrivateDataSlotCreateInfo = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO,
- ePhysicalDevicePipelineCreationCacheControlFeatures =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES,
- eMemoryBarrier2 = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2,
- eBufferMemoryBarrier2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2,
- eImageMemoryBarrier2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2,
- eDependencyInfo = VK_STRUCTURE_TYPE_DEPENDENCY_INFO,
- eSubmitInfo2 = VK_STRUCTURE_TYPE_SUBMIT_INFO_2,
- eSemaphoreSubmitInfo = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO,
- eCommandBufferSubmitInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO,
- ePhysicalDeviceSynchronization2Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES,
- ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES,
- ePhysicalDeviceImageRobustnessFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES,
- eCopyBufferInfo2 = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2,
- eCopyImageInfo2 = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2,
- eCopyBufferToImageInfo2 = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2,
- eCopyImageToBufferInfo2 = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2,
- eBlitImageInfo2 = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2,
- eResolveImageInfo2 = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2,
- eBufferCopy2 = VK_STRUCTURE_TYPE_BUFFER_COPY_2,
- eImageCopy2 = VK_STRUCTURE_TYPE_IMAGE_COPY_2,
- eImageBlit2 = VK_STRUCTURE_TYPE_IMAGE_BLIT_2,
- eBufferImageCopy2 = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2,
- eImageResolve2 = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2,
- ePhysicalDeviceSubgroupSizeControlProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES,
- ePipelineShaderStageRequiredSubgroupSizeCreateInfo =
- VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO,
- ePhysicalDeviceSubgroupSizeControlFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES,
- ePhysicalDeviceInlineUniformBlockFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES,
- ePhysicalDeviceInlineUniformBlockProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES,
- eWriteDescriptorSetInlineUniformBlock = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK,
- eDescriptorPoolInlineUniformBlockCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO,
- ePhysicalDeviceTextureCompressionAstcHdrFeatures =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES,
- eRenderingInfo = VK_STRUCTURE_TYPE_RENDERING_INFO,
- eRenderingAttachmentInfo = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO,
- ePipelineRenderingCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO,
- ePhysicalDeviceDynamicRenderingFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES,
- eCommandBufferInheritanceRenderingInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO,
- ePhysicalDeviceShaderIntegerDotProductFeatures =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES,
- ePhysicalDeviceShaderIntegerDotProductProperties =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES,
- ePhysicalDeviceTexelBufferAlignmentProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES,
- eFormatProperties3 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3,
- ePhysicalDeviceMaintenance4Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES,
- ePhysicalDeviceMaintenance4Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES,
- eDeviceBufferMemoryRequirements = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS,
- eDeviceImageMemoryRequirements = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS,
- eSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
- ePresentInfoKHR = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
- eDeviceGroupPresentCapabilitiesKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR,
- eImageSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR,
- eBindImageMemorySwapchainInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR,
- eAcquireNextImageInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR,
- eDeviceGroupPresentInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR,
- eDeviceGroupSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR,
- eDisplayModeCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR,
- eDisplaySurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR,
- eDisplayPresentInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR,
+ eApplicationInfo = VK_STRUCTURE_TYPE_APPLICATION_INFO,
+ eInstanceCreateInfo = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
+ eDeviceQueueCreateInfo = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
+ eDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
+ eSubmitInfo = VK_STRUCTURE_TYPE_SUBMIT_INFO,
+ eMemoryAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
+ eMappedMemoryRange = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
+ eBindSparseInfo = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO,
+ eFenceCreateInfo = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+ eSemaphoreCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
+ eEventCreateInfo = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
+ eQueryPoolCreateInfo = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO,
+ eBufferCreateInfo = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
+ eBufferViewCreateInfo = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
+ eImageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+ eImageViewCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+ eShaderModuleCreateInfo = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
+ ePipelineCacheCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO,
+ ePipelineShaderStageCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
+ ePipelineVertexInputStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
+ ePipelineInputAssemblyStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
+ ePipelineTessellationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,
+ ePipelineViewportStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
+ ePipelineRasterizationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
+ ePipelineMultisampleStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
+ ePipelineDepthStencilStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
+ ePipelineColorBlendStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
+ ePipelineDynamicStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
+ eGraphicsPipelineCreateInfo = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
+ eComputePipelineCreateInfo = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
+ ePipelineLayoutCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
+ eSamplerCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
+ eDescriptorSetLayoutCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
+ eDescriptorPoolCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
+ eDescriptorSetAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
+ eWriteDescriptorSet = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
+ eCopyDescriptorSet = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET,
+ eFramebufferCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
+ eRenderPassCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
+ eCommandPoolCreateInfo = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
+ eCommandBufferAllocateInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
+ eCommandBufferInheritanceInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
+ eCommandBufferBeginInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+ eRenderPassBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
+ eBufferMemoryBarrier = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
+ eImageMemoryBarrier = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+ eMemoryBarrier = VK_STRUCTURE_TYPE_MEMORY_BARRIER,
+ eLoaderInstanceCreateInfo = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO,
+ eLoaderDeviceCreateInfo = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO,
+ ePhysicalDeviceSubgroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES,
+ eBindBufferMemoryInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO,
+ eBindImageMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
+ ePhysicalDevice16BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES,
+ eMemoryDedicatedRequirements = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
+ eMemoryDedicatedAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
+ eMemoryAllocateFlagsInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO,
+ eDeviceGroupRenderPassBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO,
+ eDeviceGroupCommandBufferBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO,
+ eDeviceGroupSubmitInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO,
+ eDeviceGroupBindSparseInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO,
+ eBindBufferMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO,
+ eBindImageMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO,
+ ePhysicalDeviceGroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES,
+ eDeviceGroupDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO,
+ eBufferMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2,
+ eImageMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
+ eImageSparseMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2,
+ eMemoryRequirements2 = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
+ eSparseImageMemoryRequirements2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2,
+ ePhysicalDeviceFeatures2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
+ ePhysicalDeviceProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
+ eFormatProperties2 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
+ eImageFormatProperties2 = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
+ ePhysicalDeviceImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
+ eQueueFamilyProperties2 = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2,
+ ePhysicalDeviceMemoryProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2,
+ eSparseImageFormatProperties2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2,
+ ePhysicalDeviceSparseImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2,
+ ePhysicalDevicePointClippingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES,
+ eRenderPassInputAttachmentAspectCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO,
+ eImageViewUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO,
+ ePipelineTessellationDomainOriginStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO,
+ eRenderPassMultiviewCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO,
+ ePhysicalDeviceMultiviewFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES,
+ ePhysicalDeviceMultiviewProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES,
+ ePhysicalDeviceVariablePointersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES,
+ eProtectedSubmitInfo = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO,
+ ePhysicalDeviceProtectedMemoryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES,
+ ePhysicalDeviceProtectedMemoryProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES,
+ eDeviceQueueInfo2 = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2,
+ eSamplerYcbcrConversionCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO,
+ eSamplerYcbcrConversionInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO,
+ eBindImagePlaneMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO,
+ eImagePlaneMemoryRequirementsInfo = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO,
+ ePhysicalDeviceSamplerYcbcrConversionFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES,
+ eSamplerYcbcrConversionImageFormatProperties = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES,
+ eDescriptorUpdateTemplateCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO,
+ ePhysicalDeviceExternalImageFormatInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
+ eExternalImageFormatProperties = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES,
+ ePhysicalDeviceExternalBufferInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO,
+ eExternalBufferProperties = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES,
+ ePhysicalDeviceIdProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES,
+ eExternalMemoryBufferCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO,
+ eExternalMemoryImageCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
+ eExportMemoryAllocateInfo = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO,
+ ePhysicalDeviceExternalFenceInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO,
+ eExternalFenceProperties = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES,
+ eExportFenceCreateInfo = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO,
+ eExportSemaphoreCreateInfo = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO,
+ ePhysicalDeviceExternalSemaphoreInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO,
+ eExternalSemaphoreProperties = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES,
+ ePhysicalDeviceMaintenance3Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES,
+ eDescriptorSetLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT,
+ ePhysicalDeviceShaderDrawParametersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES,
+ ePhysicalDeviceVulkan11Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES,
+ ePhysicalDeviceVulkan11Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES,
+ ePhysicalDeviceVulkan12Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES,
+ ePhysicalDeviceVulkan12Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES,
+ eImageFormatListCreateInfo = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO,
+ eAttachmentDescription2 = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2,
+ eAttachmentReference2 = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2,
+ eSubpassDescription2 = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2,
+ eSubpassDependency2 = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
+ eRenderPassCreateInfo2 = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2,
+ eSubpassBeginInfo = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO,
+ eSubpassEndInfo = VK_STRUCTURE_TYPE_SUBPASS_END_INFO,
+ ePhysicalDevice8BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES,
+ ePhysicalDeviceDriverProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES,
+ ePhysicalDeviceShaderAtomicInt64Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES,
+ ePhysicalDeviceShaderFloat16Int8Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES,
+ ePhysicalDeviceFloatControlsProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES,
+ eDescriptorSetLayoutBindingFlagsCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO,
+ ePhysicalDeviceDescriptorIndexingFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES,
+ ePhysicalDeviceDescriptorIndexingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES,
+ eDescriptorSetVariableDescriptorCountAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO,
+ eDescriptorSetVariableDescriptorCountLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT,
+ ePhysicalDeviceDepthStencilResolveProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES,
+ eSubpassDescriptionDepthStencilResolve = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE,
+ ePhysicalDeviceScalarBlockLayoutFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES,
+ eImageStencilUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO,
+ ePhysicalDeviceSamplerFilterMinmaxProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES,
+ eSamplerReductionModeCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO,
+ ePhysicalDeviceVulkanMemoryModelFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES,
+ ePhysicalDeviceImagelessFramebufferFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES,
+ eFramebufferAttachmentsCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO,
+ eFramebufferAttachmentImageInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO,
+ eRenderPassAttachmentBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO,
+ ePhysicalDeviceUniformBufferStandardLayoutFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES,
+ ePhysicalDeviceShaderSubgroupExtendedTypesFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES,
+ ePhysicalDeviceSeparateDepthStencilLayoutsFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES,
+ eAttachmentReferenceStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT,
+ eAttachmentDescriptionStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT,
+ ePhysicalDeviceHostQueryResetFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES,
+ ePhysicalDeviceTimelineSemaphoreFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES,
+ ePhysicalDeviceTimelineSemaphoreProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES,
+ eSemaphoreTypeCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO,
+ eTimelineSemaphoreSubmitInfo = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO,
+ eSemaphoreWaitInfo = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
+ eSemaphoreSignalInfo = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO,
+ ePhysicalDeviceBufferDeviceAddressFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES,
+ eBufferDeviceAddressInfo = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO,
+ eBufferOpaqueCaptureAddressCreateInfo = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO,
+ eMemoryOpaqueCaptureAddressAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO,
+ eDeviceMemoryOpaqueCaptureAddressInfo = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO,
+ ePhysicalDeviceVulkan13Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES,
+ ePhysicalDeviceVulkan13Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES,
+ ePipelineCreationFeedbackCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO,
+ ePhysicalDeviceShaderTerminateInvocationFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES,
+ ePhysicalDeviceToolProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES,
+ ePhysicalDeviceShaderDemoteToHelperInvocationFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES,
+ ePhysicalDevicePrivateDataFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES,
+ eDevicePrivateDataCreateInfo = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO,
+ ePrivateDataSlotCreateInfo = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO,
+ ePhysicalDevicePipelineCreationCacheControlFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES,
+ eMemoryBarrier2 = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2,
+ eBufferMemoryBarrier2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2,
+ eImageMemoryBarrier2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2,
+ eDependencyInfo = VK_STRUCTURE_TYPE_DEPENDENCY_INFO,
+ eSubmitInfo2 = VK_STRUCTURE_TYPE_SUBMIT_INFO_2,
+ eSemaphoreSubmitInfo = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO,
+ eCommandBufferSubmitInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO,
+ ePhysicalDeviceSynchronization2Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES,
+ ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES,
+ ePhysicalDeviceImageRobustnessFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES,
+ eCopyBufferInfo2 = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2,
+ eCopyImageInfo2 = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2,
+ eCopyBufferToImageInfo2 = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2,
+ eCopyImageToBufferInfo2 = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2,
+ eBlitImageInfo2 = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2,
+ eResolveImageInfo2 = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2,
+ eBufferCopy2 = VK_STRUCTURE_TYPE_BUFFER_COPY_2,
+ eImageCopy2 = VK_STRUCTURE_TYPE_IMAGE_COPY_2,
+ eImageBlit2 = VK_STRUCTURE_TYPE_IMAGE_BLIT_2,
+ eBufferImageCopy2 = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2,
+ eImageResolve2 = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2,
+ ePhysicalDeviceSubgroupSizeControlProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES,
+ ePipelineShaderStageRequiredSubgroupSizeCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO,
+ ePhysicalDeviceSubgroupSizeControlFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES,
+ ePhysicalDeviceInlineUniformBlockFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES,
+ ePhysicalDeviceInlineUniformBlockProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES,
+ eWriteDescriptorSetInlineUniformBlock = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK,
+ eDescriptorPoolInlineUniformBlockCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO,
+ ePhysicalDeviceTextureCompressionAstcHdrFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES,
+ eRenderingInfo = VK_STRUCTURE_TYPE_RENDERING_INFO,
+ eRenderingAttachmentInfo = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO,
+ ePipelineRenderingCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO,
+ ePhysicalDeviceDynamicRenderingFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES,
+ eCommandBufferInheritanceRenderingInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO,
+ ePhysicalDeviceShaderIntegerDotProductFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES,
+ ePhysicalDeviceShaderIntegerDotProductProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES,
+ ePhysicalDeviceTexelBufferAlignmentProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES,
+ eFormatProperties3 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3,
+ ePhysicalDeviceMaintenance4Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES,
+ ePhysicalDeviceMaintenance4Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES,
+ eDeviceBufferMemoryRequirements = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS,
+ eDeviceImageMemoryRequirements = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS,
+ eSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
+ ePresentInfoKHR = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
+ eDeviceGroupPresentCapabilitiesKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR,
+ eImageSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR,
+ eBindImageMemorySwapchainInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR,
+ eAcquireNextImageInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR,
+ eDeviceGroupPresentInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR,
+ eDeviceGroupSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR,
+ eDisplayModeCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR,
+ eDisplaySurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR,
+ eDisplayPresentInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR,
#if defined( VK_USE_PLATFORM_XLIB_KHR )
eXlibSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR,
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
@@ -391,87 +326,79 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VK_USE_PLATFORM_WIN32_KHR )
eWin32SurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR,
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- eDebugReportCallbackCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT,
- ePipelineRasterizationStateRasterizationOrderAMD =
- VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD,
- eDebugMarkerObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT,
- eDebugMarkerObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT,
- eDebugMarkerMarkerInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT,
+ eDebugReportCallbackCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT,
+ ePipelineRasterizationStateRasterizationOrderAMD = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD,
+ eDebugMarkerObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT,
+ eDebugMarkerObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT,
+ eDebugMarkerMarkerInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- eVideoProfileKHR = VK_STRUCTURE_TYPE_VIDEO_PROFILE_KHR,
- eVideoCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR,
- eVideoPictureResourceKHR = VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_KHR,
- eVideoGetMemoryPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_GET_MEMORY_PROPERTIES_KHR,
- eVideoBindMemoryKHR = VK_STRUCTURE_TYPE_VIDEO_BIND_MEMORY_KHR,
- eVideoSessionCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR,
- eVideoSessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR,
- eVideoSessionParametersUpdateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR,
- eVideoBeginCodingInfoKHR = VK_STRUCTURE_TYPE_VIDEO_BEGIN_CODING_INFO_KHR,
- eVideoEndCodingInfoKHR = VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR,
- eVideoCodingControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_CODING_CONTROL_INFO_KHR,
- eVideoReferenceSlotKHR = VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_KHR,
- eVideoQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_VIDEO_QUEUE_FAMILY_PROPERTIES_2_KHR,
- eVideoProfilesKHR = VK_STRUCTURE_TYPE_VIDEO_PROFILES_KHR,
- ePhysicalDeviceVideoFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR,
- eVideoFormatPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_FORMAT_PROPERTIES_KHR,
- eQueueFamilyQueryResultStatusProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_2_KHR,
- eVideoDecodeInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_INFO_KHR,
+ eVideoProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_PROFILE_INFO_KHR,
+ eVideoCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR,
+ eVideoPictureResourceInfoKHR = VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_INFO_KHR,
+ eVideoSessionMemoryRequirementsKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR,
+ eBindVideoSessionMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_VIDEO_SESSION_MEMORY_INFO_KHR,
+ eVideoSessionCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR,
+ eVideoSessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR,
+ eVideoSessionParametersUpdateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR,
+ eVideoBeginCodingInfoKHR = VK_STRUCTURE_TYPE_VIDEO_BEGIN_CODING_INFO_KHR,
+ eVideoEndCodingInfoKHR = VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR,
+ eVideoCodingControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_CODING_CONTROL_INFO_KHR,
+ eVideoReferenceSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_INFO_KHR,
+ eQueueFamilyVideoPropertiesKHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_VIDEO_PROPERTIES_KHR,
+ eVideoProfileListInfoKHR = VK_STRUCTURE_TYPE_VIDEO_PROFILE_LIST_INFO_KHR,
+ ePhysicalDeviceVideoFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR,
+ eVideoFormatPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_FORMAT_PROPERTIES_KHR,
+ eQueueFamilyQueryResultStatusPropertiesKHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR,
+ eVideoDecodeInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_INFO_KHR,
+ eVideoDecodeCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_CAPABILITIES_KHR,
+ eVideoDecodeUsageInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_USAGE_INFO_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- eDedicatedAllocationImageCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV,
- eDedicatedAllocationBufferCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV,
- eDedicatedAllocationMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV,
- ePhysicalDeviceTransformFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT,
- ePhysicalDeviceTransformFeedbackPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT,
- ePipelineRasterizationStateStreamCreateInfoEXT =
- VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT,
- eCuModuleCreateInfoNVX = VK_STRUCTURE_TYPE_CU_MODULE_CREATE_INFO_NVX,
- eCuFunctionCreateInfoNVX = VK_STRUCTURE_TYPE_CU_FUNCTION_CREATE_INFO_NVX,
- eCuLaunchInfoNVX = VK_STRUCTURE_TYPE_CU_LAUNCH_INFO_NVX,
- eImageViewHandleInfoNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX,
- eImageViewAddressPropertiesNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX,
+ eDedicatedAllocationImageCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV,
+ eDedicatedAllocationBufferCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV,
+ eDedicatedAllocationMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV,
+ ePhysicalDeviceTransformFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT,
+ ePhysicalDeviceTransformFeedbackPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT,
+ ePipelineRasterizationStateStreamCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT,
+ eCuModuleCreateInfoNVX = VK_STRUCTURE_TYPE_CU_MODULE_CREATE_INFO_NVX,
+ eCuFunctionCreateInfoNVX = VK_STRUCTURE_TYPE_CU_FUNCTION_CREATE_INFO_NVX,
+ eCuLaunchInfoNVX = VK_STRUCTURE_TYPE_CU_LAUNCH_INFO_NVX,
+ eImageViewHandleInfoNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX,
+ eImageViewAddressPropertiesNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- eVideoEncodeH264CapabilitiesEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_EXT,
- eVideoEncodeH264SessionCreateInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_CREATE_INFO_EXT,
- eVideoEncodeH264SessionParametersCreateInfoEXT =
- VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT,
- eVideoEncodeH264SessionParametersAddInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT,
- eVideoEncodeH264VclFrameInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_VCL_FRAME_INFO_EXT,
- eVideoEncodeH264DpbSlotInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_EXT,
- eVideoEncodeH264NaluSliceEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_EXT,
- eVideoEncodeH264EmitPictureParametersEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_EMIT_PICTURE_PARAMETERS_EXT,
- eVideoEncodeH264ProfileEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_EXT,
- eVideoEncodeH264RateControlInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_INFO_EXT,
- eVideoEncodeH264RateControlLayerInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_EXT,
- eVideoEncodeH265CapabilitiesEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_CAPABILITIES_EXT,
- eVideoEncodeH265SessionCreateInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_CREATE_INFO_EXT,
- eVideoEncodeH265SessionParametersCreateInfoEXT =
- VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT,
- eVideoEncodeH265SessionParametersAddInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT,
- eVideoEncodeH265VclFrameInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_VCL_FRAME_INFO_EXT,
- eVideoEncodeH265DpbSlotInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_DPB_SLOT_INFO_EXT,
- eVideoEncodeH265NaluSliceSegmentEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_EXT,
- eVideoEncodeH265EmitPictureParametersEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_EMIT_PICTURE_PARAMETERS_EXT,
- eVideoEncodeH265ProfileEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PROFILE_EXT,
- eVideoEncodeH265ReferenceListsEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_REFERENCE_LISTS_EXT,
- eVideoEncodeH265RateControlInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_INFO_EXT,
- eVideoEncodeH265RateControlLayerInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_EXT,
- eVideoDecodeH264CapabilitiesEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_EXT,
- eVideoDecodeH264SessionCreateInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_CREATE_INFO_EXT,
- eVideoDecodeH264PictureInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_EXT,
- eVideoDecodeH264MvcEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_MVC_EXT,
- eVideoDecodeH264ProfileEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_EXT,
- eVideoDecodeH264SessionParametersCreateInfoEXT =
- VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT,
- eVideoDecodeH264SessionParametersAddInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT,
- eVideoDecodeH264DpbSlotInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_EXT,
+ eVideoEncodeH264CapabilitiesEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_EXT,
+ eVideoEncodeH264SessionParametersCreateInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT,
+ eVideoEncodeH264SessionParametersAddInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT,
+ eVideoEncodeH264VclFrameInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_VCL_FRAME_INFO_EXT,
+ eVideoEncodeH264DpbSlotInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_EXT,
+ eVideoEncodeH264NaluSliceInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_INFO_EXT,
+ eVideoEncodeH264EmitPictureParametersInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_EMIT_PICTURE_PARAMETERS_INFO_EXT,
+ eVideoEncodeH264ProfileInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_INFO_EXT,
+ eVideoEncodeH264RateControlInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_INFO_EXT,
+ eVideoEncodeH264RateControlLayerInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_EXT,
+ eVideoEncodeH264ReferenceListsInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_REFERENCE_LISTS_INFO_EXT,
+ eVideoEncodeH265CapabilitiesEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_CAPABILITIES_EXT,
+ eVideoEncodeH265SessionParametersCreateInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT,
+ eVideoEncodeH265SessionParametersAddInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT,
+ eVideoEncodeH265VclFrameInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_VCL_FRAME_INFO_EXT,
+ eVideoEncodeH265DpbSlotInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_DPB_SLOT_INFO_EXT,
+ eVideoEncodeH265NaluSliceSegmentInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_EXT,
+ eVideoEncodeH265EmitPictureParametersInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_EMIT_PICTURE_PARAMETERS_INFO_EXT,
+ eVideoEncodeH265ProfileInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PROFILE_INFO_EXT,
+ eVideoEncodeH265ReferenceListsInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_REFERENCE_LISTS_INFO_EXT,
+ eVideoEncodeH265RateControlInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_INFO_EXT,
+ eVideoEncodeH265RateControlLayerInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_EXT,
+ eVideoDecodeH264CapabilitiesEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_EXT,
+ eVideoDecodeH264PictureInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_EXT,
+ eVideoDecodeH264ProfileInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_INFO_EXT,
+ eVideoDecodeH264SessionParametersCreateInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT,
+ eVideoDecodeH264SessionParametersAddInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT,
+ eVideoDecodeH264DpbSlotInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_EXT,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- eTextureLodGatherFormatPropertiesAMD = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD,
- eRenderingFragmentShadingRateAttachmentInfoKHR =
- VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR,
- eRenderingFragmentDensityMapAttachmentInfoEXT =
- VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT,
- eAttachmentSampleCountInfoAMD = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD,
- eMultiviewPerViewAttributesInfoNVX = VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX,
+ eTextureLodGatherFormatPropertiesAMD = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD,
+ eRenderingFragmentShadingRateAttachmentInfoKHR = VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR,
+ eRenderingFragmentDensityMapAttachmentInfoEXT = VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT,
+ eAttachmentSampleCountInfoAMD = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD,
+ eMultiviewPerViewAttributesInfoNVX = VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX,
#if defined( VK_USE_PLATFORM_GGP )
eStreamDescriptorSurfaceCreateInfoGGP = VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP,
#endif /*VK_USE_PLATFORM_GGP*/
@@ -487,8 +414,11 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VK_USE_PLATFORM_VI_NN )
eViSurfaceCreateInfoNN = VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN,
#endif /*VK_USE_PLATFORM_VI_NN*/
- eImageViewAstcDecodeModeEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT,
- ePhysicalDeviceAstcDecodeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT,
+ eImageViewAstcDecodeModeEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT,
+ ePhysicalDeviceAstcDecodeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT,
+ ePipelineRobustnessCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO_EXT,
+ ePhysicalDevicePipelineRobustnessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT,
+ ePhysicalDevicePipelineRobustnessPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT,
#if defined( VK_USE_PLATFORM_WIN32_KHR )
eImportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
eExportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
@@ -505,36 +435,30 @@ namespace VULKAN_HPP_NAMESPACE
eD3D12FenceSubmitInfoKHR = VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR,
eSemaphoreGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR,
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- eImportSemaphoreFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR,
- eSemaphoreGetFdInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR,
- ePhysicalDevicePushDescriptorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR,
- eCommandBufferInheritanceConditionalRenderingInfoEXT =
- VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT,
- ePhysicalDeviceConditionalRenderingFeaturesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT,
- eConditionalRenderingBeginInfoEXT = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT,
- ePresentRegionsKHR = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR,
- ePipelineViewportWScalingStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV,
- eSurfaceCapabilities2EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT,
- eDisplayPowerInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT,
- eDeviceEventInfoEXT = VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT,
- eDisplayEventInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT,
- eSwapchainCounterCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT,
- ePresentTimesInfoGOOGLE = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE,
- ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX,
- ePipelineViewportSwizzleStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV,
- ePhysicalDeviceDiscardRectanglePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT,
- ePipelineDiscardRectangleStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT,
- ePhysicalDeviceConservativeRasterizationPropertiesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT,
- ePipelineRasterizationConservativeStateCreateInfoEXT =
- VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT,
- ePhysicalDeviceDepthClipEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT,
- ePipelineRasterizationDepthClipStateCreateInfoEXT =
- VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT,
- eHdrMetadataEXT = VK_STRUCTURE_TYPE_HDR_METADATA_EXT,
- eSharedPresentSurfaceCapabilitiesKHR = VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR,
+ eImportSemaphoreFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR,
+ eSemaphoreGetFdInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR,
+ ePhysicalDevicePushDescriptorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR,
+ eCommandBufferInheritanceConditionalRenderingInfoEXT = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT,
+ ePhysicalDeviceConditionalRenderingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT,
+ eConditionalRenderingBeginInfoEXT = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT,
+ ePresentRegionsKHR = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR,
+ ePipelineViewportWScalingStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV,
+ eSurfaceCapabilities2EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT,
+ eDisplayPowerInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT,
+ eDeviceEventInfoEXT = VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT,
+ eDisplayEventInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT,
+ eSwapchainCounterCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT,
+ ePresentTimesInfoGOOGLE = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE,
+ ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX,
+ ePipelineViewportSwizzleStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV,
+ ePhysicalDeviceDiscardRectanglePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT,
+ ePipelineDiscardRectangleStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT,
+ ePhysicalDeviceConservativeRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT,
+ ePipelineRasterizationConservativeStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT,
+ ePhysicalDeviceDepthClipEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT,
+ ePipelineRasterizationDepthClipStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT,
+ eHdrMetadataEXT = VK_STRUCTURE_TYPE_HDR_METADATA_EXT,
+ eSharedPresentSurfaceCapabilitiesKHR = VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR,
#if defined( VK_USE_PLATFORM_WIN32_KHR )
eImportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR,
eExportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR,
@@ -569,299 +493,272 @@ namespace VULKAN_HPP_NAMESPACE
eDebugUtilsMessengerCallbackDataEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT,
eDebugUtilsMessengerCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT,
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
- eAndroidHardwareBufferUsageANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID,
- eAndroidHardwareBufferPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID,
- eAndroidHardwareBufferFormatPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID,
- eImportAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID,
- eMemoryGetAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID,
- eExternalFormatANDROID = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID,
- eAndroidHardwareBufferFormatProperties2ANDROID =
- VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID,
+ eAndroidHardwareBufferUsageANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID,
+ eAndroidHardwareBufferPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID,
+ eAndroidHardwareBufferFormatPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID,
+ eImportAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID,
+ eMemoryGetAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID,
+ eExternalFormatANDROID = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID,
+ eAndroidHardwareBufferFormatProperties2ANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID,
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
- eSampleLocationsInfoEXT = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT,
- eRenderPassSampleLocationsBeginInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT,
- ePipelineSampleLocationsStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT,
- ePhysicalDeviceSampleLocationsPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT,
- eMultisamplePropertiesEXT = VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT,
- ePhysicalDeviceBlendOperationAdvancedFeaturesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT,
- ePhysicalDeviceBlendOperationAdvancedPropertiesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT,
- ePipelineColorBlendAdvancedStateCreateInfoEXT =
- VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT,
- ePipelineCoverageToColorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV,
- eWriteDescriptorSetAccelerationStructureKHR = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR,
- eAccelerationStructureBuildGeometryInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR,
- eAccelerationStructureDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR,
- eAccelerationStructureGeometryAabbsDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR,
- eAccelerationStructureGeometryInstancesDataKHR =
- VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR,
- eAccelerationStructureGeometryTrianglesDataKHR =
- VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR,
- eAccelerationStructureGeometryKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR,
- eAccelerationStructureVersionInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_INFO_KHR,
- eCopyAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR,
- eCopyAccelerationStructureToMemoryInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR,
- eCopyMemoryToAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR,
- ePhysicalDeviceAccelerationStructureFeaturesKHR =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR,
- ePhysicalDeviceAccelerationStructurePropertiesKHR =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR,
- eAccelerationStructureCreateInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR,
- eAccelerationStructureBuildSizesInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR,
- ePhysicalDeviceRayTracingPipelineFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR,
- ePhysicalDeviceRayTracingPipelinePropertiesKHR =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR,
- eRayTracingPipelineCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR,
- eRayTracingShaderGroupCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR,
- eRayTracingPipelineInterfaceCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR,
- ePhysicalDeviceRayQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR,
- ePipelineCoverageModulationStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV,
- ePhysicalDeviceShaderSmBuiltinsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV,
- ePhysicalDeviceShaderSmBuiltinsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV,
- eDrmFormatModifierPropertiesListEXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
- ePhysicalDeviceImageDrmFormatModifierInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT,
- eImageDrmFormatModifierListCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT,
- eImageDrmFormatModifierExplicitCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT,
- eImageDrmFormatModifierPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
- eDrmFormatModifierPropertiesList2EXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_2_EXT,
- eValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT,
- eShaderModuleValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT,
+ eSampleLocationsInfoEXT = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT,
+ eRenderPassSampleLocationsBeginInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT,
+ ePipelineSampleLocationsStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT,
+ ePhysicalDeviceSampleLocationsPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT,
+ eMultisamplePropertiesEXT = VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT,
+ ePhysicalDeviceBlendOperationAdvancedFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT,
+ ePhysicalDeviceBlendOperationAdvancedPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT,
+ ePipelineColorBlendAdvancedStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT,
+ ePipelineCoverageToColorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV,
+ eWriteDescriptorSetAccelerationStructureKHR = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR,
+ eAccelerationStructureBuildGeometryInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR,
+ eAccelerationStructureDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR,
+ eAccelerationStructureGeometryAabbsDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR,
+ eAccelerationStructureGeometryInstancesDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR,
+ eAccelerationStructureGeometryTrianglesDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR,
+ eAccelerationStructureGeometryKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR,
+ eAccelerationStructureVersionInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_INFO_KHR,
+ eCopyAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR,
+ eCopyAccelerationStructureToMemoryInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR,
+ eCopyMemoryToAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR,
+ ePhysicalDeviceAccelerationStructureFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR,
+ ePhysicalDeviceAccelerationStructurePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR,
+ eAccelerationStructureCreateInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR,
+ eAccelerationStructureBuildSizesInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR,
+ ePhysicalDeviceRayTracingPipelineFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR,
+ ePhysicalDeviceRayTracingPipelinePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR,
+ eRayTracingPipelineCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR,
+ eRayTracingShaderGroupCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR,
+ eRayTracingPipelineInterfaceCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR,
+ ePhysicalDeviceRayQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR,
+ ePipelineCoverageModulationStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV,
+ ePhysicalDeviceShaderSmBuiltinsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV,
+ ePhysicalDeviceShaderSmBuiltinsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV,
+ eDrmFormatModifierPropertiesListEXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
+ ePhysicalDeviceImageDrmFormatModifierInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT,
+ eImageDrmFormatModifierListCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT,
+ eImageDrmFormatModifierExplicitCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT,
+ eImageDrmFormatModifierPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
+ eDrmFormatModifierPropertiesList2EXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_2_EXT,
+ eValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT,
+ eShaderModuleValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
ePhysicalDevicePortabilitySubsetFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR,
ePhysicalDevicePortabilitySubsetPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- ePipelineViewportShadingRateImageStateCreateInfoNV =
- VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV,
- ePhysicalDeviceShadingRateImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV,
- ePhysicalDeviceShadingRateImagePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV,
- ePipelineViewportCoarseSampleOrderStateCreateInfoNV =
- VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV,
- eRayTracingPipelineCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV,
- eAccelerationStructureCreateInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV,
- eGeometryNV = VK_STRUCTURE_TYPE_GEOMETRY_NV,
- eGeometryTrianglesNV = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV,
- eGeometryAabbNV = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV,
- eBindAccelerationStructureMemoryInfoNV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV,
- eWriteDescriptorSetAccelerationStructureNV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV,
- eAccelerationStructureMemoryRequirementsInfoNV =
- VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV,
- ePhysicalDeviceRayTracingPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV,
- eRayTracingShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV,
- eAccelerationStructureInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV,
- ePhysicalDeviceRepresentativeFragmentTestFeaturesNV =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV,
- ePipelineRepresentativeFragmentTestStateCreateInfoNV =
- VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV,
- ePhysicalDeviceImageViewImageFormatInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT,
- eFilterCubicImageViewImageFormatPropertiesEXT =
- VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT,
- eImportMemoryHostPointerInfoEXT = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
- eMemoryHostPointerPropertiesEXT = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT,
- ePhysicalDeviceExternalMemoryHostPropertiesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT,
- ePhysicalDeviceShaderClockFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR,
- ePipelineCompilerControlCreateInfoAMD = VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD,
- eCalibratedTimestampInfoEXT = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT,
- ePhysicalDeviceShaderCorePropertiesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD,
+ ePipelineViewportShadingRateImageStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV,
+ ePhysicalDeviceShadingRateImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV,
+ ePhysicalDeviceShadingRateImagePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV,
+ ePipelineViewportCoarseSampleOrderStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV,
+ eRayTracingPipelineCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV,
+ eAccelerationStructureCreateInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV,
+ eGeometryNV = VK_STRUCTURE_TYPE_GEOMETRY_NV,
+ eGeometryTrianglesNV = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV,
+ eGeometryAabbNV = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV,
+ eBindAccelerationStructureMemoryInfoNV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV,
+ eWriteDescriptorSetAccelerationStructureNV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV,
+ eAccelerationStructureMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV,
+ ePhysicalDeviceRayTracingPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV,
+ eRayTracingShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV,
+ eAccelerationStructureInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV,
+ ePhysicalDeviceRepresentativeFragmentTestFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV,
+ ePipelineRepresentativeFragmentTestStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV,
+ ePhysicalDeviceImageViewImageFormatInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT,
+ eFilterCubicImageViewImageFormatPropertiesEXT = VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT,
+ eImportMemoryHostPointerInfoEXT = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
+ eMemoryHostPointerPropertiesEXT = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT,
+ ePhysicalDeviceExternalMemoryHostPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT,
+ ePhysicalDeviceShaderClockFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR,
+ ePipelineCompilerControlCreateInfoAMD = VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD,
+ eCalibratedTimestampInfoEXT = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT,
+ ePhysicalDeviceShaderCorePropertiesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- eVideoDecodeH265CapabilitiesEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_EXT,
- eVideoDecodeH265SessionCreateInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_CREATE_INFO_EXT,
- eVideoDecodeH265SessionParametersCreateInfoEXT =
- VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT,
- eVideoDecodeH265SessionParametersAddInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT,
- eVideoDecodeH265ProfileEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_EXT,
- eVideoDecodeH265PictureInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_EXT,
- eVideoDecodeH265DpbSlotInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_EXT,
+ eVideoDecodeH265CapabilitiesEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_EXT,
+ eVideoDecodeH265SessionParametersCreateInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT,
+ eVideoDecodeH265SessionParametersAddInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT,
+ eVideoDecodeH265ProfileInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_INFO_EXT,
+ eVideoDecodeH265PictureInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_EXT,
+ eVideoDecodeH265DpbSlotInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_EXT,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- eDeviceQueueGlobalPriorityCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR,
- ePhysicalDeviceGlobalPriorityQueryFeaturesKHR =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR,
- eQueueFamilyGlobalPriorityPropertiesKHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR,
- eDeviceMemoryOverallocationCreateInfoAMD = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD,
- ePhysicalDeviceVertexAttributeDivisorPropertiesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT,
- ePipelineVertexInputDivisorStateCreateInfoEXT =
- VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT,
- ePhysicalDeviceVertexAttributeDivisorFeaturesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT,
+ eDeviceQueueGlobalPriorityCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR,
+ ePhysicalDeviceGlobalPriorityQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR,
+ eQueueFamilyGlobalPriorityPropertiesKHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR,
+ eDeviceMemoryOverallocationCreateInfoAMD = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD,
+ ePhysicalDeviceVertexAttributeDivisorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT,
+ ePipelineVertexInputDivisorStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT,
+ ePhysicalDeviceVertexAttributeDivisorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT,
#if defined( VK_USE_PLATFORM_GGP )
ePresentFrameTokenGGP = VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP,
#endif /*VK_USE_PLATFORM_GGP*/
- ePhysicalDeviceComputeShaderDerivativesFeaturesNV =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV,
- ePhysicalDeviceMeshShaderFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV,
- ePhysicalDeviceMeshShaderPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV,
- ePhysicalDeviceFragmentShaderBarycentricFeaturesNV =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV,
- ePhysicalDeviceShaderImageFootprintFeaturesNV =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV,
- ePipelineViewportExclusiveScissorStateCreateInfoNV =
- VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV,
- ePhysicalDeviceExclusiveScissorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV,
- eCheckpointDataNV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV,
- eQueueFamilyCheckpointPropertiesNV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV,
- ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL,
- eQueryPoolPerformanceQueryCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL,
- eInitializePerformanceApiInfoINTEL = VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL,
- ePerformanceMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL,
- ePerformanceStreamMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL,
- ePerformanceOverrideInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL,
- ePerformanceConfigurationAcquireInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL,
- ePhysicalDevicePciBusInfoPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT,
- eDisplayNativeHdrSurfaceCapabilitiesAMD = VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD,
- eSwapchainDisplayNativeHdrCreateInfoAMD = VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD,
+ ePhysicalDeviceComputeShaderDerivativesFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV,
+ ePhysicalDeviceMeshShaderFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV,
+ ePhysicalDeviceMeshShaderPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV,
+ ePhysicalDeviceShaderImageFootprintFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV,
+ ePipelineViewportExclusiveScissorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV,
+ ePhysicalDeviceExclusiveScissorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV,
+ eCheckpointDataNV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV,
+ eQueueFamilyCheckpointPropertiesNV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV,
+ ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL,
+ eQueryPoolPerformanceQueryCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL,
+ eInitializePerformanceApiInfoINTEL = VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL,
+ ePerformanceMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL,
+ ePerformanceStreamMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL,
+ ePerformanceOverrideInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL,
+ ePerformanceConfigurationAcquireInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL,
+ ePhysicalDevicePciBusInfoPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT,
+ eDisplayNativeHdrSurfaceCapabilitiesAMD = VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD,
+ eSwapchainDisplayNativeHdrCreateInfoAMD = VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD,
#if defined( VK_USE_PLATFORM_FUCHSIA )
eImagepipeSurfaceCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA,
#endif /*VK_USE_PLATFORM_FUCHSIA*/
#if defined( VK_USE_PLATFORM_METAL_EXT )
eMetalSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT,
#endif /*VK_USE_PLATFORM_METAL_EXT*/
- ePhysicalDeviceFragmentDensityMapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT,
- ePhysicalDeviceFragmentDensityMapPropertiesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT,
- eRenderPassFragmentDensityMapCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT,
- eFragmentShadingRateAttachmentInfoKHR = VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR,
- ePipelineFragmentShadingRateStateCreateInfoKHR =
- VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR,
- ePhysicalDeviceFragmentShadingRatePropertiesKHR =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR,
- ePhysicalDeviceFragmentShadingRateFeaturesKHR =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR,
- ePhysicalDeviceFragmentShadingRateKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR,
- ePhysicalDeviceShaderCoreProperties2AMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD,
- ePhysicalDeviceCoherentMemoryFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD,
- ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT,
- ePhysicalDeviceMemoryBudgetPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT,
- ePhysicalDeviceMemoryPriorityFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT,
- eMemoryPriorityAllocateInfoEXT = VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT,
- eSurfaceProtectedCapabilitiesKHR = VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR,
- ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV,
- ePhysicalDeviceBufferDeviceAddressFeaturesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT,
- eBufferDeviceAddressCreateInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT,
- eValidationFeaturesEXT = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT,
- ePhysicalDevicePresentWaitFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR,
- ePhysicalDeviceCooperativeMatrixFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV,
- eCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV,
- ePhysicalDeviceCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV,
- ePhysicalDeviceCoverageReductionModeFeaturesNV =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV,
- ePipelineCoverageReductionStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV,
- eFramebufferMixedSamplesCombinationNV = VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV,
- ePhysicalDeviceFragmentShaderInterlockFeaturesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT,
- ePhysicalDeviceYcbcrImageArraysFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT,
- ePhysicalDeviceProvokingVertexFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT,
- ePipelineRasterizationProvokingVertexStateCreateInfoEXT =
- VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT,
- ePhysicalDeviceProvokingVertexPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT,
+ ePhysicalDeviceFragmentDensityMapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT,
+ ePhysicalDeviceFragmentDensityMapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT,
+ eRenderPassFragmentDensityMapCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT,
+ eFragmentShadingRateAttachmentInfoKHR = VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR,
+ ePipelineFragmentShadingRateStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR,
+ ePhysicalDeviceFragmentShadingRatePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR,
+ ePhysicalDeviceFragmentShadingRateFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR,
+ ePhysicalDeviceFragmentShadingRateKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR,
+ ePhysicalDeviceShaderCoreProperties2AMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD,
+ ePhysicalDeviceCoherentMemoryFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD,
+ ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT,
+ ePhysicalDeviceMemoryBudgetPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT,
+ ePhysicalDeviceMemoryPriorityFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT,
+ eMemoryPriorityAllocateInfoEXT = VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT,
+ eSurfaceProtectedCapabilitiesKHR = VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR,
+ ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV,
+ ePhysicalDeviceBufferDeviceAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT,
+ eBufferDeviceAddressCreateInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT,
+ eValidationFeaturesEXT = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT,
+ ePhysicalDevicePresentWaitFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR,
+ ePhysicalDeviceCooperativeMatrixFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV,
+ eCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV,
+ ePhysicalDeviceCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV,
+ ePhysicalDeviceCoverageReductionModeFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV,
+ ePipelineCoverageReductionStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV,
+ eFramebufferMixedSamplesCombinationNV = VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV,
+ ePhysicalDeviceFragmentShaderInterlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT,
+ ePhysicalDeviceYcbcrImageArraysFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT,
+ ePhysicalDeviceProvokingVertexFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT,
+ ePipelineRasterizationProvokingVertexStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT,
+ ePhysicalDeviceProvokingVertexPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT,
#if defined( VK_USE_PLATFORM_WIN32_KHR )
eSurfaceFullScreenExclusiveInfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT,
eSurfaceCapabilitiesFullScreenExclusiveEXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT,
eSurfaceFullScreenExclusiveWin32InfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT,
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- eHeadlessSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT,
- ePhysicalDeviceLineRasterizationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT,
- ePipelineRasterizationLineStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT,
- ePhysicalDeviceLineRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT,
- ePhysicalDeviceShaderAtomicFloatFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT,
- ePhysicalDeviceIndexTypeUint8FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT,
- ePhysicalDeviceExtendedDynamicStateFeaturesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT,
- ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR,
- ePipelineInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR,
- ePipelineExecutablePropertiesKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR,
- ePipelineExecutableInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR,
- ePipelineExecutableStatisticKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR,
- ePipelineExecutableInternalRepresentationKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR,
- ePhysicalDeviceShaderAtomicFloat2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT,
- ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV,
- eGraphicsShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV,
- eGraphicsPipelineShaderGroupsCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV,
- eIndirectCommandsLayoutTokenNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV,
- eIndirectCommandsLayoutCreateInfoNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV,
- eGeneratedCommandsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV,
- eGeneratedCommandsMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV,
- ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV,
- ePhysicalDeviceInheritedViewportScissorFeaturesNV =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV,
- eCommandBufferInheritanceViewportScissorInfoNV =
- VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV,
- ePhysicalDeviceTexelBufferAlignmentFeaturesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT,
- eCommandBufferInheritanceRenderPassTransformInfoQCOM =
- VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM,
- eRenderPassTransformBeginInfoQCOM = VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM,
- ePhysicalDeviceDeviceMemoryReportFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT,
- eDeviceDeviceMemoryReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT,
- eDeviceMemoryReportCallbackDataEXT = VK_STRUCTURE_TYPE_DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT,
- ePhysicalDeviceRobustness2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT,
- ePhysicalDeviceRobustness2PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT,
- eSamplerCustomBorderColorCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT,
- ePhysicalDeviceCustomBorderColorPropertiesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT,
- ePhysicalDeviceCustomBorderColorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT,
- ePipelineLibraryCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR,
- ePresentIdKHR = VK_STRUCTURE_TYPE_PRESENT_ID_KHR,
- ePhysicalDevicePresentIdFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR,
+ eHeadlessSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT,
+ ePhysicalDeviceLineRasterizationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT,
+ ePipelineRasterizationLineStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT,
+ ePhysicalDeviceLineRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT,
+ ePhysicalDeviceShaderAtomicFloatFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT,
+ ePhysicalDeviceIndexTypeUint8FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT,
+ ePhysicalDeviceExtendedDynamicStateFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT,
+ ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR,
+ ePipelineInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR,
+ ePipelineExecutablePropertiesKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR,
+ ePipelineExecutableInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR,
+ ePipelineExecutableStatisticKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR,
+ ePipelineExecutableInternalRepresentationKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR,
+ ePhysicalDeviceShaderAtomicFloat2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT,
+ ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV,
+ eGraphicsShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV,
+ eGraphicsPipelineShaderGroupsCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV,
+ eIndirectCommandsLayoutTokenNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV,
+ eIndirectCommandsLayoutCreateInfoNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV,
+ eGeneratedCommandsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV,
+ eGeneratedCommandsMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV,
+ ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV,
+ ePhysicalDeviceInheritedViewportScissorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV,
+ eCommandBufferInheritanceViewportScissorInfoNV = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV,
+ ePhysicalDeviceTexelBufferAlignmentFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT,
+ eCommandBufferInheritanceRenderPassTransformInfoQCOM = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM,
+ eRenderPassTransformBeginInfoQCOM = VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM,
+ ePhysicalDeviceDeviceMemoryReportFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT,
+ eDeviceDeviceMemoryReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT,
+ eDeviceMemoryReportCallbackDataEXT = VK_STRUCTURE_TYPE_DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT,
+ ePhysicalDeviceRobustness2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT,
+ ePhysicalDeviceRobustness2PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT,
+ eSamplerCustomBorderColorCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT,
+ ePhysicalDeviceCustomBorderColorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT,
+ ePhysicalDeviceCustomBorderColorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT,
+ ePipelineLibraryCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR,
+ ePhysicalDevicePresentBarrierFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_BARRIER_FEATURES_NV,
+ eSurfaceCapabilitiesPresentBarrierNV = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_PRESENT_BARRIER_NV,
+ eSwapchainPresentBarrierCreateInfoNV = VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV,
+ ePresentIdKHR = VK_STRUCTURE_TYPE_PRESENT_ID_KHR,
+ ePhysicalDevicePresentIdFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
eVideoEncodeInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_INFO_KHR,
eVideoEncodeRateControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_INFO_KHR,
eVideoEncodeRateControlLayerInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_LAYER_INFO_KHR,
+ eVideoEncodeCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_CAPABILITIES_KHR,
+ eVideoEncodeUsageInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_USAGE_INFO_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
ePhysicalDeviceDiagnosticsConfigFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV,
eDeviceDiagnosticsConfigCreateInfoNV = VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV,
- eQueueFamilyCheckpointProperties2NV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV,
- eCheckpointData2NV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV,
- ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR,
- ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV,
- ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV,
- ePipelineFragmentShadingRateEnumStateCreateInfoNV =
- VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV,
- eAccelerationStructureGeometryMotionTrianglesDataNV =
- VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_MOTION_TRIANGLES_DATA_NV,
- ePhysicalDeviceRayTracingMotionBlurFeaturesNV =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV,
- eAccelerationStructureMotionInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MOTION_INFO_NV,
- ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT,
- ePhysicalDeviceFragmentDensityMap2FeaturesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT,
- ePhysicalDeviceFragmentDensityMap2PropertiesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT,
- eCopyCommandTransformInfoQCOM = VK_STRUCTURE_TYPE_COPY_COMMAND_TRANSFORM_INFO_QCOM,
- ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR,
- ePhysicalDevice4444FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT,
- ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM,
- ePhysicalDeviceRgba10X6FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT,
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ eExportMetalObjectCreateInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECT_CREATE_INFO_EXT,
+ eExportMetalObjectsInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECTS_INFO_EXT,
+ eExportMetalDeviceInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_DEVICE_INFO_EXT,
+ eExportMetalCommandQueueInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_COMMAND_QUEUE_INFO_EXT,
+ eExportMetalBufferInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_BUFFER_INFO_EXT,
+ eImportMetalBufferInfoEXT = VK_STRUCTURE_TYPE_IMPORT_METAL_BUFFER_INFO_EXT,
+ eExportMetalTextureInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_TEXTURE_INFO_EXT,
+ eImportMetalTextureInfoEXT = VK_STRUCTURE_TYPE_IMPORT_METAL_TEXTURE_INFO_EXT,
+ eExportMetalIoSurfaceInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_IO_SURFACE_INFO_EXT,
+ eImportMetalIoSurfaceInfoEXT = VK_STRUCTURE_TYPE_IMPORT_METAL_IO_SURFACE_INFO_EXT,
+ eExportMetalSharedEventInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_SHARED_EVENT_INFO_EXT,
+ eImportMetalSharedEventInfoEXT = VK_STRUCTURE_TYPE_IMPORT_METAL_SHARED_EVENT_INFO_EXT,
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+ eQueueFamilyCheckpointProperties2NV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV,
+ eCheckpointData2NV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV,
+ ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT,
+ ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT,
+ eGraphicsPipelineLibraryCreateInfoEXT = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT,
+ ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_FEATURES_AMD,
+ ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR,
+ ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR,
+ ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR,
+ ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV,
+ ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV,
+ ePipelineFragmentShadingRateEnumStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV,
+ eAccelerationStructureGeometryMotionTrianglesDataNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_MOTION_TRIANGLES_DATA_NV,
+ ePhysicalDeviceRayTracingMotionBlurFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV,
+ eAccelerationStructureMotionInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MOTION_INFO_NV,
+ ePhysicalDeviceMeshShaderFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT,
+ ePhysicalDeviceMeshShaderPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_EXT,
+ ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT,
+ ePhysicalDeviceFragmentDensityMap2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT,
+ ePhysicalDeviceFragmentDensityMap2PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT,
+ eCopyCommandTransformInfoQCOM = VK_STRUCTURE_TYPE_COPY_COMMAND_TRANSFORM_INFO_QCOM,
+ ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR,
+ ePhysicalDeviceImageCompressionControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT,
+ eImageCompressionControlEXT = VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT,
+ eSubresourceLayout2EXT = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_EXT,
+ eImageSubresource2EXT = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_EXT,
+ eImageCompressionPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT,
+ ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT,
+ ePhysicalDevice4444FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT,
+ ePhysicalDeviceFaultFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FAULT_FEATURES_EXT,
+ eDeviceFaultCountsEXT = VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT,
+ eDeviceFaultInfoEXT = VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT,
+ ePhysicalDeviceRgba10X6FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT,
#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
eDirectfbSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT,
#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
- ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE,
- eMutableDescriptorTypeCreateInfoVALVE = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE,
- ePhysicalDeviceVertexInputDynamicStateFeaturesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT,
- eVertexInputBindingDescription2EXT = VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT,
- eVertexInputAttributeDescription2EXT = VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT,
- ePhysicalDeviceDrmPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT,
- ePhysicalDeviceDepthClipControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT,
- ePipelineViewportDepthClipControlCreateInfoEXT =
- VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT,
- ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT,
+ ePhysicalDeviceVertexInputDynamicStateFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT,
+ eVertexInputBindingDescription2EXT = VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT,
+ eVertexInputAttributeDescription2EXT = VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT,
+ ePhysicalDeviceDrmPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT,
+ ePhysicalDeviceAddressBindingReportFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ADDRESS_BINDING_REPORT_FEATURES_EXT,
+ eDeviceAddressBindingCallbackDataEXT = VK_STRUCTURE_TYPE_DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT,
+ ePhysicalDeviceDepthClipControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT,
+ ePipelineViewportDepthClipControlCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT,
+ ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT,
#if defined( VK_USE_PLATFORM_FUCHSIA )
eImportMemoryZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA,
eMemoryZirconHandlePropertiesFUCHSIA = VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA,
@@ -879,1087 +776,262 @@ namespace VULKAN_HPP_NAMESPACE
eSysmemColorSpaceFUCHSIA = VK_STRUCTURE_TYPE_SYSMEM_COLOR_SPACE_FUCHSIA,
eBufferCollectionConstraintsInfoFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CONSTRAINTS_INFO_FUCHSIA,
#endif /*VK_USE_PLATFORM_FUCHSIA*/
- eSubpassShadingPipelineCreateInfoHUAWEI = VK_STRUCTURE_TYPE_SUBPASS_SHADING_PIPELINE_CREATE_INFO_HUAWEI,
- ePhysicalDeviceSubpassShadingFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_FEATURES_HUAWEI,
- ePhysicalDeviceSubpassShadingPropertiesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_PROPERTIES_HUAWEI,
- ePhysicalDeviceInvocationMaskFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INVOCATION_MASK_FEATURES_HUAWEI,
- eMemoryGetRemoteAddressInfoNV = VK_STRUCTURE_TYPE_MEMORY_GET_REMOTE_ADDRESS_INFO_NV,
- ePhysicalDeviceExternalMemoryRdmaFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_RDMA_FEATURES_NV,
- ePhysicalDeviceExtendedDynamicState2FeaturesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT,
+ eSubpassShadingPipelineCreateInfoHUAWEI = VK_STRUCTURE_TYPE_SUBPASS_SHADING_PIPELINE_CREATE_INFO_HUAWEI,
+ ePhysicalDeviceSubpassShadingFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_FEATURES_HUAWEI,
+ ePhysicalDeviceSubpassShadingPropertiesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_PROPERTIES_HUAWEI,
+ ePhysicalDeviceInvocationMaskFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INVOCATION_MASK_FEATURES_HUAWEI,
+ eMemoryGetRemoteAddressInfoNV = VK_STRUCTURE_TYPE_MEMORY_GET_REMOTE_ADDRESS_INFO_NV,
+ ePhysicalDeviceExternalMemoryRdmaFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_RDMA_FEATURES_NV,
+ ePipelinePropertiesIdentifierEXT = VK_STRUCTURE_TYPE_PIPELINE_PROPERTIES_IDENTIFIER_EXT,
+ ePhysicalDevicePipelinePropertiesFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROPERTIES_FEATURES_EXT,
+ ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_EXT,
+ eSubpassResolvePerformanceQueryEXT = VK_STRUCTURE_TYPE_SUBPASS_RESOLVE_PERFORMANCE_QUERY_EXT,
+ eMultisampledRenderToSingleSampledInfoEXT = VK_STRUCTURE_TYPE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_INFO_EXT,
+ ePhysicalDeviceExtendedDynamicState2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT,
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
eScreenSurfaceCreateInfoQNX = VK_STRUCTURE_TYPE_SCREEN_SURFACE_CREATE_INFO_QNX,
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
- ePhysicalDeviceColorWriteEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT,
- ePipelineColorWriteCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_WRITE_CREATE_INFO_EXT,
- ePhysicalDeviceImageViewMinLodFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_MIN_LOD_FEATURES_EXT,
- eImageViewMinLodCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT,
- ePhysicalDeviceMultiDrawFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT,
- ePhysicalDeviceMultiDrawPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT,
- ePhysicalDeviceBorderColorSwizzleFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT,
- eSamplerBorderColorComponentMappingCreateInfoEXT =
- VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT,
- ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT,
- ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM,
- ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM,
- eSubpassFragmentDensityMapOffsetEndInfoQCOM = VK_STRUCTURE_TYPE_SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM,
- ePhysicalDeviceLinearColorAttachmentFeaturesNV =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV,
- eAttachmentDescription2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR,
- eAttachmentDescriptionStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR,
- eAttachmentReference2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR,
- eAttachmentReferenceStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR,
- eAttachmentSampleCountInfoNV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV,
- eBindBufferMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR,
- eBindBufferMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR,
- eBindImageMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR,
- eBindImageMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR,
- eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR,
- eBlitImageInfo2KHR = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2_KHR,
- eBufferCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR,
- eBufferDeviceAddressInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT,
- eBufferDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR,
- eBufferImageCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR,
- eBufferMemoryBarrier2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR,
- eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR,
- eBufferOpaqueCaptureAddressCreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR,
- eCommandBufferInheritanceRenderingInfoKHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO_KHR,
- eCommandBufferSubmitInfoKHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR,
- eCopyBufferInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR,
- eCopyBufferToImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2_KHR,
- eCopyImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR,
- eCopyImageToBufferInfo2KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2_KHR,
- eDebugReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT,
- eDependencyInfoKHR = VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR,
- eDescriptorPoolInlineUniformBlockCreateInfoEXT =
- VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT,
- eDescriptorSetLayoutBindingFlagsCreateInfoEXT =
- VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT,
- eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR,
- eDescriptorSetVariableDescriptorCountAllocateInfoEXT =
- VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT,
- eDescriptorSetVariableDescriptorCountLayoutSupportEXT =
- VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT,
- eDescriptorUpdateTemplateCreateInfoKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR,
- eDeviceBufferMemoryRequirementsKHR = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS_KHR,
- eDeviceGroupBindSparseInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR,
- eDeviceGroupCommandBufferBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR,
- eDeviceGroupDeviceCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR,
- eDeviceGroupRenderPassBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR,
- eDeviceGroupSubmitInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR,
- eDeviceImageMemoryRequirementsKHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS_KHR,
- eDeviceMemoryOpaqueCaptureAddressInfoKHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR,
- eDevicePrivateDataCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO_EXT,
- eDeviceQueueGlobalPriorityCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT,
- eExportFenceCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR,
- eExportMemoryAllocateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR,
- eExportSemaphoreCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR,
- eExternalBufferPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR,
- eExternalFencePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR,
- eExternalImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR,
- eExternalMemoryBufferCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR,
- eExternalMemoryImageCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR,
- eExternalSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR,
- eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR,
- eFormatProperties3KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3_KHR,
- eFramebufferAttachmentsCreateInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR,
- eFramebufferAttachmentImageInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR,
- eImageBlit2KHR = VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR,
- eImageCopy2KHR = VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR,
- eImageFormatListCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR,
- eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR,
- eImageMemoryBarrier2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR,
- eImageMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR,
- eImagePlaneMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR,
- eImageResolve2KHR = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR,
- eImageSparseMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR,
- eImageStencilUsageCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT,
- eImageViewUsageCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR,
- eMemoryAllocateFlagsInfoKHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR,
- eMemoryBarrier2KHR = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR,
- eMemoryDedicatedAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR,
- eMemoryDedicatedRequirementsKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR,
- eMemoryOpaqueCaptureAddressAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR,
- eMemoryRequirements2KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR,
- ePhysicalDevice16BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR,
- ePhysicalDevice8BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR,
- ePhysicalDeviceBufferAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT,
- ePhysicalDeviceBufferDeviceAddressFeaturesKHR =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR,
- ePhysicalDeviceDepthStencilResolvePropertiesKHR =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR,
- ePhysicalDeviceDescriptorIndexingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT,
- ePhysicalDeviceDescriptorIndexingPropertiesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT,
- ePhysicalDeviceDriverPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR,
- ePhysicalDeviceDynamicRenderingFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES_KHR,
- ePhysicalDeviceExternalBufferInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR,
- ePhysicalDeviceExternalFenceInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR,
- ePhysicalDeviceExternalImageFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR,
- ePhysicalDeviceExternalSemaphoreInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR,
- ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR,
- ePhysicalDeviceFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR,
- ePhysicalDeviceFloatControlsPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR,
- ePhysicalDeviceGlobalPriorityQueryFeaturesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT,
- ePhysicalDeviceGroupPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR,
- ePhysicalDeviceHostQueryResetFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT,
- ePhysicalDeviceIdPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR,
- ePhysicalDeviceImagelessFramebufferFeaturesKHR =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR,
- ePhysicalDeviceImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR,
- ePhysicalDeviceImageRobustnessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT,
- ePhysicalDeviceInlineUniformBlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT,
- ePhysicalDeviceInlineUniformBlockPropertiesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT,
- ePhysicalDeviceMaintenance3PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR,
- ePhysicalDeviceMaintenance4FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES_KHR,
- ePhysicalDeviceMaintenance4PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES_KHR,
- ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR,
- ePhysicalDeviceMultiviewFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR,
- ePhysicalDeviceMultiviewPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR,
- ePhysicalDevicePipelineCreationCacheControlFeaturesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT,
- ePhysicalDevicePointClippingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR,
- ePhysicalDevicePrivateDataFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT,
- ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR,
- ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT,
- ePhysicalDeviceSamplerYcbcrConversionFeaturesKHR =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR,
- ePhysicalDeviceScalarBlockLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT,
- ePhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR,
- ePhysicalDeviceShaderAtomicInt64FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR,
- ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT,
- ePhysicalDeviceShaderDrawParameterFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES,
- ePhysicalDeviceShaderFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR,
- ePhysicalDeviceShaderIntegerDotProductFeaturesKHR =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES_KHR,
- ePhysicalDeviceShaderIntegerDotProductPropertiesKHR =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES_KHR,
- ePhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR,
- ePhysicalDeviceShaderTerminateInvocationFeaturesKHR =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES_KHR,
- ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR,
- ePhysicalDeviceSubgroupSizeControlFeaturesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT,
- ePhysicalDeviceSubgroupSizeControlPropertiesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT,
- ePhysicalDeviceSynchronization2FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR,
- ePhysicalDeviceTexelBufferAlignmentPropertiesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT,
- ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT,
- ePhysicalDeviceTimelineSemaphoreFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR,
- ePhysicalDeviceTimelineSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR,
- ePhysicalDeviceToolPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT,
- ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR,
- ePhysicalDeviceVariablePointersFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR,
- ePhysicalDeviceVariablePointerFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES,
- ePhysicalDeviceVariablePointerFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR,
- ePhysicalDeviceVulkanMemoryModelFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR,
- ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR =
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR,
- ePipelineCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT,
- ePipelineRenderingCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO_KHR,
- ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT =
- VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT,
- ePipelineTessellationDomainOriginStateCreateInfoKHR =
- VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR,
- ePrivateDataSlotCreateInfoEXT = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT,
- eQueryPoolCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL,
- eQueueFamilyGlobalPriorityPropertiesEXT = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT,
- eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR,
- eRenderingAttachmentInfoKHR = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO_KHR,
- eRenderingInfoKHR = VK_STRUCTURE_TYPE_RENDERING_INFO_KHR,
- eRenderPassAttachmentBeginInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR,
- eRenderPassCreateInfo2KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR,
- eRenderPassInputAttachmentAspectCreateInfoKHR =
- VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR,
- eRenderPassMultiviewCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR,
- eResolveImageInfo2KHR = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2_KHR,
- eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT,
- eSamplerYcbcrConversionCreateInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR,
- eSamplerYcbcrConversionImageFormatPropertiesKHR =
- VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR,
- eSamplerYcbcrConversionInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR,
- eSemaphoreSignalInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR,
- eSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR,
- eSemaphoreTypeCreateInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR,
- eSemaphoreWaitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR,
- eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR,
- eSparseImageMemoryRequirements2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR,
- eSubmitInfo2KHR = VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR,
- eSubpassBeginInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR,
- eSubpassDependency2KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR,
- eSubpassDescription2KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR,
- eSubpassDescriptionDepthStencilResolveKHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR,
- eSubpassEndInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR,
- eTimelineSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR,
- eWriteDescriptorSetInlineUniformBlockEXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT
- };
-
- VULKAN_HPP_INLINE std::string to_string( StructureType value )
- {
- switch ( value )
- {
- case StructureType::eApplicationInfo: return "ApplicationInfo";
- case StructureType::eInstanceCreateInfo: return "InstanceCreateInfo";
- case StructureType::eDeviceQueueCreateInfo: return "DeviceQueueCreateInfo";
- case StructureType::eDeviceCreateInfo: return "DeviceCreateInfo";
- case StructureType::eSubmitInfo: return "SubmitInfo";
- case StructureType::eMemoryAllocateInfo: return "MemoryAllocateInfo";
- case StructureType::eMappedMemoryRange: return "MappedMemoryRange";
- case StructureType::eBindSparseInfo: return "BindSparseInfo";
- case StructureType::eFenceCreateInfo: return "FenceCreateInfo";
- case StructureType::eSemaphoreCreateInfo: return "SemaphoreCreateInfo";
- case StructureType::eEventCreateInfo: return "EventCreateInfo";
- case StructureType::eQueryPoolCreateInfo: return "QueryPoolCreateInfo";
- case StructureType::eBufferCreateInfo: return "BufferCreateInfo";
- case StructureType::eBufferViewCreateInfo: return "BufferViewCreateInfo";
- case StructureType::eImageCreateInfo: return "ImageCreateInfo";
- case StructureType::eImageViewCreateInfo: return "ImageViewCreateInfo";
- case StructureType::eShaderModuleCreateInfo: return "ShaderModuleCreateInfo";
- case StructureType::ePipelineCacheCreateInfo: return "PipelineCacheCreateInfo";
- case StructureType::ePipelineShaderStageCreateInfo: return "PipelineShaderStageCreateInfo";
- case StructureType::ePipelineVertexInputStateCreateInfo: return "PipelineVertexInputStateCreateInfo";
- case StructureType::ePipelineInputAssemblyStateCreateInfo: return "PipelineInputAssemblyStateCreateInfo";
- case StructureType::ePipelineTessellationStateCreateInfo: return "PipelineTessellationStateCreateInfo";
- case StructureType::ePipelineViewportStateCreateInfo: return "PipelineViewportStateCreateInfo";
- case StructureType::ePipelineRasterizationStateCreateInfo: return "PipelineRasterizationStateCreateInfo";
- case StructureType::ePipelineMultisampleStateCreateInfo: return "PipelineMultisampleStateCreateInfo";
- case StructureType::ePipelineDepthStencilStateCreateInfo: return "PipelineDepthStencilStateCreateInfo";
- case StructureType::ePipelineColorBlendStateCreateInfo: return "PipelineColorBlendStateCreateInfo";
- case StructureType::ePipelineDynamicStateCreateInfo: return "PipelineDynamicStateCreateInfo";
- case StructureType::eGraphicsPipelineCreateInfo: return "GraphicsPipelineCreateInfo";
- case StructureType::eComputePipelineCreateInfo: return "ComputePipelineCreateInfo";
- case StructureType::ePipelineLayoutCreateInfo: return "PipelineLayoutCreateInfo";
- case StructureType::eSamplerCreateInfo: return "SamplerCreateInfo";
- case StructureType::eDescriptorSetLayoutCreateInfo: return "DescriptorSetLayoutCreateInfo";
- case StructureType::eDescriptorPoolCreateInfo: return "DescriptorPoolCreateInfo";
- case StructureType::eDescriptorSetAllocateInfo: return "DescriptorSetAllocateInfo";
- case StructureType::eWriteDescriptorSet: return "WriteDescriptorSet";
- case StructureType::eCopyDescriptorSet: return "CopyDescriptorSet";
- case StructureType::eFramebufferCreateInfo: return "FramebufferCreateInfo";
- case StructureType::eRenderPassCreateInfo: return "RenderPassCreateInfo";
- case StructureType::eCommandPoolCreateInfo: return "CommandPoolCreateInfo";
- case StructureType::eCommandBufferAllocateInfo: return "CommandBufferAllocateInfo";
- case StructureType::eCommandBufferInheritanceInfo: return "CommandBufferInheritanceInfo";
- case StructureType::eCommandBufferBeginInfo: return "CommandBufferBeginInfo";
- case StructureType::eRenderPassBeginInfo: return "RenderPassBeginInfo";
- case StructureType::eBufferMemoryBarrier: return "BufferMemoryBarrier";
- case StructureType::eImageMemoryBarrier: return "ImageMemoryBarrier";
- case StructureType::eMemoryBarrier: return "MemoryBarrier";
- case StructureType::eLoaderInstanceCreateInfo: return "LoaderInstanceCreateInfo";
- case StructureType::eLoaderDeviceCreateInfo: return "LoaderDeviceCreateInfo";
- case StructureType::ePhysicalDeviceSubgroupProperties: return "PhysicalDeviceSubgroupProperties";
- case StructureType::eBindBufferMemoryInfo: return "BindBufferMemoryInfo";
- case StructureType::eBindImageMemoryInfo: return "BindImageMemoryInfo";
- case StructureType::ePhysicalDevice16BitStorageFeatures: return "PhysicalDevice16BitStorageFeatures";
- case StructureType::eMemoryDedicatedRequirements: return "MemoryDedicatedRequirements";
- case StructureType::eMemoryDedicatedAllocateInfo: return "MemoryDedicatedAllocateInfo";
- case StructureType::eMemoryAllocateFlagsInfo: return "MemoryAllocateFlagsInfo";
- case StructureType::eDeviceGroupRenderPassBeginInfo: return "DeviceGroupRenderPassBeginInfo";
- case StructureType::eDeviceGroupCommandBufferBeginInfo: return "DeviceGroupCommandBufferBeginInfo";
- case StructureType::eDeviceGroupSubmitInfo: return "DeviceGroupSubmitInfo";
- case StructureType::eDeviceGroupBindSparseInfo: return "DeviceGroupBindSparseInfo";
- case StructureType::eBindBufferMemoryDeviceGroupInfo: return "BindBufferMemoryDeviceGroupInfo";
- case StructureType::eBindImageMemoryDeviceGroupInfo: return "BindImageMemoryDeviceGroupInfo";
- case StructureType::ePhysicalDeviceGroupProperties: return "PhysicalDeviceGroupProperties";
- case StructureType::eDeviceGroupDeviceCreateInfo: return "DeviceGroupDeviceCreateInfo";
- case StructureType::eBufferMemoryRequirementsInfo2: return "BufferMemoryRequirementsInfo2";
- case StructureType::eImageMemoryRequirementsInfo2: return "ImageMemoryRequirementsInfo2";
- case StructureType::eImageSparseMemoryRequirementsInfo2: return "ImageSparseMemoryRequirementsInfo2";
- case StructureType::eMemoryRequirements2: return "MemoryRequirements2";
- case StructureType::eSparseImageMemoryRequirements2: return "SparseImageMemoryRequirements2";
- case StructureType::ePhysicalDeviceFeatures2: return "PhysicalDeviceFeatures2";
- case StructureType::ePhysicalDeviceProperties2: return "PhysicalDeviceProperties2";
- case StructureType::eFormatProperties2: return "FormatProperties2";
- case StructureType::eImageFormatProperties2: return "ImageFormatProperties2";
- case StructureType::ePhysicalDeviceImageFormatInfo2: return "PhysicalDeviceImageFormatInfo2";
- case StructureType::eQueueFamilyProperties2: return "QueueFamilyProperties2";
- case StructureType::ePhysicalDeviceMemoryProperties2: return "PhysicalDeviceMemoryProperties2";
- case StructureType::eSparseImageFormatProperties2: return "SparseImageFormatProperties2";
- case StructureType::ePhysicalDeviceSparseImageFormatInfo2: return "PhysicalDeviceSparseImageFormatInfo2";
- case StructureType::ePhysicalDevicePointClippingProperties: return "PhysicalDevicePointClippingProperties";
- case StructureType::eRenderPassInputAttachmentAspectCreateInfo:
- return "RenderPassInputAttachmentAspectCreateInfo";
- case StructureType::eImageViewUsageCreateInfo: return "ImageViewUsageCreateInfo";
- case StructureType::ePipelineTessellationDomainOriginStateCreateInfo:
- return "PipelineTessellationDomainOriginStateCreateInfo";
- case StructureType::eRenderPassMultiviewCreateInfo: return "RenderPassMultiviewCreateInfo";
- case StructureType::ePhysicalDeviceMultiviewFeatures: return "PhysicalDeviceMultiviewFeatures";
- case StructureType::ePhysicalDeviceMultiviewProperties: return "PhysicalDeviceMultiviewProperties";
- case StructureType::ePhysicalDeviceVariablePointersFeatures: return "PhysicalDeviceVariablePointersFeatures";
- case StructureType::eProtectedSubmitInfo: return "ProtectedSubmitInfo";
- case StructureType::ePhysicalDeviceProtectedMemoryFeatures: return "PhysicalDeviceProtectedMemoryFeatures";
- case StructureType::ePhysicalDeviceProtectedMemoryProperties: return "PhysicalDeviceProtectedMemoryProperties";
- case StructureType::eDeviceQueueInfo2: return "DeviceQueueInfo2";
- case StructureType::eSamplerYcbcrConversionCreateInfo: return "SamplerYcbcrConversionCreateInfo";
- case StructureType::eSamplerYcbcrConversionInfo: return "SamplerYcbcrConversionInfo";
- case StructureType::eBindImagePlaneMemoryInfo: return "BindImagePlaneMemoryInfo";
- case StructureType::eImagePlaneMemoryRequirementsInfo: return "ImagePlaneMemoryRequirementsInfo";
- case StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures:
- return "PhysicalDeviceSamplerYcbcrConversionFeatures";
- case StructureType::eSamplerYcbcrConversionImageFormatProperties:
- return "SamplerYcbcrConversionImageFormatProperties";
- case StructureType::eDescriptorUpdateTemplateCreateInfo: return "DescriptorUpdateTemplateCreateInfo";
- case StructureType::ePhysicalDeviceExternalImageFormatInfo: return "PhysicalDeviceExternalImageFormatInfo";
- case StructureType::eExternalImageFormatProperties: return "ExternalImageFormatProperties";
- case StructureType::ePhysicalDeviceExternalBufferInfo: return "PhysicalDeviceExternalBufferInfo";
- case StructureType::eExternalBufferProperties: return "ExternalBufferProperties";
- case StructureType::ePhysicalDeviceIdProperties: return "PhysicalDeviceIdProperties";
- case StructureType::eExternalMemoryBufferCreateInfo: return "ExternalMemoryBufferCreateInfo";
- case StructureType::eExternalMemoryImageCreateInfo: return "ExternalMemoryImageCreateInfo";
- case StructureType::eExportMemoryAllocateInfo: return "ExportMemoryAllocateInfo";
- case StructureType::ePhysicalDeviceExternalFenceInfo: return "PhysicalDeviceExternalFenceInfo";
- case StructureType::eExternalFenceProperties: return "ExternalFenceProperties";
- case StructureType::eExportFenceCreateInfo: return "ExportFenceCreateInfo";
- case StructureType::eExportSemaphoreCreateInfo: return "ExportSemaphoreCreateInfo";
- case StructureType::ePhysicalDeviceExternalSemaphoreInfo: return "PhysicalDeviceExternalSemaphoreInfo";
- case StructureType::eExternalSemaphoreProperties: return "ExternalSemaphoreProperties";
- case StructureType::ePhysicalDeviceMaintenance3Properties: return "PhysicalDeviceMaintenance3Properties";
- case StructureType::eDescriptorSetLayoutSupport: return "DescriptorSetLayoutSupport";
- case StructureType::ePhysicalDeviceShaderDrawParametersFeatures:
- return "PhysicalDeviceShaderDrawParametersFeatures";
- case StructureType::ePhysicalDeviceVulkan11Features: return "PhysicalDeviceVulkan11Features";
- case StructureType::ePhysicalDeviceVulkan11Properties: return "PhysicalDeviceVulkan11Properties";
- case StructureType::ePhysicalDeviceVulkan12Features: return "PhysicalDeviceVulkan12Features";
- case StructureType::ePhysicalDeviceVulkan12Properties: return "PhysicalDeviceVulkan12Properties";
- case StructureType::eImageFormatListCreateInfo: return "ImageFormatListCreateInfo";
- case StructureType::eAttachmentDescription2: return "AttachmentDescription2";
- case StructureType::eAttachmentReference2: return "AttachmentReference2";
- case StructureType::eSubpassDescription2: return "SubpassDescription2";
- case StructureType::eSubpassDependency2: return "SubpassDependency2";
- case StructureType::eRenderPassCreateInfo2: return "RenderPassCreateInfo2";
- case StructureType::eSubpassBeginInfo: return "SubpassBeginInfo";
- case StructureType::eSubpassEndInfo: return "SubpassEndInfo";
- case StructureType::ePhysicalDevice8BitStorageFeatures: return "PhysicalDevice8BitStorageFeatures";
- case StructureType::ePhysicalDeviceDriverProperties: return "PhysicalDeviceDriverProperties";
- case StructureType::ePhysicalDeviceShaderAtomicInt64Features: return "PhysicalDeviceShaderAtomicInt64Features";
- case StructureType::ePhysicalDeviceShaderFloat16Int8Features: return "PhysicalDeviceShaderFloat16Int8Features";
- case StructureType::ePhysicalDeviceFloatControlsProperties: return "PhysicalDeviceFloatControlsProperties";
- case StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo:
- return "DescriptorSetLayoutBindingFlagsCreateInfo";
- case StructureType::ePhysicalDeviceDescriptorIndexingFeatures: return "PhysicalDeviceDescriptorIndexingFeatures";
- case StructureType::ePhysicalDeviceDescriptorIndexingProperties:
- return "PhysicalDeviceDescriptorIndexingProperties";
- case StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo:
- return "DescriptorSetVariableDescriptorCountAllocateInfo";
- case StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport:
- return "DescriptorSetVariableDescriptorCountLayoutSupport";
- case StructureType::ePhysicalDeviceDepthStencilResolveProperties:
- return "PhysicalDeviceDepthStencilResolveProperties";
- case StructureType::eSubpassDescriptionDepthStencilResolve: return "SubpassDescriptionDepthStencilResolve";
- case StructureType::ePhysicalDeviceScalarBlockLayoutFeatures: return "PhysicalDeviceScalarBlockLayoutFeatures";
- case StructureType::eImageStencilUsageCreateInfo: return "ImageStencilUsageCreateInfo";
- case StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties:
- return "PhysicalDeviceSamplerFilterMinmaxProperties";
- case StructureType::eSamplerReductionModeCreateInfo: return "SamplerReductionModeCreateInfo";
- case StructureType::ePhysicalDeviceVulkanMemoryModelFeatures: return "PhysicalDeviceVulkanMemoryModelFeatures";
- case StructureType::ePhysicalDeviceImagelessFramebufferFeatures:
- return "PhysicalDeviceImagelessFramebufferFeatures";
- case StructureType::eFramebufferAttachmentsCreateInfo: return "FramebufferAttachmentsCreateInfo";
- case StructureType::eFramebufferAttachmentImageInfo: return "FramebufferAttachmentImageInfo";
- case StructureType::eRenderPassAttachmentBeginInfo: return "RenderPassAttachmentBeginInfo";
- case StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures:
- return "PhysicalDeviceUniformBufferStandardLayoutFeatures";
- case StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures:
- return "PhysicalDeviceShaderSubgroupExtendedTypesFeatures";
- case StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures:
- return "PhysicalDeviceSeparateDepthStencilLayoutsFeatures";
- case StructureType::eAttachmentReferenceStencilLayout: return "AttachmentReferenceStencilLayout";
- case StructureType::eAttachmentDescriptionStencilLayout: return "AttachmentDescriptionStencilLayout";
- case StructureType::ePhysicalDeviceHostQueryResetFeatures: return "PhysicalDeviceHostQueryResetFeatures";
- case StructureType::ePhysicalDeviceTimelineSemaphoreFeatures: return "PhysicalDeviceTimelineSemaphoreFeatures";
- case StructureType::ePhysicalDeviceTimelineSemaphoreProperties:
- return "PhysicalDeviceTimelineSemaphoreProperties";
- case StructureType::eSemaphoreTypeCreateInfo: return "SemaphoreTypeCreateInfo";
- case StructureType::eTimelineSemaphoreSubmitInfo: return "TimelineSemaphoreSubmitInfo";
- case StructureType::eSemaphoreWaitInfo: return "SemaphoreWaitInfo";
- case StructureType::eSemaphoreSignalInfo: return "SemaphoreSignalInfo";
- case StructureType::ePhysicalDeviceBufferDeviceAddressFeatures:
- return "PhysicalDeviceBufferDeviceAddressFeatures";
- case StructureType::eBufferDeviceAddressInfo: return "BufferDeviceAddressInfo";
- case StructureType::eBufferOpaqueCaptureAddressCreateInfo: return "BufferOpaqueCaptureAddressCreateInfo";
- case StructureType::eMemoryOpaqueCaptureAddressAllocateInfo: return "MemoryOpaqueCaptureAddressAllocateInfo";
- case StructureType::eDeviceMemoryOpaqueCaptureAddressInfo: return "DeviceMemoryOpaqueCaptureAddressInfo";
- case StructureType::ePhysicalDeviceVulkan13Features: return "PhysicalDeviceVulkan13Features";
- case StructureType::ePhysicalDeviceVulkan13Properties: return "PhysicalDeviceVulkan13Properties";
- case StructureType::ePipelineCreationFeedbackCreateInfo: return "PipelineCreationFeedbackCreateInfo";
- case StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures:
- return "PhysicalDeviceShaderTerminateInvocationFeatures";
- case StructureType::ePhysicalDeviceToolProperties: return "PhysicalDeviceToolProperties";
- case StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures:
- return "PhysicalDeviceShaderDemoteToHelperInvocationFeatures";
- case StructureType::ePhysicalDevicePrivateDataFeatures: return "PhysicalDevicePrivateDataFeatures";
- case StructureType::eDevicePrivateDataCreateInfo: return "DevicePrivateDataCreateInfo";
- case StructureType::ePrivateDataSlotCreateInfo: return "PrivateDataSlotCreateInfo";
- case StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures:
- return "PhysicalDevicePipelineCreationCacheControlFeatures";
- case StructureType::eMemoryBarrier2: return "MemoryBarrier2";
- case StructureType::eBufferMemoryBarrier2: return "BufferMemoryBarrier2";
- case StructureType::eImageMemoryBarrier2: return "ImageMemoryBarrier2";
- case StructureType::eDependencyInfo: return "DependencyInfo";
- case StructureType::eSubmitInfo2: return "SubmitInfo2";
- case StructureType::eSemaphoreSubmitInfo: return "SemaphoreSubmitInfo";
- case StructureType::eCommandBufferSubmitInfo: return "CommandBufferSubmitInfo";
- case StructureType::ePhysicalDeviceSynchronization2Features: return "PhysicalDeviceSynchronization2Features";
- case StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures:
- return "PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures";
- case StructureType::ePhysicalDeviceImageRobustnessFeatures: return "PhysicalDeviceImageRobustnessFeatures";
- case StructureType::eCopyBufferInfo2: return "CopyBufferInfo2";
- case StructureType::eCopyImageInfo2: return "CopyImageInfo2";
- case StructureType::eCopyBufferToImageInfo2: return "CopyBufferToImageInfo2";
- case StructureType::eCopyImageToBufferInfo2: return "CopyImageToBufferInfo2";
- case StructureType::eBlitImageInfo2: return "BlitImageInfo2";
- case StructureType::eResolveImageInfo2: return "ResolveImageInfo2";
- case StructureType::eBufferCopy2: return "BufferCopy2";
- case StructureType::eImageCopy2: return "ImageCopy2";
- case StructureType::eImageBlit2: return "ImageBlit2";
- case StructureType::eBufferImageCopy2: return "BufferImageCopy2";
- case StructureType::eImageResolve2: return "ImageResolve2";
- case StructureType::ePhysicalDeviceSubgroupSizeControlProperties:
- return "PhysicalDeviceSubgroupSizeControlProperties";
- case StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo:
- return "PipelineShaderStageRequiredSubgroupSizeCreateInfo";
- case StructureType::ePhysicalDeviceSubgroupSizeControlFeatures:
- return "PhysicalDeviceSubgroupSizeControlFeatures";
- case StructureType::ePhysicalDeviceInlineUniformBlockFeatures: return "PhysicalDeviceInlineUniformBlockFeatures";
- case StructureType::ePhysicalDeviceInlineUniformBlockProperties:
- return "PhysicalDeviceInlineUniformBlockProperties";
- case StructureType::eWriteDescriptorSetInlineUniformBlock: return "WriteDescriptorSetInlineUniformBlock";
- case StructureType::eDescriptorPoolInlineUniformBlockCreateInfo:
- return "DescriptorPoolInlineUniformBlockCreateInfo";
- case StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures:
- return "PhysicalDeviceTextureCompressionAstcHdrFeatures";
- case StructureType::eRenderingInfo: return "RenderingInfo";
- case StructureType::eRenderingAttachmentInfo: return "RenderingAttachmentInfo";
- case StructureType::ePipelineRenderingCreateInfo: return "PipelineRenderingCreateInfo";
- case StructureType::ePhysicalDeviceDynamicRenderingFeatures: return "PhysicalDeviceDynamicRenderingFeatures";
- case StructureType::eCommandBufferInheritanceRenderingInfo: return "CommandBufferInheritanceRenderingInfo";
- case StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures:
- return "PhysicalDeviceShaderIntegerDotProductFeatures";
- case StructureType::ePhysicalDeviceShaderIntegerDotProductProperties:
- return "PhysicalDeviceShaderIntegerDotProductProperties";
- case StructureType::ePhysicalDeviceTexelBufferAlignmentProperties:
- return "PhysicalDeviceTexelBufferAlignmentProperties";
- case StructureType::eFormatProperties3: return "FormatProperties3";
- case StructureType::ePhysicalDeviceMaintenance4Features: return "PhysicalDeviceMaintenance4Features";
- case StructureType::ePhysicalDeviceMaintenance4Properties: return "PhysicalDeviceMaintenance4Properties";
- case StructureType::eDeviceBufferMemoryRequirements: return "DeviceBufferMemoryRequirements";
- case StructureType::eDeviceImageMemoryRequirements: return "DeviceImageMemoryRequirements";
- case StructureType::eSwapchainCreateInfoKHR: return "SwapchainCreateInfoKHR";
- case StructureType::ePresentInfoKHR: return "PresentInfoKHR";
- case StructureType::eDeviceGroupPresentCapabilitiesKHR: return "DeviceGroupPresentCapabilitiesKHR";
- case StructureType::eImageSwapchainCreateInfoKHR: return "ImageSwapchainCreateInfoKHR";
- case StructureType::eBindImageMemorySwapchainInfoKHR: return "BindImageMemorySwapchainInfoKHR";
- case StructureType::eAcquireNextImageInfoKHR: return "AcquireNextImageInfoKHR";
- case StructureType::eDeviceGroupPresentInfoKHR: return "DeviceGroupPresentInfoKHR";
- case StructureType::eDeviceGroupSwapchainCreateInfoKHR: return "DeviceGroupSwapchainCreateInfoKHR";
- case StructureType::eDisplayModeCreateInfoKHR: return "DisplayModeCreateInfoKHR";
- case StructureType::eDisplaySurfaceCreateInfoKHR: return "DisplaySurfaceCreateInfoKHR";
- case StructureType::eDisplayPresentInfoKHR: return "DisplayPresentInfoKHR";
-#if defined( VK_USE_PLATFORM_XLIB_KHR )
- case StructureType::eXlibSurfaceCreateInfoKHR: return "XlibSurfaceCreateInfoKHR";
-#endif /*VK_USE_PLATFORM_XLIB_KHR*/
-#if defined( VK_USE_PLATFORM_XCB_KHR )
- case StructureType::eXcbSurfaceCreateInfoKHR: return "XcbSurfaceCreateInfoKHR";
-#endif /*VK_USE_PLATFORM_XCB_KHR*/
-#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
- case StructureType::eWaylandSurfaceCreateInfoKHR: return "WaylandSurfaceCreateInfoKHR";
-#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
-#if defined( VK_USE_PLATFORM_ANDROID_KHR )
- case StructureType::eAndroidSurfaceCreateInfoKHR: return "AndroidSurfaceCreateInfoKHR";
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
- case StructureType::eWin32SurfaceCreateInfoKHR: return "Win32SurfaceCreateInfoKHR";
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- case StructureType::eDebugReportCallbackCreateInfoEXT: return "DebugReportCallbackCreateInfoEXT";
- case StructureType::ePipelineRasterizationStateRasterizationOrderAMD:
- return "PipelineRasterizationStateRasterizationOrderAMD";
- case StructureType::eDebugMarkerObjectNameInfoEXT: return "DebugMarkerObjectNameInfoEXT";
- case StructureType::eDebugMarkerObjectTagInfoEXT: return "DebugMarkerObjectTagInfoEXT";
- case StructureType::eDebugMarkerMarkerInfoEXT: return "DebugMarkerMarkerInfoEXT";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- case StructureType::eVideoProfileKHR: return "VideoProfileKHR";
- case StructureType::eVideoCapabilitiesKHR: return "VideoCapabilitiesKHR";
- case StructureType::eVideoPictureResourceKHR: return "VideoPictureResourceKHR";
- case StructureType::eVideoGetMemoryPropertiesKHR: return "VideoGetMemoryPropertiesKHR";
- case StructureType::eVideoBindMemoryKHR: return "VideoBindMemoryKHR";
- case StructureType::eVideoSessionCreateInfoKHR: return "VideoSessionCreateInfoKHR";
- case StructureType::eVideoSessionParametersCreateInfoKHR: return "VideoSessionParametersCreateInfoKHR";
- case StructureType::eVideoSessionParametersUpdateInfoKHR: return "VideoSessionParametersUpdateInfoKHR";
- case StructureType::eVideoBeginCodingInfoKHR: return "VideoBeginCodingInfoKHR";
- case StructureType::eVideoEndCodingInfoKHR: return "VideoEndCodingInfoKHR";
- case StructureType::eVideoCodingControlInfoKHR: return "VideoCodingControlInfoKHR";
- case StructureType::eVideoReferenceSlotKHR: return "VideoReferenceSlotKHR";
- case StructureType::eVideoQueueFamilyProperties2KHR: return "VideoQueueFamilyProperties2KHR";
- case StructureType::eVideoProfilesKHR: return "VideoProfilesKHR";
- case StructureType::ePhysicalDeviceVideoFormatInfoKHR: return "PhysicalDeviceVideoFormatInfoKHR";
- case StructureType::eVideoFormatPropertiesKHR: return "VideoFormatPropertiesKHR";
- case StructureType::eQueueFamilyQueryResultStatusProperties2KHR:
- return "QueueFamilyQueryResultStatusProperties2KHR";
- case StructureType::eVideoDecodeInfoKHR: return "VideoDecodeInfoKHR";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- case StructureType::eDedicatedAllocationImageCreateInfoNV: return "DedicatedAllocationImageCreateInfoNV";
- case StructureType::eDedicatedAllocationBufferCreateInfoNV: return "DedicatedAllocationBufferCreateInfoNV";
- case StructureType::eDedicatedAllocationMemoryAllocateInfoNV: return "DedicatedAllocationMemoryAllocateInfoNV";
- case StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT:
- return "PhysicalDeviceTransformFeedbackFeaturesEXT";
- case StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT:
- return "PhysicalDeviceTransformFeedbackPropertiesEXT";
- case StructureType::ePipelineRasterizationStateStreamCreateInfoEXT:
- return "PipelineRasterizationStateStreamCreateInfoEXT";
- case StructureType::eCuModuleCreateInfoNVX: return "CuModuleCreateInfoNVX";
- case StructureType::eCuFunctionCreateInfoNVX: return "CuFunctionCreateInfoNVX";
- case StructureType::eCuLaunchInfoNVX: return "CuLaunchInfoNVX";
- case StructureType::eImageViewHandleInfoNVX: return "ImageViewHandleInfoNVX";
- case StructureType::eImageViewAddressPropertiesNVX: return "ImageViewAddressPropertiesNVX";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- case StructureType::eVideoEncodeH264CapabilitiesEXT: return "VideoEncodeH264CapabilitiesEXT";
- case StructureType::eVideoEncodeH264SessionCreateInfoEXT: return "VideoEncodeH264SessionCreateInfoEXT";
- case StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT:
- return "VideoEncodeH264SessionParametersCreateInfoEXT";
- case StructureType::eVideoEncodeH264SessionParametersAddInfoEXT:
- return "VideoEncodeH264SessionParametersAddInfoEXT";
- case StructureType::eVideoEncodeH264VclFrameInfoEXT: return "VideoEncodeH264VclFrameInfoEXT";
- case StructureType::eVideoEncodeH264DpbSlotInfoEXT: return "VideoEncodeH264DpbSlotInfoEXT";
- case StructureType::eVideoEncodeH264NaluSliceEXT: return "VideoEncodeH264NaluSliceEXT";
- case StructureType::eVideoEncodeH264EmitPictureParametersEXT: return "VideoEncodeH264EmitPictureParametersEXT";
- case StructureType::eVideoEncodeH264ProfileEXT: return "VideoEncodeH264ProfileEXT";
- case StructureType::eVideoEncodeH264RateControlInfoEXT: return "VideoEncodeH264RateControlInfoEXT";
- case StructureType::eVideoEncodeH264RateControlLayerInfoEXT: return "VideoEncodeH264RateControlLayerInfoEXT";
- case StructureType::eVideoEncodeH265CapabilitiesEXT: return "VideoEncodeH265CapabilitiesEXT";
- case StructureType::eVideoEncodeH265SessionCreateInfoEXT: return "VideoEncodeH265SessionCreateInfoEXT";
- case StructureType::eVideoEncodeH265SessionParametersCreateInfoEXT:
- return "VideoEncodeH265SessionParametersCreateInfoEXT";
- case StructureType::eVideoEncodeH265SessionParametersAddInfoEXT:
- return "VideoEncodeH265SessionParametersAddInfoEXT";
- case StructureType::eVideoEncodeH265VclFrameInfoEXT: return "VideoEncodeH265VclFrameInfoEXT";
- case StructureType::eVideoEncodeH265DpbSlotInfoEXT: return "VideoEncodeH265DpbSlotInfoEXT";
- case StructureType::eVideoEncodeH265NaluSliceSegmentEXT: return "VideoEncodeH265NaluSliceSegmentEXT";
- case StructureType::eVideoEncodeH265EmitPictureParametersEXT: return "VideoEncodeH265EmitPictureParametersEXT";
- case StructureType::eVideoEncodeH265ProfileEXT: return "VideoEncodeH265ProfileEXT";
- case StructureType::eVideoEncodeH265ReferenceListsEXT: return "VideoEncodeH265ReferenceListsEXT";
- case StructureType::eVideoEncodeH265RateControlInfoEXT: return "VideoEncodeH265RateControlInfoEXT";
- case StructureType::eVideoEncodeH265RateControlLayerInfoEXT: return "VideoEncodeH265RateControlLayerInfoEXT";
- case StructureType::eVideoDecodeH264CapabilitiesEXT: return "VideoDecodeH264CapabilitiesEXT";
- case StructureType::eVideoDecodeH264SessionCreateInfoEXT: return "VideoDecodeH264SessionCreateInfoEXT";
- case StructureType::eVideoDecodeH264PictureInfoEXT: return "VideoDecodeH264PictureInfoEXT";
- case StructureType::eVideoDecodeH264MvcEXT: return "VideoDecodeH264MvcEXT";
- case StructureType::eVideoDecodeH264ProfileEXT: return "VideoDecodeH264ProfileEXT";
- case StructureType::eVideoDecodeH264SessionParametersCreateInfoEXT:
- return "VideoDecodeH264SessionParametersCreateInfoEXT";
- case StructureType::eVideoDecodeH264SessionParametersAddInfoEXT:
- return "VideoDecodeH264SessionParametersAddInfoEXT";
- case StructureType::eVideoDecodeH264DpbSlotInfoEXT: return "VideoDecodeH264DpbSlotInfoEXT";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- case StructureType::eTextureLodGatherFormatPropertiesAMD: return "TextureLodGatherFormatPropertiesAMD";
- case StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR:
- return "RenderingFragmentShadingRateAttachmentInfoKHR";
- case StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT:
- return "RenderingFragmentDensityMapAttachmentInfoEXT";
- case StructureType::eAttachmentSampleCountInfoAMD: return "AttachmentSampleCountInfoAMD";
- case StructureType::eMultiviewPerViewAttributesInfoNVX: return "MultiviewPerViewAttributesInfoNVX";
-#if defined( VK_USE_PLATFORM_GGP )
- case StructureType::eStreamDescriptorSurfaceCreateInfoGGP: return "StreamDescriptorSurfaceCreateInfoGGP";
-#endif /*VK_USE_PLATFORM_GGP*/
- case StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV:
- return "PhysicalDeviceCornerSampledImageFeaturesNV";
- case StructureType::eExternalMemoryImageCreateInfoNV: return "ExternalMemoryImageCreateInfoNV";
- case StructureType::eExportMemoryAllocateInfoNV: return "ExportMemoryAllocateInfoNV";
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
- case StructureType::eImportMemoryWin32HandleInfoNV: return "ImportMemoryWin32HandleInfoNV";
- case StructureType::eExportMemoryWin32HandleInfoNV: return "ExportMemoryWin32HandleInfoNV";
- case StructureType::eWin32KeyedMutexAcquireReleaseInfoNV: return "Win32KeyedMutexAcquireReleaseInfoNV";
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- case StructureType::eValidationFlagsEXT: return "ValidationFlagsEXT";
-#if defined( VK_USE_PLATFORM_VI_NN )
- case StructureType::eViSurfaceCreateInfoNN: return "ViSurfaceCreateInfoNN";
-#endif /*VK_USE_PLATFORM_VI_NN*/
- case StructureType::eImageViewAstcDecodeModeEXT: return "ImageViewAstcDecodeModeEXT";
- case StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT: return "PhysicalDeviceAstcDecodeFeaturesEXT";
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
- case StructureType::eImportMemoryWin32HandleInfoKHR: return "ImportMemoryWin32HandleInfoKHR";
- case StructureType::eExportMemoryWin32HandleInfoKHR: return "ExportMemoryWin32HandleInfoKHR";
- case StructureType::eMemoryWin32HandlePropertiesKHR: return "MemoryWin32HandlePropertiesKHR";
- case StructureType::eMemoryGetWin32HandleInfoKHR: return "MemoryGetWin32HandleInfoKHR";
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- case StructureType::eImportMemoryFdInfoKHR: return "ImportMemoryFdInfoKHR";
- case StructureType::eMemoryFdPropertiesKHR: return "MemoryFdPropertiesKHR";
- case StructureType::eMemoryGetFdInfoKHR: return "MemoryGetFdInfoKHR";
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
- case StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR: return "Win32KeyedMutexAcquireReleaseInfoKHR";
- case StructureType::eImportSemaphoreWin32HandleInfoKHR: return "ImportSemaphoreWin32HandleInfoKHR";
- case StructureType::eExportSemaphoreWin32HandleInfoKHR: return "ExportSemaphoreWin32HandleInfoKHR";
- case StructureType::eD3D12FenceSubmitInfoKHR: return "D3D12FenceSubmitInfoKHR";
- case StructureType::eSemaphoreGetWin32HandleInfoKHR: return "SemaphoreGetWin32HandleInfoKHR";
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- case StructureType::eImportSemaphoreFdInfoKHR: return "ImportSemaphoreFdInfoKHR";
- case StructureType::eSemaphoreGetFdInfoKHR: return "SemaphoreGetFdInfoKHR";
- case StructureType::ePhysicalDevicePushDescriptorPropertiesKHR:
- return "PhysicalDevicePushDescriptorPropertiesKHR";
- case StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT:
- return "CommandBufferInheritanceConditionalRenderingInfoEXT";
- case StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT:
- return "PhysicalDeviceConditionalRenderingFeaturesEXT";
- case StructureType::eConditionalRenderingBeginInfoEXT: return "ConditionalRenderingBeginInfoEXT";
- case StructureType::ePresentRegionsKHR: return "PresentRegionsKHR";
- case StructureType::ePipelineViewportWScalingStateCreateInfoNV:
- return "PipelineViewportWScalingStateCreateInfoNV";
- case StructureType::eSurfaceCapabilities2EXT: return "SurfaceCapabilities2EXT";
- case StructureType::eDisplayPowerInfoEXT: return "DisplayPowerInfoEXT";
- case StructureType::eDeviceEventInfoEXT: return "DeviceEventInfoEXT";
- case StructureType::eDisplayEventInfoEXT: return "DisplayEventInfoEXT";
- case StructureType::eSwapchainCounterCreateInfoEXT: return "SwapchainCounterCreateInfoEXT";
- case StructureType::ePresentTimesInfoGOOGLE: return "PresentTimesInfoGOOGLE";
- case StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX:
- return "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX";
- case StructureType::ePipelineViewportSwizzleStateCreateInfoNV: return "PipelineViewportSwizzleStateCreateInfoNV";
- case StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT:
- return "PhysicalDeviceDiscardRectanglePropertiesEXT";
- case StructureType::ePipelineDiscardRectangleStateCreateInfoEXT:
- return "PipelineDiscardRectangleStateCreateInfoEXT";
- case StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT:
- return "PhysicalDeviceConservativeRasterizationPropertiesEXT";
- case StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT:
- return "PipelineRasterizationConservativeStateCreateInfoEXT";
- case StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT: return "PhysicalDeviceDepthClipEnableFeaturesEXT";
- case StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT:
- return "PipelineRasterizationDepthClipStateCreateInfoEXT";
- case StructureType::eHdrMetadataEXT: return "HdrMetadataEXT";
- case StructureType::eSharedPresentSurfaceCapabilitiesKHR: return "SharedPresentSurfaceCapabilitiesKHR";
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
- case StructureType::eImportFenceWin32HandleInfoKHR: return "ImportFenceWin32HandleInfoKHR";
- case StructureType::eExportFenceWin32HandleInfoKHR: return "ExportFenceWin32HandleInfoKHR";
- case StructureType::eFenceGetWin32HandleInfoKHR: return "FenceGetWin32HandleInfoKHR";
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- case StructureType::eImportFenceFdInfoKHR: return "ImportFenceFdInfoKHR";
- case StructureType::eFenceGetFdInfoKHR: return "FenceGetFdInfoKHR";
- case StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR:
- return "PhysicalDevicePerformanceQueryFeaturesKHR";
- case StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR:
- return "PhysicalDevicePerformanceQueryPropertiesKHR";
- case StructureType::eQueryPoolPerformanceCreateInfoKHR: return "QueryPoolPerformanceCreateInfoKHR";
- case StructureType::ePerformanceQuerySubmitInfoKHR: return "PerformanceQuerySubmitInfoKHR";
- case StructureType::eAcquireProfilingLockInfoKHR: return "AcquireProfilingLockInfoKHR";
- case StructureType::ePerformanceCounterKHR: return "PerformanceCounterKHR";
- case StructureType::ePerformanceCounterDescriptionKHR: return "PerformanceCounterDescriptionKHR";
- case StructureType::ePhysicalDeviceSurfaceInfo2KHR: return "PhysicalDeviceSurfaceInfo2KHR";
- case StructureType::eSurfaceCapabilities2KHR: return "SurfaceCapabilities2KHR";
- case StructureType::eSurfaceFormat2KHR: return "SurfaceFormat2KHR";
- case StructureType::eDisplayProperties2KHR: return "DisplayProperties2KHR";
- case StructureType::eDisplayPlaneProperties2KHR: return "DisplayPlaneProperties2KHR";
- case StructureType::eDisplayModeProperties2KHR: return "DisplayModeProperties2KHR";
- case StructureType::eDisplayPlaneInfo2KHR: return "DisplayPlaneInfo2KHR";
- case StructureType::eDisplayPlaneCapabilities2KHR: return "DisplayPlaneCapabilities2KHR";
-#if defined( VK_USE_PLATFORM_IOS_MVK )
- case StructureType::eIosSurfaceCreateInfoMVK: return "IosSurfaceCreateInfoMVK";
-#endif /*VK_USE_PLATFORM_IOS_MVK*/
-#if defined( VK_USE_PLATFORM_MACOS_MVK )
- case StructureType::eMacosSurfaceCreateInfoMVK: return "MacosSurfaceCreateInfoMVK";
-#endif /*VK_USE_PLATFORM_MACOS_MVK*/
- case StructureType::eDebugUtilsObjectNameInfoEXT: return "DebugUtilsObjectNameInfoEXT";
- case StructureType::eDebugUtilsObjectTagInfoEXT: return "DebugUtilsObjectTagInfoEXT";
- case StructureType::eDebugUtilsLabelEXT: return "DebugUtilsLabelEXT";
- case StructureType::eDebugUtilsMessengerCallbackDataEXT: return "DebugUtilsMessengerCallbackDataEXT";
- case StructureType::eDebugUtilsMessengerCreateInfoEXT: return "DebugUtilsMessengerCreateInfoEXT";
-#if defined( VK_USE_PLATFORM_ANDROID_KHR )
- case StructureType::eAndroidHardwareBufferUsageANDROID: return "AndroidHardwareBufferUsageANDROID";
- case StructureType::eAndroidHardwareBufferPropertiesANDROID: return "AndroidHardwareBufferPropertiesANDROID";
- case StructureType::eAndroidHardwareBufferFormatPropertiesANDROID:
- return "AndroidHardwareBufferFormatPropertiesANDROID";
- case StructureType::eImportAndroidHardwareBufferInfoANDROID: return "ImportAndroidHardwareBufferInfoANDROID";
- case StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID:
- return "MemoryGetAndroidHardwareBufferInfoANDROID";
- case StructureType::eExternalFormatANDROID: return "ExternalFormatANDROID";
- case StructureType::eAndroidHardwareBufferFormatProperties2ANDROID:
- return "AndroidHardwareBufferFormatProperties2ANDROID";
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
- case StructureType::eSampleLocationsInfoEXT: return "SampleLocationsInfoEXT";
- case StructureType::eRenderPassSampleLocationsBeginInfoEXT: return "RenderPassSampleLocationsBeginInfoEXT";
- case StructureType::ePipelineSampleLocationsStateCreateInfoEXT:
- return "PipelineSampleLocationsStateCreateInfoEXT";
- case StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT:
- return "PhysicalDeviceSampleLocationsPropertiesEXT";
- case StructureType::eMultisamplePropertiesEXT: return "MultisamplePropertiesEXT";
- case StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT:
- return "PhysicalDeviceBlendOperationAdvancedFeaturesEXT";
- case StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT:
- return "PhysicalDeviceBlendOperationAdvancedPropertiesEXT";
- case StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT:
- return "PipelineColorBlendAdvancedStateCreateInfoEXT";
- case StructureType::ePipelineCoverageToColorStateCreateInfoNV: return "PipelineCoverageToColorStateCreateInfoNV";
- case StructureType::eWriteDescriptorSetAccelerationStructureKHR:
- return "WriteDescriptorSetAccelerationStructureKHR";
- case StructureType::eAccelerationStructureBuildGeometryInfoKHR:
- return "AccelerationStructureBuildGeometryInfoKHR";
- case StructureType::eAccelerationStructureDeviceAddressInfoKHR:
- return "AccelerationStructureDeviceAddressInfoKHR";
- case StructureType::eAccelerationStructureGeometryAabbsDataKHR:
- return "AccelerationStructureGeometryAabbsDataKHR";
- case StructureType::eAccelerationStructureGeometryInstancesDataKHR:
- return "AccelerationStructureGeometryInstancesDataKHR";
- case StructureType::eAccelerationStructureGeometryTrianglesDataKHR:
- return "AccelerationStructureGeometryTrianglesDataKHR";
- case StructureType::eAccelerationStructureGeometryKHR: return "AccelerationStructureGeometryKHR";
- case StructureType::eAccelerationStructureVersionInfoKHR: return "AccelerationStructureVersionInfoKHR";
- case StructureType::eCopyAccelerationStructureInfoKHR: return "CopyAccelerationStructureInfoKHR";
- case StructureType::eCopyAccelerationStructureToMemoryInfoKHR: return "CopyAccelerationStructureToMemoryInfoKHR";
- case StructureType::eCopyMemoryToAccelerationStructureInfoKHR: return "CopyMemoryToAccelerationStructureInfoKHR";
- case StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR:
- return "PhysicalDeviceAccelerationStructureFeaturesKHR";
- case StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR:
- return "PhysicalDeviceAccelerationStructurePropertiesKHR";
- case StructureType::eAccelerationStructureCreateInfoKHR: return "AccelerationStructureCreateInfoKHR";
- case StructureType::eAccelerationStructureBuildSizesInfoKHR: return "AccelerationStructureBuildSizesInfoKHR";
- case StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR:
- return "PhysicalDeviceRayTracingPipelineFeaturesKHR";
- case StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR:
- return "PhysicalDeviceRayTracingPipelinePropertiesKHR";
- case StructureType::eRayTracingPipelineCreateInfoKHR: return "RayTracingPipelineCreateInfoKHR";
- case StructureType::eRayTracingShaderGroupCreateInfoKHR: return "RayTracingShaderGroupCreateInfoKHR";
- case StructureType::eRayTracingPipelineInterfaceCreateInfoKHR: return "RayTracingPipelineInterfaceCreateInfoKHR";
- case StructureType::ePhysicalDeviceRayQueryFeaturesKHR: return "PhysicalDeviceRayQueryFeaturesKHR";
- case StructureType::ePipelineCoverageModulationStateCreateInfoNV:
- return "PipelineCoverageModulationStateCreateInfoNV";
- case StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV: return "PhysicalDeviceShaderSmBuiltinsFeaturesNV";
- case StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV:
- return "PhysicalDeviceShaderSmBuiltinsPropertiesNV";
- case StructureType::eDrmFormatModifierPropertiesListEXT: return "DrmFormatModifierPropertiesListEXT";
- case StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT:
- return "PhysicalDeviceImageDrmFormatModifierInfoEXT";
- case StructureType::eImageDrmFormatModifierListCreateInfoEXT: return "ImageDrmFormatModifierListCreateInfoEXT";
- case StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT:
- return "ImageDrmFormatModifierExplicitCreateInfoEXT";
- case StructureType::eImageDrmFormatModifierPropertiesEXT: return "ImageDrmFormatModifierPropertiesEXT";
- case StructureType::eDrmFormatModifierPropertiesList2EXT: return "DrmFormatModifierPropertiesList2EXT";
- case StructureType::eValidationCacheCreateInfoEXT: return "ValidationCacheCreateInfoEXT";
- case StructureType::eShaderModuleValidationCacheCreateInfoEXT: return "ShaderModuleValidationCacheCreateInfoEXT";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- case StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR:
- return "PhysicalDevicePortabilitySubsetFeaturesKHR";
- case StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR:
- return "PhysicalDevicePortabilitySubsetPropertiesKHR";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- case StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV:
- return "PipelineViewportShadingRateImageStateCreateInfoNV";
- case StructureType::ePhysicalDeviceShadingRateImageFeaturesNV: return "PhysicalDeviceShadingRateImageFeaturesNV";
- case StructureType::ePhysicalDeviceShadingRateImagePropertiesNV:
- return "PhysicalDeviceShadingRateImagePropertiesNV";
- case StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV:
- return "PipelineViewportCoarseSampleOrderStateCreateInfoNV";
- case StructureType::eRayTracingPipelineCreateInfoNV: return "RayTracingPipelineCreateInfoNV";
- case StructureType::eAccelerationStructureCreateInfoNV: return "AccelerationStructureCreateInfoNV";
- case StructureType::eGeometryNV: return "GeometryNV";
- case StructureType::eGeometryTrianglesNV: return "GeometryTrianglesNV";
- case StructureType::eGeometryAabbNV: return "GeometryAabbNV";
- case StructureType::eBindAccelerationStructureMemoryInfoNV: return "BindAccelerationStructureMemoryInfoNV";
- case StructureType::eWriteDescriptorSetAccelerationStructureNV:
- return "WriteDescriptorSetAccelerationStructureNV";
- case StructureType::eAccelerationStructureMemoryRequirementsInfoNV:
- return "AccelerationStructureMemoryRequirementsInfoNV";
- case StructureType::ePhysicalDeviceRayTracingPropertiesNV: return "PhysicalDeviceRayTracingPropertiesNV";
- case StructureType::eRayTracingShaderGroupCreateInfoNV: return "RayTracingShaderGroupCreateInfoNV";
- case StructureType::eAccelerationStructureInfoNV: return "AccelerationStructureInfoNV";
- case StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV:
- return "PhysicalDeviceRepresentativeFragmentTestFeaturesNV";
- case StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV:
- return "PipelineRepresentativeFragmentTestStateCreateInfoNV";
- case StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT:
- return "PhysicalDeviceImageViewImageFormatInfoEXT";
- case StructureType::eFilterCubicImageViewImageFormatPropertiesEXT:
- return "FilterCubicImageViewImageFormatPropertiesEXT";
- case StructureType::eImportMemoryHostPointerInfoEXT: return "ImportMemoryHostPointerInfoEXT";
- case StructureType::eMemoryHostPointerPropertiesEXT: return "MemoryHostPointerPropertiesEXT";
- case StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT:
- return "PhysicalDeviceExternalMemoryHostPropertiesEXT";
- case StructureType::ePhysicalDeviceShaderClockFeaturesKHR: return "PhysicalDeviceShaderClockFeaturesKHR";
- case StructureType::ePipelineCompilerControlCreateInfoAMD: return "PipelineCompilerControlCreateInfoAMD";
- case StructureType::eCalibratedTimestampInfoEXT: return "CalibratedTimestampInfoEXT";
- case StructureType::ePhysicalDeviceShaderCorePropertiesAMD: return "PhysicalDeviceShaderCorePropertiesAMD";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- case StructureType::eVideoDecodeH265CapabilitiesEXT: return "VideoDecodeH265CapabilitiesEXT";
- case StructureType::eVideoDecodeH265SessionCreateInfoEXT: return "VideoDecodeH265SessionCreateInfoEXT";
- case StructureType::eVideoDecodeH265SessionParametersCreateInfoEXT:
- return "VideoDecodeH265SessionParametersCreateInfoEXT";
- case StructureType::eVideoDecodeH265SessionParametersAddInfoEXT:
- return "VideoDecodeH265SessionParametersAddInfoEXT";
- case StructureType::eVideoDecodeH265ProfileEXT: return "VideoDecodeH265ProfileEXT";
- case StructureType::eVideoDecodeH265PictureInfoEXT: return "VideoDecodeH265PictureInfoEXT";
- case StructureType::eVideoDecodeH265DpbSlotInfoEXT: return "VideoDecodeH265DpbSlotInfoEXT";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- case StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR: return "DeviceQueueGlobalPriorityCreateInfoKHR";
- case StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesKHR:
- return "PhysicalDeviceGlobalPriorityQueryFeaturesKHR";
- case StructureType::eQueueFamilyGlobalPriorityPropertiesKHR: return "QueueFamilyGlobalPriorityPropertiesKHR";
- case StructureType::eDeviceMemoryOverallocationCreateInfoAMD: return "DeviceMemoryOverallocationCreateInfoAMD";
- case StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT:
- return "PhysicalDeviceVertexAttributeDivisorPropertiesEXT";
- case StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT:
- return "PipelineVertexInputDivisorStateCreateInfoEXT";
- case StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT:
- return "PhysicalDeviceVertexAttributeDivisorFeaturesEXT";
-#if defined( VK_USE_PLATFORM_GGP )
- case StructureType::ePresentFrameTokenGGP: return "PresentFrameTokenGGP";
-#endif /*VK_USE_PLATFORM_GGP*/
- case StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV:
- return "PhysicalDeviceComputeShaderDerivativesFeaturesNV";
- case StructureType::ePhysicalDeviceMeshShaderFeaturesNV: return "PhysicalDeviceMeshShaderFeaturesNV";
- case StructureType::ePhysicalDeviceMeshShaderPropertiesNV: return "PhysicalDeviceMeshShaderPropertiesNV";
- case StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV:
- return "PhysicalDeviceFragmentShaderBarycentricFeaturesNV";
- case StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV:
- return "PhysicalDeviceShaderImageFootprintFeaturesNV";
- case StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV:
- return "PipelineViewportExclusiveScissorStateCreateInfoNV";
- case StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV: return "PhysicalDeviceExclusiveScissorFeaturesNV";
- case StructureType::eCheckpointDataNV: return "CheckpointDataNV";
- case StructureType::eQueueFamilyCheckpointPropertiesNV: return "QueueFamilyCheckpointPropertiesNV";
- case StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL:
- return "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL";
- case StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL: return "QueryPoolPerformanceQueryCreateInfoINTEL";
- case StructureType::eInitializePerformanceApiInfoINTEL: return "InitializePerformanceApiInfoINTEL";
- case StructureType::ePerformanceMarkerInfoINTEL: return "PerformanceMarkerInfoINTEL";
- case StructureType::ePerformanceStreamMarkerInfoINTEL: return "PerformanceStreamMarkerInfoINTEL";
- case StructureType::ePerformanceOverrideInfoINTEL: return "PerformanceOverrideInfoINTEL";
- case StructureType::ePerformanceConfigurationAcquireInfoINTEL: return "PerformanceConfigurationAcquireInfoINTEL";
- case StructureType::ePhysicalDevicePciBusInfoPropertiesEXT: return "PhysicalDevicePciBusInfoPropertiesEXT";
- case StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD: return "DisplayNativeHdrSurfaceCapabilitiesAMD";
- case StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD: return "SwapchainDisplayNativeHdrCreateInfoAMD";
-#if defined( VK_USE_PLATFORM_FUCHSIA )
- case StructureType::eImagepipeSurfaceCreateInfoFUCHSIA: return "ImagepipeSurfaceCreateInfoFUCHSIA";
-#endif /*VK_USE_PLATFORM_FUCHSIA*/
-#if defined( VK_USE_PLATFORM_METAL_EXT )
- case StructureType::eMetalSurfaceCreateInfoEXT: return "MetalSurfaceCreateInfoEXT";
-#endif /*VK_USE_PLATFORM_METAL_EXT*/
- case StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT:
- return "PhysicalDeviceFragmentDensityMapFeaturesEXT";
- case StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT:
- return "PhysicalDeviceFragmentDensityMapPropertiesEXT";
- case StructureType::eRenderPassFragmentDensityMapCreateInfoEXT:
- return "RenderPassFragmentDensityMapCreateInfoEXT";
- case StructureType::eFragmentShadingRateAttachmentInfoKHR: return "FragmentShadingRateAttachmentInfoKHR";
- case StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR:
- return "PipelineFragmentShadingRateStateCreateInfoKHR";
- case StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR:
- return "PhysicalDeviceFragmentShadingRatePropertiesKHR";
- case StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR:
- return "PhysicalDeviceFragmentShadingRateFeaturesKHR";
- case StructureType::ePhysicalDeviceFragmentShadingRateKHR: return "PhysicalDeviceFragmentShadingRateKHR";
- case StructureType::ePhysicalDeviceShaderCoreProperties2AMD: return "PhysicalDeviceShaderCoreProperties2AMD";
- case StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD: return "PhysicalDeviceCoherentMemoryFeaturesAMD";
- case StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT:
- return "PhysicalDeviceShaderImageAtomicInt64FeaturesEXT";
- case StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT: return "PhysicalDeviceMemoryBudgetPropertiesEXT";
- case StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT: return "PhysicalDeviceMemoryPriorityFeaturesEXT";
- case StructureType::eMemoryPriorityAllocateInfoEXT: return "MemoryPriorityAllocateInfoEXT";
- case StructureType::eSurfaceProtectedCapabilitiesKHR: return "SurfaceProtectedCapabilitiesKHR";
- case StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV:
- return "PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV";
- case StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT:
- return "PhysicalDeviceBufferDeviceAddressFeaturesEXT";
- case StructureType::eBufferDeviceAddressCreateInfoEXT: return "BufferDeviceAddressCreateInfoEXT";
- case StructureType::eValidationFeaturesEXT: return "ValidationFeaturesEXT";
- case StructureType::ePhysicalDevicePresentWaitFeaturesKHR: return "PhysicalDevicePresentWaitFeaturesKHR";
- case StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV:
- return "PhysicalDeviceCooperativeMatrixFeaturesNV";
- case StructureType::eCooperativeMatrixPropertiesNV: return "CooperativeMatrixPropertiesNV";
- case StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV:
- return "PhysicalDeviceCooperativeMatrixPropertiesNV";
- case StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV:
- return "PhysicalDeviceCoverageReductionModeFeaturesNV";
- case StructureType::ePipelineCoverageReductionStateCreateInfoNV:
- return "PipelineCoverageReductionStateCreateInfoNV";
- case StructureType::eFramebufferMixedSamplesCombinationNV: return "FramebufferMixedSamplesCombinationNV";
- case StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT:
- return "PhysicalDeviceFragmentShaderInterlockFeaturesEXT";
- case StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT:
- return "PhysicalDeviceYcbcrImageArraysFeaturesEXT";
- case StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT: return "PhysicalDeviceProvokingVertexFeaturesEXT";
- case StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT:
- return "PipelineRasterizationProvokingVertexStateCreateInfoEXT";
- case StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT:
- return "PhysicalDeviceProvokingVertexPropertiesEXT";
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
- case StructureType::eSurfaceFullScreenExclusiveInfoEXT: return "SurfaceFullScreenExclusiveInfoEXT";
- case StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT:
- return "SurfaceCapabilitiesFullScreenExclusiveEXT";
- case StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT: return "SurfaceFullScreenExclusiveWin32InfoEXT";
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- case StructureType::eHeadlessSurfaceCreateInfoEXT: return "HeadlessSurfaceCreateInfoEXT";
- case StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT:
- return "PhysicalDeviceLineRasterizationFeaturesEXT";
- case StructureType::ePipelineRasterizationLineStateCreateInfoEXT:
- return "PipelineRasterizationLineStateCreateInfoEXT";
- case StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT:
- return "PhysicalDeviceLineRasterizationPropertiesEXT";
- case StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT:
- return "PhysicalDeviceShaderAtomicFloatFeaturesEXT";
- case StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT: return "PhysicalDeviceIndexTypeUint8FeaturesEXT";
- case StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT:
- return "PhysicalDeviceExtendedDynamicStateFeaturesEXT";
- case StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR:
- return "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR";
- case StructureType::ePipelineInfoKHR: return "PipelineInfoKHR";
- case StructureType::ePipelineExecutablePropertiesKHR: return "PipelineExecutablePropertiesKHR";
- case StructureType::ePipelineExecutableInfoKHR: return "PipelineExecutableInfoKHR";
- case StructureType::ePipelineExecutableStatisticKHR: return "PipelineExecutableStatisticKHR";
- case StructureType::ePipelineExecutableInternalRepresentationKHR:
- return "PipelineExecutableInternalRepresentationKHR";
- case StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT:
- return "PhysicalDeviceShaderAtomicFloat2FeaturesEXT";
- case StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV:
- return "PhysicalDeviceDeviceGeneratedCommandsPropertiesNV";
- case StructureType::eGraphicsShaderGroupCreateInfoNV: return "GraphicsShaderGroupCreateInfoNV";
- case StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV: return "GraphicsPipelineShaderGroupsCreateInfoNV";
- case StructureType::eIndirectCommandsLayoutTokenNV: return "IndirectCommandsLayoutTokenNV";
- case StructureType::eIndirectCommandsLayoutCreateInfoNV: return "IndirectCommandsLayoutCreateInfoNV";
- case StructureType::eGeneratedCommandsInfoNV: return "GeneratedCommandsInfoNV";
- case StructureType::eGeneratedCommandsMemoryRequirementsInfoNV:
- return "GeneratedCommandsMemoryRequirementsInfoNV";
- case StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV:
- return "PhysicalDeviceDeviceGeneratedCommandsFeaturesNV";
- case StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV:
- return "PhysicalDeviceInheritedViewportScissorFeaturesNV";
- case StructureType::eCommandBufferInheritanceViewportScissorInfoNV:
- return "CommandBufferInheritanceViewportScissorInfoNV";
- case StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT:
- return "PhysicalDeviceTexelBufferAlignmentFeaturesEXT";
- case StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM:
- return "CommandBufferInheritanceRenderPassTransformInfoQCOM";
- case StructureType::eRenderPassTransformBeginInfoQCOM: return "RenderPassTransformBeginInfoQCOM";
- case StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT:
- return "PhysicalDeviceDeviceMemoryReportFeaturesEXT";
- case StructureType::eDeviceDeviceMemoryReportCreateInfoEXT: return "DeviceDeviceMemoryReportCreateInfoEXT";
- case StructureType::eDeviceMemoryReportCallbackDataEXT: return "DeviceMemoryReportCallbackDataEXT";
- case StructureType::ePhysicalDeviceRobustness2FeaturesEXT: return "PhysicalDeviceRobustness2FeaturesEXT";
- case StructureType::ePhysicalDeviceRobustness2PropertiesEXT: return "PhysicalDeviceRobustness2PropertiesEXT";
- case StructureType::eSamplerCustomBorderColorCreateInfoEXT: return "SamplerCustomBorderColorCreateInfoEXT";
- case StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT:
- return "PhysicalDeviceCustomBorderColorPropertiesEXT";
- case StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT:
- return "PhysicalDeviceCustomBorderColorFeaturesEXT";
- case StructureType::ePipelineLibraryCreateInfoKHR: return "PipelineLibraryCreateInfoKHR";
- case StructureType::ePresentIdKHR: return "PresentIdKHR";
- case StructureType::ePhysicalDevicePresentIdFeaturesKHR: return "PhysicalDevicePresentIdFeaturesKHR";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- case StructureType::eVideoEncodeInfoKHR: return "VideoEncodeInfoKHR";
- case StructureType::eVideoEncodeRateControlInfoKHR: return "VideoEncodeRateControlInfoKHR";
- case StructureType::eVideoEncodeRateControlLayerInfoKHR: return "VideoEncodeRateControlLayerInfoKHR";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- case StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV:
- return "PhysicalDeviceDiagnosticsConfigFeaturesNV";
- case StructureType::eDeviceDiagnosticsConfigCreateInfoNV: return "DeviceDiagnosticsConfigCreateInfoNV";
- case StructureType::eQueueFamilyCheckpointProperties2NV: return "QueueFamilyCheckpointProperties2NV";
- case StructureType::eCheckpointData2NV: return "CheckpointData2NV";
- case StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR:
- return "PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR";
- case StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV:
- return "PhysicalDeviceFragmentShadingRateEnumsPropertiesNV";
- case StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV:
- return "PhysicalDeviceFragmentShadingRateEnumsFeaturesNV";
- case StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV:
- return "PipelineFragmentShadingRateEnumStateCreateInfoNV";
- case StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV:
- return "AccelerationStructureGeometryMotionTrianglesDataNV";
- case StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV:
- return "PhysicalDeviceRayTracingMotionBlurFeaturesNV";
- case StructureType::eAccelerationStructureMotionInfoNV: return "AccelerationStructureMotionInfoNV";
- case StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT:
- return "PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT";
- case StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT:
- return "PhysicalDeviceFragmentDensityMap2FeaturesEXT";
- case StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT:
- return "PhysicalDeviceFragmentDensityMap2PropertiesEXT";
- case StructureType::eCopyCommandTransformInfoQCOM: return "CopyCommandTransformInfoQCOM";
- case StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR:
- return "PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR";
- case StructureType::ePhysicalDevice4444FormatsFeaturesEXT: return "PhysicalDevice4444FormatsFeaturesEXT";
- case StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM:
- return "PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM";
- case StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT: return "PhysicalDeviceRgba10X6FormatsFeaturesEXT";
-#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
- case StructureType::eDirectfbSurfaceCreateInfoEXT: return "DirectfbSurfaceCreateInfoEXT";
-#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
- case StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE:
- return "PhysicalDeviceMutableDescriptorTypeFeaturesVALVE";
- case StructureType::eMutableDescriptorTypeCreateInfoVALVE: return "MutableDescriptorTypeCreateInfoVALVE";
- case StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT:
- return "PhysicalDeviceVertexInputDynamicStateFeaturesEXT";
- case StructureType::eVertexInputBindingDescription2EXT: return "VertexInputBindingDescription2EXT";
- case StructureType::eVertexInputAttributeDescription2EXT: return "VertexInputAttributeDescription2EXT";
- case StructureType::ePhysicalDeviceDrmPropertiesEXT: return "PhysicalDeviceDrmPropertiesEXT";
- case StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT:
- return "PhysicalDeviceDepthClipControlFeaturesEXT";
- case StructureType::ePipelineViewportDepthClipControlCreateInfoEXT:
- return "PipelineViewportDepthClipControlCreateInfoEXT";
- case StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT:
- return "PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT";
-#if defined( VK_USE_PLATFORM_FUCHSIA )
- case StructureType::eImportMemoryZirconHandleInfoFUCHSIA: return "ImportMemoryZirconHandleInfoFUCHSIA";
- case StructureType::eMemoryZirconHandlePropertiesFUCHSIA: return "MemoryZirconHandlePropertiesFUCHSIA";
- case StructureType::eMemoryGetZirconHandleInfoFUCHSIA: return "MemoryGetZirconHandleInfoFUCHSIA";
- case StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA: return "ImportSemaphoreZirconHandleInfoFUCHSIA";
- case StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA: return "SemaphoreGetZirconHandleInfoFUCHSIA";
- case StructureType::eBufferCollectionCreateInfoFUCHSIA: return "BufferCollectionCreateInfoFUCHSIA";
- case StructureType::eImportMemoryBufferCollectionFUCHSIA: return "ImportMemoryBufferCollectionFUCHSIA";
- case StructureType::eBufferCollectionImageCreateInfoFUCHSIA: return "BufferCollectionImageCreateInfoFUCHSIA";
- case StructureType::eBufferCollectionPropertiesFUCHSIA: return "BufferCollectionPropertiesFUCHSIA";
- case StructureType::eBufferConstraintsInfoFUCHSIA: return "BufferConstraintsInfoFUCHSIA";
- case StructureType::eBufferCollectionBufferCreateInfoFUCHSIA: return "BufferCollectionBufferCreateInfoFUCHSIA";
- case StructureType::eImageConstraintsInfoFUCHSIA: return "ImageConstraintsInfoFUCHSIA";
- case StructureType::eImageFormatConstraintsInfoFUCHSIA: return "ImageFormatConstraintsInfoFUCHSIA";
- case StructureType::eSysmemColorSpaceFUCHSIA: return "SysmemColorSpaceFUCHSIA";
- case StructureType::eBufferCollectionConstraintsInfoFUCHSIA: return "BufferCollectionConstraintsInfoFUCHSIA";
-#endif /*VK_USE_PLATFORM_FUCHSIA*/
- case StructureType::eSubpassShadingPipelineCreateInfoHUAWEI: return "SubpassShadingPipelineCreateInfoHUAWEI";
- case StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI:
- return "PhysicalDeviceSubpassShadingFeaturesHUAWEI";
- case StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI:
- return "PhysicalDeviceSubpassShadingPropertiesHUAWEI";
- case StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI:
- return "PhysicalDeviceInvocationMaskFeaturesHUAWEI";
- case StructureType::eMemoryGetRemoteAddressInfoNV: return "MemoryGetRemoteAddressInfoNV";
- case StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV:
- return "PhysicalDeviceExternalMemoryRdmaFeaturesNV";
- case StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT:
- return "PhysicalDeviceExtendedDynamicState2FeaturesEXT";
-#if defined( VK_USE_PLATFORM_SCREEN_QNX )
- case StructureType::eScreenSurfaceCreateInfoQNX: return "ScreenSurfaceCreateInfoQNX";
-#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
- case StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT:
- return "PhysicalDeviceColorWriteEnableFeaturesEXT";
- case StructureType::ePipelineColorWriteCreateInfoEXT: return "PipelineColorWriteCreateInfoEXT";
- case StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT: return "PhysicalDeviceImageViewMinLodFeaturesEXT";
- case StructureType::eImageViewMinLodCreateInfoEXT: return "ImageViewMinLodCreateInfoEXT";
- case StructureType::ePhysicalDeviceMultiDrawFeaturesEXT: return "PhysicalDeviceMultiDrawFeaturesEXT";
- case StructureType::ePhysicalDeviceMultiDrawPropertiesEXT: return "PhysicalDeviceMultiDrawPropertiesEXT";
- case StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT:
- return "PhysicalDeviceBorderColorSwizzleFeaturesEXT";
- case StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT:
- return "SamplerBorderColorComponentMappingCreateInfoEXT";
- case StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT:
- return "PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT";
- case StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM:
- return "PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM";
- case StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM:
- return "PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM";
- case StructureType::eSubpassFragmentDensityMapOffsetEndInfoQCOM:
- return "SubpassFragmentDensityMapOffsetEndInfoQCOM";
- case StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV:
- return "PhysicalDeviceLinearColorAttachmentFeaturesNV";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
+ ePhysicalDeviceColorWriteEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT,
+ ePipelineColorWriteCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_WRITE_CREATE_INFO_EXT,
+ ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT,
+ ePhysicalDeviceRayTracingMaintenance1FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MAINTENANCE_1_FEATURES_KHR,
+ ePhysicalDeviceImageViewMinLodFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_MIN_LOD_FEATURES_EXT,
+ eImageViewMinLodCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT,
+ ePhysicalDeviceMultiDrawFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT,
+ ePhysicalDeviceMultiDrawPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT,
+ ePhysicalDeviceImage2DViewOf3DFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT,
+ eMicromapBuildInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_BUILD_INFO_EXT,
+ eMicromapVersionInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_VERSION_INFO_EXT,
+ eCopyMicromapInfoEXT = VK_STRUCTURE_TYPE_COPY_MICROMAP_INFO_EXT,
+ eCopyMicromapToMemoryInfoEXT = VK_STRUCTURE_TYPE_COPY_MICROMAP_TO_MEMORY_INFO_EXT,
+ eCopyMemoryToMicromapInfoEXT = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_MICROMAP_INFO_EXT,
+ ePhysicalDeviceOpacityMicromapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_FEATURES_EXT,
+ ePhysicalDeviceOpacityMicromapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_PROPERTIES_EXT,
+ eMicromapCreateInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_CREATE_INFO_EXT,
+ eMicromapBuildSizesInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_BUILD_SIZES_INFO_EXT,
+ eAccelerationStructureTrianglesOpacityMicromapEXT = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_OPACITY_MICROMAP_EXT,
+ ePhysicalDeviceBorderColorSwizzleFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT,
+ eSamplerBorderColorComponentMappingCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT,
+ ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT,
+ ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE,
+ eDescriptorSetBindingReferenceVALVE = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_BINDING_REFERENCE_VALVE,
+ eDescriptorSetLayoutHostMappingInfoVALVE = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE,
+ ePhysicalDeviceDepthClampZeroOneFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT,
+ ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT,
+ ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM,
+ ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM,
+ eSubpassFragmentDensityMapOffsetEndInfoQCOM = VK_STRUCTURE_TYPE_SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM,
+ ePhysicalDeviceLinearColorAttachmentFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV,
+ ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT,
+ ePhysicalDeviceImageProcessingFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM,
+ ePhysicalDeviceImageProcessingPropertiesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM,
+ eImageViewSampleWeightCreateInfoQCOM = VK_STRUCTURE_TYPE_IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM,
+ ePhysicalDeviceExtendedDynamicState3FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT,
+ ePhysicalDeviceExtendedDynamicState3PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT,
+ ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT,
+ eRenderPassCreationControlEXT = VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_CONTROL_EXT,
+ eRenderPassCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT,
+ eRenderPassSubpassFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT,
+ ePhysicalDeviceShaderModuleIdentifierFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT,
+ ePhysicalDeviceShaderModuleIdentifierPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT,
+ ePipelineShaderStageModuleIdentifierCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT,
+ eShaderModuleIdentifierEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_IDENTIFIER_EXT,
+ ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT,
+ ePhysicalDeviceOpticalFlowFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV,
+ ePhysicalDeviceOpticalFlowPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV,
+ eOpticalFlowImageFormatInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV,
+ eOpticalFlowImageFormatPropertiesNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV,
+ eOpticalFlowSessionCreateInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_INFO_NV,
+ eOpticalFlowExecuteInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_EXECUTE_INFO_NV,
+ eOpticalFlowSessionCreatePrivateDataInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV,
+ ePhysicalDeviceLegacyDitheringFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT,
+ ePhysicalDevicePipelineProtectedAccessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT,
+ ePhysicalDeviceTilePropertiesFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM,
+ eTilePropertiesQCOM = VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM,
+ ePhysicalDeviceAmigoProfilingFeaturesSEC = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC,
+ eAmigoProfilingSubmitInfoSEC = VK_STRUCTURE_TYPE_AMIGO_PROFILING_SUBMIT_INFO_SEC,
+ ePhysicalDeviceMutableDescriptorTypeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT,
+ eMutableDescriptorTypeCreateInfoEXT = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT,
+ ePhysicalDeviceShaderCoreBuiltinsFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM,
+ ePhysicalDeviceShaderCoreBuiltinsPropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM,
+ eAttachmentDescription2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR,
+ eAttachmentDescriptionStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR,
+ eAttachmentReference2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR,
+ eAttachmentReferenceStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR,
+ eAttachmentSampleCountInfoNV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV,
+ eBindBufferMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR,
+ eBindBufferMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR,
+ eBindImageMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR,
+ eBindImageMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR,
+ eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR,
+ eBlitImageInfo2KHR = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2_KHR,
+ eBufferCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR,
+ eBufferDeviceAddressInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT,
+ eBufferDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR,
+ eBufferImageCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR,
+ eBufferMemoryBarrier2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR,
+ eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR,
+ eBufferOpaqueCaptureAddressCreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR,
+ eCommandBufferInheritanceRenderingInfoKHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO_KHR,
+ eCommandBufferSubmitInfoKHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR,
+ eCopyBufferInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR,
+ eCopyBufferToImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2_KHR,
+ eCopyImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR,
+ eCopyImageToBufferInfo2KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2_KHR,
+ eDebugReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT,
+ eDependencyInfoKHR = VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR,
+ eDescriptorPoolInlineUniformBlockCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT,
+ eDescriptorSetLayoutBindingFlagsCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT,
+ eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR,
+ eDescriptorSetVariableDescriptorCountAllocateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT,
+ eDescriptorSetVariableDescriptorCountLayoutSupportEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT,
+ eDescriptorUpdateTemplateCreateInfoKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR,
+ eDeviceBufferMemoryRequirementsKHR = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS_KHR,
+ eDeviceGroupBindSparseInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR,
+ eDeviceGroupCommandBufferBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR,
+ eDeviceGroupDeviceCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR,
+ eDeviceGroupRenderPassBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR,
+ eDeviceGroupSubmitInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR,
+ eDeviceImageMemoryRequirementsKHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS_KHR,
+ eDeviceMemoryOpaqueCaptureAddressInfoKHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR,
+ eDevicePrivateDataCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO_EXT,
+ eDeviceQueueGlobalPriorityCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT,
+ eExportFenceCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR,
+ eExportMemoryAllocateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR,
+ eExportSemaphoreCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR,
+ eExternalBufferPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR,
+ eExternalFencePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR,
+ eExternalImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR,
+ eExternalMemoryBufferCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR,
+ eExternalMemoryImageCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR,
+ eExternalSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR,
+ eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR,
+ eFormatProperties3KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3_KHR,
+ eFramebufferAttachmentsCreateInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR,
+ eFramebufferAttachmentImageInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR,
+ eImageBlit2KHR = VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR,
+ eImageCopy2KHR = VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR,
+ eImageFormatListCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR,
+ eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR,
+ eImageMemoryBarrier2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR,
+ eImageMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR,
+ eImagePlaneMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR,
+ eImageResolve2KHR = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR,
+ eImageSparseMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR,
+ eImageStencilUsageCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT,
+ eImageViewUsageCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR,
+ eMemoryAllocateFlagsInfoKHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR,
+ eMemoryBarrier2KHR = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR,
+ eMemoryDedicatedAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR,
+ eMemoryDedicatedRequirementsKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR,
+ eMemoryOpaqueCaptureAddressAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR,
+ eMemoryRequirements2KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR,
+ eMutableDescriptorTypeCreateInfoVALVE = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE,
+ ePhysicalDevice16BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR,
+ ePhysicalDevice8BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR,
+ ePhysicalDeviceBufferAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT,
+ ePhysicalDeviceBufferDeviceAddressFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR,
+ ePhysicalDeviceDepthStencilResolvePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR,
+ ePhysicalDeviceDescriptorIndexingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT,
+ ePhysicalDeviceDescriptorIndexingPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT,
+ ePhysicalDeviceDriverPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR,
+ ePhysicalDeviceDynamicRenderingFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES_KHR,
+ ePhysicalDeviceExternalBufferInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR,
+ ePhysicalDeviceExternalFenceInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR,
+ ePhysicalDeviceExternalImageFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR,
+ ePhysicalDeviceExternalSemaphoreInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR,
+ ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR,
+ ePhysicalDeviceFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR,
+ ePhysicalDeviceFloatControlsPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR,
+ ePhysicalDeviceFragmentShaderBarycentricFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV,
+ ePhysicalDeviceGlobalPriorityQueryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT,
+ ePhysicalDeviceGroupPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR,
+ ePhysicalDeviceHostQueryResetFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT,
+ ePhysicalDeviceIdPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR,
+ ePhysicalDeviceImagelessFramebufferFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR,
+ ePhysicalDeviceImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR,
+ ePhysicalDeviceImageRobustnessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT,
+ ePhysicalDeviceInlineUniformBlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT,
+ ePhysicalDeviceInlineUniformBlockPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT,
+ ePhysicalDeviceMaintenance3PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR,
+ ePhysicalDeviceMaintenance4FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES_KHR,
+ ePhysicalDeviceMaintenance4PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES_KHR,
+ ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR,
+ ePhysicalDeviceMultiviewFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR,
+ ePhysicalDeviceMultiviewPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR,
+ ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE,
+ ePhysicalDevicePipelineCreationCacheControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT,
+ ePhysicalDevicePointClippingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR,
+ ePhysicalDevicePrivateDataFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT,
+ ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR,
+ ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM,
+ ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT,
+ ePhysicalDeviceSamplerYcbcrConversionFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR,
+ ePhysicalDeviceScalarBlockLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT,
+ ePhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR,
+ ePhysicalDeviceShaderAtomicInt64FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR,
+ ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT,
+ ePhysicalDeviceShaderDrawParameterFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES,
+ ePhysicalDeviceShaderFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR,
+ ePhysicalDeviceShaderIntegerDotProductFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES_KHR,
+ ePhysicalDeviceShaderIntegerDotProductPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES_KHR,
+ ePhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR,
+ ePhysicalDeviceShaderTerminateInvocationFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES_KHR,
+ ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR,
+ ePhysicalDeviceSubgroupSizeControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT,
+ ePhysicalDeviceSubgroupSizeControlPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT,
+ ePhysicalDeviceSynchronization2FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR,
+ ePhysicalDeviceTexelBufferAlignmentPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT,
+ ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT,
+ ePhysicalDeviceTimelineSemaphoreFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR,
+ ePhysicalDeviceTimelineSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR,
+ ePhysicalDeviceToolPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT,
+ ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR,
+ ePhysicalDeviceVariablePointersFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR,
+ ePhysicalDeviceVariablePointerFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES,
+ ePhysicalDeviceVariablePointerFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR,
+ ePhysicalDeviceVulkanMemoryModelFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR,
+ ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR,
+ ePipelineCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT,
+ ePipelineInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_INFO_EXT,
+ ePipelineRenderingCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO_KHR,
+ ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT,
+ ePipelineTessellationDomainOriginStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR,
+ ePrivateDataSlotCreateInfoEXT = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT,
+ eQueryPoolCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL,
+ eQueueFamilyGlobalPriorityPropertiesEXT = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT,
+ eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR,
+ eRenderingAttachmentInfoKHR = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO_KHR,
+ eRenderingInfoKHR = VK_STRUCTURE_TYPE_RENDERING_INFO_KHR,
+ eRenderPassAttachmentBeginInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR,
+ eRenderPassCreateInfo2KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR,
+ eRenderPassInputAttachmentAspectCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR,
+ eRenderPassMultiviewCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR,
+ eResolveImageInfo2KHR = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2_KHR,
+ eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT,
+ eSamplerYcbcrConversionCreateInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR,
+ eSamplerYcbcrConversionImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR,
+ eSamplerYcbcrConversionInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR,
+ eSemaphoreSignalInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR,
+ eSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR,
+ eSemaphoreTypeCreateInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR,
+ eSemaphoreWaitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR,
+ eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR,
+ eSparseImageMemoryRequirements2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR,
+ eSubmitInfo2KHR = VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR,
+ eSubpassBeginInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR,
+ eSubpassDependency2KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR,
+ eSubpassDescription2KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR,
+ eSubpassDescriptionDepthStencilResolveKHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR,
+ eSubpassEndInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR,
+ eTimelineSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR,
+ eWriteDescriptorSetInlineUniformBlockEXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT
+ };
+
+ enum class PipelineCacheHeaderVersion
+ {
+ eOne = VK_PIPELINE_CACHE_HEADER_VERSION_ONE
+ };
enum class ObjectType
{
@@ -2013,69 +1085,13 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VK_USE_PLATFORM_FUCHSIA )
eBufferCollectionFUCHSIA = VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA,
#endif /*VK_USE_PLATFORM_FUCHSIA*/
+ eMicromapEXT = VK_OBJECT_TYPE_MICROMAP_EXT,
+ eOpticalFlowSessionNV = VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV,
eDescriptorUpdateTemplateKHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR,
ePrivateDataSlotEXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT,
eSamplerYcbcrConversionKHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR
};
- VULKAN_HPP_INLINE std::string to_string( ObjectType value )
- {
- switch ( value )
- {
- case ObjectType::eUnknown: return "Unknown";
- case ObjectType::eInstance: return "Instance";
- case ObjectType::ePhysicalDevice: return "PhysicalDevice";
- case ObjectType::eDevice: return "Device";
- case ObjectType::eQueue: return "Queue";
- case ObjectType::eSemaphore: return "Semaphore";
- case ObjectType::eCommandBuffer: return "CommandBuffer";
- case ObjectType::eFence: return "Fence";
- case ObjectType::eDeviceMemory: return "DeviceMemory";
- case ObjectType::eBuffer: return "Buffer";
- case ObjectType::eImage: return "Image";
- case ObjectType::eEvent: return "Event";
- case ObjectType::eQueryPool: return "QueryPool";
- case ObjectType::eBufferView: return "BufferView";
- case ObjectType::eImageView: return "ImageView";
- case ObjectType::eShaderModule: return "ShaderModule";
- case ObjectType::ePipelineCache: return "PipelineCache";
- case ObjectType::ePipelineLayout: return "PipelineLayout";
- case ObjectType::eRenderPass: return "RenderPass";
- case ObjectType::ePipeline: return "Pipeline";
- case ObjectType::eDescriptorSetLayout: return "DescriptorSetLayout";
- case ObjectType::eSampler: return "Sampler";
- case ObjectType::eDescriptorPool: return "DescriptorPool";
- case ObjectType::eDescriptorSet: return "DescriptorSet";
- case ObjectType::eFramebuffer: return "Framebuffer";
- case ObjectType::eCommandPool: return "CommandPool";
- case ObjectType::eSamplerYcbcrConversion: return "SamplerYcbcrConversion";
- case ObjectType::eDescriptorUpdateTemplate: return "DescriptorUpdateTemplate";
- case ObjectType::ePrivateDataSlot: return "PrivateDataSlot";
- case ObjectType::eSurfaceKHR: return "SurfaceKHR";
- case ObjectType::eSwapchainKHR: return "SwapchainKHR";
- case ObjectType::eDisplayKHR: return "DisplayKHR";
- case ObjectType::eDisplayModeKHR: return "DisplayModeKHR";
- case ObjectType::eDebugReportCallbackEXT: return "DebugReportCallbackEXT";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- case ObjectType::eVideoSessionKHR: return "VideoSessionKHR";
- case ObjectType::eVideoSessionParametersKHR: return "VideoSessionParametersKHR";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- case ObjectType::eCuModuleNVX: return "CuModuleNVX";
- case ObjectType::eCuFunctionNVX: return "CuFunctionNVX";
- case ObjectType::eDebugUtilsMessengerEXT: return "DebugUtilsMessengerEXT";
- case ObjectType::eAccelerationStructureKHR: return "AccelerationStructureKHR";
- case ObjectType::eValidationCacheEXT: return "ValidationCacheEXT";
- case ObjectType::eAccelerationStructureNV: return "AccelerationStructureNV";
- case ObjectType::ePerformanceConfigurationINTEL: return "PerformanceConfigurationINTEL";
- case ObjectType::eDeferredOperationKHR: return "DeferredOperationKHR";
- case ObjectType::eIndirectCommandsLayoutNV: return "IndirectCommandsLayoutNV";
-#if defined( VK_USE_PLATFORM_FUCHSIA )
- case ObjectType::eBufferCollectionFUCHSIA: return "BufferCollectionFUCHSIA";
-#endif /*VK_USE_PLATFORM_FUCHSIA*/
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class VendorId
{
eVIV = VK_VENDOR_ID_VIV,
@@ -2086,34 +1102,6 @@ namespace VULKAN_HPP_NAMESPACE
ePocl = VK_VENDOR_ID_POCL
};
- VULKAN_HPP_INLINE std::string to_string( VendorId value )
- {
- switch ( value )
- {
- case VendorId::eVIV: return "VIV";
- case VendorId::eVSI: return "VSI";
- case VendorId::eKazan: return "Kazan";
- case VendorId::eCodeplay: return "Codeplay";
- case VendorId::eMESA: return "MESA";
- case VendorId::ePocl: return "Pocl";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
- enum class PipelineCacheHeaderVersion
- {
- eOne = VK_PIPELINE_CACHE_HEADER_VERSION_ONE
- };
-
- VULKAN_HPP_INLINE std::string to_string( PipelineCacheHeaderVersion value )
- {
- switch ( value )
- {
- case PipelineCacheHeaderVersion::eOne: return "One";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class Format
{
eUndefined = VK_FORMAT_UNDEFINED,
@@ -2363,6 +1351,7 @@ namespace VULKAN_HPP_NAMESPACE
ePvrtc14BppSrgbBlockIMG = VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG,
ePvrtc22BppSrgbBlockIMG = VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG,
ePvrtc24BppSrgbBlockIMG = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG,
+ eR16G16S105NV = VK_FORMAT_R16G16_S10_5_NV,
eA4B4G4R4UnormPack16EXT = VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT,
eA4R4G4B4UnormPack16EXT = VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT,
eAstc10x10SfloatBlockEXT = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT,
@@ -2419,414 +1408,85 @@ namespace VULKAN_HPP_NAMESPACE
eR12X4UnormPack16KHR = VK_FORMAT_R12X4_UNORM_PACK16_KHR
};
- VULKAN_HPP_INLINE std::string to_string( Format value )
- {
- switch ( value )
- {
- case Format::eUndefined: return "Undefined";
- case Format::eR4G4UnormPack8: return "R4G4UnormPack8";
- case Format::eR4G4B4A4UnormPack16: return "R4G4B4A4UnormPack16";
- case Format::eB4G4R4A4UnormPack16: return "B4G4R4A4UnormPack16";
- case Format::eR5G6B5UnormPack16: return "R5G6B5UnormPack16";
- case Format::eB5G6R5UnormPack16: return "B5G6R5UnormPack16";
- case Format::eR5G5B5A1UnormPack16: return "R5G5B5A1UnormPack16";
- case Format::eB5G5R5A1UnormPack16: return "B5G5R5A1UnormPack16";
- case Format::eA1R5G5B5UnormPack16: return "A1R5G5B5UnormPack16";
- case Format::eR8Unorm: return "R8Unorm";
- case Format::eR8Snorm: return "R8Snorm";
- case Format::eR8Uscaled: return "R8Uscaled";
- case Format::eR8Sscaled: return "R8Sscaled";
- case Format::eR8Uint: return "R8Uint";
- case Format::eR8Sint: return "R8Sint";
- case Format::eR8Srgb: return "R8Srgb";
- case Format::eR8G8Unorm: return "R8G8Unorm";
- case Format::eR8G8Snorm: return "R8G8Snorm";
- case Format::eR8G8Uscaled: return "R8G8Uscaled";
- case Format::eR8G8Sscaled: return "R8G8Sscaled";
- case Format::eR8G8Uint: return "R8G8Uint";
- case Format::eR8G8Sint: return "R8G8Sint";
- case Format::eR8G8Srgb: return "R8G8Srgb";
- case Format::eR8G8B8Unorm: return "R8G8B8Unorm";
- case Format::eR8G8B8Snorm: return "R8G8B8Snorm";
- case Format::eR8G8B8Uscaled: return "R8G8B8Uscaled";
- case Format::eR8G8B8Sscaled: return "R8G8B8Sscaled";
- case Format::eR8G8B8Uint: return "R8G8B8Uint";
- case Format::eR8G8B8Sint: return "R8G8B8Sint";
- case Format::eR8G8B8Srgb: return "R8G8B8Srgb";
- case Format::eB8G8R8Unorm: return "B8G8R8Unorm";
- case Format::eB8G8R8Snorm: return "B8G8R8Snorm";
- case Format::eB8G8R8Uscaled: return "B8G8R8Uscaled";
- case Format::eB8G8R8Sscaled: return "B8G8R8Sscaled";
- case Format::eB8G8R8Uint: return "B8G8R8Uint";
- case Format::eB8G8R8Sint: return "B8G8R8Sint";
- case Format::eB8G8R8Srgb: return "B8G8R8Srgb";
- case Format::eR8G8B8A8Unorm: return "R8G8B8A8Unorm";
- case Format::eR8G8B8A8Snorm: return "R8G8B8A8Snorm";
- case Format::eR8G8B8A8Uscaled: return "R8G8B8A8Uscaled";
- case Format::eR8G8B8A8Sscaled: return "R8G8B8A8Sscaled";
- case Format::eR8G8B8A8Uint: return "R8G8B8A8Uint";
- case Format::eR8G8B8A8Sint: return "R8G8B8A8Sint";
- case Format::eR8G8B8A8Srgb: return "R8G8B8A8Srgb";
- case Format::eB8G8R8A8Unorm: return "B8G8R8A8Unorm";
- case Format::eB8G8R8A8Snorm: return "B8G8R8A8Snorm";
- case Format::eB8G8R8A8Uscaled: return "B8G8R8A8Uscaled";
- case Format::eB8G8R8A8Sscaled: return "B8G8R8A8Sscaled";
- case Format::eB8G8R8A8Uint: return "B8G8R8A8Uint";
- case Format::eB8G8R8A8Sint: return "B8G8R8A8Sint";
- case Format::eB8G8R8A8Srgb: return "B8G8R8A8Srgb";
- case Format::eA8B8G8R8UnormPack32: return "A8B8G8R8UnormPack32";
- case Format::eA8B8G8R8SnormPack32: return "A8B8G8R8SnormPack32";
- case Format::eA8B8G8R8UscaledPack32: return "A8B8G8R8UscaledPack32";
- case Format::eA8B8G8R8SscaledPack32: return "A8B8G8R8SscaledPack32";
- case Format::eA8B8G8R8UintPack32: return "A8B8G8R8UintPack32";
- case Format::eA8B8G8R8SintPack32: return "A8B8G8R8SintPack32";
- case Format::eA8B8G8R8SrgbPack32: return "A8B8G8R8SrgbPack32";
- case Format::eA2R10G10B10UnormPack32: return "A2R10G10B10UnormPack32";
- case Format::eA2R10G10B10SnormPack32: return "A2R10G10B10SnormPack32";
- case Format::eA2R10G10B10UscaledPack32: return "A2R10G10B10UscaledPack32";
- case Format::eA2R10G10B10SscaledPack32: return "A2R10G10B10SscaledPack32";
- case Format::eA2R10G10B10UintPack32: return "A2R10G10B10UintPack32";
- case Format::eA2R10G10B10SintPack32: return "A2R10G10B10SintPack32";
- case Format::eA2B10G10R10UnormPack32: return "A2B10G10R10UnormPack32";
- case Format::eA2B10G10R10SnormPack32: return "A2B10G10R10SnormPack32";
- case Format::eA2B10G10R10UscaledPack32: return "A2B10G10R10UscaledPack32";
- case Format::eA2B10G10R10SscaledPack32: return "A2B10G10R10SscaledPack32";
- case Format::eA2B10G10R10UintPack32: return "A2B10G10R10UintPack32";
- case Format::eA2B10G10R10SintPack32: return "A2B10G10R10SintPack32";
- case Format::eR16Unorm: return "R16Unorm";
- case Format::eR16Snorm: return "R16Snorm";
- case Format::eR16Uscaled: return "R16Uscaled";
- case Format::eR16Sscaled: return "R16Sscaled";
- case Format::eR16Uint: return "R16Uint";
- case Format::eR16Sint: return "R16Sint";
- case Format::eR16Sfloat: return "R16Sfloat";
- case Format::eR16G16Unorm: return "R16G16Unorm";
- case Format::eR16G16Snorm: return "R16G16Snorm";
- case Format::eR16G16Uscaled: return "R16G16Uscaled";
- case Format::eR16G16Sscaled: return "R16G16Sscaled";
- case Format::eR16G16Uint: return "R16G16Uint";
- case Format::eR16G16Sint: return "R16G16Sint";
- case Format::eR16G16Sfloat: return "R16G16Sfloat";
- case Format::eR16G16B16Unorm: return "R16G16B16Unorm";
- case Format::eR16G16B16Snorm: return "R16G16B16Snorm";
- case Format::eR16G16B16Uscaled: return "R16G16B16Uscaled";
- case Format::eR16G16B16Sscaled: return "R16G16B16Sscaled";
- case Format::eR16G16B16Uint: return "R16G16B16Uint";
- case Format::eR16G16B16Sint: return "R16G16B16Sint";
- case Format::eR16G16B16Sfloat: return "R16G16B16Sfloat";
- case Format::eR16G16B16A16Unorm: return "R16G16B16A16Unorm";
- case Format::eR16G16B16A16Snorm: return "R16G16B16A16Snorm";
- case Format::eR16G16B16A16Uscaled: return "R16G16B16A16Uscaled";
- case Format::eR16G16B16A16Sscaled: return "R16G16B16A16Sscaled";
- case Format::eR16G16B16A16Uint: return "R16G16B16A16Uint";
- case Format::eR16G16B16A16Sint: return "R16G16B16A16Sint";
- case Format::eR16G16B16A16Sfloat: return "R16G16B16A16Sfloat";
- case Format::eR32Uint: return "R32Uint";
- case Format::eR32Sint: return "R32Sint";
- case Format::eR32Sfloat: return "R32Sfloat";
- case Format::eR32G32Uint: return "R32G32Uint";
- case Format::eR32G32Sint: return "R32G32Sint";
- case Format::eR32G32Sfloat: return "R32G32Sfloat";
- case Format::eR32G32B32Uint: return "R32G32B32Uint";
- case Format::eR32G32B32Sint: return "R32G32B32Sint";
- case Format::eR32G32B32Sfloat: return "R32G32B32Sfloat";
- case Format::eR32G32B32A32Uint: return "R32G32B32A32Uint";
- case Format::eR32G32B32A32Sint: return "R32G32B32A32Sint";
- case Format::eR32G32B32A32Sfloat: return "R32G32B32A32Sfloat";
- case Format::eR64Uint: return "R64Uint";
- case Format::eR64Sint: return "R64Sint";
- case Format::eR64Sfloat: return "R64Sfloat";
- case Format::eR64G64Uint: return "R64G64Uint";
- case Format::eR64G64Sint: return "R64G64Sint";
- case Format::eR64G64Sfloat: return "R64G64Sfloat";
- case Format::eR64G64B64Uint: return "R64G64B64Uint";
- case Format::eR64G64B64Sint: return "R64G64B64Sint";
- case Format::eR64G64B64Sfloat: return "R64G64B64Sfloat";
- case Format::eR64G64B64A64Uint: return "R64G64B64A64Uint";
- case Format::eR64G64B64A64Sint: return "R64G64B64A64Sint";
- case Format::eR64G64B64A64Sfloat: return "R64G64B64A64Sfloat";
- case Format::eB10G11R11UfloatPack32: return "B10G11R11UfloatPack32";
- case Format::eE5B9G9R9UfloatPack32: return "E5B9G9R9UfloatPack32";
- case Format::eD16Unorm: return "D16Unorm";
- case Format::eX8D24UnormPack32: return "X8D24UnormPack32";
- case Format::eD32Sfloat: return "D32Sfloat";
- case Format::eS8Uint: return "S8Uint";
- case Format::eD16UnormS8Uint: return "D16UnormS8Uint";
- case Format::eD24UnormS8Uint: return "D24UnormS8Uint";
- case Format::eD32SfloatS8Uint: return "D32SfloatS8Uint";
- case Format::eBc1RgbUnormBlock: return "Bc1RgbUnormBlock";
- case Format::eBc1RgbSrgbBlock: return "Bc1RgbSrgbBlock";
- case Format::eBc1RgbaUnormBlock: return "Bc1RgbaUnormBlock";
- case Format::eBc1RgbaSrgbBlock: return "Bc1RgbaSrgbBlock";
- case Format::eBc2UnormBlock: return "Bc2UnormBlock";
- case Format::eBc2SrgbBlock: return "Bc2SrgbBlock";
- case Format::eBc3UnormBlock: return "Bc3UnormBlock";
- case Format::eBc3SrgbBlock: return "Bc3SrgbBlock";
- case Format::eBc4UnormBlock: return "Bc4UnormBlock";
- case Format::eBc4SnormBlock: return "Bc4SnormBlock";
- case Format::eBc5UnormBlock: return "Bc5UnormBlock";
- case Format::eBc5SnormBlock: return "Bc5SnormBlock";
- case Format::eBc6HUfloatBlock: return "Bc6HUfloatBlock";
- case Format::eBc6HSfloatBlock: return "Bc6HSfloatBlock";
- case Format::eBc7UnormBlock: return "Bc7UnormBlock";
- case Format::eBc7SrgbBlock: return "Bc7SrgbBlock";
- case Format::eEtc2R8G8B8UnormBlock: return "Etc2R8G8B8UnormBlock";
- case Format::eEtc2R8G8B8SrgbBlock: return "Etc2R8G8B8SrgbBlock";
- case Format::eEtc2R8G8B8A1UnormBlock: return "Etc2R8G8B8A1UnormBlock";
- case Format::eEtc2R8G8B8A1SrgbBlock: return "Etc2R8G8B8A1SrgbBlock";
- case Format::eEtc2R8G8B8A8UnormBlock: return "Etc2R8G8B8A8UnormBlock";
- case Format::eEtc2R8G8B8A8SrgbBlock: return "Etc2R8G8B8A8SrgbBlock";
- case Format::eEacR11UnormBlock: return "EacR11UnormBlock";
- case Format::eEacR11SnormBlock: return "EacR11SnormBlock";
- case Format::eEacR11G11UnormBlock: return "EacR11G11UnormBlock";
- case Format::eEacR11G11SnormBlock: return "EacR11G11SnormBlock";
- case Format::eAstc4x4UnormBlock: return "Astc4x4UnormBlock";
- case Format::eAstc4x4SrgbBlock: return "Astc4x4SrgbBlock";
- case Format::eAstc5x4UnormBlock: return "Astc5x4UnormBlock";
- case Format::eAstc5x4SrgbBlock: return "Astc5x4SrgbBlock";
- case Format::eAstc5x5UnormBlock: return "Astc5x5UnormBlock";
- case Format::eAstc5x5SrgbBlock: return "Astc5x5SrgbBlock";
- case Format::eAstc6x5UnormBlock: return "Astc6x5UnormBlock";
- case Format::eAstc6x5SrgbBlock: return "Astc6x5SrgbBlock";
- case Format::eAstc6x6UnormBlock: return "Astc6x6UnormBlock";
- case Format::eAstc6x6SrgbBlock: return "Astc6x6SrgbBlock";
- case Format::eAstc8x5UnormBlock: return "Astc8x5UnormBlock";
- case Format::eAstc8x5SrgbBlock: return "Astc8x5SrgbBlock";
- case Format::eAstc8x6UnormBlock: return "Astc8x6UnormBlock";
- case Format::eAstc8x6SrgbBlock: return "Astc8x6SrgbBlock";
- case Format::eAstc8x8UnormBlock: return "Astc8x8UnormBlock";
- case Format::eAstc8x8SrgbBlock: return "Astc8x8SrgbBlock";
- case Format::eAstc10x5UnormBlock: return "Astc10x5UnormBlock";
- case Format::eAstc10x5SrgbBlock: return "Astc10x5SrgbBlock";
- case Format::eAstc10x6UnormBlock: return "Astc10x6UnormBlock";
- case Format::eAstc10x6SrgbBlock: return "Astc10x6SrgbBlock";
- case Format::eAstc10x8UnormBlock: return "Astc10x8UnormBlock";
- case Format::eAstc10x8SrgbBlock: return "Astc10x8SrgbBlock";
- case Format::eAstc10x10UnormBlock: return "Astc10x10UnormBlock";
- case Format::eAstc10x10SrgbBlock: return "Astc10x10SrgbBlock";
- case Format::eAstc12x10UnormBlock: return "Astc12x10UnormBlock";
- case Format::eAstc12x10SrgbBlock: return "Astc12x10SrgbBlock";
- case Format::eAstc12x12UnormBlock: return "Astc12x12UnormBlock";
- case Format::eAstc12x12SrgbBlock: return "Astc12x12SrgbBlock";
- case Format::eG8B8G8R8422Unorm: return "G8B8G8R8422Unorm";
- case Format::eB8G8R8G8422Unorm: return "B8G8R8G8422Unorm";
- case Format::eG8B8R83Plane420Unorm: return "G8B8R83Plane420Unorm";
- case Format::eG8B8R82Plane420Unorm: return "G8B8R82Plane420Unorm";
- case Format::eG8B8R83Plane422Unorm: return "G8B8R83Plane422Unorm";
- case Format::eG8B8R82Plane422Unorm: return "G8B8R82Plane422Unorm";
- case Format::eG8B8R83Plane444Unorm: return "G8B8R83Plane444Unorm";
- case Format::eR10X6UnormPack16: return "R10X6UnormPack16";
- case Format::eR10X6G10X6Unorm2Pack16: return "R10X6G10X6Unorm2Pack16";
- case Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return "R10X6G10X6B10X6A10X6Unorm4Pack16";
- case Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return "G10X6B10X6G10X6R10X6422Unorm4Pack16";
- case Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return "B10X6G10X6R10X6G10X6422Unorm4Pack16";
- case Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return "G10X6B10X6R10X63Plane420Unorm3Pack16";
- case Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return "G10X6B10X6R10X62Plane420Unorm3Pack16";
- case Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return "G10X6B10X6R10X63Plane422Unorm3Pack16";
- case Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return "G10X6B10X6R10X62Plane422Unorm3Pack16";
- case Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return "G10X6B10X6R10X63Plane444Unorm3Pack16";
- case Format::eR12X4UnormPack16: return "R12X4UnormPack16";
- case Format::eR12X4G12X4Unorm2Pack16: return "R12X4G12X4Unorm2Pack16";
- case Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return "R12X4G12X4B12X4A12X4Unorm4Pack16";
- case Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return "G12X4B12X4G12X4R12X4422Unorm4Pack16";
- case Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return "B12X4G12X4R12X4G12X4422Unorm4Pack16";
- case Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return "G12X4B12X4R12X43Plane420Unorm3Pack16";
- case Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return "G12X4B12X4R12X42Plane420Unorm3Pack16";
- case Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return "G12X4B12X4R12X43Plane422Unorm3Pack16";
- case Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return "G12X4B12X4R12X42Plane422Unorm3Pack16";
- case Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return "G12X4B12X4R12X43Plane444Unorm3Pack16";
- case Format::eG16B16G16R16422Unorm: return "G16B16G16R16422Unorm";
- case Format::eB16G16R16G16422Unorm: return "B16G16R16G16422Unorm";
- case Format::eG16B16R163Plane420Unorm: return "G16B16R163Plane420Unorm";
- case Format::eG16B16R162Plane420Unorm: return "G16B16R162Plane420Unorm";
- case Format::eG16B16R163Plane422Unorm: return "G16B16R163Plane422Unorm";
- case Format::eG16B16R162Plane422Unorm: return "G16B16R162Plane422Unorm";
- case Format::eG16B16R163Plane444Unorm: return "G16B16R163Plane444Unorm";
- case Format::eG8B8R82Plane444Unorm: return "G8B8R82Plane444Unorm";
- case Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return "G10X6B10X6R10X62Plane444Unorm3Pack16";
- case Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return "G12X4B12X4R12X42Plane444Unorm3Pack16";
- case Format::eG16B16R162Plane444Unorm: return "G16B16R162Plane444Unorm";
- case Format::eA4R4G4B4UnormPack16: return "A4R4G4B4UnormPack16";
- case Format::eA4B4G4R4UnormPack16: return "A4B4G4R4UnormPack16";
- case Format::eAstc4x4SfloatBlock: return "Astc4x4SfloatBlock";
- case Format::eAstc5x4SfloatBlock: return "Astc5x4SfloatBlock";
- case Format::eAstc5x5SfloatBlock: return "Astc5x5SfloatBlock";
- case Format::eAstc6x5SfloatBlock: return "Astc6x5SfloatBlock";
- case Format::eAstc6x6SfloatBlock: return "Astc6x6SfloatBlock";
- case Format::eAstc8x5SfloatBlock: return "Astc8x5SfloatBlock";
- case Format::eAstc8x6SfloatBlock: return "Astc8x6SfloatBlock";
- case Format::eAstc8x8SfloatBlock: return "Astc8x8SfloatBlock";
- case Format::eAstc10x5SfloatBlock: return "Astc10x5SfloatBlock";
- case Format::eAstc10x6SfloatBlock: return "Astc10x6SfloatBlock";
- case Format::eAstc10x8SfloatBlock: return "Astc10x8SfloatBlock";
- case Format::eAstc10x10SfloatBlock: return "Astc10x10SfloatBlock";
- case Format::eAstc12x10SfloatBlock: return "Astc12x10SfloatBlock";
- case Format::eAstc12x12SfloatBlock: return "Astc12x12SfloatBlock";
- case Format::ePvrtc12BppUnormBlockIMG: return "Pvrtc12BppUnormBlockIMG";
- case Format::ePvrtc14BppUnormBlockIMG: return "Pvrtc14BppUnormBlockIMG";
- case Format::ePvrtc22BppUnormBlockIMG: return "Pvrtc22BppUnormBlockIMG";
- case Format::ePvrtc24BppUnormBlockIMG: return "Pvrtc24BppUnormBlockIMG";
- case Format::ePvrtc12BppSrgbBlockIMG: return "Pvrtc12BppSrgbBlockIMG";
- case Format::ePvrtc14BppSrgbBlockIMG: return "Pvrtc14BppSrgbBlockIMG";
- case Format::ePvrtc22BppSrgbBlockIMG: return "Pvrtc22BppSrgbBlockIMG";
- case Format::ePvrtc24BppSrgbBlockIMG: return "Pvrtc24BppSrgbBlockIMG";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class FormatFeatureFlagBits : VkFormatFeatureFlags
{
- eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT,
- eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT,
- eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT,
- eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT,
- eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT,
- eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT,
- eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT,
- eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT,
- eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT,
- eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT,
- eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT,
- eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT,
- eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT,
- eTransferSrc = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT,
- eTransferDst = VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
- eMidpointChromaSamples = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT,
- eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,
- eSampledImageYcbcrConversionSeparateReconstructionFilter =
- VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,
- eSampledImageYcbcrConversionChromaReconstructionExplicit =
- VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT,
+ eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT,
+ eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT,
+ eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT,
+ eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT,
+ eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT,
+ eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT,
+ eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT,
+ eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT,
+ eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT,
+ eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT,
+ eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT,
+ eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT,
+ eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT,
+ eTransferSrc = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT,
+ eTransferDst = VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
+ eMidpointChromaSamples = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT,
+ eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,
+ eSampledImageYcbcrConversionSeparateReconstructionFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,
+ eSampledImageYcbcrConversionChromaReconstructionExplicit = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT,
eSampledImageYcbcrConversionChromaReconstructionExplicitForceable =
VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT,
- eDisjoint = VK_FORMAT_FEATURE_DISJOINT_BIT,
- eCositedChromaSamples = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT,
- eSampledImageFilterMinmax = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT,
- eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG,
+ eDisjoint = VK_FORMAT_FEATURE_DISJOINT_BIT,
+ eCositedChromaSamples = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT,
+ eSampledImageFilterMinmax = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
eVideoDecodeOutputKHR = VK_FORMAT_FEATURE_VIDEO_DECODE_OUTPUT_BIT_KHR,
eVideoDecodeDpbKHR = VK_FORMAT_FEATURE_VIDEO_DECODE_DPB_BIT_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
eAccelerationStructureVertexBufferKHR = VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR,
+ eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT,
eFragmentDensityMapEXT = VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT,
eFragmentShadingRateAttachmentKHR = VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
eVideoEncodeInputKHR = VK_FORMAT_FEATURE_VIDEO_ENCODE_INPUT_BIT_KHR,
eVideoEncodeDpbKHR = VK_FORMAT_FEATURE_VIDEO_ENCODE_DPB_BIT_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR,
- eDisjointKHR = VK_FORMAT_FEATURE_DISJOINT_BIT_KHR,
- eMidpointChromaSamplesKHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR,
- eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT,
- eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT,
- eSampledImageYcbcrConversionChromaReconstructionExplicitKHR =
- VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR,
+ eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR,
+ eDisjointKHR = VK_FORMAT_FEATURE_DISJOINT_BIT_KHR,
+ eMidpointChromaSamplesKHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR,
+ eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG,
+ eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT,
+ eSampledImageYcbcrConversionChromaReconstructionExplicitKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR,
eSampledImageYcbcrConversionChromaReconstructionExplicitForceableKHR =
VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR,
- eSampledImageYcbcrConversionLinearFilterKHR =
- VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR,
- eSampledImageYcbcrConversionSeparateReconstructionFilterKHR =
- VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR,
- eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR,
- eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR
+ eSampledImageYcbcrConversionLinearFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR,
+ eSampledImageYcbcrConversionSeparateReconstructionFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR,
+ eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR,
+ eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR
};
- VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlagBits value )
- {
- switch ( value )
- {
- case FormatFeatureFlagBits::eSampledImage: return "SampledImage";
- case FormatFeatureFlagBits::eStorageImage: return "StorageImage";
- case FormatFeatureFlagBits::eStorageImageAtomic: return "StorageImageAtomic";
- case FormatFeatureFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer";
- case FormatFeatureFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer";
- case FormatFeatureFlagBits::eStorageTexelBufferAtomic: return "StorageTexelBufferAtomic";
- case FormatFeatureFlagBits::eVertexBuffer: return "VertexBuffer";
- case FormatFeatureFlagBits::eColorAttachment: return "ColorAttachment";
- case FormatFeatureFlagBits::eColorAttachmentBlend: return "ColorAttachmentBlend";
- case FormatFeatureFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment";
- case FormatFeatureFlagBits::eBlitSrc: return "BlitSrc";
- case FormatFeatureFlagBits::eBlitDst: return "BlitDst";
- case FormatFeatureFlagBits::eSampledImageFilterLinear: return "SampledImageFilterLinear";
- case FormatFeatureFlagBits::eTransferSrc: return "TransferSrc";
- case FormatFeatureFlagBits::eTransferDst: return "TransferDst";
- case FormatFeatureFlagBits::eMidpointChromaSamples: return "MidpointChromaSamples";
- case FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter:
- return "SampledImageYcbcrConversionLinearFilter";
- case FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter:
- return "SampledImageYcbcrConversionSeparateReconstructionFilter";
- case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit:
- return "SampledImageYcbcrConversionChromaReconstructionExplicit";
- case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable:
- return "SampledImageYcbcrConversionChromaReconstructionExplicitForceable";
- case FormatFeatureFlagBits::eDisjoint: return "Disjoint";
- case FormatFeatureFlagBits::eCositedChromaSamples: return "CositedChromaSamples";
- case FormatFeatureFlagBits::eSampledImageFilterMinmax: return "SampledImageFilterMinmax";
- case FormatFeatureFlagBits::eSampledImageFilterCubicIMG: return "SampledImageFilterCubicIMG";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- case FormatFeatureFlagBits::eVideoDecodeOutputKHR: return "VideoDecodeOutputKHR";
- case FormatFeatureFlagBits::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- case FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR: return "AccelerationStructureVertexBufferKHR";
- case FormatFeatureFlagBits::eFragmentDensityMapEXT: return "FragmentDensityMapEXT";
- case FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- case FormatFeatureFlagBits::eVideoEncodeInputKHR: return "VideoEncodeInputKHR";
- case FormatFeatureFlagBits::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class ImageCreateFlagBits : VkImageCreateFlags
{
- eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT,
- eSparseResidency = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT,
- eSparseAliased = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT,
- eMutableFormat = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
- eCubeCompatible = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT,
- eAlias = VK_IMAGE_CREATE_ALIAS_BIT,
- eSplitInstanceBindRegions = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT,
- e2DArrayCompatible = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT,
- eBlockTexelViewCompatible = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT,
- eExtendedUsage = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT,
- eProtected = VK_IMAGE_CREATE_PROTECTED_BIT,
- eDisjoint = VK_IMAGE_CREATE_DISJOINT_BIT,
- eCornerSampledNV = VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV,
- eSampleLocationsCompatibleDepthEXT = VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT,
- eSubsampledEXT = VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT,
- eFragmentDensityMapOffsetQCOM = VK_IMAGE_CREATE_FRAGMENT_DENSITY_MAP_OFFSET_BIT_QCOM,
- e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR,
- eAliasKHR = VK_IMAGE_CREATE_ALIAS_BIT_KHR,
- eBlockTexelViewCompatibleKHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR,
- eDisjointKHR = VK_IMAGE_CREATE_DISJOINT_BIT_KHR,
- eExtendedUsageKHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR,
- eSplitInstanceBindRegionsKHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR
- };
-
- VULKAN_HPP_INLINE std::string to_string( ImageCreateFlagBits value )
- {
- switch ( value )
- {
- case ImageCreateFlagBits::eSparseBinding: return "SparseBinding";
- case ImageCreateFlagBits::eSparseResidency: return "SparseResidency";
- case ImageCreateFlagBits::eSparseAliased: return "SparseAliased";
- case ImageCreateFlagBits::eMutableFormat: return "MutableFormat";
- case ImageCreateFlagBits::eCubeCompatible: return "CubeCompatible";
- case ImageCreateFlagBits::eAlias: return "Alias";
- case ImageCreateFlagBits::eSplitInstanceBindRegions: return "SplitInstanceBindRegions";
- case ImageCreateFlagBits::e2DArrayCompatible: return "2DArrayCompatible";
- case ImageCreateFlagBits::eBlockTexelViewCompatible: return "BlockTexelViewCompatible";
- case ImageCreateFlagBits::eExtendedUsage: return "ExtendedUsage";
- case ImageCreateFlagBits::eProtected: return "Protected";
- case ImageCreateFlagBits::eDisjoint: return "Disjoint";
- case ImageCreateFlagBits::eCornerSampledNV: return "CornerSampledNV";
- case ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT: return "SampleLocationsCompatibleDepthEXT";
- case ImageCreateFlagBits::eSubsampledEXT: return "SubsampledEXT";
- case ImageCreateFlagBits::eFragmentDensityMapOffsetQCOM: return "FragmentDensityMapOffsetQCOM";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
+ eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT,
+ eSparseResidency = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT,
+ eSparseAliased = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT,
+ eMutableFormat = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
+ eCubeCompatible = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT,
+ eAlias = VK_IMAGE_CREATE_ALIAS_BIT,
+ eSplitInstanceBindRegions = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT,
+ e2DArrayCompatible = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT,
+ eBlockTexelViewCompatible = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT,
+ eExtendedUsage = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT,
+ eProtected = VK_IMAGE_CREATE_PROTECTED_BIT,
+ eDisjoint = VK_IMAGE_CREATE_DISJOINT_BIT,
+ eCornerSampledNV = VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV,
+ eSampleLocationsCompatibleDepthEXT = VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT,
+ eSubsampledEXT = VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT,
+ eMultisampledRenderToSingleSampledEXT = VK_IMAGE_CREATE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_BIT_EXT,
+ e2DViewCompatibleEXT = VK_IMAGE_CREATE_2D_VIEW_COMPATIBLE_BIT_EXT,
+ eFragmentDensityMapOffsetQCOM = VK_IMAGE_CREATE_FRAGMENT_DENSITY_MAP_OFFSET_BIT_QCOM,
+ e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR,
+ eAliasKHR = VK_IMAGE_CREATE_ALIAS_BIT_KHR,
+ eBlockTexelViewCompatibleKHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR,
+ eDisjointKHR = VK_IMAGE_CREATE_DISJOINT_BIT_KHR,
+ eExtendedUsageKHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR,
+ eSplitInstanceBindRegionsKHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR
+ };
enum class ImageTiling
{
@@ -2835,17 +1495,6 @@ namespace VULKAN_HPP_NAMESPACE
eDrmFormatModifierEXT = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT
};
- VULKAN_HPP_INLINE std::string to_string( ImageTiling value )
- {
- switch ( value )
- {
- case ImageTiling::eOptimal: return "Optimal";
- case ImageTiling::eLinear: return "Linear";
- case ImageTiling::eDrmFormatModifierEXT: return "DrmFormatModifierEXT";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class ImageType
{
e1D = VK_IMAGE_TYPE_1D,
@@ -2853,17 +1502,6 @@ namespace VULKAN_HPP_NAMESPACE
e3D = VK_IMAGE_TYPE_3D
};
- VULKAN_HPP_INLINE std::string to_string( ImageType value )
- {
- switch ( value )
- {
- case ImageType::e1D: return "1D";
- case ImageType::e2D: return "2D";
- case ImageType::e3D: return "3D";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class ImageUsageFlagBits : VkImageUsageFlags
{
eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
@@ -2886,53 +1524,23 @@ namespace VULKAN_HPP_NAMESPACE
eVideoEncodeSrcKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR,
eVideoEncodeDpbKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- eInvocationMaskHUAWEI = VK_IMAGE_USAGE_INVOCATION_MASK_BIT_HUAWEI,
- eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV
+ eAttachmentFeedbackLoopEXT = VK_IMAGE_USAGE_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT,
+ eInvocationMaskHUAWEI = VK_IMAGE_USAGE_INVOCATION_MASK_BIT_HUAWEI,
+ eSampleWeightQCOM = VK_IMAGE_USAGE_SAMPLE_WEIGHT_BIT_QCOM,
+ eSampleBlockMatchQCOM = VK_IMAGE_USAGE_SAMPLE_BLOCK_MATCH_BIT_QCOM,
+ eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV
};
- VULKAN_HPP_INLINE std::string to_string( ImageUsageFlagBits value )
+ enum class InstanceCreateFlagBits : VkInstanceCreateFlags
{
- switch ( value )
- {
- case ImageUsageFlagBits::eTransferSrc: return "TransferSrc";
- case ImageUsageFlagBits::eTransferDst: return "TransferDst";
- case ImageUsageFlagBits::eSampled: return "Sampled";
- case ImageUsageFlagBits::eStorage: return "Storage";
- case ImageUsageFlagBits::eColorAttachment: return "ColorAttachment";
- case ImageUsageFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment";
- case ImageUsageFlagBits::eTransientAttachment: return "TransientAttachment";
- case ImageUsageFlagBits::eInputAttachment: return "InputAttachment";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- case ImageUsageFlagBits::eVideoDecodeDstKHR: return "VideoDecodeDstKHR";
- case ImageUsageFlagBits::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR";
- case ImageUsageFlagBits::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- case ImageUsageFlagBits::eFragmentDensityMapEXT: return "FragmentDensityMapEXT";
- case ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- case ImageUsageFlagBits::eVideoEncodeDstKHR: return "VideoEncodeDstKHR";
- case ImageUsageFlagBits::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR";
- case ImageUsageFlagBits::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- case ImageUsageFlagBits::eInvocationMaskHUAWEI: return "InvocationMaskHUAWEI";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
+ eEnumeratePortabilityKHR = VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR
+ };
enum class InternalAllocationType
{
eExecutable = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE
};
- VULKAN_HPP_INLINE std::string to_string( InternalAllocationType value )
- {
- switch ( value )
- {
- case InternalAllocationType::eExecutable: return "Executable";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class MemoryHeapFlagBits : VkMemoryHeapFlags
{
eDeviceLocal = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT,
@@ -2940,16 +1548,6 @@ namespace VULKAN_HPP_NAMESPACE
eMultiInstanceKHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR
};
- VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlagBits value )
- {
- switch ( value )
- {
- case MemoryHeapFlagBits::eDeviceLocal: return "DeviceLocal";
- case MemoryHeapFlagBits::eMultiInstance: return "MultiInstance";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class MemoryPropertyFlagBits : VkMemoryPropertyFlags
{
eDeviceLocal = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
@@ -2963,23 +1561,6 @@ namespace VULKAN_HPP_NAMESPACE
eRdmaCapableNV = VK_MEMORY_PROPERTY_RDMA_CAPABLE_BIT_NV
};
- VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlagBits value )
- {
- switch ( value )
- {
- case MemoryPropertyFlagBits::eDeviceLocal: return "DeviceLocal";
- case MemoryPropertyFlagBits::eHostVisible: return "HostVisible";
- case MemoryPropertyFlagBits::eHostCoherent: return "HostCoherent";
- case MemoryPropertyFlagBits::eHostCached: return "HostCached";
- case MemoryPropertyFlagBits::eLazilyAllocated: return "LazilyAllocated";
- case MemoryPropertyFlagBits::eProtected: return "Protected";
- case MemoryPropertyFlagBits::eDeviceCoherentAMD: return "DeviceCoherentAMD";
- case MemoryPropertyFlagBits::eDeviceUncachedAMD: return "DeviceUncachedAMD";
- case MemoryPropertyFlagBits::eRdmaCapableNV: return "RdmaCapableNV";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class PhysicalDeviceType
{
eOther = VK_PHYSICAL_DEVICE_TYPE_OTHER,
@@ -2989,19 +1570,6 @@ namespace VULKAN_HPP_NAMESPACE
eCpu = VK_PHYSICAL_DEVICE_TYPE_CPU
};
- VULKAN_HPP_INLINE std::string to_string( PhysicalDeviceType value )
- {
- switch ( value )
- {
- case PhysicalDeviceType::eOther: return "Other";
- case PhysicalDeviceType::eIntegratedGpu: return "IntegratedGpu";
- case PhysicalDeviceType::eDiscreteGpu: return "DiscreteGpu";
- case PhysicalDeviceType::eVirtualGpu: return "VirtualGpu";
- case PhysicalDeviceType::eCpu: return "Cpu";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class QueueFlagBits : VkQueueFlags
{
eGraphics = VK_QUEUE_GRAPHICS_BIT,
@@ -3011,27 +1579,11 @@ namespace VULKAN_HPP_NAMESPACE
eProtected = VK_QUEUE_PROTECTED_BIT,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
eVideoDecodeKHR = VK_QUEUE_VIDEO_DECODE_BIT_KHR,
- eVideoEncodeKHR = VK_QUEUE_VIDEO_ENCODE_BIT_KHR
+ eVideoEncodeKHR = VK_QUEUE_VIDEO_ENCODE_BIT_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ eOpticalFlowNV = VK_QUEUE_OPTICAL_FLOW_BIT_NV
};
- VULKAN_HPP_INLINE std::string to_string( QueueFlagBits value )
- {
- switch ( value )
- {
- case QueueFlagBits::eGraphics: return "Graphics";
- case QueueFlagBits::eCompute: return "Compute";
- case QueueFlagBits::eTransfer: return "Transfer";
- case QueueFlagBits::eSparseBinding: return "SparseBinding";
- case QueueFlagBits::eProtected: return "Protected";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- case QueueFlagBits::eVideoDecodeKHR: return "VideoDecodeKHR";
- case QueueFlagBits::eVideoEncodeKHR: return "VideoEncodeKHR";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class SampleCountFlagBits : VkSampleCountFlags
{
e1 = VK_SAMPLE_COUNT_1_BIT,
@@ -3043,21 +1595,6 @@ namespace VULKAN_HPP_NAMESPACE
e64 = VK_SAMPLE_COUNT_64_BIT
};
- VULKAN_HPP_INLINE std::string to_string( SampleCountFlagBits value )
- {
- switch ( value )
- {
- case SampleCountFlagBits::e1: return "1";
- case SampleCountFlagBits::e2: return "2";
- case SampleCountFlagBits::e4: return "4";
- case SampleCountFlagBits::e8: return "8";
- case SampleCountFlagBits::e16: return "16";
- case SampleCountFlagBits::e32: return "32";
- case SampleCountFlagBits::e64: return "64";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class SystemAllocationScope
{
eCommand = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND,
@@ -3067,37 +1604,10 @@ namespace VULKAN_HPP_NAMESPACE
eInstance = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE
};
- VULKAN_HPP_INLINE std::string to_string( SystemAllocationScope value )
- {
- switch ( value )
- {
- case SystemAllocationScope::eCommand: return "Command";
- case SystemAllocationScope::eObject: return "Object";
- case SystemAllocationScope::eCache: return "Cache";
- case SystemAllocationScope::eDevice: return "Device";
- case SystemAllocationScope::eInstance: return "Instance";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
- enum class InstanceCreateFlagBits
- {
- };
-
- VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlagBits )
- {
- return "(void)";
- }
-
enum class DeviceCreateFlagBits
{
};
- VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlagBits )
- {
- return "(void)";
- }
-
enum class PipelineStageFlagBits : VkPipelineStageFlags
{
eTopOfPipe = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
@@ -3122,61 +1632,23 @@ namespace VULKAN_HPP_NAMESPACE
eConditionalRenderingEXT = VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT,
eAccelerationStructureBuildKHR = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR,
eRayTracingShaderKHR = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR,
- eTaskShaderNV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV,
- eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV,
eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT,
eFragmentShadingRateAttachmentKHR = VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
eCommandPreprocessNV = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV,
+ eTaskShaderEXT = VK_PIPELINE_STAGE_TASK_SHADER_BIT_EXT,
+ eMeshShaderEXT = VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT,
eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
+ eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV,
eNoneKHR = VK_PIPELINE_STAGE_NONE_KHR,
eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV,
- eShadingRateImageNV = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV
+ eShadingRateImageNV = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV,
+ eTaskShaderNV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV
};
- VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits value )
- {
- switch ( value )
- {
- case PipelineStageFlagBits::eTopOfPipe: return "TopOfPipe";
- case PipelineStageFlagBits::eDrawIndirect: return "DrawIndirect";
- case PipelineStageFlagBits::eVertexInput: return "VertexInput";
- case PipelineStageFlagBits::eVertexShader: return "VertexShader";
- case PipelineStageFlagBits::eTessellationControlShader: return "TessellationControlShader";
- case PipelineStageFlagBits::eTessellationEvaluationShader: return "TessellationEvaluationShader";
- case PipelineStageFlagBits::eGeometryShader: return "GeometryShader";
- case PipelineStageFlagBits::eFragmentShader: return "FragmentShader";
- case PipelineStageFlagBits::eEarlyFragmentTests: return "EarlyFragmentTests";
- case PipelineStageFlagBits::eLateFragmentTests: return "LateFragmentTests";
- case PipelineStageFlagBits::eColorAttachmentOutput: return "ColorAttachmentOutput";
- case PipelineStageFlagBits::eComputeShader: return "ComputeShader";
- case PipelineStageFlagBits::eTransfer: return "Transfer";
- case PipelineStageFlagBits::eBottomOfPipe: return "BottomOfPipe";
- case PipelineStageFlagBits::eHost: return "Host";
- case PipelineStageFlagBits::eAllGraphics: return "AllGraphics";
- case PipelineStageFlagBits::eAllCommands: return "AllCommands";
- case PipelineStageFlagBits::eNone: return "None";
- case PipelineStageFlagBits::eTransformFeedbackEXT: return "TransformFeedbackEXT";
- case PipelineStageFlagBits::eConditionalRenderingEXT: return "ConditionalRenderingEXT";
- case PipelineStageFlagBits::eAccelerationStructureBuildKHR: return "AccelerationStructureBuildKHR";
- case PipelineStageFlagBits::eRayTracingShaderKHR: return "RayTracingShaderKHR";
- case PipelineStageFlagBits::eTaskShaderNV: return "TaskShaderNV";
- case PipelineStageFlagBits::eMeshShaderNV: return "MeshShaderNV";
- case PipelineStageFlagBits::eFragmentDensityProcessEXT: return "FragmentDensityProcessEXT";
- case PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR";
- case PipelineStageFlagBits::eCommandPreprocessNV: return "CommandPreprocessNV";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class MemoryMapFlagBits : VkMemoryMapFlags
{
};
- VULKAN_HPP_INLINE std::string to_string( MemoryMapFlagBits )
- {
- return "(void)";
- }
-
enum class ImageAspectFlagBits : VkImageAspectFlags
{
eColor = VK_IMAGE_ASPECT_COLOR_BIT,
@@ -3197,26 +1669,6 @@ namespace VULKAN_HPP_NAMESPACE
ePlane2KHR = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR
};
- VULKAN_HPP_INLINE std::string to_string( ImageAspectFlagBits value )
- {
- switch ( value )
- {
- case ImageAspectFlagBits::eColor: return "Color";
- case ImageAspectFlagBits::eDepth: return "Depth";
- case ImageAspectFlagBits::eStencil: return "Stencil";
- case ImageAspectFlagBits::eMetadata: return "Metadata";
- case ImageAspectFlagBits::ePlane0: return "Plane0";
- case ImageAspectFlagBits::ePlane1: return "Plane1";
- case ImageAspectFlagBits::ePlane2: return "Plane2";
- case ImageAspectFlagBits::eNone: return "None";
- case ImageAspectFlagBits::eMemoryPlane0EXT: return "MemoryPlane0EXT";
- case ImageAspectFlagBits::eMemoryPlane1EXT: return "MemoryPlane1EXT";
- case ImageAspectFlagBits::eMemoryPlane2EXT: return "MemoryPlane2EXT";
- case ImageAspectFlagBits::eMemoryPlane3EXT: return "MemoryPlane3EXT";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class SparseImageFormatFlagBits : VkSparseImageFormatFlags
{
eSingleMiptail = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT,
@@ -3224,105 +1676,43 @@ namespace VULKAN_HPP_NAMESPACE
eNonstandardBlockSize = VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT
};
- VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlagBits value )
- {
- switch ( value )
- {
- case SparseImageFormatFlagBits::eSingleMiptail: return "SingleMiptail";
- case SparseImageFormatFlagBits::eAlignedMipSize: return "AlignedMipSize";
- case SparseImageFormatFlagBits::eNonstandardBlockSize: return "NonstandardBlockSize";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class SparseMemoryBindFlagBits : VkSparseMemoryBindFlags
{
eMetadata = VK_SPARSE_MEMORY_BIND_METADATA_BIT
};
- VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlagBits value )
- {
- switch ( value )
- {
- case SparseMemoryBindFlagBits::eMetadata: return "Metadata";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class FenceCreateFlagBits : VkFenceCreateFlags
{
eSignaled = VK_FENCE_CREATE_SIGNALED_BIT
};
- VULKAN_HPP_INLINE std::string to_string( FenceCreateFlagBits value )
- {
- switch ( value )
- {
- case FenceCreateFlagBits::eSignaled: return "Signaled";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class SemaphoreCreateFlagBits : VkSemaphoreCreateFlags
{
};
- VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlagBits )
- {
- return "(void)";
- }
-
enum class EventCreateFlagBits : VkEventCreateFlags
{
eDeviceOnly = VK_EVENT_CREATE_DEVICE_ONLY_BIT,
eDeviceOnlyKHR = VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR
};
- VULKAN_HPP_INLINE std::string to_string( EventCreateFlagBits value )
- {
- switch ( value )
- {
- case EventCreateFlagBits::eDeviceOnly: return "DeviceOnly";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class QueryPipelineStatisticFlagBits : VkQueryPipelineStatisticFlags
{
- eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT,
- eInputAssemblyPrimitives = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT,
- eVertexShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT,
- eGeometryShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT,
- eGeometryShaderPrimitives = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT,
- eClippingInvocations = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT,
- eClippingPrimitives = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT,
- eFragmentShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT,
- eTessellationControlShaderPatches = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT,
- eTessellationEvaluationShaderInvocations =
- VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT,
- eComputeShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT
+ eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT,
+ eInputAssemblyPrimitives = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT,
+ eVertexShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT,
+ eGeometryShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT,
+ eGeometryShaderPrimitives = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT,
+ eClippingInvocations = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT,
+ eClippingPrimitives = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT,
+ eFragmentShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT,
+ eTessellationControlShaderPatches = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT,
+ eTessellationEvaluationShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT,
+ eComputeShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT,
+ eTaskShaderInvocationsEXT = VK_QUERY_PIPELINE_STATISTIC_TASK_SHADER_INVOCATIONS_BIT_EXT,
+ eMeshShaderInvocationsEXT = VK_QUERY_PIPELINE_STATISTIC_MESH_SHADER_INVOCATIONS_BIT_EXT
};
- VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlagBits value )
- {
- switch ( value )
- {
- case QueryPipelineStatisticFlagBits::eInputAssemblyVertices: return "InputAssemblyVertices";
- case QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives: return "InputAssemblyPrimitives";
- case QueryPipelineStatisticFlagBits::eVertexShaderInvocations: return "VertexShaderInvocations";
- case QueryPipelineStatisticFlagBits::eGeometryShaderInvocations: return "GeometryShaderInvocations";
- case QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives: return "GeometryShaderPrimitives";
- case QueryPipelineStatisticFlagBits::eClippingInvocations: return "ClippingInvocations";
- case QueryPipelineStatisticFlagBits::eClippingPrimitives: return "ClippingPrimitives";
- case QueryPipelineStatisticFlagBits::eFragmentShaderInvocations: return "FragmentShaderInvocations";
- case QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches: return "TessellationControlShaderPatches";
- case QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations:
- return "TessellationEvaluationShaderInvocations";
- case QueryPipelineStatisticFlagBits::eComputeShaderInvocations: return "ComputeShaderInvocations";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class QueryResultFlagBits : VkQueryResultFlags
{
e64 = VK_QUERY_RESULT_64_BIT,
@@ -3334,21 +1724,6 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
};
- VULKAN_HPP_INLINE std::string to_string( QueryResultFlagBits value )
- {
- switch ( value )
- {
- case QueryResultFlagBits::e64: return "64";
- case QueryResultFlagBits::eWait: return "Wait";
- case QueryResultFlagBits::eWithAvailability: return "WithAvailability";
- case QueryResultFlagBits::ePartial: return "Partial";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- case QueryResultFlagBits::eWithStatusKHR: return "WithStatusKHR";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class QueryType
{
eOcclusion = VK_QUERY_TYPE_OCCLUSION,
@@ -3364,42 +1739,20 @@ namespace VULKAN_HPP_NAMESPACE
eAccelerationStructureCompactedSizeNV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV,
ePerformanceQueryINTEL = VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- eVideoEncodeBitstreamBufferRangeKHR = VK_QUERY_TYPE_VIDEO_ENCODE_BITSTREAM_BUFFER_RANGE_KHR
+ eVideoEncodeBitstreamBufferRangeKHR = VK_QUERY_TYPE_VIDEO_ENCODE_BITSTREAM_BUFFER_RANGE_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ eMeshPrimitivesGeneratedEXT = VK_QUERY_TYPE_MESH_PRIMITIVES_GENERATED_EXT,
+ ePrimitivesGeneratedEXT = VK_QUERY_TYPE_PRIMITIVES_GENERATED_EXT,
+ eAccelerationStructureSerializationBottomLevelPointersKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_BOTTOM_LEVEL_POINTERS_KHR,
+ eAccelerationStructureSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SIZE_KHR,
+ eMicromapSerializationSizeEXT = VK_QUERY_TYPE_MICROMAP_SERIALIZATION_SIZE_EXT,
+ eMicromapCompactedSizeEXT = VK_QUERY_TYPE_MICROMAP_COMPACTED_SIZE_EXT
};
- VULKAN_HPP_INLINE std::string to_string( QueryType value )
- {
- switch ( value )
- {
- case QueryType::eOcclusion: return "Occlusion";
- case QueryType::ePipelineStatistics: return "PipelineStatistics";
- case QueryType::eTimestamp: return "Timestamp";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- case QueryType::eResultStatusOnlyKHR: return "ResultStatusOnlyKHR";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- case QueryType::eTransformFeedbackStreamEXT: return "TransformFeedbackStreamEXT";
- case QueryType::ePerformanceQueryKHR: return "PerformanceQueryKHR";
- case QueryType::eAccelerationStructureCompactedSizeKHR: return "AccelerationStructureCompactedSizeKHR";
- case QueryType::eAccelerationStructureSerializationSizeKHR: return "AccelerationStructureSerializationSizeKHR";
- case QueryType::eAccelerationStructureCompactedSizeNV: return "AccelerationStructureCompactedSizeNV";
- case QueryType::ePerformanceQueryINTEL: return "PerformanceQueryINTEL";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- case QueryType::eVideoEncodeBitstreamBufferRangeKHR: return "VideoEncodeBitstreamBufferRangeKHR";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class QueryPoolCreateFlagBits
{
};
- VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlagBits )
- {
- return "(void)";
- }
-
enum class BufferCreateFlagBits : VkBufferCreateFlags
{
eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT,
@@ -3411,19 +1764,6 @@ namespace VULKAN_HPP_NAMESPACE
eDeviceAddressCaptureReplayKHR = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR
};
- VULKAN_HPP_INLINE std::string to_string( BufferCreateFlagBits value )
- {
- switch ( value )
- {
- case BufferCreateFlagBits::eSparseBinding: return "SparseBinding";
- case BufferCreateFlagBits::eSparseResidency: return "SparseResidency";
- case BufferCreateFlagBits::eSparseAliased: return "SparseAliased";
- case BufferCreateFlagBits::eProtected: return "Protected";
- case BufferCreateFlagBits::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class BufferUsageFlagBits : VkBufferUsageFlags
{
eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
@@ -3450,69 +1790,23 @@ namespace VULKAN_HPP_NAMESPACE
eVideoEncodeDstKHR = VK_BUFFER_USAGE_VIDEO_ENCODE_DST_BIT_KHR,
eVideoEncodeSrcKHR = VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- eRayTracingNV = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV,
- eShaderDeviceAddressEXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT,
- eShaderDeviceAddressKHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR
+ eMicromapBuildInputReadOnlyEXT = VK_BUFFER_USAGE_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT,
+ eMicromapStorageEXT = VK_BUFFER_USAGE_MICROMAP_STORAGE_BIT_EXT,
+ eRayTracingNV = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV,
+ eShaderDeviceAddressEXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT,
+ eShaderDeviceAddressKHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR
};
- VULKAN_HPP_INLINE std::string to_string( BufferUsageFlagBits value )
- {
- switch ( value )
- {
- case BufferUsageFlagBits::eTransferSrc: return "TransferSrc";
- case BufferUsageFlagBits::eTransferDst: return "TransferDst";
- case BufferUsageFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer";
- case BufferUsageFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer";
- case BufferUsageFlagBits::eUniformBuffer: return "UniformBuffer";
- case BufferUsageFlagBits::eStorageBuffer: return "StorageBuffer";
- case BufferUsageFlagBits::eIndexBuffer: return "IndexBuffer";
- case BufferUsageFlagBits::eVertexBuffer: return "VertexBuffer";
- case BufferUsageFlagBits::eIndirectBuffer: return "IndirectBuffer";
- case BufferUsageFlagBits::eShaderDeviceAddress: return "ShaderDeviceAddress";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- case BufferUsageFlagBits::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR";
- case BufferUsageFlagBits::eVideoDecodeDstKHR: return "VideoDecodeDstKHR";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- case BufferUsageFlagBits::eTransformFeedbackBufferEXT: return "TransformFeedbackBufferEXT";
- case BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT: return "TransformFeedbackCounterBufferEXT";
- case BufferUsageFlagBits::eConditionalRenderingEXT: return "ConditionalRenderingEXT";
- case BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR:
- return "AccelerationStructureBuildInputReadOnlyKHR";
- case BufferUsageFlagBits::eAccelerationStructureStorageKHR: return "AccelerationStructureStorageKHR";
- case BufferUsageFlagBits::eShaderBindingTableKHR: return "ShaderBindingTableKHR";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- case BufferUsageFlagBits::eVideoEncodeDstKHR: return "VideoEncodeDstKHR";
- case BufferUsageFlagBits::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class SharingMode
{
eExclusive = VK_SHARING_MODE_EXCLUSIVE,
eConcurrent = VK_SHARING_MODE_CONCURRENT
};
- VULKAN_HPP_INLINE std::string to_string( SharingMode value )
- {
- switch ( value )
- {
- case SharingMode::eExclusive: return "Exclusive";
- case SharingMode::eConcurrent: return "Concurrent";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class BufferViewCreateFlagBits : VkBufferViewCreateFlags
{
};
- VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlagBits )
- {
- return "(void)";
- }
-
enum class ImageLayout
{
eUndefined = VK_IMAGE_LAYOUT_UNDEFINED,
@@ -3546,6 +1840,7 @@ namespace VULKAN_HPP_NAMESPACE
eVideoEncodeSrcKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR,
eVideoEncodeDpbKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ eAttachmentFeedbackLoopOptimalEXT = VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT,
eAttachmentOptimalKHR = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR,
eDepthAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR,
eDepthAttachmentStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR,
@@ -3557,45 +1852,6 @@ namespace VULKAN_HPP_NAMESPACE
eStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR
};
- VULKAN_HPP_INLINE std::string to_string( ImageLayout value )
- {
- switch ( value )
- {
- case ImageLayout::eUndefined: return "Undefined";
- case ImageLayout::eGeneral: return "General";
- case ImageLayout::eColorAttachmentOptimal: return "ColorAttachmentOptimal";
- case ImageLayout::eDepthStencilAttachmentOptimal: return "DepthStencilAttachmentOptimal";
- case ImageLayout::eDepthStencilReadOnlyOptimal: return "DepthStencilReadOnlyOptimal";
- case ImageLayout::eShaderReadOnlyOptimal: return "ShaderReadOnlyOptimal";
- case ImageLayout::eTransferSrcOptimal: return "TransferSrcOptimal";
- case ImageLayout::eTransferDstOptimal: return "TransferDstOptimal";
- case ImageLayout::ePreinitialized: return "Preinitialized";
- case ImageLayout::eDepthReadOnlyStencilAttachmentOptimal: return "DepthReadOnlyStencilAttachmentOptimal";
- case ImageLayout::eDepthAttachmentStencilReadOnlyOptimal: return "DepthAttachmentStencilReadOnlyOptimal";
- case ImageLayout::eDepthAttachmentOptimal: return "DepthAttachmentOptimal";
- case ImageLayout::eDepthReadOnlyOptimal: return "DepthReadOnlyOptimal";
- case ImageLayout::eStencilAttachmentOptimal: return "StencilAttachmentOptimal";
- case ImageLayout::eStencilReadOnlyOptimal: return "StencilReadOnlyOptimal";
- case ImageLayout::eReadOnlyOptimal: return "ReadOnlyOptimal";
- case ImageLayout::eAttachmentOptimal: return "AttachmentOptimal";
- case ImageLayout::ePresentSrcKHR: return "PresentSrcKHR";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- case ImageLayout::eVideoDecodeDstKHR: return "VideoDecodeDstKHR";
- case ImageLayout::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR";
- case ImageLayout::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- case ImageLayout::eSharedPresentKHR: return "SharedPresentKHR";
- case ImageLayout::eFragmentDensityMapOptimalEXT: return "FragmentDensityMapOptimalEXT";
- case ImageLayout::eFragmentShadingRateAttachmentOptimalKHR: return "FragmentShadingRateAttachmentOptimalKHR";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- case ImageLayout::eVideoEncodeDstKHR: return "VideoEncodeDstKHR";
- case ImageLayout::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR";
- case ImageLayout::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class ComponentSwizzle
{
eIdentity = VK_COMPONENT_SWIZZLE_IDENTITY,
@@ -3607,37 +1863,12 @@ namespace VULKAN_HPP_NAMESPACE
eA = VK_COMPONENT_SWIZZLE_A
};
- VULKAN_HPP_INLINE std::string to_string( ComponentSwizzle value )
- {
- switch ( value )
- {
- case ComponentSwizzle::eIdentity: return "Identity";
- case ComponentSwizzle::eZero: return "Zero";
- case ComponentSwizzle::eOne: return "One";
- case ComponentSwizzle::eR: return "R";
- case ComponentSwizzle::eG: return "G";
- case ComponentSwizzle::eB: return "B";
- case ComponentSwizzle::eA: return "A";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class ImageViewCreateFlagBits : VkImageViewCreateFlags
{
eFragmentDensityMapDynamicEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT,
eFragmentDensityMapDeferredEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT
};
- VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlagBits value )
- {
- switch ( value )
- {
- case ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT: return "FragmentDensityMapDynamicEXT";
- case ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT: return "FragmentDensityMapDeferredEXT";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class ImageViewType
{
e1D = VK_IMAGE_VIEW_TYPE_1D,
@@ -3649,30 +1880,10 @@ namespace VULKAN_HPP_NAMESPACE
eCubeArray = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY
};
- VULKAN_HPP_INLINE std::string to_string( ImageViewType value )
- {
- switch ( value )
- {
- case ImageViewType::e1D: return "1D";
- case ImageViewType::e2D: return "2D";
- case ImageViewType::e3D: return "3D";
- case ImageViewType::eCube: return "Cube";
- case ImageViewType::e1DArray: return "1DArray";
- case ImageViewType::e2DArray: return "2DArray";
- case ImageViewType::eCubeArray: return "CubeArray";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class ShaderModuleCreateFlagBits : VkShaderModuleCreateFlags
{
};
- VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlagBits )
- {
- return "(void)";
- }
-
enum class BlendFactor
{
eZero = VK_BLEND_FACTOR_ZERO,
@@ -3696,33 +1907,6 @@ namespace VULKAN_HPP_NAMESPACE
eOneMinusSrc1Alpha = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA
};
- VULKAN_HPP_INLINE std::string to_string( BlendFactor value )
- {
- switch ( value )
- {
- case BlendFactor::eZero: return "Zero";
- case BlendFactor::eOne: return "One";
- case BlendFactor::eSrcColor: return "SrcColor";
- case BlendFactor::eOneMinusSrcColor: return "OneMinusSrcColor";
- case BlendFactor::eDstColor: return "DstColor";
- case BlendFactor::eOneMinusDstColor: return "OneMinusDstColor";
- case BlendFactor::eSrcAlpha: return "SrcAlpha";
- case BlendFactor::eOneMinusSrcAlpha: return "OneMinusSrcAlpha";
- case BlendFactor::eDstAlpha: return "DstAlpha";
- case BlendFactor::eOneMinusDstAlpha: return "OneMinusDstAlpha";
- case BlendFactor::eConstantColor: return "ConstantColor";
- case BlendFactor::eOneMinusConstantColor: return "OneMinusConstantColor";
- case BlendFactor::eConstantAlpha: return "ConstantAlpha";
- case BlendFactor::eOneMinusConstantAlpha: return "OneMinusConstantAlpha";
- case BlendFactor::eSrcAlphaSaturate: return "SrcAlphaSaturate";
- case BlendFactor::eSrc1Color: return "Src1Color";
- case BlendFactor::eOneMinusSrc1Color: return "OneMinusSrc1Color";
- case BlendFactor::eSrc1Alpha: return "Src1Alpha";
- case BlendFactor::eOneMinusSrc1Alpha: return "OneMinusSrc1Alpha";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class BlendOp
{
eAdd = VK_BLEND_OP_ADD,
@@ -3778,65 +1962,6 @@ namespace VULKAN_HPP_NAMESPACE
eBlueEXT = VK_BLEND_OP_BLUE_EXT
};
- VULKAN_HPP_INLINE std::string to_string( BlendOp value )
- {
- switch ( value )
- {
- case BlendOp::eAdd: return "Add";
- case BlendOp::eSubtract: return "Subtract";
- case BlendOp::eReverseSubtract: return "ReverseSubtract";
- case BlendOp::eMin: return "Min";
- case BlendOp::eMax: return "Max";
- case BlendOp::eZeroEXT: return "ZeroEXT";
- case BlendOp::eSrcEXT: return "SrcEXT";
- case BlendOp::eDstEXT: return "DstEXT";
- case BlendOp::eSrcOverEXT: return "SrcOverEXT";
- case BlendOp::eDstOverEXT: return "DstOverEXT";
- case BlendOp::eSrcInEXT: return "SrcInEXT";
- case BlendOp::eDstInEXT: return "DstInEXT";
- case BlendOp::eSrcOutEXT: return "SrcOutEXT";
- case BlendOp::eDstOutEXT: return "DstOutEXT";
- case BlendOp::eSrcAtopEXT: return "SrcAtopEXT";
- case BlendOp::eDstAtopEXT: return "DstAtopEXT";
- case BlendOp::eXorEXT: return "XorEXT";
- case BlendOp::eMultiplyEXT: return "MultiplyEXT";
- case BlendOp::eScreenEXT: return "ScreenEXT";
- case BlendOp::eOverlayEXT: return "OverlayEXT";
- case BlendOp::eDarkenEXT: return "DarkenEXT";
- case BlendOp::eLightenEXT: return "LightenEXT";
- case BlendOp::eColordodgeEXT: return "ColordodgeEXT";
- case BlendOp::eColorburnEXT: return "ColorburnEXT";
- case BlendOp::eHardlightEXT: return "HardlightEXT";
- case BlendOp::eSoftlightEXT: return "SoftlightEXT";
- case BlendOp::eDifferenceEXT: return "DifferenceEXT";
- case BlendOp::eExclusionEXT: return "ExclusionEXT";
- case BlendOp::eInvertEXT: return "InvertEXT";
- case BlendOp::eInvertRgbEXT: return "InvertRgbEXT";
- case BlendOp::eLineardodgeEXT: return "LineardodgeEXT";
- case BlendOp::eLinearburnEXT: return "LinearburnEXT";
- case BlendOp::eVividlightEXT: return "VividlightEXT";
- case BlendOp::eLinearlightEXT: return "LinearlightEXT";
- case BlendOp::ePinlightEXT: return "PinlightEXT";
- case BlendOp::eHardmixEXT: return "HardmixEXT";
- case BlendOp::eHslHueEXT: return "HslHueEXT";
- case BlendOp::eHslSaturationEXT: return "HslSaturationEXT";
- case BlendOp::eHslColorEXT: return "HslColorEXT";
- case BlendOp::eHslLuminosityEXT: return "HslLuminosityEXT";
- case BlendOp::ePlusEXT: return "PlusEXT";
- case BlendOp::ePlusClampedEXT: return "PlusClampedEXT";
- case BlendOp::ePlusClampedAlphaEXT: return "PlusClampedAlphaEXT";
- case BlendOp::ePlusDarkerEXT: return "PlusDarkerEXT";
- case BlendOp::eMinusEXT: return "MinusEXT";
- case BlendOp::eMinusClampedEXT: return "MinusClampedEXT";
- case BlendOp::eContrastEXT: return "ContrastEXT";
- case BlendOp::eInvertOvgEXT: return "InvertOvgEXT";
- case BlendOp::eRedEXT: return "RedEXT";
- case BlendOp::eGreenEXT: return "GreenEXT";
- case BlendOp::eBlueEXT: return "BlueEXT";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class ColorComponentFlagBits : VkColorComponentFlags
{
eR = VK_COLOR_COMPONENT_R_BIT,
@@ -3845,18 +1970,6 @@ namespace VULKAN_HPP_NAMESPACE
eA = VK_COLOR_COMPONENT_A_BIT
};
- VULKAN_HPP_INLINE std::string to_string( ColorComponentFlagBits value )
- {
- switch ( value )
- {
- case ColorComponentFlagBits::eR: return "R";
- case ColorComponentFlagBits::eG: return "G";
- case ColorComponentFlagBits::eB: return "B";
- case ColorComponentFlagBits::eA: return "A";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class CompareOp
{
eNever = VK_COMPARE_OP_NEVER,
@@ -3869,22 +1982,6 @@ namespace VULKAN_HPP_NAMESPACE
eAlways = VK_COMPARE_OP_ALWAYS
};
- VULKAN_HPP_INLINE std::string to_string( CompareOp value )
- {
- switch ( value )
- {
- case CompareOp::eNever: return "Never";
- case CompareOp::eLess: return "Less";
- case CompareOp::eEqual: return "Equal";
- case CompareOp::eLessOrEqual: return "LessOrEqual";
- case CompareOp::eGreater: return "Greater";
- case CompareOp::eNotEqual: return "NotEqual";
- case CompareOp::eGreaterOrEqual: return "GreaterOrEqual";
- case CompareOp::eAlways: return "Always";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class CullModeFlagBits : VkCullModeFlags
{
eNone = VK_CULL_MODE_NONE,
@@ -3893,118 +1990,92 @@ namespace VULKAN_HPP_NAMESPACE
eFrontAndBack = VK_CULL_MODE_FRONT_AND_BACK
};
- VULKAN_HPP_INLINE std::string to_string( CullModeFlagBits value )
- {
- switch ( value )
- {
- case CullModeFlagBits::eNone: return "None";
- case CullModeFlagBits::eFront: return "Front";
- case CullModeFlagBits::eBack: return "Back";
- case CullModeFlagBits::eFrontAndBack: return "FrontAndBack";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class DynamicState
{
- eViewport = VK_DYNAMIC_STATE_VIEWPORT,
- eScissor = VK_DYNAMIC_STATE_SCISSOR,
- eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH,
- eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS,
- eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS,
- eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS,
- eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
- eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK,
- eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE,
- eCullMode = VK_DYNAMIC_STATE_CULL_MODE,
- eFrontFace = VK_DYNAMIC_STATE_FRONT_FACE,
- ePrimitiveTopology = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY,
- eViewportWithCount = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT,
- eScissorWithCount = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT,
- eVertexInputBindingStride = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE,
- eDepthTestEnable = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE,
- eDepthWriteEnable = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE,
- eDepthCompareOp = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP,
- eDepthBoundsTestEnable = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE,
- eStencilTestEnable = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE,
- eStencilOp = VK_DYNAMIC_STATE_STENCIL_OP,
- eRasterizerDiscardEnable = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE,
- eDepthBiasEnable = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE,
- ePrimitiveRestartEnable = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE,
- eViewportWScalingNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV,
- eDiscardRectangleEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT,
- eSampleLocationsEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT,
- eRayTracingPipelineStackSizeKHR = VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR,
- eViewportShadingRatePaletteNV = VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV,
- eViewportCoarseSampleOrderNV = VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV,
- eExclusiveScissorNV = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV,
- eFragmentShadingRateKHR = VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR,
- eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT,
- eVertexInputEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_EXT,
- ePatchControlPointsEXT = VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT,
- eLogicOpEXT = VK_DYNAMIC_STATE_LOGIC_OP_EXT,
- eColorWriteEnableEXT = VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT,
- eCullModeEXT = VK_DYNAMIC_STATE_CULL_MODE_EXT,
- eDepthBiasEnableEXT = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT,
- eDepthBoundsTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT,
- eDepthCompareOpEXT = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT,
- eDepthTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT,
- eDepthWriteEnableEXT = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT,
- eFrontFaceEXT = VK_DYNAMIC_STATE_FRONT_FACE_EXT,
- ePrimitiveRestartEnableEXT = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT,
- ePrimitiveTopologyEXT = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT,
- eRasterizerDiscardEnableEXT = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT,
- eScissorWithCountEXT = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT,
- eStencilOpEXT = VK_DYNAMIC_STATE_STENCIL_OP_EXT,
- eStencilTestEnableEXT = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT,
- eVertexInputBindingStrideEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT,
- eViewportWithCountEXT = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT
- };
-
- VULKAN_HPP_INLINE std::string to_string( DynamicState value )
- {
- switch ( value )
- {
- case DynamicState::eViewport: return "Viewport";
- case DynamicState::eScissor: return "Scissor";
- case DynamicState::eLineWidth: return "LineWidth";
- case DynamicState::eDepthBias: return "DepthBias";
- case DynamicState::eBlendConstants: return "BlendConstants";
- case DynamicState::eDepthBounds: return "DepthBounds";
- case DynamicState::eStencilCompareMask: return "StencilCompareMask";
- case DynamicState::eStencilWriteMask: return "StencilWriteMask";
- case DynamicState::eStencilReference: return "StencilReference";
- case DynamicState::eCullMode: return "CullMode";
- case DynamicState::eFrontFace: return "FrontFace";
- case DynamicState::ePrimitiveTopology: return "PrimitiveTopology";
- case DynamicState::eViewportWithCount: return "ViewportWithCount";
- case DynamicState::eScissorWithCount: return "ScissorWithCount";
- case DynamicState::eVertexInputBindingStride: return "VertexInputBindingStride";
- case DynamicState::eDepthTestEnable: return "DepthTestEnable";
- case DynamicState::eDepthWriteEnable: return "DepthWriteEnable";
- case DynamicState::eDepthCompareOp: return "DepthCompareOp";
- case DynamicState::eDepthBoundsTestEnable: return "DepthBoundsTestEnable";
- case DynamicState::eStencilTestEnable: return "StencilTestEnable";
- case DynamicState::eStencilOp: return "StencilOp";
- case DynamicState::eRasterizerDiscardEnable: return "RasterizerDiscardEnable";
- case DynamicState::eDepthBiasEnable: return "DepthBiasEnable";
- case DynamicState::ePrimitiveRestartEnable: return "PrimitiveRestartEnable";
- case DynamicState::eViewportWScalingNV: return "ViewportWScalingNV";
- case DynamicState::eDiscardRectangleEXT: return "DiscardRectangleEXT";
- case DynamicState::eSampleLocationsEXT: return "SampleLocationsEXT";
- case DynamicState::eRayTracingPipelineStackSizeKHR: return "RayTracingPipelineStackSizeKHR";
- case DynamicState::eViewportShadingRatePaletteNV: return "ViewportShadingRatePaletteNV";
- case DynamicState::eViewportCoarseSampleOrderNV: return "ViewportCoarseSampleOrderNV";
- case DynamicState::eExclusiveScissorNV: return "ExclusiveScissorNV";
- case DynamicState::eFragmentShadingRateKHR: return "FragmentShadingRateKHR";
- case DynamicState::eLineStippleEXT: return "LineStippleEXT";
- case DynamicState::eVertexInputEXT: return "VertexInputEXT";
- case DynamicState::ePatchControlPointsEXT: return "PatchControlPointsEXT";
- case DynamicState::eLogicOpEXT: return "LogicOpEXT";
- case DynamicState::eColorWriteEnableEXT: return "ColorWriteEnableEXT";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
+ eViewport = VK_DYNAMIC_STATE_VIEWPORT,
+ eScissor = VK_DYNAMIC_STATE_SCISSOR,
+ eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH,
+ eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS,
+ eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS,
+ eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS,
+ eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
+ eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK,
+ eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE,
+ eCullMode = VK_DYNAMIC_STATE_CULL_MODE,
+ eFrontFace = VK_DYNAMIC_STATE_FRONT_FACE,
+ ePrimitiveTopology = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY,
+ eViewportWithCount = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT,
+ eScissorWithCount = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT,
+ eVertexInputBindingStride = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE,
+ eDepthTestEnable = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE,
+ eDepthWriteEnable = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE,
+ eDepthCompareOp = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP,
+ eDepthBoundsTestEnable = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE,
+ eStencilTestEnable = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE,
+ eStencilOp = VK_DYNAMIC_STATE_STENCIL_OP,
+ eRasterizerDiscardEnable = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE,
+ eDepthBiasEnable = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE,
+ ePrimitiveRestartEnable = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE,
+ eViewportWScalingNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV,
+ eDiscardRectangleEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT,
+ eSampleLocationsEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT,
+ eRayTracingPipelineStackSizeKHR = VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR,
+ eViewportShadingRatePaletteNV = VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV,
+ eViewportCoarseSampleOrderNV = VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV,
+ eExclusiveScissorNV = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV,
+ eFragmentShadingRateKHR = VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR,
+ eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT,
+ eVertexInputEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_EXT,
+ ePatchControlPointsEXT = VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT,
+ eLogicOpEXT = VK_DYNAMIC_STATE_LOGIC_OP_EXT,
+ eColorWriteEnableEXT = VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT,
+ eTessellationDomainOriginEXT = VK_DYNAMIC_STATE_TESSELLATION_DOMAIN_ORIGIN_EXT,
+ eDepthClampEnableEXT = VK_DYNAMIC_STATE_DEPTH_CLAMP_ENABLE_EXT,
+ ePolygonModeEXT = VK_DYNAMIC_STATE_POLYGON_MODE_EXT,
+ eRasterizationSamplesEXT = VK_DYNAMIC_STATE_RASTERIZATION_SAMPLES_EXT,
+ eSampleMaskEXT = VK_DYNAMIC_STATE_SAMPLE_MASK_EXT,
+ eAlphaToCoverageEnableEXT = VK_DYNAMIC_STATE_ALPHA_TO_COVERAGE_ENABLE_EXT,
+ eAlphaToOneEnableEXT = VK_DYNAMIC_STATE_ALPHA_TO_ONE_ENABLE_EXT,
+ eLogicOpEnableEXT = VK_DYNAMIC_STATE_LOGIC_OP_ENABLE_EXT,
+ eColorBlendEnableEXT = VK_DYNAMIC_STATE_COLOR_BLEND_ENABLE_EXT,
+ eColorBlendEquationEXT = VK_DYNAMIC_STATE_COLOR_BLEND_EQUATION_EXT,
+ eColorWriteMaskEXT = VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT,
+ eRasterizationStreamEXT = VK_DYNAMIC_STATE_RASTERIZATION_STREAM_EXT,
+ eConservativeRasterizationModeEXT = VK_DYNAMIC_STATE_CONSERVATIVE_RASTERIZATION_MODE_EXT,
+ eExtraPrimitiveOverestimationSizeEXT = VK_DYNAMIC_STATE_EXTRA_PRIMITIVE_OVERESTIMATION_SIZE_EXT,
+ eDepthClipEnableEXT = VK_DYNAMIC_STATE_DEPTH_CLIP_ENABLE_EXT,
+ eSampleLocationsEnableEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_ENABLE_EXT,
+ eColorBlendAdvancedEXT = VK_DYNAMIC_STATE_COLOR_BLEND_ADVANCED_EXT,
+ eProvokingVertexModeEXT = VK_DYNAMIC_STATE_PROVOKING_VERTEX_MODE_EXT,
+ eLineRasterizationModeEXT = VK_DYNAMIC_STATE_LINE_RASTERIZATION_MODE_EXT,
+ eLineStippleEnableEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_ENABLE_EXT,
+ eDepthClipNegativeOneToOneEXT = VK_DYNAMIC_STATE_DEPTH_CLIP_NEGATIVE_ONE_TO_ONE_EXT,
+ eViewportWScalingEnableNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_ENABLE_NV,
+ eViewportSwizzleNV = VK_DYNAMIC_STATE_VIEWPORT_SWIZZLE_NV,
+ eCoverageToColorEnableNV = VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_ENABLE_NV,
+ eCoverageToColorLocationNV = VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_LOCATION_NV,
+ eCoverageModulationModeNV = VK_DYNAMIC_STATE_COVERAGE_MODULATION_MODE_NV,
+ eCoverageModulationTableEnableNV = VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_ENABLE_NV,
+ eCoverageModulationTableNV = VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_NV,
+ eShadingRateImageEnableNV = VK_DYNAMIC_STATE_SHADING_RATE_IMAGE_ENABLE_NV,
+ eRepresentativeFragmentTestEnableNV = VK_DYNAMIC_STATE_REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV,
+ eCoverageReductionModeNV = VK_DYNAMIC_STATE_COVERAGE_REDUCTION_MODE_NV,
+ eCullModeEXT = VK_DYNAMIC_STATE_CULL_MODE_EXT,
+ eDepthBiasEnableEXT = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT,
+ eDepthBoundsTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT,
+ eDepthCompareOpEXT = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT,
+ eDepthTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT,
+ eDepthWriteEnableEXT = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT,
+ eFrontFaceEXT = VK_DYNAMIC_STATE_FRONT_FACE_EXT,
+ ePrimitiveRestartEnableEXT = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT,
+ ePrimitiveTopologyEXT = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT,
+ eRasterizerDiscardEnableEXT = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT,
+ eScissorWithCountEXT = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT,
+ eStencilOpEXT = VK_DYNAMIC_STATE_STENCIL_OP_EXT,
+ eStencilTestEnableEXT = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT,
+ eVertexInputBindingStrideEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT,
+ eViewportWithCountEXT = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT
+ };
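// --- Editor's note: illustrative usage sketch only; not part of the vendored header or of this diff. ---
// A minimal example, assuming vulkan.hpp and <array> are included and `cmd`, `viewport`, `scissor`
// are placeholder objects: the DynamicState values above are listed in a
// vk::PipelineDynamicStateCreateInfo at pipeline creation, and the matching vk::CommandBuffer
// setters then override the baked state while recording.
//
//   std::array<vk::DynamicState, 3> dynamicStates = {
//       vk::DynamicState::eViewport,
//       vk::DynamicState::eScissor,
//       vk::DynamicState::eCullMode };
//
//   vk::PipelineDynamicStateCreateInfo dynamicInfo{};
//   dynamicInfo.dynamicStateCount = static_cast<uint32_t>( dynamicStates.size() );
//   dynamicInfo.pDynamicStates    = dynamicStates.data();
//
//   // while recording:
//   cmd.setViewport( 0, viewport );
//   cmd.setScissor( 0, scissor );
//   cmd.setCullMode( vk::CullModeFlagBits::eBack );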
enum class FrontFace
{
@@ -4012,16 +2083,6 @@ namespace VULKAN_HPP_NAMESPACE
eClockwise = VK_FRONT_FACE_CLOCKWISE
};
- VULKAN_HPP_INLINE std::string to_string( FrontFace value )
- {
- switch ( value )
- {
- case FrontFace::eCounterClockwise: return "CounterClockwise";
- case FrontFace::eClockwise: return "Clockwise";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class LogicOp
{
eClear = VK_LOGIC_OP_CLEAR,
@@ -4042,98 +2103,44 @@ namespace VULKAN_HPP_NAMESPACE
eSet = VK_LOGIC_OP_SET
};
- VULKAN_HPP_INLINE std::string to_string( LogicOp value )
- {
- switch ( value )
- {
- case LogicOp::eClear: return "Clear";
- case LogicOp::eAnd: return "And";
- case LogicOp::eAndReverse: return "AndReverse";
- case LogicOp::eCopy: return "Copy";
- case LogicOp::eAndInverted: return "AndInverted";
- case LogicOp::eNoOp: return "NoOp";
- case LogicOp::eXor: return "Xor";
- case LogicOp::eOr: return "Or";
- case LogicOp::eNor: return "Nor";
- case LogicOp::eEquivalent: return "Equivalent";
- case LogicOp::eInvert: return "Invert";
- case LogicOp::eOrReverse: return "OrReverse";
- case LogicOp::eCopyInverted: return "CopyInverted";
- case LogicOp::eOrInverted: return "OrInverted";
- case LogicOp::eNand: return "Nand";
- case LogicOp::eSet: return "Set";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class PipelineCreateFlagBits : VkPipelineCreateFlags
{
- eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT,
- eAllowDerivatives = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT,
- eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT,
- eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT,
- eDispatchBase = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT,
- eFailOnPipelineCompileRequired = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT,
- eEarlyReturnOnFailure = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT,
- eRenderingFragmentShadingRateAttachmentKHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
- eRenderingFragmentDensityMapAttachmentEXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT,
- eRayTracingNoNullAnyHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR,
- eRayTracingNoNullClosestHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR,
- eRayTracingNoNullMissShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR,
- eRayTracingNoNullIntersectionShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR,
- eRayTracingSkipTrianglesKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR,
- eRayTracingSkipAabbsKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR,
- eRayTracingShaderGroupHandleCaptureReplayKHR =
- VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR,
- eDeferCompileNV = VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV,
- eCaptureStatisticsKHR = VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR,
- eCaptureInternalRepresentationsKHR = VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR,
- eIndirectBindableNV = VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV,
- eLibraryKHR = VK_PIPELINE_CREATE_LIBRARY_BIT_KHR,
- eRayTracingAllowMotionNV = VK_PIPELINE_CREATE_RAY_TRACING_ALLOW_MOTION_BIT_NV,
- eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE_KHR,
- eEarlyReturnOnFailureEXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT,
- eFailOnPipelineCompileRequiredEXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT,
- eViewIndexFromDeviceIndexKHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR,
- eVkPipelineRasterizationStateCreateFragmentDensityMapAttachmentEXT =
- VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT,
- eVkPipelineRasterizationStateCreateFragmentShadingRateAttachmentKHR =
- VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR
- };
-
- VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlagBits value )
- {
- switch ( value )
- {
- case PipelineCreateFlagBits::eDisableOptimization: return "DisableOptimization";
- case PipelineCreateFlagBits::eAllowDerivatives: return "AllowDerivatives";
- case PipelineCreateFlagBits::eDerivative: return "Derivative";
- case PipelineCreateFlagBits::eViewIndexFromDeviceIndex: return "ViewIndexFromDeviceIndex";
- case PipelineCreateFlagBits::eDispatchBase: return "DispatchBase";
- case PipelineCreateFlagBits::eFailOnPipelineCompileRequired: return "FailOnPipelineCompileRequired";
- case PipelineCreateFlagBits::eEarlyReturnOnFailure: return "EarlyReturnOnFailure";
- case PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR:
- return "RenderingFragmentShadingRateAttachmentKHR";
- case PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT:
- return "RenderingFragmentDensityMapAttachmentEXT";
- case PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR: return "RayTracingNoNullAnyHitShadersKHR";
- case PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR: return "RayTracingNoNullClosestHitShadersKHR";
- case PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR: return "RayTracingNoNullMissShadersKHR";
- case PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR:
- return "RayTracingNoNullIntersectionShadersKHR";
- case PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR: return "RayTracingSkipTrianglesKHR";
- case PipelineCreateFlagBits::eRayTracingSkipAabbsKHR: return "RayTracingSkipAabbsKHR";
- case PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR:
- return "RayTracingShaderGroupHandleCaptureReplayKHR";
- case PipelineCreateFlagBits::eDeferCompileNV: return "DeferCompileNV";
- case PipelineCreateFlagBits::eCaptureStatisticsKHR: return "CaptureStatisticsKHR";
- case PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR: return "CaptureInternalRepresentationsKHR";
- case PipelineCreateFlagBits::eIndirectBindableNV: return "IndirectBindableNV";
- case PipelineCreateFlagBits::eLibraryKHR: return "LibraryKHR";
- case PipelineCreateFlagBits::eRayTracingAllowMotionNV: return "RayTracingAllowMotionNV";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
+ eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT,
+ eAllowDerivatives = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT,
+ eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT,
+ eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT,
+ eDispatchBase = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT,
+ eFailOnPipelineCompileRequired = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT,
+ eEarlyReturnOnFailure = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT,
+ eRenderingFragmentShadingRateAttachmentKHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+ eRenderingFragmentDensityMapAttachmentEXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT,
+ eRayTracingNoNullAnyHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR,
+ eRayTracingNoNullClosestHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR,
+ eRayTracingNoNullMissShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR,
+ eRayTracingNoNullIntersectionShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR,
+ eRayTracingSkipTrianglesKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR,
+ eRayTracingSkipAabbsKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR,
+ eRayTracingShaderGroupHandleCaptureReplayKHR = VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR,
+ eDeferCompileNV = VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV,
+ eCaptureStatisticsKHR = VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR,
+ eCaptureInternalRepresentationsKHR = VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR,
+ eIndirectBindableNV = VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV,
+ eLibraryKHR = VK_PIPELINE_CREATE_LIBRARY_BIT_KHR,
+ eRetainLinkTimeOptimizationInfoEXT = VK_PIPELINE_CREATE_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT,
+ eLinkTimeOptimizationEXT = VK_PIPELINE_CREATE_LINK_TIME_OPTIMIZATION_BIT_EXT,
+ eRayTracingAllowMotionNV = VK_PIPELINE_CREATE_RAY_TRACING_ALLOW_MOTION_BIT_NV,
+ eColorAttachmentFeedbackLoopEXT = VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT,
+ eDepthStencilAttachmentFeedbackLoopEXT = VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT,
+ eRayTracingOpacityMicromapEXT = VK_PIPELINE_CREATE_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT,
+ eNoProtectedAccessEXT = VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT_EXT,
+ eProtectedAccessOnlyEXT = VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT_EXT,
+ eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE_KHR,
+ eEarlyReturnOnFailureEXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT,
+ eFailOnPipelineCompileRequiredEXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT,
+ eViewIndexFromDeviceIndexKHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR,
+ eVkPipelineRasterizationStateCreateFragmentDensityMapAttachmentEXT = VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT,
+ eVkPipelineRasterizationStateCreateFragmentShadingRateAttachmentKHR = VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR
+ };
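// --- Editor's note: illustrative sketch only; not part of the vendored header or of this diff. ---
// PipelineCreateFlagBits values combine into a vk::PipelineCreateFlags mask via operator|,
// for example when filling vk::GraphicsPipelineCreateInfo::flags (`pipelineInfo` is a placeholder):
//
//   vk::GraphicsPipelineCreateInfo pipelineInfo{};
//   pipelineInfo.flags = vk::PipelineCreateFlagBits::eAllowDerivatives
//                      | vk::PipelineCreateFlagBits::eDisableOptimization;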
enum class PipelineShaderStageCreateFlagBits : VkPipelineShaderStageCreateFlags
{
@@ -4143,16 +2150,6 @@ namespace VULKAN_HPP_NAMESPACE
eRequireFullSubgroupsEXT = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT
};
- VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlagBits value )
- {
- switch ( value )
- {
- case PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSize: return "AllowVaryingSubgroupSize";
- case PipelineShaderStageCreateFlagBits::eRequireFullSubgroups: return "RequireFullSubgroups";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class PolygonMode
{
eFill = VK_POLYGON_MODE_FILL,
@@ -4161,18 +2158,6 @@ namespace VULKAN_HPP_NAMESPACE
eFillRectangleNV = VK_POLYGON_MODE_FILL_RECTANGLE_NV
};
- VULKAN_HPP_INLINE std::string to_string( PolygonMode value )
- {
- switch ( value )
- {
- case PolygonMode::eFill: return "Fill";
- case PolygonMode::eLine: return "Line";
- case PolygonMode::ePoint: return "Point";
- case PolygonMode::eFillRectangleNV: return "FillRectangleNV";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class PrimitiveTopology
{
ePointList = VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
@@ -4188,25 +2173,6 @@ namespace VULKAN_HPP_NAMESPACE
ePatchList = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST
};
- VULKAN_HPP_INLINE std::string to_string( PrimitiveTopology value )
- {
- switch ( value )
- {
- case PrimitiveTopology::ePointList: return "PointList";
- case PrimitiveTopology::eLineList: return "LineList";
- case PrimitiveTopology::eLineStrip: return "LineStrip";
- case PrimitiveTopology::eTriangleList: return "TriangleList";
- case PrimitiveTopology::eTriangleStrip: return "TriangleStrip";
- case PrimitiveTopology::eTriangleFan: return "TriangleFan";
- case PrimitiveTopology::eLineListWithAdjacency: return "LineListWithAdjacency";
- case PrimitiveTopology::eLineStripWithAdjacency: return "LineStripWithAdjacency";
- case PrimitiveTopology::eTriangleListWithAdjacency: return "TriangleListWithAdjacency";
- case PrimitiveTopology::eTriangleStripWithAdjacency: return "TriangleStripWithAdjacency";
- case PrimitiveTopology::ePatchList: return "PatchList";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class ShaderStageFlagBits : VkShaderStageFlags
{
eVertex = VK_SHADER_STAGE_VERTEX_BIT,
@@ -4223,42 +2189,19 @@ namespace VULKAN_HPP_NAMESPACE
eMissKHR = VK_SHADER_STAGE_MISS_BIT_KHR,
eIntersectionKHR = VK_SHADER_STAGE_INTERSECTION_BIT_KHR,
eCallableKHR = VK_SHADER_STAGE_CALLABLE_BIT_KHR,
- eTaskNV = VK_SHADER_STAGE_TASK_BIT_NV,
- eMeshNV = VK_SHADER_STAGE_MESH_BIT_NV,
+ eTaskEXT = VK_SHADER_STAGE_TASK_BIT_EXT,
+ eMeshEXT = VK_SHADER_STAGE_MESH_BIT_EXT,
eSubpassShadingHUAWEI = VK_SHADER_STAGE_SUBPASS_SHADING_BIT_HUAWEI,
eAnyHitNV = VK_SHADER_STAGE_ANY_HIT_BIT_NV,
eCallableNV = VK_SHADER_STAGE_CALLABLE_BIT_NV,
eClosestHitNV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV,
eIntersectionNV = VK_SHADER_STAGE_INTERSECTION_BIT_NV,
+ eMeshNV = VK_SHADER_STAGE_MESH_BIT_NV,
eMissNV = VK_SHADER_STAGE_MISS_BIT_NV,
- eRaygenNV = VK_SHADER_STAGE_RAYGEN_BIT_NV
+ eRaygenNV = VK_SHADER_STAGE_RAYGEN_BIT_NV,
+ eTaskNV = VK_SHADER_STAGE_TASK_BIT_NV
};
- VULKAN_HPP_INLINE std::string to_string( ShaderStageFlagBits value )
- {
- switch ( value )
- {
- case ShaderStageFlagBits::eVertex: return "Vertex";
- case ShaderStageFlagBits::eTessellationControl: return "TessellationControl";
- case ShaderStageFlagBits::eTessellationEvaluation: return "TessellationEvaluation";
- case ShaderStageFlagBits::eGeometry: return "Geometry";
- case ShaderStageFlagBits::eFragment: return "Fragment";
- case ShaderStageFlagBits::eCompute: return "Compute";
- case ShaderStageFlagBits::eAllGraphics: return "AllGraphics";
- case ShaderStageFlagBits::eAll: return "All";
- case ShaderStageFlagBits::eRaygenKHR: return "RaygenKHR";
- case ShaderStageFlagBits::eAnyHitKHR: return "AnyHitKHR";
- case ShaderStageFlagBits::eClosestHitKHR: return "ClosestHitKHR";
- case ShaderStageFlagBits::eMissKHR: return "MissKHR";
- case ShaderStageFlagBits::eIntersectionKHR: return "IntersectionKHR";
- case ShaderStageFlagBits::eCallableKHR: return "CallableKHR";
- case ShaderStageFlagBits::eTaskNV: return "TaskNV";
- case ShaderStageFlagBits::eMeshNV: return "MeshNV";
- case ShaderStageFlagBits::eSubpassShadingHUAWEI: return "SubpassShadingHUAWEI";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
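// --- Editor's note: illustrative sketch only; not part of the vendored header or of this diff. ---
// ShaderStageFlagBits is used both as a single-stage selector (vk::PipelineShaderStageCreateInfo::stage)
// and as a mask (e.g. vk::PushConstantRange::stageFlags); `vertModule` is a placeholder handle.
//
//   vk::PipelineShaderStageCreateInfo stageInfo{};
//   stageInfo.stage  = vk::ShaderStageFlagBits::eVertex;
//   stageInfo.module = vertModule;
//   stageInfo.pName  = "main";
//
//   vk::PushConstantRange range{};
//   range.stageFlags = vk::ShaderStageFlagBits::eVertex | vk::ShaderStageFlagBits::eFragment;
//   range.offset     = 0;
//   range.size       = 16;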
enum class StencilOp
{
eKeep = VK_STENCIL_OP_KEEP,
@@ -4271,110 +2214,40 @@ namespace VULKAN_HPP_NAMESPACE
eDecrementAndWrap = VK_STENCIL_OP_DECREMENT_AND_WRAP
};
- VULKAN_HPP_INLINE std::string to_string( StencilOp value )
- {
- switch ( value )
- {
- case StencilOp::eKeep: return "Keep";
- case StencilOp::eZero: return "Zero";
- case StencilOp::eReplace: return "Replace";
- case StencilOp::eIncrementAndClamp: return "IncrementAndClamp";
- case StencilOp::eDecrementAndClamp: return "DecrementAndClamp";
- case StencilOp::eInvert: return "Invert";
- case StencilOp::eIncrementAndWrap: return "IncrementAndWrap";
- case StencilOp::eDecrementAndWrap: return "DecrementAndWrap";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class VertexInputRate
{
eVertex = VK_VERTEX_INPUT_RATE_VERTEX,
eInstance = VK_VERTEX_INPUT_RATE_INSTANCE
};
- VULKAN_HPP_INLINE std::string to_string( VertexInputRate value )
- {
- switch ( value )
- {
- case VertexInputRate::eVertex: return "Vertex";
- case VertexInputRate::eInstance: return "Instance";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class PipelineDynamicStateCreateFlagBits : VkPipelineDynamicStateCreateFlags
{
};
- VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlagBits )
- {
- return "(void)";
- }
-
enum class PipelineInputAssemblyStateCreateFlagBits : VkPipelineInputAssemblyStateCreateFlags
{
};
- VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlagBits )
- {
- return "(void)";
- }
-
- enum class PipelineLayoutCreateFlagBits : VkPipelineLayoutCreateFlags
- {
- };
-
- VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlagBits )
- {
- return "(void)";
- }
-
enum class PipelineMultisampleStateCreateFlagBits : VkPipelineMultisampleStateCreateFlags
{
};
- VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlagBits )
- {
- return "(void)";
- }
-
enum class PipelineRasterizationStateCreateFlagBits : VkPipelineRasterizationStateCreateFlags
{
};
- VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlagBits )
- {
- return "(void)";
- }
-
enum class PipelineTessellationStateCreateFlagBits : VkPipelineTessellationStateCreateFlags
{
};
- VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlagBits )
- {
- return "(void)";
- }
-
enum class PipelineVertexInputStateCreateFlagBits : VkPipelineVertexInputStateCreateFlags
{
};
- VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlagBits )
- {
- return "(void)";
- }
-
enum class PipelineViewportStateCreateFlagBits : VkPipelineViewportStateCreateFlags
{
};
- VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlagBits )
- {
- return "(void)";
- }
-
enum class BorderColor
{
eFloatTransparentBlack = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
@@ -4387,41 +2260,14 @@ namespace VULKAN_HPP_NAMESPACE
eIntCustomEXT = VK_BORDER_COLOR_INT_CUSTOM_EXT
};
- VULKAN_HPP_INLINE std::string to_string( BorderColor value )
- {
- switch ( value )
- {
- case BorderColor::eFloatTransparentBlack: return "FloatTransparentBlack";
- case BorderColor::eIntTransparentBlack: return "IntTransparentBlack";
- case BorderColor::eFloatOpaqueBlack: return "FloatOpaqueBlack";
- case BorderColor::eIntOpaqueBlack: return "IntOpaqueBlack";
- case BorderColor::eFloatOpaqueWhite: return "FloatOpaqueWhite";
- case BorderColor::eIntOpaqueWhite: return "IntOpaqueWhite";
- case BorderColor::eFloatCustomEXT: return "FloatCustomEXT";
- case BorderColor::eIntCustomEXT: return "IntCustomEXT";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class Filter
{
eNearest = VK_FILTER_NEAREST,
eLinear = VK_FILTER_LINEAR,
- eCubicIMG = VK_FILTER_CUBIC_IMG,
- eCubicEXT = VK_FILTER_CUBIC_EXT
+ eCubicEXT = VK_FILTER_CUBIC_EXT,
+ eCubicIMG = VK_FILTER_CUBIC_IMG
};
- VULKAN_HPP_INLINE std::string to_string( Filter value )
- {
- switch ( value )
- {
- case Filter::eNearest: return "Nearest";
- case Filter::eLinear: return "Linear";
- case Filter::eCubicIMG: return "CubicIMG";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class SamplerAddressMode
{
eRepeat = VK_SAMPLER_ADDRESS_MODE_REPEAT,
@@ -4432,89 +2278,38 @@ namespace VULKAN_HPP_NAMESPACE
eMirrorClampToEdgeKHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR
};
- VULKAN_HPP_INLINE std::string to_string( SamplerAddressMode value )
- {
- switch ( value )
- {
- case SamplerAddressMode::eRepeat: return "Repeat";
- case SamplerAddressMode::eMirroredRepeat: return "MirroredRepeat";
- case SamplerAddressMode::eClampToEdge: return "ClampToEdge";
- case SamplerAddressMode::eClampToBorder: return "ClampToBorder";
- case SamplerAddressMode::eMirrorClampToEdge: return "MirrorClampToEdge";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class SamplerCreateFlagBits : VkSamplerCreateFlags
{
eSubsampledEXT = VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT,
- eSubsampledCoarseReconstructionEXT = VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT
+ eSubsampledCoarseReconstructionEXT = VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT,
+ eNonSeamlessCubeMapEXT = VK_SAMPLER_CREATE_NON_SEAMLESS_CUBE_MAP_BIT_EXT,
+ eImageProcessingQCOM = VK_SAMPLER_CREATE_IMAGE_PROCESSING_BIT_QCOM
};
- VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlagBits value )
- {
- switch ( value )
- {
- case SamplerCreateFlagBits::eSubsampledEXT: return "SubsampledEXT";
- case SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT: return "SubsampledCoarseReconstructionEXT";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class SamplerMipmapMode
{
eNearest = VK_SAMPLER_MIPMAP_MODE_NEAREST,
eLinear = VK_SAMPLER_MIPMAP_MODE_LINEAR
};
- VULKAN_HPP_INLINE std::string to_string( SamplerMipmapMode value )
- {
- switch ( value )
- {
- case SamplerMipmapMode::eNearest: return "Nearest";
- case SamplerMipmapMode::eLinear: return "Linear";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class DescriptorPoolCreateFlagBits : VkDescriptorPoolCreateFlags
{
eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
eUpdateAfterBind = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT,
+ eHostOnlyEXT = VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT,
eHostOnlyVALVE = VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE,
eUpdateAfterBindEXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT
};
- VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlagBits value )
- {
- switch ( value )
- {
- case DescriptorPoolCreateFlagBits::eFreeDescriptorSet: return "FreeDescriptorSet";
- case DescriptorPoolCreateFlagBits::eUpdateAfterBind: return "UpdateAfterBind";
- case DescriptorPoolCreateFlagBits::eHostOnlyVALVE: return "HostOnlyVALVE";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class DescriptorSetLayoutCreateFlagBits : VkDescriptorSetLayoutCreateFlags
{
eUpdateAfterBindPool = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT,
ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR,
+ eHostOnlyPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT,
eHostOnlyPoolVALVE = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE,
eUpdateAfterBindPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT
};
- VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlagBits value )
- {
- switch ( value )
- {
- case DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool: return "UpdateAfterBindPool";
- case DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR: return "PushDescriptorKHR";
- case DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolVALVE: return "HostOnlyPoolVALVE";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class DescriptorType
{
eSampler = VK_DESCRIPTOR_TYPE_SAMPLER,
@@ -4531,42 +2326,17 @@ namespace VULKAN_HPP_NAMESPACE
eInlineUniformBlock = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK,
eAccelerationStructureKHR = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR,
eAccelerationStructureNV = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV,
- eMutableVALVE = VK_DESCRIPTOR_TYPE_MUTABLE_VALVE,
- eInlineUniformBlockEXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT
+ eSampleWeightImageQCOM = VK_DESCRIPTOR_TYPE_SAMPLE_WEIGHT_IMAGE_QCOM,
+ eBlockMatchImageQCOM = VK_DESCRIPTOR_TYPE_BLOCK_MATCH_IMAGE_QCOM,
+ eMutableEXT = VK_DESCRIPTOR_TYPE_MUTABLE_EXT,
+ eInlineUniformBlockEXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT,
+ eMutableVALVE = VK_DESCRIPTOR_TYPE_MUTABLE_VALVE
};
- VULKAN_HPP_INLINE std::string to_string( DescriptorType value )
- {
- switch ( value )
- {
- case DescriptorType::eSampler: return "Sampler";
- case DescriptorType::eCombinedImageSampler: return "CombinedImageSampler";
- case DescriptorType::eSampledImage: return "SampledImage";
- case DescriptorType::eStorageImage: return "StorageImage";
- case DescriptorType::eUniformTexelBuffer: return "UniformTexelBuffer";
- case DescriptorType::eStorageTexelBuffer: return "StorageTexelBuffer";
- case DescriptorType::eUniformBuffer: return "UniformBuffer";
- case DescriptorType::eStorageBuffer: return "StorageBuffer";
- case DescriptorType::eUniformBufferDynamic: return "UniformBufferDynamic";
- case DescriptorType::eStorageBufferDynamic: return "StorageBufferDynamic";
- case DescriptorType::eInputAttachment: return "InputAttachment";
- case DescriptorType::eInlineUniformBlock: return "InlineUniformBlock";
- case DescriptorType::eAccelerationStructureKHR: return "AccelerationStructureKHR";
- case DescriptorType::eAccelerationStructureNV: return "AccelerationStructureNV";
- case DescriptorType::eMutableVALVE: return "MutableVALVE";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
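// --- Editor's note: illustrative sketch only; not part of the vendored header or of this diff. ---
// DescriptorType selects what a binding holds, e.g. in a vk::DescriptorSetLayoutBinding:
//
//   vk::DescriptorSetLayoutBinding binding{};
//   binding.binding         = 0;
//   binding.descriptorType  = vk::DescriptorType::eUniformBuffer;
//   binding.descriptorCount = 1;
//   binding.stageFlags      = vk::ShaderStageFlagBits::eVertex;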
enum class DescriptorPoolResetFlagBits : VkDescriptorPoolResetFlags
{
};
- VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlagBits )
- {
- return "(void)";
- }
-
enum class AccessFlagBits : VkAccessFlags
{
eIndirectCommandRead = VK_ACCESS_INDIRECT_COMMAND_READ_BIT,
@@ -4604,57 +2374,11 @@ namespace VULKAN_HPP_NAMESPACE
eShadingRateImageReadNV = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV
};
- VULKAN_HPP_INLINE std::string to_string( AccessFlagBits value )
- {
- switch ( value )
- {
- case AccessFlagBits::eIndirectCommandRead: return "IndirectCommandRead";
- case AccessFlagBits::eIndexRead: return "IndexRead";
- case AccessFlagBits::eVertexAttributeRead: return "VertexAttributeRead";
- case AccessFlagBits::eUniformRead: return "UniformRead";
- case AccessFlagBits::eInputAttachmentRead: return "InputAttachmentRead";
- case AccessFlagBits::eShaderRead: return "ShaderRead";
- case AccessFlagBits::eShaderWrite: return "ShaderWrite";
- case AccessFlagBits::eColorAttachmentRead: return "ColorAttachmentRead";
- case AccessFlagBits::eColorAttachmentWrite: return "ColorAttachmentWrite";
- case AccessFlagBits::eDepthStencilAttachmentRead: return "DepthStencilAttachmentRead";
- case AccessFlagBits::eDepthStencilAttachmentWrite: return "DepthStencilAttachmentWrite";
- case AccessFlagBits::eTransferRead: return "TransferRead";
- case AccessFlagBits::eTransferWrite: return "TransferWrite";
- case AccessFlagBits::eHostRead: return "HostRead";
- case AccessFlagBits::eHostWrite: return "HostWrite";
- case AccessFlagBits::eMemoryRead: return "MemoryRead";
- case AccessFlagBits::eMemoryWrite: return "MemoryWrite";
- case AccessFlagBits::eNone: return "None";
- case AccessFlagBits::eTransformFeedbackWriteEXT: return "TransformFeedbackWriteEXT";
- case AccessFlagBits::eTransformFeedbackCounterReadEXT: return "TransformFeedbackCounterReadEXT";
- case AccessFlagBits::eTransformFeedbackCounterWriteEXT: return "TransformFeedbackCounterWriteEXT";
- case AccessFlagBits::eConditionalRenderingReadEXT: return "ConditionalRenderingReadEXT";
- case AccessFlagBits::eColorAttachmentReadNoncoherentEXT: return "ColorAttachmentReadNoncoherentEXT";
- case AccessFlagBits::eAccelerationStructureReadKHR: return "AccelerationStructureReadKHR";
- case AccessFlagBits::eAccelerationStructureWriteKHR: return "AccelerationStructureWriteKHR";
- case AccessFlagBits::eFragmentDensityMapReadEXT: return "FragmentDensityMapReadEXT";
- case AccessFlagBits::eFragmentShadingRateAttachmentReadKHR: return "FragmentShadingRateAttachmentReadKHR";
- case AccessFlagBits::eCommandPreprocessReadNV: return "CommandPreprocessReadNV";
- case AccessFlagBits::eCommandPreprocessWriteNV: return "CommandPreprocessWriteNV";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class AttachmentDescriptionFlagBits : VkAttachmentDescriptionFlags
{
eMayAlias = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT
};
- VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlagBits value )
- {
- switch ( value )
- {
- case AttachmentDescriptionFlagBits::eMayAlias: return "MayAlias";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class AttachmentLoadOp
{
eLoad = VK_ATTACHMENT_LOAD_OP_LOAD,
@@ -4663,18 +2387,6 @@ namespace VULKAN_HPP_NAMESPACE
eNoneEXT = VK_ATTACHMENT_LOAD_OP_NONE_EXT
};
- VULKAN_HPP_INLINE std::string to_string( AttachmentLoadOp value )
- {
- switch ( value )
- {
- case AttachmentLoadOp::eLoad: return "Load";
- case AttachmentLoadOp::eClear: return "Clear";
- case AttachmentLoadOp::eDontCare: return "DontCare";
- case AttachmentLoadOp::eNoneEXT: return "NoneEXT";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class AttachmentStoreOp
{
eStore = VK_ATTACHMENT_STORE_OP_STORE,
@@ -4685,52 +2397,22 @@ namespace VULKAN_HPP_NAMESPACE
eNoneQCOM = VK_ATTACHMENT_STORE_OP_NONE_QCOM
};
- VULKAN_HPP_INLINE std::string to_string( AttachmentStoreOp value )
- {
- switch ( value )
- {
- case AttachmentStoreOp::eStore: return "Store";
- case AttachmentStoreOp::eDontCare: return "DontCare";
- case AttachmentStoreOp::eNone: return "None";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class DependencyFlagBits : VkDependencyFlags
{
- eByRegion = VK_DEPENDENCY_BY_REGION_BIT,
- eDeviceGroup = VK_DEPENDENCY_DEVICE_GROUP_BIT,
- eViewLocal = VK_DEPENDENCY_VIEW_LOCAL_BIT,
- eDeviceGroupKHR = VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR,
- eViewLocalKHR = VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR
+ eByRegion = VK_DEPENDENCY_BY_REGION_BIT,
+ eDeviceGroup = VK_DEPENDENCY_DEVICE_GROUP_BIT,
+ eViewLocal = VK_DEPENDENCY_VIEW_LOCAL_BIT,
+ eFeedbackLoopEXT = VK_DEPENDENCY_FEEDBACK_LOOP_BIT_EXT,
+ eDeviceGroupKHR = VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR,
+ eViewLocalKHR = VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR
};
- VULKAN_HPP_INLINE std::string to_string( DependencyFlagBits value )
- {
- switch ( value )
- {
- case DependencyFlagBits::eByRegion: return "ByRegion";
- case DependencyFlagBits::eDeviceGroup: return "DeviceGroup";
- case DependencyFlagBits::eViewLocal: return "ViewLocal";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class FramebufferCreateFlagBits : VkFramebufferCreateFlags
{
eImageless = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT,
eImagelessKHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR
};
- VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlagBits value )
- {
- switch ( value )
- {
- case FramebufferCreateFlagBits::eImageless: return "Imageless";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class PipelineBindPoint
{
eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS,
@@ -4740,64 +2422,26 @@ namespace VULKAN_HPP_NAMESPACE
eRayTracingNV = VK_PIPELINE_BIND_POINT_RAY_TRACING_NV
};
- VULKAN_HPP_INLINE std::string to_string( PipelineBindPoint value )
- {
- switch ( value )
- {
- case PipelineBindPoint::eGraphics: return "Graphics";
- case PipelineBindPoint::eCompute: return "Compute";
- case PipelineBindPoint::eRayTracingKHR: return "RayTracingKHR";
- case PipelineBindPoint::eSubpassShadingHUAWEI: return "SubpassShadingHUAWEI";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class RenderPassCreateFlagBits : VkRenderPassCreateFlags
{
eTransformQCOM = VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM
};
- VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlagBits value )
- {
- switch ( value )
- {
- case RenderPassCreateFlagBits::eTransformQCOM: return "TransformQCOM";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class SubpassDescriptionFlagBits : VkSubpassDescriptionFlags
{
- ePerViewAttributesNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX,
- ePerViewPositionXOnlyNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX,
- eFragmentRegionQCOM = VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM,
- eShaderResolveQCOM = VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM,
- eRasterizationOrderAttachmentColorAccessARM =
- VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_ARM,
- eRasterizationOrderAttachmentDepthAccessARM =
- VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM,
- eRasterizationOrderAttachmentStencilAccessARM =
- VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM
+ ePerViewAttributesNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX,
+ ePerViewPositionXOnlyNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX,
+ eFragmentRegionQCOM = VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM,
+ eShaderResolveQCOM = VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM,
+ eRasterizationOrderAttachmentColorAccessEXT = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_EXT,
+ eRasterizationOrderAttachmentDepthAccessEXT = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT,
+ eRasterizationOrderAttachmentStencilAccessEXT = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT,
+ eEnableLegacyDitheringEXT = VK_SUBPASS_DESCRIPTION_ENABLE_LEGACY_DITHERING_BIT_EXT,
+ eRasterizationOrderAttachmentColorAccessARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_ARM,
+ eRasterizationOrderAttachmentDepthAccessARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM,
+ eRasterizationOrderAttachmentStencilAccessARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM
};
- VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlagBits value )
- {
- switch ( value )
- {
- case SubpassDescriptionFlagBits::ePerViewAttributesNVX: return "PerViewAttributesNVX";
- case SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX: return "PerViewPositionXOnlyNVX";
- case SubpassDescriptionFlagBits::eFragmentRegionQCOM: return "FragmentRegionQCOM";
- case SubpassDescriptionFlagBits::eShaderResolveQCOM: return "ShaderResolveQCOM";
- case SubpassDescriptionFlagBits::eRasterizationOrderAttachmentColorAccessARM:
- return "RasterizationOrderAttachmentColorAccessARM";
- case SubpassDescriptionFlagBits::eRasterizationOrderAttachmentDepthAccessARM:
- return "RasterizationOrderAttachmentDepthAccessARM";
- case SubpassDescriptionFlagBits::eRasterizationOrderAttachmentStencilAccessARM:
- return "RasterizationOrderAttachmentStencilAccessARM";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class CommandPoolCreateFlagBits : VkCommandPoolCreateFlags
{
eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,
@@ -4805,61 +2449,22 @@ namespace VULKAN_HPP_NAMESPACE
eProtected = VK_COMMAND_POOL_CREATE_PROTECTED_BIT
};
- VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlagBits value )
- {
- switch ( value )
- {
- case CommandPoolCreateFlagBits::eTransient: return "Transient";
- case CommandPoolCreateFlagBits::eResetCommandBuffer: return "ResetCommandBuffer";
- case CommandPoolCreateFlagBits::eProtected: return "Protected";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class CommandPoolResetFlagBits : VkCommandPoolResetFlags
{
eReleaseResources = VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT
};
- VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlagBits value )
- {
- switch ( value )
- {
- case CommandPoolResetFlagBits::eReleaseResources: return "ReleaseResources";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class CommandBufferLevel
{
ePrimary = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
eSecondary = VK_COMMAND_BUFFER_LEVEL_SECONDARY
};
- VULKAN_HPP_INLINE std::string to_string( CommandBufferLevel value )
- {
- switch ( value )
- {
- case CommandBufferLevel::ePrimary: return "Primary";
- case CommandBufferLevel::eSecondary: return "Secondary";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class CommandBufferResetFlagBits : VkCommandBufferResetFlags
{
eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT
};
- VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlagBits value )
- {
- switch ( value )
- {
- case CommandBufferResetFlagBits::eReleaseResources: return "ReleaseResources";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class CommandBufferUsageFlagBits : VkCommandBufferUsageFlags
{
eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
@@ -4867,31 +2472,11 @@ namespace VULKAN_HPP_NAMESPACE
eSimultaneousUse = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT
};
- VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlagBits value )
- {
- switch ( value )
- {
- case CommandBufferUsageFlagBits::eOneTimeSubmit: return "OneTimeSubmit";
- case CommandBufferUsageFlagBits::eRenderPassContinue: return "RenderPassContinue";
- case CommandBufferUsageFlagBits::eSimultaneousUse: return "SimultaneousUse";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class QueryControlFlagBits : VkQueryControlFlags
{
ePrecise = VK_QUERY_CONTROL_PRECISE_BIT
};
- VULKAN_HPP_INLINE std::string to_string( QueryControlFlagBits value )
- {
- switch ( value )
- {
- case QueryControlFlagBits::ePrecise: return "Precise";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class IndexType
{
eUint16 = VK_INDEX_TYPE_UINT16,
@@ -4901,18 +2486,6 @@ namespace VULKAN_HPP_NAMESPACE
eNoneNV = VK_INDEX_TYPE_NONE_NV
};
- VULKAN_HPP_INLINE std::string to_string( IndexType value )
- {
- switch ( value )
- {
- case IndexType::eUint16: return "Uint16";
- case IndexType::eUint32: return "Uint32";
- case IndexType::eNoneKHR: return "NoneKHR";
- case IndexType::eUint8EXT: return "Uint8EXT";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class StencilFaceFlagBits : VkStencilFaceFlags
{
eFront = VK_STENCIL_FACE_FRONT_BIT,
@@ -4921,33 +2494,12 @@ namespace VULKAN_HPP_NAMESPACE
eVkStencilFrontAndBack = VK_STENCIL_FRONT_AND_BACK
};
- VULKAN_HPP_INLINE std::string to_string( StencilFaceFlagBits value )
- {
- switch ( value )
- {
- case StencilFaceFlagBits::eFront: return "Front";
- case StencilFaceFlagBits::eBack: return "Back";
- case StencilFaceFlagBits::eFrontAndBack: return "FrontAndBack";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class SubpassContents
{
eInline = VK_SUBPASS_CONTENTS_INLINE,
eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
};
- VULKAN_HPP_INLINE std::string to_string( SubpassContents value )
- {
- switch ( value )
- {
- case SubpassContents::eInline: return "Inline";
- case SubpassContents::eSecondaryCommandBuffers: return "SecondaryCommandBuffers";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
//=== VK_VERSION_1_1 ===
enum class SubgroupFeatureFlagBits : VkSubgroupFeatureFlags
@@ -4963,23 +2515,6 @@ namespace VULKAN_HPP_NAMESPACE
ePartitionedNV = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV
};
- VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlagBits value )
- {
- switch ( value )
- {
- case SubgroupFeatureFlagBits::eBasic: return "Basic";
- case SubgroupFeatureFlagBits::eVote: return "Vote";
- case SubgroupFeatureFlagBits::eArithmetic: return "Arithmetic";
- case SubgroupFeatureFlagBits::eBallot: return "Ballot";
- case SubgroupFeatureFlagBits::eShuffle: return "Shuffle";
- case SubgroupFeatureFlagBits::eShuffleRelative: return "ShuffleRelative";
- case SubgroupFeatureFlagBits::eClustered: return "Clustered";
- case SubgroupFeatureFlagBits::eQuad: return "Quad";
- case SubgroupFeatureFlagBits::ePartitionedNV: return "PartitionedNV";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class PeerMemoryFeatureFlagBits : VkPeerMemoryFeatureFlags
{
eCopySrc = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT,
@@ -4989,18 +2524,6 @@ namespace VULKAN_HPP_NAMESPACE
};
using PeerMemoryFeatureFlagBitsKHR = PeerMemoryFeatureFlagBits;
- VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlagBits value )
- {
- switch ( value )
- {
- case PeerMemoryFeatureFlagBits::eCopySrc: return "CopySrc";
- case PeerMemoryFeatureFlagBits::eCopyDst: return "CopyDst";
- case PeerMemoryFeatureFlagBits::eGenericSrc: return "GenericSrc";
- case PeerMemoryFeatureFlagBits::eGenericDst: return "GenericDst";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class MemoryAllocateFlagBits : VkMemoryAllocateFlags
{
eDeviceMask = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT,
@@ -5009,26 +2532,10 @@ namespace VULKAN_HPP_NAMESPACE
};
using MemoryAllocateFlagBitsKHR = MemoryAllocateFlagBits;
- VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlagBits value )
- {
- switch ( value )
- {
- case MemoryAllocateFlagBits::eDeviceMask: return "DeviceMask";
- case MemoryAllocateFlagBits::eDeviceAddress: return "DeviceAddress";
- case MemoryAllocateFlagBits::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class CommandPoolTrimFlagBits : VkCommandPoolTrimFlags
{
};
- VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlagBits )
- {
- return "(void)";
- }
-
enum class PointClippingBehavior
{
eAllClipPlanes = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES,
@@ -5036,16 +2543,6 @@ namespace VULKAN_HPP_NAMESPACE
};
using PointClippingBehaviorKHR = PointClippingBehavior;
- VULKAN_HPP_INLINE std::string to_string( PointClippingBehavior value )
- {
- switch ( value )
- {
- case PointClippingBehavior::eAllClipPlanes: return "AllClipPlanes";
- case PointClippingBehavior::eUserClipPlanesOnly: return "UserClipPlanesOnly";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class TessellationDomainOrigin
{
eUpperLeft = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT,
@@ -5053,30 +2550,11 @@ namespace VULKAN_HPP_NAMESPACE
};
using TessellationDomainOriginKHR = TessellationDomainOrigin;
- VULKAN_HPP_INLINE std::string to_string( TessellationDomainOrigin value )
- {
- switch ( value )
- {
- case TessellationDomainOrigin::eUpperLeft: return "UpperLeft";
- case TessellationDomainOrigin::eLowerLeft: return "LowerLeft";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class DeviceQueueCreateFlagBits : VkDeviceQueueCreateFlags
{
eProtected = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT
};
- VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlagBits value )
- {
- switch ( value )
- {
- case DeviceQueueCreateFlagBits::eProtected: return "Protected";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class SamplerYcbcrModelConversion
{
eRgbIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY,
@@ -5087,19 +2565,6 @@ namespace VULKAN_HPP_NAMESPACE
};
using SamplerYcbcrModelConversionKHR = SamplerYcbcrModelConversion;
- VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrModelConversion value )
- {
- switch ( value )
- {
- case SamplerYcbcrModelConversion::eRgbIdentity: return "RgbIdentity";
- case SamplerYcbcrModelConversion::eYcbcrIdentity: return "YcbcrIdentity";
- case SamplerYcbcrModelConversion::eYcbcr709: return "Ycbcr709";
- case SamplerYcbcrModelConversion::eYcbcr601: return "Ycbcr601";
- case SamplerYcbcrModelConversion::eYcbcr2020: return "Ycbcr2020";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class SamplerYcbcrRange
{
eItuFull = VK_SAMPLER_YCBCR_RANGE_ITU_FULL,
@@ -5107,16 +2572,6 @@ namespace VULKAN_HPP_NAMESPACE
};
using SamplerYcbcrRangeKHR = SamplerYcbcrRange;
- VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrRange value )
- {
- switch ( value )
- {
- case SamplerYcbcrRange::eItuFull: return "ItuFull";
- case SamplerYcbcrRange::eItuNarrow: return "ItuNarrow";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class ChromaLocation
{
eCositedEven = VK_CHROMA_LOCATION_COSITED_EVEN,
@@ -5124,16 +2579,6 @@ namespace VULKAN_HPP_NAMESPACE
};
using ChromaLocationKHR = ChromaLocation;
- VULKAN_HPP_INLINE std::string to_string( ChromaLocation value )
- {
- switch ( value )
- {
- case ChromaLocation::eCositedEven: return "CositedEven";
- case ChromaLocation::eMidpoint: return "Midpoint";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class DescriptorUpdateTemplateType
{
eDescriptorSet = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET,
@@ -5141,25 +2586,10 @@ namespace VULKAN_HPP_NAMESPACE
};
using DescriptorUpdateTemplateTypeKHR = DescriptorUpdateTemplateType;
- VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateType value )
- {
- switch ( value )
- {
- case DescriptorUpdateTemplateType::eDescriptorSet: return "DescriptorSet";
- case DescriptorUpdateTemplateType::ePushDescriptorsKHR: return "PushDescriptorsKHR";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class DescriptorUpdateTemplateCreateFlagBits : VkDescriptorUpdateTemplateCreateFlags
{
};
- VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlagBits )
- {
- return "(void)";
- }
-
enum class ExternalMemoryHandleTypeFlagBits : VkExternalMemoryHandleTypeFlags
{
eOpaqueFd = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
@@ -5182,31 +2612,6 @@ namespace VULKAN_HPP_NAMESPACE
};
using ExternalMemoryHandleTypeFlagBitsKHR = ExternalMemoryHandleTypeFlagBits;
- VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBits value )
- {
- switch ( value )
- {
- case ExternalMemoryHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd";
- case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32";
- case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
- case ExternalMemoryHandleTypeFlagBits::eD3D11Texture: return "D3D11Texture";
- case ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt: return "D3D11TextureKmt";
- case ExternalMemoryHandleTypeFlagBits::eD3D12Heap: return "D3D12Heap";
- case ExternalMemoryHandleTypeFlagBits::eD3D12Resource: return "D3D12Resource";
- case ExternalMemoryHandleTypeFlagBits::eDmaBufEXT: return "DmaBufEXT";
-#if defined( VK_USE_PLATFORM_ANDROID_KHR )
- case ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID: return "AndroidHardwareBufferANDROID";
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
- case ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT: return "HostAllocationEXT";
- case ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT: return "HostMappedForeignMemoryEXT";
-#if defined( VK_USE_PLATFORM_FUCHSIA )
- case ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA: return "ZirconVmoFUCHSIA";
-#endif /*VK_USE_PLATFORM_FUCHSIA*/
- case ExternalMemoryHandleTypeFlagBits::eRdmaAddressNV: return "RdmaAddressNV";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class ExternalMemoryFeatureFlagBits : VkExternalMemoryFeatureFlags
{
eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT,
@@ -5215,17 +2620,6 @@ namespace VULKAN_HPP_NAMESPACE
};
using ExternalMemoryFeatureFlagBitsKHR = ExternalMemoryFeatureFlagBits;
- VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBits value )
- {
- switch ( value )
- {
- case ExternalMemoryFeatureFlagBits::eDedicatedOnly: return "DedicatedOnly";
- case ExternalMemoryFeatureFlagBits::eExportable: return "Exportable";
- case ExternalMemoryFeatureFlagBits::eImportable: return "Importable";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class ExternalFenceHandleTypeFlagBits : VkExternalFenceHandleTypeFlags
{
eOpaqueFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT,
@@ -5235,18 +2629,6 @@ namespace VULKAN_HPP_NAMESPACE
};
using ExternalFenceHandleTypeFlagBitsKHR = ExternalFenceHandleTypeFlagBits;
- VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlagBits value )
- {
- switch ( value )
- {
- case ExternalFenceHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd";
- case ExternalFenceHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32";
- case ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
- case ExternalFenceHandleTypeFlagBits::eSyncFd: return "SyncFd";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class ExternalFenceFeatureFlagBits : VkExternalFenceFeatureFlags
{
eExportable = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT,
@@ -5254,46 +2636,18 @@ namespace VULKAN_HPP_NAMESPACE
};
using ExternalFenceFeatureFlagBitsKHR = ExternalFenceFeatureFlagBits;
- VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlagBits value )
- {
- switch ( value )
- {
- case ExternalFenceFeatureFlagBits::eExportable: return "Exportable";
- case ExternalFenceFeatureFlagBits::eImportable: return "Importable";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class FenceImportFlagBits : VkFenceImportFlags
{
eTemporary = VK_FENCE_IMPORT_TEMPORARY_BIT
};
using FenceImportFlagBitsKHR = FenceImportFlagBits;
- VULKAN_HPP_INLINE std::string to_string( FenceImportFlagBits value )
- {
- switch ( value )
- {
- case FenceImportFlagBits::eTemporary: return "Temporary";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class SemaphoreImportFlagBits : VkSemaphoreImportFlags
{
eTemporary = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT
};
using SemaphoreImportFlagBitsKHR = SemaphoreImportFlagBits;
- VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlagBits value )
- {
- switch ( value )
- {
- case SemaphoreImportFlagBits::eTemporary: return "Temporary";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class ExternalSemaphoreHandleTypeFlagBits : VkExternalSemaphoreHandleTypeFlags
{
eOpaqueFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
@@ -5308,22 +2662,6 @@ namespace VULKAN_HPP_NAMESPACE
};
using ExternalSemaphoreHandleTypeFlagBitsKHR = ExternalSemaphoreHandleTypeFlagBits;
- VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlagBits value )
- {
- switch ( value )
- {
- case ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd";
- case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32";
- case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
- case ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence: return "D3D12Fence";
- case ExternalSemaphoreHandleTypeFlagBits::eSyncFd: return "SyncFd";
-#if defined( VK_USE_PLATFORM_FUCHSIA )
- case ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA: return "ZirconEventFUCHSIA";
-#endif /*VK_USE_PLATFORM_FUCHSIA*/
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class ExternalSemaphoreFeatureFlagBits : VkExternalSemaphoreFeatureFlags
{
eExportable = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT,
@@ -5331,16 +2669,6 @@ namespace VULKAN_HPP_NAMESPACE
};
using ExternalSemaphoreFeatureFlagBitsKHR = ExternalSemaphoreFeatureFlagBits;
- VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlagBits value )
- {
- switch ( value )
- {
- case ExternalSemaphoreFeatureFlagBits::eExportable: return "Exportable";
- case ExternalSemaphoreFeatureFlagBits::eImportable: return "Importable";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
//=== VK_VERSION_1_2 ===
enum class DriverId
@@ -5366,40 +2694,11 @@ namespace VULKAN_HPP_NAMESPACE
eMesaV3Dv = VK_DRIVER_ID_MESA_V3DV,
eMesaPanvk = VK_DRIVER_ID_MESA_PANVK,
eSamsungProprietary = VK_DRIVER_ID_SAMSUNG_PROPRIETARY,
- eMesaVenus = VK_DRIVER_ID_MESA_VENUS
+ eMesaVenus = VK_DRIVER_ID_MESA_VENUS,
+ eMesaDozen = VK_DRIVER_ID_MESA_DOZEN
};
using DriverIdKHR = DriverId;
- VULKAN_HPP_INLINE std::string to_string( DriverId value )
- {
- switch ( value )
- {
- case DriverId::eAmdProprietary: return "AmdProprietary";
- case DriverId::eAmdOpenSource: return "AmdOpenSource";
- case DriverId::eMesaRadv: return "MesaRadv";
- case DriverId::eNvidiaProprietary: return "NvidiaProprietary";
- case DriverId::eIntelProprietaryWindows: return "IntelProprietaryWindows";
- case DriverId::eIntelOpenSourceMESA: return "IntelOpenSourceMESA";
- case DriverId::eImaginationProprietary: return "ImaginationProprietary";
- case DriverId::eQualcommProprietary: return "QualcommProprietary";
- case DriverId::eArmProprietary: return "ArmProprietary";
- case DriverId::eGoogleSwiftshader: return "GoogleSwiftshader";
- case DriverId::eGgpProprietary: return "GgpProprietary";
- case DriverId::eBroadcomProprietary: return "BroadcomProprietary";
- case DriverId::eMesaLlvmpipe: return "MesaLlvmpipe";
- case DriverId::eMoltenvk: return "Moltenvk";
- case DriverId::eCoreaviProprietary: return "CoreaviProprietary";
- case DriverId::eJuiceProprietary: return "JuiceProprietary";
- case DriverId::eVerisiliconProprietary: return "VerisiliconProprietary";
- case DriverId::eMesaTurnip: return "MesaTurnip";
- case DriverId::eMesaV3Dv: return "MesaV3Dv";
- case DriverId::eMesaPanvk: return "MesaPanvk";
- case DriverId::eSamsungProprietary: return "SamsungProprietary";
- case DriverId::eMesaVenus: return "MesaVenus";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class ShaderFloatControlsIndependence
{
e32BitOnly = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY,
@@ -5408,17 +2707,6 @@ namespace VULKAN_HPP_NAMESPACE
};
using ShaderFloatControlsIndependenceKHR = ShaderFloatControlsIndependence;
- VULKAN_HPP_INLINE std::string to_string( ShaderFloatControlsIndependence value )
- {
- switch ( value )
- {
- case ShaderFloatControlsIndependence::e32BitOnly: return "32BitOnly";
- case ShaderFloatControlsIndependence::eAll: return "All";
- case ShaderFloatControlsIndependence::eNone: return "None";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class DescriptorBindingFlagBits : VkDescriptorBindingFlags
{
eUpdateAfterBind = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT,
@@ -5428,18 +2716,6 @@ namespace VULKAN_HPP_NAMESPACE
};
using DescriptorBindingFlagBitsEXT = DescriptorBindingFlagBits;
- VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlagBits value )
- {
- switch ( value )
- {
- case DescriptorBindingFlagBits::eUpdateAfterBind: return "UpdateAfterBind";
- case DescriptorBindingFlagBits::eUpdateUnusedWhilePending: return "UpdateUnusedWhilePending";
- case DescriptorBindingFlagBits::ePartiallyBound: return "PartiallyBound";
- case DescriptorBindingFlagBits::eVariableDescriptorCount: return "VariableDescriptorCount";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class ResolveModeFlagBits : VkResolveModeFlags
{
eNone = VK_RESOLVE_MODE_NONE,
@@ -5450,19 +2726,6 @@ namespace VULKAN_HPP_NAMESPACE
};
using ResolveModeFlagBitsKHR = ResolveModeFlagBits;
- VULKAN_HPP_INLINE std::string to_string( ResolveModeFlagBits value )
- {
- switch ( value )
- {
- case ResolveModeFlagBits::eNone: return "None";
- case ResolveModeFlagBits::eSampleZero: return "SampleZero";
- case ResolveModeFlagBits::eAverage: return "Average";
- case ResolveModeFlagBits::eMin: return "Min";
- case ResolveModeFlagBits::eMax: return "Max";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class SamplerReductionMode
{
eWeightedAverage = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE,
@@ -5471,17 +2734,6 @@ namespace VULKAN_HPP_NAMESPACE
};
using SamplerReductionModeEXT = SamplerReductionMode;
- VULKAN_HPP_INLINE std::string to_string( SamplerReductionMode value )
- {
- switch ( value )
- {
- case SamplerReductionMode::eWeightedAverage: return "WeightedAverage";
- case SamplerReductionMode::eMin: return "Min";
- case SamplerReductionMode::eMax: return "Max";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class SemaphoreType
{
eBinary = VK_SEMAPHORE_TYPE_BINARY,
@@ -5489,31 +2741,12 @@ namespace VULKAN_HPP_NAMESPACE
};
using SemaphoreTypeKHR = SemaphoreType;
- VULKAN_HPP_INLINE std::string to_string( SemaphoreType value )
- {
- switch ( value )
- {
- case SemaphoreType::eBinary: return "Binary";
- case SemaphoreType::eTimeline: return "Timeline";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class SemaphoreWaitFlagBits : VkSemaphoreWaitFlags
{
eAny = VK_SEMAPHORE_WAIT_ANY_BIT
};
using SemaphoreWaitFlagBitsKHR = SemaphoreWaitFlagBits;
- VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlagBits value )
- {
- switch ( value )
- {
- case SemaphoreWaitFlagBits::eAny: return "Any";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
//=== VK_VERSION_1_3 ===
enum class PipelineCreationFeedbackFlagBits : VkPipelineCreationFeedbackFlags
@@ -5524,17 +2757,6 @@ namespace VULKAN_HPP_NAMESPACE
};
using PipelineCreationFeedbackFlagBitsEXT = PipelineCreationFeedbackFlagBits;
- VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagBits value )
- {
- switch ( value )
- {
- case PipelineCreationFeedbackFlagBits::eValid: return "Valid";
- case PipelineCreationFeedbackFlagBits::eApplicationPipelineCacheHit: return "ApplicationPipelineCacheHit";
- case PipelineCreationFeedbackFlagBits::eBasePipelineAcceleration: return "BasePipelineAcceleration";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class ToolPurposeFlagBits : VkToolPurposeFlags
{
eValidation = VK_TOOL_PURPOSE_VALIDATION_BIT,
@@ -5547,31 +2769,11 @@ namespace VULKAN_HPP_NAMESPACE
};
using ToolPurposeFlagBitsEXT = ToolPurposeFlagBits;
- VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlagBits value )
- {
- switch ( value )
- {
- case ToolPurposeFlagBits::eValidation: return "Validation";
- case ToolPurposeFlagBits::eProfiling: return "Profiling";
- case ToolPurposeFlagBits::eTracing: return "Tracing";
- case ToolPurposeFlagBits::eAdditionalFeatures: return "AdditionalFeatures";
- case ToolPurposeFlagBits::eModifyingFeatures: return "ModifyingFeatures";
- case ToolPurposeFlagBits::eDebugReportingEXT: return "DebugReportingEXT";
- case ToolPurposeFlagBits::eDebugMarkersEXT: return "DebugMarkersEXT";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class PrivateDataSlotCreateFlagBits : VkPrivateDataSlotCreateFlags
{
};
using PrivateDataSlotCreateFlagBitsEXT = PrivateDataSlotCreateFlagBits;
- VULKAN_HPP_INLINE std::string to_string( PrivateDataSlotCreateFlagBits )
- {
- return "(void)";
- }
-
enum class PipelineStageFlagBits2 : VkPipelineStageFlags2
{
eNone = VK_PIPELINE_STAGE_2_NONE,
@@ -5610,65 +2812,22 @@ namespace VULKAN_HPP_NAMESPACE
eAccelerationStructureBuildKHR = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR,
eRayTracingShaderKHR = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR,
eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT,
- eTaskShaderNV = VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV,
- eMeshShaderNV = VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV,
+ eTaskShaderEXT = VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_EXT,
+ eMeshShaderEXT = VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_EXT,
eSubpassShadingHUAWEI = VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI,
eInvocationMaskHUAWEI = VK_PIPELINE_STAGE_2_INVOCATION_MASK_BIT_HUAWEI,
+ eAccelerationStructureCopyKHR = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_COPY_BIT_KHR,
+ eMicromapBuildEXT = VK_PIPELINE_STAGE_2_MICROMAP_BUILD_BIT_EXT,
+ eOpticalFlowNV = VK_PIPELINE_STAGE_2_OPTICAL_FLOW_BIT_NV,
eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
+ eMeshShaderNV = VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV,
eRayTracingShaderNV = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_NV,
eShadingRateImageNV = VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV,
+ eTaskShaderNV = VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV,
eTransfer = VK_PIPELINE_STAGE_2_TRANSFER_BIT
};
using PipelineStageFlagBits2KHR = PipelineStageFlagBits2;
- VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits2 value )
- {
- switch ( value )
- {
- case PipelineStageFlagBits2::eNone: return "None";
- case PipelineStageFlagBits2::eTopOfPipe: return "TopOfPipe";
- case PipelineStageFlagBits2::eDrawIndirect: return "DrawIndirect";
- case PipelineStageFlagBits2::eVertexInput: return "VertexInput";
- case PipelineStageFlagBits2::eVertexShader: return "VertexShader";
- case PipelineStageFlagBits2::eTessellationControlShader: return "TessellationControlShader";
- case PipelineStageFlagBits2::eTessellationEvaluationShader: return "TessellationEvaluationShader";
- case PipelineStageFlagBits2::eGeometryShader: return "GeometryShader";
- case PipelineStageFlagBits2::eFragmentShader: return "FragmentShader";
- case PipelineStageFlagBits2::eEarlyFragmentTests: return "EarlyFragmentTests";
- case PipelineStageFlagBits2::eLateFragmentTests: return "LateFragmentTests";
- case PipelineStageFlagBits2::eColorAttachmentOutput: return "ColorAttachmentOutput";
- case PipelineStageFlagBits2::eComputeShader: return "ComputeShader";
- case PipelineStageFlagBits2::eAllTransfer: return "AllTransfer";
- case PipelineStageFlagBits2::eBottomOfPipe: return "BottomOfPipe";
- case PipelineStageFlagBits2::eHost: return "Host";
- case PipelineStageFlagBits2::eAllGraphics: return "AllGraphics";
- case PipelineStageFlagBits2::eAllCommands: return "AllCommands";
- case PipelineStageFlagBits2::eCopy: return "Copy";
- case PipelineStageFlagBits2::eResolve: return "Resolve";
- case PipelineStageFlagBits2::eBlit: return "Blit";
- case PipelineStageFlagBits2::eClear: return "Clear";
- case PipelineStageFlagBits2::eIndexInput: return "IndexInput";
- case PipelineStageFlagBits2::eVertexAttributeInput: return "VertexAttributeInput";
- case PipelineStageFlagBits2::ePreRasterizationShaders: return "PreRasterizationShaders";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- case PipelineStageFlagBits2::eVideoDecodeKHR: return "VideoDecodeKHR";
- case PipelineStageFlagBits2::eVideoEncodeKHR: return "VideoEncodeKHR";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- case PipelineStageFlagBits2::eTransformFeedbackEXT: return "TransformFeedbackEXT";
- case PipelineStageFlagBits2::eConditionalRenderingEXT: return "ConditionalRenderingEXT";
- case PipelineStageFlagBits2::eCommandPreprocessNV: return "CommandPreprocessNV";
- case PipelineStageFlagBits2::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR";
- case PipelineStageFlagBits2::eAccelerationStructureBuildKHR: return "AccelerationStructureBuildKHR";
- case PipelineStageFlagBits2::eRayTracingShaderKHR: return "RayTracingShaderKHR";
- case PipelineStageFlagBits2::eFragmentDensityProcessEXT: return "FragmentDensityProcessEXT";
- case PipelineStageFlagBits2::eTaskShaderNV: return "TaskShaderNV";
- case PipelineStageFlagBits2::eMeshShaderNV: return "MeshShaderNV";
- case PipelineStageFlagBits2::eSubpassShadingHUAWEI: return "SubpassShadingHUAWEI";
- case PipelineStageFlagBits2::eInvocationMaskHUAWEI: return "InvocationMaskHUAWEI";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class AccessFlagBits2 : VkAccessFlags2
{
eNone = VK_ACCESS_2_NONE,
@@ -5710,118 +2869,55 @@ namespace VULKAN_HPP_NAMESPACE
eFragmentDensityMapReadEXT = VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT,
eColorAttachmentReadNoncoherentEXT = VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT,
eInvocationMaskReadHUAWEI = VK_ACCESS_2_INVOCATION_MASK_READ_BIT_HUAWEI,
+ eShaderBindingTableReadKHR = VK_ACCESS_2_SHADER_BINDING_TABLE_READ_BIT_KHR,
+ eMicromapReadEXT = VK_ACCESS_2_MICROMAP_READ_BIT_EXT,
+ eMicromapWriteEXT = VK_ACCESS_2_MICROMAP_WRITE_BIT_EXT,
+ eOpticalFlowReadNV = VK_ACCESS_2_OPTICAL_FLOW_READ_BIT_NV,
+ eOpticalFlowWriteNV = VK_ACCESS_2_OPTICAL_FLOW_WRITE_BIT_NV,
eAccelerationStructureReadNV = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_NV,
eAccelerationStructureWriteNV = VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_NV,
eShadingRateImageReadNV = VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV
};
using AccessFlagBits2KHR = AccessFlagBits2;
- VULKAN_HPP_INLINE std::string to_string( AccessFlagBits2 value )
- {
- switch ( value )
- {
- case AccessFlagBits2::eNone: return "None";
- case AccessFlagBits2::eIndirectCommandRead: return "IndirectCommandRead";
- case AccessFlagBits2::eIndexRead: return "IndexRead";
- case AccessFlagBits2::eVertexAttributeRead: return "VertexAttributeRead";
- case AccessFlagBits2::eUniformRead: return "UniformRead";
- case AccessFlagBits2::eInputAttachmentRead: return "InputAttachmentRead";
- case AccessFlagBits2::eShaderRead: return "ShaderRead";
- case AccessFlagBits2::eShaderWrite: return "ShaderWrite";
- case AccessFlagBits2::eColorAttachmentRead: return "ColorAttachmentRead";
- case AccessFlagBits2::eColorAttachmentWrite: return "ColorAttachmentWrite";
- case AccessFlagBits2::eDepthStencilAttachmentRead: return "DepthStencilAttachmentRead";
- case AccessFlagBits2::eDepthStencilAttachmentWrite: return "DepthStencilAttachmentWrite";
- case AccessFlagBits2::eTransferRead: return "TransferRead";
- case AccessFlagBits2::eTransferWrite: return "TransferWrite";
- case AccessFlagBits2::eHostRead: return "HostRead";
- case AccessFlagBits2::eHostWrite: return "HostWrite";
- case AccessFlagBits2::eMemoryRead: return "MemoryRead";
- case AccessFlagBits2::eMemoryWrite: return "MemoryWrite";
- case AccessFlagBits2::eShaderSampledRead: return "ShaderSampledRead";
- case AccessFlagBits2::eShaderStorageRead: return "ShaderStorageRead";
- case AccessFlagBits2::eShaderStorageWrite: return "ShaderStorageWrite";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- case AccessFlagBits2::eVideoDecodeReadKHR: return "VideoDecodeReadKHR";
- case AccessFlagBits2::eVideoDecodeWriteKHR: return "VideoDecodeWriteKHR";
- case AccessFlagBits2::eVideoEncodeReadKHR: return "VideoEncodeReadKHR";
- case AccessFlagBits2::eVideoEncodeWriteKHR: return "VideoEncodeWriteKHR";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- case AccessFlagBits2::eTransformFeedbackWriteEXT: return "TransformFeedbackWriteEXT";
- case AccessFlagBits2::eTransformFeedbackCounterReadEXT: return "TransformFeedbackCounterReadEXT";
- case AccessFlagBits2::eTransformFeedbackCounterWriteEXT: return "TransformFeedbackCounterWriteEXT";
- case AccessFlagBits2::eConditionalRenderingReadEXT: return "ConditionalRenderingReadEXT";
- case AccessFlagBits2::eCommandPreprocessReadNV: return "CommandPreprocessReadNV";
- case AccessFlagBits2::eCommandPreprocessWriteNV: return "CommandPreprocessWriteNV";
- case AccessFlagBits2::eFragmentShadingRateAttachmentReadKHR: return "FragmentShadingRateAttachmentReadKHR";
- case AccessFlagBits2::eAccelerationStructureReadKHR: return "AccelerationStructureReadKHR";
- case AccessFlagBits2::eAccelerationStructureWriteKHR: return "AccelerationStructureWriteKHR";
- case AccessFlagBits2::eFragmentDensityMapReadEXT: return "FragmentDensityMapReadEXT";
- case AccessFlagBits2::eColorAttachmentReadNoncoherentEXT: return "ColorAttachmentReadNoncoherentEXT";
- case AccessFlagBits2::eInvocationMaskReadHUAWEI: return "InvocationMaskReadHUAWEI";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class SubmitFlagBits : VkSubmitFlags
{
eProtected = VK_SUBMIT_PROTECTED_BIT
};
using SubmitFlagBitsKHR = SubmitFlagBits;
- VULKAN_HPP_INLINE std::string to_string( SubmitFlagBits value )
- {
- switch ( value )
- {
- case SubmitFlagBits::eProtected: return "Protected";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class RenderingFlagBits : VkRenderingFlags
{
eContentsSecondaryCommandBuffers = VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT,
eSuspending = VK_RENDERING_SUSPENDING_BIT,
- eResuming = VK_RENDERING_RESUMING_BIT
+ eResuming = VK_RENDERING_RESUMING_BIT,
+ eEnableLegacyDitheringEXT = VK_RENDERING_ENABLE_LEGACY_DITHERING_BIT_EXT
};
using RenderingFlagBitsKHR = RenderingFlagBits;
- VULKAN_HPP_INLINE std::string to_string( RenderingFlagBits value )
- {
- switch ( value )
- {
- case RenderingFlagBits::eContentsSecondaryCommandBuffers: return "ContentsSecondaryCommandBuffers";
- case RenderingFlagBits::eSuspending: return "Suspending";
- case RenderingFlagBits::eResuming: return "Resuming";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class FormatFeatureFlagBits2 : VkFormatFeatureFlags2
{
- eSampledImage = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT,
- eStorageImage = VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT,
- eStorageImageAtomic = VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT,
- eUniformTexelBuffer = VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT,
- eStorageTexelBuffer = VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT,
- eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT,
- eVertexBuffer = VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT,
- eColorAttachment = VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT,
- eColorAttachmentBlend = VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT,
- eDepthStencilAttachment = VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT,
- eBlitSrc = VK_FORMAT_FEATURE_2_BLIT_SRC_BIT,
- eBlitDst = VK_FORMAT_FEATURE_2_BLIT_DST_BIT,
- eSampledImageFilterLinear = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT,
- eSampledImageFilterCubic = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT,
- eTransferSrc = VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT,
- eTransferDst = VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT,
- eSampledImageFilterMinmax = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT,
- eMidpointChromaSamples = VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT,
- eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,
- eSampledImageYcbcrConversionSeparateReconstructionFilter =
- VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,
- eSampledImageYcbcrConversionChromaReconstructionExplicit =
- VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT,
+ eSampledImage = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT,
+ eStorageImage = VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT,
+ eStorageImageAtomic = VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT,
+ eUniformTexelBuffer = VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT,
+ eStorageTexelBuffer = VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT,
+ eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT,
+ eVertexBuffer = VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT,
+ eColorAttachment = VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT,
+ eColorAttachmentBlend = VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT,
+ eDepthStencilAttachment = VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT,
+ eBlitSrc = VK_FORMAT_FEATURE_2_BLIT_SRC_BIT,
+ eBlitDst = VK_FORMAT_FEATURE_2_BLIT_DST_BIT,
+ eSampledImageFilterLinear = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT,
+ eSampledImageFilterCubic = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT,
+ eTransferSrc = VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT,
+ eTransferDst = VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT,
+ eSampledImageFilterMinmax = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT,
+ eMidpointChromaSamples = VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT,
+ eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,
+ eSampledImageYcbcrConversionSeparateReconstructionFilter = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,
+ eSampledImageYcbcrConversionChromaReconstructionExplicit = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT,
eSampledImageYcbcrConversionChromaReconstructionExplicitForceable =
VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT,
eDisjoint = VK_FORMAT_FEATURE_2_DISJOINT_BIT,
@@ -5841,61 +2937,17 @@ namespace VULKAN_HPP_NAMESPACE
eVideoEncodeDpbKHR = VK_FORMAT_FEATURE_2_VIDEO_ENCODE_DPB_BIT_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
eLinearColorAttachmentNV = VK_FORMAT_FEATURE_2_LINEAR_COLOR_ATTACHMENT_BIT_NV,
+ eWeightImageQCOM = VK_FORMAT_FEATURE_2_WEIGHT_IMAGE_BIT_QCOM,
+ eWeightSampledImageQCOM = VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM,
+ eBlockMatchingQCOM = VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM,
+ eBoxFilterSampledQCOM = VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM,
+ eOpticalFlowImageNV = VK_FORMAT_FEATURE_2_OPTICAL_FLOW_IMAGE_BIT_NV,
+ eOpticalFlowVectorNV = VK_FORMAT_FEATURE_2_OPTICAL_FLOW_VECTOR_BIT_NV,
+ eOpticalFlowCostNV = VK_FORMAT_FEATURE_2_OPTICAL_FLOW_COST_BIT_NV,
eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT
};
using FormatFeatureFlagBits2KHR = FormatFeatureFlagBits2;
- VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlagBits2 value )
- {
- switch ( value )
- {
- case FormatFeatureFlagBits2::eSampledImage: return "SampledImage";
- case FormatFeatureFlagBits2::eStorageImage: return "StorageImage";
- case FormatFeatureFlagBits2::eStorageImageAtomic: return "StorageImageAtomic";
- case FormatFeatureFlagBits2::eUniformTexelBuffer: return "UniformTexelBuffer";
- case FormatFeatureFlagBits2::eStorageTexelBuffer: return "StorageTexelBuffer";
- case FormatFeatureFlagBits2::eStorageTexelBufferAtomic: return "StorageTexelBufferAtomic";
- case FormatFeatureFlagBits2::eVertexBuffer: return "VertexBuffer";
- case FormatFeatureFlagBits2::eColorAttachment: return "ColorAttachment";
- case FormatFeatureFlagBits2::eColorAttachmentBlend: return "ColorAttachmentBlend";
- case FormatFeatureFlagBits2::eDepthStencilAttachment: return "DepthStencilAttachment";
- case FormatFeatureFlagBits2::eBlitSrc: return "BlitSrc";
- case FormatFeatureFlagBits2::eBlitDst: return "BlitDst";
- case FormatFeatureFlagBits2::eSampledImageFilterLinear: return "SampledImageFilterLinear";
- case FormatFeatureFlagBits2::eSampledImageFilterCubic: return "SampledImageFilterCubic";
- case FormatFeatureFlagBits2::eTransferSrc: return "TransferSrc";
- case FormatFeatureFlagBits2::eTransferDst: return "TransferDst";
- case FormatFeatureFlagBits2::eSampledImageFilterMinmax: return "SampledImageFilterMinmax";
- case FormatFeatureFlagBits2::eMidpointChromaSamples: return "MidpointChromaSamples";
- case FormatFeatureFlagBits2::eSampledImageYcbcrConversionLinearFilter:
- return "SampledImageYcbcrConversionLinearFilter";
- case FormatFeatureFlagBits2::eSampledImageYcbcrConversionSeparateReconstructionFilter:
- return "SampledImageYcbcrConversionSeparateReconstructionFilter";
- case FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicit:
- return "SampledImageYcbcrConversionChromaReconstructionExplicit";
- case FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable:
- return "SampledImageYcbcrConversionChromaReconstructionExplicitForceable";
- case FormatFeatureFlagBits2::eDisjoint: return "Disjoint";
- case FormatFeatureFlagBits2::eCositedChromaSamples: return "CositedChromaSamples";
- case FormatFeatureFlagBits2::eStorageReadWithoutFormat: return "StorageReadWithoutFormat";
- case FormatFeatureFlagBits2::eStorageWriteWithoutFormat: return "StorageWriteWithoutFormat";
- case FormatFeatureFlagBits2::eSampledImageDepthComparison: return "SampledImageDepthComparison";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- case FormatFeatureFlagBits2::eVideoDecodeOutputKHR: return "VideoDecodeOutputKHR";
- case FormatFeatureFlagBits2::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- case FormatFeatureFlagBits2::eAccelerationStructureVertexBufferKHR: return "AccelerationStructureVertexBufferKHR";
- case FormatFeatureFlagBits2::eFragmentDensityMapEXT: return "FragmentDensityMapEXT";
- case FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- case FormatFeatureFlagBits2::eVideoEncodeInputKHR: return "VideoEncodeInputKHR";
- case FormatFeatureFlagBits2::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- case FormatFeatureFlagBits2::eLinearColorAttachmentNV: return "LinearColorAttachmentNV";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
//=== VK_KHR_surface ===
enum class SurfaceTransformFlagBitsKHR : VkSurfaceTransformFlagsKHR
@@ -5911,23 +2963,6 @@ namespace VULKAN_HPP_NAMESPACE
eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR
};
- VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagBitsKHR value )
- {
- switch ( value )
- {
- case SurfaceTransformFlagBitsKHR::eIdentity: return "Identity";
- case SurfaceTransformFlagBitsKHR::eRotate90: return "Rotate90";
- case SurfaceTransformFlagBitsKHR::eRotate180: return "Rotate180";
- case SurfaceTransformFlagBitsKHR::eRotate270: return "Rotate270";
- case SurfaceTransformFlagBitsKHR::eHorizontalMirror: return "HorizontalMirror";
- case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90: return "HorizontalMirrorRotate90";
- case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180: return "HorizontalMirrorRotate180";
- case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270: return "HorizontalMirrorRotate270";
- case SurfaceTransformFlagBitsKHR::eInherit: return "Inherit";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class PresentModeKHR
{
eImmediate = VK_PRESENT_MODE_IMMEDIATE_KHR,
@@ -5938,20 +2973,6 @@ namespace VULKAN_HPP_NAMESPACE
eSharedContinuousRefresh = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR
};
- VULKAN_HPP_INLINE std::string to_string( PresentModeKHR value )
- {
- switch ( value )
- {
- case PresentModeKHR::eImmediate: return "Immediate";
- case PresentModeKHR::eMailbox: return "Mailbox";
- case PresentModeKHR::eFifo: return "Fifo";
- case PresentModeKHR::eFifoRelaxed: return "FifoRelaxed";
- case PresentModeKHR::eSharedDemandRefresh: return "SharedDemandRefresh";
- case PresentModeKHR::eSharedContinuousRefresh: return "SharedContinuousRefresh";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class ColorSpaceKHR
{
eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,
@@ -5974,30 +2995,6 @@ namespace VULKAN_HPP_NAMESPACE
eDciP3LinearEXT = VK_COLOR_SPACE_DCI_P3_LINEAR_EXT
};
- VULKAN_HPP_INLINE std::string to_string( ColorSpaceKHR value )
- {
- switch ( value )
- {
- case ColorSpaceKHR::eSrgbNonlinear: return "SrgbNonlinear";
- case ColorSpaceKHR::eDisplayP3NonlinearEXT: return "DisplayP3NonlinearEXT";
- case ColorSpaceKHR::eExtendedSrgbLinearEXT: return "ExtendedSrgbLinearEXT";
- case ColorSpaceKHR::eDisplayP3LinearEXT: return "DisplayP3LinearEXT";
- case ColorSpaceKHR::eDciP3NonlinearEXT: return "DciP3NonlinearEXT";
- case ColorSpaceKHR::eBt709LinearEXT: return "Bt709LinearEXT";
- case ColorSpaceKHR::eBt709NonlinearEXT: return "Bt709NonlinearEXT";
- case ColorSpaceKHR::eBt2020LinearEXT: return "Bt2020LinearEXT";
- case ColorSpaceKHR::eHdr10St2084EXT: return "Hdr10St2084EXT";
- case ColorSpaceKHR::eDolbyvisionEXT: return "DolbyvisionEXT";
- case ColorSpaceKHR::eHdr10HlgEXT: return "Hdr10HlgEXT";
- case ColorSpaceKHR::eAdobergbLinearEXT: return "AdobergbLinearEXT";
- case ColorSpaceKHR::eAdobergbNonlinearEXT: return "AdobergbNonlinearEXT";
- case ColorSpaceKHR::ePassThroughEXT: return "PassThroughEXT";
- case ColorSpaceKHR::eExtendedSrgbNonlinearEXT: return "ExtendedSrgbNonlinearEXT";
- case ColorSpaceKHR::eDisplayNativeAMD: return "DisplayNativeAMD";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class CompositeAlphaFlagBitsKHR : VkCompositeAlphaFlagsKHR
{
eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
@@ -6006,18 +3003,6 @@ namespace VULKAN_HPP_NAMESPACE
eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR
};
- VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagBitsKHR value )
- {
- switch ( value )
- {
- case CompositeAlphaFlagBitsKHR::eOpaque: return "Opaque";
- case CompositeAlphaFlagBitsKHR::ePreMultiplied: return "PreMultiplied";
- case CompositeAlphaFlagBitsKHR::ePostMultiplied: return "PostMultiplied";
- case CompositeAlphaFlagBitsKHR::eInherit: return "Inherit";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
//=== VK_KHR_swapchain ===
enum class SwapchainCreateFlagBitsKHR : VkSwapchainCreateFlagsKHR
@@ -6027,17 +3012,6 @@ namespace VULKAN_HPP_NAMESPACE
eMutableFormat = VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR
};
- VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagBitsKHR value )
- {
- switch ( value )
- {
- case SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions: return "SplitInstanceBindRegions";
- case SwapchainCreateFlagBitsKHR::eProtected: return "Protected";
- case SwapchainCreateFlagBitsKHR::eMutableFormat: return "MutableFormat";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class DeviceGroupPresentModeFlagBitsKHR : VkDeviceGroupPresentModeFlagsKHR
{
eLocal = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR,
@@ -6046,18 +3020,6 @@ namespace VULKAN_HPP_NAMESPACE
eLocalMultiDevice = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR
};
- VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagBitsKHR value )
- {
- switch ( value )
- {
- case DeviceGroupPresentModeFlagBitsKHR::eLocal: return "Local";
- case DeviceGroupPresentModeFlagBitsKHR::eRemote: return "Remote";
- case DeviceGroupPresentModeFlagBitsKHR::eSum: return "Sum";
- case DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice: return "LocalMultiDevice";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
//=== VK_KHR_display ===
enum class DisplayPlaneAlphaFlagBitsKHR : VkDisplayPlaneAlphaFlagsKHR
@@ -6068,47 +3030,20 @@ namespace VULKAN_HPP_NAMESPACE
ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR
};
- VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagBitsKHR value )
- {
- switch ( value )
- {
- case DisplayPlaneAlphaFlagBitsKHR::eOpaque: return "Opaque";
- case DisplayPlaneAlphaFlagBitsKHR::eGlobal: return "Global";
- case DisplayPlaneAlphaFlagBitsKHR::ePerPixel: return "PerPixel";
- case DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied: return "PerPixelPremultiplied";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class DisplayModeCreateFlagBitsKHR : VkDisplayModeCreateFlagsKHR
{
};
- VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagBitsKHR )
- {
- return "(void)";
- }
-
enum class DisplaySurfaceCreateFlagBitsKHR : VkDisplaySurfaceCreateFlagsKHR
{
};
- VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagBitsKHR )
- {
- return "(void)";
- }
-
#if defined( VK_USE_PLATFORM_XLIB_KHR )
//=== VK_KHR_xlib_surface ===
enum class XlibSurfaceCreateFlagBitsKHR : VkXlibSurfaceCreateFlagsKHR
{
};
-
- VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagBitsKHR )
- {
- return "(void)";
- }
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
#if defined( VK_USE_PLATFORM_XCB_KHR )
@@ -6117,11 +3052,6 @@ namespace VULKAN_HPP_NAMESPACE
enum class XcbSurfaceCreateFlagBitsKHR : VkXcbSurfaceCreateFlagsKHR
{
};
-
- VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagBitsKHR )
- {
- return "(void)";
- }
#endif /*VK_USE_PLATFORM_XCB_KHR*/
#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
@@ -6130,11 +3060,6 @@ namespace VULKAN_HPP_NAMESPACE
enum class WaylandSurfaceCreateFlagBitsKHR : VkWaylandSurfaceCreateFlagsKHR
{
};
-
- VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagBitsKHR )
- {
- return "(void)";
- }
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
@@ -6143,11 +3068,6 @@ namespace VULKAN_HPP_NAMESPACE
enum class AndroidSurfaceCreateFlagBitsKHR : VkAndroidSurfaceCreateFlagsKHR
{
};
-
- VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagBitsKHR )
- {
- return "(void)";
- }
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
@@ -6156,11 +3076,6 @@ namespace VULKAN_HPP_NAMESPACE
enum class Win32SurfaceCreateFlagBitsKHR : VkWin32SurfaceCreateFlagsKHR
{
};
-
- VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagBitsKHR )
- {
- return "(void)";
- }
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_EXT_debug_report ===
@@ -6174,19 +3089,6 @@ namespace VULKAN_HPP_NAMESPACE
eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT
};
- VULKAN_HPP_INLINE std::string to_string( DebugReportFlagBitsEXT value )
- {
- switch ( value )
- {
- case DebugReportFlagBitsEXT::eInformation: return "Information";
- case DebugReportFlagBitsEXT::eWarning: return "Warning";
- case DebugReportFlagBitsEXT::ePerformanceWarning: return "PerformanceWarning";
- case DebugReportFlagBitsEXT::eError: return "Error";
- case DebugReportFlagBitsEXT::eDebug: return "Debug";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class DebugReportObjectTypeEXT
{
eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
@@ -6236,55 +3138,6 @@ namespace VULKAN_HPP_NAMESPACE
eValidationCache = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT
};
- VULKAN_HPP_INLINE std::string to_string( DebugReportObjectTypeEXT value )
- {
- switch ( value )
- {
- case DebugReportObjectTypeEXT::eUnknown: return "Unknown";
- case DebugReportObjectTypeEXT::eInstance: return "Instance";
- case DebugReportObjectTypeEXT::ePhysicalDevice: return "PhysicalDevice";
- case DebugReportObjectTypeEXT::eDevice: return "Device";
- case DebugReportObjectTypeEXT::eQueue: return "Queue";
- case DebugReportObjectTypeEXT::eSemaphore: return "Semaphore";
- case DebugReportObjectTypeEXT::eCommandBuffer: return "CommandBuffer";
- case DebugReportObjectTypeEXT::eFence: return "Fence";
- case DebugReportObjectTypeEXT::eDeviceMemory: return "DeviceMemory";
- case DebugReportObjectTypeEXT::eBuffer: return "Buffer";
- case DebugReportObjectTypeEXT::eImage: return "Image";
- case DebugReportObjectTypeEXT::eEvent: return "Event";
- case DebugReportObjectTypeEXT::eQueryPool: return "QueryPool";
- case DebugReportObjectTypeEXT::eBufferView: return "BufferView";
- case DebugReportObjectTypeEXT::eImageView: return "ImageView";
- case DebugReportObjectTypeEXT::eShaderModule: return "ShaderModule";
- case DebugReportObjectTypeEXT::ePipelineCache: return "PipelineCache";
- case DebugReportObjectTypeEXT::ePipelineLayout: return "PipelineLayout";
- case DebugReportObjectTypeEXT::eRenderPass: return "RenderPass";
- case DebugReportObjectTypeEXT::ePipeline: return "Pipeline";
- case DebugReportObjectTypeEXT::eDescriptorSetLayout: return "DescriptorSetLayout";
- case DebugReportObjectTypeEXT::eSampler: return "Sampler";
- case DebugReportObjectTypeEXT::eDescriptorPool: return "DescriptorPool";
- case DebugReportObjectTypeEXT::eDescriptorSet: return "DescriptorSet";
- case DebugReportObjectTypeEXT::eFramebuffer: return "Framebuffer";
- case DebugReportObjectTypeEXT::eCommandPool: return "CommandPool";
- case DebugReportObjectTypeEXT::eSurfaceKHR: return "SurfaceKHR";
- case DebugReportObjectTypeEXT::eSwapchainKHR: return "SwapchainKHR";
- case DebugReportObjectTypeEXT::eDebugReportCallbackEXT: return "DebugReportCallbackEXT";
- case DebugReportObjectTypeEXT::eDisplayKHR: return "DisplayKHR";
- case DebugReportObjectTypeEXT::eDisplayModeKHR: return "DisplayModeKHR";
- case DebugReportObjectTypeEXT::eValidationCacheEXT: return "ValidationCacheEXT";
- case DebugReportObjectTypeEXT::eSamplerYcbcrConversion: return "SamplerYcbcrConversion";
- case DebugReportObjectTypeEXT::eDescriptorUpdateTemplate: return "DescriptorUpdateTemplate";
- case DebugReportObjectTypeEXT::eCuModuleNVX: return "CuModuleNVX";
- case DebugReportObjectTypeEXT::eCuFunctionNVX: return "CuFunctionNVX";
- case DebugReportObjectTypeEXT::eAccelerationStructureKHR: return "AccelerationStructureKHR";
- case DebugReportObjectTypeEXT::eAccelerationStructureNV: return "AccelerationStructureNV";
-#if defined( VK_USE_PLATFORM_FUCHSIA )
- case DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA: return "BufferCollectionFUCHSIA";
-#endif /*VK_USE_PLATFORM_FUCHSIA*/
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
//=== VK_AMD_rasterization_order ===
enum class RasterizationOrderAMD
@@ -6293,67 +3146,27 @@ namespace VULKAN_HPP_NAMESPACE
eRelaxed = VK_RASTERIZATION_ORDER_RELAXED_AMD
};
- VULKAN_HPP_INLINE std::string to_string( RasterizationOrderAMD value )
- {
- switch ( value )
- {
- case RasterizationOrderAMD::eStrict: return "Strict";
- case RasterizationOrderAMD::eRelaxed: return "Relaxed";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_video_queue ===
enum class VideoCodecOperationFlagBitsKHR : VkVideoCodecOperationFlagsKHR
{
- eInvalid = VK_VIDEO_CODEC_OPERATION_INVALID_BIT_KHR,
-# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ eNone = VK_VIDEO_CODEC_OPERATION_NONE_KHR,
eEncodeH264EXT = VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_EXT,
eEncodeH265EXT = VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_EXT,
eDecodeH264EXT = VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_EXT,
eDecodeH265EXT = VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_EXT
-# endif /*VK_ENABLE_BETA_EXTENSIONS*/
};
- VULKAN_HPP_INLINE std::string to_string( VideoCodecOperationFlagBitsKHR value )
- {
- switch ( value )
- {
- case VideoCodecOperationFlagBitsKHR::eInvalid: return "Invalid";
-# if defined( VK_ENABLE_BETA_EXTENSIONS )
- case VideoCodecOperationFlagBitsKHR::eEncodeH264EXT: return "EncodeH264EXT";
- case VideoCodecOperationFlagBitsKHR::eEncodeH265EXT: return "EncodeH265EXT";
- case VideoCodecOperationFlagBitsKHR::eDecodeH264EXT: return "DecodeH264EXT";
- case VideoCodecOperationFlagBitsKHR::eDecodeH265EXT: return "DecodeH265EXT";
-# endif /*VK_ENABLE_BETA_EXTENSIONS*/
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class VideoChromaSubsamplingFlagBitsKHR : VkVideoChromaSubsamplingFlagsKHR
{
- eInvalid = VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_BIT_KHR,
+ eInvalid = VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_KHR,
eMonochrome = VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR,
e420 = VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR,
e422 = VK_VIDEO_CHROMA_SUBSAMPLING_422_BIT_KHR,
e444 = VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR
};
- VULKAN_HPP_INLINE std::string to_string( VideoChromaSubsamplingFlagBitsKHR value )
- {
- switch ( value )
- {
- case VideoChromaSubsamplingFlagBitsKHR::eInvalid: return "Invalid";
- case VideoChromaSubsamplingFlagBitsKHR::eMonochrome: return "Monochrome";
- case VideoChromaSubsamplingFlagBitsKHR::e420: return "420";
- case VideoChromaSubsamplingFlagBitsKHR::e422: return "422";
- case VideoChromaSubsamplingFlagBitsKHR::e444: return "444";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class VideoComponentBitDepthFlagBitsKHR : VkVideoComponentBitDepthFlagsKHR
{
eInvalid = VK_VIDEO_COMPONENT_BIT_DEPTH_INVALID_KHR,
@@ -6362,84 +3175,24 @@ namespace VULKAN_HPP_NAMESPACE
e12 = VK_VIDEO_COMPONENT_BIT_DEPTH_12_BIT_KHR
};
- VULKAN_HPP_INLINE std::string to_string( VideoComponentBitDepthFlagBitsKHR value )
- {
- switch ( value )
- {
- case VideoComponentBitDepthFlagBitsKHR::eInvalid: return "Invalid";
- case VideoComponentBitDepthFlagBitsKHR::e8: return "8";
- case VideoComponentBitDepthFlagBitsKHR::e10: return "10";
- case VideoComponentBitDepthFlagBitsKHR::e12: return "12";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class VideoCapabilityFlagBitsKHR : VkVideoCapabilityFlagsKHR
{
eProtectedContent = VK_VIDEO_CAPABILITY_PROTECTED_CONTENT_BIT_KHR,
eSeparateReferenceImages = VK_VIDEO_CAPABILITY_SEPARATE_REFERENCE_IMAGES_BIT_KHR
};
- VULKAN_HPP_INLINE std::string to_string( VideoCapabilityFlagBitsKHR value )
- {
- switch ( value )
- {
- case VideoCapabilityFlagBitsKHR::eProtectedContent: return "ProtectedContent";
- case VideoCapabilityFlagBitsKHR::eSeparateReferenceImages: return "SeparateReferenceImages";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class VideoSessionCreateFlagBitsKHR : VkVideoSessionCreateFlagsKHR
{
- eDefault = VK_VIDEO_SESSION_CREATE_DEFAULT_KHR,
eProtectedContent = VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR
};
- VULKAN_HPP_INLINE std::string to_string( VideoSessionCreateFlagBitsKHR value )
- {
- switch ( value )
- {
- case VideoSessionCreateFlagBitsKHR::eDefault: return "Default";
- case VideoSessionCreateFlagBitsKHR::eProtectedContent: return "ProtectedContent";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class VideoCodingControlFlagBitsKHR : VkVideoCodingControlFlagsKHR
{
- eDefault = VK_VIDEO_CODING_CONTROL_DEFAULT_KHR,
- eReset = VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR
- };
-
- VULKAN_HPP_INLINE std::string to_string( VideoCodingControlFlagBitsKHR value )
- {
- switch ( value )
- {
- case VideoCodingControlFlagBitsKHR::eDefault: return "Default";
- case VideoCodingControlFlagBitsKHR::eReset: return "Reset";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
- enum class VideoCodingQualityPresetFlagBitsKHR : VkVideoCodingQualityPresetFlagsKHR
- {
- eNormal = VK_VIDEO_CODING_QUALITY_PRESET_NORMAL_BIT_KHR,
- ePower = VK_VIDEO_CODING_QUALITY_PRESET_POWER_BIT_KHR,
- eQuality = VK_VIDEO_CODING_QUALITY_PRESET_QUALITY_BIT_KHR
+ eReset = VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR,
+ eEncodeRateControl = VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_BIT_KHR,
+ eEncodeRateControlLayer = VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_LAYER_BIT_KHR
};
- VULKAN_HPP_INLINE std::string to_string( VideoCodingQualityPresetFlagBitsKHR value )
- {
- switch ( value )
- {
- case VideoCodingQualityPresetFlagBitsKHR::eNormal: return "Normal";
- case VideoCodingQualityPresetFlagBitsKHR::ePower: return "Power";
- case VideoCodingQualityPresetFlagBitsKHR::eQuality: return "Quality";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class QueryResultStatusKHR
{
eError = VK_QUERY_RESULT_STATUS_ERROR_KHR,
@@ -6447,54 +3200,39 @@ namespace VULKAN_HPP_NAMESPACE
eComplete = VK_QUERY_RESULT_STATUS_COMPLETE_KHR
};
- VULKAN_HPP_INLINE std::string to_string( QueryResultStatusKHR value )
+ enum class VideoSessionParametersCreateFlagBitsKHR : VkVideoSessionParametersCreateFlagsKHR
{
- switch ( value )
- {
- case QueryResultStatusKHR::eError: return "Error";
- case QueryResultStatusKHR::eNotReady: return "NotReady";
- case QueryResultStatusKHR::eComplete: return "Complete";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
+ };
enum class VideoBeginCodingFlagBitsKHR : VkVideoBeginCodingFlagsKHR
{
};
- VULKAN_HPP_INLINE std::string to_string( VideoBeginCodingFlagBitsKHR )
- {
- return "(void)";
- }
-
enum class VideoEndCodingFlagBitsKHR : VkVideoEndCodingFlagsKHR
{
};
-
- VULKAN_HPP_INLINE std::string to_string( VideoEndCodingFlagBitsKHR )
- {
- return "(void)";
- }
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_video_decode_queue ===
- enum class VideoDecodeFlagBitsKHR : VkVideoDecodeFlagsKHR
+ enum class VideoDecodeCapabilityFlagBitsKHR : VkVideoDecodeCapabilityFlagsKHR
{
- eDefault = VK_VIDEO_DECODE_DEFAULT_KHR,
- eReserved0 = VK_VIDEO_DECODE_RESERVED_0_BIT_KHR
+ eDpbAndOutputCoincide = VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_COINCIDE_BIT_KHR,
+ eDpbAndOutputDistinct = VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_DISTINCT_BIT_KHR
};
- VULKAN_HPP_INLINE std::string to_string( VideoDecodeFlagBitsKHR value )
+ enum class VideoDecodeUsageFlagBitsKHR : VkVideoDecodeUsageFlagsKHR
{
- switch ( value )
- {
- case VideoDecodeFlagBitsKHR::eDefault: return "Default";
- case VideoDecodeFlagBitsKHR::eReserved0: return "Reserved0";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
+ eDefault = VK_VIDEO_DECODE_USAGE_DEFAULT_KHR,
+ eTranscoding = VK_VIDEO_DECODE_USAGE_TRANSCODING_BIT_KHR,
+ eOffline = VK_VIDEO_DECODE_USAGE_OFFLINE_BIT_KHR,
+ eStreaming = VK_VIDEO_DECODE_USAGE_STREAMING_BIT_KHR
+ };
+
+ enum class VideoDecodeFlagBitsKHR : VkVideoDecodeFlagsKHR
+ {
+ };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_EXT_transform_feedback ===
@@ -6503,48 +3241,38 @@ namespace VULKAN_HPP_NAMESPACE
{
};
- VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagBitsEXT )
- {
- return "(void)";
- }
-
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_EXT_video_encode_h264 ===
enum class VideoEncodeH264CapabilityFlagBitsEXT : VkVideoEncodeH264CapabilityFlagsEXT
{
- eCabac = VK_VIDEO_ENCODE_H264_CAPABILITY_CABAC_BIT_EXT,
- eCavlc = VK_VIDEO_ENCODE_H264_CAPABILITY_CAVLC_BIT_EXT,
- eWeightedBiPredImplicit = VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BI_PRED_IMPLICIT_BIT_EXT,
- eTransform8X8 = VK_VIDEO_ENCODE_H264_CAPABILITY_TRANSFORM_8X8_BIT_EXT,
- eChromaQpOffset = VK_VIDEO_ENCODE_H264_CAPABILITY_CHROMA_QP_OFFSET_BIT_EXT,
- eSecondChromaQpOffset = VK_VIDEO_ENCODE_H264_CAPABILITY_SECOND_CHROMA_QP_OFFSET_BIT_EXT,
- eDeblockingFilterDisabled = VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_DISABLED_BIT_EXT,
- eDeblockingFilterEnabled = VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_ENABLED_BIT_EXT,
- eDeblockingFilterPartial = VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_PARTIAL_BIT_EXT,
- eMultipleSlicePerFrame = VK_VIDEO_ENCODE_H264_CAPABILITY_MULTIPLE_SLICE_PER_FRAME_BIT_EXT,
- eEvenlyDistributedSliceSize = VK_VIDEO_ENCODE_H264_CAPABILITY_EVENLY_DISTRIBUTED_SLICE_SIZE_BIT_EXT
+ eDirect8X8InferenceEnabled = VK_VIDEO_ENCODE_H264_CAPABILITY_DIRECT_8X8_INFERENCE_ENABLED_BIT_EXT,
+ eDirect8X8InferenceDisabled = VK_VIDEO_ENCODE_H264_CAPABILITY_DIRECT_8X8_INFERENCE_DISABLED_BIT_EXT,
+ eSeparateColourPlane = VK_VIDEO_ENCODE_H264_CAPABILITY_SEPARATE_COLOUR_PLANE_BIT_EXT,
+ eQpprimeYZeroTransformBypass = VK_VIDEO_ENCODE_H264_CAPABILITY_QPPRIME_Y_ZERO_TRANSFORM_BYPASS_BIT_EXT,
+ eScalingLists = VK_VIDEO_ENCODE_H264_CAPABILITY_SCALING_LISTS_BIT_EXT,
+ eHrdCompliance = VK_VIDEO_ENCODE_H264_CAPABILITY_HRD_COMPLIANCE_BIT_EXT,
+ eChromaQpOffset = VK_VIDEO_ENCODE_H264_CAPABILITY_CHROMA_QP_OFFSET_BIT_EXT,
+ eSecondChromaQpOffset = VK_VIDEO_ENCODE_H264_CAPABILITY_SECOND_CHROMA_QP_OFFSET_BIT_EXT,
+ ePicInitQpMinus26 = VK_VIDEO_ENCODE_H264_CAPABILITY_PIC_INIT_QP_MINUS26_BIT_EXT,
+ eWeightedPred = VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_PRED_BIT_EXT,
+ eWeightedBipredExplicit = VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BIPRED_EXPLICIT_BIT_EXT,
+ eWeightedBipredImplicit = VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BIPRED_IMPLICIT_BIT_EXT,
+ eWeightedPredNoTable = VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_PRED_NO_TABLE_BIT_EXT,
+ eTransform8X8 = VK_VIDEO_ENCODE_H264_CAPABILITY_TRANSFORM_8X8_BIT_EXT,
+ eCabac = VK_VIDEO_ENCODE_H264_CAPABILITY_CABAC_BIT_EXT,
+ eCavlc = VK_VIDEO_ENCODE_H264_CAPABILITY_CAVLC_BIT_EXT,
+ eDeblockingFilterDisabled = VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_DISABLED_BIT_EXT,
+ eDeblockingFilterEnabled = VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_ENABLED_BIT_EXT,
+ eDeblockingFilterPartial = VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_PARTIAL_BIT_EXT,
+ eDisableDirectSpatialMvPred = VK_VIDEO_ENCODE_H264_CAPABILITY_DISABLE_DIRECT_SPATIAL_MV_PRED_BIT_EXT,
+ eMultipleSlicePerFrame = VK_VIDEO_ENCODE_H264_CAPABILITY_MULTIPLE_SLICE_PER_FRAME_BIT_EXT,
+ eSliceMbCount = VK_VIDEO_ENCODE_H264_CAPABILITY_SLICE_MB_COUNT_BIT_EXT,
+ eRowUnalignedSlice = VK_VIDEO_ENCODE_H264_CAPABILITY_ROW_UNALIGNED_SLICE_BIT_EXT,
+ eDifferentSliceType = VK_VIDEO_ENCODE_H264_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_EXT,
+ eBFrameInL1List = VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_EXT
};
- VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CapabilityFlagBitsEXT value )
- {
- switch ( value )
- {
- case VideoEncodeH264CapabilityFlagBitsEXT::eCabac: return "Cabac";
- case VideoEncodeH264CapabilityFlagBitsEXT::eCavlc: return "Cavlc";
- case VideoEncodeH264CapabilityFlagBitsEXT::eWeightedBiPredImplicit: return "WeightedBiPredImplicit";
- case VideoEncodeH264CapabilityFlagBitsEXT::eTransform8X8: return "Transform8X8";
- case VideoEncodeH264CapabilityFlagBitsEXT::eChromaQpOffset: return "ChromaQpOffset";
- case VideoEncodeH264CapabilityFlagBitsEXT::eSecondChromaQpOffset: return "SecondChromaQpOffset";
- case VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterDisabled: return "DeblockingFilterDisabled";
- case VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterEnabled: return "DeblockingFilterEnabled";
- case VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterPartial: return "DeblockingFilterPartial";
- case VideoEncodeH264CapabilityFlagBitsEXT::eMultipleSlicePerFrame: return "MultipleSlicePerFrame";
- case VideoEncodeH264CapabilityFlagBitsEXT::eEvenlyDistributedSliceSize: return "EvenlyDistributedSliceSize";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class VideoEncodeH264InputModeFlagBitsEXT : VkVideoEncodeH264InputModeFlagsEXT
{
eFrame = VK_VIDEO_ENCODE_H264_INPUT_MODE_FRAME_BIT_EXT,
@@ -6552,17 +3280,6 @@ namespace VULKAN_HPP_NAMESPACE
eNonVcl = VK_VIDEO_ENCODE_H264_INPUT_MODE_NON_VCL_BIT_EXT
};
- VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264InputModeFlagBitsEXT value )
- {
- switch ( value )
- {
- case VideoEncodeH264InputModeFlagBitsEXT::eFrame: return "Frame";
- case VideoEncodeH264InputModeFlagBitsEXT::eSlice: return "Slice";
- case VideoEncodeH264InputModeFlagBitsEXT::eNonVcl: return "NonVcl";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class VideoEncodeH264OutputModeFlagBitsEXT : VkVideoEncodeH264OutputModeFlagsEXT
{
eFrame = VK_VIDEO_ENCODE_H264_OUTPUT_MODE_FRAME_BIT_EXT,
@@ -6570,55 +3287,47 @@ namespace VULKAN_HPP_NAMESPACE
eNonVcl = VK_VIDEO_ENCODE_H264_OUTPUT_MODE_NON_VCL_BIT_EXT
};
- VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264OutputModeFlagBitsEXT value )
- {
- switch ( value )
- {
- case VideoEncodeH264OutputModeFlagBitsEXT::eFrame: return "Frame";
- case VideoEncodeH264OutputModeFlagBitsEXT::eSlice: return "Slice";
- case VideoEncodeH264OutputModeFlagBitsEXT::eNonVcl: return "NonVcl";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
- enum class VideoEncodeH264CreateFlagBitsEXT : VkVideoEncodeH264CreateFlagsEXT
- {
- eDefault = VK_VIDEO_ENCODE_H264_CREATE_DEFAULT_EXT,
- eReserved0 = VK_VIDEO_ENCODE_H264_CREATE_RESERVED_0_BIT_EXT
- };
-
- VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CreateFlagBitsEXT value )
- {
- switch ( value )
- {
- case VideoEncodeH264CreateFlagBitsEXT::eDefault: return "Default";
- case VideoEncodeH264CreateFlagBitsEXT::eReserved0: return "Reserved0";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
- enum class VideoEncodeH264RateControlStructureFlagBitsEXT : VkVideoEncodeH264RateControlStructureFlagsEXT
+ enum class VideoEncodeH264RateControlStructureEXT
{
eUnknown = VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT,
- eFlat = VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_FLAT_BIT_EXT,
- eDyadic = VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_DYADIC_BIT_EXT
+ eFlat = VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_FLAT_EXT,
+ eDyadic = VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_DYADIC_EXT
};
-
- VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264RateControlStructureFlagBitsEXT value )
- {
- switch ( value )
- {
- case VideoEncodeH264RateControlStructureFlagBitsEXT::eUnknown: return "Unknown";
- case VideoEncodeH264RateControlStructureFlagBitsEXT::eFlat: return "Flat";
- case VideoEncodeH264RateControlStructureFlagBitsEXT::eDyadic: return "Dyadic";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_EXT_video_encode_h265 ===
+ enum class VideoEncodeH265CapabilityFlagBitsEXT : VkVideoEncodeH265CapabilityFlagsEXT
+ {
+ eSeparateColourPlane = VK_VIDEO_ENCODE_H265_CAPABILITY_SEPARATE_COLOUR_PLANE_BIT_EXT,
+ eScalingLists = VK_VIDEO_ENCODE_H265_CAPABILITY_SCALING_LISTS_BIT_EXT,
+ eSampleAdaptiveOffsetEnabled = VK_VIDEO_ENCODE_H265_CAPABILITY_SAMPLE_ADAPTIVE_OFFSET_ENABLED_BIT_EXT,
+ ePcmEnable = VK_VIDEO_ENCODE_H265_CAPABILITY_PCM_ENABLE_BIT_EXT,
+ eSpsTemporalMvpEnabled = VK_VIDEO_ENCODE_H265_CAPABILITY_SPS_TEMPORAL_MVP_ENABLED_BIT_EXT,
+ eHrdCompliance = VK_VIDEO_ENCODE_H265_CAPABILITY_HRD_COMPLIANCE_BIT_EXT,
+ eInitQpMinus26 = VK_VIDEO_ENCODE_H265_CAPABILITY_INIT_QP_MINUS26_BIT_EXT,
+ eLog2ParallelMergeLevelMinus2 = VK_VIDEO_ENCODE_H265_CAPABILITY_LOG2_PARALLEL_MERGE_LEVEL_MINUS2_BIT_EXT,
+ eSignDataHidingEnabled = VK_VIDEO_ENCODE_H265_CAPABILITY_SIGN_DATA_HIDING_ENABLED_BIT_EXT,
+ eTransformSkipEnabled = VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSFORM_SKIP_ENABLED_BIT_EXT,
+ eTransformSkipDisabled = VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSFORM_SKIP_DISABLED_BIT_EXT,
+ ePpsSliceChromaQpOffsetsPresent = VK_VIDEO_ENCODE_H265_CAPABILITY_PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT_BIT_EXT,
+ eWeightedPred = VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_PRED_BIT_EXT,
+ eWeightedBipred = VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_BIPRED_BIT_EXT,
+ eWeightedPredNoTable = VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_PRED_NO_TABLE_BIT_EXT,
+ eTransquantBypassEnabled = VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSQUANT_BYPASS_ENABLED_BIT_EXT,
+ eEntropyCodingSyncEnabled = VK_VIDEO_ENCODE_H265_CAPABILITY_ENTROPY_CODING_SYNC_ENABLED_BIT_EXT,
+ eDeblockingFilterOverrideEnabled = VK_VIDEO_ENCODE_H265_CAPABILITY_DEBLOCKING_FILTER_OVERRIDE_ENABLED_BIT_EXT,
+ eMultipleTilePerFrame = VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILE_PER_FRAME_BIT_EXT,
+ eMultipleSlicePerTile = VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_SLICE_PER_TILE_BIT_EXT,
+ eMultipleTilePerSlice = VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILE_PER_SLICE_BIT_EXT,
+ eSliceSegmentCtbCount = VK_VIDEO_ENCODE_H265_CAPABILITY_SLICE_SEGMENT_CTB_COUNT_BIT_EXT,
+ eRowUnalignedSliceSegment = VK_VIDEO_ENCODE_H265_CAPABILITY_ROW_UNALIGNED_SLICE_SEGMENT_BIT_EXT,
+ eDependentSliceSegment = VK_VIDEO_ENCODE_H265_CAPABILITY_DEPENDENT_SLICE_SEGMENT_BIT_EXT,
+ eDifferentSliceType = VK_VIDEO_ENCODE_H265_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_EXT,
+ eBFrameInL1List = VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_EXT
+ };
+
enum class VideoEncodeH265InputModeFlagBitsEXT : VkVideoEncodeH265InputModeFlagsEXT
{
eFrame = VK_VIDEO_ENCODE_H265_INPUT_MODE_FRAME_BIT_EXT,
@@ -6626,17 +3335,6 @@ namespace VULKAN_HPP_NAMESPACE
eNonVcl = VK_VIDEO_ENCODE_H265_INPUT_MODE_NON_VCL_BIT_EXT
};
- VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265InputModeFlagBitsEXT value )
- {
- switch ( value )
- {
- case VideoEncodeH265InputModeFlagBitsEXT::eFrame: return "Frame";
- case VideoEncodeH265InputModeFlagBitsEXT::eSliceSegment: return "SliceSegment";
- case VideoEncodeH265InputModeFlagBitsEXT::eNonVcl: return "NonVcl";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class VideoEncodeH265OutputModeFlagBitsEXT : VkVideoEncodeH265OutputModeFlagsEXT
{
eFrame = VK_VIDEO_ENCODE_H265_OUTPUT_MODE_FRAME_BIT_EXT,
@@ -6644,72 +3342,27 @@ namespace VULKAN_HPP_NAMESPACE
eNonVcl = VK_VIDEO_ENCODE_H265_OUTPUT_MODE_NON_VCL_BIT_EXT
};
- VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265OutputModeFlagBitsEXT value )
- {
- switch ( value )
- {
- case VideoEncodeH265OutputModeFlagBitsEXT::eFrame: return "Frame";
- case VideoEncodeH265OutputModeFlagBitsEXT::eSliceSegment: return "SliceSegment";
- case VideoEncodeH265OutputModeFlagBitsEXT::eNonVcl: return "NonVcl";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class VideoEncodeH265CtbSizeFlagBitsEXT : VkVideoEncodeH265CtbSizeFlagsEXT
{
- e8 = VK_VIDEO_ENCODE_H265_CTB_SIZE_8_BIT_EXT,
e16 = VK_VIDEO_ENCODE_H265_CTB_SIZE_16_BIT_EXT,
e32 = VK_VIDEO_ENCODE_H265_CTB_SIZE_32_BIT_EXT,
e64 = VK_VIDEO_ENCODE_H265_CTB_SIZE_64_BIT_EXT
};
- VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CtbSizeFlagBitsEXT value )
- {
- switch ( value )
- {
- case VideoEncodeH265CtbSizeFlagBitsEXT::e8: return "8";
- case VideoEncodeH265CtbSizeFlagBitsEXT::e16: return "16";
- case VideoEncodeH265CtbSizeFlagBitsEXT::e32: return "32";
- case VideoEncodeH265CtbSizeFlagBitsEXT::e64: return "64";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
- enum class VideoEncodeH265RateControlStructureFlagBitsEXT : VkVideoEncodeH265RateControlStructureFlagsEXT
- {
- eUnknown = VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT,
- eFlat = VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_FLAT_BIT_EXT,
- eDyadic = VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_DYADIC_BIT_EXT
- };
-
- VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265RateControlStructureFlagBitsEXT value )
- {
- switch ( value )
- {
- case VideoEncodeH265RateControlStructureFlagBitsEXT::eUnknown: return "Unknown";
- case VideoEncodeH265RateControlStructureFlagBitsEXT::eFlat: return "Flat";
- case VideoEncodeH265RateControlStructureFlagBitsEXT::eDyadic: return "Dyadic";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
- enum class VideoEncodeH265CapabilityFlagBitsEXT : VkVideoEncodeH265CapabilityFlagsEXT
+ enum class VideoEncodeH265TransformBlockSizeFlagBitsEXT : VkVideoEncodeH265TransformBlockSizeFlagsEXT
{
+ e4 = VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_4_BIT_EXT,
+ e8 = VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_8_BIT_EXT,
+ e16 = VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_16_BIT_EXT,
+ e32 = VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_32_BIT_EXT
};
- VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CapabilityFlagBitsEXT )
- {
- return "(void)";
- }
-
- enum class VideoEncodeH265CreateFlagBitsEXT : VkVideoEncodeH265CreateFlagsEXT
+ enum class VideoEncodeH265RateControlStructureEXT
{
+ eUnknown = VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT,
+ eFlat = VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_FLAT_EXT,
+ eDyadic = VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_DYADIC_EXT
};
-
- VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CreateFlagBitsEXT )
- {
- return "(void)";
- }
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
@@ -6721,26 +3374,6 @@ namespace VULKAN_HPP_NAMESPACE
eInterlacedInterleavedLines = VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_INTERLEAVED_LINES_BIT_EXT,
eInterlacedSeparatePlanes = VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_SEPARATE_PLANES_BIT_EXT
};
-
- VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264PictureLayoutFlagBitsEXT value )
- {
- switch ( value )
- {
- case VideoDecodeH264PictureLayoutFlagBitsEXT::eProgressive: return "Progressive";
- case VideoDecodeH264PictureLayoutFlagBitsEXT::eInterlacedInterleavedLines: return "InterlacedInterleavedLines";
- case VideoDecodeH264PictureLayoutFlagBitsEXT::eInterlacedSeparatePlanes: return "InterlacedSeparatePlanes";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
- enum class VideoDecodeH264CreateFlagBitsEXT : VkVideoDecodeH264CreateFlagsEXT
- {
- };
-
- VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264CreateFlagBitsEXT )
- {
- return "(void)";
- }
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_AMD_shader_info ===
@@ -6752,28 +3385,12 @@ namespace VULKAN_HPP_NAMESPACE
eDisassembly = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD
};
- VULKAN_HPP_INLINE std::string to_string( ShaderInfoTypeAMD value )
- {
- switch ( value )
- {
- case ShaderInfoTypeAMD::eStatistics: return "Statistics";
- case ShaderInfoTypeAMD::eBinary: return "Binary";
- case ShaderInfoTypeAMD::eDisassembly: return "Disassembly";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
#if defined( VK_USE_PLATFORM_GGP )
//=== VK_GGP_stream_descriptor_surface ===
enum class StreamDescriptorSurfaceCreateFlagBitsGGP : VkStreamDescriptorSurfaceCreateFlagsGGP
{
};
-
- VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagBitsGGP )
- {
- return "(void)";
- }
#endif /*VK_USE_PLATFORM_GGP*/
//=== VK_NV_external_memory_capabilities ===
@@ -6786,18 +3403,6 @@ namespace VULKAN_HPP_NAMESPACE
eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV
};
- VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBitsNV value )
- {
- switch ( value )
- {
- case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32: return "OpaqueWin32";
- case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
- case ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image: return "D3D11Image";
- case ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt: return "D3D11ImageKmt";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class ExternalMemoryFeatureFlagBitsNV : VkExternalMemoryFeatureFlagsNV
{
eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV,
@@ -6805,17 +3410,6 @@ namespace VULKAN_HPP_NAMESPACE
eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV
};
- VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBitsNV value )
- {
- switch ( value )
- {
- case ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly: return "DedicatedOnly";
- case ExternalMemoryFeatureFlagBitsNV::eExportable: return "Exportable";
- case ExternalMemoryFeatureFlagBitsNV::eImportable: return "Importable";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
//=== VK_EXT_validation_flags ===
enum class ValidationCheckEXT
@@ -6824,28 +3418,31 @@ namespace VULKAN_HPP_NAMESPACE
eShaders = VK_VALIDATION_CHECK_SHADERS_EXT
};
- VULKAN_HPP_INLINE std::string to_string( ValidationCheckEXT value )
- {
- switch ( value )
- {
- case ValidationCheckEXT::eAll: return "All";
- case ValidationCheckEXT::eShaders: return "Shaders";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
#if defined( VK_USE_PLATFORM_VI_NN )
//=== VK_NN_vi_surface ===
enum class ViSurfaceCreateFlagBitsNN : VkViSurfaceCreateFlagsNN
{
};
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+ //=== VK_EXT_pipeline_robustness ===
- VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagBitsNN )
+ enum class PipelineRobustnessBufferBehaviorEXT
{
- return "(void)";
- }
-#endif /*VK_USE_PLATFORM_VI_NN*/
+ eDeviceDefault = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT_EXT,
+ eDisabled = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED_EXT,
+ eRobustBufferAccess = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT,
+ eRobustBufferAccess2 = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2_EXT
+ };
+
+ enum class PipelineRobustnessImageBehaviorEXT
+ {
+ eDeviceDefault = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT_EXT,
+ eDisabled = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED_EXT,
+ eRobustImageAccess = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_EXT,
+ eRobustImageAccess2 = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2_EXT
+ };
//=== VK_EXT_conditional_rendering ===
@@ -6854,15 +3451,6 @@ namespace VULKAN_HPP_NAMESPACE
eInverted = VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT
};
- VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagBitsEXT value )
- {
- switch ( value )
- {
- case ConditionalRenderingFlagBitsEXT::eInverted: return "Inverted";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
//=== VK_EXT_display_surface_counter ===
enum class SurfaceCounterFlagBitsEXT : VkSurfaceCounterFlagsEXT
@@ -6870,15 +3458,6 @@ namespace VULKAN_HPP_NAMESPACE
eVblank = VK_SURFACE_COUNTER_VBLANK_BIT_EXT
};
- VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagBitsEXT value )
- {
- switch ( value )
- {
- case SurfaceCounterFlagBitsEXT::eVblank: return "Vblank";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
//=== VK_EXT_display_control ===
enum class DisplayPowerStateEXT
@@ -6888,45 +3467,16 @@ namespace VULKAN_HPP_NAMESPACE
eOn = VK_DISPLAY_POWER_STATE_ON_EXT
};
- VULKAN_HPP_INLINE std::string to_string( DisplayPowerStateEXT value )
- {
- switch ( value )
- {
- case DisplayPowerStateEXT::eOff: return "Off";
- case DisplayPowerStateEXT::eSuspend: return "Suspend";
- case DisplayPowerStateEXT::eOn: return "On";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class DeviceEventTypeEXT
{
eDisplayHotplug = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT
};
- VULKAN_HPP_INLINE std::string to_string( DeviceEventTypeEXT value )
- {
- switch ( value )
- {
- case DeviceEventTypeEXT::eDisplayHotplug: return "DisplayHotplug";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class DisplayEventTypeEXT
{
eFirstPixelOut = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT
};
- VULKAN_HPP_INLINE std::string to_string( DisplayEventTypeEXT value )
- {
- switch ( value )
- {
- case DisplayEventTypeEXT::eFirstPixelOut: return "FirstPixelOut";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
//=== VK_NV_viewport_swizzle ===
enum class ViewportCoordinateSwizzleNV
@@ -6941,31 +3491,10 @@ namespace VULKAN_HPP_NAMESPACE
eNegativeW = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV
};
- VULKAN_HPP_INLINE std::string to_string( ViewportCoordinateSwizzleNV value )
- {
- switch ( value )
- {
- case ViewportCoordinateSwizzleNV::ePositiveX: return "PositiveX";
- case ViewportCoordinateSwizzleNV::eNegativeX: return "NegativeX";
- case ViewportCoordinateSwizzleNV::ePositiveY: return "PositiveY";
- case ViewportCoordinateSwizzleNV::eNegativeY: return "NegativeY";
- case ViewportCoordinateSwizzleNV::ePositiveZ: return "PositiveZ";
- case ViewportCoordinateSwizzleNV::eNegativeZ: return "NegativeZ";
- case ViewportCoordinateSwizzleNV::ePositiveW: return "PositiveW";
- case ViewportCoordinateSwizzleNV::eNegativeW: return "NegativeW";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class PipelineViewportSwizzleStateCreateFlagBitsNV : VkPipelineViewportSwizzleStateCreateFlagsNV
{
};
- VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagBitsNV )
- {
- return "(void)";
- }
-
//=== VK_EXT_discard_rectangles ===
enum class DiscardRectangleModeEXT
@@ -6974,25 +3503,10 @@ namespace VULKAN_HPP_NAMESPACE
eExclusive = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT
};
- VULKAN_HPP_INLINE std::string to_string( DiscardRectangleModeEXT value )
- {
- switch ( value )
- {
- case DiscardRectangleModeEXT::eInclusive: return "Inclusive";
- case DiscardRectangleModeEXT::eExclusive: return "Exclusive";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class PipelineDiscardRectangleStateCreateFlagBitsEXT : VkPipelineDiscardRectangleStateCreateFlagsEXT
{
};
- VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagBitsEXT )
- {
- return "(void)";
- }
-
//=== VK_EXT_conservative_rasterization ===
enum class ConservativeRasterizationModeEXT
@@ -7002,38 +3516,16 @@ namespace VULKAN_HPP_NAMESPACE
eUnderestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT
};
- VULKAN_HPP_INLINE std::string to_string( ConservativeRasterizationModeEXT value )
- {
- switch ( value )
- {
- case ConservativeRasterizationModeEXT::eDisabled: return "Disabled";
- case ConservativeRasterizationModeEXT::eOverestimate: return "Overestimate";
- case ConservativeRasterizationModeEXT::eUnderestimate: return "Underestimate";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
- enum class
- PipelineRasterizationConservativeStateCreateFlagBitsEXT : VkPipelineRasterizationConservativeStateCreateFlagsEXT
+ enum class PipelineRasterizationConservativeStateCreateFlagBitsEXT : VkPipelineRasterizationConservativeStateCreateFlagsEXT
{
};
- VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagBitsEXT )
- {
- return "(void)";
- }
-
//=== VK_EXT_depth_clip_enable ===
enum class PipelineRasterizationDepthClipStateCreateFlagBitsEXT : VkPipelineRasterizationDepthClipStateCreateFlagsEXT
{
};
- VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagBitsEXT )
- {
- return "(void)";
- }
-
//=== VK_KHR_performance_query ===
enum class PerformanceCounterDescriptionFlagBitsKHR : VkPerformanceCounterDescriptionFlagsKHR
@@ -7042,16 +3534,6 @@ namespace VULKAN_HPP_NAMESPACE
eConcurrentlyImpacted = VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR
};
- VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagBitsKHR value )
- {
- switch ( value )
- {
- case PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting: return "PerformanceImpacting";
- case PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted: return "ConcurrentlyImpacted";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class PerformanceCounterScopeKHR
{
eCommandBuffer = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR,
@@ -7062,17 +3544,6 @@ namespace VULKAN_HPP_NAMESPACE
eVkQueryScopeRenderPass = VK_QUERY_SCOPE_RENDER_PASS_KHR
};
- VULKAN_HPP_INLINE std::string to_string( PerformanceCounterScopeKHR value )
- {
- switch ( value )
- {
- case PerformanceCounterScopeKHR::eCommandBuffer: return "CommandBuffer";
- case PerformanceCounterScopeKHR::eRenderPass: return "RenderPass";
- case PerformanceCounterScopeKHR::eCommand: return "Command";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class PerformanceCounterStorageKHR
{
eInt32 = VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR,
@@ -7083,20 +3554,6 @@ namespace VULKAN_HPP_NAMESPACE
eFloat64 = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR
};
- VULKAN_HPP_INLINE std::string to_string( PerformanceCounterStorageKHR value )
- {
- switch ( value )
- {
- case PerformanceCounterStorageKHR::eInt32: return "Int32";
- case PerformanceCounterStorageKHR::eInt64: return "Int64";
- case PerformanceCounterStorageKHR::eUint32: return "Uint32";
- case PerformanceCounterStorageKHR::eUint64: return "Uint64";
- case PerformanceCounterStorageKHR::eFloat32: return "Float32";
- case PerformanceCounterStorageKHR::eFloat64: return "Float64";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class PerformanceCounterUnitKHR
{
eGeneric = VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR,
@@ -7112,45 +3569,16 @@ namespace VULKAN_HPP_NAMESPACE
eCycles = VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR
};
- VULKAN_HPP_INLINE std::string to_string( PerformanceCounterUnitKHR value )
- {
- switch ( value )
- {
- case PerformanceCounterUnitKHR::eGeneric: return "Generic";
- case PerformanceCounterUnitKHR::ePercentage: return "Percentage";
- case PerformanceCounterUnitKHR::eNanoseconds: return "Nanoseconds";
- case PerformanceCounterUnitKHR::eBytes: return "Bytes";
- case PerformanceCounterUnitKHR::eBytesPerSecond: return "BytesPerSecond";
- case PerformanceCounterUnitKHR::eKelvin: return "Kelvin";
- case PerformanceCounterUnitKHR::eWatts: return "Watts";
- case PerformanceCounterUnitKHR::eVolts: return "Volts";
- case PerformanceCounterUnitKHR::eAmps: return "Amps";
- case PerformanceCounterUnitKHR::eHertz: return "Hertz";
- case PerformanceCounterUnitKHR::eCycles: return "Cycles";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class AcquireProfilingLockFlagBitsKHR : VkAcquireProfilingLockFlagsKHR
{
};
- VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagBitsKHR )
- {
- return "(void)";
- }
-
#if defined( VK_USE_PLATFORM_IOS_MVK )
//=== VK_MVK_ios_surface ===
enum class IOSSurfaceCreateFlagBitsMVK : VkIOSSurfaceCreateFlagsMVK
{
};
-
- VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagBitsMVK )
- {
- return "(void)";
- }
#endif /*VK_USE_PLATFORM_IOS_MVK*/
#if defined( VK_USE_PLATFORM_MACOS_MVK )
@@ -7159,11 +3587,6 @@ namespace VULKAN_HPP_NAMESPACE
enum class MacOSSurfaceCreateFlagBitsMVK : VkMacOSSurfaceCreateFlagsMVK
{
};
-
- VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagBitsMVK )
- {
- return "(void)";
- }
#endif /*VK_USE_PLATFORM_MACOS_MVK*/
//=== VK_EXT_debug_utils ===
@@ -7176,54 +3599,22 @@ namespace VULKAN_HPP_NAMESPACE
eError = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT
};
- VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagBitsEXT value )
- {
- switch ( value )
- {
- case DebugUtilsMessageSeverityFlagBitsEXT::eVerbose: return "Verbose";
- case DebugUtilsMessageSeverityFlagBitsEXT::eInfo: return "Info";
- case DebugUtilsMessageSeverityFlagBitsEXT::eWarning: return "Warning";
- case DebugUtilsMessageSeverityFlagBitsEXT::eError: return "Error";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class DebugUtilsMessageTypeFlagBitsEXT : VkDebugUtilsMessageTypeFlagsEXT
{
- eGeneral = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT,
- eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT,
- ePerformance = VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT
+ eGeneral = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT,
+ eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT,
+ ePerformance = VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT,
+ eDeviceAddressBinding = VK_DEBUG_UTILS_MESSAGE_TYPE_DEVICE_ADDRESS_BINDING_BIT_EXT
};
- VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagBitsEXT value )
- {
- switch ( value )
- {
- case DebugUtilsMessageTypeFlagBitsEXT::eGeneral: return "General";
- case DebugUtilsMessageTypeFlagBitsEXT::eValidation: return "Validation";
- case DebugUtilsMessageTypeFlagBitsEXT::ePerformance: return "Performance";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class DebugUtilsMessengerCallbackDataFlagBitsEXT : VkDebugUtilsMessengerCallbackDataFlagsEXT
{
};
- VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagBitsEXT )
- {
- return "(void)";
- }
-
enum class DebugUtilsMessengerCreateFlagBitsEXT : VkDebugUtilsMessengerCreateFlagsEXT
{
};
- VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagBitsEXT )
- {
- return "(void)";
- }
-
//=== VK_EXT_blend_operation_advanced ===
enum class BlendOverlapEXT
@@ -7233,28 +3624,12 @@ namespace VULKAN_HPP_NAMESPACE
eConjoint = VK_BLEND_OVERLAP_CONJOINT_EXT
};
- VULKAN_HPP_INLINE std::string to_string( BlendOverlapEXT value )
- {
- switch ( value )
- {
- case BlendOverlapEXT::eUncorrelated: return "Uncorrelated";
- case BlendOverlapEXT::eDisjoint: return "Disjoint";
- case BlendOverlapEXT::eConjoint: return "Conjoint";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
//=== VK_NV_fragment_coverage_to_color ===
enum class PipelineCoverageToColorStateCreateFlagBitsNV : VkPipelineCoverageToColorStateCreateFlagsNV
{
};
- VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagBitsNV )
- {
- return "(void)";
- }
-
//=== VK_KHR_acceleration_structure ===
enum class AccelerationStructureTypeKHR
@@ -7265,17 +3640,6 @@ namespace VULKAN_HPP_NAMESPACE
};
using AccelerationStructureTypeNV = AccelerationStructureTypeKHR;
- VULKAN_HPP_INLINE std::string to_string( AccelerationStructureTypeKHR value )
- {
- switch ( value )
- {
- case AccelerationStructureTypeKHR::eTopLevel: return "TopLevel";
- case AccelerationStructureTypeKHR::eBottomLevel: return "BottomLevel";
- case AccelerationStructureTypeKHR::eGeneric: return "Generic";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class AccelerationStructureBuildTypeKHR
{
eHost = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR,
@@ -7283,17 +3647,6 @@ namespace VULKAN_HPP_NAMESPACE
eHostOrDevice = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR
};
- VULKAN_HPP_INLINE std::string to_string( AccelerationStructureBuildTypeKHR value )
- {
- switch ( value )
- {
- case AccelerationStructureBuildTypeKHR::eHost: return "Host";
- case AccelerationStructureBuildTypeKHR::eDevice: return "Device";
- case AccelerationStructureBuildTypeKHR::eHostOrDevice: return "HostOrDevice";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class GeometryFlagBitsKHR : VkGeometryFlagsKHR
{
eOpaque = VK_GEOMETRY_OPAQUE_BIT_KHR,
@@ -7301,65 +3654,34 @@ namespace VULKAN_HPP_NAMESPACE
};
using GeometryFlagBitsNV = GeometryFlagBitsKHR;
- VULKAN_HPP_INLINE std::string to_string( GeometryFlagBitsKHR value )
- {
- switch ( value )
- {
- case GeometryFlagBitsKHR::eOpaque: return "Opaque";
- case GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation: return "NoDuplicateAnyHitInvocation";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class GeometryInstanceFlagBitsKHR : VkGeometryInstanceFlagsKHR
{
eTriangleFacingCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR,
eTriangleFlipFacing = VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR,
eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR,
eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR,
+ eForceOpacityMicromap2StateEXT = VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_EXT,
+ eDisableOpacityMicromapsEXT = VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_EXT,
eTriangleCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV,
eTriangleFrontCounterclockwiseKHR = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR,
eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV
};
using GeometryInstanceFlagBitsNV = GeometryInstanceFlagBitsKHR;
- VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagBitsKHR value )
- {
- switch ( value )
- {
- case GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable: return "TriangleFacingCullDisable";
- case GeometryInstanceFlagBitsKHR::eTriangleFlipFacing: return "TriangleFlipFacing";
- case GeometryInstanceFlagBitsKHR::eForceOpaque: return "ForceOpaque";
- case GeometryInstanceFlagBitsKHR::eForceNoOpaque: return "ForceNoOpaque";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class BuildAccelerationStructureFlagBitsKHR : VkBuildAccelerationStructureFlagsKHR
{
- eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR,
- eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR,
- ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR,
- ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR,
- eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR,
- eMotionNV = VK_BUILD_ACCELERATION_STRUCTURE_MOTION_BIT_NV
+ eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR,
+ eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR,
+ ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR,
+ ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR,
+ eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR,
+ eMotionNV = VK_BUILD_ACCELERATION_STRUCTURE_MOTION_BIT_NV,
+ eAllowOpacityMicromapUpdateEXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_EXT,
+ eAllowDisableOpacityMicromapsEXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_EXT,
+ eAllowOpacityMicromapDataUpdateEXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT
};
using BuildAccelerationStructureFlagBitsNV = BuildAccelerationStructureFlagBitsKHR;
- VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagBitsKHR value )
- {
- switch ( value )
- {
- case BuildAccelerationStructureFlagBitsKHR::eAllowUpdate: return "AllowUpdate";
- case BuildAccelerationStructureFlagBitsKHR::eAllowCompaction: return "AllowCompaction";
- case BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace: return "PreferFastTrace";
- case BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild: return "PreferFastBuild";
- case BuildAccelerationStructureFlagBitsKHR::eLowMemory: return "LowMemory";
- case BuildAccelerationStructureFlagBitsKHR::eMotionNV: return "MotionNV";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class CopyAccelerationStructureModeKHR
{
eClone = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR,
@@ -7369,18 +3691,6 @@ namespace VULKAN_HPP_NAMESPACE
};
using CopyAccelerationStructureModeNV = CopyAccelerationStructureModeKHR;
- VULKAN_HPP_INLINE std::string to_string( CopyAccelerationStructureModeKHR value )
- {
- switch ( value )
- {
- case CopyAccelerationStructureModeKHR::eClone: return "Clone";
- case CopyAccelerationStructureModeKHR::eCompact: return "Compact";
- case CopyAccelerationStructureModeKHR::eSerialize: return "Serialize";
- case CopyAccelerationStructureModeKHR::eDeserialize: return "Deserialize";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class GeometryTypeKHR
{
eTriangles = VK_GEOMETRY_TYPE_TRIANGLES_KHR,
@@ -7389,65 +3699,24 @@ namespace VULKAN_HPP_NAMESPACE
};
using GeometryTypeNV = GeometryTypeKHR;
- VULKAN_HPP_INLINE std::string to_string( GeometryTypeKHR value )
- {
- switch ( value )
- {
- case GeometryTypeKHR::eTriangles: return "Triangles";
- case GeometryTypeKHR::eAabbs: return "Aabbs";
- case GeometryTypeKHR::eInstances: return "Instances";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class AccelerationStructureCompatibilityKHR
{
eCompatible = VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR,
eIncompatible = VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR
};
- VULKAN_HPP_INLINE std::string to_string( AccelerationStructureCompatibilityKHR value )
- {
- switch ( value )
- {
- case AccelerationStructureCompatibilityKHR::eCompatible: return "Compatible";
- case AccelerationStructureCompatibilityKHR::eIncompatible: return "Incompatible";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class AccelerationStructureCreateFlagBitsKHR : VkAccelerationStructureCreateFlagsKHR
{
eDeviceAddressCaptureReplay = VK_ACCELERATION_STRUCTURE_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR,
eMotionNV = VK_ACCELERATION_STRUCTURE_CREATE_MOTION_BIT_NV
};
- VULKAN_HPP_INLINE std::string to_string( AccelerationStructureCreateFlagBitsKHR value )
- {
- switch ( value )
- {
- case AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay";
- case AccelerationStructureCreateFlagBitsKHR::eMotionNV: return "MotionNV";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class BuildAccelerationStructureModeKHR
{
eBuild = VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR,
eUpdate = VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR
};
- VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureModeKHR value )
- {
- switch ( value )
- {
- case BuildAccelerationStructureModeKHR::eBuild: return "Build";
- case BuildAccelerationStructureModeKHR::eUpdate: return "Update";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
//=== VK_NV_framebuffer_mixed_samples ===
enum class CoverageModulationModeNV
@@ -7458,27 +3727,10 @@ namespace VULKAN_HPP_NAMESPACE
eRgba = VK_COVERAGE_MODULATION_MODE_RGBA_NV
};
- VULKAN_HPP_INLINE std::string to_string( CoverageModulationModeNV value )
- {
- switch ( value )
- {
- case CoverageModulationModeNV::eNone: return "None";
- case CoverageModulationModeNV::eRgb: return "Rgb";
- case CoverageModulationModeNV::eAlpha: return "Alpha";
- case CoverageModulationModeNV::eRgba: return "Rgba";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class PipelineCoverageModulationStateCreateFlagBitsNV : VkPipelineCoverageModulationStateCreateFlagsNV
{
};
- VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagBitsNV )
- {
- return "(void)";
- }
-
//=== VK_EXT_validation_cache ===
enum class ValidationCacheHeaderVersionEXT
@@ -7486,24 +3738,10 @@ namespace VULKAN_HPP_NAMESPACE
eOne = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT
};
- VULKAN_HPP_INLINE std::string to_string( ValidationCacheHeaderVersionEXT value )
- {
- switch ( value )
- {
- case ValidationCacheHeaderVersionEXT::eOne: return "One";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class ValidationCacheCreateFlagBitsEXT : VkValidationCacheCreateFlagsEXT
{
};
- VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagBitsEXT )
- {
- return "(void)";
- }
-
//=== VK_NV_shading_rate_image ===
enum class ShadingRatePaletteEntryNV
@@ -7522,26 +3760,6 @@ namespace VULKAN_HPP_NAMESPACE
e1InvocationPer4X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV
};
- VULKAN_HPP_INLINE std::string to_string( ShadingRatePaletteEntryNV value )
- {
- switch ( value )
- {
- case ShadingRatePaletteEntryNV::eNoInvocations: return "NoInvocations";
- case ShadingRatePaletteEntryNV::e16InvocationsPerPixel: return "16InvocationsPerPixel";
- case ShadingRatePaletteEntryNV::e8InvocationsPerPixel: return "8InvocationsPerPixel";
- case ShadingRatePaletteEntryNV::e4InvocationsPerPixel: return "4InvocationsPerPixel";
- case ShadingRatePaletteEntryNV::e2InvocationsPerPixel: return "2InvocationsPerPixel";
- case ShadingRatePaletteEntryNV::e1InvocationPerPixel: return "1InvocationPerPixel";
- case ShadingRatePaletteEntryNV::e1InvocationPer2X1Pixels: return "1InvocationPer2X1Pixels";
- case ShadingRatePaletteEntryNV::e1InvocationPer1X2Pixels: return "1InvocationPer1X2Pixels";
- case ShadingRatePaletteEntryNV::e1InvocationPer2X2Pixels: return "1InvocationPer2X2Pixels";
- case ShadingRatePaletteEntryNV::e1InvocationPer4X2Pixels: return "1InvocationPer4X2Pixels";
- case ShadingRatePaletteEntryNV::e1InvocationPer2X4Pixels: return "1InvocationPer2X4Pixels";
- case ShadingRatePaletteEntryNV::e1InvocationPer4X4Pixels: return "1InvocationPer4X4Pixels";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class CoarseSampleOrderTypeNV
{
eDefault = VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV,
@@ -7550,18 +3768,6 @@ namespace VULKAN_HPP_NAMESPACE
eSampleMajor = VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV
};
- VULKAN_HPP_INLINE std::string to_string( CoarseSampleOrderTypeNV value )
- {
- switch ( value )
- {
- case CoarseSampleOrderTypeNV::eDefault: return "Default";
- case CoarseSampleOrderTypeNV::eCustom: return "Custom";
- case CoarseSampleOrderTypeNV::ePixelMajor: return "PixelMajor";
- case CoarseSampleOrderTypeNV::eSampleMajor: return "SampleMajor";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
//=== VK_NV_ray_tracing ===
enum class AccelerationStructureMemoryRequirementsTypeNV
@@ -7571,28 +3777,12 @@ namespace VULKAN_HPP_NAMESPACE
eUpdateScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV
};
- VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMemoryRequirementsTypeNV value )
- {
- switch ( value )
- {
- case AccelerationStructureMemoryRequirementsTypeNV::eObject: return "Object";
- case AccelerationStructureMemoryRequirementsTypeNV::eBuildScratch: return "BuildScratch";
- case AccelerationStructureMemoryRequirementsTypeNV::eUpdateScratch: return "UpdateScratch";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
//=== VK_AMD_pipeline_compiler_control ===
enum class PipelineCompilerControlFlagBitsAMD : VkPipelineCompilerControlFlagsAMD
{
};
- VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagBitsAMD )
- {
- return "(void)";
- }
-
//=== VK_EXT_calibrated_timestamps ===
enum class TimeDomainEXT
@@ -7603,31 +3793,6 @@ namespace VULKAN_HPP_NAMESPACE
eQueryPerformanceCounter = VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT
};
- VULKAN_HPP_INLINE std::string to_string( TimeDomainEXT value )
- {
- switch ( value )
- {
- case TimeDomainEXT::eDevice: return "Device";
- case TimeDomainEXT::eClockMonotonic: return "ClockMonotonic";
- case TimeDomainEXT::eClockMonotonicRaw: return "ClockMonotonicRaw";
- case TimeDomainEXT::eQueryPerformanceCounter: return "QueryPerformanceCounter";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- //=== VK_EXT_video_decode_h265 ===
-
- enum class VideoDecodeH265CreateFlagBitsEXT : VkVideoDecodeH265CreateFlagsEXT
- {
- };
-
- VULKAN_HPP_INLINE std::string to_string( VideoDecodeH265CreateFlagBitsEXT )
- {
- return "(void)";
- }
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
//=== VK_KHR_global_priority ===
enum class QueueGlobalPriorityKHR
@@ -7639,18 +3804,6 @@ namespace VULKAN_HPP_NAMESPACE
};
using QueueGlobalPriorityEXT = QueueGlobalPriorityKHR;
- VULKAN_HPP_INLINE std::string to_string( QueueGlobalPriorityKHR value )
- {
- switch ( value )
- {
- case QueueGlobalPriorityKHR::eLow: return "Low";
- case QueueGlobalPriorityKHR::eMedium: return "Medium";
- case QueueGlobalPriorityKHR::eHigh: return "High";
- case QueueGlobalPriorityKHR::eRealtime: return "Realtime";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
//=== VK_AMD_memory_overallocation_behavior ===
enum class MemoryOverallocationBehaviorAMD
@@ -7660,81 +3813,30 @@ namespace VULKAN_HPP_NAMESPACE
eDisallowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD
};
- VULKAN_HPP_INLINE std::string to_string( MemoryOverallocationBehaviorAMD value )
- {
- switch ( value )
- {
- case MemoryOverallocationBehaviorAMD::eDefault: return "Default";
- case MemoryOverallocationBehaviorAMD::eAllowed: return "Allowed";
- case MemoryOverallocationBehaviorAMD::eDisallowed: return "Disallowed";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
//=== VK_INTEL_performance_query ===
enum class PerformanceConfigurationTypeINTEL
{
- eCommandQueueMetricsDiscoveryActivated =
- VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL
+ eCommandQueueMetricsDiscoveryActivated = VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL
};
- VULKAN_HPP_INLINE std::string to_string( PerformanceConfigurationTypeINTEL value )
- {
- switch ( value )
- {
- case PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated:
- return "CommandQueueMetricsDiscoveryActivated";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class QueryPoolSamplingModeINTEL
{
eManual = VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL
};
- VULKAN_HPP_INLINE std::string to_string( QueryPoolSamplingModeINTEL value )
- {
- switch ( value )
- {
- case QueryPoolSamplingModeINTEL::eManual: return "Manual";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class PerformanceOverrideTypeINTEL
{
eNullHardware = VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL,
eFlushGpuCaches = VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL
};
- VULKAN_HPP_INLINE std::string to_string( PerformanceOverrideTypeINTEL value )
- {
- switch ( value )
- {
- case PerformanceOverrideTypeINTEL::eNullHardware: return "NullHardware";
- case PerformanceOverrideTypeINTEL::eFlushGpuCaches: return "FlushGpuCaches";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class PerformanceParameterTypeINTEL
{
eHwCountersSupported = VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL,
eStreamMarkerValidBits = VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL
};
- VULKAN_HPP_INLINE std::string to_string( PerformanceParameterTypeINTEL value )
- {
- switch ( value )
- {
- case PerformanceParameterTypeINTEL::eHwCountersSupported: return "HwCountersSupported";
- case PerformanceParameterTypeINTEL::eStreamMarkerValidBits: return "StreamMarkerValidBits";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class PerformanceValueTypeINTEL
{
eUint32 = VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL,
@@ -7744,30 +3846,12 @@ namespace VULKAN_HPP_NAMESPACE
eString = VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL
};
- VULKAN_HPP_INLINE std::string to_string( PerformanceValueTypeINTEL value )
- {
- switch ( value )
- {
- case PerformanceValueTypeINTEL::eUint32: return "Uint32";
- case PerformanceValueTypeINTEL::eUint64: return "Uint64";
- case PerformanceValueTypeINTEL::eFloat: return "Float";
- case PerformanceValueTypeINTEL::eBool: return "Bool";
- case PerformanceValueTypeINTEL::eString: return "String";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
#if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_imagepipe_surface ===
enum class ImagePipeSurfaceCreateFlagBitsFUCHSIA : VkImagePipeSurfaceCreateFlagsFUCHSIA
{
};
-
- VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagBitsFUCHSIA )
- {
- return "(void)";
- }
#endif /*VK_USE_PLATFORM_FUCHSIA*/
#if defined( VK_USE_PLATFORM_METAL_EXT )
@@ -7776,11 +3860,6 @@ namespace VULKAN_HPP_NAMESPACE
enum class MetalSurfaceCreateFlagBitsEXT : VkMetalSurfaceCreateFlagsEXT
{
};
-
- VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagBitsEXT )
- {
- return "(void)";
- }
#endif /*VK_USE_PLATFORM_METAL_EXT*/
//=== VK_KHR_fragment_shading_rate ===
@@ -7794,30 +3873,12 @@ namespace VULKAN_HPP_NAMESPACE
eMul = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR
};
- VULKAN_HPP_INLINE std::string to_string( FragmentShadingRateCombinerOpKHR value )
- {
- switch ( value )
- {
- case FragmentShadingRateCombinerOpKHR::eKeep: return "Keep";
- case FragmentShadingRateCombinerOpKHR::eReplace: return "Replace";
- case FragmentShadingRateCombinerOpKHR::eMin: return "Min";
- case FragmentShadingRateCombinerOpKHR::eMax: return "Max";
- case FragmentShadingRateCombinerOpKHR::eMul: return "Mul";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
//=== VK_AMD_shader_core_properties2 ===
enum class ShaderCorePropertiesFlagBitsAMD : VkShaderCorePropertiesFlagsAMD
{
};
- VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagBitsAMD )
- {
- return "(void)";
- }
-
//=== VK_EXT_validation_features ===
enum class ValidationFeatureEnableEXT
@@ -7829,19 +3890,6 @@ namespace VULKAN_HPP_NAMESPACE
eSynchronizationValidation = VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT
};
- VULKAN_HPP_INLINE std::string to_string( ValidationFeatureEnableEXT value )
- {
- switch ( value )
- {
- case ValidationFeatureEnableEXT::eGpuAssisted: return "GpuAssisted";
- case ValidationFeatureEnableEXT::eGpuAssistedReserveBindingSlot: return "GpuAssistedReserveBindingSlot";
- case ValidationFeatureEnableEXT::eBestPractices: return "BestPractices";
- case ValidationFeatureEnableEXT::eDebugPrintf: return "DebugPrintf";
- case ValidationFeatureEnableEXT::eSynchronizationValidation: return "SynchronizationValidation";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class ValidationFeatureDisableEXT
{
eAll = VK_VALIDATION_FEATURE_DISABLE_ALL_EXT,
@@ -7854,22 +3902,6 @@ namespace VULKAN_HPP_NAMESPACE
eShaderValidationCache = VK_VALIDATION_FEATURE_DISABLE_SHADER_VALIDATION_CACHE_EXT
};
- VULKAN_HPP_INLINE std::string to_string( ValidationFeatureDisableEXT value )
- {
- switch ( value )
- {
- case ValidationFeatureDisableEXT::eAll: return "All";
- case ValidationFeatureDisableEXT::eShaders: return "Shaders";
- case ValidationFeatureDisableEXT::eThreadSafety: return "ThreadSafety";
- case ValidationFeatureDisableEXT::eApiParameters: return "ApiParameters";
- case ValidationFeatureDisableEXT::eObjectLifetimes: return "ObjectLifetimes";
- case ValidationFeatureDisableEXT::eCoreChecks: return "CoreChecks";
- case ValidationFeatureDisableEXT::eUniqueHandles: return "UniqueHandles";
- case ValidationFeatureDisableEXT::eShaderValidationCache: return "ShaderValidationCache";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
//=== VK_NV_cooperative_matrix ===
enum class ScopeNV
@@ -7880,18 +3912,6 @@ namespace VULKAN_HPP_NAMESPACE
eQueueFamily = VK_SCOPE_QUEUE_FAMILY_NV
};
- VULKAN_HPP_INLINE std::string to_string( ScopeNV value )
- {
- switch ( value )
- {
- case ScopeNV::eDevice: return "Device";
- case ScopeNV::eWorkgroup: return "Workgroup";
- case ScopeNV::eSubgroup: return "Subgroup";
- case ScopeNV::eQueueFamily: return "QueueFamily";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class ComponentTypeNV
{
eFloat16 = VK_COMPONENT_TYPE_FLOAT16_NV,
@@ -7907,25 +3927,6 @@ namespace VULKAN_HPP_NAMESPACE
eUint64 = VK_COMPONENT_TYPE_UINT64_NV
};
- VULKAN_HPP_INLINE std::string to_string( ComponentTypeNV value )
- {
- switch ( value )
- {
- case ComponentTypeNV::eFloat16: return "Float16";
- case ComponentTypeNV::eFloat32: return "Float32";
- case ComponentTypeNV::eFloat64: return "Float64";
- case ComponentTypeNV::eSint8: return "Sint8";
- case ComponentTypeNV::eSint16: return "Sint16";
- case ComponentTypeNV::eSint32: return "Sint32";
- case ComponentTypeNV::eSint64: return "Sint64";
- case ComponentTypeNV::eUint8: return "Uint8";
- case ComponentTypeNV::eUint16: return "Uint16";
- case ComponentTypeNV::eUint32: return "Uint32";
- case ComponentTypeNV::eUint64: return "Uint64";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
//=== VK_NV_coverage_reduction_mode ===
enum class CoverageReductionModeNV
@@ -7934,25 +3935,10 @@ namespace VULKAN_HPP_NAMESPACE
eTruncate = VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV
};
- VULKAN_HPP_INLINE std::string to_string( CoverageReductionModeNV value )
- {
- switch ( value )
- {
- case CoverageReductionModeNV::eMerge: return "Merge";
- case CoverageReductionModeNV::eTruncate: return "Truncate";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class PipelineCoverageReductionStateCreateFlagBitsNV : VkPipelineCoverageReductionStateCreateFlagsNV
{
};
- VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagBitsNV )
- {
- return "(void)";
- }
-
//=== VK_EXT_provoking_vertex ===
enum class ProvokingVertexModeEXT
@@ -7961,16 +3947,6 @@ namespace VULKAN_HPP_NAMESPACE
eLastVertex = VK_PROVOKING_VERTEX_MODE_LAST_VERTEX_EXT
};
- VULKAN_HPP_INLINE std::string to_string( ProvokingVertexModeEXT value )
- {
- switch ( value )
- {
- case ProvokingVertexModeEXT::eFirstVertex: return "FirstVertex";
- case ProvokingVertexModeEXT::eLastVertex: return "LastVertex";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
#if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_EXT_full_screen_exclusive ===
@@ -7981,18 +3957,6 @@ namespace VULKAN_HPP_NAMESPACE
eDisallowed = VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT,
eApplicationControlled = VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT
};
-
- VULKAN_HPP_INLINE std::string to_string( FullScreenExclusiveEXT value )
- {
- switch ( value )
- {
- case FullScreenExclusiveEXT::eDefault: return "Default";
- case FullScreenExclusiveEXT::eAllowed: return "Allowed";
- case FullScreenExclusiveEXT::eDisallowed: return "Disallowed";
- case FullScreenExclusiveEXT::eApplicationControlled: return "ApplicationControlled";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_EXT_headless_surface ===
@@ -8001,11 +3965,6 @@ namespace VULKAN_HPP_NAMESPACE
{
};
- VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagBitsEXT )
- {
- return "(void)";
- }
-
//=== VK_EXT_line_rasterization ===
enum class LineRasterizationModeEXT
@@ -8016,18 +3975,6 @@ namespace VULKAN_HPP_NAMESPACE
eRectangularSmooth = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT
};
- VULKAN_HPP_INLINE std::string to_string( LineRasterizationModeEXT value )
- {
- switch ( value )
- {
- case LineRasterizationModeEXT::eDefault: return "Default";
- case LineRasterizationModeEXT::eRectangular: return "Rectangular";
- case LineRasterizationModeEXT::eBresenham: return "Bresenham";
- case LineRasterizationModeEXT::eRectangularSmooth: return "RectangularSmooth";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
//=== VK_KHR_pipeline_executable_properties ===
enum class PipelineExecutableStatisticFormatKHR
@@ -8038,18 +3985,6 @@ namespace VULKAN_HPP_NAMESPACE
eFloat64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR
};
- VULKAN_HPP_INLINE std::string to_string( PipelineExecutableStatisticFormatKHR value )
- {
- switch ( value )
- {
- case PipelineExecutableStatisticFormatKHR::eBool32: return "Bool32";
- case PipelineExecutableStatisticFormatKHR::eInt64: return "Int64";
- case PipelineExecutableStatisticFormatKHR::eUint64: return "Uint64";
- case PipelineExecutableStatisticFormatKHR::eFloat64: return "Float64";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
//=== VK_NV_device_generated_commands ===
enum class IndirectStateFlagBitsNV : VkIndirectStateFlagsNV
@@ -8057,43 +3992,19 @@ namespace VULKAN_HPP_NAMESPACE
eFlagFrontface = VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV
};
- VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagBitsNV value )
- {
- switch ( value )
- {
- case IndirectStateFlagBitsNV::eFlagFrontface: return "FlagFrontface";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class IndirectCommandsTokenTypeNV
{
- eShaderGroup = VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV,
- eStateFlags = VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV,
- eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV,
- eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV,
- ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV,
- eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV,
- eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV,
- eDrawTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV
+ eShaderGroup = VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV,
+ eStateFlags = VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV,
+ eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV,
+ eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV,
+ ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV,
+ eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV,
+ eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV,
+ eDrawTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV,
+ eDrawMeshTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV
};
- VULKAN_HPP_INLINE std::string to_string( IndirectCommandsTokenTypeNV value )
- {
- switch ( value )
- {
- case IndirectCommandsTokenTypeNV::eShaderGroup: return "ShaderGroup";
- case IndirectCommandsTokenTypeNV::eStateFlags: return "StateFlags";
- case IndirectCommandsTokenTypeNV::eIndexBuffer: return "IndexBuffer";
- case IndirectCommandsTokenTypeNV::eVertexBuffer: return "VertexBuffer";
- case IndirectCommandsTokenTypeNV::ePushConstant: return "PushConstant";
- case IndirectCommandsTokenTypeNV::eDrawIndexed: return "DrawIndexed";
- case IndirectCommandsTokenTypeNV::eDraw: return "Draw";
- case IndirectCommandsTokenTypeNV::eDrawTasks: return "DrawTasks";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class IndirectCommandsLayoutUsageFlagBitsNV : VkIndirectCommandsLayoutUsageFlagsNV
{
eExplicitPreprocess = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV,
@@ -8101,17 +4012,6 @@ namespace VULKAN_HPP_NAMESPACE
eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV
};
- VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagBitsNV value )
- {
- switch ( value )
- {
- case IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess: return "ExplicitPreprocess";
- case IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences: return "IndexedSequences";
- case IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences: return "UnorderedSequences";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
//=== VK_EXT_device_memory_report ===
enum class DeviceMemoryReportEventTypeEXT
@@ -8123,28 +4023,10 @@ namespace VULKAN_HPP_NAMESPACE
eAllocationFailed = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATION_FAILED_EXT
};
- VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportEventTypeEXT value )
- {
- switch ( value )
- {
- case DeviceMemoryReportEventTypeEXT::eAllocate: return "Allocate";
- case DeviceMemoryReportEventTypeEXT::eFree: return "Free";
- case DeviceMemoryReportEventTypeEXT::eImport: return "Import";
- case DeviceMemoryReportEventTypeEXT::eUnimport: return "Unimport";
- case DeviceMemoryReportEventTypeEXT::eAllocationFailed: return "AllocationFailed";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class DeviceMemoryReportFlagBitsEXT : VkDeviceMemoryReportFlagsEXT
{
};
- VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportFlagBitsEXT )
- {
- return "(void)";
- }
-
//=== VK_EXT_pipeline_creation_cache_control ===
enum class PipelineCacheCreateFlagBits : VkPipelineCacheCreateFlags
@@ -8153,49 +4035,39 @@ namespace VULKAN_HPP_NAMESPACE
eExternallySynchronizedEXT = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT
};
- VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlagBits value )
- {
- switch ( value )
- {
- case PipelineCacheCreateFlagBits::eExternallySynchronized: return "ExternallySynchronized";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_video_encode_queue ===
- enum class VideoEncodeFlagBitsKHR : VkVideoEncodeFlagsKHR
+ enum class VideoEncodeCapabilityFlagBitsKHR : VkVideoEncodeCapabilityFlagsKHR
{
- eDefault = VK_VIDEO_ENCODE_DEFAULT_KHR,
- eReserved0 = VK_VIDEO_ENCODE_RESERVED_0_BIT_KHR
+ ePrecedingExternallyEncodedBytes = VK_VIDEO_ENCODE_CAPABILITY_PRECEDING_EXTERNALLY_ENCODED_BYTES_BIT_KHR
};
- VULKAN_HPP_INLINE std::string to_string( VideoEncodeFlagBitsKHR value )
+ enum class VideoEncodeUsageFlagBitsKHR : VkVideoEncodeUsageFlagsKHR
{
- switch ( value )
- {
- case VideoEncodeFlagBitsKHR::eDefault: return "Default";
- case VideoEncodeFlagBitsKHR::eReserved0: return "Reserved0";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
+ eDefault = VK_VIDEO_ENCODE_USAGE_DEFAULT_KHR,
+ eTranscoding = VK_VIDEO_ENCODE_USAGE_TRANSCODING_BIT_KHR,
+ eStreaming = VK_VIDEO_ENCODE_USAGE_STREAMING_BIT_KHR,
+ eRecording = VK_VIDEO_ENCODE_USAGE_RECORDING_BIT_KHR,
+ eConferencing = VK_VIDEO_ENCODE_USAGE_CONFERENCING_BIT_KHR
+ };
- enum class VideoEncodeRateControlFlagBitsKHR : VkVideoEncodeRateControlFlagsKHR
+ enum class VideoEncodeContentFlagBitsKHR : VkVideoEncodeContentFlagsKHR
{
- eDefault = VK_VIDEO_ENCODE_RATE_CONTROL_DEFAULT_KHR,
- eReserved0 = VK_VIDEO_ENCODE_RATE_CONTROL_RESERVED_0_BIT_KHR
+ eDefault = VK_VIDEO_ENCODE_CONTENT_DEFAULT_KHR,
+ eCamera = VK_VIDEO_ENCODE_CONTENT_CAMERA_BIT_KHR,
+ eDesktop = VK_VIDEO_ENCODE_CONTENT_DESKTOP_BIT_KHR,
+ eRendered = VK_VIDEO_ENCODE_CONTENT_RENDERED_BIT_KHR
};
- VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlFlagBitsKHR value )
+ enum class VideoEncodeTuningModeKHR
{
- switch ( value )
- {
- case VideoEncodeRateControlFlagBitsKHR::eDefault: return "Default";
- case VideoEncodeRateControlFlagBitsKHR::eReserved0: return "Reserved0";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
+ eDefault = VK_VIDEO_ENCODE_TUNING_MODE_DEFAULT_KHR,
+ eHighQuality = VK_VIDEO_ENCODE_TUNING_MODE_HIGH_QUALITY_KHR,
+ eLowLatency = VK_VIDEO_ENCODE_TUNING_MODE_LOW_LATENCY_KHR,
+ eUltraLowLatency = VK_VIDEO_ENCODE_TUNING_MODE_ULTRA_LOW_LATENCY_KHR,
+ eLossless = VK_VIDEO_ENCODE_TUNING_MODE_LOSSLESS_KHR
+ };
enum class VideoEncodeRateControlModeFlagBitsKHR : VkVideoEncodeRateControlModeFlagsKHR
{
@@ -8204,16 +4076,13 @@ namespace VULKAN_HPP_NAMESPACE
eVbr = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR
};
- VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlModeFlagBitsKHR value )
+ enum class VideoEncodeFlagBitsKHR : VkVideoEncodeFlagsKHR
{
- switch ( value )
- {
- case VideoEncodeRateControlModeFlagBitsKHR::eNone: return "None";
- case VideoEncodeRateControlModeFlagBitsKHR::eCbr: return "Cbr";
- case VideoEncodeRateControlModeFlagBitsKHR::eVbr: return "Vbr";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
+ };
+
+ enum class VideoEncodeRateControlFlagBitsKHR : VkVideoEncodeRateControlFlagsKHR
+ {
+ };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_NV_device_diagnostics_config ===
@@ -8222,19 +4091,38 @@ namespace VULKAN_HPP_NAMESPACE
{
eEnableShaderDebugInfo = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV,
eEnableResourceTracking = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV,
- eEnableAutomaticCheckpoints = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV
+ eEnableAutomaticCheckpoints = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV,
+ eEnableShaderErrorReporting = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_ERROR_REPORTING_BIT_NV
};
- VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagBitsNV value )
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ //=== VK_EXT_metal_objects ===
+
+ enum class ExportMetalObjectTypeFlagBitsEXT : VkExportMetalObjectTypeFlagsEXT
{
- switch ( value )
- {
- case DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo: return "EnableShaderDebugInfo";
- case DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking: return "EnableResourceTracking";
- case DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints: return "EnableAutomaticCheckpoints";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
+ eMetalDevice = VK_EXPORT_METAL_OBJECT_TYPE_METAL_DEVICE_BIT_EXT,
+ eMetalCommandQueue = VK_EXPORT_METAL_OBJECT_TYPE_METAL_COMMAND_QUEUE_BIT_EXT,
+ eMetalBuffer = VK_EXPORT_METAL_OBJECT_TYPE_METAL_BUFFER_BIT_EXT,
+ eMetalTexture = VK_EXPORT_METAL_OBJECT_TYPE_METAL_TEXTURE_BIT_EXT,
+ eMetalIosurface = VK_EXPORT_METAL_OBJECT_TYPE_METAL_IOSURFACE_BIT_EXT,
+ eMetalSharedEvent = VK_EXPORT_METAL_OBJECT_TYPE_METAL_SHARED_EVENT_BIT_EXT
+ };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+ //=== VK_EXT_graphics_pipeline_library ===
+
+ enum class GraphicsPipelineLibraryFlagBitsEXT : VkGraphicsPipelineLibraryFlagsEXT
+ {
+ eVertexInputInterface = VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT,
+ ePreRasterizationShaders = VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT,
+ eFragmentShader = VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT,
+ eFragmentOutputInterface = VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT
+ };
+
+ enum class PipelineLayoutCreateFlagBits : VkPipelineLayoutCreateFlags
+ {
+ eIndependentSetsEXT = VK_PIPELINE_LAYOUT_CREATE_INDEPENDENT_SETS_BIT_EXT
+ };
//=== VK_NV_fragment_shading_rate_enums ===
@@ -8254,42 +4142,12 @@ namespace VULKAN_HPP_NAMESPACE
eNoInvocations = VK_FRAGMENT_SHADING_RATE_NO_INVOCATIONS_NV
};
- VULKAN_HPP_INLINE std::string to_string( FragmentShadingRateNV value )
- {
- switch ( value )
- {
- case FragmentShadingRateNV::e1InvocationPerPixel: return "1InvocationPerPixel";
- case FragmentShadingRateNV::e1InvocationPer1X2Pixels: return "1InvocationPer1X2Pixels";
- case FragmentShadingRateNV::e1InvocationPer2X1Pixels: return "1InvocationPer2X1Pixels";
- case FragmentShadingRateNV::e1InvocationPer2X2Pixels: return "1InvocationPer2X2Pixels";
- case FragmentShadingRateNV::e1InvocationPer2X4Pixels: return "1InvocationPer2X4Pixels";
- case FragmentShadingRateNV::e1InvocationPer4X2Pixels: return "1InvocationPer4X2Pixels";
- case FragmentShadingRateNV::e1InvocationPer4X4Pixels: return "1InvocationPer4X4Pixels";
- case FragmentShadingRateNV::e2InvocationsPerPixel: return "2InvocationsPerPixel";
- case FragmentShadingRateNV::e4InvocationsPerPixel: return "4InvocationsPerPixel";
- case FragmentShadingRateNV::e8InvocationsPerPixel: return "8InvocationsPerPixel";
- case FragmentShadingRateNV::e16InvocationsPerPixel: return "16InvocationsPerPixel";
- case FragmentShadingRateNV::eNoInvocations: return "NoInvocations";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class FragmentShadingRateTypeNV
{
eFragmentSize = VK_FRAGMENT_SHADING_RATE_TYPE_FRAGMENT_SIZE_NV,
eEnums = VK_FRAGMENT_SHADING_RATE_TYPE_ENUMS_NV
};
- VULKAN_HPP_INLINE std::string to_string( FragmentShadingRateTypeNV value )
- {
- switch ( value )
- {
- case FragmentShadingRateTypeNV::eFragmentSize: return "FragmentSize";
- case FragmentShadingRateTypeNV::eEnums: return "Enums";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
//=== VK_NV_ray_tracing_motion_blur ===
enum class AccelerationStructureMotionInstanceTypeNV
@@ -8299,72 +4157,70 @@ namespace VULKAN_HPP_NAMESPACE
eSrtMotion = VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_SRT_MOTION_NV
};
- VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMotionInstanceTypeNV value )
- {
- switch ( value )
- {
- case AccelerationStructureMotionInstanceTypeNV::eStatic: return "Static";
- case AccelerationStructureMotionInstanceTypeNV::eMatrixMotion: return "MatrixMotion";
- case AccelerationStructureMotionInstanceTypeNV::eSrtMotion: return "SrtMotion";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class AccelerationStructureMotionInfoFlagBitsNV : VkAccelerationStructureMotionInfoFlagsNV
{
};
- VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMotionInfoFlagBitsNV )
- {
- return "(void)";
- }
-
enum class AccelerationStructureMotionInstanceFlagBitsNV : VkAccelerationStructureMotionInstanceFlagsNV
{
};
- VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMotionInstanceFlagBitsNV )
- {
- return "(void)";
- }
-
- //=== VK_ARM_rasterization_order_attachment_access ===
+ //=== VK_EXT_image_compression_control ===
- enum class PipelineColorBlendStateCreateFlagBits : VkPipelineColorBlendStateCreateFlags
+ enum class ImageCompressionFlagBitsEXT : VkImageCompressionFlagsEXT
{
- eRasterizationOrderAttachmentAccessARM =
- VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_ARM
+ eDefault = VK_IMAGE_COMPRESSION_DEFAULT_EXT,
+ eFixedRateDefault = VK_IMAGE_COMPRESSION_FIXED_RATE_DEFAULT_EXT,
+ eFixedRateExplicit = VK_IMAGE_COMPRESSION_FIXED_RATE_EXPLICIT_EXT,
+ eDisabled = VK_IMAGE_COMPRESSION_DISABLED_EXT
};
- VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlagBits value )
+ enum class ImageCompressionFixedRateFlagBitsEXT : VkImageCompressionFixedRateFlagsEXT
{
- switch ( value )
- {
- case PipelineColorBlendStateCreateFlagBits::eRasterizationOrderAttachmentAccessARM:
- return "RasterizationOrderAttachmentAccessARM";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
+ eNone = VK_IMAGE_COMPRESSION_FIXED_RATE_NONE_EXT,
+ e1Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_1BPC_BIT_EXT,
+ e2Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_2BPC_BIT_EXT,
+ e3Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_3BPC_BIT_EXT,
+ e4Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_4BPC_BIT_EXT,
+ e5Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_5BPC_BIT_EXT,
+ e6Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_6BPC_BIT_EXT,
+ e7Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_7BPC_BIT_EXT,
+ e8Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_8BPC_BIT_EXT,
+ e9Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_9BPC_BIT_EXT,
+ e10Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_10BPC_BIT_EXT,
+ e11Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_11BPC_BIT_EXT,
+ e12Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_12BPC_BIT_EXT,
+ e13Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_13BPC_BIT_EXT,
+ e14Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_14BPC_BIT_EXT,
+ e15Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_15BPC_BIT_EXT,
+ e16Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_16BPC_BIT_EXT,
+ e17Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_17BPC_BIT_EXT,
+ e18Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_18BPC_BIT_EXT,
+ e19Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_19BPC_BIT_EXT,
+ e20Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_20BPC_BIT_EXT,
+ e21Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_21BPC_BIT_EXT,
+ e22Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_22BPC_BIT_EXT,
+ e23Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_23BPC_BIT_EXT,
+ e24Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_24BPC_BIT_EXT
+ };
- enum class PipelineDepthStencilStateCreateFlagBits : VkPipelineDepthStencilStateCreateFlags
+ //=== VK_EXT_device_fault ===
+
+ enum class DeviceFaultAddressTypeEXT
{
- eRasterizationOrderAttachmentDepthAccessARM =
- VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM,
- eRasterizationOrderAttachmentStencilAccessARM =
- VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM
+ eNone = VK_DEVICE_FAULT_ADDRESS_TYPE_NONE_EXT,
+ eReadInvalid = VK_DEVICE_FAULT_ADDRESS_TYPE_READ_INVALID_EXT,
+ eWriteInvalid = VK_DEVICE_FAULT_ADDRESS_TYPE_WRITE_INVALID_EXT,
+ eExecuteInvalid = VK_DEVICE_FAULT_ADDRESS_TYPE_EXECUTE_INVALID_EXT,
+ eInstructionPointerUnknown = VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_UNKNOWN_EXT,
+ eInstructionPointerInvalid = VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_INVALID_EXT,
+ eInstructionPointerFault = VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_FAULT_EXT
};
- VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlagBits value )
+ enum class DeviceFaultVendorBinaryHeaderVersionEXT
{
- switch ( value )
- {
- case PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentDepthAccessARM:
- return "RasterizationOrderAttachmentDepthAccessARM";
- case PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentStencilAccessARM:
- return "RasterizationOrderAttachmentStencilAccessARM";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
+ eOne = VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_ONE_EXT
+ };
#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
//=== VK_EXT_directfb_surface ===
@@ -8372,11 +4228,6 @@ namespace VULKAN_HPP_NAMESPACE
enum class DirectFBSurfaceCreateFlagBitsEXT : VkDirectFBSurfaceCreateFlagsEXT
{
};
-
- VULKAN_HPP_INLINE std::string to_string( DirectFBSurfaceCreateFlagBitsEXT )
- {
- return "(void)";
- }
#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
//=== VK_KHR_ray_tracing_pipeline ===
@@ -8389,17 +4240,6 @@ namespace VULKAN_HPP_NAMESPACE
};
using RayTracingShaderGroupTypeNV = RayTracingShaderGroupTypeKHR;
- VULKAN_HPP_INLINE std::string to_string( RayTracingShaderGroupTypeKHR value )
- {
- switch ( value )
- {
- case RayTracingShaderGroupTypeKHR::eGeneral: return "General";
- case RayTracingShaderGroupTypeKHR::eTrianglesHitGroup: return "TrianglesHitGroup";
- case RayTracingShaderGroupTypeKHR::eProceduralHitGroup: return "ProceduralHitGroup";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class ShaderGroupShaderKHR
{
eGeneral = VK_SHADER_GROUP_SHADER_GENERAL_KHR,
@@ -8408,17 +4248,18 @@ namespace VULKAN_HPP_NAMESPACE
eIntersection = VK_SHADER_GROUP_SHADER_INTERSECTION_KHR
};
- VULKAN_HPP_INLINE std::string to_string( ShaderGroupShaderKHR value )
+ //=== VK_EXT_device_address_binding_report ===
+
+ enum class DeviceAddressBindingFlagBitsEXT : VkDeviceAddressBindingFlagsEXT
{
- switch ( value )
- {
- case ShaderGroupShaderKHR::eGeneral: return "General";
- case ShaderGroupShaderKHR::eClosestHit: return "ClosestHit";
- case ShaderGroupShaderKHR::eAnyHit: return "AnyHit";
- case ShaderGroupShaderKHR::eIntersection: return "Intersection";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
+ eInternalObject = VK_DEVICE_ADDRESS_BINDING_INTERNAL_OBJECT_BIT_EXT
+ };
+
+ enum class DeviceAddressBindingTypeEXT
+ {
+ eBind = VK_DEVICE_ADDRESS_BINDING_TYPE_BIND_EXT,
+ eUnbind = VK_DEVICE_ADDRESS_BINDING_TYPE_UNBIND_EXT
+ };
#if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
@@ -8432,27 +4273,9 @@ namespace VULKAN_HPP_NAMESPACE
eProtectedOptional = VK_IMAGE_CONSTRAINTS_INFO_PROTECTED_OPTIONAL_FUCHSIA
};
- VULKAN_HPP_INLINE std::string to_string( ImageConstraintsInfoFlagBitsFUCHSIA value )
- {
- switch ( value )
- {
- case ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadRarely: return "CpuReadRarely";
- case ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadOften: return "CpuReadOften";
- case ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteRarely: return "CpuWriteRarely";
- case ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteOften: return "CpuWriteOften";
- case ImageConstraintsInfoFlagBitsFUCHSIA::eProtectedOptional: return "ProtectedOptional";
- default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
- }
- }
-
enum class ImageFormatConstraintsFlagBitsFUCHSIA : VkImageFormatConstraintsFlagsFUCHSIA
{
};
-
- VULKAN_HPP_INLINE std::string to_string( ImageFormatConstraintsFlagBitsFUCHSIA )
- {
- return "(void)";
- }
#endif /*VK_USE_PLATFORM_FUCHSIA*/
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
@@ -8461,3316 +4284,150 @@ namespace VULKAN_HPP_NAMESPACE
enum class ScreenSurfaceCreateFlagBitsQNX : VkScreenSurfaceCreateFlagsQNX
{
};
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
- VULKAN_HPP_INLINE std::string to_string( ScreenSurfaceCreateFlagBitsQNX )
+ //=== VK_EXT_opacity_micromap ===
+
+ enum class MicromapTypeEXT
{
- return "(void)";
- }
-#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+ eOpacityMicromap = VK_MICROMAP_TYPE_OPACITY_MICROMAP_EXT
+ };
- template <ObjectType value>
- struct cpp_type
- {};
+ enum class BuildMicromapFlagBitsEXT : VkBuildMicromapFlagsEXT
+ {
+ ePreferFastTrace = VK_BUILD_MICROMAP_PREFER_FAST_TRACE_BIT_EXT,
+ ePreferFastBuild = VK_BUILD_MICROMAP_PREFER_FAST_BUILD_BIT_EXT,
+ eAllowCompaction = VK_BUILD_MICROMAP_ALLOW_COMPACTION_BIT_EXT
+ };
- //=====================
- //=== Format Traits ===
- //=====================
+ enum class CopyMicromapModeEXT
+ {
+ eClone = VK_COPY_MICROMAP_MODE_CLONE_EXT,
+ eSerialize = VK_COPY_MICROMAP_MODE_SERIALIZE_EXT,
+ eDeserialize = VK_COPY_MICROMAP_MODE_DESERIALIZE_EXT,
+ eCompact = VK_COPY_MICROMAP_MODE_COMPACT_EXT
+ };
- // The texel block size in bytes.
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t blockSize( VULKAN_HPP_NAMESPACE::Format format )
+ enum class MicromapCreateFlagBitsEXT : VkMicromapCreateFlagsEXT
{
- switch ( format )
- {
- case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8Uint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8Sint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR16Uint: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR16Sint: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: return 6;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: return 6;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: return 6;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: return 6;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: return 6;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: return 6;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: return 6;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eR32Uint: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR32Sint: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: return 12;
- case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: return 12;
- case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: return 12;
- case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eR64Uint: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eR64Sint: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: return 24;
- case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: return 24;
- case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: return 24;
- case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: return 32;
- case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: return 32;
- case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: return 32;
- case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eS8Uint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: return 5;
- case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 6;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 6;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 6;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 6;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 6;
- case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 6;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 6;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 6;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 6;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 6;
- case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 6;
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 6;
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 6;
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 6;
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 6;
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 6;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 6;
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 6;
- case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return 8;
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return 8;
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return 8;
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return 8;
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return 8;
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 8;
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 8;
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 8;
-
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- }
-
- // The number of texels in a texel block.
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t texelsPerBlock( VULKAN_HPP_NAMESPACE::Format format )
- {
- switch ( format )
- {
- case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8Uint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8Sint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16Uint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16Sint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR32Uint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR32Sint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR64Uint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR64Sint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eS8Uint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return 20;
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return 20;
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return 25;
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return 25;
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return 30;
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return 30;
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return 36;
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return 36;
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return 40;
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return 40;
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return 48;
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return 48;
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return 64;
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return 64;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return 50;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return 50;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return 60;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return 60;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return 80;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return 80;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return 100;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return 100;
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return 120;
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return 120;
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return 144;
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return 144;
- case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return 20;
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return 25;
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return 30;
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return 36;
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return 40;
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return 48;
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return 64;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return 50;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return 60;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return 80;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return 100;
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return 120;
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return 144;
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return 1;
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return 1;
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return 1;
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return 1;
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return 1;
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 1;
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 1;
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 1;
-
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- }
-
- // The three-dimensional extent of a texel block.
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 std::array<uint8_t, 3> blockExtent( VULKAN_HPP_NAMESPACE::Format format )
- {
- switch ( format )
- {
- case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return { { 5, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return { { 5, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return { { 5, 5, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return { { 5, 5, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return { { 6, 5, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return { { 6, 5, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return { { 6, 6, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return { { 6, 6, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return { { 8, 5, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return { { 8, 5, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return { { 8, 6, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return { { 8, 6, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return { { 8, 8, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return { { 8, 8, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return { { 10, 5, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return { { 10, 5, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return { { 10, 6, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return { { 10, 6, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return { { 10, 8, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return { { 10, 8, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return { { 10, 10, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return { { 10, 10, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return { { 12, 10, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return { { 12, 10, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return { { 12, 12, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return { { 12, 12, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return { { 2, 1, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return { { 2, 1, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return { { 2, 1, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return { { 2, 1, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return { { 2, 1, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return { { 2, 1, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return { { 2, 1, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return { { 2, 1, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return { { 5, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return { { 5, 5, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return { { 6, 5, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return { { 6, 6, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return { { 8, 5, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return { { 8, 6, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return { { 8, 8, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return { { 10, 5, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return { { 10, 6, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return { { 10, 8, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return { { 10, 10, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return { { 12, 10, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return { { 12, 12, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return { { 8, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return { { 8, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return { { 8, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return { { 4, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return { { 8, 4, 1 } };
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return { { 4, 4, 1 } };
-
- default: return { { 1, 1, 1 } };
- }
- }
-
- // A textual description of the compression scheme, or an empty string if it is not compressed
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 char const * compressionScheme( VULKAN_HPP_NAMESPACE::Format format )
- {
- switch ( format )
- {
- case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return "BC";
- case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return "BC";
- case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return "BC";
- case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return "BC";
- case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return "BC";
- case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return "BC";
- case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return "BC";
- case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return "BC";
- case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return "BC";
- case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return "BC";
- case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return "BC";
- case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return "BC";
- case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return "BC";
- case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return "BC";
- case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return "BC";
- case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return "BC";
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return "ETC2";
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return "ETC2";
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return "ETC2";
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return "ETC2";
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return "ETC2";
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return "ETC2";
- case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return "EAC";
- case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return "EAC";
- case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return "EAC";
- case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return "EAC";
- case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return "ASTC LDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return "ASTC LDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return "ASTC LDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return "ASTC LDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return "ASTC LDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return "ASTC LDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return "ASTC LDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return "ASTC LDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return "ASTC LDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return "ASTC LDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return "ASTC LDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return "ASTC LDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return "ASTC LDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return "ASTC LDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return "ASTC LDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return "ASTC LDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return "ASTC LDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return "ASTC LDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return "ASTC LDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return "ASTC LDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return "ASTC LDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return "ASTC LDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return "ASTC LDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return "ASTC LDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return "ASTC LDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return "ASTC LDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return "ASTC LDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return "ASTC LDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return "ASTC HDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return "ASTC HDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return "ASTC HDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return "ASTC HDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return "ASTC HDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return "ASTC HDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return "ASTC HDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return "ASTC HDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return "ASTC HDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return "ASTC HDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return "ASTC HDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return "ASTC HDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return "ASTC HDR";
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return "ASTC HDR";
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return "PVRTC";
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return "PVRTC";
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return "PVRTC";
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return "PVRTC";
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return "PVRTC";
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return "PVRTC";
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return "PVRTC";
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return "PVRTC";
-
- default: return "";
- }
- }
-
- // True, if this format is a compressed one.
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 bool isCompressed( VULKAN_HPP_NAMESPACE::Format format )
- {
- return ( *VULKAN_HPP_NAMESPACE::compressionScheme( format ) != 0 );
- }
-
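A minimal usage sketch of compressionScheme() and isCompressed() as defined above, assuming the traits header is included as <vulkan/vulkan_format_traits.hpp> and the build uses C++14 or newer so that VULKAN_HPP_CONSTEXPR_14 expands to constexpr (both are assumptions, not shown in this diff):

  #include <vulkan/vulkan_format_traits.hpp>

  // BC7 reports the "BC" scheme, so it counts as compressed; plain RGBA8 reports "" and does not.
  static_assert( VULKAN_HPP_NAMESPACE::isCompressed( VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock ), "BC7 is block-compressed" );
  static_assert( !VULKAN_HPP_NAMESPACE::isCompressed( VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm ), "RGBA8 is uncompressed" );
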
- // The number of bits into which the format is packed. A single image element in this format
- // can be stored in the same space as a scalar type of this bit width.
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t packed( VULKAN_HPP_NAMESPACE::Format format )
- {
- switch ( format )
- {
- case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 8;
- case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 32;
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 32;
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 32;
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 32;
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 32;
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 32;
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 32;
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 32;
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 32;
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 32;
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 32;
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 32;
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 32;
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 32;
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 32;
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 32;
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 32;
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 32;
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 32;
- case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 32;
- case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 32;
- case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 32;
- case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 16;
- case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 16;
-
- default: return 0;
- }
- }
-
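A similar sketch for packed(): the return value is the width of the scalar that holds one packed texel, or 0 for non-packed formats (same assumptions as the sketch above):

  // 32: an A2B10G10R10 texel fits in a single 32-bit scalar; 0: RGBA8 is not a packed format.
  static_assert( VULKAN_HPP_NAMESPACE::packed( VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32 ) == 32, "packed into 32 bits" );
  static_assert( VULKAN_HPP_NAMESPACE::packed( VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm ) == 0, "not a packed format" );
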
- // The number of components of this format.
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t componentCount( VULKAN_HPP_NAMESPACE::Format format )
- {
- switch ( format )
- {
- case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8Uint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8Sint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16Uint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16Sint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR32Uint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR32Sint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR64Uint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR64Sint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eS8Uint: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 1;
- case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return 4;
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return 4;
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return 4;
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return 4;
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return 4;
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return 4;
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 4;
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 4;
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 4;
-
- default: return 0;
- }
- }
-
- // True, if the components of this format are compressed, otherwise false.
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 bool componentsAreCompressed( VULKAN_HPP_NAMESPACE::Format format )
- {
- switch ( format )
- {
- case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock:
- case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock:
- case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock:
- case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock:
- case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock:
- case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock:
- case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock:
- case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock:
- case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock:
- case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock:
- case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock:
- case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock:
- case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock:
- case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock:
- case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock:
- case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock:
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock:
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock:
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock:
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock:
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock:
- case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock:
- case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock:
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG:
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG:
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG:
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG:
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG:
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG:
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG:
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return true;
- default: return false;
- }
- }
-
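The per-component queries combine naturally; hasAtLeastBitsPerComponent below is a hypothetical helper sketched under the same assumptions as the earlier examples, using componentCount(), componentsAreCompressed(), and the componentBits() function that follows:

  #include <cstdint>
  #include <vulkan/vulkan_format_traits.hpp>

  // True if every component of 'format' is stored with at least 'bits' bits.
  // Per the comments above, per-component bit counts are only reported for formats
  // whose components are not block-compressed, so those formats return false here.
  inline bool hasAtLeastBitsPerComponent( VULKAN_HPP_NAMESPACE::Format format, uint8_t bits )
  {
    if ( VULKAN_HPP_NAMESPACE::componentsAreCompressed( format ) )
    {
      return false;
    }
    for ( uint8_t c = 0; c < VULKAN_HPP_NAMESPACE::componentCount( format ); ++c )
    {
      if ( VULKAN_HPP_NAMESPACE::componentBits( format, c ) < bits )
      {
        return false;
      }
    }
    return true;
  }
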
- // The number of bits in this component, if not compressed, otherwise 0.
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t componentBits( VULKAN_HPP_NAMESPACE::Format format,
- uint8_t component )
- {
- switch ( format )
- {
- case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8:
- switch ( component )
- {
- case 0: return 4;
- case 1: return 4;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16:
- switch ( component )
- {
- case 0: return 4;
- case 1: return 4;
- case 2: return 4;
- case 3: return 4;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16:
- switch ( component )
- {
- case 0: return 4;
- case 1: return 4;
- case 2: return 4;
- case 3: return 4;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16:
- switch ( component )
- {
- case 0: return 5;
- case 1: return 6;
- case 2: return 5;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16:
- switch ( component )
- {
- case 0: return 5;
- case 1: return 6;
- case 2: return 5;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16:
- switch ( component )
- {
- case 0: return 5;
- case 1: return 5;
- case 2: return 5;
- case 3: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16:
- switch ( component )
- {
- case 0: return 5;
- case 1: return 5;
- case 2: return 5;
- case 3: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16:
- switch ( component )
- {
- case 0: return 1;
- case 1: return 5;
- case 2: return 5;
- case 3: return 5;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR8Unorm:
- switch ( component )
- {
- case 0: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR8Snorm:
- switch ( component )
- {
- case 0: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled:
- switch ( component )
- {
- case 0: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled:
- switch ( component )
- {
- case 0: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR8Uint:
- switch ( component )
- {
- case 0: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR8Sint:
- switch ( component )
- {
- case 0: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR8Srgb:
- switch ( component )
- {
- case 0: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- case 3: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- case 3: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- case 3: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- case 3: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- case 3: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- case 3: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- case 3: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- case 3: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- case 3: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- case 3: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- case 3: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- case 3: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- case 3: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- case 3: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- case 3: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- case 3: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- case 3: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- case 3: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- case 3: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- case 3: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- case 3: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32:
- switch ( component )
- {
- case 0: return 2;
- case 1: return 10;
- case 2: return 10;
- case 3: return 10;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32:
- switch ( component )
- {
- case 0: return 2;
- case 1: return 10;
- case 2: return 10;
- case 3: return 10;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32:
- switch ( component )
- {
- case 0: return 2;
- case 1: return 10;
- case 2: return 10;
- case 3: return 10;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32:
- switch ( component )
- {
- case 0: return 2;
- case 1: return 10;
- case 2: return 10;
- case 3: return 10;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32:
- switch ( component )
- {
- case 0: return 2;
- case 1: return 10;
- case 2: return 10;
- case 3: return 10;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32:
- switch ( component )
- {
- case 0: return 2;
- case 1: return 10;
- case 2: return 10;
- case 3: return 10;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32:
- switch ( component )
- {
- case 0: return 2;
- case 1: return 10;
- case 2: return 10;
- case 3: return 10;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32:
- switch ( component )
- {
- case 0: return 2;
- case 1: return 10;
- case 2: return 10;
- case 3: return 10;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32:
- switch ( component )
- {
- case 0: return 2;
- case 1: return 10;
- case 2: return 10;
- case 3: return 10;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32:
- switch ( component )
- {
- case 0: return 2;
- case 1: return 10;
- case 2: return 10;
- case 3: return 10;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32:
- switch ( component )
- {
- case 0: return 2;
- case 1: return 10;
- case 2: return 10;
- case 3: return 10;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32:
- switch ( component )
- {
- case 0: return 2;
- case 1: return 10;
- case 2: return 10;
- case 3: return 10;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR16Unorm:
- switch ( component )
- {
- case 0: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR16Snorm:
- switch ( component )
- {
- case 0: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled:
- switch ( component )
- {
- case 0: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled:
- switch ( component )
- {
- case 0: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR16Uint:
- switch ( component )
- {
- case 0: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR16Sint:
- switch ( component )
- {
- case 0: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat:
- switch ( component )
- {
- case 0: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 16;
- case 2: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 16;
- case 2: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 16;
- case 2: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 16;
- case 2: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 16;
- case 2: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 16;
- case 2: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 16;
- case 2: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 16;
- case 2: return 16;
- case 3: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 16;
- case 2: return 16;
- case 3: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 16;
- case 2: return 16;
- case 3: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 16;
- case 2: return 16;
- case 3: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 16;
- case 2: return 16;
- case 3: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 16;
- case 2: return 16;
- case 3: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 16;
- case 2: return 16;
- case 3: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR32Uint:
- switch ( component )
- {
- case 0: return 32;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR32Sint:
- switch ( component )
- {
- case 0: return 32;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat:
- switch ( component )
- {
- case 0: return 32;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint:
- switch ( component )
- {
- case 0: return 32;
- case 1: return 32;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint:
- switch ( component )
- {
- case 0: return 32;
- case 1: return 32;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat:
- switch ( component )
- {
- case 0: return 32;
- case 1: return 32;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint:
- switch ( component )
- {
- case 0: return 32;
- case 1: return 32;
- case 2: return 32;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint:
- switch ( component )
- {
- case 0: return 32;
- case 1: return 32;
- case 2: return 32;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat:
- switch ( component )
- {
- case 0: return 32;
- case 1: return 32;
- case 2: return 32;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint:
- switch ( component )
- {
- case 0: return 32;
- case 1: return 32;
- case 2: return 32;
- case 3: return 32;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint:
- switch ( component )
- {
- case 0: return 32;
- case 1: return 32;
- case 2: return 32;
- case 3: return 32;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat:
- switch ( component )
- {
- case 0: return 32;
- case 1: return 32;
- case 2: return 32;
- case 3: return 32;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR64Uint:
- switch ( component )
- {
- case 0: return 64;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR64Sint:
- switch ( component )
- {
- case 0: return 64;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat:
- switch ( component )
- {
- case 0: return 64;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint:
- switch ( component )
- {
- case 0: return 64;
- case 1: return 64;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint:
- switch ( component )
- {
- case 0: return 64;
- case 1: return 64;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat:
- switch ( component )
- {
- case 0: return 64;
- case 1: return 64;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint:
- switch ( component )
- {
- case 0: return 64;
- case 1: return 64;
- case 2: return 64;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint:
- switch ( component )
- {
- case 0: return 64;
- case 1: return 64;
- case 2: return 64;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat:
- switch ( component )
- {
- case 0: return 64;
- case 1: return 64;
- case 2: return 64;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint:
- switch ( component )
- {
- case 0: return 64;
- case 1: return 64;
- case 2: return 64;
- case 3: return 64;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint:
- switch ( component )
- {
- case 0: return 64;
- case 1: return 64;
- case 2: return 64;
- case 3: return 64;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat:
- switch ( component )
- {
- case 0: return 64;
- case 1: return 64;
- case 2: return 64;
- case 3: return 64;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32:
- switch ( component )
- {
- case 0: return 10;
- case 1: return 11;
- case 2: return 10;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32:
- switch ( component )
- {
- case 0: return 9;
- case 1: return 9;
- case 2: return 9;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eD16Unorm:
- switch ( component )
- {
- case 0: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32:
- switch ( component )
- {
- case 0: return 24;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat:
- switch ( component )
- {
- case 0: return 32;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eS8Uint:
- switch ( component )
- {
- case 0: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint:
- switch ( component )
- {
- case 0: return 24;
- case 1: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint:
- switch ( component )
- {
- case 0: return 32;
- case 1: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock:
- switch ( component )
- {
- case 0: return 11;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock:
- switch ( component )
- {
- case 0: return 11;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock:
- switch ( component )
- {
- case 0: return 11;
- case 1: return 11;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock:
- switch ( component )
- {
- case 0: return 11;
- case 1: return 11;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- case 3: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- case 3: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16:
- switch ( component )
- {
- case 0: return 10;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16:
- switch ( component )
- {
- case 0: return 10;
- case 1: return 10;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16:
- switch ( component )
- {
- case 0: return 10;
- case 1: return 10;
- case 2: return 10;
- case 3: return 10;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16:
- switch ( component )
- {
- case 0: return 10;
- case 1: return 10;
- case 2: return 10;
- case 3: return 10;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16:
- switch ( component )
- {
- case 0: return 10;
- case 1: return 10;
- case 2: return 10;
- case 3: return 10;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16:
- switch ( component )
- {
- case 0: return 10;
- case 1: return 10;
- case 2: return 10;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16:
- switch ( component )
- {
- case 0: return 10;
- case 1: return 10;
- case 2: return 10;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16:
- switch ( component )
- {
- case 0: return 10;
- case 1: return 10;
- case 2: return 10;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16:
- switch ( component )
- {
- case 0: return 10;
- case 1: return 10;
- case 2: return 10;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16:
- switch ( component )
- {
- case 0: return 10;
- case 1: return 10;
- case 2: return 10;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16:
- switch ( component )
- {
- case 0: return 12;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16:
- switch ( component )
- {
- case 0: return 12;
- case 1: return 12;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16:
- switch ( component )
- {
- case 0: return 12;
- case 1: return 12;
- case 2: return 12;
- case 3: return 12;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16:
- switch ( component )
- {
- case 0: return 12;
- case 1: return 12;
- case 2: return 12;
- case 3: return 12;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16:
- switch ( component )
- {
- case 0: return 12;
- case 1: return 12;
- case 2: return 12;
- case 3: return 12;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16:
- switch ( component )
- {
- case 0: return 12;
- case 1: return 12;
- case 2: return 12;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16:
- switch ( component )
- {
- case 0: return 12;
- case 1: return 12;
- case 2: return 12;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16:
- switch ( component )
- {
- case 0: return 12;
- case 1: return 12;
- case 2: return 12;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16:
- switch ( component )
- {
- case 0: return 12;
- case 1: return 12;
- case 2: return 12;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16:
- switch ( component )
- {
- case 0: return 12;
- case 1: return 12;
- case 2: return 12;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 16;
- case 2: return 16;
- case 3: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 16;
- case 2: return 16;
- case 3: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 16;
- case 2: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 16;
- case 2: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 16;
- case 2: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 16;
- case 2: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 16;
- case 2: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm:
- switch ( component )
- {
- case 0: return 8;
- case 1: return 8;
- case 2: return 8;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16:
- switch ( component )
- {
- case 0: return 10;
- case 1: return 10;
- case 2: return 10;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16:
- switch ( component )
- {
- case 0: return 12;
- case 1: return 12;
- case 2: return 12;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm:
- switch ( component )
- {
- case 0: return 16;
- case 1: return 16;
- case 2: return 16;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16:
- switch ( component )
- {
- case 0: return 4;
- case 1: return 4;
- case 2: return 4;
- case 3: return 4;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16:
- switch ( component )
- {
- case 0: return 4;
- case 1: return 4;
- case 2: return 4;
- case 3: return 4;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
-
- default: return 0;
- }
- }
-
- // The plane this component lies in.
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t componentPlaneIndex( VULKAN_HPP_NAMESPACE::Format format,
- uint8_t component )
- {
- switch ( format )
- {
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm:
- switch ( component )
- {
- case 0: return 0;
- case 1: return 1;
- case 2: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm:
- switch ( component )
- {
- case 0: return 0;
- case 1: return 1;
- case 2: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm:
- switch ( component )
- {
- case 0: return 0;
- case 1: return 1;
- case 2: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm:
- switch ( component )
- {
- case 0: return 0;
- case 1: return 1;
- case 2: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm:
- switch ( component )
- {
- case 0: return 0;
- case 1: return 1;
- case 2: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16:
- switch ( component )
- {
- case 0: return 0;
- case 1: return 1;
- case 2: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16:
- switch ( component )
- {
- case 0: return 0;
- case 1: return 1;
- case 2: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16:
- switch ( component )
- {
- case 0: return 0;
- case 1: return 1;
- case 2: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16:
- switch ( component )
- {
- case 0: return 0;
- case 1: return 1;
- case 2: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16:
- switch ( component )
- {
- case 0: return 0;
- case 1: return 1;
- case 2: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16:
- switch ( component )
- {
- case 0: return 0;
- case 1: return 1;
- case 2: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16:
- switch ( component )
- {
- case 0: return 0;
- case 1: return 1;
- case 2: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16:
- switch ( component )
- {
- case 0: return 0;
- case 1: return 1;
- case 2: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16:
- switch ( component )
- {
- case 0: return 0;
- case 1: return 1;
- case 2: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16:
- switch ( component )
- {
- case 0: return 0;
- case 1: return 1;
- case 2: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm:
- switch ( component )
- {
- case 0: return 0;
- case 1: return 1;
- case 2: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm:
- switch ( component )
- {
- case 0: return 0;
- case 1: return 1;
- case 2: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm:
- switch ( component )
- {
- case 0: return 0;
- case 1: return 1;
- case 2: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm:
- switch ( component )
- {
- case 0: return 0;
- case 1: return 1;
- case 2: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm:
- switch ( component )
- {
- case 0: return 0;
- case 1: return 1;
- case 2: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm:
- switch ( component )
- {
- case 0: return 0;
- case 1: return 1;
- case 2: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16:
- switch ( component )
- {
- case 0: return 0;
- case 1: return 1;
- case 2: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16:
- switch ( component )
- {
- case 0: return 0;
- case 1: return 1;
- case 2: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm:
- switch ( component )
- {
- case 0: return 0;
- case 1: return 1;
- case 2: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 0;
- }
-
- default: return 0;
- }
- }
-
- // The number of image planes of this format.
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t planeCount( VULKAN_HPP_NAMESPACE::Format format )
- {
- switch ( format )
- {
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 3;
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 2;
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 2;
-
- default: return 1;
- }
- }
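      // Illustrative usage sketch (example values only; not part of the generated header): the
      // helpers above map each component of a multi-planar format to its backing plane.
      VULKAN_HPP_NAMESPACE::Format fmt = VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm;
      uint8_t planes      = VULKAN_HPP_NAMESPACE::planeCount( fmt );              // 2 planes: luma + interleaved chroma
      uint8_t chromaPlane = VULKAN_HPP_NAMESPACE::componentPlaneIndex( fmt, 2 );  // component 2 (R/Cr) lives in plane 1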
-
- // The single-plane format that this plane is compatible with.
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 VULKAN_HPP_NAMESPACE::Format
- planeCompatibleFormat( VULKAN_HPP_NAMESPACE::Format format, uint8_t plane )
- {
- switch ( format )
- {
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm:
- switch ( plane )
- {
- case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
- case 1: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
- case 2: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
- default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm:
- switch ( plane )
- {
- case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
- case 1: return VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm;
- default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm:
- switch ( plane )
- {
- case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
- case 1: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
- case 2: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
- default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm:
- switch ( plane )
- {
- case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
- case 1: return VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm;
- default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm:
- switch ( plane )
- {
- case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
- case 1: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
- case 2: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
- default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16:
- switch ( plane )
- {
- case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
- case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
- case 2: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
- default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16:
- switch ( plane )
- {
- case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
- case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16;
- default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16:
- switch ( plane )
- {
- case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
- case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
- case 2: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
- default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16:
- switch ( plane )
- {
- case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
- case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16;
- default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16:
- switch ( plane )
- {
- case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
- case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
- case 2: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
- default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16:
- switch ( plane )
- {
- case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
- case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
- case 2: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
- default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16:
- switch ( plane )
- {
- case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
- case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16;
- default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16:
- switch ( plane )
- {
- case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
- case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
- case 2: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
- default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16:
- switch ( plane )
- {
- case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
- case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16;
- default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16:
- switch ( plane )
- {
- case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
- case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
- case 2: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
- default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm:
- switch ( plane )
- {
- case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
- case 1: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
- case 2: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
- default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm:
- switch ( plane )
- {
- case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
- case 1: return VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm;
- default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm:
- switch ( plane )
- {
- case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
- case 1: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
- case 2: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
- default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm:
- switch ( plane )
- {
- case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
- case 1: return VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm;
- default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm:
- switch ( plane )
- {
- case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
- case 1: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
- case 2: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
- default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm:
- switch ( plane )
- {
- case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
- case 1: return VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm;
- default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16:
- switch ( plane )
- {
- case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
- case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16;
- default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16:
- switch ( plane )
- {
- case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
- case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16;
- default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm:
- switch ( plane )
- {
- case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
- case 1: return VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm;
- default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
- }
-
- default: VULKAN_HPP_ASSERT( plane == 0 ); return format;
- }
- }
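      // Illustrative sketch (assumes an existing VULKAN_HPP_NAMESPACE::Device `device` and a
      // multi-planar VULKAN_HPP_NAMESPACE::Image `image`; not part of the generated header):
      // planeCompatibleFormat supplies the single-plane format used to view one plane of the image.
      VULKAN_HPP_NAMESPACE::ImageViewCreateInfo viewInfo{};
      viewInfo.image            = image;
      viewInfo.viewType         = VULKAN_HPP_NAMESPACE::ImageViewType::e2D;
      viewInfo.format           = VULKAN_HPP_NAMESPACE::planeCompatibleFormat( VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm, 1 );  // eR8G8Unorm
      viewInfo.subresourceRange = { VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::ePlane1, 0, 1, 0, 1 };
      VULKAN_HPP_NAMESPACE::ImageView chromaView = device.createImageView( viewInfo );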
-
- // The relative height of this plane. A value of k means that this plane is 1/k the height of the overall format.
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t planeHeightDivisor( VULKAN_HPP_NAMESPACE::Format format,
- uint8_t plane )
- {
- switch ( format )
- {
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 2;
- case 2: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 1;
- case 2: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 1;
- case 2: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 2;
- case 2: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 1;
- case 2: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 1;
- case 2: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 2;
- case 2: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 1;
- case 2: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 1;
- case 2: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 2;
- case 2: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 1;
- case 2: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 1;
- case 2: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
-
- default: VULKAN_HPP_ASSERT( plane == 0 ); return 1;
- }
- }
-
- // The relative width of this plane. A value of k means that this plane is 1/k the width of the overall format.
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t planeWidthDivisor( VULKAN_HPP_NAMESPACE::Format format,
- uint8_t plane )
- {
- switch ( format )
- {
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 2;
- case 2: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 2;
- case 2: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 1;
- case 2: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 2;
- case 2: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 2;
- case 2: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 1;
- case 2: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 2;
- case 2: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 2;
- case 2: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 1;
- case 2: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 2;
- case 2: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 2;
- case 2: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 2;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 1;
- case 2: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
- case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm:
- switch ( plane )
- {
- case 0: return 1;
- case 1: return 1;
- default: VULKAN_HPP_ASSERT( false ); return 1;
- }
-
- default: VULKAN_HPP_ASSERT( plane == 0 ); return 1;
- }
- }
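      // Illustrative sketch (not part of the generated header; the resolution is an arbitrary example):
      // combining the divisor helpers above to derive per-plane extents of a 4:2:0 multi-planar format.
      VULKAN_HPP_NAMESPACE::Format   fmt = VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm;
      VULKAN_HPP_NAMESPACE::Extent2D fullExtent{ 1920, 1080 };
      for ( uint8_t p = 0; p < VULKAN_HPP_NAMESPACE::planeCount( fmt ); ++p )
      {
        VULKAN_HPP_NAMESPACE::Extent2D planeExtent{ fullExtent.width / VULKAN_HPP_NAMESPACE::planeWidthDivisor( fmt, p ),
                                                    fullExtent.height / VULKAN_HPP_NAMESPACE::planeHeightDivisor( fmt, p ) };
        // Plane 0 (luma) stays 1920x1080; planes 1 and 2 (chroma) come out as 960x540.
      }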
+ eDeviceAddressCaptureReplay = VK_MICROMAP_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT
+ };
+
+ enum class BuildMicromapModeEXT
+ {
+ eBuild = VK_BUILD_MICROMAP_MODE_BUILD_EXT
+ };
+
+ enum class OpacityMicromapFormatEXT
+ {
+ e2State = VK_OPACITY_MICROMAP_FORMAT_2_STATE_EXT,
+ e4State = VK_OPACITY_MICROMAP_FORMAT_4_STATE_EXT
+ };
+
+ enum class OpacityMicromapSpecialIndexEXT
+ {
+ eFullyTransparent = VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_TRANSPARENT_EXT,
+ eFullyOpaque = VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_OPAQUE_EXT,
+ eFullyUnknownTransparent = VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_TRANSPARENT_EXT,
+ eFullyUnknownOpaque = VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_OPAQUE_EXT
+ };
+
+ //=== VK_EXT_subpass_merge_feedback ===
+
+ enum class SubpassMergeStatusEXT
+ {
+ eMerged = VK_SUBPASS_MERGE_STATUS_MERGED_EXT,
+ eDisallowed = VK_SUBPASS_MERGE_STATUS_DISALLOWED_EXT,
+ eNotMergedSideEffects = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SIDE_EFFECTS_EXT,
+ eNotMergedSamplesMismatch = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SAMPLES_MISMATCH_EXT,
+ eNotMergedViewsMismatch = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_VIEWS_MISMATCH_EXT,
+ eNotMergedAliasing = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_ALIASING_EXT,
+ eNotMergedDependencies = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPENDENCIES_EXT,
+ eNotMergedIncompatibleInputAttachment = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INCOMPATIBLE_INPUT_ATTACHMENT_EXT,
+ eNotMergedTooManyAttachments = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_TOO_MANY_ATTACHMENTS_EXT,
+ eNotMergedInsufficientStorage = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INSUFFICIENT_STORAGE_EXT,
+ eNotMergedDepthStencilCount = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPTH_STENCIL_COUNT_EXT,
+ eNotMergedResolveAttachmentReuse = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_RESOLVE_ATTACHMENT_REUSE_EXT,
+ eNotMergedSingleSubpass = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SINGLE_SUBPASS_EXT,
+ eNotMergedUnspecified = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_UNSPECIFIED_EXT
+ };
+
+ //=== VK_EXT_rasterization_order_attachment_access ===
+
+ enum class PipelineColorBlendStateCreateFlagBits : VkPipelineColorBlendStateCreateFlags
+ {
+ eRasterizationOrderAttachmentAccessEXT = VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_EXT,
+ eRasterizationOrderAttachmentAccessARM = VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_ARM
+ };
+
+ enum class PipelineDepthStencilStateCreateFlagBits : VkPipelineDepthStencilStateCreateFlags
+ {
+ eRasterizationOrderAttachmentDepthAccessEXT = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT,
+ eRasterizationOrderAttachmentStencilAccessEXT = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT,
+ eRasterizationOrderAttachmentDepthAccessARM = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM,
+ eRasterizationOrderAttachmentStencilAccessARM = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM
+ };
+
+ //=== VK_NV_optical_flow ===
+
+ enum class OpticalFlowUsageFlagBitsNV : VkOpticalFlowUsageFlagsNV
+ {
+ eUnknown = VK_OPTICAL_FLOW_USAGE_UNKNOWN_NV,
+ eInput = VK_OPTICAL_FLOW_USAGE_INPUT_BIT_NV,
+ eOutput = VK_OPTICAL_FLOW_USAGE_OUTPUT_BIT_NV,
+ eHint = VK_OPTICAL_FLOW_USAGE_HINT_BIT_NV,
+ eCost = VK_OPTICAL_FLOW_USAGE_COST_BIT_NV,
+ eGlobalFlow = VK_OPTICAL_FLOW_USAGE_GLOBAL_FLOW_BIT_NV
+ };
+
+ enum class OpticalFlowGridSizeFlagBitsNV : VkOpticalFlowGridSizeFlagsNV
+ {
+ eUnknown = VK_OPTICAL_FLOW_GRID_SIZE_UNKNOWN_NV,
+ e1X1 = VK_OPTICAL_FLOW_GRID_SIZE_1X1_BIT_NV,
+ e2X2 = VK_OPTICAL_FLOW_GRID_SIZE_2X2_BIT_NV,
+ e4X4 = VK_OPTICAL_FLOW_GRID_SIZE_4X4_BIT_NV,
+ e8X8 = VK_OPTICAL_FLOW_GRID_SIZE_8X8_BIT_NV
+ };
+
+ enum class OpticalFlowPerformanceLevelNV
+ {
+ eUnknown = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_UNKNOWN_NV,
+ eSlow = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_SLOW_NV,
+ eMedium = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_MEDIUM_NV,
+ eFast = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_FAST_NV
+ };
+
+ enum class OpticalFlowSessionBindingPointNV
+ {
+ eUnknown = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_UNKNOWN_NV,
+ eInput = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_INPUT_NV,
+ eReference = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_REFERENCE_NV,
+ eHint = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_HINT_NV,
+ eFlowVector = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_FLOW_VECTOR_NV,
+ eBackwardFlowVector = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_FLOW_VECTOR_NV,
+ eCost = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_COST_NV,
+ eBackwardCost = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_COST_NV,
+ eGlobalFlow = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_GLOBAL_FLOW_NV
+ };
+
+ enum class OpticalFlowSessionCreateFlagBitsNV : VkOpticalFlowSessionCreateFlagsNV
+ {
+ eEnableHint = VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_HINT_BIT_NV,
+ eEnableCost = VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_COST_BIT_NV,
+ eEnableGlobalFlow = VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_GLOBAL_FLOW_BIT_NV,
+ eAllowRegions = VK_OPTICAL_FLOW_SESSION_CREATE_ALLOW_REGIONS_BIT_NV,
+ eBothDirections = VK_OPTICAL_FLOW_SESSION_CREATE_BOTH_DIRECTIONS_BIT_NV
+ };
+
+ enum class OpticalFlowExecuteFlagBitsNV : VkOpticalFlowExecuteFlagsNV
+ {
+ eDisableTemporalHints = VK_OPTICAL_FLOW_EXECUTE_DISABLE_TEMPORAL_HINTS_BIT_NV
+ };
template <typename T>
struct IndexTypeValue
- {};
+ {
+ };
template <>
struct IndexTypeValue<uint16_t>
@@ -11814,1165 +4471,344 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_VERSION_1_0 ===
- using FormatFeatureFlags = Flags<FormatFeatureFlagBits>;
-
template <>
struct FlagTraits<FormatFeatureFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags =
VkFlags( FormatFeatureFlagBits::eSampledImage ) | VkFlags( FormatFeatureFlagBits::eStorageImage ) |
VkFlags( FormatFeatureFlagBits::eStorageImageAtomic ) | VkFlags( FormatFeatureFlagBits::eUniformTexelBuffer ) |
- VkFlags( FormatFeatureFlagBits::eStorageTexelBuffer ) |
- VkFlags( FormatFeatureFlagBits::eStorageTexelBufferAtomic ) | VkFlags( FormatFeatureFlagBits::eVertexBuffer ) |
- VkFlags( FormatFeatureFlagBits::eColorAttachment ) | VkFlags( FormatFeatureFlagBits::eColorAttachmentBlend ) |
- VkFlags( FormatFeatureFlagBits::eDepthStencilAttachment ) | VkFlags( FormatFeatureFlagBits::eBlitSrc ) |
- VkFlags( FormatFeatureFlagBits::eBlitDst ) | VkFlags( FormatFeatureFlagBits::eSampledImageFilterLinear ) |
+ VkFlags( FormatFeatureFlagBits::eStorageTexelBuffer ) | VkFlags( FormatFeatureFlagBits::eStorageTexelBufferAtomic ) |
+ VkFlags( FormatFeatureFlagBits::eVertexBuffer ) | VkFlags( FormatFeatureFlagBits::eColorAttachment ) |
+ VkFlags( FormatFeatureFlagBits::eColorAttachmentBlend ) | VkFlags( FormatFeatureFlagBits::eDepthStencilAttachment ) |
+ VkFlags( FormatFeatureFlagBits::eBlitSrc ) | VkFlags( FormatFeatureFlagBits::eBlitDst ) | VkFlags( FormatFeatureFlagBits::eSampledImageFilterLinear ) |
VkFlags( FormatFeatureFlagBits::eTransferSrc ) | VkFlags( FormatFeatureFlagBits::eTransferDst ) |
- VkFlags( FormatFeatureFlagBits::eMidpointChromaSamples ) |
- VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter ) |
+ VkFlags( FormatFeatureFlagBits::eMidpointChromaSamples ) | VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter ) |
VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter ) |
VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit ) |
- VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) |
- VkFlags( FormatFeatureFlagBits::eDisjoint ) | VkFlags( FormatFeatureFlagBits::eCositedChromaSamples ) |
- VkFlags( FormatFeatureFlagBits::eSampledImageFilterMinmax ) |
- VkFlags( FormatFeatureFlagBits::eSampledImageFilterCubicIMG )
+ VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) | VkFlags( FormatFeatureFlagBits::eDisjoint ) |
+ VkFlags( FormatFeatureFlagBits::eCositedChromaSamples ) | VkFlags( FormatFeatureFlagBits::eSampledImageFilterMinmax )
#if defined( VK_ENABLE_BETA_EXTENSIONS )
| VkFlags( FormatFeatureFlagBits::eVideoDecodeOutputKHR ) | VkFlags( FormatFeatureFlagBits::eVideoDecodeDpbKHR )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- | VkFlags( FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR ) |
- VkFlags( FormatFeatureFlagBits::eFragmentDensityMapEXT ) |
- VkFlags( FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR )
+ | VkFlags( FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR ) | VkFlags( FormatFeatureFlagBits::eSampledImageFilterCubicEXT ) |
+ VkFlags( FormatFeatureFlagBits::eFragmentDensityMapEXT ) | VkFlags( FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR )
#if defined( VK_ENABLE_BETA_EXTENSIONS )
| VkFlags( FormatFeatureFlagBits::eVideoEncodeInputKHR ) | VkFlags( FormatFeatureFlagBits::eVideoEncodeDpbKHR )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator|( FormatFeatureFlagBits bit0,
- FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return FormatFeatureFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator&( FormatFeatureFlagBits bit0,
- FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return FormatFeatureFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator^( FormatFeatureFlagBits bit0,
- FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return FormatFeatureFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator~( FormatFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( FormatFeatureFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & FormatFeatureFlagBits::eSampledImage )
- result += "SampledImage | ";
- if ( value & FormatFeatureFlagBits::eStorageImage )
- result += "StorageImage | ";
- if ( value & FormatFeatureFlagBits::eStorageImageAtomic )
- result += "StorageImageAtomic | ";
- if ( value & FormatFeatureFlagBits::eUniformTexelBuffer )
- result += "UniformTexelBuffer | ";
- if ( value & FormatFeatureFlagBits::eStorageTexelBuffer )
- result += "StorageTexelBuffer | ";
- if ( value & FormatFeatureFlagBits::eStorageTexelBufferAtomic )
- result += "StorageTexelBufferAtomic | ";
- if ( value & FormatFeatureFlagBits::eVertexBuffer )
- result += "VertexBuffer | ";
- if ( value & FormatFeatureFlagBits::eColorAttachment )
- result += "ColorAttachment | ";
- if ( value & FormatFeatureFlagBits::eColorAttachmentBlend )
- result += "ColorAttachmentBlend | ";
- if ( value & FormatFeatureFlagBits::eDepthStencilAttachment )
- result += "DepthStencilAttachment | ";
- if ( value & FormatFeatureFlagBits::eBlitSrc )
- result += "BlitSrc | ";
- if ( value & FormatFeatureFlagBits::eBlitDst )
- result += "BlitDst | ";
- if ( value & FormatFeatureFlagBits::eSampledImageFilterLinear )
- result += "SampledImageFilterLinear | ";
- if ( value & FormatFeatureFlagBits::eTransferSrc )
- result += "TransferSrc | ";
- if ( value & FormatFeatureFlagBits::eTransferDst )
- result += "TransferDst | ";
- if ( value & FormatFeatureFlagBits::eMidpointChromaSamples )
- result += "MidpointChromaSamples | ";
- if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter )
- result += "SampledImageYcbcrConversionLinearFilter | ";
- if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter )
- result += "SampledImageYcbcrConversionSeparateReconstructionFilter | ";
- if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit )
- result += "SampledImageYcbcrConversionChromaReconstructionExplicit | ";
- if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable )
- result += "SampledImageYcbcrConversionChromaReconstructionExplicitForceable | ";
- if ( value & FormatFeatureFlagBits::eDisjoint )
- result += "Disjoint | ";
- if ( value & FormatFeatureFlagBits::eCositedChromaSamples )
- result += "CositedChromaSamples | ";
- if ( value & FormatFeatureFlagBits::eSampledImageFilterMinmax )
- result += "SampledImageFilterMinmax | ";
- if ( value & FormatFeatureFlagBits::eSampledImageFilterCubicIMG )
- result += "SampledImageFilterCubicIMG | ";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- if ( value & FormatFeatureFlagBits::eVideoDecodeOutputKHR )
- result += "VideoDecodeOutputKHR | ";
- if ( value & FormatFeatureFlagBits::eVideoDecodeDpbKHR )
- result += "VideoDecodeDpbKHR | ";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- if ( value & FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR )
- result += "AccelerationStructureVertexBufferKHR | ";
- if ( value & FormatFeatureFlagBits::eFragmentDensityMapEXT )
- result += "FragmentDensityMapEXT | ";
- if ( value & FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR )
- result += "FragmentShadingRateAttachmentKHR | ";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- if ( value & FormatFeatureFlagBits::eVideoEncodeInputKHR )
- result += "VideoEncodeInputKHR | ";
- if ( value & FormatFeatureFlagBits::eVideoEncodeDpbKHR )
- result += "VideoEncodeDpbKHR | ";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using ImageCreateFlags = Flags<ImageCreateFlagBits>;
+ using FormatFeatureFlags = Flags<FormatFeatureFlagBits>;
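  // Note (an inference from this diff, not text from the header): once FlagTraits<...>::isBitmask is
  // available, the bitwise operators can be supplied once, generically, for every FlagBits type, which
  // is presumably why the hand-written per-type operator|/&/^/~ and to_string overloads above are
  // dropped. Client code keeps working unchanged, e.g.:
  VULKAN_HPP_NAMESPACE::FormatFeatureFlags features = VULKAN_HPP_NAMESPACE::FormatFeatureFlagBits::eSampledImage |
                                                      VULKAN_HPP_NAMESPACE::FormatFeatureFlagBits::eColorAttachment;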
template <>
struct FlagTraits<ImageCreateFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags =
- VkFlags( ImageCreateFlagBits::eSparseBinding ) | VkFlags( ImageCreateFlagBits::eSparseResidency ) |
- VkFlags( ImageCreateFlagBits::eSparseAliased ) | VkFlags( ImageCreateFlagBits::eMutableFormat ) |
- VkFlags( ImageCreateFlagBits::eCubeCompatible ) | VkFlags( ImageCreateFlagBits::eAlias ) |
- VkFlags( ImageCreateFlagBits::eSplitInstanceBindRegions ) | VkFlags( ImageCreateFlagBits::e2DArrayCompatible ) |
- VkFlags( ImageCreateFlagBits::eBlockTexelViewCompatible ) | VkFlags( ImageCreateFlagBits::eExtendedUsage ) |
- VkFlags( ImageCreateFlagBits::eProtected ) | VkFlags( ImageCreateFlagBits::eDisjoint ) |
- VkFlags( ImageCreateFlagBits::eCornerSampledNV ) |
- VkFlags( ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT ) |
- VkFlags( ImageCreateFlagBits::eSubsampledEXT ) | VkFlags( ImageCreateFlagBits::eFragmentDensityMapOffsetQCOM )
+ allFlags = VkFlags( ImageCreateFlagBits::eSparseBinding ) | VkFlags( ImageCreateFlagBits::eSparseResidency ) |
+ VkFlags( ImageCreateFlagBits::eSparseAliased ) | VkFlags( ImageCreateFlagBits::eMutableFormat ) |
+ VkFlags( ImageCreateFlagBits::eCubeCompatible ) | VkFlags( ImageCreateFlagBits::eAlias ) |
+ VkFlags( ImageCreateFlagBits::eSplitInstanceBindRegions ) | VkFlags( ImageCreateFlagBits::e2DArrayCompatible ) |
+ VkFlags( ImageCreateFlagBits::eBlockTexelViewCompatible ) | VkFlags( ImageCreateFlagBits::eExtendedUsage ) |
+ VkFlags( ImageCreateFlagBits::eProtected ) | VkFlags( ImageCreateFlagBits::eDisjoint ) | VkFlags( ImageCreateFlagBits::eCornerSampledNV ) |
+ VkFlags( ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT ) | VkFlags( ImageCreateFlagBits::eSubsampledEXT ) |
+ VkFlags( ImageCreateFlagBits::eMultisampledRenderToSingleSampledEXT ) | VkFlags( ImageCreateFlagBits::e2DViewCompatibleEXT ) |
+ VkFlags( ImageCreateFlagBits::eFragmentDensityMapOffsetQCOM )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator|( ImageCreateFlagBits bit0,
- ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ImageCreateFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator&( ImageCreateFlagBits bit0,
- ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ImageCreateFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator^( ImageCreateFlagBits bit0,
- ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ImageCreateFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator~( ImageCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( ImageCreateFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( ImageCreateFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & ImageCreateFlagBits::eSparseBinding )
- result += "SparseBinding | ";
- if ( value & ImageCreateFlagBits::eSparseResidency )
- result += "SparseResidency | ";
- if ( value & ImageCreateFlagBits::eSparseAliased )
- result += "SparseAliased | ";
- if ( value & ImageCreateFlagBits::eMutableFormat )
- result += "MutableFormat | ";
- if ( value & ImageCreateFlagBits::eCubeCompatible )
- result += "CubeCompatible | ";
- if ( value & ImageCreateFlagBits::eAlias )
- result += "Alias | ";
- if ( value & ImageCreateFlagBits::eSplitInstanceBindRegions )
- result += "SplitInstanceBindRegions | ";
- if ( value & ImageCreateFlagBits::e2DArrayCompatible )
- result += "2DArrayCompatible | ";
- if ( value & ImageCreateFlagBits::eBlockTexelViewCompatible )
- result += "BlockTexelViewCompatible | ";
- if ( value & ImageCreateFlagBits::eExtendedUsage )
- result += "ExtendedUsage | ";
- if ( value & ImageCreateFlagBits::eProtected )
- result += "Protected | ";
- if ( value & ImageCreateFlagBits::eDisjoint )
- result += "Disjoint | ";
- if ( value & ImageCreateFlagBits::eCornerSampledNV )
- result += "CornerSampledNV | ";
- if ( value & ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT )
- result += "SampleLocationsCompatibleDepthEXT | ";
- if ( value & ImageCreateFlagBits::eSubsampledEXT )
- result += "SubsampledEXT | ";
- if ( value & ImageCreateFlagBits::eFragmentDensityMapOffsetQCOM )
- result += "FragmentDensityMapOffsetQCOM | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using ImageUsageFlags = Flags<ImageUsageFlagBits>;
+ using ImageCreateFlags = Flags<ImageCreateFlagBits>;
template <>
struct FlagTraits<ImageUsageFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( ImageUsageFlagBits::eTransferSrc ) | VkFlags( ImageUsageFlagBits::eTransferDst ) |
- VkFlags( ImageUsageFlagBits::eSampled ) | VkFlags( ImageUsageFlagBits::eStorage ) |
- VkFlags( ImageUsageFlagBits::eColorAttachment ) |
- VkFlags( ImageUsageFlagBits::eDepthStencilAttachment ) |
- VkFlags( ImageUsageFlagBits::eTransientAttachment ) | VkFlags( ImageUsageFlagBits::eInputAttachment )
+ allFlags = VkFlags( ImageUsageFlagBits::eTransferSrc ) | VkFlags( ImageUsageFlagBits::eTransferDst ) | VkFlags( ImageUsageFlagBits::eSampled ) |
+ VkFlags( ImageUsageFlagBits::eStorage ) | VkFlags( ImageUsageFlagBits::eColorAttachment ) |
+ VkFlags( ImageUsageFlagBits::eDepthStencilAttachment ) | VkFlags( ImageUsageFlagBits::eTransientAttachment ) |
+ VkFlags( ImageUsageFlagBits::eInputAttachment )
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- | VkFlags( ImageUsageFlagBits::eVideoDecodeDstKHR ) |
- VkFlags( ImageUsageFlagBits::eVideoDecodeSrcKHR ) | VkFlags( ImageUsageFlagBits::eVideoDecodeDpbKHR )
+ | VkFlags( ImageUsageFlagBits::eVideoDecodeDstKHR ) | VkFlags( ImageUsageFlagBits::eVideoDecodeSrcKHR ) |
+ VkFlags( ImageUsageFlagBits::eVideoDecodeDpbKHR )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- | VkFlags( ImageUsageFlagBits::eFragmentDensityMapEXT ) |
- VkFlags( ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR )
+ | VkFlags( ImageUsageFlagBits::eFragmentDensityMapEXT ) | VkFlags( ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR )
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- | VkFlags( ImageUsageFlagBits::eVideoEncodeDstKHR ) |
- VkFlags( ImageUsageFlagBits::eVideoEncodeSrcKHR ) | VkFlags( ImageUsageFlagBits::eVideoEncodeDpbKHR )
+ | VkFlags( ImageUsageFlagBits::eVideoEncodeDstKHR ) | VkFlags( ImageUsageFlagBits::eVideoEncodeSrcKHR ) |
+ VkFlags( ImageUsageFlagBits::eVideoEncodeDpbKHR )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- | VkFlags( ImageUsageFlagBits::eInvocationMaskHUAWEI )
+ | VkFlags( ImageUsageFlagBits::eAttachmentFeedbackLoopEXT ) | VkFlags( ImageUsageFlagBits::eInvocationMaskHUAWEI ) |
+ VkFlags( ImageUsageFlagBits::eSampleWeightQCOM ) | VkFlags( ImageUsageFlagBits::eSampleBlockMatchQCOM )
};
};
+ using ImageUsageFlags = Flags<ImageUsageFlagBits>;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator|( ImageUsageFlagBits bit0,
- ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ImageUsageFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator&( ImageUsageFlagBits bit0,
- ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ImageUsageFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator^( ImageUsageFlagBits bit0,
- ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ImageUsageFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator~( ImageUsageFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( ImageUsageFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( ImageUsageFlags value )
+ template <>
+ struct FlagTraits<InstanceCreateFlagBits>
{
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & ImageUsageFlagBits::eTransferSrc )
- result += "TransferSrc | ";
- if ( value & ImageUsageFlagBits::eTransferDst )
- result += "TransferDst | ";
- if ( value & ImageUsageFlagBits::eSampled )
- result += "Sampled | ";
- if ( value & ImageUsageFlagBits::eStorage )
- result += "Storage | ";
- if ( value & ImageUsageFlagBits::eColorAttachment )
- result += "ColorAttachment | ";
- if ( value & ImageUsageFlagBits::eDepthStencilAttachment )
- result += "DepthStencilAttachment | ";
- if ( value & ImageUsageFlagBits::eTransientAttachment )
- result += "TransientAttachment | ";
- if ( value & ImageUsageFlagBits::eInputAttachment )
- result += "InputAttachment | ";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- if ( value & ImageUsageFlagBits::eVideoDecodeDstKHR )
- result += "VideoDecodeDstKHR | ";
- if ( value & ImageUsageFlagBits::eVideoDecodeSrcKHR )
- result += "VideoDecodeSrcKHR | ";
- if ( value & ImageUsageFlagBits::eVideoDecodeDpbKHR )
- result += "VideoDecodeDpbKHR | ";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- if ( value & ImageUsageFlagBits::eFragmentDensityMapEXT )
- result += "FragmentDensityMapEXT | ";
- if ( value & ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR )
- result += "FragmentShadingRateAttachmentKHR | ";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- if ( value & ImageUsageFlagBits::eVideoEncodeDstKHR )
- result += "VideoEncodeDstKHR | ";
- if ( value & ImageUsageFlagBits::eVideoEncodeSrcKHR )
- result += "VideoEncodeSrcKHR | ";
- if ( value & ImageUsageFlagBits::eVideoEncodeDpbKHR )
- result += "VideoEncodeDpbKHR | ";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- if ( value & ImageUsageFlagBits::eInvocationMaskHUAWEI )
- result += "InvocationMaskHUAWEI | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ enum : VkFlags
+ {
+ allFlags = VkFlags( InstanceCreateFlagBits::eEnumeratePortabilityKHR )
+ };
+ };
using InstanceCreateFlags = Flags<InstanceCreateFlagBits>;
- VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlags )
- {
- return "{}";
- }
-
- using MemoryHeapFlags = Flags<MemoryHeapFlagBits>;
-
template <>
struct FlagTraits<MemoryHeapFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( MemoryHeapFlagBits::eDeviceLocal ) | VkFlags( MemoryHeapFlagBits::eMultiInstance )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator|( MemoryHeapFlagBits bit0,
- MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return MemoryHeapFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator&( MemoryHeapFlagBits bit0,
- MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return MemoryHeapFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator^( MemoryHeapFlagBits bit0,
- MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return MemoryHeapFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator~( MemoryHeapFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( MemoryHeapFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & MemoryHeapFlagBits::eDeviceLocal )
- result += "DeviceLocal | ";
- if ( value & MemoryHeapFlagBits::eMultiInstance )
- result += "MultiInstance | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using MemoryPropertyFlags = Flags<MemoryPropertyFlagBits>;
+ using MemoryHeapFlags = Flags<MemoryHeapFlagBits>;
template <>
struct FlagTraits<MemoryPropertyFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( MemoryPropertyFlagBits::eDeviceLocal ) | VkFlags( MemoryPropertyFlagBits::eHostVisible ) |
VkFlags( MemoryPropertyFlagBits::eHostCoherent ) | VkFlags( MemoryPropertyFlagBits::eHostCached ) |
VkFlags( MemoryPropertyFlagBits::eLazilyAllocated ) | VkFlags( MemoryPropertyFlagBits::eProtected ) |
- VkFlags( MemoryPropertyFlagBits::eDeviceCoherentAMD ) |
- VkFlags( MemoryPropertyFlagBits::eDeviceUncachedAMD ) |
+ VkFlags( MemoryPropertyFlagBits::eDeviceCoherentAMD ) | VkFlags( MemoryPropertyFlagBits::eDeviceUncachedAMD ) |
VkFlags( MemoryPropertyFlagBits::eRdmaCapableNV )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags
- operator|( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return MemoryPropertyFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags
- operator&( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return MemoryPropertyFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags
- operator^( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return MemoryPropertyFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator~( MemoryPropertyFlagBits bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( MemoryPropertyFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & MemoryPropertyFlagBits::eDeviceLocal )
- result += "DeviceLocal | ";
- if ( value & MemoryPropertyFlagBits::eHostVisible )
- result += "HostVisible | ";
- if ( value & MemoryPropertyFlagBits::eHostCoherent )
- result += "HostCoherent | ";
- if ( value & MemoryPropertyFlagBits::eHostCached )
- result += "HostCached | ";
- if ( value & MemoryPropertyFlagBits::eLazilyAllocated )
- result += "LazilyAllocated | ";
- if ( value & MemoryPropertyFlagBits::eProtected )
- result += "Protected | ";
- if ( value & MemoryPropertyFlagBits::eDeviceCoherentAMD )
- result += "DeviceCoherentAMD | ";
- if ( value & MemoryPropertyFlagBits::eDeviceUncachedAMD )
- result += "DeviceUncachedAMD | ";
- if ( value & MemoryPropertyFlagBits::eRdmaCapableNV )
- result += "RdmaCapableNV | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using QueueFlags = Flags<QueueFlagBits>;
+ using MemoryPropertyFlags = Flags<MemoryPropertyFlagBits>;
template <>
struct FlagTraits<QueueFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( QueueFlagBits::eGraphics ) | VkFlags( QueueFlagBits::eCompute ) |
- VkFlags( QueueFlagBits::eTransfer ) | VkFlags( QueueFlagBits::eSparseBinding ) |
- VkFlags( QueueFlagBits::eProtected )
+ allFlags = VkFlags( QueueFlagBits::eGraphics ) | VkFlags( QueueFlagBits::eCompute ) | VkFlags( QueueFlagBits::eTransfer ) |
+ VkFlags( QueueFlagBits::eSparseBinding ) | VkFlags( QueueFlagBits::eProtected )
#if defined( VK_ENABLE_BETA_EXTENSIONS )
| VkFlags( QueueFlagBits::eVideoDecodeKHR ) | VkFlags( QueueFlagBits::eVideoEncodeKHR )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ | VkFlags( QueueFlagBits::eOpticalFlowNV )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator|( QueueFlagBits bit0,
- QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return QueueFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator&( QueueFlagBits bit0,
- QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return QueueFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator^( QueueFlagBits bit0,
- QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return QueueFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator~( QueueFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( QueueFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( QueueFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & QueueFlagBits::eGraphics )
- result += "Graphics | ";
- if ( value & QueueFlagBits::eCompute )
- result += "Compute | ";
- if ( value & QueueFlagBits::eTransfer )
- result += "Transfer | ";
- if ( value & QueueFlagBits::eSparseBinding )
- result += "SparseBinding | ";
- if ( value & QueueFlagBits::eProtected )
- result += "Protected | ";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- if ( value & QueueFlagBits::eVideoDecodeKHR )
- result += "VideoDecodeKHR | ";
- if ( value & QueueFlagBits::eVideoEncodeKHR )
- result += "VideoEncodeKHR | ";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using SampleCountFlags = Flags<SampleCountFlagBits>;
+ using QueueFlags = Flags<QueueFlagBits>;
template <>
struct FlagTraits<SampleCountFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( SampleCountFlagBits::e1 ) | VkFlags( SampleCountFlagBits::e2 ) |
- VkFlags( SampleCountFlagBits::e4 ) | VkFlags( SampleCountFlagBits::e8 ) |
- VkFlags( SampleCountFlagBits::e16 ) | VkFlags( SampleCountFlagBits::e32 ) |
+ allFlags = VkFlags( SampleCountFlagBits::e1 ) | VkFlags( SampleCountFlagBits::e2 ) | VkFlags( SampleCountFlagBits::e4 ) |
+ VkFlags( SampleCountFlagBits::e8 ) | VkFlags( SampleCountFlagBits::e16 ) | VkFlags( SampleCountFlagBits::e32 ) |
VkFlags( SampleCountFlagBits::e64 )
};
};
+ using SampleCountFlags = Flags<SampleCountFlagBits>;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator|( SampleCountFlagBits bit0,
- SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SampleCountFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator&( SampleCountFlagBits bit0,
- SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SampleCountFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator^( SampleCountFlagBits bit0,
- SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SampleCountFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator~( SampleCountFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( SampleCountFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( SampleCountFlags value )
+ template <>
+ struct FlagTraits<DeviceCreateFlagBits>
{
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & SampleCountFlagBits::e1 )
- result += "1 | ";
- if ( value & SampleCountFlagBits::e2 )
- result += "2 | ";
- if ( value & SampleCountFlagBits::e4 )
- result += "4 | ";
- if ( value & SampleCountFlagBits::e8 )
- result += "8 | ";
- if ( value & SampleCountFlagBits::e16 )
- result += "16 | ";
- if ( value & SampleCountFlagBits::e32 )
- result += "32 | ";
- if ( value & SampleCountFlagBits::e64 )
- result += "64 | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
using DeviceCreateFlags = Flags<DeviceCreateFlagBits>;
- VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlags )
- {
- return "{}";
- }
-
- using DeviceQueueCreateFlags = Flags<DeviceQueueCreateFlagBits>;
-
template <>
struct FlagTraits<DeviceQueueCreateFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( DeviceQueueCreateFlagBits::eProtected )
};
};
+ using DeviceQueueCreateFlags = Flags<DeviceQueueCreateFlagBits>;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags
- operator|( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DeviceQueueCreateFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags
- operator&( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DeviceQueueCreateFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags
- operator^( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DeviceQueueCreateFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator~( DeviceQueueCreateFlagBits bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( DeviceQueueCreateFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlags value )
+ template <>
+ struct FlagTraits<PipelineStageFlagBits>
{
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & DeviceQueueCreateFlagBits::eProtected )
- result += "Protected | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ enum : VkFlags
+ {
+ allFlags = VkFlags( PipelineStageFlagBits::eTopOfPipe ) | VkFlags( PipelineStageFlagBits::eDrawIndirect ) |
+ VkFlags( PipelineStageFlagBits::eVertexInput ) | VkFlags( PipelineStageFlagBits::eVertexShader ) |
+ VkFlags( PipelineStageFlagBits::eTessellationControlShader ) | VkFlags( PipelineStageFlagBits::eTessellationEvaluationShader ) |
+ VkFlags( PipelineStageFlagBits::eGeometryShader ) | VkFlags( PipelineStageFlagBits::eFragmentShader ) |
+ VkFlags( PipelineStageFlagBits::eEarlyFragmentTests ) | VkFlags( PipelineStageFlagBits::eLateFragmentTests ) |
+ VkFlags( PipelineStageFlagBits::eColorAttachmentOutput ) | VkFlags( PipelineStageFlagBits::eComputeShader ) |
+ VkFlags( PipelineStageFlagBits::eTransfer ) | VkFlags( PipelineStageFlagBits::eBottomOfPipe ) | VkFlags( PipelineStageFlagBits::eHost ) |
+ VkFlags( PipelineStageFlagBits::eAllGraphics ) | VkFlags( PipelineStageFlagBits::eAllCommands ) | VkFlags( PipelineStageFlagBits::eNone ) |
+ VkFlags( PipelineStageFlagBits::eTransformFeedbackEXT ) | VkFlags( PipelineStageFlagBits::eConditionalRenderingEXT ) |
+ VkFlags( PipelineStageFlagBits::eAccelerationStructureBuildKHR ) | VkFlags( PipelineStageFlagBits::eRayTracingShaderKHR ) |
+ VkFlags( PipelineStageFlagBits::eFragmentDensityProcessEXT ) | VkFlags( PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR ) |
+ VkFlags( PipelineStageFlagBits::eCommandPreprocessNV ) | VkFlags( PipelineStageFlagBits::eTaskShaderEXT ) |
+ VkFlags( PipelineStageFlagBits::eMeshShaderEXT )
+ };
+ };
using PipelineStageFlags = Flags<PipelineStageFlagBits>;
template <>
- struct FlagTraits<PipelineStageFlagBits>
+ struct FlagTraits<MemoryMapFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags =
- VkFlags( PipelineStageFlagBits::eTopOfPipe ) | VkFlags( PipelineStageFlagBits::eDrawIndirect ) |
- VkFlags( PipelineStageFlagBits::eVertexInput ) | VkFlags( PipelineStageFlagBits::eVertexShader ) |
- VkFlags( PipelineStageFlagBits::eTessellationControlShader ) |
- VkFlags( PipelineStageFlagBits::eTessellationEvaluationShader ) |
- VkFlags( PipelineStageFlagBits::eGeometryShader ) | VkFlags( PipelineStageFlagBits::eFragmentShader ) |
- VkFlags( PipelineStageFlagBits::eEarlyFragmentTests ) | VkFlags( PipelineStageFlagBits::eLateFragmentTests ) |
- VkFlags( PipelineStageFlagBits::eColorAttachmentOutput ) | VkFlags( PipelineStageFlagBits::eComputeShader ) |
- VkFlags( PipelineStageFlagBits::eTransfer ) | VkFlags( PipelineStageFlagBits::eBottomOfPipe ) |
- VkFlags( PipelineStageFlagBits::eHost ) | VkFlags( PipelineStageFlagBits::eAllGraphics ) |
- VkFlags( PipelineStageFlagBits::eAllCommands ) | VkFlags( PipelineStageFlagBits::eNone ) |
- VkFlags( PipelineStageFlagBits::eTransformFeedbackEXT ) |
- VkFlags( PipelineStageFlagBits::eConditionalRenderingEXT ) |
- VkFlags( PipelineStageFlagBits::eAccelerationStructureBuildKHR ) |
- VkFlags( PipelineStageFlagBits::eRayTracingShaderKHR ) | VkFlags( PipelineStageFlagBits::eTaskShaderNV ) |
- VkFlags( PipelineStageFlagBits::eMeshShaderNV ) | VkFlags( PipelineStageFlagBits::eFragmentDensityProcessEXT ) |
- VkFlags( PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR ) |
- VkFlags( PipelineStageFlagBits::eCommandPreprocessNV )
+ allFlags = 0
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator|( PipelineStageFlagBits bit0,
- PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineStageFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator&( PipelineStageFlagBits bit0,
- PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineStageFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator^( PipelineStageFlagBits bit0,
- PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineStageFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator~( PipelineStageFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( PipelineStageFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & PipelineStageFlagBits::eTopOfPipe )
- result += "TopOfPipe | ";
- if ( value & PipelineStageFlagBits::eDrawIndirect )
- result += "DrawIndirect | ";
- if ( value & PipelineStageFlagBits::eVertexInput )
- result += "VertexInput | ";
- if ( value & PipelineStageFlagBits::eVertexShader )
- result += "VertexShader | ";
- if ( value & PipelineStageFlagBits::eTessellationControlShader )
- result += "TessellationControlShader | ";
- if ( value & PipelineStageFlagBits::eTessellationEvaluationShader )
- result += "TessellationEvaluationShader | ";
- if ( value & PipelineStageFlagBits::eGeometryShader )
- result += "GeometryShader | ";
- if ( value & PipelineStageFlagBits::eFragmentShader )
- result += "FragmentShader | ";
- if ( value & PipelineStageFlagBits::eEarlyFragmentTests )
- result += "EarlyFragmentTests | ";
- if ( value & PipelineStageFlagBits::eLateFragmentTests )
- result += "LateFragmentTests | ";
- if ( value & PipelineStageFlagBits::eColorAttachmentOutput )
- result += "ColorAttachmentOutput | ";
- if ( value & PipelineStageFlagBits::eComputeShader )
- result += "ComputeShader | ";
- if ( value & PipelineStageFlagBits::eTransfer )
- result += "Transfer | ";
- if ( value & PipelineStageFlagBits::eBottomOfPipe )
- result += "BottomOfPipe | ";
- if ( value & PipelineStageFlagBits::eHost )
- result += "Host | ";
- if ( value & PipelineStageFlagBits::eAllGraphics )
- result += "AllGraphics | ";
- if ( value & PipelineStageFlagBits::eAllCommands )
- result += "AllCommands | ";
- if ( value & PipelineStageFlagBits::eTransformFeedbackEXT )
- result += "TransformFeedbackEXT | ";
- if ( value & PipelineStageFlagBits::eConditionalRenderingEXT )
- result += "ConditionalRenderingEXT | ";
- if ( value & PipelineStageFlagBits::eAccelerationStructureBuildKHR )
- result += "AccelerationStructureBuildKHR | ";
- if ( value & PipelineStageFlagBits::eRayTracingShaderKHR )
- result += "RayTracingShaderKHR | ";
- if ( value & PipelineStageFlagBits::eTaskShaderNV )
- result += "TaskShaderNV | ";
- if ( value & PipelineStageFlagBits::eMeshShaderNV )
- result += "MeshShaderNV | ";
- if ( value & PipelineStageFlagBits::eFragmentDensityProcessEXT )
- result += "FragmentDensityProcessEXT | ";
- if ( value & PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR )
- result += "FragmentShadingRateAttachmentKHR | ";
- if ( value & PipelineStageFlagBits::eCommandPreprocessNV )
- result += "CommandPreprocessNV | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
using MemoryMapFlags = Flags<MemoryMapFlagBits>;
- VULKAN_HPP_INLINE std::string to_string( MemoryMapFlags )
- {
- return "{}";
- }
-
- using ImageAspectFlags = Flags<ImageAspectFlagBits>;
-
template <>
struct FlagTraits<ImageAspectFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( ImageAspectFlagBits::eColor ) | VkFlags( ImageAspectFlagBits::eDepth ) |
- VkFlags( ImageAspectFlagBits::eStencil ) | VkFlags( ImageAspectFlagBits::eMetadata ) |
- VkFlags( ImageAspectFlagBits::ePlane0 ) | VkFlags( ImageAspectFlagBits::ePlane1 ) |
- VkFlags( ImageAspectFlagBits::ePlane2 ) | VkFlags( ImageAspectFlagBits::eNone ) |
- VkFlags( ImageAspectFlagBits::eMemoryPlane0EXT ) | VkFlags( ImageAspectFlagBits::eMemoryPlane1EXT ) |
- VkFlags( ImageAspectFlagBits::eMemoryPlane2EXT ) | VkFlags( ImageAspectFlagBits::eMemoryPlane3EXT )
+ allFlags = VkFlags( ImageAspectFlagBits::eColor ) | VkFlags( ImageAspectFlagBits::eDepth ) | VkFlags( ImageAspectFlagBits::eStencil ) |
+ VkFlags( ImageAspectFlagBits::eMetadata ) | VkFlags( ImageAspectFlagBits::ePlane0 ) | VkFlags( ImageAspectFlagBits::ePlane1 ) |
+ VkFlags( ImageAspectFlagBits::ePlane2 ) | VkFlags( ImageAspectFlagBits::eNone ) | VkFlags( ImageAspectFlagBits::eMemoryPlane0EXT ) |
+ VkFlags( ImageAspectFlagBits::eMemoryPlane1EXT ) | VkFlags( ImageAspectFlagBits::eMemoryPlane2EXT ) |
+ VkFlags( ImageAspectFlagBits::eMemoryPlane3EXT )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator|( ImageAspectFlagBits bit0,
- ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ImageAspectFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator&( ImageAspectFlagBits bit0,
- ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ImageAspectFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator^( ImageAspectFlagBits bit0,
- ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ImageAspectFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator~( ImageAspectFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( ImageAspectFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( ImageAspectFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & ImageAspectFlagBits::eColor )
- result += "Color | ";
- if ( value & ImageAspectFlagBits::eDepth )
- result += "Depth | ";
- if ( value & ImageAspectFlagBits::eStencil )
- result += "Stencil | ";
- if ( value & ImageAspectFlagBits::eMetadata )
- result += "Metadata | ";
- if ( value & ImageAspectFlagBits::ePlane0 )
- result += "Plane0 | ";
- if ( value & ImageAspectFlagBits::ePlane1 )
- result += "Plane1 | ";
- if ( value & ImageAspectFlagBits::ePlane2 )
- result += "Plane2 | ";
- if ( value & ImageAspectFlagBits::eMemoryPlane0EXT )
- result += "MemoryPlane0EXT | ";
- if ( value & ImageAspectFlagBits::eMemoryPlane1EXT )
- result += "MemoryPlane1EXT | ";
- if ( value & ImageAspectFlagBits::eMemoryPlane2EXT )
- result += "MemoryPlane2EXT | ";
- if ( value & ImageAspectFlagBits::eMemoryPlane3EXT )
- result += "MemoryPlane3EXT | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using SparseImageFormatFlags = Flags<SparseImageFormatFlagBits>;
+ using ImageAspectFlags = Flags<ImageAspectFlagBits>;
template <>
struct FlagTraits<SparseImageFormatFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( SparseImageFormatFlagBits::eSingleMiptail ) |
- VkFlags( SparseImageFormatFlagBits::eAlignedMipSize ) |
+ allFlags = VkFlags( SparseImageFormatFlagBits::eSingleMiptail ) | VkFlags( SparseImageFormatFlagBits::eAlignedMipSize ) |
VkFlags( SparseImageFormatFlagBits::eNonstandardBlockSize )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags
- operator|( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SparseImageFormatFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags
- operator&( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SparseImageFormatFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags
- operator^( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SparseImageFormatFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator~( SparseImageFormatFlagBits bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( SparseImageFormatFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & SparseImageFormatFlagBits::eSingleMiptail )
- result += "SingleMiptail | ";
- if ( value & SparseImageFormatFlagBits::eAlignedMipSize )
- result += "AlignedMipSize | ";
- if ( value & SparseImageFormatFlagBits::eNonstandardBlockSize )
- result += "NonstandardBlockSize | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using SparseMemoryBindFlags = Flags<SparseMemoryBindFlagBits>;
+ using SparseImageFormatFlags = Flags<SparseImageFormatFlagBits>;
template <>
struct FlagTraits<SparseMemoryBindFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( SparseMemoryBindFlagBits::eMetadata )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags
- operator|( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SparseMemoryBindFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags
- operator&( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SparseMemoryBindFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags
- operator^( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SparseMemoryBindFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator~( SparseMemoryBindFlagBits bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( SparseMemoryBindFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & SparseMemoryBindFlagBits::eMetadata )
- result += "Metadata | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using FenceCreateFlags = Flags<FenceCreateFlagBits>;
+ using SparseMemoryBindFlags = Flags<SparseMemoryBindFlagBits>;
template <>
struct FlagTraits<FenceCreateFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( FenceCreateFlagBits::eSignaled )
};
};
+ using FenceCreateFlags = Flags<FenceCreateFlagBits>;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator|( FenceCreateFlagBits bit0,
- FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return FenceCreateFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator&( FenceCreateFlagBits bit0,
- FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return FenceCreateFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator^( FenceCreateFlagBits bit0,
- FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return FenceCreateFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator~( FenceCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( FenceCreateFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( FenceCreateFlags value )
+ template <>
+ struct FlagTraits<SemaphoreCreateFlagBits>
{
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & FenceCreateFlagBits::eSignaled )
- result += "Signaled | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
using SemaphoreCreateFlags = Flags<SemaphoreCreateFlagBits>;
- VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlags )
- {
- return "{}";
- }
-
- using EventCreateFlags = Flags<EventCreateFlagBits>;
-
template <>
struct FlagTraits<EventCreateFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( EventCreateFlagBits::eDeviceOnly )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator|( EventCreateFlagBits bit0,
- EventCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return EventCreateFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator&( EventCreateFlagBits bit0,
- EventCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return EventCreateFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator^( EventCreateFlagBits bit0,
- EventCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return EventCreateFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator~( EventCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( EventCreateFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( EventCreateFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & EventCreateFlagBits::eDeviceOnly )
- result += "DeviceOnly | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using QueryPipelineStatisticFlags = Flags<QueryPipelineStatisticFlagBits>;
+ using EventCreateFlags = Flags<EventCreateFlagBits>;
template <>
struct FlagTraits<QueryPipelineStatisticFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( QueryPipelineStatisticFlagBits::eInputAssemblyVertices ) |
- VkFlags( QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives ) |
- VkFlags( QueryPipelineStatisticFlagBits::eVertexShaderInvocations ) |
- VkFlags( QueryPipelineStatisticFlagBits::eGeometryShaderInvocations ) |
- VkFlags( QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives ) |
- VkFlags( QueryPipelineStatisticFlagBits::eClippingInvocations ) |
- VkFlags( QueryPipelineStatisticFlagBits::eClippingPrimitives ) |
- VkFlags( QueryPipelineStatisticFlagBits::eFragmentShaderInvocations ) |
+ allFlags = VkFlags( QueryPipelineStatisticFlagBits::eInputAssemblyVertices ) | VkFlags( QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives ) |
+ VkFlags( QueryPipelineStatisticFlagBits::eVertexShaderInvocations ) | VkFlags( QueryPipelineStatisticFlagBits::eGeometryShaderInvocations ) |
+ VkFlags( QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives ) | VkFlags( QueryPipelineStatisticFlagBits::eClippingInvocations ) |
+ VkFlags( QueryPipelineStatisticFlagBits::eClippingPrimitives ) | VkFlags( QueryPipelineStatisticFlagBits::eFragmentShaderInvocations ) |
VkFlags( QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches ) |
VkFlags( QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations ) |
- VkFlags( QueryPipelineStatisticFlagBits::eComputeShaderInvocations )
+ VkFlags( QueryPipelineStatisticFlagBits::eComputeShaderInvocations ) | VkFlags( QueryPipelineStatisticFlagBits::eTaskShaderInvocationsEXT ) |
+ VkFlags( QueryPipelineStatisticFlagBits::eMeshShaderInvocationsEXT )
};
};
+ using QueryPipelineStatisticFlags = Flags<QueryPipelineStatisticFlagBits>;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags
- operator|( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return QueryPipelineStatisticFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags
- operator&( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return QueryPipelineStatisticFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags
- operator^( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return QueryPipelineStatisticFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator~( QueryPipelineStatisticFlagBits bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( QueryPipelineStatisticFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlags value )
+ template <>
+ struct FlagTraits<QueryPoolCreateFlagBits>
{
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyVertices )
- result += "InputAssemblyVertices | ";
- if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives )
- result += "InputAssemblyPrimitives | ";
- if ( value & QueryPipelineStatisticFlagBits::eVertexShaderInvocations )
- result += "VertexShaderInvocations | ";
- if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderInvocations )
- result += "GeometryShaderInvocations | ";
- if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives )
- result += "GeometryShaderPrimitives | ";
- if ( value & QueryPipelineStatisticFlagBits::eClippingInvocations )
- result += "ClippingInvocations | ";
- if ( value & QueryPipelineStatisticFlagBits::eClippingPrimitives )
- result += "ClippingPrimitives | ";
- if ( value & QueryPipelineStatisticFlagBits::eFragmentShaderInvocations )
- result += "FragmentShaderInvocations | ";
- if ( value & QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches )
- result += "TessellationControlShaderPatches | ";
- if ( value & QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations )
- result += "TessellationEvaluationShaderInvocations | ";
- if ( value & QueryPipelineStatisticFlagBits::eComputeShaderInvocations )
- result += "ComputeShaderInvocations | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
using QueryPoolCreateFlags = Flags<QueryPoolCreateFlagBits>;
- VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlags )
- {
- return "{}";
- }
-
- using QueryResultFlags = Flags<QueryResultFlagBits>;
-
template <>
struct FlagTraits<QueryResultFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( QueryResultFlagBits::e64 ) | VkFlags( QueryResultFlagBits::eWait ) |
- VkFlags( QueryResultFlagBits::eWithAvailability ) | VkFlags( QueryResultFlagBits::ePartial )
+ allFlags = VkFlags( QueryResultFlagBits::e64 ) | VkFlags( QueryResultFlagBits::eWait ) | VkFlags( QueryResultFlagBits::eWithAvailability ) |
+ VkFlags( QueryResultFlagBits::ePartial )
#if defined( VK_ENABLE_BETA_EXTENSIONS )
| VkFlags( QueryResultFlagBits::eWithStatusKHR )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator|( QueryResultFlagBits bit0,
- QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return QueryResultFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator&( QueryResultFlagBits bit0,
- QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return QueryResultFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator^( QueryResultFlagBits bit0,
- QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return QueryResultFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator~( QueryResultFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( QueryResultFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( QueryResultFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & QueryResultFlagBits::e64 )
- result += "64 | ";
- if ( value & QueryResultFlagBits::eWait )
- result += "Wait | ";
- if ( value & QueryResultFlagBits::eWithAvailability )
- result += "WithAvailability | ";
- if ( value & QueryResultFlagBits::ePartial )
- result += "Partial | ";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- if ( value & QueryResultFlagBits::eWithStatusKHR )
- result += "WithStatusKHR | ";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using BufferCreateFlags = Flags<BufferCreateFlagBits>;
+ using QueryResultFlags = Flags<QueryResultFlagBits>;
template <>
struct FlagTraits<BufferCreateFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( BufferCreateFlagBits::eSparseBinding ) | VkFlags( BufferCreateFlagBits::eSparseResidency ) |
@@ -12980,4510 +4816,1425 @@ namespace VULKAN_HPP_NAMESPACE
VkFlags( BufferCreateFlagBits::eDeviceAddressCaptureReplay )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator|( BufferCreateFlagBits bit0,
- BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return BufferCreateFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator&( BufferCreateFlagBits bit0,
- BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return BufferCreateFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator^( BufferCreateFlagBits bit0,
- BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return BufferCreateFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator~( BufferCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( BufferCreateFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( BufferCreateFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & BufferCreateFlagBits::eSparseBinding )
- result += "SparseBinding | ";
- if ( value & BufferCreateFlagBits::eSparseResidency )
- result += "SparseResidency | ";
- if ( value & BufferCreateFlagBits::eSparseAliased )
- result += "SparseAliased | ";
- if ( value & BufferCreateFlagBits::eProtected )
- result += "Protected | ";
- if ( value & BufferCreateFlagBits::eDeviceAddressCaptureReplay )
- result += "DeviceAddressCaptureReplay | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using BufferUsageFlags = Flags<BufferUsageFlagBits>;
+ using BufferCreateFlags = Flags<BufferCreateFlagBits>;
template <>
struct FlagTraits<BufferUsageFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags =
- VkFlags( BufferUsageFlagBits::eTransferSrc ) | VkFlags( BufferUsageFlagBits::eTransferDst ) |
- VkFlags( BufferUsageFlagBits::eUniformTexelBuffer ) | VkFlags( BufferUsageFlagBits::eStorageTexelBuffer ) |
- VkFlags( BufferUsageFlagBits::eUniformBuffer ) | VkFlags( BufferUsageFlagBits::eStorageBuffer ) |
- VkFlags( BufferUsageFlagBits::eIndexBuffer ) | VkFlags( BufferUsageFlagBits::eVertexBuffer ) |
- VkFlags( BufferUsageFlagBits::eIndirectBuffer ) | VkFlags( BufferUsageFlagBits::eShaderDeviceAddress )
+ allFlags = VkFlags( BufferUsageFlagBits::eTransferSrc ) | VkFlags( BufferUsageFlagBits::eTransferDst ) |
+ VkFlags( BufferUsageFlagBits::eUniformTexelBuffer ) | VkFlags( BufferUsageFlagBits::eStorageTexelBuffer ) |
+ VkFlags( BufferUsageFlagBits::eUniformBuffer ) | VkFlags( BufferUsageFlagBits::eStorageBuffer ) |
+ VkFlags( BufferUsageFlagBits::eIndexBuffer ) | VkFlags( BufferUsageFlagBits::eVertexBuffer ) |
+ VkFlags( BufferUsageFlagBits::eIndirectBuffer ) | VkFlags( BufferUsageFlagBits::eShaderDeviceAddress )
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- | VkFlags( BufferUsageFlagBits::eVideoDecodeSrcKHR ) | VkFlags( BufferUsageFlagBits::eVideoDecodeDstKHR )
+ | VkFlags( BufferUsageFlagBits::eVideoDecodeSrcKHR ) | VkFlags( BufferUsageFlagBits::eVideoDecodeDstKHR )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- | VkFlags( BufferUsageFlagBits::eTransformFeedbackBufferEXT ) |
- VkFlags( BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT ) |
- VkFlags( BufferUsageFlagBits::eConditionalRenderingEXT ) |
- VkFlags( BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR ) |
- VkFlags( BufferUsageFlagBits::eAccelerationStructureStorageKHR ) |
- VkFlags( BufferUsageFlagBits::eShaderBindingTableKHR )
+ | VkFlags( BufferUsageFlagBits::eTransformFeedbackBufferEXT ) | VkFlags( BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT ) |
+ VkFlags( BufferUsageFlagBits::eConditionalRenderingEXT ) | VkFlags( BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR ) |
+ VkFlags( BufferUsageFlagBits::eAccelerationStructureStorageKHR ) | VkFlags( BufferUsageFlagBits::eShaderBindingTableKHR )
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- | VkFlags( BufferUsageFlagBits::eVideoEncodeDstKHR ) | VkFlags( BufferUsageFlagBits::eVideoEncodeSrcKHR )
+ | VkFlags( BufferUsageFlagBits::eVideoEncodeDstKHR ) | VkFlags( BufferUsageFlagBits::eVideoEncodeSrcKHR )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ | VkFlags( BufferUsageFlagBits::eMicromapBuildInputReadOnlyEXT ) | VkFlags( BufferUsageFlagBits::eMicromapStorageEXT )
};
};
+ using BufferUsageFlags = Flags<BufferUsageFlagBits>;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator|( BufferUsageFlagBits bit0,
- BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return BufferUsageFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator&( BufferUsageFlagBits bit0,
- BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return BufferUsageFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator^( BufferUsageFlagBits bit0,
- BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return BufferUsageFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator~( BufferUsageFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( BufferUsageFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( BufferUsageFlags value )
+ template <>
+ struct FlagTraits<BufferViewCreateFlagBits>
{
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & BufferUsageFlagBits::eTransferSrc )
- result += "TransferSrc | ";
- if ( value & BufferUsageFlagBits::eTransferDst )
- result += "TransferDst | ";
- if ( value & BufferUsageFlagBits::eUniformTexelBuffer )
- result += "UniformTexelBuffer | ";
- if ( value & BufferUsageFlagBits::eStorageTexelBuffer )
- result += "StorageTexelBuffer | ";
- if ( value & BufferUsageFlagBits::eUniformBuffer )
- result += "UniformBuffer | ";
- if ( value & BufferUsageFlagBits::eStorageBuffer )
- result += "StorageBuffer | ";
- if ( value & BufferUsageFlagBits::eIndexBuffer )
- result += "IndexBuffer | ";
- if ( value & BufferUsageFlagBits::eVertexBuffer )
- result += "VertexBuffer | ";
- if ( value & BufferUsageFlagBits::eIndirectBuffer )
- result += "IndirectBuffer | ";
- if ( value & BufferUsageFlagBits::eShaderDeviceAddress )
- result += "ShaderDeviceAddress | ";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- if ( value & BufferUsageFlagBits::eVideoDecodeSrcKHR )
- result += "VideoDecodeSrcKHR | ";
- if ( value & BufferUsageFlagBits::eVideoDecodeDstKHR )
- result += "VideoDecodeDstKHR | ";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- if ( value & BufferUsageFlagBits::eTransformFeedbackBufferEXT )
- result += "TransformFeedbackBufferEXT | ";
- if ( value & BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT )
- result += "TransformFeedbackCounterBufferEXT | ";
- if ( value & BufferUsageFlagBits::eConditionalRenderingEXT )
- result += "ConditionalRenderingEXT | ";
- if ( value & BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR )
- result += "AccelerationStructureBuildInputReadOnlyKHR | ";
- if ( value & BufferUsageFlagBits::eAccelerationStructureStorageKHR )
- result += "AccelerationStructureStorageKHR | ";
- if ( value & BufferUsageFlagBits::eShaderBindingTableKHR )
- result += "ShaderBindingTableKHR | ";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- if ( value & BufferUsageFlagBits::eVideoEncodeDstKHR )
- result += "VideoEncodeDstKHR | ";
- if ( value & BufferUsageFlagBits::eVideoEncodeSrcKHR )
- result += "VideoEncodeSrcKHR | ";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
using BufferViewCreateFlags = Flags<BufferViewCreateFlagBits>;
- VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlags )
- {
- return "{}";
- }
-
- using ImageViewCreateFlags = Flags<ImageViewCreateFlagBits>;
-
template <>
struct FlagTraits<ImageViewCreateFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT ) |
- VkFlags( ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT )
+ allFlags = VkFlags( ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT ) | VkFlags( ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT )
};
};
+ using ImageViewCreateFlags = Flags<ImageViewCreateFlagBits>;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags
- operator|( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ImageViewCreateFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags
- operator&( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ImageViewCreateFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags
- operator^( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ImageViewCreateFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator~( ImageViewCreateFlagBits bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( ImageViewCreateFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlags value )
+ template <>
+ struct FlagTraits<ShaderModuleCreateFlagBits>
{
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT )
- result += "FragmentDensityMapDynamicEXT | ";
- if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT )
- result += "FragmentDensityMapDeferredEXT | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
using ShaderModuleCreateFlags = Flags<ShaderModuleCreateFlagBits>;
- VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlags )
- {
- return "{}";
- }
-
- using PipelineCacheCreateFlags = Flags<PipelineCacheCreateFlagBits>;
-
template <>
struct FlagTraits<PipelineCacheCreateFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( PipelineCacheCreateFlagBits::eExternallySynchronized )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags
- operator|( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineCacheCreateFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags
- operator&( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineCacheCreateFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags
- operator^( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineCacheCreateFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator~( PipelineCacheCreateFlagBits bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( PipelineCacheCreateFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & PipelineCacheCreateFlagBits::eExternallySynchronized )
- result += "ExternallySynchronized | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using ColorComponentFlags = Flags<ColorComponentFlagBits>;
+ using PipelineCacheCreateFlags = Flags<PipelineCacheCreateFlagBits>;
template <>
struct FlagTraits<ColorComponentFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( ColorComponentFlagBits::eR ) | VkFlags( ColorComponentFlagBits::eG ) |
- VkFlags( ColorComponentFlagBits::eB ) | VkFlags( ColorComponentFlagBits::eA )
+ allFlags = VkFlags( ColorComponentFlagBits::eR ) | VkFlags( ColorComponentFlagBits::eG ) | VkFlags( ColorComponentFlagBits::eB ) |
+ VkFlags( ColorComponentFlagBits::eA )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags
- operator|( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ColorComponentFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags
- operator&( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ColorComponentFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags
- operator^( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ColorComponentFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator~( ColorComponentFlagBits bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( ColorComponentFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( ColorComponentFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & ColorComponentFlagBits::eR )
- result += "R | ";
- if ( value & ColorComponentFlagBits::eG )
- result += "G | ";
- if ( value & ColorComponentFlagBits::eB )
- result += "B | ";
- if ( value & ColorComponentFlagBits::eA )
- result += "A | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using CullModeFlags = Flags<CullModeFlagBits>;
+ using ColorComponentFlags = Flags<ColorComponentFlagBits>;
template <>
struct FlagTraits<CullModeFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( CullModeFlagBits::eNone ) | VkFlags( CullModeFlagBits::eFront ) |
- VkFlags( CullModeFlagBits::eBack ) | VkFlags( CullModeFlagBits::eFrontAndBack )
+ allFlags = VkFlags( CullModeFlagBits::eNone ) | VkFlags( CullModeFlagBits::eFront ) | VkFlags( CullModeFlagBits::eBack ) |
+ VkFlags( CullModeFlagBits::eFrontAndBack )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator|( CullModeFlagBits bit0,
- CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CullModeFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator&( CullModeFlagBits bit0,
- CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CullModeFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator^( CullModeFlagBits bit0,
- CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CullModeFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator~( CullModeFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( CullModeFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( CullModeFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & CullModeFlagBits::eFront )
- result += "Front | ";
- if ( value & CullModeFlagBits::eBack )
- result += "Back | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using PipelineColorBlendStateCreateFlags = Flags<PipelineColorBlendStateCreateFlagBits>;
+ using CullModeFlags = Flags<CullModeFlagBits>;
template <>
struct FlagTraits<PipelineColorBlendStateCreateFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( PipelineColorBlendStateCreateFlagBits::eRasterizationOrderAttachmentAccessARM )
+ allFlags = VkFlags( PipelineColorBlendStateCreateFlagBits::eRasterizationOrderAttachmentAccessEXT )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineColorBlendStateCreateFlags operator|(
- PipelineColorBlendStateCreateFlagBits bit0, PipelineColorBlendStateCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineColorBlendStateCreateFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineColorBlendStateCreateFlags operator&(
- PipelineColorBlendStateCreateFlagBits bit0, PipelineColorBlendStateCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineColorBlendStateCreateFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineColorBlendStateCreateFlags operator^(
- PipelineColorBlendStateCreateFlagBits bit0, PipelineColorBlendStateCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineColorBlendStateCreateFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineColorBlendStateCreateFlags
- operator~( PipelineColorBlendStateCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( PipelineColorBlendStateCreateFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & PipelineColorBlendStateCreateFlagBits::eRasterizationOrderAttachmentAccessARM )
- result += "RasterizationOrderAttachmentAccessARM | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using PipelineCreateFlags = Flags<PipelineCreateFlagBits>;
+ using PipelineColorBlendStateCreateFlags = Flags<PipelineColorBlendStateCreateFlagBits>;
template <>
struct FlagTraits<PipelineCreateFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags =
- VkFlags( PipelineCreateFlagBits::eDisableOptimization ) | VkFlags( PipelineCreateFlagBits::eAllowDerivatives ) |
- VkFlags( PipelineCreateFlagBits::eDerivative ) | VkFlags( PipelineCreateFlagBits::eViewIndexFromDeviceIndex ) |
- VkFlags( PipelineCreateFlagBits::eDispatchBase ) |
- VkFlags( PipelineCreateFlagBits::eFailOnPipelineCompileRequired ) |
- VkFlags( PipelineCreateFlagBits::eEarlyReturnOnFailure ) |
- VkFlags( PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR ) |
- VkFlags( PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT ) |
- VkFlags( PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR ) |
- VkFlags( PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR ) |
- VkFlags( PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR ) |
- VkFlags( PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR ) |
- VkFlags( PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR ) |
- VkFlags( PipelineCreateFlagBits::eRayTracingSkipAabbsKHR ) |
- VkFlags( PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR ) |
- VkFlags( PipelineCreateFlagBits::eDeferCompileNV ) | VkFlags( PipelineCreateFlagBits::eCaptureStatisticsKHR ) |
- VkFlags( PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR ) |
- VkFlags( PipelineCreateFlagBits::eIndirectBindableNV ) | VkFlags( PipelineCreateFlagBits::eLibraryKHR ) |
- VkFlags( PipelineCreateFlagBits::eRayTracingAllowMotionNV )
+ allFlags = VkFlags( PipelineCreateFlagBits::eDisableOptimization ) | VkFlags( PipelineCreateFlagBits::eAllowDerivatives ) |
+ VkFlags( PipelineCreateFlagBits::eDerivative ) | VkFlags( PipelineCreateFlagBits::eViewIndexFromDeviceIndex ) |
+ VkFlags( PipelineCreateFlagBits::eDispatchBase ) | VkFlags( PipelineCreateFlagBits::eFailOnPipelineCompileRequired ) |
+ VkFlags( PipelineCreateFlagBits::eEarlyReturnOnFailure ) | VkFlags( PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR ) |
+ VkFlags( PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT ) |
+ VkFlags( PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR ) |
+ VkFlags( PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR ) | VkFlags( PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR ) |
+ VkFlags( PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR ) | VkFlags( PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR ) |
+ VkFlags( PipelineCreateFlagBits::eRayTracingSkipAabbsKHR ) | VkFlags( PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR ) |
+ VkFlags( PipelineCreateFlagBits::eDeferCompileNV ) | VkFlags( PipelineCreateFlagBits::eCaptureStatisticsKHR ) |
+ VkFlags( PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR ) | VkFlags( PipelineCreateFlagBits::eIndirectBindableNV ) |
+ VkFlags( PipelineCreateFlagBits::eLibraryKHR ) | VkFlags( PipelineCreateFlagBits::eRetainLinkTimeOptimizationInfoEXT ) |
+ VkFlags( PipelineCreateFlagBits::eLinkTimeOptimizationEXT ) | VkFlags( PipelineCreateFlagBits::eRayTracingAllowMotionNV ) |
+ VkFlags( PipelineCreateFlagBits::eColorAttachmentFeedbackLoopEXT ) |
+ VkFlags( PipelineCreateFlagBits::eDepthStencilAttachmentFeedbackLoopEXT ) | VkFlags( PipelineCreateFlagBits::eRayTracingOpacityMicromapEXT ) |
+ VkFlags( PipelineCreateFlagBits::eNoProtectedAccessEXT ) | VkFlags( PipelineCreateFlagBits::eProtectedAccessOnlyEXT )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags
- operator|( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineCreateFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags
- operator&( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineCreateFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags
- operator^( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineCreateFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator~( PipelineCreateFlagBits bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( PipelineCreateFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & PipelineCreateFlagBits::eDisableOptimization )
- result += "DisableOptimization | ";
- if ( value & PipelineCreateFlagBits::eAllowDerivatives )
- result += "AllowDerivatives | ";
- if ( value & PipelineCreateFlagBits::eDerivative )
- result += "Derivative | ";
- if ( value & PipelineCreateFlagBits::eViewIndexFromDeviceIndex )
- result += "ViewIndexFromDeviceIndex | ";
- if ( value & PipelineCreateFlagBits::eDispatchBase )
- result += "DispatchBase | ";
- if ( value & PipelineCreateFlagBits::eFailOnPipelineCompileRequired )
- result += "FailOnPipelineCompileRequired | ";
- if ( value & PipelineCreateFlagBits::eEarlyReturnOnFailure )
- result += "EarlyReturnOnFailure | ";
- if ( value & PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR )
- result += "RenderingFragmentShadingRateAttachmentKHR | ";
- if ( value & PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT )
- result += "RenderingFragmentDensityMapAttachmentEXT | ";
- if ( value & PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR )
- result += "RayTracingNoNullAnyHitShadersKHR | ";
- if ( value & PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR )
- result += "RayTracingNoNullClosestHitShadersKHR | ";
- if ( value & PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR )
- result += "RayTracingNoNullMissShadersKHR | ";
- if ( value & PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR )
- result += "RayTracingNoNullIntersectionShadersKHR | ";
- if ( value & PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR )
- result += "RayTracingSkipTrianglesKHR | ";
- if ( value & PipelineCreateFlagBits::eRayTracingSkipAabbsKHR )
- result += "RayTracingSkipAabbsKHR | ";
- if ( value & PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR )
- result += "RayTracingShaderGroupHandleCaptureReplayKHR | ";
- if ( value & PipelineCreateFlagBits::eDeferCompileNV )
- result += "DeferCompileNV | ";
- if ( value & PipelineCreateFlagBits::eCaptureStatisticsKHR )
- result += "CaptureStatisticsKHR | ";
- if ( value & PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR )
- result += "CaptureInternalRepresentationsKHR | ";
- if ( value & PipelineCreateFlagBits::eIndirectBindableNV )
- result += "IndirectBindableNV | ";
- if ( value & PipelineCreateFlagBits::eLibraryKHR )
- result += "LibraryKHR | ";
- if ( value & PipelineCreateFlagBits::eRayTracingAllowMotionNV )
- result += "RayTracingAllowMotionNV | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using PipelineDepthStencilStateCreateFlags = Flags<PipelineDepthStencilStateCreateFlagBits>;
+ using PipelineCreateFlags = Flags<PipelineCreateFlagBits>;
template <>
struct FlagTraits<PipelineDepthStencilStateCreateFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentDepthAccessARM ) |
- VkFlags( PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentStencilAccessARM )
+ allFlags = VkFlags( PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentDepthAccessEXT ) |
+ VkFlags( PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentStencilAccessEXT )
};
};
+ using PipelineDepthStencilStateCreateFlags = Flags<PipelineDepthStencilStateCreateFlagBits>;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateFlags operator|(
- PipelineDepthStencilStateCreateFlagBits bit0, PipelineDepthStencilStateCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineDepthStencilStateCreateFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateFlags operator&(
- PipelineDepthStencilStateCreateFlagBits bit0, PipelineDepthStencilStateCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineDepthStencilStateCreateFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateFlags operator^(
- PipelineDepthStencilStateCreateFlagBits bit0, PipelineDepthStencilStateCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineDepthStencilStateCreateFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateFlags
- operator~( PipelineDepthStencilStateCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( PipelineDepthStencilStateCreateFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlags value )
+ template <>
+ struct FlagTraits<PipelineDynamicStateCreateFlagBits>
{
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentDepthAccessARM )
- result += "RasterizationOrderAttachmentDepthAccessARM | ";
- if ( value & PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentStencilAccessARM )
- result += "RasterizationOrderAttachmentStencilAccessARM | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
using PipelineDynamicStateCreateFlags = Flags<PipelineDynamicStateCreateFlagBits>;
- VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlags )
+ template <>
+ struct FlagTraits<PipelineInputAssemblyStateCreateFlagBits>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
using PipelineInputAssemblyStateCreateFlags = Flags<PipelineInputAssemblyStateCreateFlagBits>;
- VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlags )
+ template <>
+ struct FlagTraits<PipelineLayoutCreateFlagBits>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ enum : VkFlags
+ {
+ allFlags = VkFlags( PipelineLayoutCreateFlagBits::eIndependentSetsEXT )
+ };
+ };
using PipelineLayoutCreateFlags = Flags<PipelineLayoutCreateFlagBits>;
- VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlags )
+ template <>
+ struct FlagTraits<PipelineMultisampleStateCreateFlagBits>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
using PipelineMultisampleStateCreateFlags = Flags<PipelineMultisampleStateCreateFlagBits>;
- VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlags )
+ template <>
+ struct FlagTraits<PipelineRasterizationStateCreateFlagBits>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
using PipelineRasterizationStateCreateFlags = Flags<PipelineRasterizationStateCreateFlagBits>;
- VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlags )
- {
- return "{}";
- }
-
- using PipelineShaderStageCreateFlags = Flags<PipelineShaderStageCreateFlagBits>;
-
template <>
struct FlagTraits<PipelineShaderStageCreateFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSize ) |
- VkFlags( PipelineShaderStageCreateFlagBits::eRequireFullSubgroups )
+ allFlags = VkFlags( PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSize ) | VkFlags( PipelineShaderStageCreateFlagBits::eRequireFullSubgroups )
};
};
+ using PipelineShaderStageCreateFlags = Flags<PipelineShaderStageCreateFlagBits>;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags
- operator|( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineShaderStageCreateFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags
- operator&( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineShaderStageCreateFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags
- operator^( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineShaderStageCreateFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags
- operator~( PipelineShaderStageCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( PipelineShaderStageCreateFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlags value )
+ template <>
+ struct FlagTraits<PipelineTessellationStateCreateFlagBits>
{
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSize )
- result += "AllowVaryingSubgroupSize | ";
- if ( value & PipelineShaderStageCreateFlagBits::eRequireFullSubgroups )
- result += "RequireFullSubgroups | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
using PipelineTessellationStateCreateFlags = Flags<PipelineTessellationStateCreateFlagBits>;
- VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlags )
+ template <>
+ struct FlagTraits<PipelineVertexInputStateCreateFlagBits>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
using PipelineVertexInputStateCreateFlags = Flags<PipelineVertexInputStateCreateFlagBits>;
- VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlags )
+ template <>
+ struct FlagTraits<PipelineViewportStateCreateFlagBits>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
using PipelineViewportStateCreateFlags = Flags<PipelineViewportStateCreateFlagBits>;
- VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlags )
- {
- return "{}";
- }
-
- using ShaderStageFlags = Flags<ShaderStageFlagBits>;
-
template <>
struct FlagTraits<ShaderStageFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( ShaderStageFlagBits::eVertex ) | VkFlags( ShaderStageFlagBits::eTessellationControl ) |
VkFlags( ShaderStageFlagBits::eTessellationEvaluation ) | VkFlags( ShaderStageFlagBits::eGeometry ) |
- VkFlags( ShaderStageFlagBits::eFragment ) | VkFlags( ShaderStageFlagBits::eCompute ) |
- VkFlags( ShaderStageFlagBits::eAllGraphics ) | VkFlags( ShaderStageFlagBits::eAll ) |
- VkFlags( ShaderStageFlagBits::eRaygenKHR ) | VkFlags( ShaderStageFlagBits::eAnyHitKHR ) |
- VkFlags( ShaderStageFlagBits::eClosestHitKHR ) | VkFlags( ShaderStageFlagBits::eMissKHR ) |
- VkFlags( ShaderStageFlagBits::eIntersectionKHR ) | VkFlags( ShaderStageFlagBits::eCallableKHR ) |
- VkFlags( ShaderStageFlagBits::eTaskNV ) | VkFlags( ShaderStageFlagBits::eMeshNV ) |
+ VkFlags( ShaderStageFlagBits::eFragment ) | VkFlags( ShaderStageFlagBits::eCompute ) | VkFlags( ShaderStageFlagBits::eAllGraphics ) |
+ VkFlags( ShaderStageFlagBits::eAll ) | VkFlags( ShaderStageFlagBits::eRaygenKHR ) | VkFlags( ShaderStageFlagBits::eAnyHitKHR ) |
+ VkFlags( ShaderStageFlagBits::eClosestHitKHR ) | VkFlags( ShaderStageFlagBits::eMissKHR ) | VkFlags( ShaderStageFlagBits::eIntersectionKHR ) |
+ VkFlags( ShaderStageFlagBits::eCallableKHR ) | VkFlags( ShaderStageFlagBits::eTaskEXT ) | VkFlags( ShaderStageFlagBits::eMeshEXT ) |
VkFlags( ShaderStageFlagBits::eSubpassShadingHUAWEI )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator|( ShaderStageFlagBits bit0,
- ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ShaderStageFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator&( ShaderStageFlagBits bit0,
- ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ShaderStageFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator^( ShaderStageFlagBits bit0,
- ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ShaderStageFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator~( ShaderStageFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( ShaderStageFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( ShaderStageFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & ShaderStageFlagBits::eVertex )
- result += "Vertex | ";
- if ( value & ShaderStageFlagBits::eTessellationControl )
- result += "TessellationControl | ";
- if ( value & ShaderStageFlagBits::eTessellationEvaluation )
- result += "TessellationEvaluation | ";
- if ( value & ShaderStageFlagBits::eGeometry )
- result += "Geometry | ";
- if ( value & ShaderStageFlagBits::eFragment )
- result += "Fragment | ";
- if ( value & ShaderStageFlagBits::eCompute )
- result += "Compute | ";
- if ( value & ShaderStageFlagBits::eRaygenKHR )
- result += "RaygenKHR | ";
- if ( value & ShaderStageFlagBits::eAnyHitKHR )
- result += "AnyHitKHR | ";
- if ( value & ShaderStageFlagBits::eClosestHitKHR )
- result += "ClosestHitKHR | ";
- if ( value & ShaderStageFlagBits::eMissKHR )
- result += "MissKHR | ";
- if ( value & ShaderStageFlagBits::eIntersectionKHR )
- result += "IntersectionKHR | ";
- if ( value & ShaderStageFlagBits::eCallableKHR )
- result += "CallableKHR | ";
- if ( value & ShaderStageFlagBits::eTaskNV )
- result += "TaskNV | ";
- if ( value & ShaderStageFlagBits::eMeshNV )
- result += "MeshNV | ";
- if ( value & ShaderStageFlagBits::eSubpassShadingHUAWEI )
- result += "SubpassShadingHUAWEI | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using SamplerCreateFlags = Flags<SamplerCreateFlagBits>;
+ using ShaderStageFlags = Flags<ShaderStageFlagBits>;
template <>
struct FlagTraits<SamplerCreateFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( SamplerCreateFlagBits::eSubsampledEXT ) |
- VkFlags( SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT )
+ allFlags = VkFlags( SamplerCreateFlagBits::eSubsampledEXT ) | VkFlags( SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT ) |
+ VkFlags( SamplerCreateFlagBits::eNonSeamlessCubeMapEXT ) | VkFlags( SamplerCreateFlagBits::eImageProcessingQCOM )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator|( SamplerCreateFlagBits bit0,
- SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SamplerCreateFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator&( SamplerCreateFlagBits bit0,
- SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SamplerCreateFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator^( SamplerCreateFlagBits bit0,
- SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SamplerCreateFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator~( SamplerCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( SamplerCreateFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & SamplerCreateFlagBits::eSubsampledEXT )
- result += "SubsampledEXT | ";
- if ( value & SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT )
- result += "SubsampledCoarseReconstructionEXT | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using DescriptorPoolCreateFlags = Flags<DescriptorPoolCreateFlagBits>;
+ using SamplerCreateFlags = Flags<SamplerCreateFlagBits>;
template <>
struct FlagTraits<DescriptorPoolCreateFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( DescriptorPoolCreateFlagBits::eFreeDescriptorSet ) |
- VkFlags( DescriptorPoolCreateFlagBits::eUpdateAfterBind ) |
- VkFlags( DescriptorPoolCreateFlagBits::eHostOnlyVALVE )
+ allFlags = VkFlags( DescriptorPoolCreateFlagBits::eFreeDescriptorSet ) | VkFlags( DescriptorPoolCreateFlagBits::eUpdateAfterBind ) |
+ VkFlags( DescriptorPoolCreateFlagBits::eHostOnlyEXT )
};
};
+ using DescriptorPoolCreateFlags = Flags<DescriptorPoolCreateFlagBits>;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags
- operator|( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DescriptorPoolCreateFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags
- operator&( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DescriptorPoolCreateFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags
- operator^( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DescriptorPoolCreateFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator~( DescriptorPoolCreateFlagBits bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( DescriptorPoolCreateFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlags value )
+ template <>
+ struct FlagTraits<DescriptorPoolResetFlagBits>
{
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & DescriptorPoolCreateFlagBits::eFreeDescriptorSet )
- result += "FreeDescriptorSet | ";
- if ( value & DescriptorPoolCreateFlagBits::eUpdateAfterBind )
- result += "UpdateAfterBind | ";
- if ( value & DescriptorPoolCreateFlagBits::eHostOnlyVALVE )
- result += "HostOnlyVALVE | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
using DescriptorPoolResetFlags = Flags<DescriptorPoolResetFlagBits>;
- VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlags )
- {
- return "{}";
- }
-
- using DescriptorSetLayoutCreateFlags = Flags<DescriptorSetLayoutCreateFlagBits>;
-
template <>
struct FlagTraits<DescriptorSetLayoutCreateFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool ) |
- VkFlags( DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR ) |
- VkFlags( DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolVALVE )
+ allFlags = VkFlags( DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool ) | VkFlags( DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR ) |
+ VkFlags( DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags
- operator|( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DescriptorSetLayoutCreateFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags
- operator&( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DescriptorSetLayoutCreateFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags
- operator^( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DescriptorSetLayoutCreateFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags
- operator~( DescriptorSetLayoutCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( DescriptorSetLayoutCreateFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool )
- result += "UpdateAfterBindPool | ";
- if ( value & DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR )
- result += "PushDescriptorKHR | ";
- if ( value & DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolVALVE )
- result += "HostOnlyPoolVALVE | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using AccessFlags = Flags<AccessFlagBits>;
+ using DescriptorSetLayoutCreateFlags = Flags<DescriptorSetLayoutCreateFlagBits>;
template <>
struct FlagTraits<AccessFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags =
- VkFlags( AccessFlagBits::eIndirectCommandRead ) | VkFlags( AccessFlagBits::eIndexRead ) |
- VkFlags( AccessFlagBits::eVertexAttributeRead ) | VkFlags( AccessFlagBits::eUniformRead ) |
- VkFlags( AccessFlagBits::eInputAttachmentRead ) | VkFlags( AccessFlagBits::eShaderRead ) |
- VkFlags( AccessFlagBits::eShaderWrite ) | VkFlags( AccessFlagBits::eColorAttachmentRead ) |
- VkFlags( AccessFlagBits::eColorAttachmentWrite ) | VkFlags( AccessFlagBits::eDepthStencilAttachmentRead ) |
- VkFlags( AccessFlagBits::eDepthStencilAttachmentWrite ) | VkFlags( AccessFlagBits::eTransferRead ) |
- VkFlags( AccessFlagBits::eTransferWrite ) | VkFlags( AccessFlagBits::eHostRead ) |
- VkFlags( AccessFlagBits::eHostWrite ) | VkFlags( AccessFlagBits::eMemoryRead ) |
- VkFlags( AccessFlagBits::eMemoryWrite ) | VkFlags( AccessFlagBits::eNone ) |
- VkFlags( AccessFlagBits::eTransformFeedbackWriteEXT ) |
- VkFlags( AccessFlagBits::eTransformFeedbackCounterReadEXT ) |
- VkFlags( AccessFlagBits::eTransformFeedbackCounterWriteEXT ) |
- VkFlags( AccessFlagBits::eConditionalRenderingReadEXT ) |
- VkFlags( AccessFlagBits::eColorAttachmentReadNoncoherentEXT ) |
- VkFlags( AccessFlagBits::eAccelerationStructureReadKHR ) |
- VkFlags( AccessFlagBits::eAccelerationStructureWriteKHR ) |
- VkFlags( AccessFlagBits::eFragmentDensityMapReadEXT ) |
- VkFlags( AccessFlagBits::eFragmentShadingRateAttachmentReadKHR ) |
- VkFlags( AccessFlagBits::eCommandPreprocessReadNV ) | VkFlags( AccessFlagBits::eCommandPreprocessWriteNV )
+ allFlags = VkFlags( AccessFlagBits::eIndirectCommandRead ) | VkFlags( AccessFlagBits::eIndexRead ) | VkFlags( AccessFlagBits::eVertexAttributeRead ) |
+ VkFlags( AccessFlagBits::eUniformRead ) | VkFlags( AccessFlagBits::eInputAttachmentRead ) | VkFlags( AccessFlagBits::eShaderRead ) |
+ VkFlags( AccessFlagBits::eShaderWrite ) | VkFlags( AccessFlagBits::eColorAttachmentRead ) | VkFlags( AccessFlagBits::eColorAttachmentWrite ) |
+ VkFlags( AccessFlagBits::eDepthStencilAttachmentRead ) | VkFlags( AccessFlagBits::eDepthStencilAttachmentWrite ) |
+ VkFlags( AccessFlagBits::eTransferRead ) | VkFlags( AccessFlagBits::eTransferWrite ) | VkFlags( AccessFlagBits::eHostRead ) |
+ VkFlags( AccessFlagBits::eHostWrite ) | VkFlags( AccessFlagBits::eMemoryRead ) | VkFlags( AccessFlagBits::eMemoryWrite ) |
+ VkFlags( AccessFlagBits::eNone ) | VkFlags( AccessFlagBits::eTransformFeedbackWriteEXT ) |
+ VkFlags( AccessFlagBits::eTransformFeedbackCounterReadEXT ) | VkFlags( AccessFlagBits::eTransformFeedbackCounterWriteEXT ) |
+ VkFlags( AccessFlagBits::eConditionalRenderingReadEXT ) | VkFlags( AccessFlagBits::eColorAttachmentReadNoncoherentEXT ) |
+ VkFlags( AccessFlagBits::eAccelerationStructureReadKHR ) | VkFlags( AccessFlagBits::eAccelerationStructureWriteKHR ) |
+ VkFlags( AccessFlagBits::eFragmentDensityMapReadEXT ) | VkFlags( AccessFlagBits::eFragmentShadingRateAttachmentReadKHR ) |
+ VkFlags( AccessFlagBits::eCommandPreprocessReadNV ) | VkFlags( AccessFlagBits::eCommandPreprocessWriteNV )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator|( AccessFlagBits bit0,
- AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return AccessFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator&( AccessFlagBits bit0,
- AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return AccessFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator^( AccessFlagBits bit0,
- AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return AccessFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator~( AccessFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( AccessFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( AccessFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & AccessFlagBits::eIndirectCommandRead )
- result += "IndirectCommandRead | ";
- if ( value & AccessFlagBits::eIndexRead )
- result += "IndexRead | ";
- if ( value & AccessFlagBits::eVertexAttributeRead )
- result += "VertexAttributeRead | ";
- if ( value & AccessFlagBits::eUniformRead )
- result += "UniformRead | ";
- if ( value & AccessFlagBits::eInputAttachmentRead )
- result += "InputAttachmentRead | ";
- if ( value & AccessFlagBits::eShaderRead )
- result += "ShaderRead | ";
- if ( value & AccessFlagBits::eShaderWrite )
- result += "ShaderWrite | ";
- if ( value & AccessFlagBits::eColorAttachmentRead )
- result += "ColorAttachmentRead | ";
- if ( value & AccessFlagBits::eColorAttachmentWrite )
- result += "ColorAttachmentWrite | ";
- if ( value & AccessFlagBits::eDepthStencilAttachmentRead )
- result += "DepthStencilAttachmentRead | ";
- if ( value & AccessFlagBits::eDepthStencilAttachmentWrite )
- result += "DepthStencilAttachmentWrite | ";
- if ( value & AccessFlagBits::eTransferRead )
- result += "TransferRead | ";
- if ( value & AccessFlagBits::eTransferWrite )
- result += "TransferWrite | ";
- if ( value & AccessFlagBits::eHostRead )
- result += "HostRead | ";
- if ( value & AccessFlagBits::eHostWrite )
- result += "HostWrite | ";
- if ( value & AccessFlagBits::eMemoryRead )
- result += "MemoryRead | ";
- if ( value & AccessFlagBits::eMemoryWrite )
- result += "MemoryWrite | ";
- if ( value & AccessFlagBits::eTransformFeedbackWriteEXT )
- result += "TransformFeedbackWriteEXT | ";
- if ( value & AccessFlagBits::eTransformFeedbackCounterReadEXT )
- result += "TransformFeedbackCounterReadEXT | ";
- if ( value & AccessFlagBits::eTransformFeedbackCounterWriteEXT )
- result += "TransformFeedbackCounterWriteEXT | ";
- if ( value & AccessFlagBits::eConditionalRenderingReadEXT )
- result += "ConditionalRenderingReadEXT | ";
- if ( value & AccessFlagBits::eColorAttachmentReadNoncoherentEXT )
- result += "ColorAttachmentReadNoncoherentEXT | ";
- if ( value & AccessFlagBits::eAccelerationStructureReadKHR )
- result += "AccelerationStructureReadKHR | ";
- if ( value & AccessFlagBits::eAccelerationStructureWriteKHR )
- result += "AccelerationStructureWriteKHR | ";
- if ( value & AccessFlagBits::eFragmentDensityMapReadEXT )
- result += "FragmentDensityMapReadEXT | ";
- if ( value & AccessFlagBits::eFragmentShadingRateAttachmentReadKHR )
- result += "FragmentShadingRateAttachmentReadKHR | ";
- if ( value & AccessFlagBits::eCommandPreprocessReadNV )
- result += "CommandPreprocessReadNV | ";
- if ( value & AccessFlagBits::eCommandPreprocessWriteNV )
- result += "CommandPreprocessWriteNV | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using AttachmentDescriptionFlags = Flags<AttachmentDescriptionFlagBits>;
+ using AccessFlags = Flags<AccessFlagBits>;
template <>
struct FlagTraits<AttachmentDescriptionFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( AttachmentDescriptionFlagBits::eMayAlias )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags
- operator|( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return AttachmentDescriptionFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags
- operator&( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return AttachmentDescriptionFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags
- operator^( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return AttachmentDescriptionFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator~( AttachmentDescriptionFlagBits bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( AttachmentDescriptionFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & AttachmentDescriptionFlagBits::eMayAlias )
- result += "MayAlias | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using DependencyFlags = Flags<DependencyFlagBits>;
+ using AttachmentDescriptionFlags = Flags<AttachmentDescriptionFlagBits>;
template <>
struct FlagTraits<DependencyFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( DependencyFlagBits::eByRegion ) | VkFlags( DependencyFlagBits::eDeviceGroup ) |
- VkFlags( DependencyFlagBits::eViewLocal )
+ allFlags = VkFlags( DependencyFlagBits::eByRegion ) | VkFlags( DependencyFlagBits::eDeviceGroup ) | VkFlags( DependencyFlagBits::eViewLocal ) |
+ VkFlags( DependencyFlagBits::eFeedbackLoopEXT )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator|( DependencyFlagBits bit0,
- DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DependencyFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator&( DependencyFlagBits bit0,
- DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DependencyFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator^( DependencyFlagBits bit0,
- DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DependencyFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator~( DependencyFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( DependencyFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( DependencyFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & DependencyFlagBits::eByRegion )
- result += "ByRegion | ";
- if ( value & DependencyFlagBits::eDeviceGroup )
- result += "DeviceGroup | ";
- if ( value & DependencyFlagBits::eViewLocal )
- result += "ViewLocal | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using FramebufferCreateFlags = Flags<FramebufferCreateFlagBits>;
+ using DependencyFlags = Flags<DependencyFlagBits>;
template <>
struct FlagTraits<FramebufferCreateFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( FramebufferCreateFlagBits::eImageless )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags
- operator|( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return FramebufferCreateFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags
- operator&( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return FramebufferCreateFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags
- operator^( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return FramebufferCreateFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator~( FramebufferCreateFlagBits bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( FramebufferCreateFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & FramebufferCreateFlagBits::eImageless )
- result += "Imageless | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using RenderPassCreateFlags = Flags<RenderPassCreateFlagBits>;
+ using FramebufferCreateFlags = Flags<FramebufferCreateFlagBits>;
template <>
struct FlagTraits<RenderPassCreateFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( RenderPassCreateFlagBits::eTransformQCOM )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags
- operator|( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return RenderPassCreateFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags
- operator&( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return RenderPassCreateFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags
- operator^( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return RenderPassCreateFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator~( RenderPassCreateFlagBits bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( RenderPassCreateFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & RenderPassCreateFlagBits::eTransformQCOM )
- result += "TransformQCOM | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using SubpassDescriptionFlags = Flags<SubpassDescriptionFlagBits>;
+ using RenderPassCreateFlags = Flags<RenderPassCreateFlagBits>;
template <>
struct FlagTraits<SubpassDescriptionFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( SubpassDescriptionFlagBits::ePerViewAttributesNVX ) |
- VkFlags( SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX ) |
- VkFlags( SubpassDescriptionFlagBits::eFragmentRegionQCOM ) |
- VkFlags( SubpassDescriptionFlagBits::eShaderResolveQCOM ) |
- VkFlags( SubpassDescriptionFlagBits::eRasterizationOrderAttachmentColorAccessARM ) |
- VkFlags( SubpassDescriptionFlagBits::eRasterizationOrderAttachmentDepthAccessARM ) |
- VkFlags( SubpassDescriptionFlagBits::eRasterizationOrderAttachmentStencilAccessARM )
+ allFlags = VkFlags( SubpassDescriptionFlagBits::ePerViewAttributesNVX ) | VkFlags( SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX ) |
+ VkFlags( SubpassDescriptionFlagBits::eFragmentRegionQCOM ) | VkFlags( SubpassDescriptionFlagBits::eShaderResolveQCOM ) |
+ VkFlags( SubpassDescriptionFlagBits::eRasterizationOrderAttachmentColorAccessEXT ) |
+ VkFlags( SubpassDescriptionFlagBits::eRasterizationOrderAttachmentDepthAccessEXT ) |
+ VkFlags( SubpassDescriptionFlagBits::eRasterizationOrderAttachmentStencilAccessEXT ) |
+ VkFlags( SubpassDescriptionFlagBits::eEnableLegacyDitheringEXT )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags
- operator|( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SubpassDescriptionFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags
- operator&( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SubpassDescriptionFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags
- operator^( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SubpassDescriptionFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator~( SubpassDescriptionFlagBits bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( SubpassDescriptionFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & SubpassDescriptionFlagBits::ePerViewAttributesNVX )
- result += "PerViewAttributesNVX | ";
- if ( value & SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX )
- result += "PerViewPositionXOnlyNVX | ";
- if ( value & SubpassDescriptionFlagBits::eFragmentRegionQCOM )
- result += "FragmentRegionQCOM | ";
- if ( value & SubpassDescriptionFlagBits::eShaderResolveQCOM )
- result += "ShaderResolveQCOM | ";
- if ( value & SubpassDescriptionFlagBits::eRasterizationOrderAttachmentColorAccessARM )
- result += "RasterizationOrderAttachmentColorAccessARM | ";
- if ( value & SubpassDescriptionFlagBits::eRasterizationOrderAttachmentDepthAccessARM )
- result += "RasterizationOrderAttachmentDepthAccessARM | ";
- if ( value & SubpassDescriptionFlagBits::eRasterizationOrderAttachmentStencilAccessARM )
- result += "RasterizationOrderAttachmentStencilAccessARM | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using CommandPoolCreateFlags = Flags<CommandPoolCreateFlagBits>;
+ using SubpassDescriptionFlags = Flags<SubpassDescriptionFlagBits>;
template <>
struct FlagTraits<CommandPoolCreateFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( CommandPoolCreateFlagBits::eTransient ) |
- VkFlags( CommandPoolCreateFlagBits::eResetCommandBuffer ) |
+ allFlags = VkFlags( CommandPoolCreateFlagBits::eTransient ) | VkFlags( CommandPoolCreateFlagBits::eResetCommandBuffer ) |
VkFlags( CommandPoolCreateFlagBits::eProtected )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags
- operator|( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CommandPoolCreateFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags
- operator&( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CommandPoolCreateFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags
- operator^( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CommandPoolCreateFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator~( CommandPoolCreateFlagBits bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( CommandPoolCreateFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & CommandPoolCreateFlagBits::eTransient )
- result += "Transient | ";
- if ( value & CommandPoolCreateFlagBits::eResetCommandBuffer )
- result += "ResetCommandBuffer | ";
- if ( value & CommandPoolCreateFlagBits::eProtected )
- result += "Protected | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using CommandPoolResetFlags = Flags<CommandPoolResetFlagBits>;
+ using CommandPoolCreateFlags = Flags<CommandPoolCreateFlagBits>;
template <>
struct FlagTraits<CommandPoolResetFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( CommandPoolResetFlagBits::eReleaseResources )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags
- operator|( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CommandPoolResetFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags
- operator&( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CommandPoolResetFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags
- operator^( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CommandPoolResetFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator~( CommandPoolResetFlagBits bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( CommandPoolResetFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & CommandPoolResetFlagBits::eReleaseResources )
- result += "ReleaseResources | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using CommandBufferResetFlags = Flags<CommandBufferResetFlagBits>;
+ using CommandPoolResetFlags = Flags<CommandPoolResetFlagBits>;
template <>
struct FlagTraits<CommandBufferResetFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( CommandBufferResetFlagBits::eReleaseResources )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags
- operator|( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CommandBufferResetFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags
- operator&( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CommandBufferResetFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags
- operator^( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CommandBufferResetFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator~( CommandBufferResetFlagBits bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( CommandBufferResetFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & CommandBufferResetFlagBits::eReleaseResources )
- result += "ReleaseResources | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using CommandBufferUsageFlags = Flags<CommandBufferUsageFlagBits>;
+ using CommandBufferResetFlags = Flags<CommandBufferResetFlagBits>;
template <>
struct FlagTraits<CommandBufferUsageFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( CommandBufferUsageFlagBits::eOneTimeSubmit ) |
- VkFlags( CommandBufferUsageFlagBits::eRenderPassContinue ) |
+ allFlags = VkFlags( CommandBufferUsageFlagBits::eOneTimeSubmit ) | VkFlags( CommandBufferUsageFlagBits::eRenderPassContinue ) |
VkFlags( CommandBufferUsageFlagBits::eSimultaneousUse )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags
- operator|( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CommandBufferUsageFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags
- operator&( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CommandBufferUsageFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags
- operator^( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CommandBufferUsageFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator~( CommandBufferUsageFlagBits bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( CommandBufferUsageFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & CommandBufferUsageFlagBits::eOneTimeSubmit )
- result += "OneTimeSubmit | ";
- if ( value & CommandBufferUsageFlagBits::eRenderPassContinue )
- result += "RenderPassContinue | ";
- if ( value & CommandBufferUsageFlagBits::eSimultaneousUse )
- result += "SimultaneousUse | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using QueryControlFlags = Flags<QueryControlFlagBits>;
+ using CommandBufferUsageFlags = Flags<CommandBufferUsageFlagBits>;
template <>
struct FlagTraits<QueryControlFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( QueryControlFlagBits::ePrecise )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator|( QueryControlFlagBits bit0,
- QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return QueryControlFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator&( QueryControlFlagBits bit0,
- QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return QueryControlFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator^( QueryControlFlagBits bit0,
- QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return QueryControlFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator~( QueryControlFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( QueryControlFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( QueryControlFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & QueryControlFlagBits::ePrecise )
- result += "Precise | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using StencilFaceFlags = Flags<StencilFaceFlagBits>;
+ using QueryControlFlags = Flags<QueryControlFlagBits>;
template <>
struct FlagTraits<StencilFaceFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( StencilFaceFlagBits::eFront ) | VkFlags( StencilFaceFlagBits::eBack ) |
- VkFlags( StencilFaceFlagBits::eFrontAndBack )
+ allFlags = VkFlags( StencilFaceFlagBits::eFront ) | VkFlags( StencilFaceFlagBits::eBack ) | VkFlags( StencilFaceFlagBits::eFrontAndBack )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator|( StencilFaceFlagBits bit0,
- StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return StencilFaceFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator&( StencilFaceFlagBits bit0,
- StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return StencilFaceFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator^( StencilFaceFlagBits bit0,
- StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return StencilFaceFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator~( StencilFaceFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( StencilFaceFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( StencilFaceFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & StencilFaceFlagBits::eFront )
- result += "Front | ";
- if ( value & StencilFaceFlagBits::eBack )
- result += "Back | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
+ using StencilFaceFlags = Flags<StencilFaceFlagBits>;
//=== VK_VERSION_1_1 ===
- using SubgroupFeatureFlags = Flags<SubgroupFeatureFlagBits>;
-
template <>
struct FlagTraits<SubgroupFeatureFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( SubgroupFeatureFlagBits::eBasic ) | VkFlags( SubgroupFeatureFlagBits::eVote ) |
- VkFlags( SubgroupFeatureFlagBits::eArithmetic ) | VkFlags( SubgroupFeatureFlagBits::eBallot ) |
- VkFlags( SubgroupFeatureFlagBits::eShuffle ) | VkFlags( SubgroupFeatureFlagBits::eShuffleRelative ) |
- VkFlags( SubgroupFeatureFlagBits::eClustered ) | VkFlags( SubgroupFeatureFlagBits::eQuad ) |
- VkFlags( SubgroupFeatureFlagBits::ePartitionedNV )
+ allFlags = VkFlags( SubgroupFeatureFlagBits::eBasic ) | VkFlags( SubgroupFeatureFlagBits::eVote ) | VkFlags( SubgroupFeatureFlagBits::eArithmetic ) |
+ VkFlags( SubgroupFeatureFlagBits::eBallot ) | VkFlags( SubgroupFeatureFlagBits::eShuffle ) |
+ VkFlags( SubgroupFeatureFlagBits::eShuffleRelative ) | VkFlags( SubgroupFeatureFlagBits::eClustered ) |
+ VkFlags( SubgroupFeatureFlagBits::eQuad ) | VkFlags( SubgroupFeatureFlagBits::ePartitionedNV )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags
- operator|( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SubgroupFeatureFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags
- operator&( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SubgroupFeatureFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags
- operator^( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SubgroupFeatureFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator~( SubgroupFeatureFlagBits bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( SubgroupFeatureFlags( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & SubgroupFeatureFlagBits::eBasic )
- result += "Basic | ";
- if ( value & SubgroupFeatureFlagBits::eVote )
- result += "Vote | ";
- if ( value & SubgroupFeatureFlagBits::eArithmetic )
- result += "Arithmetic | ";
- if ( value & SubgroupFeatureFlagBits::eBallot )
- result += "Ballot | ";
- if ( value & SubgroupFeatureFlagBits::eShuffle )
- result += "Shuffle | ";
- if ( value & SubgroupFeatureFlagBits::eShuffleRelative )
- result += "ShuffleRelative | ";
- if ( value & SubgroupFeatureFlagBits::eClustered )
- result += "Clustered | ";
- if ( value & SubgroupFeatureFlagBits::eQuad )
- result += "Quad | ";
- if ( value & SubgroupFeatureFlagBits::ePartitionedNV )
- result += "PartitionedNV | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using PeerMemoryFeatureFlags = Flags<PeerMemoryFeatureFlagBits>;
+ using SubgroupFeatureFlags = Flags<SubgroupFeatureFlagBits>;
template <>
struct FlagTraits<PeerMemoryFeatureFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( PeerMemoryFeatureFlagBits::eCopySrc ) | VkFlags( PeerMemoryFeatureFlagBits::eCopyDst ) |
VkFlags( PeerMemoryFeatureFlagBits::eGenericSrc ) | VkFlags( PeerMemoryFeatureFlagBits::eGenericDst )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags
- operator|( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PeerMemoryFeatureFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags
- operator&( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PeerMemoryFeatureFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags
- operator^( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PeerMemoryFeatureFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator~( PeerMemoryFeatureFlagBits bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( PeerMemoryFeatureFlags( bits ) );
- }
-
+ using PeerMemoryFeatureFlags = Flags<PeerMemoryFeatureFlagBits>;
using PeerMemoryFeatureFlagsKHR = PeerMemoryFeatureFlags;
- VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & PeerMemoryFeatureFlagBits::eCopySrc )
- result += "CopySrc | ";
- if ( value & PeerMemoryFeatureFlagBits::eCopyDst )
- result += "CopyDst | ";
- if ( value & PeerMemoryFeatureFlagBits::eGenericSrc )
- result += "GenericSrc | ";
- if ( value & PeerMemoryFeatureFlagBits::eGenericDst )
- result += "GenericDst | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using MemoryAllocateFlags = Flags<MemoryAllocateFlagBits>;
-
template <>
struct FlagTraits<MemoryAllocateFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( MemoryAllocateFlagBits::eDeviceMask ) | VkFlags( MemoryAllocateFlagBits::eDeviceAddress ) |
VkFlags( MemoryAllocateFlagBits::eDeviceAddressCaptureReplay )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags
- operator|( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return MemoryAllocateFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags
- operator&( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return MemoryAllocateFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags
- operator^( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return MemoryAllocateFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator~( MemoryAllocateFlagBits bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( MemoryAllocateFlags( bits ) );
- }
-
+ using MemoryAllocateFlags = Flags<MemoryAllocateFlagBits>;
using MemoryAllocateFlagsKHR = MemoryAllocateFlags;
- VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlags value )
+ template <>
+ struct FlagTraits<CommandPoolTrimFlagBits>
{
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & MemoryAllocateFlagBits::eDeviceMask )
- result += "DeviceMask | ";
- if ( value & MemoryAllocateFlagBits::eDeviceAddress )
- result += "DeviceAddress | ";
- if ( value & MemoryAllocateFlagBits::eDeviceAddressCaptureReplay )
- result += "DeviceAddressCaptureReplay | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using CommandPoolTrimFlags = Flags<CommandPoolTrimFlagBits>;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using CommandPoolTrimFlags = Flags<CommandPoolTrimFlagBits>;
using CommandPoolTrimFlagsKHR = CommandPoolTrimFlags;
- VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlags )
+ template <>
+ struct FlagTraits<DescriptorUpdateTemplateCreateFlagBits>
{
- return "{}";
- }
-
- using DescriptorUpdateTemplateCreateFlags = Flags<DescriptorUpdateTemplateCreateFlagBits>;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using DescriptorUpdateTemplateCreateFlags = Flags<DescriptorUpdateTemplateCreateFlagBits>;
using DescriptorUpdateTemplateCreateFlagsKHR = DescriptorUpdateTemplateCreateFlags;
- VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlags )
- {
- return "{}";
- }
-
- using ExternalMemoryHandleTypeFlags = Flags<ExternalMemoryHandleTypeFlagBits>;
-
template <>
struct FlagTraits<ExternalMemoryHandleTypeFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) |
- VkFlags( ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 ) |
- VkFlags( ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt ) |
- VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D11Texture ) |
- VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt ) |
- VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D12Heap ) |
- VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D12Resource ) |
- VkFlags( ExternalMemoryHandleTypeFlagBits::eDmaBufEXT )
+ allFlags = VkFlags( ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) | VkFlags( ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 ) |
+ VkFlags( ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt ) | VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D11Texture ) |
+ VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt ) | VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D12Heap ) |
+ VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D12Resource ) | VkFlags( ExternalMemoryHandleTypeFlagBits::eDmaBufEXT )
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
| VkFlags( ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID )
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
- | VkFlags( ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT ) |
- VkFlags( ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT )
+ | VkFlags( ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT ) | VkFlags( ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT )
#if defined( VK_USE_PLATFORM_FUCHSIA )
| VkFlags( ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA )
#endif /*VK_USE_PLATFORM_FUCHSIA*/
| VkFlags( ExternalMemoryHandleTypeFlagBits::eRdmaAddressNV )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags
- operator|( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalMemoryHandleTypeFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags
- operator&( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalMemoryHandleTypeFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags
- operator^( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalMemoryHandleTypeFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags
- operator~( ExternalMemoryHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( ExternalMemoryHandleTypeFlags( bits ) );
- }
-
+ using ExternalMemoryHandleTypeFlags = Flags<ExternalMemoryHandleTypeFlagBits>;
using ExternalMemoryHandleTypeFlagsKHR = ExternalMemoryHandleTypeFlags;
- VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueFd )
- result += "OpaqueFd | ";
- if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 )
- result += "OpaqueWin32 | ";
- if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt )
- result += "OpaqueWin32Kmt | ";
- if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11Texture )
- result += "D3D11Texture | ";
- if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt )
- result += "D3D11TextureKmt | ";
- if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Heap )
- result += "D3D12Heap | ";
- if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Resource )
- result += "D3D12Resource | ";
- if ( value & ExternalMemoryHandleTypeFlagBits::eDmaBufEXT )
- result += "DmaBufEXT | ";
-#if defined( VK_USE_PLATFORM_ANDROID_KHR )
- if ( value & ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID )
- result += "AndroidHardwareBufferANDROID | ";
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
- if ( value & ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT )
- result += "HostAllocationEXT | ";
- if ( value & ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT )
- result += "HostMappedForeignMemoryEXT | ";
-#if defined( VK_USE_PLATFORM_FUCHSIA )
- if ( value & ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA )
- result += "ZirconVmoFUCHSIA | ";
-#endif /*VK_USE_PLATFORM_FUCHSIA*/
- if ( value & ExternalMemoryHandleTypeFlagBits::eRdmaAddressNV )
- result += "RdmaAddressNV | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using ExternalMemoryFeatureFlags = Flags<ExternalMemoryFeatureFlagBits>;
-
template <>
struct FlagTraits<ExternalMemoryFeatureFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( ExternalMemoryFeatureFlagBits::eDedicatedOnly ) |
- VkFlags( ExternalMemoryFeatureFlagBits::eExportable ) |
+ allFlags = VkFlags( ExternalMemoryFeatureFlagBits::eDedicatedOnly ) | VkFlags( ExternalMemoryFeatureFlagBits::eExportable ) |
VkFlags( ExternalMemoryFeatureFlagBits::eImportable )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags
- operator|( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalMemoryFeatureFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags
- operator&( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalMemoryFeatureFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags
- operator^( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalMemoryFeatureFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator~( ExternalMemoryFeatureFlagBits bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( ExternalMemoryFeatureFlags( bits ) );
- }
-
+ using ExternalMemoryFeatureFlags = Flags<ExternalMemoryFeatureFlagBits>;
using ExternalMemoryFeatureFlagsKHR = ExternalMemoryFeatureFlags;
- VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & ExternalMemoryFeatureFlagBits::eDedicatedOnly )
- result += "DedicatedOnly | ";
- if ( value & ExternalMemoryFeatureFlagBits::eExportable )
- result += "Exportable | ";
- if ( value & ExternalMemoryFeatureFlagBits::eImportable )
- result += "Importable | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using ExternalFenceHandleTypeFlags = Flags<ExternalFenceHandleTypeFlagBits>;
-
template <>
struct FlagTraits<ExternalFenceHandleTypeFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( ExternalFenceHandleTypeFlagBits::eOpaqueFd ) |
- VkFlags( ExternalFenceHandleTypeFlagBits::eOpaqueWin32 ) |
- VkFlags( ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt ) |
- VkFlags( ExternalFenceHandleTypeFlagBits::eSyncFd )
+ allFlags = VkFlags( ExternalFenceHandleTypeFlagBits::eOpaqueFd ) | VkFlags( ExternalFenceHandleTypeFlagBits::eOpaqueWin32 ) |
+ VkFlags( ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt ) | VkFlags( ExternalFenceHandleTypeFlagBits::eSyncFd )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags
- operator|( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalFenceHandleTypeFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags
- operator&( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalFenceHandleTypeFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags
- operator^( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalFenceHandleTypeFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator~( ExternalFenceHandleTypeFlagBits bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( ExternalFenceHandleTypeFlags( bits ) );
- }
-
+ using ExternalFenceHandleTypeFlags = Flags<ExternalFenceHandleTypeFlagBits>;
using ExternalFenceHandleTypeFlagsKHR = ExternalFenceHandleTypeFlags;
- VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueFd )
- result += "OpaqueFd | ";
- if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32 )
- result += "OpaqueWin32 | ";
- if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt )
- result += "OpaqueWin32Kmt | ";
- if ( value & ExternalFenceHandleTypeFlagBits::eSyncFd )
- result += "SyncFd | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using ExternalFenceFeatureFlags = Flags<ExternalFenceFeatureFlagBits>;
-
template <>
struct FlagTraits<ExternalFenceFeatureFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags =
- VkFlags( ExternalFenceFeatureFlagBits::eExportable ) | VkFlags( ExternalFenceFeatureFlagBits::eImportable )
+ allFlags = VkFlags( ExternalFenceFeatureFlagBits::eExportable ) | VkFlags( ExternalFenceFeatureFlagBits::eImportable )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags
- operator|( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalFenceFeatureFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags
- operator&( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalFenceFeatureFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags
- operator^( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalFenceFeatureFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator~( ExternalFenceFeatureFlagBits bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( ExternalFenceFeatureFlags( bits ) );
- }
-
+ using ExternalFenceFeatureFlags = Flags<ExternalFenceFeatureFlagBits>;
using ExternalFenceFeatureFlagsKHR = ExternalFenceFeatureFlags;
- VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & ExternalFenceFeatureFlagBits::eExportable )
- result += "Exportable | ";
- if ( value & ExternalFenceFeatureFlagBits::eImportable )
- result += "Importable | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using FenceImportFlags = Flags<FenceImportFlagBits>;
-
template <>
struct FlagTraits<FenceImportFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( FenceImportFlagBits::eTemporary )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator|( FenceImportFlagBits bit0,
- FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return FenceImportFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator&( FenceImportFlagBits bit0,
- FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return FenceImportFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator^( FenceImportFlagBits bit0,
- FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return FenceImportFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator~( FenceImportFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( FenceImportFlags( bits ) );
- }
-
+ using FenceImportFlags = Flags<FenceImportFlagBits>;
using FenceImportFlagsKHR = FenceImportFlags;
- VULKAN_HPP_INLINE std::string to_string( FenceImportFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & FenceImportFlagBits::eTemporary )
- result += "Temporary | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using SemaphoreImportFlags = Flags<SemaphoreImportFlagBits>;
-
template <>
struct FlagTraits<SemaphoreImportFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( SemaphoreImportFlagBits::eTemporary )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags
- operator|( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SemaphoreImportFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags
- operator&( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SemaphoreImportFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags
- operator^( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SemaphoreImportFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator~( SemaphoreImportFlagBits bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( SemaphoreImportFlags( bits ) );
- }
-
+ using SemaphoreImportFlags = Flags<SemaphoreImportFlagBits>;
using SemaphoreImportFlagsKHR = SemaphoreImportFlags;
- VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & SemaphoreImportFlagBits::eTemporary )
- result += "Temporary | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using ExternalSemaphoreHandleTypeFlags = Flags<ExternalSemaphoreHandleTypeFlagBits>;
-
template <>
struct FlagTraits<ExternalSemaphoreHandleTypeFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) |
- VkFlags( ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 ) |
- VkFlags( ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt ) |
- VkFlags( ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence ) |
+ allFlags = VkFlags( ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) | VkFlags( ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 ) |
+ VkFlags( ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt ) | VkFlags( ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence ) |
VkFlags( ExternalSemaphoreHandleTypeFlagBits::eSyncFd )
#if defined( VK_USE_PLATFORM_FUCHSIA )
| VkFlags( ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA )
#endif /*VK_USE_PLATFORM_FUCHSIA*/
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags
- operator|( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalSemaphoreHandleTypeFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags
- operator&( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalSemaphoreHandleTypeFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags
- operator^( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalSemaphoreHandleTypeFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags
- operator~( ExternalSemaphoreHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( ExternalSemaphoreHandleTypeFlags( bits ) );
- }
-
+ using ExternalSemaphoreHandleTypeFlags = Flags<ExternalSemaphoreHandleTypeFlagBits>;
using ExternalSemaphoreHandleTypeFlagsKHR = ExternalSemaphoreHandleTypeFlags;
- VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd )
- result += "OpaqueFd | ";
- if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 )
- result += "OpaqueWin32 | ";
- if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt )
- result += "OpaqueWin32Kmt | ";
- if ( value & ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence )
- result += "D3D12Fence | ";
- if ( value & ExternalSemaphoreHandleTypeFlagBits::eSyncFd )
- result += "SyncFd | ";
-#if defined( VK_USE_PLATFORM_FUCHSIA )
- if ( value & ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA )
- result += "ZirconEventFUCHSIA | ";
-#endif /*VK_USE_PLATFORM_FUCHSIA*/
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using ExternalSemaphoreFeatureFlags = Flags<ExternalSemaphoreFeatureFlagBits>;
-
template <>
struct FlagTraits<ExternalSemaphoreFeatureFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( ExternalSemaphoreFeatureFlagBits::eExportable ) |
- VkFlags( ExternalSemaphoreFeatureFlagBits::eImportable )
+ allFlags = VkFlags( ExternalSemaphoreFeatureFlagBits::eExportable ) | VkFlags( ExternalSemaphoreFeatureFlagBits::eImportable )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags
- operator|( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalSemaphoreFeatureFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags
- operator&( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalSemaphoreFeatureFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags
- operator^( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalSemaphoreFeatureFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags
- operator~( ExternalSemaphoreFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( ExternalSemaphoreFeatureFlags( bits ) );
- }
-
+ using ExternalSemaphoreFeatureFlags = Flags<ExternalSemaphoreFeatureFlagBits>;
using ExternalSemaphoreFeatureFlagsKHR = ExternalSemaphoreFeatureFlags;
- VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & ExternalSemaphoreFeatureFlagBits::eExportable )
- result += "Exportable | ";
- if ( value & ExternalSemaphoreFeatureFlagBits::eImportable )
- result += "Importable | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
//=== VK_VERSION_1_2 ===
- using DescriptorBindingFlags = Flags<DescriptorBindingFlagBits>;
-
template <>
struct FlagTraits<DescriptorBindingFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( DescriptorBindingFlagBits::eUpdateAfterBind ) |
- VkFlags( DescriptorBindingFlagBits::eUpdateUnusedWhilePending ) |
- VkFlags( DescriptorBindingFlagBits::ePartiallyBound ) |
- VkFlags( DescriptorBindingFlagBits::eVariableDescriptorCount )
+ allFlags = VkFlags( DescriptorBindingFlagBits::eUpdateAfterBind ) | VkFlags( DescriptorBindingFlagBits::eUpdateUnusedWhilePending ) |
+ VkFlags( DescriptorBindingFlagBits::ePartiallyBound ) | VkFlags( DescriptorBindingFlagBits::eVariableDescriptorCount )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags
- operator|( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DescriptorBindingFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags
- operator&( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DescriptorBindingFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags
- operator^( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DescriptorBindingFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator~( DescriptorBindingFlagBits bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( DescriptorBindingFlags( bits ) );
- }
-
+ using DescriptorBindingFlags = Flags<DescriptorBindingFlagBits>;
using DescriptorBindingFlagsEXT = DescriptorBindingFlags;
- VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & DescriptorBindingFlagBits::eUpdateAfterBind )
- result += "UpdateAfterBind | ";
- if ( value & DescriptorBindingFlagBits::eUpdateUnusedWhilePending )
- result += "UpdateUnusedWhilePending | ";
- if ( value & DescriptorBindingFlagBits::ePartiallyBound )
- result += "PartiallyBound | ";
- if ( value & DescriptorBindingFlagBits::eVariableDescriptorCount )
- result += "VariableDescriptorCount | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using ResolveModeFlags = Flags<ResolveModeFlagBits>;
-
template <>
struct FlagTraits<ResolveModeFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( ResolveModeFlagBits::eNone ) | VkFlags( ResolveModeFlagBits::eSampleZero ) |
- VkFlags( ResolveModeFlagBits::eAverage ) | VkFlags( ResolveModeFlagBits::eMin ) |
- VkFlags( ResolveModeFlagBits::eMax )
+ allFlags = VkFlags( ResolveModeFlagBits::eNone ) | VkFlags( ResolveModeFlagBits::eSampleZero ) | VkFlags( ResolveModeFlagBits::eAverage ) |
+ VkFlags( ResolveModeFlagBits::eMin ) | VkFlags( ResolveModeFlagBits::eMax )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator|( ResolveModeFlagBits bit0,
- ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ResolveModeFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator&( ResolveModeFlagBits bit0,
- ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ResolveModeFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator^( ResolveModeFlagBits bit0,
- ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ResolveModeFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator~( ResolveModeFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( ResolveModeFlags( bits ) );
- }
-
+ using ResolveModeFlags = Flags<ResolveModeFlagBits>;
using ResolveModeFlagsKHR = ResolveModeFlags;
- VULKAN_HPP_INLINE std::string to_string( ResolveModeFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & ResolveModeFlagBits::eSampleZero )
- result += "SampleZero | ";
- if ( value & ResolveModeFlagBits::eAverage )
- result += "Average | ";
- if ( value & ResolveModeFlagBits::eMin )
- result += "Min | ";
- if ( value & ResolveModeFlagBits::eMax )
- result += "Max | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using SemaphoreWaitFlags = Flags<SemaphoreWaitFlagBits>;
-
template <>
struct FlagTraits<SemaphoreWaitFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( SemaphoreWaitFlagBits::eAny )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator|( SemaphoreWaitFlagBits bit0,
- SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SemaphoreWaitFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator&( SemaphoreWaitFlagBits bit0,
- SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SemaphoreWaitFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator^( SemaphoreWaitFlagBits bit0,
- SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SemaphoreWaitFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator~( SemaphoreWaitFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( SemaphoreWaitFlags( bits ) );
- }
-
+ using SemaphoreWaitFlags = Flags<SemaphoreWaitFlagBits>;
using SemaphoreWaitFlagsKHR = SemaphoreWaitFlags;
- VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & SemaphoreWaitFlagBits::eAny )
- result += "Any | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
//=== VK_VERSION_1_3 ===
- using PipelineCreationFeedbackFlags = Flags<PipelineCreationFeedbackFlagBits>;
-
template <>
struct FlagTraits<PipelineCreationFeedbackFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( PipelineCreationFeedbackFlagBits::eValid ) |
- VkFlags( PipelineCreationFeedbackFlagBits::eApplicationPipelineCacheHit ) |
+ allFlags = VkFlags( PipelineCreationFeedbackFlagBits::eValid ) | VkFlags( PipelineCreationFeedbackFlagBits::eApplicationPipelineCacheHit ) |
VkFlags( PipelineCreationFeedbackFlagBits::eBasePipelineAcceleration )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlags
- operator|( PipelineCreationFeedbackFlagBits bit0, PipelineCreationFeedbackFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineCreationFeedbackFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlags
- operator&( PipelineCreationFeedbackFlagBits bit0, PipelineCreationFeedbackFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineCreationFeedbackFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlags
- operator^( PipelineCreationFeedbackFlagBits bit0, PipelineCreationFeedbackFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineCreationFeedbackFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlags
- operator~( PipelineCreationFeedbackFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( PipelineCreationFeedbackFlags( bits ) );
- }
-
+ using PipelineCreationFeedbackFlags = Flags<PipelineCreationFeedbackFlagBits>;
using PipelineCreationFeedbackFlagsEXT = PipelineCreationFeedbackFlags;
- VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & PipelineCreationFeedbackFlagBits::eValid )
- result += "Valid | ";
- if ( value & PipelineCreationFeedbackFlagBits::eApplicationPipelineCacheHit )
- result += "ApplicationPipelineCacheHit | ";
- if ( value & PipelineCreationFeedbackFlagBits::eBasePipelineAcceleration )
- result += "BasePipelineAcceleration | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using ToolPurposeFlags = Flags<ToolPurposeFlagBits>;
-
template <>
struct FlagTraits<ToolPurposeFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( ToolPurposeFlagBits::eValidation ) | VkFlags( ToolPurposeFlagBits::eProfiling ) |
- VkFlags( ToolPurposeFlagBits::eTracing ) | VkFlags( ToolPurposeFlagBits::eAdditionalFeatures ) |
- VkFlags( ToolPurposeFlagBits::eModifyingFeatures ) |
+ allFlags = VkFlags( ToolPurposeFlagBits::eValidation ) | VkFlags( ToolPurposeFlagBits::eProfiling ) | VkFlags( ToolPurposeFlagBits::eTracing ) |
+ VkFlags( ToolPurposeFlagBits::eAdditionalFeatures ) | VkFlags( ToolPurposeFlagBits::eModifyingFeatures ) |
VkFlags( ToolPurposeFlagBits::eDebugReportingEXT ) | VkFlags( ToolPurposeFlagBits::eDebugMarkersEXT )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlags operator|( ToolPurposeFlagBits bit0,
- ToolPurposeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ToolPurposeFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlags operator&( ToolPurposeFlagBits bit0,
- ToolPurposeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ToolPurposeFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlags operator^( ToolPurposeFlagBits bit0,
- ToolPurposeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ToolPurposeFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlags operator~( ToolPurposeFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( ToolPurposeFlags( bits ) );
- }
-
+ using ToolPurposeFlags = Flags<ToolPurposeFlagBits>;
using ToolPurposeFlagsEXT = ToolPurposeFlags;
- VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlags value )
+ template <>
+ struct FlagTraits<PrivateDataSlotCreateFlagBits>
{
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & ToolPurposeFlagBits::eValidation )
- result += "Validation | ";
- if ( value & ToolPurposeFlagBits::eProfiling )
- result += "Profiling | ";
- if ( value & ToolPurposeFlagBits::eTracing )
- result += "Tracing | ";
- if ( value & ToolPurposeFlagBits::eAdditionalFeatures )
- result += "AdditionalFeatures | ";
- if ( value & ToolPurposeFlagBits::eModifyingFeatures )
- result += "ModifyingFeatures | ";
- if ( value & ToolPurposeFlagBits::eDebugReportingEXT )
- result += "DebugReportingEXT | ";
- if ( value & ToolPurposeFlagBits::eDebugMarkersEXT )
- result += "DebugMarkersEXT | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using PrivateDataSlotCreateFlags = Flags<PrivateDataSlotCreateFlagBits>;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using PrivateDataSlotCreateFlags = Flags<PrivateDataSlotCreateFlagBits>;
using PrivateDataSlotCreateFlagsEXT = PrivateDataSlotCreateFlags;
- VULKAN_HPP_INLINE std::string to_string( PrivateDataSlotCreateFlags )
- {
- return "{}";
- }
-
- using PipelineStageFlags2 = Flags<PipelineStageFlagBits2>;
-
template <>
struct FlagTraits<PipelineStageFlagBits2>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags64
{
- allFlags =
- VkFlags64( PipelineStageFlagBits2::eNone ) | VkFlags64( PipelineStageFlagBits2::eTopOfPipe ) |
- VkFlags64( PipelineStageFlagBits2::eDrawIndirect ) | VkFlags64( PipelineStageFlagBits2::eVertexInput ) |
- VkFlags64( PipelineStageFlagBits2::eVertexShader ) |
- VkFlags64( PipelineStageFlagBits2::eTessellationControlShader ) |
- VkFlags64( PipelineStageFlagBits2::eTessellationEvaluationShader ) |
- VkFlags64( PipelineStageFlagBits2::eGeometryShader ) | VkFlags64( PipelineStageFlagBits2::eFragmentShader ) |
- VkFlags64( PipelineStageFlagBits2::eEarlyFragmentTests ) |
- VkFlags64( PipelineStageFlagBits2::eLateFragmentTests ) |
- VkFlags64( PipelineStageFlagBits2::eColorAttachmentOutput ) |
- VkFlags64( PipelineStageFlagBits2::eComputeShader ) | VkFlags64( PipelineStageFlagBits2::eAllTransfer ) |
- VkFlags64( PipelineStageFlagBits2::eBottomOfPipe ) | VkFlags64( PipelineStageFlagBits2::eHost ) |
- VkFlags64( PipelineStageFlagBits2::eAllGraphics ) | VkFlags64( PipelineStageFlagBits2::eAllCommands ) |
- VkFlags64( PipelineStageFlagBits2::eCopy ) | VkFlags64( PipelineStageFlagBits2::eResolve ) |
- VkFlags64( PipelineStageFlagBits2::eBlit ) | VkFlags64( PipelineStageFlagBits2::eClear ) |
- VkFlags64( PipelineStageFlagBits2::eIndexInput ) | VkFlags64( PipelineStageFlagBits2::eVertexAttributeInput ) |
- VkFlags64( PipelineStageFlagBits2::ePreRasterizationShaders )
+ allFlags = VkFlags64( PipelineStageFlagBits2::eNone ) | VkFlags64( PipelineStageFlagBits2::eTopOfPipe ) |
+ VkFlags64( PipelineStageFlagBits2::eDrawIndirect ) | VkFlags64( PipelineStageFlagBits2::eVertexInput ) |
+ VkFlags64( PipelineStageFlagBits2::eVertexShader ) | VkFlags64( PipelineStageFlagBits2::eTessellationControlShader ) |
+ VkFlags64( PipelineStageFlagBits2::eTessellationEvaluationShader ) | VkFlags64( PipelineStageFlagBits2::eGeometryShader ) |
+ VkFlags64( PipelineStageFlagBits2::eFragmentShader ) | VkFlags64( PipelineStageFlagBits2::eEarlyFragmentTests ) |
+ VkFlags64( PipelineStageFlagBits2::eLateFragmentTests ) | VkFlags64( PipelineStageFlagBits2::eColorAttachmentOutput ) |
+ VkFlags64( PipelineStageFlagBits2::eComputeShader ) | VkFlags64( PipelineStageFlagBits2::eAllTransfer ) |
+ VkFlags64( PipelineStageFlagBits2::eBottomOfPipe ) | VkFlags64( PipelineStageFlagBits2::eHost ) |
+ VkFlags64( PipelineStageFlagBits2::eAllGraphics ) | VkFlags64( PipelineStageFlagBits2::eAllCommands ) |
+ VkFlags64( PipelineStageFlagBits2::eCopy ) | VkFlags64( PipelineStageFlagBits2::eResolve ) | VkFlags64( PipelineStageFlagBits2::eBlit ) |
+ VkFlags64( PipelineStageFlagBits2::eClear ) | VkFlags64( PipelineStageFlagBits2::eIndexInput ) |
+ VkFlags64( PipelineStageFlagBits2::eVertexAttributeInput ) | VkFlags64( PipelineStageFlagBits2::ePreRasterizationShaders )
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- | VkFlags64( PipelineStageFlagBits2::eVideoDecodeKHR ) | VkFlags64( PipelineStageFlagBits2::eVideoEncodeKHR )
+ | VkFlags64( PipelineStageFlagBits2::eVideoDecodeKHR ) | VkFlags64( PipelineStageFlagBits2::eVideoEncodeKHR )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- | VkFlags64( PipelineStageFlagBits2::eTransformFeedbackEXT ) |
- VkFlags64( PipelineStageFlagBits2::eConditionalRenderingEXT ) |
- VkFlags64( PipelineStageFlagBits2::eCommandPreprocessNV ) |
- VkFlags64( PipelineStageFlagBits2::eFragmentShadingRateAttachmentKHR ) |
- VkFlags64( PipelineStageFlagBits2::eAccelerationStructureBuildKHR ) |
- VkFlags64( PipelineStageFlagBits2::eRayTracingShaderKHR ) |
- VkFlags64( PipelineStageFlagBits2::eFragmentDensityProcessEXT ) |
- VkFlags64( PipelineStageFlagBits2::eTaskShaderNV ) | VkFlags64( PipelineStageFlagBits2::eMeshShaderNV ) |
- VkFlags64( PipelineStageFlagBits2::eSubpassShadingHUAWEI ) |
- VkFlags64( PipelineStageFlagBits2::eInvocationMaskHUAWEI )
+ | VkFlags64( PipelineStageFlagBits2::eTransformFeedbackEXT ) | VkFlags64( PipelineStageFlagBits2::eConditionalRenderingEXT ) |
+ VkFlags64( PipelineStageFlagBits2::eCommandPreprocessNV ) | VkFlags64( PipelineStageFlagBits2::eFragmentShadingRateAttachmentKHR ) |
+ VkFlags64( PipelineStageFlagBits2::eAccelerationStructureBuildKHR ) | VkFlags64( PipelineStageFlagBits2::eRayTracingShaderKHR ) |
+ VkFlags64( PipelineStageFlagBits2::eFragmentDensityProcessEXT ) | VkFlags64( PipelineStageFlagBits2::eTaskShaderEXT ) |
+ VkFlags64( PipelineStageFlagBits2::eMeshShaderEXT ) | VkFlags64( PipelineStageFlagBits2::eSubpassShadingHUAWEI ) |
+ VkFlags64( PipelineStageFlagBits2::eInvocationMaskHUAWEI ) | VkFlags64( PipelineStageFlagBits2::eAccelerationStructureCopyKHR ) |
+ VkFlags64( PipelineStageFlagBits2::eMicromapBuildEXT ) | VkFlags64( PipelineStageFlagBits2::eOpticalFlowNV )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2
- operator|( PipelineStageFlagBits2 bit0, PipelineStageFlagBits2 bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineStageFlags2( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2
- operator&( PipelineStageFlagBits2 bit0, PipelineStageFlagBits2 bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineStageFlags2( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2
- operator^( PipelineStageFlagBits2 bit0, PipelineStageFlagBits2 bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineStageFlags2( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2 operator~( PipelineStageFlagBits2 bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( PipelineStageFlags2( bits ) );
- }
-
+ using PipelineStageFlags2 = Flags<PipelineStageFlagBits2>;
using PipelineStageFlags2KHR = PipelineStageFlags2;
- VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags2 value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & PipelineStageFlagBits2::eTopOfPipe )
- result += "TopOfPipe | ";
- if ( value & PipelineStageFlagBits2::eDrawIndirect )
- result += "DrawIndirect | ";
- if ( value & PipelineStageFlagBits2::eVertexInput )
- result += "VertexInput | ";
- if ( value & PipelineStageFlagBits2::eVertexShader )
- result += "VertexShader | ";
- if ( value & PipelineStageFlagBits2::eTessellationControlShader )
- result += "TessellationControlShader | ";
- if ( value & PipelineStageFlagBits2::eTessellationEvaluationShader )
- result += "TessellationEvaluationShader | ";
- if ( value & PipelineStageFlagBits2::eGeometryShader )
- result += "GeometryShader | ";
- if ( value & PipelineStageFlagBits2::eFragmentShader )
- result += "FragmentShader | ";
- if ( value & PipelineStageFlagBits2::eEarlyFragmentTests )
- result += "EarlyFragmentTests | ";
- if ( value & PipelineStageFlagBits2::eLateFragmentTests )
- result += "LateFragmentTests | ";
- if ( value & PipelineStageFlagBits2::eColorAttachmentOutput )
- result += "ColorAttachmentOutput | ";
- if ( value & PipelineStageFlagBits2::eComputeShader )
- result += "ComputeShader | ";
- if ( value & PipelineStageFlagBits2::eAllTransfer )
- result += "AllTransfer | ";
- if ( value & PipelineStageFlagBits2::eBottomOfPipe )
- result += "BottomOfPipe | ";
- if ( value & PipelineStageFlagBits2::eHost )
- result += "Host | ";
- if ( value & PipelineStageFlagBits2::eAllGraphics )
- result += "AllGraphics | ";
- if ( value & PipelineStageFlagBits2::eAllCommands )
- result += "AllCommands | ";
- if ( value & PipelineStageFlagBits2::eCopy )
- result += "Copy | ";
- if ( value & PipelineStageFlagBits2::eResolve )
- result += "Resolve | ";
- if ( value & PipelineStageFlagBits2::eBlit )
- result += "Blit | ";
- if ( value & PipelineStageFlagBits2::eClear )
- result += "Clear | ";
- if ( value & PipelineStageFlagBits2::eIndexInput )
- result += "IndexInput | ";
- if ( value & PipelineStageFlagBits2::eVertexAttributeInput )
- result += "VertexAttributeInput | ";
- if ( value & PipelineStageFlagBits2::ePreRasterizationShaders )
- result += "PreRasterizationShaders | ";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- if ( value & PipelineStageFlagBits2::eVideoDecodeKHR )
- result += "VideoDecodeKHR | ";
- if ( value & PipelineStageFlagBits2::eVideoEncodeKHR )
- result += "VideoEncodeKHR | ";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- if ( value & PipelineStageFlagBits2::eTransformFeedbackEXT )
- result += "TransformFeedbackEXT | ";
- if ( value & PipelineStageFlagBits2::eConditionalRenderingEXT )
- result += "ConditionalRenderingEXT | ";
- if ( value & PipelineStageFlagBits2::eCommandPreprocessNV )
- result += "CommandPreprocessNV | ";
- if ( value & PipelineStageFlagBits2::eFragmentShadingRateAttachmentKHR )
- result += "FragmentShadingRateAttachmentKHR | ";
- if ( value & PipelineStageFlagBits2::eAccelerationStructureBuildKHR )
- result += "AccelerationStructureBuildKHR | ";
- if ( value & PipelineStageFlagBits2::eRayTracingShaderKHR )
- result += "RayTracingShaderKHR | ";
- if ( value & PipelineStageFlagBits2::eFragmentDensityProcessEXT )
- result += "FragmentDensityProcessEXT | ";
- if ( value & PipelineStageFlagBits2::eTaskShaderNV )
- result += "TaskShaderNV | ";
- if ( value & PipelineStageFlagBits2::eMeshShaderNV )
- result += "MeshShaderNV | ";
- if ( value & PipelineStageFlagBits2::eSubpassShadingHUAWEI )
- result += "SubpassShadingHUAWEI | ";
- if ( value & PipelineStageFlagBits2::eInvocationMaskHUAWEI )
- result += "InvocationMaskHUAWEI | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using AccessFlags2 = Flags<AccessFlagBits2>;
-
template <>
struct FlagTraits<AccessFlagBits2>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags64
{
allFlags =
- VkFlags64( AccessFlagBits2::eNone ) | VkFlags64( AccessFlagBits2::eIndirectCommandRead ) |
- VkFlags64( AccessFlagBits2::eIndexRead ) | VkFlags64( AccessFlagBits2::eVertexAttributeRead ) |
- VkFlags64( AccessFlagBits2::eUniformRead ) | VkFlags64( AccessFlagBits2::eInputAttachmentRead ) |
- VkFlags64( AccessFlagBits2::eShaderRead ) | VkFlags64( AccessFlagBits2::eShaderWrite ) |
- VkFlags64( AccessFlagBits2::eColorAttachmentRead ) | VkFlags64( AccessFlagBits2::eColorAttachmentWrite ) |
- VkFlags64( AccessFlagBits2::eDepthStencilAttachmentRead ) |
+ VkFlags64( AccessFlagBits2::eNone ) | VkFlags64( AccessFlagBits2::eIndirectCommandRead ) | VkFlags64( AccessFlagBits2::eIndexRead ) |
+ VkFlags64( AccessFlagBits2::eVertexAttributeRead ) | VkFlags64( AccessFlagBits2::eUniformRead ) | VkFlags64( AccessFlagBits2::eInputAttachmentRead ) |
+ VkFlags64( AccessFlagBits2::eShaderRead ) | VkFlags64( AccessFlagBits2::eShaderWrite ) | VkFlags64( AccessFlagBits2::eColorAttachmentRead ) |
+ VkFlags64( AccessFlagBits2::eColorAttachmentWrite ) | VkFlags64( AccessFlagBits2::eDepthStencilAttachmentRead ) |
VkFlags64( AccessFlagBits2::eDepthStencilAttachmentWrite ) | VkFlags64( AccessFlagBits2::eTransferRead ) |
- VkFlags64( AccessFlagBits2::eTransferWrite ) | VkFlags64( AccessFlagBits2::eHostRead ) |
- VkFlags64( AccessFlagBits2::eHostWrite ) | VkFlags64( AccessFlagBits2::eMemoryRead ) |
- VkFlags64( AccessFlagBits2::eMemoryWrite ) | VkFlags64( AccessFlagBits2::eShaderSampledRead ) |
+ VkFlags64( AccessFlagBits2::eTransferWrite ) | VkFlags64( AccessFlagBits2::eHostRead ) | VkFlags64( AccessFlagBits2::eHostWrite ) |
+ VkFlags64( AccessFlagBits2::eMemoryRead ) | VkFlags64( AccessFlagBits2::eMemoryWrite ) | VkFlags64( AccessFlagBits2::eShaderSampledRead ) |
VkFlags64( AccessFlagBits2::eShaderStorageRead ) | VkFlags64( AccessFlagBits2::eShaderStorageWrite )
#if defined( VK_ENABLE_BETA_EXTENSIONS )
| VkFlags64( AccessFlagBits2::eVideoDecodeReadKHR ) | VkFlags64( AccessFlagBits2::eVideoDecodeWriteKHR ) |
VkFlags64( AccessFlagBits2::eVideoEncodeReadKHR ) | VkFlags64( AccessFlagBits2::eVideoEncodeWriteKHR )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- | VkFlags64( AccessFlagBits2::eTransformFeedbackWriteEXT ) |
- VkFlags64( AccessFlagBits2::eTransformFeedbackCounterReadEXT ) |
- VkFlags64( AccessFlagBits2::eTransformFeedbackCounterWriteEXT ) |
- VkFlags64( AccessFlagBits2::eConditionalRenderingReadEXT ) |
- VkFlags64( AccessFlagBits2::eCommandPreprocessReadNV ) |
- VkFlags64( AccessFlagBits2::eCommandPreprocessWriteNV ) |
- VkFlags64( AccessFlagBits2::eFragmentShadingRateAttachmentReadKHR ) |
- VkFlags64( AccessFlagBits2::eAccelerationStructureReadKHR ) |
- VkFlags64( AccessFlagBits2::eAccelerationStructureWriteKHR ) |
- VkFlags64( AccessFlagBits2::eFragmentDensityMapReadEXT ) |
- VkFlags64( AccessFlagBits2::eColorAttachmentReadNoncoherentEXT ) |
- VkFlags64( AccessFlagBits2::eInvocationMaskReadHUAWEI )
+ | VkFlags64( AccessFlagBits2::eTransformFeedbackWriteEXT ) | VkFlags64( AccessFlagBits2::eTransformFeedbackCounterReadEXT ) |
+ VkFlags64( AccessFlagBits2::eTransformFeedbackCounterWriteEXT ) | VkFlags64( AccessFlagBits2::eConditionalRenderingReadEXT ) |
+ VkFlags64( AccessFlagBits2::eCommandPreprocessReadNV ) | VkFlags64( AccessFlagBits2::eCommandPreprocessWriteNV ) |
+ VkFlags64( AccessFlagBits2::eFragmentShadingRateAttachmentReadKHR ) | VkFlags64( AccessFlagBits2::eAccelerationStructureReadKHR ) |
+ VkFlags64( AccessFlagBits2::eAccelerationStructureWriteKHR ) | VkFlags64( AccessFlagBits2::eFragmentDensityMapReadEXT ) |
+ VkFlags64( AccessFlagBits2::eColorAttachmentReadNoncoherentEXT ) | VkFlags64( AccessFlagBits2::eInvocationMaskReadHUAWEI ) |
+ VkFlags64( AccessFlagBits2::eShaderBindingTableReadKHR ) | VkFlags64( AccessFlagBits2::eMicromapReadEXT ) |
+ VkFlags64( AccessFlagBits2::eMicromapWriteEXT ) | VkFlags64( AccessFlagBits2::eOpticalFlowReadNV ) | VkFlags64( AccessFlagBits2::eOpticalFlowWriteNV )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2 operator|( AccessFlagBits2 bit0,
- AccessFlagBits2 bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return AccessFlags2( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2 operator&( AccessFlagBits2 bit0,
- AccessFlagBits2 bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return AccessFlags2( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2 operator^( AccessFlagBits2 bit0,
- AccessFlagBits2 bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return AccessFlags2( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2 operator~( AccessFlagBits2 bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( AccessFlags2( bits ) );
- }
-
+ using AccessFlags2 = Flags<AccessFlagBits2>;
using AccessFlags2KHR = AccessFlags2;
- VULKAN_HPP_INLINE std::string to_string( AccessFlags2 value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & AccessFlagBits2::eIndirectCommandRead )
- result += "IndirectCommandRead | ";
- if ( value & AccessFlagBits2::eIndexRead )
- result += "IndexRead | ";
- if ( value & AccessFlagBits2::eVertexAttributeRead )
- result += "VertexAttributeRead | ";
- if ( value & AccessFlagBits2::eUniformRead )
- result += "UniformRead | ";
- if ( value & AccessFlagBits2::eInputAttachmentRead )
- result += "InputAttachmentRead | ";
- if ( value & AccessFlagBits2::eShaderRead )
- result += "ShaderRead | ";
- if ( value & AccessFlagBits2::eShaderWrite )
- result += "ShaderWrite | ";
- if ( value & AccessFlagBits2::eColorAttachmentRead )
- result += "ColorAttachmentRead | ";
- if ( value & AccessFlagBits2::eColorAttachmentWrite )
- result += "ColorAttachmentWrite | ";
- if ( value & AccessFlagBits2::eDepthStencilAttachmentRead )
- result += "DepthStencilAttachmentRead | ";
- if ( value & AccessFlagBits2::eDepthStencilAttachmentWrite )
- result += "DepthStencilAttachmentWrite | ";
- if ( value & AccessFlagBits2::eTransferRead )
- result += "TransferRead | ";
- if ( value & AccessFlagBits2::eTransferWrite )
- result += "TransferWrite | ";
- if ( value & AccessFlagBits2::eHostRead )
- result += "HostRead | ";
- if ( value & AccessFlagBits2::eHostWrite )
- result += "HostWrite | ";
- if ( value & AccessFlagBits2::eMemoryRead )
- result += "MemoryRead | ";
- if ( value & AccessFlagBits2::eMemoryWrite )
- result += "MemoryWrite | ";
- if ( value & AccessFlagBits2::eShaderSampledRead )
- result += "ShaderSampledRead | ";
- if ( value & AccessFlagBits2::eShaderStorageRead )
- result += "ShaderStorageRead | ";
- if ( value & AccessFlagBits2::eShaderStorageWrite )
- result += "ShaderStorageWrite | ";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- if ( value & AccessFlagBits2::eVideoDecodeReadKHR )
- result += "VideoDecodeReadKHR | ";
- if ( value & AccessFlagBits2::eVideoDecodeWriteKHR )
- result += "VideoDecodeWriteKHR | ";
- if ( value & AccessFlagBits2::eVideoEncodeReadKHR )
- result += "VideoEncodeReadKHR | ";
- if ( value & AccessFlagBits2::eVideoEncodeWriteKHR )
- result += "VideoEncodeWriteKHR | ";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- if ( value & AccessFlagBits2::eTransformFeedbackWriteEXT )
- result += "TransformFeedbackWriteEXT | ";
- if ( value & AccessFlagBits2::eTransformFeedbackCounterReadEXT )
- result += "TransformFeedbackCounterReadEXT | ";
- if ( value & AccessFlagBits2::eTransformFeedbackCounterWriteEXT )
- result += "TransformFeedbackCounterWriteEXT | ";
- if ( value & AccessFlagBits2::eConditionalRenderingReadEXT )
- result += "ConditionalRenderingReadEXT | ";
- if ( value & AccessFlagBits2::eCommandPreprocessReadNV )
- result += "CommandPreprocessReadNV | ";
- if ( value & AccessFlagBits2::eCommandPreprocessWriteNV )
- result += "CommandPreprocessWriteNV | ";
- if ( value & AccessFlagBits2::eFragmentShadingRateAttachmentReadKHR )
- result += "FragmentShadingRateAttachmentReadKHR | ";
- if ( value & AccessFlagBits2::eAccelerationStructureReadKHR )
- result += "AccelerationStructureReadKHR | ";
- if ( value & AccessFlagBits2::eAccelerationStructureWriteKHR )
- result += "AccelerationStructureWriteKHR | ";
- if ( value & AccessFlagBits2::eFragmentDensityMapReadEXT )
- result += "FragmentDensityMapReadEXT | ";
- if ( value & AccessFlagBits2::eColorAttachmentReadNoncoherentEXT )
- result += "ColorAttachmentReadNoncoherentEXT | ";
- if ( value & AccessFlagBits2::eInvocationMaskReadHUAWEI )
- result += "InvocationMaskReadHUAWEI | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using SubmitFlags = Flags<SubmitFlagBits>;
-
template <>
struct FlagTraits<SubmitFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( SubmitFlagBits::eProtected )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlags operator|( SubmitFlagBits bit0,
- SubmitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SubmitFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlags operator&( SubmitFlagBits bit0,
- SubmitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SubmitFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlags operator^( SubmitFlagBits bit0,
- SubmitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SubmitFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlags operator~( SubmitFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( SubmitFlags( bits ) );
- }
-
+ using SubmitFlags = Flags<SubmitFlagBits>;
using SubmitFlagsKHR = SubmitFlags;
- VULKAN_HPP_INLINE std::string to_string( SubmitFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & SubmitFlagBits::eProtected )
- result += "Protected | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using RenderingFlags = Flags<RenderingFlagBits>;
-
template <>
struct FlagTraits<RenderingFlagBits>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( RenderingFlagBits::eContentsSecondaryCommandBuffers ) |
- VkFlags( RenderingFlagBits::eSuspending ) | VkFlags( RenderingFlagBits::eResuming )
+ allFlags = VkFlags( RenderingFlagBits::eContentsSecondaryCommandBuffers ) | VkFlags( RenderingFlagBits::eSuspending ) |
+ VkFlags( RenderingFlagBits::eResuming ) | VkFlags( RenderingFlagBits::eEnableLegacyDitheringEXT )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderingFlags operator|( RenderingFlagBits bit0,
- RenderingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return RenderingFlags( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderingFlags operator&( RenderingFlagBits bit0,
- RenderingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return RenderingFlags( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderingFlags operator^( RenderingFlagBits bit0,
- RenderingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return RenderingFlags( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderingFlags operator~( RenderingFlagBits bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( RenderingFlags( bits ) );
- }
-
+ using RenderingFlags = Flags<RenderingFlagBits>;
using RenderingFlagsKHR = RenderingFlags;
- VULKAN_HPP_INLINE std::string to_string( RenderingFlags value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & RenderingFlagBits::eContentsSecondaryCommandBuffers )
- result += "ContentsSecondaryCommandBuffers | ";
- if ( value & RenderingFlagBits::eSuspending )
- result += "Suspending | ";
- if ( value & RenderingFlagBits::eResuming )
- result += "Resuming | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using FormatFeatureFlags2 = Flags<FormatFeatureFlagBits2>;
-
template <>
struct FlagTraits<FormatFeatureFlagBits2>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags64
{
- allFlags =
- VkFlags64( FormatFeatureFlagBits2::eSampledImage ) | VkFlags64( FormatFeatureFlagBits2::eStorageImage ) |
- VkFlags64( FormatFeatureFlagBits2::eStorageImageAtomic ) |
- VkFlags64( FormatFeatureFlagBits2::eUniformTexelBuffer ) |
- VkFlags64( FormatFeatureFlagBits2::eStorageTexelBuffer ) |
- VkFlags64( FormatFeatureFlagBits2::eStorageTexelBufferAtomic ) |
- VkFlags64( FormatFeatureFlagBits2::eVertexBuffer ) | VkFlags64( FormatFeatureFlagBits2::eColorAttachment ) |
- VkFlags64( FormatFeatureFlagBits2::eColorAttachmentBlend ) |
- VkFlags64( FormatFeatureFlagBits2::eDepthStencilAttachment ) | VkFlags64( FormatFeatureFlagBits2::eBlitSrc ) |
- VkFlags64( FormatFeatureFlagBits2::eBlitDst ) | VkFlags64( FormatFeatureFlagBits2::eSampledImageFilterLinear ) |
- VkFlags64( FormatFeatureFlagBits2::eSampledImageFilterCubic ) |
- VkFlags64( FormatFeatureFlagBits2::eTransferSrc ) | VkFlags64( FormatFeatureFlagBits2::eTransferDst ) |
- VkFlags64( FormatFeatureFlagBits2::eSampledImageFilterMinmax ) |
- VkFlags64( FormatFeatureFlagBits2::eMidpointChromaSamples ) |
- VkFlags64( FormatFeatureFlagBits2::eSampledImageYcbcrConversionLinearFilter ) |
- VkFlags64( FormatFeatureFlagBits2::eSampledImageYcbcrConversionSeparateReconstructionFilter ) |
- VkFlags64( FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicit ) |
- VkFlags64( FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) |
- VkFlags64( FormatFeatureFlagBits2::eDisjoint ) | VkFlags64( FormatFeatureFlagBits2::eCositedChromaSamples ) |
- VkFlags64( FormatFeatureFlagBits2::eStorageReadWithoutFormat ) |
- VkFlags64( FormatFeatureFlagBits2::eStorageWriteWithoutFormat ) |
- VkFlags64( FormatFeatureFlagBits2::eSampledImageDepthComparison )
+ allFlags = VkFlags64( FormatFeatureFlagBits2::eSampledImage ) | VkFlags64( FormatFeatureFlagBits2::eStorageImage ) |
+ VkFlags64( FormatFeatureFlagBits2::eStorageImageAtomic ) | VkFlags64( FormatFeatureFlagBits2::eUniformTexelBuffer ) |
+ VkFlags64( FormatFeatureFlagBits2::eStorageTexelBuffer ) | VkFlags64( FormatFeatureFlagBits2::eStorageTexelBufferAtomic ) |
+ VkFlags64( FormatFeatureFlagBits2::eVertexBuffer ) | VkFlags64( FormatFeatureFlagBits2::eColorAttachment ) |
+ VkFlags64( FormatFeatureFlagBits2::eColorAttachmentBlend ) | VkFlags64( FormatFeatureFlagBits2::eDepthStencilAttachment ) |
+ VkFlags64( FormatFeatureFlagBits2::eBlitSrc ) | VkFlags64( FormatFeatureFlagBits2::eBlitDst ) |
+ VkFlags64( FormatFeatureFlagBits2::eSampledImageFilterLinear ) | VkFlags64( FormatFeatureFlagBits2::eSampledImageFilterCubic ) |
+ VkFlags64( FormatFeatureFlagBits2::eTransferSrc ) | VkFlags64( FormatFeatureFlagBits2::eTransferDst ) |
+ VkFlags64( FormatFeatureFlagBits2::eSampledImageFilterMinmax ) | VkFlags64( FormatFeatureFlagBits2::eMidpointChromaSamples ) |
+ VkFlags64( FormatFeatureFlagBits2::eSampledImageYcbcrConversionLinearFilter ) |
+ VkFlags64( FormatFeatureFlagBits2::eSampledImageYcbcrConversionSeparateReconstructionFilter ) |
+ VkFlags64( FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicit ) |
+ VkFlags64( FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) |
+ VkFlags64( FormatFeatureFlagBits2::eDisjoint ) | VkFlags64( FormatFeatureFlagBits2::eCositedChromaSamples ) |
+ VkFlags64( FormatFeatureFlagBits2::eStorageReadWithoutFormat ) | VkFlags64( FormatFeatureFlagBits2::eStorageWriteWithoutFormat ) |
+ VkFlags64( FormatFeatureFlagBits2::eSampledImageDepthComparison )
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- | VkFlags64( FormatFeatureFlagBits2::eVideoDecodeOutputKHR ) |
- VkFlags64( FormatFeatureFlagBits2::eVideoDecodeDpbKHR )
+ | VkFlags64( FormatFeatureFlagBits2::eVideoDecodeOutputKHR ) | VkFlags64( FormatFeatureFlagBits2::eVideoDecodeDpbKHR )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- | VkFlags64( FormatFeatureFlagBits2::eAccelerationStructureVertexBufferKHR ) |
- VkFlags64( FormatFeatureFlagBits2::eFragmentDensityMapEXT ) |
- VkFlags64( FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR )
+ | VkFlags64( FormatFeatureFlagBits2::eAccelerationStructureVertexBufferKHR ) | VkFlags64( FormatFeatureFlagBits2::eFragmentDensityMapEXT ) |
+ VkFlags64( FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR )
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- | VkFlags64( FormatFeatureFlagBits2::eVideoEncodeInputKHR ) |
- VkFlags64( FormatFeatureFlagBits2::eVideoEncodeDpbKHR )
+ | VkFlags64( FormatFeatureFlagBits2::eVideoEncodeInputKHR ) | VkFlags64( FormatFeatureFlagBits2::eVideoEncodeDpbKHR )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- | VkFlags64( FormatFeatureFlagBits2::eLinearColorAttachmentNV )
+ | VkFlags64( FormatFeatureFlagBits2::eLinearColorAttachmentNV ) | VkFlags64( FormatFeatureFlagBits2::eWeightImageQCOM ) |
+ VkFlags64( FormatFeatureFlagBits2::eWeightSampledImageQCOM ) | VkFlags64( FormatFeatureFlagBits2::eBlockMatchingQCOM ) |
+ VkFlags64( FormatFeatureFlagBits2::eBoxFilterSampledQCOM ) | VkFlags64( FormatFeatureFlagBits2::eOpticalFlowImageNV ) |
+ VkFlags64( FormatFeatureFlagBits2::eOpticalFlowVectorNV ) | VkFlags64( FormatFeatureFlagBits2::eOpticalFlowCostNV )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags2
- operator|( FormatFeatureFlagBits2 bit0, FormatFeatureFlagBits2 bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return FormatFeatureFlags2( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags2
- operator&( FormatFeatureFlagBits2 bit0, FormatFeatureFlagBits2 bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return FormatFeatureFlags2( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags2
- operator^( FormatFeatureFlagBits2 bit0, FormatFeatureFlagBits2 bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return FormatFeatureFlags2( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags2 operator~( FormatFeatureFlagBits2 bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( FormatFeatureFlags2( bits ) );
- }
-
+ using FormatFeatureFlags2 = Flags<FormatFeatureFlagBits2>;
using FormatFeatureFlags2KHR = FormatFeatureFlags2;
- VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlags2 value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & FormatFeatureFlagBits2::eSampledImage )
- result += "SampledImage | ";
- if ( value & FormatFeatureFlagBits2::eStorageImage )
- result += "StorageImage | ";
- if ( value & FormatFeatureFlagBits2::eStorageImageAtomic )
- result += "StorageImageAtomic | ";
- if ( value & FormatFeatureFlagBits2::eUniformTexelBuffer )
- result += "UniformTexelBuffer | ";
- if ( value & FormatFeatureFlagBits2::eStorageTexelBuffer )
- result += "StorageTexelBuffer | ";
- if ( value & FormatFeatureFlagBits2::eStorageTexelBufferAtomic )
- result += "StorageTexelBufferAtomic | ";
- if ( value & FormatFeatureFlagBits2::eVertexBuffer )
- result += "VertexBuffer | ";
- if ( value & FormatFeatureFlagBits2::eColorAttachment )
- result += "ColorAttachment | ";
- if ( value & FormatFeatureFlagBits2::eColorAttachmentBlend )
- result += "ColorAttachmentBlend | ";
- if ( value & FormatFeatureFlagBits2::eDepthStencilAttachment )
- result += "DepthStencilAttachment | ";
- if ( value & FormatFeatureFlagBits2::eBlitSrc )
- result += "BlitSrc | ";
- if ( value & FormatFeatureFlagBits2::eBlitDst )
- result += "BlitDst | ";
- if ( value & FormatFeatureFlagBits2::eSampledImageFilterLinear )
- result += "SampledImageFilterLinear | ";
- if ( value & FormatFeatureFlagBits2::eSampledImageFilterCubic )
- result += "SampledImageFilterCubic | ";
- if ( value & FormatFeatureFlagBits2::eTransferSrc )
- result += "TransferSrc | ";
- if ( value & FormatFeatureFlagBits2::eTransferDst )
- result += "TransferDst | ";
- if ( value & FormatFeatureFlagBits2::eSampledImageFilterMinmax )
- result += "SampledImageFilterMinmax | ";
- if ( value & FormatFeatureFlagBits2::eMidpointChromaSamples )
- result += "MidpointChromaSamples | ";
- if ( value & FormatFeatureFlagBits2::eSampledImageYcbcrConversionLinearFilter )
- result += "SampledImageYcbcrConversionLinearFilter | ";
- if ( value & FormatFeatureFlagBits2::eSampledImageYcbcrConversionSeparateReconstructionFilter )
- result += "SampledImageYcbcrConversionSeparateReconstructionFilter | ";
- if ( value & FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicit )
- result += "SampledImageYcbcrConversionChromaReconstructionExplicit | ";
- if ( value & FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable )
- result += "SampledImageYcbcrConversionChromaReconstructionExplicitForceable | ";
- if ( value & FormatFeatureFlagBits2::eDisjoint )
- result += "Disjoint | ";
- if ( value & FormatFeatureFlagBits2::eCositedChromaSamples )
- result += "CositedChromaSamples | ";
- if ( value & FormatFeatureFlagBits2::eStorageReadWithoutFormat )
- result += "StorageReadWithoutFormat | ";
- if ( value & FormatFeatureFlagBits2::eStorageWriteWithoutFormat )
- result += "StorageWriteWithoutFormat | ";
- if ( value & FormatFeatureFlagBits2::eSampledImageDepthComparison )
- result += "SampledImageDepthComparison | ";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- if ( value & FormatFeatureFlagBits2::eVideoDecodeOutputKHR )
- result += "VideoDecodeOutputKHR | ";
- if ( value & FormatFeatureFlagBits2::eVideoDecodeDpbKHR )
- result += "VideoDecodeDpbKHR | ";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- if ( value & FormatFeatureFlagBits2::eAccelerationStructureVertexBufferKHR )
- result += "AccelerationStructureVertexBufferKHR | ";
- if ( value & FormatFeatureFlagBits2::eFragmentDensityMapEXT )
- result += "FragmentDensityMapEXT | ";
- if ( value & FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR )
- result += "FragmentShadingRateAttachmentKHR | ";
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- if ( value & FormatFeatureFlagBits2::eVideoEncodeInputKHR )
- result += "VideoEncodeInputKHR | ";
- if ( value & FormatFeatureFlagBits2::eVideoEncodeDpbKHR )
- result += "VideoEncodeDpbKHR | ";
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- if ( value & FormatFeatureFlagBits2::eLinearColorAttachmentNV )
- result += "LinearColorAttachmentNV | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
//=== VK_KHR_surface ===
- using CompositeAlphaFlagsKHR = Flags<CompositeAlphaFlagBitsKHR>;
-
template <>
struct FlagTraits<CompositeAlphaFlagBitsKHR>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( CompositeAlphaFlagBitsKHR::eOpaque ) | VkFlags( CompositeAlphaFlagBitsKHR::ePreMultiplied ) |
VkFlags( CompositeAlphaFlagBitsKHR::ePostMultiplied ) | VkFlags( CompositeAlphaFlagBitsKHR::eInherit )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR
- operator|( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CompositeAlphaFlagsKHR( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR
- operator&( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CompositeAlphaFlagsKHR( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR
- operator^( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CompositeAlphaFlagsKHR( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator~( CompositeAlphaFlagBitsKHR bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( CompositeAlphaFlagsKHR( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagsKHR value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & CompositeAlphaFlagBitsKHR::eOpaque )
- result += "Opaque | ";
- if ( value & CompositeAlphaFlagBitsKHR::ePreMultiplied )
- result += "PreMultiplied | ";
- if ( value & CompositeAlphaFlagBitsKHR::ePostMultiplied )
- result += "PostMultiplied | ";
- if ( value & CompositeAlphaFlagBitsKHR::eInherit )
- result += "Inherit | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
+ using CompositeAlphaFlagsKHR = Flags<CompositeAlphaFlagBitsKHR>;
//=== VK_KHR_swapchain ===
- using SwapchainCreateFlagsKHR = Flags<SwapchainCreateFlagBitsKHR>;
-
template <>
struct FlagTraits<SwapchainCreateFlagBitsKHR>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions ) |
- VkFlags( SwapchainCreateFlagBitsKHR::eProtected ) |
+ allFlags = VkFlags( SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions ) | VkFlags( SwapchainCreateFlagBitsKHR::eProtected ) |
VkFlags( SwapchainCreateFlagBitsKHR::eMutableFormat )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR
- operator|( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SwapchainCreateFlagsKHR( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR
- operator&( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SwapchainCreateFlagsKHR( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR
- operator^( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SwapchainCreateFlagsKHR( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator~( SwapchainCreateFlagBitsKHR bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( SwapchainCreateFlagsKHR( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagsKHR value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions )
- result += "SplitInstanceBindRegions | ";
- if ( value & SwapchainCreateFlagBitsKHR::eProtected )
- result += "Protected | ";
- if ( value & SwapchainCreateFlagBitsKHR::eMutableFormat )
- result += "MutableFormat | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using DeviceGroupPresentModeFlagsKHR = Flags<DeviceGroupPresentModeFlagBitsKHR>;
+ using SwapchainCreateFlagsKHR = Flags<SwapchainCreateFlagBitsKHR>;
template <>
struct FlagTraits<DeviceGroupPresentModeFlagBitsKHR>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( DeviceGroupPresentModeFlagBitsKHR::eLocal ) |
- VkFlags( DeviceGroupPresentModeFlagBitsKHR::eRemote ) |
- VkFlags( DeviceGroupPresentModeFlagBitsKHR::eSum ) |
- VkFlags( DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice )
+ allFlags = VkFlags( DeviceGroupPresentModeFlagBitsKHR::eLocal ) | VkFlags( DeviceGroupPresentModeFlagBitsKHR::eRemote ) |
+ VkFlags( DeviceGroupPresentModeFlagBitsKHR::eSum ) | VkFlags( DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR
- operator|( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DeviceGroupPresentModeFlagsKHR( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR
- operator&( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DeviceGroupPresentModeFlagsKHR( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR
- operator^( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DeviceGroupPresentModeFlagsKHR( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR
- operator~( DeviceGroupPresentModeFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( DeviceGroupPresentModeFlagsKHR( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagsKHR value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocal )
- result += "Local | ";
- if ( value & DeviceGroupPresentModeFlagBitsKHR::eRemote )
- result += "Remote | ";
- if ( value & DeviceGroupPresentModeFlagBitsKHR::eSum )
- result += "Sum | ";
- if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice )
- result += "LocalMultiDevice | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
+ using DeviceGroupPresentModeFlagsKHR = Flags<DeviceGroupPresentModeFlagBitsKHR>;
//=== VK_KHR_display ===
- using DisplayModeCreateFlagsKHR = Flags<DisplayModeCreateFlagBitsKHR>;
-
- VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagsKHR )
+ template <>
+ struct FlagTraits<DisplayModeCreateFlagBitsKHR>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- using DisplayPlaneAlphaFlagsKHR = Flags<DisplayPlaneAlphaFlagBitsKHR>;
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using DisplayModeCreateFlagsKHR = Flags<DisplayModeCreateFlagBitsKHR>;
template <>
struct FlagTraits<DisplayPlaneAlphaFlagBitsKHR>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( DisplayPlaneAlphaFlagBitsKHR::eOpaque ) | VkFlags( DisplayPlaneAlphaFlagBitsKHR::eGlobal ) |
- VkFlags( DisplayPlaneAlphaFlagBitsKHR::ePerPixel ) |
- VkFlags( DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied )
+ VkFlags( DisplayPlaneAlphaFlagBitsKHR::ePerPixel ) | VkFlags( DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied )
};
};
+ using DisplayPlaneAlphaFlagsKHR = Flags<DisplayPlaneAlphaFlagBitsKHR>;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR
- operator|( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DisplayPlaneAlphaFlagsKHR( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR
- operator&( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DisplayPlaneAlphaFlagsKHR( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR
- operator^( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DisplayPlaneAlphaFlagsKHR( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator~( DisplayPlaneAlphaFlagBitsKHR bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( DisplayPlaneAlphaFlagsKHR( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagsKHR value )
+ template <>
+ struct FlagTraits<DisplaySurfaceCreateFlagBitsKHR>
{
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & DisplayPlaneAlphaFlagBitsKHR::eOpaque )
- result += "Opaque | ";
- if ( value & DisplayPlaneAlphaFlagBitsKHR::eGlobal )
- result += "Global | ";
- if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixel )
- result += "PerPixel | ";
- if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied )
- result += "PerPixelPremultiplied | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
using DisplaySurfaceCreateFlagsKHR = Flags<DisplaySurfaceCreateFlagBitsKHR>;
- VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagsKHR )
- {
- return "{}";
- }
-
- using SurfaceTransformFlagsKHR = Flags<SurfaceTransformFlagBitsKHR>;
-
template <>
struct FlagTraits<SurfaceTransformFlagBitsKHR>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( SurfaceTransformFlagBitsKHR::eIdentity ) | VkFlags( SurfaceTransformFlagBitsKHR::eRotate90 ) |
- VkFlags( SurfaceTransformFlagBitsKHR::eRotate180 ) |
- VkFlags( SurfaceTransformFlagBitsKHR::eRotate270 ) |
- VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirror ) |
- VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 ) |
- VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 ) |
- VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 ) |
+ VkFlags( SurfaceTransformFlagBitsKHR::eRotate180 ) | VkFlags( SurfaceTransformFlagBitsKHR::eRotate270 ) |
+ VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirror ) | VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 ) |
+ VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 ) | VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 ) |
VkFlags( SurfaceTransformFlagBitsKHR::eInherit )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR
- operator|( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SurfaceTransformFlagsKHR( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR
- operator&( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SurfaceTransformFlagsKHR( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR
- operator^( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SurfaceTransformFlagsKHR( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator~( SurfaceTransformFlagBitsKHR bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( SurfaceTransformFlagsKHR( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagsKHR value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & SurfaceTransformFlagBitsKHR::eIdentity )
- result += "Identity | ";
- if ( value & SurfaceTransformFlagBitsKHR::eRotate90 )
- result += "Rotate90 | ";
- if ( value & SurfaceTransformFlagBitsKHR::eRotate180 )
- result += "Rotate180 | ";
- if ( value & SurfaceTransformFlagBitsKHR::eRotate270 )
- result += "Rotate270 | ";
- if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirror )
- result += "HorizontalMirror | ";
- if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 )
- result += "HorizontalMirrorRotate90 | ";
- if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 )
- result += "HorizontalMirrorRotate180 | ";
- if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 )
- result += "HorizontalMirrorRotate270 | ";
- if ( value & SurfaceTransformFlagBitsKHR::eInherit )
- result += "Inherit | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
+ using SurfaceTransformFlagsKHR = Flags<SurfaceTransformFlagBitsKHR>;
#if defined( VK_USE_PLATFORM_XLIB_KHR )
//=== VK_KHR_xlib_surface ===
- using XlibSurfaceCreateFlagsKHR = Flags<XlibSurfaceCreateFlagBitsKHR>;
-
- VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagsKHR )
+ template <>
+ struct FlagTraits<XlibSurfaceCreateFlagBitsKHR>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using XlibSurfaceCreateFlagsKHR = Flags<XlibSurfaceCreateFlagBitsKHR>;
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
#if defined( VK_USE_PLATFORM_XCB_KHR )
//=== VK_KHR_xcb_surface ===
- using XcbSurfaceCreateFlagsKHR = Flags<XcbSurfaceCreateFlagBitsKHR>;
-
- VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagsKHR )
+ template <>
+ struct FlagTraits<XcbSurfaceCreateFlagBitsKHR>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using XcbSurfaceCreateFlagsKHR = Flags<XcbSurfaceCreateFlagBitsKHR>;
#endif /*VK_USE_PLATFORM_XCB_KHR*/
#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
//=== VK_KHR_wayland_surface ===
- using WaylandSurfaceCreateFlagsKHR = Flags<WaylandSurfaceCreateFlagBitsKHR>;
-
- VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagsKHR )
+ template <>
+ struct FlagTraits<WaylandSurfaceCreateFlagBitsKHR>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using WaylandSurfaceCreateFlagsKHR = Flags<WaylandSurfaceCreateFlagBitsKHR>;
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
//=== VK_KHR_android_surface ===
- using AndroidSurfaceCreateFlagsKHR = Flags<AndroidSurfaceCreateFlagBitsKHR>;
-
- VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagsKHR )
+ template <>
+ struct FlagTraits<AndroidSurfaceCreateFlagBitsKHR>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using AndroidSurfaceCreateFlagsKHR = Flags<AndroidSurfaceCreateFlagBitsKHR>;
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_win32_surface ===
- using Win32SurfaceCreateFlagsKHR = Flags<Win32SurfaceCreateFlagBitsKHR>;
-
- VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagsKHR )
+ template <>
+ struct FlagTraits<Win32SurfaceCreateFlagBitsKHR>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using Win32SurfaceCreateFlagsKHR = Flags<Win32SurfaceCreateFlagBitsKHR>;
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_EXT_debug_report ===
- using DebugReportFlagsEXT = Flags<DebugReportFlagBitsEXT>;
-
template <>
struct FlagTraits<DebugReportFlagBitsEXT>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( DebugReportFlagBitsEXT::eInformation ) | VkFlags( DebugReportFlagBitsEXT::eWarning ) |
- VkFlags( DebugReportFlagBitsEXT::ePerformanceWarning ) | VkFlags( DebugReportFlagBitsEXT::eError ) |
- VkFlags( DebugReportFlagBitsEXT::eDebug )
+ VkFlags( DebugReportFlagBitsEXT::ePerformanceWarning ) | VkFlags( DebugReportFlagBitsEXT::eError ) | VkFlags( DebugReportFlagBitsEXT::eDebug )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT
- operator|( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DebugReportFlagsEXT( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT
- operator&( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DebugReportFlagsEXT( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT
- operator^( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DebugReportFlagsEXT( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator~( DebugReportFlagBitsEXT bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( DebugReportFlagsEXT( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( DebugReportFlagsEXT value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & DebugReportFlagBitsEXT::eInformation )
- result += "Information | ";
- if ( value & DebugReportFlagBitsEXT::eWarning )
- result += "Warning | ";
- if ( value & DebugReportFlagBitsEXT::ePerformanceWarning )
- result += "PerformanceWarning | ";
- if ( value & DebugReportFlagBitsEXT::eError )
- result += "Error | ";
- if ( value & DebugReportFlagBitsEXT::eDebug )
- result += "Debug | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
+ using DebugReportFlagsEXT = Flags<DebugReportFlagBitsEXT>;
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_video_queue ===
- using VideoCodecOperationFlagsKHR = Flags<VideoCodecOperationFlagBitsKHR>;
-
template <>
struct FlagTraits<VideoCodecOperationFlagBitsKHR>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( VideoCodecOperationFlagBitsKHR::eInvalid )
-# if defined( VK_ENABLE_BETA_EXTENSIONS )
- | VkFlags( VideoCodecOperationFlagBitsKHR::eEncodeH264EXT ) |
- VkFlags( VideoCodecOperationFlagBitsKHR::eEncodeH265EXT ) |
- VkFlags( VideoCodecOperationFlagBitsKHR::eDecodeH264EXT ) |
+ allFlags = VkFlags( VideoCodecOperationFlagBitsKHR::eNone ) | VkFlags( VideoCodecOperationFlagBitsKHR::eEncodeH264EXT ) |
+ VkFlags( VideoCodecOperationFlagBitsKHR::eEncodeH265EXT ) | VkFlags( VideoCodecOperationFlagBitsKHR::eDecodeH264EXT ) |
VkFlags( VideoCodecOperationFlagBitsKHR::eDecodeH265EXT )
-# endif /*VK_ENABLE_BETA_EXTENSIONS*/
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodecOperationFlagsKHR
- operator|( VideoCodecOperationFlagBitsKHR bit0, VideoCodecOperationFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoCodecOperationFlagsKHR( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodecOperationFlagsKHR
- operator&( VideoCodecOperationFlagBitsKHR bit0, VideoCodecOperationFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoCodecOperationFlagsKHR( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodecOperationFlagsKHR
- operator^( VideoCodecOperationFlagBitsKHR bit0, VideoCodecOperationFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoCodecOperationFlagsKHR( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodecOperationFlagsKHR operator~( VideoCodecOperationFlagBitsKHR bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( VideoCodecOperationFlagsKHR( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( VideoCodecOperationFlagsKHR value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
-# if defined( VK_ENABLE_BETA_EXTENSIONS )
- if ( value & VideoCodecOperationFlagBitsKHR::eEncodeH264EXT )
- result += "EncodeH264EXT | ";
- if ( value & VideoCodecOperationFlagBitsKHR::eEncodeH265EXT )
- result += "EncodeH265EXT | ";
- if ( value & VideoCodecOperationFlagBitsKHR::eDecodeH264EXT )
- result += "DecodeH264EXT | ";
- if ( value & VideoCodecOperationFlagBitsKHR::eDecodeH265EXT )
- result += "DecodeH265EXT | ";
-# endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using VideoChromaSubsamplingFlagsKHR = Flags<VideoChromaSubsamplingFlagBitsKHR>;
+ using VideoCodecOperationFlagsKHR = Flags<VideoCodecOperationFlagBitsKHR>;
template <>
struct FlagTraits<VideoChromaSubsamplingFlagBitsKHR>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( VideoChromaSubsamplingFlagBitsKHR::eInvalid ) |
- VkFlags( VideoChromaSubsamplingFlagBitsKHR::eMonochrome ) |
- VkFlags( VideoChromaSubsamplingFlagBitsKHR::e420 ) |
- VkFlags( VideoChromaSubsamplingFlagBitsKHR::e422 ) | VkFlags( VideoChromaSubsamplingFlagBitsKHR::e444 )
+ allFlags = VkFlags( VideoChromaSubsamplingFlagBitsKHR::eInvalid ) | VkFlags( VideoChromaSubsamplingFlagBitsKHR::eMonochrome ) |
+ VkFlags( VideoChromaSubsamplingFlagBitsKHR::e420 ) | VkFlags( VideoChromaSubsamplingFlagBitsKHR::e422 ) |
+ VkFlags( VideoChromaSubsamplingFlagBitsKHR::e444 )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoChromaSubsamplingFlagsKHR
- operator|( VideoChromaSubsamplingFlagBitsKHR bit0, VideoChromaSubsamplingFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoChromaSubsamplingFlagsKHR( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoChromaSubsamplingFlagsKHR
- operator&( VideoChromaSubsamplingFlagBitsKHR bit0, VideoChromaSubsamplingFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoChromaSubsamplingFlagsKHR( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoChromaSubsamplingFlagsKHR
- operator^( VideoChromaSubsamplingFlagBitsKHR bit0, VideoChromaSubsamplingFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoChromaSubsamplingFlagsKHR( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoChromaSubsamplingFlagsKHR
- operator~( VideoChromaSubsamplingFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( VideoChromaSubsamplingFlagsKHR( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( VideoChromaSubsamplingFlagsKHR value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & VideoChromaSubsamplingFlagBitsKHR::eMonochrome )
- result += "Monochrome | ";
- if ( value & VideoChromaSubsamplingFlagBitsKHR::e420 )
- result += "420 | ";
- if ( value & VideoChromaSubsamplingFlagBitsKHR::e422 )
- result += "422 | ";
- if ( value & VideoChromaSubsamplingFlagBitsKHR::e444 )
- result += "444 | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using VideoComponentBitDepthFlagsKHR = Flags<VideoComponentBitDepthFlagBitsKHR>;
+ using VideoChromaSubsamplingFlagsKHR = Flags<VideoChromaSubsamplingFlagBitsKHR>;
template <>
struct FlagTraits<VideoComponentBitDepthFlagBitsKHR>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( VideoComponentBitDepthFlagBitsKHR::eInvalid ) |
- VkFlags( VideoComponentBitDepthFlagBitsKHR::e8 ) | VkFlags( VideoComponentBitDepthFlagBitsKHR::e10 ) |
- VkFlags( VideoComponentBitDepthFlagBitsKHR::e12 )
+ allFlags = VkFlags( VideoComponentBitDepthFlagBitsKHR::eInvalid ) | VkFlags( VideoComponentBitDepthFlagBitsKHR::e8 ) |
+ VkFlags( VideoComponentBitDepthFlagBitsKHR::e10 ) | VkFlags( VideoComponentBitDepthFlagBitsKHR::e12 )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoComponentBitDepthFlagsKHR
- operator|( VideoComponentBitDepthFlagBitsKHR bit0, VideoComponentBitDepthFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoComponentBitDepthFlagsKHR( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoComponentBitDepthFlagsKHR
- operator&( VideoComponentBitDepthFlagBitsKHR bit0, VideoComponentBitDepthFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoComponentBitDepthFlagsKHR( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoComponentBitDepthFlagsKHR
- operator^( VideoComponentBitDepthFlagBitsKHR bit0, VideoComponentBitDepthFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoComponentBitDepthFlagsKHR( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoComponentBitDepthFlagsKHR
- operator~( VideoComponentBitDepthFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( VideoComponentBitDepthFlagsKHR( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( VideoComponentBitDepthFlagsKHR value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & VideoComponentBitDepthFlagBitsKHR::e8 )
- result += "8 | ";
- if ( value & VideoComponentBitDepthFlagBitsKHR::e10 )
- result += "10 | ";
- if ( value & VideoComponentBitDepthFlagBitsKHR::e12 )
- result += "12 | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using VideoCapabilityFlagsKHR = Flags<VideoCapabilityFlagBitsKHR>;
+ using VideoComponentBitDepthFlagsKHR = Flags<VideoComponentBitDepthFlagBitsKHR>;
template <>
struct FlagTraits<VideoCapabilityFlagBitsKHR>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( VideoCapabilityFlagBitsKHR::eProtectedContent ) |
- VkFlags( VideoCapabilityFlagBitsKHR::eSeparateReferenceImages )
+ allFlags = VkFlags( VideoCapabilityFlagBitsKHR::eProtectedContent ) | VkFlags( VideoCapabilityFlagBitsKHR::eSeparateReferenceImages )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCapabilityFlagsKHR
- operator|( VideoCapabilityFlagBitsKHR bit0, VideoCapabilityFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoCapabilityFlagsKHR( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCapabilityFlagsKHR
- operator&( VideoCapabilityFlagBitsKHR bit0, VideoCapabilityFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoCapabilityFlagsKHR( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCapabilityFlagsKHR
- operator^( VideoCapabilityFlagBitsKHR bit0, VideoCapabilityFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoCapabilityFlagsKHR( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCapabilityFlagsKHR operator~( VideoCapabilityFlagBitsKHR bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( VideoCapabilityFlagsKHR( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( VideoCapabilityFlagsKHR value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & VideoCapabilityFlagBitsKHR::eProtectedContent )
- result += "ProtectedContent | ";
- if ( value & VideoCapabilityFlagBitsKHR::eSeparateReferenceImages )
- result += "SeparateReferenceImages | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using VideoSessionCreateFlagsKHR = Flags<VideoSessionCreateFlagBitsKHR>;
+ using VideoCapabilityFlagsKHR = Flags<VideoCapabilityFlagBitsKHR>;
template <>
struct FlagTraits<VideoSessionCreateFlagBitsKHR>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags =
- VkFlags( VideoSessionCreateFlagBitsKHR::eDefault ) | VkFlags( VideoSessionCreateFlagBitsKHR::eProtectedContent )
+ allFlags = VkFlags( VideoSessionCreateFlagBitsKHR::eProtectedContent )
};
};
+ using VideoSessionCreateFlagsKHR = Flags<VideoSessionCreateFlagBitsKHR>;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoSessionCreateFlagsKHR
- operator|( VideoSessionCreateFlagBitsKHR bit0, VideoSessionCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoSessionCreateFlagsKHR( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoSessionCreateFlagsKHR
- operator&( VideoSessionCreateFlagBitsKHR bit0, VideoSessionCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoSessionCreateFlagsKHR( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoSessionCreateFlagsKHR
- operator^( VideoSessionCreateFlagBitsKHR bit0, VideoSessionCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+ template <>
+ struct FlagTraits<VideoSessionParametersCreateFlagBitsKHR>
{
- return VideoSessionCreateFlagsKHR( bit0 ) ^ bit1;
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoSessionCreateFlagsKHR operator~( VideoSessionCreateFlagBitsKHR bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( VideoSessionCreateFlagsKHR( bits ) );
- }
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using VideoSessionParametersCreateFlagsKHR = Flags<VideoSessionParametersCreateFlagBitsKHR>;
- VULKAN_HPP_INLINE std::string to_string( VideoSessionCreateFlagsKHR value )
+ template <>
+ struct FlagTraits<VideoBeginCodingFlagBitsKHR>
{
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & VideoSessionCreateFlagBitsKHR::eProtectedContent )
- result += "ProtectedContent | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
using VideoBeginCodingFlagsKHR = Flags<VideoBeginCodingFlagBitsKHR>;
- VULKAN_HPP_INLINE std::string to_string( VideoBeginCodingFlagsKHR )
- {
- return "{}";
- }
-
- using VideoEndCodingFlagsKHR = Flags<VideoEndCodingFlagBitsKHR>;
-
- VULKAN_HPP_INLINE std::string to_string( VideoEndCodingFlagsKHR )
- {
- return "{}";
- }
-
- using VideoCodingControlFlagsKHR = Flags<VideoCodingControlFlagBitsKHR>;
-
template <>
- struct FlagTraits<VideoCodingControlFlagBitsKHR>
+ struct FlagTraits<VideoEndCodingFlagBitsKHR>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( VideoCodingControlFlagBitsKHR::eDefault ) | VkFlags( VideoCodingControlFlagBitsKHR::eReset )
+ allFlags = 0
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingControlFlagsKHR
- operator|( VideoCodingControlFlagBitsKHR bit0, VideoCodingControlFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoCodingControlFlagsKHR( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingControlFlagsKHR
- operator&( VideoCodingControlFlagBitsKHR bit0, VideoCodingControlFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoCodingControlFlagsKHR( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingControlFlagsKHR
- operator^( VideoCodingControlFlagBitsKHR bit0, VideoCodingControlFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoCodingControlFlagsKHR( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingControlFlagsKHR operator~( VideoCodingControlFlagBitsKHR bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( VideoCodingControlFlagsKHR( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( VideoCodingControlFlagsKHR value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & VideoCodingControlFlagBitsKHR::eReset )
- result += "Reset | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using VideoCodingQualityPresetFlagsKHR = Flags<VideoCodingQualityPresetFlagBitsKHR>;
+ using VideoEndCodingFlagsKHR = Flags<VideoEndCodingFlagBitsKHR>;
template <>
- struct FlagTraits<VideoCodingQualityPresetFlagBitsKHR>
+ struct FlagTraits<VideoCodingControlFlagBitsKHR>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( VideoCodingQualityPresetFlagBitsKHR::eNormal ) |
- VkFlags( VideoCodingQualityPresetFlagBitsKHR::ePower ) |
- VkFlags( VideoCodingQualityPresetFlagBitsKHR::eQuality )
+ allFlags = VkFlags( VideoCodingControlFlagBitsKHR::eReset ) | VkFlags( VideoCodingControlFlagBitsKHR::eEncodeRateControl ) |
+ VkFlags( VideoCodingControlFlagBitsKHR::eEncodeRateControlLayer )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingQualityPresetFlagsKHR
- operator|( VideoCodingQualityPresetFlagBitsKHR bit0, VideoCodingQualityPresetFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoCodingQualityPresetFlagsKHR( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingQualityPresetFlagsKHR
- operator&( VideoCodingQualityPresetFlagBitsKHR bit0, VideoCodingQualityPresetFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoCodingQualityPresetFlagsKHR( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingQualityPresetFlagsKHR
- operator^( VideoCodingQualityPresetFlagBitsKHR bit0, VideoCodingQualityPresetFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoCodingQualityPresetFlagsKHR( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingQualityPresetFlagsKHR
- operator~( VideoCodingQualityPresetFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( VideoCodingQualityPresetFlagsKHR( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( VideoCodingQualityPresetFlagsKHR value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & VideoCodingQualityPresetFlagBitsKHR::eNormal )
- result += "Normal | ";
- if ( value & VideoCodingQualityPresetFlagBitsKHR::ePower )
- result += "Power | ";
- if ( value & VideoCodingQualityPresetFlagBitsKHR::eQuality )
- result += "Quality | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
+ using VideoCodingControlFlagsKHR = Flags<VideoCodingControlFlagBitsKHR>;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_video_decode_queue ===
- using VideoDecodeFlagsKHR = Flags<VideoDecodeFlagBitsKHR>;
-
template <>
- struct FlagTraits<VideoDecodeFlagBitsKHR>
+ struct FlagTraits<VideoDecodeCapabilityFlagBitsKHR>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( VideoDecodeFlagBitsKHR::eDefault ) | VkFlags( VideoDecodeFlagBitsKHR::eReserved0 )
+ allFlags = VkFlags( VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputCoincide ) | VkFlags( VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputDistinct )
};
};
+ using VideoDecodeCapabilityFlagsKHR = Flags<VideoDecodeCapabilityFlagBitsKHR>;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeFlagsKHR
- operator|( VideoDecodeFlagBitsKHR bit0, VideoDecodeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoDecodeFlagsKHR( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeFlagsKHR
- operator&( VideoDecodeFlagBitsKHR bit0, VideoDecodeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoDecodeFlagsKHR( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeFlagsKHR
- operator^( VideoDecodeFlagBitsKHR bit0, VideoDecodeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+ template <>
+ struct FlagTraits<VideoDecodeUsageFlagBitsKHR>
{
- return VideoDecodeFlagsKHR( bit0 ) ^ bit1;
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeFlagsKHR operator~( VideoDecodeFlagBitsKHR bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( VideoDecodeFlagsKHR( bits ) );
- }
+ enum : VkFlags
+ {
+ allFlags = VkFlags( VideoDecodeUsageFlagBitsKHR::eDefault ) | VkFlags( VideoDecodeUsageFlagBitsKHR::eTranscoding ) |
+ VkFlags( VideoDecodeUsageFlagBitsKHR::eOffline ) | VkFlags( VideoDecodeUsageFlagBitsKHR::eStreaming )
+ };
+ };
+ using VideoDecodeUsageFlagsKHR = Flags<VideoDecodeUsageFlagBitsKHR>;
- VULKAN_HPP_INLINE std::string to_string( VideoDecodeFlagsKHR value )
+ template <>
+ struct FlagTraits<VideoDecodeFlagBitsKHR>
{
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & VideoDecodeFlagBitsKHR::eReserved0 )
- result += "Reserved0 | ";
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using VideoDecodeFlagsKHR = Flags<VideoDecodeFlagBitsKHR>;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_EXT_transform_feedback ===
- using PipelineRasterizationStateStreamCreateFlagsEXT = Flags<PipelineRasterizationStateStreamCreateFlagBitsEXT>;
-
- VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagsEXT )
+ template <>
+ struct FlagTraits<PipelineRasterizationStateStreamCreateFlagBitsEXT>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using PipelineRasterizationStateStreamCreateFlagsEXT = Flags<PipelineRasterizationStateStreamCreateFlagBitsEXT>;
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_EXT_video_encode_h264 ===
- using VideoEncodeH264CapabilityFlagsEXT = Flags<VideoEncodeH264CapabilityFlagBitsEXT>;
-
template <>
struct FlagTraits<VideoEncodeH264CapabilityFlagBitsEXT>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eCabac ) |
- VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eCavlc ) |
- VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eWeightedBiPredImplicit ) |
- VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eTransform8X8 ) |
- VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eChromaQpOffset ) |
- VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eSecondChromaQpOffset ) |
- VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterDisabled ) |
- VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterEnabled ) |
- VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterPartial ) |
- VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eMultipleSlicePerFrame ) |
- VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eEvenlyDistributedSliceSize )
+ allFlags =
+ VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eDirect8X8InferenceEnabled ) |
+ VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eDirect8X8InferenceDisabled ) | VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eSeparateColourPlane ) |
+ VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eQpprimeYZeroTransformBypass ) | VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eScalingLists ) |
+ VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eHrdCompliance ) | VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eChromaQpOffset ) |
+ VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eSecondChromaQpOffset ) | VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::ePicInitQpMinus26 ) |
+ VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eWeightedPred ) | VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eWeightedBipredExplicit ) |
+ VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eWeightedBipredImplicit ) | VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eWeightedPredNoTable ) |
+ VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eTransform8X8 ) | VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eCabac ) |
+ VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eCavlc ) | VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterDisabled ) |
+ VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterEnabled ) | VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterPartial ) |
+ VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eDisableDirectSpatialMvPred ) | VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eMultipleSlicePerFrame ) |
+ VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eSliceMbCount ) | VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eRowUnalignedSlice ) |
+ VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eDifferentSliceType ) | VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eBFrameInL1List )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilityFlagsEXT operator|(
- VideoEncodeH264CapabilityFlagBitsEXT bit0, VideoEncodeH264CapabilityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeH264CapabilityFlagsEXT( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilityFlagsEXT operator&(
- VideoEncodeH264CapabilityFlagBitsEXT bit0, VideoEncodeH264CapabilityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeH264CapabilityFlagsEXT( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilityFlagsEXT operator^(
- VideoEncodeH264CapabilityFlagBitsEXT bit0, VideoEncodeH264CapabilityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeH264CapabilityFlagsEXT( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilityFlagsEXT
- operator~( VideoEncodeH264CapabilityFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( VideoEncodeH264CapabilityFlagsEXT( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CapabilityFlagsEXT value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eCabac )
- result += "Cabac | ";
- if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eCavlc )
- result += "Cavlc | ";
- if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eWeightedBiPredImplicit )
- result += "WeightedBiPredImplicit | ";
- if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eTransform8X8 )
- result += "Transform8X8 | ";
- if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eChromaQpOffset )
- result += "ChromaQpOffset | ";
- if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eSecondChromaQpOffset )
- result += "SecondChromaQpOffset | ";
- if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterDisabled )
- result += "DeblockingFilterDisabled | ";
- if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterEnabled )
- result += "DeblockingFilterEnabled | ";
- if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterPartial )
- result += "DeblockingFilterPartial | ";
- if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eMultipleSlicePerFrame )
- result += "MultipleSlicePerFrame | ";
- if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eEvenlyDistributedSliceSize )
- result += "EvenlyDistributedSliceSize | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using VideoEncodeH264InputModeFlagsEXT = Flags<VideoEncodeH264InputModeFlagBitsEXT>;
+ using VideoEncodeH264CapabilityFlagsEXT = Flags<VideoEncodeH264CapabilityFlagBitsEXT>;
template <>
struct FlagTraits<VideoEncodeH264InputModeFlagBitsEXT>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( VideoEncodeH264InputModeFlagBitsEXT::eFrame ) |
- VkFlags( VideoEncodeH264InputModeFlagBitsEXT::eSlice ) |
+ allFlags = VkFlags( VideoEncodeH264InputModeFlagBitsEXT::eFrame ) | VkFlags( VideoEncodeH264InputModeFlagBitsEXT::eSlice ) |
VkFlags( VideoEncodeH264InputModeFlagBitsEXT::eNonVcl )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264InputModeFlagsEXT
- operator|( VideoEncodeH264InputModeFlagBitsEXT bit0, VideoEncodeH264InputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeH264InputModeFlagsEXT( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264InputModeFlagsEXT
- operator&( VideoEncodeH264InputModeFlagBitsEXT bit0, VideoEncodeH264InputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeH264InputModeFlagsEXT( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264InputModeFlagsEXT
- operator^( VideoEncodeH264InputModeFlagBitsEXT bit0, VideoEncodeH264InputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeH264InputModeFlagsEXT( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264InputModeFlagsEXT
- operator~( VideoEncodeH264InputModeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( VideoEncodeH264InputModeFlagsEXT( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264InputModeFlagsEXT value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & VideoEncodeH264InputModeFlagBitsEXT::eFrame )
- result += "Frame | ";
- if ( value & VideoEncodeH264InputModeFlagBitsEXT::eSlice )
- result += "Slice | ";
- if ( value & VideoEncodeH264InputModeFlagBitsEXT::eNonVcl )
- result += "NonVcl | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using VideoEncodeH264OutputModeFlagsEXT = Flags<VideoEncodeH264OutputModeFlagBitsEXT>;
+ using VideoEncodeH264InputModeFlagsEXT = Flags<VideoEncodeH264InputModeFlagBitsEXT>;
template <>
struct FlagTraits<VideoEncodeH264OutputModeFlagBitsEXT>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( VideoEncodeH264OutputModeFlagBitsEXT::eFrame ) |
- VkFlags( VideoEncodeH264OutputModeFlagBitsEXT::eSlice ) |
+ allFlags = VkFlags( VideoEncodeH264OutputModeFlagBitsEXT::eFrame ) | VkFlags( VideoEncodeH264OutputModeFlagBitsEXT::eSlice ) |
VkFlags( VideoEncodeH264OutputModeFlagBitsEXT::eNonVcl )
};
};
+ using VideoEncodeH264OutputModeFlagsEXT = Flags<VideoEncodeH264OutputModeFlagBitsEXT>;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264OutputModeFlagsEXT operator|(
- VideoEncodeH264OutputModeFlagBitsEXT bit0, VideoEncodeH264OutputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeH264OutputModeFlagsEXT( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264OutputModeFlagsEXT operator&(
- VideoEncodeH264OutputModeFlagBitsEXT bit0, VideoEncodeH264OutputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeH264OutputModeFlagsEXT( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264OutputModeFlagsEXT operator^(
- VideoEncodeH264OutputModeFlagBitsEXT bit0, VideoEncodeH264OutputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeH264OutputModeFlagsEXT( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264OutputModeFlagsEXT
- operator~( VideoEncodeH264OutputModeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( VideoEncodeH264OutputModeFlagsEXT( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264OutputModeFlagsEXT value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eFrame )
- result += "Frame | ";
- if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eSlice )
- result += "Slice | ";
- if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eNonVcl )
- result += "NonVcl | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using VideoEncodeH264CreateFlagsEXT = Flags<VideoEncodeH264CreateFlagBitsEXT>;
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_EXT_video_encode_h265 ===
template <>
- struct FlagTraits<VideoEncodeH264CreateFlagBitsEXT>
- {
- enum : VkFlags
- {
- allFlags =
- VkFlags( VideoEncodeH264CreateFlagBitsEXT::eDefault ) | VkFlags( VideoEncodeH264CreateFlagBitsEXT::eReserved0 )
- };
- };
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CreateFlagsEXT
- operator|( VideoEncodeH264CreateFlagBitsEXT bit0, VideoEncodeH264CreateFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+ struct FlagTraits<VideoEncodeH265CapabilityFlagBitsEXT>
{
- return VideoEncodeH264CreateFlagsEXT( bit0 ) | bit1;
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CreateFlagsEXT
- operator&( VideoEncodeH264CreateFlagBitsEXT bit0, VideoEncodeH264CreateFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeH264CreateFlagsEXT( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CreateFlagsEXT
- operator^( VideoEncodeH264CreateFlagBitsEXT bit0, VideoEncodeH264CreateFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeH264CreateFlagsEXT( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CreateFlagsEXT
- operator~( VideoEncodeH264CreateFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( VideoEncodeH264CreateFlagsEXT( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CreateFlagsEXT value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & VideoEncodeH264CreateFlagBitsEXT::eReserved0 )
- result += "Reserved0 | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using VideoEncodeH264RateControlStructureFlagsEXT = Flags<VideoEncodeH264RateControlStructureFlagBitsEXT>;
-
- template <>
- struct FlagTraits<VideoEncodeH264RateControlStructureFlagBitsEXT>
- {
enum : VkFlags
{
- allFlags = VkFlags( VideoEncodeH264RateControlStructureFlagBitsEXT::eUnknown ) |
- VkFlags( VideoEncodeH264RateControlStructureFlagBitsEXT::eFlat ) |
- VkFlags( VideoEncodeH264RateControlStructureFlagBitsEXT::eDyadic )
+ allFlags =
+ VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eSeparateColourPlane ) | VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eScalingLists ) |
+ VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eSampleAdaptiveOffsetEnabled ) | VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::ePcmEnable ) |
+ VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eSpsTemporalMvpEnabled ) | VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eHrdCompliance ) |
+ VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eInitQpMinus26 ) | VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eLog2ParallelMergeLevelMinus2 ) |
+ VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eSignDataHidingEnabled ) | VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eTransformSkipEnabled ) |
+ VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eTransformSkipDisabled ) |
+ VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::ePpsSliceChromaQpOffsetsPresent ) | VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eWeightedPred ) |
+ VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eWeightedBipred ) | VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eWeightedPredNoTable ) |
+ VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eTransquantBypassEnabled ) | VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eEntropyCodingSyncEnabled ) |
+ VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eDeblockingFilterOverrideEnabled ) |
+ VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eMultipleTilePerFrame ) | VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eMultipleSlicePerTile ) |
+ VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eMultipleTilePerSlice ) | VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eSliceSegmentCtbCount ) |
+ VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eRowUnalignedSliceSegment ) | VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eDependentSliceSegment ) |
+ VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eDifferentSliceType ) | VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eBFrameInL1List )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlStructureFlagsEXT
- operator|( VideoEncodeH264RateControlStructureFlagBitsEXT bit0,
- VideoEncodeH264RateControlStructureFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeH264RateControlStructureFlagsEXT( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlStructureFlagsEXT
- operator&( VideoEncodeH264RateControlStructureFlagBitsEXT bit0,
- VideoEncodeH264RateControlStructureFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeH264RateControlStructureFlagsEXT( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlStructureFlagsEXT
- operator^( VideoEncodeH264RateControlStructureFlagBitsEXT bit0,
- VideoEncodeH264RateControlStructureFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeH264RateControlStructureFlagsEXT( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlStructureFlagsEXT
- operator~( VideoEncodeH264RateControlStructureFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( VideoEncodeH264RateControlStructureFlagsEXT( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264RateControlStructureFlagsEXT value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & VideoEncodeH264RateControlStructureFlagBitsEXT::eFlat )
- result += "Flat | ";
- if ( value & VideoEncodeH264RateControlStructureFlagBitsEXT::eDyadic )
- result += "Dyadic | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- //=== VK_EXT_video_encode_h265 ===
-
using VideoEncodeH265CapabilityFlagsEXT = Flags<VideoEncodeH265CapabilityFlagBitsEXT>;
- VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CapabilityFlagsEXT )
- {
- return "{}";
- }
-
- using VideoEncodeH265InputModeFlagsEXT = Flags<VideoEncodeH265InputModeFlagBitsEXT>;
-
template <>
struct FlagTraits<VideoEncodeH265InputModeFlagBitsEXT>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( VideoEncodeH265InputModeFlagBitsEXT::eFrame ) |
- VkFlags( VideoEncodeH265InputModeFlagBitsEXT::eSliceSegment ) |
+ allFlags = VkFlags( VideoEncodeH265InputModeFlagBitsEXT::eFrame ) | VkFlags( VideoEncodeH265InputModeFlagBitsEXT::eSliceSegment ) |
VkFlags( VideoEncodeH265InputModeFlagBitsEXT::eNonVcl )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265InputModeFlagsEXT
- operator|( VideoEncodeH265InputModeFlagBitsEXT bit0, VideoEncodeH265InputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeH265InputModeFlagsEXT( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265InputModeFlagsEXT
- operator&( VideoEncodeH265InputModeFlagBitsEXT bit0, VideoEncodeH265InputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeH265InputModeFlagsEXT( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265InputModeFlagsEXT
- operator^( VideoEncodeH265InputModeFlagBitsEXT bit0, VideoEncodeH265InputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeH265InputModeFlagsEXT( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265InputModeFlagsEXT
- operator~( VideoEncodeH265InputModeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( VideoEncodeH265InputModeFlagsEXT( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265InputModeFlagsEXT value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & VideoEncodeH265InputModeFlagBitsEXT::eFrame )
- result += "Frame | ";
- if ( value & VideoEncodeH265InputModeFlagBitsEXT::eSliceSegment )
- result += "SliceSegment | ";
- if ( value & VideoEncodeH265InputModeFlagBitsEXT::eNonVcl )
- result += "NonVcl | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using VideoEncodeH265OutputModeFlagsEXT = Flags<VideoEncodeH265OutputModeFlagBitsEXT>;
+ using VideoEncodeH265InputModeFlagsEXT = Flags<VideoEncodeH265InputModeFlagBitsEXT>;
template <>
struct FlagTraits<VideoEncodeH265OutputModeFlagBitsEXT>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( VideoEncodeH265OutputModeFlagBitsEXT::eFrame ) |
- VkFlags( VideoEncodeH265OutputModeFlagBitsEXT::eSliceSegment ) |
+ allFlags = VkFlags( VideoEncodeH265OutputModeFlagBitsEXT::eFrame ) | VkFlags( VideoEncodeH265OutputModeFlagBitsEXT::eSliceSegment ) |
VkFlags( VideoEncodeH265OutputModeFlagBitsEXT::eNonVcl )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265OutputModeFlagsEXT operator|(
- VideoEncodeH265OutputModeFlagBitsEXT bit0, VideoEncodeH265OutputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeH265OutputModeFlagsEXT( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265OutputModeFlagsEXT operator&(
- VideoEncodeH265OutputModeFlagBitsEXT bit0, VideoEncodeH265OutputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeH265OutputModeFlagsEXT( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265OutputModeFlagsEXT operator^(
- VideoEncodeH265OutputModeFlagBitsEXT bit0, VideoEncodeH265OutputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeH265OutputModeFlagsEXT( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265OutputModeFlagsEXT
- operator~( VideoEncodeH265OutputModeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( VideoEncodeH265OutputModeFlagsEXT( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265OutputModeFlagsEXT value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & VideoEncodeH265OutputModeFlagBitsEXT::eFrame )
- result += "Frame | ";
- if ( value & VideoEncodeH265OutputModeFlagBitsEXT::eSliceSegment )
- result += "SliceSegment | ";
- if ( value & VideoEncodeH265OutputModeFlagBitsEXT::eNonVcl )
- result += "NonVcl | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using VideoEncodeH265CreateFlagsEXT = Flags<VideoEncodeH265CreateFlagBitsEXT>;
-
- VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CreateFlagsEXT )
- {
- return "{}";
- }
-
- using VideoEncodeH265CtbSizeFlagsEXT = Flags<VideoEncodeH265CtbSizeFlagBitsEXT>;
+ using VideoEncodeH265OutputModeFlagsEXT = Flags<VideoEncodeH265OutputModeFlagBitsEXT>;
template <>
struct FlagTraits<VideoEncodeH265CtbSizeFlagBitsEXT>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( VideoEncodeH265CtbSizeFlagBitsEXT::e8 ) | VkFlags( VideoEncodeH265CtbSizeFlagBitsEXT::e16 ) |
- VkFlags( VideoEncodeH265CtbSizeFlagBitsEXT::e32 ) | VkFlags( VideoEncodeH265CtbSizeFlagBitsEXT::e64 )
+ allFlags = VkFlags( VideoEncodeH265CtbSizeFlagBitsEXT::e16 ) | VkFlags( VideoEncodeH265CtbSizeFlagBitsEXT::e32 ) |
+ VkFlags( VideoEncodeH265CtbSizeFlagBitsEXT::e64 )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265CtbSizeFlagsEXT
- operator|( VideoEncodeH265CtbSizeFlagBitsEXT bit0, VideoEncodeH265CtbSizeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeH265CtbSizeFlagsEXT( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265CtbSizeFlagsEXT
- operator&( VideoEncodeH265CtbSizeFlagBitsEXT bit0, VideoEncodeH265CtbSizeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeH265CtbSizeFlagsEXT( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265CtbSizeFlagsEXT
- operator^( VideoEncodeH265CtbSizeFlagBitsEXT bit0, VideoEncodeH265CtbSizeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeH265CtbSizeFlagsEXT( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265CtbSizeFlagsEXT
- operator~( VideoEncodeH265CtbSizeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( VideoEncodeH265CtbSizeFlagsEXT( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CtbSizeFlagsEXT value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & VideoEncodeH265CtbSizeFlagBitsEXT::e8 )
- result += "8 | ";
- if ( value & VideoEncodeH265CtbSizeFlagBitsEXT::e16 )
- result += "16 | ";
- if ( value & VideoEncodeH265CtbSizeFlagBitsEXT::e32 )
- result += "32 | ";
- if ( value & VideoEncodeH265CtbSizeFlagBitsEXT::e64 )
- result += "64 | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using VideoEncodeH265RateControlStructureFlagsEXT = Flags<VideoEncodeH265RateControlStructureFlagBitsEXT>;
+ using VideoEncodeH265CtbSizeFlagsEXT = Flags<VideoEncodeH265CtbSizeFlagBitsEXT>;
template <>
- struct FlagTraits<VideoEncodeH265RateControlStructureFlagBitsEXT>
+ struct FlagTraits<VideoEncodeH265TransformBlockSizeFlagBitsEXT>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( VideoEncodeH265RateControlStructureFlagBitsEXT::eUnknown ) |
- VkFlags( VideoEncodeH265RateControlStructureFlagBitsEXT::eFlat ) |
- VkFlags( VideoEncodeH265RateControlStructureFlagBitsEXT::eDyadic )
+ allFlags = VkFlags( VideoEncodeH265TransformBlockSizeFlagBitsEXT::e4 ) | VkFlags( VideoEncodeH265TransformBlockSizeFlagBitsEXT::e8 ) |
+ VkFlags( VideoEncodeH265TransformBlockSizeFlagBitsEXT::e16 ) | VkFlags( VideoEncodeH265TransformBlockSizeFlagBitsEXT::e32 )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlStructureFlagsEXT
- operator|( VideoEncodeH265RateControlStructureFlagBitsEXT bit0,
- VideoEncodeH265RateControlStructureFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeH265RateControlStructureFlagsEXT( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlStructureFlagsEXT
- operator&( VideoEncodeH265RateControlStructureFlagBitsEXT bit0,
- VideoEncodeH265RateControlStructureFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeH265RateControlStructureFlagsEXT( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlStructureFlagsEXT
- operator^( VideoEncodeH265RateControlStructureFlagBitsEXT bit0,
- VideoEncodeH265RateControlStructureFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeH265RateControlStructureFlagsEXT( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlStructureFlagsEXT
- operator~( VideoEncodeH265RateControlStructureFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( VideoEncodeH265RateControlStructureFlagsEXT( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265RateControlStructureFlagsEXT value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & VideoEncodeH265RateControlStructureFlagBitsEXT::eFlat )
- result += "Flat | ";
- if ( value & VideoEncodeH265RateControlStructureFlagBitsEXT::eDyadic )
- result += "Dyadic | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
+ using VideoEncodeH265TransformBlockSizeFlagsEXT = Flags<VideoEncodeH265TransformBlockSizeFlagBitsEXT>;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_EXT_video_decode_h264 ===
- using VideoDecodeH264PictureLayoutFlagsEXT = Flags<VideoDecodeH264PictureLayoutFlagBitsEXT>;
-
template <>
struct FlagTraits<VideoDecodeH264PictureLayoutFlagBitsEXT>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( VideoDecodeH264PictureLayoutFlagBitsEXT::eProgressive ) |
@@ -17491,1266 +6242,847 @@ namespace VULKAN_HPP_NAMESPACE
VkFlags( VideoDecodeH264PictureLayoutFlagBitsEXT::eInterlacedSeparatePlanes )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureLayoutFlagsEXT operator|(
- VideoDecodeH264PictureLayoutFlagBitsEXT bit0, VideoDecodeH264PictureLayoutFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoDecodeH264PictureLayoutFlagsEXT( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureLayoutFlagsEXT operator&(
- VideoDecodeH264PictureLayoutFlagBitsEXT bit0, VideoDecodeH264PictureLayoutFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoDecodeH264PictureLayoutFlagsEXT( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureLayoutFlagsEXT operator^(
- VideoDecodeH264PictureLayoutFlagBitsEXT bit0, VideoDecodeH264PictureLayoutFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoDecodeH264PictureLayoutFlagsEXT( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureLayoutFlagsEXT
- operator~( VideoDecodeH264PictureLayoutFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( VideoDecodeH264PictureLayoutFlagsEXT( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264PictureLayoutFlagsEXT value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & VideoDecodeH264PictureLayoutFlagBitsEXT::eInterlacedInterleavedLines )
- result += "InterlacedInterleavedLines | ";
- if ( value & VideoDecodeH264PictureLayoutFlagBitsEXT::eInterlacedSeparatePlanes )
- result += "InterlacedSeparatePlanes | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using VideoDecodeH264CreateFlagsEXT = Flags<VideoDecodeH264CreateFlagBitsEXT>;
-
- VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264CreateFlagsEXT )
- {
- return "{}";
- }
+ using VideoDecodeH264PictureLayoutFlagsEXT = Flags<VideoDecodeH264PictureLayoutFlagBitsEXT>;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_USE_PLATFORM_GGP )
//=== VK_GGP_stream_descriptor_surface ===
- using StreamDescriptorSurfaceCreateFlagsGGP = Flags<StreamDescriptorSurfaceCreateFlagBitsGGP>;
-
- VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagsGGP )
+ template <>
+ struct FlagTraits<StreamDescriptorSurfaceCreateFlagBitsGGP>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using StreamDescriptorSurfaceCreateFlagsGGP = Flags<StreamDescriptorSurfaceCreateFlagBitsGGP>;
#endif /*VK_USE_PLATFORM_GGP*/
//=== VK_NV_external_memory_capabilities ===
- using ExternalMemoryHandleTypeFlagsNV = Flags<ExternalMemoryHandleTypeFlagBitsNV>;
-
template <>
struct FlagTraits<ExternalMemoryHandleTypeFlagBitsNV>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 ) |
- VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt ) |
- VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image ) |
- VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt )
+ allFlags = VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 ) | VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt ) |
+ VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image ) | VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV
- operator|( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalMemoryHandleTypeFlagsNV( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV
- operator&( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalMemoryHandleTypeFlagsNV( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV
- operator^( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalMemoryHandleTypeFlagsNV( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV
- operator~( ExternalMemoryHandleTypeFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( ExternalMemoryHandleTypeFlagsNV( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagsNV value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 )
- result += "OpaqueWin32 | ";
- if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt )
- result += "OpaqueWin32Kmt | ";
- if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image )
- result += "D3D11Image | ";
- if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt )
- result += "D3D11ImageKmt | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using ExternalMemoryFeatureFlagsNV = Flags<ExternalMemoryFeatureFlagBitsNV>;
+ using ExternalMemoryHandleTypeFlagsNV = Flags<ExternalMemoryHandleTypeFlagBitsNV>;
template <>
struct FlagTraits<ExternalMemoryFeatureFlagBitsNV>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly ) |
- VkFlags( ExternalMemoryFeatureFlagBitsNV::eExportable ) |
+ allFlags = VkFlags( ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly ) | VkFlags( ExternalMemoryFeatureFlagBitsNV::eExportable ) |
VkFlags( ExternalMemoryFeatureFlagBitsNV::eImportable )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV
- operator|( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalMemoryFeatureFlagsNV( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV
- operator&( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalMemoryFeatureFlagsNV( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV
- operator^( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalMemoryFeatureFlagsNV( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator~( ExternalMemoryFeatureFlagBitsNV bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( ExternalMemoryFeatureFlagsNV( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagsNV value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly )
- result += "DedicatedOnly | ";
- if ( value & ExternalMemoryFeatureFlagBitsNV::eExportable )
- result += "Exportable | ";
- if ( value & ExternalMemoryFeatureFlagBitsNV::eImportable )
- result += "Importable | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
+ using ExternalMemoryFeatureFlagsNV = Flags<ExternalMemoryFeatureFlagBitsNV>;
#if defined( VK_USE_PLATFORM_VI_NN )
//=== VK_NN_vi_surface ===
- using ViSurfaceCreateFlagsNN = Flags<ViSurfaceCreateFlagBitsNN>;
-
- VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagsNN )
+ template <>
+ struct FlagTraits<ViSurfaceCreateFlagBitsNN>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using ViSurfaceCreateFlagsNN = Flags<ViSurfaceCreateFlagBitsNN>;
#endif /*VK_USE_PLATFORM_VI_NN*/
//=== VK_EXT_conditional_rendering ===
- using ConditionalRenderingFlagsEXT = Flags<ConditionalRenderingFlagBitsEXT>;
-
template <>
struct FlagTraits<ConditionalRenderingFlagBitsEXT>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( ConditionalRenderingFlagBitsEXT::eInverted )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT
- operator|( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ConditionalRenderingFlagsEXT( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT
- operator&( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ConditionalRenderingFlagsEXT( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT
- operator^( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ConditionalRenderingFlagsEXT( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator~( ConditionalRenderingFlagBitsEXT bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( ConditionalRenderingFlagsEXT( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagsEXT value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & ConditionalRenderingFlagBitsEXT::eInverted )
- result += "Inverted | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
+ using ConditionalRenderingFlagsEXT = Flags<ConditionalRenderingFlagBitsEXT>;
//=== VK_EXT_display_surface_counter ===
- using SurfaceCounterFlagsEXT = Flags<SurfaceCounterFlagBitsEXT>;
-
template <>
struct FlagTraits<SurfaceCounterFlagBitsEXT>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( SurfaceCounterFlagBitsEXT::eVblank )
};
};
+ using SurfaceCounterFlagsEXT = Flags<SurfaceCounterFlagBitsEXT>;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT
- operator|( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SurfaceCounterFlagsEXT( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT
- operator&( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SurfaceCounterFlagsEXT( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT
- operator^( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SurfaceCounterFlagsEXT( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator~( SurfaceCounterFlagBitsEXT bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( SurfaceCounterFlagsEXT( bits ) );
- }
+ //=== VK_NV_viewport_swizzle ===
- VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagsEXT value )
+ template <>
+ struct FlagTraits<PipelineViewportSwizzleStateCreateFlagBitsNV>
{
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & SurfaceCounterFlagBitsEXT::eVblank )
- result += "Vblank | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- //=== VK_NV_viewport_swizzle ===
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
using PipelineViewportSwizzleStateCreateFlagsNV = Flags<PipelineViewportSwizzleStateCreateFlagBitsNV>;
- VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagsNV )
- {
- return "{}";
- }
-
//=== VK_EXT_discard_rectangles ===
- using PipelineDiscardRectangleStateCreateFlagsEXT = Flags<PipelineDiscardRectangleStateCreateFlagBitsEXT>;
-
- VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagsEXT )
+ template <>
+ struct FlagTraits<PipelineDiscardRectangleStateCreateFlagBitsEXT>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- //=== VK_EXT_conservative_rasterization ===
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using PipelineDiscardRectangleStateCreateFlagsEXT = Flags<PipelineDiscardRectangleStateCreateFlagBitsEXT>;
- using PipelineRasterizationConservativeStateCreateFlagsEXT =
- Flags<PipelineRasterizationConservativeStateCreateFlagBitsEXT>;
+ //=== VK_EXT_conservative_rasterization ===
- VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagsEXT )
+ template <>
+ struct FlagTraits<PipelineRasterizationConservativeStateCreateFlagBitsEXT>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- //=== VK_EXT_depth_clip_enable ===
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using PipelineRasterizationConservativeStateCreateFlagsEXT = Flags<PipelineRasterizationConservativeStateCreateFlagBitsEXT>;
- using PipelineRasterizationDepthClipStateCreateFlagsEXT = Flags<PipelineRasterizationDepthClipStateCreateFlagBitsEXT>;
+ //=== VK_EXT_depth_clip_enable ===
- VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagsEXT )
+ template <>
+ struct FlagTraits<PipelineRasterizationDepthClipStateCreateFlagBitsEXT>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- //=== VK_KHR_performance_query ===
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using PipelineRasterizationDepthClipStateCreateFlagsEXT = Flags<PipelineRasterizationDepthClipStateCreateFlagBitsEXT>;
- using PerformanceCounterDescriptionFlagsKHR = Flags<PerformanceCounterDescriptionFlagBitsKHR>;
+ //=== VK_KHR_performance_query ===
template <>
struct FlagTraits<PerformanceCounterDescriptionFlagBitsKHR>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting ) |
- VkFlags( PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted )
+ allFlags =
+ VkFlags( PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting ) | VkFlags( PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted )
};
};
+ using PerformanceCounterDescriptionFlagsKHR = Flags<PerformanceCounterDescriptionFlagBitsKHR>;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator|(
- PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PerformanceCounterDescriptionFlagsKHR( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator&(
- PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PerformanceCounterDescriptionFlagsKHR( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator^(
- PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PerformanceCounterDescriptionFlagsKHR( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR
- operator~( PerformanceCounterDescriptionFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( PerformanceCounterDescriptionFlagsKHR( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagsKHR value )
+ template <>
+ struct FlagTraits<AcquireProfilingLockFlagBitsKHR>
{
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting )
- result += "PerformanceImpacting | ";
- if ( value & PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted )
- result += "ConcurrentlyImpacted | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
using AcquireProfilingLockFlagsKHR = Flags<AcquireProfilingLockFlagBitsKHR>;
- VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagsKHR )
- {
- return "{}";
- }
-
#if defined( VK_USE_PLATFORM_IOS_MVK )
//=== VK_MVK_ios_surface ===
- using IOSSurfaceCreateFlagsMVK = Flags<IOSSurfaceCreateFlagBitsMVK>;
-
- VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagsMVK )
+ template <>
+ struct FlagTraits<IOSSurfaceCreateFlagBitsMVK>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using IOSSurfaceCreateFlagsMVK = Flags<IOSSurfaceCreateFlagBitsMVK>;
#endif /*VK_USE_PLATFORM_IOS_MVK*/
#if defined( VK_USE_PLATFORM_MACOS_MVK )
//=== VK_MVK_macos_surface ===
- using MacOSSurfaceCreateFlagsMVK = Flags<MacOSSurfaceCreateFlagBitsMVK>;
-
- VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagsMVK )
+ template <>
+ struct FlagTraits<MacOSSurfaceCreateFlagBitsMVK>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using MacOSSurfaceCreateFlagsMVK = Flags<MacOSSurfaceCreateFlagBitsMVK>;
#endif /*VK_USE_PLATFORM_MACOS_MVK*/
//=== VK_EXT_debug_utils ===
- using DebugUtilsMessageSeverityFlagsEXT = Flags<DebugUtilsMessageSeverityFlagBitsEXT>;
-
template <>
struct FlagTraits<DebugUtilsMessageSeverityFlagBitsEXT>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eVerbose ) |
- VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eInfo ) |
- VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eWarning ) |
- VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eError )
+ allFlags = VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eVerbose ) | VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eInfo ) |
+ VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eWarning ) | VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eError )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator|(
- DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DebugUtilsMessageSeverityFlagsEXT( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator&(
- DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DebugUtilsMessageSeverityFlagsEXT( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator^(
- DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DebugUtilsMessageSeverityFlagsEXT( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT
- operator~( DebugUtilsMessageSeverityFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( DebugUtilsMessageSeverityFlagsEXT( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagsEXT value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eVerbose )
- result += "Verbose | ";
- if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eInfo )
- result += "Info | ";
- if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eWarning )
- result += "Warning | ";
- if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eError )
- result += "Error | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using DebugUtilsMessageTypeFlagsEXT = Flags<DebugUtilsMessageTypeFlagBitsEXT>;
+ using DebugUtilsMessageSeverityFlagsEXT = Flags<DebugUtilsMessageSeverityFlagBitsEXT>;
template <>
struct FlagTraits<DebugUtilsMessageTypeFlagBitsEXT>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( DebugUtilsMessageTypeFlagBitsEXT::eGeneral ) |
- VkFlags( DebugUtilsMessageTypeFlagBitsEXT::eValidation ) |
- VkFlags( DebugUtilsMessageTypeFlagBitsEXT::ePerformance )
+ allFlags = VkFlags( DebugUtilsMessageTypeFlagBitsEXT::eGeneral ) | VkFlags( DebugUtilsMessageTypeFlagBitsEXT::eValidation ) |
+ VkFlags( DebugUtilsMessageTypeFlagBitsEXT::ePerformance ) | VkFlags( DebugUtilsMessageTypeFlagBitsEXT::eDeviceAddressBinding )
};
};
+ using DebugUtilsMessageTypeFlagsEXT = Flags<DebugUtilsMessageTypeFlagBitsEXT>;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT
- operator|( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DebugUtilsMessageTypeFlagsEXT( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT
- operator&( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DebugUtilsMessageTypeFlagsEXT( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT
- operator^( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DebugUtilsMessageTypeFlagsEXT( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT
- operator~( DebugUtilsMessageTypeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( DebugUtilsMessageTypeFlagsEXT( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagsEXT value )
+ template <>
+ struct FlagTraits<DebugUtilsMessengerCallbackDataFlagBitsEXT>
{
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & DebugUtilsMessageTypeFlagBitsEXT::eGeneral )
- result += "General | ";
- if ( value & DebugUtilsMessageTypeFlagBitsEXT::eValidation )
- result += "Validation | ";
- if ( value & DebugUtilsMessageTypeFlagBitsEXT::ePerformance )
- result += "Performance | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
using DebugUtilsMessengerCallbackDataFlagsEXT = Flags<DebugUtilsMessengerCallbackDataFlagBitsEXT>;
- VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagsEXT )
+ template <>
+ struct FlagTraits<DebugUtilsMessengerCreateFlagBitsEXT>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
using DebugUtilsMessengerCreateFlagsEXT = Flags<DebugUtilsMessengerCreateFlagBitsEXT>;
- VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagsEXT )
- {
- return "{}";
- }
-
//=== VK_NV_fragment_coverage_to_color ===
- using PipelineCoverageToColorStateCreateFlagsNV = Flags<PipelineCoverageToColorStateCreateFlagBitsNV>;
-
- VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagsNV )
+ template <>
+ struct FlagTraits<PipelineCoverageToColorStateCreateFlagBitsNV>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- //=== VK_KHR_acceleration_structure ===
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using PipelineCoverageToColorStateCreateFlagsNV = Flags<PipelineCoverageToColorStateCreateFlagBitsNV>;
- using GeometryFlagsKHR = Flags<GeometryFlagBitsKHR>;
+ //=== VK_KHR_acceleration_structure ===
template <>
struct FlagTraits<GeometryFlagBitsKHR>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( GeometryFlagBitsKHR::eOpaque ) | VkFlags( GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator|( GeometryFlagBitsKHR bit0,
- GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return GeometryFlagsKHR( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator&( GeometryFlagBitsKHR bit0,
- GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return GeometryFlagsKHR( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator^( GeometryFlagBitsKHR bit0,
- GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return GeometryFlagsKHR( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator~( GeometryFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( GeometryFlagsKHR( bits ) );
- }
-
- using GeometryFlagsNV = GeometryFlagsKHR;
-
- VULKAN_HPP_INLINE std::string to_string( GeometryFlagsKHR value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & GeometryFlagBitsKHR::eOpaque )
- result += "Opaque | ";
- if ( value & GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation )
- result += "NoDuplicateAnyHitInvocation | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using GeometryInstanceFlagsKHR = Flags<GeometryInstanceFlagBitsKHR>;
+ using GeometryFlagsKHR = Flags<GeometryFlagBitsKHR>;
+ using GeometryFlagsNV = GeometryFlagsKHR;
template <>
struct FlagTraits<GeometryInstanceFlagBitsKHR>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable ) |
- VkFlags( GeometryInstanceFlagBitsKHR::eTriangleFlipFacing ) |
- VkFlags( GeometryInstanceFlagBitsKHR::eForceOpaque ) |
- VkFlags( GeometryInstanceFlagBitsKHR::eForceNoOpaque )
+ allFlags = VkFlags( GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable ) | VkFlags( GeometryInstanceFlagBitsKHR::eTriangleFlipFacing ) |
+ VkFlags( GeometryInstanceFlagBitsKHR::eForceOpaque ) | VkFlags( GeometryInstanceFlagBitsKHR::eForceNoOpaque ) |
+ VkFlags( GeometryInstanceFlagBitsKHR::eForceOpacityMicromap2StateEXT ) | VkFlags( GeometryInstanceFlagBitsKHR::eDisableOpacityMicromapsEXT )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR
- operator|( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return GeometryInstanceFlagsKHR( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR
- operator&( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return GeometryInstanceFlagsKHR( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR
- operator^( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return GeometryInstanceFlagsKHR( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator~( GeometryInstanceFlagBitsKHR bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( GeometryInstanceFlagsKHR( bits ) );
- }
-
- using GeometryInstanceFlagsNV = GeometryInstanceFlagsKHR;
-
- VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagsKHR value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable )
- result += "TriangleFacingCullDisable | ";
- if ( value & GeometryInstanceFlagBitsKHR::eTriangleFlipFacing )
- result += "TriangleFlipFacing | ";
- if ( value & GeometryInstanceFlagBitsKHR::eForceOpaque )
- result += "ForceOpaque | ";
- if ( value & GeometryInstanceFlagBitsKHR::eForceNoOpaque )
- result += "ForceNoOpaque | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using BuildAccelerationStructureFlagsKHR = Flags<BuildAccelerationStructureFlagBitsKHR>;
+ using GeometryInstanceFlagsKHR = Flags<GeometryInstanceFlagBitsKHR>;
+ using GeometryInstanceFlagsNV = GeometryInstanceFlagsKHR;
template <>
struct FlagTraits<BuildAccelerationStructureFlagBitsKHR>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( BuildAccelerationStructureFlagBitsKHR::eAllowUpdate ) |
- VkFlags( BuildAccelerationStructureFlagBitsKHR::eAllowCompaction ) |
- VkFlags( BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace ) |
- VkFlags( BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild ) |
- VkFlags( BuildAccelerationStructureFlagBitsKHR::eLowMemory ) |
- VkFlags( BuildAccelerationStructureFlagBitsKHR::eMotionNV )
+ allFlags = VkFlags( BuildAccelerationStructureFlagBitsKHR::eAllowUpdate ) | VkFlags( BuildAccelerationStructureFlagBitsKHR::eAllowCompaction ) |
+ VkFlags( BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace ) | VkFlags( BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild ) |
+ VkFlags( BuildAccelerationStructureFlagBitsKHR::eLowMemory ) | VkFlags( BuildAccelerationStructureFlagBitsKHR::eMotionNV ) |
+ VkFlags( BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapUpdateEXT ) |
+ VkFlags( BuildAccelerationStructureFlagBitsKHR::eAllowDisableOpacityMicromapsEXT ) |
+ VkFlags( BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapDataUpdateEXT )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator|(
- BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return BuildAccelerationStructureFlagsKHR( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator&(
- BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return BuildAccelerationStructureFlagsKHR( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator^(
- BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return BuildAccelerationStructureFlagsKHR( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR
- operator~( BuildAccelerationStructureFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( BuildAccelerationStructureFlagsKHR( bits ) );
- }
-
- using BuildAccelerationStructureFlagsNV = BuildAccelerationStructureFlagsKHR;
-
- VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagsKHR value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowUpdate )
- result += "AllowUpdate | ";
- if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowCompaction )
- result += "AllowCompaction | ";
- if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace )
- result += "PreferFastTrace | ";
- if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild )
- result += "PreferFastBuild | ";
- if ( value & BuildAccelerationStructureFlagBitsKHR::eLowMemory )
- result += "LowMemory | ";
- if ( value & BuildAccelerationStructureFlagBitsKHR::eMotionNV )
- result += "MotionNV | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using AccelerationStructureCreateFlagsKHR = Flags<AccelerationStructureCreateFlagBitsKHR>;
+ using BuildAccelerationStructureFlagsKHR = Flags<BuildAccelerationStructureFlagBitsKHR>;
+ using BuildAccelerationStructureFlagsNV = BuildAccelerationStructureFlagsKHR;
template <>
struct FlagTraits<AccelerationStructureCreateFlagBitsKHR>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay ) |
- VkFlags( AccelerationStructureCreateFlagBitsKHR::eMotionNV )
+ allFlags = VkFlags( AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay ) | VkFlags( AccelerationStructureCreateFlagBitsKHR::eMotionNV )
};
};
+ using AccelerationStructureCreateFlagsKHR = Flags<AccelerationStructureCreateFlagBitsKHR>;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccelerationStructureCreateFlagsKHR operator|(
- AccelerationStructureCreateFlagBitsKHR bit0, AccelerationStructureCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return AccelerationStructureCreateFlagsKHR( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccelerationStructureCreateFlagsKHR operator&(
- AccelerationStructureCreateFlagBitsKHR bit0, AccelerationStructureCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return AccelerationStructureCreateFlagsKHR( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccelerationStructureCreateFlagsKHR operator^(
- AccelerationStructureCreateFlagBitsKHR bit0, AccelerationStructureCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return AccelerationStructureCreateFlagsKHR( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccelerationStructureCreateFlagsKHR
- operator~( AccelerationStructureCreateFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( AccelerationStructureCreateFlagsKHR( bits ) );
- }
+ //=== VK_NV_framebuffer_mixed_samples ===
- VULKAN_HPP_INLINE std::string to_string( AccelerationStructureCreateFlagsKHR value )
+ template <>
+ struct FlagTraits<PipelineCoverageModulationStateCreateFlagBitsNV>
{
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay )
- result += "DeviceAddressCaptureReplay | ";
- if ( value & AccelerationStructureCreateFlagBitsKHR::eMotionNV )
- result += "MotionNV | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- //=== VK_NV_framebuffer_mixed_samples ===
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
using PipelineCoverageModulationStateCreateFlagsNV = Flags<PipelineCoverageModulationStateCreateFlagBitsNV>;
- VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagsNV )
- {
- return "{}";
- }
-
//=== VK_EXT_validation_cache ===
- using ValidationCacheCreateFlagsEXT = Flags<ValidationCacheCreateFlagBitsEXT>;
-
- VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagsEXT )
+ template <>
+ struct FlagTraits<ValidationCacheCreateFlagBitsEXT>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- //=== VK_AMD_pipeline_compiler_control ===
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using ValidationCacheCreateFlagsEXT = Flags<ValidationCacheCreateFlagBitsEXT>;
- using PipelineCompilerControlFlagsAMD = Flags<PipelineCompilerControlFlagBitsAMD>;
+ //=== VK_AMD_pipeline_compiler_control ===
- VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagsAMD )
+ template <>
+ struct FlagTraits<PipelineCompilerControlFlagBitsAMD>
{
- return "{}";
- }
-
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- //=== VK_EXT_video_decode_h265 ===
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- using VideoDecodeH265CreateFlagsEXT = Flags<VideoDecodeH265CreateFlagBitsEXT>;
-
- VULKAN_HPP_INLINE std::string to_string( VideoDecodeH265CreateFlagsEXT )
- {
- return "{}";
- }
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using PipelineCompilerControlFlagsAMD = Flags<PipelineCompilerControlFlagBitsAMD>;
#if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_imagepipe_surface ===
- using ImagePipeSurfaceCreateFlagsFUCHSIA = Flags<ImagePipeSurfaceCreateFlagBitsFUCHSIA>;
-
- VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagsFUCHSIA )
+ template <>
+ struct FlagTraits<ImagePipeSurfaceCreateFlagBitsFUCHSIA>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using ImagePipeSurfaceCreateFlagsFUCHSIA = Flags<ImagePipeSurfaceCreateFlagBitsFUCHSIA>;
#endif /*VK_USE_PLATFORM_FUCHSIA*/
#if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_surface ===
- using MetalSurfaceCreateFlagsEXT = Flags<MetalSurfaceCreateFlagBitsEXT>;
-
- VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagsEXT )
+ template <>
+ struct FlagTraits<MetalSurfaceCreateFlagBitsEXT>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using MetalSurfaceCreateFlagsEXT = Flags<MetalSurfaceCreateFlagBitsEXT>;
#endif /*VK_USE_PLATFORM_METAL_EXT*/
//=== VK_AMD_shader_core_properties2 ===
- using ShaderCorePropertiesFlagsAMD = Flags<ShaderCorePropertiesFlagBitsAMD>;
-
- VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagsAMD )
+ template <>
+ struct FlagTraits<ShaderCorePropertiesFlagBitsAMD>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- //=== VK_NV_coverage_reduction_mode ===
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using ShaderCorePropertiesFlagsAMD = Flags<ShaderCorePropertiesFlagBitsAMD>;
- using PipelineCoverageReductionStateCreateFlagsNV = Flags<PipelineCoverageReductionStateCreateFlagBitsNV>;
+ //=== VK_NV_coverage_reduction_mode ===
- VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagsNV )
+ template <>
+ struct FlagTraits<PipelineCoverageReductionStateCreateFlagBitsNV>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- //=== VK_EXT_headless_surface ===
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using PipelineCoverageReductionStateCreateFlagsNV = Flags<PipelineCoverageReductionStateCreateFlagBitsNV>;
- using HeadlessSurfaceCreateFlagsEXT = Flags<HeadlessSurfaceCreateFlagBitsEXT>;
+ //=== VK_EXT_headless_surface ===
- VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagsEXT )
+ template <>
+ struct FlagTraits<HeadlessSurfaceCreateFlagBitsEXT>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- //=== VK_NV_device_generated_commands ===
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using HeadlessSurfaceCreateFlagsEXT = Flags<HeadlessSurfaceCreateFlagBitsEXT>;
- using IndirectStateFlagsNV = Flags<IndirectStateFlagBitsNV>;
+ //=== VK_NV_device_generated_commands ===
template <>
struct FlagTraits<IndirectStateFlagBitsNV>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
allFlags = VkFlags( IndirectStateFlagBitsNV::eFlagFrontface )
};
};
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV
- operator|( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return IndirectStateFlagsNV( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV
- operator&( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return IndirectStateFlagsNV( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV
- operator^( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return IndirectStateFlagsNV( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator~( IndirectStateFlagBitsNV bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( IndirectStateFlagsNV( bits ) );
- }
-
- VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagsNV value )
- {
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & IndirectStateFlagBitsNV::eFlagFrontface )
- result += "FlagFrontface | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- using IndirectCommandsLayoutUsageFlagsNV = Flags<IndirectCommandsLayoutUsageFlagBitsNV>;
+ using IndirectStateFlagsNV = Flags<IndirectStateFlagBitsNV>;
template <>
struct FlagTraits<IndirectCommandsLayoutUsageFlagBitsNV>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess ) |
- VkFlags( IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences ) |
+ allFlags = VkFlags( IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess ) | VkFlags( IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences ) |
VkFlags( IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences )
};
};
+ using IndirectCommandsLayoutUsageFlagsNV = Flags<IndirectCommandsLayoutUsageFlagBitsNV>;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator|(
- IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return IndirectCommandsLayoutUsageFlagsNV( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator&(
- IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return IndirectCommandsLayoutUsageFlagsNV( bit0 ) & bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator^(
- IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return IndirectCommandsLayoutUsageFlagsNV( bit0 ) ^ bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV
- operator~( IndirectCommandsLayoutUsageFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( IndirectCommandsLayoutUsageFlagsNV( bits ) );
- }
+ //=== VK_EXT_device_memory_report ===
- VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagsNV value )
+ template <>
+ struct FlagTraits<DeviceMemoryReportFlagBitsEXT>
{
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess )
- result += "ExplicitPreprocess | ";
- if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences )
- result += "IndexedSequences | ";
- if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences )
- result += "UnorderedSequences | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-
- //=== VK_EXT_device_memory_report ===
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
using DeviceMemoryReportFlagsEXT = Flags<DeviceMemoryReportFlagBitsEXT>;
- VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportFlagsEXT )
- {
- return "{}";
- }
-
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_video_encode_queue ===
- using VideoEncodeFlagsKHR = Flags<VideoEncodeFlagBitsKHR>;
-
template <>
struct FlagTraits<VideoEncodeFlagBitsKHR>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( VideoEncodeFlagBitsKHR::eDefault ) | VkFlags( VideoEncodeFlagBitsKHR::eReserved0 )
+ allFlags = 0
};
};
+ using VideoEncodeFlagsKHR = Flags<VideoEncodeFlagBitsKHR>;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeFlagsKHR
- operator|( VideoEncodeFlagBitsKHR bit0, VideoEncodeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+ template <>
+ struct FlagTraits<VideoEncodeCapabilityFlagBitsKHR>
{
- return VideoEncodeFlagsKHR( bit0 ) | bit1;
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeFlagsKHR
- operator&( VideoEncodeFlagBitsKHR bit0, VideoEncodeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeFlagsKHR( bit0 ) & bit1;
- }
+ enum : VkFlags
+ {
+ allFlags = VkFlags( VideoEncodeCapabilityFlagBitsKHR::ePrecedingExternallyEncodedBytes )
+ };
+ };
+ using VideoEncodeCapabilityFlagsKHR = Flags<VideoEncodeCapabilityFlagBitsKHR>;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeFlagsKHR
- operator^( VideoEncodeFlagBitsKHR bit0, VideoEncodeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+ template <>
+ struct FlagTraits<VideoEncodeUsageFlagBitsKHR>
{
- return VideoEncodeFlagsKHR( bit0 ) ^ bit1;
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeFlagsKHR operator~( VideoEncodeFlagBitsKHR bits )
- VULKAN_HPP_NOEXCEPT
- {
- return ~( VideoEncodeFlagsKHR( bits ) );
- }
+ enum : VkFlags
+ {
+ allFlags = VkFlags( VideoEncodeUsageFlagBitsKHR::eDefault ) | VkFlags( VideoEncodeUsageFlagBitsKHR::eTranscoding ) |
+ VkFlags( VideoEncodeUsageFlagBitsKHR::eStreaming ) | VkFlags( VideoEncodeUsageFlagBitsKHR::eRecording ) |
+ VkFlags( VideoEncodeUsageFlagBitsKHR::eConferencing )
+ };
+ };
+ using VideoEncodeUsageFlagsKHR = Flags<VideoEncodeUsageFlagBitsKHR>;
- VULKAN_HPP_INLINE std::string to_string( VideoEncodeFlagsKHR value )
+ template <>
+ struct FlagTraits<VideoEncodeContentFlagBitsKHR>
{
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & VideoEncodeFlagBitsKHR::eReserved0 )
- result += "Reserved0 | ";
-
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- using VideoEncodeRateControlFlagsKHR = Flags<VideoEncodeRateControlFlagBitsKHR>;
+ enum : VkFlags
+ {
+ allFlags = VkFlags( VideoEncodeContentFlagBitsKHR::eDefault ) | VkFlags( VideoEncodeContentFlagBitsKHR::eCamera ) |
+ VkFlags( VideoEncodeContentFlagBitsKHR::eDesktop ) | VkFlags( VideoEncodeContentFlagBitsKHR::eRendered )
+ };
+ };
+ using VideoEncodeContentFlagsKHR = Flags<VideoEncodeContentFlagBitsKHR>;
template <>
struct FlagTraits<VideoEncodeRateControlFlagBitsKHR>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( VideoEncodeRateControlFlagBitsKHR::eDefault ) |
- VkFlags( VideoEncodeRateControlFlagBitsKHR::eReserved0 )
+ allFlags = 0
};
};
+ using VideoEncodeRateControlFlagsKHR = Flags<VideoEncodeRateControlFlagBitsKHR>;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlFlagsKHR
- operator|( VideoEncodeRateControlFlagBitsKHR bit0, VideoEncodeRateControlFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeRateControlFlagsKHR( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlFlagsKHR
- operator&( VideoEncodeRateControlFlagBitsKHR bit0, VideoEncodeRateControlFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+ template <>
+ struct FlagTraits<VideoEncodeRateControlModeFlagBitsKHR>
{
- return VideoEncodeRateControlFlagsKHR( bit0 ) & bit1;
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlFlagsKHR
- operator^( VideoEncodeRateControlFlagBitsKHR bit0, VideoEncodeRateControlFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeRateControlFlagsKHR( bit0 ) ^ bit1;
- }
+ enum : VkFlags
+ {
+ allFlags = VkFlags( VideoEncodeRateControlModeFlagBitsKHR::eNone ) | VkFlags( VideoEncodeRateControlModeFlagBitsKHR::eCbr ) |
+ VkFlags( VideoEncodeRateControlModeFlagBitsKHR::eVbr )
+ };
+ };
+ using VideoEncodeRateControlModeFlagsKHR = Flags<VideoEncodeRateControlModeFlagBitsKHR>;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlFlagsKHR
- operator~( VideoEncodeRateControlFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( VideoEncodeRateControlFlagsKHR( bits ) );
- }
+ //=== VK_NV_device_diagnostics_config ===
- VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlFlagsKHR value )
+ template <>
+ struct FlagTraits<DeviceDiagnosticsConfigFlagBitsNV>
{
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & VideoEncodeRateControlFlagBitsKHR::eReserved0 )
- result += "Reserved0 | ";
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
+ enum : VkFlags
+ {
+ allFlags = VkFlags( DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo ) | VkFlags( DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking ) |
+ VkFlags( DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints ) |
+ VkFlags( DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderErrorReporting )
+ };
+ };
+ using DeviceDiagnosticsConfigFlagsNV = Flags<DeviceDiagnosticsConfigFlagBitsNV>;
- using VideoEncodeRateControlModeFlagsKHR = Flags<VideoEncodeRateControlModeFlagBitsKHR>;
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ //=== VK_EXT_metal_objects ===
template <>
- struct FlagTraits<VideoEncodeRateControlModeFlagBitsKHR>
+ struct FlagTraits<ExportMetalObjectTypeFlagBitsEXT>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( VideoEncodeRateControlModeFlagBitsKHR::eNone ) |
- VkFlags( VideoEncodeRateControlModeFlagBitsKHR::eCbr ) |
- VkFlags( VideoEncodeRateControlModeFlagBitsKHR::eVbr )
+ allFlags = VkFlags( ExportMetalObjectTypeFlagBitsEXT::eMetalDevice ) | VkFlags( ExportMetalObjectTypeFlagBitsEXT::eMetalCommandQueue ) |
+ VkFlags( ExportMetalObjectTypeFlagBitsEXT::eMetalBuffer ) | VkFlags( ExportMetalObjectTypeFlagBitsEXT::eMetalTexture ) |
+ VkFlags( ExportMetalObjectTypeFlagBitsEXT::eMetalIosurface ) | VkFlags( ExportMetalObjectTypeFlagBitsEXT::eMetalSharedEvent )
};
};
+ using ExportMetalObjectTypeFlagsEXT = Flags<ExportMetalObjectTypeFlagBitsEXT>;
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlModeFlagsKHR operator|(
- VideoEncodeRateControlModeFlagBitsKHR bit0, VideoEncodeRateControlModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeRateControlModeFlagsKHR( bit0 ) | bit1;
- }
+ //=== VK_EXT_graphics_pipeline_library ===
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlModeFlagsKHR operator&(
- VideoEncodeRateControlModeFlagBitsKHR bit0, VideoEncodeRateControlModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+ template <>
+ struct FlagTraits<GraphicsPipelineLibraryFlagBitsEXT>
{
- return VideoEncodeRateControlModeFlagsKHR( bit0 ) & bit1;
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlModeFlagsKHR operator^(
- VideoEncodeRateControlModeFlagBitsKHR bit0, VideoEncodeRateControlModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return VideoEncodeRateControlModeFlagsKHR( bit0 ) ^ bit1;
- }
+ enum : VkFlags
+ {
+ allFlags = VkFlags( GraphicsPipelineLibraryFlagBitsEXT::eVertexInputInterface ) |
+ VkFlags( GraphicsPipelineLibraryFlagBitsEXT::ePreRasterizationShaders ) | VkFlags( GraphicsPipelineLibraryFlagBitsEXT::eFragmentShader ) |
+ VkFlags( GraphicsPipelineLibraryFlagBitsEXT::eFragmentOutputInterface )
+ };
+ };
+ using GraphicsPipelineLibraryFlagsEXT = Flags<GraphicsPipelineLibraryFlagBitsEXT>;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlModeFlagsKHR
- operator~( VideoEncodeRateControlModeFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( VideoEncodeRateControlModeFlagsKHR( bits ) );
- }
+ //=== VK_NV_ray_tracing_motion_blur ===
- VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlModeFlagsKHR value )
+ template <>
+ struct FlagTraits<AccelerationStructureMotionInfoFlagBitsNV>
{
- if ( !value )
- return "{}";
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- std::string result;
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using AccelerationStructureMotionInfoFlagsNV = Flags<AccelerationStructureMotionInfoFlagBitsNV>;
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ template <>
+ struct FlagTraits<AccelerationStructureMotionInstanceFlagBitsNV>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- //=== VK_NV_device_diagnostics_config ===
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using AccelerationStructureMotionInstanceFlagsNV = Flags<AccelerationStructureMotionInstanceFlagBitsNV>;
- using DeviceDiagnosticsConfigFlagsNV = Flags<DeviceDiagnosticsConfigFlagBitsNV>;
+ //=== VK_EXT_image_compression_control ===
template <>
- struct FlagTraits<DeviceDiagnosticsConfigFlagBitsNV>
+ struct FlagTraits<ImageCompressionFlagBitsEXT>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo ) |
- VkFlags( DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking ) |
- VkFlags( DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints )
+ allFlags = VkFlags( ImageCompressionFlagBitsEXT::eDefault ) | VkFlags( ImageCompressionFlagBitsEXT::eFixedRateDefault ) |
+ VkFlags( ImageCompressionFlagBitsEXT::eFixedRateExplicit ) | VkFlags( ImageCompressionFlagBitsEXT::eDisabled )
};
};
+ using ImageCompressionFlagsEXT = Flags<ImageCompressionFlagBitsEXT>;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV
- operator|( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+ template <>
+ struct FlagTraits<ImageCompressionFixedRateFlagBitsEXT>
{
- return DeviceDiagnosticsConfigFlagsNV( bit0 ) | bit1;
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV
- operator&( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DeviceDiagnosticsConfigFlagsNV( bit0 ) & bit1;
- }
+ enum : VkFlags
+ {
+ allFlags = VkFlags( ImageCompressionFixedRateFlagBitsEXT::eNone ) | VkFlags( ImageCompressionFixedRateFlagBitsEXT::e1Bpc ) |
+ VkFlags( ImageCompressionFixedRateFlagBitsEXT::e2Bpc ) | VkFlags( ImageCompressionFixedRateFlagBitsEXT::e3Bpc ) |
+ VkFlags( ImageCompressionFixedRateFlagBitsEXT::e4Bpc ) | VkFlags( ImageCompressionFixedRateFlagBitsEXT::e5Bpc ) |
+ VkFlags( ImageCompressionFixedRateFlagBitsEXT::e6Bpc ) | VkFlags( ImageCompressionFixedRateFlagBitsEXT::e7Bpc ) |
+ VkFlags( ImageCompressionFixedRateFlagBitsEXT::e8Bpc ) | VkFlags( ImageCompressionFixedRateFlagBitsEXT::e9Bpc ) |
+ VkFlags( ImageCompressionFixedRateFlagBitsEXT::e10Bpc ) | VkFlags( ImageCompressionFixedRateFlagBitsEXT::e11Bpc ) |
+ VkFlags( ImageCompressionFixedRateFlagBitsEXT::e12Bpc ) | VkFlags( ImageCompressionFixedRateFlagBitsEXT::e13Bpc ) |
+ VkFlags( ImageCompressionFixedRateFlagBitsEXT::e14Bpc ) | VkFlags( ImageCompressionFixedRateFlagBitsEXT::e15Bpc ) |
+ VkFlags( ImageCompressionFixedRateFlagBitsEXT::e16Bpc ) | VkFlags( ImageCompressionFixedRateFlagBitsEXT::e17Bpc ) |
+ VkFlags( ImageCompressionFixedRateFlagBitsEXT::e18Bpc ) | VkFlags( ImageCompressionFixedRateFlagBitsEXT::e19Bpc ) |
+ VkFlags( ImageCompressionFixedRateFlagBitsEXT::e20Bpc ) | VkFlags( ImageCompressionFixedRateFlagBitsEXT::e21Bpc ) |
+ VkFlags( ImageCompressionFixedRateFlagBitsEXT::e22Bpc ) | VkFlags( ImageCompressionFixedRateFlagBitsEXT::e23Bpc ) |
+ VkFlags( ImageCompressionFixedRateFlagBitsEXT::e24Bpc )
+ };
+ };
+ using ImageCompressionFixedRateFlagsEXT = Flags<ImageCompressionFixedRateFlagBitsEXT>;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV
- operator^( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DeviceDiagnosticsConfigFlagsNV( bit0 ) ^ bit1;
- }
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+ //=== VK_EXT_directfb_surface ===
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV
- operator~( DeviceDiagnosticsConfigFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
+ template <>
+ struct FlagTraits<DirectFBSurfaceCreateFlagBitsEXT>
{
- return ~( DeviceDiagnosticsConfigFlagsNV( bits ) );
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagsNV value )
- {
- if ( !value )
- return "{}";
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using DirectFBSurfaceCreateFlagsEXT = Flags<DirectFBSurfaceCreateFlagBitsEXT>;
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
- std::string result;
- if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo )
- result += "EnableShaderDebugInfo | ";
- if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking )
- result += "EnableResourceTracking | ";
- if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints )
- result += "EnableAutomaticCheckpoints | ";
+ //=== VK_EXT_device_address_binding_report ===
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
+ template <>
+ struct FlagTraits<DeviceAddressBindingFlagBitsEXT>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- //=== VK_NV_ray_tracing_motion_blur ===
+ enum : VkFlags
+ {
+ allFlags = VkFlags( DeviceAddressBindingFlagBitsEXT::eInternalObject )
+ };
+ };
+ using DeviceAddressBindingFlagsEXT = Flags<DeviceAddressBindingFlagBitsEXT>;
- using AccelerationStructureMotionInfoFlagsNV = Flags<AccelerationStructureMotionInfoFlagBitsNV>;
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+ //=== VK_FUCHSIA_buffer_collection ===
- VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMotionInfoFlagsNV )
+ template <>
+ struct FlagTraits<ImageFormatConstraintsFlagBitsFUCHSIA>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- using AccelerationStructureMotionInstanceFlagsNV = Flags<AccelerationStructureMotionInstanceFlagBitsNV>;
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using ImageFormatConstraintsFlagsFUCHSIA = Flags<ImageFormatConstraintsFlagBitsFUCHSIA>;
- VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMotionInstanceFlagsNV )
+ template <>
+ struct FlagTraits<ImageConstraintsInfoFlagBitsFUCHSIA>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
-#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
- //=== VK_EXT_directfb_surface ===
+ enum : VkFlags
+ {
+ allFlags = VkFlags( ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadRarely ) | VkFlags( ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadOften ) |
+ VkFlags( ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteRarely ) | VkFlags( ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteOften ) |
+ VkFlags( ImageConstraintsInfoFlagBitsFUCHSIA::eProtectedOptional )
+ };
+ };
+ using ImageConstraintsInfoFlagsFUCHSIA = Flags<ImageConstraintsInfoFlagBitsFUCHSIA>;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
- using DirectFBSurfaceCreateFlagsEXT = Flags<DirectFBSurfaceCreateFlagBitsEXT>;
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+ //=== VK_QNX_screen_surface ===
- VULKAN_HPP_INLINE std::string to_string( DirectFBSurfaceCreateFlagsEXT )
+ template <>
+ struct FlagTraits<ScreenSurfaceCreateFlagBitsQNX>
{
- return "{}";
- }
-#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
-#if defined( VK_USE_PLATFORM_FUCHSIA )
- //=== VK_FUCHSIA_buffer_collection ===
+ enum : VkFlags
+ {
+ allFlags = 0
+ };
+ };
+ using ScreenSurfaceCreateFlagsQNX = Flags<ScreenSurfaceCreateFlagBitsQNX>;
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
- using ImageFormatConstraintsFlagsFUCHSIA = Flags<ImageFormatConstraintsFlagBitsFUCHSIA>;
+ //=== VK_EXT_opacity_micromap ===
- VULKAN_HPP_INLINE std::string to_string( ImageFormatConstraintsFlagsFUCHSIA )
+ template <>
+ struct FlagTraits<BuildMicromapFlagBitsEXT>
{
- return "{}";
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- using ImageConstraintsInfoFlagsFUCHSIA = Flags<ImageConstraintsInfoFlagBitsFUCHSIA>;
+ enum : VkFlags
+ {
+ allFlags = VkFlags( BuildMicromapFlagBitsEXT::ePreferFastTrace ) | VkFlags( BuildMicromapFlagBitsEXT::ePreferFastBuild ) |
+ VkFlags( BuildMicromapFlagBitsEXT::eAllowCompaction )
+ };
+ };
+ using BuildMicromapFlagsEXT = Flags<BuildMicromapFlagBitsEXT>;
template <>
- struct FlagTraits<ImageConstraintsInfoFlagBitsFUCHSIA>
+ struct FlagTraits<MicromapCreateFlagBitsEXT>
{
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+
enum : VkFlags
{
- allFlags = VkFlags( ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadRarely ) |
- VkFlags( ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadOften ) |
- VkFlags( ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteRarely ) |
- VkFlags( ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteOften ) |
- VkFlags( ImageConstraintsInfoFlagBitsFUCHSIA::eProtectedOptional )
+ allFlags = VkFlags( MicromapCreateFlagBitsEXT::eDeviceAddressCaptureReplay )
};
};
+ using MicromapCreateFlagsEXT = Flags<MicromapCreateFlagBitsEXT>;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFlagsFUCHSIA
- operator|( ImageConstraintsInfoFlagBitsFUCHSIA bit0, ImageConstraintsInfoFlagBitsFUCHSIA bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ImageConstraintsInfoFlagsFUCHSIA( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFlagsFUCHSIA
- operator&( ImageConstraintsInfoFlagBitsFUCHSIA bit0, ImageConstraintsInfoFlagBitsFUCHSIA bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ImageConstraintsInfoFlagsFUCHSIA( bit0 ) & bit1;
- }
+ //=== VK_NV_optical_flow ===
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFlagsFUCHSIA
- operator^( ImageConstraintsInfoFlagBitsFUCHSIA bit0, ImageConstraintsInfoFlagBitsFUCHSIA bit1 ) VULKAN_HPP_NOEXCEPT
+ template <>
+ struct FlagTraits<OpticalFlowUsageFlagBitsNV>
{
- return ImageConstraintsInfoFlagsFUCHSIA( bit0 ) ^ bit1;
- }
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFlagsFUCHSIA
- operator~( ImageConstraintsInfoFlagBitsFUCHSIA bits ) VULKAN_HPP_NOEXCEPT
- {
- return ~( ImageConstraintsInfoFlagsFUCHSIA( bits ) );
- }
+ enum : VkFlags
+ {
+ allFlags = VkFlags( OpticalFlowUsageFlagBitsNV::eUnknown ) | VkFlags( OpticalFlowUsageFlagBitsNV::eInput ) |
+ VkFlags( OpticalFlowUsageFlagBitsNV::eOutput ) | VkFlags( OpticalFlowUsageFlagBitsNV::eHint ) | VkFlags( OpticalFlowUsageFlagBitsNV::eCost ) |
+ VkFlags( OpticalFlowUsageFlagBitsNV::eGlobalFlow )
+ };
+ };
+ using OpticalFlowUsageFlagsNV = Flags<OpticalFlowUsageFlagBitsNV>;
- VULKAN_HPP_INLINE std::string to_string( ImageConstraintsInfoFlagsFUCHSIA value )
+ template <>
+ struct FlagTraits<OpticalFlowGridSizeFlagBitsNV>
{
- if ( !value )
- return "{}";
-
- std::string result;
- if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadRarely )
- result += "CpuReadRarely | ";
- if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadOften )
- result += "CpuReadOften | ";
- if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteRarely )
- result += "CpuWriteRarely | ";
- if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteOften )
- result += "CpuWriteOften | ";
- if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eProtectedOptional )
- result += "ProtectedOptional | ";
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- return "{ " + result.substr( 0, result.size() - 3 ) + " }";
- }
-#endif /*VK_USE_PLATFORM_FUCHSIA*/
+ enum : VkFlags
+ {
+ allFlags = VkFlags( OpticalFlowGridSizeFlagBitsNV::eUnknown ) | VkFlags( OpticalFlowGridSizeFlagBitsNV::e1X1 ) |
+ VkFlags( OpticalFlowGridSizeFlagBitsNV::e2X2 ) | VkFlags( OpticalFlowGridSizeFlagBitsNV::e4X4 ) |
+ VkFlags( OpticalFlowGridSizeFlagBitsNV::e8X8 )
+ };
+ };
+ using OpticalFlowGridSizeFlagsNV = Flags<OpticalFlowGridSizeFlagBitsNV>;
-#if defined( VK_USE_PLATFORM_SCREEN_QNX )
- //=== VK_QNX_screen_surface ===
+ template <>
+ struct FlagTraits<OpticalFlowSessionCreateFlagBitsNV>
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
- using ScreenSurfaceCreateFlagsQNX = Flags<ScreenSurfaceCreateFlagBitsQNX>;
+ enum : VkFlags
+ {
+ allFlags = VkFlags( OpticalFlowSessionCreateFlagBitsNV::eEnableHint ) | VkFlags( OpticalFlowSessionCreateFlagBitsNV::eEnableCost ) |
+ VkFlags( OpticalFlowSessionCreateFlagBitsNV::eEnableGlobalFlow ) | VkFlags( OpticalFlowSessionCreateFlagBitsNV::eAllowRegions ) |
+ VkFlags( OpticalFlowSessionCreateFlagBitsNV::eBothDirections )
+ };
+ };
+ using OpticalFlowSessionCreateFlagsNV = Flags<OpticalFlowSessionCreateFlagBitsNV>;
- VULKAN_HPP_INLINE std::string to_string( ScreenSurfaceCreateFlagsQNX )
+ template <>
+ struct FlagTraits<OpticalFlowExecuteFlagBitsNV>
{
- return "{}";
- }
-#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+ enum : VkFlags
+ {
+ allFlags = VkFlags( OpticalFlowExecuteFlagBitsNV::eDisableTemporalHints )
+ };
+ };
+ using OpticalFlowExecuteFlagsNV = Flags<OpticalFlowExecuteFlagBitsNV>;
} // namespace VULKAN_HPP_NAMESPACE
#endif
diff --git a/thirdparty/vulkan/include/vulkan/vulkan_format_traits.hpp b/thirdparty/vulkan/include/vulkan/vulkan_format_traits.hpp
new file mode 100644
index 0000000000..82bafce748
--- /dev/null
+++ b/thirdparty/vulkan/include/vulkan/vulkan_format_traits.hpp
@@ -0,0 +1,7357 @@
+// Copyright 2015-2022 The Khronos Group Inc.
+//
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_FORMAT_TRAITS_HPP
+#define VULKAN_FORMAT_TRAITS_HPP
+
+#include <vulkan/vulkan.hpp>
+
+namespace VULKAN_HPP_NAMESPACE
+{
+ //=====================
+ //=== Format Traits ===
+ //=====================
+
+ // The texel block size in bytes.
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t blockSize( VULKAN_HPP_NAMESPACE::Format format )
+ {
+ switch ( format )
+ {
+ case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8Uint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8Sint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR16Uint: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR16Sint: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: return 6;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: return 6;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: return 6;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: return 6;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: return 6;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: return 6;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: return 6;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eR32Uint: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR32Sint: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: return 12;
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: return 12;
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: return 12;
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eR64Uint: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eR64Sint: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: return 24;
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: return 24;
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: return 24;
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: return 32;
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: return 32;
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: return 32;
+ case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eS8Uint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: return 5;
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 6;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 6;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 6;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 6;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 6;
+ case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 6;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 6;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 6;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 6;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 6;
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 6;
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 6;
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 6;
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 6;
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 6;
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 6;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 6;
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 6;
+ case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return 4;
+
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ }
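   blockSize() gives the byte size of one texel block; for uncompressed formats the block is a single texel, so it doubles as the per-texel stride. A minimal sketch of estimating a tightly packed upload size, assuming vulkan_format_traits.hpp is included and the default vk namespace (the dimensions are illustrative):

     uint32_t width = 256, height = 256;
     // 4 bytes per texel for eR8G8B8A8Unorm, per the table above.
     vk::DeviceSize uploadSize = vk::DeviceSize( width ) * height * vk::blockSize( vk::Format::eR8G8B8A8Unorm );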
+
+ // The number of texels in a texel block.
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t texelsPerBlock( VULKAN_HPP_NAMESPACE::Format format )
+ {
+ switch ( format )
+ {
+ case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8Uint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8Sint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16Uint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16Sint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR32Uint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR32Sint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR64Uint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR64Sint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eS8Uint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return 20;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return 20;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return 25;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return 25;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return 30;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return 30;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return 36;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return 36;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return 40;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return 40;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return 48;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return 48;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return 64;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return 64;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return 50;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return 50;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return 60;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return 60;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return 80;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return 80;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return 100;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return 100;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return 120;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return 120;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return 144;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return 144;
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return 20;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return 25;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return 30;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return 36;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return 40;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return 48;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return 64;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return 50;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return 60;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return 80;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return 100;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return 120;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return 144;
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return 1;
+
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ }
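   For block-compressed formats, dividing blockSize() by texelsPerBlock() yields the average storage cost per texel. A small sketch using values from the tables above:

     // A 16-byte ASTC 8x8 block covers 64 texels -> 0.25 bytes per texel on average.
     float bytesPerTexel = float( vk::blockSize( vk::Format::eAstc8x8UnormBlock ) ) /
                           float( vk::texelsPerBlock( vk::Format::eAstc8x8UnormBlock ) );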
+
+ // The three-dimensional extent of a texel block.
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 std::array<uint8_t, 3> blockExtent( VULKAN_HPP_NAMESPACE::Format format )
+ {
+ switch ( format )
+ {
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return { { 5, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return { { 5, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return { { 5, 5, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return { { 5, 5, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return { { 6, 5, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return { { 6, 5, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return { { 6, 6, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return { { 6, 6, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return { { 8, 5, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return { { 8, 5, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return { { 8, 6, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return { { 8, 6, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return { { 8, 8, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return { { 8, 8, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return { { 10, 5, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return { { 10, 5, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return { { 10, 6, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return { { 10, 6, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return { { 10, 8, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return { { 10, 8, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return { { 10, 10, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return { { 10, 10, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return { { 12, 10, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return { { 12, 10, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return { { 12, 12, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return { { 12, 12, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return { { 2, 1, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return { { 2, 1, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return { { 2, 1, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return { { 2, 1, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return { { 2, 1, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return { { 2, 1, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return { { 2, 1, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return { { 2, 1, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return { { 5, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return { { 5, 5, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return { { 6, 5, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return { { 6, 6, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return { { 8, 5, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return { { 8, 6, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return { { 8, 8, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return { { 10, 5, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return { { 10, 6, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return { { 10, 8, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return { { 10, 10, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return { { 12, 10, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return { { 12, 12, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return { { 8, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return { { 8, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return { { 8, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return { { 4, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return { { 8, 4, 1 } };
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return { { 4, 4, 1 } };
+
+ default: return { { 1, 1, 1 } };
+ }
+ }
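   blockExtent() is what makes size calculations for compressed mip levels possible: each dimension is rounded up to a whole block before multiplying by blockSize(). A minimal sketch for a BC7 level, assuming the default vk namespace (dimensions are illustrative):

     uint32_t width = 300, height = 200;
     std::array<uint8_t, 3> extent = vk::blockExtent( vk::Format::eBc7UnormBlock );  // { 4, 4, 1 }
     uint32_t blocksX = ( width + extent[0] - 1 ) / extent[0];   // round up to whole blocks
     uint32_t blocksY = ( height + extent[1] - 1 ) / extent[1];
     vk::DeviceSize levelSize = vk::DeviceSize( blocksX ) * blocksY * vk::blockSize( vk::Format::eBc7UnormBlock );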
+
+ // A textual description of the compression scheme, or an empty string if it is not compressed
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 char const * compressionScheme( VULKAN_HPP_NAMESPACE::Format format )
+ {
+ switch ( format )
+ {
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return "BC";
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return "BC";
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return "BC";
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return "BC";
+ case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return "BC";
+ case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return "BC";
+ case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return "BC";
+ case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return "BC";
+ case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return "BC";
+ case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return "BC";
+ case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return "BC";
+ case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return "BC";
+ case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return "BC";
+ case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return "BC";
+ case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return "BC";
+ case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return "BC";
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return "ETC2";
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return "ETC2";
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return "ETC2";
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return "ETC2";
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return "ETC2";
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return "ETC2";
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return "EAC";
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return "EAC";
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return "EAC";
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return "EAC";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return "ASTC LDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return "ASTC LDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return "ASTC LDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return "ASTC LDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return "ASTC LDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return "ASTC LDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return "ASTC LDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return "ASTC LDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return "ASTC LDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return "ASTC LDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return "ASTC LDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return "ASTC LDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return "ASTC LDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return "ASTC LDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return "ASTC LDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return "ASTC LDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return "ASTC LDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return "ASTC LDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return "ASTC LDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return "ASTC LDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return "ASTC LDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return "ASTC LDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return "ASTC LDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return "ASTC LDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return "ASTC LDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return "ASTC LDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return "ASTC LDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return "ASTC LDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return "ASTC HDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return "ASTC HDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return "ASTC HDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return "ASTC HDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return "ASTC HDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return "ASTC HDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return "ASTC HDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return "ASTC HDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return "ASTC HDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return "ASTC HDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return "ASTC HDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return "ASTC HDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return "ASTC HDR";
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return "ASTC HDR";
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return "PVRTC";
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return "PVRTC";
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return "PVRTC";
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return "PVRTC";
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return "PVRTC";
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return "PVRTC";
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return "PVRTC";
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return "PVRTC";
+
+ default: return "";
+ }
+ }
+
+  // True if this format is a compressed one.
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 bool isCompressed( VULKAN_HPP_NAMESPACE::Format format )
+ {
+ return ( *VULKAN_HPP_NAMESPACE::compressionScheme( format ) != 0 );
+ }
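+
+  // Usage sketch (illustrative only, not part of the generated header; the helper name
+  // compressionLabel is hypothetical): isCompressed and compressionScheme above combine
+  // into a human-readable label such as "BC", "ASTC LDR", or "uncompressed".
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 char const * compressionLabel( VULKAN_HPP_NAMESPACE::Format format )
+  {
+    return VULKAN_HPP_NAMESPACE::isCompressed( format ) ? VULKAN_HPP_NAMESPACE::compressionScheme( format ) : "uncompressed";
+  }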
+
+ // The number of bits into which the format is packed. A single image element in this format
+ // can be stored in the same space as a scalar type of this bit width.
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t packed( VULKAN_HPP_NAMESPACE::Format format )
+ {
+ switch ( format )
+ {
+ case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 8;
+ case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 32;
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 32;
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 32;
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 32;
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 32;
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 32;
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 32;
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 32;
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 32;
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 32;
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 32;
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 32;
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 32;
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 32;
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 32;
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 32;
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 32;
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 32;
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 32;
+ case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 32;
+ case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 32;
+ case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 32;
+ case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 16;
+ case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 16;
+
+ default: return 0;
+ }
+ }
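+
+  // Usage sketch (illustrative only, not part of the generated header; the helper name
+  // isPacked is hypothetical): a non-zero result means one texel of the format fits into
+  // a single scalar of the returned bit width (8, 16, or 32), which is what the *PackN
+  // formats above encode.
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 bool isPacked( VULKAN_HPP_NAMESPACE::Format format )
+  {
+    return VULKAN_HPP_NAMESPACE::packed( format ) != 0;
+  }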
+
+  // True if the components of this format are compressed, false otherwise.
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 bool componentsAreCompressed( VULKAN_HPP_NAMESPACE::Format format )
+ {
+ switch ( format )
+ {
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock:
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock:
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG:
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG:
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG:
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG:
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG:
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG:
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG:
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return true;
+ default: return false;
+ }
+ }
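+
+  // Note (illustrative observation, not part of the generated header): the set above is
+  // exactly the BC, ETC2, ASTC, and PVRTC block formats; the EAC formats are block
+  // compressed as well, but their components keep a defined 11-bit size, so they report
+  // false here and still have entries in componentBits below.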
+
+  // The number of bits in the given component of this format, or 0 if the components are compressed.
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t componentBits( VULKAN_HPP_NAMESPACE::Format format, uint8_t component )
+ {
+ switch ( format )
+ {
+ case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8:
+ switch ( component )
+ {
+ case 0: return 4;
+ case 1: return 4;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16:
+ switch ( component )
+ {
+ case 0: return 4;
+ case 1: return 4;
+ case 2: return 4;
+ case 3: return 4;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16:
+ switch ( component )
+ {
+ case 0: return 4;
+ case 1: return 4;
+ case 2: return 4;
+ case 3: return 4;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16:
+ switch ( component )
+ {
+ case 0: return 5;
+ case 1: return 6;
+ case 2: return 5;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16:
+ switch ( component )
+ {
+ case 0: return 5;
+ case 1: return 6;
+ case 2: return 5;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16:
+ switch ( component )
+ {
+ case 0: return 5;
+ case 1: return 5;
+ case 2: return 5;
+ case 3: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16:
+ switch ( component )
+ {
+ case 0: return 5;
+ case 1: return 5;
+ case 2: return 5;
+ case 3: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16:
+ switch ( component )
+ {
+ case 0: return 1;
+ case 1: return 5;
+ case 2: return 5;
+ case 3: return 5;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8Unorm:
+ switch ( component )
+ {
+ case 0: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8Snorm:
+ switch ( component )
+ {
+ case 0: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled:
+ switch ( component )
+ {
+ case 0: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled:
+ switch ( component )
+ {
+ case 0: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8Uint:
+ switch ( component )
+ {
+ case 0: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8Sint:
+ switch ( component )
+ {
+ case 0: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8Srgb:
+ switch ( component )
+ {
+ case 0: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ case 3: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ case 3: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ case 3: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ case 3: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ case 3: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ case 3: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ case 3: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ case 3: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ case 3: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ case 3: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ case 3: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ case 3: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ case 3: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ case 3: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ case 3: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ case 3: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ case 3: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ case 3: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ case 3: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ case 3: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ case 3: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32:
+ switch ( component )
+ {
+ case 0: return 2;
+ case 1: return 10;
+ case 2: return 10;
+ case 3: return 10;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32:
+ switch ( component )
+ {
+ case 0: return 2;
+ case 1: return 10;
+ case 2: return 10;
+ case 3: return 10;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32:
+ switch ( component )
+ {
+ case 0: return 2;
+ case 1: return 10;
+ case 2: return 10;
+ case 3: return 10;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32:
+ switch ( component )
+ {
+ case 0: return 2;
+ case 1: return 10;
+ case 2: return 10;
+ case 3: return 10;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32:
+ switch ( component )
+ {
+ case 0: return 2;
+ case 1: return 10;
+ case 2: return 10;
+ case 3: return 10;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32:
+ switch ( component )
+ {
+ case 0: return 2;
+ case 1: return 10;
+ case 2: return 10;
+ case 3: return 10;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32:
+ switch ( component )
+ {
+ case 0: return 2;
+ case 1: return 10;
+ case 2: return 10;
+ case 3: return 10;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32:
+ switch ( component )
+ {
+ case 0: return 2;
+ case 1: return 10;
+ case 2: return 10;
+ case 3: return 10;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32:
+ switch ( component )
+ {
+ case 0: return 2;
+ case 1: return 10;
+ case 2: return 10;
+ case 3: return 10;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32:
+ switch ( component )
+ {
+ case 0: return 2;
+ case 1: return 10;
+ case 2: return 10;
+ case 3: return 10;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32:
+ switch ( component )
+ {
+ case 0: return 2;
+ case 1: return 10;
+ case 2: return 10;
+ case 3: return 10;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32:
+ switch ( component )
+ {
+ case 0: return 2;
+ case 1: return 10;
+ case 2: return 10;
+ case 3: return 10;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16Unorm:
+ switch ( component )
+ {
+ case 0: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16Snorm:
+ switch ( component )
+ {
+ case 0: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled:
+ switch ( component )
+ {
+ case 0: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled:
+ switch ( component )
+ {
+ case 0: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16Uint:
+ switch ( component )
+ {
+ case 0: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16Sint:
+ switch ( component )
+ {
+ case 0: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat:
+ switch ( component )
+ {
+ case 0: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ case 2: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ case 2: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ case 2: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ case 2: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ case 2: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ case 2: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ case 2: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ case 2: return 16;
+ case 3: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ case 2: return 16;
+ case 3: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ case 2: return 16;
+ case 3: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ case 2: return 16;
+ case 3: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ case 2: return 16;
+ case 3: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ case 2: return 16;
+ case 3: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ case 2: return 16;
+ case 3: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32Uint:
+ switch ( component )
+ {
+ case 0: return 32;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32Sint:
+ switch ( component )
+ {
+ case 0: return 32;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat:
+ switch ( component )
+ {
+ case 0: return 32;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint:
+ switch ( component )
+ {
+ case 0: return 32;
+ case 1: return 32;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint:
+ switch ( component )
+ {
+ case 0: return 32;
+ case 1: return 32;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat:
+ switch ( component )
+ {
+ case 0: return 32;
+ case 1: return 32;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint:
+ switch ( component )
+ {
+ case 0: return 32;
+ case 1: return 32;
+ case 2: return 32;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint:
+ switch ( component )
+ {
+ case 0: return 32;
+ case 1: return 32;
+ case 2: return 32;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat:
+ switch ( component )
+ {
+ case 0: return 32;
+ case 1: return 32;
+ case 2: return 32;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint:
+ switch ( component )
+ {
+ case 0: return 32;
+ case 1: return 32;
+ case 2: return 32;
+ case 3: return 32;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint:
+ switch ( component )
+ {
+ case 0: return 32;
+ case 1: return 32;
+ case 2: return 32;
+ case 3: return 32;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat:
+ switch ( component )
+ {
+ case 0: return 32;
+ case 1: return 32;
+ case 2: return 32;
+ case 3: return 32;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64Uint:
+ switch ( component )
+ {
+ case 0: return 64;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64Sint:
+ switch ( component )
+ {
+ case 0: return 64;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat:
+ switch ( component )
+ {
+ case 0: return 64;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint:
+ switch ( component )
+ {
+ case 0: return 64;
+ case 1: return 64;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint:
+ switch ( component )
+ {
+ case 0: return 64;
+ case 1: return 64;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat:
+ switch ( component )
+ {
+ case 0: return 64;
+ case 1: return 64;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint:
+ switch ( component )
+ {
+ case 0: return 64;
+ case 1: return 64;
+ case 2: return 64;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint:
+ switch ( component )
+ {
+ case 0: return 64;
+ case 1: return 64;
+ case 2: return 64;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat:
+ switch ( component )
+ {
+ case 0: return 64;
+ case 1: return 64;
+ case 2: return 64;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint:
+ switch ( component )
+ {
+ case 0: return 64;
+ case 1: return 64;
+ case 2: return 64;
+ case 3: return 64;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint:
+ switch ( component )
+ {
+ case 0: return 64;
+ case 1: return 64;
+ case 2: return 64;
+ case 3: return 64;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat:
+ switch ( component )
+ {
+ case 0: return 64;
+ case 1: return 64;
+ case 2: return 64;
+ case 3: return 64;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32:
+ switch ( component )
+ {
+ case 0: return 10;
+ case 1: return 11;
+          case 2: return 11;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32:
+ switch ( component )
+ {
+ case 0: return 9;
+ case 1: return 9;
+ case 2: return 9;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eD16Unorm:
+ switch ( component )
+ {
+ case 0: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32:
+ switch ( component )
+ {
+ case 0: return 24;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat:
+ switch ( component )
+ {
+ case 0: return 32;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eS8Uint:
+ switch ( component )
+ {
+ case 0: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint:
+ switch ( component )
+ {
+ case 0: return 24;
+ case 1: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint:
+ switch ( component )
+ {
+ case 0: return 32;
+ case 1: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock:
+ switch ( component )
+ {
+ case 0: return 11;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock:
+ switch ( component )
+ {
+ case 0: return 11;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock:
+ switch ( component )
+ {
+ case 0: return 11;
+ case 1: return 11;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock:
+ switch ( component )
+ {
+ case 0: return 11;
+ case 1: return 11;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ case 3: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ case 3: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16:
+ switch ( component )
+ {
+ case 0: return 10;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16:
+ switch ( component )
+ {
+ case 0: return 10;
+ case 1: return 10;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16:
+ switch ( component )
+ {
+ case 0: return 10;
+ case 1: return 10;
+ case 2: return 10;
+ case 3: return 10;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16:
+ switch ( component )
+ {
+ case 0: return 10;
+ case 1: return 10;
+ case 2: return 10;
+ case 3: return 10;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16:
+ switch ( component )
+ {
+ case 0: return 10;
+ case 1: return 10;
+ case 2: return 10;
+ case 3: return 10;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return 10;
+ case 1: return 10;
+ case 2: return 10;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return 10;
+ case 1: return 10;
+ case 2: return 10;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return 10;
+ case 1: return 10;
+ case 2: return 10;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return 10;
+ case 1: return 10;
+ case 2: return 10;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return 10;
+ case 1: return 10;
+ case 2: return 10;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16:
+ switch ( component )
+ {
+ case 0: return 12;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16:
+ switch ( component )
+ {
+ case 0: return 12;
+ case 1: return 12;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16:
+ switch ( component )
+ {
+ case 0: return 12;
+ case 1: return 12;
+ case 2: return 12;
+ case 3: return 12;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16:
+ switch ( component )
+ {
+ case 0: return 12;
+ case 1: return 12;
+ case 2: return 12;
+ case 3: return 12;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16:
+ switch ( component )
+ {
+ case 0: return 12;
+ case 1: return 12;
+ case 2: return 12;
+ case 3: return 12;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return 12;
+ case 1: return 12;
+ case 2: return 12;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return 12;
+ case 1: return 12;
+ case 2: return 12;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return 12;
+ case 1: return 12;
+ case 2: return 12;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return 12;
+ case 1: return 12;
+ case 2: return 12;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return 12;
+ case 1: return 12;
+ case 2: return 12;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ case 2: return 16;
+ case 3: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ case 2: return 16;
+ case 3: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ case 2: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ case 2: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ case 2: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ case 2: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ case 2: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm:
+ switch ( component )
+ {
+ case 0: return 8;
+ case 1: return 8;
+ case 2: return 8;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return 10;
+ case 1: return 10;
+ case 2: return 10;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return 12;
+ case 1: return 12;
+ case 2: return 12;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ case 2: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16:
+ switch ( component )
+ {
+ case 0: return 4;
+ case 1: return 4;
+ case 2: return 4;
+ case 3: return 4;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16:
+ switch ( component )
+ {
+ case 0: return 4;
+ case 1: return 4;
+ case 2: return 4;
+ case 3: return 4;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV:
+ switch ( component )
+ {
+ case 0: return 16;
+ case 1: return 16;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+
+ default: return 0;
+ }
+ }
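+
+  // Worked example (illustrative only, not part of the generated header): for an
+  // uncompressed packed format the per-component bit counts above sum to the packed bit
+  // width, e.g. for eA2R10G10B10UnormPack32: 2 + 10 + 10 + 10 = 32 == packed( format ).
+  // For formats whose components are compressed (see componentsAreCompressed above),
+  // no per-component size is defined and componentBits returns 0.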
+
+ // The number of components of this format.
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t componentCount( VULKAN_HPP_NAMESPACE::Format format )
+ {
+ switch ( format )
+ {
+ case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8Uint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8Sint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16Uint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16Sint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR32Uint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR32Sint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR64Uint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR64Sint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eS8Uint: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 1;
+ case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 4;
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return 2;
+
+ default: return 0;
+ }
+ }
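+
+ // Illustrative usage (a sketch added for review, not part of the generated header):
+ //   componentCount( VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm )  == 4
+ //   componentCount( VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint ) == 2
+ //   Formats not listed above (e.g. eUndefined) fall through to the default case and yield 0.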
+
+ // The name of the component
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 char const * componentName( VULKAN_HPP_NAMESPACE::Format format, uint8_t component )
+ {
+ switch ( format )
+ {
+ case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16:
+ switch ( component )
+ {
+ case 0: return "B";
+ case 1: return "G";
+ case 2: return "R";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16:
+ switch ( component )
+ {
+ case 0: return "B";
+ case 1: return "G";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16:
+ switch ( component )
+ {
+ case 0: return "B";
+ case 1: return "G";
+ case 2: return "R";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16:
+ switch ( component )
+ {
+ case 0: return "A";
+ case 1: return "R";
+ case 2: return "G";
+ case 3: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8Unorm:
+ switch ( component )
+ {
+ case 0: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8Snorm:
+ switch ( component )
+ {
+ case 0: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled:
+ switch ( component )
+ {
+ case 0: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled:
+ switch ( component )
+ {
+ case 0: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8Uint:
+ switch ( component )
+ {
+ case 0: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8Sint:
+ switch ( component )
+ {
+ case 0: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8Srgb:
+ switch ( component )
+ {
+ case 0: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm:
+ switch ( component )
+ {
+ case 0: return "B";
+ case 1: return "G";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm:
+ switch ( component )
+ {
+ case 0: return "B";
+ case 1: return "G";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled:
+ switch ( component )
+ {
+ case 0: return "B";
+ case 1: return "G";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled:
+ switch ( component )
+ {
+ case 0: return "B";
+ case 1: return "G";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint:
+ switch ( component )
+ {
+ case 0: return "B";
+ case 1: return "G";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint:
+ switch ( component )
+ {
+ case 0: return "B";
+ case 1: return "G";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb:
+ switch ( component )
+ {
+ case 0: return "B";
+ case 1: return "G";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm:
+ switch ( component )
+ {
+ case 0: return "B";
+ case 1: return "G";
+ case 2: return "R";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm:
+ switch ( component )
+ {
+ case 0: return "B";
+ case 1: return "G";
+ case 2: return "R";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled:
+ switch ( component )
+ {
+ case 0: return "B";
+ case 1: return "G";
+ case 2: return "R";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled:
+ switch ( component )
+ {
+ case 0: return "B";
+ case 1: return "G";
+ case 2: return "R";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint:
+ switch ( component )
+ {
+ case 0: return "B";
+ case 1: return "G";
+ case 2: return "R";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint:
+ switch ( component )
+ {
+ case 0: return "B";
+ case 1: return "G";
+ case 2: return "R";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb:
+ switch ( component )
+ {
+ case 0: return "B";
+ case 1: return "G";
+ case 2: return "R";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32:
+ switch ( component )
+ {
+ case 0: return "A";
+ case 1: return "B";
+ case 2: return "G";
+ case 3: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32:
+ switch ( component )
+ {
+ case 0: return "A";
+ case 1: return "B";
+ case 2: return "G";
+ case 3: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32:
+ switch ( component )
+ {
+ case 0: return "A";
+ case 1: return "B";
+ case 2: return "G";
+ case 3: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32:
+ switch ( component )
+ {
+ case 0: return "A";
+ case 1: return "B";
+ case 2: return "G";
+ case 3: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32:
+ switch ( component )
+ {
+ case 0: return "A";
+ case 1: return "B";
+ case 2: return "G";
+ case 3: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32:
+ switch ( component )
+ {
+ case 0: return "A";
+ case 1: return "B";
+ case 2: return "G";
+ case 3: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32:
+ switch ( component )
+ {
+ case 0: return "A";
+ case 1: return "B";
+ case 2: return "G";
+ case 3: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32:
+ switch ( component )
+ {
+ case 0: return "A";
+ case 1: return "R";
+ case 2: return "G";
+ case 3: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32:
+ switch ( component )
+ {
+ case 0: return "A";
+ case 1: return "R";
+ case 2: return "G";
+ case 3: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32:
+ switch ( component )
+ {
+ case 0: return "A";
+ case 1: return "R";
+ case 2: return "G";
+ case 3: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32:
+ switch ( component )
+ {
+ case 0: return "A";
+ case 1: return "R";
+ case 2: return "G";
+ case 3: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32:
+ switch ( component )
+ {
+ case 0: return "A";
+ case 1: return "R";
+ case 2: return "G";
+ case 3: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32:
+ switch ( component )
+ {
+ case 0: return "A";
+ case 1: return "R";
+ case 2: return "G";
+ case 3: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32:
+ switch ( component )
+ {
+ case 0: return "A";
+ case 1: return "B";
+ case 2: return "G";
+ case 3: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32:
+ switch ( component )
+ {
+ case 0: return "A";
+ case 1: return "B";
+ case 2: return "G";
+ case 3: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32:
+ switch ( component )
+ {
+ case 0: return "A";
+ case 1: return "B";
+ case 2: return "G";
+ case 3: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32:
+ switch ( component )
+ {
+ case 0: return "A";
+ case 1: return "B";
+ case 2: return "G";
+ case 3: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32:
+ switch ( component )
+ {
+ case 0: return "A";
+ case 1: return "B";
+ case 2: return "G";
+ case 3: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32:
+ switch ( component )
+ {
+ case 0: return "A";
+ case 1: return "B";
+ case 2: return "G";
+ case 3: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16Unorm:
+ switch ( component )
+ {
+ case 0: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16Snorm:
+ switch ( component )
+ {
+ case 0: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled:
+ switch ( component )
+ {
+ case 0: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled:
+ switch ( component )
+ {
+ case 0: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16Uint:
+ switch ( component )
+ {
+ case 0: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16Sint:
+ switch ( component )
+ {
+ case 0: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat:
+ switch ( component )
+ {
+ case 0: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32Uint:
+ switch ( component )
+ {
+ case 0: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32Sint:
+ switch ( component )
+ {
+ case 0: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat:
+ switch ( component )
+ {
+ case 0: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64Uint:
+ switch ( component )
+ {
+ case 0: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64Sint:
+ switch ( component )
+ {
+ case 0: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat:
+ switch ( component )
+ {
+ case 0: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32:
+ switch ( component )
+ {
+ case 0: return "B";
+ case 1: return "G";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32:
+ switch ( component )
+ {
+ case 0: return "B";
+ case 1: return "G";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eD16Unorm:
+ switch ( component )
+ {
+ case 0: return "D";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32:
+ switch ( component )
+ {
+ case 0: return "D";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat:
+ switch ( component )
+ {
+ case 0: return "D";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eS8Uint:
+ switch ( component )
+ {
+ case 0: return "S";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint:
+ switch ( component )
+ {
+ case 0: return "D";
+ case 1: return "S";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint:
+ switch ( component )
+ {
+ case 0: return "D";
+ case 1: return "S";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint:
+ switch ( component )
+ {
+ case 0: return "D";
+ case 1: return "S";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm:
+ switch ( component )
+ {
+ case 0: return "G";
+ case 1: return "B";
+ case 2: return "G";
+ case 3: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm:
+ switch ( component )
+ {
+ case 0: return "B";
+ case 1: return "G";
+ case 2: return "R";
+ case 3: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm:
+ switch ( component )
+ {
+ case 0: return "G";
+ case 1: return "B";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm:
+ switch ( component )
+ {
+ case 0: return "G";
+ case 1: return "B";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm:
+ switch ( component )
+ {
+ case 0: return "G";
+ case 1: return "B";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm:
+ switch ( component )
+ {
+ case 0: return "G";
+ case 1: return "B";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm:
+ switch ( component )
+ {
+ case 0: return "G";
+ case 1: return "B";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16:
+ switch ( component )
+ {
+ case 0: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16:
+ switch ( component )
+ {
+ case 0: return "G";
+ case 1: return "B";
+ case 2: return "G";
+ case 3: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16:
+ switch ( component )
+ {
+ case 0: return "B";
+ case 1: return "G";
+ case 2: return "R";
+ case 3: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return "G";
+ case 1: return "B";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return "G";
+ case 1: return "B";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return "G";
+ case 1: return "B";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return "G";
+ case 1: return "B";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return "G";
+ case 1: return "B";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16:
+ switch ( component )
+ {
+ case 0: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16:
+ switch ( component )
+ {
+ case 0: return "G";
+ case 1: return "B";
+ case 2: return "G";
+ case 3: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16:
+ switch ( component )
+ {
+ case 0: return "B";
+ case 1: return "G";
+ case 2: return "R";
+ case 3: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return "G";
+ case 1: return "B";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return "G";
+ case 1: return "B";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return "G";
+ case 1: return "B";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return "G";
+ case 1: return "B";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return "G";
+ case 1: return "B";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm:
+ switch ( component )
+ {
+ case 0: return "G";
+ case 1: return "B";
+ case 2: return "G";
+ case 3: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm:
+ switch ( component )
+ {
+ case 0: return "B";
+ case 1: return "G";
+ case 2: return "R";
+ case 3: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm:
+ switch ( component )
+ {
+ case 0: return "G";
+ case 1: return "B";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm:
+ switch ( component )
+ {
+ case 0: return "G";
+ case 1: return "B";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm:
+ switch ( component )
+ {
+ case 0: return "G";
+ case 1: return "B";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm:
+ switch ( component )
+ {
+ case 0: return "G";
+ case 1: return "B";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm:
+ switch ( component )
+ {
+ case 0: return "G";
+ case 1: return "B";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm:
+ switch ( component )
+ {
+ case 0: return "G";
+ case 1: return "B";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return "G";
+ case 1: return "B";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return "G";
+ case 1: return "B";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm:
+ switch ( component )
+ {
+ case 0: return "G";
+ case 1: return "B";
+ case 2: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16:
+ switch ( component )
+ {
+ case 0: return "A";
+ case 1: return "R";
+ case 2: return "G";
+ case 3: return "B";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16:
+ switch ( component )
+ {
+ case 0: return "A";
+ case 1: return "B";
+ case 2: return "G";
+ case 3: return "R";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ case 2: return "B";
+ case 3: return "A";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV:
+ switch ( component )
+ {
+ case 0: return "R";
+ case 1: return "G";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+
+ default: return "";
+ }
+ }
+
+ // The numeric format of the component
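+ // Illustrative usage (a sketch, not part of the generated interface): component
+ // index 2 of eR8G8B8A8Srgb is the blue channel, so the call below yields "SRGB".
+ //
+ //   char const * numericFormat =
+ //     VULKAN_HPP_NAMESPACE::componentNumericFormat( VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb, 2 );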
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 char const * componentNumericFormat( VULKAN_HPP_NAMESPACE::Format format, uint8_t component )
+ {
+ switch ( format )
+ {
+ case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8Unorm:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8Snorm:
+ switch ( component )
+ {
+ case 0: return "SNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled:
+ switch ( component )
+ {
+ case 0: return "USCALED";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled:
+ switch ( component )
+ {
+ case 0: return "SSCALED";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8Uint:
+ switch ( component )
+ {
+ case 0: return "UINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8Sint:
+ switch ( component )
+ {
+ case 0: return "SINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8Srgb:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm:
+ switch ( component )
+ {
+ case 0: return "SNORM";
+ case 1: return "SNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled:
+ switch ( component )
+ {
+ case 0: return "USCALED";
+ case 1: return "USCALED";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled:
+ switch ( component )
+ {
+ case 0: return "SSCALED";
+ case 1: return "SSCALED";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint:
+ switch ( component )
+ {
+ case 0: return "UINT";
+ case 1: return "UINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint:
+ switch ( component )
+ {
+ case 0: return "SINT";
+ case 1: return "SINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm:
+ switch ( component )
+ {
+ case 0: return "SNORM";
+ case 1: return "SNORM";
+ case 2: return "SNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled:
+ switch ( component )
+ {
+ case 0: return "USCALED";
+ case 1: return "USCALED";
+ case 2: return "USCALED";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled:
+ switch ( component )
+ {
+ case 0: return "SSCALED";
+ case 1: return "SSCALED";
+ case 2: return "SSCALED";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint:
+ switch ( component )
+ {
+ case 0: return "UINT";
+ case 1: return "UINT";
+ case 2: return "UINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint:
+ switch ( component )
+ {
+ case 0: return "SINT";
+ case 1: return "SINT";
+ case 2: return "SINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm:
+ switch ( component )
+ {
+ case 0: return "SNORM";
+ case 1: return "SNORM";
+ case 2: return "SNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled:
+ switch ( component )
+ {
+ case 0: return "USCALED";
+ case 1: return "USCALED";
+ case 2: return "USCALED";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled:
+ switch ( component )
+ {
+ case 0: return "SSCALED";
+ case 1: return "SSCALED";
+ case 2: return "SSCALED";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint:
+ switch ( component )
+ {
+ case 0: return "UINT";
+ case 1: return "UINT";
+ case 2: return "UINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint:
+ switch ( component )
+ {
+ case 0: return "SINT";
+ case 1: return "SINT";
+ case 2: return "SINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm:
+ switch ( component )
+ {
+ case 0: return "SNORM";
+ case 1: return "SNORM";
+ case 2: return "SNORM";
+ case 3: return "SNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled:
+ switch ( component )
+ {
+ case 0: return "USCALED";
+ case 1: return "USCALED";
+ case 2: return "USCALED";
+ case 3: return "USCALED";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled:
+ switch ( component )
+ {
+ case 0: return "SSCALED";
+ case 1: return "SSCALED";
+ case 2: return "SSCALED";
+ case 3: return "SSCALED";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint:
+ switch ( component )
+ {
+ case 0: return "UINT";
+ case 1: return "UINT";
+ case 2: return "UINT";
+ case 3: return "UINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint:
+ switch ( component )
+ {
+ case 0: return "SINT";
+ case 1: return "SINT";
+ case 2: return "SINT";
+ case 3: return "SINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ case 3: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm:
+ switch ( component )
+ {
+ case 0: return "SNORM";
+ case 1: return "SNORM";
+ case 2: return "SNORM";
+ case 3: return "SNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled:
+ switch ( component )
+ {
+ case 0: return "USCALED";
+ case 1: return "USCALED";
+ case 2: return "USCALED";
+ case 3: return "USCALED";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled:
+ switch ( component )
+ {
+ case 0: return "SSCALED";
+ case 1: return "SSCALED";
+ case 2: return "SSCALED";
+ case 3: return "SSCALED";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint:
+ switch ( component )
+ {
+ case 0: return "UINT";
+ case 1: return "UINT";
+ case 2: return "UINT";
+ case 3: return "UINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint:
+ switch ( component )
+ {
+ case 0: return "SINT";
+ case 1: return "SINT";
+ case 2: return "SINT";
+ case 3: return "SINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ case 3: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32:
+ switch ( component )
+ {
+ case 0: return "SNORM";
+ case 1: return "SNORM";
+ case 2: return "SNORM";
+ case 3: return "SNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32:
+ switch ( component )
+ {
+ case 0: return "USCALED";
+ case 1: return "USCALED";
+ case 2: return "USCALED";
+ case 3: return "USCALED";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32:
+ switch ( component )
+ {
+ case 0: return "SSCALED";
+ case 1: return "SSCALED";
+ case 2: return "SSCALED";
+ case 3: return "SSCALED";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32:
+ switch ( component )
+ {
+ case 0: return "UINT";
+ case 1: return "UINT";
+ case 2: return "UINT";
+ case 3: return "UINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32:
+ switch ( component )
+ {
+ case 0: return "SINT";
+ case 1: return "SINT";
+ case 2: return "SINT";
+ case 3: return "SINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ case 3: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32:
+ switch ( component )
+ {
+ case 0: return "SNORM";
+ case 1: return "SNORM";
+ case 2: return "SNORM";
+ case 3: return "SNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32:
+ switch ( component )
+ {
+ case 0: return "USCALED";
+ case 1: return "USCALED";
+ case 2: return "USCALED";
+ case 3: return "USCALED";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32:
+ switch ( component )
+ {
+ case 0: return "SSCALED";
+ case 1: return "SSCALED";
+ case 2: return "SSCALED";
+ case 3: return "SSCALED";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32:
+ switch ( component )
+ {
+ case 0: return "UINT";
+ case 1: return "UINT";
+ case 2: return "UINT";
+ case 3: return "UINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32:
+ switch ( component )
+ {
+ case 0: return "SINT";
+ case 1: return "SINT";
+ case 2: return "SINT";
+ case 3: return "SINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32:
+ switch ( component )
+ {
+ case 0: return "SNORM";
+ case 1: return "SNORM";
+ case 2: return "SNORM";
+ case 3: return "SNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32:
+ switch ( component )
+ {
+ case 0: return "USCALED";
+ case 1: return "USCALED";
+ case 2: return "USCALED";
+ case 3: return "USCALED";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32:
+ switch ( component )
+ {
+ case 0: return "SSCALED";
+ case 1: return "SSCALED";
+ case 2: return "SSCALED";
+ case 3: return "SSCALED";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32:
+ switch ( component )
+ {
+ case 0: return "UINT";
+ case 1: return "UINT";
+ case 2: return "UINT";
+ case 3: return "UINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32:
+ switch ( component )
+ {
+ case 0: return "SINT";
+ case 1: return "SINT";
+ case 2: return "SINT";
+ case 3: return "SINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16Unorm:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16Snorm:
+ switch ( component )
+ {
+ case 0: return "SNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled:
+ switch ( component )
+ {
+ case 0: return "USCALED";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled:
+ switch ( component )
+ {
+ case 0: return "SSCALED";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16Uint:
+ switch ( component )
+ {
+ case 0: return "UINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16Sint:
+ switch ( component )
+ {
+ case 0: return "SINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat:
+ switch ( component )
+ {
+ case 0: return "SFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm:
+ switch ( component )
+ {
+ case 0: return "SNORM";
+ case 1: return "SNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled:
+ switch ( component )
+ {
+ case 0: return "USCALED";
+ case 1: return "USCALED";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled:
+ switch ( component )
+ {
+ case 0: return "SSCALED";
+ case 1: return "SSCALED";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint:
+ switch ( component )
+ {
+ case 0: return "UINT";
+ case 1: return "UINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint:
+ switch ( component )
+ {
+ case 0: return "SINT";
+ case 1: return "SINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat:
+ switch ( component )
+ {
+ case 0: return "SFLOAT";
+ case 1: return "SFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm:
+ switch ( component )
+ {
+ case 0: return "SNORM";
+ case 1: return "SNORM";
+ case 2: return "SNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled:
+ switch ( component )
+ {
+ case 0: return "USCALED";
+ case 1: return "USCALED";
+ case 2: return "USCALED";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled:
+ switch ( component )
+ {
+ case 0: return "SSCALED";
+ case 1: return "SSCALED";
+ case 2: return "SSCALED";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint:
+ switch ( component )
+ {
+ case 0: return "UINT";
+ case 1: return "UINT";
+ case 2: return "UINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint:
+ switch ( component )
+ {
+ case 0: return "SINT";
+ case 1: return "SINT";
+ case 2: return "SINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat:
+ switch ( component )
+ {
+ case 0: return "SFLOAT";
+ case 1: return "SFLOAT";
+ case 2: return "SFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm:
+ switch ( component )
+ {
+ case 0: return "SNORM";
+ case 1: return "SNORM";
+ case 2: return "SNORM";
+ case 3: return "SNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled:
+ switch ( component )
+ {
+ case 0: return "USCALED";
+ case 1: return "USCALED";
+ case 2: return "USCALED";
+ case 3: return "USCALED";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled:
+ switch ( component )
+ {
+ case 0: return "SSCALED";
+ case 1: return "SSCALED";
+ case 2: return "SSCALED";
+ case 3: return "SSCALED";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint:
+ switch ( component )
+ {
+ case 0: return "UINT";
+ case 1: return "UINT";
+ case 2: return "UINT";
+ case 3: return "UINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint:
+ switch ( component )
+ {
+ case 0: return "SINT";
+ case 1: return "SINT";
+ case 2: return "SINT";
+ case 3: return "SINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat:
+ switch ( component )
+ {
+ case 0: return "SFLOAT";
+ case 1: return "SFLOAT";
+ case 2: return "SFLOAT";
+ case 3: return "SFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32Uint:
+ switch ( component )
+ {
+ case 0: return "UINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32Sint:
+ switch ( component )
+ {
+ case 0: return "SINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat:
+ switch ( component )
+ {
+ case 0: return "SFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint:
+ switch ( component )
+ {
+ case 0: return "UINT";
+ case 1: return "UINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint:
+ switch ( component )
+ {
+ case 0: return "SINT";
+ case 1: return "SINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat:
+ switch ( component )
+ {
+ case 0: return "SFLOAT";
+ case 1: return "SFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint:
+ switch ( component )
+ {
+ case 0: return "UINT";
+ case 1: return "UINT";
+ case 2: return "UINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint:
+ switch ( component )
+ {
+ case 0: return "SINT";
+ case 1: return "SINT";
+ case 2: return "SINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat:
+ switch ( component )
+ {
+ case 0: return "SFLOAT";
+ case 1: return "SFLOAT";
+ case 2: return "SFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint:
+ switch ( component )
+ {
+ case 0: return "UINT";
+ case 1: return "UINT";
+ case 2: return "UINT";
+ case 3: return "UINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint:
+ switch ( component )
+ {
+ case 0: return "SINT";
+ case 1: return "SINT";
+ case 2: return "SINT";
+ case 3: return "SINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat:
+ switch ( component )
+ {
+ case 0: return "SFLOAT";
+ case 1: return "SFLOAT";
+ case 2: return "SFLOAT";
+ case 3: return "SFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64Uint:
+ switch ( component )
+ {
+ case 0: return "UINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64Sint:
+ switch ( component )
+ {
+ case 0: return "SINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat:
+ switch ( component )
+ {
+ case 0: return "SFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint:
+ switch ( component )
+ {
+ case 0: return "UINT";
+ case 1: return "UINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint:
+ switch ( component )
+ {
+ case 0: return "SINT";
+ case 1: return "SINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat:
+ switch ( component )
+ {
+ case 0: return "SFLOAT";
+ case 1: return "SFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint:
+ switch ( component )
+ {
+ case 0: return "UINT";
+ case 1: return "UINT";
+ case 2: return "UINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint:
+ switch ( component )
+ {
+ case 0: return "SINT";
+ case 1: return "SINT";
+ case 2: return "SINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat:
+ switch ( component )
+ {
+ case 0: return "SFLOAT";
+ case 1: return "SFLOAT";
+ case 2: return "SFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint:
+ switch ( component )
+ {
+ case 0: return "UINT";
+ case 1: return "UINT";
+ case 2: return "UINT";
+ case 3: return "UINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint:
+ switch ( component )
+ {
+ case 0: return "SINT";
+ case 1: return "SINT";
+ case 2: return "SINT";
+ case 3: return "SINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat:
+ switch ( component )
+ {
+ case 0: return "SFLOAT";
+ case 1: return "SFLOAT";
+ case 2: return "SFLOAT";
+ case 3: return "SFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32:
+ switch ( component )
+ {
+ case 0: return "UFLOAT";
+ case 1: return "UFLOAT";
+ case 2: return "UFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32:
+ switch ( component )
+ {
+ case 0: return "UFLOAT";
+ case 1: return "UFLOAT";
+ case 2: return "UFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eD16Unorm:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat:
+ switch ( component )
+ {
+ case 0: return "SFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eS8Uint:
+ switch ( component )
+ {
+ case 0: return "UINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint:
+ switch ( component )
+ {
+ case 0: return "SFLOAT";
+ case 1: return "UINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ case 3: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ case 3: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ case 3: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock:
+ switch ( component )
+ {
+ case 0: return "UFLOAT";
+ case 1: return "UFLOAT";
+ case 2: return "UFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock:
+ switch ( component )
+ {
+ case 0: return "SFLOAT";
+ case 1: return "SFLOAT";
+ case 2: return "SFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ case 3: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ case 3: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ case 3: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock:
+ switch ( component )
+ {
+ case 0: return "SNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock:
+ switch ( component )
+ {
+ case 0: return "SNORM";
+ case 1: return "SNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ case 3: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ case 3: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ case 3: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ case 3: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ case 3: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ case 3: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ case 3: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ case 3: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ case 3: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ case 3: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ case 3: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ case 3: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ case 3: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ case 3: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock:
+ switch ( component )
+ {
+ case 0: return "SFLOAT";
+ case 1: return "SFLOAT";
+ case 2: return "SFLOAT";
+ case 3: return "SFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock:
+ switch ( component )
+ {
+ case 0: return "SFLOAT";
+ case 1: return "SFLOAT";
+ case 2: return "SFLOAT";
+ case 3: return "SFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock:
+ switch ( component )
+ {
+ case 0: return "SFLOAT";
+ case 1: return "SFLOAT";
+ case 2: return "SFLOAT";
+ case 3: return "SFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock:
+ switch ( component )
+ {
+ case 0: return "SFLOAT";
+ case 1: return "SFLOAT";
+ case 2: return "SFLOAT";
+ case 3: return "SFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock:
+ switch ( component )
+ {
+ case 0: return "SFLOAT";
+ case 1: return "SFLOAT";
+ case 2: return "SFLOAT";
+ case 3: return "SFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock:
+ switch ( component )
+ {
+ case 0: return "SFLOAT";
+ case 1: return "SFLOAT";
+ case 2: return "SFLOAT";
+ case 3: return "SFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock:
+ switch ( component )
+ {
+ case 0: return "SFLOAT";
+ case 1: return "SFLOAT";
+ case 2: return "SFLOAT";
+ case 3: return "SFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock:
+ switch ( component )
+ {
+ case 0: return "SFLOAT";
+ case 1: return "SFLOAT";
+ case 2: return "SFLOAT";
+ case 3: return "SFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock:
+ switch ( component )
+ {
+ case 0: return "SFLOAT";
+ case 1: return "SFLOAT";
+ case 2: return "SFLOAT";
+ case 3: return "SFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock:
+ switch ( component )
+ {
+ case 0: return "SFLOAT";
+ case 1: return "SFLOAT";
+ case 2: return "SFLOAT";
+ case 3: return "SFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock:
+ switch ( component )
+ {
+ case 0: return "SFLOAT";
+ case 1: return "SFLOAT";
+ case 2: return "SFLOAT";
+ case 3: return "SFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock:
+ switch ( component )
+ {
+ case 0: return "SFLOAT";
+ case 1: return "SFLOAT";
+ case 2: return "SFLOAT";
+ case 3: return "SFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock:
+ switch ( component )
+ {
+ case 0: return "SFLOAT";
+ case 1: return "SFLOAT";
+ case 2: return "SFLOAT";
+ case 3: return "SFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock:
+ switch ( component )
+ {
+ case 0: return "SFLOAT";
+ case 1: return "SFLOAT";
+ case 2: return "SFLOAT";
+ case 3: return "SFLOAT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG:
+ switch ( component )
+ {
+ case 0: return "UNORM";
+ case 1: return "UNORM";
+ case 2: return "UNORM";
+ case 3: return "UNORM";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ case 3: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ case 3: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ case 3: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG:
+ switch ( component )
+ {
+ case 0: return "SRGB";
+ case 1: return "SRGB";
+ case 2: return "SRGB";
+ case 3: return "SRGB";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV:
+ switch ( component )
+ {
+ case 0: return "SINT";
+ case 1: return "SINT";
+ default: VULKAN_HPP_ASSERT( false ); return "";
+ }
+
+ default: return "";
+ }
+ }
+
+ // The plane this component lies in.
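+ // Illustrative usage (a sketch, not part of the generated interface): for the
+ // two-plane format eG8B8R82Plane420Unorm the components at indices 1 and 2 are
+ // both stored in plane 1, so each call below returns 1.
+ //
+ //   uint8_t plane1 =
+ //     VULKAN_HPP_NAMESPACE::componentPlaneIndex( VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm, 1 );
+ //   uint8_t plane2 =
+ //     VULKAN_HPP_NAMESPACE::componentPlaneIndex( VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm, 2 );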
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t componentPlaneIndex( VULKAN_HPP_NAMESPACE::Format format, uint8_t component )
+ {
+ switch ( format )
+ {
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm:
+ switch ( component )
+ {
+ case 0: return 0;
+ case 1: return 1;
+ case 2: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm:
+ switch ( component )
+ {
+ case 0: return 0;
+ case 1: return 1;
+ case 2: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm:
+ switch ( component )
+ {
+ case 0: return 0;
+ case 1: return 1;
+ case 2: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm:
+ switch ( component )
+ {
+ case 0: return 0;
+ case 1: return 1;
+ case 2: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm:
+ switch ( component )
+ {
+ case 0: return 0;
+ case 1: return 1;
+ case 2: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return 0;
+ case 1: return 1;
+ case 2: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return 0;
+ case 1: return 1;
+ case 2: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return 0;
+ case 1: return 1;
+ case 2: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return 0;
+ case 1: return 1;
+ case 2: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return 0;
+ case 1: return 1;
+ case 2: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return 0;
+ case 1: return 1;
+ case 2: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return 0;
+ case 1: return 1;
+ case 2: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return 0;
+ case 1: return 1;
+ case 2: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return 0;
+ case 1: return 1;
+ case 2: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return 0;
+ case 1: return 1;
+ case 2: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm:
+ switch ( component )
+ {
+ case 0: return 0;
+ case 1: return 1;
+ case 2: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm:
+ switch ( component )
+ {
+ case 0: return 0;
+ case 1: return 1;
+ case 2: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm:
+ switch ( component )
+ {
+ case 0: return 0;
+ case 1: return 1;
+ case 2: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm:
+ switch ( component )
+ {
+ case 0: return 0;
+ case 1: return 1;
+ case 2: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm:
+ switch ( component )
+ {
+ case 0: return 0;
+ case 1: return 1;
+ case 2: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm:
+ switch ( component )
+ {
+ case 0: return 0;
+ case 1: return 1;
+ case 2: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return 0;
+ case 1: return 1;
+ case 2: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16:
+ switch ( component )
+ {
+ case 0: return 0;
+ case 1: return 1;
+ case 2: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm:
+ switch ( component )
+ {
+ case 0: return 0;
+ case 1: return 1;
+ case 2: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 0;
+ }
+
+ default: return 0;
+ }
+ }
+
+ // The number of image planes of this format.
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t planeCount( VULKAN_HPP_NAMESPACE::Format format )
+ {
+ switch ( format )
+ {
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 3;
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 2;
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 2;
+
+ default: return 1;
+ }
+ }
+
+ // The single-plane format that this plane is compatible with.
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 VULKAN_HPP_NAMESPACE::Format planeCompatibleFormat( VULKAN_HPP_NAMESPACE::Format format, uint8_t plane )
+ {
+ switch ( format )
+ {
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm:
+ switch ( plane )
+ {
+ case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+ case 1: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+ case 2: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+ default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm:
+ switch ( plane )
+ {
+ case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+ case 1: return VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm;
+ default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm:
+ switch ( plane )
+ {
+ case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+ case 1: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+ case 2: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+ default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm:
+ switch ( plane )
+ {
+ case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+ case 1: return VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm;
+ default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm:
+ switch ( plane )
+ {
+ case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+ case 1: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+ case 2: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+ default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+ case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+ case 2: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+ default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+ case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16;
+ default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+ case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+ case 2: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+ default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+ case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16;
+ default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+ case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+ case 2: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+ default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+ case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+ case 2: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+ default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+ case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16;
+ default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+ case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+ case 2: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+ default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+ case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16;
+ default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+ case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+ case 2: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+ default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm:
+ switch ( plane )
+ {
+ case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+ case 1: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+ case 2: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+ default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm:
+ switch ( plane )
+ {
+ case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+ case 1: return VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm;
+ default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm:
+ switch ( plane )
+ {
+ case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+ case 1: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+ case 2: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+ default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm:
+ switch ( plane )
+ {
+ case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+ case 1: return VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm;
+ default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm:
+ switch ( plane )
+ {
+ case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+ case 1: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+ case 2: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+ default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm:
+ switch ( plane )
+ {
+ case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+ case 1: return VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm;
+ default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+ case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16;
+ default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+ case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16;
+ default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm:
+ switch ( plane )
+ {
+ case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+ case 1: return VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm;
+ default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ }
+
+ default: VULKAN_HPP_ASSERT( plane == 0 ); return format;
+ }
+ }
+
+ // The relative height of this plane. A value of k means that this plane is 1/k the height of the overall format.
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t planeHeightDivisor( VULKAN_HPP_NAMESPACE::Format format, uint8_t plane )
+ {
+ switch ( format )
+ {
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 2;
+ case 2: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 1;
+ case 2: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 1;
+ case 2: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 2;
+ case 2: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 1;
+ case 2: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 1;
+ case 2: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 2;
+ case 2: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 1;
+ case 2: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 1;
+ case 2: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 2;
+ case 2: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 1;
+ case 2: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 1;
+ case 2: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+
+ default: VULKAN_HPP_ASSERT( plane == 0 ); return 1;
+ }
+ }
+
+ // The relative width of this plane. A value of k means that this plane is 1/k the width of the overall format.
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t planeWidthDivisor( VULKAN_HPP_NAMESPACE::Format format, uint8_t plane )
+ {
+ switch ( format )
+ {
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 2;
+ case 2: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 2;
+ case 2: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 1;
+ case 2: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 2;
+ case 2: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 2;
+ case 2: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 1;
+ case 2: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 2;
+ case 2: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 2;
+ case 2: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 1;
+ case 2: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 2;
+ case 2: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 2;
+ case 2: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 2;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 1;
+ case 2: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+ case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm:
+ switch ( plane )
+ {
+ case 0: return 1;
+ case 1: return 1;
+ default: VULKAN_HPP_ASSERT( false ); return 1;
+ }
+
+ default: VULKAN_HPP_ASSERT( plane == 0 ); return 1;
+ }
+ }
+
+} // namespace VULKAN_HPP_NAMESPACE
+#endif
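Not part of the upstream patch: a minimal usage sketch of the new format-traits helpers added in vulkan_format_traits.hpp above (planeCount, planeCompatibleFormat, planeWidthDivisor, planeHeightDivisor, componentPlaneIndex). It assumes the header is reachable as <vulkan/vulkan_format_traits.hpp> and that VULKAN_HPP_NAMESPACE is left at its default, `vk`; the chosen format and image size are arbitrary illustration values.

    #include <cstdint>
    #include <cstdio>
    #include <vulkan/vulkan_format_traits.hpp>

    int main()
    {
      // A 2-plane 4:2:0 YCbCr format from the tables above.
      constexpr vk::Format format = vk::Format::eG8B8R82Plane420Unorm;
      constexpr uint32_t   width  = 1920;
      constexpr uint32_t   height = 1080;

      // planeCount() is 1 for ordinary formats, 2 or 3 for multi-planar ones.
      for ( uint8_t plane = 0; plane < vk::planeCount( format ); ++plane )
      {
        // Each plane aliases a single-plane format and is an integer fraction of
        // the full image size; per the tables above, plane 1 of this 4:2:0 format
        // is half the width and half the height of the overall image.
        vk::Format compatible  = vk::planeCompatibleFormat( format, plane );
        uint32_t   planeWidth  = width / vk::planeWidthDivisor( format, plane );
        uint32_t   planeHeight = height / vk::planeHeightDivisor( format, plane );

        std::printf( "plane %u: %ux%u, compatible single-plane format %d\n",
                     static_cast<unsigned>( plane ),
                     static_cast<unsigned>( planeWidth ),
                     static_cast<unsigned>( planeHeight ),
                     static_cast<int>( compatible ) );
      }

      // componentPlaneIndex() maps a component (in the order named by the format,
      // here G, B, R) to the plane that stores it; per the table above, the R
      // component (index 2) of this 2-plane format lives in plane 1.
      std::printf( "R component stored in plane %u\n",
                   static_cast<unsigned>( vk::componentPlaneIndex( format, 2 ) ) );
      return 0;
    }

Such a loop is the typical way to size per-plane staging buffers or VK_IMAGE_ASPECT_PLANE_n copies for multi-planar images; for single-plane formats the same code degenerates to one iteration with divisors of 1.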
diff --git a/thirdparty/vulkan/include/vulkan/vulkan_funcs.hpp b/thirdparty/vulkan/include/vulkan/vulkan_funcs.hpp
index 7a1a04c115..650fbc3599 100644
--- a/thirdparty/vulkan/include/vulkan/vulkan_funcs.hpp
+++ b/thirdparty/vulkan/include/vulkan/vulkan_funcs.hpp
@@ -17,11 +17,10 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_VERSION_1_0 ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Instance * pInstance,
- Dispatch const & d ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Instance * pInstance,
+ Dispatch const & d ) VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( pCreateInfo ),
@@ -31,47 +30,43 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type
- createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d )
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type createInstance(
+ const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d )
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Instance instance;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkInstance *>( &instance ) ) );
- return createResultValue( result, instance, VULKAN_HPP_NAMESPACE_STRING "::createInstance" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkInstance *>( &instance ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::createInstance" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), instance );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>>::type
- createInstanceUnique( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d )
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>>::type createInstanceUnique(
+ const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d )
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Instance instance;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkInstance *>( &instance ) ) );
- ObjectDestroy<NoParent, Dispatch> deleter( allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::Instance, Dispatch>(
- result, instance, VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkInstance *>( &instance ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>( instance, ObjectDestroy<NoParent, Dispatch>( allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
@@ -79,97 +74,85 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Instance::destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Instance::destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyInstance( m_instance,
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Instance::enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount,
- VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount,
+ VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkEnumeratePhysicalDevices(
- m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( pPhysicalDevices ) ) );
+ return static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( pPhysicalDevices ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename PhysicalDeviceAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PhysicalDevice, PhysicalDeviceAllocator>>::type
- Instance::enumeratePhysicalDevices( Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator>>::type
+ Instance::enumeratePhysicalDevices( Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<PhysicalDevice, PhysicalDeviceAllocator> physicalDevices;
- uint32_t physicalDeviceCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator> physicalDevices;
+ uint32_t physicalDeviceCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && physicalDeviceCount )
+ result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && physicalDeviceCount )
{
physicalDevices.resize( physicalDeviceCount );
- result = static_cast<Result>( d.vkEnumeratePhysicalDevices(
- m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) ) );
+ result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" );
+ VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
+ if ( physicalDeviceCount < physicalDevices.size() )
{
- VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
- if ( physicalDeviceCount < physicalDevices.size() )
- {
- physicalDevices.resize( physicalDeviceCount );
- }
+ physicalDevices.resize( physicalDeviceCount );
}
- return createResultValue(
- result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDevices );
}
template <typename PhysicalDeviceAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, PhysicalDevice>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PhysicalDevice, PhysicalDeviceAllocator>>::type
- Instance::enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d ) const
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDevice>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator>>::type
+ Instance::enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<PhysicalDevice, PhysicalDeviceAllocator> physicalDevices( physicalDeviceAllocator );
- uint32_t physicalDeviceCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator> physicalDevices( physicalDeviceAllocator );
+ uint32_t physicalDeviceCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && physicalDeviceCount )
+ result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && physicalDeviceCount )
{
physicalDevices.resize( physicalDeviceCount );
- result = static_cast<Result>( d.vkEnumeratePhysicalDevices(
- m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) ) );
+ result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" );
+ VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
+ if ( physicalDeviceCount < physicalDevices.size() )
{
- VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
- if ( physicalDeviceCount < physicalDevices.size() )
- {
- physicalDevices.resize( physicalDeviceCount );
- }
+ physicalDevices.resize( physicalDeviceCount );
}
- return createResultValue(
- result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDevices );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( pFeatures ) );
@@ -181,63 +164,60 @@ namespace VULKAN_HPP_NAMESPACE
PhysicalDevice::getFeatures( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features;
d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( &features ) );
+
return features;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceFormatProperties(
- m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( pFormatProperties ) );
+ d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( pFormatProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties
- PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::FormatProperties formatProperties;
- d.vkGetPhysicalDeviceFormatProperties(
- m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( &formatProperties ) );
+ d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( &formatProperties ) );
+
return formatProperties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::ImageType type,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
- VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
- VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::ImageType type,
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
+ VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties(
- m_physicalDevice,
- static_cast<VkFormat>( format ),
- static_cast<VkImageType>( type ),
- static_cast<VkImageTiling>( tiling ),
- static_cast<VkImageUsageFlags>( usage ),
- static_cast<VkImageCreateFlags>( flags ),
- reinterpret_cast<VkImageFormatProperties *>( pImageFormatProperties ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice,
+ static_cast<VkFormat>( format ),
+ static_cast<VkImageType>( type ),
+ static_cast<VkImageTiling>( tiling ),
+ static_cast<VkImageUsageFlags>( usage ),
+ static_cast<VkImageCreateFlags>( flags ),
+ reinterpret_cast<VkImageFormatProperties *>( pImageFormatProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type
- PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type
+ PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
VULKAN_HPP_NAMESPACE::ImageType type,
VULKAN_HPP_NAMESPACE::ImageTiling tiling,
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
@@ -245,23 +225,24 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties;
- Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties(
- m_physicalDevice,
- static_cast<VkFormat>( format ),
- static_cast<VkImageType>( type ),
- static_cast<VkImageTiling>( tiling ),
- static_cast<VkImageUsageFlags>( usage ),
- static_cast<VkImageCreateFlags>( flags ),
- reinterpret_cast<VkImageFormatProperties *>( &imageFormatProperties ) ) );
- return createResultValue(
- result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" );
+ VkResult result = d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice,
+ static_cast<VkFormat>( format ),
+ static_cast<VkImageType>( type ),
+ static_cast<VkImageTiling>( tiling ),
+ static_cast<VkImageUsageFlags>( usage ),
+ static_cast<VkImageCreateFlags>( flags ),
+ reinterpret_cast<VkImageFormatProperties *>( &imageFormatProperties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void PhysicalDevice::getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( pProperties ) );
@@ -273,91 +254,95 @@ namespace VULKAN_HPP_NAMESPACE
PhysicalDevice::getProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties;
d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( &properties ) );
+
return properties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- PhysicalDevice::getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount,
- VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount,
+ VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice,
- pQueueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties *>( pQueueFamilyProperties ) );
+ d.vkGetPhysicalDeviceQueueFamilyProperties(
+ m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( pQueueFamilyProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename QueueFamilyPropertiesAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator>
PhysicalDevice::getQueueFamilyProperties( Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties;
- uint32_t queueFamilyPropertyCount;
+
+ std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties;
+ uint32_t queueFamilyPropertyCount;
d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
queueFamilyProperties.resize( queueFamilyPropertyCount );
d.vkGetPhysicalDeviceQueueFamilyProperties(
- m_physicalDevice,
- &queueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
+ m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
+
VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+ if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+ {
+ queueFamilyProperties.resize( queueFamilyPropertyCount );
+ }
return queueFamilyProperties;
}
template <typename QueueFamilyPropertiesAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator>
- PhysicalDevice::getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator,
- Dispatch const & d ) const
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, QueueFamilyProperties>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator>
+ PhysicalDevice::getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties(
- queueFamilyPropertiesAllocator );
- uint32_t queueFamilyPropertyCount;
+
+ std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties( queueFamilyPropertiesAllocator );
+ uint32_t queueFamilyPropertyCount;
d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
queueFamilyProperties.resize( queueFamilyPropertyCount );
d.vkGetPhysicalDeviceQueueFamilyProperties(
- m_physicalDevice,
- &queueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
+ m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
+
VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+ if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+ {
+ queueFamilyProperties.resize( queueFamilyPropertyCount );
+ }
return queueFamilyProperties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice,
- reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( pMemoryProperties ) );
+ d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( pMemoryProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties
- PhysicalDevice::getMemoryProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getMemoryProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties;
- d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice,
- reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( &memoryProperties ) );
+ d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( &memoryProperties ) );
+
return memoryProperties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char * pName,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return d.vkGetInstanceProcAddr( m_instance, pName );
@@ -365,17 +350,18 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return d.vkGetInstanceProcAddr( m_instance, name.c_str() );
+
+ PFN_vkVoidFunction result = d.vkGetInstanceProcAddr( m_instance, name.c_str() );
+
+ return result;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char * pName,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return d.vkGetDeviceProcAddr( m_device, pName );
@@ -383,20 +369,21 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return d.vkGetDeviceProcAddr( m_device, name.c_str() );
+
+ PFN_vkVoidFunction result = d.vkGetDeviceProcAddr( m_device, name.c_str() );
+
+ return result;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Device * pDevice,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Device * pDevice,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCreateDevice( m_physicalDevice,
@@ -407,48 +394,47 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type
- PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type PhysicalDevice::createDevice(
+ const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Device device;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateDevice( m_physicalDevice,
reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDevice *>( &device ) ) );
- return createResultValue( result, device, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDevice *>( &device ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), device );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>>::type
- PhysicalDevice::createDeviceUnique( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>>::type
+ PhysicalDevice::createDeviceUnique( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Device device;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateDevice( m_physicalDevice,
reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDevice *>( &device ) ) );
- ObjectDestroy<NoParent, Dispatch> deleter( allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::Device, Dispatch>(
- result, device, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDevice *>( &device ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>( device, ObjectDestroy<NoParent, Dispatch>( allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
@@ -456,353 +442,302 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyDevice( m_device,
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- enumerateInstanceExtensionProperties( const char * pLayerName,
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,
- Dispatch const & d ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char * pLayerName,
+ uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,
+ Dispatch const & d ) VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkEnumerateInstanceExtensionProperties(
- pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
+ return static_cast<Result>(
+ d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename ExtensionPropertiesAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type
- enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d )
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type
+ enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d )
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<ExtensionProperties, ExtensionPropertiesAllocator> properties;
- uint32_t propertyCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties;
+ uint32_t propertyCount;
+ VkResult result;
do
{
- result = static_cast<Result>(
- d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
+ result = d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<Result>(
- d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr,
- &propertyCount,
- reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
+ result = d.vkEnumerateInstanceExtensionProperties(
+ layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
template <typename ExtensionPropertiesAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, ExtensionProperties>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type
- enumerateInstanceExtensionProperties( Optional<const std::string> layerName,
- ExtensionPropertiesAllocator & extensionPropertiesAllocator,
- Dispatch const & d )
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, ExtensionProperties>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type
+ enumerateInstanceExtensionProperties( Optional<const std::string> layerName,
+ ExtensionPropertiesAllocator & extensionPropertiesAllocator,
+ Dispatch const & d )
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
- uint32_t propertyCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
+ uint32_t propertyCount;
+ VkResult result;
do
{
- result = static_cast<Result>(
- d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
+ result = d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<Result>(
- d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr,
- &propertyCount,
- reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
+ result = d.vkEnumerateInstanceExtensionProperties(
+ layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::enumerateDeviceExtensionProperties( const char * pLayerName,
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char * pLayerName,
+ uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkEnumerateDeviceExtensionProperties(
- m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
+ return static_cast<Result>(
+ d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename ExtensionPropertiesAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type
- PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type
+ PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<ExtensionProperties, ExtensionPropertiesAllocator> properties;
- uint32_t propertyCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties;
+ uint32_t propertyCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties(
- m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
+ result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<Result>(
- d.vkEnumerateDeviceExtensionProperties( m_physicalDevice,
- layerName ? layerName->c_str() : nullptr,
- &propertyCount,
- reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
+ result = d.vkEnumerateDeviceExtensionProperties(
+ m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
template <typename ExtensionPropertiesAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, ExtensionProperties>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type
- PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName,
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, ExtensionProperties>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type
+ PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName,
ExtensionPropertiesAllocator & extensionPropertiesAllocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
- uint32_t propertyCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
+ uint32_t propertyCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties(
- m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
+ result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<Result>(
- d.vkEnumerateDeviceExtensionProperties( m_physicalDevice,
- layerName ? layerName->c_str() : nullptr,
- &propertyCount,
- reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
+ result = d.vkEnumerateDeviceExtensionProperties(
+ m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- enumerateInstanceLayerProperties( uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,
- Dispatch const & d ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,
+ Dispatch const & d ) VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
+ return static_cast<Result>( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename LayerPropertiesAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type
- enumerateInstanceLayerProperties( Dispatch const & d )
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type
+ enumerateInstanceLayerProperties( Dispatch const & d )
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<LayerProperties, LayerPropertiesAllocator> properties;
- uint32_t propertyCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties;
+ uint32_t propertyCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
+ result = d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties(
- &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
+ result = d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
- return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
template <typename LayerPropertiesAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, LayerProperties>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type
- enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d )
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, LayerProperties>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type
+ enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d )
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
- uint32_t propertyCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
+ uint32_t propertyCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
+ result = d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties(
- &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
+ result = d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
- return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::enumerateDeviceLayerProperties( uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkEnumerateDeviceLayerProperties(
- m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
+ return static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename LayerPropertiesAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type
- PhysicalDevice::enumerateDeviceLayerProperties( Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type
+ PhysicalDevice::enumerateDeviceLayerProperties( Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<LayerProperties, LayerPropertiesAllocator> properties;
- uint32_t propertyCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties;
+ uint32_t propertyCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
+ result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties(
- m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
+ result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
template <typename LayerPropertiesAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, LayerProperties>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type
- PhysicalDevice::enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator,
- Dispatch const & d ) const
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, LayerProperties>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type
+ PhysicalDevice::enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
- uint32_t propertyCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
+ uint32_t propertyCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
+ result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties(
- m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
+ result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::getQueue( uint32_t queueFamilyIndex,
- uint32_t queueIndex,
- VULKAN_HPP_NAMESPACE::Queue * pQueue,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue * pQueue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( pQueue ) );
@@ -811,11 +746,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue
- Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Queue queue;
d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( &queue ) );
+
return queue;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -824,26 +761,23 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount,
const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits,
VULKAN_HPP_NAMESPACE::Fence fence,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkQueueSubmit(
- m_queue, submitCount, reinterpret_cast<const VkSubmitInfo *>( pSubmits ), static_cast<VkFence>( fence ) ) );
+ return static_cast<Result>( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo *>( pSubmits ), static_cast<VkFence>( fence ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Queue::submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits,
- VULKAN_HPP_NAMESPACE::Fence fence,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit(
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkQueueSubmit( m_queue,
- submits.size(),
- reinterpret_cast<const VkSubmitInfo *>( submits.data() ),
- static_cast<VkFence>( fence ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" );
+
+ VkResult result = d.vkQueueSubmit( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo *>( submits.data() ), static_cast<VkFence>( fence ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -856,12 +790,14 @@ namespace VULKAN_HPP_NAMESPACE
}
#else
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Queue::waitIdle( Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::waitIdle( Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" );
+
+ VkResult result = d.vkQueueWaitIdle( m_queue );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -874,21 +810,22 @@ namespace VULKAN_HPP_NAMESPACE
}
#else
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::waitIdle( Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::waitIdle( Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" );
+
+ VkResult result = d.vkDeviceWaitIdle( m_device );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkAllocateMemory( m_device,
@@ -899,42 +836,43 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type
- Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type
+ Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DeviceMemory memory;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkAllocateMemory( m_device,
reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDeviceMemory *>( &memory ) ) );
- return createResultValue( result, memory, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDeviceMemory *>( &memory ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memory );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>>::type
- Device::allocateMemoryUnique( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>>::type
+ Device::allocateMemoryUnique( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DeviceMemory memory;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkAllocateMemory( m_device,
reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDeviceMemory *>( &memory ) ) );
- ObjectFree<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>(
- result, memory, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDeviceMemory *>( &memory ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>( memory, ObjectFree<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -942,48 +880,46 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkFreeMemory(
- m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkFreeMemory( m_device,
static_cast<VkDeviceMemory>( memory ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkFreeMemory(
- m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkFreeMemory( m_device,
static_cast<VkDeviceMemory>( memory ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -993,7 +929,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize size,
VULKAN_HPP_NAMESPACE::MemoryMapFlags flags,
void ** ppData,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkMapMemory( m_device,
@@ -1006,78 +942,80 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void *>::type
- Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- VULKAN_HPP_NAMESPACE::DeviceSize size,
- VULKAN_HPP_NAMESPACE::MemoryMapFlags flags,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<void *>::type Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::DeviceSize size,
+ VULKAN_HPP_NAMESPACE::MemoryMapFlags flags,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- void * pData;
- Result result = static_cast<Result>( d.vkMapMemory( m_device,
- static_cast<VkDeviceMemory>( memory ),
- static_cast<VkDeviceSize>( offset ),
- static_cast<VkDeviceSize>( size ),
- static_cast<VkMemoryMapFlags>( flags ),
- &pData ) );
- return createResultValue( result, pData, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" );
+
+ void * pData;
+ VkResult result = d.vkMapMemory( m_device,
+ static_cast<VkDeviceMemory>( memory ),
+ static_cast<VkDeviceSize>( offset ),
+ static_cast<VkDeviceSize>( size ),
+ static_cast<VkMemoryMapFlags>( flags ),
+ &pData );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pData );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
}
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::flushMappedMemoryRanges( uint32_t memoryRangeCount,
- const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t memoryRangeCount,
+ const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkFlushMappedMemoryRanges(
- m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
+ return static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::flushMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,
- Dispatch const & d ) const
+ Device::flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkFlushMappedMemoryRanges(
- m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" );
+
+ VkResult result = d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount,
- const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount,
+ const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkInvalidateMappedMemoryRanges(
- m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
+ return static_cast<Result>(
+ d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::invalidateMappedMemoryRanges(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, Dispatch const & d ) const
+ Device::invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkInvalidateMappedMemoryRanges(
- m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" );
+
+ VkResult result = d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -1087,20 +1025,19 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDeviceMemoryCommitment(
- m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( pCommittedMemoryInBytes ) );
+ d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( pCommittedMemoryInBytes ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize
- Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes;
- d.vkGetDeviceMemoryCommitment(
- m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( &committedMemoryInBytes ) );
+ d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( &committedMemoryInBytes ) );
+
return committedMemoryInBytes;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -1109,29 +1046,25 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkBindBufferMemory( m_device,
- static_cast<VkBuffer>( buffer ),
- static_cast<VkDeviceMemory>( memory ),
- static_cast<VkDeviceSize>( memoryOffset ) ) );
+ return static_cast<Result>(
+ d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
}
#else
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory(
+ VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkBindBufferMemory( m_device,
- static_cast<VkBuffer>( buffer ),
- static_cast<VkDeviceMemory>( memory ),
- static_cast<VkDeviceSize>( memoryOffset ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" );
+
+ VkResult result =
+ d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -1139,174 +1072,166 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image,
VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkBindImageMemory( m_device,
- static_cast<VkImage>( image ),
- static_cast<VkDeviceMemory>( memory ),
- static_cast<VkDeviceSize>( memoryOffset ) ) );
+ return static_cast<Result>(
+ d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
}
#else
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image,
- VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory(
+ VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkBindImageMemory( m_device,
- static_cast<VkImage>( image ),
- static_cast<VkDeviceMemory>( memory ),
- static_cast<VkDeviceSize>( memoryOffset ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" );
+
+ VkResult result =
+ d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetBufferMemoryRequirements(
- m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
+ d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements
- Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
- d.vkGetBufferMemoryRequirements(
- m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
+ d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
+
return memoryRequirements;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
- VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetImageMemoryRequirements(
- m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
+ d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements
- Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
- d.vkGetImageMemoryRequirements(
- m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
+ d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
+
return memoryRequirements;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements(
- VULKAN_HPP_NAMESPACE::Image image,
- uint32_t * pSparseMemoryRequirementCount,
- VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
+ uint32_t * pSparseMemoryRequirementCount,
+ VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetImageSparseMemoryRequirements(
- m_device,
- static_cast<VkImage>( image ),
- pSparseMemoryRequirementCount,
- reinterpret_cast<VkSparseImageMemoryRequirements *>( pSparseMemoryRequirements ) );
+ d.vkGetImageSparseMemoryRequirements( m_device,
+ static_cast<VkImage>( image ),
+ pSparseMemoryRequirementCount,
+ reinterpret_cast<VkSparseImageMemoryRequirements *>( pSparseMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename SparseImageMemoryRequirementsAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD
- VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
- Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
+ Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements;
- uint32_t sparseMemoryRequirementCount;
- d.vkGetImageSparseMemoryRequirements(
- m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
+
+ std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements;
+ uint32_t sparseMemoryRequirementCount;
+ d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
- d.vkGetImageSparseMemoryRequirements(
- m_device,
- static_cast<VkImage>( image ),
- &sparseMemoryRequirementCount,
- reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
+ d.vkGetImageSparseMemoryRequirements( m_device,
+ static_cast<VkImage>( image ),
+ &sparseMemoryRequirementCount,
+ reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
+
VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+ if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+ {
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+ }
return sparseMemoryRequirements;
}
- template <
- typename SparseImageMemoryRequirementsAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements>::value, int>::type>
- VULKAN_HPP_NODISCARD
- VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
- Device::getImageSparseMemoryRequirements(
- VULKAN_HPP_NAMESPACE::Image image,
- SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator,
- Dispatch const & d ) const
+ template <typename SparseImageMemoryRequirementsAllocator,
+ typename Dispatch,
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
+ Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
+ SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements(
+
+ std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements(
sparseImageMemoryRequirementsAllocator );
uint32_t sparseMemoryRequirementCount;
- d.vkGetImageSparseMemoryRequirements(
- m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
+ d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
- d.vkGetImageSparseMemoryRequirements(
- m_device,
- static_cast<VkImage>( image ),
- &sparseMemoryRequirementCount,
- reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
+ d.vkGetImageSparseMemoryRequirements( m_device,
+ static_cast<VkImage>( image ),
+ &sparseMemoryRequirementCount,
+ reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
+
VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+ if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+ {
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+ }
return sparseMemoryRequirements;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::ImageType type,
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling,
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::ImageType type,
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+ uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceSparseImageFormatProperties(
- m_physicalDevice,
- static_cast<VkFormat>( format ),
- static_cast<VkImageType>( type ),
- static_cast<VkSampleCountFlagBits>( samples ),
- static_cast<VkImageUsageFlags>( usage ),
- static_cast<VkImageTiling>( tiling ),
- pPropertyCount,
- reinterpret_cast<VkSparseImageFormatProperties *>( pProperties ) );
+ d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
+ static_cast<VkFormat>( format ),
+ static_cast<VkImageType>( type ),
+ static_cast<VkSampleCountFlagBits>( samples ),
+ static_cast<VkImageUsageFlags>( usage ),
+ static_cast<VkImageTiling>( tiling ),
+ pPropertyCount,
+ reinterpret_cast<VkSparseImageFormatProperties *>( pProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename SparseImageFormatPropertiesAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
- PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
+ PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
VULKAN_HPP_NAMESPACE::ImageType type,
VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
@@ -1314,8 +1239,9 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties;
- uint32_t propertyCount;
+
+ std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties;
+ uint32_t propertyCount;
d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
static_cast<VkFormat>( format ),
static_cast<VkImageType>( type ),
@@ -1325,38 +1251,40 @@ namespace VULKAN_HPP_NAMESPACE
&propertyCount,
nullptr );
properties.resize( propertyCount );
- d.vkGetPhysicalDeviceSparseImageFormatProperties(
- m_physicalDevice,
- static_cast<VkFormat>( format ),
- static_cast<VkImageType>( type ),
- static_cast<VkSampleCountFlagBits>( samples ),
- static_cast<VkImageUsageFlags>( usage ),
- static_cast<VkImageTiling>( tiling ),
- &propertyCount,
- reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
+ d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
+ static_cast<VkFormat>( format ),
+ static_cast<VkImageType>( type ),
+ static_cast<VkSampleCountFlagBits>( samples ),
+ static_cast<VkImageUsageFlags>( usage ),
+ static_cast<VkImageTiling>( tiling ),
+ &propertyCount,
+ reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
+
VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
+ {
+ properties.resize( propertyCount );
+ }
return properties;
}
- template <
- typename SparseImageFormatPropertiesAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
- PhysicalDevice::getSparseImageFormatProperties(
- VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::ImageType type,
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling,
- SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties(
- sparseImageFormatPropertiesAllocator );
- uint32_t propertyCount;
+ template <typename SparseImageFormatPropertiesAllocator,
+ typename Dispatch,
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, SparseImageFormatProperties>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
+ PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::ImageType type,
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+ SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties( sparseImageFormatPropertiesAllocator );
+ uint32_t propertyCount;
d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
static_cast<VkFormat>( format ),
static_cast<VkImageType>( type ),
@@ -1366,57 +1294,55 @@ namespace VULKAN_HPP_NAMESPACE
&propertyCount,
nullptr );
properties.resize( propertyCount );
- d.vkGetPhysicalDeviceSparseImageFormatProperties(
- m_physicalDevice,
- static_cast<VkFormat>( format ),
- static_cast<VkImageType>( type ),
- static_cast<VkSampleCountFlagBits>( samples ),
- static_cast<VkImageUsageFlags>( usage ),
- static_cast<VkImageTiling>( tiling ),
- &propertyCount,
- reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
+ d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
+ static_cast<VkFormat>( format ),
+ static_cast<VkImageType>( type ),
+ static_cast<VkSampleCountFlagBits>( samples ),
+ static_cast<VkImageUsageFlags>( usage ),
+ static_cast<VkImageTiling>( tiling ),
+ &propertyCount,
+ reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
+
VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
+ {
+ properties.resize( propertyCount );
+ }
return properties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Queue::bindSparse( uint32_t bindInfoCount,
- const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo,
- VULKAN_HPP_NAMESPACE::Fence fence,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount,
+ const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo,
+ VULKAN_HPP_NAMESPACE::Fence fence,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkQueueBindSparse( m_queue,
- bindInfoCount,
- reinterpret_cast<const VkBindSparseInfo *>( pBindInfo ),
- static_cast<VkFence>( fence ) ) );
+ return static_cast<Result>(
+ d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo *>( pBindInfo ), static_cast<VkFence>( fence ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Queue::bindSparse( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo,
- VULKAN_HPP_NAMESPACE::Fence fence,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::bindSparse(
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result =
- static_cast<Result>( d.vkQueueBindSparse( m_queue,
- bindInfo.size(),
- reinterpret_cast<const VkBindSparseInfo *>( bindInfo.data() ),
- static_cast<VkFence>( fence ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" );
+
+ VkResult result =
+ d.vkQueueBindSparse( m_queue, bindInfo.size(), reinterpret_cast<const VkBindSparseInfo *>( bindInfo.data() ), static_cast<VkFence>( fence ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Fence * pFence,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Fence * pFence,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCreateFence( m_device,
@@ -1427,41 +1353,39 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
- Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::createFence(
+ const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Fence fence;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateFence( m_device,
reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkFence *>( &fence ) ) );
- return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkFence *>( &fence ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fence );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
- Device::createFenceUnique( const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type Device::createFenceUnique(
+ const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Fence fence;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateFence( m_device,
reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkFence *>( &fence ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::Fence, Dispatch>(
- result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkFence *>( &fence ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -1469,55 +1393,53 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyFence(
- m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyFence( m_device,
static_cast<VkFence>( fence ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyFence(
- m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyFence( m_device,
static_cast<VkFence>( fence ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount,
const VULKAN_HPP_NAMESPACE::Fence * pFences,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ) ) );
@@ -1526,33 +1448,36 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::resetFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, Dispatch const & d ) const
+ Device::resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>(
- d.vkResetFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" );
+
+ VkResult result = d.vkResetFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
}
#else
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
- return createResultValue( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+
+ VkResult result = d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -1561,39 +1486,38 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::Fence * pFences,
VULKAN_HPP_NAMESPACE::Bool32 waitAll,
uint64_t timeout,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkWaitForFences(
- m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) );
+ return static_cast<Result>(
+ d.vkWaitForFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::waitForFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,
- VULKAN_HPP_NAMESPACE::Bool32 waitAll,
- uint64_t timeout,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
+ Device::waitForFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,
+ VULKAN_HPP_NAMESPACE::Bool32 waitAll,
+ uint64_t timeout,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkWaitForFences( m_device,
- fences.size(),
- reinterpret_cast<const VkFence *>( fences.data() ),
- static_cast<VkBool32>( waitAll ),
- timeout ) );
- return createResultValue( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
+
+ VkResult result =
+ d.vkWaitForFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ), static_cast<VkBool32>( waitAll ), timeout );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCreateSemaphore( m_device,
@@ -1604,42 +1528,44 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type
- Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type
+ Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Semaphore semaphore;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateSemaphore( m_device,
reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSemaphore *>( &semaphore ) ) );
- return createResultValue( result, semaphore, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSemaphore *>( &semaphore ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), semaphore );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>>::type
- Device::createSemaphoreUnique( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>>::type
+ Device::createSemaphoreUnique( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Semaphore semaphore;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateSemaphore( m_device,
reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSemaphore *>( &semaphore ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>(
- result, semaphore, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSemaphore *>( &semaphore ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>( semaphore, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -1647,57 +1573,54 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroySemaphore(
- m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroySemaphore( m_device,
static_cast<VkSemaphore>( semaphore ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroySemaphore(
- m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroySemaphore( m_device,
static_cast<VkSemaphore>( semaphore ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Event * pEvent,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Event * pEvent,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCreateEvent( m_device,
@@ -1708,41 +1631,39 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type
- Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type Device::createEvent(
+ const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Event event;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateEvent( m_device,
reinterpret_cast<const VkEventCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkEvent *>( &event ) ) );
- return createResultValue( result, event, VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkEvent *>( &event ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), event );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>>::type
- Device::createEventUnique( const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>>::type Device::createEventUnique(
+ const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Event event;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateEvent( m_device,
reinterpret_cast<const VkEventCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkEvent *>( &event ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::Event, Dispatch>(
- result, event, VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkEvent *>( &event ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>( event, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -1750,116 +1671,117 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyEvent(
- m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyEvent( m_device,
static_cast<VkEvent>( event ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyEvent(
- m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyEvent( m_device,
static_cast<VkEvent>( event ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
}
#else
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
- return createResultValue( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus",
- { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } );
+
+ VkResult result = d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus",
+ { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
}
#else
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setEvent( VULKAN_HPP_NAMESPACE::Event event,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" );
+
+ VkResult result = d.vkSetEvent( m_device, static_cast<VkEvent>( event ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
}
#else
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" );
+
+ VkResult result = d.vkResetEvent( m_device, static_cast<VkEvent>( event ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCreateQueryPool( m_device,
@@ -1870,42 +1792,44 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type
- Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type
+ Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::QueryPool queryPool;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateQueryPool( m_device,
reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkQueryPool *>( &queryPool ) ) );
- return createResultValue( result, queryPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkQueryPool *>( &queryPool ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), queryPool );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>>::type
- Device::createQueryPoolUnique( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>>::type
+ Device::createQueryPoolUnique( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::QueryPool queryPool;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateQueryPool( m_device,
reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkQueryPool *>( &queryPool ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>(
- result, queryPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkQueryPool *>( &queryPool ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>( queryPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -1913,61 +1837,58 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyQueryPool(
- m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyQueryPool( m_device,
static_cast<VkQueryPool>( queryPool ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyQueryPool(
- m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyQueryPool( m_device,
static_cast<VkQueryPool>( queryPool ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- size_t dataSize,
- void * pData,
- VULKAN_HPP_NAMESPACE::DeviceSize stride,
- VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ uint32_t queryCount,
+ size_t dataSize,
+ void * pData,
+ VULKAN_HPP_NAMESPACE::DeviceSize stride,
+ VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetQueryPoolResults( m_device,
@@ -1981,33 +1902,8 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T, typename Dispatch>
- VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
- VULKAN_HPP_NODISCARD
- VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- ArrayProxy<T> const & data,
- VULKAN_HPP_NAMESPACE::DeviceSize stride,
- VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
- Dispatch const & d ) const
- {
- VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device,
- static_cast<VkQueryPool>( queryPool ),
- firstQuery,
- queryCount,
- data.size() * sizeof( T ),
- reinterpret_cast<void *>( data.data() ),
- static_cast<VkDeviceSize>( stride ),
- static_cast<VkQueryResultFlags>( flags ) ) );
- return createResultValue( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
- }
-
- template <typename DataType, typename Allocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<DataType, Allocator>>
+ template <typename DataType, typename DataTypeAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<DataType, DataTypeAllocator>>
Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t firstQuery,
uint32_t queryCount,
@@ -2017,54 +1913,56 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
- std::vector<DataType, Allocator> data( dataSize / sizeof( DataType ) );
- Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device,
- static_cast<VkQueryPool>( queryPool ),
- firstQuery,
- queryCount,
- data.size() * sizeof( DataType ),
- reinterpret_cast<void *>( data.data() ),
- static_cast<VkDeviceSize>( stride ),
- static_cast<VkQueryResultFlags>( flags ) ) );
- return createResultValue( result,
- data,
- VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+ std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
+ VkResult result = d.vkGetQueryPoolResults( m_device,
+ static_cast<VkQueryPool>( queryPool ),
+ firstQuery,
+ queryCount,
+ data.size() * sizeof( DataType ),
+ reinterpret_cast<void *>( data.data() ),
+ static_cast<VkDeviceSize>( stride ),
+ static_cast<VkQueryResultFlags>( flags ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+
+ return ResultValue<std::vector<DataType, DataTypeAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
template <typename DataType, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<DataType>
- Device::getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- VULKAN_HPP_NAMESPACE::DeviceSize stride,
- VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<DataType> Device::getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ uint32_t queryCount,
+ VULKAN_HPP_NAMESPACE::DeviceSize stride,
+ VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
DataType data;
- Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device,
- static_cast<VkQueryPool>( queryPool ),
- firstQuery,
- queryCount,
- sizeof( DataType ),
- reinterpret_cast<void *>( &data ),
- static_cast<VkDeviceSize>( stride ),
- static_cast<VkQueryResultFlags>( flags ) ) );
- return createResultValue( result,
- data,
- VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+ VkResult result = d.vkGetQueryPoolResults( m_device,
+ static_cast<VkQueryPool>( queryPool ),
+ firstQuery,
+ queryCount,
+ sizeof( DataType ),
+ reinterpret_cast<void *>( &data ),
+ static_cast<VkDeviceSize>( stride ),
+ static_cast<VkQueryResultFlags>( flags ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+
+ return ResultValue<DataType>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Buffer * pBuffer,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Buffer * pBuffer,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCreateBuffer( m_device,
@@ -2075,41 +1973,39 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type
- Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type Device::createBuffer(
+ const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Buffer buffer;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateBuffer( m_device,
reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkBuffer *>( &buffer ) ) );
- return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkBuffer *>( &buffer ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), buffer );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>>::type
- Device::createBufferUnique( const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>>::type Device::createBufferUnique(
+ const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Buffer buffer;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateBuffer( m_device,
reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkBuffer *>( &buffer ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>(
- result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkBuffer *>( &buffer ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>( buffer, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -2117,57 +2013,54 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyBuffer(
- m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyBuffer( m_device,
static_cast<VkBuffer>( buffer ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyBuffer(
- m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyBuffer( m_device,
static_cast<VkBuffer>( buffer ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::BufferView * pView,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::BufferView * pView,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCreateBufferView( m_device,
@@ -2178,42 +2071,43 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type
- Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type
+ Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::BufferView view;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateBufferView( m_device,
reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkBufferView *>( &view ) ) );
- return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkBufferView *>( &view ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), view );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>>::type
- Device::createBufferViewUnique( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>>::type
+ Device::createBufferViewUnique( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::BufferView view;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateBufferView( m_device,
reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkBufferView *>( &view ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>(
- result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkBufferView *>( &view ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>( view, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -2221,59 +2115,54 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyBufferView( m_device,
- static_cast<VkBufferView>( bufferView ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView,
+ VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyBufferView( m_device,
static_cast<VkBufferView>( bufferView ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyBufferView( m_device,
- static_cast<VkBufferView>( bufferView ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyBufferView( m_device,
static_cast<VkBufferView>( bufferView ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Image * pImage,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Image * pImage,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCreateImage( m_device,
@@ -2284,41 +2173,39 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type
- Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type Device::createImage(
+ const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Image image;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateImage( m_device,
reinterpret_cast<const VkImageCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkImage *>( &image ) ) );
- return createResultValue( result, image, VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkImage *>( &image ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), image );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>>::type
- Device::createImageUnique( const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>>::type Device::createImageUnique(
+ const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Image image;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateImage( m_device,
reinterpret_cast<const VkImageCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkImage *>( &image ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::Image, Dispatch>(
- result, image, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkImage *>( &image ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>( image, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -2326,48 +2213,46 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyImage(
- m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyImage( m_device,
static_cast<VkImage>( image ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyImage(
- m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyImage( m_device,
static_cast<VkImage>( image ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -2375,7 +2260,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image,
const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource,
VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkGetImageSubresourceLayout( m_device,
@@ -2386,27 +2271,26 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout
- Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image,
- const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout Device::getImageSubresourceLayout(
+ VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SubresourceLayout layout;
d.vkGetImageSubresourceLayout( m_device,
static_cast<VkImage>( image ),
reinterpret_cast<const VkImageSubresource *>( &subresource ),
reinterpret_cast<VkSubresourceLayout *>( &layout ) );
+
return layout;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::ImageView * pView,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::ImageView * pView,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCreateImageView( m_device,
@@ -2417,42 +2301,43 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type
- Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type
+ Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::ImageView view;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateImageView( m_device,
reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkImageView *>( &view ) ) );
- return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkImageView *>( &view ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), view );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>>::type
- Device::createImageViewUnique( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>>::type
+ Device::createImageViewUnique( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::ImageView view;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateImageView( m_device,
reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkImageView *>( &view ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>(
- result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkImageView *>( &view ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>( view, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -2460,104 +2345,102 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyImageView(
- m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyImageView( m_device,
static_cast<VkImageView>( imageView ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyImageView(
- m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyImageView( m_device,
static_cast<VkImageView>( imageView ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateShaderModule( m_device,
- reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkShaderModule *>( pShaderModule ) ) );
+ return static_cast<Result>( d.vkCreateShaderModule( m_device,
+ reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkShaderModule *>( pShaderModule ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type
- Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type
+ Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateShaderModule( m_device,
reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkShaderModule *>( &shaderModule ) ) );
- return createResultValue( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkShaderModule *>( &shaderModule ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), shaderModule );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>>::type
- Device::createShaderModuleUnique( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>>::type
+ Device::createShaderModuleUnique( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateShaderModule( m_device,
reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkShaderModule *>( &shaderModule ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>(
- result, shaderModule, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkShaderModule *>( &shaderModule ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>( shaderModule, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -2565,107 +2448,102 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyShaderModule( m_device,
- static_cast<VkShaderModule>( shaderModule ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyShaderModule( m_device,
static_cast<VkShaderModule>( shaderModule ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyShaderModule( m_device,
- static_cast<VkShaderModule>( shaderModule ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyShaderModule( m_device,
static_cast<VkShaderModule>( shaderModule ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreatePipelineCache( m_device,
- reinterpret_cast<const VkPipelineCacheCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkPipelineCache *>( pPipelineCache ) ) );
+ return static_cast<Result>( d.vkCreatePipelineCache( m_device,
+ reinterpret_cast<const VkPipelineCacheCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkPipelineCache *>( pPipelineCache ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type
- Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type
+ Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreatePipelineCache( m_device,
reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipelineCache *>( &pipelineCache ) ) );
- return createResultValue( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipelineCache *>( &pipelineCache ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelineCache );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>>::type
- Device::createPipelineCacheUnique( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>>::type
+ Device::createPipelineCacheUnique( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreatePipelineCache( m_device,
reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipelineCache *>( &pipelineCache ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>(
- result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipelineCache *>( &pipelineCache ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>( pipelineCache, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -2673,572 +2551,520 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyPipelineCache( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyPipelineCache( m_device,
static_cast<VkPipelineCache>( pipelineCache ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyPipelineCache( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyPipelineCache( m_device,
static_cast<VkPipelineCache>( pipelineCache ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- size_t * pDataSize,
- void * pData,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ size_t * pDataSize,
+ void * pData,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
+ return static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Uint8_tAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
- Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d ) const
+ Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
std::vector<uint8_t, Uint8_tAllocator> data;
size_t dataSize;
- Result result;
+ VkResult result;
do
{
- result = static_cast<Result>(
- d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
- if ( ( result == Result::eSuccess ) && dataSize )
+ result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr );
+ if ( ( result == VK_SUCCESS ) && dataSize )
{
data.resize( dataSize );
- result = static_cast<Result>( d.vkGetPipelineCacheData( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- &dataSize,
- reinterpret_cast<void *>( data.data() ) ) );
+ result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
+ VULKAN_HPP_ASSERT( dataSize <= data.size() );
+ if ( dataSize < data.size() )
{
- VULKAN_HPP_ASSERT( dataSize <= data.size() );
- if ( dataSize < data.size() )
- {
- data.resize( dataSize );
- }
+ data.resize( dataSize );
}
- return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
template <typename Uint8_tAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type>
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
- Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- Uint8_tAllocator & uint8_tAllocator,
- Dispatch const & d ) const
+ Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
size_t dataSize;
- Result result;
+ VkResult result;
do
{
- result = static_cast<Result>(
- d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
- if ( ( result == Result::eSuccess ) && dataSize )
+ result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr );
+ if ( ( result == VK_SUCCESS ) && dataSize )
{
data.resize( dataSize );
- result = static_cast<Result>( d.vkGetPipelineCacheData( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- &dataSize,
- reinterpret_cast<void *>( data.data() ) ) );
+ result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
+ VULKAN_HPP_ASSERT( dataSize <= data.size() );
+ if ( dataSize < data.size() )
{
- VULKAN_HPP_ASSERT( dataSize <= data.size() );
- if ( dataSize < data.size() )
- {
- data.resize( dataSize );
- }
+ data.resize( dataSize );
}
- return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache,
- uint32_t srcCacheCount,
- const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache,
+ uint32_t srcCacheCount,
+ const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkMergePipelineCaches( m_device,
- static_cast<VkPipelineCache>( dstCache ),
- srcCacheCount,
- reinterpret_cast<const VkPipelineCache *>( pSrcCaches ) ) );
+ return static_cast<Result>(
+ d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache *>( pSrcCaches ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches,
- Dispatch const & d ) const
+ Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result =
- static_cast<Result>( d.vkMergePipelineCaches( m_device,
- static_cast<VkPipelineCache>( dstCache ),
- srcCaches.size(),
- reinterpret_cast<const VkPipelineCache *>( srcCaches.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" );
+
+ VkResult result = d.vkMergePipelineCaches(
+ m_device, static_cast<VkPipelineCache>( dstCache ), srcCaches.size(), reinterpret_cast<const VkPipelineCache *>( srcCaches.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ uint32_t createInfoCount,
+ const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateGraphicsPipelines( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfoCount,
- reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( pCreateInfos ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkPipeline *>( pPipelines ) ) );
+ return static_cast<Result>( d.vkCreateGraphicsPipelines( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfoCount,
+ reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( pCreateInfos ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkPipeline *>( pPipelines ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename PipelineAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
- Device::createGraphicsPipelines(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
- Result result = static_cast<Result>(
- d.vkCreateGraphicsPipelines( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfos.size(),
- reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
- return createResultValue(
- result,
- pipelines,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ VkResult result = d.vkCreateGraphicsPipelines(
+ m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+
+ return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
}
template <typename PipelineAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type>
+ typename B0,
+ typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
- Device::createGraphicsPipelines(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d ) const
+ Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
- Result result = static_cast<Result>(
- d.vkCreateGraphicsPipelines( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfos.size(),
- reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
- return createResultValue(
- result,
- pipelines,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ VkResult result = d.vkCreateGraphicsPipelines(
+ m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+
+ return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
}
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
- Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Pipeline pipeline;
- Result result = static_cast<Result>(
- d.vkCreateGraphicsPipelines( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- 1,
- reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( &pipeline ) ) );
- return createResultValue(
- result,
- pipeline,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ VkResult result = d.vkCreateGraphicsPipelines(
+ m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ 1,
+ reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( &pipeline ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+
+ return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch, typename PipelineAllocator>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- Device::createGraphicsPipelinesUnique(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
- std::vector<Pipeline> pipelines( createInfos.size() );
- Result result = static_cast<Result>(
- d.vkCreateGraphicsPipelines( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfos.size(),
- reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
+ Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
+ VkResult result = d.vkCreateGraphicsPipelines(
+ m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
+ uniquePipelines.reserve( createInfos.size() );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ for ( auto const & pipeline : pipelines )
{
- uniquePipelines.reserve( createInfos.size() );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- for ( size_t i = 0; i < createInfos.size(); i++ )
- {
- uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
- }
+ uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
}
- return createResultValue(
- result,
- std::move( uniquePipelines ),
- VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
- }
-
- template <
- typename Dispatch,
- typename PipelineAllocator,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- Device::createGraphicsPipelinesUnique(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
- std::vector<Pipeline> pipelines( createInfos.size() );
- Result result = static_cast<Result>(
- d.vkCreateGraphicsPipelines( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfos.size(),
- reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+ return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
+ }
+
+ template <typename Dispatch,
+ typename PipelineAllocator,
+ typename B0,
+ typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
+ Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
+ VkResult result = d.vkCreateGraphicsPipelines(
+ m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
+ uniquePipelines.reserve( createInfos.size() );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ for ( auto const & pipeline : pipelines )
{
- uniquePipelines.reserve( createInfos.size() );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- for ( size_t i = 0; i < createInfos.size(); i++ )
- {
- uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
- }
+ uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
}
- return createResultValue(
- result,
- std::move( uniquePipelines ),
- VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
}
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>>
- Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>
+ Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Pipeline pipeline;
- Result result = static_cast<Result>(
- d.vkCreateGraphicsPipelines( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- 1,
- reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( &pipeline ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<Pipeline, Dispatch>(
- result,
- pipeline,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT },
- deleter );
+
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline;
+ VkResult result = d.vkCreateGraphicsPipelines(
+ m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ 1,
+ reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( &pipeline ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+
+ return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ uint32_t createInfoCount,
+ const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateComputePipelines( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfoCount,
- reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfos ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkPipeline *>( pPipelines ) ) );
+ return static_cast<Result>( d.vkCreateComputePipelines( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfoCount,
+ reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfos ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkPipeline *>( pPipelines ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename PipelineAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
- Device::createComputePipelines(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
- Result result = static_cast<Result>(
- d.vkCreateComputePipelines( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfos.size(),
- reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
- return createResultValue(
- result,
- pipelines,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ VkResult result = d.vkCreateComputePipelines(
+ m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+
+ return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
}
template <typename PipelineAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type>
+ typename B0,
+ typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
- Device::createComputePipelines(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d ) const
+ Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
- Result result = static_cast<Result>(
- d.vkCreateComputePipelines( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfos.size(),
- reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
- return createResultValue(
- result,
- pipelines,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ VkResult result = d.vkCreateComputePipelines(
+ m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+
+ return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
}
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
- Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Pipeline pipeline;
- Result result = static_cast<Result>(
- d.vkCreateComputePipelines( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- 1,
- reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( &pipeline ) ) );
- return createResultValue(
- result,
- pipeline,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ VkResult result = d.vkCreateComputePipelines(
+ m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ 1,
+ reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( &pipeline ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+
+ return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch, typename PipelineAllocator>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- Device::createComputePipelinesUnique(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
- std::vector<Pipeline> pipelines( createInfos.size() );
- Result result = static_cast<Result>(
- d.vkCreateComputePipelines( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfos.size(),
- reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
+ Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
+ VkResult result = d.vkCreateComputePipelines(
+ m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
+ uniquePipelines.reserve( createInfos.size() );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ for ( auto const & pipeline : pipelines )
{
- uniquePipelines.reserve( createInfos.size() );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- for ( size_t i = 0; i < createInfos.size(); i++ )
- {
- uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
- }
+ uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
}
- return createResultValue(
- result,
- std::move( uniquePipelines ),
- VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
- }
-
- template <
- typename Dispatch,
- typename PipelineAllocator,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- Device::createComputePipelinesUnique(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
- std::vector<Pipeline> pipelines( createInfos.size() );
- Result result = static_cast<Result>(
- d.vkCreateComputePipelines( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfos.size(),
- reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+ return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
+ }
+
+ template <typename Dispatch,
+ typename PipelineAllocator,
+ typename B0,
+ typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
+ Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
+ VkResult result = d.vkCreateComputePipelines(
+ m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
+ uniquePipelines.reserve( createInfos.size() );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ for ( auto const & pipeline : pipelines )
{
- uniquePipelines.reserve( createInfos.size() );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- for ( size_t i = 0; i < createInfos.size(); i++ )
- {
- uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
- }
+ uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
}
- return createResultValue(
- result,
- std::move( uniquePipelines ),
- VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
}
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>>
- Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>
+ Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Pipeline pipeline;
- Result result = static_cast<Result>(
- d.vkCreateComputePipelines( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- 1,
- reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( &pipeline ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<Pipeline, Dispatch>(
- result,
- pipeline,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT },
- deleter );
+
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline;
+ VkResult result = d.vkCreateComputePipelines(
+ m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ 1,
+ reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( &pipeline ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+
+ return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -3246,165 +3072,157 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyPipeline(
- m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyPipeline( m_device,
static_cast<VkPipeline>( pipeline ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyPipeline(
- m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyPipeline( m_device,
static_cast<VkPipeline>( pipeline ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreatePipelineLayout( m_device,
- reinterpret_cast<const VkPipelineLayoutCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkPipelineLayout *>( pPipelineLayout ) ) );
+ return static_cast<Result>( d.vkCreatePipelineLayout( m_device,
+ reinterpret_cast<const VkPipelineLayoutCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkPipelineLayout *>( pPipelineLayout ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type
- Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type
+ Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreatePipelineLayout( m_device,
reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) ) );
- return createResultValue( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelineLayout );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>>::type
- Device::createPipelineLayoutUnique( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>>::type
+ Device::createPipelineLayoutUnique( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreatePipelineLayout( m_device,
reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>(
- result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>( pipelineLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
+ VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyPipelineLayout( m_device,
- static_cast<VkPipelineLayout>( pipelineLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyPipelineLayout( m_device,
static_cast<VkPipelineLayout>( pipelineLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyPipelineLayout( m_device,
- static_cast<VkPipelineLayout>( pipelineLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyPipelineLayout( m_device,
static_cast<VkPipelineLayout>( pipelineLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Sampler * pSampler,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Sampler * pSampler,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCreateSampler( m_device,
@@ -3415,42 +3233,39 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type
- Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type Device::createSampler(
+ const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Sampler sampler;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateSampler( m_device,
reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSampler *>( &sampler ) ) );
- return createResultValue( result, sampler, VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSampler *>( &sampler ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), sampler );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>>::type
- Device::createSamplerUnique( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>>::type Device::createSamplerUnique(
+ const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Sampler sampler;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateSampler( m_device,
reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSampler *>( &sampler ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>(
- result, sampler, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSampler *>( &sampler ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>( sampler, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -3458,265 +3273,256 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroySampler(
- m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroySampler( m_device,
static_cast<VkSampler>( sampler ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroySampler(
- m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroySampler( m_device,
static_cast<VkSampler>( sampler ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateDescriptorSetLayout( m_device,
- reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkDescriptorSetLayout *>( pSetLayout ) ) );
+ return static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device,
+ reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkDescriptorSetLayout *>( pSetLayout ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type
- Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type
+ Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
- Result result = static_cast<Result>(
- d.vkCreateDescriptorSetLayout( m_device,
- reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) ) );
- return createResultValue( result, setLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" );
+ VkResult result = d.vkCreateDescriptorSetLayout(
+ m_device,
+ reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), setLayout );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>>::type
- Device::createDescriptorSetLayoutUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>>::type
+ Device::createDescriptorSetLayoutUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
- Result result = static_cast<Result>(
- d.vkCreateDescriptorSetLayout( m_device,
- reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>(
- result, setLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique", deleter );
+ VkResult result = d.vkCreateDescriptorSetLayout(
+ m_device,
+ reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>( setLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyDescriptorSetLayout( m_device,
- static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyDescriptorSetLayout(
+ m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyDescriptorSetLayout( m_device,
- static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+
+ d.vkDestroyDescriptorSetLayout(
+ m_device,
+ static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyDescriptorSetLayout( m_device,
- static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyDescriptorSetLayout(
+ m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyDescriptorSetLayout( m_device,
- static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+
+ d.vkDestroyDescriptorSetLayout(
+ m_device,
+ static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateDescriptorPool( m_device,
- reinterpret_cast<const VkDescriptorPoolCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkDescriptorPool *>( pDescriptorPool ) ) );
+ return static_cast<Result>( d.vkCreateDescriptorPool( m_device,
+ reinterpret_cast<const VkDescriptorPoolCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkDescriptorPool *>( pDescriptorPool ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type
- Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type
+ Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateDescriptorPool( m_device,
reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) ) );
- return createResultValue( result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorPool );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>>::type
- Device::createDescriptorPoolUnique( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>>::type
+ Device::createDescriptorPoolUnique( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateDescriptorPool( m_device,
reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>(
- result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>( descriptorPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
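  // Illustrative sketch, not part of the upstream diff: createDescriptorPoolUnique() now builds
  // the UniqueHandle inline inside createResultValueType(), with ObjectDestroy<Device, Dispatch>
  // as its deleter, rather than passing a deleter to createResultValue(). Assuming the `device`
  // from the previous sketch:
  //
  //   vk::DescriptorPoolSize poolSize( vk::DescriptorType::eUniformBuffer, 16 );
  //   vk::DescriptorPoolCreateInfo poolInfo{};
  //   poolInfo.flags         = vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet;
  //   poolInfo.maxSets       = 16;
  //   poolInfo.poolSizeCount = 1;
  //   poolInfo.pPoolSizes    = &poolSize;
  //   vk::UniqueDescriptorPool pool = device.createDescriptorPoolUnique( poolInfo );
  //   // vkDestroyDescriptorPool is called automatically when `pool` goes out of scope.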
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyDescriptorPool( m_device,
- static_cast<VkDescriptorPool>( descriptorPool ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyDescriptorPool( m_device,
static_cast<VkDescriptorPool>( descriptorPool ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyDescriptorPool( m_device,
- static_cast<VkDescriptorPool>( descriptorPool ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyDescriptorPool( m_device,
static_cast<VkDescriptorPool>( descriptorPool ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -3724,133 +3530,114 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkResetDescriptorPool(
- m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
+ return static_cast<Result>(
+ d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
}
#else
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
- VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkResetDescriptorPool(
- m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetDescriptorPool" );
+
+ d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
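  // Note, not part of the upstream diff: unlike most wrappers in this hunk, the enhanced-mode
  // resetDescriptorPool() actually changes its interface here — it now returns void and is
  // noexcept, matching the fact that vkResetDescriptorPool reports no failure codes. A hedged
  // call against the `device` and `pool` from the sketches above:
  //
  //   device.resetDescriptorPool( *pool );  // every set allocated from `pool` becomes invalid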
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo,
- VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo,
+ VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkAllocateDescriptorSets( m_device,
- reinterpret_cast<const VkDescriptorSetAllocateInfo *>( pAllocateInfo ),
- reinterpret_cast<VkDescriptorSet *>( pDescriptorSets ) ) );
+ return static_cast<Result>( d.vkAllocateDescriptorSets(
+ m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkDescriptorSet *>( pDescriptorSets ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename DescriptorSetAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type
- Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type
+ Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets(
- allocateInfo.descriptorSetCount );
- Result result = static_cast<Result>(
- d.vkAllocateDescriptorSets( m_device,
- reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ),
- reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) );
- return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
+
+ std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount );
+ VkResult result = d.vkAllocateDescriptorSets(
+ m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorSets );
}
template <typename DescriptorSetAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, DescriptorSet>::value, int>::type>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type
- Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,
+ typename B0,
+ typename std::enable_if<std::is_same<typename B0::value_type, DescriptorSet>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type
+ Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,
DescriptorSetAllocator & descriptorSetAllocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets(
- allocateInfo.descriptorSetCount, descriptorSetAllocator );
- Result result = static_cast<Result>(
- d.vkAllocateDescriptorSets( m_device,
- reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ),
- reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) );
- return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
+
+ std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount, descriptorSetAllocator );
+ VkResult result = d.vkAllocateDescriptorSets(
+ m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorSets );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch, typename DescriptorSetAllocator>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type
- Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type
+ Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets;
- std::vector<DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
- Result result = static_cast<Result>(
- d.vkAllocateDescriptorSets( m_device,
- reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ),
- reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+
+ std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
+ VkResult result = d.vkAllocateDescriptorSets(
+ m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
+ std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets;
+ uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
+ PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
+ for ( auto const & descriptorSet : descriptorSets )
{
- uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
- PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
- for ( size_t i = 0; i < allocateInfo.descriptorSetCount; i++ )
- {
- uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSets[i], deleter ) );
- }
+ uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) );
}
- return createResultValue(
- result, std::move( uniqueDescriptorSets ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueDescriptorSets ) );
}
template <typename Dispatch,
typename DescriptorSetAllocator,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<DescriptorSet, Dispatch>>::value,
- int>::type>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type
+ typename B0,
+ typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<DescriptorSet, Dispatch>>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type
Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,
- DescriptorSetAllocator & descriptorSetAllocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets(
- descriptorSetAllocator );
- std::vector<DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
- Result result = static_cast<Result>(
- d.vkAllocateDescriptorSets( m_device,
- reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ),
- reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ DescriptorSetAllocator & descriptorSetAllocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
+ VkResult result = d.vkAllocateDescriptorSets(
+ m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
+ std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets( descriptorSetAllocator );
+ uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
+ PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
+ for ( auto const & descriptorSet : descriptorSets )
{
- uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
- PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
- for ( size_t i = 0; i < allocateInfo.descriptorSetCount; i++ )
- {
- uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSets[i], deleter ) );
- }
+ uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) );
}
- return createResultValue(
- result, std::move( uniqueDescriptorSets ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueDescriptorSets ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
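  // Illustrative sketch, not part of the upstream diff: the allocateDescriptorSetsUnique()
  // overloads above now call resultCheck() up front and then wrap every returned handle with a
  // PoolFree<Device, DescriptorPool, Dispatch> deleter, which removes the old explicit eSuccess
  // branch. Assuming the `device`, `pool` (created with eFreeDescriptorSet) and `layout` from the
  // earlier sketches:
  //
  //   std::array<vk::DescriptorSetLayout, 2> layouts = { layout, layout };
  //   vk::DescriptorSetAllocateInfo allocInfo{};
  //   allocInfo.descriptorPool     = *pool;
  //   allocInfo.descriptorSetCount = static_cast<uint32_t>( layouts.size() );
  //   allocInfo.pSetLayouts        = layouts.data();
  //   std::vector<vk::UniqueDescriptorSet> sets = device.allocateDescriptorSetsUnique( allocInfo );
  //   // Each set is handed back to `pool` via vkFreeDescriptorSets when its handle is destroyed.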
@@ -3859,71 +3646,56 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE Result Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
uint32_t descriptorSetCount,
const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkFreeDescriptorSets( m_device,
- static_cast<VkDescriptorPool>( descriptorPool ),
- descriptorSetCount,
- reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
+ return static_cast<Result>( d.vkFreeDescriptorSets(
+ m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>(
- d.vkFreeDescriptorSets( m_device,
- static_cast<VkDescriptorPool>( descriptorPool ),
- descriptorSets.size(),
- reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::freeDescriptorSets" );
+
+ d.vkFreeDescriptorSets(
+ m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE Result Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
- uint32_t descriptorSetCount,
- const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE Result( Device::free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ uint32_t descriptorSetCount,
+ const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkFreeDescriptorSets( m_device,
- static_cast<VkDescriptorPool>( descriptorPool ),
- descriptorSetCount,
- reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
+ return static_cast<Result>( d.vkFreeDescriptorSets(
+ m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>(
- d.vkFreeDescriptorSets( m_device,
- static_cast<VkDescriptorPool>( descriptorPool ),
- descriptorSets.size(),
- reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::free" );
+
+ d.vkFreeDescriptorSets(
+ m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
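  // Note, not part of the upstream diff: the parentheses in `( Device::free )` keep a possible
  // function-like macro named `free` from expanding at the point of definition; apart from that,
  // the enhanced overload simply becomes void and noexcept like freeDescriptorSets(). A hedged
  // call, reusing the `allocInfo` from the previous sketch for a non-unique allocation:
  //
  //   std::vector<vk::DescriptorSet> rawSets = device.allocateDescriptorSets( allocInfo );
  //   device.freeDescriptorSets( *pool, rawSets );  // enhanced mode: returns void, noexcept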
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::updateDescriptorSets( uint32_t descriptorWriteCount,
- const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
- uint32_t descriptorCopyCount,
- const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount,
+ const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
+ uint32_t descriptorCopyCount,
+ const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkUpdateDescriptorSets( m_device,
@@ -3936,11 +3708,12 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void
- Device::updateDescriptorSets( ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::updateDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkUpdateDescriptorSets( m_device,
descriptorWrites.size(),
reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ),
@@ -3950,11 +3723,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCreateFramebuffer( m_device,
@@ -3965,42 +3737,44 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type
- Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type
+ Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateFramebuffer( m_device,
reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkFramebuffer *>( &framebuffer ) ) );
- return createResultValue( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkFramebuffer *>( &framebuffer ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), framebuffer );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>>::type
- Device::createFramebufferUnique( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>>::type
+ Device::createFramebufferUnique( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateFramebuffer( m_device,
reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkFramebuffer *>( &framebuffer ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>(
- result, framebuffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkFramebuffer *>( &framebuffer ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>( framebuffer, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -4008,60 +3782,54 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyFramebuffer( m_device,
- static_cast<VkFramebuffer>( framebuffer ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyFramebuffer( m_device,
static_cast<VkFramebuffer>( framebuffer ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyFramebuffer( m_device,
- static_cast<VkFramebuffer>( framebuffer ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyFramebuffer( m_device,
static_cast<VkFramebuffer>( framebuffer ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCreateRenderPass( m_device,
@@ -4072,42 +3840,44 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
- Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
+ Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::RenderPass renderPass;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateRenderPass( m_device,
reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
- return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkRenderPass *>( &renderPass ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), renderPass );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
- Device::createRenderPassUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
+ Device::createRenderPassUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::RenderPass renderPass;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateRenderPass( m_device,
reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>(
- result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkRenderPass *>( &renderPass ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
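  // Illustrative sketch, not part of the upstream diff: the return type of these wrappers is
  // still ResultValueType<T>::type, which is plain T when exceptions are enabled and
  // ResultValue<T> (carrying both vk::Result and the value) under VULKAN_HPP_NO_EXCEPTIONS. A
  // schematic call — the create info is left empty only to keep the example short:
  //
  //   vk::RenderPassCreateInfo rpInfo{};  // attachments / subpasses omitted
  //   #ifdef VULKAN_HPP_NO_EXCEPTIONS
  //     vk::ResultValue<vk::RenderPass> rv = device.createRenderPass( rpInfo );
  //     if ( rv.result == vk::Result::eSuccess ) { /* use rv.value */ }
  //   #else
  //     vk::RenderPass renderPass = device.createRenderPass( rpInfo );  // throws on error
  //   #endif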
@@ -4115,83 +3885,77 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyRenderPass( m_device,
- static_cast<VkRenderPass>( renderPass ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
+ VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyRenderPass( m_device,
static_cast<VkRenderPass>( renderPass ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyRenderPass( m_device,
- static_cast<VkRenderPass>( renderPass ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyRenderPass( m_device,
static_cast<VkRenderPass>( renderPass ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetRenderAreaGranularity(
- m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( pGranularity ) );
+ d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( pGranularity ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D
- Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Extent2D granularity;
- d.vkGetRenderAreaGranularity(
- m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( &granularity ) );
+ d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( &granularity ) );
+
return granularity;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCreateCommandPool( m_device,
@@ -4202,42 +3966,44 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type
- Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type
+ Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::CommandPool commandPool;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateCommandPool( m_device,
reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkCommandPool *>( &commandPool ) ) );
- return createResultValue( result, commandPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkCommandPool *>( &commandPool ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), commandPool );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>>::type
- Device::createCommandPoolUnique( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>>::type
+ Device::createCommandPoolUnique( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::CommandPool commandPool;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateCommandPool( m_device,
reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkCommandPool *>( &commandPool ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>(
- result, commandPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkCommandPool *>( &commandPool ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>( commandPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -4245,186 +4011,162 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyCommandPool( m_device,
- static_cast<VkCommandPool>( commandPool ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyCommandPool( m_device,
static_cast<VkCommandPool>( commandPool ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyCommandPool( m_device,
- static_cast<VkCommandPool>( commandPool ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyCommandPool( m_device,
static_cast<VkCommandPool>( commandPool ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkResetCommandPool(
- m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
+ return static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
}
#else
template <typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags,
- Dispatch const & d ) const
+ Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkResetCommandPool(
- m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" );
+
+ VkResult result = d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
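  // Illustrative sketch, not part of the upstream diff: resetCommandPool() keeps its
  // ResultValueType<void>::type return in enhanced mode, but now routes the VkResult through
  // resultCheck() and createResultValueType() like the other wrappers in this hunk. Assuming
  // `device` and a queue family index `queueFamily`:
  //
  //   vk::CommandPoolCreateInfo cpInfo{};
  //   cpInfo.flags            = vk::CommandPoolCreateFlagBits::eResetCommandBuffer;
  //   cpInfo.queueFamilyIndex = queueFamily;
  //   vk::UniqueCommandPool commandPool = device.createCommandPoolUnique( cpInfo );
  //   device.resetCommandPool( *commandPool );  // throws vk::SystemError on failure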
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo,
- VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo,
+ VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkAllocateCommandBuffers( m_device,
- reinterpret_cast<const VkCommandBufferAllocateInfo *>( pAllocateInfo ),
- reinterpret_cast<VkCommandBuffer *>( pCommandBuffers ) ) );
+ return static_cast<Result>( d.vkAllocateCommandBuffers(
+ m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkCommandBuffer *>( pCommandBuffers ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename CommandBufferAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type
- Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type
+ Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers(
- allocateInfo.commandBufferCount );
- Result result = static_cast<Result>(
- d.vkAllocateCommandBuffers( m_device,
- reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ),
- reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
- return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );
+
+ std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount );
+ VkResult result = d.vkAllocateCommandBuffers(
+ m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), commandBuffers );
}
template <typename CommandBufferAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, CommandBuffer>::value, int>::type>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type
- Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo,
+ typename B0,
+ typename std::enable_if<std::is_same<typename B0::value_type, CommandBuffer>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type
+ Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo,
CommandBufferAllocator & commandBufferAllocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers(
- allocateInfo.commandBufferCount, commandBufferAllocator );
- Result result = static_cast<Result>(
- d.vkAllocateCommandBuffers( m_device,
- reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ),
- reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
- return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );
+
+ std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount, commandBufferAllocator );
+ VkResult result = d.vkAllocateCommandBuffers(
+ m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), commandBuffers );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch, typename CommandBufferAllocator>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
- Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
+ Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers;
- std::vector<CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
- Result result = static_cast<Result>(
- d.vkAllocateCommandBuffers( m_device,
- reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ),
- reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+
+ std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
+ VkResult result = d.vkAllocateCommandBuffers(
+ m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
+ std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers;
+ uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
+ PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
+ for ( auto const & commandBuffer : commandBuffers )
{
- uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
- PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
- for ( size_t i = 0; i < allocateInfo.commandBufferCount; i++ )
- {
- uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffers[i], deleter ) );
- }
+ uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffer, deleter ) );
}
- return createResultValue(
- result, std::move( uniqueCommandBuffers ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueCommandBuffers ) );
}
template <typename Dispatch,
typename CommandBufferAllocator,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<CommandBuffer, Dispatch>>::value,
- int>::type>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
+ typename B0,
+ typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<CommandBuffer, Dispatch>>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo,
- CommandBufferAllocator & commandBufferAllocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers(
- commandBufferAllocator );
- std::vector<CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
- Result result = static_cast<Result>(
- d.vkAllocateCommandBuffers( m_device,
- reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ),
- reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ CommandBufferAllocator & commandBufferAllocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
+ VkResult result = d.vkAllocateCommandBuffers(
+ m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
+ std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers( commandBufferAllocator );
+ uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
+ PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
+ for ( auto const & commandBuffer : commandBuffers )
{
- uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
- PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
- for ( size_t i = 0; i < allocateInfo.commandBufferCount; i++ )
- {
- uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffers[i], deleter ) );
- }
+ uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffer, deleter ) );
}
- return createResultValue(
- result, std::move( uniqueCommandBuffers ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueCommandBuffers ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -4433,75 +4175,69 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
uint32_t commandBufferCount,
const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkFreeCommandBuffers( m_device,
- static_cast<VkCommandPool>( commandPool ),
- commandBufferCount,
- reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
+ d.vkFreeCommandBuffers(
+ m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkFreeCommandBuffers( m_device,
- static_cast<VkCommandPool>( commandPool ),
- commandBuffers.size(),
- reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
+
+ d.vkFreeCommandBuffers(
+ m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- uint32_t commandBufferCount,
- const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ uint32_t commandBufferCount,
+ const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkFreeCommandBuffers( m_device,
- static_cast<VkCommandPool>( commandPool ),
- commandBufferCount,
- reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
+ d.vkFreeCommandBuffers(
+ m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkFreeCommandBuffers( m_device,
- static_cast<VkCommandPool>( commandPool ),
- commandBuffers.size(),
- reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
+
+ d.vkFreeCommandBuffers(
+ m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::begin(
- const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( pBeginInfo ) ) );
+ return static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( pBeginInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo, Dispatch const & d ) const
+ CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>(
- d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( &beginInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" );
+
+ VkResult result = d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( &beginInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -4514,66 +4250,66 @@ namespace VULKAN_HPP_NAMESPACE
}
#else
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- CommandBuffer::end( Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::end( Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" );
+
+ VkResult result = d.vkEndCommandBuffer( m_commandBuffer );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::reset(
- VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
+ return static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
}
#else
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type
- CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result =
- static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" );
+
+ VkResult result = d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBindPipeline(
- m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
+ d.vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport,
uint32_t viewportCount,
const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetViewport(
- m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
+ d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
+ VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetViewport(
- m_commandBuffer, firstViewport, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
+
+ d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -4589,13 +4325,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetScissor(
- m_commandBuffer, firstScissor, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
+
+ d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -4607,68 +4343,60 @@ namespace VULKAN_HPP_NAMESPACE
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor,
- float depthBiasClamp,
- float depthBiasSlopeFactor,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4],
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds,
- float maxDepthBounds,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
- uint32_t compareMask,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
- uint32_t writeMask,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
- uint32_t reference,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
- VULKAN_HPP_NAMESPACE::PipelineLayout layout,
- uint32_t firstSet,
- uint32_t descriptorSetCount,
+ VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+ uint32_t firstSet,
+ uint32_t descriptorSetCount,
const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
- uint32_t dynamicOffsetCount,
- const uint32_t * pDynamicOffsets,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ uint32_t dynamicOffsetCount,
+ const uint32_t * pDynamicOffsets,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdBindDescriptorSets( m_commandBuffer,
@@ -4683,15 +4411,15 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
- VULKAN_HPP_NAMESPACE::PipelineLayout layout,
- uint32_t firstSet,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
- ArrayProxy<const uint32_t> const & dynamicOffsets,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+ uint32_t firstSet,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & dynamicOffsets,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdBindDescriptorSets( m_commandBuffer,
static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
static_cast<VkPipelineLayout>( layout ),
@@ -4710,10 +4438,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBindIndexBuffer( m_commandBuffer,
- static_cast<VkBuffer>( buffer ),
- static_cast<VkDeviceSize>( offset ),
- static_cast<VkIndexType>( indexType ) );
+ d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkIndexType>( indexType ) );
}
template <typename Dispatch>
@@ -4721,23 +4446,19 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t bindingCount,
const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBindVertexBuffers( m_commandBuffer,
- firstBinding,
- bindingCount,
- reinterpret_cast<const VkBuffer *>( pBuffers ),
- reinterpret_cast<const VkDeviceSize *>( pOffsets ) );
+ d.vkCmdBindVertexBuffers(
+ m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), reinterpret_cast<const VkDeviceSize *>( pOffsets ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::bindVertexBuffers( uint32_t firstBinding,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
@@ -4745,8 +4466,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
if ( buffers.size() != offsets.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
@@ -4759,11 +4479,8 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount,
- uint32_t instanceCount,
- uint32_t firstVertex,
- uint32_t firstInstance,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::draw(
+ uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
@@ -4789,8 +4506,7 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDrawIndirect(
- m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+ d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
}
template <typename Dispatch>
@@ -4798,18 +4514,15 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize offset,
uint32_t drawCount,
uint32_t stride,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDrawIndexedIndirect(
- m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+ d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
@@ -4829,7 +4542,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
uint32_t regionCount,
const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdCopyBuffer( m_commandBuffer,
@@ -4841,12 +4554,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
- VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions,
+ VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdCopyBuffer( m_commandBuffer,
static_cast<VkBuffer>( srcBuffer ),
static_cast<VkBuffer>( dstBuffer ),
@@ -4876,14 +4590,15 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdCopyImage( m_commandBuffer,
static_cast<VkImage>( srcImage ),
static_cast<VkImageLayout>( srcImageLayout ),
@@ -4917,15 +4632,16 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions,
- VULKAN_HPP_NAMESPACE::Filter filter,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions,
+ VULKAN_HPP_NAMESPACE::Filter filter,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdBlitImage( m_commandBuffer,
static_cast<VkImage>( srcImage ),
static_cast<VkImageLayout>( srcImageLayout ),
@@ -4943,7 +4659,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
uint32_t regionCount,
const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdCopyBufferToImage( m_commandBuffer,
@@ -4956,14 +4672,14 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdCopyBufferToImage( m_commandBuffer,
static_cast<VkBuffer>( srcBuffer ),
static_cast<VkImage>( dstImage ),
@@ -4979,7 +4695,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
uint32_t regionCount,
const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdCopyImageToBuffer( m_commandBuffer,
@@ -4992,14 +4708,14 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdCopyImageToBuffer( m_commandBuffer,
static_cast<VkImage>( srcImage ),
static_cast<VkImageLayout>( srcImageLayout ),
@@ -5017,21 +4733,19 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdUpdateBuffer( m_commandBuffer,
- static_cast<VkBuffer>( dstBuffer ),
- static_cast<VkDeviceSize>( dstOffset ),
- static_cast<VkDeviceSize>( dataSize ),
- pData );
+ d.vkCmdUpdateBuffer(
+ m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( dataSize ), pData );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename DataType, typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
- ArrayProxy<const DataType> const & data,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const DataType> const & data,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdUpdateBuffer( m_commandBuffer,
static_cast<VkBuffer>( dstBuffer ),
static_cast<VkDeviceSize>( dstOffset ),
@@ -5048,20 +4762,16 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdFillBuffer( m_commandBuffer,
- static_cast<VkBuffer>( dstBuffer ),
- static_cast<VkDeviceSize>( dstOffset ),
- static_cast<VkDeviceSize>( size ),
- data );
+ d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( size ), data );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image,
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
- const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor,
- uint32_t rangeCount,
+ VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image,
+ VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
+ const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor,
+ uint32_t rangeCount,
const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdClearColorImage( m_commandBuffer,
@@ -5074,14 +4784,14 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image,
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
- const VULKAN_HPP_NAMESPACE::ClearColorValue & color,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image,
+ VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
+ const VULKAN_HPP_NAMESPACE::ClearColorValue & color,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdClearColorImage( m_commandBuffer,
static_cast<VkImage>( image ),
static_cast<VkImageLayout>( imageLayout ),
@@ -5092,13 +4802,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image,
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
- const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil,
- uint32_t rangeCount,
- const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image,
+ VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
+ const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil,
+ uint32_t rangeCount,
+ const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdClearDepthStencilImage( m_commandBuffer,
@@ -5112,13 +4821,14 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void
- CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image,
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
- const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,
+ CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image,
+ VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
+ const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdClearDepthStencilImage( m_commandBuffer,
static_cast<VkImage>( image ),
static_cast<VkImageLayout>( imageLayout ),
@@ -5133,7 +4843,7 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments,
uint32_t rectCount,
const VULKAN_HPP_NAMESPACE::ClearRect * pRects,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdClearAttachments( m_commandBuffer,
@@ -5145,12 +4855,12 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::clearAttachments( ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdClearAttachments( m_commandBuffer,
attachments.size(),
reinterpret_cast<const VkClearAttachment *>( attachments.data() ),
@@ -5166,7 +4876,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
uint32_t regionCount,
const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdResolveImage( m_commandBuffer,
@@ -5180,15 +4890,15 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdResolveImage( m_commandBuffer,
static_cast<VkImage>( srcImage ),
static_cast<VkImageLayout>( srcImageLayout ),
@@ -5211,25 +4921,24 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event,
VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::waitEvents( uint32_t eventCount,
- const VULKAN_HPP_NAMESPACE::Event * pEvents,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
- uint32_t memoryBarrierCount,
- const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount,
+ const VULKAN_HPP_NAMESPACE::Event * pEvents,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
+ uint32_t memoryBarrierCount,
+ const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers,
+ uint32_t bufferMemoryBarrierCount,
+ const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,
+ uint32_t imageMemoryBarrierCount,
+ const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdWaitEvents( m_commandBuffer,
@@ -5248,15 +4957,16 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void
- CommandBuffer::waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ CommandBuffer::waitEvents( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdWaitEvents( m_commandBuffer,
events.size(),
reinterpret_cast<const VkEvent *>( events.data() ),
@@ -5272,17 +4982,16 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
- VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
- uint32_t memoryBarrierCount,
- const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
+ VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
+ uint32_t memoryBarrierCount,
+ const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers,
+ uint32_t bufferMemoryBarrierCount,
+ const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,
+ uint32_t imageMemoryBarrierCount,
+ const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdPipelineBarrier( m_commandBuffer,
@@ -5299,16 +5008,17 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier(
- VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
- VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
+ VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdPipelineBarrier( m_commandBuffer,
static_cast<VkPipelineStageFlags>( srcStageMask ),
static_cast<VkPipelineStageFlags>( dstStageMask ),
@@ -5326,17 +5036,14 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t query,
VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBeginQuery(
- m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
+ d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t query,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
@@ -5356,13 +5063,10 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t query,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdWriteTimestamp( m_commandBuffer,
- static_cast<VkPipelineStageFlagBits>( pipelineStage ),
- static_cast<VkQueryPool>( queryPool ),
- query );
+ d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
}
template <typename Dispatch>
@@ -5373,7 +5077,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
VULKAN_HPP_NAMESPACE::DeviceSize stride,
VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdCopyQueryPoolResults( m_commandBuffer,
@@ -5392,26 +5096,22 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t offset,
uint32_t size,
const void * pValues,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdPushConstants( m_commandBuffer,
- static_cast<VkPipelineLayout>( layout ),
- static_cast<VkShaderStageFlags>( stageFlags ),
- offset,
- size,
- pValues );
+ d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename ValuesType, typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout,
- VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
- uint32_t offset,
- ArrayProxy<const ValuesType> const & values,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
+ uint32_t offset,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const ValuesType> const & values,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdPushConstants( m_commandBuffer,
static_cast<VkPipelineLayout>( layout ),
static_cast<VkShaderStageFlags>( stageFlags ),
@@ -5422,34 +5122,28 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
- VULKAN_HPP_NAMESPACE::SubpassContents contents,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
+ VULKAN_HPP_NAMESPACE::SubpassContents contents,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBeginRenderPass( m_commandBuffer,
- reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ),
- static_cast<VkSubpassContents>( contents ) );
+ d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
- VULKAN_HPP_NAMESPACE::SubpassContents contents,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
+ VULKAN_HPP_NAMESPACE::SubpassContents contents,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBeginRenderPass( m_commandBuffer,
- reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ),
- static_cast<VkSubpassContents>( contents ) );
+
+ d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
@@ -5465,30 +5159,27 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount,
const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdExecuteCommands(
- m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
+ d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::executeCommands( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdExecuteCommands(
- m_commandBuffer, commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
+
+ d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_VERSION_1_1 ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t * pApiVersion,
- Dispatch const & d ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t * pApiVersion, Dispatch const & d ) VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkEnumerateInstanceVersion( pApiVersion ) );
@@ -5496,101 +5187,93 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type enumerateInstanceVersion( Dispatch const & d )
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type enumerateInstanceVersion( Dispatch const & d )
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
uint32_t apiVersion;
- Result result = static_cast<Result>( d.vkEnumerateInstanceVersion( &apiVersion ) );
- return createResultValue( result, apiVersion, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" );
+ VkResult result = d.vkEnumerateInstanceVersion( &apiVersion );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), apiVersion );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
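    // [Illustrative sketch; not part of the upstream diff] Caller-side view of the
    // enhanced-mode wrapper above: with exceptions enabled, ResultValueType<uint32_t>::type
    // collapses to plain uint32_t, so the call returns the packed version directly and
    // throws vk::SystemError on failure instead of handing back a Result.
#if 0  // example only
#  include <vulkan/vulkan.hpp>
#  include <cstdio>

int main()
{
  uint32_t apiVersion = vk::enumerateInstanceVersion();
  std::printf( "instance supports Vulkan %u.%u.%u\n",
               VK_API_VERSION_MAJOR( apiVersion ),
               VK_API_VERSION_MINOR( apiVersion ),
               VK_API_VERSION_PATCH( apiVersion ) );
}
#endif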
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::bindBufferMemory2( uint32_t bindInfoCount,
- const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount,
+ const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkBindBufferMemory2(
- m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
+ return static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::bindBufferMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,
- Dispatch const & d ) const
+ Device::bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkBindBufferMemory2(
- m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" );
+
+ VkResult result = d.vkBindBufferMemory2( m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::bindImageMemory2( uint32_t bindInfoCount,
- const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount,
+ const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
+ return static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::bindImageMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos,
- Dispatch const & d ) const
+ Device::bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkBindImageMemory2(
- m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" );
+
+ VkResult result = d.vkBindImageMemory2( m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::getGroupPeerMemoryFeatures( uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t heapIndex,
+ uint32_t localDeviceIndex,
+ uint32_t remoteDeviceIndex,
+ VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDeviceGroupPeerMemoryFeatures( m_device,
- heapIndex,
- localDeviceIndex,
- remoteDeviceIndex,
- reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
+ d.vkGetDeviceGroupPeerMemoryFeatures(
+ m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags
- Device::getGroupPeerMemoryFeatures( uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeatures(
+ uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
- d.vkGetDeviceGroupPeerMemoryFeatures( m_device,
- heapIndex,
- localDeviceIndex,
- remoteDeviceIndex,
- reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
+ d.vkGetDeviceGroupPeerMemoryFeatures(
+ m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
+
return peerMemoryFeatures;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask );
@@ -5610,251 +5293,229 @@ namespace VULKAN_HPP_NAMESPACE
}
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups(
- uint32_t * pPhysicalDeviceGroupCount,
- VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Instance::enumeratePhysicalDeviceGroups( uint32_t * pPhysicalDeviceGroupCount,
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups(
- m_instance,
- pPhysicalDeviceGroupCount,
- reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
+ m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
Instance::enumeratePhysicalDeviceGroups( Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
- uint32_t physicalDeviceGroupCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
+ uint32_t physicalDeviceGroupCount;
+ VkResult result;
do
{
- result =
- static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
+ result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
{
physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
- result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups(
- m_instance,
- &physicalDeviceGroupCount,
- reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
+ result = d.vkEnumeratePhysicalDeviceGroups(
+ m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
+ VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+ if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
{
- VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
- if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
- {
- physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
- }
+ physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
}
- return createResultValue(
- result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
}
- template <
- typename PhysicalDeviceGroupPropertiesAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value, int>::type>
+ template <typename PhysicalDeviceGroupPropertiesAllocator,
+ typename Dispatch,
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceGroupProperties>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
- Instance::enumeratePhysicalDeviceGroups(
- PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
+ Instance::enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties(
+
+ std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties(
physicalDeviceGroupPropertiesAllocator );
uint32_t physicalDeviceGroupCount;
- Result result;
+ VkResult result;
do
{
- result =
- static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
+ result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
{
physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
- result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups(
- m_instance,
- &physicalDeviceGroupCount,
- reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
+ result = d.vkEnumeratePhysicalDeviceGroups(
+ m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
+ VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+ if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
{
- VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
- if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
- {
- physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
- }
+ physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
}
- return createResultValue(
- result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
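    // [Illustrative sketch; not part of the upstream diff] The rewritten enumeration above
    // loops on the raw VkResult (retrying on VK_INCOMPLETE) and trims the vector when the
    // final count is smaller than the reserved size; a caller still just receives the
    // resized vector (or an exception). Hypothetical usage, assuming a Vulkan 1.1+ loader:
#if 0  // example only
#  include <vulkan/vulkan.hpp>
#  include <cstdio>

int main()
{
  vk::ApplicationInfo    appInfo( "group-query-example", 1, nullptr, 0, VK_API_VERSION_1_1 );
  vk::InstanceCreateInfo createInfo( {}, &appInfo );
  vk::Instance           instance = vk::createInstance( createInfo );

  std::vector<vk::PhysicalDeviceGroupProperties> groups = instance.enumeratePhysicalDeviceGroups();
  for ( vk::PhysicalDeviceGroupProperties const & g : groups )
    std::printf( "device group with %u physical device(s)\n", g.physicalDeviceCount );

  instance.destroy();
}
#endif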
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetImageMemoryRequirements2( m_device,
- reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ),
- reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+ d.vkGetImageMemoryRequirements2(
+ m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
- Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
- d.vkGetImageMemoryRequirements2( m_device,
- reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ d.vkGetImageMemoryRequirements2(
+ m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return memoryRequirements;
}
template <typename X, typename Y, typename... Z, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
- structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
- d.vkGetImageMemoryRequirements2( m_device,
- reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+ d.vkGetImageMemoryRequirements2(
+ m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
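    // [Illustrative sketch; not part of the upstream diff] The StructureChain overload above
    // fills every structure in the chain through a single vkGetImageMemoryRequirements2 call.
    // A hypothetical caller querying dedicated-allocation info (the handles `device` and
    // `image` are assumed to already be valid) could look like:
#if 0  // example only
auto chain = device.getImageMemoryRequirements2<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>(
  vk::ImageMemoryRequirementsInfo2( image ) );
vk::MemoryRequirements2 const &         requirements = chain.get<vk::MemoryRequirements2>();
vk::MemoryDedicatedRequirements const & dedicated    = chain.get<vk::MemoryDedicatedRequirements>();
// dedicated.prefersDedicatedAllocation / requiresDedicatedAllocation then drive the allocation strategy.
#endif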
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetBufferMemoryRequirements2( m_device,
- reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ),
- reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+ d.vkGetBufferMemoryRequirements2(
+ m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
- Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
- d.vkGetBufferMemoryRequirements2( m_device,
- reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ d.vkGetBufferMemoryRequirements2(
+ m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return memoryRequirements;
}
template <typename X, typename Y, typename... Z, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
- structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
- d.vkGetBufferMemoryRequirements2( m_device,
- reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+ d.vkGetBufferMemoryRequirements2(
+ m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2(
- const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,
- uint32_t * pSparseMemoryRequirementCount,
- VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,
+ uint32_t * pSparseMemoryRequirementCount,
+ VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetImageSparseMemoryRequirements2(
- m_device,
- reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ),
- pSparseMemoryRequirementCount,
- reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
+ d.vkGetImageSparseMemoryRequirements2( m_device,
+ reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ),
+ pSparseMemoryRequirementCount,
+ reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
- VULKAN_HPP_NODISCARD
- VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
- Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
+ Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
- uint32_t sparseMemoryRequirementCount;
+
+ std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
+ uint32_t sparseMemoryRequirementCount;
+ d.vkGetImageSparseMemoryRequirements2(
+ m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
d.vkGetImageSparseMemoryRequirements2( m_device,
reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
&sparseMemoryRequirementCount,
- nullptr );
- sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
- d.vkGetImageSparseMemoryRequirements2(
- m_device,
- reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
- &sparseMemoryRequirementCount,
- reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+ reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+
VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+ if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+ {
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+ }
return sparseMemoryRequirements;
}
- template <
- typename SparseImageMemoryRequirements2Allocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value, int>::type>
- VULKAN_HPP_NODISCARD
- VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
- Device::getImageSparseMemoryRequirements2(
- const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,
- SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
- Dispatch const & d ) const
+ template <typename SparseImageMemoryRequirements2Allocator,
+ typename Dispatch,
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements2>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
+ Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,
+ SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
+
+ std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
sparseImageMemoryRequirements2Allocator );
uint32_t sparseMemoryRequirementCount;
+ d.vkGetImageSparseMemoryRequirements2(
+ m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
d.vkGetImageSparseMemoryRequirements2( m_device,
reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
&sparseMemoryRequirementCount,
- nullptr );
- sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
- d.vkGetImageSparseMemoryRequirements2(
- m_device,
- reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
- &sparseMemoryRequirementCount,
- reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+ reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+
VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+ if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+ {
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+ }
return sparseMemoryRequirements;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
@@ -5866,31 +5527,32 @@ namespace VULKAN_HPP_NAMESPACE
PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
+
return features;
}
template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features =
- structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
+
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceProperties2( m_physicalDevice,
- reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
+ d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -5899,163 +5561,160 @@ namespace VULKAN_HPP_NAMESPACE
PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
- d.vkGetPhysicalDeviceProperties2( m_physicalDevice,
- reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+ d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+
return properties;
}
template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties =
- structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
- d.vkGetPhysicalDeviceProperties2( m_physicalDevice,
- reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
+ d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceFormatProperties2(
- m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
+ d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2
- PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
- d.vkGetPhysicalDeviceFormatProperties2(
- m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+ d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+
return formatProperties;
}
template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties =
- structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
- d.vkGetPhysicalDeviceFormatProperties2(
- m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+ VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
+ d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
- VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ),
- reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ),
+ reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type
- PhysicalDevice::getImageFormatProperties2(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type
+ PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
- Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
- reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
- return createResultValue(
- result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
+ VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
+ reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
}
template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
- PhysicalDevice::getImageFormatProperties2(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
+ PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties =
- structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
- Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
- reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
- return createResultValue(
- result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
+ VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
+ reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- PhysicalDevice::getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount,
- VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount,
+ VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkGetPhysicalDeviceQueueFamilyProperties2(
- m_physicalDevice,
- pQueueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
+ m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename QueueFamilyProperties2Allocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
- uint32_t queueFamilyPropertyCount;
+
+ std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
+ uint32_t queueFamilyPropertyCount;
d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
queueFamilyProperties.resize( queueFamilyPropertyCount );
d.vkGetPhysicalDeviceQueueFamilyProperties2(
- m_physicalDevice,
- &queueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+ m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+
VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+ if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+ {
+ queueFamilyProperties.resize( queueFamilyPropertyCount );
+ }
return queueFamilyProperties;
}
template <typename QueueFamilyProperties2Allocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties2>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
- PhysicalDevice::getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator,
- Dispatch const & d ) const
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, QueueFamilyProperties2>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
+ PhysicalDevice::getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties(
- queueFamilyProperties2Allocator );
- uint32_t queueFamilyPropertyCount;
+
+ std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator );
+ uint32_t queueFamilyPropertyCount;
d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
queueFamilyProperties.resize( queueFamilyPropertyCount );
d.vkGetPhysicalDeviceQueueFamilyProperties2(
- m_physicalDevice,
- &queueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+ m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+
VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+ if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+ {
+ queueFamilyProperties.resize( queueFamilyPropertyCount );
+ }
return queueFamilyProperties;
}
@@ -6064,164 +5723,167 @@ namespace VULKAN_HPP_NAMESPACE
PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- uint32_t queueFamilyPropertyCount;
+
+ std::vector<StructureChain, StructureChainAllocator> structureChains;
+ std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
+ uint32_t queueFamilyPropertyCount;
d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
- std::vector<StructureChain, StructureChainAllocator> returnVector( queueFamilyPropertyCount );
- std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties( queueFamilyPropertyCount );
+ structureChains.resize( queueFamilyPropertyCount );
+ queueFamilyProperties.resize( queueFamilyPropertyCount );
for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
{
- queueFamilyProperties[i].pNext =
- returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+ queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
}
d.vkGetPhysicalDeviceQueueFamilyProperties2(
- m_physicalDevice,
- &queueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+ m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+
VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+ if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+ {
+ structureChains.resize( queueFamilyPropertyCount );
+ }
for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
{
- returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
+ structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
}
- return returnVector;
+ return structureChains;
}
template <typename StructureChain,
typename StructureChainAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, StructureChain>::value, int>::type>
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, StructureChain>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
- PhysicalDevice::getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator,
- Dispatch const & d ) const
+ PhysicalDevice::getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- uint32_t queueFamilyPropertyCount;
+
+ std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator );
+ std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
+ uint32_t queueFamilyPropertyCount;
d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
- std::vector<StructureChain, StructureChainAllocator> returnVector( queueFamilyPropertyCount,
- structureChainAllocator );
- std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties( queueFamilyPropertyCount );
+ structureChains.resize( queueFamilyPropertyCount );
+ queueFamilyProperties.resize( queueFamilyPropertyCount );
for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
{
- queueFamilyProperties[i].pNext =
- returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+ queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
}
d.vkGetPhysicalDeviceQueueFamilyProperties2(
- m_physicalDevice,
- &queueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+ m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+
VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+ if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+ {
+ structureChains.resize( queueFamilyPropertyCount );
+ }
for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
{
- returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
+ structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
}
- return returnVector;
+ return structureChains;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceMemoryProperties2(
- m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
+ d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
- PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
- d.vkGetPhysicalDeviceMemoryProperties2(
- m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+ d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+
return memoryProperties;
}
template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties =
structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
- d.vkGetPhysicalDeviceMemoryProperties2(
- m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+ d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
+ uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceSparseImageFormatProperties2(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ),
- pPropertyCount,
- reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
+ d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ),
+ pPropertyCount,
+ reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename SparseImageFormatProperties2Allocator, typename Dispatch>
- VULKAN_HPP_NODISCARD
- VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
- PhysicalDevice::getSparseImageFormatProperties2(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
+ PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
- uint32_t propertyCount;
+
+ std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
+ uint32_t propertyCount;
d.vkGetPhysicalDeviceSparseImageFormatProperties2(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
- &propertyCount,
- nullptr );
+ m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
properties.resize( propertyCount );
- d.vkGetPhysicalDeviceSparseImageFormatProperties2(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
- &propertyCount,
- reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+ d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
+ &propertyCount,
+ reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+
VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
+ {
+ properties.resize( propertyCount );
+ }
return properties;
}
- template <
- typename SparseImageFormatProperties2Allocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties2>::value, int>::type>
- VULKAN_HPP_NODISCARD
- VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
- PhysicalDevice::getSparseImageFormatProperties2(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
- SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,
- Dispatch const & d ) const
+ template <typename SparseImageFormatProperties2Allocator,
+ typename Dispatch,
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, SparseImageFormatProperties2>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
+ PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
+ SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties(
- sparseImageFormatProperties2Allocator );
- uint32_t propertyCount;
+
+ std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator );
+ uint32_t propertyCount;
d.vkGetPhysicalDeviceSparseImageFormatProperties2(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
- &propertyCount,
- nullptr );
+ m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
properties.resize( propertyCount );
- d.vkGetPhysicalDeviceSparseImageFormatProperties2(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
- &propertyCount,
- reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+ d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
+ &propertyCount,
+ reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+
VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
+ {
+ properties.resize( propertyCount );
+ }
return properties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -6229,11 +5891,10 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkTrimCommandPool(
- m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+ d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
}
template <typename Dispatch>
@@ -6242,20 +5903,19 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDeviceQueue2(
- m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( pQueueInfo ), reinterpret_cast<VkQueue *>( pQueue ) );
+ d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( pQueueInfo ), reinterpret_cast<VkQueue *>( pQueue ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue
- Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & queueInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & queueInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Queue queue;
- d.vkGetDeviceQueue2(
- m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ), reinterpret_cast<VkQueue *>( &queue ) );
+ d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ), reinterpret_cast<VkQueue *>( &queue ) );
+
return queue;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -6264,254 +5924,238 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateSamplerYcbcrConversion( m_device,
- reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
+ return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device,
+ reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
- Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
+ Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
- Result result = static_cast<Result>(
- d.vkCreateSamplerYcbcrConversion( m_device,
- reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
- return createResultValue(
- result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" );
+ VkResult result = d.vkCreateSamplerYcbcrConversion(
+ m_device,
+ reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), ycbcrConversion );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
- Device::createSamplerYcbcrConversionUnique(
- const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
+ Device::createSamplerYcbcrConversionUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
- Result result = static_cast<Result>(
- d.vkCreateSamplerYcbcrConversion( m_device,
- reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>(
- result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique", deleter );
+ VkResult result = d.vkCreateSamplerYcbcrConversion(
+ m_device,
+ reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>( ycbcrConversion, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroySamplerYcbcrConversion( m_device,
- static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroySamplerYcbcrConversion(
+ m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroySamplerYcbcrConversion(
m_device,
static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroySamplerYcbcrConversion( m_device,
- static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroySamplerYcbcrConversion(
+ m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroySamplerYcbcrConversion(
m_device,
static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplate(
- const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate(
- m_device,
- reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
+ return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device,
+ reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type
- Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type
+ Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
- Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate(
+ VkResult result = d.vkCreateDescriptorUpdateTemplate(
m_device,
reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
- return createResultValue(
- result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorUpdateTemplate );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type
- Device::createDescriptorUpdateTemplateUnique(
- const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type
+ Device::createDescriptorUpdateTemplateUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
- Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate(
+ VkResult result = d.vkCreateDescriptorUpdateTemplate(
m_device,
reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>(
- result,
- descriptorUpdateTemplate,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique",
- deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>(
+ descriptorUpdateTemplate, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyDescriptorUpdateTemplate( m_device,
- static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyDescriptorUpdateTemplate(
+ m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyDescriptorUpdateTemplate(
m_device,
static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyDescriptorUpdateTemplate( m_device,
- static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyDescriptorUpdateTemplate(
+ m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyDescriptorUpdateTemplate(
m_device,
static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- const void * pData,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ const void * pData,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkUpdateDescriptorSetWithTemplate( m_device,
- static_cast<VkDescriptorSet>( descriptorSet ),
- static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
- pData );
+ d.vkUpdateDescriptorSetWithTemplate(
+ m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename DataType, typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- DataType const & data,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ DataType const & data,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkUpdateDescriptorSetWithTemplate( m_device,
static_cast<VkDescriptorSet>( descriptorSet ),
static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
@@ -6520,133 +6164,127 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
- VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
+ VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceExternalBufferProperties(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ),
- reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
+ d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ),
+ reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties
- PhysicalDevice::getExternalBufferProperties(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
- d.vkGetPhysicalDeviceExternalBufferProperties(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
- reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
+ d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
+ reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
+
return externalBufferProperties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
- VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
+ VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceExternalFenceProperties(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ),
- reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
+ d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ),
+ reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties
- PhysicalDevice::getExternalFenceProperties(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
- d.vkGetPhysicalDeviceExternalFenceProperties(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
- reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
+ d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
+ reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
+
return externalFenceProperties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphoreProperties(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceExternalSemaphoreProperties(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ),
- reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
+ d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ),
+ reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties
- PhysicalDevice::getExternalSemaphoreProperties(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
- d.vkGetPhysicalDeviceExternalSemaphoreProperties(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ),
- reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
+ d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ),
+ reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
+
return externalSemaphoreProperties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
- VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDescriptorSetLayoutSupport( m_device,
- reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ),
- reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
+ d.vkGetDescriptorSetLayoutSupport(
+ m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
- Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
- d.vkGetDescriptorSetLayoutSupport( m_device,
- reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
- reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+ d.vkGetDescriptorSetLayoutSupport(
+ m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+
return support;
}
template <typename X, typename Y, typename... Z, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support =
- structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
- d.vkGetDescriptorSetLayoutSupport( m_device,
- reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
- reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
+ d.vkGetDescriptorSetLayoutSupport(
+ m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -6660,7 +6298,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdDrawIndirectCount( m_commandBuffer,
@@ -6679,7 +6317,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdDrawIndexedIndirectCount( m_commandBuffer,
@@ -6692,11 +6330,10 @@ namespace VULKAN_HPP_NAMESPACE
}
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCreateRenderPass2( m_device,
@@ -6707,99 +6344,97 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
- Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
+ Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::RenderPass renderPass;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateRenderPass2( m_device,
reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
- return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkRenderPass *>( &renderPass ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), renderPass );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
- Device::createRenderPass2Unique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
+ Device::createRenderPass2Unique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::RenderPass renderPass;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateRenderPass2( m_device,
reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>(
- result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkRenderPass *>( &renderPass ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
- const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
+ const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBeginRenderPass2( m_commandBuffer,
- reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ),
- reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
+ d.vkCmdBeginRenderPass2(
+ m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
- const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
+ const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBeginRenderPass2( m_commandBuffer,
- reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ),
- reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
+
+ d.vkCmdBeginRenderPass2(
+ m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdNextSubpass2( m_commandBuffer,
- reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ),
- reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
+ d.vkCmdNextSubpass2(
+ m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdNextSubpass2( m_commandBuffer,
- reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ),
- reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+
+ d.vkCmdNextSubpass2(
+ m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
@@ -6808,113 +6443,117 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkResetQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
}
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue(
- VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t * pValue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+ uint64_t * pValue,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
+ return static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type
- Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
uint64_t value;
- Result result =
- static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
- return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" );
+ VkResult result = d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), value );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,
- uint64_t timeout,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,
+ uint64_t timeout,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
+ return static_cast<Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores(
- const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
+ Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>(
- d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) );
- return createResultValue( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
+
+ VkResult result = d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore(
- const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
+ return static_cast<Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
+ Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>(
- d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" );
+
+ VkResult result = d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<DeviceAddress>(
- d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
+ return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+
+ VkDeviceAddress result = d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress(
- const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
@@ -6922,257 +6561,246 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress(
- const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+
+ uint64_t result = d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+
+ return result;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE uint64_t
- Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return d.vkGetDeviceMemoryOpaqueCaptureAddress(
- m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
+ return d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE uint64_t
- Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return d.vkGetDeviceMemoryOpaqueCaptureAddress(
- m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
+
+ uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
+
+ return result;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_VERSION_1_3 ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getToolProperties( uint32_t * pToolCount,
- VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolProperties( uint32_t * pToolCount,
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDeviceToolProperties(
- m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) );
+ return static_cast<Result>(
+ d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type
PhysicalDevice::getToolProperties( Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties;
- uint32_t toolCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties;
+ uint32_t toolCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && toolCount )
+ result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && toolCount )
{
toolProperties.resize( toolCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceToolProperties(
- m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) );
+ result =
+ d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" );
+ VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
+ if ( toolCount < toolProperties.size() )
{
- VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
- if ( toolCount < toolProperties.size() )
- {
- toolProperties.resize( toolCount );
- }
+ toolProperties.resize( toolCount );
}
- return createResultValue(
- result, toolProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties );
}
- template <
- typename PhysicalDeviceToolPropertiesAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceToolProperties>::value, int>::type>
+ template <typename PhysicalDeviceToolPropertiesAllocator,
+ typename Dispatch,
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceToolProperties>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type
- PhysicalDevice::getToolProperties( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator,
- Dispatch const & d ) const
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type
+ PhysicalDevice::getToolProperties( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties(
+
+ std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties(
physicalDeviceToolPropertiesAllocator );
uint32_t toolCount;
- Result result;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && toolCount )
+ result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && toolCount )
{
toolProperties.resize( toolCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceToolProperties(
- m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) );
+ result =
+ d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" );
+ VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
+ if ( toolCount < toolProperties.size() )
{
- VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
- if ( toolCount < toolProperties.size() )
- {
- toolProperties.resize( toolCount );
- }
+ toolProperties.resize( toolCount );
}
- return createResultValue(
- result, toolProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreatePrivateDataSlot( m_device,
- reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) );
+ return static_cast<Result>( d.vkCreatePrivateDataSlot( m_device,
+ reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::type
- Device::createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::type
+ Device::createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreatePrivateDataSlot( m_device,
reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) );
- return createResultValue( result, privateDataSlot, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlot" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlot" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), privateDataSlot );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>>::type
- Device::createPrivateDataSlotUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>>::type
+ Device::createPrivateDataSlotUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreatePrivateDataSlot( m_device,
reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>(
- result, privateDataSlot, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>( privateDataSlot, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
+ VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyPrivateDataSlot( m_device,
- static_cast<VkPrivateDataSlot>( privateDataSlot ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyPrivateDataSlot( m_device,
- static_cast<VkPrivateDataSlot>( privateDataSlot ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+
+ d.vkDestroyPrivateDataSlot(
+ m_device,
+ static_cast<VkPrivateDataSlot>( privateDataSlot ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyPrivateDataSlot( m_device,
- static_cast<VkPrivateDataSlot>( privateDataSlot ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyPrivateDataSlot( m_device,
- static_cast<VkPrivateDataSlot>( privateDataSlot ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+
+ d.vkDestroyPrivateDataSlot(
+ m_device,
+ static_cast<VkPrivateDataSlot>( privateDataSlot ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType,
- uint64_t objectHandle,
- VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
- uint64_t data,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType,
+ uint64_t objectHandle,
+ VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
+ uint64_t data,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkSetPrivateData( m_device,
- static_cast<VkObjectType>( objectType ),
- objectHandle,
- static_cast<VkPrivateDataSlot>( privateDataSlot ),
- data ) );
+ return static_cast<Result>(
+ d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) );
}
#else
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType,
- uint64_t objectHandle,
- VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
- uint64_t data,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType,
+ uint64_t objectHandle,
+ VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
+ uint64_t data,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkSetPrivateData( m_device,
- static_cast<VkObjectType>( objectType ),
- objectHandle,
- static_cast<VkPrivateDataSlot>( privateDataSlot ),
- data ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" );
+
+ VkResult result =
+ d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -7184,28 +6812,21 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPrivateData( m_device,
- static_cast<VkObjectType>( objectType ),
- objectHandle,
- static_cast<VkPrivateDataSlot>( privateDataSlot ),
- pData );
+ d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t
- Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType,
- uint64_t objectHandle,
- VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType,
+ uint64_t objectHandle,
+ VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
uint64_t data;
- d.vkGetPrivateData( m_device,
- static_cast<VkObjectType>( objectType ),
- objectHandle,
- static_cast<VkPrivateDataSlot>( privateDataSlot ),
- &data );
+ d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data );
+
return data;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -7213,54 +6834,49 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event,
const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetEvent2(
- m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
+ d.vkCmdSetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event,
const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetEvent2(
- m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
+
+ d.vkCmdSetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::resetEvent2( VULKAN_HPP_NAMESPACE::Event event,
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdResetEvent2(
- m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
+ d.vkCmdResetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( uint32_t eventCount,
const VULKAN_HPP_NAMESPACE::Event * pEvents,
const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdWaitEvents2( m_commandBuffer,
- eventCount,
- reinterpret_cast<const VkEvent *>( pEvents ),
- reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) );
+ d.vkCmdWaitEvents2(
+ m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::waitEvents2( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
@@ -7268,8 +6884,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
if ( events.size() != dependencyInfos.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::waitEvents2: events.size() != dependencyInfos.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2: events.size() != dependencyInfos.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
@@ -7282,7 +6897,7 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
@@ -7291,9 +6906,10 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -7302,43 +6918,39 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t query,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdWriteTimestamp2(
- m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
+ d.vkCmdWriteTimestamp2( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
}
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2( uint32_t submitCount,
const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits,
VULKAN_HPP_NAMESPACE::Fence fence,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkQueueSubmit2(
- m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) );
+ return static_cast<Result>( d.vkQueueSubmit2( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Queue::submit2( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,
- VULKAN_HPP_NAMESPACE::Fence fence,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit2(
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkQueueSubmit2( m_queue,
- submits.size(),
- reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ),
- static_cast<VkFence>( fence ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" );
+
+ VkResult result = d.vkQueueSubmit2( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) );
@@ -7347,16 +6959,16 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) );
@@ -7364,61 +6976,54 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyBufferToImage2( m_commandBuffer,
- reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) );
+ d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyBufferToImage2( m_commandBuffer,
- reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
+
+ d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyImageToBuffer2( m_commandBuffer,
- reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) );
+ d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyImageToBuffer2( m_commandBuffer,
- reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
+
+ d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) );
@@ -7426,18 +7031,17 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) );
@@ -7446,16 +7050,17 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) );
@@ -7464,9 +7069,10 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -7479,16 +7085,14 @@ namespace VULKAN_HPP_NAMESPACE
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetCullMode( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetFrontFace( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
@@ -7496,7 +7100,7 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetPrimitiveTopology( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
@@ -7505,7 +7109,7 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( uint32_t viewportCount,
const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetViewportWithCount( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
@@ -7513,20 +7117,18 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::setViewportWithCount( ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetViewportWithCount(
- m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
+
+ d.vkCmdSetViewportWithCount( m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCount( uint32_t scissorCount,
- const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setScissorWithCount( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetScissorWithCount( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
@@ -7534,13 +7136,12 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::setScissorWithCount( ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetScissorWithCount(
- m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
+
+ d.vkCmdSetScissorWithCount( m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -7551,7 +7152,7 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdBindVertexBuffers2( m_commandBuffer,
@@ -7565,13 +7166,12 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::bindVertexBuffers2( uint32_t firstBinding,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
@@ -7581,18 +7181,15 @@ namespace VULKAN_HPP_NAMESPACE
# else
if ( buffers.size() != offsets.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" );
}
if ( !sizes.empty() && buffers.size() != sizes.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::bindVertexBuffers2: buffers.size() != sizes.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != sizes.size()" );
}
if ( !strides.empty() && buffers.size() != strides.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::bindVertexBuffers2: buffers.size() != strides.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != strides.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
@@ -7607,24 +7204,21 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetDepthTestEnable( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetDepthWriteEnable( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetDepthCompareOp( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
@@ -7632,15 +7226,14 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetDepthBoundsTestEnable( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetStencilTestEnable( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
@@ -7652,7 +7245,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::StencilOp passOp,
VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
VULKAN_HPP_NAMESPACE::CompareOp compareOp,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetStencilOp( m_commandBuffer,
@@ -7664,17 +7257,15 @@ namespace VULKAN_HPP_NAMESPACE
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetRasterizerDiscardEnable( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetDepthBiasEnable( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) );
@@ -7682,162 +7273,155 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetPrimitiveRestartEnable( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo,
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDeviceBufferMemoryRequirements( m_device,
- reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ),
- reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+ d.vkGetDeviceBufferMemoryRequirements(
+ m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
- Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
- d.vkGetDeviceBufferMemoryRequirements( m_device,
- reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ d.vkGetDeviceBufferMemoryRequirements(
+ m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return memoryRequirements;
}
template <typename X, typename Y, typename... Z, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
- structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
- d.vkGetDeviceBufferMemoryRequirements( m_device,
- reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+ d.vkGetDeviceBufferMemoryRequirements(
+ m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDeviceImageMemoryRequirements( m_device,
- reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ),
- reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+ d.vkGetDeviceImageMemoryRequirements(
+ m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
- Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
- d.vkGetDeviceImageMemoryRequirements( m_device,
- reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ d.vkGetDeviceImageMemoryRequirements(
+ m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return memoryRequirements;
}
template <typename X, typename Y, typename... Z, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
- structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
- d.vkGetDeviceImageMemoryRequirements( m_device,
- reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+ d.vkGetDeviceImageMemoryRequirements(
+ m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements(
- const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
- uint32_t * pSparseMemoryRequirementCount,
- VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
+ uint32_t * pSparseMemoryRequirementCount,
+ VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDeviceImageSparseMemoryRequirements(
- m_device,
- reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ),
- pSparseMemoryRequirementCount,
- reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
+ d.vkGetDeviceImageSparseMemoryRequirements( m_device,
+ reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ),
+ pSparseMemoryRequirementCount,
+ reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
- VULKAN_HPP_NODISCARD
- VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
- Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
+ Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
- uint32_t sparseMemoryRequirementCount;
+
+ std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
+ uint32_t sparseMemoryRequirementCount;
+ d.vkGetDeviceImageSparseMemoryRequirements(
+ m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
d.vkGetDeviceImageSparseMemoryRequirements( m_device,
reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
&sparseMemoryRequirementCount,
- nullptr );
- sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
- d.vkGetDeviceImageSparseMemoryRequirements(
- m_device,
- reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
- &sparseMemoryRequirementCount,
- reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+ reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+
VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+ if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+ {
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+ }
return sparseMemoryRequirements;
}
- template <
- typename SparseImageMemoryRequirements2Allocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value, int>::type>
- VULKAN_HPP_NODISCARD
- VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
- Device::getImageSparseMemoryRequirements(
- const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
- SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
- Dispatch const & d ) const
+ template <typename SparseImageMemoryRequirements2Allocator,
+ typename Dispatch,
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements2>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
+ Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
+ SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
+
+ std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
sparseImageMemoryRequirements2Allocator );
uint32_t sparseMemoryRequirementCount;
+ d.vkGetDeviceImageSparseMemoryRequirements(
+ m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
d.vkGetDeviceImageSparseMemoryRequirements( m_device,
reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
&sparseMemoryRequirementCount,
- nullptr );
- sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
- d.vkGetDeviceImageSparseMemoryRequirements(
- m_device,
- reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
- &sparseMemoryRequirementCount,
- reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+ reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+
VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+ if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+ {
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+ }
return sparseMemoryRequirements;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -7847,353 +7431,305 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroySurfaceKHR(
- m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroySurfaceKHR( m_instance,
static_cast<VkSurfaceKHR>( surface ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroySurfaceKHR(
- m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroySurfaceKHR( m_instance,
static_cast<VkSurfaceKHR>( surface ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex,
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- VULKAN_HPP_NAMESPACE::Bool32 * pSupported,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ VULKAN_HPP_NAMESPACE::Bool32 * pSupported,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice,
- queueFamilyIndex,
- static_cast<VkSurfaceKHR>( surface ),
- reinterpret_cast<VkBool32 *>( pSupported ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR(
+ m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( pSupported ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type
- PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex,
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type
+ PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Bool32 supported;
- Result result =
- static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice,
- queueFamilyIndex,
- static_cast<VkSurfaceKHR>( surface ),
- reinterpret_cast<VkBool32 *>( &supported ) ) );
- return createResultValue( result, supported, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" );
+ VkResult result = d.vkGetPhysicalDeviceSurfaceSupportKHR(
+ m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( &supported ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), supported );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
- m_physicalDevice,
- static_cast<VkSurfaceKHR>( surface ),
- reinterpret_cast<VkSurfaceCapabilitiesKHR *>( pSurfaceCapabilities ) ) );
+ m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( pSurfaceCapabilities ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type
- PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type
+ PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities;
- Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
- m_physicalDevice,
- static_cast<VkSurfaceKHR>( surface ),
- reinterpret_cast<VkSurfaceCapabilitiesKHR *>( &surfaceCapabilities ) ) );
- return createResultValue(
- result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" );
+ VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
+ m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( &surfaceCapabilities ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceCapabilities );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- uint32_t * pSurfaceFormatCount,
- VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ uint32_t * pSurfaceFormatCount,
+ VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice,
- static_cast<VkSurfaceKHR>( surface ),
- pSurfaceFormatCount,
- reinterpret_cast<VkSurfaceFormatKHR *>( pSurfaceFormats ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR(
+ m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( pSurfaceFormats ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename SurfaceFormatKHRAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type
- PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type
+ PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats;
- uint32_t surfaceFormatCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats;
+ uint32_t surfaceFormatCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR(
- m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && surfaceFormatCount )
+ result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
{
surfaceFormats.resize( surfaceFormatCount );
- result = static_cast<Result>(
- d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice,
- static_cast<VkSurfaceKHR>( surface ),
- &surfaceFormatCount,
- reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) ) );
+ result = d.vkGetPhysicalDeviceSurfaceFormatsKHR(
+ m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
+ VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+ if ( surfaceFormatCount < surfaceFormats.size() )
{
- VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
- if ( surfaceFormatCount < surfaceFormats.size() )
- {
- surfaceFormats.resize( surfaceFormatCount );
- }
+ surfaceFormats.resize( surfaceFormatCount );
}
- return createResultValue(
- result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
}
template <typename SurfaceFormatKHRAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, SurfaceFormatKHR>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type
- PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, SurfaceFormatKHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type
+ PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats( surfaceFormatKHRAllocator );
- uint32_t surfaceFormatCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats( surfaceFormatKHRAllocator );
+ uint32_t surfaceFormatCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR(
- m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && surfaceFormatCount )
+ result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
{
surfaceFormats.resize( surfaceFormatCount );
- result = static_cast<Result>(
- d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice,
- static_cast<VkSurfaceKHR>( surface ),
- &surfaceFormatCount,
- reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) ) );
+ result = d.vkGetPhysicalDeviceSurfaceFormatsKHR(
+ m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
+ VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+ if ( surfaceFormatCount < surfaceFormats.size() )
{
- VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
- if ( surfaceFormatCount < surfaceFormats.size() )
- {
- surfaceFormats.resize( surfaceFormatCount );
- }
+ surfaceFormats.resize( surfaceFormatCount );
}
- return createResultValue(
- result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- uint32_t * pPresentModeCount,
- VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ uint32_t * pPresentModeCount,
+ VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice,
- static_cast<VkSurfaceKHR>( surface ),
- pPresentModeCount,
- reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR(
+ m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename PresentModeKHRAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
- PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type
+ PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes;
- uint32_t presentModeCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes;
+ uint32_t presentModeCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR(
- m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && presentModeCount )
+ result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && presentModeCount )
{
presentModes.resize( presentModeCount );
- result = static_cast<Result>(
- d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice,
- static_cast<VkSurfaceKHR>( surface ),
- &presentModeCount,
- reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
+ result = d.vkGetPhysicalDeviceSurfacePresentModesKHR(
+ m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
+ VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+ if ( presentModeCount < presentModes.size() )
{
- VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
- if ( presentModeCount < presentModes.size() )
- {
- presentModes.resize( presentModeCount );
- }
+ presentModes.resize( presentModeCount );
}
- return createResultValue(
- result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentModes );
}
template <typename PresentModeKHRAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
- PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, PresentModeKHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type
+ PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
PresentModeKHRAllocator & presentModeKHRAllocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
- uint32_t presentModeCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
+ uint32_t presentModeCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR(
- m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && presentModeCount )
+ result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && presentModeCount )
{
presentModes.resize( presentModeCount );
- result = static_cast<Result>(
- d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice,
- static_cast<VkSurfaceKHR>( surface ),
- &presentModeCount,
- reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
+ result = d.vkGetPhysicalDeviceSurfacePresentModesKHR(
+ m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
+ VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+ if ( presentModeCount < presentModes.size() )
{
- VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
- if ( presentModeCount < presentModes.size() )
- {
- presentModes.resize( presentModeCount );
- }
+ presentModes.resize( presentModeCount );
}
- return createResultValue(
- result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentModes );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
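// ----------------------------------------------------------------------------
// Editorial note (not part of the upstream Vulkan-Hpp diff): every surface-query
// wrapper in the hunks above is rewritten the same way -- the raw VkResult from
// the C entry point is kept, the call is retried while it returns VK_INCOMPLETE,
// and the conversion to vk::Result happens once at the end through resultCheck()
// and createResultValueType() (replacing the older createResultValue()). A
// stand-alone sketch of that loop, using a hypothetical enumerate callable with
// the usual ( uint32_t * count, T * data ) -> VkResult signature:
//
//   template <typename T, typename Enumerate>
//   std::vector<T> enumerateAll( Enumerate enumerate )
//   {
//     std::vector<T> items;
//     uint32_t       count  = 0;
//     VkResult       result = VK_SUCCESS;
//     do
//     {
//       result = enumerate( &count, nullptr );         // first call: query the count
//       if ( ( result == VK_SUCCESS ) && count )
//       {
//         items.resize( count );
//         result = enumerate( &count, items.data() );  // second call: fill the vector
//       }
//     } while ( result == VK_INCOMPLETE );             // the set grew between the two calls
//     if ( count < items.size() )
//     {
//       items.resize( count );                         // the set shrank: trim the tail
//     }
//     return items;
//   }
// ----------------------------------------------------------------------------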
//=== VK_KHR_swapchain ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateSwapchainKHR( m_device,
- reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSwapchainKHR *>( pSwapchain ) ) );
+ return static_cast<Result>( d.vkCreateSwapchainKHR( m_device,
+ reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSwapchainKHR *>( pSwapchain ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type
- Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type
+ Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateSwapchainKHR( m_device,
reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
- return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSwapchainKHR *>( &swapchain ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchain );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type
- Device::createSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type
+ Device::createSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateSwapchainKHR( m_device,
reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>(
- result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSwapchainKHR *>( &swapchain ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>( swapchain, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
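// ----------------------------------------------------------------------------
// Editorial note (not part of the upstream Vulkan-Hpp diff): createSwapchainKHRUnique
// now constructs the UniqueHandle (handle plus ObjectDestroy deleter) itself and
// passes it to createResultValueType(), instead of handing a separate deleter to
// the old createResultValue() overload. Caller-side usage is unchanged; a hedged
// sketch assuming the default exceptions-enabled configuration:
//
//   vk::SwapchainCreateInfoKHR createInfo{ /* surface, imageFormat, imageExtent, ... */ };
//   vk::UniqueSwapchainKHR swapchain = device.createSwapchainKHRUnique( createInfo );
//   // the swapchain is destroyed automatically when the unique handle goes out of scope
// ----------------------------------------------------------------------------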
@@ -8201,778 +7737,677 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroySwapchainKHR( m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroySwapchainKHR( m_device,
static_cast<VkSwapchainKHR>( swapchain ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroySwapchainKHR( m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroySwapchainKHR( m_device,
static_cast<VkSwapchainKHR>( swapchain ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- uint32_t * pSwapchainImageCount,
- VULKAN_HPP_NAMESPACE::Image * pSwapchainImages,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ uint32_t * pSwapchainImageCount,
+ VULKAN_HPP_NAMESPACE::Image * pSwapchainImages,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- pSwapchainImageCount,
- reinterpret_cast<VkImage *>( pSwapchainImages ) ) );
+ return static_cast<Result>(
+ d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast<VkImage *>( pSwapchainImages ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename ImageAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image, ImageAllocator>>::type
- Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator>>::type
+ Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<Image, ImageAllocator> swapchainImages;
- uint32_t swapchainImageCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator> swapchainImages;
+ uint32_t swapchainImageCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetSwapchainImagesKHR(
- m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && swapchainImageCount )
+ result = d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && swapchainImageCount )
{
swapchainImages.resize( swapchainImageCount );
- result =
- static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- &swapchainImageCount,
- reinterpret_cast<VkImage *>( swapchainImages.data() ) ) );
+ result = d.vkGetSwapchainImagesKHR(
+ m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" );
+ VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
+ if ( swapchainImageCount < swapchainImages.size() )
{
- VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
- if ( swapchainImageCount < swapchainImages.size() )
- {
- swapchainImages.resize( swapchainImageCount );
- }
+ swapchainImages.resize( swapchainImageCount );
}
- return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchainImages );
}
- template <typename ImageAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, Image>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image, ImageAllocator>>::type
- Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- ImageAllocator & imageAllocator,
- Dispatch const & d ) const
+ template <typename ImageAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, Image>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator>>::type
+ Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, ImageAllocator & imageAllocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<Image, ImageAllocator> swapchainImages( imageAllocator );
- uint32_t swapchainImageCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator> swapchainImages( imageAllocator );
+ uint32_t swapchainImageCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetSwapchainImagesKHR(
- m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && swapchainImageCount )
+ result = d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && swapchainImageCount )
{
swapchainImages.resize( swapchainImageCount );
- result =
- static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- &swapchainImageCount,
- reinterpret_cast<VkImage *>( swapchainImages.data() ) ) );
+ result = d.vkGetSwapchainImagesKHR(
+ m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" );
+ VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
+ if ( swapchainImageCount < swapchainImages.size() )
{
- VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
- if ( swapchainImageCount < swapchainImages.size() )
- {
- swapchainImages.resize( swapchainImageCount );
- }
+ swapchainImages.resize( swapchainImageCount );
}
- return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchainImages );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- uint64_t timeout,
- VULKAN_HPP_NAMESPACE::Semaphore semaphore,
- VULKAN_HPP_NAMESPACE::Fence fence,
- uint32_t * pImageIndex,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ uint64_t timeout,
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+ VULKAN_HPP_NAMESPACE::Fence fence,
+ uint32_t * pImageIndex,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkAcquireNextImageKHR( m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- timeout,
- static_cast<VkSemaphore>( semaphore ),
- static_cast<VkFence>( fence ),
- pImageIndex ) );
+ return static_cast<Result>( d.vkAcquireNextImageKHR(
+ m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), pImageIndex ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t>
- Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- uint64_t timeout,
- VULKAN_HPP_NAMESPACE::Semaphore semaphore,
- VULKAN_HPP_NAMESPACE::Fence fence,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ uint64_t timeout,
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+ VULKAN_HPP_NAMESPACE::Fence fence,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
uint32_t imageIndex;
- Result result = static_cast<Result>( d.vkAcquireNextImageKHR( m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- timeout,
- static_cast<VkSemaphore>( semaphore ),
- static_cast<VkFence>( fence ),
- &imageIndex ) );
- return createResultValue( result,
- imageIndex,
- VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess,
- VULKAN_HPP_NAMESPACE::Result::eTimeout,
- VULKAN_HPP_NAMESPACE::Result::eNotReady,
- VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+ VkResult result = d.vkAcquireNextImageKHR(
+ m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), &imageIndex );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eTimeout,
+ VULKAN_HPP_NAMESPACE::Result::eNotReady,
+ VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+
+ return ResultValue<uint32_t>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageIndex );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR(
- const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( pPresentInfo ) ) );
+ return static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( pPresentInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result =
- static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( &presentInfo ) ) );
- return createResultValue(
- result,
- VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+
+ VkResult result = d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( &presentInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
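// ----------------------------------------------------------------------------
// Editorial note (not part of the upstream Vulkan-Hpp diff): acquireNextImageKHR
// and presentKHR pass an explicit success-code list to resultCheck(), so eTimeout,
// eNotReady and eSuboptimalKHR reach the caller as values instead of being thrown;
// acquireNextImageKHR returns ResultValue<uint32_t> and presentKHR now returns
// vk::Result directly. A hedged caller-side sketch (the swapchain, the semaphore
// and the present info are assumed to exist):
//
//   vk::ResultValue<uint32_t> acquired =
//       device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailableSemaphore, nullptr );
//   uint32_t imageIndex = acquired.value;
//
//   // ... record and submit command buffers for imageIndex ...
//
//   vk::Result presentResult = queue.presentKHR( presentInfo );
//   if ( acquired.result == vk::Result::eSuboptimalKHR || presentResult == vk::Result::eSuboptimalKHR )
//   {
//     // still presentable, but the swapchain no longer matches the surface; recreate it soon
//   }
// ----------------------------------------------------------------------------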
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR(
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR(
- m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( pDeviceGroupPresentCapabilities ) ) );
+ return static_cast<Result>(
+ d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( pDeviceGroupPresentCapabilities ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type
- Device::getGroupPresentCapabilitiesKHR( Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type
+ Device::getGroupPresentCapabilitiesKHR( Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities;
- Result result = static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR(
- m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( &deviceGroupPresentCapabilities ) ) );
- return createResultValue(
- result, deviceGroupPresentCapabilities, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" );
+ VkResult result =
+ d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( &deviceGroupPresentCapabilities ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), deviceGroupPresentCapabilities );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetDeviceGroupSurfacePresentModesKHR( m_device,
- static_cast<VkSurfaceKHR>( surface ),
- reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
+ return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR(
+ m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
- Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
+ Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
- Result result = static_cast<Result>(
- d.vkGetDeviceGroupSurfacePresentModesKHR( m_device,
- static_cast<VkSurfaceKHR>( surface ),
- reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) );
- return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" );
+ VkResult result = d.vkGetDeviceGroupSurfacePresentModesKHR(
+ m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), modes );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- uint32_t * pRectCount,
- VULKAN_HPP_NAMESPACE::Rect2D * pRects,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ uint32_t * pRectCount,
+ VULKAN_HPP_NAMESPACE::Rect2D * pRects,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR(
- m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D *>( pRects ) ) );
+ return static_cast<Result>(
+ d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D *>( pRects ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Rect2DAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D, Rect2DAllocator>>::type
- PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator>>::type
+ PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<Rect2D, Rect2DAllocator> rects;
- uint32_t rectCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator> rects;
+ uint32_t rectCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR(
- m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && rectCount )
+ result = d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && rectCount )
{
rects.resize( rectCount );
- result = static_cast<Result>(
- d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice,
- static_cast<VkSurfaceKHR>( surface ),
- &rectCount,
- reinterpret_cast<VkRect2D *>( rects.data() ) ) );
+ result = d.vkGetPhysicalDevicePresentRectanglesKHR(
+ m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
+ VULKAN_HPP_ASSERT( rectCount <= rects.size() );
+ if ( rectCount < rects.size() )
{
- VULKAN_HPP_ASSERT( rectCount <= rects.size() );
- if ( rectCount < rects.size() )
- {
- rects.resize( rectCount );
- }
+ rects.resize( rectCount );
}
- return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), rects );
}
- template <typename Rect2DAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, Rect2D>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D, Rect2DAllocator>>::type
- PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- Rect2DAllocator & rect2DAllocator,
- Dispatch const & d ) const
+ template <typename Rect2DAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, Rect2D>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator>>::type
+ PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Rect2DAllocator & rect2DAllocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<Rect2D, Rect2DAllocator> rects( rect2DAllocator );
- uint32_t rectCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator> rects( rect2DAllocator );
+ uint32_t rectCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR(
- m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && rectCount )
+ result = d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && rectCount )
{
rects.resize( rectCount );
- result = static_cast<Result>(
- d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice,
- static_cast<VkSurfaceKHR>( surface ),
- &rectCount,
- reinterpret_cast<VkRect2D *>( rects.data() ) ) );
+ result = d.vkGetPhysicalDevicePresentRectanglesKHR(
+ m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
+ VULKAN_HPP_ASSERT( rectCount <= rects.size() );
+ if ( rectCount < rects.size() )
{
- VULKAN_HPP_ASSERT( rectCount <= rects.size() );
- if ( rectCount < rects.size() )
- {
- rects.resize( rectCount );
- }
+ rects.resize( rectCount );
}
- return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), rects );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo,
- uint32_t * pImageIndex,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo,
+ uint32_t * pImageIndex,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkAcquireNextImage2KHR(
- m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( pAcquireInfo ), pImageIndex ) );
+ return static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( pAcquireInfo ), pImageIndex ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t>
- Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
uint32_t imageIndex;
- Result result = static_cast<Result>( d.vkAcquireNextImage2KHR(
- m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( &acquireInfo ), &imageIndex ) );
- return createResultValue( result,
- imageIndex,
- VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess,
- VULKAN_HPP_NAMESPACE::Result::eTimeout,
- VULKAN_HPP_NAMESPACE::Result::eNotReady,
- VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+ VkResult result = d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( &acquireInfo ), &imageIndex );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eTimeout,
+ VULKAN_HPP_NAMESPACE::Result::eNotReady,
+ VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+
+ return ResultValue<uint32_t>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageIndex );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_display ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getDisplayPropertiesKHR( uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR(
- m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( pProperties ) ) );
+ return static_cast<Result>(
+ d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename DisplayPropertiesKHRAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type
- PhysicalDevice::getDisplayPropertiesKHR( Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type
+ PhysicalDevice::getDisplayPropertiesKHR( Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties;
- uint32_t propertyCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties;
+ uint32_t propertyCount;
+ VkResult result;
do
{
- result =
- static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
+ result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR(
- m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ) );
+ result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
template <typename DisplayPropertiesKHRAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, DisplayPropertiesKHR>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type
- PhysicalDevice::getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator,
- Dispatch const & d ) const
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, DisplayPropertiesKHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type
+ PhysicalDevice::getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties( displayPropertiesKHRAllocator );
- uint32_t propertyCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties( displayPropertiesKHRAllocator );
+ uint32_t propertyCount;
+ VkResult result;
do
{
- result =
- static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
+ result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR(
- m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ) );
+ result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
- m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( pProperties ) ) );
+ return static_cast<Result>(
+ d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename DisplayPlanePropertiesKHRAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type
PhysicalDevice::getDisplayPlanePropertiesKHR( Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties;
- uint32_t propertyCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties;
+ uint32_t propertyCount;
+ VkResult result;
do
{
- result = static_cast<Result>(
- d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
+ result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
- m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ) );
+ result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
+ m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
template <typename DisplayPlanePropertiesKHRAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, DisplayPlanePropertiesKHR>::value, int>::type>
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, DisplayPlanePropertiesKHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type
- PhysicalDevice::getDisplayPlanePropertiesKHR(
- DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, Dispatch const & d ) const
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type
+ PhysicalDevice::getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties(
- displayPlanePropertiesKHRAllocator );
- uint32_t propertyCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties( displayPlanePropertiesKHRAllocator );
+ uint32_t propertyCount;
+ VkResult result;
do
{
- result = static_cast<Result>(
- d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
+ result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
- m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ) );
+ result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
+ m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex,
- uint32_t * pDisplayCount,
- VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex,
+ uint32_t * pDisplayCount,
+ VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR(
- m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR *>( pDisplays ) ) );
+ return static_cast<Result>(
+ d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR *>( pDisplays ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename DisplayKHRAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR, DisplayKHRAllocator>>::type
- PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator>>::type
+ PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<DisplayKHR, DisplayKHRAllocator> displays;
- uint32_t displayCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator> displays;
+ uint32_t displayCount;
+ VkResult result;
do
{
- result = static_cast<Result>(
- d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && displayCount )
+ result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && displayCount )
{
displays.resize( displayCount );
- result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR(
- m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ) );
+ result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
+ VULKAN_HPP_ASSERT( displayCount <= displays.size() );
+ if ( displayCount < displays.size() )
{
- VULKAN_HPP_ASSERT( displayCount <= displays.size() );
- if ( displayCount < displays.size() )
- {
- displays.resize( displayCount );
- }
+ displays.resize( displayCount );
}
- return createResultValue(
- result, displays, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), displays );
}
template <typename DisplayKHRAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, DisplayKHR>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR, DisplayKHRAllocator>>::type
- PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex,
- DisplayKHRAllocator & displayKHRAllocator,
- Dispatch const & d ) const
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, DisplayKHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator>>::type
+ PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, DisplayKHRAllocator & displayKHRAllocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<DisplayKHR, DisplayKHRAllocator> displays( displayKHRAllocator );
- uint32_t displayCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator> displays( displayKHRAllocator );
+ uint32_t displayCount;
+ VkResult result;
do
{
- result = static_cast<Result>(
- d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && displayCount )
+ result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && displayCount )
{
displays.resize( displayCount );
- result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR(
- m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ) );
+ result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
+ VULKAN_HPP_ASSERT( displayCount <= displays.size() );
+ if ( displayCount < displays.size() )
{
- VULKAN_HPP_ASSERT( displayCount <= displays.size() );
- if ( displayCount < displays.size() )
- {
- displays.resize( displayCount );
- }
+ displays.resize( displayCount );
}
- return createResultValue(
- result, displays, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), displays );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetDisplayModePropertiesKHR( m_physicalDevice,
- static_cast<VkDisplayKHR>( display ),
- pPropertyCount,
- reinterpret_cast<VkDisplayModePropertiesKHR *>( pProperties ) ) );
+ return static_cast<Result>( d.vkGetDisplayModePropertiesKHR(
+ m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename DisplayModePropertiesKHRAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type
PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties;
- uint32_t propertyCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties;
+ uint32_t propertyCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR(
- m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
+ result = d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<Result>(
- d.vkGetDisplayModePropertiesKHR( m_physicalDevice,
- static_cast<VkDisplayKHR>( display ),
- &propertyCount,
- reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ) );
+ result = d.vkGetDisplayModePropertiesKHR(
+ m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
template <typename DisplayModePropertiesKHRAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, DisplayModePropertiesKHR>::value, int>::type>
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, DisplayModePropertiesKHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type
PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties(
- displayModePropertiesKHRAllocator );
- uint32_t propertyCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties( displayModePropertiesKHRAllocator );
+ uint32_t propertyCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR(
- m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
+ result = d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<Result>(
- d.vkGetDisplayModePropertiesKHR( m_physicalDevice,
- static_cast<VkDisplayKHR>( display ),
- &propertyCount,
- reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ) );
+ result = d.vkGetDisplayModePropertiesKHR(
+ m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateDisplayModeKHR( m_physicalDevice,
- static_cast<VkDisplayKHR>( display ),
- reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkDisplayModeKHR *>( pMode ) ) );
+ return static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice,
+ static_cast<VkDisplayKHR>( display ),
+ reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkDisplayModeKHR *>( pMode ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type
- PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type
+ PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateDisplayModeKHR( m_physicalDevice,
static_cast<VkDisplayKHR>( display ),
reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDisplayModeKHR *>( &mode ) ) );
- return createResultValue( result, mode, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDisplayModeKHR *>( &mode ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), mode );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>>::type
- PhysicalDevice::createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>>::type
+ PhysicalDevice::createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display,
const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateDisplayModeKHR( m_physicalDevice,
static_cast<VkDisplayKHR>( display ),
reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDisplayModeKHR *>( &mode ) ) );
- ObjectDestroy<PhysicalDevice, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>(
- result, mode, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDisplayModeKHR *>( &mode ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>( mode, ObjectDestroy<PhysicalDevice, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -8982,89 +8417,82 @@ namespace VULKAN_HPP_NAMESPACE
PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode,
uint32_t planeIndex,
VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice,
- static_cast<VkDisplayModeKHR>( mode ),
- planeIndex,
- reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( pCapabilities ) ) );
+ return static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR(
+ m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( pCapabilities ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type
- PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode,
- uint32_t planeIndex,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type
+ PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities;
- Result result = static_cast<Result>(
- d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice,
- static_cast<VkDisplayModeKHR>( mode ),
- planeIndex,
- reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) ) );
- return createResultValue(
- result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" );
+ VkResult result = d.vkGetDisplayPlaneCapabilitiesKHR(
+ m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), capabilities );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateDisplayPlaneSurfaceKHR( m_instance,
- reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+ return static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance,
+ reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateDisplayPlaneSurfaceKHR( m_instance,
- reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" );
+ VkResult result = d.vkCreateDisplayPlaneSurfaceKHR(
+ m_instance,
+ reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- Instance::createDisplayPlaneSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ Instance::createDisplayPlaneSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateDisplayPlaneSurfaceKHR( m_instance,
- reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique", deleter );
+ VkResult result = d.vkCreateDisplayPlaneSurfaceKHR(
+ m_instance,
+ reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -9072,174 +8500,164 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_display_swapchain ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createSharedSwapchainsKHR( uint32_t swapchainCount,
- const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount,
+ const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateSharedSwapchainsKHR( m_device,
- swapchainCount,
- reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfos ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSwapchainKHR *>( pSwapchains ) ) );
+ return static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device,
+ swapchainCount,
+ reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfos ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSwapchainKHR *>( pSwapchains ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename SwapchainKHRAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type
- Device::createSharedSwapchainsKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type
+ Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size() );
- Result result = static_cast<Result>(
- d.vkCreateSharedSwapchainsKHR( m_device,
- createInfos.size(),
- reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) );
- return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" );
+ VkResult result = d.vkCreateSharedSwapchainsKHR(
+ m_device,
+ createInfos.size(),
+ reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchains );
}
template <typename SwapchainKHRAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, SwapchainKHR>::value, int>::type>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type
- Device::createSharedSwapchainsKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- SwapchainKHRAllocator & swapchainKHRAllocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size(),
- swapchainKHRAllocator );
- Result result = static_cast<Result>(
- d.vkCreateSharedSwapchainsKHR( m_device,
- createInfos.size(),
- reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) );
- return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" );
- }
-
- template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type
- Device::createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
+ typename B0,
+ typename std::enable_if<std::is_same<typename B0::value_type, SwapchainKHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type
+ Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ SwapchainKHRAllocator & swapchainKHRAllocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size(), swapchainKHRAllocator );
+ VkResult result = d.vkCreateSharedSwapchainsKHR(
+ m_device,
+ createInfos.size(),
+ reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchains );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type
+ Device::createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
- Result result = static_cast<Result>(
- d.vkCreateSharedSwapchainsKHR( m_device,
- 1,
- reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
- return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHR" );
+ VkResult result = d.vkCreateSharedSwapchainsKHR(
+ m_device,
+ 1,
+ reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSwapchainKHR *>( &swapchain ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchain );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch, typename SwapchainKHRAllocator>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type
- Device::createSharedSwapchainsKHRUnique(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains;
- std::vector<SwapchainKHR> swapchains( createInfos.size() );
- Result result = static_cast<Result>(
- d.vkCreateSharedSwapchainsKHR( m_device,
- createInfos.size(),
- reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type
+ Device::createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains( createInfos.size() );
+ VkResult result = d.vkCreateSharedSwapchainsKHR(
+ m_device,
+ createInfos.size(),
+ reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
+ std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains;
+ uniqueSwapchains.reserve( createInfos.size() );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ for ( auto const & swapchain : swapchains )
{
- uniqueSwapchains.reserve( createInfos.size() );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- for ( size_t i = 0; i < createInfos.size(); i++ )
- {
- uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchains[i], deleter ) );
- }
+ uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchain, deleter ) );
}
- return createResultValue(
- result, std::move( uniqueSwapchains ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueSwapchains ) );
}
template <typename Dispatch,
typename SwapchainKHRAllocator,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<SwapchainKHR, Dispatch>>::value,
- int>::type>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type
- Device::createSharedSwapchainsKHRUnique(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- SwapchainKHRAllocator & swapchainKHRAllocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains( swapchainKHRAllocator );
- std::vector<SwapchainKHR> swapchains( createInfos.size() );
- Result result = static_cast<Result>(
- d.vkCreateSharedSwapchainsKHR( m_device,
- createInfos.size(),
- reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ typename B0,
+ typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<SwapchainKHR, Dispatch>>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type
+ Device::createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ SwapchainKHRAllocator & swapchainKHRAllocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains( createInfos.size() );
+ VkResult result = d.vkCreateSharedSwapchainsKHR(
+ m_device,
+ createInfos.size(),
+ reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
+ std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains( swapchainKHRAllocator );
+ uniqueSwapchains.reserve( createInfos.size() );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ for ( auto const & swapchain : swapchains )
{
- uniqueSwapchains.reserve( createInfos.size() );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- for ( size_t i = 0; i < createInfos.size(); i++ )
- {
- uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchains[i], deleter ) );
- }
+ uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchain, deleter ) );
}
- return createResultValue(
- result, std::move( uniqueSwapchains ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueSwapchains ) );
}
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<SwapchainKHR, Dispatch>>::type
- Device::createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type
+ Device::createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- SwapchainKHR swapchain;
- Result result = static_cast<Result>(
- d.vkCreateSharedSwapchainsKHR( m_device,
- 1,
- reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<SwapchainKHR, Dispatch>(
- result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique", deleter );
+
+ VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
+ VkResult result = d.vkCreateSharedSwapchainsKHR(
+ m_device,
+ 1,
+ reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSwapchainKHR *>( &swapchain ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>( swapchain, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -9248,82 +8666,80 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_xlib_surface ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateXlibSurfaceKHR( m_instance,
- reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+ return static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance,
+ reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateXlibSurfaceKHR( m_instance,
reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- Instance::createXlibSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ Instance::createXlibSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateXlibSurfaceKHR( m_instance,
reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex,
- Display * dpy,
- VisualID visualID,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE Bool32
+ PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display * dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Bool32>(
- d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) );
+ return static_cast<Bool32>( d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex,
- Display & dpy,
- VisualID visualID,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
+ PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID );
+
+ VkBool32 result = d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
@@ -9332,58 +8748,58 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_xcb_surface ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateXcbSurfaceKHR( m_instance,
- reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+ return static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance,
+ reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateXcbSurfaceKHR( m_instance,
reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- Instance::createXcbSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ Instance::createXcbSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateXcbSurfaceKHR( m_instance,
reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -9392,22 +8808,24 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex,
xcb_connection_t * connection,
xcb_visualid_t visual_id,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Bool32>(
- d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) );
+ return static_cast<Bool32>( d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex,
- xcb_connection_t & connection,
- xcb_visualid_t visual_id,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex,
+ xcb_connection_t & connection,
+ xcb_visualid_t visual_id,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id );
+
+ VkBool32 result = d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_XCB_KHR*/
@@ -9416,78 +8834,81 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_wayland_surface ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateWaylandSurfaceKHR( m_instance,
- reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+ return static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance,
+ reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateWaylandSurfaceKHR( m_instance,
- reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" );
+ VkResult result = d.vkCreateWaylandSurfaceKHR(
+ m_instance,
+ reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- Instance::createWaylandSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ Instance::createWaylandSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateWaylandSurfaceKHR( m_instance,
- reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique", deleter );
+ VkResult result = d.vkCreateWaylandSurfaceKHR(
+ m_instance,
+ reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR(
- uint32_t queueFamilyIndex, struct wl_display * display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex,
+ struct wl_display * display,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Bool32>(
- d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) );
+ return static_cast<Bool32>( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR(
- uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
+ PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display );
+
+ VkBool32 result = d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
@@ -9496,58 +8917,58 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_android_surface ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateAndroidSurfaceKHR( m_instance,
- reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+ return static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance,
+ reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateAndroidSurfaceKHR( m_instance,
- reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" );
+ VkResult result = d.vkCreateAndroidSurfaceKHR(
+ m_instance,
+ reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- Instance::createAndroidSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ Instance::createAndroidSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateAndroidSurfaceKHR( m_instance,
- reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique", deleter );
+ VkResult result = d.vkCreateAndroidSurfaceKHR(
+ m_instance,
+ reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -9557,69 +8978,67 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_win32_surface ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateWin32SurfaceKHR( m_instance,
- reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+ return static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance,
+ reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateWin32SurfaceKHR( m_instance,
reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- Instance::createWin32SurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ Instance::createWin32SurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateWin32SurfaceKHR( m_instance,
reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR(
- uint32_t queueFamilyIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Bool32>(
- d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) );
+ return static_cast<Bool32>( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) );
}
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
@@ -9630,109 +9049,106 @@ namespace VULKAN_HPP_NAMESPACE
Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateDebugReportCallbackEXT( m_instance,
- reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkDebugReportCallbackEXT *>( pCallback ) ) );
+ return static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance,
+ reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkDebugReportCallbackEXT *>( pCallback ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type
- Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type
+ Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
- Result result = static_cast<Result>(
- d.vkCreateDebugReportCallbackEXT( m_instance,
- reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) ) );
- return createResultValue(
- result, callback, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" );
+ VkResult result = d.vkCreateDebugReportCallbackEXT(
+ m_instance,
+ reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), callback );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>>::type
- Instance::createDebugReportCallbackEXTUnique(
- const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>>::type
+ Instance::createDebugReportCallbackEXTUnique( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
- Result result = static_cast<Result>(
- d.vkCreateDebugReportCallbackEXT( m_instance,
- reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>(
- result, callback, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique", deleter );
+ VkResult result = d.vkCreateDebugReportCallbackEXT(
+ m_instance,
+ reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>( callback, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyDebugReportCallbackEXT( m_instance,
- static_cast<VkDebugReportCallbackEXT>( callback ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyDebugReportCallbackEXT(
+ m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyDebugReportCallbackEXT(
m_instance,
static_cast<VkDebugReportCallbackEXT>( callback ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyDebugReportCallbackEXT( m_instance,
- static_cast<VkDebugReportCallbackEXT>( callback ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyDebugReportCallbackEXT(
+ m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyDebugReportCallbackEXT(
m_instance,
static_cast<VkDebugReportCallbackEXT>( callback ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -9744,7 +9160,7 @@ namespace VULKAN_HPP_NAMESPACE
int32_t messageCode,
const char * pLayerPrefix,
const char * pMessage,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDebugReportMessageEXT( m_instance,
@@ -9766,9 +9182,10 @@ namespace VULKAN_HPP_NAMESPACE
int32_t messageCode,
const std::string & layerPrefix,
const std::string & message,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDebugReportMessageEXT( m_instance,
static_cast<VkDebugReportFlagsEXT>( flags ),
static_cast<VkDebugReportObjectTypeEXT>( objectType ),
@@ -9783,53 +9200,52 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_debug_marker ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT(
- const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( pTagInfo ) ) );
+ return static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( pTagInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo,
- Dispatch const & d ) const
+ Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>(
- d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( &tagInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" );
+
+ VkResult result = d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( &tagInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT(
- const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT(
- m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( pNameInfo ) ) );
+ return static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( pNameInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo,
- Dispatch const & d ) const
+ Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT(
- m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( &nameInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" );
+
+ VkResult result = d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( &nameInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
@@ -9837,11 +9253,11 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -9854,9 +9270,8 @@ namespace VULKAN_HPP_NAMESPACE
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
@@ -9864,11 +9279,11 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -9877,207 +9292,184 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_video_queue ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile,
- VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile,
+ VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice,
- reinterpret_cast<const VkVideoProfileKHR *>( pVideoProfile ),
- reinterpret_cast<VkVideoCapabilitiesKHR *>( pCapabilities ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceVideoCapabilitiesKHR(
+ m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( pVideoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( pCapabilities ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::type
- PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR & videoProfile,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::type
+ PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities;
- Result result = static_cast<Result>(
- d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice,
- reinterpret_cast<const VkVideoProfileKHR *>( &videoProfile ),
- reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ) );
- return createResultValue(
- result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
+ VkResult result = d.vkGetPhysicalDeviceVideoCapabilitiesKHR(
+ m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), capabilities );
}
template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
- PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR & videoProfile,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
+ PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities =
- structureChain.template get<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>();
- Result result = static_cast<Result>(
- d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice,
- reinterpret_cast<const VkVideoProfileKHR *>( &videoProfile ),
- reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ) );
- return createResultValue(
- result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
+ VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>();
+ VkResult result = d.vkGetPhysicalDeviceVideoCapabilitiesKHR(
+ m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getVideoFormatPropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo,
- uint32_t * pVideoFormatPropertyCount,
- VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo,
+ uint32_t * pVideoFormatPropertyCount,
+ VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( pVideoFormatInfo ),
- pVideoFormatPropertyCount,
- reinterpret_cast<VkVideoFormatPropertiesKHR *>( pVideoFormatProperties ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( pVideoFormatInfo ),
+ pVideoFormatPropertyCount,
+ reinterpret_cast<VkVideoFormatPropertiesKHR *>( pVideoFormatProperties ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename VideoFormatPropertiesKHRAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
- PhysicalDevice::getVideoFormatPropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch const & d ) const
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
+ PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties;
- uint32_t videoFormatPropertyCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties;
+ uint32_t videoFormatPropertyCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
- &videoFormatPropertyCount,
- nullptr ) );
- if ( ( result == Result::eSuccess ) && videoFormatPropertyCount )
+ result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
+ m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && videoFormatPropertyCount )
{
videoFormatProperties.resize( videoFormatPropertyCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
- &videoFormatPropertyCount,
- reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) );
+ result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
+ &videoFormatPropertyCount,
+ reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
+ VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
+ if ( videoFormatPropertyCount < videoFormatProperties.size() )
{
- VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
- if ( videoFormatPropertyCount < videoFormatProperties.size() )
- {
- videoFormatProperties.resize( videoFormatPropertyCount );
- }
+ videoFormatProperties.resize( videoFormatPropertyCount );
}
- return createResultValue(
- result, videoFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), videoFormatProperties );
}
template <typename VideoFormatPropertiesKHRAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, VideoFormatPropertiesKHR>::value, int>::type>
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, VideoFormatPropertiesKHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
- PhysicalDevice::getVideoFormatPropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,
- VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator,
- Dispatch const & d ) const
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
+ PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,
+ VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties(
- videoFormatPropertiesKHRAllocator );
- uint32_t videoFormatPropertyCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties( videoFormatPropertiesKHRAllocator );
+ uint32_t videoFormatPropertyCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
- &videoFormatPropertyCount,
- nullptr ) );
- if ( ( result == Result::eSuccess ) && videoFormatPropertyCount )
+ result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
+ m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && videoFormatPropertyCount )
{
videoFormatProperties.resize( videoFormatPropertyCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
- &videoFormatPropertyCount,
- reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) );
+ result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
+ &videoFormatPropertyCount,
+ reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
+ VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
+ if ( videoFormatPropertyCount < videoFormatProperties.size() )
{
- VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
- if ( videoFormatPropertyCount < videoFormatProperties.size() )
- {
- videoFormatProperties.resize( videoFormatPropertyCount );
- }
+ videoFormatProperties.resize( videoFormatPropertyCount );
}
- return createResultValue(
- result, videoFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), videoFormatProperties );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateVideoSessionKHR( m_device,
- reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkVideoSessionKHR *>( pVideoSession ) ) );
+ return static_cast<Result>( d.vkCreateVideoSessionKHR( m_device,
+ reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkVideoSessionKHR *>( pVideoSession ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionKHR>::type
- Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionKHR>::type
+ Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateVideoSessionKHR( m_device,
reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) ) );
- return createResultValue( result, videoSession, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHR" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), videoSession );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>>::type
- Device::createVideoSessionKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>>::type
+ Device::createVideoSessionKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateVideoSessionKHR( m_device,
reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>(
- result, videoSession, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHRUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHRUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>( videoSession, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -10085,336 +9477,309 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyVideoSessionKHR( m_device,
- static_cast<VkVideoSessionKHR>( videoSession ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyVideoSessionKHR( m_device,
- static_cast<VkVideoSessionKHR>( videoSession ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+
+ d.vkDestroyVideoSessionKHR(
+ m_device,
+ static_cast<VkVideoSessionKHR>( videoSession ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyVideoSessionKHR( m_device,
- static_cast<VkVideoSessionKHR>( videoSession ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyVideoSessionKHR( m_device,
- static_cast<VkVideoSessionKHR>( videoSession ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+
+ d.vkDestroyVideoSessionKHR(
+ m_device,
+ static_cast<VkVideoSessionKHR>( videoSession ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getVideoSessionMemoryRequirementsKHR(
- VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
- uint32_t * pVideoSessionMemoryRequirementsCount,
- VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR * pVideoSessionMemoryRequirements,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+ uint32_t * pMemoryRequirementsCount,
+ VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR * pMemoryRequirements,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
- m_device,
- static_cast<VkVideoSessionKHR>( videoSession ),
- pVideoSessionMemoryRequirementsCount,
- reinterpret_cast<VkVideoGetMemoryPropertiesKHR *>( pVideoSessionMemoryRequirements ) ) );
+ return static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR( m_device,
+ static_cast<VkVideoSessionKHR>( videoSession ),
+ pMemoryRequirementsCount,
+ reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( pMemoryRequirements ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename VideoGetMemoryPropertiesKHRAllocator, typename Dispatch>
+ template <typename VideoSessionMemoryRequirementsKHRAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator>>::type
- Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
- Dispatch const & d ) const
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>::type
+ Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator> videoSessionMemoryRequirements;
- uint32_t videoSessionMemoryRequirementsCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator> memoryRequirements;
+ uint32_t memoryRequirementsCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
- m_device, static_cast<VkVideoSessionKHR>( videoSession ), &videoSessionMemoryRequirementsCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && videoSessionMemoryRequirementsCount )
+ result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && memoryRequirementsCount )
{
- videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount );
- result = static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
- m_device,
- static_cast<VkVideoSessionKHR>( videoSession ),
- &videoSessionMemoryRequirementsCount,
- reinterpret_cast<VkVideoGetMemoryPropertiesKHR *>( videoSessionMemoryRequirements.data() ) ) );
+ memoryRequirements.resize( memoryRequirementsCount );
+ result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device,
+ static_cast<VkVideoSessionKHR>( videoSession ),
+ &memoryRequirementsCount,
+ reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getVideoSessionMemoryRequirementsKHR" );
+ VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() );
+ if ( memoryRequirementsCount < memoryRequirements.size() )
{
- VULKAN_HPP_ASSERT( videoSessionMemoryRequirementsCount <= videoSessionMemoryRequirements.size() );
- if ( videoSessionMemoryRequirementsCount < videoSessionMemoryRequirements.size() )
- {
- videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount );
- }
+ memoryRequirements.resize( memoryRequirementsCount );
}
- return createResultValue( result,
- videoSessionMemoryRequirements,
- VULKAN_HPP_NAMESPACE_STRING "::Device::getVideoSessionMemoryRequirementsKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryRequirements );
}
- template <
- typename VideoGetMemoryPropertiesKHRAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, VideoGetMemoryPropertiesKHR>::value, int>::type>
+ template <typename VideoSessionMemoryRequirementsKHRAllocator,
+ typename Dispatch,
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, VideoSessionMemoryRequirementsKHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator>>::type
- Device::getVideoSessionMemoryRequirementsKHR(
- VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
- VideoGetMemoryPropertiesKHRAllocator & videoGetMemoryPropertiesKHRAllocator,
- Dispatch const & d ) const
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>::type
+ Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+ VideoSessionMemoryRequirementsKHRAllocator & videoSessionMemoryRequirementsKHRAllocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator> videoSessionMemoryRequirements(
- videoGetMemoryPropertiesKHRAllocator );
- uint32_t videoSessionMemoryRequirementsCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator> memoryRequirements(
+ videoSessionMemoryRequirementsKHRAllocator );
+ uint32_t memoryRequirementsCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
- m_device, static_cast<VkVideoSessionKHR>( videoSession ), &videoSessionMemoryRequirementsCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && videoSessionMemoryRequirementsCount )
+ result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && memoryRequirementsCount )
{
- videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount );
- result = static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
- m_device,
- static_cast<VkVideoSessionKHR>( videoSession ),
- &videoSessionMemoryRequirementsCount,
- reinterpret_cast<VkVideoGetMemoryPropertiesKHR *>( videoSessionMemoryRequirements.data() ) ) );
+ memoryRequirements.resize( memoryRequirementsCount );
+ result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device,
+ static_cast<VkVideoSessionKHR>( videoSession ),
+ &memoryRequirementsCount,
+ reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getVideoSessionMemoryRequirementsKHR" );
+ VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() );
+ if ( memoryRequirementsCount < memoryRequirements.size() )
{
- VULKAN_HPP_ASSERT( videoSessionMemoryRequirementsCount <= videoSessionMemoryRequirements.size() );
- if ( videoSessionMemoryRequirementsCount < videoSessionMemoryRequirements.size() )
- {
- videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount );
- }
+ memoryRequirements.resize( memoryRequirementsCount );
}
- return createResultValue( result,
- videoSessionMemoryRequirements,
- VULKAN_HPP_NAMESPACE_STRING "::Device::getVideoSessionMemoryRequirementsKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryRequirements );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
- uint32_t videoSessionBindMemoryCount,
- const VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR * pVideoSessionBindMemories,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+ uint32_t bindSessionMemoryInfoCount,
+ const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkBindVideoSessionMemoryKHR( m_device,
- static_cast<VkVideoSessionKHR>( videoSession ),
- videoSessionBindMemoryCount,
- reinterpret_cast<const VkVideoBindMemoryKHR *>( pVideoSessionBindMemories ) ) );
+ return static_cast<Result>( d.vkBindVideoSessionMemoryKHR( m_device,
+ static_cast<VkVideoSessionKHR>( videoSession ),
+ bindSessionMemoryInfoCount,
+ reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( pBindSessionMemoryInfos ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::bindVideoSessionMemoryKHR(
- VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR> const & videoSessionBindMemories,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindVideoSessionMemoryKHR(
+ VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR> const & bindSessionMemoryInfos,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkBindVideoSessionMemoryKHR(
- m_device,
- static_cast<VkVideoSessionKHR>( videoSession ),
- videoSessionBindMemories.size(),
- reinterpret_cast<const VkVideoBindMemoryKHR *>( videoSessionBindMemories.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindVideoSessionMemoryKHR" );
+
+ VkResult result = d.vkBindVideoSessionMemoryKHR( m_device,
+ static_cast<VkVideoSessionKHR>( videoSession ),
+ bindSessionMemoryInfos.size(),
+ reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( bindSessionMemoryInfos.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindVideoSessionMemoryKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createVideoSessionParametersKHR(
- const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateVideoSessionParametersKHR(
- m_device,
- reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkVideoSessionParametersKHR *>( pVideoSessionParameters ) ) );
+ return static_cast<Result>( d.vkCreateVideoSessionParametersKHR( m_device,
+ reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkVideoSessionParametersKHR *>( pVideoSessionParameters ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>::type
- Device::createVideoSessionParametersKHR(
- const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>::type
+ Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters;
- Result result = static_cast<Result>( d.vkCreateVideoSessionParametersKHR(
+ VkResult result = d.vkCreateVideoSessionParametersKHR(
m_device,
reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) ) );
- return createResultValue(
- result, videoSessionParameters, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHR" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), videoSessionParameters );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>>::type
- Device::createVideoSessionParametersKHRUnique(
- const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>>::type
+ Device::createVideoSessionParametersKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters;
- Result result = static_cast<Result>( d.vkCreateVideoSessionParametersKHR(
+ VkResult result = d.vkCreateVideoSessionParametersKHR(
m_device,
reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>(
- result,
- videoSessionParameters,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHRUnique",
- deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHRUnique" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>(
+ videoSessionParameters, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::updateVideoSessionParametersKHR(
- VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
- const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
+ const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkUpdateVideoSessionParametersKHR(
- m_device,
- static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
- reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( pUpdateInfo ) ) );
+ return static_cast<Result>( d.vkUpdateVideoSessionParametersKHR( m_device,
+ static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
+ reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( pUpdateInfo ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::updateVideoSessionParametersKHR(
- VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
- const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo,
- Dispatch const & d ) const
+ Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
+ const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkUpdateVideoSessionParametersKHR(
- m_device,
- static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
- reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( &updateInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::updateVideoSessionParametersKHR" );
+
+ VkResult result = d.vkUpdateVideoSessionParametersKHR( m_device,
+ static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
+ reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( &updateInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::updateVideoSessionParametersKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyVideoSessionParametersKHR( m_device,
- static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyVideoSessionParametersKHR(
+ m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyVideoSessionParametersKHR(
m_device,
static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyVideoSessionParametersKHR( m_device,
- static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyVideoSessionParametersKHR(
+ m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyVideoSessionParametersKHR(
m_device,
static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( pBeginInfo ) );
@@ -10422,19 +9787,18 @@ namespace VULKAN_HPP_NAMESPACE
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( &beginInfo ) );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( pEndCodingInfo ) );
@@ -10442,34 +9806,31 @@ namespace VULKAN_HPP_NAMESPACE
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( &endCodingInfo ) );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdControlVideoCodingKHR( m_commandBuffer,
- reinterpret_cast<const VkVideoCodingControlInfoKHR *>( pCodingControlInfo ) );
+ d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoCodingControlInfoKHR *>( pCodingControlInfo ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdControlVideoCodingKHR( m_commandBuffer,
- reinterpret_cast<const VkVideoCodingControlInfoKHR *>( &codingControlInfo ) );
+
+ d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoCodingControlInfoKHR *>( &codingControlInfo ) );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
@@ -10478,20 +9839,21 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_video_decode_queue ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pFrameInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pDecodeInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( pFrameInfo ) );
+ d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( pDecodeInfo ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & frameInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( &frameInfo ) );
+
+ d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( &decodeInfo ) );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
@@ -10499,13 +9861,12 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_transform_feedback ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
- uint32_t bindingCount,
- const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
- const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
- const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
+ uint32_t bindingCount,
+ const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
+ const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
+ const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer,
@@ -10519,10 +9880,10 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void
- CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
+ CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
@@ -10532,13 +9893,11 @@ namespace VULKAN_HPP_NAMESPACE
# else
if ( buffers.size() != offsets.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" );
}
if ( !sizes.empty() && buffers.size() != sizes.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
@@ -10552,12 +9911,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,
- const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
+ uint32_t counterBufferCount,
+ const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,
+ const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer,
@@ -10569,11 +9927,11 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT(
- uint32_t firstCounterBuffer,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ VULKAN_HPP_INLINE void
+ CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
@@ -10581,9 +9939,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
{
- throw LogicError(
- VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
@@ -10596,12 +9952,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,
- const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer,
+ uint32_t counterBufferCount,
+ const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,
+ const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdEndTransformFeedbackEXT( m_commandBuffer,
@@ -10613,11 +9968,11 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT(
- uint32_t firstCounterBuffer,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ VULKAN_HPP_INLINE void
+ CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
@@ -10625,9 +9980,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
{
- throw LogicError(
- VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
@@ -10644,18 +9997,15 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t query,
VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
uint32_t index,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBeginQueryIndexedEXT(
- m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
+ d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t query,
- uint32_t index,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index );
@@ -10668,7 +10018,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset,
uint32_t counterOffset,
uint32_t vertexStride,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer,
@@ -10683,11 +10033,10 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NVX_binary_import ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCreateCuModuleNVX( m_device,
@@ -10698,99 +10047,100 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::CuModuleNVX>::type
- Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CuModuleNVX>::type
+ Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::CuModuleNVX module;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateCuModuleNVX( m_device,
reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkCuModuleNVX *>( &module ) ) );
- return createResultValue( result, module, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVX" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkCuModuleNVX *>( &module ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVX" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), module );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>>::type
- Device::createCuModuleNVXUnique( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>>::type
+ Device::createCuModuleNVXUnique( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::CuModuleNVX module;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateCuModuleNVX( m_device,
reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkCuModuleNVX *>( &module ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>(
- result, module, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVXUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkCuModuleNVX *>( &module ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVXUnique" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>( module, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateCuFunctionNVX( m_device,
- reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkCuFunctionNVX *>( pFunction ) ) );
+ return static_cast<Result>( d.vkCreateCuFunctionNVX( m_device,
+ reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkCuFunctionNVX *>( pFunction ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::CuFunctionNVX>::type
- Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CuFunctionNVX>::type
+ Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::CuFunctionNVX function;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateCuFunctionNVX( m_device,
reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkCuFunctionNVX *>( &function ) ) );
- return createResultValue( result, function, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVX" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkCuFunctionNVX *>( &function ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVX" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), function );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>>::type
- Device::createCuFunctionNVXUnique( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>>::type
+ Device::createCuFunctionNVXUnique( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::CuFunctionNVX function;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateCuFunctionNVX( m_device,
reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkCuFunctionNVX *>( &function ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>(
- result, function, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVXUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkCuFunctionNVX *>( &function ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVXUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>( function, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -10798,106 +10148,98 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyCuModuleNVX(
- m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyCuModuleNVX( m_device,
static_cast<VkCuModuleNVX>( module ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyCuModuleNVX(
- m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyCuModuleNVX( m_device,
static_cast<VkCuModuleNVX>( module ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyCuFunctionNVX( m_device,
- static_cast<VkCuFunctionNVX>( function ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyCuFunctionNVX( m_device,
static_cast<VkCuFunctionNVX>( function ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyCuFunctionNVX( m_device,
- static_cast<VkCuFunctionNVX>( function ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyCuFunctionNVX( m_device,
static_cast<VkCuFunctionNVX>( function ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( pLaunchInfo ) );
@@ -10906,9 +10248,10 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( &launchInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -10917,7 +10260,7 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( pInfo ) );
@@ -10926,39 +10269,39 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( &info ) );
+
+ uint32_t result = d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( &info ) );
+
+ return result;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView,
- VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView,
+ VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>(
- d.vkGetImageViewAddressNVX( m_device,
- static_cast<VkImageView>( imageView ),
- reinterpret_cast<VkImageViewAddressPropertiesNVX *>( pProperties ) ) );
+ d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::type
- Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::type
+ Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties;
- Result result = static_cast<Result>(
- d.vkGetImageViewAddressNVX( m_device,
- static_cast<VkImageView>( imageView ),
- reinterpret_cast<VkImageViewAddressPropertiesNVX *>( &properties ) ) );
- return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" );
+ VkResult result =
+ d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( &properties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -10971,7 +10314,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdDrawIndirectCountAMD( m_commandBuffer,
@@ -10990,7 +10333,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer,
@@ -11005,13 +10348,12 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_AMD_shader_info ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
- VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,
- size_t * pInfoSize,
- void * pInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
+ VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,
+ size_t * pInfoSize,
+ void * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
@@ -11031,43 +10373,42 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
std::vector<uint8_t, Uint8_tAllocator> info;
size_t infoSize;
- Result result;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
- static_cast<VkPipeline>( pipeline ),
- static_cast<VkShaderStageFlagBits>( shaderStage ),
- static_cast<VkShaderInfoTypeAMD>( infoType ),
- &infoSize,
- nullptr ) );
- if ( ( result == Result::eSuccess ) && infoSize )
+ result = d.vkGetShaderInfoAMD( m_device,
+ static_cast<VkPipeline>( pipeline ),
+ static_cast<VkShaderStageFlagBits>( shaderStage ),
+ static_cast<VkShaderInfoTypeAMD>( infoType ),
+ &infoSize,
+ nullptr );
+ if ( ( result == VK_SUCCESS ) && infoSize )
{
info.resize( infoSize );
- result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
- static_cast<VkPipeline>( pipeline ),
- static_cast<VkShaderStageFlagBits>( shaderStage ),
- static_cast<VkShaderInfoTypeAMD>( infoType ),
- &infoSize,
- reinterpret_cast<void *>( info.data() ) ) );
+ result = d.vkGetShaderInfoAMD( m_device,
+ static_cast<VkPipeline>( pipeline ),
+ static_cast<VkShaderStageFlagBits>( shaderStage ),
+ static_cast<VkShaderInfoTypeAMD>( infoType ),
+ &infoSize,
+ reinterpret_cast<void *>( info.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" );
+ VULKAN_HPP_ASSERT( infoSize <= info.size() );
+ if ( infoSize < info.size() )
{
- VULKAN_HPP_ASSERT( infoSize <= info.size() );
- if ( infoSize < info.size() )
- {
- info.resize( infoSize );
- }
+ info.resize( infoSize );
}
- return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), info );
}
template <typename Uint8_tAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type>
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
@@ -11076,37 +10417,36 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
std::vector<uint8_t, Uint8_tAllocator> info( uint8_tAllocator );
size_t infoSize;
- Result result;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
- static_cast<VkPipeline>( pipeline ),
- static_cast<VkShaderStageFlagBits>( shaderStage ),
- static_cast<VkShaderInfoTypeAMD>( infoType ),
- &infoSize,
- nullptr ) );
- if ( ( result == Result::eSuccess ) && infoSize )
+ result = d.vkGetShaderInfoAMD( m_device,
+ static_cast<VkPipeline>( pipeline ),
+ static_cast<VkShaderStageFlagBits>( shaderStage ),
+ static_cast<VkShaderInfoTypeAMD>( infoType ),
+ &infoSize,
+ nullptr );
+ if ( ( result == VK_SUCCESS ) && infoSize )
{
info.resize( infoSize );
- result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
- static_cast<VkPipeline>( pipeline ),
- static_cast<VkShaderStageFlagBits>( shaderStage ),
- static_cast<VkShaderInfoTypeAMD>( infoType ),
- &infoSize,
- reinterpret_cast<void *>( info.data() ) ) );
+ result = d.vkGetShaderInfoAMD( m_device,
+ static_cast<VkPipeline>( pipeline ),
+ static_cast<VkShaderStageFlagBits>( shaderStage ),
+ static_cast<VkShaderInfoTypeAMD>( infoType ),
+ &infoSize,
+ reinterpret_cast<void *>( info.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" );
+ VULKAN_HPP_ASSERT( infoSize <= info.size() );
+ if ( infoSize < info.size() )
{
- VULKAN_HPP_ASSERT( infoSize <= info.size() );
- if ( infoSize < info.size() )
- {
- info.resize( infoSize );
- }
+ info.resize( infoSize );
}
- return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), info );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -11114,7 +10454,7 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) );
@@ -11123,9 +10463,10 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -11141,61 +10482,59 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_GGP_stream_descriptor_surface ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createStreamDescriptorSurfaceGGP(
- const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP(
- m_instance,
- reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+ return static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance,
+ reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- Instance::createStreamDescriptorSurfaceGGP(
- const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP(
+ VkResult result = d.vkCreateStreamDescriptorSurfaceGGP(
m_instance,
reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- return createResultValue(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGP" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGP" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- Instance::createStreamDescriptorSurfaceGGPUnique(
- const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ Instance::createStreamDescriptorSurfaceGGPUnique( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP(
+
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+ VkResult result = d.vkCreateStreamDescriptorSurfaceGGP(
m_instance,
reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -11204,55 +10543,54 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_external_memory_capabilities ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getExternalImageFormatPropertiesNV(
- VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::ImageType type,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
- VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType,
- VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::ImageType type,
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
+ VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType,
+ VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV(
- m_physicalDevice,
- static_cast<VkFormat>( format ),
- static_cast<VkImageType>( type ),
- static_cast<VkImageTiling>( tiling ),
- static_cast<VkImageUsageFlags>( usage ),
- static_cast<VkImageCreateFlags>( flags ),
- static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
- reinterpret_cast<VkExternalImageFormatPropertiesNV *>( pExternalImageFormatProperties ) ) );
+ return static_cast<Result>(
+ d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice,
+ static_cast<VkFormat>( format ),
+ static_cast<VkImageType>( type ),
+ static_cast<VkImageTiling>( tiling ),
+ static_cast<VkImageUsageFlags>( usage ),
+ static_cast<VkImageCreateFlags>( flags ),
+ static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
+ reinterpret_cast<VkExternalImageFormatPropertiesNV *>( pExternalImageFormatProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type
- PhysicalDevice::getExternalImageFormatPropertiesNV(
- VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::ImageType type,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
- VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type
+ PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::ImageType type,
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
+ VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties;
- Result result = static_cast<Result>( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV(
- m_physicalDevice,
- static_cast<VkFormat>( format ),
- static_cast<VkImageType>( type ),
- static_cast<VkImageTiling>( tiling ),
- static_cast<VkImageUsageFlags>( usage ),
- static_cast<VkImageCreateFlags>( flags ),
- static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
- reinterpret_cast<VkExternalImageFormatPropertiesNV *>( &externalImageFormatProperties ) ) );
- return createResultValue( result,
- externalImageFormatProperties,
- VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" );
+ VkResult result =
+ d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice,
+ static_cast<VkFormat>( format ),
+ static_cast<VkImageType>( type ),
+ static_cast<VkImageTiling>( tiling ),
+ static_cast<VkImageUsageFlags>( usage ),
+ static_cast<VkImageCreateFlags>( flags ),
+ static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
+ reinterpret_cast<VkExternalImageFormatPropertiesNV *>( &externalImageFormatProperties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), externalImageFormatProperties );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -11260,35 +10598,29 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_external_memory_win32 ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,
- HANDLE * pHandle,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,
+ HANDLE * pHandle,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>(
- d.vkGetMemoryWin32HandleNV( m_device,
- static_cast<VkDeviceMemory>( memory ),
- static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ),
- pHandle ) );
+ d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), pHandle ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
- Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getMemoryWin32HandleNV(
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- HANDLE handle;
- Result result =
- static_cast<Result>( d.vkGetMemoryWin32HandleNV( m_device,
- static_cast<VkDeviceMemory>( memory ),
- static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ),
- &handle ) );
- return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" );
+
+ HANDLE handle;
+ VkResult result =
+ d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
@@ -11297,7 +10629,7 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
@@ -11309,65 +10641,64 @@ namespace VULKAN_HPP_NAMESPACE
PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
+
return features;
}
template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features =
- structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
+
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- PhysicalDevice::getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice,
- reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
+ d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2
- PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
- d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice,
- reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+ d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+
return properties;
}
template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties =
- structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
- d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice,
- reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
+ d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkGetPhysicalDeviceFormatProperties2KHR(
@@ -11377,129 +10708,130 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2
- PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
d.vkGetPhysicalDeviceFormatProperties2KHR(
m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+
return formatProperties;
}
template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties =
- structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
+ VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
d.vkGetPhysicalDeviceFormatProperties2KHR(
m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
- VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ),
- reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ),
+ reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type
- PhysicalDevice::getImageFormatProperties2KHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type
+ PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
- Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
- reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
- return createResultValue(
- result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
+ VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
+ reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
}
template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
- PhysicalDevice::getImageFormatProperties2KHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
+ PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties =
- structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
- Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
- reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
- return createResultValue(
- result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
+ VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
+ reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount,
- VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount,
+ VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
- m_physicalDevice,
- pQueueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
+ m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename QueueFamilyProperties2Allocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
- uint32_t queueFamilyPropertyCount;
+
+ std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
+ uint32_t queueFamilyPropertyCount;
d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
queueFamilyProperties.resize( queueFamilyPropertyCount );
d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
- m_physicalDevice,
- &queueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+ m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+
VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+ if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+ {
+ queueFamilyProperties.resize( queueFamilyPropertyCount );
+ }
return queueFamilyProperties;
}
template <typename QueueFamilyProperties2Allocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties2>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
- PhysicalDevice::getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator,
- Dispatch const & d ) const
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, QueueFamilyProperties2>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
+ PhysicalDevice::getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties(
- queueFamilyProperties2Allocator );
- uint32_t queueFamilyPropertyCount;
+
+ std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator );
+ uint32_t queueFamilyPropertyCount;
d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
queueFamilyProperties.resize( queueFamilyPropertyCount );
d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
- m_physicalDevice,
- &queueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+ m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+
VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+ if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+ {
+ queueFamilyProperties.resize( queueFamilyPropertyCount );
+ }
return queueFamilyProperties;
}
@@ -11508,164 +10840,168 @@ namespace VULKAN_HPP_NAMESPACE
PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- uint32_t queueFamilyPropertyCount;
+
+ std::vector<StructureChain, StructureChainAllocator> structureChains;
+ std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
+ uint32_t queueFamilyPropertyCount;
d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
- std::vector<StructureChain, StructureChainAllocator> returnVector( queueFamilyPropertyCount );
- std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties( queueFamilyPropertyCount );
+ structureChains.resize( queueFamilyPropertyCount );
+ queueFamilyProperties.resize( queueFamilyPropertyCount );
for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
{
- queueFamilyProperties[i].pNext =
- returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+ queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
}
d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
- m_physicalDevice,
- &queueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+ m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+
VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+ if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+ {
+ structureChains.resize( queueFamilyPropertyCount );
+ }
for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
{
- returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
+ structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
}
- return returnVector;
+ return structureChains;
}
template <typename StructureChain,
typename StructureChainAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, StructureChain>::value, int>::type>
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, StructureChain>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
- PhysicalDevice::getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator,
- Dispatch const & d ) const
+ PhysicalDevice::getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- uint32_t queueFamilyPropertyCount;
+
+ std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator );
+ std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
+ uint32_t queueFamilyPropertyCount;
d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
- std::vector<StructureChain, StructureChainAllocator> returnVector( queueFamilyPropertyCount,
- structureChainAllocator );
- std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties( queueFamilyPropertyCount );
+ structureChains.resize( queueFamilyPropertyCount );
+ queueFamilyProperties.resize( queueFamilyPropertyCount );
for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
{
- queueFamilyProperties[i].pNext =
- returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+ queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
}
d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
- m_physicalDevice,
- &queueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+ m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+
VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+ if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+ {
+ structureChains.resize( queueFamilyPropertyCount );
+ }
for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
{
- returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
+ structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
}
- return returnVector;
+ return structureChains;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceMemoryProperties2KHR(
- m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
+ d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
- PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
- d.vkGetPhysicalDeviceMemoryProperties2KHR(
- m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+ d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+
return memoryProperties;
}
template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties =
structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
- d.vkGetPhysicalDeviceMemoryProperties2KHR(
- m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+ d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
+ uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ),
- pPropertyCount,
- reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
+ d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ),
+ pPropertyCount,
+ reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename SparseImageFormatProperties2Allocator, typename Dispatch>
- VULKAN_HPP_NODISCARD
- VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
- PhysicalDevice::getSparseImageFormatProperties2KHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
+ PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
- uint32_t propertyCount;
+
+ std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
+ uint32_t propertyCount;
d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
- &propertyCount,
- nullptr );
+ m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
properties.resize( propertyCount );
- d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
- &propertyCount,
- reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+ d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
+ &propertyCount,
+ reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+
VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
+ {
+ properties.resize( propertyCount );
+ }
return properties;
}
- template <
- typename SparseImageFormatProperties2Allocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties2>::value, int>::type>
- VULKAN_HPP_NODISCARD
- VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
- PhysicalDevice::getSparseImageFormatProperties2KHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
- SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,
- Dispatch const & d ) const
+ template <typename SparseImageFormatProperties2Allocator,
+ typename Dispatch,
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, SparseImageFormatProperties2>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
+ PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
+ SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties(
- sparseImageFormatProperties2Allocator );
- uint32_t propertyCount;
+
+ std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator );
+ uint32_t propertyCount;
d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
- &propertyCount,
- nullptr );
+ m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
properties.resize( propertyCount );
- d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
- &propertyCount,
- reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+ d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
+ &propertyCount,
+ reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+
VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
+ {
+ properties.resize( propertyCount );
+ }
return properties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -11673,43 +11009,34 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_device_group ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex,
+ uint32_t localDeviceIndex,
+ uint32_t remoteDeviceIndex,
+ VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device,
- heapIndex,
- localDeviceIndex,
- remoteDeviceIndex,
- reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
+ d.vkGetDeviceGroupPeerMemoryFeaturesKHR(
+ m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags
- Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeaturesKHR(
+ uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
- d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device,
- heapIndex,
- localDeviceIndex,
- remoteDeviceIndex,
- reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
+ d.vkGetDeviceGroupPeerMemoryFeaturesKHR(
+ m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
+
return peerMemoryFeatures;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask );
@@ -11725,19 +11052,17 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDispatchBaseKHR(
- m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+ d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
}
#if defined( VK_USE_PLATFORM_VI_NN )
//=== VK_NN_vi_surface ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCreateViSurfaceNN( m_instance,
@@ -11748,42 +11073,44 @@ namespace VULKAN_HPP_NAMESPACE
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateViSurfaceNN( m_instance,
reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- Instance::createViSurfaceNNUnique( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ Instance::createViSurfaceNNUnique( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateViSurfaceNN( m_instance,
reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -11794,134 +11121,115 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkTrimCommandPoolKHR(
- m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+ d.vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
}
//=== VK_KHR_device_group_creation ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHR(
- uint32_t * pPhysicalDeviceGroupCount,
- VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t * pPhysicalDeviceGroupCount,
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR(
- m_instance,
- pPhysicalDeviceGroupCount,
- reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
+ m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
Instance::enumeratePhysicalDeviceGroupsKHR( Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
- uint32_t physicalDeviceGroupCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
+ uint32_t physicalDeviceGroupCount;
+ VkResult result;
do
{
- result =
- static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
+ result = d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
{
physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
- result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR(
- m_instance,
- &physicalDeviceGroupCount,
- reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
+ result = d.vkEnumeratePhysicalDeviceGroupsKHR(
+ m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" );
+ VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+ if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
{
- VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
- if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
- {
- physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
- }
+ physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
}
- return createResultValue( result,
- physicalDeviceGroupProperties,
- VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
}
- template <
- typename PhysicalDeviceGroupPropertiesAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value, int>::type>
+ template <typename PhysicalDeviceGroupPropertiesAllocator,
+ typename Dispatch,
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceGroupProperties>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
- Instance::enumeratePhysicalDeviceGroupsKHR(
- PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
+ Instance::enumeratePhysicalDeviceGroupsKHR( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties(
+
+ std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties(
physicalDeviceGroupPropertiesAllocator );
uint32_t physicalDeviceGroupCount;
- Result result;
+ VkResult result;
do
{
- result =
- static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
+ result = d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
{
physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
- result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR(
- m_instance,
- &physicalDeviceGroupCount,
- reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
+ result = d.vkEnumeratePhysicalDeviceGroupsKHR(
+ m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" );
+ VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+ if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
{
- VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
- if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
- {
- physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
- }
+ physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
}
- return createResultValue( result,
- physicalDeviceGroupProperties,
- VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_external_memory_capabilities ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
- VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
+ VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceExternalBufferPropertiesKHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ),
- reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
+ d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ),
+ reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties
- PhysicalDevice::getExternalBufferPropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
- d.vkGetPhysicalDeviceExternalBufferPropertiesKHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
- reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
+ d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
+ reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
+
return externalBufferProperties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -11930,62 +11238,59 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_external_memory_win32 ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo,
- HANDLE * pHandle,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo,
+ HANDLE * pHandle,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetMemoryWin32HandleKHR(
- m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
+ return static_cast<Result>(
+ d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
- Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
+ Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- HANDLE handle;
- Result result = static_cast<Result>( d.vkGetMemoryWin32HandleKHR(
- m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) );
- return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" );
+
+ HANDLE handle;
+ VkResult result = d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR(
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- HANDLE handle,
- VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+ HANDLE handle,
+ VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR(
- m_device,
- static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
- handle,
- reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( pMemoryWin32HandleProperties ) ) );
+ return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device,
+ static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
+ handle,
+ reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( pMemoryWin32HandleProperties ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type
- Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- HANDLE handle,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type
+ Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties;
- Result result = static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR(
- m_device,
- static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
- handle,
- reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( &memoryWin32HandleProperties ) ) );
- return createResultValue(
- result, memoryWin32HandleProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" );
+ VkResult result = d.vkGetMemoryWin32HandlePropertiesKHR( m_device,
+ static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
+ handle,
+ reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( &memoryWin32HandleProperties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryWin32HandleProperties );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
@@ -11993,92 +11298,83 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_external_memory_fd ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo,
- int * pFd,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo,
+ int * pFd,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
+ return static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<int>::type
- Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- int fd;
- Result result = static_cast<Result>(
- d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( &getFdInfo ), &fd ) );
- return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" );
+
+ int fd;
+ VkResult result = d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( &getFdInfo ), &fd );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fd );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- int fd,
- VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+ int fd,
+ VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetMemoryFdPropertiesKHR( m_device,
- static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
- fd,
- reinterpret_cast<VkMemoryFdPropertiesKHR *>( pMemoryFdProperties ) ) );
+ return static_cast<Result>( d.vkGetMemoryFdPropertiesKHR(
+ m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( pMemoryFdProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type
- Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- int fd,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type
+ Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties;
- Result result = static_cast<Result>(
- d.vkGetMemoryFdPropertiesKHR( m_device,
- static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
- fd,
- reinterpret_cast<VkMemoryFdPropertiesKHR *>( &memoryFdProperties ) ) );
- return createResultValue(
- result, memoryFdProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" );
+ VkResult result = d.vkGetMemoryFdPropertiesKHR(
+ m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( &memoryFdProperties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryFdProperties );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_external_semaphore_capabilities ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphorePropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ),
- reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
+ d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ),
+ reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties
- PhysicalDevice::getExternalSemaphorePropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
- d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ),
- reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
+ d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ),
+ reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
+
return externalSemaphoreProperties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -12088,50 +11384,50 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR(
- const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR(
- m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( pImportSemaphoreWin32HandleInfo ) ) );
+ return static_cast<Result>(
+ d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( pImportSemaphoreWin32HandleInfo ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::importSemaphoreWin32HandleKHR(
- const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo,
- Dispatch const & d ) const
+ Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR(
- m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( &importSemaphoreWin32HandleInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" );
+
+ VkResult result =
+ d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( &importSemaphoreWin32HandleInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR(
- const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo,
- HANDLE * pHandle,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR(
- m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
+ return static_cast<Result>(
+ d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
- Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
+ Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- HANDLE handle;
- Result result = static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR(
- m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) );
- return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" );
+
+ HANDLE handle;
+ VkResult result = d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
@@ -12139,62 +11435,60 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_external_semaphore_fd ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkImportSemaphoreFdKHR(
- m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( pImportSemaphoreFdInfo ) ) );
+ return static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( pImportSemaphoreFdInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo,
- Dispatch const & d ) const
+ Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkImportSemaphoreFdKHR(
- m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( &importSemaphoreFdInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" );
+
+ VkResult result = d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( &importSemaphoreFdInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo,
- int * pFd,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo,
+ int * pFd,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
+ return static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<int>::type
- Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type
+ Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- int fd;
- Result result = static_cast<Result>(
- d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( &getFdInfo ), &fd ) );
- return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" );
+
+ int fd;
+ VkResult result = d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( &getFdInfo ), &fd );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fd );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_push_descriptor ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
- VULKAN_HPP_NAMESPACE::PipelineLayout layout,
- uint32_t set,
- uint32_t descriptorWriteCount,
- const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+ uint32_t set,
+ uint32_t descriptorWriteCount,
+ const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdPushDescriptorSetKHR( m_commandBuffer,
@@ -12207,14 +11501,15 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR(
- VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
- VULKAN_HPP_NAMESPACE::PipelineLayout layout,
- uint32_t set,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+ uint32_t set,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdPushDescriptorSetKHR( m_commandBuffer,
static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
static_cast<VkPipelineLayout>( layout ),
@@ -12225,31 +11520,27 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR(
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- VULKAN_HPP_NAMESPACE::PipelineLayout layout,
- uint32_t set,
- const void * pData,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+ uint32_t set,
+ const void * pData,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer,
- static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
- static_cast<VkPipelineLayout>( layout ),
- set,
- pData );
+ d.vkCmdPushDescriptorSetWithTemplateKHR(
+ m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename DataType, typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR(
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- VULKAN_HPP_NAMESPACE::PipelineLayout layout,
- uint32_t set,
- DataType const & data,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+ uint32_t set,
+ DataType const & data,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer,
static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
static_cast<VkPipelineLayout>( layout ),
@@ -12261,24 +11552,21 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_conditional_rendering ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT(
- const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBeginConditionalRenderingEXT(
- m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( pConditionalRenderingBegin ) );
+ d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( pConditionalRenderingBegin ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT(
- const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBeginConditionalRenderingEXT(
- m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( &conditionalRenderingBegin ) );
+
+ d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( &conditionalRenderingBegin ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -12292,119 +11580,108 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_descriptor_update_template ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR(
- const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR(
- m_device,
- reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
+ return static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device,
+ reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type
- Device::createDescriptorUpdateTemplateKHR(
- const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type
+ Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
- Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR(
+ VkResult result = d.vkCreateDescriptorUpdateTemplateKHR(
m_device,
reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
- return createResultValue(
- result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorUpdateTemplate );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type
- Device::createDescriptorUpdateTemplateKHRUnique(
- const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type
+ Device::createDescriptorUpdateTemplateKHRUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
- Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR(
+ VkResult result = d.vkCreateDescriptorUpdateTemplateKHR(
m_device,
reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>(
- result,
- descriptorUpdateTemplate,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique",
- deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>(
+ descriptorUpdateTemplate, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyDescriptorUpdateTemplateKHR( m_device,
- static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyDescriptorUpdateTemplateKHR(
+ m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyDescriptorUpdateTemplateKHR(
m_device,
static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- const void * pData,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ const void * pData,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkUpdateDescriptorSetWithTemplateKHR( m_device,
- static_cast<VkDescriptorSet>( descriptorSet ),
- static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
- pData );
+ d.vkUpdateDescriptorSetWithTemplateKHR(
+ m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename DataType, typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- DataType const & data,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ DataType const & data,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkUpdateDescriptorSetWithTemplateKHR( m_device,
static_cast<VkDescriptorSet>( descriptorSet ),
static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
@@ -12415,31 +11692,26 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_clip_space_w_scaling ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::setViewportWScalingNV( uint32_t firstViewport,
- uint32_t viewportCount,
- const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport,
+ uint32_t viewportCount,
+ const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetViewportWScalingNV( m_commandBuffer,
- firstViewport,
- viewportCount,
- reinterpret_cast<const VkViewportWScalingNV *>( pViewportWScalings ) );
+ d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportWScalingNV *>( pViewportWScalings ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV(
- uint32_t firstViewport,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setViewportWScalingNV( uint32_t firstViewport,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetViewportWScalingNV( m_commandBuffer,
- firstViewport,
- viewportWScalings.size(),
- reinterpret_cast<const VkViewportWScalingNV *>( viewportWScalings.data() ) );
+
+ d.vkCmdSetViewportWScalingNV(
+ m_commandBuffer, firstViewport, viewportWScalings.size(), reinterpret_cast<const VkViewportWScalingNV *>( viewportWScalings.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -12447,21 +11719,18 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
}
#else
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type
- PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
+ VULKAN_HPP_INLINE void PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result =
- static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::releaseDisplayEXT" );
+
+ d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -12469,65 +11738,65 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_acquire_xlib_display ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT(
- Display * dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display * dpy,
+ VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) );
+ return static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- PhysicalDevice::acquireXlibDisplayEXT( Display & dpy,
- VULKAN_HPP_NAMESPACE::DisplayKHR display,
- Dispatch const & d ) const
+ PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result =
- static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" );
+
+ VkResult result = d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getRandROutputDisplayEXT( Display * dpy,
- RROutput rrOutput,
- VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display * dpy,
+ RROutput rrOutput,
+ VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
+ return static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
- PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
+ PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DisplayKHR display;
- Result result = static_cast<Result>(
- d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
- return createResultValue(
- result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" );
+ VkResult result = d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), display );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
- PhysicalDevice::getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
+ PhysicalDevice::getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DisplayKHR display;
- Result result = static_cast<Result>(
- d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
- ObjectRelease<PhysicalDevice, Dispatch> deleter( *this, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>(
- result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique", deleter );
+ VkResult result = d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -12539,208 +11808,199 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT(
- m_physicalDevice,
- static_cast<VkSurfaceKHR>( surface ),
- reinterpret_cast<VkSurfaceCapabilities2EXT *>( pSurfaceCapabilities ) ) );
+ m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( pSurfaceCapabilities ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type
- PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type
+ PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities;
- Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT(
- m_physicalDevice,
- static_cast<VkSurfaceKHR>( surface ),
- reinterpret_cast<VkSurfaceCapabilities2EXT *>( &surfaceCapabilities ) ) );
- return createResultValue(
- result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" );
+ VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2EXT(
+ m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( &surfaceCapabilities ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceCapabilities );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_EXT_display_control ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>(
- d.vkDisplayPowerControlEXT( m_device,
- static_cast<VkDisplayKHR>( display ),
- reinterpret_cast<const VkDisplayPowerInfoEXT *>( pDisplayPowerInfo ) ) );
+ d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( pDisplayPowerInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>(
- d.vkDisplayPowerControlEXT( m_device,
- static_cast<VkDisplayKHR>( display ),
- reinterpret_cast<const VkDisplayPowerInfoEXT *>( &displayPowerInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" );
+
+ VkResult result =
+ d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( &displayPowerInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Fence * pFence,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Fence * pFence,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkRegisterDeviceEventEXT( m_device,
- reinterpret_cast<const VkDeviceEventInfoEXT *>( pDeviceEventInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkFence *>( pFence ) ) );
+ return static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device,
+ reinterpret_cast<const VkDeviceEventInfoEXT *>( pDeviceEventInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkFence *>( pFence ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
- Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
+ Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Fence fence;
- Result result = static_cast<Result>(
- d.vkRegisterDeviceEventEXT( m_device,
- reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkFence *>( &fence ) ) );
- return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" );
+ VkResult result = d.vkRegisterDeviceEventEXT(
+ m_device,
+ reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkFence *>( &fence ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fence );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
- Device::registerEventEXTUnique( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
+ Device::registerEventEXTUnique( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Fence fence;
- Result result = static_cast<Result>(
- d.vkRegisterDeviceEventEXT( m_device,
- reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkFence *>( &fence ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::Fence, Dispatch>(
- result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique", deleter );
+ VkResult result = d.vkRegisterDeviceEventEXT(
+ m_device,
+ reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkFence *>( &fence ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Fence * pFence,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Fence * pFence,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkRegisterDisplayEventEXT( m_device,
- static_cast<VkDisplayKHR>( display ),
- reinterpret_cast<const VkDisplayEventInfoEXT *>( pDisplayEventInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkFence *>( pFence ) ) );
+ return static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device,
+ static_cast<VkDisplayKHR>( display ),
+ reinterpret_cast<const VkDisplayEventInfoEXT *>( pDisplayEventInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkFence *>( pFence ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
- Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
+ Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Fence fence;
- Result result = static_cast<Result>(
- d.vkRegisterDisplayEventEXT( m_device,
- static_cast<VkDisplayKHR>( display ),
- reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkFence *>( &fence ) ) );
- return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" );
+ VkResult result = d.vkRegisterDisplayEventEXT(
+ m_device,
+ static_cast<VkDisplayKHR>( display ),
+ reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkFence *>( &fence ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fence );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
- Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
+ Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display,
const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Fence fence;
- Result result = static_cast<Result>(
- d.vkRegisterDisplayEventEXT( m_device,
- static_cast<VkDisplayKHR>( display ),
- reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkFence *>( &fence ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::Fence, Dispatch>(
- result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique", deleter );
+ VkResult result = d.vkRegisterDisplayEventEXT(
+ m_device,
+ static_cast<VkDisplayKHR>( display ),
+ reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkFence *>( &fence ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,
- uint64_t * pCounterValue,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,
+ uint64_t * pCounterValue,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetSwapchainCounterEXT( m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- static_cast<VkSurfaceCounterFlagBitsEXT>( counter ),
- pCounterValue ) );
+ return static_cast<Result>(
+ d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type
- Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSwapchainCounterEXT(
+ VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
uint64_t counterValue;
- Result result =
- static_cast<Result>( d.vkGetSwapchainCounterEXT( m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- static_cast<VkSurfaceCounterFlagBitsEXT>( counter ),
- &counterValue ) );
- return createResultValue( result, counterValue, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" );
+ VkResult result =
+ d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), counterValue );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -12750,154 +12010,135 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE(
- m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( pDisplayTimingProperties ) ) );
+ m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( pDisplayTimingProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type
- Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type
+ Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties;
- Result result = static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE(
- m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( &displayTimingProperties ) ) );
- return createResultValue(
- result, displayTimingProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" );
+ VkResult result = d.vkGetRefreshCycleDurationGOOGLE(
+ m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( &displayTimingProperties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), displayTimingProperties );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- uint32_t * pPresentationTimingCount,
+ Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ uint32_t * pPresentationTimingCount,
VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE(
- m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- pPresentationTimingCount,
- reinterpret_cast<VkPastPresentationTimingGOOGLE *>( pPresentationTimings ) ) );
+ return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device,
+ static_cast<VkSwapchainKHR>( swapchain ),
+ pPresentationTimingCount,
+ reinterpret_cast<VkPastPresentationTimingGOOGLE *>( pPresentationTimings ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename PastPresentationTimingGOOGLEAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type
Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings;
- uint32_t presentationTimingCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings;
+ uint32_t presentationTimingCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE(
- m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && presentationTimingCount )
+ result = d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && presentationTimingCount )
{
presentationTimings.resize( presentationTimingCount );
- result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE(
- m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- &presentationTimingCount,
- reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) ) );
+ result = d.vkGetPastPresentationTimingGOOGLE( m_device,
+ static_cast<VkSwapchainKHR>( swapchain ),
+ &presentationTimingCount,
+ reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" );
+ VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
+ if ( presentationTimingCount < presentationTimings.size() )
{
- VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
- if ( presentationTimingCount < presentationTimings.size() )
- {
- presentationTimings.resize( presentationTimingCount );
- }
+ presentationTimings.resize( presentationTimingCount );
}
- return createResultValue(
- result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentationTimings );
}
- template <
- typename PastPresentationTimingGOOGLEAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, PastPresentationTimingGOOGLE>::value, int>::type>
+ template <typename PastPresentationTimingGOOGLEAllocator,
+ typename Dispatch,
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, PastPresentationTimingGOOGLE>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type
- Device::getPastPresentationTimingGOOGLE(
- VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator,
- Dispatch const & d ) const
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type
+ Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings(
+
+ std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings(
pastPresentationTimingGOOGLEAllocator );
uint32_t presentationTimingCount;
- Result result;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE(
- m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && presentationTimingCount )
+ result = d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && presentationTimingCount )
{
presentationTimings.resize( presentationTimingCount );
- result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE(
- m_device,
- static_cast<VkSwapchainKHR>( swapchain ),
- &presentationTimingCount,
- reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) ) );
+ result = d.vkGetPastPresentationTimingGOOGLE( m_device,
+ static_cast<VkSwapchainKHR>( swapchain ),
+ &presentationTimingCount,
+ reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" );
+ VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
+ if ( presentationTimingCount < presentationTimings.size() )
{
- VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
- if ( presentationTimingCount < presentationTimings.size() )
- {
- presentationTimings.resize( presentationTimingCount );
- }
+ presentationTimings.resize( presentationTimingCount );
}
- return createResultValue(
- result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentationTimings );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_EXT_discard_rectangles ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
- uint32_t discardRectangleCount,
+ VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
+ uint32_t discardRectangleCount,
const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetDiscardRectangleEXT( m_commandBuffer,
- firstDiscardRectangle,
- discardRectangleCount,
- reinterpret_cast<const VkRect2D *>( pDiscardRectangles ) );
+ d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast<const VkRect2D *>( pDiscardRectangles ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetDiscardRectangleEXT( m_commandBuffer,
- firstDiscardRectangle,
- discardRectangles.size(),
- reinterpret_cast<const VkRect2D *>( discardRectangles.data() ) );
+
+ d.vkCmdSetDiscardRectangleEXT(
+ m_commandBuffer, firstDiscardRectangle, discardRectangles.size(), reinterpret_cast<const VkRect2D *>( discardRectangles.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -12907,21 +12148,18 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount,
const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains,
const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkSetHdrMetadataEXT( m_device,
- swapchainCount,
- reinterpret_cast<const VkSwapchainKHR *>( pSwapchains ),
- reinterpret_cast<const VkHdrMetadataEXT *>( pMetadata ) );
+ d.vkSetHdrMetadataEXT(
+ m_device, swapchainCount, reinterpret_cast<const VkSwapchainKHR *>( pSwapchains ), reinterpret_cast<const VkHdrMetadataEXT *>( pMetadata ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::setHdrMetadataEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
@@ -12929,8 +12167,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
if ( swapchains.size() != metadata.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
@@ -12944,117 +12181,111 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_create_renderpass2 ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateRenderPass2KHR( m_device,
- reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
+ return static_cast<Result>( d.vkCreateRenderPass2KHR( m_device,
+ reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
- Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
+ Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::RenderPass renderPass;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateRenderPass2KHR( m_device,
reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
- return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkRenderPass *>( &renderPass ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), renderPass );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
- Device::createRenderPass2KHRUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
+ Device::createRenderPass2KHRUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::RenderPass renderPass;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateRenderPass2KHR( m_device,
reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>(
- result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkRenderPass *>( &renderPass ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
- const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
+ const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBeginRenderPass2KHR( m_commandBuffer,
- reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ),
- reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
+ d.vkCmdBeginRenderPass2KHR(
+ m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
- const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
+ const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBeginRenderPass2KHR( m_commandBuffer,
- reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ),
- reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
+
+ d.vkCmdBeginRenderPass2KHR(
+ m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
- const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
+ const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdNextSubpass2KHR( m_commandBuffer,
- reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ),
- reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
+ d.vkCmdNextSubpass2KHR(
+ m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
- const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
+ const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdNextSubpass2KHR( m_commandBuffer,
- reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ),
- reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+
+ d.vkCmdNextSubpass2KHR(
+ m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
@@ -13063,9 +12294,10 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -13074,55 +12306,54 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR(
- VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
}
#else
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result =
- static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
- return createResultValue(
- result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+
+ VkResult result = d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_external_fence_capabilities ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
- VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
+ VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPhysicalDeviceExternalFencePropertiesKHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ),
- reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
+ d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ),
+ reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties
- PhysicalDevice::getExternalFencePropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
- d.vkGetPhysicalDeviceExternalFencePropertiesKHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
- reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
+ d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
+ reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
+
return externalFenceProperties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -13132,49 +12363,49 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR(
- const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkImportFenceWin32HandleKHR(
- m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( pImportFenceWin32HandleInfo ) ) );
+ return static_cast<Result>(
+ d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( pImportFenceWin32HandleInfo ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::importFenceWin32HandleKHR(
- const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const & d ) const
+ Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkImportFenceWin32HandleKHR(
- m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( &importFenceWin32HandleInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" );
+
+ VkResult result = d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( &importFenceWin32HandleInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo,
- HANDLE * pHandle,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo,
+ HANDLE * pHandle,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetFenceWin32HandleKHR(
- m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
+ return static_cast<Result>(
+ d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
- Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
+ Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- HANDLE handle;
- Result result = static_cast<Result>( d.vkGetFenceWin32HandleKHR(
- m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) );
- return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" );
+
+ HANDLE handle;
+ VkResult result = d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
@@ -13182,114 +12413,110 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_external_fence_fd ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( pImportFenceFdInfo ) ) );
+ return static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( pImportFenceFdInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo,
- Dispatch const & d ) const
+ Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>(
- d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( &importFenceFdInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" );
+
+ VkResult result = d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( &importFenceFdInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo,
- int * pFd,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo,
+ int * pFd,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
+ return static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<int>::type
- Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- int fd;
- Result result = static_cast<Result>(
- d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd ) );
- return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" );
+
+ int fd;
+ VkResult result = d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fd );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_performance_query ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR(
- uint32_t queueFamilyIndex,
- uint32_t * pCounterCount,
- VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters,
- VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex,
+ uint32_t * pCounterCount,
+ VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters,
+ VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
- m_physicalDevice,
- queueFamilyIndex,
- pCounterCount,
- reinterpret_cast<VkPerformanceCounterKHR *>( pCounters ),
- reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( pCounterDescriptions ) ) );
+ return static_cast<Result>(
+ d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice,
+ queueFamilyIndex,
+ pCounterCount,
+ reinterpret_cast<VkPerformanceCounterKHR *>( pCounters ),
+ reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( pCounterDescriptions ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PerformanceCounterKHRAllocator,
- typename PerformanceCounterDescriptionKHRAllocator,
- typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
- std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
- std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
- PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex,
- Dispatch const & d ) const
- {
- VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
- std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>
- data;
- std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data.first;
- std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions =
- data.second;
- uint32_t counterCount;
- Result result;
+ template <typename PerformanceCounterKHRAllocator, typename PerformanceCounterDescriptionKHRAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
+ std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
+ PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
+ std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>
+ data;
+ std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data.first;
+ std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data.second;
+ uint32_t counterCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
- m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) );
- if ( ( result == Result::eSuccess ) && counterCount )
+ result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr );
+ if ( ( result == VK_SUCCESS ) && counterCount )
{
counters.resize( counterCount );
counterDescriptions.resize( counterCount );
- result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+ result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
m_physicalDevice,
queueFamilyIndex,
&counterCount,
reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
- reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) );
- VULKAN_HPP_ASSERT( counterCount <= counters.size() );
+ reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( counterCount < counters.size() ) )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
+ VULKAN_HPP_ASSERT( counterCount <= counters.size() );
+ if ( counterCount < counters.size() )
{
counters.resize( counterCount );
counterDescriptions.resize( counterCount );
}
- return createResultValue(
- result, data, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
template <typename PerformanceCounterKHRAllocator,
@@ -13300,101 +12527,96 @@ namespace VULKAN_HPP_NAMESPACE
typename std::enable_if<std::is_same<typename B1::value_type, PerformanceCounterKHR>::value &&
std::is_same<typename B2::value_type, PerformanceCounterDescriptionKHR>::value,
int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
- std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
- std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
- PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR(
- uint32_t queueFamilyIndex,
- PerformanceCounterKHRAllocator & performanceCounterKHRAllocator,
- PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
- std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>
- data( std::piecewise_construct,
- std::forward_as_tuple( performanceCounterKHRAllocator ),
- std::forward_as_tuple( performanceCounterDescriptionKHRAllocator ) );
- std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data.first;
- std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions =
- data.second;
- uint32_t counterCount;
- Result result;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
+ std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
+ PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex,
+ PerformanceCounterKHRAllocator & performanceCounterKHRAllocator,
+ PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
+ std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>
+ data(
+ std::piecewise_construct, std::forward_as_tuple( performanceCounterKHRAllocator ), std::forward_as_tuple( performanceCounterDescriptionKHRAllocator ) );
+ std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data.first;
+ std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data.second;
+ uint32_t counterCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
- m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) );
- if ( ( result == Result::eSuccess ) && counterCount )
+ result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr );
+ if ( ( result == VK_SUCCESS ) && counterCount )
{
counters.resize( counterCount );
counterDescriptions.resize( counterCount );
- result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+ result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
m_physicalDevice,
queueFamilyIndex,
&counterCount,
reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
- reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) );
- VULKAN_HPP_ASSERT( counterCount <= counters.size() );
+ reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( ( result == Result::eSuccess ) && ( counterCount < counters.size() ) )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
+ VULKAN_HPP_ASSERT( counterCount <= counters.size() );
+ if ( counterCount < counters.size() )
{
counters.resize( counterCount );
counterDescriptions.resize( counterCount );
}
- return createResultValue(
- result, data, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR(
- const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo,
- uint32_t * pNumPasses,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo,
+ uint32_t * pNumPasses,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
- m_physicalDevice,
- reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( pPerformanceQueryCreateInfo ),
- pNumPasses );
+ m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( pPerformanceQueryCreateInfo ), pNumPasses );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR(
- const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
uint32_t numPasses;
d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
- m_physicalDevice,
- reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( &performanceQueryCreateInfo ),
- &numPasses );
+ m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( &performanceQueryCreateInfo ), &numPasses );
+
return numPasses;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR(
- const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( pInfo ) ) );
+ return static_cast<Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info,
- Dispatch const & d ) const
+ Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>(
- d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( &info ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" );
+
+ VkResult result = d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( &info ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -13408,439 +12630,470 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_get_surface_capabilities2 ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
- VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
+ VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
- reinterpret_cast<VkSurfaceCapabilities2KHR *>( pSurfaceCapabilities ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
+ reinterpret_cast<VkSurfaceCapabilities2KHR *>( pSurfaceCapabilities ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type
- PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type
+ PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities;
- Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
- reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) );
- return createResultValue(
- result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
+ VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+ reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceCapabilities );
}
template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
- PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
+ PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities =
- structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>();
- Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
- reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) );
- return createResultValue(
- result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
+ VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>();
+ VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+ reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
- uint32_t * pSurfaceFormatCount,
- VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
+ uint32_t * pSurfaceFormatCount,
+ VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
- pSurfaceFormatCount,
- reinterpret_cast<VkSurfaceFormat2KHR *>( pSurfaceFormats ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
+ pSurfaceFormatCount,
+ reinterpret_cast<VkSurfaceFormat2KHR *>( pSurfaceFormats ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename SurfaceFormat2KHRAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type
- PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type
+ PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats;
- uint32_t surfaceFormatCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats;
+ uint32_t surfaceFormatCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
- &surfaceFormatCount,
- nullptr ) );
- if ( ( result == Result::eSuccess ) && surfaceFormatCount )
+ result = d.vkGetPhysicalDeviceSurfaceFormats2KHR(
+ m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
{
surfaceFormats.resize( surfaceFormatCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
- &surfaceFormatCount,
- reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) );
+ result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+ &surfaceFormatCount,
+ reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
+ VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+ if ( surfaceFormatCount < surfaceFormats.size() )
{
- VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
- if ( surfaceFormatCount < surfaceFormats.size() )
+ surfaceFormats.resize( surfaceFormatCount );
+ }
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
+ }
+
+ template <typename SurfaceFormat2KHRAllocator,
+ typename Dispatch,
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, SurfaceFormat2KHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type
+ PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
+ SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats( surfaceFormat2KHRAllocator );
+ uint32_t surfaceFormatCount;
+ VkResult result;
+ do
+ {
+ result = d.vkGetPhysicalDeviceSurfaceFormats2KHR(
+ m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
{
surfaceFormats.resize( surfaceFormatCount );
+ result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+ &surfaceFormatCount,
+ reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
}
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
+ VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+ if ( surfaceFormatCount < surfaceFormats.size() )
+ {
+ surfaceFormats.resize( surfaceFormatCount );
}
- return createResultValue(
- result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
}
- template <typename SurfaceFormat2KHRAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, SurfaceFormat2KHR>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type
- PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
- SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator,
- Dispatch const & d ) const
+ template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type
+ PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats( surfaceFormat2KHRAllocator );
- uint32_t surfaceFormatCount;
- Result result;
+
+ std::vector<StructureChain, StructureChainAllocator> structureChains;
+ std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;
+ uint32_t surfaceFormatCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
- &surfaceFormatCount,
- nullptr ) );
- if ( ( result == Result::eSuccess ) && surfaceFormatCount )
+ result = d.vkGetPhysicalDeviceSurfaceFormats2KHR(
+ m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
{
+ structureChains.resize( surfaceFormatCount );
surfaceFormats.resize( surfaceFormatCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
- &surfaceFormatCount,
- reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) );
+ for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
+ {
+ surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext;
+ }
+ result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+ &surfaceFormatCount,
+ reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
+ VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+ if ( surfaceFormatCount < surfaceFormats.size() )
+ {
+ structureChains.resize( surfaceFormatCount );
+ }
+ for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
{
- VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
- if ( surfaceFormatCount < surfaceFormats.size() )
+ structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i];
+ }
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChains );
+ }
+
+ template <typename StructureChain,
+ typename StructureChainAllocator,
+ typename Dispatch,
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, StructureChain>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type
+ PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
+ StructureChainAllocator & structureChainAllocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator );
+ std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;
+ uint32_t surfaceFormatCount;
+ VkResult result;
+ do
+ {
+ result = d.vkGetPhysicalDeviceSurfaceFormats2KHR(
+ m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
{
+ structureChains.resize( surfaceFormatCount );
surfaceFormats.resize( surfaceFormatCount );
+ for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
+ {
+ surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext;
+ }
+ result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+ &surfaceFormatCount,
+ reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
}
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
+ VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+ if ( surfaceFormatCount < surfaceFormats.size() )
+ {
+ structureChains.resize( surfaceFormatCount );
}
- return createResultValue(
- result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
+ for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
+ {
+ structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i];
+ }
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChains );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_get_display_properties2 ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getDisplayProperties2KHR( uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayProperties2KHR( uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR(
- m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( pProperties ) ) );
+ return static_cast<Result>(
+ d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename DisplayProperties2KHRAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type
PhysicalDevice::getDisplayProperties2KHR( Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties;
- uint32_t propertyCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties;
+ uint32_t propertyCount;
+ VkResult result;
do
{
- result =
- static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
+ result = d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR(
- m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) ) );
+ result =
+ d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
template <typename DisplayProperties2KHRAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, DisplayProperties2KHR>::value, int>::type>
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, DisplayProperties2KHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type
- PhysicalDevice::getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator,
- Dispatch const & d ) const
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type
+ PhysicalDevice::getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties( displayProperties2KHRAllocator );
- uint32_t propertyCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties( displayProperties2KHRAllocator );
+ uint32_t propertyCount;
+ VkResult result;
do
{
- result =
- static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
+ result = d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR(
- m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) ) );
+ result =
+ d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
- m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( pProperties ) ) );
+ return static_cast<Result>(
+ d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename DisplayPlaneProperties2KHRAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type
PhysicalDevice::getDisplayPlaneProperties2KHR( Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties;
- uint32_t propertyCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties;
+ uint32_t propertyCount;
+ VkResult result;
do
{
- result = static_cast<Result>(
- d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
+ result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
- m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) ) );
+ result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
+ m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
template <typename DisplayPlaneProperties2KHRAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, DisplayPlaneProperties2KHR>::value, int>::type>
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, DisplayPlaneProperties2KHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type
- PhysicalDevice::getDisplayPlaneProperties2KHR(
- DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, Dispatch const & d ) const
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type
+ PhysicalDevice::getDisplayPlaneProperties2KHR( DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties(
- displayPlaneProperties2KHRAllocator );
- uint32_t propertyCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties( displayPlaneProperties2KHRAllocator );
+ uint32_t propertyCount;
+ VkResult result;
do
{
- result = static_cast<Result>(
- d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
+ result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
- m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) ) );
+ result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
+ m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetDisplayModeProperties2KHR( m_physicalDevice,
- static_cast<VkDisplayKHR>( display ),
- pPropertyCount,
- reinterpret_cast<VkDisplayModeProperties2KHR *>( pProperties ) ) );
+ return static_cast<Result>( d.vkGetDisplayModeProperties2KHR(
+ m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename DisplayModeProperties2KHRAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type
PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties;
- uint32_t propertyCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties;
+ uint32_t propertyCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR(
- m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
+ result = d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<Result>(
- d.vkGetDisplayModeProperties2KHR( m_physicalDevice,
- static_cast<VkDisplayKHR>( display ),
- &propertyCount,
- reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) );
+ result = d.vkGetDisplayModeProperties2KHR(
+ m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
template <typename DisplayModeProperties2KHRAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, DisplayModeProperties2KHR>::value, int>::type>
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, DisplayModeProperties2KHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type
- PhysicalDevice::getDisplayModeProperties2KHR(
- VULKAN_HPP_NAMESPACE::DisplayKHR display,
- DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator,
- Dispatch const & d ) const
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type
+ PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties(
- displayModeProperties2KHRAllocator );
- uint32_t propertyCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties( displayModeProperties2KHRAllocator );
+ uint32_t propertyCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR(
- m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
+ result = d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<Result>(
- d.vkGetDisplayModeProperties2KHR( m_physicalDevice,
- static_cast<VkDisplayKHR>( display ),
- &propertyCount,
- reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) );
+ result = d.vkGetDisplayModeProperties2KHR(
+ m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilities2KHR(
- const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo,
- VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo,
+ VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice,
- reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( pDisplayPlaneInfo ),
- reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( pCapabilities ) ) );
+ return static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice,
+ reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( pDisplayPlaneInfo ),
+ reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( pCapabilities ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type
- PhysicalDevice::getDisplayPlaneCapabilities2KHR(
- const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type
+ PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities;
- Result result = static_cast<Result>(
- d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice,
- reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( &displayPlaneInfo ),
- reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( &capabilities ) ) );
- return createResultValue(
- result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" );
+ VkResult result = d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice,
+ reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( &displayPlaneInfo ),
+ reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( &capabilities ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), capabilities );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -13848,58 +13101,58 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_MVK_ios_surface ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateIOSSurfaceMVK( m_instance,
- reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+ return static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance,
+ reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateIOSSurfaceMVK( m_instance,
reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVK" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVK" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- Instance::createIOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ Instance::createIOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateIOSSurfaceMVK( m_instance,
reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -13909,58 +13162,58 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_MVK_macos_surface ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateMacOSSurfaceMVK( m_instance,
- reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+ return static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance,
+ reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateMacOSSurfaceMVK( m_instance,
reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVK" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVK" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- Instance::createMacOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ Instance::createMacOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateMacOSSurfaceMVK( m_instance,
reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -13969,52 +13222,52 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_debug_utils ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT(
- const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT(
- m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( pNameInfo ) ) );
+ return static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( pNameInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo,
- Dispatch const & d ) const
+ Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT(
- m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( &nameInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" );
+
+ VkResult result = d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( &nameInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT(
- const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( pTagInfo ) ) );
+ return static_cast<Result>( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( pTagInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo,
- Dispatch const & d ) const
+ Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>(
- d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( &tagInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" );
+
+ VkResult result = d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( &tagInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
@@ -14023,9 +13276,10 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -14039,7 +13293,7 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
@@ -14048,17 +13302,17 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
@@ -14066,11 +13320,11 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -14083,9 +13337,8 @@ namespace VULKAN_HPP_NAMESPACE
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
@@ -14093,11 +13346,11 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -14107,118 +13360,114 @@ namespace VULKAN_HPP_NAMESPACE
Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateDebugUtilsMessengerEXT( m_instance,
- reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkDebugUtilsMessengerEXT *>( pMessenger ) ) );
+ return static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance,
+ reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkDebugUtilsMessengerEXT *>( pMessenger ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type
- Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type
+ Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
- Result result = static_cast<Result>(
- d.vkCreateDebugUtilsMessengerEXT( m_instance,
- reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) ) );
- return createResultValue(
- result, messenger, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXT" );
+ VkResult result = d.vkCreateDebugUtilsMessengerEXT(
+ m_instance,
+ reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), messenger );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>>::type
- Instance::createDebugUtilsMessengerEXTUnique(
- const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>>::type
+ Instance::createDebugUtilsMessengerEXTUnique( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
- Result result = static_cast<Result>(
- d.vkCreateDebugUtilsMessengerEXT( m_instance,
- reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>(
- result, messenger, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique", deleter );
+ VkResult result = d.vkCreateDebugUtilsMessengerEXT(
+ m_instance,
+ reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>( messenger, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyDebugUtilsMessengerEXT( m_instance,
- static_cast<VkDebugUtilsMessengerEXT>( messenger ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyDebugUtilsMessengerEXT(
+ m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyDebugUtilsMessengerEXT(
m_instance,
static_cast<VkDebugUtilsMessengerEXT>( messenger ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyDebugUtilsMessengerEXT( m_instance,
- static_cast<VkDebugUtilsMessengerEXT>( messenger ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyDebugUtilsMessengerEXT(
+ m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyDebugUtilsMessengerEXT(
m_instance,
static_cast<VkDebugUtilsMessengerEXT>( messenger ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT(
- VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
- VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,
- const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
+ VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,
+ const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkSubmitDebugUtilsMessageEXT( m_instance,
@@ -14229,13 +13478,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
- VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,
- const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
+ VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,
+ const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkSubmitDebugUtilsMessageEXT( m_instance,
static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
@@ -14247,68 +13496,72 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_ANDROID_external_memory_android_hardware_buffer ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAndroidHardwareBufferPropertiesANDROID(
- const struct AHardwareBuffer * buffer,
- VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer * buffer,
+ VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID(
- m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( pProperties ) ) );
+ return static_cast<Result>(
+ d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( pProperties ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type
- Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type
+ Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties;
- Result result = static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID(
- m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) );
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
+ VkResult result =
+ d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
- Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
+ Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties =
structureChain.template get<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>();
- Result result = static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID(
- m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) );
- return createResultValue(
- result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
+ VkResult result =
+ d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryAndroidHardwareBufferANDROID(
- const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo,
- struct AHardwareBuffer ** pBuffer,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo,
+ struct AHardwareBuffer ** pBuffer,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID(
- m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( pInfo ), pBuffer ) );
+ return static_cast<Result>(
+ d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( pInfo ), pBuffer ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<struct AHardwareBuffer *>::type
- Device::getMemoryAndroidHardwareBufferANDROID(
- const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<struct AHardwareBuffer *>::type
+ Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
struct AHardwareBuffer * buffer;
- Result result = static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID(
- m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( &info ), &buffer ) );
- return createResultValue(
- result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" );
+ VkResult result =
+ d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( &info ), &buffer );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), buffer );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
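A hedged sketch of how the StructureChain overload rewritten above is typically used (Android-only, not part of this diff; queryAhbMemoryTypeBits and hwBuffer are illustrative names, and VK_ANDROID_external_memory_android_hardware_buffer is assumed to be enabled on the device):

#include <vulkan/vulkan.hpp>

#if defined( VK_USE_PLATFORM_ANDROID_KHR )
// Query the base properties and the chained format properties in one call.
uint32_t queryAhbMemoryTypeBits( vk::Device device, const AHardwareBuffer & hwBuffer )
{
  auto chain = device.getAndroidHardwareBufferPropertiesANDROID<vk::AndroidHardwareBufferPropertiesANDROID,
                                                                vk::AndroidHardwareBufferFormatPropertiesANDROID>( hwBuffer );
  // resultCheck() has already thrown on failure, so both structures are populated here.
  return chain.get<vk::AndroidHardwareBufferPropertiesANDROID>().memoryTypeBits;
}
#endif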
@@ -14316,52 +13569,45 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_sample_locations ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetSampleLocationsEXT( m_commandBuffer,
- reinterpret_cast<const VkSampleLocationsInfoEXT *>( pSampleLocationsInfo ) );
+ d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( pSampleLocationsInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetSampleLocationsEXT( m_commandBuffer,
- reinterpret_cast<const VkSampleLocationsInfoEXT *>( &sampleLocationsInfo ) );
+
+ d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( &sampleLocationsInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT(
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
- VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+ VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkGetPhysicalDeviceMultisamplePropertiesEXT(
- m_physicalDevice,
- static_cast<VkSampleCountFlagBits>( samples ),
- reinterpret_cast<VkMultisamplePropertiesEXT *>( pMultisampleProperties ) );
+ m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( pMultisampleProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT
- PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties;
d.vkGetPhysicalDeviceMultisamplePropertiesEXT(
- m_physicalDevice,
- static_cast<VkSampleCountFlagBits>( samples ),
- reinterpret_cast<VkMultisamplePropertiesEXT *>( &multisampleProperties ) );
+ m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( &multisampleProperties ) );
+
return multisampleProperties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -14369,155 +13615,148 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_get_memory_requirements2 ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetImageMemoryRequirements2KHR( m_device,
- reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ),
- reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+ d.vkGetImageMemoryRequirements2KHR(
+ m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
- Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
- d.vkGetImageMemoryRequirements2KHR( m_device,
- reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ d.vkGetImageMemoryRequirements2KHR(
+ m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return memoryRequirements;
}
template <typename X, typename Y, typename... Z, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
- structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
- d.vkGetImageMemoryRequirements2KHR( m_device,
- reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+ d.vkGetImageMemoryRequirements2KHR(
+ m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetBufferMemoryRequirements2KHR( m_device,
- reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ),
- reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+ d.vkGetBufferMemoryRequirements2KHR(
+ m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
- Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
- d.vkGetBufferMemoryRequirements2KHR( m_device,
- reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ d.vkGetBufferMemoryRequirements2KHR(
+ m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return memoryRequirements;
}
template <typename X, typename Y, typename... Z, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
- structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
- d.vkGetBufferMemoryRequirements2KHR( m_device,
- reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+ d.vkGetBufferMemoryRequirements2KHR(
+ m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR(
- const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,
- uint32_t * pSparseMemoryRequirementCount,
- VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,
+ uint32_t * pSparseMemoryRequirementCount,
+ VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetImageSparseMemoryRequirements2KHR(
- m_device,
- reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ),
- pSparseMemoryRequirementCount,
- reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
+ d.vkGetImageSparseMemoryRequirements2KHR( m_device,
+ reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ),
+ pSparseMemoryRequirementCount,
+ reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
- VULKAN_HPP_NODISCARD
- VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
- Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
+ Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
- uint32_t sparseMemoryRequirementCount;
+
+ std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
+ uint32_t sparseMemoryRequirementCount;
+ d.vkGetImageSparseMemoryRequirements2KHR(
+ m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
d.vkGetImageSparseMemoryRequirements2KHR( m_device,
reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
&sparseMemoryRequirementCount,
- nullptr );
- sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
- d.vkGetImageSparseMemoryRequirements2KHR(
- m_device,
- reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
- &sparseMemoryRequirementCount,
- reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+ reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+
VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+ if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+ {
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+ }
return sparseMemoryRequirements;
}
- template <
- typename SparseImageMemoryRequirements2Allocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value, int>::type>
- VULKAN_HPP_NODISCARD
- VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
- Device::getImageSparseMemoryRequirements2KHR(
- const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,
- SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
- Dispatch const & d ) const
+ template <typename SparseImageMemoryRequirements2Allocator,
+ typename Dispatch,
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements2>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
+ Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,
+ SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
+
+ std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
sparseImageMemoryRequirements2Allocator );
uint32_t sparseMemoryRequirementCount;
+ d.vkGetImageSparseMemoryRequirements2KHR(
+ m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
d.vkGetImageSparseMemoryRequirements2KHR( m_device,
reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
&sparseMemoryRequirementCount,
- nullptr );
- sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
- d.vkGetImageSparseMemoryRequirements2KHR(
- m_device,
- reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
- &sparseMemoryRequirementCount,
- reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+ reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+
VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+ if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+ {
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+ }
return sparseMemoryRequirements;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
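For reference, a call-site sketch of the count/resize/fill/shrink enumeration pattern the regenerated code above now uses internally (not part of this diff; querySparseRequirements is an illustrative name, and the device is assumed to support VK_KHR_get_memory_requirements2):

#include <vector>
#include <vulkan/vulkan.hpp>

// Sketch only: the wrapper performs both vkGetImageSparseMemoryRequirements2KHR calls
// (count query, then fill) and trims the vector if the count shrank in between.
std::vector<vk::SparseImageMemoryRequirements2> querySparseRequirements( vk::Device device, vk::Image image )
{
  vk::ImageSparseMemoryRequirementsInfo2 info( image );
  return device.getImageSparseMemoryRequirements2KHR( info );
}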
@@ -14525,141 +13764,132 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_acceleration_structure ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createAccelerationStructureKHR(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateAccelerationStructureKHR( m_device,
- reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkAccelerationStructureKHR *>( pAccelerationStructure ) ) );
+ return static_cast<Result>( d.vkCreateAccelerationStructureKHR( m_device,
+ reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkAccelerationStructureKHR *>( pAccelerationStructure ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::type
- Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::type
+ Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure;
- Result result = static_cast<Result>( d.vkCreateAccelerationStructureKHR(
+ VkResult result = d.vkCreateAccelerationStructureKHR(
m_device,
reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) ) );
- return createResultValue(
- result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHR" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), accelerationStructure );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>>::type
- Device::createAccelerationStructureKHRUnique(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>>::type
+ Device::createAccelerationStructureKHRUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure;
- Result result = static_cast<Result>( d.vkCreateAccelerationStructureKHR(
+ VkResult result = d.vkCreateAccelerationStructureKHR(
m_device,
reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>(
- result,
- accelerationStructure,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHRUnique",
- deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHRUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>( accelerationStructure, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyAccelerationStructureKHR( m_device,
- static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyAccelerationStructureKHR(
+ m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyAccelerationStructureKHR(
m_device,
static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyAccelerationStructureKHR( m_device,
- static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyAccelerationStructureKHR(
+ m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyAccelerationStructureKHR(
m_device,
static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR(
- uint32_t infoCount,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::buildAccelerationStructuresKHR( uint32_t infoCount,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBuildAccelerationStructuresKHR(
- m_commandBuffer,
- infoCount,
- reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
- reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) );
+ d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer,
+ infoCount,
+ reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
+ reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
@@ -14668,45 +13898,41 @@ namespace VULKAN_HPP_NAMESPACE
# else
if ( infos.size() != pBuildRangeInfos.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
- d.vkCmdBuildAccelerationStructuresKHR(
- m_commandBuffer,
- infos.size(),
- reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
- reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) );
+ d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer,
+ infos.size(),
+ reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
+ reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR(
- uint32_t infoCount,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
- const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses,
- const uint32_t * pIndirectStrides,
- const uint32_t * const * ppMaxPrimitiveCounts,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( uint32_t infoCount,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
+ const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses,
+ const uint32_t * pIndirectStrides,
+ const uint32_t * const * ppMaxPrimitiveCounts,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBuildAccelerationStructuresIndirectKHR(
- m_commandBuffer,
- infoCount,
- reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
- reinterpret_cast<const VkDeviceAddress *>( pIndirectDeviceAddresses ),
- pIndirectStrides,
- ppMaxPrimitiveCounts );
+ d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer,
+ infoCount,
+ reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
+ reinterpret_cast<const VkDeviceAddress *>( pIndirectDeviceAddresses ),
+ pIndirectStrides,
+ ppMaxPrimitiveCounts );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses,
- ArrayProxy<const uint32_t> const & indirectStrides,
- ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & indirectStrides,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
@@ -14717,58 +13943,51 @@ namespace VULKAN_HPP_NAMESPACE
# else
if ( infos.size() != indirectDeviceAddresses.size() )
{
- throw LogicError(
- VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" );
}
if ( infos.size() != indirectStrides.size() )
{
- throw LogicError(
- VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" );
}
if ( infos.size() != pMaxPrimitiveCounts.size() )
{
- throw LogicError(
- VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
- d.vkCmdBuildAccelerationStructuresIndirectKHR(
- m_commandBuffer,
- infos.size(),
- reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
- reinterpret_cast<const VkDeviceAddress *>( indirectDeviceAddresses.data() ),
- indirectStrides.data(),
- pMaxPrimitiveCounts.data() );
+ d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer,
+ infos.size(),
+ reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
+ reinterpret_cast<const VkDeviceAddress *>( indirectDeviceAddresses.data() ),
+ indirectStrides.data(),
+ pMaxPrimitiveCounts.data() );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildAccelerationStructuresKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- uint32_t infoCount,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ uint32_t infoCount,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkBuildAccelerationStructuresKHR(
- m_device,
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- infoCount,
- reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
- reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) ) );
+ return static_cast<Result>(
+ d.vkBuildAccelerationStructuresKHR( m_device,
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ infoCount,
+ reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
+ reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildAccelerationStructuresKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::buildAccelerationStructuresKHR(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
@@ -14776,399 +13995,355 @@ namespace VULKAN_HPP_NAMESPACE
# else
if ( infos.size() != pBuildRangeInfos.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
- Result result = static_cast<Result>( d.vkBuildAccelerationStructuresKHR(
- m_device,
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- infos.size(),
- reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
- reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) ) );
- return createResultValue( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess,
- VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+ VkResult result =
+ d.vkBuildAccelerationStructuresKHR( m_device,
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ infos.size(),
+ reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
+ reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) );
+ resultCheck(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCopyAccelerationStructureKHR( m_device,
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) ) );
+ return static_cast<Result>( d.vkCopyAccelerationStructureKHR(
+ m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
+ Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>(
- d.vkCopyAccelerationStructureKHR( m_device,
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) ) );
- return createResultValue( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess,
- VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+ VkResult result = d.vkCopyAccelerationStructureKHR(
+ m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
+ resultCheck(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureToMemoryKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR(
- m_device,
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) ) );
+ m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureToMemoryKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
+ Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR(
- m_device,
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) ) );
- return createResultValue( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess,
- VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+ VkResult result = d.vkCopyAccelerationStructureToMemoryKHR(
+ m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
+ resultCheck(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToAccelerationStructureKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR(
- m_device,
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) ) );
+ m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToAccelerationStructureKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
+ Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR(
- m_device,
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) ) );
- return createResultValue( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess,
- VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+ VkResult result = d.vkCopyMemoryToAccelerationStructureKHR(
+ m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
+ resultCheck(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::writeAccelerationStructuresPropertiesKHR(
- uint32_t accelerationStructureCount,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- size_t dataSize,
- void * pData,
- size_t stride,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t dataSize,
+ void * pData,
+ size_t stride,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR(
- m_device,
- accelerationStructureCount,
- reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ),
- static_cast<VkQueryType>( queryType ),
- dataSize,
- pData,
- stride ) );
+ return static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR( m_device,
+ accelerationStructureCount,
+ reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ),
+ static_cast<VkQueryType>( queryType ),
+ dataSize,
+ pData,
+ stride ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T, typename Dispatch>
- VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<void>::type Device::writeAccelerationStructuresPropertiesKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- ArrayProxy<T> const & data,
- size_t stride,
- Dispatch const & d ) const
- {
- VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR(
- m_device,
- accelerationStructures.size(),
- reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
- static_cast<VkQueryType>( queryType ),
- data.size() * sizeof( T ),
- reinterpret_cast<void *>( data.data() ),
- stride ) );
- return createResultValue( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" );
- }
-
- template <typename DataType, typename Allocator, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DataType, Allocator>>::type
- Device::writeAccelerationStructuresPropertiesKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- size_t dataSize,
- size_t stride,
- Dispatch const & d ) const
+ template <typename DataType, typename DataTypeAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type
+ Device::writeAccelerationStructuresPropertiesKHR(
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t dataSize,
+ size_t stride,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
- std::vector<DataType, Allocator> data( dataSize / sizeof( DataType ) );
- Result result = static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR(
- m_device,
- accelerationStructures.size(),
- reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
- static_cast<VkQueryType>( queryType ),
- data.size() * sizeof( DataType ),
- reinterpret_cast<void *>( data.data() ),
- stride ) );
- return createResultValue(
- result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" );
+ std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
+ VkResult result = d.vkWriteAccelerationStructuresPropertiesKHR( m_device,
+ accelerationStructures.size(),
+ reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
+ static_cast<VkQueryType>( queryType ),
+ data.size() * sizeof( DataType ),
+ reinterpret_cast<void *>( data.data() ),
+ stride );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
template <typename DataType, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
- Device::writeAccelerationStructuresPropertyKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- size_t stride,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::writeAccelerationStructuresPropertyKHR(
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t stride,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
DataType data;
- Result result = static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR(
- m_device,
- accelerationStructures.size(),
- reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
- static_cast<VkQueryType>( queryType ),
- sizeof( DataType ),
- reinterpret_cast<void *>( &data ),
- stride ) );
- return createResultValue(
- result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" );
+ VkResult result = d.vkWriteAccelerationStructuresPropertiesKHR( m_device,
+ accelerationStructures.size(),
+ reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
+ static_cast<VkQueryType>( queryType ),
+ sizeof( DataType ),
+ reinterpret_cast<void *>( &data ),
+ stride );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer,
- reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) );
+ d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer,
- reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
+
+ d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR(
- const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyAccelerationStructureToMemoryKHR(
- m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) );
+ d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR(
- const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyAccelerationStructureToMemoryKHR(
- m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
+
+ d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR(
- const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyMemoryToAccelerationStructureKHR(
- m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) );
+ d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR(
- const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyMemoryToAccelerationStructureKHR(
- m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
+
+ d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<DeviceAddress>( d.vkGetAccelerationStructureDeviceAddressKHR(
- m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( pInfo ) ) );
+ return static_cast<DeviceAddress>(
+ d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress
+ Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return d.vkGetAccelerationStructureDeviceAddressKHR(
- m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( &info ) );
+
+ VkDeviceAddress result =
+ d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( &info ) );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR(
- uint32_t accelerationStructureCount,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdWriteAccelerationStructuresPropertiesKHR(
- m_commandBuffer,
- accelerationStructureCount,
- reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ),
- static_cast<VkQueryType>( queryType ),
- static_cast<VkQueryPool>( queryPool ),
- firstQuery );
+ d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer,
+ accelerationStructureCount,
+ reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ),
+ static_cast<VkQueryType>( queryType ),
+ static_cast<VkQueryPool>( queryPool ),
+ firstQuery );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdWriteAccelerationStructuresPropertiesKHR(
- m_commandBuffer,
- accelerationStructures.size(),
- reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
- static_cast<VkQueryType>( queryType ),
- static_cast<VkQueryPool>( queryPool ),
- firstQuery );
+
+ d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer,
+ accelerationStructures.size(),
+ reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
+ static_cast<VkQueryType>( queryType ),
+ static_cast<VkQueryPool>( queryPool ),
+ firstQuery );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::getAccelerationStructureCompatibilityKHR(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo,
- VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDeviceAccelerationStructureCompatibilityKHR(
- m_device,
- reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( pVersionInfo ),
- reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) );
+ d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device,
+ reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( pVersionInfo ),
+ reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
- Device::getAccelerationStructureCompatibilityKHR(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility;
- d.vkGetDeviceAccelerationStructureCompatibilityKHR(
- m_device,
- reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( &versionInfo ),
- reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
+ d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device,
+ reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( &versionInfo ),
+ reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
+
return compatibility;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::getAccelerationStructureBuildSizesKHR(
- VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo,
- const uint32_t * pMaxPrimitiveCounts,
- VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo,
+ const uint32_t * pMaxPrimitiveCounts,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetAccelerationStructureBuildSizesKHR(
- m_device,
- static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
- reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pBuildInfo ),
- pMaxPrimitiveCounts,
- reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( pSizeInfo ) );
+ d.vkGetAccelerationStructureBuildSizesKHR( m_device,
+ static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
+ reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pBuildInfo ),
+ pMaxPrimitiveCounts,
+ reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( pSizeInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR
- Device::getAccelerationStructureBuildSizesKHR(
- VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo,
- ArrayProxy<const uint32_t> const & maxPrimitiveCounts,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & maxPrimitiveCounts,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
@@ -15176,18 +14351,17 @@ namespace VULKAN_HPP_NAMESPACE
# else
if ( maxPrimitiveCounts.size() != buildInfo.geometryCount )
{
- throw LogicError(
- VULKAN_HPP_NAMESPACE_STRING
- "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+
VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo;
- d.vkGetAccelerationStructureBuildSizesKHR(
- m_device,
- static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
- reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( &buildInfo ),
- maxPrimitiveCounts.data(),
- reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( &sizeInfo ) );
+ d.vkGetAccelerationStructureBuildSizesKHR( m_device,
+ static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
+ reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( &buildInfo ),
+ maxPrimitiveCounts.data(),
+ reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( &sizeInfo ) );
+
return sizeInfo;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -15198,136 +14372,131 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateSamplerYcbcrConversionKHR( m_device,
- reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
+ return static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device,
+ reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
- Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
+ Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
- Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR(
+ VkResult result = d.vkCreateSamplerYcbcrConversionKHR(
m_device,
reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
- return createResultValue(
- result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHR" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), ycbcrConversion );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
- Device::createSamplerYcbcrConversionKHRUnique(
- const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
+ Device::createSamplerYcbcrConversionKHRUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
- Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR(
+ VkResult result = d.vkCreateSamplerYcbcrConversionKHR(
m_device,
reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>(
- result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHRUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHRUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>( ycbcrConversion, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroySamplerYcbcrConversionKHR( m_device,
- static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroySamplerYcbcrConversionKHR(
+ m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroySamplerYcbcrConversionKHR(
m_device,
static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_bind_memory2 ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::bindBufferMemory2KHR( uint32_t bindInfoCount,
- const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t bindInfoCount,
+ const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkBindBufferMemory2KHR(
- m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
+ return static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::bindBufferMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,
- Dispatch const & d ) const
+ Device::bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkBindBufferMemory2KHR(
- m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" );
+
+ VkResult result = d.vkBindBufferMemory2KHR( m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::bindImageMemory2KHR( uint32_t bindInfoCount,
- const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t bindInfoCount,
+ const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkBindImageMemory2KHR(
- m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
+ return static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::bindImageMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos,
- Dispatch const & d ) const
+ Device::bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkBindImageMemory2KHR(
- m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" );
+
+ VkResult result = d.vkBindImageMemory2KHR( m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -15335,258 +14504,234 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT(
- VULKAN_HPP_NAMESPACE::Image image,
- VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT(
- m_device,
- static_cast<VkImage>( image ),
- reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( pProperties ) ) );
+ m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type
- Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type
+ Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties;
- Result result = static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT(
- m_device,
- static_cast<VkImage>( image ),
- reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( &properties ) ) );
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageDrmFormatModifierPropertiesEXT" );
+ VkResult result = d.vkGetImageDrmFormatModifierPropertiesEXT(
+ m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( &properties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageDrmFormatModifierPropertiesEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_EXT_validation_cache ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateValidationCacheEXT( m_device,
- reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkValidationCacheEXT *>( pValidationCache ) ) );
+ return static_cast<Result>( d.vkCreateValidationCacheEXT( m_device,
+ reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkValidationCacheEXT *>( pValidationCache ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type
- Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type
+ Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
- Result result = static_cast<Result>(
- d.vkCreateValidationCacheEXT( m_device,
- reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) ) );
- return createResultValue(
- result, validationCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXT" );
+ VkResult result = d.vkCreateValidationCacheEXT(
+ m_device,
+ reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), validationCache );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>>::type
- Device::createValidationCacheEXTUnique( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>>::type
+ Device::createValidationCacheEXTUnique( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
- Result result = static_cast<Result>(
- d.vkCreateValidationCacheEXT( m_device,
- reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>(
- result, validationCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXTUnique", deleter );
+ VkResult result = d.vkCreateValidationCacheEXT(
+ m_device,
+ reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXTUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>( validationCache, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyValidationCacheEXT( m_device,
- static_cast<VkValidationCacheEXT>( validationCache ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyValidationCacheEXT(
+ m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyValidationCacheEXT( m_device,
- static_cast<VkValidationCacheEXT>( validationCache ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+
+ d.vkDestroyValidationCacheEXT(
+ m_device,
+ static_cast<VkValidationCacheEXT>( validationCache ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyValidationCacheEXT( m_device,
- static_cast<VkValidationCacheEXT>( validationCache ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyValidationCacheEXT(
+ m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyValidationCacheEXT( m_device,
- static_cast<VkValidationCacheEXT>( validationCache ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+
+ d.vkDestroyValidationCacheEXT(
+ m_device,
+ static_cast<VkValidationCacheEXT>( validationCache ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache,
- uint32_t srcCacheCount,
- const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache,
+ uint32_t srcCacheCount,
+ const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkMergeValidationCachesEXT( m_device,
- static_cast<VkValidationCacheEXT>( dstCache ),
- srcCacheCount,
- reinterpret_cast<const VkValidationCacheEXT *>( pSrcCaches ) ) );
+ return static_cast<Result>( d.vkMergeValidationCachesEXT(
+ m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCacheCount, reinterpret_cast<const VkValidationCacheEXT *>( pSrcCaches ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches,
- Dispatch const & d ) const
+ Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>(
- d.vkMergeValidationCachesEXT( m_device,
- static_cast<VkValidationCacheEXT>( dstCache ),
- srcCaches.size(),
- reinterpret_cast<const VkValidationCacheEXT *>( srcCaches.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergeValidationCachesEXT" );
+
+ VkResult result = d.vkMergeValidationCachesEXT(
+ m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCaches.size(), reinterpret_cast<const VkValidationCacheEXT *>( srcCaches.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mergeValidationCachesEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
- size_t * pDataSize,
- void * pData,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
+ size_t * pDataSize,
+ void * pData,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetValidationCacheDataEXT(
- m_device, static_cast<VkValidationCacheEXT>( validationCache ), pDataSize, pData ) );
+ return static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), pDataSize, pData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Uint8_tAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
- Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
- Dispatch const & d ) const
+ Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
std::vector<uint8_t, Uint8_tAllocator> data;
size_t dataSize;
- Result result;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetValidationCacheDataEXT(
- m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr ) );
- if ( ( result == Result::eSuccess ) && dataSize )
+ result = d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr );
+ if ( ( result == VK_SUCCESS ) && dataSize )
{
data.resize( dataSize );
result =
- static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device,
- static_cast<VkValidationCacheEXT>( validationCache ),
- &dataSize,
- reinterpret_cast<void *>( data.data() ) ) );
+ d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" );
+ VULKAN_HPP_ASSERT( dataSize <= data.size() );
+ if ( dataSize < data.size() )
{
- VULKAN_HPP_ASSERT( dataSize <= data.size() );
- if ( dataSize < data.size() )
- {
- data.resize( dataSize );
- }
+ data.resize( dataSize );
}
- return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
template <typename Uint8_tAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type>
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
- Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
- Uint8_tAllocator & uint8_tAllocator,
- Dispatch const & d ) const
+ Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
size_t dataSize;
- Result result;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetValidationCacheDataEXT(
- m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr ) );
- if ( ( result == Result::eSuccess ) && dataSize )
+ result = d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr );
+ if ( ( result == VK_SUCCESS ) && dataSize )
{
data.resize( dataSize );
result =
- static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device,
- static_cast<VkValidationCacheEXT>( validationCache ),
- &dataSize,
- reinterpret_cast<void *>( data.data() ) ) );
+ d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" );
+ VULKAN_HPP_ASSERT( dataSize <= data.size() );
+ if ( dataSize < data.size() )
{
- VULKAN_HPP_ASSERT( dataSize <= data.size() );
- if ( dataSize < data.size() )
- {
- data.resize( dataSize );
- }
+ data.resize( dataSize );
}
- return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
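The hunks above move the VK_EXT_validation_cache wrappers from the old createResultValue() helper to the resultCheck() / createResultValueType() pair and tag the value-returning enhanced-mode overloads VULKAN_HPP_NODISCARD. A minimal caller-side sketch of what that pattern means in practice, assuming exceptions are enabled (VULKAN_HPP_NO_EXCEPTIONS not defined), the default dispatcher has the VK_EXT_validation_cache entry points loaded, and 'device' and 'cache' are valid handles:

    #include <vulkan/vulkan.hpp>
    #include <vector>

    // Sketch only: 'device' and 'cache' are assumed to be valid handles.
    std::vector<uint8_t> dumpValidationCache( vk::Device device, vk::ValidationCacheEXT cache )
    {
        // In enhanced mode resultCheck() throws vk::SystemError on failure,
        // so the wrapper returns the payload directly rather than a Result.
        return device.getValidationCacheDataEXT( cache );
    }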
@@ -15595,49 +14740,42 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView,
VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBindShadingRateImageNV(
- m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
+ d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV(
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport,
+ uint32_t viewportCount,
+ const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer,
- firstViewport,
- viewportCount,
- reinterpret_cast<const VkShadingRatePaletteNV *>( pShadingRatePalettes ) );
+ d.vkCmdSetViewportShadingRatePaletteNV(
+ m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkShadingRatePaletteNV *>( pShadingRatePalettes ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV(
- uint32_t firstViewport,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ uint32_t firstViewport,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdSetViewportShadingRatePaletteNV(
- m_commandBuffer,
- firstViewport,
- shadingRatePalettes.size(),
- reinterpret_cast<const VkShadingRatePaletteNV *>( shadingRatePalettes.data() ) );
+ m_commandBuffer, firstViewport, shadingRatePalettes.size(), reinterpret_cast<const VkShadingRatePaletteNV *>( shadingRatePalettes.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
- uint32_t customSampleOrderCount,
- const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
+ uint32_t customSampleOrderCount,
+ const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer,
@@ -15648,12 +14786,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV(
- VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer,
static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ),
customSampleOrders.size(),
@@ -15667,202 +14806,189 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateAccelerationStructureNV( m_device,
- reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkAccelerationStructureNV *>( pAccelerationStructure ) ) );
+ return static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device,
+ reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkAccelerationStructureNV *>( pAccelerationStructure ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type
- Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type
+ Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
- Result result = static_cast<Result>( d.vkCreateAccelerationStructureNV(
+ VkResult result = d.vkCreateAccelerationStructureNV(
m_device,
reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) ) );
- return createResultValue(
- result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNV" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNV" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), accelerationStructure );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>>::type
- Device::createAccelerationStructureNVUnique(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>>::type
+ Device::createAccelerationStructureNVUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
- Result result = static_cast<Result>( d.vkCreateAccelerationStructureNV(
+ VkResult result = d.vkCreateAccelerationStructureNV(
m_device,
reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>(
- result,
- accelerationStructure,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNVUnique",
- deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNVUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>( accelerationStructure, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyAccelerationStructureNV( m_device,
- static_cast<VkAccelerationStructureNV>( accelerationStructure ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyAccelerationStructureNV(
+ m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyAccelerationStructureNV(
m_device,
static_cast<VkAccelerationStructureNV>( accelerationStructure ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyAccelerationStructureNV( m_device,
- static_cast<VkAccelerationStructureNV>( accelerationStructure ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyAccelerationStructureNV(
+ m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyAccelerationStructureNV(
m_device,
static_cast<VkAccelerationStructureNV>( accelerationStructure ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsNV(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo,
- VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetAccelerationStructureMemoryRequirementsNV(
- m_device,
- reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( pInfo ),
- reinterpret_cast<VkMemoryRequirements2KHR *>( pMemoryRequirements ) );
+ d.vkGetAccelerationStructureMemoryRequirementsNV( m_device,
+ reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( pInfo ),
+ reinterpret_cast<VkMemoryRequirements2KHR *>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR
- Device::getAccelerationStructureMemoryRequirementsNV(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements;
- d.vkGetAccelerationStructureMemoryRequirementsNV(
- m_device,
- reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
- reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
+ d.vkGetAccelerationStructureMemoryRequirementsNV( m_device,
+ reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
+
return memoryRequirements;
}
template <typename X, typename Y, typename... Z, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- Device::getAccelerationStructureMemoryRequirementsNV(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements =
- structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR>();
- d.vkGetAccelerationStructureMemoryRequirementsNV(
- m_device,
- reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
- reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR>();
+ d.vkGetAccelerationStructureMemoryRequirementsNV( m_device,
+ reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
+
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV(
- uint32_t bindInfoCount,
- const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkBindAccelerationStructureMemoryNV(
- m_device, bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( pBindInfos ) ) );
+ return static_cast<Result>(
+ d.vkBindAccelerationStructureMemoryNV( m_device, bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( pBindInfos ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::bindAccelerationStructureMemoryNV(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindAccelerationStructureMemoryNV(
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkBindAccelerationStructureMemoryNV(
- m_device,
- bindInfos.size(),
- reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( bindInfos.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" );
+
+ VkResult result = d.vkBindAccelerationStructureMemoryNV(
+ m_device, bindInfos.size(), reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( bindInfos.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo,
- VULKAN_HPP_NAMESPACE::Buffer instanceData,
- VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,
- VULKAN_HPP_NAMESPACE::Bool32 update,
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
- VULKAN_HPP_NAMESPACE::Buffer scratch,
- VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo,
+ VULKAN_HPP_NAMESPACE::Buffer instanceData,
+ VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,
+ VULKAN_HPP_NAMESPACE::Bool32 update,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
+ VULKAN_HPP_NAMESPACE::Buffer scratch,
+ VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdBuildAccelerationStructureNV( m_commandBuffer,
@@ -15878,18 +15004,18 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info,
- VULKAN_HPP_NAMESPACE::Buffer instanceData,
- VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,
- VULKAN_HPP_NAMESPACE::Bool32 update,
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
- VULKAN_HPP_NAMESPACE::Buffer scratch,
- VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info,
+ VULKAN_HPP_NAMESPACE::Buffer instanceData,
+ VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,
+ VULKAN_HPP_NAMESPACE::Bool32 update,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
+ VULKAN_HPP_NAMESPACE::Buffer scratch,
+ VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdBuildAccelerationStructureNV( m_commandBuffer,
reinterpret_cast<const VkAccelerationStructureInfoNV *>( &info ),
static_cast<VkBuffer>( instanceData ),
@@ -15903,11 +15029,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
- VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
+ VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdCopyAccelerationStructureNV( m_commandBuffer,
@@ -15952,393 +15077,322 @@ namespace VULKAN_HPP_NAMESPACE
}
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ uint32_t createInfoCount,
+ const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateRayTracingPipelinesNV( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfoCount,
- reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( pCreateInfos ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkPipeline *>( pPipelines ) ) );
+ return static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfoCount,
+ reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( pCreateInfos ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkPipeline *>( pPipelines ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename PipelineAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
- Device::createRayTracingPipelinesNV(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
- Result result = static_cast<Result>(
- d.vkCreateRayTracingPipelinesNV( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfos.size(),
- reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
- return createResultValue(
- result,
- pipelines,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ VkResult result = d.vkCreateRayTracingPipelinesNV(
+ m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+
+ return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
}
template <typename PipelineAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type>
+ typename B0,
+ typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
- Device::createRayTracingPipelinesNV(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d ) const
+ Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
- Result result = static_cast<Result>(
- d.vkCreateRayTracingPipelinesNV( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfos.size(),
- reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
- return createResultValue(
- result,
- pipelines,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ VkResult result = d.vkCreateRayTracingPipelinesNV(
+ m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+
+ return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
}
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
- Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Pipeline pipeline;
- Result result = static_cast<Result>(
- d.vkCreateRayTracingPipelinesNV( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- 1,
- reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( &pipeline ) ) );
- return createResultValue(
- result,
- pipeline,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNV",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ VkResult result = d.vkCreateRayTracingPipelinesNV(
+ m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ 1,
+ reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( &pipeline ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNV",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+
+ return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch, typename PipelineAllocator>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- Device::createRayTracingPipelinesNVUnique(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
- std::vector<Pipeline> pipelines( createInfos.size() );
- Result result = static_cast<Result>(
- d.vkCreateRayTracingPipelinesNV( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfos.size(),
- reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
+ Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
+ VkResult result = d.vkCreateRayTracingPipelinesNV(
+ m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
+ uniquePipelines.reserve( createInfos.size() );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ for ( auto const & pipeline : pipelines )
{
- uniquePipelines.reserve( createInfos.size() );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- for ( size_t i = 0; i < createInfos.size(); i++ )
- {
- uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
- }
+ uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
}
- return createResultValue(
- result,
- std::move( uniquePipelines ),
- VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
- }
-
- template <
- typename Dispatch,
- typename PipelineAllocator,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- Device::createRayTracingPipelinesNVUnique(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d ) const
- {
- VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
- std::vector<Pipeline> pipelines( createInfos.size() );
- Result result = static_cast<Result>(
- d.vkCreateRayTracingPipelinesNV( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfos.size(),
- reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+ return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
+ }
+
+ template <typename Dispatch,
+ typename PipelineAllocator,
+ typename B0,
+ typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
+ Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
+ VkResult result = d.vkCreateRayTracingPipelinesNV(
+ m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfos.size(),
+ reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
+ uniquePipelines.reserve( createInfos.size() );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ for ( auto const & pipeline : pipelines )
{
- uniquePipelines.reserve( createInfos.size() );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- for ( size_t i = 0; i < createInfos.size(); i++ )
- {
- uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
- }
+ uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
}
- return createResultValue(
- result,
- std::move( uniquePipelines ),
- VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
}
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>>
- Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>
+ Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Pipeline pipeline;
- Result result = static_cast<Result>(
- d.vkCreateRayTracingPipelinesNV( m_device,
- static_cast<VkPipelineCache>( pipelineCache ),
- 1,
- reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( &pipeline ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<Pipeline, Dispatch>(
- result,
- pipeline,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNVUnique",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT },
- deleter );
+
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline;
+ VkResult result = d.vkCreateRayTracingPipelinesNV(
+ m_device,
+ static_cast<VkPipelineCache>( pipelineCache ),
+ 1,
+ reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( &pipeline ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNVUnique",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+
+ return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
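For the ray-tracing pipeline wrappers above, ePipelineCompileRequiredEXT remains an accepted success code, so the rewritten functions hand back a ResultValue pair instead of throwing for that case. A small usage sketch, assuming exceptions are enabled and that 'device', 'pipelineCache' and 'createInfo' are valid; usePipeline() is a hypothetical caller-side helper:

    // Sketch only: both eSuccess and ePipelineCompileRequiredEXT pass resultCheck(),
    // so the caller still has to inspect the returned Result before using the value.
    vk::ResultValue<vk::Pipeline> rv = device.createRayTracingPipelineNV( pipelineCache, createInfo );
    if ( rv.result == vk::Result::eSuccess )
    {
        usePipeline( rv.value );
    }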
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- void * pData,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t firstGroup,
+ uint32_t groupCount,
+ size_t dataSize,
+ void * pData,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV(
- m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
+ return static_cast<Result>(
+ d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T, typename Dispatch>
- VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<void>::type Device::getRayTracingShaderGroupHandlesNV(
- VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- ArrayProxy<T> const & data,
- Dispatch const & d ) const
+ template <typename DataType, typename DataTypeAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type Device::getRayTracingShaderGroupHandlesNV(
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result =
- static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device,
- static_cast<VkPipeline>( pipeline ),
- firstGroup,
- groupCount,
- data.size() * sizeof( T ),
- reinterpret_cast<void *>( data.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" );
- }
- template <typename DataType, typename Allocator, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DataType, Allocator>>::type
- Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- Dispatch const & d ) const
- {
- VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
- std::vector<DataType, Allocator> data( dataSize / sizeof( DataType ) );
- Result result =
- static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device,
- static_cast<VkPipeline>( pipeline ),
- firstGroup,
- groupCount,
- data.size() * sizeof( DataType ),
- reinterpret_cast<void *>( data.data() ) ) );
- return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" );
+ std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
+ VkResult result = d.vkGetRayTracingShaderGroupHandlesNV(
+ m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
template <typename DataType, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
- Device::getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
+ Device::getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
DataType data;
- Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device,
- static_cast<VkPipeline>( pipeline ),
- firstGroup,
- groupCount,
- sizeof( DataType ),
- reinterpret_cast<void *>( &data ) ) );
- return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleNV" );
+ VkResult result = d.vkGetRayTracingShaderGroupHandlesNV(
+ m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleNV" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
- size_t dataSize,
- void * pData,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
+ size_t dataSize,
+ void * pData,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetAccelerationStructureHandleNV(
- m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), dataSize, pData ) );
+ return static_cast<Result>(
+ d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), dataSize, pData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T, typename Dispatch>
- VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<void>::type Device::getAccelerationStructureHandleNV(
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
- ArrayProxy<T> const & data,
- Dispatch const & d ) const
+ template <typename DataType, typename DataTypeAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type
+ Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>(
- d.vkGetAccelerationStructureHandleNV( m_device,
- static_cast<VkAccelerationStructureNV>( accelerationStructure ),
- data.size() * sizeof( T ),
- reinterpret_cast<void *>( data.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
- }
- template <typename DataType, typename Allocator, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DataType, Allocator>>::type
- Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
- size_t dataSize,
- Dispatch const & d ) const
- {
- VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
- std::vector<DataType, Allocator> data( dataSize / sizeof( DataType ) );
- Result result = static_cast<Result>(
- d.vkGetAccelerationStructureHandleNV( m_device,
- static_cast<VkAccelerationStructureNV>( accelerationStructure ),
- data.size() * sizeof( DataType ),
- reinterpret_cast<void *>( data.data() ) ) );
- return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
+ std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
+ VkResult result = d.vkGetAccelerationStructureHandleNV(
+ m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
template <typename DataType, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
- Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
+ Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
DataType data;
- Result result = static_cast<Result>(
- d.vkGetAccelerationStructureHandleNV( m_device,
- static_cast<VkAccelerationStructureNV>( accelerationStructure ),
- sizeof( DataType ),
- reinterpret_cast<void *>( &data ) ) );
- return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
+ VkResult result = d.vkGetAccelerationStructureHandleNV(
+ m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), sizeof( DataType ), reinterpret_cast<void *>( &data ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV(
- uint32_t accelerationStructureCount,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdWriteAccelerationStructuresPropertiesNV(
- m_commandBuffer,
- accelerationStructureCount,
- reinterpret_cast<const VkAccelerationStructureNV *>( pAccelerationStructures ),
- static_cast<VkQueryType>( queryType ),
- static_cast<VkQueryPool>( queryPool ),
- firstQuery );
+ d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer,
+ accelerationStructureCount,
+ reinterpret_cast<const VkAccelerationStructureNV *>( pAccelerationStructures ),
+ static_cast<VkQueryType>( queryType ),
+ static_cast<VkQueryPool>( queryPool ),
+ firstQuery );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdWriteAccelerationStructuresPropertiesNV(
- m_commandBuffer,
- accelerationStructures.size(),
- reinterpret_cast<const VkAccelerationStructureNV *>( accelerationStructures.data() ),
- static_cast<VkQueryType>( queryType ),
- static_cast<VkQueryPool>( queryPool ),
- firstQuery );
+
+ d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer,
+ accelerationStructures.size(),
+ reinterpret_cast<const VkAccelerationStructureNV *>( accelerationStructures.data() ),
+ static_cast<VkQueryType>( queryType ),
+ static_cast<VkQueryPool>( queryPool ),
+ firstQuery );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::compileDeferredNV(
- VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t shader,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
@@ -16346,55 +15400,56 @@ namespace VULKAN_HPP_NAMESPACE
#else
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const
+ Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result =
- static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::compileDeferredNV" );
+
+ VkResult result = d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::compileDeferredNV" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_maintenance3 ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
- VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDescriptorSetLayoutSupportKHR( m_device,
- reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ),
- reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
+ d.vkGetDescriptorSetLayoutSupportKHR(
+ m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
- Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
- d.vkGetDescriptorSetLayoutSupportKHR( m_device,
- reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
- reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+ d.vkGetDescriptorSetLayoutSupportKHR(
+ m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+
return support;
}
template <typename X, typename Y, typename... Z, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support =
- structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
- d.vkGetDescriptorSetLayoutSupportKHR( m_device,
- reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
- reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
+ d.vkGetDescriptorSetLayoutSupportKHR(
+ m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -16408,7 +15463,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdDrawIndirectCountKHR( m_commandBuffer,
@@ -16427,7 +15482,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer,
@@ -16442,37 +15497,36 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_external_memory_host ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryHostPointerPropertiesEXT(
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- const void * pHostPointer,
- VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+ const void * pHostPointer,
+ VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT(
- m_device,
- static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
- pHostPointer,
- reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( pMemoryHostPointerProperties ) ) );
+ return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device,
+ static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
+ pHostPointer,
+ reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( pMemoryHostPointerProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type
- Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type
+ Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
const void * pHostPointer,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties;
- Result result = static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT(
- m_device,
- static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
- pHostPointer,
- reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( &memoryHostPointerProperties ) ) );
- return createResultValue(
- result, memoryHostPointerProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" );
+ VkResult result = d.vkGetMemoryHostPointerPropertiesEXT( m_device,
+ static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
+ pHostPointer,
+ reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( &memoryHostPointerProperties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryHostPointerProperties );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -16483,7 +15537,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
uint32_t marker,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdWriteBufferMarkerAMD( m_commandBuffer,
@@ -16496,178 +15550,150 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_calibrated_timestamps ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount,
- VULKAN_HPP_NAMESPACE::TimeDomainEXT * pTimeDomains,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount,
+ VULKAN_HPP_NAMESPACE::TimeDomainEXT * pTimeDomains,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
- m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( pTimeDomains ) ) );
+ return static_cast<Result>(
+ d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( pTimeDomains ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename TimeDomainEXTAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<TimeDomainEXT, TimeDomainEXTAllocator>>::type
- PhysicalDevice::getCalibrateableTimeDomainsEXT( Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT, TimeDomainEXTAllocator>>::type
+ PhysicalDevice::getCalibrateableTimeDomainsEXT( Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<TimeDomainEXT, TimeDomainEXTAllocator> timeDomains;
- uint32_t timeDomainCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT, TimeDomainEXTAllocator> timeDomains;
+ uint32_t timeDomainCount;
+ VkResult result;
do
{
- result = static_cast<Result>(
- d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && timeDomainCount )
+ result = d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && timeDomainCount )
{
timeDomains.resize( timeDomainCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
- m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) ) );
+ result =
+ d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
+ VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
+ if ( timeDomainCount < timeDomains.size() )
{
- VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
- if ( timeDomainCount < timeDomains.size() )
- {
- timeDomains.resize( timeDomainCount );
- }
+ timeDomains.resize( timeDomainCount );
}
- return createResultValue(
- result, timeDomains, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), timeDomains );
}
template <typename TimeDomainEXTAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, TimeDomainEXT>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<TimeDomainEXT, TimeDomainEXTAllocator>>::type
- PhysicalDevice::getCalibrateableTimeDomainsEXT( TimeDomainEXTAllocator & timeDomainEXTAllocator,
- Dispatch const & d ) const
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, TimeDomainEXT>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT, TimeDomainEXTAllocator>>::type
+ PhysicalDevice::getCalibrateableTimeDomainsEXT( TimeDomainEXTAllocator & timeDomainEXTAllocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<TimeDomainEXT, TimeDomainEXTAllocator> timeDomains( timeDomainEXTAllocator );
- uint32_t timeDomainCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT, TimeDomainEXTAllocator> timeDomains( timeDomainEXTAllocator );
+ uint32_t timeDomainCount;
+ VkResult result;
do
{
- result = static_cast<Result>(
- d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && timeDomainCount )
+ result = d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && timeDomainCount )
{
timeDomains.resize( timeDomainCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
- m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) ) );
+ result =
+ d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
+ VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
+ if ( timeDomainCount < timeDomains.size() )
{
- VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
- if ( timeDomainCount < timeDomains.size() )
- {
- timeDomains.resize( timeDomainCount );
- }
+ timeDomains.resize( timeDomainCount );
}
- return createResultValue(
- result, timeDomains, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), timeDomains );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getCalibratedTimestampsEXT( uint32_t timestampCount,
- const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT * pTimestampInfos,
- uint64_t * pTimestamps,
- uint64_t * pMaxDeviation,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsEXT( uint32_t timestampCount,
+ const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT * pTimestampInfos,
+ uint64_t * pTimestamps,
+ uint64_t * pMaxDeviation,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetCalibratedTimestampsEXT( m_device,
- timestampCount,
- reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( pTimestampInfos ),
- pTimestamps,
- pMaxDeviation ) );
+ return static_cast<Result>( d.vkGetCalibratedTimestampsEXT(
+ m_device, timestampCount, reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( pTimestampInfos ), pTimestamps, pMaxDeviation ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Uint64_tAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type
- Device::getCalibratedTimestampsEXT(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type
+ Device::getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data(
std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) );
std::vector<uint64_t, Uint64_tAllocator> & timestamps = data.first;
uint64_t & maxDeviation = data.second;
- Result result = static_cast<Result>(
- d.vkGetCalibratedTimestampsEXT( m_device,
- timestampInfos.size(),
- reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ),
- timestamps.data(),
- &maxDeviation ) );
- return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
+ VkResult result = d.vkGetCalibratedTimestampsEXT(
+ m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ), timestamps.data(), &maxDeviation );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
template <typename Uint64_tAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, uint64_t>::value, int>::type>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type
- Device::getCalibratedTimestampsEXT(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
- Uint64_tAllocator & uint64_tAllocator,
- Dispatch const & d ) const
+ typename B0,
+ typename std::enable_if<std::is_same<typename B0::value_type, uint64_t>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type
+ Device::getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
+ Uint64_tAllocator & uint64_tAllocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data(
- std::piecewise_construct,
- std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ),
- std::forward_as_tuple( 0 ) );
+ std::piecewise_construct, std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ), std::forward_as_tuple( 0 ) );
std::vector<uint64_t, Uint64_tAllocator> & timestamps = data.first;
uint64_t & maxDeviation = data.second;
- Result result = static_cast<Result>(
- d.vkGetCalibratedTimestampsEXT( m_device,
- timestampInfos.size(),
- reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ),
- timestamps.data(),
- &maxDeviation ) );
- return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
+ VkResult result = d.vkGetCalibratedTimestampsEXT(
+ m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ), timestamps.data(), &maxDeviation );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::pair<uint64_t, uint64_t>>::type
- Device::getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT & timestampInfo,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<uint64_t, uint64_t>>::type
+ Device::getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT & timestampInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
std::pair<uint64_t, uint64_t> data;
uint64_t & timestamp = data.first;
uint64_t & maxDeviation = data.second;
- Result result = static_cast<Result>(
- d.vkGetCalibratedTimestampsEXT( m_device,
- 1,
- reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( &timestampInfo ),
- &timestamp,
- &maxDeviation ) );
- return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" );
+ VkResult result =
+ d.vkGetCalibratedTimestampsEXT( m_device, 1, reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( &timestampInfo ), &timestamp, &maxDeviation );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_NV_mesh_shader ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount,
- uint32_t firstTask,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask );
@@ -16678,22 +15704,20 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize offset,
uint32_t drawCount,
uint32_t stride,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDrawMeshTasksIndirectNV(
- m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+ d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- VULKAN_HPP_NAMESPACE::Buffer countBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+ uint32_t maxDrawCount,
+ uint32_t stride,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer,
@@ -16708,38 +15732,32 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_scissor_exclusive ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor,
- uint32_t exclusiveScissorCount,
+ VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor,
+ uint32_t exclusiveScissorCount,
const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetExclusiveScissorNV( m_commandBuffer,
- firstExclusiveScissor,
- exclusiveScissorCount,
- reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) );
+ d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetExclusiveScissorNV( m_commandBuffer,
- firstExclusiveScissor,
- exclusiveScissors.size(),
- reinterpret_cast<const VkRect2D *>( exclusiveScissors.data() ) );
+
+ d.vkCmdSetExclusiveScissorNV(
+ m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size(), reinterpret_cast<const VkRect2D *>( exclusiveScissors.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_NV_device_diagnostic_checkpoints ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void * pCheckpointMarker,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void * pCheckpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker );
@@ -16747,10 +15765,10 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename CheckpointMarkerType, typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( CheckpointMarkerType const & checkpointMarker,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( CheckpointMarkerType const & checkpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdSetCheckpointNV( m_commandBuffer, reinterpret_cast<const void *>( &checkpointMarker ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -16758,44 +15776,53 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t * pCheckpointDataCount,
VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetQueueCheckpointDataNV(
- m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( pCheckpointData ) );
+ d.vkGetQueueCheckpointDataNV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( pCheckpointData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename CheckpointDataNVAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointDataNV, CheckpointDataNVAllocator>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator>
Queue::getCheckpointDataNV( Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<CheckpointDataNV, CheckpointDataNVAllocator> checkpointData;
- uint32_t checkpointDataCount;
+
+ std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> checkpointData;
+ uint32_t checkpointDataCount;
d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
checkpointData.resize( checkpointDataCount );
- d.vkGetQueueCheckpointDataNV(
- m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
+ d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
+
VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
+ if ( checkpointDataCount < checkpointData.size() )
+ {
+ checkpointData.resize( checkpointDataCount );
+ }
return checkpointData;
}
template <typename CheckpointDataNVAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, CheckpointDataNV>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointDataNV, CheckpointDataNVAllocator>
- Queue::getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d ) const
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, CheckpointDataNV>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator>
+ Queue::getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<CheckpointDataNV, CheckpointDataNVAllocator> checkpointData( checkpointDataNVAllocator );
- uint32_t checkpointDataCount;
+
+ std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> checkpointData( checkpointDataNVAllocator );
+ uint32_t checkpointDataCount;
d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
checkpointData.resize( checkpointDataCount );
- d.vkGetQueueCheckpointDataNV(
- m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
+ d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
+
VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
+ if ( checkpointDataCount < checkpointData.size() )
+ {
+ checkpointData.resize( checkpointDataCount );
+ }
return checkpointData;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -16803,70 +15830,73 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_timeline_semaphore ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR(
- VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t * pValue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+ uint64_t * pValue,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
+ return static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type
- Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type
+ Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
uint64_t value;
- Result result =
- static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
- return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValueKHR" );
+ VkResult result = d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), &value );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValueKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), value );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,
- uint64_t timeout,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,
+ uint64_t timeout,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
+ return static_cast<Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR(
- const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
+ Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>(
- d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) );
- return createResultValue( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
+
+ VkResult result = d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR(
- const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
+ return static_cast<Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
+ Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>(
- d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" );
+
+ VkResult result = d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -16874,24 +15904,24 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL(
- const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkInitializePerformanceApiINTEL(
- m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( pInitializeInfo ) ) );
+ return static_cast<Result>(
+ d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( pInitializeInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::initializePerformanceApiINTEL(
- const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const & d ) const
+ Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkInitializePerformanceApiINTEL(
- m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( &initializeInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" );
+
+ VkResult result = d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( &initializeInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -16903,218 +15933,211 @@ namespace VULKAN_HPP_NAMESPACE
}
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL(
- const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL(
- m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( pMarkerInfo ) ) );
+ return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( pMarkerInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo,
- Dispatch const & d ) const
+ CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL(
- m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( &markerInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" );
+
+ VkResult result = d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( &markerInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL(
- const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL(
- m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( pMarkerInfo ) ) );
+ return static_cast<Result>(
+ d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( pMarkerInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- CommandBuffer::setPerformanceStreamMarkerINTEL(
- const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const & d ) const
+ CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL(
- m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( &markerInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" );
+
+ VkResult result = d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( &markerInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL(
- const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL(
- m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( pOverrideInfo ) ) );
+ return static_cast<Result>(
+ d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( pOverrideInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo,
- Dispatch const & d ) const
+ CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL(
- m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( &overrideInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" );
+
+ VkResult result = d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( &overrideInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquirePerformanceConfigurationINTEL(
- const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo,
- VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo,
+ VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL(
- m_device,
- reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( pAcquireInfo ),
- reinterpret_cast<VkPerformanceConfigurationINTEL *>( pConfiguration ) ) );
+ return static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( m_device,
+ reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( pAcquireInfo ),
+ reinterpret_cast<VkPerformanceConfigurationINTEL *>( pConfiguration ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type
- Device::acquirePerformanceConfigurationINTEL(
- const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type
+ Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration;
- Result result = static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL(
- m_device,
- reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ),
- reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) ) );
- return createResultValue(
- result, configuration, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTEL" );
+ VkResult result = d.vkAcquirePerformanceConfigurationINTEL( m_device,
+ reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ),
+ reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTEL" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), configuration );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>>::type
- Device::acquirePerformanceConfigurationINTELUnique(
- const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>>::type
+ Device::acquirePerformanceConfigurationINTELUnique( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration;
- Result result = static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL(
- m_device,
- reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ),
- reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) ) );
- ObjectRelease<Device, Dispatch> deleter( *this, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>(
- result,
- configuration,
- VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTELUnique",
- deleter );
+ VkResult result = d.vkAcquirePerformanceConfigurationINTEL( m_device,
+ reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ),
+ reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTELUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>( configuration, ObjectRelease<Device, Dispatch>( *this, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL(
- VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL(
- m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
+ return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
}
#else
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
- Dispatch const & d ) const
+ Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL(
- m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releasePerformanceConfigurationINTEL" );
+
+ VkResult result = d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::releasePerformanceConfigurationINTEL" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::release(
- VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL(
- m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
+ return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
}
#else
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const
+ Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL(
- m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::release" );
+
+ VkResult result = d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::release" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL(
- VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL(
- m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
+ return static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
}
#else
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
- Dispatch const & d ) const
+ Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL(
- m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" );
+
+ VkResult result = d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,
- VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,
+ VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetPerformanceParameterINTEL( m_device,
- static_cast<VkPerformanceParameterTypeINTEL>( parameter ),
- reinterpret_cast<VkPerformanceValueINTEL *>( pValue ) ) );
+ return static_cast<Result>( d.vkGetPerformanceParameterINTEL(
+ m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( pValue ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type
- Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type
+ Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value;
- Result result =
- static_cast<Result>( d.vkGetPerformanceParameterINTEL( m_device,
- static_cast<VkPerformanceParameterTypeINTEL>( parameter ),
- reinterpret_cast<VkPerformanceValueINTEL *>( &value ) ) );
- return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" );
+ VkResult result = d.vkGetPerformanceParameterINTEL(
+ m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( &value ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), value );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -17126,68 +16149,66 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkSetLocalDimmingAMD(
- m_device, static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) );
+ d.vkSetLocalDimmingAMD( m_device, static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) );
}
#if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_imagepipe_surface ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createImagePipeSurfaceFUCHSIA(
- const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateImagePipeSurfaceFUCHSIA( m_instance,
- reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+ return static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance,
+ reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA(
+ VkResult result = d.vkCreateImagePipeSurfaceFUCHSIA(
m_instance,
reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- return createResultValue(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIA" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIA" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- Instance::createImagePipeSurfaceFUCHSIAUnique(
- const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ Instance::createImagePipeSurfaceFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA(
+
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+ VkResult result = d.vkCreateImagePipeSurfaceFUCHSIA(
m_instance,
reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIAUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIAUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -17197,58 +16218,58 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_metal_surface ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateMetalSurfaceEXT( m_instance,
- reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+ return static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance,
+ reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateMetalSurfaceEXT( m_instance,
reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXT" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- Instance::createMetalSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ Instance::createMetalSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
+ VkResult result =
d.vkCreateMetalSurfaceEXT( m_instance,
reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXTUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXTUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -17257,223 +16278,200 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_fragment_shading_rate ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getFragmentShadingRatesKHR(
- uint32_t * pFragmentShadingRateCount,
- VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::getFragmentShadingRatesKHR( uint32_t * pFragmentShadingRateCount,
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
- m_physicalDevice,
- pFragmentShadingRateCount,
- reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( pFragmentShadingRates ) ) );
+ m_physicalDevice, pFragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( pFragmentShadingRates ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename PhysicalDeviceFragmentShadingRateKHRAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
- std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
PhysicalDevice::getFragmentShadingRatesKHR( Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>
- fragmentShadingRates;
- uint32_t fragmentShadingRateCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator> fragmentShadingRates;
+ uint32_t fragmentShadingRateCount;
+ VkResult result;
do
{
- result = static_cast<Result>(
- d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && fragmentShadingRateCount )
+ result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && fragmentShadingRateCount )
{
fragmentShadingRates.resize( fragmentShadingRateCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
- m_physicalDevice,
- &fragmentShadingRateCount,
- reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) ) );
+ result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
+ m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
+ VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
+ if ( fragmentShadingRateCount < fragmentShadingRates.size() )
{
- VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
- if ( fragmentShadingRateCount < fragmentShadingRates.size() )
- {
- fragmentShadingRates.resize( fragmentShadingRateCount );
- }
+ fragmentShadingRates.resize( fragmentShadingRateCount );
}
- return createResultValue(
- result, fragmentShadingRates, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fragmentShadingRates );
}
template <typename PhysicalDeviceFragmentShadingRateKHRAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceFragmentShadingRateKHR>::value,
- int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
- std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
- PhysicalDevice::getFragmentShadingRatesKHR(
- PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator,
- Dispatch const & d ) const
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceFragmentShadingRateKHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
+ PhysicalDevice::getFragmentShadingRatesKHR( PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>
- fragmentShadingRates( physicalDeviceFragmentShadingRateKHRAllocator );
+
+ std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator> fragmentShadingRates(
+ physicalDeviceFragmentShadingRateKHRAllocator );
uint32_t fragmentShadingRateCount;
- Result result;
+ VkResult result;
do
{
- result = static_cast<Result>(
- d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && fragmentShadingRateCount )
+ result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && fragmentShadingRateCount )
{
fragmentShadingRates.resize( fragmentShadingRateCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
- m_physicalDevice,
- &fragmentShadingRateCount,
- reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) ) );
+ result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
+ m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
+ VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
+ if ( fragmentShadingRateCount < fragmentShadingRates.size() )
{
- VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
- if ( fragmentShadingRateCount < fragmentShadingRates.size() )
- {
- fragmentShadingRates.resize( fragmentShadingRateCount );
- }
+ fragmentShadingRates.resize( fragmentShadingRateCount );
}
- return createResultValue(
- result, fragmentShadingRates, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fragmentShadingRates );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR(
- const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize,
- const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize,
+ const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetFragmentShadingRateKHR( m_commandBuffer,
- reinterpret_cast<const VkExtent2D *>( pFragmentSize ),
- reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
+ d.vkCmdSetFragmentShadingRateKHR(
+ m_commandBuffer, reinterpret_cast<const VkExtent2D *>( pFragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR(
- const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize,
- const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize,
+ const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetFragmentShadingRateKHR( m_commandBuffer,
- reinterpret_cast<const VkExtent2D *>( &fragmentSize ),
- reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
+
+ d.vkCmdSetFragmentShadingRateKHR(
+ m_commandBuffer, reinterpret_cast<const VkExtent2D *>( &fragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_EXT_buffer_device_address ===
template <typename Dispatch>
- VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT(
- const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<DeviceAddress>(
- d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
+ return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT(
- const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+
+ VkDeviceAddress result = d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_EXT_tooling_info ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getToolPropertiesEXT( uint32_t * pToolCount,
- VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolPropertiesEXT( uint32_t * pToolCount,
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT(
- m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) );
+ return static_cast<Result>(
+ d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type
PhysicalDevice::getToolPropertiesEXT( Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties;
- uint32_t toolCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties;
+ uint32_t toolCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && toolCount )
+ result = d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && toolCount )
{
toolProperties.resize( toolCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT(
- m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) );
+ result =
+ d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
+ VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
+ if ( toolCount < toolProperties.size() )
{
- VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
- if ( toolCount < toolProperties.size() )
- {
- toolProperties.resize( toolCount );
- }
+ toolProperties.resize( toolCount );
}
- return createResultValue(
- result, toolProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties );
}
- template <
- typename PhysicalDeviceToolPropertiesAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceToolProperties>::value, int>::type>
+ template <typename PhysicalDeviceToolPropertiesAllocator,
+ typename Dispatch,
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceToolProperties>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type
- PhysicalDevice::getToolPropertiesEXT( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator,
- Dispatch const & d ) const
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type
+ PhysicalDevice::getToolPropertiesEXT( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties(
+
+ std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties(
physicalDeviceToolPropertiesAllocator );
uint32_t toolCount;
- Result result;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && toolCount )
+ result = d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && toolCount )
{
toolProperties.resize( toolCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT(
- m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) );
+ result =
+ d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
+ VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
+ if ( toolCount < toolProperties.size() )
{
- VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
- if ( toolCount < toolProperties.size() )
- {
- toolProperties.resize( toolCount );
- }
+ toolProperties.resize( toolCount );
}
- return createResultValue(
- result, toolProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -17481,39 +16479,35 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- uint64_t presentId,
- uint64_t timeout,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ uint64_t presentId,
+ uint64_t timeout,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkWaitForPresentKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout ) );
+ return static_cast<Result>( d.vkWaitForPresentKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout ) );
}
#else
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- uint64_t presentId,
- uint64_t timeout,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
+ Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t presentId, uint64_t timeout, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>(
- d.vkWaitForPresentKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout ) );
- return createResultValue( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::waitForPresentKHR",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
+
+ VkResult result = d.vkWaitForPresentKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::waitForPresentKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_NV_cooperative_matrix ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getCooperativeMatrixPropertiesNV( uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesNV(
+ uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
@@ -17523,76 +16517,65 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename CooperativeMatrixPropertiesNVAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
PhysicalDevice::getCooperativeMatrixPropertiesNV( Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties;
- uint32_t propertyCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties;
+ uint32_t propertyCount;
+ VkResult result;
do
{
- result = static_cast<Result>(
- d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
+ result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
- m_physicalDevice,
- &propertyCount,
- reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) ) );
+ result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
+ m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
- template <
- typename CooperativeMatrixPropertiesNVAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, CooperativeMatrixPropertiesNV>::value, int>::type>
+ template <typename CooperativeMatrixPropertiesNVAllocator,
+ typename Dispatch,
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, CooperativeMatrixPropertiesNV>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
- PhysicalDevice::getCooperativeMatrixPropertiesNV(
- CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator, Dispatch const & d ) const
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
+ PhysicalDevice::getCooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties(
+
+ std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties(
cooperativeMatrixPropertiesNVAllocator );
uint32_t propertyCount;
- Result result;
+ VkResult result;
do
{
- result = static_cast<Result>(
- d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && propertyCount )
+ result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
- m_physicalDevice,
- &propertyCount,
- reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) ) );
+ result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
+ m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -17600,95 +16583,77 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(
- uint32_t * pCombinationCount,
- VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ uint32_t * pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
- m_physicalDevice,
- pCombinationCount,
- reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( pCombinations ) ) );
+ m_physicalDevice, pCombinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( pCombinations ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename FramebufferMixedSamplesCombinationNVAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
- std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations;
- uint32_t combinationCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations;
+ uint32_t combinationCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
- m_physicalDevice, &combinationCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && combinationCount )
+ result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && combinationCount )
{
combinations.resize( combinationCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
- m_physicalDevice,
- &combinationCount,
- reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) ) );
+ result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+ m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
+ VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
+ if ( combinationCount < combinations.size() )
{
- VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
- if ( combinationCount < combinations.size() )
- {
- combinations.resize( combinationCount );
- }
+ combinations.resize( combinationCount );
}
- return createResultValue( result,
- combinations,
- VULKAN_HPP_NAMESPACE_STRING
- "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), combinations );
}
template <typename FramebufferMixedSamplesCombinationNVAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, FramebufferMixedSamplesCombinationNV>::value,
- int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
- std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, FramebufferMixedSamplesCombinationNV>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(
- FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator,
- Dispatch const & d ) const
+ FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations(
+
+ std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations(
framebufferMixedSamplesCombinationNVAllocator );
uint32_t combinationCount;
- Result result;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
- m_physicalDevice, &combinationCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && combinationCount )
+ result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && combinationCount )
{
combinations.resize( combinationCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
- m_physicalDevice,
- &combinationCount,
- reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) ) );
+ result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+ m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
+ VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
+ if ( combinationCount < combinations.size() )
{
- VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
- if ( combinationCount < combinations.size() )
- {
- combinations.resize( combinationCount );
- }
+ combinations.resize( combinationCount );
}
- return createResultValue( result,
- combinations,
- VULKAN_HPP_NAMESPACE_STRING
- "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), combinations );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -17696,112 +16661,95 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_full_screen_exclusive ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModes2EXT(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
- uint32_t * pPresentModeCount,
- VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
+ uint32_t * pPresentModeCount,
+ VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
- pPresentModeCount,
- reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
+ return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
+ pPresentModeCount,
+ reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename PresentModeKHRAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
- PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type
+ PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes;
- uint32_t presentModeCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes;
+ uint32_t presentModeCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
- &presentModeCount,
- nullptr ) );
- if ( ( result == Result::eSuccess ) && presentModeCount )
+ result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
+ m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && presentModeCount )
{
presentModes.resize( presentModeCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
- &presentModeCount,
- reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
+ result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+ &presentModeCount,
+ reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
+ VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+ if ( presentModeCount < presentModes.size() )
{
- VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
- if ( presentModeCount < presentModes.size() )
- {
- presentModes.resize( presentModeCount );
- }
+ presentModes.resize( presentModeCount );
}
- return createResultValue(
- result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentModes );
}
template <typename PresentModeKHRAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
- PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
- PresentModeKHRAllocator & presentModeKHRAllocator,
- Dispatch const & d ) const
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, PresentModeKHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type
+ PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
+ PresentModeKHRAllocator & presentModeKHRAllocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
- uint32_t presentModeCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
+ uint32_t presentModeCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
- &presentModeCount,
- nullptr ) );
- if ( ( result == Result::eSuccess ) && presentModeCount )
+ result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
+ m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && presentModeCount )
{
presentModes.resize( presentModeCount );
- result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
- m_physicalDevice,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
- &presentModeCount,
- reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
+ result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice,
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+ &presentModeCount,
+ reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
+ VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+ if ( presentModeCount < presentModes.size() )
{
- VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
- if ( presentModeCount < presentModes.size() )
- {
- presentModes.resize( presentModeCount );
- }
+ presentModes.resize( presentModeCount );
}
- return createResultValue(
- result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentModes );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT(
- VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+ return static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
}
# else
template <typename Dispatch>
@@ -17809,20 +16757,21 @@ namespace VULKAN_HPP_NAMESPACE
Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>(
- d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" );
+
+ VkResult result = d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT(
- VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+ return static_cast<Result>( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
}
# else
template <typename Dispatch>
@@ -17830,9 +16779,11 @@ namespace VULKAN_HPP_NAMESPACE
Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>(
- d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" );
+
+ VkResult result = d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -17840,29 +16791,26 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT(
- m_device,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
- reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
+ m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
- Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
+ Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
- Result result = static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT(
- m_device,
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
- reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) );
- return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" );
+ VkResult result = d.vkGetDeviceGroupSurfacePresentModes2EXT(
+ m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), modes );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
@@ -17870,58 +16818,58 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_headless_surface ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateHeadlessSurfaceEXT( m_instance,
- reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+ return static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance,
+ reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateHeadlessSurfaceEXT( m_instance,
- reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" );
+ VkResult result = d.vkCreateHeadlessSurfaceEXT(
+ m_instance,
+ reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- Instance::createHeadlessSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ Instance::createHeadlessSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateHeadlessSurfaceEXT( m_instance,
- reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique", deleter );
+ VkResult result = d.vkCreateHeadlessSurfaceEXT(
+ m_instance,
+ reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -17929,72 +16877,73 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_buffer_device_address ===
template <typename Dispatch>
- VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR(
- const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<DeviceAddress>(
- d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
+ return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR(
- const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+
+ VkDeviceAddress result = d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR(
- const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return d.vkGetBufferOpaqueCaptureAddressKHR( m_device,
- reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
+ return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR(
- const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return d.vkGetBufferOpaqueCaptureAddressKHR( m_device,
- reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+
+ uint64_t result = d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+
+ return result;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE uint64_t
- Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR(
- m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
+ return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE uint64_t
- Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR(
- m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
+
+ uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
+
+ return result;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_EXT_line_rasterization ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor,
- uint16_t lineStipplePattern,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern );
@@ -18015,25 +16964,22 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_extended_dynamic_state ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
@@ -18042,29 +16988,26 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( uint32_t viewportCount,
const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetViewportWithCountEXT(
- m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
+ d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::setViewportWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetViewportWithCountEXT(
- m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
+
+ d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount,
- const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
@@ -18072,13 +17015,12 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::setScissorWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetScissorWithCountEXT(
- m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
+
+ d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -18089,7 +17031,7 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdBindVertexBuffers2EXT( m_commandBuffer,
@@ -18103,13 +17045,12 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
@@ -18119,18 +17060,15 @@ namespace VULKAN_HPP_NAMESPACE
# else
if ( buffers.size() != offsets.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" );
}
if ( !sizes.empty() && buffers.size() != sizes.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" );
}
if ( !strides.empty() && buffers.size() != strides.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
@@ -18145,24 +17083,21 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
@@ -18170,15 +17105,14 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
@@ -18190,7 +17124,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::StencilOp passOp,
VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
VULKAN_HPP_NAMESPACE::CompareOp compareOp,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetStencilOpEXT( m_commandBuffer,
@@ -18204,110 +17138,106 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_deferred_host_operations ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateDeferredOperationKHR( m_device,
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkDeferredOperationKHR *>( pDeferredOperation ) ) );
+ return static_cast<Result>( d.vkCreateDeferredOperationKHR(
+ m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDeferredOperationKHR *>( pDeferredOperation ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::type
- Device::createDeferredOperationKHR( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::type
+ Device::createDeferredOperationKHR( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
- Result result = static_cast<Result>(
- d.vkCreateDeferredOperationKHR( m_device,
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) ) );
- return createResultValue(
- result, deferredOperation, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHR" );
+ VkResult result = d.vkCreateDeferredOperationKHR(
+ m_device,
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), deferredOperation );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>>::type
- Device::createDeferredOperationKHRUnique( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>>::type
+ Device::createDeferredOperationKHRUnique( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
- Result result = static_cast<Result>(
- d.vkCreateDeferredOperationKHR( m_device,
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>(
- result, deferredOperation, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique", deleter );
+ VkResult result = d.vkCreateDeferredOperationKHR(
+ m_device,
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>( deferredOperation, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyDeferredOperationKHR( m_device,
- static_cast<VkDeferredOperationKHR>( operation ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyDeferredOperationKHR(
+ m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyDeferredOperationKHR( m_device,
- static_cast<VkDeferredOperationKHR>( operation ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+
+ d.vkDestroyDeferredOperationKHR(
+ m_device,
+ static_cast<VkDeferredOperationKHR>( operation ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyDeferredOperationKHR( m_device,
- static_cast<VkDeferredOperationKHR>( operation ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyDeferredOperationKHR(
+ m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyDeferredOperationKHR( m_device,
- static_cast<VkDeferredOperationKHR>( operation ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+
+ d.vkDestroyDeferredOperationKHR(
+ m_device,
+ static_cast<VkDeferredOperationKHR>( operation ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
@@ -18315,147 +17245,131 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
+ return static_cast<Result>( d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
}
#else
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
+ Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>(
- d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
- return createResultValue( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::getDeferredOperationResultKHR",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+
+ VkResult result = d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
+ return static_cast<Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
}
#else
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result =
- static_cast<Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
- return createResultValue( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess,
- VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR,
- VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } );
+
+ VkResult result = d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_pipeline_executable_properties ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo,
- uint32_t * pExecutableCount,
- VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo,
+ uint32_t * pExecutableCount,
+ VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetPipelineExecutablePropertiesKHR( m_device,
- reinterpret_cast<const VkPipelineInfoKHR *>( pPipelineInfo ),
- pExecutableCount,
- reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( pProperties ) ) );
+ return static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device,
+ reinterpret_cast<const VkPipelineInfoKHR *>( pPipelineInfo ),
+ pExecutableCount,
+ reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename PipelineExecutablePropertiesKHRAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
- std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
- Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
+ Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties;
- uint32_t executableCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties;
+ uint32_t executableCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR(
- m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && executableCount )
+ result = d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && executableCount )
{
properties.resize( executableCount );
- result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR(
- m_device,
- reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
- &executableCount,
- reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) ) );
+ result = d.vkGetPipelineExecutablePropertiesKHR( m_device,
+ reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
+ &executableCount,
+ reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
+ VULKAN_HPP_ASSERT( executableCount <= properties.size() );
+ if ( executableCount < properties.size() )
{
- VULKAN_HPP_ASSERT( executableCount <= properties.size() );
- if ( executableCount < properties.size() )
- {
- properties.resize( executableCount );
- }
+ properties.resize( executableCount );
}
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
- template <
- typename PipelineExecutablePropertiesKHRAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutablePropertiesKHR>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
- std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
- Device::getPipelineExecutablePropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo,
- PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator,
- Dispatch const & d ) const
+ template <typename PipelineExecutablePropertiesKHRAllocator,
+ typename Dispatch,
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, PipelineExecutablePropertiesKHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
+ Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo,
+ PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties(
+
+ std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties(
pipelineExecutablePropertiesKHRAllocator );
uint32_t executableCount;
- Result result;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR(
- m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr ) );
- if ( ( result == Result::eSuccess ) && executableCount )
+ result = d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && executableCount )
{
properties.resize( executableCount );
- result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR(
- m_device,
- reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
- &executableCount,
- reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) ) );
+ result = d.vkGetPipelineExecutablePropertiesKHR( m_device,
+ reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
+ &executableCount,
+ reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
+ VULKAN_HPP_ASSERT( executableCount <= properties.size() );
+ if ( executableCount < properties.size() )
{
- VULKAN_HPP_ASSERT( executableCount <= properties.size() );
- if ( executableCount < properties.size() )
- {
- properties.resize( executableCount );
- }
+ properties.resize( executableCount );
}
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -18464,434 +17378,381 @@ namespace VULKAN_HPP_NAMESPACE
Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
uint32_t * pStatisticCount,
VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetPipelineExecutableStatisticsKHR( m_device,
- reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
- pStatisticCount,
- reinterpret_cast<VkPipelineExecutableStatisticKHR *>( pStatistics ) ) );
+ return static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device,
+ reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
+ pStatisticCount,
+ reinterpret_cast<VkPipelineExecutableStatisticKHR *>( pStatistics ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename PipelineExecutableStatisticKHRAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
- Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,
- Dispatch const & d ) const
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
+ Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics;
- uint32_t statisticCount;
- Result result;
+
+ std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics;
+ uint32_t statisticCount;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR(
- m_device,
- reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
- &statisticCount,
- nullptr ) );
- if ( ( result == Result::eSuccess ) && statisticCount )
+ result =
+ d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && statisticCount )
{
statistics.resize( statisticCount );
- result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR(
- m_device,
- reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
- &statisticCount,
- reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) ) );
+ result = d.vkGetPipelineExecutableStatisticsKHR( m_device,
+ reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
+ &statisticCount,
+ reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
+ VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
+ if ( statisticCount < statistics.size() )
{
- VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
- if ( statisticCount < statistics.size() )
- {
- statistics.resize( statisticCount );
- }
+ statistics.resize( statisticCount );
}
- return createResultValue(
- result, statistics, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), statistics );
}
- template <
- typename PipelineExecutableStatisticKHRAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableStatisticKHR>::value, int>::type>
+ template <typename PipelineExecutableStatisticKHRAllocator,
+ typename Dispatch,
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, PipelineExecutableStatisticKHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
- Device::getPipelineExecutableStatisticsKHR(
- const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,
- PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator,
- Dispatch const & d ) const
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
+ Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,
+ PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics(
+
+ std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics(
pipelineExecutableStatisticKHRAllocator );
uint32_t statisticCount;
- Result result;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR(
- m_device,
- reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
- &statisticCount,
- nullptr ) );
- if ( ( result == Result::eSuccess ) && statisticCount )
+ result =
+ d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && statisticCount )
{
statistics.resize( statisticCount );
- result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR(
- m_device,
- reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
- &statisticCount,
- reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) ) );
+ result = d.vkGetPipelineExecutableStatisticsKHR( m_device,
+ reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
+ &statisticCount,
+ reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
+ VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
+ if ( statisticCount < statistics.size() )
{
- VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
- if ( statisticCount < statistics.size() )
- {
- statistics.resize( statisticCount );
- }
+ statistics.resize( statisticCount );
}
- return createResultValue(
- result, statistics, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), statistics );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutableInternalRepresentationsKHR(
- const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
- uint32_t * pInternalRepresentationCount,
- VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
+ uint32_t * pInternalRepresentationCount,
+ VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
- m_device,
- reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
- pInternalRepresentationCount,
- reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( pInternalRepresentations ) ) );
+ return static_cast<Result>(
+ d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device,
+ reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
+ pInternalRepresentationCount,
+ reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( pInternalRepresentations ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename PipelineExecutableInternalRepresentationKHRAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,
- PipelineExecutableInternalRepresentationKHRAllocator>>::type
- Device::getPipelineExecutableInternalRepresentationsKHR(
- const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
+ std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type
+ Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>
+
+ std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>
internalRepresentations;
uint32_t internalRepresentationCount;
- Result result;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
- m_device,
- reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
- &internalRepresentationCount,
- nullptr ) );
- if ( ( result == Result::eSuccess ) && internalRepresentationCount )
+ result = d.vkGetPipelineExecutableInternalRepresentationsKHR(
+ m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && internalRepresentationCount )
{
internalRepresentations.resize( internalRepresentationCount );
- result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
+ result = d.vkGetPipelineExecutableInternalRepresentationsKHR(
m_device,
reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
&internalRepresentationCount,
- reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) ) );
+ reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
+ VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
+ if ( internalRepresentationCount < internalRepresentations.size() )
{
- VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
- if ( internalRepresentationCount < internalRepresentations.size() )
- {
- internalRepresentations.resize( internalRepresentationCount );
- }
+ internalRepresentations.resize( internalRepresentationCount );
}
- return createResultValue( result,
- internalRepresentations,
- VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), internalRepresentations );
}
- template <
- typename PipelineExecutableInternalRepresentationKHRAllocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableInternalRepresentationKHR>::value,
- int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,
- PipelineExecutableInternalRepresentationKHRAllocator>>::type
+ template <typename PipelineExecutableInternalRepresentationKHRAllocator,
+ typename Dispatch,
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, PipelineExecutableInternalRepresentationKHR>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
+ std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type
Device::getPipelineExecutableInternalRepresentationsKHR(
const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,
PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>
+
+ std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>
internalRepresentations( pipelineExecutableInternalRepresentationKHRAllocator );
uint32_t internalRepresentationCount;
- Result result;
+ VkResult result;
do
{
- result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
- m_device,
- reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
- &internalRepresentationCount,
- nullptr ) );
- if ( ( result == Result::eSuccess ) && internalRepresentationCount )
+ result = d.vkGetPipelineExecutableInternalRepresentationsKHR(
+ m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && internalRepresentationCount )
{
internalRepresentations.resize( internalRepresentationCount );
- result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
+ result = d.vkGetPipelineExecutableInternalRepresentationsKHR(
m_device,
reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
&internalRepresentationCount,
- reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) ) );
+ reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) );
}
- } while ( result == Result::eIncomplete );
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
+ VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
+ if ( internalRepresentationCount < internalRepresentations.size() )
{
- VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
- if ( internalRepresentationCount < internalRepresentations.size() )
- {
- internalRepresentations.resize( internalRepresentationCount );
- }
+ internalRepresentations.resize( internalRepresentationCount );
}
- return createResultValue( result,
- internalRepresentations,
- VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), internalRepresentations );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_NV_device_generated_commands ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsNV(
- const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo,
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetGeneratedCommandsMemoryRequirementsNV(
- m_device,
- reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( pInfo ),
- reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+ d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device,
+ reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( pInfo ),
+ reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
- Device::getGeneratedCommandsMemoryRequirementsNV(
- const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
- d.vkGetGeneratedCommandsMemoryRequirementsNV(
- m_device,
- reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device,
+ reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return memoryRequirements;
}
template <typename X, typename Y, typename... Z, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getGeneratedCommandsMemoryRequirementsNV(
- const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
- structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
- d.vkGetGeneratedCommandsMemoryRequirementsNV(
- m_device,
- reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+ d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device,
+ reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
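Note: the StructureChain overload above lets callers pull chained extension structures out of the same query. A hedged usage sketch, with `vk` standing for VULKAN_HPP_NAMESPACE and `device` plus a filled-in `info` (GeneratedCommandsMemoryRequirementsInfoNV) assumed to exist; MemoryDedicatedRequirements is one structure that may legally extend MemoryRequirements2:

    auto chain = device.getGeneratedCommandsMemoryRequirementsNV<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>( info );
    const vk::MemoryRequirements & reqs      = chain.get<vk::MemoryRequirements2>().memoryRequirements;
    vk::Bool32                     dedicated = chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation;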
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV(
- const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdPreprocessGeneratedCommandsNV(
- m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
+ d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV(
- const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdPreprocessGeneratedCommandsNV(
- m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
+
+ d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV(
- VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
- const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
+ const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer,
- static_cast<VkBool32>( isPreprocessed ),
- reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
+ d.vkCmdExecuteGeneratedCommandsNV(
+ m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV(
- VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
- const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
+ const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer,
- static_cast<VkBool32>( isPreprocessed ),
- reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
+
+ d.vkCmdExecuteGeneratedCommandsNV(
+ m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
- VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t groupIndex,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t groupIndex,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer,
- static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
- static_cast<VkPipeline>( pipeline ),
- groupIndex );
+ d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ), groupIndex );
}
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNV(
- const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateIndirectCommandsLayoutNV( m_device,
- reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkIndirectCommandsLayoutNV *>( pIndirectCommandsLayout ) ) );
+ return static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV( m_device,
+ reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkIndirectCommandsLayoutNV *>( pIndirectCommandsLayout ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::type
- Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::type
+ Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
- Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV(
+ VkResult result = d.vkCreateIndirectCommandsLayoutNV(
m_device,
reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) ) );
- return createResultValue(
- result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), indirectCommandsLayout );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>>::type
- Device::createIndirectCommandsLayoutNVUnique(
- const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>>::type
+ Device::createIndirectCommandsLayoutNVUnique( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
- Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV(
+ VkResult result = d.vkCreateIndirectCommandsLayoutNV(
m_device,
reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>(
- result,
- indirectCommandsLayout,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNVUnique",
- deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNVUnique" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>(
+ indirectCommandsLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyIndirectCommandsLayoutNV( m_device,
- static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyIndirectCommandsLayoutNV(
+ m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyIndirectCommandsLayoutNV(
m_device,
static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyIndirectCommandsLayoutNV( m_device,
- static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyIndirectCommandsLayoutNV(
+ m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyIndirectCommandsLayoutNV(
m_device,
static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -18899,63 +17760,64 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireDrmDisplayEXT(
- int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd,
+ VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast<VkDisplayKHR>( display ) ) );
+ return static_cast<Result>( d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast<VkDisplayKHR>( display ) ) );
}
#else
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type PhysicalDevice::acquireDrmDisplayEXT(
- int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result =
- static_cast<Result>( d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast<VkDisplayKHR>( display ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" );
+
+ VkResult result = d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast<VkDisplayKHR>( display ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getDrmDisplayEXT( int32_t drmFd,
- uint32_t connectorId,
- VULKAN_HPP_NAMESPACE::DisplayKHR * display,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDrmDisplayEXT( int32_t drmFd,
+ uint32_t connectorId,
+ VULKAN_HPP_NAMESPACE::DisplayKHR * display,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( display ) ) );
+ return static_cast<Result>( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( display ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
- PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
+ PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DisplayKHR display;
- Result result = static_cast<Result>(
- d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
- return createResultValue( result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXT" );
+ VkResult result = d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &display ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), display );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
- PhysicalDevice::getDrmDisplayEXTUnique( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
+ PhysicalDevice::getDrmDisplayEXTUnique( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DisplayKHR display;
- Result result = static_cast<Result>(
- d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
- ObjectRelease<PhysicalDevice, Dispatch> deleter( *this, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>(
- result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXTUnique", deleter );
+ VkResult result = d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &display ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXTUnique" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
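Note: the Unique variants in this hunk now hand the ObjectRelease / ObjectDestroy deleter straight to the returned UniqueHandle instead of routing it through createResultValue; call sites are unaffected. A hedged sketch, with `vk` standing for VULKAN_HPP_NAMESPACE and `physicalDevice`, `drmFd` and `connectorId` assumed valid:

    auto display = physicalDevice.getDrmDisplayEXTUnique( drmFd, connectorId );
    // In the usual exceptions-enabled build, resultCheck turns a failing VkResult into a thrown exception;
    // on success, *display is a vk::DisplayKHR that is released automatically when `display` goes out of scope.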
@@ -18963,120 +17825,113 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_private_data ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreatePrivateDataSlotEXT( m_device,
- reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) );
+ return static_cast<Result>( d.vkCreatePrivateDataSlotEXT( m_device,
+ reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::type
- Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::type
+ Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
- Result result = static_cast<Result>(
- d.vkCreatePrivateDataSlotEXT( m_device,
- reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) );
- return createResultValue(
- result, privateDataSlot, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXT" );
+ VkResult result = d.vkCreatePrivateDataSlotEXT(
+ m_device,
+ reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), privateDataSlot );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>>::type
- Device::createPrivateDataSlotEXTUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>>::type
+ Device::createPrivateDataSlotEXTUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
- Result result = static_cast<Result>(
- d.vkCreatePrivateDataSlotEXT( m_device,
- reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>(
- result, privateDataSlot, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique", deleter );
+ VkResult result = d.vkCreatePrivateDataSlotEXT(
+ m_device,
+ reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>( privateDataSlot, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyPrivateDataSlotEXT( m_device,
- static_cast<VkPrivateDataSlot>( privateDataSlot ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyPrivateDataSlotEXT( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyPrivateDataSlotEXT( m_device,
- static_cast<VkPrivateDataSlot>( privateDataSlot ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+
+ d.vkDestroyPrivateDataSlotEXT(
+ m_device,
+ static_cast<VkPrivateDataSlot>( privateDataSlot ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
- uint64_t objectHandle,
- VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
- uint64_t data,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
+ uint64_t objectHandle,
+ VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
+ uint64_t data,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkSetPrivateDataEXT( m_device,
- static_cast<VkObjectType>( objectType ),
- objectHandle,
- static_cast<VkPrivateDataSlot>( privateDataSlot ),
- data ) );
+ return static_cast<Result>(
+ d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) );
}
#else
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
- uint64_t objectHandle,
- VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
- uint64_t data,
- Dispatch const & d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
+ uint64_t objectHandle,
+ VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
+ uint64_t data,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkSetPrivateDataEXT( m_device,
- static_cast<VkObjectType>( objectType ),
- objectHandle,
- static_cast<VkPrivateDataSlot>( privateDataSlot ),
- data ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" );
+
+ VkResult result =
+ d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -19088,28 +17943,21 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetPrivateDataEXT( m_device,
- static_cast<VkObjectType>( objectType ),
- objectHandle,
- static_cast<VkPrivateDataSlot>( privateDataSlot ),
- pData );
+ d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t
- Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
- uint64_t objectHandle,
- VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
+ uint64_t objectHandle,
+ VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
uint64_t data;
- d.vkGetPrivateDataEXT( m_device,
- static_cast<VkObjectType>( objectType ),
- objectHandle,
- static_cast<VkPrivateDataSlot>( privateDataSlot ),
- &data );
+ d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data );
+
return data;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -19119,7 +17967,7 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( pEncodeInfo ) );
@@ -19128,67 +17976,101 @@ namespace VULKAN_HPP_NAMESPACE
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( &encodeInfo ) );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ //=== VK_EXT_metal_objects ===
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::exportMetalObjectsEXT( VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT * pMetalObjectsInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( pMetalObjectsInfo ) );
+ }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT
+ Device::exportMetalObjectsEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT metalObjectsInfo;
+ d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) );
+
+ return metalObjectsInfo;
+ }
+
+ template <typename X, typename Y, typename... Z, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::exportMetalObjectsEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT & metalObjectsInfo = structureChain.template get<VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT>();
+ d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) );
+
+ return structureChain;
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
//=== VK_KHR_synchronization2 ===
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetEvent2KHR(
- m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
+ d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetEvent2KHR(
- m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
+
+ d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdResetEvent2KHR(
- m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
+ d.vkCmdResetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( uint32_t eventCount,
const VULKAN_HPP_NAMESPACE::Event * pEvents,
const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdWaitEvents2KHR( m_commandBuffer,
- eventCount,
- reinterpret_cast<const VkEvent *>( pEvents ),
- reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) );
+ d.vkCmdWaitEvents2KHR(
+ m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::waitEvents2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
@@ -19196,8 +18078,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
if ( events.size() != dependencyInfos.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
@@ -19209,9 +18090,8 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
@@ -19219,11 +18099,11 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -19232,37 +18112,34 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t query,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdWriteTimestamp2KHR(
- m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
+ d.vkCmdWriteTimestamp2KHR( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
}
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2KHR( uint32_t submitCount,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2KHR( uint32_t submitCount,
const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits,
VULKAN_HPP_NAMESPACE::Fence fence,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkQueueSubmit2KHR(
- m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) );
+ return static_cast<Result>(
+ d.vkQueueSubmit2KHR( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Queue::submit2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,
- VULKAN_HPP_NAMESPACE::Fence fence,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit2KHR(
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkQueueSubmit2KHR( m_queue,
- submits.size(),
- reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ),
- static_cast<VkFence>( fence ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" );
+
+ VkResult result = d.vkQueueSubmit2KHR( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -19271,57 +18148,63 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
uint32_t marker,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdWriteBufferMarker2AMD( m_commandBuffer,
- static_cast<VkPipelineStageFlags2>( stage ),
- static_cast<VkBuffer>( dstBuffer ),
- static_cast<VkDeviceSize>( dstOffset ),
- marker );
+ d.vkCmdWriteBufferMarker2AMD(
+ m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void Queue::getCheckpointData2NV( uint32_t * pCheckpointDataCount,
VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetQueueCheckpointData2NV(
- m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( pCheckpointData ) );
+ d.vkGetQueueCheckpointData2NV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( pCheckpointData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename CheckpointData2NVAllocator, typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointData2NV, CheckpointData2NVAllocator>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator>
Queue::getCheckpointData2NV( Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<CheckpointData2NV, CheckpointData2NVAllocator> checkpointData;
- uint32_t checkpointDataCount;
+
+ std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> checkpointData;
+ uint32_t checkpointDataCount;
d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr );
checkpointData.resize( checkpointDataCount );
- d.vkGetQueueCheckpointData2NV(
- m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
+ d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
+
VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
+ if ( checkpointDataCount < checkpointData.size() )
+ {
+ checkpointData.resize( checkpointDataCount );
+ }
return checkpointData;
}
template <typename CheckpointData2NVAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, CheckpointData2NV>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointData2NV, CheckpointData2NVAllocator>
- Queue::getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d ) const
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, CheckpointData2NV>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator>
+ Queue::getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<CheckpointData2NV, CheckpointData2NVAllocator> checkpointData( checkpointData2NVAllocator );
- uint32_t checkpointDataCount;
+
+ std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> checkpointData( checkpointData2NVAllocator );
+ uint32_t checkpointDataCount;
d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr );
checkpointData.resize( checkpointDataCount );
- d.vkGetQueueCheckpointData2NV(
- m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
+ d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
+
VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
+ if ( checkpointDataCount < checkpointData.size() )
+ {
+ checkpointData.resize( checkpointDataCount );
+ }
return checkpointData;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -19329,22 +18212,60 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_fragment_shading_rate_enums ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateEnumNV(
- VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate,
- const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate,
+ const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetFragmentShadingRateEnumNV(
+ m_commandBuffer, static_cast<VkFragmentShadingRateNV>( shadingRate ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
+ }
+
+ //=== VK_EXT_mesh_shader ===
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdDrawMeshTasksEXT( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ uint32_t drawCount,
+ uint32_t stride,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetFragmentShadingRateEnumNV( m_commandBuffer,
- static_cast<VkFragmentShadingRateNV>( shadingRate ),
- reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
+ d.vkCmdDrawMeshTasksIndirectEXT( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+ uint32_t maxDrawCount,
+ uint32_t stride,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdDrawMeshTasksIndirectCountEXT( m_commandBuffer,
+ static_cast<VkBuffer>( buffer ),
+ static_cast<VkDeviceSize>( offset ),
+ static_cast<VkBuffer>( countBuffer ),
+ static_cast<VkDeviceSize>( countBufferOffset ),
+ maxDrawCount,
+ stride );
}
//=== VK_KHR_copy_commands2 ===
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) );
@@ -19353,16 +18274,17 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) );
@@ -19371,60 +18293,55 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyBufferToImage2KHR( m_commandBuffer,
- reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) );
+ d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyBufferToImage2KHR( m_commandBuffer,
- reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
+
+ d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer,
- reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) );
+ d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer,
- reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
+
+ d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) );
@@ -19433,17 +18350,17 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) );
@@ -19451,22 +18368,103 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_EXT_image_compression_control ===
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image,
+ const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT * pSubresource,
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT * pLayout,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkGetImageSubresourceLayout2EXT( m_device,
+ static_cast<VkImage>( image ),
+ reinterpret_cast<const VkImageSubresource2EXT *>( pSubresource ),
+ reinterpret_cast<VkSubresourceLayout2EXT *>( pLayout ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT Device::getImageSubresourceLayout2EXT(
+ VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT layout;
+ d.vkGetImageSubresourceLayout2EXT( m_device,
+ static_cast<VkImage>( image ),
+ reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ),
+ reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
+
+ return layout;
+ }
+
+ template <typename X, typename Y, typename... Z, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageSubresourceLayout2EXT(
+ VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>();
+ d.vkGetImageSubresourceLayout2EXT( m_device,
+ static_cast<VkImage>( image ),
+ reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ),
+ reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
+
+ return structureChain;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_EXT_device_fault ===
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFaultInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts,
+ VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT * pFaultInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>( d.vkGetDeviceFaultInfoEXT(
+ m_device, reinterpret_cast<VkDeviceFaultCountsEXT *>( pFaultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( pFaultInfo ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>>
+ Device::getFaultInfoEXT( Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT> data;
+ VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT & faultCounts = data.first;
+ VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT & faultInfo = data.second;
+ VkResult result =
+ d.vkGetDeviceFaultInfoEXT( m_device, reinterpret_cast<VkDeviceFaultCountsEXT *>( &faultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( &faultInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::getFaultInfoEXT",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } );
+
+ return ResultValue<std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>>(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
#if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_NV_acquire_winrt_display ===
# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireWinrtDisplayNV(
- VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
@@ -19477,49 +18475,50 @@ namespace VULKAN_HPP_NAMESPACE
PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result =
- static_cast<Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireWinrtDisplayNV" );
+
+ VkResult result = d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireWinrtDisplayNV" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId,
- VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId,
+ VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
+ return static_cast<Result>( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
- PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
+ PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DisplayKHR display;
- Result result = static_cast<Result>(
- d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
- return createResultValue( result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNV" );
+ VkResult result = d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNV" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), display );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
- PhysicalDevice::getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
+ PhysicalDevice::getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::DisplayKHR display;
- Result result = static_cast<Result>(
- d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
- ObjectRelease<PhysicalDevice, Dispatch> deleter( *this, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>(
- result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNVUnique", deleter );
+ VkResult result = d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNVUnique" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -19529,78 +18528,81 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_directfb_surface ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateDirectFBSurfaceEXT( m_instance,
- reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+ return static_cast<Result>( d.vkCreateDirectFBSurfaceEXT( m_instance,
+ reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateDirectFBSurfaceEXT( m_instance,
- reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXT" );
+ VkResult result = d.vkCreateDirectFBSurfaceEXT(
+ m_instance,
+ reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- Instance::createDirectFBSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ Instance::createDirectFBSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateDirectFBSurfaceEXT( m_instance,
- reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXTUnique", deleter );
+ VkResult result = d.vkCreateDirectFBSurfaceEXT(
+ m_instance,
+ reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXTUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT(
- uint32_t queueFamilyIndex, IDirectFB * dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex,
+ IDirectFB * dfb,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Bool32>(
- d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, dfb ) );
+ return static_cast<Bool32>( d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, dfb ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT(
- uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
+ PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, &dfb );
+
+ VkBool32 result = d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, &dfb );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
@@ -19608,15 +18610,14 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_ray_tracing_pipeline ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR(
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
- uint32_t width,
- uint32_t height,
- uint32_t depth,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
+ const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
+ const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
+ const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
+ uint32_t width,
+ uint32_t height,
+ uint32_t depth,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdTraceRaysKHR( m_commandBuffer,
@@ -19631,17 +18632,17 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
- uint32_t width,
- uint32_t height,
- uint32_t depth,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
+ const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
+ const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
+ const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
+ uint32_t width,
+ uint32_t height,
+ uint32_t depth,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdTraceRaysKHR( m_commandBuffer,
reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
@@ -19655,454 +18656,367 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- uint32_t createInfoCount,
+ Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ uint32_t createInfoCount,
const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateRayTracingPipelinesKHR( m_device,
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- static_cast<VkPipelineCache>( pipelineCache ),
- createInfoCount,
- reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( pCreateInfos ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkPipeline *>( pPipelines ) ) );
+ return static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device,
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ static_cast<VkPipelineCache>( pipelineCache ),
+ createInfoCount,
+ reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( pCreateInfos ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkPipeline *>( pPipelines ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename PipelineAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
- Device::createRayTracingPipelinesKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
- Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR(
+ VkResult result = d.vkCreateRayTracingPipelinesKHR(
m_device,
static_cast<VkDeferredOperationKHR>( deferredOperation ),
static_cast<VkPipelineCache>( pipelineCache ),
createInfos.size(),
reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
- return createResultValue( result,
- pipelines,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess,
- VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+
+ return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
}
template <typename PipelineAllocator,
typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type>
+ typename B0,
+ typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
- Device::createRayTracingPipelinesKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d ) const
+ Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
- Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR(
+ VkResult result = d.vkCreateRayTracingPipelinesKHR(
m_device,
static_cast<VkDeferredOperationKHR>( deferredOperation ),
static_cast<VkPipelineCache>( pipelineCache ),
createInfos.size(),
reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
- return createResultValue( result,
- pipelines,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess,
- VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+
+ return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
}
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
- Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Pipeline pipeline;
- Result result = static_cast<Result>(
- d.vkCreateRayTracingPipelinesKHR( m_device,
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- static_cast<VkPipelineCache>( pipelineCache ),
- 1,
- reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( &pipeline ) ) );
- return createResultValue( result,
- pipeline,
- VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHR",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess,
- VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ VkResult result = d.vkCreateRayTracingPipelinesKHR(
+ m_device,
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ static_cast<VkPipelineCache>( pipelineCache ),
+ 1,
+ reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( &pipeline ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+
+ return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch, typename PipelineAllocator>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
Device::createRayTracingPipelinesKHRUnique(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
- std::vector<Pipeline> pipelines( createInfos.size() );
- Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR(
+
+ std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
+ VkResult result = d.vkCreateRayTracingPipelinesKHR(
m_device,
static_cast<VkDeferredOperationKHR>( deferredOperation ),
static_cast<VkPipelineCache>( pipelineCache ),
createInfos.size(),
reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
+ uniquePipelines.reserve( createInfos.size() );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ for ( auto const & pipeline : pipelines )
{
- uniquePipelines.reserve( createInfos.size() );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- for ( size_t i = 0; i < createInfos.size(); i++ )
- {
- uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
- }
+ uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
}
- return createResultValue( result,
- std::move( uniquePipelines ),
- VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess,
- VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
- }
-
- template <
- typename Dispatch,
- typename PipelineAllocator,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
+ return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
+ }
+
+ template <typename Dispatch,
+ typename PipelineAllocator,
+ typename B0,
+ typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
Device::createRayTracingPipelinesKHRUnique(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
- std::vector<Pipeline> pipelines( createInfos.size() );
- Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR(
+
+ std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
+ VkResult result = d.vkCreateRayTracingPipelinesKHR(
m_device,
static_cast<VkDeferredOperationKHR>( deferredOperation ),
static_cast<VkPipelineCache>( pipelineCache ),
createInfos.size(),
reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
+ uniquePipelines.reserve( createInfos.size() );
+ ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+ for ( auto const & pipeline : pipelines )
{
- uniquePipelines.reserve( createInfos.size() );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- for ( size_t i = 0; i < createInfos.size(); i++ )
- {
- uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
- }
+ uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
}
- return createResultValue( result,
- std::move( uniquePipelines ),
- VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess,
- VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+ return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
}
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>>
- Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>
+ Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Pipeline pipeline;
- Result result = static_cast<Result>(
- d.vkCreateRayTracingPipelinesKHR( m_device,
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- static_cast<VkPipelineCache>( pipelineCache ),
- 1,
- reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkPipeline *>( &pipeline ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<Pipeline, Dispatch>( result,
- pipeline,
- VULKAN_HPP_NAMESPACE_STRING
- "::Device::createRayTracingPipelineKHRUnique",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess,
- VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
- VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT },
- deleter );
+
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline;
+ VkResult result = d.vkCreateRayTracingPipelinesKHR(
+ m_device,
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ static_cast<VkPipelineCache>( pipelineCache ),
+ 1,
+ reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkPipeline *>( &pipeline ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHRUnique",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+
+ return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- void * pData,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t firstGroup,
+ uint32_t groupCount,
+ size_t dataSize,
+ void * pData,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR(
- m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
+ return static_cast<Result>(
+ d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T, typename Dispatch>
- VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<void>::type Device::getRayTracingShaderGroupHandlesKHR(
- VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- ArrayProxy<T> const & data,
- Dispatch const & d ) const
+ template <typename DataType, typename DataTypeAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type Device::getRayTracingShaderGroupHandlesKHR(
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result =
- static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device,
- static_cast<VkPipeline>( pipeline ),
- firstGroup,
- groupCount,
- data.size() * sizeof( T ),
- reinterpret_cast<void *>( data.data() ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" );
- }
- template <typename DataType, typename Allocator, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DataType, Allocator>>::type
- Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- Dispatch const & d ) const
- {
- VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
- std::vector<DataType, Allocator> data( dataSize / sizeof( DataType ) );
- Result result =
- static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device,
- static_cast<VkPipeline>( pipeline ),
- firstGroup,
- groupCount,
- data.size() * sizeof( DataType ),
- reinterpret_cast<void *>( data.data() ) ) );
- return createResultValue(
- result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" );
+ std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
+ VkResult result = d.vkGetRayTracingShaderGroupHandlesKHR(
+ m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
template <typename DataType, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
- Device::getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
+ Device::getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
DataType data;
- Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device,
- static_cast<VkPipeline>( pipeline ),
- firstGroup,
- groupCount,
- sizeof( DataType ),
- reinterpret_cast<void *>( &data ) ) );
- return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleKHR" );
+ VkResult result = d.vkGetRayTracingShaderGroupHandlesKHR(
+ m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
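  // Usage sketch only: a minimal example of the enhanced-mode overloads above, assuming
  // exceptions are enabled. The names `device`, `rtPipeline`, `groupCount` and `handleSize`
  // are hypothetical; handleSize would normally come from
  // VkPhysicalDeviceRayTracingPipelinePropertiesKHR::shaderGroupHandleSize.
  inline std::vector<uint8_t> exampleFetchGroupHandles( VULKAN_HPP_NAMESPACE::Device   device,
                                                        VULKAN_HPP_NAMESPACE::Pipeline rtPipeline,
                                                        uint32_t                       groupCount,
                                                        size_t                         handleSize )
  {
    // dataSize must equal groupCount * shaderGroupHandleSize and be a multiple of sizeof( DataType ).
    size_t const dataSize = groupCount * handleSize;
    // Returns the handle bytes by value; a failure code throws from resultCheck() above.
    return device.getRayTracingShaderGroupHandlesKHR<uint8_t>( rtPipeline, 0, groupCount, dataSize );
  }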
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- void * pData,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t firstGroup,
+ uint32_t groupCount,
+ size_t dataSize,
+ void * pData,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
- m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
+ return static_cast<Result>(
+ d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T, typename Dispatch>
- VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<void>::type Device::getRayTracingCaptureReplayShaderGroupHandlesKHR(
- VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- ArrayProxy<T> const & data,
- Dispatch const & d ) const
+ template <typename DataType, typename DataTypeAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type
+ Device::getRayTracingCaptureReplayShaderGroupHandlesKHR(
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>(
- d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device,
- static_cast<VkPipeline>( pipeline ),
- firstGroup,
- groupCount,
- data.size() * sizeof( T ),
- reinterpret_cast<void *>( data.data() ) ) );
- return createResultValue( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
- }
- template <typename DataType, typename Allocator, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<std::vector<DataType, Allocator>>::type
- Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- Dispatch const & d ) const
- {
- VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
- std::vector<DataType, Allocator> data( dataSize / sizeof( DataType ) );
- Result result = static_cast<Result>(
- d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device,
- static_cast<VkPipeline>( pipeline ),
- firstGroup,
- groupCount,
- data.size() * sizeof( DataType ),
- reinterpret_cast<void *>( data.data() ) ) );
- return createResultValue(
- result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
+ std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
+ VkResult result = d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
+ m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
template <typename DataType, typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
- Device::getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::getRayTracingCaptureReplayShaderGroupHandleKHR(
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
DataType data;
- Result result =
- static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device,
- static_cast<VkPipeline>( pipeline ),
- firstGroup,
- groupCount,
- sizeof( DataType ),
- reinterpret_cast<void *>( &data ) ) );
- return createResultValue(
- result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandleKHR" );
+ VkResult result = d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
+ m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandleKHR" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR(
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
- VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
+ const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
+ const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
+ const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
+ VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdTraceRaysIndirectKHR(
- m_commandBuffer,
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ),
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ),
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ),
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ),
- static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
+ d.vkCmdTraceRaysIndirectKHR( m_commandBuffer,
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ),
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ),
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ),
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ),
+ static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR(
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
- VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
+ const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
+ const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
+ const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
+ VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdTraceRaysIndirectKHR(
- m_commandBuffer,
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
- static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
+
+ d.vkCmdTraceRaysIndirectKHR( m_commandBuffer,
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
+ static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE DeviceSize
- Device::getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t group,
- VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE DeviceSize Device::getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t group,
+ VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<DeviceSize>( d.vkGetRayTracingShaderGroupStackSizeKHR(
- m_device, static_cast<VkPipeline>( pipeline ), group, static_cast<VkShaderGroupShaderKHR>( groupShader ) ) );
+ return static_cast<DeviceSize>(
+ d.vkGetRayTracingShaderGroupStackSizeKHR( m_device, static_cast<VkPipeline>( pipeline ), group, static_cast<VkShaderGroupShaderKHR>( groupShader ) ) );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetRayTracingPipelineStackSizeKHR( m_commandBuffer, pipelineStackSize );
@@ -20111,36 +19025,34 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_vertex_input_dynamic_state ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT(
- uint32_t vertexBindingDescriptionCount,
- const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions,
- uint32_t vertexAttributeDescriptionCount,
- const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( uint32_t vertexBindingDescriptionCount,
+ const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions,
+ uint32_t vertexAttributeDescriptionCount,
+ const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetVertexInputEXT(
- m_commandBuffer,
- vertexBindingDescriptionCount,
- reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( pVertexBindingDescriptions ),
- vertexAttributeDescriptionCount,
- reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( pVertexAttributeDescriptions ) );
+ d.vkCmdSetVertexInputEXT( m_commandBuffer,
+ vertexBindingDescriptionCount,
+ reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( pVertexBindingDescriptions ),
+ vertexAttributeDescriptionCount,
+ reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( pVertexAttributeDescriptions ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetVertexInputEXT(
- m_commandBuffer,
- vertexBindingDescriptions.size(),
- reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( vertexBindingDescriptions.data() ),
- vertexAttributeDescriptions.size(),
- reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( vertexAttributeDescriptions.data() ) );
+
+ d.vkCmdSetVertexInputEXT( m_commandBuffer,
+ vertexBindingDescriptions.size(),
+ reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( vertexBindingDescriptions.data() ),
+ vertexAttributeDescriptions.size(),
+ reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( vertexAttributeDescriptions.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
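  // Usage sketch only: the ArrayProxy overload above accepts any contiguous range (std::vector,
  // std::array, initializer list), so no explicit counts or pointers are needed. The names
  // `cmd`, `bindings` and `attributes` are hypothetical.
  inline void exampleSetVertexInput( VULKAN_HPP_NAMESPACE::CommandBuffer cmd )
  {
    std::vector<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT>   bindings( 1 );
    std::vector<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> attributes( 1 );
    bindings[0].setBinding( 0 ).setStride( 16 ).setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex ).setDivisor( 1 );
    attributes[0].setLocation( 0 ).setBinding( 0 ).setFormat( VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat ).setOffset( 0 );
    // Sizes and data pointers are taken from the proxies inside the wrapper.
    cmd.setVertexInputEXT( bindings, attributes );
  }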
@@ -20148,63 +19060,64 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_FUCHSIA_external_memory ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryZirconHandleFUCHSIA(
- const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
- zx_handle_t * pZirconHandle,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
+ zx_handle_t * pZirconHandle,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetMemoryZirconHandleFUCHSIA(
- m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) );
+ return static_cast<Result>(
+ d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type
- Device::getMemoryZirconHandleFUCHSIA(
- const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type
+ Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
zx_handle_t zirconHandle;
- Result result = static_cast<Result>( d.vkGetMemoryZirconHandleFUCHSIA(
- m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle ) );
- return createResultValue(
- result, zirconHandle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" );
+ VkResult result =
+ d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), zirconHandle );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryZirconHandlePropertiesFUCHSIA(
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- zx_handle_t zirconHandle,
- VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+ zx_handle_t zirconHandle,
+ VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetMemoryZirconHandlePropertiesFUCHSIA(
- m_device,
- static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
- zirconHandle,
- reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( pMemoryZirconHandleProperties ) ) );
+ return static_cast<Result>(
+ d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device,
+ static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
+ zirconHandle,
+ reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( pMemoryZirconHandleProperties ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::type
- Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::type
+ Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
zx_handle_t zirconHandle,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties;
- Result result = static_cast<Result>( d.vkGetMemoryZirconHandlePropertiesFUCHSIA(
- m_device,
- static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
- zirconHandle,
- reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( &memoryZirconHandleProperties ) ) );
- return createResultValue( result,
- memoryZirconHandleProperties,
- VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" );
+ VkResult result = d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device,
+ static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
+ zirconHandle,
+ reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( &memoryZirconHandleProperties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryZirconHandleProperties );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_FUCHSIA*/
@@ -20214,57 +19127,53 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreZirconHandleFUCHSIA(
- const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkImportSemaphoreZirconHandleFUCHSIA(
- m_device,
- reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( pImportSemaphoreZirconHandleInfo ) ) );
+ m_device, reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( pImportSemaphoreZirconHandleInfo ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::importSemaphoreZirconHandleFUCHSIA(
- const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo,
- Dispatch const & d ) const
+ Device::importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkImportSemaphoreZirconHandleFUCHSIA(
- m_device,
- reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( &importSemaphoreZirconHandleInfo ) ) );
- return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" );
+
+ VkResult result = d.vkImportSemaphoreZirconHandleFUCHSIA(
+ m_device, reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( &importSemaphoreZirconHandleInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreZirconHandleFUCHSIA(
- const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
- zx_handle_t * pZirconHandle,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
+ zx_handle_t * pZirconHandle,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>( d.vkGetSemaphoreZirconHandleFUCHSIA(
- m_device,
- reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ),
- pZirconHandle ) );
+ return static_cast<Result>(
+ d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type
- Device::getSemaphoreZirconHandleFUCHSIA(
- const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type
+ Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
zx_handle_t zirconHandle;
- Result result = static_cast<Result>( d.vkGetSemaphoreZirconHandleFUCHSIA(
- m_device,
- reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ),
- &zirconHandle ) );
- return createResultValue(
- result, zirconHandle, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" );
+ VkResult result =
+ d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), zirconHandle );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_FUCHSIA*/
@@ -20277,173 +19186,162 @@ namespace VULKAN_HPP_NAMESPACE
Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA * pCreateInfo,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA * pCollection,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateBufferCollectionFUCHSIA( m_device,
- reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkBufferCollectionFUCHSIA *>( pCollection ) ) );
+ return static_cast<Result>( d.vkCreateBufferCollectionFUCHSIA( m_device,
+ reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkBufferCollectionFUCHSIA *>( pCollection ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA>::type
- Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA>::type
+ Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection;
- Result result = static_cast<Result>( d.vkCreateBufferCollectionFUCHSIA(
+ VkResult result = d.vkCreateBufferCollectionFUCHSIA(
m_device,
reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ) ) );
- return createResultValue(
- result, collection, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIA" );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIA" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), collection );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA, Dispatch>>::type
- Device::createBufferCollectionFUCHSIAUnique(
- const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA, Dispatch>>::type
+ Device::createBufferCollectionFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection;
- Result result = static_cast<Result>( d.vkCreateBufferCollectionFUCHSIA(
+ VkResult result = d.vkCreateBufferCollectionFUCHSIA(
m_device,
reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ) ) );
- ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA, Dispatch>(
- result, collection, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIAUnique", deleter );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIAUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA, Dispatch>( collection, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setBufferCollectionImageConstraintsFUCHSIA(
- VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
- const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA * pImageConstraintsInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
+ const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA * pImageConstraintsInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkSetBufferCollectionImageConstraintsFUCHSIA(
- m_device,
- static_cast<VkBufferCollectionFUCHSIA>( collection ),
- reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( pImageConstraintsInfo ) ) );
+ m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( pImageConstraintsInfo ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::setBufferCollectionImageConstraintsFUCHSIA(
- VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
- const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo,
- Dispatch const & d ) const
+ Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
+ const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkSetBufferCollectionImageConstraintsFUCHSIA(
- m_device,
- static_cast<VkBufferCollectionFUCHSIA>( collection ),
- reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( &imageConstraintsInfo ) ) );
- return createResultValue( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionImageConstraintsFUCHSIA" );
+
+ VkResult result = d.vkSetBufferCollectionImageConstraintsFUCHSIA(
+ m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( &imageConstraintsInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionImageConstraintsFUCHSIA" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setBufferCollectionBufferConstraintsFUCHSIA(
- VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
- const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
+ const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkSetBufferCollectionBufferConstraintsFUCHSIA(
- m_device,
- static_cast<VkBufferCollectionFUCHSIA>( collection ),
- reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( pBufferConstraintsInfo ) ) );
+ m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( pBufferConstraintsInfo ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
- Device::setBufferCollectionBufferConstraintsFUCHSIA(
- VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
- const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo,
- Dispatch const & d ) const
+ Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
+ const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- Result result = static_cast<Result>( d.vkSetBufferCollectionBufferConstraintsFUCHSIA(
- m_device,
- static_cast<VkBufferCollectionFUCHSIA>( collection ),
- reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( &bufferConstraintsInfo ) ) );
- return createResultValue( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionBufferConstraintsFUCHSIA" );
+
+ VkResult result = d.vkSetBufferCollectionBufferConstraintsFUCHSIA(
+ m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( &bufferConstraintsInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionBufferConstraintsFUCHSIA" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyBufferCollectionFUCHSIA( m_device,
- static_cast<VkBufferCollectionFUCHSIA>( collection ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyBufferCollectionFUCHSIA(
+ m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyBufferCollectionFUCHSIA(
m_device,
static_cast<VkBufferCollectionFUCHSIA>( collection ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkDestroyBufferCollectionFUCHSIA( m_device,
- static_cast<VkBufferCollectionFUCHSIA>( collection ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ d.vkDestroyBufferCollectionFUCHSIA(
+ m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkDestroyBufferCollectionFUCHSIA(
m_device,
static_cast<VkBufferCollectionFUCHSIA>( collection ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -20451,30 +19349,26 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA * pProperties,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetBufferCollectionPropertiesFUCHSIA(
- m_device,
- static_cast<VkBufferCollectionFUCHSIA>( collection ),
- reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( pProperties ) ) );
+ m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( pProperties ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA>::type
- Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA>::type
+ Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA properties;
- Result result = static_cast<Result>( d.vkGetBufferCollectionPropertiesFUCHSIA(
- m_device,
- static_cast<VkBufferCollectionFUCHSIA>( collection ),
- reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( &properties ) ) );
- return createResultValue(
- result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferCollectionPropertiesFUCHSIA" );
+ VkResult result = d.vkGetBufferCollectionPropertiesFUCHSIA(
+ m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( &properties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferCollectionPropertiesFUCHSIA" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_FUCHSIA*/
@@ -20482,10 +19376,9 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_HUAWEI_subpass_shading ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass,
- VULKAN_HPP_NAMESPACE::Extent2D * pMaxWorkgroupSize,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass,
+ VULKAN_HPP_NAMESPACE::Extent2D * pMaxWorkgroupSize,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
@@ -20495,17 +19388,18 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Extent2D>
- Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass,
- Dispatch const & d ) const
+ Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize;
- Result result = static_cast<Result>( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
- m_device, static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( &maxWorkgroupSize ) ) );
- return createResultValue( result,
- maxWorkgroupSize,
- VULKAN_HPP_NAMESPACE_STRING "::Device::getSubpassShadingMaxWorkgroupSizeHUAWEI",
- { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } );
+ VkResult result = d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
+ m_device, static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( &maxWorkgroupSize ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::getSubpassShadingMaxWorkgroupSizeHUAWEI",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } );
+
+ return ResultValue<VULKAN_HPP_NAMESPACE::Extent2D>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), maxWorkgroupSize );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
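  // Usage sketch only: unlike most enhanced-mode calls, the overload above returns a ResultValue
  // because eIncomplete is accepted as a success code, so the caller still inspects .result.
  // The handles `device` and `renderPass` are hypothetical.
  inline VULKAN_HPP_NAMESPACE::Extent2D exampleQueryMaxWorkgroupSize( VULKAN_HPP_NAMESPACE::Device     device,
                                                                      VULKAN_HPP_NAMESPACE::RenderPass renderPass )
  {
    ResultValue<VULKAN_HPP_NAMESPACE::Extent2D> rv = device.getSubpassShadingMaxWorkgroupSizeHUAWEI( renderPass );
    // Any code other than eSuccess / eIncomplete throws from resultCheck() when exceptions are enabled.
    return ( rv.result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ? rv.value : VULKAN_HPP_NAMESPACE::Extent2D{};
  }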
@@ -20521,83 +19415,103 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView,
VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdBindInvocationMaskHUAWEI(
- m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
+ d.vkCmdBindInvocationMaskHUAWEI( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
}
//=== VK_NV_external_memory_rdma ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryRemoteAddressNV(
- const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo,
- VULKAN_HPP_NAMESPACE::RemoteAddressNV * pAddress,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo,
+ VULKAN_HPP_NAMESPACE::RemoteAddressNV * pAddress,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetMemoryRemoteAddressNV(
- m_device,
- reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( pMemoryGetRemoteAddressInfo ),
- reinterpret_cast<VkRemoteAddressNV *>( pAddress ) ) );
+ m_device, reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( pMemoryGetRemoteAddressInfo ), reinterpret_cast<VkRemoteAddressNV *>( pAddress ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RemoteAddressNV>::type
- Device::getMemoryRemoteAddressNV(
- const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo, Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RemoteAddressNV>::type
+ Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::RemoteAddressNV address;
- Result result = static_cast<Result>( d.vkGetMemoryRemoteAddressNV(
- m_device,
- reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( &memoryGetRemoteAddressInfo ),
- reinterpret_cast<VkRemoteAddressNV *>( &address ) ) );
- return createResultValue( result, address, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" );
+ VkResult result = d.vkGetMemoryRemoteAddressNV(
+ m_device, reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( &memoryGetRemoteAddressInfo ), reinterpret_cast<VkRemoteAddressNV *>( &address ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), address );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_EXT_pipeline_properties ===
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT * pPipelineInfo,
+ VULKAN_HPP_NAMESPACE::BaseOutStructure * pPipelineProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>( d.vkGetPipelinePropertiesEXT(
+ m_device, reinterpret_cast<const VkPipelineInfoEXT *>( pPipelineInfo ), reinterpret_cast<VkBaseOutStructure *>( pPipelineProperties ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BaseOutStructure>::type
+ Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo, Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::BaseOutStructure pipelineProperties;
+ VkResult result = d.vkGetPipelinePropertiesEXT(
+ m_device, reinterpret_cast<const VkPipelineInfoEXT *>( &pipelineInfo ), reinterpret_cast<VkBaseOutStructure *>( &pipelineProperties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelinePropertiesEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelineProperties );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_EXT_extended_dynamic_state2 ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetPatchControlPointsEXT( m_commandBuffer, patchControlPoints );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetRasterizerDiscardEnableEXT( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetDepthBiasEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetLogicOpEXT( m_commandBuffer, static_cast<VkLogicOp>( logicOp ) );
}
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetPrimitiveRestartEnableEXT( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) );
@@ -20607,78 +19521,81 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_QNX_screen_surface ===
template <typename Dispatch>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
- Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Result>(
- d.vkCreateScreenSurfaceQNX( m_instance,
- reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( pCreateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
- reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+ return static_cast<Result>( d.vkCreateScreenSurfaceQNX( m_instance,
+ reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateScreenSurfaceQNX( m_instance,
- reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNX" );
+ VkResult result = d.vkCreateScreenSurfaceQNX(
+ m_instance,
+ reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNX" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- Instance::createScreenSurfaceQNXUnique( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ Instance::createScreenSurfaceQNXUnique( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
- Result result = static_cast<Result>(
- d.vkCreateScreenSurfaceQNX( m_instance,
- reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
- ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
- return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
- result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNXUnique", deleter );
+ VkResult result = d.vkCreateScreenSurfaceQNX(
+ m_instance,
+ reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNXUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX(
- uint32_t queueFamilyIndex, struct _screen_window * window, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex,
+ struct _screen_window * window,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return static_cast<Bool32>(
- d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, window ) );
+ return static_cast<Bool32>( d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, window ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX(
- uint32_t queueFamilyIndex, struct _screen_window & window, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
+ PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, struct _screen_window & window, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- return d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, &window );
+
+ VkBool32 result = d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, &window );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
@@ -20688,25 +19605,33 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( uint32_t attachmentCount,
const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetColorWriteEnableEXT(
- m_commandBuffer, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorWriteEnables ) );
+ d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorWriteEnables ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::setColorWriteEnableEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdSetColorWriteEnableEXT(
- m_commandBuffer, colorWriteEnables.size(), reinterpret_cast<const VkBool32 *>( colorWriteEnables.data() ) );
+
+ d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, colorWriteEnables.size(), reinterpret_cast<const VkBool32 *>( colorWriteEnables.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_KHR_ray_tracing_maintenance1 ===
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdTraceRaysIndirect2KHR( m_commandBuffer, static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
+ }
+
//=== VK_EXT_multi_draw ===
template <typename Dispatch>
@@ -20715,83 +19640,523 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t instanceCount,
uint32_t firstInstance,
uint32_t stride,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDrawMultiEXT( m_commandBuffer,
- drawCount,
- reinterpret_cast<const VkMultiDrawInfoEXT *>( pVertexInfo ),
- instanceCount,
- firstInstance,
- stride );
+ d.vkCmdDrawMultiEXT( m_commandBuffer, drawCount, reinterpret_cast<const VkMultiDrawInfoEXT *>( pVertexInfo ), instanceCount, firstInstance, stride );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::drawMultiEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo,
- uint32_t instanceCount,
- uint32_t firstInstance,
- uint32_t stride,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo,
+ uint32_t instanceCount,
+ uint32_t firstInstance,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdDrawMultiEXT( m_commandBuffer,
vertexInfo.size(),
reinterpret_cast<const VkMultiDrawInfoEXT *>( vertexInfo.data() ),
instanceCount,
firstInstance,
- stride );
+ vertexInfo.stride() );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- CommandBuffer::drawMultiIndexedEXT( uint32_t drawCount,
- const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT * pIndexInfo,
- uint32_t instanceCount,
- uint32_t firstInstance,
- uint32_t stride,
- const int32_t * pVertexOffset,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawMultiIndexedEXT( uint32_t drawCount,
+ const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT * pIndexInfo,
+ uint32_t instanceCount,
+ uint32_t firstInstance,
+ uint32_t stride,
+ const int32_t * pVertexOffset,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkCmdDrawMultiIndexedEXT( m_commandBuffer,
- drawCount,
- reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( pIndexInfo ),
- instanceCount,
- firstInstance,
- stride,
- pVertexOffset );
+ d.vkCmdDrawMultiIndexedEXT(
+ m_commandBuffer, drawCount, reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( pIndexInfo ), instanceCount, firstInstance, stride, pVertexOffset );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawMultiIndexedEXT(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo,
- uint32_t instanceCount,
- uint32_t firstInstance,
- uint32_t stride,
- Optional<const int32_t> vertexOffset,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo,
+ uint32_t instanceCount,
+ uint32_t firstInstance,
+ Optional<const int32_t> vertexOffset,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
d.vkCmdDrawMultiIndexedEXT( m_commandBuffer,
indexInfo.size(),
reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( indexInfo.data() ),
instanceCount,
firstInstance,
- stride,
+ indexInfo.stride(),
static_cast<const int32_t *>( vertexOffset ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_EXT_opacity_micromap ===
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromap,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>( d.vkCreateMicromapEXT( m_device,
+ reinterpret_cast<const VkMicromapCreateInfoEXT *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkMicromapEXT *>( pMicromap ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MicromapEXT>::type
+ Device::createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::MicromapEXT micromap;
+ VkResult result =
+ d.vkCreateMicromapEXT( m_device,
+ reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkMicromapEXT *>( &micromap ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), micromap );
+ }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::MicromapEXT, Dispatch>>::type
+ Device::createMicromapEXTUnique( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::MicromapEXT micromap;
+ VkResult result =
+ d.vkCreateMicromapEXT( m_device,
+ reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkMicromapEXT *>( &micromap ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXTUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::MicromapEXT, Dispatch>( micromap, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+ }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkDestroyMicromapEXT( m_device,
+ static_cast<VkMicromapEXT>( micromap ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkDestroyMicromapEXT( m_device,
+ static_cast<VkMicromapEXT>( micromap ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT( uint32_t infoCount,
+ const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdBuildMicromapsEXT( m_commandBuffer, infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkCmdBuildMicromapsEXT( m_commandBuffer, infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ uint32_t infoCount,
+ const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>( d.vkBuildMicromapsEXT(
+ m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
+ Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VkResult result = d.vkBuildMicromapsEXT(
+ m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) );
+ resultCheck(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::buildMicromapsEXT",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>(
+ d.vkCopyMicromapEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VkResult result =
+ d.vkCopyMicromapEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) );
+ resultCheck(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapEXT",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>( d.vkCopyMicromapToMemoryEXT(
+ m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapToMemoryEXT(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info, Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VkResult result = d.vkCopyMicromapToMemoryEXT(
+ m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) );
+ resultCheck(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapToMemoryEXT",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>( d.vkCopyMemoryToMicromapEXT(
+ m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMemoryToMicromapEXT(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info, Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VkResult result = d.vkCopyMemoryToMicromapEXT(
+ m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) );
+ resultCheck(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToMicromapEXT",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::writeMicromapsPropertiesEXT( uint32_t micromapCount,
+ const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t dataSize,
+ void * pData,
+ size_t stride,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>( d.vkWriteMicromapsPropertiesEXT(
+ m_device, micromapCount, reinterpret_cast<const VkMicromapEXT *>( pMicromaps ), static_cast<VkQueryType>( queryType ), dataSize, pData, stride ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename DataType, typename DataTypeAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type
+ Device::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t dataSize,
+ size_t stride,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+ std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
+ VkResult result = d.vkWriteMicromapsPropertiesEXT( m_device,
+ micromaps.size(),
+ reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
+ static_cast<VkQueryType>( queryType ),
+ data.size() * sizeof( DataType ),
+ reinterpret_cast<void *>( data.data() ),
+ stride );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertiesEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+ }
+
+ template <typename DataType, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
+ Device::writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t stride,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ DataType data;
+ VkResult result = d.vkWriteMicromapsPropertiesEXT( m_device,
+ micromaps.size(),
+ reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
+ static_cast<VkQueryType>( queryType ),
+ sizeof( DataType ),
+ reinterpret_cast<void *>( &data ),
+ stride );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertyEXT" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdCopyMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkCmdCopyMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdCopyMicromapToMemoryEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkCmdCopyMicromapToMemoryEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdCopyMemoryToMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkCmdCopyMemoryToMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::writeMicromapsPropertiesEXT( uint32_t micromapCount,
+ const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdWriteMicromapsPropertiesEXT( m_commandBuffer,
+ micromapCount,
+ reinterpret_cast<const VkMicromapEXT *>( pMicromaps ),
+ static_cast<VkQueryType>( queryType ),
+ static_cast<VkQueryPool>( queryPool ),
+ firstQuery );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkCmdWriteMicromapsPropertiesEXT( m_commandBuffer,
+ micromaps.size(),
+ reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
+ static_cast<VkQueryType>( queryType ),
+ static_cast<VkQueryPool>( queryPool ),
+ firstQuery );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT * pVersionInfo,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkGetDeviceMicromapCompatibilityEXT( m_device,
+ reinterpret_cast<const VkMicromapVersionInfoEXT *>( pVersionInfo ),
+ reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
+ Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility;
+ d.vkGetDeviceMicromapCompatibilityEXT( m_device,
+ reinterpret_cast<const VkMicromapVersionInfoEXT *>( &versionInfo ),
+ reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
+
+ return compatibility;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
+ const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pBuildInfo,
+ VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT * pSizeInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkGetMicromapBuildSizesEXT( m_device,
+ static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
+ reinterpret_cast<const VkMicromapBuildInfoEXT *>( pBuildInfo ),
+ reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( pSizeInfo ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT
+ Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
+ const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT sizeInfo;
+ d.vkGetMicromapBuildSizesEXT( m_device,
+ static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
+ reinterpret_cast<const VkMicromapBuildInfoEXT *>( &buildInfo ),
+ reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( &sizeInfo ) );
+
+ return sizeInfo;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
//=== VK_EXT_pageable_device_local_memory ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::setMemoryPriorityEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- float priority,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::setMemoryPriorityEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory, float priority, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkSetDeviceMemoryPriorityEXT( m_device, static_cast<VkDeviceMemory>( memory ), priority );
@@ -20800,158 +20165,958 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_maintenance4 ===
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo,
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDeviceBufferMemoryRequirementsKHR( m_device,
- reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ),
- reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+ d.vkGetDeviceBufferMemoryRequirementsKHR(
+ m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
- Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
- d.vkGetDeviceBufferMemoryRequirementsKHR( m_device,
- reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ d.vkGetDeviceBufferMemoryRequirementsKHR(
+ m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return memoryRequirements;
}
template <typename X, typename Y, typename... Z, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
- structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
- d.vkGetDeviceBufferMemoryRequirementsKHR( m_device,
- reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+ d.vkGetDeviceBufferMemoryRequirementsKHR(
+ m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void
- Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDeviceImageMemoryRequirementsKHR( m_device,
- reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ),
- reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+ d.vkGetDeviceImageMemoryRequirementsKHR(
+ m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
- Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
- d.vkGetDeviceImageMemoryRequirementsKHR( m_device,
- reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ d.vkGetDeviceImageMemoryRequirementsKHR(
+ m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return memoryRequirements;
}
template <typename X, typename Y, typename... Z, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
- structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
- d.vkGetDeviceImageMemoryRequirementsKHR( m_device,
- reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+ d.vkGetDeviceImageMemoryRequirementsKHR(
+ m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
- VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirementsKHR(
- const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
- uint32_t * pSparseMemoryRequirementCount,
- VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
- Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
+ uint32_t * pSparseMemoryRequirementCount,
+ VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- d.vkGetDeviceImageSparseMemoryRequirementsKHR(
- m_device,
- reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ),
- pSparseMemoryRequirementCount,
- reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
+ d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device,
+ reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ),
+ pSparseMemoryRequirementCount,
+ reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
- VULKAN_HPP_NODISCARD
- VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
- Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
- Dispatch const & d ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
+ Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
- uint32_t sparseMemoryRequirementCount;
+
+ std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
+ uint32_t sparseMemoryRequirementCount;
+ d.vkGetDeviceImageSparseMemoryRequirementsKHR(
+ m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device,
reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
&sparseMemoryRequirementCount,
- nullptr );
- sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
- d.vkGetDeviceImageSparseMemoryRequirementsKHR(
- m_device,
- reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
- &sparseMemoryRequirementCount,
- reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+ reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+
VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+ if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+ {
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+ }
return sparseMemoryRequirements;
}
- template <
- typename SparseImageMemoryRequirements2Allocator,
- typename Dispatch,
- typename B,
- typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value, int>::type>
- VULKAN_HPP_NODISCARD
- VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
- Device::getImageSparseMemoryRequirementsKHR(
- const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
- SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
- Dispatch const & d ) const
+ template <typename SparseImageMemoryRequirements2Allocator,
+ typename Dispatch,
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements2>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
+ Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
+ SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
+ Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
- std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
+
+ std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
sparseImageMemoryRequirements2Allocator );
uint32_t sparseMemoryRequirementCount;
+ d.vkGetDeviceImageSparseMemoryRequirementsKHR(
+ m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device,
reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
&sparseMemoryRequirementCount,
- nullptr );
- sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
- d.vkGetDeviceImageSparseMemoryRequirementsKHR(
- m_device,
- reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
- &sparseMemoryRequirementCount,
- reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+ reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+
VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+ if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+ {
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+ }
return sparseMemoryRequirements;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_VALVE_descriptor_set_host_mapping ===
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE * pBindingReference,
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE * pHostMapping,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device,
+ reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( pBindingReference ),
+ reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( pHostMapping ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE
+ Device::getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE hostMapping;
+ d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device,
+ reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( &bindingReference ),
+ reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( &hostMapping ) );
+
+ return hostMapping;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ Device::getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, void ** ppData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast<VkDescriptorSet>( descriptorSet ), ppData );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * Device::getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ void * pData;
+ d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast<VkDescriptorSet>( descriptorSet ), &pData );
+
+ return pData;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_EXT_extended_dynamic_state3 ===
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetTessellationDomainOriginEXT( m_commandBuffer, static_cast<VkTessellationDomainOrigin>( domainOrigin ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetDepthClampEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthClampEnable ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetPolygonModeEXT( m_commandBuffer, static_cast<VkPolygonMode>( polygonMode ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetRasterizationSamplesEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( rasterizationSamples ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+ const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetSampleMaskEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<const VkSampleMask *>( pSampleMask ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkCmdSetSampleMaskEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<const VkSampleMask *>( sampleMask.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetAlphaToCoverageEnableEXT( m_commandBuffer, static_cast<VkBool32>( alphaToCoverageEnable ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetAlphaToOneEnableEXT( m_commandBuffer, static_cast<VkBool32>( alphaToOneEnable ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetLogicOpEnableEXT( m_commandBuffer, static_cast<VkBool32>( logicOpEnable ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VULKAN_HPP_NAMESPACE::Bool32 * pColorBlendEnables,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetColorBlendEnableEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorBlendEnables ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkCmdSetColorBlendEnableEXT( m_commandBuffer, firstAttachment, colorBlendEnables.size(), reinterpret_cast<const VkBool32 *>( colorBlendEnables.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEquationEXT( uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT * pColorBlendEquations,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetColorBlendEquationEXT(
+ m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorBlendEquationEXT *>( pColorBlendEquations ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setColorBlendEquationEXT( uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkCmdSetColorBlendEquationEXT(
+ m_commandBuffer, firstAttachment, colorBlendEquations.size(), reinterpret_cast<const VkColorBlendEquationEXT *>( colorBlendEquations.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setColorWriteMaskEXT( uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VULKAN_HPP_NAMESPACE::ColorComponentFlags * pColorWriteMasks,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetColorWriteMaskEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorComponentFlags *>( pColorWriteMasks ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setColorWriteMaskEXT( uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> const & colorWriteMasks,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkCmdSetColorWriteMaskEXT(
+ m_commandBuffer, firstAttachment, colorWriteMasks.size(), reinterpret_cast<const VkColorComponentFlags *>( colorWriteMasks.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setRasterizationStreamEXT( uint32_t rasterizationStream, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetRasterizationStreamEXT( m_commandBuffer, rasterizationStream );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setConservativeRasterizationModeEXT( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetConservativeRasterizationModeEXT( m_commandBuffer, static_cast<VkConservativeRasterizationModeEXT>( conservativeRasterizationMode ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetExtraPrimitiveOverestimationSizeEXT( m_commandBuffer, extraPrimitiveOverestimationSize );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetDepthClipEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthClipEnable ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetSampleLocationsEnableEXT( m_commandBuffer, static_cast<VkBool32>( sampleLocationsEnable ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setColorBlendAdvancedEXT( uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT * pColorBlendAdvanced,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetColorBlendAdvancedEXT(
+ m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorBlendAdvancedEXT *>( pColorBlendAdvanced ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setColorBlendAdvancedEXT( uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT> const & colorBlendAdvanced,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkCmdSetColorBlendAdvancedEXT(
+ m_commandBuffer, firstAttachment, colorBlendAdvanced.size(), reinterpret_cast<const VkColorBlendAdvancedEXT *>( colorBlendAdvanced.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetProvokingVertexModeEXT( m_commandBuffer, static_cast<VkProvokingVertexModeEXT>( provokingVertexMode ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetLineRasterizationModeEXT( m_commandBuffer, static_cast<VkLineRasterizationModeEXT>( lineRasterizationMode ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetLineStippleEnableEXT( m_commandBuffer, static_cast<VkBool32>( stippledLineEnable ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetDepthClipNegativeOneToOneEXT( m_commandBuffer, static_cast<VkBool32>( negativeOneToOne ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetViewportWScalingEnableNV( m_commandBuffer, static_cast<VkBool32>( viewportWScalingEnable ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setViewportSwizzleNV( uint32_t firstViewport,
+ uint32_t viewportCount,
+ const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetViewportSwizzleNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportSwizzleNV *>( pViewportSwizzles ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setViewportSwizzleNV( uint32_t firstViewport,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkCmdSetViewportSwizzleNV(
+ m_commandBuffer, firstViewport, viewportSwizzles.size(), reinterpret_cast<const VkViewportSwizzleNV *>( viewportSwizzles.data() ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetCoverageToColorEnableNV( m_commandBuffer, static_cast<VkBool32>( coverageToColorEnable ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorLocationNV( uint32_t coverageToColorLocation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetCoverageToColorLocationNV( m_commandBuffer, coverageToColorLocation );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetCoverageModulationModeNV( m_commandBuffer, static_cast<VkCoverageModulationModeNV>( coverageModulationMode ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetCoverageModulationTableEnableNV( m_commandBuffer, static_cast<VkBool32>( coverageModulationTableEnable ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableNV( uint32_t coverageModulationTableCount,
+ const float * pCoverageModulationTable,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetCoverageModulationTableNV( m_commandBuffer, coverageModulationTableCount, pCoverageModulationTable );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkCmdSetCoverageModulationTableNV( m_commandBuffer, coverageModulationTable.size(), coverageModulationTable.data() );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetShadingRateImageEnableNV( m_commandBuffer, static_cast<VkBool32>( shadingRateImageEnable ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetRepresentativeFragmentTestEnableNV( m_commandBuffer, static_cast<VkBool32>( representativeFragmentTestEnable ) );
+ }
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdSetCoverageReductionModeNV( m_commandBuffer, static_cast<VkCoverageReductionModeNV>( coverageReductionMode ) );
+ }
+
+ //=== VK_EXT_shader_module_identifier ===
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
+ VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkGetShaderModuleIdentifierEXT( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT
+ Device::getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier;
+ d.vkGetShaderModuleIdentifierEXT( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) );
+
+ return identifier;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo,
+ VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkGetShaderModuleCreateInfoIdentifierEXT(
+ m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT
+ Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier;
+ d.vkGetShaderModuleCreateInfoIdentifierEXT(
+ m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) );
+
+ return identifier;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_NV_optical_flow ===
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+ PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo,
+ uint32_t * pFormatCount,
+ VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV * pImageFormatProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>(
+ d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice,
+ reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( pOpticalFlowImageFormatInfo ),
+ pFormatCount,
+ reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( pImageFormatProperties ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename OpticalFlowImageFormatPropertiesNVAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator>>::type
+ PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator> imageFormatProperties;
+ uint32_t formatCount;
+ VkResult result;
+ do
+ {
+ result = d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV(
+ m_physicalDevice, reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), &formatCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && formatCount )
+ {
+ imageFormatProperties.resize( formatCount );
+ result = d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice,
+ reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ),
+ &formatCount,
+ reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) );
+ }
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" );
+ VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() );
+ if ( formatCount < imageFormatProperties.size() )
+ {
+ imageFormatProperties.resize( formatCount );
+ }
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
+ }
+
+ template <typename OpticalFlowImageFormatPropertiesNVAllocator,
+ typename Dispatch,
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, OpticalFlowImageFormatPropertiesNV>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator>>::type
+ PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo,
+ OpticalFlowImageFormatPropertiesNVAllocator & opticalFlowImageFormatPropertiesNVAllocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator> imageFormatProperties(
+ opticalFlowImageFormatPropertiesNVAllocator );
+ uint32_t formatCount;
+ VkResult result;
+ do
+ {
+ result = d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV(
+ m_physicalDevice, reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), &formatCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && formatCount )
+ {
+ imageFormatProperties.resize( formatCount );
+ result = d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice,
+ reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ),
+ &formatCount,
+ reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) );
+ }
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" );
+ VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() );
+ if ( formatCount < imageFormatProperties.size() )
+ {
+ imageFormatProperties.resize( formatCount );
+ }
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV * pSession,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>( d.vkCreateOpticalFlowSessionNV( m_device,
+ reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( pCreateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+ reinterpret_cast<VkOpticalFlowSessionNV *>( pSession ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV>::type
+ Device::createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session;
+ VkResult result = d.vkCreateOpticalFlowSessionNV(
+ m_device,
+ reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkOpticalFlowSessionNV *>( &session ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNV" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), session );
+ }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV, Dispatch>>::type
+ Device::createOpticalFlowSessionNVUnique( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session;
+ VkResult result = d.vkCreateOpticalFlowSessionNV(
+ m_device,
+ reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkOpticalFlowSessionNV *>( &session ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNVUnique" );
+
+ return createResultValueType(
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ UniqueHandle<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV, Dispatch>( session, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+ }
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkDestroyOpticalFlowSessionNV( m_device, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkDestroyOpticalFlowSessionNV(
+ m_device,
+ static_cast<VkOpticalFlowSessionNV>( session ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkDestroyOpticalFlowSessionNV( m_device, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkDestroyOpticalFlowSessionNV(
+ m_device,
+ static_cast<VkOpticalFlowSessionNV>( session ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint,
+ VULKAN_HPP_NAMESPACE::ImageView view,
+ VULKAN_HPP_NAMESPACE::ImageLayout layout,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>( d.vkBindOpticalFlowSessionImageNV( m_device,
+ static_cast<VkOpticalFlowSessionNV>( session ),
+ static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ),
+ static_cast<VkImageView>( view ),
+ static_cast<VkImageLayout>( layout ) ) );
+ }
+#else
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+ Device::bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint,
+ VULKAN_HPP_NAMESPACE::ImageView view,
+ VULKAN_HPP_NAMESPACE::ImageLayout layout,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VkResult result = d.vkBindOpticalFlowSessionImageNV( m_device,
+ static_cast<VkOpticalFlowSessionNV>( session ),
+ static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ),
+ static_cast<VkImageView>( view ),
+ static_cast<VkImageLayout>( layout ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindOpticalFlowSessionImageNV" );
+
+ return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV * pExecuteInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ d.vkCmdOpticalFlowExecuteNV(
+ m_commandBuffer, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( pExecuteInfo ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ d.vkCmdOpticalFlowExecuteNV(
+ m_commandBuffer, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( &executeInfo ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_QCOM_tile_properties ===
+
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
+ uint32_t * pPropertiesCount,
+ VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>( d.vkGetFramebufferTilePropertiesQCOM(
+ m_device, static_cast<VkFramebuffer>( framebuffer ), pPropertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename TilePropertiesQCOMAllocator, typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator>>::type
+ Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator> properties;
+ uint32_t propertiesCount;
+ VkResult result;
+ do
+ {
+ result = d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertiesCount )
+ {
+ properties.resize( propertiesCount );
+ result = d.vkGetFramebufferTilePropertiesQCOM(
+ m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( properties.data() ) );
+ }
+ } while ( result == VK_INCOMPLETE );
+
+ VULKAN_HPP_ASSERT( propertiesCount <= properties.size() );
+ if ( propertiesCount < properties.size() )
+ {
+ properties.resize( propertiesCount );
+ }
+ return properties;
+ }
+
+ template <typename TilePropertiesQCOMAllocator,
+ typename Dispatch,
+ typename B1,
+ typename std::enable_if<std::is_same<typename B1::value_type, TilePropertiesQCOM>::value, int>::type>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator>>::type
+ Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
+ TilePropertiesQCOMAllocator & tilePropertiesQCOMAllocator,
+ Dispatch const & d ) const
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator> properties( tilePropertiesQCOMAllocator );
+ uint32_t propertiesCount;
+ VkResult result;
+ do
+ {
+ result = d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertiesCount )
+ {
+ properties.resize( propertiesCount );
+ result = d.vkGetFramebufferTilePropertiesQCOM(
+ m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( properties.data() ) );
+ }
+ } while ( result == VK_INCOMPLETE );
+
+ VULKAN_HPP_ASSERT( propertiesCount <= properties.size() );
+ if ( propertiesCount < properties.size() )
+ {
+ properties.resize( propertiesCount );
+ }
+ return properties;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,
+ VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties,
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+ return static_cast<Result>( d.vkGetDynamicRenderingTilePropertiesQCOM(
+ m_device, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ), reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) );
+ }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::TilePropertiesQCOM
+ Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+ VULKAN_HPP_NAMESPACE::TilePropertiesQCOM properties;
+ d.vkGetDynamicRenderingTilePropertiesQCOM(
+ m_device, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ), reinterpret_cast<VkTilePropertiesQCOM *>( &properties ) );
+
+ return properties;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
} // namespace VULKAN_HPP_NAMESPACE
#endif
diff --git a/thirdparty/vulkan/include/vulkan/vulkan_handles.hpp b/thirdparty/vulkan/include/vulkan/vulkan_handles.hpp
index 1902318730..b81194b9e6 100644
--- a/thirdparty/vulkan/include/vulkan/vulkan_handles.hpp
+++ b/thirdparty/vulkan/include/vulkan/vulkan_handles.hpp
@@ -533,17 +533,17 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_video_queue ===
- struct QueueFamilyQueryResultStatusProperties2KHR;
- struct VideoQueueFamilyProperties2KHR;
- struct VideoProfileKHR;
- struct VideoProfilesKHR;
+ struct QueueFamilyQueryResultStatusPropertiesKHR;
+ struct QueueFamilyVideoPropertiesKHR;
+ struct VideoProfileInfoKHR;
+ struct VideoProfileListInfoKHR;
struct VideoCapabilitiesKHR;
struct PhysicalDeviceVideoFormatInfoKHR;
struct VideoFormatPropertiesKHR;
- struct VideoPictureResourceKHR;
- struct VideoReferenceSlotKHR;
- struct VideoGetMemoryPropertiesKHR;
- struct VideoBindMemoryKHR;
+ struct VideoPictureResourceInfoKHR;
+ struct VideoReferenceSlotInfoKHR;
+ struct VideoSessionMemoryRequirementsKHR;
+ struct BindVideoSessionMemoryInfoKHR;
struct VideoSessionCreateInfoKHR;
struct VideoSessionParametersCreateInfoKHR;
struct VideoSessionParametersUpdateInfoKHR;
@@ -554,6 +554,8 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_video_decode_queue ===
+ struct VideoDecodeCapabilitiesKHR;
+ struct VideoDecodeUsageInfoKHR;
struct VideoDecodeInfoKHR;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
@@ -579,14 +581,14 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_EXT_video_encode_h264 ===
struct VideoEncodeH264CapabilitiesEXT;
- struct VideoEncodeH264SessionCreateInfoEXT;
struct VideoEncodeH264SessionParametersCreateInfoEXT;
struct VideoEncodeH264SessionParametersAddInfoEXT;
struct VideoEncodeH264VclFrameInfoEXT;
- struct VideoEncodeH264EmitPictureParametersEXT;
+ struct VideoEncodeH264ReferenceListsInfoEXT;
+ struct VideoEncodeH264EmitPictureParametersInfoEXT;
struct VideoEncodeH264DpbSlotInfoEXT;
- struct VideoEncodeH264NaluSliceEXT;
- struct VideoEncodeH264ProfileEXT;
+ struct VideoEncodeH264NaluSliceInfoEXT;
+ struct VideoEncodeH264ProfileInfoEXT;
struct VideoEncodeH264RateControlInfoEXT;
struct VideoEncodeH264RateControlLayerInfoEXT;
struct VideoEncodeH264QpEXT;
@@ -596,15 +598,14 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_EXT_video_encode_h265 ===
struct VideoEncodeH265CapabilitiesEXT;
- struct VideoEncodeH265SessionCreateInfoEXT;
struct VideoEncodeH265SessionParametersCreateInfoEXT;
struct VideoEncodeH265SessionParametersAddInfoEXT;
struct VideoEncodeH265VclFrameInfoEXT;
- struct VideoEncodeH265EmitPictureParametersEXT;
+ struct VideoEncodeH265EmitPictureParametersInfoEXT;
struct VideoEncodeH265DpbSlotInfoEXT;
- struct VideoEncodeH265NaluSliceSegmentEXT;
- struct VideoEncodeH265ProfileEXT;
- struct VideoEncodeH265ReferenceListsEXT;
+ struct VideoEncodeH265NaluSliceSegmentInfoEXT;
+ struct VideoEncodeH265ProfileInfoEXT;
+ struct VideoEncodeH265ReferenceListsInfoEXT;
struct VideoEncodeH265RateControlInfoEXT;
struct VideoEncodeH265RateControlLayerInfoEXT;
struct VideoEncodeH265QpEXT;
@@ -613,13 +614,11 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_EXT_video_decode_h264 ===
- struct VideoDecodeH264ProfileEXT;
+ struct VideoDecodeH264ProfileInfoEXT;
struct VideoDecodeH264CapabilitiesEXT;
- struct VideoDecodeH264SessionCreateInfoEXT;
struct VideoDecodeH264SessionParametersCreateInfoEXT;
struct VideoDecodeH264SessionParametersAddInfoEXT;
struct VideoDecodeH264PictureInfoEXT;
- struct VideoDecodeH264MvcEXT;
struct VideoDecodeH264DpbSlotInfoEXT;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
@@ -675,6 +674,11 @@ namespace VULKAN_HPP_NAMESPACE
struct ImageViewASTCDecodeModeEXT;
struct PhysicalDeviceASTCDecodeFeaturesEXT;
+ //=== VK_EXT_pipeline_robustness ===
+ struct PhysicalDevicePipelineRobustnessFeaturesEXT;
+ struct PhysicalDevicePipelineRobustnessPropertiesEXT;
+ struct PipelineRobustnessCreateInfoEXT;
+
#if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_memory_win32 ===
struct ImportMemoryWin32HandleInfoKHR;
@@ -946,9 +950,8 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_EXT_video_decode_h265 ===
- struct VideoDecodeH265ProfileEXT;
+ struct VideoDecodeH265ProfileInfoEXT;
struct VideoDecodeH265CapabilitiesEXT;
- struct VideoDecodeH265SessionCreateInfoEXT;
struct VideoDecodeH265SessionParametersCreateInfoEXT;
struct VideoDecodeH265SessionParametersAddInfoEXT;
struct VideoDecodeH265PictureInfoEXT;
@@ -985,9 +988,6 @@ namespace VULKAN_HPP_NAMESPACE
struct PhysicalDeviceMeshShaderPropertiesNV;
struct DrawMeshTasksIndirectCommandNV;
- //=== VK_NV_fragment_shader_barycentric ===
- struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV;
-
//=== VK_NV_shader_image_footprint ===
struct PhysicalDeviceShaderImageFootprintFeaturesNV;
@@ -1123,6 +1123,7 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_pipeline_executable_properties ===
struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
struct PipelineInfoKHR;
+ using PipelineInfoEXT = PipelineInfoKHR;
struct PipelineExecutablePropertiesKHR;
struct PipelineExecutableInfoKHR;
union PipelineExecutableStatisticValueKHR;
@@ -1175,6 +1176,11 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_pipeline_library ===
struct PipelineLibraryCreateInfoKHR;
+ //=== VK_NV_present_barrier ===
+ struct PhysicalDevicePresentBarrierFeaturesNV;
+ struct SurfaceCapabilitiesPresentBarrierNV;
+ struct SwapchainPresentBarrierCreateInfoNV;
+
//=== VK_KHR_present_id ===
struct PresentIdKHR;
struct PhysicalDevicePresentIdFeaturesKHR;
@@ -1182,6 +1188,8 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_video_encode_queue ===
struct VideoEncodeInfoKHR;
+ struct VideoEncodeCapabilitiesKHR;
+ struct VideoEncodeUsageInfoKHR;
struct VideoEncodeRateControlInfoKHR;
struct VideoEncodeRateControlLayerInfoKHR;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
@@ -1190,10 +1198,39 @@ namespace VULKAN_HPP_NAMESPACE
struct PhysicalDeviceDiagnosticsConfigFeaturesNV;
struct DeviceDiagnosticsConfigCreateInfoNV;
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ //=== VK_EXT_metal_objects ===
+ struct ExportMetalObjectCreateInfoEXT;
+ struct ExportMetalObjectsInfoEXT;
+ struct ExportMetalDeviceInfoEXT;
+ struct ExportMetalCommandQueueInfoEXT;
+ struct ExportMetalBufferInfoEXT;
+ struct ImportMetalBufferInfoEXT;
+ struct ExportMetalTextureInfoEXT;
+ struct ImportMetalTextureInfoEXT;
+ struct ExportMetalIOSurfaceInfoEXT;
+ struct ImportMetalIOSurfaceInfoEXT;
+ struct ExportMetalSharedEventInfoEXT;
+ struct ImportMetalSharedEventInfoEXT;
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
//=== VK_KHR_synchronization2 ===
struct QueueFamilyCheckpointProperties2NV;
struct CheckpointData2NV;
+ //=== VK_EXT_graphics_pipeline_library ===
+ struct PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT;
+ struct PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT;
+ struct GraphicsPipelineLibraryCreateInfoEXT;
+
+ //=== VK_AMD_shader_early_and_late_fragment_tests ===
+ struct PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD;
+
+ //=== VK_KHR_fragment_shader_barycentric ===
+ struct PhysicalDeviceFragmentShaderBarycentricFeaturesKHR;
+ using PhysicalDeviceFragmentShaderBarycentricFeaturesNV = PhysicalDeviceFragmentShaderBarycentricFeaturesKHR;
+ struct PhysicalDeviceFragmentShaderBarycentricPropertiesKHR;
+
//=== VK_KHR_shader_subgroup_uniform_control_flow ===
struct PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
@@ -1212,6 +1249,11 @@ namespace VULKAN_HPP_NAMESPACE
struct SRTDataNV;
struct PhysicalDeviceRayTracingMotionBlurFeaturesNV;
+ //=== VK_EXT_mesh_shader ===
+ struct PhysicalDeviceMeshShaderFeaturesEXT;
+ struct PhysicalDeviceMeshShaderPropertiesEXT;
+ struct DrawMeshTasksIndirectCommandEXT;
+
//=== VK_EXT_ycbcr_2plane_444_formats ===
struct PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
@@ -1225,11 +1267,26 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_workgroup_memory_explicit_layout ===
struct PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
+ //=== VK_EXT_image_compression_control ===
+ struct PhysicalDeviceImageCompressionControlFeaturesEXT;
+ struct ImageCompressionControlEXT;
+ struct SubresourceLayout2EXT;
+ struct ImageSubresource2EXT;
+ struct ImageCompressionPropertiesEXT;
+
+ //=== VK_EXT_attachment_feedback_loop_layout ===
+ struct PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;
+
//=== VK_EXT_4444_formats ===
struct PhysicalDevice4444FormatsFeaturesEXT;
- //=== VK_ARM_rasterization_order_attachment_access ===
- struct PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM;
+ //=== VK_EXT_device_fault ===
+ struct PhysicalDeviceFaultFeaturesEXT;
+ struct DeviceFaultCountsEXT;
+ struct DeviceFaultInfoEXT;
+ struct DeviceFaultAddressInfoEXT;
+ struct DeviceFaultVendorInfoEXT;
+ struct DeviceFaultVendorBinaryHeaderVersionOneEXT;
//=== VK_EXT_rgba10x6_formats ===
struct PhysicalDeviceRGBA10X6FormatsFeaturesEXT;
@@ -1251,11 +1308,6 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_ray_query ===
struct PhysicalDeviceRayQueryFeaturesKHR;
- //=== VK_VALVE_mutable_descriptor_type ===
- struct PhysicalDeviceMutableDescriptorTypeFeaturesVALVE;
- struct MutableDescriptorTypeListVALVE;
- struct MutableDescriptorTypeCreateInfoVALVE;
-
//=== VK_EXT_vertex_input_dynamic_state ===
struct PhysicalDeviceVertexInputDynamicStateFeaturesEXT;
struct VertexInputBindingDescription2EXT;
@@ -1264,6 +1316,10 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_physical_device_drm ===
struct PhysicalDeviceDrmPropertiesEXT;
+ //=== VK_EXT_device_address_binding_report ===
+ struct PhysicalDeviceAddressBindingReportFeaturesEXT;
+ struct DeviceAddressBindingCallbackDataEXT;
+
//=== VK_EXT_depth_clip_control ===
struct PhysicalDeviceDepthClipControlFeaturesEXT;
struct PipelineViewportDepthClipControlCreateInfoEXT;
@@ -1310,6 +1366,15 @@ namespace VULKAN_HPP_NAMESPACE
struct MemoryGetRemoteAddressInfoNV;
struct PhysicalDeviceExternalMemoryRDMAFeaturesNV;
+ //=== VK_EXT_pipeline_properties ===
+ struct PipelinePropertiesIdentifierEXT;
+ struct PhysicalDevicePipelinePropertiesFeaturesEXT;
+
+ //=== VK_EXT_multisampled_render_to_single_sampled ===
+ struct PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT;
+ struct SubpassResolvePerformanceQueryEXT;
+ struct MultisampledRenderToSingleSampledInfoEXT;
+
//=== VK_EXT_extended_dynamic_state2 ===
struct PhysicalDeviceExtendedDynamicState2FeaturesEXT;
@@ -1322,6 +1387,13 @@ namespace VULKAN_HPP_NAMESPACE
struct PhysicalDeviceColorWriteEnableFeaturesEXT;
struct PipelineColorWriteCreateInfoEXT;
+ //=== VK_EXT_primitives_generated_query ===
+ struct PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT;
+
+ //=== VK_KHR_ray_tracing_maintenance1 ===
+ struct PhysicalDeviceRayTracingMaintenance1FeaturesKHR;
+ struct TraceRaysIndirectCommand2KHR;
+
//=== VK_EXT_image_view_min_lod ===
struct PhysicalDeviceImageViewMinLodFeaturesEXT;
struct ImageViewMinLodCreateInfoEXT;
@@ -1332,6 +1404,23 @@ namespace VULKAN_HPP_NAMESPACE
struct MultiDrawInfoEXT;
struct MultiDrawIndexedInfoEXT;
+ //=== VK_EXT_image_2d_view_of_3d ===
+ struct PhysicalDeviceImage2DViewOf3DFeaturesEXT;
+
+ //=== VK_EXT_opacity_micromap ===
+ struct MicromapBuildInfoEXT;
+ struct MicromapUsageEXT;
+ struct MicromapCreateInfoEXT;
+ struct PhysicalDeviceOpacityMicromapFeaturesEXT;
+ struct PhysicalDeviceOpacityMicromapPropertiesEXT;
+ struct MicromapVersionInfoEXT;
+ struct CopyMicromapToMemoryInfoEXT;
+ struct CopyMemoryToMicromapInfoEXT;
+ struct CopyMicromapInfoEXT;
+ struct MicromapBuildSizesInfoEXT;
+ struct AccelerationStructureTrianglesOpacityMicromapEXT;
+ struct MicromapTriangleEXT;
+
//=== VK_EXT_border_color_swizzle ===
struct PhysicalDeviceBorderColorSwizzleFeaturesEXT;
struct SamplerBorderColorComponentMappingCreateInfoEXT;
@@ -1339,6 +1428,17 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_pageable_device_local_memory ===
struct PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT;
+ //=== VK_VALVE_descriptor_set_host_mapping ===
+ struct PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
+ struct DescriptorSetBindingReferenceVALVE;
+ struct DescriptorSetLayoutHostMappingInfoVALVE;
+
+ //=== VK_EXT_depth_clamp_zero_one ===
+ struct PhysicalDeviceDepthClampZeroOneFeaturesEXT;
+
+ //=== VK_EXT_non_seamless_cube_map ===
+ struct PhysicalDeviceNonSeamlessCubeMapFeaturesEXT;
+
//=== VK_QCOM_fragment_density_map_offset ===
struct PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM;
struct PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM;
@@ -1347,26 +1447,97 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_linear_color_attachment ===
struct PhysicalDeviceLinearColorAttachmentFeaturesNV;
+ //=== VK_EXT_image_compression_control_swapchain ===
+ struct PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT;
+
+ //=== VK_QCOM_image_processing ===
+ struct ImageViewSampleWeightCreateInfoQCOM;
+ struct PhysicalDeviceImageProcessingFeaturesQCOM;
+ struct PhysicalDeviceImageProcessingPropertiesQCOM;
+
+ //=== VK_EXT_extended_dynamic_state3 ===
+ struct PhysicalDeviceExtendedDynamicState3FeaturesEXT;
+ struct PhysicalDeviceExtendedDynamicState3PropertiesEXT;
+ struct ColorBlendEquationEXT;
+ struct ColorBlendAdvancedEXT;
+
+ //=== VK_EXT_subpass_merge_feedback ===
+ struct PhysicalDeviceSubpassMergeFeedbackFeaturesEXT;
+ struct RenderPassCreationControlEXT;
+ struct RenderPassCreationFeedbackInfoEXT;
+ struct RenderPassCreationFeedbackCreateInfoEXT;
+ struct RenderPassSubpassFeedbackInfoEXT;
+ struct RenderPassSubpassFeedbackCreateInfoEXT;
+
+ //=== VK_EXT_shader_module_identifier ===
+ struct PhysicalDeviceShaderModuleIdentifierFeaturesEXT;
+ struct PhysicalDeviceShaderModuleIdentifierPropertiesEXT;
+ struct PipelineShaderStageModuleIdentifierCreateInfoEXT;
+ struct ShaderModuleIdentifierEXT;
+
+ //=== VK_EXT_rasterization_order_attachment_access ===
+ struct PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
+ using PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
+
+ //=== VK_NV_optical_flow ===
+ struct PhysicalDeviceOpticalFlowFeaturesNV;
+ struct PhysicalDeviceOpticalFlowPropertiesNV;
+ struct OpticalFlowImageFormatInfoNV;
+ struct OpticalFlowImageFormatPropertiesNV;
+ struct OpticalFlowSessionCreateInfoNV;
+ struct OpticalFlowSessionCreatePrivateDataInfoNV;
+ struct OpticalFlowExecuteInfoNV;
+
+ //=== VK_EXT_legacy_dithering ===
+ struct PhysicalDeviceLegacyDitheringFeaturesEXT;
+
+ //=== VK_EXT_pipeline_protected_access ===
+ struct PhysicalDevicePipelineProtectedAccessFeaturesEXT;
+
+ //=== VK_QCOM_tile_properties ===
+ struct PhysicalDeviceTilePropertiesFeaturesQCOM;
+ struct TilePropertiesQCOM;
+
+ //=== VK_SEC_amigo_profiling ===
+ struct PhysicalDeviceAmigoProfilingFeaturesSEC;
+ struct AmigoProfilingSubmitInfoSEC;
+
+ //=== VK_EXT_mutable_descriptor_type ===
+ struct PhysicalDeviceMutableDescriptorTypeFeaturesEXT;
+ using PhysicalDeviceMutableDescriptorTypeFeaturesVALVE = PhysicalDeviceMutableDescriptorTypeFeaturesEXT;
+ struct MutableDescriptorTypeListEXT;
+ using MutableDescriptorTypeListVALVE = MutableDescriptorTypeListEXT;
+ struct MutableDescriptorTypeCreateInfoEXT;
+ using MutableDescriptorTypeCreateInfoVALVE = MutableDescriptorTypeCreateInfoEXT;
+
+ //=== VK_ARM_shader_core_builtins ===
+ struct PhysicalDeviceShaderCoreBuiltinsFeaturesARM;
+ struct PhysicalDeviceShaderCoreBuiltinsPropertiesARM;
+
//===============
//=== HANDLEs ===
//===============
+ template <typename Type>
+ struct isVulkanHandleType
+ {
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = false;
+ };
+
class SurfaceKHR
{
public:
using CType = VkSurfaceKHR;
using NativeType = VkSurfaceKHR;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR;
public:
VULKAN_HPP_CONSTEXPR SurfaceKHR() = default;
VULKAN_HPP_CONSTEXPR SurfaceKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT SurfaceKHR( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT : m_surfaceKHR( surfaceKHR )
- {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT SurfaceKHR( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT : m_surfaceKHR( surfaceKHR ) {}
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
SurfaceKHR & operator=( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT
@@ -1419,17 +1590,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkSurfaceKHR m_surfaceKHR = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceKHR ) == sizeof( VkSurfaceKHR ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceKHR>::value,
- "SurfaceKHR is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eSurfaceKHR>
- {
- using type = VULKAN_HPP_NAMESPACE::SurfaceKHR;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR>
@@ -1438,8 +1598,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR>
{
using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR;
};
@@ -1456,18 +1615,17 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkDebugReportCallbackEXT;
using NativeType = VkDebugReportCallbackEXT;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT;
public:
- VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT() = default;
- VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT
- DebugReportCallbackEXT( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT() = default;
+ VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT DebugReportCallbackEXT( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT
: m_debugReportCallbackEXT( debugReportCallbackEXT )
- {}
+ {
+ }
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
DebugReportCallbackEXT & operator=( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT
@@ -1520,18 +1678,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkDebugReportCallbackEXT m_debugReportCallbackEXT = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT ) ==
- sizeof( VkDebugReportCallbackEXT ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::value,
- "DebugReportCallbackEXT is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eDebugReportCallbackEXT>
- {
- using type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT>
@@ -1540,8 +1686,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT>
{
using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT;
};
@@ -1558,18 +1703,17 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkDebugUtilsMessengerEXT;
using NativeType = VkDebugUtilsMessengerEXT;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
- VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT() = default;
- VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT
- DebugUtilsMessengerEXT( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT() = default;
+ VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT DebugUtilsMessengerEXT( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT
: m_debugUtilsMessengerEXT( debugUtilsMessengerEXT )
- {}
+ {
+ }
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
DebugUtilsMessengerEXT & operator=( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT
@@ -1622,18 +1766,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkDebugUtilsMessengerEXT m_debugUtilsMessengerEXT = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT ) ==
- sizeof( VkDebugUtilsMessengerEXT ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::value,
- "DebugUtilsMessengerEXT is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eDebugUtilsMessengerEXT>
- {
- using type = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT>
@@ -1653,16 +1785,14 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkDisplayKHR;
using NativeType = VkDisplayKHR;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR;
public:
VULKAN_HPP_CONSTEXPR DisplayKHR() = default;
VULKAN_HPP_CONSTEXPR DisplayKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT : m_displayKHR( displayKHR )
- {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT : m_displayKHR( displayKHR ) {}
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
DisplayKHR & operator=( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT
@@ -1715,17 +1845,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkDisplayKHR m_displayKHR = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayKHR ) == sizeof( VkDisplayKHR ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayKHR>::value,
- "DisplayKHR is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eDisplayKHR>
- {
- using type = VULKAN_HPP_NAMESPACE::DisplayKHR;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR>
@@ -1734,8 +1853,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR>
{
using Type = VULKAN_HPP_NAMESPACE::DisplayKHR;
};
@@ -1752,17 +1870,14 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkSwapchainKHR;
using NativeType = VkSwapchainKHR;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR;
public:
VULKAN_HPP_CONSTEXPR SwapchainKHR() = default;
VULKAN_HPP_CONSTEXPR SwapchainKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT SwapchainKHR( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT
- : m_swapchainKHR( swapchainKHR )
- {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT SwapchainKHR( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT : m_swapchainKHR( swapchainKHR ) {}
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
SwapchainKHR & operator=( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT
@@ -1815,17 +1930,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkSwapchainKHR m_swapchainKHR = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainKHR ) == sizeof( VkSwapchainKHR ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainKHR>::value,
- "SwapchainKHR is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eSwapchainKHR>
- {
- using type = VULKAN_HPP_NAMESPACE::SwapchainKHR;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR>
@@ -1834,8 +1938,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR>
{
using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR;
};
@@ -1852,8 +1955,7 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkSemaphore;
using NativeType = VkSemaphore;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore;
@@ -1913,17 +2015,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkSemaphore m_semaphore = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Semaphore ) == sizeof( VkSemaphore ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Semaphore>::value,
- "Semaphore is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eSemaphore>
- {
- using type = VULKAN_HPP_NAMESPACE::Semaphore;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore>
@@ -1932,8 +2023,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore>
{
using Type = VULKAN_HPP_NAMESPACE::Semaphore;
};
@@ -1950,8 +2040,7 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkFence;
using NativeType = VkFence;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eFence;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFence;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence;
@@ -2011,16 +2100,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkFence m_fence = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Fence ) == sizeof( VkFence ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Fence>::value,
- "Fence is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eFence>
- {
- using type = VULKAN_HPP_NAMESPACE::Fence;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eFence>
@@ -2046,22 +2125,20 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkPerformanceConfigurationINTEL;
using NativeType = VkPerformanceConfigurationINTEL;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
- VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL() = default;
- VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT
- PerformanceConfigurationINTEL( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL() = default;
+ VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT PerformanceConfigurationINTEL( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT
: m_performanceConfigurationINTEL( performanceConfigurationINTEL )
- {}
+ {
+ }
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
- PerformanceConfigurationINTEL &
- operator=( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT
+ PerformanceConfigurationINTEL & operator=( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT
{
m_performanceConfigurationINTEL = performanceConfigurationINTEL;
return *this;
@@ -2111,19 +2188,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkPerformanceConfigurationINTEL m_performanceConfigurationINTEL = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL ) ==
- sizeof( VkPerformanceConfigurationINTEL ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::value,
- "PerformanceConfigurationINTEL is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::ePerformanceConfigurationINTEL>
- {
- using type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL>
@@ -2143,8 +2207,7 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkQueryPool;
using NativeType = VkQueryPool;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool;
@@ -2204,17 +2267,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkQueryPool m_queryPool = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPool ) == sizeof( VkQueryPool ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueryPool>::value,
- "QueryPool is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eQueryPool>
- {
- using type = VULKAN_HPP_NAMESPACE::QueryPool;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool>
@@ -2223,8 +2275,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool>
{
using Type = VULKAN_HPP_NAMESPACE::QueryPool;
};
@@ -2241,8 +2292,7 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkBuffer;
using NativeType = VkBuffer;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eBuffer;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBuffer;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer;
@@ -2302,16 +2352,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkBuffer m_buffer = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Buffer ) == sizeof( VkBuffer ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Buffer>::value,
- "Buffer is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eBuffer>
- {
- using type = VULKAN_HPP_NAMESPACE::Buffer;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eBuffer>
@@ -2320,8 +2360,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer>
{
using Type = VULKAN_HPP_NAMESPACE::Buffer;
};
@@ -2338,17 +2377,14 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkPipelineLayout;
using NativeType = VkPipelineLayout;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout;
public:
VULKAN_HPP_CONSTEXPR PipelineLayout() = default;
VULKAN_HPP_CONSTEXPR PipelineLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT PipelineLayout( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT
- : m_pipelineLayout( pipelineLayout )
- {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT PipelineLayout( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT : m_pipelineLayout( pipelineLayout ) {}
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
PipelineLayout & operator=( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT
@@ -2401,17 +2437,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkPipelineLayout m_pipelineLayout = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineLayout ) == sizeof( VkPipelineLayout ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineLayout>::value,
- "PipelineLayout is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::ePipelineLayout>
- {
- using type = VULKAN_HPP_NAMESPACE::PipelineLayout;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout>
@@ -2420,8 +2445,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout>
{
using Type = VULKAN_HPP_NAMESPACE::PipelineLayout;
};
@@ -2438,17 +2462,14 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkDescriptorSet;
using NativeType = VkDescriptorSet;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet;
public:
VULKAN_HPP_CONSTEXPR DescriptorSet() = default;
VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT
- : m_descriptorSet( descriptorSet )
- {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT : m_descriptorSet( descriptorSet ) {}
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
DescriptorSet & operator=( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT
@@ -2501,17 +2522,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkDescriptorSet m_descriptorSet = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSet ) == sizeof( VkDescriptorSet ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSet>::value,
- "DescriptorSet is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eDescriptorSet>
- {
- using type = VULKAN_HPP_NAMESPACE::DescriptorSet;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet>
@@ -2520,8 +2530,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet>
{
using Type = VULKAN_HPP_NAMESPACE::DescriptorSet;
};
@@ -2538,8 +2547,7 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkImageView;
using NativeType = VkImageView;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eImageView;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImageView;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView;
@@ -2599,17 +2607,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkImageView m_imageView = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageView ) == sizeof( VkImageView ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageView>::value,
- "ImageView is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eImageView>
- {
- using type = VULKAN_HPP_NAMESPACE::ImageView;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eImageView>
@@ -2618,8 +2615,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView>
{
using Type = VULKAN_HPP_NAMESPACE::ImageView;
};
@@ -2636,8 +2632,7 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkPipeline;
using NativeType = VkPipeline;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::ePipeline;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipeline;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline;
@@ -2697,16 +2692,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkPipeline m_pipeline = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Pipeline ) == sizeof( VkPipeline ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Pipeline>::value,
- "Pipeline is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::ePipeline>
- {
- using type = VULKAN_HPP_NAMESPACE::Pipeline;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePipeline>
@@ -2715,8 +2700,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline>
{
using Type = VULKAN_HPP_NAMESPACE::Pipeline;
};
@@ -2733,8 +2717,7 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkImage;
using NativeType = VkImage;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eImage;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImage;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage;
@@ -2794,16 +2777,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkImage m_image = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Image ) == sizeof( VkImage ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Image>::value,
- "Image is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eImage>
- {
- using type = VULKAN_HPP_NAMESPACE::Image;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eImage>
@@ -2829,18 +2802,17 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkAccelerationStructureNV;
using NativeType = VkAccelerationStructureNV;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureNV;
public:
- VULKAN_HPP_CONSTEXPR AccelerationStructureNV() = default;
- VULKAN_HPP_CONSTEXPR AccelerationStructureNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT
- AccelerationStructureNV( VkAccelerationStructureNV accelerationStructureNV ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR AccelerationStructureNV() = default;
+ VULKAN_HPP_CONSTEXPR AccelerationStructureNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT AccelerationStructureNV( VkAccelerationStructureNV accelerationStructureNV ) VULKAN_HPP_NOEXCEPT
: m_accelerationStructureNV( accelerationStructureNV )
- {}
+ {
+ }
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
AccelerationStructureNV & operator=( VkAccelerationStructureNV accelerationStructureNV ) VULKAN_HPP_NOEXCEPT
@@ -2893,34 +2865,103 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkAccelerationStructureNV m_accelerationStructureNV = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureNV ) ==
- sizeof( VkAccelerationStructureNV ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::value,
- "AccelerationStructureNV is not nothrow_move_constructible!" );
template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eAccelerationStructureNV>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV>
{
- using type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV;
+ using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV;
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureNV>
{
using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV;
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureNV>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>
{
- using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
+ class OpticalFlowSessionNV
+ {
+ public:
+ using CType = VkOpticalFlowSessionNV;
+ using NativeType = VkOpticalFlowSessionNV;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eOpticalFlowSessionNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+ VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+ public:
+ VULKAN_HPP_CONSTEXPR OpticalFlowSessionNV() = default;
+ VULKAN_HPP_CONSTEXPR OpticalFlowSessionNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT OpticalFlowSessionNV( VkOpticalFlowSessionNV opticalFlowSessionNV ) VULKAN_HPP_NOEXCEPT
+ : m_opticalFlowSessionNV( opticalFlowSessionNV )
+ {
+ }
+
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+ OpticalFlowSessionNV & operator=( VkOpticalFlowSessionNV opticalFlowSessionNV ) VULKAN_HPP_NOEXCEPT
+ {
+ m_opticalFlowSessionNV = opticalFlowSessionNV;
+ return *this;
+ }
+#endif
+
+ OpticalFlowSessionNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_opticalFlowSessionNV = {};
+ return *this;
+ }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( OpticalFlowSessionNV const & ) const = default;
+#else
+ bool operator==( OpticalFlowSessionNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_opticalFlowSessionNV == rhs.m_opticalFlowSessionNV;
+ }
+
+ bool operator!=( OpticalFlowSessionNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_opticalFlowSessionNV != rhs.m_opticalFlowSessionNV;
+ }
+
+ bool operator<( OpticalFlowSessionNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_opticalFlowSessionNV < rhs.m_opticalFlowSessionNV;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkOpticalFlowSessionNV() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_opticalFlowSessionNV;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_opticalFlowSessionNV != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_opticalFlowSessionNV == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkOpticalFlowSessionNV m_opticalFlowSessionNV = {};
};
template <>
- struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eOpticalFlowSessionNV>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV;
+ };
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV>
{
static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
};
@@ -2931,18 +2972,17 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkDescriptorUpdateTemplate;
using NativeType = VkDescriptorUpdateTemplate;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate;
public:
- VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate() = default;
- VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT
- DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate() = default;
+ VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT
: m_descriptorUpdateTemplate( descriptorUpdateTemplate )
- {}
+ {
+ }
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
DescriptorUpdateTemplate & operator=( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT
@@ -2995,18 +3035,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkDescriptorUpdateTemplate m_descriptorUpdateTemplate = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate ) ==
- sizeof( VkDescriptorUpdateTemplate ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::value,
- "DescriptorUpdateTemplate is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eDescriptorUpdateTemplate>
- {
- using type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate>
@@ -3015,8 +3043,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate>
{
using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
};
@@ -3034,8 +3061,7 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkEvent;
using NativeType = VkEvent;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eEvent;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eEvent;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent;
@@ -3095,16 +3121,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkEvent m_event = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Event ) == sizeof( VkEvent ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Event>::value,
- "Event is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eEvent>
- {
- using type = VULKAN_HPP_NAMESPACE::Event;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eEvent>
@@ -3130,18 +3146,17 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkAccelerationStructureKHR;
using NativeType = VkAccelerationStructureKHR;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR;
public:
- VULKAN_HPP_CONSTEXPR AccelerationStructureKHR() = default;
- VULKAN_HPP_CONSTEXPR AccelerationStructureKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT
- AccelerationStructureKHR( VkAccelerationStructureKHR accelerationStructureKHR ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR AccelerationStructureKHR() = default;
+ VULKAN_HPP_CONSTEXPR AccelerationStructureKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT AccelerationStructureKHR( VkAccelerationStructureKHR accelerationStructureKHR ) VULKAN_HPP_NOEXCEPT
: m_accelerationStructureKHR( accelerationStructureKHR )
- {}
+ {
+ }
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
AccelerationStructureKHR & operator=( VkAccelerationStructureKHR accelerationStructureKHR ) VULKAN_HPP_NOEXCEPT
@@ -3194,34 +3209,100 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkAccelerationStructureKHR m_accelerationStructureKHR = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR ) ==
- sizeof( VkAccelerationStructureKHR ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::value,
- "AccelerationStructureKHR is not nothrow_move_constructible!" );
template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eAccelerationStructureKHR>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR>
{
- using type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR;
+ using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR;
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR>
{
using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR;
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>
{
- using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+ };
+
+ class MicromapEXT
+ {
+ public:
+ using CType = VkMicromapEXT;
+ using NativeType = VkMicromapEXT;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eMicromapEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+ VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+ public:
+ VULKAN_HPP_CONSTEXPR MicromapEXT() = default;
+ VULKAN_HPP_CONSTEXPR MicromapEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT MicromapEXT( VkMicromapEXT micromapEXT ) VULKAN_HPP_NOEXCEPT : m_micromapEXT( micromapEXT ) {}
+
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+ MicromapEXT & operator=( VkMicromapEXT micromapEXT ) VULKAN_HPP_NOEXCEPT
+ {
+ m_micromapEXT = micromapEXT;
+ return *this;
+ }
+#endif
+
+ MicromapEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+ {
+ m_micromapEXT = {};
+ return *this;
+ }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( MicromapEXT const & ) const = default;
+#else
+ bool operator==( MicromapEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_micromapEXT == rhs.m_micromapEXT;
+ }
+
+ bool operator!=( MicromapEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_micromapEXT != rhs.m_micromapEXT;
+ }
+
+ bool operator<( MicromapEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return m_micromapEXT < rhs.m_micromapEXT;
+ }
+#endif
+
+ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkMicromapEXT() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_micromapEXT;
+ }
+
+ explicit operator bool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_micromapEXT != VK_NULL_HANDLE;
+ }
+
+ bool operator!() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_micromapEXT == VK_NULL_HANDLE;
+ }
+
+ private:
+ VkMicromapEXT m_micromapEXT = {};
};
template <>
- struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>
+ struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eMicromapEXT>
+ {
+ using Type = VULKAN_HPP_NAMESPACE::MicromapEXT;
+ };
+
+ template <>
+ struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::MicromapEXT>
{
static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
};
@@ -3232,25 +3313,20 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkCommandBuffer;
using NativeType = VkCommandBuffer;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer;
public:
- VULKAN_HPP_CONSTEXPR CommandBuffer() = default;
- VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT CommandBuffer( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT
- : m_commandBuffer( commandBuffer )
- {}
+ VULKAN_HPP_CONSTEXPR CommandBuffer() = default;
+ VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+ CommandBuffer( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT : m_commandBuffer( commandBuffer ) {}
-#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
CommandBuffer & operator=( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT
{
m_commandBuffer = commandBuffer;
return *this;
}
-#endif
CommandBuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
@@ -3280,52 +3356,46 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_VERSION_1_0 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type
- reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename ResultValueType<void>::type reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setViewport( uint32_t firstViewport,
uint32_t viewportCount,
const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewport( uint32_t firstViewport,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
+ void setViewport( uint32_t firstViewport,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -3333,17 +3403,16 @@ namespace VULKAN_HPP_NAMESPACE
void setScissor( uint32_t firstScissor,
uint32_t scissorCount,
const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setScissor( uint32_t firstScissor,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
+ void setScissor( uint32_t firstScissor,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setLineWidth( float lineWidth,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setLineWidth( float lineWidth, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setDepthBias( float depthBiasConstantFactor,
@@ -3352,28 +3421,25 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setBlendConstants( const float blendConstants[4],
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setBlendConstants( const float blendConstants[4], Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDepthBounds( float minDepthBounds,
- float maxDepthBounds,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
uint32_t compareMask,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
uint32_t writeMask,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
uint32_t reference,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
@@ -3383,14 +3449,14 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
uint32_t dynamicOffsetCount,
const uint32_t * pDynamicOffsets,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
- VULKAN_HPP_NAMESPACE::PipelineLayout layout,
- uint32_t firstSet,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
- ArrayProxy<const uint32_t> const & dynamicOffsets,
+ void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+ uint32_t firstSet,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & dynamicOffsets,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -3398,21 +3464,20 @@ namespace VULKAN_HPP_NAMESPACE
void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
VULKAN_HPP_NAMESPACE::IndexType indexType,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void bindVertexBuffers( uint32_t firstBinding,
uint32_t bindingCount,
const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindVertexBuffers( uint32_t firstBinding,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+ void bindVertexBuffers( uint32_t firstBinding,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -3435,14 +3500,14 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize offset,
uint32_t drawCount,
uint32_t stride,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
uint32_t drawCount,
uint32_t stride,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void dispatch( uint32_t groupCountX,
@@ -3453,19 +3518,19 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
uint32_t regionCount,
const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
- VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions,
+ void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -3476,14 +3541,14 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
uint32_t regionCount,
const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions,
+ void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -3495,15 +3560,15 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t regionCount,
const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions,
VULKAN_HPP_NAMESPACE::Filter filter,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions,
- VULKAN_HPP_NAMESPACE::Filter filter,
+ void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions,
+ VULKAN_HPP_NAMESPACE::Filter filter,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -3513,13 +3578,13 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
uint32_t regionCount,
const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
+ void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -3529,13 +3594,13 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
uint32_t regionCount,
const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
+ void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -3544,13 +3609,13 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
VULKAN_HPP_NAMESPACE::DeviceSize dataSize,
const void * pData,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
- ArrayProxy<const DataType> const & data,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const DataType> const & data,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -3558,7 +3623,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
VULKAN_HPP_NAMESPACE::DeviceSize size,
uint32_t data,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void clearColorImage( VULKAN_HPP_NAMESPACE::Image image,
@@ -3566,32 +3631,30 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor,
uint32_t rangeCount,
const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void clearColorImage( VULKAN_HPP_NAMESPACE::Image image,
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
- const VULKAN_HPP_NAMESPACE::ClearColorValue & color,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,
+ void clearColorImage( VULKAN_HPP_NAMESPACE::Image image,
+ VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
+ const VULKAN_HPP_NAMESPACE::ClearColorValue & color,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image,
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
- const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil,
- uint32_t rangeCount,
- const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image,
+ VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
+ const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil,
+ uint32_t rangeCount,
+ const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image,
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
- const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image,
+ VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
+ const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -3599,11 +3662,11 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments,
uint32_t rectCount,
const VULKAN_HPP_NAMESPACE::ClearRect * pRects,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void clearAttachments( ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects,
+ void clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -3614,26 +3677,26 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
uint32_t regionCount,
const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions,
+ void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setEvent( VULKAN_HPP_NAMESPACE::Event event,
VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void resetEvent( VULKAN_HPP_NAMESPACE::Event event,
VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void waitEvents( uint32_t eventCount,
@@ -3646,15 +3709,15 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,
uint32_t imageMemoryBarrierCount,
const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers,
+ void waitEvents( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -3668,15 +3731,15 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,
uint32_t imageMemoryBarrierCount,
const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
- VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers,
+ void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
+ VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -3684,7 +3747,7 @@ namespace VULKAN_HPP_NAMESPACE
void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t query,
VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
@@ -3695,13 +3758,13 @@ namespace VULKAN_HPP_NAMESPACE
void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t firstQuery,
uint32_t queryCount,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t query,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
@@ -3711,7 +3774,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
VULKAN_HPP_NAMESPACE::DeviceSize stride,
VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout,
@@ -3719,30 +3782,29 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t offset,
uint32_t size,
const void * pValues,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename ValuesType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout,
- VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
- uint32_t offset,
- ArrayProxy<const ValuesType> const & values,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
+ uint32_t offset,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const ValuesType> const & values,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
VULKAN_HPP_NAMESPACE::SubpassContents contents,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
VULKAN_HPP_NAMESPACE::SubpassContents contents,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void endRenderPass( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
@@ -3750,18 +3812,17 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void executeCommands( uint32_t commandBufferCount,
const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
+ void executeCommands( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_VERSION_1_1 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDeviceMask( uint32_t deviceMask,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setDeviceMask( uint32_t deviceMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void dispatchBase( uint32_t baseGroupX,
@@ -3781,47 +3842,46 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- VULKAN_HPP_NAMESPACE::Buffer countBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+ uint32_t maxDrawCount,
+ uint32_t stride,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_VERSION_1_3 ===
@@ -3829,142 +3889,139 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setEvent2( VULKAN_HPP_NAMESPACE::Event event,
const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setEvent2( VULKAN_HPP_NAMESPACE::Event event,
const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void resetEvent2( VULKAN_HPP_NAMESPACE::Event event,
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void waitEvents2( uint32_t eventCount,
const VULKAN_HPP_NAMESPACE::Event * pEvents,
const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void waitEvents2( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+ void waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t query,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void endRendering( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setViewportWithCount( uint32_t viewportCount,
const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewportWithCount( ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
+ void setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setScissorWithCount( uint32_t scissorCount,
const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setScissorWithCount( ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
+ void setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -3975,38 +4032,37 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void bindVertexBuffers2(
- uint32_t firstBinding,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ uint32_t firstBinding,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
@@ -4014,31 +4070,29 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::StencilOp passOp,
VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
VULKAN_HPP_NAMESPACE::CompareOp compareOp,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_debug_marker ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -4046,11 +4100,11 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
@@ -4058,29 +4112,29 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
@@ -4088,12 +4142,12 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_video_decode_queue ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pFrameInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pDecodeInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & frameInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
@@ -4105,16 +4159,15 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindTransformFeedbackBuffersEXT(
- uint32_t firstBinding,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+ void bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -4122,33 +4175,29 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t counterBufferCount,
const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,
const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets
+ void beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- endTransformFeedbackEXT( uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,
- const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void endTransformFeedbackEXT( uint32_t firstCounterBuffer,
+ uint32_t counterBufferCount,
+ const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,
+ const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endTransformFeedbackEXT( uint32_t firstCounterBuffer,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets
+ void endTransformFeedbackEXT( uint32_t firstCounterBuffer,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -4156,33 +4205,32 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t query,
VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
uint32_t index,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t query,
uint32_t index,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- drawIndirectByteCountEXT( uint32_t instanceCount,
- uint32_t firstInstance,
- VULKAN_HPP_NAMESPACE::Buffer counterBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset,
- uint32_t counterOffset,
- uint32_t vertexStride,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void drawIndirectByteCountEXT( uint32_t instanceCount,
+ uint32_t firstInstance,
+ VULKAN_HPP_NAMESPACE::Buffer counterBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset,
+ uint32_t counterOffset,
+ uint32_t vertexStride,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
//=== VK_NVX_binary_import ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_AMD_draw_indirect_count ===
@@ -4194,7 +4242,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer,
@@ -4203,18 +4251,17 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_dynamic_rendering ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -4223,8 +4270,7 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_device_group ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDeviceMaskKHR( uint32_t deviceMask,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void dispatchBaseKHR( uint32_t baseGroupX,
@@ -4243,13 +4289,13 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t set,
uint32_t descriptorWriteCount,
const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
- VULKAN_HPP_NAMESPACE::PipelineLayout layout,
- uint32_t set,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
+ void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+ uint32_t set,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -4258,34 +4304,29 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::PipelineLayout layout,
uint32_t set,
const void * pData,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
VULKAN_HPP_NAMESPACE::PipelineLayout layout,
uint32_t set,
DataType const & data,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_EXT_conditional_rendering ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginConditionalRenderingEXT(
- const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginConditionalRenderingEXT(
- const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endConditionalRenderingEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ void endConditionalRenderingEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
//=== VK_NV_clip_space_w_scaling ===
@@ -4293,28 +4334,26 @@ namespace VULKAN_HPP_NAMESPACE
void setViewportWScalingNV( uint32_t firstViewport,
uint32_t viewportCount,
const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewportWScalingNV( uint32_t firstViewport,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings,
+ void setViewportWScalingNV( uint32_t firstViewport,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_EXT_discard_rectangles ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
- uint32_t discardRectangleCount,
- const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
+ uint32_t discardRectangleCount,
+ const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_create_renderpass2 ===
@@ -4322,191 +4361,173 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_EXT_debug_utils ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void endDebugUtilsLabelEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_EXT_sample_locations ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_acceleration_structure ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void buildAccelerationStructuresKHR(
- uint32_t infoCount,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void buildAccelerationStructuresKHR( uint32_t infoCount,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void buildAccelerationStructuresKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void buildAccelerationStructuresIndirectKHR(
- uint32_t infoCount,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
- const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses,
- const uint32_t * pIndirectStrides,
- const uint32_t * const * ppMaxPrimitiveCounts,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void buildAccelerationStructuresIndirectKHR( uint32_t infoCount,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
+ const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses,
+ const uint32_t * pIndirectStrides,
+ const uint32_t * const * ppMaxPrimitiveCounts,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void buildAccelerationStructuresIndirectKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses,
- ArrayProxy<const uint32_t> const & indirectStrides,
- ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & indirectStrides,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyAccelerationStructureToMemoryKHR(
- const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyAccelerationStructureToMemoryKHR(
- const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyMemoryToAccelerationStructureKHR(
- const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyMemoryToAccelerationStructureKHR(
- const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeAccelerationStructuresPropertiesKHR(
- uint32_t accelerationStructureCount,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void writeAccelerationStructuresPropertiesKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_NV_shading_rate_image ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView,
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView,
+ VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setViewportShadingRatePaletteNV( uint32_t firstViewport,
uint32_t viewportCount,
const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewportShadingRatePaletteNV(
- uint32_t firstViewport,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setViewportShadingRatePaletteNV( uint32_t firstViewport,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
- uint32_t customSampleOrderCount,
- const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
+ uint32_t customSampleOrderCount,
+ const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setCoarseSampleOrderNV(
- VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_NV_ray_tracing ===
@@ -4520,8 +4541,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
VULKAN_HPP_NAMESPACE::Buffer scratch,
VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info,
@@ -4532,16 +4552,14 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
VULKAN_HPP_NAMESPACE::Buffer scratch,
VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer,
@@ -4558,23 +4576,22 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t width,
uint32_t height,
uint32_t depth,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeAccelerationStructuresPropertiesNV(
- uint32_t accelerationStructureCount,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void writeAccelerationStructuresPropertiesNV(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -4587,7 +4604,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
@@ -4596,8 +4613,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
//=== VK_AMD_buffer_marker ===
@@ -4606,22 +4622,19 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
uint32_t marker,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
//=== VK_NV_mesh_shader ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawMeshTasksNV( uint32_t taskCount,
- uint32_t firstTask,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- uint32_t drawCount,
- uint32_t stride,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ uint32_t drawCount,
+ uint32_t stride,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
@@ -4630,8 +4643,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
//=== VK_NV_scissor_exclusive ===
@@ -4639,58 +4651,54 @@ namespace VULKAN_HPP_NAMESPACE
void setExclusiveScissorNV( uint32_t firstExclusiveScissor,
uint32_t exclusiveScissorCount,
const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setExclusiveScissorNV( uint32_t firstExclusiveScissor,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors,
+ void setExclusiveScissorNV( uint32_t firstExclusiveScissor,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_NV_device_diagnostic_checkpoints ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setCheckpointNV( const void * pCheckpointMarker,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setCheckpointNV( const void * pCheckpointMarker, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename CheckpointMarkerType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setCheckpointNV( CheckpointMarkerType const & checkpointMarker,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_INTEL_performance_query ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result setPerformanceMarkerINTEL(
- const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result setPerformanceStreamMarkerINTEL(
- const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result setPerformanceOverrideINTEL(
- const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_fragment_shading_rate ===
@@ -4698,14 +4706,12 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize,
const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize,
const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_EXT_line_rasterization ===
@@ -4718,40 +4724,33 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_extended_dynamic_state ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- setViewportWithCountEXT( uint32_t viewportCount,
- const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setViewportWithCountEXT( uint32_t viewportCount,
+ const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- setViewportWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setViewportWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- setScissorWithCountEXT( uint32_t scissorCount,
- const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setScissorWithCountEXT( uint32_t scissorCount,
+ const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- setScissorWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
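For callers, the newly namespace-qualified ArrayProxy parameters behave exactly as before: any contiguous container (std::array, std::vector, or a single element) converts implicitly. A minimal usage sketch for the two count-based setters above, assuming a command buffer in the recording state and a default dispatcher with the VK_EXT_extended_dynamic_state entry points loaded; all names below are placeholders, not part of this diff:

```cpp
#include <array>
#include <vulkan/vulkan.hpp>

// Hypothetical helper: records dynamic viewport/scissor state via the
// enhanced-mode ArrayProxy overloads declared above.
void recordDynamicViewportState( vk::CommandBuffer cmd, vk::Extent2D extent )
{
    std::array<vk::Viewport, 1> viewports = { vk::Viewport(
        0.0f, 0.0f, static_cast<float>( extent.width ), static_cast<float>( extent.height ), 0.0f, 1.0f ) };
    std::array<vk::Rect2D, 1> scissors = { vk::Rect2D( vk::Offset2D( 0, 0 ), extent ) };

    // std::array converts implicitly to vk::ArrayProxy, so no count parameter is needed.
    cmd.setViewportWithCountEXT( viewports );
    cmd.setScissorWithCountEXT( scissors );
}
```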
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -4761,40 +4760,37 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void bindVertexBuffers2EXT(
- uint32_t firstBinding,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ uint32_t firstBinding,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
@@ -4802,51 +4798,46 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::StencilOp passOp,
VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
VULKAN_HPP_NAMESPACE::CompareOp compareOp,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
//=== VK_NV_device_generated_commands ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
VULKAN_HPP_NAMESPACE::Pipeline pipeline,
uint32_t groupIndex,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_video_encode_queue ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
@@ -4855,116 +4846,138 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void waitEvents2KHR( uint32_t eventCount,
const VULKAN_HPP_NAMESPACE::Event * pEvents,
const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void waitEvents2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+ void waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t query,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
uint32_t marker,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
//=== VK_NV_fragment_shading_rate_enums ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate,
const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ //=== VK_EXT_mesh_shader ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawMeshTasksEXT( uint32_t groupCountX,
+ uint32_t groupCountY,
+ uint32_t groupCountZ,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ uint32_t drawCount,
+ uint32_t stride,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+ uint32_t maxDrawCount,
+ uint32_t stride,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
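A minimal sketch of recording the three new VK_EXT_mesh_shader commands declared above, assuming a mesh-shading pipeline is already bound and the indirect buffers are populated; the handles and counts are placeholders, not part of this diff:

```cpp
#include <vulkan/vulkan.hpp>

// Hypothetical helper: exercises the new mesh-shader draw wrappers.
void recordMeshDraws( vk::CommandBuffer cmd, vk::Buffer indirectBuffer, vk::Buffer countBuffer, uint32_t maxDrawCount )
{
    // Direct dispatch of an 8 x 1 x 1 grid of task workgroups.
    cmd.drawMeshTasksEXT( 8, 1, 1 );

    // Indirect variant: parameters come from DrawMeshTasksIndirectCommandEXT records in the buffer.
    cmd.drawMeshTasksIndirectEXT( indirectBuffer, 0, maxDrawCount, sizeof( vk::DrawMeshTasksIndirectCommandEXT ) );

    // Count variant: the actual draw count is read from countBuffer at offset 0, capped at maxDrawCount.
    cmd.drawMeshTasksIndirectCountEXT( indirectBuffer, 0, countBuffer, 0, maxDrawCount, sizeof( vk::DrawMeshTasksIndirectCommandEXT ) );
}
```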
//=== VK_KHR_copy_commands2 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_ray_tracing_pipeline ===
@@ -4977,7 +4990,7 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t width,
uint32_t height,
uint32_t depth,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
@@ -4987,7 +5000,7 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t width,
uint32_t height,
uint32_t depth,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -4996,7 +5009,7 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
@@ -5004,29 +5017,26 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ void setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_vertex_input_dynamic_state ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- setVertexInputEXT( uint32_t vertexBindingDescriptionCount,
- const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions,
- uint32_t vertexAttributeDescriptionCount,
- const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setVertexInputEXT( uint32_t vertexBindingDescriptionCount,
+ const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions,
+ uint32_t vertexAttributeDescriptionCount,
+ const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setVertexInputEXT(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void
+ setVertexInputEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_HUAWEI_subpass_shading ===
@@ -5037,50 +5047,48 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_HUAWEI_invocation_mask ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView,
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView,
+ VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_extended_dynamic_state2 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- setPatchControlPointsEXT( uint32_t patchControlPoints,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setPatchControlPointsEXT( uint32_t patchControlPoints, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_color_write_enable ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- setColorWriteEnableEXT( uint32_t attachmentCount,
- const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setColorWriteEnableEXT( uint32_t attachmentCount,
+ const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- setColorWriteEnableEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setColorWriteEnableEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ //=== VK_KHR_ray_tracing_maintenance1 ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
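The single new entry point of VK_KHR_ray_tracing_maintenance1 takes only a device address: unlike traceRaysIndirectKHR, both the shader binding table regions and the trace dimensions are read on the device from a VkTraceRaysIndirectCommand2KHR structure stored at that address. A minimal sketch, with placeholder names:

```cpp
#include <vulkan/vulkan.hpp>

// Hypothetical helper: records a fully indirect ray trace with the new wrapper.
// The buffer behind indirectDeviceAddress must hold a VkTraceRaysIndirectCommand2KHR.
void recordIndirectTrace( vk::CommandBuffer cmd, vk::DeviceAddress indirectDeviceAddress )
{
    cmd.traceRaysIndirect2KHR( indirectDeviceAddress );
}
```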
//=== VK_EXT_multi_draw ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -5089,13 +5097,12 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t instanceCount,
uint32_t firstInstance,
uint32_t stride,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawMultiEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo,
- uint32_t instanceCount,
- uint32_t firstInstance,
- uint32_t stride,
+ void drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo,
+ uint32_t instanceCount,
+ uint32_t firstInstance,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -5106,18 +5113,263 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t firstInstance,
uint32_t stride,
const int32_t * pVertexOffset,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawMultiIndexedEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo,
- uint32_t instanceCount,
- uint32_t firstInstance,
- uint32_t stride,
+ void drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo,
+ uint32_t instanceCount,
+ uint32_t firstInstance,
Optional<const int32_t> vertexOffset VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_EXT_opacity_micromap ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void buildMicromapsEXT( uint32_t infoCount,
+ const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void buildMicromapsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void writeMicromapsPropertiesEXT( uint32_t micromapCount,
+ const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
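A minimal sketch of the enhanced-mode micromap overloads above: a single MicromapBuildInfoEXT or MicromapEXT converts implicitly to a one-element ArrayProxy. The build info, query pool setup, and the pipeline barrier that would normally separate the build from the query are assumed to be handled elsewhere; all names are placeholders:

```cpp
#include <vulkan/vulkan.hpp>

// Hypothetical helper: builds an opacity micromap and queries its compacted size.
void recordMicromapWork( vk::CommandBuffer                cmd,
                         vk::MicromapBuildInfoEXT const & buildInfo,
                         vk::MicromapEXT                  micromap,
                         vk::QueryPool                    queryPool )
{
    // One build info converts implicitly to a one-element ArrayProxy.
    cmd.buildMicromapsEXT( buildInfo );

    // Write the compacted-size property of the micromap into query 0 of the pool
    // (a barrier between build and query is omitted here for brevity).
    cmd.writeMicromapsPropertiesEXT( micromap, vk::QueryType::eMicromapCompactedSizeEXT, queryPool, 0 );
}
```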
+ //=== VK_EXT_extended_dynamic_state3 ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+ const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setColorBlendEnableEXT( uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VULKAN_HPP_NAMESPACE::Bool32 * pColorBlendEnables,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setColorBlendEnableEXT( uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setColorBlendEquationEXT( uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT * pColorBlendEquations,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setColorBlendEquationEXT( uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setColorWriteMaskEXT( uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VULKAN_HPP_NAMESPACE::ColorComponentFlags * pColorWriteMasks,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setColorWriteMaskEXT( uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> const & colorWriteMasks,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setRasterizationStreamEXT( uint32_t rasterizationStream, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setConservativeRasterizationModeEXT( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setColorBlendAdvancedEXT( uint32_t firstAttachment,
+ uint32_t attachmentCount,
+ const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT * pColorBlendAdvanced,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setColorBlendAdvancedEXT( uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT> const & colorBlendAdvanced,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setViewportSwizzleNV( uint32_t firstViewport,
+ uint32_t viewportCount,
+ const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setViewportSwizzleNV( uint32_t firstViewport,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setCoverageToColorLocationNV( uint32_t coverageToColorLocation,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setCoverageModulationTableNV( uint32_t coverageModulationTableCount,
+ const float * pCoverageModulationTable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ //=== VK_NV_optical_flow ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV * pExecuteInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT
{
return m_commandBuffer;
}
@@ -5135,17 +5387,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkCommandBuffer m_commandBuffer = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBuffer ) == sizeof( VkCommandBuffer ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBuffer>::value,
- "CommandBuffer is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eCommandBuffer>
- {
- using type = VULKAN_HPP_NAMESPACE::CommandBuffer;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer>
@@ -5154,8 +5395,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer>
{
using Type = VULKAN_HPP_NAMESPACE::CommandBuffer;
};
@@ -5172,17 +5412,14 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkDeviceMemory;
using NativeType = VkDeviceMemory;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory;
public:
VULKAN_HPP_CONSTEXPR DeviceMemory() = default;
VULKAN_HPP_CONSTEXPR DeviceMemory( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT DeviceMemory( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT
- : m_deviceMemory( deviceMemory )
- {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT DeviceMemory( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT : m_deviceMemory( deviceMemory ) {}
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
DeviceMemory & operator=( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT
@@ -5201,7 +5438,7 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( DeviceMemory const & ) const = default;
#else
- bool operator==( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return m_deviceMemory == rhs.m_deviceMemory;
}
@@ -5235,17 +5472,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkDeviceMemory m_deviceMemory = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemory ) == sizeof( VkDeviceMemory ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceMemory>::value,
- "DeviceMemory is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eDeviceMemory>
- {
- using type = VULKAN_HPP_NAMESPACE::DeviceMemory;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory>
@@ -5254,8 +5480,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory>
{
using Type = VULKAN_HPP_NAMESPACE::DeviceMemory;
};
@@ -5273,17 +5498,14 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkVideoSessionKHR;
using NativeType = VkVideoSessionKHR;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
VULKAN_HPP_CONSTEXPR VideoSessionKHR() = default;
VULKAN_HPP_CONSTEXPR VideoSessionKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT VideoSessionKHR( VkVideoSessionKHR videoSessionKHR ) VULKAN_HPP_NOEXCEPT
- : m_videoSessionKHR( videoSessionKHR )
- {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT VideoSessionKHR( VkVideoSessionKHR videoSessionKHR ) VULKAN_HPP_NOEXCEPT : m_videoSessionKHR( videoSessionKHR ) {}
# if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
VideoSessionKHR & operator=( VkVideoSessionKHR videoSessionKHR ) VULKAN_HPP_NOEXCEPT
@@ -5336,17 +5558,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkVideoSessionKHR m_videoSessionKHR = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionKHR ) == sizeof( VkVideoSessionKHR ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionKHR>::value,
- "VideoSessionKHR is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eVideoSessionKHR>
- {
- using type = VULKAN_HPP_NAMESPACE::VideoSessionKHR;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionKHR>
@@ -5367,8 +5578,7 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkDeferredOperationKHR;
using NativeType = VkDeferredOperationKHR;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
@@ -5377,7 +5587,8 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_CONSTEXPR DeferredOperationKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
VULKAN_HPP_TYPESAFE_EXPLICIT DeferredOperationKHR( VkDeferredOperationKHR deferredOperationKHR ) VULKAN_HPP_NOEXCEPT
: m_deferredOperationKHR( deferredOperationKHR )
- {}
+ {
+ }
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
DeferredOperationKHR & operator=( VkDeferredOperationKHR deferredOperationKHR ) VULKAN_HPP_NOEXCEPT
@@ -5430,17 +5641,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkDeferredOperationKHR m_deferredOperationKHR = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeferredOperationKHR ) == sizeof( VkDeferredOperationKHR ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::value,
- "DeferredOperationKHR is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eDeferredOperationKHR>
- {
- using type = VULKAN_HPP_NAMESPACE::DeferredOperationKHR;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR>
@@ -5461,18 +5661,17 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkBufferCollectionFUCHSIA;
using NativeType = VkBufferCollectionFUCHSIA;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eBufferCollectionFUCHSIA;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferCollectionFUCHSIA;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA;
public:
- VULKAN_HPP_CONSTEXPR BufferCollectionFUCHSIA() = default;
- VULKAN_HPP_CONSTEXPR BufferCollectionFUCHSIA( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT
- BufferCollectionFUCHSIA( VkBufferCollectionFUCHSIA bufferCollectionFUCHSIA ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR BufferCollectionFUCHSIA() = default;
+ VULKAN_HPP_CONSTEXPR BufferCollectionFUCHSIA( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT BufferCollectionFUCHSIA( VkBufferCollectionFUCHSIA bufferCollectionFUCHSIA ) VULKAN_HPP_NOEXCEPT
: m_bufferCollectionFUCHSIA( bufferCollectionFUCHSIA )
- {}
+ {
+ }
# if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
BufferCollectionFUCHSIA & operator=( VkBufferCollectionFUCHSIA bufferCollectionFUCHSIA ) VULKAN_HPP_NOEXCEPT
@@ -5525,18 +5724,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkBufferCollectionFUCHSIA m_bufferCollectionFUCHSIA = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA ) ==
- sizeof( VkBufferCollectionFUCHSIA ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA>::value,
- "BufferCollectionFUCHSIA is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eBufferCollectionFUCHSIA>
- {
- using type = VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eBufferCollectionFUCHSIA>
@@ -5545,8 +5732,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA>
{
using Type = VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA;
};
@@ -5564,16 +5750,14 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkBufferView;
using NativeType = VkBufferView;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eBufferView;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferView;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView;
public:
VULKAN_HPP_CONSTEXPR BufferView() = default;
VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT : m_bufferView( bufferView )
- {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT : m_bufferView( bufferView ) {}
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
BufferView & operator=( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT
@@ -5626,17 +5810,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkBufferView m_bufferView = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferView ) == sizeof( VkBufferView ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferView>::value,
- "BufferView is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eBufferView>
- {
- using type = VULKAN_HPP_NAMESPACE::BufferView;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eBufferView>
@@ -5645,8 +5818,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView>
{
using Type = VULKAN_HPP_NAMESPACE::BufferView;
};
@@ -5663,17 +5835,14 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkCommandPool;
using NativeType = VkCommandPool;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool;
public:
VULKAN_HPP_CONSTEXPR CommandPool() = default;
VULKAN_HPP_CONSTEXPR CommandPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT CommandPool( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT
- : m_commandPool( commandPool )
- {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT CommandPool( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT : m_commandPool( commandPool ) {}
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
CommandPool & operator=( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT
@@ -5726,17 +5895,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkCommandPool m_commandPool = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandPool ) == sizeof( VkCommandPool ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandPool>::value,
- "CommandPool is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eCommandPool>
- {
- using type = VULKAN_HPP_NAMESPACE::CommandPool;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool>
@@ -5745,8 +5903,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool>
{
using Type = VULKAN_HPP_NAMESPACE::CommandPool;
};
@@ -5763,17 +5920,14 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkPipelineCache;
using NativeType = VkPipelineCache;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache;
public:
VULKAN_HPP_CONSTEXPR PipelineCache() = default;
VULKAN_HPP_CONSTEXPR PipelineCache( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT PipelineCache( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT
- : m_pipelineCache( pipelineCache )
- {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT PipelineCache( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT : m_pipelineCache( pipelineCache ) {}
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
PipelineCache & operator=( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT
@@ -5826,17 +5980,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkPipelineCache m_pipelineCache = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCache ) == sizeof( VkPipelineCache ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCache>::value,
- "PipelineCache is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::ePipelineCache>
- {
- using type = VULKAN_HPP_NAMESPACE::PipelineCache;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache>
@@ -5845,8 +5988,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache>
{
using Type = VULKAN_HPP_NAMESPACE::PipelineCache;
};
@@ -5863,17 +6005,14 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkCuFunctionNVX;
using NativeType = VkCuFunctionNVX;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eCuFunctionNVX;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCuFunctionNVX;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuFunctionNVX;
public:
VULKAN_HPP_CONSTEXPR CuFunctionNVX() = default;
VULKAN_HPP_CONSTEXPR CuFunctionNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT CuFunctionNVX( VkCuFunctionNVX cuFunctionNVX ) VULKAN_HPP_NOEXCEPT
- : m_cuFunctionNVX( cuFunctionNVX )
- {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT CuFunctionNVX( VkCuFunctionNVX cuFunctionNVX ) VULKAN_HPP_NOEXCEPT : m_cuFunctionNVX( cuFunctionNVX ) {}
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
CuFunctionNVX & operator=( VkCuFunctionNVX cuFunctionNVX ) VULKAN_HPP_NOEXCEPT
@@ -5926,17 +6065,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkCuFunctionNVX m_cuFunctionNVX = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuFunctionNVX ) == sizeof( VkCuFunctionNVX ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuFunctionNVX>::value,
- "CuFunctionNVX is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eCuFunctionNVX>
- {
- using type = VULKAN_HPP_NAMESPACE::CuFunctionNVX;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eCuFunctionNVX>
@@ -5945,8 +6073,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuFunctionNVX>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuFunctionNVX>
{
using Type = VULKAN_HPP_NAMESPACE::CuFunctionNVX;
};
@@ -5963,17 +6090,14 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkCuModuleNVX;
using NativeType = VkCuModuleNVX;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eCuModuleNVX;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCuModuleNVX;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuModuleNVX;
public:
VULKAN_HPP_CONSTEXPR CuModuleNVX() = default;
VULKAN_HPP_CONSTEXPR CuModuleNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT CuModuleNVX( VkCuModuleNVX cuModuleNVX ) VULKAN_HPP_NOEXCEPT
- : m_cuModuleNVX( cuModuleNVX )
- {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT CuModuleNVX( VkCuModuleNVX cuModuleNVX ) VULKAN_HPP_NOEXCEPT : m_cuModuleNVX( cuModuleNVX ) {}
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
CuModuleNVX & operator=( VkCuModuleNVX cuModuleNVX ) VULKAN_HPP_NOEXCEPT
@@ -6026,17 +6150,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkCuModuleNVX m_cuModuleNVX = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuModuleNVX ) == sizeof( VkCuModuleNVX ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuModuleNVX>::value,
- "CuModuleNVX is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eCuModuleNVX>
- {
- using type = VULKAN_HPP_NAMESPACE::CuModuleNVX;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eCuModuleNVX>
@@ -6045,8 +6158,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuModuleNVX>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuModuleNVX>
{
using Type = VULKAN_HPP_NAMESPACE::CuModuleNVX;
};
@@ -6063,17 +6175,14 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkDescriptorPool;
using NativeType = VkDescriptorPool;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool;
public:
VULKAN_HPP_CONSTEXPR DescriptorPool() = default;
VULKAN_HPP_CONSTEXPR DescriptorPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorPool( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT
- : m_descriptorPool( descriptorPool )
- {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorPool( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT : m_descriptorPool( descriptorPool ) {}
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
DescriptorPool & operator=( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT
@@ -6126,17 +6235,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkDescriptorPool m_descriptorPool = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPool ) == sizeof( VkDescriptorPool ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorPool>::value,
- "DescriptorPool is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eDescriptorPool>
- {
- using type = VULKAN_HPP_NAMESPACE::DescriptorPool;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool>
@@ -6145,8 +6243,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool>
{
using Type = VULKAN_HPP_NAMESPACE::DescriptorPool;
};
@@ -6163,8 +6260,7 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkDescriptorSetLayout;
using NativeType = VkDescriptorSetLayout;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout;
@@ -6173,7 +6269,8 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_CONSTEXPR DescriptorSetLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSetLayout( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT
: m_descriptorSetLayout( descriptorSetLayout )
- {}
+ {
+ }
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
DescriptorSetLayout & operator=( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT
@@ -6226,17 +6323,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkDescriptorSetLayout m_descriptorSetLayout = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::value,
- "DescriptorSetLayout is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eDescriptorSetLayout>
- {
- using type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout>
@@ -6245,8 +6331,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout>
{
using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout;
};
@@ -6263,17 +6348,14 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkFramebuffer;
using NativeType = VkFramebuffer;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer;
public:
VULKAN_HPP_CONSTEXPR Framebuffer() = default;
VULKAN_HPP_CONSTEXPR Framebuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT Framebuffer( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT
- : m_framebuffer( framebuffer )
- {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT Framebuffer( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT : m_framebuffer( framebuffer ) {}
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
Framebuffer & operator=( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT
@@ -6326,17 +6408,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkFramebuffer m_framebuffer = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Framebuffer ) == sizeof( VkFramebuffer ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Framebuffer>::value,
- "Framebuffer is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eFramebuffer>
- {
- using type = VULKAN_HPP_NAMESPACE::Framebuffer;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer>
@@ -6345,8 +6416,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer>
{
using Type = VULKAN_HPP_NAMESPACE::Framebuffer;
};
@@ -6363,18 +6433,17 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkIndirectCommandsLayoutNV;
using NativeType = VkIndirectCommandsLayoutNV;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
- VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV() = default;
- VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT
- IndirectCommandsLayoutNV( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV() = default;
+ VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutNV( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT
: m_indirectCommandsLayoutNV( indirectCommandsLayoutNV )
- {}
+ {
+ }
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
IndirectCommandsLayoutNV & operator=( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT
@@ -6427,18 +6496,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkIndirectCommandsLayoutNV m_indirectCommandsLayoutNV = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV ) ==
- sizeof( VkIndirectCommandsLayoutNV ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::value,
- "IndirectCommandsLayoutNV is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eIndirectCommandsLayoutNV>
- {
- using type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV>
@@ -6458,17 +6515,14 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkPrivateDataSlot;
using NativeType = VkPrivateDataSlot;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlot;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlot;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
VULKAN_HPP_CONSTEXPR PrivateDataSlot() = default;
VULKAN_HPP_CONSTEXPR PrivateDataSlot( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT PrivateDataSlot( VkPrivateDataSlot privateDataSlot ) VULKAN_HPP_NOEXCEPT
- : m_privateDataSlot( privateDataSlot )
- {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT PrivateDataSlot( VkPrivateDataSlot privateDataSlot ) VULKAN_HPP_NOEXCEPT : m_privateDataSlot( privateDataSlot ) {}
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
PrivateDataSlot & operator=( VkPrivateDataSlot privateDataSlot ) VULKAN_HPP_NOEXCEPT
@@ -6521,17 +6575,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkPrivateDataSlot m_privateDataSlot = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PrivateDataSlot ) == sizeof( VkPrivateDataSlot ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::value,
- "PrivateDataSlot is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::ePrivateDataSlot>
- {
- using type = VULKAN_HPP_NAMESPACE::PrivateDataSlot;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlot>
@@ -6552,16 +6595,14 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkRenderPass;
using NativeType = VkRenderPass;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass;
public:
VULKAN_HPP_CONSTEXPR RenderPass() = default;
VULKAN_HPP_CONSTEXPR RenderPass( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT RenderPass( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT : m_renderPass( renderPass )
- {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT RenderPass( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT : m_renderPass( renderPass ) {}
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
RenderPass & operator=( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT
@@ -6614,17 +6655,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkRenderPass m_renderPass = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPass ) == sizeof( VkRenderPass ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPass>::value,
- "RenderPass is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eRenderPass>
- {
- using type = VULKAN_HPP_NAMESPACE::RenderPass;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass>
@@ -6633,8 +6663,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass>
{
using Type = VULKAN_HPP_NAMESPACE::RenderPass;
};
@@ -6651,8 +6680,7 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkSampler;
using NativeType = VkSampler;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eSampler;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSampler;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler;
@@ -6712,16 +6740,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkSampler m_sampler = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Sampler ) == sizeof( VkSampler ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Sampler>::value,
- "Sampler is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eSampler>
- {
- using type = VULKAN_HPP_NAMESPACE::Sampler;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSampler>
@@ -6730,8 +6748,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler>
{
using Type = VULKAN_HPP_NAMESPACE::Sampler;
};
@@ -6748,18 +6765,17 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkSamplerYcbcrConversion;
using NativeType = VkSamplerYcbcrConversion;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion;
public:
- VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion() = default;
- VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT
- SamplerYcbcrConversion( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion() = default;
+ VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT SamplerYcbcrConversion( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT
: m_samplerYcbcrConversion( samplerYcbcrConversion )
- {}
+ {
+ }
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
SamplerYcbcrConversion & operator=( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT
@@ -6812,18 +6828,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkSamplerYcbcrConversion m_samplerYcbcrConversion = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ) ==
- sizeof( VkSamplerYcbcrConversion ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::value,
- "SamplerYcbcrConversion is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eSamplerYcbcrConversion>
- {
- using type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion>
@@ -6832,8 +6836,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion>
{
using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion;
};
@@ -6851,17 +6854,14 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkShaderModule;
using NativeType = VkShaderModule;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule;
public:
VULKAN_HPP_CONSTEXPR ShaderModule() = default;
VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT
- : m_shaderModule( shaderModule )
- {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT : m_shaderModule( shaderModule ) {}
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
ShaderModule & operator=( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT
@@ -6914,17 +6914,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkShaderModule m_shaderModule = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModule ) == sizeof( VkShaderModule ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderModule>::value,
- "ShaderModule is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eShaderModule>
- {
- using type = VULKAN_HPP_NAMESPACE::ShaderModule;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule>
@@ -6933,8 +6922,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule>
{
using Type = VULKAN_HPP_NAMESPACE::ShaderModule;
};
@@ -6951,17 +6939,16 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkValidationCacheEXT;
using NativeType = VkValidationCacheEXT;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT;
public:
VULKAN_HPP_CONSTEXPR ValidationCacheEXT() = default;
VULKAN_HPP_CONSTEXPR ValidationCacheEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT ValidationCacheEXT( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT
- : m_validationCacheEXT( validationCacheEXT )
- {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT ValidationCacheEXT( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT : m_validationCacheEXT( validationCacheEXT )
+ {
+ }
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
ValidationCacheEXT & operator=( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT
@@ -7014,17 +7001,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkValidationCacheEXT m_validationCacheEXT = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationCacheEXT ) == sizeof( VkValidationCacheEXT ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::value,
- "ValidationCacheEXT is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eValidationCacheEXT>
- {
- using type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT>
@@ -7033,8 +7009,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT>
{
using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT;
};
@@ -7052,18 +7027,17 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkVideoSessionParametersKHR;
using NativeType = VkVideoSessionParametersKHR;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionParametersKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionParametersKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
- VULKAN_HPP_CONSTEXPR VideoSessionParametersKHR() = default;
- VULKAN_HPP_CONSTEXPR VideoSessionParametersKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT
- VideoSessionParametersKHR( VkVideoSessionParametersKHR videoSessionParametersKHR ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR VideoSessionParametersKHR() = default;
+ VULKAN_HPP_CONSTEXPR VideoSessionParametersKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT VideoSessionParametersKHR( VkVideoSessionParametersKHR videoSessionParametersKHR ) VULKAN_HPP_NOEXCEPT
: m_videoSessionParametersKHR( videoSessionParametersKHR )
- {}
+ {
+ }
# if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
VideoSessionParametersKHR & operator=( VkVideoSessionParametersKHR videoSessionParametersKHR ) VULKAN_HPP_NOEXCEPT
@@ -7116,18 +7090,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkVideoSessionParametersKHR m_videoSessionParametersKHR = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR ) ==
- sizeof( VkVideoSessionParametersKHR ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>::value,
- "VideoSessionParametersKHR is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eVideoSessionParametersKHR>
- {
- using type = VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionParametersKHR>
@@ -7148,23 +7110,20 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkQueue;
using NativeType = VkQueue;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eQueue;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueue;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue;
public:
- VULKAN_HPP_CONSTEXPR Queue() = default;
- VULKAN_HPP_CONSTEXPR Queue( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT Queue( VkQueue queue ) VULKAN_HPP_NOEXCEPT : m_queue( queue ) {}
+ VULKAN_HPP_CONSTEXPR Queue() = default;
+ VULKAN_HPP_CONSTEXPR Queue( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+ Queue( VkQueue queue ) VULKAN_HPP_NOEXCEPT : m_queue( queue ) {}
-#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
Queue & operator=( VkQueue queue ) VULKAN_HPP_NOEXCEPT
{
m_queue = queue;
return *this;
}
-#endif
Queue & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
@@ -7194,96 +7153,86 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_VERSION_1_0 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- submit( uint32_t submitCount,
- const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits,
- VULKAN_HPP_NAMESPACE::Fence fence,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result submit( uint32_t submitCount,
+ const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits,
+ VULKAN_HPP_NAMESPACE::Fence fence,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits,
- VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ submit( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits,
+ VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- bindSparse( uint32_t bindInfoCount,
- const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo,
- VULKAN_HPP_NAMESPACE::Fence fence,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result bindSparse( uint32_t bindInfoCount,
+ const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo,
+ VULKAN_HPP_NAMESPACE::Fence fence,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- bindSparse( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo,
- VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ bindSparse( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo,
+ VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_VERSION_1_3 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- submit2( uint32_t submitCount,
- const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits,
- VULKAN_HPP_NAMESPACE::Fence fence,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result submit2( uint32_t submitCount,
+ const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits,
+ VULKAN_HPP_NAMESPACE::Fence fence,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- submit2( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,
- VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ submit2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,
+ VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_swapchain ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_EXT_debug_utils ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void endDebugUtilsLabelEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_NV_device_diagnostic_checkpoints ===
@@ -7291,70 +7240,66 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getCheckpointDataNV( uint32_t * pCheckpointDataCount,
VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename CheckpointDataNVAllocator = std::allocator<CheckpointDataNV>,
+ template <typename CheckpointDataNVAllocator = std::allocator<VULKAN_HPP_NAMESPACE::CheckpointDataNV>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD std::vector<CheckpointDataNV, CheckpointDataNVAllocator>
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator>
getCheckpointDataNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename CheckpointDataNVAllocator = std::allocator<CheckpointDataNV>,
+ template <typename CheckpointDataNVAllocator = std::allocator<VULKAN_HPP_NAMESPACE::CheckpointDataNV>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = CheckpointDataNVAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, CheckpointDataNV>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD std::vector<CheckpointDataNV, CheckpointDataNVAllocator>
- getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename B1 = CheckpointDataNVAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, CheckpointDataNV>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator>
+ getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_INTEL_performance_query ===
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result setPerformanceConfigurationINTEL(
- VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_synchronization2 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- submit2KHR( uint32_t submitCount,
- const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits,
- VULKAN_HPP_NAMESPACE::Fence fence,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result submit2KHR( uint32_t submitCount,
+ const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits,
+ VULKAN_HPP_NAMESPACE::Fence fence,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- submit2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,
- VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ submit2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,
+ VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getCheckpointData2NV( uint32_t * pCheckpointDataCount,
VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename CheckpointData2NVAllocator = std::allocator<CheckpointData2NV>,
+ template <typename CheckpointData2NVAllocator = std::allocator<VULKAN_HPP_NAMESPACE::CheckpointData2NV>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD std::vector<CheckpointData2NV, CheckpointData2NVAllocator>
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator>
getCheckpointData2NV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename CheckpointData2NVAllocator = std::allocator<CheckpointData2NV>,
+ template <typename CheckpointData2NVAllocator = std::allocator<VULKAN_HPP_NAMESPACE::CheckpointData2NV>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = CheckpointData2NVAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, CheckpointData2NV>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD std::vector<CheckpointData2NV, CheckpointData2NVAllocator>
- getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename B1 = CheckpointData2NVAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, CheckpointData2NV>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator>
+ getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueue() const VULKAN_HPP_NOEXCEPT
+ operator VkQueue() const VULKAN_HPP_NOEXCEPT
{
return m_queue;
}
@@ -7372,16 +7317,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkQueue m_queue = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Queue ) == sizeof( VkQueue ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Queue>::value,
- "Queue is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eQueue>
- {
- using type = VULKAN_HPP_NAMESPACE::Queue;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eQueue>
@@ -7554,6 +7489,20 @@ namespace VULKAN_HPP_NAMESPACE
};
using UniqueIndirectCommandsLayoutNV = UniqueHandle<IndirectCommandsLayoutNV, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
template <typename Dispatch>
+ class UniqueHandleTraits<MicromapEXT, Dispatch>
+ {
+ public:
+ using deleter = ObjectDestroy<Device, Dispatch>;
+ };
+ using UniqueMicromapEXT = UniqueHandle<MicromapEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch>
+ class UniqueHandleTraits<OpticalFlowSessionNV, Dispatch>
+ {
+ public:
+ using deleter = ObjectDestroy<Device, Dispatch>;
+ };
+ using UniqueOpticalFlowSessionNV = UniqueHandle<OpticalFlowSessionNV, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ template <typename Dispatch>
class UniqueHandleTraits<Pipeline, Dispatch>
{
public:
@@ -7665,23 +7614,20 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkDevice;
using NativeType = VkDevice;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eDevice;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDevice;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice;
public:
- VULKAN_HPP_CONSTEXPR Device() = default;
- VULKAN_HPP_CONSTEXPR Device( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT Device( VkDevice device ) VULKAN_HPP_NOEXCEPT : m_device( device ) {}
+ VULKAN_HPP_CONSTEXPR Device() = default;
+ VULKAN_HPP_CONSTEXPR Device( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+ Device( VkDevice device ) VULKAN_HPP_NOEXCEPT : m_device( device ) {}
-#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
Device & operator=( VkDevice device ) VULKAN_HPP_NOEXCEPT
{
m_device = device;
return *this;
}
-#endif
Device & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
@@ -7711,24 +7657,19 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_VERSION_1_0 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- PFN_vkVoidFunction
- getProcAddr( const char * pName,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ PFN_vkVoidFunction getProcAddr( const char * pName, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- PFN_vkVoidFunction
- getProcAddr( const std::string & name,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -7739,133 +7680,116 @@ namespace VULKAN_HPP_NAMESPACE
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Queue
- getQueue( uint32_t queueFamilyIndex,
- uint32_t queueIndex,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type
- allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type
+ allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>>::type
- allocateMemoryUnique( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>>::type
+ allocateMemoryUnique( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void( free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void( free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- VULKAN_HPP_NAMESPACE::DeviceSize size,
- VULKAN_HPP_NAMESPACE::MemoryMapFlags flags,
- void ** ppData,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::DeviceSize size,
+ VULKAN_HPP_NAMESPACE::MemoryMapFlags flags,
+ void ** ppData,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void *>::type
- mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- VULKAN_HPP_NAMESPACE::DeviceSize size,
- VULKAN_HPP_NAMESPACE::MemoryMapFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<void *>::type mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::DeviceSize size,
+ VULKAN_HPP_NAMESPACE::MemoryMapFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- flushMappedMemoryRanges( uint32_t memoryRangeCount,
- const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result flushMappedMemoryRanges( uint32_t memoryRangeCount,
+ const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- flushMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,
+ flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result invalidateMappedMemoryRanges(
- uint32_t memoryRangeCount,
- const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result invalidateMappedMemoryRanges( uint32_t memoryRangeCount,
+ const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- invalidateMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,
+ invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize
- getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
@@ -7877,11 +7801,10 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- bindImageMemory( VULKAN_HPP_NAMESPACE::Image image,
- VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result bindImageMemory( VULKAN_HPP_NAMESPACE::Image image,
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
@@ -7894,673 +7817,580 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getBufferMemoryRequirements(
- VULKAN_HPP_NAMESPACE::Buffer buffer,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements
+ getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getImageMemoryRequirements(
- VULKAN_HPP_NAMESPACE::Image image,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements
+ getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getImageSparseMemoryRequirements(
- VULKAN_HPP_NAMESPACE::Image image,
- uint32_t * pSparseMemoryRequirementCount,
- VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
+ uint32_t * pSparseMemoryRequirementCount,
+ VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename SparseImageMemoryRequirementsAllocator = std::allocator<SparseImageMemoryRequirements>,
+ template <typename SparseImageMemoryRequirementsAllocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
- getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename SparseImageMemoryRequirementsAllocator = std::allocator<SparseImageMemoryRequirements>,
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
+ getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename SparseImageMemoryRequirementsAllocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = SparseImageMemoryRequirementsAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
+ typename B1 = SparseImageMemoryRequirementsAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Fence * pFence,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Fence * pFence,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
- createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
+ createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
- createFenceUnique( const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
+ createFenceUnique( const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::Fence fence,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Fence fence,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::Fence fence,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- resetFences( uint32_t fenceCount,
- const VULKAN_HPP_NAMESPACE::Fence * pFences,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result resetFences( uint32_t fenceCount,
+ const VULKAN_HPP_NAMESPACE::Fence * pFences,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type
- resetFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename ResultValueType<void>::type resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- waitForFences( uint32_t fenceCount,
- const VULKAN_HPP_NAMESPACE::Fence * pFences,
- VULKAN_HPP_NAMESPACE::Bool32 waitAll,
- uint64_t timeout,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result waitForFences( uint32_t fenceCount,
+ const VULKAN_HPP_NAMESPACE::Fence * pFences,
+ VULKAN_HPP_NAMESPACE::Bool32 waitAll,
+ uint64_t timeout,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result waitForFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,
- VULKAN_HPP_NAMESPACE::Bool32 waitAll,
- uint64_t timeout,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitForFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,
+ VULKAN_HPP_NAMESPACE::Bool32 waitAll,
+ uint64_t timeout,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type
- createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type
+ createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>>::type
- createSemaphoreUnique( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>>::type
+ createSemaphoreUnique( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Event * pEvent,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Event * pEvent,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type
- createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type
+ createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>>::type
- createEventUnique( const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>>::type
+ createEventUnique( const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyEvent( VULKAN_HPP_NAMESPACE::Event event,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyEvent( VULKAN_HPP_NAMESPACE::Event event VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyEvent( VULKAN_HPP_NAMESPACE::Event event VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::Event event,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Event event,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::Event event,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getEventStatus( VULKAN_HPP_NAMESPACE::Event event,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- setEvent( VULKAN_HPP_NAMESPACE::Event event,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result setEvent( VULKAN_HPP_NAMESPACE::Event event,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setEvent( VULKAN_HPP_NAMESPACE::Event event,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- resetEvent( VULKAN_HPP_NAMESPACE::Event event,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result resetEvent( VULKAN_HPP_NAMESPACE::Event event,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type
- resetEvent( VULKAN_HPP_NAMESPACE::Event event,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename ResultValueType<void>::type resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type
- createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type
+ createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>>::type
- createQueryPoolUnique( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>>::type
+ createQueryPoolUnique( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- size_t dataSize,
- void * pData,
- VULKAN_HPP_NAMESPACE::DeviceSize stride,
- VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ uint32_t queryCount,
+ size_t dataSize,
+ void * pData,
+ VULKAN_HPP_NAMESPACE::DeviceSize stride,
+ VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- ArrayProxy<T> const & data,
- VULKAN_HPP_NAMESPACE::DeviceSize stride,
- VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename DataType,
- typename Allocator = std::allocator<DataType>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<std::vector<DataType, Allocator>>
+ template <typename DataType, typename DataTypeAllocator = std::allocator<DataType>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<DataType, DataTypeAllocator>>
getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t firstQuery,
uint32_t queryCount,
size_t dataSize,
VULKAN_HPP_NAMESPACE::DeviceSize stride,
VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<DataType>
- getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- VULKAN_HPP_NAMESPACE::DeviceSize stride,
- VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD ResultValue<DataType> getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ uint32_t queryCount,
+ VULKAN_HPP_NAMESPACE::DeviceSize stride,
+ VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Buffer * pBuffer,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Buffer * pBuffer,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type
- createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type
+ createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>>::type
- createBufferUnique( const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>>::type
+ createBufferUnique( const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::BufferView * pView,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::BufferView * pView,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type
- createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type
+ createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>>::type
- createBufferViewUnique( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>>::type
+ createBufferViewUnique( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Image * pImage,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Image * pImage,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type
- createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type
+ createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>>::type
- createImageUnique( const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>>::type
+ createImageUnique( const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyImage( VULKAN_HPP_NAMESPACE::Image image,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyImage( VULKAN_HPP_NAMESPACE::Image image VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyImage( VULKAN_HPP_NAMESPACE::Image image VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::Image image,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Image image,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::Image image,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image,
const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource,
VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout getImageSubresourceLayout(
- VULKAN_HPP_NAMESPACE::Image image,
- const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout
+ getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image,
+ const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::ImageView * pView,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::ImageView * pView,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type
- createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type
+ createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>>::type
- createImageViewUnique( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>>::type
+ createImageViewUnique( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type
- createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type
+ createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>>::type
- createShaderModuleUnique( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>>::type
+ createShaderModuleUnique( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type
- createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type
+ createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>>::type
- createPipelineCacheUnique( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>>::type
+ createPipelineCacheUnique( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- size_t * pDataSize,
- void * pData,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ size_t * pDataSize,
+ void * pData,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Uint8_tAllocator = std::allocator<uint8_t>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ template <typename Uint8_tAllocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
- getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename Uint8_tAllocator = std::allocator<uint8_t>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = Uint8_tAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type = 0>
+ getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Uint8_tAllocator = std::allocator<uint8_t>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = Uint8_tAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type = 0>
VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
Uint8_tAllocator & uint8_tAllocator,
@@ -8568,423 +8398,365 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache,
- uint32_t srcCacheCount,
- const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache,
+ uint32_t srcCacheCount,
+ const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ uint32_t createInfoCount,
+ const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PipelineAllocator = std::allocator<Pipeline>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ template <typename PipelineAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
- createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename PipelineAllocator = std::allocator<Pipeline>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = PipelineAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
+ createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename PipelineAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Pipeline>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B0 = PipelineAllocator,
+ typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type = 0>
VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
- createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
- createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
- VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- createGraphicsPipelinesUnique(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>,
- typename B = PipelineAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- createGraphicsPipelinesUnique(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>>
- createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename PipelineAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
+ createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename PipelineAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>,
+ typename B0 = PipelineAllocator,
+ typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
+ createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>
+ createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ uint32_t createInfoCount,
+ const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PipelineAllocator = std::allocator<Pipeline>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ template <typename PipelineAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
- createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename PipelineAllocator = std::allocator<Pipeline>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = PipelineAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
+ createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename PipelineAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Pipeline>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B0 = PipelineAllocator,
+ typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type = 0>
VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
- createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
- createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
- VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- createComputePipelinesUnique(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>,
- typename B = PipelineAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- createComputePipelinesUnique(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename PipelineAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
+ createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename PipelineAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>,
+ typename B0 = PipelineAllocator,
+ typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
+ createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>>
- createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>
+ createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type
- createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type
+ createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>>::type
- createPipelineLayoutUnique( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>>::type
+ createPipelineLayoutUnique( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
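  // Usage sketch for the createPipelineLayout overloads above (illustrative only; assumes the default
  // "vk" namespace, exceptions enabled, #include <vulkan/vulkan.hpp>, and a valid vk::Device "device"):
  vk::PipelineLayoutCreateInfo layoutInfo{};  // empty layout: no descriptor set layouts, no push constants
  vk::UniquePipelineLayout pipelineLayout = device.createPipelineLayoutUnique( layoutInfo );
  // The unique handle calls destroyPipelineLayout() on the owning device when it goes out of scope.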
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Sampler * pSampler,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Sampler * pSampler,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type
- createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type
+ createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>>::type
- createSamplerUnique( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>>::type
+ createSamplerUnique( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
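  // Sampler creation sketch matching the enhanced-mode overload above (illustrative values;
  // "device" is an assumed valid vk::Device, exceptions enabled):
  vk::SamplerCreateInfo samplerInfo{};
  samplerInfo.magFilter    = vk::Filter::eLinear;
  samplerInfo.minFilter    = vk::Filter::eLinear;
  samplerInfo.addressModeU = vk::SamplerAddressMode::eRepeat;
  vk::Sampler sampler = device.createSampler( samplerInfo );  // throws vk::SystemError on failure
  device.destroySampler( sampler );                           // pairs with the destroySampler overloads below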
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createDescriptorSetLayout(
- const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type
- createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type
+ createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>>::type
- createDescriptorSetLayoutUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>>::type
+ createDescriptorSetLayoutUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
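  // Descriptor set layout sketch (illustrative; "device" assumed valid, exceptions enabled):
  // one uniform buffer at binding 0, visible to the vertex stage.
  vk::DescriptorSetLayoutBinding binding{ 0, vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex };
  vk::DescriptorSetLayoutCreateInfo layoutInfo{ {}, 1, &binding };
  vk::UniqueDescriptorSetLayout setLayout = device.createDescriptorSetLayoutUnique( layoutInfo );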
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDescriptorSetLayout(
- VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type
- createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type
+ createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>>::type
- createDescriptorPoolUnique( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>>::type
+ createDescriptorPoolUnique( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
Result resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type
- resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
- VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ void resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
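  // Descriptor pool sketch covering the createDescriptorPool / resetDescriptorPool overloads above
  // (illustrative sizes; "device" assumed valid, exceptions enabled):
  vk::DescriptorPoolSize poolSize{ vk::DescriptorType::eUniformBuffer, 16 };
  vk::DescriptorPoolCreateInfo poolInfo{ {}, 16 /*maxSets*/, 1, &poolSize };
  vk::DescriptorPool descriptorPool = device.createDescriptorPool( poolInfo );
  device.resetDescriptorPool( descriptorPool );   // returns every allocated set to the pool
  device.destroyDescriptorPool( descriptorPool );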
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo,
- VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo,
+ VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename DescriptorSetAllocator = std::allocator<DescriptorSet>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type
+ template <typename DescriptorSetAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DescriptorSet>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type
allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename DescriptorSetAllocator = std::allocator<DescriptorSet>,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename DescriptorSetAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DescriptorSet>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = DescriptorSetAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, DescriptorSet>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type
+ typename B0 = DescriptorSetAllocator,
+ typename std::enable_if<std::is_same<typename B0::value_type, DescriptorSet>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type
allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,
DescriptorSetAllocator & descriptorSetAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename DescriptorSetAllocator = std::allocator<UniqueHandle<DescriptorSet, Dispatch>>>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type
+ typename DescriptorSetAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>>>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type
allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename DescriptorSetAllocator = std::allocator<UniqueHandle<DescriptorSet, Dispatch>>,
- typename B = DescriptorSetAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<DescriptorSet, Dispatch>>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename DescriptorSetAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>>,
+ typename B0 = DescriptorSetAllocator,
+ typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<DescriptorSet, Dispatch>>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type
allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,
DescriptorSetAllocator & descriptorSetAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
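  // Allocation sketch for the enhanced allocateDescriptorSets overloads above (illustrative;
  // "descriptorPool" and "setLayout" are assumed valid vk::DescriptorPool / vk::DescriptorSetLayout handles):
  vk::DescriptorSetAllocateInfo allocInfo{ descriptorPool, 1, &setLayout };
  std::vector<vk::DescriptorSet> sets = device.allocateDescriptorSets( allocInfo );
  // allocateDescriptorSetsUnique() returns std::vector of vk::UniqueDescriptorSet instead.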
@@ -8992,26 +8764,24 @@ namespace VULKAN_HPP_NAMESPACE
Result freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
uint32_t descriptorSetCount,
const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type
- freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ void freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
- uint32_t descriptorSetCount,
- const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Result( free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ uint32_t descriptorSetCount,
+ const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type
- free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ void( free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -9019,223 +8789,194 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
uint32_t descriptorCopyCount,
const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void updateDescriptorSets( ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies,
+ void updateDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
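  // updateDescriptorSets sketch using the ArrayProxy overload above (illustrative; "set" and
  // "uniformBuffer" are assumed valid vk::DescriptorSet / vk::Buffer handles):
  vk::DescriptorBufferInfo bufferInfo{ uniformBuffer, 0, VK_WHOLE_SIZE };
  vk::WriteDescriptorSet write{ set, 0 /*binding*/, 0 /*arrayElement*/, 1, vk::DescriptorType::eUniformBuffer, nullptr, &bufferInfo };
  device.updateDescriptorSets( write, nullptr );  // ArrayProxy accepts a single element, a container, or nullptr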
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type
- createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type
+ createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>>::type
- createFramebufferUnique( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>>::type
+ createFramebufferUnique( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
- createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
+ createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
- createRenderPassUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
+ createRenderPassUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
- VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
+ VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D
getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
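  // Render pass sketch for the createRenderPass / getRenderAreaGranularity overloads above
  // (illustrative; "colorAttachment" and "subpass" are assumed filled-in vk::AttachmentDescription /
  // vk::SubpassDescription values, exceptions enabled):
  vk::RenderPassCreateInfo rpInfo{ {}, 1, &colorAttachment, 1, &subpass };
  vk::UniqueRenderPass renderPass = device.createRenderPassUnique( rpInfo );
  vk::Extent2D granularity = device.getRenderAreaGranularity( *renderPass );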
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type
- createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type
+ createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>>::type
- createCommandPoolUnique( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>>::type
+ createCommandPoolUnique( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type
- resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename ResultValueType<void>::type resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
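  // Command pool sketch for the createCommandPool / resetCommandPool overloads above (illustrative;
  // "queueFamilyIndex" is an assumed uint32_t, exceptions enabled):
  vk::CommandPoolCreateInfo cpInfo{ vk::CommandPoolCreateFlagBits::eResetCommandBuffer, queueFamilyIndex };
  vk::UniqueCommandPool commandPool = device.createCommandPoolUnique( cpInfo );
  device.resetCommandPool( *commandPool );   // flags default to an empty vk::CommandPoolResetFlags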
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo,
- VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo,
+ VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename CommandBufferAllocator = std::allocator<CommandBuffer>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type
+ template <typename CommandBufferAllocator = std::allocator<VULKAN_HPP_NAMESPACE::CommandBuffer>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type
allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename CommandBufferAllocator = std::allocator<CommandBuffer>,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename CommandBufferAllocator = std::allocator<VULKAN_HPP_NAMESPACE::CommandBuffer>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = CommandBufferAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, CommandBuffer>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type
+ typename B0 = CommandBufferAllocator,
+ typename std::enable_if<std::is_same<typename B0::value_type, CommandBuffer>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type
allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo,
CommandBufferAllocator & commandBufferAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename CommandBufferAllocator = std::allocator<UniqueHandle<CommandBuffer, Dispatch>>>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
+ typename CommandBufferAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>>>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename CommandBufferAllocator = std::allocator<UniqueHandle<CommandBuffer, Dispatch>>,
- typename B = CommandBufferAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<CommandBuffer, Dispatch>>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename CommandBufferAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>>,
+ typename B0 = CommandBufferAllocator,
+ typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<CommandBuffer, Dispatch>>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo,
CommandBufferAllocator & commandBufferAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
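  // Command buffer allocation sketch for the overloads above (illustrative; "commandPool" is an
  // assumed valid vk::CommandPool):
  vk::CommandBufferAllocateInfo cbInfo{ commandPool, vk::CommandBufferLevel::ePrimary, 1 };
  std::vector<vk::UniqueCommandBuffer> cmdBufs = device.allocateCommandBuffersUnique( cbInfo );
  // Each vk::UniqueCommandBuffer frees itself back to "commandPool" on destruction.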
@@ -9243,49 +8984,47 @@ namespace VULKAN_HPP_NAMESPACE
void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
uint32_t commandBufferCount,
const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
+ void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- uint32_t commandBufferCount,
- const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void( free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ uint32_t commandBufferCount,
+ const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void( free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_VERSION_1_1 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- bindBufferMemory2( uint32_t bindInfoCount,
- const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result bindBufferMemory2( uint32_t bindInfoCount,
+ const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- bindBufferMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,
+ bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- bindImageMemory2( uint32_t bindInfoCount,
- const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result bindImageMemory2( uint32_t bindInfoCount,
+ const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- bindImageMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos,
+ bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
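  // Vulkan 1.1 batched-binding sketch for bindBufferMemory2 above (illustrative; "buffer" and
  // "memory" are assumed valid vk::Buffer / vk::DeviceMemory handles):
  vk::BindBufferMemoryInfo bindInfo{ buffer, memory, 0 /*memoryOffset*/ };
  device.bindBufferMemory2( bindInfo );  // the ArrayProxy also accepts a std::vector of BindBufferMemoryInfo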
@@ -9294,160 +9033,140 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t localDeviceIndex,
uint32_t remoteDeviceIndex,
VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeatures(
- uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags
+ getGroupPeerMemoryFeatures( uint32_t heapIndex,
+ uint32_t localDeviceIndex,
+ uint32_t remoteDeviceIndex,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,
VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2(
- const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
+ getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getImageMemoryRequirements2(
- const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,
VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2(
- const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
+ getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getBufferMemoryRequirements2(
- const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
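  // Structure-chain sketch for the getImageMemoryRequirements2 overloads above (illustrative;
  // "image" is an assumed valid vk::Image):
  vk::ImageMemoryRequirementsInfo2 reqInfo{ image };
  auto chain = device.getImageMemoryRequirements2<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>( reqInfo );
  vk::MemoryRequirements reqs = chain.get<vk::MemoryRequirements2>().memoryRequirements;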
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getImageSparseMemoryRequirements2(
- const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,
- uint32_t * pSparseMemoryRequirementCount,
- VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,
+ uint32_t * pSparseMemoryRequirementCount,
+ VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename SparseImageMemoryRequirements2Allocator = std::allocator<SparseImageMemoryRequirements2>,
+ template <typename SparseImageMemoryRequirements2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
- getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename SparseImageMemoryRequirements2Allocator = std::allocator<SparseImageMemoryRequirements2>,
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
+ getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename SparseImageMemoryRequirements2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = SparseImageMemoryRequirements2Allocator,
- typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
- getImageSparseMemoryRequirements2(
- const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,
- SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename B1 = SparseImageMemoryRequirements2Allocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements2>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
+ getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,
+ SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo,
VULKAN_HPP_NAMESPACE::Queue * pQueue,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Queue
- getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & queueInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Queue getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & queueInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversion(
- const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
- createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
+ createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
- createSamplerYcbcrConversionUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
+ createSamplerYcbcrConversionUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySamplerYcbcrConversion(
- VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplate(
- const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type
- createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type
+ createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type
- createDescriptorUpdateTemplateUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type
+ createDescriptorUpdateTemplateUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -9455,83 +9174,72 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDescriptorUpdateTemplate(
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
const void * pData,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
DataType const & data,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport(
- const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
+ getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getDescriptorSetLayoutSupport(
- const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_VERSION_1_2 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
- createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
+ createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
- createRenderPass2Unique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
+ createRenderPass2Unique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -9539,143 +9247,123 @@ namespace VULKAN_HPP_NAMESPACE
void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t firstQuery,
uint32_t queryCount,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
- uint64_t * pValue,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+ uint64_t * pValue,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<uint64_t>::type
- getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<uint64_t>::type getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,
- uint64_t timeout,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,
+ uint64_t timeout,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo,
- uint64_t timeout,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo,
+ uint64_t timeout,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- DeviceAddress
- getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ DeviceAddress getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- DeviceAddress
- getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::DeviceAddress getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
uint64_t getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
uint64_t getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_VERSION_1_3 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::type
- createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::type
+ createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>>::type
- createPrivateDataSlotUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>>::type
+ createPrivateDataSlotUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyPrivateDataSlot(
- VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType,
- uint64_t objectHandle,
- VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
- uint64_t data,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType,
+ uint64_t objectHandle,
+ VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
+ uint64_t data,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type
- setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType,
- uint64_t objectHandle,
- VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
- uint64_t data,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename ResultValueType<void>::type setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType,
+ uint64_t objectHandle,
+ VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
+ uint64_t data,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -9683,590 +9371,509 @@ namespace VULKAN_HPP_NAMESPACE
uint64_t objectHandle,
VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
uint64_t * pData,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD uint64_t
- getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType,
- uint64_t objectHandle,
- VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD uint64_t getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType,
+ uint64_t objectHandle,
+ VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo,
VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements(
- const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
+ getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getBufferMemoryRequirements(
- const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements(
- const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
+ getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getImageMemoryRequirements(
- const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getImageSparseMemoryRequirements(
- const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
- uint32_t * pSparseMemoryRequirementCount,
- VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
+ uint32_t * pSparseMemoryRequirementCount,
+ VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename SparseImageMemoryRequirements2Allocator = std::allocator<SparseImageMemoryRequirements2>,
+ template <typename SparseImageMemoryRequirements2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
- getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename SparseImageMemoryRequirements2Allocator = std::allocator<SparseImageMemoryRequirements2>,
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
+ getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename SparseImageMemoryRequirements2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = SparseImageMemoryRequirements2Allocator,
- typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
- getImageSparseMemoryRequirements(
- const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
- SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename B1 = SparseImageMemoryRequirements2Allocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements2>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
+ getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
+ SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_swapchain ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type
- createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type
+ createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type
- createSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type
+ createSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- uint32_t * pSwapchainImageCount,
- VULKAN_HPP_NAMESPACE::Image * pSwapchainImages,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ uint32_t * pSwapchainImageCount,
+ VULKAN_HPP_NAMESPACE::Image * pSwapchainImages,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename ImageAllocator = std::allocator<Image>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<Image, ImageAllocator>>::type
- getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename ImageAllocator = std::allocator<Image>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = ImageAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, Image>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<Image, ImageAllocator>>::type
- getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- ImageAllocator & imageAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename ImageAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Image>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator>>::type
+ getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename ImageAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Image>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = ImageAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, Image>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator>>::type getSwapchainImagesKHR(
+ VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, ImageAllocator & imageAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- uint64_t timeout,
- VULKAN_HPP_NAMESPACE::Semaphore semaphore,
- VULKAN_HPP_NAMESPACE::Fence fence,
- uint32_t * pImageIndex,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ uint64_t timeout,
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+ VULKAN_HPP_NAMESPACE::Fence fence,
+ uint32_t * pImageIndex,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<uint32_t>
- acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- uint64_t timeout,
- VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD ResultValue<uint32_t> acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ uint64_t timeout,
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getGroupPresentCapabilitiesKHR(
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type
getGroupPresentCapabilitiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModesKHR(
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
- getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
+ getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo,
- uint32_t * pImageIndex,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo,
+ uint32_t * pImageIndex,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<uint32_t>
- acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD ResultValue<uint32_t> acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_display_swapchain ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createSharedSwapchainsKHR(
- uint32_t swapchainCount,
- const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createSharedSwapchainsKHR( uint32_t swapchainCount,
+ const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename SwapchainKHRAllocator = std::allocator<SwapchainKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type
- createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename SwapchainKHRAllocator = std::allocator<SwapchainKHR>,
+ template <typename SwapchainKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::SwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type
+ createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename SwapchainKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::SwapchainKHR>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = SwapchainKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, SwapchainKHR>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type
- createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- SwapchainKHRAllocator & swapchainKHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type
- createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename B0 = SwapchainKHRAllocator,
+ typename std::enable_if<std::is_same<typename B0::value_type, SwapchainKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type
+ createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ SwapchainKHRAllocator & swapchainKHRAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type
+ createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename SwapchainKHRAllocator = std::allocator<UniqueHandle<SwapchainKHR, Dispatch>>>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type
- createSharedSwapchainsKHRUnique(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename SwapchainKHRAllocator = std::allocator<UniqueHandle<SwapchainKHR, Dispatch>>,
- typename B = SwapchainKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<SwapchainKHR, Dispatch>>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type
- createSharedSwapchainsKHRUnique(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- SwapchainKHRAllocator & swapchainKHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<SwapchainKHR, Dispatch>>::type
- createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename SwapchainKHRAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type
+ createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename SwapchainKHRAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>,
+ typename B0 = SwapchainKHRAllocator,
+ typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<SwapchainKHR, Dispatch>>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type
+ createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ SwapchainKHRAllocator & swapchainKHRAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type
+ createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
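    // Illustrative usage sketch for the enhanced-mode declarations above, assuming the default
    // VULKAN_HPP_NAMESPACE alias vk, a valid vk::Device `device`, and filled
    // vk::SwapchainCreateInfoKHR structures `createInfo0` / `createInfo1` (one per display):
    //   std::vector<vk::SwapchainCreateInfoKHR> createInfos = { createInfo0, createInfo1 };
    //   std::vector<vk::SwapchainKHR> swapchains = device.createSharedSwapchainsKHR( createInfos );
    //   // or, when smart handles are enabled:
    //   // auto uniqueSwapchains = device.createSharedSwapchainsKHRUnique( createInfos );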
//=== VK_EXT_debug_marker ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result debugMarkerSetObjectTagEXT(
- const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result debugMarkerSetObjectNameEXT(
- const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
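    // Illustrative usage sketch for the VK_EXT_debug_marker declarations above, assuming a valid
    // vk::Device `device` and a uint64_t `objectHandle` holding the raw VkBuffer handle:
    //   vk::DebugMarkerObjectNameInfoEXT nameInfo( vk::DebugReportObjectTypeEXT::eBuffer, objectHandle, "vertex buffer" );
    //   device.debugMarkerSetObjectNameEXT( nameInfo );  // with exceptions enabled, throws vk::SystemError on failure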
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_video_queue ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionKHR>::type
- createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionKHR>::type
+ createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>>::type
- createVideoSessionKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>>::type
+ createVideoSessionKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getVideoSessionMemoryRequirementsKHR(
- VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
- uint32_t * pVideoSessionMemoryRequirementsCount,
- VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR * pVideoSessionMemoryRequirements,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+ uint32_t * pMemoryRequirementsCount,
+ VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR * pMemoryRequirements,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename VideoGetMemoryPropertiesKHRAllocator = std::allocator<VideoGetMemoryPropertiesKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ template <typename VideoSessionMemoryRequirementsKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator>>::type
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>::type
getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename VideoGetMemoryPropertiesKHRAllocator = std::allocator<VideoGetMemoryPropertiesKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = VideoGetMemoryPropertiesKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, VideoGetMemoryPropertiesKHR>::value, int>::type = 0>
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename VideoSessionMemoryRequirementsKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = VideoSessionMemoryRequirementsKHRAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, VideoSessionMemoryRequirementsKHR>::value, int>::type = 0>
VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator>>::type
- getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
- VideoGetMemoryPropertiesKHRAllocator & videoGetMemoryPropertiesKHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>::type
+ getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+ VideoSessionMemoryRequirementsKHRAllocator & videoSessionMemoryRequirementsKHRAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result bindVideoSessionMemoryKHR(
- VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
- uint32_t videoSessionBindMemoryCount,
- const VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR * pVideoSessionBindMemories,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+ uint32_t bindSessionMemoryInfoCount,
+ const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindVideoSessionMemoryKHR(
- VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR> const & videoSessionBindMemories,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR> const & bindSessionMemoryInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createVideoSessionParametersKHR(
- const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>::type
- createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>::type
+ createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>>::type
- createVideoSessionParametersKHRUnique(
- const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>>::type
+ createVideoSessionParametersKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result updateVideoSessionParametersKHR(
- VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
- const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
+ const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
+ updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ void destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_NVX_binary_import ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::CuModuleNVX>::type
- createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::CuModuleNVX>::type
+ createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>>::type
- createCuModuleNVXUnique( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>>::type
+ createCuModuleNVXUnique( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::CuFunctionNVX>::type
- createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::CuFunctionNVX>::type
+ createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>>::type
- createCuFunctionNVXUnique( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>>::type
+ createCuFunctionNVXUnique( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ void destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ void destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_NVX_image_view_handle ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- uint32_t
- getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- uint32_t
- getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView,
- VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView,
+ VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::type
- getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::type
+ getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_AMD_shader_info ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
- VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,
- size_t * pInfoSize,
- void * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
+ VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,
+ size_t * pInfoSize,
+ void * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Uint8_tAllocator = std::allocator<uint8_t>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ template <typename Uint8_tAllocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename Uint8_tAllocator = std::allocator<uint8_t>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = Uint8_tAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type = 0>
+ template <typename Uint8_tAllocator = std::allocator<uint8_t>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = Uint8_tAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type = 0>
VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
@@ -10279,17 +9886,15 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_external_memory_win32 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,
- HANDLE * pHandle,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,
+ HANDLE * pHandle,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type
- getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<HANDLE>::type getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
@@ -10300,15 +9905,14 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t localDeviceIndex,
uint32_t remoteDeviceIndex,
VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR(
- uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags
+ getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex,
+ uint32_t localDeviceIndex,
+ uint32_t remoteDeviceIndex,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_maintenance1 ===
@@ -10316,292 +9920,249 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_memory_win32 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo,
- HANDLE * pHandle,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo,
+ HANDLE * pHandle,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<HANDLE>::type
getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getMemoryWin32HandlePropertiesKHR(
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- HANDLE handle,
- VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+ HANDLE handle,
+ VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type
- getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- HANDLE handle,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type getMemoryWin32HandlePropertiesKHR(
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_memory_fd ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo,
- int * pFd,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo,
+ int * pFd,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<int>::type
- getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<int>::type getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- int fd,
- VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+ int fd,
+ VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type
- getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- int fd,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type getMemoryFdPropertiesKHR(
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_semaphore_win32 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result importSemaphoreWin32HandleKHR(
- const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importSemaphoreWin32HandleKHR(
- const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getSemaphoreWin32HandleKHR(
- const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo,
- HANDLE * pHandle,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo,
+ HANDLE * pHandle,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<HANDLE>::type
getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_semaphore_fd ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo,
- int * pFd,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo,
+ int * pFd,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<int>::type
- getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<int>::type getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
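    // Illustrative usage sketch for the VK_KHR_external_semaphore_fd declarations above, assuming
    // a valid vk::Device `device` and an exportable vk::Semaphore `semaphore`:
    //   vk::SemaphoreGetFdInfoKHR getFdInfo( semaphore, vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd );
    //   int fd = device.getSemaphoreFdKHR( getFdInfo );  // the caller owns the returned POSIX file descriptor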
//=== VK_KHR_descriptor_update_template ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplateKHR(
- const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type
- createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type
+ createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type
- createDescriptorUpdateTemplateKHRUnique(
- const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type
+ createDescriptorUpdateTemplateKHRUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDescriptorUpdateTemplateKHR(
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
const void * pData,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
DataType const & data,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_EXT_display_control ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type
- displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename ResultValueType<void>::type displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Fence * pFence,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Fence * pFence,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
- registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
+ registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
- registerEventEXTUnique( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
+ registerEventEXTUnique( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Fence * pFence,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Fence * pFence,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
- registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
+ registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
- registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
+ registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,
- uint64_t * pCounterValue,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,
+ uint64_t * pCounterValue,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<uint64_t>::type
- getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<uint64_t>::type getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_GOOGLE_display_timing ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getRefreshCycleDurationGOOGLE(
- VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type
- getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type
+ getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getPastPresentationTimingGOOGLE(
- VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- uint32_t * pPresentationTimingCount,
- VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ uint32_t * pPresentationTimingCount,
+ VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PastPresentationTimingGOOGLEAllocator = std::allocator<PastPresentationTimingGOOGLE>,
+ template <typename PastPresentationTimingGOOGLEAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type
- getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename PastPresentationTimingGOOGLEAllocator = std::allocator<PastPresentationTimingGOOGLE>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = PastPresentationTimingGOOGLEAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, PastPresentationTimingGOOGLE>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type
+ getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename PastPresentationTimingGOOGLEAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = PastPresentationTimingGOOGLEAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, PastPresentationTimingGOOGLE>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type
getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_EXT_hdr_metadata ===
@@ -10610,38 +10171,33 @@ namespace VULKAN_HPP_NAMESPACE
void setHdrMetadataEXT( uint32_t swapchainCount,
const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains,
const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setHdrMetadataEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+ void setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_create_renderpass2 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
- createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
+ createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
- createRenderPass2KHRUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
+ createRenderPass2KHRUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -10649,139 +10205,124 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getSwapchainStatusKHR(
- VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_fence_win32 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result importFenceWin32HandleKHR(
- const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo,
- HANDLE * pHandle,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo,
+ HANDLE * pHandle,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<HANDLE>::type
getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_fence_fd ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo,
- int * pFd,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo,
+ int * pFd,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<int>::type
- getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<int>::type getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_performance_query ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- releaseProfilingLockKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void releaseProfilingLockKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_debug_utils ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result setDebugUtilsObjectNameEXT(
- const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result setDebugUtilsObjectTagEXT(
- const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
//=== VK_ANDROID_external_memory_android_hardware_buffer ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getAndroidHardwareBufferPropertiesANDROID(
- const struct AHardwareBuffer * buffer,
- VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer * buffer,
+ VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type
- getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type
+ getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type
- getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<StructureChain<X, Y, Z...>>::type
+ getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getMemoryAndroidHardwareBufferANDROID(
- const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo,
- struct AHardwareBuffer ** pBuffer,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo,
+ struct AHardwareBuffer ** pBuffer,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<struct AHardwareBuffer *>::type
- getMemoryAndroidHardwareBufferANDROID(
- const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<struct AHardwareBuffer *>::type
+ getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
@@ -10790,82 +10331,72 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,
VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2KHR(
- const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
+ getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getImageMemoryRequirements2KHR(
- const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2KHR(
- const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
+ getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getBufferMemoryRequirements2KHR(
- const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getImageSparseMemoryRequirements2KHR(
- const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,
- uint32_t * pSparseMemoryRequirementCount,
- VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,
+ uint32_t * pSparseMemoryRequirementCount,
+ VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename SparseImageMemoryRequirements2Allocator = std::allocator<SparseImageMemoryRequirements2>,
+ template <typename SparseImageMemoryRequirements2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
- getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename SparseImageMemoryRequirements2Allocator = std::allocator<SparseImageMemoryRequirements2>,
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
+ getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename SparseImageMemoryRequirements2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = SparseImageMemoryRequirements2Allocator,
- typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
- getImageSparseMemoryRequirements2KHR(
- const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,
- SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename B1 = SparseImageMemoryRequirements2Allocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements2>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
+ getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,
+ SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_acceleration_structure ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createAccelerationStructureKHR(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::type
- createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::type
+ createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>>::type
- createAccelerationStructureKHRUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>>::type
+ createAccelerationStructureKHRUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -10873,184 +10404,154 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyAccelerationStructureKHR(
- VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result buildAccelerationStructuresKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- uint32_t infoCount,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ uint32_t infoCount,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result buildAccelerationStructuresKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result buildAccelerationStructuresKHR(
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result copyAccelerationStructureKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result copyAccelerationStructureToMemoryKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result
+ copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result copyMemoryToAccelerationStructureKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result
+ copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result writeAccelerationStructuresPropertiesKHR(
- uint32_t accelerationStructureCount,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- size_t dataSize,
- void * pData,
- size_t stride,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t dataSize,
+ void * pData,
+ size_t stride,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- writeAccelerationStructuresPropertiesKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- ArrayProxy<T> const & data,
- size_t stride,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename DataType,
- typename Allocator = std::allocator<DataType>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<DataType, Allocator>>::type
- writeAccelerationStructuresPropertiesKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- size_t dataSize,
- size_t stride,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename DataType, typename DataTypeAllocator = std::allocator<DataType>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type writeAccelerationStructuresPropertiesKHR(
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t dataSize,
+ size_t stride,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<DataType>::type
- writeAccelerationStructuresPropertyKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- size_t stride,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<DataType>::type writeAccelerationStructuresPropertyKHR(
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t stride,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- DeviceAddress getAccelerationStructureAddressKHR(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ DeviceAddress getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- DeviceAddress getAccelerationStructureAddressKHR(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::DeviceAddress
+ getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getAccelerationStructureCompatibilityKHR(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo,
- VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
- getAccelerationStructureCompatibilityKHR(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getAccelerationStructureBuildSizesKHR(
- VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo,
- const uint32_t * pMaxPrimitiveCounts,
- VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo,
+ const uint32_t * pMaxPrimitiveCounts,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR
- getAccelerationStructureBuildSizesKHR(
- VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo,
- ArrayProxy<const uint32_t> const & maxPrimitiveCounts VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR getAccelerationStructureBuildSizesKHR(
+ VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & maxPrimitiveCounts VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_sampler_ycbcr_conversion ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversionKHR(
- const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
- createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
+ createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
- createSamplerYcbcrConversionKHRUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
+ createSamplerYcbcrConversionKHRUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -11058,362 +10559,297 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySamplerYcbcrConversionKHR(
- VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_bind_memory2 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- bindBufferMemory2KHR( uint32_t bindInfoCount,
- const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result bindBufferMemory2KHR( uint32_t bindInfoCount,
+ const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- bindBufferMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,
+ bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- bindImageMemory2KHR( uint32_t bindInfoCount,
- const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result bindImageMemory2KHR( uint32_t bindInfoCount,
+ const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- bindImageMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos,
+ bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_EXT_image_drm_format_modifier ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getImageDrmFormatModifierPropertiesEXT(
- VULKAN_HPP_NAMESPACE::Image image,
- VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image,
+ VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type
- getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type
+ getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_EXT_validation_cache ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type
- createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type
+ createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>>::type
- createValidationCacheEXTUnique( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>>::type
+ createValidationCacheEXTUnique( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyValidationCacheEXT(
- VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache,
- uint32_t srcCacheCount,
- const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache,
+ uint32_t srcCacheCount,
+ const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches,
+ mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getValidationCacheDataEXT(
- VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
- size_t * pDataSize,
- void * pData,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
+ size_t * pDataSize,
+ void * pData,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Uint8_tAllocator = std::allocator<uint8_t>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ template <typename Uint8_tAllocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
- getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename Uint8_tAllocator = std::allocator<uint8_t>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = Uint8_tAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type = 0>
+ getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Uint8_tAllocator = std::allocator<uint8_t>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = Uint8_tAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type = 0>
VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
Uint8_tAllocator & uint8_tAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
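     (Editor's sketch, not part of the generated header diff: a minimal enhanced-mode use of the VK_EXT_validation_cache declarations above, assuming a hypothetical vk::Device `device` whose dispatcher has the extension's entry points loaded and exceptions enabled.)

         // Create an empty validation cache, then serialize its contents.
         vk::ValidationCacheCreateInfoEXT cacheCreateInfo{};                                          // no initial data
         vk::UniqueValidationCacheEXT     cache = device.createValidationCacheEXTUnique( cacheCreateInfo );  // throws on failure
         std::vector<uint8_t>             blob  = device.getValidationCacheDataEXT( *cache );         // cache bytes for reuse
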
//=== VK_NV_ray_tracing ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createAccelerationStructureNV(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type
- createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type
+ createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>>::type
- createAccelerationStructureNVUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>>::type
+ createAccelerationStructureNVUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyAccelerationStructureNV(
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getAccelerationStructureMemoryRequirementsNV(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo,
- VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR
+ getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getAccelerationStructureMemoryRequirementsNV(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result bindAccelerationStructureMemoryNV(
- uint32_t bindInfoCount,
- const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result bindAccelerationStructureMemoryNV( uint32_t bindInfoCount,
+ const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindAccelerationStructureMemoryNV(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ bindAccelerationStructureMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createRayTracingPipelinesNV(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ uint32_t createInfoCount,
+ const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PipelineAllocator = std::allocator<Pipeline>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ template <typename PipelineAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
- createRayTracingPipelinesNV(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename PipelineAllocator = std::allocator<Pipeline>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = PipelineAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
+ createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename PipelineAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Pipeline>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B0 = PipelineAllocator,
+ typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type = 0>
VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
- createRayTracingPipelinesNV(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
- createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
- VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- createRayTracingPipelinesNVUnique(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>,
- typename B = PipelineAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- createRayTracingPipelinesNVUnique(
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>>
- createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename PipelineAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
+ createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename PipelineAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>,
+ typename B0 = PipelineAllocator,
+ typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
+ createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>
+ createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesNV(
- VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- void * pData,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t firstGroup,
+ uint32_t groupCount,
+ size_t dataSize,
+ void * pData,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- ArrayProxy<T> const & data,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename DataType,
- typename Allocator = std::allocator<DataType>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<DataType, Allocator>>::type
+ template <typename DataType, typename DataTypeAllocator = std::allocator<DataType>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type
getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
uint32_t firstGroup,
uint32_t groupCount,
size_t dataSize,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<DataType>::type
- getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<DataType>::type getRayTracingShaderGroupHandleNV(
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getAccelerationStructureHandleNV(
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
- size_t dataSize,
- void * pData,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
+ size_t dataSize,
+ void * pData,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
- ArrayProxy<T> const & data,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename DataType,
- typename Allocator = std::allocator<DataType>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<DataType, Allocator>>::type
- getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
- size_t dataSize,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename DataType, typename DataTypeAllocator = std::allocator<DataType>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type getAccelerationStructureHandleNV(
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<DataType>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<DataType>::type
getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t shader,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t shader,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t shader,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
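     (Editor's sketch, not part of the generated header diff: fetching NV ray-tracing shader group handles through the enhanced wrapper declared above, assuming hypothetical names `device`, a compiled NV ray-tracing `pipeline`, its `groupCount`, and `handleSize` taken from vk::PhysicalDeviceRayTracingPropertiesNV::shaderGroupHandleSize.)

         // One handle per shader group, packed into a byte vector sized by the caller.
         std::vector<uint8_t> handles =
             device.getRayTracingShaderGroupHandlesNV<uint8_t>( pipeline, 0, groupCount, groupCount * handleSize );
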
//=== VK_KHR_maintenance3 ===
@@ -11421,179 +10857,154 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR(
- const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
+ getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getDescriptorSetLayoutSupportKHR(
- const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_EXT_external_memory_host ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getMemoryHostPointerPropertiesEXT(
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- const void * pHostPointer,
- VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+ const void * pHostPointer,
+ VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type
getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
const void * pHostPointer,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_EXT_calibrated_timestamps ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getCalibratedTimestampsEXT(
- uint32_t timestampCount,
- const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT * pTimestampInfos,
- uint64_t * pTimestamps,
- uint64_t * pMaxDeviation,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getCalibratedTimestampsEXT( uint32_t timestampCount,
+ const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT * pTimestampInfos,
+ uint64_t * pTimestamps,
+ uint64_t * pMaxDeviation,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Uint64_tAllocator = std::allocator<uint64_t>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type
- getCalibratedTimestampsEXT(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename Uint64_tAllocator = std::allocator<uint64_t>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = Uint64_tAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, uint64_t>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type
- getCalibratedTimestampsEXT(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
- Uint64_tAllocator & uint64_tAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::pair<uint64_t, uint64_t>>::type
+ template <typename Uint64_tAllocator = std::allocator<uint64_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type
+ getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Uint64_tAllocator = std::allocator<uint64_t>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B0 = Uint64_tAllocator,
+ typename std::enable_if<std::is_same<typename B0::value_type, uint64_t>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type
+ getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
+ Uint64_tAllocator & uint64_tAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::pair<uint64_t, uint64_t>>::type
getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT & timestampInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
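     (Editor's sketch, not part of the generated header diff: the single-info VK_EXT_calibrated_timestamps wrapper above, assuming a hypothetical `device` with the extension enabled and C++17 for the structured binding.)

         vk::CalibratedTimestampInfoEXT info{ vk::TimeDomainEXT::eDevice };
         auto [timestamp, maxDeviation] = device.getCalibratedTimestampEXT( info );  // device tick and worst-case deviation
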
//=== VK_KHR_timeline_semaphore ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getSemaphoreCounterValueKHR(
- VULKAN_HPP_NAMESPACE::Semaphore semaphore,
- uint64_t * pValue,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+ uint64_t * pValue,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<uint64_t>::type
- getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<uint64_t>::type
+ getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,
- uint64_t timeout,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,
+ uint64_t timeout,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo,
- uint64_t timeout,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo,
+ uint64_t timeout,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
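     (Editor's sketch, not part of the generated header diff: waiting on a timeline semaphore through the KHR wrappers above, assuming a hypothetical `device` with VK_KHR_timeline_semaphore enabled and a timeline vk::Semaphore lvalue `timelineSemaphore`.)

         uint64_t currentValue = device.getSemaphoreCounterValueKHR( timelineSemaphore );
         uint64_t waitValue    = currentValue + 1;
         vk::SemaphoreWaitInfo waitInfo{};
         waitInfo.semaphoreCount = 1;
         waitInfo.pSemaphores    = &timelineSemaphore;
         waitInfo.pValues        = &waitValue;
         vk::Result waitResult = device.waitSemaphoresKHR( waitInfo, UINT64_MAX );  // eSuccess or eTimeout, no throw on timeout
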
//=== VK_INTEL_performance_query ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result initializePerformanceApiINTEL(
- const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void uninitializePerformanceApiINTEL( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ void uninitializePerformanceApiINTEL( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result acquirePerformanceConfigurationINTEL(
- const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo,
- VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo,
+ VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type
- acquirePerformanceConfigurationINTEL(
- const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type
+ acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>>::type
- acquirePerformanceConfigurationINTELUnique(
- const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>>::type
+ acquirePerformanceConfigurationINTELUnique( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result releasePerformanceConfigurationINTEL(
- VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type releasePerformanceConfigurationINTEL(
- VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getPerformanceParameterINTEL(
- VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,
- VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,
+ VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type
getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_AMD_display_native_hdr ===
@@ -11601,36 +11012,33 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain,
VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_buffer_device_address ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- DeviceAddress
- getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ DeviceAddress getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- DeviceAddress
- getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::DeviceAddress getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_present_wait ===
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- uint64_t presentId,
- uint64_t timeout,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#else
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
uint64_t presentId,
uint64_t timeout,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ uint64_t presentId,
+ uint64_t timeout,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
@@ -11638,75 +11046,63 @@ namespace VULKAN_HPP_NAMESPACE
# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result acquireFullScreenExclusiveModeEXT(
- VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result releaseFullScreenExclusiveModeEXT(
- VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModes2EXT(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_buffer_device_address ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- DeviceAddress
- getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ DeviceAddress getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- DeviceAddress
- getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::DeviceAddress getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
uint64_t getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
uint64_t getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_EXT_host_query_reset ===
@@ -11715,165 +11111,141 @@ namespace VULKAN_HPP_NAMESPACE
void resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t firstQuery,
uint32_t queryCount,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_deferred_host_operations ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createDeferredOperationKHR(
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::type
- createDeferredOperationKHR( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::type
+ createDeferredOperationKHR( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>>::type
- createDeferredOperationKHRUnique( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>>::type
+ createDeferredOperationKHRUnique( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDeferredOperationKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
uint32_t getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getDeferredOperationResultKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result
+ getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_pipeline_executable_properties ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getPipelineExecutablePropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo,
- uint32_t * pExecutableCount,
- VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo,
+ uint32_t * pExecutableCount,
+ VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PipelineExecutablePropertiesKHRAllocator = std::allocator<PipelineExecutablePropertiesKHR>,
+ template <typename PipelineExecutablePropertiesKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<
- std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename PipelineExecutablePropertiesKHRAllocator = std::allocator<PipelineExecutablePropertiesKHR>,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename PipelineExecutablePropertiesKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = PipelineExecutablePropertiesKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutablePropertiesKHR>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<
- std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
- getPipelineExecutablePropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo,
- PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename B1 = PipelineExecutablePropertiesKHRAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, PipelineExecutablePropertiesKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
+ getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo,
+ PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getPipelineExecutableStatisticsKHR(
- const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
- uint32_t * pStatisticCount,
- VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
+ uint32_t * pStatisticCount,
+ VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PipelineExecutableStatisticKHRAllocator = std::allocator<PipelineExecutableStatisticKHR>,
+ template <typename PipelineExecutableStatisticKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<
- std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename PipelineExecutableStatisticKHRAllocator = std::allocator<PipelineExecutableStatisticKHR>,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename PipelineExecutableStatisticKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = PipelineExecutableStatisticKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableStatisticKHR>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<
- std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
- getPipelineExecutableStatisticsKHR(
- const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,
- PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename B1 = PipelineExecutableStatisticKHRAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, PipelineExecutableStatisticKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
+ getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,
+ PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getPipelineExecutableInternalRepresentationsKHR(
- const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
- uint32_t * pInternalRepresentationCount,
- VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
+ uint32_t * pInternalRepresentationCount,
+ VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PipelineExecutableInternalRepresentationKHRAllocator =
- std::allocator<PipelineExecutableInternalRepresentationKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,
- PipelineExecutableInternalRepresentationKHRAllocator>>::type
- getPipelineExecutableInternalRepresentationsKHR(
- const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename PipelineExecutableInternalRepresentationKHRAllocator =
- std::allocator<PipelineExecutableInternalRepresentationKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = PipelineExecutableInternalRepresentationKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableInternalRepresentationKHR>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,
- PipelineExecutableInternalRepresentationKHRAllocator>>::type
+ template <typename PipelineExecutableInternalRepresentationKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<
+ std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type
+ getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename PipelineExecutableInternalRepresentationKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = PipelineExecutableInternalRepresentationKHRAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, PipelineExecutableInternalRepresentationKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<
+ std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type
getPipelineExecutableInternalRepresentationsKHR(
const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,
PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator,
@@ -11883,42 +11255,36 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_device_generated_commands ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getGeneratedCommandsMemoryRequirementsNV(
- const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo,
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo,
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getGeneratedCommandsMemoryRequirementsNV(
- const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
+ getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getGeneratedCommandsMemoryRequirementsNV(
- const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createIndirectCommandsLayoutNV(
- const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::type
- createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::type
+ createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>>::type
- createIndirectCommandsLayoutNVUnique( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>>::type
+ createIndirectCommandsLayoutNVUnique( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -11926,84 +11292,72 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyIndirectCommandsLayoutNV(
- VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_EXT_private_data ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::type
- createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::type
+ createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>>::type
- createPrivateDataSlotEXTUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>>::type
+ createPrivateDataSlotEXTUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyPrivateDataSlotEXT(
- VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
- uint64_t objectHandle,
- VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
- uint64_t data,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
+ uint64_t objectHandle,
+ VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
+ uint64_t data,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type
- setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
- uint64_t objectHandle,
- VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
- uint64_t data,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename ResultValueType<void>::type setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
+ uint64_t objectHandle,
+ VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
+ uint64_t data,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -12011,196 +11365,202 @@ namespace VULKAN_HPP_NAMESPACE
uint64_t objectHandle,
VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
uint64_t * pData,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD uint64_t
- getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
- uint64_t objectHandle,
- VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD uint64_t getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
+ uint64_t objectHandle,
+ VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ //=== VK_EXT_metal_objects ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void exportMetalObjectsEXT( VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT * pMetalObjectsInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT
+ exportMetalObjectsEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ exportMetalObjectsEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+ //=== VK_EXT_image_compression_control ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image,
+ const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT * pSubresource,
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT * pLayout,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT
+ getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image,
+ const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image,
+ const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_EXT_device_fault ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getFaultInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts,
+ VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT * pFaultInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>>
+ getFaultInfoEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_ray_tracing_pipeline ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createRayTracingPipelinesKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ uint32_t createInfoCount,
+ const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PipelineAllocator = std::allocator<Pipeline>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ template <typename PipelineAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
- createRayTracingPipelinesKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename PipelineAllocator = std::allocator<Pipeline>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = PipelineAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
+ createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename PipelineAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Pipeline>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B0 = PipelineAllocator,
+ typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type = 0>
VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
- createRayTracingPipelinesKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
- createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
- VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- createRayTracingPipelinesKHRUnique(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>,
- typename B = PipelineAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
- createRayTracingPipelinesKHRUnique(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
- PipelineAllocator & pipelineAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>>
- createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
- const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename PipelineAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
+ createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename PipelineAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>,
+ typename B0 = PipelineAllocator,
+ typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
+ createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+ PipelineAllocator & pipelineAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>
+ createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+ const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesKHR(
- VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- void * pData,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t firstGroup,
+ uint32_t groupCount,
+ size_t dataSize,
+ void * pData,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- ArrayProxy<T> const & data,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename DataType,
- typename Allocator = std::allocator<DataType>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<DataType, Allocator>>::type
+ template <typename DataType, typename DataTypeAllocator = std::allocator<DataType>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type
getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
uint32_t firstGroup,
uint32_t groupCount,
size_t dataSize,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<DataType>::type
- getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<DataType>::type getRayTracingShaderGroupHandleKHR(
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getRayTracingCaptureReplayShaderGroupHandlesKHR(
- VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- void * pData,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ VULKAN_HPP_NODISCARD Result
getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
uint32_t firstGroup,
uint32_t groupCount,
- ArrayProxy<T> const & data,
- Dispatch const & d
- VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename DataType,
- typename Allocator = std::allocator<DataType>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<DataType, Allocator>>::type
+ size_t dataSize,
+ void * pData,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename DataType, typename DataTypeAllocator = std::allocator<DataType>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type
getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
uint32_t firstGroup,
uint32_t groupCount,
size_t dataSize,
- Dispatch const & d
- VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<DataType>::type
- getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- Dispatch const & d
- VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<DataType>::type getRayTracingCaptureReplayShaderGroupHandleKHR(
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
DeviceSize getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
uint32_t group,
VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_external_memory ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getMemoryZirconHandleFUCHSIA(
- const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
- zx_handle_t * pZirconHandle,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
+ zx_handle_t * pZirconHandle,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<zx_handle_t>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<zx_handle_t>::type
getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getMemoryZirconHandlePropertiesFUCHSIA(
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- zx_handle_t zirconHandle,
- VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+ zx_handle_t zirconHandle,
+ VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::type
getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
zx_handle_t zirconHandle,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_FUCHSIA*/
@@ -12208,26 +11568,25 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_FUCHSIA_external_semaphore ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result importSemaphoreZirconHandleFUCHSIA(
- const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importSemaphoreZirconHandleFUCHSIA(
- const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getSemaphoreZirconHandleFUCHSIA(
- const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
- zx_handle_t * pZirconHandle,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
+ zx_handle_t * pZirconHandle,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<zx_handle_t>::type getSemaphoreZirconHandleFUCHSIA(
- const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<zx_handle_t>::type
+ getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_FUCHSIA*/
@@ -12235,124 +11594,259 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_FUCHSIA_buffer_collection ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createBufferCollectionFUCHSIA(
- const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA * pCollection,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA * pCollection,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA>::type
- createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA>::type
+ createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA, Dispatch>>::type
- createBufferCollectionFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA, Dispatch>>::type
+ createBufferCollectionFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result setBufferCollectionImageConstraintsFUCHSIA(
- VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
- const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA * pImageConstraintsInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
+ const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA * pImageConstraintsInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- setBufferCollectionImageConstraintsFUCHSIA(
- VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
- const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
+ const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result setBufferCollectionBufferConstraintsFUCHSIA(
- VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
- const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
+ const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- setBufferCollectionBufferConstraintsFUCHSIA(
- VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
- const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
+ const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ void destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getBufferCollectionPropertiesFUCHSIA(
- VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
- VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
+ VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA * pProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA>::type
getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_FUCHSIA*/
//=== VK_HUAWEI_subpass_shading ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getSubpassShadingMaxWorkgroupSizeHUAWEI(
- VULKAN_HPP_NAMESPACE::RenderPass renderpass,
- VULKAN_HPP_NAMESPACE::Extent2D * pMaxWorkgroupSize,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass,
+ VULKAN_HPP_NAMESPACE::Extent2D * pMaxWorkgroupSize,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD ResultValue<VULKAN_HPP_NAMESPACE::Extent2D>
- getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_NV_external_memory_rdma ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo,
- VULKAN_HPP_NAMESPACE::RemoteAddressNV * pAddress,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo,
+ VULKAN_HPP_NAMESPACE::RemoteAddressNV * pAddress,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::RemoteAddressNV>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::RemoteAddressNV>::type
getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_EXT_pipeline_properties ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT * pPipelineInfo,
+ VULKAN_HPP_NAMESPACE::BaseOutStructure * pPipelineProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::BaseOutStructure>::type
+ getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_EXT_opacity_micromap ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromap,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::MicromapEXT>::type
+ createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::MicromapEXT, Dispatch>>::type
+ createMicromapEXTUnique( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ uint32_t infoCount,
+ const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result
+ buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result writeMicromapsPropertiesEXT( uint32_t micromapCount,
+ const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t dataSize,
+ void * pData,
+ size_t stride,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename DataType, typename DataTypeAllocator = std::allocator<DataType>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type
+ writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t dataSize,
+ size_t stride,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<DataType>::type
+ writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t stride,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT * pVersionInfo,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
+ getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
+ const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pBuildInfo,
+ VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT * pSizeInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT
+ getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
+ const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_EXT_pageable_device_local_memory ===
@@ -12360,67 +11854,203 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setMemoryPriorityEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
float priority,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_maintenance4 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo,
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirementsKHR(
- const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
+ getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getBufferMemoryRequirementsKHR(
- const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirementsKHR(
- const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
+ getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getImageMemoryRequirementsKHR(
- const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getImageSparseMemoryRequirementsKHR(
- const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
- uint32_t * pSparseMemoryRequirementCount,
- VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
+ uint32_t * pSparseMemoryRequirementCount,
+ VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename SparseImageMemoryRequirements2Allocator = std::allocator<SparseImageMemoryRequirements2>,
+ template <typename SparseImageMemoryRequirements2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
- getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename SparseImageMemoryRequirements2Allocator = std::allocator<SparseImageMemoryRequirements2>,
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
+ getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename SparseImageMemoryRequirements2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = SparseImageMemoryRequirements2Allocator,
- typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
- getImageSparseMemoryRequirementsKHR(
- const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
- SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename B1 = SparseImageMemoryRequirements2Allocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements2>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
+ getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
+ SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_VALVE_descriptor_set_host_mapping ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE * pBindingReference,
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE * pHostMapping,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE
+ getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
+ void ** ppData,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD void * getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_EXT_shader_module_identifier ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
+ VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT
+ getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo,
+ VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT
+ getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_NV_optical_flow ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV * pSession,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV>::type
+ createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV, Dispatch>>::type
+ createOpticalFlowSessionNVUnique( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint,
+ VULKAN_HPP_NAMESPACE::ImageView view,
+ VULKAN_HPP_NAMESPACE::ImageLayout layout,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+ bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint,
+ VULKAN_HPP_NAMESPACE::ImageView view,
+ VULKAN_HPP_NAMESPACE::ImageLayout layout,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ //=== VK_QCOM_tile_properties ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
+ uint32_t * pPropertiesCount,
+ VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename TilePropertiesQCOMAllocator = std::allocator<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator>>::type
+ getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename TilePropertiesQCOMAllocator = std::allocator<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = TilePropertiesQCOMAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, TilePropertiesQCOM>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator>>::type
+ getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
+ TilePropertiesQCOMAllocator & tilePropertiesQCOMAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDevice() const VULKAN_HPP_NOEXCEPT
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,
+ VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::TilePropertiesQCOM
+ getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ operator VkDevice() const VULKAN_HPP_NOEXCEPT
{
return m_device;
}
@@ -12438,16 +12068,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkDevice m_device = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Device ) == sizeof( VkDevice ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Device>::value,
- "Device is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eDevice>
- {
- using type = VULKAN_HPP_NAMESPACE::Device;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDevice>
@@ -12456,8 +12076,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice>
{
using Type = VULKAN_HPP_NAMESPACE::Device;
};
@@ -12474,17 +12093,14 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkDisplayModeKHR;
using NativeType = VkDisplayModeKHR;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR;
public:
VULKAN_HPP_CONSTEXPR DisplayModeKHR() = default;
VULKAN_HPP_CONSTEXPR DisplayModeKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT DisplayModeKHR( VkDisplayModeKHR displayModeKHR ) VULKAN_HPP_NOEXCEPT
- : m_displayModeKHR( displayModeKHR )
- {}
+ VULKAN_HPP_TYPESAFE_EXPLICIT DisplayModeKHR( VkDisplayModeKHR displayModeKHR ) VULKAN_HPP_NOEXCEPT : m_displayModeKHR( displayModeKHR ) {}
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
DisplayModeKHR & operator=( VkDisplayModeKHR displayModeKHR ) VULKAN_HPP_NOEXCEPT
@@ -12537,17 +12153,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkDisplayModeKHR m_displayModeKHR = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeKHR ) == sizeof( VkDisplayModeKHR ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::value,
- "DisplayModeKHR is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eDisplayModeKHR>
- {
- using type = VULKAN_HPP_NAMESPACE::DisplayModeKHR;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR>
@@ -12556,8 +12161,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR>
{
using Type = VULKAN_HPP_NAMESPACE::DisplayModeKHR;
};
@@ -12584,25 +12188,20 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkPhysicalDevice;
using NativeType = VkPhysicalDevice;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice;
public:
- VULKAN_HPP_CONSTEXPR PhysicalDevice() = default;
- VULKAN_HPP_CONSTEXPR PhysicalDevice( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT PhysicalDevice( VkPhysicalDevice physicalDevice ) VULKAN_HPP_NOEXCEPT
- : m_physicalDevice( physicalDevice )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDevice() = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDevice( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+ PhysicalDevice( VkPhysicalDevice physicalDevice ) VULKAN_HPP_NOEXCEPT : m_physicalDevice( physicalDevice ) {}
-#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
PhysicalDevice & operator=( VkPhysicalDevice physicalDevice ) VULKAN_HPP_NOEXCEPT
{
m_physicalDevice = physicalDevice;
return *this;
}
-#endif
PhysicalDevice & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
@@ -12633,146 +12232,133 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures
- getFeatures( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ getFeatures( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties
- getFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::ImageType type,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
- VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
- VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::ImageType type,
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
+ VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type
getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
VULKAN_HPP_NAMESPACE::ImageType type,
VULKAN_HPP_NAMESPACE::ImageTiling tiling,
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties
- getProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ getProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount,
- VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount,
+ VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename QueueFamilyPropertiesAllocator = std::allocator<QueueFamilyProperties>,
+ template <typename QueueFamilyPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::QueueFamilyProperties>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator>
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator>
getQueueFamilyProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename QueueFamilyPropertiesAllocator = std::allocator<QueueFamilyProperties>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = QueueFamilyPropertiesAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator>
+ template <typename QueueFamilyPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::QueueFamilyProperties>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = QueueFamilyPropertiesAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, QueueFamilyProperties>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator>
getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties
- getMemoryProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ getMemoryProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::Device * pDevice,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::Device * pDevice,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type
- createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type
+ createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>>::type
- createDeviceUnique( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>>::type
+ createDeviceUnique( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result enumerateDeviceExtensionProperties(
- const char * pLayerName,
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result enumerateDeviceExtensionProperties( const char * pLayerName,
+ uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename ExtensionPropertiesAllocator = std::allocator<ExtensionProperties>,
+ template <typename ExtensionPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::ExtensionProperties>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type
- enumerateDeviceExtensionProperties( Optional<const std::string> layerName
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename ExtensionPropertiesAllocator = std::allocator<ExtensionProperties>,
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type
+ enumerateDeviceExtensionProperties( Optional<const std::string> layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename ExtensionPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::ExtensionProperties>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = ExtensionPropertiesAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, ExtensionProperties>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type
+ typename B1 = ExtensionPropertiesAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, ExtensionProperties>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type
enumerateDeviceExtensionProperties( Optional<const std::string> layerName,
ExtensionPropertiesAllocator & extensionPropertiesAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result enumerateDeviceLayerProperties(
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result enumerateDeviceLayerProperties( uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename LayerPropertiesAllocator = std::allocator<LayerProperties>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type
+ template <typename LayerPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type
enumerateDeviceLayerProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename LayerPropertiesAllocator = std::allocator<LayerProperties>,
+ template <typename LayerPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::LayerProperties>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = LayerPropertiesAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, LayerProperties>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type
- enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename B1 = LayerPropertiesAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, LayerProperties>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type
+ enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -12783,106 +12369,97 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ImageTiling tiling,
uint32_t * pPropertyCount,
VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename SparseImageFormatPropertiesAllocator = std::allocator<SparseImageFormatProperties>,
+ template <typename SparseImageFormatPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
VULKAN_HPP_NAMESPACE::ImageType type,
VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
VULKAN_HPP_NAMESPACE::ImageTiling tiling,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename SparseImageFormatPropertiesAllocator = std::allocator<SparseImageFormatProperties>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = SparseImageFormatPropertiesAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename SparseImageFormatPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = SparseImageFormatPropertiesAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, SparseImageFormatProperties>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
VULKAN_HPP_NAMESPACE::ImageType type,
VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
VULKAN_HPP_NAMESPACE::ImageTiling tiling,
SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_VERSION_1_1 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2
- getFeatures2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ getFeatures2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
- getFeatures2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getFeatures2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2
- getProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ getProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
- getProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2
- getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
- getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getImageFormatProperties2(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
- VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type
getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<StructureChain<X, Y, Z...>>::type
getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount,
VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename QueueFamilyProperties2Allocator = std::allocator<QueueFamilyProperties2>,
+ template <typename QueueFamilyProperties2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
getQueueFamilyProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename QueueFamilyProperties2Allocator = std::allocator<QueueFamilyProperties2>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = QueueFamilyProperties2Allocator,
- typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties2>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
+ template <typename QueueFamilyProperties2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = QueueFamilyProperties2Allocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, QueueFamilyProperties2>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename StructureChain,
@@ -12891,106 +12468,96 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD std::vector<StructureChain, StructureChainAllocator>
getQueueFamilyProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename StructureChain,
- typename StructureChainAllocator = std::allocator<StructureChain>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = StructureChainAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, StructureChain>::value, int>::type = 0>
+ typename StructureChainAllocator = std::allocator<StructureChain>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = StructureChainAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, StructureChain>::value, int>::type = 0>
VULKAN_HPP_NODISCARD std::vector<StructureChain, StructureChainAllocator>
- getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
- getMemoryProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ getMemoryProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
- getMemoryProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ getMemoryProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getSparseImageFormatProperties2(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
+ uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename SparseImageFormatProperties2Allocator = std::allocator<SparseImageFormatProperties2>,
+ template <typename SparseImageFormatProperties2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
- getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename SparseImageFormatProperties2Allocator = std::allocator<SparseImageFormatProperties2>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = SparseImageFormatProperties2Allocator,
- typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties2>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
- getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
- SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
+ getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename SparseImageFormatProperties2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = SparseImageFormatProperties2Allocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, SparseImageFormatProperties2>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
+ getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
+ SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getExternalBufferProperties(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
- VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
+ VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferProperties(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties
+ getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
- VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFenceProperties(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties
+ getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getExternalSemaphoreProperties(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphoreProperties(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties
+ getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_VERSION_1_3 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getToolProperties( uint32_t * pToolCount,
- VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getToolProperties( uint32_t * pToolCount,
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PhysicalDeviceToolPropertiesAllocator = std::allocator<PhysicalDeviceToolProperties>,
+ template <typename PhysicalDeviceToolPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type
getToolProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename PhysicalDeviceToolPropertiesAllocator = std::allocator<PhysicalDeviceToolProperties>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = PhysicalDeviceToolPropertiesAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceToolProperties>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type
+ template <typename PhysicalDeviceToolPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = PhysicalDeviceToolPropertiesAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceToolProperties>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type
getToolProperties( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -12998,70 +12565,62 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_surface ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getSurfaceSupportKHR( uint32_t queueFamilyIndex,
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- VULKAN_HPP_NAMESPACE::Bool32 * pSupported,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getSurfaceSupportKHR( uint32_t queueFamilyIndex,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ VULKAN_HPP_NAMESPACE::Bool32 * pSupported,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type
- getSurfaceSupportKHR( uint32_t queueFamilyIndex,
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type getSurfaceSupportKHR(
+ uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getSurfaceCapabilitiesKHR(
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type
- getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type
+ getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- uint32_t * pSurfaceFormatCount,
- VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ uint32_t * pSurfaceFormatCount,
+ VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename SurfaceFormatKHRAllocator = std::allocator<SurfaceFormatKHR>,
+ template <typename SurfaceFormatKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type
getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename SurfaceFormatKHRAllocator = std::allocator<SurfaceFormatKHR>,
+ template <typename SurfaceFormatKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = SurfaceFormatKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, SurfaceFormatKHR>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type
+ typename B1 = SurfaceFormatKHRAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, SurfaceFormatKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type
getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getSurfacePresentModesKHR(
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- uint32_t * pPresentModeCount,
- VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ uint32_t * pPresentModeCount,
+ VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PresentModeKHRAllocator = std::allocator<PresentModeKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
+ template <typename PresentModeKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type
getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename PresentModeKHRAllocator = std::allocator<PresentModeKHR>,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename PresentModeKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PresentModeKHR>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = PresentModeKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
+ typename B1 = PresentModeKHRAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, PresentModeKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type
getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
PresentModeKHRAllocator & presentModeKHRAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
@@ -13070,158 +12629,129 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_swapchain ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- uint32_t * pRectCount,
- VULKAN_HPP_NAMESPACE::Rect2D * pRects,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ uint32_t * pRectCount,
+ VULKAN_HPP_NAMESPACE::Rect2D * pRects,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename Rect2DAllocator = std::allocator<Rect2D>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<Rect2D, Rect2DAllocator>>::type
- getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename Rect2DAllocator = std::allocator<Rect2D>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = Rect2DAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, Rect2D>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<Rect2D, Rect2DAllocator>>::type
- getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- Rect2DAllocator & rect2DAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Rect2DAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Rect2D>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator>>::type
+ getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename Rect2DAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Rect2D>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = Rect2DAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, Rect2D>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator>>::type getPresentRectanglesKHR(
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Rect2DAllocator & rect2DAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_display ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getDisplayPropertiesKHR( uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getDisplayPropertiesKHR( uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename DisplayPropertiesKHRAllocator = std::allocator<DisplayPropertiesKHR>,
+ template <typename DisplayPropertiesKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type
getDisplayPropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename DisplayPropertiesKHRAllocator = std::allocator<DisplayPropertiesKHR>,
+ template <typename DisplayPropertiesKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = DisplayPropertiesKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, DisplayPropertiesKHR>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type
+ typename B1 = DisplayPropertiesKHRAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, DisplayPropertiesKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type
getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getDisplayPlanePropertiesKHR(
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getDisplayPlanePropertiesKHR( uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename DisplayPlanePropertiesKHRAllocator = std::allocator<DisplayPlanePropertiesKHR>,
+ template <typename DisplayPlanePropertiesKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type
getDisplayPlanePropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename DisplayPlanePropertiesKHRAllocator = std::allocator<DisplayPlanePropertiesKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = DisplayPlanePropertiesKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, DisplayPlanePropertiesKHR>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type
+ template <typename DisplayPlanePropertiesKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = DisplayPlanePropertiesKHRAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, DisplayPlanePropertiesKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type
getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getDisplayPlaneSupportedDisplaysKHR(
- uint32_t planeIndex,
- uint32_t * pDisplayCount,
- VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex,
+ uint32_t * pDisplayCount,
+ VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename DisplayKHRAllocator = std::allocator<DisplayKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayKHR, DisplayKHRAllocator>>::type
- getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename DisplayKHRAllocator = std::allocator<DisplayKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = DisplayKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, DisplayKHR>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayKHR, DisplayKHRAllocator>>::type
- getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex,
- DisplayKHRAllocator & displayKHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename DisplayKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DisplayKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator>>::type
+ getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename DisplayKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DisplayKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = DisplayKHRAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, DisplayKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator>>::type getDisplayPlaneSupportedDisplaysKHR(
+ uint32_t planeIndex, DisplayKHRAllocator & displayKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getDisplayModePropertiesKHR(
- VULKAN_HPP_NAMESPACE::DisplayKHR display,
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename DisplayModePropertiesKHRAllocator = std::allocator<DisplayModePropertiesKHR>,
+ template <typename DisplayModePropertiesKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type
- getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename DisplayModePropertiesKHRAllocator = std::allocator<DisplayModePropertiesKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = DisplayModePropertiesKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, DisplayModePropertiesKHR>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type
+ getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename DisplayModePropertiesKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = DisplayModePropertiesKHRAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, DisplayModePropertiesKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type
getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type
- createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type
+ createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>>::type
- createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>>::type
+ createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilitiesKHR(
- VULKAN_HPP_NAMESPACE::DisplayModeKHR mode,
- uint32_t planeIndex,
- VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode,
+ uint32_t planeIndex,
+ VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type
- getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode,
- uint32_t planeIndex,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type getDisplayPlaneCapabilitiesKHR(
+ VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#if defined( VK_USE_PLATFORM_XLIB_KHR )
@@ -13231,15 +12761,13 @@ namespace VULKAN_HPP_NAMESPACE
Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex,
Display * dpy,
VisualID visualID,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex,
- Display & dpy,
- VisualID visualID,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex,
+ Display & dpy,
+ VisualID visualID,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
@@ -13250,15 +12778,13 @@ namespace VULKAN_HPP_NAMESPACE
Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex,
xcb_connection_t * connection,
xcb_visualid_t visual_id,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex,
- xcb_connection_t & connection,
- xcb_visualid_t visual_id,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex,
+ xcb_connection_t & connection,
+ xcb_visualid_t visual_id,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_XCB_KHR*/
@@ -13268,14 +12794,12 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex,
struct wl_display * display,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex,
- struct wl_display & display,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex,
+ struct wl_display & display,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
@@ -13283,282 +12807,249 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_win32_surface ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_video_queue ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile,
- VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile,
+ VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::type
- getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR & videoProfile,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::type
+ getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type
- getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR & videoProfile,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<StructureChain<X, Y, Z...>>::type
+ getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getVideoFormatPropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo,
- uint32_t * pVideoFormatPropertyCount,
- VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo,
+ uint32_t * pVideoFormatPropertyCount,
+ VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename VideoFormatPropertiesKHRAllocator = std::allocator<VideoFormatPropertiesKHR>,
+ template <typename VideoFormatPropertiesKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename VideoFormatPropertiesKHRAllocator = std::allocator<VideoFormatPropertiesKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = VideoFormatPropertiesKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, VideoFormatPropertiesKHR>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename VideoFormatPropertiesKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = VideoFormatPropertiesKHRAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, VideoFormatPropertiesKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,
- VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_NV_external_memory_capabilities ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getExternalImageFormatPropertiesNV(
- VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::ImageType type,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
- VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType,
- VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::ImageType type,
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
+ VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType,
+ VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type
- getExternalImageFormatPropertiesNV(
- VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::ImageType type,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
- VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type
+ getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::ImageType type,
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
+ VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_get_physical_device_properties2 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2
- getFeatures2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ getFeatures2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
- getFeatures2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getFeatures2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2
- getProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ getProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
- getProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2
- getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
- getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getImageFormatProperties2KHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
- VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type
getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<StructureChain<X, Y, Z...>>::type
getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount,
VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename QueueFamilyProperties2Allocator = std::allocator<QueueFamilyProperties2>,
+ template <typename QueueFamilyProperties2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
- getQueueFamilyProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename QueueFamilyProperties2Allocator = std::allocator<QueueFamilyProperties2>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = QueueFamilyProperties2Allocator,
- typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties2>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
- getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
+ getQueueFamilyProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename QueueFamilyProperties2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = QueueFamilyProperties2Allocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, QueueFamilyProperties2>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
+ getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename StructureChain,
typename StructureChainAllocator = std::allocator<StructureChain>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD std::vector<StructureChain, StructureChainAllocator>
- getQueueFamilyProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ getQueueFamilyProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename StructureChain,
- typename StructureChainAllocator = std::allocator<StructureChain>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = StructureChainAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, StructureChain>::value, int>::type = 0>
+ typename StructureChainAllocator = std::allocator<StructureChain>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = StructureChainAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, StructureChain>::value, int>::type = 0>
VULKAN_HPP_NODISCARD std::vector<StructureChain, StructureChainAllocator>
- getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void
- getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
- getMemoryProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ getMemoryProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
- getMemoryProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ getMemoryProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getSparseImageFormatProperties2KHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
+ uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename SparseImageFormatProperties2Allocator = std::allocator<SparseImageFormatProperties2>,
+ template <typename SparseImageFormatProperties2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
- getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename SparseImageFormatProperties2Allocator = std::allocator<SparseImageFormatProperties2>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = SparseImageFormatProperties2Allocator,
- typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties2>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
- getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
- SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
+ getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename SparseImageFormatProperties2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = SparseImageFormatProperties2Allocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, SparseImageFormatProperties2>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
+ getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
+ SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_external_memory_capabilities ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getExternalBufferPropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
- VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
+ VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferPropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties
+ getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_external_semaphore_capabilities ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getExternalSemaphorePropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties
+ getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_EXT_direct_mode_display ===
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Result releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<void>::type
- releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ void releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
//=== VK_EXT_acquire_xlib_display ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- acquireXlibDisplayEXT( Display * dpy,
- VULKAN_HPP_NAMESPACE::DisplayKHR display,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result acquireXlibDisplayEXT( Display * dpy,
+ VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- acquireXlibDisplayEXT( Display & dpy,
- VULKAN_HPP_NAMESPACE::DisplayKHR display,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getRandROutputDisplayEXT( Display * dpy,
- RROutput rrOutput,
- VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getRandROutputDisplayEXT( Display * dpy,
+ RROutput rrOutput,
+ VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type getRandROutputDisplayEXT(
- Display & dpy, RROutput rrOutput, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
+ getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
- getRandROutputDisplayEXTUnique( Display & dpy,
- RROutput rrOutput,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
+ getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
@@ -13566,203 +13057,193 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_display_surface_counter ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2EXT(
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type
- getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type
+ getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_external_fence_capabilities ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getExternalFencePropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
- VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
+ VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFencePropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties
+ getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_performance_query ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result enumerateQueueFamilyPerformanceQueryCountersKHR(
- uint32_t queueFamilyIndex,
- uint32_t * pCounterCount,
- VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters,
- VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex,
+ uint32_t * pCounterCount,
+ VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters,
+ VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PerformanceCounterKHRAllocator = std::allocator<PerformanceCounterKHR>,
- typename PerformanceCounterDescriptionKHRAllocator = std::allocator<PerformanceCounterDescriptionKHR>,
+ template <typename PerformanceCounterKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>,
+ typename PerformanceCounterDescriptionKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<
- std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
- std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
- enumerateQueueFamilyPerformanceQueryCountersKHR(
- uint32_t queueFamilyIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename PerformanceCounterKHRAllocator = std::allocator<PerformanceCounterKHR>,
- typename PerformanceCounterDescriptionKHRAllocator = std::allocator<PerformanceCounterDescriptionKHR>,
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
+ std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
+ enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename PerformanceCounterKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>,
+ typename PerformanceCounterDescriptionKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
typename B1 = PerformanceCounterKHRAllocator,
typename B2 = PerformanceCounterDescriptionKHRAllocator,
typename std::enable_if<std::is_same<typename B1::value_type, PerformanceCounterKHR>::value &&
std::is_same<typename B2::value_type, PerformanceCounterDescriptionKHR>::value,
int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<
- std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
- std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
- enumerateQueueFamilyPerformanceQueryCountersKHR(
- uint32_t queueFamilyIndex,
- PerformanceCounterKHRAllocator & performanceCounterKHRAllocator,
- PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
+ std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
+ enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex,
+ PerformanceCounterKHRAllocator & performanceCounterKHRAllocator,
+ PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getQueueFamilyPerformanceQueryPassesKHR(
- const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo,
- uint32_t * pNumPasses,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo,
+ uint32_t * pNumPasses,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD uint32_t getQueueFamilyPerformanceQueryPassesKHR(
- const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD uint32_t
+ getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_get_surface_capabilities2 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2KHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
- VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
+ VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type
getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<StructureChain<X, Y, Z...>>::type
getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
- uint32_t * pSurfaceFormatCount,
- VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
+ uint32_t * pSurfaceFormatCount,
+ VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename SurfaceFormat2KHRAllocator = std::allocator<SurfaceFormat2KHR>,
+ template <typename SurfaceFormat2KHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type
getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename SurfaceFormat2KHRAllocator = std::allocator<SurfaceFormat2KHR>,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename SurfaceFormat2KHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = SurfaceFormat2KHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, SurfaceFormat2KHR>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type
+ typename B1 = SurfaceFormat2KHRAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, SurfaceFormat2KHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type
getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename StructureChain,
+ typename StructureChainAllocator = std::allocator<StructureChain>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type
+ getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename StructureChain,
+ typename StructureChainAllocator = std::allocator<StructureChain>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = StructureChainAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, StructureChain>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type
+ getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
+ StructureChainAllocator & structureChainAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_get_display_properties2 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getDisplayProperties2KHR( uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getDisplayProperties2KHR( uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename DisplayProperties2KHRAllocator = std::allocator<DisplayProperties2KHR>,
+ template <typename DisplayProperties2KHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type
getDisplayProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename DisplayProperties2KHRAllocator = std::allocator<DisplayProperties2KHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = DisplayProperties2KHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, DisplayProperties2KHR>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type
+ template <typename DisplayProperties2KHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = DisplayProperties2KHRAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, DisplayProperties2KHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type
getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getDisplayPlaneProperties2KHR(
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename DisplayPlaneProperties2KHRAllocator = std::allocator<DisplayPlaneProperties2KHR>,
+ template <typename DisplayPlaneProperties2KHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type
getDisplayPlaneProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename DisplayPlaneProperties2KHRAllocator = std::allocator<DisplayPlaneProperties2KHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = DisplayPlaneProperties2KHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, DisplayPlaneProperties2KHR>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type
+ template <typename DisplayPlaneProperties2KHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = DisplayPlaneProperties2KHRAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, DisplayPlaneProperties2KHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type
getDisplayPlaneProperties2KHR( DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getDisplayModeProperties2KHR(
- VULKAN_HPP_NAMESPACE::DisplayKHR display,
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename DisplayModeProperties2KHRAllocator = std::allocator<DisplayModeProperties2KHR>,
+ template <typename DisplayModeProperties2KHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type
- getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename DisplayModeProperties2KHRAllocator = std::allocator<DisplayModeProperties2KHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = DisplayModeProperties2KHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, DisplayModeProperties2KHR>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type
+ getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename DisplayModeProperties2KHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = DisplayModeProperties2KHRAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, DisplayModeProperties2KHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type
getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilities2KHR(
- const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo,
- VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo,
+ VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type
getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_EXT_sample_locations ===
@@ -13770,83 +13251,70 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT getMultisamplePropertiesEXT(
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT
+ getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_EXT_calibrated_timestamps ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getCalibrateableTimeDomainsEXT(
- uint32_t * pTimeDomainCount,
- VULKAN_HPP_NAMESPACE::TimeDomainEXT * pTimeDomains,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount,
+ VULKAN_HPP_NAMESPACE::TimeDomainEXT * pTimeDomains,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename TimeDomainEXTAllocator = std::allocator<TimeDomainEXT>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<TimeDomainEXT, TimeDomainEXTAllocator>>::type
+ template <typename TimeDomainEXTAllocator = std::allocator<VULKAN_HPP_NAMESPACE::TimeDomainEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT, TimeDomainEXTAllocator>>::type
getCalibrateableTimeDomainsEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename TimeDomainEXTAllocator = std::allocator<TimeDomainEXT>,
+ template <typename TimeDomainEXTAllocator = std::allocator<VULKAN_HPP_NAMESPACE::TimeDomainEXT>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = TimeDomainEXTAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, TimeDomainEXT>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<TimeDomainEXT, TimeDomainEXTAllocator>>::type
- getCalibrateableTimeDomainsEXT( TimeDomainEXTAllocator & timeDomainEXTAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename B1 = TimeDomainEXTAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, TimeDomainEXT>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT, TimeDomainEXTAllocator>>::type
+ getCalibrateableTimeDomainsEXT( TimeDomainEXTAllocator & timeDomainEXTAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_fragment_shading_rate ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getFragmentShadingRatesKHR(
- uint32_t * pFragmentShadingRateCount,
- VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getFragmentShadingRatesKHR( uint32_t * pFragmentShadingRateCount,
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <
- typename PhysicalDeviceFragmentShadingRateKHRAllocator = std::allocator<PhysicalDeviceFragmentShadingRateKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<
- std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
+ template <typename PhysicalDeviceFragmentShadingRateKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
getFragmentShadingRatesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename PhysicalDeviceFragmentShadingRateKHRAllocator = std::allocator<PhysicalDeviceFragmentShadingRateKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = PhysicalDeviceFragmentShadingRateKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceFragmentShadingRateKHR>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<
- std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
- getFragmentShadingRatesKHR(
- PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename PhysicalDeviceFragmentShadingRateKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = PhysicalDeviceFragmentShadingRateKHRAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceFragmentShadingRateKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
+ getFragmentShadingRatesKHR( PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_EXT_tooling_info ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getToolPropertiesEXT( uint32_t * pToolCount,
- VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getToolPropertiesEXT( uint32_t * pToolCount,
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PhysicalDeviceToolPropertiesAllocator = std::allocator<PhysicalDeviceToolProperties>,
+ template <typename PhysicalDeviceToolPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type
getToolPropertiesEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename PhysicalDeviceToolPropertiesAllocator = std::allocator<PhysicalDeviceToolProperties>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = PhysicalDeviceToolPropertiesAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceToolProperties>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type
+ template <typename PhysicalDeviceToolPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = PhysicalDeviceToolPropertiesAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceToolProperties>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type
getToolPropertiesEXT( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -13854,78 +13322,69 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_cooperative_matrix ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getCooperativeMatrixPropertiesNV(
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getCooperativeMatrixPropertiesNV( uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename CooperativeMatrixPropertiesNVAllocator = std::allocator<CooperativeMatrixPropertiesNV>,
+ template <typename CooperativeMatrixPropertiesNVAllocator = std::allocator<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
getCooperativeMatrixPropertiesNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename CooperativeMatrixPropertiesNVAllocator = std::allocator<CooperativeMatrixPropertiesNV>,
+ template <typename CooperativeMatrixPropertiesNVAllocator = std::allocator<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = CooperativeMatrixPropertiesNVAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, CooperativeMatrixPropertiesNV>::value,
- int>::type = 0>
+ typename B1 = CooperativeMatrixPropertiesNVAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, CooperativeMatrixPropertiesNV>::value, int>::type = 0>
VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
getCooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_NV_coverage_reduction_mode ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getSupportedFramebufferMixedSamplesCombinationsNV(
- uint32_t * pCombinationCount,
- VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result
+ getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t * pCombinationCount,
+ VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <
- typename FramebufferMixedSamplesCombinationNVAllocator = std::allocator<FramebufferMixedSamplesCombinationNV>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<
- std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
- getSupportedFramebufferMixedSamplesCombinationsNV(
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <
- typename FramebufferMixedSamplesCombinationNVAllocator = std::allocator<FramebufferMixedSamplesCombinationNV>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = FramebufferMixedSamplesCombinationNVAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, FramebufferMixedSamplesCombinationNV>::value,
- int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<
- std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
- getSupportedFramebufferMixedSamplesCombinationsNV(
- FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename FramebufferMixedSamplesCombinationNVAllocator = std::allocator<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
+ getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename FramebufferMixedSamplesCombinationNVAllocator = std::allocator<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = FramebufferMixedSamplesCombinationNVAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, FramebufferMixedSamplesCombinationNV>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
+ getSupportedFramebufferMixedSamplesCombinationsNV( FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_EXT_full_screen_exclusive ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result getSurfacePresentModes2EXT(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
- uint32_t * pPresentModeCount,
- VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
+ uint32_t * pPresentModeCount,
+ VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PresentModeKHRAllocator = std::allocator<PresentModeKHR>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
+ template <typename PresentModeKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type
getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename PresentModeKHRAllocator = std::allocator<PresentModeKHR>,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename PresentModeKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PresentModeKHR>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = PresentModeKHRAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
+ typename B1 = PresentModeKHRAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, PresentModeKHR>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type
getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
PresentModeKHRAllocator & presentModeKHRAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
@@ -13933,37 +13392,28 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- acquireDrmDisplayEXT( int32_t drmFd,
- VULKAN_HPP_NAMESPACE::DisplayKHR display,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result acquireDrmDisplayEXT( int32_t drmFd,
+ VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<void>::type
- acquireDrmDisplayEXT( int32_t drmFd,
- VULKAN_HPP_NAMESPACE::DisplayKHR display,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getDrmDisplayEXT( int32_t drmFd,
- uint32_t connectorId,
- VULKAN_HPP_NAMESPACE::DisplayKHR * display,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getDrmDisplayEXT( int32_t drmFd,
+ uint32_t connectorId,
+ VULKAN_HPP_NAMESPACE::DisplayKHR * display,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
- getDrmDisplayEXT( int32_t drmFd,
- uint32_t connectorId,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
+ getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
- getDrmDisplayEXTUnique( int32_t drmFd,
- uint32_t connectorId,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
+ getDrmDisplayEXTUnique( int32_t drmFd, uint32_t connectorId, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -13972,31 +13422,26 @@ namespace VULKAN_HPP_NAMESPACE
# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
- acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- getWinrtDisplayNV( uint32_t deviceRelativeId,
- VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result getWinrtDisplayNV( uint32_t deviceRelativeId,
+ VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
- getWinrtDisplayNVUnique( uint32_t deviceRelativeId,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
+ getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
@@ -14007,14 +13452,12 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex,
IDirectFB * dfb,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex,
- IDirectFB & dfb,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex,
+ IDirectFB & dfb,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
@@ -14024,18 +13467,41 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
Bool32 getScreenPresentationSupportQNX( uint32_t queueFamilyIndex,
struct _screen_window * window,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Bool32 getScreenPresentationSupportQNX( uint32_t queueFamilyIndex,
- struct _screen_window & window,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::Bool32 getScreenPresentationSupportQNX( uint32_t queueFamilyIndex,
+ struct _screen_window & window,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPhysicalDevice() const VULKAN_HPP_NOEXCEPT
+ //=== VK_NV_optical_flow ===
+
+ template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD Result getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo,
+ uint32_t * pFormatCount,
+ VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV * pImageFormatProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename OpticalFlowImageFormatPropertiesNVAllocator = std::allocator<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator>>::type
+ getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ template <typename OpticalFlowImageFormatPropertiesNVAllocator = std::allocator<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>,
+ typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+ typename B1 = OpticalFlowImageFormatPropertiesNVAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, OpticalFlowImageFormatPropertiesNV>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator>>::type
+ getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo,
+ OpticalFlowImageFormatPropertiesNVAllocator & opticalFlowImageFormatPropertiesNVAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ operator VkPhysicalDevice() const VULKAN_HPP_NOEXCEPT
{
return m_physicalDevice;
}
@@ -14053,17 +13519,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkPhysicalDevice m_physicalDevice = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) == sizeof( VkPhysicalDevice ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevice>::value,
- "PhysicalDevice is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED(
- "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::ePhysicalDevice>
- {
- using type = VULKAN_HPP_NAMESPACE::PhysicalDevice;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice>
@@ -14072,8 +13527,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice>
{
using Type = VULKAN_HPP_NAMESPACE::PhysicalDevice;
};
@@ -14115,23 +13569,20 @@ namespace VULKAN_HPP_NAMESPACE
using CType = VkInstance;
using NativeType = VkInstance;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eInstance;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eInstance;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance;
public:
- VULKAN_HPP_CONSTEXPR Instance() = default;
- VULKAN_HPP_CONSTEXPR Instance( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
- VULKAN_HPP_TYPESAFE_EXPLICIT Instance( VkInstance instance ) VULKAN_HPP_NOEXCEPT : m_instance( instance ) {}
+ VULKAN_HPP_CONSTEXPR Instance() = default;
+ VULKAN_HPP_CONSTEXPR Instance( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+ Instance( VkInstance instance ) VULKAN_HPP_NOEXCEPT : m_instance( instance ) {}
-#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
Instance & operator=( VkInstance instance ) VULKAN_HPP_NOEXCEPT
{
m_instance = instance;
return *this;
}
-#endif
Instance & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
@@ -14162,66 +13613,56 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount,
- VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount,
+ VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PhysicalDeviceAllocator = std::allocator<PhysicalDevice>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDevice, PhysicalDeviceAllocator>>::type
+ template <typename PhysicalDeviceAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PhysicalDevice>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator>>::type
enumeratePhysicalDevices( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename PhysicalDeviceAllocator = std::allocator<PhysicalDevice>,
+ template <typename PhysicalDeviceAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PhysicalDevice>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = PhysicalDeviceAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, PhysicalDevice>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDevice, PhysicalDeviceAllocator>>::type
- enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ typename B1 = PhysicalDeviceAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDevice>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator>>::type
+ enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- PFN_vkVoidFunction
- getProcAddr( const char * pName,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ PFN_vkVoidFunction getProcAddr( const char * pName, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- PFN_vkVoidFunction
- getProcAddr( const std::string & name,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_VERSION_1_1 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroups(
- uint32_t * pPhysicalDeviceGroupCount,
- VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroups( uint32_t * pPhysicalDeviceGroupCount,
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PhysicalDeviceGroupPropertiesAllocator = std::allocator<PhysicalDeviceGroupProperties>,
+ template <typename PhysicalDeviceGroupPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
enumeratePhysicalDeviceGroups( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename PhysicalDeviceGroupPropertiesAllocator = std::allocator<PhysicalDeviceGroupProperties>,
+ template <typename PhysicalDeviceGroupPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = PhysicalDeviceGroupPropertiesAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value,
- int>::type = 0>
+ typename B1 = PhysicalDeviceGroupPropertiesAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceGroupProperties>::value, int>::type = 0>
VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_surface ===
@@ -14229,50 +13670,44 @@ namespace VULKAN_HPP_NAMESPACE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_KHR_display ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createDisplayPlaneSurfaceKHR(
- const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- createDisplayPlaneSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ createDisplayPlaneSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -14280,26 +13715,22 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_xlib_surface ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- createXlibSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ createXlibSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
@@ -14308,26 +13739,22 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_xcb_surface ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- createXcbSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ createXcbSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_XCB_KHR*/
@@ -14336,26 +13763,22 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_wayland_surface ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- createWaylandSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ createWaylandSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
@@ -14364,26 +13787,22 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_android_surface ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- createAndroidSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ createAndroidSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
@@ -14392,26 +13811,22 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_win32_surface ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- createWin32SurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ createWin32SurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
@@ -14419,53 +13834,45 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_debug_report ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createDebugReportCallbackEXT(
- const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type
- createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type
+ createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>>::type
- createDebugReportCallbackEXTUnique( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>>::type
+ createDebugReportCallbackEXTUnique( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDebugReportCallbackEXT(
- VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -14476,7 +13883,7 @@ namespace VULKAN_HPP_NAMESPACE
int32_t messageCode,
const char * pLayerPrefix,
const char * pMessage,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,
@@ -14486,34 +13893,29 @@ namespace VULKAN_HPP_NAMESPACE
int32_t messageCode,
const std::string & layerPrefix,
const std::string & message,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#if defined( VK_USE_PLATFORM_GGP )
//=== VK_GGP_stream_descriptor_surface ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createStreamDescriptorSurfaceGGP(
- const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- createStreamDescriptorSurfaceGGPUnique(
- const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createStreamDescriptorSurfaceGGPUnique(
+ const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_GGP*/
@@ -14522,26 +13924,22 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NN_vi_surface ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- createViSurfaceNNUnique( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ createViSurfaceNNUnique( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_VI_NN*/
@@ -14549,51 +13947,45 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_device_group_creation ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroupsKHR(
- uint32_t * pPhysicalDeviceGroupCount,
- VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroupsKHR( uint32_t * pPhysicalDeviceGroupCount,
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename PhysicalDeviceGroupPropertiesAllocator = std::allocator<PhysicalDeviceGroupProperties>,
+ template <typename PhysicalDeviceGroupPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
enumeratePhysicalDeviceGroupsKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
- template <typename PhysicalDeviceGroupPropertiesAllocator = std::allocator<PhysicalDeviceGroupProperties>,
+ template <typename PhysicalDeviceGroupPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = PhysicalDeviceGroupPropertiesAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value,
- int>::type = 0>
+ typename B1 = PhysicalDeviceGroupPropertiesAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceGroupProperties>::value, int>::type = 0>
VULKAN_HPP_NODISCARD
- typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
+ typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
enumeratePhysicalDeviceGroupsKHR( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#if defined( VK_USE_PLATFORM_IOS_MVK )
//=== VK_MVK_ios_surface ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- createIOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ createIOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_IOS_MVK*/
@@ -14602,26 +13994,22 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_MVK_macos_surface ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- createMacOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ createMacOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_MACOS_MVK*/
@@ -14629,94 +14017,80 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_debug_utils ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createDebugUtilsMessengerEXT(
- const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type
- createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type
+ createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>>::type
- createDebugUtilsMessengerEXTUnique( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>>::type
+ createDebugUtilsMessengerEXTUnique( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDebugUtilsMessengerEXT(
- VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,
const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,
const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_imagepipe_surface ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result createImagePipeSurfaceFUCHSIA(
- const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- createImagePipeSurfaceFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ createImagePipeSurfaceFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_FUCHSIA*/
@@ -14725,26 +14099,22 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_metal_surface ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- createMetalSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ createMetalSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_METAL_EXT*/
@@ -14752,26 +14122,22 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_headless_surface ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- createHeadlessSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ createHeadlessSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -14779,26 +14145,22 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_directfb_surface ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- createDirectFBSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ createDirectFBSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
@@ -14807,31 +14169,27 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_QNX_screen_surface ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
- createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+ createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
- createScreenSurfaceQNXUnique( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+ createScreenSurfaceQNXUnique( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkInstance() const VULKAN_HPP_NOEXCEPT
+ operator VkInstance() const VULKAN_HPP_NOEXCEPT
{
return m_instance;
}
@@ -14849,16 +14207,6 @@ namespace VULKAN_HPP_NAMESPACE
private:
VkInstance m_instance = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Instance ) == sizeof( VkInstance ),
- "handle and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Instance>::value,
- "Instance is not nothrow_move_constructible!" );
-
- template <>
- struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eInstance>
- {
- using type = VULKAN_HPP_NAMESPACE::Instance;
- };
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eInstance>
@@ -14867,8 +14215,7 @@ namespace VULKAN_HPP_NAMESPACE
};
template <>
- struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance>
+ struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance>
{
using Type = VULKAN_HPP_NAMESPACE::Instance;
};
@@ -14895,76 +14242,67 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
VULKAN_HPP_NAMESPACE::Instance * pInstance,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT )
- VULKAN_HPP_NOEXCEPT;
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type createInstance(
- const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+ VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type
+ createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
- typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>>::type
- createInstanceUnique( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+ VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>>::type
+ createInstanceUnique( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result enumerateInstanceExtensionProperties(
- const char * pLayerName,
- uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result enumerateInstanceExtensionProperties( const char * pLayerName,
+ uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename ExtensionPropertiesAllocator = std::allocator<ExtensionProperties>,
+ template <typename ExtensionPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::ExtensionProperties>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type
- enumerateInstanceExtensionProperties( Optional<const std::string> layerName
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
- template <typename ExtensionPropertiesAllocator = std::allocator<ExtensionProperties>,
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type
+ enumerateInstanceExtensionProperties( Optional<const std::string> layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+ template <typename ExtensionPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::ExtensionProperties>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = ExtensionPropertiesAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, ExtensionProperties>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type
+ typename B1 = ExtensionPropertiesAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, ExtensionProperties>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type
enumerateInstanceExtensionProperties( Optional<const std::string> layerName,
ExtensionPropertiesAllocator & extensionPropertiesAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result
- enumerateInstanceLayerProperties( uint32_t * pPropertyCount,
- VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
- template <typename LayerPropertiesAllocator = std::allocator<LayerProperties>,
- typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type
+ VULKAN_HPP_NODISCARD Result enumerateInstanceLayerProperties( uint32_t * pPropertyCount,
+ VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template <typename LayerPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type
enumerateInstanceLayerProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
- template <typename LayerPropertiesAllocator = std::allocator<LayerProperties>,
+ template <typename LayerPropertiesAllocator = std::allocator<VULKAN_HPP_NAMESPACE::LayerProperties>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
- typename B = LayerPropertiesAllocator,
- typename std::enable_if<std::is_same<typename B::value_type, LayerProperties>::value, int>::type = 0>
- VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type
- enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+ typename B1 = LayerPropertiesAllocator,
+ typename std::enable_if<std::is_same<typename B1::value_type, LayerProperties>::value, int>::type = 0>
+ VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type
+ enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_VERSION_1_1 ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- VULKAN_HPP_NODISCARD Result enumerateInstanceVersion(
- uint32_t * pApiVersion, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD Result enumerateInstanceVersion( uint32_t * pApiVersion,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<uint32_t>::type
- enumerateInstanceVersion( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+ VULKAN_HPP_NODISCARD typename ResultValueType<uint32_t>::type enumerateInstanceVersion( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
} // namespace VULKAN_HPP_NAMESPACE
diff --git a/thirdparty/vulkan/include/vulkan/vulkan_hash.hpp b/thirdparty/vulkan/include/vulkan/vulkan_hash.hpp
index e1b22a9862..054ab2120b 100644
--- a/thirdparty/vulkan/include/vulkan/vulkan_hash.hpp
+++ b/thirdparty/vulkan/include/vulkan/vulkan_hash.hpp
@@ -21,8 +21,7 @@ namespace std
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::Flags<BitType> const & flags ) const VULKAN_HPP_NOEXCEPT
{
- return std::hash<typename std::underlying_type<BitType>::type>{}(
- static_cast<typename std::underlying_type<BitType>::type>( flags ) );
+ return std::hash<typename std::underlying_type<BitType>::type>{}( static_cast<typename std::underlying_type<BitType>::type>( flags ) );
}
};
@@ -215,8 +214,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayout const & descriptorSetLayout ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayout const & descriptorSetLayout ) const VULKAN_HPP_NOEXCEPT
{
return std::hash<VkDescriptorSetLayout>{}( static_cast<VkDescriptorSetLayout>( descriptorSetLayout ) );
}
@@ -263,8 +261,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion const & samplerYcbcrConversion ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion const & samplerYcbcrConversion ) const VULKAN_HPP_NOEXCEPT
{
return std::hash<VkSamplerYcbcrConversion>{}( static_cast<VkSamplerYcbcrConversion>( samplerYcbcrConversion ) );
}
@@ -273,11 +270,9 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate const & descriptorUpdateTemplate ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate const & descriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT
{
- return std::hash<VkDescriptorUpdateTemplate>{}(
- static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ) );
+ return std::hash<VkDescriptorUpdateTemplate>{}( static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ) );
}
};
@@ -339,8 +334,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT const & debugReportCallbackEXT ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT const & debugReportCallbackEXT ) const VULKAN_HPP_NOEXCEPT
{
return std::hash<VkDebugReportCallbackEXT>{}( static_cast<VkDebugReportCallbackEXT>( debugReportCallbackEXT ) );
}
@@ -361,11 +355,9 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR const & videoSessionParametersKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR const & videoSessionParametersKHR ) const VULKAN_HPP_NOEXCEPT
{
- return std::hash<VkVideoSessionParametersKHR>{}(
- static_cast<VkVideoSessionParametersKHR>( videoSessionParametersKHR ) );
+ return std::hash<VkVideoSessionParametersKHR>{}( static_cast<VkVideoSessionParametersKHR>( videoSessionParametersKHR ) );
}
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
@@ -395,8 +387,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT const & debugUtilsMessengerEXT ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT const & debugUtilsMessengerEXT ) const VULKAN_HPP_NOEXCEPT
{
return std::hash<VkDebugUtilsMessengerEXT>{}( static_cast<VkDebugUtilsMessengerEXT>( debugUtilsMessengerEXT ) );
}
@@ -407,11 +398,9 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const & accelerationStructureKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const & accelerationStructureKHR ) const VULKAN_HPP_NOEXCEPT
{
- return std::hash<VkAccelerationStructureKHR>{}(
- static_cast<VkAccelerationStructureKHR>( accelerationStructureKHR ) );
+ return std::hash<VkAccelerationStructureKHR>{}( static_cast<VkAccelerationStructureKHR>( accelerationStructureKHR ) );
}
};
@@ -420,8 +409,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::ValidationCacheEXT const & validationCacheEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ValidationCacheEXT const & validationCacheEXT ) const VULKAN_HPP_NOEXCEPT
{
return std::hash<VkValidationCacheEXT>{}( static_cast<VkValidationCacheEXT>( validationCacheEXT ) );
}
@@ -432,11 +420,9 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureNV const & accelerationStructureNV ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureNV const & accelerationStructureNV ) const VULKAN_HPP_NOEXCEPT
{
- return std::hash<VkAccelerationStructureNV>{}(
- static_cast<VkAccelerationStructureNV>( accelerationStructureNV ) );
+ return std::hash<VkAccelerationStructureNV>{}( static_cast<VkAccelerationStructureNV>( accelerationStructureNV ) );
}
};
@@ -445,11 +431,9 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL const & performanceConfigurationINTEL )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL const & performanceConfigurationINTEL ) const VULKAN_HPP_NOEXCEPT
{
- return std::hash<VkPerformanceConfigurationINTEL>{}(
- static_cast<VkPerformanceConfigurationINTEL>( performanceConfigurationINTEL ) );
+ return std::hash<VkPerformanceConfigurationINTEL>{}( static_cast<VkPerformanceConfigurationINTEL>( performanceConfigurationINTEL ) );
}
};
@@ -458,8 +442,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::DeferredOperationKHR const & deferredOperationKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DeferredOperationKHR const & deferredOperationKHR ) const VULKAN_HPP_NOEXCEPT
{
return std::hash<VkDeferredOperationKHR>{}( static_cast<VkDeferredOperationKHR>( deferredOperationKHR ) );
}
@@ -470,11 +453,9 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV const & indirectCommandsLayoutNV ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV const & indirectCommandsLayoutNV ) const VULKAN_HPP_NOEXCEPT
{
- return std::hash<VkIndirectCommandsLayoutNV>{}(
- static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayoutNV ) );
+ return std::hash<VkIndirectCommandsLayoutNV>{}( static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayoutNV ) );
}
};
@@ -484,15 +465,35 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA const & bufferCollectionFUCHSIA ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA const & bufferCollectionFUCHSIA ) const VULKAN_HPP_NOEXCEPT
{
- return std::hash<VkBufferCollectionFUCHSIA>{}(
- static_cast<VkBufferCollectionFUCHSIA>( bufferCollectionFUCHSIA ) );
+ return std::hash<VkBufferCollectionFUCHSIA>{}( static_cast<VkBufferCollectionFUCHSIA>( bufferCollectionFUCHSIA ) );
}
};
#endif /*VK_USE_PLATFORM_FUCHSIA*/
+ //=== VK_EXT_opacity_micromap ===
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::MicromapEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapEXT const & micromapEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkMicromapEXT>{}( static_cast<VkMicromapEXT>( micromapEXT ) );
+ }
+ };
+
+ //=== VK_NV_optical_flow ===
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV const & opticalFlowSessionNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ return std::hash<VkOpticalFlowSessionNV>{}( static_cast<VkOpticalFlowSessionNV>( opticalFlowSessionNV ) );
+ }
+ };
+
#if 14 <= VULKAN_HPP_CPP_VERSION
//======================================
//=== HASH structures for structures ===
@@ -522,8 +523,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR const &
- accelerationStructureBuildRangeInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR const & accelerationStructureBuildRangeInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildRangeInfoKHR.primitiveCount );
@@ -537,8 +538,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR const &
- accelerationStructureBuildSizesInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR const & accelerationStructureBuildSizesInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildSizesInfoKHR.sType );
@@ -553,8 +554,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const &
- accelerationStructureCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & accelerationStructureCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.sType );
@@ -572,8 +572,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const & geometryTrianglesNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const & geometryTrianglesNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.sType );
@@ -639,8 +638,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV>
{
- std::size_t operator()(
- VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const & accelerationStructureInfoNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const & accelerationStructureInfoNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInfoNV.sType );
@@ -657,8 +655,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const &
- accelerationStructureCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & accelerationStructureCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoNV.sType );
@@ -672,8 +669,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR const &
- accelerationStructureDeviceAddressInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR const & accelerationStructureDeviceAddressInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureDeviceAddressInfoKHR.sType );
@@ -686,8 +683,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::TransformMatrixKHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transformMatrixKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transformMatrixKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
for ( size_t i = 0; i < 3; ++i )
@@ -704,8 +700,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR const &
- accelerationStructureInstanceKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR const & accelerationStructureInstanceKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInstanceKHR.transform );
@@ -721,16 +716,15 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV const &
- accelerationStructureMatrixMotionInstanceNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV const & accelerationStructureMatrixMotionInstanceNV ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.transformT0 );
VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.transformT1 );
VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.instanceCustomIndex );
VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.mask );
- VULKAN_HPP_HASH_COMBINE( seed,
- accelerationStructureMatrixMotionInstanceNV.instanceShaderBindingTableRecordOffset );
+ VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.instanceShaderBindingTableRecordOffset );
VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.flags );
VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.accelerationStructureReference );
return seed;
@@ -740,8 +734,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV const &
- accelerationStructureMemoryRequirementsInfoNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV const & accelerationStructureMemoryRequirementsInfoNV ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMemoryRequirementsInfoNV.sType );
@@ -755,8 +749,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV const &
- accelerationStructureMotionInfoNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV const & accelerationStructureMotionInfoNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMotionInfoNV.sType );
@@ -796,8 +789,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV const &
- accelerationStructureSRTMotionInstanceNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV const & accelerationStructureSRTMotionInstanceNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureSRTMotionInstanceNV.transformT0 );
@@ -812,10 +805,22 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::MicromapUsageEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapUsageEXT const & micromapUsageEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, micromapUsageEXT.count );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapUsageEXT.subdivisionLevel );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapUsageEXT.format );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR const &
- accelerationStructureVersionInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR const & accelerationStructureVersionInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureVersionInfoKHR.sType );
@@ -828,8 +833,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR const & acquireNextImageInfoKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR const & acquireNextImageInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, acquireNextImageInfoKHR.sType );
@@ -846,8 +850,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR>
{
- std::size_t operator()(
- VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR const & acquireProfilingLockInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR const & acquireProfilingLockInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, acquireProfilingLockInfoKHR.sType );
@@ -861,8 +864,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::AllocationCallbacks>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::AllocationCallbacks const & allocationCallbacks ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::AllocationCallbacks const & allocationCallbacks ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, allocationCallbacks.pUserData );
@@ -876,6 +878,20 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::AmigoProfilingSubmitInfoSEC>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::AmigoProfilingSubmitInfoSEC const & amigoProfilingSubmitInfoSEC ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, amigoProfilingSubmitInfoSEC.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, amigoProfilingSubmitInfoSEC.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, amigoProfilingSubmitInfoSEC.firstDrawTimestamp );
+ VULKAN_HPP_HASH_COMBINE( seed, amigoProfilingSubmitInfoSEC.swapBufferTimestamp );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::ComponentMapping>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::ComponentMapping const & componentMapping ) const VULKAN_HPP_NOEXCEPT
@@ -893,8 +909,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID const &
- androidHardwareBufferFormatProperties2ANDROID ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID const & androidHardwareBufferFormatProperties2ANDROID ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.sType );
@@ -916,8 +932,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID const &
- androidHardwareBufferFormatPropertiesANDROID ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID const & androidHardwareBufferFormatPropertiesANDROID ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.sType );
@@ -939,8 +955,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID const &
- androidHardwareBufferPropertiesANDROID ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID const & androidHardwareBufferPropertiesANDROID ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferPropertiesANDROID.sType );
@@ -956,8 +972,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID const &
- androidHardwareBufferUsageANDROID ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID const & androidHardwareBufferUsageANDROID ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferUsageANDROID.sType );
@@ -972,8 +987,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR>
{
- std::size_t operator()(
- VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & androidSurfaceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & androidSurfaceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, androidSurfaceCreateInfoKHR.sType );
@@ -1011,8 +1025,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::AttachmentDescription>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::AttachmentDescription const & attachmentDescription ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentDescription const & attachmentDescription ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.flags );
@@ -1031,8 +1044,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::AttachmentDescription2>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentDescription2 const & attachmentDescription2 ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentDescription2 const & attachmentDescription2 ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.sType );
@@ -1053,8 +1065,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout const &
- attachmentDescriptionStencilLayout ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout const & attachmentDescriptionStencilLayout ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, attachmentDescriptionStencilLayout.sType );
@@ -1068,8 +1079,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::AttachmentReference>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::AttachmentReference const & attachmentReference ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentReference const & attachmentReference ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, attachmentReference.attachment );
@@ -1081,8 +1091,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::AttachmentReference2>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::AttachmentReference2 const & attachmentReference2 ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentReference2 const & attachmentReference2 ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, attachmentReference2.sType );
@@ -1097,8 +1106,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout const &
- attachmentReferenceStencilLayout ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout const & attachmentReferenceStencilLayout ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, attachmentReferenceStencilLayout.sType );
@@ -1111,8 +1119,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD const & attachmentSampleCountInfoAMD )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD const & attachmentSampleCountInfoAMD ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleCountInfoAMD.sType );
@@ -1139,8 +1146,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SampleLocationEXT>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::SampleLocationEXT const & sampleLocationEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SampleLocationEXT const & sampleLocationEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, sampleLocationEXT.x );
@@ -1152,8 +1158,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfoEXT ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, sampleLocationsInfoEXT.sType );
@@ -1169,8 +1174,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT const & attachmentSampleLocationsEXT )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT const & attachmentSampleLocationsEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleLocationsEXT.attachmentIndex );
@@ -1206,8 +1210,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV const &
- bindAccelerationStructureMemoryInfoNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV const & bindAccelerationStructureMemoryInfoNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, bindAccelerationStructureMemoryInfoNV.sType );
@@ -1224,9 +1228,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo const & bindBufferMemoryDeviceGroupInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo const & bindBufferMemoryDeviceGroupInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryDeviceGroupInfo.sType );
@@ -1240,8 +1242,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo const & bindBufferMemoryInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo const & bindBufferMemoryInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryInfo.sType );
@@ -1280,9 +1281,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo const & bindImageMemoryDeviceGroupInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo const & bindImageMemoryDeviceGroupInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryDeviceGroupInfo.sType );
@@ -1298,8 +1297,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::BindImageMemoryInfo const & bindImageMemoryInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::BindImageMemoryInfo const & bindImageMemoryInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryInfo.sType );
@@ -1314,9 +1312,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR const & bindImageMemorySwapchainInfoKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR const & bindImageMemorySwapchainInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, bindImageMemorySwapchainInfoKHR.sType );
@@ -1330,8 +1326,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo const & bindImagePlaneMemoryInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo const & bindImagePlaneMemoryInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, bindImagePlaneMemoryInfo.sType );
@@ -1344,8 +1339,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV const &
- bindIndexBufferIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV const & bindIndexBufferIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, bindIndexBufferIndirectCommandNV.bufferAddress );
@@ -1358,8 +1352,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV const &
- bindShaderGroupIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV const & bindShaderGroupIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, bindShaderGroupIndirectCommandNV.groupIndex );
@@ -1385,8 +1378,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo const & sparseBufferMemoryBindInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo const & sparseBufferMemoryBindInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, sparseBufferMemoryBindInfo.buffer );
@@ -1399,9 +1391,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo const & sparseImageOpaqueMemoryBindInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo const & sparseImageOpaqueMemoryBindInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, sparseImageOpaqueMemoryBindInfo.image );
@@ -1453,8 +1443,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SparseImageMemoryBind>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::SparseImageMemoryBind const & sparseImageMemoryBind ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseImageMemoryBind const & sparseImageMemoryBind ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBind.subresource );
@@ -1470,8 +1459,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo const & sparseImageMemoryBindInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo const & sparseImageMemoryBindInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBindInfo.image );
@@ -1506,8 +1494,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV const &
- bindVertexBufferIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV const & bindVertexBufferIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, bindVertexBufferIndirectCommandNV.bufferAddress );
@@ -1517,11 +1504,28 @@ namespace std
}
};
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR const & bindVideoSessionMemoryInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, bindVideoSessionMemoryInfoKHR.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, bindVideoSessionMemoryInfoKHR.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, bindVideoSessionMemoryInfoKHR.memoryBindIndex );
+ VULKAN_HPP_HASH_COMBINE( seed, bindVideoSessionMemoryInfoKHR.memory );
+ VULKAN_HPP_HASH_COMBINE( seed, bindVideoSessionMemoryInfoKHR.memoryOffset );
+ VULKAN_HPP_HASH_COMBINE( seed, bindVideoSessionMemoryInfoKHR.memorySize );
+ return seed;
+ }
+ };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresourceLayers ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresourceLayers ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceLayers.aspectMask );
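
The BindVideoSessionMemoryInfoKHR specialization added above sits behind VK_ENABLE_BETA_EXTENSIONS, so any code that hashes the provisional video types has to compile under the same guard (typically passed as -DVK_ENABLE_BETA_EXTENSIONS by the build). An illustrative sketch, not taken from the patch:

#include <cstddef>

#include <vulkan/vulkan.hpp>
#include <vulkan/vulkan_hash.hpp>

#if defined( VK_ENABLE_BETA_EXTENSIONS )
// Only available when the beta (provisional) video extensions are enabled.
std::size_t hashBindInfo( vk::BindVideoSessionMemoryInfoKHR const & info )
{
  return std::hash<vk::BindVideoSessionMemoryInfoKHR>{}( info );
}
#endif /* VK_ENABLE_BETA_EXTENSIONS */
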
@@ -1577,8 +1581,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA const &
- bufferCollectionBufferCreateInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA const & bufferCollectionBufferCreateInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionBufferCreateInfoFUCHSIA.sType );
@@ -1594,8 +1598,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const &
- bufferCollectionConstraintsInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const & bufferCollectionConstraintsInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionConstraintsInfoFUCHSIA.sType );
@@ -1614,8 +1618,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const &
- bufferCollectionCreateInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & bufferCollectionCreateInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionCreateInfoFUCHSIA.sType );
@@ -1630,8 +1633,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA const &
- bufferCollectionImageCreateInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA const & bufferCollectionImageCreateInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionImageCreateInfoFUCHSIA.sType );
@@ -1647,8 +1650,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA const & sysmemColorSpaceFUCHSIA ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA const & sysmemColorSpaceFUCHSIA ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, sysmemColorSpaceFUCHSIA.sType );
@@ -1663,8 +1665,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA const &
- bufferCollectionPropertiesFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA const & bufferCollectionPropertiesFUCHSIA ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.sType );
@@ -1707,8 +1708,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA const & bufferConstraintsInfoFUCHSIA )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA const & bufferConstraintsInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, bufferConstraintsInfoFUCHSIA.sType );
@@ -1752,8 +1752,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT const &
- bufferDeviceAddressCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT const & bufferDeviceAddressCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, bufferDeviceAddressCreateInfoEXT.sType );
@@ -1766,8 +1765,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo const & bufferDeviceAddressInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo const & bufferDeviceAddressInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, bufferDeviceAddressInfo.sType );
@@ -1814,8 +1812,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier const & bufferMemoryBarrier ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier const & bufferMemoryBarrier ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.sType );
@@ -1834,8 +1831,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 const & bufferMemoryBarrier2 ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 const & bufferMemoryBarrier2 ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.sType );
@@ -1856,8 +1852,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 const & bufferMemoryRequirementsInfo2 )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 const & bufferMemoryRequirementsInfo2 ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryRequirementsInfo2.sType );
@@ -1870,8 +1865,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo const &
- bufferOpaqueCaptureAddressCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo const & bufferOpaqueCaptureAddressCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, bufferOpaqueCaptureAddressCreateInfo.sType );
@@ -1884,8 +1878,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & bufferViewCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & bufferViewCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, bufferViewCreateInfo.sType );
@@ -1902,8 +1895,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT const & calibratedTimestampInfoEXT ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT const & calibratedTimestampInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, calibratedTimestampInfoEXT.sType );
@@ -1916,8 +1908,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::CheckpointData2NV>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::CheckpointData2NV const & checkpointData2NV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::CheckpointData2NV const & checkpointData2NV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, checkpointData2NV.sType );
@@ -1945,8 +1936,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const & clearDepthStencilValue ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const & clearDepthStencilValue ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, clearDepthStencilValue.depth );
@@ -1971,8 +1961,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV const & coarseSampleLocationNV ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV const & coarseSampleLocationNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, coarseSampleLocationNV.pixelX );
@@ -1985,8 +1974,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV const & coarseSampleOrderCustomNV ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV const & coarseSampleOrderCustomNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, coarseSampleOrderCustomNV.shadingRate );
@@ -1998,10 +1986,40 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT const & colorBlendAdvancedEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.advancedBlendOp );
+ VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.srcPremultiplied );
+ VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.dstPremultiplied );
+ VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.blendOverlap );
+ VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.clampResults );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT const & colorBlendEquationEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.srcColorBlendFactor );
+ VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.dstColorBlendFactor );
+ VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.colorBlendOp );
+ VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.srcAlphaBlendFactor );
+ VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.dstAlphaBlendFactor );
+ VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.alphaBlendOp );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & commandBufferAllocateInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & commandBufferAllocateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.sType );
@@ -2016,8 +2034,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo const & commandBufferInheritanceInfo )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo const & commandBufferInheritanceInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.sType );
@@ -2035,8 +2052,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo const & commandBufferBeginInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo const & commandBufferBeginInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, commandBufferBeginInfo.sType );
@@ -2050,8 +2066,9 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT const &
- commandBufferInheritanceConditionalRenderingInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT const & commandBufferInheritanceConditionalRenderingInfoEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceConditionalRenderingInfoEXT.sType );
@@ -2064,8 +2081,9 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM const &
- commandBufferInheritanceRenderPassTransformInfoQCOM ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM const & commandBufferInheritanceRenderPassTransformInfoQCOM ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderPassTransformInfoQCOM.sType );
@@ -2079,8 +2097,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo const &
- commandBufferInheritanceRenderingInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo const & commandBufferInheritanceRenderingInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.sType );
@@ -2115,8 +2133,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV const &
- commandBufferInheritanceViewportScissorInfoNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV const & commandBufferInheritanceViewportScissorInfoNV ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceViewportScissorInfoNV.sType );
@@ -2131,8 +2149,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo const & commandBufferSubmitInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo const & commandBufferSubmitInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, commandBufferSubmitInfo.sType );
@@ -2146,8 +2163,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & commandPoolCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & commandPoolCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, commandPoolCreateInfo.sType );
@@ -2161,8 +2177,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SpecializationMapEntry>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::SpecializationMapEntry const & specializationMapEntry ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SpecializationMapEntry const & specializationMapEntry ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, specializationMapEntry.constantID );
@@ -2175,8 +2190,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SpecializationInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::SpecializationInfo const & specializationInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SpecializationInfo const & specializationInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, specializationInfo.mapEntryCount );
@@ -2190,8 +2204,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & pipelineShaderStageCreateInfo )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & pipelineShaderStageCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageCreateInfo.sType );
@@ -2211,8 +2224,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & computePipelineCreateInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & computePipelineCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, computePipelineCreateInfo.sType );
@@ -2229,8 +2241,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT const &
- conditionalRenderingBeginInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT const & conditionalRenderingBeginInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, conditionalRenderingBeginInfoEXT.sType );
@@ -2245,8 +2256,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ConformanceVersion>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::ConformanceVersion const & conformanceVersion ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ConformanceVersion const & conformanceVersion ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, conformanceVersion.major );
@@ -2260,8 +2270,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV const & cooperativeMatrixPropertiesNV )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV const & cooperativeMatrixPropertiesNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.sType );
@@ -2281,8 +2290,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR const &
- copyAccelerationStructureInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR const & copyAccelerationStructureInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, copyAccelerationStructureInfoKHR.sType );
@@ -2313,8 +2321,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 const & copyBufferToImageInfo2 ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 const & copyBufferToImageInfo2 ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, copyBufferToImageInfo2.sType );
@@ -2331,8 +2338,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM const & copyCommandTransformInfoQCOM )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM const & copyCommandTransformInfoQCOM ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, copyCommandTransformInfoQCOM.sType );
@@ -2345,8 +2351,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::CopyDescriptorSet>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::CopyDescriptorSet const & copyDescriptorSet ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyDescriptorSet const & copyDescriptorSet ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.sType );
@@ -2400,8 +2405,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 const & copyImageToBufferInfo2 ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 const & copyImageToBufferInfo2 ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, copyImageToBufferInfo2.sType );
@@ -2416,10 +2420,24 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT const & copyMicromapInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.src );
+ VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.dst );
+ VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.mode );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & cuFunctionCreateInfoNVX ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & cuFunctionCreateInfoNVX ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, cuFunctionCreateInfoNVX.sType );
@@ -2460,8 +2478,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & cuModuleCreateInfoNVX ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & cuModuleCreateInfoNVX ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, cuModuleCreateInfoNVX.sType );
@@ -2476,8 +2493,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR const & d3D12FenceSubmitInfoKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR const & d3D12FenceSubmitInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.sType );
@@ -2494,8 +2510,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT const & debugMarkerMarkerInfoEXT ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT const & debugMarkerMarkerInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, debugMarkerMarkerInfoEXT.sType );
@@ -2515,8 +2530,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT const & debugMarkerObjectNameInfoEXT )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT const & debugMarkerObjectNameInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectNameInfoEXT.sType );
@@ -2534,8 +2548,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT>
{
- std::size_t operator()(
- VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT const & debugMarkerObjectTagInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT const & debugMarkerObjectTagInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectTagInfoEXT.sType );
@@ -2552,8 +2565,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const &
- debugReportCallbackCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & debugReportCallbackCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, debugReportCallbackCreateInfoEXT.sType );
@@ -2568,8 +2580,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT const & debugUtilsLabelEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT const & debugUtilsLabelEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, debugUtilsLabelEXT.sType );
@@ -2589,8 +2600,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT>
{
- std::size_t operator()(
- VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT const & debugUtilsObjectNameInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT const & debugUtilsObjectNameInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectNameInfoEXT.sType );
@@ -2608,8 +2618,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT const &
- debugUtilsMessengerCallbackDataEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT const & debugUtilsMessengerCallbackDataEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.sType );
@@ -2637,8 +2646,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const &
- debugUtilsMessengerCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & debugUtilsMessengerCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCreateInfoEXT.sType );
@@ -2655,8 +2663,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT const & debugUtilsObjectTagInfoEXT ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT const & debugUtilsObjectTagInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectTagInfoEXT.sType );
@@ -2673,8 +2680,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV const &
- dedicatedAllocationBufferCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV const & dedicatedAllocationBufferCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationBufferCreateInfoNV.sType );
@@ -2687,8 +2694,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV const &
- dedicatedAllocationImageCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV const & dedicatedAllocationImageCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationImageCreateInfoNV.sType );
@@ -2701,8 +2707,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV const &
- dedicatedAllocationMemoryAllocateInfoNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV const & dedicatedAllocationMemoryAllocateInfoNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationMemoryAllocateInfoNV.sType );
@@ -2732,8 +2738,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImageSubresourceRange>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & imageSubresourceRange ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & imageSubresourceRange ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceRange.aspectMask );
@@ -2748,8 +2753,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 const & imageMemoryBarrier2 ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 const & imageMemoryBarrier2 ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.sType );
@@ -2790,8 +2794,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::DescriptorBufferInfo const & descriptorBufferInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorBufferInfo const & descriptorBufferInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferInfo.buffer );
@@ -2804,8 +2807,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DescriptorImageInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::DescriptorImageInfo const & descriptorImageInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorImageInfo const & descriptorImageInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, descriptorImageInfo.sampler );
@@ -2818,8 +2820,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DescriptorPoolSize>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::DescriptorPoolSize const & descriptorPoolSize ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorPoolSize const & descriptorPoolSize ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolSize.type );
@@ -2831,8 +2832,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & descriptorPoolCreateInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & descriptorPoolCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolCreateInfo.sType );
@@ -2848,8 +2848,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo const &
- descriptorPoolInlineUniformBlockCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo const & descriptorPoolInlineUniformBlockCreateInfo ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolInlineUniformBlockCreateInfo.sType );
@@ -2862,8 +2862,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & descriptorSetAllocateInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & descriptorSetAllocateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, descriptorSetAllocateInfo.sType );
@@ -2876,10 +2875,23 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE const & descriptorSetBindingReferenceVALVE ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, descriptorSetBindingReferenceVALVE.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, descriptorSetBindingReferenceVALVE.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, descriptorSetBindingReferenceVALVE.descriptorSetLayout );
+ VULKAN_HPP_HASH_COMBINE( seed, descriptorSetBindingReferenceVALVE.binding );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding const & descriptorSetLayoutBinding ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding const & descriptorSetLayoutBinding ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBinding.binding );
@@ -2894,8 +2906,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo const &
- descriptorSetLayoutBindingFlagsCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo const & descriptorSetLayoutBindingFlagsCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBindingFlagsCreateInfo.sType );
@@ -2909,8 +2921,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & descriptorSetLayoutCreateInfo )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & descriptorSetLayoutCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutCreateInfo.sType );
@@ -2923,10 +2934,24 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE>
+ {
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE const & descriptorSetLayoutHostMappingInfoVALVE ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutHostMappingInfoVALVE.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutHostMappingInfoVALVE.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutHostMappingInfoVALVE.descriptorOffset );
+ VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutHostMappingInfoVALVE.descriptorSize );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport const & descriptorSetLayoutSupport ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport const & descriptorSetLayoutSupport ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutSupport.sType );
@@ -2939,8 +2964,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo const &
- descriptorSetVariableDescriptorCountAllocateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo const & descriptorSetVariableDescriptorCountAllocateInfo )
+ const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountAllocateInfo.sType );
@@ -2954,8 +2979,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport const &
- descriptorSetVariableDescriptorCountLayoutSupport ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport const & descriptorSetVariableDescriptorCountLayoutSupport )
+ const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountLayoutSupport.sType );
@@ -2968,8 +2993,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry const & descriptorUpdateTemplateEntry )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry const & descriptorUpdateTemplateEntry ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateEntry.dstBinding );
@@ -2985,8 +3009,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const &
- descriptorUpdateTemplateCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & descriptorUpdateTemplateCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.sType );
@@ -3004,11 +3027,25 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT const & deviceAddressBindingCallbackDataEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.flags );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.baseAddress );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.size );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.bindingType );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements const & deviceBufferMemoryRequirements ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements const & deviceBufferMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, deviceBufferMemoryRequirements.sType );
@@ -3021,8 +3058,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo const & deviceQueueCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo const & deviceQueueCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, deviceQueueCreateInfo.sType );
@@ -3038,8 +3074,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const & physicalDeviceFeatures ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const & physicalDeviceFeatures ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.robustBufferAccess );
@@ -3136,8 +3171,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT const &
- deviceDeviceMemoryReportCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT const & deviceDeviceMemoryReportCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, deviceDeviceMemoryReportCreateInfoEXT.sType );
@@ -3152,8 +3187,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV const &
- deviceDiagnosticsConfigCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV const & deviceDiagnosticsConfigCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, deviceDiagnosticsConfigCreateInfoNV.sType );
@@ -3166,8 +3200,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, deviceEventInfoEXT.sType );
@@ -3178,12 +3211,97 @@ namespace std
};
template <>
- struct hash<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo>
+ struct hash<VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo const & deviceGroupBindSparseInfo ) const
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT const & deviceFaultAddressInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultAddressInfoEXT.addressType );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultAddressInfoEXT.reportedAddress );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultAddressInfoEXT.addressPrecision );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT const & deviceFaultCountsEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.addressInfoCount );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.vendorInfoCount );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.vendorBinarySize );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT const & deviceFaultVendorInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i )
+ {
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorInfoEXT.description[i] );
+ }
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorInfoEXT.vendorFaultCode );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorInfoEXT.vendorFaultData );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT const & deviceFaultInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.pNext );
+ for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i )
+ {
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.description[i] );
+ }
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.pAddressInfos );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.pVendorInfos );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.pVendorBinaryData );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT const & deviceFaultVendorBinaryHeaderVersionOneEXT ) const
VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.headerSize );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.headerVersion );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.vendorID );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.deviceID );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.driverVersion );
+ for ( size_t i = 0; i < VK_UUID_SIZE; ++i )
+ {
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.pipelineCacheUUID[i] );
+ }
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.applicationNameOffset );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.applicationVersion );
+ VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.engineNameOffset );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo const & deviceGroupBindSparseInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, deviceGroupBindSparseInfo.sType );
VULKAN_HPP_HASH_COMBINE( seed, deviceGroupBindSparseInfo.pNext );
VULKAN_HPP_HASH_COMBINE( seed, deviceGroupBindSparseInfo.resourceDeviceIndex );
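
The VULKAN_HPP_HASH_COMBINE calls in the DeviceFault* hunks above follow the familiar boost-style hash_combine idiom: each member's std::hash value is folded into a running seed, and fixed-size arrays such as description[] and pipelineCacheUUID[] are folded in element by element. An illustrative stand-in for that idiom (not the macro's exact definition in vulkan_hash.hpp):

#include <cstddef>
#include <functional>

// Boost-style combine: mixes hash(value) into an accumulated seed.
template <typename T>
void hash_combine( std::size_t & seed, T const & value )
{
  // 0x9e3779b9 is the usual golden-ratio constant used by this idiom.
  seed ^= std::hash<T>{}( value ) + 0x9e3779b9 + ( seed << 6 ) + ( seed >> 2 );
}
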
@@ -3195,8 +3313,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo const &
- deviceGroupCommandBufferBeginInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo const & deviceGroupCommandBufferBeginInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, deviceGroupCommandBufferBeginInfo.sType );
@@ -3209,8 +3326,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo>
{
- std::size_t operator()(
- VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo const & deviceGroupDeviceCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo const & deviceGroupDeviceCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, deviceGroupDeviceCreateInfo.sType );
@@ -3224,8 +3340,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR const &
- deviceGroupPresentCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR const & deviceGroupPresentCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentCapabilitiesKHR.sType );
@@ -3242,8 +3357,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR const & deviceGroupPresentInfoKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR const & deviceGroupPresentInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentInfoKHR.sType );
@@ -3258,9 +3372,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo const & deviceGroupRenderPassBeginInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo const & deviceGroupRenderPassBeginInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, deviceGroupRenderPassBeginInfo.sType );
@@ -3275,8 +3387,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo const & deviceGroupSubmitInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo const & deviceGroupSubmitInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.sType );
@@ -3294,8 +3405,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR const &
- deviceGroupSwapchainCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR const & deviceGroupSwapchainCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSwapchainCreateInfoKHR.sType );
@@ -3333,8 +3443,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements const & deviceImageMemoryRequirements )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements const & deviceImageMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, deviceImageMemoryRequirements.sType );
@@ -3348,8 +3457,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo const &
- deviceMemoryOpaqueCaptureAddressInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo const & deviceMemoryOpaqueCaptureAddressInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryOpaqueCaptureAddressInfo.sType );
@@ -3362,8 +3470,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD const &
- deviceMemoryOverallocationCreateInfoAMD ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD const & deviceMemoryOverallocationCreateInfoAMD ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryOverallocationCreateInfoAMD.sType );
@@ -3376,8 +3484,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT const &
- deviceMemoryReportCallbackDataEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT const & deviceMemoryReportCallbackDataEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.sType );
@@ -3396,8 +3503,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo>
{
- std::size_t operator()(
- VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo const & devicePrivateDataCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo const & devicePrivateDataCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, devicePrivateDataCreateInfo.sType );
@@ -3410,8 +3516,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR const &
- deviceQueueGlobalPriorityCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR const & deviceQueueGlobalPriorityCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, deviceQueueGlobalPriorityCreateInfoKHR.sType );
@@ -3440,8 +3546,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & directFBSurfaceCreateInfoEXT )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & directFBSurfaceCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, directFBSurfaceCreateInfoEXT.sType );
@@ -3457,8 +3562,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DispatchIndirectCommand const & dispatchIndirectCommand ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DispatchIndirectCommand const & dispatchIndirectCommand ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, dispatchIndirectCommand.x );
@@ -3471,8 +3575,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, displayEventInfoEXT.sType );
@@ -3485,8 +3588,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const & displayModeParametersKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const & displayModeParametersKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, displayModeParametersKHR.visibleRegion );
@@ -3498,8 +3600,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & displayModeCreateInfoKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & displayModeCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, displayModeCreateInfoKHR.sType );
@@ -3513,8 +3614,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR const & displayModePropertiesKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR const & displayModePropertiesKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, displayModePropertiesKHR.displayMode );
@@ -3526,8 +3626,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR const & displayModeProperties2KHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR const & displayModeProperties2KHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, displayModeProperties2KHR.sType );
@@ -3540,8 +3639,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD const &
- displayNativeHdrSurfaceCapabilitiesAMD ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD const & displayNativeHdrSurfaceCapabilitiesAMD ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, displayNativeHdrSurfaceCapabilitiesAMD.sType );
@@ -3554,8 +3653,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>
{
- std::size_t operator()(
- VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR const & displayPlaneCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR const & displayPlaneCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.supportedAlpha );
@@ -3574,8 +3672,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR const & displayPlaneCapabilities2KHR )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR const & displayPlaneCapabilities2KHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilities2KHR.sType );
@@ -3588,8 +3685,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR const & displayPlaneInfo2KHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR const & displayPlaneInfo2KHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, displayPlaneInfo2KHR.sType );
@@ -3603,8 +3699,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR const & displayPlanePropertiesKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR const & displayPlanePropertiesKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, displayPlanePropertiesKHR.currentDisplay );
@@ -3616,8 +3711,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR const & displayPlaneProperties2KHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR const & displayPlaneProperties2KHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, displayPlaneProperties2KHR.sType );
@@ -3630,8 +3724,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT const & displayPowerInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT const & displayPowerInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, displayPowerInfoEXT.sType );
@@ -3644,8 +3737,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR const & displayPresentInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR const & displayPresentInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, displayPresentInfoKHR.sType );
@@ -3660,8 +3752,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR const & displayPropertiesKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR const & displayPropertiesKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, displayPropertiesKHR.display );
@@ -3681,8 +3772,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::DisplayProperties2KHR const & displayProperties2KHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayProperties2KHR const & displayProperties2KHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, displayProperties2KHR.sType );
@@ -3695,8 +3785,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR>
{
- std::size_t operator()(
- VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & displaySurfaceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & displaySurfaceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.sType );
@@ -3716,8 +3805,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand const & drawIndexedIndirectCommand ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand const & drawIndexedIndirectCommand ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, drawIndexedIndirectCommand.indexCount );
@@ -3732,8 +3820,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DrawIndirectCommand>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::DrawIndirectCommand const & drawIndirectCommand ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DrawIndirectCommand const & drawIndirectCommand ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, drawIndirectCommand.vertexCount );
@@ -3745,11 +3832,22 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandEXT const & drawMeshTasksIndirectCommandEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, drawMeshTasksIndirectCommandEXT.groupCountX );
+ VULKAN_HPP_HASH_COMBINE( seed, drawMeshTasksIndirectCommandEXT.groupCountY );
+ VULKAN_HPP_HASH_COMBINE( seed, drawMeshTasksIndirectCommandEXT.groupCountZ );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV const & drawMeshTasksIndirectCommandNV ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV const & drawMeshTasksIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, drawMeshTasksIndirectCommandNV.taskCount );
@@ -3761,9 +3859,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT const & drmFormatModifierProperties2EXT ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT const & drmFormatModifierProperties2EXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierProperties2EXT.drmFormatModifier );
@@ -3776,9 +3872,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT const & drmFormatModifierPropertiesEXT ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT const & drmFormatModifierPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesEXT.drmFormatModifier );
@@ -3791,8 +3885,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT const &
- drmFormatModifierPropertiesList2EXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT const & drmFormatModifierPropertiesList2EXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesList2EXT.sType );
@@ -3806,8 +3899,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT const &
- drmFormatModifierPropertiesListEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT const & drmFormatModifierPropertiesListEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesListEXT.sType );
@@ -3834,8 +3926,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo const & exportFenceCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo const & exportFenceCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, exportFenceCreateInfo.sType );
@@ -3849,8 +3940,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR const & exportFenceWin32HandleInfoKHR )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR const & exportFenceWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, exportFenceWin32HandleInfoKHR.sType );
@@ -3866,8 +3956,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo const & exportMemoryAllocateInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo const & exportMemoryAllocateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, exportMemoryAllocateInfo.sType );
@@ -3880,8 +3969,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV const & exportMemoryAllocateInfoNV ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV const & exportMemoryAllocateInfoNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, exportMemoryAllocateInfoNV.sType );
@@ -3895,9 +3983,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR const & exportMemoryWin32HandleInfoKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR const & exportMemoryWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoKHR.sType );
@@ -3914,8 +4000,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV const & exportMemoryWin32HandleInfoNV )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV const & exportMemoryWin32HandleInfoNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoNV.sType );
@@ -3927,11 +4012,138 @@ namespace std
};
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
+# if defined( VK_USE_PLATFORM_METAL_EXT )
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::ExportMetalBufferInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMetalBufferInfoEXT const & exportMetalBufferInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalBufferInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalBufferInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalBufferInfoEXT.memory );
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalBufferInfoEXT.mtlBuffer );
+ return seed;
+ }
+ };
+# endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+# if defined( VK_USE_PLATFORM_METAL_EXT )
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::ExportMetalCommandQueueInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMetalCommandQueueInfoEXT const & exportMetalCommandQueueInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalCommandQueueInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalCommandQueueInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalCommandQueueInfoEXT.queue );
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalCommandQueueInfoEXT.mtlCommandQueue );
+ return seed;
+ }
+ };
+# endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+# if defined( VK_USE_PLATFORM_METAL_EXT )
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::ExportMetalDeviceInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMetalDeviceInfoEXT const & exportMetalDeviceInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalDeviceInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalDeviceInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalDeviceInfoEXT.mtlDevice );
+ return seed;
+ }
+ };
+# endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+# if defined( VK_USE_PLATFORM_METAL_EXT )
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::ExportMetalIOSurfaceInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMetalIOSurfaceInfoEXT const & exportMetalIOSurfaceInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalIOSurfaceInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalIOSurfaceInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalIOSurfaceInfoEXT.image );
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalIOSurfaceInfoEXT.ioSurface );
+ return seed;
+ }
+ };
+# endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+# if defined( VK_USE_PLATFORM_METAL_EXT )
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::ExportMetalObjectCreateInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMetalObjectCreateInfoEXT const & exportMetalObjectCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalObjectCreateInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalObjectCreateInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalObjectCreateInfoEXT.exportObjectType );
+ return seed;
+ }
+ };
+# endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+# if defined( VK_USE_PLATFORM_METAL_EXT )
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT const & exportMetalObjectsInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalObjectsInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalObjectsInfoEXT.pNext );
+ return seed;
+ }
+ };
+# endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+# if defined( VK_USE_PLATFORM_METAL_EXT )
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::ExportMetalSharedEventInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMetalSharedEventInfoEXT const & exportMetalSharedEventInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalSharedEventInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalSharedEventInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalSharedEventInfoEXT.semaphore );
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalSharedEventInfoEXT.event );
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalSharedEventInfoEXT.mtlSharedEvent );
+ return seed;
+ }
+ };
+# endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+# if defined( VK_USE_PLATFORM_METAL_EXT )
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::ExportMetalTextureInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMetalTextureInfoEXT const & exportMetalTextureInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalTextureInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalTextureInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalTextureInfoEXT.image );
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalTextureInfoEXT.imageView );
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalTextureInfoEXT.bufferView );
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalTextureInfoEXT.plane );
+ VULKAN_HPP_HASH_COMBINE( seed, exportMetalTextureInfoEXT.mtlTexture );
+ return seed;
+ }
+ };
+# endif /*VK_USE_PLATFORM_METAL_EXT*/
+
template <>
struct hash<VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo const & exportSemaphoreCreateInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo const & exportSemaphoreCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreCreateInfo.sType );
@@ -3945,8 +4157,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR const &
- exportSemaphoreWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR const & exportSemaphoreWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreWin32HandleInfoKHR.sType );
@@ -3962,8 +4173,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ExtensionProperties>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::ExtensionProperties const & extensionProperties ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ExtensionProperties const & extensionProperties ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
for ( size_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i )
@@ -3978,8 +4188,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ExternalMemoryProperties>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const & externalMemoryProperties ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const & externalMemoryProperties ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, externalMemoryProperties.externalMemoryFeatures );
@@ -3992,8 +4201,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ExternalBufferProperties>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalBufferProperties const & externalBufferProperties ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalBufferProperties const & externalBufferProperties ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, externalBufferProperties.sType );
@@ -4006,8 +4214,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ExternalFenceProperties>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalFenceProperties const & externalFenceProperties ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalFenceProperties const & externalFenceProperties ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, externalFenceProperties.sType );
@@ -4023,8 +4230,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ExternalFormatANDROID>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::ExternalFormatANDROID const & externalFormatANDROID ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalFormatANDROID const & externalFormatANDROID ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, externalFormatANDROID.sType );
@@ -4038,8 +4244,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties const & externalImageFormatProperties )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties const & externalImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatProperties.sType );
@@ -4052,8 +4257,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImageFormatProperties>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::ImageFormatProperties const & imageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageFormatProperties const & imageFormatProperties ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties.maxExtent );
@@ -4068,9 +4272,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV const & externalImageFormatPropertiesNV ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV const & externalImageFormatPropertiesNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatPropertiesNV.imageFormatProperties );
@@ -4084,9 +4286,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo const & externalMemoryBufferCreateInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo const & externalMemoryBufferCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, externalMemoryBufferCreateInfo.sType );
@@ -4099,8 +4299,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo const & externalMemoryImageCreateInfo )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo const & externalMemoryImageCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, externalMemoryImageCreateInfo.sType );
@@ -4113,9 +4312,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV const & externalMemoryImageCreateInfoNV ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV const & externalMemoryImageCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, externalMemoryImageCreateInfoNV.sType );
@@ -4128,8 +4325,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties>
{
- std::size_t operator()(
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties const & externalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties const & externalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, externalSemaphoreProperties.sType );
@@ -4157,8 +4353,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR const & fenceGetFdInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR const & fenceGetFdInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, fenceGetFdInfoKHR.sType );
@@ -4173,8 +4368,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR const & fenceGetWin32HandleInfoKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR const & fenceGetWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, fenceGetWin32HandleInfoKHR.sType );
@@ -4189,8 +4383,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT const &
- filterCubicImageViewImageFormatPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT const & filterCubicImageViewImageFormatPropertiesEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, filterCubicImageViewImageFormatPropertiesEXT.sType );
@@ -4217,8 +4411,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::FormatProperties2>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::FormatProperties2 const & formatProperties2 ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::FormatProperties2 const & formatProperties2 ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, formatProperties2.sType );
@@ -4231,8 +4424,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::FormatProperties3>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::FormatProperties3 const & formatProperties3 ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::FormatProperties3 const & formatProperties3 ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, formatProperties3.sType );
@@ -4247,8 +4439,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR const &
- fragmentShadingRateAttachmentInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR const & fragmentShadingRateAttachmentInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, fragmentShadingRateAttachmentInfoKHR.sType );
@@ -4262,9 +4453,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo const & framebufferAttachmentImageInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo const & framebufferAttachmentImageInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.sType );
@@ -4283,8 +4472,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo const &
- framebufferAttachmentsCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo const & framebufferAttachmentsCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentsCreateInfo.sType );
@@ -4298,8 +4486,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::FramebufferCreateInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & framebufferCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & framebufferCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.sType );
@@ -4318,8 +4505,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV const &
- framebufferMixedSamplesCombinationNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV const & framebufferMixedSamplesCombinationNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, framebufferMixedSamplesCombinationNV.sType );
@@ -4335,8 +4521,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV const & indirectCommandsStreamNV ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV const & indirectCommandsStreamNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsStreamNV.buffer );
@@ -4348,8 +4533,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV const & generatedCommandsInfoNV ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV const & generatedCommandsInfoNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.sType );
@@ -4374,8 +4558,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV const &
- generatedCommandsMemoryRequirementsInfoNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV const & generatedCommandsMemoryRequirementsInfoNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoNV.sType );
@@ -4391,8 +4575,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VertexInputBindingDescription const & vertexInputBindingDescription )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VertexInputBindingDescription const & vertexInputBindingDescription ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription.binding );
@@ -4405,9 +4588,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription const & vertexInputAttributeDescription ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription const & vertexInputAttributeDescription ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription.location );
@@ -4421,8 +4602,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo const &
- pipelineVertexInputStateCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo const & pipelineVertexInputStateCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputStateCreateInfo.sType );
@@ -4439,8 +4619,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo const &
- pipelineInputAssemblyStateCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo const & pipelineInputAssemblyStateCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineInputAssemblyStateCreateInfo.sType );
@@ -4455,8 +4634,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo const &
- pipelineTessellationStateCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo const & pipelineTessellationStateCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationStateCreateInfo.sType );
@@ -4470,9 +4648,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo const & pipelineViewportStateCreateInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo const & pipelineViewportStateCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportStateCreateInfo.sType );
@@ -4489,8 +4665,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo const &
- pipelineRasterizationStateCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo const & pipelineRasterizationStateCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.sType );
@@ -4513,8 +4688,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo const &
- pipelineMultisampleStateCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo const & pipelineMultisampleStateCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.sType );
@@ -4550,8 +4724,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo const &
- pipelineDepthStencilStateCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo const & pipelineDepthStencilStateCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.sType );
@@ -4573,8 +4746,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState const &
- pipelineColorBlendAttachmentState ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState const & pipelineColorBlendAttachmentState ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.blendEnable );
@@ -4592,8 +4764,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo const &
- pipelineColorBlendStateCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo const & pipelineColorBlendStateCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.sType );
@@ -4614,9 +4785,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo const & pipelineDynamicStateCreateInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo const & pipelineDynamicStateCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineDynamicStateCreateInfo.sType );
@@ -4631,8 +4800,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & graphicsPipelineCreateInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & graphicsPipelineCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.sType );
@@ -4659,11 +4827,22 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT const & graphicsPipelineLibraryCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineLibraryCreateInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineLibraryCreateInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineLibraryCreateInfoEXT.flags );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV const & graphicsShaderGroupCreateInfoNV ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV const & graphicsShaderGroupCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, graphicsShaderGroupCreateInfoNV.sType );
@@ -4679,8 +4858,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV const &
- graphicsPipelineShaderGroupsCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV const & graphicsPipelineShaderGroupsCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineShaderGroupsCreateInfoNV.sType );
@@ -4728,8 +4907,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & headlessSurfaceCreateInfoEXT )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & headlessSurfaceCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, headlessSurfaceCreateInfoEXT.sType );
@@ -4743,8 +4921,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & iOSSurfaceCreateInfoMVK ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & iOSSurfaceCreateInfoMVK ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, iOSSurfaceCreateInfoMVK.sType );
@@ -4776,12 +4953,40 @@ namespace std
}
};
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::ImageCompressionControlEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageCompressionControlEXT const & imageCompressionControlEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, imageCompressionControlEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, imageCompressionControlEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, imageCompressionControlEXT.flags );
+ VULKAN_HPP_HASH_COMBINE( seed, imageCompressionControlEXT.compressionControlPlaneCount );
+ VULKAN_HPP_HASH_COMBINE( seed, imageCompressionControlEXT.pFixedRateFlags );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT const & imageCompressionPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, imageCompressionPropertiesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, imageCompressionPropertiesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, imageCompressionPropertiesEXT.imageCompressionFlags );
+ VULKAN_HPP_HASH_COMBINE( seed, imageCompressionPropertiesEXT.imageCompressionFixedRateFlags );
+ return seed;
+ }
+ };
+
# if defined( VK_USE_PLATFORM_FUCHSIA )
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA const &
- imageFormatConstraintsInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA const & imageFormatConstraintsInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.sType );
@@ -4801,8 +5006,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA>
{
- std::size_t operator()(
- VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA const & imageConstraintsInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA const & imageConstraintsInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, imageConstraintsInfoFUCHSIA.sType );
@@ -4834,8 +5038,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SubresourceLayout>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::SubresourceLayout const & subresourceLayout ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SubresourceLayout const & subresourceLayout ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout.offset );
@@ -4850,8 +5053,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT const &
- imageDrmFormatModifierExplicitCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT const & imageDrmFormatModifierExplicitCreateInfoEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierExplicitCreateInfoEXT.sType );
@@ -4866,8 +5069,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT const &
- imageDrmFormatModifierListCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT const & imageDrmFormatModifierListCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierListCreateInfoEXT.sType );
@@ -4881,8 +5084,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT const &
- imageDrmFormatModifierPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT const & imageDrmFormatModifierPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierPropertiesEXT.sType );
@@ -4895,8 +5097,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo const & imageFormatListCreateInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo const & imageFormatListCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, imageFormatListCreateInfo.sType );
@@ -4910,8 +5111,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageFormatProperties2 const & imageFormatProperties2 ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageFormatProperties2 const & imageFormatProperties2 ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties2.sType );
@@ -4924,8 +5124,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier const & imageMemoryBarrier ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier const & imageMemoryBarrier ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.sType );
@@ -4945,8 +5144,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 const & imageMemoryRequirementsInfo2 )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 const & imageMemoryRequirementsInfo2 ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, imageMemoryRequirementsInfo2.sType );
@@ -4960,8 +5158,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const &
- imagePipeSurfaceCreateInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & imagePipeSurfaceCreateInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, imagePipeSurfaceCreateInfoFUCHSIA.sType );
@@ -4976,8 +5173,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo const &
- imagePlaneMemoryRequirementsInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo const & imagePlaneMemoryRequirementsInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, imagePlaneMemoryRequirementsInfo.sType );
@@ -5022,8 +5218,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 const &
- imageSparseMemoryRequirementsInfo2 ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 const & imageSparseMemoryRequirementsInfo2 ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, imageSparseMemoryRequirementsInfo2.sType );
@@ -5036,8 +5231,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo>
{
- std::size_t operator()(
- VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo const & imageStencilUsageCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo const & imageStencilUsageCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, imageStencilUsageCreateInfo.sType );
@@ -5048,10 +5242,22 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::ImageSubresource2EXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSubresource2EXT const & imageSubresource2EXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2EXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2EXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2EXT.imageSubresource );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR>
{
- std::size_t operator()(
- VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR const & imageSwapchainCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR const & imageSwapchainCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, imageSwapchainCreateInfoKHR.sType );
@@ -5064,8 +5270,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT const & imageViewASTCDecodeModeEXT ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT const & imageViewASTCDecodeModeEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, imageViewASTCDecodeModeEXT.sType );
@@ -5078,8 +5283,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX const & imageViewAddressPropertiesNVX )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX const & imageViewAddressPropertiesNVX ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, imageViewAddressPropertiesNVX.sType );
@@ -5093,8 +5297,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & imageViewCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & imageViewCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.sType );
@@ -5112,8 +5315,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX const & imageViewHandleInfoNVX ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX const & imageViewHandleInfoNVX ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, imageViewHandleInfoNVX.sType );
@@ -5128,8 +5330,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT const & imageViewMinLodCreateInfoEXT )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT const & imageViewMinLodCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, imageViewMinLodCreateInfoEXT.sType );
@@ -5140,10 +5341,24 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::ImageViewSampleWeightCreateInfoQCOM>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewSampleWeightCreateInfoQCOM const & imageViewSampleWeightCreateInfoQCOM ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, imageViewSampleWeightCreateInfoQCOM.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, imageViewSampleWeightCreateInfoQCOM.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, imageViewSampleWeightCreateInfoQCOM.filterCenter );
+ VULKAN_HPP_HASH_COMBINE( seed, imageViewSampleWeightCreateInfoQCOM.filterSize );
+ VULKAN_HPP_HASH_COMBINE( seed, imageViewSampleWeightCreateInfoQCOM.numPhases );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo const & imageViewUsageCreateInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo const & imageViewUsageCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, imageViewUsageCreateInfo.sType );
@@ -5157,8 +5372,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID const &
- importAndroidHardwareBufferInfoANDROID ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID const & importAndroidHardwareBufferInfoANDROID ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, importAndroidHardwareBufferInfoANDROID.sType );
@@ -5172,8 +5387,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR const & importFenceFdInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR const & importFenceFdInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, importFenceFdInfoKHR.sType );
@@ -5190,8 +5404,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR const & importFenceWin32HandleInfoKHR )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR const & importFenceWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, importFenceWin32HandleInfoKHR.sType );
@@ -5210,8 +5423,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA const &
- importMemoryBufferCollectionFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA const & importMemoryBufferCollectionFUCHSIA ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, importMemoryBufferCollectionFUCHSIA.sType );
@@ -5226,8 +5438,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR const & importMemoryFdInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR const & importMemoryFdInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, importMemoryFdInfoKHR.sType );
@@ -5241,9 +5452,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT const & importMemoryHostPointerInfoEXT ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT const & importMemoryHostPointerInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, importMemoryHostPointerInfoEXT.sType );
@@ -5258,9 +5467,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR const & importMemoryWin32HandleInfoKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR const & importMemoryWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoKHR.sType );
@@ -5277,8 +5484,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV const & importMemoryWin32HandleInfoNV )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV const & importMemoryWin32HandleInfoNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoNV.sType );
@@ -5294,8 +5500,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA const &
- importMemoryZirconHandleInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA const & importMemoryZirconHandleInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, importMemoryZirconHandleInfoFUCHSIA.sType );
@@ -5307,11 +5512,71 @@ namespace std
};
# endif /*VK_USE_PLATFORM_FUCHSIA*/
+# if defined( VK_USE_PLATFORM_METAL_EXT )
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::ImportMetalBufferInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMetalBufferInfoEXT const & importMetalBufferInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, importMetalBufferInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, importMetalBufferInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, importMetalBufferInfoEXT.mtlBuffer );
+ return seed;
+ }
+ };
+# endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+# if defined( VK_USE_PLATFORM_METAL_EXT )
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::ImportMetalIOSurfaceInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMetalIOSurfaceInfoEXT const & importMetalIOSurfaceInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, importMetalIOSurfaceInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, importMetalIOSurfaceInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, importMetalIOSurfaceInfoEXT.ioSurface );
+ return seed;
+ }
+ };
+# endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+# if defined( VK_USE_PLATFORM_METAL_EXT )
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::ImportMetalSharedEventInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMetalSharedEventInfoEXT const & importMetalSharedEventInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, importMetalSharedEventInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, importMetalSharedEventInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, importMetalSharedEventInfoEXT.mtlSharedEvent );
+ return seed;
+ }
+ };
+# endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+# if defined( VK_USE_PLATFORM_METAL_EXT )
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::ImportMetalTextureInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMetalTextureInfoEXT const & importMetalTextureInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, importMetalTextureInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, importMetalTextureInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, importMetalTextureInfoEXT.plane );
+ VULKAN_HPP_HASH_COMBINE( seed, importMetalTextureInfoEXT.mtlTexture );
+ return seed;
+ }
+ };
+# endif /*VK_USE_PLATFORM_METAL_EXT*/
+
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR const & importSemaphoreFdInfoKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR const & importSemaphoreFdInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreFdInfoKHR.sType );
@@ -5328,8 +5593,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR const &
- importSemaphoreWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR const & importSemaphoreWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreWin32HandleInfoKHR.sType );
@@ -5348,8 +5612,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA const &
- importSemaphoreZirconHandleInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA const & importSemaphoreZirconHandleInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreZirconHandleInfoFUCHSIA.sType );
@@ -5366,8 +5630,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV const & indirectCommandsLayoutTokenNV )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV const & indirectCommandsLayoutTokenNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.sType );
@@ -5392,8 +5655,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const &
- indirectCommandsLayoutCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & indirectCommandsLayoutCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.sType );
@@ -5411,8 +5673,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL const &
- initializePerformanceApiInfoINTEL ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL const & initializePerformanceApiInfoINTEL ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, initializePerformanceApiInfoINTEL.sType );
@@ -5425,9 +5686,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference const & inputAttachmentAspectReference ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference const & inputAttachmentAspectReference ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, inputAttachmentAspectReference.subpass );
@@ -5440,8 +5699,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::InstanceCreateInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & instanceCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & instanceCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, instanceCreateInfo.sType );
@@ -5492,8 +5750,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & macOSSurfaceCreateInfoMVK ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & macOSSurfaceCreateInfoMVK ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, macOSSurfaceCreateInfoMVK.sType );
@@ -5508,8 +5765,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::MappedMemoryRange>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::MappedMemoryRange const & mappedMemoryRange ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MappedMemoryRange const & mappedMemoryRange ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, mappedMemoryRange.sType );
@@ -5524,8 +5780,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo const & memoryAllocateFlagsInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo const & memoryAllocateFlagsInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateFlagsInfo.sType );
@@ -5539,8 +5794,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::MemoryAllocateInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & memoryAllocateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & memoryAllocateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateInfo.sType );
@@ -5568,8 +5822,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo>
{
- std::size_t operator()(
- VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo const & memoryDedicatedAllocateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo const & memoryDedicatedAllocateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedAllocateInfo.sType );
@@ -5583,8 +5836,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements>
{
- std::size_t operator()(
- VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements const & memoryDedicatedRequirements ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements const & memoryDedicatedRequirements ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedRequirements.sType );
@@ -5598,8 +5850,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR const & memoryFdPropertiesKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR const & memoryFdPropertiesKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, memoryFdPropertiesKHR.sType );
@@ -5613,8 +5864,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID const &
- memoryGetAndroidHardwareBufferInfoANDROID ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID const & memoryGetAndroidHardwareBufferInfoANDROID ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, memoryGetAndroidHardwareBufferInfoANDROID.sType );
@@ -5628,8 +5879,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR const & memoryGetFdInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR const & memoryGetFdInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, memoryGetFdInfoKHR.sType );
@@ -5643,8 +5893,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV const & memoryGetRemoteAddressInfoNV )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV const & memoryGetRemoteAddressInfoNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, memoryGetRemoteAddressInfoNV.sType );
@@ -5659,8 +5908,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR>
{
- std::size_t operator()(
- VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR const & memoryGetWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR const & memoryGetWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, memoryGetWin32HandleInfoKHR.sType );
@@ -5676,8 +5924,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA const &
- memoryGetZirconHandleInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA const & memoryGetZirconHandleInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, memoryGetZirconHandleInfoFUCHSIA.sType );
@@ -5704,9 +5951,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT const & memoryHostPointerPropertiesEXT ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT const & memoryHostPointerPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, memoryHostPointerPropertiesEXT.sType );
@@ -5719,8 +5964,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo const &
- memoryOpaqueCaptureAddressAllocateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo const & memoryOpaqueCaptureAddressAllocateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, memoryOpaqueCaptureAddressAllocateInfo.sType );
@@ -5733,8 +5978,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT const & memoryPriorityAllocateInfoEXT )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT const & memoryPriorityAllocateInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, memoryPriorityAllocateInfoEXT.sType );
@@ -5747,8 +5991,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::MemoryRequirements>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::MemoryRequirements const & memoryRequirements ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryRequirements const & memoryRequirements ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, memoryRequirements.size );
@@ -5761,8 +6004,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::MemoryRequirements2>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::MemoryRequirements2 const & memoryRequirements2 ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryRequirements2 const & memoryRequirements2 ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, memoryRequirements2.sType );
@@ -5788,9 +6030,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR const & memoryWin32HandlePropertiesKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR const & memoryWin32HandlePropertiesKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, memoryWin32HandlePropertiesKHR.sType );
@@ -5805,8 +6045,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA const &
- memoryZirconHandlePropertiesFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA const & memoryZirconHandlePropertiesFUCHSIA ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, memoryZirconHandlePropertiesFUCHSIA.sType );
@@ -5821,8 +6060,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & metalSurfaceCreateInfoEXT ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & metalSurfaceCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, metalSurfaceCreateInfoEXT.sType );
@@ -5835,10 +6073,68 @@ namespace std
# endif /*VK_USE_PLATFORM_METAL_EXT*/
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT const & micromapBuildSizesInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.micromapSize );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.buildScratchSize );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.discardable );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & micromapCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.createFlags );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.buffer );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.offset );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.size );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.type );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.deviceAddress );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::MicromapTriangleEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapTriangleEXT const & micromapTriangleEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, micromapTriangleEXT.dataOffset );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapTriangleEXT.subdivisionLevel );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapTriangleEXT.format );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT const & micromapVersionInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, micromapVersionInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapVersionInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, micromapVersionInfoEXT.pVersionData );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT const & multiDrawIndexedInfoEXT ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT const & multiDrawIndexedInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, multiDrawIndexedInfoEXT.firstIndex );
@@ -5863,8 +6159,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT const & multisamplePropertiesEXT ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT const & multisamplePropertiesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, multisamplePropertiesEXT.sType );
@@ -5875,10 +6170,24 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT>
+ {
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT const & multisampledRenderToSingleSampledInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, multisampledRenderToSingleSampledInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, multisampledRenderToSingleSampledInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, multisampledRenderToSingleSampledInfoEXT.multisampledRenderToSingleSampledEnable );
+ VULKAN_HPP_HASH_COMBINE( seed, multisampledRenderToSingleSampledInfoEXT.rasterizationSamples );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX const &
- multiviewPerViewAttributesInfoNVX ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX const & multiviewPerViewAttributesInfoNVX ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, multiviewPerViewAttributesInfoNVX.sType );
@@ -5890,30 +6199,105 @@ namespace std
};
template <>
- struct hash<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE>
+ struct hash<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE const & mutableDescriptorTypeListVALVE ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT const & mutableDescriptorTypeListEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeListEXT.descriptorTypeCount );
+ VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeListEXT.pDescriptorTypes );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoEXT const & mutableDescriptorTypeCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeCreateInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeCreateInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeCreateInfoEXT.mutableDescriptorTypeListCount );
+ VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeCreateInfoEXT.pMutableDescriptorTypeLists );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV const & opticalFlowExecuteInfoNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.flags );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.regionCount );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.pRegions );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV const & opticalFlowImageFormatInfoNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatInfoNV.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatInfoNV.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatInfoNV.usage );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV const & opticalFlowImageFormatPropertiesNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatPropertiesNV.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatPropertiesNV.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatPropertiesNV.format );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & opticalFlowSessionCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeListVALVE.descriptorTypeCount );
- VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeListVALVE.pDescriptorTypes );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.width );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.height );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.imageFormat );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.flowVectorFormat );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.costFormat );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.outputGridSize );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.hintGridSize );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.performanceLevel );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.flags );
return seed;
}
};
template <>
- struct hash<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoVALVE>
+ struct hash<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoVALVE const &
- mutableDescriptorTypeCreateInfoVALVE ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV const & opticalFlowSessionCreatePrivateDataInfoNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeCreateInfoVALVE.sType );
- VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeCreateInfoVALVE.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeCreateInfoVALVE.mutableDescriptorTypeListCount );
- VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeCreateInfoVALVE.pMutableDescriptorTypeLists );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.id );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.size );
+ VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.pPrivateData );
return seed;
}
};
@@ -5921,8 +6305,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE const & pastPresentationTimingGOOGLE )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE const & pastPresentationTimingGOOGLE ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.presentID );
@@ -5937,8 +6320,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const &
- performanceConfigurationAcquireInfoINTEL ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & performanceConfigurationAcquireInfoINTEL ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, performanceConfigurationAcquireInfoINTEL.sType );
@@ -5951,8 +6334,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR const &
- performanceCounterDescriptionKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR const & performanceCounterDescriptionKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.sType );
@@ -5977,8 +6359,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::PerformanceCounterKHR const & performanceCounterKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceCounterKHR const & performanceCounterKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.sType );
@@ -5997,8 +6378,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const & performanceMarkerInfoINTEL ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const & performanceMarkerInfoINTEL ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, performanceMarkerInfoINTEL.sType );
@@ -6011,8 +6391,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL const & performanceOverrideInfoINTEL )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL const & performanceOverrideInfoINTEL ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.sType );
@@ -6027,8 +6406,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR const & performanceQuerySubmitInfoKHR )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR const & performanceQuerySubmitInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, performanceQuerySubmitInfoKHR.sType );
@@ -6041,8 +6419,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL const &
- performanceStreamMarkerInfoINTEL ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL const & performanceStreamMarkerInfoINTEL ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, performanceStreamMarkerInfoINTEL.sType );
@@ -6055,8 +6432,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures const &
- physicalDevice16BitStorageFeatures ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures const & physicalDevice16BitStorageFeatures ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.sType );
@@ -6072,8 +6448,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT const &
- physicalDevice4444FormatsFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT const & physicalDevice4444FormatsFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDevice4444FormatsFeaturesEXT.sType );
@@ -6087,8 +6462,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures const &
- physicalDevice8BitStorageFeatures ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures const & physicalDevice8BitStorageFeatures ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDevice8BitStorageFeatures.sType );
@@ -6103,8 +6477,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT const &
- physicalDeviceASTCDecodeFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT const & physicalDeviceASTCDecodeFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceASTCDecodeFeaturesEXT.sType );
@@ -6117,20 +6490,17 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR const &
- physicalDeviceAccelerationStructureFeaturesKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR const & physicalDeviceAccelerationStructureFeaturesKHR ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.pNext );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.accelerationStructure );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceAccelerationStructureFeaturesKHR.accelerationStructureCaptureReplay );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceAccelerationStructureFeaturesKHR.accelerationStructureIndirectBuild );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.accelerationStructureCaptureReplay );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.accelerationStructureIndirectBuild );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.accelerationStructureHostCommands );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceAccelerationStructureFeaturesKHR.descriptorBindingAccelerationStructureUpdateAfterBind );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.descriptorBindingAccelerationStructureUpdateAfterBind );
return seed;
}
};
@@ -6138,8 +6508,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR const &
- physicalDeviceAccelerationStructurePropertiesKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR const & physicalDeviceAccelerationStructurePropertiesKHR )
+ const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.sType );
@@ -6147,17 +6517,53 @@ namespace std
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxGeometryCount );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxInstanceCount );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxPrimitiveCount );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceAccelerationStructurePropertiesKHR.maxPerStageDescriptorAccelerationStructures );
- VULKAN_HPP_HASH_COMBINE(
- seed,
- physicalDeviceAccelerationStructurePropertiesKHR.maxPerStageDescriptorUpdateAfterBindAccelerationStructures );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceAccelerationStructurePropertiesKHR.maxDescriptorSetAccelerationStructures );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceAccelerationStructurePropertiesKHR.maxDescriptorSetUpdateAfterBindAccelerationStructures );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceAccelerationStructurePropertiesKHR.minAccelerationStructureScratchOffsetAlignment );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxPerStageDescriptorAccelerationStructures );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxPerStageDescriptorUpdateAfterBindAccelerationStructures );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxDescriptorSetAccelerationStructures );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxDescriptorSetUpdateAfterBindAccelerationStructures );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.minAccelerationStructureScratchOffsetAlignment );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT const & physicalDeviceAddressBindingReportFeaturesEXT ) const
+ VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAddressBindingReportFeaturesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAddressBindingReportFeaturesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAddressBindingReportFeaturesEXT.reportAddressBinding );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceAmigoProfilingFeaturesSEC>
+ {
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceAmigoProfilingFeaturesSEC const & physicalDeviceAmigoProfilingFeaturesSEC ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAmigoProfilingFeaturesSEC.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAmigoProfilingFeaturesSEC.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAmigoProfilingFeaturesSEC.amigoProfiling );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const &
+ physicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT.attachmentFeedbackLoopLayout );
return seed;
}
};
@@ -6165,8 +6571,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT const &
- physicalDeviceBlendOperationAdvancedFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()(
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & physicalDeviceBlendOperationAdvancedFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedFeaturesEXT.sType );
@@ -6179,19 +6585,16 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const &
- physicalDeviceBlendOperationAdvancedPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & physicalDeviceBlendOperationAdvancedPropertiesEXT )
+ const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendMaxColorAttachments );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendMaxColorAttachments );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendIndependentBlend );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendNonPremultipliedSrcColor );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendNonPremultipliedDstColor );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendNonPremultipliedSrcColor );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendNonPremultipliedDstColor );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendCorrelatedOverlap );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendAllOperations );
return seed;
@@ -6201,8 +6604,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT const &
- physicalDeviceBorderColorSwizzleFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT const & physicalDeviceBorderColorSwizzleFeaturesEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBorderColorSwizzleFeaturesEXT.sType );
@@ -6216,8 +6619,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures const &
- physicalDeviceBufferDeviceAddressFeatures ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures const & physicalDeviceBufferDeviceAddressFeatures ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeatures.sType );
@@ -6232,8 +6635,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT const &
- physicalDeviceBufferDeviceAddressFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT const & physicalDeviceBufferDeviceAddressFeaturesEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeaturesEXT.sType );
@@ -6248,8 +6651,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD const &
- physicalDeviceCoherentMemoryFeaturesAMD ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD const & physicalDeviceCoherentMemoryFeaturesAMD ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoherentMemoryFeaturesAMD.sType );
@@ -6262,8 +6665,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT const &
- physicalDeviceColorWriteEnableFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT const & physicalDeviceColorWriteEnableFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceColorWriteEnableFeaturesEXT.sType );
@@ -6276,8 +6679,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV const &
- physicalDeviceComputeShaderDerivativesFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV const & physicalDeviceComputeShaderDerivativesFeaturesNV )
+ const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesNV.sType );
@@ -6291,8 +6694,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT const &
- physicalDeviceConditionalRenderingFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT const & physicalDeviceConditionalRenderingFeaturesEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConditionalRenderingFeaturesEXT.sType );
@@ -6313,20 +6716,14 @@ namespace std
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.pNext );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.primitiveOverestimationSize );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceConservativeRasterizationPropertiesEXT.maxExtraPrimitiveOverestimationSize );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceConservativeRasterizationPropertiesEXT.extraPrimitiveOverestimationSizeGranularity );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.maxExtraPrimitiveOverestimationSize );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.extraPrimitiveOverestimationSizeGranularity );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.primitiveUnderestimation );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceConservativeRasterizationPropertiesEXT.conservativePointAndLineRasterization );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceConservativeRasterizationPropertiesEXT.degenerateTrianglesRasterized );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.conservativePointAndLineRasterization );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.degenerateTrianglesRasterized );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.degenerateLinesRasterized );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceConservativeRasterizationPropertiesEXT.fullyCoveredFragmentShaderInputVariable );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceConservativeRasterizationPropertiesEXT.conservativeRasterizationPostDepthCoverage );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.fullyCoveredFragmentShaderInputVariable );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.conservativeRasterizationPostDepthCoverage );
return seed;
}
};
@@ -6334,8 +6731,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV const &
- physicalDeviceCooperativeMatrixFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV const & physicalDeviceCooperativeMatrixFeaturesNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesNV.sType );
@@ -6349,8 +6746,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV const &
- physicalDeviceCooperativeMatrixPropertiesNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV const & physicalDeviceCooperativeMatrixPropertiesNV ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixPropertiesNV.sType );
@@ -6363,8 +6760,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV const &
- physicalDeviceCornerSampledImageFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV const & physicalDeviceCornerSampledImageFeaturesNV ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCornerSampledImageFeaturesNV.sType );
@@ -6377,8 +6774,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV const &
- physicalDeviceCoverageReductionModeFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV const & physicalDeviceCoverageReductionModeFeaturesNV ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoverageReductionModeFeaturesNV.sType );
@@ -6391,8 +6788,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT const &
- physicalDeviceCustomBorderColorFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT const & physicalDeviceCustomBorderColorFeaturesEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorFeaturesEXT.sType );
@@ -6406,8 +6803,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT const &
- physicalDeviceCustomBorderColorPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT const & physicalDeviceCustomBorderColorPropertiesEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorPropertiesEXT.sType );
@@ -6426,8 +6823,21 @@ namespace std
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDedicatedAllocationImageAliasingFeaturesNV.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDedicatedAllocationImageAliasingFeaturesNV.pNext );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceDedicatedAllocationImageAliasingFeaturesNV.dedicatedAllocationImageAliasing );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDedicatedAllocationImageAliasingFeaturesNV.dedicatedAllocationImageAliasing );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesEXT const & physicalDeviceDepthClampZeroOneFeaturesEXT ) const
+ VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClampZeroOneFeaturesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClampZeroOneFeaturesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClampZeroOneFeaturesEXT.depthClampZeroOne );
return seed;
}
};
@@ -6435,8 +6845,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT const &
- physicalDeviceDepthClipControlFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT const & physicalDeviceDepthClipControlFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClipControlFeaturesEXT.sType );
@@ -6449,8 +6859,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT const &
- physicalDeviceDepthClipEnableFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT const & physicalDeviceDepthClipEnableFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClipEnableFeaturesEXT.sType );
@@ -6463,8 +6873,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties const &
- physicalDeviceDepthStencilResolveProperties ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties const & physicalDeviceDepthStencilResolveProperties ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthStencilResolveProperties.sType );
@@ -6480,49 +6890,31 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures const &
- physicalDeviceDescriptorIndexingFeatures ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures const & physicalDeviceDescriptorIndexingFeatures ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.pNext );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceDescriptorIndexingFeatures.shaderInputAttachmentArrayDynamicIndexing );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceDescriptorIndexingFeatures.shaderUniformTexelBufferArrayDynamicIndexing );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceDescriptorIndexingFeatures.shaderStorageTexelBufferArrayDynamicIndexing );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceDescriptorIndexingFeatures.shaderUniformBufferArrayNonUniformIndexing );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceDescriptorIndexingFeatures.shaderSampledImageArrayNonUniformIndexing );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceDescriptorIndexingFeatures.shaderStorageBufferArrayNonUniformIndexing );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceDescriptorIndexingFeatures.shaderStorageImageArrayNonUniformIndexing );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceDescriptorIndexingFeatures.shaderInputAttachmentArrayNonUniformIndexing );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceDescriptorIndexingFeatures.shaderUniformTexelBufferArrayNonUniformIndexing );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceDescriptorIndexingFeatures.shaderStorageTexelBufferArrayNonUniformIndexing );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceDescriptorIndexingFeatures.descriptorBindingUniformBufferUpdateAfterBind );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceDescriptorIndexingFeatures.descriptorBindingSampledImageUpdateAfterBind );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceDescriptorIndexingFeatures.descriptorBindingStorageImageUpdateAfterBind );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceDescriptorIndexingFeatures.descriptorBindingStorageBufferUpdateAfterBind );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingUniformTexelBufferUpdateAfterBind );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingStorageTexelBufferUpdateAfterBind );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceDescriptorIndexingFeatures.descriptorBindingUpdateUnusedWhilePending );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderInputAttachmentArrayDynamicIndexing );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderUniformTexelBufferArrayDynamicIndexing );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderStorageTexelBufferArrayDynamicIndexing );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderUniformBufferArrayNonUniformIndexing );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderSampledImageArrayNonUniformIndexing );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderStorageBufferArrayNonUniformIndexing );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderStorageImageArrayNonUniformIndexing );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderInputAttachmentArrayNonUniformIndexing );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderUniformTexelBufferArrayNonUniformIndexing );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderStorageTexelBufferArrayNonUniformIndexing );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingUniformBufferUpdateAfterBind );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingSampledImageUpdateAfterBind );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingStorageImageUpdateAfterBind );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingStorageBufferUpdateAfterBind );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingUniformTexelBufferUpdateAfterBind );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingStorageTexelBufferUpdateAfterBind );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingUpdateUnusedWhilePending );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingPartiallyBound );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceDescriptorIndexingFeatures.descriptorBindingVariableDescriptorCount );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingVariableDescriptorCount );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.runtimeDescriptorArray );
return seed;
}
@@ -6531,55 +6923,50 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties const &
- physicalDeviceDescriptorIndexingProperties ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties const & physicalDeviceDescriptorIndexingProperties ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.pNext );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceDescriptorIndexingProperties.maxUpdateAfterBindDescriptorsInAllPools );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceDescriptorIndexingProperties.shaderUniformBufferArrayNonUniformIndexingNative );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceDescriptorIndexingProperties.shaderSampledImageArrayNonUniformIndexingNative );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceDescriptorIndexingProperties.shaderStorageBufferArrayNonUniformIndexingNative );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceDescriptorIndexingProperties.shaderStorageImageArrayNonUniformIndexingNative );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceDescriptorIndexingProperties.shaderInputAttachmentArrayNonUniformIndexingNative );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxUpdateAfterBindDescriptorsInAllPools );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.shaderUniformBufferArrayNonUniformIndexingNative );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.shaderSampledImageArrayNonUniformIndexingNative );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.shaderStorageBufferArrayNonUniformIndexingNative );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.shaderStorageImageArrayNonUniformIndexingNative );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.shaderInputAttachmentArrayNonUniformIndexingNative );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.robustBufferAccessUpdateAfterBind );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.quadDivergentImplicitLod );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindSamplers );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindUniformBuffers );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindStorageBuffers );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindSampledImages );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindStorageImages );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindInputAttachments );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindSamplers );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindUniformBuffers );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindStorageBuffers );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindSampledImages );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindStorageImages );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindInputAttachments );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxPerStageUpdateAfterBindResources );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindSamplers );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindUniformBuffers );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindStorageBuffers );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindSampledImages );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindStorageImages );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindInputAttachments );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindSamplers );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindUniformBuffers );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindStorageBuffers );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindSampledImages );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindStorageImages );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindInputAttachments );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE>
+ {
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & physicalDeviceDescriptorSetHostMappingFeaturesVALVE ) const
+ VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorSetHostMappingFeaturesVALVE.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorSetHostMappingFeaturesVALVE.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorSetHostMappingFeaturesVALVE.descriptorSetHostMapping );
return seed;
}
};
@@ -6587,8 +6974,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const &
- physicalDeviceDeviceGeneratedCommandsFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()(
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & physicalDeviceDeviceGeneratedCommandsFeaturesNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsFeaturesNV.sType );
@@ -6601,8 +6988,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const &
- physicalDeviceDeviceGeneratedCommandsPropertiesNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & physicalDeviceDeviceGeneratedCommandsPropertiesNV )
+ const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.sType );
@@ -6612,14 +6999,10 @@ namespace std
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.maxIndirectCommandsTokenCount );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.maxIndirectCommandsStreamCount );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.maxIndirectCommandsTokenOffset );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceDeviceGeneratedCommandsPropertiesNV.maxIndirectCommandsStreamStride );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.minSequencesCountBufferOffsetAlignment );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.minSequencesIndexBufferOffsetAlignment );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.minIndirectCommandsBufferOffsetAlignment );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.maxIndirectCommandsStreamStride );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.minSequencesCountBufferOffsetAlignment );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.minSequencesIndexBufferOffsetAlignment );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.minIndirectCommandsBufferOffsetAlignment );
return seed;
}
};
@@ -6627,8 +7010,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT const &
- physicalDeviceDeviceMemoryReportFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT const & physicalDeviceDeviceMemoryReportFeaturesEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceMemoryReportFeaturesEXT.sType );
@@ -6641,8 +7024,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV const &
- physicalDeviceDiagnosticsConfigFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV const & physicalDeviceDiagnosticsConfigFeaturesNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDiagnosticsConfigFeaturesNV.sType );
@@ -6655,8 +7038,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT const &
- physicalDeviceDiscardRectanglePropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT const & physicalDeviceDiscardRectanglePropertiesEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDiscardRectanglePropertiesEXT.sType );
@@ -6669,9 +7052,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties const & physicalDeviceDriverProperties ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties const & physicalDeviceDriverProperties ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDriverProperties.sType );
@@ -6693,9 +7074,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT const & physicalDeviceDrmPropertiesEXT ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT const & physicalDeviceDrmPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.sType );
@@ -6713,8 +7092,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures const &
- physicalDeviceDynamicRenderingFeatures ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures const & physicalDeviceDynamicRenderingFeatures ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingFeatures.sType );
@@ -6727,8 +7106,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV const &
- physicalDeviceExclusiveScissorFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV const & physicalDeviceExclusiveScissorFeaturesNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExclusiveScissorFeaturesNV.sType );
@@ -6741,16 +7120,73 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT const &
- physicalDeviceExtendedDynamicState2FeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT const & physicalDeviceExtendedDynamicState2FeaturesEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState2FeaturesEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState2FeaturesEXT.pNext );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState2FeaturesEXT.extendedDynamicState2 );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState2FeaturesEXT.extendedDynamicState2LogicOp );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceExtendedDynamicState2FeaturesEXT.extendedDynamicState2PatchControlPoints );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState2FeaturesEXT.extendedDynamicState2PatchControlPoints );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT const & physicalDeviceExtendedDynamicState3FeaturesEXT ) const
+ VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3TessellationDomainOrigin );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3DepthClampEnable );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3PolygonMode );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3RasterizationSamples );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3SampleMask );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3AlphaToCoverageEnable );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3AlphaToOneEnable );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3LogicOpEnable );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ColorBlendEnable );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ColorBlendEquation );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ColorWriteMask );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3RasterizationStream );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ConservativeRasterizationMode );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ExtraPrimitiveOverestimationSize );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3DepthClipEnable );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3SampleLocationsEnable );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ColorBlendAdvanced );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ProvokingVertexMode );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3LineRasterizationMode );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3LineStippleEnable );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3DepthClipNegativeOneToOne );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ViewportWScalingEnable );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ViewportSwizzle );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageToColorEnable );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageToColorLocation );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageModulationMode );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageModulationTableEnable );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageModulationTable );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageReductionMode );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3RepresentativeFragmentTestEnable );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ShadingRateImageEnable );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT const & physicalDeviceExtendedDynamicState3PropertiesEXT )
+ const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3PropertiesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3PropertiesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3PropertiesEXT.dynamicPrimitiveTopologyUnrestricted );
return seed;
}
};
@@ -6758,8 +7194,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT const &
- physicalDeviceExtendedDynamicStateFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT const & physicalDeviceExtendedDynamicStateFeaturesEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicStateFeaturesEXT.sType );
@@ -6772,8 +7208,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo const &
- physicalDeviceExternalBufferInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo const & physicalDeviceExternalBufferInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalBufferInfo.sType );
@@ -6788,9 +7223,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo const & physicalDeviceExternalFenceInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo const & physicalDeviceExternalFenceInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFenceInfo.sType );
@@ -6803,8 +7236,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo const &
- physicalDeviceExternalImageFormatInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo const & physicalDeviceExternalImageFormatInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalImageFormatInfo.sType );
@@ -6817,8 +7250,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT const &
- physicalDeviceExternalMemoryHostPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT const & physicalDeviceExternalMemoryHostPropertiesEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryHostPropertiesEXT.sType );
@@ -6831,8 +7264,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV const &
- physicalDeviceExternalMemoryRDMAFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV const & physicalDeviceExternalMemoryRDMAFeaturesNV ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryRDMAFeaturesNV.sType );
@@ -6845,8 +7278,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo const &
- physicalDeviceExternalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo const & physicalDeviceExternalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalSemaphoreInfo.sType );
@@ -6857,10 +7289,23 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT const & physicalDeviceFaultFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFaultFeaturesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFaultFeaturesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFaultFeaturesEXT.deviceFault );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFaultFeaturesEXT.deviceFaultVendorBinary );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 const & physicalDeviceFeatures2 ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 const & physicalDeviceFeatures2 ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures2.sType );
@@ -6873,8 +7318,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties const &
- physicalDeviceFloatControlsProperties ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties const & physicalDeviceFloatControlsProperties ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.sType );
@@ -6903,8 +7348,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT const &
- physicalDeviceFragmentDensityMap2FeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT const & physicalDeviceFragmentDensityMap2FeaturesEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2FeaturesEXT.sType );
@@ -6917,18 +7362,16 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT const &
- physicalDeviceFragmentDensityMap2PropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT const & physicalDeviceFragmentDensityMap2PropertiesEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2PropertiesEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2PropertiesEXT.pNext );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2PropertiesEXT.subsampledLoads );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceFragmentDensityMap2PropertiesEXT.subsampledCoarseReconstructionEarlyAccess );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2PropertiesEXT.subsampledCoarseReconstructionEarlyAccess );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2PropertiesEXT.maxSubsampledArrayLayers );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceFragmentDensityMap2PropertiesEXT.maxDescriptorSetSubsampledSamplers );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2PropertiesEXT.maxDescriptorSetSubsampledSamplers );
return seed;
}
};
@@ -6936,16 +7379,15 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT const &
- physicalDeviceFragmentDensityMapFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT const & physicalDeviceFragmentDensityMapFeaturesEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapFeaturesEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapFeaturesEXT.pNext );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapFeaturesEXT.fragmentDensityMap );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapFeaturesEXT.fragmentDensityMapDynamic );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceFragmentDensityMapFeaturesEXT.fragmentDensityMapNonSubsampledImages );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapFeaturesEXT.fragmentDensityMapNonSubsampledImages );
return seed;
}
};
@@ -6953,8 +7395,9 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const &
- physicalDeviceFragmentDensityMapOffsetFeaturesQCOM ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & physicalDeviceFragmentDensityMapOffsetFeaturesQCOM ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapOffsetFeaturesQCOM.sType );
@@ -6973,8 +7416,7 @@ namespace std
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapOffsetPropertiesQCOM.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapOffsetPropertiesQCOM.pNext );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceFragmentDensityMapOffsetPropertiesQCOM.fragmentDensityOffsetGranularity );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapOffsetPropertiesQCOM.fragmentDensityOffsetGranularity );
return seed;
}
};
@@ -6982,8 +7424,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT const &
- physicalDeviceFragmentDensityMapPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT const & physicalDeviceFragmentDensityMapPropertiesEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapPropertiesEXT.sType );
@@ -6996,15 +7438,30 @@ namespace std
};
template <>
- struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV const &
- physicalDeviceFragmentShaderBarycentricFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & physicalDeviceFragmentShaderBarycentricFeaturesKHR ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricFeaturesNV.sType );
- VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricFeaturesNV.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricFeaturesNV.fragmentShaderBarycentric );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricFeaturesKHR.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricFeaturesKHR.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricFeaturesKHR.fragmentShaderBarycentric );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricPropertiesKHR>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const &
+ physicalDeviceFragmentShaderBarycentricPropertiesKHR ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricPropertiesKHR.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricPropertiesKHR.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricPropertiesKHR.triStripVertexOrderIndependentOfProvokingVertex );
return seed;
}
};
@@ -7012,16 +7469,15 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT const &
- physicalDeviceFragmentShaderInterlockFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & physicalDeviceFragmentShaderInterlockFeaturesEXT )
+ const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderInterlockFeaturesEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderInterlockFeaturesEXT.pNext );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderInterlockFeaturesEXT.fragmentShaderSampleInterlock );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderInterlockFeaturesEXT.fragmentShaderPixelInterlock );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceFragmentShaderInterlockFeaturesEXT.fragmentShaderShadingRateInterlock );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderInterlockFeaturesEXT.fragmentShaderShadingRateInterlock );
return seed;
}
};
@@ -7029,16 +7485,15 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const &
- physicalDeviceFragmentShadingRateEnumsFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & physicalDeviceFragmentShadingRateEnumsFeaturesNV )
+ const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsFeaturesNV.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsFeaturesNV.pNext );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsFeaturesNV.fragmentShadingRateEnums );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsFeaturesNV.supersampleFragmentShadingRates );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceFragmentShadingRateEnumsFeaturesNV.noInvocationFragmentShadingRates );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsFeaturesNV.noInvocationFragmentShadingRates );
return seed;
}
};
@@ -7046,14 +7501,14 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const &
- physicalDeviceFragmentShadingRateEnumsPropertiesNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & physicalDeviceFragmentShadingRateEnumsPropertiesNV ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsPropertiesNV.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsPropertiesNV.pNext );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceFragmentShadingRateEnumsPropertiesNV.maxFragmentShadingRateInvocationCount );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsPropertiesNV.maxFragmentShadingRateInvocationCount );
return seed;
}
};
@@ -7061,8 +7516,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR const &
- physicalDeviceFragmentShadingRateFeaturesKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR const & physicalDeviceFragmentShadingRateFeaturesKHR ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateFeaturesKHR.sType );
@@ -7077,8 +7532,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR const &
- physicalDeviceFragmentShadingRateKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR const & physicalDeviceFragmentShadingRateKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateKHR.sType );
@@ -7092,42 +7546,29 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR const &
- physicalDeviceFragmentShadingRatePropertiesKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR const & physicalDeviceFragmentShadingRatePropertiesKHR ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.pNext );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceFragmentShadingRatePropertiesKHR.minFragmentShadingRateAttachmentTexelSize );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentShadingRateAttachmentTexelSize );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentShadingRateAttachmentTexelSizeAspectRatio );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceFragmentShadingRatePropertiesKHR.primitiveFragmentShadingRateWithMultipleViewports );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.minFragmentShadingRateAttachmentTexelSize );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentShadingRateAttachmentTexelSize );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentShadingRateAttachmentTexelSizeAspectRatio );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.primitiveFragmentShadingRateWithMultipleViewports );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.layeredShadingRateAttachments );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateNonTrivialCombinerOps );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateNonTrivialCombinerOps );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentSize );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentSizeAspectRatio );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentShadingRateCoverageSamples );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentShadingRateRasterizationSamples );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithShaderDepthStencilWrites );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentShadingRateCoverageSamples );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentShadingRateRasterizationSamples );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithShaderDepthStencilWrites );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithSampleMask );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithShaderSampleMask );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithConservativeRasterization );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithFragmentShaderInterlock );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithCustomSampleLocations );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateStrictMultiplyCombiner );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithShaderSampleMask );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithConservativeRasterization );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithFragmentShaderInterlock );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithCustomSampleLocations );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateStrictMultiplyCombiner );
return seed;
}
};
@@ -7135,8 +7576,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR const &
- physicalDeviceGlobalPriorityQueryFeaturesKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & physicalDeviceGlobalPriorityQueryFeaturesKHR ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGlobalPriorityQueryFeaturesKHR.sType );
@@ -7147,12 +7588,41 @@ namespace std
};
template <>
- struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties const & physicalDeviceGroupProperties )
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & physicalDeviceGraphicsPipelineLibraryFeaturesEXT )
const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGraphicsPipelineLibraryFeaturesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGraphicsPipelineLibraryFeaturesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGraphicsPipelineLibraryFeaturesEXT.graphicsPipelineLibrary );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT>
+ {
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & physicalDeviceGraphicsPipelineLibraryPropertiesEXT ) const
+ VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGraphicsPipelineLibraryPropertiesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGraphicsPipelineLibraryPropertiesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGraphicsPipelineLibraryPropertiesEXT.graphicsPipelineLibraryFastLinking );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGraphicsPipelineLibraryPropertiesEXT.graphicsPipelineLibraryIndependentInterpolationDecoration );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties const & physicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGroupProperties.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGroupProperties.pNext );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGroupProperties.physicalDeviceCount );
@@ -7168,8 +7638,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures const &
- physicalDeviceHostQueryResetFeatures ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures const & physicalDeviceHostQueryResetFeatures ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostQueryResetFeatures.sType );
@@ -7182,8 +7651,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties const & physicalDeviceIDProperties ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties const & physicalDeviceIDProperties ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIDProperties.sType );
@@ -7207,10 +7675,53 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT>
+ {
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT const & physicalDeviceImage2DViewOf3DFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImage2DViewOf3DFeaturesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImage2DViewOf3DFeaturesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImage2DViewOf3DFeaturesEXT.image2DViewOf3D );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImage2DViewOf3DFeaturesEXT.sampler2DViewOf3D );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlFeaturesEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlFeaturesEXT const & physicalDeviceImageCompressionControlFeaturesEXT )
+ const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageCompressionControlFeaturesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageCompressionControlFeaturesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageCompressionControlFeaturesEXT.imageCompressionControl );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const &
+ physicalDeviceImageCompressionControlSwapchainFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageCompressionControlSwapchainFeaturesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageCompressionControlSwapchainFeaturesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageCompressionControlSwapchainFeaturesEXT.imageCompressionControlSwapchain );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT const &
- physicalDeviceImageDrmFormatModifierInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT const & physicalDeviceImageDrmFormatModifierInfoEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageDrmFormatModifierInfoEXT.sType );
@@ -7226,9 +7737,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 const & physicalDeviceImageFormatInfo2 ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 const & physicalDeviceImageFormatInfo2 ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageFormatInfo2.sType );
@@ -7243,10 +7752,43 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingFeaturesQCOM>
+ {
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingFeaturesQCOM const & physicalDeviceImageProcessingFeaturesQCOM ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingFeaturesQCOM.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingFeaturesQCOM.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingFeaturesQCOM.textureSampleWeighted );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingFeaturesQCOM.textureBoxFilter );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingFeaturesQCOM.textureBlockMatch );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingPropertiesQCOM>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingPropertiesQCOM const & physicalDeviceImageProcessingPropertiesQCOM ) const
+ VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingPropertiesQCOM.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingPropertiesQCOM.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingPropertiesQCOM.maxWeightFilterPhases );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingPropertiesQCOM.maxWeightFilterDimension );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingPropertiesQCOM.maxBlockMatchRegion );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingPropertiesQCOM.maxBoxFilterBlockSize );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures const &
- physicalDeviceImageRobustnessFeatures ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures const & physicalDeviceImageRobustnessFeatures ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageRobustnessFeatures.sType );
@@ -7259,8 +7801,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT const &
- physicalDeviceImageViewImageFormatInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT const & physicalDeviceImageViewImageFormatInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageViewImageFormatInfoEXT.sType );
@@ -7273,8 +7815,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT const &
- physicalDeviceImageViewMinLodFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT const & physicalDeviceImageViewMinLodFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageViewMinLodFeaturesEXT.sType );
@@ -7287,8 +7829,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures const &
- physicalDeviceImagelessFramebufferFeatures ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures const & physicalDeviceImagelessFramebufferFeatures ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImagelessFramebufferFeatures.sType );
@@ -7301,8 +7843,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT const &
- physicalDeviceIndexTypeUint8FeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT const & physicalDeviceIndexTypeUint8FeaturesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIndexTypeUint8FeaturesEXT.sType );
@@ -7315,8 +7857,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV const &
- physicalDeviceInheritedViewportScissorFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV const & physicalDeviceInheritedViewportScissorFeaturesNV )
+ const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInheritedViewportScissorFeaturesNV.sType );
@@ -7329,15 +7871,14 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures const &
- physicalDeviceInlineUniformBlockFeatures ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures const & physicalDeviceInlineUniformBlockFeatures ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockFeatures.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockFeatures.pNext );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockFeatures.inlineUniformBlock );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceInlineUniformBlockFeatures.descriptorBindingInlineUniformBlockUpdateAfterBind );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockFeatures.descriptorBindingInlineUniformBlockUpdateAfterBind );
return seed;
}
};
@@ -7345,20 +7886,17 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties const &
- physicalDeviceInlineUniformBlockProperties ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties const & physicalDeviceInlineUniformBlockProperties ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.pNext );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.maxInlineUniformBlockSize );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceInlineUniformBlockProperties.maxPerStageDescriptorInlineUniformBlocks );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceInlineUniformBlockProperties.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.maxPerStageDescriptorInlineUniformBlocks );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.maxDescriptorSetInlineUniformBlocks );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceInlineUniformBlockProperties.maxDescriptorSetUpdateAfterBindInlineUniformBlocks );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.maxDescriptorSetUpdateAfterBindInlineUniformBlocks );
return seed;
}
};
@@ -7366,8 +7904,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI const &
- physicalDeviceInvocationMaskFeaturesHUAWEI ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI const & physicalDeviceInvocationMaskFeaturesHUAWEI ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInvocationMaskFeaturesHUAWEI.sType );
@@ -7378,10 +7916,23 @@ namespace std
};
template <>
- struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT>
{
std::size_t
- operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const & physicalDeviceLimits ) const VULKAN_HPP_NOEXCEPT
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT const & physicalDeviceLegacyDitheringFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyDitheringFeaturesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyDitheringFeaturesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyDitheringFeaturesEXT.legacyDithering );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const & physicalDeviceLimits ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxImageDimension1D );
@@ -7515,8 +8066,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT const &
- physicalDeviceLineRasterizationFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT const & physicalDeviceLineRasterizationFeaturesEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesEXT.sType );
@@ -7534,8 +8085,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT const &
- physicalDeviceLineRasterizationPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT const & physicalDeviceLineRasterizationPropertiesEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationPropertiesEXT.sType );
@@ -7548,8 +8099,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV const &
- physicalDeviceLinearColorAttachmentFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV const & physicalDeviceLinearColorAttachmentFeaturesNV ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLinearColorAttachmentFeaturesNV.sType );
@@ -7562,8 +8113,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties const &
- physicalDeviceMaintenance3Properties ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties const & physicalDeviceMaintenance3Properties ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance3Properties.sType );
@@ -7577,8 +8127,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features const &
- physicalDeviceMaintenance4Features ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features const & physicalDeviceMaintenance4Features ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance4Features.sType );
@@ -7591,8 +8140,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties const &
- physicalDeviceMaintenance4Properties ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties const & physicalDeviceMaintenance4Properties ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance4Properties.sType );
@@ -7605,8 +8153,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT const &
- physicalDeviceMemoryBudgetPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT const & physicalDeviceMemoryBudgetPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryBudgetPropertiesEXT.sType );
@@ -7626,8 +8174,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT const &
- physicalDeviceMemoryPriorityFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT const & physicalDeviceMemoryPriorityFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryPriorityFeaturesEXT.sType );
@@ -7640,9 +8188,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const & physicalDeviceMemoryProperties ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const & physicalDeviceMemoryProperties ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties.memoryTypeCount );
@@ -7662,9 +8208,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 const & physicalDeviceMemoryProperties2 ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 const & physicalDeviceMemoryProperties2 ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties2.sType );
@@ -7675,10 +8219,26 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesEXT const & physicalDeviceMeshShaderFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesEXT.taskShader );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesEXT.meshShader );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesEXT.multiviewMeshShader );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesEXT.primitiveFragmentShadingRateMeshShader );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesEXT.meshShaderQueries );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV const &
- physicalDeviceMeshShaderFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV const & physicalDeviceMeshShaderFeaturesNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesNV.sType );
@@ -7690,10 +8250,62 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesEXT>
+ {
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesEXT const & physicalDeviceMeshShaderPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxTaskWorkGroupTotalCount );
+ for ( size_t i = 0; i < 3; ++i )
+ {
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxTaskWorkGroupCount[i] );
+ }
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxTaskWorkGroupInvocations );
+ for ( size_t i = 0; i < 3; ++i )
+ {
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxTaskWorkGroupSize[i] );
+ }
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxTaskPayloadSize );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxTaskSharedMemorySize );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxTaskPayloadAndSharedMemorySize );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshWorkGroupTotalCount );
+ for ( size_t i = 0; i < 3; ++i )
+ {
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshWorkGroupCount[i] );
+ }
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshWorkGroupInvocations );
+ for ( size_t i = 0; i < 3; ++i )
+ {
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshWorkGroupSize[i] );
+ }
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshSharedMemorySize );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshPayloadAndSharedMemorySize );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshOutputMemorySize );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshPayloadAndOutputMemorySize );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshOutputComponents );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshOutputVertices );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshOutputPrimitives );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshOutputLayers );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshMultiviewViewCount );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.meshOutputPerVertexGranularity );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.meshOutputPerPrimitiveGranularity );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxPreferredTaskWorkGroupInvocations );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxPreferredMeshWorkGroupInvocations );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.prefersLocalInvocationVertexOutput );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.prefersLocalInvocationPrimitiveOutput );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.prefersCompactVertexOutput );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.prefersCompactPrimitiveOutput );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV const &
- physicalDeviceMeshShaderPropertiesNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV const & physicalDeviceMeshShaderPropertiesNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.sType );
@@ -7724,8 +8336,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT const &
- physicalDeviceMultiDrawFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT const & physicalDeviceMultiDrawFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiDrawFeaturesEXT.sType );
@@ -7738,8 +8349,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT const &
- physicalDeviceMultiDrawPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT const & physicalDeviceMultiDrawPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiDrawPropertiesEXT.sType );
@@ -7750,11 +8360,23 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const &
+ physicalDeviceMultisampledRenderToSingleSampledFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultisampledRenderToSingleSampledFeaturesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultisampledRenderToSingleSampledFeaturesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultisampledRenderToSingleSampledFeaturesEXT.multisampledRenderToSingleSampled );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures const & physicalDeviceMultiviewFeatures ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures const & physicalDeviceMultiviewFeatures ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewFeatures.sType );
@@ -7775,8 +8397,7 @@ namespace std
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewAttributesPropertiesNVX.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewAttributesPropertiesNVX.pNext );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceMultiviewPerViewAttributesPropertiesNVX.perViewPositionAllComponents );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewAttributesPropertiesNVX.perViewPositionAllComponents );
return seed;
}
};
@@ -7784,8 +8405,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties const &
- physicalDeviceMultiviewProperties ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties const & physicalDeviceMultiviewProperties ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewProperties.sType );
@@ -7797,15 +8417,97 @@ namespace std
};
template <>
- struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesVALVE>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const &
- physicalDeviceMutableDescriptorTypeFeaturesVALVE ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & physicalDeviceMutableDescriptorTypeFeaturesEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMutableDescriptorTypeFeaturesVALVE.sType );
- VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMutableDescriptorTypeFeaturesVALVE.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMutableDescriptorTypeFeaturesVALVE.mutableDescriptorType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMutableDescriptorTypeFeaturesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMutableDescriptorTypeFeaturesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMutableDescriptorTypeFeaturesEXT.mutableDescriptorType );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & physicalDeviceNonSeamlessCubeMapFeaturesEXT ) const
+ VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNonSeamlessCubeMapFeaturesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNonSeamlessCubeMapFeaturesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNonSeamlessCubeMapFeaturesEXT.nonSeamlessCubeMap );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT>
+ {
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT const & physicalDeviceOpacityMicromapFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.micromap );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.micromapCaptureReplay );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.micromapHostCommands );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT const & physicalDeviceOpacityMicromapPropertiesEXT ) const
+ VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapPropertiesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapPropertiesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapPropertiesEXT.maxOpacity2StateSubdivisionLevel );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapPropertiesEXT.maxOpacity4StateSubdivisionLevel );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV const & physicalDeviceOpticalFlowFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowFeaturesNV.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowFeaturesNV.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowFeaturesNV.opticalFlow );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV>
+ {
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV const & physicalDeviceOpticalFlowPropertiesNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.supportedOutputGridSizes );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.supportedHintGridSizes );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.hintSupported );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.costSupported );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.bidirectionalFlowSupported );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.globalFlowSupported );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.minWidth );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.minHeight );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.maxWidth );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.maxHeight );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.maxNumRegionsOfInterest );
return seed;
}
};
@@ -7813,8 +8515,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT const &
- physicalDevicePCIBusInfoPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT const & physicalDevicePCIBusInfoPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.sType );
@@ -7830,8 +8532,9 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const &
- physicalDevicePageableDeviceLocalMemoryFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & physicalDevicePageableDeviceLocalMemoryFeaturesEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePageableDeviceLocalMemoryFeaturesEXT.sType );
@@ -7844,8 +8547,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR const &
- physicalDevicePerformanceQueryFeaturesKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR const & physicalDevicePerformanceQueryFeaturesKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryFeaturesKHR.sType );
@@ -7859,8 +8562,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR const &
- physicalDevicePerformanceQueryPropertiesKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR const & physicalDevicePerformanceQueryPropertiesKHR ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryPropertiesKHR.sType );
@@ -7873,8 +8576,9 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures const &
- physicalDevicePipelineCreationCacheControlFeatures ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures const & physicalDevicePipelineCreationCacheControlFeatures ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineCreationCacheControlFeatures.sType );
@@ -7899,10 +8603,69 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT const & physicalDevicePipelinePropertiesFeaturesEXT ) const
+ VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelinePropertiesFeaturesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelinePropertiesFeaturesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelinePropertiesFeaturesEXT.pipelinePropertiesIdentifier );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT const & physicalDevicePipelineProtectedAccessFeaturesEXT )
+ const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineProtectedAccessFeaturesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineProtectedAccessFeaturesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineProtectedAccessFeaturesEXT.pipelineProtectedAccess );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT const & physicalDevicePipelineRobustnessFeaturesEXT ) const
+ VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessFeaturesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessFeaturesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessFeaturesEXT.pipelineRobustness );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT const & physicalDevicePipelineRobustnessPropertiesEXT ) const
+ VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.defaultRobustnessStorageBuffers );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.defaultRobustnessUniformBuffers );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.defaultRobustnessVertexInputs );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.defaultRobustnessImages );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties const &
- physicalDevicePointClippingProperties ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties const & physicalDevicePointClippingProperties ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePointClippingProperties.sType );
@@ -7916,8 +8679,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR const &
- physicalDevicePortabilitySubsetFeaturesKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR const & physicalDevicePortabilitySubsetFeaturesKHR ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.sType );
@@ -7932,8 +8695,7 @@ namespace std
VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.pointPolygons );
VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.samplerMipLodBias );
VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.separateStencilMaskRef );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDevicePortabilitySubsetFeaturesKHR.shaderSampleRateInterpolationFunctions );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.shaderSampleRateInterpolationFunctions );
VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.tessellationIsolines );
VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.tessellationPointMode );
VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.triangleFans );
@@ -7947,24 +8709,36 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR const &
- physicalDevicePortabilitySubsetPropertiesKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR const & physicalDevicePortabilitySubsetPropertiesKHR ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetPropertiesKHR.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetPropertiesKHR.pNext );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDevicePortabilitySubsetPropertiesKHR.minVertexInputBindingStrideAlignment );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetPropertiesKHR.minVertexInputBindingStrideAlignment );
return seed;
}
};
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV>
+ {
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV const & physicalDevicePresentBarrierFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentBarrierFeaturesNV.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentBarrierFeaturesNV.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentBarrierFeaturesNV.presentBarrier );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR const &
- physicalDevicePresentIdFeaturesKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR const & physicalDevicePresentIdFeaturesKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentIdFeaturesKHR.sType );
@@ -7977,8 +8751,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR const &
- physicalDevicePresentWaitFeaturesKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR const & physicalDevicePresentWaitFeaturesKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentWaitFeaturesKHR.sType );
@@ -7997,10 +8770,24 @@ namespace std
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitiveTopologyListRestartFeaturesEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitiveTopologyListRestartFeaturesEXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDevicePrimitiveTopologyListRestartFeaturesEXT.primitiveTopologyListRestart );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDevicePrimitiveTopologyListRestartFeaturesEXT.primitiveTopologyPatchListRestart );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitiveTopologyListRestartFeaturesEXT.primitiveTopologyListRestart );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitiveTopologyListRestartFeaturesEXT.primitiveTopologyPatchListRestart );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & physicalDevicePrimitivesGeneratedQueryFeaturesEXT )
+ const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitivesGeneratedQueryFeaturesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitivesGeneratedQueryFeaturesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitivesGeneratedQueryFeaturesEXT.primitivesGeneratedQuery );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitivesGeneratedQueryFeaturesEXT.primitivesGeneratedQueryWithRasterizerDiscard );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitivesGeneratedQueryFeaturesEXT.primitivesGeneratedQueryWithNonZeroStreams );
return seed;
}
};
@@ -8008,8 +8795,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures const &
- physicalDevicePrivateDataFeatures ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures const & physicalDevicePrivateDataFeatures ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrivateDataFeatures.sType );
@@ -8022,9 +8808,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const & physicalDeviceSparseProperties ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const & physicalDeviceSparseProperties ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyStandard2DBlockShape );
@@ -8039,8 +8823,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const & physicalDeviceProperties ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const & physicalDeviceProperties ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.apiVersion );
@@ -8065,8 +8848,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const & physicalDeviceProperties2 ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const & physicalDeviceProperties2 ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties2.sType );
@@ -8079,8 +8861,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures const &
- physicalDeviceProtectedMemoryFeatures ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures const & physicalDeviceProtectedMemoryFeatures ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProtectedMemoryFeatures.sType );
@@ -8093,8 +8875,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const &
- physicalDeviceProtectedMemoryProperties ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const & physicalDeviceProtectedMemoryProperties ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProtectedMemoryProperties.sType );
@@ -8107,15 +8889,14 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT const &
- physicalDeviceProvokingVertexFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT const & physicalDeviceProvokingVertexFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexFeaturesEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexFeaturesEXT.pNext );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexFeaturesEXT.provokingVertexLast );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceProvokingVertexFeaturesEXT.transformFeedbackPreservesProvokingVertex );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexFeaturesEXT.transformFeedbackPreservesProvokingVertex );
return seed;
}
};
@@ -8123,15 +8904,14 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT const &
- physicalDeviceProvokingVertexPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT const & physicalDeviceProvokingVertexPropertiesEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexPropertiesEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexPropertiesEXT.pNext );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexPropertiesEXT.provokingVertexModePerPipeline );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceProvokingVertexPropertiesEXT.transformFeedbackPreservesTriangleFanProvokingVertex );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexPropertiesEXT.transformFeedbackPreservesTriangleFanProvokingVertex );
return seed;
}
};
@@ -8139,8 +8919,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR const &
- physicalDevicePushDescriptorPropertiesKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR const & physicalDevicePushDescriptorPropertiesKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushDescriptorPropertiesKHR.sType );
@@ -8153,8 +8933,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT const &
- physicalDeviceRGBA10X6FormatsFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & physicalDeviceRGBA10X6FormatsFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRGBA10X6FormatsFeaturesEXT.sType );
@@ -8165,20 +8945,17 @@ namespace std
};
template <>
- struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const &
- physicalDeviceRasterizationOrderAttachmentAccessFeaturesARM ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const &
+ physicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesARM.sType );
- VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesARM.pNext );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesARM.rasterizationOrderColorAttachmentAccess );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesARM.rasterizationOrderDepthAttachmentAccess );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesARM.rasterizationOrderStencilAttachmentAccess );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT.rasterizationOrderColorAttachmentAccess );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT.rasterizationOrderDepthAttachmentAccess );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT.rasterizationOrderStencilAttachmentAccess );
return seed;
}
};
@@ -8186,8 +8963,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR const &
- physicalDeviceRayQueryFeaturesKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR const & physicalDeviceRayQueryFeaturesKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayQueryFeaturesKHR.sType );
@@ -8198,17 +8974,31 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMaintenance1FeaturesKHR>
+ {
+ std::size_t operator()(
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & physicalDeviceRayTracingMaintenance1FeaturesKHR ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMaintenance1FeaturesKHR.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMaintenance1FeaturesKHR.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMaintenance1FeaturesKHR.rayTracingMaintenance1 );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMaintenance1FeaturesKHR.rayTracingPipelineTraceRaysIndirect2 );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV const &
- physicalDeviceRayTracingMotionBlurFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV const & physicalDeviceRayTracingMotionBlurFeaturesNV ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMotionBlurFeaturesNV.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMotionBlurFeaturesNV.pNext );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMotionBlurFeaturesNV.rayTracingMotionBlur );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceRayTracingMotionBlurFeaturesNV.rayTracingMotionBlurPipelineTraceRaysIndirect );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMotionBlurFeaturesNV.rayTracingMotionBlurPipelineTraceRaysIndirect );
return seed;
}
};
@@ -8216,17 +9006,15 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR const &
- physicalDeviceRayTracingPipelineFeaturesKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR const & physicalDeviceRayTracingPipelineFeaturesKHR ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.pNext );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.rayTracingPipeline );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceRayTracingPipelineFeaturesKHR.rayTracingPipelineShaderGroupHandleCaptureReplay );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceRayTracingPipelineFeaturesKHR.rayTracingPipelineShaderGroupHandleCaptureReplayMixed );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.rayTracingPipelineShaderGroupHandleCaptureReplay );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.rayTracingPipelineShaderGroupHandleCaptureReplayMixed );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.rayTracingPipelineTraceRaysIndirect );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.rayTraversalPrimitiveCulling );
return seed;
@@ -8236,8 +9024,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR const &
- physicalDeviceRayTracingPipelinePropertiesKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR const & physicalDeviceRayTracingPipelinePropertiesKHR ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.sType );
@@ -8257,8 +9045,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV const &
- physicalDeviceRayTracingPropertiesNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV const & physicalDeviceRayTracingPropertiesNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.sType );
@@ -8278,8 +9065,9 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV const &
- physicalDeviceRepresentativeFragmentTestFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & physicalDeviceRepresentativeFragmentTestFeaturesNV ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRepresentativeFragmentTestFeaturesNV.sType );
@@ -8292,8 +9080,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT const &
- physicalDeviceRobustness2FeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT const & physicalDeviceRobustness2FeaturesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2FeaturesEXT.sType );
@@ -8308,8 +9095,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT const &
- physicalDeviceRobustness2PropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT const & physicalDeviceRobustness2PropertiesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2PropertiesEXT.sType );
@@ -8323,8 +9110,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT const &
- physicalDeviceSampleLocationsPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT const & physicalDeviceSampleLocationsPropertiesEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSampleLocationsPropertiesEXT.sType );
@@ -8344,8 +9131,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties const &
- physicalDeviceSamplerFilterMinmaxProperties ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties const & physicalDeviceSamplerFilterMinmaxProperties ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerFilterMinmaxProperties.sType );
@@ -8359,8 +9146,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures const &
- physicalDeviceSamplerYcbcrConversionFeatures ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures const & physicalDeviceSamplerYcbcrConversionFeatures ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerYcbcrConversionFeatures.sType );
@@ -8373,8 +9160,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures const &
- physicalDeviceScalarBlockLayoutFeatures ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures const & physicalDeviceScalarBlockLayoutFeatures ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceScalarBlockLayoutFeatures.sType );
@@ -8387,8 +9174,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures const &
- physicalDeviceSeparateDepthStencilLayoutsFeatures ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & physicalDeviceSeparateDepthStencilLayoutsFeatures )
+ const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSeparateDepthStencilLayoutsFeatures.sType );
@@ -8401,8 +9188,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT const &
- physicalDeviceShaderAtomicFloat2FeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & physicalDeviceShaderAtomicFloat2FeaturesEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.sType );
@@ -8426,8 +9213,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT const &
- physicalDeviceShaderAtomicFloatFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT const & physicalDeviceShaderAtomicFloatFeaturesEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.sType );
@@ -8451,8 +9238,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features const &
- physicalDeviceShaderAtomicInt64Features ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features const & physicalDeviceShaderAtomicInt64Features ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicInt64Features.sType );
@@ -8466,8 +9253,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR const &
- physicalDeviceShaderClockFeaturesKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR const & physicalDeviceShaderClockFeaturesKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderClockFeaturesKHR.sType );
@@ -8479,10 +9265,39 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsFeaturesARM>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & physicalDeviceShaderCoreBuiltinsFeaturesARM ) const
+ VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsFeaturesARM.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsFeaturesARM.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsFeaturesARM.shaderCoreBuiltins );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsPropertiesARM>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & physicalDeviceShaderCoreBuiltinsPropertiesARM ) const
+ VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsPropertiesARM.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsPropertiesARM.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsPropertiesARM.shaderCoreCount );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsPropertiesARM.shaderWarpsPerCore );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD const &
- physicalDeviceShaderCoreProperties2AMD ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD const & physicalDeviceShaderCoreProperties2AMD ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreProperties2AMD.sType );
@@ -8496,8 +9311,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD const &
- physicalDeviceShaderCorePropertiesAMD ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD const & physicalDeviceShaderCorePropertiesAMD ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.sType );
@@ -8529,8 +9344,7 @@ namespace std
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderDemoteToHelperInvocationFeatures.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderDemoteToHelperInvocationFeatures.pNext );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceShaderDemoteToHelperInvocationFeatures.shaderDemoteToHelperInvocation );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderDemoteToHelperInvocationFeatures.shaderDemoteToHelperInvocation );
return seed;
}
};
@@ -8538,8 +9352,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures const &
- physicalDeviceShaderDrawParametersFeatures ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures const & physicalDeviceShaderDrawParametersFeatures ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderDrawParametersFeatures.sType );
@@ -8550,10 +9364,24 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const &
+ physicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD.shaderEarlyAndLateFragmentTests );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features const &
- physicalDeviceShaderFloat16Int8Features ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features const & physicalDeviceShaderFloat16Int8Features ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderFloat16Int8Features.sType );
@@ -8567,8 +9395,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const &
- physicalDeviceShaderImageAtomicInt64FeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()(
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & physicalDeviceShaderImageAtomicInt64FeaturesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageAtomicInt64FeaturesEXT.sType );
@@ -8582,8 +9410,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV const &
- physicalDeviceShaderImageFootprintFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV const & physicalDeviceShaderImageFootprintFeaturesNV ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageFootprintFeaturesNV.sType );
@@ -8596,8 +9424,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures const &
- physicalDeviceShaderIntegerDotProductFeatures ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures const & physicalDeviceShaderIntegerDotProductFeatures ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductFeatures.sType );
@@ -8610,87 +9438,43 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties const &
- physicalDeviceShaderIntegerDotProductProperties ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()(
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties const & physicalDeviceShaderIntegerDotProductProperties ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.pNext );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct8BitUnsignedAccelerated );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceShaderIntegerDotProductProperties.integerDotProduct8BitSignedAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct8BitMixedSignednessAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct4x8BitPackedUnsignedAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct4x8BitPackedSignedAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct4x8BitPackedMixedSignednessAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct16BitUnsignedAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct16BitSignedAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct16BitMixedSignednessAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct32BitUnsignedAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct32BitSignedAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct32BitMixedSignednessAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct64BitUnsignedAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct64BitSignedAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct64BitMixedSignednessAccelerated );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceShaderIntegerDotProductProperties
- .integerDotProductAccumulatingSaturating8BitUnsignedAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed,
- physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating8BitSignedAccelerated );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceShaderIntegerDotProductProperties
- .integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceShaderIntegerDotProductProperties
- .integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceShaderIntegerDotProductProperties
- .integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceShaderIntegerDotProductProperties
- .integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct8BitUnsignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct8BitSignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct8BitMixedSignednessAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct4x8BitPackedUnsignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct4x8BitPackedSignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct4x8BitPackedMixedSignednessAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct16BitUnsignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct16BitSignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct16BitMixedSignednessAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct32BitUnsignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct32BitSignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct32BitMixedSignednessAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct64BitUnsignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct64BitSignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct64BitMixedSignednessAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating8BitSignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated );
VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceShaderIntegerDotProductProperties
- .integerDotProductAccumulatingSaturating16BitUnsignedAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed,
- physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating16BitSignedAccelerated );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceShaderIntegerDotProductProperties
- .integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceShaderIntegerDotProductProperties
- .integerDotProductAccumulatingSaturating32BitUnsignedAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed,
- physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating32BitSignedAccelerated );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceShaderIntegerDotProductProperties
- .integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceShaderIntegerDotProductProperties
- .integerDotProductAccumulatingSaturating64BitUnsignedAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed,
- physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating64BitSignedAccelerated );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceShaderIntegerDotProductProperties
- .integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated );
+ physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating16BitSignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating32BitSignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating64BitSignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated );
return seed;
}
};
@@ -8698,8 +9482,9 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const &
- physicalDeviceShaderIntegerFunctions2FeaturesINTEL ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & physicalDeviceShaderIntegerFunctions2FeaturesINTEL ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerFunctions2FeaturesINTEL.sType );
@@ -8710,10 +9495,41 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierFeaturesEXT>
+ {
+ std::size_t operator()(
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & physicalDeviceShaderModuleIdentifierFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderModuleIdentifierFeaturesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderModuleIdentifierFeaturesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderModuleIdentifierFeaturesEXT.shaderModuleIdentifier );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierPropertiesEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & physicalDeviceShaderModuleIdentifierPropertiesEXT )
+ const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderModuleIdentifierPropertiesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderModuleIdentifierPropertiesEXT.pNext );
+ for ( size_t i = 0; i < VK_UUID_SIZE; ++i )
+ {
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderModuleIdentifierPropertiesEXT.shaderModuleIdentifierAlgorithmUUID[i] );
+ }
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV const &
- physicalDeviceShaderSMBuiltinsFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV const & physicalDeviceShaderSMBuiltinsFeaturesNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSMBuiltinsFeaturesNV.sType );
@@ -8726,8 +9542,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV const &
- physicalDeviceShaderSMBuiltinsPropertiesNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV const & physicalDeviceShaderSMBuiltinsPropertiesNV ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSMBuiltinsPropertiesNV.sType );
@@ -8741,8 +9557,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures const &
- physicalDeviceShaderSubgroupExtendedTypesFeatures ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & physicalDeviceShaderSubgroupExtendedTypesFeatures )
+ const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupExtendedTypesFeatures.sType );
@@ -8761,8 +9577,7 @@ namespace std
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR.pNext );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR.shaderSubgroupUniformControlFlow );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR.shaderSubgroupUniformControlFlow );
return seed;
}
};
@@ -8770,8 +9585,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures const &
- physicalDeviceShaderTerminateInvocationFeatures ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()(
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures const & physicalDeviceShaderTerminateInvocationFeatures ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTerminateInvocationFeatures.sType );
@@ -8784,8 +9599,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV const &
- physicalDeviceShadingRateImageFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV const & physicalDeviceShadingRateImageFeaturesNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImageFeaturesNV.sType );
@@ -8799,8 +9614,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV const &
- physicalDeviceShadingRateImagePropertiesNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV const & physicalDeviceShadingRateImagePropertiesNV ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImagePropertiesNV.sType );
@@ -8815,8 +9630,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 const &
- physicalDeviceSparseImageFormatInfo2 ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 const & physicalDeviceSparseImageFormatInfo2 ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseImageFormatInfo2.sType );
@@ -8833,8 +9647,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties const &
- physicalDeviceSubgroupProperties ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties const & physicalDeviceSubgroupProperties ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupProperties.sType );
@@ -8850,8 +9663,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures const &
- physicalDeviceSubgroupSizeControlFeatures ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures const & physicalDeviceSubgroupSizeControlFeatures ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlFeatures.sType );
@@ -8865,8 +9678,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties const &
- physicalDeviceSubgroupSizeControlProperties ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties const & physicalDeviceSubgroupSizeControlProperties ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlProperties.sType );
@@ -8880,10 +9693,24 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassMergeFeedbackFeaturesEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & physicalDeviceSubpassMergeFeedbackFeaturesEXT ) const
+ VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassMergeFeedbackFeaturesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassMergeFeedbackFeaturesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassMergeFeedbackFeaturesEXT.subpassMergeFeedback );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI const &
- physicalDeviceSubpassShadingFeaturesHUAWEI ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI const & physicalDeviceSubpassShadingFeaturesHUAWEI ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassShadingFeaturesHUAWEI.sType );
@@ -8896,14 +9723,13 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI const &
- physicalDeviceSubpassShadingPropertiesHUAWEI ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI const & physicalDeviceSubpassShadingPropertiesHUAWEI ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassShadingPropertiesHUAWEI.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassShadingPropertiesHUAWEI.pNext );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceSubpassShadingPropertiesHUAWEI.maxSubpassShadingWorkgroupSizeAspectRatio );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassShadingPropertiesHUAWEI.maxSubpassShadingWorkgroupSizeAspectRatio );
return seed;
}
};
@@ -8911,8 +9737,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR const & physicalDeviceSurfaceInfo2KHR )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR const & physicalDeviceSurfaceInfo2KHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSurfaceInfo2KHR.sType );
@@ -8925,8 +9750,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features const &
- physicalDeviceSynchronization2Features ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features const & physicalDeviceSynchronization2Features ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSynchronization2Features.sType );
@@ -8939,8 +9764,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const &
- physicalDeviceTexelBufferAlignmentFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & physicalDeviceTexelBufferAlignmentFeaturesEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentFeaturesEXT.sType );
@@ -8953,20 +9778,16 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties const &
- physicalDeviceTexelBufferAlignmentProperties ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties const & physicalDeviceTexelBufferAlignmentProperties ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentProperties.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentProperties.pNext );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceTexelBufferAlignmentProperties.storageTexelBufferOffsetAlignmentBytes );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceTexelBufferAlignmentProperties.storageTexelBufferOffsetSingleTexelAlignment );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceTexelBufferAlignmentProperties.uniformTexelBufferOffsetAlignmentBytes );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceTexelBufferAlignmentProperties.uniformTexelBufferOffsetSingleTexelAlignment );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentProperties.storageTexelBufferOffsetAlignmentBytes );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentProperties.storageTexelBufferOffsetSingleTexelAlignment );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentProperties.uniformTexelBufferOffsetAlignmentBytes );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentProperties.uniformTexelBufferOffsetSingleTexelAlignment );
return seed;
}
};
@@ -8974,8 +9795,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures const &
- physicalDeviceTextureCompressionASTCHDRFeatures ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()(
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures const & physicalDeviceTextureCompressionASTCHDRFeatures ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTextureCompressionASTCHDRFeatures.sType );
@@ -8986,10 +9807,24 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM>
+ {
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM const & physicalDeviceTilePropertiesFeaturesQCOM ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTilePropertiesFeaturesQCOM.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTilePropertiesFeaturesQCOM.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTilePropertiesFeaturesQCOM.tileProperties );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures const &
- physicalDeviceTimelineSemaphoreFeatures ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures const & physicalDeviceTimelineSemaphoreFeatures ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTimelineSemaphoreFeatures.sType );
@@ -9002,8 +9837,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties const &
- physicalDeviceTimelineSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties const & physicalDeviceTimelineSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTimelineSemaphoreProperties.sType );
@@ -9016,8 +9851,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties const & physicalDeviceToolProperties )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties const & physicalDeviceToolProperties ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceToolProperties.sType );
@@ -9046,8 +9880,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT const &
- physicalDeviceTransformFeedbackFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT const & physicalDeviceTransformFeedbackFeaturesEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackFeaturesEXT.sType );
@@ -9061,8 +9895,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT const &
- physicalDeviceTransformFeedbackPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT const & physicalDeviceTransformFeedbackPropertiesEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.sType );
@@ -9072,13 +9906,10 @@ namespace std
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.maxTransformFeedbackBufferSize );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.maxTransformFeedbackStreamDataSize );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.maxTransformFeedbackBufferDataSize );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceTransformFeedbackPropertiesEXT.maxTransformFeedbackBufferDataStride );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.maxTransformFeedbackBufferDataStride );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.transformFeedbackQueries );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceTransformFeedbackPropertiesEXT.transformFeedbackStreamsLinesTriangles );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceTransformFeedbackPropertiesEXT.transformFeedbackRasterizationStreamSelect );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.transformFeedbackStreamsLinesTriangles );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.transformFeedbackRasterizationStreamSelect );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.transformFeedbackDraw );
return seed;
}
@@ -9087,8 +9918,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures const &
- physicalDeviceUniformBufferStandardLayoutFeatures ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures const & physicalDeviceUniformBufferStandardLayoutFeatures )
+ const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceUniformBufferStandardLayoutFeatures.sType );
@@ -9101,8 +9932,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures const &
- physicalDeviceVariablePointersFeatures ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures const & physicalDeviceVariablePointersFeatures ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVariablePointersFeatures.sType );
@@ -9116,16 +9947,14 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT const &
- physicalDeviceVertexAttributeDivisorFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()(
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & physicalDeviceVertexAttributeDivisorFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeaturesEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeaturesEXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceVertexAttributeDivisorFeaturesEXT.vertexAttributeInstanceRateDivisor );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceVertexAttributeDivisorFeaturesEXT.vertexAttributeInstanceRateZeroDivisor );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeaturesEXT.vertexAttributeInstanceRateDivisor );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeaturesEXT.vertexAttributeInstanceRateZeroDivisor );
return seed;
}
};
@@ -9133,8 +9962,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT const &
- physicalDeviceVertexAttributeDivisorPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & physicalDeviceVertexAttributeDivisorPropertiesEXT )
+ const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorPropertiesEXT.sType );
@@ -9147,8 +9976,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT const &
- physicalDeviceVertexInputDynamicStateFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & physicalDeviceVertexInputDynamicStateFeaturesEXT )
+ const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexInputDynamicStateFeaturesEXT.sType );
@@ -9160,50 +9989,14 @@ namespace std
# if defined( VK_ENABLE_BETA_EXTENSIONS )
template <>
- struct hash<VULKAN_HPP_NAMESPACE::VideoProfileKHR>
- {
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoProfileKHR const & videoProfileKHR ) const VULKAN_HPP_NOEXCEPT
- {
- std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, videoProfileKHR.sType );
- VULKAN_HPP_HASH_COMBINE( seed, videoProfileKHR.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoProfileKHR.videoCodecOperation );
- VULKAN_HPP_HASH_COMBINE( seed, videoProfileKHR.chromaSubsampling );
- VULKAN_HPP_HASH_COMBINE( seed, videoProfileKHR.lumaBitDepth );
- VULKAN_HPP_HASH_COMBINE( seed, videoProfileKHR.chromaBitDepth );
- return seed;
- }
- };
-# endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-# if defined( VK_ENABLE_BETA_EXTENSIONS )
- template <>
- struct hash<VULKAN_HPP_NAMESPACE::VideoProfilesKHR>
- {
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoProfilesKHR const & videoProfilesKHR ) const VULKAN_HPP_NOEXCEPT
- {
- std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, videoProfilesKHR.sType );
- VULKAN_HPP_HASH_COMBINE( seed, videoProfilesKHR.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoProfilesKHR.profileCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoProfilesKHR.pProfiles );
- return seed;
- }
- };
-# endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-# if defined( VK_ENABLE_BETA_EXTENSIONS )
- template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR const &
- physicalDeviceVideoFormatInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR const & physicalDeviceVideoFormatInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoFormatInfoKHR.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoFormatInfoKHR.pNext );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoFormatInfoKHR.imageUsage );
- VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoFormatInfoKHR.pVideoProfiles );
return seed;
}
};
@@ -9212,9 +10005,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features const & physicalDeviceVulkan11Features ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features const & physicalDeviceVulkan11Features ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.sType );
@@ -9238,8 +10029,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties const &
- physicalDeviceVulkan11Properties ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties const & physicalDeviceVulkan11Properties ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.sType );
@@ -9275,9 +10065,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features const & physicalDeviceVulkan12Features ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features const & physicalDeviceVulkan12Features ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.sType );
@@ -9306,10 +10094,8 @@ namespace std
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingSampledImageUpdateAfterBind );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingStorageImageUpdateAfterBind );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingStorageBufferUpdateAfterBind );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceVulkan12Features.descriptorBindingUniformTexelBufferUpdateAfterBind );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceVulkan12Features.descriptorBindingStorageTexelBufferUpdateAfterBind );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingUniformTexelBufferUpdateAfterBind );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingStorageTexelBufferUpdateAfterBind );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingUpdateUnusedWhilePending );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingPartiallyBound );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingVariableDescriptorCount );
@@ -9338,8 +10124,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties const &
- physicalDeviceVulkan12Properties ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties const & physicalDeviceVulkan12Properties ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.sType );
@@ -9372,35 +10157,25 @@ namespace std
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderRoundingModeRTZFloat32 );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderRoundingModeRTZFloat64 );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxUpdateAfterBindDescriptorsInAllPools );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceVulkan12Properties.shaderUniformBufferArrayNonUniformIndexingNative );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderUniformBufferArrayNonUniformIndexingNative );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderSampledImageArrayNonUniformIndexingNative );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceVulkan12Properties.shaderStorageBufferArrayNonUniformIndexingNative );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderStorageBufferArrayNonUniformIndexingNative );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderStorageImageArrayNonUniformIndexingNative );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceVulkan12Properties.shaderInputAttachmentArrayNonUniformIndexingNative );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderInputAttachmentArrayNonUniformIndexingNative );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.robustBufferAccessUpdateAfterBind );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.quadDivergentImplicitLod );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindSamplers );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindUniformBuffers );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindStorageBuffers );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindSampledImages );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindStorageImages );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindInputAttachments );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindUniformBuffers );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindStorageBuffers );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindSampledImages );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindStorageImages );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindInputAttachments );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageUpdateAfterBindResources );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindSamplers );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindUniformBuffers );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindStorageBuffers );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindSampledImages );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindStorageImages );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindInputAttachments );
@@ -9419,17 +10194,14 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features const & physicalDeviceVulkan13Features ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features const & physicalDeviceVulkan13Features ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.pNext );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.robustImageAccess );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.inlineUniformBlock );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceVulkan13Features.descriptorBindingInlineUniformBlockUpdateAfterBind );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.descriptorBindingInlineUniformBlockUpdateAfterBind );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.pipelineCreationCacheControl );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.privateData );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.shaderDemoteToHelperInvocation );
@@ -9449,8 +10221,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties const &
- physicalDeviceVulkan13Properties ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties const & physicalDeviceVulkan13Properties ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.sType );
@@ -9461,63 +10232,40 @@ namespace std
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.requiredSubgroupSizeStages );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxInlineUniformBlockSize );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxPerStageDescriptorInlineUniformBlocks );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceVulkan13Properties.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxDescriptorSetInlineUniformBlocks );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceVulkan13Properties.maxDescriptorSetUpdateAfterBindInlineUniformBlocks );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxDescriptorSetUpdateAfterBindInlineUniformBlocks );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxInlineUniformTotalSize );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct8BitUnsignedAccelerated );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct8BitSignedAccelerated );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct8BitMixedSignednessAccelerated );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceVulkan13Properties.integerDotProduct4x8BitPackedUnsignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct4x8BitPackedUnsignedAccelerated );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct4x8BitPackedSignedAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceVulkan13Properties.integerDotProduct4x8BitPackedMixedSignednessAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct4x8BitPackedMixedSignednessAccelerated );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct16BitUnsignedAccelerated );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct16BitSignedAccelerated );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceVulkan13Properties.integerDotProduct16BitMixedSignednessAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct16BitMixedSignednessAccelerated );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct32BitUnsignedAccelerated );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct32BitSignedAccelerated );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceVulkan13Properties.integerDotProduct32BitMixedSignednessAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct32BitMixedSignednessAccelerated );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct64BitUnsignedAccelerated );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct64BitSignedAccelerated );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceVulkan13Properties.integerDotProduct64BitMixedSignednessAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating8BitSignedAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceVulkan13Properties
- .integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating16BitSignedAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating32BitSignedAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating64BitSignedAccelerated );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct64BitMixedSignednessAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating8BitSignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating16BitSignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating32BitSignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating64BitSignedAccelerated );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.storageTexelBufferOffsetAlignmentBytes );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.storageTexelBufferOffsetSingleTexelAlignment );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.uniformTexelBufferOffsetAlignmentBytes );
@@ -9530,16 +10278,15 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures const &
- physicalDeviceVulkanMemoryModelFeatures ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures const & physicalDeviceVulkanMemoryModelFeatures ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkanMemoryModelFeatures.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkanMemoryModelFeatures.pNext );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkanMemoryModelFeatures.vulkanMemoryModel );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkanMemoryModelFeatures.vulkanMemoryModelDeviceScope );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceVulkanMemoryModelFeatures.vulkanMemoryModelAvailabilityVisibilityChains );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkanMemoryModelFeatures.vulkanMemoryModelAvailabilityVisibilityChains );
return seed;
}
};
@@ -9553,14 +10300,10 @@ namespace std
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.pNext );
- VULKAN_HPP_HASH_COMBINE( seed,
- physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.workgroupMemoryExplicitLayout );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.workgroupMemoryExplicitLayoutScalarBlockLayout );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.workgroupMemoryExplicitLayout8BitAccess );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.workgroupMemoryExplicitLayout16BitAccess );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.workgroupMemoryExplicitLayout );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.workgroupMemoryExplicitLayoutScalarBlockLayout );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.workgroupMemoryExplicitLayout8BitAccess );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.workgroupMemoryExplicitLayout16BitAccess );
return seed;
}
};
@@ -9568,8 +10311,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const &
- physicalDeviceYcbcr2Plane444FormatsFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & physicalDeviceYcbcr2Plane444FormatsFeaturesEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcr2Plane444FormatsFeaturesEXT.sType );
@@ -9582,8 +10325,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT const &
- physicalDeviceYcbcrImageArraysFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT const & physicalDeviceYcbcrImageArraysFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcrImageArraysFeaturesEXT.sType );
@@ -9596,14 +10339,14 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const &
- physicalDeviceZeroInitializeWorkgroupMemoryFeatures ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & physicalDeviceZeroInitializeWorkgroupMemoryFeatures ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceZeroInitializeWorkgroupMemoryFeatures.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceZeroInitializeWorkgroupMemoryFeatures.pNext );
- VULKAN_HPP_HASH_COMBINE(
- seed, physicalDeviceZeroInitializeWorkgroupMemoryFeatures.shaderZeroInitializeWorkgroupMemory );
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceZeroInitializeWorkgroupMemoryFeatures.shaderZeroInitializeWorkgroupMemory );
return seed;
}
};
@@ -9611,8 +10354,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & pipelineCacheCreateInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & pipelineCacheCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheCreateInfo.sType );
@@ -9627,8 +10369,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne const & pipelineCacheHeaderVersionOne )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne const & pipelineCacheHeaderVersionOne ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheHeaderVersionOne.headerSize );
@@ -9646,8 +10387,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT const &
- pipelineColorBlendAdvancedStateCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT const & pipelineColorBlendAdvancedStateCreateInfoEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAdvancedStateCreateInfoEXT.sType );
@@ -9662,9 +10403,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT const & pipelineColorWriteCreateInfoEXT ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT const & pipelineColorWriteCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineColorWriteCreateInfoEXT.sType );
@@ -9678,8 +10417,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD const &
- pipelineCompilerControlCreateInfoAMD ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD const & pipelineCompilerControlCreateInfoAMD ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineCompilerControlCreateInfoAMD.sType );
@@ -9692,8 +10430,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV const &
- pipelineCoverageModulationStateCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV const & pipelineCoverageModulationStateCreateInfoNV ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageModulationStateCreateInfoNV.sType );
@@ -9710,8 +10448,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV const &
- pipelineCoverageReductionStateCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV const & pipelineCoverageReductionStateCreateInfoNV ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageReductionStateCreateInfoNV.sType );
@@ -9725,8 +10463,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV const &
- pipelineCoverageToColorStateCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV const & pipelineCoverageToColorStateCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageToColorStateCreateInfoNV.sType );
@@ -9741,8 +10479,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback const & pipelineCreationFeedback ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback const & pipelineCreationFeedback ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedback.flags );
@@ -9754,8 +10491,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo const &
- pipelineCreationFeedbackCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo const & pipelineCreationFeedbackCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedbackCreateInfo.sType );
@@ -9770,8 +10506,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT const &
- pipelineDiscardRectangleStateCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT const & pipelineDiscardRectangleStateCreateInfoEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineDiscardRectangleStateCreateInfoEXT.sType );
@@ -9787,8 +10523,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR const & pipelineExecutableInfoKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR const & pipelineExecutableInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInfoKHR.sType );
@@ -9802,8 +10537,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR const &
- pipelineExecutableInternalRepresentationKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR const & pipelineExecutableInternalRepresentationKHR ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInternalRepresentationKHR.sType );
@@ -9826,9 +10561,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR const & pipelineExecutablePropertiesKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR const & pipelineExecutablePropertiesKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutablePropertiesKHR.sType );
@@ -9850,8 +10583,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV const &
- pipelineFragmentShadingRateEnumStateCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV const & pipelineFragmentShadingRateEnumStateCreateInfoNV )
+ const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateEnumStateCreateInfoNV.sType );
@@ -9869,8 +10602,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR const &
- pipelineFragmentShadingRateStateCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR const & pipelineFragmentShadingRateStateCreateInfoKHR ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateStateCreateInfoKHR.sType );
@@ -9900,8 +10633,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PushConstantRange>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::PushConstantRange const & pushConstantRange ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PushConstantRange const & pushConstantRange ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pushConstantRange.stageFlags );
@@ -9914,8 +10646,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & pipelineLayoutCreateInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & pipelineLayoutCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineLayoutCreateInfo.sType );
@@ -9932,8 +10663,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR const & pipelineLibraryCreateInfoKHR )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR const & pipelineLibraryCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.sType );
@@ -9945,19 +10675,34 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT const & pipelinePropertiesIdentifierEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, pipelinePropertiesIdentifierEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, pipelinePropertiesIdentifierEXT.pNext );
+ for ( size_t i = 0; i < VK_UUID_SIZE; ++i )
+ {
+ VULKAN_HPP_HASH_COMBINE( seed, pipelinePropertiesIdentifierEXT.pipelineIdentifier[i] );
+ }
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT const &
- pipelineRasterizationConservativeStateCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT const & pipelineRasterizationConservativeStateCreateInfoEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationConservativeStateCreateInfoEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationConservativeStateCreateInfoEXT.pNext );
VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationConservativeStateCreateInfoEXT.flags );
- VULKAN_HPP_HASH_COMBINE( seed,
- pipelineRasterizationConservativeStateCreateInfoEXT.conservativeRasterizationMode );
- VULKAN_HPP_HASH_COMBINE( seed,
- pipelineRasterizationConservativeStateCreateInfoEXT.extraPrimitiveOverestimationSize );
+ VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationConservativeStateCreateInfoEXT.conservativeRasterizationMode );
+ VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationConservativeStateCreateInfoEXT.extraPrimitiveOverestimationSize );
return seed;
}
};
@@ -9965,8 +10710,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT const &
- pipelineRasterizationDepthClipStateCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT const & pipelineRasterizationDepthClipStateCreateInfoEXT )
+ const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationDepthClipStateCreateInfoEXT.sType );
@@ -9980,8 +10725,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT const &
- pipelineRasterizationLineStateCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT const & pipelineRasterizationLineStateCreateInfoEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoEXT.sType );
@@ -10011,8 +10756,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD const &
- pipelineRasterizationStateRasterizationOrderAMD ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()(
+ VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD const & pipelineRasterizationStateRasterizationOrderAMD ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateRasterizationOrderAMD.sType );
@@ -10025,8 +10770,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT const &
- pipelineRasterizationStateStreamCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT const & pipelineRasterizationStateStreamCreateInfoEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateStreamCreateInfoEXT.sType );
@@ -10040,8 +10785,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo>
{
- std::size_t operator()(
- VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo const & pipelineRenderingCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo const & pipelineRenderingCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineRenderingCreateInfo.sType );
@@ -10058,14 +10802,30 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV const &
- pipelineRepresentativeFragmentTestStateCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV const & pipelineRepresentativeFragmentTestStateCreateInfoNV ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineRepresentativeFragmentTestStateCreateInfoNV.sType );
VULKAN_HPP_HASH_COMBINE( seed, pipelineRepresentativeFragmentTestStateCreateInfoNV.pNext );
- VULKAN_HPP_HASH_COMBINE( seed,
- pipelineRepresentativeFragmentTestStateCreateInfoNV.representativeFragmentTestEnable );
+ VULKAN_HPP_HASH_COMBINE( seed, pipelineRepresentativeFragmentTestStateCreateInfoNV.representativeFragmentTestEnable );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT const & pipelineRobustnessCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.storageBuffers );
+ VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.uniformBuffers );
+ VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.vertexInputs );
+ VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.images );
return seed;
}
};
@@ -10073,8 +10833,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT const &
- pipelineSampleLocationsStateCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT const & pipelineSampleLocationsStateCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineSampleLocationsStateCreateInfoEXT.sType );
@@ -10086,10 +10846,25 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::PipelineShaderStageModuleIdentifierCreateInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineShaderStageModuleIdentifierCreateInfoEXT const & pipelineShaderStageModuleIdentifierCreateInfoEXT )
+ const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageModuleIdentifierCreateInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageModuleIdentifierCreateInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageModuleIdentifierCreateInfoEXT.identifierSize );
+ VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageModuleIdentifierCreateInfoEXT.pIdentifier );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo const &
- pipelineShaderStageRequiredSubgroupSizeCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo const & pipelineShaderStageRequiredSubgroupSizeCreateInfo )
+ const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageRequiredSubgroupSizeCreateInfo.sType );
@@ -10102,8 +10877,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo const &
- pipelineTessellationDomainOriginStateCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()(
+ VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo const & pipelineTessellationDomainOriginStateCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationDomainOriginStateCreateInfo.sType );
@@ -10116,8 +10891,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT const &
- vertexInputBindingDivisorDescriptionEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT const & vertexInputBindingDivisorDescriptionEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDivisorDescriptionEXT.binding );
@@ -10129,8 +10904,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT const &
- pipelineVertexInputDivisorStateCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT const & pipelineVertexInputDivisorStateCreateInfoEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfoEXT.sType );
@@ -10144,8 +10919,9 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV const &
- pipelineViewportCoarseSampleOrderStateCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV const & pipelineViewportCoarseSampleOrderStateCreateInfoNV ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportCoarseSampleOrderStateCreateInfoNV.sType );
@@ -10160,8 +10936,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT const &
- pipelineViewportDepthClipControlCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT const & pipelineViewportDepthClipControlCreateInfoEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportDepthClipControlCreateInfoEXT.sType );
@@ -10174,8 +10950,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV const &
- pipelineViewportExclusiveScissorStateCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV const & pipelineViewportExclusiveScissorStateCreateInfoNV )
+ const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportExclusiveScissorStateCreateInfoNV.sType );
@@ -10189,8 +10965,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV const & shadingRatePaletteNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV const & shadingRatePaletteNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, shadingRatePaletteNV.shadingRatePaletteEntryCount );
@@ -10202,8 +10977,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV const &
- pipelineViewportShadingRateImageStateCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV const & pipelineViewportShadingRateImageStateCreateInfoNV )
+ const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportShadingRateImageStateCreateInfoNV.sType );
@@ -10218,8 +10993,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ViewportSwizzleNV>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::ViewportSwizzleNV const & viewportSwizzleNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ViewportSwizzleNV const & viewportSwizzleNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, viewportSwizzleNV.x );
@@ -10233,8 +11007,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV const &
- pipelineViewportSwizzleStateCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV const & pipelineViewportSwizzleStateCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportSwizzleStateCreateInfoNV.sType );
@@ -10249,8 +11023,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ViewportWScalingNV>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::ViewportWScalingNV const & viewportWScalingNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ViewportWScalingNV const & viewportWScalingNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, viewportWScalingNV.xcoeff );
@@ -10262,8 +11035,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV const &
- pipelineViewportWScalingStateCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV const & pipelineViewportWScalingStateCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportWScalingStateCreateInfoNV.sType );
@@ -10279,8 +11052,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP const & presentFrameTokenGGP ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP const & presentFrameTokenGGP ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, presentFrameTokenGGP.sType );
@@ -10351,8 +11123,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PresentRegionsKHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::PresentRegionsKHR const & presentRegionsKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PresentRegionsKHR const & presentRegionsKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, presentRegionsKHR.sType );
@@ -10366,8 +11137,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE const & presentTimeGOOGLE ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE const & presentTimeGOOGLE ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, presentTimeGOOGLE.presentID );
@@ -10379,8 +11149,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE const & presentTimesInfoGOOGLE ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE const & presentTimesInfoGOOGLE ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, presentTimesInfoGOOGLE.sType );
@@ -10394,8 +11163,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & privateDataSlotCreateInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & privateDataSlotCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, privateDataSlotCreateInfo.sType );
@@ -10408,8 +11176,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo const & protectedSubmitInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo const & protectedSubmitInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, protectedSubmitInfo.sType );
@@ -10422,8 +11189,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & queryPoolCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & queryPoolCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.sType );
@@ -10439,8 +11205,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR const &
- queryPoolPerformanceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR const & queryPoolPerformanceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.sType );
@@ -10455,8 +11220,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL const &
- queryPoolPerformanceQueryCreateInfoINTEL ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL const & queryPoolPerformanceQueryCreateInfoINTEL ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceQueryCreateInfoINTEL.sType );
@@ -10469,8 +11234,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV const &
- queueFamilyCheckpointProperties2NV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV const & queueFamilyCheckpointProperties2NV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, queueFamilyCheckpointProperties2NV.sType );
@@ -10483,8 +11247,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV const &
- queueFamilyCheckpointPropertiesNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV const & queueFamilyCheckpointPropertiesNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, queueFamilyCheckpointPropertiesNV.sType );
@@ -10497,8 +11260,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR const &
- queueFamilyGlobalPriorityPropertiesKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR const & queueFamilyGlobalPriorityPropertiesKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityPropertiesKHR.sType );
@@ -10515,8 +11278,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::QueueFamilyProperties>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::QueueFamilyProperties const & queueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::QueueFamilyProperties const & queueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties.queueFlags );
@@ -10530,8 +11292,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 const & queueFamilyProperties2 ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 const & queueFamilyProperties2 ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties2.sType );
@@ -10543,15 +11304,30 @@ namespace std
# if defined( VK_ENABLE_BETA_EXTENSIONS )
template <>
- struct hash<VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusProperties2KHR>
+ struct hash<VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusPropertiesKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusProperties2KHR const &
- queueFamilyQueryResultStatusProperties2KHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusPropertiesKHR const & queueFamilyQueryResultStatusPropertiesKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, queueFamilyQueryResultStatusProperties2KHR.sType );
- VULKAN_HPP_HASH_COMBINE( seed, queueFamilyQueryResultStatusProperties2KHR.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, queueFamilyQueryResultStatusProperties2KHR.supported );
+ VULKAN_HPP_HASH_COMBINE( seed, queueFamilyQueryResultStatusPropertiesKHR.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, queueFamilyQueryResultStatusPropertiesKHR.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, queueFamilyQueryResultStatusPropertiesKHR.queryResultStatusSupport );
+ return seed;
+ }
+ };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::QueueFamilyVideoPropertiesKHR>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::QueueFamilyVideoPropertiesKHR const & queueFamilyVideoPropertiesKHR ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, queueFamilyVideoPropertiesKHR.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, queueFamilyVideoPropertiesKHR.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, queueFamilyVideoPropertiesKHR.videoCodecOperations );
return seed;
}
};
@@ -10560,8 +11336,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR const &
- rayTracingShaderGroupCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR const & rayTracingShaderGroupCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.sType );
@@ -10579,8 +11354,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR const &
- rayTracingPipelineInterfaceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR const & rayTracingPipelineInterfaceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineInterfaceCreateInfoKHR.sType );
@@ -10594,9 +11369,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & rayTracingPipelineCreateInfoKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & rayTracingPipelineCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.sType );
@@ -10620,8 +11393,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV const &
- rayTracingShaderGroupCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV const & rayTracingShaderGroupCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoNV.sType );
@@ -10638,9 +11410,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & rayTracingPipelineCreateInfoNV ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & rayTracingPipelineCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.sType );
@@ -10661,8 +11431,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE const & refreshCycleDurationGOOGLE ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE const & refreshCycleDurationGOOGLE ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, refreshCycleDurationGOOGLE.refreshDuration );
@@ -10673,8 +11442,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo const & renderPassAttachmentBeginInfo )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo const & renderPassAttachmentBeginInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, renderPassAttachmentBeginInfo.sType );
@@ -10688,8 +11456,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::RenderPassBeginInfo const & renderPassBeginInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassBeginInfo const & renderPassBeginInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, renderPassBeginInfo.sType );
@@ -10706,8 +11473,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SubpassDescription>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::SubpassDescription const & subpassDescription ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassDescription const & subpassDescription ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.flags );
@@ -10727,8 +11493,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SubpassDependency>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::SubpassDependency const & subpassDependency ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassDependency const & subpassDependency ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, subpassDependency.srcSubpass );
@@ -10745,8 +11510,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & renderPassCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & renderPassCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.sType );
@@ -10765,8 +11529,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SubpassDescription2>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::SubpassDescription2 const & subpassDescription2 ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassDescription2 const & subpassDescription2 ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.sType );
@@ -10789,8 +11552,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SubpassDependency2>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::SubpassDependency2 const & subpassDependency2 ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassDependency2 const & subpassDependency2 ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.sType );
@@ -10810,8 +11572,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & renderPassCreateInfo2 ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & renderPassCreateInfo2 ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.sType );
@@ -10830,10 +11591,48 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::RenderPassCreationControlEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassCreationControlEXT const & renderPassCreationControlEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, renderPassCreationControlEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, renderPassCreationControlEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, renderPassCreationControlEXT.disallowMerging );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT const & renderPassCreationFeedbackInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, renderPassCreationFeedbackInfoEXT.postMergeSubpassCount );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackCreateInfoEXT>
+ {
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackCreateInfoEXT const & renderPassCreationFeedbackCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, renderPassCreationFeedbackCreateInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, renderPassCreationFeedbackCreateInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, renderPassCreationFeedbackCreateInfoEXT.pRenderPassFeedback );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT const &
- renderPassFragmentDensityMapCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT const & renderPassFragmentDensityMapCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, renderPassFragmentDensityMapCreateInfoEXT.sType );
@@ -10846,8 +11645,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo const &
- renderPassInputAttachmentAspectCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo const & renderPassInputAttachmentAspectCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, renderPassInputAttachmentAspectCreateInfo.sType );
@@ -10861,8 +11660,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo const & renderPassMultiviewCreateInfo )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo const & renderPassMultiviewCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.sType );
@@ -10880,8 +11678,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT const & subpassSampleLocationsEXT ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT const & subpassSampleLocationsEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, subpassSampleLocationsEXT.subpassIndex );
@@ -10893,8 +11690,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT const &
- renderPassSampleLocationsBeginInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT const & renderPassSampleLocationsBeginInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, renderPassSampleLocationsBeginInfoEXT.sType );
@@ -10908,10 +11705,39 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT const & renderPassSubpassFeedbackInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, renderPassSubpassFeedbackInfoEXT.subpassMergeStatus );
+ for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i )
+ {
+ VULKAN_HPP_HASH_COMBINE( seed, renderPassSubpassFeedbackInfoEXT.description[i] );
+ }
+ VULKAN_HPP_HASH_COMBINE( seed, renderPassSubpassFeedbackInfoEXT.postMergeIndex );
+ return seed;
+ }
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackCreateInfoEXT>
+ {
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackCreateInfoEXT const & renderPassSubpassFeedbackCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, renderPassSubpassFeedbackCreateInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, renderPassSubpassFeedbackCreateInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, renderPassSubpassFeedbackCreateInfoEXT.pSubpassFeedback );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM const &
- renderPassTransformBeginInfoQCOM ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM const & renderPassTransformBeginInfoQCOM ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, renderPassTransformBeginInfoQCOM.sType );
@@ -10924,8 +11750,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT const &
- renderingFragmentDensityMapAttachmentInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT const & renderingFragmentDensityMapAttachmentInfoEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentDensityMapAttachmentInfoEXT.sType );
@@ -10939,8 +11765,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR const &
- renderingFragmentShadingRateAttachmentInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR const & renderingFragmentShadingRateAttachmentInfoKHR ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentShadingRateAttachmentInfoKHR.sType );
@@ -10975,8 +11801,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ResolveImageInfo2>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::ResolveImageInfo2 const & resolveImageInfo2 ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ResolveImageInfo2 const & resolveImageInfo2 ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.sType );
@@ -10994,8 +11819,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT const &
- samplerBorderColorComponentMappingCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()(
+ VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT const & samplerBorderColorComponentMappingCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, samplerBorderColorComponentMappingCreateInfoEXT.sType );
@@ -11009,8 +11834,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SamplerCreateInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & samplerCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & samplerCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.sType );
@@ -11038,9 +11862,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo const & samplerReductionModeCreateInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo const & samplerReductionModeCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, samplerReductionModeCreateInfo.sType );
@@ -11053,8 +11875,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const &
- samplerYcbcrConversionCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & samplerYcbcrConversionCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.sType );
@@ -11074,8 +11895,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties const &
- samplerYcbcrConversionImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties const & samplerYcbcrConversionImageFormatProperties ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionImageFormatProperties.sType );
@@ -11088,8 +11909,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo const & samplerYcbcrConversionInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo const & samplerYcbcrConversionInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionInfo.sType );
@@ -11103,8 +11923,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & screenSurfaceCreateInfoQNX ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & screenSurfaceCreateInfoQNX ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, screenSurfaceCreateInfoQNX.sType );
@@ -11120,8 +11939,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & semaphoreCreateInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & semaphoreCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, semaphoreCreateInfo.sType );
@@ -11134,8 +11952,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR const & semaphoreGetFdInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR const & semaphoreGetFdInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetFdInfoKHR.sType );
@@ -11150,9 +11967,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR const & semaphoreGetWin32HandleInfoKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR const & semaphoreGetWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetWin32HandleInfoKHR.sType );
@@ -11168,8 +11983,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA const &
- semaphoreGetZirconHandleInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA const & semaphoreGetZirconHandleInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetZirconHandleInfoFUCHSIA.sType );
@@ -11184,8 +11998,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo const & semaphoreSignalInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo const & semaphoreSignalInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, semaphoreSignalInfo.sType );
@@ -11199,8 +12012,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo const & semaphoreSubmitInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo const & semaphoreSubmitInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.sType );
@@ -11216,8 +12028,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo const & semaphoreTypeCreateInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo const & semaphoreTypeCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, semaphoreTypeCreateInfo.sType );
@@ -11231,8 +12042,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo const & semaphoreWaitInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo const & semaphoreWaitInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, semaphoreWaitInfo.sType );
@@ -11248,9 +12058,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV const & setStateFlagsIndirectCommandNV ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV const & setStateFlagsIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, setStateFlagsIndirectCommandNV.data );
@@ -11261,8 +12069,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & shaderModuleCreateInfo ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & shaderModuleCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.sType );
@@ -11275,10 +12082,27 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT const & shaderModuleIdentifierEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, shaderModuleIdentifierEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, shaderModuleIdentifierEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, shaderModuleIdentifierEXT.identifierSize );
+ for ( size_t i = 0; i < VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT; ++i )
+ {
+ VULKAN_HPP_HASH_COMBINE( seed, shaderModuleIdentifierEXT.identifier[i] );
+ }
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT const &
- shaderModuleValidationCacheCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT const & shaderModuleValidationCacheCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, shaderModuleValidationCacheCreateInfoEXT.sType );
@@ -11291,8 +12115,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD const & shaderResourceUsageAMD ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD const & shaderResourceUsageAMD ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, shaderResourceUsageAMD.numUsedVgprs );
@@ -11307,8 +12130,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD const & shaderStatisticsInfoAMD ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD const & shaderStatisticsInfoAMD ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, shaderStatisticsInfoAMD.shaderStageMask );
@@ -11328,8 +12150,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR const &
- sharedPresentSurfaceCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR const & sharedPresentSurfaceCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, sharedPresentSurfaceCapabilitiesKHR.sType );
@@ -11342,8 +12163,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>
{
- std::size_t operator()(
- VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const & sparseImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const & sparseImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, sparseImageFormatProperties.aspectMask );
@@ -11356,8 +12176,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 const & sparseImageFormatProperties2 )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 const & sparseImageFormatProperties2 ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, sparseImageFormatProperties2.sType );
@@ -11370,8 +12189,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements const & sparseImageMemoryRequirements )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements const & sparseImageMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements.formatProperties );
@@ -11386,9 +12204,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 const & sparseImageMemoryRequirements2 ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 const & sparseImageMemoryRequirements2 ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements2.sType );
@@ -11402,8 +12218,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const &
- streamDescriptorSurfaceCreateInfoGGP ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & streamDescriptorSurfaceCreateInfoGGP ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, streamDescriptorSurfaceCreateInfoGGP.sType );
@@ -11418,8 +12233,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const & stridedDeviceAddressRegionKHR )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const & stridedDeviceAddressRegionKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, stridedDeviceAddressRegionKHR.deviceAddress );
@@ -11483,8 +12297,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve const &
- subpassDescriptionDepthStencilResolve ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve const & subpassDescriptionDepthStencilResolve ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, subpassDescriptionDepthStencilResolve.sType );
@@ -11511,8 +12325,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM const &
- subpassFragmentDensityMapOffsetEndInfoQCOM ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM const & subpassFragmentDensityMapOffsetEndInfoQCOM ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, subpassFragmentDensityMapOffsetEndInfoQCOM.sType );
@@ -11524,10 +12338,23 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT const & subpassResolvePerformanceQueryEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, subpassResolvePerformanceQueryEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, subpassResolvePerformanceQueryEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, subpassResolvePerformanceQueryEXT.optimal );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI const &
- subpassShadingPipelineCreateInfoHUAWEI ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI const & subpassShadingPipelineCreateInfoHUAWEI ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, subpassShadingPipelineCreateInfoHUAWEI.sType );
@@ -11539,10 +12366,22 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT const & subresourceLayout2EXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2EXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2EXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2EXT.subresourceLayout );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT const & surfaceCapabilities2EXT ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT const & surfaceCapabilities2EXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.sType );
@@ -11565,8 +12404,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR const & surfaceCapabilitiesKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR const & surfaceCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.minImageCount );
@@ -11586,8 +12424,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR const & surfaceCapabilities2KHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR const & surfaceCapabilities2KHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2KHR.sType );
@@ -11601,8 +12438,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT const &
- surfaceCapabilitiesFullScreenExclusiveEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT const & surfaceCapabilitiesFullScreenExclusiveEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesFullScreenExclusiveEXT.sType );
@@ -11614,6 +12451,19 @@ namespace std
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV const & surfaceCapabilitiesPresentBarrierNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesPresentBarrierNV.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesPresentBarrierNV.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesPresentBarrierNV.presentBarrierSupported );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const & surfaceFormatKHR ) const VULKAN_HPP_NOEXCEPT
@@ -11628,8 +12478,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const & surfaceFormat2KHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const & surfaceFormat2KHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, surfaceFormat2KHR.sType );
@@ -11643,8 +12492,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT const &
- surfaceFullScreenExclusiveInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT const & surfaceFullScreenExclusiveInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveInfoEXT.sType );
@@ -11659,8 +12507,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT const &
- surfaceFullScreenExclusiveWin32InfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT const & surfaceFullScreenExclusiveWin32InfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveWin32InfoEXT.sType );
@@ -11674,9 +12522,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR const & surfaceProtectedCapabilitiesKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR const & surfaceProtectedCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, surfaceProtectedCapabilitiesKHR.sType );
@@ -11689,8 +12535,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT const & swapchainCounterCreateInfoEXT )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT const & swapchainCounterCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, swapchainCounterCreateInfoEXT.sType );
@@ -11703,8 +12548,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & swapchainCreateInfoKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & swapchainCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.sType );
@@ -11732,8 +12576,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD const &
- swapchainDisplayNativeHdrCreateInfoAMD ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD const & swapchainDisplayNativeHdrCreateInfoAMD ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, swapchainDisplayNativeHdrCreateInfoAMD.sType );
@@ -11744,10 +12588,22 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV const & swapchainPresentBarrierCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentBarrierCreateInfoNV.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentBarrierCreateInfoNV.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentBarrierCreateInfoNV.presentBarrierEnable );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD const &
- textureLODGatherFormatPropertiesAMD ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD const & textureLODGatherFormatPropertiesAMD ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, textureLODGatherFormatPropertiesAMD.sType );
@@ -11758,10 +12614,24 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::TilePropertiesQCOM const & tilePropertiesQCOM ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.tileSize );
+ VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.apronSize );
+ VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.origin );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo>
{
- std::size_t operator()(
- VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo const & timelineSemaphoreSubmitInfo ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo const & timelineSemaphoreSubmitInfo ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.sType );
@@ -11775,10 +12645,33 @@ namespace std
};
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR const & traceRaysIndirectCommand2KHR ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.raygenShaderRecordAddress );
+ VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.raygenShaderRecordSize );
+ VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.missShaderBindingTableAddress );
+ VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.missShaderBindingTableSize );
+ VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.missShaderBindingTableStride );
+ VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.hitShaderBindingTableAddress );
+ VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.hitShaderBindingTableSize );
+ VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.hitShaderBindingTableStride );
+ VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.callableShaderBindingTableAddress );
+ VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.callableShaderBindingTableSize );
+ VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.callableShaderBindingTableStride );
+ VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.width );
+ VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.height );
+ VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.depth );
+ return seed;
+ }
+ };
+
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR>
{
- std::size_t operator()(
- VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR const & traceRaysIndirectCommandKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR const & traceRaysIndirectCommandKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommandKHR.width );
@@ -11791,8 +12684,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & validationCacheCreateInfoEXT )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & validationCacheCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.sType );
@@ -11807,8 +12699,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT const & validationFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT const & validationFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.sType );
@@ -11824,8 +12715,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ValidationFlagsEXT>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::ValidationFlagsEXT const & validationFlagsEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ValidationFlagsEXT const & validationFlagsEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, validationFlagsEXT.sType );
@@ -11839,8 +12729,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT const &
- vertexInputAttributeDescription2EXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT const & vertexInputAttributeDescription2EXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.sType );
@@ -11856,8 +12745,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT const &
- vertexInputBindingDescription2EXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT const & vertexInputBindingDescription2EXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.sType );
@@ -11874,8 +12762,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & viSurfaceCreateInfoNN ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & viSurfaceCreateInfoNN ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, viSurfaceCreateInfoNN.sType );
@@ -11889,18 +12776,17 @@ namespace std
# if defined( VK_ENABLE_BETA_EXTENSIONS )
template <>
- struct hash<VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR>
+ struct hash<VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR const & videoPictureResourceKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR const & videoPictureResourceInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceKHR.sType );
- VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceKHR.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceKHR.codedOffset );
- VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceKHR.codedExtent );
- VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceKHR.baseArrayLayer );
- VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceKHR.imageViewBinding );
+ VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.codedOffset );
+ VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.codedExtent );
+ VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.baseArrayLayer );
+ VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.imageViewBinding );
return seed;
}
};
@@ -11908,16 +12794,15 @@ namespace std
# if defined( VK_ENABLE_BETA_EXTENSIONS )
template <>
- struct hash<VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR>
+ struct hash<VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR const & videoReferenceSlotKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR const & videoReferenceSlotInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotKHR.sType );
- VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotKHR.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotKHR.slotIndex );
- VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotKHR.pPictureResource );
+ VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotInfoKHR.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotInfoKHR.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotInfoKHR.slotIndex );
+ VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotInfoKHR.pPictureResource );
return seed;
}
};
@@ -11927,14 +12812,12 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR const & videoBeginCodingInfoKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR const & videoBeginCodingInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.sType );
VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.pNext );
VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.flags );
- VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.codecQualityPreset );
VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.videoSession );
VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.videoSessionParameters );
VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.referenceSlotCount );
@@ -11946,41 +12829,22 @@ namespace std
# if defined( VK_ENABLE_BETA_EXTENSIONS )
template <>
- struct hash<VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR>
- {
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR const & videoBindMemoryKHR ) const VULKAN_HPP_NOEXCEPT
- {
- std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, videoBindMemoryKHR.sType );
- VULKAN_HPP_HASH_COMBINE( seed, videoBindMemoryKHR.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoBindMemoryKHR.memoryBindIndex );
- VULKAN_HPP_HASH_COMBINE( seed, videoBindMemoryKHR.memory );
- VULKAN_HPP_HASH_COMBINE( seed, videoBindMemoryKHR.memoryOffset );
- VULKAN_HPP_HASH_COMBINE( seed, videoBindMemoryKHR.memorySize );
- return seed;
- }
- };
-# endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-# if defined( VK_ENABLE_BETA_EXTENSIONS )
- template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR const & videoCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR const & videoCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.sType );
VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.capabilityFlags );
+ VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.flags );
VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.minBitstreamBufferOffsetAlignment );
VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.minBitstreamBufferSizeAlignment );
- VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.videoPictureExtentGranularity );
- VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.minExtent );
- VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.maxExtent );
- VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.maxReferencePicturesSlotsCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.maxReferencePicturesActiveCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.pictureAccessGranularity );
+ VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.minCodedExtent );
+ VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.maxCodedExtent );
+ VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.maxDpbSlots );
+ VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.maxActiveReferencePictures );
+ VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.stdHeaderVersion );
return seed;
}
};
@@ -11990,8 +12854,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR const & videoCodingControlInfoKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR const & videoCodingControlInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoCodingControlInfoKHR.sType );
@@ -12004,18 +12867,14 @@ namespace std
# if defined( VK_ENABLE_BETA_EXTENSIONS )
template <>
- struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesEXT>
+ struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesEXT const & videoDecodeH264CapabilitiesEXT ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR const & videoDecodeCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesEXT.sType );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesEXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesEXT.maxLevel );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesEXT.fieldOffsetGranularity );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesEXT.stdExtensionVersion );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeCapabilitiesKHR.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeCapabilitiesKHR.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeCapabilitiesKHR.flags );
return seed;
}
};
@@ -12023,15 +12882,15 @@ namespace std
# if defined( VK_ENABLE_BETA_EXTENSIONS )
template <>
- struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoEXT>
+ struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoEXT const & videoDecodeH264DpbSlotInfoEXT )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesEXT const & videoDecodeH264CapabilitiesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264DpbSlotInfoEXT.sType );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264DpbSlotInfoEXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264DpbSlotInfoEXT.pStdReferenceInfo );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesEXT.maxLevelIdc );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesEXT.fieldOffsetGranularity );
return seed;
}
};
@@ -12039,15 +12898,14 @@ namespace std
# if defined( VK_ENABLE_BETA_EXTENSIONS )
template <>
- struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH264MvcEXT>
+ struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoEXT>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264MvcEXT const & videoDecodeH264MvcEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoEXT const & videoDecodeH264DpbSlotInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264MvcEXT.sType );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264MvcEXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264MvcEXT.pStdMvc );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264DpbSlotInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264DpbSlotInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264DpbSlotInfoEXT.pStdReferenceInfo );
return seed;
}
};
@@ -12057,32 +12915,14 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoEXT const & videoDecodeH264PictureInfoEXT )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoEXT const & videoDecodeH264PictureInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoEXT.pNext );
VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoEXT.pStdPictureInfo );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoEXT.slicesCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoEXT.pSlicesDataOffsets );
- return seed;
- }
- };
-# endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-# if defined( VK_ENABLE_BETA_EXTENSIONS )
- template <>
- struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileEXT>
- {
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileEXT const & videoDecodeH264ProfileEXT ) const
- VULKAN_HPP_NOEXCEPT
- {
- std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileEXT.sType );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileEXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileEXT.stdProfileIdc );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileEXT.pictureLayout );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoEXT.sliceCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoEXT.pSliceOffsets );
return seed;
}
};
@@ -12090,16 +12930,15 @@ namespace std
# if defined( VK_ENABLE_BETA_EXTENSIONS )
template <>
- struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionCreateInfoEXT>
+ struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionCreateInfoEXT const &
- videoDecodeH264SessionCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoEXT const & videoDecodeH264ProfileInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionCreateInfoEXT.sType );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionCreateInfoEXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionCreateInfoEXT.flags );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionCreateInfoEXT.pStdExtensionVersion );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileInfoEXT.stdProfileIdc );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileInfoEXT.pictureLayout );
return seed;
}
};
@@ -12109,16 +12948,16 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT const &
- videoDecodeH264SessionParametersAddInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT const & videoDecodeH264SessionParametersAddInfoEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoEXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoEXT.spsStdCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoEXT.pSpsStd );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoEXT.ppsStdCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoEXT.pPpsStd );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoEXT.stdSPSCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoEXT.pStdSPSs );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoEXT.stdPPSCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoEXT.pStdPPSs );
return seed;
}
};
@@ -12128,14 +12967,14 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoEXT const &
- videoDecodeH264SessionParametersCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoEXT const & videoDecodeH264SessionParametersCreateInfoEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoEXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoEXT.maxSpsStdCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoEXT.maxPpsStdCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoEXT.maxStdSPSCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoEXT.maxStdPPSCount );
VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoEXT.pParametersAddInfo );
return seed;
}
@@ -12146,15 +12985,12 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesEXT>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesEXT const & videoDecodeH265CapabilitiesEXT ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesEXT const & videoDecodeH265CapabilitiesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265CapabilitiesEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265CapabilitiesEXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265CapabilitiesEXT.maxLevel );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265CapabilitiesEXT.stdExtensionVersion );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265CapabilitiesEXT.maxLevelIdc );
return seed;
}
};
@@ -12164,8 +13000,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoEXT const & videoDecodeH265DpbSlotInfoEXT )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoEXT const & videoDecodeH265DpbSlotInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265DpbSlotInfoEXT.sType );
@@ -12180,15 +13015,14 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoEXT const & videoDecodeH265PictureInfoEXT )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoEXT const & videoDecodeH265PictureInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoEXT.pNext );
VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoEXT.pStdPictureInfo );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoEXT.slicesCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoEXT.pSlicesDataOffsets );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoEXT.sliceCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoEXT.pSliceOffsets );
return seed;
}
};
@@ -12196,32 +13030,14 @@ namespace std
# if defined( VK_ENABLE_BETA_EXTENSIONS )
template <>
- struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileEXT>
+ struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileEXT const & videoDecodeH265ProfileEXT ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoEXT const & videoDecodeH265ProfileInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265ProfileEXT.sType );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265ProfileEXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265ProfileEXT.stdProfileIdc );
- return seed;
- }
- };
-# endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-# if defined( VK_ENABLE_BETA_EXTENSIONS )
- template <>
- struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionCreateInfoEXT>
- {
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionCreateInfoEXT const &
- videoDecodeH265SessionCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
- {
- std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionCreateInfoEXT.sType );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionCreateInfoEXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionCreateInfoEXT.flags );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionCreateInfoEXT.pStdExtensionVersion );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265ProfileInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265ProfileInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265ProfileInfoEXT.stdProfileIdc );
return seed;
}
};
@@ -12231,16 +13047,18 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT const &
- videoDecodeH265SessionParametersAddInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT const & videoDecodeH265SessionParametersAddInfoEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoEXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoEXT.spsStdCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoEXT.pSpsStd );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoEXT.ppsStdCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoEXT.pPpsStd );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoEXT.stdVPSCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoEXT.pStdVPSs );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoEXT.stdSPSCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoEXT.pStdSPSs );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoEXT.stdPPSCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoEXT.pStdPPSs );
return seed;
}
};
@@ -12250,14 +13068,15 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoEXT const &
- videoDecodeH265SessionParametersCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoEXT const & videoDecodeH265SessionParametersCreateInfoEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoEXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoEXT.maxSpsStdCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoEXT.maxPpsStdCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoEXT.maxStdVPSCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoEXT.maxStdSPSCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoEXT.maxStdPPSCount );
VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoEXT.pParametersAddInfo );
return seed;
}
@@ -12268,15 +13087,12 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR const & videoDecodeInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR const & videoDecodeInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.sType );
VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.pNext );
VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.flags );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.codedOffset );
- VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.codedExtent );
VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.srcBuffer );
VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.srcBufferOffset );
VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.srcBufferRange );
@@ -12291,11 +13107,43 @@ namespace std
# if defined( VK_ENABLE_BETA_EXTENSIONS )
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR const & videoDecodeUsageInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeUsageInfoKHR.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeUsageInfoKHR.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, videoDecodeUsageInfoKHR.videoUsageHints );
+ return seed;
+ }
+ };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR const & videoEncodeCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.flags );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.rateControlModes );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.rateControlLayerCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.qualityLevelCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.inputImageDataFillAlignment );
+ return seed;
+ }
+ };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT const & videoEncodeH264CapabilitiesEXT ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT const & videoEncodeH264CapabilitiesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.sType );
@@ -12303,14 +13151,14 @@ namespace std
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.flags );
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.inputModeFlags );
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.outputModeFlags );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.minPictureSizeInMbs );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.maxPictureSizeInMbs );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.inputImageDataAlignment );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.maxNumL0ReferenceForP );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.maxNumL0ReferenceForB );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.maxNumL1Reference );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.qualityLevelCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.stdExtensionVersion );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.maxPPictureL0ReferenceCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.maxBPictureL0ReferenceCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.maxL1ReferenceCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.motionVectorsOverPicBoundariesFlag );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.maxBytesPerPicDenom );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.maxBitsPerMbDenom );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.log2MaxMvLengthHorizontal );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.log2MaxMvLengthVertical );
return seed;
}
};
@@ -12320,14 +13168,13 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT const & videoEncodeH264DpbSlotInfoEXT )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT const & videoEncodeH264DpbSlotInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264DpbSlotInfoEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264DpbSlotInfoEXT.pNext );
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264DpbSlotInfoEXT.slotIndex );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264DpbSlotInfoEXT.pStdPictureInfo );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264DpbSlotInfoEXT.pStdReferenceInfo );
return seed;
}
};
@@ -12335,18 +13182,18 @@ namespace std
# if defined( VK_ENABLE_BETA_EXTENSIONS )
template <>
- struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264EmitPictureParametersEXT>
+ struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264EmitPictureParametersInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264EmitPictureParametersEXT const &
- videoEncodeH264EmitPictureParametersEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264EmitPictureParametersInfoEXT const & videoEncodeH264EmitPictureParametersInfoEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersEXT.sType );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersEXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersEXT.spsId );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersEXT.emitSpsEnable );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersEXT.ppsIdEntryCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersEXT.ppsIdEntries );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersInfoEXT.spsId );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersInfoEXT.emitSpsEnable );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersInfoEXT.ppsIdEntryCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersInfoEXT.ppsIdEntries );
return seed;
}
};
@@ -12356,8 +13203,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT>
{
- std::size_t operator()(
- VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT const & videoEncodeH264FrameSizeEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT const & videoEncodeH264FrameSizeEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264FrameSizeEXT.frameISize );
@@ -12370,20 +13216,18 @@ namespace std
# if defined( VK_ENABLE_BETA_EXTENSIONS )
template <>
- struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT>
+ struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT>
{
- std::size_t operator()(
- VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT const & videoEncodeH264NaluSliceEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT const & videoEncodeH264ReferenceListsInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceEXT.sType );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceEXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceEXT.pSliceHeaderStd );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceEXT.mbCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceEXT.refFinalList0EntryCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceEXT.pRefFinalList0Entries );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceEXT.refFinalList1EntryCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceEXT.pRefFinalList1Entries );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ReferenceListsInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ReferenceListsInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ReferenceListsInfoEXT.referenceList0EntryCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ReferenceListsInfoEXT.pReferenceList0Entries );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ReferenceListsInfoEXT.referenceList1EntryCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ReferenceListsInfoEXT.pReferenceList1Entries );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ReferenceListsInfoEXT.pMemMgmtCtrlOperations );
return seed;
}
};
@@ -12391,15 +13235,31 @@ namespace std
# if defined( VK_ENABLE_BETA_EXTENSIONS )
template <>
- struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileEXT>
+ struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileEXT const & videoEncodeH264ProfileEXT ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT const & videoEncodeH264NaluSliceInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceInfoEXT.mbCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceInfoEXT.pReferenceFinalLists );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceInfoEXT.pSliceHeaderStd );
+ return seed;
+ }
+ };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoEXT const & videoEncodeH264ProfileInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ProfileEXT.sType );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ProfileEXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ProfileEXT.stdProfileIdc );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ProfileInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ProfileInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ProfileInfoEXT.stdProfileIdc );
return seed;
}
};
@@ -12409,8 +13269,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const & videoEncodeH264QpEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const & videoEncodeH264QpEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QpEXT.qpI );
@@ -12425,8 +13284,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoEXT const &
- videoEncodeH264RateControlInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoEXT const & videoEncodeH264RateControlInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoEXT.sType );
@@ -12445,8 +13303,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT const &
- videoEncodeH264RateControlLayerInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT const & videoEncodeH264RateControlLayerInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.sType );
@@ -12467,36 +13325,18 @@ namespace std
# if defined( VK_ENABLE_BETA_EXTENSIONS )
template <>
- struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoEXT>
- {
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoEXT const &
- videoEncodeH264SessionCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
- {
- std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionCreateInfoEXT.sType );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionCreateInfoEXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionCreateInfoEXT.flags );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionCreateInfoEXT.maxPictureSizeInMbs );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionCreateInfoEXT.pStdExtensionVersion );
- return seed;
- }
- };
-# endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-# if defined( VK_ENABLE_BETA_EXTENSIONS )
- template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT const &
- videoEncodeH264SessionParametersAddInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT const & videoEncodeH264SessionParametersAddInfoEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoEXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoEXT.spsStdCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoEXT.pSpsStd );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoEXT.ppsStdCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoEXT.pPpsStd );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoEXT.stdSPSCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoEXT.pStdSPSs );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoEXT.stdPPSCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoEXT.pStdPPSs );
return seed;
}
};
@@ -12506,14 +13346,14 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT const &
- videoEncodeH264SessionParametersCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT const & videoEncodeH264SessionParametersCreateInfoEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoEXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoEXT.maxSpsStdCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoEXT.maxPpsStdCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoEXT.maxStdSPSCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoEXT.maxStdPPSCount );
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoEXT.pParametersAddInfo );
return seed;
}
@@ -12524,17 +13364,12 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH264VclFrameInfoEXT>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264VclFrameInfoEXT const & videoEncodeH264VclFrameInfoEXT ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264VclFrameInfoEXT const & videoEncodeH264VclFrameInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.refDefaultFinalList0EntryCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.pRefDefaultFinalList0Entries );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.refDefaultFinalList1EntryCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.pRefDefaultFinalList1Entries );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.pReferenceFinalLists );
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.naluSliceEntryCount );
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.pNaluSliceEntries );
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.pCurrentPictureInfo );
@@ -12547,9 +13382,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT const & videoEncodeH265CapabilitiesEXT ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT const & videoEncodeH265CapabilitiesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.sType );
@@ -12558,13 +13391,22 @@ namespace std
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.inputModeFlags );
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.outputModeFlags );
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.ctbSizes );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.inputImageDataAlignment );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxNumL0ReferenceForP );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxNumL0ReferenceForB );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxNumL1Reference );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxNumSubLayers );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.qualityLevelCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.stdExtensionVersion );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.transformBlockSizes );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxPPictureL0ReferenceCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxBPictureL0ReferenceCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxL1ReferenceCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxSubLayersCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.minLog2MinLumaCodingBlockSizeMinus3 );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxLog2MinLumaCodingBlockSizeMinus3 );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.minLog2MinLumaTransformBlockSizeMinus2 );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxLog2MinLumaTransformBlockSizeMinus2 );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.minMaxTransformHierarchyDepthInter );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxMaxTransformHierarchyDepthInter );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.minMaxTransformHierarchyDepthIntra );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxMaxTransformHierarchyDepthIntra );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxDiffCuQpDeltaDepth );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.minMaxNumMergeCand );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxMaxNumMergeCand );
return seed;
}
};
@@ -12574,8 +13416,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT const & videoEncodeH265DpbSlotInfoEXT )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT const & videoEncodeH265DpbSlotInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265DpbSlotInfoEXT.sType );
@@ -12589,20 +13430,20 @@ namespace std
# if defined( VK_ENABLE_BETA_EXTENSIONS )
template <>
- struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265EmitPictureParametersEXT>
+ struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265EmitPictureParametersInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265EmitPictureParametersEXT const &
- videoEncodeH265EmitPictureParametersEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265EmitPictureParametersInfoEXT const & videoEncodeH265EmitPictureParametersInfoEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersEXT.sType );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersEXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersEXT.vpsId );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersEXT.spsId );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersEXT.emitVpsEnable );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersEXT.emitSpsEnable );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersEXT.ppsIdEntryCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersEXT.ppsIdEntries );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersInfoEXT.vpsId );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersInfoEXT.spsId );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersInfoEXT.emitVpsEnable );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersInfoEXT.emitSpsEnable );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersInfoEXT.ppsIdEntryCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersInfoEXT.ppsIdEntries );
return seed;
}
};
@@ -12612,8 +13453,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT>
{
- std::size_t operator()(
- VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT const & videoEncodeH265FrameSizeEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT const & videoEncodeH265FrameSizeEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265FrameSizeEXT.frameISize );
@@ -12626,19 +13466,18 @@ namespace std
# if defined( VK_ENABLE_BETA_EXTENSIONS )
template <>
- struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT>
+ struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT const &
- videoEncodeH265ReferenceListsEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT const & videoEncodeH265ReferenceListsInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsEXT.sType );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsEXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsEXT.referenceList0EntryCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsEXT.pReferenceList0Entries );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsEXT.referenceList1EntryCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsEXT.pReferenceList1Entries );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsEXT.pReferenceModifications );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsInfoEXT.referenceList0EntryCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsInfoEXT.pReferenceList0Entries );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsInfoEXT.referenceList1EntryCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsInfoEXT.pReferenceList1Entries );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsInfoEXT.pReferenceModifications );
return seed;
}
};
@@ -12646,17 +13485,17 @@ namespace std
# if defined( VK_ENABLE_BETA_EXTENSIONS )
template <>
- struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentEXT>
+ struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentEXT const &
- videoEncodeH265NaluSliceSegmentEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT const & videoEncodeH265NaluSliceSegmentInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentEXT.sType );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentEXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentEXT.ctbCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentEXT.pReferenceFinalLists );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentEXT.pSliceSegmentHeaderStd );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentInfoEXT.ctbCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentInfoEXT.pReferenceFinalLists );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentInfoEXT.pSliceSegmentHeaderStd );
return seed;
}
};
@@ -12664,15 +13503,14 @@ namespace std
# if defined( VK_ENABLE_BETA_EXTENSIONS )
template <>
- struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileEXT>
+ struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileEXT const & videoEncodeH265ProfileEXT ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoEXT const & videoEncodeH265ProfileInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ProfileEXT.sType );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ProfileEXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ProfileEXT.stdProfileIdc );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ProfileInfoEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ProfileInfoEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ProfileInfoEXT.stdProfileIdc );
return seed;
}
};
@@ -12682,8 +13520,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const & videoEncodeH265QpEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const & videoEncodeH265QpEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QpEXT.qpI );
@@ -12698,8 +13535,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT const &
- videoEncodeH265RateControlInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT const & videoEncodeH265RateControlInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoEXT.sType );
@@ -12718,8 +13554,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT const &
- videoEncodeH265RateControlLayerInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT const & videoEncodeH265RateControlLayerInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.sType );
@@ -12740,37 +13576,20 @@ namespace std
# if defined( VK_ENABLE_BETA_EXTENSIONS )
template <>
- struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionCreateInfoEXT>
- {
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionCreateInfoEXT const &
- videoEncodeH265SessionCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
- {
- std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionCreateInfoEXT.sType );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionCreateInfoEXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionCreateInfoEXT.flags );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionCreateInfoEXT.pStdExtensionVersion );
- return seed;
- }
- };
-# endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-# if defined( VK_ENABLE_BETA_EXTENSIONS )
- template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT const &
- videoEncodeH265SessionParametersAddInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT const & videoEncodeH265SessionParametersAddInfoEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.vpsStdCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.pVpsStd );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.spsStdCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.pSpsStd );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.ppsStdCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.pPpsStd );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.stdVPSCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.pStdVPSs );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.stdSPSCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.pStdSPSs );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.stdPPSCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.pStdPPSs );
return seed;
}
};
@@ -12780,15 +13599,15 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT const &
- videoEncodeH265SessionParametersCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT const & videoEncodeH265SessionParametersCreateInfoEXT ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoEXT.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoEXT.maxVpsStdCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoEXT.maxSpsStdCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoEXT.maxPpsStdCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoEXT.maxStdVPSCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoEXT.maxStdSPSCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoEXT.maxStdPPSCount );
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoEXT.pParametersAddInfo );
return seed;
}
@@ -12799,9 +13618,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265VclFrameInfoEXT>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265VclFrameInfoEXT const & videoEncodeH265VclFrameInfoEXT ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265VclFrameInfoEXT const & videoEncodeH265VclFrameInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265VclFrameInfoEXT.sType );
@@ -12819,15 +13636,13 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR const & videoEncodeInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR const & videoEncodeInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.sType );
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.pNext );
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.flags );
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.qualityLevel );
- VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.codedExtent );
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.dstBitstreamBuffer );
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.dstBitstreamBufferOffset );
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.dstBitstreamBufferMaxRange );
@@ -12845,8 +13660,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR const &
- videoEncodeRateControlLayerInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR const & videoEncodeRateControlLayerInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.sType );
@@ -12866,8 +13680,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR const & videoEncodeRateControlInfoKHR )
- const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR const & videoEncodeRateControlInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.sType );
@@ -12883,10 +13696,26 @@ namespace std
# if defined( VK_ENABLE_BETA_EXTENSIONS )
template <>
+ struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeUsageInfoKHR>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeUsageInfoKHR const & videoEncodeUsageInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeUsageInfoKHR.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeUsageInfoKHR.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeUsageInfoKHR.videoUsageHints );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeUsageInfoKHR.videoContentHints );
+ VULKAN_HPP_HASH_COMBINE( seed, videoEncodeUsageInfoKHR.tuningMode );
+ return seed;
+ }
+ };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR const & videoEndCodingInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR const & videoEndCodingInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoEndCodingInfoKHR.sType );
@@ -12901,13 +13730,17 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR const & videoFormatPropertiesKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR const & videoFormatPropertiesKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.sType );
VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.pNext );
VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.format );
+ VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.componentMapping );
+ VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.imageCreateFlags );
+ VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.imageType );
+ VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.imageTiling );
+ VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.imageUsageFlags );
return seed;
}
};
@@ -12915,16 +13748,17 @@ namespace std
# if defined( VK_ENABLE_BETA_EXTENSIONS )
template <>
- struct hash<VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR>
+ struct hash<VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR>
{
- std::size_t operator()(
- VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR const & videoGetMemoryPropertiesKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR const & videoProfileInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, videoGetMemoryPropertiesKHR.sType );
- VULKAN_HPP_HASH_COMBINE( seed, videoGetMemoryPropertiesKHR.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoGetMemoryPropertiesKHR.memoryBindIndex );
- VULKAN_HPP_HASH_COMBINE( seed, videoGetMemoryPropertiesKHR.pMemoryRequirements );
+ VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.videoCodecOperation );
+ VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.chromaSubsampling );
+ VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.lumaBitDepth );
+ VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.chromaBitDepth );
return seed;
}
};
@@ -12932,16 +13766,15 @@ namespace std
# if defined( VK_ENABLE_BETA_EXTENSIONS )
template <>
- struct hash<VULKAN_HPP_NAMESPACE::VideoQueueFamilyProperties2KHR>
+ struct hash<VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::VideoQueueFamilyProperties2KHR const & videoQueueFamilyProperties2KHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR const & videoProfileListInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
- VULKAN_HPP_HASH_COMBINE( seed, videoQueueFamilyProperties2KHR.sType );
- VULKAN_HPP_HASH_COMBINE( seed, videoQueueFamilyProperties2KHR.pNext );
- VULKAN_HPP_HASH_COMBINE( seed, videoQueueFamilyProperties2KHR.videoCodecOperations );
+ VULKAN_HPP_HASH_COMBINE( seed, videoProfileListInfoKHR.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, videoProfileListInfoKHR.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, videoProfileListInfoKHR.profileCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoProfileListInfoKHR.pProfiles );
return seed;
}
};
@@ -12951,8 +13784,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & videoSessionCreateInfoKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & videoSessionCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.sType );
@@ -12962,9 +13794,26 @@ namespace std
VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.pVideoProfile );
VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.pictureFormat );
VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.maxCodedExtent );
- VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.referencePicturesFormat );
- VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.maxReferencePicturesSlotsCount );
- VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.maxReferencePicturesActiveCount );
+ VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.referencePictureFormat );
+ VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.maxDpbSlots );
+ VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.maxActiveReferencePictures );
+ VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.pStdHeaderVersion );
+ return seed;
+ }
+ };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR const & videoSessionMemoryRequirementsKHR ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, videoSessionMemoryRequirementsKHR.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, videoSessionMemoryRequirementsKHR.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, videoSessionMemoryRequirementsKHR.memoryBindIndex );
+ VULKAN_HPP_HASH_COMBINE( seed, videoSessionMemoryRequirementsKHR.memoryRequirements );
return seed;
}
};
@@ -12974,12 +13823,12 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const &
- videoSessionParametersCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & videoSessionParametersCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersCreateInfoKHR.sType );
VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersCreateInfoKHR.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersCreateInfoKHR.flags );
VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersCreateInfoKHR.videoSessionParametersTemplate );
VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersCreateInfoKHR.videoSession );
return seed;
@@ -12991,8 +13840,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR const &
- videoSessionParametersUpdateInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR const & videoSessionParametersUpdateInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersUpdateInfoKHR.sType );
@@ -13007,8 +13855,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR>
{
- std::size_t operator()(
- VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & waylandSurfaceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & waylandSurfaceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, waylandSurfaceCreateInfoKHR.sType );
@@ -13025,8 +13872,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR const &
- win32KeyedMutexAcquireReleaseInfoKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR const & win32KeyedMutexAcquireReleaseInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.sType );
@@ -13047,8 +13893,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV const &
- win32KeyedMutexAcquireReleaseInfoNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV const & win32KeyedMutexAcquireReleaseInfoNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.sType );
@@ -13069,8 +13914,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & win32SurfaceCreateInfoKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & win32SurfaceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, win32SurfaceCreateInfoKHR.sType );
@@ -13086,8 +13930,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::WriteDescriptorSet>
{
- std::size_t
- operator()( VULKAN_HPP_NAMESPACE::WriteDescriptorSet const & writeDescriptorSet ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::WriteDescriptorSet const & writeDescriptorSet ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.sType );
@@ -13107,8 +13950,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR const &
- writeDescriptorSetAccelerationStructureKHR ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR const & writeDescriptorSetAccelerationStructureKHR ) const
+ VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureKHR.sType );
@@ -13122,8 +13965,8 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV const &
- writeDescriptorSetAccelerationStructureNV ) const VULKAN_HPP_NOEXCEPT
+ std::size_t
+ operator()( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV const & writeDescriptorSetAccelerationStructureNV ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureNV.sType );
@@ -13137,8 +13980,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock const &
- writeDescriptorSetInlineUniformBlock ) const VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock const & writeDescriptorSetInlineUniformBlock ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetInlineUniformBlock.sType );
@@ -13153,8 +13995,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & xcbSurfaceCreateInfoKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & xcbSurfaceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, xcbSurfaceCreateInfoKHR.sType );
@@ -13171,8 +14012,7 @@ namespace std
template <>
struct hash<VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR>
{
- std::size_t operator()( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & xlibSurfaceCreateInfoKHR ) const
- VULKAN_HPP_NOEXCEPT
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & xlibSurfaceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, xlibSurfaceCreateInfoKHR.sType );
@@ -13188,4 +14028,4 @@ namespace std
#endif // 14 <= VULKAN_HPP_CPP_VERSION
} // namespace std
-#endif // VULKAN_HASH_HPP
+#endif
diff --git a/thirdparty/vulkan/include/vulkan/vulkan_metal.h b/thirdparty/vulkan/include/vulkan/vulkan_metal.h
index 3631f1200a..11b9640919 100644
--- a/thirdparty/vulkan/include/vulkan/vulkan_metal.h
+++ b/thirdparty/vulkan/include/vulkan/vulkan_metal.h
@@ -20,7 +20,6 @@ extern "C" {
#define VK_EXT_metal_surface 1
-
#ifdef __OBJC__
@class CAMetalLayer;
#else
@@ -47,6 +46,146 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCreateMetalSurfaceEXT(
VkSurfaceKHR* pSurface);
#endif
+
+#define VK_EXT_metal_objects 1
+#ifdef __OBJC__
+@protocol MTLDevice;
+typedef id<MTLDevice> MTLDevice_id;
+#else
+typedef void* MTLDevice_id;
+#endif
+
+#ifdef __OBJC__
+@protocol MTLCommandQueue;
+typedef id<MTLCommandQueue> MTLCommandQueue_id;
+#else
+typedef void* MTLCommandQueue_id;
+#endif
+
+#ifdef __OBJC__
+@protocol MTLBuffer;
+typedef id<MTLBuffer> MTLBuffer_id;
+#else
+typedef void* MTLBuffer_id;
+#endif
+
+#ifdef __OBJC__
+@protocol MTLTexture;
+typedef id<MTLTexture> MTLTexture_id;
+#else
+typedef void* MTLTexture_id;
+#endif
+
+typedef struct __IOSurface* IOSurfaceRef;
+#ifdef __OBJC__
+@protocol MTLSharedEvent;
+typedef id<MTLSharedEvent> MTLSharedEvent_id;
+#else
+typedef void* MTLSharedEvent_id;
+#endif
+
+#define VK_EXT_METAL_OBJECTS_SPEC_VERSION 1
+#define VK_EXT_METAL_OBJECTS_EXTENSION_NAME "VK_EXT_metal_objects"
+
+typedef enum VkExportMetalObjectTypeFlagBitsEXT {
+ VK_EXPORT_METAL_OBJECT_TYPE_METAL_DEVICE_BIT_EXT = 0x00000001,
+ VK_EXPORT_METAL_OBJECT_TYPE_METAL_COMMAND_QUEUE_BIT_EXT = 0x00000002,
+ VK_EXPORT_METAL_OBJECT_TYPE_METAL_BUFFER_BIT_EXT = 0x00000004,
+ VK_EXPORT_METAL_OBJECT_TYPE_METAL_TEXTURE_BIT_EXT = 0x00000008,
+ VK_EXPORT_METAL_OBJECT_TYPE_METAL_IOSURFACE_BIT_EXT = 0x00000010,
+ VK_EXPORT_METAL_OBJECT_TYPE_METAL_SHARED_EVENT_BIT_EXT = 0x00000020,
+ VK_EXPORT_METAL_OBJECT_TYPE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkExportMetalObjectTypeFlagBitsEXT;
+typedef VkFlags VkExportMetalObjectTypeFlagsEXT;
+typedef struct VkExportMetalObjectCreateInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkExportMetalObjectTypeFlagBitsEXT exportObjectType;
+} VkExportMetalObjectCreateInfoEXT;
+
+typedef struct VkExportMetalObjectsInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+} VkExportMetalObjectsInfoEXT;
+
+typedef struct VkExportMetalDeviceInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ MTLDevice_id mtlDevice;
+} VkExportMetalDeviceInfoEXT;
+
+typedef struct VkExportMetalCommandQueueInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkQueue queue;
+ MTLCommandQueue_id mtlCommandQueue;
+} VkExportMetalCommandQueueInfoEXT;
+
+typedef struct VkExportMetalBufferInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkDeviceMemory memory;
+ MTLBuffer_id mtlBuffer;
+} VkExportMetalBufferInfoEXT;
+
+typedef struct VkImportMetalBufferInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ MTLBuffer_id mtlBuffer;
+} VkImportMetalBufferInfoEXT;
+
+typedef struct VkExportMetalTextureInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkImage image;
+ VkImageView imageView;
+ VkBufferView bufferView;
+ VkImageAspectFlagBits plane;
+ MTLTexture_id mtlTexture;
+} VkExportMetalTextureInfoEXT;
+
+typedef struct VkImportMetalTextureInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkImageAspectFlagBits plane;
+ MTLTexture_id mtlTexture;
+} VkImportMetalTextureInfoEXT;
+
+typedef struct VkExportMetalIOSurfaceInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkImage image;
+ IOSurfaceRef ioSurface;
+} VkExportMetalIOSurfaceInfoEXT;
+
+typedef struct VkImportMetalIOSurfaceInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ IOSurfaceRef ioSurface;
+} VkImportMetalIOSurfaceInfoEXT;
+
+typedef struct VkExportMetalSharedEventInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ VkSemaphore semaphore;
+ VkEvent event;
+ MTLSharedEvent_id mtlSharedEvent;
+} VkExportMetalSharedEventInfoEXT;
+
+typedef struct VkImportMetalSharedEventInfoEXT {
+ VkStructureType sType;
+ const void* pNext;
+ MTLSharedEvent_id mtlSharedEvent;
+} VkImportMetalSharedEventInfoEXT;
+
+typedef void (VKAPI_PTR *PFN_vkExportMetalObjectsEXT)(VkDevice device, VkExportMetalObjectsInfoEXT* pMetalObjectsInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkExportMetalObjectsEXT(
+ VkDevice device,
+ VkExportMetalObjectsInfoEXT* pMetalObjectsInfo);
+#endif
+
#ifdef __cplusplus
}
#endif
diff --git a/thirdparty/vulkan/include/vulkan/vulkan_raii.hpp b/thirdparty/vulkan/include/vulkan/vulkan_raii.hpp
index a5304301d7..80217dddbc 100644
--- a/thirdparty/vulkan/include/vulkan/vulkan_raii.hpp
+++ b/thirdparty/vulkan/include/vulkan/vulkan_raii.hpp
@@ -8,6 +8,8 @@
#ifndef VULKAN_RAII_HPP
#define VULKAN_RAII_HPP
+#include <memory>
+#include <utility> // std::exchange, std::forward
#include <vulkan/vulkan.hpp>
#if !defined( VULKAN_HPP_RAII_NAMESPACE )
@@ -39,14 +41,12 @@ namespace VULKAN_HPP_NAMESPACE
: vkGetInstanceProcAddr( getProcAddr )
//=== VK_VERSION_1_0 ===
, vkCreateInstance( PFN_vkCreateInstance( getProcAddr( NULL, "vkCreateInstance" ) ) )
- , vkEnumerateInstanceExtensionProperties( PFN_vkEnumerateInstanceExtensionProperties(
- getProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) ) )
- , vkEnumerateInstanceLayerProperties(
- PFN_vkEnumerateInstanceLayerProperties( getProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) ) )
+ , vkEnumerateInstanceExtensionProperties( PFN_vkEnumerateInstanceExtensionProperties( getProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) ) )
+ , vkEnumerateInstanceLayerProperties( PFN_vkEnumerateInstanceLayerProperties( getProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) ) )
//=== VK_VERSION_1_1 ===
- , vkEnumerateInstanceVersion(
- PFN_vkEnumerateInstanceVersion( getProcAddr( NULL, "vkEnumerateInstanceVersion" ) ) )
- {}
+ , vkEnumerateInstanceVersion( PFN_vkEnumerateInstanceVersion( getProcAddr( NULL, "vkEnumerateInstanceVersion" ) ) )
+ {
+ }
public:
PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0;
@@ -63,303 +63,265 @@ namespace VULKAN_HPP_NAMESPACE
class InstanceDispatcher : public DispatchLoaderBase
{
public:
- InstanceDispatcher( PFN_vkGetInstanceProcAddr getProcAddr, VkInstance instance )
- : vkGetInstanceProcAddr( getProcAddr )
+ InstanceDispatcher( PFN_vkGetInstanceProcAddr getProcAddr, VkInstance instance ) : vkGetInstanceProcAddr( getProcAddr )
{
//=== VK_VERSION_1_0 ===
- vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) );
- vkEnumeratePhysicalDevices =
- PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) );
- vkGetPhysicalDeviceFeatures =
- PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) );
- vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) );
- vkGetPhysicalDeviceImageFormatProperties = PFN_vkGetPhysicalDeviceImageFormatProperties(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) );
- vkGetPhysicalDeviceProperties =
- PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) );
- vkGetPhysicalDeviceQueueFamilyProperties = PFN_vkGetPhysicalDeviceQueueFamilyProperties(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) );
- vkGetPhysicalDeviceMemoryProperties = PFN_vkGetPhysicalDeviceMemoryProperties(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) );
+ vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) );
+ vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) );
+ vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) );
+ vkGetPhysicalDeviceFormatProperties =
+ PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) );
+ vkGetPhysicalDeviceImageFormatProperties =
+ PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) );
+ vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) );
+ vkGetPhysicalDeviceQueueFamilyProperties =
+ PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) );
+ vkGetPhysicalDeviceMemoryProperties =
+ PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) );
vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) );
- vkEnumerateDeviceExtensionProperties = PFN_vkEnumerateDeviceExtensionProperties(
- vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) );
- vkEnumerateDeviceLayerProperties =
- PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) );
- vkGetPhysicalDeviceSparseImageFormatProperties = PFN_vkGetPhysicalDeviceSparseImageFormatProperties(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) );
+ vkEnumerateDeviceExtensionProperties =
+ PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) );
+ vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) );
+ vkGetPhysicalDeviceSparseImageFormatProperties =
+ PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) );
//=== VK_VERSION_1_1 ===
- vkEnumeratePhysicalDeviceGroups =
- PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) );
- vkGetPhysicalDeviceFeatures2 =
- PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) );
- vkGetPhysicalDeviceProperties2 =
- PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) );
- vkGetPhysicalDeviceFormatProperties2 = PFN_vkGetPhysicalDeviceFormatProperties2(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) );
- vkGetPhysicalDeviceImageFormatProperties2 = PFN_vkGetPhysicalDeviceImageFormatProperties2(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) );
- vkGetPhysicalDeviceQueueFamilyProperties2 = PFN_vkGetPhysicalDeviceQueueFamilyProperties2(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) );
- vkGetPhysicalDeviceMemoryProperties2 = PFN_vkGetPhysicalDeviceMemoryProperties2(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) );
- vkGetPhysicalDeviceSparseImageFormatProperties2 = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) );
- vkGetPhysicalDeviceExternalBufferProperties = PFN_vkGetPhysicalDeviceExternalBufferProperties(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) );
- vkGetPhysicalDeviceExternalFenceProperties = PFN_vkGetPhysicalDeviceExternalFenceProperties(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) );
- vkGetPhysicalDeviceExternalSemaphoreProperties = PFN_vkGetPhysicalDeviceExternalSemaphoreProperties(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) );
+ vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) );
+ vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) );
+ vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) );
+ vkGetPhysicalDeviceFormatProperties2 =
+ PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) );
+ vkGetPhysicalDeviceImageFormatProperties2 =
+ PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) );
+ vkGetPhysicalDeviceQueueFamilyProperties2 =
+ PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) );
+ vkGetPhysicalDeviceMemoryProperties2 =
+ PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) );
+ vkGetPhysicalDeviceSparseImageFormatProperties2 =
+ PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) );
+ vkGetPhysicalDeviceExternalBufferProperties =
+ PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) );
+ vkGetPhysicalDeviceExternalFenceProperties =
+ PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) );
+ vkGetPhysicalDeviceExternalSemaphoreProperties =
+ PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) );
//=== VK_VERSION_1_3 ===
- vkGetPhysicalDeviceToolProperties = PFN_vkGetPhysicalDeviceToolProperties(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolProperties" ) );
+ vkGetPhysicalDeviceToolProperties = PFN_vkGetPhysicalDeviceToolProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolProperties" ) );
//=== VK_EXT_acquire_drm_display ===
- vkAcquireDrmDisplayEXT =
- PFN_vkAcquireDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireDrmDisplayEXT" ) );
- vkGetDrmDisplayEXT = PFN_vkGetDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetDrmDisplayEXT" ) );
+ vkAcquireDrmDisplayEXT = PFN_vkAcquireDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireDrmDisplayEXT" ) );
+ vkGetDrmDisplayEXT = PFN_vkGetDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetDrmDisplayEXT" ) );
# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
//=== VK_EXT_acquire_xlib_display ===
- vkAcquireXlibDisplayEXT =
- PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) );
- vkGetRandROutputDisplayEXT =
- PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) );
+ vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) );
+ vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) );
# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
//=== VK_EXT_calibrated_timestamps ===
- vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) );
+ vkGetPhysicalDeviceCalibrateableTimeDomainsEXT =
+ PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) );
//=== VK_EXT_debug_report ===
- vkCreateDebugReportCallbackEXT =
- PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) );
- vkDestroyDebugReportCallbackEXT =
- PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) );
- vkDebugReportMessageEXT =
- PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) );
+ vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) );
+ vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) );
+ vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) );
//=== VK_EXT_debug_utils ===
- vkCreateDebugUtilsMessengerEXT =
- PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) );
- vkDestroyDebugUtilsMessengerEXT =
- PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) );
- vkSubmitDebugUtilsMessageEXT =
- PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) );
+ vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) );
+ vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) );
+ vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) );
//=== VK_EXT_direct_mode_display ===
vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) );
# if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
//=== VK_EXT_directfb_surface ===
- vkCreateDirectFBSurfaceEXT =
- PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) );
- vkGetPhysicalDeviceDirectFBPresentationSupportEXT = PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) );
+ vkCreateDirectFBSurfaceEXT = PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) );
+ vkGetPhysicalDeviceDirectFBPresentationSupportEXT =
+ PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) );
# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
//=== VK_EXT_display_surface_counter ===
- vkGetPhysicalDeviceSurfaceCapabilities2EXT = PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) );
+ vkGetPhysicalDeviceSurfaceCapabilities2EXT =
+ PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) );
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_EXT_full_screen_exclusive ===
- vkGetPhysicalDeviceSurfacePresentModes2EXT = PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) );
+ vkGetPhysicalDeviceSurfacePresentModes2EXT =
+ PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) );
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_EXT_headless_surface ===
- vkCreateHeadlessSurfaceEXT =
- PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) );
+ vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) );
# if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_surface ===
- vkCreateMetalSurfaceEXT =
- PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) );
+ vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) );
# endif /*VK_USE_PLATFORM_METAL_EXT*/
//=== VK_EXT_sample_locations ===
- vkGetPhysicalDeviceMultisamplePropertiesEXT = PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) );
+ vkGetPhysicalDeviceMultisamplePropertiesEXT =
+ PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) );
//=== VK_EXT_tooling_info ===
- vkGetPhysicalDeviceToolPropertiesEXT = PFN_vkGetPhysicalDeviceToolPropertiesEXT(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) );
+ vkGetPhysicalDeviceToolPropertiesEXT =
+ PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) );
if ( !vkGetPhysicalDeviceToolProperties )
vkGetPhysicalDeviceToolProperties = vkGetPhysicalDeviceToolPropertiesEXT;
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_imagepipe_surface ===
- vkCreateImagePipeSurfaceFUCHSIA =
- PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) );
+ vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) );
# endif /*VK_USE_PLATFORM_FUCHSIA*/
# if defined( VK_USE_PLATFORM_GGP )
//=== VK_GGP_stream_descriptor_surface ===
- vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP(
- vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) );
+ vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) );
# endif /*VK_USE_PLATFORM_GGP*/
# if defined( VK_USE_PLATFORM_ANDROID_KHR )
//=== VK_KHR_android_surface ===
- vkCreateAndroidSurfaceKHR =
- PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) );
+ vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) );
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
//=== VK_KHR_device_group ===
- vkGetPhysicalDevicePresentRectanglesKHR = PFN_vkGetPhysicalDevicePresentRectanglesKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) );
+ vkGetPhysicalDevicePresentRectanglesKHR =
+ PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) );
//=== VK_KHR_device_group_creation ===
- vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR(
- vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) );
+ vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) );
if ( !vkEnumeratePhysicalDeviceGroups )
vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR;
//=== VK_KHR_display ===
- vkGetPhysicalDeviceDisplayPropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPropertiesKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) );
- vkGetPhysicalDeviceDisplayPlanePropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) );
- vkGetDisplayPlaneSupportedDisplaysKHR = PFN_vkGetDisplayPlaneSupportedDisplaysKHR(
- vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) );
- vkGetDisplayModePropertiesKHR =
- PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) );
- vkCreateDisplayModeKHR =
- PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) );
- vkGetDisplayPlaneCapabilitiesKHR =
- PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) );
- vkCreateDisplayPlaneSurfaceKHR =
- PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) );
+ vkGetPhysicalDeviceDisplayPropertiesKHR =
+ PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) );
+ vkGetPhysicalDeviceDisplayPlanePropertiesKHR =
+ PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) );
+ vkGetDisplayPlaneSupportedDisplaysKHR =
+ PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) );
+ vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) );
+ vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) );
+ vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) );
+ vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) );
//=== VK_KHR_external_fence_capabilities ===
- vkGetPhysicalDeviceExternalFencePropertiesKHR = PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) );
+ vkGetPhysicalDeviceExternalFencePropertiesKHR =
+ PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) );
if ( !vkGetPhysicalDeviceExternalFenceProperties )
vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR;
//=== VK_KHR_external_memory_capabilities ===
- vkGetPhysicalDeviceExternalBufferPropertiesKHR = PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) );
+ vkGetPhysicalDeviceExternalBufferPropertiesKHR =
+ PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) );
if ( !vkGetPhysicalDeviceExternalBufferProperties )
vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR;
//=== VK_KHR_external_semaphore_capabilities ===
- vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) );
+ vkGetPhysicalDeviceExternalSemaphorePropertiesKHR =
+ PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) );
if ( !vkGetPhysicalDeviceExternalSemaphoreProperties )
vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR;
//=== VK_KHR_fragment_shading_rate ===
- vkGetPhysicalDeviceFragmentShadingRatesKHR = PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) );
+ vkGetPhysicalDeviceFragmentShadingRatesKHR =
+ PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) );
//=== VK_KHR_get_display_properties2 ===
- vkGetPhysicalDeviceDisplayProperties2KHR = PFN_vkGetPhysicalDeviceDisplayProperties2KHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) );
- vkGetPhysicalDeviceDisplayPlaneProperties2KHR = PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) );
- vkGetDisplayModeProperties2KHR =
- PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) );
- vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR(
- vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) );
+ vkGetPhysicalDeviceDisplayProperties2KHR =
+ PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) );
+ vkGetPhysicalDeviceDisplayPlaneProperties2KHR =
+ PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) );
+ vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) );
+ vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) );
//=== VK_KHR_get_physical_device_properties2 ===
- vkGetPhysicalDeviceFeatures2KHR =
- PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) );
+ vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) );
if ( !vkGetPhysicalDeviceFeatures2 )
vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR;
- vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) );
+ vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) );
if ( !vkGetPhysicalDeviceProperties2 )
vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR;
- vkGetPhysicalDeviceFormatProperties2KHR = PFN_vkGetPhysicalDeviceFormatProperties2KHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) );
+ vkGetPhysicalDeviceFormatProperties2KHR =
+ PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) );
if ( !vkGetPhysicalDeviceFormatProperties2 )
vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR;
- vkGetPhysicalDeviceImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceImageFormatProperties2KHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) );
+ vkGetPhysicalDeviceImageFormatProperties2KHR =
+ PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) );
if ( !vkGetPhysicalDeviceImageFormatProperties2 )
vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR;
- vkGetPhysicalDeviceQueueFamilyProperties2KHR = PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) );
+ vkGetPhysicalDeviceQueueFamilyProperties2KHR =
+ PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) );
if ( !vkGetPhysicalDeviceQueueFamilyProperties2 )
vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR;
- vkGetPhysicalDeviceMemoryProperties2KHR = PFN_vkGetPhysicalDeviceMemoryProperties2KHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) );
+ vkGetPhysicalDeviceMemoryProperties2KHR =
+ PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) );
if ( !vkGetPhysicalDeviceMemoryProperties2 )
vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR;
- vkGetPhysicalDeviceSparseImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) );
+ vkGetPhysicalDeviceSparseImageFormatProperties2KHR =
+ PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) );
if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 )
vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR;
//=== VK_KHR_get_surface_capabilities2 ===
- vkGetPhysicalDeviceSurfaceCapabilities2KHR = PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) );
- vkGetPhysicalDeviceSurfaceFormats2KHR = PFN_vkGetPhysicalDeviceSurfaceFormats2KHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) );
+ vkGetPhysicalDeviceSurfaceCapabilities2KHR =
+ PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) );
+ vkGetPhysicalDeviceSurfaceFormats2KHR =
+ PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) );
//=== VK_KHR_performance_query ===
- vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR =
- PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
- vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) );
- vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR =
- PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) );
+ vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+ vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) );
+ vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
+ vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) );
//=== VK_KHR_surface ===
vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) );
- vkGetPhysicalDeviceSurfaceSupportKHR = PFN_vkGetPhysicalDeviceSurfaceSupportKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) );
- vkGetPhysicalDeviceSurfaceCapabilitiesKHR = PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) );
- vkGetPhysicalDeviceSurfaceFormatsKHR = PFN_vkGetPhysicalDeviceSurfaceFormatsKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) );
- vkGetPhysicalDeviceSurfacePresentModesKHR = PFN_vkGetPhysicalDeviceSurfacePresentModesKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) );
+ vkGetPhysicalDeviceSurfaceSupportKHR =
+ PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) );
+ vkGetPhysicalDeviceSurfaceCapabilitiesKHR =
+ PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) );
+ vkGetPhysicalDeviceSurfaceFormatsKHR =
+ PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) );
+ vkGetPhysicalDeviceSurfacePresentModesKHR =
+ PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) );
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_video_queue ===
- vkGetPhysicalDeviceVideoCapabilitiesKHR = PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) );
- vkGetPhysicalDeviceVideoFormatPropertiesKHR = PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) );
+ vkGetPhysicalDeviceVideoCapabilitiesKHR =
+ PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) );
+ vkGetPhysicalDeviceVideoFormatPropertiesKHR =
+ PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) );
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_USE_PLATFORM_WAYLAND_KHR )
//=== VK_KHR_wayland_surface ===
- vkCreateWaylandSurfaceKHR =
- PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) );
- vkGetPhysicalDeviceWaylandPresentationSupportKHR = PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) );
+ vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) );
+ vkGetPhysicalDeviceWaylandPresentationSupportKHR =
+ PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) );
# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_win32_surface ===
- vkCreateWin32SurfaceKHR =
- PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) );
- vkGetPhysicalDeviceWin32PresentationSupportKHR = PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) );
+ vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) );
+ vkGetPhysicalDeviceWin32PresentationSupportKHR =
+ PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) );
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
# if defined( VK_USE_PLATFORM_XCB_KHR )
//=== VK_KHR_xcb_surface ===
vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) );
- vkGetPhysicalDeviceXcbPresentationSupportKHR = PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) );
+ vkGetPhysicalDeviceXcbPresentationSupportKHR =
+ PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) );
# endif /*VK_USE_PLATFORM_XCB_KHR*/
# if defined( VK_USE_PLATFORM_XLIB_KHR )
//=== VK_KHR_xlib_surface ===
- vkCreateXlibSurfaceKHR =
- PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) );
- vkGetPhysicalDeviceXlibPresentationSupportKHR = PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) );
+ vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) );
+ vkGetPhysicalDeviceXlibPresentationSupportKHR =
+ PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) );
# endif /*VK_USE_PLATFORM_XLIB_KHR*/
# if defined( VK_USE_PLATFORM_IOS_MVK )
@@ -369,8 +331,7 @@ namespace VULKAN_HPP_NAMESPACE
# if defined( VK_USE_PLATFORM_MACOS_MVK )
//=== VK_MVK_macos_surface ===
- vkCreateMacOSSurfaceMVK =
- PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) );
+ vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) );
# endif /*VK_USE_PLATFORM_MACOS_MVK*/
# if defined( VK_USE_PLATFORM_VI_NN )
@@ -380,30 +341,31 @@ namespace VULKAN_HPP_NAMESPACE
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_NV_acquire_winrt_display ===
- vkAcquireWinrtDisplayNV =
- PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) );
- vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) );
+ vkAcquireWinrtDisplayNV = PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) );
+ vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) );
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_NV_cooperative_matrix ===
- vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) );
+ vkGetPhysicalDeviceCooperativeMatrixPropertiesNV =
+ PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) );
//=== VK_NV_coverage_reduction_mode ===
- vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV =
- PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) );
+ vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+ vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) );
//=== VK_NV_external_memory_capabilities ===
- vkGetPhysicalDeviceExternalImageFormatPropertiesNV = PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) );
+ vkGetPhysicalDeviceExternalImageFormatPropertiesNV =
+ PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) );
+
+ //=== VK_NV_optical_flow ===
+ vkGetPhysicalDeviceOpticalFlowImageFormatsNV =
+ PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceOpticalFlowImageFormatsNV" ) );
# if defined( VK_USE_PLATFORM_SCREEN_QNX )
//=== VK_QNX_screen_surface ===
- vkCreateScreenSurfaceQNX =
- PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) );
- vkGetPhysicalDeviceScreenPresentationSupportQNX = PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) );
+ vkCreateScreenSurfaceQNX = PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) );
+ vkGetPhysicalDeviceScreenPresentationSupportQNX =
+ PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) );
# endif /*VK_USE_PLATFORM_SCREEN_QNX*/
vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) );
@@ -573,10 +535,8 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0;
//=== VK_KHR_performance_query ===
- PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR
- vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0;
- PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR
- vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0;
+ PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0;
+ PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0;
//=== VK_KHR_surface ===
PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0;
@@ -664,12 +624,14 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0;
//=== VK_NV_coverage_reduction_mode ===
- PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV
- vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0;
+ PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0;
//=== VK_NV_external_memory_capabilities ===
PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0;
+ //=== VK_NV_optical_flow ===
+ PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV = 0;
+
# if defined( VK_USE_PLATFORM_SCREEN_QNX )
//=== VK_QNX_screen_surface ===
PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 0;
@@ -688,269 +650,212 @@ namespace VULKAN_HPP_NAMESPACE
DeviceDispatcher( PFN_vkGetDeviceProcAddr getProcAddr, VkDevice device ) : vkGetDeviceProcAddr( getProcAddr )
{
//=== VK_VERSION_1_0 ===
- vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) );
- vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) );
- vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) );
- vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) );
- vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) );
- vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) );
- vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) );
- vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) );
- vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) );
- vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) );
- vkFlushMappedMemoryRanges =
- PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) );
- vkInvalidateMappedMemoryRanges =
- PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) );
- vkGetDeviceMemoryCommitment =
- PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) );
- vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) );
- vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) );
- vkGetBufferMemoryRequirements =
- PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) );
- vkGetImageMemoryRequirements =
- PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) );
- vkGetImageSparseMemoryRequirements =
- PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) );
- vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) );
- vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) );
- vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) );
- vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) );
- vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) );
- vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) );
- vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) );
- vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) );
- vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) );
- vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) );
- vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) );
- vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) );
- vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) );
- vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) );
- vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) );
- vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) );
- vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) );
- vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) );
- vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) );
- vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) );
- vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) );
- vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) );
- vkGetImageSubresourceLayout =
- PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) );
- vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) );
- vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) );
- vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) );
- vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) );
- vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) );
- vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) );
- vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) );
- vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) );
- vkCreateGraphicsPipelines =
- PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) );
- vkCreateComputePipelines =
- PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) );
- vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) );
- vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) );
- vkDestroyPipelineLayout =
- PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) );
- vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) );
- vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) );
- vkCreateDescriptorSetLayout =
- PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) );
- vkDestroyDescriptorSetLayout =
- PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) );
- vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) );
- vkDestroyDescriptorPool =
- PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) );
- vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) );
- vkAllocateDescriptorSets =
- PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) );
- vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) );
- vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) );
- vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) );
- vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) );
- vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) );
- vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) );
- vkGetRenderAreaGranularity =
- PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) );
- vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) );
- vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) );
- vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) );
- vkAllocateCommandBuffers =
- PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) );
- vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) );
- vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) );
- vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) );
- vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) );
- vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) );
- vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) );
- vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) );
- vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) );
- vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) );
- vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) );
- vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) );
- vkCmdSetStencilCompareMask =
- PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) );
- vkCmdSetStencilWriteMask =
- PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) );
- vkCmdSetStencilReference =
- PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) );
- vkCmdBindDescriptorSets =
- PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) );
- vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) );
- vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) );
- vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) );
- vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) );
- vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) );
- vkCmdDrawIndexedIndirect =
- PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) );
- vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) );
- vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) );
- vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) );
- vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) );
- vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) );
- vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) );
- vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) );
- vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) );
- vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) );
- vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) );
- vkCmdClearDepthStencilImage =
- PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) );
- vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) );
- vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) );
- vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) );
- vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) );
- vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) );
- vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) );
- vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) );
- vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) );
- vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) );
- vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) );
- vkCmdCopyQueryPoolResults =
- PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) );
- vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) );
- vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) );
- vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) );
- vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) );
- vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) );
+ vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) );
+ vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) );
+ vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) );
+ vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) );
+ vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) );
+ vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) );
+ vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) );
+ vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) );
+ vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) );
+ vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) );
+ vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) );
+ vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) );
+ vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) );
+ vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) );
+ vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) );
+ vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) );
+ vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) );
+ vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) );
+ vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) );
+ vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) );
+ vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) );
+ vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) );
+ vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) );
+ vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) );
+ vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) );
+ vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) );
+ vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) );
+ vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) );
+ vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) );
+ vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) );
+ vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) );
+ vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) );
+ vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) );
+ vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) );
+ vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) );
+ vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) );
+ vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) );
+ vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) );
+ vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) );
+ vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) );
+ vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) );
+ vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) );
+ vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) );
+ vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) );
+ vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) );
+ vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) );
+ vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) );
+ vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) );
+ vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) );
+ vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) );
+ vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) );
+ vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) );
+ vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) );
+ vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) );
+ vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) );
+ vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) );
+ vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) );
+ vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) );
+ vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) );
+ vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) );
+ vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) );
+ vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) );
+ vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) );
+ vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) );
+ vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) );
+ vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) );
+ vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) );
+ vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) );
+ vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) );
+ vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) );
+ vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) );
+ vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) );
+ vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) );
+ vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) );
+ vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) );
+ vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) );
+ vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) );
+ vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) );
+ vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) );
+ vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) );
+ vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) );
+ vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) );
+ vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) );
+ vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) );
+ vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) );
+ vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) );
+ vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) );
+ vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) );
+ vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) );
+ vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) );
+ vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) );
+ vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) );
+ vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) );
+ vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) );
+ vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) );
+ vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) );
+ vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) );
+ vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) );
+ vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) );
+ vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) );
+ vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) );
+ vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) );
+ vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) );
+ vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) );
+ vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) );
+ vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) );
+ vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) );
+ vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) );
+ vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) );
+ vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) );
+ vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) );
+ vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) );
+ vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) );
+ vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) );
+ vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) );
+ vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) );
+ vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) );
+ vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) );
+ vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) );
+ vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) );
+ vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) );
//=== VK_VERSION_1_1 ===
- vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) );
- vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) );
- vkGetDeviceGroupPeerMemoryFeatures =
- PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) );
- vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) );
- vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) );
- vkGetImageMemoryRequirements2 =
- PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) );
- vkGetBufferMemoryRequirements2 =
- PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) );
- vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2(
- vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) );
- vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) );
- vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) );
- vkCreateSamplerYcbcrConversion =
- PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) );
- vkDestroySamplerYcbcrConversion =
- PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) );
- vkCreateDescriptorUpdateTemplate =
- PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) );
- vkDestroyDescriptorUpdateTemplate =
- PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) );
- vkUpdateDescriptorSetWithTemplate =
- PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) );
- vkGetDescriptorSetLayoutSupport =
- PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) );
+ vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) );
+ vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) );
+ vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) );
+ vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) );
+ vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) );
+ vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) );
+ vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) );
+ vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) );
+ vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) );
+ vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) );
+ vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) );
+ vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) );
+ vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) );
+ vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) );
+ vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) );
+ vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) );
//=== VK_VERSION_1_2 ===
- vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) );
- vkCmdDrawIndexedIndirectCount =
- PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) );
- vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) );
- vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) );
- vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) );
- vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) );
- vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) );
- vkGetSemaphoreCounterValue =
- PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) );
- vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) );
- vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) );
- vkGetBufferDeviceAddress =
- PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) );
- vkGetBufferOpaqueCaptureAddress =
- PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) );
- vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress(
- vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
+ vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) );
+ vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) );
+ vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) );
+ vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) );
+ vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) );
+ vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) );
+ vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) );
+ vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) );
+ vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) );
+ vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) );
+ vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) );
+ vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) );
+ vkGetDeviceMemoryOpaqueCaptureAddress =
+ PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
//=== VK_VERSION_1_3 ===
- vkCreatePrivateDataSlot =
- PFN_vkCreatePrivateDataSlot( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlot" ) );
- vkDestroyPrivateDataSlot =
- PFN_vkDestroyPrivateDataSlot( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlot" ) );
- vkSetPrivateData = PFN_vkSetPrivateData( vkGetDeviceProcAddr( device, "vkSetPrivateData" ) );
- vkGetPrivateData = PFN_vkGetPrivateData( vkGetDeviceProcAddr( device, "vkGetPrivateData" ) );
- vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetDeviceProcAddr( device, "vkCmdSetEvent2" ) );
- vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetDeviceProcAddr( device, "vkCmdResetEvent2" ) );
- vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2" ) );
- vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2" ) );
- vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2" ) );
- vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetDeviceProcAddr( device, "vkQueueSubmit2" ) );
- vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2" ) );
- vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetDeviceProcAddr( device, "vkCmdCopyImage2" ) );
- vkCmdCopyBufferToImage2 =
- PFN_vkCmdCopyBufferToImage2( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2" ) );
- vkCmdCopyImageToBuffer2 =
- PFN_vkCmdCopyImageToBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2" ) );
- vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetDeviceProcAddr( device, "vkCmdBlitImage2" ) );
- vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetDeviceProcAddr( device, "vkCmdResolveImage2" ) );
- vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetDeviceProcAddr( device, "vkCmdBeginRendering" ) );
- vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetDeviceProcAddr( device, "vkCmdEndRendering" ) );
- vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetDeviceProcAddr( device, "vkCmdSetCullMode" ) );
- vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetDeviceProcAddr( device, "vkCmdSetFrontFace" ) );
- vkCmdSetPrimitiveTopology =
- PFN_vkCmdSetPrimitiveTopology( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopology" ) );
- vkCmdSetViewportWithCount =
- PFN_vkCmdSetViewportWithCount( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCount" ) );
- vkCmdSetScissorWithCount =
- PFN_vkCmdSetScissorWithCount( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCount" ) );
- vkCmdBindVertexBuffers2 =
- PFN_vkCmdBindVertexBuffers2( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2" ) );
- vkCmdSetDepthTestEnable =
- PFN_vkCmdSetDepthTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnable" ) );
- vkCmdSetDepthWriteEnable =
- PFN_vkCmdSetDepthWriteEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnable" ) );
- vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOp" ) );
- vkCmdSetDepthBoundsTestEnable =
- PFN_vkCmdSetDepthBoundsTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnable" ) );
- vkCmdSetStencilTestEnable =
- PFN_vkCmdSetStencilTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnable" ) );
- vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetDeviceProcAddr( device, "vkCmdSetStencilOp" ) );
- vkCmdSetRasterizerDiscardEnable =
- PFN_vkCmdSetRasterizerDiscardEnable( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnable" ) );
- vkCmdSetDepthBiasEnable =
- PFN_vkCmdSetDepthBiasEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnable" ) );
- vkCmdSetPrimitiveRestartEnable =
- PFN_vkCmdSetPrimitiveRestartEnable( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnable" ) );
- vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements(
- vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirements" ) );
- vkGetDeviceImageMemoryRequirements =
- PFN_vkGetDeviceImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirements" ) );
- vkGetDeviceImageSparseMemoryRequirements = PFN_vkGetDeviceImageSparseMemoryRequirements(
- vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) );
+ vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlot" ) );
+ vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlot" ) );
+ vkSetPrivateData = PFN_vkSetPrivateData( vkGetDeviceProcAddr( device, "vkSetPrivateData" ) );
+ vkGetPrivateData = PFN_vkGetPrivateData( vkGetDeviceProcAddr( device, "vkGetPrivateData" ) );
+ vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetDeviceProcAddr( device, "vkCmdSetEvent2" ) );
+ vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetDeviceProcAddr( device, "vkCmdResetEvent2" ) );
+ vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2" ) );
+ vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2" ) );
+ vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2" ) );
+ vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetDeviceProcAddr( device, "vkQueueSubmit2" ) );
+ vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2" ) );
+ vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetDeviceProcAddr( device, "vkCmdCopyImage2" ) );
+ vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2" ) );
+ vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2" ) );
+ vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetDeviceProcAddr( device, "vkCmdBlitImage2" ) );
+ vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetDeviceProcAddr( device, "vkCmdResolveImage2" ) );
+ vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetDeviceProcAddr( device, "vkCmdBeginRendering" ) );
+ vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetDeviceProcAddr( device, "vkCmdEndRendering" ) );
+ vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetDeviceProcAddr( device, "vkCmdSetCullMode" ) );
+ vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetDeviceProcAddr( device, "vkCmdSetFrontFace" ) );
+ vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopology" ) );
+ vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCount" ) );
+ vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCount" ) );
+ vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2" ) );
+ vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnable" ) );
+ vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnable" ) );
+ vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOp" ) );
+ vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnable" ) );
+ vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnable" ) );
+ vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetDeviceProcAddr( device, "vkCmdSetStencilOp" ) );
+ vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnable" ) );
+ vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnable" ) );
+ vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnable" ) );
+ vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirements" ) );
+ vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirements" ) );
+ vkGetDeviceImageSparseMemoryRequirements =
+ PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) );
//=== VK_AMD_buffer_marker ===
- vkCmdWriteBufferMarkerAMD =
- PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) );
+ vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) );
//=== VK_AMD_display_native_hdr ===
vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) );
//=== VK_AMD_draw_indirect_count ===
- vkCmdDrawIndirectCountAMD =
- PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) );
+ vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) );
if ( !vkCmdDrawIndirectCount )
vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD;
- vkCmdDrawIndexedIndirectCountAMD =
- PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) );
+ vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) );
if ( !vkCmdDrawIndexedIndirectCount )
vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD;
@@ -959,74 +864,55 @@ namespace VULKAN_HPP_NAMESPACE
# if defined( VK_USE_PLATFORM_ANDROID_KHR )
//=== VK_ANDROID_external_memory_android_hardware_buffer ===
- vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID(
- vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
- vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID(
- vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
+ vkGetAndroidHardwareBufferPropertiesANDROID =
+ PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
+ vkGetMemoryAndroidHardwareBufferANDROID =
+ PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
//=== VK_EXT_buffer_device_address ===
- vkGetBufferDeviceAddressEXT =
- PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) );
+ vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) );
if ( !vkGetBufferDeviceAddress )
vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT;
//=== VK_EXT_calibrated_timestamps ===
- vkGetCalibratedTimestampsEXT =
- PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) );
+ vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) );
//=== VK_EXT_color_write_enable ===
- vkCmdSetColorWriteEnableEXT =
- PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) );
+ vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) );
//=== VK_EXT_conditional_rendering ===
- vkCmdBeginConditionalRenderingEXT =
- PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) );
- vkCmdEndConditionalRenderingEXT =
- PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) );
+ vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) );
+ vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) );
//=== VK_EXT_debug_marker ===
- vkDebugMarkerSetObjectTagEXT =
- PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) );
- vkDebugMarkerSetObjectNameEXT =
- PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) );
- vkCmdDebugMarkerBeginEXT =
- PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) );
- vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) );
- vkCmdDebugMarkerInsertEXT =
- PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) );
+ vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) );
+ vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) );
+ vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) );
+ vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) );
+ vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) );
//=== VK_EXT_debug_utils ===
- vkSetDebugUtilsObjectNameEXT =
- PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) );
- vkSetDebugUtilsObjectTagEXT =
- PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) );
- vkQueueBeginDebugUtilsLabelEXT =
- PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) );
- vkQueueEndDebugUtilsLabelEXT =
- PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) );
- vkQueueInsertDebugUtilsLabelEXT =
- PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) );
- vkCmdBeginDebugUtilsLabelEXT =
- PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) );
- vkCmdEndDebugUtilsLabelEXT =
- PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) );
- vkCmdInsertDebugUtilsLabelEXT =
- PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) );
+ vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) );
+ vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) );
+ vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) );
+ vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) );
+ vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) );
+ vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) );
+ vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) );
+ vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) );
+
+ //=== VK_EXT_device_fault ===
+ vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) );
//=== VK_EXT_discard_rectangles ===
- vkCmdSetDiscardRectangleEXT =
- PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) );
+ vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) );
//=== VK_EXT_display_control ===
- vkDisplayPowerControlEXT =
- PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) );
- vkRegisterDeviceEventEXT =
- PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) );
- vkRegisterDisplayEventEXT =
- PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) );
- vkGetSwapchainCounterEXT =
- PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) );
+ vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) );
+ vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) );
+ vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) );
+ vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) );
//=== VK_EXT_extended_dynamic_state ===
vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) );
@@ -1035,40 +921,31 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) );
if ( !vkCmdSetFrontFace )
vkCmdSetFrontFace = vkCmdSetFrontFaceEXT;
- vkCmdSetPrimitiveTopologyEXT =
- PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) );
+ vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) );
if ( !vkCmdSetPrimitiveTopology )
vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT;
- vkCmdSetViewportWithCountEXT =
- PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) );
+ vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) );
if ( !vkCmdSetViewportWithCount )
vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT;
- vkCmdSetScissorWithCountEXT =
- PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) );
+ vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) );
if ( !vkCmdSetScissorWithCount )
vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT;
- vkCmdBindVertexBuffers2EXT =
- PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) );
+ vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) );
if ( !vkCmdBindVertexBuffers2 )
vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT;
- vkCmdSetDepthTestEnableEXT =
- PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) );
+ vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) );
if ( !vkCmdSetDepthTestEnable )
vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT;
- vkCmdSetDepthWriteEnableEXT =
- PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) );
+ vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) );
if ( !vkCmdSetDepthWriteEnable )
vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT;
- vkCmdSetDepthCompareOpEXT =
- PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) );
+ vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) );
if ( !vkCmdSetDepthCompareOp )
vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT;
- vkCmdSetDepthBoundsTestEnableEXT =
- PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) );
+ vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) );
if ( !vkCmdSetDepthBoundsTestEnable )
vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT;
- vkCmdSetStencilTestEnableEXT =
- PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) );
+ vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) );
if ( !vkCmdSetStencilTestEnable )
vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT;
vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) );
@@ -1076,34 +953,65 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdSetStencilOp = vkCmdSetStencilOpEXT;
//=== VK_EXT_extended_dynamic_state2 ===
- vkCmdSetPatchControlPointsEXT =
- PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) );
- vkCmdSetRasterizerDiscardEnableEXT =
- PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) );
+ vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) );
+ vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) );
if ( !vkCmdSetRasterizerDiscardEnable )
vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT;
- vkCmdSetDepthBiasEnableEXT =
- PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) );
+ vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) );
if ( !vkCmdSetDepthBiasEnable )
vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT;
- vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) );
- vkCmdSetPrimitiveRestartEnableEXT =
- PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) );
+ vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) );
+ vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) );
if ( !vkCmdSetPrimitiveRestartEnable )
vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT;
+ //=== VK_EXT_extended_dynamic_state3 ===
+ vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetDeviceProcAddr( device, "vkCmdSetTessellationDomainOriginEXT" ) );
+ vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampEnableEXT" ) );
+ vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetPolygonModeEXT" ) );
+ vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationSamplesEXT" ) );
+ vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleMaskEXT" ) );
+ vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToCoverageEnableEXT" ) );
+ vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToOneEnableEXT" ) );
+ vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEnableEXT" ) );
+ vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEnableEXT" ) );
+ vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEquationEXT" ) );
+ vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteMaskEXT" ) );
+ vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationStreamEXT" ) );
+ vkCmdSetConservativeRasterizationModeEXT =
+ PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetConservativeRasterizationModeEXT" ) );
+ vkCmdSetExtraPrimitiveOverestimationSizeEXT =
+ PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetDeviceProcAddr( device, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) );
+ vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipEnableEXT" ) );
+ vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEnableEXT" ) );
+ vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendAdvancedEXT" ) );
+ vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetProvokingVertexModeEXT" ) );
+ vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineRasterizationModeEXT" ) );
+ vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEnableEXT" ) );
+ vkCmdSetDepthClipNegativeOneToOneEXT =
+ PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipNegativeOneToOneEXT" ) );
+ vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingEnableNV" ) );
+ vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportSwizzleNV" ) );
+ vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorEnableNV" ) );
+ vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorLocationNV" ) );
+ vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationModeNV" ) );
+ vkCmdSetCoverageModulationTableEnableNV =
+ PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableEnableNV" ) );
+ vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableNV" ) );
+ vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetShadingRateImageEnableNV" ) );
+ vkCmdSetRepresentativeFragmentTestEnableNV =
+ PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetRepresentativeFragmentTestEnableNV" ) );
+ vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageReductionModeNV" ) );
+
//=== VK_EXT_external_memory_host ===
- vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT(
- vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) );
+ vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) );
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_EXT_full_screen_exclusive ===
- vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT(
- vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) );
- vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT(
- vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) );
- vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT(
- vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) );
+ vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) );
+ vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) );
+ vkGetDeviceGroupSurfacePresentModes2EXT =
+ PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) );
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_EXT_hdr_metadata ===
@@ -1114,29 +1022,57 @@ namespace VULKAN_HPP_NAMESPACE
if ( !vkResetQueryPool )
vkResetQueryPool = vkResetQueryPoolEXT;
+ //=== VK_EXT_image_compression_control ===
+ vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) );
+
//=== VK_EXT_image_drm_format_modifier ===
- vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT(
- vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) );
+ vkGetImageDrmFormatModifierPropertiesEXT =
+ PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) );
//=== VK_EXT_line_rasterization ===
vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) );
+ //=== VK_EXT_mesh_shader ===
+ vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksEXT" ) );
+ vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectEXT" ) );
+ vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountEXT" ) );
+
+# if defined( VK_USE_PLATFORM_METAL_EXT )
+ //=== VK_EXT_metal_objects ===
+ vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetDeviceProcAddr( device, "vkExportMetalObjectsEXT" ) );
+# endif /*VK_USE_PLATFORM_METAL_EXT*/
+
//=== VK_EXT_multi_draw ===
- vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) );
- vkCmdDrawMultiIndexedEXT =
- PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) );
+ vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) );
+ vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) );
+
+ //=== VK_EXT_opacity_micromap ===
+ vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetDeviceProcAddr( device, "vkCreateMicromapEXT" ) );
+ vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetDeviceProcAddr( device, "vkDestroyMicromapEXT" ) );
+ vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkCmdBuildMicromapsEXT" ) );
+ vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkBuildMicromapsEXT" ) );
+ vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapEXT" ) );
+ vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapToMemoryEXT" ) );
+ vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToMicromapEXT" ) );
+ vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkWriteMicromapsPropertiesEXT" ) );
+ vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapEXT" ) );
+ vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapToMemoryEXT" ) );
+ vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToMicromapEXT" ) );
+ vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkCmdWriteMicromapsPropertiesEXT" ) );
+ vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetDeviceProcAddr( device, "vkGetDeviceMicromapCompatibilityEXT" ) );
+ vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetDeviceProcAddr( device, "vkGetMicromapBuildSizesEXT" ) );
//=== VK_EXT_pageable_device_local_memory ===
- vkSetDeviceMemoryPriorityEXT =
- PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) );
+ vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) );
+
+ //=== VK_EXT_pipeline_properties ===
+ vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetDeviceProcAddr( device, "vkGetPipelinePropertiesEXT" ) );
//=== VK_EXT_private_data ===
- vkCreatePrivateDataSlotEXT =
- PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) );
+ vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) );
if ( !vkCreatePrivateDataSlot )
vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT;
- vkDestroyPrivateDataSlotEXT =
- PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) );
+ vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) );
if ( !vkDestroyPrivateDataSlot )
vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT;
vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) );
@@ -1147,135 +1083,111 @@ namespace VULKAN_HPP_NAMESPACE
vkGetPrivateData = vkGetPrivateDataEXT;
//=== VK_EXT_sample_locations ===
- vkCmdSetSampleLocationsEXT =
- PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) );
+ vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) );
+
+ //=== VK_EXT_shader_module_identifier ===
+ vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleIdentifierEXT" ) );
+ vkGetShaderModuleCreateInfoIdentifierEXT =
+ PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleCreateInfoIdentifierEXT" ) );
//=== VK_EXT_transform_feedback ===
- vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT(
- vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) );
- vkCmdBeginTransformFeedbackEXT =
- PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) );
- vkCmdEndTransformFeedbackEXT =
- PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) );
- vkCmdBeginQueryIndexedEXT =
- PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) );
- vkCmdEndQueryIndexedEXT =
- PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) );
- vkCmdDrawIndirectByteCountEXT =
- PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) );
+ vkCmdBindTransformFeedbackBuffersEXT =
+ PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) );
+ vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) );
+ vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) );
+ vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) );
+ vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) );
+ vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) );
//=== VK_EXT_validation_cache ===
- vkCreateValidationCacheEXT =
- PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) );
- vkDestroyValidationCacheEXT =
- PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) );
- vkMergeValidationCachesEXT =
- PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) );
- vkGetValidationCacheDataEXT =
- PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) );
+ vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) );
+ vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) );
+ vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) );
+ vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) );
//=== VK_EXT_vertex_input_dynamic_state ===
vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) );
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
- vkCreateBufferCollectionFUCHSIA =
- PFN_vkCreateBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkCreateBufferCollectionFUCHSIA" ) );
- vkSetBufferCollectionImageConstraintsFUCHSIA = PFN_vkSetBufferCollectionImageConstraintsFUCHSIA(
- vkGetDeviceProcAddr( device, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) );
- vkSetBufferCollectionBufferConstraintsFUCHSIA = PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA(
- vkGetDeviceProcAddr( device, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) );
- vkDestroyBufferCollectionFUCHSIA =
- PFN_vkDestroyBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkDestroyBufferCollectionFUCHSIA" ) );
- vkGetBufferCollectionPropertiesFUCHSIA = PFN_vkGetBufferCollectionPropertiesFUCHSIA(
- vkGetDeviceProcAddr( device, "vkGetBufferCollectionPropertiesFUCHSIA" ) );
+ vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkCreateBufferCollectionFUCHSIA" ) );
+ vkSetBufferCollectionImageConstraintsFUCHSIA =
+ PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) );
+ vkSetBufferCollectionBufferConstraintsFUCHSIA =
+ PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) );
+ vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkDestroyBufferCollectionFUCHSIA" ) );
+ vkGetBufferCollectionPropertiesFUCHSIA =
+ PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetBufferCollectionPropertiesFUCHSIA" ) );
# endif /*VK_USE_PLATFORM_FUCHSIA*/
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_external_memory ===
- vkGetMemoryZirconHandleFUCHSIA =
- PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) );
- vkGetMemoryZirconHandlePropertiesFUCHSIA = PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA(
- vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) );
+ vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) );
+ vkGetMemoryZirconHandlePropertiesFUCHSIA =
+ PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) );
# endif /*VK_USE_PLATFORM_FUCHSIA*/
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_external_semaphore ===
- vkImportSemaphoreZirconHandleFUCHSIA = PFN_vkImportSemaphoreZirconHandleFUCHSIA(
- vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) );
- vkGetSemaphoreZirconHandleFUCHSIA =
- PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) );
+ vkImportSemaphoreZirconHandleFUCHSIA =
+ PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) );
+ vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) );
# endif /*VK_USE_PLATFORM_FUCHSIA*/
//=== VK_GOOGLE_display_timing ===
- vkGetRefreshCycleDurationGOOGLE =
- PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) );
- vkGetPastPresentationTimingGOOGLE =
- PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) );
+ vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) );
+ vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) );
//=== VK_HUAWEI_invocation_mask ===
- vkCmdBindInvocationMaskHUAWEI =
- PFN_vkCmdBindInvocationMaskHUAWEI( vkGetDeviceProcAddr( device, "vkCmdBindInvocationMaskHUAWEI" ) );
+ vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetDeviceProcAddr( device, "vkCmdBindInvocationMaskHUAWEI" ) );
//=== VK_HUAWEI_subpass_shading ===
- vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
- vkGetDeviceProcAddr( device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) );
- vkCmdSubpassShadingHUAWEI =
- PFN_vkCmdSubpassShadingHUAWEI( vkGetDeviceProcAddr( device, "vkCmdSubpassShadingHUAWEI" ) );
+ vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI =
+ PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetDeviceProcAddr( device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) );
+ vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetDeviceProcAddr( device, "vkCmdSubpassShadingHUAWEI" ) );
//=== VK_INTEL_performance_query ===
- vkInitializePerformanceApiINTEL =
- PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) );
- vkUninitializePerformanceApiINTEL =
- PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) );
- vkCmdSetPerformanceMarkerINTEL =
- PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) );
- vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL(
- vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) );
- vkCmdSetPerformanceOverrideINTEL =
- PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) );
- vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL(
- vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) );
- vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL(
- vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) );
- vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL(
- vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) );
- vkGetPerformanceParameterINTEL =
- PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) );
+ vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) );
+ vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) );
+ vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) );
+ vkCmdSetPerformanceStreamMarkerINTEL =
+ PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) );
+ vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) );
+ vkAcquirePerformanceConfigurationINTEL =
+ PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) );
+ vkReleasePerformanceConfigurationINTEL =
+ PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) );
+ vkQueueSetPerformanceConfigurationINTEL =
+ PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) );
+ vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) );
//=== VK_KHR_acceleration_structure ===
- vkCreateAccelerationStructureKHR =
- PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) );
- vkDestroyAccelerationStructureKHR =
- PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) );
- vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR(
- vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) );
- vkCmdBuildAccelerationStructuresIndirectKHR = PFN_vkCmdBuildAccelerationStructuresIndirectKHR(
- vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) );
- vkBuildAccelerationStructuresKHR =
- PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) );
- vkCopyAccelerationStructureKHR =
- PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) );
- vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR(
- vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) );
- vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR(
- vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) );
- vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR(
- vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) );
- vkCmdCopyAccelerationStructureKHR =
- PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) );
- vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR(
- vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) );
- vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR(
- vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) );
- vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR(
- vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) );
- vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR(
- vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) );
- vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR(
- vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) );
- vkGetAccelerationStructureBuildSizesKHR = PFN_vkGetAccelerationStructureBuildSizesKHR(
- vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) );
+ vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) );
+ vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) );
+ vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) );
+ vkCmdBuildAccelerationStructuresIndirectKHR =
+ PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) );
+ vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) );
+ vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) );
+ vkCopyAccelerationStructureToMemoryKHR =
+ PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) );
+ vkCopyMemoryToAccelerationStructureKHR =
+ PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) );
+ vkWriteAccelerationStructuresPropertiesKHR =
+ PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) );
+ vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) );
+ vkCmdCopyAccelerationStructureToMemoryKHR =
+ PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) );
+ vkCmdCopyMemoryToAccelerationStructureKHR =
+ PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) );
+ vkGetAccelerationStructureDeviceAddressKHR =
+ PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) );
+ vkCmdWriteAccelerationStructuresPropertiesKHR =
+ PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) );
+ vkGetDeviceAccelerationStructureCompatibilityKHR =
+ PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) );
+ vkGetAccelerationStructureBuildSizesKHR =
+ PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) );
//=== VK_KHR_bind_memory2 ===
vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) );
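A minimal standalone sketch (not part of the generated header) of the loading pattern these hunks use: resolve the extension entry point with vkGetDeviceProcAddr and alias the promoted core pointer to it when the core symbol is absent. The pfn* variables and the loadBindBufferMemory2 helper are illustrative names only.

#include <vulkan/vulkan.h>

static PFN_vkBindBufferMemory2    pfnBindBufferMemory2    = nullptr;
static PFN_vkBindBufferMemory2KHR pfnBindBufferMemory2KHR = nullptr;

void loadBindBufferMemory2( VkDevice device )
{
  // Query both the core (Vulkan 1.1) and the KHR-suffixed symbol.
  pfnBindBufferMemory2    = reinterpret_cast<PFN_vkBindBufferMemory2>( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) );
  pfnBindBufferMemory2KHR = reinterpret_cast<PFN_vkBindBufferMemory2KHR>( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) );
  if ( !pfnBindBufferMemory2 )
    pfnBindBufferMemory2 = pfnBindBufferMemory2KHR;  // fall back to the extension pointer, as in the hunks above
}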
@@ -1286,16 +1198,14 @@ namespace VULKAN_HPP_NAMESPACE
vkBindImageMemory2 = vkBindImageMemory2KHR;
//=== VK_KHR_buffer_device_address ===
- vkGetBufferDeviceAddressKHR =
- PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) );
+ vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) );
if ( !vkGetBufferDeviceAddress )
vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR;
- vkGetBufferOpaqueCaptureAddressKHR =
- PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) );
+ vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) );
if ( !vkGetBufferOpaqueCaptureAddress )
vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR;
- vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR(
- vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
+ vkGetDeviceMemoryOpaqueCaptureAddressKHR =
+ PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
if ( !vkGetDeviceMemoryOpaqueCaptureAddress )
vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR;
@@ -1306,12 +1216,10 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) );
if ( !vkCmdCopyImage2 )
vkCmdCopyImage2 = vkCmdCopyImage2KHR;
- vkCmdCopyBufferToImage2KHR =
- PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) );
+ vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) );
if ( !vkCmdCopyBufferToImage2 )
vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR;
- vkCmdCopyImageToBuffer2KHR =
- PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) );
+ vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) );
if ( !vkCmdCopyImageToBuffer2 )
vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR;
vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) );
@@ -1325,8 +1233,7 @@ namespace VULKAN_HPP_NAMESPACE
vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) );
if ( !vkCreateRenderPass2 )
vkCreateRenderPass2 = vkCreateRenderPass2KHR;
- vkCmdBeginRenderPass2KHR =
- PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) );
+ vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) );
if ( !vkCmdBeginRenderPass2 )
vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR;
vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) );
@@ -1337,36 +1244,31 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR;
//=== VK_KHR_deferred_host_operations ===
- vkCreateDeferredOperationKHR =
- PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) );
- vkDestroyDeferredOperationKHR =
- PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) );
- vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR(
- vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) );
- vkGetDeferredOperationResultKHR =
- PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) );
- vkDeferredOperationJoinKHR =
- PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) );
+ vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) );
+ vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) );
+ vkGetDeferredOperationMaxConcurrencyKHR =
+ PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) );
+ vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) );
+ vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) );
//=== VK_KHR_descriptor_update_template ===
- vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR(
- vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) );
+ vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) );
if ( !vkCreateDescriptorUpdateTemplate )
vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR;
- vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR(
- vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) );
+ vkDestroyDescriptorUpdateTemplateKHR =
+ PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) );
if ( !vkDestroyDescriptorUpdateTemplate )
vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR;
- vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR(
- vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) );
+ vkUpdateDescriptorSetWithTemplateKHR =
+ PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) );
if ( !vkUpdateDescriptorSetWithTemplate )
vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR;
- vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR(
- vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) );
+ vkCmdPushDescriptorSetWithTemplateKHR =
+ PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) );
//=== VK_KHR_device_group ===
- vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR(
- vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) );
+ vkGetDeviceGroupPeerMemoryFeaturesKHR =
+ PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) );
if ( !vkGetDeviceGroupPeerMemoryFeatures )
vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR;
vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) );
@@ -1375,23 +1277,20 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) );
if ( !vkCmdDispatchBase )
vkCmdDispatchBase = vkCmdDispatchBaseKHR;
- vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR(
- vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
- vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR(
- vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
+ vkGetDeviceGroupPresentCapabilitiesKHR =
+ PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
+ vkGetDeviceGroupSurfacePresentModesKHR =
+ PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) );
//=== VK_KHR_display_swapchain ===
- vkCreateSharedSwapchainsKHR =
- PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) );
+ vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) );
//=== VK_KHR_draw_indirect_count ===
- vkCmdDrawIndirectCountKHR =
- PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) );
+ vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) );
if ( !vkCmdDrawIndirectCount )
vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR;
- vkCmdDrawIndexedIndirectCountKHR =
- PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) );
+ vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) );
if ( !vkCmdDrawIndexedIndirectCount )
vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR;
@@ -1409,23 +1308,18 @@ namespace VULKAN_HPP_NAMESPACE
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_fence_win32 ===
- vkImportFenceWin32HandleKHR =
- PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) );
- vkGetFenceWin32HandleKHR =
- PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) );
+ vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) );
+ vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) );
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_memory_fd ===
- vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) );
- vkGetMemoryFdPropertiesKHR =
- PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) );
+ vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) );
+ vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) );
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_memory_win32 ===
- vkGetMemoryWin32HandleKHR =
- PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) );
- vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR(
- vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) );
+ vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) );
+ vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) );
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_semaphore_fd ===
@@ -1434,27 +1328,22 @@ namespace VULKAN_HPP_NAMESPACE
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_semaphore_win32 ===
- vkImportSemaphoreWin32HandleKHR =
- PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) );
- vkGetSemaphoreWin32HandleKHR =
- PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) );
+ vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) );
+ vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) );
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_fragment_shading_rate ===
- vkCmdSetFragmentShadingRateKHR =
- PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) );
+ vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) );
//=== VK_KHR_get_memory_requirements2 ===
- vkGetImageMemoryRequirements2KHR =
- PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) );
+ vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) );
if ( !vkGetImageMemoryRequirements2 )
vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR;
- vkGetBufferMemoryRequirements2KHR =
- PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) );
+ vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) );
if ( !vkGetBufferMemoryRequirements2 )
vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR;
- vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR(
- vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) );
+ vkGetImageSparseMemoryRequirements2KHR =
+ PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) );
if ( !vkGetImageSparseMemoryRequirements2 )
vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR;
@@ -1464,82 +1353,75 @@ namespace VULKAN_HPP_NAMESPACE
vkTrimCommandPool = vkTrimCommandPoolKHR;
//=== VK_KHR_maintenance3 ===
- vkGetDescriptorSetLayoutSupportKHR =
- PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) );
+ vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) );
if ( !vkGetDescriptorSetLayoutSupport )
vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR;
//=== VK_KHR_maintenance4 ===
- vkGetDeviceBufferMemoryRequirementsKHR = PFN_vkGetDeviceBufferMemoryRequirementsKHR(
- vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirementsKHR" ) );
+ vkGetDeviceBufferMemoryRequirementsKHR =
+ PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirementsKHR" ) );
if ( !vkGetDeviceBufferMemoryRequirements )
vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR;
- vkGetDeviceImageMemoryRequirementsKHR = PFN_vkGetDeviceImageMemoryRequirementsKHR(
- vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirementsKHR" ) );
+ vkGetDeviceImageMemoryRequirementsKHR =
+ PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirementsKHR" ) );
if ( !vkGetDeviceImageMemoryRequirements )
vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR;
- vkGetDeviceImageSparseMemoryRequirementsKHR = PFN_vkGetDeviceImageSparseMemoryRequirementsKHR(
- vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) );
+ vkGetDeviceImageSparseMemoryRequirementsKHR =
+ PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) );
if ( !vkGetDeviceImageSparseMemoryRequirements )
vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR;
//=== VK_KHR_performance_query ===
- vkAcquireProfilingLockKHR =
- PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) );
- vkReleaseProfilingLockKHR =
- PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) );
+ vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) );
+ vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) );
//=== VK_KHR_pipeline_executable_properties ===
- vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR(
- vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) );
- vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR(
- vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) );
- vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR(
- vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
+ vkGetPipelineExecutablePropertiesKHR =
+ PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) );
+ vkGetPipelineExecutableStatisticsKHR =
+ PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) );
+ vkGetPipelineExecutableInternalRepresentationsKHR =
+ PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
//=== VK_KHR_present_wait ===
vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetDeviceProcAddr( device, "vkWaitForPresentKHR" ) );
//=== VK_KHR_push_descriptor ===
- vkCmdPushDescriptorSetKHR =
- PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) );
+ vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) );
+
+ //=== VK_KHR_ray_tracing_maintenance1 ===
+ vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirect2KHR" ) );
//=== VK_KHR_ray_tracing_pipeline ===
- vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) );
- vkCreateRayTracingPipelinesKHR =
- PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) );
- vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR(
- vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) );
- vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
- vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) );
- vkCmdTraceRaysIndirectKHR =
- PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) );
- vkGetRayTracingShaderGroupStackSizeKHR = PFN_vkGetRayTracingShaderGroupStackSizeKHR(
- vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) );
- vkCmdSetRayTracingPipelineStackSizeKHR = PFN_vkCmdSetRayTracingPipelineStackSizeKHR(
- vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) );
+ vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) );
+ vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) );
+ vkGetRayTracingShaderGroupHandlesKHR =
+ PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) );
+ vkGetRayTracingCaptureReplayShaderGroupHandlesKHR =
+ PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) );
+ vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) );
+ vkGetRayTracingShaderGroupStackSizeKHR =
+ PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) );
+ vkCmdSetRayTracingPipelineStackSizeKHR =
+ PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) );
//=== VK_KHR_sampler_ycbcr_conversion ===
- vkCreateSamplerYcbcrConversionKHR =
- PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) );
+ vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) );
if ( !vkCreateSamplerYcbcrConversion )
vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR;
- vkDestroySamplerYcbcrConversionKHR =
- PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) );
+ vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) );
if ( !vkDestroySamplerYcbcrConversion )
vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR;
//=== VK_KHR_shared_presentable_image ===
- vkGetSwapchainStatusKHR =
- PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) );
+ vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) );
//=== VK_KHR_swapchain ===
- vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) );
- vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) );
- vkGetSwapchainImagesKHR =
- PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) );
- vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) );
- vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) );
+ vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) );
+ vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) );
+ vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) );
+ vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) );
+ vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) );
//=== VK_KHR_synchronization2 ===
vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) );
@@ -1551,25 +1433,20 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) );
if ( !vkCmdWaitEvents2 )
vkCmdWaitEvents2 = vkCmdWaitEvents2KHR;
- vkCmdPipelineBarrier2KHR =
- PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) );
+ vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) );
if ( !vkCmdPipelineBarrier2 )
vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR;
- vkCmdWriteTimestamp2KHR =
- PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) );
+ vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) );
if ( !vkCmdWriteTimestamp2 )
vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR;
vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) );
if ( !vkQueueSubmit2 )
vkQueueSubmit2 = vkQueueSubmit2KHR;
- vkCmdWriteBufferMarker2AMD =
- PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) );
- vkGetQueueCheckpointData2NV =
- PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) );
+ vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) );
+ vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) );
//=== VK_KHR_timeline_semaphore ===
- vkGetSemaphoreCounterValueKHR =
- PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) );
+ vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) );
if ( !vkGetSemaphoreCounterValue )
vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR;
vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) );
@@ -1591,25 +1468,17 @@ namespace VULKAN_HPP_NAMESPACE
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_video_queue ===
- vkCreateVideoSessionKHR =
- PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) );
- vkDestroyVideoSessionKHR =
- PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) );
- vkGetVideoSessionMemoryRequirementsKHR = PFN_vkGetVideoSessionMemoryRequirementsKHR(
- vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) );
- vkBindVideoSessionMemoryKHR =
- PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) );
- vkCreateVideoSessionParametersKHR =
- PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) );
- vkUpdateVideoSessionParametersKHR =
- PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) );
- vkDestroyVideoSessionParametersKHR =
- PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) );
- vkCmdBeginVideoCodingKHR =
- PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) );
- vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) );
- vkCmdControlVideoCodingKHR =
- PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) );
+ vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) );
+ vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) );
+ vkGetVideoSessionMemoryRequirementsKHR =
+ PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) );
+ vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) );
+ vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) );
+ vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) );
+ vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) );
+ vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) );
+ vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) );
+ vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) );
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_NVX_binary_import ===
@@ -1620,92 +1489,83 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) );
//=== VK_NVX_image_view_handle ===
- vkGetImageViewHandleNVX =
- PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) );
- vkGetImageViewAddressNVX =
- PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) );
+ vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) );
+ vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) );
//=== VK_NV_clip_space_w_scaling ===
- vkCmdSetViewportWScalingNV =
- PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) );
+ vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) );
//=== VK_NV_device_diagnostic_checkpoints ===
- vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) );
- vkGetQueueCheckpointDataNV =
- PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) );
+ vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) );
+ vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) );
//=== VK_NV_device_generated_commands ===
- vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV(
- vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) );
- vkCmdPreprocessGeneratedCommandsNV =
- PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) );
- vkCmdExecuteGeneratedCommandsNV =
- PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) );
- vkCmdBindPipelineShaderGroupNV =
- PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) );
- vkCreateIndirectCommandsLayoutNV =
- PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) );
- vkDestroyIndirectCommandsLayoutNV =
- PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) );
+ vkGetGeneratedCommandsMemoryRequirementsNV =
+ PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) );
+ vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) );
+ vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) );
+ vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) );
+ vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) );
+ vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) );
//=== VK_NV_external_memory_rdma ===
- vkGetMemoryRemoteAddressNV =
- PFN_vkGetMemoryRemoteAddressNV( vkGetDeviceProcAddr( device, "vkGetMemoryRemoteAddressNV" ) );
+ vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetDeviceProcAddr( device, "vkGetMemoryRemoteAddressNV" ) );
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_NV_external_memory_win32 ===
- vkGetMemoryWin32HandleNV =
- PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) );
+ vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) );
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_NV_fragment_shading_rate_enums ===
- vkCmdSetFragmentShadingRateEnumNV =
- PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) );
+ vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) );
//=== VK_NV_mesh_shader ===
- vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) );
- vkCmdDrawMeshTasksIndirectNV =
- PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) );
- vkCmdDrawMeshTasksIndirectCountNV =
- PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) );
+ vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) );
+ vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) );
+ vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) );
+
+ //=== VK_NV_optical_flow ===
+ vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkCreateOpticalFlowSessionNV" ) );
+ vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkDestroyOpticalFlowSessionNV" ) );
+ vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) );
+ vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) );
//=== VK_NV_ray_tracing ===
- vkCreateAccelerationStructureNV =
- PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) );
- vkDestroyAccelerationStructureNV =
- PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) );
- vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV(
- vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) );
- vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV(
- vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) );
- vkCmdBuildAccelerationStructureNV =
- PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) );
- vkCmdCopyAccelerationStructureNV =
- PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) );
- vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) );
- vkCreateRayTracingPipelinesNV =
- PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) );
- vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV(
- vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) );
+ vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) );
+ vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) );
+ vkGetAccelerationStructureMemoryRequirementsNV =
+ PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) );
+ vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) );
+ vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) );
+ vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) );
+ vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) );
+ vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) );
+ vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) );
if ( !vkGetRayTracingShaderGroupHandlesKHR )
vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV;
- vkGetAccelerationStructureHandleNV =
- PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) );
- vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV(
- vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
+ vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) );
+ vkCmdWriteAccelerationStructuresPropertiesNV =
+ PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) );
//=== VK_NV_scissor_exclusive ===
- vkCmdSetExclusiveScissorNV =
- PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) );
+ vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) );
//=== VK_NV_shading_rate_image ===
- vkCmdBindShadingRateImageNV =
- PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) );
- vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV(
- vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) );
- vkCmdSetCoarseSampleOrderNV =
- PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) );
+ vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) );
+ vkCmdSetViewportShadingRatePaletteNV =
+ PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) );
+ vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) );
+
+ //=== VK_QCOM_tile_properties ===
+ vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) );
+ vkGetDynamicRenderingTilePropertiesQCOM =
+ PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetDynamicRenderingTilePropertiesQCOM" ) );
+
+ //=== VK_VALVE_descriptor_set_host_mapping ===
+ vkGetDescriptorSetLayoutHostMappingInfoVALVE =
+ PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) );
+ vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetHostMappingVALVE" ) );
}
public:
@@ -1955,6 +1815,9 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0;
PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0;
+ //=== VK_EXT_device_fault ===
+ PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT = 0;
+
//=== VK_EXT_discard_rectangles ===
PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0;
@@ -1985,6 +1848,39 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = 0;
PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = 0;
+ //=== VK_EXT_extended_dynamic_state3 ===
+ PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT = 0;
+ PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0;
+ PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT = 0;
+ PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT = 0;
+ PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT = 0;
+ PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT = 0;
+ PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT = 0;
+ PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT = 0;
+ PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT = 0;
+ PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT = 0;
+ PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT = 0;
+ PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT = 0;
+ PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT = 0;
+ PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT = 0;
+ PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT = 0;
+ PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT = 0;
+ PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT = 0;
+ PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT = 0;
+ PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT = 0;
+ PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT = 0;
+ PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT = 0;
+ PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV = 0;
+ PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV = 0;
+ PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV = 0;
+ PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV = 0;
+ PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV = 0;
+ PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV = 0;
+ PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV = 0;
+ PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV = 0;
+ PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV = 0;
+ PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV = 0;
+
//=== VK_EXT_external_memory_host ===
PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0;
@@ -2005,19 +1901,53 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_host_query_reset ===
PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0;
+ //=== VK_EXT_image_compression_control ===
+ PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0;
+
//=== VK_EXT_image_drm_format_modifier ===
PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0;
//=== VK_EXT_line_rasterization ===
PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0;
+ //=== VK_EXT_mesh_shader ===
+ PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT = 0;
+ PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT = 0;
+ PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT = 0;
+
+# if defined( VK_USE_PLATFORM_METAL_EXT )
+ //=== VK_EXT_metal_objects ===
+ PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT = 0;
+# else
+ PFN_dummy vkExportMetalObjectsEXT_placeholder = 0;
+# endif /*VK_USE_PLATFORM_METAL_EXT*/
+
//=== VK_EXT_multi_draw ===
PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT = 0;
PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT = 0;
+ //=== VK_EXT_opacity_micromap ===
+ PFN_vkCreateMicromapEXT vkCreateMicromapEXT = 0;
+ PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT = 0;
+ PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT = 0;
+ PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT = 0;
+ PFN_vkCopyMicromapEXT vkCopyMicromapEXT = 0;
+ PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT = 0;
+ PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT = 0;
+ PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT = 0;
+ PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT = 0;
+ PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT = 0;
+ PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT = 0;
+ PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT = 0;
+ PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT = 0;
+ PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT = 0;
+
//=== VK_EXT_pageable_device_local_memory ===
PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT = 0;
+ //=== VK_EXT_pipeline_properties ===
+ PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT = 0;
+
//=== VK_EXT_private_data ===
PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0;
PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0;
@@ -2027,6 +1957,10 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_sample_locations ===
PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0;
+ //=== VK_EXT_shader_module_identifier ===
+ PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT = 0;
+ PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT = 0;
+
//=== VK_EXT_transform_feedback ===
PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0;
PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0;
@@ -2245,6 +2179,9 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_push_descriptor ===
PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0;
+ //=== VK_KHR_ray_tracing_maintenance1 ===
+ PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR = 0;
+
//=== VK_KHR_ray_tracing_pipeline ===
PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0;
PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0;
@@ -2366,6 +2303,12 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0;
PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0;
+ //=== VK_NV_optical_flow ===
+ PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV = 0;
+ PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV = 0;
+ PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0;
+ PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0;
+
//=== VK_NV_ray_tracing ===
PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0;
PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0;
@@ -2387,6 +2330,14 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0;
PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0;
PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0;
+
+ //=== VK_QCOM_tile_properties ===
+ PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0;
+ PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0;
+
+ //=== VK_VALVE_descriptor_set_host_mapping ===
+ PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE = 0;
+ PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE = 0;
};
//========================================
@@ -2476,6 +2427,12 @@ namespace VULKAN_HPP_NAMESPACE
class BufferCollectionFUCHSIA;
# endif /*VK_USE_PLATFORM_FUCHSIA*/
+ //=== VK_EXT_opacity_micromap ===
+ class MicromapEXT;
+
+ //=== VK_NV_optical_flow ===
+ class OpticalFlowSessionNV;
+
//====================
//=== RAII HANDLES ===
//====================
@@ -2491,7 +2448,8 @@ namespace VULKAN_HPP_NAMESPACE
Context( PFN_vkGetInstanceProcAddr getInstanceProcAddr )
: m_dispatcher( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher( getInstanceProcAddr ) )
# endif
- {}
+ {
+ }
~Context() = default;
@@ -2503,7 +2461,8 @@ namespace VULKAN_HPP_NAMESPACE
# else
: m_dispatcher( rhs.m_dispatcher.release() )
# endif
- {}
+ {
+ }
Context & operator=( Context const & ) = delete;
Context & operator =( Context && rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -2517,14 +2476,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher const * getDispatcher() const
+ {
+ VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+ return &*m_dispatcher;
+ }
+
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Context & rhs )
+ {
+# if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+ std::swap( m_dynamicLoader, rhs.m_dynamicLoader );
+# endif
+ m_dispatcher.swap( rhs.m_dispatcher );
+ }
+
//=== VK_VERSION_1_0 ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Instance createInstance(
- VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Instance
+ createInstance( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
- VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties> enumerateInstanceExtensionProperties(
- Optional<const std::string> layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties>
+ enumerateInstanceExtensionProperties( Optional<const std::string> layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::LayerProperties> enumerateInstanceLayerProperties() const;
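A hedged usage sketch (not part of the generated header) of how the RAII Context declared above is typically driven; the application/engine names and the API version are placeholders, and the Context is assumed to outlive the returned Instance.

#include <vulkan/vulkan_raii.hpp>

vk::raii::Instance createExampleInstance( vk::raii::Context const & context )
{
  vk::ApplicationInfo    appInfo( "Example", 1, "NoEngine", 1, VK_API_VERSION_1_3 );
  vk::InstanceCreateInfo createInfo( {}, &appInfo );
  // Throws vk::SystemError on failure when exceptions are enabled.
  return context.createInstance( createInfo );
}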
@@ -2532,12 +2505,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD uint32_t enumerateInstanceVersion() const;
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher const * getDispatcher() const
- {
- VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
- return &*m_dispatcher;
- }
-
private:
# if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
VULKAN_HPP_NAMESPACE::DynamicLoader m_dynamicLoader;
@@ -2550,8 +2517,7 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkInstance;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eInstance;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eInstance;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance;
@@ -2561,37 +2527,32 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- context.getDispatcher()->vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkInstance *>( &m_instance ) ) );
+ VULKAN_HPP_NAMESPACE::Result result =
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( context.getDispatcher()->vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkInstance *>( &m_instance ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateInstance" );
}
- m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher(
- context.getDispatcher()->vkGetInstanceProcAddr, static_cast<VkInstance>( m_instance ) ) );
+ m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher( context.getDispatcher()->vkGetInstanceProcAddr,
+ static_cast<VkInstance>( m_instance ) ) );
}
Instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Context const & context,
VkInstance instance,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
- : m_instance( instance )
- , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+ : m_instance( instance ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
{
- m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher(
- context.getDispatcher()->vkGetInstanceProcAddr, static_cast<VkInstance>( m_instance ) ) );
+ m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher( context.getDispatcher()->vkGetInstanceProcAddr,
+ static_cast<VkInstance>( m_instance ) ) );
}
Instance( std::nullptr_t ) {}
~Instance()
{
- if ( m_instance )
- {
- getDispatcher()->vkDestroyInstance( static_cast<VkInstance>( m_instance ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
Instance() = delete;
@@ -2600,17 +2561,14 @@ namespace VULKAN_HPP_NAMESPACE
: m_instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( rhs.m_dispatcher.release() )
- {}
+ {
+ }
Instance & operator=( Instance const & ) = delete;
Instance & operator =( Instance && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_instance )
- {
- getDispatcher()->vkDestroyInstance( static_cast<VkInstance>( m_instance ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_instance = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
m_dispatcher.reset( rhs.m_dispatcher.release() );
@@ -2623,12 +2581,30 @@ namespace VULKAN_HPP_NAMESPACE
return m_instance;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_instance )
+ {
+ getDispatcher()->vkDestroyInstance( static_cast<VkInstance>( m_instance ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_instance = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return &*m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_instance, rhs.m_instance );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
//=== VK_VERSION_1_0 ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice> enumeratePhysicalDevices() const;
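A hedged sketch (not part of the generated header) exercising the clear() and swap() members introduced above; 'context' and 'createInfo' are assumed to be set up as in the previous sketch, and transferOwnership is an illustrative name.

#include <vulkan/vulkan_raii.hpp>

void transferOwnership( vk::raii::Context const & context, vk::InstanceCreateInfo const & createInfo )
{
  vk::raii::Instance a = context.createInstance( createInfo );
  vk::raii::Instance b( nullptr );  // empty handle, per the Instance( std::nullptr_t ) constructor
  a.swap( b );                      // b now owns the VkInstance; a is left empty
  b.clear();                        // destroys the VkInstance and resets b's handle, allocator and dispatcher
}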
@@ -2637,60 +2613,59 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_VERSION_1_1 ===
- VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>
- enumeratePhysicalDeviceGroups() const;
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties> enumeratePhysicalDeviceGroups() const;
//=== VK_KHR_display ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createDisplayPlaneSurfaceKHR(
- VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+ createDisplayPlaneSurfaceKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# if defined( VK_USE_PLATFORM_XLIB_KHR )
//=== VK_KHR_xlib_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createXlibSurfaceKHR(
- VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+ createXlibSurfaceKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# endif /*VK_USE_PLATFORM_XLIB_KHR*/
# if defined( VK_USE_PLATFORM_XCB_KHR )
//=== VK_KHR_xcb_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createXcbSurfaceKHR(
- VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+ createXcbSurfaceKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# endif /*VK_USE_PLATFORM_XCB_KHR*/
# if defined( VK_USE_PLATFORM_WAYLAND_KHR )
//=== VK_KHR_wayland_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createWaylandSurfaceKHR(
- VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+ createWaylandSurfaceKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
# if defined( VK_USE_PLATFORM_ANDROID_KHR )
//=== VK_KHR_android_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createAndroidSurfaceKHR(
- VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+ createAndroidSurfaceKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_win32_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createWin32SurfaceKHR(
- VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+ createWin32SurfaceKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_EXT_debug_report ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT createDebugReportCallbackEXT(
- VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT
+ createDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_,
@@ -2703,92 +2678,90 @@ namespace VULKAN_HPP_NAMESPACE
# if defined( VK_USE_PLATFORM_GGP )
//=== VK_GGP_stream_descriptor_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createStreamDescriptorSurfaceGGP(
- VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+ createStreamDescriptorSurfaceGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# endif /*VK_USE_PLATFORM_GGP*/
# if defined( VK_USE_PLATFORM_VI_NN )
//=== VK_NN_vi_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createViSurfaceNN(
- VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+ createViSurfaceNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# endif /*VK_USE_PLATFORM_VI_NN*/
//=== VK_KHR_device_group_creation ===
- VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>
- enumeratePhysicalDeviceGroupsKHR() const;
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties> enumeratePhysicalDeviceGroupsKHR() const;
# if defined( VK_USE_PLATFORM_IOS_MVK )
//=== VK_MVK_ios_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createIOSSurfaceMVK(
- VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+ createIOSSurfaceMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# endif /*VK_USE_PLATFORM_IOS_MVK*/
# if defined( VK_USE_PLATFORM_MACOS_MVK )
//=== VK_MVK_macos_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createMacOSSurfaceMVK(
- VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+ createMacOSSurfaceMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# endif /*VK_USE_PLATFORM_MACOS_MVK*/
//=== VK_EXT_debug_utils ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT createDebugUtilsMessengerEXT(
- VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT
+ createDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
- void submitDebugUtilsMessageEXT(
- VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
- VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,
- const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData ) const VULKAN_HPP_NOEXCEPT;
+ void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
+ VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,
+ const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData ) const VULKAN_HPP_NOEXCEPT;
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_imagepipe_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createImagePipeSurfaceFUCHSIA(
- VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+ createImagePipeSurfaceFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# endif /*VK_USE_PLATFORM_FUCHSIA*/
# if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createMetalSurfaceEXT(
- VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+ createMetalSurfaceEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# endif /*VK_USE_PLATFORM_METAL_EXT*/
//=== VK_EXT_headless_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createHeadlessSurfaceEXT(
- VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+ createHeadlessSurfaceEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
//=== VK_EXT_directfb_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createDirectFBSurfaceEXT(
- VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+ createDirectFBSurfaceEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
# if defined( VK_USE_PLATFORM_SCREEN_QNX )
//=== VK_QNX_screen_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createScreenSurfaceQNX(
- VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+ createScreenSurfaceQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# endif /*VK_USE_PLATFORM_SCREEN_QNX*/
private:
VULKAN_HPP_NAMESPACE::Instance m_instance = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
std::unique_ptr<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher> m_dispatcher;
};
@@ -2797,27 +2770,37 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkPhysicalDevice;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice;
public:
- PhysicalDevice( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
- VkPhysicalDevice physicalDevice )
+ PhysicalDevice( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VkPhysicalDevice physicalDevice )
: m_physicalDevice( physicalDevice ), m_dispatcher( instance.getDispatcher() )
- {}
+ {
+ }
PhysicalDevice( std::nullptr_t ) {}
- PhysicalDevice() = delete;
- PhysicalDevice( PhysicalDevice const & ) = delete;
+ ~PhysicalDevice()
+ {
+ clear();
+ }
+
+ PhysicalDevice() = delete;
+ PhysicalDevice( PhysicalDevice const & rhs ) : m_physicalDevice( rhs.m_physicalDevice ), m_dispatcher( rhs.m_dispatcher ) {}
PhysicalDevice( PhysicalDevice && rhs ) VULKAN_HPP_NOEXCEPT
: m_physicalDevice( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
- PhysicalDevice & operator=( PhysicalDevice const & ) = delete;
- PhysicalDevice & operator =( PhysicalDevice && rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ }
+ PhysicalDevice & operator=( PhysicalDevice const & rhs )
+ {
+ m_physicalDevice = rhs.m_physicalDevice;
+ m_dispatcher = rhs.m_dispatcher;
+ return *this;
+ }
+ PhysicalDevice & operator=( PhysicalDevice && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
@@ -2832,40 +2815,49 @@ namespace VULKAN_HPP_NAMESPACE
return m_physicalDevice;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ m_physicalDevice = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_physicalDevice, rhs.m_physicalDevice );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
//=== VK_VERSION_1_0 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures getFeatures() const VULKAN_HPP_NOEXCEPT;
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties
- getFormatProperties( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties getFormatProperties( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT;
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageFormatProperties getImageFormatProperties(
- VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::ImageType type,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
- VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageFormatProperties
+ getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::ImageType type,
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
+ VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties getProperties() const VULKAN_HPP_NOEXCEPT;
- VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties>
- getQueueFamilyProperties() const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties> getQueueFamilyProperties() const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties
- getMemoryProperties() const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties getMemoryProperties() const VULKAN_HPP_NOEXCEPT;
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Device createDevice(
- VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Device
+ createDevice( VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
- VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties> enumerateDeviceExtensionProperties(
- Optional<const std::string> layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties>
+ enumerateDeviceExtensionProperties( Optional<const std::string> layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::LayerProperties> enumerateDeviceLayerProperties() const;
@@ -2874,7 +2866,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ImageType type,
VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling ) const;
//=== VK_VERSION_1_1 ===
@@ -2888,45 +2880,39 @@ namespace VULKAN_HPP_NAMESPACE
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getProperties2() const VULKAN_HPP_NOEXCEPT;
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2
- getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
- getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageFormatProperties2
- getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const;
+ getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
- getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const;
+ getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const;
- VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>
- getQueueFamilyProperties2() const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> getQueueFamilyProperties2() const;
template <typename StructureChain>
VULKAN_HPP_NODISCARD std::vector<StructureChain> getQueueFamilyProperties2() const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
- getMemoryProperties2() const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2() const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getMemoryProperties2() const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>
- getSparseImageFormatProperties2(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const VULKAN_HPP_NOEXCEPT;
+ getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferProperties(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties
+ getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT;
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFenceProperties(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties
+ getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT;
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphoreProperties(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const
- VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties
+ getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_VERSION_1_3 ===
@@ -2934,86 +2920,77 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32
- getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR
- getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>
- getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
+ getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
- VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR> getSurfacePresentModesKHR(
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR>
+ getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
//=== VK_KHR_swapchain ===
- VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::Rect2D>
- getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const;
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::Rect2D> getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const;
//=== VK_KHR_display ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR> getDisplayPropertiesKHR() const;
- VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR>
- getDisplayPlanePropertiesKHR() const;
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR> getDisplayPlanePropertiesKHR() const;
- VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::DisplayKHR>
- getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex ) const;
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::DisplayKHR> getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex ) const;
# if defined( VK_USE_PLATFORM_XLIB_KHR )
//=== VK_KHR_xlib_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getXlibPresentationSupportKHR(
- uint32_t queueFamilyIndex, Display & dpy, VisualID visualID ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32
+ getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID ) const VULKAN_HPP_NOEXCEPT;
# endif /*VK_USE_PLATFORM_XLIB_KHR*/
# if defined( VK_USE_PLATFORM_XCB_KHR )
//=== VK_KHR_xcb_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getXcbPresentationSupportKHR(
- uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32
+ getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT;
# endif /*VK_USE_PLATFORM_XCB_KHR*/
# if defined( VK_USE_PLATFORM_WAYLAND_KHR )
//=== VK_KHR_wayland_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32
- getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex,
- struct wl_display & display ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex,
+ struct wl_display & display ) const VULKAN_HPP_NOEXCEPT;
# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_win32_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32
- getWin32PresentationSupportKHR( uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_video_queue ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR
- getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR & videoProfile ) const;
+ getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const;
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
- getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR & videoProfile ) const;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const;
- VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR> getVideoFormatPropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo ) const;
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>
+ getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo ) const;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_NV_external_memory_capabilities ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV getExternalImageFormatPropertiesNV(
- VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::ImageType type,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
- VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType
- VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
+ VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::ImageType type,
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
+ VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
//=== VK_KHR_get_physical_device_properties2 ===
@@ -3022,107 +2999,100 @@ namespace VULKAN_HPP_NAMESPACE
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getFeatures2KHR() const VULKAN_HPP_NOEXCEPT;
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2
- getProperties2KHR() const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2KHR() const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getProperties2KHR() const VULKAN_HPP_NOEXCEPT;
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2
- getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
- getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT;
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageFormatProperties2 getImageFormatProperties2KHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageFormatProperties2
+ getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const;
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getImageFormatProperties2KHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const;
- VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>
- getQueueFamilyProperties2KHR() const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> getQueueFamilyProperties2KHR() const;
template <typename StructureChain>
VULKAN_HPP_NODISCARD std::vector<StructureChain> getQueueFamilyProperties2KHR() const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
- getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>
- getSparseImageFormatProperties2KHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const VULKAN_HPP_NOEXCEPT;
+ getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const;
//=== VK_KHR_external_memory_capabilities ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferPropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties
+ getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_external_semaphore_capabilities ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const
- VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties
+ getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT;
# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
//=== VK_EXT_acquire_xlib_display ===
void acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DisplayKHR getRandROutputDisplayEXT( Display & dpy,
- RROutput rrOutput ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DisplayKHR getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput ) const;
# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
//=== VK_EXT_display_surface_counter ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT
- getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const;
//=== VK_KHR_external_fence_capabilities ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFencePropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties
+ getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_performance_query ===
- VULKAN_HPP_NODISCARD std::pair<std::vector<PerformanceCounterKHR>, std::vector<PerformanceCounterDescriptionKHR>>
- enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex ) const;
+ VULKAN_HPP_NODISCARD
+ std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>, std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>>
+ enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex ) const;
VULKAN_HPP_NODISCARD uint32_t getQueueFamilyPerformanceQueryPassesKHR(
- const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo ) const
- VULKAN_HPP_NOEXCEPT;
+ const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_get_surface_capabilities2 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR
- getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const;
+ getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
- getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const;
+ getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>
- getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const;
+ getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const;
+
+ template <typename StructureChain>
+ VULKAN_HPP_NODISCARD std::vector<StructureChain> getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const;
//=== VK_KHR_get_display_properties2 ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR> getDisplayProperties2KHR() const;
- VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR>
- getDisplayPlaneProperties2KHR() const;
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR> getDisplayPlaneProperties2KHR() const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR
- getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo ) const;
+ getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo ) const;
//=== VK_EXT_sample_locations ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT
- getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples ) const VULKAN_HPP_NOEXCEPT;
+ getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_calibrated_timestamps ===
@@ -3130,8 +3100,7 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_fragment_shading_rate ===
- VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>
- getFragmentShadingRatesKHR() const;
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR> getFragmentShadingRatesKHR() const;
//=== VK_EXT_tooling_info ===
@@ -3139,27 +3108,24 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_cooperative_matrix ===
- VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV>
- getCooperativeMatrixPropertiesNV() const;
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV> getCooperativeMatrixPropertiesNV() const;
//=== VK_NV_coverage_reduction_mode ===
- VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV>
- getSupportedFramebufferMixedSamplesCombinationsNV() const;
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV> getSupportedFramebufferMixedSamplesCombinationsNV() const;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_EXT_full_screen_exclusive ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR>
- getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const;
+ getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_EXT_acquire_drm_display ===
void acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DisplayKHR getDrmDisplayEXT( int32_t drmFd,
- uint32_t connectorId ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DisplayKHR getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId ) const;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_NV_acquire_winrt_display ===
@@ -3170,18 +3136,22 @@ namespace VULKAN_HPP_NAMESPACE
# if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
//=== VK_EXT_directfb_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32
- getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex,
+ IDirectFB & dfb ) const VULKAN_HPP_NOEXCEPT;
# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
# if defined( VK_USE_PLATFORM_SCREEN_QNX )
//=== VK_QNX_screen_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32
- getScreenPresentationSupportQNX( uint32_t queueFamilyIndex,
- struct _screen_window & window ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getScreenPresentationSupportQNX( uint32_t queueFamilyIndex,
+ struct _screen_window & window ) const VULKAN_HPP_NOEXCEPT;
# endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+ //=== VK_NV_optical_flow ===
+
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>
+ getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo ) const;
+
private:
VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr;
@@ -3192,20 +3162,19 @@ namespace VULKAN_HPP_NAMESPACE
public:
PhysicalDevices( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance )
{
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * dispatcher =
- instance.getDispatcher();
- std::vector<VkPhysicalDevice> physicalDevices;
- uint32_t physicalDeviceCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * dispatcher = instance.getDispatcher();
+ std::vector<VkPhysicalDevice> physicalDevices;
+ uint32_t physicalDeviceCount;
+ VULKAN_HPP_NAMESPACE::Result result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkEnumeratePhysicalDevices(
- static_cast<VkInstance>( *instance ), &physicalDeviceCount, nullptr ) );
+ result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ dispatcher->vkEnumeratePhysicalDevices( static_cast<VkInstance>( *instance ), &physicalDeviceCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceCount )
{
physicalDevices.resize( physicalDeviceCount );
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkEnumeratePhysicalDevices(
- static_cast<VkInstance>( *instance ), &physicalDeviceCount, physicalDevices.data() ) );
+ result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ dispatcher->vkEnumeratePhysicalDevices( static_cast<VkInstance>( *instance ), &physicalDeviceCount, physicalDevices.data() ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
@@ -3223,6 +3192,8 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ PhysicalDevices( std::nullptr_t ) {}
+
PhysicalDevices() = delete;
PhysicalDevices( PhysicalDevices const & ) = delete;
PhysicalDevices( PhysicalDevices && rhs ) = default;
@@ -3235,8 +3206,7 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkDevice;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eDevice;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDevice;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice;
@@ -3246,18 +3216,17 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( physicalDevice.getDispatcher()->vkCreateDevice(
- static_cast<VkPhysicalDevice>( *physicalDevice ),
- reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkDevice *>( &m_device ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ physicalDevice.getDispatcher()->vkCreateDevice( static_cast<VkPhysicalDevice>( *physicalDevice ),
+ reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkDevice *>( &m_device ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateDevice" );
}
- m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher(
- physicalDevice.getDispatcher()->vkGetDeviceProcAddr, static_cast<VkDevice>( m_device ) ) );
+ m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher( physicalDevice.getDispatcher()->vkGetDeviceProcAddr,
+ static_cast<VkDevice>( m_device ) ) );
}
Device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice,
@@ -3265,19 +3234,15 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
{
- m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher(
- physicalDevice.getDispatcher()->vkGetDeviceProcAddr, static_cast<VkDevice>( m_device ) ) );
+ m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher( physicalDevice.getDispatcher()->vkGetDeviceProcAddr,
+ static_cast<VkDevice>( m_device ) ) );
}
Device( std::nullptr_t ) {}
~Device()
{
- if ( m_device )
- {
- getDispatcher()->vkDestroyDevice( static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
Device() = delete;
@@ -3286,17 +3251,14 @@ namespace VULKAN_HPP_NAMESPACE
: m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( rhs.m_dispatcher.release() )
- {}
+ {
+ }
Device & operator=( Device const & ) = delete;
Device & operator =( Device && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_device )
- {
- getDispatcher()->vkDestroyDevice( static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
m_dispatcher.reset( rhs.m_dispatcher.release() );
@@ -3309,263 +3271,266 @@ namespace VULKAN_HPP_NAMESPACE
return m_device;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_device )
+ {
+ getDispatcher()->vkDestroyDevice( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return &*m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
//=== VK_VERSION_1_0 ===
VULKAN_HPP_NODISCARD PFN_vkVoidFunction getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT;
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Queue getQueue( uint32_t queueFamilyIndex,
- uint32_t queueIndex ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Queue getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const;
void waitIdle() const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DeviceMemory allocateMemory(
- VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DeviceMemory
+ allocateMemory( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
- void
- flushMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges ) const;
+ void flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges ) const;
- void invalidateMappedMemoryRanges(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges ) const;
+ void invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges ) const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Fence createFence(
- VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Fence
+ createFence( VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
- void resetFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences ) const;
+ void resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences ) const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result
- waitForFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,
- VULKAN_HPP_NAMESPACE::Bool32 waitAll,
- uint64_t timeout ) const;
-
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Semaphore createSemaphore(
- VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
-
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Event createEvent(
- VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
-
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::QueryPool createQueryPool(
- VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
-
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Buffer createBuffer(
- VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
-
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::BufferView createBufferView(
- VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
-
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Image createImage(
- VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
-
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::ImageView createImageView(
- VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
-
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::ShaderModule createShaderModule(
- VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
-
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PipelineCache createPipelineCache(
- VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
-
- VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> createGraphicsPipelines(
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const &
- pipelineCache,
- VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
-
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline createGraphicsPipeline(
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const &
- pipelineCache,
- VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
-
- VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> createComputePipelines(
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const &
- pipelineCache,
- VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
-
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline createComputePipeline(
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const &
- pipelineCache,
- VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
-
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PipelineLayout createPipelineLayout(
- VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
-
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Sampler createSampler(
- VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
-
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout createDescriptorSetLayout(
- VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
-
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DescriptorPool createDescriptorPool(
- VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitForFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,
+ VULKAN_HPP_NAMESPACE::Bool32 waitAll,
+ uint64_t timeout ) const;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Semaphore
+ createSemaphore( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Event
+ createEvent( VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::QueryPool
+ createQueryPool( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Buffer
+ createBuffer( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::BufferView
+ createBufferView( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Image
+ createImage( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::ImageView
+ createImageView( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::ShaderModule
+ createShaderModule( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PipelineCache
+ createPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline>
+ createGraphicsPipelines( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline
+ createGraphicsPipeline( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline>
+ createComputePipelines( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline
+ createComputePipeline( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PipelineLayout
+ createPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Sampler
+ createSampler( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout
+ createDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DescriptorPool
+ createDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::DescriptorSet>
- allocateDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo ) const;
+ allocateDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo ) const;
- void updateDescriptorSets(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies ) const VULKAN_HPP_NOEXCEPT;
+ void updateDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies ) const
+ VULKAN_HPP_NOEXCEPT;
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Framebuffer createFramebuffer(
- VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Framebuffer
+ createFramebuffer( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::RenderPass createRenderPass(
- VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::RenderPass
+ createRenderPass( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::CommandPool createCommandPool(
- VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::CommandPool
+ createCommandPool( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::CommandBuffer>
- allocateCommandBuffers( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo ) const;
+ allocateCommandBuffers( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo ) const;
//=== VK_VERSION_1_1 ===
- void bindBufferMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos ) const;
+ void bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos ) const;
- void bindImageMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos ) const;
+ void bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos ) const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeatures(
- uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags
+ getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT;
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2(
- const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
+ getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getImageMemoryRequirements2(
- const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2(
- const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
+ getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getBufferMemoryRequirements2(
- const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>
- getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const
- VULKAN_HPP_NOEXCEPT;
+ getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Queue
- getQueue2( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Queue getQueue2( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo ) const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion createSamplerYcbcrConversion(
- VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion
+ createSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate createDescriptorUpdateTemplate(
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate
+ createDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport(
- const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
+ getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getDescriptorSetLayoutSupport(
- const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_VERSION_1_2 ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::RenderPass createRenderPass2(
- VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::RenderPass
+ createRenderPass2( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result
- waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout ) const;
void signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress
- getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT;
+ getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT;
- VULKAN_HPP_NODISCARD uint64_t getBufferOpaqueCaptureAddress(
- const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT;
- VULKAN_HPP_NODISCARD uint64_t getMemoryOpaqueCaptureAddress(
- const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD uint64_t
+ getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT;
//=== VK_VERSION_1_3 ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot createPrivateDataSlot(
- VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot
+ createPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
void setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
uint64_t objectHandle,
VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
uint64_t data ) const;
- VULKAN_HPP_NODISCARD uint64_t
- getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
- uint64_t objectHandle,
- VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD uint64_t getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
+ uint64_t objectHandle,
+ VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT;
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements(
- const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
+ getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getBufferMemoryRequirements(
- const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements(
- const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
+ getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getImageMemoryRequirements(
- const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>
- getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const
- VULKAN_HPP_NOEXCEPT;
+ getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const;
//=== VK_KHR_swapchain ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR createSwapchainKHR(
- VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR
+ createSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR
- getGroupPresentCapabilitiesKHR() const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR getGroupPresentCapabilitiesKHR() const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR
getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const;
VULKAN_HPP_NODISCARD std::pair<VULKAN_HPP_NAMESPACE::Result, uint32_t>
- acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo ) const;
+ acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo ) const;
//=== VK_KHR_display_swapchain ===
- VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR> createSharedSwapchainsKHR(
- VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR>
+ createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR createSharedSwapchainKHR(
- VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR
+ createSharedSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
//=== VK_EXT_debug_marker ===
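
    A brief usage sketch for the `vk::raii::Device` creation helpers declared above (createShaderModule, createDescriptorSetLayout, createPipelineLayout). It assumes an already-initialized `vk::raii::Device` named `device` and SPIR-V words in `spirv`; both names are illustrative and not part of this header, and instance/physical-device setup is omitted.

    #include <vulkan/vulkan_raii.hpp>

    #include <cstdint>
    #include <vector>

    // Sketch only: `device` and `spirv` are assumed to already exist.
    void createShaderResources( vk::raii::Device const & device, std::vector<uint32_t> const & spirv )
    {
      // RAII handle: the shader module is destroyed automatically when it goes out of scope.
      vk::ShaderModuleCreateInfo shaderInfo( {}, spirv.size() * sizeof( uint32_t ), spirv.data() );
      vk::raii::ShaderModule     shader = device.createShaderModule( shaderInfo );

      // Descriptor set layout with a single uniform-buffer binding at binding 0.
      vk::DescriptorSetLayoutBinding    binding( 0, vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex );
      vk::DescriptorSetLayoutCreateInfo layoutInfo( {}, binding );
      vk::raii::DescriptorSetLayout     setLayout = device.createDescriptorSetLayout( layoutInfo );

      // Pipeline layout referencing that one descriptor set layout.
      vk::PipelineLayoutCreateInfo pipelineLayoutInfo( {}, *setLayout );
      vk::raii::PipelineLayout     pipelineLayout = device.createPipelineLayout( pipelineLayoutInfo );
    }
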
@@ -3576,44 +3541,41 @@ namespace VULKAN_HPP_NAMESPACE
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_video_queue ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR createVideoSessionKHR(
- VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR
+ createVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR createVideoSessionParametersKHR(
- VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR
+ createVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_NVX_binary_import ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX createCuModuleNVX(
- VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX
+ createCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX createCuFunctionNVX(
- VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX
+ createCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
//=== VK_NVX_image_view_handle ===
- VULKAN_HPP_NODISCARD uint32_t
- getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_device_group ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR(
- uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags
+ getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_memory_win32 ===
- VULKAN_HPP_NODISCARD HANDLE
- getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo ) const;
+ VULKAN_HPP_NODISCARD HANDLE getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR
- getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- HANDLE handle ) const;
+ getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle ) const;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_memory_fd ===
@@ -3621,16 +3583,14 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD int getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR
- getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd ) const;
+ getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd ) const;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_semaphore_win32 ===
- void importSemaphoreWin32HandleKHR(
- const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo ) const;
+ void importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo ) const;
- VULKAN_HPP_NODISCARD HANDLE getSemaphoreWin32HandleKHR(
- const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo ) const;
+ VULKAN_HPP_NODISCARD HANDLE getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo ) const;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_semaphore_fd ===
@@ -3641,49 +3601,44 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_descriptor_update_template ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate createDescriptorUpdateTemplateKHR(
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate
+ createDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
- void destroyDescriptorUpdateTemplateKHR(
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_display_control ===
- void displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo ) const;
+ void displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo ) const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Fence registerEventEXT(
- VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Fence
+ registerEventEXT( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Fence registerDisplayEventEXT(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display,
- VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Fence
+ registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display,
+ VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
//=== VK_EXT_hdr_metadata ===
- void setHdrMetadataEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata ) const
- VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+ void setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata ) const;
//=== VK_KHR_create_renderpass2 ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::RenderPass createRenderPass2KHR(
- VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::RenderPass
+ createRenderPass2KHR( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_fence_win32 ===
- void importFenceWin32HandleKHR(
- const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo ) const;
+ void importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo ) const;
- VULKAN_HPP_NODISCARD HANDLE
- getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo ) const;
+ VULKAN_HPP_NODISCARD HANDLE getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo ) const;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_fence_fd ===
@@ -3711,112 +3666,105 @@ namespace VULKAN_HPP_NAMESPACE
getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const;
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
- getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const;
- VULKAN_HPP_NODISCARD struct AHardwareBuffer * getMemoryAndroidHardwareBufferANDROID(
- const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info ) const;
+ VULKAN_HPP_NODISCARD struct AHardwareBuffer *
+ getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info ) const;
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
//=== VK_KHR_get_memory_requirements2 ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2KHR(
- const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
+ getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getImageMemoryRequirements2KHR(
- const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2KHR(
- const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
+ getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getBufferMemoryRequirements2KHR(
- const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>
- getImageSparseMemoryRequirements2KHR(
- const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
+ getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const;
//=== VK_KHR_acceleration_structure ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR createAccelerationStructureKHR(
- VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR
+ createAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result buildAccelerationStructuresKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const &
- pBuildRangeInfos ) const;
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result
copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyAccelerationStructureToMemoryKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result
+ copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMemoryToAccelerationStructureKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result
+ copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const;
template <typename DataType>
VULKAN_HPP_NODISCARD std::vector<DataType> writeAccelerationStructuresPropertiesKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- size_t dataSize,
- size_t stride ) const;
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t dataSize,
+ size_t stride ) const;
template <typename DataType>
VULKAN_HPP_NODISCARD DataType writeAccelerationStructuresPropertyKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- size_t stride ) const;
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t stride ) const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress getAccelerationStructureAddressKHR(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress
+ getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
- getAccelerationStructureCompatibilityKHR(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo ) const VULKAN_HPP_NOEXCEPT;
-
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR
- getAccelerationStructureBuildSizesKHR(
- VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo,
- ArrayProxy<const uint32_t> const & maxPrimitiveCounts VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo ) const VULKAN_HPP_NOEXCEPT;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR getAccelerationStructureBuildSizesKHR(
+ VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & maxPrimitiveCounts VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
//=== VK_KHR_sampler_ycbcr_conversion ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion createSamplerYcbcrConversionKHR(
- VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion
+ createSamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
- void destroySamplerYcbcrConversionKHR(
- VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_bind_memory2 ===
- void bindBufferMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos ) const;
+ void bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos ) const;
- void bindImageMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos ) const;
+ void bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos ) const;
//=== VK_EXT_validation_cache ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT createValidationCacheEXT(
- VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT
+ createValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
//=== VK_NV_ray_tracing ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV createAccelerationStructureNV(
- VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV
+ createAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV(
const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT;
@@ -3826,126 +3774,116 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT;
void bindAccelerationStructureMemoryNV(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos ) const;
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos ) const;
- VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> createRayTracingPipelinesNV(
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const &
- pipelineCache,
- VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline>
+ createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline createRayTracingPipelineNV(
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const &
- pipelineCache,
- VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline
+ createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
//=== VK_KHR_maintenance3 ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR(
- const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
+ getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getDescriptorSetLayoutSupportKHR(
- const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_external_memory_host ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT
- getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- const void * pHostPointer ) const;
+ getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void * pHostPointer ) const;
//=== VK_EXT_calibrated_timestamps ===
- VULKAN_HPP_NODISCARD std::pair<std::vector<uint64_t>, uint64_t> getCalibratedTimestampsEXT(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos ) const;
+ VULKAN_HPP_NODISCARD std::pair<std::vector<uint64_t>, uint64_t>
+ getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos ) const;
VULKAN_HPP_NODISCARD std::pair<uint64_t, uint64_t>
- getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT & timestampInfo ) const;
+ getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT & timestampInfo ) const;
//=== VK_KHR_timeline_semaphore ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result
- waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout ) const;
void signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const;
//=== VK_INTEL_performance_query ===
- void initializePerformanceApiINTEL(
- const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo ) const;
+ void initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo ) const;
void uninitializePerformanceApiINTEL() const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL
- acquirePerformanceConfigurationINTEL(
- VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo ) const;
+ acquirePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PerformanceValueINTEL
- getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter ) const;
+ getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter ) const;
//=== VK_EXT_buffer_device_address ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress
- getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT;
+ getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_EXT_full_screen_exclusive ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR getGroupSurfacePresentModes2EXT(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR
+ getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_buffer_device_address ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress
- getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT;
+ getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT;
- VULKAN_HPP_NODISCARD uint64_t getBufferOpaqueCaptureAddressKHR(
- const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT;
- VULKAN_HPP_NODISCARD uint64_t getMemoryOpaqueCaptureAddressKHR(
- const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD uint64_t
+ getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_deferred_host_operations ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR createDeferredOperationKHR(
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR
+ createDeferredOperationKHR( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
//=== VK_KHR_pipeline_executable_properties ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>
- getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo ) const;
+ getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR>
- getPipelineExecutableStatisticsKHR(
- const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const;
+ getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>
- getPipelineExecutableInternalRepresentationsKHR(
- const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const;
+ getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const;
//=== VK_NV_device_generated_commands ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getGeneratedCommandsMemoryRequirementsNV(
- const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
+ getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getGeneratedCommandsMemoryRequirementsNV(
- const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT;
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV createIndirectCommandsLayoutNV(
- VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV
+ createIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
//=== VK_EXT_private_data ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot createPrivateDataSlotEXT(
- VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot
+ createPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
- void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot
- VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
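
    A small sketch for the buffer-device-address query declared above (getBufferAddressKHR). It assumes `device` is a `vk::raii::Device` and `buffer` a `vk::raii::Buffer` created with `eShaderDeviceAddress` usage on a device with the bufferDeviceAddress feature enabled; the names are illustrative.

    #include <vulkan/vulkan_raii.hpp>

    // Sketch only: `device` and `buffer` are assumed to satisfy the preconditions above.
    vk::DeviceAddress queryAddress( vk::raii::Device const & device, vk::raii::Buffer const & buffer )
    {
      vk::BufferDeviceAddressInfo addressInfo( *buffer );
      return device.getBufferAddressKHR( addressInfo );
    }
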
@@ -3954,86 +3892,155 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
uint64_t data ) const;
- VULKAN_HPP_NODISCARD uint64_t
- getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
- uint64_t objectHandle,
- VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD uint64_t getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
+ uint64_t objectHandle,
+ VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT;
+
+# if defined( VK_USE_PLATFORM_METAL_EXT )
+ //=== VK_EXT_metal_objects ===
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT exportMetalObjectsEXT() const VULKAN_HPP_NOEXCEPT;
+
+ template <typename X, typename Y, typename... Z>
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> exportMetalObjectsEXT() const VULKAN_HPP_NOEXCEPT;
+# endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+ //=== VK_EXT_device_fault ===
+
+ VULKAN_HPP_NODISCARD
+ std::pair<VULKAN_HPP_NAMESPACE::Result, std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>>
+ getFaultInfoEXT() const;
//=== VK_KHR_ray_tracing_pipeline ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> createRayTracingPipelinesKHR(
- VULKAN_HPP_NAMESPACE::Optional<
- const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const &
- pipelineCache,
- VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline createRayTracingPipelineKHR(
- VULKAN_HPP_NAMESPACE::Optional<
- const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const &
- pipelineCache,
- VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_external_memory ===
- VULKAN_HPP_NODISCARD zx_handle_t getMemoryZirconHandleFUCHSIA(
- const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const;
+ VULKAN_HPP_NODISCARD zx_handle_t getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA
- getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- zx_handle_t zirconHandle ) const;
+ getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle ) const;
# endif /*VK_USE_PLATFORM_FUCHSIA*/
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_external_semaphore ===
- void importSemaphoreZirconHandleFUCHSIA(
- const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo ) const;
+ void importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo ) const;
- VULKAN_HPP_NODISCARD zx_handle_t getSemaphoreZirconHandleFUCHSIA(
- const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const;
+ VULKAN_HPP_NODISCARD zx_handle_t
+ getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const;
# endif /*VK_USE_PLATFORM_FUCHSIA*/
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA createBufferCollectionFUCHSIA(
- VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA
+ createBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# endif /*VK_USE_PLATFORM_FUCHSIA*/
//=== VK_NV_external_memory_rdma ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::RemoteAddressNV getMemoryRemoteAddressNV(
- const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::RemoteAddressNV
+ getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo ) const;
+
+ //=== VK_EXT_pipeline_properties ===
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::BaseOutStructure getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo ) const;
+
+ //=== VK_EXT_opacity_micromap ===
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::MicromapEXT
+ createMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result
+ buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos ) const;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const;
+
+ template <typename DataType>
+ VULKAN_HPP_NODISCARD std::vector<DataType>
+ writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t dataSize,
+ size_t stride ) const;
+
+ template <typename DataType>
+ VULKAN_HPP_NODISCARD DataType writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t stride ) const;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
+ getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo ) const VULKAN_HPP_NOEXCEPT;
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT
+ getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
+ const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_maintenance4 ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirementsKHR(
- const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
+ getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getBufferMemoryRequirementsKHR(
- const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirementsKHR(
- const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
+ getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getImageMemoryRequirementsKHR(
- const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>
- getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const
- VULKAN_HPP_NOEXCEPT;
+ getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const;
+
+ //=== VK_VALVE_descriptor_set_host_mapping ===
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE getDescriptorSetLayoutHostMappingInfoVALVE(
+ const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference ) const VULKAN_HPP_NOEXCEPT;
+
+ //=== VK_EXT_shader_module_identifier ===
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT
+ getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT;
+
+ //=== VK_NV_optical_flow ===
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV
+ createOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+ //=== VK_QCOM_tile_properties ===
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::TilePropertiesQCOM
+ getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
std::unique_ptr<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher> m_dispatcher;
};
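
Illustrative sketch (not part of the patch): the VK_KHR_maintenance4-style query declared above returns memory requirements for a prospective buffer without creating it. The buffer size, usage flag, and the assumption that the device enables the maintenance4 functionality are placeholders for the example only.

#include <vulkan/vulkan_raii.hpp>

vk::MemoryRequirements2 queryBufferRequirements( vk::raii::Device const & device )
{
  // Placeholder create-info: 1024 bytes, transfer-src usage.
  vk::BufferCreateInfo bufferInfo( {}, 1024, vk::BufferUsageFlagBits::eTransferSrc );

  // The maintenance4 query shown in the declarations above; no VkBuffer is created.
  vk::DeviceBufferMemoryRequirements info{};
  info.pCreateInfo = &bufferInfo;
  return device.getBufferMemoryRequirementsKHR( info );
}
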
@@ -4042,81 +4049,65 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkAccelerationStructureKHR;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR;
public:
- AccelerationStructureKHR(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ AccelerationStructureKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateAccelerationStructureKHR(
- static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkAccelerationStructureKHR *>( &m_accelerationStructure ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ device.getDispatcher()->vkCreateAccelerationStructureKHR( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkAccelerationStructureKHR *>( &m_accelerationStructure ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateAccelerationStructureKHR" );
}
}
- AccelerationStructureKHR(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VkAccelerationStructureKHR accelerationStructure,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ AccelerationStructureKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VkAccelerationStructureKHR accelerationStructure,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_accelerationStructure( accelerationStructure )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
AccelerationStructureKHR( std::nullptr_t ) {}
~AccelerationStructureKHR()
{
- if ( m_accelerationStructure )
- {
- getDispatcher()->vkDestroyAccelerationStructureKHR(
- static_cast<VkDevice>( m_device ),
- static_cast<VkAccelerationStructureKHR>( m_accelerationStructure ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
AccelerationStructureKHR() = delete;
AccelerationStructureKHR( AccelerationStructureKHR const & ) = delete;
AccelerationStructureKHR( AccelerationStructureKHR && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
- , m_accelerationStructure(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_accelerationStructure, {} ) )
+ , m_accelerationStructure( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_accelerationStructure, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
AccelerationStructureKHR & operator=( AccelerationStructureKHR const & ) = delete;
- AccelerationStructureKHR & operator=( AccelerationStructureKHR && rhs ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureKHR & operator =( AccelerationStructureKHR && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_accelerationStructure )
- {
- getDispatcher()->vkDestroyAccelerationStructureKHR(
- static_cast<VkDevice>( m_device ),
- static_cast<VkAccelerationStructureKHR>( m_accelerationStructure ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
- m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
- m_accelerationStructure =
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_accelerationStructure, {} );
- m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
- m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+ clear();
+ m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+ m_accelerationStructure = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_accelerationStructure, {} );
+ m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+ m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
}
return *this;
}
@@ -4126,6 +4117,20 @@ namespace VULKAN_HPP_NAMESPACE
return m_accelerationStructure;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_accelerationStructure )
+ {
+ getDispatcher()->vkDestroyAccelerationStructureKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkAccelerationStructureKHR>( m_accelerationStructure ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_accelerationStructure = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -4137,10 +4142,18 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_accelerationStructure, rhs.m_accelerationStructure );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR m_accelerationStructure = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
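
Illustrative sketch (not part of the patch): the clear() and swap() members introduced above allow explicit early release and handle exchange; both arguments are assumed to be live RAII wrappers created on the same device.

void recycleAccelerationStructures( vk::raii::AccelerationStructureKHR & a, vk::raii::AccelerationStructureKHR & b )
{
  a.swap( b );   // exchange device, handle, allocator and dispatcher members
  b.clear();     // destroy b's handle now (what the destructor would do) and null its members
}
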
@@ -4149,81 +4162,65 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkAccelerationStructureNV;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureNV;
public:
- AccelerationStructureNV(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ AccelerationStructureNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateAccelerationStructureNV(
- static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkAccelerationStructureNV *>( &m_accelerationStructure ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ device.getDispatcher()->vkCreateAccelerationStructureNV( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkAccelerationStructureNV *>( &m_accelerationStructure ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateAccelerationStructureNV" );
}
}
- AccelerationStructureNV(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VkAccelerationStructureNV accelerationStructure,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ AccelerationStructureNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VkAccelerationStructureNV accelerationStructure,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_accelerationStructure( accelerationStructure )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
AccelerationStructureNV( std::nullptr_t ) {}
~AccelerationStructureNV()
{
- if ( m_accelerationStructure )
- {
- getDispatcher()->vkDestroyAccelerationStructureNV(
- static_cast<VkDevice>( m_device ),
- static_cast<VkAccelerationStructureNV>( m_accelerationStructure ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
AccelerationStructureNV() = delete;
AccelerationStructureNV( AccelerationStructureNV const & ) = delete;
AccelerationStructureNV( AccelerationStructureNV && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
- , m_accelerationStructure(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_accelerationStructure, {} ) )
+ , m_accelerationStructure( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_accelerationStructure, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
AccelerationStructureNV & operator=( AccelerationStructureNV const & ) = delete;
- AccelerationStructureNV & operator=( AccelerationStructureNV && rhs ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureNV & operator =( AccelerationStructureNV && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_accelerationStructure )
- {
- getDispatcher()->vkDestroyAccelerationStructureNV(
- static_cast<VkDevice>( m_device ),
- static_cast<VkAccelerationStructureNV>( m_accelerationStructure ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
- m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
- m_accelerationStructure =
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_accelerationStructure, {} );
- m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
- m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+ clear();
+ m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+ m_accelerationStructure = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_accelerationStructure, {} );
+ m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+ m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
}
return *this;
}
@@ -4233,6 +4230,20 @@ namespace VULKAN_HPP_NAMESPACE
return m_accelerationStructure;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_accelerationStructure )
+ {
+ getDispatcher()->vkDestroyAccelerationStructureNV( static_cast<VkDevice>( m_device ),
+ static_cast<VkAccelerationStructureNV>( m_accelerationStructure ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_accelerationStructure = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -4244,6 +4255,14 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_accelerationStructure, rhs.m_accelerationStructure );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
//=== VK_NV_ray_tracing ===
template <typename DataType>
@@ -4255,7 +4274,7 @@ namespace VULKAN_HPP_NAMESPACE
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureNV m_accelerationStructure = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
@@ -4264,8 +4283,7 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkBuffer;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eBuffer;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBuffer;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer;
@@ -4277,11 +4295,11 @@ namespace VULKAN_HPP_NAMESPACE
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- device.getDispatcher()->vkCreateBuffer( static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkBuffer *>( &m_buffer ) ) );
+ VULKAN_HPP_NAMESPACE::Result result =
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateBuffer( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkBuffer *>( &m_buffer ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateBuffer" );
@@ -4295,18 +4313,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_buffer( buffer )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
Buffer( std::nullptr_t ) {}
~Buffer()
{
- if ( m_buffer )
- {
- getDispatcher()->vkDestroyBuffer( static_cast<VkDevice>( m_device ),
- static_cast<VkBuffer>( m_buffer ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
Buffer() = delete;
@@ -4316,18 +4330,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_buffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_buffer, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
Buffer & operator=( Buffer const & ) = delete;
Buffer & operator =( Buffer && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_buffer )
- {
- getDispatcher()->vkDestroyBuffer( static_cast<VkDevice>( m_device ),
- static_cast<VkBuffer>( m_buffer ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_buffer = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_buffer, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
@@ -4341,6 +4351,19 @@ namespace VULKAN_HPP_NAMESPACE
return m_buffer;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_buffer )
+ {
+ getDispatcher()->vkDestroyBuffer(
+ static_cast<VkDevice>( m_device ), static_cast<VkBuffer>( m_buffer ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_buffer = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -4352,6 +4375,14 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Buffer & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_buffer, rhs.m_buffer );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
//=== VK_VERSION_1_0 ===
void bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const;
@@ -4361,7 +4392,7 @@ namespace VULKAN_HPP_NAMESPACE
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::Buffer m_buffer = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
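
Illustrative sketch (not part of the patch): binding device memory to a RAII buffer through the bindMemory() declaration above. The allocation size and memoryTypeIndex of 0 are placeholder assumptions; a real application sizes the allocation from the buffer's memory requirements and picks a suitable memory type.

void createAndBindBuffer( vk::raii::Device const & device )
{
  vk::BufferCreateInfo bufferInfo( {}, 4096, vk::BufferUsageFlagBits::eUniformBuffer );
  vk::raii::Buffer     buffer( device, bufferInfo );               // throws vk::SystemError on failure

  vk::MemoryAllocateInfo allocInfo( 4096, /* memoryTypeIndex (placeholder) */ 0 );
  vk::raii::DeviceMemory memory = device.allocateMemory( allocInfo );

  buffer.bindMemory( *memory, 0 );                                 // vkBindBufferMemory at offset 0
}
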
@@ -4371,53 +4402,44 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkBufferCollectionFUCHSIA;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eBufferCollectionFUCHSIA;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferCollectionFUCHSIA;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA;
public:
- BufferCollectionFUCHSIA(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ BufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateBufferCollectionFUCHSIA(
- static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkBufferCollectionFUCHSIA *>( &m_collection ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ device.getDispatcher()->vkCreateBufferCollectionFUCHSIA( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkBufferCollectionFUCHSIA *>( &m_collection ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateBufferCollectionFUCHSIA" );
}
}
- BufferCollectionFUCHSIA(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VkBufferCollectionFUCHSIA collection,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ BufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VkBufferCollectionFUCHSIA collection,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_collection( collection )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
BufferCollectionFUCHSIA( std::nullptr_t ) {}
~BufferCollectionFUCHSIA()
{
- if ( m_collection )
- {
- getDispatcher()->vkDestroyBufferCollectionFUCHSIA(
- static_cast<VkDevice>( m_device ),
- static_cast<VkBufferCollectionFUCHSIA>( m_collection ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
BufferCollectionFUCHSIA() = delete;
@@ -4427,19 +4449,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_collection( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_collection, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
BufferCollectionFUCHSIA & operator=( BufferCollectionFUCHSIA const & ) = delete;
- BufferCollectionFUCHSIA & operator=( BufferCollectionFUCHSIA && rhs ) VULKAN_HPP_NOEXCEPT
+ BufferCollectionFUCHSIA & operator =( BufferCollectionFUCHSIA && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_collection )
- {
- getDispatcher()->vkDestroyBufferCollectionFUCHSIA(
- static_cast<VkDevice>( m_device ),
- static_cast<VkBufferCollectionFUCHSIA>( m_collection ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_collection = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_collection, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
@@ -4453,6 +4470,20 @@ namespace VULKAN_HPP_NAMESPACE
return m_collection;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_collection )
+ {
+ getDispatcher()->vkDestroyBufferCollectionFUCHSIA( static_cast<VkDevice>( m_device ),
+ static_cast<VkBufferCollectionFUCHSIA>( m_collection ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_collection = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -4464,19 +4495,26 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_collection, rhs.m_collection );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
//=== VK_FUCHSIA_buffer_collection ===
void setImageConstraints( const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo ) const;
- void
- setBufferConstraints( const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo ) const;
+ void setBufferConstraints( const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA getProperties() const;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA m_collection = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
# endif /*VK_USE_PLATFORM_FUCHSIA*/
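
Illustrative sketch (not part of the patch), meaningful only when building with VK_USE_PLATFORM_FUCHSIA: querying a buffer collection after its constraints have been set, using the getProperties() declaration above. The bufferCount member access assumes the field mirrors VkBufferCollectionPropertiesFUCHSIA.

# if defined( VK_USE_PLATFORM_FUCHSIA )
uint32_t countCollectionBuffers( vk::raii::BufferCollectionFUCHSIA const & collection )
{
  // getProperties() wraps vkGetBufferCollectionPropertiesFUCHSIA and throws on failure.
  vk::BufferCollectionPropertiesFUCHSIA properties = collection.getProperties();
  return properties.bufferCount;
}
# endif /*VK_USE_PLATFORM_FUCHSIA*/
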
@@ -4486,8 +4524,7 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkBufferView;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eBufferView;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferView;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView;
@@ -4517,18 +4554,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_bufferView( bufferView )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
BufferView( std::nullptr_t ) {}
~BufferView()
{
- if ( m_bufferView )
- {
- getDispatcher()->vkDestroyBufferView( static_cast<VkDevice>( m_device ),
- static_cast<VkBufferView>( m_bufferView ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
BufferView() = delete;
@@ -4538,18 +4571,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_bufferView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_bufferView, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
BufferView & operator=( BufferView const & ) = delete;
BufferView & operator =( BufferView && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_bufferView )
- {
- getDispatcher()->vkDestroyBufferView( static_cast<VkDevice>( m_device ),
- static_cast<VkBufferView>( m_bufferView ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_bufferView = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_bufferView, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
@@ -4563,6 +4592,19 @@ namespace VULKAN_HPP_NAMESPACE
return m_bufferView;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_bufferView )
+ {
+ getDispatcher()->vkDestroyBufferView(
+ static_cast<VkDevice>( m_device ), static_cast<VkBufferView>( m_bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_bufferView = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -4574,10 +4616,18 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::BufferView & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_bufferView, rhs.m_bufferView );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::BufferView m_bufferView = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
@@ -4586,8 +4636,7 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkCommandPool;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool;
@@ -4617,18 +4666,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_commandPool( commandPool )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
CommandPool( std::nullptr_t ) {}
~CommandPool()
{
- if ( m_commandPool )
- {
- getDispatcher()->vkDestroyCommandPool( static_cast<VkDevice>( m_device ),
- static_cast<VkCommandPool>( m_commandPool ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
CommandPool() = delete;
@@ -4638,18 +4683,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_commandPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandPool, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
CommandPool & operator=( CommandPool const & ) = delete;
CommandPool & operator =( CommandPool && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_commandPool )
- {
- getDispatcher()->vkDestroyCommandPool( static_cast<VkDevice>( m_device ),
- static_cast<VkCommandPool>( m_commandPool ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_commandPool = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandPool, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
@@ -4663,6 +4704,19 @@ namespace VULKAN_HPP_NAMESPACE
return m_commandPool;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_commandPool )
+ {
+ getDispatcher()->vkDestroyCommandPool(
+ static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( m_commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_commandPool = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -4674,24 +4728,30 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandPool & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_commandPool, rhs.m_commandPool );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
//=== VK_VERSION_1_0 ===
void reset( VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
//=== VK_VERSION_1_1 ===
- void trim( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ void trim( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_maintenance1 ===
- void trimKHR( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ void trimKHR( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::CommandPool m_commandPool = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
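
Illustrative sketch (not part of the patch): the pool-level housekeeping calls declared above. reset() returns every command buffer allocated from the pool to the initial state; trim() (Vulkan 1.1, also exposed as trimKHR via VK_KHR_maintenance1) releases unused internal pool memory back to the system.

void recycleCommandPool( vk::raii::CommandPool const & pool )
{
  pool.reset();   // vkResetCommandPool with default flags; throws vk::SystemError on failure
  pool.trim();    // vkTrimCommandPool; noexcept, purely a memory hint to the driver
}
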
@@ -4700,32 +4760,21 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkCommandBuffer;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer;
public:
- CommandBuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VkCommandBuffer commandBuffer,
- VkCommandPool commandPool )
- : m_device( *device )
- , m_commandPool( commandPool )
- , m_commandBuffer( commandBuffer )
- , m_dispatcher( device.getDispatcher() )
- {}
+ CommandBuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkCommandBuffer commandBuffer, VkCommandPool commandPool )
+ : m_device( *device ), m_commandPool( commandPool ), m_commandBuffer( commandBuffer ), m_dispatcher( device.getDispatcher() )
+ {
+ }
CommandBuffer( std::nullptr_t ) {}
~CommandBuffer()
{
- if ( m_commandBuffer )
- {
- getDispatcher()->vkFreeCommandBuffers( static_cast<VkDevice>( m_device ),
- static_cast<VkCommandPool>( m_commandPool ),
- 1,
- reinterpret_cast<VkCommandBuffer const *>( &m_commandBuffer ) );
- }
+ clear();
}
CommandBuffer() = delete;
@@ -4735,19 +4784,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_commandPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandPool, {} ) )
, m_commandBuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandBuffer, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
CommandBuffer & operator=( CommandBuffer const & ) = delete;
CommandBuffer & operator =( CommandBuffer && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_commandBuffer )
- {
- getDispatcher()->vkFreeCommandBuffers( static_cast<VkDevice>( m_device ),
- static_cast<VkCommandPool>( m_commandPool ),
- 1,
- reinterpret_cast<VkCommandBuffer const *>( &m_commandBuffer ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_commandPool = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandPool, {} );
m_commandBuffer = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandBuffer, {} );
@@ -4761,6 +4805,19 @@ namespace VULKAN_HPP_NAMESPACE
return m_commandBuffer;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_commandBuffer )
+ {
+ getDispatcher()->vkFreeCommandBuffers(
+ static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( m_commandPool ), 1, reinterpret_cast<VkCommandBuffer const *>( &m_commandBuffer ) );
+ }
+ m_device = nullptr;
+ m_commandPool = nullptr;
+ m_commandBuffer = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -4772,6 +4829,14 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandBuffer & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_commandPool, rhs.m_commandPool );
+ std::swap( m_commandBuffer, rhs.m_commandBuffer );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
//=== VK_VERSION_1_0 ===
void begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo ) const;
@@ -4780,59 +4845,45 @@ namespace VULKAN_HPP_NAMESPACE
void reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
- void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
- VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT;
+ void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT;
- void setViewport( uint32_t firstViewport,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT;
+ void setViewport( uint32_t firstViewport,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT;
- void setScissor( uint32_t firstScissor,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT;
+ void setScissor( uint32_t firstScissor, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT;
void setLineWidth( float lineWidth ) const VULKAN_HPP_NOEXCEPT;
- void setDepthBias( float depthBiasConstantFactor,
- float depthBiasClamp,
- float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT;
+ void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT;
void setBlendConstants( const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT;
void setDepthBounds( float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT;
- void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
- uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT;
+ void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT;
- void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
- uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT;
+ void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT;
- void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
- uint32_t reference ) const VULKAN_HPP_NOEXCEPT;
+ void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT;
- void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
- VULKAN_HPP_NAMESPACE::PipelineLayout layout,
- uint32_t firstSet,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
- ArrayProxy<const uint32_t> const & dynamicOffsets ) const VULKAN_HPP_NOEXCEPT;
+ void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+ uint32_t firstSet,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & dynamicOffsets ) const VULKAN_HPP_NOEXCEPT;
void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT;
- void bindVertexBuffers( uint32_t firstBinding,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets ) const
- VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+ void bindVertexBuffers( uint32_t firstBinding,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets ) const;
- void draw( uint32_t vertexCount,
- uint32_t instanceCount,
- uint32_t firstVertex,
- uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT;
+ void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT;
- void drawIndexed( uint32_t indexCount,
- uint32_t instanceCount,
- uint32_t firstIndex,
- int32_t vertexOffset,
- uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT;
+ void
+ drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT;
void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
@@ -4846,124 +4897,111 @@ namespace VULKAN_HPP_NAMESPACE
void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT;
- void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset ) const VULKAN_HPP_NOEXCEPT;
-
- void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
- VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions ) const VULKAN_HPP_NOEXCEPT;
-
- void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions ) const VULKAN_HPP_NOEXCEPT;
-
- void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions,
- VULKAN_HPP_NAMESPACE::Filter filter ) const VULKAN_HPP_NOEXCEPT;
-
- void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions ) const
- VULKAN_HPP_NOEXCEPT;
+ void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset ) const VULKAN_HPP_NOEXCEPT;
- void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions ) const
- VULKAN_HPP_NOEXCEPT;
+ void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions ) const VULKAN_HPP_NOEXCEPT;
+
+ void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions ) const VULKAN_HPP_NOEXCEPT;
+
+ void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions,
+ VULKAN_HPP_NAMESPACE::Filter filter ) const VULKAN_HPP_NOEXCEPT;
+
+ void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions ) const VULKAN_HPP_NOEXCEPT;
+
+ void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions ) const VULKAN_HPP_NOEXCEPT;
template <typename DataType>
- void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
- ArrayProxy<const DataType> const & data ) const VULKAN_HPP_NOEXCEPT;
+ void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const DataType> const & data ) const VULKAN_HPP_NOEXCEPT;
void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
VULKAN_HPP_NAMESPACE::DeviceSize size,
uint32_t data ) const VULKAN_HPP_NOEXCEPT;
- void clearColorImage( VULKAN_HPP_NAMESPACE::Image image,
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
- const VULKAN_HPP_NAMESPACE::ClearColorValue & color,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges ) const
- VULKAN_HPP_NOEXCEPT;
-
- void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image,
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
- const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges ) const
- VULKAN_HPP_NOEXCEPT;
-
- void
- clearAttachments( ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects ) const VULKAN_HPP_NOEXCEPT;
+ void clearColorImage( VULKAN_HPP_NAMESPACE::Image image,
+ VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
+ const VULKAN_HPP_NAMESPACE::ClearColorValue & color,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges ) const VULKAN_HPP_NOEXCEPT;
void
- resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions ) const VULKAN_HPP_NOEXCEPT;
-
- void setEvent( VULKAN_HPP_NAMESPACE::Event event,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask
- VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
- void resetEvent( VULKAN_HPP_NAMESPACE::Event event,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask
- VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
- void waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers ) const
+ clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image,
+ VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
+ const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges ) const VULKAN_HPP_NOEXCEPT;
+
+ void clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects ) const VULKAN_HPP_NOEXCEPT;
+
+ void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions ) const VULKAN_HPP_NOEXCEPT;
+
+ void setEvent( VULKAN_HPP_NAMESPACE::Event event,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ void resetEvent( VULKAN_HPP_NAMESPACE::Event event,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ void waitEvents( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT;
+
+ void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
+ VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers ) const
VULKAN_HPP_NOEXCEPT;
- void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
- VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers )
- const VULKAN_HPP_NOEXCEPT;
-
- void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t query,
- VULKAN_HPP_NAMESPACE::QueryControlFlags flags
- VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t query,
+ VULKAN_HPP_NAMESPACE::QueryControlFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT;
- void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT;
+ void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT;
void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t query ) const VULKAN_HPP_NOEXCEPT;
- void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
- VULKAN_HPP_NAMESPACE::DeviceSize stride,
- VULKAN_HPP_NAMESPACE::QueryResultFlags flags
- VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ uint32_t queryCount,
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
+ VULKAN_HPP_NAMESPACE::DeviceSize stride,
+ VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename ValuesType>
- void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout,
- VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
- uint32_t offset,
- ArrayProxy<const ValuesType> const & values ) const VULKAN_HPP_NOEXCEPT;
+ void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
+ uint32_t offset,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const ValuesType> const & values ) const VULKAN_HPP_NOEXCEPT;
void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
VULKAN_HPP_NAMESPACE::SubpassContents contents ) const VULKAN_HPP_NOEXCEPT;
@@ -4972,19 +5010,14 @@ namespace VULKAN_HPP_NAMESPACE
void endRenderPass() const VULKAN_HPP_NOEXCEPT;
- void executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers ) const
- VULKAN_HPP_NOEXCEPT;
+ void executeCommands( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers ) const VULKAN_HPP_NOEXCEPT;
//=== VK_VERSION_1_1 ===
void setDeviceMask( uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT;
- void dispatchBase( uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT;
+ void dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const
+ VULKAN_HPP_NOEXCEPT;
//=== VK_VERSION_1_2 ===
@@ -5002,9 +5035,8 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t maxDrawCount,
uint32_t stride ) const VULKAN_HPP_NOEXCEPT;
- void
- beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
- const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT;
+ void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
+ const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT;
void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT;
@@ -5013,32 +5045,26 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_VERSION_1_3 ===
- void setEvent2( VULKAN_HPP_NAMESPACE::Event event,
- const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT;
+ void setEvent2( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT;
- void resetEvent2( VULKAN_HPP_NAMESPACE::Event event,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask
- VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void resetEvent2( VULKAN_HPP_NAMESPACE::Event event,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
- void waitEvents2( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos ) const
- VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+ void waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos ) const;
void pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT;
- void writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
- VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t query ) const VULKAN_HPP_NOEXCEPT;
+ void
+ writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT;
void copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT;
void copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const VULKAN_HPP_NOEXCEPT;
- void copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const
- VULKAN_HPP_NOEXCEPT;
+ void copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT;
- void copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const
- VULKAN_HPP_NOEXCEPT;
+ void copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT;
void blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT;
@@ -5048,26 +5074,22 @@ namespace VULKAN_HPP_NAMESPACE
void endRendering() const VULKAN_HPP_NOEXCEPT;
- void setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ void setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
void setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT;
void setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT;
- void setViewportWithCount( ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const
- VULKAN_HPP_NOEXCEPT;
+ void setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT;
- void setScissorWithCount( ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const
- VULKAN_HPP_NOEXCEPT;
+ void setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT;
void bindVertexBuffers2(
- uint32_t firstBinding,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+ uint32_t firstBinding,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
void setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT;
@@ -5093,62 +5115,52 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_debug_marker ===
- void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const
- VULKAN_HPP_NOEXCEPT;
+ void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT;
void debugMarkerEndEXT() const VULKAN_HPP_NOEXCEPT;
- void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const
- VULKAN_HPP_NOEXCEPT;
+ void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT;
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_video_queue ===
- void beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo ) const
- VULKAN_HPP_NOEXCEPT;
+ void beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo ) const VULKAN_HPP_NOEXCEPT;
- void endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo ) const
- VULKAN_HPP_NOEXCEPT;
+ void endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo ) const VULKAN_HPP_NOEXCEPT;
- void controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo ) const
- VULKAN_HPP_NOEXCEPT;
+ void controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo ) const VULKAN_HPP_NOEXCEPT;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_video_decode_queue ===
- void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & frameInfo ) const VULKAN_HPP_NOEXCEPT;
+ void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo ) const VULKAN_HPP_NOEXCEPT;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_EXT_transform_feedback ===
- void bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
-
- void beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
-
- void endTransformFeedbackEXT( uint32_t firstCounterBuffer,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+ void bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
+
+ void beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
+
+ void endTransformFeedbackEXT( uint32_t firstCounterBuffer,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets
+ VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t query,
VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
uint32_t index ) const VULKAN_HPP_NOEXCEPT;
- void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t query,
- uint32_t index ) const VULKAN_HPP_NOEXCEPT;
+ void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT;
void drawIndirectByteCountEXT( uint32_t instanceCount,
uint32_t firstInstance,
@@ -5187,51 +5199,44 @@ namespace VULKAN_HPP_NAMESPACE
void setDeviceMaskKHR( uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT;
- void dispatchBaseKHR( uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT;
+ void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ )
+ const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_push_descriptor ===
- void pushDescriptorSetKHR(
- VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
- VULKAN_HPP_NAMESPACE::PipelineLayout layout,
- uint32_t set,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites ) const VULKAN_HPP_NOEXCEPT;
+ void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+ uint32_t set,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites ) const
+ VULKAN_HPP_NOEXCEPT;
template <typename DataType>
void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
VULKAN_HPP_NAMESPACE::PipelineLayout layout,
uint32_t set,
- DataType const & data ) const VULKAN_HPP_NOEXCEPT;
+ DataType const & data ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_conditional_rendering ===
- void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT &
- conditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT;
+ void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT;
void endConditionalRenderingEXT() const VULKAN_HPP_NOEXCEPT;
//=== VK_NV_clip_space_w_scaling ===
- void setViewportWScalingNV( uint32_t firstViewport,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings )
- const VULKAN_HPP_NOEXCEPT;
+ void setViewportWScalingNV( uint32_t firstViewport,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings ) const
+ VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_discard_rectangles ===
- void setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles ) const
- VULKAN_HPP_NOEXCEPT;
+ void setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_create_renderpass2 ===
void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
- const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const
- VULKAN_HPP_NOEXCEPT;
+ const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT;
void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT;
@@ -5240,59 +5245,51 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_debug_utils ===
- void
- beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT;
+ void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT;
void endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT;
- void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const
- VULKAN_HPP_NOEXCEPT;
+ void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_sample_locations ===
- void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo ) const
- VULKAN_HPP_NOEXCEPT;
+ void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_acceleration_structure ===
void buildAccelerationStructuresKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const &
- pBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos ) const;
void buildAccelerationStructuresIndirectKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses,
- ArrayProxy<const uint32_t> const & indirectStrides,
- ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & indirectStrides,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts ) const;
- void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const
- VULKAN_HPP_NOEXCEPT;
+ void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const VULKAN_HPP_NOEXCEPT;
- void copyAccelerationStructureToMemoryKHR(
- const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const VULKAN_HPP_NOEXCEPT;
+ void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const VULKAN_HPP_NOEXCEPT;
- void copyMemoryToAccelerationStructureKHR(
- const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const VULKAN_HPP_NOEXCEPT;
+ void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const VULKAN_HPP_NOEXCEPT;
void writeAccelerationStructuresPropertiesKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT;
//=== VK_NV_shading_rate_image ===
- void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView,
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT;
+ void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT;
- void setViewportShadingRatePaletteNV( uint32_t firstViewport,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const &
- shadingRatePalettes ) const VULKAN_HPP_NOEXCEPT;
+ void setViewportShadingRatePaletteNV(
+ uint32_t firstViewport,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes ) const VULKAN_HPP_NOEXCEPT;
- void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const &
- customSampleOrders ) const VULKAN_HPP_NOEXCEPT;
+ void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders ) const
+ VULKAN_HPP_NOEXCEPT;
//=== VK_NV_ray_tracing ===
@@ -5303,12 +5300,11 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
VULKAN_HPP_NAMESPACE::Buffer scratch,
- VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT;
void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
- VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode ) const
- VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT;
void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset,
@@ -5326,10 +5322,10 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t depth ) const VULKAN_HPP_NOEXCEPT;
void writeAccelerationStructuresPropertiesNV(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_draw_indirect_count ===
@@ -5372,9 +5368,8 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_scissor_exclusive ===
- void setExclusiveScissorNV( uint32_t firstExclusiveScissor,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors ) const
- VULKAN_HPP_NOEXCEPT;
+ void setExclusiveScissorNV( uint32_t firstExclusiveScissor,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors ) const VULKAN_HPP_NOEXCEPT;
//=== VK_NV_device_diagnostic_checkpoints ===
@@ -5385,16 +5380,14 @@ namespace VULKAN_HPP_NAMESPACE
void setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo ) const;
- void setPerformanceStreamMarkerINTEL(
- const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo ) const;
+ void setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo ) const;
void setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo ) const;
//=== VK_KHR_fragment_shading_rate ===
- void setFragmentShadingRateKHR(
- const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize,
- const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT;
+ void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize,
+ const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_line_rasterization ===
@@ -5402,27 +5395,22 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_extended_dynamic_state ===
- void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT;
- void
- setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT;
+ void setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT;
- void setViewportWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const
- VULKAN_HPP_NOEXCEPT;
+ void setViewportWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT;
- void setScissorWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const
- VULKAN_HPP_NOEXCEPT;
+ void setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT;
void bindVertexBuffers2EXT(
- uint32_t firstBinding,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+ uint32_t firstBinding,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT;
@@ -5442,12 +5430,10 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_device_generated_commands ===
- void preprocessGeneratedCommandsNV(
- const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT;
+ void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT;
- void executeGeneratedCommandsNV(
- VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
- const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT;
+ void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
+ const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT;
void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
VULKAN_HPP_NAMESPACE::Pipeline pipeline,
@@ -5461,16 +5447,13 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_synchronization2 ===
- void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
- const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT;
+ void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT;
- void resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask
- VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
- void waitEvents2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos ) const
- VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+ void waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos ) const;
void pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT;
@@ -5485,9 +5468,24 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_fragment_shading_rate_enums ===
- void setFragmentShadingRateEnumNV(
- VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate,
- const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT;
+ void setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate,
+ const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT;
+
+ //=== VK_EXT_mesh_shader ===
+
+ void drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT;
+
+ void drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ uint32_t drawCount,
+ uint32_t stride ) const VULKAN_HPP_NOEXCEPT;
+
+ void drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+ uint32_t maxDrawCount,
+ uint32_t stride ) const VULKAN_HPP_NOEXCEPT;
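A minimal, hypothetical usage sketch for the VK_EXT_mesh_shader entry points declared just above. It assumes the default vk / vk::raii namespace macros, a device created with the extension enabled, and a vk::raii::CommandBuffer named cmd that is already in the recording state; the wrapper function recordMeshDraw is illustrative only, not part of the header.

    #include <vulkan/vulkan_raii.hpp>

    void recordMeshDraw( vk::raii::CommandBuffer const & cmd )
    {
      // Launch a single workgroup of mesh-shader tasks; the arguments map to
      // drawMeshTasksEXT( groupCountX, groupCountY, groupCountZ ) as declared above.
      cmd.drawMeshTasksEXT( 1, 1, 1 );
    }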
//=== VK_KHR_copy_commands2 ===
@@ -5495,16 +5493,13 @@ namespace VULKAN_HPP_NAMESPACE
void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const VULKAN_HPP_NOEXCEPT;
- void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const
- VULKAN_HPP_NOEXCEPT;
+ void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT;
- void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const
- VULKAN_HPP_NOEXCEPT;
+ void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT;
void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT;
- void
- resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT;
+ void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_ray_tracing_pipeline ===
@@ -5520,16 +5515,15 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
- VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT;
void setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_vertex_input_dynamic_state ===
- void setVertexInputEXT(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const &
- vertexAttributeDescriptions ) const VULKAN_HPP_NOEXCEPT;
+ void setVertexInputEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const &
+ vertexAttributeDescriptions ) const VULKAN_HPP_NOEXCEPT;
//=== VK_HUAWEI_subpass_shading ===
@@ -5537,41 +5531,133 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_HUAWEI_invocation_mask ===
- void bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView,
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT;
+ void bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_extended_dynamic_state2 ===
void setPatchControlPointsEXT( uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT;
- void
- setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT;
+ void setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT;
void setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT;
void setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp ) const VULKAN_HPP_NOEXCEPT;
- void
- setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT;
+ void setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_color_write_enable ===
- void setColorWriteEnableEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables ) const
- VULKAN_HPP_NOEXCEPT;
+ void setColorWriteEnableEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables ) const VULKAN_HPP_NOEXCEPT;
+
+ //=== VK_KHR_ray_tracing_maintenance1 ===
+
+ void traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_multi_draw ===
- void drawMultiEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo,
- uint32_t instanceCount,
- uint32_t firstInstance,
- uint32_t stride ) const VULKAN_HPP_NOEXCEPT;
+ void drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo,
+ uint32_t instanceCount,
+ uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT;
+
+ void drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo,
+ uint32_t instanceCount,
+ uint32_t firstInstance,
+ Optional<const int32_t> vertexOffset VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ //=== VK_EXT_opacity_micromap ===
+
+ void buildMicromapsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos ) const VULKAN_HPP_NOEXCEPT;
+
+ void copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const VULKAN_HPP_NOEXCEPT;
+
+ void copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const VULKAN_HPP_NOEXCEPT;
+
+ void copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const VULKAN_HPP_NOEXCEPT;
+
+ void writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT;
+
+ //=== VK_EXT_extended_dynamic_state3 ===
+
+ void setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT;
- void drawMultiIndexedEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo,
- uint32_t instanceCount,
- uint32_t firstInstance,
- uint32_t stride,
- Optional<const int32_t> vertexOffset
- VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT;
+
+ void setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode ) const VULKAN_HPP_NOEXCEPT;
+
+ void setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT;
+
+ void setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask ) const VULKAN_HPP_NOEXCEPT;
+
+ void setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT;
+
+ void setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT;
+
+ void setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable ) const VULKAN_HPP_NOEXCEPT;
+
+ void setColorBlendEnableEXT( uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables ) const VULKAN_HPP_NOEXCEPT;
+
+ void setColorBlendEquationEXT( uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations ) const
+ VULKAN_HPP_NOEXCEPT;
+
+ void setColorWriteMaskEXT( uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> const & colorWriteMasks ) const
+ VULKAN_HPP_NOEXCEPT;
+
+ void setRasterizationStreamEXT( uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT;
+
+ void
+ setConservativeRasterizationModeEXT( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT;
+
+ void setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT;
+
+ void setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT;
+
+ void setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT;
+
+ void setColorBlendAdvancedEXT( uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT> const & colorBlendAdvanced ) const
+ VULKAN_HPP_NOEXCEPT;
+
+ void setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT;
+
+ void setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT;
+
+ void setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT;
+
+ void setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT;
+
+ void setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT;
+
+ void setViewportSwizzleNV( uint32_t firstViewport,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles ) const
+ VULKAN_HPP_NOEXCEPT;
+
+ void setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT;
+
+ void setCoverageToColorLocationNV( uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT;
+
+ void setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT;
+
+ void setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT;
+
+ void setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable ) const VULKAN_HPP_NOEXCEPT;
+
+ void setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT;
+
+ void setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT;
+
+ void setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode ) const VULKAN_HPP_NOEXCEPT;
+
+ //=== VK_NV_optical_flow ===
+
+ void opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo ) const VULKAN_HPP_NOEXCEPT;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
@@ -5587,11 +5673,9 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo )
{
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
- std::vector<VkCommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- dispatcher->vkAllocateCommandBuffers( static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ),
- commandBuffers.data() ) );
+ std::vector<VkCommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkAllocateCommandBuffers(
+ static_cast<VkDevice>( *device ), reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), commandBuffers.data() ) );
if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
this->reserve( allocateInfo.commandBufferCount );
@@ -5606,6 +5690,8 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ CommandBuffers( std::nullptr_t ) {}
+
CommandBuffers() = delete;
CommandBuffers( CommandBuffers const & ) = delete;
CommandBuffers( CommandBuffers && rhs ) = default;
@@ -5618,52 +5704,44 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkCuFunctionNVX;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eCuFunctionNVX;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCuFunctionNVX;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuFunctionNVX;
public:
- CuFunctionNVX(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ CuFunctionNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateCuFunctionNVX(
- static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkCuFunctionNVX *>( &m_function ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ device.getDispatcher()->vkCreateCuFunctionNVX( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkCuFunctionNVX *>( &m_function ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateCuFunctionNVX" );
}
}
- CuFunctionNVX(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VkCuFunctionNVX function,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ CuFunctionNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VkCuFunctionNVX function,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_function( function )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
CuFunctionNVX( std::nullptr_t ) {}
~CuFunctionNVX()
{
- if ( m_function )
- {
- getDispatcher()->vkDestroyCuFunctionNVX( static_cast<VkDevice>( m_device ),
- static_cast<VkCuFunctionNVX>( m_function ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
CuFunctionNVX() = delete;
@@ -5673,18 +5751,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_function( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_function, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
CuFunctionNVX & operator=( CuFunctionNVX const & ) = delete;
CuFunctionNVX & operator =( CuFunctionNVX && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_function )
- {
- getDispatcher()->vkDestroyCuFunctionNVX( static_cast<VkDevice>( m_device ),
- static_cast<VkCuFunctionNVX>( m_function ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_function = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_function, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
@@ -5698,6 +5772,19 @@ namespace VULKAN_HPP_NAMESPACE
return m_function;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_function )
+ {
+ getDispatcher()->vkDestroyCuFunctionNVX(
+ static_cast<VkDevice>( m_device ), static_cast<VkCuFunctionNVX>( m_function ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_function = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -5709,10 +5796,18 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_function, rhs.m_function );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::CuFunctionNVX m_function = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
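A brief, hypothetical sketch of the clear() and swap() members introduced on the RAII handle wrappers above, using CuFunctionNVX as the example. It assumes the default vk / vk::raii namespace macros and two valid vk::raii::CuFunctionNVX objects; the helper name resetAndSwap is illustrative only.

    #include <vulkan/vulkan_raii.hpp>

    void resetAndSwap( vk::raii::CuFunctionNVX & a, vk::raii::CuFunctionNVX & b )
    {
      a.clear();    // destroys the wrapped VkCuFunctionNVX (if any) and nulls device, allocator and dispatcher
      a.swap( b );  // afterwards a owns b's former handle and b is left empty
    }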
@@ -5721,8 +5816,7 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkCuModuleNVX;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eCuModuleNVX;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCuModuleNVX;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuModuleNVX;
@@ -5752,18 +5846,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_module( module )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
CuModuleNVX( std::nullptr_t ) {}
~CuModuleNVX()
{
- if ( m_module )
- {
- getDispatcher()->vkDestroyCuModuleNVX( static_cast<VkDevice>( m_device ),
- static_cast<VkCuModuleNVX>( m_module ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
CuModuleNVX() = delete;
@@ -5773,18 +5863,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_module( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_module, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
CuModuleNVX & operator=( CuModuleNVX const & ) = delete;
CuModuleNVX & operator =( CuModuleNVX && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_module )
- {
- getDispatcher()->vkDestroyCuModuleNVX( static_cast<VkDevice>( m_device ),
- static_cast<VkCuModuleNVX>( m_module ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_module = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_module, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
@@ -5798,6 +5884,19 @@ namespace VULKAN_HPP_NAMESPACE
return m_module;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_module )
+ {
+ getDispatcher()->vkDestroyCuModuleNVX(
+ static_cast<VkDevice>( m_device ), static_cast<VkCuModuleNVX>( m_module ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_module = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -5809,10 +5908,18 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_module, rhs.m_module );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::CuModuleNVX m_module = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
@@ -5821,53 +5928,44 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkDebugReportCallbackEXT;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT;
public:
- DebugReportCallbackEXT(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
- VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ DebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
+ VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_instance( *instance )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( instance.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateDebugReportCallbackEXT(
- static_cast<VkInstance>( *instance ),
- reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkDebugReportCallbackEXT *>( &m_callback ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ instance.getDispatcher()->vkCreateDebugReportCallbackEXT( static_cast<VkInstance>( *instance ),
+ reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkDebugReportCallbackEXT *>( &m_callback ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateDebugReportCallbackEXT" );
}
}
- DebugReportCallbackEXT(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
- VkDebugReportCallbackEXT callback,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ DebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
+ VkDebugReportCallbackEXT callback,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_instance( *instance )
, m_callback( callback )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( instance.getDispatcher() )
- {}
+ {
+ }
DebugReportCallbackEXT( std::nullptr_t ) {}
~DebugReportCallbackEXT()
{
- if ( m_callback )
- {
- getDispatcher()->vkDestroyDebugReportCallbackEXT(
- static_cast<VkInstance>( m_instance ),
- static_cast<VkDebugReportCallbackEXT>( m_callback ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
DebugReportCallbackEXT() = delete;
@@ -5877,19 +5975,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_callback( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_callback, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
DebugReportCallbackEXT & operator=( DebugReportCallbackEXT const & ) = delete;
- DebugReportCallbackEXT & operator=( DebugReportCallbackEXT && rhs ) VULKAN_HPP_NOEXCEPT
+ DebugReportCallbackEXT & operator =( DebugReportCallbackEXT && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_callback )
- {
- getDispatcher()->vkDestroyDebugReportCallbackEXT(
- static_cast<VkInstance>( m_instance ),
- static_cast<VkDebugReportCallbackEXT>( m_callback ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_instance = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} );
m_callback = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_callback, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
@@ -5903,6 +5996,20 @@ namespace VULKAN_HPP_NAMESPACE
return m_callback;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_callback )
+ {
+ getDispatcher()->vkDestroyDebugReportCallbackEXT( static_cast<VkInstance>( m_instance ),
+ static_cast<VkDebugReportCallbackEXT>( m_callback ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_instance = nullptr;
+ m_callback = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Instance getInstance() const
{
return m_instance;
@@ -5914,10 +6021,18 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_instance, rhs.m_instance );
+ std::swap( m_callback, rhs.m_callback );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
private:
VULKAN_HPP_NAMESPACE::Instance m_instance = {};
VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT m_callback = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr;
};
@@ -5926,53 +6041,44 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkDebugUtilsMessengerEXT;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
- DebugUtilsMessengerEXT(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
- VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ DebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
+ VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_instance( *instance )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( instance.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateDebugUtilsMessengerEXT(
- static_cast<VkInstance>( *instance ),
- reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkDebugUtilsMessengerEXT *>( &m_messenger ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ instance.getDispatcher()->vkCreateDebugUtilsMessengerEXT( static_cast<VkInstance>( *instance ),
+ reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkDebugUtilsMessengerEXT *>( &m_messenger ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateDebugUtilsMessengerEXT" );
}
}
- DebugUtilsMessengerEXT(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
- VkDebugUtilsMessengerEXT messenger,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ DebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
+ VkDebugUtilsMessengerEXT messenger,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_instance( *instance )
, m_messenger( messenger )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( instance.getDispatcher() )
- {}
+ {
+ }
DebugUtilsMessengerEXT( std::nullptr_t ) {}
~DebugUtilsMessengerEXT()
{
- if ( m_messenger )
- {
- getDispatcher()->vkDestroyDebugUtilsMessengerEXT(
- static_cast<VkInstance>( m_instance ),
- static_cast<VkDebugUtilsMessengerEXT>( m_messenger ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
DebugUtilsMessengerEXT() = delete;
@@ -5982,19 +6088,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_messenger( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_messenger, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
DebugUtilsMessengerEXT & operator=( DebugUtilsMessengerEXT const & ) = delete;
- DebugUtilsMessengerEXT & operator=( DebugUtilsMessengerEXT && rhs ) VULKAN_HPP_NOEXCEPT
+ DebugUtilsMessengerEXT & operator =( DebugUtilsMessengerEXT && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_messenger )
- {
- getDispatcher()->vkDestroyDebugUtilsMessengerEXT(
- static_cast<VkInstance>( m_instance ),
- static_cast<VkDebugUtilsMessengerEXT>( m_messenger ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_instance = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} );
m_messenger = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_messenger, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
@@ -6008,6 +6109,20 @@ namespace VULKAN_HPP_NAMESPACE
return m_messenger;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_messenger )
+ {
+ getDispatcher()->vkDestroyDebugUtilsMessengerEXT( static_cast<VkInstance>( m_instance ),
+ static_cast<VkDebugUtilsMessengerEXT>( m_messenger ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_instance = nullptr;
+ m_messenger = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Instance getInstance() const
{
return m_instance;
@@ -6019,10 +6134,18 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_instance, rhs.m_instance );
+ std::swap( m_messenger, rhs.m_messenger );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
private:
VULKAN_HPP_NAMESPACE::Instance m_instance = {};
VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT m_messenger = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr;
};
@@ -6031,51 +6154,42 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkDeferredOperationKHR;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
- DeferredOperationKHR(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ DeferredOperationKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateDeferredOperationKHR(
- static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkDeferredOperationKHR *>( &m_operation ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ device.getDispatcher()->vkCreateDeferredOperationKHR( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkDeferredOperationKHR *>( &m_operation ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateDeferredOperationKHR" );
}
}
- DeferredOperationKHR(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VkDeferredOperationKHR operation,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ DeferredOperationKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VkDeferredOperationKHR operation,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_operation( operation )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
DeferredOperationKHR( std::nullptr_t ) {}
~DeferredOperationKHR()
{
- if ( m_operation )
- {
- getDispatcher()->vkDestroyDeferredOperationKHR(
- static_cast<VkDevice>( m_device ),
- static_cast<VkDeferredOperationKHR>( m_operation ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
DeferredOperationKHR() = delete;
@@ -6085,19 +6199,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_operation( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_operation, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
DeferredOperationKHR & operator=( DeferredOperationKHR const & ) = delete;
- DeferredOperationKHR & operator=( DeferredOperationKHR && rhs ) VULKAN_HPP_NOEXCEPT
+ DeferredOperationKHR & operator =( DeferredOperationKHR && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_operation )
- {
- getDispatcher()->vkDestroyDeferredOperationKHR(
- static_cast<VkDevice>( m_device ),
- static_cast<VkDeferredOperationKHR>( m_operation ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_operation = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_operation, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
@@ -6111,6 +6220,20 @@ namespace VULKAN_HPP_NAMESPACE
return m_operation;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_operation )
+ {
+ getDispatcher()->vkDestroyDeferredOperationKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkDeferredOperationKHR>( m_operation ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_operation = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -6122,6 +6245,14 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_operation, rhs.m_operation );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
//=== VK_KHR_deferred_host_operations ===
VULKAN_HPP_NODISCARD uint32_t getMaxConcurrency() const VULKAN_HPP_NOEXCEPT;
@@ -6133,7 +6264,7 @@ namespace VULKAN_HPP_NAMESPACE
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::DeferredOperationKHR m_operation = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
@@ -6142,52 +6273,44 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkDescriptorPool;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool;
public:
- DescriptorPool(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ DescriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateDescriptorPool(
- static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkDescriptorPool *>( &m_descriptorPool ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ device.getDispatcher()->vkCreateDescriptorPool( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkDescriptorPool *>( &m_descriptorPool ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateDescriptorPool" );
}
}
- DescriptorPool(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VkDescriptorPool descriptorPool,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ DescriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VkDescriptorPool descriptorPool,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_descriptorPool( descriptorPool )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
DescriptorPool( std::nullptr_t ) {}
~DescriptorPool()
{
- if ( m_descriptorPool )
- {
- getDispatcher()->vkDestroyDescriptorPool( static_cast<VkDevice>( m_device ),
- static_cast<VkDescriptorPool>( m_descriptorPool ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
DescriptorPool() = delete;
@@ -6197,18 +6320,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_descriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorPool, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
DescriptorPool & operator=( DescriptorPool const & ) = delete;
DescriptorPool & operator =( DescriptorPool && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_descriptorPool )
- {
- getDispatcher()->vkDestroyDescriptorPool( static_cast<VkDevice>( m_device ),
- static_cast<VkDescriptorPool>( m_descriptorPool ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_descriptorPool = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorPool, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
@@ -6222,6 +6341,20 @@ namespace VULKAN_HPP_NAMESPACE
return m_descriptorPool;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_descriptorPool )
+ {
+ getDispatcher()->vkDestroyDescriptorPool( static_cast<VkDevice>( m_device ),
+ static_cast<VkDescriptorPool>( m_descriptorPool ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_descriptorPool = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -6233,15 +6366,22 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorPool & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_descriptorPool, rhs.m_descriptorPool );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
//=== VK_VERSION_1_0 ===
- void reset( VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const
- VULKAN_HPP_NOEXCEPT;
+ void reset( VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::DescriptorPool m_descriptorPool = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
@@ -6250,32 +6390,21 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkDescriptorSet;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet;
public:
- DescriptorSet( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VkDescriptorSet descriptorSet,
- VkDescriptorPool descriptorPool )
- : m_device( *device )
- , m_descriptorPool( descriptorPool )
- , m_descriptorSet( descriptorSet )
- , m_dispatcher( device.getDispatcher() )
- {}
+ DescriptorSet( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkDescriptorSet descriptorSet, VkDescriptorPool descriptorPool )
+ : m_device( *device ), m_descriptorPool( descriptorPool ), m_descriptorSet( descriptorSet ), m_dispatcher( device.getDispatcher() )
+ {
+ }
DescriptorSet( std::nullptr_t ) {}
~DescriptorSet()
{
- if ( m_descriptorSet )
- {
- getDispatcher()->vkFreeDescriptorSets( static_cast<VkDevice>( m_device ),
- static_cast<VkDescriptorPool>( m_descriptorPool ),
- 1,
- reinterpret_cast<VkDescriptorSet const *>( &m_descriptorSet ) );
- }
+ clear();
}
DescriptorSet() = delete;
@@ -6285,19 +6414,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_descriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorPool, {} ) )
, m_descriptorSet( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorSet, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
DescriptorSet & operator=( DescriptorSet const & ) = delete;
DescriptorSet & operator =( DescriptorSet && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_descriptorSet )
- {
- getDispatcher()->vkFreeDescriptorSets( static_cast<VkDevice>( m_device ),
- static_cast<VkDescriptorPool>( m_descriptorPool ),
- 1,
- reinterpret_cast<VkDescriptorSet const *>( &m_descriptorSet ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_descriptorPool = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorPool, {} );
m_descriptorSet = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorSet, {} );
@@ -6311,6 +6435,21 @@ namespace VULKAN_HPP_NAMESPACE
return m_descriptorSet;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_descriptorSet )
+ {
+ getDispatcher()->vkFreeDescriptorSets( static_cast<VkDevice>( m_device ),
+ static_cast<VkDescriptorPool>( m_descriptorPool ),
+ 1,
+ reinterpret_cast<VkDescriptorSet const *>( &m_descriptorSet ) );
+ }
+ m_device = nullptr;
+ m_descriptorPool = nullptr;
+ m_descriptorSet = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -6322,17 +6461,27 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSet & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_descriptorPool, rhs.m_descriptorPool );
+ std::swap( m_descriptorSet, rhs.m_descriptorSet );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
//=== VK_VERSION_1_1 ===
template <typename DataType>
- void updateWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- DataType const & data ) const VULKAN_HPP_NOEXCEPT;
+ void updateWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, DataType const & data ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_descriptor_update_template ===
template <typename DataType>
- void updateWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- DataType const & data ) const VULKAN_HPP_NOEXCEPT;
+ void updateWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, DataType const & data ) const VULKAN_HPP_NOEXCEPT;
+
+ //=== VK_VALVE_descriptor_set_host_mapping ===
+
+ VULKAN_HPP_NODISCARD void * getHostMappingVALVE() const VULKAN_HPP_NOEXCEPT;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
@@ -6348,11 +6497,9 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo )
{
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
- std::vector<VkDescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- dispatcher->vkAllocateDescriptorSets( static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ),
- descriptorSets.data() ) );
+ std::vector<VkDescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkAllocateDescriptorSets(
+ static_cast<VkDevice>( *device ), reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), descriptorSets.data() ) );
if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
this->reserve( allocateInfo.descriptorSetCount );
@@ -6367,6 +6514,8 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ DescriptorSets( std::nullptr_t ) {}
+
DescriptorSets() = delete;
DescriptorSets( DescriptorSets const & ) = delete;
DescriptorSets( DescriptorSets && rhs ) = default;
@@ -6379,81 +6528,65 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkDescriptorSetLayout;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout;
public:
- DescriptorSetLayout(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ DescriptorSetLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateDescriptorSetLayout(
- static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkDescriptorSetLayout *>( &m_descriptorSetLayout ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ device.getDispatcher()->vkCreateDescriptorSetLayout( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkDescriptorSetLayout *>( &m_descriptorSetLayout ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateDescriptorSetLayout" );
}
}
- DescriptorSetLayout(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VkDescriptorSetLayout descriptorSetLayout,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ DescriptorSetLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VkDescriptorSetLayout descriptorSetLayout,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_descriptorSetLayout( descriptorSetLayout )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
DescriptorSetLayout( std::nullptr_t ) {}
~DescriptorSetLayout()
{
- if ( m_descriptorSetLayout )
- {
- getDispatcher()->vkDestroyDescriptorSetLayout(
- static_cast<VkDevice>( m_device ),
- static_cast<VkDescriptorSetLayout>( m_descriptorSetLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
DescriptorSetLayout() = delete;
DescriptorSetLayout( DescriptorSetLayout const & ) = delete;
DescriptorSetLayout( DescriptorSetLayout && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
- , m_descriptorSetLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorSetLayout,
- {} ) )
+ , m_descriptorSetLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorSetLayout, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
DescriptorSetLayout & operator=( DescriptorSetLayout const & ) = delete;
DescriptorSetLayout & operator =( DescriptorSetLayout && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_descriptorSetLayout )
- {
- getDispatcher()->vkDestroyDescriptorSetLayout(
- static_cast<VkDevice>( m_device ),
- static_cast<VkDescriptorSetLayout>( m_descriptorSetLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
- m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
- m_descriptorSetLayout =
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorSetLayout, {} );
- m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
- m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+ clear();
+ m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+ m_descriptorSetLayout = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorSetLayout, {} );
+ m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+ m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
}
return *this;
}
@@ -6463,6 +6596,20 @@ namespace VULKAN_HPP_NAMESPACE
return m_descriptorSetLayout;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_descriptorSetLayout )
+ {
+ getDispatcher()->vkDestroyDescriptorSetLayout( static_cast<VkDevice>( m_device ),
+ static_cast<VkDescriptorSetLayout>( m_descriptorSetLayout ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_descriptorSetLayout = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -6474,10 +6621,18 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_descriptorSetLayout, rhs.m_descriptorSetLayout );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::DescriptorSetLayout m_descriptorSetLayout = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
@@ -6486,81 +6641,65 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkDescriptorUpdateTemplate;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate;
public:
- DescriptorUpdateTemplate(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ DescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateDescriptorUpdateTemplate(
- static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkDescriptorUpdateTemplate *>( &m_descriptorUpdateTemplate ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ device.getDispatcher()->vkCreateDescriptorUpdateTemplate( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkDescriptorUpdateTemplate *>( &m_descriptorUpdateTemplate ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateDescriptorUpdateTemplate" );
}
}
- DescriptorUpdateTemplate(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ DescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VkDescriptorUpdateTemplate descriptorUpdateTemplate,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_descriptorUpdateTemplate( descriptorUpdateTemplate )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
DescriptorUpdateTemplate( std::nullptr_t ) {}
~DescriptorUpdateTemplate()
{
- if ( m_descriptorUpdateTemplate )
- {
- getDispatcher()->vkDestroyDescriptorUpdateTemplate(
- static_cast<VkDevice>( m_device ),
- static_cast<VkDescriptorUpdateTemplate>( m_descriptorUpdateTemplate ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
DescriptorUpdateTemplate() = delete;
DescriptorUpdateTemplate( DescriptorUpdateTemplate const & ) = delete;
DescriptorUpdateTemplate( DescriptorUpdateTemplate && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
- , m_descriptorUpdateTemplate(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorUpdateTemplate, {} ) )
+ , m_descriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorUpdateTemplate, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
DescriptorUpdateTemplate & operator=( DescriptorUpdateTemplate const & ) = delete;
- DescriptorUpdateTemplate & operator=( DescriptorUpdateTemplate && rhs ) VULKAN_HPP_NOEXCEPT
+ DescriptorUpdateTemplate & operator =( DescriptorUpdateTemplate && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_descriptorUpdateTemplate )
- {
- getDispatcher()->vkDestroyDescriptorUpdateTemplate(
- static_cast<VkDevice>( m_device ),
- static_cast<VkDescriptorUpdateTemplate>( m_descriptorUpdateTemplate ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
- m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
- m_descriptorUpdateTemplate =
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorUpdateTemplate, {} );
- m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
- m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+ clear();
+ m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+ m_descriptorUpdateTemplate = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorUpdateTemplate, {} );
+ m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+ m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
}
return *this;
}
@@ -6570,6 +6709,20 @@ namespace VULKAN_HPP_NAMESPACE
return m_descriptorUpdateTemplate;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_descriptorUpdateTemplate )
+ {
+ getDispatcher()->vkDestroyDescriptorUpdateTemplate( static_cast<VkDevice>( m_device ),
+ static_cast<VkDescriptorUpdateTemplate>( m_descriptorUpdateTemplate ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_descriptorUpdateTemplate = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -6581,10 +6734,18 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_descriptorUpdateTemplate, rhs.m_descriptorUpdateTemplate );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate m_descriptorUpdateTemplate = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
@@ -6593,51 +6754,44 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkDeviceMemory;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory;
public:
- DeviceMemory(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ DeviceMemory( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- device.getDispatcher()->vkAllocateMemory( static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkDeviceMemory *>( &m_memory ) ) );
+ VULKAN_HPP_NAMESPACE::Result result =
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkAllocateMemory( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkDeviceMemory *>( &m_memory ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkAllocateMemory" );
}
}
- DeviceMemory(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VkDeviceMemory memory,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ DeviceMemory( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VkDeviceMemory memory,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_memory( memory )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
DeviceMemory( std::nullptr_t ) {}
~DeviceMemory()
{
- if ( m_memory )
- {
- getDispatcher()->vkFreeMemory( static_cast<VkDevice>( m_device ),
- static_cast<VkDeviceMemory>( m_memory ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
DeviceMemory() = delete;
@@ -6647,18 +6801,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_memory( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_memory, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
DeviceMemory & operator=( DeviceMemory const & ) = delete;
DeviceMemory & operator =( DeviceMemory && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_memory )
- {
- getDispatcher()->vkFreeMemory( static_cast<VkDevice>( m_device ),
- static_cast<VkDeviceMemory>( m_memory ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_memory = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_memory, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
@@ -6672,6 +6822,19 @@ namespace VULKAN_HPP_NAMESPACE
return m_memory;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_memory )
+ {
+ getDispatcher()->vkFreeMemory(
+ static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( m_memory ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_memory = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -6683,12 +6846,19 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceMemory & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_memory, rhs.m_memory );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
//=== VK_VERSION_1_0 ===
- VULKAN_HPP_NODISCARD void *
- mapMemory( VULKAN_HPP_NAMESPACE::DeviceSize offset,
- VULKAN_HPP_NAMESPACE::DeviceSize size,
- VULKAN_HPP_NAMESPACE::MemoryMapFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD void * mapMemory( VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::DeviceSize size,
+ VULKAN_HPP_NAMESPACE::MemoryMapFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
void unmapMemory() const VULKAN_HPP_NOEXCEPT;
@@ -6697,8 +6867,7 @@ namespace VULKAN_HPP_NAMESPACE
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_NV_external_memory_win32 ===
- VULKAN_HPP_NODISCARD HANDLE
- getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType ) const;
+ VULKAN_HPP_NODISCARD HANDLE getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType ) const;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_EXT_pageable_device_local_memory ===
@@ -6708,7 +6877,7 @@ namespace VULKAN_HPP_NAMESPACE
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::DeviceMemory m_memory = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
@@ -6717,22 +6886,16 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkDisplayKHR;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR;
public:
- DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice,
- int32_t drmFd,
- uint32_t connectorId )
+ DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, int32_t drmFd, uint32_t connectorId )
: m_physicalDevice( *physicalDevice ), m_dispatcher( physicalDevice.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- physicalDevice.getDispatcher()->vkGetDrmDisplayEXT( static_cast<VkPhysicalDevice>( *physicalDevice ),
- drmFd,
- connectorId,
- reinterpret_cast<VkDisplayKHR *>( &m_display ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( physicalDevice.getDispatcher()->vkGetDrmDisplayEXT(
+ static_cast<VkPhysicalDevice>( *physicalDevice ), drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &m_display ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkGetDrmDisplayEXT" );
@@ -6740,17 +6903,11 @@ namespace VULKAN_HPP_NAMESPACE
}
# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
- DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice,
- Display & dpy,
- RROutput rrOutput )
+ DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, Display & dpy, RROutput rrOutput )
: m_physicalDevice( *physicalDevice ), m_dispatcher( physicalDevice.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( physicalDevice.getDispatcher()->vkGetRandROutputDisplayEXT(
- static_cast<VkPhysicalDevice>( *physicalDevice ),
- &dpy,
- rrOutput,
- reinterpret_cast<VkDisplayKHR *>( &m_display ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( physicalDevice.getDispatcher()->vkGetRandROutputDisplayEXT(
+ static_cast<VkPhysicalDevice>( *physicalDevice ), &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &m_display ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkGetRandROutputDisplayEXT" );
@@ -6759,14 +6916,11 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
# if defined( VK_USE_PLATFORM_WIN32_KHR )
- DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice,
- uint32_t deviceRelativeId )
+ DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, uint32_t deviceRelativeId )
: m_physicalDevice( *physicalDevice ), m_dispatcher( physicalDevice.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- physicalDevice.getDispatcher()->vkGetWinrtDisplayNV( static_cast<VkPhysicalDevice>( *physicalDevice ),
- deviceRelativeId,
- reinterpret_cast<VkDisplayKHR *>( &m_display ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( physicalDevice.getDispatcher()->vkGetWinrtDisplayNV(
+ static_cast<VkPhysicalDevice>( *physicalDevice ), deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &m_display ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkGetWinrtDisplayNV" );
@@ -6774,20 +6928,16 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
- DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice,
- VkDisplayKHR display )
+ DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, VkDisplayKHR display )
: m_physicalDevice( *physicalDevice ), m_display( display ), m_dispatcher( physicalDevice.getDispatcher() )
- {}
+ {
+ }
DisplayKHR( std::nullptr_t ) {}
~DisplayKHR()
{
- if ( m_display )
- {
- getDispatcher()->vkReleaseDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ),
- static_cast<VkDisplayKHR>( m_display ) );
- }
+ clear();
}
DisplayKHR() = delete;
@@ -6796,17 +6946,14 @@ namespace VULKAN_HPP_NAMESPACE
: m_physicalDevice( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) )
, m_display( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_display, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
DisplayKHR & operator=( DisplayKHR const & ) = delete;
DisplayKHR & operator =( DisplayKHR && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_display )
- {
- getDispatcher()->vkReleaseDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ),
- static_cast<VkDisplayKHR>( m_display ) );
- }
+ clear();
m_physicalDevice = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} );
m_display = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_display, {} );
m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
@@ -6819,6 +6966,17 @@ namespace VULKAN_HPP_NAMESPACE
return m_display;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_display )
+ {
+ getDispatcher()->vkReleaseDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( m_display ) );
+ }
+ m_physicalDevice = nullptr;
+ m_display = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::PhysicalDevice getPhysicalDevice() const
{
return m_physicalDevice;
@@ -6830,13 +6988,20 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_physicalDevice, rhs.m_physicalDevice );
+ std::swap( m_display, rhs.m_display );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
//=== VK_KHR_display ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR> getModeProperties() const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR createMode(
- VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR
+ createMode( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
//=== VK_KHR_get_display_properties2 ===
@@ -6857,18 +7022,16 @@ namespace VULKAN_HPP_NAMESPACE
class DisplayKHRs : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR>
{
public:
- DisplayKHRs( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice,
- uint32_t planeIndex )
- {
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * dispatcher =
- physicalDevice.getDispatcher();
- std::vector<VkDisplayKHR> displays;
- uint32_t displayCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ DisplayKHRs( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, uint32_t planeIndex )
+ {
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * dispatcher = physicalDevice.getDispatcher();
+ std::vector<VkDisplayKHR> displays;
+ uint32_t displayCount;
+ VULKAN_HPP_NAMESPACE::Result result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkGetDisplayPlaneSupportedDisplaysKHR(
- static_cast<VkPhysicalDevice>( *physicalDevice ), planeIndex, &displayCount, nullptr ) );
+ result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ dispatcher->vkGetDisplayPlaneSupportedDisplaysKHR( static_cast<VkPhysicalDevice>( *physicalDevice ), planeIndex, &displayCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && displayCount )
{
displays.resize( displayCount );
@@ -6891,6 +7054,8 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ DisplayKHRs( std::nullptr_t ) {}
+
DisplayKHRs() = delete;
DisplayKHRs( DisplayKHRs const & ) = delete;
DisplayKHRs( DisplayKHRs && rhs ) = default;
@@ -6903,50 +7068,55 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkDisplayModeKHR;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR;
public:
- DisplayModeKHR(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display,
- VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ DisplayModeKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display,
+ VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_physicalDevice( display.getPhysicalDevice() ), m_dispatcher( display.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( display.getDispatcher()->vkCreateDisplayModeKHR(
- static_cast<VkPhysicalDevice>( display.getPhysicalDevice() ),
- static_cast<VkDisplayKHR>( *display ),
- reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- reinterpret_cast<VkDisplayModeKHR *>( &m_displayModeKHR ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( display.getDispatcher()->vkCreateDisplayModeKHR(
+ static_cast<VkPhysicalDevice>( display.getPhysicalDevice() ),
+ static_cast<VkDisplayKHR>( *display ),
+ reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ reinterpret_cast<VkDisplayModeKHR *>( &m_displayModeKHR ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateDisplayModeKHR" );
}
}
- DisplayModeKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display,
- VkDisplayModeKHR displayModeKHR )
- : m_physicalDevice( display.getPhysicalDevice() )
- , m_displayModeKHR( displayModeKHR )
- , m_dispatcher( display.getDispatcher() )
- {}
+ DisplayModeKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, VkDisplayModeKHR displayModeKHR )
+ : m_physicalDevice( display.getPhysicalDevice() ), m_displayModeKHR( displayModeKHR ), m_dispatcher( display.getDispatcher() )
+ {
+ }
DisplayModeKHR( std::nullptr_t ) {}
- DisplayModeKHR() = delete;
- DisplayModeKHR( DisplayModeKHR const & ) = delete;
+ ~DisplayModeKHR()
+ {
+ clear();
+ }
+
+ DisplayModeKHR() = delete;
+ DisplayModeKHR( DisplayModeKHR const & rhs ) : m_displayModeKHR( rhs.m_displayModeKHR ), m_dispatcher( rhs.m_dispatcher ) {}
DisplayModeKHR( DisplayModeKHR && rhs ) VULKAN_HPP_NOEXCEPT
: m_physicalDevice( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) )
, m_displayModeKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_displayModeKHR, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
- DisplayModeKHR & operator=( DisplayModeKHR const & ) = delete;
- DisplayModeKHR & operator =( DisplayModeKHR && rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ }
+ DisplayModeKHR & operator=( DisplayModeKHR const & rhs )
+ {
+ m_displayModeKHR = rhs.m_displayModeKHR;
+ m_dispatcher = rhs.m_dispatcher;
+ return *this;
+ }
+ DisplayModeKHR & operator=( DisplayModeKHR && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
@@ -6962,16 +7132,29 @@ namespace VULKAN_HPP_NAMESPACE
return m_displayModeKHR;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ m_physicalDevice = nullptr;
+ m_displayModeKHR = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_physicalDevice, rhs.m_physicalDevice );
+ std::swap( m_displayModeKHR, rhs.m_displayModeKHR );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
//=== VK_KHR_display ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR
- getDisplayPlaneCapabilities( uint32_t planeIndex ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR getDisplayPlaneCapabilities( uint32_t planeIndex ) const;
private:
VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {};
@@ -6984,8 +7167,7 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkEvent;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eEvent;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eEvent;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent;
@@ -6997,11 +7179,11 @@ namespace VULKAN_HPP_NAMESPACE
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- device.getDispatcher()->vkCreateEvent( static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkEventCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkEvent *>( &m_event ) ) );
+ VULKAN_HPP_NAMESPACE::Result result =
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateEvent( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkEventCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkEvent *>( &m_event ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateEvent" );
@@ -7015,18 +7197,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_event( event )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
Event( std::nullptr_t ) {}
~Event()
{
- if ( m_event )
- {
- getDispatcher()->vkDestroyEvent( static_cast<VkDevice>( m_device ),
- static_cast<VkEvent>( m_event ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
Event() = delete;
@@ -7036,18 +7214,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_event( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_event, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
Event & operator=( Event const & ) = delete;
Event & operator =( Event && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_event )
- {
- getDispatcher()->vkDestroyEvent( static_cast<VkDevice>( m_device ),
- static_cast<VkEvent>( m_event ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_event = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_event, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
@@ -7061,6 +7235,19 @@ namespace VULKAN_HPP_NAMESPACE
return m_event;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_event )
+ {
+ getDispatcher()->vkDestroyEvent(
+ static_cast<VkDevice>( m_device ), static_cast<VkEvent>( m_event ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_event = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -7072,6 +7259,14 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Event & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_event, rhs.m_event );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
//=== VK_VERSION_1_0 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getStatus() const;
@@ -7083,7 +7278,7 @@ namespace VULKAN_HPP_NAMESPACE
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::Event m_event = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
@@ -7092,8 +7287,7 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkFence;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eFence;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFence;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence;
@@ -7105,11 +7299,11 @@ namespace VULKAN_HPP_NAMESPACE
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- device.getDispatcher()->vkCreateFence( static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkFence *>( &m_fence ) ) );
+ VULKAN_HPP_NAMESPACE::Result result =
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateFence( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkFence *>( &m_fence ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateFence" );
@@ -7123,12 +7317,11 @@ namespace VULKAN_HPP_NAMESPACE
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkRegisterDeviceEventEXT(
- static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkFence *>( &m_fence ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ device.getDispatcher()->vkRegisterDeviceEventEXT( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkFence *>( &m_fence ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkRegisterDeviceEventEXT" );
@@ -7143,13 +7336,12 @@ namespace VULKAN_HPP_NAMESPACE
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkRegisterDisplayEventEXT(
- static_cast<VkDevice>( *device ),
- static_cast<VkDisplayKHR>( *display ),
- reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkFence *>( &m_fence ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ device.getDispatcher()->vkRegisterDisplayEventEXT( static_cast<VkDevice>( *device ),
+ static_cast<VkDisplayKHR>( *display ),
+ reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkFence *>( &m_fence ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkRegisterDisplayEventEXT" );
@@ -7163,18 +7355,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_fence( fence )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
Fence( std::nullptr_t ) {}
~Fence()
{
- if ( m_fence )
- {
- getDispatcher()->vkDestroyFence( static_cast<VkDevice>( m_device ),
- static_cast<VkFence>( m_fence ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
Fence() = delete;
@@ -7184,18 +7372,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_fence, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
Fence & operator=( Fence const & ) = delete;
Fence & operator =( Fence && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_fence )
- {
- getDispatcher()->vkDestroyFence( static_cast<VkDevice>( m_device ),
- static_cast<VkFence>( m_fence ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_fence = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_fence, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
@@ -7209,6 +7393,19 @@ namespace VULKAN_HPP_NAMESPACE
return m_fence;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_fence )
+ {
+ getDispatcher()->vkDestroyFence(
+ static_cast<VkDevice>( m_device ), static_cast<VkFence>( m_fence ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_fence = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -7220,6 +7417,14 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Fence & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_fence, rhs.m_fence );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
//=== VK_VERSION_1_0 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getStatus() const;
@@ -7227,7 +7432,7 @@ namespace VULKAN_HPP_NAMESPACE
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::Fence m_fence = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
@@ -7236,8 +7441,7 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkFramebuffer;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer;
@@ -7267,18 +7471,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_framebuffer( framebuffer )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
Framebuffer( std::nullptr_t ) {}
~Framebuffer()
{
- if ( m_framebuffer )
- {
- getDispatcher()->vkDestroyFramebuffer( static_cast<VkDevice>( m_device ),
- static_cast<VkFramebuffer>( m_framebuffer ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
Framebuffer() = delete;
@@ -7288,18 +7488,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_framebuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_framebuffer, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
Framebuffer & operator=( Framebuffer const & ) = delete;
Framebuffer & operator =( Framebuffer && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_framebuffer )
- {
- getDispatcher()->vkDestroyFramebuffer( static_cast<VkDevice>( m_device ),
- static_cast<VkFramebuffer>( m_framebuffer ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_framebuffer = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_framebuffer, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
@@ -7313,6 +7509,19 @@ namespace VULKAN_HPP_NAMESPACE
return m_framebuffer;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_framebuffer )
+ {
+ getDispatcher()->vkDestroyFramebuffer(
+ static_cast<VkDevice>( m_device ), static_cast<VkFramebuffer>( m_framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_framebuffer = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -7324,10 +7533,22 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Framebuffer & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_framebuffer, rhs.m_framebuffer );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
+ //=== VK_QCOM_tile_properties ===
+
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM> getTilePropertiesQCOM() const;
+
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::Framebuffer m_framebuffer = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
@@ -7336,8 +7557,7 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkImage;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eImage;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImage;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage;
@@ -7349,11 +7569,11 @@ namespace VULKAN_HPP_NAMESPACE
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- device.getDispatcher()->vkCreateImage( static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkImageCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkImage *>( &m_image ) ) );
+ VULKAN_HPP_NAMESPACE::Result result =
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateImage( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkImageCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkImage *>( &m_image ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateImage" );
@@ -7367,18 +7587,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_image( image )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
Image( std::nullptr_t ) {}
~Image()
{
- if ( m_image )
- {
- getDispatcher()->vkDestroyImage( static_cast<VkDevice>( m_device ),
- static_cast<VkImage>( m_image ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
Image() = delete;
@@ -7388,18 +7604,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_image( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_image, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
Image & operator=( Image const & ) = delete;
Image & operator =( Image && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_image )
- {
- getDispatcher()->vkDestroyImage( static_cast<VkDevice>( m_device ),
- static_cast<VkImage>( m_image ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_image = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_image, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
@@ -7413,6 +7625,19 @@ namespace VULKAN_HPP_NAMESPACE
return m_image;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_image )
+ {
+ getDispatcher()->vkDestroyImage(
+ static_cast<VkDevice>( m_device ), static_cast<VkImage>( m_image ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_image = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -7424,27 +7649,42 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Image & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_image, rhs.m_image );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
//=== VK_VERSION_1_0 ===
void bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getMemoryRequirements() const VULKAN_HPP_NOEXCEPT;
- VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements>
- getSparseMemoryRequirements() const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements> getSparseMemoryRequirements() const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout
- getSubresourceLayout( const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource ) const VULKAN_HPP_NOEXCEPT;
+ getSubresourceLayout( const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_image_drm_format_modifier ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT
- getDrmFormatModifierPropertiesEXT() const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT getDrmFormatModifierPropertiesEXT() const;
+
+ //=== VK_EXT_image_compression_control ===
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT
+ getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT;
+
+ template <typename X, typename Y, typename... Z>
+ VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
+ getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::Image m_image = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
@@ -7453,8 +7693,7 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkImageView;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eImageView;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImageView;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView;
@@ -7466,11 +7705,11 @@ namespace VULKAN_HPP_NAMESPACE
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- device.getDispatcher()->vkCreateImageView( static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkImageView *>( &m_imageView ) ) );
+ VULKAN_HPP_NAMESPACE::Result result =
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateImageView( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkImageView *>( &m_imageView ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateImageView" );
@@ -7484,18 +7723,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_imageView( imageView )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
ImageView( std::nullptr_t ) {}
~ImageView()
{
- if ( m_imageView )
- {
- getDispatcher()->vkDestroyImageView( static_cast<VkDevice>( m_device ),
- static_cast<VkImageView>( m_imageView ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
ImageView() = delete;
@@ -7505,18 +7740,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_imageView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_imageView, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
ImageView & operator=( ImageView const & ) = delete;
ImageView & operator =( ImageView && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_imageView )
- {
- getDispatcher()->vkDestroyImageView( static_cast<VkDevice>( m_device ),
- static_cast<VkImageView>( m_imageView ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_imageView = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_imageView, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
@@ -7530,6 +7761,19 @@ namespace VULKAN_HPP_NAMESPACE
return m_imageView;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_imageView )
+ {
+ getDispatcher()->vkDestroyImageView(
+ static_cast<VkDevice>( m_device ), static_cast<VkImageView>( m_imageView ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_imageView = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -7541,6 +7785,14 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ImageView & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_imageView, rhs.m_imageView );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
//=== VK_NVX_image_view_handle ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX getAddressNVX() const;
@@ -7548,7 +7800,7 @@ namespace VULKAN_HPP_NAMESPACE
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::ImageView m_imageView = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
@@ -7557,81 +7809,65 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkIndirectCommandsLayoutNV;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
- IndirectCommandsLayoutNV(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ IndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateIndirectCommandsLayoutNV(
- static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkIndirectCommandsLayoutNV *>( &m_indirectCommandsLayout ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ device.getDispatcher()->vkCreateIndirectCommandsLayoutNV( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkIndirectCommandsLayoutNV *>( &m_indirectCommandsLayout ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateIndirectCommandsLayoutNV" );
}
}
- IndirectCommandsLayoutNV(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VkIndirectCommandsLayoutNV indirectCommandsLayout,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ IndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VkIndirectCommandsLayoutNV indirectCommandsLayout,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_indirectCommandsLayout( indirectCommandsLayout )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
IndirectCommandsLayoutNV( std::nullptr_t ) {}
~IndirectCommandsLayoutNV()
{
- if ( m_indirectCommandsLayout )
- {
- getDispatcher()->vkDestroyIndirectCommandsLayoutNV(
- static_cast<VkDevice>( m_device ),
- static_cast<VkIndirectCommandsLayoutNV>( m_indirectCommandsLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
IndirectCommandsLayoutNV() = delete;
IndirectCommandsLayoutNV( IndirectCommandsLayoutNV const & ) = delete;
IndirectCommandsLayoutNV( IndirectCommandsLayoutNV && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
- , m_indirectCommandsLayout(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_indirectCommandsLayout, {} ) )
+ , m_indirectCommandsLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_indirectCommandsLayout, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
IndirectCommandsLayoutNV & operator=( IndirectCommandsLayoutNV const & ) = delete;
- IndirectCommandsLayoutNV & operator=( IndirectCommandsLayoutNV && rhs ) VULKAN_HPP_NOEXCEPT
+ IndirectCommandsLayoutNV & operator =( IndirectCommandsLayoutNV && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_indirectCommandsLayout )
- {
- getDispatcher()->vkDestroyIndirectCommandsLayoutNV(
- static_cast<VkDevice>( m_device ),
- static_cast<VkIndirectCommandsLayoutNV>( m_indirectCommandsLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
- m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
- m_indirectCommandsLayout =
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_indirectCommandsLayout, {} );
- m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
- m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+ clear();
+ m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+ m_indirectCommandsLayout = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_indirectCommandsLayout, {} );
+ m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+ m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
}
return *this;
}
@@ -7641,6 +7877,20 @@ namespace VULKAN_HPP_NAMESPACE
return m_indirectCommandsLayout;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_indirectCommandsLayout )
+ {
+ getDispatcher()->vkDestroyIndirectCommandsLayoutNV( static_cast<VkDevice>( m_device ),
+ static_cast<VkIndirectCommandsLayoutNV>( m_indirectCommandsLayout ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_indirectCommandsLayout = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -7652,54 +7902,286 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_indirectCommandsLayout, rhs.m_indirectCommandsLayout );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV m_indirectCommandsLayout = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
+ class MicromapEXT
+ {
+ public:
+ using CType = VkMicromapEXT;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eMicromapEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+ VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+ public:
+ MicromapEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ : m_device( *device )
+ , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+ , m_dispatcher( device.getDispatcher() )
+ {
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ device.getDispatcher()->vkCreateMicromapEXT( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkMicromapEXT *>( &m_micromap ) ) );
+ if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ {
+ throwResultException( result, "vkCreateMicromapEXT" );
+ }
+ }
+
+ MicromapEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VkMicromapEXT micromap,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ : m_device( *device )
+ , m_micromap( micromap )
+ , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+ , m_dispatcher( device.getDispatcher() )
+ {
+ }
+
+ MicromapEXT( std::nullptr_t ) {}
+
+ ~MicromapEXT()
+ {
+ clear();
+ }
+
+ MicromapEXT() = delete;
+ MicromapEXT( MicromapEXT const & ) = delete;
+ MicromapEXT( MicromapEXT && rhs ) VULKAN_HPP_NOEXCEPT
+ : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
+ , m_micromap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_micromap, {} ) )
+ , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
+ , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+ {
+ }
+ MicromapEXT & operator=( MicromapEXT const & ) = delete;
+ MicromapEXT & operator =( MicromapEXT && rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ if ( this != &rhs )
+ {
+ clear();
+ m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+ m_micromap = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_micromap, {} );
+ m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+ m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+ }
+ return *this;
+ }
+
+ VULKAN_HPP_NAMESPACE::MicromapEXT const & operator*() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_micromap;
+ }
+
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_micromap )
+ {
+ getDispatcher()->vkDestroyMicromapEXT(
+ static_cast<VkDevice>( m_device ), static_cast<VkMicromapEXT>( m_micromap ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_micromap = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
+ VULKAN_HPP_NAMESPACE::Device getDevice() const
+ {
+ return m_device;
+ }
+
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+ {
+ VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+ return m_dispatcher;
+ }
+
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::MicromapEXT & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_micromap, rhs.m_micromap );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
+ private:
+ VULKAN_HPP_NAMESPACE::Device m_device = {};
+ VULKAN_HPP_NAMESPACE::MicromapEXT m_micromap = {};
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+ };
+
+ class OpticalFlowSessionNV
+ {
+ public:
+ using CType = VkOpticalFlowSessionNV;
+
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eOpticalFlowSessionNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+ VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+ public:
+ OpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ : m_device( *device )
+ , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+ , m_dispatcher( device.getDispatcher() )
+ {
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ device.getDispatcher()->vkCreateOpticalFlowSessionNV( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkOpticalFlowSessionNV *>( &m_session ) ) );
+ if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ {
+ throwResultException( result, "vkCreateOpticalFlowSessionNV" );
+ }
+ }
+
+ OpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VkOpticalFlowSessionNV session,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ : m_device( *device )
+ , m_session( session )
+ , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+ , m_dispatcher( device.getDispatcher() )
+ {
+ }
+
+ OpticalFlowSessionNV( std::nullptr_t ) {}
+
+ ~OpticalFlowSessionNV()
+ {
+ clear();
+ }
+
+ OpticalFlowSessionNV() = delete;
+ OpticalFlowSessionNV( OpticalFlowSessionNV const & ) = delete;
+ OpticalFlowSessionNV( OpticalFlowSessionNV && rhs ) VULKAN_HPP_NOEXCEPT
+ : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
+ , m_session( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_session, {} ) )
+ , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
+ , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+ {
+ }
+ OpticalFlowSessionNV & operator=( OpticalFlowSessionNV const & ) = delete;
+ OpticalFlowSessionNV & operator =( OpticalFlowSessionNV && rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ if ( this != &rhs )
+ {
+ clear();
+ m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+ m_session = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_session, {} );
+ m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+ m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+ }
+ return *this;
+ }
+
+ VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV const & operator*() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_session;
+ }
+
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_session )
+ {
+ getDispatcher()->vkDestroyOpticalFlowSessionNV( static_cast<VkDevice>( m_device ),
+ static_cast<VkOpticalFlowSessionNV>( m_session ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_session = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
+ VULKAN_HPP_NAMESPACE::Device getDevice() const
+ {
+ return m_device;
+ }
+
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+ {
+ VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+ return m_dispatcher;
+ }
+
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_session, rhs.m_session );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
+ //=== VK_NV_optical_flow ===
+
+ void bindImage( VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint,
+ VULKAN_HPP_NAMESPACE::ImageView view,
+ VULKAN_HPP_NAMESPACE::ImageLayout layout ) const;
+
+ private:
+ VULKAN_HPP_NAMESPACE::Device m_device = {};
+ VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV m_session = {};
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+ };
+
class PerformanceConfigurationINTEL
{
public:
using CType = VkPerformanceConfigurationINTEL;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
- PerformanceConfigurationINTEL(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo )
+ PerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo )
: m_device( *device ), m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkAcquirePerformanceConfigurationINTEL(
- static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ),
- reinterpret_cast<VkPerformanceConfigurationINTEL *>( &m_configuration ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ device.getDispatcher()->vkAcquirePerformanceConfigurationINTEL( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ),
+ reinterpret_cast<VkPerformanceConfigurationINTEL *>( &m_configuration ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkAcquirePerformanceConfigurationINTEL" );
}
}
- PerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VkPerformanceConfigurationINTEL configuration )
+ PerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkPerformanceConfigurationINTEL configuration )
: m_device( *device ), m_configuration( configuration ), m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
PerformanceConfigurationINTEL( std::nullptr_t ) {}
~PerformanceConfigurationINTEL()
{
- if ( m_configuration )
- {
- getDispatcher()->vkReleasePerformanceConfigurationINTEL(
- static_cast<VkDevice>( m_device ), static_cast<VkPerformanceConfigurationINTEL>( m_configuration ) );
- }
+ clear();
}
PerformanceConfigurationINTEL() = delete;
@@ -7708,17 +8190,14 @@ namespace VULKAN_HPP_NAMESPACE
: m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_configuration( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_configuration, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
PerformanceConfigurationINTEL & operator=( PerformanceConfigurationINTEL const & ) = delete;
- PerformanceConfigurationINTEL & operator=( PerformanceConfigurationINTEL && rhs ) VULKAN_HPP_NOEXCEPT
+ PerformanceConfigurationINTEL & operator =( PerformanceConfigurationINTEL && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_configuration )
- {
- getDispatcher()->vkReleasePerformanceConfigurationINTEL(
- static_cast<VkDevice>( m_device ), static_cast<VkPerformanceConfigurationINTEL>( m_configuration ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_configuration = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_configuration, {} );
m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
@@ -7731,6 +8210,18 @@ namespace VULKAN_HPP_NAMESPACE
return m_configuration;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_configuration )
+ {
+ getDispatcher()->vkReleasePerformanceConfigurationINTEL( static_cast<VkDevice>( m_device ),
+ static_cast<VkPerformanceConfigurationINTEL>( m_configuration ) );
+ }
+ m_device = nullptr;
+ m_configuration = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -7742,6 +8233,13 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_configuration, rhs.m_configuration );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL m_configuration = {};
@@ -7753,52 +8251,44 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkPipelineCache;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache;
public:
- PipelineCache(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ PipelineCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreatePipelineCache(
- static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkPipelineCache *>( &m_pipelineCache ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ device.getDispatcher()->vkCreatePipelineCache( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkPipelineCache *>( &m_pipelineCache ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreatePipelineCache" );
}
}
- PipelineCache(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VkPipelineCache pipelineCache,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ PipelineCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VkPipelineCache pipelineCache,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_pipelineCache( pipelineCache )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
PipelineCache( std::nullptr_t ) {}
~PipelineCache()
{
- if ( m_pipelineCache )
- {
- getDispatcher()->vkDestroyPipelineCache( static_cast<VkDevice>( m_device ),
- static_cast<VkPipelineCache>( m_pipelineCache ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
PipelineCache() = delete;
@@ -7808,18 +8298,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_pipelineCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipelineCache, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
PipelineCache & operator=( PipelineCache const & ) = delete;
PipelineCache & operator =( PipelineCache && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_pipelineCache )
- {
- getDispatcher()->vkDestroyPipelineCache( static_cast<VkDevice>( m_device ),
- static_cast<VkPipelineCache>( m_pipelineCache ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_pipelineCache = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipelineCache, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
@@ -7833,6 +8319,20 @@ namespace VULKAN_HPP_NAMESPACE
return m_pipelineCache;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_pipelineCache )
+ {
+ getDispatcher()->vkDestroyPipelineCache( static_cast<VkDevice>( m_device ),
+ static_cast<VkPipelineCache>( m_pipelineCache ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_pipelineCache = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -7844,16 +8344,24 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_pipelineCache, rhs.m_pipelineCache );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
//=== VK_VERSION_1_0 ===
VULKAN_HPP_NODISCARD std::vector<uint8_t> getData() const;
- void merge( ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches ) const;
+ void merge( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches ) const;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::PipelineCache m_pipelineCache = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
@@ -7862,29 +8370,26 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkPipeline;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::ePipeline;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipeline;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline;
public:
- Pipeline(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const &
- pipelineCache,
- VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- m_constructorSuccessCode = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateComputePipelines(
- static_cast<VkDevice>( *device ),
- pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
- 1,
- reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkPipeline *>( &m_pipeline ) ) );
+ m_constructorSuccessCode = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ getDispatcher()->vkCreateComputePipelines( static_cast<VkDevice>( *device ),
+ pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
+ 1,
+ reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkPipeline *>( &m_pipeline ) ) );
if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
{
@@ -7892,24 +8397,21 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- Pipeline(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const &
- pipelineCache,
- VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- m_constructorSuccessCode =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateGraphicsPipelines(
- static_cast<VkDevice>( *device ),
- pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
- 1,
- reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkPipeline *>( &m_pipeline ) ) );
+ m_constructorSuccessCode = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ getDispatcher()->vkCreateGraphicsPipelines( static_cast<VkDevice>( *device ),
+ pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
+ 1,
+ reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkPipeline *>( &m_pipeline ) ) );
if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
{
@@ -7917,27 +8419,23 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- Pipeline(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::Optional<
- const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const &
- pipelineCache,
- VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- m_constructorSuccessCode =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateRayTracingPipelinesKHR(
- static_cast<VkDevice>( *device ),
- deferredOperation ? static_cast<VkDeferredOperationKHR>( **deferredOperation ) : 0,
- pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
- 1,
- reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkPipeline *>( &m_pipeline ) ) );
+ m_constructorSuccessCode = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ getDispatcher()->vkCreateRayTracingPipelinesKHR( static_cast<VkDevice>( *device ),
+ deferredOperation ? static_cast<VkDeferredOperationKHR>( **deferredOperation ) : 0,
+ pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
+ 1,
+ reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkPipeline *>( &m_pipeline ) ) );
if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) &&
( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) &&
@@ -7947,24 +8445,21 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- Pipeline(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const &
- pipelineCache,
- VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- m_constructorSuccessCode =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateRayTracingPipelinesNV(
- static_cast<VkDevice>( *device ),
- pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
- 1,
- reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkPipeline *>( &m_pipeline ) ) );
+ m_constructorSuccessCode = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ getDispatcher()->vkCreateRayTracingPipelinesNV( static_cast<VkDevice>( *device ),
+ pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
+ 1,
+ reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkPipeline *>( &m_pipeline ) ) );
if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
{
@@ -7974,25 +8469,21 @@ namespace VULKAN_HPP_NAMESPACE
Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkPipeline pipeline,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr,
- VULKAN_HPP_NAMESPACE::Result successCode = VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr,
+ VULKAN_HPP_NAMESPACE::Result successCode = VULKAN_HPP_NAMESPACE::Result::eSuccess )
: m_device( *device )
, m_pipeline( pipeline )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_constructorSuccessCode( successCode )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
Pipeline( std::nullptr_t ) {}
~Pipeline()
{
- if ( m_pipeline )
- {
- getDispatcher()->vkDestroyPipeline( static_cast<VkDevice>( m_device ),
- static_cast<VkPipeline>( m_pipeline ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
Pipeline() = delete;
@@ -8001,23 +8492,21 @@ namespace VULKAN_HPP_NAMESPACE
: m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipeline, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
+ , m_constructorSuccessCode( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_constructorSuccessCode, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
Pipeline & operator=( Pipeline const & ) = delete;
Pipeline & operator =( Pipeline && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_pipeline )
- {
- getDispatcher()->vkDestroyPipeline( static_cast<VkDevice>( m_device ),
- static_cast<VkPipeline>( m_pipeline ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
- m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
- m_pipeline = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipeline, {} );
- m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
- m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+ clear();
+ m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+ m_pipeline = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipeline, {} );
+ m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+ m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_constructorSuccessCode, {} );
+ m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
}
return *this;
}
@@ -8027,6 +8516,20 @@ namespace VULKAN_HPP_NAMESPACE
return m_pipeline;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_pipeline )
+ {
+ getDispatcher()->vkDestroyPipeline(
+ static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_pipeline = nullptr;
+ m_allocator = nullptr;
+ m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Result getConstructorSuccessCode() const
{
return m_constructorSuccessCode;
@@ -8043,17 +8546,24 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_pipeline, rhs.m_pipeline );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
//=== VK_AMD_shader_info ===
- VULKAN_HPP_NODISCARD std::vector<uint8_t>
- getShaderInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
- VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType ) const;
+ VULKAN_HPP_NODISCARD std::vector<uint8_t> getShaderInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
+ VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType ) const;
//=== VK_NV_ray_tracing ===
template <typename DataType>
- VULKAN_HPP_NODISCARD std::vector<DataType>
- getRayTracingShaderGroupHandlesNV( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const;
+ VULKAN_HPP_NODISCARD std::vector<DataType> getRayTracingShaderGroupHandlesNV( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const;
template <typename DataType>
VULKAN_HPP_NODISCARD DataType getRayTracingShaderGroupHandleNV( uint32_t firstGroup, uint32_t groupCount ) const;
@@ -8063,54 +8573,47 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_ray_tracing_pipeline ===
template <typename DataType>
- VULKAN_HPP_NODISCARD std::vector<DataType>
- getRayTracingShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const;
+ VULKAN_HPP_NODISCARD std::vector<DataType> getRayTracingShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const;
template <typename DataType>
VULKAN_HPP_NODISCARD DataType getRayTracingShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const;
template <typename DataType>
- VULKAN_HPP_NODISCARD std::vector<DataType> getRayTracingCaptureReplayShaderGroupHandlesKHR(
- uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const;
+ VULKAN_HPP_NODISCARD std::vector<DataType>
+ getRayTracingCaptureReplayShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const;
template <typename DataType>
- VULKAN_HPP_NODISCARD DataType getRayTracingCaptureReplayShaderGroupHandleKHR( uint32_t firstGroup,
- uint32_t groupCount ) const;
+ VULKAN_HPP_NODISCARD DataType getRayTracingCaptureReplayShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const;
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize getRayTracingShaderGroupStackSizeKHR(
- uint32_t group, VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize
+ getRayTracingShaderGroupStackSizeKHR( uint32_t group, VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT;
private:
- VULKAN_HPP_NAMESPACE::Device m_device = {};
- VULKAN_HPP_NAMESPACE::Pipeline m_pipeline = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
- VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode;
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+ VULKAN_HPP_NAMESPACE::Device m_device = {};
+ VULKAN_HPP_NAMESPACE::Pipeline m_pipeline = {};
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+ VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown;
+ VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
class Pipelines : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline>
{
public:
- Pipelines(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const &
- pipelineCache,
- VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
std::vector<VkPipeline> pipelines( createInfos.size() );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateComputePipelines(
- static_cast<VkDevice>( *device ),
- pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
- createInfos.size(),
- reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- pipelines.data() ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateComputePipelines(
+ static_cast<VkDevice>( *device ),
+ pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
+ createInfos.size(),
+ reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ pipelines.data() ) );
+ if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
{
this->reserve( createInfos.size() );
for ( auto const & pipeline : pipelines )
@@ -8124,26 +8627,21 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- Pipelines(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const &
- pipelineCache,
- VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
std::vector<VkPipeline> pipelines( createInfos.size() );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateGraphicsPipelines(
- static_cast<VkDevice>( *device ),
- pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
- createInfos.size(),
- reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- pipelines.data() ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateGraphicsPipelines(
+ static_cast<VkDevice>( *device ),
+ pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
+ createInfos.size(),
+ reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ pipelines.data() ) );
+ if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
{
this->reserve( createInfos.size() );
for ( auto const & pipeline : pipelines )
@@ -8157,31 +8655,24 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- Pipelines(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::Optional<
- const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const &
- pipelineCache,
- VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
std::vector<VkPipeline> pipelines( createInfos.size() );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateRayTracingPipelinesKHR(
- static_cast<VkDevice>( *device ),
- deferredOperation ? static_cast<VkDeferredOperationKHR>( **deferredOperation ) : 0,
- pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
- createInfos.size(),
- reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- pipelines.data() ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateRayTracingPipelinesKHR(
+ static_cast<VkDevice>( *device ),
+ deferredOperation ? static_cast<VkDeferredOperationKHR>( **deferredOperation ) : 0,
+ pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
+ createInfos.size(),
+ reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ pipelines.data() ) );
+ if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) ||
+ ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
{
this->reserve( createInfos.size() );
for ( auto const & pipeline : pipelines )
@@ -8195,26 +8686,21 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- Pipelines(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const &
- pipelineCache,
- VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
std::vector<VkPipeline> pipelines( createInfos.size() );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateRayTracingPipelinesNV(
- static_cast<VkDevice>( *device ),
- pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
- createInfos.size(),
- reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- pipelines.data() ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
- ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateRayTracingPipelinesNV(
+ static_cast<VkDevice>( *device ),
+ pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
+ createInfos.size(),
+ reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ pipelines.data() ) );
+ if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
{
this->reserve( createInfos.size() );
for ( auto const & pipeline : pipelines )
@@ -8228,6 +8714,8 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ Pipelines( std::nullptr_t ) {}
+
Pipelines() = delete;
Pipelines( Pipelines const & ) = delete;
Pipelines( Pipelines && rhs ) = default;
@@ -8240,52 +8728,44 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkPipelineLayout;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout;
public:
- PipelineLayout(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ PipelineLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreatePipelineLayout(
- static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkPipelineLayout *>( &m_pipelineLayout ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ device.getDispatcher()->vkCreatePipelineLayout( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkPipelineLayout *>( &m_pipelineLayout ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreatePipelineLayout" );
}
}
- PipelineLayout(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VkPipelineLayout pipelineLayout,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ PipelineLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VkPipelineLayout pipelineLayout,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_pipelineLayout( pipelineLayout )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
PipelineLayout( std::nullptr_t ) {}
~PipelineLayout()
{
- if ( m_pipelineLayout )
- {
- getDispatcher()->vkDestroyPipelineLayout( static_cast<VkDevice>( m_device ),
- static_cast<VkPipelineLayout>( m_pipelineLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
PipelineLayout() = delete;
@@ -8295,18 +8775,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_pipelineLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipelineLayout, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
PipelineLayout & operator=( PipelineLayout const & ) = delete;
PipelineLayout & operator =( PipelineLayout && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_pipelineLayout )
- {
- getDispatcher()->vkDestroyPipelineLayout( static_cast<VkDevice>( m_device ),
- static_cast<VkPipelineLayout>( m_pipelineLayout ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_pipelineLayout = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipelineLayout, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
@@ -8320,6 +8796,20 @@ namespace VULKAN_HPP_NAMESPACE
return m_pipelineLayout;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_pipelineLayout )
+ {
+ getDispatcher()->vkDestroyPipelineLayout( static_cast<VkDevice>( m_device ),
+ static_cast<VkPipelineLayout>( m_pipelineLayout ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_pipelineLayout = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -8331,10 +8821,18 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineLayout & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_pipelineLayout, rhs.m_pipelineLayout );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::PipelineLayout m_pipelineLayout = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
@@ -8343,52 +8841,44 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkPrivateDataSlot;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlot;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlot;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
- PrivateDataSlot(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ PrivateDataSlot( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreatePrivateDataSlot(
- static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkPrivateDataSlot *>( &m_privateDataSlot ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ device.getDispatcher()->vkCreatePrivateDataSlot( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkPrivateDataSlot *>( &m_privateDataSlot ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreatePrivateDataSlot" );
}
}
- PrivateDataSlot(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VkPrivateDataSlot privateDataSlot,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ PrivateDataSlot( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VkPrivateDataSlot privateDataSlot,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_privateDataSlot( privateDataSlot )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
PrivateDataSlot( std::nullptr_t ) {}
~PrivateDataSlot()
{
- if ( m_privateDataSlot )
- {
- getDispatcher()->vkDestroyPrivateDataSlot( static_cast<VkDevice>( m_device ),
- static_cast<VkPrivateDataSlot>( m_privateDataSlot ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
PrivateDataSlot() = delete;
@@ -8398,18 +8888,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_privateDataSlot( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_privateDataSlot, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
PrivateDataSlot & operator=( PrivateDataSlot const & ) = delete;
PrivateDataSlot & operator =( PrivateDataSlot && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_privateDataSlot )
- {
- getDispatcher()->vkDestroyPrivateDataSlot( static_cast<VkDevice>( m_device ),
- static_cast<VkPrivateDataSlot>( m_privateDataSlot ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_privateDataSlot = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_privateDataSlot, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
@@ -8423,6 +8909,20 @@ namespace VULKAN_HPP_NAMESPACE
return m_privateDataSlot;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_privateDataSlot )
+ {
+ getDispatcher()->vkDestroyPrivateDataSlot( static_cast<VkDevice>( m_device ),
+ static_cast<VkPrivateDataSlot>( m_privateDataSlot ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_privateDataSlot = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -8434,10 +8934,18 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_privateDataSlot, rhs.m_privateDataSlot );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::PrivateDataSlot m_privateDataSlot = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
@@ -8446,8 +8954,7 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkQueryPool;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool;
@@ -8459,11 +8966,11 @@ namespace VULKAN_HPP_NAMESPACE
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- device.getDispatcher()->vkCreateQueryPool( static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkQueryPool *>( &m_queryPool ) ) );
+ VULKAN_HPP_NAMESPACE::Result result =
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateQueryPool( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkQueryPool *>( &m_queryPool ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateQueryPool" );
@@ -8477,18 +8984,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_queryPool( queryPool )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
QueryPool( std::nullptr_t ) {}
~QueryPool()
{
- if ( m_queryPool )
- {
- getDispatcher()->vkDestroyQueryPool( static_cast<VkDevice>( m_device ),
- static_cast<VkQueryPool>( m_queryPool ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
QueryPool() = delete;
@@ -8498,18 +9001,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_queryPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_queryPool, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
QueryPool & operator=( QueryPool const & ) = delete;
QueryPool & operator =( QueryPool && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_queryPool )
- {
- getDispatcher()->vkDestroyQueryPool( static_cast<VkDevice>( m_device ),
- static_cast<VkQueryPool>( m_queryPool ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_queryPool = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_queryPool, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
@@ -8523,6 +9022,19 @@ namespace VULKAN_HPP_NAMESPACE
return m_queryPool;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_queryPool )
+ {
+ getDispatcher()->vkDestroyQueryPool(
+ static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( m_queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_queryPool = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -8534,6 +9046,14 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::QueryPool & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_queryPool, rhs.m_queryPool );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
//=== VK_VERSION_1_0 ===
template <typename DataType>
@@ -8562,7 +9082,7 @@ namespace VULKAN_HPP_NAMESPACE
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::QueryPool m_queryPool = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
@@ -8571,44 +9091,49 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkQueue;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eQueue;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueue;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue;
public:
- Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- uint32_t queueFamilyIndex,
- uint32_t queueIndex )
+ Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, uint32_t queueFamilyIndex, uint32_t queueIndex )
: m_dispatcher( device.getDispatcher() )
{
- getDispatcher()->vkGetDeviceQueue(
- static_cast<VkDevice>( *device ), queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( &m_queue ) );
+ getDispatcher()->vkGetDeviceQueue( static_cast<VkDevice>( *device ), queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( &m_queue ) );
}
- Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo )
+ Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo )
: m_dispatcher( device.getDispatcher() )
{
- getDispatcher()->vkGetDeviceQueue2( static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ),
- reinterpret_cast<VkQueue *>( &m_queue ) );
+ getDispatcher()->vkGetDeviceQueue2(
+ static_cast<VkDevice>( *device ), reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ), reinterpret_cast<VkQueue *>( &m_queue ) );
}
- Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkQueue queue )
- : m_queue( queue ), m_dispatcher( device.getDispatcher() )
- {}
+ Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkQueue queue ) : m_queue( queue ), m_dispatcher( device.getDispatcher() )
+ {
+ }
Queue( std::nullptr_t ) {}
- Queue() = delete;
- Queue( Queue const & ) = delete;
+ ~Queue()
+ {
+ clear();
+ }
+
+ Queue() = delete;
+ Queue( Queue const & rhs ) : m_queue( rhs.m_queue ), m_dispatcher( rhs.m_dispatcher ) {}
Queue( Queue && rhs ) VULKAN_HPP_NOEXCEPT
: m_queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_queue, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
- Queue & operator=( Queue const & ) = delete;
- Queue & operator =( Queue && rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ }
+ Queue & operator=( Queue const & rhs )
+ {
+ m_queue = rhs.m_queue;
+ m_dispatcher = rhs.m_dispatcher;
+ return *this;
+ }
+ Queue & operator=( Queue && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
@@ -8623,46 +9148,54 @@ namespace VULKAN_HPP_NAMESPACE
return m_queue;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ m_queue = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Queue & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_queue, rhs.m_queue );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
//=== VK_VERSION_1_0 ===
- void submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits,
- VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
+ void submit( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits,
+ VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
void waitIdle() const;
- void bindSparse( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo,
- VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
+ void bindSparse( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo,
+ VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
//=== VK_VERSION_1_3 ===
- void submit2( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,
- VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
+ void submit2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,
+ VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
//=== VK_KHR_swapchain ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result
- presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo ) const;
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo ) const;
//=== VK_EXT_debug_utils ===
- void
- beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT;
+ void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT;
void endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT;
- void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const
- VULKAN_HPP_NOEXCEPT;
+ void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_NV_device_diagnostic_checkpoints ===
- VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV>
- getCheckpointDataNV() const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV> getCheckpointDataNV() const;
//=== VK_INTEL_performance_query ===
@@ -8670,11 +9203,10 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_synchronization2 ===
- void submit2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,
- VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
+ void submit2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,
+ VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
- VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV>
- getCheckpointData2NV() const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV> getCheckpointData2NV() const;
private:
VULKAN_HPP_NAMESPACE::Queue m_queue = {};
@@ -8686,8 +9218,7 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkRenderPass;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass;
@@ -8735,18 +9266,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_renderPass( renderPass )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
RenderPass( std::nullptr_t ) {}
~RenderPass()
{
- if ( m_renderPass )
- {
- getDispatcher()->vkDestroyRenderPass( static_cast<VkDevice>( m_device ),
- static_cast<VkRenderPass>( m_renderPass ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
RenderPass() = delete;
@@ -8756,18 +9283,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_renderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_renderPass, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
RenderPass & operator=( RenderPass const & ) = delete;
RenderPass & operator =( RenderPass && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_renderPass )
- {
- getDispatcher()->vkDestroyRenderPass( static_cast<VkDevice>( m_device ),
- static_cast<VkRenderPass>( m_renderPass ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_renderPass = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_renderPass, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
@@ -8781,6 +9304,19 @@ namespace VULKAN_HPP_NAMESPACE
return m_renderPass;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_renderPass )
+ {
+ getDispatcher()->vkDestroyRenderPass(
+ static_cast<VkDevice>( m_device ), static_cast<VkRenderPass>( m_renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_renderPass = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -8792,19 +9328,26 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::RenderPass & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_renderPass, rhs.m_renderPass );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
//=== VK_VERSION_1_0 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D getRenderAreaGranularity() const VULKAN_HPP_NOEXCEPT;
//=== VK_HUAWEI_subpass_shading ===
- VULKAN_HPP_NODISCARD std::pair<VULKAN_HPP_NAMESPACE::Result, VULKAN_HPP_NAMESPACE::Extent2D>
- getSubpassShadingMaxWorkgroupSizeHUAWEI() const;
+ VULKAN_HPP_NODISCARD std::pair<VULKAN_HPP_NAMESPACE::Result, VULKAN_HPP_NAMESPACE::Extent2D> getSubpassShadingMaxWorkgroupSizeHUAWEI() const;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::RenderPass m_renderPass = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
@@ -8813,8 +9356,7 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkSampler;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eSampler;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSampler;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler;
@@ -8826,11 +9368,11 @@ namespace VULKAN_HPP_NAMESPACE
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- device.getDispatcher()->vkCreateSampler( static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkSampler *>( &m_sampler ) ) );
+ VULKAN_HPP_NAMESPACE::Result result =
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateSampler( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkSampler *>( &m_sampler ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateSampler" );
@@ -8844,18 +9386,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_sampler( sampler )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
Sampler( std::nullptr_t ) {}
~Sampler()
{
- if ( m_sampler )
- {
- getDispatcher()->vkDestroySampler( static_cast<VkDevice>( m_device ),
- static_cast<VkSampler>( m_sampler ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
Sampler() = delete;
@@ -8865,18 +9403,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_sampler( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_sampler, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
Sampler & operator=( Sampler const & ) = delete;
Sampler & operator =( Sampler && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_sampler )
- {
- getDispatcher()->vkDestroySampler( static_cast<VkDevice>( m_device ),
- static_cast<VkSampler>( m_sampler ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_sampler = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_sampler, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
@@ -8890,6 +9424,19 @@ namespace VULKAN_HPP_NAMESPACE
return m_sampler;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_sampler )
+ {
+ getDispatcher()->vkDestroySampler(
+ static_cast<VkDevice>( m_device ), static_cast<VkSampler>( m_sampler ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_sampler = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -8901,10 +9448,18 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Sampler & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_sampler, rhs.m_sampler );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::Sampler m_sampler = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
@@ -8913,53 +9468,44 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkSamplerYcbcrConversion;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion;
public:
- SamplerYcbcrConversion(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ SamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateSamplerYcbcrConversion(
- static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkSamplerYcbcrConversion *>( &m_ycbcrConversion ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ device.getDispatcher()->vkCreateSamplerYcbcrConversion( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkSamplerYcbcrConversion *>( &m_ycbcrConversion ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateSamplerYcbcrConversion" );
}
}
- SamplerYcbcrConversion(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VkSamplerYcbcrConversion ycbcrConversion,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ SamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VkSamplerYcbcrConversion ycbcrConversion,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_ycbcrConversion( ycbcrConversion )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
SamplerYcbcrConversion( std::nullptr_t ) {}
~SamplerYcbcrConversion()
{
- if ( m_ycbcrConversion )
- {
- getDispatcher()->vkDestroySamplerYcbcrConversion(
- static_cast<VkDevice>( m_device ),
- static_cast<VkSamplerYcbcrConversion>( m_ycbcrConversion ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
SamplerYcbcrConversion() = delete;
@@ -8969,19 +9515,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_ycbcrConversion( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_ycbcrConversion, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
SamplerYcbcrConversion & operator=( SamplerYcbcrConversion const & ) = delete;
- SamplerYcbcrConversion & operator=( SamplerYcbcrConversion && rhs ) VULKAN_HPP_NOEXCEPT
+ SamplerYcbcrConversion & operator =( SamplerYcbcrConversion && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_ycbcrConversion )
- {
- getDispatcher()->vkDestroySamplerYcbcrConversion(
- static_cast<VkDevice>( m_device ),
- static_cast<VkSamplerYcbcrConversion>( m_ycbcrConversion ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_ycbcrConversion = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_ycbcrConversion, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
@@ -8995,6 +9536,20 @@ namespace VULKAN_HPP_NAMESPACE
return m_ycbcrConversion;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_ycbcrConversion )
+ {
+ getDispatcher()->vkDestroySamplerYcbcrConversion( static_cast<VkDevice>( m_device ),
+ static_cast<VkSamplerYcbcrConversion>( m_ycbcrConversion ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_ycbcrConversion = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -9006,10 +9561,18 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_ycbcrConversion, rhs.m_ycbcrConversion );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion m_ycbcrConversion = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
@@ -9018,8 +9581,7 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkSemaphore;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore;
@@ -9031,11 +9593,11 @@ namespace VULKAN_HPP_NAMESPACE
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- device.getDispatcher()->vkCreateSemaphore( static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkSemaphore *>( &m_semaphore ) ) );
+ VULKAN_HPP_NAMESPACE::Result result =
+ static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateSemaphore( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkSemaphore *>( &m_semaphore ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateSemaphore" );
@@ -9049,18 +9611,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_semaphore( semaphore )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
Semaphore( std::nullptr_t ) {}
~Semaphore()
{
- if ( m_semaphore )
- {
- getDispatcher()->vkDestroySemaphore( static_cast<VkDevice>( m_device ),
- static_cast<VkSemaphore>( m_semaphore ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
Semaphore() = delete;
@@ -9070,18 +9628,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_semaphore( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_semaphore, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
Semaphore & operator=( Semaphore const & ) = delete;
Semaphore & operator =( Semaphore && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_semaphore )
- {
- getDispatcher()->vkDestroySemaphore( static_cast<VkDevice>( m_device ),
- static_cast<VkSemaphore>( m_semaphore ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_semaphore = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_semaphore, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
@@ -9095,6 +9649,19 @@ namespace VULKAN_HPP_NAMESPACE
return m_semaphore;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_semaphore )
+ {
+ getDispatcher()->vkDestroySemaphore(
+ static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( m_semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_semaphore = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -9106,6 +9673,14 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Semaphore & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_semaphore, rhs.m_semaphore );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
//=== VK_VERSION_1_2 ===
VULKAN_HPP_NODISCARD uint64_t getCounterValue() const;
@@ -9117,7 +9692,7 @@ namespace VULKAN_HPP_NAMESPACE
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::Semaphore m_semaphore = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
@@ -9126,52 +9701,44 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkShaderModule;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule;
public:
- ShaderModule(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ ShaderModule( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateShaderModule(
- static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkShaderModule *>( &m_shaderModule ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ device.getDispatcher()->vkCreateShaderModule( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkShaderModule *>( &m_shaderModule ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateShaderModule" );
}
}
- ShaderModule(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VkShaderModule shaderModule,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ ShaderModule( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VkShaderModule shaderModule,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_shaderModule( shaderModule )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
ShaderModule( std::nullptr_t ) {}
~ShaderModule()
{
- if ( m_shaderModule )
- {
- getDispatcher()->vkDestroyShaderModule( static_cast<VkDevice>( m_device ),
- static_cast<VkShaderModule>( m_shaderModule ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
ShaderModule() = delete;
@@ -9181,18 +9748,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_shaderModule( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_shaderModule, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
ShaderModule & operator=( ShaderModule const & ) = delete;
ShaderModule & operator =( ShaderModule && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_shaderModule )
- {
- getDispatcher()->vkDestroyShaderModule( static_cast<VkDevice>( m_device ),
- static_cast<VkShaderModule>( m_shaderModule ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_shaderModule = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_shaderModule, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
@@ -9206,6 +9769,19 @@ namespace VULKAN_HPP_NAMESPACE
return m_shaderModule;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_shaderModule )
+ {
+ getDispatcher()->vkDestroyShaderModule(
+ static_cast<VkDevice>( m_device ), static_cast<VkShaderModule>( m_shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_shaderModule = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -9217,10 +9793,22 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderModule & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_shaderModule, rhs.m_shaderModule );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
+ //=== VK_EXT_shader_module_identifier ===
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT getIdentifierEXT() const VULKAN_HPP_NOEXCEPT;
+
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::ShaderModule m_shaderModule = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
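
For reference, a minimal usage sketch of the reworked ShaderModule wrapper above (not part of the generated file). It assumes a valid vk::raii::Device named device and a std::vector<uint32_t> named spirv holding SPIR-V code, with vk as the default VULKAN_HPP_NAMESPACE and raii as the default RAII namespace; the getIdentifierEXT() call further assumes VK_EXT_shader_module_identifier was enabled on the device:

    // Sketch only; `device` and `spirv` are assumed to exist as described above.
    vk::ShaderModuleCreateInfo createInfo( {}, spirv.size() * sizeof( uint32_t ), spirv.data() );
    vk::raii::ShaderModule moduleA( device, createInfo );
    vk::raii::ShaderModule moduleB( nullptr );   // empty wrapper; owns no handle yet
    moduleA.swap( moduleB );                     // new member: exchange handles without destroying either
    vk::ShaderModuleIdentifierEXT id = moduleB.getIdentifierEXT();  // new VK_EXT_shader_module_identifier query
    moduleB.clear();                             // new member: destroy the handle early and reset all members

The new clear() member factors the destruction logic out of the destructor and move assignment, so explicitly releasing a handle before the wrapper leaves scope no longer requires moving into a temporary.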
@@ -9229,8 +9817,7 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkSurfaceKHR;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR;
@@ -9243,12 +9830,11 @@ namespace VULKAN_HPP_NAMESPACE
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( instance.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateAndroidSurfaceKHR(
- static_cast<VkInstance>( *instance ),
- reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ instance.getDispatcher()->vkCreateAndroidSurfaceKHR( static_cast<VkInstance>( *instance ),
+ reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateAndroidSurfaceKHR" );
@@ -9264,12 +9850,11 @@ namespace VULKAN_HPP_NAMESPACE
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( instance.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateDirectFBSurfaceEXT(
- static_cast<VkInstance>( *instance ),
- reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ instance.getDispatcher()->vkCreateDirectFBSurfaceEXT( static_cast<VkInstance>( *instance ),
+ reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateDirectFBSurfaceEXT" );
@@ -9284,12 +9869,11 @@ namespace VULKAN_HPP_NAMESPACE
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( instance.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateDisplayPlaneSurfaceKHR(
- static_cast<VkInstance>( *instance ),
- reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ instance.getDispatcher()->vkCreateDisplayPlaneSurfaceKHR( static_cast<VkInstance>( *instance ),
+ reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateDisplayPlaneSurfaceKHR" );
@@ -9303,12 +9887,11 @@ namespace VULKAN_HPP_NAMESPACE
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( instance.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateHeadlessSurfaceEXT(
- static_cast<VkInstance>( *instance ),
- reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ instance.getDispatcher()->vkCreateHeadlessSurfaceEXT( static_cast<VkInstance>( *instance ),
+ reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateHeadlessSurfaceEXT" );
@@ -9323,12 +9906,11 @@ namespace VULKAN_HPP_NAMESPACE
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( instance.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateIOSSurfaceMVK(
- static_cast<VkInstance>( *instance ),
- reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ instance.getDispatcher()->vkCreateIOSSurfaceMVK( static_cast<VkInstance>( *instance ),
+ reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateIOSSurfaceMVK" );
@@ -9344,12 +9926,11 @@ namespace VULKAN_HPP_NAMESPACE
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( instance.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateImagePipeSurfaceFUCHSIA(
- static_cast<VkInstance>( *instance ),
- reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ instance.getDispatcher()->vkCreateImagePipeSurfaceFUCHSIA( static_cast<VkInstance>( *instance ),
+ reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateImagePipeSurfaceFUCHSIA" );
@@ -9365,12 +9946,11 @@ namespace VULKAN_HPP_NAMESPACE
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( instance.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateMacOSSurfaceMVK(
- static_cast<VkInstance>( *instance ),
- reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ instance.getDispatcher()->vkCreateMacOSSurfaceMVK( static_cast<VkInstance>( *instance ),
+ reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateMacOSSurfaceMVK" );
@@ -9386,12 +9966,11 @@ namespace VULKAN_HPP_NAMESPACE
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( instance.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateMetalSurfaceEXT(
- static_cast<VkInstance>( *instance ),
- reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ instance.getDispatcher()->vkCreateMetalSurfaceEXT( static_cast<VkInstance>( *instance ),
+ reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateMetalSurfaceEXT" );
@@ -9407,12 +9986,11 @@ namespace VULKAN_HPP_NAMESPACE
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( instance.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateScreenSurfaceQNX(
- static_cast<VkInstance>( *instance ),
- reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ instance.getDispatcher()->vkCreateScreenSurfaceQNX( static_cast<VkInstance>( *instance ),
+ reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateScreenSurfaceQNX" );
@@ -9428,12 +10006,11 @@ namespace VULKAN_HPP_NAMESPACE
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( instance.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateStreamDescriptorSurfaceGGP(
- static_cast<VkInstance>( *instance ),
- reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ instance.getDispatcher()->vkCreateStreamDescriptorSurfaceGGP( static_cast<VkInstance>( *instance ),
+ reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateStreamDescriptorSurfaceGGP" );
@@ -9449,12 +10026,11 @@ namespace VULKAN_HPP_NAMESPACE
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( instance.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateViSurfaceNN(
- static_cast<VkInstance>( *instance ),
- reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ instance.getDispatcher()->vkCreateViSurfaceNN( static_cast<VkInstance>( *instance ),
+ reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateViSurfaceNN" );
@@ -9470,12 +10046,11 @@ namespace VULKAN_HPP_NAMESPACE
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( instance.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateWaylandSurfaceKHR(
- static_cast<VkInstance>( *instance ),
- reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ instance.getDispatcher()->vkCreateWaylandSurfaceKHR( static_cast<VkInstance>( *instance ),
+ reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateWaylandSurfaceKHR" );
@@ -9491,12 +10066,11 @@ namespace VULKAN_HPP_NAMESPACE
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( instance.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateWin32SurfaceKHR(
- static_cast<VkInstance>( *instance ),
- reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ instance.getDispatcher()->vkCreateWin32SurfaceKHR( static_cast<VkInstance>( *instance ),
+ reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateWin32SurfaceKHR" );
@@ -9512,12 +10086,11 @@ namespace VULKAN_HPP_NAMESPACE
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( instance.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateXcbSurfaceKHR(
- static_cast<VkInstance>( *instance ),
- reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ instance.getDispatcher()->vkCreateXcbSurfaceKHR( static_cast<VkInstance>( *instance ),
+ reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateXcbSurfaceKHR" );
@@ -9533,12 +10106,11 @@ namespace VULKAN_HPP_NAMESPACE
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( instance.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( instance.getDispatcher()->vkCreateXlibSurfaceKHR(
- static_cast<VkInstance>( *instance ),
- reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ instance.getDispatcher()->vkCreateXlibSurfaceKHR( static_cast<VkInstance>( *instance ),
+ reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateXlibSurfaceKHR" );
@@ -9553,18 +10125,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_surface( surface )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( instance.getDispatcher() )
- {}
+ {
+ }
SurfaceKHR( std::nullptr_t ) {}
~SurfaceKHR()
{
- if ( m_surface )
- {
- getDispatcher()->vkDestroySurfaceKHR( static_cast<VkInstance>( m_instance ),
- static_cast<VkSurfaceKHR>( m_surface ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
SurfaceKHR() = delete;
@@ -9574,18 +10142,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_surface( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_surface, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
SurfaceKHR & operator=( SurfaceKHR const & ) = delete;
SurfaceKHR & operator =( SurfaceKHR && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_surface )
- {
- getDispatcher()->vkDestroySurfaceKHR( static_cast<VkInstance>( m_instance ),
- static_cast<VkSurfaceKHR>( m_surface ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_instance = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} );
m_surface = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_surface, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
@@ -9599,6 +10163,19 @@ namespace VULKAN_HPP_NAMESPACE
return m_surface;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_surface )
+ {
+ getDispatcher()->vkDestroySurfaceKHR(
+ static_cast<VkInstance>( m_instance ), static_cast<VkSurfaceKHR>( m_surface ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_instance = nullptr;
+ m_surface = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Instance getInstance() const
{
return m_instance;
@@ -9610,10 +10187,18 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_instance, rhs.m_instance );
+ std::swap( m_surface, rhs.m_surface );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
private:
VULKAN_HPP_NAMESPACE::Instance m_instance = {};
VULKAN_HPP_NAMESPACE::SurfaceKHR m_surface = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr;
};
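
As a usage sketch for the SurfaceKHR wrapper above (editorial, not part of the diff), the headless path is the only platform-independent constructor; this assumes a valid vk::raii::Instance named instance created with VK_EXT_headless_surface enabled:

    vk::HeadlessSurfaceCreateInfoEXT headlessCreateInfo{};        // no platform window handle required
    vk::raii::SurfaceKHR surface( instance, headlessCreateInfo ); // throws a vk exception on failure
    // ... use *surface for swapchain creation, present-support queries, etc. ...
    surface.clear();                                              // new member: destroy the surface before scope exit

The platform-specific constructors (Android, Win32, Xlib, Wayland, Metal, ...) shown above follow exactly the same pattern, differing only in the create-info structure they take.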
@@ -9622,52 +10207,44 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkSwapchainKHR;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR;
public:
- SwapchainKHR(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ SwapchainKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateSwapchainKHR(
- static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkSwapchainKHR *>( &m_swapchain ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ device.getDispatcher()->vkCreateSwapchainKHR( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkSwapchainKHR *>( &m_swapchain ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateSwapchainKHR" );
}
}
- SwapchainKHR(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VkSwapchainKHR swapchain,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ SwapchainKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VkSwapchainKHR swapchain,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_swapchain( swapchain )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
SwapchainKHR( std::nullptr_t ) {}
~SwapchainKHR()
{
- if ( m_swapchain )
- {
- getDispatcher()->vkDestroySwapchainKHR( static_cast<VkDevice>( m_device ),
- static_cast<VkSwapchainKHR>( m_swapchain ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
SwapchainKHR() = delete;
@@ -9677,18 +10254,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_swapchain( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_swapchain, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
SwapchainKHR & operator=( SwapchainKHR const & ) = delete;
SwapchainKHR & operator =( SwapchainKHR && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_swapchain )
- {
- getDispatcher()->vkDestroySwapchainKHR( static_cast<VkDevice>( m_device ),
- static_cast<VkSwapchainKHR>( m_swapchain ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_swapchain = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_swapchain, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
@@ -9702,6 +10275,19 @@ namespace VULKAN_HPP_NAMESPACE
return m_swapchain;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_swapchain )
+ {
+ getDispatcher()->vkDestroySwapchainKHR(
+ static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_swapchain = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -9713,9 +10299,17 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_swapchain, rhs.m_swapchain );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
//=== VK_KHR_swapchain ===
- VULKAN_HPP_NODISCARD std::vector<VkImage> getImages() const;
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::Image> getImages() const;
VULKAN_HPP_NODISCARD std::pair<VULKAN_HPP_NAMESPACE::Result, uint32_t>
acquireNextImage( uint64_t timeout,
@@ -9730,8 +10324,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE getRefreshCycleDurationGOOGLE() const;
- VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>
- getPastPresentationTimingGOOGLE() const;
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE> getPastPresentationTimingGOOGLE() const;
//=== VK_KHR_shared_presentable_image ===
@@ -9756,28 +10349,25 @@ namespace VULKAN_HPP_NAMESPACE
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::SwapchainKHR m_swapchain = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
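
A brief sketch of the changed VK_KHR_swapchain entry points above (editorial, not part of the diff): getImages() now returns vk::Image handles instead of raw VkImage. This assumes a valid vk::raii::Device named device, a filled-in vk::SwapchainCreateInfoKHR named swapchainCreateInfo, and a vk::raii::Semaphore named acquireSemaphore:

    vk::raii::SwapchainKHR swapchain( device, swapchainCreateInfo );
    std::vector<vk::Image> images = swapchain.getImages();        // previously std::vector<VkImage>
    auto [result, imageIndex] = swapchain.acquireNextImage( UINT64_MAX, *acquireSemaphore );
    if ( ( result == vk::Result::eSuccess ) || ( result == vk::Result::eSuboptimalKHR ) )
    {
      // record and submit work against images[imageIndex] ...
    }

Callers that previously wrapped the returned VkImage handles themselves can drop that conversion after this update.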
class SwapchainKHRs : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR>
{
public:
- SwapchainKHRs(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ SwapchainKHRs( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
std::vector<VkSwapchainKHR> swapchains( createInfos.size() );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateSharedSwapchainsKHR(
- static_cast<VkDevice>( *device ),
- createInfos.size(),
- reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
- swapchains.data() ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateSharedSwapchainsKHR(
+ static_cast<VkDevice>( *device ),
+ createInfos.size(),
+ reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+ swapchains.data() ) );
if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
this->reserve( createInfos.size() );
@@ -9792,6 +10382,8 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ SwapchainKHRs( std::nullptr_t ) {}
+
SwapchainKHRs() = delete;
SwapchainKHRs( SwapchainKHRs const & ) = delete;
SwapchainKHRs( SwapchainKHRs && rhs ) = default;
@@ -9804,53 +10396,44 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkValidationCacheEXT;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT;
public:
- ValidationCacheEXT(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ ValidationCacheEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateValidationCacheEXT(
- static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkValidationCacheEXT *>( &m_validationCache ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ device.getDispatcher()->vkCreateValidationCacheEXT( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkValidationCacheEXT *>( &m_validationCache ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateValidationCacheEXT" );
}
}
- ValidationCacheEXT(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VkValidationCacheEXT validationCache,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ ValidationCacheEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VkValidationCacheEXT validationCache,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_validationCache( validationCache )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
ValidationCacheEXT( std::nullptr_t ) {}
~ValidationCacheEXT()
{
- if ( m_validationCache )
- {
- getDispatcher()->vkDestroyValidationCacheEXT(
- static_cast<VkDevice>( m_device ),
- static_cast<VkValidationCacheEXT>( m_validationCache ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
ValidationCacheEXT() = delete;
@@ -9860,19 +10443,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_validationCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_validationCache, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
ValidationCacheEXT & operator=( ValidationCacheEXT const & ) = delete;
ValidationCacheEXT & operator =( ValidationCacheEXT && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_validationCache )
- {
- getDispatcher()->vkDestroyValidationCacheEXT(
- static_cast<VkDevice>( m_device ),
- static_cast<VkValidationCacheEXT>( m_validationCache ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_validationCache = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_validationCache, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
@@ -9886,6 +10464,20 @@ namespace VULKAN_HPP_NAMESPACE
return m_validationCache;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_validationCache )
+ {
+ getDispatcher()->vkDestroyValidationCacheEXT( static_cast<VkDevice>( m_device ),
+ static_cast<VkValidationCacheEXT>( m_validationCache ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_validationCache = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -9897,16 +10489,24 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_validationCache, rhs.m_validationCache );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
//=== VK_EXT_validation_cache ===
- void merge( ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches ) const;
+ void merge( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches ) const;
VULKAN_HPP_NODISCARD std::vector<uint8_t> getData() const;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::ValidationCacheEXT m_validationCache = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
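
A short sketch of the two VK_EXT_validation_cache members visible above (editorial, not part of the diff), assuming a valid device with the extension enabled and two caches already constructed as vk::raii::ValidationCacheEXT named cacheA and cacheB:

    cacheA.merge( { *cacheB } );                   // merge cacheB into cacheA; ArrayProxy is now namespace-qualified in the declaration
    std::vector<uint8_t> blob = cacheA.getData();  // serialized cache contents, e.g. to persist to disk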
@@ -9916,52 +10516,44 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkVideoSessionKHR;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
- VideoSessionKHR(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ VideoSessionKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateVideoSessionKHR(
- static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkVideoSessionKHR *>( &m_videoSession ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ device.getDispatcher()->vkCreateVideoSessionKHR( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkVideoSessionKHR *>( &m_videoSession ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateVideoSessionKHR" );
}
}
- VideoSessionKHR(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VkVideoSessionKHR videoSession,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ VideoSessionKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VkVideoSessionKHR videoSession,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_videoSession( videoSession )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
VideoSessionKHR( std::nullptr_t ) {}
~VideoSessionKHR()
{
- if ( m_videoSession )
- {
- getDispatcher()->vkDestroyVideoSessionKHR( static_cast<VkDevice>( m_device ),
- static_cast<VkVideoSessionKHR>( m_videoSession ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
VideoSessionKHR() = delete;
@@ -9971,18 +10563,14 @@ namespace VULKAN_HPP_NAMESPACE
, m_videoSession( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_videoSession, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
VideoSessionKHR & operator=( VideoSessionKHR const & ) = delete;
VideoSessionKHR & operator =( VideoSessionKHR && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_videoSession )
- {
- getDispatcher()->vkDestroyVideoSessionKHR( static_cast<VkDevice>( m_device ),
- static_cast<VkVideoSessionKHR>( m_videoSession ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_videoSession = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_videoSession, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
@@ -9996,6 +10584,20 @@ namespace VULKAN_HPP_NAMESPACE
return m_videoSession;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_videoSession )
+ {
+ getDispatcher()->vkDestroyVideoSessionKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkVideoSessionKHR>( m_videoSession ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_videoSession = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -10007,17 +10609,24 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_videoSession, rhs.m_videoSession );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
//=== VK_KHR_video_queue ===
- VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR> getMemoryRequirements() const;
+ VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR> getMemoryRequirements() const;
- void
- bindMemory( ArrayProxy<const VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR> const & videoSessionBindMemories ) const;
+ void bindMemory( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR> const & bindSessionMemoryInfos ) const;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::VideoSessionKHR m_videoSession = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
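
A heavily hedged sketch of the renamed VK_KHR_video_queue entry points above (editorial, not part of the diff): getMemoryRequirements() now returns vk::VideoSessionMemoryRequirementsKHR and bindMemory() takes vk::BindVideoSessionMemoryInfoKHR, matching the reworked video structures in this header revision. It assumes VK_ENABLE_BETA_EXTENSIONS is defined, a valid vk::raii::Device named device, and a filled vk::VideoSessionCreateInfoKHR named videoSessionCreateInfo; the memory allocation itself is omitted:

    #if defined( VK_ENABLE_BETA_EXTENSIONS )
    vk::raii::VideoSessionKHR videoSession( device, videoSessionCreateInfo );
    std::vector<vk::VideoSessionMemoryRequirementsKHR> requirements = videoSession.getMemoryRequirements();
    std::vector<vk::BindVideoSessionMemoryInfoKHR> bindInfos;
    // ... allocate vk::DeviceMemory for each entry in `requirements` and fill one
    //     vk::BindVideoSessionMemoryInfoKHR per memory-bind index (omitted) ...
    videoSession.bindMemory( bindInfos );
    #endif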
@@ -10028,81 +10637,65 @@ namespace VULKAN_HPP_NAMESPACE
public:
using CType = VkVideoSessionParametersKHR;
- static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
- VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionParametersKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionParametersKHR;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
- VideoSessionParametersKHR(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ VideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateVideoSessionParametersKHR(
- static_cast<VkDevice>( *device ),
- reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
- reinterpret_cast<VkVideoSessionParametersKHR *>( &m_videoSessionParameters ) ) );
+ VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+ device.getDispatcher()->vkCreateVideoSessionParametersKHR( static_cast<VkDevice>( *device ),
+ reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+ reinterpret_cast<VkVideoSessionParametersKHR *>( &m_videoSessionParameters ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateVideoSessionParametersKHR" );
}
}
- VideoSessionParametersKHR(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
- VkVideoSessionParametersKHR videoSessionParameters,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+ VideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+ VkVideoSessionParametersKHR videoSessionParameters,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_videoSessionParameters( videoSessionParameters )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
- {}
+ {
+ }
VideoSessionParametersKHR( std::nullptr_t ) {}
~VideoSessionParametersKHR()
{
- if ( m_videoSessionParameters )
- {
- getDispatcher()->vkDestroyVideoSessionParametersKHR(
- static_cast<VkDevice>( m_device ),
- static_cast<VkVideoSessionParametersKHR>( m_videoSessionParameters ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
+ clear();
}
VideoSessionParametersKHR() = delete;
VideoSessionParametersKHR( VideoSessionParametersKHR const & ) = delete;
VideoSessionParametersKHR( VideoSessionParametersKHR && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
- , m_videoSessionParameters(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_videoSessionParameters, {} ) )
+ , m_videoSessionParameters( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_videoSessionParameters, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
- {}
+ {
+ }
VideoSessionParametersKHR & operator=( VideoSessionParametersKHR const & ) = delete;
- VideoSessionParametersKHR & operator=( VideoSessionParametersKHR && rhs ) VULKAN_HPP_NOEXCEPT
+ VideoSessionParametersKHR & operator =( VideoSessionParametersKHR && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
- if ( m_videoSessionParameters )
- {
- getDispatcher()->vkDestroyVideoSessionParametersKHR(
- static_cast<VkDevice>( m_device ),
- static_cast<VkVideoSessionParametersKHR>( m_videoSessionParameters ),
- reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
- }
- m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
- m_videoSessionParameters =
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_videoSessionParameters, {} );
- m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
- m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+ clear();
+ m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+ m_videoSessionParameters = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_videoSessionParameters, {} );
+ m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+ m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
}
return *this;
}
@@ -10112,6 +10705,20 @@ namespace VULKAN_HPP_NAMESPACE
return m_videoSessionParameters;
}
+ void clear() VULKAN_HPP_NOEXCEPT
+ {
+ if ( m_videoSessionParameters )
+ {
+ getDispatcher()->vkDestroyVideoSessionParametersKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkVideoSessionParametersKHR>( m_videoSessionParameters ),
+ reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+ }
+ m_device = nullptr;
+ m_videoSessionParameters = nullptr;
+ m_allocator = nullptr;
+ m_dispatcher = nullptr;
+ }
+
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
@@ -10123,6 +10730,14 @@ namespace VULKAN_HPP_NAMESPACE
return m_dispatcher;
}
+ void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ std::swap( m_device, rhs.m_device );
+ std::swap( m_videoSessionParameters, rhs.m_videoSessionParameters );
+ std::swap( m_allocator, rhs.m_allocator );
+ std::swap( m_dispatcher, rhs.m_dispatcher );
+ }
+
//=== VK_KHR_video_queue ===
void update( const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo ) const;
@@ -10130,7 +10745,7 @@ namespace VULKAN_HPP_NAMESPACE
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR m_videoSessionParameters = {};
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
@@ -10141,319 +10756,259 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_VERSION_1_0 ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Instance Context::createInstance(
- VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Instance
+ Context::createInstance( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::Instance( *this, createInfo, allocator );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice>
- Instance::enumeratePhysicalDevices() const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice> Instance::enumeratePhysicalDevices() const
{
return VULKAN_HPP_RAII_NAMESPACE::PhysicalDevices( *this );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures
- PhysicalDevice::getFeatures() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures PhysicalDevice::getFeatures() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features;
getDispatcher()->vkGetPhysicalDeviceFeatures( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<VkPhysicalDeviceFeatures *>( &features ) );
+
return features;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties
- PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::FormatProperties formatProperties;
getDispatcher()->vkGetPhysicalDeviceFormatProperties(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- static_cast<VkFormat>( format ),
- reinterpret_cast<VkFormatProperties *>( &formatProperties ) );
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( &formatProperties ) );
+
return formatProperties;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageFormatProperties
- PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
+ PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
VULKAN_HPP_NAMESPACE::ImageType type,
VULKAN_HPP_NAMESPACE::ImageTiling tiling,
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
VULKAN_HPP_NAMESPACE::ImageCreateFlags flags ) const
{
VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- static_cast<VkFormat>( format ),
- static_cast<VkImageType>( type ),
- static_cast<VkImageTiling>( tiling ),
- static_cast<VkImageUsageFlags>( usage ),
- static_cast<VkImageCreateFlags>( flags ),
- reinterpret_cast<VkImageFormatProperties *>( &imageFormatProperties ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" );
- }
+ VkResult result = getDispatcher()->vkGetPhysicalDeviceImageFormatProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ static_cast<VkFormat>( format ),
+ static_cast<VkImageType>( type ),
+ static_cast<VkImageTiling>( tiling ),
+ static_cast<VkImageUsageFlags>( usage ),
+ static_cast<VkImageCreateFlags>( flags ),
+ reinterpret_cast<VkImageFormatProperties *>( &imageFormatProperties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" );
+
return imageFormatProperties;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties
- PhysicalDevice::getProperties() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties PhysicalDevice::getProperties() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties;
getDispatcher()->vkGetPhysicalDeviceProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<VkPhysicalDeviceProperties *>( &properties ) );
+
return properties;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties>
- PhysicalDevice::getQueueFamilyProperties() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties> PhysicalDevice::getQueueFamilyProperties() const
{
- uint32_t queueFamilyPropertyCount;
- getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties(
- static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, nullptr );
- std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties> queueFamilyProperties( queueFamilyPropertyCount );
- getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- &queueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
- VULKAN_HPP_ASSERT( queueFamilyPropertyCount == queueFamilyProperties.size() );
+ std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties> queueFamilyProperties;
+ uint32_t queueFamilyPropertyCount;
+ getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, nullptr );
+ queueFamilyProperties.resize( queueFamilyPropertyCount );
+ getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ &queueFamilyPropertyCount,
+ reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
+
+ VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+ if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+ {
+ queueFamilyProperties.resize( queueFamilyPropertyCount );
+ }
return queueFamilyProperties;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties
- PhysicalDevice::getMemoryProperties() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties PhysicalDevice::getMemoryProperties() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties;
- getDispatcher()->vkGetPhysicalDeviceMemoryProperties(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( &memoryProperties ) );
+ getDispatcher()->vkGetPhysicalDeviceMemoryProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( &memoryProperties ) );
+
return memoryProperties;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE PFN_vkVoidFunction
- Instance::getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT
{
- return getDispatcher()->vkGetInstanceProcAddr( static_cast<VkInstance>( m_instance ), name.c_str() );
+ PFN_vkVoidFunction result = getDispatcher()->vkGetInstanceProcAddr( static_cast<VkInstance>( m_instance ), name.c_str() );
+
+ return result;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE PFN_vkVoidFunction
- Device::getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT
{
- return getDispatcher()->vkGetDeviceProcAddr( static_cast<VkDevice>( m_device ), name.c_str() );
+ PFN_vkVoidFunction result = getDispatcher()->vkGetDeviceProcAddr( static_cast<VkDevice>( m_device ), name.c_str() );
+
+ return result;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Device PhysicalDevice::createDevice(
- VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Device
+ PhysicalDevice::createDevice( VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::Device( *this, createInfo, allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties>
- Context::enumerateInstanceExtensionProperties( Optional<const std::string> layerName ) const
+ Context::enumerateInstanceExtensionProperties( Optional<const std::string> layerName ) const
{
std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties> properties;
uint32_t propertyCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VkResult result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkEnumerateInstanceExtensionProperties(
- layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
+ result = getDispatcher()->vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkEnumerateInstanceExtensionProperties(
- layerName ? layerName->c_str() : nullptr,
- &propertyCount,
- reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
+ result = getDispatcher()->vkEnumerateInstanceExtensionProperties(
+ layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceExtensionProperties" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceExtensionProperties" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
return properties;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties>
- PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName ) const
+ PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName ) const
{
std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties> properties;
uint32_t propertyCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VkResult result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkEnumerateDeviceExtensionProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
- layerName ? layerName->c_str() : nullptr,
- &propertyCount,
- nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
+ result = getDispatcher()->vkEnumerateDeviceExtensionProperties(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkEnumerateDeviceExtensionProperties(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- layerName ? layerName->c_str() : nullptr,
- &propertyCount,
- reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
+ result = getDispatcher()->vkEnumerateDeviceExtensionProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ layerName ? layerName->c_str() : nullptr,
+ &propertyCount,
+ reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result,
- VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
return properties;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::LayerProperties>
- Context::enumerateInstanceLayerProperties() const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::LayerProperties> Context::enumerateInstanceLayerProperties() const
{
std::vector<VULKAN_HPP_NAMESPACE::LayerProperties> properties;
uint32_t propertyCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VkResult result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
+ result = getDispatcher()->vkEnumerateInstanceLayerProperties( &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkEnumerateInstanceLayerProperties(
- &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
+ result = getDispatcher()->vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceLayerProperties" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceLayerProperties" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
return properties;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::LayerProperties>
- PhysicalDevice::enumerateDeviceLayerProperties() const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::LayerProperties> PhysicalDevice::enumerateDeviceLayerProperties() const
{
std::vector<VULKAN_HPP_NAMESPACE::LayerProperties> properties;
uint32_t propertyCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VkResult result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkEnumerateDeviceLayerProperties(
- static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
+ result = getDispatcher()->vkEnumerateDeviceLayerProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkEnumerateDeviceLayerProperties(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- &propertyCount,
- reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
+ result = getDispatcher()->vkEnumerateDeviceLayerProperties(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
return properties;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Queue
- Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Queue Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const
{
return VULKAN_HPP_RAII_NAMESPACE::Queue( *this, queueFamilyIndex, queueIndex );
}
- VULKAN_HPP_INLINE void Queue::submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits,
- VULKAN_HPP_NAMESPACE::Fence fence ) const
+ VULKAN_HPP_INLINE void Queue::submit( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits,
+ VULKAN_HPP_NAMESPACE::Fence fence ) const
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkQueueSubmit( static_cast<VkQueue>( m_queue ),
- submits.size(),
- reinterpret_cast<const VkSubmitInfo *>( submits.data() ),
- static_cast<VkFence>( fence ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" );
- }
+ VkResult result = getDispatcher()->vkQueueSubmit(
+ static_cast<VkQueue>( m_queue ), submits.size(), reinterpret_cast<const VkSubmitInfo *>( submits.data() ), static_cast<VkFence>( fence ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" );
}
VULKAN_HPP_INLINE void Queue::waitIdle() const
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkQueueWaitIdle( static_cast<VkQueue>( m_queue ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" );
- }
+ VkResult result = getDispatcher()->vkQueueWaitIdle( static_cast<VkQueue>( m_queue ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" );
}
VULKAN_HPP_INLINE void Device::waitIdle() const
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkDeviceWaitIdle( static_cast<VkDevice>( m_device ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" );
- }
+ VkResult result = getDispatcher()->vkDeviceWaitIdle( static_cast<VkDevice>( m_device ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DeviceMemory Device::allocateMemory(
- VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DeviceMemory
+ Device::allocateMemory( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::DeviceMemory( *this, allocateInfo, allocator );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void *
- DeviceMemory::mapMemory( VULKAN_HPP_NAMESPACE::DeviceSize offset,
- VULKAN_HPP_NAMESPACE::DeviceSize size,
- VULKAN_HPP_NAMESPACE::MemoryMapFlags flags ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * DeviceMemory::mapMemory( VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::DeviceSize size,
+ VULKAN_HPP_NAMESPACE::MemoryMapFlags flags ) const
{
- void * pData;
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkMapMemory( static_cast<VkDevice>( m_device ),
- static_cast<VkDeviceMemory>( m_memory ),
- static_cast<VkDeviceSize>( offset ),
- static_cast<VkDeviceSize>( size ),
- static_cast<VkMemoryMapFlags>( flags ),
- &pData ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::DeviceMemory::mapMemory" );
- }
+ void * pData;
+ VkResult result = getDispatcher()->vkMapMemory( static_cast<VkDevice>( m_device ),
+ static_cast<VkDeviceMemory>( m_memory ),
+ static_cast<VkDeviceSize>( offset ),
+ static_cast<VkDeviceSize>( size ),
+ static_cast<VkMemoryMapFlags>( flags ),
+ &pData );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::DeviceMemory::mapMemory" );
+
return pData;
}
@@ -10462,609 +11017,502 @@ namespace VULKAN_HPP_NAMESPACE
getDispatcher()->vkUnmapMemory( static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( m_memory ) );
}
- VULKAN_HPP_INLINE void Device::flushMappedMemoryRanges(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges ) const
+ VULKAN_HPP_INLINE void
+ Device::flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges ) const
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkFlushMappedMemoryRanges(
- static_cast<VkDevice>( m_device ),
- memoryRanges.size(),
- reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" );
- }
+ VkResult result = getDispatcher()->vkFlushMappedMemoryRanges(
+ static_cast<VkDevice>( m_device ), memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" );
}
- VULKAN_HPP_INLINE void Device::invalidateMappedMemoryRanges(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges ) const
+ VULKAN_HPP_INLINE void
+ Device::invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges ) const
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkInvalidateMappedMemoryRanges(
- static_cast<VkDevice>( m_device ),
- memoryRanges.size(),
- reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" );
- }
+ VkResult result = getDispatcher()->vkInvalidateMappedMemoryRanges(
+ static_cast<VkDevice>( m_device ), memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize
- DeviceMemory::getCommitment() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize DeviceMemory::getCommitment() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes;
- getDispatcher()->vkGetDeviceMemoryCommitment( static_cast<VkDevice>( m_device ),
- static_cast<VkDeviceMemory>( m_memory ),
- reinterpret_cast<VkDeviceSize *>( &committedMemoryInBytes ) );
+ getDispatcher()->vkGetDeviceMemoryCommitment(
+ static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( m_memory ), reinterpret_cast<VkDeviceSize *>( &committedMemoryInBytes ) );
+
return committedMemoryInBytes;
}
- VULKAN_HPP_INLINE void Buffer::bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const
+ VULKAN_HPP_INLINE void Buffer::bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkBindBufferMemory( static_cast<VkDevice>( m_device ),
- static_cast<VkBuffer>( m_buffer ),
- static_cast<VkDeviceMemory>( memory ),
- static_cast<VkDeviceSize>( memoryOffset ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Buffer::bindMemory" );
- }
+ VkResult result = getDispatcher()->vkBindBufferMemory( static_cast<VkDevice>( m_device ),
+ static_cast<VkBuffer>( m_buffer ),
+ static_cast<VkDeviceMemory>( memory ),
+ static_cast<VkDeviceSize>( memoryOffset ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Buffer::bindMemory" );
}
- VULKAN_HPP_INLINE void Image::bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
- VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const
+ VULKAN_HPP_INLINE void Image::bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkBindImageMemory( static_cast<VkDevice>( m_device ),
- static_cast<VkImage>( m_image ),
- static_cast<VkDeviceMemory>( memory ),
- static_cast<VkDeviceSize>( memoryOffset ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Image::bindMemory" );
- }
+ VkResult result = getDispatcher()->vkBindImageMemory(
+ static_cast<VkDevice>( m_device ), static_cast<VkImage>( m_image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Image::bindMemory" );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements
- Buffer::getMemoryRequirements() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Buffer::getMemoryRequirements() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
- getDispatcher()->vkGetBufferMemoryRequirements( static_cast<VkDevice>( m_device ),
- static_cast<VkBuffer>( m_buffer ),
- reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
+ getDispatcher()->vkGetBufferMemoryRequirements(
+ static_cast<VkDevice>( m_device ), static_cast<VkBuffer>( m_buffer ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
+
return memoryRequirements;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements
- Image::getMemoryRequirements() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Image::getMemoryRequirements() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
- getDispatcher()->vkGetImageMemoryRequirements( static_cast<VkDevice>( m_device ),
- static_cast<VkImage>( m_image ),
- reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
+ getDispatcher()->vkGetImageMemoryRequirements(
+ static_cast<VkDevice>( m_device ), static_cast<VkImage>( m_image ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
+
return memoryRequirements;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements>
- Image::getSparseMemoryRequirements() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements> Image::getSparseMemoryRequirements() const
{
- uint32_t sparseMemoryRequirementCount;
+ std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements> sparseMemoryRequirements;
+ uint32_t sparseMemoryRequirementCount;
getDispatcher()->vkGetImageSparseMemoryRequirements(
static_cast<VkDevice>( m_device ), static_cast<VkImage>( m_image ), &sparseMemoryRequirementCount, nullptr );
- std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements> sparseMemoryRequirements(
- sparseMemoryRequirementCount );
- getDispatcher()->vkGetImageSparseMemoryRequirements(
- static_cast<VkDevice>( m_device ),
- static_cast<VkImage>( m_image ),
- &sparseMemoryRequirementCount,
- reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
- VULKAN_HPP_ASSERT( sparseMemoryRequirementCount == sparseMemoryRequirements.size() );
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+ getDispatcher()->vkGetImageSparseMemoryRequirements( static_cast<VkDevice>( m_device ),
+ static_cast<VkImage>( m_image ),
+ &sparseMemoryRequirementCount,
+ reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
+
+ VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+ if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+ {
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+ }
return sparseMemoryRequirements;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>
- PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
+ PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
VULKAN_HPP_NAMESPACE::ImageType type,
VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
VULKAN_HPP_NAMESPACE::ImageTiling tiling ) const
- VULKAN_HPP_NOEXCEPT
{
- uint32_t propertyCount;
- getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- static_cast<VkFormat>( format ),
- static_cast<VkImageType>( type ),
- static_cast<VkSampleCountFlagBits>( samples ),
- static_cast<VkImageUsageFlags>( usage ),
- static_cast<VkImageTiling>( tiling ),
- &propertyCount,
- nullptr );
- std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties> properties( propertyCount );
- getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- static_cast<VkFormat>( format ),
- static_cast<VkImageType>( type ),
- static_cast<VkSampleCountFlagBits>( samples ),
- static_cast<VkImageUsageFlags>( usage ),
- static_cast<VkImageTiling>( tiling ),
- &propertyCount,
- reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
- VULKAN_HPP_ASSERT( propertyCount == properties.size() );
+ std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties> properties;
+ uint32_t propertyCount;
+ getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ static_cast<VkFormat>( format ),
+ static_cast<VkImageType>( type ),
+ static_cast<VkSampleCountFlagBits>( samples ),
+ static_cast<VkImageUsageFlags>( usage ),
+ static_cast<VkImageTiling>( tiling ),
+ &propertyCount,
+ nullptr );
+ properties.resize( propertyCount );
+ getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ static_cast<VkFormat>( format ),
+ static_cast<VkImageType>( type ),
+ static_cast<VkSampleCountFlagBits>( samples ),
+ static_cast<VkImageUsageFlags>( usage ),
+ static_cast<VkImageTiling>( tiling ),
+ &propertyCount,
+ reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
+
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
+ {
+ properties.resize( propertyCount );
+ }
return properties;
}
- VULKAN_HPP_INLINE void Queue::bindSparse( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo,
- VULKAN_HPP_NAMESPACE::Fence fence ) const
+ VULKAN_HPP_INLINE void Queue::bindSparse( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo,
+ VULKAN_HPP_NAMESPACE::Fence fence ) const
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkQueueBindSparse( static_cast<VkQueue>( m_queue ),
- bindInfo.size(),
- reinterpret_cast<const VkBindSparseInfo *>( bindInfo.data() ),
- static_cast<VkFence>( fence ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" );
- }
+ VkResult result = getDispatcher()->vkQueueBindSparse(
+ static_cast<VkQueue>( m_queue ), bindInfo.size(), reinterpret_cast<const VkBindSparseInfo *>( bindInfo.data() ), static_cast<VkFence>( fence ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Fence Device::createFence(
- VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Fence
+ Device::createFence( VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::Fence( *this, createInfo, allocator );
}
- VULKAN_HPP_INLINE void Device::resetFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences ) const
+ VULKAN_HPP_INLINE void Device::resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences ) const
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkResetFences(
- static_cast<VkDevice>( m_device ), fences.size(), reinterpret_cast<const VkFence *>( fences.data() ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" );
- }
+ VkResult result = getDispatcher()->vkResetFences( static_cast<VkDevice>( m_device ), fences.size(), reinterpret_cast<const VkFence *>( fences.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Fence::getStatus() const
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkGetFenceStatus( static_cast<VkDevice>( m_device ), static_cast<VkFence>( m_fence ) ) );
- if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
- ( result != VULKAN_HPP_NAMESPACE::Result::eNotReady ) )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Fence::getStatus" );
- }
- return result;
+ VkResult result = getDispatcher()->vkGetFenceStatus( static_cast<VkDevice>( m_device ), static_cast<VkFence>( m_fence ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Fence::getStatus",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
- Device::waitForFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,
- VULKAN_HPP_NAMESPACE::Bool32 waitAll,
- uint64_t timeout ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitForFences(
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout ) const
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkWaitForFences( static_cast<VkDevice>( m_device ),
- fences.size(),
- reinterpret_cast<const VkFence *>( fences.data() ),
- static_cast<VkBool32>( waitAll ),
- timeout ) );
- if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
- ( result != VULKAN_HPP_NAMESPACE::Result::eTimeout ) )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences" );
- }
- return result;
+ VkResult result = getDispatcher()->vkWaitForFences(
+ static_cast<VkDevice>( m_device ), fences.size(), reinterpret_cast<const VkFence *>( fences.data() ), static_cast<VkBool32>( waitAll ), timeout );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Semaphore Device::createSemaphore(
- VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Semaphore
+ Device::createSemaphore( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::Semaphore( *this, createInfo, allocator );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Event Device::createEvent(
- VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Event
+ Device::createEvent( VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::Event( *this, createInfo, allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Event::getStatus() const
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkGetEventStatus( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( m_event ) ) );
- if ( ( result != VULKAN_HPP_NAMESPACE::Result::eEventSet ) &&
- ( result != VULKAN_HPP_NAMESPACE::Result::eEventReset ) )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Event::getStatus" );
- }
- return result;
+ VkResult result = getDispatcher()->vkGetEventStatus( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( m_event ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Event::getStatus",
+ { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
VULKAN_HPP_INLINE void Event::set() const
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkSetEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( m_event ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Event::set" );
- }
+ VkResult result = getDispatcher()->vkSetEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( m_event ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Event::set" );
}
VULKAN_HPP_INLINE void Event::reset() const
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkResetEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( m_event ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Event::reset" );
- }
+ VkResult result = getDispatcher()->vkResetEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( m_event ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Event::reset" );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::QueryPool Device::createQueryPool(
- VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::QueryPool
+ Device::createQueryPool( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::QueryPool( *this, createInfo, allocator );
}
template <typename DataType>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::Result, std::vector<DataType>>
- QueryPool::getResults( uint32_t firstQuery,
- uint32_t queryCount,
- size_t dataSize,
- VULKAN_HPP_NAMESPACE::DeviceSize stride,
- VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::Result, std::vector<DataType>> QueryPool::getResults(
+ uint32_t firstQuery, uint32_t queryCount, size_t dataSize, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const
{
VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
std::vector<DataType> data( dataSize / sizeof( DataType ) );
- Result result =
- static_cast<Result>( getDispatcher()->vkGetQueryPoolResults( static_cast<VkDevice>( m_device ),
- static_cast<VkQueryPool>( m_queryPool ),
- firstQuery,
- queryCount,
- data.size() * sizeof( DataType ),
- reinterpret_cast<void *>( data.data() ),
- static_cast<VkDeviceSize>( stride ),
- static_cast<VkQueryResultFlags>( flags ) ) );
- if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
- ( result != VULKAN_HPP_NAMESPACE::Result::eNotReady ) )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::QueryPool::getResults" );
- }
- return std::make_pair( result, data );
+ VkResult result = getDispatcher()->vkGetQueryPoolResults( static_cast<VkDevice>( m_device ),
+ static_cast<VkQueryPool>( m_queryPool ),
+ firstQuery,
+ queryCount,
+ data.size() * sizeof( DataType ),
+ reinterpret_cast<void *>( data.data() ),
+ static_cast<VkDeviceSize>( stride ),
+ static_cast<VkQueryResultFlags>( flags ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::QueryPool::getResults",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+
+ return std::make_pair( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
template <typename DataType>
- VULKAN_HPP_NODISCARD std::pair<VULKAN_HPP_NAMESPACE::Result, DataType>
- QueryPool::getResult( uint32_t firstQuery,
- uint32_t queryCount,
- VULKAN_HPP_NAMESPACE::DeviceSize stride,
- VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::Result, DataType> QueryPool::getResult(
+ uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const
{
DataType data;
- Result result =
- static_cast<Result>( getDispatcher()->vkGetQueryPoolResults( static_cast<VkDevice>( m_device ),
- static_cast<VkQueryPool>( m_queryPool ),
- firstQuery,
- queryCount,
- sizeof( DataType ),
- reinterpret_cast<void *>( &data ),
- static_cast<VkDeviceSize>( stride ),
- static_cast<VkQueryResultFlags>( flags ) ) );
- if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
- ( result != VULKAN_HPP_NAMESPACE::Result::eNotReady ) )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::QueryPool::getResult" );
- }
- return std::make_pair( result, data );
+ VkResult result = getDispatcher()->vkGetQueryPoolResults( static_cast<VkDevice>( m_device ),
+ static_cast<VkQueryPool>( m_queryPool ),
+ firstQuery,
+ queryCount,
+ sizeof( DataType ),
+ reinterpret_cast<void *>( &data ),
+ static_cast<VkDeviceSize>( stride ),
+ static_cast<VkQueryResultFlags>( flags ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::QueryPool::getResult",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+
+ return std::make_pair( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Buffer Device::createBuffer(
- VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Buffer
+ Device::createBuffer( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::Buffer( *this, createInfo, allocator );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::BufferView Device::createBufferView(
- VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::BufferView
+ Device::createBufferView( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::BufferView( *this, createInfo, allocator );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Image Device::createImage(
- VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Image
+ Device::createImage( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::Image( *this, createInfo, allocator );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout Image::getSubresourceLayout(
- const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout
+ Image::getSubresourceLayout( const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::SubresourceLayout layout;
getDispatcher()->vkGetImageSubresourceLayout( static_cast<VkDevice>( m_device ),
static_cast<VkImage>( m_image ),
reinterpret_cast<const VkImageSubresource *>( &subresource ),
reinterpret_cast<VkSubresourceLayout *>( &layout ) );
+
return layout;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::ImageView Device::createImageView(
- VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::ImageView
+ Device::createImageView( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::ImageView( *this, createInfo, allocator );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::ShaderModule Device::createShaderModule(
- VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::ShaderModule
+ Device::createShaderModule( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::ShaderModule( *this, createInfo, allocator );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PipelineCache Device::createPipelineCache(
- VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PipelineCache
+ Device::createPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::PipelineCache( *this, createInfo, allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<uint8_t> PipelineCache::getData() const
{
- std::vector<uint8_t> data;
- size_t dataSize;
- VULKAN_HPP_NAMESPACE::Result result;
+ std::vector<uint8_t> data;
+ size_t dataSize;
+ VkResult result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPipelineCacheData(
- static_cast<VkDevice>( m_device ), static_cast<VkPipelineCache>( m_pipelineCache ), &dataSize, nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
+ result =
+ getDispatcher()->vkGetPipelineCacheData( static_cast<VkDevice>( m_device ), static_cast<VkPipelineCache>( m_pipelineCache ), &dataSize, nullptr );
+ if ( ( result == VK_SUCCESS ) && dataSize )
{
data.resize( dataSize );
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkGetPipelineCacheData( static_cast<VkDevice>( m_device ),
- static_cast<VkPipelineCache>( m_pipelineCache ),
- &dataSize,
- reinterpret_cast<void *>( data.data() ) ) );
+ result = getDispatcher()->vkGetPipelineCacheData(
+ static_cast<VkDevice>( m_device ), static_cast<VkPipelineCache>( m_pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PipelineCache::getData" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PipelineCache::getData" );
+ VULKAN_HPP_ASSERT( dataSize <= data.size() );
+ if ( dataSize < data.size() )
{
- VULKAN_HPP_ASSERT( dataSize <= data.size() );
- if ( dataSize < data.size() )
- {
- data.resize( dataSize );
- }
+ data.resize( dataSize );
}
return data;
}
- VULKAN_HPP_INLINE void
- PipelineCache::merge( ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches ) const
+ VULKAN_HPP_INLINE void PipelineCache::merge( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches ) const
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkMergePipelineCaches( static_cast<VkDevice>( m_device ),
- static_cast<VkPipelineCache>( m_pipelineCache ),
- srcCaches.size(),
- reinterpret_cast<const VkPipelineCache *>( srcCaches.data() ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PipelineCache::merge" );
- }
+ VkResult result = getDispatcher()->vkMergePipelineCaches( static_cast<VkDevice>( m_device ),
+ static_cast<VkPipelineCache>( m_pipelineCache ),
+ srcCaches.size(),
+ reinterpret_cast<const VkPipelineCache *>( srcCaches.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PipelineCache::merge" );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline>
- Device::createGraphicsPipelines(
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const &
- pipelineCache,
- VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> Device::createGraphicsPipelines(
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::Pipelines( *this, pipelineCache, createInfos, allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline Device::createGraphicsPipeline(
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const &
- pipelineCache,
- VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, pipelineCache, createInfo, allocator );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline>
- Device::createComputePipelines(
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const &
- pipelineCache,
- VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> Device::createComputePipelines(
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::Pipelines( *this, pipelineCache, createInfos, allocator );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline Device::createComputePipeline(
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const &
- pipelineCache,
- VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline
+ Device::createComputePipeline( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, pipelineCache, createInfo, allocator );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PipelineLayout Device::createPipelineLayout(
- VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PipelineLayout
+ Device::createPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::PipelineLayout( *this, createInfo, allocator );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Sampler Device::createSampler(
- VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Sampler
+ Device::createSampler( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::Sampler( *this, createInfo, allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout
- Device::createDescriptorSetLayout(
- VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ Device::createDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout( *this, createInfo, allocator );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DescriptorPool Device::createDescriptorPool(
- VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DescriptorPool
+ Device::createDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::DescriptorPool( *this, createInfo, allocator );
}
- VULKAN_HPP_INLINE void
- DescriptorPool::reset( VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void DescriptorPool::reset( VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkResetDescriptorPool( static_cast<VkDevice>( m_device ),
- static_cast<VkDescriptorPool>( m_descriptorPool ),
- static_cast<VkDescriptorPoolResetFlags>( flags ) );
+ getDispatcher()->vkResetDescriptorPool(
+ static_cast<VkDevice>( m_device ), static_cast<VkDescriptorPool>( m_descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::DescriptorSet>
- Device::allocateDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo ) const
+ Device::allocateDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo ) const
{
return VULKAN_HPP_RAII_NAMESPACE::DescriptorSets( *this, allocateInfo );
}
VULKAN_HPP_INLINE void Device::updateDescriptorSets(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkUpdateDescriptorSets(
- static_cast<VkDevice>( m_device ),
- descriptorWrites.size(),
- reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ),
- descriptorCopies.size(),
- reinterpret_cast<const VkCopyDescriptorSet *>( descriptorCopies.data() ) );
+ getDispatcher()->vkUpdateDescriptorSets( static_cast<VkDevice>( m_device ),
+ descriptorWrites.size(),
+ reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ),
+ descriptorCopies.size(),
+ reinterpret_cast<const VkCopyDescriptorSet *>( descriptorCopies.data() ) );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Framebuffer Device::createFramebuffer(
- VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Framebuffer
+ Device::createFramebuffer( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::Framebuffer( *this, createInfo, allocator );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::RenderPass Device::createRenderPass(
- VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::RenderPass
+ Device::createRenderPass( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::RenderPass( *this, createInfo, allocator );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D
- RenderPass::getRenderAreaGranularity() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D RenderPass::getRenderAreaGranularity() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::Extent2D granularity;
- getDispatcher()->vkGetRenderAreaGranularity( static_cast<VkDevice>( m_device ),
- static_cast<VkRenderPass>( m_renderPass ),
- reinterpret_cast<VkExtent2D *>( &granularity ) );
+ getDispatcher()->vkGetRenderAreaGranularity(
+ static_cast<VkDevice>( m_device ), static_cast<VkRenderPass>( m_renderPass ), reinterpret_cast<VkExtent2D *>( &granularity ) );
+
return granularity;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::CommandPool Device::createCommandPool(
- VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::CommandPool
+ Device::createCommandPool( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::CommandPool( *this, createInfo, allocator );
}
VULKAN_HPP_INLINE void CommandPool::reset( VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags ) const
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkResetCommandPool( static_cast<VkDevice>( m_device ),
- static_cast<VkCommandPool>( m_commandPool ),
- static_cast<VkCommandPoolResetFlags>( flags ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::CommandPool::reset" );
- }
+ VkResult result = getDispatcher()->vkResetCommandPool(
+ static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( m_commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandPool::reset" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::CommandBuffer>
- Device::allocateCommandBuffers( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo ) const
+ Device::allocateCommandBuffers( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo ) const
{
return VULKAN_HPP_RAII_NAMESPACE::CommandBuffers( *this, allocateInfo );
}
VULKAN_HPP_INLINE void CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo ) const
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkBeginCommandBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkCommandBufferBeginInfo *>( &beginInfo ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" );
- }
+ VkResult result = getDispatcher()->vkBeginCommandBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkCommandBufferBeginInfo *>( &beginInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" );
}
VULKAN_HPP_INLINE void CommandBuffer::end() const
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkEndCommandBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" );
- }
+ VkResult result = getDispatcher()->vkEndCommandBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" );
}
VULKAN_HPP_INLINE void CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags ) const
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkResetCommandBuffer(
- static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCommandBufferResetFlags>( flags ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" );
- }
+ VkResult result =
+ getDispatcher()->vkResetCommandBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCommandBufferResetFlags>( flags ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
- VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdBindPipeline( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
- static_cast<VkPipeline>( pipeline ) );
+ getDispatcher()->vkCmdBindPipeline(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::setViewport(
- uint32_t firstViewport,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setViewport( uint32_t firstViewport,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdSetViewport( static_cast<VkCommandBuffer>( m_commandBuffer ),
- firstViewport,
- viewports.size(),
- reinterpret_cast<const VkViewport *>( viewports.data() ) );
+ getDispatcher()->vkCmdSetViewport(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), firstViewport, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::setScissor(
- uint32_t firstScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setScissor( uint32_t firstScissor,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdSetScissor( static_cast<VkCommandBuffer>( m_commandBuffer ),
- firstScissor,
- scissors.size(),
- reinterpret_cast<const VkRect2D *>( scissors.data() ) );
+ getDispatcher()->vkCmdSetScissor(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), firstScissor, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth ) const VULKAN_HPP_NOEXCEPT
@@ -11072,14 +11520,10 @@ namespace VULKAN_HPP_NAMESPACE
getDispatcher()->vkCmdSetLineWidth( static_cast<VkCommandBuffer>( m_commandBuffer ), lineWidth );
}
- VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor,
- float depthBiasClamp,
- float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdSetDepthBias( static_cast<VkCommandBuffer>( m_commandBuffer ),
- depthBiasConstantFactor,
- depthBiasClamp,
- depthBiasSlopeFactor );
+ getDispatcher()->vkCmdSetDepthBias( static_cast<VkCommandBuffer>( m_commandBuffer ), depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
}
VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT
@@ -11087,40 +11531,33 @@ namespace VULKAN_HPP_NAMESPACE
getDispatcher()->vkCmdSetBlendConstants( static_cast<VkCommandBuffer>( m_commandBuffer ), blendConstants );
}
- VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds,
- float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdSetDepthBounds(
- static_cast<VkCommandBuffer>( m_commandBuffer ), minDepthBounds, maxDepthBounds );
+ getDispatcher()->vkCmdSetDepthBounds( static_cast<VkCommandBuffer>( m_commandBuffer ), minDepthBounds, maxDepthBounds );
}
VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
- uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT
+ uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdSetStencilCompareMask(
- static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
+ getDispatcher()->vkCmdSetStencilCompareMask( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
}
- VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
- uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdSetStencilWriteMask(
- static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
+ getDispatcher()->vkCmdSetStencilWriteMask( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
}
- VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
- uint32_t reference ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdSetStencilReference(
- static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), reference );
+ getDispatcher()->vkCmdSetStencilReference( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), reference );
}
VULKAN_HPP_INLINE void
- CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
- VULKAN_HPP_NAMESPACE::PipelineLayout layout,
- uint32_t firstSet,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
- ArrayProxy<const uint32_t> const & dynamicOffsets ) const VULKAN_HPP_NOEXCEPT
+ CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+ uint32_t firstSet,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & dynamicOffsets ) const VULKAN_HPP_NOEXCEPT
{
getDispatcher()->vkCmdBindDescriptorSets( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
@@ -11132,10 +11569,9 @@ namespace VULKAN_HPP_NAMESPACE
dynamicOffsets.data() );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT
{
getDispatcher()->vkCmdBindIndexBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( buffer ),
@@ -11143,20 +11579,14 @@ namespace VULKAN_HPP_NAMESPACE
static_cast<VkIndexType>( indexType ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers(
- uint32_t firstBinding,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets ) const
{
-# ifdef VULKAN_HPP_NO_EXCEPTIONS
- VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
-# else
if ( buffers.size() != offsets.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
}
-# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
getDispatcher()->vkCmdBindVertexBuffers( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstBinding,
@@ -11165,74 +11595,52 @@ namespace VULKAN_HPP_NAMESPACE
reinterpret_cast<const VkDeviceSize *>( offsets.data() ) );
}
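[Illustrative aside, not part of the diff] bindVertexBuffers still validates that the two ArrayProxy arguments have the same length and throws vk::LogicError otherwise. A hedged usage sketch, with hypothetical RAII objects commandBuffer and vertexBuffer:

    // Sketch only: ArrayProxy accepts initializer lists, so both spans can be
    // written inline; mismatched lengths would throw vk::LogicError.
    commandBuffer.bindVertexBuffers( 0, { *vertexBuffer }, { vk::DeviceSize( 0 ) } );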
- VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount,
- uint32_t instanceCount,
- uint32_t firstVertex,
- uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdDraw(
- static_cast<VkCommandBuffer>( m_commandBuffer ), vertexCount, instanceCount, firstVertex, firstInstance );
+ getDispatcher()->vkCmdDraw( static_cast<VkCommandBuffer>( m_commandBuffer ), vertexCount, instanceCount, firstVertex, firstInstance );
}
- VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount,
- uint32_t instanceCount,
- uint32_t firstIndex,
- int32_t vertexOffset,
- uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndexed(
+ uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdDrawIndexed( static_cast<VkCommandBuffer>( m_commandBuffer ),
- indexCount,
- instanceCount,
- firstIndex,
- vertexOffset,
- firstInstance );
+ getDispatcher()->vkCmdDrawIndexed( static_cast<VkCommandBuffer>( m_commandBuffer ), indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
}
VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
uint32_t drawCount,
- uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+ uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdDrawIndirect( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkBuffer>( buffer ),
- static_cast<VkDeviceSize>( offset ),
- drawCount,
- stride );
+ getDispatcher()->vkCmdDrawIndirect(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
}
VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
uint32_t drawCount,
- uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+ uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdDrawIndexedIndirect( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkBuffer>( buffer ),
- static_cast<VkDeviceSize>( offset ),
- drawCount,
- stride );
+ getDispatcher()->vkCmdDrawIndexedIndirect(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
}
- VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdDispatch(
- static_cast<VkCommandBuffer>( m_commandBuffer ), groupCountX, groupCountY, groupCountZ );
+ getDispatcher()->vkCmdDispatch( static_cast<VkCommandBuffer>( m_commandBuffer ), groupCountX, groupCountY, groupCountZ );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdDispatchIndirect( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkBuffer>( buffer ),
- static_cast<VkDeviceSize>( offset ) );
+ getDispatcher()->vkCmdDispatchIndirect(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::copyBuffer(
- VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
- VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions ) const VULKAN_HPP_NOEXCEPT
{
getDispatcher()->vkCmdCopyBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( srcBuffer ),
@@ -11241,12 +11649,12 @@ namespace VULKAN_HPP_NAMESPACE
reinterpret_cast<const VkBufferCopy *>( regions.data() ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::copyImage(
- VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions ) const VULKAN_HPP_NOEXCEPT
{
getDispatcher()->vkCmdCopyImage( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkImage>( srcImage ),
@@ -11257,11 +11665,11 @@ namespace VULKAN_HPP_NAMESPACE
reinterpret_cast<const VkImageCopy *>( regions.data() ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions,
+ VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions,
VULKAN_HPP_NAMESPACE::Filter filter ) const VULKAN_HPP_NOEXCEPT
{
getDispatcher()->vkCmdBlitImage( static_cast<VkCommandBuffer>( m_commandBuffer ),
@@ -11275,10 +11683,10 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage(
- VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions ) const VULKAN_HPP_NOEXCEPT
{
getDispatcher()->vkCmdCopyBufferToImage( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( srcBuffer ),
@@ -11289,10 +11697,10 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer(
- VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions ) const VULKAN_HPP_NOEXCEPT
{
getDispatcher()->vkCmdCopyImageToBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkImage>( srcImage ),
@@ -11303,10 +11711,9 @@ namespace VULKAN_HPP_NAMESPACE
}
template <typename DataType>
- VULKAN_HPP_INLINE void
- CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
- ArrayProxy<const DataType> const & data ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const DataType> const & data ) const VULKAN_HPP_NOEXCEPT
{
getDispatcher()->vkCmdUpdateBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( dstBuffer ),
@@ -11328,10 +11735,10 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_INLINE void CommandBuffer::clearColorImage(
- VULKAN_HPP_NAMESPACE::Image image,
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
- const VULKAN_HPP_NAMESPACE::ClearColorValue & color,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::Image image,
+ VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
+ const VULKAN_HPP_NAMESPACE::ClearColorValue & color,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges ) const VULKAN_HPP_NOEXCEPT
{
getDispatcher()->vkCmdClearColorImage( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkImage>( image ),
@@ -11342,23 +11749,22 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage(
- VULKAN_HPP_NAMESPACE::Image image,
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
- const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::Image image,
+ VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
+ const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdClearDepthStencilImage(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkImage>( image ),
- static_cast<VkImageLayout>( imageLayout ),
- reinterpret_cast<const VkClearDepthStencilValue *>( &depthStencil ),
- ranges.size(),
- reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
+ getDispatcher()->vkCmdClearDepthStencilImage( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkImage>( image ),
+ static_cast<VkImageLayout>( imageLayout ),
+ reinterpret_cast<const VkClearDepthStencilValue *>( &depthStencil ),
+ ranges.size(),
+ reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::clearAttachments(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects ) const VULKAN_HPP_NOEXCEPT
{
getDispatcher()->vkCmdClearAttachments( static_cast<VkCommandBuffer>( m_commandBuffer ),
attachments.size(),
@@ -11367,12 +11773,12 @@ namespace VULKAN_HPP_NAMESPACE
reinterpret_cast<const VkClearRect *>( rects.data() ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::resolveImage(
- VULKAN_HPP_NAMESPACE::Image srcImage,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
- VULKAN_HPP_NAMESPACE::Image dstImage,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+ VULKAN_HPP_NAMESPACE::Image dstImage,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions ) const VULKAN_HPP_NOEXCEPT
{
getDispatcher()->vkCmdResolveImage( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkImage>( srcImage ),
@@ -11383,31 +11789,27 @@ namespace VULKAN_HPP_NAMESPACE
reinterpret_cast<const VkImageResolve *>( regions.data() ) );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdSetEvent( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkEvent>( event ),
- static_cast<VkPipelineStageFlags>( stageMask ) );
+ getDispatcher()->vkCmdSetEvent(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdResetEvent( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkEvent>( event ),
- static_cast<VkPipelineStageFlags>( stageMask ) );
+ getDispatcher()->vkCmdResetEvent(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
}
VULKAN_HPP_INLINE void CommandBuffer::waitEvents(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT
{
getDispatcher()->vkCmdWaitEvents( static_cast<VkCommandBuffer>( m_commandBuffer ),
events.size(),
@@ -11423,72 +11825,59 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier(
- VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
- VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT
- {
- getDispatcher()->vkCmdPipelineBarrier(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkPipelineStageFlags>( srcStageMask ),
- static_cast<VkPipelineStageFlags>( dstStageMask ),
- static_cast<VkDependencyFlags>( dependencyFlags ),
- memoryBarriers.size(),
- reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ),
- bufferMemoryBarriers.size(),
- reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ),
- imageMemoryBarriers.size(),
- reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
+ VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT
+ {
+ getDispatcher()->vkCmdPipelineBarrier( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkPipelineStageFlags>( srcStageMask ),
+ static_cast<VkPipelineStageFlags>( dstStageMask ),
+ static_cast<VkDependencyFlags>( dependencyFlags ),
+ memoryBarriers.size(),
+ reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ),
+ bufferMemoryBarriers.size(),
+ reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ),
+ imageMemoryBarriers.size(),
+ reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
}
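[Illustrative aside, not part of the diff] pipelineBarrier takes three ArrayProxy spans, so unused barrier kinds can be passed as empty braces and a single barrier can be passed by value. A hedged sketch, assuming hypothetical commandBuffer and image RAII objects:

    // Sketch only: one image layout transition recorded through the RAII wrapper;
    // {} passes an empty ArrayProxy for the memory and buffer barrier spans.
    vk::ImageMemoryBarrier barrier( vk::AccessFlagBits::eTransferWrite,
                                    vk::AccessFlagBits::eShaderRead,
                                    vk::ImageLayout::eTransferDstOptimal,
                                    vk::ImageLayout::eShaderReadOnlyOptimal,
                                    VK_QUEUE_FAMILY_IGNORED,
                                    VK_QUEUE_FAMILY_IGNORED,
                                    *image,
                                    vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );
    commandBuffer.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,
                                   vk::PipelineStageFlagBits::eFragmentShader,
                                   {}, {}, {}, barrier );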
- VULKAN_HPP_INLINE void
- CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t query,
- VULKAN_HPP_NAMESPACE::QueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t query,
+ VULKAN_HPP_NAMESPACE::QueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdBeginQuery( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkQueryPool>( queryPool ),
- query,
- static_cast<VkQueryControlFlags>( flags ) );
+ getDispatcher()->vkCmdBeginQuery(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t query ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdEndQuery(
- static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query );
+ getDispatcher()->vkCmdEndQuery( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query );
}
- VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdResetQueryPool( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkQueryPool>( queryPool ),
- firstQuery,
- queryCount );
+ getDispatcher()->vkCmdResetQueryPool( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
}
VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t query ) const VULKAN_HPP_NOEXCEPT
+ uint32_t query ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdWriteTimestamp( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkPipelineStageFlagBits>( pipelineStage ),
- static_cast<VkQueryPool>( queryPool ),
- query );
+ getDispatcher()->vkCmdWriteTimestamp(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
- VULKAN_HPP_NAMESPACE::DeviceSize stride,
- VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery,
+ uint32_t queryCount,
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
+ VULKAN_HPP_NAMESPACE::DeviceSize stride,
+ VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT
{
getDispatcher()->vkCmdCopyQueryPoolResults( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkQueryPool>( queryPool ),
@@ -11501,11 +11890,10 @@ namespace VULKAN_HPP_NAMESPACE
}
template <typename ValuesType>
- VULKAN_HPP_INLINE void
- CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout,
- VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
- uint32_t offset,
- ArrayProxy<const ValuesType> const & values ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
+ uint32_t offset,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const ValuesType> const & values ) const VULKAN_HPP_NOEXCEPT
{
getDispatcher()->vkCmdPushConstants( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkPipelineLayout>( layout ),
@@ -11515,20 +11903,17 @@ namespace VULKAN_HPP_NAMESPACE
reinterpret_cast<const void *>( values.data() ) );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
- VULKAN_HPP_NAMESPACE::SubpassContents contents ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
+ VULKAN_HPP_NAMESPACE::SubpassContents contents ) const VULKAN_HPP_NOEXCEPT
{
getDispatcher()->vkCmdBeginRenderPass( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ),
static_cast<VkSubpassContents>( contents ) );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdNextSubpass( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkSubpassContents>( contents ) );
+ getDispatcher()->vkCmdNextSubpass( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkSubpassContents>( contents ) );
}
VULKAN_HPP_INLINE void CommandBuffer::endRenderPass() const VULKAN_HPP_NOEXCEPT
@@ -11537,65 +11922,48 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_INLINE void CommandBuffer::executeCommands(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdExecuteCommands( static_cast<VkCommandBuffer>( m_commandBuffer ),
- commandBuffers.size(),
- reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
+ getDispatcher()->vkCmdExecuteCommands(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
}
//=== VK_VERSION_1_1 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t Context::enumerateInstanceVersion() const
{
- uint32_t apiVersion;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkEnumerateInstanceVersion( &apiVersion ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceVersion" );
- }
+ uint32_t apiVersion;
+ VkResult result = getDispatcher()->vkEnumerateInstanceVersion( &apiVersion );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceVersion" );
+
return apiVersion;
}
VULKAN_HPP_INLINE void
- Device::bindBufferMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos ) const
+ Device::bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos ) const
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkBindBufferMemory2( static_cast<VkDevice>( m_device ),
- bindInfos.size(),
- reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" );
- }
+ VkResult result = getDispatcher()->vkBindBufferMemory2(
+ static_cast<VkDevice>( m_device ), bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" );
}
- VULKAN_HPP_INLINE void
- Device::bindImageMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos ) const
+ VULKAN_HPP_INLINE void Device::bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos ) const
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkBindImageMemory2( static_cast<VkDevice>( m_device ),
- bindInfos.size(),
- reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" );
- }
+ VkResult result = getDispatcher()->vkBindImageMemory2(
+ static_cast<VkDevice>( m_device ), bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags
- Device::getGroupPeerMemoryFeatures( uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT
+ Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
- getDispatcher()->vkGetDeviceGroupPeerMemoryFeatures(
- static_cast<VkDevice>( m_device ),
- heapIndex,
- localDeviceIndex,
- remoteDeviceIndex,
- reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
+ getDispatcher()->vkGetDeviceGroupPeerMemoryFeatures( static_cast<VkDevice>( m_device ),
+ heapIndex,
+ localDeviceIndex,
+ remoteDeviceIndex,
+ reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
+
return peerMemoryFeatures;
}
@@ -11611,409 +11979,377 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t groupCountY,
uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdDispatchBase( static_cast<VkCommandBuffer>( m_commandBuffer ),
- baseGroupX,
- baseGroupY,
- baseGroupZ,
- groupCountX,
- groupCountY,
- groupCountZ );
+ getDispatcher()->vkCmdDispatchBase(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>
- Instance::enumeratePhysicalDeviceGroups() const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties> Instance::enumeratePhysicalDeviceGroups() const
{
std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties> physicalDeviceGroupProperties;
uint32_t physicalDeviceGroupCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VkResult result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkEnumeratePhysicalDeviceGroups(
- static_cast<VkInstance>( m_instance ), &physicalDeviceGroupCount, nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount )
+ result = getDispatcher()->vkEnumeratePhysicalDeviceGroups( static_cast<VkInstance>( m_instance ), &physicalDeviceGroupCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
{
physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkEnumeratePhysicalDeviceGroups(
- static_cast<VkInstance>( m_instance ),
- &physicalDeviceGroupCount,
- reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
+ result =
+ getDispatcher()->vkEnumeratePhysicalDeviceGroups( static_cast<VkInstance>( m_instance ),
+ &physicalDeviceGroupCount,
+ reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
+ VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+ if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
{
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
- if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
- {
- physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
- }
+ physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
}
return physicalDeviceGroupProperties;
}
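[Illustrative aside, not part of the diff] enumeratePhysicalDeviceGroups, like the other enumeration wrappers in this diff, follows the standard Vulkan two-call idiom: query the count, size the output, fetch, and retry while the driver reports VK_INCOMPLETE, then trim if the count shrank. The same idiom against the raw C API looks roughly like the hypothetical helper below (the generated code can skip the explicit sType setup because the C++ wrapper structs default-initialize it):

    #include <vector>
    #include <vulkan/vulkan.h>

    // Hypothetical helper showing the count/fill/retry idiom; not part of the header.
    std::vector<VkPhysicalDeviceGroupProperties> enumerateGroups( VkInstance instance )
    {
      std::vector<VkPhysicalDeviceGroupProperties> groups;
      uint32_t count = 0;
      VkResult result;
      do
      {
        result = vkEnumeratePhysicalDeviceGroups( instance, &count, nullptr );
        if ( ( result == VK_SUCCESS ) && count )
        {
          // Raw C output structs need their sType set by hand.
          groups.assign( count, VkPhysicalDeviceGroupProperties{ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES } );
          result = vkEnumeratePhysicalDeviceGroups( instance, &count, groups.data() );
        }
      } while ( result == VK_INCOMPLETE );
      groups.resize( count );  // trim if the group count shrank between the two calls
      return groups;
    }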
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
- Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const
- VULKAN_HPP_NOEXCEPT
+ Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
- getDispatcher()->vkGetImageMemoryRequirements2(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ getDispatcher()->vkGetImageMemoryRequirements2( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return memoryRequirements;
}
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirements2(
- const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
{
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
- structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
- getDispatcher()->vkGetImageMemoryRequirements2(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+ getDispatcher()->vkGetImageMemoryRequirements2( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
- Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const
- VULKAN_HPP_NOEXCEPT
+ Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
- getDispatcher()->vkGetBufferMemoryRequirements2(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ getDispatcher()->vkGetBufferMemoryRequirements2( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return memoryRequirements;
}
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements2(
- const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
{
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
- structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
- getDispatcher()->vkGetBufferMemoryRequirements2(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+ getDispatcher()->vkGetBufferMemoryRequirements2( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>
- Device::getImageSparseMemoryRequirements2(
- const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
+ Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const
{
- uint32_t sparseMemoryRequirementCount;
- getDispatcher()->vkGetImageSparseMemoryRequirements2(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
- &sparseMemoryRequirementCount,
- nullptr );
- std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> sparseMemoryRequirements(
- sparseMemoryRequirementCount );
+ std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> sparseMemoryRequirements;
+ uint32_t sparseMemoryRequirementCount;
getDispatcher()->vkGetImageSparseMemoryRequirements2(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
- &sparseMemoryRequirementCount,
- reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
- VULKAN_HPP_ASSERT( sparseMemoryRequirementCount == sparseMemoryRequirements.size() );
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+ getDispatcher()->vkGetImageSparseMemoryRequirements2( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
+ &sparseMemoryRequirementCount,
+ reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+
+ VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+ if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+ {
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+ }
return sparseMemoryRequirements;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2
- PhysicalDevice::getFeatures2() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
getDispatcher()->vkGetPhysicalDeviceFeatures2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
+
return features;
}
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- PhysicalDevice::getFeatures2() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2() const VULKAN_HPP_NOEXCEPT
{
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features =
- structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
getDispatcher()->vkGetPhysicalDeviceFeatures2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
+
return structureChain;
}
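[Illustrative aside, not part of the diff] The StructureChain overload wires the pNext chain before the single vkGetPhysicalDeviceFeatures2 call, so a caller can query core and extended feature structs together. A hedged sketch, assuming a valid vk::raii::PhysicalDevice named physicalDevice:

    // Sketch only: fetch core features and Vulkan 1.2 features in one call; the
    // chain links features.pNext to the Vulkan12 struct automatically.
    auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceVulkan12Features>();
    const auto & features12       = chain.get<vk::PhysicalDeviceVulkan12Features>();
    bool hasTimelineSemaphores    = features12.timelineSemaphore;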
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2
- PhysicalDevice::getProperties2() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
getDispatcher()->vkGetPhysicalDeviceProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+
return properties;
}
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- PhysicalDevice::getProperties2() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties2() const VULKAN_HPP_NOEXCEPT
{
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties =
- structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
getDispatcher()->vkGetPhysicalDeviceProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2
- PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
getDispatcher()->vkGetPhysicalDeviceFormatProperties2(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- static_cast<VkFormat>( format ),
- reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+
return formatProperties;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT
{
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties =
- structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
+ VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
getDispatcher()->vkGetPhysicalDeviceFormatProperties2(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- static_cast<VkFormat>( format ),
- reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageFormatProperties2
- PhysicalDevice::getImageFormatProperties2(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const
+ PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const
{
VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
- reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
- }
+ VkResult result =
+ getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
+ reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
+
return imageFormatProperties;
}
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> PhysicalDevice::getImageFormatProperties2(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const
{
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties =
- structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
- reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
- }
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
+ VkResult result =
+ getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
+ reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
+
return structureChain;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>
- PhysicalDevice::getQueueFamilyProperties2() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> PhysicalDevice::getQueueFamilyProperties2() const
{
- uint32_t queueFamilyPropertyCount;
- getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2(
- static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, nullptr );
- std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties( queueFamilyPropertyCount );
- getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- &queueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
- VULKAN_HPP_ASSERT( queueFamilyPropertyCount == queueFamilyProperties.size() );
+ std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
+ uint32_t queueFamilyPropertyCount;
+ getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, nullptr );
+ queueFamilyProperties.resize( queueFamilyPropertyCount );
+ getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ &queueFamilyPropertyCount,
+ reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+
+ VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+ if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+ {
+ queueFamilyProperties.resize( queueFamilyPropertyCount );
+ }
return queueFamilyProperties;
}
template <typename StructureChain>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain> PhysicalDevice::getQueueFamilyProperties2() const
{
- uint32_t queueFamilyPropertyCount;
- getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2(
- static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, nullptr );
- std::vector<StructureChain> returnVector( queueFamilyPropertyCount );
- std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties( queueFamilyPropertyCount );
+ std::vector<StructureChain> structureChains;
+ std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
+ uint32_t queueFamilyPropertyCount;
+ getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, nullptr );
+ structureChains.resize( queueFamilyPropertyCount );
+ queueFamilyProperties.resize( queueFamilyPropertyCount );
for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
{
- queueFamilyProperties[i].pNext =
- returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+ queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
}
- getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- &queueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+ getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ &queueFamilyPropertyCount,
+ reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+
VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+ if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+ {
+ structureChains.resize( queueFamilyPropertyCount );
+ }
for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
{
- returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
+ structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
}
- return returnVector;
+ return structureChains;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
PhysicalDevice::getMemoryProperties2() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
- getDispatcher()->vkGetPhysicalDeviceMemoryProperties2(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+ getDispatcher()->vkGetPhysicalDeviceMemoryProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+
return memoryProperties;
}
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- PhysicalDevice::getMemoryProperties2() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2() const VULKAN_HPP_NOEXCEPT
{
StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties =
structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
- getDispatcher()->vkGetPhysicalDeviceMemoryProperties2(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+ getDispatcher()->vkGetPhysicalDeviceMemoryProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>
- PhysicalDevice::getSparseImageFormatProperties2(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const
{
- uint32_t propertyCount;
- getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
- &propertyCount,
- nullptr );
- std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2> properties( propertyCount );
- getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
- &propertyCount,
- reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
- VULKAN_HPP_ASSERT( propertyCount == properties.size() );
+ std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2> properties;
+ uint32_t propertyCount;
+ getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
+ &propertyCount,
+ nullptr );
+ properties.resize( propertyCount );
+ getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
+ &propertyCount,
+ reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
+ {
+ properties.resize( propertyCount );
+ }
return properties;
}
- VULKAN_HPP_INLINE void
- CommandPool::trim( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandPool::trim( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkTrimCommandPool( static_cast<VkDevice>( m_device ),
- static_cast<VkCommandPool>( m_commandPool ),
- static_cast<VkCommandPoolTrimFlags>( flags ) );
+ getDispatcher()->vkTrimCommandPool(
+ static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( m_commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Queue
- Device::getQueue2( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Queue Device::getQueue2( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo ) const
{
return VULKAN_HPP_RAII_NAMESPACE::Queue( *this, queueInfo );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion
- Device::createSamplerYcbcrConversion(
- VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ Device::createSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion( *this, createInfo, allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate
- Device::createDescriptorUpdateTemplate(
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ Device::createDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate( *this, createInfo, allocator );
}
template <typename DataType>
- VULKAN_HPP_INLINE void
- DescriptorSet::updateWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- DataType const & data ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void DescriptorSet::updateWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ DataType const & data ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkUpdateDescriptorSetWithTemplate(
- static_cast<VkDevice>( m_device ),
- static_cast<VkDescriptorSet>( m_descriptorSet ),
- static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
- reinterpret_cast<const void *>( &data ) );
+ getDispatcher()->vkUpdateDescriptorSetWithTemplate( static_cast<VkDevice>( m_device ),
+ static_cast<VkDescriptorSet>( m_descriptorSet ),
+ static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+ reinterpret_cast<const void *>( &data ) );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties
- PhysicalDevice::getExternalBufferProperties(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
- getDispatcher()->vkGetPhysicalDeviceExternalBufferProperties(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
- reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
+ getDispatcher()->vkGetPhysicalDeviceExternalBufferProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
+ reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
+
return externalBufferProperties;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties
- PhysicalDevice::getExternalFenceProperties(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
- getDispatcher()->vkGetPhysicalDeviceExternalFenceProperties(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
- reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
+ getDispatcher()->vkGetPhysicalDeviceExternalFenceProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
+ reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
+
return externalFenceProperties;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties
- PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo &
- externalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphoreProperties(
+ const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
getDispatcher()->vkGetPhysicalDeviceExternalSemaphoreProperties(
static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ),
reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
+
return externalSemaphoreProperties;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
- Device::getDescriptorSetLayoutSupport(
- const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT
+ Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
- getDispatcher()->vkGetDescriptorSetLayoutSupport(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
- reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+ getDispatcher()->vkGetDescriptorSetLayoutSupport( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
+ reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+
return support;
}
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getDescriptorSetLayoutSupport(
- const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT
{
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support =
- structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
- getDispatcher()->vkGetDescriptorSetLayoutSupport(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
- reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
+ getDispatcher()->vkGetDescriptorSetLayoutSupport( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
+ reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+
return structureChain;
}
@@ -12024,7 +12360,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Buffer countBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
- uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+ uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
getDispatcher()->vkCmdDrawIndirectCount( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( buffer ),
@@ -12040,7 +12376,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Buffer countBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
- uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+ uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
getDispatcher()->vkCmdDrawIndexedIndirectCount( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( buffer ),
@@ -12051,142 +12387,122 @@ namespace VULKAN_HPP_NAMESPACE
stride );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::RenderPass Device::createRenderPass2(
- VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::RenderPass
+ Device::createRenderPass2( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::RenderPass( *this, createInfo, allocator );
}
- VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2(
- const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
- const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
+ const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT
{
getDispatcher()->vkCmdBeginRenderPass2( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ),
reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2(
- const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
- const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
+ const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT
{
getDispatcher()->vkCmdNextSubpass2( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ),
reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2(
- const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdEndRenderPass2( static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+ getDispatcher()->vkCmdEndRenderPass2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
}
VULKAN_HPP_INLINE void QueryPool::reset( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkResetQueryPool(
- static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( m_queryPool ), firstQuery, queryCount );
+ getDispatcher()->vkResetQueryPool( static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( m_queryPool ), firstQuery, queryCount );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Semaphore::getCounterValue() const
{
- uint64_t value;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetSemaphoreCounterValue(
- static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( m_semaphore ), &value ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Semaphore::getCounterValue" );
- }
+ uint64_t value;
+ VkResult result = getDispatcher()->vkGetSemaphoreCounterValue( static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( m_semaphore ), &value );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Semaphore::getCounterValue" );
+
return value;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
- Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo,
+ uint64_t timeout ) const
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkWaitSemaphores(
- static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) );
- if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
- ( result != VULKAN_HPP_NAMESPACE::Result::eTimeout ) )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores" );
- }
- return result;
+ VkResult result =
+ getDispatcher()->vkWaitSemaphores( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
VULKAN_HPP_INLINE void Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const
{
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkSignalSemaphore(
- static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" );
- }
+ VkResult result = getDispatcher()->vkSignalSemaphore( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress
- Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
+ Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( getDispatcher()->vkGetBufferDeviceAddress(
- static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) ) );
+ VkDeviceAddress result =
+ getDispatcher()->vkGetBufferDeviceAddress( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress(
- const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t
+ Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
{
- return getDispatcher()->vkGetBufferOpaqueCaptureAddress(
- static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+ uint64_t result =
+ getDispatcher()->vkGetBufferOpaqueCaptureAddress( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+
+ return result;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress(
- const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t
+ Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
{
- return getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddress(
- static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
+ uint64_t result = getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddress( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
+
+ return result;
}
//=== VK_VERSION_1_3 ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>
- PhysicalDevice::getToolProperties() const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties> PhysicalDevice::getToolProperties() const
{
std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties> toolProperties;
uint32_t toolCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VkResult result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceToolProperties(
- static_cast<VkPhysicalDevice>( m_physicalDevice ), &toolCount, nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount )
+ result = getDispatcher()->vkGetPhysicalDeviceToolProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), &toolCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && toolCount )
{
toolProperties.resize( toolCount );
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceToolProperties(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- &toolCount,
- reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) );
+ result = getDispatcher()->vkGetPhysicalDeviceToolProperties(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" );
+ VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
+ if ( toolCount < toolProperties.size() )
{
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
- if ( toolCount < toolProperties.size() )
- {
- toolProperties.resize( toolCount );
- }
+ toolProperties.resize( toolCount );
}
return toolProperties;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot Device::createPrivateDataSlot(
- VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot
+ Device::createPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot( *this, createInfo, allocator );
}
@@ -12196,64 +12512,44 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
uint64_t data ) const
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkSetPrivateData( static_cast<VkDevice>( m_device ),
- static_cast<VkObjectType>( objectType_ ),
- objectHandle,
- static_cast<VkPrivateDataSlot>( privateDataSlot ),
- data ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" );
- }
+ VkResult result = getDispatcher()->vkSetPrivateData(
+ static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t
- Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
- uint64_t objectHandle,
- VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
+ uint64_t objectHandle,
+ VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT
{
uint64_t data;
- getDispatcher()->vkGetPrivateData( static_cast<VkDevice>( m_device ),
- static_cast<VkObjectType>( objectType_ ),
- objectHandle,
- static_cast<VkPrivateDataSlot>( privateDataSlot ),
- &data );
+ getDispatcher()->vkGetPrivateData(
+ static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data );
+
return data;
}
- VULKAN_HPP_INLINE void
- CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event,
- const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event,
+ const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdSetEvent2( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkEvent>( event ),
- reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
+ getDispatcher()->vkCmdSetEvent2(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::resetEvent2( VULKAN_HPP_NAMESPACE::Event event,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::resetEvent2( VULKAN_HPP_NAMESPACE::Event event,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdResetEvent2( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkEvent>( event ),
- static_cast<VkPipelineStageFlags2>( stageMask ) );
+ getDispatcher()->vkCmdResetEvent2(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
}
VULKAN_HPP_INLINE void
- CommandBuffer::waitEvents2( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos ) const
- VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ CommandBuffer::waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos ) const
{
-# ifdef VULKAN_HPP_NO_EXCEPTIONS
- VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() );
-# else
if ( events.size() != dependencyInfos.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::waitEvents2: events.size() != dependencyInfos.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2: events.size() != dependencyInfos.size()" );
}
-# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
getDispatcher()->vkCmdWaitEvents2( static_cast<VkCommandBuffer>( m_commandBuffer ),
events.size(),
@@ -12261,86 +12557,65 @@ namespace VULKAN_HPP_NAMESPACE
reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2(
- const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdPipelineBarrier2( static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
+ getDispatcher()->vkCmdPipelineBarrier2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t query ) const VULKAN_HPP_NOEXCEPT
+ uint32_t query ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdWriteTimestamp2( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkPipelineStageFlags2>( stage ),
- static_cast<VkQueryPool>( queryPool ),
- query );
+ getDispatcher()->vkCmdWriteTimestamp2(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
}
- VULKAN_HPP_INLINE void Queue::submit2( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,
- VULKAN_HPP_NAMESPACE::Fence fence ) const
+ VULKAN_HPP_INLINE void Queue::submit2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,
+ VULKAN_HPP_NAMESPACE::Fence fence ) const
{
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkQueueSubmit2( static_cast<VkQueue>( m_queue ),
- submits.size(),
- reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ),
- static_cast<VkFence>( fence ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" );
- }
+ VkResult result = getDispatcher()->vkQueueSubmit2(
+ static_cast<VkQueue>( m_queue ), submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" );
}
- VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2(
- const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdCopyBuffer2( static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
+ getDispatcher()->vkCmdCopyBuffer2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdCopyImage2( static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
+ getDispatcher()->vkCmdCopyImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2(
- const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdCopyBufferToImage2(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
+ getDispatcher()->vkCmdCopyBufferToImage2( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2(
- const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdCopyImageToBuffer2(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
+ getDispatcher()->vkCmdCopyImageToBuffer2( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdBlitImage2( static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
+ getDispatcher()->vkCmdBlitImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::resolveImage2(
- const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT
{
getDispatcher()->vkCmdResolveImage2( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::beginRendering(
- const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdBeginRendering( static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
+ getDispatcher()->vkCmdBeginRendering( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::endRendering() const VULKAN_HPP_NOEXCEPT
@@ -12348,71 +12623,53 @@ namespace VULKAN_HPP_NAMESPACE
getDispatcher()->vkCmdEndRendering( static_cast<VkCommandBuffer>( m_commandBuffer ) );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdSetCullMode( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkCullModeFlags>( cullMode ) );
+ getDispatcher()->vkCmdSetCullMode( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCullModeFlags>( cullMode ) );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdSetFrontFace( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkFrontFace>( frontFace ) );
+ getDispatcher()->vkCmdSetFrontFace( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkFrontFace>( frontFace ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology(
- VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdSetPrimitiveTopology( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkPrimitiveTopology>( primitiveTopology ) );
+ getDispatcher()->vkCmdSetPrimitiveTopology( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPrimitiveTopology>( primitiveTopology ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdSetViewportWithCount( static_cast<VkCommandBuffer>( m_commandBuffer ),
- viewports.size(),
- reinterpret_cast<const VkViewport *>( viewports.data() ) );
+ getDispatcher()->vkCmdSetViewportWithCount(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCount(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdSetScissorWithCount( static_cast<VkCommandBuffer>( m_commandBuffer ),
- scissors.size(),
- reinterpret_cast<const VkRect2D *>( scissors.data() ) );
+ getDispatcher()->vkCmdSetScissorWithCount(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2(
- uint32_t firstBinding,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides ) const
{
-# ifdef VULKAN_HPP_NO_EXCEPTIONS
- VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
- VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
- VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() );
-# else
if ( buffers.size() != offsets.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" );
}
if ( !sizes.empty() && buffers.size() != sizes.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::bindVertexBuffers2: buffers.size() != sizes.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != sizes.size()" );
}
if ( !strides.empty() && buffers.size() != strides.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::bindVertexBuffers2: buffers.size() != strides.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != strides.size()" );
}
-# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
getDispatcher()->vkCmdBindVertexBuffers2( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstBinding,
@@ -12423,47 +12680,36 @@ namespace VULKAN_HPP_NAMESPACE
reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdSetDepthTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkBool32>( depthTestEnable ) );
+ getDispatcher()->vkCmdSetDepthTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthTestEnable ) );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdSetDepthWriteEnable( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkBool32>( depthWriteEnable ) );
+ getDispatcher()->vkCmdSetDepthWriteEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthWriteEnable ) );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdSetDepthCompareOp( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkCompareOp>( depthCompareOp ) );
+ getDispatcher()->vkCmdSetDepthCompareOp( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCompareOp>( depthCompareOp ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable(
- VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdSetDepthBoundsTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkBool32>( depthBoundsTestEnable ) );
+ getDispatcher()->vkCmdSetDepthBoundsTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBoundsTestEnable ) );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdSetStencilTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkBool32>( stencilTestEnable ) );
+ getDispatcher()->vkCmdSetStencilTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stencilTestEnable ) );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
- VULKAN_HPP_NAMESPACE::StencilOp failOp,
- VULKAN_HPP_NAMESPACE::StencilOp passOp,
- VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
- VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
+ VULKAN_HPP_NAMESPACE::StencilOp failOp,
+ VULKAN_HPP_NAMESPACE::StencilOp passOp,
+ VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
+ VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT
{
getDispatcher()->vkCmdSetStencilOp( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkStencilFaceFlags>( faceMask ),
@@ -12473,548 +12719,424 @@ namespace VULKAN_HPP_NAMESPACE
static_cast<VkCompareOp>( compareOp ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnable(
- VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdSetRasterizerDiscardEnable( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkBool32>( rasterizerDiscardEnable ) );
+ getDispatcher()->vkCmdSetRasterizerDiscardEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( rasterizerDiscardEnable ) );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdSetDepthBiasEnable( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkBool32>( depthBiasEnable ) );
+ getDispatcher()->vkCmdSetDepthBiasEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBiasEnable ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable(
- VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT
{
- getDispatcher()->vkCmdSetPrimitiveRestartEnable( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkBool32>( primitiveRestartEnable ) );
+ getDispatcher()->vkCmdSetPrimitiveRestartEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( primitiveRestartEnable ) );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
- Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const
- VULKAN_HPP_NOEXCEPT
+ Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
- getDispatcher()->vkGetDeviceBufferMemoryRequirements(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ getDispatcher()->vkGetDeviceBufferMemoryRequirements( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return memoryRequirements;
}
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements(
- const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
{
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
- structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
- getDispatcher()->vkGetDeviceBufferMemoryRequirements(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+ getDispatcher()->vkGetDeviceBufferMemoryRequirements( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return structureChain;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirements(
- const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
+ Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
- getDispatcher()->vkGetDeviceImageMemoryRequirements(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ getDispatcher()->vkGetDeviceImageMemoryRequirements( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return memoryRequirements;
}
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirements(
- const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
{
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
- structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
- getDispatcher()->vkGetDeviceImageMemoryRequirements(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+ getDispatcher()->vkGetDeviceImageMemoryRequirements( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>
- Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const
- VULKAN_HPP_NOEXCEPT
+ Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const
{
- uint32_t sparseMemoryRequirementCount;
- getDispatcher()->vkGetDeviceImageSparseMemoryRequirements(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
- &sparseMemoryRequirementCount,
- nullptr );
- std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> sparseMemoryRequirements(
- sparseMemoryRequirementCount );
+ std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> sparseMemoryRequirements;
+ uint32_t sparseMemoryRequirementCount;
getDispatcher()->vkGetDeviceImageSparseMemoryRequirements(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
- &sparseMemoryRequirementCount,
- reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
- VULKAN_HPP_ASSERT( sparseMemoryRequirementCount == sparseMemoryRequirements.size() );
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+ getDispatcher()->vkGetDeviceImageSparseMemoryRequirements( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
+ &sparseMemoryRequirementCount,
+ reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+
+ VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+ if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+ {
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+ }
return sparseMemoryRequirements;
}
//=== VK_KHR_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
- PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceSupportKHR &&
"Function <vkGetPhysicalDeviceSurfaceSupportKHR> needs extension <VK_KHR_surface> enabled!" );
VULKAN_HPP_NAMESPACE::Bool32 supported;
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkGetPhysicalDeviceSurfaceSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
- queueFamilyIndex,
- static_cast<VkSurfaceKHR>( surface ),
- reinterpret_cast<VkBool32 *>( &supported ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" );
- }
+ VkResult result = getDispatcher()->vkGetPhysicalDeviceSurfaceSupportKHR(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( &supported ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" );
+
return supported;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR
- PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
+ PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilitiesKHR &&
- "Function <vkGetPhysicalDeviceSurfaceCapabilitiesKHR> needs extension <VK_KHR_surface> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilitiesKHR &&
+ "Function <vkGetPhysicalDeviceSurfaceCapabilitiesKHR> needs extension <VK_KHR_surface> enabled!" );
VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- static_cast<VkSurfaceKHR>( surface ),
- reinterpret_cast<VkSurfaceCapabilitiesKHR *>( &surfaceCapabilities ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" );
- }
+ VkResult result = getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilitiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ static_cast<VkSurfaceKHR>( surface ),
+ reinterpret_cast<VkSurfaceCapabilitiesKHR *>( &surfaceCapabilities ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" );
+
return surfaceCapabilities;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>
- PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
+ PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceFormatsKHR &&
"Function <vkGetPhysicalDeviceSurfaceFormatsKHR> needs extension <VK_KHR_surface> enabled!" );
std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR> surfaceFormats;
uint32_t surfaceFormatCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VkResult result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkGetPhysicalDeviceSurfaceFormatsKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
- static_cast<VkSurfaceKHR>( surface ),
- &surfaceFormatCount,
- nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount )
+ result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormatsKHR(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
{
surfaceFormats.resize( surfaceFormatCount );
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceSurfaceFormatsKHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- static_cast<VkSurfaceKHR>( surface ),
- &surfaceFormatCount,
- reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) ) );
+ result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormatsKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ static_cast<VkSurfaceKHR>( surface ),
+ &surfaceFormatCount,
+ reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
+ VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+ if ( surfaceFormatCount < surfaceFormats.size() )
{
- VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
- if ( surfaceFormatCount < surfaceFormats.size() )
- {
- surfaceFormats.resize( surfaceFormatCount );
- }
+ surfaceFormats.resize( surfaceFormatCount );
}
return surfaceFormats;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR>
- PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
+ PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR &&
- "Function <vkGetPhysicalDeviceSurfacePresentModesKHR> needs extension <VK_KHR_surface> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR &&
+ "Function <vkGetPhysicalDeviceSurfacePresentModesKHR> needs extension <VK_KHR_surface> enabled!" );
std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR> presentModes;
uint32_t presentModeCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VkResult result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
- static_cast<VkSurfaceKHR>( surface ),
- &presentModeCount,
- nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount )
+ result = getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && presentModeCount )
{
presentModes.resize( presentModeCount );
- result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- static_cast<VkSurfaceKHR>( surface ),
- &presentModeCount,
- reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
+ result = getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ static_cast<VkSurfaceKHR>( surface ),
+ &presentModeCount,
+ reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
+ VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+ if ( presentModeCount < presentModes.size() )
{
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
- if ( presentModeCount < presentModes.size() )
- {
- presentModes.resize( presentModeCount );
- }
+ presentModes.resize( presentModeCount );
}
return presentModes;
}
//=== VK_KHR_swapchain ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR Device::createSwapchainKHR(
- VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR
+ Device::createSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR( *this, createInfo, allocator );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VkImage> SwapchainKHR::getImages() const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::Image> SwapchainKHR::getImages() const
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkGetSwapchainImagesKHR &&
- "Function <vkGetSwapchainImagesKHR> needs extension <VK_KHR_swapchain> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetSwapchainImagesKHR && "Function <vkGetSwapchainImagesKHR> needs extension <VK_KHR_swapchain> enabled!" );
- std::vector<VkImage> swapchainImages;
- uint32_t swapchainImageCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ std::vector<VULKAN_HPP_NAMESPACE::Image> swapchainImages;
+ uint32_t swapchainImageCount;
+ VkResult result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkGetSwapchainImagesKHR( static_cast<VkDevice>( m_device ),
- static_cast<VkSwapchainKHR>( m_swapchain ),
- &swapchainImageCount,
- nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && swapchainImageCount )
+ result = getDispatcher()->vkGetSwapchainImagesKHR(
+ static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), &swapchainImageCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && swapchainImageCount )
{
swapchainImages.resize( swapchainImageCount );
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkGetSwapchainImagesKHR( static_cast<VkDevice>( m_device ),
- static_cast<VkSwapchainKHR>( m_swapchain ),
- &swapchainImageCount,
- swapchainImages.data() ) );
+ result = getDispatcher()->vkGetSwapchainImagesKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkSwapchainKHR>( m_swapchain ),
+ &swapchainImageCount,
+ reinterpret_cast<VkImage *>( swapchainImages.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getImages" );
+ VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
+ if ( swapchainImageCount < swapchainImages.size() )
{
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getImages" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
- if ( swapchainImageCount < swapchainImages.size() )
- {
- swapchainImages.resize( swapchainImageCount );
- }
+ swapchainImages.resize( swapchainImageCount );
}
return swapchainImages;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::Result, uint32_t>
- SwapchainKHR::acquireNextImage( uint64_t timeout,
- VULKAN_HPP_NAMESPACE::Semaphore semaphore,
- VULKAN_HPP_NAMESPACE::Fence fence ) const
+ SwapchainKHR::acquireNextImage( uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence ) const
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireNextImageKHR &&
- "Function <vkAcquireNextImageKHR> needs extension <VK_KHR_swapchain> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireNextImageKHR && "Function <vkAcquireNextImageKHR> needs extension <VK_KHR_swapchain> enabled!" );
- uint32_t imageIndex;
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkAcquireNextImageKHR( static_cast<VkDevice>( m_device ),
- static_cast<VkSwapchainKHR>( m_swapchain ),
- timeout,
- static_cast<VkSemaphore>( semaphore ),
- static_cast<VkFence>( fence ),
- &imageIndex ) );
- if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
- ( result != VULKAN_HPP_NAMESPACE::Result::eTimeout ) &&
- ( result != VULKAN_HPP_NAMESPACE::Result::eNotReady ) &&
- ( result != VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR ) )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::acquireNextImage" );
- }
- return std::make_pair( result, imageIndex );
+ uint32_t imageIndex;
+ VkResult result = getDispatcher()->vkAcquireNextImageKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkSwapchainKHR>( m_swapchain ),
+ timeout,
+ static_cast<VkSemaphore>( semaphore ),
+ static_cast<VkFence>( fence ),
+ &imageIndex );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::acquireNextImage",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eTimeout,
+ VULKAN_HPP_NAMESPACE::Result::eNotReady,
+ VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+
+ return std::make_pair( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageIndex );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
- Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo ) const
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkQueuePresentKHR &&
- "Function <vkQueuePresentKHR> needs extension <VK_KHR_swapchain> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkQueuePresentKHR && "Function <vkQueuePresentKHR> needs extension <VK_KHR_swapchain> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkQueuePresentKHR(
- static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkPresentInfoKHR *>( &presentInfo ) ) );
- if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
- ( result != VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR ) )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR" );
- }
- return result;
+ VkResult result = getDispatcher()->vkQueuePresentKHR( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkPresentInfoKHR *>( &presentInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR
- Device::getGroupPresentCapabilitiesKHR() const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR Device::getGroupPresentCapabilitiesKHR() const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetDeviceGroupPresentCapabilitiesKHR &&
- "Function <vkGetDeviceGroupPresentCapabilitiesKHR> needs extension <VK_KHR_swapchain> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupPresentCapabilitiesKHR &&
+ "Function <vkGetDeviceGroupPresentCapabilitiesKHR> needs extension <VK_KHR_swapchain> enabled!" );
VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetDeviceGroupPresentCapabilitiesKHR(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( &deviceGroupPresentCapabilities ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" );
- }
+ VkResult result = getDispatcher()->vkGetDeviceGroupPresentCapabilitiesKHR(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( &deviceGroupPresentCapabilities ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" );
+
return deviceGroupPresentCapabilities;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR
- Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
+ Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetDeviceGroupSurfacePresentModesKHR &&
- "Function <vkGetDeviceGroupSurfacePresentModesKHR> needs extension <VK_KHR_swapchain> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupSurfacePresentModesKHR &&
+ "Function <vkGetDeviceGroupSurfacePresentModesKHR> needs extension <VK_KHR_swapchain> enabled!" );
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetDeviceGroupSurfacePresentModesKHR(
- static_cast<VkDevice>( m_device ),
- static_cast<VkSurfaceKHR>( surface ),
- reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" );
- }
+ VkResult result = getDispatcher()->vkGetDeviceGroupSurfacePresentModesKHR(
+ static_cast<VkDevice>( m_device ), static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" );
+
return modes;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::Rect2D>
- PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
+ PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR &&
- "Function <vkGetPhysicalDevicePresentRectanglesKHR> needs extension <VK_KHR_swapchain> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR &&
+ "Function <vkGetPhysicalDevicePresentRectanglesKHR> needs extension <VK_KHR_swapchain> enabled!" );
std::vector<VULKAN_HPP_NAMESPACE::Rect2D> rects;
uint32_t rectCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VkResult result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
- static_cast<VkSurfaceKHR>( surface ),
- &rectCount,
- nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && rectCount )
+ result = getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && rectCount )
{
rects.resize( rectCount );
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
- static_cast<VkSurfaceKHR>( surface ),
- &rectCount,
- reinterpret_cast<VkRect2D *>( rects.data() ) ) );
+ result = getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
+ VULKAN_HPP_ASSERT( rectCount <= rects.size() );
+ if ( rectCount < rects.size() )
{
- VULKAN_HPP_ASSERT( rectCount <= rects.size() );
- if ( rectCount < rects.size() )
- {
- rects.resize( rectCount );
- }
+ rects.resize( rectCount );
}
return rects;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::Result, uint32_t>
- Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo ) const
+ Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo ) const
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireNextImage2KHR &&
- "Function <vkAcquireNextImage2KHR> needs extension <VK_KHR_swapchain> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireNextImage2KHR && "Function <vkAcquireNextImage2KHR> needs extension <VK_KHR_swapchain> enabled!" );
- uint32_t imageIndex;
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkAcquireNextImage2KHR( static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkAcquireNextImageInfoKHR *>( &acquireInfo ),
- &imageIndex ) );
- if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
- ( result != VULKAN_HPP_NAMESPACE::Result::eTimeout ) &&
- ( result != VULKAN_HPP_NAMESPACE::Result::eNotReady ) &&
- ( result != VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR ) )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR" );
- }
- return std::make_pair( result, imageIndex );
+ uint32_t imageIndex;
+ VkResult result = getDispatcher()->vkAcquireNextImage2KHR(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAcquireNextImageInfoKHR *>( &acquireInfo ), &imageIndex );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eTimeout,
+ VULKAN_HPP_NAMESPACE::Result::eNotReady,
+ VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+
+ return std::make_pair( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageIndex );
}
//=== VK_KHR_display ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR>
- PhysicalDevice::getDisplayPropertiesKHR() const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR> PhysicalDevice::getDisplayPropertiesKHR() const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR &&
- "Function <vkGetPhysicalDeviceDisplayPropertiesKHR> needs extension <VK_KHR_display> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR &&
+ "Function <vkGetPhysicalDeviceDisplayPropertiesKHR> needs extension <VK_KHR_display> enabled!" );
std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR> properties;
uint32_t propertyCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VkResult result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
+ result = getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- &propertyCount,
- reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ) );
+ result = getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
return properties;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR>
- PhysicalDevice::getDisplayPlanePropertiesKHR() const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR> PhysicalDevice::getDisplayPlanePropertiesKHR() const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR &&
- "Function <vkGetPhysicalDeviceDisplayPlanePropertiesKHR> needs extension <VK_KHR_display> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR &&
+ "Function <vkGetPhysicalDeviceDisplayPlanePropertiesKHR> needs extension <VK_KHR_display> enabled!" );
std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR> properties;
uint32_t propertyCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VkResult result;
do
{
- result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
+ result = getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- &propertyCount,
- reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ) );
+ result = getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
return properties;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::DisplayKHR>
- PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex ) const
+ PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex ) const
{
return VULKAN_HPP_RAII_NAMESPACE::DisplayKHRs( *this, planeIndex );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR>
- DisplayKHR::getModeProperties() const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR> DisplayKHR::getModeProperties() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayModePropertiesKHR &&
"Function <vkGetDisplayModePropertiesKHR> needs extension <VK_KHR_display> enabled!" );
std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR> properties;
uint32_t propertyCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VkResult result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkGetDisplayModePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
- static_cast<VkDisplayKHR>( m_display ),
- &propertyCount,
- nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
+ result = getDispatcher()->vkGetDisplayModePropertiesKHR(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( m_display ), &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetDisplayModePropertiesKHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- static_cast<VkDisplayKHR>( m_display ),
- &propertyCount,
- reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ) );
+ result = getDispatcher()->vkGetDisplayModePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ static_cast<VkDisplayKHR>( m_display ),
+ &propertyCount,
+ reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::getModeProperties" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::getModeProperties" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
return properties;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR DisplayKHR::createMode(
- VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR
+ DisplayKHR::createMode( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR( *this, createInfo, allocator );
}
@@ -13026,22 +13148,18 @@ namespace VULKAN_HPP_NAMESPACE
"Function <vkGetDisplayPlaneCapabilitiesKHR> needs extension <VK_KHR_display> enabled!" );
VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetDisplayPlaneCapabilitiesKHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- static_cast<VkDisplayModeKHR>( m_displayModeKHR ),
- planeIndex,
- reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayModeKHR::getDisplayPlaneCapabilities" );
- }
+ VkResult result = getDispatcher()->vkGetDisplayPlaneCapabilitiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ static_cast<VkDisplayModeKHR>( m_displayModeKHR ),
+ planeIndex,
+ reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::DisplayModeKHR::getDisplayPlaneCapabilities" );
+
return capabilities;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createDisplayPlaneSurfaceKHR(
- VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+ Instance::createDisplayPlaneSurfaceKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
}
@@ -13049,16 +13167,15 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_display_swapchain ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR>
- Device::createSharedSwapchainsKHR(
- VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::SwapchainKHRs( *this, createInfos, allocator );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR Device::createSharedSwapchainKHR(
- VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR
+ Device::createSharedSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR( *this, createInfo, allocator );
}
@@ -13066,31 +13183,32 @@ namespace VULKAN_HPP_NAMESPACE
# if defined( VK_USE_PLATFORM_XLIB_KHR )
//=== VK_KHR_xlib_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createXlibSurfaceKHR(
- VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+ Instance::createXlibSurfaceKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getXlibPresentationSupportKHR(
- uint32_t queueFamilyIndex, Display & dpy, VisualID visualID ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
+ PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceXlibPresentationSupportKHR &&
- "Function <vkGetPhysicalDeviceXlibPresentationSupportKHR> needs extension <VK_KHR_xlib_surface> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceXlibPresentationSupportKHR &&
+ "Function <vkGetPhysicalDeviceXlibPresentationSupportKHR> needs extension <VK_KHR_xlib_surface> enabled!" );
- return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( getDispatcher()->vkGetPhysicalDeviceXlibPresentationSupportKHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &dpy, visualID ) );
+ VkBool32 result =
+ getDispatcher()->vkGetPhysicalDeviceXlibPresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &dpy, visualID );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
}
# endif /*VK_USE_PLATFORM_XLIB_KHR*/
# if defined( VK_USE_PLATFORM_XCB_KHR )
//=== VK_KHR_xcb_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createXcbSurfaceKHR(
- VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+ Instance::createXcbSurfaceKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
}
@@ -13098,45 +13216,45 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getXcbPresentationSupportKHR(
uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceXcbPresentationSupportKHR &&
- "Function <vkGetPhysicalDeviceXcbPresentationSupportKHR> needs extension <VK_KHR_xcb_surface> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceXcbPresentationSupportKHR &&
+ "Function <vkGetPhysicalDeviceXcbPresentationSupportKHR> needs extension <VK_KHR_xcb_surface> enabled!" );
- return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( getDispatcher()->vkGetPhysicalDeviceXcbPresentationSupportKHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &connection, visual_id ) );
+ VkBool32 result = getDispatcher()->vkGetPhysicalDeviceXcbPresentationSupportKHR(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &connection, visual_id );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
}
# endif /*VK_USE_PLATFORM_XCB_KHR*/
# if defined( VK_USE_PLATFORM_WAYLAND_KHR )
//=== VK_KHR_wayland_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createWaylandSurfaceKHR(
- VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+ Instance::createWaylandSurfaceKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
- PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex,
- struct wl_display & display ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceWaylandPresentationSupportKHR &&
- "Function <vkGetPhysicalDeviceWaylandPresentationSupportKHR> needs extension <VK_KHR_wayland_surface> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceWaylandPresentationSupportKHR &&
+ "Function <vkGetPhysicalDeviceWaylandPresentationSupportKHR> needs extension <VK_KHR_wayland_surface> enabled!" );
- return static_cast<VULKAN_HPP_NAMESPACE::Bool32>(
- getDispatcher()->vkGetPhysicalDeviceWaylandPresentationSupportKHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &display ) );
+ VkBool32 result =
+ getDispatcher()->vkGetPhysicalDeviceWaylandPresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &display );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
}
# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
# if defined( VK_USE_PLATFORM_ANDROID_KHR )
//=== VK_KHR_android_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createAndroidSurfaceKHR(
- VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+ Instance::createAndroidSurfaceKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
}
@@ -13145,31 +13263,30 @@ namespace VULKAN_HPP_NAMESPACE
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_win32_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createWin32SurfaceKHR(
- VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+ Instance::createWin32SurfaceKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
- PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceWin32PresentationSupportKHR &&
- "Function <vkGetPhysicalDeviceWin32PresentationSupportKHR> needs extension <VK_KHR_win32_surface> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceWin32PresentationSupportKHR &&
+ "Function <vkGetPhysicalDeviceWin32PresentationSupportKHR> needs extension <VK_KHR_win32_surface> enabled!" );
+
+ VkBool32 result = getDispatcher()->vkGetPhysicalDeviceWin32PresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex );
- return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( getDispatcher()->vkGetPhysicalDeviceWin32PresentationSupportKHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex ) );
+ return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
}
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_EXT_debug_report ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT
- Instance::createDebugReportCallbackEXT(
- VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ Instance::createDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT( *this, createInfo, allocator );
}
@@ -13180,10 +13297,9 @@ namespace VULKAN_HPP_NAMESPACE
size_t location,
int32_t messageCode,
const std::string & layerPrefix,
- const std::string & message ) const VULKAN_HPP_NOEXCEPT
+ const std::string & message ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkDebugReportMessageEXT &&
- "Function <vkDebugReportMessageEXT> needs extension <VK_EXT_debug_report> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkDebugReportMessageEXT && "Function <vkDebugReportMessageEXT> needs extension <VK_EXT_debug_report> enabled!" );
getDispatcher()->vkDebugReportMessageEXT( static_cast<VkInstance>( m_instance ),
static_cast<VkDebugReportFlagsEXT>( flags ),
@@ -13197,41 +13313,29 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_debug_marker ===
- VULKAN_HPP_INLINE void
- Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo ) const
+ VULKAN_HPP_INLINE void Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkDebugMarkerSetObjectTagEXT &&
"Function <vkDebugMarkerSetObjectTagEXT> needs extension <VK_EXT_debug_marker> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkDebugMarkerSetObjectTagEXT(
- static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( &tagInfo ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" );
- }
+ VkResult result =
+ getDispatcher()->vkDebugMarkerSetObjectTagEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( &tagInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" );
}
- VULKAN_HPP_INLINE void
- Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo ) const
+ VULKAN_HPP_INLINE void Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkDebugMarkerSetObjectNameEXT &&
"Function <vkDebugMarkerSetObjectNameEXT> needs extension <VK_EXT_debug_marker> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkDebugMarkerSetObjectNameEXT(
- static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( &nameInfo ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" );
- }
+ VkResult result = getDispatcher()->vkDebugMarkerSetObjectNameEXT( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( &nameInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" );
}
- VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT(
- const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDebugMarkerBeginEXT &&
- "Function <vkCmdDebugMarkerBeginEXT> needs extension <VK_EXT_debug_marker> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDebugMarkerBeginEXT && "Function <vkCmdDebugMarkerBeginEXT> needs extension <VK_EXT_debug_marker> enabled!" );
getDispatcher()->vkCmdDebugMarkerBeginEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
@@ -13239,17 +13343,14 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT() const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDebugMarkerEndEXT &&
- "Function <vkCmdDebugMarkerEndEXT> needs extension <VK_EXT_debug_marker> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDebugMarkerEndEXT && "Function <vkCmdDebugMarkerEndEXT> needs extension <VK_EXT_debug_marker> enabled!" );
getDispatcher()->vkCmdDebugMarkerEndEXT( static_cast<VkCommandBuffer>( m_commandBuffer ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT(
- const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDebugMarkerInsertEXT &&
- "Function <vkCmdDebugMarkerInsertEXT> needs extension <VK_EXT_debug_marker> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDebugMarkerInsertEXT && "Function <vkCmdDebugMarkerInsertEXT> needs extension <VK_EXT_debug_marker> enabled!" );
getDispatcher()->vkCmdDebugMarkerInsertEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
@@ -13259,254 +13360,196 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_video_queue ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR
- PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR & videoProfile ) const
+ PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR &&
- "Function <vkGetPhysicalDeviceVideoCapabilitiesKHR> needs extension <VK_KHR_video_queue> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR &&
+ "Function <vkGetPhysicalDeviceVideoCapabilitiesKHR> needs extension <VK_KHR_video_queue> enabled!" );
VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<const VkVideoProfileKHR *>( &videoProfile ),
- reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
- }
+ VkResult result = getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ),
+ reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
+
return capabilities;
}
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
- PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR & videoProfile ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const
{
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR &&
+ "Function <vkGetPhysicalDeviceVideoCapabilitiesKHR> needs extension <VK_KHR_video_queue> enabled!" );
+
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities =
- structureChain.template get<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>();
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<const VkVideoProfileKHR *>( &videoProfile ),
- reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
- }
+ VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>();
+ VkResult result = getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ),
+ reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
+
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>
- PhysicalDevice::getVideoFormatPropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo ) const
+ PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR &&
- "Function <vkGetPhysicalDeviceVideoFormatPropertiesKHR> needs extension <VK_KHR_video_queue> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR &&
+ "Function <vkGetPhysicalDeviceVideoFormatPropertiesKHR> needs extension <VK_KHR_video_queue> enabled!" );
std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR> videoFormatProperties;
uint32_t videoFormatPropertyCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VkResult result;
do
{
- result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
- &videoFormatPropertyCount,
- nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount )
+ result = getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
+ &videoFormatPropertyCount,
+ nullptr );
+ if ( ( result == VK_SUCCESS ) && videoFormatPropertyCount )
{
videoFormatProperties.resize( videoFormatPropertyCount );
result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
- &videoFormatPropertyCount,
- reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) );
+ getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
+ &videoFormatPropertyCount,
+ reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
+ VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
+ if ( videoFormatPropertyCount < videoFormatProperties.size() )
{
- VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
- if ( videoFormatPropertyCount < videoFormatProperties.size() )
- {
- videoFormatProperties.resize( videoFormatPropertyCount );
- }
+ videoFormatProperties.resize( videoFormatPropertyCount );
}
return videoFormatProperties;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR Device::createVideoSessionKHR(
- VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR
+ Device::createVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR( *this, createInfo, allocator );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR>
- VideoSessionKHR::getMemoryRequirements() const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR> VideoSessionKHR::getMemoryRequirements() const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR &&
- "Function <vkGetVideoSessionMemoryRequirementsKHR> needs extension <VK_KHR_video_queue> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR &&
+ "Function <vkGetVideoSessionMemoryRequirementsKHR> needs extension <VK_KHR_video_queue> enabled!" );
- std::vector<VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR> videoSessionMemoryRequirements;
- uint32_t videoSessionMemoryRequirementsCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR> memoryRequirements;
+ uint32_t memoryRequirementsCount;
+ VkResult result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR( static_cast<VkDevice>( m_device ),
- static_cast<VkVideoSessionKHR>( m_videoSession ),
- &videoSessionMemoryRequirementsCount,
- nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoSessionMemoryRequirementsCount )
+ result = getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR(
+ static_cast<VkDevice>( m_device ), static_cast<VkVideoSessionKHR>( m_videoSession ), &memoryRequirementsCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && memoryRequirementsCount )
{
- videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount );
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR(
- static_cast<VkDevice>( m_device ),
- static_cast<VkVideoSessionKHR>( m_videoSession ),
- &videoSessionMemoryRequirementsCount,
- reinterpret_cast<VkVideoGetMemoryPropertiesKHR *>( videoSessionMemoryRequirements.data() ) ) );
+ memoryRequirements.resize( memoryRequirementsCount );
+ result =
+ getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkVideoSessionKHR>( m_videoSession ),
+ &memoryRequirementsCount,
+ reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::VideoSessionKHR::getMemoryRequirements" );
+ VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() );
+ if ( memoryRequirementsCount < memoryRequirements.size() )
{
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::VideoSessionKHR::getMemoryRequirements" );
+ memoryRequirements.resize( memoryRequirementsCount );
}
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- VULKAN_HPP_ASSERT( videoSessionMemoryRequirementsCount <= videoSessionMemoryRequirements.size() );
- if ( videoSessionMemoryRequirementsCount < videoSessionMemoryRequirements.size() )
- {
- videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount );
- }
- }
- return videoSessionMemoryRequirements;
+ return memoryRequirements;
}
VULKAN_HPP_INLINE void VideoSessionKHR::bindMemory(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR> const & videoSessionBindMemories ) const
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR> const & bindSessionMemoryInfos ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkBindVideoSessionMemoryKHR &&
"Function <vkBindVideoSessionMemoryKHR> needs extension <VK_KHR_video_queue> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkBindVideoSessionMemoryKHR(
- static_cast<VkDevice>( m_device ),
- static_cast<VkVideoSessionKHR>( m_videoSession ),
- videoSessionBindMemories.size(),
- reinterpret_cast<const VkVideoBindMemoryKHR *>( videoSessionBindMemories.data() ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::VideoSessionKHR::bindMemory" );
- }
+ VkResult result =
+ getDispatcher()->vkBindVideoSessionMemoryKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkVideoSessionKHR>( m_videoSession ),
+ bindSessionMemoryInfos.size(),
+ reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( bindSessionMemoryInfos.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::VideoSessionKHR::bindMemory" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR
- Device::createVideoSessionParametersKHR(
- VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ Device::createVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR( *this, createInfo, allocator );
}
- VULKAN_HPP_INLINE void VideoSessionParametersKHR::update(
- const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo ) const
+ VULKAN_HPP_INLINE void VideoSessionParametersKHR::update( const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateVideoSessionParametersKHR &&
"Function <vkUpdateVideoSessionParametersKHR> needs extension <VK_KHR_video_queue> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkUpdateVideoSessionParametersKHR(
- static_cast<VkDevice>( m_device ),
- static_cast<VkVideoSessionParametersKHR>( m_videoSessionParameters ),
- reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( &updateInfo ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::VideoSessionParametersKHR::update" );
- }
+ VkResult result = getDispatcher()->vkUpdateVideoSessionParametersKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkVideoSessionParametersKHR>( m_videoSessionParameters ),
+ reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( &updateInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::VideoSessionParametersKHR::update" );
}
- VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR(
- const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginVideoCodingKHR &&
- "Function <vkCmdBeginVideoCodingKHR> needs extension <VK_KHR_video_queue> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginVideoCodingKHR && "Function <vkCmdBeginVideoCodingKHR> needs extension <VK_KHR_video_queue> enabled!" );
getDispatcher()->vkCmdBeginVideoCodingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( &beginInfo ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR(
- const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndVideoCodingKHR &&
- "Function <vkCmdEndVideoCodingKHR> needs extension <VK_KHR_video_queue> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndVideoCodingKHR && "Function <vkCmdEndVideoCodingKHR> needs extension <VK_KHR_video_queue> enabled!" );
getDispatcher()->vkCmdEndVideoCodingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkVideoEndCodingInfoKHR *>( &endCodingInfo ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR(
- const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdControlVideoCodingKHR &&
- "Function <vkCmdControlVideoCodingKHR> needs extension <VK_KHR_video_queue> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdControlVideoCodingKHR && "Function <vkCmdControlVideoCodingKHR> needs extension <VK_KHR_video_queue> enabled!" );
- getDispatcher()->vkCmdControlVideoCodingKHR(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkVideoCodingControlInfoKHR *>( &codingControlInfo ) );
+ getDispatcher()->vkCmdControlVideoCodingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkVideoCodingControlInfoKHR *>( &codingControlInfo ) );
}
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_video_decode_queue ===
- VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR(
- const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & frameInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDecodeVideoKHR &&
- "Function <vkCmdDecodeVideoKHR> needs extension <VK_KHR_video_decode_queue> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDecodeVideoKHR && "Function <vkCmdDecodeVideoKHR> needs extension <VK_KHR_video_decode_queue> enabled!" );
- getDispatcher()->vkCmdDecodeVideoKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkVideoDecodeInfoKHR *>( &frameInfo ) );
+ getDispatcher()->vkCmdDecodeVideoKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkVideoDecodeInfoKHR *>( &decodeInfo ) );
}
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_EXT_transform_feedback ===
- VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT(
- uint32_t firstBinding,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ VULKAN_HPP_INLINE void
+ CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdBindTransformFeedbackBuffersEXT &&
- "Function <vkCmdBindTransformFeedbackBuffersEXT> needs extension <VK_EXT_transform_feedback> enabled!" );
-
-# ifdef VULKAN_HPP_NO_EXCEPTIONS
- VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
- VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
-# else
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindTransformFeedbackBuffersEXT &&
+ "Function <vkCmdBindTransformFeedbackBuffersEXT> needs extension <VK_EXT_transform_feedback> enabled!" );
if ( buffers.size() != offsets.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" );
}
if ( !sizes.empty() && buffers.size() != sizes.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" );
}
-# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
getDispatcher()->vkCmdBindTransformFeedbackBuffersEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstBinding,
@@ -13516,101 +13559,74 @@ namespace VULKAN_HPP_NAMESPACE
reinterpret_cast<const VkDeviceSize *>( sizes.data() ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT(
- uint32_t firstCounterBuffer,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets ) const
- VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ VULKAN_HPP_INLINE void
+ CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdBeginTransformFeedbackEXT &&
- "Function <vkCmdBeginTransformFeedbackEXT> needs extension <VK_EXT_transform_feedback> enabled!" );
-
-# ifdef VULKAN_HPP_NO_EXCEPTIONS
- VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
-# else
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginTransformFeedbackEXT &&
+ "Function <vkCmdBeginTransformFeedbackEXT> needs extension <VK_EXT_transform_feedback> enabled!" );
if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
{
- throw LogicError(
- VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
}
-# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
- getDispatcher()->vkCmdBeginTransformFeedbackEXT(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- firstCounterBuffer,
- counterBuffers.size(),
- reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
- reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
+ getDispatcher()->vkCmdBeginTransformFeedbackEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ firstCounterBuffer,
+ counterBuffers.size(),
+ reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
+ reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT(
- uint32_t firstCounterBuffer,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets ) const
- VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ VULKAN_HPP_INLINE void
+ CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdEndTransformFeedbackEXT &&
- "Function <vkCmdEndTransformFeedbackEXT> needs extension <VK_EXT_transform_feedback> enabled!" );
-
-# ifdef VULKAN_HPP_NO_EXCEPTIONS
- VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
-# else
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndTransformFeedbackEXT &&
+ "Function <vkCmdEndTransformFeedbackEXT> needs extension <VK_EXT_transform_feedback> enabled!" );
if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
{
- throw LogicError(
- VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
}
-# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
- getDispatcher()->vkCmdEndTransformFeedbackEXT(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- firstCounterBuffer,
- counterBuffers.size(),
- reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
- reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
+ getDispatcher()->vkCmdEndTransformFeedbackEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ firstCounterBuffer,
+ counterBuffers.size(),
+ reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
+ reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
}
VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t query,
VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
- uint32_t index ) const VULKAN_HPP_NOEXCEPT
+ uint32_t index ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginQueryIndexedEXT &&
"Function <vkCmdBeginQueryIndexedEXT> needs extension <VK_EXT_transform_feedback> enabled!" );
- getDispatcher()->vkCmdBeginQueryIndexedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkQueryPool>( queryPool ),
- query,
- static_cast<VkQueryControlFlags>( flags ),
- index );
+ getDispatcher()->vkCmdBeginQueryIndexedEXT(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
}
- VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t query,
- uint32_t index ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndQueryIndexedEXT &&
"Function <vkCmdEndQueryIndexedEXT> needs extension <VK_EXT_transform_feedback> enabled!" );
- getDispatcher()->vkCmdEndQueryIndexedEXT(
- static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query, index );
+ getDispatcher()->vkCmdEndQueryIndexedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query, index );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount,
- uint32_t firstInstance,
- VULKAN_HPP_NAMESPACE::Buffer counterBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset,
- uint32_t counterOffset,
- uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount,
+ uint32_t firstInstance,
+ VULKAN_HPP_NAMESPACE::Buffer counterBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset,
+ uint32_t counterOffset,
+ uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdDrawIndirectByteCountEXT &&
- "Function <vkCmdDrawIndirectByteCountEXT> needs extension <VK_EXT_transform_feedback> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectByteCountEXT &&
+ "Function <vkCmdDrawIndirectByteCountEXT> needs extension <VK_EXT_transform_feedback> enabled!" );
getDispatcher()->vkCmdDrawIndirectByteCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
instanceCount,
@@ -13623,58 +13639,50 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NVX_binary_import ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX Device::createCuModuleNVX(
- VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX
+ Device::createCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX( *this, createInfo, allocator );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX Device::createCuFunctionNVX(
- VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX
+ Device::createCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX( *this, createInfo, allocator );
}
- VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX(
- const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCuLaunchKernelNVX &&
- "Function <vkCmdCuLaunchKernelNVX> needs extension <VK_NVX_binary_import> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCuLaunchKernelNVX && "Function <vkCmdCuLaunchKernelNVX> needs extension <VK_NVX_binary_import> enabled!" );
- getDispatcher()->vkCmdCuLaunchKernelNVX( static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkCuLaunchInfoNVX *>( &launchInfo ) );
+ getDispatcher()->vkCmdCuLaunchKernelNVX( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCuLaunchInfoNVX *>( &launchInfo ) );
}
//=== VK_NVX_image_view_handle ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX(
- const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t
+ Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageViewHandleNVX &&
- "Function <vkGetImageViewHandleNVX> needs extension <VK_NVX_image_view_handle> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageViewHandleNVX && "Function <vkGetImageViewHandleNVX> needs extension <VK_NVX_image_view_handle> enabled!" );
+
+ uint32_t result =
+ getDispatcher()->vkGetImageViewHandleNVX( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageViewHandleInfoNVX *>( &info ) );
- return getDispatcher()->vkGetImageViewHandleNVX( static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkImageViewHandleInfoNVX *>( &info ) );
+ return result;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX
- ImageView::getAddressNVX() const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX ImageView::getAddressNVX() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageViewAddressNVX &&
"Function <vkGetImageViewAddressNVX> needs extension <VK_NVX_image_view_handle> enabled!" );
VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetImageViewAddressNVX(
- static_cast<VkDevice>( m_device ),
- static_cast<VkImageView>( m_imageView ),
- reinterpret_cast<VkImageViewAddressPropertiesNVX *>( &properties ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::ImageView::getAddressNVX" );
- }
+ VkResult result = getDispatcher()->vkGetImageViewAddressNVX(
+ static_cast<VkDevice>( m_device ), static_cast<VkImageView>( m_imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( &properties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::ImageView::getAddressNVX" );
+
return properties;
}
@@ -13685,7 +13693,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Buffer countBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
- uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+ uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectCountAMD &&
"Function <vkCmdDrawIndirectCountAMD> needs extension <VK_AMD_draw_indirect_count> enabled!" );
@@ -13699,17 +13707,15 @@ namespace VULKAN_HPP_NAMESPACE
stride );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- VULKAN_HPP_NAMESPACE::Buffer countBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+ uint32_t maxDrawCount,
+ uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdDrawIndexedIndirectCountAMD &&
- "Function <vkCmdDrawIndexedIndirectCountAMD> needs extension <VK_AMD_draw_indirect_count> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndexedIndirectCountAMD &&
+ "Function <vkCmdDrawIndexedIndirectCountAMD> needs extension <VK_AMD_draw_indirect_count> enabled!" );
getDispatcher()->vkCmdDrawIndexedIndirectCountAMD( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( buffer ),
@@ -13722,68 +13728,54 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_AMD_shader_info ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<uint8_t>
- Pipeline::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
- VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<uint8_t> Pipeline::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
+ VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType ) const
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderInfoAMD &&
- "Function <vkGetShaderInfoAMD> needs extension <VK_AMD_shader_info> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderInfoAMD && "Function <vkGetShaderInfoAMD> needs extension <VK_AMD_shader_info> enabled!" );
- std::vector<uint8_t> info;
- size_t infoSize;
- VULKAN_HPP_NAMESPACE::Result result;
+ std::vector<uint8_t> info;
+ size_t infoSize;
+ VkResult result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkGetShaderInfoAMD( static_cast<VkDevice>( m_device ),
- static_cast<VkPipeline>( m_pipeline ),
- static_cast<VkShaderStageFlagBits>( shaderStage ),
- static_cast<VkShaderInfoTypeAMD>( infoType ),
- &infoSize,
- nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && infoSize )
+ result = getDispatcher()->vkGetShaderInfoAMD( static_cast<VkDevice>( m_device ),
+ static_cast<VkPipeline>( m_pipeline ),
+ static_cast<VkShaderStageFlagBits>( shaderStage ),
+ static_cast<VkShaderInfoTypeAMD>( infoType ),
+ &infoSize,
+ nullptr );
+ if ( ( result == VK_SUCCESS ) && infoSize )
{
info.resize( infoSize );
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkGetShaderInfoAMD( static_cast<VkDevice>( m_device ),
- static_cast<VkPipeline>( m_pipeline ),
- static_cast<VkShaderStageFlagBits>( shaderStage ),
- static_cast<VkShaderInfoTypeAMD>( infoType ),
- &infoSize,
- reinterpret_cast<void *>( info.data() ) ) );
+ result = getDispatcher()->vkGetShaderInfoAMD( static_cast<VkDevice>( m_device ),
+ static_cast<VkPipeline>( m_pipeline ),
+ static_cast<VkShaderStageFlagBits>( shaderStage ),
+ static_cast<VkShaderInfoTypeAMD>( infoType ),
+ &infoSize,
+ reinterpret_cast<void *>( info.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getShaderInfoAMD" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getShaderInfoAMD" );
+ VULKAN_HPP_ASSERT( infoSize <= info.size() );
+ if ( infoSize < info.size() )
{
- VULKAN_HPP_ASSERT( infoSize <= info.size() );
- if ( infoSize < info.size() )
- {
- info.resize( infoSize );
- }
+ info.resize( infoSize );
}
return info;
}
//=== VK_KHR_dynamic_rendering ===
- VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR(
- const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRenderingKHR &&
- "Function <vkCmdBeginRenderingKHR> needs extension <VK_KHR_dynamic_rendering> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRenderingKHR && "Function <vkCmdBeginRenderingKHR> needs extension <VK_KHR_dynamic_rendering> enabled!" );
- getDispatcher()->vkCmdBeginRenderingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
+ getDispatcher()->vkCmdBeginRenderingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::endRenderingKHR() const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderingKHR &&
- "Function <vkCmdEndRenderingKHR> needs extension <VK_KHR_dynamic_rendering> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderingKHR && "Function <vkCmdEndRenderingKHR> needs extension <VK_KHR_dynamic_rendering> enabled!" );
getDispatcher()->vkCmdEndRenderingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ) );
}
@@ -13792,9 +13784,8 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_GGP_stream_descriptor_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
- Instance::createStreamDescriptorSurfaceGGP(
- VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ Instance::createStreamDescriptorSurfaceGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
}
@@ -13803,324 +13794,292 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_external_memory_capabilities ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV
- PhysicalDevice::getExternalImageFormatPropertiesNV(
- VULKAN_HPP_NAMESPACE::Format format,
- VULKAN_HPP_NAMESPACE::ImageType type,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
- VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType ) const
+ PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format,
+ VULKAN_HPP_NAMESPACE::ImageType type,
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
+ VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceExternalImageFormatPropertiesNV &&
- "Function <vkGetPhysicalDeviceExternalImageFormatPropertiesNV> needs extension <VK_NV_external_memory_capabilities> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalImageFormatPropertiesNV &&
+ "Function <vkGetPhysicalDeviceExternalImageFormatPropertiesNV> needs extension <VK_NV_external_memory_capabilities> enabled!" );
VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceExternalImageFormatPropertiesNV(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- static_cast<VkFormat>( format ),
- static_cast<VkImageType>( type ),
- static_cast<VkImageTiling>( tiling ),
- static_cast<VkImageUsageFlags>( usage ),
- static_cast<VkImageCreateFlags>( flags ),
- static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
- reinterpret_cast<VkExternalImageFormatPropertiesNV *>( &externalImageFormatProperties ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result,
- VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" );
- }
+ VkResult result = getDispatcher()->vkGetPhysicalDeviceExternalImageFormatPropertiesNV(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ static_cast<VkFormat>( format ),
+ static_cast<VkImageType>( type ),
+ static_cast<VkImageTiling>( tiling ),
+ static_cast<VkImageUsageFlags>( usage ),
+ static_cast<VkImageCreateFlags>( flags ),
+ static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
+ reinterpret_cast<VkExternalImageFormatPropertiesNV *>( &externalImageFormatProperties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" );
+
return externalImageFormatProperties;
}
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_NV_external_memory_win32 ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE
- DeviceMemory::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE DeviceMemory::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryWin32HandleNV &&
"Function <vkGetMemoryWin32HandleNV> needs extension <VK_NV_external_memory_win32> enabled!" );
- HANDLE handle;
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkGetMemoryWin32HandleNV( static_cast<VkDevice>( m_device ),
- static_cast<VkDeviceMemory>( m_memory ),
- static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ),
- &handle ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::DeviceMemory::getMemoryWin32HandleNV" );
- }
+ HANDLE handle;
+ VkResult result = getDispatcher()->vkGetMemoryWin32HandleNV(
+ static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( m_memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::DeviceMemory::getMemoryWin32HandleNV" );
+
return handle;
}
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_get_physical_device_properties2 ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2
- PhysicalDevice::getFeatures2KHR() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2KHR() const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceFeatures2KHR &&
- "Function <vkGetPhysicalDeviceFeatures2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures2KHR &&
+ "Function <vkGetPhysicalDeviceFeatures2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
getDispatcher()->vkGetPhysicalDeviceFeatures2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
+
return features;
}
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- PhysicalDevice::getFeatures2KHR() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2KHR() const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceFeatures2KHR &&
- "Function <vkGetPhysicalDeviceFeatures2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures2KHR &&
+ "Function <vkGetPhysicalDeviceFeatures2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features =
- structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
getDispatcher()->vkGetPhysicalDeviceFeatures2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
+
return structureChain;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2
- PhysicalDevice::getProperties2KHR() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2KHR() const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceProperties2KHR &&
- "Function <vkGetPhysicalDeviceProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties2KHR &&
+ "Function <vkGetPhysicalDeviceProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
- getDispatcher()->vkGetPhysicalDeviceProperties2KHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+ getDispatcher()->vkGetPhysicalDeviceProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+
return properties;
}
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- PhysicalDevice::getProperties2KHR() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties2KHR() const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceProperties2KHR &&
- "Function <vkGetPhysicalDeviceProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties2KHR &&
+ "Function <vkGetPhysicalDeviceProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties =
- structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
- getDispatcher()->vkGetPhysicalDeviceProperties2KHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
+ getDispatcher()->vkGetPhysicalDeviceProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2
- PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR &&
- "Function <vkGetPhysicalDeviceFormatProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR &&
+ "Function <vkGetPhysicalDeviceFormatProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- static_cast<VkFormat>( format ),
- reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+
return formatProperties;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR &&
- "Function <vkGetPhysicalDeviceFormatProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR &&
+ "Function <vkGetPhysicalDeviceFormatProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties =
- structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
+ VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- static_cast<VkFormat>( format ),
- reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageFormatProperties2
- PhysicalDevice::getImageFormatProperties2KHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const
+ PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR &&
- "Function <vkGetPhysicalDeviceImageFormatProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR &&
+ "Function <vkGetPhysicalDeviceImageFormatProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
- reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
- }
+ VkResult result =
+ getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
+ reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
+
return imageFormatProperties;
}
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> PhysicalDevice::getImageFormatProperties2KHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const
{
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR &&
+ "Function <vkGetPhysicalDeviceImageFormatProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
+
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties =
- structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
- reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
- }
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
+ VkResult result =
+ getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
+ reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
+
return structureChain;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>
- PhysicalDevice::getQueueFamilyProperties2KHR() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> PhysicalDevice::getQueueFamilyProperties2KHR() const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR &&
- "Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
-
- uint32_t queueFamilyPropertyCount;
- getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, nullptr );
- std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties( queueFamilyPropertyCount );
- getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- &queueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
- VULKAN_HPP_ASSERT( queueFamilyPropertyCount == queueFamilyProperties.size() );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR &&
+ "Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
+
+ std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
+ uint32_t queueFamilyPropertyCount;
+ getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, nullptr );
+ queueFamilyProperties.resize( queueFamilyPropertyCount );
+ getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ &queueFamilyPropertyCount,
+ reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+
+ VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+ if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+ {
+ queueFamilyProperties.resize( queueFamilyPropertyCount );
+ }
return queueFamilyProperties;
}
template <typename StructureChain>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain>
- PhysicalDevice::getQueueFamilyProperties2KHR() const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain> PhysicalDevice::getQueueFamilyProperties2KHR() const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR &&
- "Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
-
- uint32_t queueFamilyPropertyCount;
- getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, nullptr );
- std::vector<StructureChain> returnVector( queueFamilyPropertyCount );
- std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties( queueFamilyPropertyCount );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR &&
+ "Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
+
+ std::vector<StructureChain> structureChains;
+ std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
+ uint32_t queueFamilyPropertyCount;
+ getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, nullptr );
+ structureChains.resize( queueFamilyPropertyCount );
+ queueFamilyProperties.resize( queueFamilyPropertyCount );
for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
{
- queueFamilyProperties[i].pNext =
- returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+ queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
}
- getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- &queueFamilyPropertyCount,
- reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+ getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ &queueFamilyPropertyCount,
+ reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+
VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+ if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+ {
+ structureChains.resize( queueFamilyPropertyCount );
+ }
for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
{
- returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
+ structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
}
- return returnVector;
+ return structureChains;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
PhysicalDevice::getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR &&
- "Function <vkGetPhysicalDeviceMemoryProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR &&
+ "Function <vkGetPhysicalDeviceMemoryProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
- getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+ getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+
return memoryProperties;
}
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- PhysicalDevice::getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR &&
- "Function <vkGetPhysicalDeviceMemoryProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR &&
+ "Function <vkGetPhysicalDeviceMemoryProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties =
structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
- getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+ getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>
- PhysicalDevice::getSparseImageFormatProperties2KHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR &&
- "Function <vkGetPhysicalDeviceSparseImageFormatProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR &&
+ "Function <vkGetPhysicalDeviceSparseImageFormatProperties2KHR> needs extension <VK_KHR_get_physical_device_properties2> enabled!" );
- uint32_t propertyCount;
- getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
- &propertyCount,
- nullptr );
- std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2> properties( propertyCount );
- getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
- &propertyCount,
- reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
- VULKAN_HPP_ASSERT( propertyCount == properties.size() );
+ std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2> properties;
+ uint32_t propertyCount;
+ getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
+ &propertyCount,
+ nullptr );
+ properties.resize( propertyCount );
+ getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
+ &propertyCount,
+ reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
+ {
+ properties.resize( propertyCount );
+ }
return properties;
}
//=== VK_KHR_device_group ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags
- Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT
+ Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetDeviceGroupPeerMemoryFeaturesKHR &&
- "Function <vkGetDeviceGroupPeerMemoryFeaturesKHR> needs extension <VK_KHR_device_group> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupPeerMemoryFeaturesKHR &&
+ "Function <vkGetDeviceGroupPeerMemoryFeaturesKHR> needs extension <VK_KHR_device_group> enabled!" );
VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
- getDispatcher()->vkGetDeviceGroupPeerMemoryFeaturesKHR(
- static_cast<VkDevice>( m_device ),
- heapIndex,
- localDeviceIndex,
- remoteDeviceIndex,
- reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
+ getDispatcher()->vkGetDeviceGroupPeerMemoryFeaturesKHR( static_cast<VkDevice>( m_device ),
+ heapIndex,
+ localDeviceIndex,
+ remoteDeviceIndex,
+ reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
+
return peerMemoryFeatures;
}
VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDeviceMaskKHR &&
- "Function <vkCmdSetDeviceMaskKHR> needs extension <VK_KHR_device_group> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDeviceMaskKHR && "Function <vkCmdSetDeviceMaskKHR> needs extension <VK_KHR_device_group> enabled!" );
getDispatcher()->vkCmdSetDeviceMaskKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), deviceMask );
}
@@ -14132,24 +14091,18 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t groupCountY,
uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchBaseKHR &&
- "Function <vkCmdDispatchBaseKHR> needs extension <VK_KHR_device_group> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchBaseKHR && "Function <vkCmdDispatchBaseKHR> needs extension <VK_KHR_device_group> enabled!" );
- getDispatcher()->vkCmdDispatchBaseKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
- baseGroupX,
- baseGroupY,
- baseGroupZ,
- groupCountX,
- groupCountY,
- groupCountZ );
+ getDispatcher()->vkCmdDispatchBaseKHR(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
}
# if defined( VK_USE_PLATFORM_VI_NN )
//=== VK_NN_vi_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createViSurfaceNN(
- VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+ Instance::createViSurfaceNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
}
@@ -14157,315 +14110,247 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_maintenance1 ===
- VULKAN_HPP_INLINE void
- CommandPool::trimKHR( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandPool::trimKHR( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkTrimCommandPoolKHR &&
- "Function <vkTrimCommandPoolKHR> needs extension <VK_KHR_maintenance1> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkTrimCommandPoolKHR && "Function <vkTrimCommandPoolKHR> needs extension <VK_KHR_maintenance1> enabled!" );
- getDispatcher()->vkTrimCommandPoolKHR( static_cast<VkDevice>( m_device ),
- static_cast<VkCommandPool>( m_commandPool ),
- static_cast<VkCommandPoolTrimFlags>( flags ) );
+ getDispatcher()->vkTrimCommandPoolKHR(
+ static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( m_commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
}
//=== VK_KHR_device_group_creation ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>
- Instance::enumeratePhysicalDeviceGroupsKHR() const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties> Instance::enumeratePhysicalDeviceGroupsKHR() const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR &&
- "Function <vkEnumeratePhysicalDeviceGroupsKHR> needs extension <VK_KHR_device_group_creation> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR &&
+ "Function <vkEnumeratePhysicalDeviceGroupsKHR> needs extension <VK_KHR_device_group_creation> enabled!" );
std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties> physicalDeviceGroupProperties;
uint32_t physicalDeviceGroupCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VkResult result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR(
- static_cast<VkInstance>( m_instance ), &physicalDeviceGroupCount, nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount )
+ result = getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR( static_cast<VkInstance>( m_instance ), &physicalDeviceGroupCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
{
physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR(
- static_cast<VkInstance>( m_instance ),
- &physicalDeviceGroupCount,
- reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
+ result =
+ getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR( static_cast<VkInstance>( m_instance ),
+ &physicalDeviceGroupCount,
+ reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" );
+ VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+ if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
{
- VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
- if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
- {
- physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
- }
+ physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
}
return physicalDeviceGroupProperties;
}
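    // Hedged usage sketch -- an annotation, not part of the upstream vulkan_raii.hpp diff.
    // The wrapper above hides the VK_INCOMPLETE count/fill retry loop, so application code
    // reduces to a single call. The helper name is illustrative only.
    inline std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>
      listDeviceGroups( VULKAN_HPP_RAII_NAMESPACE::Instance const & instance )
    {
      // throws vk::SystemError if vkEnumeratePhysicalDeviceGroupsKHR reports a failure
      return instance.enumeratePhysicalDeviceGroupsKHR();
    }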
//=== VK_KHR_external_memory_capabilities ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties
- PhysicalDevice::getExternalBufferPropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties PhysicalDevice::getExternalBufferPropertiesKHR(
+ const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceExternalBufferPropertiesKHR &&
- "Function <vkGetPhysicalDeviceExternalBufferPropertiesKHR> needs extension <VK_KHR_external_memory_capabilities> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalBufferPropertiesKHR &&
+ "Function <vkGetPhysicalDeviceExternalBufferPropertiesKHR> needs extension <VK_KHR_external_memory_capabilities> enabled!" );
VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
- getDispatcher()->vkGetPhysicalDeviceExternalBufferPropertiesKHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
- reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
+ getDispatcher()->vkGetPhysicalDeviceExternalBufferPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
+ reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
+
return externalBufferProperties;
}
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_memory_win32 ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE Device::getMemoryWin32HandleKHR(
- const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE
+ Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetMemoryWin32HandleKHR &&
- "Function <vkGetMemoryWin32HandleKHR> needs extension <VK_KHR_external_memory_win32> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryWin32HandleKHR &&
+ "Function <vkGetMemoryWin32HandleKHR> needs extension <VK_KHR_external_memory_win32> enabled!" );
+
+ HANDLE handle;
+ VkResult result = getDispatcher()->vkGetMemoryWin32HandleKHR(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" );
- HANDLE handle;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetMemoryWin32HandleKHR(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( &getWin32HandleInfo ),
- &handle ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" );
- }
return handle;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR
- Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- HANDLE handle ) const
+ Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetMemoryWin32HandlePropertiesKHR &&
- "Function <vkGetMemoryWin32HandlePropertiesKHR> needs extension <VK_KHR_external_memory_win32> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryWin32HandlePropertiesKHR &&
+ "Function <vkGetMemoryWin32HandlePropertiesKHR> needs extension <VK_KHR_external_memory_win32> enabled!" );
VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetMemoryWin32HandlePropertiesKHR(
- static_cast<VkDevice>( m_device ),
- static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
- handle,
- reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( &memoryWin32HandleProperties ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" );
- }
+ VkResult result =
+ getDispatcher()->vkGetMemoryWin32HandlePropertiesKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
+ handle,
+ reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( &memoryWin32HandleProperties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" );
+
return memoryWin32HandleProperties;
}
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_memory_fd ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE int
- Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE int Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo ) const
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryFdKHR &&
- "Function <vkGetMemoryFdKHR> needs extension <VK_KHR_external_memory_fd> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryFdKHR && "Function <vkGetMemoryFdKHR> needs extension <VK_KHR_external_memory_fd> enabled!" );
+
+ int fd;
+ VkResult result =
+ getDispatcher()->vkGetMemoryFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetFdInfoKHR *>( &getFdInfo ), &fd );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" );
- int fd;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetMemoryFdKHR(
- static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetFdInfoKHR *>( &getFdInfo ), &fd ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" );
- }
return fd;
}
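    // Hedged usage sketch -- an annotation, not part of the upstream diff. With the RAII wrapper
    // above, the VkResult check is internal and the caller receives the POSIX file descriptor
    // directly (and owns it). The helper name and parameters are illustrative only.
    inline int exportOpaqueFd( VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::DeviceMemory memory )
    {
      VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR getFdInfo( memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
      return device.getMemoryFdKHR( getFdInfo );  // throws vk::SystemError on failure; caller must close() the fd
    }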
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR
- Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- int fd ) const
+ Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryFdPropertiesKHR &&
"Function <vkGetMemoryFdPropertiesKHR> needs extension <VK_KHR_external_memory_fd> enabled!" );
VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetMemoryFdPropertiesKHR(
- static_cast<VkDevice>( m_device ),
- static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
- fd,
- reinterpret_cast<VkMemoryFdPropertiesKHR *>( &memoryFdProperties ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" );
- }
+ VkResult result = getDispatcher()->vkGetMemoryFdPropertiesKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
+ fd,
+ reinterpret_cast<VkMemoryFdPropertiesKHR *>( &memoryFdProperties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" );
+
return memoryFdProperties;
}
//=== VK_KHR_external_semaphore_capabilities ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties
- PhysicalDevice::getExternalSemaphorePropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const
- VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphorePropertiesKHR(
+ const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceExternalSemaphorePropertiesKHR &&
- "Function <vkGetPhysicalDeviceExternalSemaphorePropertiesKHR> needs extension <VK_KHR_external_semaphore_capabilities> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalSemaphorePropertiesKHR &&
+ "Function <vkGetPhysicalDeviceExternalSemaphorePropertiesKHR> needs extension <VK_KHR_external_semaphore_capabilities> enabled!" );
VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
getDispatcher()->vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(
static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ),
reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
+
return externalSemaphoreProperties;
}
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_semaphore_win32 ===
- VULKAN_HPP_INLINE void Device::importSemaphoreWin32HandleKHR(
- const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo ) const
+ VULKAN_HPP_INLINE void
+ Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkImportSemaphoreWin32HandleKHR &&
- "Function <vkImportSemaphoreWin32HandleKHR> needs extension <VK_KHR_external_semaphore_win32> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkImportSemaphoreWin32HandleKHR &&
+ "Function <vkImportSemaphoreWin32HandleKHR> needs extension <VK_KHR_external_semaphore_win32> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkImportSemaphoreWin32HandleKHR(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( &importSemaphoreWin32HandleInfo ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" );
- }
+ VkResult result = getDispatcher()->vkImportSemaphoreWin32HandleKHR(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( &importSemaphoreWin32HandleInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE Device::getSemaphoreWin32HandleKHR(
- const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE
+ Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetSemaphoreWin32HandleKHR &&
- "Function <vkGetSemaphoreWin32HandleKHR> needs extension <VK_KHR_external_semaphore_win32> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreWin32HandleKHR &&
+ "Function <vkGetSemaphoreWin32HandleKHR> needs extension <VK_KHR_external_semaphore_win32> enabled!" );
+
+ HANDLE handle;
+ VkResult result = getDispatcher()->vkGetSemaphoreWin32HandleKHR(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" );
- HANDLE handle;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetSemaphoreWin32HandleKHR(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( &getWin32HandleInfo ),
- &handle ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" );
- }
return handle;
}
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_semaphore_fd ===
- VULKAN_HPP_INLINE void
- Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo ) const
+ VULKAN_HPP_INLINE void Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkImportSemaphoreFdKHR &&
"Function <vkImportSemaphoreFdKHR> needs extension <VK_KHR_external_semaphore_fd> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkImportSemaphoreFdKHR(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( &importSemaphoreFdInfo ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" );
- }
+ VkResult result = getDispatcher()->vkImportSemaphoreFdKHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( &importSemaphoreFdInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE int
- Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE int Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo ) const
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreFdKHR &&
- "Function <vkGetSemaphoreFdKHR> needs extension <VK_KHR_external_semaphore_fd> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreFdKHR && "Function <vkGetSemaphoreFdKHR> needs extension <VK_KHR_external_semaphore_fd> enabled!" );
+
+ int fd;
+ VkResult result =
+ getDispatcher()->vkGetSemaphoreFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( &getFdInfo ), &fd );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" );
- int fd;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetSemaphoreFdKHR(
- static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( &getFdInfo ), &fd ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" );
- }
return fd;
}
//=== VK_KHR_push_descriptor ===
VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR(
- VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
- VULKAN_HPP_NAMESPACE::PipelineLayout layout,
- uint32_t set,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+ uint32_t set,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSetKHR &&
"Function <vkCmdPushDescriptorSetKHR> needs extension <VK_KHR_push_descriptor> enabled!" );
- getDispatcher()->vkCmdPushDescriptorSetKHR(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
- static_cast<VkPipelineLayout>( layout ),
- set,
- descriptorWrites.size(),
- reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ) );
+ getDispatcher()->vkCmdPushDescriptorSetKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
+ static_cast<VkPipelineLayout>( layout ),
+ set,
+ descriptorWrites.size(),
+ reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ) );
}
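    // Hedged usage sketch -- an annotation, not part of the upstream diff. ArrayProxy accepts a
    // single element, an initializer list, or a container, so a push-descriptor update can be
    // written inline. The helper name and the assumption that binding 0 is a uniform buffer are
    // illustrative only.
    inline void pushUniformBuffer( VULKAN_HPP_RAII_NAMESPACE::CommandBuffer const &   commandBuffer,
                                   VULKAN_HPP_NAMESPACE::PipelineLayout               layout,
                                   VULKAN_HPP_NAMESPACE::DescriptorBufferInfo const & bufferInfo )
    {
      // dstSet is ignored for push descriptors, so it can stay default-constructed
      VULKAN_HPP_NAMESPACE::WriteDescriptorSet write( {}, 0, 0, 1, VULKAN_HPP_NAMESPACE::DescriptorType::eUniformBuffer, nullptr, &bufferInfo );
      commandBuffer.pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, layout, 0, write );
    }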
template <typename DataType>
- VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR(
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- VULKAN_HPP_NAMESPACE::PipelineLayout layout,
- uint32_t set,
- DataType const & data ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+ uint32_t set,
+ DataType const & data ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdPushDescriptorSetWithTemplateKHR &&
- "Function <vkCmdPushDescriptorSetWithTemplateKHR> needs extension <VK_KHR_push_descriptor> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSetWithTemplateKHR &&
+ "Function <vkCmdPushDescriptorSetWithTemplateKHR> needs extension <VK_KHR_push_descriptor> enabled!" );
- getDispatcher()->vkCmdPushDescriptorSetWithTemplateKHR(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
- static_cast<VkPipelineLayout>( layout ),
- set,
- reinterpret_cast<const void *>( &data ) );
+ getDispatcher()->vkCmdPushDescriptorSetWithTemplateKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+ static_cast<VkPipelineLayout>( layout ),
+ set,
+ reinterpret_cast<const void *>( &data ) );
}
//=== VK_EXT_conditional_rendering ===
VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT(
- const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin ) const
- VULKAN_HPP_NOEXCEPT
+ const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdBeginConditionalRenderingEXT &&
- "Function <vkCmdBeginConditionalRenderingEXT> needs extension <VK_EXT_conditional_rendering> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginConditionalRenderingEXT &&
+ "Function <vkCmdBeginConditionalRenderingEXT> needs extension <VK_EXT_conditional_rendering> enabled!" );
- getDispatcher()->vkCmdBeginConditionalRenderingEXT(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( &conditionalRenderingBegin ) );
+ getDispatcher()->vkCmdBeginConditionalRenderingEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( &conditionalRenderingBegin ) );
}
VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT() const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdEndConditionalRenderingEXT &&
- "Function <vkCmdEndConditionalRenderingEXT> needs extension <VK_EXT_conditional_rendering> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndConditionalRenderingEXT &&
+ "Function <vkCmdEndConditionalRenderingEXT> needs extension <VK_EXT_conditional_rendering> enabled!" );
getDispatcher()->vkCmdEndConditionalRenderingEXT( static_cast<VkCommandBuffer>( m_commandBuffer ) );
}
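    // Hedged usage sketch -- an annotation, not part of the upstream diff. Conditional rendering
    // brackets a range of commands whose execution is predicated on a 32-bit value read from
    // 'buffer' at 'offset'. The helper name is illustrative and assumes the command buffer is
    // already recording inside a compatible render pass.
    inline void drawPredicated( VULKAN_HPP_RAII_NAMESPACE::CommandBuffer const & cb,
                                VULKAN_HPP_NAMESPACE::Buffer                     buffer,
                                VULKAN_HPP_NAMESPACE::DeviceSize                 offset )
    {
      cb.beginConditionalRenderingEXT( VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT( buffer, offset ) );
      cb.draw( 3, 1, 0, 0 );  // skipped by the GPU when the predicate value is zero
      cb.endConditionalRenderingEXT();
    }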
@@ -14473,81 +14358,68 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_descriptor_update_template ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate
- Device::createDescriptorUpdateTemplateKHR(
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ Device::createDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate( *this, createInfo, allocator );
}
- VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR(
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkDestroyDescriptorUpdateTemplateKHR &&
- "Function <vkDestroyDescriptorUpdateTemplateKHR> needs extension <VK_KHR_descriptor_update_template> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkDestroyDescriptorUpdateTemplateKHR &&
+ "Function <vkDestroyDescriptorUpdateTemplateKHR> needs extension <VK_KHR_descriptor_update_template> enabled!" );
getDispatcher()->vkDestroyDescriptorUpdateTemplateKHR(
static_cast<VkDevice>( m_device ),
static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
template <typename DataType>
- VULKAN_HPP_INLINE void
- DescriptorSet::updateWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
- DataType const & data ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void DescriptorSet::updateWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+ DataType const & data ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkUpdateDescriptorSetWithTemplateKHR &&
- "Function <vkUpdateDescriptorSetWithTemplateKHR> needs extension <VK_KHR_descriptor_update_template> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateDescriptorSetWithTemplateKHR &&
+ "Function <vkUpdateDescriptorSetWithTemplateKHR> needs extension <VK_KHR_descriptor_update_template> enabled!" );
- getDispatcher()->vkUpdateDescriptorSetWithTemplateKHR(
- static_cast<VkDevice>( m_device ),
- static_cast<VkDescriptorSet>( m_descriptorSet ),
- static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
- reinterpret_cast<const void *>( &data ) );
+ getDispatcher()->vkUpdateDescriptorSetWithTemplateKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkDescriptorSet>( m_descriptorSet ),
+ static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+ reinterpret_cast<const void *>( &data ) );
}
//=== VK_NV_clip_space_w_scaling ===
VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV(
- uint32_t firstViewport,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings ) const VULKAN_HPP_NOEXCEPT
+ uint32_t firstViewport,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdSetViewportWScalingNV &&
- "Function <vkCmdSetViewportWScalingNV> needs extension <VK_NV_clip_space_w_scaling> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWScalingNV &&
+ "Function <vkCmdSetViewportWScalingNV> needs extension <VK_NV_clip_space_w_scaling> enabled!" );
- getDispatcher()->vkCmdSetViewportWScalingNV(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- firstViewport,
- viewportWScalings.size(),
- reinterpret_cast<const VkViewportWScalingNV *>( viewportWScalings.data() ) );
+ getDispatcher()->vkCmdSetViewportWScalingNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ firstViewport,
+ viewportWScalings.size(),
+ reinterpret_cast<const VkViewportWScalingNV *>( viewportWScalings.data() ) );
}
# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
//=== VK_EXT_acquire_xlib_display ===
- VULKAN_HPP_INLINE void PhysicalDevice::acquireXlibDisplayEXT( Display & dpy,
- VULKAN_HPP_NAMESPACE::DisplayKHR display ) const
+ VULKAN_HPP_INLINE void PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireXlibDisplayEXT &&
"Function <vkAcquireXlibDisplayEXT> needs extension <VK_EXT_acquire_xlib_display> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkAcquireXlibDisplayEXT(
- static_cast<VkPhysicalDevice>( m_physicalDevice ), &dpy, static_cast<VkDisplayKHR>( display ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" );
- }
+ VkResult result =
+ getDispatcher()->vkAcquireXlibDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), &dpy, static_cast<VkDisplayKHR>( display ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DisplayKHR
- PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DisplayKHR PhysicalDevice::getRandROutputDisplayEXT( Display & dpy,
+ RROutput rrOutput ) const
{
return VULKAN_HPP_RAII_NAMESPACE::DisplayKHR( *this, dpy, rrOutput );
}
@@ -14556,139 +14428,101 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_display_surface_counter ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT
- PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
+ PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2EXT &&
- "Function <vkGetPhysicalDeviceSurfaceCapabilities2EXT> needs extension <VK_EXT_display_surface_counter> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2EXT &&
+ "Function <vkGetPhysicalDeviceSurfaceCapabilities2EXT> needs extension <VK_EXT_display_surface_counter> enabled!" );
VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2EXT(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- static_cast<VkSurfaceKHR>( surface ),
- reinterpret_cast<VkSurfaceCapabilities2EXT *>( &surfaceCapabilities ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" );
- }
+ VkResult result = getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2EXT( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ static_cast<VkSurfaceKHR>( surface ),
+ reinterpret_cast<VkSurfaceCapabilities2EXT *>( &surfaceCapabilities ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" );
+
return surfaceCapabilities;
}
//=== VK_EXT_display_control ===
- VULKAN_HPP_INLINE void
- Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
- const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo ) const
+ VULKAN_HPP_INLINE void Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+ const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo ) const
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkDisplayPowerControlEXT &&
- "Function <vkDisplayPowerControlEXT> needs extension <VK_EXT_display_control> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkDisplayPowerControlEXT && "Function <vkDisplayPowerControlEXT> needs extension <VK_EXT_display_control> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkDisplayPowerControlEXT(
- static_cast<VkDevice>( m_device ),
- static_cast<VkDisplayKHR>( display ),
- reinterpret_cast<const VkDisplayPowerInfoEXT *>( &displayPowerInfo ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" );
- }
+ VkResult result = getDispatcher()->vkDisplayPowerControlEXT(
+ static_cast<VkDevice>( m_device ), static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( &displayPowerInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Fence Device::registerEventEXT(
- VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Fence
+ Device::registerEventEXT( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::Fence( *this, deviceEventInfo, allocator );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Fence Device::registerDisplayEventEXT(
- VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display,
- VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Fence
+ Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display,
+ VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::Fence( *this, display, displayEventInfo, allocator );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t
- SwapchainKHR::getCounterEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t SwapchainKHR::getCounterEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter ) const
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkGetSwapchainCounterEXT &&
- "Function <vkGetSwapchainCounterEXT> needs extension <VK_EXT_display_control> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetSwapchainCounterEXT && "Function <vkGetSwapchainCounterEXT> needs extension <VK_EXT_display_control> enabled!" );
+
+ uint64_t counterValue;
+ VkResult result = getDispatcher()->vkGetSwapchainCounterEXT(
+ static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getCounterEXT" );
- uint64_t counterValue;
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkGetSwapchainCounterEXT( static_cast<VkDevice>( m_device ),
- static_cast<VkSwapchainKHR>( m_swapchain ),
- static_cast<VkSurfaceCounterFlagBitsEXT>( counter ),
- &counterValue ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getCounterEXT" );
- }
return counterValue;
}
//=== VK_GOOGLE_display_timing ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE
- SwapchainKHR::getRefreshCycleDurationGOOGLE() const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE SwapchainKHR::getRefreshCycleDurationGOOGLE() const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetRefreshCycleDurationGOOGLE &&
- "Function <vkGetRefreshCycleDurationGOOGLE> needs extension <VK_GOOGLE_display_timing> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetRefreshCycleDurationGOOGLE &&
+ "Function <vkGetRefreshCycleDurationGOOGLE> needs extension <VK_GOOGLE_display_timing> enabled!" );
VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetRefreshCycleDurationGOOGLE(
- static_cast<VkDevice>( m_device ),
- static_cast<VkSwapchainKHR>( m_swapchain ),
- reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( &displayTimingProperties ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getRefreshCycleDurationGOOGLE" );
- }
+ VkResult result = getDispatcher()->vkGetRefreshCycleDurationGOOGLE( static_cast<VkDevice>( m_device ),
+ static_cast<VkSwapchainKHR>( m_swapchain ),
+ reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( &displayTimingProperties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getRefreshCycleDurationGOOGLE" );
+
return displayTimingProperties;
}
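    // Hedged usage sketch -- an annotation, not part of the upstream diff. The struct returned
    // above carries the display's refresh period in nanoseconds. The helper name is illustrative.
    inline uint64_t refreshPeriodNs( VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR const & swapchain )
    {
      return swapchain.getRefreshCycleDurationGOOGLE().refreshDuration;  // nanoseconds per refresh cycle
    }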
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>
- SwapchainKHR::getPastPresentationTimingGOOGLE() const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE> SwapchainKHR::getPastPresentationTimingGOOGLE() const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPastPresentationTimingGOOGLE &&
- "Function <vkGetPastPresentationTimingGOOGLE> needs extension <VK_GOOGLE_display_timing> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPastPresentationTimingGOOGLE &&
+ "Function <vkGetPastPresentationTimingGOOGLE> needs extension <VK_GOOGLE_display_timing> enabled!" );
std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE> presentationTimings;
uint32_t presentationTimingCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VkResult result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkGetPastPresentationTimingGOOGLE( static_cast<VkDevice>( m_device ),
- static_cast<VkSwapchainKHR>( m_swapchain ),
- &presentationTimingCount,
- nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentationTimingCount )
+ result = getDispatcher()->vkGetPastPresentationTimingGOOGLE(
+ static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), &presentationTimingCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && presentationTimingCount )
{
presentationTimings.resize( presentationTimingCount );
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPastPresentationTimingGOOGLE(
- static_cast<VkDevice>( m_device ),
- static_cast<VkSwapchainKHR>( m_swapchain ),
- &presentationTimingCount,
- reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) ) );
+ result = getDispatcher()->vkGetPastPresentationTimingGOOGLE( static_cast<VkDevice>( m_device ),
+ static_cast<VkSwapchainKHR>( m_swapchain ),
+ &presentationTimingCount,
+ reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getPastPresentationTimingGOOGLE" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getPastPresentationTimingGOOGLE" );
+ VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
+ if ( presentationTimingCount < presentationTimings.size() )
{
- VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
- if ( presentationTimingCount < presentationTimings.size() )
- {
- presentationTimings.resize( presentationTimingCount );
- }
+ presentationTimings.resize( presentationTimingCount );
}
return presentationTimings;
}
@@ -14696,12 +14530,10 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_discard_rectangles ===
VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT(
- uint32_t firstDiscardRectangle,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles ) const VULKAN_HPP_NOEXCEPT
+ uint32_t firstDiscardRectangle, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdSetDiscardRectangleEXT &&
- "Function <vkCmdSetDiscardRectangleEXT> needs extension <VK_EXT_discard_rectangles> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDiscardRectangleEXT &&
+ "Function <vkCmdSetDiscardRectangleEXT> needs extension <VK_EXT_discard_rectangles> enabled!" );
getDispatcher()->vkCmdSetDiscardRectangleEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstDiscardRectangle,
@@ -14711,23 +14543,14 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_hdr_metadata ===
- VULKAN_HPP_INLINE void
- Device::setHdrMetadataEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata ) const
- VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata ) const
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkSetHdrMetadataEXT &&
- "Function <vkSetHdrMetadataEXT> needs extension <VK_EXT_hdr_metadata> enabled!" );
-
-# ifdef VULKAN_HPP_NO_EXCEPTIONS
- VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() );
-# else
+ VULKAN_HPP_ASSERT( getDispatcher()->vkSetHdrMetadataEXT && "Function <vkSetHdrMetadataEXT> needs extension <VK_EXT_hdr_metadata> enabled!" );
if ( swapchains.size() != metadata.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" );
}
-# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
getDispatcher()->vkSetHdrMetadataEXT( static_cast<VkDevice>( m_device ),
swapchains.size(),
@@ -14737,16 +14560,15 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_create_renderpass2 ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::RenderPass Device::createRenderPass2KHR(
- VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::RenderPass
+ Device::createRenderPass2KHR( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::RenderPass( *this, createInfo, allocator );
}
- VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR(
- const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
- const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
+ const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRenderPass2KHR &&
"Function <vkCmdBeginRenderPass2KHR> needs extension <VK_KHR_create_renderpass2> enabled!" );
@@ -14756,214 +14578,168 @@ namespace VULKAN_HPP_NAMESPACE
reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR(
- const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
- const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
+ const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdNextSubpass2KHR &&
- "Function <vkCmdNextSubpass2KHR> needs extension <VK_KHR_create_renderpass2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdNextSubpass2KHR && "Function <vkCmdNextSubpass2KHR> needs extension <VK_KHR_create_renderpass2> enabled!" );
getDispatcher()->vkCmdNextSubpass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ),
reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR(
- const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderPass2KHR &&
- "Function <vkCmdEndRenderPass2KHR> needs extension <VK_KHR_create_renderpass2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderPass2KHR && "Function <vkCmdEndRenderPass2KHR> needs extension <VK_KHR_create_renderpass2> enabled!" );
- getDispatcher()->vkCmdEndRenderPass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+ getDispatcher()->vkCmdEndRenderPass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
}
//=== VK_KHR_shared_presentable_image ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result SwapchainKHR::getStatus() const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetSwapchainStatusKHR &&
- "Function <vkGetSwapchainStatusKHR> needs extension <VK_KHR_shared_presentable_image> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetSwapchainStatusKHR &&
+ "Function <vkGetSwapchainStatusKHR> needs extension <VK_KHR_shared_presentable_image> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetSwapchainStatusKHR(
- static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ) ) );
- if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
- ( result != VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR ) )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getStatus" );
- }
- return result;
+ VkResult result = getDispatcher()->vkGetSwapchainStatusKHR( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getStatus",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
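    // Hedged usage sketch -- an annotation, not part of the upstream diff. The resultCheck()
    // call above accepts both eSuccess and eSuboptimalKHR without throwing, so the caller still
    // has to inspect the returned value. The helper name is illustrative only.
    inline bool swapchainNeedsRecreation( VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR const & swapchain )
    {
      // eErrorOutOfDateKHR and other failures throw; eSuboptimalKHR is returned, not thrown
      return swapchain.getStatus() == VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR;
    }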
//=== VK_KHR_external_fence_capabilities ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties
- PhysicalDevice::getExternalFencePropertiesKHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceExternalFencePropertiesKHR &&
- "Function <vkGetPhysicalDeviceExternalFencePropertiesKHR> needs extension <VK_KHR_external_fence_capabilities> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalFencePropertiesKHR &&
+ "Function <vkGetPhysicalDeviceExternalFencePropertiesKHR> needs extension <VK_KHR_external_fence_capabilities> enabled!" );
VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
- getDispatcher()->vkGetPhysicalDeviceExternalFencePropertiesKHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
- reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
+ getDispatcher()->vkGetPhysicalDeviceExternalFencePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
+ reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
+
return externalFenceProperties;
}
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_fence_win32 ===
- VULKAN_HPP_INLINE void Device::importFenceWin32HandleKHR(
- const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo ) const
+ VULKAN_HPP_INLINE void Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkImportFenceWin32HandleKHR &&
- "Function <vkImportFenceWin32HandleKHR> needs extension <VK_KHR_external_fence_win32> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkImportFenceWin32HandleKHR &&
+ "Function <vkImportFenceWin32HandleKHR> needs extension <VK_KHR_external_fence_win32> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkImportFenceWin32HandleKHR(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( &importFenceWin32HandleInfo ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" );
- }
+ VkResult result = getDispatcher()->vkImportFenceWin32HandleKHR(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( &importFenceWin32HandleInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE Device::getFenceWin32HandleKHR(
- const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE
+ Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetFenceWin32HandleKHR &&
"Function <vkGetFenceWin32HandleKHR> needs extension <VK_KHR_external_fence_win32> enabled!" );
- HANDLE handle;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetFenceWin32HandleKHR(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ),
- &handle ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" );
- }
+ HANDLE handle;
+ VkResult result = getDispatcher()->vkGetFenceWin32HandleKHR(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" );
+
return handle;
}
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_fence_fd ===
- VULKAN_HPP_INLINE void
- Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo ) const
+ VULKAN_HPP_INLINE void Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo ) const
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkImportFenceFdKHR &&
- "Function <vkImportFenceFdKHR> needs extension <VK_KHR_external_fence_fd> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkImportFenceFdKHR && "Function <vkImportFenceFdKHR> needs extension <VK_KHR_external_fence_fd> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkImportFenceFdKHR(
- static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportFenceFdInfoKHR *>( &importFenceFdInfo ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" );
- }
+ VkResult result =
+ getDispatcher()->vkImportFenceFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportFenceFdInfoKHR *>( &importFenceFdInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE int
- Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE int Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo ) const
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkGetFenceFdKHR &&
- "Function <vkGetFenceFdKHR> needs extension <VK_KHR_external_fence_fd> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetFenceFdKHR && "Function <vkGetFenceFdKHR> needs extension <VK_KHR_external_fence_fd> enabled!" );
+
+ int fd;
+ VkResult result = getDispatcher()->vkGetFenceFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" );
- int fd;
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetFenceFdKHR(
- static_cast<VkDevice>( m_device ), reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" );
- }
return fd;
}
//=== VK_KHR_performance_query ===
VULKAN_HPP_NODISCARD
- VULKAN_HPP_INLINE std::pair<std::vector<PerformanceCounterKHR>, std::vector<PerformanceCounterDescriptionKHR>>
- PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex ) const
+ VULKAN_HPP_INLINE std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>, std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>>
+ PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR &&
- "Function <vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR> needs extension <VK_KHR_performance_query> enabled!" );
-
- std::pair<std::vector<PerformanceCounterKHR>, std::vector<PerformanceCounterDescriptionKHR>> data;
- std::vector<PerformanceCounterKHR> & counters = data.first;
- std::vector<PerformanceCounterDescriptionKHR> & counterDescriptions = data.second;
- uint32_t counterCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VULKAN_HPP_ASSERT( getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR &&
+ "Function <vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR> needs extension <VK_KHR_performance_query> enabled!" );
+
+ std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>, std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>> data;
+ std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> & counters = data.first;
+ std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR> & counterDescriptions = data.second;
+ uint32_t counterCount;
+ VkResult result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &counterCount, nullptr, nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && counterCount )
+ result = getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &counterCount, nullptr, nullptr );
+ if ( ( result == VK_SUCCESS ) && counterCount )
{
counters.resize( counterCount );
counterDescriptions.resize( counterCount );
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- queueFamilyIndex,
- &counterCount,
- reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
- reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) );
- VULKAN_HPP_ASSERT( counterCount <= counters.size() );
+ result = getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ queueFamilyIndex,
+ &counterCount,
+ reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
+ reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( counterCount < counters.size() ) )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
+ VULKAN_HPP_ASSERT( counterCount <= counters.size() );
+ if ( counterCount < counters.size() )
{
counters.resize( counterCount );
counterDescriptions.resize( counterCount );
}
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException(
- result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
- }
return data;
}
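    // Hedged usage sketch -- an annotation, not part of the upstream diff. The wrapper above
    // returns the counters and their descriptions as two vectors of equal size, indexed in
    // lock-step. The helper name is illustrative only.
    inline void listPerformanceCounters( VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, uint32_t queueFamilyIndex )
    {
      auto [counters, descriptions] = physicalDevice.enumerateQueueFamilyPerformanceQueryCountersKHR( queueFamilyIndex );
      for ( size_t i = 0; i < counters.size(); ++i )
      {
        // descriptions[i].name describes counters[i]; counters[i].unit gives its unit
      }
    }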
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR(
- const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo ) const
- VULKAN_HPP_NOEXCEPT
+ const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR &&
- "Function <vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR> needs extension <VK_KHR_performance_query> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR &&
+ "Function <vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR> needs extension <VK_KHR_performance_query> enabled!" );
uint32_t numPasses;
getDispatcher()->vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( &performanceQueryCreateInfo ),
&numPasses );
+
return numPasses;
}
- VULKAN_HPP_INLINE void
- Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info ) const
+ VULKAN_HPP_INLINE void Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireProfilingLockKHR &&
"Function <vkAcquireProfilingLockKHR> needs extension <VK_KHR_performance_query> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkAcquireProfilingLockKHR(
- static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( &info ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" );
- }
+ VkResult result =
+ getDispatcher()->vkAcquireProfilingLockKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( &info ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" );
}
VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR() const VULKAN_HPP_NOEXCEPT
@@ -14977,237 +14753,221 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_get_surface_capabilities2 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR
- PhysicalDevice::getSurfaceCapabilities2KHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
+ PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR &&
- "Function <vkGetPhysicalDeviceSurfaceCapabilities2KHR> needs extension <VK_KHR_get_surface_capabilities2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR &&
+ "Function <vkGetPhysicalDeviceSurfaceCapabilities2KHR> needs extension <VK_KHR_get_surface_capabilities2> enabled!" );
VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
- reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
- }
+ VkResult result = getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+ reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
+
return surfaceCapabilities;
}
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> PhysicalDevice::getSurfaceCapabilities2KHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
{
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR &&
+ "Function <vkGetPhysicalDeviceSurfaceCapabilities2KHR> needs extension <VK_KHR_get_surface_capabilities2> enabled!" );
+
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities =
- structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>();
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
- reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
- }
+ VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>();
+ VkResult result = getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+ reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
+
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>
- PhysicalDevice::getSurfaceFormats2KHR(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
+ PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR &&
- "Function <vkGetPhysicalDeviceSurfaceFormats2KHR> needs extension <VK_KHR_get_surface_capabilities2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR &&
+ "Function <vkGetPhysicalDeviceSurfaceFormats2KHR> needs extension <VK_KHR_get_surface_capabilities2> enabled!" );
std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;
uint32_t surfaceFormatCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VkResult result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
- &surfaceFormatCount,
- nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount )
+ result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+ &surfaceFormatCount,
+ nullptr );
+ if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
{
surfaceFormats.resize( surfaceFormatCount );
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
- &surfaceFormatCount,
- reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) );
+ result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+ &surfaceFormatCount,
+ reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
+ VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+ if ( surfaceFormatCount < surfaceFormats.size() )
{
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
+ surfaceFormats.resize( surfaceFormatCount );
}
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ return surfaceFormats;
+ }
+
+ template <typename StructureChain>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain>
+ PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR &&
+ "Function <vkGetPhysicalDeviceSurfaceFormats2KHR> needs extension <VK_KHR_get_surface_capabilities2> enabled!" );
+
+ std::vector<StructureChain> structureChains;
+ std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;
+ uint32_t surfaceFormatCount;
+ VkResult result;
+ do
{
- VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
- if ( surfaceFormatCount < surfaceFormats.size() )
+ result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+ &surfaceFormatCount,
+ nullptr );
+ if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
{
+ structureChains.resize( surfaceFormatCount );
surfaceFormats.resize( surfaceFormatCount );
+ for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
+ {
+ surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext;
+ }
+ result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+ &surfaceFormatCount,
+ reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
}
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
+ VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+ if ( surfaceFormatCount < surfaceFormats.size() )
+ {
+ structureChains.resize( surfaceFormatCount );
}
- return surfaceFormats;
+ for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
+ {
+ structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i];
+ }
+ return structureChains;
}
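Together, the overloads above cover the plain surface query and the per-format structure-chain query. A sketch of the plain path (illustration only, not part of the patch), assuming physicalDevice and surface are a vk::raii::PhysicalDevice and vk::SurfaceKHR created elsewhere with VK_KHR_get_surface_capabilities2 enabled on the instance:

#include <vulkan/vulkan_raii.hpp>
#include <vector>

// Illustrative sketch only: query capabilities and pick the first reported format.
vk::SurfaceFormat2KHR pickFirstSurfaceFormat( vk::raii::PhysicalDevice const & physicalDevice, vk::SurfaceKHR surface )
{
  vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo( surface );
  vk::SurfaceCapabilities2KHR       capabilities = physicalDevice.getSurfaceCapabilities2KHR( surfaceInfo );
  (void)capabilities;  // e.g. inspect capabilities.surfaceCapabilities.minImageCount here
  std::vector<vk::SurfaceFormat2KHR> formats = physicalDevice.getSurfaceFormats2KHR( surfaceInfo );
  return formats.empty() ? vk::SurfaceFormat2KHR{} : formats.front();  // real code would search for a preferred format
}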
//=== VK_KHR_get_display_properties2 ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR>
- PhysicalDevice::getDisplayProperties2KHR() const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR> PhysicalDevice::getDisplayProperties2KHR() const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR &&
- "Function <vkGetPhysicalDeviceDisplayProperties2KHR> needs extension <VK_KHR_get_display_properties2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR &&
+ "Function <vkGetPhysicalDeviceDisplayProperties2KHR> needs extension <VK_KHR_get_display_properties2> enabled!" );
std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR> properties;
uint32_t propertyCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VkResult result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
+ result = getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- &propertyCount,
- reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) ) );
+ result = getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
return properties;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR>
- PhysicalDevice::getDisplayPlaneProperties2KHR() const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR> PhysicalDevice::getDisplayPlaneProperties2KHR() const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR &&
- "Function <vkGetPhysicalDeviceDisplayPlaneProperties2KHR> needs extension <VK_KHR_get_display_properties2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR &&
+ "Function <vkGetPhysicalDeviceDisplayPlaneProperties2KHR> needs extension <VK_KHR_get_display_properties2> enabled!" );
std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR> properties;
uint32_t propertyCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VkResult result;
do
{
- result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
+ result = getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- &propertyCount,
- reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) ) );
+ result = getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
return properties;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>
- DisplayKHR::getModeProperties2() const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR> DisplayKHR::getModeProperties2() const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetDisplayModeProperties2KHR &&
- "Function <vkGetDisplayModeProperties2KHR> needs extension <VK_KHR_get_display_properties2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayModeProperties2KHR &&
+ "Function <vkGetDisplayModeProperties2KHR> needs extension <VK_KHR_get_display_properties2> enabled!" );
std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR> properties;
uint32_t propertyCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VkResult result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkGetDisplayModeProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
- static_cast<VkDisplayKHR>( m_display ),
- &propertyCount,
- nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
+ result = getDispatcher()->vkGetDisplayModeProperties2KHR(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( m_display ), &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetDisplayModeProperties2KHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- static_cast<VkDisplayKHR>( m_display ),
- &propertyCount,
- reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) );
+ result = getDispatcher()->vkGetDisplayModeProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ static_cast<VkDisplayKHR>( m_display ),
+ &propertyCount,
+ reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::getModeProperties2" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::getModeProperties2" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
return properties;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR
- PhysicalDevice::getDisplayPlaneCapabilities2KHR(
- const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo ) const
+ PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetDisplayPlaneCapabilities2KHR &&
- "Function <vkGetDisplayPlaneCapabilities2KHR> needs extension <VK_KHR_get_display_properties2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayPlaneCapabilities2KHR &&
+ "Function <vkGetDisplayPlaneCapabilities2KHR> needs extension <VK_KHR_get_display_properties2> enabled!" );
VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetDisplayPlaneCapabilities2KHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( &displayPlaneInfo ),
- reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( &capabilities ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" );
- }
+ VkResult result = getDispatcher()->vkGetDisplayPlaneCapabilities2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( &displayPlaneInfo ),
+ reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( &capabilities ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" );
+
return capabilities;
}
# if defined( VK_USE_PLATFORM_IOS_MVK )
//=== VK_MVK_ios_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createIOSSurfaceMVK(
- VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+ Instance::createIOSSurfaceMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
}
@@ -15216,9 +14976,9 @@ namespace VULKAN_HPP_NAMESPACE
# if defined( VK_USE_PLATFORM_MACOS_MVK )
//=== VK_MVK_macos_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createMacOSSurfaceMVK(
- VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+ Instance::createMacOSSurfaceMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
}
@@ -15226,44 +14986,32 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_debug_utils ===
- VULKAN_HPP_INLINE void
- Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo ) const
+ VULKAN_HPP_INLINE void Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkSetDebugUtilsObjectNameEXT &&
"Function <vkSetDebugUtilsObjectNameEXT> needs extension <VK_EXT_debug_utils> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkSetDebugUtilsObjectNameEXT(
- static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( &nameInfo ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" );
- }
+ VkResult result = getDispatcher()->vkSetDebugUtilsObjectNameEXT( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( &nameInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" );
}
- VULKAN_HPP_INLINE void
- Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo ) const
+ VULKAN_HPP_INLINE void Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkSetDebugUtilsObjectTagEXT &&
"Function <vkSetDebugUtilsObjectTagEXT> needs extension <VK_EXT_debug_utils> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkSetDebugUtilsObjectTagEXT(
- static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( &tagInfo ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" );
- }
+ VkResult result =
+ getDispatcher()->vkSetDebugUtilsObjectTagEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( &tagInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" );
}
- VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT(
- const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkQueueBeginDebugUtilsLabelEXT &&
"Function <vkQueueBeginDebugUtilsLabelEXT> needs extension <VK_EXT_debug_utils> enabled!" );
- getDispatcher()->vkQueueBeginDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ),
- reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+ getDispatcher()->vkQueueBeginDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
}
VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT
@@ -15274,18 +15022,15 @@ namespace VULKAN_HPP_NAMESPACE
getDispatcher()->vkQueueEndDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ) );
}
- VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT(
- const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkQueueInsertDebugUtilsLabelEXT &&
"Function <vkQueueInsertDebugUtilsLabelEXT> needs extension <VK_EXT_debug_utils> enabled!" );
- getDispatcher()->vkQueueInsertDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ),
- reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+ getDispatcher()->vkQueueInsertDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT(
- const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginDebugUtilsLabelEXT &&
"Function <vkCmdBeginDebugUtilsLabelEXT> needs extension <VK_EXT_debug_utils> enabled!" );
@@ -15296,14 +15041,12 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndDebugUtilsLabelEXT &&
- "Function <vkCmdEndDebugUtilsLabelEXT> needs extension <VK_EXT_debug_utils> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndDebugUtilsLabelEXT && "Function <vkCmdEndDebugUtilsLabelEXT> needs extension <VK_EXT_debug_utils> enabled!" );
getDispatcher()->vkCmdEndDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT(
- const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdInsertDebugUtilsLabelEXT &&
"Function <vkCmdInsertDebugUtilsLabelEXT> needs extension <VK_EXT_debug_utils> enabled!" );
@@ -15313,245 +15056,208 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT
- Instance::createDebugUtilsMessengerEXT(
- VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ Instance::createDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT( *this, createInfo, allocator );
}
- VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT(
- VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
- VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,
- const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
+ VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,
+ const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkSubmitDebugUtilsMessageEXT &&
"Function <vkSubmitDebugUtilsMessageEXT> needs extension <VK_EXT_debug_utils> enabled!" );
- getDispatcher()->vkSubmitDebugUtilsMessageEXT(
- static_cast<VkInstance>( m_instance ),
- static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
- static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
- reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( &callbackData ) );
+ getDispatcher()->vkSubmitDebugUtilsMessageEXT( static_cast<VkInstance>( m_instance ),
+ static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
+ static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
+ reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( &callbackData ) );
}
# if defined( VK_USE_PLATFORM_ANDROID_KHR )
//=== VK_ANDROID_external_memory_android_hardware_buffer ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID
- Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const
+ Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const
{
VULKAN_HPP_ASSERT(
getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID &&
"Function <vkGetAndroidHardwareBufferPropertiesANDROID> needs extension <VK_ANDROID_external_memory_android_hardware_buffer> enabled!" );
VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID(
- static_cast<VkDevice>( m_device ),
- &buffer,
- reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
- }
+ VkResult result = getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID(
+ static_cast<VkDevice>( m_device ), &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
+
return properties;
}
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
- Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const
{
+ VULKAN_HPP_ASSERT(
+ getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID &&
+ "Function <vkGetAndroidHardwareBufferPropertiesANDROID> needs extension <VK_ANDROID_external_memory_android_hardware_buffer> enabled!" );
+
StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties =
structureChain.template get<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>();
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID(
- static_cast<VkDevice>( m_device ),
- &buffer,
- reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
- }
+ VkResult result = getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID(
+ static_cast<VkDevice>( m_device ), &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
+
return structureChain;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE struct AHardwareBuffer * Device::getMemoryAndroidHardwareBufferANDROID(
- const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE struct AHardwareBuffer *
+ Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetMemoryAndroidHardwareBufferANDROID &&
- "Function <vkGetMemoryAndroidHardwareBufferANDROID> needs extension <VK_ANDROID_external_memory_android_hardware_buffer> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryAndroidHardwareBufferANDROID &&
+ "Function <vkGetMemoryAndroidHardwareBufferANDROID> needs extension <VK_ANDROID_external_memory_android_hardware_buffer> enabled!" );
+
+ struct AHardwareBuffer * buffer;
+ VkResult result = getDispatcher()->vkGetMemoryAndroidHardwareBufferANDROID(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( &info ), &buffer );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" );
- struct AHardwareBuffer * buffer;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetMemoryAndroidHardwareBufferANDROID(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( &info ),
- &buffer ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" );
- }
return buffer;
}
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
//=== VK_EXT_sample_locations ===
- VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT(
- const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleLocationsEXT &&
"Function <vkCmdSetSampleLocationsEXT> needs extension <VK_EXT_sample_locations> enabled!" );
- getDispatcher()->vkCmdSetSampleLocationsEXT(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkSampleLocationsInfoEXT *>( &sampleLocationsInfo ) );
+ getDispatcher()->vkCmdSetSampleLocationsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkSampleLocationsInfoEXT *>( &sampleLocationsInfo ) );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT
- PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples ) const
- VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceMultisamplePropertiesEXT &&
- "Function <vkGetPhysicalDeviceMultisamplePropertiesEXT> needs extension <VK_EXT_sample_locations> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMultisamplePropertiesEXT &&
+ "Function <vkGetPhysicalDeviceMultisamplePropertiesEXT> needs extension <VK_EXT_sample_locations> enabled!" );
VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties;
- getDispatcher()->vkGetPhysicalDeviceMultisamplePropertiesEXT(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- static_cast<VkSampleCountFlagBits>( samples ),
- reinterpret_cast<VkMultisamplePropertiesEXT *>( &multisampleProperties ) );
+ getDispatcher()->vkGetPhysicalDeviceMultisamplePropertiesEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ static_cast<VkSampleCountFlagBits>( samples ),
+ reinterpret_cast<VkMultisamplePropertiesEXT *>( &multisampleProperties ) );
+
return multisampleProperties;
}
//=== VK_KHR_get_memory_requirements2 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
- Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const
- VULKAN_HPP_NOEXCEPT
+ Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetImageMemoryRequirements2KHR &&
- "Function <vkGetImageMemoryRequirements2KHR> needs extension <VK_KHR_get_memory_requirements2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements2KHR &&
+ "Function <vkGetImageMemoryRequirements2KHR> needs extension <VK_KHR_get_memory_requirements2> enabled!" );
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
- getDispatcher()->vkGetImageMemoryRequirements2KHR(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ getDispatcher()->vkGetImageMemoryRequirements2KHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return memoryRequirements;
}
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirements2KHR(
- const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetImageMemoryRequirements2KHR &&
- "Function <vkGetImageMemoryRequirements2KHR> needs extension <VK_KHR_get_memory_requirements2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements2KHR &&
+ "Function <vkGetImageMemoryRequirements2KHR> needs extension <VK_KHR_get_memory_requirements2> enabled!" );
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
- structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
- getDispatcher()->vkGetImageMemoryRequirements2KHR(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+ getDispatcher()->vkGetImageMemoryRequirements2KHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
- Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const
- VULKAN_HPP_NOEXCEPT
+ Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetBufferMemoryRequirements2KHR &&
- "Function <vkGetBufferMemoryRequirements2KHR> needs extension <VK_KHR_get_memory_requirements2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements2KHR &&
+ "Function <vkGetBufferMemoryRequirements2KHR> needs extension <VK_KHR_get_memory_requirements2> enabled!" );
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
- getDispatcher()->vkGetBufferMemoryRequirements2KHR(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ getDispatcher()->vkGetBufferMemoryRequirements2KHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return memoryRequirements;
}
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements2KHR(
- const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetBufferMemoryRequirements2KHR &&
- "Function <vkGetBufferMemoryRequirements2KHR> needs extension <VK_KHR_get_memory_requirements2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements2KHR &&
+ "Function <vkGetBufferMemoryRequirements2KHR> needs extension <VK_KHR_get_memory_requirements2> enabled!" );
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
- structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
- getDispatcher()->vkGetBufferMemoryRequirements2KHR(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+ getDispatcher()->vkGetBufferMemoryRequirements2KHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return structureChain;
}
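The StructureChain overloads above are what let callers append extension structures to a memory-requirements query in a single call. A sketch (illustration only), assuming device and buffer already exist and that vk::MemoryDedicatedRequirements (core 1.1 / VK_KHR_dedicated_allocation) is available on the device:

#include <vulkan/vulkan_raii.hpp>

// Illustrative sketch only: ask for dedicated-allocation advice together with
// the plain memory requirements, using the chain variant generated above.
bool prefersDedicatedAllocation( vk::raii::Device const & device, vk::Buffer buffer )
{
  auto chain = device.getBufferMemoryRequirements2KHR<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>(
    vk::BufferMemoryRequirementsInfo2( buffer ) );
  return chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation == VK_TRUE;
}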
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>
- Device::getImageSparseMemoryRequirements2KHR(
- const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
+ Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetImageSparseMemoryRequirements2KHR &&
- "Function <vkGetImageSparseMemoryRequirements2KHR> needs extension <VK_KHR_get_memory_requirements2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSparseMemoryRequirements2KHR &&
+ "Function <vkGetImageSparseMemoryRequirements2KHR> needs extension <VK_KHR_get_memory_requirements2> enabled!" );
- uint32_t sparseMemoryRequirementCount;
- getDispatcher()->vkGetImageSparseMemoryRequirements2KHR(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
- &sparseMemoryRequirementCount,
- nullptr );
- std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> sparseMemoryRequirements(
- sparseMemoryRequirementCount );
+ std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> sparseMemoryRequirements;
+ uint32_t sparseMemoryRequirementCount;
getDispatcher()->vkGetImageSparseMemoryRequirements2KHR(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
- &sparseMemoryRequirementCount,
- reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
- VULKAN_HPP_ASSERT( sparseMemoryRequirementCount == sparseMemoryRequirements.size() );
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+ getDispatcher()->vkGetImageSparseMemoryRequirements2KHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
+ &sparseMemoryRequirementCount,
+ reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+
+ VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+ if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+ {
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+ }
return sparseMemoryRequirements;
}
//=== VK_KHR_acceleration_structure ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR
- Device::createAccelerationStructureKHR(
- VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ Device::createAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR( *this, createInfo, allocator );
}
VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos )
- const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdBuildAccelerationStructuresKHR &&
- "Function <vkCmdBuildAccelerationStructuresKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
-
-# ifdef VULKAN_HPP_NO_EXCEPTIONS
- VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() );
-# else
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildAccelerationStructuresKHR &&
+ "Function <vkCmdBuildAccelerationStructuresKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
if ( infos.size() != pBuildRangeInfos.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
}
-# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
getDispatcher()->vkCmdBuildAccelerationStructuresKHR(
static_cast<VkCommandBuffer>( m_commandBuffer ),
@@ -15561,410 +15267,342 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses,
- ArrayProxy<const uint32_t> const & indirectStrides,
- ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & indirectStrides,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdBuildAccelerationStructuresIndirectKHR &&
- "Function <vkCmdBuildAccelerationStructuresIndirectKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
-
-# ifdef VULKAN_HPP_NO_EXCEPTIONS
- VULKAN_HPP_ASSERT( infos.size() == indirectDeviceAddresses.size() );
- VULKAN_HPP_ASSERT( infos.size() == indirectStrides.size() );
- VULKAN_HPP_ASSERT( infos.size() == pMaxPrimitiveCounts.size() );
-# else
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildAccelerationStructuresIndirectKHR &&
+ "Function <vkCmdBuildAccelerationStructuresIndirectKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
if ( infos.size() != indirectDeviceAddresses.size() )
{
- throw LogicError(
- VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+ "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" );
}
if ( infos.size() != indirectStrides.size() )
{
- throw LogicError(
- VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" );
}
if ( infos.size() != pMaxPrimitiveCounts.size() )
{
- throw LogicError(
- VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" );
}
-# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
- getDispatcher()->vkCmdBuildAccelerationStructuresIndirectKHR(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- infos.size(),
- reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
- reinterpret_cast<const VkDeviceAddress *>( indirectDeviceAddresses.data() ),
- indirectStrides.data(),
- pMaxPrimitiveCounts.data() );
+ getDispatcher()->vkCmdBuildAccelerationStructuresIndirectKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ infos.size(),
+ reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
+ reinterpret_cast<const VkDeviceAddress *>( indirectDeviceAddresses.data() ),
+ indirectStrides.data(),
+ pMaxPrimitiveCounts.data() );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::buildAccelerationStructuresKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos )
- const
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkBuildAccelerationStructuresKHR &&
- "Function <vkBuildAccelerationStructuresKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkBuildAccelerationStructuresKHR &&
+ "Function <vkBuildAccelerationStructuresKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
if ( infos.size() != pBuildRangeInfos.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
}
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkBuildAccelerationStructuresKHR(
- static_cast<VkDevice>( m_device ),
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- infos.size(),
- reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
- reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) ) );
- if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
- ( result != VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) &&
- ( result != VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR" );
- }
- return result;
+ VkResult result = getDispatcher()->vkBuildAccelerationStructuresKHR(
+ static_cast<VkDevice>( m_device ),
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ infos.size(),
+ reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
+ reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
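Because eOperationDeferredKHR and eOperationNotDeferredKHR are accepted as success codes above, the host-side build hands its vk::Result back to the caller instead of swallowing it. A sketch of checking for deferral (illustration only), assuming device, deferredOperation, buildInfo and rangeInfo are valid objects prepared elsewhere with VK_KHR_acceleration_structure enabled:

#include <vulkan/vulkan_raii.hpp>

// Illustrative sketch only: host-side acceleration structure build; the caller
// inspects the returned result to learn whether the work was actually deferred.
bool buildWasDeferred( vk::raii::Device const &                              device,
                       vk::DeferredOperationKHR                              deferredOperation,
                       vk::AccelerationStructureBuildGeometryInfoKHR const & buildInfo,
                       vk::AccelerationStructureBuildRangeInfoKHR const *    rangeInfo )
{
  vk::Result result = device.buildAccelerationStructuresKHR( deferredOperation, buildInfo, rangeInfo );
  return result == vk::Result::eOperationDeferredKHR;
}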
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
- Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCopyAccelerationStructureKHR &&
- "Function <vkCopyAccelerationStructureKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCopyAccelerationStructureKHR &&
+ "Function <vkCopyAccelerationStructureKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCopyAccelerationStructureKHR(
- static_cast<VkDevice>( m_device ),
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) ) );
- if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
- ( result != VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) &&
- ( result != VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR" );
- }
- return result;
+ VkResult result = getDispatcher()->vkCopyAccelerationStructureKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyAccelerationStructureToMemoryKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
+ Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCopyAccelerationStructureToMemoryKHR &&
- "Function <vkCopyAccelerationStructureToMemoryKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCopyAccelerationStructureToMemoryKHR &&
+ "Function <vkCopyAccelerationStructureToMemoryKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCopyAccelerationStructureToMemoryKHR(
- static_cast<VkDevice>( m_device ),
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) ) );
- if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
- ( result != VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) &&
- ( result != VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR" );
- }
- return result;
+ VkResult result =
+ getDispatcher()->vkCopyAccelerationStructureToMemoryKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMemoryToAccelerationStructureKHR(
- VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
- const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
+ Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCopyMemoryToAccelerationStructureKHR &&
- "Function <vkCopyMemoryToAccelerationStructureKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMemoryToAccelerationStructureKHR &&
+ "Function <vkCopyMemoryToAccelerationStructureKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCopyMemoryToAccelerationStructureKHR(
- static_cast<VkDevice>( m_device ),
- static_cast<VkDeferredOperationKHR>( deferredOperation ),
- reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) ) );
- if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
- ( result != VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) &&
- ( result != VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR" );
- }
- return result;
+ VkResult result =
+ getDispatcher()->vkCopyMemoryToAccelerationStructureKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<DataType> Device::writeAccelerationStructuresPropertiesKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- size_t dataSize,
- size_t stride ) const
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t dataSize,
+ size_t stride ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR &&
- "Function <vkWriteAccelerationStructuresPropertiesKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR &&
+ "Function <vkWriteAccelerationStructuresPropertiesKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
std::vector<DataType> data( dataSize / sizeof( DataType ) );
- Result result = static_cast<Result>( getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR(
- static_cast<VkDevice>( m_device ),
- accelerationStructures.size(),
- reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
- static_cast<VkQueryType>( queryType ),
- data.size() * sizeof( DataType ),
- reinterpret_cast<void *>( data.data() ),
- stride ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" );
- }
+ VkResult result =
+ getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR( static_cast<VkDevice>( m_device ),
+ accelerationStructures.size(),
+ reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
+ static_cast<VkQueryType>( queryType ),
+ data.size() * sizeof( DataType ),
+ reinterpret_cast<void *>( data.data() ),
+ stride );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" );
+
return data;
}
template <typename DataType>
- VULKAN_HPP_NODISCARD DataType Device::writeAccelerationStructuresPropertyKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- size_t stride ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Device::writeAccelerationStructuresPropertyKHR(
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t stride ) const
{
+ VULKAN_HPP_ASSERT( getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR &&
+ "Function <vkWriteAccelerationStructuresPropertiesKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+
DataType data;
- Result result = static_cast<Result>( getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR(
- static_cast<VkDevice>( m_device ),
- accelerationStructures.size(),
- reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
- static_cast<VkQueryType>( queryType ),
- sizeof( DataType ),
- reinterpret_cast<void *>( &data ),
- stride ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" );
- }
+ VkResult result =
+ getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR( static_cast<VkDevice>( m_device ),
+ accelerationStructures.size(),
+ reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
+ static_cast<VkQueryType>( queryType ),
+ sizeof( DataType ),
+ reinterpret_cast<void *>( &data ),
+ stride );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" );
+
return data;
}
- VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR(
- const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdCopyAccelerationStructureKHR &&
- "Function <vkCmdCopyAccelerationStructureKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyAccelerationStructureKHR &&
+ "Function <vkCmdCopyAccelerationStructureKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
- getDispatcher()->vkCmdCopyAccelerationStructureKHR(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
+ getDispatcher()->vkCmdCopyAccelerationStructureKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
}
VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR(
const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdCopyAccelerationStructureToMemoryKHR &&
- "Function <vkCmdCopyAccelerationStructureToMemoryKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyAccelerationStructureToMemoryKHR &&
+ "Function <vkCmdCopyAccelerationStructureToMemoryKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
- getDispatcher()->vkCmdCopyAccelerationStructureToMemoryKHR(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
+ getDispatcher()->vkCmdCopyAccelerationStructureToMemoryKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
}
VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR(
const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdCopyMemoryToAccelerationStructureKHR &&
- "Function <vkCmdCopyMemoryToAccelerationStructureKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryToAccelerationStructureKHR &&
+ "Function <vkCmdCopyMemoryToAccelerationStructureKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
- getDispatcher()->vkCmdCopyMemoryToAccelerationStructureKHR(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
+ getDispatcher()->vkCmdCopyMemoryToAccelerationStructureKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress
- Device::getAccelerationStructureAddressKHR(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
+ Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetAccelerationStructureDeviceAddressKHR &&
- "Function <vkGetAccelerationStructureDeviceAddressKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureDeviceAddressKHR &&
+ "Function <vkGetAccelerationStructureDeviceAddressKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+
+ VkDeviceAddress result = getDispatcher()->vkGetAccelerationStructureDeviceAddressKHR(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( &info ) );
- return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>(
- getDispatcher()->vkGetAccelerationStructureDeviceAddressKHR(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( &info ) ) );
+ return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
}
VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesKHR &&
- "Function <vkCmdWriteAccelerationStructuresPropertiesKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesKHR &&
+ "Function <vkCmdWriteAccelerationStructuresPropertiesKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
- getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesKHR(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- accelerationStructures.size(),
- reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
- static_cast<VkQueryType>( queryType ),
- static_cast<VkQueryPool>( queryPool ),
- firstQuery );
+ getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ accelerationStructures.size(),
+ reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
+ static_cast<VkQueryType>( queryType ),
+ static_cast<VkQueryPool>( queryPool ),
+ firstQuery );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
- Device::getAccelerationStructureCompatibilityKHR(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR Device::getAccelerationStructureCompatibilityKHR(
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetDeviceAccelerationStructureCompatibilityKHR &&
- "Function <vkGetDeviceAccelerationStructureCompatibilityKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceAccelerationStructureCompatibilityKHR &&
+ "Function <vkGetDeviceAccelerationStructureCompatibilityKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility;
- getDispatcher()->vkGetDeviceAccelerationStructureCompatibilityKHR(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( &versionInfo ),
- reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
+ getDispatcher()->vkGetDeviceAccelerationStructureCompatibilityKHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( &versionInfo ),
+ reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
+
return compatibility;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR
- Device::getAccelerationStructureBuildSizesKHR(
- VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo,
- ArrayProxy<const uint32_t> const & maxPrimitiveCounts ) const VULKAN_HPP_NOEXCEPT
+ Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & maxPrimitiveCounts ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetAccelerationStructureBuildSizesKHR &&
- "Function <vkGetAccelerationStructureBuildSizesKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureBuildSizesKHR &&
+ "Function <vkGetAccelerationStructureBuildSizesKHR> needs extension <VK_KHR_acceleration_structure> enabled!" );
+ if ( maxPrimitiveCounts.size() != buildInfo.geometryCount )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" );
+ }
VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo;
- getDispatcher()->vkGetAccelerationStructureBuildSizesKHR(
- static_cast<VkDevice>( m_device ),
- static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
- reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( &buildInfo ),
- maxPrimitiveCounts.data(),
- reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( &sizeInfo ) );
+ getDispatcher()->vkGetAccelerationStructureBuildSizesKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
+ reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( &buildInfo ),
+ maxPrimitiveCounts.data(),
+ reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( &sizeInfo ) );
+
return sizeInfo;
}
//=== VK_KHR_sampler_ycbcr_conversion ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion
- Device::createSamplerYcbcrConversionKHR(
- VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ Device::createSamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion( *this, createInfo, allocator );
}
- VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR(
- VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkDestroySamplerYcbcrConversionKHR &&
- "Function <vkDestroySamplerYcbcrConversionKHR> needs extension <VK_KHR_sampler_ycbcr_conversion> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkDestroySamplerYcbcrConversionKHR &&
+ "Function <vkDestroySamplerYcbcrConversionKHR> needs extension <VK_KHR_sampler_ycbcr_conversion> enabled!" );
getDispatcher()->vkDestroySamplerYcbcrConversionKHR(
static_cast<VkDevice>( m_device ),
static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
//=== VK_KHR_bind_memory2 ===
- VULKAN_HPP_INLINE void Device::bindBufferMemory2KHR(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos ) const
+ VULKAN_HPP_INLINE void
+ Device::bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos ) const
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkBindBufferMemory2KHR &&
- "Function <vkBindBufferMemory2KHR> needs extension <VK_KHR_bind_memory2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkBindBufferMemory2KHR && "Function <vkBindBufferMemory2KHR> needs extension <VK_KHR_bind_memory2> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkBindBufferMemory2KHR(
- static_cast<VkDevice>( m_device ),
- bindInfos.size(),
- reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" );
- }
+ VkResult result = getDispatcher()->vkBindBufferMemory2KHR(
+ static_cast<VkDevice>( m_device ), bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" );
}
VULKAN_HPP_INLINE void
- Device::bindImageMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos ) const
+ Device::bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos ) const
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkBindImageMemory2KHR &&
- "Function <vkBindImageMemory2KHR> needs extension <VK_KHR_bind_memory2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkBindImageMemory2KHR && "Function <vkBindImageMemory2KHR> needs extension <VK_KHR_bind_memory2> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkBindImageMemory2KHR( static_cast<VkDevice>( m_device ),
- bindInfos.size(),
- reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" );
- }
+ VkResult result = getDispatcher()->vkBindImageMemory2KHR(
+ static_cast<VkDevice>( m_device ), bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" );
}
//=== VK_EXT_image_drm_format_modifier ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT
- Image::getDrmFormatModifierPropertiesEXT() const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT Image::getDrmFormatModifierPropertiesEXT() const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetImageDrmFormatModifierPropertiesEXT &&
- "Function <vkGetImageDrmFormatModifierPropertiesEXT> needs extension <VK_EXT_image_drm_format_modifier> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageDrmFormatModifierPropertiesEXT &&
+ "Function <vkGetImageDrmFormatModifierPropertiesEXT> needs extension <VK_EXT_image_drm_format_modifier> enabled!" );
VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetImageDrmFormatModifierPropertiesEXT(
- static_cast<VkDevice>( m_device ),
- static_cast<VkImage>( m_image ),
- reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( &properties ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Image::getDrmFormatModifierPropertiesEXT" );
- }
+ VkResult result = getDispatcher()->vkGetImageDrmFormatModifierPropertiesEXT(
+ static_cast<VkDevice>( m_device ), static_cast<VkImage>( m_image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( &properties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Image::getDrmFormatModifierPropertiesEXT" );
+
return properties;
}
//=== VK_EXT_validation_cache ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT
- Device::createValidationCacheEXT(
- VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ Device::createValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT( *this, createInfo, allocator );
}
- VULKAN_HPP_INLINE void
- ValidationCacheEXT::merge( ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches ) const
+ VULKAN_HPP_INLINE void ValidationCacheEXT::merge( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkMergeValidationCachesEXT &&
"Function <vkMergeValidationCachesEXT> needs extension <VK_EXT_validation_cache> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkMergeValidationCachesEXT(
- static_cast<VkDevice>( m_device ),
- static_cast<VkValidationCacheEXT>( m_validationCache ),
- srcCaches.size(),
- reinterpret_cast<const VkValidationCacheEXT *>( srcCaches.data() ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::ValidationCacheEXT::merge" );
- }
+ VkResult result = getDispatcher()->vkMergeValidationCachesEXT( static_cast<VkDevice>( m_device ),
+ static_cast<VkValidationCacheEXT>( m_validationCache ),
+ srcCaches.size(),
+ reinterpret_cast<const VkValidationCacheEXT *>( srcCaches.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::ValidationCacheEXT::merge" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<uint8_t> ValidationCacheEXT::getData() const
@@ -15972,177 +15610,143 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_ASSERT( getDispatcher()->vkGetValidationCacheDataEXT &&
"Function <vkGetValidationCacheDataEXT> needs extension <VK_EXT_validation_cache> enabled!" );
- std::vector<uint8_t> data;
- size_t dataSize;
- VULKAN_HPP_NAMESPACE::Result result;
+ std::vector<uint8_t> data;
+ size_t dataSize;
+ VkResult result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkGetValidationCacheDataEXT( static_cast<VkDevice>( m_device ),
- static_cast<VkValidationCacheEXT>( m_validationCache ),
- &dataSize,
- nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
+ result = getDispatcher()->vkGetValidationCacheDataEXT(
+ static_cast<VkDevice>( m_device ), static_cast<VkValidationCacheEXT>( m_validationCache ), &dataSize, nullptr );
+ if ( ( result == VK_SUCCESS ) && dataSize )
{
data.resize( dataSize );
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkGetValidationCacheDataEXT( static_cast<VkDevice>( m_device ),
- static_cast<VkValidationCacheEXT>( m_validationCache ),
- &dataSize,
- reinterpret_cast<void *>( data.data() ) ) );
+ result = getDispatcher()->vkGetValidationCacheDataEXT(
+ static_cast<VkDevice>( m_device ), static_cast<VkValidationCacheEXT>( m_validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::ValidationCacheEXT::getData" );
+ VULKAN_HPP_ASSERT( dataSize <= data.size() );
+ if ( dataSize < data.size() )
{
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::ValidationCacheEXT::getData" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- VULKAN_HPP_ASSERT( dataSize <= data.size() );
- if ( dataSize < data.size() )
- {
- data.resize( dataSize );
- }
+ data.resize( dataSize );
}
return data;
}
//=== VK_NV_shading_rate_image ===
- VULKAN_HPP_INLINE void
- CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView,
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView,
+ VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindShadingRateImageNV &&
"Function <vkCmdBindShadingRateImageNV> needs extension <VK_NV_shading_rate_image> enabled!" );
- getDispatcher()->vkCmdBindShadingRateImageNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkImageView>( imageView ),
- static_cast<VkImageLayout>( imageLayout ) );
+ getDispatcher()->vkCmdBindShadingRateImageNV(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV(
- uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes )
- const VULKAN_HPP_NOEXCEPT
+ uint32_t firstViewport,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdSetViewportShadingRatePaletteNV &&
- "Function <vkCmdSetViewportShadingRatePaletteNV> needs extension <VK_NV_shading_rate_image> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportShadingRatePaletteNV &&
+ "Function <vkCmdSetViewportShadingRatePaletteNV> needs extension <VK_NV_shading_rate_image> enabled!" );
- getDispatcher()->vkCmdSetViewportShadingRatePaletteNV(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- firstViewport,
- shadingRatePalettes.size(),
- reinterpret_cast<const VkShadingRatePaletteNV *>( shadingRatePalettes.data() ) );
+ getDispatcher()->vkCmdSetViewportShadingRatePaletteNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ firstViewport,
+ shadingRatePalettes.size(),
+ reinterpret_cast<const VkShadingRatePaletteNV *>( shadingRatePalettes.data() ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV(
- VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders ) const
- VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoarseSampleOrderNV &&
"Function <vkCmdSetCoarseSampleOrderNV> needs extension <VK_NV_shading_rate_image> enabled!" );
- getDispatcher()->vkCmdSetCoarseSampleOrderNV(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ),
- customSampleOrders.size(),
- reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( customSampleOrders.data() ) );
+ getDispatcher()->vkCmdSetCoarseSampleOrderNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ),
+ customSampleOrders.size(),
+ reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( customSampleOrders.data() ) );
}
//=== VK_NV_ray_tracing ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV
- Device::createAccelerationStructureNV(
- VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ Device::createAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV( *this, createInfo, allocator );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR
- Device::getAccelerationStructureMemoryRequirementsNV(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR Device::getAccelerationStructureMemoryRequirementsNV(
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV &&
- "Function <vkGetAccelerationStructureMemoryRequirementsNV> needs extension <VK_NV_ray_tracing> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV &&
+ "Function <vkGetAccelerationStructureMemoryRequirementsNV> needs extension <VK_NV_ray_tracing> enabled!" );
VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements;
- getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
- reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
+ getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
+
return memoryRequirements;
}
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
- Device::getAccelerationStructureMemoryRequirementsNV(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getAccelerationStructureMemoryRequirementsNV(
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV &&
- "Function <vkGetAccelerationStructureMemoryRequirementsNV> needs extension <VK_NV_ray_tracing> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV &&
+ "Function <vkGetAccelerationStructureMemoryRequirementsNV> needs extension <VK_NV_ray_tracing> enabled!" );
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements =
- structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR>();
- getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
- reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR>();
+ getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
+
return structureChain;
}
VULKAN_HPP_INLINE void Device::bindAccelerationStructureMemoryNV(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos ) const
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkBindAccelerationStructureMemoryNV &&
- "Function <vkBindAccelerationStructureMemoryNV> needs extension <VK_NV_ray_tracing> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkBindAccelerationStructureMemoryNV &&
+ "Function <vkBindAccelerationStructureMemoryNV> needs extension <VK_NV_ray_tracing> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkBindAccelerationStructureMemoryNV(
- static_cast<VkDevice>( m_device ),
- bindInfos.size(),
- reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( bindInfos.data() ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" );
- }
+ VkResult result = getDispatcher()->vkBindAccelerationStructureMemoryNV(
+ static_cast<VkDevice>( m_device ), bindInfos.size(), reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( bindInfos.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" );
}
- VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info,
- VULKAN_HPP_NAMESPACE::Buffer instanceData,
- VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,
- VULKAN_HPP_NAMESPACE::Bool32 update,
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
- VULKAN_HPP_NAMESPACE::Buffer scratch,
- VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info,
+ VULKAN_HPP_NAMESPACE::Buffer instanceData,
+ VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,
+ VULKAN_HPP_NAMESPACE::Bool32 update,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
+ VULKAN_HPP_NAMESPACE::Buffer scratch,
+ VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildAccelerationStructureNV &&
"Function <vkCmdBuildAccelerationStructureNV> needs extension <VK_NV_ray_tracing> enabled!" );
- getDispatcher()->vkCmdBuildAccelerationStructureNV(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkAccelerationStructureInfoNV *>( &info ),
- static_cast<VkBuffer>( instanceData ),
- static_cast<VkDeviceSize>( instanceOffset ),
- static_cast<VkBool32>( update ),
- static_cast<VkAccelerationStructureNV>( dst ),
- static_cast<VkAccelerationStructureNV>( src ),
- static_cast<VkBuffer>( scratch ),
- static_cast<VkDeviceSize>( scratchOffset ) );
+ getDispatcher()->vkCmdBuildAccelerationStructureNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkAccelerationStructureInfoNV *>( &info ),
+ static_cast<VkBuffer>( instanceData ),
+ static_cast<VkDeviceSize>( instanceOffset ),
+ static_cast<VkBool32>( update ),
+ static_cast<VkAccelerationStructureNV>( dst ),
+ static_cast<VkAccelerationStructureNV>( src ),
+ static_cast<VkBuffer>( scratch ),
+ static_cast<VkDeviceSize>( scratchOffset ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV(
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
- VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
+ VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyAccelerationStructureNV &&
"Function <vkCmdCopyAccelerationStructureNV> needs extension <VK_NV_ray_tracing> enabled!" );
@@ -16161,15 +15765,14 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset,
VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride,
- VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer,
+ VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset,
VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride,
uint32_t width,
uint32_t height,
- uint32_t depth ) const VULKAN_HPP_NOEXCEPT
+ uint32_t depth ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysNV &&
- "Function <vkCmdTraceRaysNV> needs extension <VK_NV_ray_tracing> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysNV && "Function <vkCmdTraceRaysNV> needs extension <VK_NV_ray_tracing> enabled!" );
getDispatcher()->vkCmdTraceRaysNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( raygenShaderBindingTableBuffer ),
@@ -16188,171 +15791,147 @@ namespace VULKAN_HPP_NAMESPACE
depth );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline>
- Device::createRayTracingPipelinesNV(
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const &
- pipelineCache,
- VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> Device::createRayTracingPipelinesNV(
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::Pipelines( *this, pipelineCache, createInfos, allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline Device::createRayTracingPipelineNV(
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const &
- pipelineCache,
- VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, pipelineCache, createInfo, allocator );
}
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<DataType>
- Pipeline::getRayTracingShaderGroupHandlesNV( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const
+ Pipeline::getRayTracingShaderGroupHandlesNV( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetRayTracingShaderGroupHandlesNV &&
- "Function <vkGetRayTracingShaderGroupHandlesNV> needs extension <VK_NV_ray_tracing> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesNV &&
+ "Function <vkGetRayTracingShaderGroupHandlesNV> needs extension <VK_NV_ray_tracing> enabled!" );
VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
std::vector<DataType> data( dataSize / sizeof( DataType ) );
- Result result = static_cast<Result>(
- getDispatcher()->vkGetRayTracingShaderGroupHandlesNV( static_cast<VkDevice>( m_device ),
- static_cast<VkPipeline>( m_pipeline ),
- firstGroup,
- groupCount,
- data.size() * sizeof( DataType ),
- reinterpret_cast<void *>( data.data() ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandlesNV" );
- }
+ VkResult result = getDispatcher()->vkGetRayTracingShaderGroupHandlesNV( static_cast<VkDevice>( m_device ),
+ static_cast<VkPipeline>( m_pipeline ),
+ firstGroup,
+ groupCount,
+ data.size() * sizeof( DataType ),
+ reinterpret_cast<void *>( data.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandlesNV" );
+
return data;
}
template <typename DataType>
- VULKAN_HPP_NODISCARD DataType Pipeline::getRayTracingShaderGroupHandleNV( uint32_t firstGroup,
- uint32_t groupCount ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Pipeline::getRayTracingShaderGroupHandleNV( uint32_t firstGroup, uint32_t groupCount ) const
{
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesNV &&
+ "Function <vkGetRayTracingShaderGroupHandlesNV> needs extension <VK_NV_ray_tracing> enabled!" );
+
DataType data;
- Result result = static_cast<Result>(
- getDispatcher()->vkGetRayTracingShaderGroupHandlesNV( static_cast<VkDevice>( m_device ),
- static_cast<VkPipeline>( m_pipeline ),
- firstGroup,
- groupCount,
- sizeof( DataType ),
- reinterpret_cast<void *>( &data ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandleNV" );
- }
+ VkResult result = getDispatcher()->vkGetRayTracingShaderGroupHandlesNV( static_cast<VkDevice>( m_device ),
+ static_cast<VkPipeline>( m_pipeline ),
+ firstGroup,
+ groupCount,
+ sizeof( DataType ),
+ reinterpret_cast<void *>( &data ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandleNV" );
+
return data;
}
template <typename DataType>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<DataType>
- AccelerationStructureNV::getHandle( size_t dataSize ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<DataType> AccelerationStructureNV::getHandle( size_t dataSize ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureHandleNV &&
"Function <vkGetAccelerationStructureHandleNV> needs extension <VK_NV_ray_tracing> enabled!" );
VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
std::vector<DataType> data( dataSize / sizeof( DataType ) );
- Result result = static_cast<Result>( getDispatcher()->vkGetAccelerationStructureHandleNV(
- static_cast<VkDevice>( m_device ),
- static_cast<VkAccelerationStructureNV>( m_accelerationStructure ),
- data.size() * sizeof( DataType ),
- reinterpret_cast<void *>( data.data() ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::AccelerationStructureNV::getHandle" );
- }
+ VkResult result = getDispatcher()->vkGetAccelerationStructureHandleNV( static_cast<VkDevice>( m_device ),
+ static_cast<VkAccelerationStructureNV>( m_accelerationStructure ),
+ data.size() * sizeof( DataType ),
+ reinterpret_cast<void *>( data.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::AccelerationStructureNV::getHandle" );
+
return data;
}
template <typename DataType>
- VULKAN_HPP_NODISCARD DataType AccelerationStructureNV::getHandle() const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType AccelerationStructureNV::getHandle() const
{
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureHandleNV &&
+ "Function <vkGetAccelerationStructureHandleNV> needs extension <VK_NV_ray_tracing> enabled!" );
+
DataType data;
- Result result = static_cast<Result>( getDispatcher()->vkGetAccelerationStructureHandleNV(
- static_cast<VkDevice>( m_device ),
- static_cast<VkAccelerationStructureNV>( m_accelerationStructure ),
- sizeof( DataType ),
- reinterpret_cast<void *>( &data ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::AccelerationStructureNV::getHandle" );
- }
+ VkResult result = getDispatcher()->vkGetAccelerationStructureHandleNV( static_cast<VkDevice>( m_device ),
+ static_cast<VkAccelerationStructureNV>( m_accelerationStructure ),
+ sizeof( DataType ),
+ reinterpret_cast<void *>( &data ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::AccelerationStructureNV::getHandle" );
+
return data;
}
VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures,
- VULKAN_HPP_NAMESPACE::QueryType queryType,
- VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesNV &&
- "Function <vkCmdWriteAccelerationStructuresPropertiesNV> needs extension <VK_NV_ray_tracing> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesNV &&
+ "Function <vkCmdWriteAccelerationStructuresPropertiesNV> needs extension <VK_NV_ray_tracing> enabled!" );
- getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesNV(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- accelerationStructures.size(),
- reinterpret_cast<const VkAccelerationStructureNV *>( accelerationStructures.data() ),
- static_cast<VkQueryType>( queryType ),
- static_cast<VkQueryPool>( queryPool ),
- firstQuery );
+ getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ accelerationStructures.size(),
+ reinterpret_cast<const VkAccelerationStructureNV *>( accelerationStructures.data() ),
+ static_cast<VkQueryType>( queryType ),
+ static_cast<VkQueryPool>( queryPool ),
+ firstQuery );
}
VULKAN_HPP_INLINE void Pipeline::compileDeferredNV( uint32_t shader ) const
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCompileDeferredNV &&
- "Function <vkCompileDeferredNV> needs extension <VK_NV_ray_tracing> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCompileDeferredNV && "Function <vkCompileDeferredNV> needs extension <VK_NV_ray_tracing> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCompileDeferredNV(
- static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), shader ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::compileDeferredNV" );
- }
+ VkResult result = getDispatcher()->vkCompileDeferredNV( static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), shader );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::compileDeferredNV" );
}
//=== VK_KHR_maintenance3 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
- Device::getDescriptorSetLayoutSupportKHR(
- const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT
+ Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetDescriptorSetLayoutSupportKHR &&
- "Function <vkGetDescriptorSetLayoutSupportKHR> needs extension <VK_KHR_maintenance3> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSupportKHR &&
+ "Function <vkGetDescriptorSetLayoutSupportKHR> needs extension <VK_KHR_maintenance3> enabled!" );
VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
- getDispatcher()->vkGetDescriptorSetLayoutSupportKHR(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
- reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+ getDispatcher()->vkGetDescriptorSetLayoutSupportKHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
+ reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+
return support;
}
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getDescriptorSetLayoutSupportKHR(
- const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetDescriptorSetLayoutSupportKHR &&
- "Function <vkGetDescriptorSetLayoutSupportKHR> needs extension <VK_KHR_maintenance3> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSupportKHR &&
+ "Function <vkGetDescriptorSetLayoutSupportKHR> needs extension <VK_KHR_maintenance3> enabled!" );
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support =
- structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
- getDispatcher()->vkGetDescriptorSetLayoutSupportKHR(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
- reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
+ getDispatcher()->vkGetDescriptorSetLayoutSupportKHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
+ reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+
return structureChain;
}
@@ -16363,7 +15942,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Buffer countBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
uint32_t maxDrawCount,
- uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+ uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectCountKHR &&
"Function <vkCmdDrawIndirectCountKHR> needs extension <VK_KHR_draw_indirect_count> enabled!" );
@@ -16377,17 +15956,15 @@ namespace VULKAN_HPP_NAMESPACE
stride );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- VULKAN_HPP_NAMESPACE::Buffer countBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+ uint32_t maxDrawCount,
+ uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdDrawIndexedIndirectCountKHR &&
- "Function <vkCmdDrawIndexedIndirectCountKHR> needs extension <VK_KHR_draw_indirect_count> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndexedIndirectCountKHR &&
+ "Function <vkCmdDrawIndexedIndirectCountKHR> needs extension <VK_KHR_draw_indirect_count> enabled!" );
getDispatcher()->vkCmdDrawIndexedIndirectCountKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBuffer>( buffer ),
@@ -16401,37 +15978,30 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_external_memory_host ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT
- Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- const void * pHostPointer ) const
+ Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void * pHostPointer ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetMemoryHostPointerPropertiesEXT &&
- "Function <vkGetMemoryHostPointerPropertiesEXT> needs extension <VK_EXT_external_memory_host> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryHostPointerPropertiesEXT &&
+ "Function <vkGetMemoryHostPointerPropertiesEXT> needs extension <VK_EXT_external_memory_host> enabled!" );
VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetMemoryHostPointerPropertiesEXT(
- static_cast<VkDevice>( m_device ),
- static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
- pHostPointer,
- reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( &memoryHostPointerProperties ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" );
- }
+ VkResult result =
+ getDispatcher()->vkGetMemoryHostPointerPropertiesEXT( static_cast<VkDevice>( m_device ),
+ static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
+ pHostPointer,
+ reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( &memoryHostPointerProperties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" );
+
return memoryHostPointerProperties;
}
//=== VK_AMD_buffer_marker ===
- VULKAN_HPP_INLINE void
- CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
- VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
- uint32_t marker ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
+ uint32_t marker ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteBufferMarkerAMD &&
- "Function <vkCmdWriteBufferMarkerAMD> needs extension <VK_AMD_buffer_marker> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteBufferMarkerAMD && "Function <vkCmdWriteBufferMarkerAMD> needs extension <VK_AMD_buffer_marker> enabled!" );
getDispatcher()->vkCmdWriteBufferMarkerAMD( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkPipelineStageFlagBits>( pipelineStage ),
@@ -16442,103 +16012,74 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_calibrated_timestamps ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT>
- PhysicalDevice::getCalibrateableTimeDomainsEXT() const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT> PhysicalDevice::getCalibrateableTimeDomainsEXT() const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT &&
- "Function <vkGetPhysicalDeviceCalibrateableTimeDomainsEXT> needs extension <VK_EXT_calibrated_timestamps> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT &&
+ "Function <vkGetPhysicalDeviceCalibrateableTimeDomainsEXT> needs extension <VK_EXT_calibrated_timestamps> enabled!" );
std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT> timeDomains;
uint32_t timeDomainCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VkResult result;
do
{
result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
- static_cast<VkPhysicalDevice>( m_physicalDevice ), &timeDomainCount, nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount )
+ getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), &timeDomainCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && timeDomainCount )
{
timeDomains.resize( timeDomainCount );
- result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- &timeDomainCount,
- reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) ) );
+ result = getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), &timeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
+ VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
+ if ( timeDomainCount < timeDomains.size() )
{
- VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
- if ( timeDomainCount < timeDomains.size() )
- {
- timeDomains.resize( timeDomainCount );
- }
+ timeDomains.resize( timeDomainCount );
}
return timeDomains;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<std::vector<uint64_t>, uint64_t>
- Device::getCalibratedTimestampsEXT(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<std::vector<uint64_t>, uint64_t> Device::getCalibratedTimestampsEXT(
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetCalibratedTimestampsEXT &&
- "Function <vkGetCalibratedTimestampsEXT> needs extension <VK_EXT_calibrated_timestamps> enabled!" );
-
- std::pair<std::vector<uint64_t>, uint64_t> data(
- std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) );
- std::vector<uint64_t> & timestamps = data.first;
- uint64_t & maxDeviation = data.second;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetCalibratedTimestampsEXT(
- static_cast<VkDevice>( m_device ),
- timestampInfos.size(),
- reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ),
- timestamps.data(),
- &maxDeviation ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
- }
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetCalibratedTimestampsEXT &&
+ "Function <vkGetCalibratedTimestampsEXT> needs extension <VK_EXT_calibrated_timestamps> enabled!" );
+
+ std::pair<std::vector<uint64_t>, uint64_t> data( std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) );
+ std::vector<uint64_t> & timestamps = data.first;
+ uint64_t & maxDeviation = data.second;
+ VkResult result = getDispatcher()->vkGetCalibratedTimestampsEXT( static_cast<VkDevice>( m_device ),
+ timestampInfos.size(),
+ reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ),
+ timestamps.data(),
+ &maxDeviation );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
+
return data;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<uint64_t, uint64_t>
- Device::getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT & timestampInfo ) const
+ Device::getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT & timestampInfo ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetCalibratedTimestampsEXT &&
- "Function <vkGetCalibratedTimestampsEXT> needs extension <VK_EXT_calibrated_timestamps> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetCalibratedTimestampsEXT &&
+ "Function <vkGetCalibratedTimestampsEXT> needs extension <VK_EXT_calibrated_timestamps> enabled!" );
std::pair<uint64_t, uint64_t> data;
uint64_t & timestamp = data.first;
uint64_t & maxDeviation = data.second;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetCalibratedTimestampsEXT(
- static_cast<VkDevice>( m_device ),
- 1,
- reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( &timestampInfo ),
- &timestamp,
- &maxDeviation ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" );
- }
+ VkResult result = getDispatcher()->vkGetCalibratedTimestampsEXT(
+ static_cast<VkDevice>( m_device ), 1, reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( &timestampInfo ), &timestamp, &maxDeviation );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" );
+
return data;
}
//=== VK_NV_mesh_shader ===
- VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount,
- uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksNV &&
- "Function <vkCmdDrawMeshTasksNV> needs extension <VK_NV_mesh_shader> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksNV && "Function <vkCmdDrawMeshTasksNV> needs extension <VK_NV_mesh_shader> enabled!" );
getDispatcher()->vkCmdDrawMeshTasksNV( static_cast<VkCommandBuffer>( m_commandBuffer ), taskCount, firstTask );
}
@@ -16546,25 +16087,21 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
uint32_t drawCount,
- uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+ uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectNV &&
"Function <vkCmdDrawMeshTasksIndirectNV> needs extension <VK_NV_mesh_shader> enabled!" );
- getDispatcher()->vkCmdDrawMeshTasksIndirectNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkBuffer>( buffer ),
- static_cast<VkDeviceSize>( offset ),
- drawCount,
- stride );
+ getDispatcher()->vkCmdDrawMeshTasksIndirectNV(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
- VULKAN_HPP_NAMESPACE::DeviceSize offset,
- VULKAN_HPP_NAMESPACE::Buffer countBuffer,
- VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+ uint32_t maxDrawCount,
+ uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectCountNV &&
"Function <vkCmdDrawMeshTasksIndirectCountNV> needs extension <VK_NV_mesh_shader> enabled!" );
@@ -16581,8 +16118,7 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_scissor_exclusive ===
VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV(
- uint32_t firstExclusiveScissor,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors ) const VULKAN_HPP_NOEXCEPT
+ uint32_t firstExclusiveScissor, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetExclusiveScissorNV &&
"Function <vkCmdSetExclusiveScissorNV> needs extension <VK_NV_scissor_exclusive> enabled!" );
@@ -16596,31 +16132,31 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_device_diagnostic_checkpoints ===
template <typename CheckpointMarkerType>
- VULKAN_HPP_INLINE void
- CommandBuffer::setCheckpointNV( CheckpointMarkerType const & checkpointMarker ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( CheckpointMarkerType const & checkpointMarker ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdSetCheckpointNV &&
- "Function <vkCmdSetCheckpointNV> needs extension <VK_NV_device_diagnostic_checkpoints> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCheckpointNV &&
+ "Function <vkCmdSetCheckpointNV> needs extension <VK_NV_device_diagnostic_checkpoints> enabled!" );
- getDispatcher()->vkCmdSetCheckpointNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const void *>( &checkpointMarker ) );
+ getDispatcher()->vkCmdSetCheckpointNV( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const void *>( &checkpointMarker ) );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV>
- Queue::getCheckpointDataNV() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV> Queue::getCheckpointDataNV() const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetQueueCheckpointDataNV &&
- "Function <vkGetQueueCheckpointDataNV> needs extension <VK_NV_device_diagnostic_checkpoints> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueueCheckpointDataNV &&
+ "Function <vkGetQueueCheckpointDataNV> needs extension <VK_NV_device_diagnostic_checkpoints> enabled!" );
- uint32_t checkpointDataCount;
+ std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV> checkpointData;
+ uint32_t checkpointDataCount;
getDispatcher()->vkGetQueueCheckpointDataNV( static_cast<VkQueue>( m_queue ), &checkpointDataCount, nullptr );
- std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV> checkpointData( checkpointDataCount );
- getDispatcher()->vkGetQueueCheckpointDataNV( static_cast<VkQueue>( m_queue ),
- &checkpointDataCount,
- reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
- VULKAN_HPP_ASSERT( checkpointDataCount == checkpointData.size() );
+ checkpointData.resize( checkpointDataCount );
+ getDispatcher()->vkGetQueueCheckpointDataNV(
+ static_cast<VkQueue>( m_queue ), &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
+
+ VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
+ if ( checkpointDataCount < checkpointData.size() )
+ {
+ checkpointData.resize( checkpointDataCount );
+ }
return checkpointData;
}
@@ -16628,194 +16164,135 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Semaphore::getCounterValueKHR() const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetSemaphoreCounterValueKHR &&
- "Function <vkGetSemaphoreCounterValueKHR> needs extension <VK_KHR_timeline_semaphore> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreCounterValueKHR &&
+ "Function <vkGetSemaphoreCounterValueKHR> needs extension <VK_KHR_timeline_semaphore> enabled!" );
+
+ uint64_t value;
+ VkResult result = getDispatcher()->vkGetSemaphoreCounterValueKHR( static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( m_semaphore ), &value );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Semaphore::getCounterValueKHR" );
- uint64_t value;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetSemaphoreCounterValueKHR(
- static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( m_semaphore ), &value ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Semaphore::getCounterValueKHR" );
- }
return value;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
- Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo,
+ uint64_t timeout ) const
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkWaitSemaphoresKHR &&
- "Function <vkWaitSemaphoresKHR> needs extension <VK_KHR_timeline_semaphore> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkWaitSemaphoresKHR && "Function <vkWaitSemaphoresKHR> needs extension <VK_KHR_timeline_semaphore> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkWaitSemaphoresKHR(
- static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) );
- if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
- ( result != VULKAN_HPP_NAMESPACE::Result::eTimeout ) )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR" );
- }
- return result;
+ VkResult result =
+ getDispatcher()->vkWaitSemaphoresKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
- VULKAN_HPP_INLINE void
- Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const
+ VULKAN_HPP_INLINE void Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkSignalSemaphoreKHR &&
- "Function <vkSignalSemaphoreKHR> needs extension <VK_KHR_timeline_semaphore> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkSignalSemaphoreKHR && "Function <vkSignalSemaphoreKHR> needs extension <VK_KHR_timeline_semaphore> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkSignalSemaphoreKHR(
- static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" );
- }
+ VkResult result =
+ getDispatcher()->vkSignalSemaphoreKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" );
}
//=== VK_INTEL_performance_query ===
- VULKAN_HPP_INLINE void Device::initializePerformanceApiINTEL(
- const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo ) const
+ VULKAN_HPP_INLINE void Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkInitializePerformanceApiINTEL &&
- "Function <vkInitializePerformanceApiINTEL> needs extension <VK_INTEL_performance_query> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkInitializePerformanceApiINTEL &&
+ "Function <vkInitializePerformanceApiINTEL> needs extension <VK_INTEL_performance_query> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkInitializePerformanceApiINTEL(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( &initializeInfo ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" );
- }
+ VkResult result = getDispatcher()->vkInitializePerformanceApiINTEL( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( &initializeInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" );
}
VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL() const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkUninitializePerformanceApiINTEL &&
- "Function <vkUninitializePerformanceApiINTEL> needs extension <VK_INTEL_performance_query> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkUninitializePerformanceApiINTEL &&
+ "Function <vkUninitializePerformanceApiINTEL> needs extension <VK_INTEL_performance_query> enabled!" );
getDispatcher()->vkUninitializePerformanceApiINTEL( static_cast<VkDevice>( m_device ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::setPerformanceMarkerINTEL(
- const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo ) const
+ VULKAN_HPP_INLINE void CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdSetPerformanceMarkerINTEL &&
- "Function <vkCmdSetPerformanceMarkerINTEL> needs extension <VK_INTEL_performance_query> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPerformanceMarkerINTEL &&
+ "Function <vkCmdSetPerformanceMarkerINTEL> needs extension <VK_INTEL_performance_query> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCmdSetPerformanceMarkerINTEL(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( &markerInfo ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" );
- }
+ VkResult result = getDispatcher()->vkCmdSetPerformanceMarkerINTEL( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( &markerInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" );
}
- VULKAN_HPP_INLINE void CommandBuffer::setPerformanceStreamMarkerINTEL(
- const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo ) const
+ VULKAN_HPP_INLINE void CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdSetPerformanceStreamMarkerINTEL &&
- "Function <vkCmdSetPerformanceStreamMarkerINTEL> needs extension <VK_INTEL_performance_query> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPerformanceStreamMarkerINTEL &&
+ "Function <vkCmdSetPerformanceStreamMarkerINTEL> needs extension <VK_INTEL_performance_query> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCmdSetPerformanceStreamMarkerINTEL(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( &markerInfo ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" );
- }
+ VkResult result = getDispatcher()->vkCmdSetPerformanceStreamMarkerINTEL( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( &markerInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" );
}
- VULKAN_HPP_INLINE void CommandBuffer::setPerformanceOverrideINTEL(
- const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo ) const
+ VULKAN_HPP_INLINE void CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdSetPerformanceOverrideINTEL &&
- "Function <vkCmdSetPerformanceOverrideINTEL> needs extension <VK_INTEL_performance_query> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPerformanceOverrideINTEL &&
+ "Function <vkCmdSetPerformanceOverrideINTEL> needs extension <VK_INTEL_performance_query> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCmdSetPerformanceOverrideINTEL(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( &overrideInfo ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" );
- }
+ VkResult result = getDispatcher()->vkCmdSetPerformanceOverrideINTEL( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( &overrideInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL
- Device::acquirePerformanceConfigurationINTEL(
- VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo ) const
+ Device::acquirePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo ) const
{
return VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL( *this, acquireInfo );
}
- VULKAN_HPP_INLINE void
- Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration ) const
+ VULKAN_HPP_INLINE void Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkQueueSetPerformanceConfigurationINTEL &&
- "Function <vkQueueSetPerformanceConfigurationINTEL> needs extension <VK_INTEL_performance_query> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkQueueSetPerformanceConfigurationINTEL &&
+ "Function <vkQueueSetPerformanceConfigurationINTEL> needs extension <VK_INTEL_performance_query> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkQueueSetPerformanceConfigurationINTEL(
- static_cast<VkQueue>( m_queue ), static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" );
- }
+ VkResult result = getDispatcher()->vkQueueSetPerformanceConfigurationINTEL( static_cast<VkQueue>( m_queue ),
+ static_cast<VkPerformanceConfigurationINTEL>( configuration ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PerformanceValueINTEL
- Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter ) const
+ Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPerformanceParameterINTEL &&
- "Function <vkGetPerformanceParameterINTEL> needs extension <VK_INTEL_performance_query> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPerformanceParameterINTEL &&
+ "Function <vkGetPerformanceParameterINTEL> needs extension <VK_INTEL_performance_query> enabled!" );
VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value;
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkGetPerformanceParameterINTEL( static_cast<VkDevice>( m_device ),
- static_cast<VkPerformanceParameterTypeINTEL>( parameter ),
- reinterpret_cast<VkPerformanceValueINTEL *>( &value ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" );
- }
+ VkResult result = getDispatcher()->vkGetPerformanceParameterINTEL(
+ static_cast<VkDevice>( m_device ), static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( &value ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" );
+
return value;
}
//=== VK_AMD_display_native_hdr ===
- VULKAN_HPP_INLINE void
- SwapchainKHR::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void SwapchainKHR::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkSetLocalDimmingAMD &&
- "Function <vkSetLocalDimmingAMD> needs extension <VK_AMD_display_native_hdr> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkSetLocalDimmingAMD && "Function <vkSetLocalDimmingAMD> needs extension <VK_AMD_display_native_hdr> enabled!" );
- getDispatcher()->vkSetLocalDimmingAMD( static_cast<VkDevice>( m_device ),
- static_cast<VkSwapchainKHR>( m_swapchain ),
- static_cast<VkBool32>( localDimmingEnable ) );
+ getDispatcher()->vkSetLocalDimmingAMD(
+ static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), static_cast<VkBool32>( localDimmingEnable ) );
}
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_imagepipe_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
- Instance::createImagePipeSurfaceFUCHSIA(
- VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ Instance::createImagePipeSurfaceFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
}
@@ -16824,9 +16301,9 @@ namespace VULKAN_HPP_NAMESPACE
# if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createMetalSurfaceEXT(
- VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+ Instance::createMetalSurfaceEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
}
@@ -16837,126 +16314,102 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>
PhysicalDevice::getFragmentShadingRatesKHR() const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR &&
- "Function <vkGetPhysicalDeviceFragmentShadingRatesKHR> needs extension <VK_KHR_fragment_shading_rate> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR &&
+ "Function <vkGetPhysicalDeviceFragmentShadingRatesKHR> needs extension <VK_KHR_fragment_shading_rate> enabled!" );
std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR> fragmentShadingRates;
uint32_t fragmentShadingRateCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VkResult result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ), &fragmentShadingRateCount, nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && fragmentShadingRateCount )
+ result =
+ getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &fragmentShadingRateCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && fragmentShadingRateCount )
{
fragmentShadingRates.resize( fragmentShadingRateCount );
- result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- &fragmentShadingRateCount,
- reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) ) );
+ result = getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ &fragmentShadingRateCount,
+ reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
+ VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
+ if ( fragmentShadingRateCount < fragmentShadingRates.size() )
{
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
- if ( fragmentShadingRateCount < fragmentShadingRates.size() )
- {
- fragmentShadingRates.resize( fragmentShadingRateCount );
- }
+ fragmentShadingRates.resize( fragmentShadingRateCount );
}
return fragmentShadingRates;
}
- VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR(
- const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize,
- const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize,
+ const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdSetFragmentShadingRateKHR &&
- "Function <vkCmdSetFragmentShadingRateKHR> needs extension <VK_KHR_fragment_shading_rate> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFragmentShadingRateKHR &&
+ "Function <vkCmdSetFragmentShadingRateKHR> needs extension <VK_KHR_fragment_shading_rate> enabled!" );
- getDispatcher()->vkCmdSetFragmentShadingRateKHR(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkExtent2D *>( &fragmentSize ),
- reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
+ getDispatcher()->vkCmdSetFragmentShadingRateKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkExtent2D *>( &fragmentSize ),
+ reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
}
//=== VK_EXT_buffer_device_address ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressEXT(
- const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress
+ Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetBufferDeviceAddressEXT &&
- "Function <vkGetBufferDeviceAddressEXT> needs extension <VK_EXT_buffer_device_address> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferDeviceAddressEXT &&
+ "Function <vkGetBufferDeviceAddressEXT> needs extension <VK_EXT_buffer_device_address> enabled!" );
+
+ VkDeviceAddress result =
+ getDispatcher()->vkGetBufferDeviceAddressEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
- return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( getDispatcher()->vkGetBufferDeviceAddressEXT(
- static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) ) );
+ return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
}
//=== VK_EXT_tooling_info ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>
- PhysicalDevice::getToolPropertiesEXT() const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties> PhysicalDevice::getToolPropertiesEXT() const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT &&
- "Function <vkGetPhysicalDeviceToolPropertiesEXT> needs extension <VK_EXT_tooling_info> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT &&
+ "Function <vkGetPhysicalDeviceToolPropertiesEXT> needs extension <VK_EXT_tooling_info> enabled!" );
std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties> toolProperties;
uint32_t toolCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VkResult result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT(
- static_cast<VkPhysicalDevice>( m_physicalDevice ), &toolCount, nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount )
+ result = getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), &toolCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && toolCount )
{
toolProperties.resize( toolCount );
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- &toolCount,
- reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) );
+ result = getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
+ VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
+ if ( toolCount < toolProperties.size() )
{
- VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
- if ( toolCount < toolProperties.size() )
- {
- toolProperties.resize( toolCount );
- }
+ toolProperties.resize( toolCount );
}
return toolProperties;
}
//=== VK_KHR_present_wait ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
- SwapchainKHR::waitForPresent( uint64_t presentId, uint64_t timeout ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result SwapchainKHR::waitForPresent( uint64_t presentId, uint64_t timeout ) const
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkWaitForPresentKHR &&
- "Function <vkWaitForPresentKHR> needs extension <VK_KHR_present_wait> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkWaitForPresentKHR && "Function <vkWaitForPresentKHR> needs extension <VK_KHR_present_wait> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkWaitForPresentKHR(
- static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), presentId, timeout ) );
- if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
- ( result != VULKAN_HPP_NAMESPACE::Result::eTimeout ) )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::waitForPresent" );
- }
- return result;
+ VkResult result =
+ getDispatcher()->vkWaitForPresentKHR( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), presentId, timeout );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::waitForPresent",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
//=== VK_NV_cooperative_matrix ===
@@ -16964,40 +16417,28 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV>
PhysicalDevice::getCooperativeMatrixPropertiesNV() const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV &&
- "Function <vkGetPhysicalDeviceCooperativeMatrixPropertiesNV> needs extension <VK_NV_cooperative_matrix> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV &&
+ "Function <vkGetPhysicalDeviceCooperativeMatrixPropertiesNV> needs extension <VK_NV_cooperative_matrix> enabled!" );
std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV> properties;
uint32_t propertyCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VkResult result;
do
{
result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
- static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
+ getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertyCount )
{
properties.resize( propertyCount );
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- &propertyCount,
- reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) ) );
+ result = getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result,
- VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
+ VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+ if ( propertyCount < properties.size() )
{
- VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
- if ( propertyCount < properties.size() )
- {
- properties.resize( propertyCount );
- }
+ properties.resize( propertyCount );
}
return properties;
}
@@ -17013,34 +16454,26 @@ namespace VULKAN_HPP_NAMESPACE
std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV> combinations;
uint32_t combinationCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VkResult result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
- static_cast<VkPhysicalDevice>( m_physicalDevice ), &combinationCount, nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && combinationCount )
+ result = getDispatcher()->vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ), &combinationCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && combinationCount )
{
combinations.resize( combinationCount );
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- &combinationCount,
- reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) ) );
+ result = getDispatcher()->vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ &combinationCount,
+ reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException(
- result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
+ VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
+ if ( combinationCount < combinations.size() )
{
- VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
- if ( combinationCount < combinations.size() )
- {
- combinations.resize( combinationCount );
- }
+ combinations.resize( combinationCount );
}
return combinations;
}
@@ -17049,257 +16482,202 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_full_screen_exclusive ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR>
- PhysicalDevice::getSurfacePresentModes2EXT(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
+ PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT &&
- "Function <vkGetPhysicalDeviceSurfacePresentModes2EXT> needs extension <VK_EXT_full_screen_exclusive> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT &&
+ "Function <vkGetPhysicalDeviceSurfacePresentModes2EXT> needs extension <VK_EXT_full_screen_exclusive> enabled!" );
std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR> presentModes;
uint32_t presentModeCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VkResult result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
- &presentModeCount,
- nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount )
+ result = getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+ &presentModeCount,
+ nullptr );
+ if ( ( result == VK_SUCCESS ) && presentModeCount )
{
presentModes.resize( presentModeCount );
- result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT(
- static_cast<VkPhysicalDevice>( m_physicalDevice ),
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
- &presentModeCount,
- reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
+ result = getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+ &presentModeCount,
+ reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
+ VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+ if ( presentModeCount < presentModes.size() )
{
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
- if ( presentModeCount < presentModes.size() )
- {
- presentModes.resize( presentModeCount );
- }
+ presentModes.resize( presentModeCount );
}
return presentModes;
}
VULKAN_HPP_INLINE void SwapchainKHR::acquireFullScreenExclusiveModeEXT() const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkAcquireFullScreenExclusiveModeEXT &&
- "Function <vkAcquireFullScreenExclusiveModeEXT> needs extension <VK_EXT_full_screen_exclusive> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireFullScreenExclusiveModeEXT &&
+ "Function <vkAcquireFullScreenExclusiveModeEXT> needs extension <VK_EXT_full_screen_exclusive> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkAcquireFullScreenExclusiveModeEXT(
- static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::acquireFullScreenExclusiveModeEXT" );
- }
+ VkResult result = getDispatcher()->vkAcquireFullScreenExclusiveModeEXT( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::acquireFullScreenExclusiveModeEXT" );
}
VULKAN_HPP_INLINE void SwapchainKHR::releaseFullScreenExclusiveModeEXT() const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkReleaseFullScreenExclusiveModeEXT &&
- "Function <vkReleaseFullScreenExclusiveModeEXT> needs extension <VK_EXT_full_screen_exclusive> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkReleaseFullScreenExclusiveModeEXT &&
+ "Function <vkReleaseFullScreenExclusiveModeEXT> needs extension <VK_EXT_full_screen_exclusive> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkReleaseFullScreenExclusiveModeEXT(
- static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::releaseFullScreenExclusiveModeEXT" );
- }
+ VkResult result = getDispatcher()->vkReleaseFullScreenExclusiveModeEXT( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::releaseFullScreenExclusiveModeEXT" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR
- Device::getGroupSurfacePresentModes2EXT(
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
+ Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetDeviceGroupSurfacePresentModes2EXT &&
- "Function <vkGetDeviceGroupSurfacePresentModes2EXT> needs extension <VK_EXT_full_screen_exclusive> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupSurfacePresentModes2EXT &&
+ "Function <vkGetDeviceGroupSurfacePresentModes2EXT> needs extension <VK_EXT_full_screen_exclusive> enabled!" );
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetDeviceGroupSurfacePresentModes2EXT(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
- reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" );
- }
+ VkResult result = getDispatcher()->vkGetDeviceGroupSurfacePresentModes2EXT( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+ reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" );
+
return modes;
}
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_EXT_headless_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createHeadlessSurfaceEXT(
- VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+ Instance::createHeadlessSurfaceEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
}
//=== VK_KHR_buffer_device_address ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressKHR(
- const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress
+ Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetBufferDeviceAddressKHR &&
- "Function <vkGetBufferDeviceAddressKHR> needs extension <VK_KHR_buffer_device_address> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferDeviceAddressKHR &&
+ "Function <vkGetBufferDeviceAddressKHR> needs extension <VK_KHR_buffer_device_address> enabled!" );
+
+ VkDeviceAddress result =
+ getDispatcher()->vkGetBufferDeviceAddressKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
- return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( getDispatcher()->vkGetBufferDeviceAddressKHR(
- static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) ) );
+ return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR(
- const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t
+ Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetBufferOpaqueCaptureAddressKHR &&
- "Function <vkGetBufferOpaqueCaptureAddressKHR> needs extension <VK_KHR_buffer_device_address> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferOpaqueCaptureAddressKHR &&
+ "Function <vkGetBufferOpaqueCaptureAddressKHR> needs extension <VK_KHR_buffer_device_address> enabled!" );
+
+ uint64_t result =
+ getDispatcher()->vkGetBufferOpaqueCaptureAddressKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
- return getDispatcher()->vkGetBufferOpaqueCaptureAddressKHR(
- static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+ return result;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR(
- const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t
+ Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddressKHR &&
- "Function <vkGetDeviceMemoryOpaqueCaptureAddressKHR> needs extension <VK_KHR_buffer_device_address> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddressKHR &&
+ "Function <vkGetDeviceMemoryOpaqueCaptureAddressKHR> needs extension <VK_KHR_buffer_device_address> enabled!" );
+
+ uint64_t result = getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddressKHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
- return getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddressKHR(
- static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
+ return result;
}
//=== VK_EXT_line_rasterization ===
- VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor,
- uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStippleEXT &&
- "Function <vkCmdSetLineStippleEXT> needs extension <VK_EXT_line_rasterization> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStippleEXT && "Function <vkCmdSetLineStippleEXT> needs extension <VK_EXT_line_rasterization> enabled!" );
- getDispatcher()->vkCmdSetLineStippleEXT(
- static_cast<VkCommandBuffer>( m_commandBuffer ), lineStippleFactor, lineStipplePattern );
+ getDispatcher()->vkCmdSetLineStippleEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), lineStippleFactor, lineStipplePattern );
}
//=== VK_EXT_host_query_reset ===
VULKAN_HPP_INLINE void QueryPool::resetEXT( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkResetQueryPoolEXT &&
- "Function <vkResetQueryPoolEXT> needs extension <VK_EXT_host_query_reset> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkResetQueryPoolEXT && "Function <vkResetQueryPoolEXT> needs extension <VK_EXT_host_query_reset> enabled!" );
- getDispatcher()->vkResetQueryPoolEXT(
- static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( m_queryPool ), firstQuery, queryCount );
+ getDispatcher()->vkResetQueryPoolEXT( static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( m_queryPool ), firstQuery, queryCount );
}
//=== VK_EXT_extended_dynamic_state ===
- VULKAN_HPP_INLINE void
- CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullModeEXT &&
- "Function <vkCmdSetCullModeEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullModeEXT && "Function <vkCmdSetCullModeEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
- getDispatcher()->vkCmdSetCullModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkCullModeFlags>( cullMode ) );
+ getDispatcher()->vkCmdSetCullModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCullModeFlags>( cullMode ) );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFaceEXT &&
- "Function <vkCmdSetFrontFaceEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFaceEXT && "Function <vkCmdSetFrontFaceEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
- getDispatcher()->vkCmdSetFrontFaceEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkFrontFace>( frontFace ) );
+ getDispatcher()->vkCmdSetFrontFaceEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkFrontFace>( frontFace ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT(
- VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdSetPrimitiveTopologyEXT &&
- "Function <vkCmdSetPrimitiveTopologyEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveTopologyEXT &&
+ "Function <vkCmdSetPrimitiveTopologyEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
- getDispatcher()->vkCmdSetPrimitiveTopologyEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkPrimitiveTopology>( primitiveTopology ) );
+ getDispatcher()->vkCmdSetPrimitiveTopologyEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPrimitiveTopology>( primitiveTopology ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdSetViewportWithCountEXT &&
- "Function <vkCmdSetViewportWithCountEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWithCountEXT &&
+ "Function <vkCmdSetViewportWithCountEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
- getDispatcher()->vkCmdSetViewportWithCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
- viewports.size(),
- reinterpret_cast<const VkViewport *>( viewports.data() ) );
+ getDispatcher()->vkCmdSetViewportWithCountEXT(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdSetScissorWithCountEXT &&
- "Function <vkCmdSetScissorWithCountEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetScissorWithCountEXT &&
+ "Function <vkCmdSetScissorWithCountEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
- getDispatcher()->vkCmdSetScissorWithCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
- scissors.size(),
- reinterpret_cast<const VkRect2D *>( scissors.data() ) );
+ getDispatcher()->vkCmdSetScissorWithCountEXT(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT(
- uint32_t firstBinding,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
- {
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdBindVertexBuffers2EXT &&
- "Function <vkCmdBindVertexBuffers2EXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
-
-# ifdef VULKAN_HPP_NO_EXCEPTIONS
- VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
- VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
- VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() );
-# else
+ VULKAN_HPP_INLINE void
+ CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides ) const
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindVertexBuffers2EXT &&
+ "Function <vkCmdBindVertexBuffers2EXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
if ( buffers.size() != offsets.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" );
}
if ( !sizes.empty() && buffers.size() != sizes.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" );
}
if ( !strides.empty() && buffers.size() != strides.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" );
}
-# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
getDispatcher()->vkCmdBindVertexBuffers2EXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstBinding,
@@ -17310,70 +16688,53 @@ namespace VULKAN_HPP_NAMESPACE
reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdSetDepthTestEnableEXT &&
- "Function <vkCmdSetDepthTestEnableEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthTestEnableEXT &&
+ "Function <vkCmdSetDepthTestEnableEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
- getDispatcher()->vkCmdSetDepthTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkBool32>( depthTestEnable ) );
+ getDispatcher()->vkCmdSetDepthTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthTestEnable ) );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdSetDepthWriteEnableEXT &&
- "Function <vkCmdSetDepthWriteEnableEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthWriteEnableEXT &&
+ "Function <vkCmdSetDepthWriteEnableEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
- getDispatcher()->vkCmdSetDepthWriteEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkBool32>( depthWriteEnable ) );
+ getDispatcher()->vkCmdSetDepthWriteEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthWriteEnable ) );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdSetDepthCompareOpEXT &&
- "Function <vkCmdSetDepthCompareOpEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthCompareOpEXT &&
+ "Function <vkCmdSetDepthCompareOpEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
- getDispatcher()->vkCmdSetDepthCompareOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkCompareOp>( depthCompareOp ) );
+ getDispatcher()->vkCmdSetDepthCompareOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCompareOp>( depthCompareOp ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT(
- VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT &&
- "Function <vkCmdSetDepthBoundsTestEnableEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT &&
+ "Function <vkCmdSetDepthBoundsTestEnableEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
- getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkBool32>( depthBoundsTestEnable ) );
+ getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBoundsTestEnable ) );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdSetStencilTestEnableEXT &&
- "Function <vkCmdSetStencilTestEnableEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilTestEnableEXT &&
+ "Function <vkCmdSetStencilTestEnableEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
- getDispatcher()->vkCmdSetStencilTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkBool32>( stencilTestEnable ) );
+ getDispatcher()->vkCmdSetStencilTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stencilTestEnable ) );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
- VULKAN_HPP_NAMESPACE::StencilOp failOp,
- VULKAN_HPP_NAMESPACE::StencilOp passOp,
- VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
- VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
+ VULKAN_HPP_NAMESPACE::StencilOp failOp,
+ VULKAN_HPP_NAMESPACE::StencilOp passOp,
+ VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
+ VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOpEXT &&
- "Function <vkCmdSetStencilOpEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOpEXT && "Function <vkCmdSetStencilOpEXT> needs extension <VK_EXT_extended_dynamic_state> enabled!" );
getDispatcher()->vkCmdSetStencilOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkStencilFaceFlags>( faceMask ),
@@ -17386,181 +16747,139 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_deferred_host_operations ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR
- Device::createDeferredOperationKHR(
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ Device::createDeferredOperationKHR( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR( *this, allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t DeferredOperationKHR::getMaxConcurrency() const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetDeferredOperationMaxConcurrencyKHR &&
- "Function <vkGetDeferredOperationMaxConcurrencyKHR> needs extension <VK_KHR_deferred_host_operations> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeferredOperationMaxConcurrencyKHR &&
+ "Function <vkGetDeferredOperationMaxConcurrencyKHR> needs extension <VK_KHR_deferred_host_operations> enabled!" );
+
+ uint32_t result =
+ getDispatcher()->vkGetDeferredOperationMaxConcurrencyKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( m_operation ) );
- return getDispatcher()->vkGetDeferredOperationMaxConcurrencyKHR(
- static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( m_operation ) );
+ return result;
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
- DeferredOperationKHR::getResult() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result DeferredOperationKHR::getResult() const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetDeferredOperationResultKHR &&
- "Function <vkGetDeferredOperationResultKHR> needs extension <VK_KHR_deferred_host_operations> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeferredOperationResultKHR &&
+ "Function <vkGetDeferredOperationResultKHR> needs extension <VK_KHR_deferred_host_operations> enabled!" );
- return static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetDeferredOperationResultKHR(
- static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( m_operation ) ) );
+ VkResult result =
+ getDispatcher()->vkGetDeferredOperationResultKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( m_operation ) );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result DeferredOperationKHR::join() const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkDeferredOperationJoinKHR &&
- "Function <vkDeferredOperationJoinKHR> needs extension <VK_KHR_deferred_host_operations> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkDeferredOperationJoinKHR &&
+ "Function <vkDeferredOperationJoinKHR> needs extension <VK_KHR_deferred_host_operations> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkDeferredOperationJoinKHR(
- static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( m_operation ) ) );
- if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
- ( result != VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR ) &&
- ( result != VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR ) )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::DeferredOperationKHR::join" );
- }
- return result;
+ VkResult result = getDispatcher()->vkDeferredOperationJoinKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( m_operation ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::DeferredOperationKHR::join",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
//=== VK_KHR_pipeline_executable_properties ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>
- Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo ) const
+ Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPipelineExecutablePropertiesKHR &&
- "Function <vkGetPipelineExecutablePropertiesKHR> needs extension <VK_KHR_pipeline_executable_properties> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineExecutablePropertiesKHR &&
+ "Function <vkGetPipelineExecutablePropertiesKHR> needs extension <VK_KHR_pipeline_executable_properties> enabled!" );
std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR> properties;
uint32_t executableCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VkResult result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPipelineExecutablePropertiesKHR(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
- &executableCount,
- nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && executableCount )
+ result = getDispatcher()->vkGetPipelineExecutablePropertiesKHR(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && executableCount )
{
properties.resize( executableCount );
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPipelineExecutablePropertiesKHR(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
- &executableCount,
- reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) ) );
+ result = getDispatcher()->vkGetPipelineExecutablePropertiesKHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
+ &executableCount,
+ reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
+ VULKAN_HPP_ASSERT( executableCount <= properties.size() );
+ if ( executableCount < properties.size() )
{
- VULKAN_HPP_ASSERT( executableCount <= properties.size() );
- if ( executableCount < properties.size() )
- {
- properties.resize( executableCount );
- }
+ properties.resize( executableCount );
}
return properties;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR>
- Device::getPipelineExecutableStatisticsKHR(
- const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const
+ Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPipelineExecutableStatisticsKHR &&
- "Function <vkGetPipelineExecutableStatisticsKHR> needs extension <VK_KHR_pipeline_executable_properties> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineExecutableStatisticsKHR &&
+ "Function <vkGetPipelineExecutableStatisticsKHR> needs extension <VK_KHR_pipeline_executable_properties> enabled!" );
std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR> statistics;
uint32_t statisticCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VkResult result;
do
{
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPipelineExecutableStatisticsKHR(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
- &statisticCount,
- nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && statisticCount )
+ result = getDispatcher()->vkGetPipelineExecutableStatisticsKHR(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && statisticCount )
{
statistics.resize( statisticCount );
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPipelineExecutableStatisticsKHR(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
- &statisticCount,
- reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) ) );
+ result = getDispatcher()->vkGetPipelineExecutableStatisticsKHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
+ &statisticCount,
+ reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
+ VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
+ if ( statisticCount < statistics.size() )
{
- VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
- if ( statisticCount < statistics.size() )
- {
- statistics.resize( statisticCount );
- }
+ statistics.resize( statisticCount );
}
return statistics;
}
- VULKAN_HPP_NODISCARD
- VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>
- Device::getPipelineExecutableInternalRepresentationsKHR(
- const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>
+ Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR &&
- "Function <vkGetPipelineExecutableInternalRepresentationsKHR> needs extension <VK_KHR_pipeline_executable_properties> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR &&
+ "Function <vkGetPipelineExecutableInternalRepresentationsKHR> needs extension <VK_KHR_pipeline_executable_properties> enabled!" );
std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR> internalRepresentations;
uint32_t internalRepresentationCount;
- VULKAN_HPP_NAMESPACE::Result result;
+ VkResult result;
do
{
- result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR(
+ result = getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && internalRepresentationCount )
+ {
+ internalRepresentations.resize( internalRepresentationCount );
+ result = getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR(
static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
&internalRepresentationCount,
- nullptr ) );
- if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && internalRepresentationCount )
- {
- internalRepresentations.resize( internalRepresentationCount );
- result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
- &internalRepresentationCount,
- reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) ) );
+ reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) );
}
- } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result,
- VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
- }
- if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
+ VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
+ if ( internalRepresentationCount < internalRepresentations.size() )
{
- VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
- if ( internalRepresentationCount < internalRepresentations.size() )
- {
- internalRepresentations.resize( internalRepresentationCount );
- }
+ internalRepresentations.resize( internalRepresentationCount );
}
return internalRepresentations;
}
@@ -17568,73 +16887,63 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_device_generated_commands ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
- Device::getGeneratedCommandsMemoryRequirementsNV(
- const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT
+ Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV &&
- "Function <vkGetGeneratedCommandsMemoryRequirementsNV> needs extension <VK_NV_device_generated_commands> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV &&
+ "Function <vkGetGeneratedCommandsMemoryRequirementsNV> needs extension <VK_NV_device_generated_commands> enabled!" );
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
- getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return memoryRequirements;
}
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getGeneratedCommandsMemoryRequirementsNV(
- const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV &&
- "Function <vkGetGeneratedCommandsMemoryRequirementsNV> needs extension <VK_NV_device_generated_commands> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV &&
+ "Function <vkGetGeneratedCommandsMemoryRequirementsNV> needs extension <VK_NV_device_generated_commands> enabled!" );
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
- structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
- getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+ getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return structureChain;
}
- VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV(
- const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdPreprocessGeneratedCommandsNV &&
- "Function <vkCmdPreprocessGeneratedCommandsNV> needs extension <VK_NV_device_generated_commands> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPreprocessGeneratedCommandsNV &&
+ "Function <vkCmdPreprocessGeneratedCommandsNV> needs extension <VK_NV_device_generated_commands> enabled!" );
- getDispatcher()->vkCmdPreprocessGeneratedCommandsNV(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
+ getDispatcher()->vkCmdPreprocessGeneratedCommandsNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV(
- VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
- const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
+ const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdExecuteGeneratedCommandsNV &&
- "Function <vkCmdExecuteGeneratedCommandsNV> needs extension <VK_NV_device_generated_commands> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdExecuteGeneratedCommandsNV &&
+ "Function <vkCmdExecuteGeneratedCommandsNV> needs extension <VK_NV_device_generated_commands> enabled!" );
- getDispatcher()->vkCmdExecuteGeneratedCommandsNV(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkBool32>( isPreprocessed ),
- reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
+ getDispatcher()->vkCmdExecuteGeneratedCommandsNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkBool32>( isPreprocessed ),
+ reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
- VULKAN_HPP_NAMESPACE::Pipeline pipeline,
- uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+ uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdBindPipelineShaderGroupNV &&
- "Function <vkCmdBindPipelineShaderGroupNV> needs extension <VK_NV_device_generated_commands> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindPipelineShaderGroupNV &&
+ "Function <vkCmdBindPipelineShaderGroupNV> needs extension <VK_NV_device_generated_commands> enabled!" );
getDispatcher()->vkCmdBindPipelineShaderGroupNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
@@ -17643,48 +16952,39 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV
- Device::createIndirectCommandsLayoutNV(
- VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ Device::createIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV( *this, createInfo, allocator );
}
//=== VK_EXT_acquire_drm_display ===
- VULKAN_HPP_INLINE void PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd,
- VULKAN_HPP_NAMESPACE::DisplayKHR display ) const
+ VULKAN_HPP_INLINE void PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireDrmDisplayEXT &&
- "Function <vkAcquireDrmDisplayEXT> needs extension <VK_EXT_acquire_drm_display> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireDrmDisplayEXT && "Function <vkAcquireDrmDisplayEXT> needs extension <VK_EXT_acquire_drm_display> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkAcquireDrmDisplayEXT(
- static_cast<VkPhysicalDevice>( m_physicalDevice ), drmFd, static_cast<VkDisplayKHR>( display ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" );
- }
+ VkResult result =
+ getDispatcher()->vkAcquireDrmDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), drmFd, static_cast<VkDisplayKHR>( display ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DisplayKHR
- PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DisplayKHR PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId ) const
{
return VULKAN_HPP_RAII_NAMESPACE::DisplayKHR( *this, drmFd, connectorId );
}
//=== VK_EXT_private_data ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot Device::createPrivateDataSlotEXT(
- VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot
+ Device::createPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot( *this, createInfo, allocator );
}
- VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT(
- VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
- Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
+ Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkDestroyPrivateDataSlotEXT &&
"Function <vkDestroyPrivateDataSlotEXT> needs extension <VK_EXT_private_data> enabled!" );
@@ -17692,8 +16992,7 @@ namespace VULKAN_HPP_NAMESPACE
getDispatcher()->vkDestroyPrivateDataSlotEXT(
static_cast<VkDevice>( m_device ),
static_cast<VkPrivateDataSlot>( privateDataSlot ),
- reinterpret_cast<const VkAllocationCallbacks *>(
- static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+ reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
VULKAN_HPP_INLINE void Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
@@ -17701,95 +17000,92 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
uint64_t data ) const
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkSetPrivateDataEXT &&
- "Function <vkSetPrivateDataEXT> needs extension <VK_EXT_private_data> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkSetPrivateDataEXT && "Function <vkSetPrivateDataEXT> needs extension <VK_EXT_private_data> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkSetPrivateDataEXT( static_cast<VkDevice>( m_device ),
- static_cast<VkObjectType>( objectType_ ),
- objectHandle,
- static_cast<VkPrivateDataSlot>( privateDataSlot ),
- data ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" );
- }
+ VkResult result = getDispatcher()->vkSetPrivateDataEXT(
+ static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t
- Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
- uint64_t objectHandle,
- VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
+ uint64_t objectHandle,
+ VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkGetPrivateDataEXT &&
- "Function <vkGetPrivateDataEXT> needs extension <VK_EXT_private_data> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPrivateDataEXT && "Function <vkGetPrivateDataEXT> needs extension <VK_EXT_private_data> enabled!" );
uint64_t data;
- getDispatcher()->vkGetPrivateDataEXT( static_cast<VkDevice>( m_device ),
- static_cast<VkObjectType>( objectType_ ),
- objectHandle,
- static_cast<VkPrivateDataSlot>( privateDataSlot ),
- &data );
+ getDispatcher()->vkGetPrivateDataEXT(
+ static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data );
+
return data;
}
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_video_encode_queue ===
- VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR(
- const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEncodeVideoKHR &&
- "Function <vkCmdEncodeVideoKHR> needs extension <VK_KHR_video_encode_queue> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEncodeVideoKHR && "Function <vkCmdEncodeVideoKHR> needs extension <VK_KHR_video_encode_queue> enabled!" );
- getDispatcher()->vkCmdEncodeVideoKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkVideoEncodeInfoKHR *>( &encodeInfo ) );
+ getDispatcher()->vkCmdEncodeVideoKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkVideoEncodeInfoKHR *>( &encodeInfo ) );
}
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+# if defined( VK_USE_PLATFORM_METAL_EXT )
+ //=== VK_EXT_metal_objects ===
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT Device::exportMetalObjectsEXT() const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkExportMetalObjectsEXT && "Function <vkExportMetalObjectsEXT> needs extension <VK_EXT_metal_objects> enabled!" );
+
+ VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT metalObjectsInfo;
+ getDispatcher()->vkExportMetalObjectsEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) );
+
+ return metalObjectsInfo;
+ }
+
+ template <typename X, typename Y, typename... Z>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::exportMetalObjectsEXT() const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkExportMetalObjectsEXT && "Function <vkExportMetalObjectsEXT> needs extension <VK_EXT_metal_objects> enabled!" );
+
+ StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT & metalObjectsInfo = structureChain.template get<VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT>();
+ getDispatcher()->vkExportMetalObjectsEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) );
+
+ return structureChain;
+ }
+# endif /*VK_USE_PLATFORM_METAL_EXT*/
+
//=== VK_KHR_synchronization2 ===
- VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR(
- VULKAN_HPP_NAMESPACE::Event event,
- const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
+ const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetEvent2KHR &&
- "Function <vkCmdSetEvent2KHR> needs extension <VK_KHR_synchronization2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetEvent2KHR && "Function <vkCmdSetEvent2KHR> needs extension <VK_KHR_synchronization2> enabled!" );
- getDispatcher()->vkCmdSetEvent2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkEvent>( event ),
- reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
+ getDispatcher()->vkCmdSetEvent2KHR(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResetEvent2KHR &&
- "Function <vkCmdResetEvent2KHR> needs extension <VK_KHR_synchronization2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResetEvent2KHR && "Function <vkCmdResetEvent2KHR> needs extension <VK_KHR_synchronization2> enabled!" );
- getDispatcher()->vkCmdResetEvent2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkEvent>( event ),
- static_cast<VkPipelineStageFlags2>( stageMask ) );
+ getDispatcher()->vkCmdResetEvent2KHR(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
}
VULKAN_HPP_INLINE void
- CommandBuffer::waitEvents2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos )
- const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+ CommandBuffer::waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos ) const
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWaitEvents2KHR &&
- "Function <vkCmdWaitEvents2KHR> needs extension <VK_KHR_synchronization2> enabled!" );
-
-# ifdef VULKAN_HPP_NO_EXCEPTIONS
- VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() );
-# else
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWaitEvents2KHR && "Function <vkCmdWaitEvents2KHR> needs extension <VK_KHR_synchronization2> enabled!" );
if ( events.size() != dependencyInfos.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" );
}
-# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
getDispatcher()->vkCmdWaitEvents2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
events.size(),
@@ -17797,8 +17093,7 @@ namespace VULKAN_HPP_NAMESPACE
reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR(
- const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPipelineBarrier2KHR &&
"Function <vkCmdPipelineBarrier2KHR> needs extension <VK_KHR_synchronization2> enabled!" );
@@ -17809,38 +17104,28 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
VULKAN_HPP_NAMESPACE::QueryPool queryPool,
- uint32_t query ) const VULKAN_HPP_NOEXCEPT
+ uint32_t query ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteTimestamp2KHR &&
- "Function <vkCmdWriteTimestamp2KHR> needs extension <VK_KHR_synchronization2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteTimestamp2KHR && "Function <vkCmdWriteTimestamp2KHR> needs extension <VK_KHR_synchronization2> enabled!" );
- getDispatcher()->vkCmdWriteTimestamp2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkPipelineStageFlags2>( stage ),
- static_cast<VkQueryPool>( queryPool ),
- query );
+ getDispatcher()->vkCmdWriteTimestamp2KHR(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
}
- VULKAN_HPP_INLINE void Queue::submit2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,
- VULKAN_HPP_NAMESPACE::Fence fence ) const
+ VULKAN_HPP_INLINE void Queue::submit2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,
+ VULKAN_HPP_NAMESPACE::Fence fence ) const
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkQueueSubmit2KHR &&
- "Function <vkQueueSubmit2KHR> needs extension <VK_KHR_synchronization2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkQueueSubmit2KHR && "Function <vkQueueSubmit2KHR> needs extension <VK_KHR_synchronization2> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
- getDispatcher()->vkQueueSubmit2KHR( static_cast<VkQueue>( m_queue ),
- submits.size(),
- reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ),
- static_cast<VkFence>( fence ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" );
- }
+ VkResult result = getDispatcher()->vkQueueSubmit2KHR(
+ static_cast<VkQueue>( m_queue ), submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" );
}
VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
- uint32_t marker ) const VULKAN_HPP_NOEXCEPT
+ uint32_t marker ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteBufferMarker2AMD &&
"Function <vkCmdWriteBufferMarker2AMD> needs extension <VK_KHR_synchronization2> enabled!" );
@@ -17852,102 +17137,185 @@ namespace VULKAN_HPP_NAMESPACE
marker );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV>
- Queue::getCheckpointData2NV() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV> Queue::getCheckpointData2NV() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueueCheckpointData2NV &&
"Function <vkGetQueueCheckpointData2NV> needs extension <VK_KHR_synchronization2> enabled!" );
- uint32_t checkpointDataCount;
+ std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV> checkpointData;
+ uint32_t checkpointDataCount;
getDispatcher()->vkGetQueueCheckpointData2NV( static_cast<VkQueue>( m_queue ), &checkpointDataCount, nullptr );
- std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV> checkpointData( checkpointDataCount );
- getDispatcher()->vkGetQueueCheckpointData2NV( static_cast<VkQueue>( m_queue ),
- &checkpointDataCount,
- reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
- VULKAN_HPP_ASSERT( checkpointDataCount == checkpointData.size() );
+ checkpointData.resize( checkpointDataCount );
+ getDispatcher()->vkGetQueueCheckpointData2NV(
+ static_cast<VkQueue>( m_queue ), &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
+
+ VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
+ if ( checkpointDataCount < checkpointData.size() )
+ {
+ checkpointData.resize( checkpointDataCount );
+ }
return checkpointData;
}
//=== VK_NV_fragment_shading_rate_enums ===
- VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateEnumNV(
- VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate,
- const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate,
+ const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdSetFragmentShadingRateEnumNV &&
- "Function <vkCmdSetFragmentShadingRateEnumNV> needs extension <VK_NV_fragment_shading_rate_enums> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFragmentShadingRateEnumNV &&
+ "Function <vkCmdSetFragmentShadingRateEnumNV> needs extension <VK_NV_fragment_shading_rate_enums> enabled!" );
- getDispatcher()->vkCmdSetFragmentShadingRateEnumNV(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkFragmentShadingRateNV>( shadingRate ),
- reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
+ getDispatcher()->vkCmdSetFragmentShadingRateEnumNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkFragmentShadingRateNV>( shadingRate ),
+ reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
+ }
+
+ //=== VK_EXT_mesh_shader ===
+
+ VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksEXT && "Function <vkCmdDrawMeshTasksEXT> needs extension <VK_EXT_mesh_shader> enabled!" );
+
+ getDispatcher()->vkCmdDrawMeshTasksEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), groupCountX, groupCountY, groupCountZ );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ uint32_t drawCount,
+ uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectEXT &&
+ "Function <vkCmdDrawMeshTasksIndirectEXT> needs extension <VK_EXT_mesh_shader> enabled!" );
+
+ getDispatcher()->vkCmdDrawMeshTasksIndirectEXT(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset,
+ VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+ VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+ uint32_t maxDrawCount,
+ uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectCountEXT &&
+ "Function <vkCmdDrawMeshTasksIndirectCountEXT> needs extension <VK_EXT_mesh_shader> enabled!" );
+
+ getDispatcher()->vkCmdDrawMeshTasksIndirectCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkBuffer>( buffer ),
+ static_cast<VkDeviceSize>( offset ),
+ static_cast<VkBuffer>( countBuffer ),
+ static_cast<VkDeviceSize>( countBufferOffset ),
+ maxDrawCount,
+ stride );
}
//=== VK_KHR_copy_commands2 ===
- VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR(
- const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBuffer2KHR &&
- "Function <vkCmdCopyBuffer2KHR> needs extension <VK_KHR_copy_commands2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBuffer2KHR && "Function <vkCmdCopyBuffer2KHR> needs extension <VK_KHR_copy_commands2> enabled!" );
- getDispatcher()->vkCmdCopyBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
+ getDispatcher()->vkCmdCopyBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR(
- const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImage2KHR &&
- "Function <vkCmdCopyImage2KHR> needs extension <VK_KHR_copy_commands2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImage2KHR && "Function <vkCmdCopyImage2KHR> needs extension <VK_KHR_copy_commands2> enabled!" );
- getDispatcher()->vkCmdCopyImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
+ getDispatcher()->vkCmdCopyImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR(
- const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBufferToImage2KHR &&
"Function <vkCmdCopyBufferToImage2KHR> needs extension <VK_KHR_copy_commands2> enabled!" );
- getDispatcher()->vkCmdCopyBufferToImage2KHR(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
+ getDispatcher()->vkCmdCopyBufferToImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR(
- const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImageToBuffer2KHR &&
"Function <vkCmdCopyImageToBuffer2KHR> needs extension <VK_KHR_copy_commands2> enabled!" );
- getDispatcher()->vkCmdCopyImageToBuffer2KHR(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
+ getDispatcher()->vkCmdCopyImageToBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR(
- const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBlitImage2KHR &&
- "Function <vkCmdBlitImage2KHR> needs extension <VK_KHR_copy_commands2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBlitImage2KHR && "Function <vkCmdBlitImage2KHR> needs extension <VK_KHR_copy_commands2> enabled!" );
- getDispatcher()->vkCmdBlitImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
+ getDispatcher()->vkCmdBlitImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR(
- const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResolveImage2KHR &&
- "Function <vkCmdResolveImage2KHR> needs extension <VK_KHR_copy_commands2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResolveImage2KHR && "Function <vkCmdResolveImage2KHR> needs extension <VK_KHR_copy_commands2> enabled!" );
getDispatcher()->vkCmdResolveImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
}
+ //=== VK_EXT_image_compression_control ===
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT
+ Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout2EXT &&
+ "Function <vkGetImageSubresourceLayout2EXT> needs extension <VK_EXT_image_compression_control> enabled!" );
+
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT layout;
+ getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast<VkDevice>( m_device ),
+ static_cast<VkImage>( m_image ),
+ reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ),
+ reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
+
+ return layout;
+ }
+
+ template <typename X, typename Y, typename... Z>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout2EXT &&
+ "Function <vkGetImageSubresourceLayout2EXT> needs extension <VK_EXT_image_compression_control> enabled!" );
+
+ StructureChain<X, Y, Z...> structureChain;
+ VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>();
+ getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast<VkDevice>( m_device ),
+ static_cast<VkImage>( m_image ),
+ reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ),
+ reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
+
+ return structureChain;
+ }
+
+ //=== VK_EXT_device_fault ===
+
+ VULKAN_HPP_NODISCARD
+ VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::Result, std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>>
+ Device::getFaultInfoEXT() const
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceFaultInfoEXT && "Function <vkGetDeviceFaultInfoEXT> needs extension <VK_EXT_device_fault> enabled!" );
+
+ std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT> data;
+ VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT & faultCounts = data.first;
+ VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT & faultInfo = data.second;
+ VkResult result = getDispatcher()->vkGetDeviceFaultInfoEXT(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<VkDeviceFaultCountsEXT *>( &faultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( &faultInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::getFaultInfoEXT",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } );
+
+ return std::make_pair( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+ }
+
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_NV_acquire_winrt_display ===
@@ -17956,17 +17324,11 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireWinrtDisplayNV &&
"Function <vkAcquireWinrtDisplayNV> needs extension <VK_NV_acquire_winrt_display> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkAcquireWinrtDisplayNV(
- static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( m_display ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::acquireWinrtNV" );
- }
+ VkResult result = getDispatcher()->vkAcquireWinrtDisplayNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( m_display ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::acquireWinrtNV" );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DisplayKHR
- PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DisplayKHR PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId ) const
{
return VULKAN_HPP_RAII_NAMESPACE::DisplayKHR( *this, deviceRelativeId );
}
@@ -17975,272 +17337,229 @@ namespace VULKAN_HPP_NAMESPACE
# if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
//=== VK_EXT_directfb_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createDirectFBSurfaceEXT(
- VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+ Instance::createDirectFBSurfaceEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
- PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex,
- IDirectFB & dfb ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceDirectFBPresentationSupportEXT &&
- "Function <vkGetPhysicalDeviceDirectFBPresentationSupportEXT> needs extension <VK_EXT_directfb_surface> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDirectFBPresentationSupportEXT &&
+ "Function <vkGetPhysicalDeviceDirectFBPresentationSupportEXT> needs extension <VK_EXT_directfb_surface> enabled!" );
+
+ VkBool32 result =
+ getDispatcher()->vkGetPhysicalDeviceDirectFBPresentationSupportEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &dfb );
- return static_cast<VULKAN_HPP_NAMESPACE::Bool32>(
- getDispatcher()->vkGetPhysicalDeviceDirectFBPresentationSupportEXT(
- static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &dfb ) );
+ return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
}
# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
//=== VK_KHR_ray_tracing_pipeline ===
- VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR(
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
- uint32_t width,
- uint32_t height,
- uint32_t depth ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
+ const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
+ const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
+ const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
+ uint32_t width,
+ uint32_t height,
+ uint32_t depth ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysKHR &&
- "Function <vkCmdTraceRaysKHR> needs extension <VK_KHR_ray_tracing_pipeline> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysKHR && "Function <vkCmdTraceRaysKHR> needs extension <VK_KHR_ray_tracing_pipeline> enabled!" );
- getDispatcher()->vkCmdTraceRaysKHR(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
- width,
- height,
- depth );
- }
-
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline>
- Device::createRayTracingPipelinesKHR(
- VULKAN_HPP_NAMESPACE::Optional<
- const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const &
- pipelineCache,
- VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ getDispatcher()->vkCmdTraceRaysKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
+ width,
+ height,
+ depth );
+ }
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> Device::createRayTracingPipelinesKHR(
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::Pipelines( *this, deferredOperation, pipelineCache, createInfos, allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline Device::createRayTracingPipelineKHR(
- VULKAN_HPP_NAMESPACE::Optional<
- const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const &
- pipelineCache,
- VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+ VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, deferredOperation, pipelineCache, createInfo, allocator );
}
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<DataType>
- Pipeline::getRayTracingShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const
+ Pipeline::getRayTracingShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR &&
- "Function <vkGetRayTracingShaderGroupHandlesKHR> needs extension <VK_KHR_ray_tracing_pipeline> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR &&
+ "Function <vkGetRayTracingShaderGroupHandlesKHR> needs extension <VK_KHR_ray_tracing_pipeline> enabled!" );
VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
std::vector<DataType> data( dataSize / sizeof( DataType ) );
- Result result = static_cast<Result>(
- getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR( static_cast<VkDevice>( m_device ),
- static_cast<VkPipeline>( m_pipeline ),
- firstGroup,
- groupCount,
- data.size() * sizeof( DataType ),
- reinterpret_cast<void *>( data.data() ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandlesKHR" );
- }
+ VkResult result = getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkPipeline>( m_pipeline ),
+ firstGroup,
+ groupCount,
+ data.size() * sizeof( DataType ),
+ reinterpret_cast<void *>( data.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandlesKHR" );
+
return data;
}
template <typename DataType>
- VULKAN_HPP_NODISCARD DataType Pipeline::getRayTracingShaderGroupHandleKHR( uint32_t firstGroup,
- uint32_t groupCount ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Pipeline::getRayTracingShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const
{
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR &&
+ "Function <vkGetRayTracingShaderGroupHandlesKHR> needs extension <VK_KHR_ray_tracing_pipeline> enabled!" );
+
DataType data;
- Result result = static_cast<Result>(
- getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR( static_cast<VkDevice>( m_device ),
- static_cast<VkPipeline>( m_pipeline ),
- firstGroup,
- groupCount,
- sizeof( DataType ),
- reinterpret_cast<void *>( &data ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandleKHR" );
- }
+ VkResult result = getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkPipeline>( m_pipeline ),
+ firstGroup,
+ groupCount,
+ sizeof( DataType ),
+ reinterpret_cast<void *>( &data ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandleKHR" );
+
return data;
}
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<DataType>
- Pipeline::getRayTracingCaptureReplayShaderGroupHandlesKHR( uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize ) const
+ Pipeline::getRayTracingCaptureReplayShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR &&
- "Function <vkGetRayTracingCaptureReplayShaderGroupHandlesKHR> needs extension <VK_KHR_ray_tracing_pipeline> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR &&
+ "Function <vkGetRayTracingCaptureReplayShaderGroupHandlesKHR> needs extension <VK_KHR_ray_tracing_pipeline> enabled!" );
VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
std::vector<DataType> data( dataSize / sizeof( DataType ) );
- Result result = static_cast<Result>(
- getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( static_cast<VkDevice>( m_device ),
- static_cast<VkPipeline>( m_pipeline ),
- firstGroup,
- groupCount,
- data.size() * sizeof( DataType ),
- reinterpret_cast<void *>( data.data() ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException(
- result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
- }
+ VkResult result = getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkPipeline>( m_pipeline ),
+ firstGroup,
+ groupCount,
+ data.size() * sizeof( DataType ),
+ reinterpret_cast<void *>( data.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
+
return data;
}
template <typename DataType>
- VULKAN_HPP_NODISCARD DataType Pipeline::getRayTracingCaptureReplayShaderGroupHandleKHR( uint32_t firstGroup,
- uint32_t groupCount ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Pipeline::getRayTracingCaptureReplayShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const
{
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR &&
+ "Function <vkGetRayTracingCaptureReplayShaderGroupHandlesKHR> needs extension <VK_KHR_ray_tracing_pipeline> enabled!" );
+
DataType data;
- Result result = static_cast<Result>(
- getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( static_cast<VkDevice>( m_device ),
- static_cast<VkPipeline>( m_pipeline ),
- firstGroup,
- groupCount,
- sizeof( DataType ),
- reinterpret_cast<void *>( &data ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException(
- result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingCaptureReplayShaderGroupHandleKHR" );
- }
+ VkResult result = getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( static_cast<VkDevice>( m_device ),
+ static_cast<VkPipeline>( m_pipeline ),
+ firstGroup,
+ groupCount,
+ sizeof( DataType ),
+ reinterpret_cast<void *>( &data ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingCaptureReplayShaderGroupHandleKHR" );
+
return data;
}
- VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR(
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
- const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
- VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
+ const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
+ const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
+ const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
+ VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdTraceRaysIndirectKHR &&
- "Function <vkCmdTraceRaysIndirectKHR> needs extension <VK_KHR_ray_tracing_pipeline> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysIndirectKHR &&
+ "Function <vkCmdTraceRaysIndirectKHR> needs extension <VK_KHR_ray_tracing_pipeline> enabled!" );
- getDispatcher()->vkCmdTraceRaysIndirectKHR(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
- reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
- static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
+ getDispatcher()->vkCmdTraceRaysIndirectKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
+ reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
+ static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize
- Pipeline::getRayTracingShaderGroupStackSizeKHR(
- uint32_t group, VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT
+ Pipeline::getRayTracingShaderGroupStackSizeKHR( uint32_t group, VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetRayTracingShaderGroupStackSizeKHR &&
- "Function <vkGetRayTracingShaderGroupStackSizeKHR> needs extension <VK_KHR_ray_tracing_pipeline> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupStackSizeKHR &&
+ "Function <vkGetRayTracingShaderGroupStackSizeKHR> needs extension <VK_KHR_ray_tracing_pipeline> enabled!" );
+
+ VkDeviceSize result = getDispatcher()->vkGetRayTracingShaderGroupStackSizeKHR(
+ static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), group, static_cast<VkShaderGroupShaderKHR>( groupShader ) );
- return static_cast<VULKAN_HPP_NAMESPACE::DeviceSize>(
- getDispatcher()->vkGetRayTracingShaderGroupStackSizeKHR( static_cast<VkDevice>( m_device ),
- static_cast<VkPipeline>( m_pipeline ),
- group,
- static_cast<VkShaderGroupShaderKHR>( groupShader ) ) );
+ return static_cast<VULKAN_HPP_NAMESPACE::DeviceSize>( result );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdSetRayTracingPipelineStackSizeKHR &&
- "Function <vkCmdSetRayTracingPipelineStackSizeKHR> needs extension <VK_KHR_ray_tracing_pipeline> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRayTracingPipelineStackSizeKHR &&
+ "Function <vkCmdSetRayTracingPipelineStackSizeKHR> needs extension <VK_KHR_ray_tracing_pipeline> enabled!" );
- getDispatcher()->vkCmdSetRayTracingPipelineStackSizeKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
- pipelineStackSize );
+ getDispatcher()->vkCmdSetRayTracingPipelineStackSizeKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), pipelineStackSize );
}
//=== VK_EXT_vertex_input_dynamic_state ===
VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions,
- ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions )
- const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions ) const
+ VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdSetVertexInputEXT &&
- "Function <vkCmdSetVertexInputEXT> needs extension <VK_EXT_vertex_input_dynamic_state> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetVertexInputEXT &&
+ "Function <vkCmdSetVertexInputEXT> needs extension <VK_EXT_vertex_input_dynamic_state> enabled!" );
- getDispatcher()->vkCmdSetVertexInputEXT(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- vertexBindingDescriptions.size(),
- reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( vertexBindingDescriptions.data() ),
- vertexAttributeDescriptions.size(),
- reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( vertexAttributeDescriptions.data() ) );
+ getDispatcher()->vkCmdSetVertexInputEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ vertexBindingDescriptions.size(),
+ reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( vertexBindingDescriptions.data() ),
+ vertexAttributeDescriptions.size(),
+ reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( vertexAttributeDescriptions.data() ) );
}
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_external_memory ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE zx_handle_t Device::getMemoryZirconHandleFUCHSIA(
- const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE zx_handle_t
+ Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetMemoryZirconHandleFUCHSIA &&
- "Function <vkGetMemoryZirconHandleFUCHSIA> needs extension <VK_FUCHSIA_external_memory> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryZirconHandleFUCHSIA &&
+ "Function <vkGetMemoryZirconHandleFUCHSIA> needs extension <VK_FUCHSIA_external_memory> enabled!" );
+
+ zx_handle_t zirconHandle;
+ VkResult result = getDispatcher()->vkGetMemoryZirconHandleFUCHSIA(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" );
- zx_handle_t zirconHandle;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetMemoryZirconHandleFUCHSIA(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ),
- &zirconHandle ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" );
- }
return zirconHandle;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA
- Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
- zx_handle_t zirconHandle ) const
+ Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetMemoryZirconHandlePropertiesFUCHSIA &&
- "Function <vkGetMemoryZirconHandlePropertiesFUCHSIA> needs extension <VK_FUCHSIA_external_memory> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryZirconHandlePropertiesFUCHSIA &&
+ "Function <vkGetMemoryZirconHandlePropertiesFUCHSIA> needs extension <VK_FUCHSIA_external_memory> enabled!" );
VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetMemoryZirconHandlePropertiesFUCHSIA(
- static_cast<VkDevice>( m_device ),
- static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
- zirconHandle,
- reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( &memoryZirconHandleProperties ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" );
- }
+ VkResult result =
+ getDispatcher()->vkGetMemoryZirconHandlePropertiesFUCHSIA( static_cast<VkDevice>( m_device ),
+ static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
+ zirconHandle,
+ reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( &memoryZirconHandleProperties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" );
+
return memoryZirconHandleProperties;
}
# endif /*VK_USE_PLATFORM_FUCHSIA*/
@@ -18248,40 +17567,28 @@ namespace VULKAN_HPP_NAMESPACE
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_external_semaphore ===
- VULKAN_HPP_INLINE void Device::importSemaphoreZirconHandleFUCHSIA(
- const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo ) const
+ VULKAN_HPP_INLINE void
+ Device::importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkImportSemaphoreZirconHandleFUCHSIA &&
- "Function <vkImportSemaphoreZirconHandleFUCHSIA> needs extension <VK_FUCHSIA_external_semaphore> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkImportSemaphoreZirconHandleFUCHSIA &&
+ "Function <vkImportSemaphoreZirconHandleFUCHSIA> needs extension <VK_FUCHSIA_external_semaphore> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkImportSemaphoreZirconHandleFUCHSIA(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( &importSemaphoreZirconHandleInfo ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" );
- }
+ VkResult result = getDispatcher()->vkImportSemaphoreZirconHandleFUCHSIA(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( &importSemaphoreZirconHandleInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE zx_handle_t Device::getSemaphoreZirconHandleFUCHSIA(
- const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE zx_handle_t
+ Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetSemaphoreZirconHandleFUCHSIA &&
- "Function <vkGetSemaphoreZirconHandleFUCHSIA> needs extension <VK_FUCHSIA_external_semaphore> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreZirconHandleFUCHSIA &&
+ "Function <vkGetSemaphoreZirconHandleFUCHSIA> needs extension <VK_FUCHSIA_external_semaphore> enabled!" );
+
+ zx_handle_t zirconHandle;
+ VkResult result = getDispatcher()->vkGetSemaphoreZirconHandleFUCHSIA(
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" );
- zx_handle_t zirconHandle;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetSemaphoreZirconHandleFUCHSIA(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ),
- &zirconHandle ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" );
- }
return zirconHandle;
}
# endif /*VK_USE_PLATFORM_FUCHSIA*/
@@ -18290,66 +17597,48 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_FUCHSIA_buffer_collection ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA
- Device::createBufferCollectionFUCHSIA(
- VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ Device::createBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA( *this, createInfo, allocator );
}
- VULKAN_HPP_INLINE void BufferCollectionFUCHSIA::setImageConstraints(
- const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo ) const
+ VULKAN_HPP_INLINE void BufferCollectionFUCHSIA::setImageConstraints( const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkSetBufferCollectionImageConstraintsFUCHSIA &&
- "Function <vkSetBufferCollectionImageConstraintsFUCHSIA> needs extension <VK_FUCHSIA_buffer_collection> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkSetBufferCollectionImageConstraintsFUCHSIA &&
+ "Function <vkSetBufferCollectionImageConstraintsFUCHSIA> needs extension <VK_FUCHSIA_buffer_collection> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkSetBufferCollectionImageConstraintsFUCHSIA(
- static_cast<VkDevice>( m_device ),
- static_cast<VkBufferCollectionFUCHSIA>( m_collection ),
- reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( &imageConstraintsInfo ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::setImageConstraints" );
- }
+ VkResult result =
+ getDispatcher()->vkSetBufferCollectionImageConstraintsFUCHSIA( static_cast<VkDevice>( m_device ),
+ static_cast<VkBufferCollectionFUCHSIA>( m_collection ),
+ reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( &imageConstraintsInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::setImageConstraints" );
}
- VULKAN_HPP_INLINE void BufferCollectionFUCHSIA::setBufferConstraints(
- const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo ) const
+ VULKAN_HPP_INLINE void
+ BufferCollectionFUCHSIA::setBufferConstraints( const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkSetBufferCollectionBufferConstraintsFUCHSIA &&
- "Function <vkSetBufferCollectionBufferConstraintsFUCHSIA> needs extension <VK_FUCHSIA_buffer_collection> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkSetBufferCollectionBufferConstraintsFUCHSIA &&
+ "Function <vkSetBufferCollectionBufferConstraintsFUCHSIA> needs extension <VK_FUCHSIA_buffer_collection> enabled!" );
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkSetBufferCollectionBufferConstraintsFUCHSIA(
- static_cast<VkDevice>( m_device ),
- static_cast<VkBufferCollectionFUCHSIA>( m_collection ),
- reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( &bufferConstraintsInfo ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::setBufferConstraints" );
- }
+ VkResult result =
+ getDispatcher()->vkSetBufferCollectionBufferConstraintsFUCHSIA( static_cast<VkDevice>( m_device ),
+ static_cast<VkBufferCollectionFUCHSIA>( m_collection ),
+ reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( &bufferConstraintsInfo ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::setBufferConstraints" );
}
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA
- BufferCollectionFUCHSIA::getProperties() const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA BufferCollectionFUCHSIA::getProperties() const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetBufferCollectionPropertiesFUCHSIA &&
- "Function <vkGetBufferCollectionPropertiesFUCHSIA> needs extension <VK_FUCHSIA_buffer_collection> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferCollectionPropertiesFUCHSIA &&
+ "Function <vkGetBufferCollectionPropertiesFUCHSIA> needs extension <VK_FUCHSIA_buffer_collection> enabled!" );
VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA properties;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetBufferCollectionPropertiesFUCHSIA(
- static_cast<VkDevice>( m_device ),
- static_cast<VkBufferCollectionFUCHSIA>( m_collection ),
- reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( &properties ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::getProperties" );
- }
+ VkResult result = getDispatcher()->vkGetBufferCollectionPropertiesFUCHSIA( static_cast<VkDevice>( m_device ),
+ static_cast<VkBufferCollectionFUCHSIA>( m_collection ),
+ reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( &properties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::getProperties" );
+
return properties;
}
# endif /*VK_USE_PLATFORM_FUCHSIA*/
@@ -18359,23 +17648,17 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::Result, VULKAN_HPP_NAMESPACE::Extent2D>
RenderPass::getSubpassShadingMaxWorkgroupSizeHUAWEI() const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI &&
- "Function <vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI> needs extension <VK_HUAWEI_subpass_shading> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI &&
+ "Function <vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI> needs extension <VK_HUAWEI_subpass_shading> enabled!" );
VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
- static_cast<VkDevice>( m_device ),
- static_cast<VkRenderPass>( m_renderPass ),
- reinterpret_cast<VkExtent2D *>( &maxWorkgroupSize ) ) );
- if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
- ( result != VULKAN_HPP_NAMESPACE::Result::eIncomplete ) )
- {
- throwResultException( result,
- VULKAN_HPP_NAMESPACE_STRING "::RenderPass::getSubpassShadingMaxWorkgroupSizeHUAWEI" );
- }
- return std::make_pair( result, maxWorkgroupSize );
+ VkResult result = getDispatcher()->vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
+ static_cast<VkDevice>( m_device ), static_cast<VkRenderPass>( m_renderPass ), reinterpret_cast<VkExtent2D *>( &maxWorkgroupSize ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::RenderPass::getSubpassShadingMaxWorkgroupSizeHUAWEI",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } );
+
+ return std::make_pair( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), maxWorkgroupSize );
}
VULKAN_HPP_INLINE void CommandBuffer::subpassShadingHUAWEI() const VULKAN_HPP_NOEXCEPT
@@ -18388,281 +17671,942 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_HUAWEI_invocation_mask ===
- VULKAN_HPP_INLINE void
- CommandBuffer::bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView,
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView,
+ VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdBindInvocationMaskHUAWEI &&
- "Function <vkCmdBindInvocationMaskHUAWEI> needs extension <VK_HUAWEI_invocation_mask> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindInvocationMaskHUAWEI &&
+ "Function <vkCmdBindInvocationMaskHUAWEI> needs extension <VK_HUAWEI_invocation_mask> enabled!" );
- getDispatcher()->vkCmdBindInvocationMaskHUAWEI( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkImageView>( imageView ),
- static_cast<VkImageLayout>( imageLayout ) );
+ getDispatcher()->vkCmdBindInvocationMaskHUAWEI(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
}
//=== VK_NV_external_memory_rdma ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::RemoteAddressNV Device::getMemoryRemoteAddressNV(
- const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::RemoteAddressNV
+ Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetMemoryRemoteAddressNV &&
- "Function <vkGetMemoryRemoteAddressNV> needs extension <VK_NV_external_memory_rdma> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryRemoteAddressNV &&
+ "Function <vkGetMemoryRemoteAddressNV> needs extension <VK_NV_external_memory_rdma> enabled!" );
VULKAN_HPP_NAMESPACE::RemoteAddressNV address;
- VULKAN_HPP_NAMESPACE::Result result =
- static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetMemoryRemoteAddressNV(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( &memoryGetRemoteAddressInfo ),
- reinterpret_cast<VkRemoteAddressNV *>( &address ) ) );
- if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
- {
- throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" );
- }
+ VkResult result = getDispatcher()->vkGetMemoryRemoteAddressNV( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( &memoryGetRemoteAddressInfo ),
+ reinterpret_cast<VkRemoteAddressNV *>( &address ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" );
+
return address;
}
+ //=== VK_EXT_pipeline_properties ===
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::BaseOutStructure
+ Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo ) const
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelinePropertiesEXT &&
+ "Function <vkGetPipelinePropertiesEXT> needs extension <VK_EXT_pipeline_properties> enabled!" );
+
+ VULKAN_HPP_NAMESPACE::BaseOutStructure pipelineProperties;
+ VkResult result = getDispatcher()->vkGetPipelinePropertiesEXT( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkPipelineInfoEXT *>( &pipelineInfo ),
+ reinterpret_cast<VkBaseOutStructure *>( &pipelineProperties ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelinePropertiesEXT" );
+
+ return pipelineProperties;
+ }
+
//=== VK_EXT_extended_dynamic_state2 ===
- VULKAN_HPP_INLINE void
- CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdSetPatchControlPointsEXT &&
- "Function <vkCmdSetPatchControlPointsEXT> needs extension <VK_EXT_extended_dynamic_state2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPatchControlPointsEXT &&
+ "Function <vkCmdSetPatchControlPointsEXT> needs extension <VK_EXT_extended_dynamic_state2> enabled!" );
- getDispatcher()->vkCmdSetPatchControlPointsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
- patchControlPoints );
+ getDispatcher()->vkCmdSetPatchControlPointsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), patchControlPoints );
}
- VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnableEXT(
- VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT &&
- "Function <vkCmdSetRasterizerDiscardEnableEXT> needs extension <VK_EXT_extended_dynamic_state2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT &&
+ "Function <vkCmdSetRasterizerDiscardEnableEXT> needs extension <VK_EXT_extended_dynamic_state2> enabled!" );
- getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkBool32>( rasterizerDiscardEnable ) );
+ getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( rasterizerDiscardEnable ) );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdSetDepthBiasEnableEXT &&
- "Function <vkCmdSetDepthBiasEnableEXT> needs extension <VK_EXT_extended_dynamic_state2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBiasEnableEXT &&
+ "Function <vkCmdSetDepthBiasEnableEXT> needs extension <VK_EXT_extended_dynamic_state2> enabled!" );
- getDispatcher()->vkCmdSetDepthBiasEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkBool32>( depthBiasEnable ) );
+ getDispatcher()->vkCmdSetDepthBiasEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBiasEnable ) );
}
- VULKAN_HPP_INLINE void
- CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEXT &&
- "Function <vkCmdSetLogicOpEXT> needs extension <VK_EXT_extended_dynamic_state2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEXT && "Function <vkCmdSetLogicOpEXT> needs extension <VK_EXT_extended_dynamic_state2> enabled!" );
- getDispatcher()->vkCmdSetLogicOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkLogicOp>( logicOp ) );
+ getDispatcher()->vkCmdSetLogicOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkLogicOp>( logicOp ) );
}
- VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT(
- VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT &&
- "Function <vkCmdSetPrimitiveRestartEnableEXT> needs extension <VK_EXT_extended_dynamic_state2> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT &&
+ "Function <vkCmdSetPrimitiveRestartEnableEXT> needs extension <VK_EXT_extended_dynamic_state2> enabled!" );
- getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
- static_cast<VkBool32>( primitiveRestartEnable ) );
+ getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( primitiveRestartEnable ) );
}
# if defined( VK_USE_PLATFORM_SCREEN_QNX )
//=== VK_QNX_screen_surface ===
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createScreenSurfaceQNX(
- VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo,
- VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+ Instance::createScreenSurfaceQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
- PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex,
- struct _screen_window & window ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, struct _screen_window & window ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetPhysicalDeviceScreenPresentationSupportQNX &&
- "Function <vkGetPhysicalDeviceScreenPresentationSupportQNX> needs extension <VK_QNX_screen_surface> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceScreenPresentationSupportQNX &&
+ "Function <vkGetPhysicalDeviceScreenPresentationSupportQNX> needs extension <VK_QNX_screen_surface> enabled!" );
- return static_cast<VULKAN_HPP_NAMESPACE::Bool32>(
- getDispatcher()->vkGetPhysicalDeviceScreenPresentationSupportQNX(
- static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &window ) );
+ VkBool32 result =
+ getDispatcher()->vkGetPhysicalDeviceScreenPresentationSupportQNX( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &window );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
}
# endif /*VK_USE_PLATFORM_SCREEN_QNX*/
//=== VK_EXT_color_write_enable ===
VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkCmdSetColorWriteEnableEXT &&
- "Function <vkCmdSetColorWriteEnableEXT> needs extension <VK_EXT_color_write_enable> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorWriteEnableEXT &&
+ "Function <vkCmdSetColorWriteEnableEXT> needs extension <VK_EXT_color_write_enable> enabled!" );
+
+ getDispatcher()->vkCmdSetColorWriteEnableEXT(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), colorWriteEnables.size(), reinterpret_cast<const VkBool32 *>( colorWriteEnables.data() ) );
+ }
+
+ //=== VK_KHR_ray_tracing_maintenance1 ===
+
+ VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysIndirect2KHR &&
+ "Function <vkCmdTraceRaysIndirect2KHR> needs extension <VK_KHR_ray_tracing_maintenance1> enabled!" );
- getDispatcher()->vkCmdSetColorWriteEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
- colorWriteEnables.size(),
- reinterpret_cast<const VkBool32 *>( colorWriteEnables.data() ) );
+ getDispatcher()->vkCmdTraceRaysIndirect2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
}
//=== VK_EXT_multi_draw ===
VULKAN_HPP_INLINE void
- CommandBuffer::drawMultiEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo,
- uint32_t instanceCount,
- uint32_t firstInstance,
- uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+ CommandBuffer::drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo,
+ uint32_t instanceCount,
+ uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMultiEXT &&
- "Function <vkCmdDrawMultiEXT> needs extension <VK_EXT_multi_draw> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMultiEXT && "Function <vkCmdDrawMultiEXT> needs extension <VK_EXT_multi_draw> enabled!" );
getDispatcher()->vkCmdDrawMultiEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
vertexInfo.size(),
reinterpret_cast<const VkMultiDrawInfoEXT *>( vertexInfo.data() ),
instanceCount,
firstInstance,
- stride );
+ vertexInfo.stride() );
}
- VULKAN_HPP_INLINE void CommandBuffer::drawMultiIndexedEXT(
- ArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo,
- uint32_t instanceCount,
- uint32_t firstInstance,
- uint32_t stride,
- Optional<const int32_t> vertexOffset ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void
+ CommandBuffer::drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo,
+ uint32_t instanceCount,
+ uint32_t firstInstance,
+ Optional<const int32_t> vertexOffset ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMultiIndexedEXT &&
- "Function <vkCmdDrawMultiIndexedEXT> needs extension <VK_EXT_multi_draw> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMultiIndexedEXT && "Function <vkCmdDrawMultiIndexedEXT> needs extension <VK_EXT_multi_draw> enabled!" );
- getDispatcher()->vkCmdDrawMultiIndexedEXT(
- static_cast<VkCommandBuffer>( m_commandBuffer ),
- indexInfo.size(),
- reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( indexInfo.data() ),
- instanceCount,
- firstInstance,
- stride,
- static_cast<const int32_t *>( vertexOffset ) );
+ getDispatcher()->vkCmdDrawMultiIndexedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ indexInfo.size(),
+ reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( indexInfo.data() ),
+ instanceCount,
+ firstInstance,
+ indexInfo.stride(),
+ static_cast<const int32_t *>( vertexOffset ) );
+ }
+
+ //=== VK_EXT_opacity_micromap ===
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::MicromapEXT
+ Device::createMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ {
+ return VULKAN_HPP_RAII_NAMESPACE::MicromapEXT( *this, createInfo, allocator );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT(
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildMicromapsEXT && "Function <vkCmdBuildMicromapsEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+ getDispatcher()->vkCmdBuildMicromapsEXT(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) );
+ }
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
+ Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos ) const
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkBuildMicromapsEXT && "Function <vkBuildMicromapsEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+ VkResult result = getDispatcher()->vkBuildMicromapsEXT( static_cast<VkDevice>( m_device ),
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ infos.size(),
+ reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::buildMicromapsEXT",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+ }
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMicromapEXT && "Function <vkCopyMicromapEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+ VkResult result = getDispatcher()->vkCopyMicromapEXT(
+ static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapEXT",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+ }
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
+ Device::copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMicromapToMemoryEXT &&
+ "Function <vkCopyMicromapToMemoryEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+ VkResult result = getDispatcher()->vkCopyMicromapToMemoryEXT( static_cast<VkDevice>( m_device ),
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapToMemoryEXT",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+ }
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
+ Device::copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+ const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMemoryToMicromapEXT &&
+ "Function <vkCopyMemoryToMicromapEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+ VkResult result = getDispatcher()->vkCopyMemoryToMicromapEXT( static_cast<VkDevice>( m_device ),
+ static_cast<VkDeferredOperationKHR>( deferredOperation ),
+ reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+ VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToMicromapEXT",
+ { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+ VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
+ VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+ return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+ }
+
+ template <typename DataType>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<DataType>
+ Device::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t dataSize,
+ size_t stride ) const
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkWriteMicromapsPropertiesEXT &&
+ "Function <vkWriteMicromapsPropertiesEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+ VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+ std::vector<DataType> data( dataSize / sizeof( DataType ) );
+ VkResult result = getDispatcher()->vkWriteMicromapsPropertiesEXT( static_cast<VkDevice>( m_device ),
+ micromaps.size(),
+ reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
+ static_cast<VkQueryType>( queryType ),
+ data.size() * sizeof( DataType ),
+ reinterpret_cast<void *>( data.data() ),
+ stride );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertiesEXT" );
+
+ return data;
+ }
+
+ template <typename DataType>
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType
+ Device::writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ size_t stride ) const
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkWriteMicromapsPropertiesEXT &&
+ "Function <vkWriteMicromapsPropertiesEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+ DataType data;
+ VkResult result = getDispatcher()->vkWriteMicromapsPropertiesEXT( static_cast<VkDevice>( m_device ),
+ micromaps.size(),
+ reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
+ static_cast<VkQueryType>( queryType ),
+ sizeof( DataType ),
+ reinterpret_cast<void *>( &data ),
+ stride );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertyEXT" );
+
+ return data;
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMicromapEXT && "Function <vkCmdCopyMicromapEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+ getDispatcher()->vkCmdCopyMicromapEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMicromapToMemoryEXT &&
+ "Function <vkCmdCopyMicromapToMemoryEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+ getDispatcher()->vkCmdCopyMicromapToMemoryEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryToMicromapEXT &&
+ "Function <vkCmdCopyMemoryToMicromapEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+ getDispatcher()->vkCmdCopyMemoryToMicromapEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) );
+ }
+
+ VULKAN_HPP_INLINE void
+ CommandBuffer::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
+ VULKAN_HPP_NAMESPACE::QueryType queryType,
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+ uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteMicromapsPropertiesEXT &&
+ "Function <vkCmdWriteMicromapsPropertiesEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+ getDispatcher()->vkCmdWriteMicromapsPropertiesEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ micromaps.size(),
+ reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
+ static_cast<VkQueryType>( queryType ),
+ static_cast<VkQueryPool>( queryPool ),
+ firstQuery );
+ }
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
+ Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceMicromapCompatibilityEXT &&
+ "Function <vkGetDeviceMicromapCompatibilityEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+ VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility;
+ getDispatcher()->vkGetDeviceMicromapCompatibilityEXT( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkMicromapVersionInfoEXT *>( &versionInfo ),
+ reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
+
+ return compatibility;
+ }
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT
+ Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
+ const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetMicromapBuildSizesEXT &&
+ "Function <vkGetMicromapBuildSizesEXT> needs extension <VK_EXT_opacity_micromap> enabled!" );
+
+ VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT sizeInfo;
+ getDispatcher()->vkGetMicromapBuildSizesEXT( static_cast<VkDevice>( m_device ),
+ static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
+ reinterpret_cast<const VkMicromapBuildInfoEXT *>( &buildInfo ),
+ reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( &sizeInfo ) );
+
+ return sizeInfo;
}
//=== VK_EXT_pageable_device_local_memory ===
VULKAN_HPP_INLINE void DeviceMemory::setPriorityEXT( float priority ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkSetDeviceMemoryPriorityEXT &&
- "Function <vkSetDeviceMemoryPriorityEXT> needs extension <VK_EXT_pageable_device_local_memory> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkSetDeviceMemoryPriorityEXT &&
+ "Function <vkSetDeviceMemoryPriorityEXT> needs extension <VK_EXT_pageable_device_local_memory> enabled!" );
- getDispatcher()->vkSetDeviceMemoryPriorityEXT(
- static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( m_memory ), priority );
+ getDispatcher()->vkSetDeviceMemoryPriorityEXT( static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( m_memory ), priority );
}
//=== VK_KHR_maintenance4 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
- Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const
- VULKAN_HPP_NOEXCEPT
+ Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR &&
- "Function <vkGetDeviceBufferMemoryRequirementsKHR> needs extension <VK_KHR_maintenance4> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR &&
+ "Function <vkGetDeviceBufferMemoryRequirementsKHR> needs extension <VK_KHR_maintenance4> enabled!" );
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
- getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return memoryRequirements;
}
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirementsKHR(
- const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR &&
- "Function <vkGetDeviceBufferMemoryRequirementsKHR> needs extension <VK_KHR_maintenance4> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR &&
+ "Function <vkGetDeviceBufferMemoryRequirementsKHR> needs extension <VK_KHR_maintenance4> enabled!" );
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
- structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
- getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+ getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
- Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const
- VULKAN_HPP_NOEXCEPT
+ Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR &&
- "Function <vkGetDeviceImageMemoryRequirementsKHR> needs extension <VK_KHR_maintenance4> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR &&
+ "Function <vkGetDeviceImageMemoryRequirementsKHR> needs extension <VK_KHR_maintenance4> enabled!" );
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
- getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return memoryRequirements;
}
template <typename X, typename Y, typename... Z>
- VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirementsKHR(
- const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
+ Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR &&
- "Function <vkGetDeviceImageMemoryRequirementsKHR> needs extension <VK_KHR_maintenance4> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR &&
+ "Function <vkGetDeviceImageMemoryRequirementsKHR> needs extension <VK_KHR_maintenance4> enabled!" );
StructureChain<X, Y, Z...> structureChain;
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
- structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
- getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
- reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+ getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
+ reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>
- Device::getImageSparseMemoryRequirementsKHR(
- const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
+ Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const
{
- VULKAN_HPP_ASSERT(
- getDispatcher()->vkGetDeviceImageSparseMemoryRequirementsKHR &&
- "Function <vkGetDeviceImageSparseMemoryRequirementsKHR> needs extension <VK_KHR_maintenance4> enabled!" );
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSparseMemoryRequirementsKHR &&
+ "Function <vkGetDeviceImageSparseMemoryRequirementsKHR> needs extension <VK_KHR_maintenance4> enabled!" );
- uint32_t sparseMemoryRequirementCount;
- getDispatcher()->vkGetDeviceImageSparseMemoryRequirementsKHR(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
- &sparseMemoryRequirementCount,
- nullptr );
- std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> sparseMemoryRequirements(
- sparseMemoryRequirementCount );
+ std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> sparseMemoryRequirements;
+ uint32_t sparseMemoryRequirementCount;
getDispatcher()->vkGetDeviceImageSparseMemoryRequirementsKHR(
- static_cast<VkDevice>( m_device ),
- reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
- &sparseMemoryRequirementCount,
- reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
- VULKAN_HPP_ASSERT( sparseMemoryRequirementCount == sparseMemoryRequirements.size() );
+ static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+ getDispatcher()->vkGetDeviceImageSparseMemoryRequirementsKHR( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
+ &sparseMemoryRequirementCount,
+ reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+
+ VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+ if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+ {
+ sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+ }
return sparseMemoryRequirements;
}
+ //=== VK_VALVE_descriptor_set_host_mapping ===
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE Device::getDescriptorSetLayoutHostMappingInfoVALVE(
+ const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutHostMappingInfoVALVE &&
+ "Function <vkGetDescriptorSetLayoutHostMappingInfoVALVE> needs extension <VK_VALVE_descriptor_set_host_mapping> enabled!" );
+
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE hostMapping;
+ getDispatcher()->vkGetDescriptorSetLayoutHostMappingInfoVALVE( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( &bindingReference ),
+ reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( &hostMapping ) );
+
+ return hostMapping;
+ }
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * DescriptorSet::getHostMappingVALVE() const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetHostMappingVALVE &&
+ "Function <vkGetDescriptorSetHostMappingVALVE> needs extension <VK_VALVE_descriptor_set_host_mapping> enabled!" );
+
+ void * pData;
+ getDispatcher()->vkGetDescriptorSetHostMappingVALVE( static_cast<VkDevice>( m_device ), static_cast<VkDescriptorSet>( m_descriptorSet ), &pData );
+
+ return pData;
+ }
+
+ //=== VK_EXT_extended_dynamic_state3 ===
+
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetTessellationDomainOriginEXT &&
+ "Function <vkCmdSetTessellationDomainOriginEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetTessellationDomainOriginEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkTessellationDomainOrigin>( domainOrigin ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClampEnableEXT &&
+ "Function <vkCmdSetDepthClampEnableEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetDepthClampEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthClampEnable ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPolygonModeEXT &&
+ "Function <vkCmdSetPolygonModeEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetPolygonModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPolygonMode>( polygonMode ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizationSamplesEXT &&
+ "Function <vkCmdSetRasterizationSamplesEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetRasterizationSamplesEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkSampleCountFlagBits>( rasterizationSamples ) );
+ }
+
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleMaskEXT &&
+ "Function <vkCmdSetSampleMaskEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetSampleMaskEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkSampleCountFlagBits>( samples ),
+ reinterpret_cast<const VkSampleMask *>( sampleMask.data() ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAlphaToCoverageEnableEXT &&
+ "Function <vkCmdSetAlphaToCoverageEnableEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetAlphaToCoverageEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( alphaToCoverageEnable ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAlphaToOneEnableEXT &&
+ "Function <vkCmdSetAlphaToOneEnableEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetAlphaToOneEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( alphaToOneEnable ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEnableEXT &&
+ "Function <vkCmdSetLogicOpEnableEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetLogicOpEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( logicOpEnable ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT(
+ uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendEnableEXT &&
+ "Function <vkCmdSetColorBlendEnableEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetColorBlendEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ firstAttachment,
+ colorBlendEnables.size(),
+ reinterpret_cast<const VkBool32 *>( colorBlendEnables.data() ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEquationEXT(
+ uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendEquationEXT &&
+ "Function <vkCmdSetColorBlendEquationEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetColorBlendEquationEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ firstAttachment,
+ colorBlendEquations.size(),
+ reinterpret_cast<const VkColorBlendEquationEXT *>( colorBlendEquations.data() ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setColorWriteMaskEXT(
+ uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> const & colorWriteMasks ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorWriteMaskEXT &&
+ "Function <vkCmdSetColorWriteMaskEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetColorWriteMaskEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ firstAttachment,
+ colorWriteMasks.size(),
+ reinterpret_cast<const VkColorComponentFlags *>( colorWriteMasks.data() ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setRasterizationStreamEXT( uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizationStreamEXT &&
+ "Function <vkCmdSetRasterizationStreamEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetRasterizationStreamEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), rasterizationStream );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setConservativeRasterizationModeEXT(
+ VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetConservativeRasterizationModeEXT &&
+ "Function <vkCmdSetConservativeRasterizationModeEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetConservativeRasterizationModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkConservativeRasterizationModeEXT>( conservativeRasterizationMode ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetExtraPrimitiveOverestimationSizeEXT &&
+ "Function <vkCmdSetExtraPrimitiveOverestimationSizeEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetExtraPrimitiveOverestimationSizeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), extraPrimitiveOverestimationSize );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClipEnableEXT &&
+ "Function <vkCmdSetDepthClipEnableEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetDepthClipEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthClipEnable ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleLocationsEnableEXT &&
+ "Function <vkCmdSetSampleLocationsEnableEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetSampleLocationsEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( sampleLocationsEnable ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setColorBlendAdvancedEXT(
+ uint32_t firstAttachment,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT> const & colorBlendAdvanced ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendAdvancedEXT &&
+ "Function <vkCmdSetColorBlendAdvancedEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetColorBlendAdvancedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ firstAttachment,
+ colorBlendAdvanced.size(),
+ reinterpret_cast<const VkColorBlendAdvancedEXT *>( colorBlendAdvanced.data() ) );
+ }
+
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetProvokingVertexModeEXT &&
+ "Function <vkCmdSetProvokingVertexModeEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetProvokingVertexModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkProvokingVertexModeEXT>( provokingVertexMode ) );
+ }
+
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineRasterizationModeEXT &&
+ "Function <vkCmdSetLineRasterizationModeEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetLineRasterizationModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkLineRasterizationModeEXT>( lineRasterizationMode ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStippleEnableEXT &&
+ "Function <vkCmdSetLineStippleEnableEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetLineStippleEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stippledLineEnable ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClipNegativeOneToOneEXT &&
+ "Function <vkCmdSetDepthClipNegativeOneToOneEXT> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetDepthClipNegativeOneToOneEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( negativeOneToOne ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWScalingEnableNV &&
+ "Function <vkCmdSetViewportWScalingEnableNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetViewportWScalingEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( viewportWScalingEnable ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setViewportSwizzleNV(
+ uint32_t firstViewport,
+ VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportSwizzleNV &&
+ "Function <vkCmdSetViewportSwizzleNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetViewportSwizzleNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ firstViewport,
+ viewportSwizzles.size(),
+ reinterpret_cast<const VkViewportSwizzleNV *>( viewportSwizzles.data() ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageToColorEnableNV &&
+ "Function <vkCmdSetCoverageToColorEnableNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetCoverageToColorEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( coverageToColorEnable ) );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorLocationNV( uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageToColorLocationNV &&
+ "Function <vkCmdSetCoverageToColorLocationNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetCoverageToColorLocationNV( static_cast<VkCommandBuffer>( m_commandBuffer ), coverageToColorLocation );
+ }
+
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationModeNV &&
+ "Function <vkCmdSetCoverageModulationModeNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetCoverageModulationModeNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkCoverageModulationModeNV>( coverageModulationMode ) );
+ }
+
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationTableEnableNV &&
+ "Function <vkCmdSetCoverageModulationTableEnableNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetCoverageModulationTableEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkBool32>( coverageModulationTableEnable ) );
+ }
+
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationTableNV &&
+ "Function <vkCmdSetCoverageModulationTableNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetCoverageModulationTableNV(
+ static_cast<VkCommandBuffer>( m_commandBuffer ), coverageModulationTable.size(), coverageModulationTable.data() );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetShadingRateImageEnableNV &&
+ "Function <vkCmdSetShadingRateImageEnableNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetShadingRateImageEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( shadingRateImageEnable ) );
+ }
+
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRepresentativeFragmentTestEnableNV &&
+ "Function <vkCmdSetRepresentativeFragmentTestEnableNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetRepresentativeFragmentTestEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkBool32>( representativeFragmentTestEnable ) );
+ }
+
+ VULKAN_HPP_INLINE void
+ CommandBuffer::setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageReductionModeNV &&
+ "Function <vkCmdSetCoverageReductionModeNV> needs extension <VK_EXT_extended_dynamic_state3> enabled!" );
+
+ getDispatcher()->vkCmdSetCoverageReductionModeNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkCoverageReductionModeNV>( coverageReductionMode ) );
+ }
+
+ //=== VK_EXT_shader_module_identifier ===
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT ShaderModule::getIdentifierEXT() const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderModuleIdentifierEXT &&
+ "Function <vkGetShaderModuleIdentifierEXT> needs extension <VK_EXT_shader_module_identifier> enabled!" );
+
+ VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier;
+ getDispatcher()->vkGetShaderModuleIdentifierEXT(
+ static_cast<VkDevice>( m_device ), static_cast<VkShaderModule>( m_shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) );
+
+ return identifier;
+ }
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT
+ Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderModuleCreateInfoIdentifierEXT &&
+ "Function <vkGetShaderModuleCreateInfoIdentifierEXT> needs extension <VK_EXT_shader_module_identifier> enabled!" );
+
+ VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier;
+ getDispatcher()->vkGetShaderModuleCreateInfoIdentifierEXT( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
+ reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) );
+
+ return identifier;
+ }
+
+ //=== VK_NV_optical_flow ===
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>
+ PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo ) const
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceOpticalFlowImageFormatsNV &&
+ "Function <vkGetPhysicalDeviceOpticalFlowImageFormatsNV> needs extension <VK_NV_optical_flow> enabled!" );
+
+ std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV> imageFormatProperties;
+ uint32_t formatCount;
+ VkResult result;
+ do
+ {
+ result = getDispatcher()->vkGetPhysicalDeviceOpticalFlowImageFormatsNV(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ),
+ &formatCount,
+ nullptr );
+ if ( ( result == VK_SUCCESS ) && formatCount )
+ {
+ imageFormatProperties.resize( formatCount );
+ result = getDispatcher()->vkGetPhysicalDeviceOpticalFlowImageFormatsNV(
+ static_cast<VkPhysicalDevice>( m_physicalDevice ),
+ reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ),
+ &formatCount,
+ reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) );
+ }
+ } while ( result == VK_INCOMPLETE );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" );
+ VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() );
+ if ( formatCount < imageFormatProperties.size() )
+ {
+ imageFormatProperties.resize( formatCount );
+ }
+ return imageFormatProperties;
+ }
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV
+ Device::createOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo,
+ VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+ {
+ return VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV( *this, createInfo, allocator );
+ }
+
+ VULKAN_HPP_INLINE void OpticalFlowSessionNV::bindImage( VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint,
+ VULKAN_HPP_NAMESPACE::ImageView view,
+ VULKAN_HPP_NAMESPACE::ImageLayout layout ) const
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkBindOpticalFlowSessionImageNV &&
+ "Function <vkBindOpticalFlowSessionImageNV> needs extension <VK_NV_optical_flow> enabled!" );
+
+ VkResult result = getDispatcher()->vkBindOpticalFlowSessionImageNV( static_cast<VkDevice>( m_device ),
+ static_cast<VkOpticalFlowSessionNV>( m_session ),
+ static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ),
+ static_cast<VkImageView>( view ),
+ static_cast<VkImageLayout>( layout ) );
+ resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::OpticalFlowSessionNV::bindImage" );
+ }
+
+ VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+ const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdOpticalFlowExecuteNV && "Function <vkCmdOpticalFlowExecuteNV> needs extension <VK_NV_optical_flow> enabled!" );
+
+ getDispatcher()->vkCmdOpticalFlowExecuteNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+ static_cast<VkOpticalFlowSessionNV>( session ),
+ reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( &executeInfo ) );
+ }
+
+ //=== VK_QCOM_tile_properties ===
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM> Framebuffer::getTilePropertiesQCOM() const
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetFramebufferTilePropertiesQCOM &&
+ "Function <vkGetFramebufferTilePropertiesQCOM> needs extension <VK_QCOM_tile_properties> enabled!" );
+
+ std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM> properties;
+ uint32_t propertiesCount;
+ VkResult result;
+ do
+ {
+ result = getDispatcher()->vkGetFramebufferTilePropertiesQCOM(
+ static_cast<VkDevice>( m_device ), static_cast<VkFramebuffer>( m_framebuffer ), &propertiesCount, nullptr );
+ if ( ( result == VK_SUCCESS ) && propertiesCount )
+ {
+ properties.resize( propertiesCount );
+ result = getDispatcher()->vkGetFramebufferTilePropertiesQCOM( static_cast<VkDevice>( m_device ),
+ static_cast<VkFramebuffer>( m_framebuffer ),
+ &propertiesCount,
+ reinterpret_cast<VkTilePropertiesQCOM *>( properties.data() ) );
+ }
+ } while ( result == VK_INCOMPLETE );
+
+ VULKAN_HPP_ASSERT( propertiesCount <= properties.size() );
+ if ( propertiesCount < properties.size() )
+ {
+ properties.resize( propertiesCount );
+ }
+ return properties;
+ }
+
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::TilePropertiesQCOM
+ Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkGetDynamicRenderingTilePropertiesQCOM &&
+ "Function <vkGetDynamicRenderingTilePropertiesQCOM> needs extension <VK_QCOM_tile_properties> enabled!" );
+
+ VULKAN_HPP_NAMESPACE::TilePropertiesQCOM properties;
+ getDispatcher()->vkGetDynamicRenderingTilePropertiesQCOM( static_cast<VkDevice>( m_device ),
+ reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ),
+ reinterpret_cast<VkTilePropertiesQCOM *>( &properties ) );
+
+ return properties;
+ }
+
#endif
} // namespace VULKAN_HPP_RAII_NAMESPACE
} // namespace VULKAN_HPP_NAMESPACE
diff --git a/thirdparty/vulkan/include/vulkan/vulkan_static_assertions.hpp b/thirdparty/vulkan/include/vulkan/vulkan_static_assertions.hpp
new file mode 100644
index 0000000000..b6a1e9a798
--- /dev/null
+++ b/thirdparty/vulkan/include/vulkan/vulkan_static_assertions.hpp
@@ -0,0 +1,6105 @@
+// Copyright 2015-2022 The Khronos Group Inc.
+//
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_STRUCTS_HPP
+#define VULKAN_STRUCTS_HPP
+
+#include <vulkan/vulkan.hpp>
+
+//=========================
+//=== static_assertions ===
+//=========================
+
+//=== VK_VERSION_1_0 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Extent2D ) == sizeof( VkExtent2D ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Extent2D>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Extent2D>::value, "Extent2D is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Extent3D ) == sizeof( VkExtent3D ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Extent3D>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Extent3D>::value, "Extent3D is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Offset2D ) == sizeof( VkOffset2D ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Offset2D>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Offset2D>::value, "Offset2D is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Offset3D ) == sizeof( VkOffset3D ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Offset3D>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Offset3D>::value, "Offset3D is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Rect2D ) == sizeof( VkRect2D ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Rect2D>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Rect2D>::value, "Rect2D is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BaseInStructure ) == sizeof( VkBaseInStructure ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BaseInStructure>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BaseInStructure>::value,
+ "BaseInStructure is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BaseOutStructure ) == sizeof( VkBaseOutStructure ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BaseOutStructure>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BaseOutStructure>::value,
+ "BaseOutStructure is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier>::value,
+ "BufferMemoryBarrier is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand>::value,
+ "DispatchIndirectCommand is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand>::value,
+ "DrawIndexedIndirectCommand is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrawIndirectCommand>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrawIndirectCommand>::value,
+ "DrawIndirectCommand is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier>::value,
+ "ImageMemoryBarrier is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryBarrier>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryBarrier>::value, "MemoryBarrier is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne ) == sizeof( VkPipelineCacheHeaderVersionOne ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne>::value,
+ "PipelineCacheHeaderVersionOne is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AllocationCallbacks ) == sizeof( VkAllocationCallbacks ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AllocationCallbacks>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AllocationCallbacks>::value,
+ "AllocationCallbacks is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ApplicationInfo ) == sizeof( VkApplicationInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ApplicationInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ApplicationInfo>::value,
+ "ApplicationInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FormatProperties ) == sizeof( VkFormatProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FormatProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FormatProperties>::value,
+ "FormatProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatProperties ) == sizeof( VkImageFormatProperties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::value,
+ "ImageFormatProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Instance ) == sizeof( VkInstance ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Instance>::value, "Instance is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::InstanceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::InstanceCreateInfo>::value,
+ "InstanceCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryHeap ) == sizeof( VkMemoryHeap ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryHeap>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryHeap>::value, "MemoryHeap is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryType>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryType>::value, "MemoryType is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) == sizeof( VkPhysicalDevice ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevice>::value,
+ "PhysicalDevice is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures>::value,
+ "PhysicalDeviceFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits>::value,
+ "PhysicalDeviceLimits is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties>::value,
+ "PhysicalDeviceMemoryProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties>::value,
+ "PhysicalDeviceProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties>::value,
+ "PhysicalDeviceSparseProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyProperties>::value,
+ "QueueFamilyProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Device ) == sizeof( VkDevice ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Device>::value, "Device is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceCreateInfo>::value,
+ "DeviceCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo>::value,
+ "DeviceQueueCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExtensionProperties ) == sizeof( VkExtensionProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExtensionProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExtensionProperties>::value,
+ "ExtensionProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LayerProperties ) == sizeof( VkLayerProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::LayerProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::LayerProperties>::value,
+ "LayerProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Queue ) == sizeof( VkQueue ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Queue>::value, "Queue is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubmitInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubmitInfo>::value, "SubmitInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MappedMemoryRange>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MappedMemoryRange>::value,
+ "MappedMemoryRange is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryAllocateInfo>::value,
+ "MemoryAllocateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemory ) == sizeof( VkDeviceMemory ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceMemory>::value, "DeviceMemory is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryRequirements>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryRequirements>::value,
+ "MemoryRequirements is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindSparseInfo ) == sizeof( VkBindSparseInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindSparseInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindSparseInfo>::value,
+ "BindSparseInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresource ) == sizeof( VkImageSubresource ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresource>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresource>::value,
+ "ImageSubresource is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo ) == sizeof( VkSparseBufferMemoryBindInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo>::value,
+ "SparseBufferMemoryBindInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>::value,
+ "SparseImageFormatProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageMemoryBind>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageMemoryBind>::value,
+ "SparseImageMemoryBind is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo ) == sizeof( VkSparseImageMemoryBindInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo>::value,
+ "SparseImageMemoryBindInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements>::value,
+ "SparseImageMemoryRequirements is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo ) == sizeof( VkSparseImageOpaqueMemoryBindInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo>::value,
+ "SparseImageOpaqueMemoryBindInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseMemoryBind ) == sizeof( VkSparseMemoryBind ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseMemoryBind>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseMemoryBind>::value,
+ "SparseMemoryBind is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Fence ) == sizeof( VkFence ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Fence>::value, "Fence is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FenceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FenceCreateInfo>::value,
+ "FenceCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Semaphore ) == sizeof( VkSemaphore ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Semaphore>::value, "Semaphore is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo>::value,
+ "SemaphoreCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Event ) == sizeof( VkEvent ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Event>::value, "Event is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::EventCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::EventCreateInfo>::value,
+ "EventCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPool ) == sizeof( VkQueryPool ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueryPool>::value, "QueryPool is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo>::value,
+ "QueryPoolCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Buffer ) == sizeof( VkBuffer ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Buffer>::value, "Buffer is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCreateInfo>::value,
+ "BufferCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferView ) == sizeof( VkBufferView ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferView>::value, "BufferView is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo>::value,
+ "BufferViewCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Image ) == sizeof( VkImage ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Image>::value, "Image is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageCreateInfo>::value,
+ "ImageCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceLayout ) == sizeof( VkSubresourceLayout ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubresourceLayout>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubresourceLayout>::value,
+ "SubresourceLayout is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ComponentMapping ) == sizeof( VkComponentMapping ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ComponentMapping>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ComponentMapping>::value,
+ "ComponentMapping is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresourceRange>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresourceRange>::value,
+ "ImageSubresourceRange is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageView ) == sizeof( VkImageView ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageView>::value, "ImageView is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo>::value,
+ "ImageViewCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModule ) == sizeof( VkShaderModule ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderModule>::value, "ShaderModule is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo>::value,
+ "ShaderModuleCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCache ) == sizeof( VkPipelineCache ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCache>::value, "PipelineCache is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo>::value,
+ "PipelineCacheCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo>::value,
+ "ComputePipelineCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo>::value,
+ "GraphicsPipelineCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Pipeline ) == sizeof( VkPipeline ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Pipeline>::value, "Pipeline is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState>::value,
+ "PipelineColorBlendAttachmentState is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo ) == sizeof( VkPipelineColorBlendStateCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo>::value,
+ "PipelineColorBlendStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo ) == sizeof( VkPipelineDepthStencilStateCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo>::value,
+ "PipelineDepthStencilStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo>::value,
+ "PipelineDynamicStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo ) == sizeof( VkPipelineInputAssemblyStateCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo>::value,
+ "PipelineInputAssemblyStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo ) == sizeof( VkPipelineMultisampleStateCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo>::value,
+ "PipelineMultisampleStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo ) == sizeof( VkPipelineRasterizationStateCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo>::value,
+ "PipelineRasterizationStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo>::value,
+ "PipelineShaderStageCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo>::value,
+ "PipelineTessellationStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo>::value,
+ "PipelineVertexInputStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo ) == sizeof( VkPipelineViewportStateCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo>::value,
+ "PipelineViewportStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SpecializationInfo ) == sizeof( VkSpecializationInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SpecializationInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SpecializationInfo>::value,
+ "SpecializationInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SpecializationMapEntry>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SpecializationMapEntry>::value,
+ "SpecializationMapEntry is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::StencilOpState ) == sizeof( VkStencilOpState ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::StencilOpState>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::StencilOpState>::value,
+ "StencilOpState is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription ) == sizeof( VkVertexInputAttributeDescription ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription>::value,
+ "VertexInputAttributeDescription is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputBindingDescription ) == sizeof( VkVertexInputBindingDescription ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription>::value,
+ "VertexInputBindingDescription is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Viewport>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Viewport>::value, "Viewport is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineLayout ) == sizeof( VkPipelineLayout ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineLayout>::value,
+ "PipelineLayout is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo>::value,
+ "PipelineLayoutCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PushConstantRange ) == sizeof( VkPushConstantRange ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PushConstantRange>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PushConstantRange>::value,
+ "PushConstantRange is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Sampler ) == sizeof( VkSampler ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Sampler>::value, "Sampler is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerCreateInfo>::value,
+ "SamplerCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyDescriptorSet>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyDescriptorSet>::value,
+ "CopyDescriptorSet is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo>::value,
+ "DescriptorBufferInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorImageInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorImageInfo>::value,
+ "DescriptorImageInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPool ) == sizeof( VkDescriptorPool ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorPool>::value,
+ "DescriptorPool is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo>::value,
+ "DescriptorPoolCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorPoolSize>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorPoolSize>::value,
+ "DescriptorPoolSize is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSet ) == sizeof( VkDescriptorSet ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSet>::value, "DescriptorSet is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo>::value,
+ "DescriptorSetAllocateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::value,
+ "DescriptorSetLayout is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding>::value,
+ "DescriptorSetLayoutBinding is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo>::value,
+ "DescriptorSetLayoutCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::WriteDescriptorSet>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WriteDescriptorSet>::value,
+ "WriteDescriptorSet is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescription ) == sizeof( VkAttachmentDescription ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentDescription>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentDescription>::value,
+ "AttachmentDescription is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentReference ) == sizeof( VkAttachmentReference ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentReference>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentReference>::value,
+ "AttachmentReference is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Framebuffer ) == sizeof( VkFramebuffer ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Framebuffer>::value, "Framebuffer is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FramebufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FramebufferCreateInfo>::value,
+ "FramebufferCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPass ) == sizeof( VkRenderPass ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPass>::value, "RenderPass is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo>::value,
+ "RenderPassCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDependency>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDependency>::value,
+ "SubpassDependency is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDescription ) == sizeof( VkSubpassDescription ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDescription>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDescription>::value,
+ "SubpassDescription is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandPool ) == sizeof( VkCommandPool ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandPool>::value, "CommandPool is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo>::value,
+ "CommandPoolCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBuffer>::value, "CommandBuffer is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo>::value,
+ "CommandBufferAllocateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo>::value,
+ "CommandBufferBeginInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo>::value,
+ "CommandBufferInheritanceInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCopy ) == sizeof( VkBufferCopy ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCopy>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCopy>::value, "BufferCopy is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferImageCopy ) == sizeof( VkBufferImageCopy ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferImageCopy>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferImageCopy>::value,
+ "BufferImageCopy is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearAttachment ) == sizeof( VkClearAttachment ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ClearAttachment>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ClearAttachment>::value,
+ "ClearAttachment is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) == sizeof( VkClearColorValue ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ClearColorValue>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ClearColorValue>::value,
+ "ClearColorValue is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue ) == sizeof( VkClearDepthStencilValue ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue>::value,
+ "ClearDepthStencilValue is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearRect ) == sizeof( VkClearRect ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ClearRect>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ClearRect>::value, "ClearRect is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) == sizeof( VkClearValue ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ClearValue>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ClearValue>::value, "ClearValue is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageBlit>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageBlit>::value, "ImageBlit is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCopy>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageCopy>::value, "ImageCopy is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageResolve>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageResolve>::value, "ImageResolve is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers>::value,
+ "ImageSubresourceLayers is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo>::value,
+ "RenderPassBeginInfo is not nothrow_move_constructible!" );
+
+//=== VK_VERSION_1_1 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties ) == sizeof( VkPhysicalDeviceSubgroupProperties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties>::value,
+ "PhysicalDeviceSubgroupProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo ) == sizeof( VkBindBufferMemoryInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo>::value,
+ "BindBufferMemoryInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImageMemoryInfo ) == sizeof( VkBindImageMemoryInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo>::value,
+ "BindImageMemoryInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures ) == sizeof( VkPhysicalDevice16BitStorageFeatures ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures>::value,
+ "PhysicalDevice16BitStorageFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements ) == sizeof( VkMemoryDedicatedRequirements ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements>::value,
+ "MemoryDedicatedRequirements is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo ) == sizeof( VkMemoryDedicatedAllocateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo>::value,
+ "MemoryDedicatedAllocateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo ) == sizeof( VkMemoryAllocateFlagsInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo>::value,
+ "MemoryAllocateFlagsInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo ) == sizeof( VkDeviceGroupRenderPassBeginInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo>::value,
+ "DeviceGroupRenderPassBeginInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo>::value,
+ "DeviceGroupCommandBufferBeginInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo>::value,
+ "DeviceGroupSubmitInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo>::value,
+ "DeviceGroupBindSparseInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo ) == sizeof( VkBindBufferMemoryDeviceGroupInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo>::value,
+ "BindBufferMemoryDeviceGroupInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo ) == sizeof( VkBindImageMemoryDeviceGroupInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo>::value,
+ "BindImageMemoryDeviceGroupInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties ) == sizeof( VkPhysicalDeviceGroupProperties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>::value,
+ "PhysicalDeviceGroupProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo ) == sizeof( VkDeviceGroupDeviceCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo>::value,
+ "DeviceGroupDeviceCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 ) == sizeof( VkBufferMemoryRequirementsInfo2 ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2>::value,
+ "BufferMemoryRequirementsInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 ) == sizeof( VkImageMemoryRequirementsInfo2 ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2>::value,
+ "ImageMemoryRequirementsInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 ) == sizeof( VkImageSparseMemoryRequirementsInfo2 ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2>::value,
+ "ImageSparseMemoryRequirementsInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryRequirements2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryRequirements2>::value,
+ "MemoryRequirements2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 ) == sizeof( VkSparseImageMemoryRequirements2 ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value,
+ "SparseImageMemoryRequirements2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 ) == sizeof( VkPhysicalDeviceFeatures2 ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>::value,
+ "PhysicalDeviceFeatures2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 ) == sizeof( VkPhysicalDeviceProperties2 ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>::value,
+ "PhysicalDeviceProperties2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FormatProperties2 ) == sizeof( VkFormatProperties2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FormatProperties2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FormatProperties2>::value,
+ "FormatProperties2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatProperties2 ) == sizeof( VkImageFormatProperties2 ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::value,
+ "ImageFormatProperties2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 ) == sizeof( VkPhysicalDeviceImageFormatInfo2 ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2>::value,
+ "PhysicalDeviceImageFormatInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 ) == sizeof( VkQueueFamilyProperties2 ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>::value,
+ "QueueFamilyProperties2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 ) == sizeof( VkPhysicalDeviceMemoryProperties2 ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>::value,
+ "PhysicalDeviceMemoryProperties2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 ) == sizeof( VkSparseImageFormatProperties2 ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>::value,
+ "SparseImageFormatProperties2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 ) == sizeof( VkPhysicalDeviceSparseImageFormatInfo2 ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2>::value,
+ "PhysicalDeviceSparseImageFormatInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties ) == sizeof( VkPhysicalDevicePointClippingProperties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties>::value,
+ "PhysicalDevicePointClippingProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo ) == sizeof( VkRenderPassInputAttachmentAspectCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo>::value,
+ "RenderPassInputAttachmentAspectCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference ) == sizeof( VkInputAttachmentAspectReference ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference>::value,
+ "InputAttachmentAspectReference is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo ) == sizeof( VkImageViewUsageCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo>::value,
+ "ImageViewUsageCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo ) ==
+ sizeof( VkPipelineTessellationDomainOriginStateCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo>::value,
+ "PipelineTessellationDomainOriginStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo ) == sizeof( VkRenderPassMultiviewCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo>::value,
+ "RenderPassMultiviewCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures ) == sizeof( VkPhysicalDeviceMultiviewFeatures ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures>::value,
+ "PhysicalDeviceMultiviewFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties ) == sizeof( VkPhysicalDeviceMultiviewProperties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties>::value,
+ "PhysicalDeviceMultiviewProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures ) == sizeof( VkPhysicalDeviceVariablePointersFeatures ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures>::value,
+ "PhysicalDeviceVariablePointersFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures ) == sizeof( VkPhysicalDeviceProtectedMemoryFeatures ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures>::value,
+ "PhysicalDeviceProtectedMemoryFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties ) == sizeof( VkPhysicalDeviceProtectedMemoryProperties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties>::value,
+ "PhysicalDeviceProtectedMemoryProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceQueueInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceQueueInfo2>::value,
+ "DeviceQueueInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo ) == sizeof( VkProtectedSubmitInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo>::value,
+ "ProtectedSubmitInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo ) == sizeof( VkSamplerYcbcrConversionCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo>::value,
+ "SamplerYcbcrConversionCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo ) == sizeof( VkSamplerYcbcrConversionInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo>::value,
+ "SamplerYcbcrConversionInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo ) == sizeof( VkBindImagePlaneMemoryInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo>::value,
+ "BindImagePlaneMemoryInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo ) == sizeof( VkImagePlaneMemoryRequirementsInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo>::value,
+ "ImagePlaneMemoryRequirementsInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures ) ==
+ sizeof( VkPhysicalDeviceSamplerYcbcrConversionFeatures ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures>::value,
+ "PhysicalDeviceSamplerYcbcrConversionFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties ) ==
+ sizeof( VkSamplerYcbcrConversionImageFormatProperties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties>::value,
+ "SamplerYcbcrConversionImageFormatProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ) == sizeof( VkSamplerYcbcrConversion ),
+ "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::value,
+ "SamplerYcbcrConversion is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ),
+ "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::value,
+ "DescriptorUpdateTemplate is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry ) == sizeof( VkDescriptorUpdateTemplateEntry ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry>::value,
+ "DescriptorUpdateTemplateEntry is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo ) == sizeof( VkDescriptorUpdateTemplateCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo>::value,
+ "DescriptorUpdateTemplateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties ) == sizeof( VkExternalMemoryProperties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalMemoryProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalMemoryProperties>::value,
+ "ExternalMemoryProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo ) == sizeof( VkPhysicalDeviceExternalImageFormatInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo>::value,
+ "PhysicalDeviceExternalImageFormatInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties ) == sizeof( VkExternalImageFormatProperties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties>::value,
+ "ExternalImageFormatProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo ) == sizeof( VkPhysicalDeviceExternalBufferInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo>::value,
+ "PhysicalDeviceExternalBufferInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalBufferProperties ) == sizeof( VkExternalBufferProperties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalBufferProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalBufferProperties>::value,
+ "ExternalBufferProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties ) == sizeof( VkPhysicalDeviceIDProperties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties>::value,
+ "PhysicalDeviceIDProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo ) == sizeof( VkExternalMemoryImageCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo>::value,
+ "ExternalMemoryImageCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo ) == sizeof( VkExternalMemoryBufferCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo>::value,
+ "ExternalMemoryBufferCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo ) == sizeof( VkExportMemoryAllocateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo>::value,
+ "ExportMemoryAllocateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo ) == sizeof( VkPhysicalDeviceExternalFenceInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo>::value,
+ "PhysicalDeviceExternalFenceInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalFenceProperties ) == sizeof( VkExternalFenceProperties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalFenceProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalFenceProperties>::value,
+ "ExternalFenceProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo>::value,
+ "ExportFenceCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo ) == sizeof( VkExportSemaphoreCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo>::value,
+ "ExportSemaphoreCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo>::value,
+ "PhysicalDeviceExternalSemaphoreInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties ) == sizeof( VkExternalSemaphoreProperties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties>::value,
+ "ExternalSemaphoreProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties ) == sizeof( VkPhysicalDeviceMaintenance3Properties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties>::value,
+ "PhysicalDeviceMaintenance3Properties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport ) == sizeof( VkDescriptorSetLayoutSupport ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>::value,
+ "DescriptorSetLayoutSupport is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures ) == sizeof( VkPhysicalDeviceShaderDrawParametersFeatures ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures>::value,
+ "PhysicalDeviceShaderDrawParametersFeatures is not nothrow_move_constructible!" );
+
+//=== VK_VERSION_1_2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features ) == sizeof( VkPhysicalDeviceVulkan11Features ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features>::value,
+ "PhysicalDeviceVulkan11Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties ) == sizeof( VkPhysicalDeviceVulkan11Properties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties>::value,
+ "PhysicalDeviceVulkan11Properties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features ) == sizeof( VkPhysicalDeviceVulkan12Features ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features>::value,
+ "PhysicalDeviceVulkan12Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties ) == sizeof( VkPhysicalDeviceVulkan12Properties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties>::value,
+ "PhysicalDeviceVulkan12Properties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo ) == sizeof( VkImageFormatListCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo>::value,
+ "ImageFormatListCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 ) == sizeof( VkRenderPassCreateInfo2 ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2>::value,
+ "RenderPassCreateInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescription2 ) == sizeof( VkAttachmentDescription2 ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentDescription2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentDescription2>::value,
+ "AttachmentDescription2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentReference2 ) == sizeof( VkAttachmentReference2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentReference2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentReference2>::value,
+ "AttachmentReference2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDescription2 ) == sizeof( VkSubpassDescription2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDescription2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDescription2>::value,
+ "SubpassDescription2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDependency2 ) == sizeof( VkSubpassDependency2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDependency2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDependency2>::value,
+ "SubpassDependency2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassBeginInfo ) == sizeof( VkSubpassBeginInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassBeginInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassBeginInfo>::value,
+ "SubpassBeginInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassEndInfo ) == sizeof( VkSubpassEndInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassEndInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassEndInfo>::value,
+ "SubpassEndInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures ) == sizeof( VkPhysicalDevice8BitStorageFeatures ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures>::value,
+ "PhysicalDevice8BitStorageFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ConformanceVersion ) == sizeof( VkConformanceVersion ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ConformanceVersion>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ConformanceVersion>::value,
+ "ConformanceVersion is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties ) == sizeof( VkPhysicalDeviceDriverProperties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties>::value,
+ "PhysicalDeviceDriverProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features ) == sizeof( VkPhysicalDeviceShaderAtomicInt64Features ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features>::value,
+ "PhysicalDeviceShaderAtomicInt64Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features ) == sizeof( VkPhysicalDeviceShaderFloat16Int8Features ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features>::value,
+ "PhysicalDeviceShaderFloat16Int8Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties ) == sizeof( VkPhysicalDeviceFloatControlsProperties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties>::value,
+ "PhysicalDeviceFloatControlsProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo ) == sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo>::value,
+ "DescriptorSetLayoutBindingFlagsCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures ) == sizeof( VkPhysicalDeviceDescriptorIndexingFeatures ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures>::value,
+ "PhysicalDeviceDescriptorIndexingFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties ) == sizeof( VkPhysicalDeviceDescriptorIndexingProperties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties>::value,
+ "PhysicalDeviceDescriptorIndexingProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo ) ==
+ sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo>::value,
+ "DescriptorSetVariableDescriptorCountAllocateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport ) ==
+ sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupport ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport>::value,
+ "DescriptorSetVariableDescriptorCountLayoutSupport is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve ) == sizeof( VkSubpassDescriptionDepthStencilResolve ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve>::value,
+ "SubpassDescriptionDepthStencilResolve is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties ) ==
+ sizeof( VkPhysicalDeviceDepthStencilResolveProperties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties>::value,
+ "PhysicalDeviceDepthStencilResolveProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures ) == sizeof( VkPhysicalDeviceScalarBlockLayoutFeatures ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures>::value,
+ "PhysicalDeviceScalarBlockLayoutFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo ) == sizeof( VkImageStencilUsageCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo>::value,
+ "ImageStencilUsageCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo ) == sizeof( VkSamplerReductionModeCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo>::value,
+ "SamplerReductionModeCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties ) ==
+ sizeof( VkPhysicalDeviceSamplerFilterMinmaxProperties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties>::value,
+ "PhysicalDeviceSamplerFilterMinmaxProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures ) == sizeof( VkPhysicalDeviceVulkanMemoryModelFeatures ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures>::value,
+ "PhysicalDeviceVulkanMemoryModelFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures ) == sizeof( VkPhysicalDeviceImagelessFramebufferFeatures ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures>::value,
+ "PhysicalDeviceImagelessFramebufferFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo ) == sizeof( VkFramebufferAttachmentsCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo>::value,
+ "FramebufferAttachmentsCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo ) == sizeof( VkFramebufferAttachmentImageInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo>::value,
+ "FramebufferAttachmentImageInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo ) == sizeof( VkRenderPassAttachmentBeginInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo>::value,
+ "RenderPassAttachmentBeginInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures ) ==
+ sizeof( VkPhysicalDeviceUniformBufferStandardLayoutFeatures ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures>::value,
+ "PhysicalDeviceUniformBufferStandardLayoutFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures ) ==
+ sizeof( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures>::value,
+ "PhysicalDeviceShaderSubgroupExtendedTypesFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures ) ==
+ sizeof( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures>::value,
+ "PhysicalDeviceSeparateDepthStencilLayoutsFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout ) == sizeof( VkAttachmentReferenceStencilLayout ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout>::value,
+ "AttachmentReferenceStencilLayout is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout ) == sizeof( VkAttachmentDescriptionStencilLayout ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout>::value,
+ "AttachmentDescriptionStencilLayout is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures ) == sizeof( VkPhysicalDeviceHostQueryResetFeatures ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures>::value,
+ "PhysicalDeviceHostQueryResetFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures ) == sizeof( VkPhysicalDeviceTimelineSemaphoreFeatures ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures>::value,
+ "PhysicalDeviceTimelineSemaphoreFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties ) == sizeof( VkPhysicalDeviceTimelineSemaphoreProperties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties>::value,
+ "PhysicalDeviceTimelineSemaphoreProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo ) == sizeof( VkSemaphoreTypeCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo>::value,
+ "SemaphoreTypeCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo ) == sizeof( VkTimelineSemaphoreSubmitInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo>::value,
+ "TimelineSemaphoreSubmitInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo ) == sizeof( VkSemaphoreWaitInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo>::value,
+ "SemaphoreWaitInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo ) == sizeof( VkSemaphoreSignalInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo>::value,
+ "SemaphoreSignalInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeatures ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures>::value,
+ "PhysicalDeviceBufferDeviceAddressFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo ) == sizeof( VkBufferDeviceAddressInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo>::value,
+ "BufferDeviceAddressInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo ) == sizeof( VkBufferOpaqueCaptureAddressCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo>::value,
+ "BufferOpaqueCaptureAddressCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo ) == sizeof( VkMemoryOpaqueCaptureAddressAllocateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo>::value,
+ "MemoryOpaqueCaptureAddressAllocateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo ) == sizeof( VkDeviceMemoryOpaqueCaptureAddressInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo>::value,
+ "DeviceMemoryOpaqueCaptureAddressInfo is not nothrow_move_constructible!" );
+
+//=== VK_VERSION_1_3 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features ) == sizeof( VkPhysicalDeviceVulkan13Features ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features>::value,
+ "PhysicalDeviceVulkan13Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties ) == sizeof( VkPhysicalDeviceVulkan13Properties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties>::value,
+ "PhysicalDeviceVulkan13Properties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo ) == sizeof( VkPipelineCreationFeedbackCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo>::value,
+ "PipelineCreationFeedbackCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback ) == sizeof( VkPipelineCreationFeedback ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback>::value,
+ "PipelineCreationFeedback is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures ) ==
+ sizeof( VkPhysicalDeviceShaderTerminateInvocationFeatures ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures>::value,
+ "PhysicalDeviceShaderTerminateInvocationFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties ) == sizeof( VkPhysicalDeviceToolProperties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>::value,
+ "PhysicalDeviceToolProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures ) ==
+ sizeof( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures>::value,
+ "PhysicalDeviceShaderDemoteToHelperInvocationFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures ) == sizeof( VkPhysicalDevicePrivateDataFeatures ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures>::value,
+ "PhysicalDevicePrivateDataFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo ) == sizeof( VkDevicePrivateDataCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo>::value,
+ "DevicePrivateDataCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo ) == sizeof( VkPrivateDataSlotCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo>::value,
+ "PrivateDataSlotCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PrivateDataSlot ) == sizeof( VkPrivateDataSlot ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::value,
+ "PrivateDataSlot is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures ) ==
+ sizeof( VkPhysicalDevicePipelineCreationCacheControlFeatures ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures>::value,
+ "PhysicalDevicePipelineCreationCacheControlFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryBarrier2 ) == sizeof( VkMemoryBarrier2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryBarrier2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryBarrier2>::value,
+ "MemoryBarrier2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 ) == sizeof( VkBufferMemoryBarrier2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2>::value,
+ "BufferMemoryBarrier2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 ) == sizeof( VkImageMemoryBarrier2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2>::value,
+ "ImageMemoryBarrier2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DependencyInfo ) == sizeof( VkDependencyInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DependencyInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DependencyInfo>::value,
+ "DependencyInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubmitInfo2 ) == sizeof( VkSubmitInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubmitInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubmitInfo2>::value, "SubmitInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo ) == sizeof( VkSemaphoreSubmitInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo>::value,
+ "SemaphoreSubmitInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo ) == sizeof( VkCommandBufferSubmitInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo>::value,
+ "CommandBufferSubmitInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features ) == sizeof( VkPhysicalDeviceSynchronization2Features ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features>::value,
+ "PhysicalDeviceSynchronization2Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures ) ==
+ sizeof( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures>::value,
+ "PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures ) == sizeof( VkPhysicalDeviceImageRobustnessFeatures ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures>::value,
+ "PhysicalDeviceImageRobustnessFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyBufferInfo2 ) == sizeof( VkCopyBufferInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyBufferInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyBufferInfo2>::value,
+ "CopyBufferInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageInfo2 ) == sizeof( VkCopyImageInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyImageInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyImageInfo2>::value,
+ "CopyImageInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 ) == sizeof( VkCopyBufferToImageInfo2 ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2>::value,
+ "CopyBufferToImageInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 ) == sizeof( VkCopyImageToBufferInfo2 ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2>::value,
+ "CopyImageToBufferInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BlitImageInfo2 ) == sizeof( VkBlitImageInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BlitImageInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BlitImageInfo2>::value,
+ "BlitImageInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ResolveImageInfo2 ) == sizeof( VkResolveImageInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ResolveImageInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ResolveImageInfo2>::value,
+ "ResolveImageInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCopy2 ) == sizeof( VkBufferCopy2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCopy2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCopy2>::value, "BufferCopy2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCopy2 ) == sizeof( VkImageCopy2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCopy2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageCopy2>::value, "ImageCopy2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageBlit2 ) == sizeof( VkImageBlit2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageBlit2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageBlit2>::value, "ImageBlit2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferImageCopy2 ) == sizeof( VkBufferImageCopy2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferImageCopy2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferImageCopy2>::value,
+ "BufferImageCopy2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageResolve2 ) == sizeof( VkImageResolve2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageResolve2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageResolve2>::value, "ImageResolve2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures ) == sizeof( VkPhysicalDeviceSubgroupSizeControlFeatures ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures>::value,
+ "PhysicalDeviceSubgroupSizeControlFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties ) ==
+ sizeof( VkPhysicalDeviceSubgroupSizeControlProperties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties>::value,
+ "PhysicalDeviceSubgroupSizeControlProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo ) ==
+ sizeof( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo>::value,
+ "PipelineShaderStageRequiredSubgroupSizeCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures ) == sizeof( VkPhysicalDeviceInlineUniformBlockFeatures ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures>::value,
+ "PhysicalDeviceInlineUniformBlockFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties ) == sizeof( VkPhysicalDeviceInlineUniformBlockProperties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties>::value,
+ "PhysicalDeviceInlineUniformBlockProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock ) == sizeof( VkWriteDescriptorSetInlineUniformBlock ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock>::value,
+ "WriteDescriptorSetInlineUniformBlock is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo ) == sizeof( VkDescriptorPoolInlineUniformBlockCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo>::value,
+ "DescriptorPoolInlineUniformBlockCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures ) ==
+ sizeof( VkPhysicalDeviceTextureCompressionASTCHDRFeatures ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures>::value,
+ "PhysicalDeviceTextureCompressionASTCHDRFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingInfo ) == sizeof( VkRenderingInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingInfo>::value, "RenderingInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo ) == sizeof( VkRenderingAttachmentInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo>::value,
+ "RenderingAttachmentInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo ) == sizeof( VkPipelineRenderingCreateInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo>::value,
+ "PipelineRenderingCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures ) == sizeof( VkPhysicalDeviceDynamicRenderingFeatures ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures>::value,
+ "PhysicalDeviceDynamicRenderingFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo ) == sizeof( VkCommandBufferInheritanceRenderingInfo ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo>::value,
+ "CommandBufferInheritanceRenderingInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures ) ==
+ sizeof( VkPhysicalDeviceShaderIntegerDotProductFeatures ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures>::value,
+ "PhysicalDeviceShaderIntegerDotProductFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties ) ==
+ sizeof( VkPhysicalDeviceShaderIntegerDotProductProperties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties>::value,
+ "PhysicalDeviceShaderIntegerDotProductProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties ) ==
+ sizeof( VkPhysicalDeviceTexelBufferAlignmentProperties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties>::value,
+ "PhysicalDeviceTexelBufferAlignmentProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FormatProperties3 ) == sizeof( VkFormatProperties3 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FormatProperties3>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FormatProperties3>::value,
+ "FormatProperties3 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features ) == sizeof( VkPhysicalDeviceMaintenance4Features ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features>::value,
+ "PhysicalDeviceMaintenance4Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties ) == sizeof( VkPhysicalDeviceMaintenance4Properties ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties>::value,
+ "PhysicalDeviceMaintenance4Properties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements ) == sizeof( VkDeviceBufferMemoryRequirements ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements>::value,
+ "DeviceBufferMemoryRequirements is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements ) == sizeof( VkDeviceImageMemoryRequirements ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements>::value,
+ "DeviceImageMemoryRequirements is not nothrow_move_constructible!" );
+
+//=== VK_KHR_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceKHR ) == sizeof( VkSurfaceKHR ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceKHR>::value, "SurfaceKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::value,
+ "SurfaceCapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>::value,
+ "SurfaceFormatKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_swapchain ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR>::value,
+ "SwapchainCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainKHR ) == sizeof( VkSwapchainKHR ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainKHR>::value, "SwapchainKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentInfoKHR>::value,
+ "PresentInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR ) == sizeof( VkImageSwapchainCreateInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR>::value,
+ "ImageSwapchainCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR ) == sizeof( VkBindImageMemorySwapchainInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR>::value,
+ "BindImageMemorySwapchainInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR ) == sizeof( VkAcquireNextImageInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR>::value,
+ "AcquireNextImageInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR ) == sizeof( VkDeviceGroupPresentCapabilitiesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::value,
+ "DeviceGroupPresentCapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR ) == sizeof( VkDeviceGroupPresentInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR>::value,
+ "DeviceGroupPresentInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR ) == sizeof( VkDeviceGroupSwapchainCreateInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR>::value,
+ "DeviceGroupSwapchainCreateInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_display ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayKHR ) == sizeof( VkDisplayKHR ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayKHR>::value, "DisplayKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR>::value,
+ "DisplayModeCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeKHR ) == sizeof( VkDisplayModeKHR ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::value,
+ "DisplayModeKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR>::value,
+ "DisplayModeParametersKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR>::value,
+ "DisplayModePropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::value,
+ "DisplayPlaneCapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR>::value,
+ "DisplayPlanePropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR>::value,
+ "DisplayPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR>::value,
+ "DisplaySurfaceCreateInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_display_swapchain ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR>::value,
+ "DisplayPresentInfoKHR is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+//=== VK_KHR_xlib_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR>::value,
+ "XlibSurfaceCreateInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+//=== VK_KHR_xcb_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR>::value,
+ "XcbSurfaceCreateInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+//=== VK_KHR_wayland_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR>::value,
+ "WaylandSurfaceCreateInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+//=== VK_KHR_android_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR>::value,
+ "AndroidSurfaceCreateInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+//=== VK_KHR_win32_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR>::value,
+ "Win32SurfaceCreateInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+//=== VK_EXT_debug_report ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT ) == sizeof( VkDebugReportCallbackEXT ),
+ "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::value,
+ "DebugReportCallbackEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT>::value,
+ "DebugReportCallbackCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_AMD_rasterization_order ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD ) ==
+ sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD>::value,
+ "PipelineRasterizationStateRasterizationOrderAMD is not nothrow_move_constructible!" );
+
+//=== VK_EXT_debug_marker ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT>::value,
+ "DebugMarkerObjectNameInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT>::value,
+ "DebugMarkerObjectTagInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT>::value,
+ "DebugMarkerMarkerInfoEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+//=== VK_KHR_video_queue ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionKHR ) == sizeof( VkVideoSessionKHR ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionKHR>::value,
+ "VideoSessionKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR ) == sizeof( VkVideoSessionParametersKHR ),
+ "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>::value,
+ "VideoSessionParametersKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusPropertiesKHR ) == sizeof( VkQueueFamilyQueryResultStatusPropertiesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusPropertiesKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusPropertiesKHR>::value,
+ "QueueFamilyQueryResultStatusPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyVideoPropertiesKHR ) == sizeof( VkQueueFamilyVideoPropertiesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyVideoPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyVideoPropertiesKHR>::value,
+ "QueueFamilyVideoPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR ) == sizeof( VkVideoProfileInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR>::value,
+ "VideoProfileInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR ) == sizeof( VkVideoProfileListInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR>::value,
+ "VideoProfileListInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR ) == sizeof( VkVideoCapabilitiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::value,
+ "VideoCapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR ) == sizeof( VkPhysicalDeviceVideoFormatInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR>::value,
+ "PhysicalDeviceVideoFormatInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR ) == sizeof( VkVideoFormatPropertiesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>::value,
+ "VideoFormatPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR ) == sizeof( VkVideoPictureResourceInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR>::value,
+ "VideoPictureResourceInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR ) == sizeof( VkVideoReferenceSlotInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR>::value,
+ "VideoReferenceSlotInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR ) == sizeof( VkVideoSessionMemoryRequirementsKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR>::value,
+ "VideoSessionMemoryRequirementsKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR ) == sizeof( VkBindVideoSessionMemoryInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR>::value,
+ "BindVideoSessionMemoryInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR ) == sizeof( VkVideoSessionCreateInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR>::value,
+ "VideoSessionCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR ) == sizeof( VkVideoSessionParametersCreateInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR>::value,
+ "VideoSessionParametersCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR ) == sizeof( VkVideoSessionParametersUpdateInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR>::value,
+ "VideoSessionParametersUpdateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR ) == sizeof( VkVideoBeginCodingInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR>::value,
+ "VideoBeginCodingInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR ) == sizeof( VkVideoEndCodingInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR>::value,
+ "VideoEndCodingInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR ) == sizeof( VkVideoCodingControlInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR>::value,
+ "VideoCodingControlInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+//=== VK_KHR_video_decode_queue ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR ) == sizeof( VkVideoDecodeCapabilitiesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR>::value,
+ "VideoDecodeCapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR ) == sizeof( VkVideoDecodeUsageInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR>::value,
+ "VideoDecodeUsageInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR ) == sizeof( VkVideoDecodeInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR>::value,
+ "VideoDecodeInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+//=== VK_NV_dedicated_allocation ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV>::value,
+ "DedicatedAllocationImageCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV>::value,
+ "DedicatedAllocationBufferCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV ) == sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV>::value,
+ "DedicatedAllocationMemoryAllocateInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_transform_feedback ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT>::value,
+ "PhysicalDeviceTransformFeedbackFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT ) ==
+ sizeof( VkPhysicalDeviceTransformFeedbackPropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT>::value,
+ "PhysicalDeviceTransformFeedbackPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT ) ==
+ sizeof( VkPipelineRasterizationStateStreamCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT>::value,
+ "PipelineRasterizationStateStreamCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_NVX_binary_import ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuModuleNVX ) == sizeof( VkCuModuleNVX ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuModuleNVX>::value, "CuModuleNVX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuFunctionNVX ) == sizeof( VkCuFunctionNVX ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuFunctionNVX>::value, "CuFunctionNVX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX ) == sizeof( VkCuModuleCreateInfoNVX ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX>::value,
+ "CuModuleCreateInfoNVX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX ) == sizeof( VkCuFunctionCreateInfoNVX ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX>::value,
+ "CuFunctionCreateInfoNVX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX ) == sizeof( VkCuLaunchInfoNVX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX>::value,
+ "CuLaunchInfoNVX is not nothrow_move_constructible!" );
+
+//=== VK_NVX_image_view_handle ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX ) == sizeof( VkImageViewHandleInfoNVX ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX>::value,
+ "ImageViewHandleInfoNVX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX ) == sizeof( VkImageViewAddressPropertiesNVX ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::value,
+ "ImageViewAddressPropertiesNVX is not nothrow_move_constructible!" );
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+//=== VK_EXT_video_encode_h264 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT ) == sizeof( VkVideoEncodeH264CapabilitiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT>::value,
+ "VideoEncodeH264CapabilitiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT ) ==
+ sizeof( VkVideoEncodeH264SessionParametersCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT>::value,
+ "VideoEncodeH264SessionParametersCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT ) == sizeof( VkVideoEncodeH264SessionParametersAddInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT>::value,
+ "VideoEncodeH264SessionParametersAddInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264VclFrameInfoEXT ) == sizeof( VkVideoEncodeH264VclFrameInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264VclFrameInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264VclFrameInfoEXT>::value,
+ "VideoEncodeH264VclFrameInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT ) == sizeof( VkVideoEncodeH264ReferenceListsInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT>::value,
+ "VideoEncodeH264ReferenceListsInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264EmitPictureParametersInfoEXT ) ==
+ sizeof( VkVideoEncodeH264EmitPictureParametersInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264EmitPictureParametersInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264EmitPictureParametersInfoEXT>::value,
+ "VideoEncodeH264EmitPictureParametersInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT ) == sizeof( VkVideoEncodeH264DpbSlotInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT>::value,
+ "VideoEncodeH264DpbSlotInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT ) == sizeof( VkVideoEncodeH264NaluSliceInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT>::value,
+ "VideoEncodeH264NaluSliceInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoEXT ) == sizeof( VkVideoEncodeH264ProfileInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoEXT>::value,
+ "VideoEncodeH264ProfileInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoEXT ) == sizeof( VkVideoEncodeH264RateControlInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoEXT>::value,
+ "VideoEncodeH264RateControlInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT ) == sizeof( VkVideoEncodeH264RateControlLayerInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT>::value,
+ "VideoEncodeH264RateControlLayerInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT ) == sizeof( VkVideoEncodeH264QpEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT>::value,
+ "VideoEncodeH264QpEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT ) == sizeof( VkVideoEncodeH264FrameSizeEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT>::value,
+ "VideoEncodeH264FrameSizeEXT is not nothrow_move_constructible!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+//=== VK_EXT_video_encode_h265 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT ) == sizeof( VkVideoEncodeH265CapabilitiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT>::value,
+ "VideoEncodeH265CapabilitiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT ) ==
+ sizeof( VkVideoEncodeH265SessionParametersCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT>::value,
+ "VideoEncodeH265SessionParametersCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT ) == sizeof( VkVideoEncodeH265SessionParametersAddInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT>::value,
+ "VideoEncodeH265SessionParametersAddInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265VclFrameInfoEXT ) == sizeof( VkVideoEncodeH265VclFrameInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265VclFrameInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265VclFrameInfoEXT>::value,
+ "VideoEncodeH265VclFrameInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265EmitPictureParametersInfoEXT ) ==
+ sizeof( VkVideoEncodeH265EmitPictureParametersInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265EmitPictureParametersInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265EmitPictureParametersInfoEXT>::value,
+ "VideoEncodeH265EmitPictureParametersInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT ) == sizeof( VkVideoEncodeH265DpbSlotInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT>::value,
+ "VideoEncodeH265DpbSlotInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT ) == sizeof( VkVideoEncodeH265NaluSliceSegmentInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT>::value,
+ "VideoEncodeH265NaluSliceSegmentInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoEXT ) == sizeof( VkVideoEncodeH265ProfileInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoEXT>::value,
+ "VideoEncodeH265ProfileInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT ) == sizeof( VkVideoEncodeH265ReferenceListsInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT>::value,
+ "VideoEncodeH265ReferenceListsInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT ) == sizeof( VkVideoEncodeH265RateControlInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT>::value,
+ "VideoEncodeH265RateControlInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT ) == sizeof( VkVideoEncodeH265RateControlLayerInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT>::value,
+ "VideoEncodeH265RateControlLayerInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT ) == sizeof( VkVideoEncodeH265QpEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT>::value,
+ "VideoEncodeH265QpEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT ) == sizeof( VkVideoEncodeH265FrameSizeEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT>::value,
+ "VideoEncodeH265FrameSizeEXT is not nothrow_move_constructible!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+//=== VK_EXT_video_decode_h264 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoEXT ) == sizeof( VkVideoDecodeH264ProfileInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoEXT>::value,
+ "VideoDecodeH264ProfileInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesEXT ) == sizeof( VkVideoDecodeH264CapabilitiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesEXT>::value,
+ "VideoDecodeH264CapabilitiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoEXT ) ==
+ sizeof( VkVideoDecodeH264SessionParametersCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoEXT>::value,
+ "VideoDecodeH264SessionParametersCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT ) == sizeof( VkVideoDecodeH264SessionParametersAddInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT>::value,
+ "VideoDecodeH264SessionParametersAddInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoEXT ) == sizeof( VkVideoDecodeH264PictureInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoEXT>::value,
+ "VideoDecodeH264PictureInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoEXT ) == sizeof( VkVideoDecodeH264DpbSlotInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoEXT>::value,
+ "VideoDecodeH264DpbSlotInfoEXT is not nothrow_move_constructible!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+//=== VK_AMD_texture_gather_bias_lod ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD ) == sizeof( VkTextureLODGatherFormatPropertiesAMD ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD>::value,
+ "TextureLODGatherFormatPropertiesAMD is not nothrow_move_constructible!" );
+
+//=== VK_AMD_shader_info ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD ) == sizeof( VkShaderResourceUsageAMD ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD>::value,
+ "ShaderResourceUsageAMD is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD ) == sizeof( VkShaderStatisticsInfoAMD ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD>::value,
+ "ShaderStatisticsInfoAMD is not nothrow_move_constructible!" );
+
+//=== VK_KHR_dynamic_rendering ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR ) ==
+ sizeof( VkRenderingFragmentShadingRateAttachmentInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR>::value,
+ "RenderingFragmentShadingRateAttachmentInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT ) ==
+ sizeof( VkRenderingFragmentDensityMapAttachmentInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT>::value,
+ "RenderingFragmentDensityMapAttachmentInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD ) == sizeof( VkAttachmentSampleCountInfoAMD ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD>::value,
+ "AttachmentSampleCountInfoAMD is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX ) == sizeof( VkMultiviewPerViewAttributesInfoNVX ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX>::value,
+ "MultiviewPerViewAttributesInfoNVX is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_GGP )
+//=== VK_GGP_stream_descriptor_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP ) == sizeof( VkStreamDescriptorSurfaceCreateInfoGGP ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP>::value,
+ "StreamDescriptorSurfaceCreateInfoGGP is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_GGP*/
+
+//=== VK_NV_corner_sampled_image ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV ) == sizeof( VkPhysicalDeviceCornerSampledImageFeaturesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV>::value,
+ "PhysicalDeviceCornerSampledImageFeaturesNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_external_memory_capabilities ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::value,
+ "ExternalImageFormatPropertiesNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_external_memory ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV>::value,
+ "ExternalMemoryImageCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV>::value,
+ "ExportMemoryAllocateInfoNV is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+//=== VK_NV_external_memory_win32 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV>::value,
+ "ImportMemoryWin32HandleInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV>::value,
+ "ExportMemoryWin32HandleInfoNV is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+//=== VK_NV_win32_keyed_mutex ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV>::value,
+ "Win32KeyedMutexAcquireReleaseInfoNV is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+//=== VK_EXT_validation_flags ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ValidationFlagsEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ValidationFlagsEXT>::value,
+ "ValidationFlagsEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+//=== VK_NN_vi_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN>::value,
+ "ViSurfaceCreateInfoNN is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+//=== VK_EXT_astc_decode_mode ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT ) == sizeof( VkImageViewASTCDecodeModeEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT>::value,
+ "ImageViewASTCDecodeModeEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT ) == sizeof( VkPhysicalDeviceASTCDecodeFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT>::value,
+ "PhysicalDeviceASTCDecodeFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_pipeline_robustness ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT ) ==
+ sizeof( VkPhysicalDevicePipelineRobustnessFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT>::value,
+ "PhysicalDevicePipelineRobustnessFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT ) ==
+ sizeof( VkPhysicalDevicePipelineRobustnessPropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT>::value,
+ "PhysicalDevicePipelineRobustnessPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT ) == sizeof( VkPipelineRobustnessCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT>::value,
+ "PipelineRobustnessCreateInfoEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+//=== VK_KHR_external_memory_win32 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR ) == sizeof( VkImportMemoryWin32HandleInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR>::value,
+ "ImportMemoryWin32HandleInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR ) == sizeof( VkExportMemoryWin32HandleInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR>::value,
+ "ExportMemoryWin32HandleInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::value,
+ "MemoryWin32HandlePropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR ) == sizeof( VkMemoryGetWin32HandleInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR>::value,
+ "MemoryGetWin32HandleInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+//=== VK_KHR_external_memory_fd ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR>::value,
+ "ImportMemoryFdInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::value,
+ "MemoryFdPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR>::value,
+ "MemoryGetFdInfoKHR is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+//=== VK_KHR_win32_keyed_mutex ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR>::value,
+ "Win32KeyedMutexAcquireReleaseInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+//=== VK_KHR_external_semaphore_win32 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR ) == sizeof( VkImportSemaphoreWin32HandleInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR>::value,
+ "ImportSemaphoreWin32HandleInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR ) == sizeof( VkExportSemaphoreWin32HandleInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR>::value,
+ "ExportSemaphoreWin32HandleInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR>::value,
+ "D3D12FenceSubmitInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR ) == sizeof( VkSemaphoreGetWin32HandleInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR>::value,
+ "SemaphoreGetWin32HandleInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+//=== VK_KHR_external_semaphore_fd ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR ) == sizeof( VkImportSemaphoreFdInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR>::value,
+ "ImportSemaphoreFdInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR>::value,
+ "SemaphoreGetFdInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_push_descriptor ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR ) == sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR>::value,
+ "PhysicalDevicePushDescriptorPropertiesKHR is not nothrow_move_constructible!" );
+
+//=== VK_EXT_conditional_rendering ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT>::value,
+ "ConditionalRenderingBeginInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceConditionalRenderingFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT>::value,
+ "PhysicalDeviceConditionalRenderingFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT ) ==
+ sizeof( VkCommandBufferInheritanceConditionalRenderingInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT>::value,
+ "CommandBufferInheritanceConditionalRenderingInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_incremental_present ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentRegionsKHR ) == sizeof( VkPresentRegionsKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentRegionsKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentRegionsKHR>::value,
+ "PresentRegionsKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentRegionKHR ) == sizeof( VkPresentRegionKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentRegionKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentRegionKHR>::value,
+ "PresentRegionKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RectLayerKHR ) == sizeof( VkRectLayerKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RectLayerKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RectLayerKHR>::value, "RectLayerKHR is not nothrow_move_constructible!" );
+
+//=== VK_NV_clip_space_w_scaling ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ViewportWScalingNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ViewportWScalingNV>::value,
+ "ViewportWScalingNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV ) == sizeof( VkPipelineViewportWScalingStateCreateInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV>::value,
+ "PipelineViewportWScalingStateCreateInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_display_surface_counter ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::value,
+ "SurfaceCapabilities2EXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_display_control ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT>::value,
+ "DisplayPowerInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT>::value,
+ "DeviceEventInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT>::value,
+ "DisplayEventInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT ) == sizeof( VkSwapchainCounterCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT>::value,
+ "SwapchainCounterCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_GOOGLE_display_timing ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE ) == sizeof( VkRefreshCycleDurationGOOGLE ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::value,
+ "RefreshCycleDurationGOOGLE is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE ) == sizeof( VkPastPresentationTimingGOOGLE ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>::value,
+ "PastPresentationTimingGOOGLE is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE ) == sizeof( VkPresentTimesInfoGOOGLE ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE>::value,
+ "PresentTimesInfoGOOGLE is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE ) == sizeof( VkPresentTimeGOOGLE ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE>::value,
+ "PresentTimeGOOGLE is not nothrow_move_constructible!" );
+
+//=== VK_NVX_multiview_per_view_attributes ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) ==
+ sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>::value,
+ "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX is not nothrow_move_constructible!" );
+
+//=== VK_NV_viewport_swizzle ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ViewportSwizzleNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ViewportSwizzleNV>::value,
+ "ViewportSwizzleNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV ) == sizeof( VkPipelineViewportSwizzleStateCreateInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV>::value,
+ "PipelineViewportSwizzleStateCreateInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_discard_rectangles ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT ) ==
+ sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT>::value,
+ "PhysicalDeviceDiscardRectanglePropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT ) == sizeof( VkPipelineDiscardRectangleStateCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT>::value,
+ "PipelineDiscardRectangleStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_conservative_rasterization ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT ) ==
+ sizeof( VkPhysicalDeviceConservativeRasterizationPropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT>::value,
+ "PhysicalDeviceConservativeRasterizationPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT ) ==
+ sizeof( VkPipelineRasterizationConservativeStateCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT>::value,
+ "PipelineRasterizationConservativeStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_depth_clip_enable ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClipEnableFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT>::value,
+ "PhysicalDeviceDepthClipEnableFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT ) ==
+ sizeof( VkPipelineRasterizationDepthClipStateCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT>::value,
+ "PipelineRasterizationDepthClipStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_hdr_metadata ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HdrMetadataEXT ) == sizeof( VkHdrMetadataEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::HdrMetadataEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::HdrMetadataEXT>::value,
+ "HdrMetadataEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::XYColorEXT ) == sizeof( VkXYColorEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::XYColorEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::XYColorEXT>::value, "XYColorEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_shared_presentable_image ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR ) == sizeof( VkSharedPresentSurfaceCapabilitiesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR>::value,
+ "SharedPresentSurfaceCapabilitiesKHR is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+//=== VK_KHR_external_fence_win32 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR ) == sizeof( VkImportFenceWin32HandleInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR>::value,
+ "ImportFenceWin32HandleInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR ) == sizeof( VkExportFenceWin32HandleInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR>::value,
+ "ExportFenceWin32HandleInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR ) == sizeof( VkFenceGetWin32HandleInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR>::value,
+ "FenceGetWin32HandleInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+//=== VK_KHR_external_fence_fd ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR>::value,
+ "ImportFenceFdInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR>::value,
+ "FenceGetFdInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_performance_query ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR ) == sizeof( VkPhysicalDevicePerformanceQueryFeaturesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR>::value,
+ "PhysicalDevicePerformanceQueryFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR ) ==
+ sizeof( VkPhysicalDevicePerformanceQueryPropertiesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR>::value,
+ "PhysicalDevicePerformanceQueryPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterKHR ) == sizeof( VkPerformanceCounterKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>::value,
+ "PerformanceCounterKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR ) == sizeof( VkPerformanceCounterDescriptionKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>::value,
+ "PerformanceCounterDescriptionKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR ) == sizeof( VkQueryPoolPerformanceCreateInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR>::value,
+ "QueryPoolPerformanceCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR ) == sizeof( VkPerformanceCounterResultKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR>::value,
+ "PerformanceCounterResultKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR ) == sizeof( VkAcquireProfilingLockInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR>::value,
+ "AcquireProfilingLockInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR ) == sizeof( VkPerformanceQuerySubmitInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR>::value,
+ "PerformanceQuerySubmitInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_get_surface_capabilities2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR ) == sizeof( VkPhysicalDeviceSurfaceInfo2KHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR>::value,
+ "PhysicalDeviceSurfaceInfo2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR ) == sizeof( VkSurfaceCapabilities2KHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::value,
+ "SurfaceCapabilities2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>::value,
+ "SurfaceFormat2KHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_get_display_properties2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayProperties2KHR ) == sizeof( VkDisplayProperties2KHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR>::value,
+ "DisplayProperties2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR ) == sizeof( VkDisplayPlaneProperties2KHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR>::value,
+ "DisplayPlaneProperties2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR ) == sizeof( VkDisplayModeProperties2KHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>::value,
+ "DisplayModeProperties2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR ) == sizeof( VkDisplayPlaneInfo2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR>::value,
+ "DisplayPlaneInfo2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR ) == sizeof( VkDisplayPlaneCapabilities2KHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::value,
+ "DisplayPlaneCapabilities2KHR is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+//=== VK_MVK_ios_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK ) == sizeof( VkIOSSurfaceCreateInfoMVK ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK>::value,
+ "IOSSurfaceCreateInfoMVK is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+//=== VK_MVK_macos_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK ) == sizeof( VkMacOSSurfaceCreateInfoMVK ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK>::value,
+ "MacOSSurfaceCreateInfoMVK is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+//=== VK_EXT_debug_utils ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT>::value,
+ "DebugUtilsLabelEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT ) == sizeof( VkDebugUtilsMessengerCallbackDataEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT>::value,
+ "DebugUtilsMessengerCallbackDataEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT ) == sizeof( VkDebugUtilsMessengerCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT>::value,
+ "DebugUtilsMessengerCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT ) == sizeof( VkDebugUtilsMessengerEXT ),
+ "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::value,
+ "DebugUtilsMessengerEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT ) == sizeof( VkDebugUtilsObjectNameInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT>::value,
+ "DebugUtilsObjectNameInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT ) == sizeof( VkDebugUtilsObjectTagInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT>::value,
+ "DebugUtilsObjectTagInfoEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+//=== VK_ANDROID_external_memory_android_hardware_buffer ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID ) == sizeof( VkAndroidHardwareBufferUsageANDROID ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID>::value,
+ "AndroidHardwareBufferUsageANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferPropertiesANDROID ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::value,
+ "AndroidHardwareBufferPropertiesANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID ) ==
+ sizeof( VkAndroidHardwareBufferFormatPropertiesANDROID ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID>::value,
+ "AndroidHardwareBufferFormatPropertiesANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID ) == sizeof( VkImportAndroidHardwareBufferInfoANDROID ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID>::value,
+ "ImportAndroidHardwareBufferInfoANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID ) == sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID>::value,
+ "MemoryGetAndroidHardwareBufferInfoANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalFormatANDROID>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalFormatANDROID>::value,
+ "ExternalFormatANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID ) ==
+ sizeof( VkAndroidHardwareBufferFormatProperties2ANDROID ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID>::value,
+ "AndroidHardwareBufferFormatProperties2ANDROID is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+//=== VK_EXT_sample_locations ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SampleLocationEXT ) == sizeof( VkSampleLocationEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SampleLocationEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SampleLocationEXT>::value,
+ "SampleLocationEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT ) == sizeof( VkSampleLocationsInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT>::value,
+ "SampleLocationsInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT ) == sizeof( VkAttachmentSampleLocationsEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT>::value,
+ "AttachmentSampleLocationsEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT ) == sizeof( VkSubpassSampleLocationsEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT>::value,
+ "SubpassSampleLocationsEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT ) == sizeof( VkRenderPassSampleLocationsBeginInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT>::value,
+ "RenderPassSampleLocationsBeginInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT ) == sizeof( VkPipelineSampleLocationsStateCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT>::value,
+ "PipelineSampleLocationsStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT ) == sizeof( VkPhysicalDeviceSampleLocationsPropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT>::value,
+ "PhysicalDeviceSampleLocationsPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT>::value,
+ "MultisamplePropertiesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_blend_operation_advanced ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT>::value,
+ "PhysicalDeviceBlendOperationAdvancedFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) ==
+ sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT>::value,
+ "PhysicalDeviceBlendOperationAdvancedPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT ) ==
+ sizeof( VkPipelineColorBlendAdvancedStateCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT>::value,
+ "PipelineColorBlendAdvancedStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_NV_fragment_coverage_to_color ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV ) == sizeof( VkPipelineCoverageToColorStateCreateInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV>::value,
+ "PipelineCoverageToColorStateCreateInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_KHR_acceleration_structure ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR ) == sizeof( VkDeviceOrHostAddressKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR>::value,
+ "DeviceOrHostAddressKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR ) == sizeof( VkDeviceOrHostAddressConstKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR>::value,
+ "DeviceOrHostAddressConstKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR ) == sizeof( VkAccelerationStructureBuildRangeInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR>::value,
+ "AccelerationStructureBuildRangeInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AabbPositionsKHR ) == sizeof( VkAabbPositionsKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AabbPositionsKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AabbPositionsKHR>::value,
+ "AabbPositionsKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR ) ==
+ sizeof( VkAccelerationStructureGeometryTrianglesDataKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR>::value,
+ "AccelerationStructureGeometryTrianglesDataKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TransformMatrixKHR ) == sizeof( VkTransformMatrixKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TransformMatrixKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TransformMatrixKHR>::value,
+ "TransformMatrixKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR ) == sizeof( VkAccelerationStructureBuildGeometryInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR>::value,
+ "AccelerationStructureBuildGeometryInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR ) == sizeof( VkAccelerationStructureGeometryAabbsDataKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR>::value,
+ "AccelerationStructureGeometryAabbsDataKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR ) == sizeof( VkAccelerationStructureInstanceKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR>::value,
+ "AccelerationStructureInstanceKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR ) ==
+ sizeof( VkAccelerationStructureGeometryInstancesDataKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR>::value,
+ "AccelerationStructureGeometryInstancesDataKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR ) == sizeof( VkAccelerationStructureGeometryDataKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR>::value,
+ "AccelerationStructureGeometryDataKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR ) == sizeof( VkAccelerationStructureGeometryKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR>::value,
+ "AccelerationStructureGeometryKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR ) == sizeof( VkAccelerationStructureCreateInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR>::value,
+ "AccelerationStructureCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR ) == sizeof( VkAccelerationStructureKHR ),
+ "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::value,
+ "AccelerationStructureKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR ) == sizeof( VkWriteDescriptorSetAccelerationStructureKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR>::value,
+ "WriteDescriptorSetAccelerationStructureKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR ) ==
+ sizeof( VkPhysicalDeviceAccelerationStructureFeaturesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR>::value,
+ "PhysicalDeviceAccelerationStructureFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR ) ==
+ sizeof( VkPhysicalDeviceAccelerationStructurePropertiesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR>::value,
+ "PhysicalDeviceAccelerationStructurePropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR ) == sizeof( VkAccelerationStructureDeviceAddressInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR>::value,
+ "AccelerationStructureDeviceAddressInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR ) == sizeof( VkAccelerationStructureVersionInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR>::value,
+ "AccelerationStructureVersionInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR ) == sizeof( VkCopyAccelerationStructureToMemoryInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR>::value,
+ "CopyAccelerationStructureToMemoryInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR ) == sizeof( VkCopyMemoryToAccelerationStructureInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR>::value,
+ "CopyMemoryToAccelerationStructureInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR ) == sizeof( VkCopyAccelerationStructureInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR>::value,
+ "CopyAccelerationStructureInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR ) == sizeof( VkAccelerationStructureBuildSizesInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR>::value,
+ "AccelerationStructureBuildSizesInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_NV_framebuffer_mixed_samples ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV ) ==
+ sizeof( VkPipelineCoverageModulationStateCreateInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV>::value,
+ "PipelineCoverageModulationStateCreateInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_shader_sm_builtins ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV>::value,
+ "PhysicalDeviceShaderSMBuiltinsPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV>::value,
+ "PhysicalDeviceShaderSMBuiltinsFeaturesNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_image_drm_format_modifier ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT ) == sizeof( VkDrmFormatModifierPropertiesListEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT>::value,
+ "DrmFormatModifierPropertiesListEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT ) == sizeof( VkDrmFormatModifierPropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT>::value,
+ "DrmFormatModifierPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT ) ==
+ sizeof( VkPhysicalDeviceImageDrmFormatModifierInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT>::value,
+ "PhysicalDeviceImageDrmFormatModifierInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierListCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT>::value,
+ "ImageDrmFormatModifierListCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT ) ==
+ sizeof( VkImageDrmFormatModifierExplicitCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT>::value,
+ "ImageDrmFormatModifierExplicitCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT ) == sizeof( VkImageDrmFormatModifierPropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::value,
+ "ImageDrmFormatModifierPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT ) == sizeof( VkDrmFormatModifierPropertiesList2EXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT>::value,
+ "DrmFormatModifierPropertiesList2EXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT ) == sizeof( VkDrmFormatModifierProperties2EXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT>::value,
+ "DrmFormatModifierProperties2EXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_validation_cache ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationCacheEXT ) == sizeof( VkValidationCacheEXT ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::value,
+ "ValidationCacheEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT ) == sizeof( VkValidationCacheCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT>::value,
+ "ValidationCacheCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT ) == sizeof( VkShaderModuleValidationCacheCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT>::value,
+ "ShaderModuleValidationCacheCreateInfoEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+//=== VK_KHR_portability_subset ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR ) == sizeof( VkPhysicalDevicePortabilitySubsetFeaturesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR>::value,
+ "PhysicalDevicePortabilitySubsetFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR ) ==
+ sizeof( VkPhysicalDevicePortabilitySubsetPropertiesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR>::value,
+ "PhysicalDevicePortabilitySubsetPropertiesKHR is not nothrow_move_constructible!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+//=== VK_NV_shading_rate_image ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV>::value,
+ "ShadingRatePaletteNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV ) ==
+ sizeof( VkPipelineViewportShadingRateImageStateCreateInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV>::value,
+ "PipelineViewportShadingRateImageStateCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV ) == sizeof( VkPhysicalDeviceShadingRateImageFeaturesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV>::value,
+ "PhysicalDeviceShadingRateImageFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV ) == sizeof( VkPhysicalDeviceShadingRateImagePropertiesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV>::value,
+ "PhysicalDeviceShadingRateImagePropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV ) == sizeof( VkCoarseSampleLocationNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV>::value,
+ "CoarseSampleLocationNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV ) == sizeof( VkCoarseSampleOrderCustomNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV>::value,
+ "CoarseSampleOrderCustomNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV ) ==
+ sizeof( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV>::value,
+ "PipelineViewportCoarseSampleOrderStateCreateInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_ray_tracing ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV ) == sizeof( VkRayTracingShaderGroupCreateInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV>::value,
+ "RayTracingShaderGroupCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV ) == sizeof( VkRayTracingPipelineCreateInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV>::value,
+ "RayTracingPipelineCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV ) == sizeof( VkGeometryTrianglesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV>::value,
+ "GeometryTrianglesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryAABBNV ) == sizeof( VkGeometryAABBNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeometryAABBNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeometryAABBNV>::value,
+ "GeometryAABBNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryDataNV ) == sizeof( VkGeometryDataNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeometryDataNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeometryDataNV>::value,
+ "GeometryDataNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryNV ) == sizeof( VkGeometryNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeometryNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeometryNV>::value, "GeometryNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV ) == sizeof( VkAccelerationStructureInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV>::value,
+ "AccelerationStructureInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV ) == sizeof( VkAccelerationStructureCreateInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV>::value,
+ "AccelerationStructureCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureNV ) == sizeof( VkAccelerationStructureNV ),
+ "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::value,
+ "AccelerationStructureNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV ) == sizeof( VkBindAccelerationStructureMemoryInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV>::value,
+ "BindAccelerationStructureMemoryInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV ) == sizeof( VkWriteDescriptorSetAccelerationStructureNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV>::value,
+ "WriteDescriptorSetAccelerationStructureNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV ) ==
+ sizeof( VkAccelerationStructureMemoryRequirementsInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV>::value,
+ "AccelerationStructureMemoryRequirementsInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV ) == sizeof( VkPhysicalDeviceRayTracingPropertiesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV>::value,
+ "PhysicalDeviceRayTracingPropertiesNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_representative_fragment_test ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) ==
+ sizeof( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV>::value,
+ "PhysicalDeviceRepresentativeFragmentTestFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV ) ==
+ sizeof( VkPipelineRepresentativeFragmentTestStateCreateInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV>::value,
+ "PipelineRepresentativeFragmentTestStateCreateInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_filter_cubic ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT ) == sizeof( VkPhysicalDeviceImageViewImageFormatInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT>::value,
+ "PhysicalDeviceImageViewImageFormatInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT ) ==
+ sizeof( VkFilterCubicImageViewImageFormatPropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT>::value,
+ "FilterCubicImageViewImageFormatPropertiesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_external_memory_host ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT ) == sizeof( VkImportMemoryHostPointerInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT>::value,
+ "ImportMemoryHostPointerInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT ) == sizeof( VkMemoryHostPointerPropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::value,
+ "MemoryHostPointerPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT ) ==
+ sizeof( VkPhysicalDeviceExternalMemoryHostPropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT>::value,
+ "PhysicalDeviceExternalMemoryHostPropertiesEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_shader_clock ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR ) == sizeof( VkPhysicalDeviceShaderClockFeaturesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR>::value,
+ "PhysicalDeviceShaderClockFeaturesKHR is not nothrow_move_constructible!" );
+
+//=== VK_AMD_pipeline_compiler_control ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD ) == sizeof( VkPipelineCompilerControlCreateInfoAMD ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD>::value,
+ "PipelineCompilerControlCreateInfoAMD is not nothrow_move_constructible!" );
+
+//=== VK_EXT_calibrated_timestamps ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT ) == sizeof( VkCalibratedTimestampInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT>::value,
+ "CalibratedTimestampInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_AMD_shader_core_properties ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD ) == sizeof( VkPhysicalDeviceShaderCorePropertiesAMD ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD>::value,
+ "PhysicalDeviceShaderCorePropertiesAMD is not nothrow_move_constructible!" );
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+//=== VK_EXT_video_decode_h265 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoEXT ) == sizeof( VkVideoDecodeH265ProfileInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoEXT>::value,
+ "VideoDecodeH265ProfileInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesEXT ) == sizeof( VkVideoDecodeH265CapabilitiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesEXT>::value,
+ "VideoDecodeH265CapabilitiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoEXT ) ==
+ sizeof( VkVideoDecodeH265SessionParametersCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoEXT>::value,
+ "VideoDecodeH265SessionParametersCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT ) == sizeof( VkVideoDecodeH265SessionParametersAddInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT>::value,
+ "VideoDecodeH265SessionParametersAddInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoEXT ) == sizeof( VkVideoDecodeH265PictureInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoEXT>::value,
+ "VideoDecodeH265PictureInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoEXT ) == sizeof( VkVideoDecodeH265DpbSlotInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoEXT>::value,
+ "VideoDecodeH265DpbSlotInfoEXT is not nothrow_move_constructible!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+//=== VK_KHR_global_priority ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR>::value,
+ "DeviceQueueGlobalPriorityCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR ) ==
+ sizeof( VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR>::value,
+ "PhysicalDeviceGlobalPriorityQueryFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR ) == sizeof( VkQueueFamilyGlobalPriorityPropertiesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR>::value,
+ "QueueFamilyGlobalPriorityPropertiesKHR is not nothrow_move_constructible!" );
+
+//=== VK_AMD_memory_overallocation_behavior ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD ) == sizeof( VkDeviceMemoryOverallocationCreateInfoAMD ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD>::value,
+ "DeviceMemoryOverallocationCreateInfoAMD is not nothrow_move_constructible!" );
+
+//=== VK_EXT_vertex_attribute_divisor ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) ==
+ sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT>::value,
+ "PhysicalDeviceVertexAttributeDivisorPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT ) == sizeof( VkVertexInputBindingDivisorDescriptionEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT>::value,
+ "VertexInputBindingDivisorDescriptionEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT ) ==
+ sizeof( VkPipelineVertexInputDivisorStateCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT>::value,
+ "PipelineVertexInputDivisorStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT>::value,
+ "PhysicalDeviceVertexAttributeDivisorFeaturesEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_GGP )
+//=== VK_GGP_frame_token ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP ) == sizeof( VkPresentFrameTokenGGP ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP>::value,
+ "PresentFrameTokenGGP is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_GGP*/
+
+//=== VK_NV_compute_shader_derivatives ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV ) ==
+ sizeof( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV>::value,
+ "PhysicalDeviceComputeShaderDerivativesFeaturesNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_mesh_shader ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV>::value,
+ "PhysicalDeviceMeshShaderFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV>::value,
+ "PhysicalDeviceMeshShaderPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV ) == sizeof( VkDrawMeshTasksIndirectCommandNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV>::value,
+ "DrawMeshTasksIndirectCommandNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_shader_image_footprint ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV ) ==
+ sizeof( VkPhysicalDeviceShaderImageFootprintFeaturesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV>::value,
+ "PhysicalDeviceShaderImageFootprintFeaturesNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_scissor_exclusive ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV ) ==
+ sizeof( VkPipelineViewportExclusiveScissorStateCreateInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV>::value,
+ "PipelineViewportExclusiveScissorStateCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV ) == sizeof( VkPhysicalDeviceExclusiveScissorFeaturesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV>::value,
+ "PhysicalDeviceExclusiveScissorFeaturesNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_device_diagnostic_checkpoints ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV ) == sizeof( VkQueueFamilyCheckpointPropertiesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV>::value,
+ "QueueFamilyCheckpointPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CheckpointDataNV ) == sizeof( VkCheckpointDataNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CheckpointDataNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CheckpointDataNV>::value,
+ "CheckpointDataNV is not nothrow_move_constructible!" );
+
+//=== VK_INTEL_shader_integer_functions2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) ==
+ sizeof( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>::value,
+ "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL is not nothrow_move_constructible!" );
+
+//=== VK_INTEL_performance_query ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) == sizeof( VkPerformanceValueDataINTEL ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL>::value,
+ "PerformanceValueDataINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueINTEL ) == sizeof( VkPerformanceValueINTEL ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::value,
+ "PerformanceValueINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL ) == sizeof( VkInitializePerformanceApiInfoINTEL ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL>::value,
+ "InitializePerformanceApiInfoINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL ) == sizeof( VkQueryPoolPerformanceQueryCreateInfoINTEL ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL>::value,
+ "QueryPoolPerformanceQueryCreateInfoINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL ) == sizeof( VkPerformanceMarkerInfoINTEL ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL>::value,
+ "PerformanceMarkerInfoINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL ) == sizeof( VkPerformanceStreamMarkerInfoINTEL ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL>::value,
+ "PerformanceStreamMarkerInfoINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL ) == sizeof( VkPerformanceOverrideInfoINTEL ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL>::value,
+ "PerformanceOverrideInfoINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL ) == sizeof( VkPerformanceConfigurationAcquireInfoINTEL ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL>::value,
+ "PerformanceConfigurationAcquireInfoINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL ) == sizeof( VkPerformanceConfigurationINTEL ),
+ "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::value,
+ "PerformanceConfigurationINTEL is not nothrow_move_constructible!" );
+
+//=== VK_EXT_pci_bus_info ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT ) == sizeof( VkPhysicalDevicePCIBusInfoPropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT>::value,
+ "PhysicalDevicePCIBusInfoPropertiesEXT is not nothrow_move_constructible!" );
+
+//=== VK_AMD_display_native_hdr ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD ) == sizeof( VkDisplayNativeHdrSurfaceCapabilitiesAMD ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD>::value,
+ "DisplayNativeHdrSurfaceCapabilitiesAMD is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD ) == sizeof( VkSwapchainDisplayNativeHdrCreateInfoAMD ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD>::value,
+ "SwapchainDisplayNativeHdrCreateInfoAMD is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+//=== VK_FUCHSIA_imagepipe_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA ) == sizeof( VkImagePipeSurfaceCreateInfoFUCHSIA ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA>::value,
+ "ImagePipeSurfaceCreateInfoFUCHSIA is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+//=== VK_EXT_metal_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT ) == sizeof( VkMetalSurfaceCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT>::value,
+ "MetalSurfaceCreateInfoEXT is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+//=== VK_EXT_fragment_density_map ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceFragmentDensityMapFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT>::value,
+ "PhysicalDeviceFragmentDensityMapFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT ) ==
+ sizeof( VkPhysicalDeviceFragmentDensityMapPropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT>::value,
+ "PhysicalDeviceFragmentDensityMapPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT ) == sizeof( VkRenderPassFragmentDensityMapCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT>::value,
+ "RenderPassFragmentDensityMapCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_fragment_shading_rate ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR ) == sizeof( VkFragmentShadingRateAttachmentInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR>::value,
+ "FragmentShadingRateAttachmentInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR ) ==
+ sizeof( VkPipelineFragmentShadingRateStateCreateInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR>::value,
+ "PipelineFragmentShadingRateStateCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR ) ==
+ sizeof( VkPhysicalDeviceFragmentShadingRateFeaturesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR>::value,
+ "PhysicalDeviceFragmentShadingRateFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR ) ==
+ sizeof( VkPhysicalDeviceFragmentShadingRatePropertiesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR>::value,
+ "PhysicalDeviceFragmentShadingRatePropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR ) == sizeof( VkPhysicalDeviceFragmentShadingRateKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>::value,
+ "PhysicalDeviceFragmentShadingRateKHR is not nothrow_move_constructible!" );
+
+//=== VK_AMD_shader_core_properties2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD ) == sizeof( VkPhysicalDeviceShaderCoreProperties2AMD ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD>::value,
+ "PhysicalDeviceShaderCoreProperties2AMD is not nothrow_move_constructible!" );
+
+//=== VK_AMD_device_coherent_memory ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD ) == sizeof( VkPhysicalDeviceCoherentMemoryFeaturesAMD ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD>::value,
+ "PhysicalDeviceCoherentMemoryFeaturesAMD is not nothrow_move_constructible!" );
+
+//=== VK_EXT_shader_image_atomic_int64 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT>::value,
+ "PhysicalDeviceShaderImageAtomicInt64FeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_memory_budget ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT ) == sizeof( VkPhysicalDeviceMemoryBudgetPropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT>::value,
+ "PhysicalDeviceMemoryBudgetPropertiesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_memory_priority ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT ) == sizeof( VkPhysicalDeviceMemoryPriorityFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT>::value,
+ "PhysicalDeviceMemoryPriorityFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT ) == sizeof( VkMemoryPriorityAllocateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT>::value,
+ "MemoryPriorityAllocateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_surface_protected_capabilities ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR ) == sizeof( VkSurfaceProtectedCapabilitiesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR>::value,
+ "SurfaceProtectedCapabilitiesKHR is not nothrow_move_constructible!" );
+
+//=== VK_NV_dedicated_allocation_image_aliasing ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) ==
+ sizeof( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>::value,
+ "PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_buffer_device_address ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT>::value,
+ "PhysicalDeviceBufferDeviceAddressFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT ) == sizeof( VkBufferDeviceAddressCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT>::value,
+ "BufferDeviceAddressCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_validation_features ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT ) == sizeof( VkValidationFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT>::value,
+ "ValidationFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_present_wait ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR ) == sizeof( VkPhysicalDevicePresentWaitFeaturesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR>::value,
+ "PhysicalDevicePresentWaitFeaturesKHR is not nothrow_move_constructible!" );
+
+//=== VK_NV_cooperative_matrix ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV ) == sizeof( VkCooperativeMatrixPropertiesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV>::value,
+ "CooperativeMatrixPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixFeaturesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV>::value,
+ "PhysicalDeviceCooperativeMatrixFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV ) ==
+ sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV>::value,
+ "PhysicalDeviceCooperativeMatrixPropertiesNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_coverage_reduction_mode ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV ) ==
+ sizeof( VkPhysicalDeviceCoverageReductionModeFeaturesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV>::value,
+ "PhysicalDeviceCoverageReductionModeFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV ) == sizeof( VkPipelineCoverageReductionStateCreateInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV>::value,
+ "PipelineCoverageReductionStateCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV ) == sizeof( VkFramebufferMixedSamplesCombinationNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV>::value,
+ "FramebufferMixedSamplesCombinationNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_fragment_shader_interlock ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT>::value,
+ "PhysicalDeviceFragmentShaderInterlockFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_ycbcr_image_arrays ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT ) == sizeof( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT>::value,
+ "PhysicalDeviceYcbcrImageArraysFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_provoking_vertex ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT ) == sizeof( VkPhysicalDeviceProvokingVertexFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT>::value,
+ "PhysicalDeviceProvokingVertexFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT ) == sizeof( VkPhysicalDeviceProvokingVertexPropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT>::value,
+ "PhysicalDeviceProvokingVertexPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT ) ==
+ sizeof( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT>::value,
+ "PipelineRasterizationProvokingVertexStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+//=== VK_EXT_full_screen_exclusive ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT>::value,
+ "SurfaceFullScreenExclusiveInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT ) == sizeof( VkSurfaceCapabilitiesFullScreenExclusiveEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT>::value,
+ "SurfaceCapabilitiesFullScreenExclusiveEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveWin32InfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT>::value,
+ "SurfaceFullScreenExclusiveWin32InfoEXT is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+//=== VK_EXT_headless_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT ) == sizeof( VkHeadlessSurfaceCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT>::value,
+ "HeadlessSurfaceCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_line_rasterization ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT>::value,
+ "PhysicalDeviceLineRasterizationFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT ) ==
+ sizeof( VkPhysicalDeviceLineRasterizationPropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT>::value,
+ "PhysicalDeviceLineRasterizationPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT ) ==
+ sizeof( VkPipelineRasterizationLineStateCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT>::value,
+ "PipelineRasterizationLineStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_shader_atomic_float ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT>::value,
+ "PhysicalDeviceShaderAtomicFloatFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_index_type_uint8 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT ) == sizeof( VkPhysicalDeviceIndexTypeUint8FeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT>::value,
+ "PhysicalDeviceIndexTypeUint8FeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_extended_dynamic_state ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT>::value,
+ "PhysicalDeviceExtendedDynamicStateFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_deferred_host_operations ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeferredOperationKHR ) == sizeof( VkDeferredOperationKHR ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::value,
+ "DeferredOperationKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_pipeline_executable_properties ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) ==
+ sizeof( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>::value,
+ "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineInfoKHR ) == sizeof( VkPipelineInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineInfoKHR>::value,
+ "PipelineInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR ) == sizeof( VkPipelineExecutablePropertiesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>::value,
+ "PipelineExecutablePropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR ) == sizeof( VkPipelineExecutableInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR>::value,
+ "PipelineExecutableInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) == sizeof( VkPipelineExecutableStatisticValueKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR>::value,
+ "PipelineExecutableStatisticValueKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR ) == sizeof( VkPipelineExecutableStatisticKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR>::value,
+ "PipelineExecutableStatisticKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR ) ==
+ sizeof( VkPipelineExecutableInternalRepresentationKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>::value,
+ "PipelineExecutableInternalRepresentationKHR is not nothrow_move_constructible!" );
+
+//=== VK_EXT_shader_atomic_float2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT>::value,
+ "PhysicalDeviceShaderAtomicFloat2FeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_NV_device_generated_commands ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV ) ==
+ sizeof( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV>::value,
+ "PhysicalDeviceDeviceGeneratedCommandsPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV ) ==
+ sizeof( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV>::value,
+ "PhysicalDeviceDeviceGeneratedCommandsFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV ) == sizeof( VkGraphicsShaderGroupCreateInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV>::value,
+ "GraphicsShaderGroupCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV ) == sizeof( VkGraphicsPipelineShaderGroupsCreateInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV>::value,
+ "GraphicsPipelineShaderGroupsCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV ) == sizeof( VkBindShaderGroupIndirectCommandNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV>::value,
+ "BindShaderGroupIndirectCommandNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV ) == sizeof( VkBindIndexBufferIndirectCommandNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV>::value,
+ "BindIndexBufferIndirectCommandNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV ) == sizeof( VkBindVertexBufferIndirectCommandNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV>::value,
+ "BindVertexBufferIndirectCommandNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV ) == sizeof( VkSetStateFlagsIndirectCommandNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV>::value,
+ "SetStateFlagsIndirectCommandNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV ) == sizeof( VkIndirectCommandsLayoutNV ),
+ "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::value,
+ "IndirectCommandsLayoutNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV ) == sizeof( VkIndirectCommandsStreamNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV>::value,
+ "IndirectCommandsStreamNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV ) == sizeof( VkIndirectCommandsLayoutTokenNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV>::value,
+ "IndirectCommandsLayoutTokenNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV ) == sizeof( VkIndirectCommandsLayoutCreateInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV>::value,
+ "IndirectCommandsLayoutCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV ) == sizeof( VkGeneratedCommandsInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV>::value,
+ "GeneratedCommandsInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV ) == sizeof( VkGeneratedCommandsMemoryRequirementsInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV>::value,
+ "GeneratedCommandsMemoryRequirementsInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_inherited_viewport_scissor ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV ) ==
+ sizeof( VkPhysicalDeviceInheritedViewportScissorFeaturesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV>::value,
+ "PhysicalDeviceInheritedViewportScissorFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV ) ==
+ sizeof( VkCommandBufferInheritanceViewportScissorInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV>::value,
+ "CommandBufferInheritanceViewportScissorInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_texel_buffer_alignment ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT>::value,
+ "PhysicalDeviceTexelBufferAlignmentFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_QCOM_render_pass_transform ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM ) == sizeof( VkRenderPassTransformBeginInfoQCOM ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM>::value,
+ "RenderPassTransformBeginInfoQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM ) ==
+ sizeof( VkCommandBufferInheritanceRenderPassTransformInfoQCOM ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM>::value,
+ "CommandBufferInheritanceRenderPassTransformInfoQCOM is not nothrow_move_constructible!" );
+
+//=== VK_EXT_device_memory_report ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT>::value,
+ "PhysicalDeviceDeviceMemoryReportFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT ) == sizeof( VkDeviceDeviceMemoryReportCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT>::value,
+ "DeviceDeviceMemoryReportCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT ) == sizeof( VkDeviceMemoryReportCallbackDataEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT>::value,
+ "DeviceMemoryReportCallbackDataEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_robustness2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT ) == sizeof( VkPhysicalDeviceRobustness2FeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT>::value,
+ "PhysicalDeviceRobustness2FeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT ) == sizeof( VkPhysicalDeviceRobustness2PropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT>::value,
+ "PhysicalDeviceRobustness2PropertiesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_custom_border_color ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT ) == sizeof( VkSamplerCustomBorderColorCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT>::value,
+ "SamplerCustomBorderColorCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT ) ==
+ sizeof( VkPhysicalDeviceCustomBorderColorPropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT>::value,
+ "PhysicalDeviceCustomBorderColorPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT ) == sizeof( VkPhysicalDeviceCustomBorderColorFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT>::value,
+ "PhysicalDeviceCustomBorderColorFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_pipeline_library ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR ) == sizeof( VkPipelineLibraryCreateInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR>::value,
+ "PipelineLibraryCreateInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_NV_present_barrier ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV ) == sizeof( VkPhysicalDevicePresentBarrierFeaturesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV>::value,
+ "PhysicalDevicePresentBarrierFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV ) == sizeof( VkSurfaceCapabilitiesPresentBarrierNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV>::value,
+ "SurfaceCapabilitiesPresentBarrierNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV ) == sizeof( VkSwapchainPresentBarrierCreateInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV>::value,
+ "SwapchainPresentBarrierCreateInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_KHR_present_id ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentIdKHR ) == sizeof( VkPresentIdKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentIdKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentIdKHR>::value, "PresentIdKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR ) == sizeof( VkPhysicalDevicePresentIdFeaturesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR>::value,
+ "PhysicalDevicePresentIdFeaturesKHR is not nothrow_move_constructible!" );
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+//=== VK_KHR_video_encode_queue ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR ) == sizeof( VkVideoEncodeInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR>::value,
+ "VideoEncodeInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR ) == sizeof( VkVideoEncodeCapabilitiesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR>::value,
+ "VideoEncodeCapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeUsageInfoKHR ) == sizeof( VkVideoEncodeUsageInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeUsageInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeUsageInfoKHR>::value,
+ "VideoEncodeUsageInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR ) == sizeof( VkVideoEncodeRateControlInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR>::value,
+ "VideoEncodeRateControlInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR ) == sizeof( VkVideoEncodeRateControlLayerInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR>::value,
+ "VideoEncodeRateControlLayerInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+//=== VK_NV_device_diagnostics_config ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV ) == sizeof( VkPhysicalDeviceDiagnosticsConfigFeaturesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV>::value,
+ "PhysicalDeviceDiagnosticsConfigFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV ) == sizeof( VkDeviceDiagnosticsConfigCreateInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV>::value,
+ "DeviceDiagnosticsConfigCreateInfoNV is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+//=== VK_EXT_metal_objects ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalObjectCreateInfoEXT ) == sizeof( VkExportMetalObjectCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalObjectCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalObjectCreateInfoEXT>::value,
+ "ExportMetalObjectCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT ) == sizeof( VkExportMetalObjectsInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT>::value,
+ "ExportMetalObjectsInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalDeviceInfoEXT ) == sizeof( VkExportMetalDeviceInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalDeviceInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalDeviceInfoEXT>::value,
+ "ExportMetalDeviceInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalCommandQueueInfoEXT ) == sizeof( VkExportMetalCommandQueueInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalCommandQueueInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalCommandQueueInfoEXT>::value,
+ "ExportMetalCommandQueueInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalBufferInfoEXT ) == sizeof( VkExportMetalBufferInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalBufferInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalBufferInfoEXT>::value,
+ "ExportMetalBufferInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMetalBufferInfoEXT ) == sizeof( VkImportMetalBufferInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMetalBufferInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMetalBufferInfoEXT>::value,
+ "ImportMetalBufferInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalTextureInfoEXT ) == sizeof( VkExportMetalTextureInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalTextureInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalTextureInfoEXT>::value,
+ "ExportMetalTextureInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMetalTextureInfoEXT ) == sizeof( VkImportMetalTextureInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMetalTextureInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMetalTextureInfoEXT>::value,
+ "ImportMetalTextureInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalIOSurfaceInfoEXT ) == sizeof( VkExportMetalIOSurfaceInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalIOSurfaceInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalIOSurfaceInfoEXT>::value,
+ "ExportMetalIOSurfaceInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMetalIOSurfaceInfoEXT ) == sizeof( VkImportMetalIOSurfaceInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMetalIOSurfaceInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMetalIOSurfaceInfoEXT>::value,
+ "ImportMetalIOSurfaceInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalSharedEventInfoEXT ) == sizeof( VkExportMetalSharedEventInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalSharedEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalSharedEventInfoEXT>::value,
+ "ExportMetalSharedEventInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMetalSharedEventInfoEXT ) == sizeof( VkImportMetalSharedEventInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMetalSharedEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMetalSharedEventInfoEXT>::value,
+ "ImportMetalSharedEventInfoEXT is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+//=== VK_KHR_synchronization2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV ) == sizeof( VkQueueFamilyCheckpointProperties2NV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV>::value,
+ "QueueFamilyCheckpointProperties2NV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CheckpointData2NV ) == sizeof( VkCheckpointData2NV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CheckpointData2NV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CheckpointData2NV>::value,
+ "CheckpointData2NV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_graphics_pipeline_library ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT>::value,
+ "PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT ) ==
+ sizeof( VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT>::value,
+ "PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT ) == sizeof( VkGraphicsPipelineLibraryCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT>::value,
+ "GraphicsPipelineLibraryCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_AMD_shader_early_and_late_fragment_tests ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD ) ==
+ sizeof( VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD>::value,
+ "PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD is not nothrow_move_constructible!" );
+
+//=== VK_KHR_fragment_shader_barycentric ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR ) ==
+ sizeof( VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR>::value,
+ "PhysicalDeviceFragmentShaderBarycentricFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricPropertiesKHR ) ==
+ sizeof( VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricPropertiesKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricPropertiesKHR>::value,
+ "PhysicalDeviceFragmentShaderBarycentricPropertiesKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_shader_subgroup_uniform_control_flow ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR ) ==
+ sizeof( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR>::value,
+ "PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR is not nothrow_move_constructible!" );
+
+//=== VK_NV_fragment_shading_rate_enums ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV ) ==
+ sizeof( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV>::value,
+ "PhysicalDeviceFragmentShadingRateEnumsFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV ) ==
+ sizeof( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV>::value,
+ "PhysicalDeviceFragmentShadingRateEnumsPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV ) ==
+ sizeof( VkPipelineFragmentShadingRateEnumStateCreateInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV>::value,
+ "PipelineFragmentShadingRateEnumStateCreateInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_ray_tracing_motion_blur ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryMotionTrianglesDataNV ) ==
+ sizeof( VkAccelerationStructureGeometryMotionTrianglesDataNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryMotionTrianglesDataNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryMotionTrianglesDataNV>::value,
+ "AccelerationStructureGeometryMotionTrianglesDataNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV ) == sizeof( VkAccelerationStructureMotionInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV>::value,
+ "AccelerationStructureMotionInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV ) == sizeof( VkAccelerationStructureMotionInstanceNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV>::value,
+ "AccelerationStructureMotionInstanceNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV ) == sizeof( VkAccelerationStructureMotionInstanceDataNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV>::value,
+ "AccelerationStructureMotionInstanceDataNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV ) ==
+ sizeof( VkAccelerationStructureMatrixMotionInstanceNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV>::value,
+ "AccelerationStructureMatrixMotionInstanceNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV ) == sizeof( VkAccelerationStructureSRTMotionInstanceNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV>::value,
+ "AccelerationStructureSRTMotionInstanceNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SRTDataNV ) == sizeof( VkSRTDataNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SRTDataNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SRTDataNV>::value, "SRTDataNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV ) ==
+ sizeof( VkPhysicalDeviceRayTracingMotionBlurFeaturesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV>::value,
+ "PhysicalDeviceRayTracingMotionBlurFeaturesNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_mesh_shader ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesEXT ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesEXT>::value,
+ "PhysicalDeviceMeshShaderFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesEXT ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesEXT>::value,
+ "PhysicalDeviceMeshShaderPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandEXT ) == sizeof( VkDrawMeshTasksIndirectCommandEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandEXT>::value,
+ "DrawMeshTasksIndirectCommandEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_ycbcr_2plane_444_formats ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT>::value,
+ "PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_fragment_density_map2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT>::value,
+ "PhysicalDeviceFragmentDensityMap2FeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT ) ==
+ sizeof( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT>::value,
+ "PhysicalDeviceFragmentDensityMap2PropertiesEXT is not nothrow_move_constructible!" );
+
+//=== VK_QCOM_rotated_copy_commands ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM ) == sizeof( VkCopyCommandTransformInfoQCOM ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM>::value,
+ "CopyCommandTransformInfoQCOM is not nothrow_move_constructible!" );
+
+//=== VK_KHR_workgroup_memory_explicit_layout ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR ) ==
+ sizeof( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>::value,
+ "PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR is not nothrow_move_constructible!" );
+
+//=== VK_EXT_image_compression_control ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlFeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceImageCompressionControlFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlFeaturesEXT>::value,
+ "PhysicalDeviceImageCompressionControlFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCompressionControlEXT ) == sizeof( VkImageCompressionControlEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCompressionControlEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageCompressionControlEXT>::value,
+ "ImageCompressionControlEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT ) == sizeof( VkSubresourceLayout2EXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>::value,
+ "SubresourceLayout2EXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresource2EXT ) == sizeof( VkImageSubresource2EXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresource2EXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresource2EXT>::value,
+ "ImageSubresource2EXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT ) == sizeof( VkImageCompressionPropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT>::value,
+ "ImageCompressionPropertiesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_attachment_feedback_loop_layout ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT>::value,
+ "PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_4444_formats ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT ) == sizeof( VkPhysicalDevice4444FormatsFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT>::value,
+ "PhysicalDevice4444FormatsFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_device_fault ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT ) == sizeof( VkPhysicalDeviceFaultFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT>::value,
+ "PhysicalDeviceFaultFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT ) == sizeof( VkDeviceFaultCountsEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT>::value,
+ "DeviceFaultCountsEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT ) == sizeof( VkDeviceFaultInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>::value,
+ "DeviceFaultInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT ) == sizeof( VkDeviceFaultAddressInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT>::value,
+ "DeviceFaultAddressInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT ) == sizeof( VkDeviceFaultVendorInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT>::value,
+ "DeviceFaultVendorInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT ) == sizeof( VkDeviceFaultVendorBinaryHeaderVersionOneEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT>::value,
+ "DeviceFaultVendorBinaryHeaderVersionOneEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_rgba10x6_formats ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT ) == sizeof( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT>::value,
+ "PhysicalDeviceRGBA10X6FormatsFeaturesEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+//=== VK_EXT_directfb_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT ) == sizeof( VkDirectFBSurfaceCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT>::value,
+ "DirectFBSurfaceCreateInfoEXT is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+//=== VK_KHR_ray_tracing_pipeline ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR ) == sizeof( VkRayTracingShaderGroupCreateInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR>::value,
+ "RayTracingShaderGroupCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR ) == sizeof( VkRayTracingPipelineCreateInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR>::value,
+ "RayTracingPipelineCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR ) ==
+ sizeof( VkPhysicalDeviceRayTracingPipelineFeaturesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR>::value,
+ "PhysicalDeviceRayTracingPipelineFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR ) ==
+ sizeof( VkPhysicalDeviceRayTracingPipelinePropertiesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR>::value,
+ "PhysicalDeviceRayTracingPipelinePropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR ) == sizeof( VkStridedDeviceAddressRegionKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR>::value,
+ "StridedDeviceAddressRegionKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR ) == sizeof( VkTraceRaysIndirectCommandKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR>::value,
+ "TraceRaysIndirectCommandKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR ) == sizeof( VkRayTracingPipelineInterfaceCreateInfoKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR>::value,
+ "RayTracingPipelineInterfaceCreateInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_ray_query ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR ) == sizeof( VkPhysicalDeviceRayQueryFeaturesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR>::value,
+ "PhysicalDeviceRayQueryFeaturesKHR is not nothrow_move_constructible!" );
+
+//=== VK_EXT_vertex_input_dynamic_state ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT>::value,
+ "PhysicalDeviceVertexInputDynamicStateFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT ) == sizeof( VkVertexInputBindingDescription2EXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT>::value,
+ "VertexInputBindingDescription2EXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT ) == sizeof( VkVertexInputAttributeDescription2EXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT>::value,
+ "VertexInputAttributeDescription2EXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_physical_device_drm ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT ) == sizeof( VkPhysicalDeviceDrmPropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT>::value,
+ "PhysicalDeviceDrmPropertiesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_device_address_binding_report ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceAddressBindingReportFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT>::value,
+ "PhysicalDeviceAddressBindingReportFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT ) == sizeof( VkDeviceAddressBindingCallbackDataEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT>::value,
+ "DeviceAddressBindingCallbackDataEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_depth_clip_control ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClipControlFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT>::value,
+ "PhysicalDeviceDepthClipControlFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT ) ==
+ sizeof( VkPipelineViewportDepthClipControlCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT>::value,
+ "PipelineViewportDepthClipControlCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_primitive_topology_list_restart ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT ) ==
+ sizeof( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT>::value,
+ "PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+//=== VK_FUCHSIA_external_memory ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA ) == sizeof( VkImportMemoryZirconHandleInfoFUCHSIA ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA>::value,
+ "ImportMemoryZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA ) == sizeof( VkMemoryZirconHandlePropertiesFUCHSIA ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::value,
+ "MemoryZirconHandlePropertiesFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA ) == sizeof( VkMemoryGetZirconHandleInfoFUCHSIA ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA>::value,
+ "MemoryGetZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+//=== VK_FUCHSIA_external_semaphore ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA ) == sizeof( VkImportSemaphoreZirconHandleInfoFUCHSIA ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA>::value,
+ "ImportSemaphoreZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA ) == sizeof( VkSemaphoreGetZirconHandleInfoFUCHSIA ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA>::value,
+ "SemaphoreGetZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+//=== VK_FUCHSIA_buffer_collection ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA ) == sizeof( VkBufferCollectionFUCHSIA ),
+ "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA>::value,
+ "BufferCollectionFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA ) == sizeof( VkBufferCollectionCreateInfoFUCHSIA ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA>::value,
+ "BufferCollectionCreateInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA ) == sizeof( VkImportMemoryBufferCollectionFUCHSIA ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA>::value,
+ "ImportMemoryBufferCollectionFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA ) == sizeof( VkBufferCollectionImageCreateInfoFUCHSIA ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA>::value,
+ "BufferCollectionImageCreateInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA ) == sizeof( VkBufferConstraintsInfoFUCHSIA ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA>::value,
+ "BufferConstraintsInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA ) == sizeof( VkBufferCollectionBufferCreateInfoFUCHSIA ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA>::value,
+ "BufferCollectionBufferCreateInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA ) == sizeof( VkBufferCollectionPropertiesFUCHSIA ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA>::value,
+ "BufferCollectionPropertiesFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA ) == sizeof( VkSysmemColorSpaceFUCHSIA ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA>::value,
+ "SysmemColorSpaceFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA ) == sizeof( VkImageConstraintsInfoFUCHSIA ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA>::value,
+ "ImageConstraintsInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA ) == sizeof( VkImageFormatConstraintsInfoFUCHSIA ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA>::value,
+ "ImageFormatConstraintsInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA ) == sizeof( VkBufferCollectionConstraintsInfoFUCHSIA ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA>::value,
+ "BufferCollectionConstraintsInfoFUCHSIA is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+//=== VK_HUAWEI_subpass_shading ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI ) == sizeof( VkSubpassShadingPipelineCreateInfoHUAWEI ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI>::value,
+ "SubpassShadingPipelineCreateInfoHUAWEI is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI ) == sizeof( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI>::value,
+ "PhysicalDeviceSubpassShadingFeaturesHUAWEI is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI ) ==
+ sizeof( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI>::value,
+ "PhysicalDeviceSubpassShadingPropertiesHUAWEI is not nothrow_move_constructible!" );
+
+//=== VK_HUAWEI_invocation_mask ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI ) == sizeof( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI>::value,
+ "PhysicalDeviceInvocationMaskFeaturesHUAWEI is not nothrow_move_constructible!" );
+
+//=== VK_NV_external_memory_rdma ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV ) == sizeof( VkMemoryGetRemoteAddressInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV>::value,
+ "MemoryGetRemoteAddressInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV ) == sizeof( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV>::value,
+ "PhysicalDeviceExternalMemoryRDMAFeaturesNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_pipeline_properties ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT ) == sizeof( VkPipelinePropertiesIdentifierEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT>::value,
+ "PipelinePropertiesIdentifierEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT ) ==
+ sizeof( VkPhysicalDevicePipelinePropertiesFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT>::value,
+ "PhysicalDevicePipelinePropertiesFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_multisampled_render_to_single_sampled ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT>::value,
+ "PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT ) == sizeof( VkSubpassResolvePerformanceQueryEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT>::value,
+ "SubpassResolvePerformanceQueryEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT ) == sizeof( VkMultisampledRenderToSingleSampledInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT>::value,
+ "MultisampledRenderToSingleSampledInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_extended_dynamic_state2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT>::value,
+ "PhysicalDeviceExtendedDynamicState2FeaturesEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+//=== VK_QNX_screen_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX ) == sizeof( VkScreenSurfaceCreateInfoQNX ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX>::value,
+ "ScreenSurfaceCreateInfoQNX is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+//=== VK_EXT_color_write_enable ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT ) == sizeof( VkPhysicalDeviceColorWriteEnableFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT>::value,
+ "PhysicalDeviceColorWriteEnableFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT ) == sizeof( VkPipelineColorWriteCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT>::value,
+ "PipelineColorWriteCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_primitives_generated_query ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT ) ==
+ sizeof( VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT>::value,
+ "PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_ray_tracing_maintenance1 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMaintenance1FeaturesKHR ) ==
+ sizeof( VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMaintenance1FeaturesKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMaintenance1FeaturesKHR>::value,
+ "PhysicalDeviceRayTracingMaintenance1FeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR ) == sizeof( VkTraceRaysIndirectCommand2KHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR>::value,
+ "TraceRaysIndirectCommand2KHR is not nothrow_move_constructible!" );
+
+//=== VK_EXT_image_view_min_lod ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT ) == sizeof( VkPhysicalDeviceImageViewMinLodFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT>::value,
+ "PhysicalDeviceImageViewMinLodFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT ) == sizeof( VkImageViewMinLodCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT>::value,
+ "ImageViewMinLodCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_multi_draw ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT ) == sizeof( VkPhysicalDeviceMultiDrawFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT>::value,
+ "PhysicalDeviceMultiDrawFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT ) == sizeof( VkPhysicalDeviceMultiDrawPropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT>::value,
+ "PhysicalDeviceMultiDrawPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT ) == sizeof( VkMultiDrawInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT>::value,
+ "MultiDrawInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT ) == sizeof( VkMultiDrawIndexedInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT>::value,
+ "MultiDrawIndexedInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_image_2d_view_of_3d ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT ) == sizeof( VkPhysicalDeviceImage2DViewOf3DFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT>::value,
+ "PhysicalDeviceImage2DViewOf3DFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_opacity_micromap ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT ) == sizeof( VkMicromapBuildInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT>::value,
+ "MicromapBuildInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapUsageEXT ) == sizeof( VkMicromapUsageEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapUsageEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapUsageEXT>::value,
+ "MicromapUsageEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT ) == sizeof( VkMicromapCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT>::value,
+ "MicromapCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapEXT ) == sizeof( VkMicromapEXT ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapEXT>::value, "MicromapEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT ) == sizeof( VkPhysicalDeviceOpacityMicromapFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT>::value,
+ "PhysicalDeviceOpacityMicromapFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT ) == sizeof( VkPhysicalDeviceOpacityMicromapPropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT>::value,
+ "PhysicalDeviceOpacityMicromapPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT ) == sizeof( VkMicromapVersionInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT>::value,
+ "MicromapVersionInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT ) == sizeof( VkCopyMicromapToMemoryInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT>::value,
+ "CopyMicromapToMemoryInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT ) == sizeof( VkCopyMemoryToMicromapInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT>::value,
+ "CopyMemoryToMicromapInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT ) == sizeof( VkCopyMicromapInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT>::value,
+ "CopyMicromapInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT ) == sizeof( VkMicromapBuildSizesInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT>::value,
+ "MicromapBuildSizesInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT ) ==
+ sizeof( VkAccelerationStructureTrianglesOpacityMicromapEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT>::value,
+ "AccelerationStructureTrianglesOpacityMicromapEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapTriangleEXT ) == sizeof( VkMicromapTriangleEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapTriangleEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapTriangleEXT>::value,
+ "MicromapTriangleEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_border_color_swizzle ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT>::value,
+ "PhysicalDeviceBorderColorSwizzleFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT ) ==
+ sizeof( VkSamplerBorderColorComponentMappingCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT>::value,
+ "SamplerBorderColorComponentMappingCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_pageable_device_local_memory ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT ) ==
+ sizeof( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT>::value,
+ "PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_VALVE_descriptor_set_host_mapping ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE ) ==
+ sizeof( VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE>::value,
+ "PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE ) == sizeof( VkDescriptorSetBindingReferenceVALVE ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE>::value,
+ "DescriptorSetBindingReferenceVALVE is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE ) == sizeof( VkDescriptorSetLayoutHostMappingInfoVALVE ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE>::value,
+ "DescriptorSetLayoutHostMappingInfoVALVE is not nothrow_move_constructible!" );
+
+//=== VK_EXT_depth_clamp_zero_one ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClampZeroOneFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesEXT>::value,
+ "PhysicalDeviceDepthClampZeroOneFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_non_seamless_cube_map ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT>::value,
+ "PhysicalDeviceNonSeamlessCubeMapFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_QCOM_fragment_density_map_offset ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM ) ==
+ sizeof( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM>::value,
+ "PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM ) ==
+ sizeof( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM>::value,
+ "PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM ) == sizeof( VkSubpassFragmentDensityMapOffsetEndInfoQCOM ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM>::value,
+ "SubpassFragmentDensityMapOffsetEndInfoQCOM is not nothrow_move_constructible!" );
+
+//=== VK_NV_linear_color_attachment ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV ) ==
+ sizeof( VkPhysicalDeviceLinearColorAttachmentFeaturesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV>::value,
+ "PhysicalDeviceLinearColorAttachmentFeaturesNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_image_compression_control_swapchain ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT>::value,
+ "PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_QCOM_image_processing ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewSampleWeightCreateInfoQCOM ) == sizeof( VkImageViewSampleWeightCreateInfoQCOM ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewSampleWeightCreateInfoQCOM>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewSampleWeightCreateInfoQCOM>::value,
+ "ImageViewSampleWeightCreateInfoQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingFeaturesQCOM ) == sizeof( VkPhysicalDeviceImageProcessingFeaturesQCOM ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingFeaturesQCOM>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingFeaturesQCOM>::value,
+ "PhysicalDeviceImageProcessingFeaturesQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingPropertiesQCOM ) ==
+ sizeof( VkPhysicalDeviceImageProcessingPropertiesQCOM ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingPropertiesQCOM>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingPropertiesQCOM>::value,
+ "PhysicalDeviceImageProcessingPropertiesQCOM is not nothrow_move_constructible!" );
+
+//=== VK_EXT_extended_dynamic_state3 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceExtendedDynamicState3FeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT>::value,
+ "PhysicalDeviceExtendedDynamicState3FeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT ) ==
+ sizeof( VkPhysicalDeviceExtendedDynamicState3PropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT>::value,
+ "PhysicalDeviceExtendedDynamicState3PropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT ) == sizeof( VkColorBlendEquationEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT>::value,
+ "ColorBlendEquationEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT ) == sizeof( VkColorBlendAdvancedEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT>::value,
+ "ColorBlendAdvancedEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_subpass_merge_feedback ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassMergeFeedbackFeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassMergeFeedbackFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassMergeFeedbackFeaturesEXT>::value,
+ "PhysicalDeviceSubpassMergeFeedbackFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreationControlEXT ) == sizeof( VkRenderPassCreationControlEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassCreationControlEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassCreationControlEXT>::value,
+ "RenderPassCreationControlEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT ) == sizeof( VkRenderPassCreationFeedbackInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT>::value,
+ "RenderPassCreationFeedbackInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackCreateInfoEXT ) == sizeof( VkRenderPassCreationFeedbackCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackCreateInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackCreateInfoEXT>::value,
+ "RenderPassCreationFeedbackCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT ) == sizeof( VkRenderPassSubpassFeedbackInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT>::value,
+ "RenderPassSubpassFeedbackInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackCreateInfoEXT ) == sizeof( VkRenderPassSubpassFeedbackCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackCreateInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackCreateInfoEXT>::value,
+ "RenderPassSubpassFeedbackCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_shader_module_identifier ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierFeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierFeaturesEXT>::value,
+ "PhysicalDeviceShaderModuleIdentifierFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierPropertiesEXT ) ==
+ sizeof( VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierPropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierPropertiesEXT>::value,
+ "PhysicalDeviceShaderModuleIdentifierPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageModuleIdentifierCreateInfoEXT ) ==
+ sizeof( VkPipelineShaderStageModuleIdentifierCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineShaderStageModuleIdentifierCreateInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineShaderStageModuleIdentifierCreateInfoEXT>::value,
+ "PipelineShaderStageModuleIdentifierCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT ) == sizeof( VkShaderModuleIdentifierEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT>::value,
+ "ShaderModuleIdentifierEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_rasterization_order_attachment_access ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT>::value,
+ "PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_NV_optical_flow ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV ) == sizeof( VkPhysicalDeviceOpticalFlowFeaturesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV>::value,
+ "PhysicalDeviceOpticalFlowFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV ) == sizeof( VkPhysicalDeviceOpticalFlowPropertiesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV>::value,
+ "PhysicalDeviceOpticalFlowPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV ) == sizeof( VkOpticalFlowImageFormatInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV>::value,
+ "OpticalFlowImageFormatInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV ) == sizeof( VkOpticalFlowImageFormatPropertiesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>::value,
+ "OpticalFlowImageFormatPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV ) == sizeof( VkOpticalFlowSessionNV ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV>::value,
+ "OpticalFlowSessionNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV ) == sizeof( VkOpticalFlowSessionCreateInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV>::value,
+ "OpticalFlowSessionCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV ) == sizeof( VkOpticalFlowSessionCreatePrivateDataInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV>::value,
+ "OpticalFlowSessionCreatePrivateDataInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV ) == sizeof( VkOpticalFlowExecuteInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV>::value,
+ "OpticalFlowExecuteInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_legacy_dithering ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT ) == sizeof( VkPhysicalDeviceLegacyDitheringFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT>::value,
+ "PhysicalDeviceLegacyDitheringFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_pipeline_protected_access ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT ) ==
+ sizeof( VkPhysicalDevicePipelineProtectedAccessFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT>::value,
+ "PhysicalDevicePipelineProtectedAccessFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_QCOM_tile_properties ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM ) == sizeof( VkPhysicalDeviceTilePropertiesFeaturesQCOM ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM>::value,
+ "PhysicalDeviceTilePropertiesFeaturesQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TilePropertiesQCOM ) == sizeof( VkTilePropertiesQCOM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM>::value,
+ "TilePropertiesQCOM is not nothrow_move_constructible!" );
+
+//=== VK_SEC_amigo_profiling ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAmigoProfilingFeaturesSEC ) == sizeof( VkPhysicalDeviceAmigoProfilingFeaturesSEC ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceAmigoProfilingFeaturesSEC>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceAmigoProfilingFeaturesSEC>::value,
+ "PhysicalDeviceAmigoProfilingFeaturesSEC is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AmigoProfilingSubmitInfoSEC ) == sizeof( VkAmigoProfilingSubmitInfoSEC ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AmigoProfilingSubmitInfoSEC>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AmigoProfilingSubmitInfoSEC>::value,
+ "AmigoProfilingSubmitInfoSEC is not nothrow_move_constructible!" );
+
+//=== VK_EXT_mutable_descriptor_type ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesEXT>::value,
+ "PhysicalDeviceMutableDescriptorTypeFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT ) == sizeof( VkMutableDescriptorTypeListEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT>::value,
+ "MutableDescriptorTypeListEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoEXT ) == sizeof( VkMutableDescriptorTypeCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoEXT>::value,
+ "MutableDescriptorTypeCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_ARM_shader_core_builtins ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsFeaturesARM ) ==
+ sizeof( VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsFeaturesARM>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsFeaturesARM>::value,
+ "PhysicalDeviceShaderCoreBuiltinsFeaturesARM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsPropertiesARM ) ==
+ sizeof( VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsPropertiesARM>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsPropertiesARM>::value,
+ "PhysicalDeviceShaderCoreBuiltinsPropertiesARM is not nothrow_move_constructible!" );
+
+#endif
diff --git a/thirdparty/vulkan/include/vulkan/vulkan_structs.hpp b/thirdparty/vulkan/include/vulkan/vulkan_structs.hpp
index 9991330a3e..bdd7240569 100644
--- a/thirdparty/vulkan/include/vulkan/vulkan_structs.hpp
+++ b/thirdparty/vulkan/include/vulkan/vulkan_structs.hpp
@@ -8,6 +8,8 @@
#ifndef VULKAN_STRUCTS_HPP
#define VULKAN_STRUCTS_HPP
+#include <cstring> // strcmp
+
namespace VULKAN_HPP_NAMESPACE
{
//===============
@@ -19,25 +21,20 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkAabbPositionsKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR AabbPositionsKHR( float minX_ = {},
- float minY_ = {},
- float minZ_ = {},
- float maxX_ = {},
- float maxY_ = {},
- float maxZ_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR
+ AabbPositionsKHR( float minX_ = {}, float minY_ = {}, float minZ_ = {}, float maxX_ = {}, float maxY_ = {}, float maxZ_ = {} ) VULKAN_HPP_NOEXCEPT
: minX( minX_ )
, minY( minY_ )
, minZ( minZ_ )
, maxX( maxX_ )
, maxY( maxY_ )
, maxZ( maxZ_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR AabbPositionsKHR( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AabbPositionsKHR( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : AabbPositionsKHR( *reinterpret_cast<AabbPositionsKHR const *>( &rhs ) )
- {}
+ AabbPositionsKHR( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT : AabbPositionsKHR( *reinterpret_cast<AabbPositionsKHR const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
AabbPositionsKHR & operator=( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -86,17 +83,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAabbPositionsKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAabbPositionsKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAabbPositionsKHR *>( this );
}
- explicit operator VkAabbPositionsKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkAabbPositionsKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAabbPositionsKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -113,11 +110,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( AabbPositionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( minX == rhs.minX ) && ( minY == rhs.minY ) && ( minZ == rhs.minZ ) && ( maxX == rhs.maxX ) &&
- ( maxY == rhs.maxY ) && ( maxZ == rhs.maxZ );
+ return ( minX == rhs.minX ) && ( minY == rhs.minY ) && ( minZ == rhs.minZ ) && ( maxX == rhs.maxX ) && ( maxY == rhs.maxY ) && ( maxZ == rhs.maxZ );
# endif
}
@@ -135,12 +131,6 @@ namespace VULKAN_HPP_NAMESPACE
float maxY = {};
float maxZ = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AabbPositionsKHR ) == sizeof( VkAabbPositionsKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AabbPositionsKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AabbPositionsKHR>::value,
- "AabbPositionsKHR is not nothrow_move_constructible!" );
using AabbPositionsNV = AabbPositionsKHR;
union DeviceOrHostAddressConstKHR
@@ -148,23 +138,19 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDeviceOrHostAddressConstKHR;
#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} )
- : deviceAddress( deviceAddress_ )
- {}
+ VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) : deviceAddress( deviceAddress_ ) {}
VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR( const void * hostAddress_ ) : hostAddress( hostAddress_ ) {}
#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR &
- setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
{
deviceAddress = deviceAddress_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR &
- setHostAddress( const void * hostAddress_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR & setHostAddress( const void * hostAddress_ ) VULKAN_HPP_NOEXCEPT
{
hostAddress = hostAddress_;
return *this;
@@ -195,64 +181,61 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkAccelerationStructureGeometryTrianglesDataKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eAccelerationStructureGeometryTrianglesDataKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR(
- VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {},
- uint32_t maxVertex_ = {},
- VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16,
- VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData_ = {},
- VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData_ = {} ) VULKAN_HPP_NOEXCEPT
- : vertexFormat( vertexFormat_ )
+ VULKAN_HPP_CONSTEXPR_14
+ AccelerationStructureGeometryTrianglesDataKHR( VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {},
+ uint32_t maxVertex_ = {},
+ VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16,
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , vertexFormat( vertexFormat_ )
, vertexData( vertexData_ )
, vertexStride( vertexStride_ )
, maxVertex( maxVertex_ )
, indexType( indexType_ )
, indexData( indexData_ )
, transformData( transformData_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR(
- AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14
+ AccelerationStructureGeometryTrianglesDataKHR( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AccelerationStructureGeometryTrianglesDataKHR( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs )
- VULKAN_HPP_NOEXCEPT
- : AccelerationStructureGeometryTrianglesDataKHR(
- *reinterpret_cast<AccelerationStructureGeometryTrianglesDataKHR const *>( &rhs ) )
- {}
+ AccelerationStructureGeometryTrianglesDataKHR( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : AccelerationStructureGeometryTrianglesDataKHR( *reinterpret_cast<AccelerationStructureGeometryTrianglesDataKHR const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- AccelerationStructureGeometryTrianglesDataKHR &
- operator=( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ AccelerationStructureGeometryTrianglesDataKHR & operator=( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AccelerationStructureGeometryTrianglesDataKHR &
- operator=( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryTrianglesDataKHR & operator=( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR &
- setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
{
vertexFormat = vertexFormat_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR &
- setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT
+ setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT
{
vertexData = vertexData_;
return *this;
@@ -265,46 +248,44 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR &
- setMaxVertex( uint32_t maxVertex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setMaxVertex( uint32_t maxVertex_ ) VULKAN_HPP_NOEXCEPT
{
maxVertex = maxVertex_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR &
- setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
{
indexType = indexType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR &
- setIndexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexData_ ) VULKAN_HPP_NOEXCEPT
+ setIndexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexData_ ) VULKAN_HPP_NOEXCEPT
{
indexData = indexData_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR &
- setTransformData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & transformData_ ) VULKAN_HPP_NOEXCEPT
+ setTransformData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & transformData_ ) VULKAN_HPP_NOEXCEPT
{
transformData = transformData_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAccelerationStructureGeometryTrianglesDataKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureGeometryTrianglesDataKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureGeometryTrianglesDataKHR *>( this );
}
- explicit operator VkAccelerationStructureGeometryTrianglesDataKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureGeometryTrianglesDataKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureGeometryTrianglesDataKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -320,15 +301,14 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- sType, pNext, vertexFormat, vertexData, vertexStride, maxVertex, indexType, indexData, transformData );
+ return std::tie( sType, pNext, vertexFormat, vertexData, vertexStride, maxVertex, indexType, indexData, transformData );
}
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {};
VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {};
uint32_t maxVertex = {};
@@ -336,15 +316,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData = {};
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR ) ==
- sizeof( VkAccelerationStructureGeometryTrianglesDataKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR>::value,
- "AccelerationStructureGeometryTrianglesDataKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureGeometryTrianglesDataKHR>
@@ -357,71 +328,66 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkAccelerationStructureGeometryAabbsDataKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eAccelerationStructureGeometryAabbsDataKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryAabbsDataKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14
- AccelerationStructureGeometryAabbsDataKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {} ) VULKAN_HPP_NOEXCEPT
- : data( data_ )
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , data( data_ )
, stride( stride_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR(
- AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AccelerationStructureGeometryAabbsDataKHR( VkAccelerationStructureGeometryAabbsDataKHR const & rhs )
- VULKAN_HPP_NOEXCEPT
- : AccelerationStructureGeometryAabbsDataKHR(
- *reinterpret_cast<AccelerationStructureGeometryAabbsDataKHR const *>( &rhs ) )
- {}
+ AccelerationStructureGeometryAabbsDataKHR( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : AccelerationStructureGeometryAabbsDataKHR( *reinterpret_cast<AccelerationStructureGeometryAabbsDataKHR const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- AccelerationStructureGeometryAabbsDataKHR &
- operator=( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ AccelerationStructureGeometryAabbsDataKHR & operator=( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AccelerationStructureGeometryAabbsDataKHR &
- operator=( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryAabbsDataKHR & operator=( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR &
- setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT
+ setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT
{
data = data_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR &
- setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT
{
stride = stride_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAccelerationStructureGeometryAabbsDataKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureGeometryAabbsDataKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureGeometryAabbsDataKHR *>( this );
}
- explicit operator VkAccelerationStructureGeometryAabbsDataKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureGeometryAabbsDataKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureGeometryAabbsDataKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -437,20 +403,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryAabbsDataKHR;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryAabbsDataKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {};
VULKAN_HPP_NAMESPACE::DeviceSize stride = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR ) ==
- sizeof( VkAccelerationStructureGeometryAabbsDataKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR>::value,
- "AccelerationStructureGeometryAabbsDataKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureGeometryAabbsDataKHR>
@@ -463,40 +420,37 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkAccelerationStructureGeometryInstancesDataKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eAccelerationStructureGeometryInstancesDataKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryInstancesDataKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR(
- VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ = {},
- VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {} ) VULKAN_HPP_NOEXCEPT
- : arrayOfPointers( arrayOfPointers_ )
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , arrayOfPointers( arrayOfPointers_ )
, data( data_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR(
- AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14
+ AccelerationStructureGeometryInstancesDataKHR( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AccelerationStructureGeometryInstancesDataKHR( VkAccelerationStructureGeometryInstancesDataKHR const & rhs )
- VULKAN_HPP_NOEXCEPT
- : AccelerationStructureGeometryInstancesDataKHR(
- *reinterpret_cast<AccelerationStructureGeometryInstancesDataKHR const *>( &rhs ) )
- {}
+ AccelerationStructureGeometryInstancesDataKHR( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : AccelerationStructureGeometryInstancesDataKHR( *reinterpret_cast<AccelerationStructureGeometryInstancesDataKHR const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- AccelerationStructureGeometryInstancesDataKHR &
- operator=( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ AccelerationStructureGeometryInstancesDataKHR & operator=( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AccelerationStructureGeometryInstancesDataKHR &
- operator=( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryInstancesDataKHR & operator=( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
@@ -510,24 +464,24 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR &
- setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT
+ setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT
{
data = data_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAccelerationStructureGeometryInstancesDataKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureGeometryInstancesDataKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureGeometryInstancesDataKHR *>( this );
}
- explicit operator VkAccelerationStructureGeometryInstancesDataKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureGeometryInstancesDataKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureGeometryInstancesDataKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -543,20 +497,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryInstancesDataKHR;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers = {};
- VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryInstancesDataKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers = {};
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR ) ==
- sizeof( VkAccelerationStructureGeometryInstancesDataKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR>::value,
- "AccelerationStructureGeometryInstancesDataKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureGeometryInstancesDataKHR>
@@ -569,39 +514,36 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkAccelerationStructureGeometryDataKHR;
#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR(
- VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles_ = {} )
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles_ = {} )
: triangles( triangles_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14
- AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs_ )
- : aabbs( aabbs_ )
- {}
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs_ ) : aabbs( aabbs_ ) {}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR(
- VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances_ )
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances_ )
: instances( instances_ )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & setTriangles(
- VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const & triangles_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR &
+ setTriangles( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const & triangles_ ) VULKAN_HPP_NOEXCEPT
{
triangles = triangles_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR &
- setAabbs( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const & aabbs_ ) VULKAN_HPP_NOEXCEPT
+ setAabbs( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const & aabbs_ ) VULKAN_HPP_NOEXCEPT
{
aabbs = aabbs_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & setInstances(
- VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const & instances_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR &
+ setInstances( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const & instances_ ) VULKAN_HPP_NOEXCEPT
{
instances = instances_;
return *this;
@@ -634,28 +576,30 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkAccelerationStructureGeometryKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR(
- VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles,
- VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry_ = {},
- VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {} ) VULKAN_HPP_NOEXCEPT
- : geometryType( geometryType_ )
+ VULKAN_HPP_CONSTEXPR_14
+ AccelerationStructureGeometryKHR( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry_ = {},
+ VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , geometryType( geometryType_ )
, geometry( geometry_ )
, flags( flags_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14
- AccelerationStructureGeometryKHR( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureGeometryKHR( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureGeometryKHR( *reinterpret_cast<AccelerationStructureGeometryKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- AccelerationStructureGeometryKHR &
- operator=( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ AccelerationStructureGeometryKHR & operator=( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureGeometryKHR & operator=( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -670,39 +614,37 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR &
- setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT
{
geometryType = geometryType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR &
- setGeometry( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & geometry_ ) VULKAN_HPP_NOEXCEPT
+ setGeometry( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & geometry_ ) VULKAN_HPP_NOEXCEPT
{
geometry = geometry_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR &
- setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAccelerationStructureGeometryKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureGeometryKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureGeometryKHR *>( this );
}
- explicit operator VkAccelerationStructureGeometryKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureGeometryKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureGeometryKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -719,20 +661,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryKHR;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles;
- VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry = {};
- VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles;
+ VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry = {};
+ VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR ) ==
- sizeof( VkAccelerationStructureGeometryKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR>::value,
- "AccelerationStructureGeometryKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureGeometryKHR>
@@ -745,16 +679,13 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDeviceOrHostAddressKHR;
#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} )
- : deviceAddress( deviceAddress_ )
- {}
+ VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) : deviceAddress( deviceAddress_ ) {}
VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR( void * hostAddress_ ) : hostAddress( hostAddress_ ) {}
#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR &
- setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
{
deviceAddress = deviceAddress_;
return *this;
@@ -791,23 +722,22 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkAccelerationStructureBuildGeometryInfoKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eAccelerationStructureBuildGeometryInfoKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureBuildGeometryInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR(
- VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ =
- VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel,
- VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {},
- VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_ =
- VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR::eBuild,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel,
+ VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {},
+ VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR::eBuild,
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ = {},
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ = {},
uint32_t geometryCount_ = {},
const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries_ = {},
const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries_ = {},
- VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {} ) VULKAN_HPP_NOEXCEPT
- : type( type_ )
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , type( type_ )
, flags( flags_ )
, mode( mode_ )
, srcAccelerationStructure( srcAccelerationStructure_ )
@@ -816,30 +746,29 @@ namespace VULKAN_HPP_NAMESPACE
, pGeometries( pGeometries_ )
, ppGeometries( ppGeometries_ )
, scratchData( scratchData_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR(
- AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AccelerationStructureBuildGeometryInfoKHR( VkAccelerationStructureBuildGeometryInfoKHR const & rhs )
- VULKAN_HPP_NOEXCEPT
- : AccelerationStructureBuildGeometryInfoKHR(
- *reinterpret_cast<AccelerationStructureBuildGeometryInfoKHR const *>( &rhs ) )
- {}
+ AccelerationStructureBuildGeometryInfoKHR( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : AccelerationStructureBuildGeometryInfoKHR( *reinterpret_cast<AccelerationStructureBuildGeometryInfoKHR const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
AccelerationStructureBuildGeometryInfoKHR(
- VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_,
- VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_,
- VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_,
- VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_,
- VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
- const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR> const & geometries_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
- const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const> const & pGeometries_ = {},
- VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {} )
- : type( type_ )
+ VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_,
+ VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_,
+ VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR> const & geometries_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const> const & pGeometries_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , type( type_ )
, flags( flags_ )
, mode( mode_ )
, srcAccelerationStructure( srcAccelerationStructure_ )
@@ -863,33 +792,29 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- AccelerationStructureBuildGeometryInfoKHR &
- operator=( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ AccelerationStructureBuildGeometryInfoKHR & operator=( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AccelerationStructureBuildGeometryInfoKHR &
- operator=( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureBuildGeometryInfoKHR & operator=( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR &
- setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR &
- setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -902,29 +827,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setSrcAccelerationStructure(
- VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR &
+ setSrcAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
srcAccelerationStructure = srcAccelerationStructure_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setDstAccelerationStructure(
- VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR &
+ setDstAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
dstAccelerationStructure = dstAccelerationStructure_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR &
- setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT
{
geometryCount = geometryCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR &
- setPGeometries( const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries_ ) VULKAN_HPP_NOEXCEPT
+ setPGeometries( const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries_ ) VULKAN_HPP_NOEXCEPT
{
pGeometries = pGeometries_;
return *this;
@@ -932,8 +856,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
AccelerationStructureBuildGeometryInfoKHR & setGeometries(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
- const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR> const & geometries_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR> const & geometries_ ) VULKAN_HPP_NOEXCEPT
{
geometryCount = static_cast<uint32_t>( geometries_.size() );
pGeometries = geometries_.data();
@@ -941,17 +864,17 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setPpGeometries(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR &
+ setPpGeometries( const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries_ ) VULKAN_HPP_NOEXCEPT
{
ppGeometries = ppGeometries_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- AccelerationStructureBuildGeometryInfoKHR & setPGeometries(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
- const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const> const & pGeometries_ ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureBuildGeometryInfoKHR &
+ setPGeometries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const> const & pGeometries_ )
+ VULKAN_HPP_NOEXCEPT
{
geometryCount = static_cast<uint32_t>( pGeometries_.size() );
ppGeometries = pGeometries_.data();
@@ -960,24 +883,24 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR &
- setScratchData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & scratchData_ ) VULKAN_HPP_NOEXCEPT
+ setScratchData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & scratchData_ ) VULKAN_HPP_NOEXCEPT
{
scratchData = scratchData_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAccelerationStructureBuildGeometryInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureBuildGeometryInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( this );
}
- explicit operator VkAccelerationStructureBuildGeometryInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureBuildGeometryInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureBuildGeometryInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -995,28 +918,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- type,
- flags,
- mode,
- srcAccelerationStructure,
- dstAccelerationStructure,
- geometryCount,
- pGeometries,
- ppGeometries,
- scratchData );
+ return std::tie(
+ sType, pNext, type, flags, mode, srcAccelerationStructure, dstAccelerationStructure, geometryCount, pGeometries, ppGeometries, scratchData );
}
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildGeometryInfoKHR;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type =
- VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel;
- VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {};
- VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode =
- VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR::eBuild;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildGeometryInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel;
+ VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {};
+ VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR::eBuild;
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure = {};
uint32_t geometryCount = {};
@@ -1024,15 +936,6 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries = {};
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR ) ==
- sizeof( VkAccelerationStructureBuildGeometryInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR>::value,
- "AccelerationStructureBuildGeometryInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureBuildGeometryInfoKHR>
@@ -1053,68 +956,62 @@ namespace VULKAN_HPP_NAMESPACE
, primitiveOffset( primitiveOffset_ )
, firstVertex( firstVertex_ )
, transformOffset( transformOffset_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR AccelerationStructureBuildRangeInfoKHR( AccelerationStructureBuildRangeInfoKHR const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR AccelerationStructureBuildRangeInfoKHR( AccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureBuildRangeInfoKHR( VkAccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : AccelerationStructureBuildRangeInfoKHR(
- *reinterpret_cast<AccelerationStructureBuildRangeInfoKHR const *>( &rhs ) )
- {}
+ : AccelerationStructureBuildRangeInfoKHR( *reinterpret_cast<AccelerationStructureBuildRangeInfoKHR const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- AccelerationStructureBuildRangeInfoKHR &
- operator=( AccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ AccelerationStructureBuildRangeInfoKHR & operator=( AccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AccelerationStructureBuildRangeInfoKHR &
- operator=( VkAccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureBuildRangeInfoKHR & operator=( VkAccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR &
- setPrimitiveCount( uint32_t primitiveCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setPrimitiveCount( uint32_t primitiveCount_ ) VULKAN_HPP_NOEXCEPT
{
primitiveCount = primitiveCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR &
- setPrimitiveOffset( uint32_t primitiveOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setPrimitiveOffset( uint32_t primitiveOffset_ ) VULKAN_HPP_NOEXCEPT
{
primitiveOffset = primitiveOffset_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR &
- setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT
{
firstVertex = firstVertex_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR &
- setTransformOffset( uint32_t transformOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setTransformOffset( uint32_t transformOffset_ ) VULKAN_HPP_NOEXCEPT
{
transformOffset = transformOffset_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAccelerationStructureBuildRangeInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureBuildRangeInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR *>( this );
}
- explicit operator VkAccelerationStructureBuildRangeInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureBuildRangeInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureBuildRangeInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -1129,13 +1026,13 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( AccelerationStructureBuildRangeInfoKHR const & ) const = default;
#else
- bool operator==( AccelerationStructureBuildRangeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( AccelerationStructureBuildRangeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( primitiveCount == rhs.primitiveCount ) && ( primitiveOffset == rhs.primitiveOffset ) &&
- ( firstVertex == rhs.firstVertex ) && ( transformOffset == rhs.transformOffset );
+ return ( primitiveCount == rhs.primitiveCount ) && ( primitiveOffset == rhs.primitiveOffset ) && ( firstVertex == rhs.firstVertex ) &&
+ ( transformOffset == rhs.transformOffset );
# endif
}
@@ -1151,48 +1048,37 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t firstVertex = {};
uint32_t transformOffset = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR ) ==
- sizeof( VkAccelerationStructureBuildRangeInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR>::value,
- "AccelerationStructureBuildRangeInfoKHR is not nothrow_move_constructible!" );
struct AccelerationStructureBuildSizesInfoKHR
{
using NativeType = VkAccelerationStructureBuildSizesInfoKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eAccelerationStructureBuildSizesInfoKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureBuildSizesInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR(
- VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ = {} ) VULKAN_HPP_NOEXCEPT
- : accelerationStructureSize( accelerationStructureSize_ )
+ VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR( VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , accelerationStructureSize( accelerationStructureSize_ )
, updateScratchSize( updateScratchSize_ )
, buildScratchSize( buildScratchSize_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR( AccelerationStructureBuildSizesInfoKHR const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR( AccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureBuildSizesInfoKHR( VkAccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : AccelerationStructureBuildSizesInfoKHR(
- *reinterpret_cast<AccelerationStructureBuildSizesInfoKHR const *>( &rhs ) )
- {}
+ : AccelerationStructureBuildSizesInfoKHR( *reinterpret_cast<AccelerationStructureBuildSizesInfoKHR const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- AccelerationStructureBuildSizesInfoKHR &
- operator=( AccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ AccelerationStructureBuildSizesInfoKHR & operator=( AccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AccelerationStructureBuildSizesInfoKHR &
- operator=( VkAccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureBuildSizesInfoKHR & operator=( VkAccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR const *>( &rhs );
return *this;
@@ -1206,38 +1092,38 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR &
- setAccelerationStructureSize( VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize_ ) VULKAN_HPP_NOEXCEPT
+ setAccelerationStructureSize( VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructureSize = accelerationStructureSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR &
- setUpdateScratchSize( VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize_ ) VULKAN_HPP_NOEXCEPT
+ setUpdateScratchSize( VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize_ ) VULKAN_HPP_NOEXCEPT
{
updateScratchSize = updateScratchSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR &
- setBuildScratchSize( VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ ) VULKAN_HPP_NOEXCEPT
+ setBuildScratchSize( VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ ) VULKAN_HPP_NOEXCEPT
{
buildScratchSize = buildScratchSize_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAccelerationStructureBuildSizesInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureBuildSizesInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureBuildSizesInfoKHR *>( this );
}
- explicit operator VkAccelerationStructureBuildSizesInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureBuildSizesInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -1258,11 +1144,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( AccelerationStructureBuildSizesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( accelerationStructureSize == rhs.accelerationStructureSize ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructureSize == rhs.accelerationStructureSize ) &&
( updateScratchSize == rhs.updateScratchSize ) && ( buildScratchSize == rhs.buildScratchSize );
# endif
}
@@ -1274,21 +1159,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildSizesInfoKHR;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildSizesInfoKHR;
+ const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize = {};
VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize = {};
VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR ) ==
- sizeof( VkAccelerationStructureBuildSizesInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR>::value,
- "AccelerationStructureBuildSizesInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureBuildSizesInfoKHR>
@@ -1301,39 +1177,38 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkAccelerationStructureCreateInfoKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eAccelerationStructureCreateInfoKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- AccelerationStructureCreateInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags_ = {},
- VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
- VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ =
- VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel,
- VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) VULKAN_HPP_NOEXCEPT
- : createFlags( createFlags_ )
+ VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR(
+ VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags_ = {},
+ VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
+ VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel,
+ VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , createFlags( createFlags_ )
, buffer( buffer_ )
, offset( offset_ )
, size( size_ )
, type( type_ )
, deviceAddress( deviceAddress_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR( AccelerationStructureCreateInfoKHR const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureCreateInfoKHR( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureCreateInfoKHR( *reinterpret_cast<AccelerationStructureCreateInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- AccelerationStructureCreateInfoKHR &
- operator=( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ AccelerationStructureCreateInfoKHR & operator=( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AccelerationStructureCreateInfoKHR &
- operator=( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureCreateInfoKHR & operator=( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const *>( &rhs );
return *this;
@@ -1347,59 +1222,54 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR &
- setCreateFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags_ ) VULKAN_HPP_NOEXCEPT
+ setCreateFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags_ ) VULKAN_HPP_NOEXCEPT
{
createFlags = createFlags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR &
- setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR &
- setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR &
- setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR &
- setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR &
- setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
{
deviceAddress = deviceAddress_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAccelerationStructureCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( this );
}
- explicit operator VkAccelerationStructureCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureCreateInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -1423,12 +1293,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( AccelerationStructureCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( createFlags == rhs.createFlags ) &&
- ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( size == rhs.size ) && ( type == rhs.type ) &&
- ( deviceAddress == rhs.deviceAddress );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( createFlags == rhs.createFlags ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) &&
+ ( size == rhs.size ) && ( type == rhs.type ) && ( deviceAddress == rhs.deviceAddress );
# endif
}
@@ -1439,24 +1308,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoKHR;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags = {};
- VULKAN_HPP_NAMESPACE::Buffer buffer = {};
- VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
- VULKAN_HPP_NAMESPACE::DeviceSize size = {};
- VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type =
- VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel;
- VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags = {};
+ VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+ VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel;
+ VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR ) ==
- sizeof( VkAccelerationStructureCreateInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR>::value,
- "AccelerationStructureCreateInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureCreateInfoKHR>
@@ -1472,19 +1332,20 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryTrianglesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- GeometryTrianglesNV( VULKAN_HPP_NAMESPACE::Buffer vertexData_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ = {},
- uint32_t vertexCount_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {},
- VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::Buffer indexData_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ = {},
- uint32_t indexCount_ = {},
- VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16,
- VULKAN_HPP_NAMESPACE::Buffer transformData_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ = {} ) VULKAN_HPP_NOEXCEPT
- : vertexData( vertexData_ )
+ VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( VULKAN_HPP_NAMESPACE::Buffer vertexData_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ = {},
+ uint32_t vertexCount_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {},
+ VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::Buffer indexData_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ = {},
+ uint32_t indexCount_ = {},
+ VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16,
+ VULKAN_HPP_NAMESPACE::Buffer transformData_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , vertexData( vertexData_ )
, vertexOffset( vertexOffset_ )
, vertexCount( vertexCount_ )
, vertexStride( vertexStride_ )
@@ -1495,13 +1356,14 @@ namespace VULKAN_HPP_NAMESPACE
, indexType( indexType_ )
, transformData( transformData_ )
, transformOffset( transformOffset_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- GeometryTrianglesNV( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : GeometryTrianglesNV( *reinterpret_cast<GeometryTrianglesNV const *>( &rhs ) )
- {}
+ GeometryTrianglesNV( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeometryTrianglesNV( *reinterpret_cast<GeometryTrianglesNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
GeometryTrianglesNV & operator=( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -1519,15 +1381,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV &
- setVertexData( VULKAN_HPP_NAMESPACE::Buffer vertexData_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexData( VULKAN_HPP_NAMESPACE::Buffer vertexData_ ) VULKAN_HPP_NOEXCEPT
{
vertexData = vertexData_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV &
- setVertexOffset( VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexOffset( VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ ) VULKAN_HPP_NOEXCEPT
{
vertexOffset = vertexOffset_;
return *this;
@@ -1539,29 +1399,25 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV &
- setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT
{
vertexStride = vertexStride_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV &
- setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
{
vertexFormat = vertexFormat_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV &
- setIndexData( VULKAN_HPP_NAMESPACE::Buffer indexData_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexData( VULKAN_HPP_NAMESPACE::Buffer indexData_ ) VULKAN_HPP_NOEXCEPT
{
indexData = indexData_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV &
- setIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ ) VULKAN_HPP_NOEXCEPT
{
indexOffset = indexOffset_;
return *this;
@@ -1573,39 +1429,36 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV &
- setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
{
indexType = indexType_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV &
- setTransformData( VULKAN_HPP_NAMESPACE::Buffer transformData_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setTransformData( VULKAN_HPP_NAMESPACE::Buffer transformData_ ) VULKAN_HPP_NOEXCEPT
{
transformData = transformData_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV &
- setTransformOffset( VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setTransformOffset( VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ ) VULKAN_HPP_NOEXCEPT
{
transformOffset = transformOffset_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkGeometryTrianglesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkGeometryTrianglesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkGeometryTrianglesNV *>( this );
}
- explicit operator VkGeometryTrianglesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkGeometryTrianglesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkGeometryTrianglesNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -1646,15 +1499,13 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( GeometryTrianglesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexData == rhs.vertexData ) &&
- ( vertexOffset == rhs.vertexOffset ) && ( vertexCount == rhs.vertexCount ) &&
- ( vertexStride == rhs.vertexStride ) && ( vertexFormat == rhs.vertexFormat ) &&
- ( indexData == rhs.indexData ) && ( indexOffset == rhs.indexOffset ) && ( indexCount == rhs.indexCount ) &&
- ( indexType == rhs.indexType ) && ( transformData == rhs.transformData ) &&
- ( transformOffset == rhs.transformOffset );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexData == rhs.vertexData ) && ( vertexOffset == rhs.vertexOffset ) &&
+ ( vertexCount == rhs.vertexCount ) && ( vertexStride == rhs.vertexStride ) && ( vertexFormat == rhs.vertexFormat ) &&
+ ( indexData == rhs.indexData ) && ( indexOffset == rhs.indexOffset ) && ( indexCount == rhs.indexCount ) && ( indexType == rhs.indexType ) &&
+ ( transformData == rhs.transformData ) && ( transformOffset == rhs.transformOffset );
# endif
}
@@ -1679,12 +1530,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Buffer transformData = {};
VULKAN_HPP_NAMESPACE::DeviceSize transformOffset = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV ) == sizeof( VkGeometryTrianglesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV>::value,
- "GeometryTrianglesNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eGeometryTrianglesNV>
@@ -1703,18 +1548,19 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_CONSTEXPR GeometryAABBNV( VULKAN_HPP_NAMESPACE::Buffer aabbData_ = {},
uint32_t numAABBs_ = {},
uint32_t stride_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT
- : aabbData( aabbData_ )
+ VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , aabbData( aabbData_ )
, numAABBs( numAABBs_ )
, stride( stride_ )
, offset( offset_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR GeometryAABBNV( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- GeometryAABBNV( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : GeometryAABBNV( *reinterpret_cast<GeometryAABBNV const *>( &rhs ) )
- {}
+ GeometryAABBNV( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeometryAABBNV( *reinterpret_cast<GeometryAABBNV const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
GeometryAABBNV & operator=( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -1757,17 +1603,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkGeometryAABBNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkGeometryAABBNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkGeometryAABBNV *>( this );
}
- explicit operator VkGeometryAABBNV &() VULKAN_HPP_NOEXCEPT
+ operator VkGeometryAABBNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkGeometryAABBNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -1789,11 +1635,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( GeometryAABBNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( aabbData == rhs.aabbData ) &&
- ( numAABBs == rhs.numAABBs ) && ( stride == rhs.stride ) && ( offset == rhs.offset );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( aabbData == rhs.aabbData ) && ( numAABBs == rhs.numAABBs ) && ( stride == rhs.stride ) &&
+ ( offset == rhs.offset );
# endif
}
@@ -1811,12 +1657,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t stride = {};
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryAABBNV ) == sizeof( VkGeometryAABBNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeometryAABBNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeometryAABBNV>::value,
- "GeometryAABBNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eGeometryAabbNV>
@@ -1833,13 +1673,12 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs_ = {} ) VULKAN_HPP_NOEXCEPT
: triangles( triangles_ )
, aabbs( aabbs_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR GeometryDataNV( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- GeometryDataNV( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : GeometryDataNV( *reinterpret_cast<GeometryDataNV const *>( &rhs ) )
- {}
+ GeometryDataNV( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeometryDataNV( *reinterpret_cast<GeometryDataNV const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
GeometryDataNV & operator=( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -1851,32 +1690,30 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 GeometryDataNV &
- setTriangles( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const & triangles_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GeometryDataNV & setTriangles( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const & triangles_ ) VULKAN_HPP_NOEXCEPT
{
triangles = triangles_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GeometryDataNV &
- setAabbs( VULKAN_HPP_NAMESPACE::GeometryAABBNV const & aabbs_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GeometryDataNV & setAabbs( VULKAN_HPP_NAMESPACE::GeometryAABBNV const & aabbs_ ) VULKAN_HPP_NOEXCEPT
{
aabbs = aabbs_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkGeometryDataNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkGeometryDataNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkGeometryDataNV *>( this );
}
- explicit operator VkGeometryDataNV &() VULKAN_HPP_NOEXCEPT
+ operator VkGeometryDataNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkGeometryDataNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -1893,7 +1730,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( GeometryDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( triangles == rhs.triangles ) && ( aabbs == rhs.aabbs );
@@ -1910,12 +1747,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles = {};
VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryDataNV ) == sizeof( VkGeometryDataNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeometryDataNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeometryDataNV>::value,
- "GeometryDataNV is not nothrow_move_constructible!" );
struct GeometryNV
{
@@ -1925,20 +1756,20 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR GeometryNV(
- VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles,
- VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ = {},
- VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {} ) VULKAN_HPP_NOEXCEPT
- : geometryType( geometryType_ )
+ VULKAN_HPP_CONSTEXPR GeometryNV( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles,
+ VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ = {},
+ VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , geometryType( geometryType_ )
, geometry( geometry_ )
, flags( flags_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR GeometryNV( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- GeometryNV( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : GeometryNV( *reinterpret_cast<GeometryNV const *>( &rhs ) )
- {}
+ GeometryNV( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeometryNV( *reinterpret_cast<GeometryNV const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
GeometryNV & operator=( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -1956,15 +1787,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GeometryNV &
- setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GeometryNV & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT
{
geometryType = geometryType_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GeometryNV &
- setGeometry( VULKAN_HPP_NAMESPACE::GeometryDataNV const & geometry_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GeometryNV & setGeometry( VULKAN_HPP_NAMESPACE::GeometryDataNV const & geometry_ ) VULKAN_HPP_NOEXCEPT
{
geometry = geometry_;
return *this;
@@ -1977,17 +1806,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkGeometryNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkGeometryNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkGeometryNV *>( this );
}
- explicit operator VkGeometryNV &() VULKAN_HPP_NOEXCEPT
+ operator VkGeometryNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkGeometryNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -2008,11 +1837,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( GeometryNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( geometryType == rhs.geometryType ) &&
- ( geometry == rhs.geometry ) && ( flags == rhs.flags );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( geometryType == rhs.geometryType ) && ( geometry == rhs.geometry ) && ( flags == rhs.flags );
# endif
}
@@ -2029,12 +1857,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::GeometryDataNV geometry = {};
VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryNV ) == sizeof( VkGeometryNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeometryNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeometryNV>::value,
- "GeometryNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eGeometryNV>
@@ -2050,38 +1872,42 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- AccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ = {},
- VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ = {},
- uint32_t instanceCount_ = {},
- uint32_t geometryCount_ = {},
- const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries_ = {} ) VULKAN_HPP_NOEXCEPT
- : type( type_ )
+ VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ = {},
+ VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ = {},
+ uint32_t instanceCount_ = {},
+ uint32_t geometryCount_ = {},
+ const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , type( type_ )
, flags( flags_ )
, instanceCount( instanceCount_ )
, geometryCount( geometryCount_ )
, pGeometries( pGeometries_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- AccelerationStructureInfoNV( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureInfoNV( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureInfoNV( *reinterpret_cast<AccelerationStructureInfoNV const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- AccelerationStructureInfoNV(
- VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_,
- VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_,
- uint32_t instanceCount_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GeometryNV> const & geometries_ )
- : type( type_ )
+ AccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_,
+ VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_,
+ uint32_t instanceCount_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GeometryNV> const & geometries_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , type( type_ )
, flags( flags_ )
, instanceCount( instanceCount_ )
, geometryCount( static_cast<uint32_t>( geometries_.size() ) )
, pGeometries( geometries_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -2100,45 +1926,39 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV &
- setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV &
- setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV &
- setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
{
instanceCount = instanceCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV &
- setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT
{
geometryCount = geometryCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV &
- setPGeometries( const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setPGeometries( const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries_ ) VULKAN_HPP_NOEXCEPT
{
pGeometries = pGeometries_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- AccelerationStructureInfoNV & setGeometries(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GeometryNV> const & geometries_ )
- VULKAN_HPP_NOEXCEPT
+ AccelerationStructureInfoNV &
+ setGeometries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GeometryNV> const & geometries_ ) VULKAN_HPP_NOEXCEPT
{
geometryCount = static_cast<uint32_t>( geometries_.size() );
pGeometries = geometries_.data();
@@ -2147,17 +1967,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAccelerationStructureInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureInfoNV *>( this );
}
- explicit operator VkAccelerationStructureInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -2180,12 +2000,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( AccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( flags == rhs.flags ) &&
- ( instanceCount == rhs.instanceCount ) && ( geometryCount == rhs.geometryCount ) &&
- ( pGeometries == rhs.pGeometries );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( flags == rhs.flags ) && ( instanceCount == rhs.instanceCount ) &&
+ ( geometryCount == rhs.geometryCount ) && ( pGeometries == rhs.pGeometries );
# endif
}
@@ -2204,14 +2023,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t geometryCount = {};
const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV ) ==
- sizeof( VkAccelerationStructureInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV>::value,
- "AccelerationStructureInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureInfoNV>
@@ -2224,27 +2035,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkAccelerationStructureCreateInfoNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eAccelerationStructureCreateInfoNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV(
- VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {},
- VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_ = {} ) VULKAN_HPP_NOEXCEPT
- : compactedSize( compactedSize_ )
+ VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {},
+ VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , compactedSize( compactedSize_ )
, info( info_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- AccelerationStructureCreateInfoNV( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureCreateInfoNV( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureCreateInfoNV( *reinterpret_cast<AccelerationStructureCreateInfoNV const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- AccelerationStructureCreateInfoNV &
- operator=( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ AccelerationStructureCreateInfoNV & operator=( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureCreateInfoNV & operator=( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -2259,32 +2070,30 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV &
- setCompactedSize( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV & setCompactedSize( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT
{
compactedSize = compactedSize_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV &
- setInfo( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const & info_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV & setInfo( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const & info_ ) VULKAN_HPP_NOEXCEPT
{
info = info_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAccelerationStructureCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( this );
}
- explicit operator VkAccelerationStructureCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureCreateInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -2304,11 +2113,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( AccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compactedSize == rhs.compactedSize ) &&
- ( info == rhs.info );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compactedSize == rhs.compactedSize ) && ( info == rhs.info );
# endif
}
@@ -2324,14 +2132,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize compactedSize = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV ) ==
- sizeof( VkAccelerationStructureCreateInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV>::value,
- "AccelerationStructureCreateInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureCreateInfoNV>
@@ -2344,68 +2144,62 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkAccelerationStructureDeviceAddressInfoKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eAccelerationStructureDeviceAddressInfoKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureDeviceAddressInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR(
- VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT
- : accelerationStructure( accelerationStructure_ )
- {}
+ VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , accelerationStructure( accelerationStructure_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR(
- AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AccelerationStructureDeviceAddressInfoKHR( VkAccelerationStructureDeviceAddressInfoKHR const & rhs )
- VULKAN_HPP_NOEXCEPT
- : AccelerationStructureDeviceAddressInfoKHR(
- *reinterpret_cast<AccelerationStructureDeviceAddressInfoKHR const *>( &rhs ) )
- {}
+ AccelerationStructureDeviceAddressInfoKHR( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : AccelerationStructureDeviceAddressInfoKHR( *reinterpret_cast<AccelerationStructureDeviceAddressInfoKHR const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- AccelerationStructureDeviceAddressInfoKHR &
- operator=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ AccelerationStructureDeviceAddressInfoKHR & operator=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AccelerationStructureDeviceAddressInfoKHR &
- operator=( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureDeviceAddressInfoKHR & operator=( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR & setAccelerationStructure(
- VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR &
+ setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructure = accelerationStructure_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAccelerationStructureDeviceAddressInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureDeviceAddressInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( this );
}
- explicit operator VkAccelerationStructureDeviceAddressInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureDeviceAddressInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureDeviceAddressInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -2418,7 +2212,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( AccelerationStructureDeviceAddressInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructure == rhs.accelerationStructure );
@@ -2432,19 +2226,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureDeviceAddressInfoKHR;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureDeviceAddressInfoKHR;
+ const void * pNext = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR ) ==
- sizeof( VkAccelerationStructureDeviceAddressInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR>::value,
- "AccelerationStructureDeviceAddressInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureDeviceAddressInfoKHR>
@@ -2457,69 +2242,64 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkAccelerationStructureGeometryMotionTrianglesDataNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV(
- VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {} ) VULKAN_HPP_NOEXCEPT
- : vertexData( vertexData_ )
- {}
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , vertexData( vertexData_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV(
- AccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14
+ AccelerationStructureGeometryMotionTrianglesDataNV( AccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AccelerationStructureGeometryMotionTrianglesDataNV(
- VkAccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : AccelerationStructureGeometryMotionTrianglesDataNV(
- *reinterpret_cast<AccelerationStructureGeometryMotionTrianglesDataNV const *>( &rhs ) )
- {}
+ AccelerationStructureGeometryMotionTrianglesDataNV( VkAccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : AccelerationStructureGeometryMotionTrianglesDataNV( *reinterpret_cast<AccelerationStructureGeometryMotionTrianglesDataNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
AccelerationStructureGeometryMotionTrianglesDataNV &
operator=( AccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AccelerationStructureGeometryMotionTrianglesDataNV &
- operator=( VkAccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureGeometryMotionTrianglesDataNV & operator=( VkAccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryMotionTrianglesDataNV const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryMotionTrianglesDataNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV &
- setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT
+ setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT
{
vertexData = vertexData_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAccelerationStructureGeometryMotionTrianglesDataNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureGeometryMotionTrianglesDataNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureGeometryMotionTrianglesDataNV *>( this );
}
- explicit operator VkAccelerationStructureGeometryMotionTrianglesDataNV &() VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureGeometryMotionTrianglesDataNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureGeometryMotionTrianglesDataNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -2528,19 +2308,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV;
+ const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryMotionTrianglesDataNV ) ==
- sizeof( VkAccelerationStructureGeometryMotionTrianglesDataNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryMotionTrianglesDataNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryMotionTrianglesDataNV>::value,
- "AccelerationStructureGeometryMotionTrianglesDataNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV>
@@ -2553,16 +2324,11 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkTransformMatrixKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14
- TransformMatrixKHR( std::array<std::array<float, 4>, 3> const & matrix_ = {} ) VULKAN_HPP_NOEXCEPT
- : matrix( matrix_ )
- {}
+ VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( std::array<std::array<float, 4>, 3> const & matrix_ = {} ) VULKAN_HPP_NOEXCEPT : matrix( matrix_ ) {}
VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- TransformMatrixKHR( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : TransformMatrixKHR( *reinterpret_cast<TransformMatrixKHR const *>( &rhs ) )
- {}
+ TransformMatrixKHR( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT : TransformMatrixKHR( *reinterpret_cast<TransformMatrixKHR const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
TransformMatrixKHR & operator=( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -2574,25 +2340,24 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR &
- setMatrix( std::array<std::array<float, 4>, 3> matrix_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR & setMatrix( std::array<std::array<float, 4>, 3> matrix_ ) VULKAN_HPP_NOEXCEPT
{
matrix = matrix_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkTransformMatrixKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkTransformMatrixKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkTransformMatrixKHR *>( this );
}
- explicit operator VkTransformMatrixKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkTransformMatrixKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkTransformMatrixKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -2609,7 +2374,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( TransformMatrixKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( matrix == rhs.matrix );
@@ -2625,12 +2390,6 @@ namespace VULKAN_HPP_NAMESPACE
public:
VULKAN_HPP_NAMESPACE::ArrayWrapper2D<float, 3, 4> matrix = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TransformMatrixKHR ) == sizeof( VkTransformMatrixKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TransformMatrixKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TransformMatrixKHR>::value,
- "TransformMatrixKHR is not nothrow_move_constructible!" );
using TransformMatrixNV = TransformMatrixKHR;
struct AccelerationStructureInstanceKHR
@@ -2638,31 +2397,30 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkAccelerationStructureInstanceKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14
- AccelerationStructureInstanceKHR( VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform_ = {},
- uint32_t instanceCustomIndex_ = {},
- uint32_t mask_ = {},
- uint32_t instanceShaderBindingTableRecordOffset_ = {},
- VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {},
- uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR( VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform_ = {},
+ uint32_t instanceCustomIndex_ = {},
+ uint32_t mask_ = {},
+ uint32_t instanceShaderBindingTableRecordOffset_ = {},
+ VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {},
+ uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT
: transform( transform_ )
, instanceCustomIndex( instanceCustomIndex_ )
, mask( mask_ )
, instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ )
, flags( flags_ )
, accelerationStructureReference( accelerationStructureReference_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14
- AccelerationStructureInstanceKHR( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureInstanceKHR( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureInstanceKHR( *reinterpret_cast<AccelerationStructureInstanceKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- AccelerationStructureInstanceKHR &
- operator=( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ AccelerationStructureInstanceKHR & operator=( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureInstanceKHR & operator=( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -2671,15 +2429,13 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR &
- setTransform( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transform_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setTransform( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transform_ ) VULKAN_HPP_NOEXCEPT
{
transform = transform_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR &
- setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT
{
instanceCustomIndex = instanceCustomIndex_;
return *this;
@@ -2692,38 +2448,36 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR &
- setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT
+ setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT
{
instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_;
return *this;
}
- AccelerationStructureInstanceKHR &
- setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureInstanceKHR & setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = *reinterpret_cast<VkGeometryInstanceFlagsKHR *>( &flags_ );
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR &
- setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructureReference = accelerationStructureReference_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAccelerationStructureInstanceKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureInstanceKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureInstanceKHR *>( this );
}
- explicit operator VkAccelerationStructureInstanceKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureInstanceKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureInstanceKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -2736,12 +2490,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( transform,
- instanceCustomIndex,
- mask,
- instanceShaderBindingTableRecordOffset,
- flags,
- accelerationStructureReference );
+ return std::tie( transform, instanceCustomIndex, mask, instanceShaderBindingTableRecordOffset, flags, accelerationStructureReference );
}
#endif
@@ -2750,13 +2499,12 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( AccelerationStructureInstanceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( transform == rhs.transform ) && ( instanceCustomIndex == rhs.instanceCustomIndex ) &&
- ( mask == rhs.mask ) &&
- ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) &&
- ( flags == rhs.flags ) && ( accelerationStructureReference == rhs.accelerationStructureReference );
+ return ( transform == rhs.transform ) && ( instanceCustomIndex == rhs.instanceCustomIndex ) && ( mask == rhs.mask ) &&
+ ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) && ( flags == rhs.flags ) &&
+ ( accelerationStructureReference == rhs.accelerationStructureReference );
# endif
}
@@ -2774,14 +2522,6 @@ namespace VULKAN_HPP_NAMESPACE
VkGeometryInstanceFlagsKHR flags : 8;
uint64_t accelerationStructureReference = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR ) ==
- sizeof( VkAccelerationStructureInstanceKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR>::value,
- "AccelerationStructureInstanceKHR is not nothrow_move_constructible!" );
using AccelerationStructureInstanceNV = AccelerationStructureInstanceKHR;
struct AccelerationStructureMatrixMotionInstanceNV
@@ -2789,14 +2529,13 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkAccelerationStructureMatrixMotionInstanceNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14
- AccelerationStructureMatrixMotionInstanceNV( VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT0_ = {},
- VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT1_ = {},
- uint32_t instanceCustomIndex_ = {},
- uint32_t mask_ = {},
- uint32_t instanceShaderBindingTableRecordOffset_ = {},
- VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {},
- uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV( VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT0_ = {},
+ VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT1_ = {},
+ uint32_t instanceCustomIndex_ = {},
+ uint32_t mask_ = {},
+ uint32_t instanceShaderBindingTableRecordOffset_ = {},
+ VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {},
+ uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT
: transformT0( transformT0_ )
, transformT1( transformT1_ )
, instanceCustomIndex( instanceCustomIndex_ )
@@ -2804,23 +2543,21 @@ namespace VULKAN_HPP_NAMESPACE
, instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ )
, flags( flags_ )
, accelerationStructureReference( accelerationStructureReference_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV(
- AccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14
+ AccelerationStructureMatrixMotionInstanceNV( AccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AccelerationStructureMatrixMotionInstanceNV( VkAccelerationStructureMatrixMotionInstanceNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : AccelerationStructureMatrixMotionInstanceNV(
- *reinterpret_cast<AccelerationStructureMatrixMotionInstanceNV const *>( &rhs ) )
- {}
+ AccelerationStructureMatrixMotionInstanceNV( VkAccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : AccelerationStructureMatrixMotionInstanceNV( *reinterpret_cast<AccelerationStructureMatrixMotionInstanceNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- AccelerationStructureMatrixMotionInstanceNV &
- operator=( AccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ AccelerationStructureMatrixMotionInstanceNV & operator=( AccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AccelerationStructureMatrixMotionInstanceNV &
- operator=( VkAccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureMatrixMotionInstanceNV & operator=( VkAccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV const *>( &rhs );
return *this;
@@ -2828,21 +2565,20 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV &
- setTransformT0( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transformT0_ ) VULKAN_HPP_NOEXCEPT
+ setTransformT0( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transformT0_ ) VULKAN_HPP_NOEXCEPT
{
transformT0 = transformT0_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV &
- setTransformT1( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transformT1_ ) VULKAN_HPP_NOEXCEPT
+ setTransformT1( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transformT1_ ) VULKAN_HPP_NOEXCEPT
{
transformT1 = transformT1_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV &
- setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT
{
instanceCustomIndex = instanceCustomIndex_;
return *this;
@@ -2855,38 +2591,37 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV &
- setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT
+ setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT
{
instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_;
return *this;
}
- AccelerationStructureMatrixMotionInstanceNV &
- setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureMatrixMotionInstanceNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = *reinterpret_cast<VkGeometryInstanceFlagsKHR *>( &flags_ );
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV &
- setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT
+ setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructureReference = accelerationStructureReference_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAccelerationStructureMatrixMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureMatrixMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureMatrixMotionInstanceNV *>( this );
}
- explicit operator VkAccelerationStructureMatrixMotionInstanceNV &() VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureMatrixMotionInstanceNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureMatrixMotionInstanceNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -2900,13 +2635,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( transformT0,
- transformT1,
- instanceCustomIndex,
- mask,
- instanceShaderBindingTableRecordOffset,
- flags,
- accelerationStructureReference );
+ return std::tie( transformT0, transformT1, instanceCustomIndex, mask, instanceShaderBindingTableRecordOffset, flags, accelerationStructureReference );
}
#endif
@@ -2915,13 +2644,12 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( AccelerationStructureMatrixMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( transformT0 == rhs.transformT0 ) && ( transformT1 == rhs.transformT1 ) &&
- ( instanceCustomIndex == rhs.instanceCustomIndex ) && ( mask == rhs.mask ) &&
- ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) &&
- ( flags == rhs.flags ) && ( accelerationStructureReference == rhs.accelerationStructureReference );
+ return ( transformT0 == rhs.transformT0 ) && ( transformT1 == rhs.transformT1 ) && ( instanceCustomIndex == rhs.instanceCustomIndex ) &&
+ ( mask == rhs.mask ) && ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) && ( flags == rhs.flags ) &&
+ ( accelerationStructureReference == rhs.accelerationStructureReference );
# endif
}
@@ -2940,87 +2668,75 @@ namespace VULKAN_HPP_NAMESPACE
VkGeometryInstanceFlagsKHR flags : 8;
uint64_t accelerationStructureReference = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV ) ==
- sizeof( VkAccelerationStructureMatrixMotionInstanceNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV>::value,
- "AccelerationStructureMatrixMotionInstanceNV is not nothrow_move_constructible!" );
struct AccelerationStructureMemoryRequirementsInfoNV
{
using NativeType = VkAccelerationStructureMemoryRequirementsInfoNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eAccelerationStructureMemoryRequirementsInfoNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV(
- VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ =
- VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject,
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT
- : type( type_ )
+ VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , type( type_ )
, accelerationStructure( accelerationStructure_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV(
- AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ AccelerationStructureMemoryRequirementsInfoNV( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AccelerationStructureMemoryRequirementsInfoNV( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : AccelerationStructureMemoryRequirementsInfoNV(
- *reinterpret_cast<AccelerationStructureMemoryRequirementsInfoNV const *>( &rhs ) )
- {}
+ AccelerationStructureMemoryRequirementsInfoNV( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : AccelerationStructureMemoryRequirementsInfoNV( *reinterpret_cast<AccelerationStructureMemoryRequirementsInfoNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- AccelerationStructureMemoryRequirementsInfoNV &
- operator=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ AccelerationStructureMemoryRequirementsInfoNV & operator=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AccelerationStructureMemoryRequirementsInfoNV &
- operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureMemoryRequirementsInfoNV & operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV &
- setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ ) VULKAN_HPP_NOEXCEPT
+ setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & setAccelerationStructure(
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV &
+ setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructure = accelerationStructure_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAccelerationStructureMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( this );
}
- explicit operator VkAccelerationStructureMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureMemoryRequirementsInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -3040,11 +2756,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) &&
- ( accelerationStructure == rhs.accelerationStructure );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( accelerationStructure == rhs.accelerationStructure );
# endif
}
@@ -3055,21 +2770,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type =
- VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject;
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject;
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV ) ==
- sizeof( VkAccelerationStructureMemoryRequirementsInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV>::value,
- "AccelerationStructureMemoryRequirementsInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureMemoryRequirementsInfoNV>
@@ -3082,27 +2787,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkAccelerationStructureMotionInfoNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eAccelerationStructureMotionInfoNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureMotionInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR AccelerationStructureMotionInfoNV(
- uint32_t maxInstances_ = {},
- VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxInstances( maxInstances_ )
+ VULKAN_HPP_CONSTEXPR AccelerationStructureMotionInfoNV( uint32_t maxInstances_ = {},
+ VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxInstances( maxInstances_ )
, flags( flags_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- AccelerationStructureMotionInfoNV( AccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR AccelerationStructureMotionInfoNV( AccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureMotionInfoNV( VkAccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureMotionInfoNV( *reinterpret_cast<AccelerationStructureMotionInfoNV const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- AccelerationStructureMotionInfoNV &
- operator=( AccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ AccelerationStructureMotionInfoNV & operator=( AccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureMotionInfoNV & operator=( VkAccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -3117,32 +2822,31 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV &
- setMaxInstances( uint32_t maxInstances_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV & setMaxInstances( uint32_t maxInstances_ ) VULKAN_HPP_NOEXCEPT
{
maxInstances = maxInstances_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV &
- setFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+ setFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAccelerationStructureMotionInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureMotionInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureMotionInfoNV *>( this );
}
- explicit operator VkAccelerationStructureMotionInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureMotionInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureMotionInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -3162,11 +2866,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( AccelerationStructureMotionInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxInstances == rhs.maxInstances ) &&
- ( flags == rhs.flags );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxInstances == rhs.maxInstances ) && ( flags == rhs.flags );
# endif
}
@@ -3177,19 +2880,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMotionInfoNV;
- const void * pNext = {};
- uint32_t maxInstances = {};
- VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMotionInfoNV;
+ const void * pNext = {};
+ uint32_t maxInstances = {};
+ VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV ) ==
- sizeof( VkAccelerationStructureMotionInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV>::value,
- "AccelerationStructureMotionInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureMotionInfoNV>
@@ -3234,12 +2929,12 @@ namespace VULKAN_HPP_NAMESPACE
, tx( tx_ )
, ty( ty_ )
, tz( tz_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR SRTDataNV( SRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SRTDataNV( VkSRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT : SRTDataNV( *reinterpret_cast<SRTDataNV const *>( &rhs ) )
- {}
+ SRTDataNV( VkSRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT : SRTDataNV( *reinterpret_cast<SRTDataNV const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
SRTDataNV & operator=( SRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -3348,17 +3043,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSRTDataNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSRTDataNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSRTDataNV *>( this );
}
- explicit operator VkSRTDataNV &() VULKAN_HPP_NOEXCEPT
+ operator VkSRTDataNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSRTDataNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -3390,13 +3085,12 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SRTDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sx == rhs.sx ) && ( a == rhs.a ) && ( b == rhs.b ) && ( pvx == rhs.pvx ) && ( sy == rhs.sy ) &&
- ( c == rhs.c ) && ( pvy == rhs.pvy ) && ( sz == rhs.sz ) && ( pvz == rhs.pvz ) && ( qx == rhs.qx ) &&
- ( qy == rhs.qy ) && ( qz == rhs.qz ) && ( qw == rhs.qw ) && ( tx == rhs.tx ) && ( ty == rhs.ty ) &&
- ( tz == rhs.tz );
+ return ( sx == rhs.sx ) && ( a == rhs.a ) && ( b == rhs.b ) && ( pvx == rhs.pvx ) && ( sy == rhs.sy ) && ( c == rhs.c ) && ( pvy == rhs.pvy ) &&
+ ( sz == rhs.sz ) && ( pvz == rhs.pvz ) && ( qx == rhs.qx ) && ( qy == rhs.qy ) && ( qz == rhs.qz ) && ( qw == rhs.qw ) && ( tx == rhs.tx ) &&
+ ( ty == rhs.ty ) && ( tz == rhs.tz );
# endif
}
@@ -3424,26 +3118,19 @@ namespace VULKAN_HPP_NAMESPACE
float ty = {};
float tz = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SRTDataNV ) == sizeof( VkSRTDataNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SRTDataNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SRTDataNV>::value,
- "SRTDataNV is not nothrow_move_constructible!" );
struct AccelerationStructureSRTMotionInstanceNV
{
using NativeType = VkAccelerationStructureSRTMotionInstanceNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- AccelerationStructureSRTMotionInstanceNV( VULKAN_HPP_NAMESPACE::SRTDataNV transformT0_ = {},
- VULKAN_HPP_NAMESPACE::SRTDataNV transformT1_ = {},
- uint32_t instanceCustomIndex_ = {},
- uint32_t mask_ = {},
- uint32_t instanceShaderBindingTableRecordOffset_ = {},
- VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {},
- uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR AccelerationStructureSRTMotionInstanceNV( VULKAN_HPP_NAMESPACE::SRTDataNV transformT0_ = {},
+ VULKAN_HPP_NAMESPACE::SRTDataNV transformT1_ = {},
+ uint32_t instanceCustomIndex_ = {},
+ uint32_t mask_ = {},
+ uint32_t instanceShaderBindingTableRecordOffset_ = {},
+ VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {},
+ uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT
: transformT0( transformT0_ )
, transformT1( transformT1_ )
, instanceCustomIndex( instanceCustomIndex_ )
@@ -3451,23 +3138,20 @@ namespace VULKAN_HPP_NAMESPACE
, instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ )
, flags( flags_ )
, accelerationStructureReference( accelerationStructureReference_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR AccelerationStructureSRTMotionInstanceNV(
- AccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR AccelerationStructureSRTMotionInstanceNV( AccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AccelerationStructureSRTMotionInstanceNV( VkAccelerationStructureSRTMotionInstanceNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : AccelerationStructureSRTMotionInstanceNV(
- *reinterpret_cast<AccelerationStructureSRTMotionInstanceNV const *>( &rhs ) )
- {}
+ AccelerationStructureSRTMotionInstanceNV( VkAccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : AccelerationStructureSRTMotionInstanceNV( *reinterpret_cast<AccelerationStructureSRTMotionInstanceNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- AccelerationStructureSRTMotionInstanceNV &
- operator=( AccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ AccelerationStructureSRTMotionInstanceNV & operator=( AccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AccelerationStructureSRTMotionInstanceNV &
- operator=( VkAccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureSRTMotionInstanceNV & operator=( VkAccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV const *>( &rhs );
return *this;
@@ -3488,8 +3172,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV &
- setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT
{
instanceCustomIndex = instanceCustomIndex_;
return *this;
@@ -3502,38 +3185,37 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV &
- setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT
+ setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT
{
instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_;
return *this;
}
- AccelerationStructureSRTMotionInstanceNV &
- setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureSRTMotionInstanceNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = *reinterpret_cast<VkGeometryInstanceFlagsKHR *>( &flags_ );
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV &
- setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT
+ setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructureReference = accelerationStructureReference_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAccelerationStructureSRTMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureSRTMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureSRTMotionInstanceNV *>( this );
}
- explicit operator VkAccelerationStructureSRTMotionInstanceNV &() VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureSRTMotionInstanceNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureSRTMotionInstanceNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -3547,13 +3229,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( transformT0,
- transformT1,
- instanceCustomIndex,
- mask,
- instanceShaderBindingTableRecordOffset,
- flags,
- accelerationStructureReference );
+ return std::tie( transformT0, transformT1, instanceCustomIndex, mask, instanceShaderBindingTableRecordOffset, flags, accelerationStructureReference );
}
#endif
@@ -3562,13 +3238,12 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( AccelerationStructureSRTMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( transformT0 == rhs.transformT0 ) && ( transformT1 == rhs.transformT1 ) &&
- ( instanceCustomIndex == rhs.instanceCustomIndex ) && ( mask == rhs.mask ) &&
- ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) &&
- ( flags == rhs.flags ) && ( accelerationStructureReference == rhs.accelerationStructureReference );
+ return ( transformT0 == rhs.transformT0 ) && ( transformT1 == rhs.transformT1 ) && ( instanceCustomIndex == rhs.instanceCustomIndex ) &&
+ ( mask == rhs.mask ) && ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) && ( flags == rhs.flags ) &&
+ ( accelerationStructureReference == rhs.accelerationStructureReference );
# endif
}
@@ -3587,55 +3262,45 @@ namespace VULKAN_HPP_NAMESPACE
VkGeometryInstanceFlagsKHR flags : 8;
uint64_t accelerationStructureReference = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV ) ==
- sizeof( VkAccelerationStructureSRTMotionInstanceNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV>::value,
- "AccelerationStructureSRTMotionInstanceNV is not nothrow_move_constructible!" );
union AccelerationStructureMotionInstanceDataNV
{
using NativeType = VkAccelerationStructureMotionInstanceDataNV;
#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV(
- VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR staticInstance_ = {} )
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV( VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR staticInstance_ = {} )
: staticInstance( staticInstance_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV(
- VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV matrixMotionInstance_ )
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV( VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV matrixMotionInstance_ )
: matrixMotionInstance( matrixMotionInstance_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV(
- VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV srtMotionInstance_ )
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV( VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV srtMotionInstance_ )
: srtMotionInstance( srtMotionInstance_ )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV & setStaticInstance(
- VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR const & staticInstance_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV &
+ setStaticInstance( VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR const & staticInstance_ ) VULKAN_HPP_NOEXCEPT
{
staticInstance = staticInstance_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV & setMatrixMotionInstance(
- VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV const & matrixMotionInstance_ )
- VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV &
+ setMatrixMotionInstance( VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV const & matrixMotionInstance_ ) VULKAN_HPP_NOEXCEPT
{
matrixMotionInstance = matrixMotionInstance_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV & setSrtMotionInstance(
- VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV const & srtMotionInstance_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV &
+ setSrtMotionInstance( VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV const & srtMotionInstance_ ) VULKAN_HPP_NOEXCEPT
{
srtMotionInstance = srtMotionInstance_;
return *this;
@@ -3669,29 +3334,26 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV(
- VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type_ =
- VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV::eStatic,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV::eStatic,
VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags_ = {},
VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV data_ = {} ) VULKAN_HPP_NOEXCEPT
: type( type_ )
, flags( flags_ )
, data( data_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV( AccelerationStructureMotionInstanceNV const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV( AccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureMotionInstanceNV( VkAccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : AccelerationStructureMotionInstanceNV(
- *reinterpret_cast<AccelerationStructureMotionInstanceNV const *>( &rhs ) )
- {}
+ : AccelerationStructureMotionInstanceNV( *reinterpret_cast<AccelerationStructureMotionInstanceNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- AccelerationStructureMotionInstanceNV &
- operator=( AccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ AccelerationStructureMotionInstanceNV & operator=( AccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AccelerationStructureMotionInstanceNV &
- operator=( VkAccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureMotionInstanceNV & operator=( VkAccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV const *>( &rhs );
return *this;
@@ -3699,38 +3361,38 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV &
- setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type_ ) VULKAN_HPP_NOEXCEPT
+ setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV &
- setFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+ setFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV &
- setData( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV const & data_ ) VULKAN_HPP_NOEXCEPT
+ setData( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV const & data_ ) VULKAN_HPP_NOEXCEPT
{
data = data_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAccelerationStructureMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureMotionInstanceNV *>( this );
}
- explicit operator VkAccelerationStructureMotionInstanceNV &() VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureMotionInstanceNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureMotionInstanceNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -3745,46 +3407,337 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type =
- VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV::eStatic;
+ VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type = VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV::eStatic;
VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV data = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV ) ==
- sizeof( VkAccelerationStructureMotionInstanceNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV>::value,
- "AccelerationStructureMotionInstanceNV is not nothrow_move_constructible!" );
+
+ struct MicromapUsageEXT
+ {
+ using NativeType = VkMicromapUsageEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MicromapUsageEXT( uint32_t count_ = {}, uint32_t subdivisionLevel_ = {}, uint32_t format_ = {} ) VULKAN_HPP_NOEXCEPT
+ : count( count_ )
+ , subdivisionLevel( subdivisionLevel_ )
+ , format( format_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR MicromapUsageEXT( MicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MicromapUsageEXT( VkMicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MicromapUsageEXT( *reinterpret_cast<MicromapUsageEXT const *>( &rhs ) ) {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ MicromapUsageEXT & operator=( MicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MicromapUsageEXT & operator=( VkMicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapUsageEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT & setCount( uint32_t count_ ) VULKAN_HPP_NOEXCEPT
+ {
+ count = count_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT & setSubdivisionLevel( uint32_t subdivisionLevel_ ) VULKAN_HPP_NOEXCEPT
+ {
+ subdivisionLevel = subdivisionLevel_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT & setFormat( uint32_t format_ ) VULKAN_HPP_NOEXCEPT
+ {
+ format = format_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkMicromapUsageEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkMicromapUsageEXT *>( this );
+ }
+
+ operator VkMicromapUsageEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMicromapUsageEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( count, subdivisionLevel, format );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( MicromapUsageEXT const & ) const = default;
+#else
+ bool operator==( MicromapUsageEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( count == rhs.count ) && ( subdivisionLevel == rhs.subdivisionLevel ) && ( format == rhs.format );
+# endif
+ }
+
+ bool operator!=( MicromapUsageEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ uint32_t count = {};
+ uint32_t subdivisionLevel = {};
+ uint32_t format = {};
+ };
+
+ struct AccelerationStructureTrianglesOpacityMicromapEXT
+ {
+ using NativeType = VkAccelerationStructureTrianglesOpacityMicromapEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14
+ AccelerationStructureTrianglesOpacityMicromapEXT( VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16,
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ = {},
+ uint32_t baseTriangle_ = {},
+ uint32_t usageCountsCount_ = {},
+ const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ = {},
+ const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ = {},
+ VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , indexType( indexType_ )
+ , indexBuffer( indexBuffer_ )
+ , indexStride( indexStride_ )
+ , baseTriangle( baseTriangle_ )
+ , usageCountsCount( usageCountsCount_ )
+ , pUsageCounts( pUsageCounts_ )
+ , ppUsageCounts( ppUsageCounts_ )
+ , micromap( micromap_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR_14
+ AccelerationStructureTrianglesOpacityMicromapEXT( AccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ AccelerationStructureTrianglesOpacityMicromapEXT( VkAccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : AccelerationStructureTrianglesOpacityMicromapEXT( *reinterpret_cast<AccelerationStructureTrianglesOpacityMicromapEXT const *>( &rhs ) )
+ {
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ AccelerationStructureTrianglesOpacityMicromapEXT(
+ VULKAN_HPP_NAMESPACE::IndexType indexType_,
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer_,
+ VULKAN_HPP_NAMESPACE::DeviceSize indexStride_,
+ uint32_t baseTriangle_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT> const & usageCounts_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const> const & pUsageCounts_ = {},
+ VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ = {},
+ void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , indexType( indexType_ )
+ , indexBuffer( indexBuffer_ )
+ , indexStride( indexStride_ )
+ , baseTriangle( baseTriangle_ )
+ , usageCountsCount( static_cast<uint32_t>( !usageCounts_.empty() ? usageCounts_.size() : pUsageCounts_.size() ) )
+ , pUsageCounts( usageCounts_.data() )
+ , ppUsageCounts( pUsageCounts_.data() )
+ , micromap( micromap_ )
+ {
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( ( !usageCounts_.empty() + !pUsageCounts_.empty() ) <= 1 );
+# else
+ if ( 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() ) )
+ {
+ throw LogicError(
+ VULKAN_HPP_NAMESPACE_STRING
+ "::AccelerationStructureTrianglesOpacityMicromapEXT::AccelerationStructureTrianglesOpacityMicromapEXT: 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() )" );
+ }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ AccelerationStructureTrianglesOpacityMicromapEXT & operator=( AccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ AccelerationStructureTrianglesOpacityMicromapEXT & operator=( VkAccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
+ {
+ indexType = indexType_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT &
+ setIndexBuffer( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexBuffer_ ) VULKAN_HPP_NOEXCEPT
+ {
+ indexBuffer = indexBuffer_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT &
+ setIndexStride( VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ ) VULKAN_HPP_NOEXCEPT
+ {
+ indexStride = indexStride_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setBaseTriangle( uint32_t baseTriangle_ ) VULKAN_HPP_NOEXCEPT
+ {
+ baseTriangle = baseTriangle_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setUsageCountsCount( uint32_t usageCountsCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ usageCountsCount = usageCountsCount_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT &
+ setPUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pUsageCounts = pUsageCounts_;
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ AccelerationStructureTrianglesOpacityMicromapEXT &
+ setUsageCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT> const & usageCounts_ ) VULKAN_HPP_NOEXCEPT
+ {
+ usageCountsCount = static_cast<uint32_t>( usageCounts_.size() );
+ pUsageCounts = usageCounts_.data();
+ return *this;
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT &
+ setPpUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ ) VULKAN_HPP_NOEXCEPT
+ {
+ ppUsageCounts = ppUsageCounts_;
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ AccelerationStructureTrianglesOpacityMicromapEXT & setPUsageCounts(
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const> const & pUsageCounts_ ) VULKAN_HPP_NOEXCEPT
+ {
+ usageCountsCount = static_cast<uint32_t>( pUsageCounts_.size() );
+ ppUsageCounts = pUsageCounts_.data();
+ return *this;
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setMicromap( VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ ) VULKAN_HPP_NOEXCEPT
+ {
+ micromap = micromap_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkAccelerationStructureTrianglesOpacityMicromapEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkAccelerationStructureTrianglesOpacityMicromapEXT *>( this );
+ }
+
+ operator VkAccelerationStructureTrianglesOpacityMicromapEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkAccelerationStructureTrianglesOpacityMicromapEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ VULKAN_HPP_NAMESPACE::IndexType const &,
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &,
+ VULKAN_HPP_NAMESPACE::DeviceSize const &,
+ uint32_t const &,
+ uint32_t const &,
+ const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const &,
+ const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * const &,
+ VULKAN_HPP_NAMESPACE::MicromapEXT const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, indexType, indexBuffer, indexStride, baseTriangle, usageCountsCount, pUsageCounts, ppUsageCounts, micromap );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize indexStride = {};
+ uint32_t baseTriangle = {};
+ uint32_t usageCountsCount = {};
+ const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts = {};
+ const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts = {};
+ VULKAN_HPP_NAMESPACE::MicromapEXT micromap = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT>
+ {
+ using Type = AccelerationStructureTrianglesOpacityMicromapEXT;
+ };
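The hunks above swap the old GCC-version guard for an opt-in VULKAN_HPP_USE_REFLECT macro and add the VK_EXT_opacity_micromap wrapper structs. A minimal, illustrative sketch of how the new wrappers are used (not part of this diff; assumes <vulkan/vulkan.hpp>, the default "vk" namespace, and that struct constructors/setters and enhanced mode are enabled):

    #include <array>
    #include <vulkan/vulkan.hpp>

    void buildOpacityMicromapChain()
    {
      // One usage record: count, subdivisionLevel and format, all plain uint32_t per the struct above.
      std::array<vk::MicromapUsageEXT, 1> usages = { vk::MicromapUsageEXT( /*count*/ 64, /*subdivisionLevel*/ 3, /*format*/ 0 ) };

      // Point the triangles-opacity-micromap struct at the usage records via the generated setters.
      vk::AccelerationStructureTrianglesOpacityMicromapEXT triangleOmm{};
      triangleOmm.setIndexType( vk::IndexType::eUint16 ).setUsageCounts( usages );
      (void)triangleOmm;

    #if defined( VULKAN_HPP_USE_REFLECT )
      // With the new opt-in macro, reflect() returns a std::tie over all members,
      // and operator== compares through it instead of the explicit member-wise form.
      vk::MicromapUsageEXT copy = usages[0];
      bool same = ( usages[0].reflect() == copy.reflect() );
      (void)same;
    #endif
    }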
struct AccelerationStructureVersionInfoKHR
{
using NativeType = VkAccelerationStructureVersionInfoKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eAccelerationStructureVersionInfoKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureVersionInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR( const uint8_t * pVersionData_ = {} ) VULKAN_HPP_NOEXCEPT
- : pVersionData( pVersionData_ )
- {}
+ VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR( const uint8_t * pVersionData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pVersionData( pVersionData_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR( AccelerationStructureVersionInfoKHR const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR( AccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AccelerationStructureVersionInfoKHR( VkAccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureVersionInfoKHR( *reinterpret_cast<AccelerationStructureVersionInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- AccelerationStructureVersionInfoKHR &
- operator=( AccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ AccelerationStructureVersionInfoKHR & operator=( AccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AccelerationStructureVersionInfoKHR &
- operator=( VkAccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureVersionInfoKHR & operator=( VkAccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR const *>( &rhs );
return *this;
@@ -3797,25 +3750,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AccelerationStructureVersionInfoKHR &
- setPVersionData( const uint8_t * pVersionData_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureVersionInfoKHR & setPVersionData( const uint8_t * pVersionData_ ) VULKAN_HPP_NOEXCEPT
{
pVersionData = pVersionData_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAccelerationStructureVersionInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureVersionInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( this );
}
- explicit operator VkAccelerationStructureVersionInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkAccelerationStructureVersionInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureVersionInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -3832,7 +3784,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( AccelerationStructureVersionInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pVersionData == rhs.pVersionData );
@@ -3850,14 +3802,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
const uint8_t * pVersionData = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR ) ==
- sizeof( VkAccelerationStructureVersionInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR>::value,
- "AccelerationStructureVersionInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eAccelerationStructureVersionInfoKHR>
@@ -3873,23 +3817,27 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireNextImageInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {},
- uint64_t timeout_ = {},
- VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
- VULKAN_HPP_NAMESPACE::Fence fence_ = {},
- uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT
- : swapchain( swapchain_ )
+ VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {},
+ uint64_t timeout_ = {},
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
+ VULKAN_HPP_NAMESPACE::Fence fence_ = {},
+ uint32_t deviceMask_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , swapchain( swapchain_ )
, timeout( timeout_ )
, semaphore( semaphore_ )
, fence( fence_ )
, deviceMask( deviceMask_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AcquireNextImageInfoKHR( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AcquireNextImageInfoKHR( *reinterpret_cast<AcquireNextImageInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
AcquireNextImageInfoKHR & operator=( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -3907,8 +3855,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR &
- setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
{
swapchain = swapchain_;
return *this;
@@ -3920,8 +3867,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR &
- setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
{
semaphore = semaphore_;
return *this;
@@ -3940,17 +3886,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAcquireNextImageInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAcquireNextImageInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAcquireNextImageInfoKHR *>( this );
}
- explicit operator VkAcquireNextImageInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkAcquireNextImageInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAcquireNextImageInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -3973,12 +3919,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( AcquireNextImageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) &&
- ( timeout == rhs.timeout ) && ( semaphore == rhs.semaphore ) && ( fence == rhs.fence ) &&
- ( deviceMask == rhs.deviceMask );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) && ( timeout == rhs.timeout ) &&
+ ( semaphore == rhs.semaphore ) && ( fence == rhs.fence ) && ( deviceMask == rhs.deviceMask );
# endif
}
@@ -3997,13 +3942,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Fence fence = {};
uint32_t deviceMask = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR ) ==
- sizeof( VkAcquireNextImageInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR>::value,
- "AcquireNextImageInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eAcquireNextImageInfoKHR>
@@ -4019,18 +3957,21 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireProfilingLockInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ = {},
- uint64_t timeout_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ = {},
+ uint64_t timeout_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, timeout( timeout_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- AcquireProfilingLockInfoKHR( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AcquireProfilingLockInfoKHR( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AcquireProfilingLockInfoKHR( *reinterpret_cast<AcquireProfilingLockInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
AcquireProfilingLockInfoKHR & operator=( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -4048,8 +3989,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR &
- setFlags( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -4062,24 +4002,21 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAcquireProfilingLockInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAcquireProfilingLockInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( this );
}
- explicit operator VkAcquireProfilingLockInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkAcquireProfilingLockInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAcquireProfilingLockInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR const &,
- uint64_t const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR const &, uint64_t const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -4092,7 +4029,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( AcquireProfilingLockInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( timeout == rhs.timeout );
@@ -4111,14 +4048,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags = {};
uint64_t timeout = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR ) ==
- sizeof( VkAcquireProfilingLockInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR>::value,
- "AcquireProfilingLockInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eAcquireProfilingLockInfoKHR>
@@ -4136,20 +4065,21 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkReallocationFunction pfnReallocation_ = {},
PFN_vkFreeFunction pfnFree_ = {},
PFN_vkInternalAllocationNotification pfnInternalAllocation_ = {},
- PFN_vkInternalFreeNotification pfnInternalFree_ = {} ) VULKAN_HPP_NOEXCEPT
+ PFN_vkInternalFreeNotification pfnInternalFree_ = {} ) VULKAN_HPP_NOEXCEPT
: pUserData( pUserData_ )
, pfnAllocation( pfnAllocation_ )
, pfnReallocation( pfnReallocation_ )
, pfnFree( pfnFree_ )
, pfnInternalAllocation( pfnInternalAllocation_ )
, pfnInternalFree( pfnInternalFree_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR AllocationCallbacks( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AllocationCallbacks( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT
- : AllocationCallbacks( *reinterpret_cast<AllocationCallbacks const *>( &rhs ) )
- {}
+ AllocationCallbacks( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT : AllocationCallbacks( *reinterpret_cast<AllocationCallbacks const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
AllocationCallbacks & operator=( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -4167,15 +4097,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks &
- setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ ) VULKAN_HPP_NOEXCEPT
{
pfnAllocation = pfnAllocation_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks &
- setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ ) VULKAN_HPP_NOEXCEPT
{
pfnReallocation = pfnReallocation_;
return *this;
@@ -4187,32 +4115,30 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks &
- setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT
{
pfnInternalAllocation = pfnInternalAllocation_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks &
- setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ ) VULKAN_HPP_NOEXCEPT
{
pfnInternalFree = pfnInternalFree_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAllocationCallbacks const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAllocationCallbacks const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAllocationCallbacks *>( this );
}
- explicit operator VkAllocationCallbacks &() VULKAN_HPP_NOEXCEPT
+ operator VkAllocationCallbacks &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAllocationCallbacks *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -4229,25 +4155,20 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif
-#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( AllocationCallbacks const & ) const = default;
-#else
bool operator==( AllocationCallbacks const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
-# else
- return ( pUserData == rhs.pUserData ) && ( pfnAllocation == rhs.pfnAllocation ) &&
- ( pfnReallocation == rhs.pfnReallocation ) && ( pfnFree == rhs.pfnFree ) &&
- ( pfnInternalAllocation == rhs.pfnInternalAllocation ) && ( pfnInternalFree == rhs.pfnInternalFree );
-# endif
+#else
+ return ( pUserData == rhs.pUserData ) && ( pfnAllocation == rhs.pfnAllocation ) && ( pfnReallocation == rhs.pfnReallocation ) &&
+ ( pfnFree == rhs.pfnFree ) && ( pfnInternalAllocation == rhs.pfnInternalAllocation ) && ( pfnInternalFree == rhs.pfnInternalFree );
+#endif
}
bool operator!=( AllocationCallbacks const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
-#endif
public:
void * pUserData = {};
@@ -4257,35 +4178,132 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkInternalAllocationNotification pfnInternalAllocation = {};
PFN_vkInternalFreeNotification pfnInternalFree = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AllocationCallbacks ) == sizeof( VkAllocationCallbacks ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AllocationCallbacks>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AllocationCallbacks>::value,
- "AllocationCallbacks is not nothrow_move_constructible!" );
+
+ struct AmigoProfilingSubmitInfoSEC
+ {
+ using NativeType = VkAmigoProfilingSubmitInfoSEC;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAmigoProfilingSubmitInfoSEC;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR
+ AmigoProfilingSubmitInfoSEC( uint64_t firstDrawTimestamp_ = {}, uint64_t swapBufferTimestamp_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , firstDrawTimestamp( firstDrawTimestamp_ )
+ , swapBufferTimestamp( swapBufferTimestamp_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR AmigoProfilingSubmitInfoSEC( AmigoProfilingSubmitInfoSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ AmigoProfilingSubmitInfoSEC( VkAmigoProfilingSubmitInfoSEC const & rhs ) VULKAN_HPP_NOEXCEPT
+ : AmigoProfilingSubmitInfoSEC( *reinterpret_cast<AmigoProfilingSubmitInfoSEC const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ AmigoProfilingSubmitInfoSEC & operator=( AmigoProfilingSubmitInfoSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ AmigoProfilingSubmitInfoSEC & operator=( VkAmigoProfilingSubmitInfoSEC const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AmigoProfilingSubmitInfoSEC const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 AmigoProfilingSubmitInfoSEC & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 AmigoProfilingSubmitInfoSEC & setFirstDrawTimestamp( uint64_t firstDrawTimestamp_ ) VULKAN_HPP_NOEXCEPT
+ {
+ firstDrawTimestamp = firstDrawTimestamp_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 AmigoProfilingSubmitInfoSEC & setSwapBufferTimestamp( uint64_t swapBufferTimestamp_ ) VULKAN_HPP_NOEXCEPT
+ {
+ swapBufferTimestamp = swapBufferTimestamp_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkAmigoProfilingSubmitInfoSEC const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkAmigoProfilingSubmitInfoSEC *>( this );
+ }
+
+ operator VkAmigoProfilingSubmitInfoSEC &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkAmigoProfilingSubmitInfoSEC *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &, uint64_t const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, firstDrawTimestamp, swapBufferTimestamp );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( AmigoProfilingSubmitInfoSEC const & ) const = default;
+#else
+ bool operator==( AmigoProfilingSubmitInfoSEC const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( firstDrawTimestamp == rhs.firstDrawTimestamp ) &&
+ ( swapBufferTimestamp == rhs.swapBufferTimestamp );
+# endif
+ }
+
+ bool operator!=( AmigoProfilingSubmitInfoSEC const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAmigoProfilingSubmitInfoSEC;
+ const void * pNext = {};
+ uint64_t firstDrawTimestamp = {};
+ uint64_t swapBufferTimestamp = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eAmigoProfilingSubmitInfoSEC>
+ {
+ using Type = AmigoProfilingSubmitInfoSEC;
+ };
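Likewise, a brief hypothetical sketch for the newly added AmigoProfilingSubmitInfoSEC wrapper, chained into a submit through pNext like any other extension struct (illustrative only; assumes <vulkan/vulkan.hpp> and that struct setters are enabled):

    #include <vulkan/vulkan.hpp>

    // Attach the SEC profiling timestamps to a submit by linking the struct into pNext.
    void attachAmigoProfiling( vk::SubmitInfo & submitInfo, vk::AmigoProfilingSubmitInfoSEC & amigoInfo, uint64_t firstDraw, uint64_t swapBuffer )
    {
      amigoInfo.setFirstDrawTimestamp( firstDraw ).setSwapBufferTimestamp( swapBuffer );
      submitInfo.setPNext( &amigoInfo );
    }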
struct ComponentMapping
{
using NativeType = VkComponentMapping;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- ComponentMapping( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity,
- VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity,
- VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity,
- VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity )
- VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ComponentMapping( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity,
+ VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity,
+ VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity,
+ VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity ) VULKAN_HPP_NOEXCEPT
: r( r_ )
, g( g_ )
, b( b_ )
, a( a_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ComponentMapping( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ComponentMapping( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT
- : ComponentMapping( *reinterpret_cast<ComponentMapping const *>( &rhs ) )
- {}
+ ComponentMapping( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT : ComponentMapping( *reinterpret_cast<ComponentMapping const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ComponentMapping & operator=( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -4322,17 +4340,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkComponentMapping const &() const VULKAN_HPP_NOEXCEPT
+ operator VkComponentMapping const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkComponentMapping *>( this );
}
- explicit operator VkComponentMapping &() VULKAN_HPP_NOEXCEPT
+ operator VkComponentMapping &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkComponentMapping *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -4352,7 +4370,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ComponentMapping const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( r == rhs.r ) && ( g == rhs.g ) && ( b == rhs.b ) && ( a == rhs.a );
@@ -4371,12 +4389,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ComponentSwizzle b = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
VULKAN_HPP_NAMESPACE::ComponentSwizzle a = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ComponentMapping ) == sizeof( VkComponentMapping ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ComponentMapping>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ComponentMapping>::value,
- "ComponentMapping is not nothrow_move_constructible!" );
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
struct AndroidHardwareBufferFormatProperties2ANDROID
@@ -4384,22 +4396,21 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkAndroidHardwareBufferFormatProperties2ANDROID;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eAndroidHardwareBufferFormatProperties2ANDROID;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatProperties2ANDROID;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatProperties2ANDROID(
- VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- uint64_t externalFormat_ = {},
- VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 formatFeatures_ = {},
+ VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ uint64_t externalFormat_ = {},
+ VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 formatFeatures_ = {},
VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {},
- VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ =
- VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity,
- VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull,
- VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
- VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ =
- VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven ) VULKAN_HPP_NOEXCEPT
- : format( format_ )
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity,
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull,
+ VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
+ VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , format( format_ )
, externalFormat( externalFormat_ )
, formatFeatures( formatFeatures_ )
, samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ )
@@ -4407,39 +4418,37 @@ namespace VULKAN_HPP_NAMESPACE
, suggestedYcbcrRange( suggestedYcbcrRange_ )
, suggestedXChromaOffset( suggestedXChromaOffset_ )
, suggestedYChromaOffset( suggestedYChromaOffset_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatProperties2ANDROID(
- AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ AndroidHardwareBufferFormatProperties2ANDROID( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AndroidHardwareBufferFormatProperties2ANDROID( VkAndroidHardwareBufferFormatProperties2ANDROID const & rhs )
- VULKAN_HPP_NOEXCEPT
- : AndroidHardwareBufferFormatProperties2ANDROID(
- *reinterpret_cast<AndroidHardwareBufferFormatProperties2ANDROID const *>( &rhs ) )
- {}
+ AndroidHardwareBufferFormatProperties2ANDROID( VkAndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ : AndroidHardwareBufferFormatProperties2ANDROID( *reinterpret_cast<AndroidHardwareBufferFormatProperties2ANDROID const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- AndroidHardwareBufferFormatProperties2ANDROID &
- operator=( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ AndroidHardwareBufferFormatProperties2ANDROID & operator=( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AndroidHardwareBufferFormatProperties2ANDROID &
- operator=( VkAndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ AndroidHardwareBufferFormatProperties2ANDROID & operator=( VkAndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID const *>( &rhs );
return *this;
}
- explicit operator VkAndroidHardwareBufferFormatProperties2ANDROID const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAndroidHardwareBufferFormatProperties2ANDROID const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAndroidHardwareBufferFormatProperties2ANDROID *>( this );
}
- explicit operator VkAndroidHardwareBufferFormatProperties2ANDROID &() VULKAN_HPP_NOEXCEPT
+ operator VkAndroidHardwareBufferFormatProperties2ANDROID &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAndroidHardwareBufferFormatProperties2ANDROID *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -4474,15 +4483,13 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) &&
- ( externalFormat == rhs.externalFormat ) && ( formatFeatures == rhs.formatFeatures ) &&
- ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( externalFormat == rhs.externalFormat ) &&
+ ( formatFeatures == rhs.formatFeatures ) && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) &&
( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) &&
- ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) &&
- ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
+ ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
# endif
}
@@ -4493,27 +4500,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatProperties2ANDROID;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
- uint64_t externalFormat = {};
- VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 formatFeatures = {};
- VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {};
- VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel =
- VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
- VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
- VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
- VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatProperties2ANDROID;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ uint64_t externalFormat = {};
+ VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 formatFeatures = {};
+ VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {};
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
+ VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+ VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID ) ==
- sizeof( VkAndroidHardwareBufferFormatProperties2ANDROID ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID>::value,
- "AndroidHardwareBufferFormatProperties2ANDROID is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eAndroidHardwareBufferFormatProperties2ANDROID>
@@ -4528,22 +4525,21 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkAndroidHardwareBufferFormatPropertiesANDROID;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eAndroidHardwareBufferFormatPropertiesANDROID;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID(
- VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- uint64_t externalFormat_ = {},
- VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {},
+ VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ uint64_t externalFormat_ = {},
+ VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {},
VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {},
- VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ =
- VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity,
- VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull,
- VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
- VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ =
- VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven ) VULKAN_HPP_NOEXCEPT
- : format( format_ )
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity,
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull,
+ VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
+ VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , format( format_ )
, externalFormat( externalFormat_ )
, formatFeatures( formatFeatures_ )
, samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ )
@@ -4551,39 +4547,36 @@ namespace VULKAN_HPP_NAMESPACE
, suggestedYcbcrRange( suggestedYcbcrRange_ )
, suggestedXChromaOffset( suggestedXChromaOffset_ )
, suggestedYChromaOffset( suggestedYChromaOffset_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID(
- AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AndroidHardwareBufferFormatPropertiesANDROID( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs )
- VULKAN_HPP_NOEXCEPT
- : AndroidHardwareBufferFormatPropertiesANDROID(
- *reinterpret_cast<AndroidHardwareBufferFormatPropertiesANDROID const *>( &rhs ) )
- {}
+ AndroidHardwareBufferFormatPropertiesANDROID( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ : AndroidHardwareBufferFormatPropertiesANDROID( *reinterpret_cast<AndroidHardwareBufferFormatPropertiesANDROID const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- AndroidHardwareBufferFormatPropertiesANDROID &
- operator=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ AndroidHardwareBufferFormatPropertiesANDROID & operator=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AndroidHardwareBufferFormatPropertiesANDROID &
- operator=( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ AndroidHardwareBufferFormatPropertiesANDROID & operator=( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID const *>( &rhs );
return *this;
}
- explicit operator VkAndroidHardwareBufferFormatPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAndroidHardwareBufferFormatPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAndroidHardwareBufferFormatPropertiesANDROID *>( this );
}
- explicit operator VkAndroidHardwareBufferFormatPropertiesANDROID &() VULKAN_HPP_NOEXCEPT
+ operator VkAndroidHardwareBufferFormatPropertiesANDROID &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAndroidHardwareBufferFormatPropertiesANDROID *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -4618,15 +4611,13 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) &&
- ( externalFormat == rhs.externalFormat ) && ( formatFeatures == rhs.formatFeatures ) &&
- ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( externalFormat == rhs.externalFormat ) &&
+ ( formatFeatures == rhs.formatFeatures ) && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) &&
( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) &&
- ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) &&
- ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
+ ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
# endif
}
@@ -4637,27 +4628,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
- uint64_t externalFormat = {};
- VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {};
- VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {};
- VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel =
- VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
- VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
- VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
- VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ uint64_t externalFormat = {};
+ VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {};
+ VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {};
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
+ VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+ VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID ) ==
- sizeof( VkAndroidHardwareBufferFormatPropertiesANDROID ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID>::value,
- "AndroidHardwareBufferFormatPropertiesANDROID is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eAndroidHardwareBufferFormatPropertiesANDROID>
@@ -4672,53 +4653,49 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkAndroidHardwareBufferPropertiesANDROID;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eAndroidHardwareBufferPropertiesANDROID;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferPropertiesANDROID;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {},
- uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
- : allocationSize( allocationSize_ )
+ uint32_t memoryTypeBits_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , allocationSize( allocationSize_ )
, memoryTypeBits( memoryTypeBits_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( AndroidHardwareBufferPropertiesANDROID const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AndroidHardwareBufferPropertiesANDROID( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
- : AndroidHardwareBufferPropertiesANDROID(
- *reinterpret_cast<AndroidHardwareBufferPropertiesANDROID const *>( &rhs ) )
- {}
+ : AndroidHardwareBufferPropertiesANDROID( *reinterpret_cast<AndroidHardwareBufferPropertiesANDROID const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- AndroidHardwareBufferPropertiesANDROID &
- operator=( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ AndroidHardwareBufferPropertiesANDROID & operator=( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AndroidHardwareBufferPropertiesANDROID &
- operator=( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ AndroidHardwareBufferPropertiesANDROID & operator=( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID const *>( &rhs );
return *this;
}
- explicit operator VkAndroidHardwareBufferPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAndroidHardwareBufferPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAndroidHardwareBufferPropertiesANDROID *>( this );
}
- explicit operator VkAndroidHardwareBufferPropertiesANDROID &() VULKAN_HPP_NOEXCEPT
+ operator VkAndroidHardwareBufferPropertiesANDROID &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::DeviceSize const &,
- uint32_t const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -4731,11 +4708,10 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( AndroidHardwareBufferPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) &&
- ( memoryTypeBits == rhs.memoryTypeBits );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) && ( memoryTypeBits == rhs.memoryTypeBits );
# endif
}
@@ -4751,15 +4727,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {};
uint32_t memoryTypeBits = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID ) ==
- sizeof( VkAndroidHardwareBufferPropertiesANDROID ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::value,
- "AndroidHardwareBufferPropertiesANDROID is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eAndroidHardwareBufferPropertiesANDROID>
@@ -4774,25 +4741,24 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkAndroidHardwareBufferUsageANDROID;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eAndroidHardwareBufferUsageANDROID;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferUsageANDROID;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- AndroidHardwareBufferUsageANDROID( uint64_t androidHardwareBufferUsage_ = {} ) VULKAN_HPP_NOEXCEPT
- : androidHardwareBufferUsage( androidHardwareBufferUsage_ )
- {}
+ VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID( uint64_t androidHardwareBufferUsage_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , androidHardwareBufferUsage( androidHardwareBufferUsage_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- AndroidHardwareBufferUsageANDROID( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AndroidHardwareBufferUsageANDROID( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
: AndroidHardwareBufferUsageANDROID( *reinterpret_cast<AndroidHardwareBufferUsageANDROID const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- AndroidHardwareBufferUsageANDROID &
- operator=( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ AndroidHardwareBufferUsageANDROID & operator=( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AndroidHardwareBufferUsageANDROID & operator=( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -4800,17 +4766,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkAndroidHardwareBufferUsageANDROID const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAndroidHardwareBufferUsageANDROID const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAndroidHardwareBufferUsageANDROID *>( this );
}
- explicit operator VkAndroidHardwareBufferUsageANDROID &() VULKAN_HPP_NOEXCEPT
+ operator VkAndroidHardwareBufferUsageANDROID &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAndroidHardwareBufferUsageANDROID *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -4827,11 +4793,10 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( AndroidHardwareBufferUsageANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( androidHardwareBufferUsage == rhs.androidHardwareBufferUsage );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( androidHardwareBufferUsage == rhs.androidHardwareBufferUsage );
# endif
}
@@ -4846,14 +4811,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
uint64_t androidHardwareBufferUsage = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID ) ==
- sizeof( VkAndroidHardwareBufferUsageANDROID ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID>::value,
- "AndroidHardwareBufferUsageANDROID is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eAndroidHardwareBufferUsageANDROID>
@@ -4871,18 +4828,21 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidSurfaceCreateInfoKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ = {},
- struct ANativeWindow * window_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ = {},
+ struct ANativeWindow * window_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, window( window_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- AndroidSurfaceCreateInfoKHR( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AndroidSurfaceCreateInfoKHR( *reinterpret_cast<AndroidSurfaceCreateInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
AndroidSurfaceCreateInfoKHR & operator=( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -4900,32 +4860,30 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR &
- setFlags( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR &
- setWindow( struct ANativeWindow * window_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & setWindow( struct ANativeWindow * window_ ) VULKAN_HPP_NOEXCEPT
{
window = window_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAndroidSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAndroidSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( this );
}
- explicit operator VkAndroidSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkAndroidSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAndroidSurfaceCreateInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -4945,7 +4903,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( AndroidSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( window == rhs.window );
@@ -4964,14 +4922,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags = {};
struct ANativeWindow * window = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR ) ==
- sizeof( VkAndroidSurfaceCreateInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR>::value,
- "AndroidSurfaceCreateInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eAndroidSurfaceCreateInfoKHR>
@@ -4992,19 +4942,20 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t applicationVersion_ = {},
const char * pEngineName_ = {},
uint32_t engineVersion_ = {},
- uint32_t apiVersion_ = {} ) VULKAN_HPP_NOEXCEPT
- : pApplicationName( pApplicationName_ )
+ uint32_t apiVersion_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pApplicationName( pApplicationName_ )
, applicationVersion( applicationVersion_ )
, pEngineName( pEngineName_ )
, engineVersion( engineVersion_ )
, apiVersion( apiVersion_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ApplicationInfo( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ApplicationInfo( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : ApplicationInfo( *reinterpret_cast<ApplicationInfo const *>( &rhs ) )
- {}
+ ApplicationInfo( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ApplicationInfo( *reinterpret_cast<ApplicationInfo const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ApplicationInfo & operator=( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -5053,17 +5004,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkApplicationInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkApplicationInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkApplicationInfo *>( this );
}
- explicit operator VkApplicationInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkApplicationInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkApplicationInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -5108,10 +5059,8 @@ namespace VULKAN_HPP_NAMESPACE
bool operator==( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( ( pApplicationName == rhs.pApplicationName ) ||
- ( strcmp( pApplicationName, rhs.pApplicationName ) == 0 ) ) &&
- ( applicationVersion == rhs.applicationVersion ) &&
- ( ( pEngineName == rhs.pEngineName ) || ( strcmp( pEngineName, rhs.pEngineName ) == 0 ) ) &&
+ ( ( pApplicationName == rhs.pApplicationName ) || ( strcmp( pApplicationName, rhs.pApplicationName ) == 0 ) ) &&
+ ( applicationVersion == rhs.applicationVersion ) && ( ( pEngineName == rhs.pEngineName ) || ( strcmp( pEngineName, rhs.pEngineName ) == 0 ) ) &&
( engineVersion == rhs.engineVersion ) && ( apiVersion == rhs.apiVersion );
}
@@ -5129,12 +5078,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t engineVersion = {};
uint32_t apiVersion = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ApplicationInfo ) == sizeof( VkApplicationInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ApplicationInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ApplicationInfo>::value,
- "ApplicationInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eApplicationInfo>
@@ -5147,17 +5090,16 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkAttachmentDescription;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR AttachmentDescription(
- VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
- VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
- VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
- VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
- VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
- VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
- VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined )
- VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR
+ AttachmentDescription( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+ VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
+ VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
+ VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
+ VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
+ VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
: flags( flags_ )
, format( format_ )
, samples( samples_ )
@@ -5167,13 +5109,15 @@ namespace VULKAN_HPP_NAMESPACE
, stencilStoreOp( stencilStoreOp_ )
, initialLayout( initialLayout_ )
, finalLayout( finalLayout_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR AttachmentDescription( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AttachmentDescription( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT
: AttachmentDescription( *reinterpret_cast<AttachmentDescription const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
AttachmentDescription & operator=( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -5185,81 +5129,72 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 AttachmentDescription &
- setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AttachmentDescription &
- setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AttachmentDescription &
- setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
{
samples = samples_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AttachmentDescription &
- setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
{
loadOp = loadOp_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AttachmentDescription &
- setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
{
storeOp = storeOp_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AttachmentDescription &
- setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT
{
stencilLoadOp = stencilLoadOp_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AttachmentDescription &
- setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT
{
stencilStoreOp = stencilStoreOp_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AttachmentDescription &
- setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
{
initialLayout = initialLayout_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AttachmentDescription &
- setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT
{
finalLayout = finalLayout_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAttachmentDescription const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAttachmentDescription const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAttachmentDescription *>( this );
}
- explicit operator VkAttachmentDescription &() VULKAN_HPP_NOEXCEPT
+ operator VkAttachmentDescription &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAttachmentDescription *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -5275,8 +5210,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- flags, format, samples, loadOp, storeOp, stencilLoadOp, stencilStoreOp, initialLayout, finalLayout );
+ return std::tie( flags, format, samples, loadOp, storeOp, stencilLoadOp, stencilStoreOp, initialLayout, finalLayout );
}
#endif
@@ -5285,12 +5219,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( AttachmentDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( flags == rhs.flags ) && ( format == rhs.format ) && ( samples == rhs.samples ) &&
- ( loadOp == rhs.loadOp ) && ( storeOp == rhs.storeOp ) && ( stencilLoadOp == rhs.stencilLoadOp ) &&
- ( stencilStoreOp == rhs.stencilStoreOp ) && ( initialLayout == rhs.initialLayout ) &&
+ return ( flags == rhs.flags ) && ( format == rhs.format ) && ( samples == rhs.samples ) && ( loadOp == rhs.loadOp ) && ( storeOp == rhs.storeOp ) &&
+ ( stencilLoadOp == rhs.stencilLoadOp ) && ( stencilStoreOp == rhs.stencilStoreOp ) && ( initialLayout == rhs.initialLayout ) &&
( finalLayout == rhs.finalLayout );
# endif
}
@@ -5312,12 +5245,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescription ) == sizeof( VkAttachmentDescription ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentDescription>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentDescription>::value,
- "AttachmentDescription is not nothrow_move_constructible!" );
struct AttachmentDescription2
{
@@ -5327,18 +5254,18 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescription2;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR AttachmentDescription2(
- VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
- VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
- VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
- VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
- VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
- VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
- VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined )
- VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR AttachmentDescription2( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+ VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
+ VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
+ VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
+ VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
+ VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, format( format_ )
, samples( samples_ )
, loadOp( loadOp_ )
@@ -5347,13 +5274,15 @@ namespace VULKAN_HPP_NAMESPACE
, stencilStoreOp( stencilStoreOp_ )
, initialLayout( initialLayout_ )
, finalLayout( finalLayout_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR AttachmentDescription2( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AttachmentDescription2( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
: AttachmentDescription2( *reinterpret_cast<AttachmentDescription2 const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
AttachmentDescription2 & operator=( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -5371,81 +5300,72 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 &
- setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 &
- setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 &
- setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
{
samples = samples_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 &
- setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
{
loadOp = loadOp_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 &
- setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
{
storeOp = storeOp_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 &
- setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT
{
stencilLoadOp = stencilLoadOp_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 &
- setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT
{
stencilStoreOp = stencilStoreOp_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 &
- setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
{
initialLayout = initialLayout_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 &
- setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT
{
finalLayout = finalLayout_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAttachmentDescription2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAttachmentDescription2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAttachmentDescription2 *>( this );
}
- explicit operator VkAttachmentDescription2 &() VULKAN_HPP_NOEXCEPT
+ operator VkAttachmentDescription2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAttachmentDescription2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -5463,17 +5383,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- flags,
- format,
- samples,
- loadOp,
- storeOp,
- stencilLoadOp,
- stencilStoreOp,
- initialLayout,
- finalLayout );
+ return std::tie( sType, pNext, flags, format, samples, loadOp, storeOp, stencilLoadOp, stencilStoreOp, initialLayout, finalLayout );
}
#endif
@@ -5482,12 +5392,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( AttachmentDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( format == rhs.format ) &&
- ( samples == rhs.samples ) && ( loadOp == rhs.loadOp ) && ( storeOp == rhs.storeOp ) &&
- ( stencilLoadOp == rhs.stencilLoadOp ) && ( stencilStoreOp == rhs.stencilStoreOp ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( format == rhs.format ) && ( samples == rhs.samples ) &&
+ ( loadOp == rhs.loadOp ) && ( storeOp == rhs.storeOp ) && ( stencilLoadOp == rhs.stencilLoadOp ) && ( stencilStoreOp == rhs.stencilStoreOp ) &&
( initialLayout == rhs.initialLayout ) && ( finalLayout == rhs.finalLayout );
# endif
}
@@ -5511,13 +5420,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescription2 ) ==
- sizeof( VkAttachmentDescription2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentDescription2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentDescription2>::value,
- "AttachmentDescription2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eAttachmentDescription2>
@@ -5531,31 +5433,30 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkAttachmentDescriptionStencilLayout;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eAttachmentDescriptionStencilLayout;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescriptionStencilLayout;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout(
- VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
- VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined )
- VULKAN_HPP_NOEXCEPT
- : stencilInitialLayout( stencilInitialLayout_ )
+ VULKAN_HPP_CONSTEXPR
+ AttachmentDescriptionStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , stencilInitialLayout( stencilInitialLayout_ )
, stencilFinalLayout( stencilFinalLayout_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( AttachmentDescriptionStencilLayout const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AttachmentDescriptionStencilLayout( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
: AttachmentDescriptionStencilLayout( *reinterpret_cast<AttachmentDescriptionStencilLayout const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- AttachmentDescriptionStencilLayout &
- operator=( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ AttachmentDescriptionStencilLayout & operator=( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AttachmentDescriptionStencilLayout &
- operator=( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+ AttachmentDescriptionStencilLayout & operator=( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout const *>( &rhs );
return *this;
@@ -5569,38 +5470,36 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout &
- setStencilInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ ) VULKAN_HPP_NOEXCEPT
+ setStencilInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ ) VULKAN_HPP_NOEXCEPT
{
stencilInitialLayout = stencilInitialLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout &
- setStencilFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ ) VULKAN_HPP_NOEXCEPT
+ setStencilFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ ) VULKAN_HPP_NOEXCEPT
{
stencilFinalLayout = stencilFinalLayout_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAttachmentDescriptionStencilLayout const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAttachmentDescriptionStencilLayout const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAttachmentDescriptionStencilLayout *>( this );
}
- explicit operator VkAttachmentDescriptionStencilLayout &() VULKAN_HPP_NOEXCEPT
+ operator VkAttachmentDescriptionStencilLayout &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAttachmentDescriptionStencilLayout *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::ImageLayout const &,
- VULKAN_HPP_NAMESPACE::ImageLayout const &>
+ std::
+ tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -5613,7 +5512,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( AttachmentDescriptionStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilInitialLayout == rhs.stencilInitialLayout ) &&
@@ -5633,14 +5532,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout ) ==
- sizeof( VkAttachmentDescriptionStencilLayout ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout>::value,
- "AttachmentDescriptionStencilLayout is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eAttachmentDescriptionStencilLayout>
@@ -5654,18 +5545,18 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkAttachmentReference;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR AttachmentReference(
- uint32_t attachment_ = {},
- VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR AttachmentReference( uint32_t attachment_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
: attachment( attachment_ )
, layout( layout_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR AttachmentReference( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- AttachmentReference( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
- : AttachmentReference( *reinterpret_cast<AttachmentReference const *>( &rhs ) )
- {}
+ AttachmentReference( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT : AttachmentReference( *reinterpret_cast<AttachmentReference const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
AttachmentReference & operator=( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -5683,25 +5574,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AttachmentReference &
- setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AttachmentReference & setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
{
layout = layout_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAttachmentReference const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAttachmentReference const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAttachmentReference *>( this );
}
- explicit operator VkAttachmentReference &() VULKAN_HPP_NOEXCEPT
+ operator VkAttachmentReference &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAttachmentReference *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -5718,7 +5608,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( AttachmentReference const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( attachment == rhs.attachment ) && ( layout == rhs.layout );
@@ -5735,12 +5625,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t attachment = {};
VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentReference ) == sizeof( VkAttachmentReference ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentReference>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentReference>::value,
- "AttachmentReference is not nothrow_move_constructible!" );
struct AttachmentReference2
{
@@ -5750,20 +5634,23 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReference2;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- AttachmentReference2( uint32_t attachment_ = {},
- VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
- VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT
- : attachment( attachment_ )
+ VULKAN_HPP_CONSTEXPR AttachmentReference2( uint32_t attachment_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , attachment( attachment_ )
, layout( layout_ )
, aspectMask( aspectMask_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR AttachmentReference2( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AttachmentReference2( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT
: AttachmentReference2( *reinterpret_cast<AttachmentReference2 const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
AttachmentReference2 & operator=( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -5787,32 +5674,30 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 &
- setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
{
layout = layout_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 &
- setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
{
aspectMask = aspectMask_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAttachmentReference2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAttachmentReference2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAttachmentReference2 *>( this );
}
- explicit operator VkAttachmentReference2 &() VULKAN_HPP_NOEXCEPT
+ operator VkAttachmentReference2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAttachmentReference2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -5833,11 +5718,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( AttachmentReference2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachment == rhs.attachment ) &&
- ( layout == rhs.layout ) && ( aspectMask == rhs.aspectMask );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachment == rhs.attachment ) && ( layout == rhs.layout ) &&
+ ( aspectMask == rhs.aspectMask );
# endif
}
@@ -5854,12 +5739,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentReference2 ) == sizeof( VkAttachmentReference2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentReference2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentReference2>::value,
- "AttachmentReference2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eAttachmentReference2>
@@ -5873,25 +5752,25 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkAttachmentReferenceStencilLayout;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReferenceStencilLayout;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReferenceStencilLayout;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- AttachmentReferenceStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ =
- VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
- : stencilLayout( stencilLayout_ )
- {}
+ VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , stencilLayout( stencilLayout_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- AttachmentReferenceStencilLayout( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AttachmentReferenceStencilLayout( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
: AttachmentReferenceStencilLayout( *reinterpret_cast<AttachmentReferenceStencilLayout const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- AttachmentReferenceStencilLayout &
- operator=( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ AttachmentReferenceStencilLayout & operator=( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AttachmentReferenceStencilLayout & operator=( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -5906,25 +5785,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AttachmentReferenceStencilLayout &
- setStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AttachmentReferenceStencilLayout & setStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ ) VULKAN_HPP_NOEXCEPT
{
stencilLayout = stencilLayout_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAttachmentReferenceStencilLayout const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAttachmentReferenceStencilLayout const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAttachmentReferenceStencilLayout *>( this );
}
- explicit operator VkAttachmentReferenceStencilLayout &() VULKAN_HPP_NOEXCEPT
+ operator VkAttachmentReferenceStencilLayout &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAttachmentReferenceStencilLayout *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -5941,7 +5819,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( AttachmentReferenceStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilLayout == rhs.stencilLayout );
@@ -5959,14 +5837,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout ) ==
- sizeof( VkAttachmentReferenceStencilLayout ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout>::value,
- "AttachmentReferenceStencilLayout is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eAttachmentReferenceStencilLayout>
@@ -5986,29 +5856,33 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_CONSTEXPR
AttachmentSampleCountInfoAMD( uint32_t colorAttachmentCount_ = {},
const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * pColorAttachmentSamples_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ =
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1 ) VULKAN_HPP_NOEXCEPT
- : colorAttachmentCount( colorAttachmentCount_ )
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , colorAttachmentCount( colorAttachmentCount_ )
, pColorAttachmentSamples( pColorAttachmentSamples_ )
, depthStencilAttachmentSamples( depthStencilAttachmentSamples_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- AttachmentSampleCountInfoAMD( AttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR AttachmentSampleCountInfoAMD( AttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AttachmentSampleCountInfoAMD( VkAttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
: AttachmentSampleCountInfoAMD( *reinterpret_cast<AttachmentSampleCountInfoAMD const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- AttachmentSampleCountInfoAMD( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
- const VULKAN_HPP_NAMESPACE::SampleCountFlagBits> const & colorAttachmentSamples_,
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ =
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1 )
- : colorAttachmentCount( static_cast<uint32_t>( colorAttachmentSamples_.size() ) )
+ AttachmentSampleCountInfoAMD(
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleCountFlagBits> const & colorAttachmentSamples_,
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , colorAttachmentCount( static_cast<uint32_t>( colorAttachmentSamples_.size() ) )
, pColorAttachmentSamples( colorAttachmentSamples_.data() )
, depthStencilAttachmentSamples( depthStencilAttachmentSamples_ )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -6027,15 +5901,14 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD &
- setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = colorAttachmentCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setPColorAttachmentSamples(
- const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * pColorAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD &
+ setPColorAttachmentSamples( const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * pColorAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT
{
pColorAttachmentSamples = pColorAttachmentSamples_;
return *this;
@@ -6043,8 +5916,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
AttachmentSampleCountInfoAMD & setColorAttachmentSamples(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleCountFlagBits> const &
- colorAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleCountFlagBits> const & colorAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = static_cast<uint32_t>( colorAttachmentSamples_.size() );
pColorAttachmentSamples = colorAttachmentSamples_.data();
@@ -6052,25 +5924,25 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setDepthStencilAttachmentSamples(
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD &
+ setDepthStencilAttachmentSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT
{
depthStencilAttachmentSamples = depthStencilAttachmentSamples_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAttachmentSampleCountInfoAMD const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAttachmentSampleCountInfoAMD const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAttachmentSampleCountInfoAMD *>( this );
}
- explicit operator VkAttachmentSampleCountInfoAMD &() VULKAN_HPP_NOEXCEPT
+ operator VkAttachmentSampleCountInfoAMD &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAttachmentSampleCountInfoAMD *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -6091,12 +5963,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( AttachmentSampleCountInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) &&
- ( pColorAttachmentSamples == rhs.pColorAttachmentSamples ) &&
- ( depthStencilAttachmentSamples == rhs.depthStencilAttachmentSamples );
+ ( pColorAttachmentSamples == rhs.pColorAttachmentSamples ) && ( depthStencilAttachmentSamples == rhs.depthStencilAttachmentSamples );
# endif
}
@@ -6107,21 +5978,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentSampleCountInfoAMD;
- const void * pNext = {};
- uint32_t colorAttachmentCount = {};
- const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * pColorAttachmentSamples = {};
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples =
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentSampleCountInfoAMD;
+ const void * pNext = {};
+ uint32_t colorAttachmentCount = {};
+ const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * pColorAttachmentSamples = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD ) ==
- sizeof( VkAttachmentSampleCountInfoAMD ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD>::value,
- "AttachmentSampleCountInfoAMD is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eAttachmentSampleCountInfoAMD>
@@ -6138,7 +6000,8 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_CONSTEXPR Extent2D( uint32_t width_ = {}, uint32_t height_ = {} ) VULKAN_HPP_NOEXCEPT
: width( width_ )
, height( height_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR Extent2D( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -6167,17 +6030,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkExtent2D const &() const VULKAN_HPP_NOEXCEPT
+ operator VkExtent2D const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExtent2D *>( this );
}
- explicit operator VkExtent2D &() VULKAN_HPP_NOEXCEPT
+ operator VkExtent2D &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExtent2D *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -6194,7 +6057,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( Extent2D const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( width == rhs.width ) && ( height == rhs.height );
@@ -6211,12 +6074,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t width = {};
uint32_t height = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Extent2D ) == sizeof( VkExtent2D ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Extent2D>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Extent2D>::value,
- "Extent2D is not nothrow_move_constructible!" );
struct SampleLocationEXT
{
@@ -6226,13 +6083,12 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_CONSTEXPR SampleLocationEXT( float x_ = {}, float y_ = {} ) VULKAN_HPP_NOEXCEPT
: x( x_ )
, y( y_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR SampleLocationEXT( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SampleLocationEXT( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : SampleLocationEXT( *reinterpret_cast<SampleLocationEXT const *>( &rhs ) )
- {}
+ SampleLocationEXT( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT : SampleLocationEXT( *reinterpret_cast<SampleLocationEXT const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
SampleLocationEXT & operator=( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -6257,17 +6113,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSampleLocationEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSampleLocationEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSampleLocationEXT *>( this );
}
- explicit operator VkSampleLocationEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkSampleLocationEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSampleLocationEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -6284,7 +6140,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SampleLocationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( x == rhs.x ) && ( y == rhs.y );
@@ -6301,12 +6157,6 @@ namespace VULKAN_HPP_NAMESPACE
float x = {};
float y = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SampleLocationEXT ) == sizeof( VkSampleLocationEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SampleLocationEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SampleLocationEXT>::value,
- "SampleLocationEXT is not nothrow_move_constructible!" );
struct SampleLocationsInfoEXT
{
@@ -6316,35 +6166,39 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSampleLocationsInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT(
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ =
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
- VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ = {},
- uint32_t sampleLocationsCount_ = {},
- const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT
- : sampleLocationsPerPixel( sampleLocationsPerPixel_ )
+ VULKAN_HPP_CONSTEXPR
+ SampleLocationsInfoEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+ VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ = {},
+ uint32_t sampleLocationsCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , sampleLocationsPerPixel( sampleLocationsPerPixel_ )
, sampleLocationGridSize( sampleLocationGridSize_ )
, sampleLocationsCount( sampleLocationsCount_ )
, pSampleLocations( pSampleLocations_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SampleLocationsInfoEXT( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: SampleLocationsInfoEXT( *reinterpret_cast<SampleLocationsInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- SampleLocationsInfoEXT(
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_,
- VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleLocationEXT> const &
- sampleLocations_ )
- : sampleLocationsPerPixel( sampleLocationsPerPixel_ )
+ SampleLocationsInfoEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_,
+ VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleLocationEXT> const & sampleLocations_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , sampleLocationsPerPixel( sampleLocationsPerPixel_ )
, sampleLocationGridSize( sampleLocationGridSize_ )
, sampleLocationsCount( static_cast<uint32_t>( sampleLocations_.size() ) )
, pSampleLocations( sampleLocations_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -6363,29 +6217,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setSampleLocationsPerPixel(
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT &
+ setSampleLocationsPerPixel( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationsPerPixel = sampleLocationsPerPixel_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT &
- setSampleLocationGridSize( VULKAN_HPP_NAMESPACE::Extent2D const & sampleLocationGridSize_ ) VULKAN_HPP_NOEXCEPT
+ setSampleLocationGridSize( VULKAN_HPP_NAMESPACE::Extent2D const & sampleLocationGridSize_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationGridSize = sampleLocationGridSize_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT &
- setSampleLocationsCount( uint32_t sampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setSampleLocationsCount( uint32_t sampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationsCount = sampleLocationsCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT &
- setPSampleLocations( const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+ setPSampleLocations( const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT
{
pSampleLocations = pSampleLocations_;
return *this;
@@ -6393,8 +6246,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SampleLocationsInfoEXT & setSampleLocations(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleLocationEXT> const &
- sampleLocations_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleLocationEXT> const & sampleLocations_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationsCount = static_cast<uint32_t>( sampleLocations_.size() );
pSampleLocations = sampleLocations_.data();
@@ -6403,17 +6255,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSampleLocationsInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSampleLocationsInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSampleLocationsInfoEXT *>( this );
}
- explicit operator VkSampleLocationsInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkSampleLocationsInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSampleLocationsInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -6426,8 +6278,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- sType, pNext, sampleLocationsPerPixel, sampleLocationGridSize, sampleLocationsCount, pSampleLocations );
+ return std::tie( sType, pNext, sampleLocationsPerPixel, sampleLocationGridSize, sampleLocationsCount, pSampleLocations );
}
#endif
@@ -6436,13 +6287,12 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SampleLocationsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( sampleLocationsPerPixel == rhs.sampleLocationsPerPixel ) &&
- ( sampleLocationGridSize == rhs.sampleLocationGridSize ) &&
- ( sampleLocationsCount == rhs.sampleLocationsCount ) && ( pSampleLocations == rhs.pSampleLocations );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleLocationsPerPixel == rhs.sampleLocationsPerPixel ) &&
+ ( sampleLocationGridSize == rhs.sampleLocationGridSize ) && ( sampleLocationsCount == rhs.sampleLocationsCount ) &&
+ ( pSampleLocations == rhs.pSampleLocations );
# endif
}
@@ -6453,20 +6303,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSampleLocationsInfoEXT;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
- VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize = {};
- uint32_t sampleLocationsCount = {};
- const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSampleLocationsInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+ VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize = {};
+ uint32_t sampleLocationsCount = {};
+ const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT ) ==
- sizeof( VkSampleLocationsInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT>::value,
- "SampleLocationsInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSampleLocationsInfoEXT>
@@ -6479,19 +6322,19 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkAttachmentSampleLocationsEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT(
- uint32_t attachmentIndex_ = {},
- VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( uint32_t attachmentIndex_ = {},
+ VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT
: attachmentIndex( attachmentIndex_ )
, sampleLocationsInfo( sampleLocationsInfo_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- AttachmentSampleLocationsEXT( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
AttachmentSampleLocationsEXT( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: AttachmentSampleLocationsEXT( *reinterpret_cast<AttachmentSampleLocationsEXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
AttachmentSampleLocationsEXT & operator=( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -6503,32 +6346,31 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT &
- setAttachmentIndex( uint32_t attachmentIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT & setAttachmentIndex( uint32_t attachmentIndex_ ) VULKAN_HPP_NOEXCEPT
{
attachmentIndex = attachmentIndex_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT & setSampleLocationsInfo(
- VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT &
+ setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationsInfo = sampleLocationsInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkAttachmentSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkAttachmentSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAttachmentSampleLocationsEXT *>( this );
}
- explicit operator VkAttachmentSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkAttachmentSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAttachmentSampleLocationsEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -6545,7 +6387,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( AttachmentSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( attachmentIndex == rhs.attachmentIndex ) && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
@@ -6562,29 +6404,22 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t attachmentIndex = {};
VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT ) ==
- sizeof( VkAttachmentSampleLocationsEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT>::value,
- "AttachmentSampleLocationsEXT is not nothrow_move_constructible!" );
struct BaseInStructure
{
using NativeType = VkBaseInStructure;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- BaseInStructure( VULKAN_HPP_NAMESPACE::StructureType sType_ =
- VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo ) VULKAN_HPP_NOEXCEPT : sType( sType_ )
- {}
+ BaseInStructure( VULKAN_HPP_NAMESPACE::StructureType sType_ = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo,
+ const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : sType( sType_ )
+ , pNext( pNext_ )
+ {
+ }
BaseInStructure( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- BaseInStructure( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
- : BaseInStructure( *reinterpret_cast<BaseInStructure const *>( &rhs ) )
- {}
+ BaseInStructure( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT : BaseInStructure( *reinterpret_cast<BaseInStructure const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
BaseInStructure & operator=( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -6596,30 +6431,28 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 BaseInStructure &
- setPNext( const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BaseInStructure & setPNext( const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBaseInStructure const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBaseInStructure const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBaseInStructure *>( this );
}
- explicit operator VkBaseInStructure &() VULKAN_HPP_NOEXCEPT
+ operator VkBaseInStructure &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBaseInStructure *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const struct VULKAN_HPP_NAMESPACE::BaseInStructure * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const struct VULKAN_HPP_NAMESPACE::BaseInStructure * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -6632,7 +6465,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( BaseInStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext );
@@ -6649,27 +6482,22 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo;
const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BaseInStructure ) == sizeof( VkBaseInStructure ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BaseInStructure>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BaseInStructure>::value,
- "BaseInStructure is not nothrow_move_constructible!" );
struct BaseOutStructure
{
using NativeType = VkBaseOutStructure;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- BaseOutStructure( VULKAN_HPP_NAMESPACE::StructureType sType_ =
- VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo ) VULKAN_HPP_NOEXCEPT : sType( sType_ )
- {}
+ BaseOutStructure( VULKAN_HPP_NAMESPACE::StructureType sType_ = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo,
+ struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : sType( sType_ )
+ , pNext( pNext_ )
+ {
+ }
BaseOutStructure( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- BaseOutStructure( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
- : BaseOutStructure( *reinterpret_cast<BaseOutStructure const *>( &rhs ) )
- {}
+ BaseOutStructure( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT : BaseOutStructure( *reinterpret_cast<BaseOutStructure const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
BaseOutStructure & operator=( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -6681,25 +6509,24 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 BaseOutStructure &
- setPNext( struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BaseOutStructure & setPNext( struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBaseOutStructure const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBaseOutStructure const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBaseOutStructure *>( this );
}
- explicit operator VkBaseOutStructure &() VULKAN_HPP_NOEXCEPT
+ operator VkBaseOutStructure &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBaseOutStructure *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -6716,7 +6543,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( BaseOutStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext );
@@ -6733,63 +6560,57 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo;
struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BaseOutStructure ) == sizeof( VkBaseOutStructure ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BaseOutStructure>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BaseOutStructure>::value,
- "BaseOutStructure is not nothrow_move_constructible!" );
struct BindAccelerationStructureMemoryInfoNV
{
using NativeType = VkBindAccelerationStructureMemoryInfoNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eBindAccelerationStructureMemoryInfoNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindAccelerationStructureMemoryInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- BindAccelerationStructureMemoryInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {},
- VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {},
- uint32_t deviceIndexCount_ = {},
- const uint32_t * pDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT
- : accelerationStructure( accelerationStructure_ )
+ VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {},
+ uint32_t deviceIndexCount_ = {},
+ const uint32_t * pDeviceIndices_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , accelerationStructure( accelerationStructure_ )
, memory( memory_ )
, memoryOffset( memoryOffset_ )
, deviceIndexCount( deviceIndexCount_ )
, pDeviceIndices( pDeviceIndices_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( BindAccelerationStructureMemoryInfoNV const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindAccelerationStructureMemoryInfoNV( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : BindAccelerationStructureMemoryInfoNV(
- *reinterpret_cast<BindAccelerationStructureMemoryInfoNV const *>( &rhs ) )
- {}
+ : BindAccelerationStructureMemoryInfoNV( *reinterpret_cast<BindAccelerationStructureMemoryInfoNV const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- BindAccelerationStructureMemoryInfoNV(
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_,
- VULKAN_HPP_NAMESPACE::DeviceMemory memory_,
- VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ )
- : accelerationStructure( accelerationStructure_ )
+ BindAccelerationStructureMemoryInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_,
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory_,
+ VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , accelerationStructure( accelerationStructure_ )
, memory( memory_ )
, memoryOffset( memoryOffset_ )
, deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) )
, pDeviceIndices( deviceIndices_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- BindAccelerationStructureMemoryInfoNV &
- operator=( BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ BindAccelerationStructureMemoryInfoNV & operator=( BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- BindAccelerationStructureMemoryInfoNV &
- operator=( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ BindAccelerationStructureMemoryInfoNV & operator=( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV const *>( &rhs );
return *this;
@@ -6802,44 +6623,40 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setAccelerationStructure(
- VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV &
+ setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructure = accelerationStructure_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV &
- setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV &
- setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
{
memoryOffset = memoryOffset_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV &
- setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
{
deviceIndexCount = deviceIndexCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV &
- setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
pDeviceIndices = pDeviceIndices_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- BindAccelerationStructureMemoryInfoNV & setDeviceIndices(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
+ BindAccelerationStructureMemoryInfoNV &
+ setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
pDeviceIndices = deviceIndices_.data();
@@ -6848,17 +6665,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBindAccelerationStructureMemoryInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBindAccelerationStructureMemoryInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( this );
}
- explicit operator VkBindAccelerationStructureMemoryInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkBindAccelerationStructureMemoryInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindAccelerationStructureMemoryInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -6881,13 +6698,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( BindAccelerationStructureMemoryInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( accelerationStructure == rhs.accelerationStructure ) && ( memory == rhs.memory ) &&
- ( memoryOffset == rhs.memoryOffset ) && ( deviceIndexCount == rhs.deviceIndexCount ) &&
- ( pDeviceIndices == rhs.pDeviceIndices );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructure == rhs.accelerationStructure ) && ( memory == rhs.memory ) &&
+ ( memoryOffset == rhs.memoryOffset ) && ( deviceIndexCount == rhs.deviceIndexCount ) && ( pDeviceIndices == rhs.pDeviceIndices );
# endif
}
@@ -6898,22 +6713,14 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindAccelerationStructureMemoryInfoNV;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindAccelerationStructureMemoryInfoNV;
+ const void * pNext = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {};
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
uint32_t deviceIndexCount = {};
const uint32_t * pDeviceIndices = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV ) ==
- sizeof( VkBindAccelerationStructureMemoryInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV>::value,
- "BindAccelerationStructureMemoryInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eBindAccelerationStructureMemoryInfoNV>
@@ -6930,28 +6737,30 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {},
- const uint32_t * pDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT
- : deviceIndexCount( deviceIndexCount_ )
+ const uint32_t * pDeviceIndices_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , deviceIndexCount( deviceIndexCount_ )
, pDeviceIndices( pDeviceIndices_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- BindBufferMemoryDeviceGroupInfo( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindBufferMemoryDeviceGroupInfo( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: BindBufferMemoryDeviceGroupInfo( *reinterpret_cast<BindBufferMemoryDeviceGroupInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- BindBufferMemoryDeviceGroupInfo(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ )
- : deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() )
- {}
+ BindBufferMemoryDeviceGroupInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_, const void * pNext_ = nullptr )
+ : pNext( pNext_ ), deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- BindBufferMemoryDeviceGroupInfo &
- operator=( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ BindBufferMemoryDeviceGroupInfo & operator=( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindBufferMemoryDeviceGroupInfo & operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -6966,23 +6775,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo &
- setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
{
deviceIndexCount = deviceIndexCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo &
- setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
pDeviceIndices = pDeviceIndices_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- BindBufferMemoryDeviceGroupInfo & setDeviceIndices(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
+ BindBufferMemoryDeviceGroupInfo &
+ setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
pDeviceIndices = deviceIndices_.data();
@@ -6991,24 +6798,21 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBindBufferMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBindBufferMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindBufferMemoryDeviceGroupInfo *>( this );
}
- explicit operator VkBindBufferMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkBindBufferMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindBufferMemoryDeviceGroupInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- uint32_t const &,
- const uint32_t * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -7021,11 +6825,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( BindBufferMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceIndexCount == rhs.deviceIndexCount ) &&
- ( pDeviceIndices == rhs.pDeviceIndices );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceIndexCount == rhs.deviceIndexCount ) && ( pDeviceIndices == rhs.pDeviceIndices );
# endif
}
@@ -7041,14 +6844,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t deviceIndexCount = {};
const uint32_t * pDeviceIndices = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo ) ==
- sizeof( VkBindBufferMemoryDeviceGroupInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo>::value,
- "BindBufferMemoryDeviceGroupInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eBindBufferMemoryDeviceGroupInfo>
@@ -7065,19 +6860,23 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
- VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {} ) VULKAN_HPP_NOEXCEPT
- : buffer( buffer_ )
+ VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , buffer( buffer_ )
, memory( memory_ )
, memoryOffset( memoryOffset_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindBufferMemoryInfo( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: BindBufferMemoryInfo( *reinterpret_cast<BindBufferMemoryInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
BindBufferMemoryInfo & operator=( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -7101,32 +6900,30 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo &
- setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo &
- setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
{
memoryOffset = memoryOffset_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBindBufferMemoryInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBindBufferMemoryInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindBufferMemoryInfo *>( this );
}
- explicit operator VkBindBufferMemoryInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkBindBufferMemoryInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindBufferMemoryInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -7147,11 +6944,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( BindBufferMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ) && ( memory == rhs.memory ) &&
- ( memoryOffset == rhs.memoryOffset );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ) && ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset );
# endif
}
@@ -7168,12 +6964,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo ) == sizeof( VkBindBufferMemoryInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo>::value,
- "BindBufferMemoryInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eBindBufferMemoryInfo>
@@ -7190,7 +6980,8 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_CONSTEXPR Offset2D( int32_t x_ = {}, int32_t y_ = {} ) VULKAN_HPP_NOEXCEPT
: x( x_ )
, y( y_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR Offset2D( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -7219,17 +7010,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkOffset2D const &() const VULKAN_HPP_NOEXCEPT
+ operator VkOffset2D const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkOffset2D *>( this );
}
- explicit operator VkOffset2D &() VULKAN_HPP_NOEXCEPT
+ operator VkOffset2D &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkOffset2D *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -7246,7 +7037,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( Offset2D const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( x == rhs.x ) && ( y == rhs.y );
@@ -7263,23 +7054,17 @@ namespace VULKAN_HPP_NAMESPACE
int32_t x = {};
int32_t y = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Offset2D ) == sizeof( VkOffset2D ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Offset2D>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Offset2D>::value,
- "Offset2D is not nothrow_move_constructible!" );
struct Rect2D
{
using NativeType = VkRect2D;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR Rect2D( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D extent_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR Rect2D( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {} ) VULKAN_HPP_NOEXCEPT
: offset( offset_ )
, extent( extent_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR Rect2D( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -7308,17 +7093,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkRect2D const &() const VULKAN_HPP_NOEXCEPT
+ operator VkRect2D const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRect2D *>( this );
}
- explicit operator VkRect2D &() VULKAN_HPP_NOEXCEPT
+ operator VkRect2D &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRect2D *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -7335,7 +7120,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( Rect2D const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( offset == rhs.offset ) && ( extent == rhs.extent );
@@ -7352,12 +7137,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Offset2D offset = {};
VULKAN_HPP_NAMESPACE::Extent2D extent = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Rect2D ) == sizeof( VkRect2D ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Rect2D>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Rect2D>::value,
- "Rect2D is not nothrow_move_constructible!" );
struct BindImageMemoryDeviceGroupInfo
{
@@ -7367,39 +7146,41 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryDeviceGroupInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo(
- uint32_t deviceIndexCount_ = {},
- const uint32_t * pDeviceIndices_ = {},
- uint32_t splitInstanceBindRegionCount_ = {},
- const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ = {} ) VULKAN_HPP_NOEXCEPT
- : deviceIndexCount( deviceIndexCount_ )
+ VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {},
+ const uint32_t * pDeviceIndices_ = {},
+ uint32_t splitInstanceBindRegionCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , deviceIndexCount( deviceIndexCount_ )
, pDeviceIndices( pDeviceIndices_ )
, splitInstanceBindRegionCount( splitInstanceBindRegionCount_ )
, pSplitInstanceBindRegions( pSplitInstanceBindRegions_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- BindImageMemoryDeviceGroupInfo( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindImageMemoryDeviceGroupInfo( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: BindImageMemoryDeviceGroupInfo( *reinterpret_cast<BindImageMemoryDeviceGroupInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- BindImageMemoryDeviceGroupInfo(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const &
- splitInstanceBindRegions_ = {} )
- : deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) )
+ BindImageMemoryDeviceGroupInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & splitInstanceBindRegions_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) )
, pDeviceIndices( deviceIndices_.data() )
, splitInstanceBindRegionCount( static_cast<uint32_t>( splitInstanceBindRegions_.size() ) )
, pSplitInstanceBindRegions( splitInstanceBindRegions_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- BindImageMemoryDeviceGroupInfo &
- operator=( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ BindImageMemoryDeviceGroupInfo & operator=( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindImageMemoryDeviceGroupInfo & operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -7414,23 +7195,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo &
- setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
{
deviceIndexCount = deviceIndexCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo &
- setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
pDeviceIndices = pDeviceIndices_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- BindImageMemoryDeviceGroupInfo & setDeviceIndices(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
+ BindImageMemoryDeviceGroupInfo &
+ setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
pDeviceIndices = deviceIndices_.data();
@@ -7438,15 +7217,14 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo &
- setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ ) VULKAN_HPP_NOEXCEPT
{
splitInstanceBindRegionCount = splitInstanceBindRegionCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setPSplitInstanceBindRegions(
- const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo &
+ setPSplitInstanceBindRegions( const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT
{
pSplitInstanceBindRegions = pSplitInstanceBindRegions_;
return *this;
@@ -7454,8 +7232,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegions(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const &
- splitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & splitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT
{
splitInstanceBindRegionCount = static_cast<uint32_t>( splitInstanceBindRegions_.size() );
pSplitInstanceBindRegions = splitInstanceBindRegions_.data();
@@ -7464,17 +7241,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBindImageMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBindImageMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindImageMemoryDeviceGroupInfo *>( this );
}
- explicit operator VkBindImageMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkBindImageMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindImageMemoryDeviceGroupInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -7487,8 +7264,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- sType, pNext, deviceIndexCount, pDeviceIndices, splitInstanceBindRegionCount, pSplitInstanceBindRegions );
+ return std::tie( sType, pNext, deviceIndexCount, pDeviceIndices, splitInstanceBindRegionCount, pSplitInstanceBindRegions );
}
#endif
@@ -7497,13 +7273,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( BindImageMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceIndexCount == rhs.deviceIndexCount ) &&
- ( pDeviceIndices == rhs.pDeviceIndices ) &&
- ( splitInstanceBindRegionCount == rhs.splitInstanceBindRegionCount ) &&
- ( pSplitInstanceBindRegions == rhs.pSplitInstanceBindRegions );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceIndexCount == rhs.deviceIndexCount ) && ( pDeviceIndices == rhs.pDeviceIndices ) &&
+ ( splitInstanceBindRegionCount == rhs.splitInstanceBindRegionCount ) && ( pSplitInstanceBindRegions == rhs.pSplitInstanceBindRegions );
# endif
}
@@ -7521,14 +7295,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t splitInstanceBindRegionCount = {};
const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo ) ==
- sizeof( VkBindImageMemoryDeviceGroupInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo>::value,
- "BindImageMemoryDeviceGroupInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eBindImageMemoryDeviceGroupInfo>
@@ -7545,19 +7311,22 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( VULKAN_HPP_NAMESPACE::Image image_ = {},
- VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {} ) VULKAN_HPP_NOEXCEPT
- : image( image_ )
+ VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( VULKAN_HPP_NAMESPACE::Image image_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , image( image_ )
, memory( memory_ )
, memoryOffset( memoryOffset_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : BindImageMemoryInfo( *reinterpret_cast<BindImageMemoryInfo const *>( &rhs ) )
- {}
+ BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT : BindImageMemoryInfo( *reinterpret_cast<BindImageMemoryInfo const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
BindImageMemoryInfo & operator=( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -7581,32 +7350,30 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo &
- setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo &
- setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
{
memoryOffset = memoryOffset_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBindImageMemoryInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBindImageMemoryInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindImageMemoryInfo *>( this );
}
- explicit operator VkBindImageMemoryInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkBindImageMemoryInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindImageMemoryInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -7627,11 +7394,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( BindImageMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( memory == rhs.memory ) &&
- ( memoryOffset == rhs.memoryOffset );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset );
# endif
}
@@ -7648,12 +7414,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImageMemoryInfo ) == sizeof( VkBindImageMemoryInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo>::value,
- "BindImageMemoryInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eBindImageMemoryInfo>
@@ -7670,22 +7430,24 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemorySwapchainInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {},
- uint32_t imageIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : swapchain( swapchain_ )
+ VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {},
+ uint32_t imageIndex_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , swapchain( swapchain_ )
, imageIndex( imageIndex_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- BindImageMemorySwapchainInfoKHR( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindImageMemorySwapchainInfoKHR( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: BindImageMemorySwapchainInfoKHR( *reinterpret_cast<BindImageMemorySwapchainInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- BindImageMemorySwapchainInfoKHR &
- operator=( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ BindImageMemorySwapchainInfoKHR & operator=( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindImageMemorySwapchainInfoKHR & operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -7700,8 +7462,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR &
- setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
{
swapchain = swapchain_;
return *this;
@@ -7714,24 +7475,21 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBindImageMemorySwapchainInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBindImageMemorySwapchainInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindImageMemorySwapchainInfoKHR *>( this );
}
- explicit operator VkBindImageMemorySwapchainInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkBindImageMemorySwapchainInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindImageMemorySwapchainInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::SwapchainKHR const &,
- uint32_t const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SwapchainKHR const &, uint32_t const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -7744,11 +7502,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( BindImageMemorySwapchainInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) &&
- ( imageIndex == rhs.imageIndex );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) && ( imageIndex == rhs.imageIndex );
# endif
}
@@ -7764,14 +7521,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
uint32_t imageIndex = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR ) ==
- sizeof( VkBindImageMemorySwapchainInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR>::value,
- "BindImageMemorySwapchainInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eBindImageMemorySwapchainInfoKHR>
@@ -7787,17 +7536,19 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImagePlaneMemoryInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- BindImagePlaneMemoryInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ =
- VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT
- : planeAspect( planeAspect_ )
- {}
+ VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , planeAspect( planeAspect_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindImagePlaneMemoryInfo( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: BindImagePlaneMemoryInfo( *reinterpret_cast<BindImagePlaneMemoryInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
BindImagePlaneMemoryInfo & operator=( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -7815,31 +7566,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BindImagePlaneMemoryInfo &
- setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BindImagePlaneMemoryInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
{
planeAspect = planeAspect_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBindImagePlaneMemoryInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBindImagePlaneMemoryInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindImagePlaneMemoryInfo *>( this );
}
- explicit operator VkBindImagePlaneMemoryInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkBindImagePlaneMemoryInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindImagePlaneMemoryInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::ImageAspectFlagBits const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageAspectFlagBits const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -7852,7 +7600,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( BindImagePlaneMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( planeAspect == rhs.planeAspect );
@@ -7870,13 +7618,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo ) ==
- sizeof( VkBindImagePlaneMemoryInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo>::value,
- "BindImagePlaneMemoryInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eBindImagePlaneMemoryInfo>
@@ -7890,25 +7631,25 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkBindIndexBufferIndirectCommandNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV(
- VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {},
- uint32_t size_ = {},
- VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR
+ BindIndexBufferIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {},
+ uint32_t size_ = {},
+ VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 ) VULKAN_HPP_NOEXCEPT
: bufferAddress( bufferAddress_ )
, size( size_ )
, indexType( indexType_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- BindIndexBufferIndirectCommandNV( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindIndexBufferIndirectCommandNV( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
: BindIndexBufferIndirectCommandNV( *reinterpret_cast<BindIndexBufferIndirectCommandNV const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- BindIndexBufferIndirectCommandNV &
- operator=( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ BindIndexBufferIndirectCommandNV & operator=( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindIndexBufferIndirectCommandNV & operator=( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -7917,8 +7658,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV &
- setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT
{
bufferAddress = bufferAddress_;
return *this;
@@ -7930,25 +7670,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV &
- setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
{
indexType = indexType_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBindIndexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBindIndexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindIndexBufferIndirectCommandNV *>( this );
}
- explicit operator VkBindIndexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+ operator VkBindIndexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindIndexBufferIndirectCommandNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -7965,7 +7704,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( BindIndexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( indexType == rhs.indexType );
@@ -7983,34 +7722,23 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t size = {};
VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV ) ==
- sizeof( VkBindIndexBufferIndirectCommandNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV>::value,
- "BindIndexBufferIndirectCommandNV is not nothrow_move_constructible!" );
struct BindShaderGroupIndirectCommandNV
{
using NativeType = VkBindShaderGroupIndirectCommandNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV( uint32_t groupIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : groupIndex( groupIndex_ )
- {}
+ VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV( uint32_t groupIndex_ = {} ) VULKAN_HPP_NOEXCEPT : groupIndex( groupIndex_ ) {}
- VULKAN_HPP_CONSTEXPR
- BindShaderGroupIndirectCommandNV( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindShaderGroupIndirectCommandNV( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
: BindShaderGroupIndirectCommandNV( *reinterpret_cast<BindShaderGroupIndirectCommandNV const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- BindShaderGroupIndirectCommandNV &
- operator=( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ BindShaderGroupIndirectCommandNV & operator=( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindShaderGroupIndirectCommandNV & operator=( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -8026,17 +7754,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBindShaderGroupIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBindShaderGroupIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindShaderGroupIndirectCommandNV *>( this );
}
- explicit operator VkBindShaderGroupIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+ operator VkBindShaderGroupIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindShaderGroupIndirectCommandNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -8053,7 +7781,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( BindShaderGroupIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( groupIndex == rhs.groupIndex );
@@ -8069,14 +7797,6 @@ namespace VULKAN_HPP_NAMESPACE
public:
uint32_t groupIndex = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV ) ==
- sizeof( VkBindShaderGroupIndirectCommandNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV>::value,
- "BindShaderGroupIndirectCommandNV is not nothrow_move_constructible!" );
struct SparseMemoryBind
{
@@ -8087,19 +7807,18 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {},
- VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
: resourceOffset( resourceOffset_ )
, size( size_ )
, memory( memory_ )
, memoryOffset( memoryOffset_ )
, flags( flags_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR SparseMemoryBind( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SparseMemoryBind( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
- : SparseMemoryBind( *reinterpret_cast<SparseMemoryBind const *>( &rhs ) )
- {}
+ SparseMemoryBind( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT : SparseMemoryBind( *reinterpret_cast<SparseMemoryBind const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
SparseMemoryBind & operator=( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -8111,8 +7830,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind &
- setResourceOffset( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setResourceOffset( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ ) VULKAN_HPP_NOEXCEPT
{
resourceOffset = resourceOffset_;
return *this;
@@ -8124,39 +7842,36 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind &
- setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind &
- setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
{
memoryOffset = memoryOffset_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind &
- setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSparseMemoryBind const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSparseMemoryBind const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSparseMemoryBind *>( this );
}
- explicit operator VkSparseMemoryBind &() VULKAN_HPP_NOEXCEPT
+ operator VkSparseMemoryBind &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSparseMemoryBind *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -8177,11 +7892,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SparseMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( resourceOffset == rhs.resourceOffset ) && ( size == rhs.size ) && ( memory == rhs.memory ) &&
- ( memoryOffset == rhs.memoryOffset ) && ( flags == rhs.flags );
+ return ( resourceOffset == rhs.resourceOffset ) && ( size == rhs.size ) && ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset ) &&
+ ( flags == rhs.flags );
# endif
}
@@ -8198,40 +7913,34 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseMemoryBind ) == sizeof( VkSparseMemoryBind ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseMemoryBind>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseMemoryBind>::value,
- "SparseMemoryBind is not nothrow_move_constructible!" );
struct SparseBufferMemoryBindInfo
{
using NativeType = VkSparseBufferMemoryBindInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
- uint32_t bindCount_ = {},
- const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+ uint32_t bindCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT
: buffer( buffer_ )
, bindCount( bindCount_ )
, pBinds( pBinds_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- SparseBufferMemoryBindInfo( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: SparseBufferMemoryBindInfo( *reinterpret_cast<SparseBufferMemoryBindInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- SparseBufferMemoryBindInfo(
- VULKAN_HPP_NAMESPACE::Buffer buffer_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ )
+ SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ )
: buffer( buffer_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -8244,8 +7953,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo &
- setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
@@ -8257,17 +7965,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo &
- setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT
{
pBinds = pBinds_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- SparseBufferMemoryBindInfo & setBinds(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ )
- VULKAN_HPP_NOEXCEPT
+ SparseBufferMemoryBindInfo &
+ setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ ) VULKAN_HPP_NOEXCEPT
{
bindCount = static_cast<uint32_t>( binds_.size() );
pBinds = binds_.data();
@@ -8276,23 +7982,21 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSparseBufferMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSparseBufferMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSparseBufferMemoryBindInfo *>( this );
}
- explicit operator VkSparseBufferMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkSparseBufferMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSparseBufferMemoryBindInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::Buffer const &,
- uint32_t const &,
- const VULKAN_HPP_NAMESPACE::SparseMemoryBind * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::Buffer const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SparseMemoryBind * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -8305,7 +8009,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SparseBufferMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( buffer == rhs.buffer ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds );
@@ -8323,46 +8027,38 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t bindCount = {};
const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo ) ==
- sizeof( VkSparseBufferMemoryBindInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo>::value,
- "SparseBufferMemoryBindInfo is not nothrow_move_constructible!" );
struct SparseImageOpaqueMemoryBindInfo
{
using NativeType = VkSparseImageOpaqueMemoryBindInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {},
- uint32_t bindCount_ = {},
- const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {},
+ uint32_t bindCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT
: image( image_ )
, bindCount( bindCount_ )
, pBinds( pBinds_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- SparseImageOpaqueMemoryBindInfo( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: SparseImageOpaqueMemoryBindInfo( *reinterpret_cast<SparseImageOpaqueMemoryBindInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- SparseImageOpaqueMemoryBindInfo(
- VULKAN_HPP_NAMESPACE::Image image_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ )
+ SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ )
: image( image_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- SparseImageOpaqueMemoryBindInfo &
- operator=( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ SparseImageOpaqueMemoryBindInfo & operator=( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SparseImageOpaqueMemoryBindInfo & operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -8371,8 +8067,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo &
- setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
@@ -8384,17 +8079,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo &
- setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT
{
pBinds = pBinds_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- SparseImageOpaqueMemoryBindInfo & setBinds(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ )
- VULKAN_HPP_NOEXCEPT
+ SparseImageOpaqueMemoryBindInfo &
+ setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ ) VULKAN_HPP_NOEXCEPT
{
bindCount = static_cast<uint32_t>( binds_.size() );
pBinds = binds_.data();
@@ -8403,23 +8096,21 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSparseImageOpaqueMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSparseImageOpaqueMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSparseImageOpaqueMemoryBindInfo *>( this );
}
- explicit operator VkSparseImageOpaqueMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkSparseImageOpaqueMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSparseImageOpaqueMemoryBindInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::Image const &,
- uint32_t const &,
- const VULKAN_HPP_NAMESPACE::SparseMemoryBind * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::Image const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SparseMemoryBind * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -8432,7 +8123,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SparseImageOpaqueMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( image == rhs.image ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds );
@@ -8450,33 +8141,23 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t bindCount = {};
const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo ) ==
- sizeof( VkSparseImageOpaqueMemoryBindInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo>::value,
- "SparseImageOpaqueMemoryBindInfo is not nothrow_move_constructible!" );
struct ImageSubresource
{
using NativeType = VkImageSubresource;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ImageSubresource( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
- uint32_t mipLevel_ = {},
- uint32_t arrayLayer_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR
+ ImageSubresource( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t mipLevel_ = {}, uint32_t arrayLayer_ = {} ) VULKAN_HPP_NOEXCEPT
: aspectMask( aspectMask_ )
, mipLevel( mipLevel_ )
, arrayLayer( arrayLayer_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ImageSubresource( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ImageSubresource( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT
- : ImageSubresource( *reinterpret_cast<ImageSubresource const *>( &rhs ) )
- {}
+ ImageSubresource( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT : ImageSubresource( *reinterpret_cast<ImageSubresource const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ImageSubresource & operator=( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -8488,8 +8169,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 ImageSubresource &
- setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageSubresource & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
{
aspectMask = aspectMask_;
return *this;
@@ -8508,17 +8188,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImageSubresource const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageSubresource const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageSubresource *>( this );
}
- explicit operator VkImageSubresource &() VULKAN_HPP_NOEXCEPT
+ operator VkImageSubresource &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageSubresource *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -8535,7 +8215,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImageSubresource const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( aspectMask == rhs.aspectMask ) && ( mipLevel == rhs.mipLevel ) && ( arrayLayer == rhs.arrayLayer );
@@ -8553,12 +8233,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t mipLevel = {};
uint32_t arrayLayer = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresource ) == sizeof( VkImageSubresource ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresource>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresource>::value,
- "ImageSubresource is not nothrow_move_constructible!" );
struct Offset3D
{
@@ -8569,7 +8243,8 @@ namespace VULKAN_HPP_NAMESPACE
: x( x_ )
, y( y_ )
, z( z_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR Offset3D( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -8606,17 +8281,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkOffset3D const &() const VULKAN_HPP_NOEXCEPT
+ operator VkOffset3D const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkOffset3D *>( this );
}
- explicit operator VkOffset3D &() VULKAN_HPP_NOEXCEPT
+ operator VkOffset3D &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkOffset3D *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -8633,7 +8308,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( Offset3D const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z );
@@ -8651,32 +8326,24 @@ namespace VULKAN_HPP_NAMESPACE
int32_t y = {};
int32_t z = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Offset3D ) == sizeof( VkOffset3D ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Offset3D>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Offset3D>::value,
- "Offset3D is not nothrow_move_constructible!" );
struct Extent3D
{
using NativeType = VkExtent3D;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- Extent3D( uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR Extent3D( uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT
: width( width_ )
, height( height_ )
, depth( depth_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR Extent3D( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
Extent3D( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT : Extent3D( *reinterpret_cast<Extent3D const *>( &rhs ) ) {}
- explicit Extent3D( Extent2D const & extent2D, uint32_t depth_ = {} )
- : width( extent2D.width ), height( extent2D.height ), depth( depth_ )
- {}
+ explicit Extent3D( Extent2D const & extent2D, uint32_t depth_ = {} ) : width( extent2D.width ), height( extent2D.height ), depth( depth_ ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
Extent3D & operator=( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -8707,17 +8374,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkExtent3D const &() const VULKAN_HPP_NOEXCEPT
+ operator VkExtent3D const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExtent3D *>( this );
}
- explicit operator VkExtent3D &() VULKAN_HPP_NOEXCEPT
+ operator VkExtent3D &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExtent3D *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -8734,7 +8401,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( Extent3D const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( width == rhs.width ) && ( height == rhs.height ) && ( depth == rhs.depth );
@@ -8752,38 +8419,33 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t height = {};
uint32_t depth = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Extent3D ) == sizeof( VkExtent3D ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Extent3D>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Extent3D>::value,
- "Extent3D is not nothrow_move_constructible!" );
struct SparseImageMemoryBind
{
using NativeType = VkSparseImageMemoryBind;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- SparseImageMemoryBind( VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ = {},
- VULKAN_HPP_NAMESPACE::Offset3D offset_ = {},
- VULKAN_HPP_NAMESPACE::Extent3D extent_ = {},
- VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {},
- VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ = {},
+ VULKAN_HPP_NAMESPACE::Offset3D offset_ = {},
+ VULKAN_HPP_NAMESPACE::Extent3D extent_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {},
+ VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
: subresource( subresource_ )
, offset( offset_ )
, extent( extent_ )
, memory( memory_ )
, memoryOffset( memoryOffset_ )
, flags( flags_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
: SparseImageMemoryBind( *reinterpret_cast<SparseImageMemoryBind const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
SparseImageMemoryBind & operator=( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -8795,60 +8457,54 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind &
- setSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & subresource_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & subresource_ ) VULKAN_HPP_NOEXCEPT
{
subresource = subresource_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind &
- setOffset( VULKAN_HPP_NAMESPACE::Offset3D const & offset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setOffset( VULKAN_HPP_NAMESPACE::Offset3D const & offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind &
- setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
{
extent = extent_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind &
- setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind &
- setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
{
memoryOffset = memoryOffset_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind &
- setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSparseImageMemoryBind const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSparseImageMemoryBind const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSparseImageMemoryBind *>( this );
}
- explicit operator VkSparseImageMemoryBind &() VULKAN_HPP_NOEXCEPT
+ operator VkSparseImageMemoryBind &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSparseImageMemoryBind *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -8870,11 +8526,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SparseImageMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( subresource == rhs.subresource ) && ( offset == rhs.offset ) && ( extent == rhs.extent ) &&
- ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset ) && ( flags == rhs.flags );
+ return ( subresource == rhs.subresource ) && ( offset == rhs.offset ) && ( extent == rhs.extent ) && ( memory == rhs.memory ) &&
+ ( memoryOffset == rhs.memoryOffset ) && ( flags == rhs.flags );
# endif
}
@@ -8892,40 +8548,34 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageMemoryBind>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageMemoryBind>::value,
- "SparseImageMemoryBind is not nothrow_move_constructible!" );
struct SparseImageMemoryBindInfo
{
using NativeType = VkSparseImageMemoryBindInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {},
- uint32_t bindCount_ = {},
- const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {},
+ uint32_t bindCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT
: image( image_ )
, bindCount( bindCount_ )
, pBinds( pBinds_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- SparseImageMemoryBindInfo( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: SparseImageMemoryBindInfo( *reinterpret_cast<SparseImageMemoryBindInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- SparseImageMemoryBindInfo(
- VULKAN_HPP_NAMESPACE::Image image_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind> const & binds_ )
+ SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind> const & binds_ )
: image( image_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -8938,8 +8588,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo &
- setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
@@ -8951,17 +8600,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo &
- setPBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT
{
pBinds = pBinds_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- SparseImageMemoryBindInfo & setBinds(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind> const & binds_ )
- VULKAN_HPP_NOEXCEPT
+ SparseImageMemoryBindInfo &
+ setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind> const & binds_ ) VULKAN_HPP_NOEXCEPT
{
bindCount = static_cast<uint32_t>( binds_.size() );
pBinds = binds_.data();
@@ -8970,23 +8617,21 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSparseImageMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSparseImageMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSparseImageMemoryBindInfo *>( this );
}
- explicit operator VkSparseImageMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkSparseImageMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSparseImageMemoryBindInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::Image const &,
- uint32_t const &,
- const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::Image const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -8999,7 +8644,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SparseImageMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( image == rhs.image ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds );
@@ -9017,13 +8662,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t bindCount = {};
const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo ) ==
- sizeof( VkSparseImageMemoryBindInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo>::value,
- "SparseImageMemoryBindInfo is not nothrow_move_constructible!" );
struct BindSparseInfo
{
@@ -9033,18 +8671,19 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindSparseInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- BindSparseInfo( uint32_t waitSemaphoreCount_ = {},
- const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {},
- uint32_t bufferBindCount_ = {},
- const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds_ = {},
- uint32_t imageOpaqueBindCount_ = {},
- const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ = {},
- uint32_t imageBindCount_ = {},
- const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds_ = {},
- uint32_t signalSemaphoreCount_ = {},
- const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {} ) VULKAN_HPP_NOEXCEPT
- : waitSemaphoreCount( waitSemaphoreCount_ )
+ VULKAN_HPP_CONSTEXPR BindSparseInfo( uint32_t waitSemaphoreCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {},
+ uint32_t bufferBindCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds_ = {},
+ uint32_t imageOpaqueBindCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ = {},
+ uint32_t imageBindCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds_ = {},
+ uint32_t signalSemaphoreCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , waitSemaphoreCount( waitSemaphoreCount_ )
, pWaitSemaphores( pWaitSemaphores_ )
, bufferBindCount( bufferBindCount_ )
, pBufferBinds( pBufferBinds_ )
@@ -9054,26 +8693,22 @@ namespace VULKAN_HPP_NAMESPACE
, pImageBinds( pImageBinds_ )
, signalSemaphoreCount( signalSemaphoreCount_ )
, pSignalSemaphores( pSignalSemaphores_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR BindSparseInfo( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- BindSparseInfo( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : BindSparseInfo( *reinterpret_cast<BindSparseInfo const *>( &rhs ) )
- {}
+ BindSparseInfo( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT : BindSparseInfo( *reinterpret_cast<BindSparseInfo const *>( &rhs ) ) {}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- BindSparseInfo(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo> const &
- bufferBinds_ = {},
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo> const &
- imageOpaqueBinds_ = {},
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo> const &
- imageBinds_ = {},
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const &
- signalSemaphores_ = {} )
- : waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) )
+ BindSparseInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo> const & bufferBinds_ = {},
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo> const & imageOpaqueBinds_ = {},
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo> const & imageBinds_ = {},
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) )
, pWaitSemaphores( waitSemaphores_.data() )
, bufferBindCount( static_cast<uint32_t>( bufferBinds_.size() ) )
, pBufferBinds( bufferBinds_.data() )
@@ -9083,7 +8718,8 @@ namespace VULKAN_HPP_NAMESPACE
, pImageBinds( imageBinds_.data() )
, signalSemaphoreCount( static_cast<uint32_t>( signalSemaphores_.size() ) )
, pSignalSemaphores( signalSemaphores_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -9108,17 +8744,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BindSparseInfo &
- setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
pWaitSemaphores = pWaitSemaphores_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- BindSparseInfo & setWaitSemaphores(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_ )
- VULKAN_HPP_NOEXCEPT
+ BindSparseInfo &
+ setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
pWaitSemaphores = waitSemaphores_.data();
@@ -9132,8 +8766,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BindSparseInfo &
- setPBufferBinds( const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPBufferBinds( const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds_ ) VULKAN_HPP_NOEXCEPT
{
pBufferBinds = pBufferBinds_;
return *this;
@@ -9141,8 +8774,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
BindSparseInfo & setBufferBinds(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo> const &
- bufferBinds_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo> const & bufferBinds_ ) VULKAN_HPP_NOEXCEPT
{
bufferBindCount = static_cast<uint32_t>( bufferBinds_.size() );
pBufferBinds = bufferBinds_.data();
@@ -9150,15 +8782,14 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 BindSparseInfo &
- setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ ) VULKAN_HPP_NOEXCEPT
{
imageOpaqueBindCount = imageOpaqueBindCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPImageOpaqueBinds(
- const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BindSparseInfo &
+ setPImageOpaqueBinds( const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT
{
pImageOpaqueBinds = pImageOpaqueBinds_;
return *this;
@@ -9166,8 +8797,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
BindSparseInfo & setImageOpaqueBinds(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo> const &
- imageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo> const & imageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT
{
imageOpaqueBindCount = static_cast<uint32_t>( imageOpaqueBinds_.size() );
pImageOpaqueBinds = imageOpaqueBinds_.data();
@@ -9181,17 +8811,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BindSparseInfo &
- setPImageBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPImageBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds_ ) VULKAN_HPP_NOEXCEPT
{
pImageBinds = pImageBinds_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- BindSparseInfo & setImageBinds(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo> const &
- imageBinds_ ) VULKAN_HPP_NOEXCEPT
+ BindSparseInfo & setImageBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo> const & imageBinds_ )
+ VULKAN_HPP_NOEXCEPT
{
imageBindCount = static_cast<uint32_t>( imageBinds_.size() );
pImageBinds = imageBinds_.data();
@@ -9199,24 +8827,21 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 BindSparseInfo &
- setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
{
signalSemaphoreCount = signalSemaphoreCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BindSparseInfo &
- setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
pSignalSemaphores = pSignalSemaphores_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- BindSparseInfo & setSignalSemaphores(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ )
- VULKAN_HPP_NOEXCEPT
+ BindSparseInfo &
+ setSignalSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
signalSemaphoreCount = static_cast<uint32_t>( signalSemaphores_.size() );
pSignalSemaphores = signalSemaphores_.data();
@@ -9225,17 +8850,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindSparseInfo *>( this );
}
- explicit operator VkBindSparseInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkBindSparseInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindSparseInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -9274,14 +8899,13 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( BindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) &&
- ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( bufferBindCount == rhs.bufferBindCount ) &&
- ( pBufferBinds == rhs.pBufferBinds ) && ( imageOpaqueBindCount == rhs.imageOpaqueBindCount ) &&
- ( pImageOpaqueBinds == rhs.pImageOpaqueBinds ) && ( imageBindCount == rhs.imageBindCount ) &&
- ( pImageBinds == rhs.pImageBinds ) && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) &&
+ ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( bufferBindCount == rhs.bufferBindCount ) && ( pBufferBinds == rhs.pBufferBinds ) &&
+ ( imageOpaqueBindCount == rhs.imageOpaqueBindCount ) && ( pImageOpaqueBinds == rhs.pImageOpaqueBinds ) &&
+ ( imageBindCount == rhs.imageBindCount ) && ( pImageBinds == rhs.pImageBinds ) && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) &&
( pSignalSemaphores == rhs.pSignalSemaphores );
# endif
}
@@ -9306,12 +8930,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t signalSemaphoreCount = {};
const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindSparseInfo ) == sizeof( VkBindSparseInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindSparseInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindSparseInfo>::value,
- "BindSparseInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eBindSparseInfo>
@@ -9326,22 +8944,22 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {},
uint32_t size_ = {},
- uint32_t stride_ = {} ) VULKAN_HPP_NOEXCEPT
+ uint32_t stride_ = {} ) VULKAN_HPP_NOEXCEPT
: bufferAddress( bufferAddress_ )
, size( size_ )
, stride( stride_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- BindVertexBufferIndirectCommandNV( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindVertexBufferIndirectCommandNV( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
: BindVertexBufferIndirectCommandNV( *reinterpret_cast<BindVertexBufferIndirectCommandNV const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- BindVertexBufferIndirectCommandNV &
- operator=( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ BindVertexBufferIndirectCommandNV & operator=( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindVertexBufferIndirectCommandNV & operator=( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -9350,8 +8968,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV &
- setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT
{
bufferAddress = bufferAddress_;
return *this;
@@ -9370,17 +8987,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBindVertexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBindVertexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindVertexBufferIndirectCommandNV *>( this );
}
- explicit operator VkBindVertexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+ operator VkBindVertexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindVertexBufferIndirectCommandNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -9397,7 +9014,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( BindVertexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( stride == rhs.stride );
@@ -9415,14 +9032,138 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t size = {};
uint32_t stride = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV ) ==
- sizeof( VkBindVertexBufferIndirectCommandNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV>::value,
- "BindVertexBufferIndirectCommandNV is not nothrow_move_constructible!" );
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ struct BindVideoSessionMemoryInfoKHR
+ {
+ using NativeType = VkBindVideoSessionMemoryInfoKHR;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindVideoSessionMemoryInfoKHR;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR BindVideoSessionMemoryInfoKHR( uint32_t memoryBindIndex_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize memorySize_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , memoryBindIndex( memoryBindIndex_ )
+ , memory( memory_ )
+ , memoryOffset( memoryOffset_ )
+ , memorySize( memorySize_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR BindVideoSessionMemoryInfoKHR( BindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ BindVideoSessionMemoryInfoKHR( VkBindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : BindVideoSessionMemoryInfoKHR( *reinterpret_cast<BindVideoSessionMemoryInfoKHR const *>( &rhs ) )
+ {
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ BindVideoSessionMemoryInfoKHR & operator=( BindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ BindVideoSessionMemoryInfoKHR & operator=( VkBindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR const *>( &rhs );
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemoryBindIndex( uint32_t memoryBindIndex_ ) VULKAN_HPP_NOEXCEPT
+ {
+ memoryBindIndex = memoryBindIndex_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ {
+ memory = memory_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ memoryOffset = memoryOffset_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemorySize( VULKAN_HPP_NAMESPACE::DeviceSize memorySize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ memorySize = memorySize_;
+ return *this;
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkBindVideoSessionMemoryInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( this );
+ }
+
+ operator VkBindVideoSessionMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkBindVideoSessionMemoryInfoKHR *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ uint32_t const &,
+ VULKAN_HPP_NAMESPACE::DeviceMemory const &,
+ VULKAN_HPP_NAMESPACE::DeviceSize const &,
+ VULKAN_HPP_NAMESPACE::DeviceSize const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, memoryBindIndex, memory, memoryOffset, memorySize );
+ }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( BindVideoSessionMemoryInfoKHR const & ) const = default;
+# else
+ bool operator==( BindVideoSessionMemoryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryBindIndex == rhs.memoryBindIndex ) && ( memory == rhs.memory ) &&
+ ( memoryOffset == rhs.memoryOffset ) && ( memorySize == rhs.memorySize );
+# endif
+ }
+
+ bool operator!=( BindVideoSessionMemoryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+# endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindVideoSessionMemoryInfoKHR;
+ const void * pNext = {};
+ uint32_t memoryBindIndex = {};
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize memorySize = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eBindVideoSessionMemoryInfoKHR>
+ {
+ using Type = BindVideoSessionMemoryInfoKHR;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
struct ImageSubresourceLayers
{
@@ -9432,18 +9173,20 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
uint32_t mipLevel_ = {},
uint32_t baseArrayLayer_ = {},
- uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT
+ uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT
: aspectMask( aspectMask_ )
, mipLevel( mipLevel_ )
, baseArrayLayer( baseArrayLayer_ )
, layerCount( layerCount_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageSubresourceLayers( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageSubresourceLayers( *reinterpret_cast<ImageSubresourceLayers const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ImageSubresourceLayers & operator=( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -9455,8 +9198,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers &
- setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
{
aspectMask = aspectMask_;
return *this;
@@ -9481,17 +9223,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImageSubresourceLayers const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageSubresourceLayers const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageSubresourceLayers *>( this );
}
- explicit operator VkImageSubresourceLayers &() VULKAN_HPP_NOEXCEPT
+ operator VkImageSubresourceLayers &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageSubresourceLayers *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -9508,11 +9250,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImageSubresourceLayers const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( aspectMask == rhs.aspectMask ) && ( mipLevel == rhs.mipLevel ) &&
- ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount );
+ return ( aspectMask == rhs.aspectMask ) && ( mipLevel == rhs.mipLevel ) && ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount );
# endif
}
@@ -9528,13 +9269,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t baseArrayLayer = {};
uint32_t layerCount = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers ) ==
- sizeof( VkImageSubresourceLayers ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers>::value,
- "ImageSubresourceLayers is not nothrow_move_constructible!" );
struct ImageBlit2
{
@@ -9544,22 +9278,22 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageBlit2;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14
- ImageBlit2( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {},
- std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & srcOffsets_ = {},
- VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
- std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & dstOffsets_ = {} ) VULKAN_HPP_NOEXCEPT
- : srcSubresource( srcSubresource_ )
+ VULKAN_HPP_CONSTEXPR_14 ImageBlit2( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {},
+ std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & srcOffsets_ = {},
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
+ std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & dstOffsets_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , srcSubresource( srcSubresource_ )
, srcOffsets( srcOffsets_ )
, dstSubresource( dstSubresource_ )
, dstOffsets( dstOffsets_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR_14 ImageBlit2( ImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ImageBlit2( VkImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : ImageBlit2( *reinterpret_cast<ImageBlit2 const *>( &rhs ) )
- {}
+ ImageBlit2( VkImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageBlit2( *reinterpret_cast<ImageBlit2 const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ImageBlit2 & operator=( ImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -9577,46 +9311,42 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageBlit2 &
- setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
{
srcSubresource = srcSubresource_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageBlit2 &
- setSrcOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setSrcOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT
{
srcOffsets = srcOffsets_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageBlit2 &
- setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
{
dstSubresource = dstSubresource_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageBlit2 &
- setDstOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setDstOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT
{
dstOffsets = dstOffsets_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImageBlit2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageBlit2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageBlit2 *>( this );
}
- explicit operator VkImageBlit2 &() VULKAN_HPP_NOEXCEPT
+ operator VkImageBlit2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageBlit2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -9638,12 +9368,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImageBlit2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) &&
- ( srcOffsets == rhs.srcOffsets ) && ( dstSubresource == rhs.dstSubresource ) &&
- ( dstOffsets == rhs.dstOffsets );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && ( srcOffsets == rhs.srcOffsets ) &&
+ ( dstSubresource == rhs.dstSubresource ) && ( dstOffsets == rhs.dstOffsets );
# endif
}
@@ -9661,12 +9390,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> dstOffsets = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageBlit2 ) == sizeof( VkImageBlit2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageBlit2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageBlit2>::value,
- "ImageBlit2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImageBlit2>
@@ -9683,45 +9406,47 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBlitImageInfo2;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2(
- VULKAN_HPP_NAMESPACE::Image srcImage_ = {},
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
- VULKAN_HPP_NAMESPACE::Image dstImage_ = {},
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
- uint32_t regionCount_ = {},
- const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions_ = {},
- VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest ) VULKAN_HPP_NOEXCEPT
- : srcImage( srcImage_ )
+ VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ VULKAN_HPP_NAMESPACE::Image dstImage_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ uint32_t regionCount_ = {},
+ const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions_ = {},
+ VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , srcImage( srcImage_ )
, srcImageLayout( srcImageLayout_ )
, dstImage( dstImage_ )
, dstImageLayout( dstImageLayout_ )
, regionCount( regionCount_ )
, pRegions( pRegions_ )
, filter( filter_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2( BlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- BlitImageInfo2( VkBlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : BlitImageInfo2( *reinterpret_cast<BlitImageInfo2 const *>( &rhs ) )
- {}
+ BlitImageInfo2( VkBlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : BlitImageInfo2( *reinterpret_cast<BlitImageInfo2 const *>( &rhs ) ) {}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- BlitImageInfo2(
- VULKAN_HPP_NAMESPACE::Image srcImage_,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_,
- VULKAN_HPP_NAMESPACE::Image dstImage_,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageBlit2> const & regions_,
- VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest )
- : srcImage( srcImage_ )
+ BlitImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_,
+ VULKAN_HPP_NAMESPACE::Image dstImage_,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageBlit2> const & regions_,
+ VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , srcImage( srcImage_ )
, srcImageLayout( srcImageLayout_ )
, dstImage( dstImage_ )
, dstImageLayout( dstImageLayout_ )
, regionCount( static_cast<uint32_t>( regions_.size() ) )
, pRegions( regions_.data() )
, filter( filter_ )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -9746,8 +9471,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 &
- setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
{
srcImageLayout = srcImageLayout_;
return *this;
@@ -9759,8 +9483,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 &
- setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
{
dstImageLayout = dstImageLayout_;
return *this;
@@ -9772,17 +9495,14 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 &
- setPRegions( const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
{
pRegions = pRegions_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- BlitImageInfo2 & setRegions(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageBlit2> const & regions_ )
- VULKAN_HPP_NOEXCEPT
+ BlitImageInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageBlit2> const & regions_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = static_cast<uint32_t>( regions_.size() );
pRegions = regions_.data();
@@ -9797,17 +9517,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBlitImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBlitImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBlitImageInfo2 *>( this );
}
- explicit operator VkBlitImageInfo2 &() VULKAN_HPP_NOEXCEPT
+ operator VkBlitImageInfo2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBlitImageInfo2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -9823,8 +9543,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter );
+ return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter );
}
#endif
@@ -9833,13 +9552,12 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( BlitImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) &&
- ( srcImageLayout == rhs.srcImageLayout ) && ( dstImage == rhs.dstImage ) &&
- ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) &&
- ( pRegions == rhs.pRegions ) && ( filter == rhs.filter );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) &&
+ ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ) &&
+ ( filter == rhs.filter );
# endif
}
@@ -9860,12 +9578,6 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions = {};
VULKAN_HPP_NAMESPACE::Filter filter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BlitImageInfo2 ) == sizeof( VkBlitImageInfo2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BlitImageInfo2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BlitImageInfo2>::value,
- "BlitImageInfo2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eBlitImageInfo2>
@@ -9880,46 +9592,43 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkBufferCollectionBufferCreateInfoFUCHSIA;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eBufferCollectionBufferCreateInfoFUCHSIA;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionBufferCreateInfoFUCHSIA;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- BufferCollectionBufferCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {},
- uint32_t index_ = {} ) VULKAN_HPP_NOEXCEPT
- : collection( collection_ )
+ VULKAN_HPP_CONSTEXPR BufferCollectionBufferCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {},
+ uint32_t index_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , collection( collection_ )
, index( index_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR BufferCollectionBufferCreateInfoFUCHSIA( BufferCollectionBufferCreateInfoFUCHSIA const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR BufferCollectionBufferCreateInfoFUCHSIA( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferCollectionBufferCreateInfoFUCHSIA( VkBufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
- : BufferCollectionBufferCreateInfoFUCHSIA(
- *reinterpret_cast<BufferCollectionBufferCreateInfoFUCHSIA const *>( &rhs ) )
- {}
+ : BufferCollectionBufferCreateInfoFUCHSIA( *reinterpret_cast<BufferCollectionBufferCreateInfoFUCHSIA const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- BufferCollectionBufferCreateInfoFUCHSIA &
- operator=( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ BufferCollectionBufferCreateInfoFUCHSIA & operator=( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- BufferCollectionBufferCreateInfoFUCHSIA &
- operator=( VkBufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+ BufferCollectionBufferCreateInfoFUCHSIA & operator=( VkBufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA const *>( &rhs );
return *this;
}
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA &
- setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT
+ setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT
{
collection = collection_;
return *this;
@@ -9932,24 +9641,21 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBufferCollectionBufferCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBufferCollectionBufferCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferCollectionBufferCreateInfoFUCHSIA *>( this );
}
- explicit operator VkBufferCollectionBufferCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+ operator VkBufferCollectionBufferCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferCollectionBufferCreateInfoFUCHSIA *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA const &,
- uint32_t const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA const &, uint32_t const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -9962,11 +9668,10 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( collection == rhs.collection ) &&
- ( index == rhs.index );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( collection == rhs.collection ) && ( index == rhs.index );
# endif
}
@@ -9982,15 +9687,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection = {};
uint32_t index = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA ) ==
- sizeof( VkBufferCollectionBufferCreateInfoFUCHSIA ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA>::value,
- "BufferCollectionBufferCreateInfoFUCHSIA is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eBufferCollectionBufferCreateInfoFUCHSIA>
@@ -10005,37 +9701,35 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkBufferCollectionConstraintsInfoFUCHSIA;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eBufferCollectionConstraintsInfoFUCHSIA;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionConstraintsInfoFUCHSIA;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- BufferCollectionConstraintsInfoFUCHSIA( uint32_t minBufferCount_ = {},
- uint32_t maxBufferCount_ = {},
- uint32_t minBufferCountForCamping_ = {},
- uint32_t minBufferCountForDedicatedSlack_ = {},
- uint32_t minBufferCountForSharedSlack_ = {} ) VULKAN_HPP_NOEXCEPT
- : minBufferCount( minBufferCount_ )
+ VULKAN_HPP_CONSTEXPR BufferCollectionConstraintsInfoFUCHSIA( uint32_t minBufferCount_ = {},
+ uint32_t maxBufferCount_ = {},
+ uint32_t minBufferCountForCamping_ = {},
+ uint32_t minBufferCountForDedicatedSlack_ = {},
+ uint32_t minBufferCountForSharedSlack_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , minBufferCount( minBufferCount_ )
, maxBufferCount( maxBufferCount_ )
, minBufferCountForCamping( minBufferCountForCamping_ )
, minBufferCountForDedicatedSlack( minBufferCountForDedicatedSlack_ )
, minBufferCountForSharedSlack( minBufferCountForSharedSlack_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR BufferCollectionConstraintsInfoFUCHSIA( BufferCollectionConstraintsInfoFUCHSIA const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR BufferCollectionConstraintsInfoFUCHSIA( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferCollectionConstraintsInfoFUCHSIA( VkBufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
- : BufferCollectionConstraintsInfoFUCHSIA(
- *reinterpret_cast<BufferCollectionConstraintsInfoFUCHSIA const *>( &rhs ) )
- {}
+ : BufferCollectionConstraintsInfoFUCHSIA( *reinterpret_cast<BufferCollectionConstraintsInfoFUCHSIA const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- BufferCollectionConstraintsInfoFUCHSIA &
- operator=( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ BufferCollectionConstraintsInfoFUCHSIA & operator=( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- BufferCollectionConstraintsInfoFUCHSIA &
- operator=( VkBufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+ BufferCollectionConstraintsInfoFUCHSIA & operator=( VkBufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const *>( &rhs );
return *this;
@@ -10048,53 +9742,50 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA &
- setMinBufferCount( uint32_t minBufferCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMinBufferCount( uint32_t minBufferCount_ ) VULKAN_HPP_NOEXCEPT
{
minBufferCount = minBufferCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA &
- setMaxBufferCount( uint32_t maxBufferCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMaxBufferCount( uint32_t maxBufferCount_ ) VULKAN_HPP_NOEXCEPT
{
maxBufferCount = maxBufferCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA &
- setMinBufferCountForCamping( uint32_t minBufferCountForCamping_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMinBufferCountForCamping( uint32_t minBufferCountForCamping_ ) VULKAN_HPP_NOEXCEPT
{
minBufferCountForCamping = minBufferCountForCamping_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA &
- setMinBufferCountForDedicatedSlack( uint32_t minBufferCountForDedicatedSlack_ ) VULKAN_HPP_NOEXCEPT
+ setMinBufferCountForDedicatedSlack( uint32_t minBufferCountForDedicatedSlack_ ) VULKAN_HPP_NOEXCEPT
{
minBufferCountForDedicatedSlack = minBufferCountForDedicatedSlack_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA &
- setMinBufferCountForSharedSlack( uint32_t minBufferCountForSharedSlack_ ) VULKAN_HPP_NOEXCEPT
+ setMinBufferCountForSharedSlack( uint32_t minBufferCountForSharedSlack_ ) VULKAN_HPP_NOEXCEPT
{
minBufferCountForSharedSlack = minBufferCountForSharedSlack_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBufferCollectionConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBufferCollectionConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferCollectionConstraintsInfoFUCHSIA *>( this );
}
- explicit operator VkBufferCollectionConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+ operator VkBufferCollectionConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferCollectionConstraintsInfoFUCHSIA *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -10108,13 +9799,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- minBufferCount,
- maxBufferCount,
- minBufferCountForCamping,
- minBufferCountForDedicatedSlack,
- minBufferCountForSharedSlack );
+ return std::tie( sType, pNext, minBufferCount, maxBufferCount, minBufferCountForCamping, minBufferCountForDedicatedSlack, minBufferCountForSharedSlack );
}
# endif
@@ -10123,12 +9808,11 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minBufferCount == rhs.minBufferCount ) &&
- ( maxBufferCount == rhs.maxBufferCount ) && ( minBufferCountForCamping == rhs.minBufferCountForCamping ) &&
- ( minBufferCountForDedicatedSlack == rhs.minBufferCountForDedicatedSlack ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minBufferCount == rhs.minBufferCount ) && ( maxBufferCount == rhs.maxBufferCount ) &&
+ ( minBufferCountForCamping == rhs.minBufferCountForCamping ) && ( minBufferCountForDedicatedSlack == rhs.minBufferCountForDedicatedSlack ) &&
( minBufferCountForSharedSlack == rhs.minBufferCountForSharedSlack );
# endif
}
@@ -10140,23 +9824,14 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionConstraintsInfoFUCHSIA;
- const void * pNext = {};
- uint32_t minBufferCount = {};
- uint32_t maxBufferCount = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionConstraintsInfoFUCHSIA;
+ const void * pNext = {};
+ uint32_t minBufferCount = {};
+ uint32_t maxBufferCount = {};
uint32_t minBufferCountForCamping = {};
uint32_t minBufferCountForDedicatedSlack = {};
uint32_t minBufferCountForSharedSlack = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA ) ==
- sizeof( VkBufferCollectionConstraintsInfoFUCHSIA ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA>::value,
- "BufferCollectionConstraintsInfoFUCHSIA is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eBufferCollectionConstraintsInfoFUCHSIA>
@@ -10171,24 +9846,24 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkBufferCollectionCreateInfoFUCHSIA;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eBufferCollectionCreateInfoFUCHSIA;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionCreateInfoFUCHSIA;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR BufferCollectionCreateInfoFUCHSIA( zx_handle_t collectionToken_ = {} ) VULKAN_HPP_NOEXCEPT
- : collectionToken( collectionToken_ )
- {}
+ VULKAN_HPP_CONSTEXPR BufferCollectionCreateInfoFUCHSIA( zx_handle_t collectionToken_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , collectionToken( collectionToken_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- BufferCollectionCreateInfoFUCHSIA( BufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR BufferCollectionCreateInfoFUCHSIA( BufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferCollectionCreateInfoFUCHSIA( VkBufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
: BufferCollectionCreateInfoFUCHSIA( *reinterpret_cast<BufferCollectionCreateInfoFUCHSIA const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- BufferCollectionCreateInfoFUCHSIA &
- operator=( BufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ BufferCollectionCreateInfoFUCHSIA & operator=( BufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferCollectionCreateInfoFUCHSIA & operator=( VkBufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -10203,25 +9878,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferCollectionCreateInfoFUCHSIA &
- setCollectionToken( zx_handle_t collectionToken_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferCollectionCreateInfoFUCHSIA & setCollectionToken( zx_handle_t collectionToken_ ) VULKAN_HPP_NOEXCEPT
{
collectionToken = collectionToken_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBufferCollectionCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBufferCollectionCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( this );
}
- explicit operator VkBufferCollectionCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+ operator VkBufferCollectionCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferCollectionCreateInfoFUCHSIA *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -10249,8 +9923,7 @@ namespace VULKAN_HPP_NAMESPACE
bool operator==( BufferCollectionCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( memcmp( &collectionToken, &rhs.collectionToken, sizeof( zx_handle_t ) ) == 0 );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &collectionToken, &rhs.collectionToken, sizeof( zx_handle_t ) ) == 0 );
}
bool operator!=( BufferCollectionCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
@@ -10263,14 +9936,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
zx_handle_t collectionToken = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA ) ==
- sizeof( VkBufferCollectionCreateInfoFUCHSIA ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA>::value,
- "BufferCollectionCreateInfoFUCHSIA is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eBufferCollectionCreateInfoFUCHSIA>
@@ -10285,31 +9950,29 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkBufferCollectionImageCreateInfoFUCHSIA;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eBufferCollectionImageCreateInfoFUCHSIA;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionImageCreateInfoFUCHSIA;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- BufferCollectionImageCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {},
- uint32_t index_ = {} ) VULKAN_HPP_NOEXCEPT
- : collection( collection_ )
+ VULKAN_HPP_CONSTEXPR BufferCollectionImageCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {},
+ uint32_t index_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , collection( collection_ )
, index( index_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR BufferCollectionImageCreateInfoFUCHSIA( BufferCollectionImageCreateInfoFUCHSIA const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR BufferCollectionImageCreateInfoFUCHSIA( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferCollectionImageCreateInfoFUCHSIA( VkBufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
- : BufferCollectionImageCreateInfoFUCHSIA(
- *reinterpret_cast<BufferCollectionImageCreateInfoFUCHSIA const *>( &rhs ) )
- {}
+ : BufferCollectionImageCreateInfoFUCHSIA( *reinterpret_cast<BufferCollectionImageCreateInfoFUCHSIA const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- BufferCollectionImageCreateInfoFUCHSIA &
- operator=( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ BufferCollectionImageCreateInfoFUCHSIA & operator=( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- BufferCollectionImageCreateInfoFUCHSIA &
- operator=( VkBufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+ BufferCollectionImageCreateInfoFUCHSIA & operator=( VkBufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA const *>( &rhs );
return *this;
@@ -10323,7 +9986,7 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA &
- setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT
+ setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT
{
collection = collection_;
return *this;
@@ -10336,24 +9999,21 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBufferCollectionImageCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBufferCollectionImageCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferCollectionImageCreateInfoFUCHSIA *>( this );
}
- explicit operator VkBufferCollectionImageCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+ operator VkBufferCollectionImageCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferCollectionImageCreateInfoFUCHSIA *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA const &,
- uint32_t const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA const &, uint32_t const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -10366,11 +10026,10 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( collection == rhs.collection ) &&
- ( index == rhs.index );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( collection == rhs.collection ) && ( index == rhs.index );
# endif
}
@@ -10386,15 +10045,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection = {};
uint32_t index = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA ) ==
- sizeof( VkBufferCollectionImageCreateInfoFUCHSIA ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA>::value,
- "BufferCollectionImageCreateInfoFUCHSIA is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eBufferCollectionImageCreateInfoFUCHSIA>
@@ -10412,15 +10062,18 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSysmemColorSpaceFUCHSIA;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SysmemColorSpaceFUCHSIA( uint32_t colorSpace_ = {} ) VULKAN_HPP_NOEXCEPT
- : colorSpace( colorSpace_ )
- {}
+ VULKAN_HPP_CONSTEXPR SysmemColorSpaceFUCHSIA( uint32_t colorSpace_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , colorSpace( colorSpace_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR SysmemColorSpaceFUCHSIA( SysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SysmemColorSpaceFUCHSIA( VkSysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
: SysmemColorSpaceFUCHSIA( *reinterpret_cast<SysmemColorSpaceFUCHSIA const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
SysmemColorSpaceFUCHSIA & operator=( SysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -10445,17 +10098,17 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSysmemColorSpaceFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSysmemColorSpaceFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSysmemColorSpaceFUCHSIA *>( this );
}
- explicit operator VkSysmemColorSpaceFUCHSIA &() VULKAN_HPP_NOEXCEPT
+ operator VkSysmemColorSpaceFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSysmemColorSpaceFUCHSIA *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -10472,7 +10125,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( SysmemColorSpaceFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorSpace == rhs.colorSpace );
@@ -10490,13 +10143,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
uint32_t colorSpace = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA ) ==
- sizeof( VkSysmemColorSpaceFUCHSIA ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA>::value,
- "SysmemColorSpaceFUCHSIA is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSysmemColorSpaceFUCHSIA>
@@ -10511,8 +10157,7 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkBufferCollectionPropertiesFUCHSIA;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eBufferCollectionPropertiesFUCHSIA;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionPropertiesFUCHSIA;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BufferCollectionPropertiesFUCHSIA(
@@ -10523,13 +10168,13 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {},
VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA sysmemColorSpaceIndex_ = {},
VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {},
- VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ =
- VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity,
- VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull,
- VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
- VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ =
- VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven ) VULKAN_HPP_NOEXCEPT
- : memoryTypeBits( memoryTypeBits_ )
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity,
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull,
+ VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
+ VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , memoryTypeBits( memoryTypeBits_ )
, bufferCount( bufferCount_ )
, createInfoIndex( createInfoIndex_ )
, sysmemPixelFormat( sysmemPixelFormat_ )
@@ -10540,18 +10185,18 @@ namespace VULKAN_HPP_NAMESPACE
, suggestedYcbcrRange( suggestedYcbcrRange_ )
, suggestedXChromaOffset( suggestedXChromaOffset_ )
, suggestedYChromaOffset( suggestedYChromaOffset_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- BufferCollectionPropertiesFUCHSIA( BufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR BufferCollectionPropertiesFUCHSIA( BufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferCollectionPropertiesFUCHSIA( VkBufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
: BufferCollectionPropertiesFUCHSIA( *reinterpret_cast<BufferCollectionPropertiesFUCHSIA const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- BufferCollectionPropertiesFUCHSIA &
- operator=( BufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ BufferCollectionPropertiesFUCHSIA & operator=( BufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferCollectionPropertiesFUCHSIA & operator=( VkBufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -10566,95 +10211,91 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA &
- setMemoryTypeBits( uint32_t memoryTypeBits_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setMemoryTypeBits( uint32_t memoryTypeBits_ ) VULKAN_HPP_NOEXCEPT
{
memoryTypeBits = memoryTypeBits_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA &
- setBufferCount( uint32_t bufferCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setBufferCount( uint32_t bufferCount_ ) VULKAN_HPP_NOEXCEPT
{
bufferCount = bufferCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA &
- setCreateInfoIndex( uint32_t createInfoIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setCreateInfoIndex( uint32_t createInfoIndex_ ) VULKAN_HPP_NOEXCEPT
{
createInfoIndex = createInfoIndex_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA &
- setSysmemPixelFormat( uint64_t sysmemPixelFormat_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSysmemPixelFormat( uint64_t sysmemPixelFormat_ ) VULKAN_HPP_NOEXCEPT
{
sysmemPixelFormat = sysmemPixelFormat_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA &
- setFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ ) VULKAN_HPP_NOEXCEPT
+ setFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ ) VULKAN_HPP_NOEXCEPT
{
formatFeatures = formatFeatures_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSysmemColorSpaceIndex(
- VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA const & sysmemColorSpaceIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA &
+ setSysmemColorSpaceIndex( VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA const & sysmemColorSpaceIndex_ ) VULKAN_HPP_NOEXCEPT
{
sysmemColorSpaceIndex = sysmemColorSpaceIndex_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSamplerYcbcrConversionComponents(
- VULKAN_HPP_NAMESPACE::ComponentMapping const & samplerYcbcrConversionComponents_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA &
+ setSamplerYcbcrConversionComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & samplerYcbcrConversionComponents_ ) VULKAN_HPP_NOEXCEPT
{
samplerYcbcrConversionComponents = samplerYcbcrConversionComponents_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSuggestedYcbcrModel(
- VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA &
+ setSuggestedYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ ) VULKAN_HPP_NOEXCEPT
{
suggestedYcbcrModel = suggestedYcbcrModel_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA &
- setSuggestedYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ ) VULKAN_HPP_NOEXCEPT
+ setSuggestedYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ ) VULKAN_HPP_NOEXCEPT
{
suggestedYcbcrRange = suggestedYcbcrRange_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA &
- setSuggestedXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ ) VULKAN_HPP_NOEXCEPT
+ setSuggestedXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ ) VULKAN_HPP_NOEXCEPT
{
suggestedXChromaOffset = suggestedXChromaOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA &
- setSuggestedYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ ) VULKAN_HPP_NOEXCEPT
+ setSuggestedYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ ) VULKAN_HPP_NOEXCEPT
{
suggestedYChromaOffset = suggestedYChromaOffset_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBufferCollectionPropertiesFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBufferCollectionPropertiesFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferCollectionPropertiesFUCHSIA *>( this );
}
- explicit operator VkBufferCollectionPropertiesFUCHSIA &() VULKAN_HPP_NOEXCEPT
+ operator VkBufferCollectionPropertiesFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -10695,17 +10336,14 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( BufferCollectionPropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ) &&
- ( bufferCount == rhs.bufferCount ) && ( createInfoIndex == rhs.createInfoIndex ) &&
- ( sysmemPixelFormat == rhs.sysmemPixelFormat ) && ( formatFeatures == rhs.formatFeatures ) &&
- ( sysmemColorSpaceIndex == rhs.sysmemColorSpaceIndex ) &&
- ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ) && ( bufferCount == rhs.bufferCount ) &&
+ ( createInfoIndex == rhs.createInfoIndex ) && ( sysmemPixelFormat == rhs.sysmemPixelFormat ) && ( formatFeatures == rhs.formatFeatures ) &&
+ ( sysmemColorSpaceIndex == rhs.sysmemColorSpaceIndex ) && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) &&
( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) &&
- ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) &&
- ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
+ ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
# endif
}
@@ -10716,8 +10354,8 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionPropertiesFUCHSIA;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionPropertiesFUCHSIA;
+ void * pNext = {};
uint32_t memoryTypeBits = {};
uint32_t bufferCount = {};
uint32_t createInfoIndex = {};
@@ -10725,20 +10363,11 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {};
VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA sysmemColorSpaceIndex = {};
VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {};
- VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel =
- VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
- VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
- VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
- VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA ) ==
- sizeof( VkBufferCollectionPropertiesFUCHSIA ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA>::value,
- "BufferCollectionPropertiesFUCHSIA is not nothrow_move_constructible!" );
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
+ VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+ VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+ };
template <>
struct CppType<StructureType, StructureType::eBufferCollectionPropertiesFUCHSIA>
@@ -10755,40 +10384,43 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
- VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {},
- VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
- uint32_t queueFamilyIndexCount_ = {},
- const uint32_t * pQueueFamilyIndices_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
+ VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {},
+ VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
+ uint32_t queueFamilyIndexCount_ = {},
+ const uint32_t * pQueueFamilyIndices_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, size( size_ )
, usage( usage_ )
, sharingMode( sharingMode_ )
, queueFamilyIndexCount( queueFamilyIndexCount_ )
, pQueueFamilyIndices( pQueueFamilyIndices_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR BufferCreateInfo( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- BufferCreateInfo( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : BufferCreateInfo( *reinterpret_cast<BufferCreateInfo const *>( &rhs ) )
- {}
+ BufferCreateInfo( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCreateInfo( *reinterpret_cast<BufferCreateInfo const *>( &rhs ) ) {}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_,
VULKAN_HPP_NAMESPACE::DeviceSize size_,
VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_,
VULKAN_HPP_NAMESPACE::SharingMode sharingMode_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ )
- : flags( flags_ )
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, size( size_ )
, usage( usage_ )
, sharingMode( sharingMode_ )
, queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) )
, pQueueFamilyIndices( queueFamilyIndices_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -10807,8 +10439,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -10820,37 +10451,32 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo &
- setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
{
usage = usage_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo &
- setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
{
sharingMode = sharingMode_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo &
- setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
{
queueFamilyIndexCount = queueFamilyIndexCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo &
- setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
{
pQueueFamilyIndices = pQueueFamilyIndices_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- BufferCreateInfo & setQueueFamilyIndices(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+ BufferCreateInfo & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
{
queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
pQueueFamilyIndices = queueFamilyIndices_.data();
@@ -10859,17 +10485,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferCreateInfo *>( this );
}
- explicit operator VkBufferCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkBufferCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -10893,13 +10519,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( BufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( size == rhs.size ) &&
- ( usage == rhs.usage ) && ( sharingMode == rhs.sharingMode ) &&
- ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) &&
- ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( size == rhs.size ) && ( usage == rhs.usage ) &&
+ ( sharingMode == rhs.sharingMode ) && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
# endif
}
@@ -10919,12 +10543,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t queueFamilyIndexCount = {};
const uint32_t * pQueueFamilyIndices = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCreateInfo ) == sizeof( VkBufferCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCreateInfo>::value,
- "BufferCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eBufferCreateInfo>
@@ -10941,22 +10559,23 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferConstraintsInfoFUCHSIA;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR BufferConstraintsInfoFUCHSIA(
- VULKAN_HPP_NAMESPACE::BufferCreateInfo createInfo_ = {},
- VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ = {},
- VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {} )
- VULKAN_HPP_NOEXCEPT
- : createInfo( createInfo_ )
+ VULKAN_HPP_CONSTEXPR BufferConstraintsInfoFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCreateInfo createInfo_ = {},
+ VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ = {},
+ VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , createInfo( createInfo_ )
, requiredFormatFeatures( requiredFormatFeatures_ )
, bufferCollectionConstraints( bufferCollectionConstraints_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- BufferConstraintsInfoFUCHSIA( BufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR BufferConstraintsInfoFUCHSIA( BufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferConstraintsInfoFUCHSIA( VkBufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
: BufferConstraintsInfoFUCHSIA( *reinterpret_cast<BufferConstraintsInfoFUCHSIA const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
BufferConstraintsInfoFUCHSIA & operator=( BufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -10974,40 +10593,38 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA &
- setCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo_ ) VULKAN_HPP_NOEXCEPT
{
createInfo = createInfo_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA &
- setRequiredFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ ) VULKAN_HPP_NOEXCEPT
+ setRequiredFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ ) VULKAN_HPP_NOEXCEPT
{
requiredFormatFeatures = requiredFormatFeatures_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setBufferCollectionConstraints(
- VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const & bufferCollectionConstraints_ )
- VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA &
+ setBufferCollectionConstraints( VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const & bufferCollectionConstraints_ ) VULKAN_HPP_NOEXCEPT
{
bufferCollectionConstraints = bufferCollectionConstraints_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBufferConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBufferConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( this );
}
- explicit operator VkBufferConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+ operator VkBufferConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferConstraintsInfoFUCHSIA *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -11028,11 +10645,10 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( BufferConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( createInfo == rhs.createInfo ) &&
- ( requiredFormatFeatures == rhs.requiredFormatFeatures ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( createInfo == rhs.createInfo ) && ( requiredFormatFeatures == rhs.requiredFormatFeatures ) &&
( bufferCollectionConstraints == rhs.bufferCollectionConstraints );
# endif
}
@@ -11044,20 +10660,12 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferConstraintsInfoFUCHSIA;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferConstraintsInfoFUCHSIA;
+ const void * pNext = {};
VULKAN_HPP_NAMESPACE::BufferCreateInfo createInfo = {};
VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures = {};
VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA ) ==
- sizeof( VkBufferConstraintsInfoFUCHSIA ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA>::value,
- "BufferConstraintsInfoFUCHSIA is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eBufferConstraintsInfoFUCHSIA>
@@ -11077,13 +10685,12 @@ namespace VULKAN_HPP_NAMESPACE
: srcOffset( srcOffset_ )
, dstOffset( dstOffset_ )
, size( size_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR BufferCopy( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- BufferCopy( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
- : BufferCopy( *reinterpret_cast<BufferCopy const *>( &rhs ) )
- {}
+ BufferCopy( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCopy( *reinterpret_cast<BufferCopy const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
BufferCopy & operator=( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -11114,23 +10721,21 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBufferCopy const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBufferCopy const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferCopy *>( this );
}
- explicit operator VkBufferCopy &() VULKAN_HPP_NOEXCEPT
+ operator VkBufferCopy &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferCopy *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &,
- VULKAN_HPP_NAMESPACE::DeviceSize const &,
- VULKAN_HPP_NAMESPACE::DeviceSize const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -11143,7 +10748,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( BufferCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( srcOffset == rhs.srcOffset ) && ( dstOffset == rhs.dstOffset ) && ( size == rhs.size );
@@ -11161,12 +10766,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCopy ) == sizeof( VkBufferCopy ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCopy>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCopy>::value,
- "BufferCopy is not nothrow_move_constructible!" );
struct BufferCopy2
{
@@ -11178,17 +10777,18 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BufferCopy2( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {},
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT
- : srcOffset( srcOffset_ )
+ VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , srcOffset( srcOffset_ )
, dstOffset( dstOffset_ )
, size( size_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR BufferCopy2( BufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- BufferCopy2( VkBufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : BufferCopy2( *reinterpret_cast<BufferCopy2 const *>( &rhs ) )
- {}
+ BufferCopy2( VkBufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCopy2( *reinterpret_cast<BufferCopy2 const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
BufferCopy2 & operator=( BufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -11206,15 +10806,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferCopy2 &
- setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT
{
srcOffset = srcOffset_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferCopy2 &
- setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT
{
dstOffset = dstOffset_;
return *this;
@@ -11227,17 +10825,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBufferCopy2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBufferCopy2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferCopy2 *>( this );
}
- explicit operator VkBufferCopy2 &() VULKAN_HPP_NOEXCEPT
+ operator VkBufferCopy2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferCopy2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -11258,11 +10856,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( BufferCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcOffset == rhs.srcOffset ) &&
- ( dstOffset == rhs.dstOffset ) && ( size == rhs.size );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcOffset == rhs.srcOffset ) && ( dstOffset == rhs.dstOffset ) && ( size == rhs.size );
# endif
}
@@ -11279,12 +10876,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCopy2 ) == sizeof( VkBufferCopy2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCopy2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCopy2>::value,
- "BufferCopy2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eBufferCopy2>
@@ -11298,24 +10889,25 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkBufferDeviceAddressCreateInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressCreateInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- BufferDeviceAddressCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) VULKAN_HPP_NOEXCEPT
- : deviceAddress( deviceAddress_ )
- {}
+ VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , deviceAddress( deviceAddress_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- BufferDeviceAddressCreateInfoEXT( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferDeviceAddressCreateInfoEXT( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: BufferDeviceAddressCreateInfoEXT( *reinterpret_cast<BufferDeviceAddressCreateInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- BufferDeviceAddressCreateInfoEXT &
- operator=( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ BufferDeviceAddressCreateInfoEXT & operator=( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferDeviceAddressCreateInfoEXT & operator=( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -11330,31 +10922,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressCreateInfoEXT &
- setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressCreateInfoEXT & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
{
deviceAddress = deviceAddress_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBufferDeviceAddressCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBufferDeviceAddressCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferDeviceAddressCreateInfoEXT *>( this );
}
- explicit operator VkBufferDeviceAddressCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkBufferDeviceAddressCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferDeviceAddressCreateInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::DeviceAddress const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -11367,7 +10956,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( BufferDeviceAddressCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress );
@@ -11385,14 +10974,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT ) ==
- sizeof( VkBufferDeviceAddressCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT>::value,
- "BufferDeviceAddressCreateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eBufferDeviceAddressCreateInfoEXT>
@@ -11408,15 +10989,18 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT
- : buffer( buffer_ )
- {}
+ VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , buffer( buffer_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferDeviceAddressInfo( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: BufferDeviceAddressInfo( *reinterpret_cast<BufferDeviceAddressInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
BufferDeviceAddressInfo & operator=( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -11434,25 +11018,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressInfo &
- setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBufferDeviceAddressInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBufferDeviceAddressInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferDeviceAddressInfo *>( this );
}
- explicit operator VkBufferDeviceAddressInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkBufferDeviceAddressInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferDeviceAddressInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -11469,7 +11052,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( BufferDeviceAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer );
@@ -11487,13 +11070,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo ) ==
- sizeof( VkBufferDeviceAddressInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo>::value,
- "BufferDeviceAddressInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eBufferDeviceAddressInfo>
@@ -11513,20 +11089,19 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t bufferImageHeight_ = {},
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {},
VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {},
- VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT
: bufferOffset( bufferOffset_ )
, bufferRowLength( bufferRowLength_ )
, bufferImageHeight( bufferImageHeight_ )
, imageSubresource( imageSubresource_ )
, imageOffset( imageOffset_ )
, imageExtent( imageExtent_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR BufferImageCopy( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- BufferImageCopy( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
- : BufferImageCopy( *reinterpret_cast<BufferImageCopy const *>( &rhs ) )
- {}
+ BufferImageCopy( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT : BufferImageCopy( *reinterpret_cast<BufferImageCopy const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
BufferImageCopy & operator=( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -11538,8 +11113,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 BufferImageCopy &
- setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT
{
bufferOffset = bufferOffset_;
return *this;
@@ -11557,39 +11131,36 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferImageCopy &
- setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
{
imageSubresource = imageSubresource_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferImageCopy &
- setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
{
imageOffset = imageOffset_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferImageCopy &
- setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
{
imageExtent = imageExtent_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBufferImageCopy const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBufferImageCopy const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferImageCopy *>( this );
}
- explicit operator VkBufferImageCopy &() VULKAN_HPP_NOEXCEPT
+ operator VkBufferImageCopy &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferImageCopy *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -11611,12 +11182,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( BufferImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( bufferOffset == rhs.bufferOffset ) && ( bufferRowLength == rhs.bufferRowLength ) &&
- ( bufferImageHeight == rhs.bufferImageHeight ) && ( imageSubresource == rhs.imageSubresource ) &&
- ( imageOffset == rhs.imageOffset ) && ( imageExtent == rhs.imageExtent );
+ return ( bufferOffset == rhs.bufferOffset ) && ( bufferRowLength == rhs.bufferRowLength ) && ( bufferImageHeight == rhs.bufferImageHeight ) &&
+ ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && ( imageExtent == rhs.imageExtent );
# endif
}
@@ -11634,12 +11204,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {};
VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferImageCopy ) == sizeof( VkBufferImageCopy ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferImageCopy>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferImageCopy>::value,
- "BufferImageCopy is not nothrow_move_constructible!" );
struct BufferImageCopy2
{
@@ -11654,20 +11218,21 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t bufferImageHeight_ = {},
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {},
VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {},
- VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT
- : bufferOffset( bufferOffset_ )
+ VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , bufferOffset( bufferOffset_ )
, bufferRowLength( bufferRowLength_ )
, bufferImageHeight( bufferImageHeight_ )
, imageSubresource( imageSubresource_ )
, imageOffset( imageOffset_ )
, imageExtent( imageExtent_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR BufferImageCopy2( BufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- BufferImageCopy2( VkBufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : BufferImageCopy2( *reinterpret_cast<BufferImageCopy2 const *>( &rhs ) )
- {}
+ BufferImageCopy2( VkBufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT : BufferImageCopy2( *reinterpret_cast<BufferImageCopy2 const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
BufferImageCopy2 & operator=( BufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -11685,8 +11250,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 &
- setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT
{
bufferOffset = bufferOffset_;
return *this;
@@ -11704,39 +11268,36 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 &
- setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
{
imageSubresource = imageSubresource_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 &
- setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
{
imageOffset = imageOffset_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 &
- setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
{
imageExtent = imageExtent_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBufferImageCopy2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBufferImageCopy2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferImageCopy2 *>( this );
}
- explicit operator VkBufferImageCopy2 &() VULKAN_HPP_NOEXCEPT
+ operator VkBufferImageCopy2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferImageCopy2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -11751,8 +11312,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- sType, pNext, bufferOffset, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent );
+ return std::tie( sType, pNext, bufferOffset, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent );
}
#endif
@@ -11761,12 +11321,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( BufferImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferOffset == rhs.bufferOffset ) &&
- ( bufferRowLength == rhs.bufferRowLength ) && ( bufferImageHeight == rhs.bufferImageHeight ) &&
- ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferOffset == rhs.bufferOffset ) && ( bufferRowLength == rhs.bufferRowLength ) &&
+ ( bufferImageHeight == rhs.bufferImageHeight ) && ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) &&
( imageExtent == rhs.imageExtent );
# endif
}
@@ -11787,12 +11346,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {};
VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferImageCopy2 ) == sizeof( VkBufferImageCopy2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferImageCopy2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferImageCopy2>::value,
- "BufferImageCopy2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eBufferImageCopy2>
@@ -11815,21 +11368,24 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t dstQueueFamilyIndex_ = {},
VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT
- : srcAccessMask( srcAccessMask_ )
+ VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , srcAccessMask( srcAccessMask_ )
, dstAccessMask( dstAccessMask_ )
, srcQueueFamilyIndex( srcQueueFamilyIndex_ )
, dstQueueFamilyIndex( dstQueueFamilyIndex_ )
, buffer( buffer_ )
, offset( offset_ )
, size( size_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
- : BufferMemoryBarrier( *reinterpret_cast<BufferMemoryBarrier const *>( &rhs ) )
- {}
+ BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT : BufferMemoryBarrier( *reinterpret_cast<BufferMemoryBarrier const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
BufferMemoryBarrier & operator=( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -11847,29 +11403,25 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier &
- setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
srcAccessMask = srcAccessMask_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier &
- setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
dstAccessMask = dstAccessMask_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier &
- setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
srcQueueFamilyIndex = srcQueueFamilyIndex_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier &
- setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
dstQueueFamilyIndex = dstQueueFamilyIndex_;
return *this;
@@ -11881,8 +11433,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier &
- setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
@@ -11895,17 +11446,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBufferMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBufferMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferMemoryBarrier *>( this );
}
- explicit operator VkBufferMemoryBarrier &() VULKAN_HPP_NOEXCEPT
+ operator VkBufferMemoryBarrier &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferMemoryBarrier *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -11921,8 +11472,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- sType, pNext, srcAccessMask, dstAccessMask, srcQueueFamilyIndex, dstQueueFamilyIndex, buffer, offset, size );
+ return std::tie( sType, pNext, srcAccessMask, dstAccessMask, srcQueueFamilyIndex, dstQueueFamilyIndex, buffer, offset, size );
}
#endif
@@ -11931,12 +11481,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( BufferMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) &&
- ( dstAccessMask == rhs.dstAccessMask ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) &&
- ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( buffer == rhs.buffer ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ) &&
+ ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( buffer == rhs.buffer ) &&
( offset == rhs.offset ) && ( size == rhs.size );
# endif
}
@@ -11958,12 +11507,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier>::value,
- "BufferMemoryBarrier is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eBufferMemoryBarrier>
@@ -11987,8 +11530,10 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t dstQueueFamilyIndex_ = {},
VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT
- : srcStageMask( srcStageMask_ )
+ VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , srcStageMask( srcStageMask_ )
, srcAccessMask( srcAccessMask_ )
, dstStageMask( dstStageMask_ )
, dstAccessMask( dstAccessMask_ )
@@ -11997,13 +11542,15 @@ namespace VULKAN_HPP_NAMESPACE
, buffer( buffer_ )
, offset( offset_ )
, size( size_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2( BufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferMemoryBarrier2( VkBufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT
: BufferMemoryBarrier2( *reinterpret_cast<BufferMemoryBarrier2 const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
BufferMemoryBarrier2 & operator=( BufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -12021,43 +11568,37 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 &
- setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT
{
srcStageMask = srcStageMask_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 &
- setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
srcAccessMask = srcAccessMask_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 &
- setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT
{
dstStageMask = dstStageMask_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 &
- setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
dstAccessMask = dstAccessMask_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 &
- setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
srcQueueFamilyIndex = srcQueueFamilyIndex_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 &
- setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
dstQueueFamilyIndex = dstQueueFamilyIndex_;
return *this;
@@ -12069,8 +11610,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 &
- setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
@@ -12083,17 +11623,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBufferMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBufferMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferMemoryBarrier2 *>( this );
}
- explicit operator VkBufferMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT
+ operator VkBufferMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferMemoryBarrier2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -12111,17 +11651,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- srcStageMask,
- srcAccessMask,
- dstStageMask,
- dstAccessMask,
- srcQueueFamilyIndex,
- dstQueueFamilyIndex,
- buffer,
- offset,
- size );
+ return std::tie( sType, pNext, srcStageMask, srcAccessMask, dstStageMask, dstAccessMask, srcQueueFamilyIndex, dstQueueFamilyIndex, buffer, offset, size );
}
#endif
@@ -12130,14 +11660,12 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( BufferMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) &&
- ( srcAccessMask == rhs.srcAccessMask ) && ( dstStageMask == rhs.dstStageMask ) &&
- ( dstAccessMask == rhs.dstAccessMask ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) &&
- ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( buffer == rhs.buffer ) &&
- ( offset == rhs.offset ) && ( size == rhs.size );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) &&
+ ( dstStageMask == rhs.dstStageMask ) && ( dstAccessMask == rhs.dstAccessMask ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) &&
+ ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( size == rhs.size );
# endif
}
@@ -12160,12 +11688,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 ) == sizeof( VkBufferMemoryBarrier2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2>::value,
- "BufferMemoryBarrier2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eBufferMemoryBarrier2>
@@ -12182,20 +11704,21 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryRequirementsInfo2;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT
- : buffer( buffer_ )
- {}
+ VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , buffer( buffer_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- BufferMemoryRequirementsInfo2( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferMemoryRequirementsInfo2( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
: BufferMemoryRequirementsInfo2( *reinterpret_cast<BufferMemoryRequirementsInfo2 const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- BufferMemoryRequirementsInfo2 &
- operator=( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ BufferMemoryRequirementsInfo2 & operator=( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferMemoryRequirementsInfo2 & operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -12210,25 +11733,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferMemoryRequirementsInfo2 &
- setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferMemoryRequirementsInfo2 & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBufferMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBufferMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( this );
}
- explicit operator VkBufferMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
+ operator VkBufferMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferMemoryRequirementsInfo2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -12245,7 +11767,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( BufferMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer );
@@ -12263,14 +11785,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 ) ==
- sizeof( VkBufferMemoryRequirementsInfo2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2>::value,
- "BufferMemoryRequirementsInfo2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eBufferMemoryRequirementsInfo2>
@@ -12284,27 +11798,26 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkBufferOpaqueCaptureAddressCreateInfo;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eBufferOpaqueCaptureAddressCreateInfo;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferOpaqueCaptureAddressCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( uint64_t opaqueCaptureAddress_ = {} ) VULKAN_HPP_NOEXCEPT
- : opaqueCaptureAddress( opaqueCaptureAddress_ )
- {}
+ VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( uint64_t opaqueCaptureAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , opaqueCaptureAddress( opaqueCaptureAddress_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( BufferOpaqueCaptureAddressCreateInfo const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferOpaqueCaptureAddressCreateInfo( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: BufferOpaqueCaptureAddressCreateInfo( *reinterpret_cast<BufferOpaqueCaptureAddressCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- BufferOpaqueCaptureAddressCreateInfo &
- operator=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ BufferOpaqueCaptureAddressCreateInfo & operator=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- BufferOpaqueCaptureAddressCreateInfo &
- operator=( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ BufferOpaqueCaptureAddressCreateInfo & operator=( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo const *>( &rhs );
return *this;
@@ -12317,25 +11830,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferOpaqueCaptureAddressCreateInfo &
- setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferOpaqueCaptureAddressCreateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT
{
opaqueCaptureAddress = opaqueCaptureAddress_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBufferOpaqueCaptureAddressCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBufferOpaqueCaptureAddressCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferOpaqueCaptureAddressCreateInfo *>( this );
}
- explicit operator VkBufferOpaqueCaptureAddressCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkBufferOpaqueCaptureAddressCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferOpaqueCaptureAddressCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -12352,7 +11864,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( BufferOpaqueCaptureAddressCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress );
@@ -12370,14 +11882,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
uint64_t opaqueCaptureAddress = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo ) ==
- sizeof( VkBufferOpaqueCaptureAddressCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo>::value,
- "BufferOpaqueCaptureAddressCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eBufferOpaqueCaptureAddressCreateInfo>
@@ -12394,24 +11898,27 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferViewCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- BufferViewCreateInfo( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
- VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize range_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+ VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize range_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, buffer( buffer_ )
, format( format_ )
, offset( offset_ )
, range( range_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: BufferViewCreateInfo( *reinterpret_cast<BufferViewCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
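The constructor hunks in this diff also append a trailing `const void * pNext_ = nullptr` parameter, so an extension chain can be supplied at construction time instead of assigning pNext afterwards. A hedged sketch, assuming a buffer handle created elsewhere and a spec-allowed extension struct behind pChained (neither comes from this diff):

#include <vulkan/vulkan.hpp>

// Builds an R32-float texel-buffer view description in one call; the last
// argument uses the trailing pNext_ parameter introduced by this diff.
vk::BufferViewCreateInfo makeViewInfo( vk::Buffer buffer, const void * pChained )
{
  return vk::BufferViewCreateInfo( vk::BufferViewCreateFlags{},
                                   buffer,
                                   vk::Format::eR32Sfloat,
                                   0,              // offset
                                   VK_WHOLE_SIZE,  // range
                                   pChained );     // pNext_ at construction
}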
BufferViewCreateInfo & operator=( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -12429,8 +11936,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -12448,32 +11954,30 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo &
- setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo &
- setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
{
range = range_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkBufferViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkBufferViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBufferViewCreateInfo *>( this );
}
- explicit operator VkBufferViewCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkBufferViewCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBufferViewCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -12496,11 +12000,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( BufferViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( buffer == rhs.buffer ) &&
- ( format == rhs.format ) && ( offset == rhs.offset ) && ( range == rhs.range );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( buffer == rhs.buffer ) && ( format == rhs.format ) &&
+ ( offset == rhs.offset ) && ( range == rhs.range );
# endif
}
@@ -12519,12 +12023,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
VULKAN_HPP_NAMESPACE::DeviceSize range = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo>::value,
- "BufferViewCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eBufferViewCreateInfo>
@@ -12540,18 +12038,19 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCalibratedTimestampInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- CalibratedTimestampInfoEXT( VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ =
- VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice ) VULKAN_HPP_NOEXCEPT
- : timeDomain( timeDomain_ )
- {}
+ VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoEXT( VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , timeDomain( timeDomain_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- CalibratedTimestampInfoEXT( CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoEXT( CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CalibratedTimestampInfoEXT( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: CalibratedTimestampInfoEXT( *reinterpret_cast<CalibratedTimestampInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
CalibratedTimestampInfoEXT & operator=( CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -12569,31 +12068,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoEXT &
- setTimeDomain( VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoEXT & setTimeDomain( VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ ) VULKAN_HPP_NOEXCEPT
{
timeDomain = timeDomain_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkCalibratedTimestampInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkCalibratedTimestampInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( this );
}
- explicit operator VkCalibratedTimestampInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkCalibratedTimestampInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCalibratedTimestampInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::TimeDomainEXT const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::TimeDomainEXT const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -12606,7 +12102,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( CalibratedTimestampInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timeDomain == rhs.timeDomain );
@@ -12624,13 +12120,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT ) ==
- sizeof( VkCalibratedTimestampInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT>::value,
- "CalibratedTimestampInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eCalibratedTimestampInfoEXT>
@@ -12646,17 +12135,17 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointData2NV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR CheckpointData2NV( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage_ = {},
- void * pCheckpointMarker_ = {} ) VULKAN_HPP_NOEXCEPT
- : stage( stage_ )
+ VULKAN_HPP_CONSTEXPR
+ CheckpointData2NV( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage_ = {}, void * pCheckpointMarker_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , stage( stage_ )
, pCheckpointMarker( pCheckpointMarker_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR CheckpointData2NV( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CheckpointData2NV( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT
- : CheckpointData2NV( *reinterpret_cast<CheckpointData2NV const *>( &rhs ) )
- {}
+ CheckpointData2NV( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT : CheckpointData2NV( *reinterpret_cast<CheckpointData2NV const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
CheckpointData2NV & operator=( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -12667,24 +12156,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkCheckpointData2NV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkCheckpointData2NV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCheckpointData2NV *>( this );
}
- explicit operator VkCheckpointData2NV &() VULKAN_HPP_NOEXCEPT
+ operator VkCheckpointData2NV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCheckpointData2NV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &,
- void * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, void * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -12697,11 +12183,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( CheckpointData2NV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) &&
- ( pCheckpointMarker == rhs.pCheckpointMarker );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) && ( pCheckpointMarker == rhs.pCheckpointMarker );
# endif
}
@@ -12717,12 +12202,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage = {};
void * pCheckpointMarker = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CheckpointData2NV ) == sizeof( VkCheckpointData2NV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CheckpointData2NV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CheckpointData2NV>::value,
- "CheckpointData2NV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eCheckpointData2NV>
@@ -12738,18 +12217,18 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointDataNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR CheckpointDataNV(
- VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe,
- void * pCheckpointMarker_ = {} ) VULKAN_HPP_NOEXCEPT
- : stage( stage_ )
+ VULKAN_HPP_CONSTEXPR CheckpointDataNV( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe,
+ void * pCheckpointMarker_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , stage( stage_ )
, pCheckpointMarker( pCheckpointMarker_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR CheckpointDataNV( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CheckpointDataNV( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : CheckpointDataNV( *reinterpret_cast<CheckpointDataNV const *>( &rhs ) )
- {}
+ CheckpointDataNV( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT : CheckpointDataNV( *reinterpret_cast<CheckpointDataNV const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
CheckpointDataNV & operator=( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -12760,24 +12239,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkCheckpointDataNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkCheckpointDataNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCheckpointDataNV *>( this );
}
- explicit operator VkCheckpointDataNV &() VULKAN_HPP_NOEXCEPT
+ operator VkCheckpointDataNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCheckpointDataNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::PipelineStageFlagBits const &,
- void * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlagBits const &, void * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -12790,11 +12266,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( CheckpointDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) &&
- ( pCheckpointMarker == rhs.pCheckpointMarker );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) && ( pCheckpointMarker == rhs.pCheckpointMarker );
# endif
}
@@ -12805,17 +12280,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointDataNV;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointDataNV;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe;
void * pCheckpointMarker = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CheckpointDataNV ) == sizeof( VkCheckpointDataNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CheckpointDataNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CheckpointDataNV>::value,
- "CheckpointDataNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eCheckpointDataNV>
@@ -12878,13 +12347,15 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( float depth_ = {}, uint32_t stencil_ = {} ) VULKAN_HPP_NOEXCEPT
: depth( depth_ )
, stencil( stencil_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ClearDepthStencilValue( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
: ClearDepthStencilValue( *reinterpret_cast<ClearDepthStencilValue const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ClearDepthStencilValue & operator=( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -12909,17 +12380,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkClearDepthStencilValue const &() const VULKAN_HPP_NOEXCEPT
+ operator VkClearDepthStencilValue const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkClearDepthStencilValue *>( this );
}
- explicit operator VkClearDepthStencilValue &() VULKAN_HPP_NOEXCEPT
+ operator VkClearDepthStencilValue &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkClearDepthStencilValue *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -12936,7 +12407,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ClearDepthStencilValue const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( depth == rhs.depth ) && ( stencil == rhs.stencil );
@@ -12953,13 +12424,6 @@ namespace VULKAN_HPP_NAMESPACE
float depth = {};
uint32_t stencil = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue ) ==
- sizeof( VkClearDepthStencilValue ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue>::value,
- "ClearDepthStencilValue is not nothrow_move_constructible!" );
union ClearValue
{
@@ -12968,21 +12432,17 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_CONSTEXPR_14 ClearValue( VULKAN_HPP_NAMESPACE::ClearColorValue color_ = {} ) : color( color_ ) {}
- VULKAN_HPP_CONSTEXPR_14 ClearValue( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_ )
- : depthStencil( depthStencil_ )
- {}
+ VULKAN_HPP_CONSTEXPR_14 ClearValue( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_ ) : depthStencil( depthStencil_ ) {}
#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 ClearValue &
- setColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & color_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ClearValue & setColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & color_ ) VULKAN_HPP_NOEXCEPT
{
color = color_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ClearValue &
- setDepthStencil( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const & depthStencil_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ClearValue & setDepthStencil( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const & depthStencil_ ) VULKAN_HPP_NOEXCEPT
{
depthStencil = depthStencil_;
return *this;
@@ -13015,17 +12475,16 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 ClearAttachment( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
uint32_t colorAttachment_ = {},
- VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {} ) VULKAN_HPP_NOEXCEPT
: aspectMask( aspectMask_ )
, colorAttachment( colorAttachment_ )
, clearValue( clearValue_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR_14 ClearAttachment( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ClearAttachment( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
- : ClearAttachment( *reinterpret_cast<ClearAttachment const *>( &rhs ) )
- {}
+ ClearAttachment( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT : ClearAttachment( *reinterpret_cast<ClearAttachment const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ClearAttachment & operator=( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -13037,8 +12496,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 ClearAttachment &
- setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
{
aspectMask = aspectMask_;
return *this;
@@ -13050,30 +12508,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ClearAttachment &
- setClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT
{
clearValue = clearValue_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkClearAttachment const &() const VULKAN_HPP_NOEXCEPT
+ operator VkClearAttachment const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkClearAttachment *>( this );
}
- explicit operator VkClearAttachment &() VULKAN_HPP_NOEXCEPT
+ operator VkClearAttachment &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkClearAttachment *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::
- tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ClearValue const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ClearValue const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -13086,30 +12542,22 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t colorAttachment = {};
VULKAN_HPP_NAMESPACE::ClearValue clearValue = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearAttachment ) == sizeof( VkClearAttachment ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ClearAttachment>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ClearAttachment>::value,
- "ClearAttachment is not nothrow_move_constructible!" );
struct ClearRect
{
using NativeType = VkClearRect;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ClearRect( VULKAN_HPP_NAMESPACE::Rect2D rect_ = {},
- uint32_t baseArrayLayer_ = {},
- uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ClearRect( VULKAN_HPP_NAMESPACE::Rect2D rect_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT
: rect( rect_ )
, baseArrayLayer( baseArrayLayer_ )
, layerCount( layerCount_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ClearRect( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ClearRect( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT : ClearRect( *reinterpret_cast<ClearRect const *>( &rhs ) )
- {}
+ ClearRect( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT : ClearRect( *reinterpret_cast<ClearRect const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ClearRect & operator=( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -13140,17 +12588,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkClearRect const &() const VULKAN_HPP_NOEXCEPT
+ operator VkClearRect const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkClearRect *>( this );
}
- explicit operator VkClearRect &() VULKAN_HPP_NOEXCEPT
+ operator VkClearRect &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkClearRect *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -13167,7 +12615,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( rect == rhs.rect ) && ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount );
@@ -13185,30 +12633,25 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t baseArrayLayer = {};
uint32_t layerCount = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearRect ) == sizeof( VkClearRect ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ClearRect>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ClearRect>::value,
- "ClearRect is not nothrow_move_constructible!" );
struct CoarseSampleLocationNV
{
using NativeType = VkCoarseSampleLocationNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- CoarseSampleLocationNV( uint32_t pixelX_ = {}, uint32_t pixelY_ = {}, uint32_t sample_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( uint32_t pixelX_ = {}, uint32_t pixelY_ = {}, uint32_t sample_ = {} ) VULKAN_HPP_NOEXCEPT
: pixelX( pixelX_ )
, pixelY( pixelY_ )
, sample( sample_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CoarseSampleLocationNV( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
: CoarseSampleLocationNV( *reinterpret_cast<CoarseSampleLocationNV const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
CoarseSampleLocationNV & operator=( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -13239,17 +12682,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkCoarseSampleLocationNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkCoarseSampleLocationNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCoarseSampleLocationNV *>( this );
}
- explicit operator VkCoarseSampleLocationNV &() VULKAN_HPP_NOEXCEPT
+ operator VkCoarseSampleLocationNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCoarseSampleLocationNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -13266,7 +12709,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( CoarseSampleLocationNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( pixelX == rhs.pixelX ) && ( pixelY == rhs.pixelY ) && ( sample == rhs.sample );
@@ -13284,49 +12727,41 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t pixelY = {};
uint32_t sample = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV ) ==
- sizeof( VkCoarseSampleLocationNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV>::value,
- "CoarseSampleLocationNV is not nothrow_move_constructible!" );
struct CoarseSampleOrderCustomNV
{
using NativeType = VkCoarseSampleOrderCustomNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV(
- VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ =
- VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations,
- uint32_t sampleCount_ = {},
- uint32_t sampleLocationCount_ = {},
- const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR
+ CoarseSampleOrderCustomNV( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations,
+ uint32_t sampleCount_ = {},
+ uint32_t sampleLocationCount_ = {},
+ const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT
: shadingRate( shadingRate_ )
, sampleCount( sampleCount_ )
, sampleLocationCount( sampleLocationCount_ )
, pSampleLocations( pSampleLocations_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- CoarseSampleOrderCustomNV( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CoarseSampleOrderCustomNV( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT
: CoarseSampleOrderCustomNV( *reinterpret_cast<CoarseSampleOrderCustomNV const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- CoarseSampleOrderCustomNV(
- VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_,
- uint32_t sampleCount_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV> const &
- sampleLocations_ )
+ CoarseSampleOrderCustomNV( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_,
+ uint32_t sampleCount_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV> const & sampleLocations_ )
: shadingRate( shadingRate_ )
, sampleCount( sampleCount_ )
, sampleLocationCount( static_cast<uint32_t>( sampleLocations_.size() ) )
, pSampleLocations( sampleLocations_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -13339,8 +12774,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV &
- setShadingRate( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setShadingRate( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ ) VULKAN_HPP_NOEXCEPT
{
shadingRate = shadingRate_;
return *this;
@@ -13352,15 +12786,14 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV &
- setSampleLocationCount( uint32_t sampleLocationCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setSampleLocationCount( uint32_t sampleLocationCount_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationCount = sampleLocationCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV &
- setPSampleLocations( const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+ setPSampleLocations( const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT
{
pSampleLocations = pSampleLocations_;
return *this;
@@ -13368,8 +12801,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
CoarseSampleOrderCustomNV & setSampleLocations(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV> const &
- sampleLocations_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV> const & sampleLocations_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationCount = static_cast<uint32_t>( sampleLocations_.size() );
pSampleLocations = sampleLocations_.data();
@@ -13378,17 +12810,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkCoarseSampleOrderCustomNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkCoarseSampleOrderCustomNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( this );
}
- explicit operator VkCoarseSampleOrderCustomNV &() VULKAN_HPP_NOEXCEPT
+ operator VkCoarseSampleOrderCustomNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCoarseSampleOrderCustomNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -13408,11 +12840,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( CoarseSampleOrderCustomNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( shadingRate == rhs.shadingRate ) && ( sampleCount == rhs.sampleCount ) &&
- ( sampleLocationCount == rhs.sampleLocationCount ) && ( pSampleLocations == rhs.pSampleLocations );
+ return ( shadingRate == rhs.shadingRate ) && ( sampleCount == rhs.sampleCount ) && ( sampleLocationCount == rhs.sampleLocationCount ) &&
+ ( pSampleLocations == rhs.pSampleLocations );
# endif
}
@@ -13423,19 +12855,259 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate =
- VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations;
+ VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations;
uint32_t sampleCount = {};
uint32_t sampleLocationCount = {};
const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV ) ==
- sizeof( VkCoarseSampleOrderCustomNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV>::value,
- "CoarseSampleOrderCustomNV is not nothrow_move_constructible!" );
+
+ struct ColorBlendAdvancedEXT
+ {
+ using NativeType = VkColorBlendAdvancedEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ColorBlendAdvancedEXT( VULKAN_HPP_NAMESPACE::BlendOp advancedBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd,
+ VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {},
+ VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated,
+ VULKAN_HPP_NAMESPACE::Bool32 clampResults_ = {} ) VULKAN_HPP_NOEXCEPT
+ : advancedBlendOp( advancedBlendOp_ )
+ , srcPremultiplied( srcPremultiplied_ )
+ , dstPremultiplied( dstPremultiplied_ )
+ , blendOverlap( blendOverlap_ )
+ , clampResults( clampResults_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR ColorBlendAdvancedEXT( ColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ColorBlendAdvancedEXT( VkColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ColorBlendAdvancedEXT( *reinterpret_cast<ColorBlendAdvancedEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ ColorBlendAdvancedEXT & operator=( ColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ColorBlendAdvancedEXT & operator=( VkColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setAdvancedBlendOp( VULKAN_HPP_NAMESPACE::BlendOp advancedBlendOp_ ) VULKAN_HPP_NOEXCEPT
+ {
+ advancedBlendOp = advancedBlendOp_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setSrcPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcPremultiplied = srcPremultiplied_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setDstPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstPremultiplied = dstPremultiplied_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setBlendOverlap( VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ ) VULKAN_HPP_NOEXCEPT
+ {
+ blendOverlap = blendOverlap_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setClampResults( VULKAN_HPP_NAMESPACE::Bool32 clampResults_ ) VULKAN_HPP_NOEXCEPT
+ {
+ clampResults = clampResults_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkColorBlendAdvancedEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkColorBlendAdvancedEXT *>( this );
+ }
+
+ operator VkColorBlendAdvancedEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkColorBlendAdvancedEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::BlendOp const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::BlendOverlapEXT const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( advancedBlendOp, srcPremultiplied, dstPremultiplied, blendOverlap, clampResults );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( ColorBlendAdvancedEXT const & ) const = default;
+#else
+ bool operator==( ColorBlendAdvancedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( advancedBlendOp == rhs.advancedBlendOp ) && ( srcPremultiplied == rhs.srcPremultiplied ) && ( dstPremultiplied == rhs.dstPremultiplied ) &&
+ ( blendOverlap == rhs.blendOverlap ) && ( clampResults == rhs.clampResults );
+# endif
+ }
+
+ bool operator!=( ColorBlendAdvancedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::BlendOp advancedBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
+ VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied = {};
+ VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied = {};
+ VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated;
+ VULKAN_HPP_NAMESPACE::Bool32 clampResults = {};
+ };
+
+ struct ColorBlendEquationEXT
+ {
+ using NativeType = VkColorBlendEquationEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ColorBlendEquationEXT( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
+ VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
+ VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd,
+ VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
+ VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
+ VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd ) VULKAN_HPP_NOEXCEPT
+ : srcColorBlendFactor( srcColorBlendFactor_ )
+ , dstColorBlendFactor( dstColorBlendFactor_ )
+ , colorBlendOp( colorBlendOp_ )
+ , srcAlphaBlendFactor( srcAlphaBlendFactor_ )
+ , dstAlphaBlendFactor( dstAlphaBlendFactor_ )
+ , alphaBlendOp( alphaBlendOp_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR ColorBlendEquationEXT( ColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ColorBlendEquationEXT( VkColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ColorBlendEquationEXT( *reinterpret_cast<ColorBlendEquationEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ ColorBlendEquationEXT & operator=( ColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ColorBlendEquationEXT & operator=( VkColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcColorBlendFactor = srcColorBlendFactor_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstColorBlendFactor = dstColorBlendFactor_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT
+ {
+ colorBlendOp = colorBlendOp_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+ {
+ srcAlphaBlendFactor = srcAlphaBlendFactor_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstAlphaBlendFactor = dstAlphaBlendFactor_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT
+ {
+ alphaBlendOp = alphaBlendOp_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkColorBlendEquationEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkColorBlendEquationEXT *>( this );
+ }
+
+ operator VkColorBlendEquationEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkColorBlendEquationEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::BlendFactor const &,
+ VULKAN_HPP_NAMESPACE::BlendFactor const &,
+ VULKAN_HPP_NAMESPACE::BlendOp const &,
+ VULKAN_HPP_NAMESPACE::BlendFactor const &,
+ VULKAN_HPP_NAMESPACE::BlendFactor const &,
+ VULKAN_HPP_NAMESPACE::BlendOp const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( srcColorBlendFactor, dstColorBlendFactor, colorBlendOp, srcAlphaBlendFactor, dstAlphaBlendFactor, alphaBlendOp );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( ColorBlendEquationEXT const & ) const = default;
+#else
+ bool operator==( ColorBlendEquationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( srcColorBlendFactor == rhs.srcColorBlendFactor ) && ( dstColorBlendFactor == rhs.dstColorBlendFactor ) && ( colorBlendOp == rhs.colorBlendOp ) &&
+ ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor ) && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor ) && ( alphaBlendOp == rhs.alphaBlendOp );
+# endif
+ }
+
+ bool operator!=( ColorBlendEquationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+ VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+ VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
+ VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+ VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+ VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
+ };
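ColorBlendAdvancedEXT and ColorBlendEquationEXT are new wrapper structs added by this diff (they correspond to the VK_EXT_extended_dynamic_state3 dynamic colour-blend state). A usage sketch, not part of the header: the chained setters fill the struct, and because the conversion operators in this header revision are no longer explicit, the wrapper converts implicitly wherever the C type VkColorBlendEquationEXT is expected.

#include <vulkan/vulkan.hpp>

// Standard "source over" alpha blending, expressed with the new wrapper.
void describeAlphaBlend( VkColorBlendEquationEXT & out )
{
  vk::ColorBlendEquationEXT equation = vk::ColorBlendEquationEXT()
                                         .setSrcColorBlendFactor( vk::BlendFactor::eSrcAlpha )
                                         .setDstColorBlendFactor( vk::BlendFactor::eOneMinusSrcAlpha )
                                         .setColorBlendOp( vk::BlendOp::eAdd )
                                         .setSrcAlphaBlendFactor( vk::BlendFactor::eOne )
                                         .setDstAlphaBlendFactor( vk::BlendFactor::eZero )
                                         .setAlphaBlendOp( vk::BlendOp::eAdd );

  out = equation;  // implicit conversion via operator VkColorBlendEquationEXT const &()
}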
struct CommandBufferAllocateInfo
{
@@ -13445,21 +13117,23 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferAllocateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo(
- VULKAN_HPP_NAMESPACE::CommandPool commandPool_ = {},
- VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary,
- uint32_t commandBufferCount_ = {} ) VULKAN_HPP_NOEXCEPT
- : commandPool( commandPool_ )
+ VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ = {},
+ VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary,
+ uint32_t commandBufferCount_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , commandPool( commandPool_ )
, level( level_ )
, commandBufferCount( commandBufferCount_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- CommandBufferAllocateInfo( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: CommandBufferAllocateInfo( *reinterpret_cast<CommandBufferAllocateInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
CommandBufferAllocateInfo & operator=( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -13477,39 +13151,36 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo &
- setCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ ) VULKAN_HPP_NOEXCEPT
{
commandPool = commandPool_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo &
- setLevel( VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setLevel( VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ ) VULKAN_HPP_NOEXCEPT
{
level = level_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo &
- setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
{
commandBufferCount = commandBufferCount_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkCommandBufferAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkCommandBufferAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCommandBufferAllocateInfo *>( this );
}
- explicit operator VkCommandBufferAllocateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkCommandBufferAllocateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCommandBufferAllocateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -13530,11 +13201,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandPool == rhs.commandPool ) &&
- ( level == rhs.level ) && ( commandBufferCount == rhs.commandBufferCount );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandPool == rhs.commandPool ) && ( level == rhs.level ) &&
+ ( commandBufferCount == rhs.commandBufferCount );
# endif
}
@@ -13551,13 +13222,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::CommandBufferLevel level = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary;
uint32_t commandBufferCount = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo ) ==
- sizeof( VkCommandBufferAllocateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo>::value,
- "CommandBufferAllocateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eCommandBufferAllocateInfo>
@@ -13573,27 +13237,29 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo(
- VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
- uint32_t subpass_ = {},
- VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ = {},
- VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ = {},
- VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {} ) VULKAN_HPP_NOEXCEPT
- : renderPass( renderPass_ )
+ VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
+ uint32_t subpass_ = {},
+ VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ = {},
+ VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ = {},
+ VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , renderPass( renderPass_ )
, subpass( subpass_ )
, framebuffer( framebuffer_ )
, occlusionQueryEnable( occlusionQueryEnable_ )
, queryFlags( queryFlags_ )
, pipelineStatistics( pipelineStatistics_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- CommandBufferInheritanceInfo( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: CommandBufferInheritanceInfo( *reinterpret_cast<CommandBufferInheritanceInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
CommandBufferInheritanceInfo & operator=( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -13611,8 +13277,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo &
- setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
{
renderPass = renderPass_;
return *this;
@@ -13624,46 +13289,43 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo &
- setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
{
framebuffer = framebuffer_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo &
- setOcclusionQueryEnable( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setOcclusionQueryEnable( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ ) VULKAN_HPP_NOEXCEPT
{
occlusionQueryEnable = occlusionQueryEnable_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo &
- setQueryFlags( VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setQueryFlags( VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ ) VULKAN_HPP_NOEXCEPT
{
queryFlags = queryFlags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo &
- setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
+ setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
{
pipelineStatistics = pipelineStatistics_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkCommandBufferInheritanceInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkCommandBufferInheritanceInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCommandBufferInheritanceInfo *>( this );
}
- explicit operator VkCommandBufferInheritanceInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkCommandBufferInheritanceInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCommandBufferInheritanceInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -13678,8 +13340,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- sType, pNext, renderPass, subpass, framebuffer, occlusionQueryEnable, queryFlags, pipelineStatistics );
+ return std::tie( sType, pNext, renderPass, subpass, framebuffer, occlusionQueryEnable, queryFlags, pipelineStatistics );
}
#endif
@@ -13688,12 +13349,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) &&
- ( subpass == rhs.subpass ) && ( framebuffer == rhs.framebuffer ) &&
- ( occlusionQueryEnable == rhs.occlusionQueryEnable ) && ( queryFlags == rhs.queryFlags ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) && ( subpass == rhs.subpass ) &&
+ ( framebuffer == rhs.framebuffer ) && ( occlusionQueryEnable == rhs.occlusionQueryEnable ) && ( queryFlags == rhs.queryFlags ) &&
( pipelineStatistics == rhs.pipelineStatistics );
# endif
}
@@ -13705,23 +13365,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceInfo;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
- uint32_t subpass = {};
- VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceInfo;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
+ uint32_t subpass = {};
+ VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {};
VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable = {};
VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags = {};
VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo ) ==
- sizeof( VkCommandBufferInheritanceInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo>::value,
- "CommandBufferInheritanceInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eCommandBufferInheritanceInfo>
@@ -13737,18 +13389,21 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferBeginInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo(
- VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ = {},
- const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ = {},
+ const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, pInheritanceInfo( pInheritanceInfo_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: CommandBufferBeginInfo( *reinterpret_cast<CommandBufferBeginInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
CommandBufferBeginInfo & operator=( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -13766,32 +13421,31 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo &
- setFlags( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setPInheritanceInfo(
- const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo &
+ setPInheritanceInfo( const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ ) VULKAN_HPP_NOEXCEPT
{
pInheritanceInfo = pInheritanceInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCommandBufferBeginInfo *>( this );
}
- explicit operator VkCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCommandBufferBeginInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -13811,11 +13465,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( pInheritanceInfo == rhs.pInheritanceInfo );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pInheritanceInfo == rhs.pInheritanceInfo );
# endif
}
@@ -13826,18 +13479,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferBeginInfo;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferBeginInfo;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags = {};
const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo ) ==
- sizeof( VkCommandBufferBeginInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo>::value,
- "CommandBufferBeginInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eCommandBufferBeginInfo>
@@ -13850,63 +13496,60 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkCommandBufferInheritanceConditionalRenderingInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT(
- VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ = {} ) VULKAN_HPP_NOEXCEPT
- : conditionalRenderingEnable( conditionalRenderingEnable_ )
- {}
+ VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , conditionalRenderingEnable( conditionalRenderingEnable_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT(
- CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ CommandBufferInheritanceConditionalRenderingInfoEXT( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CommandBufferInheritanceConditionalRenderingInfoEXT(
- VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : CommandBufferInheritanceConditionalRenderingInfoEXT(
- *reinterpret_cast<CommandBufferInheritanceConditionalRenderingInfoEXT const *>( &rhs ) )
- {}
+ CommandBufferInheritanceConditionalRenderingInfoEXT( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : CommandBufferInheritanceConditionalRenderingInfoEXT( *reinterpret_cast<CommandBufferInheritanceConditionalRenderingInfoEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
CommandBufferInheritanceConditionalRenderingInfoEXT &
operator=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CommandBufferInheritanceConditionalRenderingInfoEXT &
- operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ CommandBufferInheritanceConditionalRenderingInfoEXT & operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT &
- setConditionalRenderingEnable( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ ) VULKAN_HPP_NOEXCEPT
+ setConditionalRenderingEnable( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ ) VULKAN_HPP_NOEXCEPT
{
conditionalRenderingEnable = conditionalRenderingEnable_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCommandBufferInheritanceConditionalRenderingInfoEXT *>( this );
}
- explicit operator VkCommandBufferInheritanceConditionalRenderingInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkCommandBufferInheritanceConditionalRenderingInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCommandBufferInheritanceConditionalRenderingInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -13923,11 +13566,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( conditionalRenderingEnable == rhs.conditionalRenderingEnable );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conditionalRenderingEnable == rhs.conditionalRenderingEnable );
# endif
}
@@ -13938,19 +13580,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT;
+ const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT ) ==
- sizeof( VkCommandBufferInheritanceConditionalRenderingInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<
- VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT>::value,
- "CommandBufferInheritanceConditionalRenderingInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT>
@@ -13963,49 +13596,46 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkCommandBufferInheritanceRenderPassTransformInfoQCOM;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM(
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ =
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
- VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {} ) VULKAN_HPP_NOEXCEPT
- : transform( transform_ )
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
+ VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , transform( transform_ )
, renderArea( renderArea_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM(
- CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ CommandBufferInheritanceRenderPassTransformInfoQCOM( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CommandBufferInheritanceRenderPassTransformInfoQCOM(
- VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
- : CommandBufferInheritanceRenderPassTransformInfoQCOM(
- *reinterpret_cast<CommandBufferInheritanceRenderPassTransformInfoQCOM const *>( &rhs ) )
- {}
+ CommandBufferInheritanceRenderPassTransformInfoQCOM( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+ : CommandBufferInheritanceRenderPassTransformInfoQCOM( *reinterpret_cast<CommandBufferInheritanceRenderPassTransformInfoQCOM const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
CommandBufferInheritanceRenderPassTransformInfoQCOM &
operator=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CommandBufferInheritanceRenderPassTransformInfoQCOM &
- operator=( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+ CommandBufferInheritanceRenderPassTransformInfoQCOM & operator=( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM &
- setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
+ setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
{
transform = transform_;
return *this;
@@ -14019,17 +13649,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
+ operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCommandBufferInheritanceRenderPassTransformInfoQCOM *>( this );
}
- explicit operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT
+ operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCommandBufferInheritanceRenderPassTransformInfoQCOM *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -14049,11 +13679,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform ) &&
- ( renderArea == rhs.renderArea );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform ) && ( renderArea == rhs.renderArea );
# endif
}
@@ -14064,21 +13693,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform =
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
- VULKAN_HPP_NAMESPACE::Rect2D renderArea = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+ VULKAN_HPP_NAMESPACE::Rect2D renderArea = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM ) ==
- sizeof( VkCommandBufferInheritanceRenderPassTransformInfoQCOM ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<
- VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM>::value,
- "CommandBufferInheritanceRenderPassTransformInfoQCOM is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM>
@@ -14091,60 +13710,60 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkCommandBufferInheritanceRenderingInfo;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eCommandBufferInheritanceRenderingInfo;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceRenderingInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderingInfo(
- VULKAN_HPP_NAMESPACE::RenderingFlags flags_ = {},
- uint32_t viewMask_ = {},
- uint32_t colorAttachmentCount_ = {},
- const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {},
- VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1 )
- VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR
+ CommandBufferInheritanceRenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ = {},
+ uint32_t viewMask_ = {},
+ uint32_t colorAttachmentCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {},
+ VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, viewMask( viewMask_ )
, colorAttachmentCount( colorAttachmentCount_ )
, pColorAttachmentFormats( pColorAttachmentFormats_ )
, depthAttachmentFormat( depthAttachmentFormat_ )
, stencilAttachmentFormat( stencilAttachmentFormat_ )
, rasterizationSamples( rasterizationSamples_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderingInfo( CommandBufferInheritanceRenderingInfo const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderingInfo( CommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CommandBufferInheritanceRenderingInfo( VkCommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : CommandBufferInheritanceRenderingInfo(
- *reinterpret_cast<CommandBufferInheritanceRenderingInfo const *>( &rhs ) )
- {}
+ : CommandBufferInheritanceRenderingInfo( *reinterpret_cast<CommandBufferInheritanceRenderingInfo const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- CommandBufferInheritanceRenderingInfo(
- VULKAN_HPP_NAMESPACE::RenderingFlags flags_,
- uint32_t viewMask_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & colorAttachmentFormats_,
- VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1 )
- : flags( flags_ )
+ CommandBufferInheritanceRenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_,
+ uint32_t viewMask_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & colorAttachmentFormats_,
+ VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, viewMask( viewMask_ )
, colorAttachmentCount( static_cast<uint32_t>( colorAttachmentFormats_.size() ) )
, pColorAttachmentFormats( colorAttachmentFormats_.data() )
, depthAttachmentFormat( depthAttachmentFormat_ )
, stencilAttachmentFormat( stencilAttachmentFormat_ )
, rasterizationSamples( rasterizationSamples_ )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- CommandBufferInheritanceRenderingInfo &
- operator=( CommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ CommandBufferInheritanceRenderingInfo & operator=( CommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CommandBufferInheritanceRenderingInfo &
- operator=( VkCommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ CommandBufferInheritanceRenderingInfo & operator=( VkCommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo const *>( &rhs );
return *this;
@@ -14157,29 +13776,26 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo &
- setFlags( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo &
- setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
{
viewMask = viewMask_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo &
- setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = colorAttachmentCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo &
- setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
+ setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
{
pColorAttachmentFormats = pColorAttachmentFormats_;
return *this;
@@ -14187,8 +13803,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
CommandBufferInheritanceRenderingInfo & setColorAttachmentFormats(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const &
- colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = static_cast<uint32_t>( colorAttachmentFormats_.size() );
pColorAttachmentFormats = colorAttachmentFormats_.data();
@@ -14197,38 +13812,38 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo &
- setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
+ setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
{
depthAttachmentFormat = depthAttachmentFormat_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo &
- setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
+ setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
{
stencilAttachmentFormat = stencilAttachmentFormat_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo &
- setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
+ setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
{
rasterizationSamples = rasterizationSamples_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkCommandBufferInheritanceRenderingInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkCommandBufferInheritanceRenderingInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCommandBufferInheritanceRenderingInfo *>( this );
}
- explicit operator VkCommandBufferInheritanceRenderingInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkCommandBufferInheritanceRenderingInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCommandBufferInheritanceRenderingInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -14244,15 +13859,8 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- flags,
- viewMask,
- colorAttachmentCount,
- pColorAttachmentFormats,
- depthAttachmentFormat,
- stencilAttachmentFormat,
- rasterizationSamples );
+ return std::tie(
+ sType, pNext, flags, viewMask, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat, rasterizationSamples );
}
#endif
@@ -14261,14 +13869,12 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( CommandBufferInheritanceRenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( viewMask == rhs.viewMask ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) &&
- ( pColorAttachmentFormats == rhs.pColorAttachmentFormats ) &&
- ( depthAttachmentFormat == rhs.depthAttachmentFormat ) &&
- ( stencilAttachmentFormat == rhs.stencilAttachmentFormat ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( viewMask == rhs.viewMask ) &&
+ ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachmentFormats == rhs.pColorAttachmentFormats ) &&
+ ( depthAttachmentFormat == rhs.depthAttachmentFormat ) && ( stencilAttachmentFormat == rhs.stencilAttachmentFormat ) &&
( rasterizationSamples == rhs.rasterizationSamples );
# endif
}
@@ -14280,24 +13886,16 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderingInfo;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::RenderingFlags flags = {};
- uint32_t viewMask = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderingInfo;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::RenderingFlags flags = {};
+ uint32_t viewMask = {};
uint32_t colorAttachmentCount = {};
const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats = {};
VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo ) ==
- sizeof( VkCommandBufferInheritanceRenderingInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo>::value,
- "CommandBufferInheritanceRenderingInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eCommandBufferInheritanceRenderingInfo>
@@ -14311,19 +13909,16 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkViewport;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR Viewport( float x_ = {},
- float y_ = {},
- float width_ = {},
- float height_ = {},
- float minDepth_ = {},
- float maxDepth_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR
+ Viewport( float x_ = {}, float y_ = {}, float width_ = {}, float height_ = {}, float minDepth_ = {}, float maxDepth_ = {} ) VULKAN_HPP_NOEXCEPT
: x( x_ )
, y( y_ )
, width( width_ )
, height( height_ )
, minDepth( minDepth_ )
, maxDepth( maxDepth_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR Viewport( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -14376,17 +13971,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkViewport const &() const VULKAN_HPP_NOEXCEPT
+ operator VkViewport const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkViewport *>( this );
}
- explicit operator VkViewport &() VULKAN_HPP_NOEXCEPT
+ operator VkViewport &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkViewport *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -14403,11 +13998,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( x == rhs.x ) && ( y == rhs.y ) && ( width == rhs.width ) && ( height == rhs.height ) &&
- ( minDepth == rhs.minDepth ) && ( maxDepth == rhs.maxDepth );
+ return ( x == rhs.x ) && ( y == rhs.y ) && ( width == rhs.width ) && ( height == rhs.height ) && ( minDepth == rhs.minDepth ) &&
+ ( maxDepth == rhs.maxDepth );
# endif
}
@@ -14425,54 +14020,45 @@ namespace VULKAN_HPP_NAMESPACE
float minDepth = {};
float maxDepth = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Viewport ) == sizeof( VkViewport ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Viewport>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Viewport>::value,
- "Viewport is not nothrow_move_constructible!" );
struct CommandBufferInheritanceViewportScissorInfoNV
{
using NativeType = VkCommandBufferInheritanceViewportScissorInfoNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eCommandBufferInheritanceViewportScissorInfoNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceViewportScissorInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR CommandBufferInheritanceViewportScissorInfoNV(
- VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D_ = {},
- uint32_t viewportDepthCount_ = {},
- const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths_ = {} ) VULKAN_HPP_NOEXCEPT
- : viewportScissor2D( viewportScissor2D_ )
+ VULKAN_HPP_CONSTEXPR CommandBufferInheritanceViewportScissorInfoNV( VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D_ = {},
+ uint32_t viewportDepthCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , viewportScissor2D( viewportScissor2D_ )
, viewportDepthCount( viewportDepthCount_ )
, pViewportDepths( pViewportDepths_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR CommandBufferInheritanceViewportScissorInfoNV(
- CommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ CommandBufferInheritanceViewportScissorInfoNV( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CommandBufferInheritanceViewportScissorInfoNV( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : CommandBufferInheritanceViewportScissorInfoNV(
- *reinterpret_cast<CommandBufferInheritanceViewportScissorInfoNV const *>( &rhs ) )
- {}
+ CommandBufferInheritanceViewportScissorInfoNV( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : CommandBufferInheritanceViewportScissorInfoNV( *reinterpret_cast<CommandBufferInheritanceViewportScissorInfoNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- CommandBufferInheritanceViewportScissorInfoNV &
- operator=( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ CommandBufferInheritanceViewportScissorInfoNV & operator=( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CommandBufferInheritanceViewportScissorInfoNV &
- operator=( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ CommandBufferInheritanceViewportScissorInfoNV & operator=( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
@@ -14485,32 +14071,31 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV &
- setViewportDepthCount( uint32_t viewportDepthCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setViewportDepthCount( uint32_t viewportDepthCount_ ) VULKAN_HPP_NOEXCEPT
{
viewportDepthCount = viewportDepthCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV &
- setPViewportDepths( const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths_ ) VULKAN_HPP_NOEXCEPT
+ setPViewportDepths( const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths_ ) VULKAN_HPP_NOEXCEPT
{
pViewportDepths = pViewportDepths_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkCommandBufferInheritanceViewportScissorInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkCommandBufferInheritanceViewportScissorInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCommandBufferInheritanceViewportScissorInfoNV *>( this );
}
- explicit operator VkCommandBufferInheritanceViewportScissorInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkCommandBufferInheritanceViewportScissorInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCommandBufferInheritanceViewportScissorInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -14531,7 +14116,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewportScissor2D == rhs.viewportScissor2D ) &&
@@ -14546,21 +14131,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceViewportScissorInfoNV;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceViewportScissorInfoNV;
+ const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D = {};
uint32_t viewportDepthCount = {};
const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV ) ==
- sizeof( VkCommandBufferInheritanceViewportScissorInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV>::value,
- "CommandBufferInheritanceViewportScissorInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eCommandBufferInheritanceViewportScissorInfoNV>
@@ -14577,16 +14153,20 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfo( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ = {},
- uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT
- : commandBuffer( commandBuffer_ )
+ uint32_t deviceMask_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , commandBuffer( commandBuffer_ )
, deviceMask( deviceMask_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfo( CommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CommandBufferSubmitInfo( VkCommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: CommandBufferSubmitInfo( *reinterpret_cast<CommandBufferSubmitInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
CommandBufferSubmitInfo & operator=( CommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -14604,8 +14184,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo &
- setCommandBuffer( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setCommandBuffer( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ ) VULKAN_HPP_NOEXCEPT
{
commandBuffer = commandBuffer_;
return *this;
@@ -14618,24 +14197,21 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkCommandBufferSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkCommandBufferSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCommandBufferSubmitInfo *>( this );
}
- explicit operator VkCommandBufferSubmitInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkCommandBufferSubmitInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCommandBufferSubmitInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::CommandBuffer const &,
- uint32_t const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CommandBuffer const &, uint32_t const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -14648,11 +14224,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( CommandBufferSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandBuffer == rhs.commandBuffer ) &&
- ( deviceMask == rhs.deviceMask );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandBuffer == rhs.commandBuffer ) && ( deviceMask == rhs.deviceMask );
# endif
}
@@ -14668,13 +14243,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer = {};
uint32_t deviceMask = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo ) ==
- sizeof( VkCommandBufferSubmitInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo>::value,
- "CommandBufferSubmitInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eCommandBufferSubmitInfo>
@@ -14691,17 +14259,21 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandPoolCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ = {},
- uint32_t queueFamilyIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ = {},
+ uint32_t queueFamilyIndex_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, queueFamilyIndex( queueFamilyIndex_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: CommandPoolCreateInfo( *reinterpret_cast<CommandPoolCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
CommandPoolCreateInfo & operator=( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -14719,39 +14291,34 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo &
- setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
queueFamilyIndex = queueFamilyIndex_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkCommandPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkCommandPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCommandPoolCreateInfo *>( this );
}
- explicit operator VkCommandPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkCommandPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCommandPoolCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags const &,
- uint32_t const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags const &, uint32_t const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -14764,11 +14331,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( queueFamilyIndex == rhs.queueFamilyIndex );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueFamilyIndex == rhs.queueFamilyIndex );
# endif
}
@@ -14784,12 +14350,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags = {};
uint32_t queueFamilyIndex = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo>::value,
- "CommandPoolCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eCommandPoolCreateInfo>
@@ -14802,18 +14362,19 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkSpecializationMapEntry;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- SpecializationMapEntry( uint32_t constantID_ = {}, uint32_t offset_ = {}, size_t size_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SpecializationMapEntry( uint32_t constantID_ = {}, uint32_t offset_ = {}, size_t size_ = {} ) VULKAN_HPP_NOEXCEPT
: constantID( constantID_ )
, offset( offset_ )
, size( size_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR SpecializationMapEntry( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
: SpecializationMapEntry( *reinterpret_cast<SpecializationMapEntry const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
SpecializationMapEntry & operator=( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -14844,17 +14405,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSpecializationMapEntry const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSpecializationMapEntry const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSpecializationMapEntry *>( this );
}
- explicit operator VkSpecializationMapEntry &() VULKAN_HPP_NOEXCEPT
+ operator VkSpecializationMapEntry &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSpecializationMapEntry *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -14871,7 +14432,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( constantID == rhs.constantID ) && ( offset == rhs.offset ) && ( size == rhs.size );
@@ -14889,13 +14450,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t offset = {};
size_t size = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SpecializationMapEntry ) ==
- sizeof( VkSpecializationMapEntry ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SpecializationMapEntry>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SpecializationMapEntry>::value,
- "SpecializationMapEntry is not nothrow_move_constructible!" );
struct SpecializationInfo
{
@@ -14905,29 +14459,28 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_CONSTEXPR SpecializationInfo( uint32_t mapEntryCount_ = {},
const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ = {},
size_t dataSize_ = {},
- const void * pData_ = {} ) VULKAN_HPP_NOEXCEPT
+ const void * pData_ = {} ) VULKAN_HPP_NOEXCEPT
: mapEntryCount( mapEntryCount_ )
, pMapEntries( pMapEntries_ )
, dataSize( dataSize_ )
, pData( pData_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR SpecializationInfo( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SpecializationInfo( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : SpecializationInfo( *reinterpret_cast<SpecializationInfo const *>( &rhs ) )
- {}
+ SpecializationInfo( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SpecializationInfo( *reinterpret_cast<SpecializationInfo const *>( &rhs ) ) {}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
- SpecializationInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
- const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const & mapEntries_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ = {} )
+ SpecializationInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const & mapEntries_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ = {} )
: mapEntryCount( static_cast<uint32_t>( mapEntries_.size() ) )
, pMapEntries( mapEntries_.data() )
, dataSize( data_.size() * sizeof( T ) )
, pData( data_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -14946,17 +14499,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SpecializationInfo &
- setPMapEntries( const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setPMapEntries( const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ ) VULKAN_HPP_NOEXCEPT
{
pMapEntries = pMapEntries_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- SpecializationInfo & setMapEntries(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const &
- mapEntries_ ) VULKAN_HPP_NOEXCEPT
+ SpecializationInfo &
+ setMapEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const & mapEntries_ ) VULKAN_HPP_NOEXCEPT
{
mapEntryCount = static_cast<uint32_t>( mapEntries_.size() );
pMapEntries = mapEntries_.data();
@@ -14978,8 +14529,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
- SpecializationInfo &
- setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
+ SpecializationInfo & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
{
dataSize = data_.size() * sizeof( T );
pData = data_.data();
@@ -14988,24 +14538,21 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSpecializationInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSpecializationInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSpecializationInfo *>( this );
}
- explicit operator VkSpecializationInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkSpecializationInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSpecializationInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<uint32_t const &,
- const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * const &,
- size_t const &,
- const void * const &>
+ std::tuple<uint32_t const &, const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * const &, size_t const &, const void * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -15018,11 +14565,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( mapEntryCount == rhs.mapEntryCount ) && ( pMapEntries == rhs.pMapEntries ) &&
- ( dataSize == rhs.dataSize ) && ( pData == rhs.pData );
+ return ( mapEntryCount == rhs.mapEntryCount ) && ( pMapEntries == rhs.pMapEntries ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData );
# endif
}
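    // Editor's note (illustrative, not generated code): reflect() is only available when the
    // opt-in VULKAN_HPP_USE_REFLECT macro is defined before including the header; when it is,
    // operator== compares the member tuples returned by reflect(). A minimal sketch, assuming
    // two existing SpecializationInfo values lhs and rhs:
    //
    //   #define VULKAN_HPP_USE_REFLECT
    //   #include <vulkan/vulkan.hpp>
    //   bool same = ( lhs.reflect() == rhs.reflect() );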
@@ -15038,12 +14584,6 @@ namespace VULKAN_HPP_NAMESPACE
size_t dataSize = {};
const void * pData = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SpecializationInfo ) == sizeof( VkSpecializationInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SpecializationInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SpecializationInfo>::value,
- "SpecializationInfo is not nothrow_move_constructible!" );
struct PipelineShaderStageCreateInfo
{
@@ -15053,29 +14593,30 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo(
- VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex,
- VULKAN_HPP_NAMESPACE::ShaderModule module_ = {},
- const char * pName_ = {},
- const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex,
+ VULKAN_HPP_NAMESPACE::ShaderModule module_ = {},
+ const char * pName_ = {},
+ const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, stage( stage_ )
, module( module_ )
, pName( pName_ )
, pSpecializationInfo( pSpecializationInfo_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PipelineShaderStageCreateInfo( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineShaderStageCreateInfo( *reinterpret_cast<PipelineShaderStageCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineShaderStageCreateInfo &
- operator=( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineShaderStageCreateInfo & operator=( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineShaderStageCreateInfo & operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -15090,22 +14631,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo &
- setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT
{
stage = stage_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo &
- setModule( VULKAN_HPP_NAMESPACE::ShaderModule module_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setModule( VULKAN_HPP_NAMESPACE::ShaderModule module_ ) VULKAN_HPP_NOEXCEPT
{
module = module_;
return *this;
@@ -15117,25 +14655,25 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPSpecializationInfo(
- const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo &
+ setPSpecializationInfo( const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT
{
pSpecializationInfo = pSpecializationInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineShaderStageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineShaderStageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineShaderStageCreateInfo *>( this );
}
- explicit operator VkPipelineShaderStageCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineShaderStageCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineShaderStageCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -15178,9 +14716,8 @@ namespace VULKAN_HPP_NAMESPACE
bool operator==( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) &&
- ( module == rhs.module ) && ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ) &&
- ( pSpecializationInfo == rhs.pSpecializationInfo );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && ( module == rhs.module ) &&
+ ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ) && ( pSpecializationInfo == rhs.pSpecializationInfo );
}
bool operator!=( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
@@ -15189,22 +14726,14 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageCreateInfo;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags = {};
- VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex;
- VULKAN_HPP_NAMESPACE::ShaderModule module = {};
- const char * pName = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageCreateInfo;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex;
+ VULKAN_HPP_NAMESPACE::ShaderModule module = {};
+ const char * pName = {};
const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo ) ==
- sizeof( VkPipelineShaderStageCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo>::value,
- "PipelineShaderStageCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineShaderStageCreateInfo>
@@ -15220,24 +14749,27 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = {},
- VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
- VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
- int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
+ VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
+ int32_t basePipelineIndex_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, stage( stage_ )
, layout( layout_ )
, basePipelineHandle( basePipelineHandle_ )
, basePipelineIndex( basePipelineIndex_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ComputePipelineCreateInfo( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ComputePipelineCreateInfo( *reinterpret_cast<ComputePipelineCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
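    // Editor's illustrative sketch, not part of the generated header: the constructor above now
    // takes pNext_ as a trailing defaulted parameter, so an extension structure can be attached
    // at construction time. Assumes the default `vk` namespace; the handles and chained pointer
    // are supplied by the caller.
    static vk::ComputePipelineCreateInfo makeExampleComputePipelineCI( vk::PipelineShaderStageCreateInfo const & stage,
                                                                       vk::PipelineLayout                        layout,
                                                                       const void *                              pNext = nullptr )
    {
      // flags = {}, no base pipeline (handle {} / index -1); pNext passed straight through
      return vk::ComputePipelineCreateInfo( {}, stage, layout, {}, -1, pNext );
    }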
ComputePipelineCreateInfo & operator=( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -15255,53 +14787,48 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo &
- setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & stage_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & stage_ ) VULKAN_HPP_NOEXCEPT
{
stage = stage_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo &
- setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
{
layout = layout_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo &
- setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
{
basePipelineHandle = basePipelineHandle_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo &
- setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
{
basePipelineIndex = basePipelineIndex_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkComputePipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkComputePipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkComputePipelineCreateInfo *>( this );
}
- explicit operator VkComputePipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkComputePipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkComputePipelineCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -15324,12 +14851,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) &&
- ( layout == rhs.layout ) && ( basePipelineHandle == rhs.basePipelineHandle ) &&
- ( basePipelineIndex == rhs.basePipelineIndex );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && ( layout == rhs.layout ) &&
+ ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex );
# endif
}
@@ -15348,13 +14874,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
int32_t basePipelineIndex = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo ) ==
- sizeof( VkComputePipelineCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo>::value,
- "ComputePipelineCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eComputePipelineCreateInfo>
@@ -15367,28 +14886,29 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkConditionalRenderingBeginInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eConditionalRenderingBeginInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eConditionalRenderingBeginInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT(
- VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
- VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = {} ) VULKAN_HPP_NOEXCEPT
- : buffer( buffer_ )
+ VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
+ VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , buffer( buffer_ )
, offset( offset_ )
, flags( flags_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ConditionalRenderingBeginInfoEXT( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ConditionalRenderingBeginInfoEXT( *reinterpret_cast<ConditionalRenderingBeginInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- ConditionalRenderingBeginInfoEXT &
- operator=( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ ConditionalRenderingBeginInfoEXT & operator=( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ConditionalRenderingBeginInfoEXT & operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -15403,39 +14923,36 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT &
- setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT &
- setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT &
- setFlags( VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkConditionalRenderingBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkConditionalRenderingBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( this );
}
- explicit operator VkConditionalRenderingBeginInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkConditionalRenderingBeginInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkConditionalRenderingBeginInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -15456,11 +14973,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ConditionalRenderingBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) &&
- ( flags == rhs.flags );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( flags == rhs.flags );
# endif
}
@@ -15477,14 +14993,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT ) ==
- sizeof( VkConditionalRenderingBeginInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT>::value,
- "ConditionalRenderingBeginInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eConditionalRenderingBeginInfoEXT>
@@ -15497,21 +15005,17 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkConformanceVersion;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ConformanceVersion( uint8_t major_ = {},
- uint8_t minor_ = {},
- uint8_t subminor_ = {},
- uint8_t patch_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ConformanceVersion( uint8_t major_ = {}, uint8_t minor_ = {}, uint8_t subminor_ = {}, uint8_t patch_ = {} ) VULKAN_HPP_NOEXCEPT
: major( major_ )
, minor( minor_ )
, subminor( subminor_ )
, patch( patch_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ConformanceVersion( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ConformanceVersion( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
- : ConformanceVersion( *reinterpret_cast<ConformanceVersion const *>( &rhs ) )
- {}
+ ConformanceVersion( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT : ConformanceVersion( *reinterpret_cast<ConformanceVersion const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ConformanceVersion & operator=( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -15548,17 +15052,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkConformanceVersion const &() const VULKAN_HPP_NOEXCEPT
+ operator VkConformanceVersion const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkConformanceVersion *>( this );
}
- explicit operator VkConformanceVersion &() VULKAN_HPP_NOEXCEPT
+ operator VkConformanceVersion &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkConformanceVersion *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -15575,7 +15079,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( major == rhs.major ) && ( minor == rhs.minor ) && ( subminor == rhs.subminor ) && ( patch == rhs.patch );
@@ -15594,12 +15098,6 @@ namespace VULKAN_HPP_NAMESPACE
uint8_t subminor = {};
uint8_t patch = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ConformanceVersion ) == sizeof( VkConformanceVersion ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ConformanceVersion>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ConformanceVersion>::value,
- "ConformanceVersion is not nothrow_move_constructible!" );
using ConformanceVersionKHR = ConformanceVersion;
struct CooperativeMatrixPropertiesNV
@@ -15610,16 +15108,17 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixPropertiesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV(
- uint32_t MSize_ = {},
- uint32_t NSize_ = {},
- uint32_t KSize_ = {},
- VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16,
- VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16,
- VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16,
- VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16,
- VULKAN_HPP_NAMESPACE::ScopeNV scope_ = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice ) VULKAN_HPP_NOEXCEPT
- : MSize( MSize_ )
+ VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( uint32_t MSize_ = {},
+ uint32_t NSize_ = {},
+ uint32_t KSize_ = {},
+ VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16,
+ VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16,
+ VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16,
+ VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16,
+ VULKAN_HPP_NAMESPACE::ScopeNV scope_ = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice,
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , MSize( MSize_ )
, NSize( NSize_ )
, KSize( KSize_ )
, AType( AType_ )
@@ -15627,18 +15126,18 @@ namespace VULKAN_HPP_NAMESPACE
, CType( CType_ )
, DType( DType_ )
, scope( scope_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- CooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CooperativeMatrixPropertiesNV( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: CooperativeMatrixPropertiesNV( *reinterpret_cast<CooperativeMatrixPropertiesNV const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- CooperativeMatrixPropertiesNV &
- operator=( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ CooperativeMatrixPropertiesNV & operator=( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CooperativeMatrixPropertiesNV & operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -15671,53 +15170,48 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV &
- setAType( VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setAType( VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ ) VULKAN_HPP_NOEXCEPT
{
AType = AType_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV &
- setBType( VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setBType( VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ ) VULKAN_HPP_NOEXCEPT
{
BType = BType_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV &
- setCType( VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setCType( VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ ) VULKAN_HPP_NOEXCEPT
{
CType = CType_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV &
- setDType( VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setDType( VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ ) VULKAN_HPP_NOEXCEPT
{
DType = DType_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV &
- setScope( VULKAN_HPP_NAMESPACE::ScopeNV scope_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setScope( VULKAN_HPP_NAMESPACE::ScopeNV scope_ ) VULKAN_HPP_NOEXCEPT
{
scope = scope_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCooperativeMatrixPropertiesNV *>( this );
}
- explicit operator VkCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -15743,12 +15237,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( MSize == rhs.MSize ) && ( NSize == rhs.NSize ) &&
- ( KSize == rhs.KSize ) && ( AType == rhs.AType ) && ( BType == rhs.BType ) && ( CType == rhs.CType ) &&
- ( DType == rhs.DType ) && ( scope == rhs.scope );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( MSize == rhs.MSize ) && ( NSize == rhs.NSize ) && ( KSize == rhs.KSize ) &&
+ ( AType == rhs.AType ) && ( BType == rhs.BType ) && ( CType == rhs.CType ) && ( DType == rhs.DType ) && ( scope == rhs.scope );
# endif
}
@@ -15770,14 +15263,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ComponentTypeNV DType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
VULKAN_HPP_NAMESPACE::ScopeNV scope = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV ) ==
- sizeof( VkCooperativeMatrixPropertiesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV>::value,
- "CooperativeMatrixPropertiesNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eCooperativeMatrixPropertiesNV>
@@ -15790,29 +15275,30 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkCopyAccelerationStructureInfoKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureInfoKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR(
- VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {},
- VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {},
- VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ =
- VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone ) VULKAN_HPP_NOEXCEPT
- : src( src_ )
+ VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {},
+ VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {},
+ VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , src( src_ )
, dst( dst_ )
, mode( mode_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- CopyAccelerationStructureInfoKHR( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CopyAccelerationStructureInfoKHR( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: CopyAccelerationStructureInfoKHR( *reinterpret_cast<CopyAccelerationStructureInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- CopyAccelerationStructureInfoKHR &
- operator=( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ CopyAccelerationStructureInfoKHR & operator=( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CopyAccelerationStructureInfoKHR & operator=( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -15827,39 +15313,36 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR &
- setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT
{
src = src_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR &
- setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT
{
dst = dst_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR &
- setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
{
mode = mode_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkCopyAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkCopyAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( this );
}
- explicit operator VkCopyAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkCopyAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCopyAccelerationStructureInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -15880,11 +15363,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( CopyAccelerationStructureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( src == rhs.src ) && ( dst == rhs.dst ) &&
- ( mode == rhs.mode );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( src == rhs.src ) && ( dst == rhs.dst ) && ( mode == rhs.mode );
# endif
}
@@ -15899,17 +15381,8 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {};
- VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode =
- VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
+ VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR ) ==
- sizeof( VkCopyAccelerationStructureInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR>::value,
- "CopyAccelerationStructureInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eCopyAccelerationStructureInfoKHR>
@@ -15922,57 +15395,51 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkCopyAccelerationStructureToMemoryInfoKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eCopyAccelerationStructureToMemoryInfoKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR(
- VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {},
- VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {},
- VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ =
- VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone ) VULKAN_HPP_NOEXCEPT
- : src( src_ )
+ VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {},
+ VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , src( src_ )
, dst( dst_ )
, mode( mode_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR(
- CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CopyAccelerationStructureToMemoryInfoKHR( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs )
- VULKAN_HPP_NOEXCEPT
- : CopyAccelerationStructureToMemoryInfoKHR(
- *reinterpret_cast<CopyAccelerationStructureToMemoryInfoKHR const *>( &rhs ) )
- {}
+ CopyAccelerationStructureToMemoryInfoKHR( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : CopyAccelerationStructureToMemoryInfoKHR( *reinterpret_cast<CopyAccelerationStructureToMemoryInfoKHR const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- CopyAccelerationStructureToMemoryInfoKHR &
- operator=( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ CopyAccelerationStructureToMemoryInfoKHR & operator=( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CopyAccelerationStructureToMemoryInfoKHR &
- operator=( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ CopyAccelerationStructureToMemoryInfoKHR & operator=( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR &
- setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT
{
src = src_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR &
- setDst( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dst_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setDst( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dst_ ) VULKAN_HPP_NOEXCEPT
{
dst = dst_;
return *this;
@@ -15986,17 +15453,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkCopyAccelerationStructureToMemoryInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkCopyAccelerationStructureToMemoryInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( this );
}
- explicit operator VkCopyAccelerationStructureToMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkCopyAccelerationStructureToMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCopyAccelerationStructureToMemoryInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -16013,22 +15480,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {};
- VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst = {};
- VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode =
- VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR ) ==
- sizeof( VkCopyAccelerationStructureToMemoryInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR>::value,
- "CopyAccelerationStructureToMemoryInfoKHR is not nothrow_move_constructible!" );
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {};
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst = {};
+ VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
+ };
template <>
struct CppType<StructureType, StructureType::eCopyAccelerationStructureToMemoryInfoKHR>
@@ -16047,29 +15504,28 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_CONSTEXPR CopyBufferInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {},
VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {},
uint32_t regionCount_ = {},
- const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions_ = {} ) VULKAN_HPP_NOEXCEPT
- : srcBuffer( srcBuffer_ )
+ const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , srcBuffer( srcBuffer_ )
, dstBuffer( dstBuffer_ )
, regionCount( regionCount_ )
, pRegions( pRegions_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR CopyBufferInfo2( CopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CopyBufferInfo2( VkCopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : CopyBufferInfo2( *reinterpret_cast<CopyBufferInfo2 const *>( &rhs ) )
- {}
+ CopyBufferInfo2( VkCopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : CopyBufferInfo2( *reinterpret_cast<CopyBufferInfo2 const *>( &rhs ) ) {}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- CopyBufferInfo2(
- VULKAN_HPP_NAMESPACE::Buffer srcBuffer_,
- VULKAN_HPP_NAMESPACE::Buffer dstBuffer_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferCopy2> const & regions_ )
- : srcBuffer( srcBuffer_ )
- , dstBuffer( dstBuffer_ )
- , regionCount( static_cast<uint32_t>( regions_.size() ) )
- , pRegions( regions_.data() )
- {}
+ CopyBufferInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_,
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferCopy2> const & regions_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ ), srcBuffer( srcBuffer_ ), dstBuffer( dstBuffer_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
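    // Editor's illustrative sketch, not part of the generated header: the enhanced-mode
    // constructor above derives regionCount and pRegions from the ArrayProxyNoTemporaries,
    // and pNext_ defaults to nullptr. Assumes the default `vk` namespace; the buffers and
    // region array are hypothetical and must outlive the returned struct.
    static vk::CopyBufferInfo2 makeExampleCopyBufferInfo( vk::Buffer                             src,
                                                          vk::Buffer                             dst,
                                                          std::array<vk::BufferCopy2, 1> const & regions )
    {
      // e.g. regions = { vk::BufferCopy2( 0 /*srcOffset*/, 0 /*dstOffset*/, 256 /*size*/ ) }
      return vk::CopyBufferInfo2( src, dst, regions );
    }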
@@ -16088,15 +15544,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 &
- setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT
{
srcBuffer = srcBuffer_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 &
- setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT
{
dstBuffer = dstBuffer_;
return *this;
@@ -16108,17 +15562,14 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 &
- setPRegions( const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
{
pRegions = pRegions_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- CopyBufferInfo2 & setRegions(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferCopy2> const & regions_ )
- VULKAN_HPP_NOEXCEPT
+ CopyBufferInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferCopy2> const & regions_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = static_cast<uint32_t>( regions_.size() );
pRegions = regions_.data();
@@ -16127,17 +15578,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkCopyBufferInfo2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkCopyBufferInfo2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCopyBufferInfo2 *>( this );
}
- explicit operator VkCopyBufferInfo2 &() VULKAN_HPP_NOEXCEPT
+ operator VkCopyBufferInfo2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCopyBufferInfo2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -16159,11 +15610,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( CopyBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcBuffer == rhs.srcBuffer ) &&
- ( dstBuffer == rhs.dstBuffer ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcBuffer == rhs.srcBuffer ) && ( dstBuffer == rhs.dstBuffer ) &&
+ ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
# endif
}
@@ -16181,12 +15632,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t regionCount = {};
const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyBufferInfo2 ) == sizeof( VkCopyBufferInfo2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyBufferInfo2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyBufferInfo2>::value,
- "CopyBufferInfo2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eCopyBufferInfo2>
@@ -16203,37 +15648,42 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferToImageInfo2;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2(
- VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {},
- VULKAN_HPP_NAMESPACE::Image dstImage_ = {},
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
- uint32_t regionCount_ = {},
- const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ = {} ) VULKAN_HPP_NOEXCEPT
- : srcBuffer( srcBuffer_ )
+ VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {},
+ VULKAN_HPP_NAMESPACE::Image dstImage_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ uint32_t regionCount_ = {},
+ const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , srcBuffer( srcBuffer_ )
, dstImage( dstImage_ )
, dstImageLayout( dstImageLayout_ )
, regionCount( regionCount_ )
, pRegions( pRegions_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2( CopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CopyBufferToImageInfo2( VkCopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
: CopyBufferToImageInfo2( *reinterpret_cast<CopyBufferToImageInfo2 const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- CopyBufferToImageInfo2(
- VULKAN_HPP_NAMESPACE::Buffer srcBuffer_,
- VULKAN_HPP_NAMESPACE::Image dstImage_,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2> const & regions_ )
- : srcBuffer( srcBuffer_ )
+ CopyBufferToImageInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_,
+ VULKAN_HPP_NAMESPACE::Image dstImage_,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2> const & regions_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , srcBuffer( srcBuffer_ )
, dstImage( dstImage_ )
, dstImageLayout( dstImageLayout_ )
, regionCount( static_cast<uint32_t>( regions_.size() ) )
, pRegions( regions_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -16252,22 +15702,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 &
- setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT
{
srcBuffer = srcBuffer_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 &
- setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
{
dstImage = dstImage_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 &
- setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
{
dstImageLayout = dstImageLayout_;
return *this;
@@ -16279,17 +15726,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 &
- setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
{
pRegions = pRegions_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- CopyBufferToImageInfo2 & setRegions(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2> const & regions_ )
- VULKAN_HPP_NOEXCEPT
+ CopyBufferToImageInfo2 &
+ setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2> const & regions_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = static_cast<uint32_t>( regions_.size() );
pRegions = regions_.data();
@@ -16298,17 +15743,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkCopyBufferToImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkCopyBufferToImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCopyBufferToImageInfo2 *>( this );
}
- explicit operator VkCopyBufferToImageInfo2 &() VULKAN_HPP_NOEXCEPT
+ operator VkCopyBufferToImageInfo2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCopyBufferToImageInfo2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -16331,12 +15776,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( CopyBufferToImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcBuffer == rhs.srcBuffer ) &&
- ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) &&
- ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcBuffer == rhs.srcBuffer ) && ( dstImage == rhs.dstImage ) &&
+ ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
# endif
}
@@ -16355,13 +15799,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t regionCount = {};
const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 ) ==
- sizeof( VkCopyBufferToImageInfo2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2>::value,
- "CopyBufferToImageInfo2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eCopyBufferToImageInfo2>
@@ -16379,17 +15816,19 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR
- CopyCommandTransformInfoQCOM( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ =
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity ) VULKAN_HPP_NOEXCEPT
- : transform( transform_ )
- {}
+ CopyCommandTransformInfoQCOM( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , transform( transform_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- CopyCommandTransformInfoQCOM( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR CopyCommandTransformInfoQCOM( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CopyCommandTransformInfoQCOM( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: CopyCommandTransformInfoQCOM( *reinterpret_cast<CopyCommandTransformInfoQCOM const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
CopyCommandTransformInfoQCOM & operator=( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -16407,31 +15846,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM &
- setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
{
transform = transform_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkCopyCommandTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
+ operator VkCopyCommandTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCopyCommandTransformInfoQCOM *>( this );
}
- explicit operator VkCopyCommandTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT
+ operator VkCopyCommandTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCopyCommandTransformInfoQCOM *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -16444,7 +15880,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( CopyCommandTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform );
@@ -16458,19 +15894,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyCommandTransformInfoQCOM;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform =
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyCommandTransformInfoQCOM;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM ) ==
- sizeof( VkCopyCommandTransformInfoQCOM ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM>::value,
- "CopyCommandTransformInfoQCOM is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eCopyCommandTransformInfoQCOM>
@@ -16492,21 +15919,22 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {},
uint32_t dstBinding_ = {},
uint32_t dstArrayElement_ = {},
- uint32_t descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT
- : srcSet( srcSet_ )
+ uint32_t descriptorCount_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , srcSet( srcSet_ )
, srcBinding( srcBinding_ )
, srcArrayElement( srcArrayElement_ )
, dstSet( dstSet_ )
, dstBinding( dstBinding_ )
, dstArrayElement( dstArrayElement_ )
, descriptorCount( descriptorCount_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR CopyDescriptorSet( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
- : CopyDescriptorSet( *reinterpret_cast<CopyDescriptorSet const *>( &rhs ) )
- {}
+ CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT : CopyDescriptorSet( *reinterpret_cast<CopyDescriptorSet const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
CopyDescriptorSet & operator=( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -16524,8 +15952,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet &
- setSrcSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ ) VULKAN_HPP_NOEXCEPT
{
srcSet = srcSet_;
return *this;
@@ -16543,8 +15970,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet &
- setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
{
dstSet = dstSet_;
return *this;
@@ -16569,17 +15995,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkCopyDescriptorSet const &() const VULKAN_HPP_NOEXCEPT
+ operator VkCopyDescriptorSet const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCopyDescriptorSet *>( this );
}
- explicit operator VkCopyDescriptorSet &() VULKAN_HPP_NOEXCEPT
+ operator VkCopyDescriptorSet &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCopyDescriptorSet *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -16595,8 +16021,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- sType, pNext, srcSet, srcBinding, srcArrayElement, dstSet, dstBinding, dstArrayElement, descriptorCount );
+ return std::tie( sType, pNext, srcSet, srcBinding, srcArrayElement, dstSet, dstBinding, dstArrayElement, descriptorCount );
}
#endif
@@ -16605,12 +16030,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSet == rhs.srcSet ) &&
- ( srcBinding == rhs.srcBinding ) && ( srcArrayElement == rhs.srcArrayElement ) &&
- ( dstSet == rhs.dstSet ) && ( dstBinding == rhs.dstBinding ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSet == rhs.srcSet ) && ( srcBinding == rhs.srcBinding ) &&
+ ( srcArrayElement == rhs.srcArrayElement ) && ( dstSet == rhs.dstSet ) && ( dstBinding == rhs.dstBinding ) &&
( dstArrayElement == rhs.dstArrayElement ) && ( descriptorCount == rhs.descriptorCount );
# endif
}
@@ -16632,12 +16056,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t dstArrayElement = {};
uint32_t descriptorCount = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyDescriptorSet>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyDescriptorSet>::value,
- "CopyDescriptorSet is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eCopyDescriptorSet>
@@ -16657,19 +16075,20 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {},
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {},
- VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT
- : srcSubresource( srcSubresource_ )
+ VULKAN_HPP_NAMESPACE::Extent3D extent_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , srcSubresource( srcSubresource_ )
, srcOffset( srcOffset_ )
, dstSubresource( dstSubresource_ )
, dstOffset( dstOffset_ )
, extent( extent_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ImageCopy2( ImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ImageCopy2( VkImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : ImageCopy2( *reinterpret_cast<ImageCopy2 const *>( &rhs ) )
- {}
+ ImageCopy2( VkImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCopy2( *reinterpret_cast<ImageCopy2 const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ImageCopy2 & operator=( ImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -16687,29 +16106,25 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageCopy2 &
- setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
{
srcSubresource = srcSubresource_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageCopy2 &
- setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
{
srcOffset = srcOffset_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageCopy2 &
- setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
{
dstSubresource = dstSubresource_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageCopy2 &
- setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
{
dstOffset = dstOffset_;
return *this;
@@ -16722,17 +16137,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImageCopy2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageCopy2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageCopy2 *>( this );
}
- explicit operator VkImageCopy2 &() VULKAN_HPP_NOEXCEPT
+ operator VkImageCopy2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageCopy2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -16755,12 +16170,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) &&
- ( srcOffset == rhs.srcOffset ) && ( dstSubresource == rhs.dstSubresource ) &&
- ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) &&
+ ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent );
# endif
}
@@ -16779,12 +16193,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
VULKAN_HPP_NAMESPACE::Extent3D extent = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCopy2 ) == sizeof( VkImageCopy2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCopy2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageCopy2>::value,
- "ImageCopy2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImageCopy2>
@@ -16801,41 +16209,43 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageInfo2;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- CopyImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_ = {},
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
- VULKAN_HPP_NAMESPACE::Image dstImage_ = {},
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
- uint32_t regionCount_ = {},
- const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ = {} ) VULKAN_HPP_NOEXCEPT
- : srcImage( srcImage_ )
+ VULKAN_HPP_CONSTEXPR CopyImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ VULKAN_HPP_NAMESPACE::Image dstImage_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ uint32_t regionCount_ = {},
+ const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , srcImage( srcImage_ )
, srcImageLayout( srcImageLayout_ )
, dstImage( dstImage_ )
, dstImageLayout( dstImageLayout_ )
, regionCount( regionCount_ )
, pRegions( pRegions_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR CopyImageInfo2( CopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CopyImageInfo2( VkCopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : CopyImageInfo2( *reinterpret_cast<CopyImageInfo2 const *>( &rhs ) )
- {}
+ CopyImageInfo2( VkCopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : CopyImageInfo2( *reinterpret_cast<CopyImageInfo2 const *>( &rhs ) ) {}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- CopyImageInfo2(
- VULKAN_HPP_NAMESPACE::Image srcImage_,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_,
- VULKAN_HPP_NAMESPACE::Image dstImage_,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageCopy2> const & regions_ )
- : srcImage( srcImage_ )
+ CopyImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_,
+ VULKAN_HPP_NAMESPACE::Image dstImage_,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageCopy2> const & regions_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , srcImage( srcImage_ )
, srcImageLayout( srcImageLayout_ )
, dstImage( dstImage_ )
, dstImageLayout( dstImageLayout_ )
, regionCount( static_cast<uint32_t>( regions_.size() ) )
, pRegions( regions_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -16860,8 +16270,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 &
- setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
{
srcImageLayout = srcImageLayout_;
return *this;
@@ -16873,8 +16282,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 &
- setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
{
dstImageLayout = dstImageLayout_;
return *this;
@@ -16886,17 +16294,14 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 &
- setPRegions( const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
{
pRegions = pRegions_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- CopyImageInfo2 & setRegions(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageCopy2> const & regions_ )
- VULKAN_HPP_NOEXCEPT
+ CopyImageInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageCopy2> const & regions_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = static_cast<uint32_t>( regions_.size() );
pRegions = regions_.data();
@@ -16905,17 +16310,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkCopyImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkCopyImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCopyImageInfo2 *>( this );
}
- explicit operator VkCopyImageInfo2 &() VULKAN_HPP_NOEXCEPT
+ operator VkCopyImageInfo2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCopyImageInfo2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -16939,13 +16344,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( CopyImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) &&
- ( srcImageLayout == rhs.srcImageLayout ) && ( dstImage == rhs.dstImage ) &&
- ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) &&
- ( pRegions == rhs.pRegions );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) &&
+ ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
# endif
}
@@ -16965,12 +16368,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t regionCount = {};
const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageInfo2 ) == sizeof( VkCopyImageInfo2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyImageInfo2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyImageInfo2>::value,
- "CopyImageInfo2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eCopyImageInfo2>
@@ -16987,37 +16384,42 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToBufferInfo2;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2(
- VULKAN_HPP_NAMESPACE::Image srcImage_ = {},
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
- VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {},
- uint32_t regionCount_ = {},
- const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ = {} ) VULKAN_HPP_NOEXCEPT
- : srcImage( srcImage_ )
+ VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {},
+ uint32_t regionCount_ = {},
+ const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , srcImage( srcImage_ )
, srcImageLayout( srcImageLayout_ )
, dstBuffer( dstBuffer_ )
, regionCount( regionCount_ )
, pRegions( pRegions_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2( CopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CopyImageToBufferInfo2( VkCopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
: CopyImageToBufferInfo2( *reinterpret_cast<CopyImageToBufferInfo2 const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- CopyImageToBufferInfo2(
- VULKAN_HPP_NAMESPACE::Image srcImage_,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_,
- VULKAN_HPP_NAMESPACE::Buffer dstBuffer_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2> const & regions_ )
- : srcImage( srcImage_ )
+ CopyImageToBufferInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_,
+ VULKAN_HPP_NAMESPACE::Buffer dstBuffer_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2> const & regions_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , srcImage( srcImage_ )
, srcImageLayout( srcImageLayout_ )
, dstBuffer( dstBuffer_ )
, regionCount( static_cast<uint32_t>( regions_.size() ) )
, pRegions( regions_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -17036,22 +16438,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 &
- setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
{
srcImage = srcImage_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 &
- setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
{
srcImageLayout = srcImageLayout_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 &
- setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT
{
dstBuffer = dstBuffer_;
return *this;
@@ -17063,17 +16462,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 &
- setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
{
pRegions = pRegions_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- CopyImageToBufferInfo2 & setRegions(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2> const & regions_ )
- VULKAN_HPP_NOEXCEPT
+ CopyImageToBufferInfo2 &
+ setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2> const & regions_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = static_cast<uint32_t>( regions_.size() );
pRegions = regions_.data();
@@ -17082,17 +16479,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkCopyImageToBufferInfo2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkCopyImageToBufferInfo2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCopyImageToBufferInfo2 *>( this );
}
- explicit operator VkCopyImageToBufferInfo2 &() VULKAN_HPP_NOEXCEPT
+ operator VkCopyImageToBufferInfo2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCopyImageToBufferInfo2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -17115,12 +16512,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( CopyImageToBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) &&
- ( srcImageLayout == rhs.srcImageLayout ) && ( dstBuffer == rhs.dstBuffer ) &&
- ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) &&
+ ( dstBuffer == rhs.dstBuffer ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
# endif
}
@@ -17139,13 +16535,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t regionCount = {};
const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 ) ==
- sizeof( VkCopyImageToBufferInfo2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2>::value,
- "CopyImageToBufferInfo2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eCopyImageToBufferInfo2>
@@ -17159,43 +16548,39 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkCopyMemoryToAccelerationStructureInfoKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eCopyMemoryToAccelerationStructureInfoKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR(
- VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {},
- VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {},
- VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ =
- VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone ) VULKAN_HPP_NOEXCEPT
- : src( src_ )
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {},
+ VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {},
+ VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , src( src_ )
, dst( dst_ )
, mode( mode_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR(
- CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CopyMemoryToAccelerationStructureInfoKHR( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs )
- VULKAN_HPP_NOEXCEPT
- : CopyMemoryToAccelerationStructureInfoKHR(
- *reinterpret_cast<CopyMemoryToAccelerationStructureInfoKHR const *>( &rhs ) )
- {}
+ CopyMemoryToAccelerationStructureInfoKHR( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : CopyMemoryToAccelerationStructureInfoKHR( *reinterpret_cast<CopyMemoryToAccelerationStructureInfoKHR const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- CopyMemoryToAccelerationStructureInfoKHR &
- operator=( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ CopyMemoryToAccelerationStructureInfoKHR & operator=( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CopyMemoryToAccelerationStructureInfoKHR &
- operator=( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ CopyMemoryToAccelerationStructureInfoKHR & operator=( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
@@ -17208,8 +16593,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR &
- setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT
{
dst = dst_;
return *this;
@@ -17223,17 +16607,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkCopyMemoryToAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkCopyMemoryToAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( this );
}
- explicit operator VkCopyMemoryToAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkCopyMemoryToAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCopyMemoryToAccelerationStructureInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -17250,22 +16634,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src = {};
- VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {};
- VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode =
- VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src = {};
+ VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {};
+ VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR ) ==
- sizeof( VkCopyMemoryToAccelerationStructureInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR>::value,
- "CopyMemoryToAccelerationStructureInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eCopyMemoryToAccelerationStructureInfoKHR>
@@ -17273,6 +16647,326 @@ namespace VULKAN_HPP_NAMESPACE
using Type = CopyMemoryToAccelerationStructureInfoKHR;
};
+ struct CopyMemoryToMicromapInfoEXT
+ {
+ using NativeType = VkCopyMemoryToMicromapInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToMicromapInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {},
+ VULKAN_HPP_NAMESPACE::MicromapEXT dst_ = {},
+ VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , src( src_ )
+ , dst( dst_ )
+ , mode( mode_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT( CopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyMemoryToMicromapInfoEXT( VkCopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : CopyMemoryToMicromapInfoEXT( *reinterpret_cast<CopyMemoryToMicromapInfoEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ CopyMemoryToMicromapInfoEXT & operator=( CopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyMemoryToMicromapInfoEXT & operator=( VkCopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setSrc( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & src_ ) VULKAN_HPP_NOEXCEPT
+ {
+ src = src_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setDst( VULKAN_HPP_NAMESPACE::MicromapEXT dst_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dst = dst_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setMode( VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ mode = mode_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkCopyMemoryToMicromapInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( this );
+ }
+
+ operator VkCopyMemoryToMicromapInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCopyMemoryToMicromapInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &,
+ VULKAN_HPP_NAMESPACE::MicromapEXT const &,
+ VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, src, dst, mode );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToMicromapInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src = {};
+ VULKAN_HPP_NAMESPACE::MicromapEXT dst = {};
+ VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone;
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eCopyMemoryToMicromapInfoEXT>
+ {
+ using Type = CopyMemoryToMicromapInfoEXT;
+ };
+
+ struct CopyMicromapInfoEXT
+ {
+ using NativeType = VkCopyMicromapInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMicromapInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR CopyMicromapInfoEXT( VULKAN_HPP_NAMESPACE::MicromapEXT src_ = {},
+ VULKAN_HPP_NAMESPACE::MicromapEXT dst_ = {},
+ VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , src( src_ )
+ , dst( dst_ )
+ , mode( mode_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR CopyMicromapInfoEXT( CopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyMicromapInfoEXT( VkCopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : CopyMicromapInfoEXT( *reinterpret_cast<CopyMicromapInfoEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ CopyMicromapInfoEXT & operator=( CopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyMicromapInfoEXT & operator=( VkCopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setSrc( VULKAN_HPP_NAMESPACE::MicromapEXT src_ ) VULKAN_HPP_NOEXCEPT
+ {
+ src = src_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setDst( VULKAN_HPP_NAMESPACE::MicromapEXT dst_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dst = dst_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setMode( VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ mode = mode_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkCopyMicromapInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkCopyMicromapInfoEXT *>( this );
+ }
+
+ operator VkCopyMicromapInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCopyMicromapInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ VULKAN_HPP_NAMESPACE::MicromapEXT const &,
+ VULKAN_HPP_NAMESPACE::MicromapEXT const &,
+ VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, src, dst, mode );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( CopyMicromapInfoEXT const & ) const = default;
+#else
+ bool operator==( CopyMicromapInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( src == rhs.src ) && ( dst == rhs.dst ) && ( mode == rhs.mode );
+# endif
+ }
+
+ bool operator!=( CopyMicromapInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMicromapInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::MicromapEXT src = {};
+ VULKAN_HPP_NAMESPACE::MicromapEXT dst = {};
+ VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone;
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eCopyMicromapInfoEXT>
+ {
+ using Type = CopyMicromapInfoEXT;
+ };
+
+ struct CopyMicromapToMemoryInfoEXT
+ {
+ using NativeType = VkCopyMicromapToMemoryInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMicromapToMemoryInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT( VULKAN_HPP_NAMESPACE::MicromapEXT src_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {},
+ VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , src( src_ )
+ , dst( dst_ )
+ , mode( mode_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT( CopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyMicromapToMemoryInfoEXT( VkCopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : CopyMicromapToMemoryInfoEXT( *reinterpret_cast<CopyMicromapToMemoryInfoEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ CopyMicromapToMemoryInfoEXT & operator=( CopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ CopyMicromapToMemoryInfoEXT & operator=( VkCopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setSrc( VULKAN_HPP_NAMESPACE::MicromapEXT src_ ) VULKAN_HPP_NOEXCEPT
+ {
+ src = src_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setDst( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dst_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dst = dst_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setMode( VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ mode = mode_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkCopyMicromapToMemoryInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( this );
+ }
+
+ operator VkCopyMicromapToMemoryInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkCopyMicromapToMemoryInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ VULKAN_HPP_NAMESPACE::MicromapEXT const &,
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const &,
+ VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, src, dst, mode );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMicromapToMemoryInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::MicromapEXT src = {};
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst = {};
+ VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone;
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eCopyMicromapToMemoryInfoEXT>
+ {
+ using Type = CopyMicromapToMemoryInfoEXT;
+ };
+
struct CuFunctionCreateInfoNVX
{
using NativeType = VkCuFunctionCreateInfoNVX;
@@ -17281,17 +16975,20 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuFunctionCreateInfoNVX;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR CuFunctionCreateInfoNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module_ = {},
- const char * pName_ = {} ) VULKAN_HPP_NOEXCEPT
- : module( module_ )
+ VULKAN_HPP_CONSTEXPR
+ CuFunctionCreateInfoNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module_ = {}, const char * pName_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , module( module_ )
, pName( pName_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR CuFunctionCreateInfoNVX( CuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CuFunctionCreateInfoNVX( VkCuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
: CuFunctionCreateInfoNVX( *reinterpret_cast<CuFunctionCreateInfoNVX const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
CuFunctionCreateInfoNVX & operator=( CuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -17309,8 +17006,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX &
- setModule( VULKAN_HPP_NAMESPACE::CuModuleNVX module_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setModule( VULKAN_HPP_NAMESPACE::CuModuleNVX module_ ) VULKAN_HPP_NOEXCEPT
{
module = module_;
return *this;
@@ -17323,24 +17019,21 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkCuFunctionCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT
+ operator VkCuFunctionCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( this );
}
- explicit operator VkCuFunctionCreateInfoNVX &() VULKAN_HPP_NOEXCEPT
+ operator VkCuFunctionCreateInfoNVX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCuFunctionCreateInfoNVX *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::CuModuleNVX const &,
- const char * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CuModuleNVX const &, const char * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -17367,8 +17060,7 @@ namespace VULKAN_HPP_NAMESPACE
bool operator==( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( module == rhs.module ) &&
- ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( module == rhs.module ) && ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) );
}
bool operator!=( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
@@ -17382,13 +17074,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::CuModuleNVX module = {};
const char * pName = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX ) ==
- sizeof( VkCuFunctionCreateInfoNVX ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX>::value,
- "CuFunctionCreateInfoNVX is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eCuFunctionCreateInfoNVX>
@@ -17415,8 +17100,10 @@ namespace VULKAN_HPP_NAMESPACE
size_t paramCount_ = {},
const void * const * pParams_ = {},
size_t extraCount_ = {},
- const void * const * pExtras_ = {} ) VULKAN_HPP_NOEXCEPT
- : function( function_ )
+ const void * const * pExtras_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , function( function_ )
, gridDimX( gridDimX_ )
, gridDimY( gridDimY_ )
, gridDimZ( gridDimZ_ )
@@ -17428,13 +17115,12 @@ namespace VULKAN_HPP_NAMESPACE
, pParams( pParams_ )
, extraCount( extraCount_ )
, pExtras( pExtras_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR CuLaunchInfoNVX( CuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- CuLaunchInfoNVX( VkCuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- : CuLaunchInfoNVX( *reinterpret_cast<CuLaunchInfoNVX const *>( &rhs ) )
- {}
+ CuLaunchInfoNVX( VkCuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT : CuLaunchInfoNVX( *reinterpret_cast<CuLaunchInfoNVX const *>( &rhs ) ) {}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
CuLaunchInfoNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function_,
@@ -17446,8 +17132,10 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t blockDimZ_,
uint32_t sharedMemBytes_,
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const void * const> const & params_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const void * const> const & extras_ = {} )
- : function( function_ )
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const void * const> const & extras_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , function( function_ )
, gridDimX( gridDimX_ )
, gridDimY( gridDimY_ )
, gridDimZ( gridDimZ_ )
@@ -17459,7 +17147,8 @@ namespace VULKAN_HPP_NAMESPACE
, pParams( params_.data() )
, extraCount( extras_.size() )
, pExtras( extras_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -17478,8 +17167,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX &
- setFunction( VULKAN_HPP_NAMESPACE::CuFunctionNVX function_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setFunction( VULKAN_HPP_NAMESPACE::CuFunctionNVX function_ ) VULKAN_HPP_NOEXCEPT
{
function = function_;
return *this;
@@ -17540,8 +17228,7 @@ namespace VULKAN_HPP_NAMESPACE
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- CuLaunchInfoNVX &
- setParams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const void * const> const & params_ ) VULKAN_HPP_NOEXCEPT
+ CuLaunchInfoNVX & setParams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const void * const> const & params_ ) VULKAN_HPP_NOEXCEPT
{
paramCount = params_.size();
pParams = params_.data();
@@ -17562,8 +17249,7 @@ namespace VULKAN_HPP_NAMESPACE
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- CuLaunchInfoNVX &
- setExtras( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const void * const> const & extras_ ) VULKAN_HPP_NOEXCEPT
+ CuLaunchInfoNVX & setExtras( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const void * const> const & extras_ ) VULKAN_HPP_NOEXCEPT
{
extraCount = extras_.size();
pExtras = extras_.data();
@@ -17572,17 +17258,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkCuLaunchInfoNVX const &() const VULKAN_HPP_NOEXCEPT
+ operator VkCuLaunchInfoNVX const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCuLaunchInfoNVX *>( this );
}
- explicit operator VkCuLaunchInfoNVX &() VULKAN_HPP_NOEXCEPT
+ operator VkCuLaunchInfoNVX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCuLaunchInfoNVX *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -17603,20 +17289,8 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- function,
- gridDimX,
- gridDimY,
- gridDimZ,
- blockDimX,
- blockDimY,
- blockDimZ,
- sharedMemBytes,
- paramCount,
- pParams,
- extraCount,
- pExtras );
+ return std::tie(
+ sType, pNext, function, gridDimX, gridDimY, gridDimZ, blockDimX, blockDimY, blockDimZ, sharedMemBytes, paramCount, pParams, extraCount, pExtras );
}
#endif
@@ -17625,14 +17299,13 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( CuLaunchInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( function == rhs.function ) &&
- ( gridDimX == rhs.gridDimX ) && ( gridDimY == rhs.gridDimY ) && ( gridDimZ == rhs.gridDimZ ) &&
- ( blockDimX == rhs.blockDimX ) && ( blockDimY == rhs.blockDimY ) && ( blockDimZ == rhs.blockDimZ ) &&
- ( sharedMemBytes == rhs.sharedMemBytes ) && ( paramCount == rhs.paramCount ) &&
- ( pParams == rhs.pParams ) && ( extraCount == rhs.extraCount ) && ( pExtras == rhs.pExtras );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( function == rhs.function ) && ( gridDimX == rhs.gridDimX ) && ( gridDimY == rhs.gridDimY ) &&
+ ( gridDimZ == rhs.gridDimZ ) && ( blockDimX == rhs.blockDimX ) && ( blockDimY == rhs.blockDimY ) && ( blockDimZ == rhs.blockDimZ ) &&
+ ( sharedMemBytes == rhs.sharedMemBytes ) && ( paramCount == rhs.paramCount ) && ( pParams == rhs.pParams ) && ( extraCount == rhs.extraCount ) &&
+ ( pExtras == rhs.pExtras );
# endif
}
@@ -17658,12 +17331,6 @@ namespace VULKAN_HPP_NAMESPACE
size_t extraCount = {};
const void * const * pExtras = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX ) == sizeof( VkCuLaunchInfoNVX ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX>::value,
- "CuLaunchInfoNVX is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eCuLaunchInfoNVX>
@@ -17679,22 +17346,26 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuModuleCreateInfoNVX;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX( size_t dataSize_ = {}, const void * pData_ = {} ) VULKAN_HPP_NOEXCEPT
- : dataSize( dataSize_ )
+ VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX( size_t dataSize_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , dataSize( dataSize_ )
, pData( pData_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX( CuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
CuModuleCreateInfoNVX( VkCuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
: CuModuleCreateInfoNVX( *reinterpret_cast<CuModuleCreateInfoNVX const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
- CuModuleCreateInfoNVX( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ )
- : dataSize( data_.size() * sizeof( T ) ), pData( data_.data() )
- {}
+ CuModuleCreateInfoNVX( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_, const void * pNext_ = nullptr )
+ : pNext( pNext_ ), dataSize( data_.size() * sizeof( T ) ), pData( data_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -17727,8 +17398,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
- CuModuleCreateInfoNVX &
- setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
+ CuModuleCreateInfoNVX & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
{
dataSize = data_.size() * sizeof( T );
pData = data_.data();
@@ -17737,17 +17407,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkCuModuleCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT
+ operator VkCuModuleCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkCuModuleCreateInfoNVX *>( this );
}
- explicit operator VkCuModuleCreateInfoNVX &() VULKAN_HPP_NOEXCEPT
+ operator VkCuModuleCreateInfoNVX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkCuModuleCreateInfoNVX *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -17764,7 +17434,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( CuModuleCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData );
@@ -17783,12 +17453,6 @@ namespace VULKAN_HPP_NAMESPACE
size_t dataSize = {};
const void * pData = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX ) == sizeof( VkCuModuleCreateInfoNVX ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX>::value,
- "CuModuleCreateInfoNVX is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eCuModuleCreateInfoNVX>
@@ -17808,28 +17472,34 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( uint32_t waitSemaphoreValuesCount_ = {},
const uint64_t * pWaitSemaphoreValues_ = {},
uint32_t signalSemaphoreValuesCount_ = {},
- const uint64_t * pSignalSemaphoreValues_ = {} ) VULKAN_HPP_NOEXCEPT
- : waitSemaphoreValuesCount( waitSemaphoreValuesCount_ )
+ const uint64_t * pSignalSemaphoreValues_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , waitSemaphoreValuesCount( waitSemaphoreValuesCount_ )
, pWaitSemaphoreValues( pWaitSemaphoreValues_ )
, signalSemaphoreValuesCount( signalSemaphoreValuesCount_ )
, pSignalSemaphoreValues( pSignalSemaphoreValues_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: D3D12FenceSubmitInfoKHR( *reinterpret_cast<D3D12FenceSubmitInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- D3D12FenceSubmitInfoKHR(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ = {} )
- : waitSemaphoreValuesCount( static_cast<uint32_t>( waitSemaphoreValues_.size() ) )
+ D3D12FenceSubmitInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , waitSemaphoreValuesCount( static_cast<uint32_t>( waitSemaphoreValues_.size() ) )
, pWaitSemaphoreValues( waitSemaphoreValues_.data() )
, signalSemaphoreValuesCount( static_cast<uint32_t>( signalSemaphoreValues_.size() ) )
, pSignalSemaphoreValues( signalSemaphoreValues_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -17848,23 +17518,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR &
- setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
{
waitSemaphoreValuesCount = waitSemaphoreValuesCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR &
- setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
{
pWaitSemaphoreValues = pWaitSemaphoreValues_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- D3D12FenceSubmitInfoKHR & setWaitSemaphoreValues(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+ D3D12FenceSubmitInfoKHR &
+ setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
{
waitSemaphoreValuesCount = static_cast<uint32_t>( waitSemaphoreValues_.size() );
pWaitSemaphoreValues = waitSemaphoreValues_.data();
@@ -17872,23 +17540,21 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR &
- setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
{
signalSemaphoreValuesCount = signalSemaphoreValuesCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR &
- setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
{
pSignalSemaphoreValues = pSignalSemaphoreValues_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- D3D12FenceSubmitInfoKHR & setSignalSemaphoreValues(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+ D3D12FenceSubmitInfoKHR &
+ setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
{
signalSemaphoreValuesCount = static_cast<uint32_t>( signalSemaphoreValues_.size() );
pSignalSemaphoreValues = signalSemaphoreValues_.data();
@@ -17897,17 +17563,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkD3D12FenceSubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkD3D12FenceSubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkD3D12FenceSubmitInfoKHR *>( this );
}
- explicit operator VkD3D12FenceSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkD3D12FenceSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkD3D12FenceSubmitInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -17920,12 +17586,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- waitSemaphoreValuesCount,
- pWaitSemaphoreValues,
- signalSemaphoreValuesCount,
- pSignalSemaphoreValues );
+ return std::tie( sType, pNext, waitSemaphoreValuesCount, pWaitSemaphoreValues, signalSemaphoreValuesCount, pSignalSemaphoreValues );
}
# endif
@@ -17934,13 +17595,11 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount ) &&
- ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) &&
- ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount ) &&
+ ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) && ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount ) &&
( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
# endif
}
@@ -17959,13 +17618,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t signalSemaphoreValuesCount = {};
const uint64_t * pSignalSemaphoreValues = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR ) ==
- sizeof( VkD3D12FenceSubmitInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR>::value,
- "D3D12FenceSubmitInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eD3D12FenceSubmitInfoKHR>
@@ -17982,18 +17634,20 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerMarkerInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( const char * pMarkerName_ = {},
- std::array<float, 4> const & color_ = {} ) VULKAN_HPP_NOEXCEPT
- : pMarkerName( pMarkerName_ )
+ VULKAN_HPP_CONSTEXPR_14
+ DebugMarkerMarkerInfoEXT( const char * pMarkerName_ = {}, std::array<float, 4> const & color_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pMarkerName( pMarkerName_ )
, color( color_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14
- DebugMarkerMarkerInfoEXT( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DebugMarkerMarkerInfoEXT( *reinterpret_cast<DebugMarkerMarkerInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DebugMarkerMarkerInfoEXT & operator=( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -18024,24 +17678,21 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDebugMarkerMarkerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDebugMarkerMarkerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( this );
}
- explicit operator VkDebugMarkerMarkerInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkDebugMarkerMarkerInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDebugMarkerMarkerInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- const char * const &,
- VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const char * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -18068,8 +17719,7 @@ namespace VULKAN_HPP_NAMESPACE
bool operator==( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( ( pMarkerName == rhs.pMarkerName ) || ( strcmp( pMarkerName, rhs.pMarkerName ) == 0 ) ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ( pMarkerName == rhs.pMarkerName ) || ( strcmp( pMarkerName, rhs.pMarkerName ) == 0 ) ) &&
( color == rhs.color );
}
@@ -18084,13 +17734,6 @@ namespace VULKAN_HPP_NAMESPACE
const char * pMarkerName = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> color = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT ) ==
- sizeof( VkDebugMarkerMarkerInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT>::value,
- "DebugMarkerMarkerInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDebugMarkerMarkerInfoEXT>
@@ -18106,21 +17749,24 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectNameInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ =
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown,
- uint64_t object_ = {},
- const char * pObjectName_ = {} ) VULKAN_HPP_NOEXCEPT
- : objectType( objectType_ )
+ VULKAN_HPP_CONSTEXPR
+ DebugMarkerObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown,
+ uint64_t object_ = {},
+ const char * pObjectName_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , objectType( objectType_ )
, object( object_ )
, pObjectName( pObjectName_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DebugMarkerObjectNameInfoEXT( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DebugMarkerObjectNameInfoEXT( *reinterpret_cast<DebugMarkerObjectNameInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DebugMarkerObjectNameInfoEXT & operator=( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -18138,8 +17784,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT &
- setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
{
objectType = objectType_;
return *this;
@@ -18151,25 +17796,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT &
- setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT
{
pObjectName = pObjectName_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDebugMarkerObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDebugMarkerObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( this );
}
- explicit operator VkDebugMarkerObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkDebugMarkerObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDebugMarkerObjectNameInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -18206,8 +17850,7 @@ namespace VULKAN_HPP_NAMESPACE
bool operator==( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) &&
- ( object == rhs.object ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( object == rhs.object ) &&
( ( pObjectName == rhs.pObjectName ) || ( strcmp( pObjectName, rhs.pObjectName ) == 0 ) );
}
@@ -18217,21 +17860,12 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType =
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
- uint64_t object = {};
- const char * pObjectName = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+ uint64_t object = {};
+ const char * pObjectName = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT ) ==
- sizeof( VkDebugMarkerObjectNameInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT>::value,
- "DebugMarkerObjectNameInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDebugMarkerObjectNameInfoEXT>
@@ -18247,38 +17881,39 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectTagInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ =
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown,
- uint64_t object_ = {},
- uint64_t tagName_ = {},
- size_t tagSize_ = {},
- const void * pTag_ = {} ) VULKAN_HPP_NOEXCEPT
- : objectType( objectType_ )
+ VULKAN_HPP_CONSTEXPR
+ DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown,
+ uint64_t object_ = {},
+ uint64_t tagName_ = {},
+ size_t tagSize_ = {},
+ const void * pTag_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , objectType( objectType_ )
, object( object_ )
, tagName( tagName_ )
, tagSize( tagSize_ )
, pTag( pTag_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DebugMarkerObjectTagInfoEXT( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DebugMarkerObjectTagInfoEXT( *reinterpret_cast<DebugMarkerObjectTagInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_,
uint64_t object_,
uint64_t tagName_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ )
- : objectType( objectType_ )
- , object( object_ )
- , tagName( tagName_ )
- , tagSize( tag_.size() * sizeof( T ) )
- , pTag( tag_.data() )
- {}
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ ), objectType( objectType_ ), object( object_ ), tagName( tagName_ ), tagSize( tag_.size() * sizeof( T ) ), pTag( tag_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -18297,8 +17932,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT &
- setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
{
objectType = objectType_;
return *this;
@@ -18330,8 +17964,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
- DebugMarkerObjectTagInfoEXT &
- setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ ) VULKAN_HPP_NOEXCEPT
+ DebugMarkerObjectTagInfoEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ ) VULKAN_HPP_NOEXCEPT
{
tagSize = tag_.size() * sizeof( T );
pTag = tag_.data();
@@ -18340,17 +17973,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDebugMarkerObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDebugMarkerObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( this );
}
- explicit operator VkDebugMarkerObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkDebugMarkerObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDebugMarkerObjectTagInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -18373,12 +18006,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DebugMarkerObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) &&
- ( object == rhs.object ) && ( tagName == rhs.tagName ) && ( tagSize == rhs.tagSize ) &&
- ( pTag == rhs.pTag );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( object == rhs.object ) && ( tagName == rhs.tagName ) &&
+ ( tagSize == rhs.tagSize ) && ( pTag == rhs.pTag );
# endif
}
@@ -18389,23 +18021,14 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType =
- VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
- uint64_t object = {};
- uint64_t tagName = {};
- size_t tagSize = {};
- const void * pTag = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT ) ==
- sizeof( VkDebugMarkerObjectTagInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT>::value,
- "DebugMarkerObjectTagInfoEXT is not nothrow_move_constructible!" );
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+ uint64_t object = {};
+ uint64_t tagName = {};
+ size_t tagSize = {};
+ const void * pTag = {};
+ };
template <>
struct CppType<StructureType, StructureType::eDebugMarkerObjectTagInfoEXT>
@@ -18418,27 +18041,29 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDebugReportCallbackCreateInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugReportCallbackCreateInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugReportCallbackCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ = {},
PFN_vkDebugReportCallbackEXT pfnCallback_ = {},
- void * pUserData_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ void * pUserData_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, pfnCallback( pfnCallback_ )
, pUserData( pUserData_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DebugReportCallbackCreateInfoEXT( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DebugReportCallbackCreateInfoEXT( *reinterpret_cast<DebugReportCallbackCreateInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DebugReportCallbackCreateInfoEXT &
- operator=( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DebugReportCallbackCreateInfoEXT & operator=( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DebugReportCallbackCreateInfoEXT & operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -18453,15 +18078,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT &
- setFlags( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT &
- setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ ) VULKAN_HPP_NOEXCEPT
{
pfnCallback = pfnCallback_;
return *this;
@@ -18474,17 +18097,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDebugReportCallbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDebugReportCallbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( this );
}
- explicit operator VkDebugReportCallbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkDebugReportCallbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDebugReportCallbackCreateInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -18500,24 +18123,19 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif
-#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( DebugReportCallbackCreateInfoEXT const & ) const = default;
-#else
bool operator==( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
-# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( pfnCallback == rhs.pfnCallback ) && ( pUserData == rhs.pUserData );
-# endif
+#else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pfnCallback == rhs.pfnCallback ) && ( pUserData == rhs.pUserData );
+#endif
}
bool operator!=( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
-#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT;
@@ -18526,14 +18144,6 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkDebugReportCallbackEXT pfnCallback = {};
void * pUserData = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT ) ==
- sizeof( VkDebugReportCallbackCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT>::value,
- "DebugReportCallbackCreateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDebugReportCallbackCreateInfoEXT>
@@ -18549,17 +18159,17 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsLabelEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( const char * pLabelName_ = {},
- std::array<float, 4> const & color_ = {} ) VULKAN_HPP_NOEXCEPT
- : pLabelName( pLabelName_ )
+ VULKAN_HPP_CONSTEXPR_14
+ DebugUtilsLabelEXT( const char * pLabelName_ = {}, std::array<float, 4> const & color_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pLabelName( pLabelName_ )
, color( color_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : DebugUtilsLabelEXT( *reinterpret_cast<DebugUtilsLabelEXT const *>( &rhs ) )
- {}
+ DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DebugUtilsLabelEXT( *reinterpret_cast<DebugUtilsLabelEXT const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DebugUtilsLabelEXT & operator=( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -18590,24 +18200,21 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDebugUtilsLabelEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDebugUtilsLabelEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDebugUtilsLabelEXT *>( this );
}
- explicit operator VkDebugUtilsLabelEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkDebugUtilsLabelEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDebugUtilsLabelEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- const char * const &,
- VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const char * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -18634,8 +18241,7 @@ namespace VULKAN_HPP_NAMESPACE
bool operator==( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( ( pLabelName == rhs.pLabelName ) || ( strcmp( pLabelName, rhs.pLabelName ) == 0 ) ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ( pLabelName == rhs.pLabelName ) || ( strcmp( pLabelName, rhs.pLabelName ) == 0 ) ) &&
( color == rhs.color );
}
@@ -18650,12 +18256,6 @@ namespace VULKAN_HPP_NAMESPACE
const char * pLabelName = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> color = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT>::value,
- "DebugUtilsLabelEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDebugUtilsLabelEXT>
@@ -18671,21 +18271,23 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectNameInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT(
- VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown,
- uint64_t objectHandle_ = {},
- const char * pObjectName_ = {} ) VULKAN_HPP_NOEXCEPT
- : objectType( objectType_ )
+ VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown,
+ uint64_t objectHandle_ = {},
+ const char * pObjectName_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , objectType( objectType_ )
, objectHandle( objectHandle_ )
, pObjectName( pObjectName_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DebugUtilsObjectNameInfoEXT( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DebugUtilsObjectNameInfoEXT( *reinterpret_cast<DebugUtilsObjectNameInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DebugUtilsObjectNameInfoEXT & operator=( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -18703,8 +18305,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT &
- setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
{
objectType = objectType_;
return *this;
@@ -18716,33 +18317,29 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT &
- setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT
{
pObjectName = pObjectName_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDebugUtilsObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDebugUtilsObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( this );
}
- explicit operator VkDebugUtilsObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkDebugUtilsObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDebugUtilsObjectNameInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::ObjectType const &,
- uint64_t const &,
- const char * const &>
+ std::
+ tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ObjectType const &, uint64_t const &, const char * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -18771,8 +18368,7 @@ namespace VULKAN_HPP_NAMESPACE
bool operator==( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) &&
- ( objectHandle == rhs.objectHandle ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( objectHandle == rhs.objectHandle ) &&
( ( pObjectName == rhs.pObjectName ) || ( strcmp( pObjectName, rhs.pObjectName ) == 0 ) );
}
@@ -18788,14 +18384,6 @@ namespace VULKAN_HPP_NAMESPACE
uint64_t objectHandle = {};
const char * pObjectName = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT ) ==
- sizeof( VkDebugUtilsObjectNameInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT>::value,
- "DebugUtilsObjectNameInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDebugUtilsObjectNameInfoEXT>
@@ -18808,22 +18396,22 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDebugUtilsMessengerCallbackDataEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eDebugUtilsMessengerCallbackDataEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT(
- VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = {},
- const char * pMessageIdName_ = {},
- int32_t messageIdNumber_ = {},
- const char * pMessage_ = {},
- uint32_t queueLabelCount_ = {},
- const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ = {},
- uint32_t cmdBufLabelCount_ = {},
- const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ = {},
- uint32_t objectCount_ = {},
- const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = {},
+ const char * pMessageIdName_ = {},
+ int32_t messageIdNumber_ = {},
+ const char * pMessage_ = {},
+ uint32_t queueLabelCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ = {},
+ uint32_t cmdBufLabelCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ = {},
+ uint32_t objectCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, pMessageIdName( pMessageIdName_ )
, messageIdNumber( messageIdNumber_ )
, pMessage( pMessage_ )
@@ -18833,28 +18421,28 @@ namespace VULKAN_HPP_NAMESPACE
, pCmdBufLabels( pCmdBufLabels_ )
, objectCount( objectCount_ )
, pObjects( pObjects_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataEXT const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DebugUtilsMessengerCallbackDataEXT( *reinterpret_cast<DebugUtilsMessengerCallbackDataEXT const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DebugUtilsMessengerCallbackDataEXT(
- VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_,
- const char * pMessageIdName_,
- int32_t messageIdNumber_,
- const char * pMessage_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const &
- queueLabels_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const &
- cmdBufLabels_ = {},
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT> const &
- objects_ = {} )
- : flags( flags_ )
+ VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_,
+ const char * pMessageIdName_,
+ int32_t messageIdNumber_,
+ const char * pMessage_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & queueLabels_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & cmdBufLabels_ = {},
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT> const & objects_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, pMessageIdName( pMessageIdName_ )
, messageIdNumber( messageIdNumber_ )
, pMessage( pMessage_ )
@@ -18864,15 +18452,14 @@ namespace VULKAN_HPP_NAMESPACE
, pCmdBufLabels( cmdBufLabels_.data() )
, objectCount( static_cast<uint32_t>( objects_.size() ) )
, pObjects( objects_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DebugUtilsMessengerCallbackDataEXT &
- operator=( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DebugUtilsMessengerCallbackDataEXT & operator=( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DebugUtilsMessengerCallbackDataEXT &
- operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ DebugUtilsMessengerCallbackDataEXT & operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT const *>( &rhs );
return *this;
@@ -18886,51 +18473,46 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT &
- setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT &
- setPMessageIdName( const char * pMessageIdName_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPMessageIdName( const char * pMessageIdName_ ) VULKAN_HPP_NOEXCEPT
{
pMessageIdName = pMessageIdName_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT &
- setMessageIdNumber( int32_t messageIdNumber_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setMessageIdNumber( int32_t messageIdNumber_ ) VULKAN_HPP_NOEXCEPT
{
messageIdNumber = messageIdNumber_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT &
- setPMessage( const char * pMessage_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPMessage( const char * pMessage_ ) VULKAN_HPP_NOEXCEPT
{
pMessage = pMessage_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT &
- setQueueLabelCount( uint32_t queueLabelCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setQueueLabelCount( uint32_t queueLabelCount_ ) VULKAN_HPP_NOEXCEPT
{
queueLabelCount = queueLabelCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT &
- setPQueueLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ ) VULKAN_HPP_NOEXCEPT
+ setPQueueLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ ) VULKAN_HPP_NOEXCEPT
{
pQueueLabels = pQueueLabels_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- DebugUtilsMessengerCallbackDataEXT & setQueueLabels(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const &
- queueLabels_ ) VULKAN_HPP_NOEXCEPT
+ DebugUtilsMessengerCallbackDataEXT &
+ setQueueLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & queueLabels_ ) VULKAN_HPP_NOEXCEPT
{
queueLabelCount = static_cast<uint32_t>( queueLabels_.size() );
pQueueLabels = queueLabels_.data();
@@ -18938,24 +18520,22 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT &
- setCmdBufLabelCount( uint32_t cmdBufLabelCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setCmdBufLabelCount( uint32_t cmdBufLabelCount_ ) VULKAN_HPP_NOEXCEPT
{
cmdBufLabelCount = cmdBufLabelCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT &
- setPCmdBufLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
+ setPCmdBufLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
{
pCmdBufLabels = pCmdBufLabels_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- DebugUtilsMessengerCallbackDataEXT & setCmdBufLabels(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const &
- cmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
+ DebugUtilsMessengerCallbackDataEXT &
+ setCmdBufLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & cmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
{
cmdBufLabelCount = static_cast<uint32_t>( cmdBufLabels_.size() );
pCmdBufLabels = cmdBufLabels_.data();
@@ -18963,24 +18543,22 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT &
- setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT
{
objectCount = objectCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT &
- setPObjects( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ ) VULKAN_HPP_NOEXCEPT
+ setPObjects( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ ) VULKAN_HPP_NOEXCEPT
{
pObjects = pObjects_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- DebugUtilsMessengerCallbackDataEXT & setObjects(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT> const &
- objects_ ) VULKAN_HPP_NOEXCEPT
+ DebugUtilsMessengerCallbackDataEXT &
+ setObjects( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT> const & objects_ ) VULKAN_HPP_NOEXCEPT
{
objectCount = static_cast<uint32_t>( objects_.size() );
pObjects = objects_.data();
@@ -18989,17 +18567,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDebugUtilsMessengerCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDebugUtilsMessengerCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( this );
}
- explicit operator VkDebugUtilsMessengerCallbackDataEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkDebugUtilsMessengerCallbackDataEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDebugUtilsMessengerCallbackDataEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -19018,18 +18596,8 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- flags,
- pMessageIdName,
- messageIdNumber,
- pMessage,
- queueLabelCount,
- pQueueLabels,
- cmdBufLabelCount,
- pCmdBufLabels,
- objectCount,
- pObjects );
+ return std::tie(
+ sType, pNext, flags, pMessageIdName, messageIdNumber, pMessage, queueLabelCount, pQueueLabels, cmdBufLabelCount, pCmdBufLabels, objectCount, pObjects );
}
#endif
@@ -19071,11 +18639,9 @@ namespace VULKAN_HPP_NAMESPACE
{
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
( ( pMessageIdName == rhs.pMessageIdName ) || ( strcmp( pMessageIdName, rhs.pMessageIdName ) == 0 ) ) &&
- ( messageIdNumber == rhs.messageIdNumber ) &&
- ( ( pMessage == rhs.pMessage ) || ( strcmp( pMessage, rhs.pMessage ) == 0 ) ) &&
- ( queueLabelCount == rhs.queueLabelCount ) && ( pQueueLabels == rhs.pQueueLabels ) &&
- ( cmdBufLabelCount == rhs.cmdBufLabelCount ) && ( pCmdBufLabels == rhs.pCmdBufLabels ) &&
- ( objectCount == rhs.objectCount ) && ( pObjects == rhs.pObjects );
+ ( messageIdNumber == rhs.messageIdNumber ) && ( ( pMessage == rhs.pMessage ) || ( strcmp( pMessage, rhs.pMessage ) == 0 ) ) &&
+ ( queueLabelCount == rhs.queueLabelCount ) && ( pQueueLabels == rhs.pQueueLabels ) && ( cmdBufLabelCount == rhs.cmdBufLabelCount ) &&
+ ( pCmdBufLabels == rhs.pCmdBufLabels ) && ( objectCount == rhs.objectCount ) && ( pObjects == rhs.pObjects );
}
bool operator!=( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
@@ -19084,8 +18650,8 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
+ const void * pNext = {};
VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags = {};
const char * pMessageIdName = {};
int32_t messageIdNumber = {};
@@ -19097,14 +18663,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t objectCount = {};
const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT ) ==
- sizeof( VkDebugUtilsMessengerCallbackDataEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT>::value,
- "DebugUtilsMessengerCallbackDataEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDebugUtilsMessengerCallbackDataEXT>
@@ -19117,32 +18675,33 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDebugUtilsMessengerCreateInfoEXT;
static const bool allowDuplicate = true;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- DebugUtilsMessengerCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = {},
- VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {},
- VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = {},
- PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = {},
- void * pUserData_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = {},
+ VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {},
+ VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = {},
+ PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = {},
+ void * pUserData_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, messageSeverity( messageSeverity_ )
, messageType( messageType_ )
, pfnUserCallback( pfnUserCallback_ )
, pUserData( pUserData_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DebugUtilsMessengerCreateInfoEXT( *reinterpret_cast<DebugUtilsMessengerCreateInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DebugUtilsMessengerCreateInfoEXT &
- operator=( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DebugUtilsMessengerCreateInfoEXT & operator=( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DebugUtilsMessengerCreateInfoEXT & operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -19157,29 +18716,27 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT &
- setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT &
- setMessageSeverity( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) VULKAN_HPP_NOEXCEPT
+ setMessageSeverity( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) VULKAN_HPP_NOEXCEPT
{
messageSeverity = messageSeverity_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT &
- setMessageType( VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ ) VULKAN_HPP_NOEXCEPT
+ setMessageType( VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ ) VULKAN_HPP_NOEXCEPT
{
messageType = messageType_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT &
- setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
{
pfnUserCallback = pfnUserCallback_;
return *this;
@@ -19192,17 +18749,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDebugUtilsMessengerCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDebugUtilsMessengerCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( this );
}
- explicit operator VkDebugUtilsMessengerCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkDebugUtilsMessengerCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDebugUtilsMessengerCreateInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -19220,43 +18777,30 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif
-#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( DebugUtilsMessengerCreateInfoEXT const & ) const = default;
-#else
bool operator==( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
-# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( messageSeverity == rhs.messageSeverity ) && ( messageType == rhs.messageType ) &&
- ( pfnUserCallback == rhs.pfnUserCallback ) && ( pUserData == rhs.pUserData );
-# endif
+#else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( messageSeverity == rhs.messageSeverity ) &&
+ ( messageType == rhs.messageType ) && ( pfnUserCallback == rhs.pfnUserCallback ) && ( pUserData == rhs.pUserData );
+#endif
}
bool operator!=( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
-#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags = {};
VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity = {};
VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType = {};
PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback = {};
void * pUserData = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT ) ==
- sizeof( VkDebugUtilsMessengerCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT>::value,
- "DebugUtilsMessengerCreateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDebugUtilsMessengerCreateInfoEXT>
@@ -19272,38 +18816,43 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectTagInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT(
- VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown,
- uint64_t objectHandle_ = {},
- uint64_t tagName_ = {},
- size_t tagSize_ = {},
- const void * pTag_ = {} ) VULKAN_HPP_NOEXCEPT
- : objectType( objectType_ )
+ VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown,
+ uint64_t objectHandle_ = {},
+ uint64_t tagName_ = {},
+ size_t tagSize_ = {},
+ const void * pTag_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , objectType( objectType_ )
, objectHandle( objectHandle_ )
, tagName( tagName_ )
, tagSize( tagSize_ )
, pTag( pTag_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DebugUtilsObjectTagInfoEXT( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DebugUtilsObjectTagInfoEXT( *reinterpret_cast<DebugUtilsObjectTagInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
uint64_t objectHandle_,
uint64_t tagName_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ )
- : objectType( objectType_ )
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , objectType( objectType_ )
, objectHandle( objectHandle_ )
, tagName( tagName_ )
, tagSize( tag_.size() * sizeof( T ) )
, pTag( tag_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -19322,8 +18871,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT &
- setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
{
objectType = objectType_;
return *this;
@@ -19355,8 +18903,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
- DebugUtilsObjectTagInfoEXT &
- setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ ) VULKAN_HPP_NOEXCEPT
+ DebugUtilsObjectTagInfoEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ ) VULKAN_HPP_NOEXCEPT
{
tagSize = tag_.size() * sizeof( T );
pTag = tag_.data();
@@ -19365,17 +18912,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDebugUtilsObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDebugUtilsObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( this );
}
- explicit operator VkDebugUtilsObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkDebugUtilsObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDebugUtilsObjectTagInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -19398,12 +18945,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) &&
- ( objectHandle == rhs.objectHandle ) && ( tagName == rhs.tagName ) && ( tagSize == rhs.tagSize ) &&
- ( pTag == rhs.pTag );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( objectHandle == rhs.objectHandle ) &&
+ ( tagName == rhs.tagName ) && ( tagSize == rhs.tagSize ) && ( pTag == rhs.pTag );
# endif
}
@@ -19422,13 +18968,6 @@ namespace VULKAN_HPP_NAMESPACE
size_t tagSize = {};
const void * pTag = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT ) ==
- sizeof( VkDebugUtilsObjectTagInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT>::value,
- "DebugUtilsObjectTagInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDebugUtilsObjectTagInfoEXT>
@@ -19441,28 +18980,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDedicatedAllocationBufferCreateInfoNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eDedicatedAllocationBufferCreateInfoNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationBufferCreateInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {} )
- VULKAN_HPP_NOEXCEPT : dedicatedAllocation( dedicatedAllocation_ )
- {}
+ VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , dedicatedAllocation( dedicatedAllocation_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( DedicatedAllocationBufferCreateInfoNV const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : DedicatedAllocationBufferCreateInfoNV(
- *reinterpret_cast<DedicatedAllocationBufferCreateInfoNV const *>( &rhs ) )
- {}
+ : DedicatedAllocationBufferCreateInfoNV( *reinterpret_cast<DedicatedAllocationBufferCreateInfoNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DedicatedAllocationBufferCreateInfoNV &
- operator=( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DedicatedAllocationBufferCreateInfoNV & operator=( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DedicatedAllocationBufferCreateInfoNV &
- operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ DedicatedAllocationBufferCreateInfoNV & operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV const *>( &rhs );
return *this;
@@ -19476,24 +19014,24 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV &
- setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
+ setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
{
dedicatedAllocation = dedicatedAllocation_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDedicatedAllocationBufferCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDedicatedAllocationBufferCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDedicatedAllocationBufferCreateInfoNV *>( this );
}
- explicit operator VkDedicatedAllocationBufferCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkDedicatedAllocationBufferCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDedicatedAllocationBufferCreateInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -19510,7 +19048,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocation == rhs.dedicatedAllocation );
@@ -19528,14 +19066,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV ) ==
- sizeof( VkDedicatedAllocationBufferCreateInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV>::value,
- "DedicatedAllocationBufferCreateInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDedicatedAllocationBufferCreateInfoNV>
@@ -19548,28 +19078,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDedicatedAllocationImageCreateInfoNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eDedicatedAllocationImageCreateInfoNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationImageCreateInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- DedicatedAllocationImageCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT
- : dedicatedAllocation( dedicatedAllocation_ )
- {}
+ VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , dedicatedAllocation( dedicatedAllocation_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( DedicatedAllocationImageCreateInfoNV const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: DedicatedAllocationImageCreateInfoNV( *reinterpret_cast<DedicatedAllocationImageCreateInfoNV const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DedicatedAllocationImageCreateInfoNV &
- operator=( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DedicatedAllocationImageCreateInfoNV & operator=( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DedicatedAllocationImageCreateInfoNV &
- operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ DedicatedAllocationImageCreateInfoNV & operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV const *>( &rhs );
return *this;
@@ -19583,24 +19112,24 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV &
- setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
+ setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
{
dedicatedAllocation = dedicatedAllocation_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDedicatedAllocationImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDedicatedAllocationImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDedicatedAllocationImageCreateInfoNV *>( this );
}
- explicit operator VkDedicatedAllocationImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkDedicatedAllocationImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDedicatedAllocationImageCreateInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -19617,7 +19146,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocation == rhs.dedicatedAllocation );
@@ -19635,14 +19164,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV ) ==
- sizeof( VkDedicatedAllocationImageCreateInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV>::value,
- "DedicatedAllocationImageCreateInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDedicatedAllocationImageCreateInfoNV>
@@ -19655,77 +19176,69 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDedicatedAllocationMemoryAllocateInfoNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eDedicatedAllocationMemoryAllocateInfoNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- DedicatedAllocationMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::Image image_ = {},
- VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT
- : image( image_ )
+ VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::Image image_ = {},
+ VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , image( image_ )
, buffer( buffer_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( DedicatedAllocationMemoryAllocateInfoNV const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : DedicatedAllocationMemoryAllocateInfoNV(
- *reinterpret_cast<DedicatedAllocationMemoryAllocateInfoNV const *>( &rhs ) )
- {}
+ : DedicatedAllocationMemoryAllocateInfoNV( *reinterpret_cast<DedicatedAllocationMemoryAllocateInfoNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DedicatedAllocationMemoryAllocateInfoNV &
- operator=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DedicatedAllocationMemoryAllocateInfoNV & operator=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DedicatedAllocationMemoryAllocateInfoNV &
- operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ DedicatedAllocationMemoryAllocateInfoNV & operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV &
- setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV &
- setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDedicatedAllocationMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDedicatedAllocationMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDedicatedAllocationMemoryAllocateInfoNV *>( this );
}
- explicit operator VkDedicatedAllocationMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkDedicatedAllocationMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDedicatedAllocationMemoryAllocateInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::Image const &,
- VULKAN_HPP_NAMESPACE::Buffer const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::Buffer const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -19738,7 +19251,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( buffer == rhs.buffer );
@@ -19757,15 +19270,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Image image = {};
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV ) ==
- sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV>::value,
- "DedicatedAllocationMemoryAllocateInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDedicatedAllocationMemoryAllocateInfoNV>
@@ -19784,18 +19288,19 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_CONSTEXPR MemoryBarrier2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {},
VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {},
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {},
- VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {} ) VULKAN_HPP_NOEXCEPT
- : srcStageMask( srcStageMask_ )
+ VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , srcStageMask( srcStageMask_ )
, srcAccessMask( srcAccessMask_ )
, dstStageMask( dstStageMask_ )
, dstAccessMask( dstAccessMask_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR MemoryBarrier2( MemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- MemoryBarrier2( VkMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : MemoryBarrier2( *reinterpret_cast<MemoryBarrier2 const *>( &rhs ) )
- {}
+ MemoryBarrier2( VkMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryBarrier2( *reinterpret_cast<MemoryBarrier2 const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
MemoryBarrier2 & operator=( MemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -19813,46 +19318,42 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 &
- setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT
{
srcStageMask = srcStageMask_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 &
- setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
srcAccessMask = srcAccessMask_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 &
- setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT
{
dstStageMask = dstStageMask_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 &
- setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
dstAccessMask = dstAccessMask_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryBarrier2 *>( this );
}
- explicit operator VkMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT
+ operator VkMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryBarrier2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -19874,12 +19375,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( MemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) &&
- ( srcAccessMask == rhs.srcAccessMask ) && ( dstStageMask == rhs.dstStageMask ) &&
- ( dstAccessMask == rhs.dstAccessMask );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) &&
+ ( dstStageMask == rhs.dstStageMask ) && ( dstAccessMask == rhs.dstAccessMask );
# endif
}
@@ -19897,12 +19397,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {};
VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryBarrier2 ) == sizeof( VkMemoryBarrier2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryBarrier2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryBarrier2>::value,
- "MemoryBarrier2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eMemoryBarrier2>
@@ -19920,19 +19414,21 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t baseMipLevel_ = {},
uint32_t levelCount_ = {},
uint32_t baseArrayLayer_ = {},
- uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT
+ uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT
: aspectMask( aspectMask_ )
, baseMipLevel( baseMipLevel_ )
, levelCount( levelCount_ )
, baseArrayLayer( baseArrayLayer_ )
, layerCount( layerCount_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ImageSubresourceRange( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageSubresourceRange( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageSubresourceRange( *reinterpret_cast<ImageSubresourceRange const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ImageSubresourceRange & operator=( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -19944,8 +19440,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange &
- setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
{
aspectMask = aspectMask_;
return *this;
@@ -19976,25 +19471,21 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImageSubresourceRange const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageSubresourceRange const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageSubresourceRange *>( this );
}
- explicit operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT
+ operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageSubresourceRange *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &,
- uint32_t const &,
- uint32_t const &,
- uint32_t const &,
- uint32_t const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -20007,12 +19498,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( aspectMask == rhs.aspectMask ) && ( baseMipLevel == rhs.baseMipLevel ) &&
- ( levelCount == rhs.levelCount ) && ( baseArrayLayer == rhs.baseArrayLayer ) &&
- ( layerCount == rhs.layerCount );
+ return ( aspectMask == rhs.aspectMask ) && ( baseMipLevel == rhs.baseMipLevel ) && ( levelCount == rhs.levelCount ) &&
+ ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount );
# endif
}
@@ -20029,12 +19519,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t baseArrayLayer = {};
uint32_t layerCount = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresourceRange>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresourceRange>::value,
- "ImageSubresourceRange is not nothrow_move_constructible!" );
struct ImageMemoryBarrier2
{
@@ -20044,18 +19528,19 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier2;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- ImageMemoryBarrier2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {},
- VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {},
- VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {},
- VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {},
- VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
- VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
- uint32_t srcQueueFamilyIndex_ = {},
- uint32_t dstQueueFamilyIndex_ = {},
- VULKAN_HPP_NAMESPACE::Image image_ = {},
- VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {} ) VULKAN_HPP_NOEXCEPT
- : srcStageMask( srcStageMask_ )
+ VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {},
+ VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {},
+ VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ uint32_t srcQueueFamilyIndex_ = {},
+ uint32_t dstQueueFamilyIndex_ = {},
+ VULKAN_HPP_NAMESPACE::Image image_ = {},
+ VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , srcStageMask( srcStageMask_ )
, srcAccessMask( srcAccessMask_ )
, dstStageMask( dstStageMask_ )
, dstAccessMask( dstAccessMask_ )
@@ -20065,13 +19550,14 @@ namespace VULKAN_HPP_NAMESPACE
, dstQueueFamilyIndex( dstQueueFamilyIndex_ )
, image( image_ )
, subresourceRange( subresourceRange_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2( ImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ImageMemoryBarrier2( VkImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : ImageMemoryBarrier2( *reinterpret_cast<ImageMemoryBarrier2 const *>( &rhs ) )
- {}
+ ImageMemoryBarrier2( VkImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageMemoryBarrier2( *reinterpret_cast<ImageMemoryBarrier2 const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ImageMemoryBarrier2 & operator=( ImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -20089,57 +19575,49 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 &
- setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT
{
srcStageMask = srcStageMask_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 &
- setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
srcAccessMask = srcAccessMask_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 &
- setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT
{
dstStageMask = dstStageMask_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 &
- setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
dstAccessMask = dstAccessMask_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 &
- setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
{
oldLayout = oldLayout_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 &
- setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
{
newLayout = newLayout_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 &
- setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
srcQueueFamilyIndex = srcQueueFamilyIndex_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 &
- setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
dstQueueFamilyIndex = dstQueueFamilyIndex_;
return *this;
@@ -20152,24 +19630,24 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 &
- setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
+ setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
{
subresourceRange = subresourceRange_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImageMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageMemoryBarrier2 *>( this );
}
- explicit operator VkImageMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT
+ operator VkImageMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageMemoryBarrier2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -20208,15 +19686,13 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImageMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) &&
- ( srcAccessMask == rhs.srcAccessMask ) && ( dstStageMask == rhs.dstStageMask ) &&
- ( dstAccessMask == rhs.dstAccessMask ) && ( oldLayout == rhs.oldLayout ) &&
- ( newLayout == rhs.newLayout ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) &&
- ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( image == rhs.image ) &&
- ( subresourceRange == rhs.subresourceRange );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) &&
+ ( dstStageMask == rhs.dstStageMask ) && ( dstAccessMask == rhs.dstAccessMask ) && ( oldLayout == rhs.oldLayout ) &&
+ ( newLayout == rhs.newLayout ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) &&
+ ( image == rhs.image ) && ( subresourceRange == rhs.subresourceRange );
# endif
}
@@ -20240,12 +19716,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Image image = {};
VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 ) == sizeof( VkImageMemoryBarrier2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2>::value,
- "ImageMemoryBarrier2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImageMemoryBarrier2>
@@ -20262,45 +19732,45 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDependencyInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- DependencyInfo( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {},
- uint32_t memoryBarrierCount_ = {},
- const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers_ = {},
- uint32_t bufferMemoryBarrierCount_ = {},
- const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers_ = {},
- uint32_t imageMemoryBarrierCount_ = {},
- const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers_ = {} ) VULKAN_HPP_NOEXCEPT
- : dependencyFlags( dependencyFlags_ )
+ VULKAN_HPP_CONSTEXPR DependencyInfo( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {},
+ uint32_t memoryBarrierCount_ = {},
+ const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers_ = {},
+ uint32_t bufferMemoryBarrierCount_ = {},
+ const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers_ = {},
+ uint32_t imageMemoryBarrierCount_ = {},
+ const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , dependencyFlags( dependencyFlags_ )
, memoryBarrierCount( memoryBarrierCount_ )
, pMemoryBarriers( pMemoryBarriers_ )
, bufferMemoryBarrierCount( bufferMemoryBarrierCount_ )
, pBufferMemoryBarriers( pBufferMemoryBarriers_ )
, imageMemoryBarrierCount( imageMemoryBarrierCount_ )
, pImageMemoryBarriers( pImageMemoryBarriers_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR DependencyInfo( DependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DependencyInfo( VkDependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : DependencyInfo( *reinterpret_cast<DependencyInfo const *>( &rhs ) )
- {}
+ DependencyInfo( VkDependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DependencyInfo( *reinterpret_cast<DependencyInfo const *>( &rhs ) ) {}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- DependencyInfo(
- VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MemoryBarrier2> const & memoryBarriers_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2> const &
- bufferMemoryBarriers_ = {},
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2> const &
- imageMemoryBarriers_ = {} )
- : dependencyFlags( dependencyFlags_ )
+ DependencyInfo( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MemoryBarrier2> const & memoryBarriers_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2> const & bufferMemoryBarriers_ = {},
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2> const & imageMemoryBarriers_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , dependencyFlags( dependencyFlags_ )
, memoryBarrierCount( static_cast<uint32_t>( memoryBarriers_.size() ) )
, pMemoryBarriers( memoryBarriers_.data() )
, bufferMemoryBarrierCount( static_cast<uint32_t>( bufferMemoryBarriers_.size() ) )
, pBufferMemoryBarriers( bufferMemoryBarriers_.data() )
, imageMemoryBarrierCount( static_cast<uint32_t>( imageMemoryBarriers_.size() ) )
, pImageMemoryBarriers( imageMemoryBarriers_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -20319,8 +19789,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DependencyInfo &
- setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
{
dependencyFlags = dependencyFlags_;
return *this;
@@ -20332,17 +19801,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DependencyInfo &
- setPMemoryBarriers( const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPMemoryBarriers( const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
{
pMemoryBarriers = pMemoryBarriers_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- DependencyInfo & setMemoryBarriers(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MemoryBarrier2> const &
- memoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+ DependencyInfo &
+ setMemoryBarriers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MemoryBarrier2> const & memoryBarriers_ ) VULKAN_HPP_NOEXCEPT
{
memoryBarrierCount = static_cast<uint32_t>( memoryBarriers_.size() );
pMemoryBarriers = memoryBarriers_.data();
@@ -20350,15 +19817,14 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 DependencyInfo &
- setBufferMemoryBarrierCount( uint32_t bufferMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setBufferMemoryBarrierCount( uint32_t bufferMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
{
bufferMemoryBarrierCount = bufferMemoryBarrierCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPBufferMemoryBarriers(
- const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DependencyInfo &
+ setPBufferMemoryBarriers( const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
{
pBufferMemoryBarriers = pBufferMemoryBarriers_;
return *this;
@@ -20366,8 +19832,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DependencyInfo & setBufferMemoryBarriers(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2> const &
- bufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2> const & bufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
{
bufferMemoryBarrierCount = static_cast<uint32_t>( bufferMemoryBarriers_.size() );
pBufferMemoryBarriers = bufferMemoryBarriers_.data();
@@ -20375,15 +19840,14 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 DependencyInfo &
- setImageMemoryBarrierCount( uint32_t imageMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setImageMemoryBarrierCount( uint32_t imageMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
{
imageMemoryBarrierCount = imageMemoryBarrierCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPImageMemoryBarriers(
- const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DependencyInfo &
+ setPImageMemoryBarriers( const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
{
pImageMemoryBarriers = pImageMemoryBarriers_;
return *this;
@@ -20391,8 +19855,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DependencyInfo & setImageMemoryBarriers(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2> const &
- imageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2> const & imageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
{
imageMemoryBarrierCount = static_cast<uint32_t>( imageMemoryBarriers_.size() );
pImageMemoryBarriers = imageMemoryBarriers_.data();
@@ -20401,17 +19864,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDependencyInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDependencyInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDependencyInfo *>( this );
}
- explicit operator VkDependencyInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkDependencyInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDependencyInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -20444,15 +19907,13 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DependencyInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dependencyFlags == rhs.dependencyFlags ) &&
( memoryBarrierCount == rhs.memoryBarrierCount ) && ( pMemoryBarriers == rhs.pMemoryBarriers ) &&
- ( bufferMemoryBarrierCount == rhs.bufferMemoryBarrierCount ) &&
- ( pBufferMemoryBarriers == rhs.pBufferMemoryBarriers ) &&
- ( imageMemoryBarrierCount == rhs.imageMemoryBarrierCount ) &&
- ( pImageMemoryBarriers == rhs.pImageMemoryBarriers );
+ ( bufferMemoryBarrierCount == rhs.bufferMemoryBarrierCount ) && ( pBufferMemoryBarriers == rhs.pBufferMemoryBarriers ) &&
+ ( imageMemoryBarrierCount == rhs.imageMemoryBarrierCount ) && ( pImageMemoryBarriers == rhs.pImageMemoryBarriers );
# endif
}
@@ -20473,12 +19934,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t imageMemoryBarrierCount = {};
const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DependencyInfo ) == sizeof( VkDependencyInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DependencyInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DependencyInfo>::value,
- "DependencyInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDependencyInfo>
@@ -20498,13 +19953,15 @@ namespace VULKAN_HPP_NAMESPACE
: buffer( buffer_ )
, offset( offset_ )
, range( range_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DescriptorBufferInfo( *reinterpret_cast<DescriptorBufferInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DescriptorBufferInfo & operator=( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -20522,38 +19979,34 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo &
- setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo &
- setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
{
range = range_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDescriptorBufferInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorBufferInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorBufferInfo *>( this );
}
- explicit operator VkDescriptorBufferInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorBufferInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorBufferInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::Buffer const &,
- VULKAN_HPP_NAMESPACE::DeviceSize const &,
- VULKAN_HPP_NAMESPACE::DeviceSize const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -20566,7 +20019,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( range == rhs.range );
@@ -20584,32 +20037,27 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
VULKAN_HPP_NAMESPACE::DeviceSize range = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo>::value,
- "DescriptorBufferInfo is not nothrow_move_constructible!" );
struct DescriptorImageInfo
{
using NativeType = VkDescriptorImageInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DescriptorImageInfo( VULKAN_HPP_NAMESPACE::Sampler sampler_ = {},
- VULKAN_HPP_NAMESPACE::ImageView imageView_ = {},
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ =
- VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR
+ DescriptorImageInfo( VULKAN_HPP_NAMESPACE::Sampler sampler_ = {},
+ VULKAN_HPP_NAMESPACE::ImageView imageView_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
: sampler( sampler_ )
, imageView( imageView_ )
, imageLayout( imageLayout_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR DescriptorImageInfo( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : DescriptorImageInfo( *reinterpret_cast<DescriptorImageInfo const *>( &rhs ) )
- {}
+ DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorImageInfo( *reinterpret_cast<DescriptorImageInfo const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DescriptorImageInfo & operator=( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -20621,45 +20069,40 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo &
- setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
{
sampler = sampler_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo &
- setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
{
imageView = imageView_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo &
- setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
{
imageLayout = imageLayout_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDescriptorImageInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorImageInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorImageInfo *>( this );
}
- explicit operator VkDescriptorImageInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorImageInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorImageInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::Sampler const &,
- VULKAN_HPP_NAMESPACE::ImageView const &,
- VULKAN_HPP_NAMESPACE::ImageLayout const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::Sampler const &, VULKAN_HPP_NAMESPACE::ImageView const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -20672,7 +20115,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sampler == rhs.sampler ) && ( imageView == rhs.imageView ) && ( imageLayout == rhs.imageLayout );
@@ -20690,30 +20133,22 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ImageView imageView = {};
VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorImageInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorImageInfo>::value,
- "DescriptorImageInfo is not nothrow_move_constructible!" );
struct DescriptorPoolSize
{
using NativeType = VkDescriptorPoolSize;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- DescriptorPoolSize( VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
- uint32_t descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DescriptorPoolSize( VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
+ uint32_t descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT
: type( type_ )
, descriptorCount( descriptorCount_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR DescriptorPoolSize( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DescriptorPoolSize( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
- : DescriptorPoolSize( *reinterpret_cast<DescriptorPoolSize const *>( &rhs ) )
- {}
+ DescriptorPoolSize( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorPoolSize( *reinterpret_cast<DescriptorPoolSize const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DescriptorPoolSize & operator=( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -20725,8 +20160,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize &
- setType( VULKAN_HPP_NAMESPACE::DescriptorType type_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize & setType( VULKAN_HPP_NAMESPACE::DescriptorType type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
@@ -20739,17 +20173,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDescriptorPoolSize const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorPoolSize const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorPoolSize *>( this );
}
- explicit operator VkDescriptorPoolSize &() VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorPoolSize &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorPoolSize *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -20766,7 +20200,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( type == rhs.type ) && ( descriptorCount == rhs.descriptorCount );
@@ -20783,12 +20217,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DescriptorType type = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
uint32_t descriptorCount = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorPoolSize>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorPoolSize>::value,
- "DescriptorPoolSize is not nothrow_move_constructible!" );
struct DescriptorPoolCreateInfo
{
@@ -20798,33 +20226,34 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ = {},
- uint32_t maxSets_ = {},
- uint32_t poolSizeCount_ = {},
- const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ = {},
+ uint32_t maxSets_ = {},
+ uint32_t poolSizeCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, maxSets( maxSets_ )
, poolSizeCount( poolSizeCount_ )
, pPoolSizes( pPoolSizes_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DescriptorPoolCreateInfo( *reinterpret_cast<DescriptorPoolCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- DescriptorPoolCreateInfo(
- VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_,
- uint32_t maxSets_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorPoolSize> const & poolSizes_ )
- : flags( flags_ )
- , maxSets( maxSets_ )
- , poolSizeCount( static_cast<uint32_t>( poolSizes_.size() ) )
- , pPoolSizes( poolSizes_.data() )
- {}
+ DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_,
+ uint32_t maxSets_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorPoolSize> const & poolSizes_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ ), flags( flags_ ), maxSets( maxSets_ ), poolSizeCount( static_cast<uint32_t>( poolSizes_.size() ) ), pPoolSizes( poolSizes_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -20843,8 +20272,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -20862,17 +20290,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo &
- setPPoolSizes( const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPPoolSizes( const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ ) VULKAN_HPP_NOEXCEPT
{
pPoolSizes = pPoolSizes_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- DescriptorPoolCreateInfo & setPoolSizes(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorPoolSize> const & poolSizes_ )
- VULKAN_HPP_NOEXCEPT
+ DescriptorPoolCreateInfo &
+ setPoolSizes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorPoolSize> const & poolSizes_ ) VULKAN_HPP_NOEXCEPT
{
poolSizeCount = static_cast<uint32_t>( poolSizes_.size() );
pPoolSizes = poolSizes_.data();
@@ -20881,17 +20307,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDescriptorPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorPoolCreateInfo *>( this );
}
- explicit operator VkDescriptorPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorPoolCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -20913,7 +20339,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( maxSets == rhs.maxSets ) &&
@@ -20935,13 +20361,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t poolSizeCount = {};
const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo ) ==
- sizeof( VkDescriptorPoolCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo>::value,
- "DescriptorPoolCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDescriptorPoolCreateInfo>
@@ -20954,62 +20373,58 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDescriptorPoolInlineUniformBlockCreateInfo;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eDescriptorPoolInlineUniformBlockCreateInfo;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- DescriptorPoolInlineUniformBlockCreateInfo( uint32_t maxInlineUniformBlockBindings_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxInlineUniformBlockBindings( maxInlineUniformBlockBindings_ )
- {}
+ VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfo( uint32_t maxInlineUniformBlockBindings_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxInlineUniformBlockBindings( maxInlineUniformBlockBindings_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfo(
- DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfo( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DescriptorPoolInlineUniformBlockCreateInfo( VkDescriptorPoolInlineUniformBlockCreateInfo const & rhs )
- VULKAN_HPP_NOEXCEPT
- : DescriptorPoolInlineUniformBlockCreateInfo(
- *reinterpret_cast<DescriptorPoolInlineUniformBlockCreateInfo const *>( &rhs ) )
- {}
+ DescriptorPoolInlineUniformBlockCreateInfo( VkDescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ : DescriptorPoolInlineUniformBlockCreateInfo( *reinterpret_cast<DescriptorPoolInlineUniformBlockCreateInfo const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DescriptorPoolInlineUniformBlockCreateInfo &
- operator=( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DescriptorPoolInlineUniformBlockCreateInfo & operator=( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DescriptorPoolInlineUniformBlockCreateInfo &
- operator=( VkDescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ DescriptorPoolInlineUniformBlockCreateInfo & operator=( VkDescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfo &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfo &
- setMaxInlineUniformBlockBindings( uint32_t maxInlineUniformBlockBindings_ ) VULKAN_HPP_NOEXCEPT
+ setMaxInlineUniformBlockBindings( uint32_t maxInlineUniformBlockBindings_ ) VULKAN_HPP_NOEXCEPT
{
maxInlineUniformBlockBindings = maxInlineUniformBlockBindings_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDescriptorPoolInlineUniformBlockCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorPoolInlineUniformBlockCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorPoolInlineUniformBlockCreateInfo *>( this );
}
- explicit operator VkDescriptorPoolInlineUniformBlockCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorPoolInlineUniformBlockCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorPoolInlineUniformBlockCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -21026,11 +20441,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( maxInlineUniformBlockBindings == rhs.maxInlineUniformBlockBindings );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxInlineUniformBlockBindings == rhs.maxInlineUniformBlockBindings );
# endif
}
@@ -21041,19 +20455,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfo;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfo;
+ const void * pNext = {};
uint32_t maxInlineUniformBlockBindings = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo ) ==
- sizeof( VkDescriptorPoolInlineUniformBlockCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo>::value,
- "DescriptorPoolInlineUniformBlockCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDescriptorPoolInlineUniformBlockCreateInfo>
@@ -21070,31 +20475,31 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetAllocateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo(
- VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ = {},
- uint32_t descriptorSetCount_ = {},
- const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {} ) VULKAN_HPP_NOEXCEPT
- : descriptorPool( descriptorPool_ )
+ VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ = {},
+ uint32_t descriptorSetCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , descriptorPool( descriptorPool_ )
, descriptorSetCount( descriptorSetCount_ )
, pSetLayouts( pSetLayouts_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DescriptorSetAllocateInfo( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DescriptorSetAllocateInfo( *reinterpret_cast<DescriptorSetAllocateInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- DescriptorSetAllocateInfo(
- VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const &
- setLayouts_ )
- : descriptorPool( descriptorPool_ )
- , descriptorSetCount( static_cast<uint32_t>( setLayouts_.size() ) )
- , pSetLayouts( setLayouts_.data() )
- {}
+ DescriptorSetAllocateInfo( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ ), descriptorPool( descriptorPool_ ), descriptorSetCount( static_cast<uint32_t>( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -21113,31 +20518,27 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo &
- setDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ ) VULKAN_HPP_NOEXCEPT
{
descriptorPool = descriptorPool_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo &
- setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
{
descriptorSetCount = descriptorSetCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo &
- setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
{
pSetLayouts = pSetLayouts_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- DescriptorSetAllocateInfo & setSetLayouts(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const &
- setLayouts_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorSetAllocateInfo &
+ setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ ) VULKAN_HPP_NOEXCEPT
{
descriptorSetCount = static_cast<uint32_t>( setLayouts_.size() );
pSetLayouts = setLayouts_.data();
@@ -21146,17 +20547,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDescriptorSetAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorSetAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorSetAllocateInfo *>( this );
}
- explicit operator VkDescriptorSetAllocateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorSetAllocateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorSetAllocateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -21177,11 +20578,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorPool == rhs.descriptorPool ) &&
- ( descriptorSetCount == rhs.descriptorSetCount ) && ( pSetLayouts == rhs.pSetLayouts );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorPool == rhs.descriptorPool ) && ( descriptorSetCount == rhs.descriptorSetCount ) &&
+ ( pSetLayouts == rhs.pSetLayouts );
# endif
}
@@ -21198,13 +20599,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t descriptorSetCount = {};
const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo ) ==
- sizeof( VkDescriptorSetAllocateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo>::value,
- "DescriptorSetAllocateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDescriptorSetAllocateInfo>
@@ -21212,43 +20606,150 @@ namespace VULKAN_HPP_NAMESPACE
using Type = DescriptorSetAllocateInfo;
};
+ struct DescriptorSetBindingReferenceVALVE
+ {
+ using NativeType = VkDescriptorSetBindingReferenceVALVE;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetBindingReferenceVALVE;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DescriptorSetBindingReferenceVALVE( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {},
+ uint32_t binding_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , descriptorSetLayout( descriptorSetLayout_ )
+ , binding( binding_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR DescriptorSetBindingReferenceVALVE( DescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DescriptorSetBindingReferenceVALVE( VkDescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
+ : DescriptorSetBindingReferenceVALVE( *reinterpret_cast<DescriptorSetBindingReferenceVALVE const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ DescriptorSetBindingReferenceVALVE & operator=( DescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DescriptorSetBindingReferenceVALVE & operator=( VkDescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE &
+ setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ descriptorSetLayout = descriptorSetLayout_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
+ {
+ binding = binding_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkDescriptorSetBindingReferenceVALVE const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( this );
+ }
+
+ operator VkDescriptorSetBindingReferenceVALVE &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDescriptorSetBindingReferenceVALVE *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DescriptorSetLayout const &, uint32_t const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, descriptorSetLayout, binding );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( DescriptorSetBindingReferenceVALVE const & ) const = default;
+#else
+ bool operator==( DescriptorSetBindingReferenceVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorSetLayout == rhs.descriptorSetLayout ) && ( binding == rhs.binding );
+# endif
+ }
+
+ bool operator!=( DescriptorSetBindingReferenceVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetBindingReferenceVALVE;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {};
+ uint32_t binding = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eDescriptorSetBindingReferenceVALVE>
+ {
+ using Type = DescriptorSetBindingReferenceVALVE;
+ };
+
struct DescriptorSetLayoutBinding
{
using NativeType = VkDescriptorSetLayoutBinding;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding(
- uint32_t binding_ = {},
- VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
- uint32_t descriptorCount_ = {},
- VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {},
- const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( uint32_t binding_ = {},
+ VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
+ uint32_t descriptorCount_ = {},
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {},
+ const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ = {} ) VULKAN_HPP_NOEXCEPT
: binding( binding_ )
, descriptorType( descriptorType_ )
, descriptorCount( descriptorCount_ )
, stageFlags( stageFlags_ )
, pImmutableSamplers( pImmutableSamplers_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DescriptorSetLayoutBinding( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
: DescriptorSetLayoutBinding( *reinterpret_cast<DescriptorSetLayoutBinding const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- DescriptorSetLayoutBinding(
- uint32_t binding_,
- VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_,
- VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Sampler> const & immutableSamplers_ )
+ DescriptorSetLayoutBinding( uint32_t binding_,
+ VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_,
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Sampler> const & immutableSamplers_ )
: binding( binding_ )
, descriptorType( descriptorType_ )
, descriptorCount( static_cast<uint32_t>( immutableSamplers_.size() ) )
, stageFlags( stageFlags_ )
, pImmutableSamplers( immutableSamplers_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -21267,38 +20768,33 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding &
- setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
{
descriptorType = descriptorType_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding &
- setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
{
descriptorCount = descriptorCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding &
- setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
{
stageFlags = stageFlags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding &
- setPImmutableSamplers( const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setPImmutableSamplers( const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ ) VULKAN_HPP_NOEXCEPT
{
pImmutableSamplers = pImmutableSamplers_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- DescriptorSetLayoutBinding & setImmutableSamplers(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Sampler> const & immutableSamplers_ )
- VULKAN_HPP_NOEXCEPT
+ DescriptorSetLayoutBinding &
+ setImmutableSamplers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Sampler> const & immutableSamplers_ ) VULKAN_HPP_NOEXCEPT
{
descriptorCount = static_cast<uint32_t>( immutableSamplers_.size() );
pImmutableSamplers = immutableSamplers_.data();
@@ -21307,17 +20803,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDescriptorSetLayoutBinding const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorSetLayoutBinding const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorSetLayoutBinding *>( this );
}
- explicit operator VkDescriptorSetLayoutBinding &() VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorSetLayoutBinding &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorSetLayoutBinding *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -21338,12 +20834,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( binding == rhs.binding ) && ( descriptorType == rhs.descriptorType ) &&
- ( descriptorCount == rhs.descriptorCount ) && ( stageFlags == rhs.stageFlags ) &&
- ( pImmutableSamplers == rhs.pImmutableSamplers );
+ return ( binding == rhs.binding ) && ( descriptorType == rhs.descriptorType ) && ( descriptorCount == rhs.descriptorCount ) &&
+ ( stageFlags == rhs.stageFlags ) && ( pImmutableSamplers == rhs.pImmutableSamplers );
# endif
}
@@ -21360,75 +20855,63 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding ) ==
- sizeof( VkDescriptorSetLayoutBinding ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding>::value,
- "DescriptorSetLayoutBinding is not nothrow_move_constructible!" );
struct DescriptorSetLayoutBindingFlagsCreateInfo
{
using NativeType = VkDescriptorSetLayoutBindingFlagsCreateInfo;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo(
- uint32_t bindingCount_ = {},
- const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ = {} ) VULKAN_HPP_NOEXCEPT
- : bindingCount( bindingCount_ )
+ VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( uint32_t bindingCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , bindingCount( bindingCount_ )
, pBindingFlags( pBindingFlags_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo(
- DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DescriptorSetLayoutBindingFlagsCreateInfo( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs )
- VULKAN_HPP_NOEXCEPT
- : DescriptorSetLayoutBindingFlagsCreateInfo(
- *reinterpret_cast<DescriptorSetLayoutBindingFlagsCreateInfo const *>( &rhs ) )
- {}
+ DescriptorSetLayoutBindingFlagsCreateInfo( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ : DescriptorSetLayoutBindingFlagsCreateInfo( *reinterpret_cast<DescriptorSetLayoutBindingFlagsCreateInfo const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DescriptorSetLayoutBindingFlagsCreateInfo(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags> const &
- bindingFlags_ )
- : bindingCount( static_cast<uint32_t>( bindingFlags_.size() ) ), pBindingFlags( bindingFlags_.data() )
- {}
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags> const & bindingFlags_, const void * pNext_ = nullptr )
+ : pNext( pNext_ ), bindingCount( static_cast<uint32_t>( bindingFlags_.size() ) ), pBindingFlags( bindingFlags_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DescriptorSetLayoutBindingFlagsCreateInfo &
- operator=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DescriptorSetLayoutBindingFlagsCreateInfo & operator=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DescriptorSetLayoutBindingFlagsCreateInfo &
- operator=( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ DescriptorSetLayoutBindingFlagsCreateInfo & operator=( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo &
- setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
{
bindingCount = bindingCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo &
- setPBindingFlags( const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ ) VULKAN_HPP_NOEXCEPT
+ setPBindingFlags( const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ ) VULKAN_HPP_NOEXCEPT
{
pBindingFlags = pBindingFlags_;
return *this;
@@ -21436,8 +20919,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DescriptorSetLayoutBindingFlagsCreateInfo & setBindingFlags(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags> const &
- bindingFlags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags> const & bindingFlags_ ) VULKAN_HPP_NOEXCEPT
{
bindingCount = static_cast<uint32_t>( bindingFlags_.size() );
pBindingFlags = bindingFlags_.data();
@@ -21446,24 +20928,22 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDescriptorSetLayoutBindingFlagsCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorSetLayoutBindingFlagsCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorSetLayoutBindingFlagsCreateInfo *>( this );
}
- explicit operator VkDescriptorSetLayoutBindingFlagsCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorSetLayoutBindingFlagsCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorSetLayoutBindingFlagsCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- uint32_t const &,
- const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * const &>
+ std::
+ tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -21476,11 +20956,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bindingCount == rhs.bindingCount ) &&
- ( pBindingFlags == rhs.pBindingFlags );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bindingCount == rhs.bindingCount ) && ( pBindingFlags == rhs.pBindingFlags );
# endif
}
@@ -21491,20 +20970,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo;
- const void * pNext = {};
- uint32_t bindingCount = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo;
+ const void * pNext = {};
+ uint32_t bindingCount = {};
const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo ) ==
- sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo>::value,
- "DescriptorSetLayoutBindingFlagsCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo>
@@ -21521,34 +20991,35 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo(
- VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ = {},
- uint32_t bindingCount_ = {},
- const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ = {},
+ uint32_t bindingCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, bindingCount( bindingCount_ )
, pBindings( pBindings_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DescriptorSetLayoutCreateInfo( *reinterpret_cast<DescriptorSetLayoutCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- DescriptorSetLayoutCreateInfo(
- VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding> const &
- bindings_ )
- : flags( flags_ ), bindingCount( static_cast<uint32_t>( bindings_.size() ) ), pBindings( bindings_.data() )
- {}
+ DescriptorSetLayoutCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding> const & bindings_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ ), flags( flags_ ), bindingCount( static_cast<uint32_t>( bindings_.size() ) ), pBindings( bindings_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DescriptorSetLayoutCreateInfo &
- operator=( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DescriptorSetLayoutCreateInfo & operator=( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorSetLayoutCreateInfo & operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -21563,31 +21034,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo &
- setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
{
bindingCount = bindingCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo &
- setPBindings( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ ) VULKAN_HPP_NOEXCEPT
+ setPBindings( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ ) VULKAN_HPP_NOEXCEPT
{
pBindings = pBindings_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- DescriptorSetLayoutCreateInfo & setBindings(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding> const &
- bindings_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorSetLayoutCreateInfo &
+ setBindings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding> const & bindings_ ) VULKAN_HPP_NOEXCEPT
{
bindingCount = static_cast<uint32_t>( bindings_.size() );
pBindings = bindings_.data();
@@ -21596,17 +21064,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDescriptorSetLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorSetLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( this );
}
- explicit operator VkDescriptorSetLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorSetLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorSetLayoutCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -21627,11 +21095,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( bindingCount == rhs.bindingCount ) && ( pBindings == rhs.pBindings );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( bindingCount == rhs.bindingCount ) &&
+ ( pBindings == rhs.pBindings );
# endif
}
@@ -21642,20 +21110,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags = {};
uint32_t bindingCount = {};
const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo ) ==
- sizeof( VkDescriptorSetLayoutCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo>::value,
- "DescriptorSetLayoutCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDescriptorSetLayoutCreateInfo>
@@ -21663,6 +21123,111 @@ namespace VULKAN_HPP_NAMESPACE
using Type = DescriptorSetLayoutCreateInfo;
};
+ struct DescriptorSetLayoutHostMappingInfoVALVE
+ {
+ using NativeType = VkDescriptorSetLayoutHostMappingInfoVALVE;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutHostMappingInfoVALVE;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR
+ DescriptorSetLayoutHostMappingInfoVALVE( size_t descriptorOffset_ = {}, uint32_t descriptorSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , descriptorOffset( descriptorOffset_ )
+ , descriptorSize( descriptorSize_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR DescriptorSetLayoutHostMappingInfoVALVE( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DescriptorSetLayoutHostMappingInfoVALVE( VkDescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
+ : DescriptorSetLayoutHostMappingInfoVALVE( *reinterpret_cast<DescriptorSetLayoutHostMappingInfoVALVE const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ DescriptorSetLayoutHostMappingInfoVALVE & operator=( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DescriptorSetLayoutHostMappingInfoVALVE & operator=( VkDescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & setDescriptorOffset( size_t descriptorOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ descriptorOffset = descriptorOffset_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & setDescriptorSize( uint32_t descriptorSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ descriptorSize = descriptorSize_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkDescriptorSetLayoutHostMappingInfoVALVE const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDescriptorSetLayoutHostMappingInfoVALVE *>( this );
+ }
+
+ operator VkDescriptorSetLayoutHostMappingInfoVALVE &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, size_t const &, uint32_t const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, descriptorOffset, descriptorSize );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( DescriptorSetLayoutHostMappingInfoVALVE const & ) const = default;
+#else
+ bool operator==( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorOffset == rhs.descriptorOffset ) && ( descriptorSize == rhs.descriptorSize );
+# endif
+ }
+
+ bool operator!=( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutHostMappingInfoVALVE;
+ void * pNext = {};
+ size_t descriptorOffset = {};
+ uint32_t descriptorSize = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eDescriptorSetLayoutHostMappingInfoVALVE>
+ {
+ using Type = DescriptorSetLayoutHostMappingInfoVALVE;
+ };
+
struct DescriptorSetLayoutSupport
{
using NativeType = VkDescriptorSetLayoutSupport;
@@ -21671,16 +21236,18 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutSupport;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( VULKAN_HPP_NAMESPACE::Bool32 supported_ = {} ) VULKAN_HPP_NOEXCEPT
- : supported( supported_ )
- {}
+ VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( VULKAN_HPP_NAMESPACE::Bool32 supported_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , supported( supported_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DescriptorSetLayoutSupport( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorSetLayoutSupport( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
: DescriptorSetLayoutSupport( *reinterpret_cast<DescriptorSetLayoutSupport const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DescriptorSetLayoutSupport & operator=( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -21691,17 +21258,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkDescriptorSetLayoutSupport const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorSetLayoutSupport const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorSetLayoutSupport *>( this );
}
- explicit operator VkDescriptorSetLayoutSupport &() VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorSetLayoutSupport &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorSetLayoutSupport *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -21718,7 +21285,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supported == rhs.supported );
@@ -21736,13 +21303,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 supported = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport ) ==
- sizeof( VkDescriptorSetLayoutSupport ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>::value,
- "DescriptorSetLayoutSupport is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDescriptorSetLayoutSupport>
@@ -21756,70 +21316,65 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDescriptorSetVariableDescriptorCountAllocateInfo;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- DescriptorSetVariableDescriptorCountAllocateInfo( uint32_t descriptorSetCount_ = {},
- const uint32_t * pDescriptorCounts_ = {} ) VULKAN_HPP_NOEXCEPT
- : descriptorSetCount( descriptorSetCount_ )
+ VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo( uint32_t descriptorSetCount_ = {},
+ const uint32_t * pDescriptorCounts_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , descriptorSetCount( descriptorSetCount_ )
, pDescriptorCounts( pDescriptorCounts_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo(
- DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ DescriptorSetVariableDescriptorCountAllocateInfo( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DescriptorSetVariableDescriptorCountAllocateInfo( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs )
- VULKAN_HPP_NOEXCEPT
- : DescriptorSetVariableDescriptorCountAllocateInfo(
- *reinterpret_cast<DescriptorSetVariableDescriptorCountAllocateInfo const *>( &rhs ) )
- {}
+ DescriptorSetVariableDescriptorCountAllocateInfo( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ : DescriptorSetVariableDescriptorCountAllocateInfo( *reinterpret_cast<DescriptorSetVariableDescriptorCountAllocateInfo const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- DescriptorSetVariableDescriptorCountAllocateInfo(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & descriptorCounts_ )
- : descriptorSetCount( static_cast<uint32_t>( descriptorCounts_.size() ) )
- , pDescriptorCounts( descriptorCounts_.data() )
- {}
+ DescriptorSetVariableDescriptorCountAllocateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & descriptorCounts_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ ), descriptorSetCount( static_cast<uint32_t>( descriptorCounts_.size() ) ), pDescriptorCounts( descriptorCounts_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DescriptorSetVariableDescriptorCountAllocateInfo &
- operator=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DescriptorSetVariableDescriptorCountAllocateInfo & operator=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DescriptorSetVariableDescriptorCountAllocateInfo &
- operator=( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ DescriptorSetVariableDescriptorCountAllocateInfo & operator=( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo &
- setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
{
descriptorSetCount = descriptorSetCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo &
- setPDescriptorCounts( const uint32_t * pDescriptorCounts_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setPDescriptorCounts( const uint32_t * pDescriptorCounts_ ) VULKAN_HPP_NOEXCEPT
{
pDescriptorCounts = pDescriptorCounts_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorCounts(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & descriptorCounts_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorSetVariableDescriptorCountAllocateInfo &
+ setDescriptorCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & descriptorCounts_ ) VULKAN_HPP_NOEXCEPT
{
descriptorSetCount = static_cast<uint32_t>( descriptorCounts_.size() );
pDescriptorCounts = descriptorCounts_.data();
@@ -21828,24 +21383,21 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDescriptorSetVariableDescriptorCountAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorSetVariableDescriptorCountAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountAllocateInfo *>( this );
}
- explicit operator VkDescriptorSetVariableDescriptorCountAllocateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorSetVariableDescriptorCountAllocateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountAllocateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- uint32_t const &,
- const uint32_t * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -21858,7 +21410,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorSetCount == rhs.descriptorSetCount ) &&
@@ -21873,20 +21425,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo;
+ const void * pNext = {};
uint32_t descriptorSetCount = {};
const uint32_t * pDescriptorCounts = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo ) ==
- sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo>::value,
- "DescriptorSetVariableDescriptorCountAllocateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo>
@@ -21900,47 +21443,45 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDescriptorSetVariableDescriptorCountLayoutSupport;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- DescriptorSetVariableDescriptorCountLayoutSupport( uint32_t maxVariableDescriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxVariableDescriptorCount( maxVariableDescriptorCount_ )
- {}
+ VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport( uint32_t maxVariableDescriptorCount_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxVariableDescriptorCount( maxVariableDescriptorCount_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport(
- DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ DescriptorSetVariableDescriptorCountLayoutSupport( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DescriptorSetVariableDescriptorCountLayoutSupport( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs )
- VULKAN_HPP_NOEXCEPT
- : DescriptorSetVariableDescriptorCountLayoutSupport(
- *reinterpret_cast<DescriptorSetVariableDescriptorCountLayoutSupport const *>( &rhs ) )
- {}
+ DescriptorSetVariableDescriptorCountLayoutSupport( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+ : DescriptorSetVariableDescriptorCountLayoutSupport( *reinterpret_cast<DescriptorSetVariableDescriptorCountLayoutSupport const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DescriptorSetVariableDescriptorCountLayoutSupport &
operator=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DescriptorSetVariableDescriptorCountLayoutSupport &
- operator=( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+ DescriptorSetVariableDescriptorCountLayoutSupport & operator=( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport const *>( &rhs );
return *this;
}
- explicit operator VkDescriptorSetVariableDescriptorCountLayoutSupport const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorSetVariableDescriptorCountLayoutSupport const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountLayoutSupport *>( this );
}
- explicit operator VkDescriptorSetVariableDescriptorCountLayoutSupport &() VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorSetVariableDescriptorCountLayoutSupport &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountLayoutSupport *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -21957,11 +21498,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount );
# endif
}
@@ -21972,19 +21512,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport;
+ void * pNext = {};
uint32_t maxVariableDescriptorCount = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport ) ==
- sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupport ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport>::value,
- "DescriptorSetVariableDescriptorCountLayoutSupport is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport>
@@ -21998,31 +21529,30 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDescriptorUpdateTemplateEntry;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry(
- uint32_t dstBinding_ = {},
- uint32_t dstArrayElement_ = {},
- uint32_t descriptorCount_ = {},
- VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
- size_t offset_ = {},
- size_t stride_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( uint32_t dstBinding_ = {},
+ uint32_t dstArrayElement_ = {},
+ uint32_t descriptorCount_ = {},
+ VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
+ size_t offset_ = {},
+ size_t stride_ = {} ) VULKAN_HPP_NOEXCEPT
: dstBinding( dstBinding_ )
, dstArrayElement( dstArrayElement_ )
, descriptorCount( descriptorCount_ )
, descriptorType( descriptorType_ )
, offset( offset_ )
, stride( stride_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DescriptorUpdateTemplateEntry( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorUpdateTemplateEntry( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
: DescriptorUpdateTemplateEntry( *reinterpret_cast<DescriptorUpdateTemplateEntry const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DescriptorUpdateTemplateEntry &
- operator=( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DescriptorUpdateTemplateEntry & operator=( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorUpdateTemplateEntry & operator=( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -22037,22 +21567,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry &
- setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
{
dstArrayElement = dstArrayElement_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry &
- setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
{
descriptorCount = descriptorCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry &
- setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
{
descriptorType = descriptorType_;
return *this;
@@ -22071,26 +21598,21 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDescriptorUpdateTemplateEntry const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorUpdateTemplateEntry const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorUpdateTemplateEntry *>( this );
}
- explicit operator VkDescriptorUpdateTemplateEntry &() VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorUpdateTemplateEntry &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorUpdateTemplateEntry *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<uint32_t const &,
- uint32_t const &,
- uint32_t const &,
- VULKAN_HPP_NAMESPACE::DescriptorType const &,
- size_t const &,
- size_t const &>
+ std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DescriptorType const &, size_t const &, size_t const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -22103,12 +21625,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DescriptorUpdateTemplateEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( dstBinding == rhs.dstBinding ) && ( dstArrayElement == rhs.dstArrayElement ) &&
- ( descriptorCount == rhs.descriptorCount ) && ( descriptorType == rhs.descriptorType ) &&
- ( offset == rhs.offset ) && ( stride == rhs.stride );
+ return ( dstBinding == rhs.dstBinding ) && ( dstArrayElement == rhs.dstArrayElement ) && ( descriptorCount == rhs.descriptorCount ) &&
+ ( descriptorType == rhs.descriptorType ) && ( offset == rhs.offset ) && ( stride == rhs.stride );
# endif
}
@@ -22126,14 +21647,6 @@ namespace VULKAN_HPP_NAMESPACE
size_t offset = {};
size_t stride = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry ) ==
- sizeof( VkDescriptorUpdateTemplateEntry ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry>::value,
- "DescriptorUpdateTemplateEntry is not nothrow_move_constructible!" );
using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry;
struct DescriptorUpdateTemplateCreateInfo
@@ -22141,21 +21654,21 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDescriptorUpdateTemplateCreateInfo;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eDescriptorUpdateTemplateCreateInfo;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorUpdateTemplateCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo(
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ = {},
uint32_t descriptorUpdateEntryCount_ = {},
const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ = {},
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ =
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet,
- VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {},
- VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
- VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {},
- uint32_t set_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet,
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
+ VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {},
+ uint32_t set_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, descriptorUpdateEntryCount( descriptorUpdateEntryCount_ )
, pDescriptorUpdateEntries( pDescriptorUpdateEntries_ )
, templateType( templateType_ )
@@ -22163,27 +21676,28 @@ namespace VULKAN_HPP_NAMESPACE
, pipelineBindPoint( pipelineBindPoint_ )
, pipelineLayout( pipelineLayout_ )
, set( set_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateInfo const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DescriptorUpdateTemplateCreateInfo( *reinterpret_cast<DescriptorUpdateTemplateCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DescriptorUpdateTemplateCreateInfo(
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry> const &
- descriptorUpdateEntries_,
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ =
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet,
- VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {},
- VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
- VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {},
- uint32_t set_ = {} )
- : flags( flags_ )
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry> const & descriptorUpdateEntries_,
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet,
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
+ VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {},
+ uint32_t set_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, descriptorUpdateEntryCount( static_cast<uint32_t>( descriptorUpdateEntries_.size() ) )
, pDescriptorUpdateEntries( descriptorUpdateEntries_.data() )
, templateType( templateType_ )
@@ -22191,15 +21705,14 @@ namespace VULKAN_HPP_NAMESPACE
, pipelineBindPoint( pipelineBindPoint_ )
, pipelineLayout( pipelineLayout_ )
, set( set_ )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DescriptorUpdateTemplateCreateInfo &
- operator=( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DescriptorUpdateTemplateCreateInfo & operator=( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DescriptorUpdateTemplateCreateInfo &
- operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ DescriptorUpdateTemplateCreateInfo & operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const *>( &rhs );
return *this;
@@ -22213,21 +21726,20 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ setFlags( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo &
- setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ ) VULKAN_HPP_NOEXCEPT
{
descriptorUpdateEntryCount = descriptorUpdateEntryCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPDescriptorUpdateEntries(
- const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo &
+ setPDescriptorUpdateEntries( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT
{
pDescriptorUpdateEntries = pDescriptorUpdateEntries_;
return *this;
@@ -22235,8 +21747,8 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntries(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry> const &
- descriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry> const & descriptorUpdateEntries_ )
+ VULKAN_HPP_NOEXCEPT
{
descriptorUpdateEntryCount = static_cast<uint32_t>( descriptorUpdateEntries_.size() );
pDescriptorUpdateEntries = descriptorUpdateEntries_.data();
@@ -22245,28 +21757,27 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo &
- setTemplateType( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ ) VULKAN_HPP_NOEXCEPT
+ setTemplateType( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ ) VULKAN_HPP_NOEXCEPT
{
templateType = templateType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo &
- setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT
+ setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT
{
descriptorSetLayout = descriptorSetLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo &
- setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+ setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
{
pipelineBindPoint = pipelineBindPoint_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo &
- setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT
{
pipelineLayout = pipelineLayout_;
return *this;
@@ -22279,17 +21790,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDescriptorUpdateTemplateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorUpdateTemplateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( this );
}
- explicit operator VkDescriptorUpdateTemplateCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorUpdateTemplateCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDescriptorUpdateTemplateCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -22306,16 +21817,8 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- flags,
- descriptorUpdateEntryCount,
- pDescriptorUpdateEntries,
- templateType,
- descriptorSetLayout,
- pipelineBindPoint,
- pipelineLayout,
- set );
+ return std::tie(
+ sType, pNext, flags, descriptorUpdateEntryCount, pDescriptorUpdateEntries, templateType, descriptorSetLayout, pipelineBindPoint, pipelineLayout, set );
}
#endif
@@ -22324,14 +21827,13 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DescriptorUpdateTemplateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount ) &&
( pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries ) && ( templateType == rhs.templateType ) &&
- ( descriptorSetLayout == rhs.descriptorSetLayout ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) &&
- ( pipelineLayout == rhs.pipelineLayout ) && ( set == rhs.set );
+ ( descriptorSetLayout == rhs.descriptorSetLayout ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( pipelineLayout == rhs.pipelineLayout ) &&
+ ( set == rhs.set );
# endif
}
@@ -22342,26 +21844,17 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo;
+ const void * pNext = {};
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags = {};
uint32_t descriptorUpdateEntryCount = {};
const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries = {};
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType =
- VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet;
- VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {};
- VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
- VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {};
- uint32_t set = {};
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet;
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {};
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+ VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {};
+ uint32_t set = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo ) ==
- sizeof( VkDescriptorUpdateTemplateCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo>::value,
- "DescriptorUpdateTemplateCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDescriptorUpdateTemplateCreateInfo>
@@ -22370,6 +21863,138 @@ namespace VULKAN_HPP_NAMESPACE
};
using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo;
+ struct DeviceAddressBindingCallbackDataEXT
+ {
+ using NativeType = VkDeviceAddressBindingCallbackDataEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceAddressBindingCallbackDataEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DeviceAddressBindingCallbackDataEXT(
+ VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagsEXT flags_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceAddress baseAddress_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT bindingType_ = VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT::eBind,
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
+ , baseAddress( baseAddress_ )
+ , size( size_ )
+ , bindingType( bindingType_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR DeviceAddressBindingCallbackDataEXT( DeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceAddressBindingCallbackDataEXT( VkDeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : DeviceAddressBindingCallbackDataEXT( *reinterpret_cast<DeviceAddressBindingCallbackDataEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ DeviceAddressBindingCallbackDataEXT & operator=( DeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceAddressBindingCallbackDataEXT & operator=( VkDeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setFlags( VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setBaseAddress( VULKAN_HPP_NAMESPACE::DeviceAddress baseAddress_ ) VULKAN_HPP_NOEXCEPT
+ {
+ baseAddress = baseAddress_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+ {
+ size = size_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT &
+ setBindingType( VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT bindingType_ ) VULKAN_HPP_NOEXCEPT
+ {
+ bindingType = bindingType_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkDeviceAddressBindingCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDeviceAddressBindingCallbackDataEXT *>( this );
+ }
+
+ operator VkDeviceAddressBindingCallbackDataEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDeviceAddressBindingCallbackDataEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagsEXT const &,
+ VULKAN_HPP_NAMESPACE::DeviceAddress const &,
+ VULKAN_HPP_NAMESPACE::DeviceSize const &,
+ VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, flags, baseAddress, size, bindingType );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( DeviceAddressBindingCallbackDataEXT const & ) const = default;
+#else
+ bool operator==( DeviceAddressBindingCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( baseAddress == rhs.baseAddress ) && ( size == rhs.size ) &&
+ ( bindingType == rhs.bindingType );
+# endif
+ }
+
+ bool operator!=( DeviceAddressBindingCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceAddressBindingCallbackDataEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagsEXT flags = {};
+ VULKAN_HPP_NAMESPACE::DeviceAddress baseAddress = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+ VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT bindingType = VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT::eBind;
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eDeviceAddressBindingCallbackDataEXT>
+ {
+ using Type = DeviceAddressBindingCallbackDataEXT;
+ };
+
struct DeviceBufferMemoryRequirements
{
using NativeType = VkDeviceBufferMemoryRequirements;
@@ -22378,21 +22003,22 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceBufferMemoryRequirements;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DeviceBufferMemoryRequirements(
- const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo_ = {} ) VULKAN_HPP_NOEXCEPT
- : pCreateInfo( pCreateInfo_ )
- {}
+ VULKAN_HPP_CONSTEXPR DeviceBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pCreateInfo( pCreateInfo_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DeviceBufferMemoryRequirements( DeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DeviceBufferMemoryRequirements( DeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceBufferMemoryRequirements( VkDeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceBufferMemoryRequirements( *reinterpret_cast<DeviceBufferMemoryRequirements const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DeviceBufferMemoryRequirements &
- operator=( DeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DeviceBufferMemoryRequirements & operator=( DeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceBufferMemoryRequirements & operator=( VkDeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -22407,31 +22033,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceBufferMemoryRequirements &
- setPCreateInfo( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceBufferMemoryRequirements & setPCreateInfo( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT
{
pCreateInfo = pCreateInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDeviceBufferMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDeviceBufferMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( this );
}
- explicit operator VkDeviceBufferMemoryRequirements &() VULKAN_HPP_NOEXCEPT
+ operator VkDeviceBufferMemoryRequirements &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceBufferMemoryRequirements *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- const VULKAN_HPP_NAMESPACE::BufferCreateInfo * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const VULKAN_HPP_NAMESPACE::BufferCreateInfo * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -22444,7 +22067,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DeviceBufferMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pCreateInfo == rhs.pCreateInfo );
@@ -22462,14 +22085,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements ) ==
- sizeof( VkDeviceBufferMemoryRequirements ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements>::value,
- "DeviceBufferMemoryRequirements is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDeviceBufferMemoryRequirements>
@@ -22489,28 +22104,35 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {},
uint32_t queueFamilyIndex_ = {},
uint32_t queueCount_ = {},
- const float * pQueuePriorities_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ const float * pQueuePriorities_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, queueFamilyIndex( queueFamilyIndex_ )
, queueCount( queueCount_ )
, pQueuePriorities( pQueuePriorities_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceQueueCreateInfo( *reinterpret_cast<DeviceQueueCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_,
uint32_t queueFamilyIndex_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & queuePriorities_ )
- : flags( flags_ )
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & queuePriorities_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, queueFamilyIndex( queueFamilyIndex_ )
, queueCount( static_cast<uint32_t>( queuePriorities_.size() ) )
, pQueuePriorities( queuePriorities_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -22529,15 +22151,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo &
- setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
queueFamilyIndex = queueFamilyIndex_;
return *this;
@@ -22549,16 +22169,14 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo &
- setPQueuePriorities( const float * pQueuePriorities_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setPQueuePriorities( const float * pQueuePriorities_ ) VULKAN_HPP_NOEXCEPT
{
pQueuePriorities = pQueuePriorities_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- DeviceQueueCreateInfo & setQueuePriorities(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & queuePriorities_ ) VULKAN_HPP_NOEXCEPT
+ DeviceQueueCreateInfo & setQueuePriorities( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & queuePriorities_ ) VULKAN_HPP_NOEXCEPT
{
queueCount = static_cast<uint32_t>( queuePriorities_.size() );
pQueuePriorities = queuePriorities_.data();
@@ -22567,17 +22185,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDeviceQueueCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDeviceQueueCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceQueueCreateInfo *>( this );
}
- explicit operator VkDeviceQueueCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkDeviceQueueCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceQueueCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -22599,12 +22217,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( queueFamilyIndex == rhs.queueFamilyIndex ) && ( queueCount == rhs.queueCount ) &&
- ( pQueuePriorities == rhs.pQueuePriorities );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) &&
+ ( queueCount == rhs.queueCount ) && ( pQueuePriorities == rhs.pQueuePriorities );
# endif
}
@@ -22622,12 +22239,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t queueCount = {};
const float * pQueuePriorities = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo>::value,
- "DeviceQueueCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDeviceQueueCreateInfo>
@@ -22640,62 +22251,61 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceFeatures( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 logicOp_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 wideLines_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 largePoints_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 logicOp_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 wideLines_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 largePoints_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = {} ) VULKAN_HPP_NOEXCEPT
: robustBufferAccess( robustBufferAccess_ )
, fullDrawIndexUint32( fullDrawIndexUint32_ )
, imageCubeArray( imageCubeArray_ )
@@ -22751,13 +22361,15 @@ namespace VULKAN_HPP_NAMESPACE
, sparseResidencyAliased( sparseResidencyAliased_ )
, variableMultisampleRate( variableMultisampleRate_ )
, inheritedQueries( inheritedQueries_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceFeatures( *reinterpret_cast<PhysicalDeviceFeatures const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceFeatures & operator=( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -22769,403 +22381,359 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
{
robustBufferAccess = robustBufferAccess_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setFullDrawIndexUint32( VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFullDrawIndexUint32( VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ ) VULKAN_HPP_NOEXCEPT
{
fullDrawIndexUint32 = fullDrawIndexUint32_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setImageCubeArray( VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setImageCubeArray( VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ ) VULKAN_HPP_NOEXCEPT
{
imageCubeArray = imageCubeArray_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setIndependentBlend( VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setIndependentBlend( VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ ) VULKAN_HPP_NOEXCEPT
{
independentBlend = independentBlend_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ ) VULKAN_HPP_NOEXCEPT
{
geometryShader = geometryShader_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ ) VULKAN_HPP_NOEXCEPT
{
tessellationShader = tessellationShader_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setSampleRateShading( VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSampleRateShading( VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ ) VULKAN_HPP_NOEXCEPT
{
sampleRateShading = sampleRateShading_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setDualSrcBlend( VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDualSrcBlend( VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ ) VULKAN_HPP_NOEXCEPT
{
dualSrcBlend = dualSrcBlend_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setLogicOp( VULKAN_HPP_NAMESPACE::Bool32 logicOp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setLogicOp( VULKAN_HPP_NAMESPACE::Bool32 logicOp_ ) VULKAN_HPP_NOEXCEPT
{
logicOp = logicOp_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setMultiDrawIndirect( VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setMultiDrawIndirect( VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ ) VULKAN_HPP_NOEXCEPT
{
multiDrawIndirect = multiDrawIndirect_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setDrawIndirectFirstInstance( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDrawIndirectFirstInstance( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ ) VULKAN_HPP_NOEXCEPT
{
drawIndirectFirstInstance = drawIndirectFirstInstance_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setDepthClamp( VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthClamp( VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ ) VULKAN_HPP_NOEXCEPT
{
depthClamp = depthClamp_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setDepthBiasClamp( VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthBiasClamp( VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
{
depthBiasClamp = depthBiasClamp_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setFillModeNonSolid( VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFillModeNonSolid( VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ ) VULKAN_HPP_NOEXCEPT
{
fillModeNonSolid = fillModeNonSolid_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setDepthBounds( VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthBounds( VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ ) VULKAN_HPP_NOEXCEPT
{
depthBounds = depthBounds_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setWideLines( VULKAN_HPP_NAMESPACE::Bool32 wideLines_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setWideLines( VULKAN_HPP_NAMESPACE::Bool32 wideLines_ ) VULKAN_HPP_NOEXCEPT
{
wideLines = wideLines_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setLargePoints( VULKAN_HPP_NAMESPACE::Bool32 largePoints_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setLargePoints( VULKAN_HPP_NAMESPACE::Bool32 largePoints_ ) VULKAN_HPP_NOEXCEPT
{
largePoints = largePoints_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setAlphaToOne( VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setAlphaToOne( VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ ) VULKAN_HPP_NOEXCEPT
{
alphaToOne = alphaToOne_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setMultiViewport( VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setMultiViewport( VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ ) VULKAN_HPP_NOEXCEPT
{
multiViewport = multiViewport_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setSamplerAnisotropy( VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSamplerAnisotropy( VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ ) VULKAN_HPP_NOEXCEPT
{
samplerAnisotropy = samplerAnisotropy_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setTextureCompressionETC2( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTextureCompressionETC2( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ ) VULKAN_HPP_NOEXCEPT
{
textureCompressionETC2 = textureCompressionETC2_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setTextureCompressionASTC_LDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ ) VULKAN_HPP_NOEXCEPT
+ setTextureCompressionASTC_LDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ ) VULKAN_HPP_NOEXCEPT
{
textureCompressionASTC_LDR = textureCompressionASTC_LDR_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setTextureCompressionBC( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTextureCompressionBC( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ ) VULKAN_HPP_NOEXCEPT
{
textureCompressionBC = textureCompressionBC_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setOcclusionQueryPrecise( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setOcclusionQueryPrecise( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ ) VULKAN_HPP_NOEXCEPT
{
occlusionQueryPrecise = occlusionQueryPrecise_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setPipelineStatisticsQuery( VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setPipelineStatisticsQuery( VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ ) VULKAN_HPP_NOEXCEPT
{
pipelineStatisticsQuery = pipelineStatisticsQuery_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setVertexPipelineStoresAndAtomics(
- VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
+ setVertexPipelineStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
{
vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setFragmentStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFragmentStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
{
fragmentStoresAndAtomics = fragmentStoresAndAtomics_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderTessellationAndGeometryPointSize(
- VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
+ setShaderTessellationAndGeometryPointSize( VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ ) VULKAN_HPP_NOEXCEPT
{
shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setShaderImageGatherExtended( VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderImageGatherExtended( VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ ) VULKAN_HPP_NOEXCEPT
{
shaderImageGatherExtended = shaderImageGatherExtended_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageExtendedFormats(
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
+ setShaderStorageImageExtendedFormats( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageMultisample(
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
+ setShaderStorageImageMultisample( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageImageMultisample = shaderStorageImageMultisample_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageReadWithoutFormat(
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
+ setShaderStorageImageReadWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageWriteWithoutFormat(
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
+ setShaderStorageImageWriteWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderUniformBufferArrayDynamicIndexing(
- VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
+ setShaderUniformBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderSampledImageArrayDynamicIndexing(
- VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
+ setShaderSampledImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageBufferArrayDynamicIndexing(
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
+ setShaderStorageBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageArrayDynamicIndexing(
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
+ setShaderStorageImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setShaderClipDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderClipDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ ) VULKAN_HPP_NOEXCEPT
{
shaderClipDistance = shaderClipDistance_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setShaderCullDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderCullDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ ) VULKAN_HPP_NOEXCEPT
{
shaderCullDistance = shaderCullDistance_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setShaderFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ ) VULKAN_HPP_NOEXCEPT
{
shaderFloat64 = shaderFloat64_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setShaderInt64( VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderInt64( VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ ) VULKAN_HPP_NOEXCEPT
{
shaderInt64 = shaderInt64_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setShaderInt16( VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderInt16( VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ ) VULKAN_HPP_NOEXCEPT
{
shaderInt16 = shaderInt16_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setShaderResourceResidency( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderResourceResidency( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ ) VULKAN_HPP_NOEXCEPT
{
shaderResourceResidency = shaderResourceResidency_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setShaderResourceMinLod( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderResourceMinLod( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ ) VULKAN_HPP_NOEXCEPT
{
shaderResourceMinLod = shaderResourceMinLod_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setSparseBinding( VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseBinding( VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ ) VULKAN_HPP_NOEXCEPT
{
sparseBinding = sparseBinding_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setSparseResidencyBuffer( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyBuffer( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidencyBuffer = sparseResidencyBuffer_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setSparseResidencyImage2D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyImage2D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidencyImage2D = sparseResidencyImage2D_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setSparseResidencyImage3D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyImage3D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidencyImage3D = sparseResidencyImage3D_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setSparseResidency2Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency2Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidency2Samples = sparseResidency2Samples_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setSparseResidency4Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency4Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidency4Samples = sparseResidency4Samples_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setSparseResidency8Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency8Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidency8Samples = sparseResidency8Samples_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setSparseResidency16Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency16Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidency16Samples = sparseResidency16Samples_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setSparseResidencyAliased( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyAliased( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidencyAliased = sparseResidencyAliased_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setVariableMultisampleRate( VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setVariableMultisampleRate( VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ ) VULKAN_HPP_NOEXCEPT
{
variableMultisampleRate = variableMultisampleRate_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
- setInheritedQueries( VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setInheritedQueries( VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ ) VULKAN_HPP_NOEXCEPT
{
inheritedQueries = inheritedQueries_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceFeatures const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFeatures *>( this );
}
- explicit operator VkPhysicalDeviceFeatures &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFeatures *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -23290,48 +22858,36 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( robustBufferAccess == rhs.robustBufferAccess ) && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 ) &&
- ( imageCubeArray == rhs.imageCubeArray ) && ( independentBlend == rhs.independentBlend ) &&
- ( geometryShader == rhs.geometryShader ) && ( tessellationShader == rhs.tessellationShader ) &&
- ( sampleRateShading == rhs.sampleRateShading ) && ( dualSrcBlend == rhs.dualSrcBlend ) &&
- ( logicOp == rhs.logicOp ) && ( multiDrawIndirect == rhs.multiDrawIndirect ) &&
- ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance ) && ( depthClamp == rhs.depthClamp ) &&
- ( depthBiasClamp == rhs.depthBiasClamp ) && ( fillModeNonSolid == rhs.fillModeNonSolid ) &&
- ( depthBounds == rhs.depthBounds ) && ( wideLines == rhs.wideLines ) &&
- ( largePoints == rhs.largePoints ) && ( alphaToOne == rhs.alphaToOne ) &&
+ ( imageCubeArray == rhs.imageCubeArray ) && ( independentBlend == rhs.independentBlend ) && ( geometryShader == rhs.geometryShader ) &&
+ ( tessellationShader == rhs.tessellationShader ) && ( sampleRateShading == rhs.sampleRateShading ) && ( dualSrcBlend == rhs.dualSrcBlend ) &&
+ ( logicOp == rhs.logicOp ) && ( multiDrawIndirect == rhs.multiDrawIndirect ) && ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance ) &&
+ ( depthClamp == rhs.depthClamp ) && ( depthBiasClamp == rhs.depthBiasClamp ) && ( fillModeNonSolid == rhs.fillModeNonSolid ) &&
+ ( depthBounds == rhs.depthBounds ) && ( wideLines == rhs.wideLines ) && ( largePoints == rhs.largePoints ) && ( alphaToOne == rhs.alphaToOne ) &&
( multiViewport == rhs.multiViewport ) && ( samplerAnisotropy == rhs.samplerAnisotropy ) &&
- ( textureCompressionETC2 == rhs.textureCompressionETC2 ) &&
- ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR ) &&
- ( textureCompressionBC == rhs.textureCompressionBC ) &&
- ( occlusionQueryPrecise == rhs.occlusionQueryPrecise ) &&
- ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery ) &&
- ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics ) &&
+ ( textureCompressionETC2 == rhs.textureCompressionETC2 ) && ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR ) &&
+ ( textureCompressionBC == rhs.textureCompressionBC ) && ( occlusionQueryPrecise == rhs.occlusionQueryPrecise ) &&
+ ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery ) && ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics ) &&
( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics ) &&
( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize ) &&
- ( shaderImageGatherExtended == rhs.shaderImageGatherExtended ) &&
- ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats ) &&
+ ( shaderImageGatherExtended == rhs.shaderImageGatherExtended ) && ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats ) &&
( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample ) &&
( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat ) &&
( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat ) &&
( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing ) &&
( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing ) &&
( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing ) &&
- ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing ) &&
- ( shaderClipDistance == rhs.shaderClipDistance ) && ( shaderCullDistance == rhs.shaderCullDistance ) &&
- ( shaderFloat64 == rhs.shaderFloat64 ) && ( shaderInt64 == rhs.shaderInt64 ) &&
+ ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing ) && ( shaderClipDistance == rhs.shaderClipDistance ) &&
+ ( shaderCullDistance == rhs.shaderCullDistance ) && ( shaderFloat64 == rhs.shaderFloat64 ) && ( shaderInt64 == rhs.shaderInt64 ) &&
( shaderInt16 == rhs.shaderInt16 ) && ( shaderResourceResidency == rhs.shaderResourceResidency ) &&
( shaderResourceMinLod == rhs.shaderResourceMinLod ) && ( sparseBinding == rhs.sparseBinding ) &&
- ( sparseResidencyBuffer == rhs.sparseResidencyBuffer ) &&
- ( sparseResidencyImage2D == rhs.sparseResidencyImage2D ) &&
- ( sparseResidencyImage3D == rhs.sparseResidencyImage3D ) &&
- ( sparseResidency2Samples == rhs.sparseResidency2Samples ) &&
- ( sparseResidency4Samples == rhs.sparseResidency4Samples ) &&
- ( sparseResidency8Samples == rhs.sparseResidency8Samples ) &&
- ( sparseResidency16Samples == rhs.sparseResidency16Samples ) &&
- ( sparseResidencyAliased == rhs.sparseResidencyAliased ) &&
+ ( sparseResidencyBuffer == rhs.sparseResidencyBuffer ) && ( sparseResidencyImage2D == rhs.sparseResidencyImage2D ) &&
+ ( sparseResidencyImage3D == rhs.sparseResidencyImage3D ) && ( sparseResidency2Samples == rhs.sparseResidency2Samples ) &&
+ ( sparseResidency4Samples == rhs.sparseResidency4Samples ) && ( sparseResidency8Samples == rhs.sparseResidency8Samples ) &&
+ ( sparseResidency16Samples == rhs.sparseResidency16Samples ) && ( sparseResidencyAliased == rhs.sparseResidencyAliased ) &&
( variableMultisampleRate == rhs.variableMultisampleRate ) && ( inheritedQueries == rhs.inheritedQueries );
# endif
}
@@ -23399,13 +22955,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate = {};
VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures ) ==
- sizeof( VkPhysicalDeviceFeatures ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures>::value,
- "PhysicalDeviceFeatures is not nothrow_move_constructible!" );
struct DeviceCreateInfo
{
@@ -23415,16 +22964,17 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ = {},
- uint32_t queueCreateInfoCount_ = {},
- const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ = {},
- uint32_t enabledLayerCount_ = {},
- const char * const * ppEnabledLayerNames_ = {},
- uint32_t enabledExtensionCount_ = {},
- const char * const * ppEnabledExtensionNames_ = {},
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ = {} )
- VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ = {},
+ uint32_t queueCreateInfoCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ = {},
+ uint32_t enabledLayerCount_ = {},
+ const char * const * ppEnabledLayerNames_ = {},
+ uint32_t enabledExtensionCount_ = {},
+ const char * const * ppEnabledExtensionNames_ = {},
+ const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, queueCreateInfoCount( queueCreateInfoCount_ )
, pQueueCreateInfos( pQueueCreateInfos_ )
, enabledLayerCount( enabledLayerCount_ )
@@ -23432,23 +22982,22 @@ namespace VULKAN_HPP_NAMESPACE
, enabledExtensionCount( enabledExtensionCount_ )
, ppEnabledExtensionNames( ppEnabledExtensionNames_ )
, pEnabledFeatures( pEnabledFeatures_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR DeviceCreateInfo( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : DeviceCreateInfo( *reinterpret_cast<DeviceCreateInfo const *>( &rhs ) )
- {}
+ DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceCreateInfo( *reinterpret_cast<DeviceCreateInfo const *>( &rhs ) ) {}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- DeviceCreateInfo(
- VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo> const &
- queueCreateInfos_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_ = {},
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ = {},
- const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ = {} )
- : flags( flags_ )
+ DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo> const & queueCreateInfos_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_ = {},
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ = {},
+ const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, queueCreateInfoCount( static_cast<uint32_t>( queueCreateInfos_.size() ) )
, pQueueCreateInfos( queueCreateInfos_.data() )
, enabledLayerCount( static_cast<uint32_t>( pEnabledLayerNames_.size() ) )
@@ -23456,7 +23005,8 @@ namespace VULKAN_HPP_NAMESPACE
, enabledExtensionCount( static_cast<uint32_t>( pEnabledExtensionNames_.size() ) )
, ppEnabledExtensionNames( pEnabledExtensionNames_.data() )
, pEnabledFeatures( pEnabledFeatures_ )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -23475,22 +23025,20 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo &
- setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ ) VULKAN_HPP_NOEXCEPT
{
queueCreateInfoCount = queueCreateInfoCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo &
- setPQueueCreateInfos( const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
+ setPQueueCreateInfos( const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
{
pQueueCreateInfos = pQueueCreateInfos_;
return *this;
@@ -23498,8 +23046,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DeviceCreateInfo & setQueueCreateInfos(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo> const &
- queueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo> const & queueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
{
queueCreateInfoCount = static_cast<uint32_t>( queueCreateInfos_.size() );
pQueueCreateInfos = queueCreateInfos_.data();
@@ -23513,16 +23060,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo &
- setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
{
ppEnabledLayerNames = ppEnabledLayerNames_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- DeviceCreateInfo & setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const &
- pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
+ DeviceCreateInfo &
+ setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
{
enabledLayerCount = static_cast<uint32_t>( pEnabledLayerNames_.size() );
ppEnabledLayerNames = pEnabledLayerNames_.data();
@@ -23530,24 +23076,21 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo &
- setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
{
enabledExtensionCount = enabledExtensionCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo &
- setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
{
ppEnabledExtensionNames = ppEnabledExtensionNames_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- DeviceCreateInfo & setPEnabledExtensionNames(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ )
- VULKAN_HPP_NOEXCEPT
+ DeviceCreateInfo &
+ setPEnabledExtensionNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
{
enabledExtensionCount = static_cast<uint32_t>( pEnabledExtensionNames_.size() );
ppEnabledExtensionNames = pEnabledExtensionNames_.data();
@@ -23555,25 +23098,24 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo &
- setPEnabledFeatures( const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPEnabledFeatures( const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ ) VULKAN_HPP_NOEXCEPT
{
pEnabledFeatures = pEnabledFeatures_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceCreateInfo *>( this );
}
- explicit operator VkDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -23641,16 +23183,14 @@ namespace VULKAN_HPP_NAMESPACE
bool operator==( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( queueCreateInfoCount == rhs.queueCreateInfoCount ) && ( pQueueCreateInfos == rhs.pQueueCreateInfos ) &&
- ( enabledLayerCount == rhs.enabledLayerCount ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueCreateInfoCount == rhs.queueCreateInfoCount ) &&
+ ( pQueueCreateInfos == rhs.pQueueCreateInfos ) && ( enabledLayerCount == rhs.enabledLayerCount ) &&
[this, rhs]
{
bool equal = true;
for ( size_t i = 0; equal && ( i < enabledLayerCount ); ++i )
{
- equal = ( ( ppEnabledLayerNames[i] == rhs.ppEnabledLayerNames[i] ) ||
- ( strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ) == 0 ) );
+ equal = ( ( ppEnabledLayerNames[i] == rhs.ppEnabledLayerNames[i] ) || ( strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ) == 0 ) );
}
return equal;
}() && ( enabledExtensionCount == rhs.enabledExtensionCount ) &&
@@ -23683,12 +23223,6 @@ namespace VULKAN_HPP_NAMESPACE
const char * const * ppEnabledExtensionNames = {};
const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceCreateInfo>::value,
- "DeviceCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDeviceCreateInfo>
@@ -23701,33 +23235,31 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDeviceDeviceMemoryReportCreateInfoEXT;
static const bool allowDuplicate = true;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eDeviceDeviceMemoryReportCreateInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- DeviceDeviceMemoryReportCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {},
- PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ = {},
- void * pUserData_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {},
+ PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ = {},
+ void * pUserData_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, pfnUserCallback( pfnUserCallback_ )
, pUserData( pUserData_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT( DeviceDeviceMemoryReportCreateInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceDeviceMemoryReportCreateInfoEXT( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : DeviceDeviceMemoryReportCreateInfoEXT(
- *reinterpret_cast<DeviceDeviceMemoryReportCreateInfoEXT const *>( &rhs ) )
- {}
+ : DeviceDeviceMemoryReportCreateInfoEXT( *reinterpret_cast<DeviceDeviceMemoryReportCreateInfoEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DeviceDeviceMemoryReportCreateInfoEXT &
- operator=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DeviceDeviceMemoryReportCreateInfoEXT & operator=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DeviceDeviceMemoryReportCreateInfoEXT &
- operator=( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ DeviceDeviceMemoryReportCreateInfoEXT & operator=( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT const *>( &rhs );
return *this;
@@ -23740,39 +23272,37 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT &
- setFlags( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT &
- setPfnUserCallback( PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
+ setPfnUserCallback( PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
{
pfnUserCallback = pfnUserCallback_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT &
- setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
{
pUserData = pUserData_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDeviceDeviceMemoryReportCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDeviceDeviceMemoryReportCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceDeviceMemoryReportCreateInfoEXT *>( this );
}
- explicit operator VkDeviceDeviceMemoryReportCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkDeviceDeviceMemoryReportCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceDeviceMemoryReportCreateInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -23788,40 +23318,28 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif
-#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( DeviceDeviceMemoryReportCreateInfoEXT const & ) const = default;
-#else
bool operator==( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
-# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( pfnUserCallback == rhs.pfnUserCallback ) && ( pUserData == rhs.pUserData );
-# endif
+#else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pfnUserCallback == rhs.pfnUserCallback ) &&
+ ( pUserData == rhs.pUserData );
+#endif
}
bool operator!=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
-#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {};
PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback = {};
void * pUserData = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT ) ==
- sizeof( VkDeviceDeviceMemoryReportCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT>::value,
- "DeviceDeviceMemoryReportCreateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDeviceDeviceMemoryReportCreateInfoEXT>
@@ -23834,27 +23352,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDeviceDiagnosticsConfigCreateInfoNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eDeviceDiagnosticsConfigCreateInfoNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV(
- VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ )
- {}
+ VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( DeviceDiagnosticsConfigCreateInfoNV const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceDiagnosticsConfigCreateInfoNV( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceDiagnosticsConfigCreateInfoNV( *reinterpret_cast<DeviceDiagnosticsConfigCreateInfoNV const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DeviceDiagnosticsConfigCreateInfoNV &
- operator=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DeviceDiagnosticsConfigCreateInfoNV & operator=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DeviceDiagnosticsConfigCreateInfoNV &
- operator=( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ DeviceDiagnosticsConfigCreateInfoNV & operator=( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV const *>( &rhs );
return *this;
@@ -23867,31 +23385,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV &
- setFlags( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDeviceDiagnosticsConfigCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDeviceDiagnosticsConfigCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceDiagnosticsConfigCreateInfoNV *>( this );
}
- explicit operator VkDeviceDiagnosticsConfigCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkDeviceDiagnosticsConfigCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceDiagnosticsConfigCreateInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -23904,7 +23419,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
@@ -23922,14 +23437,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV ) ==
- sizeof( VkDeviceDiagnosticsConfigCreateInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV>::value,
- "DeviceDiagnosticsConfigCreateInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDeviceDiagnosticsConfigCreateInfoNV>
@@ -23945,17 +23452,16 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceEventInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- DeviceEventInfoEXT( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ =
- VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug ) VULKAN_HPP_NOEXCEPT
- : deviceEvent( deviceEvent_ )
- {}
+ VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , deviceEvent( deviceEvent_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : DeviceEventInfoEXT( *reinterpret_cast<DeviceEventInfoEXT const *>( &rhs ) )
- {}
+ DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceEventInfoEXT( *reinterpret_cast<DeviceEventInfoEXT const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DeviceEventInfoEXT & operator=( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -23973,31 +23479,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT &
- setDeviceEvent( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT & setDeviceEvent( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ ) VULKAN_HPP_NOEXCEPT
{
deviceEvent = deviceEvent_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDeviceEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDeviceEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceEventInfoEXT *>( this );
}
- explicit operator VkDeviceEventInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkDeviceEventInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceEventInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -24010,7 +23513,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceEvent == rhs.deviceEvent );
@@ -24028,12 +23531,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT>::value,
- "DeviceEventInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDeviceEventInfoEXT>
@@ -24041,6 +23538,607 @@ namespace VULKAN_HPP_NAMESPACE
using Type = DeviceEventInfoEXT;
};
+ struct DeviceFaultAddressInfoEXT
+ {
+ using NativeType = VkDeviceFaultAddressInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR
+ DeviceFaultAddressInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT addressType_ = VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT::eNone,
+ VULKAN_HPP_NAMESPACE::DeviceAddress reportedAddress_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize addressPrecision_ = {} ) VULKAN_HPP_NOEXCEPT
+ : addressType( addressType_ )
+ , reportedAddress( reportedAddress_ )
+ , addressPrecision( addressPrecision_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR DeviceFaultAddressInfoEXT( DeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceFaultAddressInfoEXT( VkDeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : DeviceFaultAddressInfoEXT( *reinterpret_cast<DeviceFaultAddressInfoEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ DeviceFaultAddressInfoEXT & operator=( DeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceFaultAddressInfoEXT & operator=( VkDeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT & setAddressType( VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT addressType_ ) VULKAN_HPP_NOEXCEPT
+ {
+ addressType = addressType_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT & setReportedAddress( VULKAN_HPP_NAMESPACE::DeviceAddress reportedAddress_ ) VULKAN_HPP_NOEXCEPT
+ {
+ reportedAddress = reportedAddress_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT & setAddressPrecision( VULKAN_HPP_NAMESPACE::DeviceSize addressPrecision_ ) VULKAN_HPP_NOEXCEPT
+ {
+ addressPrecision = addressPrecision_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkDeviceFaultAddressInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDeviceFaultAddressInfoEXT *>( this );
+ }
+
+ operator VkDeviceFaultAddressInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDeviceFaultAddressInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( addressType, reportedAddress, addressPrecision );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( DeviceFaultAddressInfoEXT const & ) const = default;
+#else
+ bool operator==( DeviceFaultAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( addressType == rhs.addressType ) && ( reportedAddress == rhs.reportedAddress ) && ( addressPrecision == rhs.addressPrecision );
+# endif
+ }
+
+ bool operator!=( DeviceFaultAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT addressType = VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT::eNone;
+ VULKAN_HPP_NAMESPACE::DeviceAddress reportedAddress = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize addressPrecision = {};
+ };
+
+ struct DeviceFaultCountsEXT
+ {
+ using NativeType = VkDeviceFaultCountsEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceFaultCountsEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DeviceFaultCountsEXT( uint32_t addressInfoCount_ = {},
+ uint32_t vendorInfoCount_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize vendorBinarySize_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , addressInfoCount( addressInfoCount_ )
+ , vendorInfoCount( vendorInfoCount_ )
+ , vendorBinarySize( vendorBinarySize_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR DeviceFaultCountsEXT( DeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceFaultCountsEXT( VkDeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : DeviceFaultCountsEXT( *reinterpret_cast<DeviceFaultCountsEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ DeviceFaultCountsEXT & operator=( DeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceFaultCountsEXT & operator=( VkDeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setAddressInfoCount( uint32_t addressInfoCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ addressInfoCount = addressInfoCount_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setVendorInfoCount( uint32_t vendorInfoCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ vendorInfoCount = vendorInfoCount_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setVendorBinarySize( VULKAN_HPP_NAMESPACE::DeviceSize vendorBinarySize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ vendorBinarySize = vendorBinarySize_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkDeviceFaultCountsEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDeviceFaultCountsEXT *>( this );
+ }
+
+ operator VkDeviceFaultCountsEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDeviceFaultCountsEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, addressInfoCount, vendorInfoCount, vendorBinarySize );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( DeviceFaultCountsEXT const & ) const = default;
+#else
+ bool operator==( DeviceFaultCountsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( addressInfoCount == rhs.addressInfoCount ) && ( vendorInfoCount == rhs.vendorInfoCount ) &&
+ ( vendorBinarySize == rhs.vendorBinarySize );
+# endif
+ }
+
+ bool operator!=( DeviceFaultCountsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceFaultCountsEXT;
+ void * pNext = {};
+ uint32_t addressInfoCount = {};
+ uint32_t vendorInfoCount = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize vendorBinarySize = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eDeviceFaultCountsEXT>
+ {
+ using Type = DeviceFaultCountsEXT;
+ };
+
+ struct DeviceFaultVendorInfoEXT
+ {
+ using NativeType = VkDeviceFaultVendorInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT( std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
+ uint64_t vendorFaultCode_ = {},
+ uint64_t vendorFaultData_ = {} ) VULKAN_HPP_NOEXCEPT
+ : description( description_ )
+ , vendorFaultCode( vendorFaultCode_ )
+ , vendorFaultData( vendorFaultData_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT( DeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceFaultVendorInfoEXT( VkDeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : DeviceFaultVendorInfoEXT( *reinterpret_cast<DeviceFaultVendorInfoEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ DeviceFaultVendorInfoEXT & operator=( DeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceFaultVendorInfoEXT & operator=( VkDeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT & setDescription( std::array<char, VK_MAX_DESCRIPTION_SIZE> description_ ) VULKAN_HPP_NOEXCEPT
+ {
+ description = description_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT & setVendorFaultCode( uint64_t vendorFaultCode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ vendorFaultCode = vendorFaultCode_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT & setVendorFaultData( uint64_t vendorFaultData_ ) VULKAN_HPP_NOEXCEPT
+ {
+ vendorFaultData = vendorFaultData_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkDeviceFaultVendorInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDeviceFaultVendorInfoEXT *>( this );
+ }
+
+ operator VkDeviceFaultVendorInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDeviceFaultVendorInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, uint64_t const &, uint64_t const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( description, vendorFaultCode, vendorFaultData );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( DeviceFaultVendorInfoEXT const & ) const = default;
+#else
+ bool operator==( DeviceFaultVendorInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( description == rhs.description ) && ( vendorFaultCode == rhs.vendorFaultCode ) && ( vendorFaultData == rhs.vendorFaultData );
+# endif
+ }
+
+ bool operator!=( DeviceFaultVendorInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+ uint64_t vendorFaultCode = {};
+ uint64_t vendorFaultData = {};
+ };
+
+ struct DeviceFaultInfoEXT
+ {
+ using NativeType = VkDeviceFaultInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceFaultInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT( std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT * pAddressInfos_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT * pVendorInfos_ = {},
+ void * pVendorBinaryData_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , description( description_ )
+ , pAddressInfos( pAddressInfos_ )
+ , pVendorInfos( pVendorInfos_ )
+ , pVendorBinaryData( pVendorBinaryData_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT( DeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceFaultInfoEXT( VkDeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceFaultInfoEXT( *reinterpret_cast<DeviceFaultInfoEXT const *>( &rhs ) ) {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ DeviceFaultInfoEXT & operator=( DeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceFaultInfoEXT & operator=( VkDeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT & setDescription( std::array<char, VK_MAX_DESCRIPTION_SIZE> description_ ) VULKAN_HPP_NOEXCEPT
+ {
+ description = description_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT & setPAddressInfos( VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT * pAddressInfos_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pAddressInfos = pAddressInfos_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT & setPVendorInfos( VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT * pVendorInfos_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pVendorInfos = pVendorInfos_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT & setPVendorBinaryData( void * pVendorBinaryData_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pVendorBinaryData = pVendorBinaryData_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkDeviceFaultInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDeviceFaultInfoEXT *>( this );
+ }
+
+ operator VkDeviceFaultInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDeviceFaultInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &,
+ VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT * const &,
+ VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT * const &,
+ void * const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, description, pAddressInfos, pVendorInfos, pVendorBinaryData );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( DeviceFaultInfoEXT const & ) const = default;
+#else
+ bool operator==( DeviceFaultInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( description == rhs.description ) && ( pAddressInfos == rhs.pAddressInfos ) &&
+ ( pVendorInfos == rhs.pVendorInfos ) && ( pVendorBinaryData == rhs.pVendorBinaryData );
+# endif
+ }
+
+ bool operator!=( DeviceFaultInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceFaultInfoEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+ VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT * pAddressInfos = {};
+ VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT * pVendorInfos = {};
+ void * pVendorBinaryData = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eDeviceFaultInfoEXT>
+ {
+ using Type = DeviceFaultInfoEXT;
+ };
+
+ struct DeviceFaultVendorBinaryHeaderVersionOneEXT
+ {
+ using NativeType = VkDeviceFaultVendorBinaryHeaderVersionOneEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT(
+ uint32_t headerSize_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT headerVersion_ = VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT::eOne,
+ uint32_t vendorID_ = {},
+ uint32_t deviceID_ = {},
+ uint32_t driverVersion_ = {},
+ std::array<uint8_t, VK_UUID_SIZE> const & pipelineCacheUUID_ = {},
+ uint32_t applicationNameOffset_ = {},
+ uint32_t applicationVersion_ = {},
+ uint32_t engineNameOffset_ = {} ) VULKAN_HPP_NOEXCEPT
+ : headerSize( headerSize_ )
+ , headerVersion( headerVersion_ )
+ , vendorID( vendorID_ )
+ , deviceID( deviceID_ )
+ , driverVersion( driverVersion_ )
+ , pipelineCacheUUID( pipelineCacheUUID_ )
+ , applicationNameOffset( applicationNameOffset_ )
+ , applicationVersion( applicationVersion_ )
+ , engineNameOffset( engineNameOffset_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceFaultVendorBinaryHeaderVersionOneEXT( VkDeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : DeviceFaultVendorBinaryHeaderVersionOneEXT( *reinterpret_cast<DeviceFaultVendorBinaryHeaderVersionOneEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ DeviceFaultVendorBinaryHeaderVersionOneEXT & operator=( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceFaultVendorBinaryHeaderVersionOneEXT & operator=( VkDeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setHeaderSize( uint32_t headerSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ headerSize = headerSize_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT &
+ setHeaderVersion( VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT headerVersion_ ) VULKAN_HPP_NOEXCEPT
+ {
+ headerVersion = headerVersion_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setVendorID( uint32_t vendorID_ ) VULKAN_HPP_NOEXCEPT
+ {
+ vendorID = vendorID_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setDeviceID( uint32_t deviceID_ ) VULKAN_HPP_NOEXCEPT
+ {
+ deviceID = deviceID_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setDriverVersion( uint32_t driverVersion_ ) VULKAN_HPP_NOEXCEPT
+ {
+ driverVersion = driverVersion_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT &
+ setPipelineCacheUUID( std::array<uint8_t, VK_UUID_SIZE> pipelineCacheUUID_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pipelineCacheUUID = pipelineCacheUUID_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setApplicationNameOffset( uint32_t applicationNameOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ applicationNameOffset = applicationNameOffset_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setApplicationVersion( uint32_t applicationVersion_ ) VULKAN_HPP_NOEXCEPT
+ {
+ applicationVersion = applicationVersion_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setEngineNameOffset( uint32_t engineNameOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ engineNameOffset = engineNameOffset_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkDeviceFaultVendorBinaryHeaderVersionOneEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDeviceFaultVendorBinaryHeaderVersionOneEXT *>( this );
+ }
+
+ operator VkDeviceFaultVendorBinaryHeaderVersionOneEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDeviceFaultVendorBinaryHeaderVersionOneEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<uint32_t const &,
+ VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &,
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie(
+ headerSize, headerVersion, vendorID, deviceID, driverVersion, pipelineCacheUUID, applicationNameOffset, applicationVersion, engineNameOffset );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( DeviceFaultVendorBinaryHeaderVersionOneEXT const & ) const = default;
+#else
+ bool operator==( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( headerSize == rhs.headerSize ) && ( headerVersion == rhs.headerVersion ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) &&
+ ( driverVersion == rhs.driverVersion ) && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) &&
+ ( applicationNameOffset == rhs.applicationNameOffset ) && ( applicationVersion == rhs.applicationVersion ) &&
+ ( engineNameOffset == rhs.engineNameOffset );
+# endif
+ }
+
+ bool operator!=( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ uint32_t headerSize = {};
+ VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT headerVersion = VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT::eOne;
+ uint32_t vendorID = {};
+ uint32_t deviceID = {};
+ uint32_t driverVersion = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> pipelineCacheUUID = {};
+ uint32_t applicationNameOffset = {};
+ uint32_t applicationVersion = {};
+ uint32_t engineNameOffset = {};
+ };
+
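+  // ------------------------------------------------------------------------------------
+  // Editorial sketch, not part of the generated header added above: the DeviceFaultCountsEXT /
+  // DeviceFaultInfoEXT wrappers introduced in this hunk back the VK_EXT_device_fault query.
+  // A minimal, hedged example of the usual two-call pattern using the C entry point
+  // vkGetDeviceFaultInfoEXT (assumes the extension is enabled, the function is available,
+  // <vector> is included, and the call is made after a VK_ERROR_DEVICE_LOST result):
+  //
+  //   VkDeviceFaultCountsEXT counts{};
+  //   counts.sType = VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT;
+  //   vkGetDeviceFaultInfoEXT( device, &counts, nullptr );   // first call: query counts only
+  //
+  //   std::vector<VkDeviceFaultAddressInfoEXT> addresses( counts.addressInfoCount );
+  //   std::vector<VkDeviceFaultVendorInfoEXT>  vendors( counts.vendorInfoCount );
+  //
+  //   VkDeviceFaultInfoEXT info{};
+  //   info.sType         = VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT;
+  //   info.pAddressInfos = addresses.data();
+  //   info.pVendorInfos  = vendors.data();
+  //   vkGetDeviceFaultInfoEXT( device, &counts, &info );     // second call: fill fault data
+  //   // info.description now holds a human-readable fault summary from the driver.
+  // ------------------------------------------------------------------------------------
+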
struct DeviceGroupBindSparseInfo
{
using NativeType = VkDeviceGroupBindSparseInfo;
@@ -24049,18 +24147,20 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupBindSparseInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( uint32_t resourceDeviceIndex_ = {},
- uint32_t memoryDeviceIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : resourceDeviceIndex( resourceDeviceIndex_ )
+ VULKAN_HPP_CONSTEXPR
+ DeviceGroupBindSparseInfo( uint32_t resourceDeviceIndex_ = {}, uint32_t memoryDeviceIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , resourceDeviceIndex( resourceDeviceIndex_ )
, memoryDeviceIndex( memoryDeviceIndex_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DeviceGroupBindSparseInfo( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceGroupBindSparseInfo( *reinterpret_cast<DeviceGroupBindSparseInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DeviceGroupBindSparseInfo & operator=( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -24078,32 +24178,30 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo &
- setResourceDeviceIndex( uint32_t resourceDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setResourceDeviceIndex( uint32_t resourceDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
{
resourceDeviceIndex = resourceDeviceIndex_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo &
- setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
{
memoryDeviceIndex = memoryDeviceIndex_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDeviceGroupBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDeviceGroupBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceGroupBindSparseInfo *>( this );
}
- explicit operator VkDeviceGroupBindSparseInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkDeviceGroupBindSparseInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceGroupBindSparseInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -24120,7 +24218,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DeviceGroupBindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( resourceDeviceIndex == rhs.resourceDeviceIndex ) &&
@@ -24140,13 +24238,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t resourceDeviceIndex = {};
uint32_t memoryDeviceIndex = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo ) ==
- sizeof( VkDeviceGroupBindSparseInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo>::value,
- "DeviceGroupBindSparseInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDeviceGroupBindSparseInfo>
@@ -24160,24 +24251,24 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDeviceGroupCommandBufferBeginInfo;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eDeviceGroupCommandBufferBeginInfo;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupCommandBufferBeginInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT
- : deviceMask( deviceMask_ )
- {}
+ VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( uint32_t deviceMask_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , deviceMask( deviceMask_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DeviceGroupCommandBufferBeginInfo( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceGroupCommandBufferBeginInfo( *reinterpret_cast<DeviceGroupCommandBufferBeginInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DeviceGroupCommandBufferBeginInfo &
- operator=( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DeviceGroupCommandBufferBeginInfo & operator=( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceGroupCommandBufferBeginInfo & operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -24192,25 +24283,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo &
- setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
{
deviceMask = deviceMask_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDeviceGroupCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDeviceGroupCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceGroupCommandBufferBeginInfo *>( this );
}
- explicit operator VkDeviceGroupCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkDeviceGroupCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceGroupCommandBufferBeginInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -24227,7 +24317,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMask == rhs.deviceMask );
@@ -24245,14 +24335,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
uint32_t deviceMask = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo ) ==
- sizeof( VkDeviceGroupCommandBufferBeginInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo>::value,
- "DeviceGroupCommandBufferBeginInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDeviceGroupCommandBufferBeginInfo>
@@ -24269,27 +24351,28 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupDeviceCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo(
- uint32_t physicalDeviceCount_ = {},
- const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ = {} ) VULKAN_HPP_NOEXCEPT
- : physicalDeviceCount( physicalDeviceCount_ )
+ VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( uint32_t physicalDeviceCount_ = {},
+ const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , physicalDeviceCount( physicalDeviceCount_ )
, pPhysicalDevices( pPhysicalDevices_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DeviceGroupDeviceCreateInfo( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceGroupDeviceCreateInfo( *reinterpret_cast<DeviceGroupDeviceCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- DeviceGroupDeviceCreateInfo(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PhysicalDevice> const &
- physicalDevices_ )
- : physicalDeviceCount( static_cast<uint32_t>( physicalDevices_.size() ) )
- , pPhysicalDevices( physicalDevices_.data() )
- {}
+ DeviceGroupDeviceCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PhysicalDevice> const & physicalDevices_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ ), physicalDeviceCount( static_cast<uint32_t>( physicalDevices_.size() ) ), pPhysicalDevices( physicalDevices_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -24308,15 +24391,14 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo &
- setPhysicalDeviceCount( uint32_t physicalDeviceCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & setPhysicalDeviceCount( uint32_t physicalDeviceCount_ ) VULKAN_HPP_NOEXCEPT
{
physicalDeviceCount = physicalDeviceCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo &
- setPPhysicalDevices( const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ ) VULKAN_HPP_NOEXCEPT
+ setPPhysicalDevices( const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ ) VULKAN_HPP_NOEXCEPT
{
pPhysicalDevices = pPhysicalDevices_;
return *this;
@@ -24324,8 +24406,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DeviceGroupDeviceCreateInfo & setPhysicalDevices(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PhysicalDevice> const &
- physicalDevices_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PhysicalDevice> const & physicalDevices_ ) VULKAN_HPP_NOEXCEPT
{
physicalDeviceCount = static_cast<uint32_t>( physicalDevices_.size() );
pPhysicalDevices = physicalDevices_.data();
@@ -24334,24 +24415,21 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDeviceGroupDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDeviceGroupDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceGroupDeviceCreateInfo *>( this );
}
- explicit operator VkDeviceGroupDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkDeviceGroupDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceGroupDeviceCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- uint32_t const &,
- const VULKAN_HPP_NAMESPACE::PhysicalDevice * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PhysicalDevice * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -24364,7 +24442,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) &&
@@ -24384,14 +24462,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t physicalDeviceCount = {};
const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo ) ==
- sizeof( VkDeviceGroupDeviceCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo>::value,
- "DeviceGroupDeviceCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDeviceGroupDeviceCreateInfo>
@@ -24405,27 +24475,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDeviceGroupPresentCapabilitiesKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eDeviceGroupPresentCapabilitiesKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR(
- std::array<uint32_t, VK_MAX_DEVICE_GROUP_SIZE> const & presentMask_ = {},
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {} ) VULKAN_HPP_NOEXCEPT
- : presentMask( presentMask_ )
+ VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( std::array<uint32_t, VK_MAX_DEVICE_GROUP_SIZE> const & presentMask_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , presentMask( presentMask_ )
, modes( modes_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14
- DeviceGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceGroupPresentCapabilitiesKHR( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceGroupPresentCapabilitiesKHR( *reinterpret_cast<DeviceGroupPresentCapabilitiesKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DeviceGroupPresentCapabilitiesKHR &
- operator=( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DeviceGroupPresentCapabilitiesKHR & operator=( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceGroupPresentCapabilitiesKHR & operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -24433,17 +24503,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkDeviceGroupPresentCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDeviceGroupPresentCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceGroupPresentCapabilitiesKHR *>( this );
}
- explicit operator VkDeviceGroupPresentCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkDeviceGroupPresentCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -24463,11 +24533,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentMask == rhs.presentMask ) &&
- ( modes == rhs.modes );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentMask == rhs.presentMask ) && ( modes == rhs.modes );
# endif
}
@@ -24478,19 +24547,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, VK_MAX_DEVICE_GROUP_SIZE> presentMask = {};
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR ) ==
- sizeof( VkDeviceGroupPresentCapabilitiesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::value,
- "DeviceGroupPresentCapabilitiesKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDeviceGroupPresentCapabilitiesKHR>
@@ -24506,31 +24567,32 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- DeviceGroupPresentInfoKHR( uint32_t swapchainCount_ = {},
- const uint32_t * pDeviceMasks_ = {},
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ =
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal ) VULKAN_HPP_NOEXCEPT
- : swapchainCount( swapchainCount_ )
+ VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR(
+ uint32_t swapchainCount_ = {},
+ const uint32_t * pDeviceMasks_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , swapchainCount( swapchainCount_ )
, pDeviceMasks( pDeviceMasks_ )
, mode( mode_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DeviceGroupPresentInfoKHR( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceGroupPresentInfoKHR( *reinterpret_cast<DeviceGroupPresentInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DeviceGroupPresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceMasks_,
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ =
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal )
- : swapchainCount( static_cast<uint32_t>( deviceMasks_.size() ) )
- , pDeviceMasks( deviceMasks_.data() )
- , mode( mode_ )
- {}
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ ), swapchainCount( static_cast<uint32_t>( deviceMasks_.size() ) ), pDeviceMasks( deviceMasks_.data() ), mode( mode_ )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -24549,23 +24611,20 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR &
- setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
{
swapchainCount = swapchainCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR &
- setPDeviceMasks( const uint32_t * pDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setPDeviceMasks( const uint32_t * pDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
{
pDeviceMasks = pDeviceMasks_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- DeviceGroupPresentInfoKHR & setDeviceMasks(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceMasks_ ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupPresentInfoKHR & setDeviceMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceMasks_ ) VULKAN_HPP_NOEXCEPT
{
swapchainCount = static_cast<uint32_t>( deviceMasks_.size() );
pDeviceMasks = deviceMasks_.data();
@@ -24573,25 +24632,24 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR &
- setMode( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setMode( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ ) VULKAN_HPP_NOEXCEPT
{
mode = mode_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDeviceGroupPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDeviceGroupPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceGroupPresentInfoKHR *>( this );
}
- explicit operator VkDeviceGroupPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkDeviceGroupPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceGroupPresentInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -24612,11 +24670,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) &&
- ( pDeviceMasks == rhs.pDeviceMasks ) && ( mode == rhs.mode );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pDeviceMasks == rhs.pDeviceMasks ) &&
+ ( mode == rhs.mode );
# endif
}
@@ -24631,16 +24689,8 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
uint32_t swapchainCount = {};
const uint32_t * pDeviceMasks = {};
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode =
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal;
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR ) ==
- sizeof( VkDeviceGroupPresentInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR>::value,
- "DeviceGroupPresentInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDeviceGroupPresentInfoKHR>
@@ -24656,35 +24706,38 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupRenderPassBeginInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_ = {},
- uint32_t deviceRenderAreaCount_ = {},
- const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ = {} )
- VULKAN_HPP_NOEXCEPT
- : deviceMask( deviceMask_ )
+ VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_ = {},
+ uint32_t deviceRenderAreaCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , deviceMask( deviceMask_ )
, deviceRenderAreaCount( deviceRenderAreaCount_ )
, pDeviceRenderAreas( pDeviceRenderAreas_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DeviceGroupRenderPassBeginInfo( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceGroupRenderPassBeginInfo( *reinterpret_cast<DeviceGroupRenderPassBeginInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- DeviceGroupRenderPassBeginInfo(
- uint32_t deviceMask_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & deviceRenderAreas_ )
- : deviceMask( deviceMask_ )
+ DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & deviceRenderAreas_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , deviceMask( deviceMask_ )
, deviceRenderAreaCount( static_cast<uint32_t>( deviceRenderAreas_.size() ) )
, pDeviceRenderAreas( deviceRenderAreas_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DeviceGroupRenderPassBeginInfo &
- operator=( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DeviceGroupRenderPassBeginInfo & operator=( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceGroupRenderPassBeginInfo & operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -24705,24 +24758,22 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo &
- setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT
{
deviceRenderAreaCount = deviceRenderAreaCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo &
- setPDeviceRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT
+ setPDeviceRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT
{
pDeviceRenderAreas = pDeviceRenderAreas_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- DeviceGroupRenderPassBeginInfo & setDeviceRenderAreas(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & deviceRenderAreas_ )
- VULKAN_HPP_NOEXCEPT
+ DeviceGroupRenderPassBeginInfo &
+ setDeviceRenderAreas( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & deviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT
{
deviceRenderAreaCount = static_cast<uint32_t>( deviceRenderAreas_.size() );
pDeviceRenderAreas = deviceRenderAreas_.data();
@@ -24731,25 +24782,22 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDeviceGroupRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDeviceGroupRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceGroupRenderPassBeginInfo *>( this );
}
- explicit operator VkDeviceGroupRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkDeviceGroupRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceGroupRenderPassBeginInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- uint32_t const &,
- uint32_t const &,
- const VULKAN_HPP_NAMESPACE::Rect2D * const &>
+ std::
+ tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Rect2D * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -24762,11 +24810,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMask == rhs.deviceMask ) &&
- ( deviceRenderAreaCount == rhs.deviceRenderAreaCount ) && ( pDeviceRenderAreas == rhs.pDeviceRenderAreas );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMask == rhs.deviceMask ) && ( deviceRenderAreaCount == rhs.deviceRenderAreaCount ) &&
+ ( pDeviceRenderAreas == rhs.pDeviceRenderAreas );
# endif
}
@@ -24783,14 +24831,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t deviceRenderAreaCount = {};
const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo ) ==
- sizeof( VkDeviceGroupRenderPassBeginInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo>::value,
- "DeviceGroupRenderPassBeginInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDeviceGroupRenderPassBeginInfo>
@@ -24807,39 +24847,44 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSubmitInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- DeviceGroupSubmitInfo( uint32_t waitSemaphoreCount_ = {},
- const uint32_t * pWaitSemaphoreDeviceIndices_ = {},
- uint32_t commandBufferCount_ = {},
- const uint32_t * pCommandBufferDeviceMasks_ = {},
- uint32_t signalSemaphoreCount_ = {},
- const uint32_t * pSignalSemaphoreDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT
- : waitSemaphoreCount( waitSemaphoreCount_ )
+ VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( uint32_t waitSemaphoreCount_ = {},
+ const uint32_t * pWaitSemaphoreDeviceIndices_ = {},
+ uint32_t commandBufferCount_ = {},
+ const uint32_t * pCommandBufferDeviceMasks_ = {},
+ uint32_t signalSemaphoreCount_ = {},
+ const uint32_t * pSignalSemaphoreDeviceIndices_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , waitSemaphoreCount( waitSemaphoreCount_ )
, pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ )
, commandBufferCount( commandBufferCount_ )
, pCommandBufferDeviceMasks( pCommandBufferDeviceMasks_ )
, signalSemaphoreCount( signalSemaphoreCount_ )
, pSignalSemaphoreDeviceIndices( pSignalSemaphoreDeviceIndices_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceGroupSubmitInfo( *reinterpret_cast<DeviceGroupSubmitInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- DeviceGroupSubmitInfo(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & waitSemaphoreDeviceIndices_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & commandBufferDeviceMasks_ = {},
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & signalSemaphoreDeviceIndices_ = {} )
- : waitSemaphoreCount( static_cast<uint32_t>( waitSemaphoreDeviceIndices_.size() ) )
+ DeviceGroupSubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & waitSemaphoreDeviceIndices_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & commandBufferDeviceMasks_ = {},
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & signalSemaphoreDeviceIndices_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , waitSemaphoreCount( static_cast<uint32_t>( waitSemaphoreDeviceIndices_.size() ) )
, pWaitSemaphoreDeviceIndices( waitSemaphoreDeviceIndices_.data() )
, commandBufferCount( static_cast<uint32_t>( commandBufferDeviceMasks_.size() ) )
, pCommandBufferDeviceMasks( commandBufferDeviceMasks_.data() )
, signalSemaphoreCount( static_cast<uint32_t>( signalSemaphoreDeviceIndices_.size() ) )
, pSignalSemaphoreDeviceIndices( signalSemaphoreDeviceIndices_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -24858,24 +24903,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo &
- setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
{
waitSemaphoreCount = waitSemaphoreCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo &
- setPWaitSemaphoreDeviceIndices( const uint32_t * pWaitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPWaitSemaphoreDeviceIndices( const uint32_t * pWaitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- DeviceGroupSubmitInfo & setWaitSemaphoreDeviceIndices(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & waitSemaphoreDeviceIndices_ )
- VULKAN_HPP_NOEXCEPT
+ DeviceGroupSubmitInfo &
+ setWaitSemaphoreDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & waitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
waitSemaphoreCount = static_cast<uint32_t>( waitSemaphoreDeviceIndices_.size() );
pWaitSemaphoreDeviceIndices = waitSemaphoreDeviceIndices_.data();
@@ -24883,24 +24925,21 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo &
- setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
{
commandBufferCount = commandBufferCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo &
- setPCommandBufferDeviceMasks( const uint32_t * pCommandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPCommandBufferDeviceMasks( const uint32_t * pCommandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
{
pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- DeviceGroupSubmitInfo & setCommandBufferDeviceMasks(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & commandBufferDeviceMasks_ )
- VULKAN_HPP_NOEXCEPT
+ DeviceGroupSubmitInfo &
+ setCommandBufferDeviceMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & commandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
{
commandBufferCount = static_cast<uint32_t>( commandBufferDeviceMasks_.size() );
pCommandBufferDeviceMasks = commandBufferDeviceMasks_.data();
@@ -24908,24 +24947,21 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo &
- setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
{
signalSemaphoreCount = signalSemaphoreCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo &
- setPSignalSemaphoreDeviceIndices( const uint32_t * pSignalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPSignalSemaphoreDeviceIndices( const uint32_t * pSignalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- DeviceGroupSubmitInfo & setSignalSemaphoreDeviceIndices(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & signalSemaphoreDeviceIndices_ )
- VULKAN_HPP_NOEXCEPT
+ DeviceGroupSubmitInfo &
+ setSignalSemaphoreDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & signalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
signalSemaphoreCount = static_cast<uint32_t>( signalSemaphoreDeviceIndices_.size() );
pSignalSemaphoreDeviceIndices = signalSemaphoreDeviceIndices_.data();
@@ -24934,17 +24970,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDeviceGroupSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDeviceGroupSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceGroupSubmitInfo *>( this );
}
- explicit operator VkDeviceGroupSubmitInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkDeviceGroupSubmitInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceGroupSubmitInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -24975,14 +25011,12 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) &&
- ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices ) &&
- ( commandBufferCount == rhs.commandBufferCount ) &&
- ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks ) &&
- ( signalSemaphoreCount == rhs.signalSemaphoreCount ) &&
+ ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices ) && ( commandBufferCount == rhs.commandBufferCount ) &&
+ ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks ) && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) &&
( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices );
# endif
}
@@ -25003,12 +25037,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t signalSemaphoreCount = {};
const uint32_t * pSignalSemaphoreDeviceIndices = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo>::value,
- "DeviceGroupSubmitInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDeviceGroupSubmitInfo>
@@ -25022,24 +25050,25 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDeviceGroupSwapchainCreateInfoKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eDeviceGroupSwapchainCreateInfoKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSwapchainCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR(
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {} ) VULKAN_HPP_NOEXCEPT : modes( modes_ )
- {}
+ VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , modes( modes_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DeviceGroupSwapchainCreateInfoKHR( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceGroupSwapchainCreateInfoKHR( *reinterpret_cast<DeviceGroupSwapchainCreateInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DeviceGroupSwapchainCreateInfoKHR &
- operator=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DeviceGroupSwapchainCreateInfoKHR & operator=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceGroupSwapchainCreateInfoKHR & operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -25054,31 +25083,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR &
- setModes( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR & setModes( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ ) VULKAN_HPP_NOEXCEPT
{
modes = modes_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDeviceGroupSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDeviceGroupSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceGroupSwapchainCreateInfoKHR *>( this );
}
- explicit operator VkDeviceGroupSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkDeviceGroupSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceGroupSwapchainCreateInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -25091,7 +25117,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( modes == rhs.modes );
@@ -25109,14 +25135,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR ) ==
- sizeof( VkDeviceGroupSwapchainCreateInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR>::value,
- "DeviceGroupSwapchainCreateInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDeviceGroupSwapchainCreateInfoKHR>
@@ -25132,22 +25150,22 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ImageCreateInfo(
- VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D,
- VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::Extent3D extent_ = {},
- uint32_t mipLevels_ = {},
- uint32_t arrayLayers_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {},
- VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
- uint32_t queueFamilyIndexCount_ = {},
- const uint32_t * pQueueFamilyIndices_ = {},
- VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined )
- VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D,
+ VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::Extent3D extent_ = {},
+ uint32_t mipLevels_ = {},
+ uint32_t arrayLayers_ = {},
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {},
+ VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
+ uint32_t queueFamilyIndexCount_ = {},
+ const uint32_t * pQueueFamilyIndices_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, imageType( imageType_ )
, format( format_ )
, extent( extent_ )
@@ -25160,13 +25178,12 @@ namespace VULKAN_HPP_NAMESPACE
, queueFamilyIndexCount( queueFamilyIndexCount_ )
, pQueueFamilyIndices( pQueueFamilyIndices_ )
, initialLayout( initialLayout_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ImageCreateInfo( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ImageCreateInfo( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : ImageCreateInfo( *reinterpret_cast<ImageCreateInfo const *>( &rhs ) )
- {}
+ ImageCreateInfo( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCreateInfo( *reinterpret_cast<ImageCreateInfo const *>( &rhs ) ) {}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_,
@@ -25180,8 +25197,10 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_,
VULKAN_HPP_NAMESPACE::SharingMode sharingMode_,
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_,
- VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined )
- : flags( flags_ )
+ VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, imageType( imageType_ )
, format( format_ )
, extent( extent_ )
@@ -25194,7 +25213,8 @@ namespace VULKAN_HPP_NAMESPACE
, queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) )
, pQueueFamilyIndices( queueFamilyIndices_.data() )
, initialLayout( initialLayout_ )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -25213,15 +25233,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo &
- setImageType( VULKAN_HPP_NAMESPACE::ImageType imageType_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setImageType( VULKAN_HPP_NAMESPACE::ImageType imageType_ ) VULKAN_HPP_NOEXCEPT
{
imageType = imageType_;
return *this;
@@ -25233,8 +25251,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo &
- setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
{
extent = extent_;
return *this;
@@ -25252,8 +25269,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo &
- setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
{
samples = samples_;
return *this;
@@ -25265,37 +25281,32 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo &
- setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
{
usage = usage_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo &
- setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
{
sharingMode = sharingMode_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo &
- setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
{
queueFamilyIndexCount = queueFamilyIndexCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo &
- setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
{
pQueueFamilyIndices = pQueueFamilyIndices_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- ImageCreateInfo & setQueueFamilyIndices(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+ ImageCreateInfo & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
{
queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
pQueueFamilyIndices = queueFamilyIndices_.data();
@@ -25303,25 +25314,24 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo &
- setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
{
initialLayout = initialLayout_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageCreateInfo *>( this );
}
- explicit operator VkImageCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkImageCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -25366,15 +25376,14 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( imageType == rhs.imageType ) && ( format == rhs.format ) && ( extent == rhs.extent ) &&
- ( mipLevels == rhs.mipLevels ) && ( arrayLayers == rhs.arrayLayers ) && ( samples == rhs.samples ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( imageType == rhs.imageType ) && ( format == rhs.format ) &&
+ ( extent == rhs.extent ) && ( mipLevels == rhs.mipLevels ) && ( arrayLayers == rhs.arrayLayers ) && ( samples == rhs.samples ) &&
( tiling == rhs.tiling ) && ( usage == rhs.usage ) && ( sharingMode == rhs.sharingMode ) &&
- ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) &&
- ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) && ( initialLayout == rhs.initialLayout );
+ ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) &&
+ ( initialLayout == rhs.initialLayout );
# endif
}
@@ -25401,12 +25410,6 @@ namespace VULKAN_HPP_NAMESPACE
const uint32_t * pQueueFamilyIndices = {};
VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCreateInfo ) == sizeof( VkImageCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageCreateInfo>::value,
- "ImageCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImageCreateInfo>
@@ -25424,22 +25427,23 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR
DeviceImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ = {},
- VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ =
- VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT
- : pCreateInfo( pCreateInfo_ )
+ VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pCreateInfo( pCreateInfo_ )
, planeAspect( planeAspect_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DeviceImageMemoryRequirements( DeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DeviceImageMemoryRequirements( DeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceImageMemoryRequirements( VkDeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceImageMemoryRequirements( *reinterpret_cast<DeviceImageMemoryRequirements const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DeviceImageMemoryRequirements &
- operator=( DeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DeviceImageMemoryRequirements & operator=( DeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceImageMemoryRequirements & operator=( VkDeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -25454,32 +25458,30 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements &
- setPCreateInfo( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPCreateInfo( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT
{
pCreateInfo = pCreateInfo_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements &
- setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
{
planeAspect = planeAspect_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDeviceImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDeviceImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceImageMemoryRequirements *>( this );
}
- explicit operator VkDeviceImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT
+ operator VkDeviceImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceImageMemoryRequirements *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -25499,11 +25501,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DeviceImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pCreateInfo == rhs.pCreateInfo ) &&
- ( planeAspect == rhs.planeAspect );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pCreateInfo == rhs.pCreateInfo ) && ( planeAspect == rhs.planeAspect );
# endif
}
@@ -25519,14 +25520,6 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo = {};
VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements ) ==
- sizeof( VkDeviceImageMemoryRequirements ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements>::value,
- "DeviceImageMemoryRequirements is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDeviceImageMemoryRequirements>
@@ -25540,28 +25533,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDeviceMemoryOpaqueCaptureAddressInfo;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eDeviceMemoryOpaqueCaptureAddressInfo;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- DeviceMemoryOpaqueCaptureAddressInfo( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {} ) VULKAN_HPP_NOEXCEPT
- : memory( memory_ )
- {}
+ VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , memory( memory_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( DeviceMemoryOpaqueCaptureAddressInfo const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceMemoryOpaqueCaptureAddressInfo( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceMemoryOpaqueCaptureAddressInfo( *reinterpret_cast<DeviceMemoryOpaqueCaptureAddressInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DeviceMemoryOpaqueCaptureAddressInfo &
- operator=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DeviceMemoryOpaqueCaptureAddressInfo & operator=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DeviceMemoryOpaqueCaptureAddressInfo &
- operator=( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ DeviceMemoryOpaqueCaptureAddressInfo & operator=( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo const *>( &rhs );
return *this;
@@ -25574,31 +25566,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo &
- setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDeviceMemoryOpaqueCaptureAddressInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDeviceMemoryOpaqueCaptureAddressInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( this );
}
- explicit operator VkDeviceMemoryOpaqueCaptureAddressInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkDeviceMemoryOpaqueCaptureAddressInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceMemoryOpaqueCaptureAddressInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::DeviceMemory const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -25611,7 +25600,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory );
@@ -25629,14 +25618,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo ) ==
- sizeof( VkDeviceMemoryOpaqueCaptureAddressInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo>::value,
- "DeviceMemoryOpaqueCaptureAddressInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDeviceMemoryOpaqueCaptureAddressInfo>
@@ -25650,68 +25631,63 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDeviceMemoryOverallocationCreateInfoAMD;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eDeviceMemoryOverallocationCreateInfoAMD;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD(
- VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ =
- VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault ) VULKAN_HPP_NOEXCEPT
- : overallocationBehavior( overallocationBehavior_ )
- {}
+ VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , overallocationBehavior( overallocationBehavior_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( DeviceMemoryOverallocationCreateInfoAMD const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- : DeviceMemoryOverallocationCreateInfoAMD(
- *reinterpret_cast<DeviceMemoryOverallocationCreateInfoAMD const *>( &rhs ) )
- {}
+ : DeviceMemoryOverallocationCreateInfoAMD( *reinterpret_cast<DeviceMemoryOverallocationCreateInfoAMD const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DeviceMemoryOverallocationCreateInfoAMD &
- operator=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DeviceMemoryOverallocationCreateInfoAMD & operator=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DeviceMemoryOverallocationCreateInfoAMD &
- operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ DeviceMemoryOverallocationCreateInfoAMD & operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD & setOverallocationBehavior(
- VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD &
+ setOverallocationBehavior( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ ) VULKAN_HPP_NOEXCEPT
{
overallocationBehavior = overallocationBehavior_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDeviceMemoryOverallocationCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDeviceMemoryOverallocationCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceMemoryOverallocationCreateInfoAMD *>( this );
}
- explicit operator VkDeviceMemoryOverallocationCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
+ operator VkDeviceMemoryOverallocationCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceMemoryOverallocationCreateInfoAMD *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -25724,11 +25700,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( overallocationBehavior == rhs.overallocationBehavior );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( overallocationBehavior == rhs.overallocationBehavior );
# endif
}
@@ -25739,20 +25714,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior =
- VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD ) ==
- sizeof( VkDeviceMemoryOverallocationCreateInfoAMD ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD>::value,
- "DeviceMemoryOverallocationCreateInfoAMD is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDeviceMemoryOverallocationCreateInfoAMD>
@@ -25765,38 +25730,38 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDeviceMemoryReportCallbackDataEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eDeviceMemoryReportCallbackDataEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryReportCallbackDataEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT(
- VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {},
- VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type_ =
- VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate,
- uint64_t memoryObjectId_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
- VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown,
- uint64_t objectHandle_ = {},
- uint32_t heapIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type_ = VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate,
+ uint64_t memoryObjectId_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
+ VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown,
+ uint64_t objectHandle_ = {},
+ uint32_t heapIndex_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, type( type_ )
, memoryObjectId( memoryObjectId_ )
, size( size_ )
, objectType( objectType_ )
, objectHandle( objectHandle_ )
, heapIndex( heapIndex_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DeviceMemoryReportCallbackDataEXT( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceMemoryReportCallbackDataEXT( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DeviceMemoryReportCallbackDataEXT( *reinterpret_cast<DeviceMemoryReportCallbackDataEXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DeviceMemoryReportCallbackDataEXT &
- operator=( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DeviceMemoryReportCallbackDataEXT & operator=( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceMemoryReportCallbackDataEXT & operator=( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -25804,17 +25769,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkDeviceMemoryReportCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDeviceMemoryReportCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceMemoryReportCallbackDataEXT *>( this );
}
- explicit operator VkDeviceMemoryReportCallbackDataEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkDeviceMemoryReportCallbackDataEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceMemoryReportCallbackDataEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -25839,12 +25804,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( type == rhs.type ) &&
- ( memoryObjectId == rhs.memoryObjectId ) && ( size == rhs.size ) && ( objectType == rhs.objectType ) &&
- ( objectHandle == rhs.objectHandle ) && ( heapIndex == rhs.heapIndex );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( type == rhs.type ) && ( memoryObjectId == rhs.memoryObjectId ) &&
+ ( size == rhs.size ) && ( objectType == rhs.objectType ) && ( objectHandle == rhs.objectHandle ) && ( heapIndex == rhs.heapIndex );
# endif
}
@@ -25855,25 +25819,16 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryReportCallbackDataEXT;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {};
- VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type =
- VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate;
- uint64_t memoryObjectId = {};
- VULKAN_HPP_NAMESPACE::DeviceSize size = {};
- VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
- uint64_t objectHandle = {};
- uint32_t heapIndex = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryReportCallbackDataEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {};
+ VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type = VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate;
+ uint64_t memoryObjectId = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+ VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
+ uint64_t objectHandle = {};
+ uint32_t heapIndex = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT ) ==
- sizeof( VkDeviceMemoryReportCallbackDataEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT>::value,
- "DeviceMemoryReportCallbackDataEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDeviceMemoryReportCallbackDataEXT>
@@ -25889,16 +25844,18 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDevicePrivateDataCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfo( uint32_t privateDataSlotRequestCount_ = {} ) VULKAN_HPP_NOEXCEPT
- : privateDataSlotRequestCount( privateDataSlotRequestCount_ )
- {}
+ VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfo( uint32_t privateDataSlotRequestCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , privateDataSlotRequestCount( privateDataSlotRequestCount_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DevicePrivateDataCreateInfo( DevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfo( DevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DevicePrivateDataCreateInfo( VkDevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: DevicePrivateDataCreateInfo( *reinterpret_cast<DevicePrivateDataCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DevicePrivateDataCreateInfo & operator=( DevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -25916,25 +25873,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfo &
- setPrivateDataSlotRequestCount( uint32_t privateDataSlotRequestCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfo & setPrivateDataSlotRequestCount( uint32_t privateDataSlotRequestCount_ ) VULKAN_HPP_NOEXCEPT
{
privateDataSlotRequestCount = privateDataSlotRequestCount_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDevicePrivateDataCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDevicePrivateDataCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDevicePrivateDataCreateInfo *>( this );
}
- explicit operator VkDevicePrivateDataCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkDevicePrivateDataCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDevicePrivateDataCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -25951,11 +25907,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DevicePrivateDataCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( privateDataSlotRequestCount == rhs.privateDataSlotRequestCount );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( privateDataSlotRequestCount == rhs.privateDataSlotRequestCount );
# endif
}
@@ -25970,14 +25925,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
uint32_t privateDataSlotRequestCount = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo ) ==
- sizeof( VkDevicePrivateDataCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo>::value,
- "DevicePrivateDataCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDevicePrivateDataCreateInfo>
@@ -25991,30 +25938,28 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDeviceQueueGlobalPriorityCreateInfoKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR
- DeviceQueueGlobalPriorityCreateInfoKHR( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR globalPriority_ =
- VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow ) VULKAN_HPP_NOEXCEPT
- : globalPriority( globalPriority_ )
- {}
+ DeviceQueueGlobalPriorityCreateInfoKHR( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR globalPriority_ = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , globalPriority( globalPriority_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoKHR( DeviceQueueGlobalPriorityCreateInfoKHR const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoKHR( DeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DeviceQueueGlobalPriorityCreateInfoKHR( VkDeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : DeviceQueueGlobalPriorityCreateInfoKHR(
- *reinterpret_cast<DeviceQueueGlobalPriorityCreateInfoKHR const *>( &rhs ) )
- {}
+ : DeviceQueueGlobalPriorityCreateInfoKHR( *reinterpret_cast<DeviceQueueGlobalPriorityCreateInfoKHR const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DeviceQueueGlobalPriorityCreateInfoKHR &
- operator=( DeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DeviceQueueGlobalPriorityCreateInfoKHR & operator=( DeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DeviceQueueGlobalPriorityCreateInfoKHR &
- operator=( VkDeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ DeviceQueueGlobalPriorityCreateInfoKHR & operator=( VkDeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR const *>( &rhs );
return *this;
@@ -26028,30 +25973,28 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfoKHR &
- setGlobalPriority( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR globalPriority_ ) VULKAN_HPP_NOEXCEPT
+ setGlobalPriority( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR globalPriority_ ) VULKAN_HPP_NOEXCEPT
{
globalPriority = globalPriority_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDeviceQueueGlobalPriorityCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDeviceQueueGlobalPriorityCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceQueueGlobalPriorityCreateInfoKHR *>( this );
}
- explicit operator VkDeviceQueueGlobalPriorityCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkDeviceQueueGlobalPriorityCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceQueueGlobalPriorityCreateInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -26064,7 +26007,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( globalPriority == rhs.globalPriority );
@@ -26078,19 +26021,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR;
+ const void * pNext = {};
VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR globalPriority = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR ) ==
- sizeof( VkDeviceQueueGlobalPriorityCreateInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR>::value,
- "DeviceQueueGlobalPriorityCreateInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR>
@@ -26109,17 +26043,18 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {},
uint32_t queueFamilyIndex_ = {},
- uint32_t queueIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ uint32_t queueIndex_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, queueFamilyIndex( queueFamilyIndex_ )
, queueIndex( queueIndex_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : DeviceQueueInfo2( *reinterpret_cast<DeviceQueueInfo2 const *>( &rhs ) )
- {}
+ DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceQueueInfo2( *reinterpret_cast<DeviceQueueInfo2 const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DeviceQueueInfo2 & operator=( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -26137,8 +26072,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 &
- setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -26157,17 +26091,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDeviceQueueInfo2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDeviceQueueInfo2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDeviceQueueInfo2 *>( this );
}
- explicit operator VkDeviceQueueInfo2 &() VULKAN_HPP_NOEXCEPT
+ operator VkDeviceQueueInfo2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDeviceQueueInfo2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -26188,11 +26122,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( queueFamilyIndex == rhs.queueFamilyIndex ) && ( queueIndex == rhs.queueIndex );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) &&
+ ( queueIndex == rhs.queueIndex );
# endif
}
@@ -26209,12 +26143,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t queueFamilyIndex = {};
uint32_t queueIndex = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceQueueInfo2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceQueueInfo2>::value,
- "DeviceQueueInfo2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDeviceQueueInfo2>
@@ -26231,20 +26159,23 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectfbSurfaceCreateInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ = {},
- IDirectFB * dfb_ = {},
- IDirectFBSurface * surface_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ = {},
+ IDirectFB * dfb_ = {},
+ IDirectFBSurface * surface_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, dfb( dfb_ )
, surface( surface_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DirectFBSurfaceCreateInfoEXT( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DirectFBSurfaceCreateInfoEXT( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DirectFBSurfaceCreateInfoEXT( *reinterpret_cast<DirectFBSurfaceCreateInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DirectFBSurfaceCreateInfoEXT & operator=( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -26262,8 +26193,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT &
- setFlags( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -26282,17 +26212,17 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDirectFBSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDirectFBSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( this );
}
- explicit operator VkDirectFBSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkDirectFBSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDirectFBSurfaceCreateInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -26313,11 +26243,10 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( DirectFBSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dfb == rhs.dfb ) &&
- ( surface == rhs.surface );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dfb == rhs.dfb ) && ( surface == rhs.surface );
# endif
}
@@ -26334,14 +26263,6 @@ namespace VULKAN_HPP_NAMESPACE
IDirectFB * dfb = {};
IDirectFBSurface * surface = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT ) ==
- sizeof( VkDirectFBSurfaceCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT>::value,
- "DirectFBSurfaceCreateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDirectfbSurfaceCreateInfoEXT>
@@ -26355,18 +26276,19 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDispatchIndirectCommand;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- DispatchIndirectCommand( uint32_t x_ = {}, uint32_t y_ = {}, uint32_t z_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( uint32_t x_ = {}, uint32_t y_ = {}, uint32_t z_ = {} ) VULKAN_HPP_NOEXCEPT
: x( x_ )
, y( y_ )
, z( z_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
: DispatchIndirectCommand( *reinterpret_cast<DispatchIndirectCommand const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DispatchIndirectCommand & operator=( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -26397,17 +26319,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDispatchIndirectCommand const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDispatchIndirectCommand const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDispatchIndirectCommand *>( this );
}
- explicit operator VkDispatchIndirectCommand &() VULKAN_HPP_NOEXCEPT
+ operator VkDispatchIndirectCommand &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDispatchIndirectCommand *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -26424,7 +26346,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z );
@@ -26442,13 +26364,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t y = {};
uint32_t z = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DispatchIndirectCommand ) ==
- sizeof( VkDispatchIndirectCommand ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand>::value,
- "DispatchIndirectCommand is not nothrow_move_constructible!" );
struct DisplayEventInfoEXT
{
@@ -26459,16 +26374,18 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR
- DisplayEventInfoEXT( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ =
- VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut ) VULKAN_HPP_NOEXCEPT
- : displayEvent( displayEvent_ )
- {}
+ DisplayEventInfoEXT( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , displayEvent( displayEvent_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : DisplayEventInfoEXT( *reinterpret_cast<DisplayEventInfoEXT const *>( &rhs ) )
- {}
+ DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayEventInfoEXT( *reinterpret_cast<DisplayEventInfoEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DisplayEventInfoEXT & operator=( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -26486,31 +26403,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT &
- setDisplayEvent( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT & setDisplayEvent( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ ) VULKAN_HPP_NOEXCEPT
{
displayEvent = displayEvent_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDisplayEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDisplayEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayEventInfoEXT *>( this );
}
- explicit operator VkDisplayEventInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkDisplayEventInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayEventInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -26523,7 +26437,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayEvent == rhs.displayEvent );
@@ -26541,12 +26455,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT>::value,
- "DisplayEventInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDisplayEventInfoEXT>
@@ -26559,17 +26467,18 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDisplayModeParametersKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ = {},
- uint32_t refreshRate_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ = {}, uint32_t refreshRate_ = {} ) VULKAN_HPP_NOEXCEPT
: visibleRegion( visibleRegion_ )
, refreshRate( refreshRate_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplayModeParametersKHR( *reinterpret_cast<DisplayModeParametersKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DisplayModeParametersKHR & operator=( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -26581,8 +26490,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR &
- setVisibleRegion( VULKAN_HPP_NAMESPACE::Extent2D const & visibleRegion_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR & setVisibleRegion( VULKAN_HPP_NAMESPACE::Extent2D const & visibleRegion_ ) VULKAN_HPP_NOEXCEPT
{
visibleRegion = visibleRegion_;
return *this;
@@ -26595,17 +26503,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDisplayModeParametersKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDisplayModeParametersKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayModeParametersKHR *>( this );
}
- explicit operator VkDisplayModeParametersKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkDisplayModeParametersKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayModeParametersKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -26622,7 +26530,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( visibleRegion == rhs.visibleRegion ) && ( refreshRate == rhs.refreshRate );
@@ -26639,13 +26547,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Extent2D visibleRegion = {};
uint32_t refreshRate = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR ) ==
- sizeof( VkDisplayModeParametersKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR>::value,
- "DisplayModeParametersKHR is not nothrow_move_constructible!" );
struct DisplayModeCreateInfoKHR
{
@@ -26655,18 +26556,21 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- DisplayModeCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ = {},
- VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ = {},
+ VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, parameters( parameters_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplayModeCreateInfoKHR( *reinterpret_cast<DisplayModeCreateInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DisplayModeCreateInfoKHR & operator=( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -26684,32 +26588,30 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR &
- setFlags( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR &
- setParameters( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const & parameters_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setParameters( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const & parameters_ ) VULKAN_HPP_NOEXCEPT
{
parameters = parameters_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDisplayModeCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDisplayModeCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( this );
}
- explicit operator VkDisplayModeCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkDisplayModeCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayModeCreateInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -26729,11 +26631,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( parameters == rhs.parameters );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( parameters == rhs.parameters );
# endif
}
@@ -26749,13 +26650,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags = {};
VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR ) ==
- sizeof( VkDisplayModeCreateInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR>::value,
- "DisplayModeCreateInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDisplayModeCreateInfoKHR>
@@ -26768,18 +26662,19 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDisplayModePropertiesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- DisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {},
- VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {},
+ VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT
: displayMode( displayMode_ )
, parameters( parameters_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplayModePropertiesKHR( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplayModePropertiesKHR( *reinterpret_cast<DisplayModePropertiesKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DisplayModePropertiesKHR & operator=( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -26790,17 +26685,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkDisplayModePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDisplayModePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayModePropertiesKHR *>( this );
}
- explicit operator VkDisplayModePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkDisplayModePropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayModePropertiesKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -26817,7 +26712,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( displayMode == rhs.displayMode ) && ( parameters == rhs.parameters );
@@ -26834,13 +26729,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {};
VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR ) ==
- sizeof( VkDisplayModePropertiesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR>::value,
- "DisplayModePropertiesKHR is not nothrow_move_constructible!" );
struct DisplayModeProperties2KHR
{
@@ -26850,17 +26738,19 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeProperties2KHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR(
- VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties_ = {} ) VULKAN_HPP_NOEXCEPT
- : displayModeProperties( displayModeProperties_ )
- {}
+ VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , displayModeProperties( displayModeProperties_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DisplayModeProperties2KHR( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplayModeProperties2KHR( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplayModeProperties2KHR( *reinterpret_cast<DisplayModeProperties2KHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DisplayModeProperties2KHR & operator=( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -26871,23 +26761,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkDisplayModeProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDisplayModeProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayModeProperties2KHR *>( this );
}
- explicit operator VkDisplayModeProperties2KHR &() VULKAN_HPP_NOEXCEPT
+ operator VkDisplayModeProperties2KHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayModeProperties2KHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -26900,7 +26788,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayModeProperties == rhs.displayModeProperties );
@@ -26918,13 +26806,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR ) ==
- sizeof( VkDisplayModeProperties2KHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>::value,
- "DisplayModeProperties2KHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDisplayModeProperties2KHR>
@@ -26937,45 +26818,43 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDisplayNativeHdrSurfaceCapabilitiesAMD;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD(
- VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport_ = {} ) VULKAN_HPP_NOEXCEPT
- : localDimmingSupport( localDimmingSupport_ )
- {}
+ VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , localDimmingSupport( localDimmingSupport_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplayNativeHdrSurfaceCapabilitiesAMD( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- : DisplayNativeHdrSurfaceCapabilitiesAMD(
- *reinterpret_cast<DisplayNativeHdrSurfaceCapabilitiesAMD const *>( &rhs ) )
- {}
+ : DisplayNativeHdrSurfaceCapabilitiesAMD( *reinterpret_cast<DisplayNativeHdrSurfaceCapabilitiesAMD const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DisplayNativeHdrSurfaceCapabilitiesAMD &
- operator=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DisplayNativeHdrSurfaceCapabilitiesAMD &
- operator=( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD const *>( &rhs );
return *this;
}
- explicit operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayNativeHdrSurfaceCapabilitiesAMD *>( this );
}
- explicit operator VkDisplayNativeHdrSurfaceCapabilitiesAMD &() VULKAN_HPP_NOEXCEPT
+ operator VkDisplayNativeHdrSurfaceCapabilitiesAMD &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayNativeHdrSurfaceCapabilitiesAMD *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -26992,7 +26871,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( localDimmingSupport == rhs.localDimmingSupport );
@@ -27010,15 +26889,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD ) ==
- sizeof( VkDisplayNativeHdrSurfaceCapabilitiesAMD ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD>::value,
- "DisplayNativeHdrSurfaceCapabilitiesAMD is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD>
@@ -27031,16 +26901,15 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDisplayPlaneCapabilitiesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- DisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha_ = {},
- VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition_ = {},
- VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent_ = {},
- VULKAN_HPP_NAMESPACE::Offset2D minDstPosition_ = {},
- VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D minDstExtent_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha_ = {},
+ VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition_ = {},
+ VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent_ = {},
+ VULKAN_HPP_NAMESPACE::Offset2D minDstPosition_ = {},
+ VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D minDstExtent_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent_ = {} ) VULKAN_HPP_NOEXCEPT
: supportedAlpha( supportedAlpha_ )
, minSrcPosition( minSrcPosition_ )
, maxSrcPosition( maxSrcPosition_ )
@@ -27050,14 +26919,15 @@ namespace VULKAN_HPP_NAMESPACE
, maxDstPosition( maxDstPosition_ )
, minDstExtent( minDstExtent_ )
, maxDstExtent( maxDstExtent_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DisplayPlaneCapabilitiesKHR( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplayPlaneCapabilitiesKHR( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplayPlaneCapabilitiesKHR( *reinterpret_cast<DisplayPlaneCapabilitiesKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DisplayPlaneCapabilitiesKHR & operator=( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -27068,17 +26938,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkDisplayPlaneCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDisplayPlaneCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayPlaneCapabilitiesKHR *>( this );
}
- explicit operator VkDisplayPlaneCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkDisplayPlaneCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -27094,15 +26964,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( supportedAlpha,
- minSrcPosition,
- maxSrcPosition,
- minSrcExtent,
- maxSrcExtent,
- minDstPosition,
- maxDstPosition,
- minDstExtent,
- maxDstExtent );
+ return std::tie( supportedAlpha, minSrcPosition, maxSrcPosition, minSrcExtent, maxSrcExtent, minDstPosition, maxDstPosition, minDstExtent, maxDstExtent );
}
#endif
@@ -27111,14 +26973,12 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( supportedAlpha == rhs.supportedAlpha ) && ( minSrcPosition == rhs.minSrcPosition ) &&
- ( maxSrcPosition == rhs.maxSrcPosition ) && ( minSrcExtent == rhs.minSrcExtent ) &&
- ( maxSrcExtent == rhs.maxSrcExtent ) && ( minDstPosition == rhs.minDstPosition ) &&
- ( maxDstPosition == rhs.maxDstPosition ) && ( minDstExtent == rhs.minDstExtent ) &&
- ( maxDstExtent == rhs.maxDstExtent );
+ return ( supportedAlpha == rhs.supportedAlpha ) && ( minSrcPosition == rhs.minSrcPosition ) && ( maxSrcPosition == rhs.maxSrcPosition ) &&
+ ( minSrcExtent == rhs.minSrcExtent ) && ( maxSrcExtent == rhs.maxSrcExtent ) && ( minDstPosition == rhs.minDstPosition ) &&
+ ( maxDstPosition == rhs.maxDstPosition ) && ( minDstExtent == rhs.minDstExtent ) && ( maxDstExtent == rhs.maxDstExtent );
# endif
}
@@ -27139,14 +26999,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Extent2D minDstExtent = {};
VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR ) ==
- sizeof( VkDisplayPlaneCapabilitiesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::value,
- "DisplayPlaneCapabilitiesKHR is not nothrow_move_constructible!" );
struct DisplayPlaneCapabilities2KHR
{
@@ -27156,17 +27008,19 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneCapabilities2KHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR(
- VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities_ = {} ) VULKAN_HPP_NOEXCEPT
- : capabilities( capabilities_ )
- {}
+ VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , capabilities( capabilities_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DisplayPlaneCapabilities2KHR( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplayPlaneCapabilities2KHR( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplayPlaneCapabilities2KHR( *reinterpret_cast<DisplayPlaneCapabilities2KHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DisplayPlaneCapabilities2KHR & operator=( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -27177,23 +27031,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkDisplayPlaneCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDisplayPlaneCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayPlaneCapabilities2KHR *>( this );
}
- explicit operator VkDisplayPlaneCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
+ operator VkDisplayPlaneCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -27206,7 +27058,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( capabilities == rhs.capabilities );
@@ -27224,14 +27076,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR ) ==
- sizeof( VkDisplayPlaneCapabilities2KHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::value,
- "DisplayPlaneCapabilities2KHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDisplayPlaneCapabilities2KHR>
@@ -27247,17 +27091,20 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneInfo2KHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ = {},
- uint32_t planeIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : mode( mode_ )
+ VULKAN_HPP_CONSTEXPR
+ DisplayPlaneInfo2KHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ = {}, uint32_t planeIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , mode( mode_ )
, planeIndex( planeIndex_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplayPlaneInfo2KHR( *reinterpret_cast<DisplayPlaneInfo2KHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DisplayPlaneInfo2KHR & operator=( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -27275,8 +27122,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR &
- setMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
{
mode = mode_;
return *this;
@@ -27289,24 +27135,21 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDisplayPlaneInfo2KHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDisplayPlaneInfo2KHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( this );
}
- explicit operator VkDisplayPlaneInfo2KHR &() VULKAN_HPP_NOEXCEPT
+ operator VkDisplayPlaneInfo2KHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayPlaneInfo2KHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::DisplayModeKHR const &,
- uint32_t const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DisplayModeKHR const &, uint32_t const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -27319,11 +27162,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mode == rhs.mode ) &&
- ( planeIndex == rhs.planeIndex );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mode == rhs.mode ) && ( planeIndex == rhs.planeIndex );
# endif
}
@@ -27339,12 +27181,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DisplayModeKHR mode = {};
uint32_t planeIndex = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR ) == sizeof( VkDisplayPlaneInfo2KHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR>::value,
- "DisplayPlaneInfo2KHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDisplayPlaneInfo2KHR>
@@ -27357,18 +27193,19 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDisplayPlanePropertiesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay_ = {},
- uint32_t currentStackIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay_ = {},
+ uint32_t currentStackIndex_ = {} ) VULKAN_HPP_NOEXCEPT
: currentDisplay( currentDisplay_ )
, currentStackIndex( currentStackIndex_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DisplayPlanePropertiesKHR( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplayPlanePropertiesKHR( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplayPlanePropertiesKHR( *reinterpret_cast<DisplayPlanePropertiesKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DisplayPlanePropertiesKHR & operator=( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -27379,17 +27216,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkDisplayPlanePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDisplayPlanePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayPlanePropertiesKHR *>( this );
}
- explicit operator VkDisplayPlanePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkDisplayPlanePropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayPlanePropertiesKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -27406,7 +27243,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( currentDisplay == rhs.currentDisplay ) && ( currentStackIndex == rhs.currentStackIndex );
@@ -27423,13 +27260,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay = {};
uint32_t currentStackIndex = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR ) ==
- sizeof( VkDisplayPlanePropertiesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR>::value,
- "DisplayPlanePropertiesKHR is not nothrow_move_constructible!" );
struct DisplayPlaneProperties2KHR
{
@@ -27439,17 +27269,19 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneProperties2KHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR(
- VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties_ = {} ) VULKAN_HPP_NOEXCEPT
- : displayPlaneProperties( displayPlaneProperties_ )
- {}
+ VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , displayPlaneProperties( displayPlaneProperties_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DisplayPlaneProperties2KHR( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplayPlaneProperties2KHR( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplayPlaneProperties2KHR( *reinterpret_cast<DisplayPlaneProperties2KHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DisplayPlaneProperties2KHR & operator=( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -27460,23 +27292,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkDisplayPlaneProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDisplayPlaneProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayPlaneProperties2KHR *>( this );
}
- explicit operator VkDisplayPlaneProperties2KHR &() VULKAN_HPP_NOEXCEPT
+ operator VkDisplayPlaneProperties2KHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayPlaneProperties2KHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -27489,11 +27319,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( displayPlaneProperties == rhs.displayPlaneProperties );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayPlaneProperties == rhs.displayPlaneProperties );
# endif
}
@@ -27508,13 +27337,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR ) ==
- sizeof( VkDisplayPlaneProperties2KHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR>::value,
- "DisplayPlaneProperties2KHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDisplayPlaneProperties2KHR>
@@ -27530,16 +27352,18 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPowerInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ =
- VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff ) VULKAN_HPP_NOEXCEPT
- : powerState( powerState_ )
- {}
+ VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , powerState( powerState_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : DisplayPowerInfoEXT( *reinterpret_cast<DisplayPowerInfoEXT const *>( &rhs ) )
- {}
+ DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayPowerInfoEXT( *reinterpret_cast<DisplayPowerInfoEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DisplayPowerInfoEXT & operator=( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -27557,31 +27381,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT &
- setPowerState( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT & setPowerState( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ ) VULKAN_HPP_NOEXCEPT
{
powerState = powerState_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDisplayPowerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDisplayPowerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayPowerInfoEXT *>( this );
}
- explicit operator VkDisplayPowerInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkDisplayPowerInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayPowerInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -27594,7 +27415,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( powerState == rhs.powerState );
@@ -27612,12 +27433,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT>::value,
- "DisplayPowerInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDisplayPowerInfoEXT>
@@ -27635,17 +27450,21 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( VULKAN_HPP_NAMESPACE::Rect2D srcRect_ = {},
VULKAN_HPP_NAMESPACE::Rect2D dstRect_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 persistent_ = {} ) VULKAN_HPP_NOEXCEPT
- : srcRect( srcRect_ )
+ VULKAN_HPP_NAMESPACE::Bool32 persistent_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , srcRect( srcRect_ )
, dstRect( dstRect_ )
, persistent( persistent_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplayPresentInfoKHR( *reinterpret_cast<DisplayPresentInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DisplayPresentInfoKHR & operator=( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -27663,39 +27482,36 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR &
- setSrcRect( VULKAN_HPP_NAMESPACE::Rect2D const & srcRect_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setSrcRect( VULKAN_HPP_NAMESPACE::Rect2D const & srcRect_ ) VULKAN_HPP_NOEXCEPT
{
srcRect = srcRect_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR &
- setDstRect( VULKAN_HPP_NAMESPACE::Rect2D const & dstRect_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setDstRect( VULKAN_HPP_NAMESPACE::Rect2D const & dstRect_ ) VULKAN_HPP_NOEXCEPT
{
dstRect = dstRect_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR &
- setPersistent( VULKAN_HPP_NAMESPACE::Bool32 persistent_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setPersistent( VULKAN_HPP_NAMESPACE::Bool32 persistent_ ) VULKAN_HPP_NOEXCEPT
{
persistent = persistent_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDisplayPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDisplayPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayPresentInfoKHR *>( this );
}
- explicit operator VkDisplayPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkDisplayPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayPresentInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -27716,11 +27532,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcRect == rhs.srcRect ) &&
- ( dstRect == rhs.dstRect ) && ( persistent == rhs.persistent );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcRect == rhs.srcRect ) && ( dstRect == rhs.dstRect ) && ( persistent == rhs.persistent );
# endif
}
@@ -27737,12 +27552,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Rect2D dstRect = {};
VULKAN_HPP_NAMESPACE::Bool32 persistent = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR>::value,
- "DisplayPresentInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDisplayPresentInfoKHR>
@@ -27755,14 +27564,13 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDisplayPropertiesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- DisplayPropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display_ = {},
- const char * displayName_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D physicalResolution_ = {},
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 persistentContent_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display_ = {},
+ const char * displayName_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D physicalResolution_ = {},
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 persistentContent_ = {} ) VULKAN_HPP_NOEXCEPT
: display( display_ )
, displayName( displayName_ )
, physicalDimensions( physicalDimensions_ )
@@ -27770,13 +27578,15 @@ namespace VULKAN_HPP_NAMESPACE
, supportedTransforms( supportedTransforms_ )
, planeReorderPossible( planeReorderPossible_ )
, persistentContent( persistentContent_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplayPropertiesKHR( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplayPropertiesKHR( *reinterpret_cast<DisplayPropertiesKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DisplayPropertiesKHR & operator=( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -27787,17 +27597,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkDisplayPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDisplayPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayPropertiesKHR *>( this );
}
- explicit operator VkDisplayPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkDisplayPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayPropertiesKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -27811,13 +27621,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( display,
- displayName,
- physicalDimensions,
- physicalResolution,
- supportedTransforms,
- planeReorderPossible,
- persistentContent );
+ return std::tie( display, displayName, physicalDimensions, physicalResolution, supportedTransforms, planeReorderPossible, persistentContent );
}
#endif
@@ -27846,11 +27650,10 @@ namespace VULKAN_HPP_NAMESPACE
bool operator==( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( display == rhs.display ) &&
- ( ( displayName == rhs.displayName ) || ( strcmp( displayName, rhs.displayName ) == 0 ) ) &&
+ return ( display == rhs.display ) && ( ( displayName == rhs.displayName ) || ( strcmp( displayName, rhs.displayName ) == 0 ) ) &&
( physicalDimensions == rhs.physicalDimensions ) && ( physicalResolution == rhs.physicalResolution ) &&
- ( supportedTransforms == rhs.supportedTransforms ) &&
- ( planeReorderPossible == rhs.planeReorderPossible ) && ( persistentContent == rhs.persistentContent );
+ ( supportedTransforms == rhs.supportedTransforms ) && ( planeReorderPossible == rhs.planeReorderPossible ) &&
+ ( persistentContent == rhs.persistentContent );
}
bool operator!=( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
@@ -27867,12 +27670,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible = {};
VULKAN_HPP_NAMESPACE::Bool32 persistentContent = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR>::value,
- "DisplayPropertiesKHR is not nothrow_move_constructible!" );
struct DisplayProperties2KHR
{
@@ -27882,16 +27679,19 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayProperties2KHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- DisplayProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties_ = {} ) VULKAN_HPP_NOEXCEPT
- : displayProperties( displayProperties_ )
- {}
+ VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , displayProperties( displayProperties_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplayProperties2KHR( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplayProperties2KHR( *reinterpret_cast<DisplayProperties2KHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DisplayProperties2KHR & operator=( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -27902,23 +27702,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkDisplayProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDisplayProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplayProperties2KHR *>( this );
}
- explicit operator VkDisplayProperties2KHR &() VULKAN_HPP_NOEXCEPT
+ operator VkDisplayProperties2KHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplayProperties2KHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -27931,7 +27729,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayProperties == rhs.displayProperties );
@@ -27949,12 +27747,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayProperties2KHR ) == sizeof( VkDisplayProperties2KHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR>::value,
- "DisplayProperties2KHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDisplayProperties2KHR>
@@ -27975,13 +27767,13 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {},
uint32_t planeIndex_ = {},
uint32_t planeStackIndex_ = {},
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ =
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
float globalAlpha_ = {},
- VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ =
- VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque,
- VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque,
+ VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, displayMode( displayMode_ )
, planeIndex( planeIndex_ )
, planeStackIndex( planeStackIndex_ )
@@ -27989,14 +27781,15 @@ namespace VULKAN_HPP_NAMESPACE
, globalAlpha( globalAlpha_ )
, alphaMode( alphaMode_ )
, imageExtent( imageExtent_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: DisplaySurfaceCreateInfoKHR( *reinterpret_cast<DisplaySurfaceCreateInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DisplaySurfaceCreateInfoKHR & operator=( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -28014,15 +27807,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR &
- setFlags( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR &
- setDisplayMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setDisplayMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ ) VULKAN_HPP_NOEXCEPT
{
displayMode = displayMode_;
return *this;
@@ -28034,15 +27825,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR &
- setPlaneStackIndex( uint32_t planeStackIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPlaneStackIndex( uint32_t planeStackIndex_ ) VULKAN_HPP_NOEXCEPT
{
planeStackIndex = planeStackIndex_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR &
- setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
{
transform = transform_;
return *this;
@@ -28054,32 +27843,30 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR &
- setAlphaMode( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setAlphaMode( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) VULKAN_HPP_NOEXCEPT
{
alphaMode = alphaMode_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR &
- setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
{
imageExtent = imageExtent_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDisplaySurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDisplaySurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( this );
}
- explicit operator VkDisplaySurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkDisplaySurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDisplaySurfaceCreateInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -28096,8 +27883,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- sType, pNext, flags, displayMode, planeIndex, planeStackIndex, transform, globalAlpha, alphaMode, imageExtent );
+ return std::tie( sType, pNext, flags, displayMode, planeIndex, planeStackIndex, transform, globalAlpha, alphaMode, imageExtent );
}
#endif
@@ -28106,12 +27892,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( displayMode == rhs.displayMode ) && ( planeIndex == rhs.planeIndex ) &&
- ( planeStackIndex == rhs.planeStackIndex ) && ( transform == rhs.transform ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( displayMode == rhs.displayMode ) &&
+ ( planeIndex == rhs.planeIndex ) && ( planeStackIndex == rhs.planeStackIndex ) && ( transform == rhs.transform ) &&
( globalAlpha == rhs.globalAlpha ) && ( alphaMode == rhs.alphaMode ) && ( imageExtent == rhs.imageExtent );
# endif
}
@@ -28129,21 +27914,11 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {};
uint32_t planeIndex = {};
uint32_t planeStackIndex = {};
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform =
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
- float globalAlpha = {};
- VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode =
- VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque;
- VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR ) ==
- sizeof( VkDisplaySurfaceCreateInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR>::value,
- "DisplaySurfaceCreateInfoKHR is not nothrow_move_constructible!" );
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+ float globalAlpha = {};
+ VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque;
+ VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {};
+ };
template <>
struct CppType<StructureType, StructureType::eDisplaySurfaceCreateInfoKHR>
@@ -28166,14 +27941,15 @@ namespace VULKAN_HPP_NAMESPACE
, firstIndex( firstIndex_ )
, vertexOffset( vertexOffset_ )
, firstInstance( firstInstance_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DrawIndexedIndirectCommand( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
: DrawIndexedIndirectCommand( *reinterpret_cast<DrawIndexedIndirectCommand const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DrawIndexedIndirectCommand & operator=( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -28216,17 +27992,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDrawIndexedIndirectCommand const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDrawIndexedIndirectCommand const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDrawIndexedIndirectCommand *>( this );
}
- explicit operator VkDrawIndexedIndirectCommand &() VULKAN_HPP_NOEXCEPT
+ operator VkDrawIndexedIndirectCommand &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDrawIndexedIndirectCommand *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -28243,12 +28019,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( indexCount == rhs.indexCount ) && ( instanceCount == rhs.instanceCount ) &&
- ( firstIndex == rhs.firstIndex ) && ( vertexOffset == rhs.vertexOffset ) &&
- ( firstInstance == rhs.firstInstance );
+ return ( indexCount == rhs.indexCount ) && ( instanceCount == rhs.instanceCount ) && ( firstIndex == rhs.firstIndex ) &&
+ ( vertexOffset == rhs.vertexOffset ) && ( firstInstance == rhs.firstInstance );
# endif
}
@@ -28265,13 +28040,6 @@ namespace VULKAN_HPP_NAMESPACE
int32_t vertexOffset = {};
uint32_t firstInstance = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand ) ==
- sizeof( VkDrawIndexedIndirectCommand ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand>::value,
- "DrawIndexedIndirectCommand is not nothrow_move_constructible!" );
struct DrawIndirectCommand
{
@@ -28286,13 +28054,14 @@ namespace VULKAN_HPP_NAMESPACE
, instanceCount( instanceCount_ )
, firstVertex( firstVertex_ )
, firstInstance( firstInstance_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR DrawIndirectCommand( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DrawIndirectCommand( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
- : DrawIndirectCommand( *reinterpret_cast<DrawIndirectCommand const *>( &rhs ) )
- {}
+ DrawIndirectCommand( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT : DrawIndirectCommand( *reinterpret_cast<DrawIndirectCommand const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
DrawIndirectCommand & operator=( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -28329,17 +28098,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDrawIndirectCommand const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDrawIndirectCommand const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDrawIndirectCommand *>( this );
}
- explicit operator VkDrawIndirectCommand &() VULKAN_HPP_NOEXCEPT
+ operator VkDrawIndirectCommand &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDrawIndirectCommand *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -28356,11 +28125,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DrawIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( vertexCount == rhs.vertexCount ) && ( instanceCount == rhs.instanceCount ) &&
- ( firstVertex == rhs.firstVertex ) && ( firstInstance == rhs.firstInstance );
+ return ( vertexCount == rhs.vertexCount ) && ( instanceCount == rhs.instanceCount ) && ( firstVertex == rhs.firstVertex ) &&
+ ( firstInstance == rhs.firstInstance );
# endif
}
@@ -28376,34 +28145,122 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t firstVertex = {};
uint32_t firstInstance = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrawIndirectCommand>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrawIndirectCommand>::value,
- "DrawIndirectCommand is not nothrow_move_constructible!" );
+
+ struct DrawMeshTasksIndirectCommandEXT
+ {
+ using NativeType = VkDrawMeshTasksIndirectCommandEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR
+ DrawMeshTasksIndirectCommandEXT( uint32_t groupCountX_ = {}, uint32_t groupCountY_ = {}, uint32_t groupCountZ_ = {} ) VULKAN_HPP_NOEXCEPT
+ : groupCountX( groupCountX_ )
+ , groupCountY( groupCountY_ )
+ , groupCountZ( groupCountZ_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandEXT( DrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DrawMeshTasksIndirectCommandEXT( VkDrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : DrawMeshTasksIndirectCommandEXT( *reinterpret_cast<DrawMeshTasksIndirectCommandEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ DrawMeshTasksIndirectCommandEXT & operator=( DrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DrawMeshTasksIndirectCommandEXT & operator=( VkDrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT & setGroupCountX( uint32_t groupCountX_ ) VULKAN_HPP_NOEXCEPT
+ {
+ groupCountX = groupCountX_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT & setGroupCountY( uint32_t groupCountY_ ) VULKAN_HPP_NOEXCEPT
+ {
+ groupCountY = groupCountY_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT & setGroupCountZ( uint32_t groupCountZ_ ) VULKAN_HPP_NOEXCEPT
+ {
+ groupCountZ = groupCountZ_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkDrawMeshTasksIndirectCommandEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDrawMeshTasksIndirectCommandEXT *>( this );
+ }
+
+ operator VkDrawMeshTasksIndirectCommandEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDrawMeshTasksIndirectCommandEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( groupCountX, groupCountY, groupCountZ );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( DrawMeshTasksIndirectCommandEXT const & ) const = default;
+#else
+ bool operator==( DrawMeshTasksIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( groupCountX == rhs.groupCountX ) && ( groupCountY == rhs.groupCountY ) && ( groupCountZ == rhs.groupCountZ );
+# endif
+ }
+
+ bool operator!=( DrawMeshTasksIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ uint32_t groupCountX = {};
+ uint32_t groupCountY = {};
+ uint32_t groupCountZ = {};
+ };
struct DrawMeshTasksIndirectCommandNV
{
using NativeType = VkDrawMeshTasksIndirectCommandNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( uint32_t taskCount_ = {},
- uint32_t firstTask_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( uint32_t taskCount_ = {}, uint32_t firstTask_ = {} ) VULKAN_HPP_NOEXCEPT
: taskCount( taskCount_ )
, firstTask( firstTask_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DrawMeshTasksIndirectCommandNV( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DrawMeshTasksIndirectCommandNV( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
: DrawMeshTasksIndirectCommandNV( *reinterpret_cast<DrawMeshTasksIndirectCommandNV const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DrawMeshTasksIndirectCommandNV &
- operator=( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DrawMeshTasksIndirectCommandNV & operator=( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DrawMeshTasksIndirectCommandNV & operator=( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -28425,17 +28282,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkDrawMeshTasksIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDrawMeshTasksIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDrawMeshTasksIndirectCommandNV *>( this );
}
- explicit operator VkDrawMeshTasksIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+ operator VkDrawMeshTasksIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDrawMeshTasksIndirectCommandNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -28452,7 +28309,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DrawMeshTasksIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( taskCount == rhs.taskCount ) && ( firstTask == rhs.firstTask );
@@ -28469,39 +28326,30 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t taskCount = {};
uint32_t firstTask = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV ) ==
- sizeof( VkDrawMeshTasksIndirectCommandNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV>::value,
- "DrawMeshTasksIndirectCommandNV is not nothrow_move_constructible!" );
struct DrmFormatModifierProperties2EXT
{
using NativeType = VkDrmFormatModifierProperties2EXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DrmFormatModifierProperties2EXT(
- uint64_t drmFormatModifier_ = {},
- uint32_t drmFormatModifierPlaneCount_ = {},
- VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DrmFormatModifierProperties2EXT( uint64_t drmFormatModifier_ = {},
+ uint32_t drmFormatModifierPlaneCount_ = {},
+ VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
: drmFormatModifier( drmFormatModifier_ )
, drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ )
, drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DrmFormatModifierProperties2EXT( DrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DrmFormatModifierProperties2EXT( DrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DrmFormatModifierProperties2EXT( VkDrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DrmFormatModifierProperties2EXT( *reinterpret_cast<DrmFormatModifierProperties2EXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DrmFormatModifierProperties2EXT &
- operator=( DrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DrmFormatModifierProperties2EXT & operator=( DrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DrmFormatModifierProperties2EXT & operator=( VkDrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -28509,17 +28357,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkDrmFormatModifierProperties2EXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDrmFormatModifierProperties2EXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDrmFormatModifierProperties2EXT *>( this );
}
- explicit operator VkDrmFormatModifierProperties2EXT &() VULKAN_HPP_NOEXCEPT
+ operator VkDrmFormatModifierProperties2EXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDrmFormatModifierProperties2EXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -28536,11 +28384,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DrmFormatModifierProperties2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( drmFormatModifier == rhs.drmFormatModifier ) &&
- ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) &&
+ return ( drmFormatModifier == rhs.drmFormatModifier ) && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) &&
( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures );
# endif
}
@@ -28556,39 +28403,30 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t drmFormatModifierPlaneCount = {};
VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 drmFormatModifierTilingFeatures = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT ) ==
- sizeof( VkDrmFormatModifierProperties2EXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT>::value,
- "DrmFormatModifierProperties2EXT is not nothrow_move_constructible!" );
struct DrmFormatModifierPropertiesEXT
{
using NativeType = VkDrmFormatModifierPropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT(
- uint64_t drmFormatModifier_ = {},
- uint32_t drmFormatModifierPlaneCount_ = {},
- VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {},
+ uint32_t drmFormatModifierPlaneCount_ = {},
+ VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
: drmFormatModifier( drmFormatModifier_ )
, drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ )
, drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- DrmFormatModifierPropertiesEXT( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DrmFormatModifierPropertiesEXT( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DrmFormatModifierPropertiesEXT( *reinterpret_cast<DrmFormatModifierPropertiesEXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DrmFormatModifierPropertiesEXT &
- operator=( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DrmFormatModifierPropertiesEXT & operator=( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DrmFormatModifierPropertiesEXT & operator=( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -28596,17 +28434,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDrmFormatModifierPropertiesEXT *>( this );
}
- explicit operator VkDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDrmFormatModifierPropertiesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -28623,11 +28461,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( drmFormatModifier == rhs.drmFormatModifier ) &&
- ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) &&
+ return ( drmFormatModifier == rhs.drmFormatModifier ) && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) &&
( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures );
# endif
}
@@ -28643,76 +28480,66 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t drmFormatModifierPlaneCount = {};
VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT ) ==
- sizeof( VkDrmFormatModifierPropertiesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT>::value,
- "DrmFormatModifierPropertiesEXT is not nothrow_move_constructible!" );
struct DrmFormatModifierPropertiesList2EXT
{
using NativeType = VkDrmFormatModifierPropertiesList2EXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eDrmFormatModifierPropertiesList2EXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDrmFormatModifierPropertiesList2EXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesList2EXT(
- uint32_t drmFormatModifierCount_ = {},
- VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT * pDrmFormatModifierProperties_ = {} ) VULKAN_HPP_NOEXCEPT
- : drmFormatModifierCount( drmFormatModifierCount_ )
+ VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesList2EXT( uint32_t drmFormatModifierCount_ = {},
+ VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT * pDrmFormatModifierProperties_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , drmFormatModifierCount( drmFormatModifierCount_ )
, pDrmFormatModifierProperties( pDrmFormatModifierProperties_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesList2EXT( DrmFormatModifierPropertiesList2EXT const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesList2EXT( DrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DrmFormatModifierPropertiesList2EXT( VkDrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DrmFormatModifierPropertiesList2EXT( *reinterpret_cast<DrmFormatModifierPropertiesList2EXT const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DrmFormatModifierPropertiesList2EXT(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT> const &
- drmFormatModifierProperties_ )
- : drmFormatModifierCount( static_cast<uint32_t>( drmFormatModifierProperties_.size() ) )
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT> const & drmFormatModifierProperties_,
+ void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , drmFormatModifierCount( static_cast<uint32_t>( drmFormatModifierProperties_.size() ) )
, pDrmFormatModifierProperties( drmFormatModifierProperties_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DrmFormatModifierPropertiesList2EXT &
- operator=( DrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DrmFormatModifierPropertiesList2EXT & operator=( DrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DrmFormatModifierPropertiesList2EXT &
- operator=( VkDrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ DrmFormatModifierPropertiesList2EXT & operator=( VkDrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT const *>( &rhs );
return *this;
}
- explicit operator VkDrmFormatModifierPropertiesList2EXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDrmFormatModifierPropertiesList2EXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDrmFormatModifierPropertiesList2EXT *>( this );
}
- explicit operator VkDrmFormatModifierPropertiesList2EXT &() VULKAN_HPP_NOEXCEPT
+ operator VkDrmFormatModifierPropertiesList2EXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDrmFormatModifierPropertiesList2EXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- uint32_t const &,
- VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -28725,11 +28552,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DrmFormatModifierPropertiesList2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( drmFormatModifierCount == rhs.drmFormatModifierCount ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) &&
( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties );
# endif
}
@@ -28741,19 +28567,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesList2EXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesList2EXT;
+ void * pNext = {};
uint32_t drmFormatModifierCount = {};
VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT * pDrmFormatModifierProperties = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT ) ==
- sizeof( VkDrmFormatModifierPropertiesList2EXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT>::value,
- "DrmFormatModifierPropertiesList2EXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDrmFormatModifierPropertiesList2EXT>
@@ -28766,62 +28584,60 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkDrmFormatModifierPropertiesListEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eDrmFormatModifierPropertiesListEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDrmFormatModifierPropertiesListEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT(
- uint32_t drmFormatModifierCount_ = {},
- VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties_ = {} ) VULKAN_HPP_NOEXCEPT
- : drmFormatModifierCount( drmFormatModifierCount_ )
+ VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( uint32_t drmFormatModifierCount_ = {},
+ VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , drmFormatModifierCount( drmFormatModifierCount_ )
, pDrmFormatModifierProperties( pDrmFormatModifierProperties_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( DrmFormatModifierPropertiesListEXT const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
DrmFormatModifierPropertiesListEXT( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: DrmFormatModifierPropertiesListEXT( *reinterpret_cast<DrmFormatModifierPropertiesListEXT const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
DrmFormatModifierPropertiesListEXT(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT> const &
- drmFormatModifierProperties_ )
- : drmFormatModifierCount( static_cast<uint32_t>( drmFormatModifierProperties_.size() ) )
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT> const & drmFormatModifierProperties_,
+ void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , drmFormatModifierCount( static_cast<uint32_t>( drmFormatModifierProperties_.size() ) )
, pDrmFormatModifierProperties( drmFormatModifierProperties_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- DrmFormatModifierPropertiesListEXT &
- operator=( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ DrmFormatModifierPropertiesListEXT & operator=( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- DrmFormatModifierPropertiesListEXT &
- operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ DrmFormatModifierPropertiesListEXT & operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT const *>( &rhs );
return *this;
}
- explicit operator VkDrmFormatModifierPropertiesListEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkDrmFormatModifierPropertiesListEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkDrmFormatModifierPropertiesListEXT *>( this );
}
- explicit operator VkDrmFormatModifierPropertiesListEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkDrmFormatModifierPropertiesListEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkDrmFormatModifierPropertiesListEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- uint32_t const &,
- VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -28834,11 +28650,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( drmFormatModifierCount == rhs.drmFormatModifierCount ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) &&
( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties );
# endif
}
@@ -28850,19 +28665,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesListEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesListEXT;
+ void * pNext = {};
uint32_t drmFormatModifierCount = {};
VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT ) ==
- sizeof( VkDrmFormatModifierPropertiesListEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT>::value,
- "DrmFormatModifierPropertiesListEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDrmFormatModifierPropertiesListEXT>
@@ -28878,15 +28685,15 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eEventCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR EventCreateInfo( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- {}
+ VULKAN_HPP_CONSTEXPR EventCreateInfo( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR EventCreateInfo( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- EventCreateInfo( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : EventCreateInfo( *reinterpret_cast<EventCreateInfo const *>( &rhs ) )
- {}
+ EventCreateInfo( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : EventCreateInfo( *reinterpret_cast<EventCreateInfo const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
EventCreateInfo & operator=( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -28904,31 +28711,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 EventCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 EventCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkEventCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkEventCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkEventCreateInfo *>( this );
}
- explicit operator VkEventCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkEventCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkEventCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::EventCreateFlags const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::EventCreateFlags const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -28941,7 +28745,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
@@ -28959,12 +28763,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::EventCreateFlags flags = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::EventCreateInfo ) == sizeof( VkEventCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::EventCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::EventCreateInfo>::value,
- "EventCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eEventCreateInfo>
@@ -28980,16 +28778,19 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- ExportFenceCreateInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
- : handleTypes( handleTypes_ )
- {}
+ VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , handleTypes( handleTypes_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ExportFenceCreateInfo( *reinterpret_cast<ExportFenceCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ExportFenceCreateInfo & operator=( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -29007,31 +28808,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo &
- setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
{
handleTypes = handleTypes_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkExportFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkExportFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExportFenceCreateInfo *>( this );
}
- explicit operator VkExportFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkExportFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExportFenceCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -29044,7 +28842,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
@@ -29062,12 +28860,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo>::value,
- "ExportFenceCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eExportFenceCreateInfo>
@@ -29087,22 +28879,24 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {},
DWORD dwAccess_ = {},
- LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT
- : pAttributes( pAttributes_ )
+ LPCWSTR name_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pAttributes( pAttributes_ )
, dwAccess( dwAccess_ )
, name( name_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ExportFenceWin32HandleInfoKHR( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: ExportFenceWin32HandleInfoKHR( *reinterpret_cast<ExportFenceWin32HandleInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- ExportFenceWin32HandleInfoKHR &
- operator=( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ ExportFenceWin32HandleInfoKHR & operator=( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExportFenceWin32HandleInfoKHR & operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -29117,8 +28911,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR &
- setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
{
pAttributes = pAttributes_;
return *this;
@@ -29137,25 +28930,21 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkExportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkExportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExportFenceWin32HandleInfoKHR *>( this );
}
- explicit operator VkExportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkExportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExportFenceWin32HandleInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- const SECURITY_ATTRIBUTES * const &,
- DWORD const &,
- LPCWSTR const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const SECURITY_ATTRIBUTES * const &, DWORD const &, LPCWSTR const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -29168,11 +28957,10 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( ExportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) &&
- ( dwAccess == rhs.dwAccess ) && ( name == rhs.name );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && ( dwAccess == rhs.dwAccess ) && ( name == rhs.name );
# endif
}
@@ -29189,14 +28977,6 @@ namespace VULKAN_HPP_NAMESPACE
DWORD dwAccess = {};
LPCWSTR name = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR ) ==
- sizeof( VkExportFenceWin32HandleInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR>::value,
- "ExportFenceWin32HandleInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eExportFenceWin32HandleInfoKHR>
@@ -29213,16 +28993,19 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo(
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
- : handleTypes( handleTypes_ )
- {}
+ VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , handleTypes( handleTypes_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ExportMemoryAllocateInfo( *reinterpret_cast<ExportMemoryAllocateInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ExportMemoryAllocateInfo & operator=( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -29240,31 +29023,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo &
- setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
{
handleTypes = handleTypes_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkExportMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkExportMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExportMemoryAllocateInfo *>( this );
}
- explicit operator VkExportMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkExportMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExportMemoryAllocateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -29277,7 +29057,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
@@ -29295,13 +29075,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo ) ==
- sizeof( VkExportMemoryAllocateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo>::value,
- "ExportMemoryAllocateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eExportMemoryAllocateInfo>
@@ -29318,17 +29091,19 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV(
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
- : handleTypes( handleTypes_ )
- {}
+ VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , handleTypes( handleTypes_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ExportMemoryAllocateInfoNV( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ExportMemoryAllocateInfoNV( *reinterpret_cast<ExportMemoryAllocateInfoNV const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ExportMemoryAllocateInfoNV & operator=( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -29347,30 +29122,28 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfoNV &
- setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
+ setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
{
handleTypes = handleTypes_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkExportMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkExportMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExportMemoryAllocateInfoNV *>( this );
}
- explicit operator VkExportMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkExportMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExportMemoryAllocateInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -29383,7 +29156,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ExportMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
@@ -29401,13 +29174,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV ) ==
- sizeof( VkExportMemoryAllocateInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV>::value,
- "ExportMemoryAllocateInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eExportMemoryAllocateInfoNV>
@@ -29426,22 +29192,24 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {},
DWORD dwAccess_ = {},
- LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT
- : pAttributes( pAttributes_ )
+ LPCWSTR name_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pAttributes( pAttributes_ )
, dwAccess( dwAccess_ )
, name( name_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ExportMemoryWin32HandleInfoKHR( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: ExportMemoryWin32HandleInfoKHR( *reinterpret_cast<ExportMemoryWin32HandleInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- ExportMemoryWin32HandleInfoKHR &
- operator=( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ ExportMemoryWin32HandleInfoKHR & operator=( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExportMemoryWin32HandleInfoKHR & operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -29456,8 +29224,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR &
- setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
{
pAttributes = pAttributes_;
return *this;
@@ -29476,25 +29243,21 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkExportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkExportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExportMemoryWin32HandleInfoKHR *>( this );
}
- explicit operator VkExportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkExportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExportMemoryWin32HandleInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- const SECURITY_ATTRIBUTES * const &,
- DWORD const &,
- LPCWSTR const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const SECURITY_ATTRIBUTES * const &, DWORD const &, LPCWSTR const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -29507,11 +29270,10 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( ExportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) &&
- ( dwAccess == rhs.dwAccess ) && ( name == rhs.name );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && ( dwAccess == rhs.dwAccess ) && ( name == rhs.name );
# endif
}
@@ -29528,14 +29290,6 @@ namespace VULKAN_HPP_NAMESPACE
DWORD dwAccess = {};
LPCWSTR name = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR ) ==
- sizeof( VkExportMemoryWin32HandleInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR>::value,
- "ExportMemoryWin32HandleInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eExportMemoryWin32HandleInfoKHR>
@@ -29553,22 +29307,23 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoNV;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES * pAttributes_ = {},
- DWORD dwAccess_ = {} ) VULKAN_HPP_NOEXCEPT
- : pAttributes( pAttributes_ )
+ VULKAN_HPP_CONSTEXPR
+ ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES * pAttributes_ = {}, DWORD dwAccess_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pAttributes( pAttributes_ )
, dwAccess( dwAccess_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ExportMemoryWin32HandleInfoNV( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ExportMemoryWin32HandleInfoNV( *reinterpret_cast<ExportMemoryWin32HandleInfoNV const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- ExportMemoryWin32HandleInfoNV &
- operator=( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ ExportMemoryWin32HandleInfoNV & operator=( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExportMemoryWin32HandleInfoNV & operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -29583,8 +29338,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV &
- setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
{
pAttributes = pAttributes_;
return *this;
@@ -29597,24 +29351,21 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkExportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkExportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExportMemoryWin32HandleInfoNV *>( this );
}
- explicit operator VkExportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkExportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExportMemoryWin32HandleInfoNV *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- const SECURITY_ATTRIBUTES * const &,
- DWORD const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const SECURITY_ATTRIBUTES * const &, DWORD const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -29627,11 +29378,10 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( ExportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) &&
- ( dwAccess == rhs.dwAccess );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && ( dwAccess == rhs.dwAccess );
# endif
}
@@ -29647,14 +29397,6 @@ namespace VULKAN_HPP_NAMESPACE
const SECURITY_ATTRIBUTES * pAttributes = {};
DWORD dwAccess = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV ) ==
- sizeof( VkExportMemoryWin32HandleInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV>::value,
- "ExportMemoryWin32HandleInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eExportMemoryWin32HandleInfoNV>
@@ -29663,6 +29405,879 @@ namespace VULKAN_HPP_NAMESPACE
};
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
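// [Editor's sketch, not part of the upstream diff] The hunks above drop the `explicit`
// keyword from the `operator VkXxx` conversion operators, so a wrapper struct now converts
// to its native Vk struct implicitly. A minimal illustration (assumes the usual vulkan.hpp
// setup and that VULKAN_HPP_NO_STRUCT_CONSTRUCTORS is not defined; the variable names are
// placeholders, not taken from this diff):
//
//   VULKAN_HPP_NAMESPACE::EventCreateInfo eventCreateInfo;
//   const VkEventCreateInfo & native = eventCreateInfo;  // implicit conversion, no cast needed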
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ struct ExportMetalBufferInfoEXT
+ {
+ using NativeType = VkExportMetalBufferInfoEXT;
+
+ static const bool allowDuplicate = true;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalBufferInfoEXT;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ExportMetalBufferInfoEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
+ MTLBuffer_id mtlBuffer_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , memory( memory_ )
+ , mtlBuffer( mtlBuffer_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR ExportMetalBufferInfoEXT( ExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExportMetalBufferInfoEXT( VkExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ExportMetalBufferInfoEXT( *reinterpret_cast<ExportMetalBufferInfoEXT const *>( &rhs ) )
+ {
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ ExportMetalBufferInfoEXT & operator=( ExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExportMetalBufferInfoEXT & operator=( VkExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMetalBufferInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 ExportMetalBufferInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ExportMetalBufferInfoEXT & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ {
+ memory = memory_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ExportMetalBufferInfoEXT & setMtlBuffer( MTLBuffer_id mtlBuffer_ ) VULKAN_HPP_NOEXCEPT
+ {
+ mtlBuffer = mtlBuffer_;
+ return *this;
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkExportMetalBufferInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkExportMetalBufferInfoEXT *>( this );
+ }
+
+ operator VkExportMetalBufferInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkExportMetalBufferInfoEXT *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, MTLBuffer_id const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, memory, mtlBuffer );
+ }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( ExportMetalBufferInfoEXT const & ) const = default;
+# else
+ bool operator==( ExportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( mtlBuffer == rhs.mtlBuffer );
+# endif
+ }
+
+ bool operator!=( ExportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+# endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalBufferInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+ MTLBuffer_id mtlBuffer = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eExportMetalBufferInfoEXT>
+ {
+ using Type = ExportMetalBufferInfoEXT;
+ };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ struct ExportMetalCommandQueueInfoEXT
+ {
+ using NativeType = VkExportMetalCommandQueueInfoEXT;
+
+ static const bool allowDuplicate = true;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalCommandQueueInfoEXT;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ExportMetalCommandQueueInfoEXT( VULKAN_HPP_NAMESPACE::Queue queue_ = {},
+ MTLCommandQueue_id mtlCommandQueue_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , queue( queue_ )
+ , mtlCommandQueue( mtlCommandQueue_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR ExportMetalCommandQueueInfoEXT( ExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExportMetalCommandQueueInfoEXT( VkExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ExportMetalCommandQueueInfoEXT( *reinterpret_cast<ExportMetalCommandQueueInfoEXT const *>( &rhs ) )
+ {
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ ExportMetalCommandQueueInfoEXT & operator=( ExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExportMetalCommandQueueInfoEXT & operator=( VkExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMetalCommandQueueInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT & setQueue( VULKAN_HPP_NAMESPACE::Queue queue_ ) VULKAN_HPP_NOEXCEPT
+ {
+ queue = queue_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT & setMtlCommandQueue( MTLCommandQueue_id mtlCommandQueue_ ) VULKAN_HPP_NOEXCEPT
+ {
+ mtlCommandQueue = mtlCommandQueue_;
+ return *this;
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkExportMetalCommandQueueInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkExportMetalCommandQueueInfoEXT *>( this );
+ }
+
+ operator VkExportMetalCommandQueueInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkExportMetalCommandQueueInfoEXT *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Queue const &, MTLCommandQueue_id const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, queue, mtlCommandQueue );
+ }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( ExportMetalCommandQueueInfoEXT const & ) const = default;
+# else
+ bool operator==( ExportMetalCommandQueueInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queue == rhs.queue ) && ( mtlCommandQueue == rhs.mtlCommandQueue );
+# endif
+ }
+
+ bool operator!=( ExportMetalCommandQueueInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+# endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalCommandQueueInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Queue queue = {};
+ MTLCommandQueue_id mtlCommandQueue = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eExportMetalCommandQueueInfoEXT>
+ {
+ using Type = ExportMetalCommandQueueInfoEXT;
+ };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
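// [Editor's sketch, not part of the upstream diff] The reflect() support above is now gated
// on VULKAN_HPP_USE_REFLECT instead of a GCC version check, i.e. it is opt-in. A minimal
// sketch of enabling it (the define-before-include pattern and the include path are
// assumptions for illustration, not stated in this diff; requires VK_USE_PLATFORM_METAL_EXT):
//
//   #define VULKAN_HPP_USE_REFLECT
//   #include <vulkan/vulkan.hpp>
//   ...
//   VULKAN_HPP_NAMESPACE::ExportMetalCommandQueueInfoEXT info;
//   auto members = info.reflect();  // tuple tying sType, pNext, queue, mtlCommandQueue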
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ struct ExportMetalDeviceInfoEXT
+ {
+ using NativeType = VkExportMetalDeviceInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalDeviceInfoEXT;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ExportMetalDeviceInfoEXT( MTLDevice_id mtlDevice_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , mtlDevice( mtlDevice_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR ExportMetalDeviceInfoEXT( ExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExportMetalDeviceInfoEXT( VkExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ExportMetalDeviceInfoEXT( *reinterpret_cast<ExportMetalDeviceInfoEXT const *>( &rhs ) )
+ {
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ ExportMetalDeviceInfoEXT & operator=( ExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExportMetalDeviceInfoEXT & operator=( VkExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMetalDeviceInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 ExportMetalDeviceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ExportMetalDeviceInfoEXT & setMtlDevice( MTLDevice_id mtlDevice_ ) VULKAN_HPP_NOEXCEPT
+ {
+ mtlDevice = mtlDevice_;
+ return *this;
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkExportMetalDeviceInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkExportMetalDeviceInfoEXT *>( this );
+ }
+
+ operator VkExportMetalDeviceInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkExportMetalDeviceInfoEXT *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, MTLDevice_id const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, mtlDevice );
+ }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( ExportMetalDeviceInfoEXT const & ) const = default;
+# else
+ bool operator==( ExportMetalDeviceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mtlDevice == rhs.mtlDevice );
+# endif
+ }
+
+ bool operator!=( ExportMetalDeviceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+# endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalDeviceInfoEXT;
+ const void * pNext = {};
+ MTLDevice_id mtlDevice = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eExportMetalDeviceInfoEXT>
+ {
+ using Type = ExportMetalDeviceInfoEXT;
+ };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ struct ExportMetalIOSurfaceInfoEXT
+ {
+ using NativeType = VkExportMetalIOSurfaceInfoEXT;
+
+ static const bool allowDuplicate = true;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalIoSurfaceInfoEXT;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR
+ ExportMetalIOSurfaceInfoEXT( VULKAN_HPP_NAMESPACE::Image image_ = {}, IOSurfaceRef ioSurface_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , image( image_ )
+ , ioSurface( ioSurface_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR ExportMetalIOSurfaceInfoEXT( ExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExportMetalIOSurfaceInfoEXT( VkExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ExportMetalIOSurfaceInfoEXT( *reinterpret_cast<ExportMetalIOSurfaceInfoEXT const *>( &rhs ) )
+ {
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ ExportMetalIOSurfaceInfoEXT & operator=( ExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExportMetalIOSurfaceInfoEXT & operator=( VkExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMetalIOSurfaceInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 ExportMetalIOSurfaceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ExportMetalIOSurfaceInfoEXT & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+ {
+ image = image_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ExportMetalIOSurfaceInfoEXT & setIoSurface( IOSurfaceRef ioSurface_ ) VULKAN_HPP_NOEXCEPT
+ {
+ ioSurface = ioSurface_;
+ return *this;
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkExportMetalIOSurfaceInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkExportMetalIOSurfaceInfoEXT *>( this );
+ }
+
+ operator VkExportMetalIOSurfaceInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkExportMetalIOSurfaceInfoEXT *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, IOSurfaceRef const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, image, ioSurface );
+ }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( ExportMetalIOSurfaceInfoEXT const & ) const = default;
+# else
+ bool operator==( ExportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( ioSurface == rhs.ioSurface );
+# endif
+ }
+
+ bool operator!=( ExportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+# endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalIoSurfaceInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Image image = {};
+ IOSurfaceRef ioSurface = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eExportMetalIoSurfaceInfoEXT>
+ {
+ using Type = ExportMetalIOSurfaceInfoEXT;
+ };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ struct ExportMetalObjectCreateInfoEXT
+ {
+ using NativeType = VkExportMetalObjectCreateInfoEXT;
+
+ static const bool allowDuplicate = true;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalObjectCreateInfoEXT;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ExportMetalObjectCreateInfoEXT(
+ VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT exportObjectType_ = VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT::eMetalDevice,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , exportObjectType( exportObjectType_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR ExportMetalObjectCreateInfoEXT( ExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExportMetalObjectCreateInfoEXT( VkExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ExportMetalObjectCreateInfoEXT( *reinterpret_cast<ExportMetalObjectCreateInfoEXT const *>( &rhs ) )
+ {
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ ExportMetalObjectCreateInfoEXT & operator=( ExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExportMetalObjectCreateInfoEXT & operator=( VkExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMetalObjectCreateInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectCreateInfoEXT &
+ setExportObjectType( VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT exportObjectType_ ) VULKAN_HPP_NOEXCEPT
+ {
+ exportObjectType = exportObjectType_;
+ return *this;
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkExportMetalObjectCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkExportMetalObjectCreateInfoEXT *>( this );
+ }
+
+ operator VkExportMetalObjectCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkExportMetalObjectCreateInfoEXT *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, exportObjectType );
+ }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( ExportMetalObjectCreateInfoEXT const & ) const = default;
+# else
+ bool operator==( ExportMetalObjectCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exportObjectType == rhs.exportObjectType );
+# endif
+ }
+
+ bool operator!=( ExportMetalObjectCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+# endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalObjectCreateInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT exportObjectType = VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT::eMetalDevice;
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eExportMetalObjectCreateInfoEXT>
+ {
+ using Type = ExportMetalObjectCreateInfoEXT;
+ };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
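// [Editor's sketch, not part of the upstream diff] The setters added above return *this, so
// they can be chained fluently. A minimal illustration, assuming VK_USE_PLATFORM_METAL_EXT
// is defined and the VULKAN_HPP_NO_STRUCT_* macros are not:
//
//   VULKAN_HPP_NAMESPACE::ExportMetalObjectCreateInfoEXT exportCreateInfo;
//   exportCreateInfo.setExportObjectType( VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT::eMetalDevice )
//                   .setPNext( nullptr );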
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ struct ExportMetalObjectsInfoEXT
+ {
+ using NativeType = VkExportMetalObjectsInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalObjectsInfoEXT;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ExportMetalObjectsInfoEXT( const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext( pNext_ ) {}
+
+ VULKAN_HPP_CONSTEXPR ExportMetalObjectsInfoEXT( ExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExportMetalObjectsInfoEXT( VkExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ExportMetalObjectsInfoEXT( *reinterpret_cast<ExportMetalObjectsInfoEXT const *>( &rhs ) )
+ {
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ ExportMetalObjectsInfoEXT & operator=( ExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExportMetalObjectsInfoEXT & operator=( VkExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkExportMetalObjectsInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkExportMetalObjectsInfoEXT *>( this );
+ }
+
+ operator VkExportMetalObjectsInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkExportMetalObjectsInfoEXT *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext );
+ }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( ExportMetalObjectsInfoEXT const & ) const = default;
+# else
+ bool operator==( ExportMetalObjectsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext );
+# endif
+ }
+
+ bool operator!=( ExportMetalObjectsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+# endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalObjectsInfoEXT;
+ const void * pNext = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eExportMetalObjectsInfoEXT>
+ {
+ using Type = ExportMetalObjectsInfoEXT;
+ };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ struct ExportMetalSharedEventInfoEXT
+ {
+ using NativeType = VkExportMetalSharedEventInfoEXT;
+
+ static const bool allowDuplicate = true;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalSharedEventInfoEXT;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ExportMetalSharedEventInfoEXT( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
+ VULKAN_HPP_NAMESPACE::Event event_ = {},
+ MTLSharedEvent_id mtlSharedEvent_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , semaphore( semaphore_ )
+ , event( event_ )
+ , mtlSharedEvent( mtlSharedEvent_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR ExportMetalSharedEventInfoEXT( ExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExportMetalSharedEventInfoEXT( VkExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ExportMetalSharedEventInfoEXT( *reinterpret_cast<ExportMetalSharedEventInfoEXT const *>( &rhs ) )
+ {
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ ExportMetalSharedEventInfoEXT & operator=( ExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExportMetalSharedEventInfoEXT & operator=( VkExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMetalSharedEventInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+ {
+ semaphore = semaphore_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setEvent( VULKAN_HPP_NAMESPACE::Event event_ ) VULKAN_HPP_NOEXCEPT
+ {
+ event = event_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setMtlSharedEvent( MTLSharedEvent_id mtlSharedEvent_ ) VULKAN_HPP_NOEXCEPT
+ {
+ mtlSharedEvent = mtlSharedEvent_;
+ return *this;
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkExportMetalSharedEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkExportMetalSharedEventInfoEXT *>( this );
+ }
+
+ operator VkExportMetalSharedEventInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkExportMetalSharedEventInfoEXT *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ VULKAN_HPP_NAMESPACE::Semaphore const &,
+ VULKAN_HPP_NAMESPACE::Event const &,
+ MTLSharedEvent_id const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, semaphore, event, mtlSharedEvent );
+ }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( ExportMetalSharedEventInfoEXT const & ) const = default;
+# else
+ bool operator==( ExportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( event == rhs.event ) &&
+ ( mtlSharedEvent == rhs.mtlSharedEvent );
+# endif
+ }
+
+ bool operator!=( ExportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+# endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalSharedEventInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+ VULKAN_HPP_NAMESPACE::Event event = {};
+ MTLSharedEvent_id mtlSharedEvent = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eExportMetalSharedEventInfoEXT>
+ {
+ using Type = ExportMetalSharedEventInfoEXT;
+ };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ struct ExportMetalTextureInfoEXT
+ {
+ using NativeType = VkExportMetalTextureInfoEXT;
+
+ static const bool allowDuplicate = true;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalTextureInfoEXT;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ExportMetalTextureInfoEXT( VULKAN_HPP_NAMESPACE::Image image_ = {},
+ VULKAN_HPP_NAMESPACE::ImageView imageView_ = {},
+ VULKAN_HPP_NAMESPACE::BufferView bufferView_ = {},
+ VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor,
+ MTLTexture_id mtlTexture_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , image( image_ )
+ , imageView( imageView_ )
+ , bufferView( bufferView_ )
+ , plane( plane_ )
+ , mtlTexture( mtlTexture_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR ExportMetalTextureInfoEXT( ExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExportMetalTextureInfoEXT( VkExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ExportMetalTextureInfoEXT( *reinterpret_cast<ExportMetalTextureInfoEXT const *>( &rhs ) )
+ {
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ ExportMetalTextureInfoEXT & operator=( ExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ExportMetalTextureInfoEXT & operator=( VkExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMetalTextureInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+ {
+ image = image_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
+ {
+ imageView = imageView_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView_ ) VULKAN_HPP_NOEXCEPT
+ {
+ bufferView = bufferView_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setPlane( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ ) VULKAN_HPP_NOEXCEPT
+ {
+ plane = plane_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setMtlTexture( MTLTexture_id mtlTexture_ ) VULKAN_HPP_NOEXCEPT
+ {
+ mtlTexture = mtlTexture_;
+ return *this;
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkExportMetalTextureInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkExportMetalTextureInfoEXT *>( this );
+ }
+
+ operator VkExportMetalTextureInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkExportMetalTextureInfoEXT *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ VULKAN_HPP_NAMESPACE::Image const &,
+ VULKAN_HPP_NAMESPACE::ImageView const &,
+ VULKAN_HPP_NAMESPACE::BufferView const &,
+ VULKAN_HPP_NAMESPACE::ImageAspectFlagBits const &,
+ MTLTexture_id const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, image, imageView, bufferView, plane, mtlTexture );
+ }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( ExportMetalTextureInfoEXT const & ) const = default;
+# else
+ bool operator==( ExportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( imageView == rhs.imageView ) && ( bufferView == rhs.bufferView ) &&
+ ( plane == rhs.plane ) && ( mtlTexture == rhs.mtlTexture );
+# endif
+ }
+
+ bool operator!=( ExportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+# endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalTextureInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Image image = {};
+ VULKAN_HPP_NAMESPACE::ImageView imageView = {};
+ VULKAN_HPP_NAMESPACE::BufferView bufferView = {};
+ VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
+ MTLTexture_id mtlTexture = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eExportMetalTextureInfoEXT>
+ {
+ using Type = ExportMetalTextureInfoEXT;
+ };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
struct ExportSemaphoreCreateInfo
{
using NativeType = VkExportSemaphoreCreateInfo;
@@ -29671,17 +30286,19 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo(
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
- : handleTypes( handleTypes_ )
- {}
+ VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , handleTypes( handleTypes_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ExportSemaphoreCreateInfo( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ExportSemaphoreCreateInfo( *reinterpret_cast<ExportSemaphoreCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ExportSemaphoreCreateInfo & operator=( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -29700,30 +30317,28 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo &
- setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+ setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
{
handleTypes = handleTypes_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkExportSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkExportSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExportSemaphoreCreateInfo *>( this );
}
- explicit operator VkExportSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkExportSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExportSemaphoreCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -29736,7 +30351,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
@@ -29754,13 +30369,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo ) ==
- sizeof( VkExportSemaphoreCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo>::value,
- "ExportSemaphoreCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eExportSemaphoreCreateInfo>
@@ -29775,28 +30383,29 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkExportSemaphoreWin32HandleInfoKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eExportSemaphoreWin32HandleInfoKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreWin32HandleInfoKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {},
DWORD dwAccess_ = {},
- LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT
- : pAttributes( pAttributes_ )
+ LPCWSTR name_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pAttributes( pAttributes_ )
, dwAccess( dwAccess_ )
, name( name_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ExportSemaphoreWin32HandleInfoKHR( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: ExportSemaphoreWin32HandleInfoKHR( *reinterpret_cast<ExportSemaphoreWin32HandleInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- ExportSemaphoreWin32HandleInfoKHR &
- operator=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ ExportSemaphoreWin32HandleInfoKHR & operator=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExportSemaphoreWin32HandleInfoKHR & operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -29811,8 +30420,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR &
- setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
{
pAttributes = pAttributes_;
return *this;
@@ -29831,25 +30439,21 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkExportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkExportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExportSemaphoreWin32HandleInfoKHR *>( this );
}
- explicit operator VkExportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkExportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExportSemaphoreWin32HandleInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- const SECURITY_ATTRIBUTES * const &,
- DWORD const &,
- LPCWSTR const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const SECURITY_ATTRIBUTES * const &, DWORD const &, LPCWSTR const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -29862,11 +30466,10 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( ExportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) &&
- ( dwAccess == rhs.dwAccess ) && ( name == rhs.name );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && ( dwAccess == rhs.dwAccess ) && ( name == rhs.name );
# endif
}
@@ -29883,14 +30486,6 @@ namespace VULKAN_HPP_NAMESPACE
DWORD dwAccess = {};
LPCWSTR name = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR ) ==
- sizeof( VkExportSemaphoreWin32HandleInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR>::value,
- "ExportSemaphoreWin32HandleInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eExportSemaphoreWin32HandleInfoKHR>
@@ -29904,18 +30499,18 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkExtensionProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14
- ExtensionProperties( std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & extensionName_ = {},
- uint32_t specVersion_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & extensionName_ = {},
+ uint32_t specVersion_ = {} ) VULKAN_HPP_NOEXCEPT
: extensionName( extensionName_ )
, specVersion( specVersion_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ExtensionProperties( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- : ExtensionProperties( *reinterpret_cast<ExtensionProperties const *>( &rhs ) )
- {}
+ ExtensionProperties( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT : ExtensionProperties( *reinterpret_cast<ExtensionProperties const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ExtensionProperties & operator=( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -29926,17 +30521,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkExtensionProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkExtensionProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExtensionProperties *>( this );
}
- explicit operator VkExtensionProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkExtensionProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExtensionProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -29953,7 +30548,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( extensionName == rhs.extensionName ) && ( specVersion == rhs.specVersion );
@@ -29970,32 +30565,27 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> extensionName = {};
uint32_t specVersion = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExtensionProperties ) == sizeof( VkExtensionProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExtensionProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExtensionProperties>::value,
- "ExtensionProperties is not nothrow_move_constructible!" );
struct ExternalMemoryProperties
{
using NativeType = VkExternalMemoryProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ExternalMemoryProperties(
- VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures_ = {},
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes_ = {},
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
: externalMemoryFeatures( externalMemoryFeatures_ )
, exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
, compatibleHandleTypes( compatibleHandleTypes_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalMemoryProperties( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: ExternalMemoryProperties( *reinterpret_cast<ExternalMemoryProperties const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ExternalMemoryProperties & operator=( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -30006,17 +30596,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkExternalMemoryProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkExternalMemoryProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExternalMemoryProperties *>( this );
}
- explicit operator VkExternalMemoryProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkExternalMemoryProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExternalMemoryProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -30035,11 +30625,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( externalMemoryFeatures == rhs.externalMemoryFeatures ) &&
- ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) &&
+ return ( externalMemoryFeatures == rhs.externalMemoryFeatures ) && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) &&
( compatibleHandleTypes == rhs.compatibleHandleTypes );
# endif
}
@@ -30055,13 +30644,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties ) ==
- sizeof( VkExternalMemoryProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalMemoryProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalMemoryProperties>::value,
- "ExternalMemoryProperties is not nothrow_move_constructible!" );
using ExternalMemoryPropertiesKHR = ExternalMemoryProperties;
struct ExternalBufferProperties
@@ -30072,16 +30654,19 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalBufferProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ExternalBufferProperties(
- VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT
- : externalMemoryProperties( externalMemoryProperties_ )
- {}
+ VULKAN_HPP_CONSTEXPR ExternalBufferProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , externalMemoryProperties( externalMemoryProperties_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR ExternalBufferProperties( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalBufferProperties( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: ExternalBufferProperties( *reinterpret_cast<ExternalBufferProperties const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ExternalBufferProperties & operator=( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -30092,23 +30677,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkExternalBufferProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkExternalBufferProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExternalBufferProperties *>( this );
}
- explicit operator VkExternalBufferProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkExternalBufferProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExternalBufferProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -30121,11 +30704,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( externalMemoryProperties == rhs.externalMemoryProperties );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalMemoryProperties == rhs.externalMemoryProperties );
# endif
}
@@ -30140,13 +30722,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalBufferProperties ) ==
- sizeof( VkExternalBufferProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalBufferProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalBufferProperties>::value,
- "ExternalBufferProperties is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eExternalBufferProperties>
@@ -30163,20 +30738,23 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFenceProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ExternalFenceProperties(
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = {},
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes_ = {},
- VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
- : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
+ VULKAN_HPP_CONSTEXPR ExternalFenceProperties( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
, compatibleHandleTypes( compatibleHandleTypes_ )
, externalFenceFeatures( externalFenceFeatures_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ExternalFenceProperties( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalFenceProperties( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: ExternalFenceProperties( *reinterpret_cast<ExternalFenceProperties const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ExternalFenceProperties & operator=( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -30187,17 +30765,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkExternalFenceProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkExternalFenceProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExternalFenceProperties *>( this );
}
- explicit operator VkExternalFenceProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkExternalFenceProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExternalFenceProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -30218,13 +30796,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) &&
- ( compatibleHandleTypes == rhs.compatibleHandleTypes ) &&
- ( externalFenceFeatures == rhs.externalFenceFeatures );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) &&
+ ( compatibleHandleTypes == rhs.compatibleHandleTypes ) && ( externalFenceFeatures == rhs.externalFenceFeatures );
# endif
}
@@ -30235,19 +30811,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFenceProperties;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFenceProperties;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes = {};
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes = {};
VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalFenceProperties ) ==
- sizeof( VkExternalFenceProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalFenceProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalFenceProperties>::value,
- "ExternalFenceProperties is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eExternalFenceProperties>
@@ -30265,15 +30834,18 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFormatANDROID;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( uint64_t externalFormat_ = {} ) VULKAN_HPP_NOEXCEPT
- : externalFormat( externalFormat_ )
- {}
+ VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( uint64_t externalFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , externalFormat( externalFormat_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalFormatANDROID( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
: ExternalFormatANDROID( *reinterpret_cast<ExternalFormatANDROID const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ExternalFormatANDROID & operator=( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -30298,17 +30870,17 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkExternalFormatANDROID const &() const VULKAN_HPP_NOEXCEPT
+ operator VkExternalFormatANDROID const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExternalFormatANDROID *>( this );
}
- explicit operator VkExternalFormatANDROID &() VULKAN_HPP_NOEXCEPT
+ operator VkExternalFormatANDROID &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExternalFormatANDROID *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -30325,7 +30897,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( ExternalFormatANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalFormat == rhs.externalFormat );
@@ -30343,12 +30915,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
uint64_t externalFormat = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalFormatANDROID>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalFormatANDROID>::value,
- "ExternalFormatANDROID is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eExternalFormatANDROID>
@@ -30365,21 +30931,22 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalImageFormatProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties(
- VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT
- : externalMemoryProperties( externalMemoryProperties_ )
- {}
+ VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , externalMemoryProperties( externalMemoryProperties_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ExternalImageFormatProperties( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalImageFormatProperties( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: ExternalImageFormatProperties( *reinterpret_cast<ExternalImageFormatProperties const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- ExternalImageFormatProperties &
- operator=( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ ExternalImageFormatProperties & operator=( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalImageFormatProperties & operator=( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -30387,23 +30954,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkExternalImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkExternalImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExternalImageFormatProperties *>( this );
}
- explicit operator VkExternalImageFormatProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkExternalImageFormatProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExternalImageFormatProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -30416,11 +30981,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( externalMemoryProperties == rhs.externalMemoryProperties );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalMemoryProperties == rhs.externalMemoryProperties );
# endif
}
@@ -30431,18 +30995,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalImageFormatProperties;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalImageFormatProperties;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties ) ==
- sizeof( VkExternalImageFormatProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties>::value,
- "ExternalImageFormatProperties is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eExternalImageFormatProperties>
@@ -30456,24 +31012,25 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkImageFormatProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- ImageFormatProperties( VULKAN_HPP_NAMESPACE::Extent3D maxExtent_ = {},
- uint32_t maxMipLevels_ = {},
- uint32_t maxArrayLayers_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ImageFormatProperties( VULKAN_HPP_NAMESPACE::Extent3D maxExtent_ = {},
+ uint32_t maxMipLevels_ = {},
+ uint32_t maxArrayLayers_ = {},
+ VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize_ = {} ) VULKAN_HPP_NOEXCEPT
: maxExtent( maxExtent_ )
, maxMipLevels( maxMipLevels_ )
, maxArrayLayers( maxArrayLayers_ )
, sampleCounts( sampleCounts_ )
, maxResourceSize( maxResourceSize_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ImageFormatProperties( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageFormatProperties( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageFormatProperties( *reinterpret_cast<ImageFormatProperties const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ImageFormatProperties & operator=( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -30484,17 +31041,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageFormatProperties *>( this );
}
- explicit operator VkImageFormatProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkImageFormatProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageFormatProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -30515,12 +31072,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( maxExtent == rhs.maxExtent ) && ( maxMipLevels == rhs.maxMipLevels ) &&
- ( maxArrayLayers == rhs.maxArrayLayers ) && ( sampleCounts == rhs.sampleCounts ) &&
- ( maxResourceSize == rhs.maxResourceSize );
+ return ( maxExtent == rhs.maxExtent ) && ( maxMipLevels == rhs.maxMipLevels ) && ( maxArrayLayers == rhs.maxArrayLayers ) &&
+ ( sampleCounts == rhs.sampleCounts ) && ( maxResourceSize == rhs.maxResourceSize );
# endif
}
@@ -30537,39 +31093,33 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {};
VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatProperties ) == sizeof( VkImageFormatProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::value,
- "ImageFormatProperties is not nothrow_move_constructible!" );
struct ExternalImageFormatPropertiesNV
{
using NativeType = VkExternalImageFormatPropertiesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV(
- VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {},
- VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ = {},
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = {},
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR
+ ExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
: imageFormatProperties( imageFormatProperties_ )
, externalMemoryFeatures( externalMemoryFeatures_ )
, exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
, compatibleHandleTypes( compatibleHandleTypes_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ExternalImageFormatPropertiesNV( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalImageFormatPropertiesNV( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ExternalImageFormatPropertiesNV( *reinterpret_cast<ExternalImageFormatPropertiesNV const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- ExternalImageFormatPropertiesNV &
- operator=( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ ExternalImageFormatPropertiesNV & operator=( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalImageFormatPropertiesNV & operator=( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -30577,17 +31127,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkExternalImageFormatPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkExternalImageFormatPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExternalImageFormatPropertiesNV *>( this );
}
- explicit operator VkExternalImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkExternalImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExternalImageFormatPropertiesNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -30598,8 +31148,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- imageFormatProperties, externalMemoryFeatures, exportFromImportedHandleTypes, compatibleHandleTypes );
+ return std::tie( imageFormatProperties, externalMemoryFeatures, exportFromImportedHandleTypes, compatibleHandleTypes );
}
#endif
@@ -30608,13 +31157,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ExternalImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( imageFormatProperties == rhs.imageFormatProperties ) &&
- ( externalMemoryFeatures == rhs.externalMemoryFeatures ) &&
- ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) &&
- ( compatibleHandleTypes == rhs.compatibleHandleTypes );
+ return ( imageFormatProperties == rhs.imageFormatProperties ) && ( externalMemoryFeatures == rhs.externalMemoryFeatures ) &&
+ ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
# endif
}
@@ -30630,14 +31177,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV ) ==
- sizeof( VkExternalImageFormatPropertiesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::value,
- "ExternalImageFormatPropertiesNV is not nothrow_move_constructible!" );
struct ExternalMemoryBufferCreateInfo
{
@@ -30647,21 +31186,22 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryBufferCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo(
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
- : handleTypes( handleTypes_ )
- {}
+ VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , handleTypes( handleTypes_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ExternalMemoryBufferCreateInfo( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ExternalMemoryBufferCreateInfo( *reinterpret_cast<ExternalMemoryBufferCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- ExternalMemoryBufferCreateInfo &
- operator=( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ ExternalMemoryBufferCreateInfo & operator=( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalMemoryBufferCreateInfo & operator=( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -30677,30 +31217,28 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo &
- setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+ setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
{
handleTypes = handleTypes_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkExternalMemoryBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkExternalMemoryBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExternalMemoryBufferCreateInfo *>( this );
}
- explicit operator VkExternalMemoryBufferCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkExternalMemoryBufferCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExternalMemoryBufferCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -30713,7 +31251,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
@@ -30731,14 +31269,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo ) ==
- sizeof( VkExternalMemoryBufferCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo>::value,
- "ExternalMemoryBufferCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eExternalMemoryBufferCreateInfo>
@@ -30755,21 +31285,22 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo(
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
- : handleTypes( handleTypes_ )
- {}
+ VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , handleTypes( handleTypes_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ExternalMemoryImageCreateInfo( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ExternalMemoryImageCreateInfo( *reinterpret_cast<ExternalMemoryImageCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- ExternalMemoryImageCreateInfo &
- operator=( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ ExternalMemoryImageCreateInfo & operator=( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalMemoryImageCreateInfo & operator=( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -30785,30 +31316,28 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo &
- setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+ setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
{
handleTypes = handleTypes_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkExternalMemoryImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkExternalMemoryImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExternalMemoryImageCreateInfo *>( this );
}
- explicit operator VkExternalMemoryImageCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkExternalMemoryImageCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExternalMemoryImageCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -30821,7 +31350,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
@@ -30839,14 +31368,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo ) ==
- sizeof( VkExternalMemoryImageCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo>::value,
- "ExternalMemoryImageCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eExternalMemoryImageCreateInfo>
@@ -30863,21 +31384,22 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV(
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
- : handleTypes( handleTypes_ )
- {}
+ VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , handleTypes( handleTypes_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ExternalMemoryImageCreateInfoNV( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ExternalMemoryImageCreateInfoNV( *reinterpret_cast<ExternalMemoryImageCreateInfoNV const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- ExternalMemoryImageCreateInfoNV &
- operator=( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ ExternalMemoryImageCreateInfoNV & operator=( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalMemoryImageCreateInfoNV & operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -30893,30 +31415,28 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfoNV &
- setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
+ setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
{
handleTypes = handleTypes_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkExternalMemoryImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkExternalMemoryImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExternalMemoryImageCreateInfoNV *>( this );
}
- explicit operator VkExternalMemoryImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkExternalMemoryImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExternalMemoryImageCreateInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -30929,7 +31449,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ExternalMemoryImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
@@ -30947,14 +31467,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV ) ==
- sizeof( VkExternalMemoryImageCreateInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV>::value,
- "ExternalMemoryImageCreateInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eExternalMemoryImageCreateInfoNV>
@@ -30970,21 +31482,23 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalSemaphoreProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties(
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = {},
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_ = {},
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
- : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
+ VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
, compatibleHandleTypes( compatibleHandleTypes_ )
, externalSemaphoreFeatures( externalSemaphoreFeatures_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ExternalSemaphoreProperties( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ExternalSemaphoreProperties( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: ExternalSemaphoreProperties( *reinterpret_cast<ExternalSemaphoreProperties const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ExternalSemaphoreProperties & operator=( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -30995,17 +31509,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkExternalSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkExternalSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkExternalSemaphoreProperties *>( this );
}
- explicit operator VkExternalSemaphoreProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkExternalSemaphoreProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkExternalSemaphoreProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -31026,13 +31540,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) &&
- ( compatibleHandleTypes == rhs.compatibleHandleTypes ) &&
- ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) &&
+ ( compatibleHandleTypes == rhs.compatibleHandleTypes ) && ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures );
# endif
}
@@ -31043,20 +31555,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalSemaphoreProperties;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalSemaphoreProperties;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes = {};
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes = {};
VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties ) ==
- sizeof( VkExternalSemaphoreProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties>::value,
- "ExternalSemaphoreProperties is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eExternalSemaphoreProperties>
@@ -31073,15 +31577,15 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR FenceCreateInfo( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- {}
+ VULKAN_HPP_CONSTEXPR FenceCreateInfo( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR FenceCreateInfo( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- FenceCreateInfo( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : FenceCreateInfo( *reinterpret_cast<FenceCreateInfo const *>( &rhs ) )
- {}
+ FenceCreateInfo( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : FenceCreateInfo( *reinterpret_cast<FenceCreateInfo const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
FenceCreateInfo & operator=( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -31099,31 +31603,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkFenceCreateInfo *>( this );
}
- explicit operator VkFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkFenceCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::FenceCreateFlags const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::FenceCreateFlags const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -31136,7 +31637,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
@@ -31154,12 +31655,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::FenceCreateFlags flags = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FenceCreateInfo ) == sizeof( VkFenceCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FenceCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FenceCreateInfo>::value,
- "FenceCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eFenceCreateInfo>
@@ -31176,18 +31671,18 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR
- FenceGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {},
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ =
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : fence( fence_ )
+ FenceGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , fence( fence_ )
, handleType( handleType_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : FenceGetFdInfoKHR( *reinterpret_cast<FenceGetFdInfoKHR const *>( &rhs ) )
- {}
+ FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : FenceGetFdInfoKHR( *reinterpret_cast<FenceGetFdInfoKHR const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
FenceGetFdInfoKHR & operator=( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -31211,25 +31706,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR &
- setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkFenceGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkFenceGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkFenceGetFdInfoKHR *>( this );
}
- explicit operator VkFenceGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkFenceGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkFenceGetFdInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -31249,11 +31743,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) &&
- ( handleType == rhs.handleType );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( handleType == rhs.handleType );
# endif
}
@@ -31264,18 +31757,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetFdInfoKHR;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::Fence fence = {};
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType =
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetFdInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Fence fence = {};
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR>::value,
- "FenceGetFdInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eFenceGetFdInfoKHR>
@@ -31293,19 +31779,21 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR(
- VULKAN_HPP_NAMESPACE::Fence fence_ = {},
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ =
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : fence( fence_ )
+ VULKAN_HPP_NAMESPACE::Fence fence_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , fence( fence_ )
, handleType( handleType_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- FenceGetWin32HandleInfoKHR( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: FenceGetWin32HandleInfoKHR( *reinterpret_cast<FenceGetWin32HandleInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
FenceGetWin32HandleInfoKHR & operator=( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -31323,32 +31811,30 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR &
- setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
{
fence = fence_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR &
- setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkFenceGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkFenceGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( this );
}
- explicit operator VkFenceGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkFenceGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkFenceGetWin32HandleInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -31368,11 +31854,10 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( FenceGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) &&
- ( handleType == rhs.handleType );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( handleType == rhs.handleType );
# endif
}
@@ -31383,19 +31868,11 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::Fence fence = {};
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType =
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Fence fence = {};
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR ) ==
- sizeof( VkFenceGetWin32HandleInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR>::value,
- "FenceGetWin32HandleInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eFenceGetWin32HandleInfoKHR>
@@ -31409,55 +31886,49 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkFilterCubicImageViewImageFormatPropertiesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eFilterCubicImageViewImageFormatPropertiesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 filterCubic_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax_ = {} ) VULKAN_HPP_NOEXCEPT
- : filterCubic( filterCubic_ )
+ VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 filterCubic_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , filterCubic( filterCubic_ )
, filterCubicMinmax( filterCubicMinmax_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT(
- FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : FilterCubicImageViewImageFormatPropertiesEXT(
- *reinterpret_cast<FilterCubicImageViewImageFormatPropertiesEXT const *>( &rhs ) )
- {}
+ FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : FilterCubicImageViewImageFormatPropertiesEXT( *reinterpret_cast<FilterCubicImageViewImageFormatPropertiesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- FilterCubicImageViewImageFormatPropertiesEXT &
- operator=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ FilterCubicImageViewImageFormatPropertiesEXT & operator=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- FilterCubicImageViewImageFormatPropertiesEXT &
- operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ FilterCubicImageViewImageFormatPropertiesEXT & operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT const *>( &rhs );
return *this;
}
- explicit operator VkFilterCubicImageViewImageFormatPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkFilterCubicImageViewImageFormatPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkFilterCubicImageViewImageFormatPropertiesEXT *>( this );
}
- explicit operator VkFilterCubicImageViewImageFormatPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkFilterCubicImageViewImageFormatPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkFilterCubicImageViewImageFormatPropertiesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -31470,11 +31941,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( filterCubic == rhs.filterCubic ) &&
- ( filterCubicMinmax == rhs.filterCubicMinmax );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( filterCubic == rhs.filterCubic ) && ( filterCubicMinmax == rhs.filterCubicMinmax );
# endif
}
@@ -31485,20 +31955,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::Bool32 filterCubic = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 filterCubic = {};
VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT ) ==
- sizeof( VkFilterCubicImageViewImageFormatPropertiesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT>::value,
- "FilterCubicImageViewImageFormatPropertiesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eFilterCubicImageViewImageFormatPropertiesEXT>
@@ -31511,20 +31972,18 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkFormatProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- FormatProperties( VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures_ = {},
- VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures_ = {},
- VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR FormatProperties( VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures_ = {},
+ VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures_ = {},
+ VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
: linearTilingFeatures( linearTilingFeatures_ )
, optimalTilingFeatures( optimalTilingFeatures_ )
, bufferFeatures( bufferFeatures_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR FormatProperties( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- FormatProperties( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- : FormatProperties( *reinterpret_cast<FormatProperties const *>( &rhs ) )
- {}
+ FormatProperties( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT : FormatProperties( *reinterpret_cast<FormatProperties const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
FormatProperties & operator=( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -31535,17 +31994,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkFormatProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkFormatProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkFormatProperties *>( this );
}
- explicit operator VkFormatProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkFormatProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkFormatProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -31564,11 +32023,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( linearTilingFeatures == rhs.linearTilingFeatures ) &&
- ( optimalTilingFeatures == rhs.optimalTilingFeatures ) && ( bufferFeatures == rhs.bufferFeatures );
+ return ( linearTilingFeatures == rhs.linearTilingFeatures ) && ( optimalTilingFeatures == rhs.optimalTilingFeatures ) &&
+ ( bufferFeatures == rhs.bufferFeatures );
# endif
}
@@ -31583,12 +32042,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures = {};
VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FormatProperties ) == sizeof( VkFormatProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FormatProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FormatProperties>::value,
- "FormatProperties is not nothrow_move_constructible!" );
struct FormatProperties2
{
@@ -31598,16 +32051,15 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties2;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- FormatProperties2( VULKAN_HPP_NAMESPACE::FormatProperties formatProperties_ = {} ) VULKAN_HPP_NOEXCEPT
- : formatProperties( formatProperties_ )
- {}
+ VULKAN_HPP_CONSTEXPR FormatProperties2( VULKAN_HPP_NAMESPACE::FormatProperties formatProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , formatProperties( formatProperties_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR FormatProperties2( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- FormatProperties2( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : FormatProperties2( *reinterpret_cast<FormatProperties2 const *>( &rhs ) )
- {}
+ FormatProperties2( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT : FormatProperties2( *reinterpret_cast<FormatProperties2 const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
FormatProperties2 & operator=( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -31618,22 +32070,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkFormatProperties2 *>( this );
}
- explicit operator VkFormatProperties2 &() VULKAN_HPP_NOEXCEPT
+ operator VkFormatProperties2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkFormatProperties2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::
- tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::FormatProperties const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::FormatProperties const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -31646,7 +32097,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatProperties == rhs.formatProperties );
@@ -31664,12 +32115,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::FormatProperties formatProperties = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FormatProperties2 ) == sizeof( VkFormatProperties2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FormatProperties2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FormatProperties2>::value,
- "FormatProperties2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eFormatProperties2>
@@ -31686,20 +32131,20 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties3;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- FormatProperties3( VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 linearTilingFeatures_ = {},
- VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 optimalTilingFeatures_ = {},
- VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 bufferFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
- : linearTilingFeatures( linearTilingFeatures_ )
+ VULKAN_HPP_CONSTEXPR FormatProperties3( VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 linearTilingFeatures_ = {},
+ VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 optimalTilingFeatures_ = {},
+ VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 bufferFeatures_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , linearTilingFeatures( linearTilingFeatures_ )
, optimalTilingFeatures( optimalTilingFeatures_ )
, bufferFeatures( bufferFeatures_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR FormatProperties3( FormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- FormatProperties3( VkFormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT
- : FormatProperties3( *reinterpret_cast<FormatProperties3 const *>( &rhs ) )
- {}
+ FormatProperties3( VkFormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT : FormatProperties3( *reinterpret_cast<FormatProperties3 const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
FormatProperties3 & operator=( FormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -31710,17 +32155,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkFormatProperties3 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkFormatProperties3 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkFormatProperties3 *>( this );
}
- explicit operator VkFormatProperties3 &() VULKAN_HPP_NOEXCEPT
+ operator VkFormatProperties3 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkFormatProperties3 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -31741,7 +32186,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( FormatProperties3 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( linearTilingFeatures == rhs.linearTilingFeatures ) &&
@@ -31762,12 +32207,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 optimalTilingFeatures = {};
VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 bufferFeatures = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FormatProperties3 ) == sizeof( VkFormatProperties3 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FormatProperties3>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FormatProperties3>::value,
- "FormatProperties3 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eFormatProperties3>
@@ -31781,30 +32220,29 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkFragmentShadingRateAttachmentInfoKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eFragmentShadingRateAttachmentInfoKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFragmentShadingRateAttachmentInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR(
- const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize_ = {} ) VULKAN_HPP_NOEXCEPT
- : pFragmentShadingRateAttachment( pFragmentShadingRateAttachment_ )
+ VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pFragmentShadingRateAttachment( pFragmentShadingRateAttachment_ )
, shadingRateAttachmentTexelSize( shadingRateAttachmentTexelSize_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR( FragmentShadingRateAttachmentInfoKHR const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
FragmentShadingRateAttachmentInfoKHR( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: FragmentShadingRateAttachmentInfoKHR( *reinterpret_cast<FragmentShadingRateAttachmentInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- FragmentShadingRateAttachmentInfoKHR &
- operator=( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ FragmentShadingRateAttachmentInfoKHR & operator=( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- FragmentShadingRateAttachmentInfoKHR &
- operator=( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ FragmentShadingRateAttachmentInfoKHR & operator=( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR const *>( &rhs );
return *this;
@@ -31817,32 +32255,32 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & setPFragmentShadingRateAttachment(
- const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR &
+ setPFragmentShadingRateAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment_ ) VULKAN_HPP_NOEXCEPT
{
pFragmentShadingRateAttachment = pFragmentShadingRateAttachment_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & setShadingRateAttachmentTexelSize(
- VULKAN_HPP_NAMESPACE::Extent2D const & shadingRateAttachmentTexelSize_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR &
+ setShadingRateAttachmentTexelSize( VULKAN_HPP_NAMESPACE::Extent2D const & shadingRateAttachmentTexelSize_ ) VULKAN_HPP_NOEXCEPT
{
shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkFragmentShadingRateAttachmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkFragmentShadingRateAttachmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkFragmentShadingRateAttachmentInfoKHR *>( this );
}
- explicit operator VkFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkFragmentShadingRateAttachmentInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -31862,11 +32300,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( FragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( pFragmentShadingRateAttachment == rhs.pFragmentShadingRateAttachment ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pFragmentShadingRateAttachment == rhs.pFragmentShadingRateAttachment ) &&
( shadingRateAttachmentTexelSize == rhs.shadingRateAttachmentTexelSize );
# endif
}
@@ -31878,19 +32315,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFragmentShadingRateAttachmentInfoKHR;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFragmentShadingRateAttachmentInfoKHR;
+ const void * pNext = {};
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment = {};
VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR ) ==
- sizeof( VkFragmentShadingRateAttachmentInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR>::value,
- "FragmentShadingRateAttachmentInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eFragmentShadingRateAttachmentInfoKHR>
@@ -31906,51 +32335,54 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentImageInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {},
- uint32_t width_ = {},
- uint32_t height_ = {},
- uint32_t layerCount_ = {},
- uint32_t viewFormatCount_ = {},
- const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {},
+ uint32_t width_ = {},
+ uint32_t height_ = {},
+ uint32_t layerCount_ = {},
+ uint32_t viewFormatCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, usage( usage_ )
, width( width_ )
, height( height_ )
, layerCount( layerCount_ )
, viewFormatCount( viewFormatCount_ )
, pViewFormats( pViewFormats_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- FramebufferAttachmentImageInfo( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
FramebufferAttachmentImageInfo( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: FramebufferAttachmentImageInfo( *reinterpret_cast<FramebufferAttachmentImageInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- FramebufferAttachmentImageInfo(
- VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_,
- uint32_t width_,
- uint32_t height_,
- uint32_t layerCount_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ )
- : flags( flags_ )
+ FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_,
+ uint32_t width_,
+ uint32_t height_,
+ uint32_t layerCount_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, usage( usage_ )
, width( width_ )
, height( height_ )
, layerCount( layerCount_ )
, viewFormatCount( static_cast<uint32_t>( viewFormats_.size() ) )
, pViewFormats( viewFormats_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- FramebufferAttachmentImageInfo &
- operator=( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ FramebufferAttachmentImageInfo & operator=( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
FramebufferAttachmentImageInfo & operator=( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -31965,15 +32397,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo &
- setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo &
- setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
{
usage = usage_;
return *this;
@@ -31997,24 +32427,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo &
- setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
{
viewFormatCount = viewFormatCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo &
- setPViewFormats( const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT
{
pViewFormats = pViewFormats_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- FramebufferAttachmentImageInfo & setViewFormats(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ )
- VULKAN_HPP_NOEXCEPT
+ FramebufferAttachmentImageInfo &
+ setViewFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ ) VULKAN_HPP_NOEXCEPT
{
viewFormatCount = static_cast<uint32_t>( viewFormats_.size() );
pViewFormats = viewFormats_.data();
@@ -32023,17 +32450,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkFramebufferAttachmentImageInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkFramebufferAttachmentImageInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkFramebufferAttachmentImageInfo *>( this );
}
- explicit operator VkFramebufferAttachmentImageInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkFramebufferAttachmentImageInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkFramebufferAttachmentImageInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -32058,12 +32485,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( usage == rhs.usage ) &&
- ( width == rhs.width ) && ( height == rhs.height ) && ( layerCount == rhs.layerCount ) &&
- ( viewFormatCount == rhs.viewFormatCount ) && ( pViewFormats == rhs.pViewFormats );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( usage == rhs.usage ) && ( width == rhs.width ) &&
+ ( height == rhs.height ) && ( layerCount == rhs.layerCount ) && ( viewFormatCount == rhs.viewFormatCount ) && ( pViewFormats == rhs.pViewFormats );
# endif
}
@@ -32084,14 +32510,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t viewFormatCount = {};
const VULKAN_HPP_NAMESPACE::Format * pViewFormats = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo ) ==
- sizeof( VkFramebufferAttachmentImageInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo>::value,
- "FramebufferAttachmentImageInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eFramebufferAttachmentImageInfo>
@@ -32105,35 +32523,38 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkFramebufferAttachmentsCreateInfo;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentsCreateInfo;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentsCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo(
- uint32_t attachmentImageInfoCount_ = {},
- const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_ = {} ) VULKAN_HPP_NOEXCEPT
- : attachmentImageInfoCount( attachmentImageInfoCount_ )
+ VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( uint32_t attachmentImageInfoCount_ = {},
+ const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , attachmentImageInfoCount( attachmentImageInfoCount_ )
, pAttachmentImageInfos( pAttachmentImageInfos_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- FramebufferAttachmentsCreateInfo( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
FramebufferAttachmentsCreateInfo( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: FramebufferAttachmentsCreateInfo( *reinterpret_cast<FramebufferAttachmentsCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
FramebufferAttachmentsCreateInfo(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo> const &
- attachmentImageInfos_ )
- : attachmentImageInfoCount( static_cast<uint32_t>( attachmentImageInfos_.size() ) )
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo> const & attachmentImageInfos_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , attachmentImageInfoCount( static_cast<uint32_t>( attachmentImageInfos_.size() ) )
, pAttachmentImageInfos( attachmentImageInfos_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- FramebufferAttachmentsCreateInfo &
- operator=( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ FramebufferAttachmentsCreateInfo & operator=( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
FramebufferAttachmentsCreateInfo & operator=( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -32148,15 +32569,14 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo &
- setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) VULKAN_HPP_NOEXCEPT
{
attachmentImageInfoCount = attachmentImageInfoCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & setPAttachmentImageInfos(
- const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo &
+ setPAttachmentImageInfos( const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT
{
pAttachmentImageInfos = pAttachmentImageInfos_;
return *this;
@@ -32164,8 +32584,8 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
FramebufferAttachmentsCreateInfo & setAttachmentImageInfos(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo> const &
- attachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo> const & attachmentImageInfos_ )
+ VULKAN_HPP_NOEXCEPT
{
attachmentImageInfoCount = static_cast<uint32_t>( attachmentImageInfos_.size() );
pAttachmentImageInfos = attachmentImageInfos_.data();
@@ -32174,17 +32594,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkFramebufferAttachmentsCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkFramebufferAttachmentsCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkFramebufferAttachmentsCreateInfo *>( this );
}
- explicit operator VkFramebufferAttachmentsCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkFramebufferAttachmentsCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkFramebufferAttachmentsCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -32204,11 +32624,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( attachmentImageInfoCount == rhs.attachmentImageInfoCount ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentImageInfoCount == rhs.attachmentImageInfoCount ) &&
( pAttachmentImageInfos == rhs.pAttachmentImageInfos );
# endif
}
@@ -32220,19 +32639,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentsCreateInfo;
- const void * pNext = {};
- uint32_t attachmentImageInfoCount = {};
- const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentsCreateInfo;
+ const void * pNext = {};
+ uint32_t attachmentImageInfoCount = {};
+ const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo ) ==
- sizeof( VkFramebufferAttachmentsCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo>::value,
- "FramebufferAttachmentsCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eFramebufferAttachmentsCreateInfo>
@@ -32255,38 +32666,44 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ = {},
uint32_t width_ = {},
uint32_t height_ = {},
- uint32_t layers_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ uint32_t layers_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, renderPass( renderPass_ )
, attachmentCount( attachmentCount_ )
, pAttachments( pAttachments_ )
, width( width_ )
, height( height_ )
, layers( layers_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: FramebufferCreateInfo( *reinterpret_cast<FramebufferCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- FramebufferCreateInfo(
- VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_,
- VULKAN_HPP_NAMESPACE::RenderPass renderPass_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_,
- uint32_t width_ = {},
- uint32_t height_ = {},
- uint32_t layers_ = {} )
- : flags( flags_ )
+ FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_,
+ VULKAN_HPP_NAMESPACE::RenderPass renderPass_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_,
+ uint32_t width_ = {},
+ uint32_t height_ = {},
+ uint32_t layers_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, renderPass( renderPass_ )
, attachmentCount( static_cast<uint32_t>( attachments_.size() ) )
, pAttachments( attachments_.data() )
, width( width_ )
, height( height_ )
, layers( layers_ )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -32305,15 +32722,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo &
- setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
{
renderPass = renderPass_;
return *this;
@@ -32325,17 +32740,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo &
- setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pAttachments = pAttachments_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- FramebufferCreateInfo & setAttachments(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_ )
- VULKAN_HPP_NOEXCEPT
+ FramebufferCreateInfo &
+ setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_ ) VULKAN_HPP_NOEXCEPT
{
attachmentCount = static_cast<uint32_t>( attachments_.size() );
pAttachments = attachments_.data();
@@ -32362,17 +32775,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkFramebufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkFramebufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkFramebufferCreateInfo *>( this );
}
- explicit operator VkFramebufferCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkFramebufferCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkFramebufferCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -32397,12 +32810,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( renderPass == rhs.renderPass ) && ( attachmentCount == rhs.attachmentCount ) &&
- ( pAttachments == rhs.pAttachments ) && ( width == rhs.width ) && ( height == rhs.height ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( renderPass == rhs.renderPass ) &&
+ ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) && ( width == rhs.width ) && ( height == rhs.height ) &&
( layers == rhs.layers );
# endif
}
@@ -32424,12 +32836,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t height = {};
uint32_t layers = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FramebufferCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FramebufferCreateInfo>::value,
- "FramebufferCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eFramebufferCreateInfo>
@@ -32442,51 +32848,50 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkFramebufferMixedSamplesCombinationNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eFramebufferMixedSamplesCombinationNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferMixedSamplesCombinationNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV(
- VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ =
- VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge,
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
- VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples_ = {} ) VULKAN_HPP_NOEXCEPT
- : coverageReductionMode( coverageReductionMode_ )
+ VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge,
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+ VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples_ = {},
+ VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , coverageReductionMode( coverageReductionMode_ )
, rasterizationSamples( rasterizationSamples_ )
, depthStencilSamples( depthStencilSamples_ )
, colorSamples( colorSamples_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( FramebufferMixedSamplesCombinationNV const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
FramebufferMixedSamplesCombinationNV( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
: FramebufferMixedSamplesCombinationNV( *reinterpret_cast<FramebufferMixedSamplesCombinationNV const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- FramebufferMixedSamplesCombinationNV &
- operator=( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ FramebufferMixedSamplesCombinationNV & operator=( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- FramebufferMixedSamplesCombinationNV &
- operator=( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ FramebufferMixedSamplesCombinationNV & operator=( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV const *>( &rhs );
return *this;
}
- explicit operator VkFramebufferMixedSamplesCombinationNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkFramebufferMixedSamplesCombinationNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkFramebufferMixedSamplesCombinationNV *>( this );
}
- explicit operator VkFramebufferMixedSamplesCombinationNV &() VULKAN_HPP_NOEXCEPT
+ operator VkFramebufferMixedSamplesCombinationNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -32508,13 +32913,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( FramebufferMixedSamplesCombinationNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( coverageReductionMode == rhs.coverageReductionMode ) &&
- ( rasterizationSamples == rhs.rasterizationSamples ) &&
- ( depthStencilSamples == rhs.depthStencilSamples ) && ( colorSamples == rhs.colorSamples );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( coverageReductionMode == rhs.coverageReductionMode ) &&
+ ( rasterizationSamples == rhs.rasterizationSamples ) && ( depthStencilSamples == rhs.depthStencilSamples ) && ( colorSamples == rhs.colorSamples );
# endif
}
@@ -32525,22 +32928,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferMixedSamplesCombinationNV;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode =
- VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge;
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
- VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples = {};
- VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferMixedSamplesCombinationNV;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge;
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+ VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV ) ==
- sizeof( VkFramebufferMixedSamplesCombinationNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV>::value,
- "FramebufferMixedSamplesCombinationNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eFramebufferMixedSamplesCombinationNV>
@@ -32557,13 +32951,15 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT
: buffer( buffer_ )
, offset( offset_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
IndirectCommandsStreamNV( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
: IndirectCommandsStreamNV( *reinterpret_cast<IndirectCommandsStreamNV const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
IndirectCommandsStreamNV & operator=( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -32575,32 +32971,30 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV &
- setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV &
- setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkIndirectCommandsStreamNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkIndirectCommandsStreamNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkIndirectCommandsStreamNV *>( this );
}
- explicit operator VkIndirectCommandsStreamNV &() VULKAN_HPP_NOEXCEPT
+ operator VkIndirectCommandsStreamNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkIndirectCommandsStreamNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -32617,7 +33011,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( IndirectCommandsStreamNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( buffer == rhs.buffer ) && ( offset == rhs.offset );
@@ -32634,13 +33028,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV ) ==
- sizeof( VkIndirectCommandsStreamNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV>::value,
- "IndirectCommandsStreamNV is not nothrow_move_constructible!" );
struct GeneratedCommandsInfoNV
{
@@ -32650,21 +33037,23 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV(
- VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
- VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {},
- VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {},
- uint32_t streamCount_ = {},
- const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams_ = {},
- uint32_t sequencesCount_ = {},
- VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {},
- VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {},
- VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {} ) VULKAN_HPP_NOEXCEPT
- : pipelineBindPoint( pipelineBindPoint_ )
+ VULKAN_HPP_CONSTEXPR
+ GeneratedCommandsInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {},
+ VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {},
+ uint32_t streamCount_ = {},
+ const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams_ = {},
+ uint32_t sequencesCount_ = {},
+ VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {},
+ VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {},
+ VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pipelineBindPoint( pipelineBindPoint_ )
, pipeline( pipeline_ )
, indirectCommandsLayout( indirectCommandsLayout_ )
, streamCount( streamCount_ )
@@ -32677,30 +33066,32 @@ namespace VULKAN_HPP_NAMESPACE
, sequencesCountOffset( sequencesCountOffset_ )
, sequencesIndexBuffer( sequencesIndexBuffer_ )
, sequencesIndexOffset( sequencesIndexOffset_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
GeneratedCommandsInfoNV( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: GeneratedCommandsInfoNV( *reinterpret_cast<GeneratedCommandsInfoNV const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- GeneratedCommandsInfoNV(
- VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_,
- VULKAN_HPP_NAMESPACE::Pipeline pipeline_,
- VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV> const &
- streams_,
- uint32_t sequencesCount_ = {},
- VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {},
- VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {},
- VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {} )
- : pipelineBindPoint( pipelineBindPoint_ )
+ GeneratedCommandsInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_,
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline_,
+ VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV> const & streams_,
+ uint32_t sequencesCount_ = {},
+ VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {},
+ VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {},
+ VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , pipelineBindPoint( pipelineBindPoint_ )
, pipeline( pipeline_ )
, indirectCommandsLayout( indirectCommandsLayout_ )
, streamCount( static_cast<uint32_t>( streams_.size() ) )
@@ -32713,7 +33104,8 @@ namespace VULKAN_HPP_NAMESPACE
, sequencesCountOffset( sequencesCountOffset_ )
, sequencesIndexBuffer( sequencesIndexBuffer_ )
, sequencesIndexOffset( sequencesIndexOffset_ )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -32732,22 +33124,20 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV &
- setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
{
pipelineBindPoint = pipelineBindPoint_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV &
- setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
{
pipeline = pipeline_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setIndirectCommandsLayout(
- VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV &
+ setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
{
indirectCommandsLayout = indirectCommandsLayout_;
return *this;
@@ -32759,17 +33149,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV &
- setPStreams( const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPStreams( const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams_ ) VULKAN_HPP_NOEXCEPT
{
pStreams = pStreams_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- GeneratedCommandsInfoNV & setStreams(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV> const &
- streams_ ) VULKAN_HPP_NOEXCEPT
+ GeneratedCommandsInfoNV &
+ setStreams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV> const & streams_ ) VULKAN_HPP_NOEXCEPT
{
streamCount = static_cast<uint32_t>( streams_.size() );
pStreams = streams_.data();
@@ -32783,67 +33171,60 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV &
- setPreprocessBuffer( VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessBuffer( VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ ) VULKAN_HPP_NOEXCEPT
{
preprocessBuffer = preprocessBuffer_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV &
- setPreprocessOffset( VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessOffset( VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ ) VULKAN_HPP_NOEXCEPT
{
preprocessOffset = preprocessOffset_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV &
- setPreprocessSize( VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessSize( VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ ) VULKAN_HPP_NOEXCEPT
{
preprocessSize = preprocessSize_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV &
- setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT
{
sequencesCountBuffer = sequencesCountBuffer_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV &
- setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT
{
sequencesCountOffset = sequencesCountOffset_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV &
- setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT
{
sequencesIndexBuffer = sequencesIndexBuffer_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV &
- setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT
{
sequencesIndexOffset = sequencesIndexOffset_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkGeneratedCommandsInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkGeneratedCommandsInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkGeneratedCommandsInfoNV *>( this );
}
- explicit operator VkGeneratedCommandsInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkGeneratedCommandsInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkGeneratedCommandsInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -32888,17 +33269,14 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( GeneratedCommandsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) &&
- ( pipeline == rhs.pipeline ) && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) &&
- ( streamCount == rhs.streamCount ) && ( pStreams == rhs.pStreams ) &&
- ( sequencesCount == rhs.sequencesCount ) && ( preprocessBuffer == rhs.preprocessBuffer ) &&
- ( preprocessOffset == rhs.preprocessOffset ) && ( preprocessSize == rhs.preprocessSize ) &&
- ( sequencesCountBuffer == rhs.sequencesCountBuffer ) &&
- ( sequencesCountOffset == rhs.sequencesCountOffset ) &&
- ( sequencesIndexBuffer == rhs.sequencesIndexBuffer ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( pipeline == rhs.pipeline ) &&
+ ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && ( streamCount == rhs.streamCount ) && ( pStreams == rhs.pStreams ) &&
+ ( sequencesCount == rhs.sequencesCount ) && ( preprocessBuffer == rhs.preprocessBuffer ) && ( preprocessOffset == rhs.preprocessOffset ) &&
+ ( preprocessSize == rhs.preprocessSize ) && ( sequencesCountBuffer == rhs.sequencesCountBuffer ) &&
+ ( sequencesCountOffset == rhs.sequencesCountOffset ) && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer ) &&
( sequencesIndexOffset == rhs.sequencesIndexOffset );
# endif
}
@@ -32910,10 +33288,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsInfoNV;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
- VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsInfoNV;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {};
uint32_t streamCount = {};
const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams = {};
@@ -32926,13 +33304,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer = {};
VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV ) ==
- sizeof( VkGeneratedCommandsInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV>::value,
- "GeneratedCommandsInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eGeneratedCommandsInfoNV>
@@ -32945,89 +33316,84 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkGeneratedCommandsMemoryRequirementsInfoNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eGeneratedCommandsMemoryRequirementsInfoNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV(
- VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
- VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {},
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {},
- uint32_t maxSequencesCount_ = {} ) VULKAN_HPP_NOEXCEPT
- : pipelineBindPoint( pipelineBindPoint_ )
+ uint32_t maxSequencesCount_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pipelineBindPoint( pipelineBindPoint_ )
, pipeline( pipeline_ )
, indirectCommandsLayout( indirectCommandsLayout_ )
, maxSequencesCount( maxSequencesCount_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV(
- GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- GeneratedCommandsMemoryRequirementsInfoNV( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : GeneratedCommandsMemoryRequirementsInfoNV(
- *reinterpret_cast<GeneratedCommandsMemoryRequirementsInfoNV const *>( &rhs ) )
- {}
+ GeneratedCommandsMemoryRequirementsInfoNV( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : GeneratedCommandsMemoryRequirementsInfoNV( *reinterpret_cast<GeneratedCommandsMemoryRequirementsInfoNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- GeneratedCommandsMemoryRequirementsInfoNV &
- operator=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ GeneratedCommandsMemoryRequirementsInfoNV & operator=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- GeneratedCommandsMemoryRequirementsInfoNV &
- operator=( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ GeneratedCommandsMemoryRequirementsInfoNV & operator=( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV &
- setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+ setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
{
pipelineBindPoint = pipelineBindPoint_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV &
- setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
{
pipeline = pipeline_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setIndirectCommandsLayout(
- VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV &
+ setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
{
indirectCommandsLayout = indirectCommandsLayout_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV &
- setMaxSequencesCount( uint32_t maxSequencesCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setMaxSequencesCount( uint32_t maxSequencesCount_ ) VULKAN_HPP_NOEXCEPT
{
maxSequencesCount = maxSequencesCount_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkGeneratedCommandsMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkGeneratedCommandsMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( this );
}
- explicit operator VkGeneratedCommandsMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkGeneratedCommandsMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkGeneratedCommandsMemoryRequirementsInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -33049,12 +33415,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) &&
- ( pipeline == rhs.pipeline ) && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) &&
- ( maxSequencesCount == rhs.maxSequencesCount );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( pipeline == rhs.pipeline ) &&
+ ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && ( maxSequencesCount == rhs.maxSequencesCount );
# endif
}
@@ -33065,22 +33430,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
- VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {};
uint32_t maxSequencesCount = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV ) ==
- sizeof( VkGeneratedCommandsMemoryRequirementsInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV>::value,
- "GeneratedCommandsMemoryRequirementsInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eGeneratedCommandsMemoryRequirementsInfoNV>
@@ -33094,25 +33450,24 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR
- VertexInputBindingDescription( uint32_t binding_ = {},
- uint32_t stride_ = {},
- VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ =
- VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex ) VULKAN_HPP_NOEXCEPT
+ VertexInputBindingDescription( uint32_t binding_ = {},
+ uint32_t stride_ = {},
+ VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex ) VULKAN_HPP_NOEXCEPT
: binding( binding_ )
, stride( stride_ )
, inputRate( inputRate_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- VertexInputBindingDescription( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VertexInputBindingDescription( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
: VertexInputBindingDescription( *reinterpret_cast<VertexInputBindingDescription const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VertexInputBindingDescription &
- operator=( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VertexInputBindingDescription & operator=( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VertexInputBindingDescription & operator=( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -33133,25 +33488,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription &
- setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT
{
inputRate = inputRate_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVertexInputBindingDescription const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVertexInputBindingDescription const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVertexInputBindingDescription *>( this );
}
- explicit operator VkVertexInputBindingDescription &() VULKAN_HPP_NOEXCEPT
+ operator VkVertexInputBindingDescription &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVertexInputBindingDescription *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -33168,7 +33522,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( binding == rhs.binding ) && ( stride == rhs.stride ) && ( inputRate == rhs.inputRate );
@@ -33186,41 +33540,32 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t stride = {};
VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputBindingDescription ) ==
- sizeof( VkVertexInputBindingDescription ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription>::value,
- "VertexInputBindingDescription is not nothrow_move_constructible!" );
struct VertexInputAttributeDescription
{
using NativeType = VkVertexInputAttributeDescription;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- VertexInputAttributeDescription( uint32_t location_ = {},
- uint32_t binding_ = {},
- VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- uint32_t offset_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( uint32_t location_ = {},
+ uint32_t binding_ = {},
+ VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ uint32_t offset_ = {} ) VULKAN_HPP_NOEXCEPT
: location( location_ )
, binding( binding_ )
, format( format_ )
, offset( offset_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- VertexInputAttributeDescription( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
: VertexInputAttributeDescription( *reinterpret_cast<VertexInputAttributeDescription const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VertexInputAttributeDescription &
- operator=( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VertexInputAttributeDescription & operator=( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VertexInputAttributeDescription & operator=( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -33241,8 +33586,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription &
- setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
@@ -33255,17 +33599,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVertexInputAttributeDescription const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVertexInputAttributeDescription const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVertexInputAttributeDescription *>( this );
}
- explicit operator VkVertexInputAttributeDescription &() VULKAN_HPP_NOEXCEPT
+ operator VkVertexInputAttributeDescription &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVertexInputAttributeDescription *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -33282,11 +33626,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( location == rhs.location ) && ( binding == rhs.binding ) && ( format == rhs.format ) &&
- ( offset == rhs.offset );
+ return ( location == rhs.location ) && ( binding == rhs.binding ) && ( format == rhs.format ) && ( offset == rhs.offset );
# endif
}
@@ -33302,66 +33645,57 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
uint32_t offset = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription ) ==
- sizeof( VkVertexInputAttributeDescription ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription>::value,
- "VertexInputAttributeDescription is not nothrow_move_constructible!" );
struct PipelineVertexInputStateCreateInfo
{
using NativeType = VkPipelineVertexInputStateCreateInfo;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineVertexInputStateCreateInfo;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputStateCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo(
- VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ = {},
- uint32_t vertexBindingDescriptionCount_ = {},
- const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions_ = {},
- uint32_t vertexAttributeDescriptionCount_ = {},
- const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ = {} )
- VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ = {},
+ uint32_t vertexBindingDescriptionCount_ = {},
+ const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions_ = {},
+ uint32_t vertexAttributeDescriptionCount_ = {},
+ const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, vertexBindingDescriptionCount( vertexBindingDescriptionCount_ )
, pVertexBindingDescriptions( pVertexBindingDescriptions_ )
, vertexAttributeDescriptionCount( vertexAttributeDescriptionCount_ )
, pVertexAttributeDescriptions( pVertexAttributeDescriptions_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateInfo const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineVertexInputStateCreateInfo( *reinterpret_cast<PipelineVertexInputStateCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineVertexInputStateCreateInfo(
- VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription> const &
- vertexBindingDescriptions_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription> const &
- vertexAttributeDescriptions_ = {} )
- : flags( flags_ )
+ VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription> const & vertexBindingDescriptions_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription> const & vertexAttributeDescriptions_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, vertexBindingDescriptionCount( static_cast<uint32_t>( vertexBindingDescriptions_.size() ) )
, pVertexBindingDescriptions( vertexBindingDescriptions_.data() )
, vertexAttributeDescriptionCount( static_cast<uint32_t>( vertexAttributeDescriptions_.size() ) )
, pVertexAttributeDescriptions( vertexAttributeDescriptions_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineVertexInputStateCreateInfo &
- operator=( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineVertexInputStateCreateInfo & operator=( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineVertexInputStateCreateInfo &
- operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineVertexInputStateCreateInfo & operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo const *>( &rhs );
return *this;
@@ -33375,21 +33709,20 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ setFlags( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo &
- setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) VULKAN_HPP_NOEXCEPT
{
vertexBindingDescriptionCount = vertexBindingDescriptionCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setPVertexBindingDescriptions(
- const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo &
+ setPVertexBindingDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT
{
pVertexBindingDescriptions = pVertexBindingDescriptions_;
return *this;
@@ -33397,8 +33730,8 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineVertexInputStateCreateInfo & setVertexBindingDescriptions(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription> const &
- vertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription> const & vertexBindingDescriptions_ )
+ VULKAN_HPP_NOEXCEPT
{
vertexBindingDescriptionCount = static_cast<uint32_t>( vertexBindingDescriptions_.size() );
pVertexBindingDescriptions = vertexBindingDescriptions_.data();
@@ -33407,14 +33740,14 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo &
- setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ ) VULKAN_HPP_NOEXCEPT
+ setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ ) VULKAN_HPP_NOEXCEPT
{
vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setPVertexAttributeDescriptions(
- const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo &
+ setPVertexAttributeDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT
{
pVertexAttributeDescriptions = pVertexAttributeDescriptions_;
return *this;
@@ -33422,8 +33755,8 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptions(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription> const &
- vertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription> const & vertexAttributeDescriptions_ )
+ VULKAN_HPP_NOEXCEPT
{
vertexAttributeDescriptionCount = static_cast<uint32_t>( vertexAttributeDescriptions_.size() );
pVertexAttributeDescriptions = vertexAttributeDescriptions_.data();
@@ -33432,17 +33765,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineVertexInputStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineVertexInputStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineVertexInputStateCreateInfo *>( this );
}
- explicit operator VkPipelineVertexInputStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineVertexInputStateCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineVertexInputStateCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -33456,13 +33789,8 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- flags,
- vertexBindingDescriptionCount,
- pVertexBindingDescriptions,
- vertexAttributeDescriptionCount,
- pVertexAttributeDescriptions );
+ return std::tie(
+ sType, pNext, flags, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions );
}
#endif
@@ -33471,14 +33799,12 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount ) &&
- ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions ) &&
- ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount ) &&
- ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions );
+ ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount ) && ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions ) &&
+ ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount ) && ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions );
# endif
}
@@ -33489,22 +33815,14 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo;
+ const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags = {};
uint32_t vertexBindingDescriptionCount = {};
const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions = {};
uint32_t vertexAttributeDescriptionCount = {};
const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo ) ==
- sizeof( VkPipelineVertexInputStateCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo>::value,
- "PipelineVertexInputStateCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineVertexInputStateCreateInfo>
@@ -33517,32 +33835,32 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPipelineInputAssemblyStateCreateInfo;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineInputAssemblyStateCreateInfo;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInputAssemblyStateCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo(
- VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList,
- VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR
+ PipelineInputAssemblyStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList,
+ VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, topology( topology_ )
, primitiveRestartEnable( primitiveRestartEnable_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateInfo const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineInputAssemblyStateCreateInfo( *reinterpret_cast<PipelineInputAssemblyStateCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineInputAssemblyStateCreateInfo &
- operator=( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineInputAssemblyStateCreateInfo & operator=( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineInputAssemblyStateCreateInfo &
- operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineInputAssemblyStateCreateInfo & operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo const *>( &rhs );
return *this;
@@ -33556,38 +33874,37 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ setFlags( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo &
- setTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ ) VULKAN_HPP_NOEXCEPT
{
topology = topology_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo &
- setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ ) VULKAN_HPP_NOEXCEPT
+ setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ ) VULKAN_HPP_NOEXCEPT
{
primitiveRestartEnable = primitiveRestartEnable_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineInputAssemblyStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineInputAssemblyStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineInputAssemblyStateCreateInfo *>( this );
}
- explicit operator VkPipelineInputAssemblyStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineInputAssemblyStateCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineInputAssemblyStateCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -33608,11 +33925,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( topology == rhs.topology ) && ( primitiveRestartEnable == rhs.primitiveRestartEnable );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( topology == rhs.topology ) &&
+ ( primitiveRestartEnable == rhs.primitiveRestartEnable );
# endif
}
@@ -33623,20 +33940,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags = {};
- VULKAN_HPP_NAMESPACE::PrimitiveTopology topology = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList;
- VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::PrimitiveTopology topology = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList;
+ VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo ) ==
- sizeof( VkPipelineInputAssemblyStateCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo>::value,
- "PipelineInputAssemblyStateCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineInputAssemblyStateCreateInfo>
@@ -33649,30 +33958,29 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPipelineTessellationStateCreateInfo;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineTessellationStateCreateInfo;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationStateCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PipelineTessellationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ = {},
- uint32_t patchControlPoints_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ = {},
+ uint32_t patchControlPoints_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, patchControlPoints( patchControlPoints_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateInfo const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineTessellationStateCreateInfo( *reinterpret_cast<PipelineTessellationStateCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineTessellationStateCreateInfo &
- operator=( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineTessellationStateCreateInfo & operator=( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineTessellationStateCreateInfo &
- operator=( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineTessellationStateCreateInfo & operator=( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo const *>( &rhs );
return *this;
@@ -33686,31 +33994,30 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ setFlags( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo &
- setPatchControlPoints( uint32_t patchControlPoints_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & setPatchControlPoints( uint32_t patchControlPoints_ ) VULKAN_HPP_NOEXCEPT
{
patchControlPoints = patchControlPoints_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineTessellationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineTessellationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineTessellationStateCreateInfo *>( this );
}
- explicit operator VkPipelineTessellationStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineTessellationStateCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineTessellationStateCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -33730,11 +34037,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( patchControlPoints == rhs.patchControlPoints );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( patchControlPoints == rhs.patchControlPoints );
# endif
}
@@ -33745,19 +34051,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationStateCreateInfo;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationStateCreateInfo;
+ const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags = {};
uint32_t patchControlPoints = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo ) ==
- sizeof( VkPipelineTessellationStateCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo>::value,
- "PipelineTessellationStateCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineTessellationStateCreateInfo>
@@ -33773,42 +34071,45 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportStateCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ = {},
- uint32_t viewportCount_ = {},
- const VULKAN_HPP_NAMESPACE::Viewport * pViewports_ = {},
- uint32_t scissorCount_ = {},
- const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ = {},
+ uint32_t viewportCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Viewport * pViewports_ = {},
+ uint32_t scissorCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, viewportCount( viewportCount_ )
, pViewports( pViewports_ )
, scissorCount( scissorCount_ )
, pScissors( pScissors_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PipelineViewportStateCreateInfo( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineViewportStateCreateInfo( *reinterpret_cast<PipelineViewportStateCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PipelineViewportStateCreateInfo(
- VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors_ = {} )
- : flags( flags_ )
+ PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, viewportCount( static_cast<uint32_t>( viewports_.size() ) )
, pViewports( viewports_.data() )
, scissorCount( static_cast<uint32_t>( scissors_.size() ) )
, pScissors( scissors_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineViewportStateCreateInfo &
- operator=( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineViewportStateCreateInfo & operator=( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineViewportStateCreateInfo & operator=( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -33823,31 +34124,27 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo &
- setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
{
viewportCount = viewportCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo &
- setPViewports( const VULKAN_HPP_NAMESPACE::Viewport * pViewports_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPViewports( const VULKAN_HPP_NAMESPACE::Viewport * pViewports_ ) VULKAN_HPP_NOEXCEPT
{
pViewports = pViewports_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PipelineViewportStateCreateInfo & setViewports(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports_ )
- VULKAN_HPP_NOEXCEPT
+ PipelineViewportStateCreateInfo &
+ setViewports( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports_ ) VULKAN_HPP_NOEXCEPT
{
viewportCount = static_cast<uint32_t>( viewports_.size() );
pViewports = viewports_.data();
@@ -33855,15 +34152,13 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo &
- setScissorCount( uint32_t scissorCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setScissorCount( uint32_t scissorCount_ ) VULKAN_HPP_NOEXCEPT
{
scissorCount = scissorCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo &
- setPScissors( const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPScissors( const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ ) VULKAN_HPP_NOEXCEPT
{
pScissors = pScissors_;
return *this;
@@ -33871,8 +34166,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineViewportStateCreateInfo &
- setScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors_ )
- VULKAN_HPP_NOEXCEPT
+ setScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors_ ) VULKAN_HPP_NOEXCEPT
{
scissorCount = static_cast<uint32_t>( scissors_.size() );
pScissors = scissors_.data();
@@ -33881,17 +34175,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineViewportStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineViewportStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineViewportStateCreateInfo *>( this );
}
- explicit operator VkPipelineViewportStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineViewportStateCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineViewportStateCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -33914,12 +34208,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( viewportCount == rhs.viewportCount ) && ( pViewports == rhs.pViewports ) &&
- ( scissorCount == rhs.scissorCount ) && ( pScissors == rhs.pScissors );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( viewportCount == rhs.viewportCount ) &&
+ ( pViewports == rhs.pViewports ) && ( scissorCount == rhs.scissorCount ) && ( pScissors == rhs.pScissors );
# endif
}
@@ -33930,22 +34223,14 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportStateCreateInfo;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportStateCreateInfo;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags = {};
uint32_t viewportCount = {};
const VULKAN_HPP_NAMESPACE::Viewport * pViewports = {};
uint32_t scissorCount = {};
const VULKAN_HPP_NAMESPACE::Rect2D * pScissors = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo ) ==
- sizeof( VkPipelineViewportStateCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo>::value,
- "PipelineViewportStateCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineViewportStateCreateInfo>
@@ -33958,23 +34243,23 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPipelineRasterizationStateCreateInfo;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineRasterizationStateCreateInfo;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo(
- VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ = {},
- VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ = VULKAN_HPP_NAMESPACE::PolygonMode::eFill,
- VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ = {},
- VULKAN_HPP_NAMESPACE::FrontFace frontFace_ = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise,
- VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ = {},
- float depthBiasConstantFactor_ = {},
- float depthBiasClamp_ = {},
- float depthBiasSlopeFactor_ = {},
- float lineWidth_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ = {},
+ VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ = VULKAN_HPP_NAMESPACE::PolygonMode::eFill,
+ VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ = {},
+ VULKAN_HPP_NAMESPACE::FrontFace frontFace_ = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise,
+ VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ = {},
+ float depthBiasConstantFactor_ = {},
+ float depthBiasClamp_ = {},
+ float depthBiasSlopeFactor_ = {},
+ float lineWidth_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, depthClampEnable( depthClampEnable_ )
, rasterizerDiscardEnable( rasterizerDiscardEnable_ )
, polygonMode( polygonMode_ )
@@ -33985,21 +34270,20 @@ namespace VULKAN_HPP_NAMESPACE
, depthBiasClamp( depthBiasClamp_ )
, depthBiasSlopeFactor( depthBiasSlopeFactor_ )
, lineWidth( lineWidth_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateInfo const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineRasterizationStateCreateInfo( *reinterpret_cast<PipelineRasterizationStateCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineRasterizationStateCreateInfo &
- operator=( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineRasterizationStateCreateInfo & operator=( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineRasterizationStateCreateInfo &
- operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationStateCreateInfo & operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo const *>( &rhs );
return *this;
@@ -34013,70 +34297,62 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo &
- setDepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ ) VULKAN_HPP_NOEXCEPT
{
depthClampEnable = depthClampEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo &
- setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ ) VULKAN_HPP_NOEXCEPT
+ setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ ) VULKAN_HPP_NOEXCEPT
{
rasterizerDiscardEnable = rasterizerDiscardEnable_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo &
- setPolygonMode( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setPolygonMode( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ ) VULKAN_HPP_NOEXCEPT
{
polygonMode = polygonMode_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo &
- setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ ) VULKAN_HPP_NOEXCEPT
{
cullMode = cullMode_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo &
- setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace_ ) VULKAN_HPP_NOEXCEPT
{
frontFace = frontFace_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo &
- setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ ) VULKAN_HPP_NOEXCEPT
{
depthBiasEnable = depthBiasEnable_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo &
- setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) VULKAN_HPP_NOEXCEPT
{
depthBiasConstantFactor = depthBiasConstantFactor_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo &
- setDepthBiasClamp( float depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasClamp( float depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
{
depthBiasClamp = depthBiasClamp_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo &
- setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) VULKAN_HPP_NOEXCEPT
{
depthBiasSlopeFactor = depthBiasSlopeFactor_;
return *this;
@@ -34089,17 +34365,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineRasterizationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineRasterizationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineRasterizationStateCreateInfo *>( this );
}
- explicit operator VkPipelineRasterizationStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineRasterizationStateCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineRasterizationStateCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -34140,16 +34416,13 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( depthClampEnable == rhs.depthClampEnable ) &&
- ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable ) && ( polygonMode == rhs.polygonMode ) &&
- ( cullMode == rhs.cullMode ) && ( frontFace == rhs.frontFace ) &&
- ( depthBiasEnable == rhs.depthBiasEnable ) && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor ) &&
- ( depthBiasClamp == rhs.depthBiasClamp ) && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor ) &&
- ( lineWidth == rhs.lineWidth );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( depthClampEnable == rhs.depthClampEnable ) &&
+ ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable ) && ( polygonMode == rhs.polygonMode ) && ( cullMode == rhs.cullMode ) &&
+ ( frontFace == rhs.frontFace ) && ( depthBiasEnable == rhs.depthBiasEnable ) && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor ) &&
+ ( depthBiasClamp == rhs.depthBiasClamp ) && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor ) && ( lineWidth == rhs.lineWidth );
# endif
}
@@ -34160,28 +34433,20 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo;
+ const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags = {};
VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable = {};
VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable = {};
- VULKAN_HPP_NAMESPACE::PolygonMode polygonMode = VULKAN_HPP_NAMESPACE::PolygonMode::eFill;
- VULKAN_HPP_NAMESPACE::CullModeFlags cullMode = {};
- VULKAN_HPP_NAMESPACE::FrontFace frontFace = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise;
- VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable = {};
- float depthBiasConstantFactor = {};
- float depthBiasClamp = {};
- float depthBiasSlopeFactor = {};
- float lineWidth = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo ) ==
- sizeof( VkPipelineRasterizationStateCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo>::value,
- "PipelineRasterizationStateCreateInfo is not nothrow_move_constructible!" );
+ VULKAN_HPP_NAMESPACE::PolygonMode polygonMode = VULKAN_HPP_NAMESPACE::PolygonMode::eFill;
+ VULKAN_HPP_NAMESPACE::CullModeFlags cullMode = {};
+ VULKAN_HPP_NAMESPACE::FrontFace frontFace = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise;
+ VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable = {};
+ float depthBiasConstantFactor = {};
+ float depthBiasClamp = {};
+ float depthBiasSlopeFactor = {};
+ float lineWidth = {};
+ };
template <>
struct CppType<StructureType, StructureType::ePipelineRasterizationStateCreateInfo>
@@ -34194,40 +34459,40 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPipelineMultisampleStateCreateInfo;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineMultisampleStateCreateInfo;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineMultisampleStateCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo(
- VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
- VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ = {},
- float minSampleShading_ = {},
- const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR
+ PipelineMultisampleStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+ VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ = {},
+ float minSampleShading_ = {},
+ const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, rasterizationSamples( rasterizationSamples_ )
, sampleShadingEnable( sampleShadingEnable_ )
, minSampleShading( minSampleShading_ )
, pSampleMask( pSampleMask_ )
, alphaToCoverageEnable( alphaToCoverageEnable_ )
, alphaToOneEnable( alphaToOneEnable_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateInfo const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineMultisampleStateCreateInfo( *reinterpret_cast<PipelineMultisampleStateCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineMultisampleStateCreateInfo &
- operator=( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineMultisampleStateCreateInfo & operator=( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineMultisampleStateCreateInfo &
- operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineMultisampleStateCreateInfo & operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo const *>( &rhs );
return *this;
@@ -34241,66 +34506,62 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ setFlags( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo &
- setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
+ setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
{
rasterizationSamples = rasterizationSamples_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo &
- setSampleShadingEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setSampleShadingEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ ) VULKAN_HPP_NOEXCEPT
{
sampleShadingEnable = sampleShadingEnable_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo &
- setMinSampleShading( float minSampleShading_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setMinSampleShading( float minSampleShading_ ) VULKAN_HPP_NOEXCEPT
{
minSampleShading = minSampleShading_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo &
- setPSampleMask( const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setPSampleMask( const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask_ ) VULKAN_HPP_NOEXCEPT
{
pSampleMask = pSampleMask_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo &
- setAlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT
+ setAlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT
{
alphaToCoverageEnable = alphaToCoverageEnable_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo &
- setAlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setAlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT
{
alphaToOneEnable = alphaToOneEnable_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineMultisampleStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineMultisampleStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineMultisampleStateCreateInfo *>( this );
}
- explicit operator VkPipelineMultisampleStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineMultisampleStateCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineMultisampleStateCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -34316,15 +34577,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- flags,
- rasterizationSamples,
- sampleShadingEnable,
- minSampleShading,
- pSampleMask,
- alphaToCoverageEnable,
- alphaToOneEnable );
+ return std::tie( sType, pNext, flags, rasterizationSamples, sampleShadingEnable, minSampleShading, pSampleMask, alphaToCoverageEnable, alphaToOneEnable );
}
#endif
@@ -34333,14 +34586,12 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( rasterizationSamples == rhs.rasterizationSamples ) &&
- ( sampleShadingEnable == rhs.sampleShadingEnable ) && ( minSampleShading == rhs.minSampleShading ) &&
- ( pSampleMask == rhs.pSampleMask ) && ( alphaToCoverageEnable == rhs.alphaToCoverageEnable ) &&
- ( alphaToOneEnable == rhs.alphaToOneEnable );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( rasterizationSamples == rhs.rasterizationSamples ) &&
+ ( sampleShadingEnable == rhs.sampleShadingEnable ) && ( minSampleShading == rhs.minSampleShading ) && ( pSampleMask == rhs.pSampleMask ) &&
+ ( alphaToCoverageEnable == rhs.alphaToCoverageEnable ) && ( alphaToOneEnable == rhs.alphaToOneEnable );
# endif
}
@@ -34351,24 +34602,16 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags = {};
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
- VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable = {};
- float minSampleShading = {};
- const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask = {};
- VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable = {};
- VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo ) ==
- sizeof( VkPipelineMultisampleStateCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo>::value,
- "PipelineMultisampleStateCreateInfo is not nothrow_move_constructible!" );
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+ VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable = {};
+ float minSampleShading = {};
+ const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask = {};
+ VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable = {};
+ };
template <>
struct CppType<StructureType, StructureType::ePipelineMultisampleStateCreateInfo>
@@ -34381,14 +34624,13 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkStencilOpState;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- StencilOpState( VULKAN_HPP_NAMESPACE::StencilOp failOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
- VULKAN_HPP_NAMESPACE::StencilOp passOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
- VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
- VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
- uint32_t compareMask_ = {},
- uint32_t writeMask_ = {},
- uint32_t reference_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR StencilOpState( VULKAN_HPP_NAMESPACE::StencilOp failOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
+ VULKAN_HPP_NAMESPACE::StencilOp passOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
+ VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
+ VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
+ uint32_t compareMask_ = {},
+ uint32_t writeMask_ = {},
+ uint32_t reference_ = {} ) VULKAN_HPP_NOEXCEPT
: failOp( failOp_ )
, passOp( passOp_ )
, depthFailOp( depthFailOp_ )
@@ -34396,13 +34638,12 @@ namespace VULKAN_HPP_NAMESPACE
, compareMask( compareMask_ )
, writeMask( writeMask_ )
, reference( reference_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR StencilOpState( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- StencilOpState( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
- : StencilOpState( *reinterpret_cast<StencilOpState const *>( &rhs ) )
- {}
+ StencilOpState( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT : StencilOpState( *reinterpret_cast<StencilOpState const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
StencilOpState & operator=( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -34426,15 +34667,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 StencilOpState &
- setDepthFailOp( VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 StencilOpState & setDepthFailOp( VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ ) VULKAN_HPP_NOEXCEPT
{
depthFailOp = depthFailOp_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 StencilOpState &
- setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 StencilOpState & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
{
compareOp = compareOp_;
return *this;
@@ -34459,17 +34698,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkStencilOpState const &() const VULKAN_HPP_NOEXCEPT
+ operator VkStencilOpState const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkStencilOpState *>( this );
}
- explicit operator VkStencilOpState &() VULKAN_HPP_NOEXCEPT
+ operator VkStencilOpState &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkStencilOpState *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -34492,12 +34731,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( failOp == rhs.failOp ) && ( passOp == rhs.passOp ) && ( depthFailOp == rhs.depthFailOp ) &&
- ( compareOp == rhs.compareOp ) && ( compareMask == rhs.compareMask ) && ( writeMask == rhs.writeMask ) &&
- ( reference == rhs.reference );
+ return ( failOp == rhs.failOp ) && ( passOp == rhs.passOp ) && ( depthFailOp == rhs.depthFailOp ) && ( compareOp == rhs.compareOp ) &&
+ ( compareMask == rhs.compareMask ) && ( writeMask == rhs.writeMask ) && ( reference == rhs.reference );
# endif
}
@@ -34516,34 +34754,28 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t writeMask = {};
uint32_t reference = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::StencilOpState ) == sizeof( VkStencilOpState ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::StencilOpState>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::StencilOpState>::value,
- "StencilOpState is not nothrow_move_constructible!" );
struct PipelineDepthStencilStateCreateInfo
{
using NativeType = VkPipelineDepthStencilStateCreateInfo;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineDepthStencilStateCreateInfo;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDepthStencilStateCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo(
- VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ = {},
- VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
- VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ = {},
- VULKAN_HPP_NAMESPACE::StencilOpState front_ = {},
- VULKAN_HPP_NAMESPACE::StencilOpState back_ = {},
- float minDepthBounds_ = {},
- float maxDepthBounds_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ = {},
+ VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
+ VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ = {},
+ VULKAN_HPP_NAMESPACE::StencilOpState front_ = {},
+ VULKAN_HPP_NAMESPACE::StencilOpState back_ = {},
+ float minDepthBounds_ = {},
+ float maxDepthBounds_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, depthTestEnable( depthTestEnable_ )
, depthWriteEnable( depthWriteEnable_ )
, depthCompareOp( depthCompareOp_ )
@@ -34553,21 +34785,20 @@ namespace VULKAN_HPP_NAMESPACE
, back( back_ )
, minDepthBounds( minDepthBounds_ )
, maxDepthBounds( maxDepthBounds_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateInfo const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineDepthStencilStateCreateInfo( *reinterpret_cast<PipelineDepthStencilStateCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineDepthStencilStateCreateInfo &
- operator=( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineDepthStencilStateCreateInfo & operator=( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineDepthStencilStateCreateInfo &
- operator=( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineDepthStencilStateCreateInfo & operator=( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo const *>( &rhs );
return *this;
@@ -34581,87 +34812,79 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ setFlags( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo &
- setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ ) VULKAN_HPP_NOEXCEPT
{
depthTestEnable = depthTestEnable_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo &
- setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ ) VULKAN_HPP_NOEXCEPT
{
depthWriteEnable = depthWriteEnable_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo &
- setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ ) VULKAN_HPP_NOEXCEPT
{
depthCompareOp = depthCompareOp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo &
- setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ ) VULKAN_HPP_NOEXCEPT
+ setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ ) VULKAN_HPP_NOEXCEPT
{
depthBoundsTestEnable = depthBoundsTestEnable_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo &
- setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ ) VULKAN_HPP_NOEXCEPT
{
stencilTestEnable = stencilTestEnable_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo &
- setFront( VULKAN_HPP_NAMESPACE::StencilOpState const & front_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setFront( VULKAN_HPP_NAMESPACE::StencilOpState const & front_ ) VULKAN_HPP_NOEXCEPT
{
front = front_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo &
- setBack( VULKAN_HPP_NAMESPACE::StencilOpState const & back_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setBack( VULKAN_HPP_NAMESPACE::StencilOpState const & back_ ) VULKAN_HPP_NOEXCEPT
{
back = back_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo &
- setMinDepthBounds( float minDepthBounds_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setMinDepthBounds( float minDepthBounds_ ) VULKAN_HPP_NOEXCEPT
{
minDepthBounds = minDepthBounds_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo &
- setMaxDepthBounds( float maxDepthBounds_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setMaxDepthBounds( float maxDepthBounds_ ) VULKAN_HPP_NOEXCEPT
{
maxDepthBounds = maxDepthBounds_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineDepthStencilStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineDepthStencilStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo *>( this );
}
- explicit operator VkPipelineDepthStencilStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineDepthStencilStateCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineDepthStencilStateCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -34700,14 +34923,13 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineDepthStencilStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( depthTestEnable == rhs.depthTestEnable ) && ( depthWriteEnable == rhs.depthWriteEnable ) &&
- ( depthCompareOp == rhs.depthCompareOp ) && ( depthBoundsTestEnable == rhs.depthBoundsTestEnable ) &&
- ( stencilTestEnable == rhs.stencilTestEnable ) && ( front == rhs.front ) && ( back == rhs.back ) &&
- ( minDepthBounds == rhs.minDepthBounds ) && ( maxDepthBounds == rhs.maxDepthBounds );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( depthTestEnable == rhs.depthTestEnable ) &&
+ ( depthWriteEnable == rhs.depthWriteEnable ) && ( depthCompareOp == rhs.depthCompareOp ) &&
+ ( depthBoundsTestEnable == rhs.depthBoundsTestEnable ) && ( stencilTestEnable == rhs.stencilTestEnable ) && ( front == rhs.front ) &&
+ ( back == rhs.back ) && ( minDepthBounds == rhs.minDepthBounds ) && ( maxDepthBounds == rhs.maxDepthBounds );
# endif
}
@@ -34718,12 +34940,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags = {};
- VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable = {};
- VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable = {};
- VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable = {};
+ VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable = {};
VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable = {};
VULKAN_HPP_NAMESPACE::StencilOpState front = {};
@@ -34731,14 +34953,6 @@ namespace VULKAN_HPP_NAMESPACE
float minDepthBounds = {};
float maxDepthBounds = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo ) ==
- sizeof( VkPipelineDepthStencilStateCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo>::value,
- "PipelineDepthStencilStateCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineDepthStencilStateCreateInfo>
@@ -34751,15 +34965,14 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPipelineColorBlendAttachmentState;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState(
- VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ = {},
- VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
- VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
- VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd,
- VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
- VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
- VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd,
- VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ = {},
+ VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
+ VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
+ VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd,
+ VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
+ VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
+ VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd,
+ VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ = {} ) VULKAN_HPP_NOEXCEPT
: blendEnable( blendEnable_ )
, srcColorBlendFactor( srcColorBlendFactor_ )
, dstColorBlendFactor( dstColorBlendFactor_ )
@@ -34768,18 +34981,18 @@ namespace VULKAN_HPP_NAMESPACE
, dstAlphaBlendFactor( dstAlphaBlendFactor_ )
, alphaBlendOp( alphaBlendOp_ )
, colorWriteMask( colorWriteMask_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PipelineColorBlendAttachmentState( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineColorBlendAttachmentState( *reinterpret_cast<PipelineColorBlendAttachmentState const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineColorBlendAttachmentState &
- operator=( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineColorBlendAttachmentState & operator=( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineColorBlendAttachmentState & operator=( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -34788,74 +35001,71 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState &
- setBlendEnable( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setBlendEnable( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ ) VULKAN_HPP_NOEXCEPT
{
blendEnable = blendEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState &
- setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+ setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
{
srcColorBlendFactor = srcColorBlendFactor_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState &
- setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+ setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
{
dstColorBlendFactor = dstColorBlendFactor_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState &
- setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT
{
colorBlendOp = colorBlendOp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState &
- setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+ setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
{
srcAlphaBlendFactor = srcAlphaBlendFactor_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState &
- setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+ setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
{
dstAlphaBlendFactor = dstAlphaBlendFactor_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState &
- setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT
{
alphaBlendOp = alphaBlendOp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState &
- setColorWriteMask( VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ ) VULKAN_HPP_NOEXCEPT
+ setColorWriteMask( VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ ) VULKAN_HPP_NOEXCEPT
{
colorWriteMask = colorWriteMask_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineColorBlendAttachmentState const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineColorBlendAttachmentState const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineColorBlendAttachmentState *>( this );
}
- explicit operator VkPipelineColorBlendAttachmentState &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineColorBlendAttachmentState &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineColorBlendAttachmentState *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -34870,14 +35080,8 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( blendEnable,
- srcColorBlendFactor,
- dstColorBlendFactor,
- colorBlendOp,
- srcAlphaBlendFactor,
- dstAlphaBlendFactor,
- alphaBlendOp,
- colorWriteMask );
+ return std::tie(
+ blendEnable, srcColorBlendFactor, dstColorBlendFactor, colorBlendOp, srcAlphaBlendFactor, dstAlphaBlendFactor, alphaBlendOp, colorWriteMask );
}
#endif
@@ -34886,12 +35090,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( blendEnable == rhs.blendEnable ) && ( srcColorBlendFactor == rhs.srcColorBlendFactor ) &&
- ( dstColorBlendFactor == rhs.dstColorBlendFactor ) && ( colorBlendOp == rhs.colorBlendOp ) &&
- ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor ) && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor ) &&
+ return ( blendEnable == rhs.blendEnable ) && ( srcColorBlendFactor == rhs.srcColorBlendFactor ) && ( dstColorBlendFactor == rhs.dstColorBlendFactor ) &&
+ ( colorBlendOp == rhs.colorBlendOp ) && ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor ) && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor ) &&
( alphaBlendOp == rhs.alphaBlendOp ) && ( colorWriteMask == rhs.colorWriteMask );
# endif
}
@@ -34912,66 +35115,60 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState ) ==
- sizeof( VkPipelineColorBlendAttachmentState ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState>::value,
- "PipelineColorBlendAttachmentState is not nothrow_move_constructible!" );
struct PipelineColorBlendStateCreateInfo
{
using NativeType = VkPipelineColorBlendStateCreateInfo;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineColorBlendStateCreateInfo;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendStateCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo(
- VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ = {},
- VULKAN_HPP_NAMESPACE::LogicOp logicOp_ = VULKAN_HPP_NAMESPACE::LogicOp::eClear,
- uint32_t attachmentCount_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_ = {},
- std::array<float, 4> const & blendConstants_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ = {},
+ VULKAN_HPP_NAMESPACE::LogicOp logicOp_ = VULKAN_HPP_NAMESPACE::LogicOp::eClear,
+ uint32_t attachmentCount_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_ = {},
+ std::array<float, 4> const & blendConstants_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, logicOpEnable( logicOpEnable_ )
, logicOp( logicOp_ )
, attachmentCount( attachmentCount_ )
, pAttachments( pAttachments_ )
, blendConstants( blendConstants_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14
- PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineColorBlendStateCreateInfo( *reinterpret_cast<PipelineColorBlendStateCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineColorBlendStateCreateInfo(
- VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_,
- VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_,
- VULKAN_HPP_NAMESPACE::LogicOp logicOp_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
- const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState> const & attachments_,
- std::array<float, 4> const & blendConstants_ = {} )
- : flags( flags_ )
+ VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_,
+ VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_,
+ VULKAN_HPP_NAMESPACE::LogicOp logicOp_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState> const & attachments_,
+ std::array<float, 4> const & blendConstants_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, logicOpEnable( logicOpEnable_ )
, logicOp( logicOp_ )
, attachmentCount( static_cast<uint32_t>( attachments_.size() ) )
, pAttachments( attachments_.data() )
, blendConstants( blendConstants_ )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineColorBlendStateCreateInfo &
- operator=( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineColorBlendStateCreateInfo & operator=( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineColorBlendStateCreateInfo & operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -34986,36 +35183,32 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo &
- setLogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setLogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ ) VULKAN_HPP_NOEXCEPT
{
logicOpEnable = logicOpEnable_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo &
- setLogicOp( VULKAN_HPP_NAMESPACE::LogicOp logicOp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setLogicOp( VULKAN_HPP_NAMESPACE::LogicOp logicOp_ ) VULKAN_HPP_NOEXCEPT
{
logicOp = logicOp_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo &
- setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
attachmentCount = attachmentCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setPAttachments(
- const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo &
+ setPAttachments( const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pAttachments = pAttachments_;
return *this;
@@ -35023,8 +35216,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineColorBlendStateCreateInfo & setAttachments(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
- const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState> const & attachments_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState> const & attachments_ ) VULKAN_HPP_NOEXCEPT
{
attachmentCount = static_cast<uint32_t>( attachments_.size() );
pAttachments = attachments_.data();
@@ -35032,25 +35224,24 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo &
- setBlendConstants( std::array<float, 4> blendConstants_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setBlendConstants( std::array<float, 4> blendConstants_ ) VULKAN_HPP_NOEXCEPT
{
blendConstants = blendConstants_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineColorBlendStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineColorBlendStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineColorBlendStateCreateInfo *>( this );
}
- explicit operator VkPipelineColorBlendStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineColorBlendStateCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineColorBlendStateCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -35074,12 +35265,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineColorBlendStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( logicOpEnable == rhs.logicOpEnable ) && ( logicOp == rhs.logicOp ) &&
- ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( logicOpEnable == rhs.logicOpEnable ) &&
+ ( logicOp == rhs.logicOp ) && ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) &&
( blendConstants == rhs.blendConstants );
# endif
}
@@ -35091,23 +35281,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendStateCreateInfo;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags = {};
- VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable = {};
- VULKAN_HPP_NAMESPACE::LogicOp logicOp = VULKAN_HPP_NAMESPACE::LogicOp::eClear;
- uint32_t attachmentCount = {};
- const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments = {};
- VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> blendConstants = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendStateCreateInfo;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable = {};
+ VULKAN_HPP_NAMESPACE::LogicOp logicOp = VULKAN_HPP_NAMESPACE::LogicOp::eClear;
+ uint32_t attachmentCount = {};
+ const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> blendConstants = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo ) ==
- sizeof( VkPipelineColorBlendStateCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo>::value,
- "PipelineColorBlendStateCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineColorBlendStateCreateInfo>
@@ -35123,35 +35305,35 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDynamicStateCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo(
- VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ = {},
- uint32_t dynamicStateCount_ = {},
- const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ = {},
+ uint32_t dynamicStateCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, dynamicStateCount( dynamicStateCount_ )
, pDynamicStates( pDynamicStates_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineDynamicStateCreateInfo( *reinterpret_cast<PipelineDynamicStateCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PipelineDynamicStateCreateInfo(
- VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DynamicState> const & dynamicStates_ )
- : flags( flags_ )
- , dynamicStateCount( static_cast<uint32_t>( dynamicStates_.size() ) )
- , pDynamicStates( dynamicStates_.data() )
- {}
+ PipelineDynamicStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DynamicState> const & dynamicStates_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ ), flags( flags_ ), dynamicStateCount( static_cast<uint32_t>( dynamicStates_.size() ) ), pDynamicStates( dynamicStates_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineDynamicStateCreateInfo &
- operator=( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineDynamicStateCreateInfo & operator=( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineDynamicStateCreateInfo & operator=( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -35166,31 +35348,27 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo &
- setDynamicStateCount( uint32_t dynamicStateCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setDynamicStateCount( uint32_t dynamicStateCount_ ) VULKAN_HPP_NOEXCEPT
{
dynamicStateCount = dynamicStateCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo &
- setPDynamicStates( const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setPDynamicStates( const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ ) VULKAN_HPP_NOEXCEPT
{
pDynamicStates = pDynamicStates_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PipelineDynamicStateCreateInfo & setDynamicStates(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DynamicState> const & dynamicStates_ )
- VULKAN_HPP_NOEXCEPT
+ PipelineDynamicStateCreateInfo &
+ setDynamicStates( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DynamicState> const & dynamicStates_ ) VULKAN_HPP_NOEXCEPT
{
dynamicStateCount = static_cast<uint32_t>( dynamicStates_.size() );
pDynamicStates = dynamicStates_.data();
@@ -35199,17 +35377,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineDynamicStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineDynamicStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineDynamicStateCreateInfo *>( this );
}
- explicit operator VkPipelineDynamicStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineDynamicStateCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineDynamicStateCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -35230,11 +35408,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineDynamicStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( dynamicStateCount == rhs.dynamicStateCount ) && ( pDynamicStates == rhs.pDynamicStates );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dynamicStateCount == rhs.dynamicStateCount ) &&
+ ( pDynamicStates == rhs.pDynamicStates );
# endif
}
@@ -35245,20 +35423,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDynamicStateCreateInfo;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDynamicStateCreateInfo;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags = {};
uint32_t dynamicStateCount = {};
const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo ) ==
- sizeof( VkPipelineDynamicStateCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo>::value,
- "PipelineDynamicStateCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineDynamicStateCreateInfo>
@@ -35274,25 +35444,26 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo(
- VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {},
- uint32_t stageCount_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {},
- VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
- VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
- uint32_t subpass_ = {},
- VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
- int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {},
+ uint32_t stageCount_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
+ VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
+ uint32_t subpass_ = {},
+ VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
+ int32_t basePipelineIndex_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, stageCount( stageCount_ )
, pStages( pStages_ )
, pVertexInputState( pVertexInputState_ )
@@ -35309,35 +35480,36 @@ namespace VULKAN_HPP_NAMESPACE
, subpass( subpass_ )
, basePipelineHandle( basePipelineHandle_ )
, basePipelineIndex( basePipelineIndex_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14
- GraphicsPipelineCreateInfo( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: GraphicsPipelineCreateInfo( *reinterpret_cast<GraphicsPipelineCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- GraphicsPipelineCreateInfo(
- VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const &
- stages_,
- const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {},
- VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
- VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
- uint32_t subpass_ = {},
- VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
- int32_t basePipelineIndex_ = {} )
- : flags( flags_ )
+ GraphicsPipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_,
+ const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
+ VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
+ uint32_t subpass_ = {},
+ VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
+ int32_t basePipelineIndex_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, stageCount( static_cast<uint32_t>( stages_.size() ) )
, pStages( stages_.data() )
, pVertexInputState( pVertexInputState_ )
@@ -35354,7 +35526,8 @@ namespace VULKAN_HPP_NAMESPACE
, subpass( subpass_ )
, basePipelineHandle( basePipelineHandle_ )
, basePipelineIndex( basePipelineIndex_ )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -35373,8 +35546,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -35386,17 +35558,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
- setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
{
pStages = pStages_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- GraphicsPipelineCreateInfo & setStages(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const &
- stages_ ) VULKAN_HPP_NOEXCEPT
+ GraphicsPipelineCreateInfo &
+ setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
{
stageCount = static_cast<uint32_t>( stages_.size() );
pStages = stages_.data();
@@ -35404,78 +35574,76 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPVertexInputState(
- const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
+ setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT
{
pVertexInputState = pVertexInputState_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPInputAssemblyState(
- const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
+ setPInputAssemblyState( const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ ) VULKAN_HPP_NOEXCEPT
{
pInputAssemblyState = pInputAssemblyState_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPTessellationState(
- const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
+ setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT
{
pTessellationState = pTessellationState_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPViewportState(
- const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
+ setPViewportState( const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ ) VULKAN_HPP_NOEXCEPT
{
pViewportState = pViewportState_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPRasterizationState(
- const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
+ setPRasterizationState( const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ ) VULKAN_HPP_NOEXCEPT
{
pRasterizationState = pRasterizationState_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPMultisampleState(
- const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
+ setPMultisampleState( const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ ) VULKAN_HPP_NOEXCEPT
{
pMultisampleState = pMultisampleState_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPDepthStencilState(
- const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
+ setPDepthStencilState( const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ ) VULKAN_HPP_NOEXCEPT
{
pDepthStencilState = pDepthStencilState_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPColorBlendState(
- const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
+ setPColorBlendState( const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ ) VULKAN_HPP_NOEXCEPT
{
pColorBlendState = pColorBlendState_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPDynamicState(
- const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
+ setPDynamicState( const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT
{
pDynamicState = pDynamicState_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
- setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
{
layout = layout_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
- setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
{
renderPass = renderPass_;
return *this;
@@ -35487,32 +35655,30 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
- setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
{
basePipelineHandle = basePipelineHandle_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
- setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
{
basePipelineIndex = basePipelineIndex_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkGraphicsPipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkGraphicsPipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( this );
}
- explicit operator VkGraphicsPipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkGraphicsPipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkGraphicsPipelineCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -35565,17 +35731,15 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) &&
( pVertexInputState == rhs.pVertexInputState ) && ( pInputAssemblyState == rhs.pInputAssemblyState ) &&
( pTessellationState == rhs.pTessellationState ) && ( pViewportState == rhs.pViewportState ) &&
( pRasterizationState == rhs.pRasterizationState ) && ( pMultisampleState == rhs.pMultisampleState ) &&
- ( pDepthStencilState == rhs.pDepthStencilState ) && ( pColorBlendState == rhs.pColorBlendState ) &&
- ( pDynamicState == rhs.pDynamicState ) && ( layout == rhs.layout ) && ( renderPass == rhs.renderPass ) &&
- ( subpass == rhs.subpass ) && ( basePipelineHandle == rhs.basePipelineHandle ) &&
+ ( pDepthStencilState == rhs.pDepthStencilState ) && ( pColorBlendState == rhs.pColorBlendState ) && ( pDynamicState == rhs.pDynamicState ) &&
+ ( layout == rhs.layout ) && ( renderPass == rhs.renderPass ) && ( subpass == rhs.subpass ) && ( basePipelineHandle == rhs.basePipelineHandle ) &&
( basePipelineIndex == rhs.basePipelineIndex );
# endif
}
@@ -35587,12 +35751,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineCreateInfo;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
- uint32_t stageCount = {};
- const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {};
- const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineCreateInfo;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
+ uint32_t stageCount = {};
+ const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {};
+ const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState = {};
const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState = {};
const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState = {};
const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState = {};
@@ -35607,13 +35771,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
int32_t basePipelineIndex = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo ) ==
- sizeof( VkGraphicsPipelineCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo>::value,
- "GraphicsPipelineCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eGraphicsPipelineCreateInfo>
@@ -35621,6 +35778,103 @@ namespace VULKAN_HPP_NAMESPACE
using Type = GraphicsPipelineCreateInfo;
};
+ struct GraphicsPipelineLibraryCreateInfoEXT
+ {
+ using NativeType = VkGraphicsPipelineLibraryCreateInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineLibraryCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR GraphicsPipelineLibraryCreateInfoEXT( VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT flags_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR GraphicsPipelineLibraryCreateInfoEXT( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ GraphicsPipelineLibraryCreateInfoEXT( VkGraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : GraphicsPipelineLibraryCreateInfoEXT( *reinterpret_cast<GraphicsPipelineLibraryCreateInfoEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ GraphicsPipelineLibraryCreateInfoEXT & operator=( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ GraphicsPipelineLibraryCreateInfoEXT & operator=( VkGraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineLibraryCreateInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineLibraryCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ flags = flags_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkGraphicsPipelineLibraryCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkGraphicsPipelineLibraryCreateInfoEXT *>( this );
+ }
+
+ operator VkGraphicsPipelineLibraryCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkGraphicsPipelineLibraryCreateInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, flags );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( GraphicsPipelineLibraryCreateInfoEXT const & ) const = default;
+#else
+ bool operator==( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
+# endif
+ }
+
+ bool operator!=( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineLibraryCreateInfoEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT flags = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eGraphicsPipelineLibraryCreateInfoEXT>
+ {
+ using Type = GraphicsPipelineLibraryCreateInfoEXT;
+ };
+
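(Illustrative usage sketch, not part of the upstream diff.) The new GraphicsPipelineLibraryCreateInfoEXT wrapper is meant to be chained into a graphics pipeline via pNext; the eLibraryKHR and eVertexInputInterface flag bits used below come from VK_KHR_pipeline_library / VK_EXT_graphics_pipeline_library and are assumed here, they do not appear in this hunk:

#include <vulkan/vulkan.hpp>

vk::GraphicsPipelineCreateInfo makeVertexInputLibrary( vk::GraphicsPipelineLibraryCreateInfoEXT & libraryInfo )
{
  // Assumed flag bit from VK_EXT_graphics_pipeline_library; the caller keeps libraryInfo alive.
  libraryInfo.setFlags( vk::GraphicsPipelineLibraryFlagBitsEXT::eVertexInputInterface );

  // Assumed flag bit from VK_KHR_pipeline_library; the extension struct is attached through pNext.
  return vk::GraphicsPipelineCreateInfo{}
      .setFlags( vk::PipelineCreateFlagBits::eLibraryKHR )
      .setPNext( &libraryInfo );
}
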
struct GraphicsShaderGroupCreateInfoNV
{
using NativeType = VkGraphicsShaderGroupCreateInfoNV;
@@ -35629,40 +35883,42 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsShaderGroupCreateInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV(
- uint32_t stageCount_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {} ) VULKAN_HPP_NOEXCEPT
- : stageCount( stageCount_ )
+ VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV( uint32_t stageCount_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , stageCount( stageCount_ )
, pStages( pStages_ )
, pVertexInputState( pVertexInputState_ )
, pTessellationState( pTessellationState_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- GraphicsShaderGroupCreateInfoNV( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
GraphicsShaderGroupCreateInfoNV( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: GraphicsShaderGroupCreateInfoNV( *reinterpret_cast<GraphicsShaderGroupCreateInfoNV const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- GraphicsShaderGroupCreateInfoNV(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const &
- stages_,
- const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {} )
- : stageCount( static_cast<uint32_t>( stages_.size() ) )
+ GraphicsShaderGroupCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_,
+ const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , stageCount( static_cast<uint32_t>( stages_.size() ) )
, pStages( stages_.data() )
, pVertexInputState( pVertexInputState_ )
, pTessellationState( pTessellationState_ )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- GraphicsShaderGroupCreateInfoNV &
- operator=( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ GraphicsShaderGroupCreateInfoNV & operator=( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
GraphicsShaderGroupCreateInfoNV & operator=( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -35684,16 +35940,15 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV &
- setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
+ setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
{
pStages = pStages_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- GraphicsShaderGroupCreateInfoNV & setStages(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const &
- stages_ ) VULKAN_HPP_NOEXCEPT
+ GraphicsShaderGroupCreateInfoNV &
+ setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
{
stageCount = static_cast<uint32_t>( stages_.size() );
pStages = stages_.data();
@@ -35701,32 +35956,32 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPVertexInputState(
- const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV &
+ setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT
{
pVertexInputState = pVertexInputState_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPTessellationState(
- const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV &
+ setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT
{
pTessellationState = pTessellationState_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkGraphicsShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkGraphicsShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkGraphicsShaderGroupCreateInfoNV *>( this );
}
- explicit operator VkGraphicsShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkGraphicsShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkGraphicsShaderGroupCreateInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -35748,12 +36003,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( GraphicsShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageCount == rhs.stageCount ) &&
- ( pStages == rhs.pStages ) && ( pVertexInputState == rhs.pVertexInputState ) &&
- ( pTessellationState == rhs.pTessellationState );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) &&
+ ( pVertexInputState == rhs.pVertexInputState ) && ( pTessellationState == rhs.pTessellationState );
# endif
}
@@ -35764,21 +36018,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsShaderGroupCreateInfoNV;
- const void * pNext = {};
- uint32_t stageCount = {};
- const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsShaderGroupCreateInfoNV;
+ const void * pNext = {};
+ uint32_t stageCount = {};
+ const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {};
const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState = {};
const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV ) ==
- sizeof( VkGraphicsShaderGroupCreateInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV>::value,
- "GraphicsShaderGroupCreateInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eGraphicsShaderGroupCreateInfoNV>
@@ -35791,70 +36037,67 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkGraphicsPipelineShaderGroupsCreateInfoNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV(
- uint32_t groupCount_ = {},
- const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups_ = {},
- uint32_t pipelineCount_ = {},
- const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines_ = {} ) VULKAN_HPP_NOEXCEPT
- : groupCount( groupCount_ )
+ VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( uint32_t groupCount_ = {},
+ const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups_ = {},
+ uint32_t pipelineCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , groupCount( groupCount_ )
, pGroups( pGroups_ )
, pipelineCount( pipelineCount_ )
, pPipelines( pPipelines_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV(
- GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- GraphicsPipelineShaderGroupsCreateInfoNV( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : GraphicsPipelineShaderGroupsCreateInfoNV(
- *reinterpret_cast<GraphicsPipelineShaderGroupsCreateInfoNV const *>( &rhs ) )
- {}
+ GraphicsPipelineShaderGroupsCreateInfoNV( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : GraphicsPipelineShaderGroupsCreateInfoNV( *reinterpret_cast<GraphicsPipelineShaderGroupsCreateInfoNV const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
GraphicsPipelineShaderGroupsCreateInfoNV(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV> const &
- groups_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & pipelines_ = {} )
- : groupCount( static_cast<uint32_t>( groups_.size() ) )
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV> const & groups_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & pipelines_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , groupCount( static_cast<uint32_t>( groups_.size() ) )
, pGroups( groups_.data() )
, pipelineCount( static_cast<uint32_t>( pipelines_.size() ) )
, pPipelines( pipelines_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- GraphicsPipelineShaderGroupsCreateInfoNV &
- operator=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ GraphicsPipelineShaderGroupsCreateInfoNV & operator=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- GraphicsPipelineShaderGroupsCreateInfoNV &
- operator=( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ GraphicsPipelineShaderGroupsCreateInfoNV & operator=( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV &
- setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
{
groupCount = groupCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV &
- setPGroups( const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups_ ) VULKAN_HPP_NOEXCEPT
+ setPGroups( const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups_ ) VULKAN_HPP_NOEXCEPT
{
pGroups = pGroups_;
return *this;
@@ -35862,8 +36105,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
GraphicsPipelineShaderGroupsCreateInfoNV & setGroups(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV> const &
- groups_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV> const & groups_ ) VULKAN_HPP_NOEXCEPT
{
groupCount = static_cast<uint32_t>( groups_.size() );
pGroups = groups_.data();
@@ -35871,24 +36113,21 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV &
- setPipelineCount( uint32_t pipelineCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPipelineCount( uint32_t pipelineCount_ ) VULKAN_HPP_NOEXCEPT
{
pipelineCount = pipelineCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV &
- setPPipelines( const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPPipelines( const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines_ ) VULKAN_HPP_NOEXCEPT
{
pPipelines = pPipelines_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- GraphicsPipelineShaderGroupsCreateInfoNV & setPipelines(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & pipelines_ )
- VULKAN_HPP_NOEXCEPT
+ GraphicsPipelineShaderGroupsCreateInfoNV &
+ setPipelines( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & pipelines_ ) VULKAN_HPP_NOEXCEPT
{
pipelineCount = static_cast<uint32_t>( pipelines_.size() );
pPipelines = pipelines_.data();
@@ -35897,17 +36136,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkGraphicsPipelineShaderGroupsCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkGraphicsPipelineShaderGroupsCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkGraphicsPipelineShaderGroupsCreateInfoNV *>( this );
}
- explicit operator VkGraphicsPipelineShaderGroupsCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkGraphicsPipelineShaderGroupsCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkGraphicsPipelineShaderGroupsCreateInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -35929,11 +36168,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( groupCount == rhs.groupCount ) &&
- ( pGroups == rhs.pGroups ) && ( pipelineCount == rhs.pipelineCount ) && ( pPipelines == rhs.pPipelines );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( groupCount == rhs.groupCount ) && ( pGroups == rhs.pGroups ) &&
+ ( pipelineCount == rhs.pipelineCount ) && ( pPipelines == rhs.pPipelines );
# endif
}
@@ -35944,22 +36183,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV;
- const void * pNext = {};
- uint32_t groupCount = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV;
+ const void * pNext = {};
+ uint32_t groupCount = {};
const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups = {};
uint32_t pipelineCount = {};
const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV ) ==
- sizeof( VkGraphicsPipelineShaderGroupsCreateInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV>::value,
- "GraphicsPipelineShaderGroupsCreateInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV>
@@ -35975,13 +36205,12 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_CONSTEXPR XYColorEXT( float x_ = {}, float y_ = {} ) VULKAN_HPP_NOEXCEPT
: x( x_ )
, y( y_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR XYColorEXT( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- XYColorEXT( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : XYColorEXT( *reinterpret_cast<XYColorEXT const *>( &rhs ) )
- {}
+ XYColorEXT( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT : XYColorEXT( *reinterpret_cast<XYColorEXT const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
XYColorEXT & operator=( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -36006,17 +36235,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkXYColorEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkXYColorEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkXYColorEXT *>( this );
}
- explicit operator VkXYColorEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkXYColorEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkXYColorEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -36033,7 +36262,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( x == rhs.x ) && ( y == rhs.y );
@@ -36050,12 +36279,6 @@ namespace VULKAN_HPP_NAMESPACE
float x = {};
float y = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::XYColorEXT ) == sizeof( VkXYColorEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::XYColorEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::XYColorEXT>::value,
- "XYColorEXT is not nothrow_move_constructible!" );
struct HdrMetadataEXT
{
@@ -36065,15 +36288,17 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHdrMetadataEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR HdrMetadataEXT( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ = {},
- VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ = {},
- VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ = {},
- VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ = {},
- float maxLuminance_ = {},
- float minLuminance_ = {},
- float maxContentLightLevel_ = {},
- float maxFrameAverageLightLevel_ = {} ) VULKAN_HPP_NOEXCEPT
- : displayPrimaryRed( displayPrimaryRed_ )
+ VULKAN_HPP_CONSTEXPR HdrMetadataEXT( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ = {},
+ VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ = {},
+ VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ = {},
+ VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ = {},
+ float maxLuminance_ = {},
+ float minLuminance_ = {},
+ float maxContentLightLevel_ = {},
+ float maxFrameAverageLightLevel_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , displayPrimaryRed( displayPrimaryRed_ )
, displayPrimaryGreen( displayPrimaryGreen_ )
, displayPrimaryBlue( displayPrimaryBlue_ )
, whitePoint( whitePoint_ )
@@ -36081,13 +36306,12 @@ namespace VULKAN_HPP_NAMESPACE
, minLuminance( minLuminance_ )
, maxContentLightLevel( maxContentLightLevel_ )
, maxFrameAverageLightLevel( maxFrameAverageLightLevel_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR HdrMetadataEXT( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : HdrMetadataEXT( *reinterpret_cast<HdrMetadataEXT const *>( &rhs ) )
- {}
+ HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT : HdrMetadataEXT( *reinterpret_cast<HdrMetadataEXT const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
HdrMetadataEXT & operator=( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -36105,29 +36329,25 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT &
- setDisplayPrimaryRed( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryRed_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryRed( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryRed_ ) VULKAN_HPP_NOEXCEPT
{
displayPrimaryRed = displayPrimaryRed_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT &
- setDisplayPrimaryGreen( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryGreen_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryGreen( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryGreen_ ) VULKAN_HPP_NOEXCEPT
{
displayPrimaryGreen = displayPrimaryGreen_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT &
- setDisplayPrimaryBlue( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryBlue_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryBlue( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryBlue_ ) VULKAN_HPP_NOEXCEPT
{
displayPrimaryBlue = displayPrimaryBlue_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT &
- setWhitePoint( VULKAN_HPP_NAMESPACE::XYColorEXT const & whitePoint_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setWhitePoint( VULKAN_HPP_NAMESPACE::XYColorEXT const & whitePoint_ ) VULKAN_HPP_NOEXCEPT
{
whitePoint = whitePoint_;
return *this;
@@ -36151,25 +36371,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT &
- setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ ) VULKAN_HPP_NOEXCEPT
{
maxFrameAverageLightLevel = maxFrameAverageLightLevel_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkHdrMetadataEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkHdrMetadataEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkHdrMetadataEXT *>( this );
}
- explicit operator VkHdrMetadataEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkHdrMetadataEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkHdrMetadataEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -36204,13 +36423,12 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayPrimaryRed == rhs.displayPrimaryRed ) &&
- ( displayPrimaryGreen == rhs.displayPrimaryGreen ) && ( displayPrimaryBlue == rhs.displayPrimaryBlue ) &&
- ( whitePoint == rhs.whitePoint ) && ( maxLuminance == rhs.maxLuminance ) &&
- ( minLuminance == rhs.minLuminance ) && ( maxContentLightLevel == rhs.maxContentLightLevel ) &&
+ ( displayPrimaryGreen == rhs.displayPrimaryGreen ) && ( displayPrimaryBlue == rhs.displayPrimaryBlue ) && ( whitePoint == rhs.whitePoint ) &&
+ ( maxLuminance == rhs.maxLuminance ) && ( minLuminance == rhs.minLuminance ) && ( maxContentLightLevel == rhs.maxContentLightLevel ) &&
( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel );
# endif
}
@@ -36233,12 +36451,6 @@ namespace VULKAN_HPP_NAMESPACE
float maxContentLightLevel = {};
float maxFrameAverageLightLevel = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HdrMetadataEXT ) == sizeof( VkHdrMetadataEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::HdrMetadataEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::HdrMetadataEXT>::value,
- "HdrMetadataEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eHdrMetadataEXT>
@@ -36254,16 +36466,19 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHeadlessSurfaceCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ = {} )
- VULKAN_HPP_NOEXCEPT : flags( flags_ )
- {}
+ VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- HeadlessSurfaceCreateInfoEXT( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
HeadlessSurfaceCreateInfoEXT( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: HeadlessSurfaceCreateInfoEXT( *reinterpret_cast<HeadlessSurfaceCreateInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
HeadlessSurfaceCreateInfoEXT & operator=( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -36281,31 +36496,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT &
- setFlags( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkHeadlessSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkHeadlessSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( this );
}
- explicit operator VkHeadlessSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkHeadlessSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkHeadlessSurfaceCreateInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -36318,7 +36530,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
@@ -36336,14 +36548,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT ) ==
- sizeof( VkHeadlessSurfaceCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT>::value,
- "HeadlessSurfaceCreateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eHeadlessSurfaceCreateInfoEXT>
@@ -36361,16 +36565,20 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ = {},
- const void * pView_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ const void * pView_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, pView( pView_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
: IOSSurfaceCreateInfoMVK( *reinterpret_cast<IOSSurfaceCreateInfoMVK const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
IOSSurfaceCreateInfoMVK & operator=( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -36388,8 +36596,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK &
- setFlags( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -36402,24 +36609,21 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkIOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT
+ operator VkIOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( this );
}
- explicit operator VkIOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT
+ operator VkIOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkIOSSurfaceCreateInfoMVK *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK const &,
- const void * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK const &, const void * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -36432,7 +36636,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( IOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pView == rhs.pView );
@@ -36451,13 +36655,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags = {};
const void * pView = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK ) ==
- sizeof( VkIOSSurfaceCreateInfoMVK ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK>::value,
- "IOSSurfaceCreateInfoMVK is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eIosSurfaceCreateInfoMVK>
@@ -36471,21 +36668,20 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkImageBlit;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14
- ImageBlit( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {},
- std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & srcOffsets_ = {},
- VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
- std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & dstOffsets_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageBlit( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {},
+ std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & srcOffsets_ = {},
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
+ std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & dstOffsets_ = {} ) VULKAN_HPP_NOEXCEPT
: srcSubresource( srcSubresource_ )
, srcOffsets( srcOffsets_ )
, dstSubresource( dstSubresource_ )
, dstOffsets( dstOffsets_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR_14 ImageBlit( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT : ImageBlit( *reinterpret_cast<ImageBlit const *>( &rhs ) )
- {}
+ ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT : ImageBlit( *reinterpret_cast<ImageBlit const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ImageBlit & operator=( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -36497,46 +36693,42 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 ImageBlit &
- setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageBlit & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
{
srcSubresource = srcSubresource_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageBlit &
- setSrcOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageBlit & setSrcOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT
{
srcOffsets = srcOffsets_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageBlit &
- setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageBlit & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
{
dstSubresource = dstSubresource_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageBlit &
- setDstOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageBlit & setDstOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT
{
dstOffsets = dstOffsets_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImageBlit const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageBlit const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageBlit *>( this );
}
- explicit operator VkImageBlit &() VULKAN_HPP_NOEXCEPT
+ operator VkImageBlit &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageBlit *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -36556,11 +36748,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( srcSubresource == rhs.srcSubresource ) && ( srcOffsets == rhs.srcOffsets ) &&
- ( dstSubresource == rhs.dstSubresource ) && ( dstOffsets == rhs.dstOffsets );
+ return ( srcSubresource == rhs.srcSubresource ) && ( srcOffsets == rhs.srcOffsets ) && ( dstSubresource == rhs.dstSubresource ) &&
+ ( dstOffsets == rhs.dstOffsets );
# endif
}
@@ -36576,12 +36768,239 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> dstOffsets = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageBlit ) == sizeof( VkImageBlit ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageBlit>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageBlit>::value,
- "ImageBlit is not nothrow_move_constructible!" );
+
+ struct ImageCompressionControlEXT
+ {
+ using NativeType = VkImageCompressionControlEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCompressionControlEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageCompressionControlEXT( VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT flags_ = {},
+ uint32_t compressionControlPlaneCount_ = {},
+ VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT * pFixedRateFlags_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
+ , compressionControlPlaneCount( compressionControlPlaneCount_ )
+ , pFixedRateFlags( pFixedRateFlags_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR ImageCompressionControlEXT( ImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageCompressionControlEXT( VkImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ImageCompressionControlEXT( *reinterpret_cast<ImageCompressionControlEXT const *>( &rhs ) )
+ {
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ ImageCompressionControlEXT( VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT flags_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT> const & fixedRateFlags_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
+ , compressionControlPlaneCount( static_cast<uint32_t>( fixedRateFlags_.size() ) )
+ , pFixedRateFlags( fixedRateFlags_.data() )
+ {
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ ImageCompressionControlEXT & operator=( ImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageCompressionControlEXT & operator=( VkImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCompressionControlEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setFlags( VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setCompressionControlPlaneCount( uint32_t compressionControlPlaneCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ compressionControlPlaneCount = compressionControlPlaneCount_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT &
+ setPFixedRateFlags( VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT * pFixedRateFlags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pFixedRateFlags = pFixedRateFlags_;
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ ImageCompressionControlEXT & setFixedRateFlags(
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT> const & fixedRateFlags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ compressionControlPlaneCount = static_cast<uint32_t>( fixedRateFlags_.size() );
+ pFixedRateFlags = fixedRateFlags_.data();
+ return *this;
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkImageCompressionControlEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImageCompressionControlEXT *>( this );
+ }
+
+ operator VkImageCompressionControlEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageCompressionControlEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT const &,
+ uint32_t const &,
+ VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT * const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, flags, compressionControlPlaneCount, pFixedRateFlags );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( ImageCompressionControlEXT const & ) const = default;
+#else
+ bool operator==( ImageCompressionControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+ ( compressionControlPlaneCount == rhs.compressionControlPlaneCount ) && ( pFixedRateFlags == rhs.pFixedRateFlags );
+# endif
+ }
+
+ bool operator!=( ImageCompressionControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCompressionControlEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT flags = {};
+ uint32_t compressionControlPlaneCount = {};
+ VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT * pFixedRateFlags = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eImageCompressionControlEXT>
+ {
+ using Type = ImageCompressionControlEXT;
+ };
+
+ struct ImageCompressionPropertiesEXT
+ {
+ using NativeType = VkImageCompressionPropertiesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCompressionPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageCompressionPropertiesEXT( VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT imageCompressionFlags_ = {},
+ VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , imageCompressionFlags( imageCompressionFlags_ )
+ , imageCompressionFixedRateFlags( imageCompressionFixedRateFlags_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR ImageCompressionPropertiesEXT( ImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageCompressionPropertiesEXT( VkImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ImageCompressionPropertiesEXT( *reinterpret_cast<ImageCompressionPropertiesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ ImageCompressionPropertiesEXT & operator=( ImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageCompressionPropertiesEXT & operator=( VkImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT const *>( &rhs );
+ return *this;
+ }
+
+ operator VkImageCompressionPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImageCompressionPropertiesEXT *>( this );
+ }
+
+ operator VkImageCompressionPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageCompressionPropertiesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT const &,
+ VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, imageCompressionFlags, imageCompressionFixedRateFlags );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( ImageCompressionPropertiesEXT const & ) const = default;
+#else
+ bool operator==( ImageCompressionPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageCompressionFlags == rhs.imageCompressionFlags ) &&
+ ( imageCompressionFixedRateFlags == rhs.imageCompressionFixedRateFlags );
+# endif
+ }
+
+ bool operator!=( ImageCompressionPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCompressionPropertiesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT imageCompressionFlags = {};
+ VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eImageCompressionPropertiesEXT>
+ {
+ using Type = ImageCompressionPropertiesEXT;
+ };
#if defined( VK_USE_PLATFORM_FUCHSIA )
struct ImageFormatConstraintsInfoFUCHSIA
@@ -36589,52 +37008,53 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkImageFormatConstraintsInfoFUCHSIA;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eImageFormatConstraintsInfoFUCHSIA;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatConstraintsInfoFUCHSIA;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ImageFormatConstraintsInfoFUCHSIA(
- VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo_ = {},
- VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ = {},
- VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_ = {},
- uint64_t sysmemPixelFormat_ = {},
- uint32_t colorSpaceCount_ = {},
- const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * pColorSpaces_ = {} ) VULKAN_HPP_NOEXCEPT
- : imageCreateInfo( imageCreateInfo_ )
+ VULKAN_HPP_CONSTEXPR ImageFormatConstraintsInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo_ = {},
+ VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ = {},
+ VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_ = {},
+ uint64_t sysmemPixelFormat_ = {},
+ uint32_t colorSpaceCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * pColorSpaces_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , imageCreateInfo( imageCreateInfo_ )
, requiredFormatFeatures( requiredFormatFeatures_ )
, flags( flags_ )
, sysmemPixelFormat( sysmemPixelFormat_ )
, colorSpaceCount( colorSpaceCount_ )
, pColorSpaces( pColorSpaces_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ImageFormatConstraintsInfoFUCHSIA( ImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ImageFormatConstraintsInfoFUCHSIA( ImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageFormatConstraintsInfoFUCHSIA( VkImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageFormatConstraintsInfoFUCHSIA( *reinterpret_cast<ImageFormatConstraintsInfoFUCHSIA const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- ImageFormatConstraintsInfoFUCHSIA(
- VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo_,
- VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_,
- VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_,
- uint64_t sysmemPixelFormat_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA> const &
- colorSpaces_ )
- : imageCreateInfo( imageCreateInfo_ )
+ ImageFormatConstraintsInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo_,
+ VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_,
+ VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_,
+ uint64_t sysmemPixelFormat_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA> const & colorSpaces_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , imageCreateInfo( imageCreateInfo_ )
, requiredFormatFeatures( requiredFormatFeatures_ )
, flags( flags_ )
, sysmemPixelFormat( sysmemPixelFormat_ )
, colorSpaceCount( static_cast<uint32_t>( colorSpaces_.size() ) )
, pColorSpaces( colorSpaces_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- ImageFormatConstraintsInfoFUCHSIA &
- operator=( ImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ ImageFormatConstraintsInfoFUCHSIA & operator=( ImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageFormatConstraintsInfoFUCHSIA & operator=( VkImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -36650,42 +37070,39 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA &
- setImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & imageCreateInfo_ ) VULKAN_HPP_NOEXCEPT
+ setImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & imageCreateInfo_ ) VULKAN_HPP_NOEXCEPT
{
imageCreateInfo = imageCreateInfo_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA &
- setRequiredFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ ) VULKAN_HPP_NOEXCEPT
+ setRequiredFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ ) VULKAN_HPP_NOEXCEPT
{
requiredFormatFeatures = requiredFormatFeatures_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA &
- setFlags( VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA &
- setSysmemPixelFormat( uint64_t sysmemPixelFormat_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setSysmemPixelFormat( uint64_t sysmemPixelFormat_ ) VULKAN_HPP_NOEXCEPT
{
sysmemPixelFormat = sysmemPixelFormat_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA &
- setColorSpaceCount( uint32_t colorSpaceCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setColorSpaceCount( uint32_t colorSpaceCount_ ) VULKAN_HPP_NOEXCEPT
{
colorSpaceCount = colorSpaceCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA &
- setPColorSpaces( const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * pColorSpaces_ ) VULKAN_HPP_NOEXCEPT
+ setPColorSpaces( const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * pColorSpaces_ ) VULKAN_HPP_NOEXCEPT
{
pColorSpaces = pColorSpaces_;
return *this;
@@ -36693,8 +37110,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ImageFormatConstraintsInfoFUCHSIA & setColorSpaces(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA> const &
- colorSpaces_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA> const & colorSpaces_ ) VULKAN_HPP_NOEXCEPT
{
colorSpaceCount = static_cast<uint32_t>( colorSpaces_.size() );
pColorSpaces = colorSpaces_.data();
@@ -36703,17 +37119,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImageFormatConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageFormatConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageFormatConstraintsInfoFUCHSIA *>( this );
}
- explicit operator VkImageFormatConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+ operator VkImageFormatConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageFormatConstraintsInfoFUCHSIA *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -36728,14 +37144,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- imageCreateInfo,
- requiredFormatFeatures,
- flags,
- sysmemPixelFormat,
- colorSpaceCount,
- pColorSpaces );
+ return std::tie( sType, pNext, imageCreateInfo, requiredFormatFeatures, flags, sysmemPixelFormat, colorSpaceCount, pColorSpaces );
}
# endif
@@ -36744,13 +37153,12 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( ImageFormatConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageCreateInfo == rhs.imageCreateInfo ) &&
- ( requiredFormatFeatures == rhs.requiredFormatFeatures ) && ( flags == rhs.flags ) &&
- ( sysmemPixelFormat == rhs.sysmemPixelFormat ) && ( colorSpaceCount == rhs.colorSpaceCount ) &&
- ( pColorSpaces == rhs.pColorSpaces );
+ ( requiredFormatFeatures == rhs.requiredFormatFeatures ) && ( flags == rhs.flags ) && ( sysmemPixelFormat == rhs.sysmemPixelFormat ) &&
+ ( colorSpaceCount == rhs.colorSpaceCount ) && ( pColorSpaces == rhs.pColorSpaces );
# endif
}
@@ -36761,8 +37169,8 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatConstraintsInfoFUCHSIA;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatConstraintsInfoFUCHSIA;
+ const void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo = {};
VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures = {};
VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags = {};
@@ -36770,14 +37178,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t colorSpaceCount = {};
const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * pColorSpaces = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA ) ==
- sizeof( VkImageFormatConstraintsInfoFUCHSIA ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA>::value,
- "ImageFormatConstraintsInfoFUCHSIA is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImageFormatConstraintsInfoFUCHSIA>
@@ -36795,35 +37195,39 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageConstraintsInfoFUCHSIA;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFUCHSIA(
- uint32_t formatConstraintsCount_ = {},
- const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints_ = {},
- VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {},
- VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ = {} ) VULKAN_HPP_NOEXCEPT
- : formatConstraintsCount( formatConstraintsCount_ )
+ VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFUCHSIA( uint32_t formatConstraintsCount_ = {},
+ const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints_ = {},
+ VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {},
+ VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , formatConstraintsCount( formatConstraintsCount_ )
, pFormatConstraints( pFormatConstraints_ )
, bufferCollectionConstraints( bufferCollectionConstraints_ )
, flags( flags_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ImageConstraintsInfoFUCHSIA( ImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFUCHSIA( ImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageConstraintsInfoFUCHSIA( VkImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageConstraintsInfoFUCHSIA( *reinterpret_cast<ImageConstraintsInfoFUCHSIA const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ImageConstraintsInfoFUCHSIA(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
- const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA> const & formatConstraints_,
- VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {},
- VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ = {} )
- : formatConstraintsCount( static_cast<uint32_t>( formatConstraints_.size() ) )
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA> const & formatConstraints_,
+ VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {},
+ VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , formatConstraintsCount( static_cast<uint32_t>( formatConstraints_.size() ) )
, pFormatConstraints( formatConstraints_.data() )
, bufferCollectionConstraints( bufferCollectionConstraints_ )
, flags( flags_ )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -36842,15 +37246,14 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA &
- setFormatConstraintsCount( uint32_t formatConstraintsCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setFormatConstraintsCount( uint32_t formatConstraintsCount_ ) VULKAN_HPP_NOEXCEPT
{
formatConstraintsCount = formatConstraintsCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setPFormatConstraints(
- const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA &
+ setPFormatConstraints( const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints_ ) VULKAN_HPP_NOEXCEPT
{
pFormatConstraints = pFormatConstraints_;
return *this;
@@ -36858,8 +37261,8 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ImageConstraintsInfoFUCHSIA & setFormatConstraints(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
- const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA> const & formatConstraints_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA> const & formatConstraints_ )
+ VULKAN_HPP_NOEXCEPT
{
formatConstraintsCount = static_cast<uint32_t>( formatConstraints_.size() );
pFormatConstraints = formatConstraints_.data();
@@ -36867,33 +37270,31 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setBufferCollectionConstraints(
- VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const & bufferCollectionConstraints_ )
- VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA &
+ setBufferCollectionConstraints( VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const & bufferCollectionConstraints_ ) VULKAN_HPP_NOEXCEPT
{
bufferCollectionConstraints = bufferCollectionConstraints_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA &
- setFlags( VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImageConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( this );
}
- explicit operator VkImageConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+ operator VkImageConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageConstraintsInfoFUCHSIA *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -36915,13 +37316,11 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( ImageConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( formatConstraintsCount == rhs.formatConstraintsCount ) &&
- ( pFormatConstraints == rhs.pFormatConstraints ) &&
- ( bufferCollectionConstraints == rhs.bufferCollectionConstraints ) && ( flags == rhs.flags );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatConstraintsCount == rhs.formatConstraintsCount ) &&
+ ( pFormatConstraints == rhs.pFormatConstraints ) && ( bufferCollectionConstraints == rhs.bufferCollectionConstraints ) && ( flags == rhs.flags );
# endif
}
@@ -36932,21 +37331,13 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageConstraintsInfoFUCHSIA;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageConstraintsInfoFUCHSIA;
+ const void * pNext = {};
uint32_t formatConstraintsCount = {};
const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints = {};
VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints = {};
VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA ) ==
- sizeof( VkImageConstraintsInfoFUCHSIA ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA>::value,
- "ImageConstraintsInfoFUCHSIA is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImageConstraintsInfoFUCHSIA>
@@ -36964,18 +37355,18 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {},
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {},
- VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT
: srcSubresource( srcSubresource_ )
, srcOffset( srcOffset_ )
, dstSubresource( dstSubresource_ )
, dstOffset( dstOffset_ )
, extent( extent_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ImageCopy( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCopy( *reinterpret_cast<ImageCopy const *>( &rhs ) )
- {}
+ ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCopy( *reinterpret_cast<ImageCopy const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ImageCopy & operator=( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -36987,29 +37378,25 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 ImageCopy &
- setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageCopy & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
{
srcSubresource = srcSubresource_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageCopy &
- setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
{
srcOffset = srcOffset_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageCopy &
- setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageCopy & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
{
dstSubresource = dstSubresource_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageCopy &
- setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageCopy & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
{
dstOffset = dstOffset_;
return *this;
@@ -37022,17 +37409,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImageCopy const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageCopy const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageCopy *>( this );
}
- explicit operator VkImageCopy &() VULKAN_HPP_NOEXCEPT
+ operator VkImageCopy &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageCopy *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -37053,11 +37440,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) &&
- ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent );
+ return ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && ( dstSubresource == rhs.dstSubresource ) &&
+ ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent );
# endif
}
@@ -37074,12 +37461,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
VULKAN_HPP_NAMESPACE::Extent3D extent = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCopy ) == sizeof( VkImageCopy ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCopy>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageCopy>::value,
- "ImageCopy is not nothrow_move_constructible!" );
struct SubresourceLayout
{
@@ -37096,13 +37477,12 @@ namespace VULKAN_HPP_NAMESPACE
, rowPitch( rowPitch_ )
, arrayPitch( arrayPitch_ )
, depthPitch( depthPitch_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR SubresourceLayout( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SubresourceLayout( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
- : SubresourceLayout( *reinterpret_cast<SubresourceLayout const *>( &rhs ) )
- {}
+ SubresourceLayout( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT : SubresourceLayout( *reinterpret_cast<SubresourceLayout const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
SubresourceLayout & operator=( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -37113,17 +37493,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkSubresourceLayout const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSubresourceLayout const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubresourceLayout *>( this );
}
- explicit operator VkSubresourceLayout &() VULKAN_HPP_NOEXCEPT
+ operator VkSubresourceLayout &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSubresourceLayout *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -37144,11 +37524,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( offset == rhs.offset ) && ( size == rhs.size ) && ( rowPitch == rhs.rowPitch ) &&
- ( arrayPitch == rhs.arrayPitch ) && ( depthPitch == rhs.depthPitch );
+ return ( offset == rhs.offset ) && ( size == rhs.size ) && ( rowPitch == rhs.rowPitch ) && ( arrayPitch == rhs.arrayPitch ) &&
+ ( depthPitch == rhs.depthPitch );
# endif
}
@@ -37165,72 +37545,63 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch = {};
VULKAN_HPP_NAMESPACE::DeviceSize depthPitch = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceLayout ) == sizeof( VkSubresourceLayout ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubresourceLayout>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubresourceLayout>::value,
- "SubresourceLayout is not nothrow_move_constructible!" );
struct ImageDrmFormatModifierExplicitCreateInfoEXT
{
using NativeType = VkImageDrmFormatModifierExplicitCreateInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT(
- uint64_t drmFormatModifier_ = {},
- uint32_t drmFormatModifierPlaneCount_ = {},
- const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ = {} ) VULKAN_HPP_NOEXCEPT
- : drmFormatModifier( drmFormatModifier_ )
+ VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t drmFormatModifier_ = {},
+ uint32_t drmFormatModifierPlaneCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , drmFormatModifier( drmFormatModifier_ )
, drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ )
, pPlaneLayouts( pPlaneLayouts_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT(
- ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : ImageDrmFormatModifierExplicitCreateInfoEXT(
- *reinterpret_cast<ImageDrmFormatModifierExplicitCreateInfoEXT const *>( &rhs ) )
- {}
+ ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ImageDrmFormatModifierExplicitCreateInfoEXT( *reinterpret_cast<ImageDrmFormatModifierExplicitCreateInfoEXT const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ImageDrmFormatModifierExplicitCreateInfoEXT(
- uint64_t drmFormatModifier_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubresourceLayout> const &
- planeLayouts_ )
- : drmFormatModifier( drmFormatModifier_ )
+ uint64_t drmFormatModifier_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubresourceLayout> const & planeLayouts_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , drmFormatModifier( drmFormatModifier_ )
, drmFormatModifierPlaneCount( static_cast<uint32_t>( planeLayouts_.size() ) )
, pPlaneLayouts( planeLayouts_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- ImageDrmFormatModifierExplicitCreateInfoEXT &
- operator=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ImageDrmFormatModifierExplicitCreateInfoEXT &
- operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT &
- setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT
{
drmFormatModifier = drmFormatModifier_;
return *this;
@@ -37244,16 +37615,15 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT &
- setPPlaneLayouts( const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ ) VULKAN_HPP_NOEXCEPT
+ setPPlaneLayouts( const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ ) VULKAN_HPP_NOEXCEPT
{
pPlaneLayouts = pPlaneLayouts_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- ImageDrmFormatModifierExplicitCreateInfoEXT & setPlaneLayouts(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubresourceLayout> const &
- planeLayouts_ ) VULKAN_HPP_NOEXCEPT
+ ImageDrmFormatModifierExplicitCreateInfoEXT &
+ setPlaneLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubresourceLayout> const & planeLayouts_ ) VULKAN_HPP_NOEXCEPT
{
drmFormatModifierPlaneCount = static_cast<uint32_t>( planeLayouts_.size() );
pPlaneLayouts = planeLayouts_.data();
@@ -37262,17 +37632,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImageDrmFormatModifierExplicitCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageDrmFormatModifierExplicitCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageDrmFormatModifierExplicitCreateInfoEXT *>( this );
}
- explicit operator VkImageDrmFormatModifierExplicitCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkImageDrmFormatModifierExplicitCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageDrmFormatModifierExplicitCreateInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -37293,12 +37663,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ) &&
- ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) &&
- ( pPlaneLayouts == rhs.pPlaneLayouts );
+ ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) && ( pPlaneLayouts == rhs.pPlaneLayouts );
# endif
}
@@ -37309,21 +37678,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;
+ const void * pNext = {};
uint64_t drmFormatModifier = {};
uint32_t drmFormatModifierPlaneCount = {};
const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT ) ==
- sizeof( VkImageDrmFormatModifierExplicitCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT>::value,
- "ImageDrmFormatModifierExplicitCreateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT>
@@ -37336,69 +37696,64 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkImageDrmFormatModifierListCreateInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eImageDrmFormatModifierListCreateInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierListCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- ImageDrmFormatModifierListCreateInfoEXT( uint32_t drmFormatModifierCount_ = {},
- const uint64_t * pDrmFormatModifiers_ = {} ) VULKAN_HPP_NOEXCEPT
- : drmFormatModifierCount( drmFormatModifierCount_ )
+ VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( uint32_t drmFormatModifierCount_ = {},
+ const uint64_t * pDrmFormatModifiers_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , drmFormatModifierCount( drmFormatModifierCount_ )
, pDrmFormatModifiers( pDrmFormatModifiers_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( ImageDrmFormatModifierListCreateInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : ImageDrmFormatModifierListCreateInfoEXT(
- *reinterpret_cast<ImageDrmFormatModifierListCreateInfoEXT const *>( &rhs ) )
- {}
+ : ImageDrmFormatModifierListCreateInfoEXT( *reinterpret_cast<ImageDrmFormatModifierListCreateInfoEXT const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- ImageDrmFormatModifierListCreateInfoEXT(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & drmFormatModifiers_ )
- : drmFormatModifierCount( static_cast<uint32_t>( drmFormatModifiers_.size() ) )
- , pDrmFormatModifiers( drmFormatModifiers_.data() )
- {}
+ ImageDrmFormatModifierListCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & drmFormatModifiers_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ ), drmFormatModifierCount( static_cast<uint32_t>( drmFormatModifiers_.size() ) ), pDrmFormatModifiers( drmFormatModifiers_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- ImageDrmFormatModifierListCreateInfoEXT &
- operator=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ ImageDrmFormatModifierListCreateInfoEXT & operator=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ImageDrmFormatModifierListCreateInfoEXT &
- operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageDrmFormatModifierListCreateInfoEXT & operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT &
- setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) VULKAN_HPP_NOEXCEPT
{
drmFormatModifierCount = drmFormatModifierCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT &
- setPDrmFormatModifiers( const uint64_t * pDrmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setPDrmFormatModifiers( const uint64_t * pDrmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT
{
pDrmFormatModifiers = pDrmFormatModifiers_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifiers(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & drmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT
+ ImageDrmFormatModifierListCreateInfoEXT &
+ setDrmFormatModifiers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & drmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT
{
drmFormatModifierCount = static_cast<uint32_t>( drmFormatModifiers_.size() );
pDrmFormatModifiers = drmFormatModifiers_.data();
@@ -37407,24 +37762,21 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImageDrmFormatModifierListCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageDrmFormatModifierListCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageDrmFormatModifierListCreateInfoEXT *>( this );
}
- explicit operator VkImageDrmFormatModifierListCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkImageDrmFormatModifierListCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageDrmFormatModifierListCreateInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- uint32_t const &,
- const uint64_t * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint64_t * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -37437,11 +37789,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( drmFormatModifierCount == rhs.drmFormatModifierCount ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) &&
( pDrmFormatModifiers == rhs.pDrmFormatModifiers );
# endif
}
@@ -37453,20 +37804,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT;
+ const void * pNext = {};
uint32_t drmFormatModifierCount = {};
const uint64_t * pDrmFormatModifiers = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT ) ==
- sizeof( VkImageDrmFormatModifierListCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT>::value,
- "ImageDrmFormatModifierListCreateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImageDrmFormatModifierListCreateInfoEXT>
@@ -37479,43 +37821,42 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkImageDrmFormatModifierPropertiesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eImageDrmFormatModifierPropertiesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierPropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {} ) VULKAN_HPP_NOEXCEPT
- : drmFormatModifier( drmFormatModifier_ )
- {}
+ VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , drmFormatModifier( drmFormatModifier_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( ImageDrmFormatModifierPropertiesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageDrmFormatModifierPropertiesEXT( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageDrmFormatModifierPropertiesEXT( *reinterpret_cast<ImageDrmFormatModifierPropertiesEXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- ImageDrmFormatModifierPropertiesEXT &
- operator=( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ ImageDrmFormatModifierPropertiesEXT & operator=( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ImageDrmFormatModifierPropertiesEXT &
- operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageDrmFormatModifierPropertiesEXT & operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT const *>( &rhs );
return *this;
}
- explicit operator VkImageDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageDrmFormatModifierPropertiesEXT *>( this );
}
- explicit operator VkImageDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkImageDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -37532,7 +37873,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier );
@@ -37550,14 +37891,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
uint64_t drmFormatModifier = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT ) ==
- sizeof( VkImageDrmFormatModifierPropertiesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::value,
- "ImageDrmFormatModifierPropertiesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImageDrmFormatModifierPropertiesEXT>
@@ -37573,25 +37906,28 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatListCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- ImageFormatListCreateInfo( uint32_t viewFormatCount_ = {},
- const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ = {} ) VULKAN_HPP_NOEXCEPT
- : viewFormatCount( viewFormatCount_ )
+ VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo( uint32_t viewFormatCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , viewFormatCount( viewFormatCount_ )
, pViewFormats( pViewFormats_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ImageFormatListCreateInfo( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageFormatListCreateInfo( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageFormatListCreateInfo( *reinterpret_cast<ImageFormatListCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- ImageFormatListCreateInfo(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ )
- : viewFormatCount( static_cast<uint32_t>( viewFormats_.size() ) ), pViewFormats( viewFormats_.data() )
- {}
+ ImageFormatListCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ ), viewFormatCount( static_cast<uint32_t>( viewFormats_.size() ) ), pViewFormats( viewFormats_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -37610,24 +37946,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo &
- setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
{
viewFormatCount = viewFormatCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo &
- setPViewFormats( const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT
{
pViewFormats = pViewFormats_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- ImageFormatListCreateInfo & setViewFormats(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ )
- VULKAN_HPP_NOEXCEPT
+ ImageFormatListCreateInfo &
+ setViewFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ ) VULKAN_HPP_NOEXCEPT
{
viewFormatCount = static_cast<uint32_t>( viewFormats_.size() );
pViewFormats = viewFormats_.data();
@@ -37636,24 +37969,21 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImageFormatListCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageFormatListCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageFormatListCreateInfo *>( this );
}
- explicit operator VkImageFormatListCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkImageFormatListCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageFormatListCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- uint32_t const &,
- const VULKAN_HPP_NAMESPACE::Format * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Format * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -37666,11 +37996,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewFormatCount == rhs.viewFormatCount ) &&
- ( pViewFormats == rhs.pViewFormats );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewFormatCount == rhs.viewFormatCount ) && ( pViewFormats == rhs.pViewFormats );
# endif
}
@@ -37686,13 +38015,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t viewFormatCount = {};
const VULKAN_HPP_NAMESPACE::Format * pViewFormats = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo ) ==
- sizeof( VkImageFormatListCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo>::value,
- "ImageFormatListCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImageFormatListCreateInfo>
@@ -37709,16 +38031,19 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatProperties2;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ImageFormatProperties2(
- VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {} ) VULKAN_HPP_NOEXCEPT
- : imageFormatProperties( imageFormatProperties_ )
- {}
+ VULKAN_HPP_CONSTEXPR ImageFormatProperties2( VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , imageFormatProperties( imageFormatProperties_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR ImageFormatProperties2( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageFormatProperties2( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageFormatProperties2( *reinterpret_cast<ImageFormatProperties2 const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ImageFormatProperties2 & operator=( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -37729,23 +38054,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageFormatProperties2 *>( this );
}
- explicit operator VkImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
+ operator VkImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageFormatProperties2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::ImageFormatProperties const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageFormatProperties const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -37758,7 +38081,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageFormatProperties == rhs.imageFormatProperties );
@@ -37776,13 +38099,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatProperties2 ) ==
- sizeof( VkImageFormatProperties2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::value,
- "ImageFormatProperties2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImageFormatProperties2>
@@ -37799,16 +38115,17 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- ImageMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
- VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
- VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
- VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
- uint32_t srcQueueFamilyIndex_ = {},
- uint32_t dstQueueFamilyIndex_ = {},
- VULKAN_HPP_NAMESPACE::Image image_ = {},
- VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {} ) VULKAN_HPP_NOEXCEPT
- : srcAccessMask( srcAccessMask_ )
+ VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
+ VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ uint32_t srcQueueFamilyIndex_ = {},
+ uint32_t dstQueueFamilyIndex_ = {},
+ VULKAN_HPP_NAMESPACE::Image image_ = {},
+ VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , srcAccessMask( srcAccessMask_ )
, dstAccessMask( dstAccessMask_ )
, oldLayout( oldLayout_ )
, newLayout( newLayout_ )
@@ -37816,13 +38133,12 @@ namespace VULKAN_HPP_NAMESPACE
, dstQueueFamilyIndex( dstQueueFamilyIndex_ )
, image( image_ )
, subresourceRange( subresourceRange_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
- : ImageMemoryBarrier( *reinterpret_cast<ImageMemoryBarrier const *>( &rhs ) )
- {}
+ ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT : ImageMemoryBarrier( *reinterpret_cast<ImageMemoryBarrier const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ImageMemoryBarrier & operator=( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -37840,43 +38156,37 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier &
- setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
srcAccessMask = srcAccessMask_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier &
- setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
dstAccessMask = dstAccessMask_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier &
- setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
{
oldLayout = oldLayout_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier &
- setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
{
newLayout = newLayout_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier &
- setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
srcQueueFamilyIndex = srcQueueFamilyIndex_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier &
- setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
dstQueueFamilyIndex = dstQueueFamilyIndex_;
return *this;
@@ -37889,24 +38199,24 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier &
- setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
+ setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
{
subresourceRange = subresourceRange_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImageMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageMemoryBarrier *>( this );
}
- explicit operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT
+ operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageMemoryBarrier *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -37923,16 +38233,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- srcAccessMask,
- dstAccessMask,
- oldLayout,
- newLayout,
- srcQueueFamilyIndex,
- dstQueueFamilyIndex,
- image,
- subresourceRange );
+ return std::tie( sType, pNext, srcAccessMask, dstAccessMask, oldLayout, newLayout, srcQueueFamilyIndex, dstQueueFamilyIndex, image, subresourceRange );
}
#endif
@@ -37941,14 +38242,12 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) &&
- ( dstAccessMask == rhs.dstAccessMask ) && ( oldLayout == rhs.oldLayout ) &&
- ( newLayout == rhs.newLayout ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) &&
- ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( image == rhs.image ) &&
- ( subresourceRange == rhs.subresourceRange );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ) &&
+ ( oldLayout == rhs.oldLayout ) && ( newLayout == rhs.newLayout ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) &&
+ ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( image == rhs.image ) && ( subresourceRange == rhs.subresourceRange );
# endif
}
@@ -37970,12 +38269,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Image image = {};
VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier>::value,
- "ImageMemoryBarrier is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImageMemoryBarrier>
@@ -37991,16 +38284,18 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryRequirementsInfo2;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {} ) VULKAN_HPP_NOEXCEPT
- : image( image_ )
- {}
+ VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , image( image_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ImageMemoryRequirementsInfo2( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageMemoryRequirementsInfo2( *reinterpret_cast<ImageMemoryRequirementsInfo2 const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ImageMemoryRequirementsInfo2 & operator=( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -38018,25 +38313,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 &
- setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImageMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( this );
}
- explicit operator VkImageMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
+ operator VkImageMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageMemoryRequirementsInfo2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -38053,7 +38347,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image );
@@ -38071,14 +38365,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Image image = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 ) ==
- sizeof( VkImageMemoryRequirementsInfo2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2>::value,
- "ImageMemoryRequirementsInfo2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImageMemoryRequirementsInfo2>
@@ -38093,27 +38379,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkImagePipeSurfaceCreateInfoFUCHSIA;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eImagepipeSurfaceCreateInfoFUCHSIA;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- ImagePipeSurfaceCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = {},
- zx_handle_t imagePipeHandle_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = {},
+ zx_handle_t imagePipeHandle_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, imagePipeHandle( imagePipeHandle_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ImagePipeSurfaceCreateInfoFUCHSIA( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImagePipeSurfaceCreateInfoFUCHSIA( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
: ImagePipeSurfaceCreateInfoFUCHSIA( *reinterpret_cast<ImagePipeSurfaceCreateInfoFUCHSIA const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- ImagePipeSurfaceCreateInfoFUCHSIA &
- operator=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ ImagePipeSurfaceCreateInfoFUCHSIA & operator=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImagePipeSurfaceCreateInfoFUCHSIA & operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -38128,32 +38414,30 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA &
- setFlags( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA &
- setImagePipeHandle( zx_handle_t imagePipeHandle_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setImagePipeHandle( zx_handle_t imagePipeHandle_ ) VULKAN_HPP_NOEXCEPT
{
imagePipeHandle = imagePipeHandle_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImagePipeSurfaceCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImagePipeSurfaceCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( this );
}
- explicit operator VkImagePipeSurfaceCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+ operator VkImagePipeSurfaceCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImagePipeSurfaceCreateInfoFUCHSIA *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -38196,19 +38480,11 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags = {};
zx_handle_t imagePipeHandle = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA ) ==
- sizeof( VkImagePipeSurfaceCreateInfoFUCHSIA ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA>::value,
- "ImagePipeSurfaceCreateInfoFUCHSIA is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImagepipeSurfaceCreateInfoFUCHSIA>
@@ -38222,25 +38498,26 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkImagePlaneMemoryRequirementsInfo;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagePlaneMemoryRequirementsInfo;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagePlaneMemoryRequirementsInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR
- ImagePlaneMemoryRequirementsInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ =
- VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT
- : planeAspect( planeAspect_ )
- {}
+ ImagePlaneMemoryRequirementsInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , planeAspect( planeAspect_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ImagePlaneMemoryRequirementsInfo( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ImagePlaneMemoryRequirementsInfo( *reinterpret_cast<ImagePlaneMemoryRequirementsInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- ImagePlaneMemoryRequirementsInfo &
- operator=( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ ImagePlaneMemoryRequirementsInfo & operator=( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImagePlaneMemoryRequirementsInfo & operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -38255,31 +38532,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo &
- setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
{
planeAspect = planeAspect_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImagePlaneMemoryRequirementsInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImagePlaneMemoryRequirementsInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImagePlaneMemoryRequirementsInfo *>( this );
}
- explicit operator VkImagePlaneMemoryRequirementsInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkImagePlaneMemoryRequirementsInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImagePlaneMemoryRequirementsInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::ImageAspectFlagBits const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageAspectFlagBits const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -38292,7 +38566,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( planeAspect == rhs.planeAspect );
@@ -38310,14 +38584,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo ) ==
- sizeof( VkImagePlaneMemoryRequirementsInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo>::value,
- "ImagePlaneMemoryRequirementsInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImagePlaneMemoryRequirementsInfo>
@@ -38335,19 +38601,18 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {},
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {},
- VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT
: srcSubresource( srcSubresource_ )
, srcOffset( srcOffset_ )
, dstSubresource( dstSubresource_ )
, dstOffset( dstOffset_ )
, extent( extent_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ImageResolve( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
- : ImageResolve( *reinterpret_cast<ImageResolve const *>( &rhs ) )
- {}
+ ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT : ImageResolve( *reinterpret_cast<ImageResolve const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ImageResolve & operator=( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -38359,53 +38624,48 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 ImageResolve &
- setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageResolve & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
{
srcSubresource = srcSubresource_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageResolve &
- setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageResolve & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
{
srcOffset = srcOffset_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageResolve &
- setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageResolve & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
{
dstSubresource = dstSubresource_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageResolve &
- setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageResolve & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
{
dstOffset = dstOffset_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageResolve &
- setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageResolve & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
{
extent = extent_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImageResolve const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageResolve const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageResolve *>( this );
}
- explicit operator VkImageResolve &() VULKAN_HPP_NOEXCEPT
+ operator VkImageResolve &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageResolve *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -38426,11 +38686,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) &&
- ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent );
+ return ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && ( dstSubresource == rhs.dstSubresource ) &&
+ ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent );
# endif
}
@@ -38447,12 +38707,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
VULKAN_HPP_NAMESPACE::Extent3D extent = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageResolve ) == sizeof( VkImageResolve ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageResolve>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageResolve>::value,
- "ImageResolve is not nothrow_move_constructible!" );
struct ImageResolve2
{
@@ -38466,19 +38720,20 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {},
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {},
- VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT
- : srcSubresource( srcSubresource_ )
+ VULKAN_HPP_NAMESPACE::Extent3D extent_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , srcSubresource( srcSubresource_ )
, srcOffset( srcOffset_ )
, dstSubresource( dstSubresource_ )
, dstOffset( dstOffset_ )
, extent( extent_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ImageResolve2( ImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ImageResolve2( VkImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : ImageResolve2( *reinterpret_cast<ImageResolve2 const *>( &rhs ) )
- {}
+ ImageResolve2( VkImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageResolve2( *reinterpret_cast<ImageResolve2 const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ImageResolve2 & operator=( ImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -38496,53 +38751,48 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageResolve2 &
- setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
{
srcSubresource = srcSubresource_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageResolve2 &
- setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
{
srcOffset = srcOffset_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageResolve2 &
- setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
{
dstSubresource = dstSubresource_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageResolve2 &
- setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
{
dstOffset = dstOffset_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageResolve2 &
- setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
{
extent = extent_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImageResolve2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageResolve2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageResolve2 *>( this );
}
- explicit operator VkImageResolve2 &() VULKAN_HPP_NOEXCEPT
+ operator VkImageResolve2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageResolve2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -38565,12 +38815,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImageResolve2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) &&
- ( srcOffset == rhs.srcOffset ) && ( dstSubresource == rhs.dstSubresource ) &&
- ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) &&
+ ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent );
# endif
}
@@ -38589,12 +38838,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
VULKAN_HPP_NAMESPACE::Extent3D extent = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageResolve2 ) == sizeof( VkImageResolve2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageResolve2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageResolve2>::value,
- "ImageResolve2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImageResolve2>
@@ -38608,28 +38851,26 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkImageSparseMemoryRequirementsInfo2;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eImageSparseMemoryRequirementsInfo2;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSparseMemoryRequirementsInfo2;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- ImageSparseMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {} ) VULKAN_HPP_NOEXCEPT
- : image( image_ )
- {}
+ VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , image( image_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( ImageSparseMemoryRequirementsInfo2 const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageSparseMemoryRequirementsInfo2( *reinterpret_cast<ImageSparseMemoryRequirementsInfo2 const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- ImageSparseMemoryRequirementsInfo2 &
- operator=( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ ImageSparseMemoryRequirementsInfo2 & operator=( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ImageSparseMemoryRequirementsInfo2 &
- operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageSparseMemoryRequirementsInfo2 & operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 const *>( &rhs );
return *this;
@@ -38642,25 +38883,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageSparseMemoryRequirementsInfo2 &
- setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageSparseMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImageSparseMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageSparseMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( this );
}
- explicit operator VkImageSparseMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
+ operator VkImageSparseMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageSparseMemoryRequirementsInfo2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -38677,7 +38917,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImageSparseMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image );
@@ -38695,14 +38935,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Image image = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 ) ==
- sizeof( VkImageSparseMemoryRequirementsInfo2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2>::value,
- "ImageSparseMemoryRequirementsInfo2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImageSparseMemoryRequirementsInfo2>
@@ -38719,17 +38951,19 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageStencilUsageCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- ImageStencilUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ = {} ) VULKAN_HPP_NOEXCEPT
- : stencilUsage( stencilUsage_ )
- {}
+ VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , stencilUsage( stencilUsage_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ImageStencilUsageCreateInfo( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageStencilUsageCreateInfo( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageStencilUsageCreateInfo( *reinterpret_cast<ImageStencilUsageCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ImageStencilUsageCreateInfo & operator=( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -38747,31 +38981,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo &
- setStencilUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo & setStencilUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ ) VULKAN_HPP_NOEXCEPT
{
stencilUsage = stencilUsage_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImageStencilUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageStencilUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageStencilUsageCreateInfo *>( this );
}
- explicit operator VkImageStencilUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkImageStencilUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageStencilUsageCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -38784,7 +39015,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilUsage == rhs.stencilUsage );
@@ -38802,14 +39033,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo ) ==
- sizeof( VkImageStencilUsageCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo>::value,
- "ImageStencilUsageCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImageStencilUsageCreateInfo>
@@ -38818,6 +39041,102 @@ namespace VULKAN_HPP_NAMESPACE
};
using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo;
+ struct ImageSubresource2EXT
+ {
+ using NativeType = VkImageSubresource2EXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSubresource2EXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageSubresource2EXT( VULKAN_HPP_NAMESPACE::ImageSubresource imageSubresource_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , imageSubresource( imageSubresource_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR ImageSubresource2EXT( ImageSubresource2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageSubresource2EXT( VkImageSubresource2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ImageSubresource2EXT( *reinterpret_cast<ImageSubresource2EXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ ImageSubresource2EXT & operator=( ImageSubresource2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageSubresource2EXT & operator=( VkImageSubresource2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresource2EXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 ImageSubresource2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ImageSubresource2EXT & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
+ {
+ imageSubresource = imageSubresource_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkImageSubresource2EXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImageSubresource2EXT *>( this );
+ }
+
+ operator VkImageSubresource2EXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageSubresource2EXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageSubresource const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, imageSubresource );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( ImageSubresource2EXT const & ) const = default;
+#else
+ bool operator==( ImageSubresource2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageSubresource == rhs.imageSubresource );
+# endif
+ }
+
+ bool operator!=( ImageSubresource2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSubresource2EXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::ImageSubresource imageSubresource = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eImageSubresource2EXT>
+ {
+ using Type = ImageSubresource2EXT;
+ };
+
struct ImageSwapchainCreateInfoKHR
{
using NativeType = VkImageSwapchainCreateInfoKHR;
@@ -38826,17 +39145,18 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSwapchainCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- ImageSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {} ) VULKAN_HPP_NOEXCEPT
- : swapchain( swapchain_ )
- {}
+ VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , swapchain( swapchain_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ImageSwapchainCreateInfoKHR( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageSwapchainCreateInfoKHR( *reinterpret_cast<ImageSwapchainCreateInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ImageSwapchainCreateInfoKHR & operator=( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -38854,31 +39174,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR &
- setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
{
swapchain = swapchain_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImageSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageSwapchainCreateInfoKHR *>( this );
}
- explicit operator VkImageSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkImageSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageSwapchainCreateInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::SwapchainKHR const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SwapchainKHR const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -38891,7 +39208,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain );
@@ -38909,14 +39226,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR ) ==
- sizeof( VkImageSwapchainCreateInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR>::value,
- "ImageSwapchainCreateInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImageSwapchainCreateInfoKHR>
@@ -38932,17 +39241,19 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAstcDecodeModeEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT(
- VULKAN_HPP_NAMESPACE::Format decodeMode_ = VULKAN_HPP_NAMESPACE::Format::eUndefined ) VULKAN_HPP_NOEXCEPT
- : decodeMode( decodeMode_ )
- {}
+ VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( VULKAN_HPP_NAMESPACE::Format decodeMode_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , decodeMode( decodeMode_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ImageViewASTCDecodeModeEXT( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageViewASTCDecodeModeEXT( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageViewASTCDecodeModeEXT( *reinterpret_cast<ImageViewASTCDecodeModeEXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ImageViewASTCDecodeModeEXT & operator=( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -38960,25 +39271,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT &
- setDecodeMode( VULKAN_HPP_NAMESPACE::Format decodeMode_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT & setDecodeMode( VULKAN_HPP_NAMESPACE::Format decodeMode_ ) VULKAN_HPP_NOEXCEPT
{
decodeMode = decodeMode_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImageViewASTCDecodeModeEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageViewASTCDecodeModeEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageViewASTCDecodeModeEXT *>( this );
}
- explicit operator VkImageViewASTCDecodeModeEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkImageViewASTCDecodeModeEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageViewASTCDecodeModeEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -38995,7 +39305,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( decodeMode == rhs.decodeMode );
@@ -39013,13 +39323,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Format decodeMode = VULKAN_HPP_NAMESPACE::Format::eUndefined;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT ) ==
- sizeof( VkImageViewASTCDecodeModeEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT>::value,
- "ImageViewASTCDecodeModeEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImageViewAstcDecodeModeEXT>
@@ -39035,23 +39338,24 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAddressPropertiesNVX;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- ImageViewAddressPropertiesNVX( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT
- : deviceAddress( deviceAddress_ )
+ VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , deviceAddress( deviceAddress_ )
, size( size_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ImageViewAddressPropertiesNVX( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageViewAddressPropertiesNVX( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageViewAddressPropertiesNVX( *reinterpret_cast<ImageViewAddressPropertiesNVX const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- ImageViewAddressPropertiesNVX &
- operator=( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ ImageViewAddressPropertiesNVX & operator=( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageViewAddressPropertiesNVX & operator=( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -39059,24 +39363,22 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkImageViewAddressPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageViewAddressPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageViewAddressPropertiesNVX *>( this );
}
- explicit operator VkImageViewAddressPropertiesNVX &() VULKAN_HPP_NOEXCEPT
+ operator VkImageViewAddressPropertiesNVX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageViewAddressPropertiesNVX *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::DeviceAddress const &,
- VULKAN_HPP_NAMESPACE::DeviceSize const &>
+ std::
+ tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -39089,11 +39391,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImageViewAddressPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress ) &&
- ( size == rhs.size );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress ) && ( size == rhs.size );
# endif
}
@@ -39109,14 +39410,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX ) ==
- sizeof( VkImageViewAddressPropertiesNVX ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::value,
- "ImageViewAddressPropertiesNVX is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImageViewAddressPropertiesNVX>
@@ -39132,26 +39425,28 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- ImageViewCreateInfo( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::Image image_ = {},
- VULKAN_HPP_NAMESPACE::ImageViewType viewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D,
- VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {},
- VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::Image image_ = {},
+ VULKAN_HPP_NAMESPACE::ImageViewType viewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D,
+ VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {},
+ VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, image( image_ )
, viewType( viewType_ )
, format( format_ )
, components( components_ )
, subresourceRange( subresourceRange_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : ImageViewCreateInfo( *reinterpret_cast<ImageViewCreateInfo const *>( &rhs ) )
- {}
+ ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ImageViewCreateInfo( *reinterpret_cast<ImageViewCreateInfo const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ImageViewCreateInfo & operator=( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -39169,8 +39464,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -39182,8 +39476,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo &
- setViewType( VULKAN_HPP_NAMESPACE::ImageViewType viewType_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setViewType( VULKAN_HPP_NAMESPACE::ImageViewType viewType_ ) VULKAN_HPP_NOEXCEPT
{
viewType = viewType_;
return *this;
@@ -39195,32 +39488,31 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo &
- setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
{
components = components_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo &
- setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
+ setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
{
subresourceRange = subresourceRange_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImageViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageViewCreateInfo *>( this );
}
- explicit operator VkImageViewCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkImageViewCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageViewCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -39244,12 +39536,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( image == rhs.image ) &&
- ( viewType == rhs.viewType ) && ( format == rhs.format ) && ( components == rhs.components ) &&
- ( subresourceRange == rhs.subresourceRange );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( image == rhs.image ) && ( viewType == rhs.viewType ) &&
+ ( format == rhs.format ) && ( components == rhs.components ) && ( subresourceRange == rhs.subresourceRange );
# endif
}
@@ -39269,12 +39560,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo>::value,
- "ImageViewCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImageViewCreateInfo>
@@ -39290,20 +39575,23 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewHandleInfoNVX;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX(
- VULKAN_HPP_NAMESPACE::ImageView imageView_ = {},
- VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
- VULKAN_HPP_NAMESPACE::Sampler sampler_ = {} ) VULKAN_HPP_NOEXCEPT
- : imageView( imageView_ )
+ VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {},
+ VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
+ VULKAN_HPP_NAMESPACE::Sampler sampler_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , imageView( imageView_ )
, descriptorType( descriptorType_ )
, sampler( sampler_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageViewHandleInfoNVX( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageViewHandleInfoNVX( *reinterpret_cast<ImageViewHandleInfoNVX const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ImageViewHandleInfoNVX & operator=( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -39321,39 +39609,36 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX &
- setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
{
imageView = imageView_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX &
- setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
{
descriptorType = descriptorType_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX &
- setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
{
sampler = sampler_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImageViewHandleInfoNVX const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageViewHandleInfoNVX const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageViewHandleInfoNVX *>( this );
}
- explicit operator VkImageViewHandleInfoNVX &() VULKAN_HPP_NOEXCEPT
+ operator VkImageViewHandleInfoNVX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageViewHandleInfoNVX *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -39374,11 +39659,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImageViewHandleInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ) &&
- ( descriptorType == rhs.descriptorType ) && ( sampler == rhs.sampler );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ) && ( descriptorType == rhs.descriptorType ) &&
+ ( sampler == rhs.sampler );
# endif
}
@@ -39395,13 +39680,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
VULKAN_HPP_NAMESPACE::Sampler sampler = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX ) ==
- sizeof( VkImageViewHandleInfoNVX ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX>::value,
- "ImageViewHandleInfoNVX is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImageViewHandleInfoNVX>
@@ -39417,14 +39695,18 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewMinLodCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ImageViewMinLodCreateInfoEXT( float minLod_ = {} ) VULKAN_HPP_NOEXCEPT : minLod( minLod_ ) {}
+ VULKAN_HPP_CONSTEXPR ImageViewMinLodCreateInfoEXT( float minLod_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , minLod( minLod_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ImageViewMinLodCreateInfoEXT( ImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ImageViewMinLodCreateInfoEXT( ImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageViewMinLodCreateInfoEXT( VkImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageViewMinLodCreateInfoEXT( *reinterpret_cast<ImageViewMinLodCreateInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ImageViewMinLodCreateInfoEXT & operator=( ImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -39449,17 +39731,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImageViewMinLodCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageViewMinLodCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageViewMinLodCreateInfoEXT *>( this );
}
- explicit operator VkImageViewMinLodCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkImageViewMinLodCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageViewMinLodCreateInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -39476,7 +39758,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImageViewMinLodCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minLod == rhs.minLod );
@@ -39494,14 +39776,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
float minLod = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT ) ==
- sizeof( VkImageViewMinLodCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT>::value,
- "ImageViewMinLodCreateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImageViewMinLodCreateInfoEXT>
@@ -39509,6 +39783,126 @@ namespace VULKAN_HPP_NAMESPACE
using Type = ImageViewMinLodCreateInfoEXT;
};
+ struct ImageViewSampleWeightCreateInfoQCOM
+ {
+ using NativeType = VkImageViewSampleWeightCreateInfoQCOM;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewSampleWeightCreateInfoQCOM;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImageViewSampleWeightCreateInfoQCOM( VULKAN_HPP_NAMESPACE::Offset2D filterCenter_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D filterSize_ = {},
+ uint32_t numPhases_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , filterCenter( filterCenter_ )
+ , filterSize( filterSize_ )
+ , numPhases( numPhases_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR ImageViewSampleWeightCreateInfoQCOM( ImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageViewSampleWeightCreateInfoQCOM( VkImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ImageViewSampleWeightCreateInfoQCOM( *reinterpret_cast<ImageViewSampleWeightCreateInfoQCOM const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ ImageViewSampleWeightCreateInfoQCOM & operator=( ImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImageViewSampleWeightCreateInfoQCOM & operator=( VkImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewSampleWeightCreateInfoQCOM const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setFilterCenter( VULKAN_HPP_NAMESPACE::Offset2D const & filterCenter_ ) VULKAN_HPP_NOEXCEPT
+ {
+ filterCenter = filterCenter_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setFilterSize( VULKAN_HPP_NAMESPACE::Extent2D const & filterSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ filterSize = filterSize_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setNumPhases( uint32_t numPhases_ ) VULKAN_HPP_NOEXCEPT
+ {
+ numPhases = numPhases_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkImageViewSampleWeightCreateInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImageViewSampleWeightCreateInfoQCOM *>( this );
+ }
+
+ operator VkImageViewSampleWeightCreateInfoQCOM &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImageViewSampleWeightCreateInfoQCOM *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ VULKAN_HPP_NAMESPACE::Offset2D const &,
+ VULKAN_HPP_NAMESPACE::Extent2D const &,
+ uint32_t const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, filterCenter, filterSize, numPhases );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( ImageViewSampleWeightCreateInfoQCOM const & ) const = default;
+#else
+ bool operator==( ImageViewSampleWeightCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( filterCenter == rhs.filterCenter ) && ( filterSize == rhs.filterSize ) &&
+ ( numPhases == rhs.numPhases );
+# endif
+ }
+
+ bool operator!=( ImageViewSampleWeightCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewSampleWeightCreateInfoQCOM;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Offset2D filterCenter = {};
+ VULKAN_HPP_NAMESPACE::Extent2D filterSize = {};
+ uint32_t numPhases = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eImageViewSampleWeightCreateInfoQCOM>
+ {
+ using Type = ImageViewSampleWeightCreateInfoQCOM;
+ };
+
struct ImageViewUsageCreateInfo
{
using NativeType = VkImageViewUsageCreateInfo;
@@ -39517,16 +39911,18 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewUsageCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- ImageViewUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {} ) VULKAN_HPP_NOEXCEPT
- : usage( usage_ )
- {}
+ VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , usage( usage_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageViewUsageCreateInfo( *reinterpret_cast<ImageViewUsageCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ImageViewUsageCreateInfo & operator=( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -39544,31 +39940,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo &
- setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
{
usage = usage_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImageViewUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImageViewUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageViewUsageCreateInfo *>( this );
}
- explicit operator VkImageViewUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkImageViewUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageViewUsageCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -39581,7 +39974,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( usage == rhs.usage );
@@ -39599,13 +39992,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo ) ==
- sizeof( VkImageViewUsageCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo>::value,
- "ImageViewUsageCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImageViewUsageCreateInfo>
@@ -39620,29 +40006,26 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkImportAndroidHardwareBufferInfoANDROID;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eImportAndroidHardwareBufferInfoANDROID;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportAndroidHardwareBufferInfoANDROID;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- ImportAndroidHardwareBufferInfoANDROID( struct AHardwareBuffer * buffer_ = {} ) VULKAN_HPP_NOEXCEPT
- : buffer( buffer_ )
- {}
+ VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( struct AHardwareBuffer * buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , buffer( buffer_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( ImportAndroidHardwareBufferInfoANDROID const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
- : ImportAndroidHardwareBufferInfoANDROID(
- *reinterpret_cast<ImportAndroidHardwareBufferInfoANDROID const *>( &rhs ) )
- {}
+ : ImportAndroidHardwareBufferInfoANDROID( *reinterpret_cast<ImportAndroidHardwareBufferInfoANDROID const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- ImportAndroidHardwareBufferInfoANDROID &
- operator=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ ImportAndroidHardwareBufferInfoANDROID & operator=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ImportAndroidHardwareBufferInfoANDROID &
- operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImportAndroidHardwareBufferInfoANDROID & operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID const *>( &rhs );
return *this;
@@ -39655,25 +40038,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID &
- setBuffer( struct AHardwareBuffer * buffer_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID & setBuffer( struct AHardwareBuffer * buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImportAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImportAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportAndroidHardwareBufferInfoANDROID *>( this );
}
- explicit operator VkImportAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT
+ operator VkImportAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportAndroidHardwareBufferInfoANDROID *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -39690,7 +40072,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( ImportAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer );
@@ -39708,15 +40090,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
struct AHardwareBuffer * buffer = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID ) ==
- sizeof( VkImportAndroidHardwareBufferInfoANDROID ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID>::value,
- "ImportAndroidHardwareBufferInfoANDROID is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImportAndroidHardwareBufferInfoANDROID>
@@ -39733,22 +40106,26 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceFdInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {},
- VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ =
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd,
- int fd_ = {} ) VULKAN_HPP_NOEXCEPT
- : fence( fence_ )
+ VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR(
+ VULKAN_HPP_NAMESPACE::Fence fence_ = {},
+ VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd,
+ int fd_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , fence( fence_ )
, flags( flags_ )
, handleType( handleType_ )
, fd( fd_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportFenceFdInfoKHR( *reinterpret_cast<ImportFenceFdInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ImportFenceFdInfoKHR & operator=( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -39772,15 +40149,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR &
- setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR &
- setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
@@ -39793,17 +40168,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImportFenceFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImportFenceFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportFenceFdInfoKHR *>( this );
}
- explicit operator VkImportFenceFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkImportFenceFdInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportFenceFdInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -39825,11 +40200,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( flags == rhs.flags ) &&
- ( handleType == rhs.handleType ) && ( fd == rhs.fd );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) &&
+ ( fd == rhs.fd );
# endif
}
@@ -39840,20 +40215,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceFdInfoKHR;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::Fence fence = {};
- VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {};
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType =
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
- int fd = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceFdInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Fence fence = {};
+ VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {};
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+ int fd = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR>::value,
- "ImportFenceFdInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImportFenceFdInfoKHR>
@@ -39870,30 +40238,31 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceWin32HandleInfoKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- ImportFenceWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {},
- VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ =
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd,
- HANDLE handle_ = {},
- LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT
- : fence( fence_ )
+ VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR(
+ VULKAN_HPP_NAMESPACE::Fence fence_ = {},
+ VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd,
+ HANDLE handle_ = {},
+ LPCWSTR name_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , fence( fence_ )
, flags( flags_ )
, handleType( handleType_ )
, handle( handle_ )
, name( name_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ImportFenceWin32HandleInfoKHR( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportFenceWin32HandleInfoKHR( *reinterpret_cast<ImportFenceWin32HandleInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- ImportFenceWin32HandleInfoKHR &
- operator=( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ ImportFenceWin32HandleInfoKHR & operator=( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportFenceWin32HandleInfoKHR & operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -39908,22 +40277,20 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR &
- setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
{
fence = fence_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR &
- setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR &
- setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
@@ -39942,17 +40309,17 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( this );
}
- explicit operator VkImportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkImportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportFenceWin32HandleInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -39975,11 +40342,11 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( ImportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( flags == rhs.flags ) &&
- ( handleType == rhs.handleType ) && ( handle == rhs.handle ) && ( name == rhs.name );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) &&
+ ( handle == rhs.handle ) && ( name == rhs.name );
# endif
}
@@ -39990,23 +40357,14 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::Fence fence = {};
- VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {};
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType =
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
- HANDLE handle = {};
- LPCWSTR name = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Fence fence = {};
+ VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {};
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+ HANDLE handle = {};
+ LPCWSTR name = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR ) ==
- sizeof( VkImportFenceWin32HandleInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR>::value,
- "ImportFenceWin32HandleInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImportFenceWin32HandleInfoKHR>
@@ -40021,30 +40379,29 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkImportMemoryBufferCollectionFUCHSIA;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eImportMemoryBufferCollectionFUCHSIA;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryBufferCollectionFUCHSIA;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- ImportMemoryBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {},
- uint32_t index_ = {} ) VULKAN_HPP_NOEXCEPT
- : collection( collection_ )
+ VULKAN_HPP_CONSTEXPR ImportMemoryBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {},
+ uint32_t index_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , collection( collection_ )
, index( index_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR ImportMemoryBufferCollectionFUCHSIA( ImportMemoryBufferCollectionFUCHSIA const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ImportMemoryBufferCollectionFUCHSIA( ImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportMemoryBufferCollectionFUCHSIA( VkImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportMemoryBufferCollectionFUCHSIA( *reinterpret_cast<ImportMemoryBufferCollectionFUCHSIA const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- ImportMemoryBufferCollectionFUCHSIA &
- operator=( ImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ ImportMemoryBufferCollectionFUCHSIA & operator=( ImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ImportMemoryBufferCollectionFUCHSIA &
- operator=( VkImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImportMemoryBufferCollectionFUCHSIA & operator=( VkImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA const *>( &rhs );
return *this;
@@ -40057,8 +40414,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA &
- setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT
{
collection = collection_;
return *this;
@@ -40071,24 +40427,21 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImportMemoryBufferCollectionFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImportMemoryBufferCollectionFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportMemoryBufferCollectionFUCHSIA *>( this );
}
- explicit operator VkImportMemoryBufferCollectionFUCHSIA &() VULKAN_HPP_NOEXCEPT
+ operator VkImportMemoryBufferCollectionFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportMemoryBufferCollectionFUCHSIA *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA const &,
- uint32_t const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA const &, uint32_t const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -40101,11 +40454,10 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( ImportMemoryBufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( collection == rhs.collection ) &&
- ( index == rhs.index );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( collection == rhs.collection ) && ( index == rhs.index );
# endif
}
@@ -40121,14 +40473,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection = {};
uint32_t index = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA ) ==
- sizeof( VkImportMemoryBufferCollectionFUCHSIA ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA>::value,
- "ImportMemoryBufferCollectionFUCHSIA is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImportMemoryBufferCollectionFUCHSIA>
@@ -40145,18 +40489,22 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryFdInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ =
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
- int fd_ = {} ) VULKAN_HPP_NOEXCEPT
- : handleType( handleType_ )
+ VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR(
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
+ int fd_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , handleType( handleType_ )
, fd( fd_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportMemoryFdInfoKHR( *reinterpret_cast<ImportMemoryFdInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ImportMemoryFdInfoKHR & operator=( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -40174,8 +40522,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR &
- setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
@@ -40188,24 +40535,21 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImportMemoryFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImportMemoryFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportMemoryFdInfoKHR *>( this );
}
- explicit operator VkImportMemoryFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkImportMemoryFdInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportMemoryFdInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &,
- int const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &, int const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -40218,7 +40562,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( fd == rhs.fd );
@@ -40232,18 +40576,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryFdInfoKHR;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType =
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
- int fd = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryFdInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+ int fd = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR>::value,
- "ImportMemoryFdInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImportMemoryFdInfoKHR>
@@ -40259,24 +40596,25 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryHostPointerInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- ImportMemoryHostPointerInfoEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ =
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
- void * pHostPointer_ = {} ) VULKAN_HPP_NOEXCEPT
- : handleType( handleType_ )
+ VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT(
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
+ void * pHostPointer_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , handleType( handleType_ )
, pHostPointer( pHostPointer_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ImportMemoryHostPointerInfoEXT( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportMemoryHostPointerInfoEXT( *reinterpret_cast<ImportMemoryHostPointerInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- ImportMemoryHostPointerInfoEXT &
- operator=( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ ImportMemoryHostPointerInfoEXT & operator=( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportMemoryHostPointerInfoEXT & operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -40292,7 +40630,7 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT &
- setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
@@ -40305,24 +40643,22 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImportMemoryHostPointerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImportMemoryHostPointerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportMemoryHostPointerInfoEXT *>( this );
}
- explicit operator VkImportMemoryHostPointerInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkImportMemoryHostPointerInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportMemoryHostPointerInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &,
- void * const &>
+ std::
+ tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &, void * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -40335,11 +40671,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) &&
- ( pHostPointer == rhs.pHostPointer );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( pHostPointer == rhs.pHostPointer );
# endif
}
@@ -40350,20 +40685,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType =
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
- void * pHostPointer = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+ void * pHostPointer = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT ) ==
- sizeof( VkImportMemoryHostPointerInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT>::value,
- "ImportMemoryHostPointerInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImportMemoryHostPointerInfoEXT>
@@ -40380,26 +40706,27 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- ImportMemoryWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ =
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
- HANDLE handle_ = {},
- LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT
- : handleType( handleType_ )
+ VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR(
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
+ HANDLE handle_ = {},
+ LPCWSTR name_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , handleType( handleType_ )
, handle( handle_ )
, name( name_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ImportMemoryWin32HandleInfoKHR( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportMemoryWin32HandleInfoKHR( *reinterpret_cast<ImportMemoryWin32HandleInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- ImportMemoryWin32HandleInfoKHR &
- operator=( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ ImportMemoryWin32HandleInfoKHR & operator=( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportMemoryWin32HandleInfoKHR & operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -40415,7 +40742,7 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR &
- setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
@@ -40434,17 +40761,17 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportMemoryWin32HandleInfoKHR *>( this );
}
- explicit operator VkImportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkImportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportMemoryWin32HandleInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -40465,11 +40792,10 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( ImportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) &&
- ( handle == rhs.handle ) && ( name == rhs.name );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( handle == rhs.handle ) && ( name == rhs.name );
# endif
}
@@ -40480,21 +40806,12 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType =
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
- HANDLE handle = {};
- LPCWSTR name = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+ HANDLE handle = {};
+ LPCWSTR name = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR ) ==
- sizeof( VkImportMemoryWin32HandleInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR>::value,
- "ImportMemoryWin32HandleInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImportMemoryWin32HandleInfoKHR>
@@ -40512,23 +40829,24 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoNV;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- ImportMemoryWin32HandleInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ = {},
- HANDLE handle_ = {} ) VULKAN_HPP_NOEXCEPT
- : handleType( handleType_ )
+ VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ = {},
+ HANDLE handle_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , handleType( handleType_ )
, handle( handle_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ImportMemoryWin32HandleInfoNV( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportMemoryWin32HandleInfoNV( *reinterpret_cast<ImportMemoryWin32HandleInfoNV const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- ImportMemoryWin32HandleInfoNV &
- operator=( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ ImportMemoryWin32HandleInfoNV & operator=( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportMemoryWin32HandleInfoNV & operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -40544,7 +40862,7 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV &
- setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ ) VULKAN_HPP_NOEXCEPT
+ setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
@@ -40557,24 +40875,21 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportMemoryWin32HandleInfoNV *>( this );
}
- explicit operator VkImportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkImportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportMemoryWin32HandleInfoNV *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV const &,
- HANDLE const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV const &, HANDLE const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -40587,11 +40902,10 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( ImportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) &&
- ( handle == rhs.handle );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( handle == rhs.handle );
# endif
}
@@ -40607,14 +40921,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType = {};
HANDLE handle = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV ) ==
- sizeof( VkImportMemoryWin32HandleInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV>::value,
- "ImportMemoryWin32HandleInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImportMemoryWin32HandleInfoNV>
@@ -40629,31 +40935,30 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkImportMemoryZirconHandleInfoFUCHSIA;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eImportMemoryZirconHandleInfoFUCHSIA;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryZirconHandleInfoFUCHSIA;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- ImportMemoryZirconHandleInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ =
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
- zx_handle_t handle_ = {} ) VULKAN_HPP_NOEXCEPT
- : handleType( handleType_ )
+ VULKAN_HPP_CONSTEXPR ImportMemoryZirconHandleInfoFUCHSIA(
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
+ zx_handle_t handle_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , handleType( handleType_ )
, handle( handle_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR ImportMemoryZirconHandleInfoFUCHSIA( ImportMemoryZirconHandleInfoFUCHSIA const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ImportMemoryZirconHandleInfoFUCHSIA( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportMemoryZirconHandleInfoFUCHSIA( VkImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportMemoryZirconHandleInfoFUCHSIA( *reinterpret_cast<ImportMemoryZirconHandleInfoFUCHSIA const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- ImportMemoryZirconHandleInfoFUCHSIA &
- operator=( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ ImportMemoryZirconHandleInfoFUCHSIA & operator=( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ImportMemoryZirconHandleInfoFUCHSIA &
- operator=( VkImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImportMemoryZirconHandleInfoFUCHSIA & operator=( VkImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA const *>( &rhs );
return *this;
@@ -40667,7 +40972,7 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA &
- setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
@@ -40680,17 +40985,17 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImportMemoryZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImportMemoryZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportMemoryZirconHandleInfoFUCHSIA *>( this );
}
- explicit operator VkImportMemoryZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+ operator VkImportMemoryZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportMemoryZirconHandleInfoFUCHSIA *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -40733,20 +41038,11 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryZirconHandleInfoFUCHSIA;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType =
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
- zx_handle_t handle = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryZirconHandleInfoFUCHSIA;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+ zx_handle_t handle = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA ) ==
- sizeof( VkImportMemoryZirconHandleInfoFUCHSIA ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA>::value,
- "ImportMemoryZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImportMemoryZirconHandleInfoFUCHSIA>
@@ -40755,6 +41051,408 @@ namespace VULKAN_HPP_NAMESPACE
};
#endif /*VK_USE_PLATFORM_FUCHSIA*/
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ struct ImportMetalBufferInfoEXT
+ {
+ using NativeType = VkImportMetalBufferInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalBufferInfoEXT;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImportMetalBufferInfoEXT( MTLBuffer_id mtlBuffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , mtlBuffer( mtlBuffer_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR ImportMetalBufferInfoEXT( ImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImportMetalBufferInfoEXT( VkImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ImportMetalBufferInfoEXT( *reinterpret_cast<ImportMetalBufferInfoEXT const *>( &rhs ) )
+ {
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ ImportMetalBufferInfoEXT & operator=( ImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImportMetalBufferInfoEXT & operator=( VkImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMetalBufferInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 ImportMetalBufferInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ImportMetalBufferInfoEXT & setMtlBuffer( MTLBuffer_id mtlBuffer_ ) VULKAN_HPP_NOEXCEPT
+ {
+ mtlBuffer = mtlBuffer_;
+ return *this;
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkImportMetalBufferInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImportMetalBufferInfoEXT *>( this );
+ }
+
+ operator VkImportMetalBufferInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImportMetalBufferInfoEXT *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, MTLBuffer_id const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, mtlBuffer );
+ }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( ImportMetalBufferInfoEXT const & ) const = default;
+# else
+ bool operator==( ImportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mtlBuffer == rhs.mtlBuffer );
+# endif
+ }
+
+ bool operator!=( ImportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+# endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMetalBufferInfoEXT;
+ const void * pNext = {};
+ MTLBuffer_id mtlBuffer = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eImportMetalBufferInfoEXT>
+ {
+ using Type = ImportMetalBufferInfoEXT;
+ };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ struct ImportMetalIOSurfaceInfoEXT
+ {
+ using NativeType = VkImportMetalIOSurfaceInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalIoSurfaceInfoEXT;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImportMetalIOSurfaceInfoEXT( IOSurfaceRef ioSurface_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , ioSurface( ioSurface_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR ImportMetalIOSurfaceInfoEXT( ImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImportMetalIOSurfaceInfoEXT( VkImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ImportMetalIOSurfaceInfoEXT( *reinterpret_cast<ImportMetalIOSurfaceInfoEXT const *>( &rhs ) )
+ {
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ ImportMetalIOSurfaceInfoEXT & operator=( ImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImportMetalIOSurfaceInfoEXT & operator=( VkImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMetalIOSurfaceInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 ImportMetalIOSurfaceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ImportMetalIOSurfaceInfoEXT & setIoSurface( IOSurfaceRef ioSurface_ ) VULKAN_HPP_NOEXCEPT
+ {
+ ioSurface = ioSurface_;
+ return *this;
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkImportMetalIOSurfaceInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImportMetalIOSurfaceInfoEXT *>( this );
+ }
+
+ operator VkImportMetalIOSurfaceInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImportMetalIOSurfaceInfoEXT *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, IOSurfaceRef const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, ioSurface );
+ }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( ImportMetalIOSurfaceInfoEXT const & ) const = default;
+# else
+ bool operator==( ImportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ioSurface == rhs.ioSurface );
+# endif
+ }
+
+ bool operator!=( ImportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+# endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMetalIoSurfaceInfoEXT;
+ const void * pNext = {};
+ IOSurfaceRef ioSurface = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eImportMetalIoSurfaceInfoEXT>
+ {
+ using Type = ImportMetalIOSurfaceInfoEXT;
+ };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ struct ImportMetalSharedEventInfoEXT
+ {
+ using NativeType = VkImportMetalSharedEventInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalSharedEventInfoEXT;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImportMetalSharedEventInfoEXT( MTLSharedEvent_id mtlSharedEvent_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , mtlSharedEvent( mtlSharedEvent_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR ImportMetalSharedEventInfoEXT( ImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImportMetalSharedEventInfoEXT( VkImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ImportMetalSharedEventInfoEXT( *reinterpret_cast<ImportMetalSharedEventInfoEXT const *>( &rhs ) )
+ {
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ ImportMetalSharedEventInfoEXT & operator=( ImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImportMetalSharedEventInfoEXT & operator=( VkImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMetalSharedEventInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 ImportMetalSharedEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ImportMetalSharedEventInfoEXT & setMtlSharedEvent( MTLSharedEvent_id mtlSharedEvent_ ) VULKAN_HPP_NOEXCEPT
+ {
+ mtlSharedEvent = mtlSharedEvent_;
+ return *this;
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkImportMetalSharedEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImportMetalSharedEventInfoEXT *>( this );
+ }
+
+ operator VkImportMetalSharedEventInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImportMetalSharedEventInfoEXT *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, MTLSharedEvent_id const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, mtlSharedEvent );
+ }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( ImportMetalSharedEventInfoEXT const & ) const = default;
+# else
+ bool operator==( ImportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mtlSharedEvent == rhs.mtlSharedEvent );
+# endif
+ }
+
+ bool operator!=( ImportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+# endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMetalSharedEventInfoEXT;
+ const void * pNext = {};
+ MTLSharedEvent_id mtlSharedEvent = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eImportMetalSharedEventInfoEXT>
+ {
+ using Type = ImportMetalSharedEventInfoEXT;
+ };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ struct ImportMetalTextureInfoEXT
+ {
+ using NativeType = VkImportMetalTextureInfoEXT;
+
+ static const bool allowDuplicate = true;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalTextureInfoEXT;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR ImportMetalTextureInfoEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor,
+ MTLTexture_id mtlTexture_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , plane( plane_ )
+ , mtlTexture( mtlTexture_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR ImportMetalTextureInfoEXT( ImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImportMetalTextureInfoEXT( VkImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ImportMetalTextureInfoEXT( *reinterpret_cast<ImportMetalTextureInfoEXT const *>( &rhs ) )
+ {
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ ImportMetalTextureInfoEXT & operator=( ImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ImportMetalTextureInfoEXT & operator=( VkImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMetalTextureInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT & setPlane( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ ) VULKAN_HPP_NOEXCEPT
+ {
+ plane = plane_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT & setMtlTexture( MTLTexture_id mtlTexture_ ) VULKAN_HPP_NOEXCEPT
+ {
+ mtlTexture = mtlTexture_;
+ return *this;
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkImportMetalTextureInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkImportMetalTextureInfoEXT *>( this );
+ }
+
+ operator VkImportMetalTextureInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkImportMetalTextureInfoEXT *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageAspectFlagBits const &, MTLTexture_id const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, plane, mtlTexture );
+ }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( ImportMetalTextureInfoEXT const & ) const = default;
+# else
+ bool operator==( ImportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( plane == rhs.plane ) && ( mtlTexture == rhs.mtlTexture );
+# endif
+ }
+
+ bool operator!=( ImportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+# endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMetalTextureInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
+ MTLTexture_id mtlTexture = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eImportMetalTextureInfoEXT>
+ {
+ using Type = ImportMetalTextureInfoEXT;
+ };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
struct ImportSemaphoreFdInfoKHR
{
using NativeType = VkImportSemaphoreFdInfoKHR;
@@ -40763,23 +41461,26 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreFdInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- ImportSemaphoreFdInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
- VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ =
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
- int fd_ = {} ) VULKAN_HPP_NOEXCEPT
- : semaphore( semaphore_ )
+ VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR(
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
+ VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
+ int fd_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , semaphore( semaphore_ )
, flags( flags_ )
, handleType( handleType_ )
, fd( fd_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportSemaphoreFdInfoKHR( *reinterpret_cast<ImportSemaphoreFdInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ImportSemaphoreFdInfoKHR & operator=( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -40797,22 +41498,20 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR &
- setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
{
semaphore = semaphore_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR &
- setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR &
- setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
@@ -40825,17 +41524,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImportSemaphoreFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImportSemaphoreFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( this );
}
- explicit operator VkImportSemaphoreFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkImportSemaphoreFdInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportSemaphoreFdInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -40857,11 +41556,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) &&
- ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && ( fd == rhs.fd );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) &&
+ ( fd == rhs.fd );
# endif
}
@@ -40872,21 +41571,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreFdInfoKHR;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
- VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {};
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType =
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
- int fd = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreFdInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+ VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {};
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+ int fd = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR ) ==
- sizeof( VkImportSemaphoreFdInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR>::value,
- "ImportSemaphoreFdInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImportSemaphoreFdInfoKHR>
@@ -40900,34 +41591,34 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkImportSemaphoreWin32HandleInfoKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eImportSemaphoreWin32HandleInfoKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreWin32HandleInfoKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- ImportSemaphoreWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
- VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ =
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
- HANDLE handle_ = {},
- LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT
- : semaphore( semaphore_ )
+ VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR(
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
+ VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
+ HANDLE handle_ = {},
+ LPCWSTR name_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , semaphore( semaphore_ )
, flags( flags_ )
, handleType( handleType_ )
, handle( handle_ )
, name( name_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ImportSemaphoreWin32HandleInfoKHR( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportSemaphoreWin32HandleInfoKHR( *reinterpret_cast<ImportSemaphoreWin32HandleInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- ImportSemaphoreWin32HandleInfoKHR &
- operator=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ ImportSemaphoreWin32HandleInfoKHR & operator=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportSemaphoreWin32HandleInfoKHR & operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -40942,22 +41633,20 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR &
- setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
{
semaphore = semaphore_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR &
- setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR &
- setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
@@ -40976,17 +41665,17 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( this );
}
- explicit operator VkImportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkImportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportSemaphoreWin32HandleInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -41009,12 +41698,11 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( ImportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) &&
- ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && ( handle == rhs.handle ) &&
- ( name == rhs.name );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) &&
+ ( handle == rhs.handle ) && ( name == rhs.name );
# endif
}
@@ -41025,23 +41713,14 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
- VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {};
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType =
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
- HANDLE handle = {};
- LPCWSTR name = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+ VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {};
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+ HANDLE handle = {};
+ LPCWSTR name = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR ) ==
- sizeof( VkImportSemaphoreWin32HandleInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR>::value,
- "ImportSemaphoreWin32HandleInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImportSemaphoreWin32HandleInfoKHR>
@@ -41056,36 +41735,34 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkImportSemaphoreZirconHandleInfoFUCHSIA;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- ImportSemaphoreZirconHandleInfoFUCHSIA( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
- VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ =
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
- zx_handle_t zirconHandle_ = {} ) VULKAN_HPP_NOEXCEPT
- : semaphore( semaphore_ )
+ VULKAN_HPP_CONSTEXPR ImportSemaphoreZirconHandleInfoFUCHSIA(
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
+ VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
+ zx_handle_t zirconHandle_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , semaphore( semaphore_ )
, flags( flags_ )
, handleType( handleType_ )
, zirconHandle( zirconHandle_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR ImportSemaphoreZirconHandleInfoFUCHSIA( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ImportSemaphoreZirconHandleInfoFUCHSIA( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportSemaphoreZirconHandleInfoFUCHSIA( VkImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
- : ImportSemaphoreZirconHandleInfoFUCHSIA(
- *reinterpret_cast<ImportSemaphoreZirconHandleInfoFUCHSIA const *>( &rhs ) )
- {}
+ : ImportSemaphoreZirconHandleInfoFUCHSIA( *reinterpret_cast<ImportSemaphoreZirconHandleInfoFUCHSIA const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- ImportSemaphoreZirconHandleInfoFUCHSIA &
- operator=( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ ImportSemaphoreZirconHandleInfoFUCHSIA & operator=( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ImportSemaphoreZirconHandleInfoFUCHSIA &
- operator=( VkImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImportSemaphoreZirconHandleInfoFUCHSIA & operator=( VkImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA const *>( &rhs );
return *this;
@@ -41098,46 +41775,43 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA &
- setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
{
semaphore = semaphore_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA &
- setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA &
- setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA &
- setZirconHandle( zx_handle_t zirconHandle_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setZirconHandle( zx_handle_t zirconHandle_ ) VULKAN_HPP_NOEXCEPT
{
zirconHandle = zirconHandle_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkImportSemaphoreZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+ operator VkImportSemaphoreZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( this );
}
- explicit operator VkImportSemaphoreZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+ operator VkImportSemaphoreZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportSemaphoreZirconHandleInfoFUCHSIA *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -41176,8 +41850,7 @@ namespace VULKAN_HPP_NAMESPACE
bool operator==( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) &&
- ( flags == rhs.flags ) && ( handleType == rhs.handleType ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) &&
( memcmp( &zirconHandle, &rhs.zirconHandle, sizeof( zx_handle_t ) ) == 0 );
}
@@ -41187,23 +41860,13 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
- VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {};
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType =
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
- zx_handle_t zirconHandle = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+ VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {};
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+ zx_handle_t zirconHandle = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA ) ==
- sizeof( VkImportSemaphoreZirconHandleInfoFUCHSIA ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA>::value,
- "ImportSemaphoreZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA>
@@ -41220,22 +41883,23 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutTokenNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- IndirectCommandsLayoutTokenNV( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ =
- VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup,
- uint32_t stream_ = {},
- uint32_t offset_ = {},
- uint32_t vertexBindingUnit_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ = {},
- VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ = {},
- VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ = {},
- uint32_t pushconstantOffset_ = {},
- uint32_t pushconstantSize_ = {},
- VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ = {},
- uint32_t indexTypeCount_ = {},
- const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes_ = {},
- const uint32_t * pIndexTypeValues_ = {} ) VULKAN_HPP_NOEXCEPT
- : tokenType( tokenType_ )
+ VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV(
+ VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup,
+ uint32_t stream_ = {},
+ uint32_t offset_ = {},
+ uint32_t vertexBindingUnit_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ = {},
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ = {},
+ uint32_t pushconstantOffset_ = {},
+ uint32_t pushconstantSize_ = {},
+ VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ = {},
+ uint32_t indexTypeCount_ = {},
+ const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes_ = {},
+ const uint32_t * pIndexTypeValues_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , tokenType( tokenType_ )
, stream( stream_ )
, offset( offset_ )
, vertexBindingUnit( vertexBindingUnit_ )
@@ -41248,30 +41912,32 @@ namespace VULKAN_HPP_NAMESPACE
, indexTypeCount( indexTypeCount_ )
, pIndexTypes( pIndexTypes_ )
, pIndexTypeValues( pIndexTypeValues_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- IndirectCommandsLayoutTokenNV( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
IndirectCommandsLayoutTokenNV( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT
: IndirectCommandsLayoutTokenNV( *reinterpret_cast<IndirectCommandsLayoutTokenNV const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- IndirectCommandsLayoutTokenNV(
- VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_,
- uint32_t stream_,
- uint32_t offset_,
- uint32_t vertexBindingUnit_,
- VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_,
- VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_,
- VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_,
- uint32_t pushconstantOffset_,
- uint32_t pushconstantSize_,
- VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndexType> const & indexTypes_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & indexTypeValues_ = {} )
- : tokenType( tokenType_ )
+ IndirectCommandsLayoutTokenNV( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_,
+ uint32_t stream_,
+ uint32_t offset_,
+ uint32_t vertexBindingUnit_,
+ VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_,
+ VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_,
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_,
+ uint32_t pushconstantOffset_,
+ uint32_t pushconstantSize_,
+ VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndexType> const & indexTypes_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & indexTypeValues_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , tokenType( tokenType_ )
, stream( stream_ )
, offset( offset_ )
, vertexBindingUnit( vertexBindingUnit_ )
@@ -41290,17 +41956,15 @@ namespace VULKAN_HPP_NAMESPACE
# else
if ( indexTypes_.size() != indexTypeValues_.size() )
{
- throw LogicError(
- VULKAN_HPP_NAMESPACE_STRING
- "::IndirectCommandsLayoutTokenNV::IndirectCommandsLayoutTokenNV: indexTypes_.size() != indexTypeValues_.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+ "::IndirectCommandsLayoutTokenNV::IndirectCommandsLayoutTokenNV: indexTypes_.size() != indexTypeValues_.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- IndirectCommandsLayoutTokenNV &
- operator=( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ IndirectCommandsLayoutTokenNV & operator=( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
IndirectCommandsLayoutTokenNV & operator=( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -41315,8 +41979,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV &
- setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ ) VULKAN_HPP_NOEXCEPT
{
tokenType = tokenType_;
return *this;
@@ -41334,73 +41997,66 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV &
- setVertexBindingUnit( uint32_t vertexBindingUnit_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setVertexBindingUnit( uint32_t vertexBindingUnit_ ) VULKAN_HPP_NOEXCEPT
{
vertexBindingUnit = vertexBindingUnit_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV &
- setVertexDynamicStride( VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setVertexDynamicStride( VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ ) VULKAN_HPP_NOEXCEPT
{
vertexDynamicStride = vertexDynamicStride_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantPipelineLayout(
- VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV &
+ setPushconstantPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ ) VULKAN_HPP_NOEXCEPT
{
pushconstantPipelineLayout = pushconstantPipelineLayout_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantShaderStageFlags(
- VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV &
+ setPushconstantShaderStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ ) VULKAN_HPP_NOEXCEPT
{
pushconstantShaderStageFlags = pushconstantShaderStageFlags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV &
- setPushconstantOffset( uint32_t pushconstantOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantOffset( uint32_t pushconstantOffset_ ) VULKAN_HPP_NOEXCEPT
{
pushconstantOffset = pushconstantOffset_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV &
- setPushconstantSize( uint32_t pushconstantSize_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantSize( uint32_t pushconstantSize_ ) VULKAN_HPP_NOEXCEPT
{
pushconstantSize = pushconstantSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV &
- setIndirectStateFlags( VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ ) VULKAN_HPP_NOEXCEPT
+ setIndirectStateFlags( VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ ) VULKAN_HPP_NOEXCEPT
{
indirectStateFlags = indirectStateFlags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV &
- setIndexTypeCount( uint32_t indexTypeCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setIndexTypeCount( uint32_t indexTypeCount_ ) VULKAN_HPP_NOEXCEPT
{
indexTypeCount = indexTypeCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV &
- setPIndexTypes( const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPIndexTypes( const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes_ ) VULKAN_HPP_NOEXCEPT
{
pIndexTypes = pIndexTypes_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- IndirectCommandsLayoutTokenNV & setIndexTypes(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndexType> const & indexTypes_ )
- VULKAN_HPP_NOEXCEPT
+ IndirectCommandsLayoutTokenNV &
+ setIndexTypes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndexType> const & indexTypes_ ) VULKAN_HPP_NOEXCEPT
{
indexTypeCount = static_cast<uint32_t>( indexTypes_.size() );
pIndexTypes = indexTypes_.data();
@@ -41408,16 +42064,15 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV &
- setPIndexTypeValues( const uint32_t * pIndexTypeValues_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPIndexTypeValues( const uint32_t * pIndexTypeValues_ ) VULKAN_HPP_NOEXCEPT
{
pIndexTypeValues = pIndexTypeValues_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- IndirectCommandsLayoutTokenNV & setIndexTypeValues(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & indexTypeValues_ ) VULKAN_HPP_NOEXCEPT
+ IndirectCommandsLayoutTokenNV &
+ setIndexTypeValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & indexTypeValues_ ) VULKAN_HPP_NOEXCEPT
{
indexTypeCount = static_cast<uint32_t>( indexTypeValues_.size() );
pIndexTypeValues = indexTypeValues_.data();
@@ -41426,17 +42081,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkIndirectCommandsLayoutTokenNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkIndirectCommandsLayoutTokenNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkIndirectCommandsLayoutTokenNV *>( this );
}
- explicit operator VkIndirectCommandsLayoutTokenNV &() VULKAN_HPP_NOEXCEPT
+ operator VkIndirectCommandsLayoutTokenNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkIndirectCommandsLayoutTokenNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -41481,17 +42136,15 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( IndirectCommandsLayoutTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tokenType == rhs.tokenType ) &&
- ( stream == rhs.stream ) && ( offset == rhs.offset ) && ( vertexBindingUnit == rhs.vertexBindingUnit ) &&
- ( vertexDynamicStride == rhs.vertexDynamicStride ) &&
- ( pushconstantPipelineLayout == rhs.pushconstantPipelineLayout ) &&
- ( pushconstantShaderStageFlags == rhs.pushconstantShaderStageFlags ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tokenType == rhs.tokenType ) && ( stream == rhs.stream ) && ( offset == rhs.offset ) &&
+ ( vertexBindingUnit == rhs.vertexBindingUnit ) && ( vertexDynamicStride == rhs.vertexDynamicStride ) &&
+ ( pushconstantPipelineLayout == rhs.pushconstantPipelineLayout ) && ( pushconstantShaderStageFlags == rhs.pushconstantShaderStageFlags ) &&
( pushconstantOffset == rhs.pushconstantOffset ) && ( pushconstantSize == rhs.pushconstantSize ) &&
- ( indirectStateFlags == rhs.indirectStateFlags ) && ( indexTypeCount == rhs.indexTypeCount ) &&
- ( pIndexTypes == rhs.pIndexTypes ) && ( pIndexTypeValues == rhs.pIndexTypeValues );
+ ( indirectStateFlags == rhs.indirectStateFlags ) && ( indexTypeCount == rhs.indexTypeCount ) && ( pIndexTypes == rhs.pIndexTypes ) &&
+ ( pIndexTypeValues == rhs.pIndexTypeValues );
# endif
}
@@ -41502,31 +42155,22 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutTokenNV;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType =
- VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup;
- uint32_t stream = {};
- uint32_t offset = {};
- uint32_t vertexBindingUnit = {};
- VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride = {};
- VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout = {};
- VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags = {};
- uint32_t pushconstantOffset = {};
- uint32_t pushconstantSize = {};
- VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags = {};
- uint32_t indexTypeCount = {};
- const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes = {};
- const uint32_t * pIndexTypeValues = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV ) ==
- sizeof( VkIndirectCommandsLayoutTokenNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV>::value,
- "IndirectCommandsLayoutTokenNV is not nothrow_move_constructible!" );
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutTokenNV;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup;
+ uint32_t stream = {};
+ uint32_t offset = {};
+ uint32_t vertexBindingUnit = {};
+ VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride = {};
+ VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout = {};
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags = {};
+ uint32_t pushconstantOffset = {};
+ uint32_t pushconstantSize = {};
+ VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags = {};
+ uint32_t indexTypeCount = {};
+ const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes = {};
+ const uint32_t * pIndexTypeValues = {};
+ };
template <>
struct CppType<StructureType, StructureType::eIndirectCommandsLayoutTokenNV>
@@ -41539,54 +42183,56 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkIndirectCommandsLayoutCreateInfoNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eIndirectCommandsLayoutCreateInfoNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutCreateInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV(
- VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ = {},
- VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
- uint32_t tokenCount_ = {},
- const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens_ = {},
- uint32_t streamCount_ = {},
- const uint32_t * pStreamStrides_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR
+ IndirectCommandsLayoutCreateInfoNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
+ uint32_t tokenCount_ = {},
+ const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens_ = {},
+ uint32_t streamCount_ = {},
+ const uint32_t * pStreamStrides_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, pipelineBindPoint( pipelineBindPoint_ )
, tokenCount( tokenCount_ )
, pTokens( pTokens_ )
, streamCount( streamCount_ )
, pStreamStrides( pStreamStrides_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( IndirectCommandsLayoutCreateInfoNV const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
IndirectCommandsLayoutCreateInfoNV( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: IndirectCommandsLayoutCreateInfoNV( *reinterpret_cast<IndirectCommandsLayoutCreateInfoNV const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
IndirectCommandsLayoutCreateInfoNV(
- VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_,
- VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV> const &
- tokens_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & streamStrides_ = {} )
- : flags( flags_ )
+ VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_,
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV> const & tokens_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & streamStrides_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, pipelineBindPoint( pipelineBindPoint_ )
, tokenCount( static_cast<uint32_t>( tokens_.size() ) )
, pTokens( tokens_.data() )
, streamCount( static_cast<uint32_t>( streamStrides_.size() ) )
, pStreamStrides( streamStrides_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- IndirectCommandsLayoutCreateInfoNV &
- operator=( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ IndirectCommandsLayoutCreateInfoNV & operator=( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- IndirectCommandsLayoutCreateInfoNV &
- operator=( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ IndirectCommandsLayoutCreateInfoNV & operator=( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const *>( &rhs );
return *this;
@@ -41599,38 +42245,35 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV &
- setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV &
- setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+ setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
{
pipelineBindPoint = pipelineBindPoint_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV &
- setTokenCount( uint32_t tokenCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setTokenCount( uint32_t tokenCount_ ) VULKAN_HPP_NOEXCEPT
{
tokenCount = tokenCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV &
- setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens_ ) VULKAN_HPP_NOEXCEPT
+ setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens_ ) VULKAN_HPP_NOEXCEPT
{
pTokens = pTokens_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- IndirectCommandsLayoutCreateInfoNV & setTokens(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV> const &
- tokens_ ) VULKAN_HPP_NOEXCEPT
+ IndirectCommandsLayoutCreateInfoNV &
+ setTokens( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV> const & tokens_ ) VULKAN_HPP_NOEXCEPT
{
tokenCount = static_cast<uint32_t>( tokens_.size() );
pTokens = tokens_.data();
@@ -41638,23 +42281,21 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV &
- setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT
{
streamCount = streamCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV &
- setPStreamStrides( const uint32_t * pStreamStrides_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPStreamStrides( const uint32_t * pStreamStrides_ ) VULKAN_HPP_NOEXCEPT
{
pStreamStrides = pStreamStrides_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- IndirectCommandsLayoutCreateInfoNV & setStreamStrides(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & streamStrides_ ) VULKAN_HPP_NOEXCEPT
+ IndirectCommandsLayoutCreateInfoNV &
+ setStreamStrides( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & streamStrides_ ) VULKAN_HPP_NOEXCEPT
{
streamCount = static_cast<uint32_t>( streamStrides_.size() );
pStreamStrides = streamStrides_.data();
@@ -41663,17 +42304,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkIndirectCommandsLayoutCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkIndirectCommandsLayoutCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( this );
}
- explicit operator VkIndirectCommandsLayoutCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkIndirectCommandsLayoutCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkIndirectCommandsLayoutCreateInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -41697,13 +42338,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( IndirectCommandsLayoutCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( tokenCount == rhs.tokenCount ) &&
- ( pTokens == rhs.pTokens ) && ( streamCount == rhs.streamCount ) &&
- ( pStreamStrides == rhs.pStreamStrides );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) &&
+ ( tokenCount == rhs.tokenCount ) && ( pTokens == rhs.pTokens ) && ( streamCount == rhs.streamCount ) && ( pStreamStrides == rhs.pStreamStrides );
# endif
}
@@ -41714,23 +42353,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNV;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags = {};
- VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
- uint32_t tokenCount = {};
- const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens = {};
- uint32_t streamCount = {};
- const uint32_t * pStreamStrides = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNV;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags = {};
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+ uint32_t tokenCount = {};
+ const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens = {};
+ uint32_t streamCount = {};
+ const uint32_t * pStreamStrides = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV ) ==
- sizeof( VkIndirectCommandsLayoutCreateInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV>::value,
- "IndirectCommandsLayoutCreateInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eIndirectCommandsLayoutCreateInfoNV>
@@ -41743,24 +42374,24 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkInitializePerformanceApiInfoINTEL;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eInitializePerformanceApiInfoINTEL;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInitializePerformanceApiInfoINTEL;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( void * pUserData_ = {} ) VULKAN_HPP_NOEXCEPT
- : pUserData( pUserData_ )
- {}
+ VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( void * pUserData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pUserData( pUserData_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- InitializePerformanceApiInfoINTEL( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
InitializePerformanceApiInfoINTEL( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
: InitializePerformanceApiInfoINTEL( *reinterpret_cast<InitializePerformanceApiInfoINTEL const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- InitializePerformanceApiInfoINTEL &
- operator=( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ InitializePerformanceApiInfoINTEL & operator=( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
InitializePerformanceApiInfoINTEL & operator=( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -41782,17 +42413,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkInitializePerformanceApiInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
+ operator VkInitializePerformanceApiInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( this );
}
- explicit operator VkInitializePerformanceApiInfoINTEL &() VULKAN_HPP_NOEXCEPT
+ operator VkInitializePerformanceApiInfoINTEL &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkInitializePerformanceApiInfoINTEL *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -41809,7 +42440,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pUserData == rhs.pUserData );
@@ -41827,14 +42458,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
void * pUserData = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL ) ==
- sizeof( VkInitializePerformanceApiInfoINTEL ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL>::value,
- "InitializePerformanceApiInfoINTEL is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eInitializePerformanceApiInfoINTEL>
@@ -41847,25 +42470,24 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkInputAttachmentAspectReference;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- InputAttachmentAspectReference( uint32_t subpass_ = {},
- uint32_t inputAttachmentIndex_ = {},
- VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( uint32_t subpass_ = {},
+ uint32_t inputAttachmentIndex_ = {},
+ VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT
: subpass( subpass_ )
, inputAttachmentIndex( inputAttachmentIndex_ )
, aspectMask( aspectMask_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- InputAttachmentAspectReference( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
InputAttachmentAspectReference( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
: InputAttachmentAspectReference( *reinterpret_cast<InputAttachmentAspectReference const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- InputAttachmentAspectReference &
- operator=( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ InputAttachmentAspectReference & operator=( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
InputAttachmentAspectReference & operator=( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -41880,32 +42502,30 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference &
- setInputAttachmentIndex( uint32_t inputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setInputAttachmentIndex( uint32_t inputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT
{
inputAttachmentIndex = inputAttachmentIndex_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference &
- setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
{
aspectMask = aspectMask_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkInputAttachmentAspectReference const &() const VULKAN_HPP_NOEXCEPT
+ operator VkInputAttachmentAspectReference const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkInputAttachmentAspectReference *>( this );
}
- explicit operator VkInputAttachmentAspectReference &() VULKAN_HPP_NOEXCEPT
+ operator VkInputAttachmentAspectReference &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkInputAttachmentAspectReference *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -41922,11 +42542,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( subpass == rhs.subpass ) && ( inputAttachmentIndex == rhs.inputAttachmentIndex ) &&
- ( aspectMask == rhs.aspectMask );
+ return ( subpass == rhs.subpass ) && ( inputAttachmentIndex == rhs.inputAttachmentIndex ) && ( aspectMask == rhs.aspectMask );
# endif
}
@@ -41941,14 +42560,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t inputAttachmentIndex = {};
VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference ) ==
- sizeof( VkInputAttachmentAspectReference ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference>::value,
- "InputAttachmentAspectReference is not nothrow_move_constructible!" );
using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference;
struct InstanceCreateInfo
@@ -41959,39 +42570,42 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInstanceCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ = {},
- const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_ = {},
- uint32_t enabledLayerCount_ = {},
- const char * const * ppEnabledLayerNames_ = {},
- uint32_t enabledExtensionCount_ = {},
- const char * const * ppEnabledExtensionNames_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ = {},
+ const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_ = {},
+ uint32_t enabledLayerCount_ = {},
+ const char * const * ppEnabledLayerNames_ = {},
+ uint32_t enabledExtensionCount_ = {},
+ const char * const * ppEnabledExtensionNames_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, pApplicationInfo( pApplicationInfo_ )
, enabledLayerCount( enabledLayerCount_ )
, ppEnabledLayerNames( ppEnabledLayerNames_ )
, enabledExtensionCount( enabledExtensionCount_ )
, ppEnabledExtensionNames( ppEnabledExtensionNames_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR InstanceCreateInfo( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- InstanceCreateInfo( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : InstanceCreateInfo( *reinterpret_cast<InstanceCreateInfo const *>( &rhs ) )
- {}
+ InstanceCreateInfo( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : InstanceCreateInfo( *reinterpret_cast<InstanceCreateInfo const *>( &rhs ) ) {}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- InstanceCreateInfo(
- VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_,
- const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ = {} )
- : flags( flags_ )
+ InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_,
+ const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, pApplicationInfo( pApplicationInfo_ )
, enabledLayerCount( static_cast<uint32_t>( pEnabledLayerNames_.size() ) )
, ppEnabledLayerNames( pEnabledLayerNames_.data() )
, enabledExtensionCount( static_cast<uint32_t>( pEnabledExtensionNames_.size() ) )
, ppEnabledExtensionNames( pEnabledExtensionNames_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -42010,15 +42624,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo &
- setPApplicationInfo( const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPApplicationInfo( const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_ ) VULKAN_HPP_NOEXCEPT
{
pApplicationInfo = pApplicationInfo_;
return *this;
@@ -42030,17 +42642,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo &
- setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
{
ppEnabledLayerNames = ppEnabledLayerNames_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- InstanceCreateInfo & setPEnabledLayerNames(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_ )
- VULKAN_HPP_NOEXCEPT
+ InstanceCreateInfo &
+ setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
{
enabledLayerCount = static_cast<uint32_t>( pEnabledLayerNames_.size() );
ppEnabledLayerNames = pEnabledLayerNames_.data();
@@ -42048,24 +42658,21 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo &
- setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
{
enabledExtensionCount = enabledExtensionCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo &
- setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
{
ppEnabledExtensionNames = ppEnabledExtensionNames_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- InstanceCreateInfo & setPEnabledExtensionNames(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ )
- VULKAN_HPP_NOEXCEPT
+ InstanceCreateInfo &
+ setPEnabledExtensionNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
{
enabledExtensionCount = static_cast<uint32_t>( pEnabledExtensionNames_.size() );
ppEnabledExtensionNames = pEnabledExtensionNames_.data();
@@ -42074,17 +42681,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkInstanceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkInstanceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkInstanceCreateInfo *>( this );
}
- explicit operator VkInstanceCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkInstanceCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkInstanceCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -42099,14 +42706,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- flags,
- pApplicationInfo,
- enabledLayerCount,
- ppEnabledLayerNames,
- enabledExtensionCount,
- ppEnabledExtensionNames );
+ return std::tie( sType, pNext, flags, pApplicationInfo, enabledLayerCount, ppEnabledLayerNames, enabledExtensionCount, ppEnabledExtensionNames );
}
#endif
@@ -42144,15 +42744,14 @@ namespace VULKAN_HPP_NAMESPACE
bool operator==( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( pApplicationInfo == rhs.pApplicationInfo ) && ( enabledLayerCount == rhs.enabledLayerCount ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pApplicationInfo == rhs.pApplicationInfo ) &&
+ ( enabledLayerCount == rhs.enabledLayerCount ) &&
[this, rhs]
{
bool equal = true;
for ( size_t i = 0; equal && ( i < enabledLayerCount ); ++i )
{
- equal = ( ( ppEnabledLayerNames[i] == rhs.ppEnabledLayerNames[i] ) ||
- ( strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ) == 0 ) );
+ equal = ( ( ppEnabledLayerNames[i] == rhs.ppEnabledLayerNames[i] ) || ( strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ) == 0 ) );
}
return equal;
}() && ( enabledExtensionCount == rhs.enabledExtensionCount ) &&
@@ -42183,12 +42782,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t enabledExtensionCount = {};
const char * const * ppEnabledExtensionNames = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::InstanceCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::InstanceCreateInfo>::value,
- "InstanceCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eInstanceCreateInfo>
@@ -42201,22 +42794,20 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkLayerProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14
- LayerProperties( std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & layerName_ = {},
- uint32_t specVersion_ = {},
- uint32_t implementationVersion_ = {},
- std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 LayerProperties( std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & layerName_ = {},
+ uint32_t specVersion_ = {},
+ uint32_t implementationVersion_ = {},
+ std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {} ) VULKAN_HPP_NOEXCEPT
: layerName( layerName_ )
, specVersion( specVersion_ )
, implementationVersion( implementationVersion_ )
, description( description_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR_14 LayerProperties( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- : LayerProperties( *reinterpret_cast<LayerProperties const *>( &rhs ) )
- {}
+ LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT : LayerProperties( *reinterpret_cast<LayerProperties const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
LayerProperties & operator=( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -42227,17 +42818,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkLayerProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkLayerProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkLayerProperties *>( this );
}
- explicit operator VkLayerProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkLayerProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkLayerProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -42257,11 +42848,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( layerName == rhs.layerName ) && ( specVersion == rhs.specVersion ) &&
- ( implementationVersion == rhs.implementationVersion ) && ( description == rhs.description );
+ return ( layerName == rhs.layerName ) && ( specVersion == rhs.specVersion ) && ( implementationVersion == rhs.implementationVersion ) &&
+ ( description == rhs.description );
# endif
}
@@ -42277,12 +42868,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t implementationVersion = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LayerProperties ) == sizeof( VkLayerProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::LayerProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::LayerProperties>::value,
- "LayerProperties is not nothrow_move_constructible!" );
#if defined( VK_USE_PLATFORM_MACOS_MVK )
struct MacOSSurfaceCreateInfoMVK
@@ -42294,17 +42879,20 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ = {},
- const void * pView_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ const void * pView_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, pView( pView_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- MacOSSurfaceCreateInfoMVK( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
: MacOSSurfaceCreateInfoMVK( *reinterpret_cast<MacOSSurfaceCreateInfoMVK const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
MacOSSurfaceCreateInfoMVK & operator=( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -42322,8 +42910,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK &
- setFlags( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -42336,24 +42923,22 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkMacOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMacOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( this );
}
- explicit operator VkMacOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT
+ operator VkMacOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMacOSSurfaceCreateInfoMVK *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK const &,
- const void * const &>
+ std::
+ tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK const &, const void * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -42366,7 +42951,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pView == rhs.pView );
@@ -42385,13 +42970,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags = {};
const void * pView = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK ) ==
- sizeof( VkMacOSSurfaceCreateInfoMVK ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK>::value,
- "MacOSSurfaceCreateInfoMVK is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eMacosSurfaceCreateInfoMVK>
@@ -42410,17 +42988,18 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MappedMemoryRange( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT
- : memory( memory_ )
+ VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , memory( memory_ )
, offset( offset_ )
, size( size_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR MappedMemoryRange( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- MappedMemoryRange( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
- : MappedMemoryRange( *reinterpret_cast<MappedMemoryRange const *>( &rhs ) )
- {}
+ MappedMemoryRange( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT : MappedMemoryRange( *reinterpret_cast<MappedMemoryRange const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
MappedMemoryRange & operator=( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -42438,15 +43017,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange &
- setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange &
- setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
@@ -42459,17 +43036,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkMappedMemoryRange const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMappedMemoryRange const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMappedMemoryRange *>( this );
}
- explicit operator VkMappedMemoryRange &() VULKAN_HPP_NOEXCEPT
+ operator VkMappedMemoryRange &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMappedMemoryRange *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -42490,11 +43067,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( offset == rhs.offset ) &&
- ( size == rhs.size );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( offset == rhs.offset ) && ( size == rhs.size );
# endif
}
@@ -42511,12 +43087,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MappedMemoryRange ) == sizeof( VkMappedMemoryRange ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MappedMemoryRange>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MappedMemoryRange>::value,
- "MappedMemoryRange is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eMappedMemoryRange>
@@ -42532,17 +43102,21 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateFlagsInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ = {},
- uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ = {},
+ uint32_t deviceMask_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, deviceMask( deviceMask_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryAllocateFlagsInfo( *reinterpret_cast<MemoryAllocateFlagsInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
MemoryAllocateFlagsInfo & operator=( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -42560,8 +43134,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo &
- setFlags( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setFlags( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -42574,24 +43147,21 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkMemoryAllocateFlagsInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMemoryAllocateFlagsInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryAllocateFlagsInfo *>( this );
}
- explicit operator VkMemoryAllocateFlagsInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkMemoryAllocateFlagsInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryAllocateFlagsInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::MemoryAllocateFlags const &,
- uint32_t const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::MemoryAllocateFlags const &, uint32_t const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -42604,11 +43174,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( deviceMask == rhs.deviceMask );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( deviceMask == rhs.deviceMask );
# endif
}
@@ -42624,13 +43193,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags = {};
uint32_t deviceMask = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo ) ==
- sizeof( VkMemoryAllocateFlagsInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo>::value,
- "MemoryAllocateFlagsInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eMemoryAllocateFlagsInfo>
@@ -42647,17 +43209,18 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {},
- uint32_t memoryTypeIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : allocationSize( allocationSize_ )
+ VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {},
+ uint32_t memoryTypeIndex_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , allocationSize( allocationSize_ )
, memoryTypeIndex( memoryTypeIndex_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : MemoryAllocateInfo( *reinterpret_cast<MemoryAllocateInfo const *>( &rhs ) )
- {}
+ MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryAllocateInfo( *reinterpret_cast<MemoryAllocateInfo const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
MemoryAllocateInfo & operator=( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -42675,8 +43238,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo &
- setAllocationSize( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setAllocationSize( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ ) VULKAN_HPP_NOEXCEPT
{
allocationSize = allocationSize_;
return *this;
@@ -42689,24 +43251,21 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryAllocateInfo *>( this );
}
- explicit operator VkMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryAllocateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::DeviceSize const &,
- uint32_t const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -42719,11 +43278,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) &&
- ( memoryTypeIndex == rhs.memoryTypeIndex );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) && ( memoryTypeIndex == rhs.memoryTypeIndex );
# endif
}
@@ -42739,12 +43297,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {};
uint32_t memoryTypeIndex = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryAllocateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryAllocateInfo>::value,
- "MemoryAllocateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eMemoryAllocateInfo>
@@ -42761,16 +43313,17 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
- VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {} ) VULKAN_HPP_NOEXCEPT
- : srcAccessMask( srcAccessMask_ )
+ VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , srcAccessMask( srcAccessMask_ )
, dstAccessMask( dstAccessMask_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR MemoryBarrier( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
- : MemoryBarrier( *reinterpret_cast<MemoryBarrier const *>( &rhs ) )
- {}
+ MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryBarrier( *reinterpret_cast<MemoryBarrier const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
MemoryBarrier & operator=( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -42788,32 +43341,30 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 MemoryBarrier &
- setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
srcAccessMask = srcAccessMask_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 MemoryBarrier &
- setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
dstAccessMask = dstAccessMask_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryBarrier *>( this );
}
- explicit operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT
+ operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryBarrier *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -42833,11 +43384,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) &&
- ( dstAccessMask == rhs.dstAccessMask );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask );
# endif
}
@@ -42853,12 +43403,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryBarrier ) == sizeof( VkMemoryBarrier ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryBarrier>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryBarrier>::value,
- "MemoryBarrier is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eMemoryBarrier>
@@ -42875,17 +43419,20 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( VULKAN_HPP_NAMESPACE::Image image_ = {},
- VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT
- : image( image_ )
+ VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , image( image_ )
, buffer( buffer_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- MemoryDedicatedAllocateInfo( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryDedicatedAllocateInfo( *reinterpret_cast<MemoryDedicatedAllocateInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
MemoryDedicatedAllocateInfo & operator=( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -42903,39 +43450,34 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo &
- setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo &
- setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkMemoryDedicatedAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMemoryDedicatedAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryDedicatedAllocateInfo *>( this );
}
- explicit operator VkMemoryDedicatedAllocateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkMemoryDedicatedAllocateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryDedicatedAllocateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::Image const &,
- VULKAN_HPP_NAMESPACE::Buffer const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::Buffer const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -42948,7 +43490,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( buffer == rhs.buffer );
@@ -42967,14 +43509,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Image image = {};
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo ) ==
- sizeof( VkMemoryDedicatedAllocateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo>::value,
- "MemoryDedicatedAllocateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eMemoryDedicatedAllocateInfo>
@@ -42991,19 +43525,21 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedRequirements;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- MemoryDedicatedRequirements( VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT
- : prefersDedicatedAllocation( prefersDedicatedAllocation_ )
+ VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements( VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , prefersDedicatedAllocation( prefersDedicatedAllocation_ )
, requiresDedicatedAllocation( requiresDedicatedAllocation_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- MemoryDedicatedRequirements( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryDedicatedRequirements( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryDedicatedRequirements( *reinterpret_cast<MemoryDedicatedRequirements const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
MemoryDedicatedRequirements & operator=( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -43014,24 +43550,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkMemoryDedicatedRequirements const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMemoryDedicatedRequirements const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryDedicatedRequirements *>( this );
}
- explicit operator VkMemoryDedicatedRequirements &() VULKAN_HPP_NOEXCEPT
+ operator VkMemoryDedicatedRequirements &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryDedicatedRequirements *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -43044,11 +43577,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation ) &&
( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation );
# endif
}
@@ -43065,14 +43597,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation = {};
VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements ) ==
- sizeof( VkMemoryDedicatedRequirements ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements>::value,
- "MemoryDedicatedRequirements is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eMemoryDedicatedRequirements>
@@ -43089,15 +43613,18 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryFdPropertiesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
- : memoryTypeBits( memoryTypeBits_ )
- {}
+ VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , memoryTypeBits( memoryTypeBits_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryFdPropertiesKHR( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryFdPropertiesKHR( *reinterpret_cast<MemoryFdPropertiesKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
MemoryFdPropertiesKHR & operator=( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -43108,17 +43635,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkMemoryFdPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMemoryFdPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryFdPropertiesKHR *>( this );
}
- explicit operator VkMemoryFdPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkMemoryFdPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryFdPropertiesKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -43135,7 +43662,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits );
@@ -43153,12 +43680,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
uint32_t memoryTypeBits = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::value,
- "MemoryFdPropertiesKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eMemoryFdPropertiesKHR>
@@ -43172,68 +43693,61 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkMemoryGetAndroidHardwareBufferInfoANDROID;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- MemoryGetAndroidHardwareBufferInfoANDROID( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {} ) VULKAN_HPP_NOEXCEPT
- : memory( memory_ )
- {}
+ VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , memory( memory_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID(
- MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs )
- VULKAN_HPP_NOEXCEPT
- : MemoryGetAndroidHardwareBufferInfoANDROID(
- *reinterpret_cast<MemoryGetAndroidHardwareBufferInfoANDROID const *>( &rhs ) )
- {}
+ MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ : MemoryGetAndroidHardwareBufferInfoANDROID( *reinterpret_cast<MemoryGetAndroidHardwareBufferInfoANDROID const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- MemoryGetAndroidHardwareBufferInfoANDROID &
- operator=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ MemoryGetAndroidHardwareBufferInfoANDROID & operator=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- MemoryGetAndroidHardwareBufferInfoANDROID &
- operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ MemoryGetAndroidHardwareBufferInfoANDROID & operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID const *>( &rhs );
return *this;
}
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID &
- setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkMemoryGetAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMemoryGetAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( this );
}
- explicit operator VkMemoryGetAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT
+ operator VkMemoryGetAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryGetAndroidHardwareBufferInfoANDROID *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::DeviceMemory const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -43246,7 +43760,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory );
@@ -43264,15 +43778,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID ) ==
- sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID>::value,
- "MemoryGetAndroidHardwareBufferInfoANDROID is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID>
@@ -43289,19 +43794,19 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetFdInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- MemoryGetFdInfoKHR( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ =
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : memory( memory_ )
+ VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR(
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , memory( memory_ )
, handleType( handleType_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : MemoryGetFdInfoKHR( *reinterpret_cast<MemoryGetFdInfoKHR const *>( &rhs ) )
- {}
+ MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryGetFdInfoKHR( *reinterpret_cast<MemoryGetFdInfoKHR const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
MemoryGetFdInfoKHR & operator=( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -43319,32 +43824,30 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR &
- setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR &
- setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkMemoryGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMemoryGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryGetFdInfoKHR *>( this );
}
- explicit operator VkMemoryGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkMemoryGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryGetFdInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -43364,11 +43867,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) &&
- ( handleType == rhs.handleType );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType );
# endif
}
@@ -43379,18 +43881,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetFdInfoKHR;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType =
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetFdInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR>::value,
- "MemoryGetFdInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eMemoryGetFdInfoKHR>
@@ -43407,19 +43902,21 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryGetRemoteAddressInfoNV(
- VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ =
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : memory( memory_ )
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , memory( memory_ )
, handleType( handleType_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- MemoryGetRemoteAddressInfoNV( MemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR MemoryGetRemoteAddressInfoNV( MemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryGetRemoteAddressInfoNV( VkMemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryGetRemoteAddressInfoNV( *reinterpret_cast<MemoryGetRemoteAddressInfoNV const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
MemoryGetRemoteAddressInfoNV & operator=( MemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -43437,32 +43934,31 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV &
- setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV &
- setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkMemoryGetRemoteAddressInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMemoryGetRemoteAddressInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( this );
}
- explicit operator VkMemoryGetRemoteAddressInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkMemoryGetRemoteAddressInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryGetRemoteAddressInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -43482,11 +43978,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( MemoryGetRemoteAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) &&
- ( handleType == rhs.handleType );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType );
# endif
}
@@ -43497,20 +43992,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetRemoteAddressInfoNV;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType =
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetRemoteAddressInfoNV;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV ) ==
- sizeof( VkMemoryGetRemoteAddressInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV>::value,
- "MemoryGetRemoteAddressInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eMemoryGetRemoteAddressInfoNV>
@@ -43528,19 +44014,21 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR(
- VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ =
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : memory( memory_ )
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , memory( memory_ )
, handleType( handleType_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- MemoryGetWin32HandleInfoKHR( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryGetWin32HandleInfoKHR( *reinterpret_cast<MemoryGetWin32HandleInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
MemoryGetWin32HandleInfoKHR & operator=( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -43558,32 +44046,31 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR &
- setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR &
- setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkMemoryGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMemoryGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( this );
}
- explicit operator VkMemoryGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkMemoryGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryGetWin32HandleInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -43603,11 +44090,10 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( MemoryGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) &&
- ( handleType == rhs.handleType );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType );
# endif
}
@@ -43618,20 +44104,11 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType =
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR ) ==
- sizeof( VkMemoryGetWin32HandleInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR>::value,
- "MemoryGetWin32HandleInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eMemoryGetWin32HandleInfoKHR>
@@ -43646,27 +44123,28 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkMemoryGetZirconHandleInfoFUCHSIA;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetZirconHandleInfoFUCHSIA;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetZirconHandleInfoFUCHSIA;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MemoryGetZirconHandleInfoFUCHSIA(
- VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ =
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : memory( memory_ )
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , memory( memory_ )
, handleType( handleType_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- MemoryGetZirconHandleInfoFUCHSIA( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR MemoryGetZirconHandleInfoFUCHSIA( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryGetZirconHandleInfoFUCHSIA( VkMemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryGetZirconHandleInfoFUCHSIA( *reinterpret_cast<MemoryGetZirconHandleInfoFUCHSIA const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- MemoryGetZirconHandleInfoFUCHSIA &
- operator=( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ MemoryGetZirconHandleInfoFUCHSIA & operator=( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryGetZirconHandleInfoFUCHSIA & operator=( VkMemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -43681,32 +44159,31 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA &
- setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA &
- setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkMemoryGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMemoryGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( this );
}
- explicit operator VkMemoryGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+ operator VkMemoryGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryGetZirconHandleInfoFUCHSIA *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -43726,11 +44203,10 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) &&
- ( handleType == rhs.handleType );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType );
# endif
}
@@ -43741,20 +44217,11 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetZirconHandleInfoFUCHSIA;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType =
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetZirconHandleInfoFUCHSIA;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA ) ==
- sizeof( VkMemoryGetZirconHandleInfoFUCHSIA ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA>::value,
- "MemoryGetZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eMemoryGetZirconHandleInfoFUCHSIA>
@@ -43768,17 +44235,15 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkMemoryHeap;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR MemoryHeap( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
- VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR MemoryHeap( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
: size( size_ )
, flags( flags_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR MemoryHeap( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- MemoryHeap( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT
- : MemoryHeap( *reinterpret_cast<MemoryHeap const *>( &rhs ) )
- {}
+ MemoryHeap( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryHeap( *reinterpret_cast<MemoryHeap const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
MemoryHeap & operator=( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -43789,17 +44254,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkMemoryHeap const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMemoryHeap const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryHeap *>( this );
}
- explicit operator VkMemoryHeap &() VULKAN_HPP_NOEXCEPT
+ operator VkMemoryHeap &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryHeap *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -43816,7 +44281,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( size == rhs.size ) && ( flags == rhs.flags );
@@ -43833,12 +44298,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryHeap ) == sizeof( VkMemoryHeap ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryHeap>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryHeap>::value,
- "MemoryHeap is not nothrow_move_constructible!" );
struct MemoryHostPointerPropertiesEXT
{
@@ -43848,20 +44307,21 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryHostPointerPropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
- : memoryTypeBits( memoryTypeBits_ )
- {}
+ VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , memoryTypeBits( memoryTypeBits_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- MemoryHostPointerPropertiesEXT( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryHostPointerPropertiesEXT( *reinterpret_cast<MemoryHostPointerPropertiesEXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- MemoryHostPointerPropertiesEXT &
- operator=( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ MemoryHostPointerPropertiesEXT & operator=( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryHostPointerPropertiesEXT & operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -43869,17 +44329,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkMemoryHostPointerPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMemoryHostPointerPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryHostPointerPropertiesEXT *>( this );
}
- explicit operator VkMemoryHostPointerPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkMemoryHostPointerPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -43896,7 +44356,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits );
@@ -43914,14 +44374,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
uint32_t memoryTypeBits = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT ) ==
- sizeof( VkMemoryHostPointerPropertiesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::value,
- "MemoryHostPointerPropertiesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eMemoryHostPointerPropertiesEXT>
@@ -43934,29 +44386,26 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkMemoryOpaqueCaptureAddressAllocateInfo;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eMemoryOpaqueCaptureAddressAllocateInfo;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- MemoryOpaqueCaptureAddressAllocateInfo( uint64_t opaqueCaptureAddress_ = {} ) VULKAN_HPP_NOEXCEPT
- : opaqueCaptureAddress( opaqueCaptureAddress_ )
- {}
+ VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( uint64_t opaqueCaptureAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , opaqueCaptureAddress( opaqueCaptureAddress_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( MemoryOpaqueCaptureAddressAllocateInfo const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryOpaqueCaptureAddressAllocateInfo( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : MemoryOpaqueCaptureAddressAllocateInfo(
- *reinterpret_cast<MemoryOpaqueCaptureAddressAllocateInfo const *>( &rhs ) )
- {}
+ : MemoryOpaqueCaptureAddressAllocateInfo( *reinterpret_cast<MemoryOpaqueCaptureAddressAllocateInfo const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- MemoryOpaqueCaptureAddressAllocateInfo &
- operator=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ MemoryOpaqueCaptureAddressAllocateInfo & operator=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- MemoryOpaqueCaptureAddressAllocateInfo &
- operator=( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ MemoryOpaqueCaptureAddressAllocateInfo & operator=( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo const *>( &rhs );
return *this;
@@ -43969,25 +44418,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo &
- setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT
{
opaqueCaptureAddress = opaqueCaptureAddress_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkMemoryOpaqueCaptureAddressAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMemoryOpaqueCaptureAddressAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryOpaqueCaptureAddressAllocateInfo *>( this );
}
- explicit operator VkMemoryOpaqueCaptureAddressAllocateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkMemoryOpaqueCaptureAddressAllocateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryOpaqueCaptureAddressAllocateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -44004,7 +44452,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress );
@@ -44022,15 +44470,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
uint64_t opaqueCaptureAddress = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo ) ==
- sizeof( VkMemoryOpaqueCaptureAddressAllocateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo>::value,
- "MemoryOpaqueCaptureAddressAllocateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eMemoryOpaqueCaptureAddressAllocateInfo>
@@ -44047,20 +44486,21 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryPriorityAllocateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( float priority_ = {} ) VULKAN_HPP_NOEXCEPT
- : priority( priority_ )
- {}
+ VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( float priority_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , priority( priority_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- MemoryPriorityAllocateInfoEXT( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryPriorityAllocateInfoEXT( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryPriorityAllocateInfoEXT( *reinterpret_cast<MemoryPriorityAllocateInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- MemoryPriorityAllocateInfoEXT &
- operator=( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ MemoryPriorityAllocateInfoEXT & operator=( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryPriorityAllocateInfoEXT & operator=( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -44082,17 +44522,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkMemoryPriorityAllocateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMemoryPriorityAllocateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryPriorityAllocateInfoEXT *>( this );
}
- explicit operator VkMemoryPriorityAllocateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkMemoryPriorityAllocateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryPriorityAllocateInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -44109,7 +44549,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( MemoryPriorityAllocateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( priority == rhs.priority );
@@ -44127,14 +44567,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
float priority = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT ) ==
- sizeof( VkMemoryPriorityAllocateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT>::value,
- "MemoryPriorityAllocateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eMemoryPriorityAllocateInfoEXT>
@@ -44153,13 +44585,12 @@ namespace VULKAN_HPP_NAMESPACE
: size( size_ )
, alignment( alignment_ )
, memoryTypeBits( memoryTypeBits_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR MemoryRequirements( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- MemoryRequirements( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
- : MemoryRequirements( *reinterpret_cast<MemoryRequirements const *>( &rhs ) )
- {}
+ MemoryRequirements( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryRequirements( *reinterpret_cast<MemoryRequirements const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
MemoryRequirements & operator=( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -44170,17 +44601,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryRequirements *>( this );
}
- explicit operator VkMemoryRequirements &() VULKAN_HPP_NOEXCEPT
+ operator VkMemoryRequirements &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryRequirements *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -44197,7 +44628,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( size == rhs.size ) && ( alignment == rhs.alignment ) && ( memoryTypeBits == rhs.memoryTypeBits );
@@ -44215,12 +44646,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize alignment = {};
uint32_t memoryTypeBits = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryRequirements ) == sizeof( VkMemoryRequirements ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryRequirements>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryRequirements>::value,
- "MemoryRequirements is not nothrow_move_constructible!" );
struct MemoryRequirements2
{
@@ -44230,16 +44655,17 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryRequirements2;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- MemoryRequirements2( VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT
- : memoryRequirements( memoryRequirements_ )
- {}
+ VULKAN_HPP_CONSTEXPR MemoryRequirements2( VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , memoryRequirements( memoryRequirements_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR MemoryRequirements2( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- MemoryRequirements2( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : MemoryRequirements2( *reinterpret_cast<MemoryRequirements2 const *>( &rhs ) )
- {}
+ MemoryRequirements2( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryRequirements2( *reinterpret_cast<MemoryRequirements2 const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
MemoryRequirements2 & operator=( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -44250,23 +44676,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryRequirements2 *>( this );
}
- explicit operator VkMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
+ operator VkMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryRequirements2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::MemoryRequirements const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::MemoryRequirements const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -44279,7 +44703,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryRequirements == rhs.memoryRequirements );
@@ -44297,12 +44721,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryRequirements2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryRequirements2>::value,
- "MemoryRequirements2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eMemoryRequirements2>
@@ -44316,17 +44734,15 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkMemoryType;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR MemoryType( VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags_ = {},
- uint32_t heapIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR MemoryType( VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags_ = {}, uint32_t heapIndex_ = {} ) VULKAN_HPP_NOEXCEPT
: propertyFlags( propertyFlags_ )
, heapIndex( heapIndex_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR MemoryType( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- MemoryType( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
- : MemoryType( *reinterpret_cast<MemoryType const *>( &rhs ) )
- {}
+ MemoryType( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryType( *reinterpret_cast<MemoryType const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
MemoryType & operator=( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -44337,17 +44753,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkMemoryType const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMemoryType const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryType *>( this );
}
- explicit operator VkMemoryType &() VULKAN_HPP_NOEXCEPT
+ operator VkMemoryType &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryType *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -44364,7 +44780,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( propertyFlags == rhs.propertyFlags ) && ( heapIndex == rhs.heapIndex );
@@ -44381,12 +44797,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags = {};
uint32_t heapIndex = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryType ) == sizeof( VkMemoryType ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryType>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryType>::value,
- "MemoryType is not nothrow_move_constructible!" );
#if defined( VK_USE_PLATFORM_WIN32_KHR )
struct MemoryWin32HandlePropertiesKHR
@@ -44397,20 +44807,21 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryWin32HandlePropertiesKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
- : memoryTypeBits( memoryTypeBits_ )
- {}
+ VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , memoryTypeBits( memoryTypeBits_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- MemoryWin32HandlePropertiesKHR( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryWin32HandlePropertiesKHR( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryWin32HandlePropertiesKHR( *reinterpret_cast<MemoryWin32HandlePropertiesKHR const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- MemoryWin32HandlePropertiesKHR &
- operator=( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ MemoryWin32HandlePropertiesKHR & operator=( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryWin32HandlePropertiesKHR & operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -44418,17 +44829,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkMemoryWin32HandlePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMemoryWin32HandlePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryWin32HandlePropertiesKHR *>( this );
}
- explicit operator VkMemoryWin32HandlePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkMemoryWin32HandlePropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -44445,7 +44856,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( MemoryWin32HandlePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits );
@@ -44463,14 +44874,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
uint32_t memoryTypeBits = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR ) ==
- sizeof( VkMemoryWin32HandlePropertiesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::value,
- "MemoryWin32HandlePropertiesKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eMemoryWin32HandlePropertiesKHR>
@@ -44485,43 +44888,42 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkMemoryZirconHandlePropertiesFUCHSIA;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eMemoryZirconHandlePropertiesFUCHSIA;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryZirconHandlePropertiesFUCHSIA;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
- : memoryTypeBits( memoryTypeBits_ )
- {}
+ VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , memoryTypeBits( memoryTypeBits_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA( MemoryZirconHandlePropertiesFUCHSIA const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MemoryZirconHandlePropertiesFUCHSIA( VkMemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
: MemoryZirconHandlePropertiesFUCHSIA( *reinterpret_cast<MemoryZirconHandlePropertiesFUCHSIA const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- MemoryZirconHandlePropertiesFUCHSIA &
- operator=( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ MemoryZirconHandlePropertiesFUCHSIA & operator=( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- MemoryZirconHandlePropertiesFUCHSIA &
- operator=( VkMemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+ MemoryZirconHandlePropertiesFUCHSIA & operator=( VkMemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA const *>( &rhs );
return *this;
}
- explicit operator VkMemoryZirconHandlePropertiesFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMemoryZirconHandlePropertiesFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMemoryZirconHandlePropertiesFUCHSIA *>( this );
}
- explicit operator VkMemoryZirconHandlePropertiesFUCHSIA &() VULKAN_HPP_NOEXCEPT
+ operator VkMemoryZirconHandlePropertiesFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -44538,7 +44940,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits );
@@ -44556,14 +44958,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
uint32_t memoryTypeBits = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA ) ==
- sizeof( VkMemoryZirconHandlePropertiesFUCHSIA ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::value,
- "MemoryZirconHandlePropertiesFUCHSIA is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eMemoryZirconHandlePropertiesFUCHSIA>
@@ -44581,18 +44975,21 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMetalSurfaceCreateInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ = {},
- const CAMetalLayer * pLayer_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ = {},
+ const CAMetalLayer * pLayer_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, pLayer( pLayer_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- MetalSurfaceCreateInfoEXT( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MetalSurfaceCreateInfoEXT( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: MetalSurfaceCreateInfoEXT( *reinterpret_cast<MetalSurfaceCreateInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
MetalSurfaceCreateInfoEXT & operator=( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -44610,8 +45007,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT &
- setFlags( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -44624,17 +45020,17 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkMetalSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMetalSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( this );
}
- explicit operator VkMetalSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkMetalSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMetalSurfaceCreateInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -44654,7 +45050,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pLayer == rhs.pLayer );
@@ -44673,13 +45069,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags = {};
const CAMetalLayer * pLayer = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT ) ==
- sizeof( VkMetalSurfaceCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT>::value,
- "MetalSurfaceCreateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eMetalSurfaceCreateInfoEXT>
@@ -44688,24 +45077,722 @@ namespace VULKAN_HPP_NAMESPACE
};
#endif /*VK_USE_PLATFORM_METAL_EXT*/
+ struct MicromapBuildInfoEXT
+ {
+ using NativeType = VkMicromapBuildInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapBuildInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT( VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_ = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap,
+ VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT flags_ = {},
+ VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT::eBuild,
+ VULKAN_HPP_NAMESPACE::MicromapEXT dstMicromap_ = {},
+ uint32_t usageCountsCount_ = {},
+ const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ = {},
+ const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR triangleArray_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize triangleArrayStride_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , type( type_ )
+ , flags( flags_ )
+ , mode( mode_ )
+ , dstMicromap( dstMicromap_ )
+ , usageCountsCount( usageCountsCount_ )
+ , pUsageCounts( pUsageCounts_ )
+ , ppUsageCounts( ppUsageCounts_ )
+ , data( data_ )
+ , scratchData( scratchData_ )
+ , triangleArray( triangleArray_ )
+ , triangleArrayStride( triangleArrayStride_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT( MicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MicromapBuildInfoEXT( VkMicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : MicromapBuildInfoEXT( *reinterpret_cast<MicromapBuildInfoEXT const *>( &rhs ) )
+ {
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ MicromapBuildInfoEXT( VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_,
+ VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT flags_,
+ VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT mode_,
+ VULKAN_HPP_NAMESPACE::MicromapEXT dstMicromap_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT> const & usageCounts_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const> const & pUsageCounts_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR triangleArray_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize triangleArrayStride_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , type( type_ )
+ , flags( flags_ )
+ , mode( mode_ )
+ , dstMicromap( dstMicromap_ )
+ , usageCountsCount( static_cast<uint32_t>( !usageCounts_.empty() ? usageCounts_.size() : pUsageCounts_.size() ) )
+ , pUsageCounts( usageCounts_.data() )
+ , ppUsageCounts( pUsageCounts_.data() )
+ , data( data_ )
+ , scratchData( scratchData_ )
+ , triangleArray( triangleArray_ )
+ , triangleArrayStride( triangleArrayStride_ )
+ {
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+ VULKAN_HPP_ASSERT( ( !usageCounts_.empty() + !pUsageCounts_.empty() ) <= 1 );
+# else
+ if ( 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() ) )
+ {
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::MicromapBuildInfoEXT::MicromapBuildInfoEXT: 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() )" );
+ }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ MicromapBuildInfoEXT & operator=( MicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MicromapBuildInfoEXT & operator=( VkMicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setType( VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_ ) VULKAN_HPP_NOEXCEPT
+ {
+ type = type_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setMode( VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ mode = mode_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setDstMicromap( VULKAN_HPP_NAMESPACE::MicromapEXT dstMicromap_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dstMicromap = dstMicromap_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setUsageCountsCount( uint32_t usageCountsCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ usageCountsCount = usageCountsCount_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setPUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pUsageCounts = pUsageCounts_;
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ MicromapBuildInfoEXT &
+ setUsageCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT> const & usageCounts_ ) VULKAN_HPP_NOEXCEPT
+ {
+ usageCountsCount = static_cast<uint32_t>( usageCounts_.size() );
+ pUsageCounts = usageCounts_.data();
+ return *this;
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setPpUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ ) VULKAN_HPP_NOEXCEPT
+ {
+ ppUsageCounts = ppUsageCounts_;
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ MicromapBuildInfoEXT & setPUsageCounts(
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const> const & pUsageCounts_ ) VULKAN_HPP_NOEXCEPT
+ {
+ usageCountsCount = static_cast<uint32_t>( pUsageCounts_.size() );
+ ppUsageCounts = pUsageCounts_.data();
+ return *this;
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT
+ {
+ data = data_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setScratchData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & scratchData_ ) VULKAN_HPP_NOEXCEPT
+ {
+ scratchData = scratchData_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT &
+ setTriangleArray( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & triangleArray_ ) VULKAN_HPP_NOEXCEPT
+ {
+ triangleArray = triangleArray_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setTriangleArrayStride( VULKAN_HPP_NAMESPACE::DeviceSize triangleArrayStride_ ) VULKAN_HPP_NOEXCEPT
+ {
+ triangleArrayStride = triangleArrayStride_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkMicromapBuildInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkMicromapBuildInfoEXT *>( this );
+ }
+
+ operator VkMicromapBuildInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMicromapBuildInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ VULKAN_HPP_NAMESPACE::MicromapTypeEXT const &,
+ VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT const &,
+ VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT const &,
+ VULKAN_HPP_NAMESPACE::MicromapEXT const &,
+ uint32_t const &,
+ const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const &,
+ const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * const &,
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &,
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const &,
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &,
+ VULKAN_HPP_NAMESPACE::DeviceSize const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie(
+ sType, pNext, type, flags, mode, dstMicromap, usageCountsCount, pUsageCounts, ppUsageCounts, data, scratchData, triangleArray, triangleArrayStride );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMicromapBuildInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::MicromapTypeEXT type = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap;
+ VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT flags = {};
+ VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT mode = VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT::eBuild;
+ VULKAN_HPP_NAMESPACE::MicromapEXT dstMicromap = {};
+ uint32_t usageCountsCount = {};
+ const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts = {};
+ const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts = {};
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {};
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData = {};
+ VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR triangleArray = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize triangleArrayStride = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eMicromapBuildInfoEXT>
+ {
+ using Type = MicromapBuildInfoEXT;
+ };
+
+ struct MicromapBuildSizesInfoEXT
+ {
+ using NativeType = VkMicromapBuildSizesInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapBuildSizesInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MicromapBuildSizesInfoEXT( VULKAN_HPP_NAMESPACE::DeviceSize micromapSize_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 discardable_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , micromapSize( micromapSize_ )
+ , buildScratchSize( buildScratchSize_ )
+ , discardable( discardable_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR MicromapBuildSizesInfoEXT( MicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MicromapBuildSizesInfoEXT( VkMicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : MicromapBuildSizesInfoEXT( *reinterpret_cast<MicromapBuildSizesInfoEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ MicromapBuildSizesInfoEXT & operator=( MicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MicromapBuildSizesInfoEXT & operator=( VkMicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setMicromapSize( VULKAN_HPP_NAMESPACE::DeviceSize micromapSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ micromapSize = micromapSize_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setBuildScratchSize( VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ buildScratchSize = buildScratchSize_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setDiscardable( VULKAN_HPP_NAMESPACE::Bool32 discardable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ discardable = discardable_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkMicromapBuildSizesInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkMicromapBuildSizesInfoEXT *>( this );
+ }
+
+ operator VkMicromapBuildSizesInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ VULKAN_HPP_NAMESPACE::DeviceSize const &,
+ VULKAN_HPP_NAMESPACE::DeviceSize const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, micromapSize, buildScratchSize, discardable );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( MicromapBuildSizesInfoEXT const & ) const = default;
+#else
+ bool operator==( MicromapBuildSizesInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( micromapSize == rhs.micromapSize ) && ( buildScratchSize == rhs.buildScratchSize ) &&
+ ( discardable == rhs.discardable );
+# endif
+ }
+
+ bool operator!=( MicromapBuildSizesInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMicromapBuildSizesInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize micromapSize = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize = {};
+ VULKAN_HPP_NAMESPACE::Bool32 discardable = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eMicromapBuildSizesInfoEXT>
+ {
+ using Type = MicromapBuildSizesInfoEXT;
+ };
+
+ struct MicromapCreateInfoEXT
+ {
+ using NativeType = VkMicromapCreateInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MicromapCreateInfoEXT( VULKAN_HPP_NAMESPACE::MicromapCreateFlagsEXT createFlags_ = {},
+ VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
+ VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_ = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap,
+ VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , createFlags( createFlags_ )
+ , buffer( buffer_ )
+ , offset( offset_ )
+ , size( size_ )
+ , type( type_ )
+ , deviceAddress( deviceAddress_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR MicromapCreateInfoEXT( MicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MicromapCreateInfoEXT( VkMicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : MicromapCreateInfoEXT( *reinterpret_cast<MicromapCreateInfoEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ MicromapCreateInfoEXT & operator=( MicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MicromapCreateInfoEXT & operator=( VkMicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setCreateFlags( VULKAN_HPP_NAMESPACE::MicromapCreateFlagsEXT createFlags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ createFlags = createFlags_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ {
+ buffer = buffer_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ offset = offset_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+ {
+ size = size_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setType( VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_ ) VULKAN_HPP_NOEXCEPT
+ {
+ type = type_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
+ {
+ deviceAddress = deviceAddress_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkMicromapCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkMicromapCreateInfoEXT *>( this );
+ }
+
+ operator VkMicromapCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMicromapCreateInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ VULKAN_HPP_NAMESPACE::MicromapCreateFlagsEXT const &,
+ VULKAN_HPP_NAMESPACE::Buffer const &,
+ VULKAN_HPP_NAMESPACE::DeviceSize const &,
+ VULKAN_HPP_NAMESPACE::DeviceSize const &,
+ VULKAN_HPP_NAMESPACE::MicromapTypeEXT const &,
+ VULKAN_HPP_NAMESPACE::DeviceAddress const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, createFlags, buffer, offset, size, type, deviceAddress );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( MicromapCreateInfoEXT const & ) const = default;
+#else
+ bool operator==( MicromapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( createFlags == rhs.createFlags ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) &&
+ ( size == rhs.size ) && ( type == rhs.type ) && ( deviceAddress == rhs.deviceAddress );
+# endif
+ }
+
+ bool operator!=( MicromapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMicromapCreateInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::MicromapCreateFlagsEXT createFlags = {};
+ VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+ VULKAN_HPP_NAMESPACE::MicromapTypeEXT type = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap;
+ VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eMicromapCreateInfoEXT>
+ {
+ using Type = MicromapCreateInfoEXT;
+ };
+
+ struct MicromapTriangleEXT
+ {
+ using NativeType = VkMicromapTriangleEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MicromapTriangleEXT( uint32_t dataOffset_ = {}, uint16_t subdivisionLevel_ = {}, uint16_t format_ = {} ) VULKAN_HPP_NOEXCEPT
+ : dataOffset( dataOffset_ )
+ , subdivisionLevel( subdivisionLevel_ )
+ , format( format_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR MicromapTriangleEXT( MicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MicromapTriangleEXT( VkMicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MicromapTriangleEXT( *reinterpret_cast<MicromapTriangleEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ MicromapTriangleEXT & operator=( MicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MicromapTriangleEXT & operator=( VkMicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapTriangleEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT & setDataOffset( uint32_t dataOffset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dataOffset = dataOffset_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT & setSubdivisionLevel( uint16_t subdivisionLevel_ ) VULKAN_HPP_NOEXCEPT
+ {
+ subdivisionLevel = subdivisionLevel_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT & setFormat( uint16_t format_ ) VULKAN_HPP_NOEXCEPT
+ {
+ format = format_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkMicromapTriangleEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkMicromapTriangleEXT *>( this );
+ }
+
+ operator VkMicromapTriangleEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMicromapTriangleEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<uint32_t const &, uint16_t const &, uint16_t const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( dataOffset, subdivisionLevel, format );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( MicromapTriangleEXT const & ) const = default;
+#else
+ bool operator==( MicromapTriangleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( dataOffset == rhs.dataOffset ) && ( subdivisionLevel == rhs.subdivisionLevel ) && ( format == rhs.format );
+# endif
+ }
+
+ bool operator!=( MicromapTriangleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ uint32_t dataOffset = {};
+ uint16_t subdivisionLevel = {};
+ uint16_t format = {};
+ };
+
+ struct MicromapVersionInfoEXT
+ {
+ using NativeType = VkMicromapVersionInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapVersionInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR MicromapVersionInfoEXT( const uint8_t * pVersionData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pVersionData( pVersionData_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR MicromapVersionInfoEXT( MicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MicromapVersionInfoEXT( VkMicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : MicromapVersionInfoEXT( *reinterpret_cast<MicromapVersionInfoEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ MicromapVersionInfoEXT & operator=( MicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MicromapVersionInfoEXT & operator=( VkMicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 MicromapVersionInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MicromapVersionInfoEXT & setPVersionData( const uint8_t * pVersionData_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pVersionData = pVersionData_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkMicromapVersionInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkMicromapVersionInfoEXT *>( this );
+ }
+
+ operator VkMicromapVersionInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMicromapVersionInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const uint8_t * const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, pVersionData );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( MicromapVersionInfoEXT const & ) const = default;
+#else
+ bool operator==( MicromapVersionInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pVersionData == rhs.pVersionData );
+# endif
+ }
+
+ bool operator!=( MicromapVersionInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMicromapVersionInfoEXT;
+ const void * pNext = {};
+ const uint8_t * pVersionData = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eMicromapVersionInfoEXT>
+ {
+ using Type = MicromapVersionInfoEXT;
+ };
+
struct MultiDrawIndexedInfoEXT
{
using NativeType = VkMultiDrawIndexedInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR MultiDrawIndexedInfoEXT( uint32_t firstIndex_ = {},
- uint32_t indexCount_ = {},
- int32_t vertexOffset_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR MultiDrawIndexedInfoEXT( uint32_t firstIndex_ = {}, uint32_t indexCount_ = {}, int32_t vertexOffset_ = {} ) VULKAN_HPP_NOEXCEPT
: firstIndex( firstIndex_ )
, indexCount( indexCount_ )
, vertexOffset( vertexOffset_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR MultiDrawIndexedInfoEXT( MultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MultiDrawIndexedInfoEXT( VkMultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: MultiDrawIndexedInfoEXT( *reinterpret_cast<MultiDrawIndexedInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
MultiDrawIndexedInfoEXT & operator=( MultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -44736,17 +45823,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkMultiDrawIndexedInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMultiDrawIndexedInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( this );
}
- explicit operator VkMultiDrawIndexedInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkMultiDrawIndexedInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMultiDrawIndexedInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -44763,11 +45850,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( MultiDrawIndexedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( firstIndex == rhs.firstIndex ) && ( indexCount == rhs.indexCount ) &&
- ( vertexOffset == rhs.vertexOffset );
+ return ( firstIndex == rhs.firstIndex ) && ( indexCount == rhs.indexCount ) && ( vertexOffset == rhs.vertexOffset );
# endif
}
@@ -44782,13 +45868,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t indexCount = {};
int32_t vertexOffset = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT ) ==
- sizeof( VkMultiDrawIndexedInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT>::value,
- "MultiDrawIndexedInfoEXT is not nothrow_move_constructible!" );
struct MultiDrawInfoEXT
{
@@ -44798,13 +45877,12 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_CONSTEXPR MultiDrawInfoEXT( uint32_t firstVertex_ = {}, uint32_t vertexCount_ = {} ) VULKAN_HPP_NOEXCEPT
: firstVertex( firstVertex_ )
, vertexCount( vertexCount_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR MultiDrawInfoEXT( MultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- MultiDrawInfoEXT( VkMultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : MultiDrawInfoEXT( *reinterpret_cast<MultiDrawInfoEXT const *>( &rhs ) )
- {}
+ MultiDrawInfoEXT( VkMultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MultiDrawInfoEXT( *reinterpret_cast<MultiDrawInfoEXT const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
MultiDrawInfoEXT & operator=( MultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -44829,17 +45907,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkMultiDrawInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMultiDrawInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMultiDrawInfoEXT *>( this );
}
- explicit operator VkMultiDrawInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkMultiDrawInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMultiDrawInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -44856,7 +45934,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( MultiDrawInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( firstVertex == rhs.firstVertex ) && ( vertexCount == rhs.vertexCount );
@@ -44873,12 +45951,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t firstVertex = {};
uint32_t vertexCount = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT ) == sizeof( VkMultiDrawInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT>::value,
- "MultiDrawInfoEXT is not nothrow_move_constructible!" );
struct MultisamplePropertiesEXT
{
@@ -44888,16 +45960,18 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultisamplePropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- MultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxSampleLocationGridSize( maxSampleLocationGridSize_ )
- {}
+ VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxSampleLocationGridSize( maxSampleLocationGridSize_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MultisamplePropertiesEXT( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: MultisamplePropertiesEXT( *reinterpret_cast<MultisamplePropertiesEXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
MultisamplePropertiesEXT & operator=( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -44908,17 +45982,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkMultisamplePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMultisamplePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMultisamplePropertiesEXT *>( this );
}
- explicit operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMultisamplePropertiesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -44935,11 +46009,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize );
# endif
}
@@ -44954,13 +46027,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT ) ==
- sizeof( VkMultisamplePropertiesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT>::value,
- "MultisamplePropertiesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eMultisamplePropertiesEXT>
@@ -44968,32 +46034,145 @@ namespace VULKAN_HPP_NAMESPACE
using Type = MultisamplePropertiesEXT;
};
+ struct MultisampledRenderToSingleSampledInfoEXT
+ {
+ using NativeType = VkMultisampledRenderToSingleSampledInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultisampledRenderToSingleSampledInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR
+ MultisampledRenderToSingleSampledInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampledEnable_ = {},
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , multisampledRenderToSingleSampledEnable( multisampledRenderToSingleSampledEnable_ )
+ , rasterizationSamples( rasterizationSamples_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR MultisampledRenderToSingleSampledInfoEXT( MultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MultisampledRenderToSingleSampledInfoEXT( VkMultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : MultisampledRenderToSingleSampledInfoEXT( *reinterpret_cast<MultisampledRenderToSingleSampledInfoEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ MultisampledRenderToSingleSampledInfoEXT & operator=( MultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ MultisampledRenderToSingleSampledInfoEXT & operator=( VkMultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT &
+ setMultisampledRenderToSingleSampledEnable( VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampledEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ multisampledRenderToSingleSampledEnable = multisampledRenderToSingleSampledEnable_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT &
+ setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
+ {
+ rasterizationSamples = rasterizationSamples_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkMultisampledRenderToSingleSampledInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkMultisampledRenderToSingleSampledInfoEXT *>( this );
+ }
+
+ operator VkMultisampledRenderToSingleSampledInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMultisampledRenderToSingleSampledInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, multisampledRenderToSingleSampledEnable, rasterizationSamples );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( MultisampledRenderToSingleSampledInfoEXT const & ) const = default;
+#else
+ bool operator==( MultisampledRenderToSingleSampledInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multisampledRenderToSingleSampledEnable == rhs.multisampledRenderToSingleSampledEnable ) &&
+ ( rasterizationSamples == rhs.rasterizationSamples );
+# endif
+ }
+
+ bool operator!=( MultisampledRenderToSingleSampledInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisampledRenderToSingleSampledInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampledEnable = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eMultisampledRenderToSingleSampledInfoEXT>
+ {
+ using Type = MultisampledRenderToSingleSampledInfoEXT;
+ };
+
struct MultiviewPerViewAttributesInfoNVX
{
using NativeType = VkMultiviewPerViewAttributesInfoNVX;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eMultiviewPerViewAttributesInfoNVX;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultiviewPerViewAttributesInfoNVX;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR MultiviewPerViewAttributesInfoNVX(
- VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly_ = {} ) VULKAN_HPP_NOEXCEPT
- : perViewAttributes( perViewAttributes_ )
+ VULKAN_HPP_CONSTEXPR MultiviewPerViewAttributesInfoNVX( VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , perViewAttributes( perViewAttributes_ )
, perViewAttributesPositionXOnly( perViewAttributesPositionXOnly_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- MultiviewPerViewAttributesInfoNVX( MultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR MultiviewPerViewAttributesInfoNVX( MultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MultiviewPerViewAttributesInfoNVX( VkMultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
: MultiviewPerViewAttributesInfoNVX( *reinterpret_cast<MultiviewPerViewAttributesInfoNVX const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- MultiviewPerViewAttributesInfoNVX &
- operator=( MultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ MultiviewPerViewAttributesInfoNVX & operator=( MultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MultiviewPerViewAttributesInfoNVX & operator=( VkMultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -45008,39 +46187,35 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX &
- setPerViewAttributes( VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & setPerViewAttributes( VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes_ ) VULKAN_HPP_NOEXCEPT
{
perViewAttributes = perViewAttributes_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & setPerViewAttributesPositionXOnly(
- VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX &
+ setPerViewAttributesPositionXOnly( VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly_ ) VULKAN_HPP_NOEXCEPT
{
perViewAttributesPositionXOnly = perViewAttributesPositionXOnly_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkMultiviewPerViewAttributesInfoNVX const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMultiviewPerViewAttributesInfoNVX const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMultiviewPerViewAttributesInfoNVX *>( this );
}
- explicit operator VkMultiviewPerViewAttributesInfoNVX &() VULKAN_HPP_NOEXCEPT
+ operator VkMultiviewPerViewAttributesInfoNVX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMultiviewPerViewAttributesInfoNVX *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -45053,7 +46228,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( MultiviewPerViewAttributesInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( perViewAttributes == rhs.perViewAttributes ) &&
@@ -45068,19 +46243,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultiviewPerViewAttributesInfoNVX;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultiviewPerViewAttributesInfoNVX;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes = {};
VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX ) ==
- sizeof( VkMultiviewPerViewAttributesInfoNVX ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX>::value,
- "MultiviewPerViewAttributesInfoNVX is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eMultiviewPerViewAttributesInfoNVX>
@@ -45088,63 +46255,58 @@ namespace VULKAN_HPP_NAMESPACE
using Type = MultiviewPerViewAttributesInfoNVX;
};
- struct MutableDescriptorTypeListVALVE
+ struct MutableDescriptorTypeListEXT
{
- using NativeType = VkMutableDescriptorTypeListVALVE;
+ using NativeType = VkMutableDescriptorTypeListEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListVALVE(
- uint32_t descriptorTypeCount_ = {},
- const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListEXT( uint32_t descriptorTypeCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes_ = {} ) VULKAN_HPP_NOEXCEPT
: descriptorTypeCount( descriptorTypeCount_ )
, pDescriptorTypes( pDescriptorTypes_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- MutableDescriptorTypeListVALVE( MutableDescriptorTypeListVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListEXT( MutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- MutableDescriptorTypeListVALVE( VkMutableDescriptorTypeListVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
- : MutableDescriptorTypeListVALVE( *reinterpret_cast<MutableDescriptorTypeListVALVE const *>( &rhs ) )
- {}
+ MutableDescriptorTypeListEXT( VkMutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : MutableDescriptorTypeListEXT( *reinterpret_cast<MutableDescriptorTypeListEXT const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- MutableDescriptorTypeListVALVE(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorType> const &
- descriptorTypes_ )
- : descriptorTypeCount( static_cast<uint32_t>( descriptorTypes_.size() ) )
- , pDescriptorTypes( descriptorTypes_.data() )
- {}
+ MutableDescriptorTypeListEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorType> const & descriptorTypes_ )
+ : descriptorTypeCount( static_cast<uint32_t>( descriptorTypes_.size() ) ), pDescriptorTypes( descriptorTypes_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- MutableDescriptorTypeListVALVE &
- operator=( MutableDescriptorTypeListVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ MutableDescriptorTypeListEXT & operator=( MutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- MutableDescriptorTypeListVALVE & operator=( VkMutableDescriptorTypeListVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
+ MutableDescriptorTypeListEXT & operator=( VkMutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListVALVE &
- setDescriptorTypeCount( uint32_t descriptorTypeCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListEXT & setDescriptorTypeCount( uint32_t descriptorTypeCount_ ) VULKAN_HPP_NOEXCEPT
{
descriptorTypeCount = descriptorTypeCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListVALVE &
- setPDescriptorTypes( const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListEXT &
+ setPDescriptorTypes( const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes_ ) VULKAN_HPP_NOEXCEPT
{
pDescriptorTypes = pDescriptorTypes_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- MutableDescriptorTypeListVALVE & setDescriptorTypes(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorType> const &
- descriptorTypes_ ) VULKAN_HPP_NOEXCEPT
+ MutableDescriptorTypeListEXT & setDescriptorTypes(
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorType> const & descriptorTypes_ ) VULKAN_HPP_NOEXCEPT
{
descriptorTypeCount = static_cast<uint32_t>( descriptorTypes_.size() );
pDescriptorTypes = descriptorTypes_.data();
@@ -45153,17 +46315,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkMutableDescriptorTypeListVALVE const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMutableDescriptorTypeListEXT const &() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkMutableDescriptorTypeListVALVE *>( this );
+ return *reinterpret_cast<const VkMutableDescriptorTypeListEXT *>( this );
}
- explicit operator VkMutableDescriptorTypeListVALVE &() VULKAN_HPP_NOEXCEPT
+ operator VkMutableDescriptorTypeListEXT &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkMutableDescriptorTypeListVALVE *>( this );
+ return *reinterpret_cast<VkMutableDescriptorTypeListEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -45176,18 +46338,18 @@ namespace VULKAN_HPP_NAMESPACE
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( MutableDescriptorTypeListVALVE const & ) const = default;
+ auto operator<=>( MutableDescriptorTypeListEXT const & ) const = default;
#else
- bool operator==( MutableDescriptorTypeListVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( MutableDescriptorTypeListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( descriptorTypeCount == rhs.descriptorTypeCount ) && ( pDescriptorTypes == rhs.pDescriptorTypes );
# endif
}
- bool operator!=( MutableDescriptorTypeListVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( MutableDescriptorTypeListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
@@ -45197,84 +46359,77 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t descriptorTypeCount = {};
const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE ) ==
- sizeof( VkMutableDescriptorTypeListVALVE ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE>::value,
- "MutableDescriptorTypeListVALVE is not nothrow_move_constructible!" );
+ using MutableDescriptorTypeListVALVE = MutableDescriptorTypeListEXT;
- struct MutableDescriptorTypeCreateInfoVALVE
+ struct MutableDescriptorTypeCreateInfoEXT
{
- using NativeType = VkMutableDescriptorTypeCreateInfoVALVE;
+ using NativeType = VkMutableDescriptorTypeCreateInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eMutableDescriptorTypeCreateInfoVALVE;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMutableDescriptorTypeCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoVALVE(
- uint32_t mutableDescriptorTypeListCount_ = {},
- const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE * pMutableDescriptorTypeLists_ = {} )
- VULKAN_HPP_NOEXCEPT
- : mutableDescriptorTypeListCount( mutableDescriptorTypeListCount_ )
+ VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoEXT( uint32_t mutableDescriptorTypeListCount_ = {},
+ const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT * pMutableDescriptorTypeLists_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , mutableDescriptorTypeListCount( mutableDescriptorTypeListCount_ )
, pMutableDescriptorTypeLists( pMutableDescriptorTypeLists_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoVALVE( MutableDescriptorTypeCreateInfoVALVE const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoEXT( MutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- MutableDescriptorTypeCreateInfoVALVE( VkMutableDescriptorTypeCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
- : MutableDescriptorTypeCreateInfoVALVE( *reinterpret_cast<MutableDescriptorTypeCreateInfoVALVE const *>( &rhs ) )
- {}
+ MutableDescriptorTypeCreateInfoEXT( VkMutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : MutableDescriptorTypeCreateInfoEXT( *reinterpret_cast<MutableDescriptorTypeCreateInfoEXT const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- MutableDescriptorTypeCreateInfoVALVE(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE> const &
- mutableDescriptorTypeLists_ )
- : mutableDescriptorTypeListCount( static_cast<uint32_t>( mutableDescriptorTypeLists_.size() ) )
+ MutableDescriptorTypeCreateInfoEXT(
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT> const & mutableDescriptorTypeLists_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , mutableDescriptorTypeListCount( static_cast<uint32_t>( mutableDescriptorTypeLists_.size() ) )
, pMutableDescriptorTypeLists( mutableDescriptorTypeLists_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- MutableDescriptorTypeCreateInfoVALVE &
- operator=( MutableDescriptorTypeCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ MutableDescriptorTypeCreateInfoEXT & operator=( MutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- MutableDescriptorTypeCreateInfoVALVE &
- operator=( VkMutableDescriptorTypeCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
+ MutableDescriptorTypeCreateInfoEXT & operator=( VkMutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoVALVE const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoVALVE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoVALVE &
- setMutableDescriptorTypeListCount( uint32_t mutableDescriptorTypeListCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT &
+ setMutableDescriptorTypeListCount( uint32_t mutableDescriptorTypeListCount_ ) VULKAN_HPP_NOEXCEPT
{
mutableDescriptorTypeListCount = mutableDescriptorTypeListCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoVALVE & setPMutableDescriptorTypeLists(
- const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE * pMutableDescriptorTypeLists_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT &
+ setPMutableDescriptorTypeLists( const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT * pMutableDescriptorTypeLists_ ) VULKAN_HPP_NOEXCEPT
{
pMutableDescriptorTypeLists = pMutableDescriptorTypeLists_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- MutableDescriptorTypeCreateInfoVALVE & setMutableDescriptorTypeLists(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE> const &
- mutableDescriptorTypeLists_ ) VULKAN_HPP_NOEXCEPT
+ MutableDescriptorTypeCreateInfoEXT & setMutableDescriptorTypeLists(
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT> const & mutableDescriptorTypeLists_ )
+ VULKAN_HPP_NOEXCEPT
{
mutableDescriptorTypeListCount = static_cast<uint32_t>( mutableDescriptorTypeLists_.size() );
pMutableDescriptorTypeLists = mutableDescriptorTypeLists_.data();
@@ -45283,24 +46438,24 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkMutableDescriptorTypeCreateInfoVALVE const &() const VULKAN_HPP_NOEXCEPT
+ operator VkMutableDescriptorTypeCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkMutableDescriptorTypeCreateInfoVALVE *>( this );
+ return *reinterpret_cast<const VkMutableDescriptorTypeCreateInfoEXT *>( this );
}
- explicit operator VkMutableDescriptorTypeCreateInfoVALVE &() VULKAN_HPP_NOEXCEPT
+ operator VkMutableDescriptorTypeCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkMutableDescriptorTypeCreateInfoVALVE *>( this );
+ return *reinterpret_cast<VkMutableDescriptorTypeCreateInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
const void * const &,
uint32_t const &,
- const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE * const &>
+ const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -45309,44 +46464,654 @@ namespace VULKAN_HPP_NAMESPACE
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( MutableDescriptorTypeCreateInfoVALVE const & ) const = default;
+ auto operator<=>( MutableDescriptorTypeCreateInfoEXT const & ) const = default;
#else
- bool operator==( MutableDescriptorTypeCreateInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( MutableDescriptorTypeCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( mutableDescriptorTypeListCount == rhs.mutableDescriptorTypeListCount ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mutableDescriptorTypeListCount == rhs.mutableDescriptorTypeListCount ) &&
( pMutableDescriptorTypeLists == rhs.pMutableDescriptorTypeLists );
# endif
}
- bool operator!=( MutableDescriptorTypeCreateInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( MutableDescriptorTypeCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMutableDescriptorTypeCreateInfoVALVE;
- const void * pNext = {};
- uint32_t mutableDescriptorTypeListCount = {};
- const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE * pMutableDescriptorTypeLists = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMutableDescriptorTypeCreateInfoEXT;
+ const void * pNext = {};
+ uint32_t mutableDescriptorTypeListCount = {};
+ const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT * pMutableDescriptorTypeLists = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoVALVE ) ==
- sizeof( VkMutableDescriptorTypeCreateInfoVALVE ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoVALVE>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoVALVE>::value,
- "MutableDescriptorTypeCreateInfoVALVE is not nothrow_move_constructible!" );
template <>
- struct CppType<StructureType, StructureType::eMutableDescriptorTypeCreateInfoVALVE>
+ struct CppType<StructureType, StructureType::eMutableDescriptorTypeCreateInfoEXT>
{
- using Type = MutableDescriptorTypeCreateInfoVALVE;
+ using Type = MutableDescriptorTypeCreateInfoEXT;
+ };
+ using MutableDescriptorTypeCreateInfoVALVE = MutableDescriptorTypeCreateInfoEXT;
+
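A short sketch of the renamed mutable-descriptor wrappers (helper name and descriptor-type choices are illustrative), assuming the default vk namespace; only the count/pointer setters shown in this hunk are used, and the VALVE spellings remain valid aliases of the same types:

    #include <vulkan/vulkan.hpp>

    void fillMutableDescriptorTypeInfo()
    {
      // Two descriptor types a binding may mutate between (illustrative choice).
      static const vk::DescriptorType types[] = { vk::DescriptorType::eSampledImage,
                                                  vk::DescriptorType::eStorageImage };

      vk::MutableDescriptorTypeListEXT list{};
      list.setDescriptorTypeCount( 2 ).setPDescriptorTypes( types );

      vk::MutableDescriptorTypeCreateInfoEXT createInfo{};
      createInfo.setMutableDescriptorTypeListCount( 1 ).setPMutableDescriptorTypeLists( &list );

      // Older code can keep using the VALVE names; they now alias the EXT structs.
      vk::MutableDescriptorTypeCreateInfoVALVE & legacy = createInfo;
      (void)legacy;
    }
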
+ struct OpticalFlowExecuteInfoNV
+ {
+ using NativeType = VkOpticalFlowExecuteInfoNV;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowExecuteInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR OpticalFlowExecuteInfoNV( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV flags_ = {},
+ uint32_t regionCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Rect2D * pRegions_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
+ , regionCount( regionCount_ )
+ , pRegions( pRegions_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR OpticalFlowExecuteInfoNV( OpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ OpticalFlowExecuteInfoNV( VkOpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : OpticalFlowExecuteInfoNV( *reinterpret_cast<OpticalFlowExecuteInfoNV const *>( &rhs ) )
+ {
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ OpticalFlowExecuteInfoNV( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV flags_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & regions_,
+ void * pNext_ = nullptr )
+ : pNext( pNext_ ), flags( flags_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
+ {
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ OpticalFlowExecuteInfoNV & operator=( OpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ OpticalFlowExecuteInfoNV & operator=( VkOpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setFlags( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ regionCount = regionCount_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setPRegions( const VULKAN_HPP_NAMESPACE::Rect2D * pRegions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pRegions = pRegions_;
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ OpticalFlowExecuteInfoNV &
+ setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & regions_ ) VULKAN_HPP_NOEXCEPT
+ {
+ regionCount = static_cast<uint32_t>( regions_.size() );
+ pRegions = regions_.data();
+ return *this;
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkOpticalFlowExecuteInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( this );
+ }
+
+ operator VkOpticalFlowExecuteInfoNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkOpticalFlowExecuteInfoNV *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV const &,
+ uint32_t const &,
+ const VULKAN_HPP_NAMESPACE::Rect2D * const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, flags, regionCount, pRegions );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( OpticalFlowExecuteInfoNV const & ) const = default;
+#else
+ bool operator==( OpticalFlowExecuteInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
+# endif
+ }
+
+ bool operator!=( OpticalFlowExecuteInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowExecuteInfoNV;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV flags = {};
+ uint32_t regionCount = {};
+ const VULKAN_HPP_NAMESPACE::Rect2D * pRegions = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eOpticalFlowExecuteInfoNV>
+ {
+ using Type = OpticalFlowExecuteInfoNV;
+ };
+
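An illustrative sketch of filling the execute-info wrapper added above (default vk namespace assumed, helper name made up); pRegions points at caller-owned Rect2D storage, matching the count/pointer pair in the hunk:

    #include <vulkan/vulkan.hpp>

    void fillOpticalFlowExecuteInfo()
    {
      // One region of interest; real code would size this to the session dimensions.
      static const vk::Rect2D regions[] = { vk::Rect2D{ vk::Offset2D{ 0, 0 }, vk::Extent2D{ 256, 256 } } };

      vk::OpticalFlowExecuteInfoNV executeInfo{};
      executeInfo.setRegionCount( 1 ).setPRegions( regions );

      // flags and pNext keep their zero-initialized defaults from the constructor.
      (void)executeInfo;
    }
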
+ struct OpticalFlowImageFormatInfoNV
+ {
+ using NativeType = VkOpticalFlowImageFormatInfoNV;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowImageFormatInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatInfoNV( VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV usage_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , usage( usage_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatInfoNV( OpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ OpticalFlowImageFormatInfoNV( VkOpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : OpticalFlowImageFormatInfoNV( *reinterpret_cast<OpticalFlowImageFormatInfoNV const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ OpticalFlowImageFormatInfoNV & operator=( OpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ OpticalFlowImageFormatInfoNV & operator=( VkOpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowImageFormatInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowImageFormatInfoNV & setUsage( VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV usage_ ) VULKAN_HPP_NOEXCEPT
+ {
+ usage = usage_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkOpticalFlowImageFormatInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( this );
+ }
+
+ operator VkOpticalFlowImageFormatInfoNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkOpticalFlowImageFormatInfoNV *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, usage );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( OpticalFlowImageFormatInfoNV const & ) const = default;
+#else
+ bool operator==( OpticalFlowImageFormatInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( usage == rhs.usage );
+# endif
+ }
+
+ bool operator!=( OpticalFlowImageFormatInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowImageFormatInfoNV;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV usage = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eOpticalFlowImageFormatInfoNV>
+ {
+ using Type = OpticalFlowImageFormatInfoNV;
+ };
+
+ struct OpticalFlowImageFormatPropertiesNV
+ {
+ using NativeType = VkOpticalFlowImageFormatPropertiesNV;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowImageFormatPropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , format( format_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatPropertiesNV( OpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ OpticalFlowImageFormatPropertiesNV( VkOpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : OpticalFlowImageFormatPropertiesNV( *reinterpret_cast<OpticalFlowImageFormatPropertiesNV const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ OpticalFlowImageFormatPropertiesNV & operator=( OpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ OpticalFlowImageFormatPropertiesNV & operator=( VkOpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV const *>( &rhs );
+ return *this;
+ }
+
+ operator VkOpticalFlowImageFormatPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkOpticalFlowImageFormatPropertiesNV *>( this );
+ }
+
+ operator VkOpticalFlowImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Format const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, format );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( OpticalFlowImageFormatPropertiesNV const & ) const = default;
+#else
+ bool operator==( OpticalFlowImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format );
+# endif
+ }
+
+ bool operator!=( OpticalFlowImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowImageFormatPropertiesNV;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eOpticalFlowImageFormatPropertiesNV>
+ {
+ using Type = OpticalFlowImageFormatPropertiesNV;
+ };
+
+ struct OpticalFlowSessionCreateInfoNV
+ {
+ using NativeType = VkOpticalFlowSessionCreateInfoNV;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowSessionCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreateInfoNV(
+ uint32_t width_ = {},
+ uint32_t height_ = {},
+ VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::Format flowVectorFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::Format costFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV outputGridSize_ = {},
+ VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV hintGridSize_ = {},
+ VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV performanceLevel_ = VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV::eUnknown,
+ VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV flags_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , width( width_ )
+ , height( height_ )
+ , imageFormat( imageFormat_ )
+ , flowVectorFormat( flowVectorFormat_ )
+ , costFormat( costFormat_ )
+ , outputGridSize( outputGridSize_ )
+ , hintGridSize( hintGridSize_ )
+ , performanceLevel( performanceLevel_ )
+ , flags( flags_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreateInfoNV( OpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ OpticalFlowSessionCreateInfoNV( VkOpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : OpticalFlowSessionCreateInfoNV( *reinterpret_cast<OpticalFlowSessionCreateInfoNV const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ OpticalFlowSessionCreateInfoNV & operator=( OpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ OpticalFlowSessionCreateInfoNV & operator=( VkOpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+ {
+ width = width_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+ {
+ height = height_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT
+ {
+ imageFormat = imageFormat_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setFlowVectorFormat( VULKAN_HPP_NAMESPACE::Format flowVectorFormat_ ) VULKAN_HPP_NOEXCEPT
+ {
+ flowVectorFormat = flowVectorFormat_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setCostFormat( VULKAN_HPP_NAMESPACE::Format costFormat_ ) VULKAN_HPP_NOEXCEPT
+ {
+ costFormat = costFormat_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV &
+ setOutputGridSize( VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV outputGridSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ outputGridSize = outputGridSize_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV &
+ setHintGridSize( VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV hintGridSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ hintGridSize = hintGridSize_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV &
+ setPerformanceLevel( VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV performanceLevel_ ) VULKAN_HPP_NOEXCEPT
+ {
+ performanceLevel = performanceLevel_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ flags = flags_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkOpticalFlowSessionCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( this );
+ }
+
+ operator VkOpticalFlowSessionCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkOpticalFlowSessionCreateInfoNV *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ uint32_t const &,
+ uint32_t const &,
+ VULKAN_HPP_NAMESPACE::Format const &,
+ VULKAN_HPP_NAMESPACE::Format const &,
+ VULKAN_HPP_NAMESPACE::Format const &,
+ VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV const &,
+ VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV const &,
+ VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV const &,
+ VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, width, height, imageFormat, flowVectorFormat, costFormat, outputGridSize, hintGridSize, performanceLevel, flags );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( OpticalFlowSessionCreateInfoNV const & ) const = default;
+#else
+ bool operator==( OpticalFlowSessionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( width == rhs.width ) && ( height == rhs.height ) && ( imageFormat == rhs.imageFormat ) &&
+ ( flowVectorFormat == rhs.flowVectorFormat ) && ( costFormat == rhs.costFormat ) && ( outputGridSize == rhs.outputGridSize ) &&
+ ( hintGridSize == rhs.hintGridSize ) && ( performanceLevel == rhs.performanceLevel ) && ( flags == rhs.flags );
+# endif
+ }
+
+ bool operator!=( OpticalFlowSessionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowSessionCreateInfoNV;
+ void * pNext = {};
+ uint32_t width = {};
+ uint32_t height = {};
+ VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::Format flowVectorFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::Format costFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV outputGridSize = {};
+ VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV hintGridSize = {};
+ VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV performanceLevel = VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV::eUnknown;
+ VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV flags = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eOpticalFlowSessionCreateInfoNV>
+ {
+ using Type = OpticalFlowSessionCreateInfoNV;
+ };
+
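A sketch of the session-create wrapper above, assuming the default vk namespace; the image format and performance level below are illustrative placeholders, and a real application would pick values reported as supported for the device rather than hard-coding them:

    #include <vulkan/vulkan.hpp>

    vk::OpticalFlowSessionCreateInfoNV makeOpticalFlowSessionInfo()
    {
      vk::OpticalFlowSessionCreateInfoNV sessionInfo{};
      sessionInfo.setWidth( 1920 )
                 .setHeight( 1080 )
                 .setImageFormat( vk::Format::eB8G8R8A8Unorm )                      // placeholder input format
                 .setPerformanceLevel( vk::OpticalFlowPerformanceLevelNV::eFast );  // placeholder preset
      // flowVectorFormat, costFormat and the grid-size flags keep their defaults here;
      // they must be set to device-supported values before creating a session.
      return sessionInfo;
    }
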
+ struct OpticalFlowSessionCreatePrivateDataInfoNV
+ {
+ using NativeType = VkOpticalFlowSessionCreatePrivateDataInfoNV;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreatePrivateDataInfoNV( uint32_t id_ = {},
+ uint32_t size_ = {},
+ const void * pPrivateData_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , id( id_ )
+ , size( size_ )
+ , pPrivateData( pPrivateData_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreatePrivateDataInfoNV( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ OpticalFlowSessionCreatePrivateDataInfoNV( VkOpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : OpticalFlowSessionCreatePrivateDataInfoNV( *reinterpret_cast<OpticalFlowSessionCreatePrivateDataInfoNV const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ OpticalFlowSessionCreatePrivateDataInfoNV & operator=( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ OpticalFlowSessionCreatePrivateDataInfoNV & operator=( VkOpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setId( uint32_t id_ ) VULKAN_HPP_NOEXCEPT
+ {
+ id = id_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
+ {
+ size = size_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setPPrivateData( const void * pPrivateData_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pPrivateData = pPrivateData_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkOpticalFlowSessionCreatePrivateDataInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkOpticalFlowSessionCreatePrivateDataInfoNV *>( this );
+ }
+
+ operator VkOpticalFlowSessionCreatePrivateDataInfoNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkOpticalFlowSessionCreatePrivateDataInfoNV *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, const void * const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, id, size, pPrivateData );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( OpticalFlowSessionCreatePrivateDataInfoNV const & ) const = default;
+#else
+ bool operator==( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( id == rhs.id ) && ( size == rhs.size ) && ( pPrivateData == rhs.pPrivateData );
+# endif
+ }
+
+ bool operator!=( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV;
+ void * pNext = {};
+ uint32_t id = {};
+ uint32_t size = {};
+ const void * pPrivateData = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV>
+ {
+ using Type = OpticalFlowSessionCreatePrivateDataInfoNV;
};
struct PastPresentationTimingGOOGLE
@@ -45364,14 +47129,15 @@ namespace VULKAN_HPP_NAMESPACE
, actualPresentTime( actualPresentTime_ )
, earliestPresentTime( earliestPresentTime_ )
, presentMargin( presentMargin_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PastPresentationTimingGOOGLE( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PastPresentationTimingGOOGLE( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
: PastPresentationTimingGOOGLE( *reinterpret_cast<PastPresentationTimingGOOGLE const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PastPresentationTimingGOOGLE & operator=( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -45382,17 +47148,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkPastPresentationTimingGOOGLE const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPastPresentationTimingGOOGLE const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPastPresentationTimingGOOGLE *>( this );
}
- explicit operator VkPastPresentationTimingGOOGLE &() VULKAN_HPP_NOEXCEPT
+ operator VkPastPresentationTimingGOOGLE &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPastPresentationTimingGOOGLE *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -45409,12 +47175,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PastPresentationTimingGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( presentID == rhs.presentID ) && ( desiredPresentTime == rhs.desiredPresentTime ) &&
- ( actualPresentTime == rhs.actualPresentTime ) && ( earliestPresentTime == rhs.earliestPresentTime ) &&
- ( presentMargin == rhs.presentMargin );
+ return ( presentID == rhs.presentID ) && ( desiredPresentTime == rhs.desiredPresentTime ) && ( actualPresentTime == rhs.actualPresentTime ) &&
+ ( earliestPresentTime == rhs.earliestPresentTime ) && ( presentMargin == rhs.presentMargin );
# endif
}
@@ -45431,53 +47196,42 @@ namespace VULKAN_HPP_NAMESPACE
uint64_t earliestPresentTime = {};
uint64_t presentMargin = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE ) ==
- sizeof( VkPastPresentationTimingGOOGLE ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>::value,
- "PastPresentationTimingGOOGLE is not nothrow_move_constructible!" );
struct PerformanceConfigurationAcquireInfoINTEL
{
using NativeType = VkPerformanceConfigurationAcquireInfoINTEL;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePerformanceConfigurationAcquireInfoINTEL;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceConfigurationAcquireInfoINTEL;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL(
- VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ =
- VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated )
- VULKAN_HPP_NOEXCEPT : type( type_ )
- {}
+ VULKAN_HPP_CONSTEXPR
+ PerformanceConfigurationAcquireInfoINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ =
+ VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , type( type_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL(
- PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PerformanceConfigurationAcquireInfoINTEL(
- *reinterpret_cast<PerformanceConfigurationAcquireInfoINTEL const *>( &rhs ) )
- {}
+ PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PerformanceConfigurationAcquireInfoINTEL( *reinterpret_cast<PerformanceConfigurationAcquireInfoINTEL const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PerformanceConfigurationAcquireInfoINTEL &
- operator=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PerformanceConfigurationAcquireInfoINTEL & operator=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PerformanceConfigurationAcquireInfoINTEL &
- operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ PerformanceConfigurationAcquireInfoINTEL & operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
@@ -45491,23 +47245,21 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPerformanceConfigurationAcquireInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPerformanceConfigurationAcquireInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( this );
}
- explicit operator VkPerformanceConfigurationAcquireInfoINTEL &() VULKAN_HPP_NOEXCEPT
+ operator VkPerformanceConfigurationAcquireInfoINTEL &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPerformanceConfigurationAcquireInfoINTEL *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -45520,7 +47272,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type );
@@ -45534,20 +47286,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL;
+ const void * pNext = {};
VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type =
VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL ) ==
- sizeof( VkPerformanceConfigurationAcquireInfoINTEL ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL>::value,
- "PerformanceConfigurationAcquireInfoINTEL is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePerformanceConfigurationAcquireInfoINTEL>
@@ -45560,30 +47303,31 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPerformanceCounterDescriptionKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterDescriptionKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterDescriptionKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR(
- VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_ = {},
- std::array<char, VK_MAX_DESCRIPTION_SIZE> const & name_ = {},
- std::array<char, VK_MAX_DESCRIPTION_SIZE> const & category_ = {},
- std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_ = {},
+ std::array<char, VK_MAX_DESCRIPTION_SIZE> const & name_ = {},
+ std::array<char, VK_MAX_DESCRIPTION_SIZE> const & category_ = {},
+ std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, name( name_ )
, category( category_ )
, description( description_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14
- PerformanceCounterDescriptionKHR( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PerformanceCounterDescriptionKHR( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PerformanceCounterDescriptionKHR( *reinterpret_cast<PerformanceCounterDescriptionKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PerformanceCounterDescriptionKHR &
- operator=( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PerformanceCounterDescriptionKHR & operator=( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PerformanceCounterDescriptionKHR & operator=( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -45591,17 +47335,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkPerformanceCounterDescriptionKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPerformanceCounterDescriptionKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPerformanceCounterDescriptionKHR *>( this );
}
- explicit operator VkPerformanceCounterDescriptionKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPerformanceCounterDescriptionKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -45623,11 +47367,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( name == rhs.name ) &&
- ( category == rhs.category ) && ( description == rhs.description );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( name == rhs.name ) && ( category == rhs.category ) &&
+ ( description == rhs.description );
# endif
}
@@ -45638,21 +47382,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterDescriptionKHR;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterDescriptionKHR;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> category = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR ) ==
- sizeof( VkPerformanceCounterDescriptionKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>::value,
- "PerformanceCounterDescriptionKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePerformanceCounterDescriptionKHR>
@@ -45668,24 +47404,26 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR(
- VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit_ = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric,
- VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope_ =
- VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer,
- VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage_ =
- VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32,
- std::array<uint8_t, VK_UUID_SIZE> const & uuid_ = {} ) VULKAN_HPP_NOEXCEPT
- : unit( unit_ )
+ VULKAN_HPP_CONSTEXPR_14
+ PerformanceCounterKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit_ = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric,
+ VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope_ = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer,
+ VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage_ = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32,
+ std::array<uint8_t, VK_UUID_SIZE> const & uuid_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , unit( unit_ )
, scope( scope_ )
, storage( storage_ )
, uuid( uuid_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PerformanceCounterKHR( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PerformanceCounterKHR( *reinterpret_cast<PerformanceCounterKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PerformanceCounterKHR & operator=( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -45696,17 +47434,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkPerformanceCounterKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPerformanceCounterKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPerformanceCounterKHR *>( this );
}
- explicit operator VkPerformanceCounterKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPerformanceCounterKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPerformanceCounterKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -45728,11 +47466,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( unit == rhs.unit ) && ( scope == rhs.scope ) &&
- ( storage == rhs.storage ) && ( uuid == rhs.uuid );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( unit == rhs.unit ) && ( scope == rhs.scope ) && ( storage == rhs.storage ) &&
+ ( uuid == rhs.uuid );
# endif
}
@@ -45743,21 +47481,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterKHR;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric;
- VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope =
- VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer;
- VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage =
- VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32;
- VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> uuid = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterKHR;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric;
+ VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer;
+ VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32;
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> uuid = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterKHR ) == sizeof( VkPerformanceCounterKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>::value,
- "PerformanceCounterKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePerformanceCounterKHR>
@@ -45847,14 +47577,18 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceMarkerInfoINTEL;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( uint64_t marker_ = {} ) VULKAN_HPP_NOEXCEPT : marker( marker_ ) {}
+ VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( uint64_t marker_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , marker( marker_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PerformanceMarkerInfoINTEL( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
: PerformanceMarkerInfoINTEL( *reinterpret_cast<PerformanceMarkerInfoINTEL const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PerformanceMarkerInfoINTEL & operator=( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -45879,17 +47613,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPerformanceMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPerformanceMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( this );
}
- explicit operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
+ operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPerformanceMarkerInfoINTEL *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -45906,7 +47640,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( marker == rhs.marker );
@@ -45924,13 +47658,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
uint64_t marker = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL ) ==
- sizeof( VkPerformanceMarkerInfoINTEL ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL>::value,
- "PerformanceMarkerInfoINTEL is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePerformanceMarkerInfoINTEL>
@@ -45946,22 +47673,24 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceOverrideInfoINTEL;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PerformanceOverrideInfoINTEL( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ =
- VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware,
- VULKAN_HPP_NAMESPACE::Bool32 enable_ = {},
- uint64_t parameter_ = {} ) VULKAN_HPP_NOEXCEPT
- : type( type_ )
+ VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL(
+ VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware,
+ VULKAN_HPP_NAMESPACE::Bool32 enable_ = {},
+ uint64_t parameter_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , type( type_ )
, enable( enable_ )
, parameter( parameter_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PerformanceOverrideInfoINTEL( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
: PerformanceOverrideInfoINTEL( *reinterpret_cast<PerformanceOverrideInfoINTEL const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PerformanceOverrideInfoINTEL & operator=( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -45979,15 +47708,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL &
- setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL &
- setEnable( VULKAN_HPP_NAMESPACE::Bool32 enable_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setEnable( VULKAN_HPP_NAMESPACE::Bool32 enable_ ) VULKAN_HPP_NOEXCEPT
{
enable = enable_;
return *this;
@@ -46000,17 +47727,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPerformanceOverrideInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPerformanceOverrideInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( this );
}
- explicit operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT
+ operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPerformanceOverrideInfoINTEL *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -46031,11 +47758,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PerformanceOverrideInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( enable == rhs.enable ) &&
- ( parameter == rhs.parameter );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( enable == rhs.enable ) && ( parameter == rhs.parameter );
# endif
}
@@ -46046,21 +47772,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type =
- VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware;
- VULKAN_HPP_NAMESPACE::Bool32 enable = {};
- uint64_t parameter = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware;
+ VULKAN_HPP_NAMESPACE::Bool32 enable = {};
+ uint64_t parameter = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL ) ==
- sizeof( VkPerformanceOverrideInfoINTEL ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL>::value,
- "PerformanceOverrideInfoINTEL is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePerformanceOverrideInfoINTEL>
@@ -46076,20 +47793,21 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceQuerySubmitInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( uint32_t counterPassIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : counterPassIndex( counterPassIndex_ )
- {}
+ VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( uint32_t counterPassIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , counterPassIndex( counterPassIndex_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PerformanceQuerySubmitInfoKHR( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PerformanceQuerySubmitInfoKHR( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PerformanceQuerySubmitInfoKHR( *reinterpret_cast<PerformanceQuerySubmitInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PerformanceQuerySubmitInfoKHR &
- operator=( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PerformanceQuerySubmitInfoKHR & operator=( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PerformanceQuerySubmitInfoKHR & operator=( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -46104,25 +47822,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR &
- setCounterPassIndex( uint32_t counterPassIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR & setCounterPassIndex( uint32_t counterPassIndex_ ) VULKAN_HPP_NOEXCEPT
{
counterPassIndex = counterPassIndex_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPerformanceQuerySubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPerformanceQuerySubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPerformanceQuerySubmitInfoKHR *>( this );
}
- explicit operator VkPerformanceQuerySubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPerformanceQuerySubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPerformanceQuerySubmitInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -46139,7 +47856,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( counterPassIndex == rhs.counterPassIndex );
@@ -46157,14 +47874,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
uint32_t counterPassIndex = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR ) ==
- sizeof( VkPerformanceQuerySubmitInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR>::value,
- "PerformanceQuerySubmitInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePerformanceQuerySubmitInfoKHR>
@@ -46177,23 +47886,24 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPerformanceStreamMarkerInfoINTEL;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceStreamMarkerInfoINTEL;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceStreamMarkerInfoINTEL;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( uint32_t marker_ = {} ) VULKAN_HPP_NOEXCEPT
- : marker( marker_ )
- {}
+ VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( uint32_t marker_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , marker( marker_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PerformanceStreamMarkerInfoINTEL( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
: PerformanceStreamMarkerInfoINTEL( *reinterpret_cast<PerformanceStreamMarkerInfoINTEL const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PerformanceStreamMarkerInfoINTEL &
- operator=( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PerformanceStreamMarkerInfoINTEL & operator=( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PerformanceStreamMarkerInfoINTEL & operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -46215,17 +47925,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPerformanceStreamMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPerformanceStreamMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( this );
}
- explicit operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
+ operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPerformanceStreamMarkerInfoINTEL *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -46242,7 +47952,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PerformanceStreamMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( marker == rhs.marker );
@@ -46260,14 +47970,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
uint32_t marker = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL ) ==
- sizeof( VkPerformanceStreamMarkerInfoINTEL ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL>::value,
- "PerformanceStreamMarkerInfoINTEL is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePerformanceStreamMarkerInfoINTEL>
@@ -46308,8 +48010,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL &
- setValueBool( VULKAN_HPP_NAMESPACE::Bool32 valueBool_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValueBool( VULKAN_HPP_NAMESPACE::Bool32 valueBool_ ) VULKAN_HPP_NOEXCEPT
{
valueBool = valueBool_;
return *this;
@@ -46352,18 +48053,20 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPerformanceValueINTEL;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL(
- VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32,
- VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14
+ PerformanceValueINTEL( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32,
+ VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ = {} ) VULKAN_HPP_NOEXCEPT
: type( type_ )
, data( data_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PerformanceValueINTEL( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
: PerformanceValueINTEL( *reinterpret_cast<PerformanceValueINTEL const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PerformanceValueINTEL & operator=( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -46375,37 +48078,34 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL &
- setType( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL &
- setData( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & data_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL & setData( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & data_ ) VULKAN_HPP_NOEXCEPT
{
data = data_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPerformanceValueINTEL const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPerformanceValueINTEL const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPerformanceValueINTEL *>( this );
}
- explicit operator VkPerformanceValueINTEL &() VULKAN_HPP_NOEXCEPT
+ operator VkPerformanceValueINTEL &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPerformanceValueINTEL *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL const &,
- VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL const &, VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -46417,46 +48117,39 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32;
VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueINTEL ) == sizeof( VkPerformanceValueINTEL ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::value,
- "PerformanceValueINTEL is not nothrow_move_constructible!" );
struct PhysicalDevice16BitStorageFeatures
{
using NativeType = VkPhysicalDevice16BitStorageFeatures;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDevice16BitStorageFeatures;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice16BitStorageFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDevice16BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {} ) VULKAN_HPP_NOEXCEPT
- : storageBuffer16BitAccess( storageBuffer16BitAccess_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , storageBuffer16BitAccess( storageBuffer16BitAccess_ )
, uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ )
, storagePushConstant16( storagePushConstant16_ )
, storageInputOutput16( storageInputOutput16_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( PhysicalDevice16BitStorageFeatures const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevice16BitStorageFeatures( *reinterpret_cast<PhysicalDevice16BitStorageFeatures const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDevice16BitStorageFeatures &
- operator=( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDevice16BitStorageFeatures & operator=( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDevice16BitStorageFeatures &
- operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevice16BitStorageFeatures & operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures const *>( &rhs );
return *this;
@@ -46470,45 +48163,45 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures &
- setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
+ setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
{
storageBuffer16BitAccess = storageBuffer16BitAccess_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setUniformAndStorageBuffer16BitAccess(
- VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures &
+ setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
{
uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures &
- setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT
+ setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT
{
storagePushConstant16 = storagePushConstant16_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures &
- setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT
+ setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT
{
storageInputOutput16 = storageInputOutput16_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDevice16BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevice16BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevice16BitStorageFeatures *>( this );
}
- explicit operator VkPhysicalDevice16BitStorageFeatures &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevice16BitStorageFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevice16BitStorageFeatures *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -46521,12 +48214,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- storageBuffer16BitAccess,
- uniformAndStorageBuffer16BitAccess,
- storagePushConstant16,
- storageInputOutput16 );
+ return std::tie( sType, pNext, storageBuffer16BitAccess, uniformAndStorageBuffer16BitAccess, storagePushConstant16, storageInputOutput16 );
}
#endif
@@ -46535,13 +48223,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) &&
- ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) &&
- ( storagePushConstant16 == rhs.storagePushConstant16 ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) &&
+ ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) && ( storagePushConstant16 == rhs.storagePushConstant16 ) &&
( storageInputOutput16 == rhs.storageInputOutput16 );
# endif
}
@@ -46553,21 +48239,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {};
VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures ) ==
- sizeof( VkPhysicalDevice16BitStorageFeatures ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures>::value,
- "PhysicalDevice16BitStorageFeatures is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDevice16BitStorageFeatures>
@@ -46581,30 +48259,29 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDevice4444FormatsFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDevice4444FormatsFeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDevice4444FormatsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ = {} ) VULKAN_HPP_NOEXCEPT
- : formatA4R4G4B4( formatA4R4G4B4_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , formatA4R4G4B4( formatA4R4G4B4_ )
, formatA4B4G4R4( formatA4B4G4R4_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT( PhysicalDevice4444FormatsFeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevice4444FormatsFeaturesEXT( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevice4444FormatsFeaturesEXT( *reinterpret_cast<PhysicalDevice4444FormatsFeaturesEXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDevice4444FormatsFeaturesEXT &
- operator=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDevice4444FormatsFeaturesEXT & operator=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDevice4444FormatsFeaturesEXT &
- operator=( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevice4444FormatsFeaturesEXT & operator=( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT const *>( &rhs );
return *this;
@@ -46617,39 +48294,34 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT &
- setFormatA4R4G4B4( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setFormatA4R4G4B4( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ ) VULKAN_HPP_NOEXCEPT
{
formatA4R4G4B4 = formatA4R4G4B4_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT &
- setFormatA4B4G4R4( VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setFormatA4B4G4R4( VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ ) VULKAN_HPP_NOEXCEPT
{
formatA4B4G4R4 = formatA4B4G4R4_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDevice4444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevice4444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevice4444FormatsFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDevice4444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevice4444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevice4444FormatsFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -46662,11 +48334,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatA4R4G4B4 == rhs.formatA4R4G4B4 ) &&
- ( formatA4B4G4R4 == rhs.formatA4B4G4R4 );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatA4R4G4B4 == rhs.formatA4R4G4B4 ) && ( formatA4B4G4R4 == rhs.formatA4B4G4R4 );
# endif
}
@@ -46682,14 +48353,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4 = {};
VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4 = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT ) ==
- sizeof( VkPhysicalDevice4444FormatsFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT>::value,
- "PhysicalDevice4444FormatsFeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDevice4444FormatsFeaturesEXT>
@@ -46702,29 +48365,29 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDevice8BitStorageFeatures;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDevice8BitStorageFeatures;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice8BitStorageFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDevice8BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {} ) VULKAN_HPP_NOEXCEPT
- : storageBuffer8BitAccess( storageBuffer8BitAccess_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , storageBuffer8BitAccess( storageBuffer8BitAccess_ )
, uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ )
, storagePushConstant8( storagePushConstant8_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PhysicalDevice8BitStorageFeatures( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevice8BitStorageFeatures( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevice8BitStorageFeatures( *reinterpret_cast<PhysicalDevice8BitStorageFeatures const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDevice8BitStorageFeatures &
- operator=( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDevice8BitStorageFeatures & operator=( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevice8BitStorageFeatures & operator=( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -46740,38 +48403,38 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures &
- setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
+ setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
{
storageBuffer8BitAccess = storageBuffer8BitAccess_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setUniformAndStorageBuffer8BitAccess(
- VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures &
+ setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
{
uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures &
- setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT
+ setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT
{
storagePushConstant8 = storagePushConstant8_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDevice8BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevice8BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevice8BitStorageFeatures *>( this );
}
- explicit operator VkPhysicalDevice8BitStorageFeatures &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevice8BitStorageFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevice8BitStorageFeatures *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -46792,13 +48455,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) &&
- ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) &&
- ( storagePushConstant8 == rhs.storagePushConstant8 );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) &&
+ ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) && ( storagePushConstant8 == rhs.storagePushConstant8 );
# endif
}
@@ -46809,20 +48470,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice8BitStorageFeatures;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice8BitStorageFeatures;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures ) ==
- sizeof( VkPhysicalDevice8BitStorageFeatures ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures>::value,
- "PhysicalDevice8BitStorageFeatures is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDevice8BitStorageFeatures>
@@ -46836,28 +48489,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceASTCDecodeFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ = {} ) VULKAN_HPP_NOEXCEPT
- : decodeModeSharedExponent( decodeModeSharedExponent_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , decodeModeSharedExponent( decodeModeSharedExponent_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceASTCDecodeFeaturesEXT( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceASTCDecodeFeaturesEXT( *reinterpret_cast<PhysicalDeviceASTCDecodeFeaturesEXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceASTCDecodeFeaturesEXT &
- operator=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceASTCDecodeFeaturesEXT & operator=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceASTCDecodeFeaturesEXT &
- operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceASTCDecodeFeaturesEXT & operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT const *>( &rhs );
return *this;
@@ -46871,24 +48523,24 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT &
- setDecodeModeSharedExponent( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ ) VULKAN_HPP_NOEXCEPT
+ setDecodeModeSharedExponent( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ ) VULKAN_HPP_NOEXCEPT
{
decodeModeSharedExponent = decodeModeSharedExponent_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceASTCDecodeFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceASTCDecodeFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceASTCDecodeFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceASTCDecodeFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceASTCDecodeFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceASTCDecodeFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -46905,11 +48557,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( decodeModeSharedExponent == rhs.decodeModeSharedExponent );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( decodeModeSharedExponent == rhs.decodeModeSharedExponent );
# endif
}
@@ -46924,14 +48575,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT ) ==
- sizeof( VkPhysicalDeviceASTCDecodeFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT>::value,
- "PhysicalDeviceASTCDecodeFeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT>
@@ -46944,99 +48587,96 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceAccelerationStructureFeaturesKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructureFeaturesKHR(
- VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ = {} ) VULKAN_HPP_NOEXCEPT
- : accelerationStructure( accelerationStructure_ )
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceAccelerationStructureFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , accelerationStructure( accelerationStructure_ )
, accelerationStructureCaptureReplay( accelerationStructureCaptureReplay_ )
, accelerationStructureIndirectBuild( accelerationStructureIndirectBuild_ )
, accelerationStructureHostCommands( accelerationStructureHostCommands_ )
, descriptorBindingAccelerationStructureUpdateAfterBind( descriptorBindingAccelerationStructureUpdateAfterBind_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructureFeaturesKHR(
- PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceAccelerationStructureFeaturesKHR( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceAccelerationStructureFeaturesKHR( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceAccelerationStructureFeaturesKHR(
- *reinterpret_cast<PhysicalDeviceAccelerationStructureFeaturesKHR const *>( &rhs ) )
- {}
+ PhysicalDeviceAccelerationStructureFeaturesKHR( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceAccelerationStructureFeaturesKHR( *reinterpret_cast<PhysicalDeviceAccelerationStructureFeaturesKHR const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceAccelerationStructureFeaturesKHR &
- operator=( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceAccelerationStructureFeaturesKHR & operator=( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceAccelerationStructureFeaturesKHR &
- operator=( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceAccelerationStructureFeaturesKHR & operator=( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR &
- setAccelerationStructure( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
+ setAccelerationStructure( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructure = accelerationStructure_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureCaptureReplay(
- VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR &
+ setAccelerationStructureCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructureCaptureReplay = accelerationStructureCaptureReplay_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureIndirectBuild(
- VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR &
+ setAccelerationStructureIndirectBuild( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructureIndirectBuild = accelerationStructureIndirectBuild_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureHostCommands(
- VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR &
+ setAccelerationStructureHostCommands( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructureHostCommands = accelerationStructureHostCommands_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR &
- setDescriptorBindingAccelerationStructureUpdateAfterBind(
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setDescriptorBindingAccelerationStructureUpdateAfterBind(
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingAccelerationStructureUpdateAfterBind = descriptorBindingAccelerationStructureUpdateAfterBind_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceAccelerationStructureFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceAccelerationStructureFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceAccelerationStructureFeaturesKHR *>( this );
}
- explicit operator VkPhysicalDeviceAccelerationStructureFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceAccelerationStructureFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceAccelerationStructureFeaturesKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -47065,16 +48705,14 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( accelerationStructure == rhs.accelerationStructure ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructure == rhs.accelerationStructure ) &&
( accelerationStructureCaptureReplay == rhs.accelerationStructureCaptureReplay ) &&
( accelerationStructureIndirectBuild == rhs.accelerationStructureIndirectBuild ) &&
( accelerationStructureHostCommands == rhs.accelerationStructureHostCommands ) &&
- ( descriptorBindingAccelerationStructureUpdateAfterBind ==
- rhs.descriptorBindingAccelerationStructureUpdateAfterBind );
+ ( descriptorBindingAccelerationStructureUpdateAfterBind == rhs.descriptorBindingAccelerationStructureUpdateAfterBind );
# endif
}
@@ -47085,23 +48723,14 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure = {};
VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay = {};
VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild = {};
VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR ) ==
- sizeof( VkPhysicalDeviceAccelerationStructureFeaturesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR>::value,
- "PhysicalDeviceAccelerationStructureFeaturesKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR>
@@ -47114,61 +48743,58 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceAccelerationStructurePropertiesKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructurePropertiesKHR(
- uint64_t maxGeometryCount_ = {},
- uint64_t maxInstanceCount_ = {},
- uint64_t maxPrimitiveCount_ = {},
- uint32_t maxPerStageDescriptorAccelerationStructures_ = {},
- uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ = {},
- uint32_t maxDescriptorSetAccelerationStructures_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures_ = {},
- uint32_t minAccelerationStructureScratchOffsetAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxGeometryCount( maxGeometryCount_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructurePropertiesKHR( uint64_t maxGeometryCount_ = {},
+ uint64_t maxInstanceCount_ = {},
+ uint64_t maxPrimitiveCount_ = {},
+ uint32_t maxPerStageDescriptorAccelerationStructures_ = {},
+ uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ = {},
+ uint32_t maxDescriptorSetAccelerationStructures_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures_ = {},
+ uint32_t minAccelerationStructureScratchOffsetAlignment_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxGeometryCount( maxGeometryCount_ )
, maxInstanceCount( maxInstanceCount_ )
, maxPrimitiveCount( maxPrimitiveCount_ )
, maxPerStageDescriptorAccelerationStructures( maxPerStageDescriptorAccelerationStructures_ )
- , maxPerStageDescriptorUpdateAfterBindAccelerationStructures(
- maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ )
+ , maxPerStageDescriptorUpdateAfterBindAccelerationStructures( maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ )
, maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ )
, maxDescriptorSetUpdateAfterBindAccelerationStructures( maxDescriptorSetUpdateAfterBindAccelerationStructures_ )
, minAccelerationStructureScratchOffsetAlignment( minAccelerationStructureScratchOffsetAlignment_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructurePropertiesKHR(
- PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceAccelerationStructurePropertiesKHR( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceAccelerationStructurePropertiesKHR( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceAccelerationStructurePropertiesKHR(
- *reinterpret_cast<PhysicalDeviceAccelerationStructurePropertiesKHR const *>( &rhs ) )
- {}
+ PhysicalDeviceAccelerationStructurePropertiesKHR( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceAccelerationStructurePropertiesKHR( *reinterpret_cast<PhysicalDeviceAccelerationStructurePropertiesKHR const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceAccelerationStructurePropertiesKHR &
- operator=( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceAccelerationStructurePropertiesKHR & operator=( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceAccelerationStructurePropertiesKHR &
- operator=( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceAccelerationStructurePropertiesKHR & operator=( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceAccelerationStructurePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceAccelerationStructurePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceAccelerationStructurePropertiesKHR *>( this );
}
- explicit operator VkPhysicalDeviceAccelerationStructurePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceAccelerationStructurePropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceAccelerationStructurePropertiesKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -47203,17 +48829,15 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxGeometryCount == rhs.maxGeometryCount ) &&
- ( maxInstanceCount == rhs.maxInstanceCount ) && ( maxPrimitiveCount == rhs.maxPrimitiveCount ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxGeometryCount == rhs.maxGeometryCount ) && ( maxInstanceCount == rhs.maxInstanceCount ) &&
+ ( maxPrimitiveCount == rhs.maxPrimitiveCount ) &&
( maxPerStageDescriptorAccelerationStructures == rhs.maxPerStageDescriptorAccelerationStructures ) &&
- ( maxPerStageDescriptorUpdateAfterBindAccelerationStructures ==
- rhs.maxPerStageDescriptorUpdateAfterBindAccelerationStructures ) &&
+ ( maxPerStageDescriptorUpdateAfterBindAccelerationStructures == rhs.maxPerStageDescriptorUpdateAfterBindAccelerationStructures ) &&
( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures ) &&
- ( maxDescriptorSetUpdateAfterBindAccelerationStructures ==
- rhs.maxDescriptorSetUpdateAfterBindAccelerationStructures ) &&
+ ( maxDescriptorSetUpdateAfterBindAccelerationStructures == rhs.maxDescriptorSetUpdateAfterBindAccelerationStructures ) &&
( minAccelerationStructureScratchOffsetAlignment == rhs.minAccelerationStructureScratchOffsetAlignment );
# endif
}
@@ -47225,26 +48849,17 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR;
- void * pNext = {};
- uint64_t maxGeometryCount = {};
- uint64_t maxInstanceCount = {};
- uint64_t maxPrimitiveCount = {};
- uint32_t maxPerStageDescriptorAccelerationStructures = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR;
+ void * pNext = {};
+ uint64_t maxGeometryCount = {};
+ uint64_t maxInstanceCount = {};
+ uint64_t maxPrimitiveCount = {};
+ uint32_t maxPerStageDescriptorAccelerationStructures = {};
uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures = {};
uint32_t maxDescriptorSetAccelerationStructures = {};
uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures = {};
uint32_t minAccelerationStructureScratchOffsetAlignment = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR ) ==
- sizeof( VkPhysicalDeviceAccelerationStructurePropertiesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR>::value,
- "PhysicalDeviceAccelerationStructurePropertiesKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR>
@@ -47252,67 +48867,360 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PhysicalDeviceAccelerationStructurePropertiesKHR;
};
+ struct PhysicalDeviceAddressBindingReportFeaturesEXT
+ {
+ using NativeType = VkPhysicalDeviceAddressBindingReportFeaturesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceAddressBindingReportFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 reportAddressBinding_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , reportAddressBinding( reportAddressBinding_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceAddressBindingReportFeaturesEXT( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceAddressBindingReportFeaturesEXT( VkPhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceAddressBindingReportFeaturesEXT( *reinterpret_cast<PhysicalDeviceAddressBindingReportFeaturesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceAddressBindingReportFeaturesEXT & operator=( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceAddressBindingReportFeaturesEXT & operator=( VkPhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAddressBindingReportFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAddressBindingReportFeaturesEXT &
+ setReportAddressBinding( VULKAN_HPP_NAMESPACE::Bool32 reportAddressBinding_ ) VULKAN_HPP_NOEXCEPT
+ {
+ reportAddressBinding = reportAddressBinding_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceAddressBindingReportFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceAddressBindingReportFeaturesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceAddressBindingReportFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceAddressBindingReportFeaturesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, reportAddressBinding );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceAddressBindingReportFeaturesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( reportAddressBinding == rhs.reportAddressBinding );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 reportAddressBinding = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT>
+ {
+ using Type = PhysicalDeviceAddressBindingReportFeaturesEXT;
+ };
+
+ struct PhysicalDeviceAmigoProfilingFeaturesSEC
+ {
+ using NativeType = VkPhysicalDeviceAmigoProfilingFeaturesSEC;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceAmigoProfilingFeaturesSEC( VULKAN_HPP_NAMESPACE::Bool32 amigoProfiling_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , amigoProfiling( amigoProfiling_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceAmigoProfilingFeaturesSEC( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceAmigoProfilingFeaturesSEC( VkPhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceAmigoProfilingFeaturesSEC( *reinterpret_cast<PhysicalDeviceAmigoProfilingFeaturesSEC const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceAmigoProfilingFeaturesSEC & operator=( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceAmigoProfilingFeaturesSEC & operator=( VkPhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAmigoProfilingFeaturesSEC const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAmigoProfilingFeaturesSEC & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAmigoProfilingFeaturesSEC & setAmigoProfiling( VULKAN_HPP_NAMESPACE::Bool32 amigoProfiling_ ) VULKAN_HPP_NOEXCEPT
+ {
+ amigoProfiling = amigoProfiling_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceAmigoProfilingFeaturesSEC const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceAmigoProfilingFeaturesSEC *>( this );
+ }
+
+ operator VkPhysicalDeviceAmigoProfilingFeaturesSEC &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceAmigoProfilingFeaturesSEC *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, amigoProfiling );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceAmigoProfilingFeaturesSEC const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( amigoProfiling == rhs.amigoProfiling );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 amigoProfiling = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC>
+ {
+ using Type = PhysicalDeviceAmigoProfilingFeaturesSEC;
+ };
+
+ struct PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT
+ {
+ using NativeType = VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopLayout_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , attachmentFeedbackLoopLayout( attachmentFeedbackLoopLayout_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( *reinterpret_cast<PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT &
+ operator=( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & operator=( VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT &
+ setAttachmentFeedbackLoopLayout( VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopLayout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ attachmentFeedbackLoopLayout = attachmentFeedbackLoopLayout_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, attachmentFeedbackLoopLayout );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentFeedbackLoopLayout == rhs.attachmentFeedbackLoopLayout );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopLayout = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT>
+ {
+ using Type = PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;
+ };
+
struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT
{
using NativeType = VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ = {} ) VULKAN_HPP_NOEXCEPT
- : advancedBlendCoherentOperations( advancedBlendCoherentOperations_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , advancedBlendCoherentOperations( advancedBlendCoherentOperations_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT(
- PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceBlendOperationAdvancedFeaturesEXT( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceBlendOperationAdvancedFeaturesEXT(
- *reinterpret_cast<PhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceBlendOperationAdvancedFeaturesEXT( *reinterpret_cast<PhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceBlendOperationAdvancedFeaturesEXT &
- operator=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceBlendOperationAdvancedFeaturesEXT &
- operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setAdvancedBlendCoherentOperations(
- VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT &
+ setAdvancedBlendCoherentOperations( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ ) VULKAN_HPP_NOEXCEPT
{
advancedBlendCoherentOperations = advancedBlendCoherentOperations_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -47329,11 +49237,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations );
# endif
}
@@ -47344,19 +49251,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) ==
- sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT>::value,
- "PhysicalDeviceBlendOperationAdvancedFeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT>
@@ -47369,57 +49267,55 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT(
- uint32_t advancedBlendMaxColorAttachments_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations_ = {} ) VULKAN_HPP_NOEXCEPT
- : advancedBlendMaxColorAttachments( advancedBlendMaxColorAttachments_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( uint32_t advancedBlendMaxColorAttachments_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , advancedBlendMaxColorAttachments( advancedBlendMaxColorAttachments_ )
, advancedBlendIndependentBlend( advancedBlendIndependentBlend_ )
, advancedBlendNonPremultipliedSrcColor( advancedBlendNonPremultipliedSrcColor_ )
, advancedBlendNonPremultipliedDstColor( advancedBlendNonPremultipliedDstColor_ )
, advancedBlendCorrelatedOverlap( advancedBlendCorrelatedOverlap_ )
, advancedBlendAllOperations( advancedBlendAllOperations_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT(
- PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceBlendOperationAdvancedPropertiesEXT( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceBlendOperationAdvancedPropertiesEXT( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceBlendOperationAdvancedPropertiesEXT(
- *reinterpret_cast<PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceBlendOperationAdvancedPropertiesEXT( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceBlendOperationAdvancedPropertiesEXT( *reinterpret_cast<PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceBlendOperationAdvancedPropertiesEXT &
operator=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceBlendOperationAdvancedPropertiesEXT &
- operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT *>( this );
}
- explicit operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -47450,16 +49346,14 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments ) &&
( advancedBlendIndependentBlend == rhs.advancedBlendIndependentBlend ) &&
( advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor ) &&
( advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor ) &&
- ( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap ) &&
- ( advancedBlendAllOperations == rhs.advancedBlendAllOperations );
+ ( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap ) && ( advancedBlendAllOperations == rhs.advancedBlendAllOperations );
# endif
}
@@ -47470,8 +49364,8 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+ void * pNext = {};
uint32_t advancedBlendMaxColorAttachments = {};
VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend = {};
VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor = {};
@@ -47479,15 +49373,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap = {};
VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) ==
- sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT>::value,
- "PhysicalDeviceBlendOperationAdvancedPropertiesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT>
@@ -47500,32 +49385,29 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceBorderColorSwizzleFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceBorderColorSwizzleFeaturesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage_ = {} ) VULKAN_HPP_NOEXCEPT
- : borderColorSwizzle( borderColorSwizzle_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceBorderColorSwizzleFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , borderColorSwizzle( borderColorSwizzle_ )
, borderColorSwizzleFromImage( borderColorSwizzleFromImage_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceBorderColorSwizzleFeaturesEXT(
- PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceBorderColorSwizzleFeaturesEXT( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceBorderColorSwizzleFeaturesEXT( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceBorderColorSwizzleFeaturesEXT(
- *reinterpret_cast<PhysicalDeviceBorderColorSwizzleFeaturesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceBorderColorSwizzleFeaturesEXT( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceBorderColorSwizzleFeaturesEXT( *reinterpret_cast<PhysicalDeviceBorderColorSwizzleFeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceBorderColorSwizzleFeaturesEXT &
- operator=( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceBorderColorSwizzleFeaturesEXT & operator=( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceBorderColorSwizzleFeaturesEXT &
- operator=( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceBorderColorSwizzleFeaturesEXT & operator=( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT const *>( &rhs );
return *this;
@@ -47539,38 +49421,35 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT &
- setBorderColorSwizzle( VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle_ ) VULKAN_HPP_NOEXCEPT
+ setBorderColorSwizzle( VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle_ ) VULKAN_HPP_NOEXCEPT
{
borderColorSwizzle = borderColorSwizzle_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT &
- setBorderColorSwizzleFromImage( VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage_ ) VULKAN_HPP_NOEXCEPT
+ setBorderColorSwizzleFromImage( VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage_ ) VULKAN_HPP_NOEXCEPT
{
borderColorSwizzleFromImage = borderColorSwizzleFromImage_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceBorderColorSwizzleFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceBorderColorSwizzleFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -47583,7 +49462,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( borderColorSwizzle == rhs.borderColorSwizzle ) &&
@@ -47598,20 +49477,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle = {};
VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT ) ==
- sizeof( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT>::value,
- "PhysicalDeviceBorderColorSwizzleFeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT>
@@ -47624,34 +49494,31 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceBufferDeviceAddressFeatures;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceBufferDeviceAddressFeatures;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures(
- VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {} ) VULKAN_HPP_NOEXCEPT
- : bufferDeviceAddress( bufferDeviceAddress_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , bufferDeviceAddress( bufferDeviceAddress_ )
, bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ )
, bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures(
- PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceBufferDeviceAddressFeatures( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceBufferDeviceAddressFeatures(
- *reinterpret_cast<PhysicalDeviceBufferDeviceAddressFeatures const *>( &rhs ) )
- {}
+ PhysicalDeviceBufferDeviceAddressFeatures( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceBufferDeviceAddressFeatures( *reinterpret_cast<PhysicalDeviceBufferDeviceAddressFeatures const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceBufferDeviceAddressFeatures &
- operator=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceBufferDeviceAddressFeatures & operator=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceBufferDeviceAddressFeatures &
- operator=( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceBufferDeviceAddressFeatures & operator=( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures const *>( &rhs );
return *this;
@@ -47665,38 +49532,38 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures &
- setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
+ setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
{
bufferDeviceAddress = bufferDeviceAddress_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressCaptureReplay(
- VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures &
+ setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
{
bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressMultiDevice(
- VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures &
+ setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
{
bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceBufferDeviceAddressFeatures const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceBufferDeviceAddressFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeatures *>( this );
}
- explicit operator VkPhysicalDeviceBufferDeviceAddressFeatures &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceBufferDeviceAddressFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeatures *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -47708,8 +49575,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- sType, pNext, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice );
+ return std::tie( sType, pNext, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice );
}
#endif
@@ -47718,7 +49584,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) &&
@@ -47734,21 +49600,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures ) ==
- sizeof( VkPhysicalDeviceBufferDeviceAddressFeatures ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures>::value,
- "PhysicalDeviceBufferDeviceAddressFeatures is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceBufferDeviceAddressFeatures>
@@ -47762,34 +49619,31 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceBufferDeviceAddressFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {} ) VULKAN_HPP_NOEXCEPT
- : bufferDeviceAddress( bufferDeviceAddress_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , bufferDeviceAddress( bufferDeviceAddress_ )
, bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ )
, bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT(
- PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceBufferDeviceAddressFeaturesEXT( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceBufferDeviceAddressFeaturesEXT(
- *reinterpret_cast<PhysicalDeviceBufferDeviceAddressFeaturesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceBufferDeviceAddressFeaturesEXT( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceBufferDeviceAddressFeaturesEXT( *reinterpret_cast<PhysicalDeviceBufferDeviceAddressFeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceBufferDeviceAddressFeaturesEXT &
- operator=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceBufferDeviceAddressFeaturesEXT &
- operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT const *>( &rhs );
return *this;
@@ -47803,38 +49657,38 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT &
- setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
+ setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
{
bufferDeviceAddress = bufferDeviceAddress_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressCaptureReplay(
- VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT &
+ setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
{
bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressMultiDevice(
- VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT &
+ setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
{
bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -47846,8 +49700,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- sType, pNext, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice );
+ return std::tie( sType, pNext, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice );
}
#endif
@@ -47856,7 +49709,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) &&
@@ -47872,21 +49725,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT ) ==
- sizeof( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT>::value,
- "PhysicalDeviceBufferDeviceAddressFeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT>
@@ -47900,29 +49744,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceCoherentMemoryFeaturesAMD;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD(
- VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ = {} ) VULKAN_HPP_NOEXCEPT
- : deviceCoherentMemory( deviceCoherentMemory_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , deviceCoherentMemory( deviceCoherentMemory_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceCoherentMemoryFeaturesAMD( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceCoherentMemoryFeaturesAMD(
- *reinterpret_cast<PhysicalDeviceCoherentMemoryFeaturesAMD const *>( &rhs ) )
- {}
+ : PhysicalDeviceCoherentMemoryFeaturesAMD( *reinterpret_cast<PhysicalDeviceCoherentMemoryFeaturesAMD const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceCoherentMemoryFeaturesAMD &
- operator=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceCoherentMemoryFeaturesAMD &
- operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD const *>( &rhs );
return *this;
@@ -47936,24 +49778,24 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoherentMemoryFeaturesAMD &
- setDeviceCoherentMemory( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ ) VULKAN_HPP_NOEXCEPT
+ setDeviceCoherentMemory( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ ) VULKAN_HPP_NOEXCEPT
{
deviceCoherentMemory = deviceCoherentMemory_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCoherentMemoryFeaturesAMD *>( this );
}
- explicit operator VkPhysicalDeviceCoherentMemoryFeaturesAMD &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceCoherentMemoryFeaturesAMD &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceCoherentMemoryFeaturesAMD *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -47970,7 +49812,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceCoherentMemory == rhs.deviceCoherentMemory );
@@ -47988,15 +49830,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD ) ==
- sizeof( VkPhysicalDeviceCoherentMemoryFeaturesAMD ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD>::value,
- "PhysicalDeviceCoherentMemoryFeaturesAMD is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD>
@@ -48009,29 +49842,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceColorWriteEnableFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable_ = {} ) VULKAN_HPP_NOEXCEPT : colorWriteEnable( colorWriteEnable_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , colorWriteEnable( colorWriteEnable_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT(
- PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceColorWriteEnableFeaturesEXT( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceColorWriteEnableFeaturesEXT(
- *reinterpret_cast<PhysicalDeviceColorWriteEnableFeaturesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceColorWriteEnableFeaturesEXT( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceColorWriteEnableFeaturesEXT( *reinterpret_cast<PhysicalDeviceColorWriteEnableFeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceColorWriteEnableFeaturesEXT &
- operator=( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceColorWriteEnableFeaturesEXT & operator=( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceColorWriteEnableFeaturesEXT &
- operator=( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceColorWriteEnableFeaturesEXT & operator=( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT const *>( &rhs );
return *this;
@@ -48052,17 +49883,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceColorWriteEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceColorWriteEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceColorWriteEnableFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceColorWriteEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceColorWriteEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceColorWriteEnableFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -48079,7 +49910,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorWriteEnable == rhs.colorWriteEnable );
@@ -48097,15 +49928,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT ) ==
- sizeof( VkPhysicalDeviceColorWriteEnableFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT>::value,
- "PhysicalDeviceColorWriteEnableFeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT>
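As the hunk above shows, the generated struct constructors now accept a trailing void * pNext_ = nullptr parameter in addition to the feature members. A small sketch of the two equivalent ways to fill the struct after this change (assumes the Vulkan-Hpp headers are on the include path):

    #include <vulkan/vulkan.hpp>

    void fill_color_write_enable()
    {
      // setter style, unchanged by this diff
      vk::PhysicalDeviceColorWriteEnableFeaturesEXT a{};
      a.setColorWriteEnable( VK_TRUE );

      // constructor style, using the new trailing pNext_ parameter
      vk::PhysicalDeviceColorWriteEnableFeaturesEXT b( VK_TRUE, /* pNext_ */ nullptr );
    }
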
@@ -48118,78 +49940,72 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceComputeShaderDerivativesFeaturesNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV(
- VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ = {} ) VULKAN_HPP_NOEXCEPT
- : computeDerivativeGroupQuads( computeDerivativeGroupQuads_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , computeDerivativeGroupQuads( computeDerivativeGroupQuads_ )
, computeDerivativeGroupLinear( computeDerivativeGroupLinear_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV(
- PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceComputeShaderDerivativesFeaturesNV( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceComputeShaderDerivativesFeaturesNV( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceComputeShaderDerivativesFeaturesNV(
- *reinterpret_cast<PhysicalDeviceComputeShaderDerivativesFeaturesNV const *>( &rhs ) )
- {}
+ PhysicalDeviceComputeShaderDerivativesFeaturesNV( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceComputeShaderDerivativesFeaturesNV( *reinterpret_cast<PhysicalDeviceComputeShaderDerivativesFeaturesNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceComputeShaderDerivativesFeaturesNV &
- operator=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceComputeShaderDerivativesFeaturesNV & operator=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceComputeShaderDerivativesFeaturesNV &
- operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceComputeShaderDerivativesFeaturesNV & operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesNV &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesNV &
- setComputeDerivativeGroupQuads( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ ) VULKAN_HPP_NOEXCEPT
+ setComputeDerivativeGroupQuads( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ ) VULKAN_HPP_NOEXCEPT
{
computeDerivativeGroupQuads = computeDerivativeGroupQuads_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesNV &
- setComputeDerivativeGroupLinear( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ ) VULKAN_HPP_NOEXCEPT
+ setComputeDerivativeGroupLinear( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ ) VULKAN_HPP_NOEXCEPT
{
computeDerivativeGroupLinear = computeDerivativeGroupLinear_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceComputeShaderDerivativesFeaturesNV *>( this );
}
- explicit operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -48202,11 +50018,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( computeDerivativeGroupQuads == rhs.computeDerivativeGroupQuads ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( computeDerivativeGroupQuads == rhs.computeDerivativeGroupQuads ) &&
( computeDerivativeGroupLinear == rhs.computeDerivativeGroupLinear );
# endif
}
@@ -48218,20 +50033,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads = {};
VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV ) ==
- sizeof( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV>::value,
- "PhysicalDeviceComputeShaderDerivativesFeaturesNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV>
@@ -48244,78 +50050,72 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceConditionalRenderingFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ = {} ) VULKAN_HPP_NOEXCEPT
- : conditionalRendering( conditionalRendering_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , conditionalRendering( conditionalRendering_ )
, inheritedConditionalRendering( inheritedConditionalRendering_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT(
- PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceConditionalRenderingFeaturesEXT( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceConditionalRenderingFeaturesEXT(
- *reinterpret_cast<PhysicalDeviceConditionalRenderingFeaturesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceConditionalRenderingFeaturesEXT( *reinterpret_cast<PhysicalDeviceConditionalRenderingFeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceConditionalRenderingFeaturesEXT &
- operator=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceConditionalRenderingFeaturesEXT &
- operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT &
- setConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ ) VULKAN_HPP_NOEXCEPT
+ setConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ ) VULKAN_HPP_NOEXCEPT
{
conditionalRendering = conditionalRendering_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & setInheritedConditionalRendering(
- VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT &
+ setInheritedConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ ) VULKAN_HPP_NOEXCEPT
{
inheritedConditionalRendering = inheritedConditionalRendering_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceConditionalRenderingFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceConditionalRenderingFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceConditionalRenderingFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceConditionalRenderingFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -48328,7 +50128,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conditionalRendering == rhs.conditionalRendering ) &&
@@ -48343,20 +50143,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering = {};
VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT ) ==
- sizeof( VkPhysicalDeviceConditionalRenderingFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT>::value,
- "PhysicalDeviceConditionalRenderingFeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT>
@@ -48369,21 +50160,21 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceConservativeRasterizationPropertiesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT(
- float primitiveOverestimationSize_ = {},
- float maxExtraPrimitiveOverestimationSize_ = {},
- float extraPrimitiveOverestimationSizeGranularity_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage_ = {} ) VULKAN_HPP_NOEXCEPT
- : primitiveOverestimationSize( primitiveOverestimationSize_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT( float primitiveOverestimationSize_ = {},
+ float maxExtraPrimitiveOverestimationSize_ = {},
+ float extraPrimitiveOverestimationSizeGranularity_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , primitiveOverestimationSize( primitiveOverestimationSize_ )
, maxExtraPrimitiveOverestimationSize( maxExtraPrimitiveOverestimationSize_ )
, extraPrimitiveOverestimationSizeGranularity( extraPrimitiveOverestimationSizeGranularity_ )
, primitiveUnderestimation( primitiveUnderestimation_ )
@@ -48392,40 +50183,38 @@ namespace VULKAN_HPP_NAMESPACE
, degenerateLinesRasterized( degenerateLinesRasterized_ )
, fullyCoveredFragmentShaderInputVariable( fullyCoveredFragmentShaderInputVariable_ )
, conservativeRasterizationPostDepthCoverage( conservativeRasterizationPostDepthCoverage_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT(
- PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceConservativeRasterizationPropertiesEXT( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceConservativeRasterizationPropertiesEXT(
- VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceConservativeRasterizationPropertiesEXT(
- *reinterpret_cast<PhysicalDeviceConservativeRasterizationPropertiesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceConservativeRasterizationPropertiesEXT( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceConservativeRasterizationPropertiesEXT( *reinterpret_cast<PhysicalDeviceConservativeRasterizationPropertiesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceConservativeRasterizationPropertiesEXT &
operator=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceConservativeRasterizationPropertiesEXT &
- operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceConservativeRasterizationPropertiesEXT *>( this );
}
- explicit operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceConservativeRasterizationPropertiesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -48462,17 +50251,15 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( primitiveOverestimationSize == rhs.primitiveOverestimationSize ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( primitiveOverestimationSize == rhs.primitiveOverestimationSize ) &&
( maxExtraPrimitiveOverestimationSize == rhs.maxExtraPrimitiveOverestimationSize ) &&
( extraPrimitiveOverestimationSizeGranularity == rhs.extraPrimitiveOverestimationSizeGranularity ) &&
( primitiveUnderestimation == rhs.primitiveUnderestimation ) &&
( conservativePointAndLineRasterization == rhs.conservativePointAndLineRasterization ) &&
- ( degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized ) &&
- ( degenerateLinesRasterized == rhs.degenerateLinesRasterized ) &&
+ ( degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized ) && ( degenerateLinesRasterized == rhs.degenerateLinesRasterized ) &&
( fullyCoveredFragmentShaderInputVariable == rhs.fullyCoveredFragmentShaderInputVariable ) &&
( conservativeRasterizationPostDepthCoverage == rhs.conservativeRasterizationPostDepthCoverage );
# endif
@@ -48485,8 +50272,8 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;
+ void * pNext = {};
float primitiveOverestimationSize = {};
float maxExtraPrimitiveOverestimationSize = {};
float extraPrimitiveOverestimationSizeGranularity = {};
@@ -48497,15 +50284,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable = {};
VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT ) ==
- sizeof( VkPhysicalDeviceConservativeRasterizationPropertiesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<
- VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT>::value,
- "PhysicalDeviceConservativeRasterizationPropertiesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT>
@@ -48518,32 +50296,29 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceCooperativeMatrixFeaturesNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV(
- VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {} ) VULKAN_HPP_NOEXCEPT
- : cooperativeMatrix( cooperativeMatrix_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , cooperativeMatrix( cooperativeMatrix_ )
, cooperativeMatrixRobustBufferAccess( cooperativeMatrixRobustBufferAccess_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV(
- PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceCooperativeMatrixFeaturesNV( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceCooperativeMatrixFeaturesNV(
- *reinterpret_cast<PhysicalDeviceCooperativeMatrixFeaturesNV const *>( &rhs ) )
- {}
+ PhysicalDeviceCooperativeMatrixFeaturesNV( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceCooperativeMatrixFeaturesNV( *reinterpret_cast<PhysicalDeviceCooperativeMatrixFeaturesNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceCooperativeMatrixFeaturesNV &
- operator=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceCooperativeMatrixFeaturesNV &
- operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV const *>( &rhs );
return *this;
@@ -48563,32 +50338,29 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrixRobustBufferAccess(
- VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV &
+ setCooperativeMatrixRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
{
cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixFeaturesNV *>( this );
}
- explicit operator VkPhysicalDeviceCooperativeMatrixFeaturesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceCooperativeMatrixFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixFeaturesNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -48601,7 +50373,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrix == rhs.cooperativeMatrix ) &&
@@ -48616,20 +50388,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix = {};
VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV ) ==
- sizeof( VkPhysicalDeviceCooperativeMatrixFeaturesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV>::value,
- "PhysicalDeviceCooperativeMatrixFeaturesNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV>
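The hunks above also drop explicit from the operator Vk...&() conversions, so a wrapper struct now converts implicitly to its C counterpart. A hedged sketch of the effect; consume() is a hypothetical stand-in for any C-style API taking the Vk struct:

    #include <vulkan/vulkan.hpp>

    void consume( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & ) {}

    void example()
    {
      vk::PhysicalDeviceCooperativeMatrixFeaturesNV features{};
      consume( features );  // implicit conversion; an explicit cast was needed before this change
    }
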
@@ -48642,51 +50405,47 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceCooperativeMatrixPropertiesNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV(
- VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {} ) VULKAN_HPP_NOEXCEPT
- : cooperativeMatrixSupportedStages( cooperativeMatrixSupportedStages_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , cooperativeMatrixSupportedStages( cooperativeMatrixSupportedStages_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV(
- PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceCooperativeMatrixPropertiesNV(
- *reinterpret_cast<PhysicalDeviceCooperativeMatrixPropertiesNV const *>( &rhs ) )
- {}
+ PhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceCooperativeMatrixPropertiesNV( *reinterpret_cast<PhysicalDeviceCooperativeMatrixPropertiesNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceCooperativeMatrixPropertiesNV &
- operator=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceCooperativeMatrixPropertiesNV &
- operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixPropertiesNV *>( this );
}
- explicit operator VkPhysicalDeviceCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixPropertiesNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::
- tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -48699,11 +50458,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages );
# endif
}
@@ -48714,19 +50472,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV ) ==
- sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV>::value,
- "PhysicalDeviceCooperativeMatrixPropertiesNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV>
@@ -48739,30 +50488,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceCornerSampledImageFeaturesNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV(
- VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ = {} ) VULKAN_HPP_NOEXCEPT
- : cornerSampledImage( cornerSampledImage_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , cornerSampledImage( cornerSampledImage_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV(
- PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceCornerSampledImageFeaturesNV(
- *reinterpret_cast<PhysicalDeviceCornerSampledImageFeaturesNV const *>( &rhs ) )
- {}
+ PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceCornerSampledImageFeaturesNV( *reinterpret_cast<PhysicalDeviceCornerSampledImageFeaturesNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceCornerSampledImageFeaturesNV &
- operator=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceCornerSampledImageFeaturesNV & operator=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceCornerSampledImageFeaturesNV &
- operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceCornerSampledImageFeaturesNV & operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV const *>( &rhs );
return *this;
@@ -48776,24 +50522,24 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCornerSampledImageFeaturesNV &
- setCornerSampledImage( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ ) VULKAN_HPP_NOEXCEPT
+ setCornerSampledImage( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ ) VULKAN_HPP_NOEXCEPT
{
cornerSampledImage = cornerSampledImage_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceCornerSampledImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceCornerSampledImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCornerSampledImageFeaturesNV *>( this );
}
- explicit operator VkPhysicalDeviceCornerSampledImageFeaturesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceCornerSampledImageFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceCornerSampledImageFeaturesNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -48810,7 +50556,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cornerSampledImage == rhs.cornerSampledImage );
@@ -48828,15 +50574,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV ) ==
- sizeof( VkPhysicalDeviceCornerSampledImageFeaturesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV>::value,
- "PhysicalDeviceCornerSampledImageFeaturesNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV>
@@ -48849,62 +50586,59 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceCoverageReductionModeFeaturesNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV(
- VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ = {} ) VULKAN_HPP_NOEXCEPT
- : coverageReductionMode( coverageReductionMode_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , coverageReductionMode( coverageReductionMode_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV(
- PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceCoverageReductionModeFeaturesNV( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceCoverageReductionModeFeaturesNV( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceCoverageReductionModeFeaturesNV(
- *reinterpret_cast<PhysicalDeviceCoverageReductionModeFeaturesNV const *>( &rhs ) )
- {}
+ PhysicalDeviceCoverageReductionModeFeaturesNV( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceCoverageReductionModeFeaturesNV( *reinterpret_cast<PhysicalDeviceCoverageReductionModeFeaturesNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceCoverageReductionModeFeaturesNV &
- operator=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceCoverageReductionModeFeaturesNV &
- operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV &
- setCoverageReductionMode( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
+ setCoverageReductionMode( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
{
coverageReductionMode = coverageReductionMode_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCoverageReductionModeFeaturesNV *>( this );
}
- explicit operator VkPhysicalDeviceCoverageReductionModeFeaturesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceCoverageReductionModeFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceCoverageReductionModeFeaturesNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -48921,7 +50655,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( coverageReductionMode == rhs.coverageReductionMode );
@@ -48935,19 +50669,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV ) ==
- sizeof( VkPhysicalDeviceCoverageReductionModeFeaturesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV>::value,
- "PhysicalDeviceCoverageReductionModeFeaturesNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV>
@@ -48960,32 +50685,29 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceCustomBorderColorFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ = {} ) VULKAN_HPP_NOEXCEPT
- : customBorderColors( customBorderColors_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , customBorderColors( customBorderColors_ )
, customBorderColorWithoutFormat( customBorderColorWithoutFormat_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT(
- PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceCustomBorderColorFeaturesEXT( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceCustomBorderColorFeaturesEXT(
- *reinterpret_cast<PhysicalDeviceCustomBorderColorFeaturesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceCustomBorderColorFeaturesEXT( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceCustomBorderColorFeaturesEXT( *reinterpret_cast<PhysicalDeviceCustomBorderColorFeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceCustomBorderColorFeaturesEXT &
- operator=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceCustomBorderColorFeaturesEXT &
- operator=( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT const *>( &rhs );
return *this;
@@ -48999,38 +50721,35 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT &
- setCustomBorderColors( VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ ) VULKAN_HPP_NOEXCEPT
+ setCustomBorderColors( VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ ) VULKAN_HPP_NOEXCEPT
{
customBorderColors = customBorderColors_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & setCustomBorderColorWithoutFormat(
- VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT &
+ setCustomBorderColorWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
{
customBorderColorWithoutFormat = customBorderColorWithoutFormat_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceCustomBorderColorFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceCustomBorderColorFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCustomBorderColorFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceCustomBorderColorFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceCustomBorderColorFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceCustomBorderColorFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -49043,7 +50762,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( customBorderColors == rhs.customBorderColors ) &&
@@ -49058,20 +50777,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::Bool32 customBorderColors = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 customBorderColors = {};
VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT ) ==
- sizeof( VkPhysicalDeviceCustomBorderColorFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT>::value,
- "PhysicalDeviceCustomBorderColorFeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT>
@@ -49084,46 +50794,43 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceCustomBorderColorPropertiesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceCustomBorderColorPropertiesEXT( uint32_t maxCustomBorderColorSamplers_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxCustomBorderColorSamplers( maxCustomBorderColorSamplers_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT( uint32_t maxCustomBorderColorSamplers_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxCustomBorderColorSamplers( maxCustomBorderColorSamplers_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT(
- PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceCustomBorderColorPropertiesEXT( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceCustomBorderColorPropertiesEXT(
- *reinterpret_cast<PhysicalDeviceCustomBorderColorPropertiesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceCustomBorderColorPropertiesEXT( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceCustomBorderColorPropertiesEXT( *reinterpret_cast<PhysicalDeviceCustomBorderColorPropertiesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceCustomBorderColorPropertiesEXT &
- operator=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceCustomBorderColorPropertiesEXT &
- operator=( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceCustomBorderColorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceCustomBorderColorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceCustomBorderColorPropertiesEXT *>( this );
}
- explicit operator VkPhysicalDeviceCustomBorderColorPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceCustomBorderColorPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceCustomBorderColorPropertiesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -49140,11 +50847,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( maxCustomBorderColorSamplers == rhs.maxCustomBorderColorSamplers );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxCustomBorderColorSamplers == rhs.maxCustomBorderColorSamplers );
# endif
}
@@ -49155,19 +50861,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT;
+ void * pNext = {};
uint32_t maxCustomBorderColorSamplers = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT ) ==
- sizeof( VkPhysicalDeviceCustomBorderColorPropertiesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT>::value,
- "PhysicalDeviceCustomBorderColorPropertiesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT>
@@ -49180,23 +50877,23 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV(
- VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ = {} ) VULKAN_HPP_NOEXCEPT
- : dedicatedAllocationImageAliasing( dedicatedAllocationImageAliasing_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , dedicatedAllocationImageAliasing( dedicatedAllocationImageAliasing_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV(
- PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs )
+ VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV(
- VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV(
- *reinterpret_cast<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *>( &rhs ) )
- {}
+ PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( *reinterpret_cast<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &
@@ -49205,39 +50902,36 @@ namespace VULKAN_HPP_NAMESPACE
PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &
operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *>(
- &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &
- setDedicatedAllocationImageAliasing( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ )
- VULKAN_HPP_NOEXCEPT
+ setDedicatedAllocationImageAliasing( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ ) VULKAN_HPP_NOEXCEPT
{
dedicatedAllocationImageAliasing = dedicatedAllocationImageAliasing_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV *>( this );
}
- explicit operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -49254,11 +50948,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( dedicatedAllocationImageAliasing == rhs.dedicatedAllocationImageAliasing );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocationImageAliasing == rhs.dedicatedAllocationImageAliasing );
# endif
}
@@ -49269,21 +50962,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType =
- StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) ==
- sizeof( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<
- VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>::value,
- "PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>
@@ -49291,34 +50973,130 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
};
+ struct PhysicalDeviceDepthClampZeroOneFeaturesEXT
+ {
+ using NativeType = VkPhysicalDeviceDepthClampZeroOneFeaturesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampZeroOneFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampZeroOne_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , depthClampZeroOne( depthClampZeroOne_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampZeroOneFeaturesEXT( PhysicalDeviceDepthClampZeroOneFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceDepthClampZeroOneFeaturesEXT( VkPhysicalDeviceDepthClampZeroOneFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceDepthClampZeroOneFeaturesEXT( *reinterpret_cast<PhysicalDeviceDepthClampZeroOneFeaturesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceDepthClampZeroOneFeaturesEXT & operator=( PhysicalDeviceDepthClampZeroOneFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceDepthClampZeroOneFeaturesEXT & operator=( VkPhysicalDeviceDepthClampZeroOneFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampZeroOneFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampZeroOneFeaturesEXT &
+ setDepthClampZeroOne( VULKAN_HPP_NAMESPACE::Bool32 depthClampZeroOne_ ) VULKAN_HPP_NOEXCEPT
+ {
+ depthClampZeroOne = depthClampZeroOne_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceDepthClampZeroOneFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceDepthClampZeroOneFeaturesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceDepthClampZeroOneFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceDepthClampZeroOneFeaturesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, depthClampZeroOne );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceDepthClampZeroOneFeaturesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceDepthClampZeroOneFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClampZeroOne == rhs.depthClampZeroOne );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceDepthClampZeroOneFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 depthClampZeroOne = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesEXT>
+ {
+ using Type = PhysicalDeviceDepthClampZeroOneFeaturesEXT;
+ };
+
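A short usage sketch (editor's illustration, not part of the upstream diff) for the newly added struct above, also showing the trailing pNext_ constructor parameter this update adds across the header; it assumes the default vk namespace alias:

#include <vulkan/vulkan.hpp>

void buildDepthFeatureChain()
{
  // Second element of the chain; its pNext defaults to nullptr.
  vk::PhysicalDeviceDepthClipEnableFeaturesEXT depthClip( /*depthClipEnable_=*/ VK_TRUE );

  // The constructors now take a trailing pNext_, so chains can be wired up at construction time.
  vk::PhysicalDeviceDepthClampZeroOneFeaturesEXT depthClampZeroOne( /*depthClampZeroOne_=*/ VK_FALSE,
                                                                    /*pNext_=*/ &depthClip );

  // Members can still be adjusted afterwards through the generated setters.
  depthClampZeroOne.setDepthClampZeroOne( VK_TRUE );
}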
struct PhysicalDeviceDepthClipControlFeaturesEXT
{
using NativeType = VkPhysicalDeviceDepthClipControlFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipControlFeaturesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 depthClipControl_ = {} ) VULKAN_HPP_NOEXCEPT : depthClipControl( depthClipControl_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipControlFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipControl_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , depthClipControl( depthClipControl_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipControlFeaturesEXT(
- PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipControlFeaturesEXT( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceDepthClipControlFeaturesEXT( VkPhysicalDeviceDepthClipControlFeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceDepthClipControlFeaturesEXT(
- *reinterpret_cast<PhysicalDeviceDepthClipControlFeaturesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceDepthClipControlFeaturesEXT( VkPhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceDepthClipControlFeaturesEXT( *reinterpret_cast<PhysicalDeviceDepthClipControlFeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceDepthClipControlFeaturesEXT &
- operator=( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceDepthClipControlFeaturesEXT & operator=( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceDepthClipControlFeaturesEXT &
- operator=( VkPhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDepthClipControlFeaturesEXT & operator=( VkPhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT const *>( &rhs );
return *this;
@@ -49339,17 +51117,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceDepthClipControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDepthClipControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDepthClipControlFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceDepthClipControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDepthClipControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDepthClipControlFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -49366,7 +51144,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClipControl == rhs.depthClipControl );
@@ -49384,15 +51162,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 depthClipControl = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT ) ==
- sizeof( VkPhysicalDeviceDepthClipControlFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT>::value,
- "PhysicalDeviceDepthClipControlFeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT>
@@ -49405,30 +51174,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceDepthClipEnableFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceDepthClipEnableFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {} ) VULKAN_HPP_NOEXCEPT
- : depthClipEnable( depthClipEnable_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , depthClipEnable( depthClipEnable_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT(
- PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceDepthClipEnableFeaturesEXT(
- *reinterpret_cast<PhysicalDeviceDepthClipEnableFeaturesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceDepthClipEnableFeaturesEXT( *reinterpret_cast<PhysicalDeviceDepthClipEnableFeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceDepthClipEnableFeaturesEXT &
- operator=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceDepthClipEnableFeaturesEXT &
- operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT const *>( &rhs );
return *this;
@@ -49441,25 +51207,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT &
- setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT & setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT
{
depthClipEnable = depthClipEnable_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDepthClipEnableFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceDepthClipEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDepthClipEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDepthClipEnableFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -49476,7 +51241,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClipEnable == rhs.depthClipEnable );
@@ -49494,15 +51259,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT ) ==
- sizeof( VkPhysicalDeviceDepthClipEnableFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT>::value,
- "PhysicalDeviceDepthClipEnableFeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT>
@@ -49515,52 +51271,49 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceDepthStencilResolveProperties;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceDepthStencilResolveProperties;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthStencilResolveProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties(
- VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {},
- VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {} ) VULKAN_HPP_NOEXCEPT
- : supportedDepthResolveModes( supportedDepthResolveModes_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {},
+ VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , supportedDepthResolveModes( supportedDepthResolveModes_ )
, supportedStencilResolveModes( supportedStencilResolveModes_ )
, independentResolveNone( independentResolveNone_ )
, independentResolve( independentResolve_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties(
- PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceDepthStencilResolveProperties( VkPhysicalDeviceDepthStencilResolveProperties const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceDepthStencilResolveProperties(
- *reinterpret_cast<PhysicalDeviceDepthStencilResolveProperties const *>( &rhs ) )
- {}
+ PhysicalDeviceDepthStencilResolveProperties( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceDepthStencilResolveProperties( *reinterpret_cast<PhysicalDeviceDepthStencilResolveProperties const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceDepthStencilResolveProperties &
- operator=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceDepthStencilResolveProperties & operator=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceDepthStencilResolveProperties &
- operator=( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDepthStencilResolveProperties & operator=( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceDepthStencilResolveProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDepthStencilResolveProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDepthStencilResolveProperties *>( this );
}
- explicit operator VkPhysicalDeviceDepthStencilResolveProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDepthStencilResolveProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDepthStencilResolveProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -49573,12 +51326,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- supportedDepthResolveModes,
- supportedStencilResolveModes,
- independentResolveNone,
- independentResolve );
+ return std::tie( sType, pNext, supportedDepthResolveModes, supportedStencilResolveModes, independentResolveNone, independentResolve );
}
#endif
@@ -49587,13 +51335,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( supportedDepthResolveModes == rhs.supportedDepthResolveModes ) &&
- ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) &&
- ( independentResolveNone == rhs.independentResolveNone ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedDepthResolveModes == rhs.supportedDepthResolveModes ) &&
+ ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) && ( independentResolveNone == rhs.independentResolveNone ) &&
( independentResolve == rhs.independentResolve );
# endif
}
@@ -49605,22 +51351,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthStencilResolveProperties;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthStencilResolveProperties;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {};
VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {};
VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {};
VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties ) ==
- sizeof( VkPhysicalDeviceDepthStencilResolveProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties>::value,
- "PhysicalDeviceDepthStencilResolveProperties is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDepthStencilResolveProperties>
@@ -49634,32 +51371,32 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceDescriptorIndexingFeatures;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceDescriptorIndexingFeatures;
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures(
- VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ )
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ )
, shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ )
, shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ )
, shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ )
@@ -49679,23 +51416,20 @@ namespace VULKAN_HPP_NAMESPACE
, descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ )
, descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ )
, runtimeDescriptorArray( runtimeDescriptorArray_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures(
- PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceDescriptorIndexingFeatures( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceDescriptorIndexingFeatures(
- *reinterpret_cast<PhysicalDeviceDescriptorIndexingFeatures const *>( &rhs ) )
- {}
+ PhysicalDeviceDescriptorIndexingFeatures( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceDescriptorIndexingFeatures( *reinterpret_cast<PhysicalDeviceDescriptorIndexingFeatures const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceDescriptorIndexingFeatures &
- operator=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceDescriptorIndexingFeatures & operator=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceDescriptorIndexingFeatures &
- operator=( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDescriptorIndexingFeatures & operator=( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures const *>( &rhs );
return *this;
@@ -49708,162 +51442,158 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayDynamicIndexing(
- VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
+ setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayDynamicIndexing(
- VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
+ setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayDynamicIndexing(
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
+ setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformBufferArrayNonUniformIndexing(
- VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
+ setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderSampledImageArrayNonUniformIndexing(
- VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
+ setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageBufferArrayNonUniformIndexing(
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
+ setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageImageArrayNonUniformIndexing(
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
+ setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayNonUniformIndexing(
- VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
+ setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
- setShaderUniformTexelBufferArrayNonUniformIndexing(
- VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+ setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
- setShaderStorageTexelBufferArrayNonUniformIndexing(
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+ setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformBufferUpdateAfterBind(
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
+ setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingSampledImageUpdateAfterBind(
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
+ setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageImageUpdateAfterBind(
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
+ setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageBufferUpdateAfterBind(
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
+ setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
- setDescriptorBindingUniformTexelBufferUpdateAfterBind(
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformTexelBufferUpdateAfterBind(
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
- setDescriptorBindingStorageTexelBufferUpdateAfterBind(
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageTexelBufferUpdateAfterBind(
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUpdateUnusedWhilePending(
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
+ setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingPartiallyBound(
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
+ setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingVariableDescriptorCount(
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
+ setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
- setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT
+ setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT
{
runtimeDescriptorArray = runtimeDescriptorArray_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceDescriptorIndexingFeatures const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDescriptorIndexingFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingFeatures *>( this );
}
- explicit operator VkPhysicalDeviceDescriptorIndexingFeatures &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDescriptorIndexingFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingFeatures *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -49922,7 +51652,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
@@ -49934,18 +51664,14 @@ namespace VULKAN_HPP_NAMESPACE
( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) &&
( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) &&
( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) &&
- ( shaderUniformTexelBufferArrayNonUniformIndexing ==
- rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) &&
- ( shaderStorageTexelBufferArrayNonUniformIndexing ==
- rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) &&
+ ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) &&
+ ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) &&
( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) &&
( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) &&
( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) &&
( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) &&
- ( descriptorBindingUniformTexelBufferUpdateAfterBind ==
- rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) &&
- ( descriptorBindingStorageTexelBufferUpdateAfterBind ==
- rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) &&
+ ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) &&
+ ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) &&
( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) &&
( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) &&
( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) &&
@@ -49960,8 +51686,8 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {};
@@ -49983,15 +51709,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {};
VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures ) ==
- sizeof( VkPhysicalDeviceDescriptorIndexingFeatures ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures>::value,
- "PhysicalDeviceDescriptorIndexingFeatures is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorIndexingFeatures>
@@ -50005,35 +51722,35 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceDescriptorIndexingProperties;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceDescriptorIndexingProperties;
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties(
- uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {},
- uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {},
- uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {},
- uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {},
- uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {},
- uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {},
- uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {},
- uint32_t maxPerStageUpdateAfterBindResources_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ )
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {},
+ uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {},
+ uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {},
+ uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {},
+ uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {},
+ uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {},
+ uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {},
+ uint32_t maxPerStageUpdateAfterBindResources_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ )
, shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ )
, shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ )
, shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ )
@@ -50056,39 +51773,36 @@ namespace VULKAN_HPP_NAMESPACE
, maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ )
, maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ )
, maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties(
- PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceDescriptorIndexingProperties( VkPhysicalDeviceDescriptorIndexingProperties const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceDescriptorIndexingProperties(
- *reinterpret_cast<PhysicalDeviceDescriptorIndexingProperties const *>( &rhs ) )
- {}
+ PhysicalDeviceDescriptorIndexingProperties( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceDescriptorIndexingProperties( *reinterpret_cast<PhysicalDeviceDescriptorIndexingProperties const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceDescriptorIndexingProperties &
- operator=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceDescriptorIndexingProperties & operator=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceDescriptorIndexingProperties &
- operator=( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDescriptorIndexingProperties & operator=( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceDescriptorIndexingProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDescriptorIndexingProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingProperties *>( this );
}
- explicit operator VkPhysicalDeviceDescriptorIndexingProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDescriptorIndexingProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -50153,42 +51867,28 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) &&
- ( shaderUniformBufferArrayNonUniformIndexingNative ==
- rhs.shaderUniformBufferArrayNonUniformIndexingNative ) &&
- ( shaderSampledImageArrayNonUniformIndexingNative ==
- rhs.shaderSampledImageArrayNonUniformIndexingNative ) &&
- ( shaderStorageBufferArrayNonUniformIndexingNative ==
- rhs.shaderStorageBufferArrayNonUniformIndexingNative ) &&
- ( shaderStorageImageArrayNonUniformIndexingNative ==
- rhs.shaderStorageImageArrayNonUniformIndexingNative ) &&
- ( shaderInputAttachmentArrayNonUniformIndexingNative ==
- rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) &&
- ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) &&
- ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) &&
+ ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative ) &&
+ ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative ) &&
+ ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative ) &&
+ ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative ) &&
+ ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) &&
+ ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) &&
( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) &&
- ( maxPerStageDescriptorUpdateAfterBindUniformBuffers ==
- rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) &&
- ( maxPerStageDescriptorUpdateAfterBindStorageBuffers ==
- rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) &&
- ( maxPerStageDescriptorUpdateAfterBindSampledImages ==
- rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) &&
- ( maxPerStageDescriptorUpdateAfterBindStorageImages ==
- rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) &&
- ( maxPerStageDescriptorUpdateAfterBindInputAttachments ==
- rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) &&
+ ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) &&
+ ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) &&
+ ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) &&
+ ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) &&
+ ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) &&
( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) &&
( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) &&
( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) &&
- ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ==
- rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) &&
+ ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) &&
( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) &&
- ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ==
- rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) &&
+ ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) &&
( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) &&
( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) &&
( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments );
@@ -50202,8 +51902,8 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingProperties;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingProperties;
+ void * pNext = {};
uint32_t maxUpdateAfterBindDescriptorsInAllPools = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {};
@@ -50228,15 +51928,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {};
uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties ) ==
- sizeof( VkPhysicalDeviceDescriptorIndexingProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties>::value,
- "PhysicalDeviceDescriptorIndexingProperties is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorIndexingProperties>
@@ -50245,67 +51936,164 @@ namespace VULKAN_HPP_NAMESPACE
};
using PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties;
+ struct PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE
+ {
+ using NativeType = VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( VULKAN_HPP_NAMESPACE::Bool32 descriptorSetHostMapping_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , descriptorSetHostMapping( descriptorSetHostMapping_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( *reinterpret_cast<PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE &
+ operator=( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & operator=( VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE &
+ setDescriptorSetHostMapping( VULKAN_HPP_NAMESPACE::Bool32 descriptorSetHostMapping_ ) VULKAN_HPP_NOEXCEPT
+ {
+ descriptorSetHostMapping = descriptorSetHostMapping_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE *>( this );
+ }
+
+ operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, descriptorSetHostMapping );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorSetHostMapping == rhs.descriptorSetHostMapping );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorSetHostMapping = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE>
+ {
+ using Type = PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
+ };
+
struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV
{
using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV(
- VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ = {} ) VULKAN_HPP_NOEXCEPT
- : deviceGeneratedCommands( deviceGeneratedCommands_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , deviceGeneratedCommands( deviceGeneratedCommands_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV(
- PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceDeviceGeneratedCommandsFeaturesNV(
- *reinterpret_cast<PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const *>( &rhs ) )
- {}
+ PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( *reinterpret_cast<PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceDeviceGeneratedCommandsFeaturesNV &
- operator=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceDeviceGeneratedCommandsFeaturesNV &
- operator=( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV &
- setDeviceGeneratedCommands( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ ) VULKAN_HPP_NOEXCEPT
+ setDeviceGeneratedCommands( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ ) VULKAN_HPP_NOEXCEPT
{
deviceGeneratedCommands = deviceGeneratedCommands_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV *>( this );
}
- explicit operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -50322,11 +52110,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( deviceGeneratedCommands == rhs.deviceGeneratedCommands );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceGeneratedCommands == rhs.deviceGeneratedCommands );
# endif
}
@@ -50337,19 +52124,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV ) ==
- sizeof( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV>::value,
- "PhysicalDeviceDeviceGeneratedCommandsFeaturesNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV>
@@ -50362,21 +52140,21 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV(
- uint32_t maxGraphicsShaderGroupCount_ = {},
- uint32_t maxIndirectSequenceCount_ = {},
- uint32_t maxIndirectCommandsTokenCount_ = {},
- uint32_t maxIndirectCommandsStreamCount_ = {},
- uint32_t maxIndirectCommandsTokenOffset_ = {},
- uint32_t maxIndirectCommandsStreamStride_ = {},
- uint32_t minSequencesCountBufferOffsetAlignment_ = {},
- uint32_t minSequencesIndexBufferOffsetAlignment_ = {},
- uint32_t minIndirectCommandsBufferOffsetAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxGraphicsShaderGroupCount( maxGraphicsShaderGroupCount_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( uint32_t maxGraphicsShaderGroupCount_ = {},
+ uint32_t maxIndirectSequenceCount_ = {},
+ uint32_t maxIndirectCommandsTokenCount_ = {},
+ uint32_t maxIndirectCommandsStreamCount_ = {},
+ uint32_t maxIndirectCommandsTokenOffset_ = {},
+ uint32_t maxIndirectCommandsStreamStride_ = {},
+ uint32_t minSequencesCountBufferOffsetAlignment_ = {},
+ uint32_t minSequencesIndexBufferOffsetAlignment_ = {},
+ uint32_t minIndirectCommandsBufferOffsetAlignment_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxGraphicsShaderGroupCount( maxGraphicsShaderGroupCount_ )
, maxIndirectSequenceCount( maxIndirectSequenceCount_ )
, maxIndirectCommandsTokenCount( maxIndirectCommandsTokenCount_ )
, maxIndirectCommandsStreamCount( maxIndirectCommandsStreamCount_ )
@@ -50385,40 +52163,38 @@ namespace VULKAN_HPP_NAMESPACE
, minSequencesCountBufferOffsetAlignment( minSequencesCountBufferOffsetAlignment_ )
, minSequencesIndexBufferOffsetAlignment( minSequencesIndexBufferOffsetAlignment_ )
, minIndirectCommandsBufferOffsetAlignment( minIndirectCommandsBufferOffsetAlignment_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV(
- PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceDeviceGeneratedCommandsPropertiesNV(
- *reinterpret_cast<PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const *>( &rhs ) )
- {}
+ PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( *reinterpret_cast<PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceDeviceGeneratedCommandsPropertiesNV &
operator=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceDeviceGeneratedCommandsPropertiesNV &
- operator=( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV *>( this );
}
- explicit operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -50455,13 +52231,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( maxGraphicsShaderGroupCount == rhs.maxGraphicsShaderGroupCount ) &&
- ( maxIndirectSequenceCount == rhs.maxIndirectSequenceCount ) &&
- ( maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxGraphicsShaderGroupCount == rhs.maxGraphicsShaderGroupCount ) &&
+ ( maxIndirectSequenceCount == rhs.maxIndirectSequenceCount ) && ( maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount ) &&
( maxIndirectCommandsStreamCount == rhs.maxIndirectCommandsStreamCount ) &&
( maxIndirectCommandsTokenOffset == rhs.maxIndirectCommandsTokenOffset ) &&
( maxIndirectCommandsStreamStride == rhs.maxIndirectCommandsStreamStride ) &&
@@ -50478,8 +52252,8 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
+ void * pNext = {};
uint32_t maxGraphicsShaderGroupCount = {};
uint32_t maxIndirectSequenceCount = {};
uint32_t maxIndirectCommandsTokenCount = {};
@@ -50490,15 +52264,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t minSequencesIndexBufferOffsetAlignment = {};
uint32_t minIndirectCommandsBufferOffsetAlignment = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV ) ==
- sizeof( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV>::value,
- "PhysicalDeviceDeviceGeneratedCommandsPropertiesNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV>
@@ -50511,30 +52276,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceDeviceMemoryReportFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ = {} ) VULKAN_HPP_NOEXCEPT
- : deviceMemoryReport( deviceMemoryReport_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , deviceMemoryReport( deviceMemoryReport_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT(
- PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceDeviceMemoryReportFeaturesEXT( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceDeviceMemoryReportFeaturesEXT(
- *reinterpret_cast<PhysicalDeviceDeviceMemoryReportFeaturesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceDeviceMemoryReportFeaturesEXT( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceDeviceMemoryReportFeaturesEXT( *reinterpret_cast<PhysicalDeviceDeviceMemoryReportFeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceDeviceMemoryReportFeaturesEXT &
- operator=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceDeviceMemoryReportFeaturesEXT & operator=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceDeviceMemoryReportFeaturesEXT &
- operator=( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDeviceMemoryReportFeaturesEXT & operator=( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT const *>( &rhs );
return *this;
@@ -50548,24 +52310,24 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceMemoryReportFeaturesEXT &
- setDeviceMemoryReport( VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ ) VULKAN_HPP_NOEXCEPT
+ setDeviceMemoryReport( VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ ) VULKAN_HPP_NOEXCEPT
{
deviceMemoryReport = deviceMemoryReport_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDeviceMemoryReportFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDeviceMemoryReportFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -50582,7 +52344,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMemoryReport == rhs.deviceMemoryReport );
@@ -50596,19 +52358,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT ) ==
- sizeof( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT>::value,
- "PhysicalDeviceDeviceMemoryReportFeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT>
@@ -50621,30 +52374,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceDiagnosticsConfigFeaturesNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV(
- VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ = {} ) VULKAN_HPP_NOEXCEPT
- : diagnosticsConfig( diagnosticsConfig_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , diagnosticsConfig( diagnosticsConfig_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV(
- PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceDiagnosticsConfigFeaturesNV( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceDiagnosticsConfigFeaturesNV(
- *reinterpret_cast<PhysicalDeviceDiagnosticsConfigFeaturesNV const *>( &rhs ) )
- {}
+ PhysicalDeviceDiagnosticsConfigFeaturesNV( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceDiagnosticsConfigFeaturesNV( *reinterpret_cast<PhysicalDeviceDiagnosticsConfigFeaturesNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceDiagnosticsConfigFeaturesNV &
- operator=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceDiagnosticsConfigFeaturesNV &
- operator=( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV const *>( &rhs );
return *this;
@@ -50665,17 +52415,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDiagnosticsConfigFeaturesNV *>( this );
}
- explicit operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDiagnosticsConfigFeaturesNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -50692,7 +52442,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( diagnosticsConfig == rhs.diagnosticsConfig );
@@ -50710,15 +52460,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV ) ==
- sizeof( VkPhysicalDeviceDiagnosticsConfigFeaturesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV>::value,
- "PhysicalDeviceDiagnosticsConfigFeaturesNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV>
@@ -50731,46 +52472,42 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceDiscardRectanglePropertiesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxDiscardRectangles( maxDiscardRectangles_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxDiscardRectangles( maxDiscardRectangles_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT(
- PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceDiscardRectanglePropertiesEXT(
- *reinterpret_cast<PhysicalDeviceDiscardRectanglePropertiesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceDiscardRectanglePropertiesEXT( *reinterpret_cast<PhysicalDeviceDiscardRectanglePropertiesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceDiscardRectanglePropertiesEXT &
- operator=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceDiscardRectanglePropertiesEXT &
- operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDiscardRectanglePropertiesEXT *>( this );
}
- explicit operator VkPhysicalDeviceDiscardRectanglePropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDiscardRectanglePropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDiscardRectanglePropertiesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -50787,7 +52524,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxDiscardRectangles == rhs.maxDiscardRectangles );
@@ -50801,19 +52538,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;
+ void * pNext = {};
uint32_t maxDiscardRectangles = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT ) ==
- sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT>::value,
- "PhysicalDeviceDiscardRectanglePropertiesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT>
@@ -50829,27 +52557,28 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDriverProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties(
- VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary,
- std::array<char, VK_MAX_DRIVER_NAME_SIZE> const & driverName_ = {},
- std::array<char, VK_MAX_DRIVER_INFO_SIZE> const & driverInfo_ = {},
- VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {} ) VULKAN_HPP_NOEXCEPT
- : driverID( driverID_ )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties( VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary,
+ std::array<char, VK_MAX_DRIVER_NAME_SIZE> const & driverName_ = {},
+ std::array<char, VK_MAX_DRIVER_INFO_SIZE> const & driverInfo_ = {},
+ VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , driverID( driverID_ )
, driverName( driverName_ )
, driverInfo( driverInfo_ )
, conformanceVersion( conformanceVersion_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14
- PhysicalDeviceDriverProperties( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDriverProperties( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDriverProperties( *reinterpret_cast<PhysicalDeviceDriverProperties const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceDriverProperties &
- operator=( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceDriverProperties & operator=( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDriverProperties & operator=( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -50857,17 +52586,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkPhysicalDeviceDriverProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDriverProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDriverProperties *>( this );
}
- explicit operator VkPhysicalDeviceDriverProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDriverProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDriverProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -50889,12 +52618,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) &&
- ( driverName == rhs.driverName ) && ( driverInfo == rhs.driverInfo ) &&
- ( conformanceVersion == rhs.conformanceVersion );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && ( driverName == rhs.driverName ) &&
+ ( driverInfo == rhs.driverInfo ) && ( conformanceVersion == rhs.conformanceVersion );
# endif
}
@@ -50905,21 +52633,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDriverProperties;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDriverProperties;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary;
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> driverName = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> driverInfo = {};
VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties ) ==
- sizeof( VkPhysicalDeviceDriverProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties>::value,
- "PhysicalDeviceDriverProperties is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDriverProperties>
@@ -50941,25 +52661,27 @@ namespace VULKAN_HPP_NAMESPACE
int64_t primaryMajor_ = {},
int64_t primaryMinor_ = {},
int64_t renderMajor_ = {},
- int64_t renderMinor_ = {} ) VULKAN_HPP_NOEXCEPT
- : hasPrimary( hasPrimary_ )
+ int64_t renderMinor_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , hasPrimary( hasPrimary_ )
, hasRender( hasRender_ )
, primaryMajor( primaryMajor_ )
, primaryMinor( primaryMinor_ )
, renderMajor( renderMajor_ )
, renderMinor( renderMinor_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceDrmPropertiesEXT( PhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDrmPropertiesEXT( PhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDrmPropertiesEXT( VkPhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDrmPropertiesEXT( *reinterpret_cast<PhysicalDeviceDrmPropertiesEXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceDrmPropertiesEXT &
- operator=( PhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceDrmPropertiesEXT & operator=( PhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDrmPropertiesEXT & operator=( VkPhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -50967,17 +52689,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkPhysicalDeviceDrmPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDrmPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDrmPropertiesEXT *>( this );
}
- explicit operator VkPhysicalDeviceDrmPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDrmPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDrmPropertiesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -51001,12 +52723,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceDrmPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hasPrimary == rhs.hasPrimary ) &&
- ( hasRender == rhs.hasRender ) && ( primaryMajor == rhs.primaryMajor ) &&
- ( primaryMinor == rhs.primaryMinor ) && ( renderMajor == rhs.renderMajor ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hasPrimary == rhs.hasPrimary ) && ( hasRender == rhs.hasRender ) &&
+ ( primaryMajor == rhs.primaryMajor ) && ( primaryMinor == rhs.primaryMinor ) && ( renderMajor == rhs.renderMajor ) &&
( renderMinor == rhs.renderMinor );
# endif
}
@@ -51027,14 +52748,6 @@ namespace VULKAN_HPP_NAMESPACE
int64_t renderMajor = {};
int64_t renderMinor = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT ) ==
- sizeof( VkPhysicalDeviceDrmPropertiesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT>::value,
- "PhysicalDeviceDrmPropertiesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDrmPropertiesEXT>
@@ -51047,29 +52760,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceDynamicRenderingFeatures;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceDynamicRenderingFeatures;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDynamicRenderingFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceDynamicRenderingFeatures( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ = {} ) VULKAN_HPP_NOEXCEPT
- : dynamicRendering( dynamicRendering_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingFeatures( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , dynamicRendering( dynamicRendering_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingFeatures( PhysicalDeviceDynamicRenderingFeatures const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingFeatures( PhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDynamicRenderingFeatures( VkPhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceDynamicRenderingFeatures(
- *reinterpret_cast<PhysicalDeviceDynamicRenderingFeatures const *>( &rhs ) )
- {}
+ : PhysicalDeviceDynamicRenderingFeatures( *reinterpret_cast<PhysicalDeviceDynamicRenderingFeatures const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceDynamicRenderingFeatures &
- operator=( PhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceDynamicRenderingFeatures & operator=( PhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceDynamicRenderingFeatures &
- operator=( VkPhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDynamicRenderingFeatures & operator=( VkPhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures const *>( &rhs );
return *this;
@@ -51082,25 +52793,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingFeatures &
- setDynamicRendering( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingFeatures & setDynamicRendering( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ ) VULKAN_HPP_NOEXCEPT
{
dynamicRendering = dynamicRendering_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceDynamicRenderingFeatures const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDynamicRenderingFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDynamicRenderingFeatures *>( this );
}
- explicit operator VkPhysicalDeviceDynamicRenderingFeatures &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDynamicRenderingFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDynamicRenderingFeatures *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -51117,7 +52827,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceDynamicRenderingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dynamicRendering == rhs.dynamicRendering );
@@ -51135,15 +52845,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures ) ==
- sizeof( VkPhysicalDeviceDynamicRenderingFeatures ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures>::value,
- "PhysicalDeviceDynamicRenderingFeatures is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDynamicRenderingFeatures>
@@ -51157,29 +52858,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceExclusiveScissorFeaturesNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ = {} )
- VULKAN_HPP_NOEXCEPT : exclusiveScissor( exclusiveScissor_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , exclusiveScissor( exclusiveScissor_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV(
- PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceExclusiveScissorFeaturesNV(
- *reinterpret_cast<PhysicalDeviceExclusiveScissorFeaturesNV const *>( &rhs ) )
- {}
+ PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceExclusiveScissorFeaturesNV( *reinterpret_cast<PhysicalDeviceExclusiveScissorFeaturesNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceExclusiveScissorFeaturesNV &
- operator=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceExclusiveScissorFeaturesNV & operator=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceExclusiveScissorFeaturesNV &
- operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExclusiveScissorFeaturesNV & operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV const *>( &rhs );
return *this;
@@ -51192,25 +52891,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV &
- setExclusiveScissor( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV & setExclusiveScissor( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ ) VULKAN_HPP_NOEXCEPT
{
exclusiveScissor = exclusiveScissor_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceExclusiveScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceExclusiveScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExclusiveScissorFeaturesNV *>( this );
}
- explicit operator VkPhysicalDeviceExclusiveScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceExclusiveScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExclusiveScissorFeaturesNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -51227,7 +52925,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exclusiveScissor == rhs.exclusiveScissor );
@@ -51245,15 +52943,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV ) ==
- sizeof( VkPhysicalDeviceExclusiveScissorFeaturesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV>::value,
- "PhysicalDeviceExclusiveScissorFeaturesNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV>
@@ -51266,80 +52955,77 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceExtendedDynamicState2FeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState2FeaturesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ = {} ) VULKAN_HPP_NOEXCEPT
- : extendedDynamicState2( extendedDynamicState2_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState2FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , extendedDynamicState2( extendedDynamicState2_ )
, extendedDynamicState2LogicOp( extendedDynamicState2LogicOp_ )
, extendedDynamicState2PatchControlPoints( extendedDynamicState2PatchControlPoints_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState2FeaturesEXT(
- PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceExtendedDynamicState2FeaturesEXT( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceExtendedDynamicState2FeaturesEXT( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceExtendedDynamicState2FeaturesEXT(
- *reinterpret_cast<PhysicalDeviceExtendedDynamicState2FeaturesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceExtendedDynamicState2FeaturesEXT( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceExtendedDynamicState2FeaturesEXT( *reinterpret_cast<PhysicalDeviceExtendedDynamicState2FeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceExtendedDynamicState2FeaturesEXT &
- operator=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceExtendedDynamicState2FeaturesEXT & operator=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceExtendedDynamicState2FeaturesEXT &
- operator=( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExtendedDynamicState2FeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT &
- setExtendedDynamicState2( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_ ) VULKAN_HPP_NOEXCEPT
+ setExtendedDynamicState2( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState2 = extendedDynamicState2_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT &
- setExtendedDynamicState2LogicOp( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_ ) VULKAN_HPP_NOEXCEPT
+ setExtendedDynamicState2LogicOp( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState2LogicOp = extendedDynamicState2LogicOp_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setExtendedDynamicState2PatchControlPoints(
- VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT &
+ setExtendedDynamicState2PatchControlPoints( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState2PatchControlPoints = extendedDynamicState2PatchControlPoints_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicState2FeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicState2FeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -51351,8 +53037,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- sType, pNext, extendedDynamicState2, extendedDynamicState2LogicOp, extendedDynamicState2PatchControlPoints );
+ return std::tie( sType, pNext, extendedDynamicState2, extendedDynamicState2LogicOp, extendedDynamicState2PatchControlPoints );
}
#endif
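Throughout this update the guard around reflect() changes from a GCC version probe to the opt-in VULKAN_HPP_USE_REFLECT macro. As far as I can tell that macro is meant to be defined by the application before including the header; a minimal sketch of what the opt-in buys (the helper function name is illustrative only, not part of the diff):

#define VULKAN_HPP_USE_REFLECT
#include <vulkan/vulkan.hpp>

// With the macro defined, each struct gains reflect(), a std::tie over all of its
// members, and operator== simply compares the two tuples element-wise.
bool sameFeatures( vk::PhysicalDeviceExtendedDynamicState2FeaturesEXT const & a,
                   vk::PhysicalDeviceExtendedDynamicState2FeaturesEXT const & b )
{
  return a.reflect() == b.reflect();  // same result as a == b in this configuration
}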
@@ -51361,11 +53046,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( extendedDynamicState2 == rhs.extendedDynamicState2 ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedDynamicState2 == rhs.extendedDynamicState2 ) &&
( extendedDynamicState2LogicOp == rhs.extendedDynamicState2LogicOp ) &&
( extendedDynamicState2PatchControlPoints == rhs.extendedDynamicState2PatchControlPoints );
# endif
@@ -51378,21 +53062,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2 = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT ) ==
- sizeof( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT>::value,
- "PhysicalDeviceExtendedDynamicState2FeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT>
@@ -51400,67 +53075,658 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PhysicalDeviceExtendedDynamicState2FeaturesEXT;
};
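A side effect of appending `void * pNext_ = nullptr` to the generated constructors, visible in the hunk above, is that a feature struct and its chain pointer can now be filled in a single expression. A rough usage sketch, not part of the diff; the queue family index and the VK_EXT_extended_dynamic_state2 extension are assumed to be selected elsewhere:

#include <vulkan/vulkan.hpp>

// Sketch: enable the extended-dynamic-state-2 features when creating a device.
vk::UniqueDevice createDevice( vk::PhysicalDevice gpu, uint32_t queueFamily )
{
  // New constructor shape: feature bits first, pNext last.
  vk::PhysicalDeviceExtendedDynamicState2FeaturesEXT eds2( VK_TRUE /*extendedDynamicState2*/,
                                                           VK_TRUE /*logicOp*/,
                                                           VK_FALSE /*patchControlPoints*/,
                                                           nullptr /*pNext*/ );

  float                     priority     = 1.0f;
  vk::DeviceQueueCreateInfo queueInfo( {}, queueFamily, 1, &priority );
  const char *              extensions[] = { VK_EXT_EXTENDED_DYNAMIC_STATE_2_EXTENSION_NAME };

  vk::DeviceCreateInfo deviceInfo{};
  deviceInfo.pNext                   = &eds2;  // chain the feature struct into device creation
  deviceInfo.queueCreateInfoCount    = 1;
  deviceInfo.pQueueCreateInfos       = &queueInfo;
  deviceInfo.enabledExtensionCount   = 1;
  deviceInfo.ppEnabledExtensionNames = extensions;
  return gpu.createDeviceUnique( deviceInfo );
}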
+ struct PhysicalDeviceExtendedDynamicState3FeaturesEXT
+ {
+ using NativeType = VkPhysicalDeviceExtendedDynamicState3FeaturesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceExtendedDynamicState3FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3TessellationDomainOrigin_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClampEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3PolygonMode_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationSamples_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleMask_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToCoverageEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToOneEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LogicOpEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEquation_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorWriteMask_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationStream_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ConservativeRasterizationMode_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ExtraPrimitiveOverestimationSize_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleLocationsEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendAdvanced_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ProvokingVertexMode_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineRasterizationMode_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineStippleEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipNegativeOneToOne_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportWScalingEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportSwizzle_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorLocation_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationMode_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTableEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageReductionMode_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RepresentativeFragmentTestEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ShadingRateImageEnable_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , extendedDynamicState3TessellationDomainOrigin( extendedDynamicState3TessellationDomainOrigin_ )
+ , extendedDynamicState3DepthClampEnable( extendedDynamicState3DepthClampEnable_ )
+ , extendedDynamicState3PolygonMode( extendedDynamicState3PolygonMode_ )
+ , extendedDynamicState3RasterizationSamples( extendedDynamicState3RasterizationSamples_ )
+ , extendedDynamicState3SampleMask( extendedDynamicState3SampleMask_ )
+ , extendedDynamicState3AlphaToCoverageEnable( extendedDynamicState3AlphaToCoverageEnable_ )
+ , extendedDynamicState3AlphaToOneEnable( extendedDynamicState3AlphaToOneEnable_ )
+ , extendedDynamicState3LogicOpEnable( extendedDynamicState3LogicOpEnable_ )
+ , extendedDynamicState3ColorBlendEnable( extendedDynamicState3ColorBlendEnable_ )
+ , extendedDynamicState3ColorBlendEquation( extendedDynamicState3ColorBlendEquation_ )
+ , extendedDynamicState3ColorWriteMask( extendedDynamicState3ColorWriteMask_ )
+ , extendedDynamicState3RasterizationStream( extendedDynamicState3RasterizationStream_ )
+ , extendedDynamicState3ConservativeRasterizationMode( extendedDynamicState3ConservativeRasterizationMode_ )
+ , extendedDynamicState3ExtraPrimitiveOverestimationSize( extendedDynamicState3ExtraPrimitiveOverestimationSize_ )
+ , extendedDynamicState3DepthClipEnable( extendedDynamicState3DepthClipEnable_ )
+ , extendedDynamicState3SampleLocationsEnable( extendedDynamicState3SampleLocationsEnable_ )
+ , extendedDynamicState3ColorBlendAdvanced( extendedDynamicState3ColorBlendAdvanced_ )
+ , extendedDynamicState3ProvokingVertexMode( extendedDynamicState3ProvokingVertexMode_ )
+ , extendedDynamicState3LineRasterizationMode( extendedDynamicState3LineRasterizationMode_ )
+ , extendedDynamicState3LineStippleEnable( extendedDynamicState3LineStippleEnable_ )
+ , extendedDynamicState3DepthClipNegativeOneToOne( extendedDynamicState3DepthClipNegativeOneToOne_ )
+ , extendedDynamicState3ViewportWScalingEnable( extendedDynamicState3ViewportWScalingEnable_ )
+ , extendedDynamicState3ViewportSwizzle( extendedDynamicState3ViewportSwizzle_ )
+ , extendedDynamicState3CoverageToColorEnable( extendedDynamicState3CoverageToColorEnable_ )
+ , extendedDynamicState3CoverageToColorLocation( extendedDynamicState3CoverageToColorLocation_ )
+ , extendedDynamicState3CoverageModulationMode( extendedDynamicState3CoverageModulationMode_ )
+ , extendedDynamicState3CoverageModulationTableEnable( extendedDynamicState3CoverageModulationTableEnable_ )
+ , extendedDynamicState3CoverageModulationTable( extendedDynamicState3CoverageModulationTable_ )
+ , extendedDynamicState3CoverageReductionMode( extendedDynamicState3CoverageReductionMode_ )
+ , extendedDynamicState3RepresentativeFragmentTestEnable( extendedDynamicState3RepresentativeFragmentTestEnable_ )
+ , extendedDynamicState3ShadingRateImageEnable( extendedDynamicState3ShadingRateImageEnable_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceExtendedDynamicState3FeaturesEXT( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceExtendedDynamicState3FeaturesEXT( VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceExtendedDynamicState3FeaturesEXT( *reinterpret_cast<PhysicalDeviceExtendedDynamicState3FeaturesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceExtendedDynamicState3FeaturesEXT & operator=( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceExtendedDynamicState3FeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3TessellationDomainOrigin( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3TessellationDomainOrigin_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3TessellationDomainOrigin = extendedDynamicState3TessellationDomainOrigin_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3DepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClampEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3DepthClampEnable = extendedDynamicState3DepthClampEnable_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3PolygonMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3PolygonMode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3PolygonMode = extendedDynamicState3PolygonMode_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3RasterizationSamples( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3RasterizationSamples = extendedDynamicState3RasterizationSamples_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3SampleMask( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3SampleMask = extendedDynamicState3SampleMask_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3AlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3AlphaToCoverageEnable = extendedDynamicState3AlphaToCoverageEnable_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3AlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3AlphaToOneEnable = extendedDynamicState3AlphaToOneEnable_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3LogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LogicOpEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3LogicOpEnable = extendedDynamicState3LogicOpEnable_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3ColorBlendEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3ColorBlendEnable = extendedDynamicState3ColorBlendEnable_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3ColorBlendEquation( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEquation_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3ColorBlendEquation = extendedDynamicState3ColorBlendEquation_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3ColorWriteMask( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorWriteMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3ColorWriteMask = extendedDynamicState3ColorWriteMask_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3RasterizationStream( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationStream_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3RasterizationStream = extendedDynamicState3RasterizationStream_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ConservativeRasterizationMode(
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ConservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3ConservativeRasterizationMode = extendedDynamicState3ConservativeRasterizationMode_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ExtraPrimitiveOverestimationSize(
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ExtraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3ExtraPrimitiveOverestimationSize = extendedDynamicState3ExtraPrimitiveOverestimationSize_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3DepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3DepthClipEnable = extendedDynamicState3DepthClipEnable_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3SampleLocationsEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3SampleLocationsEnable = extendedDynamicState3SampleLocationsEnable_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3ColorBlendAdvanced( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendAdvanced_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3ColorBlendAdvanced = extendedDynamicState3ColorBlendAdvanced_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3ProvokingVertexMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ProvokingVertexMode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3ProvokingVertexMode = extendedDynamicState3ProvokingVertexMode_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3LineRasterizationMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3LineRasterizationMode = extendedDynamicState3LineRasterizationMode_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3LineStippleEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineStippleEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3LineStippleEnable = extendedDynamicState3LineStippleEnable_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3DepthClipNegativeOneToOne( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipNegativeOneToOne_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3DepthClipNegativeOneToOne = extendedDynamicState3DepthClipNegativeOneToOne_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3ViewportWScalingEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3ViewportWScalingEnable = extendedDynamicState3ViewportWScalingEnable_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3ViewportSwizzle( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportSwizzle_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3ViewportSwizzle = extendedDynamicState3ViewportSwizzle_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3CoverageToColorEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3CoverageToColorEnable = extendedDynamicState3CoverageToColorEnable_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3CoverageToColorLocation( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorLocation_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3CoverageToColorLocation = extendedDynamicState3CoverageToColorLocation_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3CoverageModulationMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationMode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3CoverageModulationMode = extendedDynamicState3CoverageModulationMode_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3CoverageModulationTableEnable(
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3CoverageModulationTableEnable = extendedDynamicState3CoverageModulationTableEnable_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3CoverageModulationTable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3CoverageModulationTable = extendedDynamicState3CoverageModulationTable_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3CoverageReductionMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3CoverageReductionMode = extendedDynamicState3CoverageReductionMode_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3RepresentativeFragmentTestEnable(
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RepresentativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3RepresentativeFragmentTestEnable = extendedDynamicState3RepresentativeFragmentTestEnable_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
+ setExtendedDynamicState3ShadingRateImageEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ShadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ extendedDynamicState3ShadingRateImageEnable = extendedDynamicState3ShadingRateImageEnable_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicState3FeaturesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceExtendedDynamicState3FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicState3FeaturesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType,
+ pNext,
+ extendedDynamicState3TessellationDomainOrigin,
+ extendedDynamicState3DepthClampEnable,
+ extendedDynamicState3PolygonMode,
+ extendedDynamicState3RasterizationSamples,
+ extendedDynamicState3SampleMask,
+ extendedDynamicState3AlphaToCoverageEnable,
+ extendedDynamicState3AlphaToOneEnable,
+ extendedDynamicState3LogicOpEnable,
+ extendedDynamicState3ColorBlendEnable,
+ extendedDynamicState3ColorBlendEquation,
+ extendedDynamicState3ColorWriteMask,
+ extendedDynamicState3RasterizationStream,
+ extendedDynamicState3ConservativeRasterizationMode,
+ extendedDynamicState3ExtraPrimitiveOverestimationSize,
+ extendedDynamicState3DepthClipEnable,
+ extendedDynamicState3SampleLocationsEnable,
+ extendedDynamicState3ColorBlendAdvanced,
+ extendedDynamicState3ProvokingVertexMode,
+ extendedDynamicState3LineRasterizationMode,
+ extendedDynamicState3LineStippleEnable,
+ extendedDynamicState3DepthClipNegativeOneToOne,
+ extendedDynamicState3ViewportWScalingEnable,
+ extendedDynamicState3ViewportSwizzle,
+ extendedDynamicState3CoverageToColorEnable,
+ extendedDynamicState3CoverageToColorLocation,
+ extendedDynamicState3CoverageModulationMode,
+ extendedDynamicState3CoverageModulationTableEnable,
+ extendedDynamicState3CoverageModulationTable,
+ extendedDynamicState3CoverageReductionMode,
+ extendedDynamicState3RepresentativeFragmentTestEnable,
+ extendedDynamicState3ShadingRateImageEnable );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+ ( extendedDynamicState3TessellationDomainOrigin == rhs.extendedDynamicState3TessellationDomainOrigin ) &&
+ ( extendedDynamicState3DepthClampEnable == rhs.extendedDynamicState3DepthClampEnable ) &&
+ ( extendedDynamicState3PolygonMode == rhs.extendedDynamicState3PolygonMode ) &&
+ ( extendedDynamicState3RasterizationSamples == rhs.extendedDynamicState3RasterizationSamples ) &&
+ ( extendedDynamicState3SampleMask == rhs.extendedDynamicState3SampleMask ) &&
+ ( extendedDynamicState3AlphaToCoverageEnable == rhs.extendedDynamicState3AlphaToCoverageEnable ) &&
+ ( extendedDynamicState3AlphaToOneEnable == rhs.extendedDynamicState3AlphaToOneEnable ) &&
+ ( extendedDynamicState3LogicOpEnable == rhs.extendedDynamicState3LogicOpEnable ) &&
+ ( extendedDynamicState3ColorBlendEnable == rhs.extendedDynamicState3ColorBlendEnable ) &&
+ ( extendedDynamicState3ColorBlendEquation == rhs.extendedDynamicState3ColorBlendEquation ) &&
+ ( extendedDynamicState3ColorWriteMask == rhs.extendedDynamicState3ColorWriteMask ) &&
+ ( extendedDynamicState3RasterizationStream == rhs.extendedDynamicState3RasterizationStream ) &&
+ ( extendedDynamicState3ConservativeRasterizationMode == rhs.extendedDynamicState3ConservativeRasterizationMode ) &&
+ ( extendedDynamicState3ExtraPrimitiveOverestimationSize == rhs.extendedDynamicState3ExtraPrimitiveOverestimationSize ) &&
+ ( extendedDynamicState3DepthClipEnable == rhs.extendedDynamicState3DepthClipEnable ) &&
+ ( extendedDynamicState3SampleLocationsEnable == rhs.extendedDynamicState3SampleLocationsEnable ) &&
+ ( extendedDynamicState3ColorBlendAdvanced == rhs.extendedDynamicState3ColorBlendAdvanced ) &&
+ ( extendedDynamicState3ProvokingVertexMode == rhs.extendedDynamicState3ProvokingVertexMode ) &&
+ ( extendedDynamicState3LineRasterizationMode == rhs.extendedDynamicState3LineRasterizationMode ) &&
+ ( extendedDynamicState3LineStippleEnable == rhs.extendedDynamicState3LineStippleEnable ) &&
+ ( extendedDynamicState3DepthClipNegativeOneToOne == rhs.extendedDynamicState3DepthClipNegativeOneToOne ) &&
+ ( extendedDynamicState3ViewportWScalingEnable == rhs.extendedDynamicState3ViewportWScalingEnable ) &&
+ ( extendedDynamicState3ViewportSwizzle == rhs.extendedDynamicState3ViewportSwizzle ) &&
+ ( extendedDynamicState3CoverageToColorEnable == rhs.extendedDynamicState3CoverageToColorEnable ) &&
+ ( extendedDynamicState3CoverageToColorLocation == rhs.extendedDynamicState3CoverageToColorLocation ) &&
+ ( extendedDynamicState3CoverageModulationMode == rhs.extendedDynamicState3CoverageModulationMode ) &&
+ ( extendedDynamicState3CoverageModulationTableEnable == rhs.extendedDynamicState3CoverageModulationTableEnable ) &&
+ ( extendedDynamicState3CoverageModulationTable == rhs.extendedDynamicState3CoverageModulationTable ) &&
+ ( extendedDynamicState3CoverageReductionMode == rhs.extendedDynamicState3CoverageReductionMode ) &&
+ ( extendedDynamicState3RepresentativeFragmentTestEnable == rhs.extendedDynamicState3RepresentativeFragmentTestEnable ) &&
+ ( extendedDynamicState3ShadingRateImageEnable == rhs.extendedDynamicState3ShadingRateImageEnable );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3TessellationDomainOrigin = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClampEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3PolygonMode = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationSamples = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleMask = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToCoverageEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToOneEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LogicOpEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEquation = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorWriteMask = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationStream = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ConservativeRasterizationMode = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ExtraPrimitiveOverestimationSize = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleLocationsEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendAdvanced = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ProvokingVertexMode = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineRasterizationMode = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineStippleEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipNegativeOneToOne = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportWScalingEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportSwizzle = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorLocation = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationMode = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTableEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageReductionMode = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RepresentativeFragmentTestEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ShadingRateImageEnable = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT>
+ {
+ using Type = PhysicalDeviceExtendedDynamicState3FeaturesEXT;
+ };
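The newly added PhysicalDeviceExtendedDynamicState3FeaturesEXT follows the usual feature-struct pattern: chain it behind PhysicalDeviceFeatures2 to find out which of the thirty-odd dynamic states the implementation supports. A sketch, assuming vulkan.hpp's templated getFeatures2 overload that returns a StructureChain:

#include <vulkan/vulkan.hpp>

// Sketch: probe one of the extended-dynamic-state-3 feature bits.
bool supportsDynamicPolygonMode( vk::PhysicalDevice gpu )
{
  auto chain = gpu.getFeatures2<vk::PhysicalDeviceFeatures2,
                                vk::PhysicalDeviceExtendedDynamicState3FeaturesEXT>();
  return chain.get<vk::PhysicalDeviceExtendedDynamicState3FeaturesEXT>()
             .extendedDynamicState3PolygonMode == VK_TRUE;
}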
+
+ struct PhysicalDeviceExtendedDynamicState3PropertiesEXT
+ {
+ using NativeType = VkPhysicalDeviceExtendedDynamicState3PropertiesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState3PropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 dynamicPrimitiveTopologyUnrestricted_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , dynamicPrimitiveTopologyUnrestricted( dynamicPrimitiveTopologyUnrestricted_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceExtendedDynamicState3PropertiesEXT( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceExtendedDynamicState3PropertiesEXT( VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceExtendedDynamicState3PropertiesEXT( *reinterpret_cast<PhysicalDeviceExtendedDynamicState3PropertiesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceExtendedDynamicState3PropertiesEXT & operator=( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceExtendedDynamicState3PropertiesEXT & operator=( VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3PropertiesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3PropertiesEXT &
+ setDynamicPrimitiveTopologyUnrestricted( VULKAN_HPP_NAMESPACE::Bool32 dynamicPrimitiveTopologyUnrestricted_ ) VULKAN_HPP_NOEXCEPT
+ {
+ dynamicPrimitiveTopologyUnrestricted = dynamicPrimitiveTopologyUnrestricted_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicState3PropertiesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceExtendedDynamicState3PropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicState3PropertiesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, dynamicPrimitiveTopologyUnrestricted );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dynamicPrimitiveTopologyUnrestricted == rhs.dynamicPrimitiveTopologyUnrestricted );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 dynamicPrimitiveTopologyUnrestricted = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT>
+ {
+ using Type = PhysicalDeviceExtendedDynamicState3PropertiesEXT;
+ };
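The matching properties struct carries a single limit, dynamicPrimitiveTopologyUnrestricted, and is read through the properties chain rather than the features chain. Same caveats as the sketch above:

#include <vulkan/vulkan.hpp>

// Sketch: read the one extended-dynamic-state-3 property.
bool topologyClassUnrestricted( vk::PhysicalDevice gpu )
{
  auto chain = gpu.getProperties2<vk::PhysicalDeviceProperties2,
                                  vk::PhysicalDeviceExtendedDynamicState3PropertiesEXT>();
  return chain.get<vk::PhysicalDeviceExtendedDynamicState3PropertiesEXT>()
             .dynamicPrimitiveTopologyUnrestricted == VK_TRUE;
}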
+
struct PhysicalDeviceExtendedDynamicStateFeaturesEXT
{
using NativeType = VkPhysicalDeviceExtendedDynamicStateFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ = {} ) VULKAN_HPP_NOEXCEPT
- : extendedDynamicState( extendedDynamicState_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , extendedDynamicState( extendedDynamicState_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT(
- PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceExtendedDynamicStateFeaturesEXT( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceExtendedDynamicStateFeaturesEXT( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceExtendedDynamicStateFeaturesEXT(
- *reinterpret_cast<PhysicalDeviceExtendedDynamicStateFeaturesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceExtendedDynamicStateFeaturesEXT( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceExtendedDynamicStateFeaturesEXT( *reinterpret_cast<PhysicalDeviceExtendedDynamicStateFeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceExtendedDynamicStateFeaturesEXT &
- operator=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceExtendedDynamicStateFeaturesEXT &
- operator=( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT &
- setExtendedDynamicState( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ ) VULKAN_HPP_NOEXCEPT
+ setExtendedDynamicState( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState = extendedDynamicState_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicStateFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -51477,7 +53743,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedDynamicState == rhs.extendedDynamicState );
@@ -51491,19 +53757,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT ) ==
- sizeof( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT>::value,
- "PhysicalDeviceExtendedDynamicStateFeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT>
@@ -51516,29 +53773,30 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceExternalBufferInfo;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalBufferInfo;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalBufferInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo(
- VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {},
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ =
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, usage( usage_ )
, handleType( handleType_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceExternalBufferInfo( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceExternalBufferInfo( *reinterpret_cast<PhysicalDeviceExternalBufferInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceExternalBufferInfo &
- operator=( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceExternalBufferInfo & operator=( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExternalBufferInfo & operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -51553,39 +53811,37 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo &
- setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo &
- setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
{
usage = usage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo &
- setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceExternalBufferInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceExternalBufferInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( this );
}
- explicit operator VkPhysicalDeviceExternalBufferInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceExternalBufferInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExternalBufferInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -51606,11 +53862,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( usage == rhs.usage ) &&
- ( handleType == rhs.handleType );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( usage == rhs.usage ) && ( handleType == rhs.handleType );
# endif
}
@@ -51621,21 +53876,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {};
- VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType =
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo ) ==
- sizeof( VkPhysicalDeviceExternalBufferInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo>::value,
- "PhysicalDeviceExternalBufferInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalBufferInfo>
@@ -51653,21 +53899,22 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo(
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ =
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : handleType( handleType_ )
- {}
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , handleType( handleType_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceExternalFenceInfo( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceExternalFenceInfo( *reinterpret_cast<PhysicalDeviceExternalFenceInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceExternalFenceInfo &
- operator=( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceExternalFenceInfo & operator=( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExternalFenceInfo & operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -51683,30 +53930,28 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo &
- setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceExternalFenceInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceExternalFenceInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( this );
}
- explicit operator VkPhysicalDeviceExternalFenceInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceExternalFenceInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExternalFenceInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -51719,7 +53964,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType );
@@ -51733,19 +53978,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType =
- VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo ) ==
- sizeof( VkPhysicalDeviceExternalFenceInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo>::value,
- "PhysicalDeviceExternalFenceInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalFenceInfo>
@@ -51759,30 +53995,28 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceExternalImageFormatInfo;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceExternalImageFormatInfo;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalImageFormatInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo(
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ =
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : handleType( handleType_ )
- {}
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , handleType( handleType_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( PhysicalDeviceExternalImageFormatInfo const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceExternalImageFormatInfo(
- *reinterpret_cast<PhysicalDeviceExternalImageFormatInfo const *>( &rhs ) )
- {}
+ : PhysicalDeviceExternalImageFormatInfo( *reinterpret_cast<PhysicalDeviceExternalImageFormatInfo const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceExternalImageFormatInfo &
- operator=( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceExternalImageFormatInfo & operator=( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceExternalImageFormatInfo &
- operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalImageFormatInfo & operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo const *>( &rhs );
return *this;
@@ -51796,30 +54030,28 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo &
- setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceExternalImageFormatInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceExternalImageFormatInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExternalImageFormatInfo *>( this );
}
- explicit operator VkPhysicalDeviceExternalImageFormatInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceExternalImageFormatInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExternalImageFormatInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -51832,7 +54064,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType );
@@ -51846,19 +54078,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType =
- VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo ) ==
- sizeof( VkPhysicalDeviceExternalImageFormatInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo>::value,
- "PhysicalDeviceExternalImageFormatInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalImageFormatInfo>
@@ -51872,46 +54095,44 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceExternalMemoryHostPropertiesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT(
- VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
- : minImportedHostPointerAlignment( minImportedHostPointerAlignment_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , minImportedHostPointerAlignment( minImportedHostPointerAlignment_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT(
- PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceExternalMemoryHostPropertiesEXT( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceExternalMemoryHostPropertiesEXT(
- *reinterpret_cast<PhysicalDeviceExternalMemoryHostPropertiesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceExternalMemoryHostPropertiesEXT( *reinterpret_cast<PhysicalDeviceExternalMemoryHostPropertiesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceExternalMemoryHostPropertiesEXT &
- operator=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceExternalMemoryHostPropertiesEXT &
- operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExternalMemoryHostPropertiesEXT *>( this );
}
- explicit operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -51928,11 +54149,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment );
# endif
}
@@ -51943,19 +54163,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT ) ==
- sizeof( VkPhysicalDeviceExternalMemoryHostPropertiesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT>::value,
- "PhysicalDeviceExternalMemoryHostPropertiesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT>
@@ -51968,30 +54179,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceExternalMemoryRDMAFeaturesNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryRDMAFeaturesNV(
- VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA_ = {} ) VULKAN_HPP_NOEXCEPT
- : externalMemoryRDMA( externalMemoryRDMA_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryRDMAFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , externalMemoryRDMA( externalMemoryRDMA_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryRDMAFeaturesNV(
- PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryRDMAFeaturesNV( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceExternalMemoryRDMAFeaturesNV( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceExternalMemoryRDMAFeaturesNV(
- *reinterpret_cast<PhysicalDeviceExternalMemoryRDMAFeaturesNV const *>( &rhs ) )
- {}
+ PhysicalDeviceExternalMemoryRDMAFeaturesNV( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceExternalMemoryRDMAFeaturesNV( *reinterpret_cast<PhysicalDeviceExternalMemoryRDMAFeaturesNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceExternalMemoryRDMAFeaturesNV &
- operator=( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceExternalMemoryRDMAFeaturesNV & operator=( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceExternalMemoryRDMAFeaturesNV &
- operator=( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalMemoryRDMAFeaturesNV & operator=( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV const *>( &rhs );
return *this;
@@ -52005,24 +54213,24 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryRDMAFeaturesNV &
- setExternalMemoryRDMA( VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA_ ) VULKAN_HPP_NOEXCEPT
+ setExternalMemoryRDMA( VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA_ ) VULKAN_HPP_NOEXCEPT
{
externalMemoryRDMA = externalMemoryRDMA_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExternalMemoryRDMAFeaturesNV *>( this );
}
- explicit operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExternalMemoryRDMAFeaturesNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -52039,7 +54247,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalMemoryRDMA == rhs.externalMemoryRDMA );
@@ -52057,15 +54265,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV ) ==
- sizeof( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV>::value,
- "PhysicalDeviceExternalMemoryRDMAFeaturesNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV>
@@ -52078,29 +54277,28 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceExternalSemaphoreInfo;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceExternalSemaphoreInfo;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalSemaphoreInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo(
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ =
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : handleType( handleType_ )
- {}
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , handleType( handleType_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( PhysicalDeviceExternalSemaphoreInfo const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceExternalSemaphoreInfo( *reinterpret_cast<PhysicalDeviceExternalSemaphoreInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceExternalSemaphoreInfo &
- operator=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceExternalSemaphoreInfo & operator=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceExternalSemaphoreInfo &
- operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalSemaphoreInfo & operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo const *>( &rhs );
return *this;
@@ -52114,30 +54312,28 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo &
- setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceExternalSemaphoreInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceExternalSemaphoreInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( this );
}
- explicit operator VkPhysicalDeviceExternalSemaphoreInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceExternalSemaphoreInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExternalSemaphoreInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -52150,7 +54346,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType );
@@ -52164,19 +54360,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType =
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo ) ==
- sizeof( VkPhysicalDeviceExternalSemaphoreInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo>::value,
- "PhysicalDeviceExternalSemaphoreInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalSemaphoreInfo>
@@ -52185,6 +54372,114 @@ namespace VULKAN_HPP_NAMESPACE
};
using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo;
+ struct PhysicalDeviceFaultFeaturesEXT
+ {
+ using NativeType = VkPhysicalDeviceFaultFeaturesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFaultFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFaultFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 deviceFault_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 deviceFaultVendorBinary_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , deviceFault( deviceFault_ )
+ , deviceFaultVendorBinary( deviceFaultVendorBinary_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFaultFeaturesEXT( PhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceFaultFeaturesEXT( VkPhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceFaultFeaturesEXT( *reinterpret_cast<PhysicalDeviceFaultFeaturesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceFaultFeaturesEXT & operator=( PhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceFaultFeaturesEXT & operator=( VkPhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT & setDeviceFault( VULKAN_HPP_NAMESPACE::Bool32 deviceFault_ ) VULKAN_HPP_NOEXCEPT
+ {
+ deviceFault = deviceFault_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT &
+ setDeviceFaultVendorBinary( VULKAN_HPP_NAMESPACE::Bool32 deviceFaultVendorBinary_ ) VULKAN_HPP_NOEXCEPT
+ {
+ deviceFaultVendorBinary = deviceFaultVendorBinary_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceFaultFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceFaultFeaturesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceFaultFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceFaultFeaturesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, deviceFault, deviceFaultVendorBinary );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceFaultFeaturesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceFaultFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceFault == rhs.deviceFault ) &&
+ ( deviceFaultVendorBinary == rhs.deviceFaultVendorBinary );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceFaultFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFaultFeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 deviceFault = {};
+ VULKAN_HPP_NAMESPACE::Bool32 deviceFaultVendorBinary = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceFaultFeaturesEXT>
+ {
+ using Type = PhysicalDeviceFaultFeaturesEXT;
+ };
+
struct PhysicalDeviceFeatures2
{
using NativeType = VkPhysicalDeviceFeatures2;
@@ -52193,16 +54488,18 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFeatures2;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ = {} ) VULKAN_HPP_NOEXCEPT
- : features( features_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , features( features_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceFeatures2( *reinterpret_cast<PhysicalDeviceFeatures2 const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceFeatures2 & operator=( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -52220,31 +54517,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 &
- setFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const & features_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 & setFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const & features_ ) VULKAN_HPP_NOEXCEPT
{
features = features_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceFeatures2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFeatures2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFeatures2 *>( this );
}
- explicit operator VkPhysicalDeviceFeatures2 &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFeatures2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFeatures2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -52257,7 +54551,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( features == rhs.features );
@@ -52275,13 +54569,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 ) ==
- sizeof( VkPhysicalDeviceFeatures2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>::value,
- "PhysicalDeviceFeatures2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFeatures2>
@@ -52295,31 +54582,30 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceFloatControlsProperties;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceFloatControlsProperties;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFloatControlsProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties(
- VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ =
- VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
- VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ =
- VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
- VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {} ) VULKAN_HPP_NOEXCEPT
- : denormBehaviorIndependence( denormBehaviorIndependence_ )
+ VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
+ VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , denormBehaviorIndependence( denormBehaviorIndependence_ )
, roundingModeIndependence( roundingModeIndependence_ )
, shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ )
, shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ )
@@ -52336,38 +54622,36 @@ namespace VULKAN_HPP_NAMESPACE
, shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ )
, shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ )
, shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( PhysicalDeviceFloatControlsProperties const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFloatControlsProperties( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceFloatControlsProperties(
- *reinterpret_cast<PhysicalDeviceFloatControlsProperties const *>( &rhs ) )
- {}
+ : PhysicalDeviceFloatControlsProperties( *reinterpret_cast<PhysicalDeviceFloatControlsProperties const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceFloatControlsProperties &
- operator=( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceFloatControlsProperties & operator=( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceFloatControlsProperties &
- operator=( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFloatControlsProperties & operator=( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceFloatControlsProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFloatControlsProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFloatControlsProperties *>( this );
}
- explicit operator VkPhysicalDeviceFloatControlsProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFloatControlsProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFloatControlsProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -52420,26 +54704,20 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) &&
( roundingModeIndependence == rhs.roundingModeIndependence ) &&
( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) &&
( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) &&
( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 ) &&
- ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) &&
- ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) &&
- ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) &&
- ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) &&
+ ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) && ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) &&
+ ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) && ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) &&
( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) &&
- ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) &&
- ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) &&
- ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) &&
- ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) &&
- ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) &&
- ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) &&
+ ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) &&
+ ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) &&
+ ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) &&
( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 );
# endif
}
@@ -52451,36 +54729,26 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFloatControlsProperties;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence =
- VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
- VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence =
- VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
- VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties ) ==
- sizeof( VkPhysicalDeviceFloatControlsProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties>::value,
- "PhysicalDeviceFloatControlsProperties is not nothrow_move_constructible!" );
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFloatControlsProperties;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
+ VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {};
+ };
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFloatControlsProperties>
@@ -52494,30 +54762,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceFragmentDensityMap2FeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ = {} ) VULKAN_HPP_NOEXCEPT
- : fragmentDensityMapDeferred( fragmentDensityMapDeferred_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , fragmentDensityMapDeferred( fragmentDensityMapDeferred_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT(
- PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceFragmentDensityMap2FeaturesEXT( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceFragmentDensityMap2FeaturesEXT(
- *reinterpret_cast<PhysicalDeviceFragmentDensityMap2FeaturesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceFragmentDensityMap2FeaturesEXT( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceFragmentDensityMap2FeaturesEXT( *reinterpret_cast<PhysicalDeviceFragmentDensityMap2FeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceFragmentDensityMap2FeaturesEXT &
- operator=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceFragmentDensityMap2FeaturesEXT & operator=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceFragmentDensityMap2FeaturesEXT &
- operator=( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFragmentDensityMap2FeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT const *>( &rhs );
return *this;
@@ -52531,24 +54796,24 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2FeaturesEXT &
- setFragmentDensityMapDeferred( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ ) VULKAN_HPP_NOEXCEPT
+ setFragmentDensityMapDeferred( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ ) VULKAN_HPP_NOEXCEPT
{
fragmentDensityMapDeferred = fragmentDensityMapDeferred_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMap2FeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMap2FeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -52565,11 +54830,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( fragmentDensityMapDeferred == rhs.fragmentDensityMapDeferred );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMapDeferred == rhs.fragmentDensityMapDeferred );
# endif
}
@@ -52580,19 +54844,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT ) ==
- sizeof( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT>::value,
- "PhysicalDeviceFragmentDensityMap2FeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT>
@@ -52605,52 +54860,50 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceFragmentDensityMap2PropertiesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess_ = {},
- uint32_t maxSubsampledArrayLayers_ = {},
- uint32_t maxDescriptorSetSubsampledSamplers_ = {} ) VULKAN_HPP_NOEXCEPT
- : subsampledLoads( subsampledLoads_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess_ = {},
+ uint32_t maxSubsampledArrayLayers_ = {},
+ uint32_t maxDescriptorSetSubsampledSamplers_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , subsampledLoads( subsampledLoads_ )
, subsampledCoarseReconstructionEarlyAccess( subsampledCoarseReconstructionEarlyAccess_ )
, maxSubsampledArrayLayers( maxSubsampledArrayLayers_ )
, maxDescriptorSetSubsampledSamplers( maxDescriptorSetSubsampledSamplers_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT(
- PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceFragmentDensityMap2PropertiesEXT( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceFragmentDensityMap2PropertiesEXT( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceFragmentDensityMap2PropertiesEXT(
- *reinterpret_cast<PhysicalDeviceFragmentDensityMap2PropertiesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceFragmentDensityMap2PropertiesEXT( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceFragmentDensityMap2PropertiesEXT( *reinterpret_cast<PhysicalDeviceFragmentDensityMap2PropertiesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceFragmentDensityMap2PropertiesEXT &
- operator=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceFragmentDensityMap2PropertiesEXT & operator=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceFragmentDensityMap2PropertiesEXT &
- operator=( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFragmentDensityMap2PropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMap2PropertiesEXT *>( this );
}
- explicit operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMap2PropertiesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -52663,12 +54916,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- subsampledLoads,
- subsampledCoarseReconstructionEarlyAccess,
- maxSubsampledArrayLayers,
- maxDescriptorSetSubsampledSamplers );
+ return std::tie( sType, pNext, subsampledLoads, subsampledCoarseReconstructionEarlyAccess, maxSubsampledArrayLayers, maxDescriptorSetSubsampledSamplers );
}
#endif
@@ -52677,13 +54925,12 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subsampledLoads == rhs.subsampledLoads ) &&
( subsampledCoarseReconstructionEarlyAccess == rhs.subsampledCoarseReconstructionEarlyAccess ) &&
- ( maxSubsampledArrayLayers == rhs.maxSubsampledArrayLayers ) &&
- ( maxDescriptorSetSubsampledSamplers == rhs.maxDescriptorSetSubsampledSamplers );
+ ( maxSubsampledArrayLayers == rhs.maxSubsampledArrayLayers ) && ( maxDescriptorSetSubsampledSamplers == rhs.maxDescriptorSetSubsampledSamplers );
# endif
}
@@ -52694,22 +54941,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads = {};
VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess = {};
uint32_t maxSubsampledArrayLayers = {};
uint32_t maxDescriptorSetSubsampledSamplers = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT ) ==
- sizeof( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT>::value,
- "PhysicalDeviceFragmentDensityMap2PropertiesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT>
@@ -52722,34 +54960,31 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceFragmentDensityMapFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ = {} ) VULKAN_HPP_NOEXCEPT
- : fragmentDensityMap( fragmentDensityMap_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , fragmentDensityMap( fragmentDensityMap_ )
, fragmentDensityMapDynamic( fragmentDensityMapDynamic_ )
, fragmentDensityMapNonSubsampledImages( fragmentDensityMapNonSubsampledImages_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT(
- PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceFragmentDensityMapFeaturesEXT( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceFragmentDensityMapFeaturesEXT(
- *reinterpret_cast<PhysicalDeviceFragmentDensityMapFeaturesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceFragmentDensityMapFeaturesEXT( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceFragmentDensityMapFeaturesEXT( *reinterpret_cast<PhysicalDeviceFragmentDensityMapFeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceFragmentDensityMapFeaturesEXT &
- operator=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceFragmentDensityMapFeaturesEXT &
- operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT const *>( &rhs );
return *this;
@@ -52763,38 +54998,38 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT &
- setFragmentDensityMap( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ ) VULKAN_HPP_NOEXCEPT
+ setFragmentDensityMap( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ ) VULKAN_HPP_NOEXCEPT
{
fragmentDensityMap = fragmentDensityMap_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT &
- setFragmentDensityMapDynamic( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ ) VULKAN_HPP_NOEXCEPT
+ setFragmentDensityMapDynamic( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ ) VULKAN_HPP_NOEXCEPT
{
fragmentDensityMapDynamic = fragmentDensityMapDynamic_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMapNonSubsampledImages(
- VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT &
+ setFragmentDensityMapNonSubsampledImages( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ ) VULKAN_HPP_NOEXCEPT
{
fragmentDensityMapNonSubsampledImages = fragmentDensityMapNonSubsampledImages_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -52806,8 +55041,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- sType, pNext, fragmentDensityMap, fragmentDensityMapDynamic, fragmentDensityMapNonSubsampledImages );
+ return std::tie( sType, pNext, fragmentDensityMap, fragmentDensityMapDynamic, fragmentDensityMapNonSubsampledImages );
}
#endif
@@ -52816,7 +55050,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMap == rhs.fragmentDensityMap ) &&
@@ -52832,21 +55066,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT ) ==
- sizeof( VkPhysicalDeviceFragmentDensityMapFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT>::value,
- "PhysicalDeviceFragmentDensityMapFeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT>
@@ -52859,63 +55084,60 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM(
- VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset_ = {} ) VULKAN_HPP_NOEXCEPT
- : fragmentDensityMapOffset( fragmentDensityMapOffset_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , fragmentDensityMapOffset( fragmentDensityMapOffset_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM(
- PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM(
- VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM(
- *reinterpret_cast<PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const *>( &rhs ) )
- {}
+ PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM( *reinterpret_cast<PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM &
operator=( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM &
- operator=( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM & operator=( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM &
- setFragmentDensityMapOffset( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset_ ) VULKAN_HPP_NOEXCEPT
+ setFragmentDensityMapOffset( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset_ ) VULKAN_HPP_NOEXCEPT
{
fragmentDensityMapOffset = fragmentDensityMapOffset_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM *>( this );
}
- explicit operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -52932,11 +55154,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( fragmentDensityMapOffset == rhs.fragmentDensityMapOffset );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMapOffset == rhs.fragmentDensityMapOffset );
# endif
}
@@ -52947,19 +55168,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM ) ==
- sizeof( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM>::value,
- "PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM>
@@ -52972,47 +55184,45 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM(
- VULKAN_HPP_NAMESPACE::Extent2D fragmentDensityOffsetGranularity_ = {} ) VULKAN_HPP_NOEXCEPT
- : fragmentDensityOffsetGranularity( fragmentDensityOffsetGranularity_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM( VULKAN_HPP_NAMESPACE::Extent2D fragmentDensityOffsetGranularity_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , fragmentDensityOffsetGranularity( fragmentDensityOffsetGranularity_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM(
- PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM(
- VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM(
- *reinterpret_cast<PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const *>( &rhs ) )
- {}
+ PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM( *reinterpret_cast<PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM &
operator=( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM &
- operator=( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM & operator=( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM *>( this );
}
- explicit operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -53029,11 +55239,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( fragmentDensityOffsetGranularity == rhs.fragmentDensityOffsetGranularity );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityOffsetGranularity == rhs.fragmentDensityOffsetGranularity );
# endif
}
@@ -53044,19 +55253,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Extent2D fragmentDensityOffsetGranularity = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM ) ==
- sizeof( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<
- VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM>::value,
- "PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM>
@@ -53069,50 +55269,48 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceFragmentDensityMapPropertiesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT(
- VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations_ = {} ) VULKAN_HPP_NOEXCEPT
- : minFragmentDensityTexelSize( minFragmentDensityTexelSize_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , minFragmentDensityTexelSize( minFragmentDensityTexelSize_ )
, maxFragmentDensityTexelSize( maxFragmentDensityTexelSize_ )
, fragmentDensityInvocations( fragmentDensityInvocations_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT(
- PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceFragmentDensityMapPropertiesEXT( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceFragmentDensityMapPropertiesEXT( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceFragmentDensityMapPropertiesEXT(
- *reinterpret_cast<PhysicalDeviceFragmentDensityMapPropertiesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceFragmentDensityMapPropertiesEXT( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceFragmentDensityMapPropertiesEXT( *reinterpret_cast<PhysicalDeviceFragmentDensityMapPropertiesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceFragmentDensityMapPropertiesEXT &
- operator=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceFragmentDensityMapPropertiesEXT &
- operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapPropertiesEXT *>( this );
}
- explicit operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapPropertiesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -53124,8 +55322,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- sType, pNext, minFragmentDensityTexelSize, maxFragmentDensityTexelSize, fragmentDensityInvocations );
+ return std::tie( sType, pNext, minFragmentDensityTexelSize, maxFragmentDensityTexelSize, fragmentDensityInvocations );
}
#endif
@@ -53134,13 +55331,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( minFragmentDensityTexelSize == rhs.minFragmentDensityTexelSize ) &&
- ( maxFragmentDensityTexelSize == rhs.maxFragmentDensityTexelSize ) &&
- ( fragmentDensityInvocations == rhs.fragmentDensityInvocations );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minFragmentDensityTexelSize == rhs.minFragmentDensityTexelSize ) &&
+ ( maxFragmentDensityTexelSize == rhs.maxFragmentDensityTexelSize ) && ( fragmentDensityInvocations == rhs.fragmentDensityInvocations );
# endif
}
@@ -53151,21 +55346,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize = {};
VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT ) ==
- sizeof( VkPhysicalDeviceFragmentDensityMapPropertiesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT>::value,
- "PhysicalDeviceFragmentDensityMapPropertiesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT>
@@ -53173,68 +55359,65 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PhysicalDeviceFragmentDensityMapPropertiesEXT;
};
- struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV
+ struct PhysicalDeviceFragmentShaderBarycentricFeaturesKHR
{
- using NativeType = VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV;
+ using NativeType = VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV(
- VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ = {} ) VULKAN_HPP_NOEXCEPT
- : fragmentShaderBarycentric( fragmentShaderBarycentric_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , fragmentShaderBarycentric( fragmentShaderBarycentric_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV(
- PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceFragmentShaderBarycentricFeaturesNV(
- *reinterpret_cast<PhysicalDeviceFragmentShaderBarycentricFeaturesNV const *>( &rhs ) )
- {}
+ PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( *reinterpret_cast<PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceFragmentShaderBarycentricFeaturesNV &
- operator=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceFragmentShaderBarycentricFeaturesKHR &
+ operator=( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceFragmentShaderBarycentricFeaturesNV &
- operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesNV &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesNV &
- setFragmentShaderBarycentric( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesKHR &
+ setFragmentShaderBarycentric( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ ) VULKAN_HPP_NOEXCEPT
{
fragmentShaderBarycentric = fragmentShaderBarycentric_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV *>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR *>( this );
}
- explicit operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV *>( this );
+ return *reinterpret_cast<VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -53247,43 +55430,121 @@ namespace VULKAN_HPP_NAMESPACE
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & ) const = default;
+ auto operator<=>( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & ) const = default;
#else
- bool operator==( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( fragmentShaderBarycentric == rhs.fragmentShaderBarycentric );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentShaderBarycentric == rhs.fragmentShaderBarycentric );
# endif
}
- bool operator!=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) ==
- sizeof( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV>::value,
- "PhysicalDeviceFragmentShaderBarycentricFeaturesNV is not nothrow_move_constructible!" );
template <>
- struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR>
+ {
+ using Type = PhysicalDeviceFragmentShaderBarycentricFeaturesKHR;
+ };
+ using PhysicalDeviceFragmentShaderBarycentricFeaturesNV = PhysicalDeviceFragmentShaderBarycentricFeaturesKHR;
+
+ struct PhysicalDeviceFragmentShaderBarycentricPropertiesKHR
+ {
+ using NativeType = VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 triStripVertexOrderIndependentOfProvokingVertex_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , triStripVertexOrderIndependentOfProvokingVertex( triStripVertexOrderIndependentOfProvokingVertex_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( *reinterpret_cast<PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceFragmentShaderBarycentricPropertiesKHR &
+ operator=( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceFragmentShaderBarycentricPropertiesKHR & operator=( VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const *>( &rhs );
+ return *this;
+ }
+
+ operator VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR *>( this );
+ }
+
+ operator VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, triStripVertexOrderIndependentOfProvokingVertex );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+ ( triStripVertexOrderIndependentOfProvokingVertex == rhs.triStripVertexOrderIndependentOfProvokingVertex );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 triStripVertexOrderIndependentOfProvokingVertex = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR>
{
- using Type = PhysicalDeviceFragmentShaderBarycentricFeaturesNV;
+ using Type = PhysicalDeviceFragmentShaderBarycentricPropertiesKHR;
};
struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT
@@ -53291,80 +55552,77 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ = {} ) VULKAN_HPP_NOEXCEPT
- : fragmentShaderSampleInterlock( fragmentShaderSampleInterlock_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , fragmentShaderSampleInterlock( fragmentShaderSampleInterlock_ )
, fragmentShaderPixelInterlock( fragmentShaderPixelInterlock_ )
, fragmentShaderShadingRateInterlock( fragmentShaderShadingRateInterlock_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT(
- PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceFragmentShaderInterlockFeaturesEXT( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceFragmentShaderInterlockFeaturesEXT(
- *reinterpret_cast<PhysicalDeviceFragmentShaderInterlockFeaturesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceFragmentShaderInterlockFeaturesEXT( *reinterpret_cast<PhysicalDeviceFragmentShaderInterlockFeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceFragmentShaderInterlockFeaturesEXT &
- operator=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceFragmentShaderInterlockFeaturesEXT &
- operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderSampleInterlock(
- VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT &
+ setFragmentShaderSampleInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ ) VULKAN_HPP_NOEXCEPT
{
fragmentShaderSampleInterlock = fragmentShaderSampleInterlock_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT &
- setFragmentShaderPixelInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ ) VULKAN_HPP_NOEXCEPT
+ setFragmentShaderPixelInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ ) VULKAN_HPP_NOEXCEPT
{
fragmentShaderPixelInterlock = fragmentShaderPixelInterlock_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderShadingRateInterlock(
- VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT &
+ setFragmentShaderShadingRateInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ ) VULKAN_HPP_NOEXCEPT
{
fragmentShaderShadingRateInterlock = fragmentShaderShadingRateInterlock_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -53376,8 +55634,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- sType, pNext, fragmentShaderSampleInterlock, fragmentShaderPixelInterlock, fragmentShaderShadingRateInterlock );
+ return std::tie( sType, pNext, fragmentShaderSampleInterlock, fragmentShaderPixelInterlock, fragmentShaderShadingRateInterlock );
}
#endif
@@ -53386,11 +55643,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( fragmentShaderSampleInterlock == rhs.fragmentShaderSampleInterlock ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentShaderSampleInterlock == rhs.fragmentShaderSampleInterlock ) &&
( fragmentShaderPixelInterlock == rhs.fragmentShaderPixelInterlock ) &&
( fragmentShaderShadingRateInterlock == rhs.fragmentShaderShadingRateInterlock );
# endif
@@ -53403,21 +55659,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) ==
- sizeof( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT>::value,
- "PhysicalDeviceFragmentShaderInterlockFeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT>
@@ -53430,80 +55677,77 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsFeaturesNV(
- VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ = {} ) VULKAN_HPP_NOEXCEPT
- : fragmentShadingRateEnums( fragmentShadingRateEnums_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , fragmentShadingRateEnums( fragmentShadingRateEnums_ )
, supersampleFragmentShadingRates( supersampleFragmentShadingRates_ )
, noInvocationFragmentShadingRates( noInvocationFragmentShadingRates_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsFeaturesNV(
- PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceFragmentShadingRateEnumsFeaturesNV(
- *reinterpret_cast<PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const *>( &rhs ) )
- {}
+ PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( *reinterpret_cast<PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceFragmentShadingRateEnumsFeaturesNV &
- operator=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & operator=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceFragmentShadingRateEnumsFeaturesNV &
- operator=( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & operator=( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV &
- setFragmentShadingRateEnums( VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_ ) VULKAN_HPP_NOEXCEPT
+ setFragmentShadingRateEnums( VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_ ) VULKAN_HPP_NOEXCEPT
{
fragmentShadingRateEnums = fragmentShadingRateEnums_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setSupersampleFragmentShadingRates(
- VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV &
+ setSupersampleFragmentShadingRates( VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT
{
supersampleFragmentShadingRates = supersampleFragmentShadingRates_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setNoInvocationFragmentShadingRates(
- VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV &
+ setNoInvocationFragmentShadingRates( VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT
{
noInvocationFragmentShadingRates = noInvocationFragmentShadingRates_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV *>( this );
}
- explicit operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -53515,8 +55759,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- sType, pNext, fragmentShadingRateEnums, supersampleFragmentShadingRates, noInvocationFragmentShadingRates );
+ return std::tie( sType, pNext, fragmentShadingRateEnums, supersampleFragmentShadingRates, noInvocationFragmentShadingRates );
}
#endif
@@ -53525,11 +55768,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( fragmentShadingRateEnums == rhs.fragmentShadingRateEnums ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentShadingRateEnums == rhs.fragmentShadingRateEnums ) &&
( supersampleFragmentShadingRates == rhs.supersampleFragmentShadingRates ) &&
( noInvocationFragmentShadingRates == rhs.noInvocationFragmentShadingRates );
# endif
@@ -53542,21 +55784,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums = {};
VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates = {};
VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV ) ==
- sizeof( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV>::value,
- "PhysicalDeviceFragmentShadingRateEnumsFeaturesNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV>
@@ -53569,71 +55802,65 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsPropertiesNV(
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_ =
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1 ) VULKAN_HPP_NOEXCEPT
- : maxFragmentShadingRateInvocationCount( maxFragmentShadingRateInvocationCount_ )
- {}
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxFragmentShadingRateInvocationCount( maxFragmentShadingRateInvocationCount_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsPropertiesNV(
- PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceFragmentShadingRateEnumsPropertiesNV(
- VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceFragmentShadingRateEnumsPropertiesNV(
- *reinterpret_cast<PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const *>( &rhs ) )
- {}
+ PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( *reinterpret_cast<PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceFragmentShadingRateEnumsPropertiesNV &
operator=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceFragmentShadingRateEnumsPropertiesNV &
- operator=( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & operator=( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsPropertiesNV &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsPropertiesNV &
- setMaxFragmentShadingRateInvocationCount(
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_ ) VULKAN_HPP_NOEXCEPT
+ setMaxFragmentShadingRateInvocationCount( VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_ ) VULKAN_HPP_NOEXCEPT
{
maxFragmentShadingRateInvocationCount = maxFragmentShadingRateInvocationCount_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV *>( this );
}
- explicit operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -53646,11 +55873,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( maxFragmentShadingRateInvocationCount == rhs.maxFragmentShadingRateInvocationCount );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxFragmentShadingRateInvocationCount == rhs.maxFragmentShadingRateInvocationCount );
# endif
}
@@ -53661,20 +55887,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount =
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV ) ==
- sizeof( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV>::value,
- "PhysicalDeviceFragmentShadingRateEnumsPropertiesNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV>
@@ -53687,34 +55903,31 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceFragmentShadingRateFeaturesKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR(
- VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ = {} ) VULKAN_HPP_NOEXCEPT
- : pipelineFragmentShadingRate( pipelineFragmentShadingRate_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pipelineFragmentShadingRate( pipelineFragmentShadingRate_ )
, primitiveFragmentShadingRate( primitiveFragmentShadingRate_ )
, attachmentFragmentShadingRate( attachmentFragmentShadingRate_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR(
- PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceFragmentShadingRateFeaturesKHR( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceFragmentShadingRateFeaturesKHR(
- *reinterpret_cast<PhysicalDeviceFragmentShadingRateFeaturesKHR const *>( &rhs ) )
- {}
+ PhysicalDeviceFragmentShadingRateFeaturesKHR( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceFragmentShadingRateFeaturesKHR( *reinterpret_cast<PhysicalDeviceFragmentShadingRateFeaturesKHR const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceFragmentShadingRateFeaturesKHR &
- operator=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceFragmentShadingRateFeaturesKHR & operator=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceFragmentShadingRateFeaturesKHR &
- operator=( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFragmentShadingRateFeaturesKHR & operator=( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR const *>( &rhs );
return *this;
@@ -53728,38 +55941,38 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR &
- setPipelineFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
+ setPipelineFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
{
pipelineFragmentShadingRate = pipelineFragmentShadingRate_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR &
- setPrimitiveFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
+ setPrimitiveFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
{
primitiveFragmentShadingRate = primitiveFragmentShadingRate_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & setAttachmentFragmentShadingRate(
- VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR &
+ setAttachmentFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
{
attachmentFragmentShadingRate = attachmentFragmentShadingRate_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateFeaturesKHR *>( this );
}
- explicit operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateFeaturesKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -53771,8 +55984,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- sType, pNext, pipelineFragmentShadingRate, primitiveFragmentShadingRate, attachmentFragmentShadingRate );
+ return std::tie( sType, pNext, pipelineFragmentShadingRate, primitiveFragmentShadingRate, attachmentFragmentShadingRate );
}
#endif
@@ -53781,13 +55993,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( pipelineFragmentShadingRate == rhs.pipelineFragmentShadingRate ) &&
- ( primitiveFragmentShadingRate == rhs.primitiveFragmentShadingRate ) &&
- ( attachmentFragmentShadingRate == rhs.attachmentFragmentShadingRate );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineFragmentShadingRate == rhs.pipelineFragmentShadingRate ) &&
+ ( primitiveFragmentShadingRate == rhs.primitiveFragmentShadingRate ) && ( attachmentFragmentShadingRate == rhs.attachmentFragmentShadingRate );
# endif
}
@@ -53798,21 +56008,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate = {};
VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate = {};
VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR ) ==
- sizeof( VkPhysicalDeviceFragmentShadingRateFeaturesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR>::value,
- "PhysicalDeviceFragmentShadingRateFeaturesKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR>
@@ -53825,53 +56026,50 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceFragmentShadingRateKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceFragmentShadingRateKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceFragmentShadingRateKHR( VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {} ) VULKAN_HPP_NOEXCEPT
- : sampleCounts( sampleCounts_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR( VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , sampleCounts( sampleCounts_ )
, fragmentSize( fragmentSize_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR( PhysicalDeviceFragmentShadingRateKHR const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFragmentShadingRateKHR( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceFragmentShadingRateKHR( *reinterpret_cast<PhysicalDeviceFragmentShadingRateKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceFragmentShadingRateKHR &
- operator=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceFragmentShadingRateKHR & operator=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceFragmentShadingRateKHR &
- operator=( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFragmentShadingRateKHR & operator=( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceFragmentShadingRateKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFragmentShadingRateKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateKHR *>( this );
}
- explicit operator VkPhysicalDeviceFragmentShadingRateKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFragmentShadingRateKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::SampleCountFlags const &,
- VULKAN_HPP_NAMESPACE::Extent2D const &>
+ std::
+ tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -53884,11 +56082,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceFragmentShadingRateKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleCounts == rhs.sampleCounts ) &&
- ( fragmentSize == rhs.fragmentSize );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleCounts == rhs.sampleCounts ) && ( fragmentSize == rhs.fragmentSize );
# endif
}
@@ -53904,14 +56101,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {};
VULKAN_HPP_NAMESPACE::Extent2D fragmentSize = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR ) ==
- sizeof( VkPhysicalDeviceFragmentShadingRateKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>::value,
- "PhysicalDeviceFragmentShadingRateKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateKHR>
@@ -53924,8 +56113,7 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceFragmentShadingRatePropertiesKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRatePropertiesKHR(
@@ -53938,16 +56126,17 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize_ = {},
uint32_t maxFragmentSizeAspectRatio_ = {},
uint32_t maxFragmentShadingRateCoverageSamples_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples_ =
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
- VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner_ = {} ) VULKAN_HPP_NOEXCEPT
- : minFragmentShadingRateAttachmentTexelSize( minFragmentShadingRateAttachmentTexelSize_ )
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , minFragmentShadingRateAttachmentTexelSize( minFragmentShadingRateAttachmentTexelSize_ )
, maxFragmentShadingRateAttachmentTexelSize( maxFragmentShadingRateAttachmentTexelSize_ )
, maxFragmentShadingRateAttachmentTexelSizeAspectRatio( maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ )
, primitiveFragmentShadingRateWithMultipleViewports( primitiveFragmentShadingRateWithMultipleViewports_ )
@@ -53964,39 +56153,37 @@ namespace VULKAN_HPP_NAMESPACE
, fragmentShadingRateWithFragmentShaderInterlock( fragmentShadingRateWithFragmentShaderInterlock_ )
, fragmentShadingRateWithCustomSampleLocations( fragmentShadingRateWithCustomSampleLocations_ )
, fragmentShadingRateStrictMultiplyCombiner( fragmentShadingRateStrictMultiplyCombiner_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRatePropertiesKHR(
- PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceFragmentShadingRatePropertiesKHR( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceFragmentShadingRatePropertiesKHR( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceFragmentShadingRatePropertiesKHR(
- *reinterpret_cast<PhysicalDeviceFragmentShadingRatePropertiesKHR const *>( &rhs ) )
- {}
+ PhysicalDeviceFragmentShadingRatePropertiesKHR( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceFragmentShadingRatePropertiesKHR( *reinterpret_cast<PhysicalDeviceFragmentShadingRatePropertiesKHR const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceFragmentShadingRatePropertiesKHR &
- operator=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceFragmentShadingRatePropertiesKHR & operator=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceFragmentShadingRatePropertiesKHR &
- operator=( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFragmentShadingRatePropertiesKHR & operator=( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRatePropertiesKHR *>( this );
}
- explicit operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRatePropertiesKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -54049,28 +56236,23 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
( minFragmentShadingRateAttachmentTexelSize == rhs.minFragmentShadingRateAttachmentTexelSize ) &&
( maxFragmentShadingRateAttachmentTexelSize == rhs.maxFragmentShadingRateAttachmentTexelSize ) &&
- ( maxFragmentShadingRateAttachmentTexelSizeAspectRatio ==
- rhs.maxFragmentShadingRateAttachmentTexelSizeAspectRatio ) &&
- ( primitiveFragmentShadingRateWithMultipleViewports ==
- rhs.primitiveFragmentShadingRateWithMultipleViewports ) &&
+ ( maxFragmentShadingRateAttachmentTexelSizeAspectRatio == rhs.maxFragmentShadingRateAttachmentTexelSizeAspectRatio ) &&
+ ( primitiveFragmentShadingRateWithMultipleViewports == rhs.primitiveFragmentShadingRateWithMultipleViewports ) &&
( layeredShadingRateAttachments == rhs.layeredShadingRateAttachments ) &&
- ( fragmentShadingRateNonTrivialCombinerOps == rhs.fragmentShadingRateNonTrivialCombinerOps ) &&
- ( maxFragmentSize == rhs.maxFragmentSize ) &&
+ ( fragmentShadingRateNonTrivialCombinerOps == rhs.fragmentShadingRateNonTrivialCombinerOps ) && ( maxFragmentSize == rhs.maxFragmentSize ) &&
( maxFragmentSizeAspectRatio == rhs.maxFragmentSizeAspectRatio ) &&
( maxFragmentShadingRateCoverageSamples == rhs.maxFragmentShadingRateCoverageSamples ) &&
( maxFragmentShadingRateRasterizationSamples == rhs.maxFragmentShadingRateRasterizationSamples ) &&
- ( fragmentShadingRateWithShaderDepthStencilWrites ==
- rhs.fragmentShadingRateWithShaderDepthStencilWrites ) &&
+ ( fragmentShadingRateWithShaderDepthStencilWrites == rhs.fragmentShadingRateWithShaderDepthStencilWrites ) &&
( fragmentShadingRateWithSampleMask == rhs.fragmentShadingRateWithSampleMask ) &&
( fragmentShadingRateWithShaderSampleMask == rhs.fragmentShadingRateWithShaderSampleMask ) &&
- ( fragmentShadingRateWithConservativeRasterization ==
- rhs.fragmentShadingRateWithConservativeRasterization ) &&
+ ( fragmentShadingRateWithConservativeRasterization == rhs.fragmentShadingRateWithConservativeRasterization ) &&
( fragmentShadingRateWithFragmentShaderInterlock == rhs.fragmentShadingRateWithFragmentShaderInterlock ) &&
( fragmentShadingRateWithCustomSampleLocations == rhs.fragmentShadingRateWithCustomSampleLocations ) &&
( fragmentShadingRateStrictMultiplyCombiner == rhs.fragmentShadingRateStrictMultiplyCombiner );
@@ -54084,10 +56266,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize = {};
- VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize = {};
+ VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize = {};
uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio = {};
VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateWithMultipleViewports = {};
VULKAN_HPP_NAMESPACE::Bool32 layeredShadingRateAttachments = {};
@@ -54095,25 +56277,15 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize = {};
uint32_t maxFragmentSizeAspectRatio = {};
uint32_t maxFragmentShadingRateCoverageSamples = {};
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples =
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
- VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites = {};
- VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask = {};
- VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask = {};
- VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization = {};
- VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock = {};
- VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations = {};
- VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR ) ==
- sizeof( VkPhysicalDeviceFragmentShadingRatePropertiesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR>::value,
- "PhysicalDeviceFragmentShadingRatePropertiesKHR is not nothrow_move_constructible!" );
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites = {};
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask = {};
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask = {};
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization = {};
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock = {};
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations = {};
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner = {};
+ };
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR>
@@ -54126,30 +56298,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeaturesKHR(
- VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ = {} ) VULKAN_HPP_NOEXCEPT
- : globalPriorityQuery( globalPriorityQuery_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , globalPriorityQuery( globalPriorityQuery_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeaturesKHR(
- PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeaturesKHR( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceGlobalPriorityQueryFeaturesKHR( VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceGlobalPriorityQueryFeaturesKHR(
- *reinterpret_cast<PhysicalDeviceGlobalPriorityQueryFeaturesKHR const *>( &rhs ) )
- {}
+ PhysicalDeviceGlobalPriorityQueryFeaturesKHR( VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceGlobalPriorityQueryFeaturesKHR( *reinterpret_cast<PhysicalDeviceGlobalPriorityQueryFeaturesKHR const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceGlobalPriorityQueryFeaturesKHR &
- operator=( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceGlobalPriorityQueryFeaturesKHR & operator=( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceGlobalPriorityQueryFeaturesKHR &
- operator=( VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceGlobalPriorityQueryFeaturesKHR & operator=( VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR const *>( &rhs );
return *this;
@@ -54163,24 +56332,24 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeaturesKHR &
- setGlobalPriorityQuery( VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ ) VULKAN_HPP_NOEXCEPT
+ setGlobalPriorityQuery( VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ ) VULKAN_HPP_NOEXCEPT
{
globalPriorityQuery = globalPriorityQuery_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR *>( this );
}
- explicit operator VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -54197,7 +56366,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( globalPriorityQuery == rhs.globalPriorityQuery );
@@ -54211,19 +56380,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesKHR;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesKHR;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR ) ==
- sizeof( VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR>::value,
- "PhysicalDeviceGlobalPriorityQueryFeaturesKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesKHR>
@@ -54232,6 +56392,217 @@ namespace VULKAN_HPP_NAMESPACE
};
using PhysicalDeviceGlobalPriorityQueryFeaturesEXT = PhysicalDeviceGlobalPriorityQueryFeaturesKHR;
+ struct PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT
+ {
+ using NativeType = VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibrary_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , graphicsPipelineLibrary( graphicsPipelineLibrary_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( *reinterpret_cast<PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & operator=( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & operator=( VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT &
+ setGraphicsPipelineLibrary( VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibrary_ ) VULKAN_HPP_NOEXCEPT
+ {
+ graphicsPipelineLibrary = graphicsPipelineLibrary_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, graphicsPipelineLibrary );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( graphicsPipelineLibrary == rhs.graphicsPipelineLibrary );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibrary = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT>
+ {
+ using Type = PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT;
+ };
+
+ struct PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT
+ {
+ using NativeType = VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryFastLinking_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryIndependentInterpolationDecoration_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , graphicsPipelineLibraryFastLinking( graphicsPipelineLibraryFastLinking_ )
+ , graphicsPipelineLibraryIndependentInterpolationDecoration( graphicsPipelineLibraryIndependentInterpolationDecoration_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( *reinterpret_cast<PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT &
+ operator=( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & operator=( VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT &
+ setGraphicsPipelineLibraryFastLinking( VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryFastLinking_ ) VULKAN_HPP_NOEXCEPT
+ {
+ graphicsPipelineLibraryFastLinking = graphicsPipelineLibraryFastLinking_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & setGraphicsPipelineLibraryIndependentInterpolationDecoration(
+ VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryIndependentInterpolationDecoration_ ) VULKAN_HPP_NOEXCEPT
+ {
+ graphicsPipelineLibraryIndependentInterpolationDecoration = graphicsPipelineLibraryIndependentInterpolationDecoration_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, graphicsPipelineLibraryFastLinking, graphicsPipelineLibraryIndependentInterpolationDecoration );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( graphicsPipelineLibraryFastLinking == rhs.graphicsPipelineLibraryFastLinking ) &&
+ ( graphicsPipelineLibraryIndependentInterpolationDecoration == rhs.graphicsPipelineLibraryIndependentInterpolationDecoration );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryFastLinking = {};
+ VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryIndependentInterpolationDecoration = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT>
+ {
+ using Type = PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT;
+ };
+
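    For orientation, a minimal, hedged usage sketch of how a newly added feature struct such as PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT is typically queried through a pNext chain with vulkan.hpp; it assumes <vulkan/vulkan.hpp> is included and that a valid vk::PhysicalDevice named physicalDevice already exists:

      // Query the extension feature struct together with the core features in one StructureChain.
      auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
                                               vk::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT>();
      auto const & gplFeatures = chain.get<vk::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT>();
      if ( gplFeatures.graphicsPipelineLibrary )
      {
        // The device reports support for VK_EXT_graphics_pipeline_library.
      }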
struct PhysicalDeviceGroupProperties
{
using NativeType = VkPhysicalDeviceGroupProperties;
@@ -54240,25 +56611,27 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGroupProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties(
- uint32_t physicalDeviceCount_ = {},
- std::array<VULKAN_HPP_NAMESPACE::PhysicalDevice, VK_MAX_DEVICE_GROUP_SIZE> const & physicalDevices_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {} ) VULKAN_HPP_NOEXCEPT
- : physicalDeviceCount( physicalDeviceCount_ )
+ VULKAN_HPP_CONSTEXPR_14
+ PhysicalDeviceGroupProperties( uint32_t physicalDeviceCount_ = {},
+ std::array<VULKAN_HPP_NAMESPACE::PhysicalDevice, VK_MAX_DEVICE_GROUP_SIZE> const & physicalDevices_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , physicalDeviceCount( physicalDeviceCount_ )
, physicalDevices( physicalDevices_ )
, subsetAllocation( subsetAllocation_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14
- PhysicalDeviceGroupProperties( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceGroupProperties( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceGroupProperties( *reinterpret_cast<PhysicalDeviceGroupProperties const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceGroupProperties &
- operator=( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceGroupProperties & operator=( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceGroupProperties & operator=( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -54266,26 +56639,25 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkPhysicalDeviceGroupProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceGroupProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceGroupProperties *>( this );
}
- explicit operator VkPhysicalDeviceGroupProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceGroupProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceGroupProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<
- VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- uint32_t const &,
- VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::PhysicalDevice, VK_MAX_DEVICE_GROUP_SIZE> const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ uint32_t const &,
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::PhysicalDevice, VK_MAX_DEVICE_GROUP_SIZE> const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -54298,7 +56670,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) &&
@@ -54313,21 +56685,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGroupProperties;
- void * pNext = {};
- uint32_t physicalDeviceCount = {};
- VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::PhysicalDevice, VK_MAX_DEVICE_GROUP_SIZE>
- physicalDevices = {};
- VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGroupProperties;
+ void * pNext = {};
+ uint32_t physicalDeviceCount = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::PhysicalDevice, VK_MAX_DEVICE_GROUP_SIZE> physicalDevices = {};
+ VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties ) ==
- sizeof( VkPhysicalDeviceGroupProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>::value,
- "PhysicalDeviceGroupProperties is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceGroupProperties>
@@ -54341,28 +56704,26 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceHostQueryResetFeatures;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceHostQueryResetFeatures;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostQueryResetFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceHostQueryResetFeatures( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {} ) VULKAN_HPP_NOEXCEPT
- : hostQueryReset( hostQueryReset_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , hostQueryReset( hostQueryReset_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( PhysicalDeviceHostQueryResetFeatures const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceHostQueryResetFeatures( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceHostQueryResetFeatures( *reinterpret_cast<PhysicalDeviceHostQueryResetFeatures const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceHostQueryResetFeatures &
- operator=( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceHostQueryResetFeatures & operator=( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceHostQueryResetFeatures &
- operator=( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceHostQueryResetFeatures & operator=( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures const *>( &rhs );
return *this;
@@ -54375,25 +56736,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures &
- setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT
{
hostQueryReset = hostQueryReset_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceHostQueryResetFeatures const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceHostQueryResetFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceHostQueryResetFeatures *>( this );
}
- explicit operator VkPhysicalDeviceHostQueryResetFeatures &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceHostQueryResetFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceHostQueryResetFeatures *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -54410,7 +56770,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hostQueryReset == rhs.hostQueryReset );
@@ -54428,14 +56788,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures ) ==
- sizeof( VkPhysicalDeviceHostQueryResetFeatures ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures>::value,
- "PhysicalDeviceHostQueryResetFeatures is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceHostQueryResetFeatures>
@@ -54452,25 +56804,27 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIdProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14
- PhysicalDeviceIDProperties( std::array<uint8_t, VK_UUID_SIZE> const & deviceUUID_ = {},
- std::array<uint8_t, VK_UUID_SIZE> const & driverUUID_ = {},
- std::array<uint8_t, VK_LUID_SIZE> const & deviceLUID_ = {},
- uint32_t deviceNodeMask_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {} ) VULKAN_HPP_NOEXCEPT
- : deviceUUID( deviceUUID_ )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties( std::array<uint8_t, VK_UUID_SIZE> const & deviceUUID_ = {},
+ std::array<uint8_t, VK_UUID_SIZE> const & driverUUID_ = {},
+ std::array<uint8_t, VK_LUID_SIZE> const & deviceLUID_ = {},
+ uint32_t deviceNodeMask_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , deviceUUID( deviceUUID_ )
, driverUUID( driverUUID_ )
, deviceLUID( deviceLUID_ )
, deviceNodeMask( deviceNodeMask_ )
, deviceLUIDValid( deviceLUIDValid_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14
- PhysicalDeviceIDProperties( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceIDProperties( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceIDProperties( *reinterpret_cast<PhysicalDeviceIDProperties const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceIDProperties & operator=( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -54481,17 +56835,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkPhysicalDeviceIDProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceIDProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceIDProperties *>( this );
}
- explicit operator VkPhysicalDeviceIDProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceIDProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceIDProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -54514,12 +56868,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceUUID == rhs.deviceUUID ) &&
- ( driverUUID == rhs.driverUUID ) && ( deviceLUID == rhs.deviceLUID ) &&
- ( deviceNodeMask == rhs.deviceNodeMask ) && ( deviceLUIDValid == rhs.deviceLUIDValid );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceUUID == rhs.deviceUUID ) && ( driverUUID == rhs.driverUUID ) &&
+ ( deviceLUID == rhs.deviceLUID ) && ( deviceNodeMask == rhs.deviceNodeMask ) && ( deviceLUIDValid == rhs.deviceLUIDValid );
# endif
}
@@ -54530,21 +56883,14 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIdProperties;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> deviceUUID = {};
- VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> driverUUID = {};
- VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> deviceLUID = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIdProperties;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> deviceUUID = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> driverUUID = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> deviceLUID = {};
uint32_t deviceNodeMask = {};
VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties ) ==
- sizeof( VkPhysicalDeviceIDProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties>::value,
- "PhysicalDeviceIDProperties is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceIdProperties>
@@ -54553,97 +56899,400 @@ namespace VULKAN_HPP_NAMESPACE
};
using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties;
+ struct PhysicalDeviceImage2DViewOf3DFeaturesEXT
+ {
+ using NativeType = VkPhysicalDeviceImage2DViewOf3DFeaturesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImage2DViewOf3DFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 image2DViewOf3D_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 sampler2DViewOf3D_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , image2DViewOf3D( image2DViewOf3D_ )
+ , sampler2DViewOf3D( sampler2DViewOf3D_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImage2DViewOf3DFeaturesEXT( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceImage2DViewOf3DFeaturesEXT( VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceImage2DViewOf3DFeaturesEXT( *reinterpret_cast<PhysicalDeviceImage2DViewOf3DFeaturesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceImage2DViewOf3DFeaturesEXT & operator=( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceImage2DViewOf3DFeaturesEXT & operator=( VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT & setImage2DViewOf3D( VULKAN_HPP_NAMESPACE::Bool32 image2DViewOf3D_ ) VULKAN_HPP_NOEXCEPT
+ {
+ image2DViewOf3D = image2DViewOf3D_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT &
+ setSampler2DViewOf3D( VULKAN_HPP_NAMESPACE::Bool32 sampler2DViewOf3D_ ) VULKAN_HPP_NOEXCEPT
+ {
+ sampler2DViewOf3D = sampler2DViewOf3D_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceImage2DViewOf3DFeaturesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceImage2DViewOf3DFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceImage2DViewOf3DFeaturesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, image2DViewOf3D, sampler2DViewOf3D );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image2DViewOf3D == rhs.image2DViewOf3D ) && ( sampler2DViewOf3D == rhs.sampler2DViewOf3D );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 image2DViewOf3D = {};
+ VULKAN_HPP_NAMESPACE::Bool32 sampler2DViewOf3D = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT>
+ {
+ using Type = PhysicalDeviceImage2DViewOf3DFeaturesEXT;
+ };
+
+ struct PhysicalDeviceImageCompressionControlFeaturesEXT
+ {
+ using NativeType = VkPhysicalDeviceImageCompressionControlFeaturesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControl_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , imageCompressionControl( imageCompressionControl_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceImageCompressionControlFeaturesEXT( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceImageCompressionControlFeaturesEXT( VkPhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceImageCompressionControlFeaturesEXT( *reinterpret_cast<PhysicalDeviceImageCompressionControlFeaturesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceImageCompressionControlFeaturesEXT & operator=( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceImageCompressionControlFeaturesEXT & operator=( VkPhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlFeaturesEXT &
+ setImageCompressionControl( VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControl_ ) VULKAN_HPP_NOEXCEPT
+ {
+ imageCompressionControl = imageCompressionControl_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceImageCompressionControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceImageCompressionControlFeaturesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceImageCompressionControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceImageCompressionControlFeaturesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, imageCompressionControl );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceImageCompressionControlFeaturesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageCompressionControl == rhs.imageCompressionControl );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControl = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT>
+ {
+ using Type = PhysicalDeviceImageCompressionControlFeaturesEXT;
+ };
+
+ struct PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT
+ {
+ using NativeType = VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControlSwapchain_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , imageCompressionControlSwapchain( imageCompressionControlSwapchain_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs )
+ VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT(
+ *reinterpret_cast<PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT &
+ operator=( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT &
+ operator=( VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT &
+ setImageCompressionControlSwapchain( VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControlSwapchain_ ) VULKAN_HPP_NOEXCEPT
+ {
+ imageCompressionControlSwapchain = imageCompressionControlSwapchain_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, imageCompressionControlSwapchain );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageCompressionControlSwapchain == rhs.imageCompressionControlSwapchain );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControlSwapchain = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT>
+ {
+ using Type = PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT;
+ };
+
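    Similarly, a small sketch (assumptions as above) of how one of these feature structs is commonly enabled at device creation by hooking it into the DeviceCreateInfo pNext chain; queue create infos and the matching extension name (VK_EXT_image_compression_control) are assumed to be filled in elsewhere:

      vk::PhysicalDeviceImageCompressionControlFeaturesEXT compressionFeatures;
      compressionFeatures.setImageCompressionControl( VK_TRUE );

      vk::DeviceCreateInfo deviceCreateInfo;
      deviceCreateInfo.setPNext( &compressionFeatures );  // extension feature structs ride on the pNext chain
      // ... add queue create infos and enabled extensions as usual, then:
      // vk::Device device = physicalDevice.createDevice( deviceCreateInfo );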
struct PhysicalDeviceImageDrmFormatModifierInfoEXT
{
using NativeType = VkPhysicalDeviceImageDrmFormatModifierInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT(
- uint64_t drmFormatModifier_ = {},
- VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
- uint32_t queueFamilyIndexCount_ = {},
- const uint32_t * pQueueFamilyIndices_ = {} ) VULKAN_HPP_NOEXCEPT
- : drmFormatModifier( drmFormatModifier_ )
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_ = {},
+ VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
+ uint32_t queueFamilyIndexCount_ = {},
+ const uint32_t * pQueueFamilyIndices_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , drmFormatModifier( drmFormatModifier_ )
, sharingMode( sharingMode_ )
, queueFamilyIndexCount( queueFamilyIndexCount_ )
, pQueueFamilyIndices( pQueueFamilyIndices_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT(
- PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceImageDrmFormatModifierInfoEXT(
- *reinterpret_cast<PhysicalDeviceImageDrmFormatModifierInfoEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceImageDrmFormatModifierInfoEXT( *reinterpret_cast<PhysicalDeviceImageDrmFormatModifierInfoEXT const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PhysicalDeviceImageDrmFormatModifierInfoEXT(
- uint64_t drmFormatModifier_,
- VULKAN_HPP_NAMESPACE::SharingMode sharingMode_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ )
- : drmFormatModifier( drmFormatModifier_ )
+ PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_,
+ VULKAN_HPP_NAMESPACE::SharingMode sharingMode_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , drmFormatModifier( drmFormatModifier_ )
, sharingMode( sharingMode_ )
, queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) )
, pQueueFamilyIndices( queueFamilyIndices_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceImageDrmFormatModifierInfoEXT &
- operator=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceImageDrmFormatModifierInfoEXT &
- operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT &
- setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT
{
drmFormatModifier = drmFormatModifier_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT &
- setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
{
sharingMode = sharingMode_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT &
- setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
{
queueFamilyIndexCount = queueFamilyIndexCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT &
- setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
{
pQueueFamilyIndices = pQueueFamilyIndices_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndices(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImageDrmFormatModifierInfoEXT &
+ setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
{
queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
pQueueFamilyIndices = queueFamilyIndices_.data();
@@ -54652,17 +57301,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceImageDrmFormatModifierInfoEXT *>( this );
}
- explicit operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceImageDrmFormatModifierInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -54684,12 +57333,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ) &&
- ( sharingMode == rhs.sharingMode ) && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) &&
- ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ) && ( sharingMode == rhs.sharingMode ) &&
+ ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
# endif
}
@@ -54700,22 +57348,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT;
- const void * pNext = {};
- uint64_t drmFormatModifier = {};
- VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT;
+ const void * pNext = {};
+ uint64_t drmFormatModifier = {};
+ VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
uint32_t queueFamilyIndexCount = {};
const uint32_t * pQueueFamilyIndices = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT ) ==
- sizeof( VkPhysicalDeviceImageDrmFormatModifierInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT>::value,
- "PhysicalDeviceImageDrmFormatModifierInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT>
@@ -54731,29 +57370,30 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageFormatInfo2;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2(
- VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D,
- VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {},
- VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
- : format( format_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D,
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {},
+ VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , format( format_ )
, type( type_ )
, tiling( tiling_ )
, usage( usage_ )
, flags( flags_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceImageFormatInfo2( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceImageFormatInfo2( *reinterpret_cast<PhysicalDeviceImageFormatInfo2 const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceImageFormatInfo2 &
- operator=( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceImageFormatInfo2 & operator=( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceImageFormatInfo2 & operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -54768,53 +57408,48 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 &
- setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 &
- setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 &
- setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
{
tiling = tiling_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 &
- setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
{
usage = usage_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 &
- setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( this );
}
- explicit operator VkPhysicalDeviceImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceImageFormatInfo2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -54837,11 +57472,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( type == rhs.type ) &&
- ( tiling == rhs.tiling ) && ( usage == rhs.usage ) && ( flags == rhs.flags );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( type == rhs.type ) && ( tiling == rhs.tiling ) &&
+ ( usage == rhs.usage ) && ( flags == rhs.flags );
# endif
}
@@ -54860,14 +57495,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 ) ==
- sizeof( VkPhysicalDeviceImageFormatInfo2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2>::value,
- "PhysicalDeviceImageFormatInfo2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceImageFormatInfo2>
@@ -54876,34 +57503,254 @@ namespace VULKAN_HPP_NAMESPACE
};
using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2;
+ struct PhysicalDeviceImageProcessingFeaturesQCOM
+ {
+ using NativeType = VkPhysicalDeviceImageProcessingFeaturesQCOM;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 textureSampleWeighted_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 textureBoxFilter_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , textureSampleWeighted( textureSampleWeighted_ )
+ , textureBoxFilter( textureBoxFilter_ )
+ , textureBlockMatch( textureBlockMatch_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingFeaturesQCOM( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceImageProcessingFeaturesQCOM( VkPhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceImageProcessingFeaturesQCOM( *reinterpret_cast<PhysicalDeviceImageProcessingFeaturesQCOM const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceImageProcessingFeaturesQCOM & operator=( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceImageProcessingFeaturesQCOM & operator=( VkPhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingFeaturesQCOM const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM &
+ setTextureSampleWeighted( VULKAN_HPP_NAMESPACE::Bool32 textureSampleWeighted_ ) VULKAN_HPP_NOEXCEPT
+ {
+ textureSampleWeighted = textureSampleWeighted_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM &
+ setTextureBoxFilter( VULKAN_HPP_NAMESPACE::Bool32 textureBoxFilter_ ) VULKAN_HPP_NOEXCEPT
+ {
+ textureBoxFilter = textureBoxFilter_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM &
+ setTextureBlockMatch( VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch_ ) VULKAN_HPP_NOEXCEPT
+ {
+ textureBlockMatch = textureBlockMatch_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceImageProcessingFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceImageProcessingFeaturesQCOM *>( this );
+ }
+
+ operator VkPhysicalDeviceImageProcessingFeaturesQCOM &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceImageProcessingFeaturesQCOM *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, textureSampleWeighted, textureBoxFilter, textureBlockMatch );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceImageProcessingFeaturesQCOM const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( textureSampleWeighted == rhs.textureSampleWeighted ) &&
+ ( textureBoxFilter == rhs.textureBoxFilter ) && ( textureBlockMatch == rhs.textureBlockMatch );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 textureSampleWeighted = {};
+ VULKAN_HPP_NAMESPACE::Bool32 textureBoxFilter = {};
+ VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM>
+ {
+ using Type = PhysicalDeviceImageProcessingFeaturesQCOM;
+ };
+
+ struct PhysicalDeviceImageProcessingPropertiesQCOM
+ {
+ using NativeType = VkPhysicalDeviceImageProcessingPropertiesQCOM;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageProcessingPropertiesQCOM;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingPropertiesQCOM( uint32_t maxWeightFilterPhases_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D maxWeightFilterDimension_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D maxBlockMatchRegion_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D maxBoxFilterBlockSize_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxWeightFilterPhases( maxWeightFilterPhases_ )
+ , maxWeightFilterDimension( maxWeightFilterDimension_ )
+ , maxBlockMatchRegion( maxBlockMatchRegion_ )
+ , maxBoxFilterBlockSize( maxBoxFilterBlockSize_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingPropertiesQCOM( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceImageProcessingPropertiesQCOM( VkPhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceImageProcessingPropertiesQCOM( *reinterpret_cast<PhysicalDeviceImageProcessingPropertiesQCOM const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceImageProcessingPropertiesQCOM & operator=( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceImageProcessingPropertiesQCOM & operator=( VkPhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingPropertiesQCOM const *>( &rhs );
+ return *this;
+ }
+
+ operator VkPhysicalDeviceImageProcessingPropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceImageProcessingPropertiesQCOM *>( this );
+ }
+
+ operator VkPhysicalDeviceImageProcessingPropertiesQCOM &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceImageProcessingPropertiesQCOM *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ uint32_t const &,
+ VULKAN_HPP_NAMESPACE::Extent2D const &,
+ VULKAN_HPP_NAMESPACE::Extent2D const &,
+ VULKAN_HPP_NAMESPACE::Extent2D const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, maxWeightFilterPhases, maxWeightFilterDimension, maxBlockMatchRegion, maxBoxFilterBlockSize );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceImageProcessingPropertiesQCOM const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxWeightFilterPhases == rhs.maxWeightFilterPhases ) &&
+ ( maxWeightFilterDimension == rhs.maxWeightFilterDimension ) && ( maxBlockMatchRegion == rhs.maxBlockMatchRegion ) &&
+ ( maxBoxFilterBlockSize == rhs.maxBoxFilterBlockSize );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageProcessingPropertiesQCOM;
+ void * pNext = {};
+ uint32_t maxWeightFilterPhases = {};
+ VULKAN_HPP_NAMESPACE::Extent2D maxWeightFilterDimension = {};
+ VULKAN_HPP_NAMESPACE::Extent2D maxBlockMatchRegion = {};
+ VULKAN_HPP_NAMESPACE::Extent2D maxBoxFilterBlockSize = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceImageProcessingPropertiesQCOM>
+ {
+ using Type = PhysicalDeviceImageProcessingPropertiesQCOM;
+ };
+
struct PhysicalDeviceImageRobustnessFeatures
{
using NativeType = VkPhysicalDeviceImageRobustnessFeatures;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceImageRobustnessFeatures;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageRobustnessFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceImageRobustnessFeatures( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {} ) VULKAN_HPP_NOEXCEPT
- : robustImageAccess( robustImageAccess_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeatures( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , robustImageAccess( robustImageAccess_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeatures( PhysicalDeviceImageRobustnessFeatures const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeatures( PhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceImageRobustnessFeatures( VkPhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceImageRobustnessFeatures(
- *reinterpret_cast<PhysicalDeviceImageRobustnessFeatures const *>( &rhs ) )
- {}
+ : PhysicalDeviceImageRobustnessFeatures( *reinterpret_cast<PhysicalDeviceImageRobustnessFeatures const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceImageRobustnessFeatures &
- operator=( PhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceImageRobustnessFeatures & operator=( PhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceImageRobustnessFeatures &
- operator=( VkPhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImageRobustnessFeatures & operator=( VkPhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures const *>( &rhs );
return *this;
@@ -54916,25 +57763,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeatures &
- setRobustImageAccess( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeatures & setRobustImageAccess( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT
{
robustImageAccess = robustImageAccess_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceImageRobustnessFeatures const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceImageRobustnessFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceImageRobustnessFeatures *>( this );
}
- explicit operator VkPhysicalDeviceImageRobustnessFeatures &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceImageRobustnessFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceImageRobustnessFeatures *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -54951,7 +57797,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceImageRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustImageAccess == rhs.robustImageAccess );
@@ -54969,14 +57815,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures ) ==
- sizeof( VkPhysicalDeviceImageRobustnessFeatures ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures>::value,
- "PhysicalDeviceImageRobustnessFeatures is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceImageRobustnessFeatures>
@@ -54990,31 +57828,28 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceImageViewImageFormatInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR
- PhysicalDeviceImageViewImageFormatInfoEXT( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ =
- VULKAN_HPP_NAMESPACE::ImageViewType::e1D ) VULKAN_HPP_NOEXCEPT
- : imageViewType( imageViewType_ )
- {}
+ PhysicalDeviceImageViewImageFormatInfoEXT( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D,
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , imageViewType( imageViewType_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT(
- PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceImageViewImageFormatInfoEXT(
- *reinterpret_cast<PhysicalDeviceImageViewImageFormatInfoEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceImageViewImageFormatInfoEXT( *reinterpret_cast<PhysicalDeviceImageViewImageFormatInfoEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceImageViewImageFormatInfoEXT &
- operator=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceImageViewImageFormatInfoEXT & operator=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceImageViewImageFormatInfoEXT &
- operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImageViewImageFormatInfoEXT & operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT const *>( &rhs );
return *this;
@@ -55035,17 +57870,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceImageViewImageFormatInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceImageViewImageFormatInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceImageViewImageFormatInfoEXT *>( this );
}
- explicit operator VkPhysicalDeviceImageViewImageFormatInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceImageViewImageFormatInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceImageViewImageFormatInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -55062,7 +57897,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageViewType == rhs.imageViewType );
@@ -55080,15 +57915,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageViewType imageViewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT ) ==
- sizeof( VkPhysicalDeviceImageViewImageFormatInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT>::value,
- "PhysicalDeviceImageViewImageFormatInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT>
@@ -55101,30 +57927,26 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceImageViewMinLodFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceImageViewMinLodFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 minLod_ = {} ) VULKAN_HPP_NOEXCEPT
- : minLod( minLod_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewMinLodFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 minLod_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , minLod( minLod_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewMinLodFeaturesEXT(
- PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewMinLodFeaturesEXT( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceImageViewMinLodFeaturesEXT( VkPhysicalDeviceImageViewMinLodFeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceImageViewMinLodFeaturesEXT(
- *reinterpret_cast<PhysicalDeviceImageViewMinLodFeaturesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceImageViewMinLodFeaturesEXT( VkPhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceImageViewMinLodFeaturesEXT( *reinterpret_cast<PhysicalDeviceImageViewMinLodFeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceImageViewMinLodFeaturesEXT &
- operator=( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceImageViewMinLodFeaturesEXT & operator=( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceImageViewMinLodFeaturesEXT &
- operator=( VkPhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImageViewMinLodFeaturesEXT & operator=( VkPhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT const *>( &rhs );
return *this;
@@ -55137,25 +57959,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewMinLodFeaturesEXT &
- setMinLod( VULKAN_HPP_NAMESPACE::Bool32 minLod_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewMinLodFeaturesEXT & setMinLod( VULKAN_HPP_NAMESPACE::Bool32 minLod_ ) VULKAN_HPP_NOEXCEPT
{
minLod = minLod_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceImageViewMinLodFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceImageViewMinLodFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceImageViewMinLodFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceImageViewMinLodFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceImageViewMinLodFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceImageViewMinLodFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -55172,7 +57993,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minLod == rhs.minLod );
@@ -55190,15 +58011,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 minLod = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT ) ==
- sizeof( VkPhysicalDeviceImageViewMinLodFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT>::value,
- "PhysicalDeviceImageViewMinLodFeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT>
@@ -55211,30 +58023,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceImagelessFramebufferFeatures;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceImagelessFramebufferFeatures;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures(
- VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {} ) VULKAN_HPP_NOEXCEPT
- : imagelessFramebuffer( imagelessFramebuffer_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , imagelessFramebuffer( imagelessFramebuffer_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures(
- PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceImagelessFramebufferFeatures( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceImagelessFramebufferFeatures(
- *reinterpret_cast<PhysicalDeviceImagelessFramebufferFeatures const *>( &rhs ) )
- {}
+ PhysicalDeviceImagelessFramebufferFeatures( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceImagelessFramebufferFeatures( *reinterpret_cast<PhysicalDeviceImagelessFramebufferFeatures const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceImagelessFramebufferFeatures &
- operator=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceImagelessFramebufferFeatures & operator=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceImagelessFramebufferFeatures &
- operator=( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImagelessFramebufferFeatures & operator=( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures const *>( &rhs );
return *this;
@@ -55248,24 +58057,24 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures &
- setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT
+ setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT
{
imagelessFramebuffer = imagelessFramebuffer_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceImagelessFramebufferFeatures const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceImagelessFramebufferFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceImagelessFramebufferFeatures *>( this );
}
- explicit operator VkPhysicalDeviceImagelessFramebufferFeatures &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceImagelessFramebufferFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceImagelessFramebufferFeatures *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -55282,7 +58091,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imagelessFramebuffer == rhs.imagelessFramebuffer );
@@ -55296,19 +58105,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures ) ==
- sizeof( VkPhysicalDeviceImagelessFramebufferFeatures ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures>::value,
- "PhysicalDeviceImagelessFramebufferFeatures is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceImagelessFramebufferFeatures>
@@ -55322,29 +58122,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceIndexTypeUint8FeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceIndexTypeUint8FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {} ) VULKAN_HPP_NOEXCEPT
- : indexTypeUint8( indexTypeUint8_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , indexTypeUint8( indexTypeUint8_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceIndexTypeUint8FeaturesEXT( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceIndexTypeUint8FeaturesEXT(
- *reinterpret_cast<PhysicalDeviceIndexTypeUint8FeaturesEXT const *>( &rhs ) )
- {}
+ : PhysicalDeviceIndexTypeUint8FeaturesEXT( *reinterpret_cast<PhysicalDeviceIndexTypeUint8FeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceIndexTypeUint8FeaturesEXT &
- operator=( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceIndexTypeUint8FeaturesEXT & operator=( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceIndexTypeUint8FeaturesEXT &
- operator=( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceIndexTypeUint8FeaturesEXT & operator=( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT const *>( &rhs );
return *this;
@@ -55357,25 +58155,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8FeaturesEXT &
- setIndexTypeUint8( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8FeaturesEXT & setIndexTypeUint8( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT
{
indexTypeUint8 = indexTypeUint8_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceIndexTypeUint8FeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceIndexTypeUint8FeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -55392,7 +58189,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( indexTypeUint8 == rhs.indexTypeUint8 );
@@ -55410,15 +58207,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8 = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT ) ==
- sizeof( VkPhysicalDeviceIndexTypeUint8FeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT>::value,
- "PhysicalDeviceIndexTypeUint8FeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT>
@@ -55431,62 +58219,59 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceInheritedViewportScissorFeaturesNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceInheritedViewportScissorFeaturesNV(
- VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D_ = {} ) VULKAN_HPP_NOEXCEPT
- : inheritedViewportScissor2D( inheritedViewportScissor2D_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceInheritedViewportScissorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , inheritedViewportScissor2D( inheritedViewportScissor2D_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceInheritedViewportScissorFeaturesNV(
- PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceInheritedViewportScissorFeaturesNV( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceInheritedViewportScissorFeaturesNV( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceInheritedViewportScissorFeaturesNV(
- *reinterpret_cast<PhysicalDeviceInheritedViewportScissorFeaturesNV const *>( &rhs ) )
- {}
+ PhysicalDeviceInheritedViewportScissorFeaturesNV( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceInheritedViewportScissorFeaturesNV( *reinterpret_cast<PhysicalDeviceInheritedViewportScissorFeaturesNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceInheritedViewportScissorFeaturesNV &
- operator=( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceInheritedViewportScissorFeaturesNV & operator=( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceInheritedViewportScissorFeaturesNV &
- operator=( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceInheritedViewportScissorFeaturesNV & operator=( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV &
- setInheritedViewportScissor2D( VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D_ ) VULKAN_HPP_NOEXCEPT
+ setInheritedViewportScissor2D( VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D_ ) VULKAN_HPP_NOEXCEPT
{
inheritedViewportScissor2D = inheritedViewportScissor2D_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceInheritedViewportScissorFeaturesNV *>( this );
}
- explicit operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceInheritedViewportScissorFeaturesNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -55503,11 +58288,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( inheritedViewportScissor2D == rhs.inheritedViewportScissor2D );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( inheritedViewportScissor2D == rhs.inheritedViewportScissor2D );
# endif
}
@@ -55518,19 +58302,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV ) ==
- sizeof( VkPhysicalDeviceInheritedViewportScissorFeaturesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV>::value,
- "PhysicalDeviceInheritedViewportScissorFeaturesNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV>
@@ -55543,32 +58318,29 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceInlineUniformBlockFeatures;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceInlineUniformBlockFeatures;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeatures(
- VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {} ) VULKAN_HPP_NOEXCEPT
- : inlineUniformBlock( inlineUniformBlock_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeatures( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , inlineUniformBlock( inlineUniformBlock_ )
, descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeatures(
- PhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeatures( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceInlineUniformBlockFeatures( VkPhysicalDeviceInlineUniformBlockFeatures const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceInlineUniformBlockFeatures(
- *reinterpret_cast<PhysicalDeviceInlineUniformBlockFeatures const *>( &rhs ) )
- {}
+ PhysicalDeviceInlineUniformBlockFeatures( VkPhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceInlineUniformBlockFeatures( *reinterpret_cast<PhysicalDeviceInlineUniformBlockFeatures const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceInlineUniformBlockFeatures &
- operator=( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceInlineUniformBlockFeatures & operator=( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceInlineUniformBlockFeatures &
- operator=( VkPhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceInlineUniformBlockFeatures & operator=( VkPhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures const *>( &rhs );
return *this;
@@ -55582,39 +58354,35 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures &
- setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT
+ setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT
{
inlineUniformBlock = inlineUniformBlock_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures &
- setDescriptorBindingInlineUniformBlockUpdateAfterBind(
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & setDescriptorBindingInlineUniformBlockUpdateAfterBind(
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceInlineUniformBlockFeatures const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceInlineUniformBlockFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockFeatures *>( this );
}
- explicit operator VkPhysicalDeviceInlineUniformBlockFeatures &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceInlineUniformBlockFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockFeatures *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -55627,12 +58395,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( inlineUniformBlock == rhs.inlineUniformBlock ) &&
- ( descriptorBindingInlineUniformBlockUpdateAfterBind ==
- rhs.descriptorBindingInlineUniformBlockUpdateAfterBind );
+ ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind );
# endif
}
@@ -55643,20 +58410,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockFeatures;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockFeatures;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures ) ==
- sizeof( VkPhysicalDeviceInlineUniformBlockFeatures ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures>::value,
- "PhysicalDeviceInlineUniformBlockFeatures is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceInlineUniformBlockFeatures>
@@ -55670,55 +58428,51 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceInlineUniformBlockProperties;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceInlineUniformBlockProperties;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockProperties(
- uint32_t maxInlineUniformBlockSize_ = {},
- uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {},
- uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {},
- uint32_t maxDescriptorSetInlineUniformBlocks_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxInlineUniformBlockSize( maxInlineUniformBlockSize_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockProperties( uint32_t maxInlineUniformBlockSize_ = {},
+ uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {},
+ uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {},
+ uint32_t maxDescriptorSetInlineUniformBlocks_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxInlineUniformBlockSize( maxInlineUniformBlockSize_ )
, maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ )
- , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks(
- maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ )
+ , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ )
, maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ )
, maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockProperties(
- PhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockProperties( PhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceInlineUniformBlockProperties( VkPhysicalDeviceInlineUniformBlockProperties const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceInlineUniformBlockProperties(
- *reinterpret_cast<PhysicalDeviceInlineUniformBlockProperties const *>( &rhs ) )
- {}
+ PhysicalDeviceInlineUniformBlockProperties( VkPhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceInlineUniformBlockProperties( *reinterpret_cast<PhysicalDeviceInlineUniformBlockProperties const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceInlineUniformBlockProperties &
- operator=( PhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceInlineUniformBlockProperties & operator=( PhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceInlineUniformBlockProperties &
- operator=( VkPhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceInlineUniformBlockProperties & operator=( VkPhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceInlineUniformBlockProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceInlineUniformBlockProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockProperties *>( this );
}
- explicit operator VkPhysicalDeviceInlineUniformBlockProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceInlineUniformBlockProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -55747,17 +58501,14 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceInlineUniformBlockProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize ) &&
( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks ) &&
- ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ==
- rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ) &&
+ ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ) &&
( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks ) &&
- ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks ==
- rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks );
+ ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks );
# endif
}
@@ -55768,23 +58519,14 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockProperties;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockProperties;
+ void * pNext = {};
uint32_t maxInlineUniformBlockSize = {};
uint32_t maxPerStageDescriptorInlineUniformBlocks = {};
uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {};
uint32_t maxDescriptorSetInlineUniformBlocks = {};
uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties ) ==
- sizeof( VkPhysicalDeviceInlineUniformBlockProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties>::value,
- "PhysicalDeviceInlineUniformBlockProperties is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceInlineUniformBlockProperties>
@@ -55798,29 +58540,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceInvocationMaskFeaturesHUAWEI;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceInvocationMaskFeaturesHUAWEI( VULKAN_HPP_NAMESPACE::Bool32 invocationMask_ = {} )
- VULKAN_HPP_NOEXCEPT : invocationMask( invocationMask_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceInvocationMaskFeaturesHUAWEI( VULKAN_HPP_NAMESPACE::Bool32 invocationMask_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , invocationMask( invocationMask_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceInvocationMaskFeaturesHUAWEI(
- PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceInvocationMaskFeaturesHUAWEI( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceInvocationMaskFeaturesHUAWEI( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceInvocationMaskFeaturesHUAWEI(
- *reinterpret_cast<PhysicalDeviceInvocationMaskFeaturesHUAWEI const *>( &rhs ) )
- {}
+ PhysicalDeviceInvocationMaskFeaturesHUAWEI( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceInvocationMaskFeaturesHUAWEI( *reinterpret_cast<PhysicalDeviceInvocationMaskFeaturesHUAWEI const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceInvocationMaskFeaturesHUAWEI &
- operator=( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceInvocationMaskFeaturesHUAWEI & operator=( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceInvocationMaskFeaturesHUAWEI &
- operator=( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceInvocationMaskFeaturesHUAWEI & operator=( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI const *>( &rhs );
return *this;
@@ -55833,25 +58573,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInvocationMaskFeaturesHUAWEI &
- setInvocationMask( VULKAN_HPP_NAMESPACE::Bool32 invocationMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInvocationMaskFeaturesHUAWEI & setInvocationMask( VULKAN_HPP_NAMESPACE::Bool32 invocationMask_ ) VULKAN_HPP_NOEXCEPT
{
invocationMask = invocationMask_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceInvocationMaskFeaturesHUAWEI *>( this );
}
- explicit operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceInvocationMaskFeaturesHUAWEI *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -55868,7 +58607,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( invocationMask == rhs.invocationMask );
@@ -55886,15 +58625,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 invocationMask = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI ) ==
- sizeof( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI>::value,
- "PhysicalDeviceInvocationMaskFeaturesHUAWEI is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI>
@@ -55902,118 +58632,214 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PhysicalDeviceInvocationMaskFeaturesHUAWEI;
};
+ struct PhysicalDeviceLegacyDitheringFeaturesEXT
+ {
+ using NativeType = VkPhysicalDeviceLegacyDitheringFeaturesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyDitheringFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 legacyDithering_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , legacyDithering( legacyDithering_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyDitheringFeaturesEXT( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceLegacyDitheringFeaturesEXT( VkPhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceLegacyDitheringFeaturesEXT( *reinterpret_cast<PhysicalDeviceLegacyDitheringFeaturesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceLegacyDitheringFeaturesEXT & operator=( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceLegacyDitheringFeaturesEXT & operator=( VkPhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyDitheringFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyDitheringFeaturesEXT & setLegacyDithering( VULKAN_HPP_NAMESPACE::Bool32 legacyDithering_ ) VULKAN_HPP_NOEXCEPT
+ {
+ legacyDithering = legacyDithering_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceLegacyDitheringFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceLegacyDitheringFeaturesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceLegacyDitheringFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceLegacyDitheringFeaturesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, legacyDithering );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceLegacyDitheringFeaturesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( legacyDithering == rhs.legacyDithering );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 legacyDithering = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT>
+ {
+ using Type = PhysicalDeviceLegacyDitheringFeaturesEXT;
+ };
+
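+  // Note (not part of the upstream patch): a minimal, hedged sketch of how the newly added
+  // PhysicalDeviceLegacyDitheringFeaturesEXT wrapper would typically be used from application
+  // code, assuming the driver exposes VK_EXT_legacy_dithering, Vulkan 1.1+ (or
+  // VK_KHR_get_physical_device_properties2) is available, and the default dispatcher is set up.
+  // The name `physicalDevice` is illustrative.
+  //
+  //   #include <vulkan/vulkan.hpp>
+  //
+  //   bool supportsLegacyDithering( vk::PhysicalDevice physicalDevice )
+  //   {
+  //     // getFeatures2<...>() returns a StructureChain whose pNext pointers are wired up
+  //     // automatically; the driver fills in the chained EXT feature struct.
+  //     auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
+  //                                              vk::PhysicalDeviceLegacyDitheringFeaturesEXT>();
+  //     return chain.get<vk::PhysicalDeviceLegacyDitheringFeaturesEXT>().legacyDithering == VK_TRUE;
+  //   }
+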
struct PhysicalDeviceLimits
{
using NativeType = VkPhysicalDeviceLimits;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14
- PhysicalDeviceLimits( uint32_t maxImageDimension1D_ = {},
- uint32_t maxImageDimension2D_ = {},
- uint32_t maxImageDimension3D_ = {},
- uint32_t maxImageDimensionCube_ = {},
- uint32_t maxImageArrayLayers_ = {},
- uint32_t maxTexelBufferElements_ = {},
- uint32_t maxUniformBufferRange_ = {},
- uint32_t maxStorageBufferRange_ = {},
- uint32_t maxPushConstantsSize_ = {},
- uint32_t maxMemoryAllocationCount_ = {},
- uint32_t maxSamplerAllocationCount_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize_ = {},
- uint32_t maxBoundDescriptorSets_ = {},
- uint32_t maxPerStageDescriptorSamplers_ = {},
- uint32_t maxPerStageDescriptorUniformBuffers_ = {},
- uint32_t maxPerStageDescriptorStorageBuffers_ = {},
- uint32_t maxPerStageDescriptorSampledImages_ = {},
- uint32_t maxPerStageDescriptorStorageImages_ = {},
- uint32_t maxPerStageDescriptorInputAttachments_ = {},
- uint32_t maxPerStageResources_ = {},
- uint32_t maxDescriptorSetSamplers_ = {},
- uint32_t maxDescriptorSetUniformBuffers_ = {},
- uint32_t maxDescriptorSetUniformBuffersDynamic_ = {},
- uint32_t maxDescriptorSetStorageBuffers_ = {},
- uint32_t maxDescriptorSetStorageBuffersDynamic_ = {},
- uint32_t maxDescriptorSetSampledImages_ = {},
- uint32_t maxDescriptorSetStorageImages_ = {},
- uint32_t maxDescriptorSetInputAttachments_ = {},
- uint32_t maxVertexInputAttributes_ = {},
- uint32_t maxVertexInputBindings_ = {},
- uint32_t maxVertexInputAttributeOffset_ = {},
- uint32_t maxVertexInputBindingStride_ = {},
- uint32_t maxVertexOutputComponents_ = {},
- uint32_t maxTessellationGenerationLevel_ = {},
- uint32_t maxTessellationPatchSize_ = {},
- uint32_t maxTessellationControlPerVertexInputComponents_ = {},
- uint32_t maxTessellationControlPerVertexOutputComponents_ = {},
- uint32_t maxTessellationControlPerPatchOutputComponents_ = {},
- uint32_t maxTessellationControlTotalOutputComponents_ = {},
- uint32_t maxTessellationEvaluationInputComponents_ = {},
- uint32_t maxTessellationEvaluationOutputComponents_ = {},
- uint32_t maxGeometryShaderInvocations_ = {},
- uint32_t maxGeometryInputComponents_ = {},
- uint32_t maxGeometryOutputComponents_ = {},
- uint32_t maxGeometryOutputVertices_ = {},
- uint32_t maxGeometryTotalOutputComponents_ = {},
- uint32_t maxFragmentInputComponents_ = {},
- uint32_t maxFragmentOutputAttachments_ = {},
- uint32_t maxFragmentDualSrcAttachments_ = {},
- uint32_t maxFragmentCombinedOutputResources_ = {},
- uint32_t maxComputeSharedMemorySize_ = {},
- std::array<uint32_t, 3> const & maxComputeWorkGroupCount_ = {},
- uint32_t maxComputeWorkGroupInvocations_ = {},
- std::array<uint32_t, 3> const & maxComputeWorkGroupSize_ = {},
- uint32_t subPixelPrecisionBits_ = {},
- uint32_t subTexelPrecisionBits_ = {},
- uint32_t mipmapPrecisionBits_ = {},
- uint32_t maxDrawIndexedIndexValue_ = {},
- uint32_t maxDrawIndirectCount_ = {},
- float maxSamplerLodBias_ = {},
- float maxSamplerAnisotropy_ = {},
- uint32_t maxViewports_ = {},
- std::array<uint32_t, 2> const & maxViewportDimensions_ = {},
- std::array<float, 2> const & viewportBoundsRange_ = {},
- uint32_t viewportSubPixelBits_ = {},
- size_t minMemoryMapAlignment_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment_ = {},
- int32_t minTexelOffset_ = {},
- uint32_t maxTexelOffset_ = {},
- int32_t minTexelGatherOffset_ = {},
- uint32_t maxTexelGatherOffset_ = {},
- float minInterpolationOffset_ = {},
- float maxInterpolationOffset_ = {},
- uint32_t subPixelInterpolationOffsetBits_ = {},
- uint32_t maxFramebufferWidth_ = {},
- uint32_t maxFramebufferHeight_ = {},
- uint32_t maxFramebufferLayers_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts_ = {},
- uint32_t maxColorAttachments_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts_ = {},
- uint32_t maxSampleMaskWords_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics_ = {},
- float timestampPeriod_ = {},
- uint32_t maxClipDistances_ = {},
- uint32_t maxCullDistances_ = {},
- uint32_t maxCombinedClipAndCullDistances_ = {},
- uint32_t discreteQueuePriorities_ = {},
- std::array<float, 2> const & pointSizeRange_ = {},
- std::array<float, 2> const & lineWidthRange_ = {},
- float pointSizeGranularity_ = {},
- float lineWidthGranularity_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 strictLines_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( uint32_t maxImageDimension1D_ = {},
+ uint32_t maxImageDimension2D_ = {},
+ uint32_t maxImageDimension3D_ = {},
+ uint32_t maxImageDimensionCube_ = {},
+ uint32_t maxImageArrayLayers_ = {},
+ uint32_t maxTexelBufferElements_ = {},
+ uint32_t maxUniformBufferRange_ = {},
+ uint32_t maxStorageBufferRange_ = {},
+ uint32_t maxPushConstantsSize_ = {},
+ uint32_t maxMemoryAllocationCount_ = {},
+ uint32_t maxSamplerAllocationCount_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize_ = {},
+ uint32_t maxBoundDescriptorSets_ = {},
+ uint32_t maxPerStageDescriptorSamplers_ = {},
+ uint32_t maxPerStageDescriptorUniformBuffers_ = {},
+ uint32_t maxPerStageDescriptorStorageBuffers_ = {},
+ uint32_t maxPerStageDescriptorSampledImages_ = {},
+ uint32_t maxPerStageDescriptorStorageImages_ = {},
+ uint32_t maxPerStageDescriptorInputAttachments_ = {},
+ uint32_t maxPerStageResources_ = {},
+ uint32_t maxDescriptorSetSamplers_ = {},
+ uint32_t maxDescriptorSetUniformBuffers_ = {},
+ uint32_t maxDescriptorSetUniformBuffersDynamic_ = {},
+ uint32_t maxDescriptorSetStorageBuffers_ = {},
+ uint32_t maxDescriptorSetStorageBuffersDynamic_ = {},
+ uint32_t maxDescriptorSetSampledImages_ = {},
+ uint32_t maxDescriptorSetStorageImages_ = {},
+ uint32_t maxDescriptorSetInputAttachments_ = {},
+ uint32_t maxVertexInputAttributes_ = {},
+ uint32_t maxVertexInputBindings_ = {},
+ uint32_t maxVertexInputAttributeOffset_ = {},
+ uint32_t maxVertexInputBindingStride_ = {},
+ uint32_t maxVertexOutputComponents_ = {},
+ uint32_t maxTessellationGenerationLevel_ = {},
+ uint32_t maxTessellationPatchSize_ = {},
+ uint32_t maxTessellationControlPerVertexInputComponents_ = {},
+ uint32_t maxTessellationControlPerVertexOutputComponents_ = {},
+ uint32_t maxTessellationControlPerPatchOutputComponents_ = {},
+ uint32_t maxTessellationControlTotalOutputComponents_ = {},
+ uint32_t maxTessellationEvaluationInputComponents_ = {},
+ uint32_t maxTessellationEvaluationOutputComponents_ = {},
+ uint32_t maxGeometryShaderInvocations_ = {},
+ uint32_t maxGeometryInputComponents_ = {},
+ uint32_t maxGeometryOutputComponents_ = {},
+ uint32_t maxGeometryOutputVertices_ = {},
+ uint32_t maxGeometryTotalOutputComponents_ = {},
+ uint32_t maxFragmentInputComponents_ = {},
+ uint32_t maxFragmentOutputAttachments_ = {},
+ uint32_t maxFragmentDualSrcAttachments_ = {},
+ uint32_t maxFragmentCombinedOutputResources_ = {},
+ uint32_t maxComputeSharedMemorySize_ = {},
+ std::array<uint32_t, 3> const & maxComputeWorkGroupCount_ = {},
+ uint32_t maxComputeWorkGroupInvocations_ = {},
+ std::array<uint32_t, 3> const & maxComputeWorkGroupSize_ = {},
+ uint32_t subPixelPrecisionBits_ = {},
+ uint32_t subTexelPrecisionBits_ = {},
+ uint32_t mipmapPrecisionBits_ = {},
+ uint32_t maxDrawIndexedIndexValue_ = {},
+ uint32_t maxDrawIndirectCount_ = {},
+ float maxSamplerLodBias_ = {},
+ float maxSamplerAnisotropy_ = {},
+ uint32_t maxViewports_ = {},
+ std::array<uint32_t, 2> const & maxViewportDimensions_ = {},
+ std::array<float, 2> const & viewportBoundsRange_ = {},
+ uint32_t viewportSubPixelBits_ = {},
+ size_t minMemoryMapAlignment_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment_ = {},
+ int32_t minTexelOffset_ = {},
+ uint32_t maxTexelOffset_ = {},
+ int32_t minTexelGatherOffset_ = {},
+ uint32_t maxTexelGatherOffset_ = {},
+ float minInterpolationOffset_ = {},
+ float maxInterpolationOffset_ = {},
+ uint32_t subPixelInterpolationOffsetBits_ = {},
+ uint32_t maxFramebufferWidth_ = {},
+ uint32_t maxFramebufferHeight_ = {},
+ uint32_t maxFramebufferLayers_ = {},
+ VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts_ = {},
+ VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts_ = {},
+ VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts_ = {},
+ VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts_ = {},
+ uint32_t maxColorAttachments_ = {},
+ VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts_ = {},
+ VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts_ = {},
+ VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts_ = {},
+ VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts_ = {},
+ VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts_ = {},
+ uint32_t maxSampleMaskWords_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics_ = {},
+ float timestampPeriod_ = {},
+ uint32_t maxClipDistances_ = {},
+ uint32_t maxCullDistances_ = {},
+ uint32_t maxCombinedClipAndCullDistances_ = {},
+ uint32_t discreteQueuePriorities_ = {},
+ std::array<float, 2> const & pointSizeRange_ = {},
+ std::array<float, 2> const & lineWidthRange_ = {},
+ float pointSizeGranularity_ = {},
+ float lineWidthGranularity_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 strictLines_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize_ = {} ) VULKAN_HPP_NOEXCEPT
: maxImageDimension1D( maxImageDimension1D_ )
, maxImageDimension2D( maxImageDimension2D_ )
, maxImageDimension3D( maxImageDimension3D_ )
@@ -56120,13 +58946,15 @@ namespace VULKAN_HPP_NAMESPACE
, optimalBufferCopyOffsetAlignment( optimalBufferCopyOffsetAlignment_ )
, optimalBufferCopyRowPitchAlignment( optimalBufferCopyRowPitchAlignment_ )
, nonCoherentAtomSize( nonCoherentAtomSize_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceLimits( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceLimits( *reinterpret_cast<PhysicalDeviceLimits const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceLimits & operator=( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -56137,17 +58965,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkPhysicalDeviceLimits const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceLimits const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceLimits *>( this );
}
- explicit operator VkPhysicalDeviceLimits &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceLimits &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceLimits *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -56374,108 +59202,71 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( maxImageDimension1D == rhs.maxImageDimension1D ) && ( maxImageDimension2D == rhs.maxImageDimension2D ) &&
- ( maxImageDimension3D == rhs.maxImageDimension3D ) &&
- ( maxImageDimensionCube == rhs.maxImageDimensionCube ) &&
- ( maxImageArrayLayers == rhs.maxImageArrayLayers ) &&
- ( maxTexelBufferElements == rhs.maxTexelBufferElements ) &&
- ( maxUniformBufferRange == rhs.maxUniformBufferRange ) &&
- ( maxStorageBufferRange == rhs.maxStorageBufferRange ) &&
- ( maxPushConstantsSize == rhs.maxPushConstantsSize ) &&
- ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount ) &&
- ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount ) &&
- ( bufferImageGranularity == rhs.bufferImageGranularity ) &&
- ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize ) &&
- ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets ) &&
+ ( maxImageDimension3D == rhs.maxImageDimension3D ) && ( maxImageDimensionCube == rhs.maxImageDimensionCube ) &&
+ ( maxImageArrayLayers == rhs.maxImageArrayLayers ) && ( maxTexelBufferElements == rhs.maxTexelBufferElements ) &&
+ ( maxUniformBufferRange == rhs.maxUniformBufferRange ) && ( maxStorageBufferRange == rhs.maxStorageBufferRange ) &&
+ ( maxPushConstantsSize == rhs.maxPushConstantsSize ) && ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount ) &&
+ ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount ) && ( bufferImageGranularity == rhs.bufferImageGranularity ) &&
+ ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize ) && ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets ) &&
( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers ) &&
( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers ) &&
( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers ) &&
( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages ) &&
( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages ) &&
- ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments ) &&
- ( maxPerStageResources == rhs.maxPerStageResources ) &&
- ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers ) &&
- ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers ) &&
+ ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments ) && ( maxPerStageResources == rhs.maxPerStageResources ) &&
+ ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers ) && ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers ) &&
( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic ) &&
( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers ) &&
( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic ) &&
- ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages ) &&
- ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages ) &&
- ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments ) &&
- ( maxVertexInputAttributes == rhs.maxVertexInputAttributes ) &&
- ( maxVertexInputBindings == rhs.maxVertexInputBindings ) &&
- ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset ) &&
- ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride ) &&
- ( maxVertexOutputComponents == rhs.maxVertexOutputComponents ) &&
- ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel ) &&
- ( maxTessellationPatchSize == rhs.maxTessellationPatchSize ) &&
+ ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages ) && ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages ) &&
+ ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments ) && ( maxVertexInputAttributes == rhs.maxVertexInputAttributes ) &&
+ ( maxVertexInputBindings == rhs.maxVertexInputBindings ) && ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset ) &&
+ ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride ) && ( maxVertexOutputComponents == rhs.maxVertexOutputComponents ) &&
+ ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel ) && ( maxTessellationPatchSize == rhs.maxTessellationPatchSize ) &&
( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents ) &&
- ( maxTessellationControlPerVertexOutputComponents ==
- rhs.maxTessellationControlPerVertexOutputComponents ) &&
+ ( maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents ) &&
( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents ) &&
( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents ) &&
( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents ) &&
( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents ) &&
- ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations ) &&
- ( maxGeometryInputComponents == rhs.maxGeometryInputComponents ) &&
- ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents ) &&
- ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices ) &&
- ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents ) &&
- ( maxFragmentInputComponents == rhs.maxFragmentInputComponents ) &&
- ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments ) &&
- ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments ) &&
+ ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations ) && ( maxGeometryInputComponents == rhs.maxGeometryInputComponents ) &&
+ ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents ) && ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices ) &&
+ ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents ) && ( maxFragmentInputComponents == rhs.maxFragmentInputComponents ) &&
+ ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments ) && ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments ) &&
( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources ) &&
- ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize ) &&
- ( maxComputeWorkGroupCount == rhs.maxComputeWorkGroupCount ) &&
- ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations ) &&
- ( maxComputeWorkGroupSize == rhs.maxComputeWorkGroupSize ) &&
- ( subPixelPrecisionBits == rhs.subPixelPrecisionBits ) &&
- ( subTexelPrecisionBits == rhs.subTexelPrecisionBits ) &&
- ( mipmapPrecisionBits == rhs.mipmapPrecisionBits ) &&
- ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue ) &&
+ ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize ) && ( maxComputeWorkGroupCount == rhs.maxComputeWorkGroupCount ) &&
+ ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations ) && ( maxComputeWorkGroupSize == rhs.maxComputeWorkGroupSize ) &&
+ ( subPixelPrecisionBits == rhs.subPixelPrecisionBits ) && ( subTexelPrecisionBits == rhs.subTexelPrecisionBits ) &&
+ ( mipmapPrecisionBits == rhs.mipmapPrecisionBits ) && ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue ) &&
( maxDrawIndirectCount == rhs.maxDrawIndirectCount ) && ( maxSamplerLodBias == rhs.maxSamplerLodBias ) &&
( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy ) && ( maxViewports == rhs.maxViewports ) &&
- ( maxViewportDimensions == rhs.maxViewportDimensions ) &&
- ( viewportBoundsRange == rhs.viewportBoundsRange ) &&
- ( viewportSubPixelBits == rhs.viewportSubPixelBits ) &&
- ( minMemoryMapAlignment == rhs.minMemoryMapAlignment ) &&
+ ( maxViewportDimensions == rhs.maxViewportDimensions ) && ( viewportBoundsRange == rhs.viewportBoundsRange ) &&
+ ( viewportSubPixelBits == rhs.viewportSubPixelBits ) && ( minMemoryMapAlignment == rhs.minMemoryMapAlignment ) &&
( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment ) &&
( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment ) &&
- ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment ) &&
- ( minTexelOffset == rhs.minTexelOffset ) && ( maxTexelOffset == rhs.maxTexelOffset ) &&
- ( minTexelGatherOffset == rhs.minTexelGatherOffset ) &&
- ( maxTexelGatherOffset == rhs.maxTexelGatherOffset ) &&
- ( minInterpolationOffset == rhs.minInterpolationOffset ) &&
- ( maxInterpolationOffset == rhs.maxInterpolationOffset ) &&
- ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits ) &&
- ( maxFramebufferWidth == rhs.maxFramebufferWidth ) &&
- ( maxFramebufferHeight == rhs.maxFramebufferHeight ) &&
- ( maxFramebufferLayers == rhs.maxFramebufferLayers ) &&
- ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts ) &&
- ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts ) &&
- ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts ) &&
- ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts ) &&
- ( maxColorAttachments == rhs.maxColorAttachments ) &&
+ ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment ) && ( minTexelOffset == rhs.minTexelOffset ) &&
+ ( maxTexelOffset == rhs.maxTexelOffset ) && ( minTexelGatherOffset == rhs.minTexelGatherOffset ) &&
+ ( maxTexelGatherOffset == rhs.maxTexelGatherOffset ) && ( minInterpolationOffset == rhs.minInterpolationOffset ) &&
+ ( maxInterpolationOffset == rhs.maxInterpolationOffset ) && ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits ) &&
+ ( maxFramebufferWidth == rhs.maxFramebufferWidth ) && ( maxFramebufferHeight == rhs.maxFramebufferHeight ) &&
+ ( maxFramebufferLayers == rhs.maxFramebufferLayers ) && ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts ) &&
+ ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts ) && ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts ) &&
+ ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts ) && ( maxColorAttachments == rhs.maxColorAttachments ) &&
( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts ) &&
( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts ) &&
( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts ) &&
- ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts ) &&
- ( storageImageSampleCounts == rhs.storageImageSampleCounts ) &&
- ( maxSampleMaskWords == rhs.maxSampleMaskWords ) &&
- ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics ) &&
- ( timestampPeriod == rhs.timestampPeriod ) && ( maxClipDistances == rhs.maxClipDistances ) &&
- ( maxCullDistances == rhs.maxCullDistances ) &&
- ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances ) &&
- ( discreteQueuePriorities == rhs.discreteQueuePriorities ) && ( pointSizeRange == rhs.pointSizeRange ) &&
- ( lineWidthRange == rhs.lineWidthRange ) && ( pointSizeGranularity == rhs.pointSizeGranularity ) &&
+ ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts ) && ( storageImageSampleCounts == rhs.storageImageSampleCounts ) &&
+ ( maxSampleMaskWords == rhs.maxSampleMaskWords ) && ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics ) &&
+ ( timestampPeriod == rhs.timestampPeriod ) && ( maxClipDistances == rhs.maxClipDistances ) && ( maxCullDistances == rhs.maxCullDistances ) &&
+ ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances ) && ( discreteQueuePriorities == rhs.discreteQueuePriorities ) &&
+ ( pointSizeRange == rhs.pointSizeRange ) && ( lineWidthRange == rhs.lineWidthRange ) && ( pointSizeGranularity == rhs.pointSizeGranularity ) &&
( lineWidthGranularity == rhs.lineWidthGranularity ) && ( strictLines == rhs.strictLines ) &&
- ( standardSampleLocations == rhs.standardSampleLocations ) &&
- ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment ) &&
- ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment ) &&
- ( nonCoherentAtomSize == rhs.nonCoherentAtomSize );
+ ( standardSampleLocations == rhs.standardSampleLocations ) && ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment ) &&
+ ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment ) && ( nonCoherentAtomSize == rhs.nonCoherentAtomSize );
# endif
}
@@ -56593,52 +59384,43 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment = {};
VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits>::value,
- "PhysicalDeviceLimits is not nothrow_move_constructible!" );
struct PhysicalDeviceLineRasterizationFeaturesEXT
{
using NativeType = VkPhysicalDeviceLineRasterizationFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {} ) VULKAN_HPP_NOEXCEPT
- : rectangularLines( rectangularLines_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , rectangularLines( rectangularLines_ )
, bresenhamLines( bresenhamLines_ )
, smoothLines( smoothLines_ )
, stippledRectangularLines( stippledRectangularLines_ )
, stippledBresenhamLines( stippledBresenhamLines_ )
, stippledSmoothLines( stippledSmoothLines_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT(
- PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceLineRasterizationFeaturesEXT( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceLineRasterizationFeaturesEXT(
- *reinterpret_cast<PhysicalDeviceLineRasterizationFeaturesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceLineRasterizationFeaturesEXT( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceLineRasterizationFeaturesEXT( *reinterpret_cast<PhysicalDeviceLineRasterizationFeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceLineRasterizationFeaturesEXT &
- operator=( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceLineRasterizationFeaturesEXT & operator=( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceLineRasterizationFeaturesEXT &
- operator=( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceLineRasterizationFeaturesEXT & operator=( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT const *>( &rhs );
return *this;
@@ -56658,53 +59440,51 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT &
- setBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & setBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT
{
bresenhamLines = bresenhamLines_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT &
- setSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & setSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT
{
smoothLines = smoothLines_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT &
- setStippledRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT
+ setStippledRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT
{
stippledRectangularLines = stippledRectangularLines_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT &
- setStippledBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT
+ setStippledBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT
{
stippledBresenhamLines = stippledBresenhamLines_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT &
- setStippledSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT
+ setStippledSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT
{
stippledSmoothLines = stippledSmoothLines_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceLineRasterizationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceLineRasterizationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceLineRasterizationFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceLineRasterizationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceLineRasterizationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceLineRasterizationFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -56719,14 +59499,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- rectangularLines,
- bresenhamLines,
- smoothLines,
- stippledRectangularLines,
- stippledBresenhamLines,
- stippledSmoothLines );
+ return std::tie( sType, pNext, rectangularLines, bresenhamLines, smoothLines, stippledRectangularLines, stippledBresenhamLines, stippledSmoothLines );
}
#endif
@@ -56735,14 +59508,12 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rectangularLines == rhs.rectangularLines ) &&
- ( bresenhamLines == rhs.bresenhamLines ) && ( smoothLines == rhs.smoothLines ) &&
- ( stippledRectangularLines == rhs.stippledRectangularLines ) &&
- ( stippledBresenhamLines == rhs.stippledBresenhamLines ) &&
- ( stippledSmoothLines == rhs.stippledSmoothLines );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rectangularLines == rhs.rectangularLines ) && ( bresenhamLines == rhs.bresenhamLines ) &&
+ ( smoothLines == rhs.smoothLines ) && ( stippledRectangularLines == rhs.stippledRectangularLines ) &&
+ ( stippledBresenhamLines == rhs.stippledBresenhamLines ) && ( stippledSmoothLines == rhs.stippledSmoothLines );
# endif
}
@@ -56753,24 +59524,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::Bool32 rectangularLines = {};
- VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines = {};
- VULKAN_HPP_NAMESPACE::Bool32 smoothLines = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 rectangularLines = {};
+ VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines = {};
+ VULKAN_HPP_NAMESPACE::Bool32 smoothLines = {};
VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines = {};
VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines = {};
VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT ) ==
- sizeof( VkPhysicalDeviceLineRasterizationFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT>::value,
- "PhysicalDeviceLineRasterizationFeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT>
@@ -56783,46 +59545,42 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceLineRasterizationPropertiesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceLineRasterizationPropertiesEXT( uint32_t lineSubPixelPrecisionBits_ = {} ) VULKAN_HPP_NOEXCEPT
- : lineSubPixelPrecisionBits( lineSubPixelPrecisionBits_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT( uint32_t lineSubPixelPrecisionBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , lineSubPixelPrecisionBits( lineSubPixelPrecisionBits_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT(
- PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceLineRasterizationPropertiesEXT( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceLineRasterizationPropertiesEXT(
- *reinterpret_cast<PhysicalDeviceLineRasterizationPropertiesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceLineRasterizationPropertiesEXT( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceLineRasterizationPropertiesEXT( *reinterpret_cast<PhysicalDeviceLineRasterizationPropertiesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceLineRasterizationPropertiesEXT &
- operator=( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceLineRasterizationPropertiesEXT & operator=( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceLineRasterizationPropertiesEXT &
- operator=( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceLineRasterizationPropertiesEXT & operator=( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceLineRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceLineRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceLineRasterizationPropertiesEXT *>( this );
}
- explicit operator VkPhysicalDeviceLineRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceLineRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceLineRasterizationPropertiesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -56839,11 +59597,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( lineSubPixelPrecisionBits == rhs.lineSubPixelPrecisionBits );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( lineSubPixelPrecisionBits == rhs.lineSubPixelPrecisionBits );
# endif
}
@@ -56854,19 +59611,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT;
+ void * pNext = {};
uint32_t lineSubPixelPrecisionBits = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT ) ==
- sizeof( VkPhysicalDeviceLineRasterizationPropertiesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT>::value,
- "PhysicalDeviceLineRasterizationPropertiesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT>
@@ -56879,62 +59627,59 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceLinearColorAttachmentFeaturesNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceLinearColorAttachmentFeaturesNV(
- VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment_ = {} ) VULKAN_HPP_NOEXCEPT
- : linearColorAttachment( linearColorAttachment_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceLinearColorAttachmentFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , linearColorAttachment( linearColorAttachment_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceLinearColorAttachmentFeaturesNV(
- PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceLinearColorAttachmentFeaturesNV( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceLinearColorAttachmentFeaturesNV( VkPhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceLinearColorAttachmentFeaturesNV(
- *reinterpret_cast<PhysicalDeviceLinearColorAttachmentFeaturesNV const *>( &rhs ) )
- {}
+ PhysicalDeviceLinearColorAttachmentFeaturesNV( VkPhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceLinearColorAttachmentFeaturesNV( *reinterpret_cast<PhysicalDeviceLinearColorAttachmentFeaturesNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceLinearColorAttachmentFeaturesNV &
- operator=( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceLinearColorAttachmentFeaturesNV & operator=( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceLinearColorAttachmentFeaturesNV &
- operator=( VkPhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceLinearColorAttachmentFeaturesNV & operator=( VkPhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLinearColorAttachmentFeaturesNV &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLinearColorAttachmentFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLinearColorAttachmentFeaturesNV &
- setLinearColorAttachment( VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment_ ) VULKAN_HPP_NOEXCEPT
+ setLinearColorAttachment( VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment_ ) VULKAN_HPP_NOEXCEPT
{
linearColorAttachment = linearColorAttachment_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceLinearColorAttachmentFeaturesNV *>( this );
}
- explicit operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceLinearColorAttachmentFeaturesNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -56951,7 +59696,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( linearColorAttachment == rhs.linearColorAttachment );
@@ -56965,19 +59710,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV ) ==
- sizeof( VkPhysicalDeviceLinearColorAttachmentFeaturesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV>::value,
- "PhysicalDeviceLinearColorAttachmentFeaturesNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV>
@@ -56990,53 +59726,49 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceMaintenance3Properties;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceMaintenance3Properties;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance3Properties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties(
- uint32_t maxPerSetDescriptors_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxPerSetDescriptors( maxPerSetDescriptors_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( uint32_t maxPerSetDescriptors_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxPerSetDescriptors( maxPerSetDescriptors_ )
, maxMemoryAllocationSize( maxMemoryAllocationSize_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( PhysicalDeviceMaintenance3Properties const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMaintenance3Properties( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMaintenance3Properties( *reinterpret_cast<PhysicalDeviceMaintenance3Properties const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceMaintenance3Properties &
- operator=( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceMaintenance3Properties & operator=( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceMaintenance3Properties &
- operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMaintenance3Properties & operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceMaintenance3Properties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMaintenance3Properties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMaintenance3Properties *>( this );
}
- explicit operator VkPhysicalDeviceMaintenance3Properties &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMaintenance3Properties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMaintenance3Properties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- uint32_t const &,
- VULKAN_HPP_NAMESPACE::DeviceSize const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -57049,7 +59781,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) &&
@@ -57069,14 +59801,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t maxPerSetDescriptors = {};
VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties ) ==
- sizeof( VkPhysicalDeviceMaintenance3Properties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties>::value,
- "PhysicalDeviceMaintenance3Properties is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance3Properties>
@@ -57090,28 +59814,26 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceMaintenance4Features;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceMaintenance4Features;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance4Features;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceMaintenance4Features( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ = {} ) VULKAN_HPP_NOEXCEPT
- : maintenance4( maintenance4_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Features( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maintenance4( maintenance4_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Features( PhysicalDeviceMaintenance4Features const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Features( PhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMaintenance4Features( VkPhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMaintenance4Features( *reinterpret_cast<PhysicalDeviceMaintenance4Features const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceMaintenance4Features &
- operator=( PhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceMaintenance4Features & operator=( PhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceMaintenance4Features &
- operator=( VkPhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMaintenance4Features & operator=( VkPhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features const *>( &rhs );
return *this;
@@ -57124,25 +59846,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance4Features &
- setMaintenance4( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance4Features & setMaintenance4( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ ) VULKAN_HPP_NOEXCEPT
{
maintenance4 = maintenance4_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceMaintenance4Features const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMaintenance4Features const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMaintenance4Features *>( this );
}
- explicit operator VkPhysicalDeviceMaintenance4Features &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMaintenance4Features &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMaintenance4Features *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -57159,7 +59880,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceMaintenance4Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance4 == rhs.maintenance4 );
@@ -57177,14 +59898,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 maintenance4 = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features ) ==
- sizeof( VkPhysicalDeviceMaintenance4Features ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features>::value,
- "PhysicalDeviceMaintenance4Features is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance4Features>
@@ -57198,44 +59911,43 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceMaintenance4Properties;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceMaintenance4Properties;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance4Properties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceMaintenance4Properties( VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxBufferSize( maxBufferSize_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Properties( VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxBufferSize( maxBufferSize_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Properties( PhysicalDeviceMaintenance4Properties const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Properties( PhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMaintenance4Properties( VkPhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMaintenance4Properties( *reinterpret_cast<PhysicalDeviceMaintenance4Properties const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceMaintenance4Properties &
- operator=( PhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceMaintenance4Properties & operator=( PhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceMaintenance4Properties &
- operator=( VkPhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMaintenance4Properties & operator=( VkPhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceMaintenance4Properties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMaintenance4Properties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMaintenance4Properties *>( this );
}
- explicit operator VkPhysicalDeviceMaintenance4Properties &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMaintenance4Properties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMaintenance4Properties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -57252,7 +59964,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceMaintenance4Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxBufferSize == rhs.maxBufferSize );
@@ -57270,14 +59982,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties ) ==
- sizeof( VkPhysicalDeviceMaintenance4Properties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties>::value,
- "PhysicalDeviceMaintenance4Properties is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance4Properties>
@@ -57291,47 +59995,45 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceMemoryBudgetPropertiesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT(
- std::array<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> const & heapBudget_ = {},
- std::array<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> const & heapUsage_ = {} ) VULKAN_HPP_NOEXCEPT
- : heapBudget( heapBudget_ )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( std::array<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> const & heapBudget_ = {},
+ std::array<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> const & heapUsage_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , heapBudget( heapBudget_ )
, heapUsage( heapUsage_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT(
- PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMemoryBudgetPropertiesEXT( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceMemoryBudgetPropertiesEXT(
- *reinterpret_cast<PhysicalDeviceMemoryBudgetPropertiesEXT const *>( &rhs ) )
- {}
+ : PhysicalDeviceMemoryBudgetPropertiesEXT( *reinterpret_cast<PhysicalDeviceMemoryBudgetPropertiesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceMemoryBudgetPropertiesEXT &
- operator=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceMemoryBudgetPropertiesEXT &
- operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMemoryBudgetPropertiesEXT *>( this );
}
- explicit operator VkPhysicalDeviceMemoryBudgetPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMemoryBudgetPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMemoryBudgetPropertiesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -57351,11 +60053,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( heapBudget == rhs.heapBudget ) &&
- ( heapUsage == rhs.heapUsage );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( heapBudget == rhs.heapBudget ) && ( heapUsage == rhs.heapUsage );
# endif
}
@@ -57366,20 +60067,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> heapBudget = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> heapUsage = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT ) ==
- sizeof( VkPhysicalDeviceMemoryBudgetPropertiesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT>::value,
- "PhysicalDeviceMemoryBudgetPropertiesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT>
@@ -57392,29 +60084,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceMemoryPriorityFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceMemoryPriorityFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ = {} ) VULKAN_HPP_NOEXCEPT
- : memoryPriority( memoryPriority_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , memoryPriority( memoryPriority_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMemoryPriorityFeaturesEXT( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceMemoryPriorityFeaturesEXT(
- *reinterpret_cast<PhysicalDeviceMemoryPriorityFeaturesEXT const *>( &rhs ) )
- {}
+ : PhysicalDeviceMemoryPriorityFeaturesEXT( *reinterpret_cast<PhysicalDeviceMemoryPriorityFeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceMemoryPriorityFeaturesEXT &
- operator=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceMemoryPriorityFeaturesEXT &
- operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT const *>( &rhs );
return *this;
@@ -57427,25 +60117,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT &
- setMemoryPriority( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT & setMemoryPriority( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ ) VULKAN_HPP_NOEXCEPT
{
memoryPriority = memoryPriority_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMemoryPriorityFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceMemoryPriorityFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMemoryPriorityFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMemoryPriorityFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -57462,7 +60151,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryPriority == rhs.memoryPriority );
@@ -57480,15 +60169,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 memoryPriority = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT ) ==
- sizeof( VkPhysicalDeviceMemoryPriorityFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT>::value,
- "PhysicalDeviceMemoryPriorityFeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT>
@@ -57501,27 +60181,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceMemoryProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties(
- uint32_t memoryTypeCount_ = {},
- std::array<VULKAN_HPP_NAMESPACE::MemoryType, VK_MAX_MEMORY_TYPES> const & memoryTypes_ = {},
- uint32_t memoryHeapCount_ = {},
- std::array<VULKAN_HPP_NAMESPACE::MemoryHeap, VK_MAX_MEMORY_HEAPS> const & memoryHeaps_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14
+ PhysicalDeviceMemoryProperties( uint32_t memoryTypeCount_ = {},
+ std::array<VULKAN_HPP_NAMESPACE::MemoryType, VK_MAX_MEMORY_TYPES> const & memoryTypes_ = {},
+ uint32_t memoryHeapCount_ = {},
+ std::array<VULKAN_HPP_NAMESPACE::MemoryHeap, VK_MAX_MEMORY_HEAPS> const & memoryHeaps_ = {} ) VULKAN_HPP_NOEXCEPT
: memoryTypeCount( memoryTypeCount_ )
, memoryTypes( memoryTypes_ )
, memoryHeapCount( memoryHeapCount_ )
, memoryHeaps( memoryHeaps_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14
- PhysicalDeviceMemoryProperties( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMemoryProperties( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMemoryProperties( *reinterpret_cast<PhysicalDeviceMemoryProperties const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceMemoryProperties &
- operator=( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceMemoryProperties & operator=( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMemoryProperties & operator=( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -57529,17 +60209,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkPhysicalDeviceMemoryProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMemoryProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties *>( this );
}
- explicit operator VkPhysicalDeviceMemoryProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMemoryProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -57559,11 +60239,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( memoryTypeCount == rhs.memoryTypeCount ) && ( memoryTypes == rhs.memoryTypes ) &&
- ( memoryHeapCount == rhs.memoryHeapCount ) && ( memoryHeaps == rhs.memoryHeaps );
+ return ( memoryTypeCount == rhs.memoryTypeCount ) && ( memoryTypes == rhs.memoryTypes ) && ( memoryHeapCount == rhs.memoryHeapCount ) &&
+ ( memoryHeaps == rhs.memoryHeaps );
# endif
}
@@ -57579,14 +60259,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t memoryHeapCount = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryHeap, VK_MAX_MEMORY_HEAPS> memoryHeaps = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties ) ==
- sizeof( VkPhysicalDeviceMemoryProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties>::value,
- "PhysicalDeviceMemoryProperties is not nothrow_move_constructible!" );
struct PhysicalDeviceMemoryProperties2
{
@@ -57596,21 +60268,22 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryProperties2;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2(
- VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT
- : memoryProperties( memoryProperties_ )
- {}
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , memoryProperties( memoryProperties_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14
- PhysicalDeviceMemoryProperties2( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMemoryProperties2( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMemoryProperties2( *reinterpret_cast<PhysicalDeviceMemoryProperties2 const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceMemoryProperties2 &
- operator=( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceMemoryProperties2 & operator=( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMemoryProperties2 & operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -57618,23 +60291,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkPhysicalDeviceMemoryProperties2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMemoryProperties2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties2 *>( this );
}
- explicit operator VkPhysicalDeviceMemoryProperties2 &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMemoryProperties2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -57647,7 +60318,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryProperties == rhs.memoryProperties );
@@ -57661,18 +60332,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 ) ==
- sizeof( VkPhysicalDeviceMemoryProperties2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>::value,
- "PhysicalDeviceMemoryProperties2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryProperties2>
@@ -57681,35 +60344,177 @@ namespace VULKAN_HPP_NAMESPACE
};
using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2;
+ struct PhysicalDeviceMeshShaderFeaturesEXT
+ {
+ using NativeType = VkPhysicalDeviceMeshShaderFeaturesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 multiviewMeshShader_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateMeshShader_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 meshShaderQueries_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , taskShader( taskShader_ )
+ , meshShader( meshShader_ )
+ , multiviewMeshShader( multiviewMeshShader_ )
+ , primitiveFragmentShadingRateMeshShader( primitiveFragmentShadingRateMeshShader_ )
+ , meshShaderQueries( meshShaderQueries_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesEXT( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceMeshShaderFeaturesEXT( VkPhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceMeshShaderFeaturesEXT( *reinterpret_cast<PhysicalDeviceMeshShaderFeaturesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceMeshShaderFeaturesEXT & operator=( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceMeshShaderFeaturesEXT & operator=( VkPhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setTaskShader( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT
+ {
+ taskShader = taskShader_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setMeshShader( VULKAN_HPP_NAMESPACE::Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT
+ {
+ meshShader = meshShader_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT &
+ setMultiviewMeshShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewMeshShader_ ) VULKAN_HPP_NOEXCEPT
+ {
+ multiviewMeshShader = multiviewMeshShader_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT &
+ setPrimitiveFragmentShadingRateMeshShader( VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateMeshShader_ ) VULKAN_HPP_NOEXCEPT
+ {
+ primitiveFragmentShadingRateMeshShader = primitiveFragmentShadingRateMeshShader_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setMeshShaderQueries( VULKAN_HPP_NAMESPACE::Bool32 meshShaderQueries_ ) VULKAN_HPP_NOEXCEPT
+ {
+ meshShaderQueries = meshShaderQueries_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceMeshShaderFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceMeshShaderFeaturesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceMeshShaderFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, taskShader, meshShader, multiviewMeshShader, primitiveFragmentShadingRateMeshShader, meshShaderQueries );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceMeshShaderFeaturesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( taskShader == rhs.taskShader ) && ( meshShader == rhs.meshShader ) &&
+ ( multiviewMeshShader == rhs.multiviewMeshShader ) && ( primitiveFragmentShadingRateMeshShader == rhs.primitiveFragmentShadingRateMeshShader ) &&
+ ( meshShaderQueries == rhs.meshShaderQueries );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 taskShader = {};
+ VULKAN_HPP_NAMESPACE::Bool32 meshShader = {};
+ VULKAN_HPP_NAMESPACE::Bool32 multiviewMeshShader = {};
+ VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateMeshShader = {};
+ VULKAN_HPP_NAMESPACE::Bool32 meshShaderQueries = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderFeaturesEXT>
+ {
+ using Type = PhysicalDeviceMeshShaderFeaturesEXT;
+ };
+
struct PhysicalDeviceMeshShaderFeaturesNV
{
using NativeType = VkPhysicalDeviceMeshShaderFeaturesNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceMeshShaderFeaturesNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceMeshShaderFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {} ) VULKAN_HPP_NOEXCEPT
- : taskShader( taskShader_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , taskShader( taskShader_ )
, meshShader( meshShader_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( PhysicalDeviceMeshShaderFeaturesNV const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMeshShaderFeaturesNV( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMeshShaderFeaturesNV( *reinterpret_cast<PhysicalDeviceMeshShaderFeaturesNV const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceMeshShaderFeaturesNV &
- operator=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceMeshShaderFeaturesNV & operator=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceMeshShaderFeaturesNV &
- operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMeshShaderFeaturesNV & operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV const *>( &rhs );
return *this;
@@ -57722,39 +60527,34 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV &
- setTaskShader( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setTaskShader( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT
{
taskShader = taskShader_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV &
- setMeshShader( VULKAN_HPP_NAMESPACE::Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setMeshShader( VULKAN_HPP_NAMESPACE::Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT
{
meshShader = meshShader_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceMeshShaderFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMeshShaderFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMeshShaderFeaturesNV *>( this );
}
- explicit operator VkPhysicalDeviceMeshShaderFeaturesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMeshShaderFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -57767,11 +60567,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( taskShader == rhs.taskShader ) &&
- ( meshShader == rhs.meshShader );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( taskShader == rhs.taskShader ) && ( meshShader == rhs.meshShader );
# endif
}
@@ -57787,14 +60586,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 taskShader = {};
VULKAN_HPP_NAMESPACE::Bool32 meshShader = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV ) ==
- sizeof( VkPhysicalDeviceMeshShaderFeaturesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV>::value,
- "PhysicalDeviceMeshShaderFeaturesNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderFeaturesNV>
@@ -57802,30 +60593,268 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PhysicalDeviceMeshShaderFeaturesNV;
};
+ struct PhysicalDeviceMeshShaderPropertiesEXT
+ {
+ using NativeType = VkPhysicalDeviceMeshShaderPropertiesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesEXT( uint32_t maxTaskWorkGroupTotalCount_ = {},
+ std::array<uint32_t, 3> const & maxTaskWorkGroupCount_ = {},
+ uint32_t maxTaskWorkGroupInvocations_ = {},
+ std::array<uint32_t, 3> const & maxTaskWorkGroupSize_ = {},
+ uint32_t maxTaskPayloadSize_ = {},
+ uint32_t maxTaskSharedMemorySize_ = {},
+ uint32_t maxTaskPayloadAndSharedMemorySize_ = {},
+ uint32_t maxMeshWorkGroupTotalCount_ = {},
+ std::array<uint32_t, 3> const & maxMeshWorkGroupCount_ = {},
+ uint32_t maxMeshWorkGroupInvocations_ = {},
+ std::array<uint32_t, 3> const & maxMeshWorkGroupSize_ = {},
+ uint32_t maxMeshSharedMemorySize_ = {},
+ uint32_t maxMeshPayloadAndSharedMemorySize_ = {},
+ uint32_t maxMeshOutputMemorySize_ = {},
+ uint32_t maxMeshPayloadAndOutputMemorySize_ = {},
+ uint32_t maxMeshOutputComponents_ = {},
+ uint32_t maxMeshOutputVertices_ = {},
+ uint32_t maxMeshOutputPrimitives_ = {},
+ uint32_t maxMeshOutputLayers_ = {},
+ uint32_t maxMeshMultiviewViewCount_ = {},
+ uint32_t meshOutputPerVertexGranularity_ = {},
+ uint32_t meshOutputPerPrimitiveGranularity_ = {},
+ uint32_t maxPreferredTaskWorkGroupInvocations_ = {},
+ uint32_t maxPreferredMeshWorkGroupInvocations_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 prefersLocalInvocationVertexOutput_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 prefersLocalInvocationPrimitiveOutput_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 prefersCompactVertexOutput_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 prefersCompactPrimitiveOutput_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxTaskWorkGroupTotalCount( maxTaskWorkGroupTotalCount_ )
+ , maxTaskWorkGroupCount( maxTaskWorkGroupCount_ )
+ , maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ )
+ , maxTaskWorkGroupSize( maxTaskWorkGroupSize_ )
+ , maxTaskPayloadSize( maxTaskPayloadSize_ )
+ , maxTaskSharedMemorySize( maxTaskSharedMemorySize_ )
+ , maxTaskPayloadAndSharedMemorySize( maxTaskPayloadAndSharedMemorySize_ )
+ , maxMeshWorkGroupTotalCount( maxMeshWorkGroupTotalCount_ )
+ , maxMeshWorkGroupCount( maxMeshWorkGroupCount_ )
+ , maxMeshWorkGroupInvocations( maxMeshWorkGroupInvocations_ )
+ , maxMeshWorkGroupSize( maxMeshWorkGroupSize_ )
+ , maxMeshSharedMemorySize( maxMeshSharedMemorySize_ )
+ , maxMeshPayloadAndSharedMemorySize( maxMeshPayloadAndSharedMemorySize_ )
+ , maxMeshOutputMemorySize( maxMeshOutputMemorySize_ )
+ , maxMeshPayloadAndOutputMemorySize( maxMeshPayloadAndOutputMemorySize_ )
+ , maxMeshOutputComponents( maxMeshOutputComponents_ )
+ , maxMeshOutputVertices( maxMeshOutputVertices_ )
+ , maxMeshOutputPrimitives( maxMeshOutputPrimitives_ )
+ , maxMeshOutputLayers( maxMeshOutputLayers_ )
+ , maxMeshMultiviewViewCount( maxMeshMultiviewViewCount_ )
+ , meshOutputPerVertexGranularity( meshOutputPerVertexGranularity_ )
+ , meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ )
+ , maxPreferredTaskWorkGroupInvocations( maxPreferredTaskWorkGroupInvocations_ )
+ , maxPreferredMeshWorkGroupInvocations( maxPreferredMeshWorkGroupInvocations_ )
+ , prefersLocalInvocationVertexOutput( prefersLocalInvocationVertexOutput_ )
+ , prefersLocalInvocationPrimitiveOutput( prefersLocalInvocationPrimitiveOutput_ )
+ , prefersCompactVertexOutput( prefersCompactVertexOutput_ )
+ , prefersCompactPrimitiveOutput( prefersCompactPrimitiveOutput_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesEXT( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceMeshShaderPropertiesEXT( VkPhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceMeshShaderPropertiesEXT( *reinterpret_cast<PhysicalDeviceMeshShaderPropertiesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceMeshShaderPropertiesEXT & operator=( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceMeshShaderPropertiesEXT & operator=( VkPhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesEXT const *>( &rhs );
+ return *this;
+ }
+
+ operator VkPhysicalDeviceMeshShaderPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceMeshShaderPropertiesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceMeshShaderPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceMeshShaderPropertiesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ uint32_t const &,
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &,
+ uint32_t const &,
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &,
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &,
+ uint32_t const &,
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType,
+ pNext,
+ maxTaskWorkGroupTotalCount,
+ maxTaskWorkGroupCount,
+ maxTaskWorkGroupInvocations,
+ maxTaskWorkGroupSize,
+ maxTaskPayloadSize,
+ maxTaskSharedMemorySize,
+ maxTaskPayloadAndSharedMemorySize,
+ maxMeshWorkGroupTotalCount,
+ maxMeshWorkGroupCount,
+ maxMeshWorkGroupInvocations,
+ maxMeshWorkGroupSize,
+ maxMeshSharedMemorySize,
+ maxMeshPayloadAndSharedMemorySize,
+ maxMeshOutputMemorySize,
+ maxMeshPayloadAndOutputMemorySize,
+ maxMeshOutputComponents,
+ maxMeshOutputVertices,
+ maxMeshOutputPrimitives,
+ maxMeshOutputLayers,
+ maxMeshMultiviewViewCount,
+ meshOutputPerVertexGranularity,
+ meshOutputPerPrimitiveGranularity,
+ maxPreferredTaskWorkGroupInvocations,
+ maxPreferredMeshWorkGroupInvocations,
+ prefersLocalInvocationVertexOutput,
+ prefersLocalInvocationPrimitiveOutput,
+ prefersCompactVertexOutput,
+ prefersCompactPrimitiveOutput );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceMeshShaderPropertiesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxTaskWorkGroupTotalCount == rhs.maxTaskWorkGroupTotalCount ) &&
+ ( maxTaskWorkGroupCount == rhs.maxTaskWorkGroupCount ) && ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations ) &&
+ ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize ) && ( maxTaskPayloadSize == rhs.maxTaskPayloadSize ) &&
+ ( maxTaskSharedMemorySize == rhs.maxTaskSharedMemorySize ) && ( maxTaskPayloadAndSharedMemorySize == rhs.maxTaskPayloadAndSharedMemorySize ) &&
+ ( maxMeshWorkGroupTotalCount == rhs.maxMeshWorkGroupTotalCount ) && ( maxMeshWorkGroupCount == rhs.maxMeshWorkGroupCount ) &&
+ ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations ) && ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize ) &&
+ ( maxMeshSharedMemorySize == rhs.maxMeshSharedMemorySize ) && ( maxMeshPayloadAndSharedMemorySize == rhs.maxMeshPayloadAndSharedMemorySize ) &&
+ ( maxMeshOutputMemorySize == rhs.maxMeshOutputMemorySize ) && ( maxMeshPayloadAndOutputMemorySize == rhs.maxMeshPayloadAndOutputMemorySize ) &&
+ ( maxMeshOutputComponents == rhs.maxMeshOutputComponents ) && ( maxMeshOutputVertices == rhs.maxMeshOutputVertices ) &&
+ ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives ) && ( maxMeshOutputLayers == rhs.maxMeshOutputLayers ) &&
+ ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount ) && ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity ) &&
+ ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity ) &&
+ ( maxPreferredTaskWorkGroupInvocations == rhs.maxPreferredTaskWorkGroupInvocations ) &&
+ ( maxPreferredMeshWorkGroupInvocations == rhs.maxPreferredMeshWorkGroupInvocations ) &&
+ ( prefersLocalInvocationVertexOutput == rhs.prefersLocalInvocationVertexOutput ) &&
+ ( prefersLocalInvocationPrimitiveOutput == rhs.prefersLocalInvocationPrimitiveOutput ) &&
+ ( prefersCompactVertexOutput == rhs.prefersCompactVertexOutput ) && ( prefersCompactPrimitiveOutput == rhs.prefersCompactPrimitiveOutput );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesEXT;
+ void * pNext = {};
+ uint32_t maxTaskWorkGroupTotalCount = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxTaskWorkGroupCount = {};
+ uint32_t maxTaskWorkGroupInvocations = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxTaskWorkGroupSize = {};
+ uint32_t maxTaskPayloadSize = {};
+ uint32_t maxTaskSharedMemorySize = {};
+ uint32_t maxTaskPayloadAndSharedMemorySize = {};
+ uint32_t maxMeshWorkGroupTotalCount = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxMeshWorkGroupCount = {};
+ uint32_t maxMeshWorkGroupInvocations = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxMeshWorkGroupSize = {};
+ uint32_t maxMeshSharedMemorySize = {};
+ uint32_t maxMeshPayloadAndSharedMemorySize = {};
+ uint32_t maxMeshOutputMemorySize = {};
+ uint32_t maxMeshPayloadAndOutputMemorySize = {};
+ uint32_t maxMeshOutputComponents = {};
+ uint32_t maxMeshOutputVertices = {};
+ uint32_t maxMeshOutputPrimitives = {};
+ uint32_t maxMeshOutputLayers = {};
+ uint32_t maxMeshMultiviewViewCount = {};
+ uint32_t meshOutputPerVertexGranularity = {};
+ uint32_t meshOutputPerPrimitiveGranularity = {};
+ uint32_t maxPreferredTaskWorkGroupInvocations = {};
+ uint32_t maxPreferredMeshWorkGroupInvocations = {};
+ VULKAN_HPP_NAMESPACE::Bool32 prefersLocalInvocationVertexOutput = {};
+ VULKAN_HPP_NAMESPACE::Bool32 prefersLocalInvocationPrimitiveOutput = {};
+ VULKAN_HPP_NAMESPACE::Bool32 prefersCompactVertexOutput = {};
+ VULKAN_HPP_NAMESPACE::Bool32 prefersCompactPrimitiveOutput = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderPropertiesEXT>
+ {
+ using Type = PhysicalDeviceMeshShaderPropertiesEXT;
+ };
+
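// Editor's note: a minimal usage sketch, not part of the upstream diff. It shows how a
// property struct such as PhysicalDeviceMeshShaderPropertiesEXT defined above is typically
// read: chain it behind PhysicalDeviceProperties2 and let the driver fill it in. Assumes
// <vulkan/vulkan.hpp> is included and the device supports VK_EXT_mesh_shader; the helper
// name is illustrative only.
inline uint32_t queryMaxMeshOutputVertices( vk::PhysicalDevice physicalDevice )
{
  // StructureChain wires the EXT struct into the pNext chain of PhysicalDeviceProperties2.
  auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
                                             vk::PhysicalDeviceMeshShaderPropertiesEXT>();
  return chain.get<vk::PhysicalDeviceMeshShaderPropertiesEXT>().maxMeshOutputVertices;
}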
struct PhysicalDeviceMeshShaderPropertiesNV
{
using NativeType = VkPhysicalDeviceMeshShaderPropertiesNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceMeshShaderPropertiesNV;
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14
- PhysicalDeviceMeshShaderPropertiesNV( uint32_t maxDrawMeshTasksCount_ = {},
- uint32_t maxTaskWorkGroupInvocations_ = {},
- std::array<uint32_t, 3> const & maxTaskWorkGroupSize_ = {},
- uint32_t maxTaskTotalMemorySize_ = {},
- uint32_t maxTaskOutputCount_ = {},
- uint32_t maxMeshWorkGroupInvocations_ = {},
- std::array<uint32_t, 3> const & maxMeshWorkGroupSize_ = {},
- uint32_t maxMeshTotalMemorySize_ = {},
- uint32_t maxMeshOutputVertices_ = {},
- uint32_t maxMeshOutputPrimitives_ = {},
- uint32_t maxMeshMultiviewViewCount_ = {},
- uint32_t meshOutputPerVertexGranularity_ = {},
- uint32_t meshOutputPerPrimitiveGranularity_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxDrawMeshTasksCount( maxDrawMeshTasksCount_ )
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( uint32_t maxDrawMeshTasksCount_ = {},
+ uint32_t maxTaskWorkGroupInvocations_ = {},
+ std::array<uint32_t, 3> const & maxTaskWorkGroupSize_ = {},
+ uint32_t maxTaskTotalMemorySize_ = {},
+ uint32_t maxTaskOutputCount_ = {},
+ uint32_t maxMeshWorkGroupInvocations_ = {},
+ std::array<uint32_t, 3> const & maxMeshWorkGroupSize_ = {},
+ uint32_t maxMeshTotalMemorySize_ = {},
+ uint32_t maxMeshOutputVertices_ = {},
+ uint32_t maxMeshOutputPrimitives_ = {},
+ uint32_t maxMeshMultiviewViewCount_ = {},
+ uint32_t meshOutputPerVertexGranularity_ = {},
+ uint32_t meshOutputPerPrimitiveGranularity_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxDrawMeshTasksCount( maxDrawMeshTasksCount_ )
, maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ )
, maxTaskWorkGroupSize( maxTaskWorkGroupSize_ )
, maxTaskTotalMemorySize( maxTaskTotalMemorySize_ )
@@ -57838,37 +60867,36 @@ namespace VULKAN_HPP_NAMESPACE
, maxMeshMultiviewViewCount( maxMeshMultiviewViewCount_ )
, meshOutputPerVertexGranularity( meshOutputPerVertexGranularity_ )
, meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( PhysicalDeviceMeshShaderPropertiesNV const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMeshShaderPropertiesNV( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMeshShaderPropertiesNV( *reinterpret_cast<PhysicalDeviceMeshShaderPropertiesNV const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceMeshShaderPropertiesNV &
- operator=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceMeshShaderPropertiesNV & operator=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceMeshShaderPropertiesNV &
- operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMeshShaderPropertiesNV & operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceMeshShaderPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMeshShaderPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMeshShaderPropertiesNV *>( this );
}
- explicit operator VkPhysicalDeviceMeshShaderPropertiesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMeshShaderPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMeshShaderPropertiesNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -57913,21 +60941,15 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( maxDrawMeshTasksCount == rhs.maxDrawMeshTasksCount ) &&
- ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations ) &&
- ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize ) &&
- ( maxTaskTotalMemorySize == rhs.maxTaskTotalMemorySize ) &&
- ( maxTaskOutputCount == rhs.maxTaskOutputCount ) &&
- ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations ) &&
- ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize ) &&
- ( maxMeshTotalMemorySize == rhs.maxMeshTotalMemorySize ) &&
- ( maxMeshOutputVertices == rhs.maxMeshOutputVertices ) &&
- ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives ) &&
- ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxDrawMeshTasksCount == rhs.maxDrawMeshTasksCount ) &&
+ ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations ) && ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize ) &&
+ ( maxTaskTotalMemorySize == rhs.maxTaskTotalMemorySize ) && ( maxTaskOutputCount == rhs.maxTaskOutputCount ) &&
+ ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations ) && ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize ) &&
+ ( maxMeshTotalMemorySize == rhs.maxMeshTotalMemorySize ) && ( maxMeshOutputVertices == rhs.maxMeshOutputVertices ) &&
+ ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives ) && ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount ) &&
( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity ) &&
( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity );
# endif
@@ -57940,8 +60962,8 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV;
+ void * pNext = {};
uint32_t maxDrawMeshTasksCount = {};
uint32_t maxTaskWorkGroupInvocations = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxTaskWorkGroupSize = {};
@@ -57956,14 +60978,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t meshOutputPerVertexGranularity = {};
uint32_t meshOutputPerPrimitiveGranularity = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV ) ==
- sizeof( VkPhysicalDeviceMeshShaderPropertiesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV>::value,
- "PhysicalDeviceMeshShaderPropertiesNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderPropertiesNV>
@@ -57976,28 +60990,26 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceMultiDrawFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceMultiDrawFeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiDrawFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceMultiDrawFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 multiDraw_ = {} ) VULKAN_HPP_NOEXCEPT
- : multiDraw( multiDraw_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 multiDraw_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , multiDraw( multiDraw_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawFeaturesEXT( PhysicalDeviceMultiDrawFeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawFeaturesEXT( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMultiDrawFeaturesEXT( VkPhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMultiDrawFeaturesEXT( *reinterpret_cast<PhysicalDeviceMultiDrawFeaturesEXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceMultiDrawFeaturesEXT &
- operator=( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceMultiDrawFeaturesEXT & operator=( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceMultiDrawFeaturesEXT &
- operator=( VkPhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMultiDrawFeaturesEXT & operator=( VkPhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT const *>( &rhs );
return *this;
@@ -58010,25 +61022,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiDrawFeaturesEXT &
- setMultiDraw( VULKAN_HPP_NAMESPACE::Bool32 multiDraw_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiDrawFeaturesEXT & setMultiDraw( VULKAN_HPP_NAMESPACE::Bool32 multiDraw_ ) VULKAN_HPP_NOEXCEPT
{
multiDraw = multiDraw_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceMultiDrawFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMultiDrawFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMultiDrawFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceMultiDrawFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMultiDrawFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMultiDrawFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -58045,7 +61056,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiDraw == rhs.multiDraw );
@@ -58063,14 +61074,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 multiDraw = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT ) ==
- sizeof( VkPhysicalDeviceMultiDrawFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT>::value,
- "PhysicalDeviceMultiDrawFeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMultiDrawFeaturesEXT>
@@ -58083,43 +61086,42 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceMultiDrawPropertiesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceMultiDrawPropertiesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiDrawPropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawPropertiesEXT( uint32_t maxMultiDrawCount_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxMultiDrawCount( maxMultiDrawCount_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawPropertiesEXT( uint32_t maxMultiDrawCount_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxMultiDrawCount( maxMultiDrawCount_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawPropertiesEXT( PhysicalDeviceMultiDrawPropertiesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawPropertiesEXT( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMultiDrawPropertiesEXT( VkPhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMultiDrawPropertiesEXT( *reinterpret_cast<PhysicalDeviceMultiDrawPropertiesEXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceMultiDrawPropertiesEXT &
- operator=( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceMultiDrawPropertiesEXT & operator=( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceMultiDrawPropertiesEXT &
- operator=( VkPhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMultiDrawPropertiesEXT & operator=( VkPhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceMultiDrawPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMultiDrawPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMultiDrawPropertiesEXT *>( this );
}
- explicit operator VkPhysicalDeviceMultiDrawPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMultiDrawPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMultiDrawPropertiesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -58136,7 +61138,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxMultiDrawCount == rhs.maxMultiDrawCount );
@@ -58154,14 +61156,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
uint32_t maxMultiDrawCount = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT ) ==
- sizeof( VkPhysicalDeviceMultiDrawPropertiesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT>::value,
- "PhysicalDeviceMultiDrawPropertiesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMultiDrawPropertiesEXT>
@@ -58169,6 +61163,108 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PhysicalDeviceMultiDrawPropertiesEXT;
};
+ struct PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT
+ {
+ using NativeType = VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampled_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , multisampledRenderToSingleSampled( multisampledRenderToSingleSampled_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs )
+ VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT(
+ *reinterpret_cast<PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT &
+ operator=( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT &
+ operator=( VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT &
+ setMultisampledRenderToSingleSampled( VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampled_ ) VULKAN_HPP_NOEXCEPT
+ {
+ multisampledRenderToSingleSampled = multisampledRenderToSingleSampled_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, multisampledRenderToSingleSampled );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multisampledRenderToSingleSampled == rhs.multisampledRenderToSingleSampled );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampled = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT>
+ {
+ using Type = PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT;
+ };
+
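// Editor's note: usage sketch only, not part of the upstream diff. Feature structs such as
// PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT are enabled by chaining them
// into the pNext chain passed at device creation. `physicalDevice`, `baseInfo`, and the
// helper name are assumptions for illustration.
inline vk::Device createDeviceWithMsrtss( vk::PhysicalDevice physicalDevice, vk::DeviceCreateInfo baseInfo )
{
  vk::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT msrtssFeatures{};
  msrtssFeatures.multisampledRenderToSingleSampled = VK_TRUE;  // request the feature
  baseInfo.pNext = &msrtssFeatures;                            // hook into the pNext chain
  return physicalDevice.createDevice( baseInfo );              // struct stays alive for the call
}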
struct PhysicalDeviceMultiviewFeatures
{
using NativeType = VkPhysicalDeviceMultiviewFeatures;
@@ -58177,25 +61273,26 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures(
- VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {} ) VULKAN_HPP_NOEXCEPT
- : multiview( multiview_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , multiview( multiview_ )
, multiviewGeometryShader( multiviewGeometryShader_ )
, multiviewTessellationShader( multiviewTessellationShader_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceMultiviewFeatures( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMultiviewFeatures( *reinterpret_cast<PhysicalDeviceMultiviewFeatures const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceMultiviewFeatures &
- operator=( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceMultiviewFeatures & operator=( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMultiviewFeatures & operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -58210,39 +61307,38 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures &
- setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT
{
multiview = multiview_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures &
- setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT
+ setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT
{
multiviewGeometryShader = multiviewGeometryShader_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures &
- setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT
+ setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT
{
multiviewTessellationShader = multiviewTessellationShader_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceMultiviewFeatures const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMultiviewFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMultiviewFeatures *>( this );
}
- explicit operator VkPhysicalDeviceMultiviewFeatures &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMultiviewFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMultiviewFeatures *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -58263,11 +61359,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiview == rhs.multiview ) &&
- ( multiviewGeometryShader == rhs.multiviewGeometryShader ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiview == rhs.multiview ) && ( multiviewGeometryShader == rhs.multiviewGeometryShader ) &&
( multiviewTessellationShader == rhs.multiviewTessellationShader );
# endif
}
@@ -58285,14 +61380,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {};
VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures ) ==
- sizeof( VkPhysicalDeviceMultiviewFeatures ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures>::value,
- "PhysicalDeviceMultiviewFeatures is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewFeatures>
@@ -58306,47 +61393,45 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(
- VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents_ = {} ) VULKAN_HPP_NOEXCEPT
- : perViewPositionAllComponents( perViewPositionAllComponents_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , perViewPositionAllComponents( perViewPositionAllComponents_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(
- PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(
- VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(
- *reinterpret_cast<PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *>( &rhs ) )
- {}
+ PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( *reinterpret_cast<PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &
operator=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &
- operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX *>( this );
}
- explicit operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -58363,11 +61448,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( perViewPositionAllComponents == rhs.perViewPositionAllComponents );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( perViewPositionAllComponents == rhs.perViewPositionAllComponents );
# endif
}
@@ -58378,20 +61462,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) ==
- sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<
- VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>::value,
- "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>
@@ -58404,27 +61478,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceMultiviewProperties;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceMultiviewProperties;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceMultiviewProperties( uint32_t maxMultiviewViewCount_ = {},
- uint32_t maxMultiviewInstanceIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxMultiviewViewCount( maxMultiviewViewCount_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( uint32_t maxMultiviewViewCount_ = {},
+ uint32_t maxMultiviewInstanceIndex_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxMultiviewViewCount( maxMultiviewViewCount_ )
, maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceMultiviewProperties( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMultiviewProperties( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceMultiviewProperties( *reinterpret_cast<PhysicalDeviceMultiviewProperties const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceMultiviewProperties &
- operator=( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceMultiviewProperties & operator=( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceMultiviewProperties & operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -58432,17 +61506,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkPhysicalDeviceMultiviewProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMultiviewProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceMultiviewProperties *>( this );
}
- explicit operator VkPhysicalDeviceMultiviewProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMultiviewProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceMultiviewProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -58459,11 +61533,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) &&
( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex );
# endif
}
@@ -58480,14 +61553,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t maxMultiviewViewCount = {};
uint32_t maxMultiviewInstanceIndex = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties ) ==
- sizeof( VkPhysicalDeviceMultiviewProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties>::value,
- "PhysicalDeviceMultiviewProperties is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewProperties>
@@ -58496,67 +61561,64 @@ namespace VULKAN_HPP_NAMESPACE
};
using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties;
- struct PhysicalDeviceMutableDescriptorTypeFeaturesVALVE
+ struct PhysicalDeviceMutableDescriptorTypeFeaturesEXT
{
- using NativeType = VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE;
+ using NativeType = VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceMutableDescriptorTypeFeaturesVALVE(
- VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ = {} ) VULKAN_HPP_NOEXCEPT
- : mutableDescriptorType( mutableDescriptorType_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMutableDescriptorTypeFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , mutableDescriptorType( mutableDescriptorType_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceMutableDescriptorTypeFeaturesVALVE(
- PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceMutableDescriptorTypeFeaturesEXT( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceMutableDescriptorTypeFeaturesVALVE( VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceMutableDescriptorTypeFeaturesVALVE(
- *reinterpret_cast<PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const *>( &rhs ) )
- {}
+ PhysicalDeviceMutableDescriptorTypeFeaturesEXT( VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceMutableDescriptorTypeFeaturesEXT( *reinterpret_cast<PhysicalDeviceMutableDescriptorTypeFeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceMutableDescriptorTypeFeaturesVALVE &
- operator=( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceMutableDescriptorTypeFeaturesEXT & operator=( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceMutableDescriptorTypeFeaturesVALVE &
- operator=( VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMutableDescriptorTypeFeaturesEXT & operator=( VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesVALVE &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesVALVE &
- setMutableDescriptorType( VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesEXT &
+ setMutableDescriptorType( VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ ) VULKAN_HPP_NOEXCEPT
{
mutableDescriptorType = mutableDescriptorType_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE *>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE *>( this );
+ return *reinterpret_cast<VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -58569,42 +61631,578 @@ namespace VULKAN_HPP_NAMESPACE
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & ) const = default;
+ auto operator<=>( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & ) const = default;
#else
- bool operator==( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mutableDescriptorType == rhs.mutableDescriptorType );
# endif
}
- bool operator!=( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesVALVE ) ==
- sizeof( VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesVALVE>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesVALVE>::value,
- "PhysicalDeviceMutableDescriptorTypeFeaturesVALVE is not nothrow_move_constructible!" );
template <>
- struct CppType<StructureType, StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT>
+ {
+ using Type = PhysicalDeviceMutableDescriptorTypeFeaturesEXT;
+ };
+ using PhysicalDeviceMutableDescriptorTypeFeaturesVALVE = PhysicalDeviceMutableDescriptorTypeFeaturesEXT;
+
+ struct PhysicalDeviceNonSeamlessCubeMapFeaturesEXT
+ {
+ using NativeType = VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 nonSeamlessCubeMap_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , nonSeamlessCubeMap( nonSeamlessCubeMap_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( *reinterpret_cast<PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & operator=( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & operator=( VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNonSeamlessCubeMapFeaturesEXT &
+ setNonSeamlessCubeMap( VULKAN_HPP_NAMESPACE::Bool32 nonSeamlessCubeMap_ ) VULKAN_HPP_NOEXCEPT
+ {
+ nonSeamlessCubeMap = nonSeamlessCubeMap_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, nonSeamlessCubeMap );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( nonSeamlessCubeMap == rhs.nonSeamlessCubeMap );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 nonSeamlessCubeMap = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT>
+ {
+ using Type = PhysicalDeviceNonSeamlessCubeMapFeaturesEXT;
+ };
+
+ struct PhysicalDeviceOpacityMicromapFeaturesEXT
+ {
+ using NativeType = VkPhysicalDeviceOpacityMicromapFeaturesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpacityMicromapFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 micromap_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 micromapCaptureReplay_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 micromapHostCommands_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , micromap( micromap_ )
+ , micromapCaptureReplay( micromapCaptureReplay_ )
+ , micromapHostCommands( micromapHostCommands_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapFeaturesEXT( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceOpacityMicromapFeaturesEXT( VkPhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceOpacityMicromapFeaturesEXT( *reinterpret_cast<PhysicalDeviceOpacityMicromapFeaturesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceOpacityMicromapFeaturesEXT & operator=( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceOpacityMicromapFeaturesEXT & operator=( VkPhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & setMicromap( VULKAN_HPP_NAMESPACE::Bool32 micromap_ ) VULKAN_HPP_NOEXCEPT
+ {
+ micromap = micromap_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT &
+ setMicromapCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 micromapCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+ {
+ micromapCaptureReplay = micromapCaptureReplay_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT &
+ setMicromapHostCommands( VULKAN_HPP_NAMESPACE::Bool32 micromapHostCommands_ ) VULKAN_HPP_NOEXCEPT
+ {
+ micromapHostCommands = micromapHostCommands_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceOpacityMicromapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceOpacityMicromapFeaturesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceOpacityMicromapFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceOpacityMicromapFeaturesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, micromap, micromapCaptureReplay, micromapHostCommands );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceOpacityMicromapFeaturesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( micromap == rhs.micromap ) && ( micromapCaptureReplay == rhs.micromapCaptureReplay ) &&
+ ( micromapHostCommands == rhs.micromapHostCommands );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceOpacityMicromapFeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 micromap = {};
+ VULKAN_HPP_NAMESPACE::Bool32 micromapCaptureReplay = {};
+ VULKAN_HPP_NAMESPACE::Bool32 micromapHostCommands = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceOpacityMicromapFeaturesEXT>
+ {
+ using Type = PhysicalDeviceOpacityMicromapFeaturesEXT;
+ };
+
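// Editor's note: usage sketch only, not part of the upstream diff. This is the matching
// query path for a feature struct: chain it behind PhysicalDeviceFeatures2 and read the
// booleans the implementation reports. `physicalDevice` is assumed to be a valid handle.
inline bool supportsOpacityMicromaps( vk::PhysicalDevice physicalDevice )
{
  auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
                                           vk::PhysicalDeviceOpacityMicromapFeaturesEXT>();
  const auto & mm = chain.get<vk::PhysicalDeviceOpacityMicromapFeaturesEXT>();
  return mm.micromap == VK_TRUE;  // captureReplay and hostCommands are reported separately
}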
+ struct PhysicalDeviceOpacityMicromapPropertiesEXT
+ {
+ using NativeType = VkPhysicalDeviceOpacityMicromapPropertiesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpacityMicromapPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapPropertiesEXT( uint32_t maxOpacity2StateSubdivisionLevel_ = {},
+ uint32_t maxOpacity4StateSubdivisionLevel_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxOpacity2StateSubdivisionLevel( maxOpacity2StateSubdivisionLevel_ )
+ , maxOpacity4StateSubdivisionLevel( maxOpacity4StateSubdivisionLevel_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapPropertiesEXT( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceOpacityMicromapPropertiesEXT( VkPhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceOpacityMicromapPropertiesEXT( *reinterpret_cast<PhysicalDeviceOpacityMicromapPropertiesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceOpacityMicromapPropertiesEXT & operator=( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceOpacityMicromapPropertiesEXT & operator=( VkPhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT const *>( &rhs );
+ return *this;
+ }
+
+ operator VkPhysicalDeviceOpacityMicromapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceOpacityMicromapPropertiesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceOpacityMicromapPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceOpacityMicromapPropertiesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, maxOpacity2StateSubdivisionLevel, maxOpacity4StateSubdivisionLevel );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceOpacityMicromapPropertiesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxOpacity2StateSubdivisionLevel == rhs.maxOpacity2StateSubdivisionLevel ) &&
+ ( maxOpacity4StateSubdivisionLevel == rhs.maxOpacity4StateSubdivisionLevel );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceOpacityMicromapPropertiesEXT;
+ void * pNext = {};
+ uint32_t maxOpacity2StateSubdivisionLevel = {};
+ uint32_t maxOpacity4StateSubdivisionLevel = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceOpacityMicromapPropertiesEXT>
+ {
+ using Type = PhysicalDeviceOpacityMicromapPropertiesEXT;
+ };
+
+ struct PhysicalDeviceOpticalFlowFeaturesNV
+ {
+ using NativeType = VkPhysicalDeviceOpticalFlowFeaturesNV;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpticalFlowFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 opticalFlow_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , opticalFlow( opticalFlow_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowFeaturesNV( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceOpticalFlowFeaturesNV( VkPhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceOpticalFlowFeaturesNV( *reinterpret_cast<PhysicalDeviceOpticalFlowFeaturesNV const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceOpticalFlowFeaturesNV & operator=( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceOpticalFlowFeaturesNV & operator=( VkPhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpticalFlowFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpticalFlowFeaturesNV & setOpticalFlow( VULKAN_HPP_NAMESPACE::Bool32 opticalFlow_ ) VULKAN_HPP_NOEXCEPT
+ {
+ opticalFlow = opticalFlow_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceOpticalFlowFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceOpticalFlowFeaturesNV *>( this );
+ }
+
+ operator VkPhysicalDeviceOpticalFlowFeaturesNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceOpticalFlowFeaturesNV *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, opticalFlow );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceOpticalFlowFeaturesNV const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opticalFlow == rhs.opticalFlow );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceOpticalFlowFeaturesNV;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 opticalFlow = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceOpticalFlowFeaturesNV>
+ {
+ using Type = PhysicalDeviceOpticalFlowFeaturesNV;
+ };
+
+ struct PhysicalDeviceOpticalFlowPropertiesNV
+ {
+ using NativeType = VkPhysicalDeviceOpticalFlowPropertiesNV;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpticalFlowPropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowPropertiesNV( VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV supportedOutputGridSizes_ = {},
+ VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV supportedHintGridSizes_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 hintSupported_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 costSupported_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 bidirectionalFlowSupported_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 globalFlowSupported_ = {},
+ uint32_t minWidth_ = {},
+ uint32_t minHeight_ = {},
+ uint32_t maxWidth_ = {},
+ uint32_t maxHeight_ = {},
+ uint32_t maxNumRegionsOfInterest_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , supportedOutputGridSizes( supportedOutputGridSizes_ )
+ , supportedHintGridSizes( supportedHintGridSizes_ )
+ , hintSupported( hintSupported_ )
+ , costSupported( costSupported_ )
+ , bidirectionalFlowSupported( bidirectionalFlowSupported_ )
+ , globalFlowSupported( globalFlowSupported_ )
+ , minWidth( minWidth_ )
+ , minHeight( minHeight_ )
+ , maxWidth( maxWidth_ )
+ , maxHeight( maxHeight_ )
+ , maxNumRegionsOfInterest( maxNumRegionsOfInterest_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowPropertiesNV( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceOpticalFlowPropertiesNV( VkPhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceOpticalFlowPropertiesNV( *reinterpret_cast<PhysicalDeviceOpticalFlowPropertiesNV const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceOpticalFlowPropertiesNV & operator=( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceOpticalFlowPropertiesNV & operator=( VkPhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV const *>( &rhs );
+ return *this;
+ }
+
+ operator VkPhysicalDeviceOpticalFlowPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceOpticalFlowPropertiesNV *>( this );
+ }
+
+ operator VkPhysicalDeviceOpticalFlowPropertiesNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceOpticalFlowPropertiesNV *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV const &,
+ VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType,
+ pNext,
+ supportedOutputGridSizes,
+ supportedHintGridSizes,
+ hintSupported,
+ costSupported,
+ bidirectionalFlowSupported,
+ globalFlowSupported,
+ minWidth,
+ minHeight,
+ maxWidth,
+ maxHeight,
+ maxNumRegionsOfInterest );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceOpticalFlowPropertiesNV const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedOutputGridSizes == rhs.supportedOutputGridSizes ) &&
+ ( supportedHintGridSizes == rhs.supportedHintGridSizes ) && ( hintSupported == rhs.hintSupported ) && ( costSupported == rhs.costSupported ) &&
+ ( bidirectionalFlowSupported == rhs.bidirectionalFlowSupported ) && ( globalFlowSupported == rhs.globalFlowSupported ) &&
+ ( minWidth == rhs.minWidth ) && ( minHeight == rhs.minHeight ) && ( maxWidth == rhs.maxWidth ) && ( maxHeight == rhs.maxHeight ) &&
+ ( maxNumRegionsOfInterest == rhs.maxNumRegionsOfInterest );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceOpticalFlowPropertiesNV;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV supportedOutputGridSizes = {};
+ VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV supportedHintGridSizes = {};
+ VULKAN_HPP_NAMESPACE::Bool32 hintSupported = {};
+ VULKAN_HPP_NAMESPACE::Bool32 costSupported = {};
+ VULKAN_HPP_NAMESPACE::Bool32 bidirectionalFlowSupported = {};
+ VULKAN_HPP_NAMESPACE::Bool32 globalFlowSupported = {};
+ uint32_t minWidth = {};
+ uint32_t minHeight = {};
+ uint32_t maxWidth = {};
+ uint32_t maxHeight = {};
+ uint32_t maxNumRegionsOfInterest = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceOpticalFlowPropertiesNV>
{
- using Type = PhysicalDeviceMutableDescriptorTypeFeaturesVALVE;
+ using Type = PhysicalDeviceOpticalFlowPropertiesNV;
};
struct PhysicalDevicePCIBusInfoPropertiesEXT
@@ -58612,59 +62210,50 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDevicePCIBusInfoPropertiesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( uint32_t pciDomain_ = {},
- uint32_t pciBus_ = {},
- uint32_t pciDevice_ = {},
- uint32_t pciFunction_ = {} ) VULKAN_HPP_NOEXCEPT
- : pciDomain( pciDomain_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT(
+ uint32_t pciDomain_ = {}, uint32_t pciBus_ = {}, uint32_t pciDevice_ = {}, uint32_t pciFunction_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pciDomain( pciDomain_ )
, pciBus( pciBus_ )
, pciDevice( pciDevice_ )
, pciFunction( pciFunction_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePCIBusInfoPropertiesEXT( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDevicePCIBusInfoPropertiesEXT(
- *reinterpret_cast<PhysicalDevicePCIBusInfoPropertiesEXT const *>( &rhs ) )
- {}
+ : PhysicalDevicePCIBusInfoPropertiesEXT( *reinterpret_cast<PhysicalDevicePCIBusInfoPropertiesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDevicePCIBusInfoPropertiesEXT &
- operator=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDevicePCIBusInfoPropertiesEXT & operator=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDevicePCIBusInfoPropertiesEXT &
- operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePCIBusInfoPropertiesEXT & operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDevicePCIBusInfoPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevicePCIBusInfoPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePCIBusInfoPropertiesEXT *>( this );
}
- explicit operator VkPhysicalDevicePCIBusInfoPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevicePCIBusInfoPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePCIBusInfoPropertiesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- uint32_t const &,
- uint32_t const &,
- uint32_t const &,
- uint32_t const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -58677,11 +62266,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pciDomain == rhs.pciDomain ) &&
- ( pciBus == rhs.pciBus ) && ( pciDevice == rhs.pciDevice ) && ( pciFunction == rhs.pciFunction );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pciDomain == rhs.pciDomain ) && ( pciBus == rhs.pciBus ) && ( pciDevice == rhs.pciDevice ) &&
+ ( pciFunction == rhs.pciFunction );
# endif
}
@@ -58699,14 +62288,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t pciDevice = {};
uint32_t pciFunction = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT ) ==
- sizeof( VkPhysicalDevicePCIBusInfoPropertiesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT>::value,
- "PhysicalDevicePCIBusInfoPropertiesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePciBusInfoPropertiesEXT>
@@ -58719,63 +62300,60 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory_ = {} ) VULKAN_HPP_NOEXCEPT
- : pageableDeviceLocalMemory( pageableDeviceLocalMemory_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pageableDeviceLocalMemory( pageableDeviceLocalMemory_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT(
- PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT(
- VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT(
- *reinterpret_cast<PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const *>( &rhs ) )
- {}
+ PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( *reinterpret_cast<PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT &
operator=( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT &
- operator=( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & operator=( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT &
- setPageableDeviceLocalMemory( VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory_ ) VULKAN_HPP_NOEXCEPT
+ setPageableDeviceLocalMemory( VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory_ ) VULKAN_HPP_NOEXCEPT
{
pageableDeviceLocalMemory = pageableDeviceLocalMemory_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -58792,11 +62370,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( pageableDeviceLocalMemory == rhs.pageableDeviceLocalMemory );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pageableDeviceLocalMemory == rhs.pageableDeviceLocalMemory );
# endif
}
@@ -58807,19 +62384,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT ) ==
- sizeof( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT>::value,
- "PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT>
@@ -58832,32 +62400,29 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDevicePerformanceQueryFeaturesKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR(
- VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ = {} ) VULKAN_HPP_NOEXCEPT
- : performanceCounterQueryPools( performanceCounterQueryPools_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , performanceCounterQueryPools( performanceCounterQueryPools_ )
, performanceCounterMultipleQueryPools( performanceCounterMultipleQueryPools_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR(
- PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDevicePerformanceQueryFeaturesKHR( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDevicePerformanceQueryFeaturesKHR(
- *reinterpret_cast<PhysicalDevicePerformanceQueryFeaturesKHR const *>( &rhs ) )
- {}
+ PhysicalDevicePerformanceQueryFeaturesKHR( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDevicePerformanceQueryFeaturesKHR( *reinterpret_cast<PhysicalDevicePerformanceQueryFeaturesKHR const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDevicePerformanceQueryFeaturesKHR &
- operator=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDevicePerformanceQueryFeaturesKHR & operator=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDevicePerformanceQueryFeaturesKHR &
- operator=( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePerformanceQueryFeaturesKHR & operator=( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR const *>( &rhs );
return *this;
@@ -58871,38 +62436,35 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR &
- setPerformanceCounterQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ ) VULKAN_HPP_NOEXCEPT
+ setPerformanceCounterQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ ) VULKAN_HPP_NOEXCEPT
{
performanceCounterQueryPools = performanceCounterQueryPools_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & setPerformanceCounterMultipleQueryPools(
- VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR &
+ setPerformanceCounterMultipleQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ ) VULKAN_HPP_NOEXCEPT
{
performanceCounterMultipleQueryPools = performanceCounterMultipleQueryPools_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDevicePerformanceQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevicePerformanceQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePerformanceQueryFeaturesKHR *>( this );
}
- explicit operator VkPhysicalDevicePerformanceQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevicePerformanceQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePerformanceQueryFeaturesKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -58915,11 +62477,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( performanceCounterQueryPools == rhs.performanceCounterQueryPools ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( performanceCounterQueryPools == rhs.performanceCounterQueryPools ) &&
( performanceCounterMultipleQueryPools == rhs.performanceCounterMultipleQueryPools );
# endif
}
@@ -58931,20 +62492,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools = {};
VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR ) ==
- sizeof( VkPhysicalDevicePerformanceQueryFeaturesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR>::value,
- "PhysicalDevicePerformanceQueryFeaturesKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR>
@@ -58957,46 +62509,43 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDevicePerformanceQueryPropertiesKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR(
- VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies_ = {} ) VULKAN_HPP_NOEXCEPT
- : allowCommandBufferQueryCopies( allowCommandBufferQueryCopies_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , allowCommandBufferQueryCopies( allowCommandBufferQueryCopies_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR(
- PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDevicePerformanceQueryPropertiesKHR( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDevicePerformanceQueryPropertiesKHR(
- *reinterpret_cast<PhysicalDevicePerformanceQueryPropertiesKHR const *>( &rhs ) )
- {}
+ PhysicalDevicePerformanceQueryPropertiesKHR( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDevicePerformanceQueryPropertiesKHR( *reinterpret_cast<PhysicalDevicePerformanceQueryPropertiesKHR const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDevicePerformanceQueryPropertiesKHR &
- operator=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDevicePerformanceQueryPropertiesKHR & operator=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDevicePerformanceQueryPropertiesKHR &
- operator=( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePerformanceQueryPropertiesKHR & operator=( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDevicePerformanceQueryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevicePerformanceQueryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePerformanceQueryPropertiesKHR *>( this );
}
- explicit operator VkPhysicalDevicePerformanceQueryPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevicePerformanceQueryPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePerformanceQueryPropertiesKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -59013,11 +62562,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( allowCommandBufferQueryCopies == rhs.allowCommandBufferQueryCopies );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allowCommandBufferQueryCopies == rhs.allowCommandBufferQueryCopies );
# endif
}
@@ -59028,19 +62576,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR ) ==
- sizeof( VkPhysicalDevicePerformanceQueryPropertiesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR>::value,
- "PhysicalDevicePerformanceQueryPropertiesKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR>
@@ -59053,63 +62592,60 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDevicePipelineCreationCacheControlFeatures;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeatures(
- VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {} ) VULKAN_HPP_NOEXCEPT
- : pipelineCreationCacheControl( pipelineCreationCacheControl_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeatures( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pipelineCreationCacheControl( pipelineCreationCacheControl_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeatures(
- PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDevicePipelineCreationCacheControlFeatures( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDevicePipelineCreationCacheControlFeatures(
- VkPhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDevicePipelineCreationCacheControlFeatures(
- *reinterpret_cast<PhysicalDevicePipelineCreationCacheControlFeatures const *>( &rhs ) )
- {}
+ PhysicalDevicePipelineCreationCacheControlFeatures( VkPhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDevicePipelineCreationCacheControlFeatures( *reinterpret_cast<PhysicalDevicePipelineCreationCacheControlFeatures const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDevicePipelineCreationCacheControlFeatures &
operator=( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDevicePipelineCreationCacheControlFeatures &
- operator=( VkPhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePipelineCreationCacheControlFeatures & operator=( VkPhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeatures &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeatures &
- setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT
+ setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT
{
pipelineCreationCacheControl = pipelineCreationCacheControl_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDevicePipelineCreationCacheControlFeatures const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevicePipelineCreationCacheControlFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePipelineCreationCacheControlFeatures *>( this );
}
- explicit operator VkPhysicalDevicePipelineCreationCacheControlFeatures &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevicePipelineCreationCacheControlFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePipelineCreationCacheControlFeatures *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -59126,11 +62662,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl );
# endif
}
@@ -59141,19 +62676,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures ) ==
- sizeof( VkPhysicalDevicePipelineCreationCacheControlFeatures ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures>::value,
- "PhysicalDevicePipelineCreationCacheControlFeatures is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures>
@@ -59167,63 +62693,60 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR(
- VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ = {} ) VULKAN_HPP_NOEXCEPT
- : pipelineExecutableInfo( pipelineExecutableInfo_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pipelineExecutableInfo( pipelineExecutableInfo_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR(
- PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDevicePipelineExecutablePropertiesFeaturesKHR(
- VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDevicePipelineExecutablePropertiesFeaturesKHR(
- *reinterpret_cast<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *>( &rhs ) )
- {}
+ PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( *reinterpret_cast<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDevicePipelineExecutablePropertiesFeaturesKHR &
operator=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDevicePipelineExecutablePropertiesFeaturesKHR &
- operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR &
- setPipelineExecutableInfo( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ ) VULKAN_HPP_NOEXCEPT
+ setPipelineExecutableInfo( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ ) VULKAN_HPP_NOEXCEPT
{
pipelineExecutableInfo = pipelineExecutableInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR *>( this );
}
- explicit operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -59240,11 +62763,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( pipelineExecutableInfo == rhs.pipelineExecutableInfo );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineExecutableInfo == rhs.pipelineExecutableInfo );
# endif
}
@@ -59255,20 +62777,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) ==
- sizeof( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<
- VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>::value,
- "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR>
@@ -59276,57 +62788,456 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
};
+ struct PhysicalDevicePipelinePropertiesFeaturesEXT
+ {
+ using NativeType = VkPhysicalDevicePipelinePropertiesFeaturesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelinePropertiesFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePipelinePropertiesFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 pipelinePropertiesIdentifier_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pipelinePropertiesIdentifier( pipelinePropertiesIdentifier_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePipelinePropertiesFeaturesEXT( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDevicePipelinePropertiesFeaturesEXT( VkPhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDevicePipelinePropertiesFeaturesEXT( *reinterpret_cast<PhysicalDevicePipelinePropertiesFeaturesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDevicePipelinePropertiesFeaturesEXT & operator=( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDevicePipelinePropertiesFeaturesEXT & operator=( VkPhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelinePropertiesFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelinePropertiesFeaturesEXT &
+ setPipelinePropertiesIdentifier( VULKAN_HPP_NAMESPACE::Bool32 pipelinePropertiesIdentifier_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pipelinePropertiesIdentifier = pipelinePropertiesIdentifier_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDevicePipelinePropertiesFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDevicePipelinePropertiesFeaturesEXT *>( this );
+ }
+
+ operator VkPhysicalDevicePipelinePropertiesFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDevicePipelinePropertiesFeaturesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, pipelinePropertiesIdentifier );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDevicePipelinePropertiesFeaturesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelinePropertiesIdentifier == rhs.pipelinePropertiesIdentifier );
+# endif
+ }
+
+ bool operator!=( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelinePropertiesFeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 pipelinePropertiesIdentifier = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDevicePipelinePropertiesFeaturesEXT>
+ {
+ using Type = PhysicalDevicePipelinePropertiesFeaturesEXT;
+ };
+
+ struct PhysicalDevicePipelineProtectedAccessFeaturesEXT
+ {
+ using NativeType = VkPhysicalDevicePipelineProtectedAccessFeaturesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineProtectedAccessFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineProtectedAccessFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pipelineProtectedAccess( pipelineProtectedAccess_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDevicePipelineProtectedAccessFeaturesEXT( PhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDevicePipelineProtectedAccessFeaturesEXT( VkPhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDevicePipelineProtectedAccessFeaturesEXT( *reinterpret_cast<PhysicalDevicePipelineProtectedAccessFeaturesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDevicePipelineProtectedAccessFeaturesEXT & operator=( PhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDevicePipelineProtectedAccessFeaturesEXT & operator=( VkPhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineProtectedAccessFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineProtectedAccessFeaturesEXT &
+ setPipelineProtectedAccess( VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pipelineProtectedAccess = pipelineProtectedAccess_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDevicePipelineProtectedAccessFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDevicePipelineProtectedAccessFeaturesEXT *>( this );
+ }
+
+ operator VkPhysicalDevicePipelineProtectedAccessFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDevicePipelineProtectedAccessFeaturesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, pipelineProtectedAccess );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDevicePipelineProtectedAccessFeaturesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineProtectedAccess == rhs.pipelineProtectedAccess );
+# endif
+ }
+
+ bool operator!=( PhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineProtectedAccessFeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDevicePipelineProtectedAccessFeaturesEXT>
+ {
+ using Type = PhysicalDevicePipelineProtectedAccessFeaturesEXT;
+ };
+
+ struct PhysicalDevicePipelineRobustnessFeaturesEXT
+ {
+ using NativeType = VkPhysicalDevicePipelineRobustnessFeaturesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineRobustnessFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pipelineRobustness( pipelineRobustness_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessFeaturesEXT( PhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDevicePipelineRobustnessFeaturesEXT( VkPhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDevicePipelineRobustnessFeaturesEXT( *reinterpret_cast<PhysicalDevicePipelineRobustnessFeaturesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDevicePipelineRobustnessFeaturesEXT & operator=( PhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDevicePipelineRobustnessFeaturesEXT & operator=( VkPhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineRobustnessFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineRobustnessFeaturesEXT &
+ setPipelineRobustness( VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pipelineRobustness = pipelineRobustness_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDevicePipelineRobustnessFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDevicePipelineRobustnessFeaturesEXT *>( this );
+ }
+
+ operator VkPhysicalDevicePipelineRobustnessFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDevicePipelineRobustnessFeaturesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, pipelineRobustness );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDevicePipelineRobustnessFeaturesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineRobustness == rhs.pipelineRobustness );
+# endif
+ }
+
+ bool operator!=( PhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineRobustnessFeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDevicePipelineRobustnessFeaturesEXT>
+ {
+ using Type = PhysicalDevicePipelineRobustnessFeaturesEXT;
+ };
+
+ struct PhysicalDevicePipelineRobustnessPropertiesEXT
+ {
+ using NativeType = VkPhysicalDevicePipelineRobustnessPropertiesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineRobustnessPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDevicePipelineRobustnessPropertiesEXT( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessStorageBuffers_ =
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault,
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessUniformBuffers_ =
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault,
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessVertexInputs_ =
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault,
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT defaultRobustnessImages_ =
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT::eDeviceDefault,
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , defaultRobustnessStorageBuffers( defaultRobustnessStorageBuffers_ )
+ , defaultRobustnessUniformBuffers( defaultRobustnessUniformBuffers_ )
+ , defaultRobustnessVertexInputs( defaultRobustnessVertexInputs_ )
+ , defaultRobustnessImages( defaultRobustnessImages_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDevicePipelineRobustnessPropertiesEXT( PhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDevicePipelineRobustnessPropertiesEXT( VkPhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDevicePipelineRobustnessPropertiesEXT( *reinterpret_cast<PhysicalDevicePipelineRobustnessPropertiesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDevicePipelineRobustnessPropertiesEXT & operator=( PhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDevicePipelineRobustnessPropertiesEXT & operator=( VkPhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT const *>( &rhs );
+ return *this;
+ }
+
+ operator VkPhysicalDevicePipelineRobustnessPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDevicePipelineRobustnessPropertiesEXT *>( this );
+ }
+
+ operator VkPhysicalDevicePipelineRobustnessPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDevicePipelineRobustnessPropertiesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT const &,
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT const &,
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT const &,
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, defaultRobustnessStorageBuffers, defaultRobustnessUniformBuffers, defaultRobustnessVertexInputs, defaultRobustnessImages );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDevicePipelineRobustnessPropertiesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( defaultRobustnessStorageBuffers == rhs.defaultRobustnessStorageBuffers ) &&
+ ( defaultRobustnessUniformBuffers == rhs.defaultRobustnessUniformBuffers ) &&
+ ( defaultRobustnessVertexInputs == rhs.defaultRobustnessVertexInputs ) && ( defaultRobustnessImages == rhs.defaultRobustnessImages );
+# endif
+ }
+
+ bool operator!=( PhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineRobustnessPropertiesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessStorageBuffers =
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault;
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessUniformBuffers =
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault;
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessVertexInputs =
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault;
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT defaultRobustnessImages = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT::eDeviceDefault;
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDevicePipelineRobustnessPropertiesEXT>
+ {
+ using Type = PhysicalDevicePipelineRobustnessPropertiesEXT;
+ };
+
struct PhysicalDevicePointClippingProperties
{
using NativeType = VkPhysicalDevicePointClippingProperties;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDevicePointClippingProperties;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePointClippingProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties(
- VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ =
- VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes ) VULKAN_HPP_NOEXCEPT
- : pointClippingBehavior( pointClippingBehavior_ )
- {}
+ VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes,
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pointClippingBehavior( pointClippingBehavior_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( PhysicalDevicePointClippingProperties const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePointClippingProperties( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDevicePointClippingProperties(
- *reinterpret_cast<PhysicalDevicePointClippingProperties const *>( &rhs ) )
- {}
+ : PhysicalDevicePointClippingProperties( *reinterpret_cast<PhysicalDevicePointClippingProperties const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDevicePointClippingProperties &
- operator=( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDevicePointClippingProperties & operator=( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDevicePointClippingProperties &
- operator=( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePointClippingProperties & operator=( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDevicePointClippingProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevicePointClippingProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePointClippingProperties *>( this );
}
- explicit operator VkPhysicalDevicePointClippingProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevicePointClippingProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePointClippingProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::PointClippingBehavior const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PointClippingBehavior const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -59339,7 +63250,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDevicePointClippingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pointClippingBehavior == rhs.pointClippingBehavior );
@@ -59353,19 +63264,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePointClippingProperties;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior =
- VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePointClippingProperties;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties ) ==
- sizeof( VkPhysicalDevicePointClippingProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties>::value,
- "PhysicalDevicePointClippingProperties is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePointClippingProperties>
@@ -59380,27 +63282,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDevicePortabilitySubsetFeaturesKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR(
- VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 events_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ = {} ) VULKAN_HPP_NOEXCEPT
- : constantAlphaColorBlendFactors( constantAlphaColorBlendFactors_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 events_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , constantAlphaColorBlendFactors( constantAlphaColorBlendFactors_ )
, events( events_ )
, imageViewFormatReinterpretation( imageViewFormatReinterpretation_ )
, imageViewFormatSwizzle( imageViewFormatSwizzle_ )
@@ -59415,23 +63317,20 @@ namespace VULKAN_HPP_NAMESPACE
, tessellationPointMode( tessellationPointMode_ )
, triangleFans( triangleFans_ )
, vertexAttributeAccessBeyondStride( vertexAttributeAccessBeyondStride_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR(
- PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDevicePortabilitySubsetFeaturesKHR( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDevicePortabilitySubsetFeaturesKHR(
- *reinterpret_cast<PhysicalDevicePortabilitySubsetFeaturesKHR const *>( &rhs ) )
- {}
+ PhysicalDevicePortabilitySubsetFeaturesKHR( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDevicePortabilitySubsetFeaturesKHR( *reinterpret_cast<PhysicalDevicePortabilitySubsetFeaturesKHR const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDevicePortabilitySubsetFeaturesKHR &
- operator=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDevicePortabilitySubsetFeaturesKHR & operator=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDevicePortabilitySubsetFeaturesKHR &
- operator=( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePortabilitySubsetFeaturesKHR & operator=( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR const *>( &rhs );
return *this;
@@ -59444,57 +63343,55 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setConstantAlphaColorBlendFactors(
- VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
+ setConstantAlphaColorBlendFactors( VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ ) VULKAN_HPP_NOEXCEPT
{
constantAlphaColorBlendFactors = constantAlphaColorBlendFactors_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
- setEvents( VULKAN_HPP_NAMESPACE::Bool32 events_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setEvents( VULKAN_HPP_NAMESPACE::Bool32 events_ ) VULKAN_HPP_NOEXCEPT
{
events = events_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setImageViewFormatReinterpretation(
- VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
+ setImageViewFormatReinterpretation( VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ ) VULKAN_HPP_NOEXCEPT
{
imageViewFormatReinterpretation = imageViewFormatReinterpretation_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
- setImageViewFormatSwizzle( VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ ) VULKAN_HPP_NOEXCEPT
+ setImageViewFormatSwizzle( VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ ) VULKAN_HPP_NOEXCEPT
{
imageViewFormatSwizzle = imageViewFormatSwizzle_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
- setImageView2DOn3DImage( VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ ) VULKAN_HPP_NOEXCEPT
+ setImageView2DOn3DImage( VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ ) VULKAN_HPP_NOEXCEPT
{
imageView2DOn3DImage = imageView2DOn3DImage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
- setMultisampleArrayImage( VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ ) VULKAN_HPP_NOEXCEPT
+ setMultisampleArrayImage( VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ ) VULKAN_HPP_NOEXCEPT
{
multisampleArrayImage = multisampleArrayImage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
- setMutableComparisonSamplers( VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ ) VULKAN_HPP_NOEXCEPT
+ setMutableComparisonSamplers( VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ ) VULKAN_HPP_NOEXCEPT
{
mutableComparisonSamplers = mutableComparisonSamplers_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
- setPointPolygons( VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setPointPolygons( VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ ) VULKAN_HPP_NOEXCEPT
{
pointPolygons = pointPolygons_;
return *this;
@@ -59508,59 +63405,58 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
- setSeparateStencilMaskRef( VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ ) VULKAN_HPP_NOEXCEPT
+ setSeparateStencilMaskRef( VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ ) VULKAN_HPP_NOEXCEPT
{
separateStencilMaskRef = separateStencilMaskRef_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setShaderSampleRateInterpolationFunctions(
- VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
+ setShaderSampleRateInterpolationFunctions( VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ ) VULKAN_HPP_NOEXCEPT
{
shaderSampleRateInterpolationFunctions = shaderSampleRateInterpolationFunctions_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
- setTessellationIsolines( VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ ) VULKAN_HPP_NOEXCEPT
+ setTessellationIsolines( VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ ) VULKAN_HPP_NOEXCEPT
{
tessellationIsolines = tessellationIsolines_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
- setTessellationPointMode( VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ ) VULKAN_HPP_NOEXCEPT
+ setTessellationPointMode( VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ ) VULKAN_HPP_NOEXCEPT
{
tessellationPointMode = tessellationPointMode_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
- setTriangleFans( VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setTriangleFans( VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ ) VULKAN_HPP_NOEXCEPT
{
triangleFans = triangleFans_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setVertexAttributeAccessBeyondStride(
- VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
+ setVertexAttributeAccessBeyondStride( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ ) VULKAN_HPP_NOEXCEPT
{
vertexAttributeAccessBeyondStride = vertexAttributeAccessBeyondStride_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDevicePortabilitySubsetFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevicePortabilitySubsetFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePortabilitySubsetFeaturesKHR *>( this );
}
- explicit operator VkPhysicalDevicePortabilitySubsetFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevicePortabilitySubsetFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePortabilitySubsetFeaturesKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -59609,20 +63505,16 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( constantAlphaColorBlendFactors == rhs.constantAlphaColorBlendFactors ) && ( events == rhs.events ) &&
- ( imageViewFormatReinterpretation == rhs.imageViewFormatReinterpretation ) &&
- ( imageViewFormatSwizzle == rhs.imageViewFormatSwizzle ) &&
- ( imageView2DOn3DImage == rhs.imageView2DOn3DImage ) &&
- ( multisampleArrayImage == rhs.multisampleArrayImage ) &&
- ( mutableComparisonSamplers == rhs.mutableComparisonSamplers ) && ( pointPolygons == rhs.pointPolygons ) &&
- ( samplerMipLodBias == rhs.samplerMipLodBias ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( constantAlphaColorBlendFactors == rhs.constantAlphaColorBlendFactors ) &&
+ ( events == rhs.events ) && ( imageViewFormatReinterpretation == rhs.imageViewFormatReinterpretation ) &&
+ ( imageViewFormatSwizzle == rhs.imageViewFormatSwizzle ) && ( imageView2DOn3DImage == rhs.imageView2DOn3DImage ) &&
+ ( multisampleArrayImage == rhs.multisampleArrayImage ) && ( mutableComparisonSamplers == rhs.mutableComparisonSamplers ) &&
+ ( pointPolygons == rhs.pointPolygons ) && ( samplerMipLodBias == rhs.samplerMipLodBias ) &&
( separateStencilMaskRef == rhs.separateStencilMaskRef ) &&
- ( shaderSampleRateInterpolationFunctions == rhs.shaderSampleRateInterpolationFunctions ) &&
- ( tessellationIsolines == rhs.tessellationIsolines ) &&
+ ( shaderSampleRateInterpolationFunctions == rhs.shaderSampleRateInterpolationFunctions ) && ( tessellationIsolines == rhs.tessellationIsolines ) &&
( tessellationPointMode == rhs.tessellationPointMode ) && ( triangleFans == rhs.triangleFans ) &&
( vertexAttributeAccessBeyondStride == rhs.vertexAttributeAccessBeyondStride );
# endif
@@ -59635,8 +63527,8 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors = {};
VULKAN_HPP_NAMESPACE::Bool32 events = {};
VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation = {};
@@ -59653,15 +63545,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 triangleFans = {};
VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR ) ==
- sizeof( VkPhysicalDevicePortabilitySubsetFeaturesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR>::value,
- "PhysicalDevicePortabilitySubsetFeaturesKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR>
@@ -59676,30 +63559,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDevicePortabilitySubsetPropertiesKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR(
- uint32_t minVertexInputBindingStrideAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
- : minVertexInputBindingStrideAlignment( minVertexInputBindingStrideAlignment_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR( uint32_t minVertexInputBindingStrideAlignment_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , minVertexInputBindingStrideAlignment( minVertexInputBindingStrideAlignment_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR(
- PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDevicePortabilitySubsetPropertiesKHR( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDevicePortabilitySubsetPropertiesKHR(
- *reinterpret_cast<PhysicalDevicePortabilitySubsetPropertiesKHR const *>( &rhs ) )
- {}
+ PhysicalDevicePortabilitySubsetPropertiesKHR( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDevicePortabilitySubsetPropertiesKHR( *reinterpret_cast<PhysicalDevicePortabilitySubsetPropertiesKHR const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDevicePortabilitySubsetPropertiesKHR &
- operator=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDevicePortabilitySubsetPropertiesKHR &
- operator=( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR const *>( &rhs );
return *this;
@@ -59713,24 +63593,24 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetPropertiesKHR &
- setMinVertexInputBindingStrideAlignment( uint32_t minVertexInputBindingStrideAlignment_ ) VULKAN_HPP_NOEXCEPT
+ setMinVertexInputBindingStrideAlignment( uint32_t minVertexInputBindingStrideAlignment_ ) VULKAN_HPP_NOEXCEPT
{
minVertexInputBindingStrideAlignment = minVertexInputBindingStrideAlignment_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDevicePortabilitySubsetPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevicePortabilitySubsetPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePortabilitySubsetPropertiesKHR *>( this );
}
- explicit operator VkPhysicalDevicePortabilitySubsetPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevicePortabilitySubsetPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePortabilitySubsetPropertiesKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -59747,11 +63627,10 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( minVertexInputBindingStrideAlignment == rhs.minVertexInputBindingStrideAlignment );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minVertexInputBindingStrideAlignment == rhs.minVertexInputBindingStrideAlignment );
# endif
}
@@ -59762,19 +63641,10 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR;
+ void * pNext = {};
uint32_t minVertexInputBindingStrideAlignment = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR ) ==
- sizeof( VkPhysicalDevicePortabilitySubsetPropertiesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR>::value,
- "PhysicalDevicePortabilitySubsetPropertiesKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR>
@@ -59783,33 +63653,128 @@ namespace VULKAN_HPP_NAMESPACE
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ struct PhysicalDevicePresentBarrierFeaturesNV
+ {
+ using NativeType = VkPhysicalDevicePresentBarrierFeaturesNV;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentBarrierFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePresentBarrierFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 presentBarrier_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , presentBarrier( presentBarrier_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePresentBarrierFeaturesNV( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDevicePresentBarrierFeaturesNV( VkPhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDevicePresentBarrierFeaturesNV( *reinterpret_cast<PhysicalDevicePresentBarrierFeaturesNV const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDevicePresentBarrierFeaturesNV & operator=( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDevicePresentBarrierFeaturesNV & operator=( VkPhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentBarrierFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentBarrierFeaturesNV & setPresentBarrier( VULKAN_HPP_NAMESPACE::Bool32 presentBarrier_ ) VULKAN_HPP_NOEXCEPT
+ {
+ presentBarrier = presentBarrier_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDevicePresentBarrierFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDevicePresentBarrierFeaturesNV *>( this );
+ }
+
+ operator VkPhysicalDevicePresentBarrierFeaturesNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDevicePresentBarrierFeaturesNV *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, presentBarrier );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDevicePresentBarrierFeaturesNV const & ) const = default;
+#else
+ bool operator==( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentBarrier == rhs.presentBarrier );
+# endif
+ }
+
+ bool operator!=( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentBarrierFeaturesNV;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 presentBarrier = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDevicePresentBarrierFeaturesNV>
+ {
+ using Type = PhysicalDevicePresentBarrierFeaturesNV;
+ };
+
struct PhysicalDevicePresentIdFeaturesKHR
{
using NativeType = VkPhysicalDevicePresentIdFeaturesKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDevicePresentIdFeaturesKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentIdFeaturesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDevicePresentIdFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 presentId_ = {} ) VULKAN_HPP_NOEXCEPT
- : presentId( presentId_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePresentIdFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 presentId_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , presentId( presentId_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDevicePresentIdFeaturesKHR( PhysicalDevicePresentIdFeaturesKHR const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePresentIdFeaturesKHR( PhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePresentIdFeaturesKHR( VkPhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePresentIdFeaturesKHR( *reinterpret_cast<PhysicalDevicePresentIdFeaturesKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDevicePresentIdFeaturesKHR &
- operator=( PhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDevicePresentIdFeaturesKHR & operator=( PhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDevicePresentIdFeaturesKHR &
- operator=( VkPhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePresentIdFeaturesKHR & operator=( VkPhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR const *>( &rhs );
return *this;
@@ -59822,25 +63787,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentIdFeaturesKHR &
- setPresentId( VULKAN_HPP_NAMESPACE::Bool32 presentId_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentIdFeaturesKHR & setPresentId( VULKAN_HPP_NAMESPACE::Bool32 presentId_ ) VULKAN_HPP_NOEXCEPT
{
presentId = presentId_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDevicePresentIdFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevicePresentIdFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePresentIdFeaturesKHR *>( this );
}
- explicit operator VkPhysicalDevicePresentIdFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevicePresentIdFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePresentIdFeaturesKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -59857,7 +63821,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDevicePresentIdFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentId == rhs.presentId );
@@ -59875,14 +63839,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 presentId = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR ) ==
- sizeof( VkPhysicalDevicePresentIdFeaturesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR>::value,
- "PhysicalDevicePresentIdFeaturesKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePresentIdFeaturesKHR>
@@ -59895,28 +63851,26 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDevicePresentWaitFeaturesKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDevicePresentWaitFeaturesKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentWaitFeaturesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDevicePresentWaitFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 presentWait_ = {} ) VULKAN_HPP_NOEXCEPT
- : presentWait( presentWait_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWaitFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 presentWait_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , presentWait( presentWait_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWaitFeaturesKHR( PhysicalDevicePresentWaitFeaturesKHR const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWaitFeaturesKHR( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePresentWaitFeaturesKHR( VkPhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePresentWaitFeaturesKHR( *reinterpret_cast<PhysicalDevicePresentWaitFeaturesKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDevicePresentWaitFeaturesKHR &
- operator=( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDevicePresentWaitFeaturesKHR & operator=( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDevicePresentWaitFeaturesKHR &
- operator=( VkPhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePresentWaitFeaturesKHR & operator=( VkPhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR const *>( &rhs );
return *this;
@@ -59929,25 +63883,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWaitFeaturesKHR &
- setPresentWait( VULKAN_HPP_NAMESPACE::Bool32 presentWait_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWaitFeaturesKHR & setPresentWait( VULKAN_HPP_NAMESPACE::Bool32 presentWait_ ) VULKAN_HPP_NOEXCEPT
{
presentWait = presentWait_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDevicePresentWaitFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevicePresentWaitFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePresentWaitFeaturesKHR *>( this );
}
- explicit operator VkPhysicalDevicePresentWaitFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevicePresentWaitFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePresentWaitFeaturesKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -59964,7 +63917,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentWait == rhs.presentWait );
@@ -59982,14 +63935,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 presentWait = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR ) ==
- sizeof( VkPhysicalDevicePresentWaitFeaturesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR>::value,
- "PhysicalDevicePresentWaitFeaturesKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePresentWaitFeaturesKHR>
@@ -60002,80 +63947,73 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart_ = {} ) VULKAN_HPP_NOEXCEPT
- : primitiveTopologyListRestart( primitiveTopologyListRestart_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , primitiveTopologyListRestart( primitiveTopologyListRestart_ )
, primitiveTopologyPatchListRestart( primitiveTopologyPatchListRestart_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT(
- PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT(
- VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT(
- *reinterpret_cast<PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const *>( &rhs ) )
- {}
+ PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( *reinterpret_cast<PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT &
operator=( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT &
- operator=( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & operator=( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT &
- setPrimitiveTopologyListRestart( VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart_ ) VULKAN_HPP_NOEXCEPT
+ setPrimitiveTopologyListRestart( VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart_ ) VULKAN_HPP_NOEXCEPT
{
primitiveTopologyListRestart = primitiveTopologyListRestart_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT &
- setPrimitiveTopologyPatchListRestart( VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart_ )
- VULKAN_HPP_NOEXCEPT
+ setPrimitiveTopologyPatchListRestart( VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart_ ) VULKAN_HPP_NOEXCEPT
{
primitiveTopologyPatchListRestart = primitiveTopologyPatchListRestart_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -60088,11 +64026,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( primitiveTopologyListRestart == rhs.primitiveTopologyListRestart ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( primitiveTopologyListRestart == rhs.primitiveTopologyListRestart ) &&
( primitiveTopologyPatchListRestart == rhs.primitiveTopologyPatchListRestart );
# endif
}
@@ -60104,21 +64041,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart = {};
VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT ) ==
- sizeof( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<
- VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT>::value,
- "PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT>
@@ -60126,30 +64053,155 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
};
+ struct PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT
+ {
+ using NativeType = VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQuery_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithRasterizerDiscard_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithNonZeroStreams_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , primitivesGeneratedQuery( primitivesGeneratedQuery_ )
+ , primitivesGeneratedQueryWithRasterizerDiscard( primitivesGeneratedQueryWithRasterizerDiscard_ )
+ , primitivesGeneratedQueryWithNonZeroStreams( primitivesGeneratedQueryWithNonZeroStreams_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( *reinterpret_cast<PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT &
+ operator=( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & operator=( VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT &
+ setPrimitivesGeneratedQuery( VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQuery_ ) VULKAN_HPP_NOEXCEPT
+ {
+ primitivesGeneratedQuery = primitivesGeneratedQuery_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT &
+ setPrimitivesGeneratedQueryWithRasterizerDiscard( VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithRasterizerDiscard_ ) VULKAN_HPP_NOEXCEPT
+ {
+ primitivesGeneratedQueryWithRasterizerDiscard = primitivesGeneratedQueryWithRasterizerDiscard_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT &
+ setPrimitivesGeneratedQueryWithNonZeroStreams( VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithNonZeroStreams_ ) VULKAN_HPP_NOEXCEPT
+ {
+ primitivesGeneratedQueryWithNonZeroStreams = primitivesGeneratedQueryWithNonZeroStreams_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT *>( this );
+ }
+
+ operator VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, primitivesGeneratedQuery, primitivesGeneratedQueryWithRasterizerDiscard, primitivesGeneratedQueryWithNonZeroStreams );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( primitivesGeneratedQuery == rhs.primitivesGeneratedQuery ) &&
+ ( primitivesGeneratedQueryWithRasterizerDiscard == rhs.primitivesGeneratedQueryWithRasterizerDiscard ) &&
+ ( primitivesGeneratedQueryWithNonZeroStreams == rhs.primitivesGeneratedQueryWithNonZeroStreams );
+# endif
+ }
+
+ bool operator!=( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQuery = {};
+ VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithRasterizerDiscard = {};
+ VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithNonZeroStreams = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT>
+ {
+ using Type = PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT;
+ };
+
struct PhysicalDevicePrivateDataFeatures
{
using NativeType = VkPhysicalDevicePrivateDataFeatures;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDevicePrivateDataFeatures;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrivateDataFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDevicePrivateDataFeatures( VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {} ) VULKAN_HPP_NOEXCEPT
- : privateData( privateData_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeatures( VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , privateData( privateData_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PhysicalDevicePrivateDataFeatures( PhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeatures( PhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePrivateDataFeatures( VkPhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDevicePrivateDataFeatures( *reinterpret_cast<PhysicalDevicePrivateDataFeatures const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDevicePrivateDataFeatures &
- operator=( PhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDevicePrivateDataFeatures & operator=( PhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDevicePrivateDataFeatures & operator=( VkPhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -60164,25 +64216,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeatures &
- setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeatures & setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT
{
privateData = privateData_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDevicePrivateDataFeatures const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevicePrivateDataFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePrivateDataFeatures *>( this );
}
- explicit operator VkPhysicalDevicePrivateDataFeatures &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevicePrivateDataFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePrivateDataFeatures *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -60199,7 +64250,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDevicePrivateDataFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( privateData == rhs.privateData );
@@ -60217,14 +64268,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 privateData = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures ) ==
- sizeof( VkPhysicalDevicePrivateDataFeatures ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures>::value,
- "PhysicalDevicePrivateDataFeatures is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePrivateDataFeatures>
@@ -60238,29 +64281,28 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceSparseProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties(
- VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_ = {} ) VULKAN_HPP_NOEXCEPT
: residencyStandard2DBlockShape( residencyStandard2DBlockShape_ )
, residencyStandard2DMultisampleBlockShape( residencyStandard2DMultisampleBlockShape_ )
, residencyStandard3DBlockShape( residencyStandard3DBlockShape_ )
, residencyAlignedMipSize( residencyAlignedMipSize_ )
, residencyNonResidentStrict( residencyNonResidentStrict_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceSparseProperties( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSparseProperties( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceSparseProperties( *reinterpret_cast<PhysicalDeviceSparseProperties const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceSparseProperties &
- operator=( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceSparseProperties & operator=( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSparseProperties & operator=( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -60268,17 +64310,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkPhysicalDeviceSparseProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSparseProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSparseProperties *>( this );
}
- explicit operator VkPhysicalDeviceSparseProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSparseProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSparseProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -60303,13 +64345,12 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape ) &&
( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape ) &&
- ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape ) &&
- ( residencyAlignedMipSize == rhs.residencyAlignedMipSize ) &&
+ ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape ) && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize ) &&
( residencyNonResidentStrict == rhs.residencyNonResidentStrict );
# endif
}
@@ -60327,30 +64368,21 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize = {};
VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties ) ==
- sizeof( VkPhysicalDeviceSparseProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties>::value,
- "PhysicalDeviceSparseProperties is not nothrow_move_constructible!" );
struct PhysicalDeviceProperties
{
using NativeType = VkPhysicalDeviceProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties(
- uint32_t apiVersion_ = {},
- uint32_t driverVersion_ = {},
- uint32_t vendorID_ = {},
- uint32_t deviceID_ = {},
- VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther,
- std::array<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> const & deviceName_ = {},
- std::array<uint8_t, VK_UUID_SIZE> const & pipelineCacheUUID_ = {},
- VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {},
- VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( uint32_t apiVersion_ = {},
+ uint32_t driverVersion_ = {},
+ uint32_t vendorID_ = {},
+ uint32_t deviceID_ = {},
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther,
+ std::array<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> const & deviceName_ = {},
+ std::array<uint8_t, VK_UUID_SIZE> const & pipelineCacheUUID_ = {},
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {},
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {} ) VULKAN_HPP_NOEXCEPT
: apiVersion( apiVersion_ )
, driverVersion( driverVersion_ )
, vendorID( vendorID_ )
@@ -60360,14 +64392,15 @@ namespace VULKAN_HPP_NAMESPACE
, pipelineCacheUUID( pipelineCacheUUID_ )
, limits( limits_ )
, sparseProperties( sparseProperties_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14
- PhysicalDeviceProperties( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceProperties( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceProperties( *reinterpret_cast<PhysicalDeviceProperties const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceProperties & operator=( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -60378,17 +64411,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkPhysicalDeviceProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceProperties *>( this );
}
- explicit operator VkPhysicalDeviceProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -60404,15 +64437,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( apiVersion,
- driverVersion,
- vendorID,
- deviceID,
- deviceType,
- deviceName,
- pipelineCacheUUID,
- limits,
- sparseProperties );
+ return std::tie( apiVersion, driverVersion, vendorID, deviceID, deviceType, deviceName, pipelineCacheUUID, limits, sparseProperties );
}
#endif
@@ -60421,12 +64446,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( apiVersion == rhs.apiVersion ) && ( driverVersion == rhs.driverVersion ) &&
- ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) && ( deviceType == rhs.deviceType ) &&
- ( deviceName == rhs.deviceName ) && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) &&
+ return ( apiVersion == rhs.apiVersion ) && ( driverVersion == rhs.driverVersion ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) &&
+ ( deviceType == rhs.deviceType ) && ( deviceName == rhs.deviceName ) && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) &&
( limits == rhs.limits ) && ( sparseProperties == rhs.sparseProperties );
# endif
}
@@ -60438,23 +64462,16 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- uint32_t apiVersion = {};
- uint32_t driverVersion = {};
- uint32_t vendorID = {};
- uint32_t deviceID = {};
- VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther;
+ uint32_t apiVersion = {};
+ uint32_t driverVersion = {};
+ uint32_t vendorID = {};
+ uint32_t deviceID = {};
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther;
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> deviceName = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> pipelineCacheUUID = {};
VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits = {};
VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties ) ==
- sizeof( VkPhysicalDeviceProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties>::value,
- "PhysicalDeviceProperties is not nothrow_move_constructible!" );
struct PhysicalDeviceProperties2
{
@@ -60464,17 +64481,19 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProperties2;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14
- PhysicalDeviceProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = {} ) VULKAN_HPP_NOEXCEPT
- : properties( properties_ )
- {}
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , properties( properties_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14
- PhysicalDeviceProperties2( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceProperties2( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceProperties2( *reinterpret_cast<PhysicalDeviceProperties2 const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceProperties2 & operator=( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -60485,23 +64504,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkPhysicalDeviceProperties2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceProperties2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceProperties2 *>( this );
}
- explicit operator VkPhysicalDeviceProperties2 &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceProperties2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceProperties2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -60514,7 +64531,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties );
@@ -60532,13 +64549,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 ) ==
- sizeof( VkPhysicalDeviceProperties2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>::value,
- "PhysicalDeviceProperties2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceProperties2>
@@ -60552,29 +64562,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceProtectedMemoryFeatures;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceProtectedMemoryFeatures;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceProtectedMemoryFeatures( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {} ) VULKAN_HPP_NOEXCEPT
- : protectedMemory( protectedMemory_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , protectedMemory( protectedMemory_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( PhysicalDeviceProtectedMemoryFeatures const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceProtectedMemoryFeatures(
- *reinterpret_cast<PhysicalDeviceProtectedMemoryFeatures const *>( &rhs ) )
- {}
+ : PhysicalDeviceProtectedMemoryFeatures( *reinterpret_cast<PhysicalDeviceProtectedMemoryFeatures const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceProtectedMemoryFeatures &
- operator=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceProtectedMemoryFeatures & operator=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceProtectedMemoryFeatures &
- operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceProtectedMemoryFeatures & operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures const *>( &rhs );
return *this;
@@ -60587,25 +64595,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures &
- setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
{
protectedMemory = protectedMemory_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceProtectedMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceProtectedMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryFeatures *>( this );
}
- explicit operator VkPhysicalDeviceProtectedMemoryFeatures &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceProtectedMemoryFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryFeatures *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -60622,7 +64629,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedMemory == rhs.protectedMemory );
@@ -60640,14 +64647,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures ) ==
- sizeof( VkPhysicalDeviceProtectedMemoryFeatures ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures>::value,
- "PhysicalDeviceProtectedMemoryFeatures is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryFeatures>
@@ -60660,45 +64659,43 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceProtectedMemoryProperties;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceProtectedMemoryProperties;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceProtectedMemoryProperties( VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {} ) VULKAN_HPP_NOEXCEPT
- : protectedNoFault( protectedNoFault_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , protectedNoFault( protectedNoFault_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( PhysicalDeviceProtectedMemoryProperties const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceProtectedMemoryProperties(
- *reinterpret_cast<PhysicalDeviceProtectedMemoryProperties const *>( &rhs ) )
- {}
+ : PhysicalDeviceProtectedMemoryProperties( *reinterpret_cast<PhysicalDeviceProtectedMemoryProperties const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceProtectedMemoryProperties &
- operator=( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceProtectedMemoryProperties & operator=( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceProtectedMemoryProperties &
- operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceProtectedMemoryProperties & operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceProtectedMemoryProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceProtectedMemoryProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryProperties *>( this );
}
- explicit operator VkPhysicalDeviceProtectedMemoryProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceProtectedMemoryProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -60715,7 +64712,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedNoFault == rhs.protectedNoFault );
@@ -60733,15 +64730,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties ) ==
- sizeof( VkPhysicalDeviceProtectedMemoryProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties>::value,
- "PhysicalDeviceProtectedMemoryProperties is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryProperties>
@@ -60754,32 +64742,29 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceProvokingVertexFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex_ = {} ) VULKAN_HPP_NOEXCEPT
- : provokingVertexLast( provokingVertexLast_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , provokingVertexLast( provokingVertexLast_ )
, transformFeedbackPreservesProvokingVertex( transformFeedbackPreservesProvokingVertex_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT(
- PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceProvokingVertexFeaturesEXT( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceProvokingVertexFeaturesEXT(
- *reinterpret_cast<PhysicalDeviceProvokingVertexFeaturesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceProvokingVertexFeaturesEXT( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceProvokingVertexFeaturesEXT( *reinterpret_cast<PhysicalDeviceProvokingVertexFeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceProvokingVertexFeaturesEXT &
- operator=( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceProvokingVertexFeaturesEXT & operator=( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceProvokingVertexFeaturesEXT &
- operator=( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceProvokingVertexFeaturesEXT & operator=( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT const *>( &rhs );
return *this;
@@ -60793,38 +64778,35 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT &
- setProvokingVertexLast( VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast_ ) VULKAN_HPP_NOEXCEPT
+ setProvokingVertexLast( VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast_ ) VULKAN_HPP_NOEXCEPT
{
provokingVertexLast = provokingVertexLast_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & setTransformFeedbackPreservesProvokingVertex(
- VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT &
+ setTransformFeedbackPreservesProvokingVertex( VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex_ ) VULKAN_HPP_NOEXCEPT
{
transformFeedbackPreservesProvokingVertex = transformFeedbackPreservesProvokingVertex_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceProvokingVertexFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceProvokingVertexFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceProvokingVertexFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceProvokingVertexFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceProvokingVertexFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceProvokingVertexFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -60837,7 +64819,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( provokingVertexLast == rhs.provokingVertexLast ) &&
@@ -60852,20 +64834,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast = {};
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT ) ==
- sizeof( VkPhysicalDeviceProvokingVertexFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT>::value,
- "PhysicalDeviceProvokingVertexFeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT>
@@ -60878,60 +64851,53 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceProvokingVertexPropertiesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 provokingVertexModePerPipeline_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesTriangleFanProvokingVertex_ = {} ) VULKAN_HPP_NOEXCEPT
- : provokingVertexModePerPipeline( provokingVertexModePerPipeline_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 provokingVertexModePerPipeline_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesTriangleFanProvokingVertex_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , provokingVertexModePerPipeline( provokingVertexModePerPipeline_ )
, transformFeedbackPreservesTriangleFanProvokingVertex( transformFeedbackPreservesTriangleFanProvokingVertex_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT(
- PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceProvokingVertexPropertiesEXT( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceProvokingVertexPropertiesEXT(
- *reinterpret_cast<PhysicalDeviceProvokingVertexPropertiesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceProvokingVertexPropertiesEXT( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceProvokingVertexPropertiesEXT( *reinterpret_cast<PhysicalDeviceProvokingVertexPropertiesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceProvokingVertexPropertiesEXT &
- operator=( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceProvokingVertexPropertiesEXT & operator=( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceProvokingVertexPropertiesEXT &
- operator=( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceProvokingVertexPropertiesEXT & operator=( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceProvokingVertexPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceProvokingVertexPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceProvokingVertexPropertiesEXT *>( this );
}
- explicit operator VkPhysicalDeviceProvokingVertexPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceProvokingVertexPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceProvokingVertexPropertiesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- sType, pNext, provokingVertexModePerPipeline, transformFeedbackPreservesTriangleFanProvokingVertex );
+ return std::tie( sType, pNext, provokingVertexModePerPipeline, transformFeedbackPreservesTriangleFanProvokingVertex );
}
#endif
@@ -60940,13 +64906,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( provokingVertexModePerPipeline == rhs.provokingVertexModePerPipeline ) &&
- ( transformFeedbackPreservesTriangleFanProvokingVertex ==
- rhs.transformFeedbackPreservesTriangleFanProvokingVertex );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( provokingVertexModePerPipeline == rhs.provokingVertexModePerPipeline ) &&
+ ( transformFeedbackPreservesTriangleFanProvokingVertex == rhs.transformFeedbackPreservesTriangleFanProvokingVertex );
# endif
}
@@ -60957,20 +64921,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 provokingVertexModePerPipeline = {};
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesTriangleFanProvokingVertex = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT ) ==
- sizeof( VkPhysicalDeviceProvokingVertexPropertiesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT>::value,
- "PhysicalDeviceProvokingVertexPropertiesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT>
@@ -60983,46 +64938,42 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDevicePushDescriptorPropertiesKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDevicePushDescriptorPropertiesKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDevicePushDescriptorPropertiesKHR( uint32_t maxPushDescriptors_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxPushDescriptors( maxPushDescriptors_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR( uint32_t maxPushDescriptors_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxPushDescriptors( maxPushDescriptors_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR(
- PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDevicePushDescriptorPropertiesKHR( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDevicePushDescriptorPropertiesKHR(
- *reinterpret_cast<PhysicalDevicePushDescriptorPropertiesKHR const *>( &rhs ) )
- {}
+ PhysicalDevicePushDescriptorPropertiesKHR( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDevicePushDescriptorPropertiesKHR( *reinterpret_cast<PhysicalDevicePushDescriptorPropertiesKHR const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDevicePushDescriptorPropertiesKHR &
- operator=( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDevicePushDescriptorPropertiesKHR & operator=( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDevicePushDescriptorPropertiesKHR &
- operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePushDescriptorPropertiesKHR & operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDevicePushDescriptorPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevicePushDescriptorPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDevicePushDescriptorPropertiesKHR *>( this );
}
- explicit operator VkPhysicalDevicePushDescriptorPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevicePushDescriptorPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDevicePushDescriptorPropertiesKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -61039,7 +64990,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPushDescriptors == rhs.maxPushDescriptors );
@@ -61057,15 +65008,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
uint32_t maxPushDescriptors = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR ) ==
- sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR>::value,
- "PhysicalDevicePushDescriptorPropertiesKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDevicePushDescriptorPropertiesKHR>
@@ -61078,30 +65020,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceRGBA10X6FormatsFeaturesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler_ = {} ) VULKAN_HPP_NOEXCEPT
- : formatRgba10x6WithoutYCbCrSampler( formatRgba10x6WithoutYCbCrSampler_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRGBA10X6FormatsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , formatRgba10x6WithoutYCbCrSampler( formatRgba10x6WithoutYCbCrSampler_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceRGBA10X6FormatsFeaturesEXT(
- PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRGBA10X6FormatsFeaturesEXT( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceRGBA10X6FormatsFeaturesEXT( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceRGBA10X6FormatsFeaturesEXT(
- *reinterpret_cast<PhysicalDeviceRGBA10X6FormatsFeaturesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceRGBA10X6FormatsFeaturesEXT( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceRGBA10X6FormatsFeaturesEXT( *reinterpret_cast<PhysicalDeviceRGBA10X6FormatsFeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceRGBA10X6FormatsFeaturesEXT &
- operator=( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceRGBA10X6FormatsFeaturesEXT & operator=( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceRGBA10X6FormatsFeaturesEXT &
- operator=( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceRGBA10X6FormatsFeaturesEXT & operator=( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT const *>( &rhs );
return *this;
@@ -61114,25 +65053,25 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRGBA10X6FormatsFeaturesEXT & setFormatRgba10x6WithoutYCbCrSampler(
- VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRGBA10X6FormatsFeaturesEXT &
+ setFormatRgba10x6WithoutYCbCrSampler( VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler_ ) VULKAN_HPP_NOEXCEPT
{
formatRgba10x6WithoutYCbCrSampler = formatRgba10x6WithoutYCbCrSampler_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -61149,11 +65088,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( formatRgba10x6WithoutYCbCrSampler == rhs.formatRgba10x6WithoutYCbCrSampler );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatRgba10x6WithoutYCbCrSampler == rhs.formatRgba10x6WithoutYCbCrSampler );
# endif
}
@@ -61164,19 +65102,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT ) ==
- sizeof( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT>::value,
- "PhysicalDeviceRGBA10X6FormatsFeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT>
@@ -61184,90 +65113,86 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PhysicalDeviceRGBA10X6FormatsFeaturesEXT;
};
- struct PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM
+ struct PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT
{
- using NativeType = VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM;
+ using NativeType = VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM(
- VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess_ = {} ) VULKAN_HPP_NOEXCEPT
- : rasterizationOrderColorAttachmentAccess( rasterizationOrderColorAttachmentAccess_ )
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , rasterizationOrderColorAttachmentAccess( rasterizationOrderColorAttachmentAccess_ )
, rasterizationOrderDepthAttachmentAccess( rasterizationOrderDepthAttachmentAccess_ )
, rasterizationOrderStencilAttachmentAccess( rasterizationOrderStencilAttachmentAccess_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM(
- PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs )
+ VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM(
- VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM(
- *reinterpret_cast<PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const *>( &rhs ) )
- {}
+ PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT(
+ *reinterpret_cast<PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM & operator =(
- PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT &
+ operator=( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM &
- operator=( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT &
+ operator=( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const *>(
- &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM &
- setRasterizationOrderColorAttachmentAccess(
- VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT &
+ setRasterizationOrderColorAttachmentAccess( VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT
{
rasterizationOrderColorAttachmentAccess = rasterizationOrderColorAttachmentAccess_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM &
- setRasterizationOrderDepthAttachmentAccess(
- VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT &
+ setRasterizationOrderDepthAttachmentAccess( VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT
{
rasterizationOrderDepthAttachmentAccess = rasterizationOrderDepthAttachmentAccess_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM &
- setRasterizationOrderStencilAttachmentAccess(
- VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT &
+ setRasterizationOrderStencilAttachmentAccess( VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT
{
rasterizationOrderStencilAttachmentAccess = rasterizationOrderStencilAttachmentAccess_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM *>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM *>( this );
+ return *reinterpret_cast<VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -61279,85 +65204,69 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- rasterizationOrderColorAttachmentAccess,
- rasterizationOrderDepthAttachmentAccess,
- rasterizationOrderStencilAttachmentAccess );
+ return std::tie(
+ sType, pNext, rasterizationOrderColorAttachmentAccess, rasterizationOrderDepthAttachmentAccess, rasterizationOrderStencilAttachmentAccess );
}
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const & ) const = default;
+ auto operator<=>( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & ) const = default;
#else
- bool operator==( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( rasterizationOrderColorAttachmentAccess == rhs.rasterizationOrderColorAttachmentAccess ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rasterizationOrderColorAttachmentAccess == rhs.rasterizationOrderColorAttachmentAccess ) &&
( rasterizationOrderDepthAttachmentAccess == rhs.rasterizationOrderDepthAttachmentAccess ) &&
( rasterizationOrderStencilAttachmentAccess == rhs.rasterizationOrderStencilAttachmentAccess );
# endif
}
- bool operator!=( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType =
- StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess = {};
- VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess = {};
- VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess = {};
+ VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess = {};
+ VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess = {};
};
- VULKAN_HPP_STATIC_ASSERT(
- sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM ) ==
- sizeof( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<
- VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM>::value,
- "PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM is not nothrow_move_constructible!" );
template <>
- struct CppType<StructureType, StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT>
{
- using Type = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM;
+ using Type = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
};
+ using PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
struct PhysicalDeviceRayQueryFeaturesKHR
{
using NativeType = VkPhysicalDeviceRayQueryFeaturesKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceRayQueryFeaturesKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayQueryFeaturesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceRayQueryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ = {} ) VULKAN_HPP_NOEXCEPT
- : rayQuery( rayQuery_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRayQueryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , rayQuery( rayQuery_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceRayQueryFeaturesKHR( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRayQueryFeaturesKHR( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceRayQueryFeaturesKHR( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceRayQueryFeaturesKHR( *reinterpret_cast<PhysicalDeviceRayQueryFeaturesKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceRayQueryFeaturesKHR &
- operator=( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceRayQueryFeaturesKHR & operator=( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceRayQueryFeaturesKHR & operator=( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -61372,25 +65281,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR &
- setRayQuery( VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR & setRayQuery( VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ ) VULKAN_HPP_NOEXCEPT
{
rayQuery = rayQuery_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceRayQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceRayQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRayQueryFeaturesKHR *>( this );
}
- explicit operator VkPhysicalDeviceRayQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceRayQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceRayQueryFeaturesKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -61407,7 +65315,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayQuery == rhs.rayQuery );
@@ -61425,14 +65333,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 rayQuery = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR ) ==
- sizeof( VkPhysicalDeviceRayQueryFeaturesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR>::value,
- "PhysicalDeviceRayQueryFeaturesKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceRayQueryFeaturesKHR>
@@ -61440,37 +65340,144 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PhysicalDeviceRayQueryFeaturesKHR;
};
+ struct PhysicalDeviceRayTracingMaintenance1FeaturesKHR
+ {
+ using NativeType = VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingMaintenance1FeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMaintenance1FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMaintenance1_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect2_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , rayTracingMaintenance1( rayTracingMaintenance1_ )
+ , rayTracingPipelineTraceRaysIndirect2( rayTracingPipelineTraceRaysIndirect2_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceRayTracingMaintenance1FeaturesKHR( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceRayTracingMaintenance1FeaturesKHR( VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceRayTracingMaintenance1FeaturesKHR( *reinterpret_cast<PhysicalDeviceRayTracingMaintenance1FeaturesKHR const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceRayTracingMaintenance1FeaturesKHR & operator=( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceRayTracingMaintenance1FeaturesKHR & operator=( VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMaintenance1FeaturesKHR const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR &
+ setRayTracingMaintenance1( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMaintenance1_ ) VULKAN_HPP_NOEXCEPT
+ {
+ rayTracingMaintenance1 = rayTracingMaintenance1_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR &
+ setRayTracingPipelineTraceRaysIndirect2( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect2_ ) VULKAN_HPP_NOEXCEPT
+ {
+ rayTracingPipelineTraceRaysIndirect2 = rayTracingPipelineTraceRaysIndirect2_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR *>( this );
+ }
+
+ operator VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, rayTracingMaintenance1, rayTracingPipelineTraceRaysIndirect2 );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingMaintenance1 == rhs.rayTracingMaintenance1 ) &&
+ ( rayTracingPipelineTraceRaysIndirect2 == rhs.rayTracingPipelineTraceRaysIndirect2 );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingMaintenance1FeaturesKHR;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 rayTracingMaintenance1 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect2 = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingMaintenance1FeaturesKHR>
+ {
+ using Type = PhysicalDeviceRayTracingMaintenance1FeaturesKHR;
+ };
+
struct PhysicalDeviceRayTracingMotionBlurFeaturesNV
{
using NativeType = VkPhysicalDeviceRayTracingMotionBlurFeaturesNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMotionBlurFeaturesNV(
- VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect_ = {} ) VULKAN_HPP_NOEXCEPT
- : rayTracingMotionBlur( rayTracingMotionBlur_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMotionBlurFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , rayTracingMotionBlur( rayTracingMotionBlur_ )
, rayTracingMotionBlurPipelineTraceRaysIndirect( rayTracingMotionBlurPipelineTraceRaysIndirect_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMotionBlurFeaturesNV(
- PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMotionBlurFeaturesNV( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceRayTracingMotionBlurFeaturesNV( VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceRayTracingMotionBlurFeaturesNV(
- *reinterpret_cast<PhysicalDeviceRayTracingMotionBlurFeaturesNV const *>( &rhs ) )
- {}
+ PhysicalDeviceRayTracingMotionBlurFeaturesNV( VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceRayTracingMotionBlurFeaturesNV( *reinterpret_cast<PhysicalDeviceRayTracingMotionBlurFeaturesNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceRayTracingMotionBlurFeaturesNV &
- operator=( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceRayTracingMotionBlurFeaturesNV & operator=( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceRayTracingMotionBlurFeaturesNV &
- operator=( VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceRayTracingMotionBlurFeaturesNV & operator=( VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV const *>( &rhs );
return *this;
@@ -61484,39 +65491,35 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV &
- setRayTracingMotionBlur( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur_ ) VULKAN_HPP_NOEXCEPT
+ setRayTracingMotionBlur( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur_ ) VULKAN_HPP_NOEXCEPT
{
rayTracingMotionBlur = rayTracingMotionBlur_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV &
- setRayTracingMotionBlurPipelineTraceRaysIndirect(
- VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect_ ) VULKAN_HPP_NOEXCEPT
+ setRayTracingMotionBlurPipelineTraceRaysIndirect( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect_ ) VULKAN_HPP_NOEXCEPT
{
rayTracingMotionBlurPipelineTraceRaysIndirect = rayTracingMotionBlurPipelineTraceRaysIndirect_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRayTracingMotionBlurFeaturesNV *>( this );
}
- explicit operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceRayTracingMotionBlurFeaturesNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -61529,7 +65532,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingMotionBlur == rhs.rayTracingMotionBlur ) &&
@@ -61544,20 +65547,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur = {};
VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV ) ==
- sizeof( VkPhysicalDeviceRayTracingMotionBlurFeaturesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV>::value,
- "PhysicalDeviceRayTracingMotionBlurFeaturesNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV>
@@ -61570,38 +65564,35 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceRayTracingPipelineFeaturesKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR(
- VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_ = {} ) VULKAN_HPP_NOEXCEPT
- : rayTracingPipeline( rayTracingPipeline_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , rayTracingPipeline( rayTracingPipeline_ )
, rayTracingPipelineShaderGroupHandleCaptureReplay( rayTracingPipelineShaderGroupHandleCaptureReplay_ )
, rayTracingPipelineShaderGroupHandleCaptureReplayMixed( rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ )
, rayTracingPipelineTraceRaysIndirect( rayTracingPipelineTraceRaysIndirect_ )
, rayTraversalPrimitiveCulling( rayTraversalPrimitiveCulling_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR(
- PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceRayTracingPipelineFeaturesKHR( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceRayTracingPipelineFeaturesKHR(
- *reinterpret_cast<PhysicalDeviceRayTracingPipelineFeaturesKHR const *>( &rhs ) )
- {}
+ PhysicalDeviceRayTracingPipelineFeaturesKHR( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceRayTracingPipelineFeaturesKHR( *reinterpret_cast<PhysicalDeviceRayTracingPipelineFeaturesKHR const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceRayTracingPipelineFeaturesKHR &
- operator=( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceRayTracingPipelineFeaturesKHR & operator=( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceRayTracingPipelineFeaturesKHR &
- operator=( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceRayTracingPipelineFeaturesKHR & operator=( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR const *>( &rhs );
return *this;
@@ -61615,54 +65606,52 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR &
- setRayTracingPipeline( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_ ) VULKAN_HPP_NOEXCEPT
+ setRayTracingPipeline( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_ ) VULKAN_HPP_NOEXCEPT
{
rayTracingPipeline = rayTracingPipeline_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR &
- setRayTracingPipelineShaderGroupHandleCaptureReplay(
- VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+ setRayTracingPipelineShaderGroupHandleCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
{
rayTracingPipelineShaderGroupHandleCaptureReplay = rayTracingPipelineShaderGroupHandleCaptureReplay_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR &
- setRayTracingPipelineShaderGroupHandleCaptureReplayMixed(
- VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipelineShaderGroupHandleCaptureReplayMixed(
+ VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ ) VULKAN_HPP_NOEXCEPT
{
rayTracingPipelineShaderGroupHandleCaptureReplayMixed = rayTracingPipelineShaderGroupHandleCaptureReplayMixed_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipelineTraceRaysIndirect(
- VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR &
+ setRayTracingPipelineTraceRaysIndirect( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_ ) VULKAN_HPP_NOEXCEPT
{
rayTracingPipelineTraceRaysIndirect = rayTracingPipelineTraceRaysIndirect_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR &
- setRayTraversalPrimitiveCulling( VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_ ) VULKAN_HPP_NOEXCEPT
+ setRayTraversalPrimitiveCulling( VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_ ) VULKAN_HPP_NOEXCEPT
{
rayTraversalPrimitiveCulling = rayTraversalPrimitiveCulling_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRayTracingPipelineFeaturesKHR *>( this );
}
- explicit operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceRayTracingPipelineFeaturesKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -61691,14 +65680,12 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingPipeline == rhs.rayTracingPipeline ) &&
- ( rayTracingPipelineShaderGroupHandleCaptureReplay ==
- rhs.rayTracingPipelineShaderGroupHandleCaptureReplay ) &&
- ( rayTracingPipelineShaderGroupHandleCaptureReplayMixed ==
- rhs.rayTracingPipelineShaderGroupHandleCaptureReplayMixed ) &&
+ ( rayTracingPipelineShaderGroupHandleCaptureReplay == rhs.rayTracingPipelineShaderGroupHandleCaptureReplay ) &&
+ ( rayTracingPipelineShaderGroupHandleCaptureReplayMixed == rhs.rayTracingPipelineShaderGroupHandleCaptureReplayMixed ) &&
( rayTracingPipelineTraceRaysIndirect == rhs.rayTracingPipelineTraceRaysIndirect ) &&
( rayTraversalPrimitiveCulling == rhs.rayTraversalPrimitiveCulling );
# endif
@@ -61711,23 +65698,14 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline = {};
VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay = {};
VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed = {};
VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect = {};
VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR ) ==
- sizeof( VkPhysicalDeviceRayTracingPipelineFeaturesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR>::value,
- "PhysicalDeviceRayTracingPipelineFeaturesKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR>
@@ -61740,20 +65718,20 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceRayTracingPipelinePropertiesKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceRayTracingPipelinePropertiesKHR( uint32_t shaderGroupHandleSize_ = {},
- uint32_t maxRayRecursionDepth_ = {},
- uint32_t maxShaderGroupStride_ = {},
- uint32_t shaderGroupBaseAlignment_ = {},
- uint32_t shaderGroupHandleCaptureReplaySize_ = {},
- uint32_t maxRayDispatchInvocationCount_ = {},
- uint32_t shaderGroupHandleAlignment_ = {},
- uint32_t maxRayHitAttributeSize_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderGroupHandleSize( shaderGroupHandleSize_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelinePropertiesKHR( uint32_t shaderGroupHandleSize_ = {},
+ uint32_t maxRayRecursionDepth_ = {},
+ uint32_t maxShaderGroupStride_ = {},
+ uint32_t shaderGroupBaseAlignment_ = {},
+ uint32_t shaderGroupHandleCaptureReplaySize_ = {},
+ uint32_t maxRayDispatchInvocationCount_ = {},
+ uint32_t shaderGroupHandleAlignment_ = {},
+ uint32_t maxRayHitAttributeSize_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shaderGroupHandleSize( shaderGroupHandleSize_ )
, maxRayRecursionDepth( maxRayRecursionDepth_ )
, maxShaderGroupStride( maxShaderGroupStride_ )
, shaderGroupBaseAlignment( shaderGroupBaseAlignment_ )
@@ -61761,39 +65739,37 @@ namespace VULKAN_HPP_NAMESPACE
, maxRayDispatchInvocationCount( maxRayDispatchInvocationCount_ )
, shaderGroupHandleAlignment( shaderGroupHandleAlignment_ )
, maxRayHitAttributeSize( maxRayHitAttributeSize_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelinePropertiesKHR(
- PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceRayTracingPipelinePropertiesKHR( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceRayTracingPipelinePropertiesKHR( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceRayTracingPipelinePropertiesKHR(
- *reinterpret_cast<PhysicalDeviceRayTracingPipelinePropertiesKHR const *>( &rhs ) )
- {}
+ PhysicalDeviceRayTracingPipelinePropertiesKHR( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceRayTracingPipelinePropertiesKHR( *reinterpret_cast<PhysicalDeviceRayTracingPipelinePropertiesKHR const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceRayTracingPipelinePropertiesKHR &
- operator=( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceRayTracingPipelinePropertiesKHR & operator=( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceRayTracingPipelinePropertiesKHR &
- operator=( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceRayTracingPipelinePropertiesKHR & operator=( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRayTracingPipelinePropertiesKHR *>( this );
}
- explicit operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceRayTracingPipelinePropertiesKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -61828,17 +65804,13 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( shaderGroupHandleSize == rhs.shaderGroupHandleSize ) &&
- ( maxRayRecursionDepth == rhs.maxRayRecursionDepth ) &&
- ( maxShaderGroupStride == rhs.maxShaderGroupStride ) &&
- ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment ) &&
- ( shaderGroupHandleCaptureReplaySize == rhs.shaderGroupHandleCaptureReplaySize ) &&
- ( maxRayDispatchInvocationCount == rhs.maxRayDispatchInvocationCount ) &&
- ( shaderGroupHandleAlignment == rhs.shaderGroupHandleAlignment ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize ) &&
+ ( maxRayRecursionDepth == rhs.maxRayRecursionDepth ) && ( maxShaderGroupStride == rhs.maxShaderGroupStride ) &&
+ ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment ) && ( shaderGroupHandleCaptureReplaySize == rhs.shaderGroupHandleCaptureReplaySize ) &&
+ ( maxRayDispatchInvocationCount == rhs.maxRayDispatchInvocationCount ) && ( shaderGroupHandleAlignment == rhs.shaderGroupHandleAlignment ) &&
( maxRayHitAttributeSize == rhs.maxRayHitAttributeSize );
# endif
}
@@ -61850,8 +65822,8 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR;
+ void * pNext = {};
uint32_t shaderGroupHandleSize = {};
uint32_t maxRayRecursionDepth = {};
uint32_t maxShaderGroupStride = {};
@@ -61861,15 +65833,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t shaderGroupHandleAlignment = {};
uint32_t maxRayHitAttributeSize = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR ) ==
- sizeof( VkPhysicalDeviceRayTracingPipelinePropertiesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR>::value,
- "PhysicalDeviceRayTracingPipelinePropertiesKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR>
@@ -61882,20 +65845,20 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceRayTracingPropertiesNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceRayTracingPropertiesNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPropertiesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceRayTracingPropertiesNV( uint32_t shaderGroupHandleSize_ = {},
- uint32_t maxRecursionDepth_ = {},
- uint32_t maxShaderGroupStride_ = {},
- uint32_t shaderGroupBaseAlignment_ = {},
- uint64_t maxGeometryCount_ = {},
- uint64_t maxInstanceCount_ = {},
- uint64_t maxTriangleCount_ = {},
- uint32_t maxDescriptorSetAccelerationStructures_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderGroupHandleSize( shaderGroupHandleSize_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( uint32_t shaderGroupHandleSize_ = {},
+ uint32_t maxRecursionDepth_ = {},
+ uint32_t maxShaderGroupStride_ = {},
+ uint32_t shaderGroupBaseAlignment_ = {},
+ uint64_t maxGeometryCount_ = {},
+ uint64_t maxInstanceCount_ = {},
+ uint64_t maxTriangleCount_ = {},
+ uint32_t maxDescriptorSetAccelerationStructures_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shaderGroupHandleSize( shaderGroupHandleSize_ )
, maxRecursionDepth( maxRecursionDepth_ )
, maxShaderGroupStride( maxShaderGroupStride_ )
, shaderGroupBaseAlignment( shaderGroupBaseAlignment_ )
@@ -61903,37 +65866,36 @@ namespace VULKAN_HPP_NAMESPACE
, maxInstanceCount( maxInstanceCount_ )
, maxTriangleCount( maxTriangleCount_ )
, maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( PhysicalDeviceRayTracingPropertiesNV const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceRayTracingPropertiesNV( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceRayTracingPropertiesNV( *reinterpret_cast<PhysicalDeviceRayTracingPropertiesNV const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceRayTracingPropertiesNV &
- operator=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceRayTracingPropertiesNV & operator=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceRayTracingPropertiesNV &
- operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceRayTracingPropertiesNV & operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceRayTracingPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceRayTracingPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRayTracingPropertiesNV *>( this );
}
- explicit operator VkPhysicalDeviceRayTracingPropertiesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceRayTracingPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceRayTracingPropertiesNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -61968,15 +65930,13 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceRayTracingPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( shaderGroupHandleSize == rhs.shaderGroupHandleSize ) && ( maxRecursionDepth == rhs.maxRecursionDepth ) &&
- ( maxShaderGroupStride == rhs.maxShaderGroupStride ) &&
- ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment ) &&
- ( maxGeometryCount == rhs.maxGeometryCount ) && ( maxInstanceCount == rhs.maxInstanceCount ) &&
- ( maxTriangleCount == rhs.maxTriangleCount ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize ) &&
+ ( maxRecursionDepth == rhs.maxRecursionDepth ) && ( maxShaderGroupStride == rhs.maxShaderGroupStride ) &&
+ ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment ) && ( maxGeometryCount == rhs.maxGeometryCount ) &&
+ ( maxInstanceCount == rhs.maxInstanceCount ) && ( maxTriangleCount == rhs.maxTriangleCount ) &&
( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures );
# endif
}
@@ -61988,25 +65948,17 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesNV;
- void * pNext = {};
- uint32_t shaderGroupHandleSize = {};
- uint32_t maxRecursionDepth = {};
- uint32_t maxShaderGroupStride = {};
- uint32_t shaderGroupBaseAlignment = {};
- uint64_t maxGeometryCount = {};
- uint64_t maxInstanceCount = {};
- uint64_t maxTriangleCount = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesNV;
+ void * pNext = {};
+ uint32_t shaderGroupHandleSize = {};
+ uint32_t maxRecursionDepth = {};
+ uint32_t maxShaderGroupStride = {};
+ uint32_t shaderGroupBaseAlignment = {};
+ uint64_t maxGeometryCount = {};
+ uint64_t maxInstanceCount = {};
+ uint64_t maxTriangleCount = {};
uint32_t maxDescriptorSetAccelerationStructures = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV ) ==
- sizeof( VkPhysicalDeviceRayTracingPropertiesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV>::value,
- "PhysicalDeviceRayTracingPropertiesNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPropertiesNV>
@@ -62019,63 +65971,60 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV(
- VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ = {} ) VULKAN_HPP_NOEXCEPT
- : representativeFragmentTest( representativeFragmentTest_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , representativeFragmentTest( representativeFragmentTest_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV(
- PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceRepresentativeFragmentTestFeaturesNV( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceRepresentativeFragmentTestFeaturesNV(
- VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceRepresentativeFragmentTestFeaturesNV(
- *reinterpret_cast<PhysicalDeviceRepresentativeFragmentTestFeaturesNV const *>( &rhs ) )
- {}
+ PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceRepresentativeFragmentTestFeaturesNV( *reinterpret_cast<PhysicalDeviceRepresentativeFragmentTestFeaturesNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceRepresentativeFragmentTestFeaturesNV &
operator=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceRepresentativeFragmentTestFeaturesNV &
- operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceRepresentativeFragmentTestFeaturesNV & operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRepresentativeFragmentTestFeaturesNV &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRepresentativeFragmentTestFeaturesNV &
- setRepresentativeFragmentTest( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ ) VULKAN_HPP_NOEXCEPT
+ setRepresentativeFragmentTest( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ ) VULKAN_HPP_NOEXCEPT
{
representativeFragmentTest = representativeFragmentTest_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV *>( this );
}
- explicit operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -62092,11 +66041,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( representativeFragmentTest == rhs.representativeFragmentTest );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( representativeFragmentTest == rhs.representativeFragmentTest );
# endif
}
@@ -62107,19 +66055,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) ==
- sizeof( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV>::value,
- "PhysicalDeviceRepresentativeFragmentTestFeaturesNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV>
@@ -62132,32 +66071,31 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceRobustness2FeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceRobustness2FeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceRobustness2FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ = {} ) VULKAN_HPP_NOEXCEPT
- : robustBufferAccess2( robustBufferAccess2_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , robustBufferAccess2( robustBufferAccess2_ )
, robustImageAccess2( robustImageAccess2_ )
, nullDescriptor( nullDescriptor_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT( PhysicalDeviceRobustness2FeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceRobustness2FeaturesEXT( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceRobustness2FeaturesEXT( *reinterpret_cast<PhysicalDeviceRobustness2FeaturesEXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceRobustness2FeaturesEXT &
- operator=( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceRobustness2FeaturesEXT & operator=( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceRobustness2FeaturesEXT &
- operator=( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceRobustness2FeaturesEXT & operator=( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT const *>( &rhs );
return *this;
@@ -62171,38 +66109,36 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT &
- setRobustBufferAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ ) VULKAN_HPP_NOEXCEPT
+ setRobustBufferAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ ) VULKAN_HPP_NOEXCEPT
{
robustBufferAccess2 = robustBufferAccess2_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT &
- setRobustImageAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & setRobustImageAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ ) VULKAN_HPP_NOEXCEPT
{
robustImageAccess2 = robustImageAccess2_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT &
- setNullDescriptor( VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & setNullDescriptor( VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ ) VULKAN_HPP_NOEXCEPT
{
nullDescriptor = nullDescriptor_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceRobustness2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceRobustness2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRobustness2FeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceRobustness2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceRobustness2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceRobustness2FeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -62223,7 +66159,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustBufferAccess2 == rhs.robustBufferAccess2 ) &&
@@ -62244,14 +66180,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2 = {};
VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT ) ==
- sizeof( VkPhysicalDeviceRobustness2FeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT>::value,
- "PhysicalDeviceRobustness2FeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceRobustness2FeaturesEXT>
@@ -62264,54 +66192,49 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceRobustness2PropertiesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceRobustness2PropertiesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT(
- VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
- : robustStorageBufferAccessSizeAlignment( robustStorageBufferAccessSizeAlignment_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , robustStorageBufferAccessSizeAlignment( robustStorageBufferAccessSizeAlignment_ )
, robustUniformBufferAccessSizeAlignment( robustUniformBufferAccessSizeAlignment_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT( PhysicalDeviceRobustness2PropertiesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceRobustness2PropertiesEXT( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceRobustness2PropertiesEXT(
- *reinterpret_cast<PhysicalDeviceRobustness2PropertiesEXT const *>( &rhs ) )
- {}
+ : PhysicalDeviceRobustness2PropertiesEXT( *reinterpret_cast<PhysicalDeviceRobustness2PropertiesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceRobustness2PropertiesEXT &
- operator=( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceRobustness2PropertiesEXT & operator=( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceRobustness2PropertiesEXT &
- operator=( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceRobustness2PropertiesEXT & operator=( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceRobustness2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceRobustness2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceRobustness2PropertiesEXT *>( this );
}
- explicit operator VkPhysicalDeviceRobustness2PropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceRobustness2PropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceRobustness2PropertiesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::DeviceSize const &,
- VULKAN_HPP_NAMESPACE::DeviceSize const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -62324,11 +66247,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( robustStorageBufferAccessSizeAlignment == rhs.robustStorageBufferAccessSizeAlignment ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustStorageBufferAccessSizeAlignment == rhs.robustStorageBufferAccessSizeAlignment ) &&
( robustUniformBufferAccessSizeAlignment == rhs.robustUniformBufferAccessSizeAlignment );
# endif
}
@@ -62340,20 +66262,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment = {};
VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT ) ==
- sizeof( VkPhysicalDeviceRobustness2PropertiesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT>::value,
- "PhysicalDeviceRobustness2PropertiesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceRobustness2PropertiesEXT>
@@ -62366,54 +66279,51 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceSampleLocationsPropertiesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT(
- VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {},
- std::array<float, 2> const & sampleLocationCoordinateRange_ = {},
- uint32_t sampleLocationSubPixelBits_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT
- : sampleLocationSampleCounts( sampleLocationSampleCounts_ )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {},
+ std::array<float, 2> const & sampleLocationCoordinateRange_ = {},
+ uint32_t sampleLocationSubPixelBits_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , sampleLocationSampleCounts( sampleLocationSampleCounts_ )
, maxSampleLocationGridSize( maxSampleLocationGridSize_ )
, sampleLocationCoordinateRange( sampleLocationCoordinateRange_ )
, sampleLocationSubPixelBits( sampleLocationSubPixelBits_ )
, variableSampleLocations( variableSampleLocations_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT(
- PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceSampleLocationsPropertiesEXT( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceSampleLocationsPropertiesEXT(
- *reinterpret_cast<PhysicalDeviceSampleLocationsPropertiesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceSampleLocationsPropertiesEXT( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceSampleLocationsPropertiesEXT( *reinterpret_cast<PhysicalDeviceSampleLocationsPropertiesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceSampleLocationsPropertiesEXT &
- operator=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceSampleLocationsPropertiesEXT & operator=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceSampleLocationsPropertiesEXT &
- operator=( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSampleLocationsPropertiesEXT & operator=( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceSampleLocationsPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSampleLocationsPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSampleLocationsPropertiesEXT *>( this );
}
- explicit operator VkPhysicalDeviceSampleLocationsPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSampleLocationsPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSampleLocationsPropertiesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -62442,15 +66352,12 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( sampleLocationSampleCounts == rhs.sampleLocationSampleCounts ) &&
- ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ) &&
- ( sampleLocationCoordinateRange == rhs.sampleLocationCoordinateRange ) &&
- ( sampleLocationSubPixelBits == rhs.sampleLocationSubPixelBits ) &&
- ( variableSampleLocations == rhs.variableSampleLocations );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleLocationSampleCounts == rhs.sampleLocationSampleCounts ) &&
+ ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ) && ( sampleLocationCoordinateRange == rhs.sampleLocationCoordinateRange ) &&
+ ( sampleLocationSubPixelBits == rhs.sampleLocationSubPixelBits ) && ( variableSampleLocations == rhs.variableSampleLocations );
# endif
}
@@ -62461,23 +66368,14 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts = {};
VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> sampleLocationCoordinateRange = {};
uint32_t sampleLocationSubPixelBits = {};
VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT ) ==
- sizeof( VkPhysicalDeviceSampleLocationsPropertiesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT>::value,
- "PhysicalDeviceSampleLocationsPropertiesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT>
@@ -62490,55 +66388,49 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceSamplerFilterMinmaxProperties;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties(
- VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {} ) VULKAN_HPP_NOEXCEPT
- : filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties( VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ )
, filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties(
- PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceSamplerFilterMinmaxProperties( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceSamplerFilterMinmaxProperties(
- *reinterpret_cast<PhysicalDeviceSamplerFilterMinmaxProperties const *>( &rhs ) )
- {}
+ PhysicalDeviceSamplerFilterMinmaxProperties( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceSamplerFilterMinmaxProperties( *reinterpret_cast<PhysicalDeviceSamplerFilterMinmaxProperties const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceSamplerFilterMinmaxProperties &
- operator=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceSamplerFilterMinmaxProperties & operator=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceSamplerFilterMinmaxProperties &
- operator=( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSamplerFilterMinmaxProperties & operator=( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceSamplerFilterMinmaxProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSamplerFilterMinmaxProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSamplerFilterMinmaxProperties *>( this );
}
- explicit operator VkPhysicalDeviceSamplerFilterMinmaxProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSamplerFilterMinmaxProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -62551,11 +66443,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) &&
( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping );
# endif
}
@@ -62567,20 +66458,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {};
VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties ) ==
- sizeof( VkPhysicalDeviceSamplerFilterMinmaxProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties>::value,
- "PhysicalDeviceSamplerFilterMinmaxProperties is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties>
@@ -62594,30 +66476,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceSamplerYcbcrConversionFeatures;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures(
- VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {} ) VULKAN_HPP_NOEXCEPT
- : samplerYcbcrConversion( samplerYcbcrConversion_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , samplerYcbcrConversion( samplerYcbcrConversion_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures(
- PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceSamplerYcbcrConversionFeatures(
- *reinterpret_cast<PhysicalDeviceSamplerYcbcrConversionFeatures const *>( &rhs ) )
- {}
+ PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceSamplerYcbcrConversionFeatures( *reinterpret_cast<PhysicalDeviceSamplerYcbcrConversionFeatures const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceSamplerYcbcrConversionFeatures &
- operator=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceSamplerYcbcrConversionFeatures &
- operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures const *>( &rhs );
return *this;
@@ -62631,24 +66510,24 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerYcbcrConversionFeatures &
- setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT
+ setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT
{
samplerYcbcrConversion = samplerYcbcrConversion_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSamplerYcbcrConversionFeatures *>( this );
}
- explicit operator VkPhysicalDeviceSamplerYcbcrConversionFeatures &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSamplerYcbcrConversionFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSamplerYcbcrConversionFeatures *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -62665,11 +66544,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( samplerYcbcrConversion == rhs.samplerYcbcrConversion );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion );
# endif
}
@@ -62680,19 +66558,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures ) ==
- sizeof( VkPhysicalDeviceSamplerYcbcrConversionFeatures ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures>::value,
- "PhysicalDeviceSamplerYcbcrConversionFeatures is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures>
@@ -62706,28 +66575,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceScalarBlockLayoutFeatures;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceScalarBlockLayoutFeatures;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {} )
- VULKAN_HPP_NOEXCEPT : scalarBlockLayout( scalarBlockLayout_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , scalarBlockLayout( scalarBlockLayout_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( PhysicalDeviceScalarBlockLayoutFeatures const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceScalarBlockLayoutFeatures( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceScalarBlockLayoutFeatures(
- *reinterpret_cast<PhysicalDeviceScalarBlockLayoutFeatures const *>( &rhs ) )
- {}
+ : PhysicalDeviceScalarBlockLayoutFeatures( *reinterpret_cast<PhysicalDeviceScalarBlockLayoutFeatures const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceScalarBlockLayoutFeatures &
- operator=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceScalarBlockLayoutFeatures & operator=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceScalarBlockLayoutFeatures &
- operator=( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceScalarBlockLayoutFeatures & operator=( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures const *>( &rhs );
return *this;
@@ -62748,17 +66616,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceScalarBlockLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceScalarBlockLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceScalarBlockLayoutFeatures *>( this );
}
- explicit operator VkPhysicalDeviceScalarBlockLayoutFeatures &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceScalarBlockLayoutFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceScalarBlockLayoutFeatures *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -62775,7 +66643,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( scalarBlockLayout == rhs.scalarBlockLayout );
@@ -62793,15 +66661,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures ) ==
- sizeof( VkPhysicalDeviceScalarBlockLayoutFeatures ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures>::value,
- "PhysicalDeviceScalarBlockLayoutFeatures is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceScalarBlockLayoutFeatures>
@@ -62815,63 +66674,60 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures(
- VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {} ) VULKAN_HPP_NOEXCEPT
- : separateDepthStencilLayouts( separateDepthStencilLayouts_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , separateDepthStencilLayouts( separateDepthStencilLayouts_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures(
- PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceSeparateDepthStencilLayoutsFeatures( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceSeparateDepthStencilLayoutsFeatures(
- *reinterpret_cast<PhysicalDeviceSeparateDepthStencilLayoutsFeatures const *>( &rhs ) )
- {}
+ PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceSeparateDepthStencilLayoutsFeatures( *reinterpret_cast<PhysicalDeviceSeparateDepthStencilLayoutsFeatures const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceSeparateDepthStencilLayoutsFeatures &
operator=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceSeparateDepthStencilLayoutsFeatures &
- operator=( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSeparateDepthStencilLayoutsFeatures & operator=( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSeparateDepthStencilLayoutsFeatures &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSeparateDepthStencilLayoutsFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSeparateDepthStencilLayoutsFeatures &
- setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT
+ setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT
{
separateDepthStencilLayouts = separateDepthStencilLayouts_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures *>( this );
}
- explicit operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -62888,11 +66744,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts );
# endif
}
@@ -62903,19 +66758,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures ) ==
- sizeof( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures>::value,
- "PhysicalDeviceSeparateDepthStencilLayoutsFeatures is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures>
@@ -62929,24 +66775,24 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT;
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat2FeaturesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderBufferFloat16Atomics( shaderBufferFloat16Atomics_ )
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat2FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shaderBufferFloat16Atomics( shaderBufferFloat16Atomics_ )
, shaderBufferFloat16AtomicAdd( shaderBufferFloat16AtomicAdd_ )
, shaderBufferFloat16AtomicMinMax( shaderBufferFloat16AtomicMinMax_ )
, shaderBufferFloat32AtomicMinMax( shaderBufferFloat32AtomicMinMax_ )
@@ -62958,23 +66804,20 @@ namespace VULKAN_HPP_NAMESPACE
, shaderSharedFloat64AtomicMinMax( shaderSharedFloat64AtomicMinMax_ )
, shaderImageFloat32AtomicMinMax( shaderImageFloat32AtomicMinMax_ )
, sparseImageFloat32AtomicMinMax( sparseImageFloat32AtomicMinMax_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat2FeaturesEXT(
- PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat2FeaturesEXT( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderAtomicFloat2FeaturesEXT( VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceShaderAtomicFloat2FeaturesEXT(
- *reinterpret_cast<PhysicalDeviceShaderAtomicFloat2FeaturesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceShaderAtomicFloat2FeaturesEXT( VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceShaderAtomicFloat2FeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderAtomicFloat2FeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
- operator=( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceShaderAtomicFloat2FeaturesEXT & operator=( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
- operator=( VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderAtomicFloat2FeaturesEXT & operator=( VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT const *>( &rhs );
return *this;
@@ -62988,101 +66831,101 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
- setShaderBufferFloat16Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics_ ) VULKAN_HPP_NOEXCEPT
+ setShaderBufferFloat16Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics_ ) VULKAN_HPP_NOEXCEPT
{
shaderBufferFloat16Atomics = shaderBufferFloat16Atomics_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
- setShaderBufferFloat16AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+ setShaderBufferFloat16AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
{
shaderBufferFloat16AtomicAdd = shaderBufferFloat16AtomicAdd_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderBufferFloat16AtomicMinMax(
- VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
+ setShaderBufferFloat16AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
{
shaderBufferFloat16AtomicMinMax = shaderBufferFloat16AtomicMinMax_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderBufferFloat32AtomicMinMax(
- VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
+ setShaderBufferFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
{
shaderBufferFloat32AtomicMinMax = shaderBufferFloat32AtomicMinMax_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderBufferFloat64AtomicMinMax(
- VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
+ setShaderBufferFloat64AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
{
shaderBufferFloat64AtomicMinMax = shaderBufferFloat64AtomicMinMax_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
- setShaderSharedFloat16Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics_ ) VULKAN_HPP_NOEXCEPT
+ setShaderSharedFloat16Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics_ ) VULKAN_HPP_NOEXCEPT
{
shaderSharedFloat16Atomics = shaderSharedFloat16Atomics_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
- setShaderSharedFloat16AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+ setShaderSharedFloat16AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
{
shaderSharedFloat16AtomicAdd = shaderSharedFloat16AtomicAdd_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderSharedFloat16AtomicMinMax(
- VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
+ setShaderSharedFloat16AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
{
shaderSharedFloat16AtomicMinMax = shaderSharedFloat16AtomicMinMax_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderSharedFloat32AtomicMinMax(
- VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
+ setShaderSharedFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
{
shaderSharedFloat32AtomicMinMax = shaderSharedFloat32AtomicMinMax_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderSharedFloat64AtomicMinMax(
- VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
+ setShaderSharedFloat64AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
{
shaderSharedFloat64AtomicMinMax = shaderSharedFloat64AtomicMinMax_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderImageFloat32AtomicMinMax(
- VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
+ setShaderImageFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
{
shaderImageFloat32AtomicMinMax = shaderImageFloat32AtomicMinMax_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setSparseImageFloat32AtomicMinMax(
- VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
+ setSparseImageFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
{
sparseImageFloat32AtomicMinMax = sparseImageFloat32AtomicMinMax_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -63125,16 +66968,14 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( shaderBufferFloat16Atomics == rhs.shaderBufferFloat16Atomics ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderBufferFloat16Atomics == rhs.shaderBufferFloat16Atomics ) &&
( shaderBufferFloat16AtomicAdd == rhs.shaderBufferFloat16AtomicAdd ) &&
( shaderBufferFloat16AtomicMinMax == rhs.shaderBufferFloat16AtomicMinMax ) &&
( shaderBufferFloat32AtomicMinMax == rhs.shaderBufferFloat32AtomicMinMax ) &&
- ( shaderBufferFloat64AtomicMinMax == rhs.shaderBufferFloat64AtomicMinMax ) &&
- ( shaderSharedFloat16Atomics == rhs.shaderSharedFloat16Atomics ) &&
+ ( shaderBufferFloat64AtomicMinMax == rhs.shaderBufferFloat64AtomicMinMax ) && ( shaderSharedFloat16Atomics == rhs.shaderSharedFloat16Atomics ) &&
( shaderSharedFloat16AtomicAdd == rhs.shaderSharedFloat16AtomicAdd ) &&
( shaderSharedFloat16AtomicMinMax == rhs.shaderSharedFloat16AtomicMinMax ) &&
( shaderSharedFloat32AtomicMinMax == rhs.shaderSharedFloat32AtomicMinMax ) &&
@@ -63151,8 +66992,8 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax = {};
@@ -63166,15 +67007,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax = {};
VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT ) ==
- sizeof( VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT>::value,
- "PhysicalDeviceShaderAtomicFloat2FeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT>
@@ -63187,24 +67019,24 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceShaderAtomicFloatFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT;
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderBufferFloat32Atomics( shaderBufferFloat32Atomics_ )
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shaderBufferFloat32Atomics( shaderBufferFloat32Atomics_ )
, shaderBufferFloat32AtomicAdd( shaderBufferFloat32AtomicAdd_ )
, shaderBufferFloat64Atomics( shaderBufferFloat64Atomics_ )
, shaderBufferFloat64AtomicAdd( shaderBufferFloat64AtomicAdd_ )
@@ -63216,23 +67048,20 @@ namespace VULKAN_HPP_NAMESPACE
, shaderImageFloat32AtomicAdd( shaderImageFloat32AtomicAdd_ )
, sparseImageFloat32Atomics( sparseImageFloat32Atomics_ )
, sparseImageFloat32AtomicAdd( sparseImageFloat32AtomicAdd_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT(
- PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderAtomicFloatFeaturesEXT( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceShaderAtomicFloatFeaturesEXT(
- *reinterpret_cast<PhysicalDeviceShaderAtomicFloatFeaturesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceShaderAtomicFloatFeaturesEXT( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceShaderAtomicFloatFeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderAtomicFloatFeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceShaderAtomicFloatFeaturesEXT &
- operator=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceShaderAtomicFloatFeaturesEXT & operator=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderAtomicFloatFeaturesEXT &
- operator=( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderAtomicFloatFeaturesEXT & operator=( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT const *>( &rhs );
return *this;
@@ -63246,101 +67075,101 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
- setShaderBufferFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
+ setShaderBufferFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
{
shaderBufferFloat32Atomics = shaderBufferFloat32Atomics_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
- setShaderBufferFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+ setShaderBufferFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
{
shaderBufferFloat32AtomicAdd = shaderBufferFloat32AtomicAdd_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
- setShaderBufferFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT
+ setShaderBufferFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT
{
shaderBufferFloat64Atomics = shaderBufferFloat64Atomics_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
- setShaderBufferFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+ setShaderBufferFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
{
shaderBufferFloat64AtomicAdd = shaderBufferFloat64AtomicAdd_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
- setShaderSharedFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
+ setShaderSharedFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
{
shaderSharedFloat32Atomics = shaderSharedFloat32Atomics_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
- setShaderSharedFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+ setShaderSharedFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
{
shaderSharedFloat32AtomicAdd = shaderSharedFloat32AtomicAdd_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
- setShaderSharedFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT
+ setShaderSharedFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT
{
shaderSharedFloat64Atomics = shaderSharedFloat64Atomics_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
- setShaderSharedFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+ setShaderSharedFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
{
shaderSharedFloat64AtomicAdd = shaderSharedFloat64AtomicAdd_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
- setShaderImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
+ setShaderImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
{
shaderImageFloat32Atomics = shaderImageFloat32Atomics_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
- setShaderImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+ setShaderImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
{
shaderImageFloat32AtomicAdd = shaderImageFloat32AtomicAdd_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
- setSparseImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
+ setSparseImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
{
sparseImageFloat32Atomics = sparseImageFloat32Atomics_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
- setSparseImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+ setSparseImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
{
sparseImageFloat32AtomicAdd = sparseImageFloat32AtomicAdd_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicFloatFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderAtomicFloatFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -63383,21 +67212,15 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( shaderBufferFloat32Atomics == rhs.shaderBufferFloat32Atomics ) &&
- ( shaderBufferFloat32AtomicAdd == rhs.shaderBufferFloat32AtomicAdd ) &&
- ( shaderBufferFloat64Atomics == rhs.shaderBufferFloat64Atomics ) &&
- ( shaderBufferFloat64AtomicAdd == rhs.shaderBufferFloat64AtomicAdd ) &&
- ( shaderSharedFloat32Atomics == rhs.shaderSharedFloat32Atomics ) &&
- ( shaderSharedFloat32AtomicAdd == rhs.shaderSharedFloat32AtomicAdd ) &&
- ( shaderSharedFloat64Atomics == rhs.shaderSharedFloat64Atomics ) &&
- ( shaderSharedFloat64AtomicAdd == rhs.shaderSharedFloat64AtomicAdd ) &&
- ( shaderImageFloat32Atomics == rhs.shaderImageFloat32Atomics ) &&
- ( shaderImageFloat32AtomicAdd == rhs.shaderImageFloat32AtomicAdd ) &&
- ( sparseImageFloat32Atomics == rhs.sparseImageFloat32Atomics ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderBufferFloat32Atomics == rhs.shaderBufferFloat32Atomics ) &&
+ ( shaderBufferFloat32AtomicAdd == rhs.shaderBufferFloat32AtomicAdd ) && ( shaderBufferFloat64Atomics == rhs.shaderBufferFloat64Atomics ) &&
+ ( shaderBufferFloat64AtomicAdd == rhs.shaderBufferFloat64AtomicAdd ) && ( shaderSharedFloat32Atomics == rhs.shaderSharedFloat32Atomics ) &&
+ ( shaderSharedFloat32AtomicAdd == rhs.shaderSharedFloat32AtomicAdd ) && ( shaderSharedFloat64Atomics == rhs.shaderSharedFloat64Atomics ) &&
+ ( shaderSharedFloat64AtomicAdd == rhs.shaderSharedFloat64AtomicAdd ) && ( shaderImageFloat32Atomics == rhs.shaderImageFloat32Atomics ) &&
+ ( shaderImageFloat32AtomicAdd == rhs.shaderImageFloat32AtomicAdd ) && ( sparseImageFloat32Atomics == rhs.sparseImageFloat32Atomics ) &&
( sparseImageFloat32AtomicAdd == rhs.sparseImageFloat32AtomicAdd );
# endif
}
@@ -63409,8 +67232,8 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics = {};
@@ -63424,15 +67247,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics = {};
VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT ) ==
- sizeof( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT>::value,
- "PhysicalDeviceShaderAtomicFloatFeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT>
@@ -63445,31 +67259,29 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceShaderAtomicInt64Features;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceShaderAtomicInt64Features;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicInt64Features;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features(
- VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderBufferInt64Atomics( shaderBufferInt64Atomics_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shaderBufferInt64Atomics( shaderBufferInt64Atomics_ )
, shaderSharedInt64Atomics( shaderSharedInt64Atomics_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( PhysicalDeviceShaderAtomicInt64Features const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderAtomicInt64Features( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceShaderAtomicInt64Features(
- *reinterpret_cast<PhysicalDeviceShaderAtomicInt64Features const *>( &rhs ) )
- {}
+ : PhysicalDeviceShaderAtomicInt64Features( *reinterpret_cast<PhysicalDeviceShaderAtomicInt64Features const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceShaderAtomicInt64Features &
- operator=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceShaderAtomicInt64Features & operator=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderAtomicInt64Features &
- operator=( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderAtomicInt64Features & operator=( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features const *>( &rhs );
return *this;
@@ -63483,38 +67295,35 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features &
- setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+ setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
{
shaderBufferInt64Atomics = shaderBufferInt64Atomics_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features &
- setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+ setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
{
shaderSharedInt64Atomics = shaderSharedInt64Atomics_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceShaderAtomicInt64Features const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderAtomicInt64Features const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicInt64Features *>( this );
}
- explicit operator VkPhysicalDeviceShaderAtomicInt64Features &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderAtomicInt64Features &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderAtomicInt64Features *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -63527,11 +67336,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) &&
( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics );
# endif
}
@@ -63543,20 +67351,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicInt64Features;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicInt64Features;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features ) ==
- sizeof( VkPhysicalDeviceShaderAtomicInt64Features ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features>::value,
- "PhysicalDeviceShaderAtomicInt64Features is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicInt64Features>
@@ -63570,30 +67369,29 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceShaderClockFeaturesKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceShaderClockFeaturesKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceShaderClockFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderSubgroupClock( shaderSubgroupClock_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shaderSubgroupClock( shaderSubgroupClock_ )
, shaderDeviceClock( shaderDeviceClock_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( PhysicalDeviceShaderClockFeaturesKHR const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderClockFeaturesKHR( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderClockFeaturesKHR( *reinterpret_cast<PhysicalDeviceShaderClockFeaturesKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceShaderClockFeaturesKHR &
- operator=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceShaderClockFeaturesKHR & operator=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderClockFeaturesKHR &
- operator=( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderClockFeaturesKHR & operator=( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR const *>( &rhs );
return *this;
@@ -63607,38 +67405,34 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR &
- setShaderSubgroupClock( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ ) VULKAN_HPP_NOEXCEPT
+ setShaderSubgroupClock( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ ) VULKAN_HPP_NOEXCEPT
{
shaderSubgroupClock = shaderSubgroupClock_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR &
- setShaderDeviceClock( VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & setShaderDeviceClock( VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ ) VULKAN_HPP_NOEXCEPT
{
shaderDeviceClock = shaderDeviceClock_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceShaderClockFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderClockFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderClockFeaturesKHR *>( this );
}
- explicit operator VkPhysicalDeviceShaderClockFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderClockFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderClockFeaturesKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -63651,7 +67445,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSubgroupClock == rhs.shaderSubgroupClock ) &&
@@ -63671,14 +67465,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR ) ==
- sizeof( VkPhysicalDeviceShaderClockFeaturesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR>::value,
- "PhysicalDeviceShaderClockFeaturesKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderClockFeaturesKHR>
@@ -63686,59 +67472,239 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PhysicalDeviceShaderClockFeaturesKHR;
};
+ struct PhysicalDeviceShaderCoreBuiltinsFeaturesARM
+ {
+ using NativeType = VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCoreBuiltinsFeaturesARM;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreBuiltinsFeaturesARM( VULKAN_HPP_NAMESPACE::Bool32 shaderCoreBuiltins_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shaderCoreBuiltins( shaderCoreBuiltins_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreBuiltinsFeaturesARM( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceShaderCoreBuiltinsFeaturesARM( VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceShaderCoreBuiltinsFeaturesARM( *reinterpret_cast<PhysicalDeviceShaderCoreBuiltinsFeaturesARM const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceShaderCoreBuiltinsFeaturesARM & operator=( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceShaderCoreBuiltinsFeaturesARM & operator=( VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsFeaturesARM const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderCoreBuiltinsFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderCoreBuiltinsFeaturesARM &
+ setShaderCoreBuiltins( VULKAN_HPP_NAMESPACE::Bool32 shaderCoreBuiltins_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderCoreBuiltins = shaderCoreBuiltins_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM *>( this );
+ }
+
+ operator VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, shaderCoreBuiltins );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCoreBuiltins == rhs.shaderCoreBuiltins );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreBuiltinsFeaturesARM;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderCoreBuiltins = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCoreBuiltinsFeaturesARM>
+ {
+ using Type = PhysicalDeviceShaderCoreBuiltinsFeaturesARM;
+ };
+
+ struct PhysicalDeviceShaderCoreBuiltinsPropertiesARM
+ {
+ using NativeType = VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCoreBuiltinsPropertiesARM;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreBuiltinsPropertiesARM( uint32_t shaderCoreCount_ = {},
+ uint32_t shaderWarpsPerCore_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shaderCoreCount( shaderCoreCount_ )
+ , shaderWarpsPerCore( shaderWarpsPerCore_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceShaderCoreBuiltinsPropertiesARM( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceShaderCoreBuiltinsPropertiesARM( VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceShaderCoreBuiltinsPropertiesARM( *reinterpret_cast<PhysicalDeviceShaderCoreBuiltinsPropertiesARM const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceShaderCoreBuiltinsPropertiesARM & operator=( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceShaderCoreBuiltinsPropertiesARM & operator=( VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsPropertiesARM const *>( &rhs );
+ return *this;
+ }
+
+ operator VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM *>( this );
+ }
+
+ operator VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, shaderCoreCount, shaderWarpsPerCore );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCoreCount == rhs.shaderCoreCount ) && ( shaderWarpsPerCore == rhs.shaderWarpsPerCore );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreBuiltinsPropertiesARM;
+ void * pNext = {};
+ uint32_t shaderCoreCount = {};
+ uint32_t shaderWarpsPerCore = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCoreBuiltinsPropertiesARM>
+ {
+ using Type = PhysicalDeviceShaderCoreBuiltinsPropertiesARM;
+ };
+
struct PhysicalDeviceShaderCoreProperties2AMD
{
using NativeType = VkPhysicalDeviceShaderCoreProperties2AMD;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceShaderCoreProperties2AMD;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD(
- VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures_ = {},
- uint32_t activeComputeUnitCount_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderCoreFeatures( shaderCoreFeatures_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures_ = {},
+ uint32_t activeComputeUnitCount_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shaderCoreFeatures( shaderCoreFeatures_ )
, activeComputeUnitCount( activeComputeUnitCount_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( PhysicalDeviceShaderCoreProperties2AMD const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderCoreProperties2AMD( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceShaderCoreProperties2AMD(
- *reinterpret_cast<PhysicalDeviceShaderCoreProperties2AMD const *>( &rhs ) )
- {}
+ : PhysicalDeviceShaderCoreProperties2AMD( *reinterpret_cast<PhysicalDeviceShaderCoreProperties2AMD const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceShaderCoreProperties2AMD &
- operator=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceShaderCoreProperties2AMD & operator=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderCoreProperties2AMD &
- operator=( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderCoreProperties2AMD & operator=( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceShaderCoreProperties2AMD const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderCoreProperties2AMD const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderCoreProperties2AMD *>( this );
}
- explicit operator VkPhysicalDeviceShaderCoreProperties2AMD &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderCoreProperties2AMD &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderCoreProperties2AMD *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD const &,
- uint32_t const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD const &, uint32_t const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -63751,7 +67717,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCoreFeatures == rhs.shaderCoreFeatures ) &&
@@ -63766,20 +67732,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures = {};
uint32_t activeComputeUnitCount = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD ) ==
- sizeof( VkPhysicalDeviceShaderCoreProperties2AMD ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD>::value,
- "PhysicalDeviceShaderCoreProperties2AMD is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCoreProperties2AMD>
@@ -63792,26 +67749,26 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceShaderCorePropertiesAMD;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceShaderCorePropertiesAMD;
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceShaderCorePropertiesAMD( uint32_t shaderEngineCount_ = {},
- uint32_t shaderArraysPerEngineCount_ = {},
- uint32_t computeUnitsPerShaderArray_ = {},
- uint32_t simdPerComputeUnit_ = {},
- uint32_t wavefrontsPerSimd_ = {},
- uint32_t wavefrontSize_ = {},
- uint32_t sgprsPerSimd_ = {},
- uint32_t minSgprAllocation_ = {},
- uint32_t maxSgprAllocation_ = {},
- uint32_t sgprAllocationGranularity_ = {},
- uint32_t vgprsPerSimd_ = {},
- uint32_t minVgprAllocation_ = {},
- uint32_t maxVgprAllocation_ = {},
- uint32_t vgprAllocationGranularity_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderEngineCount( shaderEngineCount_ )
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( uint32_t shaderEngineCount_ = {},
+ uint32_t shaderArraysPerEngineCount_ = {},
+ uint32_t computeUnitsPerShaderArray_ = {},
+ uint32_t simdPerComputeUnit_ = {},
+ uint32_t wavefrontsPerSimd_ = {},
+ uint32_t wavefrontSize_ = {},
+ uint32_t sgprsPerSimd_ = {},
+ uint32_t minSgprAllocation_ = {},
+ uint32_t maxSgprAllocation_ = {},
+ uint32_t sgprAllocationGranularity_ = {},
+ uint32_t vgprsPerSimd_ = {},
+ uint32_t minVgprAllocation_ = {},
+ uint32_t maxVgprAllocation_ = {},
+ uint32_t vgprAllocationGranularity_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shaderEngineCount( shaderEngineCount_ )
, shaderArraysPerEngineCount( shaderArraysPerEngineCount_ )
, computeUnitsPerShaderArray( computeUnitsPerShaderArray_ )
, simdPerComputeUnit( simdPerComputeUnit_ )
@@ -63825,38 +67782,36 @@ namespace VULKAN_HPP_NAMESPACE
, minVgprAllocation( minVgprAllocation_ )
, maxVgprAllocation( maxVgprAllocation_ )
, vgprAllocationGranularity( vgprAllocationGranularity_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( PhysicalDeviceShaderCorePropertiesAMD const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderCorePropertiesAMD( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceShaderCorePropertiesAMD(
- *reinterpret_cast<PhysicalDeviceShaderCorePropertiesAMD const *>( &rhs ) )
- {}
+ : PhysicalDeviceShaderCorePropertiesAMD( *reinterpret_cast<PhysicalDeviceShaderCorePropertiesAMD const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceShaderCorePropertiesAMD &
- operator=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceShaderCorePropertiesAMD & operator=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderCorePropertiesAMD &
- operator=( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderCorePropertiesAMD & operator=( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceShaderCorePropertiesAMD const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderCorePropertiesAMD const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderCorePropertiesAMD *>( this );
}
- explicit operator VkPhysicalDeviceShaderCorePropertiesAMD &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderCorePropertiesAMD &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderCorePropertiesAMD *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -63903,15 +67858,13 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderEngineCount == rhs.shaderEngineCount ) &&
- ( shaderArraysPerEngineCount == rhs.shaderArraysPerEngineCount ) &&
- ( computeUnitsPerShaderArray == rhs.computeUnitsPerShaderArray ) &&
- ( simdPerComputeUnit == rhs.simdPerComputeUnit ) && ( wavefrontsPerSimd == rhs.wavefrontsPerSimd ) &&
- ( wavefrontSize == rhs.wavefrontSize ) && ( sgprsPerSimd == rhs.sgprsPerSimd ) &&
- ( minSgprAllocation == rhs.minSgprAllocation ) && ( maxSgprAllocation == rhs.maxSgprAllocation ) &&
+ ( shaderArraysPerEngineCount == rhs.shaderArraysPerEngineCount ) && ( computeUnitsPerShaderArray == rhs.computeUnitsPerShaderArray ) &&
+ ( simdPerComputeUnit == rhs.simdPerComputeUnit ) && ( wavefrontsPerSimd == rhs.wavefrontsPerSimd ) && ( wavefrontSize == rhs.wavefrontSize ) &&
+ ( sgprsPerSimd == rhs.sgprsPerSimd ) && ( minSgprAllocation == rhs.minSgprAllocation ) && ( maxSgprAllocation == rhs.maxSgprAllocation ) &&
( sgprAllocationGranularity == rhs.sgprAllocationGranularity ) && ( vgprsPerSimd == rhs.vgprsPerSimd ) &&
( minVgprAllocation == rhs.minVgprAllocation ) && ( maxVgprAllocation == rhs.maxVgprAllocation ) &&
( vgprAllocationGranularity == rhs.vgprAllocationGranularity );
@@ -63925,9 +67878,9 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD;
- void * pNext = {};
- uint32_t shaderEngineCount = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD;
+ void * pNext = {};
+ uint32_t shaderEngineCount = {};
uint32_t shaderArraysPerEngineCount = {};
uint32_t computeUnitsPerShaderArray = {};
uint32_t simdPerComputeUnit = {};
@@ -63942,14 +67895,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t maxVgprAllocation = {};
uint32_t vgprAllocationGranularity = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD ) ==
- sizeof( VkPhysicalDeviceShaderCorePropertiesAMD ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD>::value,
- "PhysicalDeviceShaderCorePropertiesAMD is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCorePropertiesAMD>
@@ -63962,63 +67907,60 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeatures(
- VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeatures(
- PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceShaderDemoteToHelperInvocationFeatures( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderDemoteToHelperInvocationFeatures(
- VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceShaderDemoteToHelperInvocationFeatures(
- *reinterpret_cast<PhysicalDeviceShaderDemoteToHelperInvocationFeatures const *>( &rhs ) )
- {}
+ PhysicalDeviceShaderDemoteToHelperInvocationFeatures( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceShaderDemoteToHelperInvocationFeatures( *reinterpret_cast<PhysicalDeviceShaderDemoteToHelperInvocationFeatures const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceShaderDemoteToHelperInvocationFeatures &
operator=( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderDemoteToHelperInvocationFeatures &
- operator=( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderDemoteToHelperInvocationFeatures & operator=( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeatures &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeatures & setShaderDemoteToHelperInvocation(
- VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeatures &
+ setShaderDemoteToHelperInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT
{
shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures *>( this );
}
- explicit operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -64035,11 +67977,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation );
# endif
}
@@ -64050,19 +67991,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures ) ==
- sizeof( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<
- VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures>::value,
- "PhysicalDeviceShaderDemoteToHelperInvocationFeatures is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures>
@@ -64076,30 +68008,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceShaderDrawParametersFeatures;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceShaderDrawParametersFeatures;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures(
- VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderDrawParameters( shaderDrawParameters_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shaderDrawParameters( shaderDrawParameters_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures(
- PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderDrawParametersFeatures( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceShaderDrawParametersFeatures(
- *reinterpret_cast<PhysicalDeviceShaderDrawParametersFeatures const *>( &rhs ) )
- {}
+ PhysicalDeviceShaderDrawParametersFeatures( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceShaderDrawParametersFeatures( *reinterpret_cast<PhysicalDeviceShaderDrawParametersFeatures const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceShaderDrawParametersFeatures &
- operator=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceShaderDrawParametersFeatures & operator=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderDrawParametersFeatures &
- operator=( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderDrawParametersFeatures & operator=( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures const *>( &rhs );
return *this;
@@ -64113,24 +68042,24 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDrawParametersFeatures &
- setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT
+ setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT
{
shaderDrawParameters = shaderDrawParameters_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceShaderDrawParametersFeatures const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderDrawParametersFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderDrawParametersFeatures *>( this );
}
- explicit operator VkPhysicalDeviceShaderDrawParametersFeatures &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderDrawParametersFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderDrawParametersFeatures *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -64147,7 +68076,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderDrawParameters == rhs.shaderDrawParameters );
@@ -64161,19 +68090,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures ) ==
- sizeof( VkPhysicalDeviceShaderDrawParametersFeatures ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures>::value,
- "PhysicalDeviceShaderDrawParametersFeatures is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderDrawParametersFeatures>
@@ -64182,36 +68102,135 @@ namespace VULKAN_HPP_NAMESPACE
};
using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures;
+ struct PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD
+ {
+ using NativeType = VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( VULKAN_HPP_NAMESPACE::Bool32 shaderEarlyAndLateFragmentTests_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shaderEarlyAndLateFragmentTests( shaderEarlyAndLateFragmentTests_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs )
+ VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( *reinterpret_cast<PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD &
+ operator=( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD &
+ operator=( VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD &
+ setShaderEarlyAndLateFragmentTests( VULKAN_HPP_NAMESPACE::Bool32 shaderEarlyAndLateFragmentTests_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderEarlyAndLateFragmentTests = shaderEarlyAndLateFragmentTests_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD *>( this );
+ }
+
+ operator VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, shaderEarlyAndLateFragmentTests );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderEarlyAndLateFragmentTests == rhs.shaderEarlyAndLateFragmentTests );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderEarlyAndLateFragmentTests = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD>
+ {
+ using Type = PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD;
+ };
+
struct PhysicalDeviceShaderFloat16Int8Features
{
using NativeType = VkPhysicalDeviceShaderFloat16Int8Features;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceShaderFloat16Int8Features;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderFloat16Int8Features;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceShaderFloat16Int8Features( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderFloat16( shaderFloat16_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shaderFloat16( shaderFloat16_ )
, shaderInt8( shaderInt8_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( PhysicalDeviceShaderFloat16Int8Features const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderFloat16Int8Features( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceShaderFloat16Int8Features(
- *reinterpret_cast<PhysicalDeviceShaderFloat16Int8Features const *>( &rhs ) )
- {}
+ : PhysicalDeviceShaderFloat16Int8Features( *reinterpret_cast<PhysicalDeviceShaderFloat16Int8Features const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceShaderFloat16Int8Features &
- operator=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceShaderFloat16Int8Features & operator=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderFloat16Int8Features &
- operator=( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderFloat16Int8Features & operator=( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features const *>( &rhs );
return *this;
@@ -64224,39 +68243,34 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features &
- setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
{
shaderFloat16 = shaderFloat16_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features &
- setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
{
shaderInt8 = shaderInt8_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceShaderFloat16Int8Features const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderFloat16Int8Features const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderFloat16Int8Features *>( this );
}
- explicit operator VkPhysicalDeviceShaderFloat16Int8Features &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderFloat16Int8Features &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderFloat16Int8Features *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -64269,11 +68283,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceShaderFloat16Int8Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderFloat16 == rhs.shaderFloat16 ) &&
- ( shaderInt8 == rhs.shaderInt8 );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderFloat16 == rhs.shaderFloat16 ) && ( shaderInt8 == rhs.shaderInt8 );
# endif
}
@@ -64289,15 +68302,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features ) ==
- sizeof( VkPhysicalDeviceShaderFloat16Int8Features ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features>::value,
- "PhysicalDeviceShaderFloat16Int8Features is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderFloat16Int8Features>
@@ -64312,78 +68316,72 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderImageInt64Atomics( shaderImageInt64Atomics_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shaderImageInt64Atomics( shaderImageInt64Atomics_ )
, sparseImageInt64Atomics( sparseImageInt64Atomics_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT(
- PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceShaderImageAtomicInt64FeaturesEXT(
- *reinterpret_cast<PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceShaderImageAtomicInt64FeaturesEXT &
- operator=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & operator=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderImageAtomicInt64FeaturesEXT &
- operator=( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & operator=( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT &
- setShaderImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+ setShaderImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
{
shaderImageInt64Atomics = shaderImageInt64Atomics_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT &
- setSparseImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+ setSparseImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
{
sparseImageInt64Atomics = sparseImageInt64Atomics_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -64396,11 +68394,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( shaderImageInt64Atomics == rhs.shaderImageInt64Atomics ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderImageInt64Atomics == rhs.shaderImageInt64Atomics ) &&
( sparseImageInt64Atomics == rhs.sparseImageInt64Atomics );
# endif
}
@@ -64412,20 +68409,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics = {};
VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT ) ==
- sizeof( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT>::value,
- "PhysicalDeviceShaderImageAtomicInt64FeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT>
@@ -64438,29 +68426,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceShaderImageFootprintFeaturesNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV(
- VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ = {} ) VULKAN_HPP_NOEXCEPT : imageFootprint( imageFootprint_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , imageFootprint( imageFootprint_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV(
- PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderImageFootprintFeaturesNV( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceShaderImageFootprintFeaturesNV(
- *reinterpret_cast<PhysicalDeviceShaderImageFootprintFeaturesNV const *>( &rhs ) )
- {}
+ PhysicalDeviceShaderImageFootprintFeaturesNV( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceShaderImageFootprintFeaturesNV( *reinterpret_cast<PhysicalDeviceShaderImageFootprintFeaturesNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceShaderImageFootprintFeaturesNV &
- operator=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderImageFootprintFeaturesNV &
- operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV const *>( &rhs );
return *this;
@@ -64473,25 +68459,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageFootprintFeaturesNV &
- setImageFootprint( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageFootprintFeaturesNV & setImageFootprint( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ ) VULKAN_HPP_NOEXCEPT
{
imageFootprint = imageFootprint_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceShaderImageFootprintFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderImageFootprintFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderImageFootprintFeaturesNV *>( this );
}
- explicit operator VkPhysicalDeviceShaderImageFootprintFeaturesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderImageFootprintFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderImageFootprintFeaturesNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -64508,7 +68493,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageFootprint == rhs.imageFootprint );
@@ -64526,15 +68511,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 imageFootprint = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV ) ==
- sizeof( VkPhysicalDeviceShaderImageFootprintFeaturesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV>::value,
- "PhysicalDeviceShaderImageFootprintFeaturesNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV>
@@ -64547,62 +68523,59 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceShaderIntegerDotProductFeatures;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductFeatures(
- VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderIntegerDotProduct( shaderIntegerDotProduct_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shaderIntegerDotProduct( shaderIntegerDotProduct_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductFeatures(
- PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceShaderIntegerDotProductFeatures( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderIntegerDotProductFeatures( VkPhysicalDeviceShaderIntegerDotProductFeatures const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceShaderIntegerDotProductFeatures(
- *reinterpret_cast<PhysicalDeviceShaderIntegerDotProductFeatures const *>( &rhs ) )
- {}
+ PhysicalDeviceShaderIntegerDotProductFeatures( VkPhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceShaderIntegerDotProductFeatures( *reinterpret_cast<PhysicalDeviceShaderIntegerDotProductFeatures const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceShaderIntegerDotProductFeatures &
- operator=( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceShaderIntegerDotProductFeatures & operator=( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderIntegerDotProductFeatures &
- operator=( VkPhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderIntegerDotProductFeatures & operator=( VkPhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductFeatures &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductFeatures &
- setShaderIntegerDotProduct( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ ) VULKAN_HPP_NOEXCEPT
+ setShaderIntegerDotProduct( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ ) VULKAN_HPP_NOEXCEPT
{
shaderIntegerDotProduct = shaderIntegerDotProduct_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceShaderIntegerDotProductFeatures const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderIntegerDotProductFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderIntegerDotProductFeatures *>( this );
}
- explicit operator VkPhysicalDeviceShaderIntegerDotProductFeatures &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderIntegerDotProductFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderIntegerDotProductFeatures *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -64619,11 +68592,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( shaderIntegerDotProduct == rhs.shaderIntegerDotProduct );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderIntegerDotProduct == rhs.shaderIntegerDotProduct );
# endif
}
@@ -64634,19 +68606,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures ) ==
- sizeof( VkPhysicalDeviceShaderIntegerDotProductFeatures ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures>::value,
- "PhysicalDeviceShaderIntegerDotProductFeatures is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures>
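  // Illustrative sketch, not part of the patch or the generated header: the reworked
  // constructors above take a trailing pNext_ argument, so a feature-query chain can be
  // assembled inline. Assumes a valid vk::PhysicalDevice named physicalDevice and a driver
  // exposing VK_KHR_shader_integer_dot_product or Vulkan 1.3.
  //
  //   vk::PhysicalDeviceShaderIntegerDotProductFeatures dotProductFeatures{};
  //   vk::PhysicalDeviceFeatures2                       features2( {}, &dotProductFeatures );
  //   physicalDevice.getFeatures2( &features2 );
  //   if ( dotProductFeatures.shaderIntegerDotProduct )
  //   {
  //     // integer dot product instructions may be used in shaders on this device
  //   }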
@@ -64660,8 +68623,7 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceShaderIntegerDotProductProperties;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceShaderIntegerDotProductProperties;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerDotProductProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductProperties(
@@ -64694,15 +68656,15 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ = {},
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ = {},
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ = {} )
- VULKAN_HPP_NOEXCEPT
- : integerDotProduct8BitUnsignedAccelerated( integerDotProduct8BitUnsignedAccelerated_ )
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , integerDotProduct8BitUnsignedAccelerated( integerDotProduct8BitUnsignedAccelerated_ )
, integerDotProduct8BitSignedAccelerated( integerDotProduct8BitSignedAccelerated_ )
, integerDotProduct8BitMixedSignednessAccelerated( integerDotProduct8BitMixedSignednessAccelerated_ )
, integerDotProduct4x8BitPackedUnsignedAccelerated( integerDotProduct4x8BitPackedUnsignedAccelerated_ )
, integerDotProduct4x8BitPackedSignedAccelerated( integerDotProduct4x8BitPackedSignedAccelerated_ )
- , integerDotProduct4x8BitPackedMixedSignednessAccelerated(
- integerDotProduct4x8BitPackedMixedSignednessAccelerated_ )
+ , integerDotProduct4x8BitPackedMixedSignednessAccelerated( integerDotProduct4x8BitPackedMixedSignednessAccelerated_ )
, integerDotProduct16BitUnsignedAccelerated( integerDotProduct16BitUnsignedAccelerated_ )
, integerDotProduct16BitSignedAccelerated( integerDotProduct16BitSignedAccelerated_ )
, integerDotProduct16BitMixedSignednessAccelerated( integerDotProduct16BitMixedSignednessAccelerated_ )
@@ -64712,347 +68674,53 @@ namespace VULKAN_HPP_NAMESPACE
, integerDotProduct64BitUnsignedAccelerated( integerDotProduct64BitUnsignedAccelerated_ )
, integerDotProduct64BitSignedAccelerated( integerDotProduct64BitSignedAccelerated_ )
, integerDotProduct64BitMixedSignednessAccelerated( integerDotProduct64BitMixedSignednessAccelerated_ )
- , integerDotProductAccumulatingSaturating8BitUnsignedAccelerated(
- integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ )
- , integerDotProductAccumulatingSaturating8BitSignedAccelerated(
- integerDotProductAccumulatingSaturating8BitSignedAccelerated_ )
- , integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated(
- integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ )
- , integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated(
- integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ )
- , integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated(
- integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ )
+ , integerDotProductAccumulatingSaturating8BitUnsignedAccelerated( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ )
+ , integerDotProductAccumulatingSaturating8BitSignedAccelerated( integerDotProductAccumulatingSaturating8BitSignedAccelerated_ )
+ , integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ )
+ , integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ )
+ , integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ )
, integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated(
integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ )
- , integerDotProductAccumulatingSaturating16BitUnsignedAccelerated(
- integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ )
- , integerDotProductAccumulatingSaturating16BitSignedAccelerated(
- integerDotProductAccumulatingSaturating16BitSignedAccelerated_ )
- , integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated(
- integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ )
- , integerDotProductAccumulatingSaturating32BitUnsignedAccelerated(
- integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ )
- , integerDotProductAccumulatingSaturating32BitSignedAccelerated(
- integerDotProductAccumulatingSaturating32BitSignedAccelerated_ )
- , integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated(
- integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ )
- , integerDotProductAccumulatingSaturating64BitUnsignedAccelerated(
- integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ )
- , integerDotProductAccumulatingSaturating64BitSignedAccelerated(
- integerDotProductAccumulatingSaturating64BitSignedAccelerated_ )
- , integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated(
- integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ )
- {}
-
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductProperties(
- PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- PhysicalDeviceShaderIntegerDotProductProperties( VkPhysicalDeviceShaderIntegerDotProductProperties const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceShaderIntegerDotProductProperties(
- *reinterpret_cast<PhysicalDeviceShaderIntegerDotProductProperties const *>( &rhs ) )
- {}
-#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
-
- PhysicalDeviceShaderIntegerDotProductProperties &
- operator=( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- PhysicalDeviceShaderIntegerDotProductProperties &
- operator=( VkPhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties const *>( &rhs );
- return *this;
- }
-
-#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setIntegerDotProduct8BitUnsignedAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated_ ) VULKAN_HPP_NOEXCEPT
- {
- integerDotProduct8BitUnsignedAccelerated = integerDotProduct8BitUnsignedAccelerated_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & setIntegerDotProduct8BitSignedAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated_ ) VULKAN_HPP_NOEXCEPT
- {
- integerDotProduct8BitSignedAccelerated = integerDotProduct8BitSignedAccelerated_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setIntegerDotProduct8BitMixedSignednessAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated_ ) VULKAN_HPP_NOEXCEPT
- {
- integerDotProduct8BitMixedSignednessAccelerated = integerDotProduct8BitMixedSignednessAccelerated_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setIntegerDotProduct4x8BitPackedUnsignedAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated_ ) VULKAN_HPP_NOEXCEPT
- {
- integerDotProduct4x8BitPackedUnsignedAccelerated = integerDotProduct4x8BitPackedUnsignedAccelerated_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setIntegerDotProduct4x8BitPackedSignedAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated_ ) VULKAN_HPP_NOEXCEPT
- {
- integerDotProduct4x8BitPackedSignedAccelerated = integerDotProduct4x8BitPackedSignedAccelerated_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setIntegerDotProduct4x8BitPackedMixedSignednessAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated_ ) VULKAN_HPP_NOEXCEPT
- {
- integerDotProduct4x8BitPackedMixedSignednessAccelerated =
- integerDotProduct4x8BitPackedMixedSignednessAccelerated_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setIntegerDotProduct16BitUnsignedAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated_ ) VULKAN_HPP_NOEXCEPT
- {
- integerDotProduct16BitUnsignedAccelerated = integerDotProduct16BitUnsignedAccelerated_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setIntegerDotProduct16BitSignedAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated_ ) VULKAN_HPP_NOEXCEPT
- {
- integerDotProduct16BitSignedAccelerated = integerDotProduct16BitSignedAccelerated_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setIntegerDotProduct16BitMixedSignednessAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated_ ) VULKAN_HPP_NOEXCEPT
- {
- integerDotProduct16BitMixedSignednessAccelerated = integerDotProduct16BitMixedSignednessAccelerated_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setIntegerDotProduct32BitUnsignedAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated_ ) VULKAN_HPP_NOEXCEPT
- {
- integerDotProduct32BitUnsignedAccelerated = integerDotProduct32BitUnsignedAccelerated_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setIntegerDotProduct32BitSignedAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated_ ) VULKAN_HPP_NOEXCEPT
- {
- integerDotProduct32BitSignedAccelerated = integerDotProduct32BitSignedAccelerated_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setIntegerDotProduct32BitMixedSignednessAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated_ ) VULKAN_HPP_NOEXCEPT
- {
- integerDotProduct32BitMixedSignednessAccelerated = integerDotProduct32BitMixedSignednessAccelerated_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setIntegerDotProduct64BitUnsignedAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated_ ) VULKAN_HPP_NOEXCEPT
- {
- integerDotProduct64BitUnsignedAccelerated = integerDotProduct64BitUnsignedAccelerated_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setIntegerDotProduct64BitSignedAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated_ ) VULKAN_HPP_NOEXCEPT
- {
- integerDotProduct64BitSignedAccelerated = integerDotProduct64BitSignedAccelerated_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setIntegerDotProduct64BitMixedSignednessAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated_ ) VULKAN_HPP_NOEXCEPT
- {
- integerDotProduct64BitMixedSignednessAccelerated = integerDotProduct64BitMixedSignednessAccelerated_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setIntegerDotProductAccumulatingSaturating8BitUnsignedAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ )
- VULKAN_HPP_NOEXCEPT
- {
- integerDotProductAccumulatingSaturating8BitUnsignedAccelerated =
- integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setIntegerDotProductAccumulatingSaturating8BitSignedAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated_ ) VULKAN_HPP_NOEXCEPT
- {
- integerDotProductAccumulatingSaturating8BitSignedAccelerated =
- integerDotProductAccumulatingSaturating8BitSignedAccelerated_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setIntegerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ )
- VULKAN_HPP_NOEXCEPT
- {
- integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated =
- integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setIntegerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ )
- VULKAN_HPP_NOEXCEPT
- {
- integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated =
- integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setIntegerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ )
- VULKAN_HPP_NOEXCEPT
- {
- integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated =
- integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setIntegerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ )
- VULKAN_HPP_NOEXCEPT
- {
- integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated =
- integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setIntegerDotProductAccumulatingSaturating16BitUnsignedAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ )
- VULKAN_HPP_NOEXCEPT
- {
- integerDotProductAccumulatingSaturating16BitUnsignedAccelerated =
- integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setIntegerDotProductAccumulatingSaturating16BitSignedAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated_ )
- VULKAN_HPP_NOEXCEPT
- {
- integerDotProductAccumulatingSaturating16BitSignedAccelerated =
- integerDotProductAccumulatingSaturating16BitSignedAccelerated_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setIntegerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ )
- VULKAN_HPP_NOEXCEPT
+ , integerDotProductAccumulatingSaturating16BitUnsignedAccelerated( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ )
+ , integerDotProductAccumulatingSaturating16BitSignedAccelerated( integerDotProductAccumulatingSaturating16BitSignedAccelerated_ )
+ , integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ )
+ , integerDotProductAccumulatingSaturating32BitUnsignedAccelerated( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ )
+ , integerDotProductAccumulatingSaturating32BitSignedAccelerated( integerDotProductAccumulatingSaturating32BitSignedAccelerated_ )
+ , integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ )
+ , integerDotProductAccumulatingSaturating64BitUnsignedAccelerated( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ )
+ , integerDotProductAccumulatingSaturating64BitSignedAccelerated( integerDotProductAccumulatingSaturating64BitSignedAccelerated_ )
+ , integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ )
{
- integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated =
- integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setIntegerDotProductAccumulatingSaturating32BitUnsignedAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ )
- VULKAN_HPP_NOEXCEPT
- {
- integerDotProductAccumulatingSaturating32BitUnsignedAccelerated =
- integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_;
- return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setIntegerDotProductAccumulatingSaturating32BitSignedAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated_ )
- VULKAN_HPP_NOEXCEPT
- {
- integerDotProductAccumulatingSaturating32BitSignedAccelerated =
- integerDotProductAccumulatingSaturating32BitSignedAccelerated_;
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceShaderIntegerDotProductProperties( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setIntegerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ )
- VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderIntegerDotProductProperties( VkPhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceShaderIntegerDotProductProperties( *reinterpret_cast<PhysicalDeviceShaderIntegerDotProductProperties const *>( &rhs ) )
{
- integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated =
- integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setIntegerDotProductAccumulatingSaturating64BitUnsignedAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ )
- VULKAN_HPP_NOEXCEPT
- {
- integerDotProductAccumulatingSaturating64BitUnsignedAccelerated =
- integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_;
- return *this;
}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setIntegerDotProductAccumulatingSaturating64BitSignedAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated_ )
- VULKAN_HPP_NOEXCEPT
- {
- integerDotProductAccumulatingSaturating64BitSignedAccelerated =
- integerDotProductAccumulatingSaturating64BitSignedAccelerated_;
- return *this;
- }
+ PhysicalDeviceShaderIntegerDotProductProperties & operator=( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties &
- setIntegerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated(
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ )
- VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderIntegerDotProductProperties & operator=( VkPhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated =
- integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties const *>( &rhs );
return *this;
}
-#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceShaderIntegerDotProductProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderIntegerDotProductProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderIntegerDotProductProperties *>( this );
}
- explicit operator VkPhysicalDeviceShaderIntegerDotProductProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderIntegerDotProductProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderIntegerDotProductProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -65131,35 +68799,26 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( integerDotProduct8BitUnsignedAccelerated == rhs.integerDotProduct8BitUnsignedAccelerated ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( integerDotProduct8BitUnsignedAccelerated == rhs.integerDotProduct8BitUnsignedAccelerated ) &&
( integerDotProduct8BitSignedAccelerated == rhs.integerDotProduct8BitSignedAccelerated ) &&
- ( integerDotProduct8BitMixedSignednessAccelerated ==
- rhs.integerDotProduct8BitMixedSignednessAccelerated ) &&
- ( integerDotProduct4x8BitPackedUnsignedAccelerated ==
- rhs.integerDotProduct4x8BitPackedUnsignedAccelerated ) &&
+ ( integerDotProduct8BitMixedSignednessAccelerated == rhs.integerDotProduct8BitMixedSignednessAccelerated ) &&
+ ( integerDotProduct4x8BitPackedUnsignedAccelerated == rhs.integerDotProduct4x8BitPackedUnsignedAccelerated ) &&
( integerDotProduct4x8BitPackedSignedAccelerated == rhs.integerDotProduct4x8BitPackedSignedAccelerated ) &&
- ( integerDotProduct4x8BitPackedMixedSignednessAccelerated ==
- rhs.integerDotProduct4x8BitPackedMixedSignednessAccelerated ) &&
+ ( integerDotProduct4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProduct4x8BitPackedMixedSignednessAccelerated ) &&
( integerDotProduct16BitUnsignedAccelerated == rhs.integerDotProduct16BitUnsignedAccelerated ) &&
( integerDotProduct16BitSignedAccelerated == rhs.integerDotProduct16BitSignedAccelerated ) &&
- ( integerDotProduct16BitMixedSignednessAccelerated ==
- rhs.integerDotProduct16BitMixedSignednessAccelerated ) &&
+ ( integerDotProduct16BitMixedSignednessAccelerated == rhs.integerDotProduct16BitMixedSignednessAccelerated ) &&
( integerDotProduct32BitUnsignedAccelerated == rhs.integerDotProduct32BitUnsignedAccelerated ) &&
( integerDotProduct32BitSignedAccelerated == rhs.integerDotProduct32BitSignedAccelerated ) &&
- ( integerDotProduct32BitMixedSignednessAccelerated ==
- rhs.integerDotProduct32BitMixedSignednessAccelerated ) &&
+ ( integerDotProduct32BitMixedSignednessAccelerated == rhs.integerDotProduct32BitMixedSignednessAccelerated ) &&
( integerDotProduct64BitUnsignedAccelerated == rhs.integerDotProduct64BitUnsignedAccelerated ) &&
( integerDotProduct64BitSignedAccelerated == rhs.integerDotProduct64BitSignedAccelerated ) &&
- ( integerDotProduct64BitMixedSignednessAccelerated ==
- rhs.integerDotProduct64BitMixedSignednessAccelerated ) &&
- ( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated ==
- rhs.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated ) &&
- ( integerDotProductAccumulatingSaturating8BitSignedAccelerated ==
- rhs.integerDotProductAccumulatingSaturating8BitSignedAccelerated ) &&
+ ( integerDotProduct64BitMixedSignednessAccelerated == rhs.integerDotProduct64BitMixedSignednessAccelerated ) &&
+ ( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated ) &&
+ ( integerDotProductAccumulatingSaturating8BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitSignedAccelerated ) &&
( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated ==
rhs.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated ) &&
( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated ==
@@ -65168,22 +68827,16 @@ namespace VULKAN_HPP_NAMESPACE
rhs.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated ) &&
( integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated ==
rhs.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated ) &&
- ( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated ==
- rhs.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated ) &&
- ( integerDotProductAccumulatingSaturating16BitSignedAccelerated ==
- rhs.integerDotProductAccumulatingSaturating16BitSignedAccelerated ) &&
+ ( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated ) &&
+ ( integerDotProductAccumulatingSaturating16BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitSignedAccelerated ) &&
( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated ==
rhs.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated ) &&
- ( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated ==
- rhs.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated ) &&
- ( integerDotProductAccumulatingSaturating32BitSignedAccelerated ==
- rhs.integerDotProductAccumulatingSaturating32BitSignedAccelerated ) &&
+ ( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated ) &&
+ ( integerDotProductAccumulatingSaturating32BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitSignedAccelerated ) &&
( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated ==
rhs.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated ) &&
- ( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated ==
- rhs.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated ) &&
- ( integerDotProductAccumulatingSaturating64BitSignedAccelerated ==
- rhs.integerDotProductAccumulatingSaturating64BitSignedAccelerated ) &&
+ ( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated ) &&
+ ( integerDotProductAccumulatingSaturating64BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitSignedAccelerated ) &&
( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated ==
rhs.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated );
# endif
@@ -65196,48 +68849,39 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerDotProductProperties;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties ) ==
- sizeof( VkPhysicalDeviceShaderIntegerDotProductProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties>::value,
- "PhysicalDeviceShaderIntegerDotProductProperties is not nothrow_move_constructible!" );
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerDotProductProperties;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated = {};
+ };
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderIntegerDotProductProperties>
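  // Illustrative sketch, not part of the patch: when VULKAN_HPP_USE_REFLECT is defined,
  // operator== above compares reflect() results, i.e. std::tie tuples of member references,
  // which is equivalent to the member-by-member comparison in the #else branch. A minimal
  // standalone illustration of that mechanism (names here are hypothetical):
  //
  //   #include <tuple>
  //   struct Reflected
  //   {
  //     int      sType = 0;
  //     void *   pNext = nullptr;
  //     unsigned a = 0, b = 0;
  //
  //     auto reflect() const { return std::tie( sType, pNext, a, b ); }
  //     bool operator==( Reflected const & rhs ) const { return reflect() == rhs.reflect(); }
  //   };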
@@ -65251,63 +68895,60 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL(
- VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderIntegerFunctions2( shaderIntegerFunctions2_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shaderIntegerFunctions2( shaderIntegerFunctions2_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL(
- PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL(
- VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL(
- *reinterpret_cast<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *>( &rhs ) )
- {}
+ PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( *reinterpret_cast<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &
operator=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &
- operator=( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & operator=( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &
- setShaderIntegerFunctions2( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ ) VULKAN_HPP_NOEXCEPT
+ setShaderIntegerFunctions2( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ ) VULKAN_HPP_NOEXCEPT
{
shaderIntegerFunctions2 = shaderIntegerFunctions2_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL *>( this );
}
- explicit operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -65324,11 +68965,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( shaderIntegerFunctions2 == rhs.shaderIntegerFunctions2 );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderIntegerFunctions2 == rhs.shaderIntegerFunctions2 );
# endif
}
@@ -65339,19 +68979,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2 = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) ==
- sizeof( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>::value,
- "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>
@@ -65359,34 +68990,217 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
};
+ struct PhysicalDeviceShaderModuleIdentifierFeaturesEXT
+ {
+ using NativeType = VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderModuleIdentifierFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderModuleIdentifierFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderModuleIdentifier_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shaderModuleIdentifier( shaderModuleIdentifier_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceShaderModuleIdentifierFeaturesEXT( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceShaderModuleIdentifierFeaturesEXT( VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceShaderModuleIdentifierFeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderModuleIdentifierFeaturesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceShaderModuleIdentifierFeaturesEXT & operator=( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceShaderModuleIdentifierFeaturesEXT & operator=( VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderModuleIdentifierFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderModuleIdentifierFeaturesEXT &
+ setShaderModuleIdentifier( VULKAN_HPP_NAMESPACE::Bool32 shaderModuleIdentifier_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderModuleIdentifier = shaderModuleIdentifier_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, shaderModuleIdentifier );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderModuleIdentifier == rhs.shaderModuleIdentifier );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderModuleIdentifierFeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderModuleIdentifier = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceShaderModuleIdentifierFeaturesEXT>
+ {
+ using Type = PhysicalDeviceShaderModuleIdentifierFeaturesEXT;
+ };
+
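  // Illustrative sketch, not part of the patch: the newly added feature struct can be queried
  // through a StructureChain. Assumes enhanced mode (StructureChain and the templated
  // getFeatures2 overload available) and a valid vk::PhysicalDevice named physicalDevice.
  //
  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                            vk::PhysicalDeviceShaderModuleIdentifierFeaturesEXT>();
  //   bool supportsModuleIdentifiers =
  //     chain.get<vk::PhysicalDeviceShaderModuleIdentifierFeaturesEXT>().shaderModuleIdentifier;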
+ struct PhysicalDeviceShaderModuleIdentifierPropertiesEXT
+ {
+ using NativeType = VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderModuleIdentifierPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14
+ PhysicalDeviceShaderModuleIdentifierPropertiesEXT( std::array<uint8_t, VK_UUID_SIZE> const & shaderModuleIdentifierAlgorithmUUID_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shaderModuleIdentifierAlgorithmUUID( shaderModuleIdentifierAlgorithmUUID_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR_14
+ PhysicalDeviceShaderModuleIdentifierPropertiesEXT( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceShaderModuleIdentifierPropertiesEXT( VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceShaderModuleIdentifierPropertiesEXT( *reinterpret_cast<PhysicalDeviceShaderModuleIdentifierPropertiesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceShaderModuleIdentifierPropertiesEXT &
+ operator=( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceShaderModuleIdentifierPropertiesEXT & operator=( VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierPropertiesEXT const *>( &rhs );
+ return *this;
+ }
+
+ operator VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, shaderModuleIdentifierAlgorithmUUID );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderModuleIdentifierAlgorithmUUID == rhs.shaderModuleIdentifierAlgorithmUUID );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderModuleIdentifierPropertiesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> shaderModuleIdentifierAlgorithmUUID = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceShaderModuleIdentifierPropertiesEXT>
+ {
+ using Type = PhysicalDeviceShaderModuleIdentifierPropertiesEXT;
+ };
+
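  // Illustrative sketch, not part of the patch: the companion properties struct carries the
  // identifier algorithm UUID, which can be read back through a properties chain. Assumes a
  // valid vk::PhysicalDevice named physicalDevice and a driver exposing
  // VK_EXT_shader_module_identifier.
  //
  //   vk::PhysicalDeviceShaderModuleIdentifierPropertiesEXT identifierProps{};
  //   vk::PhysicalDeviceProperties2                         props2{};
  //   props2.pNext = &identifierProps;
  //   physicalDevice.getProperties2( &props2 );
  //   // identifierProps.shaderModuleIdentifierAlgorithmUUID now holds the VK_UUID_SIZE bytes
  //   // identifying the driver's module-identifier algorithm.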
struct PhysicalDeviceShaderSMBuiltinsFeaturesNV
{
using NativeType = VkPhysicalDeviceShaderSMBuiltinsFeaturesNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ = {} )
- VULKAN_HPP_NOEXCEPT : shaderSMBuiltins( shaderSMBuiltins_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shaderSMBuiltins( shaderSMBuiltins_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV(
- PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderSMBuiltinsFeaturesNV( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceShaderSMBuiltinsFeaturesNV(
- *reinterpret_cast<PhysicalDeviceShaderSMBuiltinsFeaturesNV const *>( &rhs ) )
- {}
+ PhysicalDeviceShaderSMBuiltinsFeaturesNV( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceShaderSMBuiltinsFeaturesNV( *reinterpret_cast<PhysicalDeviceShaderSMBuiltinsFeaturesNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceShaderSMBuiltinsFeaturesNV &
- operator=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderSMBuiltinsFeaturesNV &
- operator=( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV const *>( &rhs );
return *this;
@@ -65399,25 +69213,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsFeaturesNV &
- setShaderSMBuiltins( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsFeaturesNV & setShaderSMBuiltins( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ ) VULKAN_HPP_NOEXCEPT
{
shaderSMBuiltins = shaderSMBuiltins_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsFeaturesNV *>( this );
}
- explicit operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -65434,7 +69247,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSMBuiltins == rhs.shaderSMBuiltins );
@@ -65452,15 +69265,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV ) ==
- sizeof( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV>::value,
- "PhysicalDeviceShaderSMBuiltinsFeaturesNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV>
@@ -65473,48 +69277,44 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceShaderSMBuiltinsPropertiesNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR
- PhysicalDeviceShaderSMBuiltinsPropertiesNV( uint32_t shaderSMCount_ = {},
- uint32_t shaderWarpsPerSM_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderSMCount( shaderSMCount_ )
+ PhysicalDeviceShaderSMBuiltinsPropertiesNV( uint32_t shaderSMCount_ = {}, uint32_t shaderWarpsPerSM_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shaderSMCount( shaderSMCount_ )
, shaderWarpsPerSM( shaderWarpsPerSM_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV(
- PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderSMBuiltinsPropertiesNV( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceShaderSMBuiltinsPropertiesNV(
- *reinterpret_cast<PhysicalDeviceShaderSMBuiltinsPropertiesNV const *>( &rhs ) )
- {}
+ PhysicalDeviceShaderSMBuiltinsPropertiesNV( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceShaderSMBuiltinsPropertiesNV( *reinterpret_cast<PhysicalDeviceShaderSMBuiltinsPropertiesNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceShaderSMBuiltinsPropertiesNV &
- operator=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderSMBuiltinsPropertiesNV &
- operator=( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsPropertiesNV *>( this );
}
- explicit operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsPropertiesNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -65531,11 +69331,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSMCount == rhs.shaderSMCount ) &&
- ( shaderWarpsPerSM == rhs.shaderWarpsPerSM );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSMCount == rhs.shaderSMCount ) && ( shaderWarpsPerSM == rhs.shaderWarpsPerSM );
# endif
}
@@ -65551,15 +69350,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t shaderSMCount = {};
uint32_t shaderWarpsPerSM = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV ) ==
- sizeof( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV>::value,
- "PhysicalDeviceShaderSMBuiltinsPropertiesNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV>
@@ -65572,63 +69362,60 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures(
- VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures(
- PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceShaderSubgroupExtendedTypesFeatures( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceShaderSubgroupExtendedTypesFeatures(
- *reinterpret_cast<PhysicalDeviceShaderSubgroupExtendedTypesFeatures const *>( &rhs ) )
- {}
+ PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceShaderSubgroupExtendedTypesFeatures( *reinterpret_cast<PhysicalDeviceShaderSubgroupExtendedTypesFeatures const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceShaderSubgroupExtendedTypesFeatures &
operator=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderSubgroupExtendedTypesFeatures &
- operator=( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderSubgroupExtendedTypesFeatures & operator=( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupExtendedTypesFeatures &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupExtendedTypesFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupExtendedTypesFeatures &
- setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT
+ setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT
{
shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures *>( this );
}
- explicit operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -65645,11 +69432,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes );
# endif
}
@@ -65660,19 +69446,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures ) ==
- sizeof( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures>::value,
- "PhysicalDeviceShaderSubgroupExtendedTypesFeatures is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures>
@@ -65686,23 +69463,24 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR(
- VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderSubgroupUniformControlFlow( shaderSubgroupUniformControlFlow_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shaderSubgroupUniformControlFlow( shaderSubgroupUniformControlFlow_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR(
- PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs )
+ VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR(
- VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR(
*reinterpret_cast<PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &
@@ -65711,40 +69489,36 @@ namespace VULKAN_HPP_NAMESPACE
PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &
operator=( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const *>(
- &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &
- setShaderSubgroupUniformControlFlow( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow_ )
- VULKAN_HPP_NOEXCEPT
+ setShaderSubgroupUniformControlFlow( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow_ ) VULKAN_HPP_NOEXCEPT
{
shaderSubgroupUniformControlFlow = shaderSubgroupUniformControlFlow_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR *>( this );
}
- explicit operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -65761,11 +69535,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( shaderSubgroupUniformControlFlow == rhs.shaderSubgroupUniformControlFlow );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSubgroupUniformControlFlow == rhs.shaderSubgroupUniformControlFlow );
# endif
}
@@ -65776,21 +69549,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType =
- StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR ) ==
- sizeof( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<
- VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR>::value,
- "PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR>
@@ -65803,62 +69565,59 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceShaderTerminateInvocationFeatures;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeatures(
- VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderTerminateInvocation( shaderTerminateInvocation_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shaderTerminateInvocation( shaderTerminateInvocation_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeatures(
- PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceShaderTerminateInvocationFeatures( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderTerminateInvocationFeatures( VkPhysicalDeviceShaderTerminateInvocationFeatures const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceShaderTerminateInvocationFeatures(
- *reinterpret_cast<PhysicalDeviceShaderTerminateInvocationFeatures const *>( &rhs ) )
- {}
+ PhysicalDeviceShaderTerminateInvocationFeatures( VkPhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceShaderTerminateInvocationFeatures( *reinterpret_cast<PhysicalDeviceShaderTerminateInvocationFeatures const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceShaderTerminateInvocationFeatures &
- operator=( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceShaderTerminateInvocationFeatures & operator=( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShaderTerminateInvocationFeatures &
- operator=( VkPhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderTerminateInvocationFeatures & operator=( VkPhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTerminateInvocationFeatures &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTerminateInvocationFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTerminateInvocationFeatures &
- setShaderTerminateInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ ) VULKAN_HPP_NOEXCEPT
+ setShaderTerminateInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ ) VULKAN_HPP_NOEXCEPT
{
shaderTerminateInvocation = shaderTerminateInvocation_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceShaderTerminateInvocationFeatures const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderTerminateInvocationFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderTerminateInvocationFeatures *>( this );
}
- explicit operator VkPhysicalDeviceShaderTerminateInvocationFeatures &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderTerminateInvocationFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderTerminateInvocationFeatures *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -65875,11 +69634,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( shaderTerminateInvocation == rhs.shaderTerminateInvocation );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderTerminateInvocation == rhs.shaderTerminateInvocation );
# endif
}
@@ -65890,19 +69648,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures ) ==
- sizeof( VkPhysicalDeviceShaderTerminateInvocationFeatures ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures>::value,
- "PhysicalDeviceShaderTerminateInvocationFeatures is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures>
@@ -65916,32 +69665,29 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceShadingRateImageFeaturesNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV(
- VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ = {} ) VULKAN_HPP_NOEXCEPT
- : shadingRateImage( shadingRateImage_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shadingRateImage( shadingRateImage_ )
, shadingRateCoarseSampleOrder( shadingRateCoarseSampleOrder_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV(
- PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShadingRateImageFeaturesNV( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceShadingRateImageFeaturesNV(
- *reinterpret_cast<PhysicalDeviceShadingRateImageFeaturesNV const *>( &rhs ) )
- {}
+ PhysicalDeviceShadingRateImageFeaturesNV( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceShadingRateImageFeaturesNV( *reinterpret_cast<PhysicalDeviceShadingRateImageFeaturesNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceShadingRateImageFeaturesNV &
- operator=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceShadingRateImageFeaturesNV & operator=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShadingRateImageFeaturesNV &
- operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShadingRateImageFeaturesNV & operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV const *>( &rhs );
return *this;
@@ -65954,39 +69700,35 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV &
- setShadingRateImage( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateImage( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ ) VULKAN_HPP_NOEXCEPT
{
shadingRateImage = shadingRateImage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV &
- setShadingRateCoarseSampleOrder( VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ ) VULKAN_HPP_NOEXCEPT
+ setShadingRateCoarseSampleOrder( VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ ) VULKAN_HPP_NOEXCEPT
{
shadingRateCoarseSampleOrder = shadingRateCoarseSampleOrder_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceShadingRateImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShadingRateImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShadingRateImageFeaturesNV *>( this );
}
- explicit operator VkPhysicalDeviceShadingRateImageFeaturesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShadingRateImageFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShadingRateImageFeaturesNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -65999,7 +69741,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateImage == rhs.shadingRateImage ) &&
@@ -66014,20 +69756,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage = {};
VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV ) ==
- sizeof( VkPhysicalDeviceShadingRateImageFeaturesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV>::value,
- "PhysicalDeviceShadingRateImageFeaturesNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShadingRateImageFeaturesNV>
@@ -66040,58 +69773,51 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceShadingRateImagePropertiesNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceShadingRateImagePropertiesNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceShadingRateImagePropertiesNV( VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize_ = {},
- uint32_t shadingRatePaletteSize_ = {},
- uint32_t shadingRateMaxCoarseSamples_ = {} ) VULKAN_HPP_NOEXCEPT
- : shadingRateTexelSize( shadingRateTexelSize_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV( VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize_ = {},
+ uint32_t shadingRatePaletteSize_ = {},
+ uint32_t shadingRateMaxCoarseSamples_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shadingRateTexelSize( shadingRateTexelSize_ )
, shadingRatePaletteSize( shadingRatePaletteSize_ )
, shadingRateMaxCoarseSamples( shadingRateMaxCoarseSamples_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV(
- PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShadingRateImagePropertiesNV( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceShadingRateImagePropertiesNV(
- *reinterpret_cast<PhysicalDeviceShadingRateImagePropertiesNV const *>( &rhs ) )
- {}
+ PhysicalDeviceShadingRateImagePropertiesNV( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceShadingRateImagePropertiesNV( *reinterpret_cast<PhysicalDeviceShadingRateImagePropertiesNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceShadingRateImagePropertiesNV &
- operator=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceShadingRateImagePropertiesNV & operator=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceShadingRateImagePropertiesNV &
- operator=( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShadingRateImagePropertiesNV & operator=( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceShadingRateImagePropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShadingRateImagePropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShadingRateImagePropertiesNV *>( this );
}
- explicit operator VkPhysicalDeviceShadingRateImagePropertiesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShadingRateImagePropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShadingRateImagePropertiesNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::Extent2D const &,
- uint32_t const &,
- uint32_t const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &, uint32_t const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -66104,12 +69830,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateTexelSize == rhs.shadingRateTexelSize ) &&
- ( shadingRatePaletteSize == rhs.shadingRatePaletteSize ) &&
- ( shadingRateMaxCoarseSamples == rhs.shadingRateMaxCoarseSamples );
+ ( shadingRatePaletteSize == rhs.shadingRatePaletteSize ) && ( shadingRateMaxCoarseSamples == rhs.shadingRateMaxCoarseSamples );
# endif
}
@@ -66120,21 +69845,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize = {};
uint32_t shadingRatePaletteSize = {};
uint32_t shadingRateMaxCoarseSamples = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV ) ==
- sizeof( VkPhysicalDeviceShadingRateImagePropertiesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV>::value,
- "PhysicalDeviceShadingRateImagePropertiesNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShadingRateImagePropertiesNV>
@@ -66147,36 +69863,36 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceSparseImageFormatInfo2;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceSparseImageFormatInfo2;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSparseImageFormatInfo2;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2(
- VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D,
- VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {},
- VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal ) VULKAN_HPP_NOEXCEPT
- : format( format_ )
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceSparseImageFormatInfo2( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D,
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {},
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , format( format_ )
, type( type_ )
, samples( samples_ )
, usage( usage_ )
, tiling( tiling_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( PhysicalDeviceSparseImageFormatInfo2 const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSparseImageFormatInfo2( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceSparseImageFormatInfo2( *reinterpret_cast<PhysicalDeviceSparseImageFormatInfo2 const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceSparseImageFormatInfo2 &
- operator=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceSparseImageFormatInfo2 & operator=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceSparseImageFormatInfo2 &
- operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSparseImageFormatInfo2 & operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 const *>( &rhs );
return *this;
@@ -66189,53 +69905,48 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 &
- setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 &
- setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 &
- setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
{
samples = samples_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 &
- setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
{
usage = usage_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 &
- setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
{
tiling = tiling_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceSparseImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSparseImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( this );
}
- explicit operator VkPhysicalDeviceSparseImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSparseImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSparseImageFormatInfo2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -66258,11 +69969,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( type == rhs.type ) &&
- ( samples == rhs.samples ) && ( usage == rhs.usage ) && ( tiling == rhs.tiling );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( type == rhs.type ) && ( samples == rhs.samples ) &&
+ ( usage == rhs.usage ) && ( tiling == rhs.tiling );
# endif
}
@@ -66281,14 +69992,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 ) ==
- sizeof( VkPhysicalDeviceSparseImageFormatInfo2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2>::value,
- "PhysicalDeviceSparseImageFormatInfo2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSparseImageFormatInfo2>
@@ -66302,30 +70005,31 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceSubgroupProperties;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupProperties;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties(
- uint32_t subgroupSize_ = {},
- VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages_ = {},
- VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages_ = {} ) VULKAN_HPP_NOEXCEPT
- : subgroupSize( subgroupSize_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( uint32_t subgroupSize_ = {},
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages_ = {},
+ VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , subgroupSize( subgroupSize_ )
, supportedStages( supportedStages_ )
, supportedOperations( supportedOperations_ )
, quadOperationsInAllStages( quadOperationsInAllStages_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceSubgroupProperties( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSubgroupProperties( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceSubgroupProperties( *reinterpret_cast<PhysicalDeviceSubgroupProperties const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceSubgroupProperties &
- operator=( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceSubgroupProperties & operator=( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSubgroupProperties & operator=( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -66333,17 +70037,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkPhysicalDeviceSubgroupProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSubgroupProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSubgroupProperties *>( this );
}
- explicit operator VkPhysicalDeviceSubgroupProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSubgroupProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSubgroupProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -66365,12 +70069,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceSubgroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subgroupSize == rhs.subgroupSize ) &&
- ( supportedStages == rhs.supportedStages ) && ( supportedOperations == rhs.supportedOperations ) &&
- ( quadOperationsInAllStages == rhs.quadOperationsInAllStages );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subgroupSize == rhs.subgroupSize ) && ( supportedStages == rhs.supportedStages ) &&
+ ( supportedOperations == rhs.supportedOperations ) && ( quadOperationsInAllStages == rhs.quadOperationsInAllStages );
# endif
}
@@ -66381,21 +70084,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupProperties;
- void * pNext = {};
- uint32_t subgroupSize = {};
- VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages = {};
- VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupProperties;
+ void * pNext = {};
+ uint32_t subgroupSize = {};
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages = {};
+ VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations = {};
VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties ) ==
- sizeof( VkPhysicalDeviceSubgroupProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties>::value,
- "PhysicalDeviceSubgroupProperties is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupProperties>
@@ -66408,32 +70103,29 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceSubgroupSizeControlFeatures;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceSubgroupSizeControlFeatures;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupSizeControlFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeatures(
- VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {} ) VULKAN_HPP_NOEXCEPT
- : subgroupSizeControl( subgroupSizeControl_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeatures( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , subgroupSizeControl( subgroupSizeControl_ )
, computeFullSubgroups( computeFullSubgroups_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeatures(
- PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeatures( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceSubgroupSizeControlFeatures( VkPhysicalDeviceSubgroupSizeControlFeatures const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceSubgroupSizeControlFeatures(
- *reinterpret_cast<PhysicalDeviceSubgroupSizeControlFeatures const *>( &rhs ) )
- {}
+ PhysicalDeviceSubgroupSizeControlFeatures( VkPhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceSubgroupSizeControlFeatures( *reinterpret_cast<PhysicalDeviceSubgroupSizeControlFeatures const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceSubgroupSizeControlFeatures &
- operator=( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceSubgroupSizeControlFeatures & operator=( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceSubgroupSizeControlFeatures &
- operator=( VkPhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSubgroupSizeControlFeatures & operator=( VkPhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures const *>( &rhs );
return *this;
@@ -66447,38 +70139,35 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures &
- setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT
+ setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT
{
subgroupSizeControl = subgroupSizeControl_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures &
- setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT
+ setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT
{
computeFullSubgroups = computeFullSubgroups_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceSubgroupSizeControlFeatures const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSubgroupSizeControlFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlFeatures *>( this );
}
- explicit operator VkPhysicalDeviceSubgroupSizeControlFeatures &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSubgroupSizeControlFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlFeatures *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -66491,7 +70180,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subgroupSizeControl == rhs.subgroupSizeControl ) &&
@@ -66506,20 +70195,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlFeatures;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlFeatures;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {};
VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures ) ==
- sizeof( VkPhysicalDeviceSubgroupSizeControlFeatures ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures>::value,
- "PhysicalDeviceSubgroupSizeControlFeatures is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupSizeControlFeatures>
@@ -66533,52 +70213,49 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceSubgroupSizeControlProperties;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceSubgroupSizeControlProperties;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupSizeControlProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlProperties(
- uint32_t minSubgroupSize_ = {},
- uint32_t maxSubgroupSize_ = {},
- uint32_t maxComputeWorkgroupSubgroups_ = {},
- VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {} ) VULKAN_HPP_NOEXCEPT
- : minSubgroupSize( minSubgroupSize_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlProperties( uint32_t minSubgroupSize_ = {},
+ uint32_t maxSubgroupSize_ = {},
+ uint32_t maxComputeWorkgroupSubgroups_ = {},
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , minSubgroupSize( minSubgroupSize_ )
, maxSubgroupSize( maxSubgroupSize_ )
, maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ )
, requiredSubgroupSizeStages( requiredSubgroupSizeStages_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlProperties(
- PhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlProperties( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceSubgroupSizeControlProperties( VkPhysicalDeviceSubgroupSizeControlProperties const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceSubgroupSizeControlProperties(
- *reinterpret_cast<PhysicalDeviceSubgroupSizeControlProperties const *>( &rhs ) )
- {}
+ PhysicalDeviceSubgroupSizeControlProperties( VkPhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceSubgroupSizeControlProperties( *reinterpret_cast<PhysicalDeviceSubgroupSizeControlProperties const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceSubgroupSizeControlProperties &
- operator=( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceSubgroupSizeControlProperties & operator=( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceSubgroupSizeControlProperties &
- operator=( VkPhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSubgroupSizeControlProperties & operator=( VkPhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceSubgroupSizeControlProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSubgroupSizeControlProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlProperties *>( this );
}
- explicit operator VkPhysicalDeviceSubgroupSizeControlProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSubgroupSizeControlProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -66591,8 +70268,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- sType, pNext, minSubgroupSize, maxSubgroupSize, maxComputeWorkgroupSubgroups, requiredSubgroupSizeStages );
+ return std::tie( sType, pNext, minSubgroupSize, maxSubgroupSize, maxComputeWorkgroupSubgroups, requiredSubgroupSizeStages );
}
#endif
@@ -66601,13 +70277,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minSubgroupSize == rhs.minSubgroupSize ) &&
- ( maxSubgroupSize == rhs.maxSubgroupSize ) &&
- ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups ) &&
- ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minSubgroupSize == rhs.minSubgroupSize ) && ( maxSubgroupSize == rhs.maxSubgroupSize ) &&
+ ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups ) && ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages );
# endif
}
@@ -66618,22 +70292,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlProperties;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlProperties;
+ void * pNext = {};
uint32_t minSubgroupSize = {};
uint32_t maxSubgroupSize = {};
uint32_t maxComputeWorkgroupSubgroups = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties ) ==
- sizeof( VkPhysicalDeviceSubgroupSizeControlProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties>::value,
- "PhysicalDeviceSubgroupSizeControlProperties is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupSizeControlProperties>
@@ -66642,34 +70307,131 @@ namespace VULKAN_HPP_NAMESPACE
};
using PhysicalDeviceSubgroupSizeControlPropertiesEXT = PhysicalDeviceSubgroupSizeControlProperties;
+ struct PhysicalDeviceSubpassMergeFeedbackFeaturesEXT
+ {
+ using NativeType = VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 subpassMergeFeedback_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , subpassMergeFeedback( subpassMergeFeedback_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( *reinterpret_cast<PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & operator=( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & operator=( VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassMergeFeedbackFeaturesEXT &
+ setSubpassMergeFeedback( VULKAN_HPP_NAMESPACE::Bool32 subpassMergeFeedback_ ) VULKAN_HPP_NOEXCEPT
+ {
+ subpassMergeFeedback = subpassMergeFeedback_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT *>( this );
+ }
+
+ operator VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, subpassMergeFeedback );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subpassMergeFeedback == rhs.subpassMergeFeedback );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 subpassMergeFeedback = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT>
+ {
+ using Type = PhysicalDeviceSubpassMergeFeedbackFeaturesEXT;
+ };
+
struct PhysicalDeviceSubpassShadingFeaturesHUAWEI
{
using NativeType = VkPhysicalDeviceSubpassShadingFeaturesHUAWEI;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingFeaturesHUAWEI( VULKAN_HPP_NAMESPACE::Bool32 subpassShading_ = {} )
- VULKAN_HPP_NOEXCEPT : subpassShading( subpassShading_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingFeaturesHUAWEI( VULKAN_HPP_NAMESPACE::Bool32 subpassShading_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , subpassShading( subpassShading_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingFeaturesHUAWEI(
- PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingFeaturesHUAWEI( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceSubpassShadingFeaturesHUAWEI( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceSubpassShadingFeaturesHUAWEI(
- *reinterpret_cast<PhysicalDeviceSubpassShadingFeaturesHUAWEI const *>( &rhs ) )
- {}
+ PhysicalDeviceSubpassShadingFeaturesHUAWEI( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceSubpassShadingFeaturesHUAWEI( *reinterpret_cast<PhysicalDeviceSubpassShadingFeaturesHUAWEI const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceSubpassShadingFeaturesHUAWEI &
- operator=( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceSubpassShadingFeaturesHUAWEI & operator=( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceSubpassShadingFeaturesHUAWEI &
- operator=( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSubpassShadingFeaturesHUAWEI & operator=( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI const *>( &rhs );
return *this;
@@ -66682,25 +70444,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassShadingFeaturesHUAWEI &
- setSubpassShading( VULKAN_HPP_NAMESPACE::Bool32 subpassShading_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassShadingFeaturesHUAWEI & setSubpassShading( VULKAN_HPP_NAMESPACE::Bool32 subpassShading_ ) VULKAN_HPP_NOEXCEPT
{
subpassShading = subpassShading_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSubpassShadingFeaturesHUAWEI *>( this );
}
- explicit operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSubpassShadingFeaturesHUAWEI *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -66717,7 +70478,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subpassShading == rhs.subpassShading );
@@ -66735,15 +70496,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 subpassShading = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI ) ==
- sizeof( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI>::value,
- "PhysicalDeviceSubpassShadingFeaturesHUAWEI is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI>
@@ -66756,46 +70508,43 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceSubpassShadingPropertiesHUAWEI;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingPropertiesHUAWEI(
- uint32_t maxSubpassShadingWorkgroupSizeAspectRatio_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxSubpassShadingWorkgroupSizeAspectRatio( maxSubpassShadingWorkgroupSizeAspectRatio_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingPropertiesHUAWEI( uint32_t maxSubpassShadingWorkgroupSizeAspectRatio_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxSubpassShadingWorkgroupSizeAspectRatio( maxSubpassShadingWorkgroupSizeAspectRatio_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingPropertiesHUAWEI(
- PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingPropertiesHUAWEI( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceSubpassShadingPropertiesHUAWEI( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceSubpassShadingPropertiesHUAWEI(
- *reinterpret_cast<PhysicalDeviceSubpassShadingPropertiesHUAWEI const *>( &rhs ) )
- {}
+ PhysicalDeviceSubpassShadingPropertiesHUAWEI( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceSubpassShadingPropertiesHUAWEI( *reinterpret_cast<PhysicalDeviceSubpassShadingPropertiesHUAWEI const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceSubpassShadingPropertiesHUAWEI &
- operator=( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceSubpassShadingPropertiesHUAWEI & operator=( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceSubpassShadingPropertiesHUAWEI &
- operator=( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSubpassShadingPropertiesHUAWEI & operator=( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSubpassShadingPropertiesHUAWEI *>( this );
}
- explicit operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSubpassShadingPropertiesHUAWEI *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -66812,11 +70561,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( maxSubpassShadingWorkgroupSizeAspectRatio == rhs.maxSubpassShadingWorkgroupSizeAspectRatio );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSubpassShadingWorkgroupSizeAspectRatio == rhs.maxSubpassShadingWorkgroupSizeAspectRatio );
# endif
}
@@ -66827,19 +70575,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI;
+ void * pNext = {};
uint32_t maxSubpassShadingWorkgroupSizeAspectRatio = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI ) ==
- sizeof( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI>::value,
- "PhysicalDeviceSubpassShadingPropertiesHUAWEI is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI>
@@ -66855,21 +70594,21 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSurfaceInfo2KHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceSurfaceInfo2KHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {} ) VULKAN_HPP_NOEXCEPT
- : surface( surface_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , surface( surface_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceSurfaceInfo2KHR( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceSurfaceInfo2KHR( *reinterpret_cast<PhysicalDeviceSurfaceInfo2KHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceSurfaceInfo2KHR &
- operator=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceSurfaceInfo2KHR & operator=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSurfaceInfo2KHR & operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -66884,30 +70623,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSurfaceInfo2KHR &
- setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSurfaceInfo2KHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
{
surface = surface_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceSurfaceInfo2KHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSurfaceInfo2KHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( this );
}
- explicit operator VkPhysicalDeviceSurfaceInfo2KHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSurfaceInfo2KHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSurfaceInfo2KHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::
- tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SurfaceKHR const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SurfaceKHR const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -66920,7 +70657,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceSurfaceInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surface == rhs.surface );
@@ -66938,14 +70675,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR ) ==
- sizeof( VkPhysicalDeviceSurfaceInfo2KHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR>::value,
- "PhysicalDeviceSurfaceInfo2KHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSurfaceInfo2KHR>
@@ -66958,29 +70687,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceSynchronization2Features;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceSynchronization2Features;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSynchronization2Features;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceSynchronization2Features( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {} ) VULKAN_HPP_NOEXCEPT
- : synchronization2( synchronization2_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2Features( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , synchronization2( synchronization2_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2Features( PhysicalDeviceSynchronization2Features const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2Features( PhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSynchronization2Features( VkPhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceSynchronization2Features(
- *reinterpret_cast<PhysicalDeviceSynchronization2Features const *>( &rhs ) )
- {}
+ : PhysicalDeviceSynchronization2Features( *reinterpret_cast<PhysicalDeviceSynchronization2Features const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceSynchronization2Features &
- operator=( PhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceSynchronization2Features & operator=( PhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceSynchronization2Features &
- operator=( VkPhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSynchronization2Features & operator=( VkPhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features const *>( &rhs );
return *this;
@@ -66993,25 +70720,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSynchronization2Features &
- setSynchronization2( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSynchronization2Features & setSynchronization2( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ ) VULKAN_HPP_NOEXCEPT
{
synchronization2 = synchronization2_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceSynchronization2Features const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSynchronization2Features const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSynchronization2Features *>( this );
}
- explicit operator VkPhysicalDeviceSynchronization2Features &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSynchronization2Features &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSynchronization2Features *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -67028,7 +70754,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceSynchronization2Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( synchronization2 == rhs.synchronization2 );
@@ -67046,15 +70772,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 synchronization2 = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features ) ==
- sizeof( VkPhysicalDeviceSynchronization2Features ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features>::value,
- "PhysicalDeviceSynchronization2Features is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSynchronization2Features>
@@ -67068,62 +70785,59 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
- : texelBufferAlignment( texelBufferAlignment_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , texelBufferAlignment( texelBufferAlignment_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT(
- PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceTexelBufferAlignmentFeaturesEXT( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceTexelBufferAlignmentFeaturesEXT(
- *reinterpret_cast<PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceTexelBufferAlignmentFeaturesEXT( *reinterpret_cast<PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceTexelBufferAlignmentFeaturesEXT &
- operator=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceTexelBufferAlignmentFeaturesEXT &
- operator=( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentFeaturesEXT &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentFeaturesEXT &
- setTexelBufferAlignment( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ ) VULKAN_HPP_NOEXCEPT
+ setTexelBufferAlignment( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ ) VULKAN_HPP_NOEXCEPT
{
texelBufferAlignment = texelBufferAlignment_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -67140,7 +70854,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( texelBufferAlignment == rhs.texelBufferAlignment );
@@ -67154,19 +70868,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) ==
- sizeof( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT>::value,
- "PhysicalDeviceTexelBufferAlignmentFeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT>
@@ -67179,52 +70884,49 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceTexelBufferAlignmentProperties;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceTexelBufferAlignmentProperties;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentProperties(
- VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
- : storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentProperties( VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ )
, storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ )
, uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ )
, uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentProperties(
- PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentProperties( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceTexelBufferAlignmentProperties( VkPhysicalDeviceTexelBufferAlignmentProperties const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceTexelBufferAlignmentProperties(
- *reinterpret_cast<PhysicalDeviceTexelBufferAlignmentProperties const *>( &rhs ) )
- {}
+ PhysicalDeviceTexelBufferAlignmentProperties( VkPhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceTexelBufferAlignmentProperties( *reinterpret_cast<PhysicalDeviceTexelBufferAlignmentProperties const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceTexelBufferAlignmentProperties &
- operator=( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceTexelBufferAlignmentProperties & operator=( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceTexelBufferAlignmentProperties &
- operator=( VkPhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceTexelBufferAlignmentProperties & operator=( VkPhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceTexelBufferAlignmentProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceTexelBufferAlignmentProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentProperties *>( this );
}
- explicit operator VkPhysicalDeviceTexelBufferAlignmentProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceTexelBufferAlignmentProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -67251,11 +70953,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes ) &&
( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment ) &&
( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes ) &&
( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment );
@@ -67269,22 +70970,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentProperties;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentProperties;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {};
VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {};
VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {};
VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties ) ==
- sizeof( VkPhysicalDeviceTexelBufferAlignmentProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties>::value,
- "PhysicalDeviceTexelBufferAlignmentProperties is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceTexelBufferAlignmentProperties>
@@ -67298,62 +70990,59 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceTextureCompressionASTCHDRFeatures;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeatures(
- VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {} ) VULKAN_HPP_NOEXCEPT
- : textureCompressionASTC_HDR( textureCompressionASTC_HDR_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeatures( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , textureCompressionASTC_HDR( textureCompressionASTC_HDR_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeatures(
- PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceTextureCompressionASTCHDRFeatures( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceTextureCompressionASTCHDRFeatures( VkPhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceTextureCompressionASTCHDRFeatures(
- *reinterpret_cast<PhysicalDeviceTextureCompressionASTCHDRFeatures const *>( &rhs ) )
- {}
+ PhysicalDeviceTextureCompressionASTCHDRFeatures( VkPhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceTextureCompressionASTCHDRFeatures( *reinterpret_cast<PhysicalDeviceTextureCompressionASTCHDRFeatures const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceTextureCompressionASTCHDRFeatures &
- operator=( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceTextureCompressionASTCHDRFeatures & operator=( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceTextureCompressionASTCHDRFeatures &
- operator=( VkPhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceTextureCompressionASTCHDRFeatures & operator=( VkPhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTextureCompressionASTCHDRFeatures &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTextureCompressionASTCHDRFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTextureCompressionASTCHDRFeatures &
- setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT
+ setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT
{
textureCompressionASTC_HDR = textureCompressionASTC_HDR_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTextureCompressionASTCHDRFeatures *>( this );
}
- explicit operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceTextureCompressionASTCHDRFeatures *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -67370,11 +71059,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR );
# endif
}
@@ -67385,19 +71073,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures ) ==
- sizeof( VkPhysicalDeviceTextureCompressionASTCHDRFeatures ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures>::value,
- "PhysicalDeviceTextureCompressionASTCHDRFeatures is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures>
@@ -67406,33 +71085,129 @@ namespace VULKAN_HPP_NAMESPACE
};
using PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT = PhysicalDeviceTextureCompressionASTCHDRFeatures;
+ struct PhysicalDeviceTilePropertiesFeaturesQCOM
+ {
+ using NativeType = VkPhysicalDeviceTilePropertiesFeaturesQCOM;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTilePropertiesFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 tileProperties_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , tileProperties( tileProperties_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTilePropertiesFeaturesQCOM( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceTilePropertiesFeaturesQCOM( VkPhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceTilePropertiesFeaturesQCOM( *reinterpret_cast<PhysicalDeviceTilePropertiesFeaturesQCOM const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PhysicalDeviceTilePropertiesFeaturesQCOM & operator=( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceTilePropertiesFeaturesQCOM & operator=( VkPhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTilePropertiesFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTilePropertiesFeaturesQCOM & setTileProperties( VULKAN_HPP_NAMESPACE::Bool32 tileProperties_ ) VULKAN_HPP_NOEXCEPT
+ {
+ tileProperties = tileProperties_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPhysicalDeviceTilePropertiesFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceTilePropertiesFeaturesQCOM *>( this );
+ }
+
+ operator VkPhysicalDeviceTilePropertiesFeaturesQCOM &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceTilePropertiesFeaturesQCOM *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, tileProperties );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceTilePropertiesFeaturesQCOM const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tileProperties == rhs.tileProperties );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 tileProperties = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM>
+ {
+ using Type = PhysicalDeviceTilePropertiesFeaturesQCOM;
+ };
+
struct PhysicalDeviceTimelineSemaphoreFeatures
{
using NativeType = VkPhysicalDeviceTimelineSemaphoreFeatures;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceTimelineSemaphoreFeatures;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {} )
- VULKAN_HPP_NOEXCEPT : timelineSemaphore( timelineSemaphore_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , timelineSemaphore( timelineSemaphore_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( PhysicalDeviceTimelineSemaphoreFeatures const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTimelineSemaphoreFeatures( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceTimelineSemaphoreFeatures(
- *reinterpret_cast<PhysicalDeviceTimelineSemaphoreFeatures const *>( &rhs ) )
- {}
+ : PhysicalDeviceTimelineSemaphoreFeatures( *reinterpret_cast<PhysicalDeviceTimelineSemaphoreFeatures const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceTimelineSemaphoreFeatures &
- operator=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceTimelineSemaphoreFeatures & operator=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceTimelineSemaphoreFeatures &
- operator=( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceTimelineSemaphoreFeatures & operator=( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures const *>( &rhs );
return *this;
@@ -67453,17 +71228,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceTimelineSemaphoreFeatures const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceTimelineSemaphoreFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreFeatures *>( this );
}
- explicit operator VkPhysicalDeviceTimelineSemaphoreFeatures &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceTimelineSemaphoreFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreFeatures *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -67480,7 +71255,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timelineSemaphore == rhs.timelineSemaphore );
@@ -67498,15 +71273,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures ) ==
- sizeof( VkPhysicalDeviceTimelineSemaphoreFeatures ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures>::value,
- "PhysicalDeviceTimelineSemaphoreFeatures is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceTimelineSemaphoreFeatures>
@@ -67520,45 +71286,43 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceTimelineSemaphoreProperties;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceTimelineSemaphoreProperties;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( uint64_t maxTimelineSemaphoreValueDifference_ = {} )
- VULKAN_HPP_NOEXCEPT : maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( uint64_t maxTimelineSemaphoreValueDifference_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties(
- PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceTimelineSemaphoreProperties( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceTimelineSemaphoreProperties(
- *reinterpret_cast<PhysicalDeviceTimelineSemaphoreProperties const *>( &rhs ) )
- {}
+ PhysicalDeviceTimelineSemaphoreProperties( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceTimelineSemaphoreProperties( *reinterpret_cast<PhysicalDeviceTimelineSemaphoreProperties const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceTimelineSemaphoreProperties &
- operator=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceTimelineSemaphoreProperties & operator=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceTimelineSemaphoreProperties &
- operator=( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceTimelineSemaphoreProperties & operator=( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceTimelineSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceTimelineSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreProperties *>( this );
}
- explicit operator VkPhysicalDeviceTimelineSemaphoreProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceTimelineSemaphoreProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -67575,11 +71339,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference );
# endif
}
@@ -67590,19 +71353,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties;
+ void * pNext = {};
uint64_t maxTimelineSemaphoreValueDifference = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties ) ==
- sizeof( VkPhysicalDeviceTimelineSemaphoreProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties>::value,
- "PhysicalDeviceTimelineSemaphoreProperties is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceTimelineSemaphoreProperties>
@@ -67619,25 +71373,27 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceToolProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolProperties(
- std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & name_ = {},
- std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & version_ = {},
- VULKAN_HPP_NAMESPACE::ToolPurposeFlags purposes_ = {},
- std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
- std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & layer_ = {} ) VULKAN_HPP_NOEXCEPT
- : name( name_ )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolProperties( std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & name_ = {},
+ std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & version_ = {},
+ VULKAN_HPP_NAMESPACE::ToolPurposeFlags purposes_ = {},
+ std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
+ std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & layer_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , name( name_ )
, version( version_ )
, purposes( purposes_ )
, description( description_ )
, layer( layer_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14
- PhysicalDeviceToolProperties( PhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolProperties( PhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceToolProperties( VkPhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceToolProperties( *reinterpret_cast<PhysicalDeviceToolProperties const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceToolProperties & operator=( PhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -67648,17 +71404,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkPhysicalDeviceToolProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceToolProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceToolProperties *>( this );
}
- explicit operator VkPhysicalDeviceToolProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceToolProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceToolProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -67681,11 +71437,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( name == rhs.name ) && ( version == rhs.version ) &&
- ( purposes == rhs.purposes ) && ( description == rhs.description ) && ( layer == rhs.layer );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( name == rhs.name ) && ( version == rhs.version ) && ( purposes == rhs.purposes ) &&
+ ( description == rhs.description ) && ( layer == rhs.layer );
# endif
}
@@ -67696,22 +71452,14 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceToolProperties;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceToolProperties;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> name = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> version = {};
VULKAN_HPP_NAMESPACE::ToolPurposeFlags purposes = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> layer = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties ) ==
- sizeof( VkPhysicalDeviceToolProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>::value,
- "PhysicalDeviceToolProperties is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceToolProperties>
@@ -67725,32 +71473,29 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceTransformFeedbackFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ = {} ) VULKAN_HPP_NOEXCEPT
- : transformFeedback( transformFeedback_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , transformFeedback( transformFeedback_ )
, geometryStreams( geometryStreams_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT(
- PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceTransformFeedbackFeaturesEXT(
- *reinterpret_cast<PhysicalDeviceTransformFeedbackFeaturesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceTransformFeedbackFeaturesEXT( *reinterpret_cast<PhysicalDeviceTransformFeedbackFeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceTransformFeedbackFeaturesEXT &
- operator=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceTransformFeedbackFeaturesEXT &
- operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT const *>( &rhs );
return *this;
@@ -67770,32 +71515,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT &
- setGeometryStreams( VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & setGeometryStreams( VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ ) VULKAN_HPP_NOEXCEPT
{
geometryStreams = geometryStreams_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTransformFeedbackFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceTransformFeedbackFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceTransformFeedbackFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceTransformFeedbackFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -67808,11 +71549,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transformFeedback == rhs.transformFeedback ) &&
- ( geometryStreams == rhs.geometryStreams );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transformFeedback == rhs.transformFeedback ) && ( geometryStreams == rhs.geometryStreams );
# endif
}
@@ -67828,15 +71568,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 transformFeedback = {};
VULKAN_HPP_NAMESPACE::Bool32 geometryStreams = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT ) ==
- sizeof( VkPhysicalDeviceTransformFeedbackFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT>::value,
- "PhysicalDeviceTransformFeedbackFeaturesEXT is not nothrow_move_constructible!" );
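
Another recurring change in these hunks: each generated constructor gains a trailing void * pNext_ = nullptr parameter and initializes pNext first. A hedged sketch of how that shortens building a feature-query chain; it assumes the default header configuration, a vk::PhysicalDevice named physicalDevice, and that PhysicalDeviceFeatures2 received the same trailing parameter (that struct lies outside this section):

#include <vulkan/vulkan.hpp>

// Illustration only: chaining feature structs through the new trailing pNext_
// constructor parameter instead of calling setPNext() afterwards.
void queryTransformFeedbackFeatures( vk::PhysicalDevice physicalDevice )
{
    vk::PhysicalDeviceVertexInputDynamicStateFeaturesEXT vertexInputFeatures{};              // end of the chain
    vk::PhysicalDeviceTransformFeedbackFeaturesEXT       xfbFeatures{ {}, {}, &vertexInputFeatures };
    vk::PhysicalDeviceFeatures2                          features2{ {}, &xfbFeatures };      // assumed to take pNext_ too

    physicalDevice.getFeatures2( &features2 );  // the driver fills every struct in the pNext chain
}
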
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT>
@@ -67849,22 +71580,22 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceTransformFeedbackPropertiesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT(
- uint32_t maxTransformFeedbackStreams_ = {},
- uint32_t maxTransformFeedbackBuffers_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize_ = {},
- uint32_t maxTransformFeedbackStreamDataSize_ = {},
- uint32_t maxTransformFeedbackBufferDataSize_ = {},
- uint32_t maxTransformFeedbackBufferDataStride_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxTransformFeedbackStreams( maxTransformFeedbackStreams_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( uint32_t maxTransformFeedbackStreams_ = {},
+ uint32_t maxTransformFeedbackBuffers_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize_ = {},
+ uint32_t maxTransformFeedbackStreamDataSize_ = {},
+ uint32_t maxTransformFeedbackBufferDataSize_ = {},
+ uint32_t maxTransformFeedbackBufferDataStride_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxTransformFeedbackStreams( maxTransformFeedbackStreams_ )
, maxTransformFeedbackBuffers( maxTransformFeedbackBuffers_ )
, maxTransformFeedbackBufferSize( maxTransformFeedbackBufferSize_ )
, maxTransformFeedbackStreamDataSize( maxTransformFeedbackStreamDataSize_ )
@@ -67874,39 +71605,36 @@ namespace VULKAN_HPP_NAMESPACE
, transformFeedbackStreamsLinesTriangles( transformFeedbackStreamsLinesTriangles_ )
, transformFeedbackRasterizationStreamSelect( transformFeedbackRasterizationStreamSelect_ )
, transformFeedbackDraw( transformFeedbackDraw_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT(
- PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceTransformFeedbackPropertiesEXT( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceTransformFeedbackPropertiesEXT(
- *reinterpret_cast<PhysicalDeviceTransformFeedbackPropertiesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceTransformFeedbackPropertiesEXT( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceTransformFeedbackPropertiesEXT( *reinterpret_cast<PhysicalDeviceTransformFeedbackPropertiesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceTransformFeedbackPropertiesEXT &
- operator=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceTransformFeedbackPropertiesEXT &
- operator=( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTransformFeedbackPropertiesEXT *>( this );
}
- explicit operator VkPhysicalDeviceTransformFeedbackPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceTransformFeedbackPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceTransformFeedbackPropertiesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -67945,13 +71673,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( maxTransformFeedbackStreams == rhs.maxTransformFeedbackStreams ) &&
- ( maxTransformFeedbackBuffers == rhs.maxTransformFeedbackBuffers ) &&
- ( maxTransformFeedbackBufferSize == rhs.maxTransformFeedbackBufferSize ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxTransformFeedbackStreams == rhs.maxTransformFeedbackStreams ) &&
+ ( maxTransformFeedbackBuffers == rhs.maxTransformFeedbackBuffers ) && ( maxTransformFeedbackBufferSize == rhs.maxTransformFeedbackBufferSize ) &&
( maxTransformFeedbackStreamDataSize == rhs.maxTransformFeedbackStreamDataSize ) &&
( maxTransformFeedbackBufferDataSize == rhs.maxTransformFeedbackBufferDataSize ) &&
( maxTransformFeedbackBufferDataStride == rhs.maxTransformFeedbackBufferDataStride ) &&
@@ -67969,8 +71695,8 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT;
+ void * pNext = {};
uint32_t maxTransformFeedbackStreams = {};
uint32_t maxTransformFeedbackBuffers = {};
VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize = {};
@@ -67982,15 +71708,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect = {};
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT ) ==
- sizeof( VkPhysicalDeviceTransformFeedbackPropertiesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT>::value,
- "PhysicalDeviceTransformFeedbackPropertiesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT>
@@ -68003,63 +71720,60 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceUniformBufferStandardLayoutFeatures;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures(
- VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {} ) VULKAN_HPP_NOEXCEPT
- : uniformBufferStandardLayout( uniformBufferStandardLayout_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , uniformBufferStandardLayout( uniformBufferStandardLayout_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures(
- PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceUniformBufferStandardLayoutFeatures( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceUniformBufferStandardLayoutFeatures( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceUniformBufferStandardLayoutFeatures(
- *reinterpret_cast<PhysicalDeviceUniformBufferStandardLayoutFeatures const *>( &rhs ) )
- {}
+ PhysicalDeviceUniformBufferStandardLayoutFeatures( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceUniformBufferStandardLayoutFeatures( *reinterpret_cast<PhysicalDeviceUniformBufferStandardLayoutFeatures const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceUniformBufferStandardLayoutFeatures &
operator=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceUniformBufferStandardLayoutFeatures &
- operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceUniformBufferStandardLayoutFeatures & operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures &
- setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT
+ setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT
{
uniformBufferStandardLayout = uniformBufferStandardLayout_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceUniformBufferStandardLayoutFeatures *>( this );
}
- explicit operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceUniformBufferStandardLayoutFeatures *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -68076,11 +71790,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout );
# endif
}
@@ -68091,19 +71804,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures ) ==
- sizeof( VkPhysicalDeviceUniformBufferStandardLayoutFeatures ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures>::value,
- "PhysicalDeviceUniformBufferStandardLayoutFeatures is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures>
@@ -68117,31 +71821,29 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceVariablePointersFeatures;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceVariablePointersFeatures;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVariablePointersFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceVariablePointersFeatures( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {} ) VULKAN_HPP_NOEXCEPT
- : variablePointersStorageBuffer( variablePointersStorageBuffer_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , variablePointersStorageBuffer( variablePointersStorageBuffer_ )
, variablePointers( variablePointers_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( PhysicalDeviceVariablePointersFeatures const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVariablePointersFeatures( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceVariablePointersFeatures(
- *reinterpret_cast<PhysicalDeviceVariablePointersFeatures const *>( &rhs ) )
- {}
+ : PhysicalDeviceVariablePointersFeatures( *reinterpret_cast<PhysicalDeviceVariablePointersFeatures const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceVariablePointersFeatures &
- operator=( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceVariablePointersFeatures & operator=( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceVariablePointersFeatures &
- operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceVariablePointersFeatures & operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures const *>( &rhs );
return *this;
@@ -68154,39 +71856,35 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & setVariablePointersStorageBuffer(
- VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures &
+ setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
{
variablePointersStorageBuffer = variablePointersStorageBuffer_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures &
- setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT
{
variablePointers = variablePointers_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceVariablePointersFeatures const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceVariablePointersFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVariablePointersFeatures *>( this );
}
- explicit operator VkPhysicalDeviceVariablePointersFeatures &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceVariablePointersFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVariablePointersFeatures *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -68199,11 +71897,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceVariablePointersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) &&
( variablePointers == rhs.variablePointers );
# endif
}
@@ -68215,20 +71912,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVariablePointersFeatures;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVariablePointersFeatures;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {};
VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures ) ==
- sizeof( VkPhysicalDeviceVariablePointersFeatures ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures>::value,
- "PhysicalDeviceVariablePointersFeatures is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVariablePointersFeatures>
@@ -68244,78 +71932,72 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {} ) VULKAN_HPP_NOEXCEPT
- : vertexAttributeInstanceRateDivisor( vertexAttributeInstanceRateDivisor_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , vertexAttributeInstanceRateDivisor( vertexAttributeInstanceRateDivisor_ )
, vertexAttributeInstanceRateZeroDivisor( vertexAttributeInstanceRateZeroDivisor_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT(
- PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceVertexAttributeDivisorFeaturesEXT( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceVertexAttributeDivisorFeaturesEXT(
- *reinterpret_cast<PhysicalDeviceVertexAttributeDivisorFeaturesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceVertexAttributeDivisorFeaturesEXT( *reinterpret_cast<PhysicalDeviceVertexAttributeDivisorFeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceVertexAttributeDivisorFeaturesEXT &
- operator=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceVertexAttributeDivisorFeaturesEXT & operator=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceVertexAttributeDivisorFeaturesEXT &
- operator=( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceVertexAttributeDivisorFeaturesEXT & operator=( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesEXT &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateDivisor(
- VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesEXT &
+ setVertexAttributeInstanceRateDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ ) VULKAN_HPP_NOEXCEPT
{
vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateZeroDivisor(
- VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesEXT &
+ setVertexAttributeInstanceRateZeroDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT
{
vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &,
- VULKAN_HPP_NAMESPACE::Bool32 const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -68328,11 +72010,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor ) &&
( vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor );
# endif
}
@@ -68344,20 +72025,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor = {};
VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) ==
- sizeof( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT>::value,
- "PhysicalDeviceVertexAttributeDivisorFeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT>
@@ -68370,47 +72042,44 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceVertexAttributeDivisorPropertiesEXT( uint32_t maxVertexAttribDivisor_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxVertexAttribDivisor( maxVertexAttribDivisor_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT( uint32_t maxVertexAttribDivisor_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxVertexAttribDivisor( maxVertexAttribDivisor_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT(
- PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceVertexAttributeDivisorPropertiesEXT( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceVertexAttributeDivisorPropertiesEXT(
- *reinterpret_cast<PhysicalDeviceVertexAttributeDivisorPropertiesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceVertexAttributeDivisorPropertiesEXT( *reinterpret_cast<PhysicalDeviceVertexAttributeDivisorPropertiesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceVertexAttributeDivisorPropertiesEXT &
operator=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceVertexAttributeDivisorPropertiesEXT &
- operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT const *>( &rhs );
return *this;
}
- explicit operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *>( this );
}
- explicit operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -68427,11 +72096,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor );
# endif
}
@@ -68442,19 +72110,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;
+ void * pNext = {};
uint32_t maxVertexAttribDivisor = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) ==
- sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT>::value,
- "PhysicalDeviceVertexAttributeDivisorPropertiesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT>
@@ -68467,62 +72126,59 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexInputDynamicStateFeaturesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ = {} ) VULKAN_HPP_NOEXCEPT
- : vertexInputDynamicState( vertexInputDynamicState_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexInputDynamicStateFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , vertexInputDynamicState( vertexInputDynamicState_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexInputDynamicStateFeaturesEXT(
- PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceVertexInputDynamicStateFeaturesEXT( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceVertexInputDynamicStateFeaturesEXT( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceVertexInputDynamicStateFeaturesEXT(
- *reinterpret_cast<PhysicalDeviceVertexInputDynamicStateFeaturesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceVertexInputDynamicStateFeaturesEXT( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceVertexInputDynamicStateFeaturesEXT( *reinterpret_cast<PhysicalDeviceVertexInputDynamicStateFeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceVertexInputDynamicStateFeaturesEXT &
- operator=( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceVertexInputDynamicStateFeaturesEXT & operator=( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceVertexInputDynamicStateFeaturesEXT &
- operator=( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceVertexInputDynamicStateFeaturesEXT & operator=( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexInputDynamicStateFeaturesEXT &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexInputDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexInputDynamicStateFeaturesEXT &
- setVertexInputDynamicState( VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ ) VULKAN_HPP_NOEXCEPT
+ setVertexInputDynamicState( VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ ) VULKAN_HPP_NOEXCEPT
{
vertexInputDynamicState = vertexInputDynamicState_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -68539,11 +72195,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( vertexInputDynamicState == rhs.vertexInputDynamicState );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexInputDynamicState == rhs.vertexInputDynamicState );
# endif
}
@@ -68554,19 +72209,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT ) ==
- sizeof( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT>::value,
- "PhysicalDeviceVertexInputDynamicStateFeaturesEXT is not nothrow_move_constructible!" );
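
The third recurring change is the comparison guard: the reflect()-based operator== is now gated on VULKAN_HPP_USE_REFLECT instead of a GCC version probe. A small self-contained sketch of the two strategies the generated code toggles between — Example and USE_REFLECT are stand-ins, not Vulkan names:

#include <tuple>

// Stand-in type illustrating the two comparison paths the generated structs
// switch between; USE_REFLECT plays the role of VULKAN_HPP_USE_REFLECT.
struct Example
{
    int      a = 0;
    unsigned b = 0;

    auto reflect() const noexcept
    {
        return std::tie( a, b );  // tuple of references over the members
    }

    bool operator==( Example const & rhs ) const noexcept
    {
#if defined( USE_REFLECT )
        return reflect() == rhs.reflect();        // lexicographic tuple comparison
#else
        return ( a == rhs.a ) && ( b == rhs.b );  // explicit memberwise comparison
#endif
    }
};
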
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT>
@@ -68575,318 +72221,70 @@ namespace VULKAN_HPP_NAMESPACE
};
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct VideoProfileKHR
+ struct PhysicalDeviceVideoFormatInfoKHR
{
- using NativeType = VkVideoProfileKHR;
+ using NativeType = VkPhysicalDeviceVideoFormatInfoKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoProfileKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoFormatInfoKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- VideoProfileKHR( VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation_ =
- VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eInvalid,
- VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling_ = {},
- VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth_ = {},
- VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth_ = {} ) VULKAN_HPP_NOEXCEPT
- : videoCodecOperation( videoCodecOperation_ )
- , chromaSubsampling( chromaSubsampling_ )
- , lumaBitDepth( lumaBitDepth_ )
- , chromaBitDepth( chromaBitDepth_ )
- {}
-
- VULKAN_HPP_CONSTEXPR VideoProfileKHR( VideoProfileKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- VideoProfileKHR( VkVideoProfileKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : VideoProfileKHR( *reinterpret_cast<VideoProfileKHR const *>( &rhs ) )
- {}
-# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
-
- VideoProfileKHR & operator=( VideoProfileKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- VideoProfileKHR & operator=( VkVideoProfileKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoProfileKHR const *>( &rhs );
- return *this;
- }
-
-# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoProfileKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoProfileKHR & setVideoCodecOperation(
- VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation_ ) VULKAN_HPP_NOEXCEPT
- {
- videoCodecOperation = videoCodecOperation_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoProfileKHR & setChromaSubsampling(
- VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling_ ) VULKAN_HPP_NOEXCEPT
- {
- chromaSubsampling = chromaSubsampling_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoProfileKHR &
- setLumaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth_ ) VULKAN_HPP_NOEXCEPT
- {
- lumaBitDepth = lumaBitDepth_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoProfileKHR &
- setChromaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth_ ) VULKAN_HPP_NOEXCEPT
- {
- chromaBitDepth = chromaBitDepth_;
- return *this;
- }
-# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
-
- explicit operator VkVideoProfileKHR const &() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkVideoProfileKHR *>( this );
- }
-
- explicit operator VkVideoProfileKHR &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkVideoProfileKHR *>( this );
- }
-
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
-# if 14 <= VULKAN_HPP_CPP_VERSION
- auto
-# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR const &,
- VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR const &,
- VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR const &,
- VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR const &>
-# endif
- reflect() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoFormatInfoKHR( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , imageUsage( imageUsage_ )
{
- return std::tie( sType, pNext, videoCodecOperation, chromaSubsampling, lumaBitDepth, chromaBitDepth );
}
-# endif
-# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( VideoProfileKHR const & ) const = default;
-# else
- bool operator==( VideoProfileKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
- return this->reflect() == rhs.reflect();
-# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoCodecOperation == rhs.videoCodecOperation ) &&
- ( chromaSubsampling == rhs.chromaSubsampling ) && ( lumaBitDepth == rhs.lumaBitDepth ) &&
- ( chromaBitDepth == rhs.chromaBitDepth );
-# endif
- }
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoFormatInfoKHR( PhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- bool operator!=( VideoProfileKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceVideoFormatInfoKHR( VkPhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceVideoFormatInfoKHR( *reinterpret_cast<PhysicalDeviceVideoFormatInfoKHR const *>( &rhs ) )
{
- return !operator==( rhs );
}
-# endif
-
- public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoProfileKHR;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation =
- VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eInvalid;
- VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling = {};
- VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth = {};
- VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoProfileKHR ) == sizeof( VkVideoProfileKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoProfileKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoProfileKHR>::value,
- "VideoProfileKHR is not nothrow_move_constructible!" );
-
- template <>
- struct CppType<StructureType, StructureType::eVideoProfileKHR>
- {
- using Type = VideoProfileKHR;
- };
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct VideoProfilesKHR
- {
- using NativeType = VkVideoProfilesKHR;
-
- static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoProfilesKHR;
-
-# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- VideoProfilesKHR( uint32_t profileCount_ = {},
- const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pProfiles_ = {} ) VULKAN_HPP_NOEXCEPT
- : profileCount( profileCount_ )
- , pProfiles( pProfiles_ )
- {}
-
- VULKAN_HPP_CONSTEXPR VideoProfilesKHR( VideoProfilesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- VideoProfilesKHR( VkVideoProfilesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : VideoProfilesKHR( *reinterpret_cast<VideoProfilesKHR const *>( &rhs ) )
- {}
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoProfilesKHR & operator=( VideoProfilesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceVideoFormatInfoKHR & operator=( PhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoProfilesKHR & operator=( VkVideoProfilesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceVideoFormatInfoKHR & operator=( VkPhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoProfilesKHR const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR const *>( &rhs );
return *this;
}
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoProfilesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoFormatInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoProfilesKHR & setProfileCount( uint32_t profileCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoFormatInfoKHR & setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT
{
- profileCount = profileCount_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoProfilesKHR &
- setPProfiles( const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pProfiles_ ) VULKAN_HPP_NOEXCEPT
- {
- pProfiles = pProfiles_;
+ imageUsage = imageUsage_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoProfilesKHR const &() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkVideoProfilesKHR *>( this );
- }
-
- explicit operator VkVideoProfilesKHR &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkVideoProfilesKHR *>( this );
- }
-
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
-# if 14 <= VULKAN_HPP_CPP_VERSION
- auto
-# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- uint32_t const &,
- const VULKAN_HPP_NAMESPACE::VideoProfileKHR * const &>
-# endif
- reflect() const VULKAN_HPP_NOEXCEPT
- {
- return std::tie( sType, pNext, profileCount, pProfiles );
- }
-# endif
-
-# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( VideoProfilesKHR const & ) const = default;
-# else
- bool operator==( VideoProfilesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
- return this->reflect() == rhs.reflect();
-# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( profileCount == rhs.profileCount ) &&
- ( pProfiles == rhs.pProfiles );
-# endif
- }
-
- bool operator!=( VideoProfilesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-# endif
-
- public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoProfilesKHR;
- void * pNext = {};
- uint32_t profileCount = {};
- const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pProfiles = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoProfilesKHR ) == sizeof( VkVideoProfilesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoProfilesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoProfilesKHR>::value,
- "VideoProfilesKHR is not nothrow_move_constructible!" );
-
- template <>
- struct CppType<StructureType, StructureType::eVideoProfilesKHR>
- {
- using Type = VideoProfilesKHR;
- };
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct PhysicalDeviceVideoFormatInfoKHR
- {
- using NativeType = VkPhysicalDeviceVideoFormatInfoKHR;
-
- static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoFormatInfoKHR;
-
-# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoFormatInfoKHR(
- VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {},
- const VULKAN_HPP_NAMESPACE::VideoProfilesKHR * pVideoProfiles_ = {} ) VULKAN_HPP_NOEXCEPT
- : imageUsage( imageUsage_ )
- , pVideoProfiles( pVideoProfiles_ )
- {}
-
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceVideoFormatInfoKHR( PhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- PhysicalDeviceVideoFormatInfoKHR( VkPhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceVideoFormatInfoKHR( *reinterpret_cast<PhysicalDeviceVideoFormatInfoKHR const *>( &rhs ) )
- {}
-# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
-
- PhysicalDeviceVideoFormatInfoKHR &
- operator=( PhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- PhysicalDeviceVideoFormatInfoKHR & operator=( VkPhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR const *>( &rhs );
- return *this;
- }
-
- explicit operator VkPhysicalDeviceVideoFormatInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceVideoFormatInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( this );
}
- explicit operator VkPhysicalDeviceVideoFormatInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceVideoFormatInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVideoFormatInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags const &,
- const VULKAN_HPP_NAMESPACE::VideoProfilesKHR * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType, pNext, imageUsage, pVideoProfiles );
+ return std::tie( sType, pNext, imageUsage );
}
# endif
@@ -68895,11 +72293,10 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( PhysicalDeviceVideoFormatInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageUsage == rhs.imageUsage ) &&
- ( pVideoProfiles == rhs.pVideoProfiles );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageUsage == rhs.imageUsage );
# endif
}
@@ -68910,19 +72307,10 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoFormatInfoKHR;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {};
- const VULKAN_HPP_NAMESPACE::VideoProfilesKHR * pVideoProfiles = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoFormatInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR ) ==
- sizeof( VkPhysicalDeviceVideoFormatInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR>::value,
- "PhysicalDeviceVideoFormatInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVideoFormatInfoKHR>
@@ -68939,20 +72327,21 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Features;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceVulkan11Features( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {} ) VULKAN_HPP_NOEXCEPT
- : storageBuffer16BitAccess( storageBuffer16BitAccess_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , storageBuffer16BitAccess( storageBuffer16BitAccess_ )
, uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ )
, storagePushConstant16( storagePushConstant16_ )
, storageInputOutput16( storageInputOutput16_ )
@@ -68964,18 +72353,18 @@ namespace VULKAN_HPP_NAMESPACE
, protectedMemory( protectedMemory_ )
, samplerYcbcrConversion( samplerYcbcrConversion_ )
, shaderDrawParameters( shaderDrawParameters_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceVulkan11Features( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkan11Features( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVulkan11Features( *reinterpret_cast<PhysicalDeviceVulkan11Features const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceVulkan11Features &
- operator=( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceVulkan11Features & operator=( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkan11Features & operator=( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -68991,101 +72380,95 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
- setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
+ setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
{
storageBuffer16BitAccess = storageBuffer16BitAccess_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setUniformAndStorageBuffer16BitAccess(
- VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
+ setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
{
uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
- setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT
{
storagePushConstant16 = storagePushConstant16_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
- setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT
{
storageInputOutput16 = storageInputOutput16_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
- setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT
{
multiview = multiview_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
- setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT
+ setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT
{
multiviewGeometryShader = multiviewGeometryShader_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
- setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT
+ setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT
{
multiviewTessellationShader = multiviewTessellationShader_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setVariablePointersStorageBuffer(
- VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
+ setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
{
variablePointersStorageBuffer = variablePointersStorageBuffer_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
- setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT
{
variablePointers = variablePointers_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
- setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
{
protectedMemory = protectedMemory_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
- setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT
+ setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT
{
samplerYcbcrConversion = samplerYcbcrConversion_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
- setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT
{
shaderDrawParameters = shaderDrawParameters_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceVulkan11Features const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceVulkan11Features const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVulkan11Features *>( this );
}
- explicit operator VkPhysicalDeviceVulkan11Features &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceVulkan11Features &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVulkan11Features *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -69128,19 +72511,15 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceVulkan11Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) &&
- ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) &&
- ( storagePushConstant16 == rhs.storagePushConstant16 ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) &&
+ ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) && ( storagePushConstant16 == rhs.storagePushConstant16 ) &&
( storageInputOutput16 == rhs.storageInputOutput16 ) && ( multiview == rhs.multiview ) &&
- ( multiviewGeometryShader == rhs.multiviewGeometryShader ) &&
- ( multiviewTessellationShader == rhs.multiviewTessellationShader ) &&
- ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) &&
- ( variablePointers == rhs.variablePointers ) && ( protectedMemory == rhs.protectedMemory ) &&
- ( samplerYcbcrConversion == rhs.samplerYcbcrConversion ) &&
+ ( multiviewGeometryShader == rhs.multiviewGeometryShader ) && ( multiviewTessellationShader == rhs.multiviewTessellationShader ) &&
+ ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) && ( variablePointers == rhs.variablePointers ) &&
+ ( protectedMemory == rhs.protectedMemory ) && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion ) &&
( shaderDrawParameters == rhs.shaderDrawParameters );
# endif
}
@@ -69152,9 +72531,9 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Features;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Features;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {};
VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {};
@@ -69167,14 +72546,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features ) ==
- sizeof( VkPhysicalDeviceVulkan11Features ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features>::value,
- "PhysicalDeviceVulkan11Features is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan11Features>
@@ -69187,7 +72558,7 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceVulkan11Properties;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Properties;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Properties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties(
@@ -69200,14 +72571,15 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages_ = {},
VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations_ = {},
VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages_ = {},
- VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ =
- VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes,
- uint32_t maxMultiviewViewCount_ = {},
- uint32_t maxMultiviewInstanceIndex_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {},
- uint32_t maxPerSetDescriptors_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {} ) VULKAN_HPP_NOEXCEPT
- : deviceUUID( deviceUUID_ )
+ VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes,
+ uint32_t maxMultiviewViewCount_ = {},
+ uint32_t maxMultiviewInstanceIndex_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {},
+ uint32_t maxPerSetDescriptors_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , deviceUUID( deviceUUID_ )
, driverUUID( driverUUID_ )
, deviceLUID( deviceLUID_ )
, deviceNodeMask( deviceNodeMask_ )
@@ -69222,18 +72594,18 @@ namespace VULKAN_HPP_NAMESPACE
, protectedNoFault( protectedNoFault_ )
, maxPerSetDescriptors( maxPerSetDescriptors_ )
, maxMemoryAllocationSize( maxMemoryAllocationSize_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14
- PhysicalDeviceVulkan11Properties( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkan11Properties( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVulkan11Properties( *reinterpret_cast<PhysicalDeviceVulkan11Properties const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceVulkan11Properties &
- operator=( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceVulkan11Properties & operator=( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkan11Properties & operator=( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -69241,17 +72613,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkPhysicalDeviceVulkan11Properties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceVulkan11Properties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVulkan11Properties *>( this );
}
- explicit operator VkPhysicalDeviceVulkan11Properties &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceVulkan11Properties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVulkan11Properties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -69300,18 +72672,15 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceUUID == rhs.deviceUUID ) &&
- ( driverUUID == rhs.driverUUID ) && ( deviceLUID == rhs.deviceLUID ) &&
- ( deviceNodeMask == rhs.deviceNodeMask ) && ( deviceLUIDValid == rhs.deviceLUIDValid ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceUUID == rhs.deviceUUID ) && ( driverUUID == rhs.driverUUID ) &&
+ ( deviceLUID == rhs.deviceLUID ) && ( deviceNodeMask == rhs.deviceNodeMask ) && ( deviceLUIDValid == rhs.deviceLUIDValid ) &&
( subgroupSize == rhs.subgroupSize ) && ( subgroupSupportedStages == rhs.subgroupSupportedStages ) &&
( subgroupSupportedOperations == rhs.subgroupSupportedOperations ) &&
- ( subgroupQuadOperationsInAllStages == rhs.subgroupQuadOperationsInAllStages ) &&
- ( pointClippingBehavior == rhs.pointClippingBehavior ) &&
- ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) &&
- ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex ) &&
+ ( subgroupQuadOperationsInAllStages == rhs.subgroupQuadOperationsInAllStages ) && ( pointClippingBehavior == rhs.pointClippingBehavior ) &&
+ ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) && ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex ) &&
( protectedNoFault == rhs.protectedNoFault ) && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) &&
( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize );
# endif
@@ -69324,8 +72693,8 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Properties;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Properties;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> deviceUUID = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> driverUUID = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> deviceLUID = {};
@@ -69335,22 +72704,13 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages = {};
VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations = {};
VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages = {};
- VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior =
- VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes;
- uint32_t maxMultiviewViewCount = {};
- uint32_t maxMultiviewInstanceIndex = {};
- VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {};
- uint32_t maxPerSetDescriptors = {};
- VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties ) ==
- sizeof( VkPhysicalDeviceVulkan11Properties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties>::value,
- "PhysicalDeviceVulkan11Properties is not nothrow_move_constructible!" );
+ VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes;
+ uint32_t maxMultiviewViewCount = {};
+ uint32_t maxMultiviewInstanceIndex = {};
+ VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {};
+ uint32_t maxPerSetDescriptors = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {};
+ };
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan11Properties>
@@ -69366,55 +72726,56 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Features;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features(
- VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ = {} ) VULKAN_HPP_NOEXCEPT
- : samplerMirrorClampToEdge( samplerMirrorClampToEdge_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , samplerMirrorClampToEdge( samplerMirrorClampToEdge_ )
, drawIndirectCount( drawIndirectCount_ )
, storageBuffer8BitAccess( storageBuffer8BitAccess_ )
, uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ )
@@ -69461,18 +72822,18 @@ namespace VULKAN_HPP_NAMESPACE
, shaderOutputViewportIndex( shaderOutputViewportIndex_ )
, shaderOutputLayer( shaderOutputLayer_ )
, subgroupBroadcastDynamicId( subgroupBroadcastDynamicId_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceVulkan12Features( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkan12Features( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVulkan12Features( *reinterpret_cast<PhysicalDeviceVulkan12Features const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceVulkan12Features &
- operator=( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceVulkan12Features & operator=( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkan12Features & operator=( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -69488,168 +72849,163 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
- setSamplerMirrorClampToEdge( VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ ) VULKAN_HPP_NOEXCEPT
+ setSamplerMirrorClampToEdge( VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ ) VULKAN_HPP_NOEXCEPT
{
samplerMirrorClampToEdge = samplerMirrorClampToEdge_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
- setDrawIndirectCount( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDrawIndirectCount( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ ) VULKAN_HPP_NOEXCEPT
{
drawIndirectCount = drawIndirectCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
- setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
+ setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
{
storageBuffer8BitAccess = storageBuffer8BitAccess_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setUniformAndStorageBuffer8BitAccess(
- VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+ setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
{
uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
- setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT
{
storagePushConstant8 = storagePushConstant8_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
- setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+ setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
{
shaderBufferInt64Atomics = shaderBufferInt64Atomics_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
- setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+ setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
{
shaderSharedInt64Atomics = shaderSharedInt64Atomics_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
- setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
{
shaderFloat16 = shaderFloat16_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
- setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
{
shaderInt8 = shaderInt8_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
- setDescriptorIndexing( VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorIndexing( VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ ) VULKAN_HPP_NOEXCEPT
{
descriptorIndexing = descriptorIndexing_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayDynamicIndexing(
- VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+ setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderUniformTexelBufferArrayDynamicIndexing(
- VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+ setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayDynamicIndexing(
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+ setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderUniformBufferArrayNonUniformIndexing(
- VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+ setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderSampledImageArrayNonUniformIndexing(
- VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+ setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderStorageBufferArrayNonUniformIndexing(
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+ setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderStorageImageArrayNonUniformIndexing(
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+ setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayNonUniformIndexing(
- VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+ setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderUniformTexelBufferArrayNonUniformIndexing(
- VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+ setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayNonUniformIndexing(
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+ setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingUniformBufferUpdateAfterBind(
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+ setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingSampledImageUpdateAfterBind(
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+ setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingStorageImageUpdateAfterBind(
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+ setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingStorageBufferUpdateAfterBind(
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+ setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_;
return *this;
@@ -69669,165 +73025,157 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingUpdateUnusedWhilePending(
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+ setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingPartiallyBound(
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+ setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingVariableDescriptorCount(
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+ setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
- setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT
+ setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT
{
runtimeDescriptorArray = runtimeDescriptorArray_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
- setSamplerFilterMinmax( VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setSamplerFilterMinmax( VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ ) VULKAN_HPP_NOEXCEPT
{
samplerFilterMinmax = samplerFilterMinmax_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
- setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
{
scalarBlockLayout = scalarBlockLayout_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
- setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT
{
imagelessFramebuffer = imagelessFramebuffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
- setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT
+ setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT
{
uniformBufferStandardLayout = uniformBufferStandardLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
- setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT
+ setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT
{
shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
- setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT
+ setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT
{
separateDepthStencilLayouts = separateDepthStencilLayouts_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
- setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT
{
hostQueryReset = hostQueryReset_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
- setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT
{
timelineSemaphore = timelineSemaphore_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
- setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
{
bufferDeviceAddress = bufferDeviceAddress_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setBufferDeviceAddressCaptureReplay(
- VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+ setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
{
bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setBufferDeviceAddressMultiDevice(
- VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+ setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
{
bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
- setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT
{
vulkanMemoryModel = vulkanMemoryModel_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
- setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT
+ setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT
{
vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setVulkanMemoryModelAvailabilityVisibilityChains(
- VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+ setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT
{
vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
- setShaderOutputViewportIndex( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ ) VULKAN_HPP_NOEXCEPT
+ setShaderOutputViewportIndex( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ ) VULKAN_HPP_NOEXCEPT
{
shaderOutputViewportIndex = shaderOutputViewportIndex_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
- setShaderOutputLayer( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderOutputLayer( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ ) VULKAN_HPP_NOEXCEPT
{
shaderOutputLayer = shaderOutputLayer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
- setSubgroupBroadcastDynamicId( VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ ) VULKAN_HPP_NOEXCEPT
+ setSubgroupBroadcastDynamicId( VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ ) VULKAN_HPP_NOEXCEPT
{
subgroupBroadcastDynamicId = subgroupBroadcastDynamicId_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceVulkan12Features const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceVulkan12Features const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVulkan12Features *>( this );
}
- explicit operator VkPhysicalDeviceVulkan12Features &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceVulkan12Features &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVulkan12Features *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -69940,18 +73288,14 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( samplerMirrorClampToEdge == rhs.samplerMirrorClampToEdge ) &&
- ( drawIndirectCount == rhs.drawIndirectCount ) &&
- ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) &&
- ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) &&
- ( storagePushConstant8 == rhs.storagePushConstant8 ) &&
- ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) &&
- ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics ) && ( shaderFloat16 == rhs.shaderFloat16 ) &&
- ( shaderInt8 == rhs.shaderInt8 ) && ( descriptorIndexing == rhs.descriptorIndexing ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( samplerMirrorClampToEdge == rhs.samplerMirrorClampToEdge ) &&
+ ( drawIndirectCount == rhs.drawIndirectCount ) && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) &&
+ ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) && ( storagePushConstant8 == rhs.storagePushConstant8 ) &&
+ ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) && ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics ) &&
+ ( shaderFloat16 == rhs.shaderFloat16 ) && ( shaderInt8 == rhs.shaderInt8 ) && ( descriptorIndexing == rhs.descriptorIndexing ) &&
( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing ) &&
( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing ) &&
( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) &&
@@ -69960,36 +73304,27 @@ namespace VULKAN_HPP_NAMESPACE
( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) &&
( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) &&
( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) &&
- ( shaderUniformTexelBufferArrayNonUniformIndexing ==
- rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) &&
- ( shaderStorageTexelBufferArrayNonUniformIndexing ==
- rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) &&
+ ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) &&
+ ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) &&
( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) &&
( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) &&
( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) &&
( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) &&
- ( descriptorBindingUniformTexelBufferUpdateAfterBind ==
- rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) &&
- ( descriptorBindingStorageTexelBufferUpdateAfterBind ==
- rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) &&
+ ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) &&
+ ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) &&
( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) &&
( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) &&
( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) &&
- ( runtimeDescriptorArray == rhs.runtimeDescriptorArray ) &&
- ( samplerFilterMinmax == rhs.samplerFilterMinmax ) && ( scalarBlockLayout == rhs.scalarBlockLayout ) &&
- ( imagelessFramebuffer == rhs.imagelessFramebuffer ) &&
- ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout ) &&
- ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes ) &&
- ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts ) &&
- ( hostQueryReset == rhs.hostQueryReset ) && ( timelineSemaphore == rhs.timelineSemaphore ) &&
- ( bufferDeviceAddress == rhs.bufferDeviceAddress ) &&
+ ( runtimeDescriptorArray == rhs.runtimeDescriptorArray ) && ( samplerFilterMinmax == rhs.samplerFilterMinmax ) &&
+ ( scalarBlockLayout == rhs.scalarBlockLayout ) && ( imagelessFramebuffer == rhs.imagelessFramebuffer ) &&
+ ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout ) && ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes ) &&
+ ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts ) && ( hostQueryReset == rhs.hostQueryReset ) &&
+ ( timelineSemaphore == rhs.timelineSemaphore ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) &&
( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) &&
- ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ) &&
- ( vulkanMemoryModel == rhs.vulkanMemoryModel ) &&
+ ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ) && ( vulkanMemoryModel == rhs.vulkanMemoryModel ) &&
( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope ) &&
( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains ) &&
- ( shaderOutputViewportIndex == rhs.shaderOutputViewportIndex ) &&
- ( shaderOutputLayer == rhs.shaderOutputLayer ) &&
+ ( shaderOutputViewportIndex == rhs.shaderOutputViewportIndex ) && ( shaderOutputLayer == rhs.shaderOutputLayer ) &&
( subgroupBroadcastDynamicId == rhs.subgroupBroadcastDynamicId );
# endif
}
@@ -70001,11 +73336,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Features;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge = {};
- VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount = {};
- VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Features;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge = {};
+ VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount = {};
+ VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {};
@@ -70051,14 +73386,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer = {};
VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features ) ==
- sizeof( VkPhysicalDeviceVulkan12Features ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features>::value,
- "PhysicalDeviceVulkan12Features is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan12Features>
@@ -70071,65 +73398,65 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceVulkan12Properties;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Properties;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Properties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties(
- VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary,
- std::array<char, VK_MAX_DRIVER_NAME_SIZE> const & driverName_ = {},
- std::array<char, VK_MAX_DRIVER_INFO_SIZE> const & driverInfo_ = {},
- VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {},
- VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ =
- VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
- VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ =
- VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
- VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {},
- uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {},
- uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {},
- uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {},
- uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {},
- uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {},
- uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {},
- uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {},
- uint32_t maxPerStageUpdateAfterBindResources_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {},
- VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {},
- VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {},
- uint64_t maxTimelineSemaphoreValueDifference_ = {},
- VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {} ) VULKAN_HPP_NOEXCEPT
- : driverID( driverID_ )
+ VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary,
+ std::array<char, VK_MAX_DRIVER_NAME_SIZE> const & driverName_ = {},
+ std::array<char, VK_MAX_DRIVER_INFO_SIZE> const & driverInfo_ = {},
+ VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {},
+ VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
+ VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {},
+ uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {},
+ uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {},
+ uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {},
+ uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {},
+ uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {},
+ uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {},
+ uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {},
+ uint32_t maxPerStageUpdateAfterBindResources_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {},
+ VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {},
+ VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {},
+ uint64_t maxTimelineSemaphoreValueDifference_ = {},
+ VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , driverID( driverID_ )
, driverName( driverName_ )
, driverInfo( driverInfo_ )
, conformanceVersion( conformanceVersion_ )
@@ -70181,18 +73508,18 @@ namespace VULKAN_HPP_NAMESPACE
, filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ )
, maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ )
, framebufferIntegerColorSampleCounts( framebufferIntegerColorSampleCounts_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14
- PhysicalDeviceVulkan12Properties( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkan12Properties( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVulkan12Properties( *reinterpret_cast<PhysicalDeviceVulkan12Properties const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceVulkan12Properties &
- operator=( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceVulkan12Properties & operator=( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkan12Properties & operator=( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -70200,17 +73527,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkPhysicalDeviceVulkan12Properties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceVulkan12Properties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVulkan12Properties *>( this );
}
- explicit operator VkPhysicalDeviceVulkan12Properties &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceVulkan12Properties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVulkan12Properties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -70333,69 +73660,46 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) &&
- ( driverName == rhs.driverName ) && ( driverInfo == rhs.driverInfo ) &&
- ( conformanceVersion == rhs.conformanceVersion ) &&
- ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) &&
- ( roundingModeIndependence == rhs.roundingModeIndependence ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && ( driverName == rhs.driverName ) &&
+ ( driverInfo == rhs.driverInfo ) && ( conformanceVersion == rhs.conformanceVersion ) &&
+ ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) && ( roundingModeIndependence == rhs.roundingModeIndependence ) &&
( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) &&
( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) &&
( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 ) &&
- ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) &&
- ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) &&
- ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) &&
- ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) &&
+ ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) && ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) &&
+ ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) && ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) &&
( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) &&
- ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) &&
- ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) &&
- ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) &&
- ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) &&
- ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) &&
- ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) &&
+ ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) &&
+ ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) &&
+ ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) &&
( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 ) &&
( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) &&
- ( shaderUniformBufferArrayNonUniformIndexingNative ==
- rhs.shaderUniformBufferArrayNonUniformIndexingNative ) &&
- ( shaderSampledImageArrayNonUniformIndexingNative ==
- rhs.shaderSampledImageArrayNonUniformIndexingNative ) &&
- ( shaderStorageBufferArrayNonUniformIndexingNative ==
- rhs.shaderStorageBufferArrayNonUniformIndexingNative ) &&
- ( shaderStorageImageArrayNonUniformIndexingNative ==
- rhs.shaderStorageImageArrayNonUniformIndexingNative ) &&
- ( shaderInputAttachmentArrayNonUniformIndexingNative ==
- rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) &&
- ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) &&
- ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) &&
+ ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative ) &&
+ ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative ) &&
+ ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative ) &&
+ ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative ) &&
+ ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) &&
+ ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) &&
( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) &&
- ( maxPerStageDescriptorUpdateAfterBindUniformBuffers ==
- rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) &&
- ( maxPerStageDescriptorUpdateAfterBindStorageBuffers ==
- rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) &&
- ( maxPerStageDescriptorUpdateAfterBindSampledImages ==
- rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) &&
- ( maxPerStageDescriptorUpdateAfterBindStorageImages ==
- rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) &&
- ( maxPerStageDescriptorUpdateAfterBindInputAttachments ==
- rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) &&
+ ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) &&
+ ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) &&
+ ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) &&
+ ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) &&
+ ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) &&
( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) &&
( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) &&
( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) &&
- ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ==
- rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) &&
+ ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) &&
( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) &&
- ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ==
- rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) &&
+ ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) &&
( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) &&
( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) &&
- ( maxDescriptorSetUpdateAfterBindInputAttachments ==
- rhs.maxDescriptorSetUpdateAfterBindInputAttachments ) &&
- ( supportedDepthResolveModes == rhs.supportedDepthResolveModes ) &&
- ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) &&
- ( independentResolveNone == rhs.independentResolveNone ) &&
- ( independentResolve == rhs.independentResolve ) &&
+ ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments ) &&
+ ( supportedDepthResolveModes == rhs.supportedDepthResolveModes ) && ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) &&
+ ( independentResolveNone == rhs.independentResolveNone ) && ( independentResolve == rhs.independentResolve ) &&
( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) &&
( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping ) &&
( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference ) &&
@@ -70410,71 +73714,61 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Properties;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Properties;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary;
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> driverName = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> driverInfo = {};
VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {};
- VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence =
- VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
- VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence =
- VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
- VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {};
- uint32_t maxUpdateAfterBindDescriptorsInAllPools = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {};
- VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {};
- VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {};
- VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {};
- uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {};
- uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {};
- uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {};
- uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {};
- uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {};
- uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {};
- uint32_t maxPerStageUpdateAfterBindResources = {};
- uint32_t maxDescriptorSetUpdateAfterBindSamplers = {};
- uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {};
- uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {};
- uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {};
- uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {};
- uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {};
- uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {};
- uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {};
- VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {};
- VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {};
- VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {};
- VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {};
- VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {};
- VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {};
- uint64_t maxTimelineSemaphoreValueDifference = {};
- VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties ) ==
- sizeof( VkPhysicalDeviceVulkan12Properties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties>::value,
- "PhysicalDeviceVulkan12Properties is not nothrow_move_constructible!" );
+ VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
+ VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {};
+ uint32_t maxUpdateAfterBindDescriptorsInAllPools = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {};
+ VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {};
+ VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {};
+ uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {};
+ uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {};
+ uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {};
+ uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {};
+ uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {};
+ uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {};
+ uint32_t maxPerStageUpdateAfterBindResources = {};
+ uint32_t maxDescriptorSetUpdateAfterBindSamplers = {};
+ uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {};
+ uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {};
+ uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {};
+ uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {};
+ uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {};
+ uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {};
+ uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {};
+ VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {};
+ VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {};
+ VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {};
+ VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {};
+ VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {};
+ VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {};
+ uint64_t maxTimelineSemaphoreValueDifference = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts = {};
+ };
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan12Properties>
@@ -70490,23 +73784,24 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan13Features;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Features(
- VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ = {} ) VULKAN_HPP_NOEXCEPT
- : robustImageAccess( robustImageAccess_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Features( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , robustImageAccess( robustImageAccess_ )
, inlineUniformBlock( inlineUniformBlock_ )
, descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ )
, pipelineCreationCacheControl( pipelineCreationCacheControl_ )
@@ -70521,18 +73816,18 @@ namespace VULKAN_HPP_NAMESPACE
, dynamicRendering( dynamicRendering_ )
, shaderIntegerDotProduct( shaderIntegerDotProduct_ )
, maintenance4( maintenance4_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceVulkan13Features( PhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Features( PhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkan13Features( VkPhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVulkan13Features( *reinterpret_cast<PhysicalDeviceVulkan13Features const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceVulkan13Features &
- operator=( PhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceVulkan13Features & operator=( PhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkan13Features & operator=( VkPhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -70547,15 +73842,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
- setRobustImageAccess( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setRobustImageAccess( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT
{
robustImageAccess = robustImageAccess_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
- setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT
{
inlineUniformBlock = inlineUniformBlock_;
return *this;
@@ -70569,101 +73862,95 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
- setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT
+ setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT
{
pipelineCreationCacheControl = pipelineCreationCacheControl_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
- setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT
{
privateData = privateData_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setShaderDemoteToHelperInvocation(
- VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
+ setShaderDemoteToHelperInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT
{
shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
- setShaderTerminateInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ ) VULKAN_HPP_NOEXCEPT
+ setShaderTerminateInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ ) VULKAN_HPP_NOEXCEPT
{
shaderTerminateInvocation = shaderTerminateInvocation_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
- setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT
{
subgroupSizeControl = subgroupSizeControl_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
- setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT
{
computeFullSubgroups = computeFullSubgroups_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
- setSynchronization2( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setSynchronization2( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ ) VULKAN_HPP_NOEXCEPT
{
synchronization2 = synchronization2_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
- setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT
+ setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT
{
textureCompressionASTC_HDR = textureCompressionASTC_HDR_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setShaderZeroInitializeWorkgroupMemory(
- VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
+ setShaderZeroInitializeWorkgroupMemory( VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ ) VULKAN_HPP_NOEXCEPT
{
shaderZeroInitializeWorkgroupMemory = shaderZeroInitializeWorkgroupMemory_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
- setDynamicRendering( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setDynamicRendering( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ ) VULKAN_HPP_NOEXCEPT
{
dynamicRendering = dynamicRendering_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
- setShaderIntegerDotProduct( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ ) VULKAN_HPP_NOEXCEPT
+ setShaderIntegerDotProduct( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ ) VULKAN_HPP_NOEXCEPT
{
shaderIntegerDotProduct = shaderIntegerDotProduct_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
- setMaintenance4( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setMaintenance4( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ ) VULKAN_HPP_NOEXCEPT
{
maintenance4 = maintenance4_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceVulkan13Features const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceVulkan13Features const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVulkan13Features *>( this );
}
- explicit operator VkPhysicalDeviceVulkan13Features &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceVulkan13Features &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVulkan13Features *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -70712,22 +73999,17 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceVulkan13Features const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustImageAccess == rhs.robustImageAccess ) &&
( inlineUniformBlock == rhs.inlineUniformBlock ) &&
- ( descriptorBindingInlineUniformBlockUpdateAfterBind ==
- rhs.descriptorBindingInlineUniformBlockUpdateAfterBind ) &&
- ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl ) &&
- ( privateData == rhs.privateData ) &&
- ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation ) &&
- ( shaderTerminateInvocation == rhs.shaderTerminateInvocation ) &&
- ( subgroupSizeControl == rhs.subgroupSizeControl ) &&
- ( computeFullSubgroups == rhs.computeFullSubgroups ) && ( synchronization2 == rhs.synchronization2 ) &&
- ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR ) &&
- ( shaderZeroInitializeWorkgroupMemory == rhs.shaderZeroInitializeWorkgroupMemory ) &&
- ( dynamicRendering == rhs.dynamicRendering ) &&
+ ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind ) &&
+ ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl ) && ( privateData == rhs.privateData ) &&
+ ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation ) && ( shaderTerminateInvocation == rhs.shaderTerminateInvocation ) &&
+ ( subgroupSizeControl == rhs.subgroupSizeControl ) && ( computeFullSubgroups == rhs.computeFullSubgroups ) &&
+ ( synchronization2 == rhs.synchronization2 ) && ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR ) &&
+ ( shaderZeroInitializeWorkgroupMemory == rhs.shaderZeroInitializeWorkgroupMemory ) && ( dynamicRendering == rhs.dynamicRendering ) &&
( shaderIntegerDotProduct == rhs.shaderIntegerDotProduct ) && ( maintenance4 == rhs.maintenance4 );
# endif
}
@@ -70739,10 +74021,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan13Features;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess = {};
- VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan13Features;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess = {};
+ VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {};
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {};
VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {};
VULKAN_HPP_NAMESPACE::Bool32 privateData = {};
@@ -70757,14 +74039,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct = {};
VULKAN_HPP_NAMESPACE::Bool32 maintenance4 = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features ) ==
- sizeof( VkPhysicalDeviceVulkan13Features ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features>::value,
- "PhysicalDeviceVulkan13Features is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan13Features>
@@ -70777,63 +74051,64 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceVulkan13Properties;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan13Properties;
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Properties(
- uint32_t minSubgroupSize_ = {},
- uint32_t maxSubgroupSize_ = {},
- uint32_t maxComputeWorkgroupSubgroups_ = {},
- VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {},
- uint32_t maxInlineUniformBlockSize_ = {},
- uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {},
- uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {},
- uint32_t maxDescriptorSetInlineUniformBlocks_ = {},
- uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {},
- uint32_t maxInlineUniformTotalSize_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize_ = {} ) VULKAN_HPP_NOEXCEPT
- : minSubgroupSize( minSubgroupSize_ )
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan13Properties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceVulkan13Properties( uint32_t minSubgroupSize_ = {},
+ uint32_t maxSubgroupSize_ = {},
+ uint32_t maxComputeWorkgroupSubgroups_ = {},
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {},
+ uint32_t maxInlineUniformBlockSize_ = {},
+ uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {},
+ uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {},
+ uint32_t maxDescriptorSetInlineUniformBlocks_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {},
+ uint32_t maxInlineUniformTotalSize_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , minSubgroupSize( minSubgroupSize_ )
, maxSubgroupSize( maxSubgroupSize_ )
, maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ )
, requiredSubgroupSizeStages( requiredSubgroupSizeStages_ )
, maxInlineUniformBlockSize( maxInlineUniformBlockSize_ )
, maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ )
- , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks(
- maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ )
+ , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ )
, maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ )
, maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ )
, maxInlineUniformTotalSize( maxInlineUniformTotalSize_ )
@@ -70842,8 +74117,7 @@ namespace VULKAN_HPP_NAMESPACE
, integerDotProduct8BitMixedSignednessAccelerated( integerDotProduct8BitMixedSignednessAccelerated_ )
, integerDotProduct4x8BitPackedUnsignedAccelerated( integerDotProduct4x8BitPackedUnsignedAccelerated_ )
, integerDotProduct4x8BitPackedSignedAccelerated( integerDotProduct4x8BitPackedSignedAccelerated_ )
- , integerDotProduct4x8BitPackedMixedSignednessAccelerated(
- integerDotProduct4x8BitPackedMixedSignednessAccelerated_ )
+ , integerDotProduct4x8BitPackedMixedSignednessAccelerated( integerDotProduct4x8BitPackedMixedSignednessAccelerated_ )
, integerDotProduct16BitUnsignedAccelerated( integerDotProduct16BitUnsignedAccelerated_ )
, integerDotProduct16BitSignedAccelerated( integerDotProduct16BitSignedAccelerated_ )
, integerDotProduct16BitMixedSignednessAccelerated( integerDotProduct16BitMixedSignednessAccelerated_ )
@@ -70853,53 +74127,39 @@ namespace VULKAN_HPP_NAMESPACE
, integerDotProduct64BitUnsignedAccelerated( integerDotProduct64BitUnsignedAccelerated_ )
, integerDotProduct64BitSignedAccelerated( integerDotProduct64BitSignedAccelerated_ )
, integerDotProduct64BitMixedSignednessAccelerated( integerDotProduct64BitMixedSignednessAccelerated_ )
- , integerDotProductAccumulatingSaturating8BitUnsignedAccelerated(
- integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ )
- , integerDotProductAccumulatingSaturating8BitSignedAccelerated(
- integerDotProductAccumulatingSaturating8BitSignedAccelerated_ )
- , integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated(
- integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ )
- , integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated(
- integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ )
- , integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated(
- integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ )
+ , integerDotProductAccumulatingSaturating8BitUnsignedAccelerated( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ )
+ , integerDotProductAccumulatingSaturating8BitSignedAccelerated( integerDotProductAccumulatingSaturating8BitSignedAccelerated_ )
+ , integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ )
+ , integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ )
+ , integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ )
, integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated(
integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ )
- , integerDotProductAccumulatingSaturating16BitUnsignedAccelerated(
- integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ )
- , integerDotProductAccumulatingSaturating16BitSignedAccelerated(
- integerDotProductAccumulatingSaturating16BitSignedAccelerated_ )
- , integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated(
- integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ )
- , integerDotProductAccumulatingSaturating32BitUnsignedAccelerated(
- integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ )
- , integerDotProductAccumulatingSaturating32BitSignedAccelerated(
- integerDotProductAccumulatingSaturating32BitSignedAccelerated_ )
- , integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated(
- integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ )
- , integerDotProductAccumulatingSaturating64BitUnsignedAccelerated(
- integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ )
- , integerDotProductAccumulatingSaturating64BitSignedAccelerated(
- integerDotProductAccumulatingSaturating64BitSignedAccelerated_ )
- , integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated(
- integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ )
+ , integerDotProductAccumulatingSaturating16BitUnsignedAccelerated( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ )
+ , integerDotProductAccumulatingSaturating16BitSignedAccelerated( integerDotProductAccumulatingSaturating16BitSignedAccelerated_ )
+ , integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ )
+ , integerDotProductAccumulatingSaturating32BitUnsignedAccelerated( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ )
+ , integerDotProductAccumulatingSaturating32BitSignedAccelerated( integerDotProductAccumulatingSaturating32BitSignedAccelerated_ )
+ , integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ )
+ , integerDotProductAccumulatingSaturating64BitUnsignedAccelerated( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ )
+ , integerDotProductAccumulatingSaturating64BitSignedAccelerated( integerDotProductAccumulatingSaturating64BitSignedAccelerated_ )
+ , integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ )
, storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ )
, storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ )
, uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ )
, uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ )
, maxBufferSize( maxBufferSize_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PhysicalDeviceVulkan13Properties( PhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Properties( PhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkan13Properties( VkPhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVulkan13Properties( *reinterpret_cast<PhysicalDeviceVulkan13Properties const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceVulkan13Properties &
- operator=( PhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceVulkan13Properties & operator=( PhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkan13Properties & operator=( VkPhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -70907,17 +74167,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkPhysicalDeviceVulkan13Properties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceVulkan13Properties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVulkan13Properties *>( this );
}
- explicit operator VkPhysicalDeviceVulkan13Properties &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceVulkan13Properties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVulkan13Properties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -71026,46 +74286,34 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceVulkan13Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minSubgroupSize == rhs.minSubgroupSize ) &&
- ( maxSubgroupSize == rhs.maxSubgroupSize ) &&
- ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups ) &&
- ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minSubgroupSize == rhs.minSubgroupSize ) && ( maxSubgroupSize == rhs.maxSubgroupSize ) &&
+ ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups ) && ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages ) &&
( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize ) &&
( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks ) &&
- ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ==
- rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ) &&
+ ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ) &&
( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks ) &&
- ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks ==
- rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks ) &&
+ ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks ) &&
( maxInlineUniformTotalSize == rhs.maxInlineUniformTotalSize ) &&
( integerDotProduct8BitUnsignedAccelerated == rhs.integerDotProduct8BitUnsignedAccelerated ) &&
( integerDotProduct8BitSignedAccelerated == rhs.integerDotProduct8BitSignedAccelerated ) &&
- ( integerDotProduct8BitMixedSignednessAccelerated ==
- rhs.integerDotProduct8BitMixedSignednessAccelerated ) &&
- ( integerDotProduct4x8BitPackedUnsignedAccelerated ==
- rhs.integerDotProduct4x8BitPackedUnsignedAccelerated ) &&
+ ( integerDotProduct8BitMixedSignednessAccelerated == rhs.integerDotProduct8BitMixedSignednessAccelerated ) &&
+ ( integerDotProduct4x8BitPackedUnsignedAccelerated == rhs.integerDotProduct4x8BitPackedUnsignedAccelerated ) &&
( integerDotProduct4x8BitPackedSignedAccelerated == rhs.integerDotProduct4x8BitPackedSignedAccelerated ) &&
- ( integerDotProduct4x8BitPackedMixedSignednessAccelerated ==
- rhs.integerDotProduct4x8BitPackedMixedSignednessAccelerated ) &&
+ ( integerDotProduct4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProduct4x8BitPackedMixedSignednessAccelerated ) &&
( integerDotProduct16BitUnsignedAccelerated == rhs.integerDotProduct16BitUnsignedAccelerated ) &&
( integerDotProduct16BitSignedAccelerated == rhs.integerDotProduct16BitSignedAccelerated ) &&
- ( integerDotProduct16BitMixedSignednessAccelerated ==
- rhs.integerDotProduct16BitMixedSignednessAccelerated ) &&
+ ( integerDotProduct16BitMixedSignednessAccelerated == rhs.integerDotProduct16BitMixedSignednessAccelerated ) &&
( integerDotProduct32BitUnsignedAccelerated == rhs.integerDotProduct32BitUnsignedAccelerated ) &&
( integerDotProduct32BitSignedAccelerated == rhs.integerDotProduct32BitSignedAccelerated ) &&
- ( integerDotProduct32BitMixedSignednessAccelerated ==
- rhs.integerDotProduct32BitMixedSignednessAccelerated ) &&
+ ( integerDotProduct32BitMixedSignednessAccelerated == rhs.integerDotProduct32BitMixedSignednessAccelerated ) &&
( integerDotProduct64BitUnsignedAccelerated == rhs.integerDotProduct64BitUnsignedAccelerated ) &&
( integerDotProduct64BitSignedAccelerated == rhs.integerDotProduct64BitSignedAccelerated ) &&
- ( integerDotProduct64BitMixedSignednessAccelerated ==
- rhs.integerDotProduct64BitMixedSignednessAccelerated ) &&
- ( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated ==
- rhs.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated ) &&
- ( integerDotProductAccumulatingSaturating8BitSignedAccelerated ==
- rhs.integerDotProductAccumulatingSaturating8BitSignedAccelerated ) &&
+ ( integerDotProduct64BitMixedSignednessAccelerated == rhs.integerDotProduct64BitMixedSignednessAccelerated ) &&
+ ( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated ) &&
+ ( integerDotProductAccumulatingSaturating8BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitSignedAccelerated ) &&
( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated ==
rhs.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated ) &&
( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated ==
@@ -71074,29 +74322,22 @@ namespace VULKAN_HPP_NAMESPACE
rhs.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated ) &&
( integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated ==
rhs.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated ) &&
- ( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated ==
- rhs.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated ) &&
- ( integerDotProductAccumulatingSaturating16BitSignedAccelerated ==
- rhs.integerDotProductAccumulatingSaturating16BitSignedAccelerated ) &&
+ ( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated ) &&
+ ( integerDotProductAccumulatingSaturating16BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitSignedAccelerated ) &&
( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated ==
rhs.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated ) &&
- ( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated ==
- rhs.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated ) &&
- ( integerDotProductAccumulatingSaturating32BitSignedAccelerated ==
- rhs.integerDotProductAccumulatingSaturating32BitSignedAccelerated ) &&
+ ( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated ) &&
+ ( integerDotProductAccumulatingSaturating32BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitSignedAccelerated ) &&
( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated ==
rhs.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated ) &&
- ( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated ==
- rhs.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated ) &&
- ( integerDotProductAccumulatingSaturating64BitSignedAccelerated ==
- rhs.integerDotProductAccumulatingSaturating64BitSignedAccelerated ) &&
+ ( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated ) &&
+ ( integerDotProductAccumulatingSaturating64BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitSignedAccelerated ) &&
( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated ==
rhs.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated ) &&
( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes ) &&
( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment ) &&
( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes ) &&
- ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment ) &&
- ( maxBufferSize == rhs.maxBufferSize );
+ ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment ) && ( maxBufferSize == rhs.maxBufferSize );
# endif
}
@@ -71107,62 +74348,54 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan13Properties;
- void * pNext = {};
- uint32_t minSubgroupSize = {};
- uint32_t maxSubgroupSize = {};
- uint32_t maxComputeWorkgroupSubgroups = {};
- VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {};
- uint32_t maxInlineUniformBlockSize = {};
- uint32_t maxPerStageDescriptorInlineUniformBlocks = {};
- uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {};
- uint32_t maxDescriptorSetInlineUniformBlocks = {};
- uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {};
- uint32_t maxInlineUniformTotalSize = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated = {};
- VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated = {};
- VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {};
- VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {};
- VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {};
- VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {};
- VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties ) ==
- sizeof( VkPhysicalDeviceVulkan13Properties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties>::value,
- "PhysicalDeviceVulkan13Properties is not nothrow_move_constructible!" );
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan13Properties;
+ void * pNext = {};
+ uint32_t minSubgroupSize = {};
+ uint32_t maxSubgroupSize = {};
+ uint32_t maxComputeWorkgroupSubgroups = {};
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {};
+ uint32_t maxInlineUniformBlockSize = {};
+ uint32_t maxPerStageDescriptorInlineUniformBlocks = {};
+ uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {};
+ uint32_t maxDescriptorSetInlineUniformBlocks = {};
+ uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {};
+ uint32_t maxInlineUniformTotalSize = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated = {};
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {};
+ VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {};
+ VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize = {};
+ };
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan13Properties>
@@ -71175,33 +74408,31 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceVulkanMemoryModelFeatures;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceVulkanMemoryModelFeatures;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures(
- VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {} ) VULKAN_HPP_NOEXCEPT
- : vulkanMemoryModel( vulkanMemoryModel_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , vulkanMemoryModel( vulkanMemoryModel_ )
, vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ )
, vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( PhysicalDeviceVulkanMemoryModelFeatures const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkanMemoryModelFeatures( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceVulkanMemoryModelFeatures(
- *reinterpret_cast<PhysicalDeviceVulkanMemoryModelFeatures const *>( &rhs ) )
- {}
+ : PhysicalDeviceVulkanMemoryModelFeatures( *reinterpret_cast<PhysicalDeviceVulkanMemoryModelFeatures const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceVulkanMemoryModelFeatures &
- operator=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceVulkanMemoryModelFeatures & operator=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceVulkanMemoryModelFeatures &
- operator=( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceVulkanMemoryModelFeatures & operator=( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures const *>( &rhs );
return *this;
@@ -71222,31 +74453,31 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures &
- setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT
+ setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT
{
vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModelAvailabilityVisibilityChains(
- VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures &
+ setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT
{
vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceVulkanMemoryModelFeatures const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceVulkanMemoryModelFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVulkanMemoryModelFeatures *>( this );
}
- explicit operator VkPhysicalDeviceVulkanMemoryModelFeatures &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceVulkanMemoryModelFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeatures *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -71258,8 +74489,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- sType, pNext, vulkanMemoryModel, vulkanMemoryModelDeviceScope, vulkanMemoryModelAvailabilityVisibilityChains );
+ return std::tie( sType, pNext, vulkanMemoryModel, vulkanMemoryModelDeviceScope, vulkanMemoryModelAvailabilityVisibilityChains );
}
#endif
@@ -71268,7 +74498,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vulkanMemoryModel == rhs.vulkanMemoryModel ) &&
@@ -71284,21 +74514,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {};
VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {};
VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures ) ==
- sizeof( VkPhysicalDeviceVulkanMemoryModelFeatures ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures>::value,
- "PhysicalDeviceVulkanMemoryModelFeatures is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVulkanMemoryModelFeatures>
@@ -71312,29 +74533,30 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR(
- VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ = {} ) VULKAN_HPP_NOEXCEPT
- : workgroupMemoryExplicitLayout( workgroupMemoryExplicitLayout_ )
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , workgroupMemoryExplicitLayout( workgroupMemoryExplicitLayout_ )
, workgroupMemoryExplicitLayoutScalarBlockLayout( workgroupMemoryExplicitLayoutScalarBlockLayout_ )
, workgroupMemoryExplicitLayout8BitAccess( workgroupMemoryExplicitLayout8BitAccess_ )
, workgroupMemoryExplicitLayout16BitAccess( workgroupMemoryExplicitLayout16BitAccess_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR(
- PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs )
+ VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR(
- VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR(
- *reinterpret_cast<PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const *>( &rhs ) )
- {}
+ PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( *reinterpret_cast<PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &
@@ -71343,62 +74565,57 @@ namespace VULKAN_HPP_NAMESPACE
PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &
operator=( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayout(
- VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &
+ setWorkgroupMemoryExplicitLayout( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_ ) VULKAN_HPP_NOEXCEPT
{
workgroupMemoryExplicitLayout = workgroupMemoryExplicitLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &
- setWorkgroupMemoryExplicitLayoutScalarBlockLayout(
- VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
+ setWorkgroupMemoryExplicitLayoutScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
{
workgroupMemoryExplicitLayoutScalarBlockLayout = workgroupMemoryExplicitLayoutScalarBlockLayout_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &
- setWorkgroupMemoryExplicitLayout8BitAccess(
- VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ ) VULKAN_HPP_NOEXCEPT
+ setWorkgroupMemoryExplicitLayout8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ ) VULKAN_HPP_NOEXCEPT
{
workgroupMemoryExplicitLayout8BitAccess = workgroupMemoryExplicitLayout8BitAccess_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &
- setWorkgroupMemoryExplicitLayout16BitAccess(
- VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ ) VULKAN_HPP_NOEXCEPT
+ setWorkgroupMemoryExplicitLayout16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ ) VULKAN_HPP_NOEXCEPT
{
workgroupMemoryExplicitLayout16BitAccess = workgroupMemoryExplicitLayout16BitAccess_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR *>( this );
}
- explicit operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -71425,11 +74642,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( workgroupMemoryExplicitLayout == rhs.workgroupMemoryExplicitLayout ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( workgroupMemoryExplicitLayout == rhs.workgroupMemoryExplicitLayout ) &&
( workgroupMemoryExplicitLayoutScalarBlockLayout == rhs.workgroupMemoryExplicitLayoutScalarBlockLayout ) &&
( workgroupMemoryExplicitLayout8BitAccess == rhs.workgroupMemoryExplicitLayout8BitAccess ) &&
( workgroupMemoryExplicitLayout16BitAccess == rhs.workgroupMemoryExplicitLayout16BitAccess );
@@ -71443,23 +74659,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout = {};
VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout = {};
VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR ) ==
- sizeof( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<
- VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>::value,
- "PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>
@@ -71472,62 +74678,59 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ = {} ) VULKAN_HPP_NOEXCEPT
- : ycbcr2plane444Formats( ycbcr2plane444Formats_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , ycbcr2plane444Formats( ycbcr2plane444Formats_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT(
- PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT(
- *reinterpret_cast<PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( *reinterpret_cast<PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT &
- operator=( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & operator=( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT &
- operator=( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & operator=( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT &
- setYcbcr2plane444Formats( VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ ) VULKAN_HPP_NOEXCEPT
+ setYcbcr2plane444Formats( VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ ) VULKAN_HPP_NOEXCEPT
{
ycbcr2plane444Formats = ycbcr2plane444Formats_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -71544,7 +74747,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ycbcr2plane444Formats == rhs.ycbcr2plane444Formats );
@@ -71558,19 +74761,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT ) ==
- sizeof( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT>::value,
- "PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT>
@@ -71583,29 +74777,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceYcbcrImageArraysFeaturesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT(
- VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ = {} ) VULKAN_HPP_NOEXCEPT : ycbcrImageArrays( ycbcrImageArrays_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , ycbcrImageArrays( ycbcrImageArrays_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT(
- PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceYcbcrImageArraysFeaturesEXT( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceYcbcrImageArraysFeaturesEXT(
- *reinterpret_cast<PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>( &rhs ) )
- {}
+ PhysicalDeviceYcbcrImageArraysFeaturesEXT( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceYcbcrImageArraysFeaturesEXT( *reinterpret_cast<PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PhysicalDeviceYcbcrImageArraysFeaturesEXT &
- operator=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceYcbcrImageArraysFeaturesEXT &
- operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>( &rhs );
return *this;
@@ -71626,17 +74818,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *>( this );
}
- explicit operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -71653,7 +74845,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ycbcrImageArrays == rhs.ycbcrImageArrays );
@@ -71671,15 +74863,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT ) ==
- sizeof( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT>::value,
- "PhysicalDeviceYcbcrImageArraysFeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT>
@@ -71692,64 +74875,60 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures(
- VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ = {} ) VULKAN_HPP_NOEXCEPT
- : shaderZeroInitializeWorkgroupMemory( shaderZeroInitializeWorkgroupMemory_ )
- {}
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shaderZeroInitializeWorkgroupMemory( shaderZeroInitializeWorkgroupMemory_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures(
- PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures(
- VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- : PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures(
- *reinterpret_cast<PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const *>( &rhs ) )
- {}
+ PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( *reinterpret_cast<PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures &
operator=( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures &
- operator=( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & operator=( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures &
- setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures &
- setShaderZeroInitializeWorkgroupMemory( VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ )
- VULKAN_HPP_NOEXCEPT
+ setShaderZeroInitializeWorkgroupMemory( VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ ) VULKAN_HPP_NOEXCEPT
{
shaderZeroInitializeWorkgroupMemory = shaderZeroInitializeWorkgroupMemory_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures *>( this );
}
- explicit operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -71766,11 +74945,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( shaderZeroInitializeWorkgroupMemory == rhs.shaderZeroInitializeWorkgroupMemory );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderZeroInitializeWorkgroupMemory == rhs.shaderZeroInitializeWorkgroupMemory );
# endif
}
@@ -71781,19 +74959,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures ) ==
- sizeof( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<
- VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures>::value,
- "PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures>
@@ -71812,24 +74981,30 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ = {},
size_t initialDataSize_ = {},
- const void * pInitialData_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ const void * pInitialData_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, initialDataSize( initialDataSize_ )
, pInitialData( pInitialData_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineCacheCreateInfo( *reinterpret_cast<PipelineCacheCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ )
- : flags( flags_ ), initialDataSize( initialData_.size() * sizeof( T ) ), pInitialData( initialData_.data() )
- {}
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ ), flags( flags_ ), initialDataSize( initialData_.size() * sizeof( T ) ), pInitialData( initialData_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -71848,8 +75023,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -71869,8 +75043,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
- PipelineCacheCreateInfo &
- setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ ) VULKAN_HPP_NOEXCEPT
+ PipelineCacheCreateInfo & setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ ) VULKAN_HPP_NOEXCEPT
{
initialDataSize = initialData_.size() * sizeof( T );
pInitialData = initialData_.data();
@@ -71879,17 +75052,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineCacheCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineCacheCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineCacheCreateInfo *>( this );
}
- explicit operator VkPipelineCacheCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineCacheCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineCacheCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -71910,11 +75083,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineCacheCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( initialDataSize == rhs.initialDataSize ) && ( pInitialData == rhs.pInitialData );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( initialDataSize == rhs.initialDataSize ) &&
+ ( pInitialData == rhs.pInitialData );
# endif
}
@@ -71931,13 +75104,6 @@ namespace VULKAN_HPP_NAMESPACE
size_t initialDataSize = {};
const void * pInitialData = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo ) ==
- sizeof( VkPipelineCacheCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo>::value,
- "PipelineCacheCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineCacheCreateInfo>
@@ -71950,30 +75116,29 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPipelineCacheHeaderVersionOne;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne(
- uint32_t headerSize_ = {},
- VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion_ =
- VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion::eOne,
- uint32_t vendorID_ = {},
- uint32_t deviceID_ = {},
- std::array<uint8_t, VK_UUID_SIZE> const & pipelineCacheUUID_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14
+ PipelineCacheHeaderVersionOne( uint32_t headerSize_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion_ = VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion::eOne,
+ uint32_t vendorID_ = {},
+ uint32_t deviceID_ = {},
+ std::array<uint8_t, VK_UUID_SIZE> const & pipelineCacheUUID_ = {} ) VULKAN_HPP_NOEXCEPT
: headerSize( headerSize_ )
, headerVersion( headerVersion_ )
, vendorID( vendorID_ )
, deviceID( deviceID_ )
, pipelineCacheUUID( pipelineCacheUUID_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14
- PipelineCacheHeaderVersionOne( PipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne( PipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineCacheHeaderVersionOne( VkPipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineCacheHeaderVersionOne( *reinterpret_cast<PipelineCacheHeaderVersionOne const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineCacheHeaderVersionOne &
- operator=( PipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineCacheHeaderVersionOne & operator=( PipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineCacheHeaderVersionOne & operator=( VkPipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -71989,7 +75154,7 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne &
- setHeaderVersion( VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion_ ) VULKAN_HPP_NOEXCEPT
+ setHeaderVersion( VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion_ ) VULKAN_HPP_NOEXCEPT
{
headerVersion = headerVersion_;
return *this;
@@ -72007,25 +75172,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne &
- setPipelineCacheUUID( std::array<uint8_t, VK_UUID_SIZE> pipelineCacheUUID_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setPipelineCacheUUID( std::array<uint8_t, VK_UUID_SIZE> pipelineCacheUUID_ ) VULKAN_HPP_NOEXCEPT
{
pipelineCacheUUID = pipelineCacheUUID_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineCacheHeaderVersionOne const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineCacheHeaderVersionOne const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineCacheHeaderVersionOne *>( this );
}
- explicit operator VkPipelineCacheHeaderVersionOne &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineCacheHeaderVersionOne &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineCacheHeaderVersionOne *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -72046,11 +75210,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineCacheHeaderVersionOne const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( headerSize == rhs.headerSize ) && ( headerVersion == rhs.headerVersion ) &&
- ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) &&
+ return ( headerSize == rhs.headerSize ) && ( headerVersion == rhs.headerVersion ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) &&
( pipelineCacheUUID == rhs.pipelineCacheUUID );
# endif
}
@@ -72062,64 +75225,51 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- uint32_t headerSize = {};
- VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion =
- VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion::eOne;
+ uint32_t headerSize = {};
+ VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion = VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion::eOne;
uint32_t vendorID = {};
uint32_t deviceID = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> pipelineCacheUUID = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne ) ==
- sizeof( VkPipelineCacheHeaderVersionOne ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne>::value,
- "PipelineCacheHeaderVersionOne is not nothrow_move_constructible!" );
struct PipelineColorBlendAdvancedStateCreateInfoEXT
{
using NativeType = VkPipelineColorBlendAdvancedStateCreateInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT(
- VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {},
- VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated )
- VULKAN_HPP_NOEXCEPT
- : srcPremultiplied( srcPremultiplied_ )
+ VULKAN_HPP_CONSTEXPR
+ PipelineColorBlendAdvancedStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {},
+ VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , srcPremultiplied( srcPremultiplied_ )
, dstPremultiplied( dstPremultiplied_ )
, blendOverlap( blendOverlap_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT(
- PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PipelineColorBlendAdvancedStateCreateInfoEXT(
- *reinterpret_cast<PipelineColorBlendAdvancedStateCreateInfoEXT const *>( &rhs ) )
- {}
+ PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineColorBlendAdvancedStateCreateInfoEXT( *reinterpret_cast<PipelineColorBlendAdvancedStateCreateInfoEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineColorBlendAdvancedStateCreateInfoEXT &
- operator=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineColorBlendAdvancedStateCreateInfoEXT &
- operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
@@ -72147,17 +75297,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineColorBlendAdvancedStateCreateInfoEXT *>( this );
}
- explicit operator VkPipelineColorBlendAdvancedStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineColorBlendAdvancedStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineColorBlendAdvancedStateCreateInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -72178,11 +75328,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcPremultiplied == rhs.srcPremultiplied ) &&
- ( dstPremultiplied == rhs.dstPremultiplied ) && ( blendOverlap == rhs.blendOverlap );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcPremultiplied == rhs.srcPremultiplied ) && ( dstPremultiplied == rhs.dstPremultiplied ) &&
+ ( blendOverlap == rhs.blendOverlap );
# endif
}
@@ -72193,21 +75343,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT;
+ const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied = {};
VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied = {};
VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT ) ==
- sizeof( VkPipelineColorBlendAdvancedStateCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT>::value,
- "PipelineColorBlendAdvancedStateCreateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT>
@@ -72223,31 +75364,32 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorWriteCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineColorWriteCreateInfoEXT(
- uint32_t attachmentCount_ = {},
- const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables_ = {} ) VULKAN_HPP_NOEXCEPT
- : attachmentCount( attachmentCount_ )
+ VULKAN_HPP_CONSTEXPR PipelineColorWriteCreateInfoEXT( uint32_t attachmentCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , attachmentCount( attachmentCount_ )
, pColorWriteEnables( pColorWriteEnables_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PipelineColorWriteCreateInfoEXT( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PipelineColorWriteCreateInfoEXT( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineColorWriteCreateInfoEXT( VkPipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineColorWriteCreateInfoEXT( *reinterpret_cast<PipelineColorWriteCreateInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PipelineColorWriteCreateInfoEXT(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables_ )
- : attachmentCount( static_cast<uint32_t>( colorWriteEnables_.size() ) )
- , pColorWriteEnables( colorWriteEnables_.data() )
- {}
+ PipelineColorWriteCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ ), attachmentCount( static_cast<uint32_t>( colorWriteEnables_.size() ) ), pColorWriteEnables( colorWriteEnables_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineColorWriteCreateInfoEXT &
- operator=( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineColorWriteCreateInfoEXT & operator=( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineColorWriteCreateInfoEXT & operator=( VkPipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -72262,24 +75404,22 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT &
- setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
attachmentCount = attachmentCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT &
- setPColorWriteEnables( const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables_ ) VULKAN_HPP_NOEXCEPT
+ setPColorWriteEnables( const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables_ ) VULKAN_HPP_NOEXCEPT
{
pColorWriteEnables = pColorWriteEnables_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PipelineColorWriteCreateInfoEXT & setColorWriteEnables(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables_ )
- VULKAN_HPP_NOEXCEPT
+ PipelineColorWriteCreateInfoEXT &
+ setColorWriteEnables( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables_ ) VULKAN_HPP_NOEXCEPT
{
attachmentCount = static_cast<uint32_t>( colorWriteEnables_.size() );
pColorWriteEnables = colorWriteEnables_.data();
@@ -72288,24 +75428,21 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineColorWriteCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineColorWriteCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineColorWriteCreateInfoEXT *>( this );
}
- explicit operator VkPipelineColorWriteCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineColorWriteCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineColorWriteCreateInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- uint32_t const &,
- const VULKAN_HPP_NAMESPACE::Bool32 * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Bool32 * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -72318,11 +75455,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineColorWriteCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentCount == rhs.attachmentCount ) &&
- ( pColorWriteEnables == rhs.pColorWriteEnables );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentCount == rhs.attachmentCount ) && ( pColorWriteEnables == rhs.pColorWriteEnables );
# endif
}
@@ -72338,14 +75474,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t attachmentCount = {};
const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT ) ==
- sizeof( VkPipelineColorWriteCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT>::value,
- "PipelineColorWriteCreateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineColorWriteCreateInfoEXT>
@@ -72358,28 +75486,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPipelineCompilerControlCreateInfoAMD;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineCompilerControlCreateInfoAMD;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCompilerControlCreateInfoAMD;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD(
- VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ = {} ) VULKAN_HPP_NOEXCEPT
- : compilerControlFlags( compilerControlFlags_ )
- {}
+ VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , compilerControlFlags( compilerControlFlags_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( PipelineCompilerControlCreateInfoAMD const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineCompilerControlCreateInfoAMD( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineCompilerControlCreateInfoAMD( *reinterpret_cast<PipelineCompilerControlCreateInfoAMD const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineCompilerControlCreateInfoAMD &
- operator=( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineCompilerControlCreateInfoAMD & operator=( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineCompilerControlCreateInfoAMD &
- operator=( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineCompilerControlCreateInfoAMD & operator=( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD const *>( &rhs );
return *this;
@@ -72392,31 +75519,29 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineCompilerControlCreateInfoAMD & setCompilerControlFlags(
- VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineCompilerControlCreateInfoAMD &
+ setCompilerControlFlags( VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ ) VULKAN_HPP_NOEXCEPT
{
compilerControlFlags = compilerControlFlags_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineCompilerControlCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineCompilerControlCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineCompilerControlCreateInfoAMD *>( this );
}
- explicit operator VkPipelineCompilerControlCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineCompilerControlCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineCompilerControlCreateInfoAMD *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -72429,7 +75554,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineCompilerControlCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compilerControlFlags == rhs.compilerControlFlags );
@@ -72443,18 +75568,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCompilerControlCreateInfoAMD;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCompilerControlCreateInfoAMD;
+ const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD ) ==
- sizeof( VkPipelineCompilerControlCreateInfoAMD ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD>::value,
- "PipelineCompilerControlCreateInfoAMD is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineCompilerControlCreateInfoAMD>
@@ -72467,89 +75584,87 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPipelineCoverageModulationStateCreateInfoNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineCoverageModulationStateCreateInfoNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageModulationStateCreateInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV(
- VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ = {},
- VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ =
- VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone,
- VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ = {},
- uint32_t coverageModulationTableCount_ = {},
- const float * pCoverageModulationTable_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ = {},
+ VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone,
+ VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ = {},
+ uint32_t coverageModulationTableCount_ = {},
+ const float * pCoverageModulationTable_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, coverageModulationMode( coverageModulationMode_ )
, coverageModulationTableEnable( coverageModulationTableEnable_ )
, coverageModulationTableCount( coverageModulationTableCount_ )
, pCoverageModulationTable( pCoverageModulationTable_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV(
- PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PipelineCoverageModulationStateCreateInfoNV(
- *reinterpret_cast<PipelineCoverageModulationStateCreateInfoNV const *>( &rhs ) )
- {}
+ PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineCoverageModulationStateCreateInfoNV( *reinterpret_cast<PipelineCoverageModulationStateCreateInfoNV const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PipelineCoverageModulationStateCreateInfoNV(
- VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_,
- VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_,
- VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & coverageModulationTable_ )
- : flags( flags_ )
+ PipelineCoverageModulationStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_,
+ VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_,
+ VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & coverageModulationTable_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, coverageModulationMode( coverageModulationMode_ )
, coverageModulationTableEnable( coverageModulationTableEnable_ )
, coverageModulationTableCount( static_cast<uint32_t>( coverageModulationTable_.size() ) )
, pCoverageModulationTable( coverageModulationTable_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineCoverageModulationStateCreateInfoNV &
- operator=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineCoverageModulationStateCreateInfoNV & operator=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineCoverageModulationStateCreateInfoNV &
- operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineCoverageModulationStateCreateInfoNV & operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV &
- setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+ setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationMode(
- VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV &
+ setCoverageModulationMode( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ ) VULKAN_HPP_NOEXCEPT
{
coverageModulationMode = coverageModulationMode_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableEnable(
- VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV &
+ setCoverageModulationTableEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT
{
coverageModulationTableEnable = coverageModulationTableEnable_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV &
- setCoverageModulationTableCount( uint32_t coverageModulationTableCount_ ) VULKAN_HPP_NOEXCEPT
+ setCoverageModulationTableCount( uint32_t coverageModulationTableCount_ ) VULKAN_HPP_NOEXCEPT
{
coverageModulationTableCount = coverageModulationTableCount_;
return *this;
@@ -72563,8 +75678,8 @@ namespace VULKAN_HPP_NAMESPACE
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTable(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & coverageModulationTable_ ) VULKAN_HPP_NOEXCEPT
+ PipelineCoverageModulationStateCreateInfoNV &
+ setCoverageModulationTable( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & coverageModulationTable_ ) VULKAN_HPP_NOEXCEPT
{
coverageModulationTableCount = static_cast<uint32_t>( coverageModulationTable_.size() );
pCoverageModulationTable = coverageModulationTable_.data();
@@ -72573,17 +75688,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineCoverageModulationStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineCoverageModulationStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineCoverageModulationStateCreateInfoNV *>( this );
}
- explicit operator VkPipelineCoverageModulationStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineCoverageModulationStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineCoverageModulationStateCreateInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -72597,13 +75712,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- flags,
- coverageModulationMode,
- coverageModulationTableEnable,
- coverageModulationTableCount,
- pCoverageModulationTable );
+ return std::tie( sType, pNext, flags, coverageModulationMode, coverageModulationTableEnable, coverageModulationTableCount, pCoverageModulationTable );
}
#endif
@@ -72612,13 +75721,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineCoverageModulationStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( coverageModulationMode == rhs.coverageModulationMode ) &&
- ( coverageModulationTableEnable == rhs.coverageModulationTableEnable ) &&
- ( coverageModulationTableCount == rhs.coverageModulationTableCount ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( coverageModulationMode == rhs.coverageModulationMode ) &&
+ ( coverageModulationTableEnable == rhs.coverageModulationTableEnable ) && ( coverageModulationTableCount == rhs.coverageModulationTableCount ) &&
( pCoverageModulationTable == rhs.pCoverageModulationTable );
# endif
}
@@ -72630,24 +75737,14 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageModulationStateCreateInfoNV;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags = {};
- VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode =
- VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone;
- VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable = {};
- uint32_t coverageModulationTableCount = {};
- const float * pCoverageModulationTable = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV ) ==
- sizeof( VkPipelineCoverageModulationStateCreateInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV>::value,
- "PipelineCoverageModulationStateCreateInfoNV is not nothrow_move_constructible!" );
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageModulationStateCreateInfoNV;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags = {};
+ VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone;
+ VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable = {};
+ uint32_t coverageModulationTableCount = {};
+ const float * pCoverageModulationTable = {};
+ };
template <>
struct CppType<StructureType, StructureType::ePipelineCoverageModulationStateCreateInfoNV>
@@ -72660,72 +75757,68 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPipelineCoverageReductionStateCreateInfoNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineCoverageReductionStateCreateInfoNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageReductionStateCreateInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV(
- VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ = {},
- VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ =
- VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ = {},
+ VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, coverageReductionMode( coverageReductionMode_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV(
- PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineCoverageReductionStateCreateInfoNV( VkPipelineCoverageReductionStateCreateInfoNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PipelineCoverageReductionStateCreateInfoNV(
- *reinterpret_cast<PipelineCoverageReductionStateCreateInfoNV const *>( &rhs ) )
- {}
+ PipelineCoverageReductionStateCreateInfoNV( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineCoverageReductionStateCreateInfoNV( *reinterpret_cast<PipelineCoverageReductionStateCreateInfoNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineCoverageReductionStateCreateInfoNV &
- operator=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineCoverageReductionStateCreateInfoNV & operator=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineCoverageReductionStateCreateInfoNV &
- operator=( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineCoverageReductionStateCreateInfoNV & operator=( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV &
- setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+ setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & setCoverageReductionMode(
- VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV &
+ setCoverageReductionMode( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
{
coverageReductionMode = coverageReductionMode_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineCoverageReductionStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineCoverageReductionStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineCoverageReductionStateCreateInfoNV *>( this );
}
- explicit operator VkPipelineCoverageReductionStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineCoverageReductionStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineCoverageReductionStateCreateInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -72745,11 +75838,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineCoverageReductionStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( coverageReductionMode == rhs.coverageReductionMode );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( coverageReductionMode == rhs.coverageReductionMode );
# endif
}
@@ -72760,21 +75852,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageReductionStateCreateInfoNV;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags = {};
- VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode =
- VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageReductionStateCreateInfoNV;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags = {};
+ VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV ) ==
- sizeof( VkPipelineCoverageReductionStateCreateInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV>::value,
- "PipelineCoverageReductionStateCreateInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineCoverageReductionStateCreateInfoNV>
@@ -72787,80 +75869,75 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPipelineCoverageToColorStateCreateInfoNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineCoverageToColorStateCreateInfoNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageToColorStateCreateInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV(
- VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ = {},
- uint32_t coverageToColorLocation_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ = {},
+ uint32_t coverageToColorLocation_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, coverageToColorEnable( coverageToColorEnable_ )
, coverageToColorLocation( coverageToColorLocation_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV(
- PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PipelineCoverageToColorStateCreateInfoNV(
- *reinterpret_cast<PipelineCoverageToColorStateCreateInfoNV const *>( &rhs ) )
- {}
+ PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineCoverageToColorStateCreateInfoNV( *reinterpret_cast<PipelineCoverageToColorStateCreateInfoNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineCoverageToColorStateCreateInfoNV &
- operator=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineCoverageToColorStateCreateInfoNV & operator=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineCoverageToColorStateCreateInfoNV &
- operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineCoverageToColorStateCreateInfoNV & operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV &
- setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+ setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV &
- setCoverageToColorEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT
+ setCoverageToColorEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT
{
coverageToColorEnable = coverageToColorEnable_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV &
- setCoverageToColorLocation( uint32_t coverageToColorLocation_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & setCoverageToColorLocation( uint32_t coverageToColorLocation_ ) VULKAN_HPP_NOEXCEPT
{
coverageToColorLocation = coverageToColorLocation_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineCoverageToColorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineCoverageToColorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineCoverageToColorStateCreateInfoNV *>( this );
}
- explicit operator VkPipelineCoverageToColorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineCoverageToColorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineCoverageToColorStateCreateInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -72881,11 +75958,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineCoverageToColorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( coverageToColorEnable == rhs.coverageToColorEnable ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( coverageToColorEnable == rhs.coverageToColorEnable ) &&
( coverageToColorLocation == rhs.coverageToColorLocation );
# endif
}
@@ -72897,21 +75973,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageToColorStateCreateInfoNV;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageToColorStateCreateInfoNV;
+ const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags = {};
VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable = {};
uint32_t coverageToColorLocation = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV ) ==
- sizeof( VkPipelineCoverageToColorStateCreateInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV>::value,
- "PipelineCoverageToColorStateCreateInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineCoverageToColorStateCreateInfoNV>
@@ -72924,17 +75991,19 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPipelineCreationFeedback;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineCreationFeedback( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags flags_ = {},
- uint64_t duration_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PipelineCreationFeedback( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags flags_ = {},
+ uint64_t duration_ = {} ) VULKAN_HPP_NOEXCEPT
: flags( flags_ )
, duration( duration_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR PipelineCreationFeedback( PipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineCreationFeedback( VkPipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineCreationFeedback( *reinterpret_cast<PipelineCreationFeedback const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PipelineCreationFeedback & operator=( PipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -72945,17 +76014,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkPipelineCreationFeedback const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineCreationFeedback const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineCreationFeedback *>( this );
}
- explicit operator VkPipelineCreationFeedback &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineCreationFeedback &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineCreationFeedback *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -72972,7 +76041,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineCreationFeedback const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( flags == rhs.flags ) && ( duration == rhs.duration );
@@ -72989,13 +76058,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags flags = {};
uint64_t duration = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback ) ==
- sizeof( VkPipelineCreationFeedback ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback>::value,
- "PipelineCreationFeedback is not nothrow_move_constructible!" );
using PipelineCreationFeedbackEXT = PipelineCreationFeedback;
struct PipelineCreationFeedbackCreateInfo
@@ -73003,43 +76065,44 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPipelineCreationFeedbackCreateInfo;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineCreationFeedbackCreateInfo;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCreationFeedbackCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfo(
- VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_ = {},
- uint32_t pipelineStageCreationFeedbackCount_ = {},
- VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks_ = {} ) VULKAN_HPP_NOEXCEPT
- : pPipelineCreationFeedback( pPipelineCreationFeedback_ )
+ VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_ = {},
+ uint32_t pipelineStageCreationFeedbackCount_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pPipelineCreationFeedback( pPipelineCreationFeedback_ )
, pipelineStageCreationFeedbackCount( pipelineStageCreationFeedbackCount_ )
, pPipelineStageCreationFeedbacks( pPipelineStageCreationFeedbacks_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfo( PipelineCreationFeedbackCreateInfo const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfo( PipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineCreationFeedbackCreateInfo( VkPipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineCreationFeedbackCreateInfo( *reinterpret_cast<PipelineCreationFeedbackCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineCreationFeedbackCreateInfo(
- VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback> const &
- pipelineStageCreationFeedbacks_ )
- : pPipelineCreationFeedback( pPipelineCreationFeedback_ )
+ VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback> const & pipelineStageCreationFeedbacks_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , pPipelineCreationFeedback( pPipelineCreationFeedback_ )
, pipelineStageCreationFeedbackCount( static_cast<uint32_t>( pipelineStageCreationFeedbacks_.size() ) )
, pPipelineStageCreationFeedbacks( pipelineStageCreationFeedbacks_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineCreationFeedbackCreateInfo &
- operator=( PipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineCreationFeedbackCreateInfo & operator=( PipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineCreationFeedbackCreateInfo &
- operator=( VkPipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineCreationFeedbackCreateInfo & operator=( VkPipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo const *>( &rhs );
return *this;
@@ -73052,22 +76115,22 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & setPPipelineCreationFeedback(
- VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo &
+ setPPipelineCreationFeedback( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_ ) VULKAN_HPP_NOEXCEPT
{
pPipelineCreationFeedback = pPipelineCreationFeedback_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo &
- setPipelineStageCreationFeedbackCount( uint32_t pipelineStageCreationFeedbackCount_ ) VULKAN_HPP_NOEXCEPT
+ setPipelineStageCreationFeedbackCount( uint32_t pipelineStageCreationFeedbackCount_ ) VULKAN_HPP_NOEXCEPT
{
pipelineStageCreationFeedbackCount = pipelineStageCreationFeedbackCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & setPPipelineStageCreationFeedbacks(
- VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo &
+ setPPipelineStageCreationFeedbacks( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT
{
pPipelineStageCreationFeedbacks = pPipelineStageCreationFeedbacks_;
return *this;
@@ -73075,8 +76138,8 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineCreationFeedbackCreateInfo & setPipelineStageCreationFeedbacks(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback> const &
- pipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback> const & pipelineStageCreationFeedbacks_ )
+ VULKAN_HPP_NOEXCEPT
{
pipelineStageCreationFeedbackCount = static_cast<uint32_t>( pipelineStageCreationFeedbacks_.size() );
pPipelineStageCreationFeedbacks = pipelineStageCreationFeedbacks_.data();
@@ -73085,17 +76148,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineCreationFeedbackCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineCreationFeedbackCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineCreationFeedbackCreateInfo *>( this );
}
- explicit operator VkPipelineCreationFeedbackCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineCreationFeedbackCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineCreationFeedbackCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -73107,8 +76170,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- sType, pNext, pPipelineCreationFeedback, pipelineStageCreationFeedbackCount, pPipelineStageCreationFeedbacks );
+ return std::tie( sType, pNext, pPipelineCreationFeedback, pipelineStageCreationFeedbackCount, pPipelineStageCreationFeedbacks );
}
#endif
@@ -73117,11 +76179,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineCreationFeedbackCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( pPipelineCreationFeedback == rhs.pPipelineCreationFeedback ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pPipelineCreationFeedback == rhs.pPipelineCreationFeedback ) &&
( pipelineStageCreationFeedbackCount == rhs.pipelineStageCreationFeedbackCount ) &&
( pPipelineStageCreationFeedbacks == rhs.pPipelineStageCreationFeedbacks );
# endif
@@ -73134,20 +76195,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreationFeedbackCreateInfo;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreationFeedbackCreateInfo;
+ const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback = {};
uint32_t pipelineStageCreationFeedbackCount = {};
VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo ) ==
- sizeof( VkPipelineCreationFeedbackCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo>::value,
- "PipelineCreationFeedbackCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineCreationFeedbackCreateInfo>
@@ -73161,94 +76214,90 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPipelineDiscardRectangleStateCreateInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT(
- VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ = {},
- VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ =
- VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive,
- uint32_t discardRectangleCount_ = {},
- const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ = {},
+ VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive,
+ uint32_t discardRectangleCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, discardRectangleMode( discardRectangleMode_ )
, discardRectangleCount( discardRectangleCount_ )
, pDiscardRectangles( pDiscardRectangles_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT(
- PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PipelineDiscardRectangleStateCreateInfoEXT(
- *reinterpret_cast<PipelineDiscardRectangleStateCreateInfoEXT const *>( &rhs ) )
- {}
+ PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineDiscardRectangleStateCreateInfoEXT( *reinterpret_cast<PipelineDiscardRectangleStateCreateInfoEXT const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PipelineDiscardRectangleStateCreateInfoEXT(
- VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_,
- VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles_ )
- : flags( flags_ )
+ PipelineDiscardRectangleStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_,
+ VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, discardRectangleMode( discardRectangleMode_ )
, discardRectangleCount( static_cast<uint32_t>( discardRectangles_.size() ) )
, pDiscardRectangles( discardRectangles_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineDiscardRectangleStateCreateInfoEXT &
- operator=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineDiscardRectangleStateCreateInfoEXT & operator=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineDiscardRectangleStateCreateInfoEXT &
- operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineDiscardRectangleStateCreateInfoEXT & operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT &
- setFlags( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ setFlags( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT &
- setDiscardRectangleMode( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ ) VULKAN_HPP_NOEXCEPT
+ setDiscardRectangleMode( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ ) VULKAN_HPP_NOEXCEPT
{
discardRectangleMode = discardRectangleMode_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT &
- setDiscardRectangleCount( uint32_t discardRectangleCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangleCount( uint32_t discardRectangleCount_ ) VULKAN_HPP_NOEXCEPT
{
discardRectangleCount = discardRectangleCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT &
- setPDiscardRectangles( const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_ ) VULKAN_HPP_NOEXCEPT
+ setPDiscardRectangles( const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_ ) VULKAN_HPP_NOEXCEPT
{
pDiscardRectangles = pDiscardRectangles_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangles(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles_ )
- VULKAN_HPP_NOEXCEPT
+ PipelineDiscardRectangleStateCreateInfoEXT &
+ setDiscardRectangles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles_ ) VULKAN_HPP_NOEXCEPT
{
discardRectangleCount = static_cast<uint32_t>( discardRectangles_.size() );
pDiscardRectangles = discardRectangles_.data();
@@ -73257,17 +76306,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineDiscardRectangleStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineDiscardRectangleStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineDiscardRectangleStateCreateInfoEXT *>( this );
}
- explicit operator VkPipelineDiscardRectangleStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineDiscardRectangleStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineDiscardRectangleStateCreateInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -73289,11 +76338,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( discardRectangleMode == rhs.discardRectangleMode ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( discardRectangleMode == rhs.discardRectangleMode ) &&
( discardRectangleCount == rhs.discardRectangleCount ) && ( pDiscardRectangles == rhs.pDiscardRectangles );
# endif
}
@@ -73305,23 +76353,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags = {};
- VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode =
- VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive;
- uint32_t discardRectangleCount = {};
- const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT ) ==
- sizeof( VkPipelineDiscardRectangleStateCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT>::value,
- "PipelineDiscardRectangleStateCreateInfoEXT is not nothrow_move_constructible!" );
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags = {};
+ VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive;
+ uint32_t discardRectangleCount = {};
+ const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles = {};
+ };
template <>
struct CppType<StructureType, StructureType::ePipelineDiscardRectangleStateCreateInfoEXT>
@@ -73337,18 +76375,21 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {},
- uint32_t executableIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : pipeline( pipeline_ )
+ VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {},
+ uint32_t executableIndex_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pipeline( pipeline_ )
, executableIndex( executableIndex_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PipelineExecutableInfoKHR( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineExecutableInfoKHR( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineExecutableInfoKHR( *reinterpret_cast<PipelineExecutableInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PipelineExecutableInfoKHR & operator=( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -73366,39 +76407,34 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR &
- setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
{
pipeline = pipeline_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR &
- setExecutableIndex( uint32_t executableIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & setExecutableIndex( uint32_t executableIndex_ ) VULKAN_HPP_NOEXCEPT
{
executableIndex = executableIndex_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineExecutableInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineExecutableInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineExecutableInfoKHR *>( this );
}
- explicit operator VkPipelineExecutableInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineExecutableInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineExecutableInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::Pipeline const &,
- uint32_t const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Pipeline const &, uint32_t const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -73411,11 +76447,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineExecutableInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline ) &&
- ( executableIndex == rhs.executableIndex );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline ) && ( executableIndex == rhs.executableIndex );
# endif
}
@@ -73431,13 +76466,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
uint32_t executableIndex = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR ) ==
- sizeof( VkPipelineExecutableInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR>::value,
- "PipelineExecutableInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineExecutableInfoKHR>
@@ -73450,68 +76478,64 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPipelineExecutableInternalRepresentationKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineExecutableInternalRepresentationKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInternalRepresentationKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14
- PipelineExecutableInternalRepresentationKHR( std::array<char, VK_MAX_DESCRIPTION_SIZE> const & name_ = {},
- std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 isText_ = {},
- size_t dataSize_ = {},
- void * pData_ = {} ) VULKAN_HPP_NOEXCEPT
- : name( name_ )
+ VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR( std::array<char, VK_MAX_DESCRIPTION_SIZE> const & name_ = {},
+ std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 isText_ = {},
+ size_t dataSize_ = {},
+ void * pData_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , name( name_ )
, description( description_ )
, isText( isText_ )
, dataSize( dataSize_ )
, pData( pData_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR(
- PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14
+ PipelineExecutableInternalRepresentationKHR( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineExecutableInternalRepresentationKHR( VkPipelineExecutableInternalRepresentationKHR const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PipelineExecutableInternalRepresentationKHR(
- *reinterpret_cast<PipelineExecutableInternalRepresentationKHR const *>( &rhs ) )
- {}
+ PipelineExecutableInternalRepresentationKHR( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineExecutableInternalRepresentationKHR( *reinterpret_cast<PipelineExecutableInternalRepresentationKHR const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
PipelineExecutableInternalRepresentationKHR( std::array<char, VK_MAX_DESCRIPTION_SIZE> const & name_,
std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_,
VULKAN_HPP_NAMESPACE::Bool32 isText_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & data_ )
- : name( name_ )
- , description( description_ )
- , isText( isText_ )
- , dataSize( data_.size() * sizeof( T ) )
- , pData( data_.data() )
- {}
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & data_,
+ void * pNext_ = nullptr )
+ : pNext( pNext_ ), name( name_ ), description( description_ ), isText( isText_ ), dataSize( data_.size() * sizeof( T ) ), pData( data_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineExecutableInternalRepresentationKHR &
- operator=( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineExecutableInternalRepresentationKHR & operator=( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineExecutableInternalRepresentationKHR &
- operator=( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineExecutableInternalRepresentationKHR & operator=( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR const *>( &rhs );
return *this;
}
- explicit operator VkPipelineExecutableInternalRepresentationKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineExecutableInternalRepresentationKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineExecutableInternalRepresentationKHR *>( this );
}
- explicit operator VkPipelineExecutableInternalRepresentationKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineExecutableInternalRepresentationKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -73534,12 +76558,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( name == rhs.name ) &&
- ( description == rhs.description ) && ( isText == rhs.isText ) && ( dataSize == rhs.dataSize ) &&
- ( pData == rhs.pData );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( name == rhs.name ) && ( description == rhs.description ) && ( isText == rhs.isText ) &&
+ ( dataSize == rhs.dataSize ) && ( pData == rhs.pData );
# endif
}
@@ -73550,23 +76573,14 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInternalRepresentationKHR;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInternalRepresentationKHR;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
VULKAN_HPP_NAMESPACE::Bool32 isText = {};
size_t dataSize = {};
void * pData = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR ) ==
- sizeof( VkPipelineExecutableInternalRepresentationKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>::value,
- "PipelineExecutableInternalRepresentationKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineExecutableInternalRepresentationKHR>
@@ -73582,27 +76596,28 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutablePropertiesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14
- PipelineExecutablePropertiesKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags stages_ = {},
- std::array<char, VK_MAX_DESCRIPTION_SIZE> const & name_ = {},
- std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
- uint32_t subgroupSize_ = {} ) VULKAN_HPP_NOEXCEPT
- : stages( stages_ )
+ VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags stages_ = {},
+ std::array<char, VK_MAX_DESCRIPTION_SIZE> const & name_ = {},
+ std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
+ uint32_t subgroupSize_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , stages( stages_ )
, name( name_ )
, description( description_ )
, subgroupSize( subgroupSize_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14
- PipelineExecutablePropertiesKHR( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineExecutablePropertiesKHR( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineExecutablePropertiesKHR( *reinterpret_cast<PipelineExecutablePropertiesKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineExecutablePropertiesKHR &
- operator=( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineExecutablePropertiesKHR & operator=( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineExecutablePropertiesKHR & operator=( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -73610,17 +76625,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkPipelineExecutablePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineExecutablePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineExecutablePropertiesKHR *>( this );
}
- explicit operator VkPipelineExecutablePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineExecutablePropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -73642,11 +76657,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stages == rhs.stages ) && ( name == rhs.name ) &&
- ( description == rhs.description ) && ( subgroupSize == rhs.subgroupSize );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stages == rhs.stages ) && ( name == rhs.name ) && ( description == rhs.description ) &&
+ ( subgroupSize == rhs.subgroupSize );
# endif
}
@@ -73657,21 +76672,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutablePropertiesKHR;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::ShaderStageFlags stages = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutablePropertiesKHR;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags stages = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
uint32_t subgroupSize = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR ) ==
- sizeof( VkPipelineExecutablePropertiesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>::value,
- "PipelineExecutablePropertiesKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineExecutablePropertiesKHR>
@@ -73684,8 +76691,7 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPipelineExecutableStatisticValueKHR;
#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::Bool32 b32_ = {} ) : b32( b32_ )
- {}
+ VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::Bool32 b32_ = {} ) : b32( b32_ ) {}
VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( int64_t i64_ ) : i64( i64_ ) {}
@@ -73695,8 +76701,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR &
- setB32( VULKAN_HPP_NAMESPACE::Bool32 b32_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setB32( VULKAN_HPP_NAMESPACE::Bool32 b32_ ) VULKAN_HPP_NOEXCEPT
{
b32 = b32_;
return *this;
@@ -73755,25 +76760,26 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticKHR(
std::array<char, VK_MAX_DESCRIPTION_SIZE> const & name_ = {},
std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
- VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format_ =
- VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32,
- VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value_ = {} ) VULKAN_HPP_NOEXCEPT
- : name( name_ )
+ VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format_ = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32,
+ VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , name( name_ )
, description( description_ )
, format( format_ )
, value( value_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14
- PipelineExecutableStatisticKHR( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticKHR( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineExecutableStatisticKHR( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineExecutableStatisticKHR( *reinterpret_cast<PipelineExecutableStatisticKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineExecutableStatisticKHR &
- operator=( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineExecutableStatisticKHR & operator=( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineExecutableStatisticKHR & operator=( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -73781,17 +76787,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkPipelineExecutableStatisticKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineExecutableStatisticKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineExecutableStatisticKHR *>( this );
}
- explicit operator VkPipelineExecutableStatisticKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineExecutableStatisticKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineExecutableStatisticKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -73809,22 +76815,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableStatisticKHR;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableStatisticKHR;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
- VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format =
- VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32;
- VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value = {};
+ VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32;
+ VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR ) ==
- sizeof( VkPipelineExecutableStatisticKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR>::value,
- "PipelineExecutableStatisticKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineExecutableStatisticKHR>
@@ -73837,84 +76834,79 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPipelineFragmentShadingRateEnumStateCreateInfoNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV(
- VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_ =
- VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize,
- VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_ =
- VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel,
+ VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_ = VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize,
+ VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_ = VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel,
std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> const &
- combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep,
- VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } } ) VULKAN_HPP_NOEXCEPT
- : shadingRateType( shadingRateType_ )
+ combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } },
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shadingRateType( shadingRateType_ )
, shadingRate( shadingRate_ )
, combinerOps( combinerOps_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV(
- PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14
+ PipelineFragmentShadingRateEnumStateCreateInfoNV( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineFragmentShadingRateEnumStateCreateInfoNV( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PipelineFragmentShadingRateEnumStateCreateInfoNV(
- *reinterpret_cast<PipelineFragmentShadingRateEnumStateCreateInfoNV const *>( &rhs ) )
- {}
+ PipelineFragmentShadingRateEnumStateCreateInfoNV( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineFragmentShadingRateEnumStateCreateInfoNV( *reinterpret_cast<PipelineFragmentShadingRateEnumStateCreateInfoNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineFragmentShadingRateEnumStateCreateInfoNV &
- operator=( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineFragmentShadingRateEnumStateCreateInfoNV & operator=( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineFragmentShadingRateEnumStateCreateInfoNV &
- operator=( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineFragmentShadingRateEnumStateCreateInfoNV & operator=( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV &
- setShadingRateType( VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_ ) VULKAN_HPP_NOEXCEPT
+ setShadingRateType( VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_ ) VULKAN_HPP_NOEXCEPT
{
shadingRateType = shadingRateType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV &
- setShadingRate( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_ ) VULKAN_HPP_NOEXCEPT
+ setShadingRate( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_ ) VULKAN_HPP_NOEXCEPT
{
shadingRate = shadingRate_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & setCombinerOps(
- std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> combinerOps_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV &
+ setCombinerOps( std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> combinerOps_ ) VULKAN_HPP_NOEXCEPT
{
combinerOps = combinerOps_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineFragmentShadingRateEnumStateCreateInfoNV *>( this );
}
- explicit operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineFragmentShadingRateEnumStateCreateInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -73935,11 +76927,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateType == rhs.shadingRateType ) &&
- ( shadingRate == rhs.shadingRate ) && ( combinerOps == rhs.combinerOps );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateType == rhs.shadingRateType ) && ( shadingRate == rhs.shadingRate ) &&
+ ( combinerOps == rhs.combinerOps );
# endif
}
@@ -73950,23 +76942,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType =
- VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize;
- VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate =
- VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType = VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize;
+ VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate = VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel;
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> combinerOps = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV ) ==
- sizeof( VkPipelineFragmentShadingRateEnumStateCreateInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV>::value,
- "PipelineFragmentShadingRateEnumStateCreateInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV>
@@ -73979,42 +76960,39 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPipelineFragmentShadingRateStateCreateInfoKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR(
VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {},
std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> const &
- combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep,
- VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } } ) VULKAN_HPP_NOEXCEPT
- : fragmentSize( fragmentSize_ )
+ combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } },
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , fragmentSize( fragmentSize_ )
, combinerOps( combinerOps_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR(
- PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14
+ PipelineFragmentShadingRateStateCreateInfoKHR( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineFragmentShadingRateStateCreateInfoKHR( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PipelineFragmentShadingRateStateCreateInfoKHR(
- *reinterpret_cast<PipelineFragmentShadingRateStateCreateInfoKHR const *>( &rhs ) )
- {}
+ PipelineFragmentShadingRateStateCreateInfoKHR( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineFragmentShadingRateStateCreateInfoKHR( *reinterpret_cast<PipelineFragmentShadingRateStateCreateInfoKHR const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineFragmentShadingRateStateCreateInfoKHR &
- operator=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineFragmentShadingRateStateCreateInfoKHR & operator=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineFragmentShadingRateStateCreateInfoKHR &
- operator=( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineFragmentShadingRateStateCreateInfoKHR & operator=( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
@@ -74027,25 +77005,25 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & setCombinerOps(
- std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> combinerOps_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR &
+ setCombinerOps( std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> combinerOps_ ) VULKAN_HPP_NOEXCEPT
{
combinerOps = combinerOps_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineFragmentShadingRateStateCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineFragmentShadingRateStateCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineFragmentShadingRateStateCreateInfoKHR *>( this );
}
- explicit operator VkPipelineFragmentShadingRateStateCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineFragmentShadingRateStateCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineFragmentShadingRateStateCreateInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -74065,11 +77043,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentSize == rhs.fragmentSize ) &&
- ( combinerOps == rhs.combinerOps );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentSize == rhs.fragmentSize ) && ( combinerOps == rhs.combinerOps );
# endif
}
@@ -74085,15 +77062,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Extent2D fragmentSize = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> combinerOps = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR ) ==
- sizeof( VkPipelineFragmentShadingRateStateCreateInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR>::value,
- "PipelineFragmentShadingRateStateCreateInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR>
@@ -74109,15 +77077,15 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {} ) VULKAN_HPP_NOEXCEPT
- : pipeline( pipeline_ )
- {}
+ VULKAN_HPP_CONSTEXPR PipelineInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pipeline( pipeline_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR PipelineInfoKHR( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineInfoKHR( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : PipelineInfoKHR( *reinterpret_cast<PipelineInfoKHR const *>( &rhs ) )
- {}
+ PipelineInfoKHR( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PipelineInfoKHR( *reinterpret_cast<PipelineInfoKHR const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PipelineInfoKHR & operator=( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -74135,30 +77103,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineInfoKHR &
- setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
{
pipeline = pipeline_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineInfoKHR *>( this );
}
- explicit operator VkPipelineInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::
- tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Pipeline const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Pipeline const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -74171,7 +77137,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline );
@@ -74189,37 +77155,30 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineInfoKHR ) == sizeof( VkPipelineInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineInfoKHR>::value,
- "PipelineInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineInfoKHR>
{
using Type = PipelineInfoKHR;
};
+ using PipelineInfoEXT = PipelineInfoKHR;
struct PushConstantRange
{
using NativeType = VkPushConstantRange;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PushConstantRange( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {},
- uint32_t offset_ = {},
- uint32_t size_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR
+ PushConstantRange( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, uint32_t offset_ = {}, uint32_t size_ = {} ) VULKAN_HPP_NOEXCEPT
: stageFlags( stageFlags_ )
, offset( offset_ )
, size( size_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR PushConstantRange( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PushConstantRange( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
- : PushConstantRange( *reinterpret_cast<PushConstantRange const *>( &rhs ) )
- {}
+ PushConstantRange( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT : PushConstantRange( *reinterpret_cast<PushConstantRange const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PushConstantRange & operator=( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -74231,8 +77190,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PushConstantRange &
- setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
{
stageFlags = stageFlags_;
return *this;
@@ -74251,17 +77209,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPushConstantRange const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPushConstantRange const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPushConstantRange *>( this );
}
- explicit operator VkPushConstantRange &() VULKAN_HPP_NOEXCEPT
+ operator VkPushConstantRange &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPushConstantRange *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -74278,7 +77236,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( stageFlags == rhs.stageFlags ) && ( offset == rhs.offset ) && ( size == rhs.size );
@@ -74296,12 +77254,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t offset = {};
uint32_t size = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PushConstantRange ) == sizeof( VkPushConstantRange ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PushConstantRange>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PushConstantRange>::value,
- "PushConstantRange is not nothrow_move_constructible!" );
struct PipelineLayoutCreateInfo
{
@@ -74311,38 +77263,41 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLayoutCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo(
- VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ = {},
- uint32_t setLayoutCount_ = {},
- const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {},
- uint32_t pushConstantRangeCount_ = {},
- const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ = {},
+ uint32_t setLayoutCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {},
+ uint32_t pushConstantRangeCount_ = {},
+ const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, setLayoutCount( setLayoutCount_ )
, pSetLayouts( pSetLayouts_ )
, pushConstantRangeCount( pushConstantRangeCount_ )
, pPushConstantRanges( pPushConstantRanges_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineLayoutCreateInfo( *reinterpret_cast<PipelineLayoutCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PipelineLayoutCreateInfo(
- VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const &
- setLayouts_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const &
- pushConstantRanges_ = {} )
- : flags( flags_ )
+ PipelineLayoutCreateInfo( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, setLayoutCount( static_cast<uint32_t>( setLayouts_.size() ) )
, pSetLayouts( setLayouts_.data() )
, pushConstantRangeCount( static_cast<uint32_t>( pushConstantRanges_.size() ) )
, pPushConstantRanges( pushConstantRanges_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -74361,8 +77316,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -74374,17 +77328,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo &
- setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
{
pSetLayouts = pSetLayouts_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PipelineLayoutCreateInfo & setSetLayouts(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const &
- setLayouts_ ) VULKAN_HPP_NOEXCEPT
+ PipelineLayoutCreateInfo &
+ setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ ) VULKAN_HPP_NOEXCEPT
{
setLayoutCount = static_cast<uint32_t>( setLayouts_.size() );
pSetLayouts = setLayouts_.data();
@@ -74392,15 +77344,14 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo &
- setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT
{
pushConstantRangeCount = pushConstantRangeCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo &
- setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
+ setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
{
pPushConstantRanges = pPushConstantRanges_;
return *this;
@@ -74408,8 +77359,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineLayoutCreateInfo & setPushConstantRanges(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const &
- pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
{
pushConstantRangeCount = static_cast<uint32_t>( pushConstantRanges_.size() );
pPushConstantRanges = pushConstantRanges_.data();
@@ -74418,17 +77368,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineLayoutCreateInfo *>( this );
}
- explicit operator VkPipelineLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineLayoutCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -74451,12 +77401,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( setLayoutCount == rhs.setLayoutCount ) && ( pSetLayouts == rhs.pSetLayouts ) &&
- ( pushConstantRangeCount == rhs.pushConstantRangeCount ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( setLayoutCount == rhs.setLayoutCount ) &&
+ ( pSetLayouts == rhs.pSetLayouts ) && ( pushConstantRangeCount == rhs.pushConstantRangeCount ) &&
( pPushConstantRanges == rhs.pPushConstantRanges );
# endif
}
@@ -74476,13 +77425,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t pushConstantRangeCount = {};
const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo ) ==
- sizeof( VkPipelineLayoutCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo>::value,
- "PipelineLayoutCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineLayoutCreateInfo>
@@ -74498,25 +77440,28 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLibraryCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PipelineLibraryCreateInfoKHR( uint32_t libraryCount_ = {},
- const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ = {} ) VULKAN_HPP_NOEXCEPT
- : libraryCount( libraryCount_ )
+ VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( uint32_t libraryCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , libraryCount( libraryCount_ )
, pLibraries( pLibraries_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PipelineLibraryCreateInfoKHR( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineLibraryCreateInfoKHR( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineLibraryCreateInfoKHR( *reinterpret_cast<PipelineLibraryCreateInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PipelineLibraryCreateInfoKHR(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & libraries_ )
- : libraryCount( static_cast<uint32_t>( libraries_.size() ) ), pLibraries( libraries_.data() )
- {}
+ PipelineLibraryCreateInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & libraries_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ ), libraryCount( static_cast<uint32_t>( libraries_.size() ) ), pLibraries( libraries_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -74541,17 +77486,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR &
- setPLibraries( const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setPLibraries( const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ ) VULKAN_HPP_NOEXCEPT
{
pLibraries = pLibraries_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PipelineLibraryCreateInfoKHR & setLibraries(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & libraries_ )
- VULKAN_HPP_NOEXCEPT
+ PipelineLibraryCreateInfoKHR &
+ setLibraries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & libraries_ ) VULKAN_HPP_NOEXCEPT
{
libraryCount = static_cast<uint32_t>( libraries_.size() );
pLibraries = libraries_.data();
@@ -74560,24 +77503,21 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineLibraryCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineLibraryCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineLibraryCreateInfoKHR *>( this );
}
- explicit operator VkPipelineLibraryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineLibraryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineLibraryCreateInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- uint32_t const &,
- const VULKAN_HPP_NAMESPACE::Pipeline * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Pipeline * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -74590,11 +77530,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( libraryCount == rhs.libraryCount ) &&
- ( pLibraries == rhs.pLibraries );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( libraryCount == rhs.libraryCount ) && ( pLibraries == rhs.pLibraries );
# endif
}
@@ -74610,14 +77549,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t libraryCount = {};
const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR ) ==
- sizeof( VkPipelineLibraryCreateInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR>::value,
- "PipelineLibraryCreateInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineLibraryCreateInfoKHR>
@@ -74625,87 +77556,181 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PipelineLibraryCreateInfoKHR;
};
+ struct PipelinePropertiesIdentifierEXT
+ {
+ using NativeType = VkPipelinePropertiesIdentifierEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelinePropertiesIdentifierEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 PipelinePropertiesIdentifierEXT( std::array<uint8_t, VK_UUID_SIZE> const & pipelineIdentifier_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pipelineIdentifier( pipelineIdentifier_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PipelinePropertiesIdentifierEXT( PipelinePropertiesIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelinePropertiesIdentifierEXT( VkPipelinePropertiesIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelinePropertiesIdentifierEXT( *reinterpret_cast<PipelinePropertiesIdentifierEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PipelinePropertiesIdentifierEXT & operator=( PipelinePropertiesIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelinePropertiesIdentifierEXT & operator=( VkPipelinePropertiesIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PipelinePropertiesIdentifierEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PipelinePropertiesIdentifierEXT & setPipelineIdentifier( std::array<uint8_t, VK_UUID_SIZE> pipelineIdentifier_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pipelineIdentifier = pipelineIdentifier_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPipelinePropertiesIdentifierEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPipelinePropertiesIdentifierEXT *>( this );
+ }
+
+ operator VkPipelinePropertiesIdentifierEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPipelinePropertiesIdentifierEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, pipelineIdentifier );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PipelinePropertiesIdentifierEXT const & ) const = default;
+#else
+ bool operator==( PipelinePropertiesIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineIdentifier == rhs.pipelineIdentifier );
+# endif
+ }
+
+ bool operator!=( PipelinePropertiesIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelinePropertiesIdentifierEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> pipelineIdentifier = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePipelinePropertiesIdentifierEXT>
+ {
+ using Type = PipelinePropertiesIdentifierEXT;
+ };
+
struct PipelineRasterizationConservativeStateCreateInfoEXT
{
using NativeType = VkPipelineRasterizationConservativeStateCreateInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT(
- VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = {},
- VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ =
- VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled,
- float extraPrimitiveOverestimationSize_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = {},
+ VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled,
+ float extraPrimitiveOverestimationSize_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, conservativeRasterizationMode( conservativeRasterizationMode_ )
, extraPrimitiveOverestimationSize( extraPrimitiveOverestimationSize_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT(
- PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PipelineRasterizationConservativeStateCreateInfoEXT( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineRasterizationConservativeStateCreateInfoEXT(
- VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : PipelineRasterizationConservativeStateCreateInfoEXT(
- *reinterpret_cast<PipelineRasterizationConservativeStateCreateInfoEXT const *>( &rhs ) )
- {}
+ PipelineRasterizationConservativeStateCreateInfoEXT( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineRasterizationConservativeStateCreateInfoEXT( *reinterpret_cast<PipelineRasterizationConservativeStateCreateInfoEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PipelineRasterizationConservativeStateCreateInfoEXT &
operator=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineRasterizationConservativeStateCreateInfoEXT &
- operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationConservativeStateCreateInfoEXT & operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT &
- setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & setConservativeRasterizationMode(
- VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT &
+ setConservativeRasterizationMode( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
{
conservativeRasterizationMode = conservativeRasterizationMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT &
- setExtraPrimitiveOverestimationSize( float extraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT
+ setExtraPrimitiveOverestimationSize( float extraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT
{
extraPrimitiveOverestimationSize = extraPrimitiveOverestimationSize_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineRasterizationConservativeStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineRasterizationConservativeStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineRasterizationConservativeStateCreateInfoEXT *>( this );
}
- explicit operator VkPipelineRasterizationConservativeStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineRasterizationConservativeStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineRasterizationConservativeStateCreateInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -74726,7 +77751,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
@@ -74742,22 +77767,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags = {};
- VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode =
- VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled;
- float extraPrimitiveOverestimationSize = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT ) ==
- sizeof( VkPipelineRasterizationConservativeStateCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<
- VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT>::value,
- "PipelineRasterizationConservativeStateCreateInfoEXT is not nothrow_move_constructible!" );
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags = {};
+ VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled;
+ float extraPrimitiveOverestimationSize = {};
+ };
template <>
struct CppType<StructureType, StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT>
@@ -74770,47 +77785,44 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPipelineRasterizationDepthClipStateCreateInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT(
- VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, depthClipEnable( depthClipEnable_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT(
- PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PipelineRasterizationDepthClipStateCreateInfoEXT( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineRasterizationDepthClipStateCreateInfoEXT( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PipelineRasterizationDepthClipStateCreateInfoEXT(
- *reinterpret_cast<PipelineRasterizationDepthClipStateCreateInfoEXT const *>( &rhs ) )
- {}
+ PipelineRasterizationDepthClipStateCreateInfoEXT( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineRasterizationDepthClipStateCreateInfoEXT( *reinterpret_cast<PipelineRasterizationDepthClipStateCreateInfoEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineRasterizationDepthClipStateCreateInfoEXT &
- operator=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineRasterizationDepthClipStateCreateInfoEXT &
- operator=( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT &
- setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -74824,17 +77836,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineRasterizationDepthClipStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineRasterizationDepthClipStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineRasterizationDepthClipStateCreateInfoEXT *>( this );
}
- explicit operator VkPipelineRasterizationDepthClipStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineRasterizationDepthClipStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineRasterizationDepthClipStateCreateInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -74854,11 +77866,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( depthClipEnable == rhs.depthClipEnable );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( depthClipEnable == rhs.depthClipEnable );
# endif
}
@@ -74869,20 +77880,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT;
+ const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags = {};
VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT ) ==
- sizeof( VkPipelineRasterizationDepthClipStateCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT>::value,
- "PipelineRasterizationDepthClipStateCreateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT>
@@ -74895,90 +77897,84 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPipelineRasterizationLineStateCreateInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineRasterizationLineStateCreateInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT(
- VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ =
- VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault,
- VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ = {},
- uint32_t lineStippleFactor_ = {},
- uint16_t lineStipplePattern_ = {} ) VULKAN_HPP_NOEXCEPT
- : lineRasterizationMode( lineRasterizationMode_ )
+ VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ = VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault,
+ VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ = {},
+ uint32_t lineStippleFactor_ = {},
+ uint16_t lineStipplePattern_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , lineRasterizationMode( lineRasterizationMode_ )
, stippledLineEnable( stippledLineEnable_ )
, lineStippleFactor( lineStippleFactor_ )
, lineStipplePattern( lineStipplePattern_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT(
- PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineRasterizationLineStateCreateInfoEXT( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PipelineRasterizationLineStateCreateInfoEXT(
- *reinterpret_cast<PipelineRasterizationLineStateCreateInfoEXT const *>( &rhs ) )
- {}
+ PipelineRasterizationLineStateCreateInfoEXT( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineRasterizationLineStateCreateInfoEXT( *reinterpret_cast<PipelineRasterizationLineStateCreateInfoEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineRasterizationLineStateCreateInfoEXT &
- operator=( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineRasterizationLineStateCreateInfoEXT & operator=( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineRasterizationLineStateCreateInfoEXT &
- operator=( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationLineStateCreateInfoEXT & operator=( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & setLineRasterizationMode(
- VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT &
+ setLineRasterizationMode( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
{
lineRasterizationMode = lineRasterizationMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT &
- setStippledLineEnable( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ ) VULKAN_HPP_NOEXCEPT
+ setStippledLineEnable( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ ) VULKAN_HPP_NOEXCEPT
{
stippledLineEnable = stippledLineEnable_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT &
- setLineStippleFactor( uint32_t lineStippleFactor_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & setLineStippleFactor( uint32_t lineStippleFactor_ ) VULKAN_HPP_NOEXCEPT
{
lineStippleFactor = lineStippleFactor_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT &
- setLineStipplePattern( uint16_t lineStipplePattern_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & setLineStipplePattern( uint16_t lineStipplePattern_ ) VULKAN_HPP_NOEXCEPT
{
lineStipplePattern = lineStipplePattern_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineRasterizationLineStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineRasterizationLineStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineRasterizationLineStateCreateInfoEXT *>( this );
}
- explicit operator VkPipelineRasterizationLineStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineRasterizationLineStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineRasterizationLineStateCreateInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -75000,11 +77996,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( lineRasterizationMode == rhs.lineRasterizationMode ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( lineRasterizationMode == rhs.lineRasterizationMode ) &&
( stippledLineEnable == rhs.stippledLineEnable ) && ( lineStippleFactor == rhs.lineStippleFactor ) &&
( lineStipplePattern == rhs.lineStipplePattern );
# endif
@@ -75017,23 +78012,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode =
- VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault;
- VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable = {};
- uint32_t lineStippleFactor = {};
- uint16_t lineStipplePattern = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT ) ==
- sizeof( VkPipelineRasterizationLineStateCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT>::value,
- "PipelineRasterizationLineStateCreateInfoEXT is not nothrow_move_constructible!" );
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode = VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault;
+ VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable = {};
+ uint32_t lineStippleFactor = {};
+ uint16_t lineStipplePattern = {};
+ };
template <>
struct CppType<StructureType, StructureType::ePipelineRasterizationLineStateCreateInfoEXT>
@@ -75046,24 +78031,24 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPipelineRasterizationProvokingVertexStateCreateInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineRasterizationProvokingVertexStateCreateInfoEXT(
- VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode_ =
- VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eFirstVertex ) VULKAN_HPP_NOEXCEPT
- : provokingVertexMode( provokingVertexMode_ )
- {}
+ VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode_ = VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eFirstVertex,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , provokingVertexMode( provokingVertexMode_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineRasterizationProvokingVertexStateCreateInfoEXT(
- PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PipelineRasterizationProvokingVertexStateCreateInfoEXT( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs )
+ VULKAN_HPP_NOEXCEPT = default;
- PipelineRasterizationProvokingVertexStateCreateInfoEXT(
- VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : PipelineRasterizationProvokingVertexStateCreateInfoEXT(
- *reinterpret_cast<PipelineRasterizationProvokingVertexStateCreateInfoEXT const *>( &rhs ) )
- {}
+ PipelineRasterizationProvokingVertexStateCreateInfoEXT( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineRasterizationProvokingVertexStateCreateInfoEXT( *reinterpret_cast<PipelineRasterizationProvokingVertexStateCreateInfoEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PipelineRasterizationProvokingVertexStateCreateInfoEXT &
@@ -75072,44 +78057,40 @@ namespace VULKAN_HPP_NAMESPACE
PipelineRasterizationProvokingVertexStateCreateInfoEXT &
operator=( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationProvokingVertexStateCreateInfoEXT &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationProvokingVertexStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationProvokingVertexStateCreateInfoEXT &
- setProvokingVertexMode( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode_ ) VULKAN_HPP_NOEXCEPT
+ setProvokingVertexMode( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode_ ) VULKAN_HPP_NOEXCEPT
{
provokingVertexMode = provokingVertexMode_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineRasterizationProvokingVertexStateCreateInfoEXT *>( this );
}
- explicit operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineRasterizationProvokingVertexStateCreateInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -75122,7 +78103,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( provokingVertexMode == rhs.provokingVertexMode );
@@ -75136,21 +78117,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode =
- VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eFirstVertex;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode = VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eFirstVertex;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT ) ==
- sizeof( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<
- VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT>::value,
- "PipelineRasterizationProvokingVertexStateCreateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT>
@@ -75163,69 +78133,64 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPipelineRasterizationStateRasterizationOrderAMD;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD(
- VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ =
- VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict ) VULKAN_HPP_NOEXCEPT
- : rasterizationOrder( rasterizationOrder_ )
- {}
+ VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , rasterizationOrder( rasterizationOrder_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD(
- PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PipelineRasterizationStateRasterizationOrderAMD( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PipelineRasterizationStateRasterizationOrderAMD(
- *reinterpret_cast<PipelineRasterizationStateRasterizationOrderAMD const *>( &rhs ) )
- {}
+ PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineRasterizationStateRasterizationOrderAMD( *reinterpret_cast<PipelineRasterizationStateRasterizationOrderAMD const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineRasterizationStateRasterizationOrderAMD &
- operator=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineRasterizationStateRasterizationOrderAMD & operator=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineRasterizationStateRasterizationOrderAMD &
- operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationStateRasterizationOrderAMD & operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateRasterizationOrderAMD &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateRasterizationOrderAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateRasterizationOrderAMD &
- setRasterizationOrder( VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ ) VULKAN_HPP_NOEXCEPT
+ setRasterizationOrder( VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ ) VULKAN_HPP_NOEXCEPT
{
rasterizationOrder = rasterizationOrder_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineRasterizationStateRasterizationOrderAMD const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineRasterizationStateRasterizationOrderAMD const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD *>( this );
}
- explicit operator VkPipelineRasterizationStateRasterizationOrderAMD &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineRasterizationStateRasterizationOrderAMD &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineRasterizationStateRasterizationOrderAMD *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::RasterizationOrderAMD const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RasterizationOrderAMD const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -75238,7 +78203,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rasterizationOrder == rhs.rasterizationOrder );
@@ -75252,20 +78217,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder =
- VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD ) ==
- sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD>::value,
- "PipelineRasterizationStateRasterizationOrderAMD is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineRasterizationStateRasterizationOrderAMD>
@@ -75278,71 +78233,67 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPipelineRasterizationStateStreamCreateInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineRasterizationStateStreamCreateInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT(
- VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ = {},
- uint32_t rasterizationStream_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ = {},
+ uint32_t rasterizationStream_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, rasterizationStream( rasterizationStream_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT(
- PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PipelineRasterizationStateStreamCreateInfoEXT( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineRasterizationStateStreamCreateInfoEXT( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PipelineRasterizationStateStreamCreateInfoEXT(
- *reinterpret_cast<PipelineRasterizationStateStreamCreateInfoEXT const *>( &rhs ) )
- {}
+ PipelineRasterizationStateStreamCreateInfoEXT( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineRasterizationStateStreamCreateInfoEXT( *reinterpret_cast<PipelineRasterizationStateStreamCreateInfoEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineRasterizationStateStreamCreateInfoEXT &
- operator=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineRasterizationStateStreamCreateInfoEXT & operator=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineRasterizationStateStreamCreateInfoEXT &
- operator=( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationStateStreamCreateInfoEXT & operator=( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT &
- setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT &
- setRasterizationStream( uint32_t rasterizationStream_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & setRasterizationStream( uint32_t rasterizationStream_ ) VULKAN_HPP_NOEXCEPT
{
rasterizationStream = rasterizationStream_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineRasterizationStateStreamCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineRasterizationStateStreamCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineRasterizationStateStreamCreateInfoEXT *>( this );
}
- explicit operator VkPipelineRasterizationStateStreamCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineRasterizationStateStreamCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineRasterizationStateStreamCreateInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -75362,11 +78313,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( rasterizationStream == rhs.rasterizationStream );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( rasterizationStream == rhs.rasterizationStream );
# endif
}
@@ -75377,20 +78327,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT;
+ const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags = {};
uint32_t rasterizationStream = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT ) ==
- sizeof( VkPipelineRasterizationStateStreamCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT>::value,
- "PipelineRasterizationStateStreamCreateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineRasterizationStateStreamCreateInfoEXT>
@@ -75406,39 +78347,42 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRenderingCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineRenderingCreateInfo(
- uint32_t viewMask_ = {},
- uint32_t colorAttachmentCount_ = {},
- const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {},
- VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined )
- VULKAN_HPP_NOEXCEPT
- : viewMask( viewMask_ )
+ VULKAN_HPP_CONSTEXPR PipelineRenderingCreateInfo( uint32_t viewMask_ = {},
+ uint32_t colorAttachmentCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {},
+ VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , viewMask( viewMask_ )
, colorAttachmentCount( colorAttachmentCount_ )
, pColorAttachmentFormats( pColorAttachmentFormats_ )
, depthAttachmentFormat( depthAttachmentFormat_ )
, stencilAttachmentFormat( stencilAttachmentFormat_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PipelineRenderingCreateInfo( PipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PipelineRenderingCreateInfo( PipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PipelineRenderingCreateInfo( VkPipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PipelineRenderingCreateInfo( *reinterpret_cast<PipelineRenderingCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PipelineRenderingCreateInfo(
- uint32_t viewMask_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & colorAttachmentFormats_,
- VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined )
- : viewMask( viewMask_ )
+ PipelineRenderingCreateInfo( uint32_t viewMask_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & colorAttachmentFormats_,
+ VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , viewMask( viewMask_ )
, colorAttachmentCount( static_cast<uint32_t>( colorAttachmentFormats_.size() ) )
, pColorAttachmentFormats( colorAttachmentFormats_.data() )
, depthAttachmentFormat( depthAttachmentFormat_ )
, stencilAttachmentFormat( stencilAttachmentFormat_ )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -75463,15 +78407,14 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo &
- setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = colorAttachmentCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo &
- setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
+ setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
{
pColorAttachmentFormats = pColorAttachmentFormats_;
return *this;
@@ -75479,8 +78422,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineRenderingCreateInfo & setColorAttachmentFormats(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const &
- colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = static_cast<uint32_t>( colorAttachmentFormats_.size() );
pColorAttachmentFormats = colorAttachmentFormats_.data();
@@ -75488,32 +78430,31 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo &
- setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
{
depthAttachmentFormat = depthAttachmentFormat_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo &
- setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
+ setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
{
stencilAttachmentFormat = stencilAttachmentFormat_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineRenderingCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineRenderingCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineRenderingCreateInfo *>( this );
}
- explicit operator VkPipelineRenderingCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineRenderingCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineRenderingCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -75527,13 +78468,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- viewMask,
- colorAttachmentCount,
- pColorAttachmentFormats,
- depthAttachmentFormat,
- stencilAttachmentFormat );
+ return std::tie( sType, pNext, viewMask, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat );
}
#endif
@@ -75542,13 +78477,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineRenderingCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewMask == rhs.viewMask ) &&
- ( colorAttachmentCount == rhs.colorAttachmentCount ) &&
- ( pColorAttachmentFormats == rhs.pColorAttachmentFormats ) &&
- ( depthAttachmentFormat == rhs.depthAttachmentFormat ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewMask == rhs.viewMask ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) &&
+ ( pColorAttachmentFormats == rhs.pColorAttachmentFormats ) && ( depthAttachmentFormat == rhs.depthAttachmentFormat ) &&
( stencilAttachmentFormat == rhs.stencilAttachmentFormat );
# endif
}
@@ -75568,14 +78501,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo ) ==
- sizeof( VkPipelineRenderingCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo>::value,
- "PipelineRenderingCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineRenderingCreateInfo>
@@ -75589,63 +78514,60 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPipelineRepresentativeFragmentTestStateCreateInfoNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV(
- VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ = {} ) VULKAN_HPP_NOEXCEPT
- : representativeFragmentTestEnable( representativeFragmentTestEnable_ )
- {}
+ VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , representativeFragmentTestEnable( representativeFragmentTestEnable_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV(
- PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PipelineRepresentativeFragmentTestStateCreateInfoNV( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineRepresentativeFragmentTestStateCreateInfoNV(
- VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : PipelineRepresentativeFragmentTestStateCreateInfoNV(
- *reinterpret_cast<PipelineRepresentativeFragmentTestStateCreateInfoNV const *>( &rhs ) )
- {}
+ PipelineRepresentativeFragmentTestStateCreateInfoNV( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineRepresentativeFragmentTestStateCreateInfoNV( *reinterpret_cast<PipelineRepresentativeFragmentTestStateCreateInfoNV const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PipelineRepresentativeFragmentTestStateCreateInfoNV &
operator=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineRepresentativeFragmentTestStateCreateInfoNV &
- operator=( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineRepresentativeFragmentTestStateCreateInfoNV & operator=( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PipelineRepresentativeFragmentTestStateCreateInfoNV &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineRepresentativeFragmentTestStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineRepresentativeFragmentTestStateCreateInfoNV & setRepresentativeFragmentTestEnable(
- VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineRepresentativeFragmentTestStateCreateInfoNV &
+ setRepresentativeFragmentTestEnable( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT
{
representativeFragmentTestEnable = representativeFragmentTestEnable_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineRepresentativeFragmentTestStateCreateInfoNV *>( this );
}
- explicit operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineRepresentativeFragmentTestStateCreateInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -75662,11 +78584,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( representativeFragmentTestEnable == rhs.representativeFragmentTestEnable );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( representativeFragmentTestEnable == rhs.representativeFragmentTestEnable );
# endif
}
@@ -75677,19 +78598,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV;
+ const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV ) ==
- sizeof( VkPipelineRepresentativeFragmentTestStateCreateInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<
- VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV>::value,
- "PipelineRepresentativeFragmentTestStateCreateInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV>
@@ -75697,76 +78609,206 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PipelineRepresentativeFragmentTestStateCreateInfoNV;
};
+ struct PipelineRobustnessCreateInfoEXT
+ {
+ using NativeType = VkPipelineRobustnessCreateInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRobustnessCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineRobustnessCreateInfoEXT(
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT storageBuffers_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault,
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT uniformBuffers_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault,
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT vertexInputs_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault,
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT images_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT::eDeviceDefault,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , storageBuffers( storageBuffers_ )
+ , uniformBuffers( uniformBuffers_ )
+ , vertexInputs( vertexInputs_ )
+ , images( images_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR PipelineRobustnessCreateInfoEXT( PipelineRobustnessCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineRobustnessCreateInfoEXT( VkPipelineRobustnessCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineRobustnessCreateInfoEXT( *reinterpret_cast<PipelineRobustnessCreateInfoEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PipelineRobustnessCreateInfoEXT & operator=( PipelineRobustnessCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineRobustnessCreateInfoEXT & operator=( VkPipelineRobustnessCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfoEXT &
+ setStorageBuffers( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT storageBuffers_ ) VULKAN_HPP_NOEXCEPT
+ {
+ storageBuffers = storageBuffers_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfoEXT &
+ setUniformBuffers( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT uniformBuffers_ ) VULKAN_HPP_NOEXCEPT
+ {
+ uniformBuffers = uniformBuffers_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfoEXT &
+ setVertexInputs( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT vertexInputs_ ) VULKAN_HPP_NOEXCEPT
+ {
+ vertexInputs = vertexInputs_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfoEXT & setImages( VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT images_ ) VULKAN_HPP_NOEXCEPT
+ {
+ images = images_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPipelineRobustnessCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPipelineRobustnessCreateInfoEXT *>( this );
+ }
+
+ operator VkPipelineRobustnessCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPipelineRobustnessCreateInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT const &,
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT const &,
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT const &,
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, storageBuffers, uniformBuffers, vertexInputs, images );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PipelineRobustnessCreateInfoEXT const & ) const = default;
+#else
+ bool operator==( PipelineRobustnessCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageBuffers == rhs.storageBuffers ) && ( uniformBuffers == rhs.uniformBuffers ) &&
+ ( vertexInputs == rhs.vertexInputs ) && ( images == rhs.images );
+# endif
+ }
+
+ bool operator!=( PipelineRobustnessCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRobustnessCreateInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT storageBuffers = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault;
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT uniformBuffers = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault;
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT vertexInputs = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault;
+ VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT images = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT::eDeviceDefault;
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineRobustnessCreateInfoEXT>
+ {
+ using Type = PipelineRobustnessCreateInfoEXT;
+ };
+
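A minimal usage sketch for the PipelineRobustnessCreateInfoEXT wrapper introduced above — an editorial illustration, not part of the generated header or of this diff. Per VK_EXT_pipeline_robustness the struct is chained into a pipeline create info through pNext to override robustness behaviour for that one pipeline; the eRobustBufferAccess2 enumerant, the helper name, and the pipeline-creation call are assumptions based on that extension, only eDeviceDefault and the setters appear in the diff itself.

// Sketch only: assumes a valid vk::Device, a filled-in vk::GraphicsPipelineCreateInfo,
// and that VK_EXT_pipeline_robustness has been enabled on the device.
#include <vulkan/vulkan.hpp>

vk::Pipeline createRobustPipeline( vk::Device device, vk::GraphicsPipelineCreateInfo pipelineInfo )
{
  // Request robust access for storage and uniform buffers on this pipeline only,
  // leaving vertex inputs and image accesses at the device default.
  vk::PipelineRobustnessCreateInfoEXT robustness{};
  robustness.setStorageBuffers( vk::PipelineRobustnessBufferBehaviorEXT::eRobustBufferAccess2 )  // assumed enumerant
            .setUniformBuffers( vk::PipelineRobustnessBufferBehaviorEXT::eRobustBufferAccess2 )  // assumed enumerant
            .setVertexInputs( vk::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault )
            .setImages( vk::PipelineRobustnessImageBehaviorEXT::eDeviceDefault );

  // Splice the struct into the create info's pNext chain; 'robustness' stays alive
  // until createGraphicsPipeline returns, so the chain remains valid.
  robustness.setPNext( pipelineInfo.pNext );
  pipelineInfo.setPNext( &robustness );

  return device.createGraphicsPipeline( nullptr, pipelineInfo ).value;
}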
struct PipelineSampleLocationsStateCreateInfoEXT
{
using NativeType = VkPipelineSampleLocationsStateCreateInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineSampleLocationsStateCreateInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT(
- VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ = {},
- VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT
- : sampleLocationsEnable( sampleLocationsEnable_ )
+ VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ = {},
+ VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , sampleLocationsEnable( sampleLocationsEnable_ )
, sampleLocationsInfo( sampleLocationsInfo_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT(
- PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PipelineSampleLocationsStateCreateInfoEXT(
- *reinterpret_cast<PipelineSampleLocationsStateCreateInfoEXT const *>( &rhs ) )
- {}
+ PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineSampleLocationsStateCreateInfoEXT( *reinterpret_cast<PipelineSampleLocationsStateCreateInfoEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineSampleLocationsStateCreateInfoEXT &
- operator=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineSampleLocationsStateCreateInfoEXT & operator=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineSampleLocationsStateCreateInfoEXT &
- operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineSampleLocationsStateCreateInfoEXT & operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT &
- setSampleLocationsEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT
+ setSampleLocationsEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationsEnable = sampleLocationsEnable_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsInfo(
- VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT &
+ setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationsInfo = sampleLocationsInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineSampleLocationsStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineSampleLocationsStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineSampleLocationsStateCreateInfoEXT *>( this );
}
- explicit operator VkPipelineSampleLocationsStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineSampleLocationsStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineSampleLocationsStateCreateInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -75786,11 +78828,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( sampleLocationsEnable == rhs.sampleLocationsEnable ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleLocationsEnable == rhs.sampleLocationsEnable ) &&
( sampleLocationsInfo == rhs.sampleLocationsInfo );
# endif
}
@@ -75802,20 +78843,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT;
+ const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable = {};
VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT ) ==
- sizeof( VkPipelineSampleLocationsStateCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT>::value,
- "PipelineSampleLocationsStateCreateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineSampleLocationsStateCreateInfoEXT>
@@ -75823,52 +78855,174 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PipelineSampleLocationsStateCreateInfoEXT;
};
+ struct PipelineShaderStageModuleIdentifierCreateInfoEXT
+ {
+ using NativeType = VkPipelineShaderStageModuleIdentifierCreateInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageModuleIdentifierCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PipelineShaderStageModuleIdentifierCreateInfoEXT( uint32_t identifierSize_ = {},
+ const uint8_t * pIdentifier_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , identifierSize( identifierSize_ )
+ , pIdentifier( pIdentifier_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR
+ PipelineShaderStageModuleIdentifierCreateInfoEXT( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineShaderStageModuleIdentifierCreateInfoEXT( VkPipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineShaderStageModuleIdentifierCreateInfoEXT( *reinterpret_cast<PipelineShaderStageModuleIdentifierCreateInfoEXT const *>( &rhs ) )
+ {
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ PipelineShaderStageModuleIdentifierCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint8_t> const & identifier_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ ), identifierSize( static_cast<uint32_t>( identifier_.size() ) ), pIdentifier( identifier_.data() )
+ {
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ PipelineShaderStageModuleIdentifierCreateInfoEXT & operator=( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PipelineShaderStageModuleIdentifierCreateInfoEXT & operator=( VkPipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageModuleIdentifierCreateInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageModuleIdentifierCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageModuleIdentifierCreateInfoEXT & setIdentifierSize( uint32_t identifierSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ identifierSize = identifierSize_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageModuleIdentifierCreateInfoEXT & setPIdentifier( const uint8_t * pIdentifier_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pIdentifier = pIdentifier_;
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ PipelineShaderStageModuleIdentifierCreateInfoEXT &
+ setIdentifier( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint8_t> const & identifier_ ) VULKAN_HPP_NOEXCEPT
+ {
+ identifierSize = static_cast<uint32_t>( identifier_.size() );
+ pIdentifier = identifier_.data();
+ return *this;
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkPipelineShaderStageModuleIdentifierCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPipelineShaderStageModuleIdentifierCreateInfoEXT *>( this );
+ }
+
+ operator VkPipelineShaderStageModuleIdentifierCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPipelineShaderStageModuleIdentifierCreateInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint8_t * const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, identifierSize, pIdentifier );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PipelineShaderStageModuleIdentifierCreateInfoEXT const & ) const = default;
+#else
+ bool operator==( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( identifierSize == rhs.identifierSize ) && ( pIdentifier == rhs.pIdentifier );
+# endif
+ }
+
+ bool operator!=( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageModuleIdentifierCreateInfoEXT;
+ const void * pNext = {};
+ uint32_t identifierSize = {};
+ const uint8_t * pIdentifier = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePipelineShaderStageModuleIdentifierCreateInfoEXT>
+ {
+ using Type = PipelineShaderStageModuleIdentifierCreateInfoEXT;
+ };
+
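A similar hedged sketch for the PipelineShaderStageModuleIdentifierCreateInfoEXT wrapper above, again editorial and not part of the diff. Per VK_EXT_shader_module_identifier, an identifier previously recorded from a shader module can be attached to a pipeline shader stage in place of the module itself; the helper name, the fragment-stage choice, and the caller-supplied cachedIdentifier bytes are illustrative assumptions, while the setIdentifier / identifierSize / pIdentifier members come straight from the struct shown here.

// Sketch only: builds a shader stage that refers to a module by its saved identifier.
#include <cstdint>
#include <vector>
#include <vulkan/vulkan.hpp>

vk::PipelineShaderStageCreateInfo makeStageFromIdentifier(
  vk::PipelineShaderStageModuleIdentifierCreateInfoEXT & moduleId,        // caller-owned; must outlive pipeline creation
  std::vector<uint8_t> const &                            cachedIdentifier ) // hypothetical bytes saved from an earlier run
{
  // Enhanced-mode setter: fills identifierSize and pIdentifier from the vector.
  // pIdentifier points into cachedIdentifier, so the vector must also stay alive.
  moduleId.setIdentifier( cachedIdentifier );

  vk::PipelineShaderStageCreateInfo stage{};
  stage.setStage( vk::ShaderStageFlagBits::eFragment )
       .setPName( "main" )
       .setModule( vk::ShaderModule{} )  // no VkShaderModule: the identifier stands in for it
       .setPNext( &moduleId );
  // Per VK_EXT_shader_module_identifier, the pipeline built from this stage should be created
  // with the fail-on-pipeline-compile-required flag and a pipeline cache able to satisfy the identifier.
  return stage;
}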
struct PipelineShaderStageRequiredSubgroupSizeCreateInfo
{
using NativeType = VkPipelineShaderStageRequiredSubgroupSizeCreateInfo;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PipelineShaderStageRequiredSubgroupSizeCreateInfo( uint32_t requiredSubgroupSize_ = {} ) VULKAN_HPP_NOEXCEPT
- : requiredSubgroupSize( requiredSubgroupSize_ )
- {}
+ VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfo( uint32_t requiredSubgroupSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , requiredSubgroupSize( requiredSubgroupSize_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfo(
- PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PipelineShaderStageRequiredSubgroupSizeCreateInfo( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineShaderStageRequiredSubgroupSizeCreateInfo( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PipelineShaderStageRequiredSubgroupSizeCreateInfo(
- *reinterpret_cast<PipelineShaderStageRequiredSubgroupSizeCreateInfo const *>( &rhs ) )
- {}
+ PipelineShaderStageRequiredSubgroupSizeCreateInfo( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineShaderStageRequiredSubgroupSizeCreateInfo( *reinterpret_cast<PipelineShaderStageRequiredSubgroupSizeCreateInfo const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PipelineShaderStageRequiredSubgroupSizeCreateInfo &
operator=( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineShaderStageRequiredSubgroupSizeCreateInfo &
- operator=( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineShaderStageRequiredSubgroupSizeCreateInfo & operator=( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo const *>( &rhs );
return *this;
}
- explicit operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineShaderStageRequiredSubgroupSizeCreateInfo *>( this );
}
- explicit operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineShaderStageRequiredSubgroupSizeCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -75885,7 +79039,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( requiredSubgroupSize == rhs.requiredSubgroupSize );
@@ -75899,19 +79053,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo;
+ void * pNext = {};
uint32_t requiredSubgroupSize = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo ) ==
- sizeof( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo>::value,
- "PipelineShaderStageRequiredSubgroupSizeCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo>
@@ -75925,68 +79070,64 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPipelineTessellationDomainOriginStateCreateInfo;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineTessellationDomainOriginStateCreateInfo;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo(
- VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ =
- VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft ) VULKAN_HPP_NOEXCEPT : domainOrigin( domainOrigin_ )
- {}
+ VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , domainOrigin( domainOrigin_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo(
- PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PipelineTessellationDomainOriginStateCreateInfo( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineTessellationDomainOriginStateCreateInfo( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PipelineTessellationDomainOriginStateCreateInfo(
- *reinterpret_cast<PipelineTessellationDomainOriginStateCreateInfo const *>( &rhs ) )
- {}
+ PipelineTessellationDomainOriginStateCreateInfo( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineTessellationDomainOriginStateCreateInfo( *reinterpret_cast<PipelineTessellationDomainOriginStateCreateInfo const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineTessellationDomainOriginStateCreateInfo &
- operator=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineTessellationDomainOriginStateCreateInfo & operator=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineTessellationDomainOriginStateCreateInfo &
- operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineTessellationDomainOriginStateCreateInfo & operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PipelineTessellationDomainOriginStateCreateInfo &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineTessellationDomainOriginStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineTessellationDomainOriginStateCreateInfo &
- setDomainOrigin( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ ) VULKAN_HPP_NOEXCEPT
+ setDomainOrigin( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ ) VULKAN_HPP_NOEXCEPT
{
domainOrigin = domainOrigin_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineTessellationDomainOriginStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineTessellationDomainOriginStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineTessellationDomainOriginStateCreateInfo *>( this );
}
- explicit operator VkPipelineTessellationDomainOriginStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineTessellationDomainOriginStateCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineTessellationDomainOriginStateCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::TessellationDomainOrigin const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::TessellationDomainOrigin const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -75999,7 +79140,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( domainOrigin == rhs.domainOrigin );
@@ -76013,20 +79154,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin =
- VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo ) ==
- sizeof( VkPipelineTessellationDomainOriginStateCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo>::value,
- "PipelineTessellationDomainOriginStateCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineTessellationDomainOriginStateCreateInfo>
@@ -76040,58 +79171,53 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkVertexInputBindingDivisorDescriptionEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT( uint32_t binding_ = {},
- uint32_t divisor_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT( uint32_t binding_ = {}, uint32_t divisor_ = {} ) VULKAN_HPP_NOEXCEPT
: binding( binding_ )
, divisor( divisor_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT( VertexInputBindingDivisorDescriptionEXT const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT( VertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VertexInputBindingDivisorDescriptionEXT( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : VertexInputBindingDivisorDescriptionEXT(
- *reinterpret_cast<VertexInputBindingDivisorDescriptionEXT const *>( &rhs ) )
- {}
+ : VertexInputBindingDivisorDescriptionEXT( *reinterpret_cast<VertexInputBindingDivisorDescriptionEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VertexInputBindingDivisorDescriptionEXT &
- operator=( VertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VertexInputBindingDivisorDescriptionEXT & operator=( VertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VertexInputBindingDivisorDescriptionEXT &
- operator=( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VertexInputBindingDivisorDescriptionEXT & operator=( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescriptionEXT &
- setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescriptionEXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
{
binding = binding_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescriptionEXT &
- setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescriptionEXT & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT
{
divisor = divisor_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVertexInputBindingDivisorDescriptionEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVertexInputBindingDivisorDescriptionEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVertexInputBindingDivisorDescriptionEXT *>( this );
}
- explicit operator VkVertexInputBindingDivisorDescriptionEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVertexInputBindingDivisorDescriptionEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVertexInputBindingDivisorDescriptionEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -76108,7 +79234,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( VertexInputBindingDivisorDescriptionEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( binding == rhs.binding ) && ( divisor == rhs.divisor );
@@ -76125,65 +79251,54 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t binding = {};
uint32_t divisor = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT ) ==
- sizeof( VkVertexInputBindingDivisorDescriptionEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT>::value,
- "VertexInputBindingDivisorDescriptionEXT is not nothrow_move_constructible!" );
struct PipelineVertexInputDivisorStateCreateInfoEXT
{
using NativeType = VkPipelineVertexInputDivisorStateCreateInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT(
- uint32_t vertexBindingDivisorCount_ = {},
- const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors_ = {} )
- VULKAN_HPP_NOEXCEPT
- : vertexBindingDivisorCount( vertexBindingDivisorCount_ )
+ VULKAN_HPP_CONSTEXPR
+ PipelineVertexInputDivisorStateCreateInfoEXT( uint32_t vertexBindingDivisorCount_ = {},
+ const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , vertexBindingDivisorCount( vertexBindingDivisorCount_ )
, pVertexBindingDivisors( pVertexBindingDivisors_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT(
- PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineVertexInputDivisorStateCreateInfoEXT( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PipelineVertexInputDivisorStateCreateInfoEXT(
- *reinterpret_cast<PipelineVertexInputDivisorStateCreateInfoEXT const *>( &rhs ) )
- {}
+ PipelineVertexInputDivisorStateCreateInfoEXT( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineVertexInputDivisorStateCreateInfoEXT( *reinterpret_cast<PipelineVertexInputDivisorStateCreateInfoEXT const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineVertexInputDivisorStateCreateInfoEXT(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
- const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT> const & vertexBindingDivisors_ )
- : vertexBindingDivisorCount( static_cast<uint32_t>( vertexBindingDivisors_.size() ) )
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT> const & vertexBindingDivisors_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , vertexBindingDivisorCount( static_cast<uint32_t>( vertexBindingDivisors_.size() ) )
, pVertexBindingDivisors( vertexBindingDivisors_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineVertexInputDivisorStateCreateInfoEXT &
- operator=( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineVertexInputDivisorStateCreateInfoEXT & operator=( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineVertexInputDivisorStateCreateInfoEXT &
- operator=( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineVertexInputDivisorStateCreateInfoEXT & operator=( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfoEXT &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
@@ -76196,9 +79311,8 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfoEXT & setPVertexBindingDivisors(
- const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors_ )
- VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfoEXT &
+ setPVertexBindingDivisors( const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors_ ) VULKAN_HPP_NOEXCEPT
{
pVertexBindingDivisors = pVertexBindingDivisors_;
return *this;
@@ -76206,8 +79320,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineVertexInputDivisorStateCreateInfoEXT & setVertexBindingDivisors(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
- const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT> const & vertexBindingDivisors_ )
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT> const & vertexBindingDivisors_ )
VULKAN_HPP_NOEXCEPT
{
vertexBindingDivisorCount = static_cast<uint32_t>( vertexBindingDivisors_.size() );
@@ -76217,17 +79330,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineVertexInputDivisorStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineVertexInputDivisorStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineVertexInputDivisorStateCreateInfoEXT *>( this );
}
- explicit operator VkPipelineVertexInputDivisorStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineVertexInputDivisorStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineVertexInputDivisorStateCreateInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -76247,11 +79360,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( vertexBindingDivisorCount == rhs.vertexBindingDivisorCount ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexBindingDivisorCount == rhs.vertexBindingDivisorCount ) &&
( pVertexBindingDivisors == rhs.pVertexBindingDivisors );
# endif
}
@@ -76263,20 +79375,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT;
- const void * pNext = {};
- uint32_t vertexBindingDivisorCount = {};
- const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT;
+ const void * pNext = {};
+ uint32_t vertexBindingDivisorCount = {};
+ const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT ) ==
- sizeof( VkPipelineVertexInputDivisorStateCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT>::value,
- "PipelineVertexInputDivisorStateCreateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT>
@@ -76289,62 +79392,61 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPipelineViewportCoarseSampleOrderStateCreateInfoNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV(
- VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ =
- VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault,
+ VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault,
uint32_t customSampleOrderCount_ = {},
- const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders_ = {} ) VULKAN_HPP_NOEXCEPT
- : sampleOrderType( sampleOrderType_ )
+ const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , sampleOrderType( sampleOrderType_ )
, customSampleOrderCount( customSampleOrderCount_ )
, pCustomSampleOrders( pCustomSampleOrders_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV(
- PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PipelineViewportCoarseSampleOrderStateCreateInfoNV( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineViewportCoarseSampleOrderStateCreateInfoNV(
- VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : PipelineViewportCoarseSampleOrderStateCreateInfoNV(
- *reinterpret_cast<PipelineViewportCoarseSampleOrderStateCreateInfoNV const *>( &rhs ) )
- {}
+ PipelineViewportCoarseSampleOrderStateCreateInfoNV( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineViewportCoarseSampleOrderStateCreateInfoNV( *reinterpret_cast<PipelineViewportCoarseSampleOrderStateCreateInfoNV const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineViewportCoarseSampleOrderStateCreateInfoNV(
- VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const &
- customSampleOrders_ )
- : sampleOrderType( sampleOrderType_ )
+ VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , sampleOrderType( sampleOrderType_ )
, customSampleOrderCount( static_cast<uint32_t>( customSampleOrders_.size() ) )
, pCustomSampleOrders( customSampleOrders_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PipelineViewportCoarseSampleOrderStateCreateInfoNV &
operator=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineViewportCoarseSampleOrderStateCreateInfoNV &
- operator=( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineViewportCoarseSampleOrderStateCreateInfoNV & operator=( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV &
- setSampleOrderType( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ ) VULKAN_HPP_NOEXCEPT
+ setSampleOrderType( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ ) VULKAN_HPP_NOEXCEPT
{
sampleOrderType = sampleOrderType_;
return *this;
@@ -76357,8 +79459,8 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPCustomSampleOrders(
- const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV &
+ setPCustomSampleOrders( const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders_ ) VULKAN_HPP_NOEXCEPT
{
pCustomSampleOrders = pCustomSampleOrders_;
return *this;
@@ -76366,8 +79468,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineViewportCoarseSampleOrderStateCreateInfoNV & setCustomSampleOrders(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const &
- customSampleOrders_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders_ ) VULKAN_HPP_NOEXCEPT
{
customSampleOrderCount = static_cast<uint32_t>( customSampleOrders_.size() );
pCustomSampleOrders = customSampleOrders_.data();
@@ -76376,17 +79477,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineViewportCoarseSampleOrderStateCreateInfoNV *>( this );
}
- explicit operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineViewportCoarseSampleOrderStateCreateInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -76407,12 +79508,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleOrderType == rhs.sampleOrderType ) &&
- ( customSampleOrderCount == rhs.customSampleOrderCount ) &&
- ( pCustomSampleOrders == rhs.pCustomSampleOrders );
+ ( customSampleOrderCount == rhs.customSampleOrderCount ) && ( pCustomSampleOrders == rhs.pCustomSampleOrders );
# endif
}
@@ -76423,22 +79523,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType =
- VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault;
uint32_t customSampleOrderCount = {};
const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV ) ==
- sizeof( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV>::value,
- "PipelineViewportCoarseSampleOrderStateCreateInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV>
@@ -76451,37 +79541,35 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPipelineViewportDepthClipControlCreateInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineViewportDepthClipControlCreateInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportDepthClipControlCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineViewportDepthClipControlCreateInfoEXT(
- VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne_ = {} ) VULKAN_HPP_NOEXCEPT : negativeOneToOne( negativeOneToOne_ )
- {}
+ VULKAN_HPP_CONSTEXPR PipelineViewportDepthClipControlCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , negativeOneToOne( negativeOneToOne_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineViewportDepthClipControlCreateInfoEXT(
- PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PipelineViewportDepthClipControlCreateInfoEXT( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineViewportDepthClipControlCreateInfoEXT( VkPipelineViewportDepthClipControlCreateInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PipelineViewportDepthClipControlCreateInfoEXT(
- *reinterpret_cast<PipelineViewportDepthClipControlCreateInfoEXT const *>( &rhs ) )
- {}
+ PipelineViewportDepthClipControlCreateInfoEXT( VkPipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineViewportDepthClipControlCreateInfoEXT( *reinterpret_cast<PipelineViewportDepthClipControlCreateInfoEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineViewportDepthClipControlCreateInfoEXT &
- operator=( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineViewportDepthClipControlCreateInfoEXT & operator=( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineViewportDepthClipControlCreateInfoEXT &
- operator=( VkPipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineViewportDepthClipControlCreateInfoEXT & operator=( VkPipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClipControlCreateInfoEXT &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClipControlCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
@@ -76495,17 +79583,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineViewportDepthClipControlCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineViewportDepthClipControlCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineViewportDepthClipControlCreateInfoEXT *>( this );
}
- explicit operator VkPipelineViewportDepthClipControlCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineViewportDepthClipControlCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineViewportDepthClipControlCreateInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -76522,7 +79610,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( negativeOneToOne == rhs.negativeOneToOne );
@@ -76536,19 +79624,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportDepthClipControlCreateInfoEXT;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportDepthClipControlCreateInfoEXT;
+ const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT ) ==
- sizeof( VkPipelineViewportDepthClipControlCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT>::value,
- "PipelineViewportDepthClipControlCreateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineViewportDepthClipControlCreateInfoEXT>
@@ -76561,72 +79640,67 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPipelineViewportExclusiveScissorStateCreateInfoNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV(
- uint32_t exclusiveScissorCount_ = {},
- const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors_ = {} ) VULKAN_HPP_NOEXCEPT
- : exclusiveScissorCount( exclusiveScissorCount_ )
+ VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( uint32_t exclusiveScissorCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , exclusiveScissorCount( exclusiveScissorCount_ )
, pExclusiveScissors( pExclusiveScissors_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV(
- PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PipelineViewportExclusiveScissorStateCreateInfoNV( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineViewportExclusiveScissorStateCreateInfoNV( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PipelineViewportExclusiveScissorStateCreateInfoNV(
- *reinterpret_cast<PipelineViewportExclusiveScissorStateCreateInfoNV const *>( &rhs ) )
- {}
+ PipelineViewportExclusiveScissorStateCreateInfoNV( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineViewportExclusiveScissorStateCreateInfoNV( *reinterpret_cast<PipelineViewportExclusiveScissorStateCreateInfoNV const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineViewportExclusiveScissorStateCreateInfoNV(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors_ )
- : exclusiveScissorCount( static_cast<uint32_t>( exclusiveScissors_.size() ) )
- , pExclusiveScissors( exclusiveScissors_.data() )
- {}
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors_, const void * pNext_ = nullptr )
+ : pNext( pNext_ ), exclusiveScissorCount( static_cast<uint32_t>( exclusiveScissors_.size() ) ), pExclusiveScissors( exclusiveScissors_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PipelineViewportExclusiveScissorStateCreateInfoNV &
operator=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineViewportExclusiveScissorStateCreateInfoNV &
- operator=( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineViewportExclusiveScissorStateCreateInfoNV & operator=( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV &
- setExclusiveScissorCount( uint32_t exclusiveScissorCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & setExclusiveScissorCount( uint32_t exclusiveScissorCount_ ) VULKAN_HPP_NOEXCEPT
{
exclusiveScissorCount = exclusiveScissorCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV &
- setPExclusiveScissors( const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors_ ) VULKAN_HPP_NOEXCEPT
+ setPExclusiveScissors( const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors_ ) VULKAN_HPP_NOEXCEPT
{
pExclusiveScissors = pExclusiveScissors_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PipelineViewportExclusiveScissorStateCreateInfoNV & setExclusiveScissors(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors_ )
- VULKAN_HPP_NOEXCEPT
+ PipelineViewportExclusiveScissorStateCreateInfoNV &
+ setExclusiveScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors_ ) VULKAN_HPP_NOEXCEPT
{
exclusiveScissorCount = static_cast<uint32_t>( exclusiveScissors_.size() );
pExclusiveScissors = exclusiveScissors_.data();
@@ -76635,24 +79709,21 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineViewportExclusiveScissorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineViewportExclusiveScissorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineViewportExclusiveScissorStateCreateInfoNV *>( this );
}
- explicit operator VkPipelineViewportExclusiveScissorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineViewportExclusiveScissorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineViewportExclusiveScissorStateCreateInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- uint32_t const &,
- const VULKAN_HPP_NAMESPACE::Rect2D * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Rect2D * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -76665,11 +79736,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( exclusiveScissorCount == rhs.exclusiveScissorCount ) && ( pExclusiveScissors == rhs.pExclusiveScissors );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exclusiveScissorCount == rhs.exclusiveScissorCount ) &&
+ ( pExclusiveScissors == rhs.pExclusiveScissors );
# endif
}
@@ -76680,20 +79751,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV;
+ const void * pNext = {};
uint32_t exclusiveScissorCount = {};
const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV ) ==
- sizeof( VkPipelineViewportExclusiveScissorStateCreateInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV>::value,
- "PipelineViewportExclusiveScissorStateCreateInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV>
@@ -76706,26 +79768,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkShadingRatePaletteNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV(
- uint32_t shadingRatePaletteEntryCount_ = {},
- const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( uint32_t shadingRatePaletteEntryCount_ = {},
+ const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_ = {} ) VULKAN_HPP_NOEXCEPT
: shadingRatePaletteEntryCount( shadingRatePaletteEntryCount_ )
, pShadingRatePaletteEntries( pShadingRatePaletteEntries_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ShadingRatePaletteNV( *reinterpret_cast<ShadingRatePaletteNV const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ShadingRatePaletteNV(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV> const &
- shadingRatePaletteEntries_ )
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV> const & shadingRatePaletteEntries_ )
: shadingRatePaletteEntryCount( static_cast<uint32_t>( shadingRatePaletteEntries_.size() ) )
, pShadingRatePaletteEntries( shadingRatePaletteEntries_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -76738,15 +79801,14 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV &
- setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV & setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ ) VULKAN_HPP_NOEXCEPT
{
shadingRatePaletteEntryCount = shadingRatePaletteEntryCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV & setPShadingRatePaletteEntries(
- const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV &
+ setPShadingRatePaletteEntries( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
{
pShadingRatePaletteEntries = pShadingRatePaletteEntries_;
return *this;
@@ -76754,8 +79816,8 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ShadingRatePaletteNV & setShadingRatePaletteEntries(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV> const &
- shadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV> const & shadingRatePaletteEntries_ )
+ VULKAN_HPP_NOEXCEPT
{
shadingRatePaletteEntryCount = static_cast<uint32_t>( shadingRatePaletteEntries_.size() );
pShadingRatePaletteEntries = shadingRatePaletteEntries_.data();
@@ -76764,17 +79826,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkShadingRatePaletteNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkShadingRatePaletteNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkShadingRatePaletteNV *>( this );
}
- explicit operator VkShadingRatePaletteNV &() VULKAN_HPP_NOEXCEPT
+ operator VkShadingRatePaletteNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkShadingRatePaletteNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -76791,11 +79853,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ShadingRatePaletteNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount ) &&
- ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries );
+ return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount ) && ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries );
# endif
}
@@ -76809,87 +79870,79 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t shadingRatePaletteEntryCount = {};
const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV>::value,
- "ShadingRatePaletteNV is not nothrow_move_constructible!" );
struct PipelineViewportShadingRateImageStateCreateInfoNV
{
using NativeType = VkPipelineViewportShadingRateImageStateCreateInfoNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV(
- VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ = {},
- uint32_t viewportCount_ = {},
- const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes_ = {} ) VULKAN_HPP_NOEXCEPT
- : shadingRateImageEnable( shadingRateImageEnable_ )
+ VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ = {},
+ uint32_t viewportCount_ = {},
+ const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , shadingRateImageEnable( shadingRateImageEnable_ )
, viewportCount( viewportCount_ )
, pShadingRatePalettes( pShadingRatePalettes_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV(
- PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ PipelineViewportShadingRateImageStateCreateInfoNV( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PipelineViewportShadingRateImageStateCreateInfoNV(
- *reinterpret_cast<PipelineViewportShadingRateImageStateCreateInfoNV const *>( &rhs ) )
- {}
+ PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineViewportShadingRateImageStateCreateInfoNV( *reinterpret_cast<PipelineViewportShadingRateImageStateCreateInfoNV const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineViewportShadingRateImageStateCreateInfoNV(
- VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const &
- shadingRatePalettes_ )
- : shadingRateImageEnable( shadingRateImageEnable_ )
+ VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , shadingRateImageEnable( shadingRateImageEnable_ )
, viewportCount( static_cast<uint32_t>( shadingRatePalettes_.size() ) )
, pShadingRatePalettes( shadingRatePalettes_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PipelineViewportShadingRateImageStateCreateInfoNV &
operator=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineViewportShadingRateImageStateCreateInfoNV &
- operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineViewportShadingRateImageStateCreateInfoNV & operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this =
- *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV &
- setShadingRateImageEnable( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT
+ setShadingRateImageEnable( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT
{
shadingRateImageEnable = shadingRateImageEnable_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV &
- setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
{
viewportCount = viewportCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & setPShadingRatePalettes(
- const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV &
+ setPShadingRatePalettes( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT
{
pShadingRatePalettes = pShadingRatePalettes_;
return *this;
@@ -76897,8 +79950,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRatePalettes(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const &
- shadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT
{
viewportCount = static_cast<uint32_t>( shadingRatePalettes_.size() );
pShadingRatePalettes = shadingRatePalettes_.data();
@@ -76907,17 +79959,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineViewportShadingRateImageStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineViewportShadingRateImageStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineViewportShadingRateImageStateCreateInfoNV *>( this );
}
- explicit operator VkPipelineViewportShadingRateImageStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineViewportShadingRateImageStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineViewportShadingRateImageStateCreateInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -76938,12 +79990,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( shadingRateImageEnable == rhs.shadingRateImageEnable ) && ( viewportCount == rhs.viewportCount ) &&
- ( pShadingRatePalettes == rhs.pShadingRatePalettes );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateImageEnable == rhs.shadingRateImageEnable ) &&
+ ( viewportCount == rhs.viewportCount ) && ( pShadingRatePalettes == rhs.pShadingRatePalettes );
# endif
}
@@ -76954,21 +80005,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable = {};
- uint32_t viewportCount = {};
- const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable = {};
+ uint32_t viewportCount = {};
+ const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV ) ==
- sizeof( VkPipelineViewportShadingRateImageStateCreateInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV>::value,
- "PipelineViewportShadingRateImageStateCreateInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV>
@@ -76981,26 +80023,21 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkViewportSwizzleNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- ViewportSwizzleNV( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ =
- VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
- VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ =
- VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
- VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ =
- VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
- VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ =
- VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ViewportSwizzleNV(
+ VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
+ VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
+ VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
+ VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX ) VULKAN_HPP_NOEXCEPT
: x( x_ )
, y( y_ )
, z( z_ )
, w( w_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ViewportSwizzleNV( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : ViewportSwizzleNV( *reinterpret_cast<ViewportSwizzleNV const *>( &rhs ) )
- {}
+ ViewportSwizzleNV( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT : ViewportSwizzleNV( *reinterpret_cast<ViewportSwizzleNV const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ViewportSwizzleNV & operator=( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -77012,46 +80049,42 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV &
- setX( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setX( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ ) VULKAN_HPP_NOEXCEPT
{
x = x_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV &
- setY( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setY( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ ) VULKAN_HPP_NOEXCEPT
{
y = y_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV &
- setZ( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setZ( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ ) VULKAN_HPP_NOEXCEPT
{
z = z_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV &
- setW( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setW( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ ) VULKAN_HPP_NOEXCEPT
{
w = w_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkViewportSwizzleNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkViewportSwizzleNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkViewportSwizzleNV *>( this );
}
- explicit operator VkViewportSwizzleNV &() VULKAN_HPP_NOEXCEPT
+ operator VkViewportSwizzleNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkViewportSwizzleNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -77071,7 +80104,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ViewportSwizzleNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z ) && ( w == rhs.w );
@@ -77090,86 +80123,74 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ViewportSwizzleNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ViewportSwizzleNV>::value,
- "ViewportSwizzleNV is not nothrow_move_constructible!" );
struct PipelineViewportSwizzleStateCreateInfoNV
{
using NativeType = VkPipelineViewportSwizzleStateCreateInfoNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineViewportSwizzleStateCreateInfoNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV(
- VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ = {},
- uint32_t viewportCount_ = {},
- const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ = {},
+ uint32_t viewportCount_ = {},
+ const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, viewportCount( viewportCount_ )
, pViewportSwizzles( pViewportSwizzles_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV(
- PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PipelineViewportSwizzleStateCreateInfoNV(
- *reinterpret_cast<PipelineViewportSwizzleStateCreateInfoNV const *>( &rhs ) )
- {}
+ PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineViewportSwizzleStateCreateInfoNV( *reinterpret_cast<PipelineViewportSwizzleStateCreateInfoNV const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineViewportSwizzleStateCreateInfoNV(
- VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const &
- viewportSwizzles_ )
- : flags( flags_ )
- , viewportCount( static_cast<uint32_t>( viewportSwizzles_.size() ) )
- , pViewportSwizzles( viewportSwizzles_.data() )
- {}
+ VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ ), flags( flags_ ), viewportCount( static_cast<uint32_t>( viewportSwizzles_.size() ) ), pViewportSwizzles( viewportSwizzles_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineViewportSwizzleStateCreateInfoNV &
- operator=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineViewportSwizzleStateCreateInfoNV & operator=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineViewportSwizzleStateCreateInfoNV &
- operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineViewportSwizzleStateCreateInfoNV & operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV &
- setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+ setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV &
- setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
{
viewportCount = viewportCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV &
- setPViewportSwizzles( const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles_ ) VULKAN_HPP_NOEXCEPT
+ setPViewportSwizzles( const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles_ ) VULKAN_HPP_NOEXCEPT
{
pViewportSwizzles = pViewportSwizzles_;
return *this;
@@ -77177,8 +80198,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineViewportSwizzleStateCreateInfoNV & setViewportSwizzles(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const &
- viewportSwizzles_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles_ ) VULKAN_HPP_NOEXCEPT
{
viewportCount = static_cast<uint32_t>( viewportSwizzles_.size() );
pViewportSwizzles = viewportSwizzles_.data();
@@ -77187,17 +80207,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineViewportSwizzleStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineViewportSwizzleStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineViewportSwizzleStateCreateInfoNV *>( this );
}
- explicit operator VkPipelineViewportSwizzleStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineViewportSwizzleStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineViewportSwizzleStateCreateInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -77218,11 +80238,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( viewportCount == rhs.viewportCount ) && ( pViewportSwizzles == rhs.pViewportSwizzles );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( viewportCount == rhs.viewportCount ) &&
+ ( pViewportSwizzles == rhs.pViewportSwizzles );
# endif
}
@@ -77233,21 +80253,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV;
+ const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags = {};
uint32_t viewportCount = {};
const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV ) ==
- sizeof( VkPipelineViewportSwizzleStateCreateInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV>::value,
- "PipelineViewportSwizzleStateCreateInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineViewportSwizzleStateCreateInfoNV>
@@ -77263,13 +80274,12 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_CONSTEXPR ViewportWScalingNV( float xcoeff_ = {}, float ycoeff_ = {} ) VULKAN_HPP_NOEXCEPT
: xcoeff( xcoeff_ )
, ycoeff( ycoeff_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ViewportWScalingNV( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ViewportWScalingNV( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : ViewportWScalingNV( *reinterpret_cast<ViewportWScalingNV const *>( &rhs ) )
- {}
+ ViewportWScalingNV( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT : ViewportWScalingNV( *reinterpret_cast<ViewportWScalingNV const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ViewportWScalingNV & operator=( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -77294,17 +80304,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkViewportWScalingNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkViewportWScalingNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkViewportWScalingNV *>( this );
}
- explicit operator VkViewportWScalingNV &() VULKAN_HPP_NOEXCEPT
+ operator VkViewportWScalingNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkViewportWScalingNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -77321,7 +80331,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ViewportWScalingNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( xcoeff == rhs.xcoeff ) && ( ycoeff == rhs.ycoeff );
@@ -77338,86 +80348,77 @@ namespace VULKAN_HPP_NAMESPACE
float xcoeff = {};
float ycoeff = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ViewportWScalingNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ViewportWScalingNV>::value,
- "ViewportWScalingNV is not nothrow_move_constructible!" );
struct PipelineViewportWScalingStateCreateInfoNV
{
using NativeType = VkPipelineViewportWScalingStateCreateInfoNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::ePipelineViewportWScalingStateCreateInfoNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportWScalingStateCreateInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV(
- VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ = {},
- uint32_t viewportCount_ = {},
- const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings_ = {} ) VULKAN_HPP_NOEXCEPT
- : viewportWScalingEnable( viewportWScalingEnable_ )
+ VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ = {},
+ uint32_t viewportCount_ = {},
+ const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , viewportWScalingEnable( viewportWScalingEnable_ )
, viewportCount( viewportCount_ )
, pViewportWScalings( pViewportWScalings_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV(
- PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : PipelineViewportWScalingStateCreateInfoNV(
- *reinterpret_cast<PipelineViewportWScalingStateCreateInfoNV const *>( &rhs ) )
- {}
+ PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineViewportWScalingStateCreateInfoNV( *reinterpret_cast<PipelineViewportWScalingStateCreateInfoNV const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineViewportWScalingStateCreateInfoNV(
- VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const &
- viewportWScalings_ )
- : viewportWScalingEnable( viewportWScalingEnable_ )
+ VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , viewportWScalingEnable( viewportWScalingEnable_ )
, viewportCount( static_cast<uint32_t>( viewportWScalings_.size() ) )
, pViewportWScalings( viewportWScalings_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- PipelineViewportWScalingStateCreateInfoNV &
- operator=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineViewportWScalingStateCreateInfoNV & operator=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PipelineViewportWScalingStateCreateInfoNV &
- operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineViewportWScalingStateCreateInfoNV & operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV &
- setViewportWScalingEnable( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT
+ setViewportWScalingEnable( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT
{
viewportWScalingEnable = viewportWScalingEnable_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV &
- setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
{
viewportCount = viewportCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV &
- setPViewportWScalings( const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings_ ) VULKAN_HPP_NOEXCEPT
+ setPViewportWScalings( const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings_ ) VULKAN_HPP_NOEXCEPT
{
pViewportWScalings = pViewportWScalings_;
return *this;
@@ -77425,8 +80426,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PipelineViewportWScalingStateCreateInfoNV & setViewportWScalings(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const &
- viewportWScalings_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings_ ) VULKAN_HPP_NOEXCEPT
{
viewportCount = static_cast<uint32_t>( viewportWScalings_.size() );
pViewportWScalings = viewportWScalings_.data();
@@ -77435,17 +80435,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPipelineViewportWScalingStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPipelineViewportWScalingStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPipelineViewportWScalingStateCreateInfoNV *>( this );
}
- explicit operator VkPipelineViewportWScalingStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkPipelineViewportWScalingStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPipelineViewportWScalingStateCreateInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -77466,12 +80466,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PipelineViewportWScalingStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( viewportWScalingEnable == rhs.viewportWScalingEnable ) && ( viewportCount == rhs.viewportCount ) &&
- ( pViewportWScalings == rhs.pViewportWScalings );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewportWScalingEnable == rhs.viewportWScalingEnable ) &&
+ ( viewportCount == rhs.viewportCount ) && ( pViewportWScalings == rhs.pViewportWScalings );
# endif
}
@@ -77482,21 +80481,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportWScalingStateCreateInfoNV;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportWScalingStateCreateInfoNV;
+ const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable = {};
uint32_t viewportCount = {};
const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV ) ==
- sizeof( VkPipelineViewportWScalingStateCreateInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV>::value,
- "PipelineViewportWScalingStateCreateInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePipelineViewportWScalingStateCreateInfoNV>
@@ -77513,15 +80503,18 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentFrameTokenGGP;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( GgpFrameToken frameToken_ = {} ) VULKAN_HPP_NOEXCEPT
- : frameToken( frameToken_ )
- {}
+ VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( GgpFrameToken frameToken_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , frameToken( frameToken_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PresentFrameTokenGGP( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
: PresentFrameTokenGGP( *reinterpret_cast<PresentFrameTokenGGP const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PresentFrameTokenGGP & operator=( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -77546,17 +80539,17 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPresentFrameTokenGGP const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPresentFrameTokenGGP const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPresentFrameTokenGGP *>( this );
}
- explicit operator VkPresentFrameTokenGGP &() VULKAN_HPP_NOEXCEPT
+ operator VkPresentFrameTokenGGP &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPresentFrameTokenGGP *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -77584,8 +80577,7 @@ namespace VULKAN_HPP_NAMESPACE
bool operator==( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( memcmp( &frameToken, &rhs.frameToken, sizeof( GgpFrameToken ) ) == 0 );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &frameToken, &rhs.frameToken, sizeof( GgpFrameToken ) ) == 0 );
}
bool operator!=( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
@@ -77598,12 +80590,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
GgpFrameToken frameToken = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP ) == sizeof( VkPresentFrameTokenGGP ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP>::value,
- "PresentFrameTokenGGP is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePresentFrameTokenGGP>
@@ -77620,22 +80606,22 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentIdKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PresentIdKHR( uint32_t swapchainCount_ = {},
- const uint64_t * pPresentIds_ = {} ) VULKAN_HPP_NOEXCEPT
- : swapchainCount( swapchainCount_ )
+ VULKAN_HPP_CONSTEXPR PresentIdKHR( uint32_t swapchainCount_ = {}, const uint64_t * pPresentIds_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , swapchainCount( swapchainCount_ )
, pPresentIds( pPresentIds_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR PresentIdKHR( PresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PresentIdKHR( VkPresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : PresentIdKHR( *reinterpret_cast<PresentIdKHR const *>( &rhs ) )
- {}
+ PresentIdKHR( VkPresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PresentIdKHR( *reinterpret_cast<PresentIdKHR const *>( &rhs ) ) {}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PresentIdKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & presentIds_ )
- : swapchainCount( static_cast<uint32_t>( presentIds_.size() ) ), pPresentIds( presentIds_.data() )
- {}
+ PresentIdKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & presentIds_, const void * pNext_ = nullptr )
+ : pNext( pNext_ ), swapchainCount( static_cast<uint32_t>( presentIds_.size() ) ), pPresentIds( presentIds_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -77667,8 +80653,7 @@ namespace VULKAN_HPP_NAMESPACE
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PresentIdKHR & setPresentIds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & presentIds_ )
- VULKAN_HPP_NOEXCEPT
+ PresentIdKHR & setPresentIds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & presentIds_ ) VULKAN_HPP_NOEXCEPT
{
swapchainCount = static_cast<uint32_t>( presentIds_.size() );
pPresentIds = presentIds_.data();
@@ -77677,24 +80662,21 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPresentIdKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPresentIdKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPresentIdKHR *>( this );
}
- explicit operator VkPresentIdKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPresentIdKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPresentIdKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- uint32_t const &,
- const uint64_t * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint64_t * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -77707,11 +80689,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PresentIdKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) &&
- ( pPresentIds == rhs.pPresentIds );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pPresentIds == rhs.pPresentIds );
# endif
}
@@ -77727,12 +80708,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t swapchainCount = {};
const uint64_t * pPresentIds = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentIdKHR ) == sizeof( VkPresentIdKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentIdKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentIdKHR>::value,
- "PresentIdKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePresentIdKHR>
@@ -77753,28 +80728,30 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t swapchainCount_ = {},
const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains_ = {},
const uint32_t * pImageIndices_ = {},
- VULKAN_HPP_NAMESPACE::Result * pResults_ = {} ) VULKAN_HPP_NOEXCEPT
- : waitSemaphoreCount( waitSemaphoreCount_ )
+ VULKAN_HPP_NAMESPACE::Result * pResults_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , waitSemaphoreCount( waitSemaphoreCount_ )
, pWaitSemaphores( pWaitSemaphores_ )
, swapchainCount( swapchainCount_ )
, pSwapchains( pSwapchains_ )
, pImageIndices( pImageIndices_ )
, pResults( pResults_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR PresentInfoKHR( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PresentInfoKHR( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : PresentInfoKHR( *reinterpret_cast<PresentInfoKHR const *>( &rhs ) )
- {}
+ PresentInfoKHR( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PresentInfoKHR( *reinterpret_cast<PresentInfoKHR const *>( &rhs ) ) {}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PresentInfoKHR(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains_ = {},
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & imageIndices_ = {},
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::Result> const & results_ = {} )
- : waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) )
+ PresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains_ = {},
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & imageIndices_ = {},
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::Result> const & results_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) )
, pWaitSemaphores( waitSemaphores_.data() )
, swapchainCount( static_cast<uint32_t>( swapchains_.size() ) )
, pSwapchains( swapchains_.data() )
@@ -77788,20 +80765,15 @@ namespace VULKAN_HPP_NAMESPACE
# else
if ( swapchains_.size() != imageIndices_.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::PresentInfoKHR::PresentInfoKHR: swapchains_.size() != imageIndices_.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: swapchains_.size() != imageIndices_.size()" );
}
if ( !results_.empty() && ( swapchains_.size() != results_.size() ) )
{
- throw LogicError(
- VULKAN_HPP_NAMESPACE_STRING
- "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( swapchains_.size() != results_.size() )" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( swapchains_.size() != results_.size() )" );
}
if ( !results_.empty() && ( imageIndices_.size() != results_.size() ) )
{
- throw LogicError(
- VULKAN_HPP_NAMESPACE_STRING
- "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( imageIndices_.size() != results_.size() )" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( imageIndices_.size() != results_.size() )" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
@@ -77829,17 +80801,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR &
- setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
pWaitSemaphores = pWaitSemaphores_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PresentInfoKHR & setWaitSemaphores(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_ )
- VULKAN_HPP_NOEXCEPT
+ PresentInfoKHR &
+ setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
pWaitSemaphores = waitSemaphores_.data();
@@ -77853,17 +80823,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR &
- setPSwapchains( const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPSwapchains( const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains_ ) VULKAN_HPP_NOEXCEPT
{
pSwapchains = pSwapchains_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PresentInfoKHR & setSwapchains(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains_ )
- VULKAN_HPP_NOEXCEPT
+ PresentInfoKHR &
+ setSwapchains( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains_ ) VULKAN_HPP_NOEXCEPT
{
swapchainCount = static_cast<uint32_t>( swapchains_.size() );
pSwapchains = swapchains_.data();
@@ -77878,8 +80846,7 @@ namespace VULKAN_HPP_NAMESPACE
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PresentInfoKHR & setImageIndices(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & imageIndices_ ) VULKAN_HPP_NOEXCEPT
+ PresentInfoKHR & setImageIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & imageIndices_ ) VULKAN_HPP_NOEXCEPT
{
swapchainCount = static_cast<uint32_t>( imageIndices_.size() );
pImageIndices = imageIndices_.data();
@@ -77894,8 +80861,7 @@ namespace VULKAN_HPP_NAMESPACE
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PresentInfoKHR & setResults(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::Result> const & results_ ) VULKAN_HPP_NOEXCEPT
+ PresentInfoKHR & setResults( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::Result> const & results_ ) VULKAN_HPP_NOEXCEPT
{
swapchainCount = static_cast<uint32_t>( results_.size() );
pResults = results_.data();
@@ -77904,17 +80870,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPresentInfoKHR *>( this );
}
- explicit operator VkPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPresentInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -77929,8 +80895,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- sType, pNext, waitSemaphoreCount, pWaitSemaphores, swapchainCount, pSwapchains, pImageIndices, pResults );
+ return std::tie( sType, pNext, waitSemaphoreCount, pWaitSemaphores, swapchainCount, pSwapchains, pImageIndices, pResults );
}
#endif
@@ -77939,13 +80904,12 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) &&
- ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( swapchainCount == rhs.swapchainCount ) &&
- ( pSwapchains == rhs.pSwapchains ) && ( pImageIndices == rhs.pImageIndices ) &&
- ( pResults == rhs.pResults );
+ ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( swapchainCount == rhs.swapchainCount ) && ( pSwapchains == rhs.pSwapchains ) &&
+ ( pImageIndices == rhs.pImageIndices ) && ( pResults == rhs.pResults );
# endif
}
@@ -77965,12 +80929,6 @@ namespace VULKAN_HPP_NAMESPACE
const uint32_t * pImageIndices = {};
VULKAN_HPP_NAMESPACE::Result * pResults = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentInfoKHR ) == sizeof( VkPresentInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentInfoKHR>::value,
- "PresentInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePresentInfoKHR>
@@ -77983,23 +80941,19 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkRectLayerKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR RectLayerKHR( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D extent_ = {},
- uint32_t layer_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR
+ RectLayerKHR( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {}, uint32_t layer_ = {} ) VULKAN_HPP_NOEXCEPT
: offset( offset_ )
, extent( extent_ )
, layer( layer_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR RectLayerKHR( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- RectLayerKHR( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : RectLayerKHR( *reinterpret_cast<RectLayerKHR const *>( &rhs ) )
- {}
+ RectLayerKHR( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT : RectLayerKHR( *reinterpret_cast<RectLayerKHR const *>( &rhs ) ) {}
- explicit RectLayerKHR( Rect2D const & rect2D, uint32_t layer_ = {} )
- : offset( rect2D.offset ), extent( rect2D.extent ), layer( layer_ )
- {}
+ explicit RectLayerKHR( Rect2D const & rect2D, uint32_t layer_ = {} ) : offset( rect2D.offset ), extent( rect2D.extent ), layer( layer_ ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
RectLayerKHR & operator=( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -78011,15 +80965,13 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 RectLayerKHR &
- setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RectLayerKHR &
- setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT
{
extent = extent_;
return *this;
@@ -78032,17 +80984,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkRectLayerKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkRectLayerKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRectLayerKHR *>( this );
}
- explicit operator VkRectLayerKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkRectLayerKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRectLayerKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -78059,7 +81011,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( RectLayerKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( offset == rhs.offset ) && ( extent == rhs.extent ) && ( layer == rhs.layer );
@@ -78077,36 +81029,27 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Extent2D extent = {};
uint32_t layer = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RectLayerKHR ) == sizeof( VkRectLayerKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RectLayerKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RectLayerKHR>::value,
- "RectLayerKHR is not nothrow_move_constructible!" );
struct PresentRegionKHR
{
using NativeType = VkPresentRegionKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PresentRegionKHR( uint32_t rectangleCount_ = {},
- const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PresentRegionKHR( uint32_t rectangleCount_ = {}, const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_ = {} ) VULKAN_HPP_NOEXCEPT
: rectangleCount( rectangleCount_ )
, pRectangles( pRectangles_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR PresentRegionKHR( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PresentRegionKHR( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : PresentRegionKHR( *reinterpret_cast<PresentRegionKHR const *>( &rhs ) )
- {}
+ PresentRegionKHR( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PresentRegionKHR( *reinterpret_cast<PresentRegionKHR const *>( &rhs ) ) {}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PresentRegionKHR(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RectLayerKHR> const & rectangles_ )
+ PresentRegionKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RectLayerKHR> const & rectangles_ )
: rectangleCount( static_cast<uint32_t>( rectangles_.size() ) ), pRectangles( rectangles_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -78125,17 +81068,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PresentRegionKHR &
- setPRectangles( const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PresentRegionKHR & setPRectangles( const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_ ) VULKAN_HPP_NOEXCEPT
{
pRectangles = pRectangles_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PresentRegionKHR & setRectangles(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RectLayerKHR> const & rectangles_ )
- VULKAN_HPP_NOEXCEPT
+ PresentRegionKHR &
+ setRectangles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RectLayerKHR> const & rectangles_ ) VULKAN_HPP_NOEXCEPT
{
rectangleCount = static_cast<uint32_t>( rectangles_.size() );
pRectangles = rectangles_.data();
@@ -78144,17 +81085,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPresentRegionKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPresentRegionKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPresentRegionKHR *>( this );
}
- explicit operator VkPresentRegionKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPresentRegionKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPresentRegionKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -78171,7 +81112,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PresentRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( rectangleCount == rhs.rectangleCount ) && ( pRectangles == rhs.pRectangles );
@@ -78188,12 +81129,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t rectangleCount = {};
const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentRegionKHR ) == sizeof( VkPresentRegionKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentRegionKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentRegionKHR>::value,
- "PresentRegionKHR is not nothrow_move_constructible!" );
struct PresentRegionsKHR
{
@@ -78203,24 +81138,25 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentRegionsKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PresentRegionsKHR( uint32_t swapchainCount_ = {},
- const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_ = {} ) VULKAN_HPP_NOEXCEPT
- : swapchainCount( swapchainCount_ )
+ VULKAN_HPP_CONSTEXPR PresentRegionsKHR( uint32_t swapchainCount_ = {},
+ const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , swapchainCount( swapchainCount_ )
, pRegions( pRegions_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR PresentRegionsKHR( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PresentRegionsKHR( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : PresentRegionsKHR( *reinterpret_cast<PresentRegionsKHR const *>( &rhs ) )
- {}
+ PresentRegionsKHR( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PresentRegionsKHR( *reinterpret_cast<PresentRegionsKHR const *>( &rhs ) ) {}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PresentRegionsKHR(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentRegionKHR> const & regions_ )
- : swapchainCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
- {}
+ PresentRegionsKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentRegionKHR> const & regions_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ ), swapchainCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -78245,17 +81181,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR &
- setPRegions( const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setPRegions( const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_ ) VULKAN_HPP_NOEXCEPT
{
pRegions = pRegions_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PresentRegionsKHR & setRegions(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentRegionKHR> const & regions_ )
- VULKAN_HPP_NOEXCEPT
+ PresentRegionsKHR &
+ setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentRegionKHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
{
swapchainCount = static_cast<uint32_t>( regions_.size() );
pRegions = regions_.data();
@@ -78264,24 +81198,21 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPresentRegionsKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPresentRegionsKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPresentRegionsKHR *>( this );
}
- explicit operator VkPresentRegionsKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPresentRegionsKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPresentRegionsKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- uint32_t const &,
- const VULKAN_HPP_NAMESPACE::PresentRegionKHR * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PresentRegionKHR * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -78294,11 +81225,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PresentRegionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) &&
- ( pRegions == rhs.pRegions );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pRegions == rhs.pRegions );
# endif
}
@@ -78314,12 +81244,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t swapchainCount = {};
const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentRegionsKHR ) == sizeof( VkPresentRegionsKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentRegionsKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentRegionsKHR>::value,
- "PresentRegionsKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePresentRegionsKHR>
@@ -78332,17 +81256,15 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkPresentTimeGOOGLE;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( uint32_t presentID_ = {},
- uint64_t desiredPresentTime_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( uint32_t presentID_ = {}, uint64_t desiredPresentTime_ = {} ) VULKAN_HPP_NOEXCEPT
: presentID( presentID_ )
, desiredPresentTime( desiredPresentTime_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- PresentTimeGOOGLE( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
- : PresentTimeGOOGLE( *reinterpret_cast<PresentTimeGOOGLE const *>( &rhs ) )
- {}
+ PresentTimeGOOGLE( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT : PresentTimeGOOGLE( *reinterpret_cast<PresentTimeGOOGLE const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PresentTimeGOOGLE & operator=( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -78360,25 +81282,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PresentTimeGOOGLE &
- setDesiredPresentTime( uint64_t desiredPresentTime_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PresentTimeGOOGLE & setDesiredPresentTime( uint64_t desiredPresentTime_ ) VULKAN_HPP_NOEXCEPT
{
desiredPresentTime = desiredPresentTime_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPresentTimeGOOGLE const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPresentTimeGOOGLE const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPresentTimeGOOGLE *>( this );
}
- explicit operator VkPresentTimeGOOGLE &() VULKAN_HPP_NOEXCEPT
+ operator VkPresentTimeGOOGLE &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPresentTimeGOOGLE *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -78395,7 +81316,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PresentTimeGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( presentID == rhs.presentID ) && ( desiredPresentTime == rhs.desiredPresentTime );
@@ -78412,12 +81333,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t presentID = {};
uint64_t desiredPresentTime = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE ) == sizeof( VkPresentTimeGOOGLE ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE>::value,
- "PresentTimeGOOGLE is not nothrow_move_constructible!" );
struct PresentTimesInfoGOOGLE
{
@@ -78427,24 +81342,28 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentTimesInfoGOOGLE;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PresentTimesInfoGOOGLE( uint32_t swapchainCount_ = {},
- const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes_ = {} ) VULKAN_HPP_NOEXCEPT
- : swapchainCount( swapchainCount_ )
+ VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( uint32_t swapchainCount_ = {},
+ const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , swapchainCount( swapchainCount_ )
, pTimes( pTimes_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PresentTimesInfoGOOGLE( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
: PresentTimesInfoGOOGLE( *reinterpret_cast<PresentTimesInfoGOOGLE const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PresentTimesInfoGOOGLE(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE> const & times_ )
- : swapchainCount( static_cast<uint32_t>( times_.size() ) ), pTimes( times_.data() )
- {}
+ PresentTimesInfoGOOGLE( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE> const & times_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ ), swapchainCount( static_cast<uint32_t>( times_.size() ) ), pTimes( times_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -78469,17 +81388,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE &
- setPTimes( const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE & setPTimes( const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes_ ) VULKAN_HPP_NOEXCEPT
{
pTimes = pTimes_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- PresentTimesInfoGOOGLE & setTimes(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE> const & times_ )
- VULKAN_HPP_NOEXCEPT
+ PresentTimesInfoGOOGLE &
+ setTimes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE> const & times_ ) VULKAN_HPP_NOEXCEPT
{
swapchainCount = static_cast<uint32_t>( times_.size() );
pTimes = times_.data();
@@ -78488,24 +81405,21 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPresentTimesInfoGOOGLE const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPresentTimesInfoGOOGLE const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPresentTimesInfoGOOGLE *>( this );
}
- explicit operator VkPresentTimesInfoGOOGLE &() VULKAN_HPP_NOEXCEPT
+ operator VkPresentTimesInfoGOOGLE &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPresentTimesInfoGOOGLE *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- uint32_t const &,
- const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -78518,11 +81432,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PresentTimesInfoGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) &&
- ( pTimes == rhs.pTimes );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pTimes == rhs.pTimes );
# endif
}
@@ -78538,13 +81451,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t swapchainCount = {};
const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE ) ==
- sizeof( VkPresentTimesInfoGOOGLE ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE>::value,
- "PresentTimesInfoGOOGLE is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePresentTimesInfoGOOGLE>
@@ -78560,17 +81466,19 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePrivateDataSlotCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- PrivateDataSlotCreateInfo( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- {}
+ VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfo( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- PrivateDataSlotCreateInfo( PrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfo( PrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PrivateDataSlotCreateInfo( VkPrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PrivateDataSlotCreateInfo( *reinterpret_cast<PrivateDataSlotCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PrivateDataSlotCreateInfo & operator=( PrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -78588,31 +81496,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 PrivateDataSlotCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PrivateDataSlotCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkPrivateDataSlotCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkPrivateDataSlotCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( this );
}
- explicit operator VkPrivateDataSlotCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkPrivateDataSlotCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPrivateDataSlotCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -78625,7 +81530,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( PrivateDataSlotCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
@@ -78643,13 +81548,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo ) ==
- sizeof( VkPrivateDataSlotCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo>::value,
- "PrivateDataSlotCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::ePrivateDataSlotCreateInfo>
@@ -78666,15 +81564,17 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eProtectedSubmitInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ = {} ) VULKAN_HPP_NOEXCEPT
- : protectedSubmit( protectedSubmit_ )
- {}
+ VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , protectedSubmit( protectedSubmit_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ProtectedSubmitInfo( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : ProtectedSubmitInfo( *reinterpret_cast<ProtectedSubmitInfo const *>( &rhs ) )
- {}
+ ProtectedSubmitInfo( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ProtectedSubmitInfo( *reinterpret_cast<ProtectedSubmitInfo const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ProtectedSubmitInfo & operator=( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -78692,25 +81592,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ProtectedSubmitInfo &
- setProtectedSubmit( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ProtectedSubmitInfo & setProtectedSubmit( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ ) VULKAN_HPP_NOEXCEPT
{
protectedSubmit = protectedSubmit_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkProtectedSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkProtectedSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkProtectedSubmitInfo *>( this );
}
- explicit operator VkProtectedSubmitInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkProtectedSubmitInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkProtectedSubmitInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -78727,7 +81626,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ProtectedSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedSubmit == rhs.protectedSubmit );
@@ -78745,12 +81644,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo ) == sizeof( VkProtectedSubmitInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo>::value,
- "ProtectedSubmitInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eProtectedSubmitInfo>
@@ -78766,22 +81659,24 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo(
- VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::QueryType queryType_ = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion,
- uint32_t queryCount_ = {},
- VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::QueryType queryType_ = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion,
+ uint32_t queryCount_ = {},
+ VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, queryType( queryType_ )
, queryCount( queryCount_ )
, pipelineStatistics( pipelineStatistics_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : QueryPoolCreateInfo( *reinterpret_cast<QueryPoolCreateInfo const *>( &rhs ) )
- {}
+ QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : QueryPoolCreateInfo( *reinterpret_cast<QueryPoolCreateInfo const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
QueryPoolCreateInfo & operator=( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -78799,15 +81694,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo &
- setQueryType( VULKAN_HPP_NAMESPACE::QueryType queryType_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setQueryType( VULKAN_HPP_NAMESPACE::QueryType queryType_ ) VULKAN_HPP_NOEXCEPT
{
queryType = queryType_;
return *this;
@@ -78820,24 +81713,24 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo &
- setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
+ setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
{
pipelineStatistics = pipelineStatistics_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkQueryPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkQueryPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkQueryPoolCreateInfo *>( this );
}
- explicit operator VkQueryPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkQueryPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkQueryPoolCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -78859,11 +81752,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( QueryPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( queryType == rhs.queryType ) && ( queryCount == rhs.queryCount ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queryType == rhs.queryType ) && ( queryCount == rhs.queryCount ) &&
( pipelineStatistics == rhs.pipelineStatistics );
# endif
}
@@ -78882,12 +81774,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t queryCount = {};
VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo>::value,
- "QueryPoolCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eQueryPoolCreateInfo>
@@ -78900,38 +81786,41 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkQueryPoolPerformanceCreateInfoKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eQueryPoolPerformanceCreateInfoKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolPerformanceCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_ = {},
uint32_t counterIndexCount_ = {},
- const uint32_t * pCounterIndices_ = {} ) VULKAN_HPP_NOEXCEPT
- : queueFamilyIndex( queueFamilyIndex_ )
+ const uint32_t * pCounterIndices_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , queueFamilyIndex( queueFamilyIndex_ )
, counterIndexCount( counterIndexCount_ )
, pCounterIndices( pCounterIndices_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- QueryPoolPerformanceCreateInfoKHR( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
QueryPoolPerformanceCreateInfoKHR( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: QueryPoolPerformanceCreateInfoKHR( *reinterpret_cast<QueryPoolPerformanceCreateInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- QueryPoolPerformanceCreateInfoKHR(
- uint32_t queueFamilyIndex_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & counterIndices_ )
- : queueFamilyIndex( queueFamilyIndex_ )
+ QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & counterIndices_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , queueFamilyIndex( queueFamilyIndex_ )
, counterIndexCount( static_cast<uint32_t>( counterIndices_.size() ) )
, pCounterIndices( counterIndices_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- QueryPoolPerformanceCreateInfoKHR &
- operator=( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ QueryPoolPerformanceCreateInfoKHR & operator=( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
QueryPoolPerformanceCreateInfoKHR & operator=( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -78946,30 +81835,27 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR &
- setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
queueFamilyIndex = queueFamilyIndex_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR &
- setCounterIndexCount( uint32_t counterIndexCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setCounterIndexCount( uint32_t counterIndexCount_ ) VULKAN_HPP_NOEXCEPT
{
counterIndexCount = counterIndexCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR &
- setPCounterIndices( const uint32_t * pCounterIndices_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setPCounterIndices( const uint32_t * pCounterIndices_ ) VULKAN_HPP_NOEXCEPT
{
pCounterIndices = pCounterIndices_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- QueryPoolPerformanceCreateInfoKHR & setCounterIndices(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & counterIndices_ ) VULKAN_HPP_NOEXCEPT
+ QueryPoolPerformanceCreateInfoKHR &
+ setCounterIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & counterIndices_ ) VULKAN_HPP_NOEXCEPT
{
counterIndexCount = static_cast<uint32_t>( counterIndices_.size() );
pCounterIndices = counterIndices_.data();
@@ -78978,25 +81864,21 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkQueryPoolPerformanceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkQueryPoolPerformanceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( this );
}
- explicit operator VkQueryPoolPerformanceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkQueryPoolPerformanceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkQueryPoolPerformanceCreateInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- uint32_t const &,
- uint32_t const &,
- const uint32_t * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, const uint32_t * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -79009,7 +81891,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( QueryPoolPerformanceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) &&
@@ -79030,14 +81912,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t counterIndexCount = {};
const uint32_t * pCounterIndices = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR ) ==
- sizeof( VkQueryPoolPerformanceCreateInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR>::value,
- "QueryPoolPerformanceCreateInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eQueryPoolPerformanceCreateInfoKHR>
@@ -79050,69 +81924,63 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkQueryPoolPerformanceQueryCreateInfoINTEL;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL(
- VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ =
- VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual ) VULKAN_HPP_NOEXCEPT
- : performanceCountersSampling( performanceCountersSampling_ )
- {}
+ VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , performanceCountersSampling( performanceCountersSampling_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL(
- QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- QueryPoolPerformanceQueryCreateInfoINTEL( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs )
- VULKAN_HPP_NOEXCEPT
- : QueryPoolPerformanceQueryCreateInfoINTEL(
- *reinterpret_cast<QueryPoolPerformanceQueryCreateInfoINTEL const *>( &rhs ) )
- {}
+ QueryPoolPerformanceQueryCreateInfoINTEL( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ : QueryPoolPerformanceQueryCreateInfoINTEL( *reinterpret_cast<QueryPoolPerformanceQueryCreateInfoINTEL const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- QueryPoolPerformanceQueryCreateInfoINTEL &
- operator=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ QueryPoolPerformanceQueryCreateInfoINTEL & operator=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- QueryPoolPerformanceQueryCreateInfoINTEL &
- operator=( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ QueryPoolPerformanceQueryCreateInfoINTEL & operator=( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceQueryCreateInfoINTEL &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceQueryCreateInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceQueryCreateInfoINTEL & setPerformanceCountersSampling(
- VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceQueryCreateInfoINTEL &
+ setPerformanceCountersSampling( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ ) VULKAN_HPP_NOEXCEPT
{
performanceCountersSampling = performanceCountersSampling_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkQueryPoolPerformanceQueryCreateInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
+ operator VkQueryPoolPerformanceQueryCreateInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkQueryPoolPerformanceQueryCreateInfoINTEL *>( this );
}
- explicit operator VkQueryPoolPerformanceQueryCreateInfoINTEL &() VULKAN_HPP_NOEXCEPT
+ operator VkQueryPoolPerformanceQueryCreateInfoINTEL &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkQueryPoolPerformanceQueryCreateInfoINTEL *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -79125,11 +81993,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( performanceCountersSampling == rhs.performanceCountersSampling );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( performanceCountersSampling == rhs.performanceCountersSampling );
# endif
}
@@ -79140,20 +82007,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling =
- VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL ) ==
- sizeof( VkQueryPoolPerformanceQueryCreateInfoINTEL ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL>::value,
- "QueryPoolPerformanceQueryCreateInfoINTEL is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL>
@@ -79167,50 +82024,47 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkQueueFamilyCheckpointProperties2NV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eQueueFamilyCheckpointProperties2NV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyCheckpointProperties2NV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV(
- VULKAN_HPP_NAMESPACE::PipelineStageFlags2 checkpointExecutionStageMask_ = {} ) VULKAN_HPP_NOEXCEPT
- : checkpointExecutionStageMask( checkpointExecutionStageMask_ )
- {}
+ VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 checkpointExecutionStageMask_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , checkpointExecutionStageMask( checkpointExecutionStageMask_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV( QueueFamilyCheckpointProperties2NV const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV( QueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
QueueFamilyCheckpointProperties2NV( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT
: QueueFamilyCheckpointProperties2NV( *reinterpret_cast<QueueFamilyCheckpointProperties2NV const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- QueueFamilyCheckpointProperties2NV &
- operator=( QueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ QueueFamilyCheckpointProperties2NV & operator=( QueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- QueueFamilyCheckpointProperties2NV &
- operator=( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT
+ QueueFamilyCheckpointProperties2NV & operator=( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV const *>( &rhs );
return *this;
}
- explicit operator VkQueueFamilyCheckpointProperties2NV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkQueueFamilyCheckpointProperties2NV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkQueueFamilyCheckpointProperties2NV *>( this );
}
- explicit operator VkQueueFamilyCheckpointProperties2NV &() VULKAN_HPP_NOEXCEPT
+ operator VkQueueFamilyCheckpointProperties2NV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkQueueFamilyCheckpointProperties2NV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -79223,11 +82077,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( QueueFamilyCheckpointProperties2NV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask );
# endif
}
@@ -79238,18 +82091,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointProperties2NV;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointProperties2NV;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 checkpointExecutionStageMask = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV ) ==
- sizeof( VkQueueFamilyCheckpointProperties2NV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV>::value,
- "QueueFamilyCheckpointProperties2NV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eQueueFamilyCheckpointProperties2NV>
@@ -79262,25 +82107,25 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkQueueFamilyCheckpointPropertiesNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eQueueFamilyCheckpointPropertiesNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyCheckpointPropertiesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV(
- VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask_ = {} ) VULKAN_HPP_NOEXCEPT
- : checkpointExecutionStageMask( checkpointExecutionStageMask_ )
- {}
+ VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV( VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , checkpointExecutionStageMask( checkpointExecutionStageMask_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- QueueFamilyCheckpointPropertiesNV( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
QueueFamilyCheckpointPropertiesNV( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: QueueFamilyCheckpointPropertiesNV( *reinterpret_cast<QueueFamilyCheckpointPropertiesNV const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- QueueFamilyCheckpointPropertiesNV &
- operator=( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ QueueFamilyCheckpointPropertiesNV & operator=( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
QueueFamilyCheckpointPropertiesNV & operator=( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -79288,23 +82133,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkQueueFamilyCheckpointPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkQueueFamilyCheckpointPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkQueueFamilyCheckpointPropertiesNV *>( this );
}
- explicit operator VkQueueFamilyCheckpointPropertiesNV &() VULKAN_HPP_NOEXCEPT
+ operator VkQueueFamilyCheckpointPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkQueueFamilyCheckpointPropertiesNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::PipelineStageFlags const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -79317,11 +82160,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( QueueFamilyCheckpointPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask );
# endif
}
@@ -79332,18 +82174,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointPropertiesNV;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointPropertiesNV;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV ) ==
- sizeof( VkQueueFamilyCheckpointPropertiesNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV>::value,
- "QueueFamilyCheckpointPropertiesNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eQueueFamilyCheckpointPropertiesNV>
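The hunk above gives the QueueFamilyCheckpointPropertiesNV constructor a trailing, defaulted pNext_ parameter and drops the explicit qualifier from the conversion operators to the C struct. A minimal usage sketch of what that enables, assuming the post-update vulkan.hpp API; the struct is normally filled by a query, so constructing it directly here is purely illustrative:

    #include <vulkan/vulkan.hpp>

    void checkpointConversionSketch()
    {
      // pNext_ is now the last constructor argument and defaults to nullptr.
      vk::QueueFamilyCheckpointPropertiesNV props( vk::PipelineStageFlagBits::eTopOfPipe );

      // The conversion operator is no longer explicit, so the wrapper binds directly to a
      // reference to the C struct without a cast.
      const VkQueueFamilyCheckpointPropertiesNV & cProps = props;
      (void) cProps;
    }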
@@ -79356,47 +82190,46 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkQueueFamilyGlobalPriorityPropertiesKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eQueueFamilyGlobalPriorityPropertiesKHR;
-
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR(
- uint32_t priorityCount_ = {},
- std::array<VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR, VK_MAX_GLOBAL_PRIORITY_SIZE_KHR> const &
- priorities_ = { { VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
- VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
- VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
- VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
- VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
- VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
- VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
- VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
- VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
- VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
- VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
- VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
- VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
- VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
- VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
- VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow } } ) VULKAN_HPP_NOEXCEPT
- : priorityCount( priorityCount_ )
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyGlobalPriorityPropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14
+ QueueFamilyGlobalPriorityPropertiesKHR( uint32_t priorityCount_ = {},
+ std::array<VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR, VK_MAX_GLOBAL_PRIORITY_SIZE_KHR> const &
+ priorities_ = { { VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow } },
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , priorityCount( priorityCount_ )
, priorities( priorities_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR( QueueFamilyGlobalPriorityPropertiesKHR const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
QueueFamilyGlobalPriorityPropertiesKHR( VkQueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : QueueFamilyGlobalPriorityPropertiesKHR(
- *reinterpret_cast<QueueFamilyGlobalPriorityPropertiesKHR const *>( &rhs ) )
- {}
+ : QueueFamilyGlobalPriorityPropertiesKHR( *reinterpret_cast<QueueFamilyGlobalPriorityPropertiesKHR const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- QueueFamilyGlobalPriorityPropertiesKHR &
- operator=( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ QueueFamilyGlobalPriorityPropertiesKHR & operator=( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- QueueFamilyGlobalPriorityPropertiesKHR &
- operator=( VkQueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ QueueFamilyGlobalPriorityPropertiesKHR & operator=( VkQueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR const *>( &rhs );
return *this;
@@ -79409,41 +82242,38 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR &
- setPriorityCount( uint32_t priorityCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR & setPriorityCount( uint32_t priorityCount_ ) VULKAN_HPP_NOEXCEPT
{
priorityCount = priorityCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR & setPriorities(
- std::array<VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR, VK_MAX_GLOBAL_PRIORITY_SIZE_KHR> priorities_ )
- VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR &
+ setPriorities( std::array<VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR, VK_MAX_GLOBAL_PRIORITY_SIZE_KHR> priorities_ ) VULKAN_HPP_NOEXCEPT
{
priorities = priorities_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkQueueFamilyGlobalPriorityPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkQueueFamilyGlobalPriorityPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkQueueFamilyGlobalPriorityPropertiesKHR *>( this );
}
- explicit operator VkQueueFamilyGlobalPriorityPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkQueueFamilyGlobalPriorityPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkQueueFamilyGlobalPriorityPropertiesKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
void * const &,
uint32_t const &,
- VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR,
- VK_MAX_GLOBAL_PRIORITY_SIZE_KHR> const &>
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR, VK_MAX_GLOBAL_PRIORITY_SIZE_KHR> const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -79456,11 +82286,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( priorityCount == rhs.priorityCount ) &&
- ( priorities == rhs.priorities );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( priorityCount == rhs.priorityCount ) && ( priorities == rhs.priorities );
# endif
}
@@ -79474,18 +82303,8 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyGlobalPriorityPropertiesKHR;
void * pNext = {};
uint32_t priorityCount = {};
- VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR, VK_MAX_GLOBAL_PRIORITY_SIZE_KHR>
- priorities = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR, VK_MAX_GLOBAL_PRIORITY_SIZE_KHR> priorities = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR ) ==
- sizeof( VkQueueFamilyGlobalPriorityPropertiesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR>::value,
- "QueueFamilyGlobalPriorityPropertiesKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eQueueFamilyGlobalPriorityPropertiesKHR>
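The reflect()/operator== guard above switches from a GCC version probe to the opt-in VULKAN_HPP_USE_REFLECT macro. A minimal sketch of the opt-in, assuming the post-update header; with the macro defined the comparison goes through the std::tie tuple returned by reflect(), otherwise the generated member-by-member comparison is used:

    #define VULKAN_HPP_USE_REFLECT   // opt in to tuple-based reflection before including the header
    #include <vulkan/vulkan.hpp>

    bool samePriorities( vk::QueueFamilyGlobalPriorityPropertiesKHR const & a,
                         vk::QueueFamilyGlobalPriorityPropertiesKHR const & b )
    {
      // Compares sType, pNext, priorityCount and priorities via reflect() when the macro is set.
      return a == b;
    }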
@@ -79499,22 +82318,23 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkQueueFamilyProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- QueueFamilyProperties( VULKAN_HPP_NAMESPACE::QueueFlags queueFlags_ = {},
- uint32_t queueCount_ = {},
- uint32_t timestampValidBits_ = {},
- VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR QueueFamilyProperties( VULKAN_HPP_NAMESPACE::QueueFlags queueFlags_ = {},
+ uint32_t queueCount_ = {},
+ uint32_t timestampValidBits_ = {},
+ VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity_ = {} ) VULKAN_HPP_NOEXCEPT
: queueFlags( queueFlags_ )
, queueCount( queueCount_ )
, timestampValidBits( timestampValidBits_ )
, minImageTransferGranularity( minImageTransferGranularity_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR QueueFamilyProperties( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
QueueFamilyProperties( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: QueueFamilyProperties( *reinterpret_cast<QueueFamilyProperties const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
QueueFamilyProperties & operator=( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -79525,24 +82345,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkQueueFamilyProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkQueueFamilyProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkQueueFamilyProperties *>( this );
}
- explicit operator VkQueueFamilyProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkQueueFamilyProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkQueueFamilyProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::QueueFlags const &,
- uint32_t const &,
- uint32_t const &,
- VULKAN_HPP_NAMESPACE::Extent3D const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::QueueFlags const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Extent3D const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -79555,11 +82372,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( QueueFamilyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( queueFlags == rhs.queueFlags ) && ( queueCount == rhs.queueCount ) &&
- ( timestampValidBits == rhs.timestampValidBits ) &&
+ return ( queueFlags == rhs.queueFlags ) && ( queueCount == rhs.queueCount ) && ( timestampValidBits == rhs.timestampValidBits ) &&
( minImageTransferGranularity == rhs.minImageTransferGranularity );
# endif
}
@@ -79576,12 +82392,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t timestampValidBits = {};
VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyProperties>::value,
- "QueueFamilyProperties is not nothrow_move_constructible!" );
struct QueueFamilyProperties2
{
@@ -79591,16 +82401,19 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyProperties2;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR QueueFamilyProperties2(
- VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties_ = {} ) VULKAN_HPP_NOEXCEPT
- : queueFamilyProperties( queueFamilyProperties_ )
- {}
+ VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , queueFamilyProperties( queueFamilyProperties_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
QueueFamilyProperties2( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
: QueueFamilyProperties2( *reinterpret_cast<QueueFamilyProperties2 const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
QueueFamilyProperties2 & operator=( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -79611,23 +82424,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkQueueFamilyProperties2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkQueueFamilyProperties2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkQueueFamilyProperties2 *>( this );
}
- explicit operator VkQueueFamilyProperties2 &() VULKAN_HPP_NOEXCEPT
+ operator VkQueueFamilyProperties2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkQueueFamilyProperties2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::QueueFamilyProperties const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::QueueFamilyProperties const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -79640,7 +82451,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( QueueFamilyProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyProperties == rhs.queueFamilyProperties );
@@ -79658,13 +82469,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 ) ==
- sizeof( VkQueueFamilyProperties2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>::value,
- "QueueFamilyProperties2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eQueueFamilyProperties2>
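With the QueueFamilyProperties2 constructor above now taking pNext_ as a trailing, defaulted argument (initialised first, matching declaration order), an extension struct can be chained directly at construction. A minimal sketch, assuming a valid vk::PhysicalDevice and the usual pointer-based getQueueFamilyProperties2 overload; only the first queue family is queried to keep it short:

    #include <vulkan/vulkan.hpp>

    void queryCheckpointProperties( vk::PhysicalDevice physicalDevice )
    {
      uint32_t count = 0;
      physicalDevice.getQueueFamilyProperties2( &count, nullptr );
      if ( count == 0 )
        return;

      vk::QueueFamilyCheckpointPropertiesNV checkpointProps{};              // extension output struct
      vk::QueueFamilyProperties2            props( {}, &checkpointProps );  // pNext_ is the trailing ctor argument

      count = 1;  // query only the first family in this sketch
      physicalDevice.getQueueFamilyProperties2( &count, &props );
    }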
@@ -79674,114 +82478,172 @@ namespace VULKAN_HPP_NAMESPACE
using QueueFamilyProperties2KHR = QueueFamilyProperties2;
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct QueueFamilyQueryResultStatusProperties2KHR
+ struct QueueFamilyQueryResultStatusPropertiesKHR
{
- using NativeType = VkQueueFamilyQueryResultStatusProperties2KHR;
+ using NativeType = VkQueueFamilyQueryResultStatusPropertiesKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eQueueFamilyQueryResultStatusProperties2KHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyQueryResultStatusPropertiesKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- QueueFamilyQueryResultStatusProperties2KHR( VULKAN_HPP_NAMESPACE::Bool32 supported_ = {} ) VULKAN_HPP_NOEXCEPT
- : supported( supported_ )
- {}
+ VULKAN_HPP_CONSTEXPR QueueFamilyQueryResultStatusPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 queryResultStatusSupport_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , queryResultStatusSupport( queryResultStatusSupport_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR QueueFamilyQueryResultStatusProperties2KHR(
- QueueFamilyQueryResultStatusProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR QueueFamilyQueryResultStatusPropertiesKHR( QueueFamilyQueryResultStatusPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- QueueFamilyQueryResultStatusProperties2KHR( VkQueueFamilyQueryResultStatusProperties2KHR const & rhs )
- VULKAN_HPP_NOEXCEPT
- : QueueFamilyQueryResultStatusProperties2KHR(
- *reinterpret_cast<QueueFamilyQueryResultStatusProperties2KHR const *>( &rhs ) )
- {}
+ QueueFamilyQueryResultStatusPropertiesKHR( VkQueueFamilyQueryResultStatusPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : QueueFamilyQueryResultStatusPropertiesKHR( *reinterpret_cast<QueueFamilyQueryResultStatusPropertiesKHR const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- QueueFamilyQueryResultStatusProperties2KHR &
- operator=( QueueFamilyQueryResultStatusProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ QueueFamilyQueryResultStatusPropertiesKHR & operator=( QueueFamilyQueryResultStatusPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- QueueFamilyQueryResultStatusProperties2KHR &
- operator=( VkQueueFamilyQueryResultStatusProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ QueueFamilyQueryResultStatusPropertiesKHR & operator=( VkQueueFamilyQueryResultStatusPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusProperties2KHR const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusPropertiesKHR const *>( &rhs );
return *this;
}
-# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 QueueFamilyQueryResultStatusProperties2KHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ operator VkQueueFamilyQueryResultStatusPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
- return *this;
+ return *reinterpret_cast<const VkQueueFamilyQueryResultStatusPropertiesKHR *>( this );
+ }
+
+ operator VkQueueFamilyQueryResultStatusPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkQueueFamilyQueryResultStatusPropertiesKHR *>( this );
}
- VULKAN_HPP_CONSTEXPR_14 QueueFamilyQueryResultStatusProperties2KHR &
- setSupported( VULKAN_HPP_NAMESPACE::Bool32 supported_ ) VULKAN_HPP_NOEXCEPT
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
{
- supported = supported_;
+ return std::tie( sType, pNext, queryResultStatusSupport );
+ }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( QueueFamilyQueryResultStatusPropertiesKHR const & ) const = default;
+# else
+ bool operator==( QueueFamilyQueryResultStatusPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queryResultStatusSupport == rhs.queryResultStatusSupport );
+# endif
+ }
+
+ bool operator!=( QueueFamilyQueryResultStatusPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+# endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyQueryResultStatusPropertiesKHR;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 queryResultStatusSupport = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eQueueFamilyQueryResultStatusPropertiesKHR>
+ {
+ using Type = QueueFamilyQueryResultStatusPropertiesKHR;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ struct QueueFamilyVideoPropertiesKHR
+ {
+ using NativeType = VkQueueFamilyVideoPropertiesKHR;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyVideoPropertiesKHR;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR QueueFamilyVideoPropertiesKHR( VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , videoCodecOperations( videoCodecOperations_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR QueueFamilyVideoPropertiesKHR( QueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ QueueFamilyVideoPropertiesKHR( VkQueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : QueueFamilyVideoPropertiesKHR( *reinterpret_cast<QueueFamilyVideoPropertiesKHR const *>( &rhs ) )
+ {
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ QueueFamilyVideoPropertiesKHR & operator=( QueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ QueueFamilyVideoPropertiesKHR & operator=( VkQueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyVideoPropertiesKHR const *>( &rhs );
return *this;
}
-# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkQueueFamilyQueryResultStatusProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkQueueFamilyVideoPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkQueueFamilyQueryResultStatusProperties2KHR *>( this );
+ return *reinterpret_cast<const VkQueueFamilyVideoPropertiesKHR *>( this );
}
- explicit operator VkQueueFamilyQueryResultStatusProperties2KHR &() VULKAN_HPP_NOEXCEPT
+ operator VkQueueFamilyVideoPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkQueueFamilyQueryResultStatusProperties2KHR *>( this );
+ return *reinterpret_cast<VkQueueFamilyVideoPropertiesKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType, pNext, supported );
+ return std::tie( sType, pNext, videoCodecOperations );
}
# endif
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( QueueFamilyQueryResultStatusProperties2KHR const & ) const = default;
+ auto operator<=>( QueueFamilyVideoPropertiesKHR const & ) const = default;
# else
- bool operator==( QueueFamilyQueryResultStatusProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( QueueFamilyVideoPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supported == rhs.supported );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoCodecOperations == rhs.videoCodecOperations );
# endif
}
- bool operator!=( QueueFamilyQueryResultStatusProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( QueueFamilyVideoPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyQueryResultStatusProperties2KHR;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::Bool32 supported = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyVideoPropertiesKHR;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusProperties2KHR ) ==
- sizeof( VkQueueFamilyQueryResultStatusProperties2KHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusProperties2KHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusProperties2KHR>::value,
- "QueueFamilyQueryResultStatusProperties2KHR is not nothrow_move_constructible!" );
template <>
- struct CppType<StructureType, StructureType::eQueueFamilyQueryResultStatusProperties2KHR>
+ struct CppType<StructureType, StructureType::eQueueFamilyVideoPropertiesKHR>
{
- using Type = QueueFamilyQueryResultStatusProperties2KHR;
+ using Type = QueueFamilyVideoPropertiesKHR;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
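The hunk above renames QueueFamilyQueryResultStatusProperties2KHR to QueueFamilyQueryResultStatusPropertiesKHR (its supported member becomes queryResultStatusSupport) and adds QueueFamilyVideoPropertiesKHR. A minimal sketch of the renamed types, assuming the header is compiled with VK_ENABLE_BETA_EXTENSIONS defined; both structs are output-only and would normally be filled by a queue-family properties query:

    #define VK_ENABLE_BETA_EXTENSIONS
    #include <vulkan/vulkan.hpp>

    bool describesVideoQueue( vk::QueueFamilyVideoPropertiesKHR const &             videoProps,
                              vk::QueueFamilyQueryResultStatusPropertiesKHR const & statusProps )
    {
      // videoCodecOperations is empty for non-video queue families; queryResultStatusSupport
      // replaces the old 'supported' member.
      return static_cast<bool>( videoProps.videoCodecOperations ) && ( statusProps.queryResultStatusSupport == VK_TRUE );
    }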
@@ -79790,39 +82652,38 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkRayTracingShaderGroupCreateInfoKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eRayTracingShaderGroupCreateInfoKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingShaderGroupCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- RayTracingShaderGroupCreateInfoKHR( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ =
- VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral,
- uint32_t generalShader_ = {},
- uint32_t closestHitShader_ = {},
- uint32_t anyHitShader_ = {},
- uint32_t intersectionShader_ = {},
- const void * pShaderGroupCaptureReplayHandle_ = {} ) VULKAN_HPP_NOEXCEPT
- : type( type_ )
+ VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR(
+ VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral,
+ uint32_t generalShader_ = {},
+ uint32_t closestHitShader_ = {},
+ uint32_t anyHitShader_ = {},
+ uint32_t intersectionShader_ = {},
+ const void * pShaderGroupCaptureReplayHandle_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , type( type_ )
, generalShader( generalShader_ )
, closestHitShader( closestHitShader_ )
, anyHitShader( anyHitShader_ )
, intersectionShader( intersectionShader_ )
, pShaderGroupCaptureReplayHandle( pShaderGroupCaptureReplayHandle_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR( RayTracingShaderGroupCreateInfoKHR const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RayTracingShaderGroupCreateInfoKHR( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: RayTracingShaderGroupCreateInfoKHR( *reinterpret_cast<RayTracingShaderGroupCreateInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- RayTracingShaderGroupCreateInfoKHR &
- operator=( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ RayTracingShaderGroupCreateInfoKHR & operator=( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- RayTracingShaderGroupCreateInfoKHR &
- operator=( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ RayTracingShaderGroupCreateInfoKHR & operator=( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR const *>( &rhs );
return *this;
@@ -79835,60 +82696,55 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR &
- setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR &
- setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT
{
generalShader = generalShader_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR &
- setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT
{
closestHitShader = closestHitShader_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR &
- setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT
{
anyHitShader = anyHitShader_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR &
- setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT
{
intersectionShader = intersectionShader_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR &
- setPShaderGroupCaptureReplayHandle( const void * pShaderGroupCaptureReplayHandle_ ) VULKAN_HPP_NOEXCEPT
+ setPShaderGroupCaptureReplayHandle( const void * pShaderGroupCaptureReplayHandle_ ) VULKAN_HPP_NOEXCEPT
{
pShaderGroupCaptureReplayHandle = pShaderGroupCaptureReplayHandle_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkRayTracingShaderGroupCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkRayTracingShaderGroupCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRayTracingShaderGroupCreateInfoKHR *>( this );
}
- explicit operator VkRayTracingShaderGroupCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkRayTracingShaderGroupCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRayTracingShaderGroupCreateInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -79903,14 +82759,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- type,
- generalShader,
- closestHitShader,
- anyHitShader,
- intersectionShader,
- pShaderGroupCaptureReplayHandle );
+ return std::tie( sType, pNext, type, generalShader, closestHitShader, anyHitShader, intersectionShader, pShaderGroupCaptureReplayHandle );
}
#endif
@@ -79919,12 +82768,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( RayTracingShaderGroupCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) &&
- ( generalShader == rhs.generalShader ) && ( closestHitShader == rhs.closestHitShader ) &&
- ( anyHitShader == rhs.anyHitShader ) && ( intersectionShader == rhs.intersectionShader ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( generalShader == rhs.generalShader ) &&
+ ( closestHitShader == rhs.closestHitShader ) && ( anyHitShader == rhs.anyHitShader ) && ( intersectionShader == rhs.intersectionShader ) &&
( pShaderGroupCaptureReplayHandle == rhs.pShaderGroupCaptureReplayHandle );
# endif
}
@@ -79936,24 +82784,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoKHR;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type =
- VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral;
- uint32_t generalShader = {};
- uint32_t closestHitShader = {};
- uint32_t anyHitShader = {};
- uint32_t intersectionShader = {};
- const void * pShaderGroupCaptureReplayHandle = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral;
+ uint32_t generalShader = {};
+ uint32_t closestHitShader = {};
+ uint32_t anyHitShader = {};
+ uint32_t intersectionShader = {};
+ const void * pShaderGroupCaptureReplayHandle = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR ) ==
- sizeof( VkRayTracingShaderGroupCreateInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR>::value,
- "RayTracingShaderGroupCreateInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eRayTracingShaderGroupCreateInfoKHR>
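The setter reformatting above is behaviourally unchanged; each setter still assigns one member and returns *this. A minimal sketch of the fluent style they support, assuming a general (raygen or miss) group whose shader sits at the given index in the pipeline's stage array:

    #include <vulkan/vulkan.hpp>

    vk::RayTracingShaderGroupCreateInfoKHR makeGeneralGroup( uint32_t shaderIndex )
    {
      return vk::RayTracingShaderGroupCreateInfoKHR{}
        .setType( vk::RayTracingShaderGroupTypeKHR::eGeneral )
        .setGeneralShader( shaderIndex )
        .setClosestHitShader( VK_SHADER_UNUSED_KHR )
        .setAnyHitShader( VK_SHADER_UNUSED_KHR )
        .setIntersectionShader( VK_SHADER_UNUSED_KHR );
    }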
@@ -79966,71 +82805,66 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkRayTracingPipelineInterfaceCreateInfoKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eRayTracingPipelineInterfaceCreateInfoKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- RayTracingPipelineInterfaceCreateInfoKHR( uint32_t maxPipelineRayPayloadSize_ = {},
- uint32_t maxPipelineRayHitAttributeSize_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxPipelineRayPayloadSize( maxPipelineRayPayloadSize_ )
+ VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR( uint32_t maxPipelineRayPayloadSize_ = {},
+ uint32_t maxPipelineRayHitAttributeSize_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxPipelineRayPayloadSize( maxPipelineRayPayloadSize_ )
, maxPipelineRayHitAttributeSize( maxPipelineRayHitAttributeSize_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR(
- RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- RayTracingPipelineInterfaceCreateInfoKHR( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs )
- VULKAN_HPP_NOEXCEPT
- : RayTracingPipelineInterfaceCreateInfoKHR(
- *reinterpret_cast<RayTracingPipelineInterfaceCreateInfoKHR const *>( &rhs ) )
- {}
+ RayTracingPipelineInterfaceCreateInfoKHR( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : RayTracingPipelineInterfaceCreateInfoKHR( *reinterpret_cast<RayTracingPipelineInterfaceCreateInfoKHR const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- RayTracingPipelineInterfaceCreateInfoKHR &
- operator=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ RayTracingPipelineInterfaceCreateInfoKHR & operator=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- RayTracingPipelineInterfaceCreateInfoKHR &
- operator=( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ RayTracingPipelineInterfaceCreateInfoKHR & operator=( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR &
- setMaxPipelineRayPayloadSize( uint32_t maxPipelineRayPayloadSize_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & setMaxPipelineRayPayloadSize( uint32_t maxPipelineRayPayloadSize_ ) VULKAN_HPP_NOEXCEPT
{
maxPipelineRayPayloadSize = maxPipelineRayPayloadSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR &
- setMaxPipelineRayHitAttributeSize( uint32_t maxPipelineRayHitAttributeSize_ ) VULKAN_HPP_NOEXCEPT
+ setMaxPipelineRayHitAttributeSize( uint32_t maxPipelineRayHitAttributeSize_ ) VULKAN_HPP_NOEXCEPT
{
maxPipelineRayHitAttributeSize = maxPipelineRayHitAttributeSize_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkRayTracingPipelineInterfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkRayTracingPipelineInterfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRayTracingPipelineInterfaceCreateInfoKHR *>( this );
}
- explicit operator VkRayTracingPipelineInterfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkRayTracingPipelineInterfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRayTracingPipelineInterfaceCreateInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -80047,11 +82881,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( maxPipelineRayPayloadSize == rhs.maxPipelineRayPayloadSize ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPipelineRayPayloadSize == rhs.maxPipelineRayPayloadSize ) &&
( maxPipelineRayHitAttributeSize == rhs.maxPipelineRayHitAttributeSize );
# endif
}
@@ -80063,20 +82896,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR;
+ const void * pNext = {};
uint32_t maxPipelineRayPayloadSize = {};
uint32_t maxPipelineRayHitAttributeSize = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR ) ==
- sizeof( VkRayTracingPipelineInterfaceCreateInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR>::value,
- "RayTracingPipelineInterfaceCreateInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eRayTracingPipelineInterfaceCreateInfoKHR>
@@ -80092,20 +82916,21 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR(
- VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {},
- uint32_t stageCount_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {},
- uint32_t groupCount_ = {},
- const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups_ = {},
- uint32_t maxPipelineRayRecursionDepth_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {},
- const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {},
- VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
- VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
- int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {},
+ uint32_t stageCount_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {},
+ uint32_t groupCount_ = {},
+ const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups_ = {},
+ uint32_t maxPipelineRayRecursionDepth_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {},
+ const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
+ VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
+ int32_t basePipelineIndex_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, stageCount( stageCount_ )
, pStages( pStages_ )
, groupCount( groupCount_ )
@@ -80117,30 +82942,31 @@ namespace VULKAN_HPP_NAMESPACE
, layout( layout_ )
, basePipelineHandle( basePipelineHandle_ )
, basePipelineIndex( basePipelineIndex_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- RayTracingPipelineCreateInfoKHR( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RayTracingPipelineCreateInfoKHR( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: RayTracingPipelineCreateInfoKHR( *reinterpret_cast<RayTracingPipelineCreateInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RayTracingPipelineCreateInfoKHR(
- VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const &
- stages_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
- const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR> const & groups_ = {},
- uint32_t maxPipelineRayRecursionDepth_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {},
- const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {},
- VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
- VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
- int32_t basePipelineIndex_ = {} )
- : flags( flags_ )
+ VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR> const & groups_ = {},
+ uint32_t maxPipelineRayRecursionDepth_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {},
+ const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
+ VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
+ int32_t basePipelineIndex_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, stageCount( static_cast<uint32_t>( stages_.size() ) )
, pStages( stages_.data() )
, groupCount( static_cast<uint32_t>( groups_.size() ) )
@@ -80152,12 +82978,12 @@ namespace VULKAN_HPP_NAMESPACE
, layout( layout_ )
, basePipelineHandle( basePipelineHandle_ )
, basePipelineIndex( basePipelineIndex_ )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- RayTracingPipelineCreateInfoKHR &
- operator=( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ RayTracingPipelineCreateInfoKHR & operator=( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RayTracingPipelineCreateInfoKHR & operator=( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -80172,8 +82998,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR &
- setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -80186,16 +83011,15 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR &
- setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
+ setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
{
pStages = pStages_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- RayTracingPipelineCreateInfoKHR & setStages(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const &
- stages_ ) VULKAN_HPP_NOEXCEPT
+ RayTracingPipelineCreateInfoKHR &
+ setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
{
stageCount = static_cast<uint32_t>( stages_.size() );
pStages = stages_.data();
@@ -80210,16 +83034,15 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR &
- setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups_ ) VULKAN_HPP_NOEXCEPT
+ setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups_ ) VULKAN_HPP_NOEXCEPT
{
pGroups = pGroups_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- RayTracingPipelineCreateInfoKHR &
- setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
- const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR> const & groups_ ) VULKAN_HPP_NOEXCEPT
+ RayTracingPipelineCreateInfoKHR & setGroups(
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR> const & groups_ ) VULKAN_HPP_NOEXCEPT
{
groupCount = static_cast<uint32_t>( groups_.size() );
pGroups = groups_.data();
@@ -80227,67 +83050,63 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR &
- setMaxPipelineRayRecursionDepth( uint32_t maxPipelineRayRecursionDepth_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setMaxPipelineRayRecursionDepth( uint32_t maxPipelineRayRecursionDepth_ ) VULKAN_HPP_NOEXCEPT
{
maxPipelineRayRecursionDepth = maxPipelineRayRecursionDepth_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR &
- setPLibraryInfo( const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ ) VULKAN_HPP_NOEXCEPT
+ setPLibraryInfo( const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ ) VULKAN_HPP_NOEXCEPT
{
pLibraryInfo = pLibraryInfo_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPLibraryInterface(
- const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR &
+ setPLibraryInterface( const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ ) VULKAN_HPP_NOEXCEPT
{
pLibraryInterface = pLibraryInterface_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPDynamicState(
- const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR &
+ setPDynamicState( const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT
{
pDynamicState = pDynamicState_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR &
- setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
{
layout = layout_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR &
- setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
{
basePipelineHandle = basePipelineHandle_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR &
- setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
{
basePipelineIndex = basePipelineIndex_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkRayTracingPipelineCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkRayTracingPipelineCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( this );
}
- explicit operator VkRayTracingPipelineCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkRayTracingPipelineCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRayTracingPipelineCreateInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -80330,15 +83149,13 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( RayTracingPipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && ( groupCount == rhs.groupCount ) &&
- ( pGroups == rhs.pGroups ) && ( maxPipelineRayRecursionDepth == rhs.maxPipelineRayRecursionDepth ) &&
- ( pLibraryInfo == rhs.pLibraryInfo ) && ( pLibraryInterface == rhs.pLibraryInterface ) &&
- ( pDynamicState == rhs.pDynamicState ) && ( layout == rhs.layout ) &&
- ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) &&
+ ( groupCount == rhs.groupCount ) && ( pGroups == rhs.pGroups ) && ( maxPipelineRayRecursionDepth == rhs.maxPipelineRayRecursionDepth ) &&
+ ( pLibraryInfo == rhs.pLibraryInfo ) && ( pLibraryInterface == rhs.pLibraryInterface ) && ( pDynamicState == rhs.pDynamicState ) &&
+ ( layout == rhs.layout ) && ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex );
# endif
}
@@ -80349,12 +83166,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoKHR;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
- uint32_t stageCount = {};
- const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {};
- uint32_t groupCount = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
+ uint32_t stageCount = {};
+ const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {};
+ uint32_t groupCount = {};
const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups = {};
uint32_t maxPipelineRayRecursionDepth = {};
const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo = {};
@@ -80364,14 +83181,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
int32_t basePipelineIndex = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR ) ==
- sizeof( VkRayTracingPipelineCreateInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR>::value,
- "RayTracingPipelineCreateInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eRayTracingPipelineCreateInfoKHR>
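The enhanced-mode constructor and the setStages/setGroups setters reformatted above still derive stageCount and groupCount from the ArrayProxyNoTemporaries arguments. A minimal sketch, assuming named lvalue containers for the stages and groups and a valid pipeline layout (temporaries are rejected by ArrayProxyNoTemporaries by design):

    #include <vulkan/vulkan.hpp>
    #include <vector>

    vk::RayTracingPipelineCreateInfoKHR
      makeRayTracingPipelineInfo( std::vector<vk::PipelineShaderStageCreateInfo> const &      stages,
                                  std::vector<vk::RayTracingShaderGroupCreateInfoKHR> const & groups,
                                  vk::PipelineLayout                                          layout )
    {
      vk::RayTracingPipelineCreateInfoKHR info{};
      info.setStages( stages )                   // sets pStages and stageCount together
          .setGroups( groups )                   // sets pGroups and groupCount together
          .setMaxPipelineRayRecursionDepth( 1 )
          .setLayout( layout );
      return info;
    }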
@@ -80384,34 +83193,34 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkRayTracingShaderGroupCreateInfoNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eRayTracingShaderGroupCreateInfoNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingShaderGroupCreateInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- RayTracingShaderGroupCreateInfoNV( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ =
- VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral,
- uint32_t generalShader_ = {},
- uint32_t closestHitShader_ = {},
- uint32_t anyHitShader_ = {},
- uint32_t intersectionShader_ = {} ) VULKAN_HPP_NOEXCEPT
- : type( type_ )
+ VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV(
+ VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral,
+ uint32_t generalShader_ = {},
+ uint32_t closestHitShader_ = {},
+ uint32_t anyHitShader_ = {},
+ uint32_t intersectionShader_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , type( type_ )
, generalShader( generalShader_ )
, closestHitShader( closestHitShader_ )
, anyHitShader( anyHitShader_ )
, intersectionShader( intersectionShader_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- RayTracingShaderGroupCreateInfoNV( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RayTracingShaderGroupCreateInfoNV( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: RayTracingShaderGroupCreateInfoNV( *reinterpret_cast<RayTracingShaderGroupCreateInfoNV const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- RayTracingShaderGroupCreateInfoNV &
- operator=( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ RayTracingShaderGroupCreateInfoNV & operator=( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RayTracingShaderGroupCreateInfoNV & operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -80426,53 +83235,48 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV &
- setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV &
- setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT
{
generalShader = generalShader_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV &
- setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT
{
closestHitShader = closestHitShader_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV &
- setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT
{
anyHitShader = anyHitShader_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV &
- setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT
{
intersectionShader = intersectionShader_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkRayTracingShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkRayTracingShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRayTracingShaderGroupCreateInfoNV *>( this );
}
- explicit operator VkRayTracingShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkRayTracingShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRayTracingShaderGroupCreateInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -80495,12 +83299,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( RayTracingShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) &&
- ( generalShader == rhs.generalShader ) && ( closestHitShader == rhs.closestHitShader ) &&
- ( anyHitShader == rhs.anyHitShader ) && ( intersectionShader == rhs.intersectionShader );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( generalShader == rhs.generalShader ) &&
+ ( closestHitShader == rhs.closestHitShader ) && ( anyHitShader == rhs.anyHitShader ) && ( intersectionShader == rhs.intersectionShader );
# endif
}
@@ -80511,23 +83314,14 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoNV;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type =
- VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral;
- uint32_t generalShader = {};
- uint32_t closestHitShader = {};
- uint32_t anyHitShader = {};
- uint32_t intersectionShader = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoNV;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral;
+ uint32_t generalShader = {};
+ uint32_t closestHitShader = {};
+ uint32_t anyHitShader = {};
+ uint32_t intersectionShader = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV ) ==
- sizeof( VkRayTracingShaderGroupCreateInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV>::value,
- "RayTracingShaderGroupCreateInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eRayTracingShaderGroupCreateInfoNV>
@@ -80543,17 +83337,18 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- RayTracingPipelineCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {},
- uint32_t stageCount_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {},
- uint32_t groupCount_ = {},
- const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups_ = {},
- uint32_t maxRecursionDepth_ = {},
- VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
- VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
- int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {},
+ uint32_t stageCount_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {},
+ uint32_t groupCount_ = {},
+ const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups_ = {},
+ uint32_t maxRecursionDepth_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
+ VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
+ int32_t basePipelineIndex_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, stageCount( stageCount_ )
, pStages( pStages_ )
, groupCount( groupCount_ )
@@ -80562,27 +83357,28 @@ namespace VULKAN_HPP_NAMESPACE
, layout( layout_ )
, basePipelineHandle( basePipelineHandle_ )
, basePipelineIndex( basePipelineIndex_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- RayTracingPipelineCreateInfoNV( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RayTracingPipelineCreateInfoNV( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: RayTracingPipelineCreateInfoNV( *reinterpret_cast<RayTracingPipelineCreateInfoNV const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RayTracingPipelineCreateInfoNV(
- VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const &
- stages_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
- const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV> const & groups_ = {},
- uint32_t maxRecursionDepth_ = {},
- VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
- VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
- int32_t basePipelineIndex_ = {} )
- : flags( flags_ )
+ VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV> const & groups_ = {},
+ uint32_t maxRecursionDepth_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
+ VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
+ int32_t basePipelineIndex_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, stageCount( static_cast<uint32_t>( stages_.size() ) )
, pStages( stages_.data() )
, groupCount( static_cast<uint32_t>( groups_.size() ) )
@@ -80591,12 +83387,12 @@ namespace VULKAN_HPP_NAMESPACE
, layout( layout_ )
, basePipelineHandle( basePipelineHandle_ )
, basePipelineIndex( basePipelineIndex_ )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- RayTracingPipelineCreateInfoNV &
- operator=( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ RayTracingPipelineCreateInfoNV & operator=( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RayTracingPipelineCreateInfoNV & operator=( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -80611,8 +83407,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV &
- setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -80625,16 +83420,15 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV &
- setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
+ setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
{
pStages = pStages_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- RayTracingPipelineCreateInfoNV & setStages(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const &
- stages_ ) VULKAN_HPP_NOEXCEPT
+ RayTracingPipelineCreateInfoNV &
+ setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
{
stageCount = static_cast<uint32_t>( stages_.size() );
pStages = stages_.data();
@@ -80649,16 +83443,15 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV &
- setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups_ ) VULKAN_HPP_NOEXCEPT
+ setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups_ ) VULKAN_HPP_NOEXCEPT
{
pGroups = pGroups_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- RayTracingPipelineCreateInfoNV &
- setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
- const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV> const & groups_ ) VULKAN_HPP_NOEXCEPT
+ RayTracingPipelineCreateInfoNV & setGroups(
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV> const & groups_ ) VULKAN_HPP_NOEXCEPT
{
groupCount = static_cast<uint32_t>( groups_.size() );
pGroups = groups_.data();
@@ -80666,46 +83459,42 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV &
- setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) VULKAN_HPP_NOEXCEPT
{
maxRecursionDepth = maxRecursionDepth_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV &
- setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
{
layout = layout_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV &
- setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
{
basePipelineHandle = basePipelineHandle_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV &
- setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
{
basePipelineIndex = basePipelineIndex_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkRayTracingPipelineCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkRayTracingPipelineCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( this );
}
- explicit operator VkRayTracingPipelineCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkRayTracingPipelineCreateInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRayTracingPipelineCreateInfoNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -80723,17 +83512,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- flags,
- stageCount,
- pStages,
- groupCount,
- pGroups,
- maxRecursionDepth,
- layout,
- basePipelineHandle,
- basePipelineIndex );
+ return std::tie( sType, pNext, flags, stageCount, pStages, groupCount, pGroups, maxRecursionDepth, layout, basePipelineHandle, basePipelineIndex );
}
#endif
@@ -80742,12 +83521,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( RayTracingPipelineCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && ( groupCount == rhs.groupCount ) &&
- ( pGroups == rhs.pGroups ) && ( maxRecursionDepth == rhs.maxRecursionDepth ) && ( layout == rhs.layout ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) &&
+ ( groupCount == rhs.groupCount ) && ( pGroups == rhs.pGroups ) && ( maxRecursionDepth == rhs.maxRecursionDepth ) && ( layout == rhs.layout ) &&
( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex );
# endif
}
@@ -80759,26 +83537,18 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoNV;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
- uint32_t stageCount = {};
- const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {};
- uint32_t groupCount = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoNV;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
+ uint32_t stageCount = {};
+ const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {};
+ uint32_t groupCount = {};
const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups = {};
uint32_t maxRecursionDepth = {};
VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
int32_t basePipelineIndex = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV ) ==
- sizeof( VkRayTracingPipelineCreateInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV>::value,
- "RayTracingPipelineCreateInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eRayTracingPipelineCreateInfoNV>
@@ -80791,16 +83561,14 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkRefreshCycleDurationGOOGLE;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( uint64_t refreshDuration_ = {} ) VULKAN_HPP_NOEXCEPT
- : refreshDuration( refreshDuration_ )
- {}
+ VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( uint64_t refreshDuration_ = {} ) VULKAN_HPP_NOEXCEPT : refreshDuration( refreshDuration_ ) {}
- VULKAN_HPP_CONSTEXPR
- RefreshCycleDurationGOOGLE( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RefreshCycleDurationGOOGLE( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
: RefreshCycleDurationGOOGLE( *reinterpret_cast<RefreshCycleDurationGOOGLE const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
RefreshCycleDurationGOOGLE & operator=( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -80811,17 +83579,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkRefreshCycleDurationGOOGLE const &() const VULKAN_HPP_NOEXCEPT
+ operator VkRefreshCycleDurationGOOGLE const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRefreshCycleDurationGOOGLE *>( this );
}
- explicit operator VkRefreshCycleDurationGOOGLE &() VULKAN_HPP_NOEXCEPT
+ operator VkRefreshCycleDurationGOOGLE &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -80838,7 +83606,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( RefreshCycleDurationGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( refreshDuration == rhs.refreshDuration );
@@ -80854,13 +83622,6 @@ namespace VULKAN_HPP_NAMESPACE
public:
uint64_t refreshDuration = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE ) ==
- sizeof( VkRefreshCycleDurationGOOGLE ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::value,
- "RefreshCycleDurationGOOGLE is not nothrow_move_constructible!" );
struct RenderPassAttachmentBeginInfo
{
@@ -80870,30 +83631,32 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassAttachmentBeginInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- RenderPassAttachmentBeginInfo( uint32_t attachmentCount_ = {},
- const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ = {} ) VULKAN_HPP_NOEXCEPT
- : attachmentCount( attachmentCount_ )
+ VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo( uint32_t attachmentCount_ = {},
+ const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , attachmentCount( attachmentCount_ )
, pAttachments( pAttachments_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- RenderPassAttachmentBeginInfo( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassAttachmentBeginInfo( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderPassAttachmentBeginInfo( *reinterpret_cast<RenderPassAttachmentBeginInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- RenderPassAttachmentBeginInfo(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_ )
- : attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() )
- {}
+ RenderPassAttachmentBeginInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- RenderPassAttachmentBeginInfo &
- operator=( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ RenderPassAttachmentBeginInfo & operator=( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassAttachmentBeginInfo & operator=( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -80908,24 +83671,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo &
- setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
attachmentCount = attachmentCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo &
- setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pAttachments = pAttachments_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- RenderPassAttachmentBeginInfo & setAttachments(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_ )
- VULKAN_HPP_NOEXCEPT
+ RenderPassAttachmentBeginInfo &
+ setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_ ) VULKAN_HPP_NOEXCEPT
{
attachmentCount = static_cast<uint32_t>( attachments_.size() );
pAttachments = attachments_.data();
@@ -80934,24 +83694,21 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkRenderPassAttachmentBeginInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkRenderPassAttachmentBeginInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassAttachmentBeginInfo *>( this );
}
- explicit operator VkRenderPassAttachmentBeginInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkRenderPassAttachmentBeginInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderPassAttachmentBeginInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- uint32_t const &,
- const VULKAN_HPP_NAMESPACE::ImageView * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ImageView * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -80964,11 +83721,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( RenderPassAttachmentBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentCount == rhs.attachmentCount ) &&
- ( pAttachments == rhs.pAttachments );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments );
# endif
}
@@ -80984,14 +83740,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t attachmentCount = {};
const VULKAN_HPP_NAMESPACE::ImageView * pAttachments = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo ) ==
- sizeof( VkRenderPassAttachmentBeginInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo>::value,
- "RenderPassAttachmentBeginInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eRenderPassAttachmentBeginInfo>
@@ -81008,37 +83756,41 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassBeginInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14
- RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
- VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {},
- VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {},
- uint32_t clearValueCount_ = {},
- const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_ = {} ) VULKAN_HPP_NOEXCEPT
- : renderPass( renderPass_ )
+ VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
+ VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {},
+ VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {},
+ uint32_t clearValueCount_ = {},
+ const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , renderPass( renderPass_ )
, framebuffer( framebuffer_ )
, renderArea( renderArea_ )
, clearValueCount( clearValueCount_ )
, pClearValues( pClearValues_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : RenderPassBeginInfo( *reinterpret_cast<RenderPassBeginInfo const *>( &rhs ) )
- {}
+ RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT : RenderPassBeginInfo( *reinterpret_cast<RenderPassBeginInfo const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- RenderPassBeginInfo(
- VULKAN_HPP_NAMESPACE::RenderPass renderPass_,
- VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_,
- VULKAN_HPP_NAMESPACE::Rect2D renderArea_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_ )
- : renderPass( renderPass_ )
+ RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_,
+ VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_,
+ VULKAN_HPP_NAMESPACE::Rect2D renderArea_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , renderPass( renderPass_ )
, framebuffer( framebuffer_ )
, renderArea( renderArea_ )
, clearValueCount( static_cast<uint32_t>( clearValues_.size() ) )
, pClearValues( clearValues_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -81057,22 +83809,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo &
- setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
{
renderPass = renderPass_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo &
- setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
{
framebuffer = framebuffer_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo &
- setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
{
renderArea = renderArea_;
return *this;
@@ -81084,17 +83833,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo &
- setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_ ) VULKAN_HPP_NOEXCEPT
{
pClearValues = pClearValues_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- RenderPassBeginInfo & setClearValues(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_ )
- VULKAN_HPP_NOEXCEPT
+ RenderPassBeginInfo &
+ setClearValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_ ) VULKAN_HPP_NOEXCEPT
{
clearValueCount = static_cast<uint32_t>( clearValues_.size() );
pClearValues = clearValues_.data();
@@ -81103,17 +83850,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassBeginInfo *>( this );
}
- explicit operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderPassBeginInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -81136,12 +83883,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( RenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) &&
- ( framebuffer == rhs.framebuffer ) && ( renderArea == rhs.renderArea ) &&
- ( clearValueCount == rhs.clearValueCount ) && ( pClearValues == rhs.pClearValues );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) && ( framebuffer == rhs.framebuffer ) &&
+ ( renderArea == rhs.renderArea ) && ( clearValueCount == rhs.clearValueCount ) && ( pClearValues == rhs.pClearValues );
# endif
}
@@ -81160,12 +83906,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t clearValueCount = {};
const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo>::value,
- "RenderPassBeginInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eRenderPassBeginInfo>
@@ -81178,17 +83918,16 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkSubpassDescription;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SubpassDescription(
- VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
- uint32_t inputAttachmentCount_ = {},
- const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_ = {},
- uint32_t colorAttachmentCount_ = {},
- const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_ = {},
- const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_ = {},
- const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ = {},
- uint32_t preserveAttachmentCount_ = {},
- const uint32_t * pPreserveAttachments_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SubpassDescription( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
+ uint32_t inputAttachmentCount_ = {},
+ const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_ = {},
+ uint32_t colorAttachmentCount_ = {},
+ const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_ = {},
+ const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_ = {},
+ const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ = {},
+ uint32_t preserveAttachmentCount_ = {},
+ const uint32_t * pPreserveAttachments_ = {} ) VULKAN_HPP_NOEXCEPT
: flags( flags_ )
, pipelineBindPoint( pipelineBindPoint_ )
, inputAttachmentCount( inputAttachmentCount_ )
@@ -81199,26 +83938,21 @@ namespace VULKAN_HPP_NAMESPACE
, pDepthStencilAttachment( pDepthStencilAttachment_ )
, preserveAttachmentCount( preserveAttachmentCount_ )
, pPreserveAttachments( pPreserveAttachments_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR SubpassDescription( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SubpassDescription( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT
- : SubpassDescription( *reinterpret_cast<SubpassDescription const *>( &rhs ) )
- {}
+ SubpassDescription( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassDescription( *reinterpret_cast<SubpassDescription const *>( &rhs ) ) {}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- SubpassDescription(
- VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_,
- VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const &
- inputAttachments_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const &
- colorAttachments_ = {},
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const &
- resolveAttachments_ = {},
- const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ = {},
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ = {} )
+ SubpassDescription( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_,
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & inputAttachments_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & colorAttachments_ = {},
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & resolveAttachments_ = {},
+ const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ = {},
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ = {} )
: flags( flags_ )
, pipelineBindPoint( pipelineBindPoint_ )
, inputAttachmentCount( static_cast<uint32_t>( inputAttachments_.size() ) )
@@ -81253,29 +83987,26 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 SubpassDescription &
- setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SubpassDescription &
- setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
{
pipelineBindPoint = pipelineBindPoint_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SubpassDescription &
- setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
inputAttachmentCount = inputAttachmentCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDescription &
- setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
+ setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pInputAttachments = pInputAttachments_;
return *this;
@@ -81283,8 +84014,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubpassDescription & setInputAttachments(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const &
- inputAttachments_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & inputAttachments_ ) VULKAN_HPP_NOEXCEPT
{
inputAttachmentCount = static_cast<uint32_t>( inputAttachments_.size() );
pInputAttachments = inputAttachments_.data();
@@ -81292,15 +84022,14 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 SubpassDescription &
- setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = colorAttachmentCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDescription &
- setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
+ setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pColorAttachments = pColorAttachments_;
return *this;
@@ -81308,8 +84037,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubpassDescription & setColorAttachments(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const &
- colorAttachments_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = static_cast<uint32_t>( colorAttachments_.size() );
pColorAttachments = colorAttachments_.data();
@@ -81317,8 +84045,8 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPResolveAttachments(
- const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassDescription &
+ setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pResolveAttachments = pResolveAttachments_;
return *this;
@@ -81326,8 +84054,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubpassDescription & setResolveAttachments(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const &
- resolveAttachments_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & resolveAttachments_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = static_cast<uint32_t>( resolveAttachments_.size() );
pResolveAttachments = resolveAttachments_.data();
@@ -81335,30 +84062,28 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPDepthStencilAttachment(
- const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassDescription &
+ setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
{
pDepthStencilAttachment = pDepthStencilAttachment_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SubpassDescription &
- setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
preserveAttachmentCount = preserveAttachmentCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SubpassDescription &
- setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pPreserveAttachments = pPreserveAttachments_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- SubpassDescription & setPreserveAttachments(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDescription &
+ setPreserveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT
{
preserveAttachmentCount = static_cast<uint32_t>( preserveAttachments_.size() );
pPreserveAttachments = preserveAttachments_.data();
@@ -81367,17 +84092,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSubpassDescription const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSubpassDescription const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubpassDescription *>( this );
}
- explicit operator VkSubpassDescription &() VULKAN_HPP_NOEXCEPT
+ operator VkSubpassDescription &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSubpassDescription *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -81412,15 +84137,13 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SubpassDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( flags == rhs.flags ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) &&
- ( inputAttachmentCount == rhs.inputAttachmentCount ) && ( pInputAttachments == rhs.pInputAttachments ) &&
- ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachments == rhs.pColorAttachments ) &&
- ( pResolveAttachments == rhs.pResolveAttachments ) &&
- ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) &&
- ( preserveAttachmentCount == rhs.preserveAttachmentCount ) &&
+ return ( flags == rhs.flags ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( inputAttachmentCount == rhs.inputAttachmentCount ) &&
+ ( pInputAttachments == rhs.pInputAttachments ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) &&
+ ( pColorAttachments == rhs.pColorAttachments ) && ( pResolveAttachments == rhs.pResolveAttachments ) &&
+ ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) && ( preserveAttachmentCount == rhs.preserveAttachmentCount ) &&
( pPreserveAttachments == rhs.pPreserveAttachments );
# endif
}
@@ -81432,9 +84155,9 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {};
- VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
- uint32_t inputAttachmentCount = {};
+ VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {};
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+ uint32_t inputAttachmentCount = {};
const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments = {};
uint32_t colorAttachmentCount = {};
const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments = {};
@@ -81443,26 +84166,19 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t preserveAttachmentCount = {};
const uint32_t * pPreserveAttachments = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDescription ) == sizeof( VkSubpassDescription ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDescription>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDescription>::value,
- "SubpassDescription is not nothrow_move_constructible!" );
struct SubpassDependency
{
using NativeType = VkSubpassDependency;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- SubpassDependency( uint32_t srcSubpass_ = {},
- uint32_t dstSubpass_ = {},
- VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {},
- VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {},
- VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
- VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
- VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SubpassDependency( uint32_t srcSubpass_ = {},
+ uint32_t dstSubpass_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {},
+ VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
+ VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
+ VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {} ) VULKAN_HPP_NOEXCEPT
: srcSubpass( srcSubpass_ )
, dstSubpass( dstSubpass_ )
, srcStageMask( srcStageMask_ )
@@ -81470,13 +84186,12 @@ namespace VULKAN_HPP_NAMESPACE
, srcAccessMask( srcAccessMask_ )
, dstAccessMask( dstAccessMask_ )
, dependencyFlags( dependencyFlags_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR SubpassDependency( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SubpassDependency( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
- : SubpassDependency( *reinterpret_cast<SubpassDependency const *>( &rhs ) )
- {}
+ SubpassDependency( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassDependency( *reinterpret_cast<SubpassDependency const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
SubpassDependency & operator=( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -81500,53 +84215,48 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SubpassDependency &
- setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
{
srcStageMask = srcStageMask_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SubpassDependency &
- setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
{
dstStageMask = dstStageMask_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SubpassDependency &
- setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
srcAccessMask = srcAccessMask_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SubpassDependency &
- setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
dstAccessMask = dstAccessMask_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SubpassDependency &
- setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
{
dependencyFlags = dependencyFlags_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSubpassDependency const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSubpassDependency const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubpassDependency *>( this );
}
- explicit operator VkSubpassDependency &() VULKAN_HPP_NOEXCEPT
+ operator VkSubpassDependency &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSubpassDependency *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -81560,8 +84270,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- srcSubpass, dstSubpass, srcStageMask, dstStageMask, srcAccessMask, dstAccessMask, dependencyFlags );
+ return std::tie( srcSubpass, dstSubpass, srcStageMask, dstStageMask, srcAccessMask, dstAccessMask, dependencyFlags );
}
#endif
@@ -81570,12 +84279,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SubpassDependency const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( srcSubpass == rhs.srcSubpass ) && ( dstSubpass == rhs.dstSubpass ) &&
- ( srcStageMask == rhs.srcStageMask ) && ( dstStageMask == rhs.dstStageMask ) &&
- ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ) &&
+ return ( srcSubpass == rhs.srcSubpass ) && ( dstSubpass == rhs.dstSubpass ) && ( srcStageMask == rhs.srcStageMask ) &&
+ ( dstStageMask == rhs.dstStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ) &&
( dependencyFlags == rhs.dependencyFlags );
# endif
}
@@ -81595,12 +84303,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDependency ) == sizeof( VkSubpassDependency ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDependency>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDependency>::value,
- "SubpassDependency is not nothrow_move_constructible!" );
struct RenderPassCreateInfo
{
@@ -81610,46 +84312,48 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {},
- uint32_t attachmentCount_ = {},
- const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ = {},
- uint32_t subpassCount_ = {},
- const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ = {},
- uint32_t dependencyCount_ = {},
- const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {},
+ uint32_t attachmentCount_ = {},
+ const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ = {},
+ uint32_t subpassCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ = {},
+ uint32_t dependencyCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, attachmentCount( attachmentCount_ )
, pAttachments( pAttachments_ )
, subpassCount( subpassCount_ )
, pSubpasses( pSubpasses_ )
, dependencyCount( dependencyCount_ )
, pDependencies( pDependencies_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderPassCreateInfo( *reinterpret_cast<RenderPassCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- RenderPassCreateInfo(
- VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription> const &
- attachments_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription> const &
- subpasses_ = {},
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency> const &
- dependencies_ = {} )
- : flags( flags_ )
+ RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription> const & attachments_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription> const & subpasses_ = {},
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency> const & dependencies_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, attachmentCount( static_cast<uint32_t>( attachments_.size() ) )
, pAttachments( attachments_.data() )
, subpassCount( static_cast<uint32_t>( subpasses_.size() ) )
, pSubpasses( subpasses_.data() )
, dependencyCount( static_cast<uint32_t>( dependencies_.size() ) )
, pDependencies( dependencies_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -81668,8 +84372,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -81681,8 +84384,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo &
- setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pAttachments = pAttachments_;
return *this;
@@ -81690,8 +84392,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassCreateInfo & setAttachments(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription> const &
- attachments_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription> const & attachments_ ) VULKAN_HPP_NOEXCEPT
{
attachmentCount = static_cast<uint32_t>( attachments_.size() );
pAttachments = attachments_.data();
@@ -81705,17 +84406,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo &
- setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ ) VULKAN_HPP_NOEXCEPT
{
pSubpasses = pSubpasses_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- RenderPassCreateInfo & setSubpasses(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription> const & subpasses_ )
- VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo &
+ setSubpasses( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription> const & subpasses_ ) VULKAN_HPP_NOEXCEPT
{
subpassCount = static_cast<uint32_t>( subpasses_.size() );
pSubpasses = subpasses_.data();
@@ -81729,17 +84428,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo &
- setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ ) VULKAN_HPP_NOEXCEPT
{
pDependencies = pDependencies_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- RenderPassCreateInfo & setDependencies(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency> const &
- dependencies_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo &
+ setDependencies( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency> const & dependencies_ ) VULKAN_HPP_NOEXCEPT
{
dependencyCount = static_cast<uint32_t>( dependencies_.size() );
pDependencies = dependencies_.data();
@@ -81748,17 +84445,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkRenderPassCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkRenderPassCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassCreateInfo *>( this );
}
- explicit operator VkRenderPassCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkRenderPassCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderPassCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -81774,8 +84471,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- sType, pNext, flags, attachmentCount, pAttachments, subpassCount, pSubpasses, dependencyCount, pDependencies );
+ return std::tie( sType, pNext, flags, attachmentCount, pAttachments, subpassCount, pSubpasses, dependencyCount, pDependencies );
}
#endif
@@ -81784,12 +84480,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( RenderPassCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) &&
- ( subpassCount == rhs.subpassCount ) && ( pSubpasses == rhs.pSubpasses ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( attachmentCount == rhs.attachmentCount ) &&
+ ( pAttachments == rhs.pAttachments ) && ( subpassCount == rhs.subpassCount ) && ( pSubpasses == rhs.pSubpasses ) &&
( dependencyCount == rhs.dependencyCount ) && ( pDependencies == rhs.pDependencies );
# endif
}
@@ -81811,12 +84506,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t dependencyCount = {};
const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo>::value,
- "RenderPassCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eRenderPassCreateInfo>
@@ -81832,19 +84521,20 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescription2;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SubpassDescription2(
- VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
- uint32_t viewMask_ = {},
- uint32_t inputAttachmentCount_ = {},
- const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_ = {},
- uint32_t colorAttachmentCount_ = {},
- const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_ = {},
- const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_ = {},
- const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ = {},
- uint32_t preserveAttachmentCount_ = {},
- const uint32_t * pPreserveAttachments_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR SubpassDescription2( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
+ uint32_t viewMask_ = {},
+ uint32_t inputAttachmentCount_ = {},
+ const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_ = {},
+ uint32_t colorAttachmentCount_ = {},
+ const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_ = {},
+ const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_ = {},
+ const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ = {},
+ uint32_t preserveAttachmentCount_ = {},
+ const uint32_t * pPreserveAttachments_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, pipelineBindPoint( pipelineBindPoint_ )
, viewMask( viewMask_ )
, inputAttachmentCount( inputAttachmentCount_ )
@@ -81855,28 +84545,27 @@ namespace VULKAN_HPP_NAMESPACE
, pDepthStencilAttachment( pDepthStencilAttachment_ )
, preserveAttachmentCount( preserveAttachmentCount_ )
, pPreserveAttachments( pPreserveAttachments_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR SubpassDescription2( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SubpassDescription2( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : SubpassDescription2( *reinterpret_cast<SubpassDescription2 const *>( &rhs ) )
- {}
+ SubpassDescription2( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassDescription2( *reinterpret_cast<SubpassDescription2 const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- SubpassDescription2(
- VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_,
- VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_,
- uint32_t viewMask_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const &
- inputAttachments_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const &
- colorAttachments_ = {},
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const &
- resolveAttachments_ = {},
- const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ = {},
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ = {} )
- : flags( flags_ )
+ SubpassDescription2( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_,
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_,
+ uint32_t viewMask_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & inputAttachments_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & colorAttachments_ = {},
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & resolveAttachments_ = {},
+ const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ = {},
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, pipelineBindPoint( pipelineBindPoint_ )
, viewMask( viewMask_ )
, inputAttachmentCount( static_cast<uint32_t>( inputAttachments_.size() ) )
@@ -81917,15 +84606,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 &
- setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 &
- setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
{
pipelineBindPoint = pipelineBindPoint_;
return *this;
@@ -81937,15 +84624,14 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 &
- setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
inputAttachmentCount = inputAttachmentCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 &
- setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
+ setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pInputAttachments = pInputAttachments_;
return *this;
@@ -81953,8 +84639,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubpassDescription2 & setInputAttachments(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const &
- inputAttachments_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & inputAttachments_ ) VULKAN_HPP_NOEXCEPT
{
inputAttachmentCount = static_cast<uint32_t>( inputAttachments_.size() );
pInputAttachments = inputAttachments_.data();
@@ -81962,15 +84647,14 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 &
- setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = colorAttachmentCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 &
- setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
+ setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pColorAttachments = pColorAttachments_;
return *this;
@@ -81978,8 +84662,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubpassDescription2 & setColorAttachments(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const &
- colorAttachments_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = static_cast<uint32_t>( colorAttachments_.size() );
pColorAttachments = colorAttachments_.data();
@@ -81987,8 +84670,8 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPResolveAttachments(
- const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 &
+ setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pResolveAttachments = pResolveAttachments_;
return *this;
@@ -81996,8 +84679,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubpassDescription2 & setResolveAttachments(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const &
- resolveAttachments_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & resolveAttachments_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = static_cast<uint32_t>( resolveAttachments_.size() );
pResolveAttachments = resolveAttachments_.data();
@@ -82005,30 +84687,28 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPDepthStencilAttachment(
- const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 &
+ setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
{
pDepthStencilAttachment = pDepthStencilAttachment_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 &
- setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
preserveAttachmentCount = preserveAttachmentCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 &
- setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pPreserveAttachments = pPreserveAttachments_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- SubpassDescription2 & setPreserveAttachments(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDescription2 &
+ setPreserveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT
{
preserveAttachmentCount = static_cast<uint32_t>( preserveAttachments_.size() );
pPreserveAttachments = preserveAttachments_.data();
@@ -82037,17 +84717,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSubpassDescription2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSubpassDescription2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubpassDescription2 *>( this );
}
- explicit operator VkSubpassDescription2 &() VULKAN_HPP_NOEXCEPT
+ operator VkSubpassDescription2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSubpassDescription2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -82088,17 +84768,14 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SubpassDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( viewMask == rhs.viewMask ) &&
- ( inputAttachmentCount == rhs.inputAttachmentCount ) && ( pInputAttachments == rhs.pInputAttachments ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) &&
+ ( viewMask == rhs.viewMask ) && ( inputAttachmentCount == rhs.inputAttachmentCount ) && ( pInputAttachments == rhs.pInputAttachments ) &&
( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachments == rhs.pColorAttachments ) &&
- ( pResolveAttachments == rhs.pResolveAttachments ) &&
- ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) &&
- ( preserveAttachmentCount == rhs.preserveAttachmentCount ) &&
- ( pPreserveAttachments == rhs.pPreserveAttachments );
+ ( pResolveAttachments == rhs.pResolveAttachments ) && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) &&
+ ( preserveAttachmentCount == rhs.preserveAttachmentCount ) && ( pPreserveAttachments == rhs.pPreserveAttachments );
# endif
}
@@ -82109,12 +84786,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescription2;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {};
- VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
- uint32_t viewMask = {};
- uint32_t inputAttachmentCount = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescription2;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {};
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+ uint32_t viewMask = {};
+ uint32_t inputAttachmentCount = {};
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments = {};
uint32_t colorAttachmentCount = {};
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments = {};
@@ -82123,12 +84800,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t preserveAttachmentCount = {};
const uint32_t * pPreserveAttachments = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDescription2 ) == sizeof( VkSubpassDescription2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDescription2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDescription2>::value,
- "SubpassDescription2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSubpassDescription2>
@@ -82152,8 +84823,10 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {},
- int32_t viewOffset_ = {} ) VULKAN_HPP_NOEXCEPT
- : srcSubpass( srcSubpass_ )
+ int32_t viewOffset_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , srcSubpass( srcSubpass_ )
, dstSubpass( dstSubpass_ )
, srcStageMask( srcStageMask_ )
, dstStageMask( dstStageMask_ )
@@ -82161,13 +84834,12 @@ namespace VULKAN_HPP_NAMESPACE
, dstAccessMask( dstAccessMask_ )
, dependencyFlags( dependencyFlags_ )
, viewOffset( viewOffset_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR SubpassDependency2( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SubpassDependency2( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : SubpassDependency2( *reinterpret_cast<SubpassDependency2 const *>( &rhs ) )
- {}
+ SubpassDependency2( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassDependency2( *reinterpret_cast<SubpassDependency2 const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
SubpassDependency2 & operator=( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -82197,36 +84869,31 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 &
- setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
{
srcStageMask = srcStageMask_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 &
- setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
{
dstStageMask = dstStageMask_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 &
- setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
srcAccessMask = srcAccessMask_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 &
- setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
dstAccessMask = dstAccessMask_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 &
- setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
{
dependencyFlags = dependencyFlags_;
return *this;
@@ -82239,17 +84906,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSubpassDependency2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSubpassDependency2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubpassDependency2 *>( this );
}
- explicit operator VkSubpassDependency2 &() VULKAN_HPP_NOEXCEPT
+ operator VkSubpassDependency2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSubpassDependency2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -82266,16 +84933,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- srcSubpass,
- dstSubpass,
- srcStageMask,
- dstStageMask,
- srcAccessMask,
- dstAccessMask,
- dependencyFlags,
- viewOffset );
+ return std::tie( sType, pNext, srcSubpass, dstSubpass, srcStageMask, dstStageMask, srcAccessMask, dstAccessMask, dependencyFlags, viewOffset );
}
#endif
@@ -82284,14 +84942,12 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SubpassDependency2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubpass == rhs.srcSubpass ) &&
- ( dstSubpass == rhs.dstSubpass ) && ( srcStageMask == rhs.srcStageMask ) &&
- ( dstStageMask == rhs.dstStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) &&
- ( dstAccessMask == rhs.dstAccessMask ) && ( dependencyFlags == rhs.dependencyFlags ) &&
- ( viewOffset == rhs.viewOffset );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubpass == rhs.srcSubpass ) && ( dstSubpass == rhs.dstSubpass ) &&
+ ( srcStageMask == rhs.srcStageMask ) && ( dstStageMask == rhs.dstStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) &&
+ ( dstAccessMask == rhs.dstAccessMask ) && ( dependencyFlags == rhs.dependencyFlags ) && ( viewOffset == rhs.viewOffset );
# endif
}
@@ -82313,12 +84969,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
int32_t viewOffset = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDependency2 ) == sizeof( VkSubpassDependency2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDependency2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDependency2>::value,
- "SubpassDependency2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSubpassDependency2>
@@ -82335,16 +84985,18 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo2;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {},
- uint32_t attachmentCount_ = {},
- const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ = {},
- uint32_t subpassCount_ = {},
- const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses_ = {},
- uint32_t dependencyCount_ = {},
- const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_ = {},
- uint32_t correlatedViewMaskCount_ = {},
- const uint32_t * pCorrelatedViewMasks_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {},
+ uint32_t attachmentCount_ = {},
+ const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ = {},
+ uint32_t subpassCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses_ = {},
+ uint32_t dependencyCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_ = {},
+ uint32_t correlatedViewMaskCount_ = {},
+ const uint32_t * pCorrelatedViewMasks_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, attachmentCount( attachmentCount_ )
, pAttachments( pAttachments_ )
, subpassCount( subpassCount_ )
@@ -82353,25 +85005,25 @@ namespace VULKAN_HPP_NAMESPACE
, pDependencies( pDependencies_ )
, correlatedViewMaskCount( correlatedViewMaskCount_ )
, pCorrelatedViewMasks( pCorrelatedViewMasks_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassCreateInfo2( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderPassCreateInfo2( *reinterpret_cast<RenderPassCreateInfo2 const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- RenderPassCreateInfo2(
- VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription2> const &
- attachments_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription2> const &
- subpasses_ = {},
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency2> const &
- dependencies_ = {},
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlatedViewMasks_ = {} )
- : flags( flags_ )
+ RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription2> const & attachments_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription2> const & subpasses_ = {},
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency2> const & dependencies_ = {},
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlatedViewMasks_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, attachmentCount( static_cast<uint32_t>( attachments_.size() ) )
, pAttachments( attachments_.data() )
, subpassCount( static_cast<uint32_t>( subpasses_.size() ) )
@@ -82380,7 +85032,8 @@ namespace VULKAN_HPP_NAMESPACE
, pDependencies( dependencies_.data() )
, correlatedViewMaskCount( static_cast<uint32_t>( correlatedViewMasks_.size() ) )
, pCorrelatedViewMasks( correlatedViewMasks_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -82399,8 +85052,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 &
- setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -82412,8 +85064,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 &
- setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pAttachments = pAttachments_;
return *this;
@@ -82421,8 +85072,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassCreateInfo2 & setAttachments(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription2> const &
- attachments_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription2> const & attachments_ ) VULKAN_HPP_NOEXCEPT
{
attachmentCount = static_cast<uint32_t>( attachments_.size() );
pAttachments = attachments_.data();
@@ -82436,17 +85086,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 &
- setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses_ ) VULKAN_HPP_NOEXCEPT
{
pSubpasses = pSubpasses_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- RenderPassCreateInfo2 & setSubpasses(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription2> const &
- subpasses_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo2 &
+ setSubpasses( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription2> const & subpasses_ ) VULKAN_HPP_NOEXCEPT
{
subpassCount = static_cast<uint32_t>( subpasses_.size() );
pSubpasses = subpasses_.data();
@@ -82460,17 +85108,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 &
- setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_ ) VULKAN_HPP_NOEXCEPT
{
pDependencies = pDependencies_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- RenderPassCreateInfo2 & setDependencies(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency2> const &
- dependencies_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo2 &
+ setDependencies( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency2> const & dependencies_ ) VULKAN_HPP_NOEXCEPT
{
dependencyCount = static_cast<uint32_t>( dependencies_.size() );
pDependencies = dependencies_.data();
@@ -82478,23 +85124,21 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 &
- setCorrelatedViewMaskCount( uint32_t correlatedViewMaskCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setCorrelatedViewMaskCount( uint32_t correlatedViewMaskCount_ ) VULKAN_HPP_NOEXCEPT
{
correlatedViewMaskCount = correlatedViewMaskCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 &
- setPCorrelatedViewMasks( const uint32_t * pCorrelatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPCorrelatedViewMasks( const uint32_t * pCorrelatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
{
pCorrelatedViewMasks = pCorrelatedViewMasks_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- RenderPassCreateInfo2 & setCorrelatedViewMasks(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo2 &
+ setCorrelatedViewMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
{
correlatedViewMaskCount = static_cast<uint32_t>( correlatedViewMasks_.size() );
pCorrelatedViewMasks = correlatedViewMasks_.data();
@@ -82503,17 +85147,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkRenderPassCreateInfo2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkRenderPassCreateInfo2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassCreateInfo2 *>( this );
}
- explicit operator VkRenderPassCreateInfo2 &() VULKAN_HPP_NOEXCEPT
+ operator VkRenderPassCreateInfo2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderPassCreateInfo2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -82550,15 +85194,13 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( RenderPassCreateInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) &&
- ( subpassCount == rhs.subpassCount ) && ( pSubpasses == rhs.pSubpasses ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( attachmentCount == rhs.attachmentCount ) &&
+ ( pAttachments == rhs.pAttachments ) && ( subpassCount == rhs.subpassCount ) && ( pSubpasses == rhs.pSubpasses ) &&
( dependencyCount == rhs.dependencyCount ) && ( pDependencies == rhs.pDependencies ) &&
- ( correlatedViewMaskCount == rhs.correlatedViewMaskCount ) &&
- ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks );
+ ( correlatedViewMaskCount == rhs.correlatedViewMaskCount ) && ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks );
# endif
}
@@ -82569,24 +85211,18 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo2;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {};
- uint32_t attachmentCount = {};
- const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments = {};
- uint32_t subpassCount = {};
- const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses = {};
- uint32_t dependencyCount = {};
- const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo2;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {};
+ uint32_t attachmentCount = {};
+ const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments = {};
+ uint32_t subpassCount = {};
+ const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses = {};
+ uint32_t dependencyCount = {};
+ const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies = {};
uint32_t correlatedViewMaskCount = {};
const uint32_t * pCorrelatedViewMasks = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 ) == sizeof( VkRenderPassCreateInfo2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2>::value,
- "RenderPassCreateInfo2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eRenderPassCreateInfo2>
@@ -82595,73 +85231,331 @@ namespace VULKAN_HPP_NAMESPACE
};
using RenderPassCreateInfo2KHR = RenderPassCreateInfo2;
+ struct RenderPassCreationControlEXT
+ {
+ using NativeType = VkRenderPassCreationControlEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreationControlEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR RenderPassCreationControlEXT( VULKAN_HPP_NAMESPACE::Bool32 disallowMerging_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , disallowMerging( disallowMerging_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR RenderPassCreationControlEXT( RenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ RenderPassCreationControlEXT( VkRenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : RenderPassCreationControlEXT( *reinterpret_cast<RenderPassCreationControlEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ RenderPassCreationControlEXT & operator=( RenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ RenderPassCreationControlEXT & operator=( VkRenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreationControlEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 RenderPassCreationControlEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 RenderPassCreationControlEXT & setDisallowMerging( VULKAN_HPP_NAMESPACE::Bool32 disallowMerging_ ) VULKAN_HPP_NOEXCEPT
+ {
+ disallowMerging = disallowMerging_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkRenderPassCreationControlEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkRenderPassCreationControlEXT *>( this );
+ }
+
+ operator VkRenderPassCreationControlEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkRenderPassCreationControlEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, disallowMerging );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( RenderPassCreationControlEXT const & ) const = default;
+#else
+ bool operator==( RenderPassCreationControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( disallowMerging == rhs.disallowMerging );
+# endif
+ }
+
+ bool operator!=( RenderPassCreationControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreationControlEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 disallowMerging = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eRenderPassCreationControlEXT>
+ {
+ using Type = RenderPassCreationControlEXT;
+ };
+
+ struct RenderPassCreationFeedbackInfoEXT
+ {
+ using NativeType = VkRenderPassCreationFeedbackInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackInfoEXT( uint32_t postMergeSubpassCount_ = {} ) VULKAN_HPP_NOEXCEPT
+ : postMergeSubpassCount( postMergeSubpassCount_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackInfoEXT( RenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ RenderPassCreationFeedbackInfoEXT( VkRenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : RenderPassCreationFeedbackInfoEXT( *reinterpret_cast<RenderPassCreationFeedbackInfoEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ RenderPassCreationFeedbackInfoEXT & operator=( RenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ RenderPassCreationFeedbackInfoEXT & operator=( VkRenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+ operator VkRenderPassCreationFeedbackInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkRenderPassCreationFeedbackInfoEXT *>( this );
+ }
+
+ operator VkRenderPassCreationFeedbackInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkRenderPassCreationFeedbackInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<uint32_t const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( postMergeSubpassCount );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( RenderPassCreationFeedbackInfoEXT const & ) const = default;
+#else
+ bool operator==( RenderPassCreationFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( postMergeSubpassCount == rhs.postMergeSubpassCount );
+# endif
+ }
+
+ bool operator!=( RenderPassCreationFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ uint32_t postMergeSubpassCount = {};
+ };
+
+ struct RenderPassCreationFeedbackCreateInfoEXT
+ {
+ using NativeType = VkRenderPassCreationFeedbackCreateInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreationFeedbackCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT * pRenderPassFeedback_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pRenderPassFeedback( pRenderPassFeedback_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackCreateInfoEXT( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ RenderPassCreationFeedbackCreateInfoEXT( VkRenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : RenderPassCreationFeedbackCreateInfoEXT( *reinterpret_cast<RenderPassCreationFeedbackCreateInfoEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ RenderPassCreationFeedbackCreateInfoEXT & operator=( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ RenderPassCreationFeedbackCreateInfoEXT & operator=( VkRenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackCreateInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 RenderPassCreationFeedbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 RenderPassCreationFeedbackCreateInfoEXT &
+ setPRenderPassFeedback( VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT * pRenderPassFeedback_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pRenderPassFeedback = pRenderPassFeedback_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkRenderPassCreationFeedbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkRenderPassCreationFeedbackCreateInfoEXT *>( this );
+ }
+
+ operator VkRenderPassCreationFeedbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkRenderPassCreationFeedbackCreateInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT * const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, pRenderPassFeedback );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( RenderPassCreationFeedbackCreateInfoEXT const & ) const = default;
+#else
+ bool operator==( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pRenderPassFeedback == rhs.pRenderPassFeedback );
+# endif
+ }
+
+ bool operator!=( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreationFeedbackCreateInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT * pRenderPassFeedback = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eRenderPassCreationFeedbackCreateInfoEXT>
+ {
+ using Type = RenderPassCreationFeedbackCreateInfoEXT;
+ };
+
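(A minimal usage sketch for the wrappers added above, assuming the default vk namespace alias, a vk::Device named device, and attachment/subpass/dependency arrays named attachments, subpasses and dependencies defined elsewhere; the pNext-chaining rules are those of VK_EXT_subpass_merge_feedback, not something introduced by this patch.)

    // Sketch: request render pass creation feedback and disallow subpass merging,
    // using the constructors shown in the diff (note the new trailing pNext_ parameter).
    vk::RenderPassCreationFeedbackInfoEXT       feedbackInfo{};                                  // filled in by the implementation
    vk::RenderPassCreationFeedbackCreateInfoEXT feedbackCreateInfo( &feedbackInfo );             // pRenderPassFeedback_
    vk::RenderPassCreationControlEXT            creationControl( VK_TRUE, &feedbackCreateInfo ); // disallowMerging_, pNext_

    // The enhanced RenderPassCreateInfo2 constructor also takes pNext_ last,
    // so the whole chain can be supplied in one expression.
    vk::RenderPassCreateInfo2 createInfo( {}, attachments, subpasses, dependencies, {}, &creationControl );
    vk::RenderPass renderPass = device.createRenderPass2( createInfo );
    // feedbackInfo.postMergeSubpassCount then reports the subpass count after merging.
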
struct RenderPassFragmentDensityMapCreateInfoEXT
{
using NativeType = VkRenderPassFragmentDensityMapCreateInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT(
- VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ = {} ) VULKAN_HPP_NOEXCEPT
- : fragmentDensityMapAttachment( fragmentDensityMapAttachment_ )
- {}
+ VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , fragmentDensityMapAttachment( fragmentDensityMapAttachment_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT(
- RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- RenderPassFragmentDensityMapCreateInfoEXT( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : RenderPassFragmentDensityMapCreateInfoEXT(
- *reinterpret_cast<RenderPassFragmentDensityMapCreateInfoEXT const *>( &rhs ) )
- {}
+ RenderPassFragmentDensityMapCreateInfoEXT( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : RenderPassFragmentDensityMapCreateInfoEXT( *reinterpret_cast<RenderPassFragmentDensityMapCreateInfoEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- RenderPassFragmentDensityMapCreateInfoEXT &
- operator=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ RenderPassFragmentDensityMapCreateInfoEXT & operator=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- RenderPassFragmentDensityMapCreateInfoEXT &
- operator=( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ RenderPassFragmentDensityMapCreateInfoEXT & operator=( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapCreateInfoEXT &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapCreateInfoEXT & setFragmentDensityMapAttachment(
- VULKAN_HPP_NAMESPACE::AttachmentReference const & fragmentDensityMapAttachment_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapCreateInfoEXT &
+ setFragmentDensityMapAttachment( VULKAN_HPP_NAMESPACE::AttachmentReference const & fragmentDensityMapAttachment_ ) VULKAN_HPP_NOEXCEPT
{
fragmentDensityMapAttachment = fragmentDensityMapAttachment_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkRenderPassFragmentDensityMapCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkRenderPassFragmentDensityMapCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassFragmentDensityMapCreateInfoEXT *>( this );
}
- explicit operator VkRenderPassFragmentDensityMapCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkRenderPassFragmentDensityMapCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderPassFragmentDensityMapCreateInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::AttachmentReference const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AttachmentReference const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -82674,11 +85568,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( fragmentDensityMapAttachment == rhs.fragmentDensityMapAttachment );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMapAttachment == rhs.fragmentDensityMapAttachment );
# endif
}
@@ -82689,19 +85582,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;
+ const void * pNext = {};
VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT ) ==
- sizeof( VkRenderPassFragmentDensityMapCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT>::value,
- "RenderPassFragmentDensityMapCreateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eRenderPassFragmentDensityMapCreateInfoEXT>
@@ -82714,63 +85598,58 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkRenderPassInputAttachmentAspectCreateInfo;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eRenderPassInputAttachmentAspectCreateInfo;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassInputAttachmentAspectCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo(
- uint32_t aspectReferenceCount_ = {},
- const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_ = {} ) VULKAN_HPP_NOEXCEPT
- : aspectReferenceCount( aspectReferenceCount_ )
+ VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( uint32_t aspectReferenceCount_ = {},
+ const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , aspectReferenceCount( aspectReferenceCount_ )
, pAspectReferences( pAspectReferences_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo(
- RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs )
- VULKAN_HPP_NOEXCEPT
- : RenderPassInputAttachmentAspectCreateInfo(
- *reinterpret_cast<RenderPassInputAttachmentAspectCreateInfo const *>( &rhs ) )
- {}
+ RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ : RenderPassInputAttachmentAspectCreateInfo( *reinterpret_cast<RenderPassInputAttachmentAspectCreateInfo const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassInputAttachmentAspectCreateInfo(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference> const &
- aspectReferences_ )
- : aspectReferenceCount( static_cast<uint32_t>( aspectReferences_.size() ) )
- , pAspectReferences( aspectReferences_.data() )
- {}
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference> const & aspectReferences_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ ), aspectReferenceCount( static_cast<uint32_t>( aspectReferences_.size() ) ), pAspectReferences( aspectReferences_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- RenderPassInputAttachmentAspectCreateInfo &
- operator=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ RenderPassInputAttachmentAspectCreateInfo & operator=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- RenderPassInputAttachmentAspectCreateInfo &
- operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ RenderPassInputAttachmentAspectCreateInfo & operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo &
- setAspectReferenceCount( uint32_t aspectReferenceCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & setAspectReferenceCount( uint32_t aspectReferenceCount_ ) VULKAN_HPP_NOEXCEPT
{
aspectReferenceCount = aspectReferenceCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & setPAspectReferences(
- const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo &
+ setPAspectReferences( const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_ ) VULKAN_HPP_NOEXCEPT
{
pAspectReferences = pAspectReferences_;
return *this;
@@ -82778,8 +85657,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassInputAttachmentAspectCreateInfo & setAspectReferences(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference> const &
- aspectReferences_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference> const & aspectReferences_ ) VULKAN_HPP_NOEXCEPT
{
aspectReferenceCount = static_cast<uint32_t>( aspectReferences_.size() );
pAspectReferences = aspectReferences_.data();
@@ -82788,17 +85666,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkRenderPassInputAttachmentAspectCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkRenderPassInputAttachmentAspectCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassInputAttachmentAspectCreateInfo *>( this );
}
- explicit operator VkRenderPassInputAttachmentAspectCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkRenderPassInputAttachmentAspectCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderPassInputAttachmentAspectCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -82818,7 +85696,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( RenderPassInputAttachmentAspectCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( aspectReferenceCount == rhs.aspectReferenceCount ) &&
@@ -82833,20 +85711,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassInputAttachmentAspectCreateInfo;
- const void * pNext = {};
- uint32_t aspectReferenceCount = {};
- const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassInputAttachmentAspectCreateInfo;
+ const void * pNext = {};
+ uint32_t aspectReferenceCount = {};
+ const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo ) ==
- sizeof( VkRenderPassInputAttachmentAspectCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo>::value,
- "RenderPassInputAttachmentAspectCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eRenderPassInputAttachmentAspectCreateInfo>
@@ -82868,39 +85737,43 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t dependencyCount_ = {},
const int32_t * pViewOffsets_ = {},
uint32_t correlationMaskCount_ = {},
- const uint32_t * pCorrelationMasks_ = {} ) VULKAN_HPP_NOEXCEPT
- : subpassCount( subpassCount_ )
+ const uint32_t * pCorrelationMasks_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , subpassCount( subpassCount_ )
, pViewMasks( pViewMasks_ )
, dependencyCount( dependencyCount_ )
, pViewOffsets( pViewOffsets_ )
, correlationMaskCount( correlationMaskCount_ )
, pCorrelationMasks( pCorrelationMasks_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- RenderPassMultiviewCreateInfo( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassMultiviewCreateInfo( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderPassMultiviewCreateInfo( *reinterpret_cast<RenderPassMultiviewCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- RenderPassMultiviewCreateInfo(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & viewMasks_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int32_t> const & viewOffsets_ = {},
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlationMasks_ = {} )
- : subpassCount( static_cast<uint32_t>( viewMasks_.size() ) )
+ RenderPassMultiviewCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & viewMasks_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int32_t> const & viewOffsets_ = {},
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlationMasks_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , subpassCount( static_cast<uint32_t>( viewMasks_.size() ) )
, pViewMasks( viewMasks_.data() )
, dependencyCount( static_cast<uint32_t>( viewOffsets_.size() ) )
, pViewOffsets( viewOffsets_.data() )
, correlationMaskCount( static_cast<uint32_t>( correlationMasks_.size() ) )
, pCorrelationMasks( correlationMasks_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- RenderPassMultiviewCreateInfo &
- operator=( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ RenderPassMultiviewCreateInfo & operator=( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassMultiviewCreateInfo & operator=( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -82915,23 +85788,20 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo &
- setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
{
subpassCount = subpassCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo &
- setPViewMasks( const uint32_t * pViewMasks_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPViewMasks( const uint32_t * pViewMasks_ ) VULKAN_HPP_NOEXCEPT
{
pViewMasks = pViewMasks_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- RenderPassMultiviewCreateInfo & setViewMasks(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & viewMasks_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassMultiviewCreateInfo & setViewMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & viewMasks_ ) VULKAN_HPP_NOEXCEPT
{
subpassCount = static_cast<uint32_t>( viewMasks_.size() );
pViewMasks = viewMasks_.data();
@@ -82939,23 +85809,20 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo &
- setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
{
dependencyCount = dependencyCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo &
- setPViewOffsets( const int32_t * pViewOffsets_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPViewOffsets( const int32_t * pViewOffsets_ ) VULKAN_HPP_NOEXCEPT
{
pViewOffsets = pViewOffsets_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- RenderPassMultiviewCreateInfo & setViewOffsets(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int32_t> const & viewOffsets_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassMultiviewCreateInfo & setViewOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int32_t> const & viewOffsets_ ) VULKAN_HPP_NOEXCEPT
{
dependencyCount = static_cast<uint32_t>( viewOffsets_.size() );
pViewOffsets = viewOffsets_.data();
@@ -82963,23 +85830,21 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo &
- setCorrelationMaskCount( uint32_t correlationMaskCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setCorrelationMaskCount( uint32_t correlationMaskCount_ ) VULKAN_HPP_NOEXCEPT
{
correlationMaskCount = correlationMaskCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo &
- setPCorrelationMasks( const uint32_t * pCorrelationMasks_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPCorrelationMasks( const uint32_t * pCorrelationMasks_ ) VULKAN_HPP_NOEXCEPT
{
pCorrelationMasks = pCorrelationMasks_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- RenderPassMultiviewCreateInfo & setCorrelationMasks(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlationMasks_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassMultiviewCreateInfo &
+ setCorrelationMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlationMasks_ ) VULKAN_HPP_NOEXCEPT
{
correlationMaskCount = static_cast<uint32_t>( correlationMasks_.size() );
pCorrelationMasks = correlationMasks_.data();
@@ -82988,17 +85853,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkRenderPassMultiviewCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkRenderPassMultiviewCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassMultiviewCreateInfo *>( this );
}
- explicit operator VkRenderPassMultiviewCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkRenderPassMultiviewCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderPassMultiviewCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -83013,14 +85878,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- subpassCount,
- pViewMasks,
- dependencyCount,
- pViewOffsets,
- correlationMaskCount,
- pCorrelationMasks );
+ return std::tie( sType, pNext, subpassCount, pViewMasks, dependencyCount, pViewOffsets, correlationMaskCount, pCorrelationMasks );
}
#endif
@@ -83029,12 +85887,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( RenderPassMultiviewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subpassCount == rhs.subpassCount ) &&
- ( pViewMasks == rhs.pViewMasks ) && ( dependencyCount == rhs.dependencyCount ) &&
- ( pViewOffsets == rhs.pViewOffsets ) && ( correlationMaskCount == rhs.correlationMaskCount ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subpassCount == rhs.subpassCount ) && ( pViewMasks == rhs.pViewMasks ) &&
+ ( dependencyCount == rhs.dependencyCount ) && ( pViewOffsets == rhs.pViewOffsets ) && ( correlationMaskCount == rhs.correlationMaskCount ) &&
( pCorrelationMasks == rhs.pCorrelationMasks );
# endif
}
@@ -83055,14 +85912,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t correlationMaskCount = {};
const uint32_t * pCorrelationMasks = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo ) ==
- sizeof( VkRenderPassMultiviewCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo>::value,
- "RenderPassMultiviewCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eRenderPassMultiviewCreateInfo>
@@ -83076,19 +85925,19 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkSubpassSampleLocationsEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT(
- uint32_t subpassIndex_ = {},
- VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( uint32_t subpassIndex_ = {},
+ VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT
: subpassIndex( subpassIndex_ )
, sampleLocationsInfo( sampleLocationsInfo_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- SubpassSampleLocationsEXT( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SubpassSampleLocationsEXT( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: SubpassSampleLocationsEXT( *reinterpret_cast<SubpassSampleLocationsEXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
SubpassSampleLocationsEXT & operator=( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -83106,25 +85955,25 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SubpassSampleLocationsEXT & setSampleLocationsInfo(
- VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassSampleLocationsEXT &
+ setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationsInfo = sampleLocationsInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSubpassSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSubpassSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubpassSampleLocationsEXT *>( this );
}
- explicit operator VkSubpassSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkSubpassSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSubpassSampleLocationsEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -83141,7 +85990,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SubpassSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( subpassIndex == rhs.subpassIndex ) && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
@@ -83158,61 +86007,54 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t subpassIndex = {};
VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT ) ==
- sizeof( VkSubpassSampleLocationsEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT>::value,
- "SubpassSampleLocationsEXT is not nothrow_move_constructible!" );
struct RenderPassSampleLocationsBeginInfoEXT
{
using NativeType = VkRenderPassSampleLocationsBeginInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eRenderPassSampleLocationsBeginInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassSampleLocationsBeginInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT(
- uint32_t attachmentInitialSampleLocationsCount_ = {},
- const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_ = {},
- uint32_t postSubpassSampleLocationsCount_ = {},
- const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT
- : attachmentInitialSampleLocationsCount( attachmentInitialSampleLocationsCount_ )
+ VULKAN_HPP_CONSTEXPR
+ RenderPassSampleLocationsBeginInfoEXT( uint32_t attachmentInitialSampleLocationsCount_ = {},
+ const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_ = {},
+ uint32_t postSubpassSampleLocationsCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , attachmentInitialSampleLocationsCount( attachmentInitialSampleLocationsCount_ )
, pAttachmentInitialSampleLocations( pAttachmentInitialSampleLocations_ )
, postSubpassSampleLocationsCount( postSubpassSampleLocationsCount_ )
, pPostSubpassSampleLocations( pPostSubpassSampleLocations_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( RenderPassSampleLocationsBeginInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassSampleLocationsBeginInfoEXT( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : RenderPassSampleLocationsBeginInfoEXT(
- *reinterpret_cast<RenderPassSampleLocationsBeginInfoEXT const *>( &rhs ) )
- {}
+ : RenderPassSampleLocationsBeginInfoEXT( *reinterpret_cast<RenderPassSampleLocationsBeginInfoEXT const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassSampleLocationsBeginInfoEXT(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT> const &
- attachmentInitialSampleLocations_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT> const &
- postSubpassSampleLocations_ = {} )
- : attachmentInitialSampleLocationsCount( static_cast<uint32_t>( attachmentInitialSampleLocations_.size() ) )
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT> const & attachmentInitialSampleLocations_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT> const & postSubpassSampleLocations_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , attachmentInitialSampleLocationsCount( static_cast<uint32_t>( attachmentInitialSampleLocations_.size() ) )
, pAttachmentInitialSampleLocations( attachmentInitialSampleLocations_.data() )
, postSubpassSampleLocationsCount( static_cast<uint32_t>( postSubpassSampleLocations_.size() ) )
, pPostSubpassSampleLocations( postSubpassSampleLocations_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- RenderPassSampleLocationsBeginInfoEXT &
- operator=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ RenderPassSampleLocationsBeginInfoEXT & operator=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- RenderPassSampleLocationsBeginInfoEXT &
- operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ RenderPassSampleLocationsBeginInfoEXT & operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT const *>( &rhs );
return *this;
@@ -83226,15 +86068,14 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT &
- setAttachmentInitialSampleLocationsCount( uint32_t attachmentInitialSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
+ setAttachmentInitialSampleLocationsCount( uint32_t attachmentInitialSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
{
attachmentInitialSampleLocationsCount = attachmentInitialSampleLocationsCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setPAttachmentInitialSampleLocations(
- const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_ )
- VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT &
+ setPAttachmentInitialSampleLocations( const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT
{
pAttachmentInitialSampleLocations = pAttachmentInitialSampleLocations_;
return *this;
@@ -83242,8 +86083,8 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassSampleLocationsBeginInfoEXT & setAttachmentInitialSampleLocations(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT> const &
- attachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT> const & attachmentInitialSampleLocations_ )
+ VULKAN_HPP_NOEXCEPT
{
attachmentInitialSampleLocationsCount = static_cast<uint32_t>( attachmentInitialSampleLocations_.size() );
pAttachmentInitialSampleLocations = attachmentInitialSampleLocations_.data();
@@ -83252,14 +86093,14 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT &
- setPostSubpassSampleLocationsCount( uint32_t postSubpassSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
+ setPostSubpassSampleLocationsCount( uint32_t postSubpassSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
{
postSubpassSampleLocationsCount = postSubpassSampleLocationsCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setPPostSubpassSampleLocations(
- const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT &
+ setPPostSubpassSampleLocations( const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT
{
pPostSubpassSampleLocations = pPostSubpassSampleLocations_;
return *this;
@@ -83267,8 +86108,8 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocations(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT> const &
- postSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT> const & postSubpassSampleLocations_ )
+ VULKAN_HPP_NOEXCEPT
{
postSubpassSampleLocationsCount = static_cast<uint32_t>( postSubpassSampleLocations_.size() );
pPostSubpassSampleLocations = postSubpassSampleLocations_.data();
@@ -83277,17 +86118,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkRenderPassSampleLocationsBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkRenderPassSampleLocationsBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassSampleLocationsBeginInfoEXT *>( this );
}
- explicit operator VkRenderPassSampleLocationsBeginInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkRenderPassSampleLocationsBeginInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderPassSampleLocationsBeginInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -83300,12 +86141,8 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- attachmentInitialSampleLocationsCount,
- pAttachmentInitialSampleLocations,
- postSubpassSampleLocationsCount,
- pPostSubpassSampleLocations );
+ return std::tie(
+ sType, pNext, attachmentInitialSampleLocationsCount, pAttachmentInitialSampleLocations, postSubpassSampleLocationsCount, pPostSubpassSampleLocations );
}
#endif
@@ -83314,14 +86151,12 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( RenderPassSampleLocationsBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( attachmentInitialSampleLocationsCount == rhs.attachmentInitialSampleLocationsCount ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentInitialSampleLocationsCount == rhs.attachmentInitialSampleLocationsCount ) &&
( pAttachmentInitialSampleLocations == rhs.pAttachmentInitialSampleLocations ) &&
- ( postSubpassSampleLocationsCount == rhs.postSubpassSampleLocationsCount ) &&
- ( pPostSubpassSampleLocations == rhs.pPostSubpassSampleLocations );
+ ( postSubpassSampleLocationsCount == rhs.postSubpassSampleLocationsCount ) && ( pPostSubpassSampleLocations == rhs.pPostSubpassSampleLocations );
# endif
}
@@ -83332,21 +86167,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT;
- const void * pNext = {};
- uint32_t attachmentInitialSampleLocationsCount = {};
- const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations = {};
- uint32_t postSubpassSampleLocationsCount = {};
- const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT;
+ const void * pNext = {};
+ uint32_t attachmentInitialSampleLocationsCount = {};
+ const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations = {};
+ uint32_t postSubpassSampleLocationsCount = {};
+ const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT ) ==
- sizeof( VkRenderPassSampleLocationsBeginInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT>::value,
- "RenderPassSampleLocationsBeginInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eRenderPassSampleLocationsBeginInfoEXT>
@@ -83354,29 +86181,207 @@ namespace VULKAN_HPP_NAMESPACE
using Type = RenderPassSampleLocationsBeginInfoEXT;
};
+ struct RenderPassSubpassFeedbackInfoEXT
+ {
+ using NativeType = VkRenderPassSubpassFeedbackInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14
+ RenderPassSubpassFeedbackInfoEXT( VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT subpassMergeStatus_ = VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT::eMerged,
+ std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
+ uint32_t postMergeIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+ : subpassMergeStatus( subpassMergeStatus_ )
+ , description( description_ )
+ , postMergeIndex( postMergeIndex_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackInfoEXT( RenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ RenderPassSubpassFeedbackInfoEXT( VkRenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : RenderPassSubpassFeedbackInfoEXT( *reinterpret_cast<RenderPassSubpassFeedbackInfoEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ RenderPassSubpassFeedbackInfoEXT & operator=( RenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ RenderPassSubpassFeedbackInfoEXT & operator=( VkRenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+ operator VkRenderPassSubpassFeedbackInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkRenderPassSubpassFeedbackInfoEXT *>( this );
+ }
+
+ operator VkRenderPassSubpassFeedbackInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkRenderPassSubpassFeedbackInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::
+ tuple<VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, uint32_t const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( subpassMergeStatus, description, postMergeIndex );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( RenderPassSubpassFeedbackInfoEXT const & ) const = default;
+#else
+ bool operator==( RenderPassSubpassFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( subpassMergeStatus == rhs.subpassMergeStatus ) && ( description == rhs.description ) && ( postMergeIndex == rhs.postMergeIndex );
+# endif
+ }
+
+ bool operator!=( RenderPassSubpassFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT subpassMergeStatus = VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT::eMerged;
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+ uint32_t postMergeIndex = {};
+ };
+
+ struct RenderPassSubpassFeedbackCreateInfoEXT
+ {
+ using NativeType = VkRenderPassSubpassFeedbackCreateInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassSubpassFeedbackCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT * pSubpassFeedback_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pSubpassFeedback( pSubpassFeedback_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ RenderPassSubpassFeedbackCreateInfoEXT( VkRenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : RenderPassSubpassFeedbackCreateInfoEXT( *reinterpret_cast<RenderPassSubpassFeedbackCreateInfoEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ RenderPassSubpassFeedbackCreateInfoEXT & operator=( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ RenderPassSubpassFeedbackCreateInfoEXT & operator=( VkRenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackCreateInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT &
+ setPSubpassFeedback( VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT * pSubpassFeedback_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pSubpassFeedback = pSubpassFeedback_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkRenderPassSubpassFeedbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkRenderPassSubpassFeedbackCreateInfoEXT *>( this );
+ }
+
+ operator VkRenderPassSubpassFeedbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkRenderPassSubpassFeedbackCreateInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT * const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, pSubpassFeedback );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( RenderPassSubpassFeedbackCreateInfoEXT const & ) const = default;
+#else
+ bool operator==( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pSubpassFeedback == rhs.pSubpassFeedback );
+# endif
+ }
+
+ bool operator!=( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSubpassFeedbackCreateInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT * pSubpassFeedback = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eRenderPassSubpassFeedbackCreateInfoEXT>
+ {
+ using Type = RenderPassSubpassFeedbackCreateInfoEXT;
+ };
+
struct RenderPassTransformBeginInfoQCOM
{
using NativeType = VkRenderPassTransformBeginInfoQCOM;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassTransformBeginInfoQCOM;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassTransformBeginInfoQCOM;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM(
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ =
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity ) VULKAN_HPP_NOEXCEPT : transform( transform_ )
- {}
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , transform( transform_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- RenderPassTransformBeginInfoQCOM( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassTransformBeginInfoQCOM( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderPassTransformBeginInfoQCOM( *reinterpret_cast<RenderPassTransformBeginInfoQCOM const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- RenderPassTransformBeginInfoQCOM &
- operator=( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ RenderPassTransformBeginInfoQCOM & operator=( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderPassTransformBeginInfoQCOM & operator=( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -83391,31 +86396,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderPassTransformBeginInfoQCOM &
- setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderPassTransformBeginInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
{
transform = transform_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkRenderPassTransformBeginInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
+ operator VkRenderPassTransformBeginInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderPassTransformBeginInfoQCOM *>( this );
}
- explicit operator VkRenderPassTransformBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT
+ operator VkRenderPassTransformBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderPassTransformBeginInfoQCOM *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -83428,7 +86430,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( RenderPassTransformBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform );
@@ -83442,19 +86444,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassTransformBeginInfoQCOM;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform =
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassTransformBeginInfoQCOM;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM ) ==
- sizeof( VkRenderPassTransformBeginInfoQCOM ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM>::value,
- "RenderPassTransformBeginInfoQCOM is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eRenderPassTransformBeginInfoQCOM>
@@ -83470,16 +86463,17 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingAttachmentInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo(
- VULKAN_HPP_NAMESPACE::ImageView imageView_ = {},
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
- VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone,
- VULKAN_HPP_NAMESPACE::ImageView resolveImageView_ = {},
- VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
- VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
- VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
- VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {} ) VULKAN_HPP_NOEXCEPT
- : imageView( imageView_ )
+ VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone,
+ VULKAN_HPP_NAMESPACE::ImageView resolveImageView_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
+ VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
+ VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , imageView( imageView_ )
, imageLayout( imageLayout_ )
, resolveMode( resolveMode_ )
, resolveImageView( resolveImageView_ )
@@ -83487,14 +86481,15 @@ namespace VULKAN_HPP_NAMESPACE
, loadOp( loadOp_ )
, storeOp( storeOp_ )
, clearValue( clearValue_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14
- RenderingAttachmentInfo( RenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo( RenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
RenderingAttachmentInfo( VkRenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: RenderingAttachmentInfo( *reinterpret_cast<RenderingAttachmentInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
RenderingAttachmentInfo & operator=( RenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -83512,74 +86507,66 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo &
- setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
{
imageView = imageView_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo &
- setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
{
imageLayout = imageLayout_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo &
- setResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode_ ) VULKAN_HPP_NOEXCEPT
{
resolveMode = resolveMode_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo &
- setResolveImageView( VULKAN_HPP_NAMESPACE::ImageView resolveImageView_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveImageView( VULKAN_HPP_NAMESPACE::ImageView resolveImageView_ ) VULKAN_HPP_NOEXCEPT
{
resolveImageView = resolveImageView_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo &
- setResolveImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout_ ) VULKAN_HPP_NOEXCEPT
{
resolveImageLayout = resolveImageLayout_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo &
- setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
{
loadOp = loadOp_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo &
- setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
{
storeOp = storeOp_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo &
- setClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT
{
clearValue = clearValue_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkRenderingAttachmentInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkRenderingAttachmentInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderingAttachmentInfo *>( this );
}
- explicit operator VkRenderingAttachmentInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkRenderingAttachmentInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderingAttachmentInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -83596,16 +86583,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- imageView,
- imageLayout,
- resolveMode,
- resolveImageView,
- resolveImageLayout,
- loadOp,
- storeOp,
- clearValue );
+ return std::tie( sType, pNext, imageView, imageLayout, resolveMode, resolveImageView, resolveImageLayout, loadOp, storeOp, clearValue );
}
#endif
@@ -83621,13 +86599,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
VULKAN_HPP_NAMESPACE::ClearValue clearValue = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo ) ==
- sizeof( VkRenderingAttachmentInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo>::value,
- "RenderingAttachmentInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eRenderingAttachmentInfo>
@@ -83641,72 +86612,66 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkRenderingFragmentDensityMapAttachmentInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR RenderingFragmentDensityMapAttachmentInfoEXT(
- VULKAN_HPP_NAMESPACE::ImageView imageView_ = {},
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined )
- VULKAN_HPP_NOEXCEPT
- : imageView( imageView_ )
+ VULKAN_HPP_CONSTEXPR
+ RenderingFragmentDensityMapAttachmentInfoEXT( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , imageView( imageView_ )
, imageLayout( imageLayout_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR RenderingFragmentDensityMapAttachmentInfoEXT(
- RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR RenderingFragmentDensityMapAttachmentInfoEXT( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- RenderingFragmentDensityMapAttachmentInfoEXT( VkRenderingFragmentDensityMapAttachmentInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : RenderingFragmentDensityMapAttachmentInfoEXT(
- *reinterpret_cast<RenderingFragmentDensityMapAttachmentInfoEXT const *>( &rhs ) )
- {}
+ RenderingFragmentDensityMapAttachmentInfoEXT( VkRenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : RenderingFragmentDensityMapAttachmentInfoEXT( *reinterpret_cast<RenderingFragmentDensityMapAttachmentInfoEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- RenderingFragmentDensityMapAttachmentInfoEXT &
- operator=( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ RenderingFragmentDensityMapAttachmentInfoEXT & operator=( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- RenderingFragmentDensityMapAttachmentInfoEXT &
- operator=( VkRenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ RenderingFragmentDensityMapAttachmentInfoEXT & operator=( VkRenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT &
- setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
{
imageView = imageView_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT &
- setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
{
imageLayout = imageLayout_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkRenderingFragmentDensityMapAttachmentInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkRenderingFragmentDensityMapAttachmentInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderingFragmentDensityMapAttachmentInfoEXT *>( this );
}
- explicit operator VkRenderingFragmentDensityMapAttachmentInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkRenderingFragmentDensityMapAttachmentInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderingFragmentDensityMapAttachmentInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -83726,11 +86691,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ) &&
- ( imageLayout == rhs.imageLayout );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ) && ( imageLayout == rhs.imageLayout );
# endif
}
@@ -83746,15 +86710,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ImageView imageView = {};
VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT ) ==
- sizeof( VkRenderingFragmentDensityMapAttachmentInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT>::value,
- "RenderingFragmentDensityMapAttachmentInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT>
@@ -83767,80 +86722,76 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkRenderingFragmentShadingRateAttachmentInfoKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR RenderingFragmentShadingRateAttachmentInfoKHR(
- VULKAN_HPP_NAMESPACE::ImageView imageView_ = {},
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
- VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize_ = {} ) VULKAN_HPP_NOEXCEPT
- : imageView( imageView_ )
+ VULKAN_HPP_CONSTEXPR
+ RenderingFragmentShadingRateAttachmentInfoKHR( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , imageView( imageView_ )
, imageLayout( imageLayout_ )
, shadingRateAttachmentTexelSize( shadingRateAttachmentTexelSize_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR RenderingFragmentShadingRateAttachmentInfoKHR(
- RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ RenderingFragmentShadingRateAttachmentInfoKHR( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- RenderingFragmentShadingRateAttachmentInfoKHR( VkRenderingFragmentShadingRateAttachmentInfoKHR const & rhs )
- VULKAN_HPP_NOEXCEPT
- : RenderingFragmentShadingRateAttachmentInfoKHR(
- *reinterpret_cast<RenderingFragmentShadingRateAttachmentInfoKHR const *>( &rhs ) )
- {}
+ RenderingFragmentShadingRateAttachmentInfoKHR( VkRenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : RenderingFragmentShadingRateAttachmentInfoKHR( *reinterpret_cast<RenderingFragmentShadingRateAttachmentInfoKHR const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- RenderingFragmentShadingRateAttachmentInfoKHR &
- operator=( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ RenderingFragmentShadingRateAttachmentInfoKHR & operator=( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- RenderingFragmentShadingRateAttachmentInfoKHR &
- operator=( VkRenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ RenderingFragmentShadingRateAttachmentInfoKHR & operator=( VkRenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR &
- setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
{
imageView = imageView_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR &
- setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
{
imageLayout = imageLayout_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setShadingRateAttachmentTexelSize(
- VULKAN_HPP_NAMESPACE::Extent2D const & shadingRateAttachmentTexelSize_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR &
+ setShadingRateAttachmentTexelSize( VULKAN_HPP_NAMESPACE::Extent2D const & shadingRateAttachmentTexelSize_ ) VULKAN_HPP_NOEXCEPT
{
shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkRenderingFragmentShadingRateAttachmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkRenderingFragmentShadingRateAttachmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderingFragmentShadingRateAttachmentInfoKHR *>( this );
}
- explicit operator VkRenderingFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkRenderingFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderingFragmentShadingRateAttachmentInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -83861,11 +86812,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ) &&
- ( imageLayout == rhs.imageLayout ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ) && ( imageLayout == rhs.imageLayout ) &&
( shadingRateAttachmentTexelSize == rhs.shadingRateAttachmentTexelSize );
# endif
}
@@ -83877,21 +86827,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::ImageView imageView = {};
- VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::ImageView imageView = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR ) ==
- sizeof( VkRenderingFragmentShadingRateAttachmentInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR>::value,
- "RenderingFragmentShadingRateAttachmentInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR>
@@ -83907,16 +86848,17 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 RenderingInfo(
- VULKAN_HPP_NAMESPACE::RenderingFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {},
- uint32_t layerCount_ = {},
- uint32_t viewMask_ = {},
- uint32_t colorAttachmentCount_ = {},
- const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments_ = {},
- const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ = {},
- const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR_14 RenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {},
+ uint32_t layerCount_ = {},
+ uint32_t viewMask_ = {},
+ uint32_t colorAttachmentCount_ = {},
+ const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments_ = {},
+ const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ = {},
+ const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, renderArea( renderArea_ )
, layerCount( layerCount_ )
, viewMask( viewMask_ )
@@ -83924,25 +86866,24 @@ namespace VULKAN_HPP_NAMESPACE
, pColorAttachments( pColorAttachments_ )
, pDepthAttachment( pDepthAttachment_ )
, pStencilAttachment( pStencilAttachment_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR_14 RenderingInfo( RenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- RenderingInfo( VkRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : RenderingInfo( *reinterpret_cast<RenderingInfo const *>( &rhs ) )
- {}
+ RenderingInfo( VkRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT : RenderingInfo( *reinterpret_cast<RenderingInfo const *>( &rhs ) ) {}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- RenderingInfo(
- VULKAN_HPP_NAMESPACE::RenderingFlags flags_,
- VULKAN_HPP_NAMESPACE::Rect2D renderArea_,
- uint32_t layerCount_,
- uint32_t viewMask_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo> const &
- colorAttachments_,
- const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ = {},
- const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ = {} )
- : flags( flags_ )
+ RenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_,
+ VULKAN_HPP_NAMESPACE::Rect2D renderArea_,
+ uint32_t layerCount_,
+ uint32_t viewMask_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo> const & colorAttachments_,
+ const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ = {},
+ const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, renderArea( renderArea_ )
, layerCount( layerCount_ )
, viewMask( viewMask_ )
@@ -83950,7 +86891,8 @@ namespace VULKAN_HPP_NAMESPACE
, pColorAttachments( colorAttachments_.data() )
, pDepthAttachment( pDepthAttachment_ )
, pStencilAttachment( pStencilAttachment_ )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -83975,8 +86917,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderingInfo &
- setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
{
renderArea = renderArea_;
return *this;
@@ -83994,15 +86935,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderingInfo &
- setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = colorAttachmentCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPColorAttachments(
- const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPColorAttachments( const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pColorAttachments = pColorAttachments_;
return *this;
@@ -84010,8 +86949,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
RenderingInfo & setColorAttachments(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo> const &
- colorAttachments_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo> const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = static_cast<uint32_t>( colorAttachments_.size() );
pColorAttachments = colorAttachments_.data();
@@ -84019,32 +86957,31 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 RenderingInfo &
- setPDepthAttachment( const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPDepthAttachment( const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ ) VULKAN_HPP_NOEXCEPT
{
pDepthAttachment = pDepthAttachment_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPStencilAttachment(
- const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 RenderingInfo &
+ setPStencilAttachment( const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
{
pStencilAttachment = pStencilAttachment_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkRenderingInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkRenderingInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRenderingInfo *>( this );
}
- explicit operator VkRenderingInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkRenderingInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRenderingInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -84061,16 +86998,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- flags,
- renderArea,
- layerCount,
- viewMask,
- colorAttachmentCount,
- pColorAttachments,
- pDepthAttachment,
- pStencilAttachment );
+ return std::tie( sType, pNext, flags, renderArea, layerCount, viewMask, colorAttachmentCount, pColorAttachments, pDepthAttachment, pStencilAttachment );
}
#endif
@@ -84079,13 +87007,12 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( RenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( renderArea == rhs.renderArea ) && ( layerCount == rhs.layerCount ) && ( viewMask == rhs.viewMask ) &&
- ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachments == rhs.pColorAttachments ) &&
- ( pDepthAttachment == rhs.pDepthAttachment ) && ( pStencilAttachment == rhs.pStencilAttachment );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( renderArea == rhs.renderArea ) &&
+ ( layerCount == rhs.layerCount ) && ( viewMask == rhs.viewMask ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) &&
+ ( pColorAttachments == rhs.pColorAttachments ) && ( pDepthAttachment == rhs.pDepthAttachment ) && ( pStencilAttachment == rhs.pStencilAttachment );
# endif
}
@@ -84107,12 +87034,6 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment = {};
const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingInfo ) == sizeof( VkRenderingInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingInfo>::value,
- "RenderingInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eRenderingInfo>
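Illustrative sketch, not part of the generated header diff: the RenderingInfo hunks above show the two recurring changes in this update, namely that value constructors gain an optional trailing pNext_ parameter and that the conversion operators to the C structs lose their explicit qualifier. A minimal usage sketch under those assumptions (vk is the default VULKAN_HPP_NAMESPACE; colorAttachment and renderArea are assumed to exist elsewhere):

// Illustration only -- not generated code from this diff.
vk::RenderingInfo renderingInfo( /*flags*/ {},
                                 renderArea,
                                 /*layerCount*/ 1,
                                 /*viewMask*/ 0,
                                 /*colorAttachmentCount*/ 1,
                                 &colorAttachment,
                                 /*pDepthAttachment*/ nullptr,
                                 /*pStencilAttachment*/ nullptr,
                                 /*pNext*/ nullptr );

// The conversion operator is no longer explicit, so the wrapper binds directly
// where the corresponding C struct is expected.
VkRenderingInfo const & native = renderingInfo;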
@@ -84129,41 +87050,43 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eResolveImageInfo2;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ResolveImageInfo2(
- VULKAN_HPP_NAMESPACE::Image srcImage_ = {},
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
- VULKAN_HPP_NAMESPACE::Image dstImage_ = {},
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
- uint32_t regionCount_ = {},
- const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions_ = {} ) VULKAN_HPP_NOEXCEPT
- : srcImage( srcImage_ )
+ VULKAN_HPP_CONSTEXPR ResolveImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ VULKAN_HPP_NAMESPACE::Image dstImage_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ uint32_t regionCount_ = {},
+ const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , srcImage( srcImage_ )
, srcImageLayout( srcImageLayout_ )
, dstImage( dstImage_ )
, dstImageLayout( dstImageLayout_ )
, regionCount( regionCount_ )
, pRegions( pRegions_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ResolveImageInfo2( ResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ResolveImageInfo2( VkResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : ResolveImageInfo2( *reinterpret_cast<ResolveImageInfo2 const *>( &rhs ) )
- {}
+ ResolveImageInfo2( VkResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : ResolveImageInfo2( *reinterpret_cast<ResolveImageInfo2 const *>( &rhs ) ) {}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- ResolveImageInfo2(
- VULKAN_HPP_NAMESPACE::Image srcImage_,
- VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_,
- VULKAN_HPP_NAMESPACE::Image dstImage_,
- VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2> const & regions_ )
- : srcImage( srcImage_ )
+ ResolveImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_,
+ VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_,
+ VULKAN_HPP_NAMESPACE::Image dstImage_,
+ VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2> const & regions_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , srcImage( srcImage_ )
, srcImageLayout( srcImageLayout_ )
, dstImage( dstImage_ )
, dstImageLayout( dstImageLayout_ )
, regionCount( static_cast<uint32_t>( regions_.size() ) )
, pRegions( regions_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -84188,8 +87111,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 &
- setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
{
srcImageLayout = srcImageLayout_;
return *this;
@@ -84201,8 +87123,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 &
- setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
{
dstImageLayout = dstImageLayout_;
return *this;
@@ -84214,17 +87135,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 &
- setPRegions( const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
{
pRegions = pRegions_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- ResolveImageInfo2 & setRegions(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2> const & regions_ )
- VULKAN_HPP_NOEXCEPT
+ ResolveImageInfo2 &
+ setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2> const & regions_ ) VULKAN_HPP_NOEXCEPT
{
regionCount = static_cast<uint32_t>( regions_.size() );
pRegions = regions_.data();
@@ -84233,17 +87152,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkResolveImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkResolveImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkResolveImageInfo2 *>( this );
}
- explicit operator VkResolveImageInfo2 &() VULKAN_HPP_NOEXCEPT
+ operator VkResolveImageInfo2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkResolveImageInfo2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -84267,13 +87186,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ResolveImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) &&
- ( srcImageLayout == rhs.srcImageLayout ) && ( dstImage == rhs.dstImage ) &&
- ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) &&
- ( pRegions == rhs.pRegions );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) &&
+ ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
# endif
}
@@ -84293,12 +87210,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t regionCount = {};
const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ResolveImageInfo2 ) == sizeof( VkResolveImageInfo2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ResolveImageInfo2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ResolveImageInfo2>::value,
- "ResolveImageInfo2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eResolveImageInfo2>
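Illustrative sketch, not part of the generated header diff: the other guard change repeated throughout these hunks replaces the GCC-version test around reflect() with the opt-in VULKAN_HPP_USE_REFLECT macro. When that macro is defined before including the header, operator== compares the std::tie(...) tuples returned by reflect(); otherwise it falls back to the member-wise comparison in the #else branch. A hedged sketch:

// Illustration only. VULKAN_HPP_USE_REFLECT is a user-defined opt-in macro.
#define VULKAN_HPP_USE_REFLECT
#include <vulkan/vulkan.hpp>

bool sameResolveInfo( vk::ResolveImageInfo2 const & a, vk::ResolveImageInfo2 const & b )
{
  // With the macro defined, this routes through reflect(); without it, the
  // member-by-member comparison shown in the diff above is used instead.
  return a == b;
}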
@@ -84312,71 +87223,67 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkSamplerBorderColorComponentMappingCreateInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- SamplerBorderColorComponentMappingCreateInfoEXT( VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 srgb_ = {} ) VULKAN_HPP_NOEXCEPT
- : components( components_ )
+ VULKAN_HPP_CONSTEXPR SamplerBorderColorComponentMappingCreateInfoEXT( VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 srgb_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , components( components_ )
, srgb( srgb_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR SamplerBorderColorComponentMappingCreateInfoEXT(
- SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ SamplerBorderColorComponentMappingCreateInfoEXT( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SamplerBorderColorComponentMappingCreateInfoEXT( VkSamplerBorderColorComponentMappingCreateInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : SamplerBorderColorComponentMappingCreateInfoEXT(
- *reinterpret_cast<SamplerBorderColorComponentMappingCreateInfoEXT const *>( &rhs ) )
- {}
+ SamplerBorderColorComponentMappingCreateInfoEXT( VkSamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : SamplerBorderColorComponentMappingCreateInfoEXT( *reinterpret_cast<SamplerBorderColorComponentMappingCreateInfoEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- SamplerBorderColorComponentMappingCreateInfoEXT &
- operator=( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ SamplerBorderColorComponentMappingCreateInfoEXT & operator=( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SamplerBorderColorComponentMappingCreateInfoEXT &
- operator=( VkSamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ SamplerBorderColorComponentMappingCreateInfoEXT & operator=( VkSamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT &
- setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
+ setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
{
components = components_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT &
- setSrgb( VULKAN_HPP_NAMESPACE::Bool32 srgb_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT & setSrgb( VULKAN_HPP_NAMESPACE::Bool32 srgb_ ) VULKAN_HPP_NOEXCEPT
{
srgb = srgb_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSamplerBorderColorComponentMappingCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSamplerBorderColorComponentMappingCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSamplerBorderColorComponentMappingCreateInfoEXT *>( this );
}
- explicit operator VkSamplerBorderColorComponentMappingCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkSamplerBorderColorComponentMappingCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSamplerBorderColorComponentMappingCreateInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -84396,11 +87303,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( components == rhs.components ) &&
- ( srgb == rhs.srgb );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( components == rhs.components ) && ( srgb == rhs.srgb );
# endif
}
@@ -84416,15 +87322,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
VULKAN_HPP_NAMESPACE::Bool32 srgb = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT ) ==
- sizeof( VkSamplerBorderColorComponentMappingCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT>::value,
- "SamplerBorderColorComponentMappingCreateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT>
@@ -84440,24 +87337,25 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SamplerCreateInfo(
- VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ = {},
- VULKAN_HPP_NAMESPACE::Filter magFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
- VULKAN_HPP_NAMESPACE::Filter minFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
- VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest,
- VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
- VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
- VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
- float mipLodBias_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ = {},
- float maxAnisotropy_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ = {},
- VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
- float minLod_ = {},
- float maxLod_ = {},
- VULKAN_HPP_NAMESPACE::BorderColor borderColor_ = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack,
- VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR SamplerCreateInfo( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::Filter magFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
+ VULKAN_HPP_NAMESPACE::Filter minFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
+ VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest,
+ VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
+ VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
+ VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
+ float mipLodBias_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ = {},
+ float maxAnisotropy_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ = {},
+ VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
+ float minLod_ = {},
+ float maxLod_ = {},
+ VULKAN_HPP_NAMESPACE::BorderColor borderColor_ = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack,
+ VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, magFilter( magFilter_ )
, minFilter( minFilter_ )
, mipmapMode( mipmapMode_ )
@@ -84473,13 +87371,12 @@ namespace VULKAN_HPP_NAMESPACE
, maxLod( maxLod_ )
, borderColor( borderColor_ )
, unnormalizedCoordinates( unnormalizedCoordinates_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR SamplerCreateInfo( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : SamplerCreateInfo( *reinterpret_cast<SamplerCreateInfo const *>( &rhs ) )
- {}
+ SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SamplerCreateInfo( *reinterpret_cast<SamplerCreateInfo const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
SamplerCreateInfo & operator=( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -84497,50 +87394,43 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &
- setMagFilter( VULKAN_HPP_NAMESPACE::Filter magFilter_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMagFilter( VULKAN_HPP_NAMESPACE::Filter magFilter_ ) VULKAN_HPP_NOEXCEPT
{
magFilter = magFilter_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &
- setMinFilter( VULKAN_HPP_NAMESPACE::Filter minFilter_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMinFilter( VULKAN_HPP_NAMESPACE::Filter minFilter_ ) VULKAN_HPP_NOEXCEPT
{
minFilter = minFilter_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &
- setMipmapMode( VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMipmapMode( VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ ) VULKAN_HPP_NOEXCEPT
{
mipmapMode = mipmapMode_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &
- setAddressModeU( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeU( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ ) VULKAN_HPP_NOEXCEPT
{
addressModeU = addressModeU_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &
- setAddressModeV( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeV( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ ) VULKAN_HPP_NOEXCEPT
{
addressModeV = addressModeV_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &
- setAddressModeW( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeW( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ ) VULKAN_HPP_NOEXCEPT
{
addressModeW = addressModeW_;
return *this;
@@ -84552,8 +87442,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &
- setAnisotropyEnable( VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAnisotropyEnable( VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ ) VULKAN_HPP_NOEXCEPT
{
anisotropyEnable = anisotropyEnable_;
return *this;
@@ -84565,15 +87454,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &
- setCompareEnable( VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setCompareEnable( VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ ) VULKAN_HPP_NOEXCEPT
{
compareEnable = compareEnable_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &
- setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
{
compareOp = compareOp_;
return *this;
@@ -84591,32 +87478,30 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &
- setBorderColor( VULKAN_HPP_NAMESPACE::BorderColor borderColor_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setBorderColor( VULKAN_HPP_NAMESPACE::BorderColor borderColor_ ) VULKAN_HPP_NOEXCEPT
{
borderColor = borderColor_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &
- setUnnormalizedCoordinates( VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setUnnormalizedCoordinates( VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ ) VULKAN_HPP_NOEXCEPT
{
unnormalizedCoordinates = unnormalizedCoordinates_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSamplerCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSamplerCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSamplerCreateInfo *>( this );
}
- explicit operator VkSamplerCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkSamplerCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSamplerCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -84667,17 +87552,14 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( magFilter == rhs.magFilter ) && ( minFilter == rhs.minFilter ) && ( mipmapMode == rhs.mipmapMode ) &&
- ( addressModeU == rhs.addressModeU ) && ( addressModeV == rhs.addressModeV ) &&
- ( addressModeW == rhs.addressModeW ) && ( mipLodBias == rhs.mipLodBias ) &&
- ( anisotropyEnable == rhs.anisotropyEnable ) && ( maxAnisotropy == rhs.maxAnisotropy ) &&
- ( compareEnable == rhs.compareEnable ) && ( compareOp == rhs.compareOp ) && ( minLod == rhs.minLod ) &&
- ( maxLod == rhs.maxLod ) && ( borderColor == rhs.borderColor ) &&
- ( unnormalizedCoordinates == rhs.unnormalizedCoordinates );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( magFilter == rhs.magFilter ) && ( minFilter == rhs.minFilter ) &&
+ ( mipmapMode == rhs.mipmapMode ) && ( addressModeU == rhs.addressModeU ) && ( addressModeV == rhs.addressModeV ) &&
+ ( addressModeW == rhs.addressModeW ) && ( mipLodBias == rhs.mipLodBias ) && ( anisotropyEnable == rhs.anisotropyEnable ) &&
+ ( maxAnisotropy == rhs.maxAnisotropy ) && ( compareEnable == rhs.compareEnable ) && ( compareOp == rhs.compareOp ) && ( minLod == rhs.minLod ) &&
+ ( maxLod == rhs.maxLod ) && ( borderColor == rhs.borderColor ) && ( unnormalizedCoordinates == rhs.unnormalizedCoordinates );
# endif
}
@@ -84688,31 +87570,25 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCreateInfo;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags = {};
- VULKAN_HPP_NAMESPACE::Filter magFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
- VULKAN_HPP_NAMESPACE::Filter minFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
- VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest;
- VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
- VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
- VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
- float mipLodBias = {};
- VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable = {};
- float maxAnisotropy = {};
- VULKAN_HPP_NAMESPACE::Bool32 compareEnable = {};
- VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
- float minLod = {};
- float maxLod = {};
- VULKAN_HPP_NAMESPACE::BorderColor borderColor = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCreateInfo;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::Filter magFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
+ VULKAN_HPP_NAMESPACE::Filter minFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
+ VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest;
+ VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
+ VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
+ VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
+ float mipLodBias = {};
+ VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable = {};
+ float maxAnisotropy = {};
+ VULKAN_HPP_NAMESPACE::Bool32 compareEnable = {};
+ VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
+ float minLod = {};
+ float maxLod = {};
+ VULKAN_HPP_NAMESPACE::BorderColor borderColor = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack;
VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerCreateInfo>::value,
- "SamplerCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSamplerCreateInfo>
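Illustrative sketch, not part of the generated header diff: the SamplerCreateInfo setters above only changed formatting; they still return *this and chain as before. A small sketch with arbitrary values (vk is the default VULKAN_HPP_NAMESPACE):

// Illustration only; the chosen filter and address modes are arbitrary.
vk::SamplerCreateInfo samplerCreateInfo;
samplerCreateInfo.setMagFilter( vk::Filter::eLinear )
                 .setMinFilter( vk::Filter::eLinear )
                 .setMipmapMode( vk::SamplerMipmapMode::eLinear )
                 .setAddressModeU( vk::SamplerAddressMode::eClampToEdge )
                 .setAddressModeV( vk::SamplerAddressMode::eClampToEdge )
                 .setAddressModeW( vk::SamplerAddressMode::eClampToEdge )
                 .setMaxLod( VK_LOD_CLAMP_NONE );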
@@ -84725,31 +87601,29 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkSamplerCustomBorderColorCreateInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eSamplerCustomBorderColorCreateInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCustomBorderColorCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT(
- VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor_ = {},
- VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined ) VULKAN_HPP_NOEXCEPT
- : customBorderColor( customBorderColor_ )
+ VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT( VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor_ = {},
+ VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , customBorderColor( customBorderColor_ )
, format( format_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT( SamplerCustomBorderColorCreateInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SamplerCustomBorderColorCreateInfoEXT( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : SamplerCustomBorderColorCreateInfoEXT(
- *reinterpret_cast<SamplerCustomBorderColorCreateInfoEXT const *>( &rhs ) )
- {}
+ : SamplerCustomBorderColorCreateInfoEXT( *reinterpret_cast<SamplerCustomBorderColorCreateInfoEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- SamplerCustomBorderColorCreateInfoEXT &
- operator=( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ SamplerCustomBorderColorCreateInfoEXT & operator=( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SamplerCustomBorderColorCreateInfoEXT &
- operator=( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ SamplerCustomBorderColorCreateInfoEXT & operator=( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT const *>( &rhs );
return *this;
@@ -84763,31 +87637,30 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT &
- setCustomBorderColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & customBorderColor_ ) VULKAN_HPP_NOEXCEPT
+ setCustomBorderColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & customBorderColor_ ) VULKAN_HPP_NOEXCEPT
{
customBorderColor = customBorderColor_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT &
- setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSamplerCustomBorderColorCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSamplerCustomBorderColorCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSamplerCustomBorderColorCreateInfoEXT *>( this );
}
- explicit operator VkSamplerCustomBorderColorCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkSamplerCustomBorderColorCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSamplerCustomBorderColorCreateInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -84808,14 +87681,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor = {};
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT ) ==
- sizeof( VkSamplerCustomBorderColorCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT>::value,
- "SamplerCustomBorderColorCreateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSamplerCustomBorderColorCreateInfoEXT>
@@ -84831,22 +87696,23 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerReductionModeCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo(
- VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ =
- VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage ) VULKAN_HPP_NOEXCEPT
- : reductionMode( reductionMode_ )
- {}
-
VULKAN_HPP_CONSTEXPR
- SamplerReductionModeCreateInfo( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ SamplerReductionModeCreateInfo( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , reductionMode( reductionMode_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SamplerReductionModeCreateInfo( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: SamplerReductionModeCreateInfo( *reinterpret_cast<SamplerReductionModeCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- SamplerReductionModeCreateInfo &
- operator=( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ SamplerReductionModeCreateInfo & operator=( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SamplerReductionModeCreateInfo & operator=( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -84861,31 +87727,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SamplerReductionModeCreateInfo &
- setReductionMode( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SamplerReductionModeCreateInfo & setReductionMode( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ ) VULKAN_HPP_NOEXCEPT
{
reductionMode = reductionMode_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSamplerReductionModeCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSamplerReductionModeCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSamplerReductionModeCreateInfo *>( this );
}
- explicit operator VkSamplerReductionModeCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkSamplerReductionModeCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSamplerReductionModeCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::SamplerReductionMode const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SamplerReductionMode const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -84898,7 +87761,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SamplerReductionModeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( reductionMode == rhs.reductionMode );
@@ -84912,19 +87775,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerReductionModeCreateInfo;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode =
- VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerReductionModeCreateInfo;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo ) ==
- sizeof( VkSamplerReductionModeCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo>::value,
- "SamplerReductionModeCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSamplerReductionModeCreateInfo>
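Illustrative sketch, not part of the generated header diff: SamplerReductionModeCreateInfo's constructor above now also takes the optional trailing pNext_. A hypothetical chaining sketch (vk is the default VULKAN_HPP_NAMESPACE; the reduction mode is arbitrary):

// Illustration only. The reduction-mode struct is linked into a sampler's
// pNext chain through the regular setPNext() setter.
vk::SamplerReductionModeCreateInfo reductionInfo( vk::SamplerReductionMode::eMin );

vk::SamplerCreateInfo samplerCreateInfo;
samplerCreateInfo.setPNext( &reductionInfo )
                 .setMagFilter( vk::Filter::eLinear )
                 .setMinFilter( vk::Filter::eLinear );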
@@ -84938,20 +87792,21 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkSamplerYcbcrConversionCreateInfo;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionCreateInfo;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo(
- VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ =
- VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity,
- VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull,
- VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {},
- VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
- VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
- VULKAN_HPP_NAMESPACE::Filter chromaFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
- VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ = {} ) VULKAN_HPP_NOEXCEPT
- : format( format_ )
+ VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity,
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull,
+ VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {},
+ VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
+ VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
+ VULKAN_HPP_NAMESPACE::Filter chromaFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
+ VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , format( format_ )
, ycbcrModel( ycbcrModel_ )
, ycbcrRange( ycbcrRange_ )
, components( components_ )
@@ -84959,18 +87814,18 @@ namespace VULKAN_HPP_NAMESPACE
, yChromaOffset( yChromaOffset_ )
, chromaFilter( chromaFilter_ )
, forceExplicitReconstruction( forceExplicitReconstruction_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- SamplerYcbcrConversionCreateInfo( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: SamplerYcbcrConversionCreateInfo( *reinterpret_cast<SamplerYcbcrConversionCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- SamplerYcbcrConversionCreateInfo &
- operator=( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ SamplerYcbcrConversionCreateInfo & operator=( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SamplerYcbcrConversionCreateInfo & operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -84985,74 +87840,68 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo &
- setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo &
- setYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ ) VULKAN_HPP_NOEXCEPT
+ setYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ ) VULKAN_HPP_NOEXCEPT
{
ycbcrModel = ycbcrModel_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo &
- setYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ ) VULKAN_HPP_NOEXCEPT
{
ycbcrRange = ycbcrRange_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo &
- setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
{
components = components_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo &
- setXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ ) VULKAN_HPP_NOEXCEPT
{
xChromaOffset = xChromaOffset_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo &
- setYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ ) VULKAN_HPP_NOEXCEPT
{
yChromaOffset = yChromaOffset_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo &
- setChromaFilter( VULKAN_HPP_NAMESPACE::Filter chromaFilter_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setChromaFilter( VULKAN_HPP_NAMESPACE::Filter chromaFilter_ ) VULKAN_HPP_NOEXCEPT
{
chromaFilter = chromaFilter_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo &
- setForceExplicitReconstruction( VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ ) VULKAN_HPP_NOEXCEPT
+ setForceExplicitReconstruction( VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ ) VULKAN_HPP_NOEXCEPT
{
forceExplicitReconstruction = forceExplicitReconstruction_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSamplerYcbcrConversionCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSamplerYcbcrConversionCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( this );
}
- explicit operator VkSamplerYcbcrConversionCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkSamplerYcbcrConversionCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSamplerYcbcrConversionCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -85069,16 +87918,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- format,
- ycbcrModel,
- ycbcrRange,
- components,
- xChromaOffset,
- yChromaOffset,
- chromaFilter,
- forceExplicitReconstruction );
+ return std::tie( sType, pNext, format, ycbcrModel, ycbcrRange, components, xChromaOffset, yChromaOffset, chromaFilter, forceExplicitReconstruction );
}
#endif
@@ -85087,13 +87927,13 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SamplerYcbcrConversionCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) &&
- ( ycbcrModel == rhs.ycbcrModel ) && ( ycbcrRange == rhs.ycbcrRange ) && ( components == rhs.components ) &&
- ( xChromaOffset == rhs.xChromaOffset ) && ( yChromaOffset == rhs.yChromaOffset ) &&
- ( chromaFilter == rhs.chromaFilter ) && ( forceExplicitReconstruction == rhs.forceExplicitReconstruction );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( ycbcrModel == rhs.ycbcrModel ) &&
+ ( ycbcrRange == rhs.ycbcrRange ) && ( components == rhs.components ) && ( xChromaOffset == rhs.xChromaOffset ) &&
+ ( yChromaOffset == rhs.yChromaOffset ) && ( chromaFilter == rhs.chromaFilter ) &&
+ ( forceExplicitReconstruction == rhs.forceExplicitReconstruction );
# endif
}
@@ -85104,26 +87944,17 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
- VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel =
- VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
- VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
- VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
- VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
- VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
- VULKAN_HPP_NAMESPACE::Filter chromaFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
- VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo ) ==
- sizeof( VkSamplerYcbcrConversionCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo>::value,
- "SamplerYcbcrConversionCreateInfo is not nothrow_move_constructible!" );
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
+ VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
+ VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+ VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+ VULKAN_HPP_NAMESPACE::Filter chromaFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
+ VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction = {};
+ };
template <>
struct CppType<StructureType, StructureType::eSamplerYcbcrConversionCreateInfo>
@@ -85137,46 +87968,43 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkSamplerYcbcrConversionImageFormatProperties;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eSamplerYcbcrConversionImageFormatProperties;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionImageFormatProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties(
- uint32_t combinedImageSamplerDescriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT
- : combinedImageSamplerDescriptorCount( combinedImageSamplerDescriptorCount_ )
- {}
+ VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( uint32_t combinedImageSamplerDescriptorCount_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , combinedImageSamplerDescriptorCount( combinedImageSamplerDescriptorCount_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties(
- SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SamplerYcbcrConversionImageFormatProperties( VkSamplerYcbcrConversionImageFormatProperties const & rhs )
- VULKAN_HPP_NOEXCEPT
- : SamplerYcbcrConversionImageFormatProperties(
- *reinterpret_cast<SamplerYcbcrConversionImageFormatProperties const *>( &rhs ) )
- {}
+ SamplerYcbcrConversionImageFormatProperties( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ : SamplerYcbcrConversionImageFormatProperties( *reinterpret_cast<SamplerYcbcrConversionImageFormatProperties const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- SamplerYcbcrConversionImageFormatProperties &
- operator=( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ SamplerYcbcrConversionImageFormatProperties & operator=( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SamplerYcbcrConversionImageFormatProperties &
- operator=( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ SamplerYcbcrConversionImageFormatProperties & operator=( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties const *>( &rhs );
return *this;
}
- explicit operator VkSamplerYcbcrConversionImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSamplerYcbcrConversionImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSamplerYcbcrConversionImageFormatProperties *>( this );
}
- explicit operator VkSamplerYcbcrConversionImageFormatProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkSamplerYcbcrConversionImageFormatProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -85193,11 +88021,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SamplerYcbcrConversionImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( combinedImageSamplerDescriptorCount == rhs.combinedImageSamplerDescriptorCount );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( combinedImageSamplerDescriptorCount == rhs.combinedImageSamplerDescriptorCount );
# endif
}
@@ -85208,19 +88035,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties;
+ void * pNext = {};
uint32_t combinedImageSamplerDescriptorCount = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties ) ==
- sizeof( VkSamplerYcbcrConversionImageFormatProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties>::value,
- "SamplerYcbcrConversionImageFormatProperties is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSamplerYcbcrConversionImageFormatProperties>
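(Editor's note, not part of the diff: the hunks above and below swap the old GCC-version guard for the opt-in VULKAN_HPP_USE_REFLECT macro; when it is defined, operator== compares the std::tie(...) tuple returned by reflect(), otherwise the memberwise comparison is kept. A minimal sketch of the opt-in, assuming the macro is meant to be defined before the header is included:)

    #define VULKAN_HPP_USE_REFLECT
    #include <vulkan/vulkan.hpp>

    bool sameProperties( vk::SamplerYcbcrConversionImageFormatProperties const & a,
                         vk::SamplerYcbcrConversionImageFormatProperties const & b )
    {
      // with the macro set this runs through reflect(), i.e.
      // std::tie( sType, pNext, combinedImageSamplerDescriptorCount )
      return a == b;
    }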
@@ -85237,17 +88055,19 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- SamplerYcbcrConversionInfo( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ = {} ) VULKAN_HPP_NOEXCEPT
- : conversion( conversion_ )
- {}
+ VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , conversion( conversion_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- SamplerYcbcrConversionInfo( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SamplerYcbcrConversionInfo( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: SamplerYcbcrConversionInfo( *reinterpret_cast<SamplerYcbcrConversionInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
SamplerYcbcrConversionInfo & operator=( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -85265,31 +88085,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo &
- setConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo & setConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ ) VULKAN_HPP_NOEXCEPT
{
conversion = conversion_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSamplerYcbcrConversionInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSamplerYcbcrConversionInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSamplerYcbcrConversionInfo *>( this );
}
- explicit operator VkSamplerYcbcrConversionInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkSamplerYcbcrConversionInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSamplerYcbcrConversionInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -85302,7 +88119,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SamplerYcbcrConversionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conversion == rhs.conversion );
@@ -85320,13 +88137,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo ) ==
- sizeof( VkSamplerYcbcrConversionInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo>::value,
- "SamplerYcbcrConversionInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSamplerYcbcrConversionInfo>
@@ -85346,18 +88156,21 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ScreenSurfaceCreateInfoQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags_ = {},
struct _screen_context * context_ = {},
- struct _screen_window * window_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ struct _screen_window * window_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, context( context_ )
, window( window_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ScreenSurfaceCreateInfoQNX( ScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ScreenSurfaceCreateInfoQNX( ScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ScreenSurfaceCreateInfoQNX( VkScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT
: ScreenSurfaceCreateInfoQNX( *reinterpret_cast<ScreenSurfaceCreateInfoQNX const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ScreenSurfaceCreateInfoQNX & operator=( ScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -85375,39 +88188,36 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX &
- setFlags( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setFlags( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX &
- setContext( struct _screen_context * context_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setContext( struct _screen_context * context_ ) VULKAN_HPP_NOEXCEPT
{
context = context_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX &
- setWindow( struct _screen_window * window_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setWindow( struct _screen_window * window_ ) VULKAN_HPP_NOEXCEPT
{
window = window_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkScreenSurfaceCreateInfoQNX const &() const VULKAN_HPP_NOEXCEPT
+ operator VkScreenSurfaceCreateInfoQNX const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( this );
}
- explicit operator VkScreenSurfaceCreateInfoQNX &() VULKAN_HPP_NOEXCEPT
+ operator VkScreenSurfaceCreateInfoQNX &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkScreenSurfaceCreateInfoQNX *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -85428,11 +88238,10 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( ScreenSurfaceCreateInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( context == rhs.context ) &&
- ( window == rhs.window );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( context == rhs.context ) && ( window == rhs.window );
# endif
}
@@ -85449,13 +88258,6 @@ namespace VULKAN_HPP_NAMESPACE
struct _screen_context * context = {};
struct _screen_window * window = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX ) ==
- sizeof( VkScreenSurfaceCreateInfoQNX ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX>::value,
- "ScreenSurfaceCreateInfoQNX is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eScreenSurfaceCreateInfoQNX>
@@ -85472,16 +88274,17 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- SemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- {}
+ VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : SemaphoreCreateInfo( *reinterpret_cast<SemaphoreCreateInfo const *>( &rhs ) )
- {}
+ SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SemaphoreCreateInfo( *reinterpret_cast<SemaphoreCreateInfo const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
SemaphoreCreateInfo & operator=( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -85499,31 +88302,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SemaphoreCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SemaphoreCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSemaphoreCreateInfo *>( this );
}
- explicit operator VkSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSemaphoreCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -85536,7 +88336,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
@@ -85554,12 +88354,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo>::value,
- "SemaphoreCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSemaphoreCreateInfo>
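(Editor's note, not part of the diff: in the new revision the conversion operators above are no longer declared explicit, so the wrapper binds directly to the corresponding C struct reference. A minimal sketch, assuming a valid VkDevice named device:)

    vk::SemaphoreCreateInfo info{};
    VkSemaphoreCreateInfo const & cInfo = info;  // implicit conversion, no static_cast needed any more
    VkSemaphore semaphore = VK_NULL_HANDLE;
    vkCreateSemaphore( device, &cInfo, nullptr, &semaphore );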
@@ -85575,19 +88369,22 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetFdInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ =
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd )
- VULKAN_HPP_NOEXCEPT
- : semaphore( semaphore_ )
+ VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR(
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , semaphore( semaphore_ )
, handleType( handleType_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: SemaphoreGetFdInfoKHR( *reinterpret_cast<SemaphoreGetFdInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
SemaphoreGetFdInfoKHR & operator=( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -85605,32 +88402,30 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR &
- setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
{
semaphore = semaphore_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR &
- setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSemaphoreGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSemaphoreGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( this );
}
- explicit operator VkSemaphoreGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkSemaphoreGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSemaphoreGetFdInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -85650,11 +88445,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SemaphoreGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) &&
- ( handleType == rhs.handleType );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( handleType == rhs.handleType );
# endif
}
@@ -85665,18 +88459,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetFdInfoKHR;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType =
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetFdInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR>::value,
- "SemaphoreGetFdInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSemaphoreGetFdInfoKHR>
@@ -85694,23 +88481,24 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR(
- VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ =
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : semaphore( semaphore_ )
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , semaphore( semaphore_ )
, handleType( handleType_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- SemaphoreGetWin32HandleInfoKHR( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: SemaphoreGetWin32HandleInfoKHR( *reinterpret_cast<SemaphoreGetWin32HandleInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- SemaphoreGetWin32HandleInfoKHR &
- operator=( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ SemaphoreGetWin32HandleInfoKHR & operator=( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SemaphoreGetWin32HandleInfoKHR & operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -85725,32 +88513,31 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR &
- setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
{
semaphore = semaphore_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR &
- setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSemaphoreGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSemaphoreGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( this );
}
- explicit operator VkSemaphoreGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkSemaphoreGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSemaphoreGetWin32HandleInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -85770,11 +88557,10 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( SemaphoreGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) &&
- ( handleType == rhs.handleType );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( handleType == rhs.handleType );
# endif
}
@@ -85785,20 +88571,11 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType =
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR ) ==
- sizeof( VkSemaphoreGetWin32HandleInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR>::value,
- "SemaphoreGetWin32HandleInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSemaphoreGetWin32HandleInfoKHR>
@@ -85813,31 +88590,30 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkSemaphoreGetZirconHandleInfoFUCHSIA;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SemaphoreGetZirconHandleInfoFUCHSIA(
- VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ =
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : semaphore( semaphore_ )
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , semaphore( semaphore_ )
, handleType( handleType_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR SemaphoreGetZirconHandleInfoFUCHSIA( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR SemaphoreGetZirconHandleInfoFUCHSIA( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SemaphoreGetZirconHandleInfoFUCHSIA( VkSemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
: SemaphoreGetZirconHandleInfoFUCHSIA( *reinterpret_cast<SemaphoreGetZirconHandleInfoFUCHSIA const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- SemaphoreGetZirconHandleInfoFUCHSIA &
- operator=( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ SemaphoreGetZirconHandleInfoFUCHSIA & operator=( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SemaphoreGetZirconHandleInfoFUCHSIA &
- operator=( VkSemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+ SemaphoreGetZirconHandleInfoFUCHSIA & operator=( VkSemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA const *>( &rhs );
return *this;
@@ -85850,32 +88626,31 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA &
- setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
{
semaphore = semaphore_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA &
- setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSemaphoreGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSemaphoreGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( this );
}
- explicit operator VkSemaphoreGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+ operator VkSemaphoreGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSemaphoreGetZirconHandleInfoFUCHSIA *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -85895,11 +88670,10 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) &&
- ( handleType == rhs.handleType );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( handleType == rhs.handleType );
# endif
}
@@ -85910,20 +88684,11 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType =
- VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA ) ==
- sizeof( VkSemaphoreGetZirconHandleInfoFUCHSIA ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA>::value,
- "SemaphoreGetZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA>
@@ -85940,17 +88705,19 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSignalInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
- uint64_t value_ = {} ) VULKAN_HPP_NOEXCEPT
- : semaphore( semaphore_ )
+ VULKAN_HPP_CONSTEXPR
+ SemaphoreSignalInfo( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, uint64_t value_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , semaphore( semaphore_ )
, value( value_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SemaphoreSignalInfo( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : SemaphoreSignalInfo( *reinterpret_cast<SemaphoreSignalInfo const *>( &rhs ) )
- {}
+ SemaphoreSignalInfo( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SemaphoreSignalInfo( *reinterpret_cast<SemaphoreSignalInfo const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
SemaphoreSignalInfo & operator=( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -85968,8 +88735,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo &
- setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
{
semaphore = semaphore_;
return *this;
@@ -85982,24 +88748,21 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSemaphoreSignalInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSemaphoreSignalInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSemaphoreSignalInfo *>( this );
}
- explicit operator VkSemaphoreSignalInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkSemaphoreSignalInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSemaphoreSignalInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::Semaphore const &,
- uint64_t const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, uint64_t const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -86012,11 +88775,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SemaphoreSignalInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) &&
- ( value == rhs.value );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( value == rhs.value );
# endif
}
@@ -86032,12 +88794,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
uint64_t value = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo ) == sizeof( VkSemaphoreSignalInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo>::value,
- "SemaphoreSignalInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSemaphoreSignalInfo>
@@ -86054,21 +88810,24 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSubmitInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfo( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
- uint64_t value_ = {},
- VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask_ = {},
- uint32_t deviceIndex_ = {} ) VULKAN_HPP_NOEXCEPT
- : semaphore( semaphore_ )
+ VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfo( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
+ uint64_t value_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask_ = {},
+ uint32_t deviceIndex_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , semaphore( semaphore_ )
, value( value_ )
, stageMask( stageMask_ )
, deviceIndex( deviceIndex_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfo( SemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SemaphoreSubmitInfo( VkSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : SemaphoreSubmitInfo( *reinterpret_cast<SemaphoreSubmitInfo const *>( &rhs ) )
- {}
+ SemaphoreSubmitInfo( VkSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SemaphoreSubmitInfo( *reinterpret_cast<SemaphoreSubmitInfo const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
SemaphoreSubmitInfo & operator=( SemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -86086,8 +88845,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo &
- setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
{
semaphore = semaphore_;
return *this;
@@ -86099,8 +88857,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo &
- setStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask_ ) VULKAN_HPP_NOEXCEPT
{
stageMask = stageMask_;
return *this;
@@ -86113,17 +88870,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSemaphoreSubmitInfo *>( this );
}
- explicit operator VkSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSemaphoreSubmitInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -86145,11 +88902,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) &&
- ( value == rhs.value ) && ( stageMask == rhs.stageMask ) && ( deviceIndex == rhs.deviceIndex );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( value == rhs.value ) && ( stageMask == rhs.stageMask ) &&
+ ( deviceIndex == rhs.deviceIndex );
# endif
}
@@ -86167,12 +88924,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask = {};
uint32_t deviceIndex = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo ) == sizeof( VkSemaphoreSubmitInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo>::value,
- "SemaphoreSubmitInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSemaphoreSubmitInfo>
@@ -86189,18 +88940,21 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreTypeCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo(
- VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary,
- uint64_t initialValue_ = {} ) VULKAN_HPP_NOEXCEPT
- : semaphoreType( semaphoreType_ )
+ VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary,
+ uint64_t initialValue_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , semaphoreType( semaphoreType_ )
, initialValue( initialValue_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SemaphoreTypeCreateInfo( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: SemaphoreTypeCreateInfo( *reinterpret_cast<SemaphoreTypeCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
SemaphoreTypeCreateInfo & operator=( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -86218,8 +88972,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo &
- setSemaphoreType( VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setSemaphoreType( VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ ) VULKAN_HPP_NOEXCEPT
{
semaphoreType = semaphoreType_;
return *this;
@@ -86232,24 +88985,21 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSemaphoreTypeCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSemaphoreTypeCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSemaphoreTypeCreateInfo *>( this );
}
- explicit operator VkSemaphoreTypeCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkSemaphoreTypeCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSemaphoreTypeCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::SemaphoreType const &,
- uint64_t const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SemaphoreType const &, uint64_t const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -86262,11 +89012,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SemaphoreTypeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphoreType == rhs.semaphoreType ) &&
- ( initialValue == rhs.initialValue );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphoreType == rhs.semaphoreType ) && ( initialValue == rhs.initialValue );
# endif
}
@@ -86282,13 +89031,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary;
uint64_t initialValue = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo ) ==
- sizeof( VkSemaphoreTypeCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo>::value,
- "SemaphoreTypeCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSemaphoreTypeCreateInfo>
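(Editor's note, not part of the diff: the trailing pNext_ parameter added to the constructors above lets a pNext chain be written inline. A minimal sketch creating a timeline semaphore, assuming a valid vk::Device named device and the default exception-enabled configuration:)

    vk::SemaphoreTypeCreateInfo typeInfo( vk::SemaphoreType::eTimeline, /*initialValue*/ 0 );
    vk::SemaphoreCreateInfo     createInfo( /*flags*/ {}, /*pNext*/ &typeInfo );
    vk::Semaphore timeline = device.createSemaphore( createInfo );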
@@ -86308,25 +89050,27 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ = {},
uint32_t semaphoreCount_ = {},
const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores_ = {},
- const uint64_t * pValues_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ const uint64_t * pValues_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, semaphoreCount( semaphoreCount_ )
, pSemaphores( pSemaphores_ )
, pValues( pValues_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SemaphoreWaitInfo( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : SemaphoreWaitInfo( *reinterpret_cast<SemaphoreWaitInfo const *>( &rhs ) )
- {}
+ SemaphoreWaitInfo( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SemaphoreWaitInfo( *reinterpret_cast<SemaphoreWaitInfo const *>( &rhs ) ) {}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- SemaphoreWaitInfo(
- VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & semaphores_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & values_ = {} )
- : flags( flags_ )
+ SemaphoreWaitInfo( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & semaphores_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & values_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, semaphoreCount( static_cast<uint32_t>( semaphores_.size() ) )
, pSemaphores( semaphores_.data() )
, pValues( values_.data() )
@@ -86336,8 +89080,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
if ( semaphores_.size() != values_.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::SemaphoreWaitInfo::SemaphoreWaitInfo: semaphores_.size() != values_.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SemaphoreWaitInfo::SemaphoreWaitInfo: semaphores_.size() != values_.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
@@ -86359,8 +89102,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo &
- setFlags( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -86372,17 +89114,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo &
- setPSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
pSemaphores = pSemaphores_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- SemaphoreWaitInfo & setSemaphores(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & semaphores_ )
- VULKAN_HPP_NOEXCEPT
+ SemaphoreWaitInfo &
+ setSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & semaphores_ ) VULKAN_HPP_NOEXCEPT
{
semaphoreCount = static_cast<uint32_t>( semaphores_.size() );
pSemaphores = semaphores_.data();
@@ -86397,8 +89137,7 @@ namespace VULKAN_HPP_NAMESPACE
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- SemaphoreWaitInfo &
- setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & values_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreWaitInfo & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & values_ ) VULKAN_HPP_NOEXCEPT
{
semaphoreCount = static_cast<uint32_t>( values_.size() );
pValues = values_.data();
@@ -86407,17 +89146,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSemaphoreWaitInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSemaphoreWaitInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSemaphoreWaitInfo *>( this );
}
- explicit operator VkSemaphoreWaitInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkSemaphoreWaitInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSemaphoreWaitInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -86439,12 +89178,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SemaphoreWaitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( semaphoreCount == rhs.semaphoreCount ) && ( pSemaphores == rhs.pSemaphores ) &&
- ( pValues == rhs.pValues );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( semaphoreCount == rhs.semaphoreCount ) &&
+ ( pSemaphores == rhs.pSemaphores ) && ( pValues == rhs.pValues );
# endif
}
@@ -86462,12 +89200,6 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores = {};
const uint64_t * pValues = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo ) == sizeof( VkSemaphoreWaitInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo>::value,
- "SemaphoreWaitInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSemaphoreWaitInfo>
@@ -86483,16 +89215,15 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( uint32_t data_ = {} ) VULKAN_HPP_NOEXCEPT : data( data_ ) {}
- VULKAN_HPP_CONSTEXPR
- SetStateFlagsIndirectCommandNV( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SetStateFlagsIndirectCommandNV( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
: SetStateFlagsIndirectCommandNV( *reinterpret_cast<SetStateFlagsIndirectCommandNV const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- SetStateFlagsIndirectCommandNV &
- operator=( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ SetStateFlagsIndirectCommandNV & operator=( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SetStateFlagsIndirectCommandNV & operator=( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -86508,17 +89239,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSetStateFlagsIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSetStateFlagsIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSetStateFlagsIndirectCommandNV *>( this );
}
- explicit operator VkSetStateFlagsIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+ operator VkSetStateFlagsIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSetStateFlagsIndirectCommandNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -86535,7 +89266,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SetStateFlagsIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( data == rhs.data );
@@ -86551,14 +89282,6 @@ namespace VULKAN_HPP_NAMESPACE
public:
uint32_t data = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV ) ==
- sizeof( VkSetStateFlagsIndirectCommandNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV>::value,
- "SetStateFlagsIndirectCommandNV is not nothrow_move_constructible!" );
struct ShaderModuleCreateInfo
{
@@ -86570,23 +89293,29 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ = {},
size_t codeSize_ = {},
- const uint32_t * pCode_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ const uint32_t * pCode_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, codeSize( codeSize_ )
, pCode( pCode_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ShaderModuleCreateInfo( *reinterpret_cast<ShaderModuleCreateInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & code_ )
- : flags( flags_ ), codeSize( code_.size() * 4 ), pCode( code_.data() )
- {}
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & code_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ ), flags( flags_ ), codeSize( code_.size() * 4 ), pCode( code_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -86605,8 +89334,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo &
- setFlags( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -86625,8 +89353,7 @@ namespace VULKAN_HPP_NAMESPACE
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- ShaderModuleCreateInfo &
- setCode( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & code_ ) VULKAN_HPP_NOEXCEPT
+ ShaderModuleCreateInfo & setCode( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & code_ ) VULKAN_HPP_NOEXCEPT
{
codeSize = code_.size() * 4;
pCode = code_.data();
@@ -86635,17 +89362,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkShaderModuleCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkShaderModuleCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkShaderModuleCreateInfo *>( this );
}
- explicit operator VkShaderModuleCreateInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkShaderModuleCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkShaderModuleCreateInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -86666,11 +89393,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ShaderModuleCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( codeSize == rhs.codeSize ) && ( pCode == rhs.pCode );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( codeSize == rhs.codeSize ) && ( pCode == rhs.pCode );
# endif
}
@@ -86687,13 +89413,6 @@ namespace VULKAN_HPP_NAMESPACE
size_t codeSize = {};
const uint32_t * pCode = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo ) ==
- sizeof( VkShaderModuleCreateInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo>::value,
- "ShaderModuleCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eShaderModuleCreateInfo>
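(Editor's note, not part of the diff: the enhanced-mode constructor above derives codeSize from the proxy as code_.size() * 4, so the byte count never has to be passed by hand. A minimal sketch, assuming a valid vk::Device named device; the loader that fills spirv is hypothetical:)

    std::vector<uint32_t> spirv = loadSpirvWords( "shader.spv" );   // hypothetical helper
    vk::ShaderModuleCreateInfo smci( /*flags*/ {}, spirv );         // codeSize becomes spirv.size() * 4
    vk::ShaderModule shaderModule = device.createShaderModule( smci );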
@@ -86701,73 +89420,156 @@ namespace VULKAN_HPP_NAMESPACE
using Type = ShaderModuleCreateInfo;
};
+ struct ShaderModuleIdentifierEXT
+ {
+ using NativeType = VkShaderModuleIdentifierEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleIdentifierEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR_14 ShaderModuleIdentifierEXT( uint32_t identifierSize_ = {},
+ std::array<uint8_t, VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT> const & identifier_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , identifierSize( identifierSize_ )
+ , identifier( identifier_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 ShaderModuleIdentifierEXT( ShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ShaderModuleIdentifierEXT( VkShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ShaderModuleIdentifierEXT( *reinterpret_cast<ShaderModuleIdentifierEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ ShaderModuleIdentifierEXT & operator=( ShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ ShaderModuleIdentifierEXT & operator=( VkShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT const *>( &rhs );
+ return *this;
+ }
+
+ operator VkShaderModuleIdentifierEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkShaderModuleIdentifierEXT *>( this );
+ }
+
+ operator VkShaderModuleIdentifierEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkShaderModuleIdentifierEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ uint32_t const &,
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT> const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, identifierSize, identifier );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( ShaderModuleIdentifierEXT const & ) const = default;
+#else
+ bool operator==( ShaderModuleIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( identifierSize == rhs.identifierSize ) && ( identifier == rhs.identifier );
+# endif
+ }
+
+ bool operator!=( ShaderModuleIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleIdentifierEXT;
+ void * pNext = {};
+ uint32_t identifierSize = {};
+ VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT> identifier = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eShaderModuleIdentifierEXT>
+ {
+ using Type = ShaderModuleIdentifierEXT;
+ };
+
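+ (Editor's note, not part of the diff: ShaderModuleIdentifierEXT is new in this revision and backs VK_EXT_shader_module_identifier. A minimal sketch of reading an identifier, assuming the extension is enabled and that vulkan.hpp exposes the usual Device::getShaderModuleIdentifierEXT wrapper — that member-function name is assumed here, it is not shown in this hunk:)
+
+     // assumption: VK_EXT_shader_module_identifier is enabled and its entry point is dispatched as usual
+     vk::ShaderModuleIdentifierEXT id = device.getShaderModuleIdentifierEXT( shaderModule );
+     // the first id.identifierSize bytes of id.identifier can be cached and matched later,
+     // letting pipelines be recreated without re-supplying the SPIR-V.
+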
struct ShaderModuleValidationCacheCreateInfoEXT
{
using NativeType = VkShaderModuleValidationCacheCreateInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eShaderModuleValidationCacheCreateInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleValidationCacheCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT(
- VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ = {} ) VULKAN_HPP_NOEXCEPT
- : validationCache( validationCache_ )
- {}
+ VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , validationCache( validationCache_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT(
- ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : ShaderModuleValidationCacheCreateInfoEXT(
- *reinterpret_cast<ShaderModuleValidationCacheCreateInfoEXT const *>( &rhs ) )
- {}
+ ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : ShaderModuleValidationCacheCreateInfoEXT( *reinterpret_cast<ShaderModuleValidationCacheCreateInfoEXT const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- ShaderModuleValidationCacheCreateInfoEXT &
- operator=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ ShaderModuleValidationCacheCreateInfoEXT & operator=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ShaderModuleValidationCacheCreateInfoEXT &
- operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ ShaderModuleValidationCacheCreateInfoEXT & operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 ShaderModuleValidationCacheCreateInfoEXT &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ShaderModuleValidationCacheCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderModuleValidationCacheCreateInfoEXT &
- setValidationCache( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ ) VULKAN_HPP_NOEXCEPT
+ setValidationCache( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ ) VULKAN_HPP_NOEXCEPT
{
validationCache = validationCache_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkShaderModuleValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkShaderModuleValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkShaderModuleValidationCacheCreateInfoEXT *>( this );
}
- explicit operator VkShaderModuleValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkShaderModuleValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkShaderModuleValidationCacheCreateInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::ValidationCacheEXT const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ValidationCacheEXT const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -86780,7 +89582,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( validationCache == rhs.validationCache );
@@ -86798,15 +89600,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT ) ==
- sizeof( VkShaderModuleValidationCacheCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT>::value,
- "ShaderModuleValidationCacheCreateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eShaderModuleValidationCacheCreateInfoEXT>
@@ -86829,13 +89622,15 @@ namespace VULKAN_HPP_NAMESPACE
, ldsSizePerLocalWorkGroup( ldsSizePerLocalWorkGroup_ )
, ldsUsageSizeInBytes( ldsUsageSizeInBytes_ )
, scratchMemUsageInBytes( scratchMemUsageInBytes_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ShaderResourceUsageAMD( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
: ShaderResourceUsageAMD( *reinterpret_cast<ShaderResourceUsageAMD const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ShaderResourceUsageAMD & operator=( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -86846,17 +89641,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkShaderResourceUsageAMD const &() const VULKAN_HPP_NOEXCEPT
+ operator VkShaderResourceUsageAMD const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkShaderResourceUsageAMD *>( this );
}
- explicit operator VkShaderResourceUsageAMD &() VULKAN_HPP_NOEXCEPT
+ operator VkShaderResourceUsageAMD &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkShaderResourceUsageAMD *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -86864,8 +89659,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- numUsedVgprs, numUsedSgprs, ldsSizePerLocalWorkGroup, ldsUsageSizeInBytes, scratchMemUsageInBytes );
+ return std::tie( numUsedVgprs, numUsedSgprs, ldsSizePerLocalWorkGroup, ldsUsageSizeInBytes, scratchMemUsageInBytes );
}
#endif
@@ -86874,13 +89668,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ShaderResourceUsageAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( numUsedVgprs == rhs.numUsedVgprs ) && ( numUsedSgprs == rhs.numUsedSgprs ) &&
- ( ldsSizePerLocalWorkGroup == rhs.ldsSizePerLocalWorkGroup ) &&
- ( ldsUsageSizeInBytes == rhs.ldsUsageSizeInBytes ) &&
- ( scratchMemUsageInBytes == rhs.scratchMemUsageInBytes );
+ return ( numUsedVgprs == rhs.numUsedVgprs ) && ( numUsedSgprs == rhs.numUsedSgprs ) && ( ldsSizePerLocalWorkGroup == rhs.ldsSizePerLocalWorkGroup ) &&
+ ( ldsUsageSizeInBytes == rhs.ldsUsageSizeInBytes ) && ( scratchMemUsageInBytes == rhs.scratchMemUsageInBytes );
# endif
}
@@ -86897,27 +89689,19 @@ namespace VULKAN_HPP_NAMESPACE
size_t ldsUsageSizeInBytes = {};
size_t scratchMemUsageInBytes = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD ) ==
- sizeof( VkShaderResourceUsageAMD ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD>::value,
- "ShaderResourceUsageAMD is not nothrow_move_constructible!" );
struct ShaderStatisticsInfoAMD
{
using NativeType = VkShaderStatisticsInfoAMD;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14
- ShaderStatisticsInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask_ = {},
- VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage_ = {},
- uint32_t numPhysicalVgprs_ = {},
- uint32_t numPhysicalSgprs_ = {},
- uint32_t numAvailableVgprs_ = {},
- uint32_t numAvailableSgprs_ = {},
- std::array<uint32_t, 3> const & computeWorkGroupSize_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask_ = {},
+ VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage_ = {},
+ uint32_t numPhysicalVgprs_ = {},
+ uint32_t numPhysicalSgprs_ = {},
+ uint32_t numAvailableVgprs_ = {},
+ uint32_t numAvailableSgprs_ = {},
+ std::array<uint32_t, 3> const & computeWorkGroupSize_ = {} ) VULKAN_HPP_NOEXCEPT
: shaderStageMask( shaderStageMask_ )
, resourceUsage( resourceUsage_ )
, numPhysicalVgprs( numPhysicalVgprs_ )
@@ -86925,14 +89709,15 @@ namespace VULKAN_HPP_NAMESPACE
, numAvailableVgprs( numAvailableVgprs_ )
, numAvailableSgprs( numAvailableSgprs_ )
, computeWorkGroupSize( computeWorkGroupSize_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14
- ShaderStatisticsInfoAMD( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ShaderStatisticsInfoAMD( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
: ShaderStatisticsInfoAMD( *reinterpret_cast<ShaderStatisticsInfoAMD const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ShaderStatisticsInfoAMD & operator=( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -86943,17 +89728,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkShaderStatisticsInfoAMD const &() const VULKAN_HPP_NOEXCEPT
+ operator VkShaderStatisticsInfoAMD const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkShaderStatisticsInfoAMD *>( this );
}
- explicit operator VkShaderStatisticsInfoAMD &() VULKAN_HPP_NOEXCEPT
+ operator VkShaderStatisticsInfoAMD &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkShaderStatisticsInfoAMD *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -86967,13 +89752,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( shaderStageMask,
- resourceUsage,
- numPhysicalVgprs,
- numPhysicalSgprs,
- numAvailableVgprs,
- numAvailableSgprs,
- computeWorkGroupSize );
+ return std::tie( shaderStageMask, resourceUsage, numPhysicalVgprs, numPhysicalSgprs, numAvailableVgprs, numAvailableSgprs, computeWorkGroupSize );
}
#endif
@@ -86982,12 +89761,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ShaderStatisticsInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( shaderStageMask == rhs.shaderStageMask ) && ( resourceUsage == rhs.resourceUsage ) &&
- ( numPhysicalVgprs == rhs.numPhysicalVgprs ) && ( numPhysicalSgprs == rhs.numPhysicalSgprs ) &&
- ( numAvailableVgprs == rhs.numAvailableVgprs ) && ( numAvailableSgprs == rhs.numAvailableSgprs ) &&
+ return ( shaderStageMask == rhs.shaderStageMask ) && ( resourceUsage == rhs.resourceUsage ) && ( numPhysicalVgprs == rhs.numPhysicalVgprs ) &&
+ ( numPhysicalSgprs == rhs.numPhysicalSgprs ) && ( numAvailableVgprs == rhs.numAvailableVgprs ) && ( numAvailableSgprs == rhs.numAvailableSgprs ) &&
( computeWorkGroupSize == rhs.computeWorkGroupSize );
# endif
}
@@ -87007,62 +89785,53 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t numAvailableSgprs = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> computeWorkGroupSize = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD ) ==
- sizeof( VkShaderStatisticsInfoAMD ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD>::value,
- "ShaderStatisticsInfoAMD is not nothrow_move_constructible!" );
struct SharedPresentSurfaceCapabilitiesKHR
{
using NativeType = VkSharedPresentSurfaceCapabilitiesKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eSharedPresentSurfaceCapabilitiesKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSharedPresentSurfaceCapabilitiesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR(
- VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT
- : sharedPresentSupportedUsageFlags( sharedPresentSupportedUsageFlags_ )
- {}
+ VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , sharedPresentSupportedUsageFlags( sharedPresentSupportedUsageFlags_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( SharedPresentSurfaceCapabilitiesKHR const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SharedPresentSurfaceCapabilitiesKHR( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: SharedPresentSurfaceCapabilitiesKHR( *reinterpret_cast<SharedPresentSurfaceCapabilitiesKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- SharedPresentSurfaceCapabilitiesKHR &
- operator=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ SharedPresentSurfaceCapabilitiesKHR & operator=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SharedPresentSurfaceCapabilitiesKHR &
- operator=( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ SharedPresentSurfaceCapabilitiesKHR & operator=( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR const *>( &rhs );
return *this;
}
- explicit operator VkSharedPresentSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSharedPresentSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSharedPresentSurfaceCapabilitiesKHR *>( this );
}
- explicit operator VkSharedPresentSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkSharedPresentSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSharedPresentSurfaceCapabilitiesKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::
- tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -87075,11 +89844,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SharedPresentSurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( sharedPresentSupportedUsageFlags == rhs.sharedPresentSupportedUsageFlags );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sharedPresentSupportedUsageFlags == rhs.sharedPresentSupportedUsageFlags );
# endif
}
@@ -87090,18 +89858,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSharedPresentSurfaceCapabilitiesKHR;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSharedPresentSurfaceCapabilitiesKHR;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR ) ==
- sizeof( VkSharedPresentSurfaceCapabilitiesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR>::value,
- "SharedPresentSurfaceCapabilitiesKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSharedPresentSurfaceCapabilitiesKHR>
@@ -87114,21 +89874,21 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkSparseImageFormatProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- SparseImageFormatProperties( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
- VULKAN_HPP_NAMESPACE::Extent3D imageGranularity_ = {},
- VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SparseImageFormatProperties( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
+ VULKAN_HPP_NAMESPACE::Extent3D imageGranularity_ = {},
+ VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
: aspectMask( aspectMask_ )
, imageGranularity( imageGranularity_ )
, flags( flags_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- SparseImageFormatProperties( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR SparseImageFormatProperties( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SparseImageFormatProperties( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: SparseImageFormatProperties( *reinterpret_cast<SparseImageFormatProperties const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
SparseImageFormatProperties & operator=( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -87139,23 +89899,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkSparseImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSparseImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSparseImageFormatProperties *>( this );
}
- explicit operator VkSparseImageFormatProperties &() VULKAN_HPP_NOEXCEPT
+ operator VkSparseImageFormatProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSparseImageFormatProperties *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &,
- VULKAN_HPP_NAMESPACE::Extent3D const &,
- VULKAN_HPP_NAMESPACE::SparseImageFormatFlags const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, VULKAN_HPP_NAMESPACE::Extent3D const &, VULKAN_HPP_NAMESPACE::SparseImageFormatFlags const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -87168,7 +89926,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SparseImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( aspectMask == rhs.aspectMask ) && ( imageGranularity == rhs.imageGranularity ) && ( flags == rhs.flags );
@@ -87186,14 +89944,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Extent3D imageGranularity = {};
VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties ) ==
- sizeof( VkSparseImageFormatProperties ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>::value,
- "SparseImageFormatProperties is not nothrow_move_constructible!" );
struct SparseImageFormatProperties2
{
@@ -87203,17 +89953,19 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageFormatProperties2;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2(
- VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties_ = {} ) VULKAN_HPP_NOEXCEPT
- : properties( properties_ )
- {}
+ VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , properties( properties_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- SparseImageFormatProperties2( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SparseImageFormatProperties2( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
: SparseImageFormatProperties2( *reinterpret_cast<SparseImageFormatProperties2 const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
SparseImageFormatProperties2 & operator=( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -87224,23 +89976,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkSparseImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSparseImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSparseImageFormatProperties2 *>( this );
}
- explicit operator VkSparseImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
+ operator VkSparseImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSparseImageFormatProperties2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -87253,7 +90003,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SparseImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties );
@@ -87271,14 +90021,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 ) ==
- sizeof( VkSparseImageFormatProperties2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>::value,
- "SparseImageFormatProperties2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSparseImageFormatProperties2>
@@ -87292,29 +90034,28 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkSparseImageMemoryRequirements;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- SparseImageMemoryRequirements( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties_ = {},
- uint32_t imageMipTailFirstLod_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties_ = {},
+ uint32_t imageMipTailFirstLod_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride_ = {} ) VULKAN_HPP_NOEXCEPT
: formatProperties( formatProperties_ )
, imageMipTailFirstLod( imageMipTailFirstLod_ )
, imageMipTailSize( imageMipTailSize_ )
, imageMipTailOffset( imageMipTailOffset_ )
, imageMipTailStride( imageMipTailStride_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- SparseImageMemoryRequirements( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SparseImageMemoryRequirements( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
: SparseImageMemoryRequirements( *reinterpret_cast<SparseImageMemoryRequirements const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- SparseImageMemoryRequirements &
- operator=( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ SparseImageMemoryRequirements & operator=( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SparseImageMemoryRequirements & operator=( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -87322,17 +90063,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkSparseImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSparseImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSparseImageMemoryRequirements *>( this );
}
- explicit operator VkSparseImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT
+ operator VkSparseImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSparseImageMemoryRequirements *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -87344,8 +90085,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- formatProperties, imageMipTailFirstLod, imageMipTailSize, imageMipTailOffset, imageMipTailStride );
+ return std::tie( formatProperties, imageMipTailFirstLod, imageMipTailSize, imageMipTailOffset, imageMipTailStride );
}
#endif
@@ -87354,7 +90094,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SparseImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( formatProperties == rhs.formatProperties ) && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod ) &&
@@ -87376,14 +90116,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset = {};
VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements ) ==
- sizeof( VkSparseImageMemoryRequirements ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements>::value,
- "SparseImageMemoryRequirements is not nothrow_move_constructible!" );
struct SparseImageMemoryRequirements2
{
@@ -87393,21 +90125,22 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageMemoryRequirements2;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2(
- VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT
- : memoryRequirements( memoryRequirements_ )
- {}
+ VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , memoryRequirements( memoryRequirements_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- SparseImageMemoryRequirements2( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SparseImageMemoryRequirements2( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
: SparseImageMemoryRequirements2( *reinterpret_cast<SparseImageMemoryRequirements2 const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- SparseImageMemoryRequirements2 &
- operator=( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ SparseImageMemoryRequirements2 & operator=( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SparseImageMemoryRequirements2 & operator=( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -87415,23 +90148,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkSparseImageMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSparseImageMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSparseImageMemoryRequirements2 *>( this );
}
- explicit operator VkSparseImageMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
+ operator VkSparseImageMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSparseImageMemoryRequirements2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -87444,7 +90175,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SparseImageMemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryRequirements == rhs.memoryRequirements );
@@ -87458,18 +90189,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageMemoryRequirements2;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageMemoryRequirements2;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 ) ==
- sizeof( VkSparseImageMemoryRequirements2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value,
- "SparseImageMemoryRequirements2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSparseImageMemoryRequirements2>
@@ -87484,30 +90207,29 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkStreamDescriptorSurfaceCreateInfoGGP;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eStreamDescriptorSurfaceCreateInfoGGP;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- StreamDescriptorSurfaceCreateInfoGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ = {},
- GgpStreamDescriptor streamDescriptor_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ = {},
+ GgpStreamDescriptor streamDescriptor_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, streamDescriptor( streamDescriptor_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( StreamDescriptorSurfaceCreateInfoGGP const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;
StreamDescriptorSurfaceCreateInfoGGP( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
: StreamDescriptorSurfaceCreateInfoGGP( *reinterpret_cast<StreamDescriptorSurfaceCreateInfoGGP const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- StreamDescriptorSurfaceCreateInfoGGP &
- operator=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ StreamDescriptorSurfaceCreateInfoGGP & operator=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- StreamDescriptorSurfaceCreateInfoGGP &
- operator=( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
+ StreamDescriptorSurfaceCreateInfoGGP & operator=( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const *>( &rhs );
return *this;
@@ -87521,31 +90243,30 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP &
- setFlags( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ ) VULKAN_HPP_NOEXCEPT
+ setFlags( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP &
- setStreamDescriptor( GgpStreamDescriptor streamDescriptor_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & setStreamDescriptor( GgpStreamDescriptor streamDescriptor_ ) VULKAN_HPP_NOEXCEPT
{
streamDescriptor = streamDescriptor_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkStreamDescriptorSurfaceCreateInfoGGP const &() const VULKAN_HPP_NOEXCEPT
+ operator VkStreamDescriptorSurfaceCreateInfoGGP const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( this );
}
- explicit operator VkStreamDescriptorSurfaceCreateInfoGGP &() VULKAN_HPP_NOEXCEPT
+ operator VkStreamDescriptorSurfaceCreateInfoGGP &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkStreamDescriptorSurfaceCreateInfoGGP *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -87588,19 +90309,11 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP;
+ const void * pNext = {};
VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags = {};
GgpStreamDescriptor streamDescriptor = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP ) ==
- sizeof( VkStreamDescriptorSurfaceCreateInfoGGP ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP>::value,
- "StreamDescriptorSurfaceCreateInfoGGP is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eStreamDescriptorSurfaceCreateInfoGGP>
@@ -87614,25 +90327,24 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkStridedDeviceAddressRegionKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- StridedDeviceAddressRegionKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT
: deviceAddress( deviceAddress_ )
, stride( stride_ )
, size( size_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- StridedDeviceAddressRegionKHR( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
StridedDeviceAddressRegionKHR( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: StridedDeviceAddressRegionKHR( *reinterpret_cast<StridedDeviceAddressRegionKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- StridedDeviceAddressRegionKHR &
- operator=( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ StridedDeviceAddressRegionKHR & operator=( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
StridedDeviceAddressRegionKHR & operator=( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -87641,45 +90353,40 @@ namespace VULKAN_HPP_NAMESPACE
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR &
- setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
{
deviceAddress = deviceAddress_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR &
- setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT
{
stride = stride_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR &
- setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkStridedDeviceAddressRegionKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkStridedDeviceAddressRegionKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( this );
}
- explicit operator VkStridedDeviceAddressRegionKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkStridedDeviceAddressRegionKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkStridedDeviceAddressRegionKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &,
- VULKAN_HPP_NAMESPACE::DeviceSize const &,
- VULKAN_HPP_NAMESPACE::DeviceSize const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -87692,7 +90399,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( StridedDeviceAddressRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( deviceAddress == rhs.deviceAddress ) && ( stride == rhs.stride ) && ( size == rhs.size );
@@ -87710,14 +90417,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::DeviceSize stride = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR ) ==
- sizeof( VkStridedDeviceAddressRegionKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR>::value,
- "StridedDeviceAddressRegionKHR is not nothrow_move_constructible!" );
struct SubmitInfo
{
@@ -87727,39 +90426,37 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- SubmitInfo( uint32_t waitSemaphoreCount_ = {},
- const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {},
- const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask_ = {},
- uint32_t commandBufferCount_ = {},
- const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers_ = {},
- uint32_t signalSemaphoreCount_ = {},
- const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {} ) VULKAN_HPP_NOEXCEPT
- : waitSemaphoreCount( waitSemaphoreCount_ )
+ VULKAN_HPP_CONSTEXPR SubmitInfo( uint32_t waitSemaphoreCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask_ = {},
+ uint32_t commandBufferCount_ = {},
+ const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers_ = {},
+ uint32_t signalSemaphoreCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , waitSemaphoreCount( waitSemaphoreCount_ )
, pWaitSemaphores( pWaitSemaphores_ )
, pWaitDstStageMask( pWaitDstStageMask_ )
, commandBufferCount( commandBufferCount_ )
, pCommandBuffers( pCommandBuffers_ )
, signalSemaphoreCount( signalSemaphoreCount_ )
, pSignalSemaphores( pSignalSemaphores_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR SubmitInfo( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SubmitInfo( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : SubmitInfo( *reinterpret_cast<SubmitInfo const *>( &rhs ) )
- {}
+ SubmitInfo( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SubmitInfo( *reinterpret_cast<SubmitInfo const *>( &rhs ) ) {}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- SubmitInfo(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineStageFlags> const &
- waitDstStageMask_ = {},
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBuffer> const &
- commandBuffers_ = {},
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const &
- signalSemaphores_ = {} )
- : waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) )
+ SubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineStageFlags> const & waitDstStageMask_ = {},
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers_ = {},
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) )
, pWaitSemaphores( waitSemaphores_.data() )
, pWaitDstStageMask( waitDstStageMask_.data() )
, commandBufferCount( static_cast<uint32_t>( commandBuffers_.size() ) )
@@ -87772,8 +90469,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
if ( waitSemaphores_.size() != waitDstStageMask_.size() )
{
- throw LogicError( VULKAN_HPP_NAMESPACE_STRING
- "::SubmitInfo::SubmitInfo: waitSemaphores_.size() != waitDstStageMask_.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SubmitInfo::SubmitInfo: waitSemaphores_.size() != waitDstStageMask_.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
@@ -87801,17 +90497,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SubmitInfo &
- setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
pWaitSemaphores = pWaitSemaphores_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- SubmitInfo & setWaitSemaphores(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_ )
- VULKAN_HPP_NOEXCEPT
+ SubmitInfo &
+ setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
pWaitSemaphores = waitSemaphores_.data();
@@ -87819,17 +90513,15 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 SubmitInfo &
- setPWaitDstStageMask( const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPWaitDstStageMask( const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask_ ) VULKAN_HPP_NOEXCEPT
{
pWaitDstStageMask = pWaitDstStageMask_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- SubmitInfo & setWaitDstStageMask(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineStageFlags> const &
- waitDstStageMask_ ) VULKAN_HPP_NOEXCEPT
+ SubmitInfo & setWaitDstStageMask( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineStageFlags> const & waitDstStageMask_ )
+ VULKAN_HPP_NOEXCEPT
{
waitSemaphoreCount = static_cast<uint32_t>( waitDstStageMask_.size() );
pWaitDstStageMask = waitDstStageMask_.data();
@@ -87843,17 +90535,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SubmitInfo &
- setPCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers_ ) VULKAN_HPP_NOEXCEPT
{
pCommandBuffers = pCommandBuffers_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- SubmitInfo & setCommandBuffers(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers_ )
- VULKAN_HPP_NOEXCEPT
+ SubmitInfo &
+ setCommandBuffers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers_ ) VULKAN_HPP_NOEXCEPT
{
commandBufferCount = static_cast<uint32_t>( commandBuffers_.size() );
pCommandBuffers = commandBuffers_.data();
@@ -87867,17 +90557,15 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SubmitInfo &
- setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
pSignalSemaphores = pSignalSemaphores_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- SubmitInfo & setSignalSemaphores(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ )
- VULKAN_HPP_NOEXCEPT
+ SubmitInfo &
+ setSignalSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
signalSemaphoreCount = static_cast<uint32_t>( signalSemaphores_.size() );
pSignalSemaphores = signalSemaphores_.data();
@@ -87886,17 +90574,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubmitInfo *>( this );
}
- explicit operator VkSubmitInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkSubmitInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSubmitInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -87912,15 +90600,8 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- waitSemaphoreCount,
- pWaitSemaphores,
- pWaitDstStageMask,
- commandBufferCount,
- pCommandBuffers,
- signalSemaphoreCount,
- pSignalSemaphores );
+ return std::tie(
+ sType, pNext, waitSemaphoreCount, pWaitSemaphores, pWaitDstStageMask, commandBufferCount, pCommandBuffers, signalSemaphoreCount, pSignalSemaphores );
}
#endif
@@ -87929,13 +90610,13 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) &&
- ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( pWaitDstStageMask == rhs.pWaitDstStageMask ) &&
- ( commandBufferCount == rhs.commandBufferCount ) && ( pCommandBuffers == rhs.pCommandBuffers ) &&
- ( signalSemaphoreCount == rhs.signalSemaphoreCount ) && ( pSignalSemaphores == rhs.pSignalSemaphores );
+ ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( pWaitDstStageMask == rhs.pWaitDstStageMask ) && ( commandBufferCount == rhs.commandBufferCount ) &&
+ ( pCommandBuffers == rhs.pCommandBuffers ) && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) &&
+ ( pSignalSemaphores == rhs.pSignalSemaphores );
# endif
}
@@ -87956,12 +90637,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t signalSemaphoreCount = {};
const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubmitInfo ) == sizeof( VkSubmitInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubmitInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubmitInfo>::value,
- "SubmitInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSubmitInfo>
@@ -87977,45 +90652,45 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo2;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- SubmitInfo2( VULKAN_HPP_NAMESPACE::SubmitFlags flags_ = {},
- uint32_t waitSemaphoreInfoCount_ = {},
- const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pWaitSemaphoreInfos_ = {},
- uint32_t commandBufferInfoCount_ = {},
- const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos_ = {},
- uint32_t signalSemaphoreInfoCount_ = {},
- const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR SubmitInfo2( VULKAN_HPP_NAMESPACE::SubmitFlags flags_ = {},
+ uint32_t waitSemaphoreInfoCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pWaitSemaphoreInfos_ = {},
+ uint32_t commandBufferInfoCount_ = {},
+ const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos_ = {},
+ uint32_t signalSemaphoreInfoCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, waitSemaphoreInfoCount( waitSemaphoreInfoCount_ )
, pWaitSemaphoreInfos( pWaitSemaphoreInfos_ )
, commandBufferInfoCount( commandBufferInfoCount_ )
, pCommandBufferInfos( pCommandBufferInfos_ )
, signalSemaphoreInfoCount( signalSemaphoreInfoCount_ )
, pSignalSemaphoreInfos( pSignalSemaphoreInfos_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR SubmitInfo2( SubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SubmitInfo2( VkSubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : SubmitInfo2( *reinterpret_cast<SubmitInfo2 const *>( &rhs ) )
- {}
+ SubmitInfo2( VkSubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : SubmitInfo2( *reinterpret_cast<SubmitInfo2 const *>( &rhs ) ) {}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- SubmitInfo2( VULKAN_HPP_NAMESPACE::SubmitFlags flags_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo> const &
- waitSemaphoreInfos_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
- const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo> const & commandBufferInfos_ = {},
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo> const &
- signalSemaphoreInfos_ = {} )
- : flags( flags_ )
+ SubmitInfo2( VULKAN_HPP_NAMESPACE::SubmitFlags flags_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo> const & waitSemaphoreInfos_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo> const & commandBufferInfos_ = {},
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo> const & signalSemaphoreInfos_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, waitSemaphoreInfoCount( static_cast<uint32_t>( waitSemaphoreInfos_.size() ) )
, pWaitSemaphoreInfos( waitSemaphoreInfos_.data() )
, commandBufferInfoCount( static_cast<uint32_t>( commandBufferInfos_.size() ) )
, pCommandBufferInfos( commandBufferInfos_.data() )
, signalSemaphoreInfoCount( static_cast<uint32_t>( signalSemaphoreInfos_.size() ) )
, pSignalSemaphoreInfos( signalSemaphoreInfos_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -88040,15 +90715,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 &
- setWaitSemaphoreInfoCount( uint32_t waitSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setWaitSemaphoreInfoCount( uint32_t waitSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT
{
waitSemaphoreInfoCount = waitSemaphoreInfoCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPWaitSemaphoreInfos(
- const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pWaitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPWaitSemaphoreInfos( const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pWaitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
{
pWaitSemaphoreInfos = pWaitSemaphoreInfos_;
return *this;
@@ -88056,8 +90729,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubmitInfo2 & setWaitSemaphoreInfos(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo> const &
- waitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo> const & waitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
{
waitSemaphoreInfoCount = static_cast<uint32_t>( waitSemaphoreInfos_.size() );
pWaitSemaphoreInfos = waitSemaphoreInfos_.data();
@@ -88065,15 +90737,14 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 &
- setCommandBufferInfoCount( uint32_t commandBufferInfoCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setCommandBufferInfoCount( uint32_t commandBufferInfoCount_ ) VULKAN_HPP_NOEXCEPT
{
commandBufferInfoCount = commandBufferInfoCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPCommandBufferInfos(
- const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 &
+ setPCommandBufferInfos( const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos_ ) VULKAN_HPP_NOEXCEPT
{
pCommandBufferInfos = pCommandBufferInfos_;
return *this;
@@ -88081,8 +90752,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubmitInfo2 & setCommandBufferInfos(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo> const &
- commandBufferInfos_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo> const & commandBufferInfos_ ) VULKAN_HPP_NOEXCEPT
{
commandBufferInfoCount = static_cast<uint32_t>( commandBufferInfos_.size() );
pCommandBufferInfos = commandBufferInfos_.data();
@@ -88090,15 +90760,14 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 &
- setSignalSemaphoreInfoCount( uint32_t signalSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setSignalSemaphoreInfoCount( uint32_t signalSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT
{
signalSemaphoreInfoCount = signalSemaphoreInfoCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPSignalSemaphoreInfos(
- const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 &
+ setPSignalSemaphoreInfos( const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
{
pSignalSemaphoreInfos = pSignalSemaphoreInfos_;
return *this;
@@ -88106,8 +90775,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubmitInfo2 & setSignalSemaphoreInfos(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo> const &
- signalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo> const & signalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
{
signalSemaphoreInfoCount = static_cast<uint32_t>( signalSemaphoreInfos_.size() );
pSignalSemaphoreInfos = signalSemaphoreInfos_.data();
@@ -88116,17 +90784,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
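  // Illustrative usage sketch only (not generated header content): with the enhanced-mode
  // ArrayProxy setters above, a SubmitInfo2 can be populated from local info structs and handed
  // to Queue::submit2. Assumes Vulkan 1.3 / synchronization2 and valid handles named queue,
  // acquireSemaphore, renderSemaphore and commandBuffer:
  //
  //   vk::SemaphoreSubmitInfo     waitInfo( acquireSemaphore, 0, vk::PipelineStageFlagBits2::eColorAttachmentOutput );
  //   vk::SemaphoreSubmitInfo     signalInfo( renderSemaphore, 0, vk::PipelineStageFlagBits2::eAllCommands );
  //   vk::CommandBufferSubmitInfo cmdInfo( commandBuffer );
  //
  //   vk::SubmitInfo2 submitInfo;
  //   submitInfo.setWaitSemaphoreInfos( waitInfo ).setCommandBufferInfos( cmdInfo ).setSignalSemaphoreInfos( signalInfo );
  //   queue.submit2( submitInfo );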
- explicit operator VkSubmitInfo2 const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSubmitInfo2 const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubmitInfo2 *>( this );
}
- explicit operator VkSubmitInfo2 &() VULKAN_HPP_NOEXCEPT
+ operator VkSubmitInfo2 &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSubmitInfo2 *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -88159,15 +90827,12 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SubmitInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( waitSemaphoreInfoCount == rhs.waitSemaphoreInfoCount ) &&
- ( pWaitSemaphoreInfos == rhs.pWaitSemaphoreInfos ) &&
- ( commandBufferInfoCount == rhs.commandBufferInfoCount ) &&
- ( pCommandBufferInfos == rhs.pCommandBufferInfos ) &&
- ( signalSemaphoreInfoCount == rhs.signalSemaphoreInfoCount ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( waitSemaphoreInfoCount == rhs.waitSemaphoreInfoCount ) &&
+ ( pWaitSemaphoreInfos == rhs.pWaitSemaphoreInfos ) && ( commandBufferInfoCount == rhs.commandBufferInfoCount ) &&
+ ( pCommandBufferInfos == rhs.pCommandBufferInfos ) && ( signalSemaphoreInfoCount == rhs.signalSemaphoreInfoCount ) &&
( pSignalSemaphoreInfos == rhs.pSignalSemaphoreInfos );
# endif
}
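    // Note (an interpretation, not generated code): when VULKAN_HPP_USE_REFLECT is defined, the
    // comparison above goes through reflect(), which returns a std::tie over every member, so
    // lhs.reflect() == rhs.reflect() performs the same member-wise comparison as the #else branch.
    // A minimal sketch, assuming VULKAN_HPP_USE_REFLECT is defined:
    //
    //   vk::SubmitInfo2 a, b;
    //   bool equal = ( a.reflect() == b.reflect() );  // true: both are default-initialized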
@@ -88189,12 +90854,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t signalSemaphoreInfoCount = {};
const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubmitInfo2 ) == sizeof( VkSubmitInfo2 ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubmitInfo2>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubmitInfo2>::value,
- "SubmitInfo2 is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSubmitInfo2>
@@ -88211,16 +90870,16 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassBeginInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SubpassBeginInfo( VULKAN_HPP_NAMESPACE::SubpassContents contents_ =
- VULKAN_HPP_NAMESPACE::SubpassContents::eInline ) VULKAN_HPP_NOEXCEPT
- : contents( contents_ )
- {}
+ VULKAN_HPP_CONSTEXPR SubpassBeginInfo( VULKAN_HPP_NAMESPACE::SubpassContents contents_ = VULKAN_HPP_NAMESPACE::SubpassContents::eInline,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , contents( contents_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR SubpassBeginInfo( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SubpassBeginInfo( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : SubpassBeginInfo( *reinterpret_cast<SubpassBeginInfo const *>( &rhs ) )
- {}
+ SubpassBeginInfo( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassBeginInfo( *reinterpret_cast<SubpassBeginInfo const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
SubpassBeginInfo & operator=( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -88238,31 +90897,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo &
- setContents( VULKAN_HPP_NAMESPACE::SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo & setContents( VULKAN_HPP_NAMESPACE::SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT
{
contents = contents_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSubpassBeginInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSubpassBeginInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubpassBeginInfo *>( this );
}
- explicit operator VkSubpassBeginInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkSubpassBeginInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSubpassBeginInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::SubpassContents const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SubpassContents const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -88275,7 +90931,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SubpassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( contents == rhs.contents );
@@ -88293,12 +90949,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::SubpassContents contents = VULKAN_HPP_NAMESPACE::SubpassContents::eInline;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassBeginInfo ) == sizeof( VkSubpassBeginInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassBeginInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassBeginInfo>::value,
- "SubpassBeginInfo is not nothrow_move_constructible!" );
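  // Illustrative usage sketch only (not generated header content): a SubpassBeginInfo selects how
  // subpass contents are recorded when beginning a render pass through the *2 entry points.
  // Assumes a valid vk::CommandBuffer commandBuffer and a filled vk::RenderPassBeginInfo renderPassBegin:
  //
  //   vk::SubpassBeginInfo subpassBegin( vk::SubpassContents::eInline );
  //   commandBuffer.beginRenderPass2( renderPassBegin, subpassBegin );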
template <>
struct CppType<StructureType, StructureType::eSubpassBeginInfo>
@@ -88312,33 +90962,32 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkSubpassDescriptionDepthStencilResolve;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eSubpassDescriptionDepthStencilResolve;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescriptionDepthStencilResolve;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve(
- VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone,
- VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone,
- const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ = {} ) VULKAN_HPP_NOEXCEPT
- : depthResolveMode( depthResolveMode_ )
+ VULKAN_HPP_CONSTEXPR
+ SubpassDescriptionDepthStencilResolve( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone,
+ VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone,
+ const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , depthResolveMode( depthResolveMode_ )
, stencilResolveMode( stencilResolveMode_ )
, pDepthStencilResolveAttachment( pDepthStencilResolveAttachment_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve( SubpassDescriptionDepthStencilResolve const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SubpassDescriptionDepthStencilResolve( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT
- : SubpassDescriptionDepthStencilResolve(
- *reinterpret_cast<SubpassDescriptionDepthStencilResolve const *>( &rhs ) )
- {}
+ : SubpassDescriptionDepthStencilResolve( *reinterpret_cast<SubpassDescriptionDepthStencilResolve const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- SubpassDescriptionDepthStencilResolve &
- operator=( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ SubpassDescriptionDepthStencilResolve & operator=( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SubpassDescriptionDepthStencilResolve &
- operator=( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT
+ SubpassDescriptionDepthStencilResolve & operator=( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve const *>( &rhs );
return *this;
@@ -88352,38 +91001,38 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve &
- setDepthResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ ) VULKAN_HPP_NOEXCEPT
+ setDepthResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ ) VULKAN_HPP_NOEXCEPT
{
depthResolveMode = depthResolveMode_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve &
- setStencilResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ ) VULKAN_HPP_NOEXCEPT
+ setStencilResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ ) VULKAN_HPP_NOEXCEPT
{
stencilResolveMode = stencilResolveMode_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & setPDepthStencilResolveAttachment(
- const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve &
+ setPDepthStencilResolveAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ ) VULKAN_HPP_NOEXCEPT
{
pDepthStencilResolveAttachment = pDepthStencilResolveAttachment_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSubpassDescriptionDepthStencilResolve const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSubpassDescriptionDepthStencilResolve const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubpassDescriptionDepthStencilResolve *>( this );
}
- explicit operator VkSubpassDescriptionDepthStencilResolve &() VULKAN_HPP_NOEXCEPT
+ operator VkSubpassDescriptionDepthStencilResolve &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSubpassDescriptionDepthStencilResolve *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -88404,12 +91053,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SubpassDescriptionDepthStencilResolve const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthResolveMode == rhs.depthResolveMode ) &&
- ( stencilResolveMode == rhs.stencilResolveMode ) &&
- ( pDepthStencilResolveAttachment == rhs.pDepthStencilResolveAttachment );
+ ( stencilResolveMode == rhs.stencilResolveMode ) && ( pDepthStencilResolveAttachment == rhs.pDepthStencilResolveAttachment );
# endif
}
@@ -88420,20 +91068,12 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescriptionDepthStencilResolve;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone;
- VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescriptionDepthStencilResolve;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone;
+ VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone;
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve ) ==
- sizeof( VkSubpassDescriptionDepthStencilResolve ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve>::value,
- "SubpassDescriptionDepthStencilResolve is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSubpassDescriptionDepthStencilResolve>
@@ -88450,13 +91090,11 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassEndInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SubpassEndInfo() VULKAN_HPP_NOEXCEPT {}
+ VULKAN_HPP_CONSTEXPR SubpassEndInfo( const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext( pNext_ ) {}
VULKAN_HPP_CONSTEXPR SubpassEndInfo( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SubpassEndInfo( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : SubpassEndInfo( *reinterpret_cast<SubpassEndInfo const *>( &rhs ) )
- {}
+ SubpassEndInfo( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassEndInfo( *reinterpret_cast<SubpassEndInfo const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
SubpassEndInfo & operator=( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -88475,17 +91113,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSubpassEndInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSubpassEndInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubpassEndInfo *>( this );
}
- explicit operator VkSubpassEndInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkSubpassEndInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSubpassEndInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -88502,7 +91140,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SubpassEndInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext );
@@ -88519,12 +91157,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassEndInfo;
const void * pNext = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassEndInfo ) == sizeof( VkSubpassEndInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassEndInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassEndInfo>::value,
- "SubpassEndInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSubpassEndInfo>
@@ -88538,49 +91170,46 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkSubpassFragmentDensityMapOffsetEndInfoQCOM;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eSubpassFragmentDensityMapOffsetEndInfoQCOM;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassFragmentDensityMapOffsetEndInfoQCOM;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SubpassFragmentDensityMapOffsetEndInfoQCOM(
- uint32_t fragmentDensityOffsetCount_ = {},
- const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets_ = {} ) VULKAN_HPP_NOEXCEPT
- : fragmentDensityOffsetCount( fragmentDensityOffsetCount_ )
+ VULKAN_HPP_CONSTEXPR SubpassFragmentDensityMapOffsetEndInfoQCOM( uint32_t fragmentDensityOffsetCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , fragmentDensityOffsetCount( fragmentDensityOffsetCount_ )
, pFragmentDensityOffsets( pFragmentDensityOffsets_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR SubpassFragmentDensityMapOffsetEndInfoQCOM(
- SubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR SubpassFragmentDensityMapOffsetEndInfoQCOM( SubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SubpassFragmentDensityMapOffsetEndInfoQCOM( VkSubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs )
- VULKAN_HPP_NOEXCEPT
- : SubpassFragmentDensityMapOffsetEndInfoQCOM(
- *reinterpret_cast<SubpassFragmentDensityMapOffsetEndInfoQCOM const *>( &rhs ) )
- {}
+ SubpassFragmentDensityMapOffsetEndInfoQCOM( VkSubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+ : SubpassFragmentDensityMapOffsetEndInfoQCOM( *reinterpret_cast<SubpassFragmentDensityMapOffsetEndInfoQCOM const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubpassFragmentDensityMapOffsetEndInfoQCOM(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Offset2D> const &
- fragmentDensityOffsets_ )
- : fragmentDensityOffsetCount( static_cast<uint32_t>( fragmentDensityOffsets_.size() ) )
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Offset2D> const & fragmentDensityOffsets_, const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , fragmentDensityOffsetCount( static_cast<uint32_t>( fragmentDensityOffsets_.size() ) )
, pFragmentDensityOffsets( fragmentDensityOffsets_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- SubpassFragmentDensityMapOffsetEndInfoQCOM &
- operator=( SubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ SubpassFragmentDensityMapOffsetEndInfoQCOM & operator=( SubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SubpassFragmentDensityMapOffsetEndInfoQCOM &
- operator=( VkSubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+ SubpassFragmentDensityMapOffsetEndInfoQCOM & operator=( VkSubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 SubpassFragmentDensityMapOffsetEndInfoQCOM &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SubpassFragmentDensityMapOffsetEndInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
@@ -88594,7 +91223,7 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 SubpassFragmentDensityMapOffsetEndInfoQCOM &
- setPFragmentDensityOffsets( const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets_ ) VULKAN_HPP_NOEXCEPT
+ setPFragmentDensityOffsets( const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets_ ) VULKAN_HPP_NOEXCEPT
{
pFragmentDensityOffsets = pFragmentDensityOffsets_;
return *this;
@@ -88602,8 +91231,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
SubpassFragmentDensityMapOffsetEndInfoQCOM & setFragmentDensityOffsets(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Offset2D> const &
- fragmentDensityOffsets_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Offset2D> const & fragmentDensityOffsets_ ) VULKAN_HPP_NOEXCEPT
{
fragmentDensityOffsetCount = static_cast<uint32_t>( fragmentDensityOffsets_.size() );
pFragmentDensityOffsets = fragmentDensityOffsets_.data();
@@ -88612,24 +91240,21 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSubpassFragmentDensityMapOffsetEndInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSubpassFragmentDensityMapOffsetEndInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubpassFragmentDensityMapOffsetEndInfoQCOM *>( this );
}
- explicit operator VkSubpassFragmentDensityMapOffsetEndInfoQCOM &() VULKAN_HPP_NOEXCEPT
+ operator VkSubpassFragmentDensityMapOffsetEndInfoQCOM &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSubpassFragmentDensityMapOffsetEndInfoQCOM *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- uint32_t const &,
- const VULKAN_HPP_NAMESPACE::Offset2D * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Offset2D * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -88642,11 +91267,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( fragmentDensityOffsetCount == rhs.fragmentDensityOffsetCount ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityOffsetCount == rhs.fragmentDensityOffsetCount ) &&
( pFragmentDensityOffsets == rhs.pFragmentDensityOffsets );
# endif
}
@@ -88658,20 +91282,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassFragmentDensityMapOffsetEndInfoQCOM;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassFragmentDensityMapOffsetEndInfoQCOM;
+ const void * pNext = {};
uint32_t fragmentDensityOffsetCount = {};
const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM ) ==
- sizeof( VkSubpassFragmentDensityMapOffsetEndInfoQCOM ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM>::value,
- "SubpassFragmentDensityMapOffsetEndInfoQCOM is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSubpassFragmentDensityMapOffsetEndInfoQCOM>
@@ -88679,58 +91294,136 @@ namespace VULKAN_HPP_NAMESPACE
using Type = SubpassFragmentDensityMapOffsetEndInfoQCOM;
};
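  // Illustrative usage sketch only (not generated header content): with VK_QCOM_fragment_density_map_offset
  // the offsets are assumed to be supplied by chaining this struct into the SubpassEndInfo passed to
  // endRenderPass2. Assumes a valid vk::CommandBuffer commandBuffer:
  //
  //   std::array<vk::Offset2D, 1> offsets = { vk::Offset2D( 32, 32 ) };
  //   vk::SubpassFragmentDensityMapOffsetEndInfoQCOM offsetInfo( offsets );
  //   vk::SubpassEndInfo endInfo;
  //   endInfo.setPNext( &offsetInfo );
  //   commandBuffer.endRenderPass2( endInfo );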
+ struct SubpassResolvePerformanceQueryEXT
+ {
+ using NativeType = VkSubpassResolvePerformanceQueryEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassResolvePerformanceQueryEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SubpassResolvePerformanceQueryEXT( VULKAN_HPP_NAMESPACE::Bool32 optimal_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , optimal( optimal_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR SubpassResolvePerformanceQueryEXT( SubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SubpassResolvePerformanceQueryEXT( VkSubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : SubpassResolvePerformanceQueryEXT( *reinterpret_cast<SubpassResolvePerformanceQueryEXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ SubpassResolvePerformanceQueryEXT & operator=( SubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SubpassResolvePerformanceQueryEXT & operator=( VkSubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT const *>( &rhs );
+ return *this;
+ }
+
+ operator VkSubpassResolvePerformanceQueryEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkSubpassResolvePerformanceQueryEXT *>( this );
+ }
+
+ operator VkSubpassResolvePerformanceQueryEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSubpassResolvePerformanceQueryEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, optimal );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( SubpassResolvePerformanceQueryEXT const & ) const = default;
+#else
+ bool operator==( SubpassResolvePerformanceQueryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( optimal == rhs.optimal );
+# endif
+ }
+
+ bool operator!=( SubpassResolvePerformanceQueryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassResolvePerformanceQueryEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 optimal = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eSubpassResolvePerformanceQueryEXT>
+ {
+ using Type = SubpassResolvePerformanceQueryEXT;
+ };
+
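  // Illustrative usage sketch only (not generated header content): SubpassResolvePerformanceQueryEXT is an
  // output structure; the assumption here (per VK_EXT_multisampled_render_to_single_sampled) is that it is
  // chained onto FormatProperties2 when querying a format:
  //
  //   auto chain = physicalDevice.getFormatProperties2<vk::FormatProperties2, vk::SubpassResolvePerformanceQueryEXT>( vk::Format::eR8G8B8A8Unorm );
  //   vk::Bool32 optimalResolve = chain.get<vk::SubpassResolvePerformanceQueryEXT>().optimal;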
struct SubpassShadingPipelineCreateInfoHUAWEI
{
using NativeType = VkSubpassShadingPipelineCreateInfoHUAWEI;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eSubpassShadingPipelineCreateInfoHUAWEI;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassShadingPipelineCreateInfoHUAWEI;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SubpassShadingPipelineCreateInfoHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
- uint32_t subpass_ = {} ) VULKAN_HPP_NOEXCEPT
- : renderPass( renderPass_ )
+ uint32_t subpass_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , renderPass( renderPass_ )
, subpass( subpass_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR SubpassShadingPipelineCreateInfoHUAWEI( SubpassShadingPipelineCreateInfoHUAWEI const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR SubpassShadingPipelineCreateInfoHUAWEI( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SubpassShadingPipelineCreateInfoHUAWEI( VkSubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
- : SubpassShadingPipelineCreateInfoHUAWEI(
- *reinterpret_cast<SubpassShadingPipelineCreateInfoHUAWEI const *>( &rhs ) )
- {}
+ : SubpassShadingPipelineCreateInfoHUAWEI( *reinterpret_cast<SubpassShadingPipelineCreateInfoHUAWEI const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- SubpassShadingPipelineCreateInfoHUAWEI &
- operator=( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ SubpassShadingPipelineCreateInfoHUAWEI & operator=( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SubpassShadingPipelineCreateInfoHUAWEI &
- operator=( VkSubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
+ SubpassShadingPipelineCreateInfoHUAWEI & operator=( VkSubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI const *>( &rhs );
return *this;
}
- explicit operator VkSubpassShadingPipelineCreateInfoHUAWEI const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSubpassShadingPipelineCreateInfoHUAWEI const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSubpassShadingPipelineCreateInfoHUAWEI *>( this );
}
- explicit operator VkSubpassShadingPipelineCreateInfoHUAWEI &() VULKAN_HPP_NOEXCEPT
+ operator VkSubpassShadingPipelineCreateInfoHUAWEI &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSubpassShadingPipelineCreateInfoHUAWEI *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::RenderPass const &,
- uint32_t const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::RenderPass const &, uint32_t const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -88743,11 +91436,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) &&
- ( subpass == rhs.subpass );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) && ( subpass == rhs.subpass );
# endif
}
@@ -88763,15 +91455,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
uint32_t subpass = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI ) ==
- sizeof( VkSubpassShadingPipelineCreateInfoHUAWEI ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI>::value,
- "SubpassShadingPipelineCreateInfoHUAWEI is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSubpassShadingPipelineCreateInfoHUAWEI>
@@ -88779,6 +91462,88 @@ namespace VULKAN_HPP_NAMESPACE
using Type = SubpassShadingPipelineCreateInfoHUAWEI;
};
+ struct SubresourceLayout2EXT
+ {
+ using NativeType = VkSubresourceLayout2EXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubresourceLayout2EXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::SubresourceLayout subresourceLayout_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , subresourceLayout( subresourceLayout_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR SubresourceLayout2EXT( SubresourceLayout2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SubresourceLayout2EXT( VkSubresourceLayout2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : SubresourceLayout2EXT( *reinterpret_cast<SubresourceLayout2EXT const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ SubresourceLayout2EXT & operator=( SubresourceLayout2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SubresourceLayout2EXT & operator=( VkSubresourceLayout2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT const *>( &rhs );
+ return *this;
+ }
+
+ operator VkSubresourceLayout2EXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkSubresourceLayout2EXT *>( this );
+ }
+
+ operator VkSubresourceLayout2EXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSubresourceLayout2EXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SubresourceLayout const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, subresourceLayout );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( SubresourceLayout2EXT const & ) const = default;
+#else
+ bool operator==( SubresourceLayout2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subresourceLayout == rhs.subresourceLayout );
+# endif
+ }
+
+ bool operator!=( SubresourceLayout2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubresourceLayout2EXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::SubresourceLayout subresourceLayout = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eSubresourceLayout2EXT>
+ {
+ using Type = SubresourceLayout2EXT;
+ };
+
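  // Illustrative usage sketch only (not generated header content): SubresourceLayout2EXT is assumed to be
  // filled by VK_EXT_image_compression_control's vkGetImageSubresourceLayout2EXT / Device::getImageSubresourceLayout2EXT.
  // Assumes valid vk::Device device and vk::Image image handles:
  //
  //   vk::ImageSubresource2EXT  subresource( vk::ImageSubresource( vk::ImageAspectFlagBits::eColor, 0, 0 ) );
  //   vk::SubresourceLayout2EXT layout = device.getImageSubresourceLayout2EXT( image, subresource );
  //   vk::DeviceSize rowPitch = layout.subresourceLayout.rowPitch;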
struct SurfaceCapabilities2EXT
{
using NativeType = VkSurfaceCapabilities2EXT;
@@ -88788,19 +91553,20 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT(
- uint32_t minImageCount_ = {},
- uint32_t maxImageCount_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {},
- uint32_t maxImageArrayLayers_ = {},
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {},
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ =
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
- VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {},
- VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {},
- VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters_ = {} ) VULKAN_HPP_NOEXCEPT
- : minImageCount( minImageCount_ )
+ uint32_t minImageCount_ = {},
+ uint32_t maxImageCount_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {},
+ uint32_t maxImageArrayLayers_ = {},
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {},
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
+ VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {},
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {},
+ VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , minImageCount( minImageCount_ )
, maxImageCount( maxImageCount_ )
, currentExtent( currentExtent_ )
, minImageExtent( minImageExtent_ )
@@ -88811,13 +91577,15 @@ namespace VULKAN_HPP_NAMESPACE
, supportedCompositeAlpha( supportedCompositeAlpha_ )
, supportedUsageFlags( supportedUsageFlags_ )
, supportedSurfaceCounters( supportedSurfaceCounters_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SurfaceCapabilities2EXT( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
: SurfaceCapabilities2EXT( *reinterpret_cast<SurfaceCapabilities2EXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
SurfaceCapabilities2EXT & operator=( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -88828,17 +91596,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkSurfaceCapabilities2EXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSurfaceCapabilities2EXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSurfaceCapabilities2EXT *>( this );
}
- explicit operator VkSurfaceCapabilities2EXT &() VULKAN_HPP_NOEXCEPT
+ operator VkSurfaceCapabilities2EXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSurfaceCapabilities2EXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -88879,17 +91647,14 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SurfaceCapabilities2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minImageCount == rhs.minImageCount ) &&
- ( maxImageCount == rhs.maxImageCount ) && ( currentExtent == rhs.currentExtent ) &&
- ( minImageExtent == rhs.minImageExtent ) && ( maxImageExtent == rhs.maxImageExtent ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minImageCount == rhs.minImageCount ) && ( maxImageCount == rhs.maxImageCount ) &&
+ ( currentExtent == rhs.currentExtent ) && ( minImageExtent == rhs.minImageExtent ) && ( maxImageExtent == rhs.maxImageExtent ) &&
( maxImageArrayLayers == rhs.maxImageArrayLayers ) && ( supportedTransforms == rhs.supportedTransforms ) &&
- ( currentTransform == rhs.currentTransform ) &&
- ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) &&
- ( supportedUsageFlags == rhs.supportedUsageFlags ) &&
- ( supportedSurfaceCounters == rhs.supportedSurfaceCounters );
+ ( currentTransform == rhs.currentTransform ) && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) &&
+ ( supportedUsageFlags == rhs.supportedUsageFlags ) && ( supportedSurfaceCounters == rhs.supportedSurfaceCounters );
# endif
}
@@ -88900,28 +91665,20 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2EXT;
- void * pNext = {};
- uint32_t minImageCount = {};
- uint32_t maxImageCount = {};
- VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {};
- VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {};
- VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {};
- uint32_t maxImageArrayLayers = {};
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform =
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
- VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {};
- VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {};
- VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2EXT;
+ void * pNext = {};
+ uint32_t minImageCount = {};
+ uint32_t maxImageCount = {};
+ VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {};
+ VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {};
+ VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {};
+ uint32_t maxImageArrayLayers = {};
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+ VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {};
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {};
+ VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT ) ==
- sizeof( VkSurfaceCapabilities2EXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::value,
- "SurfaceCapabilities2EXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSurfaceCapabilities2EXT>
@@ -88934,18 +91691,17 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkSurfaceCapabilitiesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- SurfaceCapabilitiesKHR( uint32_t minImageCount_ = {},
- uint32_t maxImageCount_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {},
- uint32_t maxImageArrayLayers_ = {},
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {},
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ =
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
- VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {},
- VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR(
+ uint32_t minImageCount_ = {},
+ uint32_t maxImageCount_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {},
+ uint32_t maxImageArrayLayers_ = {},
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {},
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
+ VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {},
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT
: minImageCount( minImageCount_ )
, maxImageCount( maxImageCount_ )
, currentExtent( currentExtent_ )
@@ -88956,13 +91712,15 @@ namespace VULKAN_HPP_NAMESPACE
, currentTransform( currentTransform_ )
, supportedCompositeAlpha( supportedCompositeAlpha_ )
, supportedUsageFlags( supportedUsageFlags_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SurfaceCapabilitiesKHR( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: SurfaceCapabilitiesKHR( *reinterpret_cast<SurfaceCapabilitiesKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
SurfaceCapabilitiesKHR & operator=( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -88973,17 +91731,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSurfaceCapabilitiesKHR *>( this );
}
- explicit operator VkSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSurfaceCapabilitiesKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -89018,15 +91776,13 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( minImageCount == rhs.minImageCount ) && ( maxImageCount == rhs.maxImageCount ) &&
- ( currentExtent == rhs.currentExtent ) && ( minImageExtent == rhs.minImageExtent ) &&
- ( maxImageExtent == rhs.maxImageExtent ) && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) &&
+ return ( minImageCount == rhs.minImageCount ) && ( maxImageCount == rhs.maxImageCount ) && ( currentExtent == rhs.currentExtent ) &&
+ ( minImageExtent == rhs.minImageExtent ) && ( maxImageExtent == rhs.maxImageExtent ) && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) &&
( supportedTransforms == rhs.supportedTransforms ) && ( currentTransform == rhs.currentTransform ) &&
- ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) &&
- ( supportedUsageFlags == rhs.supportedUsageFlags );
+ ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) && ( supportedUsageFlags == rhs.supportedUsageFlags );
# endif
}
@@ -89037,25 +91793,17 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- uint32_t minImageCount = {};
- uint32_t maxImageCount = {};
- VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {};
- VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {};
- VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {};
- uint32_t maxImageArrayLayers = {};
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform =
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
- VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {};
- VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {};
+ uint32_t minImageCount = {};
+ uint32_t maxImageCount = {};
+ VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {};
+ VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {};
+ VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {};
+ uint32_t maxImageArrayLayers = {};
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+ VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {};
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR ) ==
- sizeof( VkSurfaceCapabilitiesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::value,
- "SurfaceCapabilitiesKHR is not nothrow_move_constructible!" );
struct SurfaceCapabilities2KHR
{
@@ -89065,16 +91813,19 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2KHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR(
- VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities_ = {} ) VULKAN_HPP_NOEXCEPT
- : surfaceCapabilities( surfaceCapabilities_ )
- {}
+ VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , surfaceCapabilities( surfaceCapabilities_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SurfaceCapabilities2KHR( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
: SurfaceCapabilities2KHR( *reinterpret_cast<SurfaceCapabilities2KHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
SurfaceCapabilities2KHR & operator=( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -89085,23 +91836,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkSurfaceCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSurfaceCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSurfaceCapabilities2KHR *>( this );
}
- explicit operator VkSurfaceCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
+ operator VkSurfaceCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSurfaceCapabilities2KHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -89114,7 +91863,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SurfaceCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceCapabilities == rhs.surfaceCapabilities );
@@ -89132,13 +91881,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR ) ==
- sizeof( VkSurfaceCapabilities2KHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::value,
- "SurfaceCapabilities2KHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSurfaceCapabilities2KHR>
@@ -89152,30 +91894,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkSurfaceCapabilitiesFullScreenExclusiveEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT(
- VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ = {} ) VULKAN_HPP_NOEXCEPT
- : fullScreenExclusiveSupported( fullScreenExclusiveSupported_ )
- {}
+ VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , fullScreenExclusiveSupported( fullScreenExclusiveSupported_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT(
- SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SurfaceCapabilitiesFullScreenExclusiveEXT( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : SurfaceCapabilitiesFullScreenExclusiveEXT(
- *reinterpret_cast<SurfaceCapabilitiesFullScreenExclusiveEXT const *>( &rhs ) )
- {}
+ SurfaceCapabilitiesFullScreenExclusiveEXT( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : SurfaceCapabilitiesFullScreenExclusiveEXT( *reinterpret_cast<SurfaceCapabilitiesFullScreenExclusiveEXT const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- SurfaceCapabilitiesFullScreenExclusiveEXT &
- operator=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SurfaceCapabilitiesFullScreenExclusiveEXT &
- operator=( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT const *>( &rhs );
return *this;
@@ -89189,24 +91928,24 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesFullScreenExclusiveEXT &
- setFullScreenExclusiveSupported( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ ) VULKAN_HPP_NOEXCEPT
+ setFullScreenExclusiveSupported( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ ) VULKAN_HPP_NOEXCEPT
{
fullScreenExclusiveSupported = fullScreenExclusiveSupported_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSurfaceCapabilitiesFullScreenExclusiveEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSurfaceCapabilitiesFullScreenExclusiveEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSurfaceCapabilitiesFullScreenExclusiveEXT *>( this );
}
- explicit operator VkSurfaceCapabilitiesFullScreenExclusiveEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkSurfaceCapabilitiesFullScreenExclusiveEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSurfaceCapabilitiesFullScreenExclusiveEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -89223,11 +91962,10 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( fullScreenExclusiveSupported == rhs.fullScreenExclusiveSupported );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fullScreenExclusiveSupported == rhs.fullScreenExclusiveSupported );
# endif
}
@@ -89238,19 +91976,10 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT ) ==
- sizeof( VkSurfaceCapabilitiesFullScreenExclusiveEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT>::value,
- "SurfaceCapabilitiesFullScreenExclusiveEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT>
@@ -89259,24 +91988,120 @@ namespace VULKAN_HPP_NAMESPACE
};
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ struct SurfaceCapabilitiesPresentBarrierNV
+ {
+ using NativeType = VkSurfaceCapabilitiesPresentBarrierNV;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilitiesPresentBarrierNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesPresentBarrierNV( VULKAN_HPP_NAMESPACE::Bool32 presentBarrierSupported_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , presentBarrierSupported( presentBarrierSupported_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesPresentBarrierNV( SurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SurfaceCapabilitiesPresentBarrierNV( VkSurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : SurfaceCapabilitiesPresentBarrierNV( *reinterpret_cast<SurfaceCapabilitiesPresentBarrierNV const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ SurfaceCapabilitiesPresentBarrierNV & operator=( SurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SurfaceCapabilitiesPresentBarrierNV & operator=( VkSurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesPresentBarrierNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesPresentBarrierNV &
+ setPresentBarrierSupported( VULKAN_HPP_NAMESPACE::Bool32 presentBarrierSupported_ ) VULKAN_HPP_NOEXCEPT
+ {
+ presentBarrierSupported = presentBarrierSupported_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkSurfaceCapabilitiesPresentBarrierNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkSurfaceCapabilitiesPresentBarrierNV *>( this );
+ }
+
+ operator VkSurfaceCapabilitiesPresentBarrierNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSurfaceCapabilitiesPresentBarrierNV *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, presentBarrierSupported );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( SurfaceCapabilitiesPresentBarrierNV const & ) const = default;
+#else
+ bool operator==( SurfaceCapabilitiesPresentBarrierNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentBarrierSupported == rhs.presentBarrierSupported );
+# endif
+ }
+
+ bool operator!=( SurfaceCapabilitiesPresentBarrierNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesPresentBarrierNV;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 presentBarrierSupported = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eSurfaceCapabilitiesPresentBarrierNV>
+ {
+ using Type = SurfaceCapabilitiesPresentBarrierNV;
+ };
+
struct SurfaceFormatKHR
{
using NativeType = VkSurfaceFormatKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR
- SurfaceFormatKHR( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace_ =
- VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear ) VULKAN_HPP_NOEXCEPT
+ SurfaceFormatKHR( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear ) VULKAN_HPP_NOEXCEPT
: format( format_ )
, colorSpace( colorSpace_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR SurfaceFormatKHR( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SurfaceFormatKHR( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : SurfaceFormatKHR( *reinterpret_cast<SurfaceFormatKHR const *>( &rhs ) )
- {}
+ SurfaceFormatKHR( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT : SurfaceFormatKHR( *reinterpret_cast<SurfaceFormatKHR const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
SurfaceFormatKHR & operator=( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -89287,17 +92112,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkSurfaceFormatKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSurfaceFormatKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSurfaceFormatKHR *>( this );
}
- explicit operator VkSurfaceFormatKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkSurfaceFormatKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSurfaceFormatKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -89314,7 +92139,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( format == rhs.format ) && ( colorSpace == rhs.colorSpace );
@@ -89331,12 +92156,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>::value,
- "SurfaceFormatKHR is not nothrow_move_constructible!" );
struct SurfaceFormat2KHR
{
@@ -89346,16 +92165,15 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFormat2KHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- SurfaceFormat2KHR( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat_ = {} ) VULKAN_HPP_NOEXCEPT
- : surfaceFormat( surfaceFormat_ )
- {}
+ VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , surfaceFormat( surfaceFormat_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SurfaceFormat2KHR( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : SurfaceFormat2KHR( *reinterpret_cast<SurfaceFormat2KHR const *>( &rhs ) )
- {}
+ SurfaceFormat2KHR( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT : SurfaceFormat2KHR( *reinterpret_cast<SurfaceFormat2KHR const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
SurfaceFormat2KHR & operator=( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -89366,22 +92184,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkSurfaceFormat2KHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSurfaceFormat2KHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSurfaceFormat2KHR *>( this );
}
- explicit operator VkSurfaceFormat2KHR &() VULKAN_HPP_NOEXCEPT
+ operator VkSurfaceFormat2KHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSurfaceFormat2KHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::
- tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -89394,7 +92211,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceFormat == rhs.surfaceFormat );
@@ -89412,12 +92229,6 @@ namespace VULKAN_HPP_NAMESPACE
void * pNext = {};
VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>::value,
- "SurfaceFormat2KHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSurfaceFormat2KHR>
@@ -89431,26 +92242,26 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkSurfaceFullScreenExclusiveInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eSurfaceFullScreenExclusiveInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFullScreenExclusiveInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- SurfaceFullScreenExclusiveInfoEXT( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ =
- VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault ) VULKAN_HPP_NOEXCEPT
- : fullScreenExclusive( fullScreenExclusive_ )
- {}
+ VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT(
+ VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault,
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , fullScreenExclusive( fullScreenExclusive_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- SurfaceFullScreenExclusiveInfoEXT( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SurfaceFullScreenExclusiveInfoEXT( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: SurfaceFullScreenExclusiveInfoEXT( *reinterpret_cast<SurfaceFullScreenExclusiveInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- SurfaceFullScreenExclusiveInfoEXT &
- operator=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ SurfaceFullScreenExclusiveInfoEXT & operator=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SurfaceFullScreenExclusiveInfoEXT & operator=( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -89466,30 +92277,28 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveInfoEXT &
- setFullScreenExclusive( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ ) VULKAN_HPP_NOEXCEPT
+ setFullScreenExclusive( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ ) VULKAN_HPP_NOEXCEPT
{
fullScreenExclusive = fullScreenExclusive_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSurfaceFullScreenExclusiveInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSurfaceFullScreenExclusiveInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSurfaceFullScreenExclusiveInfoEXT *>( this );
}
- explicit operator VkSurfaceFullScreenExclusiveInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkSurfaceFullScreenExclusiveInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSurfaceFullScreenExclusiveInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -89502,7 +92311,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( SurfaceFullScreenExclusiveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fullScreenExclusive == rhs.fullScreenExclusive );
@@ -89516,19 +92325,10 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveInfoEXT;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive =
- VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveInfoEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT ) ==
- sizeof( VkSurfaceFullScreenExclusiveInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT>::value,
- "SurfaceFullScreenExclusiveInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSurfaceFullScreenExclusiveInfoEXT>
@@ -89543,28 +92343,26 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkSurfaceFullScreenExclusiveWin32InfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( HMONITOR hmonitor_ = {} ) VULKAN_HPP_NOEXCEPT
- : hmonitor( hmonitor_ )
- {}
+ VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( HMONITOR hmonitor_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , hmonitor( hmonitor_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SurfaceFullScreenExclusiveWin32InfoEXT( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : SurfaceFullScreenExclusiveWin32InfoEXT(
- *reinterpret_cast<SurfaceFullScreenExclusiveWin32InfoEXT const *>( &rhs ) )
- {}
+ : SurfaceFullScreenExclusiveWin32InfoEXT( *reinterpret_cast<SurfaceFullScreenExclusiveWin32InfoEXT const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- SurfaceFullScreenExclusiveWin32InfoEXT &
- operator=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ SurfaceFullScreenExclusiveWin32InfoEXT & operator=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SurfaceFullScreenExclusiveWin32InfoEXT &
- operator=( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ SurfaceFullScreenExclusiveWin32InfoEXT & operator=( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT const *>( &rhs );
return *this;
@@ -89577,25 +92375,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveWin32InfoEXT &
- setHmonitor( HMONITOR hmonitor_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveWin32InfoEXT & setHmonitor( HMONITOR hmonitor_ ) VULKAN_HPP_NOEXCEPT
{
hmonitor = hmonitor_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSurfaceFullScreenExclusiveWin32InfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSurfaceFullScreenExclusiveWin32InfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSurfaceFullScreenExclusiveWin32InfoEXT *>( this );
}
- explicit operator VkSurfaceFullScreenExclusiveWin32InfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkSurfaceFullScreenExclusiveWin32InfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSurfaceFullScreenExclusiveWin32InfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -89612,7 +92409,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hmonitor == rhs.hmonitor );
@@ -89630,15 +92427,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
HMONITOR hmonitor = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT ) ==
- sizeof( VkSurfaceFullScreenExclusiveWin32InfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT>::value,
- "SurfaceFullScreenExclusiveWin32InfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT>
@@ -89655,21 +92443,22 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceProtectedCapabilitiesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- SurfaceProtectedCapabilitiesKHR( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ = {} ) VULKAN_HPP_NOEXCEPT
- : supportsProtected( supportsProtected_ )
- {}
+ VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , supportsProtected( supportsProtected_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- SurfaceProtectedCapabilitiesKHR( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SurfaceProtectedCapabilitiesKHR( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: SurfaceProtectedCapabilitiesKHR( *reinterpret_cast<SurfaceProtectedCapabilitiesKHR const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- SurfaceProtectedCapabilitiesKHR &
- operator=( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ SurfaceProtectedCapabilitiesKHR & operator=( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SurfaceProtectedCapabilitiesKHR & operator=( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -89684,25 +92473,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SurfaceProtectedCapabilitiesKHR &
- setSupportsProtected( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SurfaceProtectedCapabilitiesKHR & setSupportsProtected( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ ) VULKAN_HPP_NOEXCEPT
{
supportsProtected = supportsProtected_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSurfaceProtectedCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSurfaceProtectedCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSurfaceProtectedCapabilitiesKHR *>( this );
}
- explicit operator VkSurfaceProtectedCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkSurfaceProtectedCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSurfaceProtectedCapabilitiesKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -89719,7 +92507,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SurfaceProtectedCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportsProtected == rhs.supportsProtected );
@@ -89737,14 +92525,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 supportsProtected = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR ) ==
- sizeof( VkSurfaceProtectedCapabilitiesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR>::value,
- "SurfaceProtectedCapabilitiesKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSurfaceProtectedCapabilitiesKHR>
@@ -89760,21 +92540,22 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCounterCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT(
- VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ = {} ) VULKAN_HPP_NOEXCEPT
- : surfaceCounters( surfaceCounters_ )
- {}
+ VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , surfaceCounters( surfaceCounters_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- SwapchainCounterCreateInfoEXT( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: SwapchainCounterCreateInfoEXT( *reinterpret_cast<SwapchainCounterCreateInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- SwapchainCounterCreateInfoEXT &
- operator=( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ SwapchainCounterCreateInfoEXT & operator=( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SwapchainCounterCreateInfoEXT & operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -89790,30 +92571,28 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 SwapchainCounterCreateInfoEXT &
- setSurfaceCounters( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ ) VULKAN_HPP_NOEXCEPT
+ setSurfaceCounters( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ ) VULKAN_HPP_NOEXCEPT
{
surfaceCounters = surfaceCounters_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSwapchainCounterCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSwapchainCounterCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSwapchainCounterCreateInfoEXT *>( this );
}
- explicit operator VkSwapchainCounterCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkSwapchainCounterCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSwapchainCounterCreateInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -89826,7 +92605,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SwapchainCounterCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceCounters == rhs.surfaceCounters );
@@ -89844,14 +92623,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT ) ==
- sizeof( VkSwapchainCounterCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT>::value,
- "SwapchainCounterCreateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSwapchainCounterCreateInfoEXT>
@@ -89867,26 +92638,26 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR(
- VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ = {},
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {},
- uint32_t minImageCount_ = {},
- VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear,
- VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {},
- uint32_t imageArrayLayers_ = {},
- VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {},
- VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
- uint32_t queueFamilyIndexCount_ = {},
- const uint32_t * pQueueFamilyIndices_ = {},
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ =
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
- VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ =
- VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque,
- VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate,
- VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {},
- VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR
+ SwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ = {},
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {},
+ uint32_t minImageCount_ = {},
+ VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear,
+ VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {},
+ uint32_t imageArrayLayers_ = {},
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {},
+ VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
+ uint32_t queueFamilyIndexCount_ = {},
+ const uint32_t * pQueueFamilyIndices_ = {},
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
+ VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque,
+ VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate,
+ VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {},
+ VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, surface( surface_ )
, minImageCount( minImageCount_ )
, imageFormat( imageFormat_ )
@@ -89902,34 +92673,35 @@ namespace VULKAN_HPP_NAMESPACE
, presentMode( presentMode_ )
, clipped( clipped_ )
, oldSwapchain( oldSwapchain_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: SwapchainCreateInfoKHR( *reinterpret_cast<SwapchainCreateInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- SwapchainCreateInfoKHR(
- VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_,
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface_,
- uint32_t minImageCount_,
- VULKAN_HPP_NAMESPACE::Format imageFormat_,
- VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_,
- VULKAN_HPP_NAMESPACE::Extent2D imageExtent_,
- uint32_t imageArrayLayers_,
- VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_,
- VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_,
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ =
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
- VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ =
- VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque,
- VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate,
- VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {},
- VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {} )
- : flags( flags_ )
+ SwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_,
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface_,
+ uint32_t minImageCount_,
+ VULKAN_HPP_NAMESPACE::Format imageFormat_,
+ VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_,
+ VULKAN_HPP_NAMESPACE::Extent2D imageExtent_,
+ uint32_t imageArrayLayers_,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_,
+ VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_,
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
+ VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque,
+ VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate,
+ VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {},
+ VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, surface( surface_ )
, minImageCount( minImageCount_ )
, imageFormat( imageFormat_ )
@@ -89945,7 +92717,8 @@ namespace VULKAN_HPP_NAMESPACE
, presentMode( presentMode_ )
, clipped( clipped_ )
, oldSwapchain( oldSwapchain_ )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -89964,15 +92737,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &
- setFlags( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &
- setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
{
surface = surface_;
return *this;
@@ -89984,65 +92755,57 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &
- setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT
{
imageFormat = imageFormat_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &
- setImageColorSpace( VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageColorSpace( VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ ) VULKAN_HPP_NOEXCEPT
{
imageColorSpace = imageColorSpace_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &
- setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
{
imageExtent = imageExtent_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &
- setImageArrayLayers( uint32_t imageArrayLayers_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageArrayLayers( uint32_t imageArrayLayers_ ) VULKAN_HPP_NOEXCEPT
{
imageArrayLayers = imageArrayLayers_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &
- setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT
{
imageUsage = imageUsage_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &
- setImageSharingMode( VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageSharingMode( VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ ) VULKAN_HPP_NOEXCEPT
{
imageSharingMode = imageSharingMode_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &
- setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
{
queueFamilyIndexCount = queueFamilyIndexCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &
- setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
{
pQueueFamilyIndices = pQueueFamilyIndices_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- SwapchainCreateInfoKHR & setQueueFamilyIndices(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+ SwapchainCreateInfoKHR &
+ setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
{
queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
pQueueFamilyIndices = queueFamilyIndices_.data();
@@ -90050,53 +92813,48 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &
- setPreTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPreTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ ) VULKAN_HPP_NOEXCEPT
{
preTransform = preTransform_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &
- setCompositeAlpha( VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setCompositeAlpha( VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ ) VULKAN_HPP_NOEXCEPT
{
compositeAlpha = compositeAlpha_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &
- setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT
{
presentMode = presentMode_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &
- setClipped( VULKAN_HPP_NAMESPACE::Bool32 clipped_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setClipped( VULKAN_HPP_NAMESPACE::Bool32 clipped_ ) VULKAN_HPP_NOEXCEPT
{
clipped = clipped_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &
- setOldSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setOldSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ ) VULKAN_HPP_NOEXCEPT
{
oldSwapchain = oldSwapchain_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSwapchainCreateInfoKHR *>( this );
}
- explicit operator VkSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSwapchainCreateInfoKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -90147,17 +92905,15 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( surface == rhs.surface ) &&
- ( minImageCount == rhs.minImageCount ) && ( imageFormat == rhs.imageFormat ) &&
- ( imageColorSpace == rhs.imageColorSpace ) && ( imageExtent == rhs.imageExtent ) &&
- ( imageArrayLayers == rhs.imageArrayLayers ) && ( imageUsage == rhs.imageUsage ) &&
+ ( minImageCount == rhs.minImageCount ) && ( imageFormat == rhs.imageFormat ) && ( imageColorSpace == rhs.imageColorSpace ) &&
+ ( imageExtent == rhs.imageExtent ) && ( imageArrayLayers == rhs.imageArrayLayers ) && ( imageUsage == rhs.imageUsage ) &&
( imageSharingMode == rhs.imageSharingMode ) && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) &&
- ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) && ( preTransform == rhs.preTransform ) &&
- ( compositeAlpha == rhs.compositeAlpha ) && ( presentMode == rhs.presentMode ) &&
- ( clipped == rhs.clipped ) && ( oldSwapchain == rhs.oldSwapchain );
+ ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) && ( preTransform == rhs.preTransform ) && ( compositeAlpha == rhs.compositeAlpha ) &&
+ ( presentMode == rhs.presentMode ) && ( clipped == rhs.clipped ) && ( oldSwapchain == rhs.oldSwapchain );
# endif
}
@@ -90168,34 +92924,25 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCreateInfoKHR;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags = {};
- VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {};
- uint32_t minImageCount = {};
- VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
- VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
- VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {};
- uint32_t imageArrayLayers = {};
- VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {};
- VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
- uint32_t queueFamilyIndexCount = {};
- const uint32_t * pQueueFamilyIndices = {};
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform =
- VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
- VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha =
- VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque;
- VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate;
- VULKAN_HPP_NAMESPACE::Bool32 clipped = {};
- VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCreateInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags = {};
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {};
+ uint32_t minImageCount = {};
+ VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
+ VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {};
+ uint32_t imageArrayLayers = {};
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {};
+ VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
+ uint32_t queueFamilyIndexCount = {};
+ const uint32_t * pQueueFamilyIndices = {};
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+ VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque;
+ VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate;
+ VULKAN_HPP_NAMESPACE::Bool32 clipped = {};
+ VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR ) ==
- sizeof( VkSwapchainCreateInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR>::value,
- "SwapchainCreateInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSwapchainCreateInfoKHR>
@@ -90208,28 +92955,27 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkSwapchainDisplayNativeHdrCreateInfoAMD;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ = {} )
- VULKAN_HPP_NOEXCEPT : localDimmingEnable( localDimmingEnable_ )
- {}
+ VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , localDimmingEnable( localDimmingEnable_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SwapchainDisplayNativeHdrCreateInfoAMD( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- : SwapchainDisplayNativeHdrCreateInfoAMD(
- *reinterpret_cast<SwapchainDisplayNativeHdrCreateInfoAMD const *>( &rhs ) )
- {}
+ : SwapchainDisplayNativeHdrCreateInfoAMD( *reinterpret_cast<SwapchainDisplayNativeHdrCreateInfoAMD const *>( &rhs ) )
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- SwapchainDisplayNativeHdrCreateInfoAMD &
- operator=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ SwapchainDisplayNativeHdrCreateInfoAMD & operator=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- SwapchainDisplayNativeHdrCreateInfoAMD &
- operator=( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ SwapchainDisplayNativeHdrCreateInfoAMD & operator=( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD const *>( &rhs );
return *this;
@@ -90243,24 +92989,24 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 SwapchainDisplayNativeHdrCreateInfoAMD &
- setLocalDimmingEnable( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ ) VULKAN_HPP_NOEXCEPT
+ setLocalDimmingEnable( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ ) VULKAN_HPP_NOEXCEPT
{
localDimmingEnable = localDimmingEnable_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkSwapchainDisplayNativeHdrCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT
+ operator VkSwapchainDisplayNativeHdrCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSwapchainDisplayNativeHdrCreateInfoAMD *>( this );
}
- explicit operator VkSwapchainDisplayNativeHdrCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
+ operator VkSwapchainDisplayNativeHdrCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSwapchainDisplayNativeHdrCreateInfoAMD *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -90277,7 +93023,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( localDimmingEnable == rhs.localDimmingEnable );
@@ -90295,15 +93041,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD ) ==
- sizeof( VkSwapchainDisplayNativeHdrCreateInfoAMD ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD>::value,
- "SwapchainDisplayNativeHdrCreateInfoAMD is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD>
@@ -90311,49 +93048,146 @@ namespace VULKAN_HPP_NAMESPACE
using Type = SwapchainDisplayNativeHdrCreateInfoAMD;
};
+ struct SwapchainPresentBarrierCreateInfoNV
+ {
+ using NativeType = VkSwapchainPresentBarrierCreateInfoNV;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainPresentBarrierCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR SwapchainPresentBarrierCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 presentBarrierEnable_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , presentBarrierEnable( presentBarrierEnable_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR SwapchainPresentBarrierCreateInfoNV( SwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SwapchainPresentBarrierCreateInfoNV( VkSwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : SwapchainPresentBarrierCreateInfoNV( *reinterpret_cast<SwapchainPresentBarrierCreateInfoNV const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ SwapchainPresentBarrierCreateInfoNV & operator=( SwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ SwapchainPresentBarrierCreateInfoNV & operator=( VkSwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 SwapchainPresentBarrierCreateInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 SwapchainPresentBarrierCreateInfoNV &
+ setPresentBarrierEnable( VULKAN_HPP_NAMESPACE::Bool32 presentBarrierEnable_ ) VULKAN_HPP_NOEXCEPT
+ {
+ presentBarrierEnable = presentBarrierEnable_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkSwapchainPresentBarrierCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkSwapchainPresentBarrierCreateInfoNV *>( this );
+ }
+
+ operator VkSwapchainPresentBarrierCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkSwapchainPresentBarrierCreateInfoNV *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, presentBarrierEnable );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( SwapchainPresentBarrierCreateInfoNV const & ) const = default;
+#else
+ bool operator==( SwapchainPresentBarrierCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentBarrierEnable == rhs.presentBarrierEnable );
+# endif
+ }
+
+ bool operator!=( SwapchainPresentBarrierCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainPresentBarrierCreateInfoNV;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 presentBarrierEnable = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eSwapchainPresentBarrierCreateInfoNV>
+ {
+ using Type = SwapchainPresentBarrierCreateInfoNV;
+ };
+
struct TextureLODGatherFormatPropertiesAMD
{
using NativeType = VkTextureLODGatherFormatPropertiesAMD;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eTextureLodGatherFormatPropertiesAMD;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTextureLodGatherFormatPropertiesAMD;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD(
- VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD_ = {} ) VULKAN_HPP_NOEXCEPT
- : supportsTextureGatherLODBiasAMD( supportsTextureGatherLODBiasAMD_ )
- {}
+ VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , supportsTextureGatherLODBiasAMD( supportsTextureGatherLODBiasAMD_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( TextureLODGatherFormatPropertiesAMD const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
TextureLODGatherFormatPropertiesAMD( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
: TextureLODGatherFormatPropertiesAMD( *reinterpret_cast<TextureLODGatherFormatPropertiesAMD const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- TextureLODGatherFormatPropertiesAMD &
- operator=( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ TextureLODGatherFormatPropertiesAMD & operator=( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- TextureLODGatherFormatPropertiesAMD &
- operator=( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ TextureLODGatherFormatPropertiesAMD & operator=( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD const *>( &rhs );
return *this;
}
- explicit operator VkTextureLODGatherFormatPropertiesAMD const &() const VULKAN_HPP_NOEXCEPT
+ operator VkTextureLODGatherFormatPropertiesAMD const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkTextureLODGatherFormatPropertiesAMD *>( this );
}
- explicit operator VkTextureLODGatherFormatPropertiesAMD &() VULKAN_HPP_NOEXCEPT
+ operator VkTextureLODGatherFormatPropertiesAMD &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkTextureLODGatherFormatPropertiesAMD *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -90370,11 +93204,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( TextureLODGatherFormatPropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( supportsTextureGatherLODBiasAMD == rhs.supportsTextureGatherLODBiasAMD );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportsTextureGatherLODBiasAMD == rhs.supportsTextureGatherLODBiasAMD );
# endif
}
@@ -90385,18 +93218,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTextureLodGatherFormatPropertiesAMD;
- void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTextureLodGatherFormatPropertiesAMD;
+ void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD ) ==
- sizeof( VkTextureLODGatherFormatPropertiesAMD ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD>::value,
- "TextureLODGatherFormatPropertiesAMD is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eTextureLodGatherFormatPropertiesAMD>
@@ -90404,6 +93229,122 @@ namespace VULKAN_HPP_NAMESPACE
using Type = TextureLODGatherFormatPropertiesAMD;
};
+ struct TilePropertiesQCOM
+ {
+ using NativeType = VkTilePropertiesQCOM;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTilePropertiesQCOM;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR TilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Extent3D tileSize_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D apronSize_ = {},
+ VULKAN_HPP_NAMESPACE::Offset2D origin_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , tileSize( tileSize_ )
+ , apronSize( apronSize_ )
+ , origin( origin_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR TilePropertiesQCOM( TilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ TilePropertiesQCOM( VkTilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT : TilePropertiesQCOM( *reinterpret_cast<TilePropertiesQCOM const *>( &rhs ) ) {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ TilePropertiesQCOM & operator=( TilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ TilePropertiesQCOM & operator=( VkTilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setTileSize( VULKAN_HPP_NAMESPACE::Extent3D const & tileSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ tileSize = tileSize_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setApronSize( VULKAN_HPP_NAMESPACE::Extent2D const & apronSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ apronSize = apronSize_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setOrigin( VULKAN_HPP_NAMESPACE::Offset2D const & origin_ ) VULKAN_HPP_NOEXCEPT
+ {
+ origin = origin_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkTilePropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkTilePropertiesQCOM *>( this );
+ }
+
+ operator VkTilePropertiesQCOM &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkTilePropertiesQCOM *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ VULKAN_HPP_NAMESPACE::Extent3D const &,
+ VULKAN_HPP_NAMESPACE::Extent2D const &,
+ VULKAN_HPP_NAMESPACE::Offset2D const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, tileSize, apronSize, origin );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( TilePropertiesQCOM const & ) const = default;
+#else
+ bool operator==( TilePropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tileSize == rhs.tileSize ) && ( apronSize == rhs.apronSize ) && ( origin == rhs.origin );
+# endif
+ }
+
+ bool operator!=( TilePropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTilePropertiesQCOM;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Extent3D tileSize = {};
+ VULKAN_HPP_NAMESPACE::Extent2D apronSize = {};
+ VULKAN_HPP_NAMESPACE::Offset2D origin = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eTilePropertiesQCOM>
+ {
+ using Type = TilePropertiesQCOM;
+ };
+
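  //=== Editorial usage sketch (not part of the generated header) ===
  // TilePropertiesQCOM is an output structure filled in by the driver. A minimal
  // sketch, assuming VK_QCOM_tile_properties is enabled and that the
  // vkGetDynamicRenderingTilePropertiesQCOM entry point has been fetched via
  // vkGetDeviceProcAddr; error handling is omitted.
  inline VULKAN_HPP_NAMESPACE::Extent3D queryDynamicRenderingTileSize( VkDevice                                    device,
                                                                       const VkRenderingInfo &                     renderingInfo,
                                                                       PFN_vkGetDynamicRenderingTilePropertiesQCOM pfnGetTileProperties )
  {
    VULKAN_HPP_NAMESPACE::TilePropertiesQCOM tileProperties;
    pfnGetTileProperties( device, &renderingInfo, reinterpret_cast<VkTilePropertiesQCOM *>( &tileProperties ) );
    return tileProperties.tileSize;  // apronSize and origin are reported alongside the tile size
  }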
struct TimelineSemaphoreSubmitInfo
{
using NativeType = VkTimelineSemaphoreSubmitInfo;
@@ -90412,33 +93353,37 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTimelineSemaphoreSubmitInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- TimelineSemaphoreSubmitInfo( uint32_t waitSemaphoreValueCount_ = {},
- const uint64_t * pWaitSemaphoreValues_ = {},
- uint32_t signalSemaphoreValueCount_ = {},
- const uint64_t * pSignalSemaphoreValues_ = {} ) VULKAN_HPP_NOEXCEPT
- : waitSemaphoreValueCount( waitSemaphoreValueCount_ )
+ VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo( uint32_t waitSemaphoreValueCount_ = {},
+ const uint64_t * pWaitSemaphoreValues_ = {},
+ uint32_t signalSemaphoreValueCount_ = {},
+ const uint64_t * pSignalSemaphoreValues_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , waitSemaphoreValueCount( waitSemaphoreValueCount_ )
, pWaitSemaphoreValues( pWaitSemaphoreValues_ )
, signalSemaphoreValueCount( signalSemaphoreValueCount_ )
, pSignalSemaphoreValues( pSignalSemaphoreValues_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- TimelineSemaphoreSubmitInfo( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
TimelineSemaphoreSubmitInfo( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: TimelineSemaphoreSubmitInfo( *reinterpret_cast<TimelineSemaphoreSubmitInfo const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- TimelineSemaphoreSubmitInfo(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ = {} )
- : waitSemaphoreValueCount( static_cast<uint32_t>( waitSemaphoreValues_.size() ) )
+ TimelineSemaphoreSubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , waitSemaphoreValueCount( static_cast<uint32_t>( waitSemaphoreValues_.size() ) )
, pWaitSemaphoreValues( waitSemaphoreValues_.data() )
, signalSemaphoreValueCount( static_cast<uint32_t>( signalSemaphoreValues_.size() ) )
, pSignalSemaphoreValues( signalSemaphoreValues_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -90457,23 +93402,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo &
- setWaitSemaphoreValueCount( uint32_t waitSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setWaitSemaphoreValueCount( uint32_t waitSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT
{
waitSemaphoreValueCount = waitSemaphoreValueCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo &
- setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
{
pWaitSemaphoreValues = pWaitSemaphoreValues_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- TimelineSemaphoreSubmitInfo & setWaitSemaphoreValues(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+ TimelineSemaphoreSubmitInfo &
+ setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
{
waitSemaphoreValueCount = static_cast<uint32_t>( waitSemaphoreValues_.size() );
pWaitSemaphoreValues = waitSemaphoreValues_.data();
@@ -90481,23 +93424,21 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo &
- setSignalSemaphoreValueCount( uint32_t signalSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setSignalSemaphoreValueCount( uint32_t signalSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT
{
signalSemaphoreValueCount = signalSemaphoreValueCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo &
- setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
{
pSignalSemaphoreValues = pSignalSemaphoreValues_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- TimelineSemaphoreSubmitInfo & setSignalSemaphoreValues(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+ TimelineSemaphoreSubmitInfo &
+ setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
{
signalSemaphoreValueCount = static_cast<uint32_t>( signalSemaphoreValues_.size() );
pSignalSemaphoreValues = signalSemaphoreValues_.data();
@@ -90506,17 +93447,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkTimelineSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
+ operator VkTimelineSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkTimelineSemaphoreSubmitInfo *>( this );
}
- explicit operator VkTimelineSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT
+ operator VkTimelineSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkTimelineSemaphoreSubmitInfo *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -90529,12 +93470,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- waitSemaphoreValueCount,
- pWaitSemaphoreValues,
- signalSemaphoreValueCount,
- pSignalSemaphoreValues );
+ return std::tie( sType, pNext, waitSemaphoreValueCount, pWaitSemaphoreValues, signalSemaphoreValueCount, pSignalSemaphoreValues );
}
#endif
@@ -90543,13 +93479,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( TimelineSemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( waitSemaphoreValueCount == rhs.waitSemaphoreValueCount ) &&
- ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) &&
- ( signalSemaphoreValueCount == rhs.signalSemaphoreValueCount ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreValueCount == rhs.waitSemaphoreValueCount ) &&
+ ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) && ( signalSemaphoreValueCount == rhs.signalSemaphoreValueCount ) &&
( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
# endif
}
@@ -90568,14 +93502,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t signalSemaphoreValueCount = {};
const uint64_t * pSignalSemaphoreValues = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo ) ==
- sizeof( VkTimelineSemaphoreSubmitInfo ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo>::value,
- "TimelineSemaphoreSubmitInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eTimelineSemaphoreSubmitInfo>
@@ -90584,29 +93510,268 @@ namespace VULKAN_HPP_NAMESPACE
};
using TimelineSemaphoreSubmitInfoKHR = TimelineSemaphoreSubmitInfo;
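  //=== Editorial usage sketch (not part of the generated header) ===
  // TimelineSemaphoreSubmitInfo is chained into SubmitInfo::pNext so that the
  // signal/wait values line up with the semaphores of the submit. A minimal
  // sketch assuming a timeline semaphore created elsewhere and the default,
  // exceptions-enabled dispatcher.
  inline void submitAndSignalTimeline( VULKAN_HPP_NAMESPACE::Queue         queue,
                                       VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer,
                                       VULKAN_HPP_NAMESPACE::Semaphore     timelineSemaphore,
                                       uint64_t                            signalValue )
  {
    VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo timelineInfo;
    timelineInfo.signalSemaphoreValueCount = 1;
    timelineInfo.pSignalSemaphoreValues    = &signalValue;

    VULKAN_HPP_NAMESPACE::SubmitInfo submitInfo;
    submitInfo.pNext                = &timelineInfo;
    submitInfo.commandBufferCount   = 1;
    submitInfo.pCommandBuffers      = &commandBuffer;
    submitInfo.signalSemaphoreCount = 1;
    submitInfo.pSignalSemaphores    = &timelineSemaphore;

    queue.submit( submitInfo );  // signalValue becomes visible once the GPU signals timelineSemaphore
  }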
+ struct TraceRaysIndirectCommand2KHR
+ {
+ using NativeType = VkTraceRaysIndirectCommand2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommand2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress raygenShaderRecordAddress_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderRecordSize_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceAddress missShaderBindingTableAddress_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableSize_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableStride_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceAddress hitShaderBindingTableAddress_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableSize_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableStride_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceAddress callableShaderBindingTableAddress_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableSize_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableStride_ = {},
+ uint32_t width_ = {},
+ uint32_t height_ = {},
+ uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT
+ : raygenShaderRecordAddress( raygenShaderRecordAddress_ )
+ , raygenShaderRecordSize( raygenShaderRecordSize_ )
+ , missShaderBindingTableAddress( missShaderBindingTableAddress_ )
+ , missShaderBindingTableSize( missShaderBindingTableSize_ )
+ , missShaderBindingTableStride( missShaderBindingTableStride_ )
+ , hitShaderBindingTableAddress( hitShaderBindingTableAddress_ )
+ , hitShaderBindingTableSize( hitShaderBindingTableSize_ )
+ , hitShaderBindingTableStride( hitShaderBindingTableStride_ )
+ , callableShaderBindingTableAddress( callableShaderBindingTableAddress_ )
+ , callableShaderBindingTableSize( callableShaderBindingTableSize_ )
+ , callableShaderBindingTableStride( callableShaderBindingTableStride_ )
+ , width( width_ )
+ , height( height_ )
+ , depth( depth_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommand2KHR( TraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ TraceRaysIndirectCommand2KHR( VkTraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : TraceRaysIndirectCommand2KHR( *reinterpret_cast<TraceRaysIndirectCommand2KHR const *>( &rhs ) )
+ {
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ TraceRaysIndirectCommand2KHR & operator=( TraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ TraceRaysIndirectCommand2KHR & operator=( VkTraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR &
+ setRaygenShaderRecordAddress( VULKAN_HPP_NAMESPACE::DeviceAddress raygenShaderRecordAddress_ ) VULKAN_HPP_NOEXCEPT
+ {
+ raygenShaderRecordAddress = raygenShaderRecordAddress_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR &
+ setRaygenShaderRecordSize( VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderRecordSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ raygenShaderRecordSize = raygenShaderRecordSize_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR &
+ setMissShaderBindingTableAddress( VULKAN_HPP_NAMESPACE::DeviceAddress missShaderBindingTableAddress_ ) VULKAN_HPP_NOEXCEPT
+ {
+ missShaderBindingTableAddress = missShaderBindingTableAddress_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR &
+ setMissShaderBindingTableSize( VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ missShaderBindingTableSize = missShaderBindingTableSize_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR &
+ setMissShaderBindingTableStride( VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableStride_ ) VULKAN_HPP_NOEXCEPT
+ {
+ missShaderBindingTableStride = missShaderBindingTableStride_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR &
+ setHitShaderBindingTableAddress( VULKAN_HPP_NAMESPACE::DeviceAddress hitShaderBindingTableAddress_ ) VULKAN_HPP_NOEXCEPT
+ {
+ hitShaderBindingTableAddress = hitShaderBindingTableAddress_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR &
+ setHitShaderBindingTableSize( VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ hitShaderBindingTableSize = hitShaderBindingTableSize_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR &
+ setHitShaderBindingTableStride( VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableStride_ ) VULKAN_HPP_NOEXCEPT
+ {
+ hitShaderBindingTableStride = hitShaderBindingTableStride_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR &
+ setCallableShaderBindingTableAddress( VULKAN_HPP_NAMESPACE::DeviceAddress callableShaderBindingTableAddress_ ) VULKAN_HPP_NOEXCEPT
+ {
+ callableShaderBindingTableAddress = callableShaderBindingTableAddress_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR &
+ setCallableShaderBindingTableSize( VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ callableShaderBindingTableSize = callableShaderBindingTableSize_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR &
+ setCallableShaderBindingTableStride( VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableStride_ ) VULKAN_HPP_NOEXCEPT
+ {
+ callableShaderBindingTableStride = callableShaderBindingTableStride_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+ {
+ width = width_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+ {
+ height = height_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT
+ {
+ depth = depth_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkTraceRaysIndirectCommand2KHR const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkTraceRaysIndirectCommand2KHR *>( this );
+ }
+
+ operator VkTraceRaysIndirectCommand2KHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkTraceRaysIndirectCommand2KHR *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &,
+ VULKAN_HPP_NAMESPACE::DeviceSize const &,
+ VULKAN_HPP_NAMESPACE::DeviceAddress const &,
+ VULKAN_HPP_NAMESPACE::DeviceSize const &,
+ VULKAN_HPP_NAMESPACE::DeviceSize const &,
+ VULKAN_HPP_NAMESPACE::DeviceAddress const &,
+ VULKAN_HPP_NAMESPACE::DeviceSize const &,
+ VULKAN_HPP_NAMESPACE::DeviceSize const &,
+ VULKAN_HPP_NAMESPACE::DeviceAddress const &,
+ VULKAN_HPP_NAMESPACE::DeviceSize const &,
+ VULKAN_HPP_NAMESPACE::DeviceSize const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( raygenShaderRecordAddress,
+ raygenShaderRecordSize,
+ missShaderBindingTableAddress,
+ missShaderBindingTableSize,
+ missShaderBindingTableStride,
+ hitShaderBindingTableAddress,
+ hitShaderBindingTableSize,
+ hitShaderBindingTableStride,
+ callableShaderBindingTableAddress,
+ callableShaderBindingTableSize,
+ callableShaderBindingTableStride,
+ width,
+ height,
+ depth );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( TraceRaysIndirectCommand2KHR const & ) const = default;
+#else
+ bool operator==( TraceRaysIndirectCommand2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( raygenShaderRecordAddress == rhs.raygenShaderRecordAddress ) && ( raygenShaderRecordSize == rhs.raygenShaderRecordSize ) &&
+ ( missShaderBindingTableAddress == rhs.missShaderBindingTableAddress ) && ( missShaderBindingTableSize == rhs.missShaderBindingTableSize ) &&
+ ( missShaderBindingTableStride == rhs.missShaderBindingTableStride ) && ( hitShaderBindingTableAddress == rhs.hitShaderBindingTableAddress ) &&
+ ( hitShaderBindingTableSize == rhs.hitShaderBindingTableSize ) && ( hitShaderBindingTableStride == rhs.hitShaderBindingTableStride ) &&
+ ( callableShaderBindingTableAddress == rhs.callableShaderBindingTableAddress ) &&
+ ( callableShaderBindingTableSize == rhs.callableShaderBindingTableSize ) &&
+ ( callableShaderBindingTableStride == rhs.callableShaderBindingTableStride ) && ( width == rhs.width ) && ( height == rhs.height ) &&
+ ( depth == rhs.depth );
+# endif
+ }
+
+ bool operator!=( TraceRaysIndirectCommand2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::DeviceAddress raygenShaderRecordAddress = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderRecordSize = {};
+ VULKAN_HPP_NAMESPACE::DeviceAddress missShaderBindingTableAddress = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableSize = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableStride = {};
+ VULKAN_HPP_NAMESPACE::DeviceAddress hitShaderBindingTableAddress = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableSize = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableStride = {};
+ VULKAN_HPP_NAMESPACE::DeviceAddress callableShaderBindingTableAddress = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableSize = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableStride = {};
+ uint32_t width = {};
+ uint32_t height = {};
+ uint32_t depth = {};
+ };
+
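  //=== Editorial usage sketch (not part of the generated header) ===
  // TraceRaysIndirectCommand2KHR is not passed to a command directly: the host
  // writes it into device-addressable memory and vkCmdTraceRaysIndirect2KHR
  // (VK_KHR_ray_tracing_maintenance1) reads it from there. A minimal sketch; the
  // shader-binding-table address/size and the mapped buffer come from the caller,
  // and unused binding-table regions are left zero.
  inline void writeTraceRaysIndirectCommand( void *                              mappedIndirectBuffer,
                                             VULKAN_HPP_NAMESPACE::DeviceAddress raygenRecordAddress,
                                             VULKAN_HPP_NAMESPACE::DeviceSize    raygenRecordSize,
                                             uint32_t                            width,
                                             uint32_t                            height )
  {
    VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR command;
    command.setRaygenShaderRecordAddress( raygenRecordAddress )
      .setRaygenShaderRecordSize( raygenRecordSize )
      .setWidth( width )
      .setHeight( height )
      .setDepth( 1 );
    *static_cast<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR *>( mappedIndirectBuffer ) = command;
  }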
struct TraceRaysIndirectCommandKHR
{
using NativeType = VkTraceRaysIndirectCommandKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( uint32_t width_ = {},
- uint32_t height_ = {},
- uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT
: width( width_ )
, height( height_ )
, depth( depth_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- TraceRaysIndirectCommandKHR( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
TraceRaysIndirectCommandKHR( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: TraceRaysIndirectCommandKHR( *reinterpret_cast<TraceRaysIndirectCommandKHR const *>( &rhs ) )
- {}
+ {
+ }
explicit TraceRaysIndirectCommandKHR( Extent2D const & extent2D, uint32_t depth_ = {} )
: width( extent2D.width ), height( extent2D.height ), depth( depth_ )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
TraceRaysIndirectCommandKHR & operator=( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -90637,17 +93802,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkTraceRaysIndirectCommandKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkTraceRaysIndirectCommandKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkTraceRaysIndirectCommandKHR *>( this );
}
- explicit operator VkTraceRaysIndirectCommandKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkTraceRaysIndirectCommandKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkTraceRaysIndirectCommandKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -90664,7 +93829,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( TraceRaysIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( width == rhs.width ) && ( height == rhs.height ) && ( depth == rhs.depth );
@@ -90682,14 +93847,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t height = {};
uint32_t depth = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR ) ==
- sizeof( VkTraceRaysIndirectCommandKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR>::value,
- "TraceRaysIndirectCommandKHR is not nothrow_move_constructible!" );
struct ValidationCacheCreateInfoEXT
{
@@ -90699,27 +93856,32 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationCacheCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ = {},
- size_t initialDataSize_ = {},
- const void * pInitialData_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ = {},
+ size_t initialDataSize_ = {},
+ const void * pInitialData_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, initialDataSize( initialDataSize_ )
, pInitialData( pInitialData_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- ValidationCacheCreateInfoEXT( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ValidationCacheCreateInfoEXT( *reinterpret_cast<ValidationCacheCreateInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ )
- : flags( flags_ ), initialDataSize( initialData_.size() * sizeof( T ) ), pInitialData( initialData_.data() )
- {}
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ ), flags( flags_ ), initialDataSize( initialData_.size() * sizeof( T ) ), pInitialData( initialData_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -90738,22 +93900,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT &
- setFlags( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT &
- setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT
{
initialDataSize = initialDataSize_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT &
- setPInitialData( const void * pInitialData_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setPInitialData( const void * pInitialData_ ) VULKAN_HPP_NOEXCEPT
{
pInitialData = pInitialData_;
return *this;
@@ -90761,8 +93920,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
- ValidationCacheCreateInfoEXT &
- setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ ) VULKAN_HPP_NOEXCEPT
+ ValidationCacheCreateInfoEXT & setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ ) VULKAN_HPP_NOEXCEPT
{
initialDataSize = initialData_.size() * sizeof( T );
pInitialData = initialData_.data();
@@ -90771,17 +93929,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( this );
}
- explicit operator VkValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkValidationCacheCreateInfoEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -90802,11 +93960,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( initialDataSize == rhs.initialDataSize ) && ( pInitialData == rhs.pInitialData );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( initialDataSize == rhs.initialDataSize ) &&
+ ( pInitialData == rhs.pInitialData );
# endif
}
@@ -90823,14 +93981,6 @@ namespace VULKAN_HPP_NAMESPACE
size_t initialDataSize = {};
const void * pInitialData = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT ) ==
- sizeof( VkValidationCacheCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT>::value,
- "ValidationCacheCreateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eValidationCacheCreateInfoEXT>
@@ -90846,34 +93996,38 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT(
- uint32_t enabledValidationFeatureCount_ = {},
- const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures_ = {},
- uint32_t disabledValidationFeatureCount_ = {},
- const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
- : enabledValidationFeatureCount( enabledValidationFeatureCount_ )
+ VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( uint32_t enabledValidationFeatureCount_ = {},
+ const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures_ = {},
+ uint32_t disabledValidationFeatureCount_ = {},
+ const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , enabledValidationFeatureCount( enabledValidationFeatureCount_ )
, pEnabledValidationFeatures( pEnabledValidationFeatures_ )
, disabledValidationFeatureCount( disabledValidationFeatureCount_ )
, pDisabledValidationFeatures( pDisabledValidationFeatures_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ValidationFeaturesEXT( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ValidationFeaturesEXT( *reinterpret_cast<ValidationFeaturesEXT const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ValidationFeaturesEXT(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT> const &
- enabledValidationFeatures_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT> const &
- disabledValidationFeatures_ = {} )
- : enabledValidationFeatureCount( static_cast<uint32_t>( enabledValidationFeatures_.size() ) )
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT> const & enabledValidationFeatures_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT> const & disabledValidationFeatures_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , enabledValidationFeatureCount( static_cast<uint32_t>( enabledValidationFeatures_.size() ) )
, pEnabledValidationFeatures( enabledValidationFeatures_.data() )
, disabledValidationFeatureCount( static_cast<uint32_t>( disabledValidationFeatures_.size() ) )
, pDisabledValidationFeatures( disabledValidationFeatures_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -90892,15 +94046,14 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT &
- setEnabledValidationFeatureCount( uint32_t enabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setEnabledValidationFeatureCount( uint32_t enabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT
{
enabledValidationFeatureCount = enabledValidationFeatureCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setPEnabledValidationFeatures(
- const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT &
+ setPEnabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
{
pEnabledValidationFeatures = pEnabledValidationFeatures_;
return *this;
@@ -90908,8 +94061,8 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ValidationFeaturesEXT & setEnabledValidationFeatures(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT> const &
- enabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT> const & enabledValidationFeatures_ )
+ VULKAN_HPP_NOEXCEPT
{
enabledValidationFeatureCount = static_cast<uint32_t>( enabledValidationFeatures_.size() );
pEnabledValidationFeatures = enabledValidationFeatures_.data();
@@ -90917,15 +94070,14 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT &
- setDisabledValidationFeatureCount( uint32_t disabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setDisabledValidationFeatureCount( uint32_t disabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT
{
disabledValidationFeatureCount = disabledValidationFeatureCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setPDisabledValidationFeatures(
- const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT &
+ setPDisabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
{
pDisabledValidationFeatures = pDisabledValidationFeatures_;
return *this;
@@ -90933,8 +94085,8 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ValidationFeaturesEXT & setDisabledValidationFeatures(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT> const &
- disabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT> const & disabledValidationFeatures_ )
+ VULKAN_HPP_NOEXCEPT
{
disabledValidationFeatureCount = static_cast<uint32_t>( disabledValidationFeatures_.size() );
pDisabledValidationFeatures = disabledValidationFeatures_.data();
@@ -90943,17 +94095,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkValidationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkValidationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkValidationFeaturesEXT *>( this );
}
- explicit operator VkValidationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkValidationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkValidationFeaturesEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -90966,12 +94118,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- enabledValidationFeatureCount,
- pEnabledValidationFeatures,
- disabledValidationFeatureCount,
- pDisabledValidationFeatures );
+ return std::tie( sType, pNext, enabledValidationFeatureCount, pEnabledValidationFeatures, disabledValidationFeatureCount, pDisabledValidationFeatures );
}
#endif
@@ -90980,13 +94127,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ValidationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( enabledValidationFeatureCount == rhs.enabledValidationFeatureCount ) &&
- ( pEnabledValidationFeatures == rhs.pEnabledValidationFeatures ) &&
- ( disabledValidationFeatureCount == rhs.disabledValidationFeatureCount ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( enabledValidationFeatureCount == rhs.enabledValidationFeatureCount ) &&
+ ( pEnabledValidationFeatures == rhs.pEnabledValidationFeatures ) && ( disabledValidationFeatureCount == rhs.disabledValidationFeatureCount ) &&
( pDisabledValidationFeatures == rhs.pDisabledValidationFeatures );
# endif
}
@@ -90998,19 +94143,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFeaturesEXT;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFeaturesEXT;
+ const void * pNext = {};
uint32_t enabledValidationFeatureCount = {};
const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures = {};
uint32_t disabledValidationFeatureCount = {};
const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT ) == sizeof( VkValidationFeaturesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT>::value,
- "ValidationFeaturesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eValidationFeaturesEXT>
@@ -91026,26 +94165,27 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFlagsEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ValidationFlagsEXT(
- uint32_t disabledValidationCheckCount_ = {},
- const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks_ = {} ) VULKAN_HPP_NOEXCEPT
- : disabledValidationCheckCount( disabledValidationCheckCount_ )
+ VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( uint32_t disabledValidationCheckCount_ = {},
+ const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , disabledValidationCheckCount( disabledValidationCheckCount_ )
, pDisabledValidationChecks( pDisabledValidationChecks_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- ValidationFlagsEXT( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : ValidationFlagsEXT( *reinterpret_cast<ValidationFlagsEXT const *>( &rhs ) )
- {}
+ ValidationFlagsEXT( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ValidationFlagsEXT( *reinterpret_cast<ValidationFlagsEXT const *>( &rhs ) ) {}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- ValidationFlagsEXT(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationCheckEXT> const &
- disabledValidationChecks_ )
- : disabledValidationCheckCount( static_cast<uint32_t>( disabledValidationChecks_.size() ) )
+ ValidationFlagsEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationCheckEXT> const & disabledValidationChecks_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , disabledValidationCheckCount( static_cast<uint32_t>( disabledValidationChecks_.size() ) )
, pDisabledValidationChecks( disabledValidationChecks_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -91064,15 +94204,14 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT &
- setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ ) VULKAN_HPP_NOEXCEPT
{
disabledValidationCheckCount = disabledValidationCheckCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & setPDisabledValidationChecks(
- const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT &
+ setPDisabledValidationChecks( const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT
{
pDisabledValidationChecks = pDisabledValidationChecks_;
return *this;
@@ -91080,8 +94219,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ValidationFlagsEXT & setDisabledValidationChecks(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationCheckEXT> const &
- disabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationCheckEXT> const & disabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT
{
disabledValidationCheckCount = static_cast<uint32_t>( disabledValidationChecks_.size() );
pDisabledValidationChecks = disabledValidationChecks_.data();
@@ -91090,24 +94228,21 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkValidationFlagsEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkValidationFlagsEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkValidationFlagsEXT *>( this );
}
- explicit operator VkValidationFlagsEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkValidationFlagsEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkValidationFlagsEXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- uint32_t const &,
- const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -91120,11 +94255,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( ValidationFlagsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( disabledValidationCheckCount == rhs.disabledValidationCheckCount ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( disabledValidationCheckCount == rhs.disabledValidationCheckCount ) &&
( pDisabledValidationChecks == rhs.pDisabledValidationChecks );
# endif
}
@@ -91141,12 +94275,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t disabledValidationCheckCount = {};
const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ValidationFlagsEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ValidationFlagsEXT>::value,
- "ValidationFlagsEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eValidationFlagsEXT>
@@ -91159,34 +94287,33 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkVertexInputAttributeDescription2EXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eVertexInputAttributeDescription2EXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVertexInputAttributeDescription2EXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT(
- uint32_t location_ = {},
- uint32_t binding_ = {},
- VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- uint32_t offset_ = {} ) VULKAN_HPP_NOEXCEPT
- : location( location_ )
+ VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT( uint32_t location_ = {},
+ uint32_t binding_ = {},
+ VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ uint32_t offset_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , location( location_ )
, binding( binding_ )
, format( format_ )
, offset( offset_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT( VertexInputAttributeDescription2EXT const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT( VertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VertexInputAttributeDescription2EXT( VkVertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
: VertexInputAttributeDescription2EXT( *reinterpret_cast<VertexInputAttributeDescription2EXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VertexInputAttributeDescription2EXT &
- operator=( VertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VertexInputAttributeDescription2EXT & operator=( VertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VertexInputAttributeDescription2EXT &
- operator=( VkVertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VertexInputAttributeDescription2EXT & operator=( VkVertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT const *>( &rhs );
return *this;
@@ -91211,8 +94338,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT &
- setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
@@ -91225,17 +94351,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVertexInputAttributeDescription2EXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVertexInputAttributeDescription2EXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( this );
}
- explicit operator VkVertexInputAttributeDescription2EXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVertexInputAttributeDescription2EXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVertexInputAttributeDescription2EXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -91257,11 +94383,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( VertexInputAttributeDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( location == rhs.location ) &&
- ( binding == rhs.binding ) && ( format == rhs.format ) && ( offset == rhs.offset );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( location == rhs.location ) && ( binding == rhs.binding ) && ( format == rhs.format ) &&
+ ( offset == rhs.offset );
# endif
}
@@ -91279,14 +94405,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
uint32_t offset = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT ) ==
- sizeof( VkVertexInputAttributeDescription2EXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT>::value,
- "VertexInputAttributeDescription2EXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eVertexInputAttributeDescription2EXT>
@@ -91299,31 +94417,31 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkVertexInputBindingDescription2EXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eVertexInputBindingDescription2EXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVertexInputBindingDescription2EXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VertexInputBindingDescription2EXT(
- uint32_t binding_ = {},
- uint32_t stride_ = {},
- VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex,
- uint32_t divisor_ = {} ) VULKAN_HPP_NOEXCEPT
- : binding( binding_ )
+ VULKAN_HPP_CONSTEXPR VertexInputBindingDescription2EXT( uint32_t binding_ = {},
+ uint32_t stride_ = {},
+ VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex,
+ uint32_t divisor_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , binding( binding_ )
, stride( stride_ )
, inputRate( inputRate_ )
, divisor( divisor_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- VertexInputBindingDescription2EXT( VertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VertexInputBindingDescription2EXT( VertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VertexInputBindingDescription2EXT( VkVertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
: VertexInputBindingDescription2EXT( *reinterpret_cast<VertexInputBindingDescription2EXT const *>( &rhs ) )
- {}
+ {
+ }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VertexInputBindingDescription2EXT &
- operator=( VertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VertexInputBindingDescription2EXT & operator=( VertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VertexInputBindingDescription2EXT & operator=( VkVertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -91350,8 +94468,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT &
- setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT
{
inputRate = inputRate_;
return *this;
@@ -91364,17 +94481,17 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVertexInputBindingDescription2EXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVertexInputBindingDescription2EXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( this );
}
- explicit operator VkVertexInputBindingDescription2EXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVertexInputBindingDescription2EXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVertexInputBindingDescription2EXT *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -91396,11 +94513,11 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( VertexInputBindingDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( binding == rhs.binding ) &&
- ( stride == rhs.stride ) && ( inputRate == rhs.inputRate ) && ( divisor == rhs.divisor );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( binding == rhs.binding ) && ( stride == rhs.stride ) && ( inputRate == rhs.inputRate ) &&
+ ( divisor == rhs.divisor );
# endif
}
@@ -91418,14 +94535,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex;
uint32_t divisor = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT ) ==
- sizeof( VkVertexInputBindingDescription2EXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT>::value,
- "VertexInputBindingDescription2EXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eVertexInputBindingDescription2EXT>
@@ -91442,17 +94551,20 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eViSurfaceCreateInfoNN;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ = {},
- void * window_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR
+ ViSurfaceCreateInfoNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ = {}, void * window_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, window( window_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
: ViSurfaceCreateInfoNN( *reinterpret_cast<ViSurfaceCreateInfoNN const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ViSurfaceCreateInfoNN & operator=( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -91470,8 +94582,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN &
- setFlags( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & setFlags( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -91484,24 +94595,21 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkViSurfaceCreateInfoNN const &() const VULKAN_HPP_NOEXCEPT
+ operator VkViSurfaceCreateInfoNN const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkViSurfaceCreateInfoNN *>( this );
}
- explicit operator VkViSurfaceCreateInfoNN &() VULKAN_HPP_NOEXCEPT
+ operator VkViSurfaceCreateInfoNN &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkViSurfaceCreateInfoNN *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN const &,
- void * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN const &, void * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -91514,7 +94622,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( ViSurfaceCreateInfoNN const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( window == rhs.window );
@@ -91533,12 +94641,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags = {};
void * window = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN>::value,
- "ViSurfaceCreateInfoNN is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eViSurfaceCreateInfoNN>
@@ -91548,86 +94650,86 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_VI_NN*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct VideoPictureResourceKHR
+ struct VideoPictureResourceInfoKHR
{
- using NativeType = VkVideoPictureResourceKHR;
+ using NativeType = VkVideoPictureResourceInfoKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoPictureResourceKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoPictureResourceInfoKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- VideoPictureResourceKHR( VULKAN_HPP_NAMESPACE::Offset2D codedOffset_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D codedExtent_ = {},
- uint32_t baseArrayLayer_ = {},
- VULKAN_HPP_NAMESPACE::ImageView imageViewBinding_ = {} ) VULKAN_HPP_NOEXCEPT
- : codedOffset( codedOffset_ )
+ VULKAN_HPP_CONSTEXPR VideoPictureResourceInfoKHR( VULKAN_HPP_NAMESPACE::Offset2D codedOffset_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D codedExtent_ = {},
+ uint32_t baseArrayLayer_ = {},
+ VULKAN_HPP_NAMESPACE::ImageView imageViewBinding_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , codedOffset( codedOffset_ )
, codedExtent( codedExtent_ )
, baseArrayLayer( baseArrayLayer_ )
, imageViewBinding( imageViewBinding_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR VideoPictureResourceKHR( VideoPictureResourceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoPictureResourceInfoKHR( VideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoPictureResourceKHR( VkVideoPictureResourceKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : VideoPictureResourceKHR( *reinterpret_cast<VideoPictureResourceKHR const *>( &rhs ) )
- {}
+ VideoPictureResourceInfoKHR( VkVideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : VideoPictureResourceInfoKHR( *reinterpret_cast<VideoPictureResourceInfoKHR const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoPictureResourceKHR & operator=( VideoPictureResourceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoPictureResourceInfoKHR & operator=( VideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoPictureResourceKHR & operator=( VkVideoPictureResourceKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VideoPictureResourceInfoKHR & operator=( VkVideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR const *>( &rhs );
return *this;
}
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceKHR &
- setCodedOffset( VULKAN_HPP_NAMESPACE::Offset2D const & codedOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setCodedOffset( VULKAN_HPP_NAMESPACE::Offset2D const & codedOffset_ ) VULKAN_HPP_NOEXCEPT
{
codedOffset = codedOffset_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceKHR &
- setCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & codedExtent_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & codedExtent_ ) VULKAN_HPP_NOEXCEPT
{
codedExtent = codedExtent_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceKHR & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
{
baseArrayLayer = baseArrayLayer_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceKHR &
- setImageViewBinding( VULKAN_HPP_NAMESPACE::ImageView imageViewBinding_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setImageViewBinding( VULKAN_HPP_NAMESPACE::ImageView imageViewBinding_ ) VULKAN_HPP_NOEXCEPT
{
imageViewBinding = imageViewBinding_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoPictureResourceKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoPictureResourceInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkVideoPictureResourceKHR *>( this );
+ return *reinterpret_cast<const VkVideoPictureResourceInfoKHR *>( this );
}
- explicit operator VkVideoPictureResourceKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoPictureResourceInfoKHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkVideoPictureResourceKHR *>( this );
+ return *reinterpret_cast<VkVideoPictureResourceInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -91645,118 +94747,113 @@ namespace VULKAN_HPP_NAMESPACE
# endif
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( VideoPictureResourceKHR const & ) const = default;
+ auto operator<=>( VideoPictureResourceInfoKHR const & ) const = default;
# else
- bool operator==( VideoPictureResourceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( VideoPictureResourceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( codedOffset == rhs.codedOffset ) &&
- ( codedExtent == rhs.codedExtent ) && ( baseArrayLayer == rhs.baseArrayLayer ) &&
- ( imageViewBinding == rhs.imageViewBinding );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( codedOffset == rhs.codedOffset ) && ( codedExtent == rhs.codedExtent ) &&
+ ( baseArrayLayer == rhs.baseArrayLayer ) && ( imageViewBinding == rhs.imageViewBinding );
# endif
}
- bool operator!=( VideoPictureResourceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( VideoPictureResourceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoPictureResourceKHR;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoPictureResourceInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Offset2D codedOffset = {};
VULKAN_HPP_NAMESPACE::Extent2D codedExtent = {};
uint32_t baseArrayLayer = {};
VULKAN_HPP_NAMESPACE::ImageView imageViewBinding = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR ) ==
- sizeof( VkVideoPictureResourceKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR>::value,
- "VideoPictureResourceKHR is not nothrow_move_constructible!" );
template <>
- struct CppType<StructureType, StructureType::eVideoPictureResourceKHR>
+ struct CppType<StructureType, StructureType::eVideoPictureResourceInfoKHR>
{
- using Type = VideoPictureResourceKHR;
+ using Type = VideoPictureResourceInfoKHR;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct VideoReferenceSlotKHR
+ struct VideoReferenceSlotInfoKHR
{
- using NativeType = VkVideoReferenceSlotKHR;
+ using NativeType = VkVideoReferenceSlotInfoKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoReferenceSlotKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoReferenceSlotInfoKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoReferenceSlotKHR(
- int8_t slotIndex_ = {},
- const VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR * pPictureResource_ = {} ) VULKAN_HPP_NOEXCEPT
- : slotIndex( slotIndex_ )
+ VULKAN_HPP_CONSTEXPR VideoReferenceSlotInfoKHR( int32_t slotIndex_ = {},
+ const VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR * pPictureResource_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , slotIndex( slotIndex_ )
, pPictureResource( pPictureResource_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR VideoReferenceSlotKHR( VideoReferenceSlotKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoReferenceSlotInfoKHR( VideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoReferenceSlotKHR( VkVideoReferenceSlotKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : VideoReferenceSlotKHR( *reinterpret_cast<VideoReferenceSlotKHR const *>( &rhs ) )
- {}
+ VideoReferenceSlotInfoKHR( VkVideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : VideoReferenceSlotInfoKHR( *reinterpret_cast<VideoReferenceSlotInfoKHR const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoReferenceSlotKHR & operator=( VideoReferenceSlotKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoReferenceSlotInfoKHR & operator=( VideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoReferenceSlotKHR & operator=( VkVideoReferenceSlotKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VideoReferenceSlotInfoKHR & operator=( VkVideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR const *>( &rhs );
return *this;
}
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotKHR & setSlotIndex( int8_t slotIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotInfoKHR & setSlotIndex( int32_t slotIndex_ ) VULKAN_HPP_NOEXCEPT
{
slotIndex = slotIndex_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotKHR &
- setPPictureResource( const VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR * pPictureResource_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotInfoKHR &
+ setPPictureResource( const VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR * pPictureResource_ ) VULKAN_HPP_NOEXCEPT
{
pPictureResource = pPictureResource_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoReferenceSlotKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoReferenceSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkVideoReferenceSlotKHR *>( this );
+ return *reinterpret_cast<const VkVideoReferenceSlotInfoKHR *>( this );
}
- explicit operator VkVideoReferenceSlotKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoReferenceSlotInfoKHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkVideoReferenceSlotKHR *>( this );
+ return *reinterpret_cast<VkVideoReferenceSlotInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
const void * const &,
- int8_t const &,
- const VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR * const &>
+ int32_t const &,
+ const VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -91765,41 +94862,34 @@ namespace VULKAN_HPP_NAMESPACE
# endif
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( VideoReferenceSlotKHR const & ) const = default;
+ auto operator<=>( VideoReferenceSlotInfoKHR const & ) const = default;
# else
- bool operator==( VideoReferenceSlotKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( VideoReferenceSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( slotIndex == rhs.slotIndex ) &&
- ( pPictureResource == rhs.pPictureResource );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( slotIndex == rhs.slotIndex ) && ( pPictureResource == rhs.pPictureResource );
# endif
}
- bool operator!=( VideoReferenceSlotKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( VideoReferenceSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoReferenceSlotKHR;
- const void * pNext = {};
- int8_t slotIndex = {};
- const VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR * pPictureResource = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoReferenceSlotInfoKHR;
+ const void * pNext = {};
+ int32_t slotIndex = {};
+ const VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR * pPictureResource = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR ) == sizeof( VkVideoReferenceSlotKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR>::value,
- "VideoReferenceSlotKHR is not nothrow_move_constructible!" );
template <>
- struct CppType<StructureType, StructureType::eVideoReferenceSlotKHR>
+ struct CppType<StructureType, StructureType::eVideoReferenceSlotInfoKHR>
{
- using Type = VideoReferenceSlotKHR;
+ using Type = VideoReferenceSlotInfoKHR;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
@@ -91812,42 +94902,42 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoBeginCodingInfoKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoBeginCodingInfoKHR(
- VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_ = {},
- VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR codecQualityPreset_ = {},
- VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ = {},
- VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ = {},
- uint32_t referenceSlotCount_ = {},
- const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , codecQualityPreset( codecQualityPreset_ )
+ VULKAN_HPP_CONSTEXPR VideoBeginCodingInfoKHR( VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_ = {},
+ VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ = {},
+ VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ = {},
+ uint32_t referenceSlotCount_ = {},
+ const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, videoSession( videoSession_ )
, videoSessionParameters( videoSessionParameters_ )
, referenceSlotCount( referenceSlotCount_ )
, pReferenceSlots( pReferenceSlots_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR VideoBeginCodingInfoKHR( VideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoBeginCodingInfoKHR( VkVideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoBeginCodingInfoKHR( *reinterpret_cast<VideoBeginCodingInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- VideoBeginCodingInfoKHR(
- VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_,
- VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR codecQualityPreset_,
- VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_,
- VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR> const &
- referenceSlots_ )
- : flags( flags_ )
- , codecQualityPreset( codecQualityPreset_ )
+ VideoBeginCodingInfoKHR( VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_,
+ VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_,
+ VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR> const & referenceSlots_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, videoSession( videoSession_ )
, videoSessionParameters( videoSessionParameters_ )
, referenceSlotCount( static_cast<uint32_t>( referenceSlots_.size() ) )
, pReferenceSlots( referenceSlots_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -91866,43 +94956,33 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR &
- setFlags( VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setCodecQualityPreset(
- VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR codecQualityPreset_ ) VULKAN_HPP_NOEXCEPT
- {
- codecQualityPreset = codecQualityPreset_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR &
- setVideoSession( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setVideoSession( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ ) VULKAN_HPP_NOEXCEPT
{
videoSession = videoSession_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setVideoSessionParameters(
- VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR &
+ setVideoSessionParameters( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ ) VULKAN_HPP_NOEXCEPT
{
videoSessionParameters = videoSessionParameters_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR &
- setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT
{
referenceSlotCount = referenceSlotCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR &
- setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT
+ setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT
{
pReferenceSlots = pReferenceSlots_;
return *this;
@@ -91910,8 +94990,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoBeginCodingInfoKHR & setReferenceSlots(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR> const &
- referenceSlots_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR> const & referenceSlots_ ) VULKAN_HPP_NOEXCEPT
{
referenceSlotCount = static_cast<uint32_t>( referenceSlots_.size() );
pReferenceSlots = referenceSlots_.data();
@@ -91920,39 +94999,31 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoBeginCodingInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoBeginCodingInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( this );
}
- explicit operator VkVideoBeginCodingInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoBeginCodingInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoBeginCodingInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
const void * const &,
VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR const &,
- VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR const &,
VULKAN_HPP_NAMESPACE::VideoSessionKHR const &,
VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR const &,
uint32_t const &,
- const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * const &>
+ const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- flags,
- codecQualityPreset,
- videoSession,
- videoSessionParameters,
- referenceSlotCount,
- pReferenceSlots );
+ return std::tie( sType, pNext, flags, videoSession, videoSessionParameters, referenceSlotCount, pReferenceSlots );
}
# endif
@@ -91961,13 +95032,12 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoBeginCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( codecQualityPreset == rhs.codecQualityPreset ) && ( videoSession == rhs.videoSession ) &&
- ( videoSessionParameters == rhs.videoSessionParameters ) &&
- ( referenceSlotCount == rhs.referenceSlotCount ) && ( pReferenceSlots == rhs.pReferenceSlots );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( videoSession == rhs.videoSession ) &&
+ ( videoSessionParameters == rhs.videoSessionParameters ) && ( referenceSlotCount == rhs.referenceSlotCount ) &&
+ ( pReferenceSlots == rhs.pReferenceSlots );
# endif
}
@@ -91978,22 +95048,14 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoBeginCodingInfoKHR;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags = {};
- VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR codecQualityPreset = {};
- VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession = {};
- VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters = {};
- uint32_t referenceSlotCount = {};
- const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoBeginCodingInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags = {};
+ VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession = {};
+ VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters = {};
+ uint32_t referenceSlotCount = {};
+ const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR ) ==
- sizeof( VkVideoBeginCodingInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR>::value,
- "VideoBeginCodingInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eVideoBeginCodingInfoKHR>
@@ -92003,143 +95065,6 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct VideoBindMemoryKHR
- {
- using NativeType = VkVideoBindMemoryKHR;
-
- static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoBindMemoryKHR;
-
-# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoBindMemoryKHR( uint32_t memoryBindIndex_ = {},
- VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize memorySize_ = {} ) VULKAN_HPP_NOEXCEPT
- : memoryBindIndex( memoryBindIndex_ )
- , memory( memory_ )
- , memoryOffset( memoryOffset_ )
- , memorySize( memorySize_ )
- {}
-
- VULKAN_HPP_CONSTEXPR VideoBindMemoryKHR( VideoBindMemoryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- VideoBindMemoryKHR( VkVideoBindMemoryKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : VideoBindMemoryKHR( *reinterpret_cast<VideoBindMemoryKHR const *>( &rhs ) )
- {}
-# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
-
- VideoBindMemoryKHR & operator=( VideoBindMemoryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- VideoBindMemoryKHR & operator=( VkVideoBindMemoryKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR const *>( &rhs );
- return *this;
- }
-
-# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoBindMemoryKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoBindMemoryKHR & setMemoryBindIndex( uint32_t memoryBindIndex_ ) VULKAN_HPP_NOEXCEPT
- {
- memoryBindIndex = memoryBindIndex_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoBindMemoryKHR &
- setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
- {
- memory = memory_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoBindMemoryKHR &
- setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
- {
- memoryOffset = memoryOffset_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoBindMemoryKHR &
- setMemorySize( VULKAN_HPP_NAMESPACE::DeviceSize memorySize_ ) VULKAN_HPP_NOEXCEPT
- {
- memorySize = memorySize_;
- return *this;
- }
-# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
-
- explicit operator VkVideoBindMemoryKHR const &() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkVideoBindMemoryKHR *>( this );
- }
-
- explicit operator VkVideoBindMemoryKHR &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkVideoBindMemoryKHR *>( this );
- }
-
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
-# if 14 <= VULKAN_HPP_CPP_VERSION
- auto
-# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- uint32_t const &,
- VULKAN_HPP_NAMESPACE::DeviceMemory const &,
- VULKAN_HPP_NAMESPACE::DeviceSize const &,
- VULKAN_HPP_NAMESPACE::DeviceSize const &>
-# endif
- reflect() const VULKAN_HPP_NOEXCEPT
- {
- return std::tie( sType, pNext, memoryBindIndex, memory, memoryOffset, memorySize );
- }
-# endif
-
-# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( VideoBindMemoryKHR const & ) const = default;
-# else
- bool operator==( VideoBindMemoryKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
- return this->reflect() == rhs.reflect();
-# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryBindIndex == rhs.memoryBindIndex ) &&
- ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset ) && ( memorySize == rhs.memorySize );
-# endif
- }
-
- bool operator!=( VideoBindMemoryKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
- return !operator==( rhs );
- }
-# endif
-
- public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoBindMemoryKHR;
- const void * pNext = {};
- uint32_t memoryBindIndex = {};
- VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
- VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
- VULKAN_HPP_NAMESPACE::DeviceSize memorySize = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR ) == sizeof( VkVideoBindMemoryKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR>::value,
- "VideoBindMemoryKHR is not nothrow_move_constructible!" );
-
- template <>
- struct CppType<StructureType, StructureType::eVideoBindMemoryKHR>
- {
- using Type = VideoBindMemoryKHR;
- };
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
struct VideoCapabilitiesKHR
{
using NativeType = VkVideoCapabilitiesKHR;
@@ -92148,29 +95073,35 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoCapabilitiesKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoCapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoCapabilityFlagsKHR capabilityFlags_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferOffsetAlignment_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferSizeAlignment_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D videoPictureExtentGranularity_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D minExtent_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D maxExtent_ = {},
- uint32_t maxReferencePicturesSlotsCount_ = {},
- uint32_t maxReferencePicturesActiveCount_ = {} ) VULKAN_HPP_NOEXCEPT
- : capabilityFlags( capabilityFlags_ )
+ VULKAN_HPP_CONSTEXPR_14 VideoCapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoCapabilityFlagsKHR flags_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferOffsetAlignment_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferSizeAlignment_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D pictureAccessGranularity_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D minCodedExtent_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent_ = {},
+ uint32_t maxDpbSlots_ = {},
+ uint32_t maxActiveReferencePictures_ = {},
+ VULKAN_HPP_NAMESPACE::ExtensionProperties stdHeaderVersion_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, minBitstreamBufferOffsetAlignment( minBitstreamBufferOffsetAlignment_ )
, minBitstreamBufferSizeAlignment( minBitstreamBufferSizeAlignment_ )
- , videoPictureExtentGranularity( videoPictureExtentGranularity_ )
- , minExtent( minExtent_ )
- , maxExtent( maxExtent_ )
- , maxReferencePicturesSlotsCount( maxReferencePicturesSlotsCount_ )
- , maxReferencePicturesActiveCount( maxReferencePicturesActiveCount_ )
- {}
+ , pictureAccessGranularity( pictureAccessGranularity_ )
+ , minCodedExtent( minCodedExtent_ )
+ , maxCodedExtent( maxCodedExtent_ )
+ , maxDpbSlots( maxDpbSlots_ )
+ , maxActiveReferencePictures( maxActiveReferencePictures_ )
+ , stdHeaderVersion( stdHeaderVersion_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR VideoCapabilitiesKHR( VideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 VideoCapabilitiesKHR( VideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoCapabilitiesKHR( VkVideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoCapabilitiesKHR( *reinterpret_cast<VideoCapabilitiesKHR const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
VideoCapabilitiesKHR & operator=( VideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -92181,17 +95112,17 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkVideoCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoCapabilitiesKHR *>( this );
}
- explicit operator VkVideoCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoCapabilitiesKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -92204,20 +95135,22 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Extent2D const &,
VULKAN_HPP_NAMESPACE::Extent2D const &,
uint32_t const &,
- uint32_t const &>
+ uint32_t const &,
+ VULKAN_HPP_NAMESPACE::ExtensionProperties const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType,
pNext,
- capabilityFlags,
+ flags,
minBitstreamBufferOffsetAlignment,
minBitstreamBufferSizeAlignment,
- videoPictureExtentGranularity,
- minExtent,
- maxExtent,
- maxReferencePicturesSlotsCount,
- maxReferencePicturesActiveCount );
+ pictureAccessGranularity,
+ minCodedExtent,
+ maxCodedExtent,
+ maxDpbSlots,
+ maxActiveReferencePictures,
+ stdHeaderVersion );
}
# endif
@@ -92226,16 +95159,14 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( capabilityFlags == rhs.capabilityFlags ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
( minBitstreamBufferOffsetAlignment == rhs.minBitstreamBufferOffsetAlignment ) &&
- ( minBitstreamBufferSizeAlignment == rhs.minBitstreamBufferSizeAlignment ) &&
- ( videoPictureExtentGranularity == rhs.videoPictureExtentGranularity ) && ( minExtent == rhs.minExtent ) &&
- ( maxExtent == rhs.maxExtent ) &&
- ( maxReferencePicturesSlotsCount == rhs.maxReferencePicturesSlotsCount ) &&
- ( maxReferencePicturesActiveCount == rhs.maxReferencePicturesActiveCount );
+ ( minBitstreamBufferSizeAlignment == rhs.minBitstreamBufferSizeAlignment ) && ( pictureAccessGranularity == rhs.pictureAccessGranularity ) &&
+ ( minCodedExtent == rhs.minCodedExtent ) && ( maxCodedExtent == rhs.maxCodedExtent ) && ( maxDpbSlots == rhs.maxDpbSlots ) &&
+ ( maxActiveReferencePictures == rhs.maxActiveReferencePictures ) && ( stdHeaderVersion == rhs.stdHeaderVersion );
# endif
}
@@ -92246,23 +95177,18 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoCapabilitiesKHR;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::VideoCapabilityFlagsKHR capabilityFlags = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoCapabilitiesKHR;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::VideoCapabilityFlagsKHR flags = {};
VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferOffsetAlignment = {};
VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferSizeAlignment = {};
- VULKAN_HPP_NAMESPACE::Extent2D videoPictureExtentGranularity = {};
- VULKAN_HPP_NAMESPACE::Extent2D minExtent = {};
- VULKAN_HPP_NAMESPACE::Extent2D maxExtent = {};
- uint32_t maxReferencePicturesSlotsCount = {};
- uint32_t maxReferencePicturesActiveCount = {};
+ VULKAN_HPP_NAMESPACE::Extent2D pictureAccessGranularity = {};
+ VULKAN_HPP_NAMESPACE::Extent2D minCodedExtent = {};
+ VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent = {};
+ uint32_t maxDpbSlots = {};
+ uint32_t maxActiveReferencePictures = {};
+ VULKAN_HPP_NAMESPACE::ExtensionProperties stdHeaderVersion = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR ) == sizeof( VkVideoCapabilitiesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::value,
- "VideoCapabilitiesKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eVideoCapabilitiesKHR>
@@ -92280,17 +95206,19 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoCodingControlInfoKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- VideoCodingControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- {}
+ VULKAN_HPP_CONSTEXPR VideoCodingControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- VideoCodingControlInfoKHR( VideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoCodingControlInfoKHR( VideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoCodingControlInfoKHR( VkVideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoCodingControlInfoKHR( *reinterpret_cast<VideoCodingControlInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
VideoCodingControlInfoKHR & operator=( VideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -92308,31 +95236,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoCodingControlInfoKHR &
- setFlags( VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoCodingControlInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoCodingControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoCodingControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoCodingControlInfoKHR *>( this );
}
- explicit operator VkVideoCodingControlInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoCodingControlInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoCodingControlInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -92345,7 +95270,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoCodingControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
@@ -92363,13 +95288,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR ) ==
- sizeof( VkVideoCodingControlInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR>::value,
- "VideoCodingControlInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eVideoCodingControlInfoKHR>
@@ -92379,317 +95297,284 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct VideoDecodeH264CapabilitiesEXT
+ struct VideoDecodeCapabilitiesKHR
{
- using NativeType = VkVideoDecodeH264CapabilitiesEXT;
+ using NativeType = VkVideoDecodeCapabilitiesKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264CapabilitiesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeCapabilitiesKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264CapabilitiesEXT(
- uint32_t maxLevel_ = {},
- VULKAN_HPP_NAMESPACE::Offset2D fieldOffsetGranularity_ = {},
- VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxLevel( maxLevel_ )
- , fieldOffsetGranularity( fieldOffsetGranularity_ )
- , stdExtensionVersion( stdExtensionVersion_ )
- {}
+ VULKAN_HPP_CONSTEXPR VideoDecodeCapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagsKHR flags_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14
- VideoDecodeH264CapabilitiesEXT( VideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoDecodeCapabilitiesKHR( VideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoDecodeH264CapabilitiesEXT( VkVideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : VideoDecodeH264CapabilitiesEXT( *reinterpret_cast<VideoDecodeH264CapabilitiesEXT const *>( &rhs ) )
- {}
+ VideoDecodeCapabilitiesKHR( VkVideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : VideoDecodeCapabilitiesKHR( *reinterpret_cast<VideoDecodeCapabilitiesKHR const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoDecodeH264CapabilitiesEXT &
- operator=( VideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoDecodeCapabilitiesKHR & operator=( VideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoDecodeH264CapabilitiesEXT & operator=( VkVideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VideoDecodeCapabilitiesKHR & operator=( VkVideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesEXT const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR const *>( &rhs );
return *this;
}
- explicit operator VkVideoDecodeH264CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkVideoDecodeH264CapabilitiesEXT *>( this );
+ return *reinterpret_cast<const VkVideoDecodeCapabilitiesKHR *>( this );
}
- explicit operator VkVideoDecodeH264CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkVideoDecodeH264CapabilitiesEXT *>( this );
+ return *reinterpret_cast<VkVideoDecodeCapabilitiesKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- uint32_t const &,
- VULKAN_HPP_NAMESPACE::Offset2D const &,
- VULKAN_HPP_NAMESPACE::ExtensionProperties const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagsKHR const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType, pNext, maxLevel, fieldOffsetGranularity, stdExtensionVersion );
+ return std::tie( sType, pNext, flags );
}
# endif
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( VideoDecodeH264CapabilitiesEXT const & ) const = default;
+ auto operator<=>( VideoDecodeCapabilitiesKHR const & ) const = default;
# else
- bool operator==( VideoDecodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( VideoDecodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxLevel == rhs.maxLevel ) &&
- ( fieldOffsetGranularity == rhs.fieldOffsetGranularity ) &&
- ( stdExtensionVersion == rhs.stdExtensionVersion );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
# endif
}
- bool operator!=( VideoDecodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( VideoDecodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264CapabilitiesEXT;
- void * pNext = {};
- uint32_t maxLevel = {};
- VULKAN_HPP_NAMESPACE::Offset2D fieldOffsetGranularity = {};
- VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeCapabilitiesKHR;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagsKHR flags = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesEXT ) ==
- sizeof( VkVideoDecodeH264CapabilitiesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesEXT>::value,
- "VideoDecodeH264CapabilitiesEXT is not nothrow_move_constructible!" );
template <>
- struct CppType<StructureType, StructureType::eVideoDecodeH264CapabilitiesEXT>
+ struct CppType<StructureType, StructureType::eVideoDecodeCapabilitiesKHR>
{
- using Type = VideoDecodeH264CapabilitiesEXT;
+ using Type = VideoDecodeCapabilitiesKHR;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct VideoDecodeH264DpbSlotInfoEXT
+ struct VideoDecodeH264CapabilitiesEXT
{
- using NativeType = VkVideoDecodeH264DpbSlotInfoEXT;
+ using NativeType = VkVideoDecodeH264CapabilitiesEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264DpbSlotInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264CapabilitiesEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoDecodeH264DpbSlotInfoEXT(
- const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo_ = {} ) VULKAN_HPP_NOEXCEPT
- : pStdReferenceInfo( pStdReferenceInfo_ )
- {}
-
- VULKAN_HPP_CONSTEXPR
- VideoDecodeH264DpbSlotInfoEXT( VideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- VideoDecodeH264DpbSlotInfoEXT( VkVideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : VideoDecodeH264DpbSlotInfoEXT( *reinterpret_cast<VideoDecodeH264DpbSlotInfoEXT const *>( &rhs ) )
- {}
-# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
-
- VideoDecodeH264DpbSlotInfoEXT &
- operator=( VideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- VideoDecodeH264DpbSlotInfoEXT & operator=( VkVideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR VideoDecodeH264CapabilitiesEXT( StdVideoH264LevelIdc maxLevelIdc_ = {},
+ VULKAN_HPP_NAMESPACE::Offset2D fieldOffsetGranularity_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxLevelIdc( maxLevelIdc_ )
+ , fieldOffsetGranularity( fieldOffsetGranularity_ )
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoEXT const *>( &rhs );
- return *this;
}
-# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264DpbSlotInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR VideoDecodeH264CapabilitiesEXT( VideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ VideoDecodeH264CapabilitiesEXT( VkVideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : VideoDecodeH264CapabilitiesEXT( *reinterpret_cast<VideoDecodeH264CapabilitiesEXT const *>( &rhs ) )
{
- pNext = pNext_;
- return *this;
}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264DpbSlotInfoEXT &
- setPStdReferenceInfo( const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
+ VideoDecodeH264CapabilitiesEXT & operator=( VideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ VideoDecodeH264CapabilitiesEXT & operator=( VkVideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pStdReferenceInfo = pStdReferenceInfo_;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesEXT const *>( &rhs );
return *this;
}
-# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoDecodeH264DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeH264CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkVideoDecodeH264DpbSlotInfoEXT *>( this );
+ return *reinterpret_cast<const VkVideoDecodeH264CapabilitiesEXT *>( this );
}
- explicit operator VkVideoDecodeH264DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeH264CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkVideoDecodeH264DpbSlotInfoEXT *>( this );
+ return *reinterpret_cast<VkVideoDecodeH264CapabilitiesEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- const StdVideoDecodeH264ReferenceInfo * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, StdVideoH264LevelIdc const &, VULKAN_HPP_NAMESPACE::Offset2D const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType, pNext, pStdReferenceInfo );
+ return std::tie( sType, pNext, maxLevelIdc, fieldOffsetGranularity );
}
# endif
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( VideoDecodeH264DpbSlotInfoEXT const & ) const = default;
-# else
- bool operator==( VideoDecodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ std::strong_ordering operator<=>( VideoDecodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
- return this->reflect() == rhs.reflect();
-# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo );
-# endif
+ if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
+ return cmp;
+ if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
+ return cmp;
+ if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ); cmp != 0 )
+ return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+ if ( auto cmp = fieldOffsetGranularity <=> rhs.fieldOffsetGranularity; cmp != 0 )
+ return cmp;
+
+ return std::strong_ordering::equivalent;
}
+# endif
- bool operator!=( VideoDecodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( VideoDecodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ) == 0 ) &&
+ ( fieldOffsetGranularity == rhs.fieldOffsetGranularity );
+ }
+
+ bool operator!=( VideoDecodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
-# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264DpbSlotInfoEXT;
- const void * pNext = {};
- const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264CapabilitiesEXT;
+ void * pNext = {};
+ StdVideoH264LevelIdc maxLevelIdc = {};
+ VULKAN_HPP_NAMESPACE::Offset2D fieldOffsetGranularity = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoEXT ) ==
- sizeof( VkVideoDecodeH264DpbSlotInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoEXT>::value,
- "VideoDecodeH264DpbSlotInfoEXT is not nothrow_move_constructible!" );
template <>
- struct CppType<StructureType, StructureType::eVideoDecodeH264DpbSlotInfoEXT>
+ struct CppType<StructureType, StructureType::eVideoDecodeH264CapabilitiesEXT>
{
- using Type = VideoDecodeH264DpbSlotInfoEXT;
+ using Type = VideoDecodeH264CapabilitiesEXT;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct VideoDecodeH264MvcEXT
+ struct VideoDecodeH264DpbSlotInfoEXT
{
- using NativeType = VkVideoDecodeH264MvcEXT;
+ using NativeType = VkVideoDecodeH264DpbSlotInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264MvcEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264DpbSlotInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoDecodeH264MvcEXT( const StdVideoDecodeH264Mvc * pStdMvc_ = {} ) VULKAN_HPP_NOEXCEPT
- : pStdMvc( pStdMvc_ )
- {}
+ VULKAN_HPP_CONSTEXPR VideoDecodeH264DpbSlotInfoEXT( const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pStdReferenceInfo( pStdReferenceInfo_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR VideoDecodeH264MvcEXT( VideoDecodeH264MvcEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoDecodeH264DpbSlotInfoEXT( VideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoDecodeH264MvcEXT( VkVideoDecodeH264MvcEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : VideoDecodeH264MvcEXT( *reinterpret_cast<VideoDecodeH264MvcEXT const *>( &rhs ) )
- {}
+ VideoDecodeH264DpbSlotInfoEXT( VkVideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : VideoDecodeH264DpbSlotInfoEXT( *reinterpret_cast<VideoDecodeH264DpbSlotInfoEXT const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoDecodeH264MvcEXT & operator=( VideoDecodeH264MvcEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoDecodeH264DpbSlotInfoEXT & operator=( VideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoDecodeH264MvcEXT & operator=( VkVideoDecodeH264MvcEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VideoDecodeH264DpbSlotInfoEXT & operator=( VkVideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264MvcEXT const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoEXT const *>( &rhs );
return *this;
}
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264MvcEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264DpbSlotInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264MvcEXT &
- setPStdMvc( const StdVideoDecodeH264Mvc * pStdMvc_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264DpbSlotInfoEXT &
+ setPStdReferenceInfo( const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
{
- pStdMvc = pStdMvc_;
+ pStdReferenceInfo = pStdReferenceInfo_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoDecodeH264MvcEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeH264DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkVideoDecodeH264MvcEXT *>( this );
+ return *reinterpret_cast<const VkVideoDecodeH264DpbSlotInfoEXT *>( this );
}
- explicit operator VkVideoDecodeH264MvcEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeH264DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkVideoDecodeH264MvcEXT *>( this );
+ return *reinterpret_cast<VkVideoDecodeH264DpbSlotInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const StdVideoDecodeH264Mvc * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const StdVideoDecodeH264ReferenceInfo * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType, pNext, pStdMvc );
+ return std::tie( sType, pNext, pStdReferenceInfo );
}
# endif
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( VideoDecodeH264MvcEXT const & ) const = default;
+ auto operator<=>( VideoDecodeH264DpbSlotInfoEXT const & ) const = default;
# else
- bool operator==( VideoDecodeH264MvcEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( VideoDecodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdMvc == rhs.pStdMvc );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo );
# endif
}
- bool operator!=( VideoDecodeH264MvcEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( VideoDecodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264MvcEXT;
- const void * pNext = {};
- const StdVideoDecodeH264Mvc * pStdMvc = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264DpbSlotInfoEXT;
+ const void * pNext = {};
+ const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264MvcEXT ) == sizeof( VkVideoDecodeH264MvcEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264MvcEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264MvcEXT>::value,
- "VideoDecodeH264MvcEXT is not nothrow_move_constructible!" );
template <>
- struct CppType<StructureType, StructureType::eVideoDecodeH264MvcEXT>
+ struct CppType<StructureType, StructureType::eVideoDecodeH264DpbSlotInfoEXT>
{
- using Type = VideoDecodeH264MvcEXT;
+ using Type = VideoDecodeH264DpbSlotInfoEXT;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
@@ -92703,33 +95588,34 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureInfoEXT( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_ = {},
- uint32_t slicesCount_ = {},
- const uint32_t * pSlicesDataOffsets_ = {} ) VULKAN_HPP_NOEXCEPT
- : pStdPictureInfo( pStdPictureInfo_ )
- , slicesCount( slicesCount_ )
- , pSlicesDataOffsets( pSlicesDataOffsets_ )
- {}
+ uint32_t sliceCount_ = {},
+ const uint32_t * pSliceOffsets_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pStdPictureInfo( pStdPictureInfo_ )
+ , sliceCount( sliceCount_ )
+ , pSliceOffsets( pSliceOffsets_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- VideoDecodeH264PictureInfoEXT( VideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureInfoEXT( VideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeH264PictureInfoEXT( VkVideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeH264PictureInfoEXT( *reinterpret_cast<VideoDecodeH264PictureInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- VideoDecodeH264PictureInfoEXT(
- const StdVideoDecodeH264PictureInfo * pStdPictureInfo_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & slicesDataOffsets_ )
- : pStdPictureInfo( pStdPictureInfo_ )
- , slicesCount( static_cast<uint32_t>( slicesDataOffsets_.size() ) )
- , pSlicesDataOffsets( slicesDataOffsets_.data() )
- {}
+ VideoDecodeH264PictureInfoEXT( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & sliceOffsets_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ ), pStdPictureInfo( pStdPictureInfo_ ), sliceCount( static_cast<uint32_t>( sliceOffsets_.size() ) ), pSliceOffsets( sliceOffsets_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoDecodeH264PictureInfoEXT &
- operator=( VideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoDecodeH264PictureInfoEXT & operator=( VideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeH264PictureInfoEXT & operator=( VkVideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -92744,48 +95630,45 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoEXT &
- setPStdPictureInfo( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoEXT & setPStdPictureInfo( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT
{
pStdPictureInfo = pStdPictureInfo_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoEXT & setSlicesCount( uint32_t slicesCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoEXT & setSliceCount( uint32_t sliceCount_ ) VULKAN_HPP_NOEXCEPT
{
- slicesCount = slicesCount_;
+ sliceCount = sliceCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoEXT &
- setPSlicesDataOffsets( const uint32_t * pSlicesDataOffsets_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoEXT & setPSliceOffsets( const uint32_t * pSliceOffsets_ ) VULKAN_HPP_NOEXCEPT
{
- pSlicesDataOffsets = pSlicesDataOffsets_;
+ pSliceOffsets = pSliceOffsets_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- VideoDecodeH264PictureInfoEXT & setSlicesDataOffsets(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & slicesDataOffsets_ ) VULKAN_HPP_NOEXCEPT
+ VideoDecodeH264PictureInfoEXT & setSliceOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & sliceOffsets_ ) VULKAN_HPP_NOEXCEPT
{
- slicesCount = static_cast<uint32_t>( slicesDataOffsets_.size() );
- pSlicesDataOffsets = slicesDataOffsets_.data();
+ sliceCount = static_cast<uint32_t>( sliceOffsets_.size() );
+ pSliceOffsets = sliceOffsets_.data();
return *this;
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoDecodeH264PictureInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeH264PictureInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeH264PictureInfoEXT *>( this );
}
- explicit operator VkVideoDecodeH264PictureInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeH264PictureInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeH264PictureInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -92797,7 +95680,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType, pNext, pStdPictureInfo, slicesCount, pSlicesDataOffsets );
+ return std::tie( sType, pNext, pStdPictureInfo, sliceCount, pSliceOffsets );
}
# endif
@@ -92806,11 +95689,11 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoDecodeH264PictureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) &&
- ( slicesCount == rhs.slicesCount ) && ( pSlicesDataOffsets == rhs.pSlicesDataOffsets );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) && ( sliceCount == rhs.sliceCount ) &&
+ ( pSliceOffsets == rhs.pSliceOffsets );
# endif
}
@@ -92821,20 +95704,12 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264PictureInfoEXT;
- const void * pNext = {};
- const StdVideoDecodeH264PictureInfo * pStdPictureInfo = {};
- uint32_t slicesCount = {};
- const uint32_t * pSlicesDataOffsets = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264PictureInfoEXT;
+ const void * pNext = {};
+ const StdVideoDecodeH264PictureInfo * pStdPictureInfo = {};
+ uint32_t sliceCount = {};
+ const uint32_t * pSliceOffsets = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoEXT ) ==
- sizeof( VkVideoDecodeH264PictureInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoEXT>::value,
- "VideoDecodeH264PictureInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eVideoDecodeH264PictureInfoEXT>
@@ -92844,77 +95719,79 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct VideoDecodeH264ProfileEXT
+ struct VideoDecodeH264ProfileInfoEXT
{
- using NativeType = VkVideoDecodeH264ProfileEXT;
+ using NativeType = VkVideoDecodeH264ProfileInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264ProfileEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264ProfileInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoDecodeH264ProfileEXT(
- StdVideoH264ProfileIdc stdProfileIdc_ = {},
- VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagsEXT pictureLayout_ = {} ) VULKAN_HPP_NOEXCEPT
- : stdProfileIdc( stdProfileIdc_ )
+ VULKAN_HPP_CONSTEXPR VideoDecodeH264ProfileInfoEXT( StdVideoH264ProfileIdc stdProfileIdc_ = {},
+ VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsEXT pictureLayout_ =
+ VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsEXT::eProgressive,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , stdProfileIdc( stdProfileIdc_ )
, pictureLayout( pictureLayout_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- VideoDecodeH264ProfileEXT( VideoDecodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoDecodeH264ProfileInfoEXT( VideoDecodeH264ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoDecodeH264ProfileEXT( VkVideoDecodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : VideoDecodeH264ProfileEXT( *reinterpret_cast<VideoDecodeH264ProfileEXT const *>( &rhs ) )
- {}
+ VideoDecodeH264ProfileInfoEXT( VkVideoDecodeH264ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : VideoDecodeH264ProfileInfoEXT( *reinterpret_cast<VideoDecodeH264ProfileInfoEXT const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoDecodeH264ProfileEXT & operator=( VideoDecodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoDecodeH264ProfileInfoEXT & operator=( VideoDecodeH264ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoDecodeH264ProfileEXT & operator=( VkVideoDecodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VideoDecodeH264ProfileInfoEXT & operator=( VkVideoDecodeH264ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileEXT const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoEXT const *>( &rhs );
return *this;
}
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileEXT &
- setStdProfileIdc( StdVideoH264ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileInfoEXT & setStdProfileIdc( StdVideoH264ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT
{
stdProfileIdc = stdProfileIdc_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileEXT &
- setPictureLayout( VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagsEXT pictureLayout_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileInfoEXT &
+ setPictureLayout( VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsEXT pictureLayout_ ) VULKAN_HPP_NOEXCEPT
{
pictureLayout = pictureLayout_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoDecodeH264ProfileEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeH264ProfileInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkVideoDecodeH264ProfileEXT *>( this );
+ return *reinterpret_cast<const VkVideoDecodeH264ProfileInfoEXT *>( this );
}
- explicit operator VkVideoDecodeH264ProfileEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeH264ProfileInfoEXT &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkVideoDecodeH264ProfileEXT *>( this );
+ return *reinterpret_cast<VkVideoDecodeH264ProfileInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
const void * const &,
StdVideoH264ProfileIdc const &,
- VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagsEXT const &>
+ VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsEXT const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -92923,7 +95800,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- std::strong_ordering operator<=>( VideoDecodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ std::strong_ordering operator<=>( VideoDecodeH264ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
return cmp;
@@ -92938,284 +95815,145 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif
- bool operator==( VideoDecodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( VideoDecodeH264ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ) == 0 ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ) == 0 ) &&
( pictureLayout == rhs.pictureLayout );
}
- bool operator!=( VideoDecodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( VideoDecodeH264ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264ProfileEXT;
- const void * pNext = {};
- StdVideoH264ProfileIdc stdProfileIdc = {};
- VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagsEXT pictureLayout = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264ProfileInfoEXT;
+ const void * pNext = {};
+ StdVideoH264ProfileIdc stdProfileIdc = {};
+ VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsEXT pictureLayout = VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsEXT::eProgressive;
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileEXT ) ==
- sizeof( VkVideoDecodeH264ProfileEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileEXT>::value,
- "VideoDecodeH264ProfileEXT is not nothrow_move_constructible!" );
template <>
- struct CppType<StructureType, StructureType::eVideoDecodeH264ProfileEXT>
+ struct CppType<StructureType, StructureType::eVideoDecodeH264ProfileInfoEXT>
{
- using Type = VideoDecodeH264ProfileEXT;
+ using Type = VideoDecodeH264ProfileInfoEXT;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct VideoDecodeH264SessionCreateInfoEXT
+ struct VideoDecodeH264SessionParametersAddInfoEXT
{
- using NativeType = VkVideoDecodeH264SessionCreateInfoEXT;
+ using NativeType = VkVideoDecodeH264SessionParametersAddInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eVideoDecodeH264SessionCreateInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264SessionParametersAddInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionCreateInfoEXT(
- VULKAN_HPP_NAMESPACE::VideoDecodeH264CreateFlagsEXT flags_ = {},
- const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , pStdExtensionVersion( pStdExtensionVersion_ )
- {}
-
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionCreateInfoEXT( VideoDecodeH264SessionCreateInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
-
- VideoDecodeH264SessionCreateInfoEXT( VkVideoDecodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : VideoDecodeH264SessionCreateInfoEXT( *reinterpret_cast<VideoDecodeH264SessionCreateInfoEXT const *>( &rhs ) )
- {}
-# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
-
- VideoDecodeH264SessionCreateInfoEXT &
- operator=( VideoDecodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- VideoDecodeH264SessionCreateInfoEXT &
- operator=( VkVideoDecodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionCreateInfoEXT const *>( &rhs );
- return *this;
- }
-
-# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionCreateInfoEXT &
- setFlags( VULKAN_HPP_NAMESPACE::VideoDecodeH264CreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
- {
- flags = flags_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionCreateInfoEXT & setPStdExtensionVersion(
- const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoEXT( uint32_t stdSPSCount_ = {},
+ const StdVideoH264SequenceParameterSet * pStdSPSs_ = {},
+ uint32_t stdPPSCount_ = {},
+ const StdVideoH264PictureParameterSet * pStdPPSs_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , stdSPSCount( stdSPSCount_ )
+ , pStdSPSs( pStdSPSs_ )
+ , stdPPSCount( stdPPSCount_ )
+ , pStdPPSs( pStdPPSs_ )
{
- pStdExtensionVersion = pStdExtensionVersion_;
- return *this;
}
-# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoDecodeH264SessionCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkVideoDecodeH264SessionCreateInfoEXT *>( this );
- }
+ VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoEXT( VideoDecodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- explicit operator VkVideoDecodeH264SessionCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ VideoDecodeH264SessionParametersAddInfoEXT( VkVideoDecodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : VideoDecodeH264SessionParametersAddInfoEXT( *reinterpret_cast<VideoDecodeH264SessionParametersAddInfoEXT const *>( &rhs ) )
{
- return *reinterpret_cast<VkVideoDecodeH264SessionCreateInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
-# if 14 <= VULKAN_HPP_CPP_VERSION
- auto
-# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::VideoDecodeH264CreateFlagsEXT const &,
- const VULKAN_HPP_NAMESPACE::ExtensionProperties * const &>
-# endif
- reflect() const VULKAN_HPP_NOEXCEPT
- {
- return std::tie( sType, pNext, flags, pStdExtensionVersion );
- }
-# endif
-
-# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( VideoDecodeH264SessionCreateInfoEXT const & ) const = default;
-# else
- bool operator==( VideoDecodeH264SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
- return this->reflect() == rhs.reflect();
-# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( pStdExtensionVersion == rhs.pStdExtensionVersion );
-# endif
- }
-
- bool operator!=( VideoDecodeH264SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ VideoDecodeH264SessionParametersAddInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & stdSPSs_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const & stdPPSs_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , stdSPSCount( static_cast<uint32_t>( stdSPSs_.size() ) )
+ , pStdSPSs( stdSPSs_.data() )
+ , stdPPSCount( static_cast<uint32_t>( stdPPSs_.size() ) )
+ , pStdPPSs( stdPPSs_.data() )
{
- return !operator==( rhs );
}
-# endif
-
- public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264SessionCreateInfoEXT;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::VideoDecodeH264CreateFlagsEXT flags = {};
- const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionCreateInfoEXT ) ==
- sizeof( VkVideoDecodeH264SessionCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionCreateInfoEXT>::value,
- "VideoDecodeH264SessionCreateInfoEXT is not nothrow_move_constructible!" );
-
- template <>
- struct CppType<StructureType, StructureType::eVideoDecodeH264SessionCreateInfoEXT>
- {
- using Type = VideoDecodeH264SessionCreateInfoEXT;
- };
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct VideoDecodeH264SessionParametersAddInfoEXT
- {
- using NativeType = VkVideoDecodeH264SessionParametersAddInfoEXT;
-
- static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eVideoDecodeH264SessionParametersAddInfoEXT;
-
-# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoEXT(
- uint32_t spsStdCount_ = {},
- const StdVideoH264SequenceParameterSet * pSpsStd_ = {},
- uint32_t ppsStdCount_ = {},
- const StdVideoH264PictureParameterSet * pPpsStd_ = {} ) VULKAN_HPP_NOEXCEPT
- : spsStdCount( spsStdCount_ )
- , pSpsStd( pSpsStd_ )
- , ppsStdCount( ppsStdCount_ )
- , pPpsStd( pPpsStd_ )
- {}
-
- VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoEXT(
- VideoDecodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- VideoDecodeH264SessionParametersAddInfoEXT( VkVideoDecodeH264SessionParametersAddInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : VideoDecodeH264SessionParametersAddInfoEXT(
- *reinterpret_cast<VideoDecodeH264SessionParametersAddInfoEXT const *>( &rhs ) )
- {}
-
-# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- VideoDecodeH264SessionParametersAddInfoEXT(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & spsStd_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const & ppsStd_ = {} )
- : spsStdCount( static_cast<uint32_t>( spsStd_.size() ) )
- , pSpsStd( spsStd_.data() )
- , ppsStdCount( static_cast<uint32_t>( ppsStd_.size() ) )
- , pPpsStd( ppsStd_.data() )
- {}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoDecodeH264SessionParametersAddInfoEXT &
- operator=( VideoDecodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoDecodeH264SessionParametersAddInfoEXT & operator=( VideoDecodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoDecodeH264SessionParametersAddInfoEXT &
- operator=( VkVideoDecodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VideoDecodeH264SessionParametersAddInfoEXT & operator=( VkVideoDecodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT const *>( &rhs );
return *this;
}
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT &
- setSpsStdCount( uint32_t spsStdCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT & setStdSPSCount( uint32_t stdSPSCount_ ) VULKAN_HPP_NOEXCEPT
{
- spsStdCount = spsStdCount_;
+ stdSPSCount = stdSPSCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT &
- setPSpsStd( const StdVideoH264SequenceParameterSet * pSpsStd_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT & setPStdSPSs( const StdVideoH264SequenceParameterSet * pStdSPSs_ ) VULKAN_HPP_NOEXCEPT
{
- pSpsStd = pSpsStd_;
+ pStdSPSs = pStdSPSs_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoDecodeH264SessionParametersAddInfoEXT &
- setSpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & spsStd_ )
- VULKAN_HPP_NOEXCEPT
+ setStdSPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & stdSPSs_ ) VULKAN_HPP_NOEXCEPT
{
- spsStdCount = static_cast<uint32_t>( spsStd_.size() );
- pSpsStd = spsStd_.data();
+ stdSPSCount = static_cast<uint32_t>( stdSPSs_.size() );
+ pStdSPSs = stdSPSs_.data();
return *this;
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT &
- setPpsStdCount( uint32_t ppsStdCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT & setStdPPSCount( uint32_t stdPPSCount_ ) VULKAN_HPP_NOEXCEPT
{
- ppsStdCount = ppsStdCount_;
+ stdPPSCount = stdPPSCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT &
- setPPpsStd( const StdVideoH264PictureParameterSet * pPpsStd_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT & setPStdPPSs( const StdVideoH264PictureParameterSet * pStdPPSs_ ) VULKAN_HPP_NOEXCEPT
{
- pPpsStd = pPpsStd_;
+ pStdPPSs = pStdPPSs_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoDecodeH264SessionParametersAddInfoEXT &
- setPpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const & ppsStd_ )
- VULKAN_HPP_NOEXCEPT
+ setStdPPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const & stdPPSs_ ) VULKAN_HPP_NOEXCEPT
{
- ppsStdCount = static_cast<uint32_t>( ppsStd_.size() );
- pPpsStd = ppsStd_.data();
+ stdPPSCount = static_cast<uint32_t>( stdPPSs_.size() );
+ pStdPPSs = stdPPSs_.data();
return *this;
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoDecodeH264SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeH264SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeH264SessionParametersAddInfoEXT *>( this );
}
- explicit operator VkVideoDecodeH264SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeH264SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeH264SessionParametersAddInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -93228,7 +95966,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType, pNext, spsStdCount, pSpsStd, ppsStdCount, pPpsStd );
+ return std::tie( sType, pNext, stdSPSCount, pStdSPSs, stdPPSCount, pStdPPSs );
}
# endif
@@ -93237,11 +95975,11 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoDecodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( spsStdCount == rhs.spsStdCount ) &&
- ( pSpsStd == rhs.pSpsStd ) && ( ppsStdCount == rhs.ppsStdCount ) && ( pPpsStd == rhs.pPpsStd );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stdSPSCount == rhs.stdSPSCount ) && ( pStdSPSs == rhs.pStdSPSs ) &&
+ ( stdPPSCount == rhs.stdPPSCount ) && ( pStdPPSs == rhs.pStdPPSs );
# endif
}
@@ -93254,20 +95992,11 @@ namespace VULKAN_HPP_NAMESPACE
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264SessionParametersAddInfoEXT;
const void * pNext = {};
- uint32_t spsStdCount = {};
- const StdVideoH264SequenceParameterSet * pSpsStd = {};
- uint32_t ppsStdCount = {};
- const StdVideoH264PictureParameterSet * pPpsStd = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT ) ==
- sizeof( VkVideoDecodeH264SessionParametersAddInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT>::value,
- "VideoDecodeH264SessionParametersAddInfoEXT is not nothrow_move_constructible!" );
+ uint32_t stdSPSCount = {};
+ const StdVideoH264SequenceParameterSet * pStdSPSs = {};
+ uint32_t stdPPSCount = {};
+ const StdVideoH264PictureParameterSet * pStdPPSs = {};
+ };
template <>
struct CppType<StructureType, StructureType::eVideoDecodeH264SessionParametersAddInfoEXT>
@@ -93282,81 +96011,76 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkVideoDecodeH264SessionParametersCreateInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eVideoDecodeH264SessionParametersCreateInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264SessionParametersCreateInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersCreateInfoEXT(
- uint32_t maxSpsStdCount_ = {},
- uint32_t maxPpsStdCount_ = {},
- const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ = {} )
- VULKAN_HPP_NOEXCEPT
- : maxSpsStdCount( maxSpsStdCount_ )
- , maxPpsStdCount( maxPpsStdCount_ )
+ VULKAN_HPP_CONSTEXPR
+ VideoDecodeH264SessionParametersCreateInfoEXT( uint32_t maxStdSPSCount_ = {},
+ uint32_t maxStdPPSCount_ = {},
+ const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxStdSPSCount( maxStdSPSCount_ )
+ , maxStdPPSCount( maxStdPPSCount_ )
, pParametersAddInfo( pParametersAddInfo_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersCreateInfoEXT(
- VideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ VideoDecodeH264SessionParametersCreateInfoEXT( VideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoDecodeH264SessionParametersCreateInfoEXT( VkVideoDecodeH264SessionParametersCreateInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : VideoDecodeH264SessionParametersCreateInfoEXT(
- *reinterpret_cast<VideoDecodeH264SessionParametersCreateInfoEXT const *>( &rhs ) )
- {}
+ VideoDecodeH264SessionParametersCreateInfoEXT( VkVideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : VideoDecodeH264SessionParametersCreateInfoEXT( *reinterpret_cast<VideoDecodeH264SessionParametersCreateInfoEXT const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoDecodeH264SessionParametersCreateInfoEXT &
- operator=( VideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoDecodeH264SessionParametersCreateInfoEXT & operator=( VideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoDecodeH264SessionParametersCreateInfoEXT &
- operator=( VkVideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VideoDecodeH264SessionParametersCreateInfoEXT & operator=( VkVideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoEXT const *>( &rhs );
return *this;
}
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoEXT &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoEXT &
- setMaxSpsStdCount( uint32_t maxSpsStdCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoEXT & setMaxStdSPSCount( uint32_t maxStdSPSCount_ ) VULKAN_HPP_NOEXCEPT
{
- maxSpsStdCount = maxSpsStdCount_;
+ maxStdSPSCount = maxStdSPSCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoEXT &
- setMaxPpsStdCount( uint32_t maxPpsStdCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoEXT & setMaxStdPPSCount( uint32_t maxStdPPSCount_ ) VULKAN_HPP_NOEXCEPT
{
- maxPpsStdCount = maxPpsStdCount_;
+ maxStdPPSCount = maxStdPPSCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoEXT & setPParametersAddInfo(
- const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoEXT &
+ setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT
{
pParametersAddInfo = pParametersAddInfo_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoDecodeH264SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeH264SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeH264SessionParametersCreateInfoEXT *>( this );
}
- explicit operator VkVideoDecodeH264SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeH264SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeH264SessionParametersCreateInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -93368,7 +96092,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType, pNext, maxSpsStdCount, maxPpsStdCount, pParametersAddInfo );
+ return std::tie( sType, pNext, maxStdSPSCount, maxStdPPSCount, pParametersAddInfo );
}
# endif
@@ -93377,11 +96101,11 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSpsStdCount == rhs.maxSpsStdCount ) &&
- ( maxPpsStdCount == rhs.maxPpsStdCount ) && ( pParametersAddInfo == rhs.pParametersAddInfo );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxStdSPSCount == rhs.maxStdSPSCount ) && ( maxStdPPSCount == rhs.maxStdPPSCount ) &&
+ ( pParametersAddInfo == rhs.pParametersAddInfo );
# endif
}
@@ -93392,21 +96116,12 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264SessionParametersCreateInfoEXT;
- const void * pNext = {};
- uint32_t maxSpsStdCount = {};
- uint32_t maxPpsStdCount = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264SessionParametersCreateInfoEXT;
+ const void * pNext = {};
+ uint32_t maxStdSPSCount = {};
+ uint32_t maxStdPPSCount = {};
const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT * pParametersAddInfo = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoEXT ) ==
- sizeof( VkVideoDecodeH264SessionParametersCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoEXT>::value,
- "VideoDecodeH264SessionParametersCreateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eVideoDecodeH264SessionParametersCreateInfoEXT>
@@ -93424,22 +96139,21 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265CapabilitiesEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265CapabilitiesEXT(
- uint32_t maxLevel_ = {}, VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT
- : maxLevel( maxLevel_ )
- , stdExtensionVersion( stdExtensionVersion_ )
- {}
+ VULKAN_HPP_CONSTEXPR VideoDecodeH265CapabilitiesEXT( StdVideoH265LevelIdc maxLevelIdc_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxLevelIdc( maxLevelIdc_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14
- VideoDecodeH265CapabilitiesEXT( VideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoDecodeH265CapabilitiesEXT( VideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeH265CapabilitiesEXT( VkVideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeH265CapabilitiesEXT( *reinterpret_cast<VideoDecodeH265CapabilitiesEXT const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoDecodeH265CapabilitiesEXT &
- operator=( VideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoDecodeH265CapabilitiesEXT & operator=( VideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeH265CapabilitiesEXT & operator=( VkVideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -93447,64 +96161,57 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkVideoDecodeH265CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeH265CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeH265CapabilitiesEXT *>( this );
}
- explicit operator VkVideoDecodeH265CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeH265CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeH265CapabilitiesEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- uint32_t const &,
- VULKAN_HPP_NAMESPACE::ExtensionProperties const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, StdVideoH265LevelIdc const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType, pNext, maxLevel, stdExtensionVersion );
+ return std::tie( sType, pNext, maxLevelIdc );
}
# endif
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( VideoDecodeH265CapabilitiesEXT const & ) const = default;
-# else
+ std::strong_ordering operator<=>( VideoDecodeH265CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
+ return cmp;
+ if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
+ return cmp;
+ if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ); cmp != 0 )
+ return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+
+ return std::strong_ordering::equivalent;
+ }
+# endif
+
bool operator==( VideoDecodeH265CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
- return this->reflect() == rhs.reflect();
-# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxLevel == rhs.maxLevel ) &&
- ( stdExtensionVersion == rhs.stdExtensionVersion );
-# endif
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ) == 0 );
}
bool operator!=( VideoDecodeH265CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
-# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265CapabilitiesEXT;
- void * pNext = {};
- uint32_t maxLevel = {};
- VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265CapabilitiesEXT;
+ void * pNext = {};
+ StdVideoH265LevelIdc maxLevelIdc = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesEXT ) ==
- sizeof( VkVideoDecodeH265CapabilitiesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesEXT>::value,
- "VideoDecodeH265CapabilitiesEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eVideoDecodeH265CapabilitiesEXT>
@@ -93522,21 +96229,22 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265DpbSlotInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoDecodeH265DpbSlotInfoEXT(
- const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo_ = {} ) VULKAN_HPP_NOEXCEPT
- : pStdReferenceInfo( pStdReferenceInfo_ )
- {}
+ VULKAN_HPP_CONSTEXPR VideoDecodeH265DpbSlotInfoEXT( const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pStdReferenceInfo( pStdReferenceInfo_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- VideoDecodeH265DpbSlotInfoEXT( VideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoDecodeH265DpbSlotInfoEXT( VideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeH265DpbSlotInfoEXT( VkVideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeH265DpbSlotInfoEXT( *reinterpret_cast<VideoDecodeH265DpbSlotInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoDecodeH265DpbSlotInfoEXT &
- operator=( VideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoDecodeH265DpbSlotInfoEXT & operator=( VideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeH265DpbSlotInfoEXT & operator=( VkVideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -93552,30 +96260,28 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265DpbSlotInfoEXT &
- setPStdReferenceInfo( const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
+ setPStdReferenceInfo( const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
{
pStdReferenceInfo = pStdReferenceInfo_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoDecodeH265DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeH265DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeH265DpbSlotInfoEXT *>( this );
}
- explicit operator VkVideoDecodeH265DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeH265DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeH265DpbSlotInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- const StdVideoDecodeH265ReferenceInfo * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const StdVideoDecodeH265ReferenceInfo * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -93588,7 +96294,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoDecodeH265DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo );
@@ -93606,14 +96312,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoEXT ) ==
- sizeof( VkVideoDecodeH265DpbSlotInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoEXT>::value,
- "VideoDecodeH265DpbSlotInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eVideoDecodeH265DpbSlotInfoEXT>
@@ -93632,33 +96330,34 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoDecodeH265PictureInfoEXT( StdVideoDecodeH265PictureInfo * pStdPictureInfo_ = {},
- uint32_t slicesCount_ = {},
- const uint32_t * pSlicesDataOffsets_ = {} ) VULKAN_HPP_NOEXCEPT
- : pStdPictureInfo( pStdPictureInfo_ )
- , slicesCount( slicesCount_ )
- , pSlicesDataOffsets( pSlicesDataOffsets_ )
- {}
+ uint32_t sliceCount_ = {},
+ const uint32_t * pSliceOffsets_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pStdPictureInfo( pStdPictureInfo_ )
+ , sliceCount( sliceCount_ )
+ , pSliceOffsets( pSliceOffsets_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- VideoDecodeH265PictureInfoEXT( VideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoDecodeH265PictureInfoEXT( VideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeH265PictureInfoEXT( VkVideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoDecodeH265PictureInfoEXT( *reinterpret_cast<VideoDecodeH265PictureInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- VideoDecodeH265PictureInfoEXT(
- StdVideoDecodeH265PictureInfo * pStdPictureInfo_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & slicesDataOffsets_ )
- : pStdPictureInfo( pStdPictureInfo_ )
- , slicesCount( static_cast<uint32_t>( slicesDataOffsets_.size() ) )
- , pSlicesDataOffsets( slicesDataOffsets_.data() )
- {}
+ VideoDecodeH265PictureInfoEXT( StdVideoDecodeH265PictureInfo * pStdPictureInfo_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & sliceOffsets_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ ), pStdPictureInfo( pStdPictureInfo_ ), sliceCount( static_cast<uint32_t>( sliceOffsets_.size() ) ), pSliceOffsets( sliceOffsets_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoDecodeH265PictureInfoEXT &
- operator=( VideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoDecodeH265PictureInfoEXT & operator=( VideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoDecodeH265PictureInfoEXT & operator=( VkVideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -93673,48 +96372,45 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoEXT &
- setPStdPictureInfo( StdVideoDecodeH265PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoEXT & setPStdPictureInfo( StdVideoDecodeH265PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT
{
pStdPictureInfo = pStdPictureInfo_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoEXT & setSlicesCount( uint32_t slicesCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoEXT & setSliceCount( uint32_t sliceCount_ ) VULKAN_HPP_NOEXCEPT
{
- slicesCount = slicesCount_;
+ sliceCount = sliceCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoEXT &
- setPSlicesDataOffsets( const uint32_t * pSlicesDataOffsets_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoEXT & setPSliceOffsets( const uint32_t * pSliceOffsets_ ) VULKAN_HPP_NOEXCEPT
{
- pSlicesDataOffsets = pSlicesDataOffsets_;
+ pSliceOffsets = pSliceOffsets_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- VideoDecodeH265PictureInfoEXT & setSlicesDataOffsets(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & slicesDataOffsets_ ) VULKAN_HPP_NOEXCEPT
+ VideoDecodeH265PictureInfoEXT & setSliceOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & sliceOffsets_ ) VULKAN_HPP_NOEXCEPT
{
- slicesCount = static_cast<uint32_t>( slicesDataOffsets_.size() );
- pSlicesDataOffsets = slicesDataOffsets_.data();
+ sliceCount = static_cast<uint32_t>( sliceOffsets_.size() );
+ pSliceOffsets = sliceOffsets_.data();
return *this;
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoDecodeH265PictureInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeH265PictureInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeH265PictureInfoEXT *>( this );
}
- explicit operator VkVideoDecodeH265PictureInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeH265PictureInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeH265PictureInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -93726,7 +96422,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType, pNext, pStdPictureInfo, slicesCount, pSlicesDataOffsets );
+ return std::tie( sType, pNext, pStdPictureInfo, sliceCount, pSliceOffsets );
}
# endif
@@ -93735,11 +96431,11 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoDecodeH265PictureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) &&
- ( slicesCount == rhs.slicesCount ) && ( pSlicesDataOffsets == rhs.pSlicesDataOffsets );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) && ( sliceCount == rhs.sliceCount ) &&
+ ( pSliceOffsets == rhs.pSliceOffsets );
# endif
}
@@ -93750,20 +96446,12 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265PictureInfoEXT;
- const void * pNext = {};
- StdVideoDecodeH265PictureInfo * pStdPictureInfo = {};
- uint32_t slicesCount = {};
- const uint32_t * pSlicesDataOffsets = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265PictureInfoEXT;
+ const void * pNext = {};
+ StdVideoDecodeH265PictureInfo * pStdPictureInfo = {};
+ uint32_t sliceCount = {};
+ const uint32_t * pSliceOffsets = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoEXT ) ==
- sizeof( VkVideoDecodeH265PictureInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoEXT>::value,
- "VideoDecodeH265PictureInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eVideoDecodeH265PictureInfoEXT>
@@ -93773,60 +96461,61 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct VideoDecodeH265ProfileEXT
+ struct VideoDecodeH265ProfileInfoEXT
{
- using NativeType = VkVideoDecodeH265ProfileEXT;
+ using NativeType = VkVideoDecodeH265ProfileInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265ProfileEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265ProfileInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoDecodeH265ProfileEXT( StdVideoH265ProfileIdc stdProfileIdc_ = {} ) VULKAN_HPP_NOEXCEPT
- : stdProfileIdc( stdProfileIdc_ )
- {}
+ VULKAN_HPP_CONSTEXPR VideoDecodeH265ProfileInfoEXT( StdVideoH265ProfileIdc stdProfileIdc_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , stdProfileIdc( stdProfileIdc_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- VideoDecodeH265ProfileEXT( VideoDecodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoDecodeH265ProfileInfoEXT( VideoDecodeH265ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoDecodeH265ProfileEXT( VkVideoDecodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : VideoDecodeH265ProfileEXT( *reinterpret_cast<VideoDecodeH265ProfileEXT const *>( &rhs ) )
- {}
+ VideoDecodeH265ProfileInfoEXT( VkVideoDecodeH265ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : VideoDecodeH265ProfileInfoEXT( *reinterpret_cast<VideoDecodeH265ProfileInfoEXT const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoDecodeH265ProfileEXT & operator=( VideoDecodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoDecodeH265ProfileInfoEXT & operator=( VideoDecodeH265ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoDecodeH265ProfileEXT & operator=( VkVideoDecodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VideoDecodeH265ProfileInfoEXT & operator=( VkVideoDecodeH265ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileEXT const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoEXT const *>( &rhs );
return *this;
}
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265ProfileEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265ProfileInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265ProfileEXT &
- setStdProfileIdc( StdVideoH265ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265ProfileInfoEXT & setStdProfileIdc( StdVideoH265ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT
{
stdProfileIdc = stdProfileIdc_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoDecodeH265ProfileEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeH265ProfileInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkVideoDecodeH265ProfileEXT *>( this );
+ return *reinterpret_cast<const VkVideoDecodeH265ProfileInfoEXT *>( this );
}
- explicit operator VkVideoDecodeH265ProfileEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeH265ProfileInfoEXT &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkVideoDecodeH265ProfileEXT *>( this );
+ return *reinterpret_cast<VkVideoDecodeH265ProfileInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -93839,7 +96528,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- std::strong_ordering operator<=>( VideoDecodeH265ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ std::strong_ordering operator<=>( VideoDecodeH265ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
return cmp;
@@ -93852,295 +96541,187 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif
- bool operator==( VideoDecodeH265ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( VideoDecodeH265ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ) == 0 );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ) == 0 );
}
- bool operator!=( VideoDecodeH265ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( VideoDecodeH265ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265ProfileEXT;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265ProfileInfoEXT;
const void * pNext = {};
StdVideoH265ProfileIdc stdProfileIdc = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileEXT ) ==
- sizeof( VkVideoDecodeH265ProfileEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileEXT>::value,
- "VideoDecodeH265ProfileEXT is not nothrow_move_constructible!" );
template <>
- struct CppType<StructureType, StructureType::eVideoDecodeH265ProfileEXT>
+ struct CppType<StructureType, StructureType::eVideoDecodeH265ProfileInfoEXT>
{
- using Type = VideoDecodeH265ProfileEXT;
+ using Type = VideoDecodeH265ProfileInfoEXT;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
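For orientation, a minimal usage sketch of the renamed VideoDecodeH265ProfileInfoEXT based on the declarations in this hunk; the STD_VIDEO_H265_PROFILE_IDC_MAIN value is assumed to come from the vulkan_video_codec_h265std headers and VK_ENABLE_BETA_EXTENSIONS is assumed to be defined:

    // Sketch only: profile IDC value assumed from the codec std headers.
    StdVideoH265ProfileIdc profileIdc = STD_VIDEO_H265_PROFILE_IDC_MAIN;
    vk::VideoDecodeH265ProfileInfoEXT h265Profile( profileIdc );   // pNext now defaults to nullptr in the constructor
    VkVideoDecodeH265ProfileInfoEXT   native     = h265Profile;    // conversion operators are no longer 'explicit'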
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct VideoDecodeH265SessionCreateInfoEXT
+ struct VideoDecodeH265SessionParametersAddInfoEXT
{
- using NativeType = VkVideoDecodeH265SessionCreateInfoEXT;
+ using NativeType = VkVideoDecodeH265SessionParametersAddInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eVideoDecodeH265SessionCreateInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265SessionParametersAddInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionCreateInfoEXT(
- VULKAN_HPP_NAMESPACE::VideoDecodeH265CreateFlagsEXT flags_ = {},
- const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , pStdExtensionVersion( pStdExtensionVersion_ )
- {}
-
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionCreateInfoEXT( VideoDecodeH265SessionCreateInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
-
- VideoDecodeH265SessionCreateInfoEXT( VkVideoDecodeH265SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : VideoDecodeH265SessionCreateInfoEXT( *reinterpret_cast<VideoDecodeH265SessionCreateInfoEXT const *>( &rhs ) )
- {}
-# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
-
- VideoDecodeH265SessionCreateInfoEXT &
- operator=( VideoDecodeH265SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- VideoDecodeH265SessionCreateInfoEXT &
- operator=( VkVideoDecodeH265SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoEXT( uint32_t stdVPSCount_ = {},
+ const StdVideoH265VideoParameterSet * pStdVPSs_ = {},
+ uint32_t stdSPSCount_ = {},
+ const StdVideoH265SequenceParameterSet * pStdSPSs_ = {},
+ uint32_t stdPPSCount_ = {},
+ const StdVideoH265PictureParameterSet * pStdPPSs_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , stdVPSCount( stdVPSCount_ )
+ , pStdVPSs( pStdVPSs_ )
+ , stdSPSCount( stdSPSCount_ )
+ , pStdSPSs( pStdSPSs_ )
+ , stdPPSCount( stdPPSCount_ )
+ , pStdPPSs( pStdPPSs_ )
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionCreateInfoEXT const *>( &rhs );
- return *this;
}
-# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoEXT( VideoDecodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionCreateInfoEXT &
- setFlags( VULKAN_HPP_NAMESPACE::VideoDecodeH265CreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ VideoDecodeH265SessionParametersAddInfoEXT( VkVideoDecodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : VideoDecodeH265SessionParametersAddInfoEXT( *reinterpret_cast<VideoDecodeH265SessionParametersAddInfoEXT const *>( &rhs ) )
{
- flags = flags_;
- return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionCreateInfoEXT & setPStdExtensionVersion(
- const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ VideoDecodeH265SessionParametersAddInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265VideoParameterSet> const & stdVPSs_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265SequenceParameterSet> const & stdSPSs_ = {},
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265PictureParameterSet> const & stdPPSs_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , stdVPSCount( static_cast<uint32_t>( stdVPSs_.size() ) )
+ , pStdVPSs( stdVPSs_.data() )
+ , stdSPSCount( static_cast<uint32_t>( stdSPSs_.size() ) )
+ , pStdSPSs( stdSPSs_.data() )
+ , stdPPSCount( static_cast<uint32_t>( stdPPSs_.size() ) )
+ , pStdPPSs( stdPPSs_.data() )
{
- pStdExtensionVersion = pStdExtensionVersion_;
- return *this;
}
-# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- explicit operator VkVideoDecodeH265SessionCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkVideoDecodeH265SessionCreateInfoEXT *>( this );
- }
+ VideoDecodeH265SessionParametersAddInfoEXT & operator=( VideoDecodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- explicit operator VkVideoDecodeH265SessionCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ VideoDecodeH265SessionParametersAddInfoEXT & operator=( VkVideoDecodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkVideoDecodeH265SessionCreateInfoEXT *>( this );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT const *>( &rhs );
+ return *this;
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
-# if 14 <= VULKAN_HPP_CPP_VERSION
- auto
-# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::VideoDecodeH265CreateFlagsEXT const &,
- const VULKAN_HPP_NAMESPACE::ExtensionProperties * const &>
-# endif
- reflect() const VULKAN_HPP_NOEXCEPT
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType, pNext, flags, pStdExtensionVersion );
+ pNext = pNext_;
+ return *this;
}
-# endif
-# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( VideoDecodeH265SessionCreateInfoEXT const & ) const = default;
-# else
- bool operator==( VideoDecodeH265SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT & setStdVPSCount( uint32_t stdVPSCount_ ) VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
- return this->reflect() == rhs.reflect();
-# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( pStdExtensionVersion == rhs.pStdExtensionVersion );
-# endif
+ stdVPSCount = stdVPSCount_;
+ return *this;
}
- bool operator!=( VideoDecodeH265SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT & setPStdVPSs( const StdVideoH265VideoParameterSet * pStdVPSs_ ) VULKAN_HPP_NOEXCEPT
{
- return !operator==( rhs );
+ pStdVPSs = pStdVPSs_;
+ return *this;
}
-# endif
-
- public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionCreateInfoEXT;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::VideoDecodeH265CreateFlagsEXT flags = {};
- const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionCreateInfoEXT ) ==
- sizeof( VkVideoDecodeH265SessionCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionCreateInfoEXT>::value,
- "VideoDecodeH265SessionCreateInfoEXT is not nothrow_move_constructible!" );
-
- template <>
- struct CppType<StructureType, StructureType::eVideoDecodeH265SessionCreateInfoEXT>
- {
- using Type = VideoDecodeH265SessionCreateInfoEXT;
- };
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct VideoDecodeH265SessionParametersAddInfoEXT
- {
- using NativeType = VkVideoDecodeH265SessionParametersAddInfoEXT;
-
- static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eVideoDecodeH265SessionParametersAddInfoEXT;
-
-# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoEXT(
- uint32_t spsStdCount_ = {},
- const StdVideoH265SequenceParameterSet * pSpsStd_ = {},
- uint32_t ppsStdCount_ = {},
- const StdVideoH265PictureParameterSet * pPpsStd_ = {} ) VULKAN_HPP_NOEXCEPT
- : spsStdCount( spsStdCount_ )
- , pSpsStd( pSpsStd_ )
- , ppsStdCount( ppsStdCount_ )
- , pPpsStd( pPpsStd_ )
- {}
-
- VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoEXT(
- VideoDecodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- VideoDecodeH265SessionParametersAddInfoEXT( VkVideoDecodeH265SessionParametersAddInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : VideoDecodeH265SessionParametersAddInfoEXT(
- *reinterpret_cast<VideoDecodeH265SessionParametersAddInfoEXT const *>( &rhs ) )
- {}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- VideoDecodeH265SessionParametersAddInfoEXT(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265SequenceParameterSet> const & spsStd_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265PictureParameterSet> const & ppsStd_ = {} )
- : spsStdCount( static_cast<uint32_t>( spsStd_.size() ) )
- , pSpsStd( spsStd_.data() )
- , ppsStdCount( static_cast<uint32_t>( ppsStd_.size() ) )
- , pPpsStd( ppsStd_.data() )
- {}
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
-
- VideoDecodeH265SessionParametersAddInfoEXT &
- operator=( VideoDecodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
VideoDecodeH265SessionParametersAddInfoEXT &
- operator=( VkVideoDecodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT const *>( &rhs );
- return *this;
- }
-
-# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ setStdVPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265VideoParameterSet> const & stdVPSs_ ) VULKAN_HPP_NOEXCEPT
{
- pNext = pNext_;
+ stdVPSCount = static_cast<uint32_t>( stdVPSs_.size() );
+ pStdVPSs = stdVPSs_.data();
return *this;
}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT &
- setSpsStdCount( uint32_t spsStdCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT & setStdSPSCount( uint32_t stdSPSCount_ ) VULKAN_HPP_NOEXCEPT
{
- spsStdCount = spsStdCount_;
+ stdSPSCount = stdSPSCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT &
- setPSpsStd( const StdVideoH265SequenceParameterSet * pSpsStd_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT & setPStdSPSs( const StdVideoH265SequenceParameterSet * pStdSPSs_ ) VULKAN_HPP_NOEXCEPT
{
- pSpsStd = pSpsStd_;
+ pStdSPSs = pStdSPSs_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoDecodeH265SessionParametersAddInfoEXT &
- setSpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265SequenceParameterSet> const & spsStd_ )
- VULKAN_HPP_NOEXCEPT
+ setStdSPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265SequenceParameterSet> const & stdSPSs_ ) VULKAN_HPP_NOEXCEPT
{
- spsStdCount = static_cast<uint32_t>( spsStd_.size() );
- pSpsStd = spsStd_.data();
+ stdSPSCount = static_cast<uint32_t>( stdSPSs_.size() );
+ pStdSPSs = stdSPSs_.data();
return *this;
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT &
- setPpsStdCount( uint32_t ppsStdCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT & setStdPPSCount( uint32_t stdPPSCount_ ) VULKAN_HPP_NOEXCEPT
{
- ppsStdCount = ppsStdCount_;
+ stdPPSCount = stdPPSCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT &
- setPPpsStd( const StdVideoH265PictureParameterSet * pPpsStd_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT & setPStdPPSs( const StdVideoH265PictureParameterSet * pStdPPSs_ ) VULKAN_HPP_NOEXCEPT
{
- pPpsStd = pPpsStd_;
+ pStdPPSs = pStdPPSs_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoDecodeH265SessionParametersAddInfoEXT &
- setPpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265PictureParameterSet> const & ppsStd_ )
- VULKAN_HPP_NOEXCEPT
+ setStdPPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265PictureParameterSet> const & stdPPSs_ ) VULKAN_HPP_NOEXCEPT
{
- ppsStdCount = static_cast<uint32_t>( ppsStd_.size() );
- pPpsStd = ppsStd_.data();
+ stdPPSCount = static_cast<uint32_t>( stdPPSs_.size() );
+ pStdPPSs = stdPPSs_.data();
return *this;
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoDecodeH265SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeH265SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeH265SessionParametersAddInfoEXT *>( this );
}
- explicit operator VkVideoDecodeH265SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeH265SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeH265SessionParametersAddInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
const void * const &,
uint32_t const &,
+ const StdVideoH265VideoParameterSet * const &,
+ uint32_t const &,
const StdVideoH265SequenceParameterSet * const &,
uint32_t const &,
const StdVideoH265PictureParameterSet * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType, pNext, spsStdCount, pSpsStd, ppsStdCount, pPpsStd );
+ return std::tie( sType, pNext, stdVPSCount, pStdVPSs, stdSPSCount, pStdSPSs, stdPPSCount, pStdPPSs );
}
# endif
@@ -94149,11 +96730,11 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoDecodeH265SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( spsStdCount == rhs.spsStdCount ) &&
- ( pSpsStd == rhs.pSpsStd ) && ( ppsStdCount == rhs.ppsStdCount ) && ( pPpsStd == rhs.pPpsStd );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stdVPSCount == rhs.stdVPSCount ) && ( pStdVPSs == rhs.pStdVPSs ) &&
+ ( stdSPSCount == rhs.stdSPSCount ) && ( pStdSPSs == rhs.pStdSPSs ) && ( stdPPSCount == rhs.stdPPSCount ) && ( pStdPPSs == rhs.pStdPPSs );
# endif
}
@@ -94166,20 +96747,13 @@ namespace VULKAN_HPP_NAMESPACE
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionParametersAddInfoEXT;
const void * pNext = {};
- uint32_t spsStdCount = {};
- const StdVideoH265SequenceParameterSet * pSpsStd = {};
- uint32_t ppsStdCount = {};
- const StdVideoH265PictureParameterSet * pPpsStd = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT ) ==
- sizeof( VkVideoDecodeH265SessionParametersAddInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT>::value,
- "VideoDecodeH265SessionParametersAddInfoEXT is not nothrow_move_constructible!" );
+ uint32_t stdVPSCount = {};
+ const StdVideoH265VideoParameterSet * pStdVPSs = {};
+ uint32_t stdSPSCount = {};
+ const StdVideoH265SequenceParameterSet * pStdSPSs = {};
+ uint32_t stdPPSCount = {};
+ const StdVideoH265PictureParameterSet * pStdPPSs = {};
+ };
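A hedged sketch of filling the reworked add-info struct through its ArrayProxyNoTemporaries constructor, which now carries VPS entries next to SPS/PPS; the StdVideoH265* parameter sets are placeholders assumed to be populated elsewhere from the bitstream:

    std::array<StdVideoH265VideoParameterSet, 1>    vps{};   // placeholders
    std::array<StdVideoH265SequenceParameterSet, 1> sps{};
    std::array<StdVideoH265PictureParameterSet, 1>  pps{};
    // stdVPSCount/pStdVPSs, stdSPSCount/pStdSPSs and stdPPSCount/pStdPPSs are derived from the proxies.
    vk::VideoDecodeH265SessionParametersAddInfoEXT addInfo( vps, sps, pps );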
template <>
struct CppType<StructureType, StructureType::eVideoDecodeH265SessionParametersAddInfoEXT>
@@ -94194,81 +96768,84 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkVideoDecodeH265SessionParametersCreateInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eVideoDecodeH265SessionParametersCreateInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265SessionParametersCreateInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersCreateInfoEXT(
- uint32_t maxSpsStdCount_ = {},
- uint32_t maxPpsStdCount_ = {},
- const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT * pParametersAddInfo_ = {} )
- VULKAN_HPP_NOEXCEPT
- : maxSpsStdCount( maxSpsStdCount_ )
- , maxPpsStdCount( maxPpsStdCount_ )
+ VULKAN_HPP_CONSTEXPR
+ VideoDecodeH265SessionParametersCreateInfoEXT( uint32_t maxStdVPSCount_ = {},
+ uint32_t maxStdSPSCount_ = {},
+ uint32_t maxStdPPSCount_ = {},
+ const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT * pParametersAddInfo_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxStdVPSCount( maxStdVPSCount_ )
+ , maxStdSPSCount( maxStdSPSCount_ )
+ , maxStdPPSCount( maxStdPPSCount_ )
, pParametersAddInfo( pParametersAddInfo_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersCreateInfoEXT(
- VideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ VideoDecodeH265SessionParametersCreateInfoEXT( VideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoDecodeH265SessionParametersCreateInfoEXT( VkVideoDecodeH265SessionParametersCreateInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : VideoDecodeH265SessionParametersCreateInfoEXT(
- *reinterpret_cast<VideoDecodeH265SessionParametersCreateInfoEXT const *>( &rhs ) )
- {}
+ VideoDecodeH265SessionParametersCreateInfoEXT( VkVideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : VideoDecodeH265SessionParametersCreateInfoEXT( *reinterpret_cast<VideoDecodeH265SessionParametersCreateInfoEXT const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoDecodeH265SessionParametersCreateInfoEXT &
- operator=( VideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoDecodeH265SessionParametersCreateInfoEXT & operator=( VideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoDecodeH265SessionParametersCreateInfoEXT &
- operator=( VkVideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VideoDecodeH265SessionParametersCreateInfoEXT & operator=( VkVideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoEXT const *>( &rhs );
return *this;
}
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoEXT &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoEXT &
- setMaxSpsStdCount( uint32_t maxSpsStdCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoEXT & setMaxStdVPSCount( uint32_t maxStdVPSCount_ ) VULKAN_HPP_NOEXCEPT
{
- maxSpsStdCount = maxSpsStdCount_;
+ maxStdVPSCount = maxStdVPSCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoEXT &
- setMaxPpsStdCount( uint32_t maxPpsStdCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoEXT & setMaxStdSPSCount( uint32_t maxStdSPSCount_ ) VULKAN_HPP_NOEXCEPT
{
- maxPpsStdCount = maxPpsStdCount_;
+ maxStdSPSCount = maxStdSPSCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoEXT & setPParametersAddInfo(
- const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoEXT & setMaxStdPPSCount( uint32_t maxStdPPSCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxStdPPSCount = maxStdPPSCount_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoEXT &
+ setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT
{
pParametersAddInfo = pParametersAddInfo_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoDecodeH265SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeH265SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeH265SessionParametersCreateInfoEXT *>( this );
}
- explicit operator VkVideoDecodeH265SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeH265SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeH265SessionParametersCreateInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -94276,11 +96853,12 @@ namespace VULKAN_HPP_NAMESPACE
const void * const &,
uint32_t const &,
uint32_t const &,
+ uint32_t const &,
const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType, pNext, maxSpsStdCount, maxPpsStdCount, pParametersAddInfo );
+ return std::tie( sType, pNext, maxStdVPSCount, maxStdSPSCount, maxStdPPSCount, pParametersAddInfo );
}
# endif
@@ -94289,11 +96867,11 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSpsStdCount == rhs.maxSpsStdCount ) &&
- ( maxPpsStdCount == rhs.maxPpsStdCount ) && ( pParametersAddInfo == rhs.pParametersAddInfo );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxStdVPSCount == rhs.maxStdVPSCount ) && ( maxStdSPSCount == rhs.maxStdSPSCount ) &&
+ ( maxStdPPSCount == rhs.maxStdPPSCount ) && ( pParametersAddInfo == rhs.pParametersAddInfo );
# endif
}
@@ -94304,21 +96882,13 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionParametersCreateInfoEXT;
- const void * pNext = {};
- uint32_t maxSpsStdCount = {};
- uint32_t maxPpsStdCount = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionParametersCreateInfoEXT;
+ const void * pNext = {};
+ uint32_t maxStdVPSCount = {};
+ uint32_t maxStdSPSCount = {};
+ uint32_t maxStdPPSCount = {};
const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT * pParametersAddInfo = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoEXT ) ==
- sizeof( VkVideoDecodeH265SessionParametersCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoEXT>::value,
- "VideoDecodeH265SessionParametersCreateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eVideoDecodeH265SessionParametersCreateInfoEXT>
@@ -94336,20 +96906,17 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeInfoKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoDecodeInfoKHR(
- VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_ = {},
- VULKAN_HPP_NAMESPACE::Offset2D codedOffset_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D codedExtent_ = {},
- VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_ = {},
- VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR dstPictureResource_ = {},
- const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_ = {},
- uint32_t referenceSlotCount_ = {},
- const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , codedOffset( codedOffset_ )
- , codedExtent( codedExtent_ )
+ VULKAN_HPP_CONSTEXPR VideoDecodeInfoKHR( VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_ = {},
+ VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_ = {},
+ VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR dstPictureResource_ = {},
+ const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ = {},
+ uint32_t referenceSlotCount_ = {},
+ const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, srcBuffer( srcBuffer_ )
, srcBufferOffset( srcBufferOffset_ )
, srcBufferRange( srcBufferRange_ )
@@ -94357,29 +96924,24 @@ namespace VULKAN_HPP_NAMESPACE
, pSetupReferenceSlot( pSetupReferenceSlot_ )
, referenceSlotCount( referenceSlotCount_ )
, pReferenceSlots( pReferenceSlots_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR VideoDecodeInfoKHR( VideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoDecodeInfoKHR( VkVideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : VideoDecodeInfoKHR( *reinterpret_cast<VideoDecodeInfoKHR const *>( &rhs ) )
- {}
+ VideoDecodeInfoKHR( VkVideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : VideoDecodeInfoKHR( *reinterpret_cast<VideoDecodeInfoKHR const *>( &rhs ) ) {}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- VideoDecodeInfoKHR(
- VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_,
- VULKAN_HPP_NAMESPACE::Offset2D codedOffset_,
- VULKAN_HPP_NAMESPACE::Extent2D codedExtent_,
- VULKAN_HPP_NAMESPACE::Buffer srcBuffer_,
- VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_,
- VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_,
- VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR dstPictureResource_,
- const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR> const &
- referenceSlots_ )
- : flags( flags_ )
- , codedOffset( codedOffset_ )
- , codedExtent( codedExtent_ )
+ VideoDecodeInfoKHR( VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_,
+ VULKAN_HPP_NAMESPACE::Buffer srcBuffer_,
+ VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_,
+ VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_,
+ VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR dstPictureResource_,
+ const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR> const & referenceSlots_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, srcBuffer( srcBuffer_ )
, srcBufferOffset( srcBufferOffset_ )
, srcBufferRange( srcBufferRange_ )
@@ -94387,7 +96949,8 @@ namespace VULKAN_HPP_NAMESPACE
, pSetupReferenceSlot( pSetupReferenceSlot_ )
, referenceSlotCount( static_cast<uint32_t>( referenceSlots_.size() ) )
, pReferenceSlots( referenceSlots_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -94406,71 +96969,52 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR &
- setFlags( VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR &
- setCodedOffset( VULKAN_HPP_NAMESPACE::Offset2D const & codedOffset_ ) VULKAN_HPP_NOEXCEPT
- {
- codedOffset = codedOffset_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR &
- setCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & codedExtent_ ) VULKAN_HPP_NOEXCEPT
- {
- codedExtent = codedExtent_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR &
- setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT
{
srcBuffer = srcBuffer_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR &
- setSrcBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setSrcBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_ ) VULKAN_HPP_NOEXCEPT
{
srcBufferOffset = srcBufferOffset_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR &
- setSrcBufferRange( VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setSrcBufferRange( VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_ ) VULKAN_HPP_NOEXCEPT
{
srcBufferRange = srcBufferRange_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setDstPictureResource(
- VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR const & dstPictureResource_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR &
+ setDstPictureResource( VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR const & dstPictureResource_ ) VULKAN_HPP_NOEXCEPT
{
dstPictureResource = dstPictureResource_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setPSetupReferenceSlot(
- const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR &
+ setPSetupReferenceSlot( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ ) VULKAN_HPP_NOEXCEPT
{
pSetupReferenceSlot = pSetupReferenceSlot_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR &
- setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT
{
referenceSlotCount = referenceSlotCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR &
- setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT
+ setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT
{
pReferenceSlots = pReferenceSlots_;
return *this;
@@ -94478,8 +97022,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoDecodeInfoKHR & setReferenceSlots(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR> const &
- referenceSlots_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR> const & referenceSlots_ ) VULKAN_HPP_NOEXCEPT
{
referenceSlotCount = static_cast<uint32_t>( referenceSlots_.size() );
pReferenceSlots = referenceSlots_.data();
@@ -94488,47 +97031,35 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoDecodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoDecodeInfoKHR *>( this );
}
- explicit operator VkVideoDecodeInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoDecodeInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
const void * const &,
VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR const &,
- VULKAN_HPP_NAMESPACE::Offset2D const &,
- VULKAN_HPP_NAMESPACE::Extent2D const &,
VULKAN_HPP_NAMESPACE::Buffer const &,
VULKAN_HPP_NAMESPACE::DeviceSize const &,
VULKAN_HPP_NAMESPACE::DeviceSize const &,
- VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR const &,
- const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * const &,
+ VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR const &,
+ const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * const &,
uint32_t const &,
- const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * const &>
+ const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- flags,
- codedOffset,
- codedExtent,
- srcBuffer,
- srcBufferOffset,
- srcBufferRange,
- dstPictureResource,
- pSetupReferenceSlot,
- referenceSlotCount,
- pReferenceSlots );
+ return std::tie(
+ sType, pNext, flags, srcBuffer, srcBufferOffset, srcBufferRange, dstPictureResource, pSetupReferenceSlot, referenceSlotCount, pReferenceSlots );
}
# endif
@@ -94537,13 +97068,11 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoDecodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( codedOffset == rhs.codedOffset ) && ( codedExtent == rhs.codedExtent ) &&
- ( srcBuffer == rhs.srcBuffer ) && ( srcBufferOffset == rhs.srcBufferOffset ) &&
- ( srcBufferRange == rhs.srcBufferRange ) && ( dstPictureResource == rhs.dstPictureResource ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( srcBuffer == rhs.srcBuffer ) &&
+ ( srcBufferOffset == rhs.srcBufferOffset ) && ( srcBufferRange == rhs.srcBufferRange ) && ( dstPictureResource == rhs.dstPictureResource ) &&
( pSetupReferenceSlot == rhs.pSetupReferenceSlot ) && ( referenceSlotCount == rhs.referenceSlotCount ) &&
( pReferenceSlots == rhs.pReferenceSlots );
# endif
@@ -94556,25 +97085,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeInfoKHR;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags = {};
- VULKAN_HPP_NAMESPACE::Offset2D codedOffset = {};
- VULKAN_HPP_NAMESPACE::Extent2D codedExtent = {};
- VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {};
- VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset = {};
- VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange = {};
- VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR dstPictureResource = {};
- const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot = {};
- uint32_t referenceSlotCount = {};
- const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags = {};
+ VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange = {};
+ VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR dstPictureResource = {};
+ const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot = {};
+ uint32_t referenceSlotCount = {};
+ const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR ) == sizeof( VkVideoDecodeInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR>::value,
- "VideoDecodeInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eVideoDecodeInfoKHR>
@@ -94584,168 +97105,288 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
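A sketch of populating the slimmed-down VideoDecodeInfoKHR after this change (codedOffset/codedExtent are removed and the picture/slot types are now the *Info* variants); bitstreamBuffer and bitstreamSize are placeholder names for objects created elsewhere:

    vk::VideoPictureResourceInfoKHR              dstPicture{};   // bound to the decode target image elsewhere
    std::array<vk::VideoReferenceSlotInfoKHR, 1> refSlots{};
    vk::VideoDecodeInfoKHR decodeInfo{};
    decodeInfo.setSrcBuffer( bitstreamBuffer )                   // vk::Buffer holding the encoded bitstream (placeholder)
        .setSrcBufferOffset( 0 )
        .setSrcBufferRange( bitstreamSize )                      // placeholder size
        .setDstPictureResource( dstPicture )
        .setReferenceSlots( refSlots );                          // fills referenceSlotCount and pReferenceSlots together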
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct VideoEncodeH264CapabilitiesEXT
+ struct VideoDecodeUsageInfoKHR
{
- using NativeType = VkVideoEncodeH264CapabilitiesEXT;
+ using NativeType = VkVideoDecodeUsageInfoKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264CapabilitiesEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeUsageInfoKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT(
- VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsEXT flags_ = {},
- VULKAN_HPP_NAMESPACE::VideoEncodeH264InputModeFlagsEXT inputModeFlags_ = {},
- VULKAN_HPP_NAMESPACE::VideoEncodeH264OutputModeFlagsEXT outputModeFlags_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D minPictureSizeInMbs_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D maxPictureSizeInMbs_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D inputImageDataAlignment_ = {},
- uint8_t maxNumL0ReferenceForP_ = {},
- uint8_t maxNumL0ReferenceForB_ = {},
- uint8_t maxNumL1Reference_ = {},
- uint8_t qualityLevelCount_ = {},
- VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , inputModeFlags( inputModeFlags_ )
- , outputModeFlags( outputModeFlags_ )
- , minPictureSizeInMbs( minPictureSizeInMbs_ )
- , maxPictureSizeInMbs( maxPictureSizeInMbs_ )
- , inputImageDataAlignment( inputImageDataAlignment_ )
- , maxNumL0ReferenceForP( maxNumL0ReferenceForP_ )
- , maxNumL0ReferenceForB( maxNumL0ReferenceForB_ )
- , maxNumL1Reference( maxNumL1Reference_ )
- , qualityLevelCount( qualityLevelCount_ )
- , stdExtensionVersion( stdExtensionVersion_ )
- {}
+ VULKAN_HPP_CONSTEXPR VideoDecodeUsageInfoKHR( VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR videoUsageHints_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , videoUsageHints( videoUsageHints_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR_14
- VideoEncodeH264CapabilitiesEXT( VideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoDecodeUsageInfoKHR( VideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoEncodeH264CapabilitiesEXT( VkVideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : VideoEncodeH264CapabilitiesEXT( *reinterpret_cast<VideoEncodeH264CapabilitiesEXT const *>( &rhs ) )
- {}
+ VideoDecodeUsageInfoKHR( VkVideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : VideoDecodeUsageInfoKHR( *reinterpret_cast<VideoDecodeUsageInfoKHR const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoEncodeH264CapabilitiesEXT &
- operator=( VideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoDecodeUsageInfoKHR & operator=( VideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoEncodeH264CapabilitiesEXT & operator=( VkVideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VideoDecodeUsageInfoKHR & operator=( VkVideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR const *>( &rhs );
return *this;
}
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeUsageInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT &
- setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoDecodeUsageInfoKHR & setVideoUsageHints( VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR videoUsageHints_ ) VULKAN_HPP_NOEXCEPT
{
- flags = flags_;
+ videoUsageHints = videoUsageHints_;
return *this;
}
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT &
- setInputModeFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH264InputModeFlagsEXT inputModeFlags_ ) VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeUsageInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
- inputModeFlags = inputModeFlags_;
- return *this;
+ return *reinterpret_cast<const VkVideoDecodeUsageInfoKHR *>( this );
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT &
- setOutputModeFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH264OutputModeFlagsEXT outputModeFlags_ ) VULKAN_HPP_NOEXCEPT
+ operator VkVideoDecodeUsageInfoKHR &() VULKAN_HPP_NOEXCEPT
{
- outputModeFlags = outputModeFlags_;
- return *this;
+ return *reinterpret_cast<VkVideoDecodeUsageInfoKHR *>( this );
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT &
- setMinPictureSizeInMbs( VULKAN_HPP_NAMESPACE::Extent2D const & minPictureSizeInMbs_ ) VULKAN_HPP_NOEXCEPT
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
{
- minPictureSizeInMbs = minPictureSizeInMbs_;
- return *this;
+ return std::tie( sType, pNext, videoUsageHints );
}
+# endif
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT &
- setMaxPictureSizeInMbs( VULKAN_HPP_NAMESPACE::Extent2D const & maxPictureSizeInMbs_ ) VULKAN_HPP_NOEXCEPT
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( VideoDecodeUsageInfoKHR const & ) const = default;
+# else
+ bool operator==( VideoDecodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- maxPictureSizeInMbs = maxPictureSizeInMbs_;
- return *this;
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoUsageHints == rhs.videoUsageHints );
+# endif
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT &
- setInputImageDataAlignment( VULKAN_HPP_NAMESPACE::Extent2D const & inputImageDataAlignment_ ) VULKAN_HPP_NOEXCEPT
+ bool operator!=( VideoDecodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- inputImageDataAlignment = inputImageDataAlignment_;
- return *this;
+ return !operator==( rhs );
}
+# endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeUsageInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR videoUsageHints = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eVideoDecodeUsageInfoKHR>
+ {
+ using Type = VideoDecodeUsageInfoKHR;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
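The newly added VideoDecodeUsageInfoKHR is a small hint struct intended to be chained through pNext of the video profile description; a sketch, with the eTranscoding flag bit assumed from VK_KHR_video_decode_queue rather than shown in this hunk:

    vk::VideoDecodeUsageInfoKHR usageInfo{};
    usageInfo.setVideoUsageHints( vk::VideoDecodeUsageFlagBitsKHR::eTranscoding );   // flag bit assumed
    // Chain via pNext into the decode profile info when creating the video session.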
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT &
- setMaxNumL0ReferenceForP( uint8_t maxNumL0ReferenceForP_ ) VULKAN_HPP_NOEXCEPT
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ struct VideoEncodeCapabilitiesKHR
+ {
+ using NativeType = VkVideoEncodeCapabilitiesKHR;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeCapabilitiesKHR;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR VideoEncodeCapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagsKHR flags_ = {},
+ VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagsKHR rateControlModes_ = {},
+ uint8_t rateControlLayerCount_ = {},
+ uint8_t qualityLevelCount_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D inputImageDataFillAlignment_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
+ , rateControlModes( rateControlModes_ )
+ , rateControlLayerCount( rateControlLayerCount_ )
+ , qualityLevelCount( qualityLevelCount_ )
+ , inputImageDataFillAlignment( inputImageDataFillAlignment_ )
{
- maxNumL0ReferenceForP = maxNumL0ReferenceForP_;
- return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT &
- setMaxNumL0ReferenceForB( uint8_t maxNumL0ReferenceForB_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR VideoEncodeCapabilitiesKHR( VideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ VideoEncodeCapabilitiesKHR( VkVideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : VideoEncodeCapabilitiesKHR( *reinterpret_cast<VideoEncodeCapabilitiesKHR const *>( &rhs ) )
{
- maxNumL0ReferenceForB = maxNumL0ReferenceForB_;
- return *this;
}
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT &
- setMaxNumL1Reference( uint8_t maxNumL1Reference_ ) VULKAN_HPP_NOEXCEPT
+ VideoEncodeCapabilitiesKHR & operator=( VideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ VideoEncodeCapabilitiesKHR & operator=( VkVideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- maxNumL1Reference = maxNumL1Reference_;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR const *>( &rhs );
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT &
- setQualityLevelCount( uint8_t qualityLevelCount_ ) VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
- qualityLevelCount = qualityLevelCount_;
- return *this;
+ return *reinterpret_cast<const VkVideoEncodeCapabilitiesKHR *>( this );
+ }
+
+ operator VkVideoEncodeCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkVideoEncodeCapabilitiesKHR *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagsKHR const &,
+ VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagsKHR const &,
+ uint8_t const &,
+ uint8_t const &,
+ VULKAN_HPP_NAMESPACE::Extent2D const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, flags, rateControlModes, rateControlLayerCount, qualityLevelCount, inputImageDataFillAlignment );
+ }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( VideoEncodeCapabilitiesKHR const & ) const = default;
+# else
+ bool operator==( VideoEncodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( rateControlModes == rhs.rateControlModes ) &&
+ ( rateControlLayerCount == rhs.rateControlLayerCount ) && ( qualityLevelCount == rhs.qualityLevelCount ) &&
+ ( inputImageDataFillAlignment == rhs.inputImageDataFillAlignment );
+# endif
+ }
+
+ bool operator!=( VideoEncodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
}
+# endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeCapabilitiesKHR;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagsKHR flags = {};
+ VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagsKHR rateControlModes = {};
+ uint8_t rateControlLayerCount = {};
+ uint8_t qualityLevelCount = {};
+ VULKAN_HPP_NAMESPACE::Extent2D inputImageDataFillAlignment = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eVideoEncodeCapabilitiesKHR>
+ {
+ using Type = VideoEncodeCapabilitiesKHR;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
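VideoEncodeCapabilitiesKHR keeps a writable void * pNext, i.e. it is an output struct filled by the implementation; a sketch of chaining it when querying encode capabilities (vk::VideoCapabilitiesKHR and the getVideoCapabilitiesKHR query are assumed from the same header revision and are not part of this hunk):

    vk::StructureChain<vk::VideoCapabilitiesKHR, vk::VideoEncodeCapabilitiesKHR> caps;
    // physicalDevice.getVideoCapabilitiesKHR( profileInfo ), or the C entry point, would fill both links (placeholders).
    auto const & encodeCaps = caps.get<vk::VideoEncodeCapabilitiesKHR>();
    uint8_t      maxLayers  = encodeCaps.rateControlLayerCount;   // e.g. inspect the rate-control layer limit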
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ struct VideoEncodeH264CapabilitiesEXT
+ {
+ using NativeType = VkVideoEncodeH264CapabilitiesEXT;
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT & setStdExtensionVersion(
- VULKAN_HPP_NAMESPACE::ExtensionProperties const & stdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264CapabilitiesEXT;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesEXT( VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsEXT flags_ = {},
+ VULKAN_HPP_NAMESPACE::VideoEncodeH264InputModeFlagsEXT inputModeFlags_ = {},
+ VULKAN_HPP_NAMESPACE::VideoEncodeH264OutputModeFlagsEXT outputModeFlags_ = {},
+ uint8_t maxPPictureL0ReferenceCount_ = {},
+ uint8_t maxBPictureL0ReferenceCount_ = {},
+ uint8_t maxL1ReferenceCount_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 motionVectorsOverPicBoundariesFlag_ = {},
+ uint32_t maxBytesPerPicDenom_ = {},
+ uint32_t maxBitsPerMbDenom_ = {},
+ uint32_t log2MaxMvLengthHorizontal_ = {},
+ uint32_t log2MaxMvLengthVertical_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
+ , inputModeFlags( inputModeFlags_ )
+ , outputModeFlags( outputModeFlags_ )
+ , maxPPictureL0ReferenceCount( maxPPictureL0ReferenceCount_ )
+ , maxBPictureL0ReferenceCount( maxBPictureL0ReferenceCount_ )
+ , maxL1ReferenceCount( maxL1ReferenceCount_ )
+ , motionVectorsOverPicBoundariesFlag( motionVectorsOverPicBoundariesFlag_ )
+ , maxBytesPerPicDenom( maxBytesPerPicDenom_ )
+ , maxBitsPerMbDenom( maxBitsPerMbDenom_ )
+ , log2MaxMvLengthHorizontal( log2MaxMvLengthHorizontal_ )
+ , log2MaxMvLengthVertical( log2MaxMvLengthVertical_ )
{
- stdExtensionVersion = stdExtensionVersion_;
+ }
+
+ VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesEXT( VideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ VideoEncodeH264CapabilitiesEXT( VkVideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : VideoEncodeH264CapabilitiesEXT( *reinterpret_cast<VideoEncodeH264CapabilitiesEXT const *>( &rhs ) )
+ {
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ VideoEncodeH264CapabilitiesEXT & operator=( VideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ VideoEncodeH264CapabilitiesEXT & operator=( VkVideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT const *>( &rhs );
return *this;
}
-# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoEncodeH264CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH264CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH264CapabilitiesEXT *>( this );
}
- explicit operator VkVideoEncodeH264CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH264CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH264CapabilitiesEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
+ void * const &,
VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsEXT const &,
VULKAN_HPP_NAMESPACE::VideoEncodeH264InputModeFlagsEXT const &,
VULKAN_HPP_NAMESPACE::VideoEncodeH264OutputModeFlagsEXT const &,
- VULKAN_HPP_NAMESPACE::Extent2D const &,
- VULKAN_HPP_NAMESPACE::Extent2D const &,
- VULKAN_HPP_NAMESPACE::Extent2D const &,
uint8_t const &,
uint8_t const &,
uint8_t const &,
- uint8_t const &,
- VULKAN_HPP_NAMESPACE::ExtensionProperties const &>
+ VULKAN_HPP_NAMESPACE::Bool32 const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &,
+ uint32_t const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -94754,14 +97395,14 @@ namespace VULKAN_HPP_NAMESPACE
flags,
inputModeFlags,
outputModeFlags,
- minPictureSizeInMbs,
- maxPictureSizeInMbs,
- inputImageDataAlignment,
- maxNumL0ReferenceForP,
- maxNumL0ReferenceForB,
- maxNumL1Reference,
- qualityLevelCount,
- stdExtensionVersion );
+ maxPPictureL0ReferenceCount,
+ maxBPictureL0ReferenceCount,
+ maxL1ReferenceCount,
+ motionVectorsOverPicBoundariesFlag,
+ maxBytesPerPicDenom,
+ maxBitsPerMbDenom,
+ log2MaxMvLengthHorizontal,
+ log2MaxMvLengthVertical );
}
# endif
@@ -94770,16 +97411,15 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoEncodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( inputModeFlags == rhs.inputModeFlags ) && ( outputModeFlags == rhs.outputModeFlags ) &&
- ( minPictureSizeInMbs == rhs.minPictureSizeInMbs ) && ( maxPictureSizeInMbs == rhs.maxPictureSizeInMbs ) &&
- ( inputImageDataAlignment == rhs.inputImageDataAlignment ) &&
- ( maxNumL0ReferenceForP == rhs.maxNumL0ReferenceForP ) &&
- ( maxNumL0ReferenceForB == rhs.maxNumL0ReferenceForB ) && ( maxNumL1Reference == rhs.maxNumL1Reference ) &&
- ( qualityLevelCount == rhs.qualityLevelCount ) && ( stdExtensionVersion == rhs.stdExtensionVersion );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( inputModeFlags == rhs.inputModeFlags ) &&
+ ( outputModeFlags == rhs.outputModeFlags ) && ( maxPPictureL0ReferenceCount == rhs.maxPPictureL0ReferenceCount ) &&
+ ( maxBPictureL0ReferenceCount == rhs.maxBPictureL0ReferenceCount ) && ( maxL1ReferenceCount == rhs.maxL1ReferenceCount ) &&
+ ( motionVectorsOverPicBoundariesFlag == rhs.motionVectorsOverPicBoundariesFlag ) && ( maxBytesPerPicDenom == rhs.maxBytesPerPicDenom ) &&
+ ( maxBitsPerMbDenom == rhs.maxBitsPerMbDenom ) && ( log2MaxMvLengthHorizontal == rhs.log2MaxMvLengthHorizontal ) &&
+ ( log2MaxMvLengthVertical == rhs.log2MaxMvLengthVertical );
# endif
}
@@ -94790,28 +97430,20 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264CapabilitiesEXT;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsEXT flags = {};
- VULKAN_HPP_NAMESPACE::VideoEncodeH264InputModeFlagsEXT inputModeFlags = {};
- VULKAN_HPP_NAMESPACE::VideoEncodeH264OutputModeFlagsEXT outputModeFlags = {};
- VULKAN_HPP_NAMESPACE::Extent2D minPictureSizeInMbs = {};
- VULKAN_HPP_NAMESPACE::Extent2D maxPictureSizeInMbs = {};
- VULKAN_HPP_NAMESPACE::Extent2D inputImageDataAlignment = {};
- uint8_t maxNumL0ReferenceForP = {};
- uint8_t maxNumL0ReferenceForB = {};
- uint8_t maxNumL1Reference = {};
- uint8_t qualityLevelCount = {};
- VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT ) ==
- sizeof( VkVideoEncodeH264CapabilitiesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT>::value,
- "VideoEncodeH264CapabilitiesEXT is not nothrow_move_constructible!" );
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264CapabilitiesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsEXT flags = {};
+ VULKAN_HPP_NAMESPACE::VideoEncodeH264InputModeFlagsEXT inputModeFlags = {};
+ VULKAN_HPP_NAMESPACE::VideoEncodeH264OutputModeFlagsEXT outputModeFlags = {};
+ uint8_t maxPPictureL0ReferenceCount = {};
+ uint8_t maxBPictureL0ReferenceCount = {};
+ uint8_t maxL1ReferenceCount = {};
+ VULKAN_HPP_NAMESPACE::Bool32 motionVectorsOverPicBoundariesFlag = {};
+ uint32_t maxBytesPerPicDenom = {};
+ uint32_t maxBitsPerMbDenom = {};
+ uint32_t log2MaxMvLengthHorizontal = {};
+ uint32_t log2MaxMvLengthVertical = {};
+ };
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH264CapabilitiesEXT>
@@ -94829,23 +97461,24 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264DpbSlotInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- VideoEncodeH264DpbSlotInfoEXT( int8_t slotIndex_ = {},
- const StdVideoEncodeH264PictureInfo * pStdPictureInfo_ = {} ) VULKAN_HPP_NOEXCEPT
- : slotIndex( slotIndex_ )
- , pStdPictureInfo( pStdPictureInfo_ )
- {}
+ VULKAN_HPP_CONSTEXPR VideoEncodeH264DpbSlotInfoEXT( int8_t slotIndex_ = {},
+ const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , slotIndex( slotIndex_ )
+ , pStdReferenceInfo( pStdReferenceInfo_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- VideoEncodeH264DpbSlotInfoEXT( VideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoEncodeH264DpbSlotInfoEXT( VideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH264DpbSlotInfoEXT( VkVideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH264DpbSlotInfoEXT( *reinterpret_cast<VideoEncodeH264DpbSlotInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoEncodeH264DpbSlotInfoEXT &
- operator=( VideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoEncodeH264DpbSlotInfoEXT & operator=( VideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH264DpbSlotInfoEXT & operator=( VkVideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -94867,35 +97500,32 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264DpbSlotInfoEXT &
- setPStdPictureInfo( const StdVideoEncodeH264PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT
+ setPStdReferenceInfo( const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
{
- pStdPictureInfo = pStdPictureInfo_;
+ pStdReferenceInfo = pStdReferenceInfo_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoEncodeH264DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH264DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH264DpbSlotInfoEXT *>( this );
}
- explicit operator VkVideoEncodeH264DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH264DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH264DpbSlotInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- int8_t const &,
- const StdVideoEncodeH264PictureInfo * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, int8_t const &, const StdVideoEncodeH264ReferenceInfo * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType, pNext, slotIndex, pStdPictureInfo );
+ return std::tie( sType, pNext, slotIndex, pStdReferenceInfo );
}
# endif
@@ -94904,11 +97534,10 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoEncodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( slotIndex == rhs.slotIndex ) &&
- ( pStdPictureInfo == rhs.pStdPictureInfo );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( slotIndex == rhs.slotIndex ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo );
# endif
}
@@ -94919,19 +97548,11 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264DpbSlotInfoEXT;
- const void * pNext = {};
- int8_t slotIndex = {};
- const StdVideoEncodeH264PictureInfo * pStdPictureInfo = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264DpbSlotInfoEXT;
+ const void * pNext = {};
+ int8_t slotIndex = {};
+ const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT ) ==
- sizeof( VkVideoEncodeH264DpbSlotInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT>::value,
- "VideoEncodeH264DpbSlotInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH264DpbSlotInfoEXT>
@@ -94941,95 +97562,91 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
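
The hunk above swaps VideoEncodeH264DpbSlotInfoEXT's pStdPictureInfo member for a pStdReferenceInfo pointer. A minimal usage sketch of the updated constructor, assuming the default `vk` namespace alias and that the beta video std headers (which provide StdVideoEncodeH264ReferenceInfo) are pulled in by defining VK_ENABLE_BETA_EXTENSIONS:

#define VK_ENABLE_BETA_EXTENSIONS
#include <vulkan/vulkan.hpp>

// Illustrative only: stdRef must outlive the returned struct, which only
// stores a pointer to it.
vk::VideoEncodeH264DpbSlotInfoEXT makeDpbSlot( const StdVideoEncodeH264ReferenceInfo & stdRef )
{
    return vk::VideoEncodeH264DpbSlotInfoEXT( /* slotIndex_ */ 0, /* pStdReferenceInfo_ */ &stdRef );
}
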
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct VideoEncodeH264EmitPictureParametersEXT
+ struct VideoEncodeH264EmitPictureParametersInfoEXT
{
- using NativeType = VkVideoEncodeH264EmitPictureParametersEXT;
+ using NativeType = VkVideoEncodeH264EmitPictureParametersInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eVideoEncodeH264EmitPictureParametersEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264EmitPictureParametersInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- VideoEncodeH264EmitPictureParametersEXT( uint8_t spsId_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ = {},
- uint32_t ppsIdEntryCount_ = {},
- const uint8_t * ppsIdEntries_ = {} ) VULKAN_HPP_NOEXCEPT
- : spsId( spsId_ )
+ VULKAN_HPP_CONSTEXPR VideoEncodeH264EmitPictureParametersInfoEXT( uint8_t spsId_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ = {},
+ uint32_t ppsIdEntryCount_ = {},
+ const uint8_t * ppsIdEntries_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , spsId( spsId_ )
, emitSpsEnable( emitSpsEnable_ )
, ppsIdEntryCount( ppsIdEntryCount_ )
, ppsIdEntries( ppsIdEntries_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR VideoEncodeH264EmitPictureParametersEXT( VideoEncodeH264EmitPictureParametersEXT const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoEncodeH264EmitPictureParametersInfoEXT( VideoEncodeH264EmitPictureParametersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoEncodeH264EmitPictureParametersEXT( VkVideoEncodeH264EmitPictureParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : VideoEncodeH264EmitPictureParametersEXT(
- *reinterpret_cast<VideoEncodeH264EmitPictureParametersEXT const *>( &rhs ) )
- {}
+ VideoEncodeH264EmitPictureParametersInfoEXT( VkVideoEncodeH264EmitPictureParametersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : VideoEncodeH264EmitPictureParametersInfoEXT( *reinterpret_cast<VideoEncodeH264EmitPictureParametersInfoEXT const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- VideoEncodeH264EmitPictureParametersEXT(
- uint8_t spsId_,
- VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint8_t> const & psIdEntries_ )
- : spsId( spsId_ )
+ VideoEncodeH264EmitPictureParametersInfoEXT( uint8_t spsId_,
+ VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint8_t> const & psIdEntries_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , spsId( spsId_ )
, emitSpsEnable( emitSpsEnable_ )
, ppsIdEntryCount( static_cast<uint32_t>( psIdEntries_.size() ) )
, ppsIdEntries( psIdEntries_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoEncodeH264EmitPictureParametersEXT &
- operator=( VideoEncodeH264EmitPictureParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoEncodeH264EmitPictureParametersInfoEXT & operator=( VideoEncodeH264EmitPictureParametersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoEncodeH264EmitPictureParametersEXT &
- operator=( VkVideoEncodeH264EmitPictureParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VideoEncodeH264EmitPictureParametersInfoEXT & operator=( VkVideoEncodeH264EmitPictureParametersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264EmitPictureParametersEXT const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264EmitPictureParametersInfoEXT const *>( &rhs );
return *this;
}
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersEXT &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersEXT & setSpsId( uint8_t spsId_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersInfoEXT & setSpsId( uint8_t spsId_ ) VULKAN_HPP_NOEXCEPT
{
spsId = spsId_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersEXT &
- setEmitSpsEnable( VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersInfoEXT & setEmitSpsEnable( VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ ) VULKAN_HPP_NOEXCEPT
{
emitSpsEnable = emitSpsEnable_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersEXT &
- setPpsIdEntryCount( uint32_t ppsIdEntryCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersInfoEXT & setPpsIdEntryCount( uint32_t ppsIdEntryCount_ ) VULKAN_HPP_NOEXCEPT
{
ppsIdEntryCount = ppsIdEntryCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersEXT &
- setPpsIdEntries( const uint8_t * ppsIdEntries_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersInfoEXT & setPpsIdEntries( const uint8_t * ppsIdEntries_ ) VULKAN_HPP_NOEXCEPT
{
ppsIdEntries = ppsIdEntries_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- VideoEncodeH264EmitPictureParametersEXT & setPsIdEntries(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint8_t> const & psIdEntries_ ) VULKAN_HPP_NOEXCEPT
+ VideoEncodeH264EmitPictureParametersInfoEXT &
+ setPsIdEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint8_t> const & psIdEntries_ ) VULKAN_HPP_NOEXCEPT
{
ppsIdEntryCount = static_cast<uint32_t>( psIdEntries_.size() );
ppsIdEntries = psIdEntries_.data();
@@ -95038,17 +97655,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoEncodeH264EmitPictureParametersEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH264EmitPictureParametersInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkVideoEncodeH264EmitPictureParametersEXT *>( this );
+ return *reinterpret_cast<const VkVideoEncodeH264EmitPictureParametersInfoEXT *>( this );
}
- explicit operator VkVideoEncodeH264EmitPictureParametersEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH264EmitPictureParametersInfoEXT &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkVideoEncodeH264EmitPictureParametersEXT *>( this );
+ return *reinterpret_cast<VkVideoEncodeH264EmitPictureParametersInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -95066,47 +97683,37 @@ namespace VULKAN_HPP_NAMESPACE
# endif
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( VideoEncodeH264EmitPictureParametersEXT const & ) const = default;
+ auto operator<=>( VideoEncodeH264EmitPictureParametersInfoEXT const & ) const = default;
# else
- bool operator==( VideoEncodeH264EmitPictureParametersEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( VideoEncodeH264EmitPictureParametersInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( spsId == rhs.spsId ) &&
- ( emitSpsEnable == rhs.emitSpsEnable ) && ( ppsIdEntryCount == rhs.ppsIdEntryCount ) &&
- ( ppsIdEntries == rhs.ppsIdEntries );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( spsId == rhs.spsId ) && ( emitSpsEnable == rhs.emitSpsEnable ) &&
+ ( ppsIdEntryCount == rhs.ppsIdEntryCount ) && ( ppsIdEntries == rhs.ppsIdEntries );
# endif
}
- bool operator!=( VideoEncodeH264EmitPictureParametersEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( VideoEncodeH264EmitPictureParametersInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264EmitPictureParametersEXT;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264EmitPictureParametersInfoEXT;
const void * pNext = {};
uint8_t spsId = {};
VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable = {};
uint32_t ppsIdEntryCount = {};
const uint8_t * ppsIdEntries = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264EmitPictureParametersEXT ) ==
- sizeof( VkVideoEncodeH264EmitPictureParametersEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264EmitPictureParametersEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264EmitPictureParametersEXT>::value,
- "VideoEncodeH264EmitPictureParametersEXT is not nothrow_move_constructible!" );
template <>
- struct CppType<StructureType, StructureType::eVideoEncodeH264EmitPictureParametersEXT>
+ struct CppType<StructureType, StructureType::eVideoEncodeH264EmitPictureParametersInfoEXT>
{
- using Type = VideoEncodeH264EmitPictureParametersEXT;
+ using Type = VideoEncodeH264EmitPictureParametersInfoEXT;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
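
Beyond the rename to VideoEncodeH264EmitPictureParametersInfoEXT, the struct keeps its ArrayProxy convenience constructor. A minimal sketch of that enhanced-mode overload, with hypothetical SPS/PPS id values and assuming VULKAN_HPP_DISABLE_ENHANCED_MODE is left undefined:

#define VK_ENABLE_BETA_EXTENSIONS
#include <vulkan/vulkan.hpp>
#include <array>

vk::VideoEncodeH264EmitPictureParametersInfoEXT makeEmitInfo( const std::array<uint8_t, 2> & ppsIds )
{
    // The array-proxy overload fills ppsIdEntryCount and ppsIdEntries from the container.
    return vk::VideoEncodeH264EmitPictureParametersInfoEXT( /* spsId_ */ 0, /* emitSpsEnable_ */ VK_TRUE, /* psIdEntries_ */ ppsIds );
}
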
@@ -95116,20 +97723,19 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkVideoEncodeH264FrameSizeEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoEncodeH264FrameSizeEXT( uint32_t frameISize_ = {},
- uint32_t framePSize_ = {},
- uint32_t frameBSize_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR VideoEncodeH264FrameSizeEXT( uint32_t frameISize_ = {}, uint32_t framePSize_ = {}, uint32_t frameBSize_ = {} ) VULKAN_HPP_NOEXCEPT
: frameISize( frameISize_ )
, framePSize( framePSize_ )
, frameBSize( frameBSize_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- VideoEncodeH264FrameSizeEXT( VideoEncodeH264FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoEncodeH264FrameSizeEXT( VideoEncodeH264FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH264FrameSizeEXT( VkVideoEncodeH264FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH264FrameSizeEXT( *reinterpret_cast<VideoEncodeH264FrameSizeEXT const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
VideoEncodeH264FrameSizeEXT & operator=( VideoEncodeH264FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -95160,17 +97766,17 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoEncodeH264FrameSizeEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH264FrameSizeEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH264FrameSizeEXT *>( this );
}
- explicit operator VkVideoEncodeH264FrameSizeEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH264FrameSizeEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH264FrameSizeEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -95187,7 +97793,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoEncodeH264FrameSizeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( frameISize == rhs.frameISize ) && ( framePSize == rhs.framePSize ) && ( frameBSize == rhs.frameBSize );
@@ -95205,283 +97811,372 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t framePSize = {};
uint32_t frameBSize = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT ) ==
- sizeof( VkVideoEncodeH264FrameSizeEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT>::value,
- "VideoEncodeH264FrameSizeEXT is not nothrow_move_constructible!" );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct VideoEncodeH264NaluSliceEXT
+ struct VideoEncodeH264ReferenceListsInfoEXT
{
- using NativeType = VkVideoEncodeH264NaluSliceEXT;
+ using NativeType = VkVideoEncodeH264ReferenceListsInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264NaluSliceEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264ReferenceListsInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoEncodeH264NaluSliceEXT(
- const StdVideoEncodeH264SliceHeader * pSliceHeaderStd_ = {},
- uint32_t mbCount_ = {},
- uint8_t refFinalList0EntryCount_ = {},
- const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList0Entries_ = {},
- uint8_t refFinalList1EntryCount_ = {},
- const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList1Entries_ = {} ) VULKAN_HPP_NOEXCEPT
- : pSliceHeaderStd( pSliceHeaderStd_ )
- , mbCount( mbCount_ )
- , refFinalList0EntryCount( refFinalList0EntryCount_ )
- , pRefFinalList0Entries( pRefFinalList0Entries_ )
- , refFinalList1EntryCount( refFinalList1EntryCount_ )
- , pRefFinalList1Entries( pRefFinalList1Entries_ )
- {}
+ VULKAN_HPP_CONSTEXPR VideoEncodeH264ReferenceListsInfoEXT( uint8_t referenceList0EntryCount_ = {},
+ const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pReferenceList0Entries_ = {},
+ uint8_t referenceList1EntryCount_ = {},
+ const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pReferenceList1Entries_ = {},
+ const StdVideoEncodeH264RefMemMgmtCtrlOperations * pMemMgmtCtrlOperations_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , referenceList0EntryCount( referenceList0EntryCount_ )
+ , pReferenceList0Entries( pReferenceList0Entries_ )
+ , referenceList1EntryCount( referenceList1EntryCount_ )
+ , pReferenceList1Entries( pReferenceList1Entries_ )
+ , pMemMgmtCtrlOperations( pMemMgmtCtrlOperations_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- VideoEncodeH264NaluSliceEXT( VideoEncodeH264NaluSliceEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoEncodeH264ReferenceListsInfoEXT( VideoEncodeH264ReferenceListsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoEncodeH264NaluSliceEXT( VkVideoEncodeH264NaluSliceEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : VideoEncodeH264NaluSliceEXT( *reinterpret_cast<VideoEncodeH264NaluSliceEXT const *>( &rhs ) )
- {}
+ VideoEncodeH264ReferenceListsInfoEXT( VkVideoEncodeH264ReferenceListsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : VideoEncodeH264ReferenceListsInfoEXT( *reinterpret_cast<VideoEncodeH264ReferenceListsInfoEXT const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- VideoEncodeH264NaluSliceEXT(
- const StdVideoEncodeH264SliceHeader * pSliceHeaderStd_,
- uint32_t mbCount_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const &
- refFinalList0Entries_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const &
- refFinalList1Entries_ = {} )
- : pSliceHeaderStd( pSliceHeaderStd_ )
- , mbCount( mbCount_ )
- , refFinalList0EntryCount( static_cast<uint8_t>( refFinalList0Entries_.size() ) )
- , pRefFinalList0Entries( refFinalList0Entries_.data() )
- , refFinalList1EntryCount( static_cast<uint8_t>( refFinalList1Entries_.size() ) )
- , pRefFinalList1Entries( refFinalList1Entries_.data() )
- {}
+ VideoEncodeH264ReferenceListsInfoEXT(
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const & referenceList0Entries_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const & referenceList1Entries_ = {},
+ const StdVideoEncodeH264RefMemMgmtCtrlOperations * pMemMgmtCtrlOperations_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , referenceList0EntryCount( static_cast<uint8_t>( referenceList0Entries_.size() ) )
+ , pReferenceList0Entries( referenceList0Entries_.data() )
+ , referenceList1EntryCount( static_cast<uint8_t>( referenceList1Entries_.size() ) )
+ , pReferenceList1Entries( referenceList1Entries_.data() )
+ , pMemMgmtCtrlOperations( pMemMgmtCtrlOperations_ )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoEncodeH264NaluSliceEXT & operator=( VideoEncodeH264NaluSliceEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoEncodeH264ReferenceListsInfoEXT & operator=( VideoEncodeH264ReferenceListsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoEncodeH264NaluSliceEXT & operator=( VkVideoEncodeH264NaluSliceEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VideoEncodeH264ReferenceListsInfoEXT & operator=( VkVideoEncodeH264ReferenceListsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT const *>( &rhs );
return *this;
}
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceEXT &
- setPSliceHeaderStd( const StdVideoEncodeH264SliceHeader * pSliceHeaderStd_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsInfoEXT & setReferenceList0EntryCount( uint8_t referenceList0EntryCount_ ) VULKAN_HPP_NOEXCEPT
{
- pSliceHeaderStd = pSliceHeaderStd_;
+ referenceList0EntryCount = referenceList0EntryCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceEXT & setMbCount( uint32_t mbCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsInfoEXT &
+ setPReferenceList0Entries( const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pReferenceList0Entries_ ) VULKAN_HPP_NOEXCEPT
{
- mbCount = mbCount_;
+ pReferenceList0Entries = pReferenceList0Entries_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceEXT &
- setRefFinalList0EntryCount( uint8_t refFinalList0EntryCount_ ) VULKAN_HPP_NOEXCEPT
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ VideoEncodeH264ReferenceListsInfoEXT & setReferenceList0Entries(
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const & referenceList0Entries_ )
+ VULKAN_HPP_NOEXCEPT
{
- refFinalList0EntryCount = refFinalList0EntryCount_;
+ referenceList0EntryCount = static_cast<uint8_t>( referenceList0Entries_.size() );
+ pReferenceList0Entries = referenceList0Entries_.data();
return *this;
}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceEXT & setPRefFinalList0Entries(
- const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList0Entries_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsInfoEXT & setReferenceList1EntryCount( uint8_t referenceList1EntryCount_ ) VULKAN_HPP_NOEXCEPT
{
- pRefFinalList0Entries = pRefFinalList0Entries_;
+ referenceList1EntryCount = referenceList1EntryCount_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsInfoEXT &
+ setPReferenceList1Entries( const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pReferenceList1Entries_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pReferenceList1Entries = pReferenceList1Entries_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- VideoEncodeH264NaluSliceEXT & setRefFinalList0Entries(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const &
- refFinalList0Entries_ ) VULKAN_HPP_NOEXCEPT
+ VideoEncodeH264ReferenceListsInfoEXT & setReferenceList1Entries(
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const & referenceList1Entries_ )
+ VULKAN_HPP_NOEXCEPT
{
- refFinalList0EntryCount = static_cast<uint8_t>( refFinalList0Entries_.size() );
- pRefFinalList0Entries = refFinalList0Entries_.data();
+ referenceList1EntryCount = static_cast<uint8_t>( referenceList1Entries_.size() );
+ pReferenceList1Entries = referenceList1Entries_.data();
return *this;
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceEXT &
- setRefFinalList1EntryCount( uint8_t refFinalList1EntryCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsInfoEXT &
+ setPMemMgmtCtrlOperations( const StdVideoEncodeH264RefMemMgmtCtrlOperations * pMemMgmtCtrlOperations_ ) VULKAN_HPP_NOEXCEPT
{
- refFinalList1EntryCount = refFinalList1EntryCount_;
+ pMemMgmtCtrlOperations = pMemMgmtCtrlOperations_;
return *this;
}
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkVideoEncodeH264ReferenceListsInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkVideoEncodeH264ReferenceListsInfoEXT *>( this );
+ }
+
+ operator VkVideoEncodeH264ReferenceListsInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkVideoEncodeH264ReferenceListsInfoEXT *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ uint8_t const &,
+ const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * const &,
+ uint8_t const &,
+ const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * const &,
+ const StdVideoEncodeH264RefMemMgmtCtrlOperations * const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie(
+ sType, pNext, referenceList0EntryCount, pReferenceList0Entries, referenceList1EntryCount, pReferenceList1Entries, pMemMgmtCtrlOperations );
+ }
+# endif
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceEXT & setPRefFinalList1Entries(
- const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList1Entries_ ) VULKAN_HPP_NOEXCEPT
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( VideoEncodeH264ReferenceListsInfoEXT const & ) const = default;
+# else
+ bool operator==( VideoEncodeH264ReferenceListsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- pRefFinalList1Entries = pRefFinalList1Entries_;
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( referenceList0EntryCount == rhs.referenceList0EntryCount ) &&
+ ( pReferenceList0Entries == rhs.pReferenceList0Entries ) && ( referenceList1EntryCount == rhs.referenceList1EntryCount ) &&
+ ( pReferenceList1Entries == rhs.pReferenceList1Entries ) && ( pMemMgmtCtrlOperations == rhs.pMemMgmtCtrlOperations );
+# endif
+ }
+
+ bool operator!=( VideoEncodeH264ReferenceListsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+# endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264ReferenceListsInfoEXT;
+ const void * pNext = {};
+ uint8_t referenceList0EntryCount = {};
+ const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pReferenceList0Entries = {};
+ uint8_t referenceList1EntryCount = {};
+ const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pReferenceList1Entries = {};
+ const StdVideoEncodeH264RefMemMgmtCtrlOperations * pMemMgmtCtrlOperations = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eVideoEncodeH264ReferenceListsInfoEXT>
+ {
+ using Type = VideoEncodeH264ReferenceListsInfoEXT;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
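
The reference lists that VideoEncodeH264NaluSliceEXT used to carry inline now live in the new VideoEncodeH264ReferenceListsInfoEXT struct added above. A minimal sketch using its counted-pointer constructor, with a single hypothetical L0 entry, no L1 list, and no memory-management-control operations:

#define VK_ENABLE_BETA_EXTENSIONS
#include <vulkan/vulkan.hpp>

vk::VideoEncodeH264ReferenceListsInfoEXT makeReferenceLists( const vk::VideoEncodeH264DpbSlotInfoEXT & l0Slot )
{
    // l0Slot is only pointed to, so it must stay alive while the returned struct is used.
    return vk::VideoEncodeH264ReferenceListsInfoEXT( /* referenceList0EntryCount_ */ 1, /* pReferenceList0Entries_ */ &l0Slot );
}
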
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ struct VideoEncodeH264NaluSliceInfoEXT
+ {
+ using NativeType = VkVideoEncodeH264NaluSliceInfoEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264NaluSliceInfoEXT;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR VideoEncodeH264NaluSliceInfoEXT( uint32_t mbCount_ = {},
+ const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT * pReferenceFinalLists_ = {},
+ const StdVideoEncodeH264SliceHeader * pSliceHeaderStd_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , mbCount( mbCount_ )
+ , pReferenceFinalLists( pReferenceFinalLists_ )
+ , pSliceHeaderStd( pSliceHeaderStd_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR VideoEncodeH264NaluSliceInfoEXT( VideoEncodeH264NaluSliceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ VideoEncodeH264NaluSliceInfoEXT( VkVideoEncodeH264NaluSliceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : VideoEncodeH264NaluSliceInfoEXT( *reinterpret_cast<VideoEncodeH264NaluSliceInfoEXT const *>( &rhs ) )
+ {
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ VideoEncodeH264NaluSliceInfoEXT & operator=( VideoEncodeH264NaluSliceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ VideoEncodeH264NaluSliceInfoEXT & operator=( VkVideoEncodeH264NaluSliceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT const *>( &rhs );
return *this;
}
-# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- VideoEncodeH264NaluSliceEXT & setRefFinalList1Entries(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const &
- refFinalList1Entries_ ) VULKAN_HPP_NOEXCEPT
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
- refFinalList1EntryCount = static_cast<uint8_t>( refFinalList1Entries_.size() );
- pRefFinalList1Entries = refFinalList1Entries_.data();
+ pNext = pNext_;
return *this;
}
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoEncodeH264NaluSliceEXT const &() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceInfoEXT & setMbCount( uint32_t mbCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ mbCount = mbCount_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceInfoEXT &
+ setPReferenceFinalLists( const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT * pReferenceFinalLists_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pReferenceFinalLists = pReferenceFinalLists_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceInfoEXT & setPSliceHeaderStd( const StdVideoEncodeH264SliceHeader * pSliceHeaderStd_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pSliceHeaderStd = pSliceHeaderStd_;
+ return *this;
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkVideoEncodeH264NaluSliceInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkVideoEncodeH264NaluSliceEXT *>( this );
+ return *reinterpret_cast<const VkVideoEncodeH264NaluSliceInfoEXT *>( this );
}
- explicit operator VkVideoEncodeH264NaluSliceEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH264NaluSliceInfoEXT &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkVideoEncodeH264NaluSliceEXT *>( this );
+ return *reinterpret_cast<VkVideoEncodeH264NaluSliceInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
const void * const &,
- const StdVideoEncodeH264SliceHeader * const &,
uint32_t const &,
- uint8_t const &,
- const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * const &,
- uint8_t const &,
- const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * const &>
+ const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT * const &,
+ const StdVideoEncodeH264SliceHeader * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- pSliceHeaderStd,
- mbCount,
- refFinalList0EntryCount,
- pRefFinalList0Entries,
- refFinalList1EntryCount,
- pRefFinalList1Entries );
+ return std::tie( sType, pNext, mbCount, pReferenceFinalLists, pSliceHeaderStd );
}
# endif
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( VideoEncodeH264NaluSliceEXT const & ) const = default;
+ auto operator<=>( VideoEncodeH264NaluSliceInfoEXT const & ) const = default;
# else
- bool operator==( VideoEncodeH264NaluSliceEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( VideoEncodeH264NaluSliceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pSliceHeaderStd == rhs.pSliceHeaderStd ) &&
- ( mbCount == rhs.mbCount ) && ( refFinalList0EntryCount == rhs.refFinalList0EntryCount ) &&
- ( pRefFinalList0Entries == rhs.pRefFinalList0Entries ) &&
- ( refFinalList1EntryCount == rhs.refFinalList1EntryCount ) &&
- ( pRefFinalList1Entries == rhs.pRefFinalList1Entries );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mbCount == rhs.mbCount ) && ( pReferenceFinalLists == rhs.pReferenceFinalLists ) &&
+ ( pSliceHeaderStd == rhs.pSliceHeaderStd );
# endif
}
- bool operator!=( VideoEncodeH264NaluSliceEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( VideoEncodeH264NaluSliceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264NaluSliceEXT;
- const void * pNext = {};
- const StdVideoEncodeH264SliceHeader * pSliceHeaderStd = {};
- uint32_t mbCount = {};
- uint8_t refFinalList0EntryCount = {};
- const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList0Entries = {};
- uint8_t refFinalList1EntryCount = {};
- const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList1Entries = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264NaluSliceInfoEXT;
+ const void * pNext = {};
+ uint32_t mbCount = {};
+ const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT * pReferenceFinalLists = {};
+ const StdVideoEncodeH264SliceHeader * pSliceHeaderStd = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT ) ==
- sizeof( VkVideoEncodeH264NaluSliceEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT>::value,
- "VideoEncodeH264NaluSliceEXT is not nothrow_move_constructible!" );
template <>
- struct CppType<StructureType, StructureType::eVideoEncodeH264NaluSliceEXT>
+ struct CppType<StructureType, StructureType::eVideoEncodeH264NaluSliceInfoEXT>
{
- using Type = VideoEncodeH264NaluSliceEXT;
+ using Type = VideoEncodeH264NaluSliceInfoEXT;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
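
With the split above, a NALU slice now just points at a reference-lists struct and the std slice header instead of owning the final-list arrays. A minimal sketch of the new constructor order (mbCount, pReferenceFinalLists, pSliceHeaderStd), with hypothetical inputs:

#define VK_ENABLE_BETA_EXTENSIONS
#include <vulkan/vulkan.hpp>

vk::VideoEncodeH264NaluSliceInfoEXT makeSlice( uint32_t                                         mbCount,
                                               const vk::VideoEncodeH264ReferenceListsInfoEXT & refLists,
                                               const StdVideoEncodeH264SliceHeader &            sliceHeaderStd )
{
    // Both referenced objects are only pointed to, not copied.
    return vk::VideoEncodeH264NaluSliceInfoEXT( mbCount, &refLists, &sliceHeaderStd );
}
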
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct VideoEncodeH264ProfileEXT
+ struct VideoEncodeH264ProfileInfoEXT
{
- using NativeType = VkVideoEncodeH264ProfileEXT;
+ using NativeType = VkVideoEncodeH264ProfileInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264ProfileEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264ProfileInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoEncodeH264ProfileEXT( StdVideoH264ProfileIdc stdProfileIdc_ = {} ) VULKAN_HPP_NOEXCEPT
- : stdProfileIdc( stdProfileIdc_ )
- {}
+ VULKAN_HPP_CONSTEXPR VideoEncodeH264ProfileInfoEXT( StdVideoH264ProfileIdc stdProfileIdc_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , stdProfileIdc( stdProfileIdc_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- VideoEncodeH264ProfileEXT( VideoEncodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoEncodeH264ProfileInfoEXT( VideoEncodeH264ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoEncodeH264ProfileEXT( VkVideoEncodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : VideoEncodeH264ProfileEXT( *reinterpret_cast<VideoEncodeH264ProfileEXT const *>( &rhs ) )
- {}
+ VideoEncodeH264ProfileInfoEXT( VkVideoEncodeH264ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : VideoEncodeH264ProfileInfoEXT( *reinterpret_cast<VideoEncodeH264ProfileInfoEXT const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoEncodeH264ProfileEXT & operator=( VideoEncodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoEncodeH264ProfileInfoEXT & operator=( VideoEncodeH264ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoEncodeH264ProfileEXT & operator=( VkVideoEncodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VideoEncodeH264ProfileInfoEXT & operator=( VkVideoEncodeH264ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileEXT const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoEXT const *>( &rhs );
return *this;
}
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ProfileEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ProfileInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ProfileEXT &
- setStdProfileIdc( StdVideoH264ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ProfileInfoEXT & setStdProfileIdc( StdVideoH264ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT
{
stdProfileIdc = stdProfileIdc_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoEncodeH264ProfileEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH264ProfileInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkVideoEncodeH264ProfileEXT *>( this );
+ return *reinterpret_cast<const VkVideoEncodeH264ProfileInfoEXT *>( this );
}
- explicit operator VkVideoEncodeH264ProfileEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH264ProfileInfoEXT &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkVideoEncodeH264ProfileEXT *>( this );
+ return *reinterpret_cast<VkVideoEncodeH264ProfileInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -95494,7 +98189,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- std::strong_ordering operator<=>( VideoEncodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ std::strong_ordering operator<=>( VideoEncodeH264ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
return cmp;
@@ -95507,34 +98202,26 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif
- bool operator==( VideoEncodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( VideoEncodeH264ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ) == 0 );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ) == 0 );
}
- bool operator!=( VideoEncodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( VideoEncodeH264ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264ProfileEXT;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264ProfileInfoEXT;
const void * pNext = {};
StdVideoH264ProfileIdc stdProfileIdc = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileEXT ) ==
- sizeof( VkVideoEncodeH264ProfileEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileEXT>::value,
- "VideoEncodeH264ProfileEXT is not nothrow_move_constructible!" );
template <>
- struct CppType<StructureType, StructureType::eVideoEncodeH264ProfileEXT>
+ struct CppType<StructureType, StructureType::eVideoEncodeH264ProfileInfoEXT>
{
- using Type = VideoEncodeH264ProfileEXT;
+ using Type = VideoEncodeH264ProfileInfoEXT;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
@@ -95544,18 +98231,19 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkVideoEncodeH264QpEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- VideoEncodeH264QpEXT( int32_t qpI_ = {}, int32_t qpP_ = {}, int32_t qpB_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR VideoEncodeH264QpEXT( int32_t qpI_ = {}, int32_t qpP_ = {}, int32_t qpB_ = {} ) VULKAN_HPP_NOEXCEPT
: qpI( qpI_ )
, qpP( qpP_ )
, qpB( qpB_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR VideoEncodeH264QpEXT( VideoEncodeH264QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH264QpEXT( VkVideoEncodeH264QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH264QpEXT( *reinterpret_cast<VideoEncodeH264QpEXT const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
VideoEncodeH264QpEXT & operator=( VideoEncodeH264QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -95586,17 +98274,17 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoEncodeH264QpEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH264QpEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH264QpEXT *>( this );
}
- explicit operator VkVideoEncodeH264QpEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH264QpEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH264QpEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -95613,7 +98301,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoEncodeH264QpEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( qpI == rhs.qpI ) && ( qpP == rhs.qpP ) && ( qpB == rhs.qpB );
@@ -95631,12 +98319,6 @@ namespace VULKAN_HPP_NAMESPACE
int32_t qpP = {};
int32_t qpB = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT ) == sizeof( VkVideoEncodeH264QpEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT>::value,
- "VideoEncodeH264QpEXT is not nothrow_move_constructible!" );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
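
VideoEncodeH264QpEXT and VideoEncodeH264FrameSizeEXT keep their three-value constructors and feed the per-layer rate-control struct further below (minQp/maxQp and maxFrameSize). A minimal sketch with purely illustrative QP and byte-budget values:

#define VK_ENABLE_BETA_EXTENSIONS
#include <vulkan/vulkan.hpp>

void fillLayerLimits( vk::VideoEncodeH264QpEXT & minQp, vk::VideoEncodeH264FrameSizeEXT & maxFrameSize )
{
    minQp        = vk::VideoEncodeH264QpEXT( /* qpI_ */ 18, /* qpP_ */ 20, /* qpB_ */ 22 );
    maxFrameSize = vk::VideoEncodeH264FrameSizeEXT( /* frameISize_ */ 120000, /* framePSize_ */ 80000, /* frameBSize_ */ 40000 );
}
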
#if defined( VK_ENABLE_BETA_EXTENSIONS )
@@ -95645,34 +98327,34 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkVideoEncodeH264RateControlInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eVideoEncodeH264RateControlInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264RateControlInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlInfoEXT(
- uint32_t gopFrameCount_ = {},
- uint32_t idrPeriod_ = {},
- uint32_t consecutiveBFrameCount_ = {},
- VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureFlagBitsEXT rateControlStructure_ =
- VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureFlagBitsEXT::eUnknown,
- uint8_t temporalLayerCount_ = {} ) VULKAN_HPP_NOEXCEPT
- : gopFrameCount( gopFrameCount_ )
+ VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlInfoEXT( uint32_t gopFrameCount_ = {},
+ uint32_t idrPeriod_ = {},
+ uint32_t consecutiveBFrameCount_ = {},
+ VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureEXT rateControlStructure_ =
+ VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureEXT::eUnknown,
+ uint8_t temporalLayerCount_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , gopFrameCount( gopFrameCount_ )
, idrPeriod( idrPeriod_ )
, consecutiveBFrameCount( consecutiveBFrameCount_ )
, rateControlStructure( rateControlStructure_ )
, temporalLayerCount( temporalLayerCount_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- VideoEncodeH264RateControlInfoEXT( VideoEncodeH264RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlInfoEXT( VideoEncodeH264RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH264RateControlInfoEXT( VkVideoEncodeH264RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH264RateControlInfoEXT( *reinterpret_cast<VideoEncodeH264RateControlInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoEncodeH264RateControlInfoEXT &
- operator=( VideoEncodeH264RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoEncodeH264RateControlInfoEXT & operator=( VideoEncodeH264RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH264RateControlInfoEXT & operator=( VkVideoEncodeH264RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -95687,8 +98369,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT &
- setGopFrameCount( uint32_t gopFrameCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT & setGopFrameCount( uint32_t gopFrameCount_ ) VULKAN_HPP_NOEXCEPT
{
gopFrameCount = gopFrameCount_;
return *this;
@@ -95700,39 +98381,37 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT &
- setConsecutiveBFrameCount( uint32_t consecutiveBFrameCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT & setConsecutiveBFrameCount( uint32_t consecutiveBFrameCount_ ) VULKAN_HPP_NOEXCEPT
{
consecutiveBFrameCount = consecutiveBFrameCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT & setRateControlStructure(
- VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureFlagBitsEXT rateControlStructure_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT &
+ setRateControlStructure( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureEXT rateControlStructure_ ) VULKAN_HPP_NOEXCEPT
{
rateControlStructure = rateControlStructure_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT &
- setTemporalLayerCount( uint8_t temporalLayerCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT & setTemporalLayerCount( uint8_t temporalLayerCount_ ) VULKAN_HPP_NOEXCEPT
{
temporalLayerCount = temporalLayerCount_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoEncodeH264RateControlInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH264RateControlInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH264RateControlInfoEXT *>( this );
}
- explicit operator VkVideoEncodeH264RateControlInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH264RateControlInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH264RateControlInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -95741,13 +98420,12 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t const &,
uint32_t const &,
uint32_t const &,
- VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureFlagBitsEXT const &,
+ VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureEXT const &,
uint8_t const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- sType, pNext, gopFrameCount, idrPeriod, consecutiveBFrameCount, rateControlStructure, temporalLayerCount );
+ return std::tie( sType, pNext, gopFrameCount, idrPeriod, consecutiveBFrameCount, rateControlStructure, temporalLayerCount );
}
# endif
@@ -95756,12 +98434,12 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoEncodeH264RateControlInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( gopFrameCount == rhs.gopFrameCount ) &&
- ( idrPeriod == rhs.idrPeriod ) && ( consecutiveBFrameCount == rhs.consecutiveBFrameCount ) &&
- ( rateControlStructure == rhs.rateControlStructure ) && ( temporalLayerCount == rhs.temporalLayerCount );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( gopFrameCount == rhs.gopFrameCount ) && ( idrPeriod == rhs.idrPeriod ) &&
+ ( consecutiveBFrameCount == rhs.consecutiveBFrameCount ) && ( rateControlStructure == rhs.rateControlStructure ) &&
+ ( temporalLayerCount == rhs.temporalLayerCount );
# endif
}
@@ -95772,23 +98450,14 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264RateControlInfoEXT;
- const void * pNext = {};
- uint32_t gopFrameCount = {};
- uint32_t idrPeriod = {};
- uint32_t consecutiveBFrameCount = {};
- VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureFlagBitsEXT rateControlStructure =
- VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureFlagBitsEXT::eUnknown;
- uint8_t temporalLayerCount = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoEXT ) ==
- sizeof( VkVideoEncodeH264RateControlInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoEXT>::value,
- "VideoEncodeH264RateControlInfoEXT is not nothrow_move_constructible!" );
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264RateControlInfoEXT;
+ const void * pNext = {};
+ uint32_t gopFrameCount = {};
+ uint32_t idrPeriod = {};
+ uint32_t consecutiveBFrameCount = {};
+ VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureEXT rateControlStructure = VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureEXT::eUnknown;
+ uint8_t temporalLayerCount = {};
+ };
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH264RateControlInfoEXT>
@@ -95803,21 +98472,21 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkVideoEncodeH264RateControlLayerInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eVideoEncodeH264RateControlLayerInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264RateControlLayerInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlLayerInfoEXT(
- uint8_t temporalLayerId_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp_ = {},
- VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT initialRcQp_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ = {},
- VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT minQp_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ = {},
- VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT maxQp_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ = {},
- VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT maxFrameSize_ = {} ) VULKAN_HPP_NOEXCEPT
- : temporalLayerId( temporalLayerId_ )
+ VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlLayerInfoEXT( uint8_t temporalLayerId_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp_ = {},
+ VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT initialRcQp_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ = {},
+ VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT minQp_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ = {},
+ VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT maxQp_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ = {},
+ VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT maxFrameSize_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , temporalLayerId( temporalLayerId_ )
, useInitialRcQp( useInitialRcQp_ )
, initialRcQp( initialRcQp_ )
, useMinQp( useMinQp_ )
@@ -95826,22 +98495,20 @@ namespace VULKAN_HPP_NAMESPACE
, maxQp( maxQp_ )
, useMaxFrameSize( useMaxFrameSize_ )
, maxFrameSize( maxFrameSize_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlLayerInfoEXT( VideoEncodeH264RateControlLayerInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlLayerInfoEXT( VideoEncodeH264RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH264RateControlLayerInfoEXT( VkVideoEncodeH264RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : VideoEncodeH264RateControlLayerInfoEXT(
- *reinterpret_cast<VideoEncodeH264RateControlLayerInfoEXT const *>( &rhs ) )
- {}
+ : VideoEncodeH264RateControlLayerInfoEXT( *reinterpret_cast<VideoEncodeH264RateControlLayerInfoEXT const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoEncodeH264RateControlLayerInfoEXT &
- operator=( VideoEncodeH264RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoEncodeH264RateControlLayerInfoEXT & operator=( VideoEncodeH264RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoEncodeH264RateControlLayerInfoEXT &
- operator=( VkVideoEncodeH264RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VideoEncodeH264RateControlLayerInfoEXT & operator=( VkVideoEncodeH264RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT const *>( &rhs );
return *this;
@@ -95854,81 +98521,74 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT &
- setTemporalLayerId( uint8_t temporalLayerId_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setTemporalLayerId( uint8_t temporalLayerId_ ) VULKAN_HPP_NOEXCEPT
{
temporalLayerId = temporalLayerId_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT &
- setUseInitialRcQp( VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setUseInitialRcQp( VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp_ ) VULKAN_HPP_NOEXCEPT
{
useInitialRcQp = useInitialRcQp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT &
- setInitialRcQp( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const & initialRcQp_ ) VULKAN_HPP_NOEXCEPT
+ setInitialRcQp( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const & initialRcQp_ ) VULKAN_HPP_NOEXCEPT
{
initialRcQp = initialRcQp_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT &
- setUseMinQp( VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setUseMinQp( VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ ) VULKAN_HPP_NOEXCEPT
{
useMinQp = useMinQp_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT &
- setMinQp( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const & minQp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setMinQp( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const & minQp_ ) VULKAN_HPP_NOEXCEPT
{
minQp = minQp_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT &
- setUseMaxQp( VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setUseMaxQp( VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ ) VULKAN_HPP_NOEXCEPT
{
useMaxQp = useMaxQp_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT &
- setMaxQp( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const & maxQp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setMaxQp( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const & maxQp_ ) VULKAN_HPP_NOEXCEPT
{
maxQp = maxQp_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT &
- setUseMaxFrameSize( VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setUseMaxFrameSize( VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ ) VULKAN_HPP_NOEXCEPT
{
useMaxFrameSize = useMaxFrameSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT &
- setMaxFrameSize( VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT const & maxFrameSize_ ) VULKAN_HPP_NOEXCEPT
+ setMaxFrameSize( VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT const & maxFrameSize_ ) VULKAN_HPP_NOEXCEPT
{
maxFrameSize = maxFrameSize_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoEncodeH264RateControlLayerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH264RateControlLayerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH264RateControlLayerInfoEXT *>( this );
}
- explicit operator VkVideoEncodeH264RateControlLayerInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH264RateControlLayerInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH264RateControlLayerInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -95946,17 +98606,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- temporalLayerId,
- useInitialRcQp,
- initialRcQp,
- useMinQp,
- minQp,
- useMaxQp,
- maxQp,
- useMaxFrameSize,
- maxFrameSize );
+ return std::tie( sType, pNext, temporalLayerId, useInitialRcQp, initialRcQp, useMinQp, minQp, useMaxQp, maxQp, useMaxFrameSize, maxFrameSize );
}
# endif
@@ -95965,14 +98615,12 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoEncodeH264RateControlLayerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( temporalLayerId == rhs.temporalLayerId ) &&
- ( useInitialRcQp == rhs.useInitialRcQp ) && ( initialRcQp == rhs.initialRcQp ) &&
- ( useMinQp == rhs.useMinQp ) && ( minQp == rhs.minQp ) && ( useMaxQp == rhs.useMaxQp ) &&
- ( maxQp == rhs.maxQp ) && ( useMaxFrameSize == rhs.useMaxFrameSize ) &&
- ( maxFrameSize == rhs.maxFrameSize );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( temporalLayerId == rhs.temporalLayerId ) && ( useInitialRcQp == rhs.useInitialRcQp ) &&
+ ( initialRcQp == rhs.initialRcQp ) && ( useMinQp == rhs.useMinQp ) && ( minQp == rhs.minQp ) && ( useMaxQp == rhs.useMaxQp ) &&
+ ( maxQp == rhs.maxQp ) && ( useMaxFrameSize == rhs.useMaxFrameSize ) && ( maxFrameSize == rhs.maxFrameSize );
# endif
}
@@ -95983,8 +98631,8 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264RateControlLayerInfoEXT;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264RateControlLayerInfoEXT;
+ const void * pNext = {};
uint8_t temporalLayerId = {};
VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp = {};
VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT initialRcQp = {};
@@ -95995,15 +98643,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize = {};
VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT maxFrameSize = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT ) ==
- sizeof( VkVideoEncodeH264RateControlLayerInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT>::value,
- "VideoEncodeH264RateControlLayerInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH264RateControlLayerInfoEXT>
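The per-layer struct above keeps the chained Vulkan-Hpp setters, so a layer description can be built fluently. A sketch under the same include assumptions as the previous example; the qpI/qpP/qpB members of VideoEncodeH264QpEXT are taken from the extension's C headers rather than from this hunk.

static vk::VideoEncodeH264RateControlLayerInfoEXT makeH264RateControlLayer()
{
    // Assumed member names qpI/qpP/qpB for the per-picture-type QP values.
    vk::VideoEncodeH264QpEXT minQp{};
    minQp.qpI = 20;
    minQp.qpP = 22;
    minQp.qpB = 24;

    vk::VideoEncodeH264RateControlLayerInfoEXT layer{};
    layer.setTemporalLayerId( 0 )
        .setUseMinQp( VK_TRUE )
        .setMinQp( minQp )
        .setUseMaxQp( VK_FALSE )
        .setUseMaxFrameSize( VK_FALSE );
    return layer;
}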
@@ -96013,261 +98652,119 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct VideoEncodeH264SessionCreateInfoEXT
+ struct VideoEncodeH264SessionParametersAddInfoEXT
{
- using NativeType = VkVideoEncodeH264SessionCreateInfoEXT;
+ using NativeType = VkVideoEncodeH264SessionParametersAddInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eVideoEncodeH264SessionCreateInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionParametersAddInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoEXT(
- VULKAN_HPP_NAMESPACE::VideoEncodeH264CreateFlagsEXT flags_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D maxPictureSizeInMbs_ = {},
- const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , maxPictureSizeInMbs( maxPictureSizeInMbs_ )
- , pStdExtensionVersion( pStdExtensionVersion_ )
- {}
-
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoEXT( VideoEncodeH264SessionCreateInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
-
- VideoEncodeH264SessionCreateInfoEXT( VkVideoEncodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : VideoEncodeH264SessionCreateInfoEXT( *reinterpret_cast<VideoEncodeH264SessionCreateInfoEXT const *>( &rhs ) )
- {}
-# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
-
- VideoEncodeH264SessionCreateInfoEXT &
- operator=( VideoEncodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- VideoEncodeH264SessionCreateInfoEXT &
- operator=( VkVideoEncodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoEXT( uint32_t stdSPSCount_ = {},
+ const StdVideoH264SequenceParameterSet * pStdSPSs_ = {},
+ uint32_t stdPPSCount_ = {},
+ const StdVideoH264PictureParameterSet * pStdPPSs_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , stdSPSCount( stdSPSCount_ )
+ , pStdSPSs( pStdSPSs_ )
+ , stdPPSCount( stdPPSCount_ )
+ , pStdPPSs( pStdPPSs_ )
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoEXT const *>( &rhs );
- return *this;
}
-# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
+ VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoEXT( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoEXT &
- setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH264CreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ VideoEncodeH264SessionParametersAddInfoEXT( VkVideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : VideoEncodeH264SessionParametersAddInfoEXT( *reinterpret_cast<VideoEncodeH264SessionParametersAddInfoEXT const *>( &rhs ) )
{
- flags = flags_;
- return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoEXT &
- setMaxPictureSizeInMbs( VULKAN_HPP_NAMESPACE::Extent2D const & maxPictureSizeInMbs_ ) VULKAN_HPP_NOEXCEPT
- {
- maxPictureSizeInMbs = maxPictureSizeInMbs_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoEXT & setPStdExtensionVersion(
- const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT
- {
- pStdExtensionVersion = pStdExtensionVersion_;
- return *this;
- }
-# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
-
- explicit operator VkVideoEncodeH264SessionCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkVideoEncodeH264SessionCreateInfoEXT *>( this );
- }
-
- explicit operator VkVideoEncodeH264SessionCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkVideoEncodeH264SessionCreateInfoEXT *>( this );
- }
-
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
-# if 14 <= VULKAN_HPP_CPP_VERSION
- auto
-# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::VideoEncodeH264CreateFlagsEXT const &,
- VULKAN_HPP_NAMESPACE::Extent2D const &,
- const VULKAN_HPP_NAMESPACE::ExtensionProperties * const &>
-# endif
- reflect() const VULKAN_HPP_NOEXCEPT
- {
- return std::tie( sType, pNext, flags, maxPictureSizeInMbs, pStdExtensionVersion );
- }
-# endif
-
-# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( VideoEncodeH264SessionCreateInfoEXT const & ) const = default;
-# else
- bool operator==( VideoEncodeH264SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
- {
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
- return this->reflect() == rhs.reflect();
-# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( maxPictureSizeInMbs == rhs.maxPictureSizeInMbs ) && ( pStdExtensionVersion == rhs.pStdExtensionVersion );
-# endif
- }
-
- bool operator!=( VideoEncodeH264SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ VideoEncodeH264SessionParametersAddInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & stdSPSs_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const & stdPPSs_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , stdSPSCount( static_cast<uint32_t>( stdSPSs_.size() ) )
+ , pStdSPSs( stdSPSs_.data() )
+ , stdPPSCount( static_cast<uint32_t>( stdPPSs_.size() ) )
+ , pStdPPSs( stdPPSs_.data() )
{
- return !operator==( rhs );
}
-# endif
-
- public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionCreateInfoEXT;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::VideoEncodeH264CreateFlagsEXT flags = {};
- VULKAN_HPP_NAMESPACE::Extent2D maxPictureSizeInMbs = {};
- const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoEXT ) ==
- sizeof( VkVideoEncodeH264SessionCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoEXT>::value,
- "VideoEncodeH264SessionCreateInfoEXT is not nothrow_move_constructible!" );
-
- template <>
- struct CppType<StructureType, StructureType::eVideoEncodeH264SessionCreateInfoEXT>
- {
- using Type = VideoEncodeH264SessionCreateInfoEXT;
- };
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct VideoEncodeH264SessionParametersAddInfoEXT
- {
- using NativeType = VkVideoEncodeH264SessionParametersAddInfoEXT;
-
- static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eVideoEncodeH264SessionParametersAddInfoEXT;
-
-# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoEXT(
- uint32_t spsStdCount_ = {},
- const StdVideoH264SequenceParameterSet * pSpsStd_ = {},
- uint32_t ppsStdCount_ = {},
- const StdVideoH264PictureParameterSet * pPpsStd_ = {} ) VULKAN_HPP_NOEXCEPT
- : spsStdCount( spsStdCount_ )
- , pSpsStd( pSpsStd_ )
- , ppsStdCount( ppsStdCount_ )
- , pPpsStd( pPpsStd_ )
- {}
-
- VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoEXT(
- VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- VideoEncodeH264SessionParametersAddInfoEXT( VkVideoEncodeH264SessionParametersAddInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : VideoEncodeH264SessionParametersAddInfoEXT(
- *reinterpret_cast<VideoEncodeH264SessionParametersAddInfoEXT const *>( &rhs ) )
- {}
-
-# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- VideoEncodeH264SessionParametersAddInfoEXT(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & spsStd_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const & ppsStd_ = {} )
- : spsStdCount( static_cast<uint32_t>( spsStd_.size() ) )
- , pSpsStd( spsStd_.data() )
- , ppsStdCount( static_cast<uint32_t>( ppsStd_.size() ) )
- , pPpsStd( ppsStd_.data() )
- {}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoEncodeH264SessionParametersAddInfoEXT &
- operator=( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoEncodeH264SessionParametersAddInfoEXT & operator=( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoEncodeH264SessionParametersAddInfoEXT &
- operator=( VkVideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VideoEncodeH264SessionParametersAddInfoEXT & operator=( VkVideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT const *>( &rhs );
return *this;
}
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT &
- setSpsStdCount( uint32_t spsStdCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & setStdSPSCount( uint32_t stdSPSCount_ ) VULKAN_HPP_NOEXCEPT
{
- spsStdCount = spsStdCount_;
+ stdSPSCount = stdSPSCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT &
- setPSpsStd( const StdVideoH264SequenceParameterSet * pSpsStd_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & setPStdSPSs( const StdVideoH264SequenceParameterSet * pStdSPSs_ ) VULKAN_HPP_NOEXCEPT
{
- pSpsStd = pSpsStd_;
+ pStdSPSs = pStdSPSs_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoEncodeH264SessionParametersAddInfoEXT &
- setSpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & spsStd_ )
- VULKAN_HPP_NOEXCEPT
+ setStdSPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & stdSPSs_ ) VULKAN_HPP_NOEXCEPT
{
- spsStdCount = static_cast<uint32_t>( spsStd_.size() );
- pSpsStd = spsStd_.data();
+ stdSPSCount = static_cast<uint32_t>( stdSPSs_.size() );
+ pStdSPSs = stdSPSs_.data();
return *this;
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT &
- setPpsStdCount( uint32_t ppsStdCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & setStdPPSCount( uint32_t stdPPSCount_ ) VULKAN_HPP_NOEXCEPT
{
- ppsStdCount = ppsStdCount_;
+ stdPPSCount = stdPPSCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT &
- setPPpsStd( const StdVideoH264PictureParameterSet * pPpsStd_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & setPStdPPSs( const StdVideoH264PictureParameterSet * pStdPPSs_ ) VULKAN_HPP_NOEXCEPT
{
- pPpsStd = pPpsStd_;
+ pStdPPSs = pStdPPSs_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoEncodeH264SessionParametersAddInfoEXT &
- setPpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const & ppsStd_ )
- VULKAN_HPP_NOEXCEPT
+ setStdPPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const & stdPPSs_ ) VULKAN_HPP_NOEXCEPT
{
- ppsStdCount = static_cast<uint32_t>( ppsStd_.size() );
- pPpsStd = ppsStd_.data();
+ stdPPSCount = static_cast<uint32_t>( stdPPSs_.size() );
+ pStdPPSs = stdPPSs_.data();
return *this;
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoEncodeH264SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH264SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH264SessionParametersAddInfoEXT *>( this );
}
- explicit operator VkVideoEncodeH264SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH264SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH264SessionParametersAddInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -96280,7 +98777,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType, pNext, spsStdCount, pSpsStd, ppsStdCount, pPpsStd );
+ return std::tie( sType, pNext, stdSPSCount, pStdSPSs, stdPPSCount, pStdPPSs );
}
# endif
@@ -96289,11 +98786,11 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( spsStdCount == rhs.spsStdCount ) &&
- ( pSpsStd == rhs.pSpsStd ) && ( ppsStdCount == rhs.ppsStdCount ) && ( pPpsStd == rhs.pPpsStd );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stdSPSCount == rhs.stdSPSCount ) && ( pStdSPSs == rhs.pStdSPSs ) &&
+ ( stdPPSCount == rhs.stdPPSCount ) && ( pStdPPSs == rhs.pStdPPSs );
# endif
}
@@ -96306,20 +98803,11 @@ namespace VULKAN_HPP_NAMESPACE
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersAddInfoEXT;
const void * pNext = {};
- uint32_t spsStdCount = {};
- const StdVideoH264SequenceParameterSet * pSpsStd = {};
- uint32_t ppsStdCount = {};
- const StdVideoH264PictureParameterSet * pPpsStd = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT ) ==
- sizeof( VkVideoEncodeH264SessionParametersAddInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT>::value,
- "VideoEncodeH264SessionParametersAddInfoEXT is not nothrow_move_constructible!" );
+ uint32_t stdSPSCount = {};
+ const StdVideoH264SequenceParameterSet * pStdSPSs = {};
+ uint32_t stdPPSCount = {};
+ const StdVideoH264PictureParameterSet * pStdPPSs = {};
+ };
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH264SessionParametersAddInfoEXT>
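A sketch of the renamed SPS/PPS plumbing above (stdSPSCount/pStdSPSs and stdPPSCount/pStdPPSs), under the same include assumptions; the StdVideoH264* parameter sets come from the vk_video std headers and are assumed to have been filled elsewhere.

static vk::VideoEncodeH264SessionParametersAddInfoEXT
  makeH264ParametersAddInfo( const StdVideoH264SequenceParameterSet * sps, const StdVideoH264PictureParameterSet * pps )
{
    // One SPS and one PPS, supplied through the renamed setters.
    vk::VideoEncodeH264SessionParametersAddInfoEXT addInfo{};
    addInfo.setStdSPSCount( 1 )
        .setPStdSPSs( sps )
        .setStdPPSCount( 1 )
        .setPStdPPSs( pps );
    return addInfo;
}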
@@ -96334,81 +98822,76 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkVideoEncodeH264SessionParametersCreateInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersCreateInfoEXT(
- uint32_t maxSpsStdCount_ = {},
- uint32_t maxPpsStdCount_ = {},
- const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ = {} )
- VULKAN_HPP_NOEXCEPT
- : maxSpsStdCount( maxSpsStdCount_ )
- , maxPpsStdCount( maxPpsStdCount_ )
+ VULKAN_HPP_CONSTEXPR
+ VideoEncodeH264SessionParametersCreateInfoEXT( uint32_t maxStdSPSCount_ = {},
+ uint32_t maxStdPPSCount_ = {},
+ const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxStdSPSCount( maxStdSPSCount_ )
+ , maxStdPPSCount( maxStdPPSCount_ )
, pParametersAddInfo( pParametersAddInfo_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersCreateInfoEXT(
- VideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ VideoEncodeH264SessionParametersCreateInfoEXT( VideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoEncodeH264SessionParametersCreateInfoEXT( VkVideoEncodeH264SessionParametersCreateInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : VideoEncodeH264SessionParametersCreateInfoEXT(
- *reinterpret_cast<VideoEncodeH264SessionParametersCreateInfoEXT const *>( &rhs ) )
- {}
+ VideoEncodeH264SessionParametersCreateInfoEXT( VkVideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : VideoEncodeH264SessionParametersCreateInfoEXT( *reinterpret_cast<VideoEncodeH264SessionParametersCreateInfoEXT const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoEncodeH264SessionParametersCreateInfoEXT &
- operator=( VideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoEncodeH264SessionParametersCreateInfoEXT & operator=( VideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoEncodeH264SessionParametersCreateInfoEXT &
- operator=( VkVideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VideoEncodeH264SessionParametersCreateInfoEXT & operator=( VkVideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT const *>( &rhs );
return *this;
}
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT &
- setMaxSpsStdCount( uint32_t maxSpsStdCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT & setMaxStdSPSCount( uint32_t maxStdSPSCount_ ) VULKAN_HPP_NOEXCEPT
{
- maxSpsStdCount = maxSpsStdCount_;
+ maxStdSPSCount = maxStdSPSCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT &
- setMaxPpsStdCount( uint32_t maxPpsStdCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT & setMaxStdPPSCount( uint32_t maxStdPPSCount_ ) VULKAN_HPP_NOEXCEPT
{
- maxPpsStdCount = maxPpsStdCount_;
+ maxStdPPSCount = maxStdPPSCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT & setPParametersAddInfo(
- const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT &
+ setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT
{
pParametersAddInfo = pParametersAddInfo_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoEncodeH264SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH264SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH264SessionParametersCreateInfoEXT *>( this );
}
- explicit operator VkVideoEncodeH264SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH264SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH264SessionParametersCreateInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -96420,7 +98903,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType, pNext, maxSpsStdCount, maxPpsStdCount, pParametersAddInfo );
+ return std::tie( sType, pNext, maxStdSPSCount, maxStdPPSCount, pParametersAddInfo );
}
# endif
@@ -96429,11 +98912,11 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSpsStdCount == rhs.maxSpsStdCount ) &&
- ( maxPpsStdCount == rhs.maxPpsStdCount ) && ( pParametersAddInfo == rhs.pParametersAddInfo );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxStdSPSCount == rhs.maxStdSPSCount ) && ( maxStdPPSCount == rhs.maxStdPPSCount ) &&
+ ( pParametersAddInfo == rhs.pParametersAddInfo );
# endif
}
@@ -96444,21 +98927,12 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT;
- const void * pNext = {};
- uint32_t maxSpsStdCount = {};
- uint32_t maxPpsStdCount = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT;
+ const void * pNext = {};
+ uint32_t maxStdSPSCount = {};
+ uint32_t maxStdPPSCount = {};
const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT * pParametersAddInfo = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT ) ==
- sizeof( VkVideoEncodeH264SessionParametersCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT>::value,
- "VideoEncodeH264SessionParametersCreateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT>
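The create info above then caps how many of those entries the session parameters object can hold and points at the add info; a sketch continuing the previous example.

static vk::VideoEncodeH264SessionParametersCreateInfoEXT
  makeH264ParametersCreateInfo( const vk::VideoEncodeH264SessionParametersAddInfoEXT * addInfo )
{
    vk::VideoEncodeH264SessionParametersCreateInfoEXT createInfo{};
    createInfo.setMaxStdSPSCount( 1 )  // upper bounds for entries added over the session's lifetime
        .setMaxStdPPSCount( 1 )
        .setPParametersAddInfo( addInfo );
    return createInfo;
}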
@@ -96476,52 +98950,43 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264VclFrameInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoEncodeH264VclFrameInfoEXT(
- uint8_t refDefaultFinalList0EntryCount_ = {},
- const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList0Entries_ = {},
- uint8_t refDefaultFinalList1EntryCount_ = {},
- const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList1Entries_ = {},
- uint32_t naluSliceEntryCount_ = {},
- const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT * pNaluSliceEntries_ = {},
- const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pCurrentPictureInfo_ = {} ) VULKAN_HPP_NOEXCEPT
- : refDefaultFinalList0EntryCount( refDefaultFinalList0EntryCount_ )
- , pRefDefaultFinalList0Entries( pRefDefaultFinalList0Entries_ )
- , refDefaultFinalList1EntryCount( refDefaultFinalList1EntryCount_ )
- , pRefDefaultFinalList1Entries( pRefDefaultFinalList1Entries_ )
+ VULKAN_HPP_CONSTEXPR VideoEncodeH264VclFrameInfoEXT( const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT * pReferenceFinalLists_ = {},
+ uint32_t naluSliceEntryCount_ = {},
+ const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT * pNaluSliceEntries_ = {},
+ const StdVideoEncodeH264PictureInfo * pCurrentPictureInfo_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pReferenceFinalLists( pReferenceFinalLists_ )
, naluSliceEntryCount( naluSliceEntryCount_ )
, pNaluSliceEntries( pNaluSliceEntries_ )
, pCurrentPictureInfo( pCurrentPictureInfo_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- VideoEncodeH264VclFrameInfoEXT( VideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoEncodeH264VclFrameInfoEXT( VideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH264VclFrameInfoEXT( VkVideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH264VclFrameInfoEXT( *reinterpret_cast<VideoEncodeH264VclFrameInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoEncodeH264VclFrameInfoEXT(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const &
- refDefaultFinalList0Entries_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const &
- refDefaultFinalList1Entries_ = {},
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT> const &
- naluSliceEntries_ = {},
- const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pCurrentPictureInfo_ = {} )
- : refDefaultFinalList0EntryCount( static_cast<uint8_t>( refDefaultFinalList0Entries_.size() ) )
- , pRefDefaultFinalList0Entries( refDefaultFinalList0Entries_.data() )
- , refDefaultFinalList1EntryCount( static_cast<uint8_t>( refDefaultFinalList1Entries_.size() ) )
- , pRefDefaultFinalList1Entries( refDefaultFinalList1Entries_.data() )
+ const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT * pReferenceFinalLists_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT> const & naluSliceEntries_,
+ const StdVideoEncodeH264PictureInfo * pCurrentPictureInfo_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , pReferenceFinalLists( pReferenceFinalLists_ )
, naluSliceEntryCount( static_cast<uint32_t>( naluSliceEntries_.size() ) )
, pNaluSliceEntries( naluSliceEntries_.data() )
, pCurrentPictureInfo( pCurrentPictureInfo_ )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoEncodeH264VclFrameInfoEXT &
- operator=( VideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoEncodeH264VclFrameInfoEXT & operator=( VideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH264VclFrameInfoEXT & operator=( VkVideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -96537,64 +99002,20 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT &
- setRefDefaultFinalList0EntryCount( uint8_t refDefaultFinalList0EntryCount_ ) VULKAN_HPP_NOEXCEPT
- {
- refDefaultFinalList0EntryCount = refDefaultFinalList0EntryCount_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & setPRefDefaultFinalList0Entries(
- const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList0Entries_ ) VULKAN_HPP_NOEXCEPT
- {
- pRefDefaultFinalList0Entries = pRefDefaultFinalList0Entries_;
- return *this;
- }
-
-# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- VideoEncodeH264VclFrameInfoEXT & setRefDefaultFinalList0Entries(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const &
- refDefaultFinalList0Entries_ ) VULKAN_HPP_NOEXCEPT
- {
- refDefaultFinalList0EntryCount = static_cast<uint8_t>( refDefaultFinalList0Entries_.size() );
- pRefDefaultFinalList0Entries = refDefaultFinalList0Entries_.data();
- return *this;
- }
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT &
- setRefDefaultFinalList1EntryCount( uint8_t refDefaultFinalList1EntryCount_ ) VULKAN_HPP_NOEXCEPT
- {
- refDefaultFinalList1EntryCount = refDefaultFinalList1EntryCount_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & setPRefDefaultFinalList1Entries(
- const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList1Entries_ ) VULKAN_HPP_NOEXCEPT
- {
- pRefDefaultFinalList1Entries = pRefDefaultFinalList1Entries_;
- return *this;
- }
-
-# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- VideoEncodeH264VclFrameInfoEXT & setRefDefaultFinalList1Entries(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const &
- refDefaultFinalList1Entries_ ) VULKAN_HPP_NOEXCEPT
+ setPReferenceFinalLists( const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT * pReferenceFinalLists_ ) VULKAN_HPP_NOEXCEPT
{
- refDefaultFinalList1EntryCount = static_cast<uint8_t>( refDefaultFinalList1Entries_.size() );
- pRefDefaultFinalList1Entries = refDefaultFinalList1Entries_.data();
+ pReferenceFinalLists = pReferenceFinalLists_;
return *this;
}
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT &
- setNaluSliceEntryCount( uint32_t naluSliceEntryCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & setNaluSliceEntryCount( uint32_t naluSliceEntryCount_ ) VULKAN_HPP_NOEXCEPT
{
naluSliceEntryCount = naluSliceEntryCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & setPNaluSliceEntries(
- const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT * pNaluSliceEntries_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT &
+ setPNaluSliceEntries( const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT * pNaluSliceEntries_ ) VULKAN_HPP_NOEXCEPT
{
pNaluSliceEntries = pNaluSliceEntries_;
return *this;
@@ -96602,8 +99023,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoEncodeH264VclFrameInfoEXT & setNaluSliceEntries(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT> const &
- naluSliceEntries_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT> const & naluSliceEntries_ ) VULKAN_HPP_NOEXCEPT
{
naluSliceEntryCount = static_cast<uint32_t>( naluSliceEntries_.size() );
pNaluSliceEntries = naluSliceEntries_.data();
@@ -96611,49 +99031,38 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & setPCurrentPictureInfo(
- const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pCurrentPictureInfo_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT &
+ setPCurrentPictureInfo( const StdVideoEncodeH264PictureInfo * pCurrentPictureInfo_ ) VULKAN_HPP_NOEXCEPT
{
pCurrentPictureInfo = pCurrentPictureInfo_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoEncodeH264VclFrameInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH264VclFrameInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH264VclFrameInfoEXT *>( this );
}
- explicit operator VkVideoEncodeH264VclFrameInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH264VclFrameInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH264VclFrameInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
const void * const &,
- uint8_t const &,
- const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * const &,
- uint8_t const &,
- const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * const &,
+ const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT * const &,
uint32_t const &,
- const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT * const &,
- const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * const &>
+ const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT * const &,
+ const StdVideoEncodeH264PictureInfo * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- refDefaultFinalList0EntryCount,
- pRefDefaultFinalList0Entries,
- refDefaultFinalList1EntryCount,
- pRefDefaultFinalList1Entries,
- naluSliceEntryCount,
- pNaluSliceEntries,
- pCurrentPictureInfo );
+ return std::tie( sType, pNext, pReferenceFinalLists, naluSliceEntryCount, pNaluSliceEntries, pCurrentPictureInfo );
}
# endif
@@ -96662,14 +99071,10 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoEncodeH264VclFrameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( refDefaultFinalList0EntryCount == rhs.refDefaultFinalList0EntryCount ) &&
- ( pRefDefaultFinalList0Entries == rhs.pRefDefaultFinalList0Entries ) &&
- ( refDefaultFinalList1EntryCount == rhs.refDefaultFinalList1EntryCount ) &&
- ( pRefDefaultFinalList1Entries == rhs.pRefDefaultFinalList1Entries ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pReferenceFinalLists == rhs.pReferenceFinalLists ) &&
( naluSliceEntryCount == rhs.naluSliceEntryCount ) && ( pNaluSliceEntries == rhs.pNaluSliceEntries ) &&
( pCurrentPictureInfo == rhs.pCurrentPictureInfo );
# endif
@@ -96682,24 +99087,13 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264VclFrameInfoEXT;
- const void * pNext = {};
- uint8_t refDefaultFinalList0EntryCount = {};
- const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList0Entries = {};
- uint8_t refDefaultFinalList1EntryCount = {};
- const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList1Entries = {};
- uint32_t naluSliceEntryCount = {};
- const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT * pNaluSliceEntries = {};
- const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pCurrentPictureInfo = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264VclFrameInfoEXT;
+ const void * pNext = {};
+ const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT * pReferenceFinalLists = {};
+ uint32_t naluSliceEntryCount = {};
+ const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT * pNaluSliceEntries = {};
+ const StdVideoEncodeH264PictureInfo * pCurrentPictureInfo = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264VclFrameInfoEXT ) ==
- sizeof( VkVideoEncodeH264VclFrameInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264VclFrameInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264VclFrameInfoEXT>::value,
- "VideoEncodeH264VclFrameInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH264VclFrameInfoEXT>
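A sketch of the reworked per-frame struct above, which now takes a single pReferenceFinalLists pointer and a StdVideoEncodeH264PictureInfo for the current picture; same include assumptions, with the reference lists, slice, and picture info assumed to be prepared elsewhere.

static vk::VideoEncodeH264VclFrameInfoEXT
  makeH264VclFrameInfo( const vk::VideoEncodeH264ReferenceListsInfoEXT * referenceLists,
                        const vk::VideoEncodeH264NaluSliceInfoEXT *      slice,
                        const StdVideoEncodeH264PictureInfo *            pictureInfo )
{
    vk::VideoEncodeH264VclFrameInfoEXT frameInfo{};
    frameInfo.setPReferenceFinalLists( referenceLists )
        .setNaluSliceEntryCount( 1 )
        .setPNaluSliceEntries( slice )
        .setPCurrentPictureInfo( pictureInfo );
    return frameInfo;
}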
@@ -96717,41 +99111,60 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265CapabilitiesEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT(
- VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsEXT flags_ = {},
- VULKAN_HPP_NAMESPACE::VideoEncodeH265InputModeFlagsEXT inputModeFlags_ = {},
- VULKAN_HPP_NAMESPACE::VideoEncodeH265OutputModeFlagsEXT outputModeFlags_ = {},
- VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsEXT ctbSizes_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D inputImageDataAlignment_ = {},
- uint8_t maxNumL0ReferenceForP_ = {},
- uint8_t maxNumL0ReferenceForB_ = {},
- uint8_t maxNumL1Reference_ = {},
- uint8_t maxNumSubLayers_ = {},
- uint8_t qualityLevelCount_ = {},
- VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR VideoEncodeH265CapabilitiesEXT( VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsEXT flags_ = {},
+ VULKAN_HPP_NAMESPACE::VideoEncodeH265InputModeFlagsEXT inputModeFlags_ = {},
+ VULKAN_HPP_NAMESPACE::VideoEncodeH265OutputModeFlagsEXT outputModeFlags_ = {},
+ VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsEXT ctbSizes_ = {},
+ VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagsEXT transformBlockSizes_ = {},
+ uint8_t maxPPictureL0ReferenceCount_ = {},
+ uint8_t maxBPictureL0ReferenceCount_ = {},
+ uint8_t maxL1ReferenceCount_ = {},
+ uint8_t maxSubLayersCount_ = {},
+ uint8_t minLog2MinLumaCodingBlockSizeMinus3_ = {},
+ uint8_t maxLog2MinLumaCodingBlockSizeMinus3_ = {},
+ uint8_t minLog2MinLumaTransformBlockSizeMinus2_ = {},
+ uint8_t maxLog2MinLumaTransformBlockSizeMinus2_ = {},
+ uint8_t minMaxTransformHierarchyDepthInter_ = {},
+ uint8_t maxMaxTransformHierarchyDepthInter_ = {},
+ uint8_t minMaxTransformHierarchyDepthIntra_ = {},
+ uint8_t maxMaxTransformHierarchyDepthIntra_ = {},
+ uint8_t maxDiffCuQpDeltaDepth_ = {},
+ uint8_t minMaxNumMergeCand_ = {},
+ uint8_t maxMaxNumMergeCand_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, inputModeFlags( inputModeFlags_ )
, outputModeFlags( outputModeFlags_ )
, ctbSizes( ctbSizes_ )
- , inputImageDataAlignment( inputImageDataAlignment_ )
- , maxNumL0ReferenceForP( maxNumL0ReferenceForP_ )
- , maxNumL0ReferenceForB( maxNumL0ReferenceForB_ )
- , maxNumL1Reference( maxNumL1Reference_ )
- , maxNumSubLayers( maxNumSubLayers_ )
- , qualityLevelCount( qualityLevelCount_ )
- , stdExtensionVersion( stdExtensionVersion_ )
- {}
-
- VULKAN_HPP_CONSTEXPR_14
- VideoEncodeH265CapabilitiesEXT( VideoEncodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ , transformBlockSizes( transformBlockSizes_ )
+ , maxPPictureL0ReferenceCount( maxPPictureL0ReferenceCount_ )
+ , maxBPictureL0ReferenceCount( maxBPictureL0ReferenceCount_ )
+ , maxL1ReferenceCount( maxL1ReferenceCount_ )
+ , maxSubLayersCount( maxSubLayersCount_ )
+ , minLog2MinLumaCodingBlockSizeMinus3( minLog2MinLumaCodingBlockSizeMinus3_ )
+ , maxLog2MinLumaCodingBlockSizeMinus3( maxLog2MinLumaCodingBlockSizeMinus3_ )
+ , minLog2MinLumaTransformBlockSizeMinus2( minLog2MinLumaTransformBlockSizeMinus2_ )
+ , maxLog2MinLumaTransformBlockSizeMinus2( maxLog2MinLumaTransformBlockSizeMinus2_ )
+ , minMaxTransformHierarchyDepthInter( minMaxTransformHierarchyDepthInter_ )
+ , maxMaxTransformHierarchyDepthInter( maxMaxTransformHierarchyDepthInter_ )
+ , minMaxTransformHierarchyDepthIntra( minMaxTransformHierarchyDepthIntra_ )
+ , maxMaxTransformHierarchyDepthIntra( maxMaxTransformHierarchyDepthIntra_ )
+ , maxDiffCuQpDeltaDepth( maxDiffCuQpDeltaDepth_ )
+ , minMaxNumMergeCand( minMaxNumMergeCand_ )
+ , maxMaxNumMergeCand( maxMaxNumMergeCand_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR VideoEncodeH265CapabilitiesEXT( VideoEncodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH265CapabilitiesEXT( VkVideoEncodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH265CapabilitiesEXT( *reinterpret_cast<VideoEncodeH265CapabilitiesEXT const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoEncodeH265CapabilitiesEXT &
- operator=( VideoEncodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoEncodeH265CapabilitiesEXT & operator=( VideoEncodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH265CapabilitiesEXT & operator=( VkVideoEncodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -96759,118 +99172,42 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
-# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT &
- setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
- {
- flags = flags_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT &
- setInputModeFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH265InputModeFlagsEXT inputModeFlags_ ) VULKAN_HPP_NOEXCEPT
- {
- inputModeFlags = inputModeFlags_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT &
- setOutputModeFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH265OutputModeFlagsEXT outputModeFlags_ ) VULKAN_HPP_NOEXCEPT
- {
- outputModeFlags = outputModeFlags_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT &
- setCtbSizes( VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsEXT ctbSizes_ ) VULKAN_HPP_NOEXCEPT
- {
- ctbSizes = ctbSizes_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT &
- setInputImageDataAlignment( VULKAN_HPP_NAMESPACE::Extent2D const & inputImageDataAlignment_ ) VULKAN_HPP_NOEXCEPT
- {
- inputImageDataAlignment = inputImageDataAlignment_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT &
- setMaxNumL0ReferenceForP( uint8_t maxNumL0ReferenceForP_ ) VULKAN_HPP_NOEXCEPT
- {
- maxNumL0ReferenceForP = maxNumL0ReferenceForP_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT &
- setMaxNumL0ReferenceForB( uint8_t maxNumL0ReferenceForB_ ) VULKAN_HPP_NOEXCEPT
- {
- maxNumL0ReferenceForB = maxNumL0ReferenceForB_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT &
- setMaxNumL1Reference( uint8_t maxNumL1Reference_ ) VULKAN_HPP_NOEXCEPT
- {
- maxNumL1Reference = maxNumL1Reference_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT &
- setMaxNumSubLayers( uint8_t maxNumSubLayers_ ) VULKAN_HPP_NOEXCEPT
- {
- maxNumSubLayers = maxNumSubLayers_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT &
- setQualityLevelCount( uint8_t qualityLevelCount_ ) VULKAN_HPP_NOEXCEPT
- {
- qualityLevelCount = qualityLevelCount_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & setStdExtensionVersion(
- VULKAN_HPP_NAMESPACE::ExtensionProperties const & stdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT
- {
- stdExtensionVersion = stdExtensionVersion_;
- return *this;
- }
-# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
-
- explicit operator VkVideoEncodeH265CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH265CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH265CapabilitiesEXT *>( this );
}
- explicit operator VkVideoEncodeH265CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH265CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH265CapabilitiesEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
+ void * const &,
VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsEXT const &,
VULKAN_HPP_NAMESPACE::VideoEncodeH265InputModeFlagsEXT const &,
VULKAN_HPP_NAMESPACE::VideoEncodeH265OutputModeFlagsEXT const &,
VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsEXT const &,
- VULKAN_HPP_NAMESPACE::Extent2D const &,
+ VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagsEXT const &,
uint8_t const &,
uint8_t const &,
uint8_t const &,
uint8_t const &,
uint8_t const &,
- VULKAN_HPP_NAMESPACE::ExtensionProperties const &>
+ uint8_t const &,
+ uint8_t const &,
+ uint8_t const &,
+ uint8_t const &,
+ uint8_t const &,
+ uint8_t const &,
+ uint8_t const &,
+ uint8_t const &,
+ uint8_t const &,
+ uint8_t const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -96880,13 +99217,22 @@ namespace VULKAN_HPP_NAMESPACE
inputModeFlags,
outputModeFlags,
ctbSizes,
- inputImageDataAlignment,
- maxNumL0ReferenceForP,
- maxNumL0ReferenceForB,
- maxNumL1Reference,
- maxNumSubLayers,
- qualityLevelCount,
- stdExtensionVersion );
+ transformBlockSizes,
+ maxPPictureL0ReferenceCount,
+ maxBPictureL0ReferenceCount,
+ maxL1ReferenceCount,
+ maxSubLayersCount,
+ minLog2MinLumaCodingBlockSizeMinus3,
+ maxLog2MinLumaCodingBlockSizeMinus3,
+ minLog2MinLumaTransformBlockSizeMinus2,
+ maxLog2MinLumaTransformBlockSizeMinus2,
+ minMaxTransformHierarchyDepthInter,
+ maxMaxTransformHierarchyDepthInter,
+ minMaxTransformHierarchyDepthIntra,
+ maxMaxTransformHierarchyDepthIntra,
+ maxDiffCuQpDeltaDepth,
+ minMaxNumMergeCand,
+ maxMaxNumMergeCand );
}
# endif
@@ -96895,16 +99241,22 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoEncodeH265CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( inputModeFlags == rhs.inputModeFlags ) && ( outputModeFlags == rhs.outputModeFlags ) &&
- ( ctbSizes == rhs.ctbSizes ) && ( inputImageDataAlignment == rhs.inputImageDataAlignment ) &&
- ( maxNumL0ReferenceForP == rhs.maxNumL0ReferenceForP ) &&
- ( maxNumL0ReferenceForB == rhs.maxNumL0ReferenceForB ) && ( maxNumL1Reference == rhs.maxNumL1Reference ) &&
- ( maxNumSubLayers == rhs.maxNumSubLayers ) && ( qualityLevelCount == rhs.qualityLevelCount ) &&
- ( stdExtensionVersion == rhs.stdExtensionVersion );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( inputModeFlags == rhs.inputModeFlags ) &&
+ ( outputModeFlags == rhs.outputModeFlags ) && ( ctbSizes == rhs.ctbSizes ) && ( transformBlockSizes == rhs.transformBlockSizes ) &&
+ ( maxPPictureL0ReferenceCount == rhs.maxPPictureL0ReferenceCount ) && ( maxBPictureL0ReferenceCount == rhs.maxBPictureL0ReferenceCount ) &&
+ ( maxL1ReferenceCount == rhs.maxL1ReferenceCount ) && ( maxSubLayersCount == rhs.maxSubLayersCount ) &&
+ ( minLog2MinLumaCodingBlockSizeMinus3 == rhs.minLog2MinLumaCodingBlockSizeMinus3 ) &&
+ ( maxLog2MinLumaCodingBlockSizeMinus3 == rhs.maxLog2MinLumaCodingBlockSizeMinus3 ) &&
+ ( minLog2MinLumaTransformBlockSizeMinus2 == rhs.minLog2MinLumaTransformBlockSizeMinus2 ) &&
+ ( maxLog2MinLumaTransformBlockSizeMinus2 == rhs.maxLog2MinLumaTransformBlockSizeMinus2 ) &&
+ ( minMaxTransformHierarchyDepthInter == rhs.minMaxTransformHierarchyDepthInter ) &&
+ ( maxMaxTransformHierarchyDepthInter == rhs.maxMaxTransformHierarchyDepthInter ) &&
+ ( minMaxTransformHierarchyDepthIntra == rhs.minMaxTransformHierarchyDepthIntra ) &&
+ ( maxMaxTransformHierarchyDepthIntra == rhs.maxMaxTransformHierarchyDepthIntra ) && ( maxDiffCuQpDeltaDepth == rhs.maxDiffCuQpDeltaDepth ) &&
+ ( minMaxNumMergeCand == rhs.minMaxNumMergeCand ) && ( maxMaxNumMergeCand == rhs.maxMaxNumMergeCand );
# endif
}
@@ -96915,28 +99267,29 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265CapabilitiesEXT;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsEXT flags = {};
- VULKAN_HPP_NAMESPACE::VideoEncodeH265InputModeFlagsEXT inputModeFlags = {};
- VULKAN_HPP_NAMESPACE::VideoEncodeH265OutputModeFlagsEXT outputModeFlags = {};
- VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsEXT ctbSizes = {};
- VULKAN_HPP_NAMESPACE::Extent2D inputImageDataAlignment = {};
- uint8_t maxNumL0ReferenceForP = {};
- uint8_t maxNumL0ReferenceForB = {};
- uint8_t maxNumL1Reference = {};
- uint8_t maxNumSubLayers = {};
- uint8_t qualityLevelCount = {};
- VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT ) ==
- sizeof( VkVideoEncodeH265CapabilitiesEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT>::value,
- "VideoEncodeH265CapabilitiesEXT is not nothrow_move_constructible!" );
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265CapabilitiesEXT;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsEXT flags = {};
+ VULKAN_HPP_NAMESPACE::VideoEncodeH265InputModeFlagsEXT inputModeFlags = {};
+ VULKAN_HPP_NAMESPACE::VideoEncodeH265OutputModeFlagsEXT outputModeFlags = {};
+ VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsEXT ctbSizes = {};
+ VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagsEXT transformBlockSizes = {};
+ uint8_t maxPPictureL0ReferenceCount = {};
+ uint8_t maxBPictureL0ReferenceCount = {};
+ uint8_t maxL1ReferenceCount = {};
+ uint8_t maxSubLayersCount = {};
+ uint8_t minLog2MinLumaCodingBlockSizeMinus3 = {};
+ uint8_t maxLog2MinLumaCodingBlockSizeMinus3 = {};
+ uint8_t minLog2MinLumaTransformBlockSizeMinus2 = {};
+ uint8_t maxLog2MinLumaTransformBlockSizeMinus2 = {};
+ uint8_t minMaxTransformHierarchyDepthInter = {};
+ uint8_t maxMaxTransformHierarchyDepthInter = {};
+ uint8_t minMaxTransformHierarchyDepthIntra = {};
+ uint8_t maxMaxTransformHierarchyDepthIntra = {};
+ uint8_t maxDiffCuQpDeltaDepth = {};
+ uint8_t minMaxNumMergeCand = {};
+ uint8_t maxMaxNumMergeCand = {};
+ };
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH265CapabilitiesEXT>
@@ -96954,22 +99307,24 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265DpbSlotInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoEncodeH265DpbSlotInfoEXT(
- int8_t slotIndex_ = {}, const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo_ = {} ) VULKAN_HPP_NOEXCEPT
- : slotIndex( slotIndex_ )
+ VULKAN_HPP_CONSTEXPR VideoEncodeH265DpbSlotInfoEXT( int8_t slotIndex_ = {},
+ const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , slotIndex( slotIndex_ )
, pStdReferenceInfo( pStdReferenceInfo_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- VideoEncodeH265DpbSlotInfoEXT( VideoEncodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoEncodeH265DpbSlotInfoEXT( VideoEncodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH265DpbSlotInfoEXT( VkVideoEncodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH265DpbSlotInfoEXT( *reinterpret_cast<VideoEncodeH265DpbSlotInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoEncodeH265DpbSlotInfoEXT &
- operator=( VideoEncodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoEncodeH265DpbSlotInfoEXT & operator=( VideoEncodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH265DpbSlotInfoEXT & operator=( VkVideoEncodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -96991,31 +99346,28 @@ namespace VULKAN_HPP_NAMESPACE
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265DpbSlotInfoEXT &
- setPStdReferenceInfo( const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
+ setPStdReferenceInfo( const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
{
pStdReferenceInfo = pStdReferenceInfo_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoEncodeH265DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH265DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH265DpbSlotInfoEXT *>( this );
}
- explicit operator VkVideoEncodeH265DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH265DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH265DpbSlotInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- int8_t const &,
- const StdVideoEncodeH265ReferenceInfo * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, int8_t const &, const StdVideoEncodeH265ReferenceInfo * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -97028,11 +99380,10 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoEncodeH265DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( slotIndex == rhs.slotIndex ) &&
- ( pStdReferenceInfo == rhs.pStdReferenceInfo );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( slotIndex == rhs.slotIndex ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo );
# endif
}
@@ -97048,14 +99399,6 @@ namespace VULKAN_HPP_NAMESPACE
int8_t slotIndex = {};
const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT ) ==
- sizeof( VkVideoEncodeH265DpbSlotInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT>::value,
- "VideoEncodeH265DpbSlotInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH265DpbSlotInfoEXT>
@@ -97065,116 +99408,111 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct VideoEncodeH265EmitPictureParametersEXT
+ struct VideoEncodeH265EmitPictureParametersInfoEXT
{
- using NativeType = VkVideoEncodeH265EmitPictureParametersEXT;
+ using NativeType = VkVideoEncodeH265EmitPictureParametersInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eVideoEncodeH265EmitPictureParametersEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265EmitPictureParametersInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- VideoEncodeH265EmitPictureParametersEXT( uint8_t vpsId_ = {},
- uint8_t spsId_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 emitVpsEnable_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ = {},
- uint32_t ppsIdEntryCount_ = {},
- const uint8_t * ppsIdEntries_ = {} ) VULKAN_HPP_NOEXCEPT
- : vpsId( vpsId_ )
+ VULKAN_HPP_CONSTEXPR VideoEncodeH265EmitPictureParametersInfoEXT( uint8_t vpsId_ = {},
+ uint8_t spsId_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 emitVpsEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ = {},
+ uint32_t ppsIdEntryCount_ = {},
+ const uint8_t * ppsIdEntries_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , vpsId( vpsId_ )
, spsId( spsId_ )
, emitVpsEnable( emitVpsEnable_ )
, emitSpsEnable( emitSpsEnable_ )
, ppsIdEntryCount( ppsIdEntryCount_ )
, ppsIdEntries( ppsIdEntries_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR VideoEncodeH265EmitPictureParametersEXT( VideoEncodeH265EmitPictureParametersEXT const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoEncodeH265EmitPictureParametersInfoEXT( VideoEncodeH265EmitPictureParametersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoEncodeH265EmitPictureParametersEXT( VkVideoEncodeH265EmitPictureParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : VideoEncodeH265EmitPictureParametersEXT(
- *reinterpret_cast<VideoEncodeH265EmitPictureParametersEXT const *>( &rhs ) )
- {}
+ VideoEncodeH265EmitPictureParametersInfoEXT( VkVideoEncodeH265EmitPictureParametersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : VideoEncodeH265EmitPictureParametersInfoEXT( *reinterpret_cast<VideoEncodeH265EmitPictureParametersInfoEXT const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- VideoEncodeH265EmitPictureParametersEXT(
- uint8_t vpsId_,
- uint8_t spsId_,
- VULKAN_HPP_NAMESPACE::Bool32 emitVpsEnable_,
- VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint8_t> const & psIdEntries_ )
- : vpsId( vpsId_ )
+ VideoEncodeH265EmitPictureParametersInfoEXT( uint8_t vpsId_,
+ uint8_t spsId_,
+ VULKAN_HPP_NAMESPACE::Bool32 emitVpsEnable_,
+ VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint8_t> const & psIdEntries_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , vpsId( vpsId_ )
, spsId( spsId_ )
, emitVpsEnable( emitVpsEnable_ )
, emitSpsEnable( emitSpsEnable_ )
, ppsIdEntryCount( static_cast<uint32_t>( psIdEntries_.size() ) )
, ppsIdEntries( psIdEntries_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoEncodeH265EmitPictureParametersEXT &
- operator=( VideoEncodeH265EmitPictureParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoEncodeH265EmitPictureParametersInfoEXT & operator=( VideoEncodeH265EmitPictureParametersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoEncodeH265EmitPictureParametersEXT &
- operator=( VkVideoEncodeH265EmitPictureParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VideoEncodeH265EmitPictureParametersInfoEXT & operator=( VkVideoEncodeH265EmitPictureParametersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265EmitPictureParametersEXT const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265EmitPictureParametersInfoEXT const *>( &rhs );
return *this;
}
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersEXT &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersEXT & setVpsId( uint8_t vpsId_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersInfoEXT & setVpsId( uint8_t vpsId_ ) VULKAN_HPP_NOEXCEPT
{
vpsId = vpsId_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersEXT & setSpsId( uint8_t spsId_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersInfoEXT & setSpsId( uint8_t spsId_ ) VULKAN_HPP_NOEXCEPT
{
spsId = spsId_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersEXT &
- setEmitVpsEnable( VULKAN_HPP_NAMESPACE::Bool32 emitVpsEnable_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersInfoEXT & setEmitVpsEnable( VULKAN_HPP_NAMESPACE::Bool32 emitVpsEnable_ ) VULKAN_HPP_NOEXCEPT
{
emitVpsEnable = emitVpsEnable_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersEXT &
- setEmitSpsEnable( VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersInfoEXT & setEmitSpsEnable( VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ ) VULKAN_HPP_NOEXCEPT
{
emitSpsEnable = emitSpsEnable_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersEXT &
- setPpsIdEntryCount( uint32_t ppsIdEntryCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersInfoEXT & setPpsIdEntryCount( uint32_t ppsIdEntryCount_ ) VULKAN_HPP_NOEXCEPT
{
ppsIdEntryCount = ppsIdEntryCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersEXT &
- setPpsIdEntries( const uint8_t * ppsIdEntries_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersInfoEXT & setPpsIdEntries( const uint8_t * ppsIdEntries_ ) VULKAN_HPP_NOEXCEPT
{
ppsIdEntries = ppsIdEntries_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- VideoEncodeH265EmitPictureParametersEXT & setPsIdEntries(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint8_t> const & psIdEntries_ ) VULKAN_HPP_NOEXCEPT
+ VideoEncodeH265EmitPictureParametersInfoEXT &
+ setPsIdEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint8_t> const & psIdEntries_ ) VULKAN_HPP_NOEXCEPT
{
ppsIdEntryCount = static_cast<uint32_t>( psIdEntries_.size() );
ppsIdEntries = psIdEntries_.data();
@@ -97183,17 +99521,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoEncodeH265EmitPictureParametersEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH265EmitPictureParametersInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkVideoEncodeH265EmitPictureParametersEXT *>( this );
+ return *reinterpret_cast<const VkVideoEncodeH265EmitPictureParametersInfoEXT *>( this );
}
- explicit operator VkVideoEncodeH265EmitPictureParametersEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH265EmitPictureParametersInfoEXT &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkVideoEncodeH265EmitPictureParametersEXT *>( this );
+ return *reinterpret_cast<VkVideoEncodeH265EmitPictureParametersInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -97213,27 +99551,26 @@ namespace VULKAN_HPP_NAMESPACE
# endif
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( VideoEncodeH265EmitPictureParametersEXT const & ) const = default;
+ auto operator<=>( VideoEncodeH265EmitPictureParametersInfoEXT const & ) const = default;
# else
- bool operator==( VideoEncodeH265EmitPictureParametersEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( VideoEncodeH265EmitPictureParametersInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vpsId == rhs.vpsId ) && ( spsId == rhs.spsId ) &&
- ( emitVpsEnable == rhs.emitVpsEnable ) && ( emitSpsEnable == rhs.emitSpsEnable ) &&
- ( ppsIdEntryCount == rhs.ppsIdEntryCount ) && ( ppsIdEntries == rhs.ppsIdEntries );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vpsId == rhs.vpsId ) && ( spsId == rhs.spsId ) && ( emitVpsEnable == rhs.emitVpsEnable ) &&
+ ( emitSpsEnable == rhs.emitSpsEnable ) && ( ppsIdEntryCount == rhs.ppsIdEntryCount ) && ( ppsIdEntries == rhs.ppsIdEntries );
# endif
}
- bool operator!=( VideoEncodeH265EmitPictureParametersEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( VideoEncodeH265EmitPictureParametersInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265EmitPictureParametersEXT;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265EmitPictureParametersInfoEXT;
const void * pNext = {};
uint8_t vpsId = {};
uint8_t spsId = {};
@@ -97242,20 +99579,11 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t ppsIdEntryCount = {};
const uint8_t * ppsIdEntries = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265EmitPictureParametersEXT ) ==
- sizeof( VkVideoEncodeH265EmitPictureParametersEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265EmitPictureParametersEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265EmitPictureParametersEXT>::value,
- "VideoEncodeH265EmitPictureParametersEXT is not nothrow_move_constructible!" );
template <>
- struct CppType<StructureType, StructureType::eVideoEncodeH265EmitPictureParametersEXT>
+ struct CppType<StructureType, StructureType::eVideoEncodeH265EmitPictureParametersInfoEXT>
{
- using Type = VideoEncodeH265EmitPictureParametersEXT;
+ using Type = VideoEncodeH265EmitPictureParametersInfoEXT;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
@@ -97265,20 +99593,19 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkVideoEncodeH265FrameSizeEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoEncodeH265FrameSizeEXT( uint32_t frameISize_ = {},
- uint32_t framePSize_ = {},
- uint32_t frameBSize_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR VideoEncodeH265FrameSizeEXT( uint32_t frameISize_ = {}, uint32_t framePSize_ = {}, uint32_t frameBSize_ = {} ) VULKAN_HPP_NOEXCEPT
: frameISize( frameISize_ )
, framePSize( framePSize_ )
, frameBSize( frameBSize_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- VideoEncodeH265FrameSizeEXT( VideoEncodeH265FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoEncodeH265FrameSizeEXT( VideoEncodeH265FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH265FrameSizeEXT( VkVideoEncodeH265FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH265FrameSizeEXT( *reinterpret_cast<VideoEncodeH265FrameSizeEXT const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
VideoEncodeH265FrameSizeEXT & operator=( VideoEncodeH265FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -97309,17 +99636,17 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoEncodeH265FrameSizeEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH265FrameSizeEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH265FrameSizeEXT *>( this );
}
- explicit operator VkVideoEncodeH265FrameSizeEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH265FrameSizeEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH265FrameSizeEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -97336,7 +99663,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoEncodeH265FrameSizeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( frameISize == rhs.frameISize ) && ( framePSize == rhs.framePSize ) && ( frameBSize == rhs.frameBSize );
@@ -97354,95 +99681,88 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t framePSize = {};
uint32_t frameBSize = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT ) ==
- sizeof( VkVideoEncodeH265FrameSizeEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT>::value,
- "VideoEncodeH265FrameSizeEXT is not nothrow_move_constructible!" );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct VideoEncodeH265ReferenceListsEXT
+ struct VideoEncodeH265ReferenceListsInfoEXT
{
- using NativeType = VkVideoEncodeH265ReferenceListsEXT;
+ using NativeType = VkVideoEncodeH265ReferenceListsInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265ReferenceListsEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265ReferenceListsInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoEncodeH265ReferenceListsEXT(
- uint8_t referenceList0EntryCount_ = {},
- const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList0Entries_ = {},
- uint8_t referenceList1EntryCount_ = {},
- const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList1Entries_ = {},
- const StdVideoEncodeH265ReferenceModifications * pReferenceModifications_ = {} ) VULKAN_HPP_NOEXCEPT
- : referenceList0EntryCount( referenceList0EntryCount_ )
+ VULKAN_HPP_CONSTEXPR VideoEncodeH265ReferenceListsInfoEXT( uint8_t referenceList0EntryCount_ = {},
+ const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList0Entries_ = {},
+ uint8_t referenceList1EntryCount_ = {},
+ const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList1Entries_ = {},
+ const StdVideoEncodeH265ReferenceModifications * pReferenceModifications_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , referenceList0EntryCount( referenceList0EntryCount_ )
, pReferenceList0Entries( pReferenceList0Entries_ )
, referenceList1EntryCount( referenceList1EntryCount_ )
, pReferenceList1Entries( pReferenceList1Entries_ )
, pReferenceModifications( pReferenceModifications_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- VideoEncodeH265ReferenceListsEXT( VideoEncodeH265ReferenceListsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoEncodeH265ReferenceListsInfoEXT( VideoEncodeH265ReferenceListsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoEncodeH265ReferenceListsEXT( VkVideoEncodeH265ReferenceListsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : VideoEncodeH265ReferenceListsEXT( *reinterpret_cast<VideoEncodeH265ReferenceListsEXT const *>( &rhs ) )
- {}
+ VideoEncodeH265ReferenceListsInfoEXT( VkVideoEncodeH265ReferenceListsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : VideoEncodeH265ReferenceListsInfoEXT( *reinterpret_cast<VideoEncodeH265ReferenceListsInfoEXT const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- VideoEncodeH265ReferenceListsEXT(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT> const &
- referenceList0Entries_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT> const &
- referenceList1Entries_ = {},
- const StdVideoEncodeH265ReferenceModifications * pReferenceModifications_ = {} )
- : referenceList0EntryCount( static_cast<uint8_t>( referenceList0Entries_.size() ) )
+ VideoEncodeH265ReferenceListsInfoEXT(
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT> const & referenceList0Entries_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT> const & referenceList1Entries_ = {},
+ const StdVideoEncodeH265ReferenceModifications * pReferenceModifications_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , referenceList0EntryCount( static_cast<uint8_t>( referenceList0Entries_.size() ) )
, pReferenceList0Entries( referenceList0Entries_.data() )
, referenceList1EntryCount( static_cast<uint8_t>( referenceList1Entries_.size() ) )
, pReferenceList1Entries( referenceList1Entries_.data() )
, pReferenceModifications( pReferenceModifications_ )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoEncodeH265ReferenceListsEXT &
- operator=( VideoEncodeH265ReferenceListsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoEncodeH265ReferenceListsInfoEXT & operator=( VideoEncodeH265ReferenceListsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoEncodeH265ReferenceListsEXT & operator=( VkVideoEncodeH265ReferenceListsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VideoEncodeH265ReferenceListsInfoEXT & operator=( VkVideoEncodeH265ReferenceListsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT const *>( &rhs );
return *this;
}
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsEXT &
- setReferenceList0EntryCount( uint8_t referenceList0EntryCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsInfoEXT & setReferenceList0EntryCount( uint8_t referenceList0EntryCount_ ) VULKAN_HPP_NOEXCEPT
{
referenceList0EntryCount = referenceList0EntryCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsEXT & setPReferenceList0Entries(
- const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList0Entries_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsInfoEXT &
+ setPReferenceList0Entries( const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList0Entries_ ) VULKAN_HPP_NOEXCEPT
{
pReferenceList0Entries = pReferenceList0Entries_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- VideoEncodeH265ReferenceListsEXT & setReferenceList0Entries(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT> const &
- referenceList0Entries_ ) VULKAN_HPP_NOEXCEPT
+ VideoEncodeH265ReferenceListsInfoEXT & setReferenceList0Entries(
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT> const & referenceList0Entries_ )
+ VULKAN_HPP_NOEXCEPT
{
referenceList0EntryCount = static_cast<uint8_t>( referenceList0Entries_.size() );
pReferenceList0Entries = referenceList0Entries_.data();
@@ -97450,24 +99770,23 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsEXT &
- setReferenceList1EntryCount( uint8_t referenceList1EntryCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsInfoEXT & setReferenceList1EntryCount( uint8_t referenceList1EntryCount_ ) VULKAN_HPP_NOEXCEPT
{
referenceList1EntryCount = referenceList1EntryCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsEXT & setPReferenceList1Entries(
- const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList1Entries_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsInfoEXT &
+ setPReferenceList1Entries( const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList1Entries_ ) VULKAN_HPP_NOEXCEPT
{
pReferenceList1Entries = pReferenceList1Entries_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- VideoEncodeH265ReferenceListsEXT & setReferenceList1Entries(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT> const &
- referenceList1Entries_ ) VULKAN_HPP_NOEXCEPT
+ VideoEncodeH265ReferenceListsInfoEXT & setReferenceList1Entries(
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT> const & referenceList1Entries_ )
+ VULKAN_HPP_NOEXCEPT
{
referenceList1EntryCount = static_cast<uint8_t>( referenceList1Entries_.size() );
pReferenceList1Entries = referenceList1Entries_.data();
@@ -97475,25 +99794,25 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsEXT & setPReferenceModifications(
- const StdVideoEncodeH265ReferenceModifications * pReferenceModifications_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsInfoEXT &
+ setPReferenceModifications( const StdVideoEncodeH265ReferenceModifications * pReferenceModifications_ ) VULKAN_HPP_NOEXCEPT
{
pReferenceModifications = pReferenceModifications_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoEncodeH265ReferenceListsEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH265ReferenceListsInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkVideoEncodeH265ReferenceListsEXT *>( this );
+ return *reinterpret_cast<const VkVideoEncodeH265ReferenceListsInfoEXT *>( this );
}
- explicit operator VkVideoEncodeH265ReferenceListsEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH265ReferenceListsInfoEXT &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkVideoEncodeH265ReferenceListsEXT *>( this );
+ return *reinterpret_cast<VkVideoEncodeH265ReferenceListsInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -97507,147 +99826,130 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- referenceList0EntryCount,
- pReferenceList0Entries,
- referenceList1EntryCount,
- pReferenceList1Entries,
- pReferenceModifications );
+ return std::tie(
+ sType, pNext, referenceList0EntryCount, pReferenceList0Entries, referenceList1EntryCount, pReferenceList1Entries, pReferenceModifications );
}
# endif
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( VideoEncodeH265ReferenceListsEXT const & ) const = default;
+ auto operator<=>( VideoEncodeH265ReferenceListsInfoEXT const & ) const = default;
# else
- bool operator==( VideoEncodeH265ReferenceListsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( VideoEncodeH265ReferenceListsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( referenceList0EntryCount == rhs.referenceList0EntryCount ) &&
- ( pReferenceList0Entries == rhs.pReferenceList0Entries ) &&
- ( referenceList1EntryCount == rhs.referenceList1EntryCount ) &&
- ( pReferenceList1Entries == rhs.pReferenceList1Entries ) &&
- ( pReferenceModifications == rhs.pReferenceModifications );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( referenceList0EntryCount == rhs.referenceList0EntryCount ) &&
+ ( pReferenceList0Entries == rhs.pReferenceList0Entries ) && ( referenceList1EntryCount == rhs.referenceList1EntryCount ) &&
+ ( pReferenceList1Entries == rhs.pReferenceList1Entries ) && ( pReferenceModifications == rhs.pReferenceModifications );
# endif
}
- bool operator!=( VideoEncodeH265ReferenceListsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( VideoEncodeH265ReferenceListsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265ReferenceListsEXT;
- const void * pNext = {};
- uint8_t referenceList0EntryCount = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265ReferenceListsInfoEXT;
+ const void * pNext = {};
+ uint8_t referenceList0EntryCount = {};
const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList0Entries = {};
uint8_t referenceList1EntryCount = {};
const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList1Entries = {};
const StdVideoEncodeH265ReferenceModifications * pReferenceModifications = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT ) ==
- sizeof( VkVideoEncodeH265ReferenceListsEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT>::value,
- "VideoEncodeH265ReferenceListsEXT is not nothrow_move_constructible!" );
template <>
- struct CppType<StructureType, StructureType::eVideoEncodeH265ReferenceListsEXT>
+ struct CppType<StructureType, StructureType::eVideoEncodeH265ReferenceListsInfoEXT>
{
- using Type = VideoEncodeH265ReferenceListsEXT;
+ using Type = VideoEncodeH265ReferenceListsInfoEXT;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct VideoEncodeH265NaluSliceSegmentEXT
+ struct VideoEncodeH265NaluSliceSegmentInfoEXT
{
- using NativeType = VkVideoEncodeH265NaluSliceSegmentEXT;
+ using NativeType = VkVideoEncodeH265NaluSliceSegmentInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eVideoEncodeH265NaluSliceSegmentEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265NaluSliceSegmentInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoEncodeH265NaluSliceSegmentEXT(
- uint32_t ctbCount_ = {},
- const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT * pReferenceFinalLists_ = {},
- const StdVideoEncodeH265SliceSegmentHeader * pSliceSegmentHeaderStd_ = {} ) VULKAN_HPP_NOEXCEPT
- : ctbCount( ctbCount_ )
+ VULKAN_HPP_CONSTEXPR VideoEncodeH265NaluSliceSegmentInfoEXT( uint32_t ctbCount_ = {},
+ const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT * pReferenceFinalLists_ = {},
+ const StdVideoEncodeH265SliceSegmentHeader * pSliceSegmentHeaderStd_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , ctbCount( ctbCount_ )
, pReferenceFinalLists( pReferenceFinalLists_ )
, pSliceSegmentHeaderStd( pSliceSegmentHeaderStd_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR VideoEncodeH265NaluSliceSegmentEXT( VideoEncodeH265NaluSliceSegmentEXT const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoEncodeH265NaluSliceSegmentInfoEXT( VideoEncodeH265NaluSliceSegmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoEncodeH265NaluSliceSegmentEXT( VkVideoEncodeH265NaluSliceSegmentEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : VideoEncodeH265NaluSliceSegmentEXT( *reinterpret_cast<VideoEncodeH265NaluSliceSegmentEXT const *>( &rhs ) )
- {}
+ VideoEncodeH265NaluSliceSegmentInfoEXT( VkVideoEncodeH265NaluSliceSegmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : VideoEncodeH265NaluSliceSegmentInfoEXT( *reinterpret_cast<VideoEncodeH265NaluSliceSegmentInfoEXT const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoEncodeH265NaluSliceSegmentEXT &
- operator=( VideoEncodeH265NaluSliceSegmentEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoEncodeH265NaluSliceSegmentInfoEXT & operator=( VideoEncodeH265NaluSliceSegmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoEncodeH265NaluSliceSegmentEXT &
- operator=( VkVideoEncodeH265NaluSliceSegmentEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VideoEncodeH265NaluSliceSegmentInfoEXT & operator=( VkVideoEncodeH265NaluSliceSegmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentEXT const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT const *>( &rhs );
return *this;
}
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentEXT & setCtbCount( uint32_t ctbCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentInfoEXT & setCtbCount( uint32_t ctbCount_ ) VULKAN_HPP_NOEXCEPT
{
ctbCount = ctbCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentEXT & setPReferenceFinalLists(
- const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT * pReferenceFinalLists_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentInfoEXT &
+ setPReferenceFinalLists( const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT * pReferenceFinalLists_ ) VULKAN_HPP_NOEXCEPT
{
pReferenceFinalLists = pReferenceFinalLists_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentEXT & setPSliceSegmentHeaderStd(
- const StdVideoEncodeH265SliceSegmentHeader * pSliceSegmentHeaderStd_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentInfoEXT &
+ setPSliceSegmentHeaderStd( const StdVideoEncodeH265SliceSegmentHeader * pSliceSegmentHeaderStd_ ) VULKAN_HPP_NOEXCEPT
{
pSliceSegmentHeaderStd = pSliceSegmentHeaderStd_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoEncodeH265NaluSliceSegmentEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH265NaluSliceSegmentInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkVideoEncodeH265NaluSliceSegmentEXT *>( this );
+ return *reinterpret_cast<const VkVideoEncodeH265NaluSliceSegmentInfoEXT *>( this );
}
- explicit operator VkVideoEncodeH265NaluSliceSegmentEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH265NaluSliceSegmentInfoEXT &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkVideoEncodeH265NaluSliceSegmentEXT *>( this );
+ return *reinterpret_cast<VkVideoEncodeH265NaluSliceSegmentInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
const void * const &,
uint32_t const &,
- const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT * const &,
+ const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT * const &,
const StdVideoEncodeH265SliceSegmentHeader * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
@@ -97657,103 +99959,95 @@ namespace VULKAN_HPP_NAMESPACE
# endif
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( VideoEncodeH265NaluSliceSegmentEXT const & ) const = default;
+ auto operator<=>( VideoEncodeH265NaluSliceSegmentInfoEXT const & ) const = default;
# else
- bool operator==( VideoEncodeH265NaluSliceSegmentEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( VideoEncodeH265NaluSliceSegmentInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ctbCount == rhs.ctbCount ) &&
- ( pReferenceFinalLists == rhs.pReferenceFinalLists ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ctbCount == rhs.ctbCount ) && ( pReferenceFinalLists == rhs.pReferenceFinalLists ) &&
( pSliceSegmentHeaderStd == rhs.pSliceSegmentHeaderStd );
# endif
}
- bool operator!=( VideoEncodeH265NaluSliceSegmentEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( VideoEncodeH265NaluSliceSegmentInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265NaluSliceSegmentEXT;
- const void * pNext = {};
- uint32_t ctbCount = {};
- const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT * pReferenceFinalLists = {};
- const StdVideoEncodeH265SliceSegmentHeader * pSliceSegmentHeaderStd = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265NaluSliceSegmentInfoEXT;
+ const void * pNext = {};
+ uint32_t ctbCount = {};
+ const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT * pReferenceFinalLists = {};
+ const StdVideoEncodeH265SliceSegmentHeader * pSliceSegmentHeaderStd = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentEXT ) ==
- sizeof( VkVideoEncodeH265NaluSliceSegmentEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentEXT>::value,
- "VideoEncodeH265NaluSliceSegmentEXT is not nothrow_move_constructible!" );
template <>
- struct CppType<StructureType, StructureType::eVideoEncodeH265NaluSliceSegmentEXT>
+ struct CppType<StructureType, StructureType::eVideoEncodeH265NaluSliceSegmentInfoEXT>
{
- using Type = VideoEncodeH265NaluSliceSegmentEXT;
+ using Type = VideoEncodeH265NaluSliceSegmentInfoEXT;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct VideoEncodeH265ProfileEXT
+ struct VideoEncodeH265ProfileInfoEXT
{
- using NativeType = VkVideoEncodeH265ProfileEXT;
+ using NativeType = VkVideoEncodeH265ProfileInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265ProfileEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265ProfileInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoEncodeH265ProfileEXT( StdVideoH265ProfileIdc stdProfileIdc_ = {} ) VULKAN_HPP_NOEXCEPT
- : stdProfileIdc( stdProfileIdc_ )
- {}
+ VULKAN_HPP_CONSTEXPR VideoEncodeH265ProfileInfoEXT( StdVideoH265ProfileIdc stdProfileIdc_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , stdProfileIdc( stdProfileIdc_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- VideoEncodeH265ProfileEXT( VideoEncodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoEncodeH265ProfileInfoEXT( VideoEncodeH265ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoEncodeH265ProfileEXT( VkVideoEncodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : VideoEncodeH265ProfileEXT( *reinterpret_cast<VideoEncodeH265ProfileEXT const *>( &rhs ) )
- {}
+ VideoEncodeH265ProfileInfoEXT( VkVideoEncodeH265ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : VideoEncodeH265ProfileInfoEXT( *reinterpret_cast<VideoEncodeH265ProfileInfoEXT const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoEncodeH265ProfileEXT & operator=( VideoEncodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoEncodeH265ProfileInfoEXT & operator=( VideoEncodeH265ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoEncodeH265ProfileEXT & operator=( VkVideoEncodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VideoEncodeH265ProfileInfoEXT & operator=( VkVideoEncodeH265ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileEXT const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoEXT const *>( &rhs );
return *this;
}
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ProfileEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ProfileInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ProfileEXT &
- setStdProfileIdc( StdVideoH265ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ProfileInfoEXT & setStdProfileIdc( StdVideoH265ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT
{
stdProfileIdc = stdProfileIdc_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoEncodeH265ProfileEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH265ProfileInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkVideoEncodeH265ProfileEXT *>( this );
+ return *reinterpret_cast<const VkVideoEncodeH265ProfileInfoEXT *>( this );
}
- explicit operator VkVideoEncodeH265ProfileEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH265ProfileInfoEXT &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkVideoEncodeH265ProfileEXT *>( this );
+ return *reinterpret_cast<VkVideoEncodeH265ProfileInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -97766,7 +100060,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- std::strong_ordering operator<=>( VideoEncodeH265ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ std::strong_ordering operator<=>( VideoEncodeH265ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
return cmp;
@@ -97779,34 +100073,26 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif
- bool operator==( VideoEncodeH265ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( VideoEncodeH265ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ) == 0 );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ) == 0 );
}
- bool operator!=( VideoEncodeH265ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( VideoEncodeH265ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265ProfileEXT;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265ProfileInfoEXT;
const void * pNext = {};
StdVideoH265ProfileIdc stdProfileIdc = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileEXT ) ==
- sizeof( VkVideoEncodeH265ProfileEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileEXT>::value,
- "VideoEncodeH265ProfileEXT is not nothrow_move_constructible!" );
template <>
- struct CppType<StructureType, StructureType::eVideoEncodeH265ProfileEXT>
+ struct CppType<StructureType, StructureType::eVideoEncodeH265ProfileInfoEXT>
{
- using Type = VideoEncodeH265ProfileEXT;
+ using Type = VideoEncodeH265ProfileInfoEXT;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
@@ -97816,18 +100102,19 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkVideoEncodeH265QpEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- VideoEncodeH265QpEXT( int32_t qpI_ = {}, int32_t qpP_ = {}, int32_t qpB_ = {} ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR VideoEncodeH265QpEXT( int32_t qpI_ = {}, int32_t qpP_ = {}, int32_t qpB_ = {} ) VULKAN_HPP_NOEXCEPT
: qpI( qpI_ )
, qpP( qpP_ )
, qpB( qpB_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR VideoEncodeH265QpEXT( VideoEncodeH265QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH265QpEXT( VkVideoEncodeH265QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH265QpEXT( *reinterpret_cast<VideoEncodeH265QpEXT const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
VideoEncodeH265QpEXT & operator=( VideoEncodeH265QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -97858,17 +100145,17 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoEncodeH265QpEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH265QpEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH265QpEXT *>( this );
}
- explicit operator VkVideoEncodeH265QpEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH265QpEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH265QpEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -97885,7 +100172,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoEncodeH265QpEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( qpI == rhs.qpI ) && ( qpP == rhs.qpP ) && ( qpB == rhs.qpB );
@@ -97903,12 +100190,6 @@ namespace VULKAN_HPP_NAMESPACE
int32_t qpP = {};
int32_t qpB = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT ) == sizeof( VkVideoEncodeH265QpEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT>::value,
- "VideoEncodeH265QpEXT is not nothrow_move_constructible!" );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
@@ -97917,34 +100198,34 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkVideoEncodeH265RateControlInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eVideoEncodeH265RateControlInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265RateControlInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlInfoEXT(
- uint32_t gopFrameCount_ = {},
- uint32_t idrPeriod_ = {},
- uint32_t consecutiveBFrameCount_ = {},
- VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureFlagBitsEXT rateControlStructure_ =
- VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureFlagBitsEXT::eUnknown,
- uint8_t subLayerCount_ = {} ) VULKAN_HPP_NOEXCEPT
- : gopFrameCount( gopFrameCount_ )
+ VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlInfoEXT( uint32_t gopFrameCount_ = {},
+ uint32_t idrPeriod_ = {},
+ uint32_t consecutiveBFrameCount_ = {},
+ VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureEXT rateControlStructure_ =
+ VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureEXT::eUnknown,
+ uint8_t subLayerCount_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , gopFrameCount( gopFrameCount_ )
, idrPeriod( idrPeriod_ )
, consecutiveBFrameCount( consecutiveBFrameCount_ )
, rateControlStructure( rateControlStructure_ )
, subLayerCount( subLayerCount_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- VideoEncodeH265RateControlInfoEXT( VideoEncodeH265RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlInfoEXT( VideoEncodeH265RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH265RateControlInfoEXT( VkVideoEncodeH265RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH265RateControlInfoEXT( *reinterpret_cast<VideoEncodeH265RateControlInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoEncodeH265RateControlInfoEXT &
- operator=( VideoEncodeH265RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoEncodeH265RateControlInfoEXT & operator=( VideoEncodeH265RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH265RateControlInfoEXT & operator=( VkVideoEncodeH265RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -97959,8 +100240,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT &
- setGopFrameCount( uint32_t gopFrameCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT & setGopFrameCount( uint32_t gopFrameCount_ ) VULKAN_HPP_NOEXCEPT
{
gopFrameCount = gopFrameCount_;
return *this;
@@ -97972,39 +100252,37 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT &
- setConsecutiveBFrameCount( uint32_t consecutiveBFrameCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT & setConsecutiveBFrameCount( uint32_t consecutiveBFrameCount_ ) VULKAN_HPP_NOEXCEPT
{
consecutiveBFrameCount = consecutiveBFrameCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT & setRateControlStructure(
- VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureFlagBitsEXT rateControlStructure_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT &
+ setRateControlStructure( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureEXT rateControlStructure_ ) VULKAN_HPP_NOEXCEPT
{
rateControlStructure = rateControlStructure_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT &
- setSubLayerCount( uint8_t subLayerCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT & setSubLayerCount( uint8_t subLayerCount_ ) VULKAN_HPP_NOEXCEPT
{
subLayerCount = subLayerCount_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoEncodeH265RateControlInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH265RateControlInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH265RateControlInfoEXT *>( this );
}
- explicit operator VkVideoEncodeH265RateControlInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH265RateControlInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH265RateControlInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -98013,13 +100291,12 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t const &,
uint32_t const &,
uint32_t const &,
- VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureFlagBitsEXT const &,
+ VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureEXT const &,
uint8_t const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- sType, pNext, gopFrameCount, idrPeriod, consecutiveBFrameCount, rateControlStructure, subLayerCount );
+ return std::tie( sType, pNext, gopFrameCount, idrPeriod, consecutiveBFrameCount, rateControlStructure, subLayerCount );
}
# endif
@@ -98028,12 +100305,12 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoEncodeH265RateControlInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( gopFrameCount == rhs.gopFrameCount ) &&
- ( idrPeriod == rhs.idrPeriod ) && ( consecutiveBFrameCount == rhs.consecutiveBFrameCount ) &&
- ( rateControlStructure == rhs.rateControlStructure ) && ( subLayerCount == rhs.subLayerCount );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( gopFrameCount == rhs.gopFrameCount ) && ( idrPeriod == rhs.idrPeriod ) &&
+ ( consecutiveBFrameCount == rhs.consecutiveBFrameCount ) && ( rateControlStructure == rhs.rateControlStructure ) &&
+ ( subLayerCount == rhs.subLayerCount );
# endif
}
@@ -98044,23 +100321,14 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265RateControlInfoEXT;
- const void * pNext = {};
- uint32_t gopFrameCount = {};
- uint32_t idrPeriod = {};
- uint32_t consecutiveBFrameCount = {};
- VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureFlagBitsEXT rateControlStructure =
- VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureFlagBitsEXT::eUnknown;
- uint8_t subLayerCount = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT ) ==
- sizeof( VkVideoEncodeH265RateControlInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT>::value,
- "VideoEncodeH265RateControlInfoEXT is not nothrow_move_constructible!" );
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265RateControlInfoEXT;
+ const void * pNext = {};
+ uint32_t gopFrameCount = {};
+ uint32_t idrPeriod = {};
+ uint32_t consecutiveBFrameCount = {};
+ VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureEXT rateControlStructure = VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureEXT::eUnknown;
+ uint8_t subLayerCount = {};
+ };
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH265RateControlInfoEXT>
@@ -98075,21 +100343,21 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkVideoEncodeH265RateControlLayerInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eVideoEncodeH265RateControlLayerInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265RateControlLayerInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlLayerInfoEXT(
- uint8_t temporalId_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp_ = {},
- VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT initialRcQp_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ = {},
- VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT minQp_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ = {},
- VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT maxQp_ = {},
- VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ = {},
- VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT maxFrameSize_ = {} ) VULKAN_HPP_NOEXCEPT
- : temporalId( temporalId_ )
+ VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlLayerInfoEXT( uint8_t temporalId_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp_ = {},
+ VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT initialRcQp_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ = {},
+ VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT minQp_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ = {},
+ VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT maxQp_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ = {},
+ VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT maxFrameSize_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , temporalId( temporalId_ )
, useInitialRcQp( useInitialRcQp_ )
, initialRcQp( initialRcQp_ )
, useMinQp( useMinQp_ )
@@ -98098,22 +100366,20 @@ namespace VULKAN_HPP_NAMESPACE
, maxQp( maxQp_ )
, useMaxFrameSize( useMaxFrameSize_ )
, maxFrameSize( maxFrameSize_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlLayerInfoEXT( VideoEncodeH265RateControlLayerInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlLayerInfoEXT( VideoEncodeH265RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH265RateControlLayerInfoEXT( VkVideoEncodeH265RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : VideoEncodeH265RateControlLayerInfoEXT(
- *reinterpret_cast<VideoEncodeH265RateControlLayerInfoEXT const *>( &rhs ) )
- {}
+ : VideoEncodeH265RateControlLayerInfoEXT( *reinterpret_cast<VideoEncodeH265RateControlLayerInfoEXT const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoEncodeH265RateControlLayerInfoEXT &
- operator=( VideoEncodeH265RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoEncodeH265RateControlLayerInfoEXT & operator=( VideoEncodeH265RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoEncodeH265RateControlLayerInfoEXT &
- operator=( VkVideoEncodeH265RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VideoEncodeH265RateControlLayerInfoEXT & operator=( VkVideoEncodeH265RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT const *>( &rhs );
return *this;
@@ -98126,81 +100392,74 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT &
- setTemporalId( uint8_t temporalId_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setTemporalId( uint8_t temporalId_ ) VULKAN_HPP_NOEXCEPT
{
temporalId = temporalId_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT &
- setUseInitialRcQp( VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setUseInitialRcQp( VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp_ ) VULKAN_HPP_NOEXCEPT
{
useInitialRcQp = useInitialRcQp_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT &
- setInitialRcQp( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const & initialRcQp_ ) VULKAN_HPP_NOEXCEPT
+ setInitialRcQp( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const & initialRcQp_ ) VULKAN_HPP_NOEXCEPT
{
initialRcQp = initialRcQp_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT &
- setUseMinQp( VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setUseMinQp( VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ ) VULKAN_HPP_NOEXCEPT
{
useMinQp = useMinQp_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT &
- setMinQp( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const & minQp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setMinQp( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const & minQp_ ) VULKAN_HPP_NOEXCEPT
{
minQp = minQp_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT &
- setUseMaxQp( VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setUseMaxQp( VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ ) VULKAN_HPP_NOEXCEPT
{
useMaxQp = useMaxQp_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT &
- setMaxQp( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const & maxQp_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setMaxQp( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const & maxQp_ ) VULKAN_HPP_NOEXCEPT
{
maxQp = maxQp_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT &
- setUseMaxFrameSize( VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setUseMaxFrameSize( VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ ) VULKAN_HPP_NOEXCEPT
{
useMaxFrameSize = useMaxFrameSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT &
- setMaxFrameSize( VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT const & maxFrameSize_ ) VULKAN_HPP_NOEXCEPT
+ setMaxFrameSize( VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT const & maxFrameSize_ ) VULKAN_HPP_NOEXCEPT
{
maxFrameSize = maxFrameSize_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoEncodeH265RateControlLayerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH265RateControlLayerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH265RateControlLayerInfoEXT *>( this );
}
- explicit operator VkVideoEncodeH265RateControlLayerInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH265RateControlLayerInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH265RateControlLayerInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -98218,17 +100477,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- temporalId,
- useInitialRcQp,
- initialRcQp,
- useMinQp,
- minQp,
- useMaxQp,
- maxQp,
- useMaxFrameSize,
- maxFrameSize );
+ return std::tie( sType, pNext, temporalId, useInitialRcQp, initialRcQp, useMinQp, minQp, useMaxQp, maxQp, useMaxFrameSize, maxFrameSize );
}
# endif
@@ -98237,14 +100486,12 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoEncodeH265RateControlLayerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( temporalId == rhs.temporalId ) &&
- ( useInitialRcQp == rhs.useInitialRcQp ) && ( initialRcQp == rhs.initialRcQp ) &&
- ( useMinQp == rhs.useMinQp ) && ( minQp == rhs.minQp ) && ( useMaxQp == rhs.useMaxQp ) &&
- ( maxQp == rhs.maxQp ) && ( useMaxFrameSize == rhs.useMaxFrameSize ) &&
- ( maxFrameSize == rhs.maxFrameSize );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( temporalId == rhs.temporalId ) && ( useInitialRcQp == rhs.useInitialRcQp ) &&
+ ( initialRcQp == rhs.initialRcQp ) && ( useMinQp == rhs.useMinQp ) && ( minQp == rhs.minQp ) && ( useMaxQp == rhs.useMaxQp ) &&
+ ( maxQp == rhs.maxQp ) && ( useMaxFrameSize == rhs.useMaxFrameSize ) && ( maxFrameSize == rhs.maxFrameSize );
# endif
}
@@ -98255,8 +100502,8 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265RateControlLayerInfoEXT;
- const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265RateControlLayerInfoEXT;
+ const void * pNext = {};
uint8_t temporalId = {};
VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp = {};
VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT initialRcQp = {};
@@ -98267,15 +100514,6 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize = {};
VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT maxFrameSize = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT ) ==
- sizeof( VkVideoEncodeH265RateControlLayerInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT>::value,
- "VideoEncodeH265RateControlLayerInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH265RateControlLayerInfoEXT>
@@ -98285,282 +100523,148 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct VideoEncodeH265SessionCreateInfoEXT
+ struct VideoEncodeH265SessionParametersAddInfoEXT
{
- using NativeType = VkVideoEncodeH265SessionCreateInfoEXT;
+ using NativeType = VkVideoEncodeH265SessionParametersAddInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eVideoEncodeH265SessionCreateInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionParametersAddInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoEXT(
- VULKAN_HPP_NAMESPACE::VideoEncodeH265CreateFlagsEXT flags_ = {},
- const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , pStdExtensionVersion( pStdExtensionVersion_ )
- {}
-
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoEXT( VideoEncodeH265SessionCreateInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
-
- VideoEncodeH265SessionCreateInfoEXT( VkVideoEncodeH265SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : VideoEncodeH265SessionCreateInfoEXT( *reinterpret_cast<VideoEncodeH265SessionCreateInfoEXT const *>( &rhs ) )
- {}
-# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
-
- VideoEncodeH265SessionCreateInfoEXT &
- operator=( VideoEncodeH265SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- VideoEncodeH265SessionCreateInfoEXT &
- operator=( VkVideoEncodeH265SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionCreateInfoEXT const *>( &rhs );
- return *this;
- }
-
-# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoEXT &
- setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH265CreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
- {
- flags = flags_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoEXT & setPStdExtensionVersion(
- const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT
- {
- pStdExtensionVersion = pStdExtensionVersion_;
- return *this;
- }
-# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
-
- explicit operator VkVideoEncodeH265SessionCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkVideoEncodeH265SessionCreateInfoEXT *>( this );
- }
-
- explicit operator VkVideoEncodeH265SessionCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersAddInfoEXT( uint32_t stdVPSCount_ = {},
+ const StdVideoH265VideoParameterSet * pStdVPSs_ = {},
+ uint32_t stdSPSCount_ = {},
+ const StdVideoH265SequenceParameterSet * pStdSPSs_ = {},
+ uint32_t stdPPSCount_ = {},
+ const StdVideoH265PictureParameterSet * pStdPPSs_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , stdVPSCount( stdVPSCount_ )
+ , pStdVPSs( pStdVPSs_ )
+ , stdSPSCount( stdSPSCount_ )
+ , pStdSPSs( pStdSPSs_ )
+ , stdPPSCount( stdPPSCount_ )
+ , pStdPPSs( pStdPPSs_ )
{
- return *reinterpret_cast<VkVideoEncodeH265SessionCreateInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
-# if 14 <= VULKAN_HPP_CPP_VERSION
- auto
-# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::VideoEncodeH265CreateFlagsEXT const &,
- const VULKAN_HPP_NAMESPACE::ExtensionProperties * const &>
-# endif
- reflect() const VULKAN_HPP_NOEXCEPT
- {
- return std::tie( sType, pNext, flags, pStdExtensionVersion );
- }
-# endif
+ VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersAddInfoEXT( VideoEncodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( VideoEncodeH265SessionCreateInfoEXT const & ) const = default;
-# else
- bool operator==( VideoEncodeH265SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ VideoEncodeH265SessionParametersAddInfoEXT( VkVideoEncodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : VideoEncodeH265SessionParametersAddInfoEXT( *reinterpret_cast<VideoEncodeH265SessionParametersAddInfoEXT const *>( &rhs ) )
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
- return this->reflect() == rhs.reflect();
-# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( pStdExtensionVersion == rhs.pStdExtensionVersion );
-# endif
}
- bool operator!=( VideoEncodeH265SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ VideoEncodeH265SessionParametersAddInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265VideoParameterSet> const & stdVPSs_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265SequenceParameterSet> const & stdSPSs_ = {},
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265PictureParameterSet> const & stdPPSs_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , stdVPSCount( static_cast<uint32_t>( stdVPSs_.size() ) )
+ , pStdVPSs( stdVPSs_.data() )
+ , stdSPSCount( static_cast<uint32_t>( stdSPSs_.size() ) )
+ , pStdSPSs( stdSPSs_.data() )
+ , stdPPSCount( static_cast<uint32_t>( stdPPSs_.size() ) )
+ , pStdPPSs( stdPPSs_.data() )
{
- return !operator==( rhs );
}
-# endif
-
- public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionCreateInfoEXT;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::VideoEncodeH265CreateFlagsEXT flags = {};
- const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionCreateInfoEXT ) ==
- sizeof( VkVideoEncodeH265SessionCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionCreateInfoEXT>::value,
- "VideoEncodeH265SessionCreateInfoEXT is not nothrow_move_constructible!" );
-
- template <>
- struct CppType<StructureType, StructureType::eVideoEncodeH265SessionCreateInfoEXT>
- {
- using Type = VideoEncodeH265SessionCreateInfoEXT;
- };
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct VideoEncodeH265SessionParametersAddInfoEXT
- {
- using NativeType = VkVideoEncodeH265SessionParametersAddInfoEXT;
-
- static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eVideoEncodeH265SessionParametersAddInfoEXT;
-
-# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersAddInfoEXT(
- uint32_t vpsStdCount_ = {},
- const StdVideoH265VideoParameterSet * pVpsStd_ = {},
- uint32_t spsStdCount_ = {},
- const StdVideoH265SequenceParameterSet * pSpsStd_ = {},
- uint32_t ppsStdCount_ = {},
- const StdVideoH265PictureParameterSet * pPpsStd_ = {} ) VULKAN_HPP_NOEXCEPT
- : vpsStdCount( vpsStdCount_ )
- , pVpsStd( pVpsStd_ )
- , spsStdCount( spsStdCount_ )
- , pSpsStd( pSpsStd_ )
- , ppsStdCount( ppsStdCount_ )
- , pPpsStd( pPpsStd_ )
- {}
-
- VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersAddInfoEXT(
- VideoEncodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- VideoEncodeH265SessionParametersAddInfoEXT( VkVideoEncodeH265SessionParametersAddInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : VideoEncodeH265SessionParametersAddInfoEXT(
- *reinterpret_cast<VideoEncodeH265SessionParametersAddInfoEXT const *>( &rhs ) )
- {}
-
-# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- VideoEncodeH265SessionParametersAddInfoEXT(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265VideoParameterSet> const & vpsStd_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265SequenceParameterSet> const & spsStd_ = {},
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265PictureParameterSet> const & ppsStd_ = {} )
- : vpsStdCount( static_cast<uint32_t>( vpsStd_.size() ) )
- , pVpsStd( vpsStd_.data() )
- , spsStdCount( static_cast<uint32_t>( spsStd_.size() ) )
- , pSpsStd( spsStd_.data() )
- , ppsStdCount( static_cast<uint32_t>( ppsStd_.size() ) )
- , pPpsStd( ppsStd_.data() )
- {}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoEncodeH265SessionParametersAddInfoEXT &
- operator=( VideoEncodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoEncodeH265SessionParametersAddInfoEXT & operator=( VideoEncodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoEncodeH265SessionParametersAddInfoEXT &
- operator=( VkVideoEncodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VideoEncodeH265SessionParametersAddInfoEXT & operator=( VkVideoEncodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT const *>( &rhs );
return *this;
}
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT &
- setVpsStdCount( uint32_t vpsStdCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & setStdVPSCount( uint32_t stdVPSCount_ ) VULKAN_HPP_NOEXCEPT
{
- vpsStdCount = vpsStdCount_;
+ stdVPSCount = stdVPSCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT &
- setPVpsStd( const StdVideoH265VideoParameterSet * pVpsStd_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & setPStdVPSs( const StdVideoH265VideoParameterSet * pStdVPSs_ ) VULKAN_HPP_NOEXCEPT
{
- pVpsStd = pVpsStd_;
+ pStdVPSs = pStdVPSs_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoEncodeH265SessionParametersAddInfoEXT &
- setVpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265VideoParameterSet> const & vpsStd_ )
- VULKAN_HPP_NOEXCEPT
+ setStdVPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265VideoParameterSet> const & stdVPSs_ ) VULKAN_HPP_NOEXCEPT
{
- vpsStdCount = static_cast<uint32_t>( vpsStd_.size() );
- pVpsStd = vpsStd_.data();
+ stdVPSCount = static_cast<uint32_t>( stdVPSs_.size() );
+ pStdVPSs = stdVPSs_.data();
return *this;
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT &
- setSpsStdCount( uint32_t spsStdCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & setStdSPSCount( uint32_t stdSPSCount_ ) VULKAN_HPP_NOEXCEPT
{
- spsStdCount = spsStdCount_;
+ stdSPSCount = stdSPSCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT &
- setPSpsStd( const StdVideoH265SequenceParameterSet * pSpsStd_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & setPStdSPSs( const StdVideoH265SequenceParameterSet * pStdSPSs_ ) VULKAN_HPP_NOEXCEPT
{
- pSpsStd = pSpsStd_;
+ pStdSPSs = pStdSPSs_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoEncodeH265SessionParametersAddInfoEXT &
- setSpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265SequenceParameterSet> const & spsStd_ )
- VULKAN_HPP_NOEXCEPT
+ setStdSPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265SequenceParameterSet> const & stdSPSs_ ) VULKAN_HPP_NOEXCEPT
{
- spsStdCount = static_cast<uint32_t>( spsStd_.size() );
- pSpsStd = spsStd_.data();
+ stdSPSCount = static_cast<uint32_t>( stdSPSs_.size() );
+ pStdSPSs = stdSPSs_.data();
return *this;
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT &
- setPpsStdCount( uint32_t ppsStdCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & setStdPPSCount( uint32_t stdPPSCount_ ) VULKAN_HPP_NOEXCEPT
{
- ppsStdCount = ppsStdCount_;
+ stdPPSCount = stdPPSCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT &
- setPPpsStd( const StdVideoH265PictureParameterSet * pPpsStd_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & setPStdPPSs( const StdVideoH265PictureParameterSet * pStdPPSs_ ) VULKAN_HPP_NOEXCEPT
{
- pPpsStd = pPpsStd_;
+ pStdPPSs = pStdPPSs_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoEncodeH265SessionParametersAddInfoEXT &
- setPpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265PictureParameterSet> const & ppsStd_ )
- VULKAN_HPP_NOEXCEPT
+ setStdPPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265PictureParameterSet> const & stdPPSs_ ) VULKAN_HPP_NOEXCEPT
{
- ppsStdCount = static_cast<uint32_t>( ppsStd_.size() );
- pPpsStd = ppsStd_.data();
+ stdPPSCount = static_cast<uint32_t>( stdPPSs_.size() );
+ pStdPPSs = stdPPSs_.data();
return *this;
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoEncodeH265SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH265SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH265SessionParametersAddInfoEXT *>( this );
}
- explicit operator VkVideoEncodeH265SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH265SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH265SessionParametersAddInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -98575,7 +100679,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType, pNext, vpsStdCount, pVpsStd, spsStdCount, pSpsStd, ppsStdCount, pPpsStd );
+ return std::tie( sType, pNext, stdVPSCount, pStdVPSs, stdSPSCount, pStdSPSs, stdPPSCount, pStdPPSs );
}
# endif
@@ -98584,12 +100688,11 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoEncodeH265SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vpsStdCount == rhs.vpsStdCount ) &&
- ( pVpsStd == rhs.pVpsStd ) && ( spsStdCount == rhs.spsStdCount ) && ( pSpsStd == rhs.pSpsStd ) &&
- ( ppsStdCount == rhs.ppsStdCount ) && ( pPpsStd == rhs.pPpsStd );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stdVPSCount == rhs.stdVPSCount ) && ( pStdVPSs == rhs.pStdVPSs ) &&
+ ( stdSPSCount == rhs.stdSPSCount ) && ( pStdSPSs == rhs.pStdSPSs ) && ( stdPPSCount == rhs.stdPPSCount ) && ( pStdPPSs == rhs.pStdPPSs );
# endif
}
@@ -98602,22 +100705,13 @@ namespace VULKAN_HPP_NAMESPACE
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionParametersAddInfoEXT;
const void * pNext = {};
- uint32_t vpsStdCount = {};
- const StdVideoH265VideoParameterSet * pVpsStd = {};
- uint32_t spsStdCount = {};
- const StdVideoH265SequenceParameterSet * pSpsStd = {};
- uint32_t ppsStdCount = {};
- const StdVideoH265PictureParameterSet * pPpsStd = {};
- };
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT ) ==
- sizeof( VkVideoEncodeH265SessionParametersAddInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT>::value,
- "VideoEncodeH265SessionParametersAddInfoEXT is not nothrow_move_constructible!" );
+ uint32_t stdVPSCount = {};
+ const StdVideoH265VideoParameterSet * pStdVPSs = {};
+ uint32_t stdSPSCount = {};
+ const StdVideoH265SequenceParameterSet * pStdSPSs = {};
+ uint32_t stdPPSCount = {};
+ const StdVideoH265PictureParameterSet * pStdPPSs = {};
+ };
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH265SessionParametersAddInfoEXT>
@@ -98632,90 +100726,84 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkVideoEncodeH265SessionParametersCreateInfoEXT;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eVideoEncodeH265SessionParametersCreateInfoEXT;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionParametersCreateInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersCreateInfoEXT(
- uint32_t maxVpsStdCount_ = {},
- uint32_t maxSpsStdCount_ = {},
- uint32_t maxPpsStdCount_ = {},
- const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT * pParametersAddInfo_ = {} )
- VULKAN_HPP_NOEXCEPT
- : maxVpsStdCount( maxVpsStdCount_ )
- , maxSpsStdCount( maxSpsStdCount_ )
- , maxPpsStdCount( maxPpsStdCount_ )
+ VULKAN_HPP_CONSTEXPR
+ VideoEncodeH265SessionParametersCreateInfoEXT( uint32_t maxStdVPSCount_ = {},
+ uint32_t maxStdSPSCount_ = {},
+ uint32_t maxStdPPSCount_ = {},
+ const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT * pParametersAddInfo_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , maxStdVPSCount( maxStdVPSCount_ )
+ , maxStdSPSCount( maxStdSPSCount_ )
+ , maxStdPPSCount( maxStdPPSCount_ )
, pParametersAddInfo( pParametersAddInfo_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersCreateInfoEXT(
- VideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR
+ VideoEncodeH265SessionParametersCreateInfoEXT( VideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoEncodeH265SessionParametersCreateInfoEXT( VkVideoEncodeH265SessionParametersCreateInfoEXT const & rhs )
- VULKAN_HPP_NOEXCEPT
- : VideoEncodeH265SessionParametersCreateInfoEXT(
- *reinterpret_cast<VideoEncodeH265SessionParametersCreateInfoEXT const *>( &rhs ) )
- {}
+ VideoEncodeH265SessionParametersCreateInfoEXT( VkVideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : VideoEncodeH265SessionParametersCreateInfoEXT( *reinterpret_cast<VideoEncodeH265SessionParametersCreateInfoEXT const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoEncodeH265SessionParametersCreateInfoEXT &
- operator=( VideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoEncodeH265SessionParametersCreateInfoEXT & operator=( VideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoEncodeH265SessionParametersCreateInfoEXT &
- operator=( VkVideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VideoEncodeH265SessionParametersCreateInfoEXT & operator=( VkVideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT const *>( &rhs );
return *this;
}
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT &
- setMaxVpsStdCount( uint32_t maxVpsStdCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT & setMaxStdVPSCount( uint32_t maxStdVPSCount_ ) VULKAN_HPP_NOEXCEPT
{
- maxVpsStdCount = maxVpsStdCount_;
+ maxStdVPSCount = maxStdVPSCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT &
- setMaxSpsStdCount( uint32_t maxSpsStdCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT & setMaxStdSPSCount( uint32_t maxStdSPSCount_ ) VULKAN_HPP_NOEXCEPT
{
- maxSpsStdCount = maxSpsStdCount_;
+ maxStdSPSCount = maxStdSPSCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT &
- setMaxPpsStdCount( uint32_t maxPpsStdCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT & setMaxStdPPSCount( uint32_t maxStdPPSCount_ ) VULKAN_HPP_NOEXCEPT
{
- maxPpsStdCount = maxPpsStdCount_;
+ maxStdPPSCount = maxStdPPSCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT & setPParametersAddInfo(
- const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT &
+ setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT
{
pParametersAddInfo = pParametersAddInfo_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoEncodeH265SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH265SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH265SessionParametersCreateInfoEXT *>( this );
}
- explicit operator VkVideoEncodeH265SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH265SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH265SessionParametersCreateInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -98728,7 +100816,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType, pNext, maxVpsStdCount, maxSpsStdCount, maxPpsStdCount, pParametersAddInfo );
+ return std::tie( sType, pNext, maxStdVPSCount, maxStdSPSCount, maxStdPPSCount, pParametersAddInfo );
}
# endif
@@ -98737,12 +100825,11 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxVpsStdCount == rhs.maxVpsStdCount ) &&
- ( maxSpsStdCount == rhs.maxSpsStdCount ) && ( maxPpsStdCount == rhs.maxPpsStdCount ) &&
- ( pParametersAddInfo == rhs.pParametersAddInfo );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxStdVPSCount == rhs.maxStdVPSCount ) && ( maxStdSPSCount == rhs.maxStdSPSCount ) &&
+ ( maxStdPPSCount == rhs.maxStdPPSCount ) && ( pParametersAddInfo == rhs.pParametersAddInfo );
# endif
}
@@ -98753,22 +100840,13 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionParametersCreateInfoEXT;
- const void * pNext = {};
- uint32_t maxVpsStdCount = {};
- uint32_t maxSpsStdCount = {};
- uint32_t maxPpsStdCount = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionParametersCreateInfoEXT;
+ const void * pNext = {};
+ uint32_t maxStdVPSCount = {};
+ uint32_t maxStdSPSCount = {};
+ uint32_t maxStdPPSCount = {};
const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT * pParametersAddInfo = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT ) ==
- sizeof( VkVideoEncodeH265SessionParametersCreateInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT>::value,
- "VideoEncodeH265SessionParametersCreateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH265SessionParametersCreateInfoEXT>
@@ -98786,40 +100864,43 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265VclFrameInfoEXT;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoEncodeH265VclFrameInfoEXT(
- const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT * pReferenceFinalLists_ = {},
- uint32_t naluSliceSegmentEntryCount_ = {},
- const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentEXT * pNaluSliceSegmentEntries_ = {},
- const StdVideoEncodeH265PictureInfo * pCurrentPictureInfo_ = {} ) VULKAN_HPP_NOEXCEPT
- : pReferenceFinalLists( pReferenceFinalLists_ )
+ VULKAN_HPP_CONSTEXPR VideoEncodeH265VclFrameInfoEXT( const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT * pReferenceFinalLists_ = {},
+ uint32_t naluSliceSegmentEntryCount_ = {},
+ const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT * pNaluSliceSegmentEntries_ = {},
+ const StdVideoEncodeH265PictureInfo * pCurrentPictureInfo_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , pReferenceFinalLists( pReferenceFinalLists_ )
, naluSliceSegmentEntryCount( naluSliceSegmentEntryCount_ )
, pNaluSliceSegmentEntries( pNaluSliceSegmentEntries_ )
, pCurrentPictureInfo( pCurrentPictureInfo_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- VideoEncodeH265VclFrameInfoEXT( VideoEncodeH265VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoEncodeH265VclFrameInfoEXT( VideoEncodeH265VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH265VclFrameInfoEXT( VkVideoEncodeH265VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeH265VclFrameInfoEXT( *reinterpret_cast<VideoEncodeH265VclFrameInfoEXT const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoEncodeH265VclFrameInfoEXT(
- const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT * pReferenceFinalLists_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
- const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentEXT> const & naluSliceSegmentEntries_,
- const StdVideoEncodeH265PictureInfo * pCurrentPictureInfo_ = {} )
- : pReferenceFinalLists( pReferenceFinalLists_ )
+ const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT * pReferenceFinalLists_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT> const & naluSliceSegmentEntries_,
+ const StdVideoEncodeH265PictureInfo * pCurrentPictureInfo_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , pReferenceFinalLists( pReferenceFinalLists_ )
, naluSliceSegmentEntryCount( static_cast<uint32_t>( naluSliceSegmentEntries_.size() ) )
, pNaluSliceSegmentEntries( naluSliceSegmentEntries_.data() )
, pCurrentPictureInfo( pCurrentPictureInfo_ )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoEncodeH265VclFrameInfoEXT &
- operator=( VideoEncodeH265VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoEncodeH265VclFrameInfoEXT & operator=( VideoEncodeH265VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeH265VclFrameInfoEXT & operator=( VkVideoEncodeH265VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -98834,22 +100915,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT & setPReferenceFinalLists(
- const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT * pReferenceFinalLists_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT &
+ setPReferenceFinalLists( const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT * pReferenceFinalLists_ ) VULKAN_HPP_NOEXCEPT
{
pReferenceFinalLists = pReferenceFinalLists_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT &
- setNaluSliceSegmentEntryCount( uint32_t naluSliceSegmentEntryCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT & setNaluSliceSegmentEntryCount( uint32_t naluSliceSegmentEntryCount_ ) VULKAN_HPP_NOEXCEPT
{
naluSliceSegmentEntryCount = naluSliceSegmentEntryCount_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT & setPNaluSliceSegmentEntries(
- const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentEXT * pNaluSliceSegmentEntries_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT &
+ setPNaluSliceSegmentEntries( const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT * pNaluSliceSegmentEntries_ ) VULKAN_HPP_NOEXCEPT
{
pNaluSliceSegmentEntries = pNaluSliceSegmentEntries_;
return *this;
@@ -98857,8 +100937,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoEncodeH265VclFrameInfoEXT & setNaluSliceSegmentEntries(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
- const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentEXT> const & naluSliceSegmentEntries_ )
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT> const & naluSliceSegmentEntries_ )
VULKAN_HPP_NOEXCEPT
{
naluSliceSegmentEntryCount = static_cast<uint32_t>( naluSliceSegmentEntries_.size() );
@@ -98868,38 +100947,37 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT &
- setPCurrentPictureInfo( const StdVideoEncodeH265PictureInfo * pCurrentPictureInfo_ ) VULKAN_HPP_NOEXCEPT
+ setPCurrentPictureInfo( const StdVideoEncodeH265PictureInfo * pCurrentPictureInfo_ ) VULKAN_HPP_NOEXCEPT
{
pCurrentPictureInfo = pCurrentPictureInfo_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoEncodeH265VclFrameInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH265VclFrameInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeH265VclFrameInfoEXT *>( this );
}
- explicit operator VkVideoEncodeH265VclFrameInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeH265VclFrameInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeH265VclFrameInfoEXT *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
const void * const &,
- const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT * const &,
+ const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT * const &,
uint32_t const &,
- const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentEXT * const &,
+ const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT * const &,
const StdVideoEncodeH265PictureInfo * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie(
- sType, pNext, pReferenceFinalLists, naluSliceSegmentEntryCount, pNaluSliceSegmentEntries, pCurrentPictureInfo );
+ return std::tie( sType, pNext, pReferenceFinalLists, naluSliceSegmentEntryCount, pNaluSliceSegmentEntries, pCurrentPictureInfo );
}
# endif
@@ -98908,12 +100986,11 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoEncodeH265VclFrameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pReferenceFinalLists == rhs.pReferenceFinalLists ) &&
- ( naluSliceSegmentEntryCount == rhs.naluSliceSegmentEntryCount ) &&
- ( pNaluSliceSegmentEntries == rhs.pNaluSliceSegmentEntries ) &&
+ ( naluSliceSegmentEntryCount == rhs.naluSliceSegmentEntryCount ) && ( pNaluSliceSegmentEntries == rhs.pNaluSliceSegmentEntries ) &&
( pCurrentPictureInfo == rhs.pCurrentPictureInfo );
# endif
}
@@ -98925,21 +101002,13 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265VclFrameInfoEXT;
- const void * pNext = {};
- const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT * pReferenceFinalLists = {};
- uint32_t naluSliceSegmentEntryCount = {};
- const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentEXT * pNaluSliceSegmentEntries = {};
- const StdVideoEncodeH265PictureInfo * pCurrentPictureInfo = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265VclFrameInfoEXT;
+ const void * pNext = {};
+ const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT * pReferenceFinalLists = {};
+ uint32_t naluSliceSegmentEntryCount = {};
+ const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT * pNaluSliceSegmentEntries = {};
+ const StdVideoEncodeH265PictureInfo * pCurrentPictureInfo = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265VclFrameInfoEXT ) ==
- sizeof( VkVideoEncodeH265VclFrameInfoEXT ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265VclFrameInfoEXT>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265VclFrameInfoEXT>::value,
- "VideoEncodeH265VclFrameInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eVideoEncodeH265VclFrameInfoEXT>
@@ -98957,21 +101026,20 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeInfoKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- VideoEncodeInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_ = {},
- uint32_t qualityLevel_ = {},
- VULKAN_HPP_NAMESPACE::Extent2D codedExtent_ = {},
- VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset_ = {},
- VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange_ = {},
- VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR srcPictureResource_ = {},
- const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_ = {},
- uint32_t referenceSlotCount_ = {},
- const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ = {},
- uint32_t precedingExternallyEncodedBytes_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR VideoEncodeInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_ = {},
+ uint32_t qualityLevel_ = {},
+ VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange_ = {},
+ VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR srcPictureResource_ = {},
+ const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ = {},
+ uint32_t referenceSlotCount_ = {},
+ const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ = {},
+ uint32_t precedingExternallyEncodedBytes_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, qualityLevel( qualityLevel_ )
- , codedExtent( codedExtent_ )
, dstBitstreamBuffer( dstBitstreamBuffer_ )
, dstBitstreamBufferOffset( dstBitstreamBufferOffset_ )
, dstBitstreamBufferMaxRange( dstBitstreamBufferMaxRange_ )
@@ -98980,30 +101048,27 @@ namespace VULKAN_HPP_NAMESPACE
, referenceSlotCount( referenceSlotCount_ )
, pReferenceSlots( pReferenceSlots_ )
, precedingExternallyEncodedBytes( precedingExternallyEncodedBytes_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR VideoEncodeInfoKHR( VideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoEncodeInfoKHR( VkVideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : VideoEncodeInfoKHR( *reinterpret_cast<VideoEncodeInfoKHR const *>( &rhs ) )
- {}
+ VideoEncodeInfoKHR( VkVideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : VideoEncodeInfoKHR( *reinterpret_cast<VideoEncodeInfoKHR const *>( &rhs ) ) {}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- VideoEncodeInfoKHR(
- VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_,
- uint32_t qualityLevel_,
- VULKAN_HPP_NAMESPACE::Extent2D codedExtent_,
- VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer_,
- VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset_,
- VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange_,
- VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR srcPictureResource_,
- const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR> const &
- referenceSlots_,
- uint32_t precedingExternallyEncodedBytes_ = {} )
- : flags( flags_ )
+ VideoEncodeInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_,
+ uint32_t qualityLevel_,
+ VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer_,
+ VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset_,
+ VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange_,
+ VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR srcPictureResource_,
+ const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR> const & referenceSlots_,
+ uint32_t precedingExternallyEncodedBytes_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, qualityLevel( qualityLevel_ )
- , codedExtent( codedExtent_ )
, dstBitstreamBuffer( dstBitstreamBuffer_ )
, dstBitstreamBufferOffset( dstBitstreamBufferOffset_ )
, dstBitstreamBufferMaxRange( dstBitstreamBufferMaxRange_ )
@@ -99012,7 +101077,8 @@ namespace VULKAN_HPP_NAMESPACE
, referenceSlotCount( static_cast<uint32_t>( referenceSlots_.size() ) )
, pReferenceSlots( referenceSlots_.data() )
, precedingExternallyEncodedBytes( precedingExternallyEncodedBytes_ )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -99031,8 +101097,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR &
- setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -99044,57 +101109,47 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR &
- setCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & codedExtent_ ) VULKAN_HPP_NOEXCEPT
- {
- codedExtent = codedExtent_;
- return *this;
- }
-
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR &
- setDstBitstreamBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setDstBitstreamBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer_ ) VULKAN_HPP_NOEXCEPT
{
dstBitstreamBuffer = dstBitstreamBuffer_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR &
- setDstBitstreamBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setDstBitstreamBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset_ ) VULKAN_HPP_NOEXCEPT
{
dstBitstreamBufferOffset = dstBitstreamBufferOffset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR &
- setDstBitstreamBufferMaxRange( VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange_ ) VULKAN_HPP_NOEXCEPT
+ setDstBitstreamBufferMaxRange( VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange_ ) VULKAN_HPP_NOEXCEPT
{
dstBitstreamBufferMaxRange = dstBitstreamBufferMaxRange_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setSrcPictureResource(
- VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR const & srcPictureResource_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR &
+ setSrcPictureResource( VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR const & srcPictureResource_ ) VULKAN_HPP_NOEXCEPT
{
srcPictureResource = srcPictureResource_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setPSetupReferenceSlot(
- const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR &
+ setPSetupReferenceSlot( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ ) VULKAN_HPP_NOEXCEPT
{
pSetupReferenceSlot = pSetupReferenceSlot_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR &
- setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT
{
referenceSlotCount = referenceSlotCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR &
- setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT
+ setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT
{
pReferenceSlots = pReferenceSlots_;
return *this;
@@ -99102,8 +101157,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoEncodeInfoKHR & setReferenceSlots(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR> const &
- referenceSlots_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR> const & referenceSlots_ ) VULKAN_HPP_NOEXCEPT
{
referenceSlotCount = static_cast<uint32_t>( referenceSlots_.size() );
pReferenceSlots = referenceSlots_.data();
@@ -99111,25 +101165,24 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR &
- setPrecedingExternallyEncodedBytes( uint32_t precedingExternallyEncodedBytes_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setPrecedingExternallyEncodedBytes( uint32_t precedingExternallyEncodedBytes_ ) VULKAN_HPP_NOEXCEPT
{
precedingExternallyEncodedBytes = precedingExternallyEncodedBytes_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoEncodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeInfoKHR *>( this );
}
- explicit operator VkVideoEncodeInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -99137,14 +101190,13 @@ namespace VULKAN_HPP_NAMESPACE
const void * const &,
VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR const &,
uint32_t const &,
- VULKAN_HPP_NAMESPACE::Extent2D const &,
VULKAN_HPP_NAMESPACE::Buffer const &,
VULKAN_HPP_NAMESPACE::DeviceSize const &,
VULKAN_HPP_NAMESPACE::DeviceSize const &,
- VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR const &,
- const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * const &,
+ VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR const &,
+ const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * const &,
uint32_t const &,
- const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * const &,
+ const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * const &,
uint32_t const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
@@ -99153,7 +101205,6 @@ namespace VULKAN_HPP_NAMESPACE
pNext,
flags,
qualityLevel,
- codedExtent,
dstBitstreamBuffer,
dstBitstreamBufferOffset,
dstBitstreamBufferMaxRange,
@@ -99170,17 +101221,14 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoEncodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( qualityLevel == rhs.qualityLevel ) && ( codedExtent == rhs.codedExtent ) &&
- ( dstBitstreamBuffer == rhs.dstBitstreamBuffer ) &&
- ( dstBitstreamBufferOffset == rhs.dstBitstreamBufferOffset ) &&
- ( dstBitstreamBufferMaxRange == rhs.dstBitstreamBufferMaxRange ) &&
- ( srcPictureResource == rhs.srcPictureResource ) && ( pSetupReferenceSlot == rhs.pSetupReferenceSlot ) &&
- ( referenceSlotCount == rhs.referenceSlotCount ) && ( pReferenceSlots == rhs.pReferenceSlots ) &&
- ( precedingExternallyEncodedBytes == rhs.precedingExternallyEncodedBytes );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( qualityLevel == rhs.qualityLevel ) &&
+ ( dstBitstreamBuffer == rhs.dstBitstreamBuffer ) && ( dstBitstreamBufferOffset == rhs.dstBitstreamBufferOffset ) &&
+ ( dstBitstreamBufferMaxRange == rhs.dstBitstreamBufferMaxRange ) && ( srcPictureResource == rhs.srcPictureResource ) &&
+ ( pSetupReferenceSlot == rhs.pSetupReferenceSlot ) && ( referenceSlotCount == rhs.referenceSlotCount ) &&
+ ( pReferenceSlots == rhs.pReferenceSlots ) && ( precedingExternallyEncodedBytes == rhs.precedingExternallyEncodedBytes );
# endif
}
@@ -99191,26 +101239,19 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeInfoKHR;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags = {};
- uint32_t qualityLevel = {};
- VULKAN_HPP_NAMESPACE::Extent2D codedExtent = {};
- VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer = {};
- VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset = {};
- VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange = {};
- VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR srcPictureResource = {};
- const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot = {};
- uint32_t referenceSlotCount = {};
- const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots = {};
- uint32_t precedingExternallyEncodedBytes = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags = {};
+ uint32_t qualityLevel = {};
+ VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange = {};
+ VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR srcPictureResource = {};
+ const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot = {};
+ uint32_t referenceSlotCount = {};
+ const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots = {};
+ uint32_t precedingExternallyEncodedBytes = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR ) == sizeof( VkVideoEncodeInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR>::value,
- "VideoEncodeInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eVideoEncodeInfoKHR>
@@ -99225,38 +101266,37 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkVideoEncodeRateControlLayerInfoKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eVideoEncodeRateControlLayerInfoKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeRateControlLayerInfoKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- VideoEncodeRateControlLayerInfoKHR( uint32_t averageBitrate_ = {},
- uint32_t maxBitrate_ = {},
- uint32_t frameRateNumerator_ = {},
- uint32_t frameRateDenominator_ = {},
- uint32_t virtualBufferSizeInMs_ = {},
- uint32_t initialVirtualBufferSizeInMs_ = {} ) VULKAN_HPP_NOEXCEPT
- : averageBitrate( averageBitrate_ )
+ VULKAN_HPP_CONSTEXPR VideoEncodeRateControlLayerInfoKHR( uint32_t averageBitrate_ = {},
+ uint32_t maxBitrate_ = {},
+ uint32_t frameRateNumerator_ = {},
+ uint32_t frameRateDenominator_ = {},
+ uint32_t virtualBufferSizeInMs_ = {},
+ uint32_t initialVirtualBufferSizeInMs_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , averageBitrate( averageBitrate_ )
, maxBitrate( maxBitrate_ )
, frameRateNumerator( frameRateNumerator_ )
, frameRateDenominator( frameRateDenominator_ )
, virtualBufferSizeInMs( virtualBufferSizeInMs_ )
, initialVirtualBufferSizeInMs( initialVirtualBufferSizeInMs_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR VideoEncodeRateControlLayerInfoKHR( VideoEncodeRateControlLayerInfoKHR const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoEncodeRateControlLayerInfoKHR( VideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeRateControlLayerInfoKHR( VkVideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeRateControlLayerInfoKHR( *reinterpret_cast<VideoEncodeRateControlLayerInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoEncodeRateControlLayerInfoKHR &
- operator=( VideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoEncodeRateControlLayerInfoKHR & operator=( VideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoEncodeRateControlLayerInfoKHR &
- operator=( VkVideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VideoEncodeRateControlLayerInfoKHR & operator=( VkVideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR const *>( &rhs );
return *this;
@@ -99269,60 +101309,54 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR &
- setAverageBitrate( uint32_t averageBitrate_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setAverageBitrate( uint32_t averageBitrate_ ) VULKAN_HPP_NOEXCEPT
{
averageBitrate = averageBitrate_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR &
- setMaxBitrate( uint32_t maxBitrate_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setMaxBitrate( uint32_t maxBitrate_ ) VULKAN_HPP_NOEXCEPT
{
maxBitrate = maxBitrate_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR &
- setFrameRateNumerator( uint32_t frameRateNumerator_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setFrameRateNumerator( uint32_t frameRateNumerator_ ) VULKAN_HPP_NOEXCEPT
{
frameRateNumerator = frameRateNumerator_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR &
- setFrameRateDenominator( uint32_t frameRateDenominator_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setFrameRateDenominator( uint32_t frameRateDenominator_ ) VULKAN_HPP_NOEXCEPT
{
frameRateDenominator = frameRateDenominator_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR &
- setVirtualBufferSizeInMs( uint32_t virtualBufferSizeInMs_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setVirtualBufferSizeInMs( uint32_t virtualBufferSizeInMs_ ) VULKAN_HPP_NOEXCEPT
{
virtualBufferSizeInMs = virtualBufferSizeInMs_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR &
- setInitialVirtualBufferSizeInMs( uint32_t initialVirtualBufferSizeInMs_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setInitialVirtualBufferSizeInMs( uint32_t initialVirtualBufferSizeInMs_ ) VULKAN_HPP_NOEXCEPT
{
initialVirtualBufferSizeInMs = initialVirtualBufferSizeInMs_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoEncodeRateControlLayerInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeRateControlLayerInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeRateControlLayerInfoKHR *>( this );
}
- explicit operator VkVideoEncodeRateControlLayerInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeRateControlLayerInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeRateControlLayerInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -99337,14 +101371,8 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- averageBitrate,
- maxBitrate,
- frameRateNumerator,
- frameRateDenominator,
- virtualBufferSizeInMs,
- initialVirtualBufferSizeInMs );
+ return std::tie(
+ sType, pNext, averageBitrate, maxBitrate, frameRateNumerator, frameRateDenominator, virtualBufferSizeInMs, initialVirtualBufferSizeInMs );
}
# endif
@@ -99353,14 +101381,12 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoEncodeRateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( averageBitrate == rhs.averageBitrate ) &&
- ( maxBitrate == rhs.maxBitrate ) && ( frameRateNumerator == rhs.frameRateNumerator ) &&
- ( frameRateDenominator == rhs.frameRateDenominator ) &&
- ( virtualBufferSizeInMs == rhs.virtualBufferSizeInMs ) &&
- ( initialVirtualBufferSizeInMs == rhs.initialVirtualBufferSizeInMs );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( averageBitrate == rhs.averageBitrate ) && ( maxBitrate == rhs.maxBitrate ) &&
+ ( frameRateNumerator == rhs.frameRateNumerator ) && ( frameRateDenominator == rhs.frameRateDenominator ) &&
+ ( virtualBufferSizeInMs == rhs.virtualBufferSizeInMs ) && ( initialVirtualBufferSizeInMs == rhs.initialVirtualBufferSizeInMs );
# endif
}
@@ -99371,23 +101397,15 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeRateControlLayerInfoKHR;
- const void * pNext = {};
- uint32_t averageBitrate = {};
- uint32_t maxBitrate = {};
- uint32_t frameRateNumerator = {};
- uint32_t frameRateDenominator = {};
- uint32_t virtualBufferSizeInMs = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeRateControlLayerInfoKHR;
+ const void * pNext = {};
+ uint32_t averageBitrate = {};
+ uint32_t maxBitrate = {};
+ uint32_t frameRateNumerator = {};
+ uint32_t frameRateDenominator = {};
+ uint32_t virtualBufferSizeInMs = {};
uint32_t initialVirtualBufferSizeInMs = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR ) ==
- sizeof( VkVideoEncodeRateControlLayerInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR>::value,
- "VideoEncodeRateControlLayerInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eVideoEncodeRateControlLayerInfoKHR>
@@ -99406,40 +101424,43 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR VideoEncodeRateControlInfoKHR(
- VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_ = {},
- VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_ =
- VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eNone,
- uint8_t layerCount_ = {},
- const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayerConfigs_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_ = {},
+ VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_ = VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eNone,
+ uint8_t layerCount_ = {},
+ const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayerConfigs_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, rateControlMode( rateControlMode_ )
, layerCount( layerCount_ )
, pLayerConfigs( pLayerConfigs_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- VideoEncodeRateControlInfoKHR( VideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoEncodeRateControlInfoKHR( VideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeRateControlInfoKHR( VkVideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEncodeRateControlInfoKHR( *reinterpret_cast<VideoEncodeRateControlInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoEncodeRateControlInfoKHR(
- VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_,
- VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
- const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR> const & layerConfigs_ )
- : flags( flags_ )
+ VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_,
+ VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR> const & layerConfigs_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , flags( flags_ )
, rateControlMode( rateControlMode_ )
, layerCount( static_cast<uint8_t>( layerConfigs_.size() ) )
, pLayerConfigs( layerConfigs_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoEncodeRateControlInfoKHR &
- operator=( VideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoEncodeRateControlInfoKHR & operator=( VideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEncodeRateControlInfoKHR & operator=( VkVideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
@@ -99454,15 +101475,14 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR &
- setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setRateControlMode(
- VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR &
+ setRateControlMode( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_ ) VULKAN_HPP_NOEXCEPT
{
rateControlMode = rateControlMode_;
return *this;
@@ -99474,8 +101494,8 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setPLayerConfigs(
- const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayerConfigs_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR &
+ setPLayerConfigs( const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayerConfigs_ ) VULKAN_HPP_NOEXCEPT
{
pLayerConfigs = pLayerConfigs_;
return *this;
@@ -99483,8 +101503,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
VideoEncodeRateControlInfoKHR & setLayerConfigs(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
- const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR> const & layerConfigs_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR> const & layerConfigs_ ) VULKAN_HPP_NOEXCEPT
{
layerCount = static_cast<uint8_t>( layerConfigs_.size() );
pLayerConfigs = layerConfigs_.data();
@@ -99493,17 +101512,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoEncodeRateControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeRateControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEncodeRateControlInfoKHR *>( this );
}
- explicit operator VkVideoEncodeRateControlInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoEncodeRateControlInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEncodeRateControlInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -99525,12 +101544,11 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoEncodeRateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( rateControlMode == rhs.rateControlMode ) && ( layerCount == rhs.layerCount ) &&
- ( pLayerConfigs == rhs.pLayerConfigs );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( rateControlMode == rhs.rateControlMode ) &&
+ ( layerCount == rhs.layerCount ) && ( pLayerConfigs == rhs.pLayerConfigs );
# endif
}
@@ -99541,22 +101559,13 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeRateControlInfoKHR;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags = {};
- VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode =
- VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eNone;
- uint8_t layerCount = {};
- const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayerConfigs = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeRateControlInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags = {};
+ VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode = VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eNone;
+ uint8_t layerCount = {};
+ const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayerConfigs = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR ) ==
- sizeof( VkVideoEncodeRateControlInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR>::value,
- "VideoEncodeRateControlInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eVideoEncodeRateControlInfoKHR>
@@ -99566,6 +101575,130 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
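
Illustrative note (not part of the diff): with the reflowed setters above, a single-layer rate-control configuration can be built by chaining. This is a minimal sketch assuming VK_ENABLE_BETA_EXTENSIONS and a <vulkan/vulkan.hpp> matching this header revision; the bitrate and buffer values are placeholders.

#include <vulkan/vulkan.hpp>

static void exampleRateControl()
{
  // One rate-control layer: 5 Mbit/s average, 8 Mbit/s peak, 30 fps, 1 s virtual buffer.
  vk::VideoEncodeRateControlLayerInfoKHR layer{};
  layer.setAverageBitrate( 5000000 )
       .setMaxBitrate( 8000000 )
       .setFrameRateNumerator( 30 )
       .setFrameRateDenominator( 1 )
       .setVirtualBufferSizeInMs( 1000 )
       .setInitialVirtualBufferSizeInMs( 500 );

  vk::VideoEncodeRateControlInfoKHR rateControl{};
  rateControl.setRateControlMode( vk::VideoEncodeRateControlModeFlagBitsKHR::eNone )
             .setLayerConfigs( layer );  // ArrayProxy setter fills layerCount / pLayerConfigs; 'layer' must outlive any use of rateControl
  (void) rateControl;
}
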
#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ struct VideoEncodeUsageInfoKHR
+ {
+ using NativeType = VkVideoEncodeUsageInfoKHR;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeUsageInfoKHR;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR
+ VideoEncodeUsageInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeUsageFlagsKHR videoUsageHints_ = {},
+ VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagsKHR videoContentHints_ = {},
+ VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR tuningMode_ = VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR::eDefault,
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , videoUsageHints( videoUsageHints_ )
+ , videoContentHints( videoContentHints_ )
+ , tuningMode( tuningMode_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR VideoEncodeUsageInfoKHR( VideoEncodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ VideoEncodeUsageInfoKHR( VkVideoEncodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : VideoEncodeUsageInfoKHR( *reinterpret_cast<VideoEncodeUsageInfoKHR const *>( &rhs ) )
+ {
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ VideoEncodeUsageInfoKHR & operator=( VideoEncodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ VideoEncodeUsageInfoKHR & operator=( VkVideoEncodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeUsageInfoKHR const *>( &rhs );
+ return *this;
+ }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeUsageInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeUsageInfoKHR & setVideoUsageHints( VULKAN_HPP_NAMESPACE::VideoEncodeUsageFlagsKHR videoUsageHints_ ) VULKAN_HPP_NOEXCEPT
+ {
+ videoUsageHints = videoUsageHints_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeUsageInfoKHR &
+ setVideoContentHints( VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagsKHR videoContentHints_ ) VULKAN_HPP_NOEXCEPT
+ {
+ videoContentHints = videoContentHints_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 VideoEncodeUsageInfoKHR & setTuningMode( VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR tuningMode_ ) VULKAN_HPP_NOEXCEPT
+ {
+ tuningMode = tuningMode_;
+ return *this;
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+ operator VkVideoEncodeUsageInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkVideoEncodeUsageInfoKHR *>( this );
+ }
+
+ operator VkVideoEncodeUsageInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkVideoEncodeUsageInfoKHR *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ const void * const &,
+ VULKAN_HPP_NAMESPACE::VideoEncodeUsageFlagsKHR const &,
+ VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagsKHR const &,
+ VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, videoUsageHints, videoContentHints, tuningMode );
+ }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( VideoEncodeUsageInfoKHR const & ) const = default;
+# else
+ bool operator==( VideoEncodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoUsageHints == rhs.videoUsageHints ) && ( videoContentHints == rhs.videoContentHints ) &&
+ ( tuningMode == rhs.tuningMode );
+# endif
+ }
+
+ bool operator!=( VideoEncodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+# endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeUsageInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::VideoEncodeUsageFlagsKHR videoUsageHints = {};
+ VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagsKHR videoContentHints = {};
+ VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR tuningMode = VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR::eDefault;
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eVideoEncodeUsageInfoKHR>
+ {
+ using Type = VideoEncodeUsageInfoKHR;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
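
Illustrative note (not part of the diff): the newly added VideoEncodeUsageInfoKHR carries encoder use-case hints and is intended to be chained into a video profile. A hedged sketch; the eStreaming and eLowLatency enumerators are assumed from the VK_KHR_video_encode_queue specification and do not appear in this hunk (only eDefault does).

#include <vulkan/vulkan.hpp>

static vk::VideoEncodeUsageInfoKHR exampleEncodeUsage()
{
  vk::VideoEncodeUsageInfoKHR usage{};
  usage.setVideoUsageHints( vk::VideoEncodeUsageFlagBitsKHR::eStreaming )  // assumed enumerator
       .setVideoContentHints( {} )                                         // no specific content hint
       .setTuningMode( vk::VideoEncodeTuningModeKHR::eLowLatency );        // assumed enumerator; eDefault is the struct default
  return usage;                                                            // typically chained via pNext of a VideoProfileInfoKHR
}
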
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
struct VideoEndCodingInfoKHR
{
using NativeType = VkVideoEndCodingInfoKHR;
@@ -99574,16 +101707,18 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEndCodingInfoKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- VideoEndCodingInfoKHR( VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- {}
+ VULKAN_HPP_CONSTEXPR VideoEndCodingInfoKHR( VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR VideoEndCodingInfoKHR( VideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoEndCodingInfoKHR( VkVideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoEndCodingInfoKHR( *reinterpret_cast<VideoEndCodingInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
VideoEndCodingInfoKHR & operator=( VideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -99601,31 +101736,28 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoEndCodingInfoKHR &
- setFlags( VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoEndCodingInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoEndCodingInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoEndCodingInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoEndCodingInfoKHR *>( this );
}
- explicit operator VkVideoEndCodingInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoEndCodingInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoEndCodingInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -99638,7 +101770,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoEndCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
@@ -99656,12 +101788,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR ) == sizeof( VkVideoEndCodingInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR>::value,
- "VideoEndCodingInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eVideoEndCodingInfoKHR>
@@ -99679,16 +101805,29 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoFormatPropertiesKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoFormatPropertiesKHR(
- VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined ) VULKAN_HPP_NOEXCEPT
- : format( format_ )
- {}
+ VULKAN_HPP_CONSTEXPR VideoFormatPropertiesKHR( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::ComponentMapping componentMapping_ = {},
+ VULKAN_HPP_NAMESPACE::ImageCreateFlags imageCreateFlags_ = {},
+ VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D,
+ VULKAN_HPP_NAMESPACE::ImageTiling imageTiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsageFlags_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , format( format_ )
+ , componentMapping( componentMapping_ )
+ , imageCreateFlags( imageCreateFlags_ )
+ , imageType( imageType_ )
+ , imageTiling( imageTiling_ )
+ , imageUsageFlags( imageUsageFlags_ )
+ {
+ }
VULKAN_HPP_CONSTEXPR VideoFormatPropertiesKHR( VideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoFormatPropertiesKHR( VkVideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoFormatPropertiesKHR( *reinterpret_cast<VideoFormatPropertiesKHR const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
VideoFormatPropertiesKHR & operator=( VideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -99699,25 +101838,32 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- explicit operator VkVideoFormatPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoFormatPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoFormatPropertiesKHR *>( this );
}
- explicit operator VkVideoFormatPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoFormatPropertiesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoFormatPropertiesKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Format const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+ void * const &,
+ VULKAN_HPP_NAMESPACE::Format const &,
+ VULKAN_HPP_NAMESPACE::ComponentMapping const &,
+ VULKAN_HPP_NAMESPACE::ImageCreateFlags const &,
+ VULKAN_HPP_NAMESPACE::ImageType const &,
+ VULKAN_HPP_NAMESPACE::ImageTiling const &,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType, pNext, format );
+ return std::tie( sType, pNext, format, componentMapping, imageCreateFlags, imageType, imageTiling, imageUsageFlags );
}
# endif
@@ -99726,10 +101872,12 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoFormatPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( componentMapping == rhs.componentMapping ) &&
+ ( imageCreateFlags == rhs.imageCreateFlags ) && ( imageType == rhs.imageType ) && ( imageTiling == rhs.imageTiling ) &&
+ ( imageUsageFlags == rhs.imageUsageFlags );
# endif
}
@@ -99740,17 +101888,15 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoFormatPropertiesKHR;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoFormatPropertiesKHR;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::ComponentMapping componentMapping = {};
+ VULKAN_HPP_NAMESPACE::ImageCreateFlags imageCreateFlags = {};
+ VULKAN_HPP_NAMESPACE::ImageType imageType = VULKAN_HPP_NAMESPACE::ImageType::e1D;
+ VULKAN_HPP_NAMESPACE::ImageTiling imageTiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsageFlags = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR ) ==
- sizeof( VkVideoFormatPropertiesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>::value,
- "VideoFormatPropertiesKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eVideoFormatPropertiesKHR>
@@ -99760,231 +101906,262 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
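
Illustrative note (not part of the diff): VideoFormatPropertiesKHR now reports the component mapping, image type, tiling and usage flags alongside the format, so callers can filter the array returned by vkGetPhysicalDeviceVideoFormatPropertiesKHR. A hedged sketch; the selection criteria are placeholders.

#include <vector>
#include <vulkan/vulkan.hpp>

static vk::Format pickVideoFormat( std::vector<vk::VideoFormatPropertiesKHR> const & formats )
{
  for ( vk::VideoFormatPropertiesKHR const & p : formats )
  {
    // Prefer an optimally tiled 2D format; real code would also inspect imageUsageFlags and componentMapping.
    if ( p.imageTiling == vk::ImageTiling::eOptimal && p.imageType == vk::ImageType::e2D )
    {
      return p.format;
    }
  }
  return vk::Format::eUndefined;
}
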
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct VideoGetMemoryPropertiesKHR
+ struct VideoProfileInfoKHR
{
- using NativeType = VkVideoGetMemoryPropertiesKHR;
+ using NativeType = VkVideoProfileInfoKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoGetMemoryPropertiesKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoProfileInfoKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoGetMemoryPropertiesKHR(
- uint32_t memoryBindIndex_ = {},
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT
- : memoryBindIndex( memoryBindIndex_ )
- , pMemoryRequirements( pMemoryRequirements_ )
- {}
+ VULKAN_HPP_CONSTEXPR VideoProfileInfoKHR(
+ VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation_ = VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eNone,
+ VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling_ = {},
+ VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth_ = {},
+ VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , videoCodecOperation( videoCodecOperation_ )
+ , chromaSubsampling( chromaSubsampling_ )
+ , lumaBitDepth( lumaBitDepth_ )
+ , chromaBitDepth( chromaBitDepth_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- VideoGetMemoryPropertiesKHR( VideoGetMemoryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoProfileInfoKHR( VideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoGetMemoryPropertiesKHR( VkVideoGetMemoryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : VideoGetMemoryPropertiesKHR( *reinterpret_cast<VideoGetMemoryPropertiesKHR const *>( &rhs ) )
- {}
+ VideoProfileInfoKHR( VkVideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : VideoProfileInfoKHR( *reinterpret_cast<VideoProfileInfoKHR const *>( &rhs ) )
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoGetMemoryPropertiesKHR & operator=( VideoGetMemoryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoProfileInfoKHR & operator=( VideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoGetMemoryPropertiesKHR & operator=( VkVideoGetMemoryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VideoProfileInfoKHR & operator=( VkVideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR const *>( &rhs );
return *this;
}
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoGetMemoryPropertiesKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoGetMemoryPropertiesKHR &
- setMemoryBindIndex( uint32_t memoryBindIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR &
+ setVideoCodecOperation( VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation_ ) VULKAN_HPP_NOEXCEPT
{
- memoryBindIndex = memoryBindIndex_;
+ videoCodecOperation = videoCodecOperation_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR &
+ setChromaSubsampling( VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling_ ) VULKAN_HPP_NOEXCEPT
+ {
+ chromaSubsampling = chromaSubsampling_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & setLumaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth_ ) VULKAN_HPP_NOEXCEPT
+ {
+ lumaBitDepth = lumaBitDepth_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoGetMemoryPropertiesKHR &
- setPMemoryRequirements( VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & setChromaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth_ ) VULKAN_HPP_NOEXCEPT
{
- pMemoryRequirements = pMemoryRequirements_;
+ chromaBitDepth = chromaBitDepth_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoGetMemoryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkVideoGetMemoryPropertiesKHR *>( this );
+ return *reinterpret_cast<const VkVideoProfileInfoKHR *>( this );
}
- explicit operator VkVideoGetMemoryPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoProfileInfoKHR &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkVideoGetMemoryPropertiesKHR *>( this );
+ return *reinterpret_cast<VkVideoProfileInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
const void * const &,
- uint32_t const &,
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 * const &>
+ VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR const &,
+ VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR const &,
+ VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR const &,
+ VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType, pNext, memoryBindIndex, pMemoryRequirements );
+ return std::tie( sType, pNext, videoCodecOperation, chromaSubsampling, lumaBitDepth, chromaBitDepth );
}
# endif
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( VideoGetMemoryPropertiesKHR const & ) const = default;
+ auto operator<=>( VideoProfileInfoKHR const & ) const = default;
# else
- bool operator==( VideoGetMemoryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( VideoProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryBindIndex == rhs.memoryBindIndex ) &&
- ( pMemoryRequirements == rhs.pMemoryRequirements );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoCodecOperation == rhs.videoCodecOperation ) &&
+ ( chromaSubsampling == rhs.chromaSubsampling ) && ( lumaBitDepth == rhs.lumaBitDepth ) && ( chromaBitDepth == rhs.chromaBitDepth );
# endif
}
- bool operator!=( VideoGetMemoryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( VideoProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoGetMemoryPropertiesKHR;
- const void * pNext = {};
- uint32_t memoryBindIndex = {};
- VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoProfileInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation = VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eNone;
+ VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling = {};
+ VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth = {};
+ VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR ) ==
- sizeof( VkVideoGetMemoryPropertiesKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR>::value,
- "VideoGetMemoryPropertiesKHR is not nothrow_move_constructible!" );
template <>
- struct CppType<StructureType, StructureType::eVideoGetMemoryPropertiesKHR>
+ struct CppType<StructureType, StructureType::eVideoProfileInfoKHR>
{
- using Type = VideoGetMemoryPropertiesKHR;
+ using Type = VideoProfileInfoKHR;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
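
Illustrative note (not part of the diff): the renamed VideoProfileInfoKHR is filled with a codec operation plus chroma-subsampling and bit-depth selections. A sketch only; the e420 and e8 enumerators are assumptions, and a real profile would pick a codec-specific operation bit rather than eNone (the only value shown in this hunk).

#include <vulkan/vulkan.hpp>

static vk::VideoProfileInfoKHR exampleProfile()
{
  vk::VideoProfileInfoKHR profile{};
  profile.setVideoCodecOperation( vk::VideoCodecOperationFlagBitsKHR::eNone )  // placeholder; select an encode/decode codec bit in real use
         .setChromaSubsampling( vk::VideoChromaSubsamplingFlagBitsKHR::e420 )  // assumed enumerator
         .setLumaBitDepth( vk::VideoComponentBitDepthFlagBitsKHR::e8 )         // assumed enumerator
         .setChromaBitDepth( vk::VideoComponentBitDepthFlagBitsKHR::e8 );
  return profile;
}
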
#if defined( VK_ENABLE_BETA_EXTENSIONS )
- struct VideoQueueFamilyProperties2KHR
+ struct VideoProfileListInfoKHR
{
- using NativeType = VkVideoQueueFamilyProperties2KHR;
+ using NativeType = VkVideoProfileListInfoKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoQueueFamilyProperties2KHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoProfileListInfoKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoQueueFamilyProperties2KHR(
- VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations_ = {} ) VULKAN_HPP_NOEXCEPT
- : videoCodecOperations( videoCodecOperations_ )
- {}
+ VULKAN_HPP_CONSTEXPR VideoProfileListInfoKHR( uint32_t profileCount_ = {},
+ const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pProfiles_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , profileCount( profileCount_ )
+ , pProfiles( pProfiles_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- VideoQueueFamilyProperties2KHR( VideoQueueFamilyProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoProfileListInfoKHR( VideoProfileListInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoQueueFamilyProperties2KHR( VkVideoQueueFamilyProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : VideoQueueFamilyProperties2KHR( *reinterpret_cast<VideoQueueFamilyProperties2KHR const *>( &rhs ) )
- {}
-# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+ VideoProfileListInfoKHR( VkVideoProfileListInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : VideoProfileListInfoKHR( *reinterpret_cast<VideoProfileListInfoKHR const *>( &rhs ) )
+ {
+ }
- VideoQueueFamilyProperties2KHR &
- operator=( VideoQueueFamilyProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ VideoProfileListInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR> const & profiles_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ ), profileCount( static_cast<uint32_t>( profiles_.size() ) ), pProfiles( profiles_.data() )
+ {
+ }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ VideoProfileListInfoKHR & operator=( VideoProfileListInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoQueueFamilyProperties2KHR & operator=( VkVideoQueueFamilyProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VideoProfileListInfoKHR & operator=( VkVideoProfileListInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoQueueFamilyProperties2KHR const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR const *>( &rhs );
return *this;
}
# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 VideoQueueFamilyProperties2KHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoProfileListInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoQueueFamilyProperties2KHR & setVideoCodecOperations(
- VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoProfileListInfoKHR & setProfileCount( uint32_t profileCount_ ) VULKAN_HPP_NOEXCEPT
{
- videoCodecOperations = videoCodecOperations_;
+ profileCount = profileCount_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 VideoProfileListInfoKHR & setPProfiles( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pProfiles_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pProfiles = pProfiles_;
return *this;
}
-# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoQueueFamilyProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+ VideoProfileListInfoKHR &
+ setProfiles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR> const & profiles_ ) VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkVideoQueueFamilyProperties2KHR *>( this );
+ profileCount = static_cast<uint32_t>( profiles_.size() );
+ pProfiles = profiles_.data();
+ return *this;
}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoQueueFamilyProperties2KHR &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoProfileListInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkVideoQueueFamilyProperties2KHR *>( this );
+ return *reinterpret_cast<const VkVideoProfileListInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+ operator VkVideoProfileListInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkVideoProfileListInfoKHR *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- void * const &,
- VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType, pNext, videoCodecOperations );
+ return std::tie( sType, pNext, profileCount, pProfiles );
}
# endif
# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( VideoQueueFamilyProperties2KHR const & ) const = default;
+ auto operator<=>( VideoProfileListInfoKHR const & ) const = default;
# else
- bool operator==( VideoQueueFamilyProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( VideoProfileListInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoCodecOperations == rhs.videoCodecOperations );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( profileCount == rhs.profileCount ) && ( pProfiles == rhs.pProfiles );
# endif
}
- bool operator!=( VideoQueueFamilyProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( VideoProfileListInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoQueueFamilyProperties2KHR;
- void * pNext = {};
- VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoProfileListInfoKHR;
+ const void * pNext = {};
+ uint32_t profileCount = {};
+ const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pProfiles = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoQueueFamilyProperties2KHR ) ==
- sizeof( VkVideoQueueFamilyProperties2KHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoQueueFamilyProperties2KHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoQueueFamilyProperties2KHR>::value,
- "VideoQueueFamilyProperties2KHR is not nothrow_move_constructible!" );
template <>
- struct CppType<StructureType, StructureType::eVideoQueueFamilyProperties2KHR>
+ struct CppType<StructureType, StructureType::eVideoProfileListInfoKHR>
{
- using Type = VideoQueueFamilyProperties2KHR;
+ using Type = VideoProfileListInfoKHR;
};
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
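
Illustrative note (not part of the diff): VideoProfileListInfoKHR appears to take over as the list of profiles chained into resource creation when an image or buffer is shared with video queues. A minimal sketch under the same assumptions as above.

#include <vulkan/vulkan.hpp>

static void exampleProfileList( vk::VideoProfileInfoKHR const & profile )
{
  vk::VideoProfileListInfoKHR list{};
  list.setProfiles( profile );  // ArrayProxy setter fills profileCount / pProfiles; 'profile' must outlive any use of 'list'

  vk::ImageCreateInfo imageInfo{};
  imageInfo.setPNext( &list );  // e.g. chained when creating a video-compatible image
  (void) imageInfo;
}
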
@@ -99997,31 +102174,35 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionCreateInfoKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoSessionCreateInfoKHR(
- uint32_t queueFamilyIndex_ = {},
- VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags_ = {},
- const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile_ = {},
- VULKAN_HPP_NAMESPACE::Format pictureFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent_ = {},
- VULKAN_HPP_NAMESPACE::Format referencePicturesFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
- uint32_t maxReferencePicturesSlotsCount_ = {},
- uint32_t maxReferencePicturesActiveCount_ = {} ) VULKAN_HPP_NOEXCEPT
- : queueFamilyIndex( queueFamilyIndex_ )
+ VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR( uint32_t queueFamilyIndex_ = {},
+ VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags_ = {},
+ const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile_ = {},
+ VULKAN_HPP_NAMESPACE::Format pictureFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent_ = {},
+ VULKAN_HPP_NAMESPACE::Format referencePictureFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ uint32_t maxDpbSlots_ = {},
+ uint32_t maxActiveReferencePictures_ = {},
+ const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdHeaderVersion_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , queueFamilyIndex( queueFamilyIndex_ )
, flags( flags_ )
, pVideoProfile( pVideoProfile_ )
, pictureFormat( pictureFormat_ )
, maxCodedExtent( maxCodedExtent_ )
- , referencePicturesFormat( referencePicturesFormat_ )
- , maxReferencePicturesSlotsCount( maxReferencePicturesSlotsCount_ )
- , maxReferencePicturesActiveCount( maxReferencePicturesActiveCount_ )
- {}
+ , referencePictureFormat( referencePictureFormat_ )
+ , maxDpbSlots( maxDpbSlots_ )
+ , maxActiveReferencePictures( maxActiveReferencePictures_ )
+ , pStdHeaderVersion( pStdHeaderVersion_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- VideoSessionCreateInfoKHR( VideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR( VideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoSessionCreateInfoKHR( VkVideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoSessionCreateInfoKHR( *reinterpret_cast<VideoSessionCreateInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
VideoSessionCreateInfoKHR & operator=( VideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -100039,74 +102220,73 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR &
- setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
{
queueFamilyIndex = queueFamilyIndex_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR &
- setFlags( VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR &
- setPVideoProfile( const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setPVideoProfile( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile_ ) VULKAN_HPP_NOEXCEPT
{
pVideoProfile = pVideoProfile_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR &
- setPictureFormat( VULKAN_HPP_NAMESPACE::Format pictureFormat_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setPictureFormat( VULKAN_HPP_NAMESPACE::Format pictureFormat_ ) VULKAN_HPP_NOEXCEPT
{
pictureFormat = pictureFormat_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR &
- setMaxCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & maxCodedExtent_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setMaxCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & maxCodedExtent_ ) VULKAN_HPP_NOEXCEPT
{
maxCodedExtent = maxCodedExtent_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR &
- setReferencePicturesFormat( VULKAN_HPP_NAMESPACE::Format referencePicturesFormat_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setReferencePictureFormat( VULKAN_HPP_NAMESPACE::Format referencePictureFormat_ ) VULKAN_HPP_NOEXCEPT
{
- referencePicturesFormat = referencePicturesFormat_;
+ referencePictureFormat = referencePictureFormat_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR &
- setMaxReferencePicturesSlotsCount( uint32_t maxReferencePicturesSlotsCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setMaxDpbSlots( uint32_t maxDpbSlots_ ) VULKAN_HPP_NOEXCEPT
{
- maxReferencePicturesSlotsCount = maxReferencePicturesSlotsCount_;
+ maxDpbSlots = maxDpbSlots_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setMaxActiveReferencePictures( uint32_t maxActiveReferencePictures_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxActiveReferencePictures = maxActiveReferencePictures_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR &
- setMaxReferencePicturesActiveCount( uint32_t maxReferencePicturesActiveCount_ ) VULKAN_HPP_NOEXCEPT
+ setPStdHeaderVersion( const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdHeaderVersion_ ) VULKAN_HPP_NOEXCEPT
{
- maxReferencePicturesActiveCount = maxReferencePicturesActiveCount_;
+ pStdHeaderVersion = pStdHeaderVersion_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoSessionCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoSessionCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( this );
}
- explicit operator VkVideoSessionCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoSessionCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoSessionCreateInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -100114,12 +102294,13 @@ namespace VULKAN_HPP_NAMESPACE
const void * const &,
uint32_t const &,
VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR const &,
- const VULKAN_HPP_NAMESPACE::VideoProfileKHR * const &,
+ const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * const &,
VULKAN_HPP_NAMESPACE::Format const &,
VULKAN_HPP_NAMESPACE::Extent2D const &,
VULKAN_HPP_NAMESPACE::Format const &,
uint32_t const &,
- uint32_t const &>
+ uint32_t const &,
+ const VULKAN_HPP_NAMESPACE::ExtensionProperties * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -100130,9 +102311,10 @@ namespace VULKAN_HPP_NAMESPACE
pVideoProfile,
pictureFormat,
maxCodedExtent,
- referencePicturesFormat,
- maxReferencePicturesSlotsCount,
- maxReferencePicturesActiveCount );
+ referencePictureFormat,
+ maxDpbSlots,
+ maxActiveReferencePictures,
+ pStdHeaderVersion );
}
# endif
@@ -100141,15 +102323,13 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoSessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) &&
- ( flags == rhs.flags ) && ( pVideoProfile == rhs.pVideoProfile ) &&
- ( pictureFormat == rhs.pictureFormat ) && ( maxCodedExtent == rhs.maxCodedExtent ) &&
- ( referencePicturesFormat == rhs.referencePicturesFormat ) &&
- ( maxReferencePicturesSlotsCount == rhs.maxReferencePicturesSlotsCount ) &&
- ( maxReferencePicturesActiveCount == rhs.maxReferencePicturesActiveCount );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) && ( flags == rhs.flags ) &&
+ ( pVideoProfile == rhs.pVideoProfile ) && ( pictureFormat == rhs.pictureFormat ) && ( maxCodedExtent == rhs.maxCodedExtent ) &&
+ ( referencePictureFormat == rhs.referencePictureFormat ) && ( maxDpbSlots == rhs.maxDpbSlots ) &&
+ ( maxActiveReferencePictures == rhs.maxActiveReferencePictures ) && ( pStdHeaderVersion == rhs.pStdHeaderVersion );
# endif
}
@@ -100160,24 +102340,18 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionCreateInfoKHR;
- const void * pNext = {};
- uint32_t queueFamilyIndex = {};
- VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags = {};
- const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile = {};
- VULKAN_HPP_NAMESPACE::Format pictureFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
- VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent = {};
- VULKAN_HPP_NAMESPACE::Format referencePicturesFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
- uint32_t maxReferencePicturesSlotsCount = {};
- uint32_t maxReferencePicturesActiveCount = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionCreateInfoKHR;
+ const void * pNext = {};
+ uint32_t queueFamilyIndex = {};
+ VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags = {};
+ const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile = {};
+ VULKAN_HPP_NAMESPACE::Format pictureFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent = {};
+ VULKAN_HPP_NAMESPACE::Format referencePictureFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ uint32_t maxDpbSlots = {};
+ uint32_t maxActiveReferencePictures = {};
+ const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdHeaderVersion = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR ) ==
- sizeof( VkVideoSessionCreateInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR>::value,
- "VideoSessionCreateInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eVideoSessionCreateInfoKHR>
@@ -100187,35 +102361,124 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ struct VideoSessionMemoryRequirementsKHR
+ {
+ using NativeType = VkVideoSessionMemoryRequirementsKHR;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionMemoryRequirementsKHR;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR VideoSessionMemoryRequirementsKHR( uint32_t memoryBindIndex_ = {},
+ VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , memoryBindIndex( memoryBindIndex_ )
+ , memoryRequirements( memoryRequirements_ )
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR VideoSessionMemoryRequirementsKHR( VideoSessionMemoryRequirementsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ VideoSessionMemoryRequirementsKHR( VkVideoSessionMemoryRequirementsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : VideoSessionMemoryRequirementsKHR( *reinterpret_cast<VideoSessionMemoryRequirementsKHR const *>( &rhs ) )
+ {
+ }
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+ VideoSessionMemoryRequirementsKHR & operator=( VideoSessionMemoryRequirementsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ VideoSessionMemoryRequirementsKHR & operator=( VkVideoSessionMemoryRequirementsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR const *>( &rhs );
+ return *this;
+ }
+
+ operator VkVideoSessionMemoryRequirementsKHR const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkVideoSessionMemoryRequirementsKHR *>( this );
+ }
+
+ operator VkVideoSessionMemoryRequirementsKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( this );
+ }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::MemoryRequirements const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, memoryBindIndex, memoryRequirements );
+ }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( VideoSessionMemoryRequirementsKHR const & ) const = default;
+# else
+ bool operator==( VideoSessionMemoryRequirementsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryBindIndex == rhs.memoryBindIndex ) && ( memoryRequirements == rhs.memoryRequirements );
+# endif
+ }
+
+ bool operator!=( VideoSessionMemoryRequirementsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+# endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionMemoryRequirementsKHR;
+ void * pNext = {};
+ uint32_t memoryBindIndex = {};
+ VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eVideoSessionMemoryRequirementsKHR>
+ {
+ using Type = VideoSessionMemoryRequirementsKHR;
+ };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
struct VideoSessionParametersCreateInfoKHR
{
using NativeType = VkVideoSessionParametersCreateInfoKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eVideoSessionParametersCreateInfoKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionParametersCreateInfoKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoSessionParametersCreateInfoKHR(
- VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate_ = {},
- VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ = {} ) VULKAN_HPP_NOEXCEPT
- : videoSessionParametersTemplate( videoSessionParametersTemplate_ )
+ VULKAN_HPP_CONSTEXPR VideoSessionParametersCreateInfoKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagsKHR flags_ = {},
+ VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate_ = {},
+ VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
+ , videoSessionParametersTemplate( videoSessionParametersTemplate_ )
, videoSession( videoSession_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR VideoSessionParametersCreateInfoKHR( VideoSessionParametersCreateInfoKHR const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoSessionParametersCreateInfoKHR( VideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoSessionParametersCreateInfoKHR( VkVideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoSessionParametersCreateInfoKHR( *reinterpret_cast<VideoSessionParametersCreateInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoSessionParametersCreateInfoKHR &
- operator=( VideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoSessionParametersCreateInfoKHR & operator=( VideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoSessionParametersCreateInfoKHR &
- operator=( VkVideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VideoSessionParametersCreateInfoKHR & operator=( VkVideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const *>( &rhs );
return *this;
@@ -100228,43 +102491,50 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & setVideoSessionParametersTemplate(
- VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR &
+ setFlags( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
- videoSessionParametersTemplate = videoSessionParametersTemplate_;
+ flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR &
- setVideoSession( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ ) VULKAN_HPP_NOEXCEPT
+ setVideoSessionParametersTemplate( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate_ ) VULKAN_HPP_NOEXCEPT
+ {
+ videoSessionParametersTemplate = videoSessionParametersTemplate_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & setVideoSession( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ ) VULKAN_HPP_NOEXCEPT
{
videoSession = videoSession_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoSessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoSessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( this );
}
- explicit operator VkVideoSessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoSessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoSessionParametersCreateInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
const void * const &,
+ VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagsKHR const &,
VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR const &,
VULKAN_HPP_NAMESPACE::VideoSessionKHR const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType, pNext, videoSessionParametersTemplate, videoSession );
+ return std::tie( sType, pNext, flags, videoSessionParametersTemplate, videoSession );
}
# endif
@@ -100273,12 +102543,11 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( videoSessionParametersTemplate == rhs.videoSessionParametersTemplate ) &&
- ( videoSession == rhs.videoSession );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+ ( videoSessionParametersTemplate == rhs.videoSessionParametersTemplate ) && ( videoSession == rhs.videoSession );
# endif
}
@@ -100289,19 +102558,12 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionParametersCreateInfoKHR;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate = {};
- VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionParametersCreateInfoKHR;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagsKHR flags = {};
+ VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate = {};
+ VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR ) ==
- sizeof( VkVideoSessionParametersCreateInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR>::value,
- "VideoSessionParametersCreateInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eVideoSessionParametersCreateInfoKHR>
@@ -100316,27 +102578,26 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkVideoSessionParametersUpdateInfoKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eVideoSessionParametersUpdateInfoKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionParametersUpdateInfoKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR VideoSessionParametersUpdateInfoKHR( uint32_t updateSequenceCount_ = {} ) VULKAN_HPP_NOEXCEPT
- : updateSequenceCount( updateSequenceCount_ )
- {}
+ VULKAN_HPP_CONSTEXPR VideoSessionParametersUpdateInfoKHR( uint32_t updateSequenceCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , updateSequenceCount( updateSequenceCount_ )
+ {
+ }
- VULKAN_HPP_CONSTEXPR VideoSessionParametersUpdateInfoKHR( VideoSessionParametersUpdateInfoKHR const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR VideoSessionParametersUpdateInfoKHR( VideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
VideoSessionParametersUpdateInfoKHR( VkVideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: VideoSessionParametersUpdateInfoKHR( *reinterpret_cast<VideoSessionParametersUpdateInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- VideoSessionParametersUpdateInfoKHR &
- operator=( VideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VideoSessionParametersUpdateInfoKHR & operator=( VideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- VideoSessionParametersUpdateInfoKHR &
- operator=( VkVideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ VideoSessionParametersUpdateInfoKHR & operator=( VkVideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR const *>( &rhs );
return *this;
@@ -100349,25 +102610,24 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersUpdateInfoKHR &
- setUpdateSequenceCount( uint32_t updateSequenceCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersUpdateInfoKHR & setUpdateSequenceCount( uint32_t updateSequenceCount_ ) VULKAN_HPP_NOEXCEPT
{
updateSequenceCount = updateSequenceCount_;
return *this;
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkVideoSessionParametersUpdateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkVideoSessionParametersUpdateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( this );
}
- explicit operator VkVideoSessionParametersUpdateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkVideoSessionParametersUpdateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkVideoSessionParametersUpdateInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -100384,7 +102644,7 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( VideoSessionParametersUpdateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( updateSequenceCount == rhs.updateSequenceCount );
@@ -100402,14 +102662,6 @@ namespace VULKAN_HPP_NAMESPACE
const void * pNext = {};
uint32_t updateSequenceCount = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR ) ==
- sizeof( VkVideoSessionParametersUpdateInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR>::value,
- "VideoSessionParametersUpdateInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eVideoSessionParametersUpdateInfoKHR>
@@ -100429,18 +102681,21 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ = {},
struct wl_display * display_ = {},
- struct wl_surface * surface_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ struct wl_surface * surface_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, display( display_ )
, surface( surface_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- WaylandSurfaceCreateInfoKHR( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: WaylandSurfaceCreateInfoKHR( *reinterpret_cast<WaylandSurfaceCreateInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
WaylandSurfaceCreateInfoKHR & operator=( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -100458,8 +102713,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR &
- setFlags( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -100478,17 +102732,17 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkWaylandSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkWaylandSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( this );
}
- explicit operator VkWaylandSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkWaylandSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkWaylandSurfaceCreateInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -100509,11 +102763,10 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( WaylandSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( display == rhs.display ) &&
- ( surface == rhs.surface );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( display == rhs.display ) && ( surface == rhs.surface );
# endif
}
@@ -100530,14 +102783,6 @@ namespace VULKAN_HPP_NAMESPACE
struct wl_display * display = {};
struct wl_surface * surface = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR ) ==
- sizeof( VkWaylandSurfaceCreateInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR>::value,
- "WaylandSurfaceCreateInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eWaylandSurfaceCreateInfoKHR>
@@ -100552,43 +102797,44 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkWin32KeyedMutexAcquireReleaseInfoKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- Win32KeyedMutexAcquireReleaseInfoKHR( uint32_t acquireCount_ = {},
- const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ = {},
- const uint64_t * pAcquireKeys_ = {},
- const uint32_t * pAcquireTimeouts_ = {},
- uint32_t releaseCount_ = {},
- const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ = {},
- const uint64_t * pReleaseKeys_ = {} ) VULKAN_HPP_NOEXCEPT
- : acquireCount( acquireCount_ )
+ VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( uint32_t acquireCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ = {},
+ const uint64_t * pAcquireKeys_ = {},
+ const uint32_t * pAcquireTimeouts_ = {},
+ uint32_t releaseCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ = {},
+ const uint64_t * pReleaseKeys_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , acquireCount( acquireCount_ )
, pAcquireSyncs( pAcquireSyncs_ )
, pAcquireKeys( pAcquireKeys_ )
, pAcquireTimeouts( pAcquireTimeouts_ )
, releaseCount( releaseCount_ )
, pReleaseSyncs( pReleaseSyncs_ )
, pReleaseKeys( pReleaseKeys_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
Win32KeyedMutexAcquireReleaseInfoKHR( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: Win32KeyedMutexAcquireReleaseInfoKHR( *reinterpret_cast<Win32KeyedMutexAcquireReleaseInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- Win32KeyedMutexAcquireReleaseInfoKHR(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ = {},
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeouts_ = {},
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const &
- releaseSyncs_ = {},
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ = {} )
- : acquireCount( static_cast<uint32_t>( acquireSyncs_.size() ) )
+ Win32KeyedMutexAcquireReleaseInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ = {},
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeouts_ = {},
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ = {},
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , acquireCount( static_cast<uint32_t>( acquireSyncs_.size() ) )
, pAcquireSyncs( acquireSyncs_.data() )
, pAcquireKeys( acquireKeys_.data() )
, pAcquireTimeouts( acquireTimeouts_.data() )
@@ -100603,21 +102849,18 @@ namespace VULKAN_HPP_NAMESPACE
# else
if ( acquireSyncs_.size() != acquireKeys_.size() )
{
- throw LogicError(
- VULKAN_HPP_NAMESPACE_STRING
- "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireKeys_.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+ "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireKeys_.size()" );
}
if ( acquireSyncs_.size() != acquireTimeouts_.size() )
{
- throw LogicError(
- VULKAN_HPP_NAMESPACE_STRING
- "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireTimeouts_.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+ "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireTimeouts_.size()" );
}
if ( acquireKeys_.size() != acquireTimeouts_.size() )
{
- throw LogicError(
- VULKAN_HPP_NAMESPACE_STRING
- "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireKeys_.size() != acquireTimeouts_.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+ "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireKeys_.size() != acquireTimeouts_.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
@@ -100626,20 +102869,17 @@ namespace VULKAN_HPP_NAMESPACE
# else
if ( releaseSyncs_.size() != releaseKeys_.size() )
{
- throw LogicError(
- VULKAN_HPP_NAMESPACE_STRING
- "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: releaseSyncs_.size() != releaseKeys_.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+ "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: releaseSyncs_.size() != releaseKeys_.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- Win32KeyedMutexAcquireReleaseInfoKHR &
- operator=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ Win32KeyedMutexAcquireReleaseInfoKHR & operator=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- Win32KeyedMutexAcquireReleaseInfoKHR &
- operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ Win32KeyedMutexAcquireReleaseInfoKHR & operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR const *>( &rhs );
return *this;
@@ -100652,24 +102892,22 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR &
- setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT
{
acquireCount = acquireCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR &
- setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
+ setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
{
pAcquireSyncs = pAcquireSyncs_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireSyncs(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_ )
- VULKAN_HPP_NOEXCEPT
+ Win32KeyedMutexAcquireReleaseInfoKHR &
+ setAcquireSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_ ) VULKAN_HPP_NOEXCEPT
{
acquireCount = static_cast<uint32_t>( acquireSyncs_.size() );
pAcquireSyncs = acquireSyncs_.data();
@@ -100677,16 +102915,15 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR &
- setPAcquireKeys( const uint64_t * pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireKeys( const uint64_t * pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
{
pAcquireKeys = pAcquireKeys_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireKeys(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT
+ Win32KeyedMutexAcquireReleaseInfoKHR &
+ setAcquireKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT
{
acquireCount = static_cast<uint32_t>( acquireKeys_.size() );
pAcquireKeys = acquireKeys_.data();
@@ -100694,16 +102931,15 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR &
- setPAcquireTimeouts( const uint32_t * pAcquireTimeouts_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireTimeouts( const uint32_t * pAcquireTimeouts_ ) VULKAN_HPP_NOEXCEPT
{
pAcquireTimeouts = pAcquireTimeouts_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireTimeouts(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeouts_ ) VULKAN_HPP_NOEXCEPT
+ Win32KeyedMutexAcquireReleaseInfoKHR &
+ setAcquireTimeouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeouts_ ) VULKAN_HPP_NOEXCEPT
{
acquireCount = static_cast<uint32_t>( acquireTimeouts_.size() );
pAcquireTimeouts = acquireTimeouts_.data();
@@ -100711,24 +102947,22 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR &
- setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT
{
releaseCount = releaseCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR &
- setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
+ setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
{
pReleaseSyncs = pReleaseSyncs_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseSyncs(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ )
- VULKAN_HPP_NOEXCEPT
+ Win32KeyedMutexAcquireReleaseInfoKHR &
+ setReleaseSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ ) VULKAN_HPP_NOEXCEPT
{
releaseCount = static_cast<uint32_t>( releaseSyncs_.size() );
pReleaseSyncs = releaseSyncs_.data();
@@ -100736,16 +102970,15 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR &
- setPReleaseKeys( const uint64_t * pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseKeys( const uint64_t * pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
{
pReleaseKeys = pReleaseKeys_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseKeys(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT
+ Win32KeyedMutexAcquireReleaseInfoKHR &
+ setReleaseKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT
{
releaseCount = static_cast<uint32_t>( releaseKeys_.size() );
pReleaseKeys = releaseKeys_.data();
@@ -100754,17 +102987,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkWin32KeyedMutexAcquireReleaseInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkWin32KeyedMutexAcquireReleaseInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoKHR *>( this );
}
- explicit operator VkWin32KeyedMutexAcquireReleaseInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkWin32KeyedMutexAcquireReleaseInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -100780,15 +103013,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- acquireCount,
- pAcquireSyncs,
- pAcquireKeys,
- pAcquireTimeouts,
- releaseCount,
- pReleaseSyncs,
- pReleaseKeys );
+ return std::tie( sType, pNext, acquireCount, pAcquireSyncs, pAcquireKeys, pAcquireTimeouts, releaseCount, pReleaseSyncs, pReleaseKeys );
}
# endif
@@ -100797,12 +103022,11 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( acquireCount == rhs.acquireCount ) &&
- ( pAcquireSyncs == rhs.pAcquireSyncs ) && ( pAcquireKeys == rhs.pAcquireKeys ) &&
- ( pAcquireTimeouts == rhs.pAcquireTimeouts ) && ( releaseCount == rhs.releaseCount ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( acquireCount == rhs.acquireCount ) && ( pAcquireSyncs == rhs.pAcquireSyncs ) &&
+ ( pAcquireKeys == rhs.pAcquireKeys ) && ( pAcquireTimeouts == rhs.pAcquireTimeouts ) && ( releaseCount == rhs.releaseCount ) &&
( pReleaseSyncs == rhs.pReleaseSyncs ) && ( pReleaseKeys == rhs.pReleaseKeys );
# endif
}
@@ -100824,14 +103048,6 @@ namespace VULKAN_HPP_NAMESPACE
const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs = {};
const uint64_t * pReleaseKeys = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR ) ==
- sizeof( VkWin32KeyedMutexAcquireReleaseInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR>::value,
- "Win32KeyedMutexAcquireReleaseInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR>
@@ -100846,43 +103062,44 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkWin32KeyedMutexAcquireReleaseInfoNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR
- Win32KeyedMutexAcquireReleaseInfoNV( uint32_t acquireCount_ = {},
- const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ = {},
- const uint64_t * pAcquireKeys_ = {},
- const uint32_t * pAcquireTimeoutMilliseconds_ = {},
- uint32_t releaseCount_ = {},
- const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ = {},
- const uint64_t * pReleaseKeys_ = {} ) VULKAN_HPP_NOEXCEPT
- : acquireCount( acquireCount_ )
+ VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( uint32_t acquireCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ = {},
+ const uint64_t * pAcquireKeys_ = {},
+ const uint32_t * pAcquireTimeoutMilliseconds_ = {},
+ uint32_t releaseCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ = {},
+ const uint64_t * pReleaseKeys_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , acquireCount( acquireCount_ )
, pAcquireSyncs( pAcquireSyncs_ )
, pAcquireKeys( pAcquireKeys_ )
, pAcquireTimeoutMilliseconds( pAcquireTimeoutMilliseconds_ )
, releaseCount( releaseCount_ )
, pReleaseSyncs( pReleaseSyncs_ )
, pReleaseKeys( pReleaseKeys_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( Win32KeyedMutexAcquireReleaseInfoNV const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
Win32KeyedMutexAcquireReleaseInfoNV( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: Win32KeyedMutexAcquireReleaseInfoNV( *reinterpret_cast<Win32KeyedMutexAcquireReleaseInfoNV const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- Win32KeyedMutexAcquireReleaseInfoNV(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ = {},
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeoutMilliseconds_ = {},
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const &
- releaseSyncs_ = {},
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ = {} )
- : acquireCount( static_cast<uint32_t>( acquireSyncs_.size() ) )
+ Win32KeyedMutexAcquireReleaseInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ = {},
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeoutMilliseconds_ = {},
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ = {},
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , acquireCount( static_cast<uint32_t>( acquireSyncs_.size() ) )
, pAcquireSyncs( acquireSyncs_.data() )
, pAcquireKeys( acquireKeys_.data() )
, pAcquireTimeoutMilliseconds( acquireTimeoutMilliseconds_.data() )
@@ -100897,9 +103114,8 @@ namespace VULKAN_HPP_NAMESPACE
# else
if ( acquireSyncs_.size() != acquireKeys_.size() )
{
- throw LogicError(
- VULKAN_HPP_NAMESPACE_STRING
- "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireKeys_.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+ "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireKeys_.size()" );
}
if ( acquireSyncs_.size() != acquireTimeoutMilliseconds_.size() )
{
@@ -100920,20 +103136,17 @@ namespace VULKAN_HPP_NAMESPACE
# else
if ( releaseSyncs_.size() != releaseKeys_.size() )
{
- throw LogicError(
- VULKAN_HPP_NAMESPACE_STRING
- "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: releaseSyncs_.size() != releaseKeys_.size()" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+ "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: releaseSyncs_.size() != releaseKeys_.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- Win32KeyedMutexAcquireReleaseInfoNV &
- operator=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ Win32KeyedMutexAcquireReleaseInfoNV & operator=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- Win32KeyedMutexAcquireReleaseInfoNV &
- operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ Win32KeyedMutexAcquireReleaseInfoNV & operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV const *>( &rhs );
return *this;
@@ -100946,24 +103159,22 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV &
- setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT
{
acquireCount = acquireCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV &
- setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
+ setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
{
pAcquireSyncs = pAcquireSyncs_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- Win32KeyedMutexAcquireReleaseInfoNV & setAcquireSyncs(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_ )
- VULKAN_HPP_NOEXCEPT
+ Win32KeyedMutexAcquireReleaseInfoNV &
+ setAcquireSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_ ) VULKAN_HPP_NOEXCEPT
{
acquireCount = static_cast<uint32_t>( acquireSyncs_.size() );
pAcquireSyncs = acquireSyncs_.data();
@@ -100971,16 +103182,15 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV &
- setPAcquireKeys( const uint64_t * pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireKeys( const uint64_t * pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
{
pAcquireKeys = pAcquireKeys_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- Win32KeyedMutexAcquireReleaseInfoNV & setAcquireKeys(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT
+ Win32KeyedMutexAcquireReleaseInfoNV &
+ setAcquireKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT
{
acquireCount = static_cast<uint32_t>( acquireKeys_.size() );
pAcquireKeys = acquireKeys_.data();
@@ -100989,16 +103199,15 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV &
- setPAcquireTimeoutMilliseconds( const uint32_t * pAcquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT
+ setPAcquireTimeoutMilliseconds( const uint32_t * pAcquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT
{
pAcquireTimeoutMilliseconds = pAcquireTimeoutMilliseconds_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- Win32KeyedMutexAcquireReleaseInfoNV & setAcquireTimeoutMilliseconds(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeoutMilliseconds_ )
- VULKAN_HPP_NOEXCEPT
+ Win32KeyedMutexAcquireReleaseInfoNV &
+ setAcquireTimeoutMilliseconds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT
{
acquireCount = static_cast<uint32_t>( acquireTimeoutMilliseconds_.size() );
pAcquireTimeoutMilliseconds = acquireTimeoutMilliseconds_.data();
@@ -101006,24 +103215,22 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV &
- setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT
{
releaseCount = releaseCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV &
- setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
+ setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
{
pReleaseSyncs = pReleaseSyncs_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- Win32KeyedMutexAcquireReleaseInfoNV & setReleaseSyncs(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ )
- VULKAN_HPP_NOEXCEPT
+ Win32KeyedMutexAcquireReleaseInfoNV &
+ setReleaseSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ ) VULKAN_HPP_NOEXCEPT
{
releaseCount = static_cast<uint32_t>( releaseSyncs_.size() );
pReleaseSyncs = releaseSyncs_.data();
@@ -101031,16 +103238,15 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV &
- setPReleaseKeys( const uint64_t * pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseKeys( const uint64_t * pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
{
pReleaseKeys = pReleaseKeys_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- Win32KeyedMutexAcquireReleaseInfoNV & setReleaseKeys(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT
+ Win32KeyedMutexAcquireReleaseInfoNV &
+ setReleaseKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT
{
releaseCount = static_cast<uint32_t>( releaseKeys_.size() );
pReleaseKeys = releaseKeys_.data();
@@ -101049,17 +103255,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkWin32KeyedMutexAcquireReleaseInfoNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkWin32KeyedMutexAcquireReleaseInfoNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoNV *>( this );
}
- explicit operator VkWin32KeyedMutexAcquireReleaseInfoNV &() VULKAN_HPP_NOEXCEPT
+ operator VkWin32KeyedMutexAcquireReleaseInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoNV *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -101075,15 +103281,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- acquireCount,
- pAcquireSyncs,
- pAcquireKeys,
- pAcquireTimeoutMilliseconds,
- releaseCount,
- pReleaseSyncs,
- pReleaseKeys );
+ return std::tie( sType, pNext, acquireCount, pAcquireSyncs, pAcquireKeys, pAcquireTimeoutMilliseconds, releaseCount, pReleaseSyncs, pReleaseKeys );
}
# endif
@@ -101092,14 +103290,12 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( acquireCount == rhs.acquireCount ) &&
- ( pAcquireSyncs == rhs.pAcquireSyncs ) && ( pAcquireKeys == rhs.pAcquireKeys ) &&
- ( pAcquireTimeoutMilliseconds == rhs.pAcquireTimeoutMilliseconds ) &&
- ( releaseCount == rhs.releaseCount ) && ( pReleaseSyncs == rhs.pReleaseSyncs ) &&
- ( pReleaseKeys == rhs.pReleaseKeys );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( acquireCount == rhs.acquireCount ) && ( pAcquireSyncs == rhs.pAcquireSyncs ) &&
+ ( pAcquireKeys == rhs.pAcquireKeys ) && ( pAcquireTimeoutMilliseconds == rhs.pAcquireTimeoutMilliseconds ) &&
+ ( releaseCount == rhs.releaseCount ) && ( pReleaseSyncs == rhs.pReleaseSyncs ) && ( pReleaseKeys == rhs.pReleaseKeys );
# endif
}
@@ -101110,24 +103306,16 @@ namespace VULKAN_HPP_NAMESPACE
# endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
- const void * pNext = {};
- uint32_t acquireCount = {};
- const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs = {};
- const uint64_t * pAcquireKeys = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
+ const void * pNext = {};
+ uint32_t acquireCount = {};
+ const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs = {};
+ const uint64_t * pAcquireKeys = {};
const uint32_t * pAcquireTimeoutMilliseconds = {};
uint32_t releaseCount = {};
const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs = {};
const uint64_t * pReleaseKeys = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV ) ==
- sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV>::value,
- "Win32KeyedMutexAcquireReleaseInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eWin32KeyedMutexAcquireReleaseInfoNV>
@@ -101147,18 +103335,21 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ = {},
HINSTANCE hinstance_ = {},
- HWND hwnd_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ HWND hwnd_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, hinstance( hinstance_ )
, hwnd( hwnd_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR
- Win32SurfaceCreateInfoKHR( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
Win32SurfaceCreateInfoKHR( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: Win32SurfaceCreateInfoKHR( *reinterpret_cast<Win32SurfaceCreateInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
Win32SurfaceCreateInfoKHR & operator=( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -101176,8 +103367,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR &
- setFlags( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -101196,17 +103386,17 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkWin32SurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkWin32SurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( this );
}
- explicit operator VkWin32SurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkWin32SurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkWin32SurfaceCreateInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -101227,11 +103417,10 @@ namespace VULKAN_HPP_NAMESPACE
# else
bool operator==( Win32SurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( hinstance == rhs.hinstance ) && ( hwnd == rhs.hwnd );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( hinstance == rhs.hinstance ) && ( hwnd == rhs.hwnd );
# endif
}
@@ -101248,13 +103437,6 @@ namespace VULKAN_HPP_NAMESPACE
HINSTANCE hinstance = {};
HWND hwnd = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR ) ==
- sizeof( VkWin32SurfaceCreateInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR>::value,
- "Win32SurfaceCreateInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eWin32SurfaceCreateInfoKHR>
@@ -101271,16 +103453,17 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSet;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR WriteDescriptorSet(
- VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {},
- uint32_t dstBinding_ = {},
- uint32_t dstArrayElement_ = {},
- uint32_t descriptorCount_ = {},
- VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
- const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo_ = {},
- const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ = {},
- const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ = {} ) VULKAN_HPP_NOEXCEPT
- : dstSet( dstSet_ )
+ VULKAN_HPP_CONSTEXPR WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {},
+ uint32_t dstBinding_ = {},
+ uint32_t dstArrayElement_ = {},
+ uint32_t descriptorCount_ = {},
+ VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
+ const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo_ = {},
+ const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ = {},
+ const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , dstSet( dstSet_ )
, dstBinding( dstBinding_ )
, dstArrayElement( dstArrayElement_ )
, descriptorCount( descriptorCount_ )
@@ -101288,26 +103471,24 @@ namespace VULKAN_HPP_NAMESPACE
, pImageInfo( pImageInfo_ )
, pBufferInfo( pBufferInfo_ )
, pTexelBufferView( pTexelBufferView_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR WriteDescriptorSet( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
- : WriteDescriptorSet( *reinterpret_cast<WriteDescriptorSet const *>( &rhs ) )
- {}
+ WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT : WriteDescriptorSet( *reinterpret_cast<WriteDescriptorSet const *>( &rhs ) ) {}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- WriteDescriptorSet(
- VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_,
- uint32_t dstBinding_,
- uint32_t dstArrayElement_,
- VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorImageInfo> const & imageInfo_,
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> const &
- bufferInfo_ = {},
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferView> const &
- texelBufferView_ = {} )
- : dstSet( dstSet_ )
+ WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_,
+ uint32_t dstBinding_,
+ uint32_t dstArrayElement_,
+ VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorImageInfo> const & imageInfo_,
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> const & bufferInfo_ = {},
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferView> const & texelBufferView_ = {},
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , dstSet( dstSet_ )
, dstBinding( dstBinding_ )
, dstArrayElement( dstArrayElement_ )
, descriptorCount( static_cast<uint32_t>( !imageInfo_.empty() ? imageInfo_.size()
@@ -101323,9 +103504,8 @@ namespace VULKAN_HPP_NAMESPACE
# else
if ( 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) )
{
- throw LogicError(
- VULKAN_HPP_NAMESPACE_STRING
- "::WriteDescriptorSet::WriteDescriptorSet: 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() )" );
+ throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+ "::WriteDescriptorSet::WriteDescriptorSet: 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() )" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
@@ -101347,8 +103527,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet &
- setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
{
dstSet = dstSet_;
return *this;
@@ -101372,24 +103551,21 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet &
- setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
{
descriptorType = descriptorType_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet &
- setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo_ ) VULKAN_HPP_NOEXCEPT
{
pImageInfo = pImageInfo_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- WriteDescriptorSet & setImageInfo(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorImageInfo> const &
- imageInfo_ ) VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSet &
+ setImageInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorImageInfo> const & imageInfo_ ) VULKAN_HPP_NOEXCEPT
{
descriptorCount = static_cast<uint32_t>( imageInfo_.size() );
pImageInfo = imageInfo_.data();
@@ -101397,17 +103573,15 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet &
- setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ ) VULKAN_HPP_NOEXCEPT
{
pBufferInfo = pBufferInfo_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- WriteDescriptorSet & setBufferInfo(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> const &
- bufferInfo_ ) VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSet &
+ setBufferInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> const & bufferInfo_ ) VULKAN_HPP_NOEXCEPT
{
descriptorCount = static_cast<uint32_t>( bufferInfo_.size() );
pBufferInfo = bufferInfo_.data();
@@ -101415,17 +103589,15 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
- VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet &
- setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT
{
pTexelBufferView = pTexelBufferView_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
- WriteDescriptorSet & setTexelBufferView(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferView> const & texelBufferView_ )
- VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSet &
+ setTexelBufferView( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferView> const & texelBufferView_ ) VULKAN_HPP_NOEXCEPT
{
descriptorCount = static_cast<uint32_t>( texelBufferView_.size() );
pTexelBufferView = texelBufferView_.data();
@@ -101434,17 +103606,17 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkWriteDescriptorSet const &() const VULKAN_HPP_NOEXCEPT
+ operator VkWriteDescriptorSet const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkWriteDescriptorSet *>( this );
}
- explicit operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT
+ operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkWriteDescriptorSet *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -101461,16 +103633,7 @@ namespace VULKAN_HPP_NAMESPACE
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
- return std::tie( sType,
- pNext,
- dstSet,
- dstBinding,
- dstArrayElement,
- descriptorCount,
- descriptorType,
- pImageInfo,
- pBufferInfo,
- pTexelBufferView );
+ return std::tie( sType, pNext, dstSet, dstBinding, dstArrayElement, descriptorCount, descriptorType, pImageInfo, pBufferInfo, pTexelBufferView );
}
#endif
@@ -101479,14 +103642,12 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( WriteDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dstSet == rhs.dstSet ) &&
- ( dstBinding == rhs.dstBinding ) && ( dstArrayElement == rhs.dstArrayElement ) &&
- ( descriptorCount == rhs.descriptorCount ) && ( descriptorType == rhs.descriptorType ) &&
- ( pImageInfo == rhs.pImageInfo ) && ( pBufferInfo == rhs.pBufferInfo ) &&
- ( pTexelBufferView == rhs.pTexelBufferView );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dstSet == rhs.dstSet ) && ( dstBinding == rhs.dstBinding ) &&
+ ( dstArrayElement == rhs.dstArrayElement ) && ( descriptorCount == rhs.descriptorCount ) && ( descriptorType == rhs.descriptorType ) &&
+ ( pImageInfo == rhs.pImageInfo ) && ( pBufferInfo == rhs.pBufferInfo ) && ( pTexelBufferView == rhs.pTexelBufferView );
# endif
}
@@ -101497,23 +103658,17 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSet;
- const void * pNext = {};
- VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {};
- uint32_t dstBinding = {};
- uint32_t dstArrayElement = {};
- uint32_t descriptorCount = {};
- VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
- const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo = {};
- const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSet;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {};
+ uint32_t dstBinding = {};
+ uint32_t dstArrayElement = {};
+ uint32_t descriptorCount = {};
+ VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+ const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo = {};
+ const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo = {};
const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::WriteDescriptorSet>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WriteDescriptorSet>::value,
- "WriteDescriptorSet is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eWriteDescriptorSet>
@@ -101526,49 +103681,47 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkWriteDescriptorSetAccelerationStructureKHR;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eWriteDescriptorSetAccelerationStructureKHR;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetAccelerationStructureKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR(
- uint32_t accelerationStructureCount_ = {},
- const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures_ = {} ) VULKAN_HPP_NOEXCEPT
- : accelerationStructureCount( accelerationStructureCount_ )
+ VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR( uint32_t accelerationStructureCount_ = {},
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , accelerationStructureCount( accelerationStructureCount_ )
, pAccelerationStructures( pAccelerationStructures_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR(
- WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- WriteDescriptorSetAccelerationStructureKHR( VkWriteDescriptorSetAccelerationStructureKHR const & rhs )
- VULKAN_HPP_NOEXCEPT
- : WriteDescriptorSetAccelerationStructureKHR(
- *reinterpret_cast<WriteDescriptorSetAccelerationStructureKHR const *>( &rhs ) )
- {}
+ WriteDescriptorSetAccelerationStructureKHR( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ : WriteDescriptorSetAccelerationStructureKHR( *reinterpret_cast<WriteDescriptorSetAccelerationStructureKHR const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
WriteDescriptorSetAccelerationStructureKHR(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const &
- accelerationStructures_ )
- : accelerationStructureCount( static_cast<uint32_t>( accelerationStructures_.size() ) )
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , accelerationStructureCount( static_cast<uint32_t>( accelerationStructures_.size() ) )
, pAccelerationStructures( accelerationStructures_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- WriteDescriptorSetAccelerationStructureKHR &
- operator=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ WriteDescriptorSetAccelerationStructureKHR & operator=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- WriteDescriptorSetAccelerationStructureKHR &
- operator=( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSetAccelerationStructureKHR & operator=( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
@@ -101581,8 +103734,8 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR & setPAccelerationStructures(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR &
+ setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT
{
pAccelerationStructures = pAccelerationStructures_;
return *this;
@@ -101590,8 +103743,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
WriteDescriptorSetAccelerationStructureKHR & setAccelerationStructures(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const &
- accelerationStructures_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructureCount = static_cast<uint32_t>( accelerationStructures_.size() );
pAccelerationStructures = accelerationStructures_.data();
@@ -101600,24 +103752,22 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkWriteDescriptorSetAccelerationStructureKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkWriteDescriptorSetAccelerationStructureKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureKHR *>( this );
}
- explicit operator VkWriteDescriptorSetAccelerationStructureKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkWriteDescriptorSetAccelerationStructureKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkWriteDescriptorSetAccelerationStructureKHR *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- uint32_t const &,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * const &>
+ std::
+ tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -101630,11 +103780,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( WriteDescriptorSetAccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( accelerationStructureCount == rhs.accelerationStructureCount ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructureCount == rhs.accelerationStructureCount ) &&
( pAccelerationStructures == rhs.pAccelerationStructures );
# endif
}
@@ -101646,20 +103795,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureKHR;
- const void * pNext = {};
- uint32_t accelerationStructureCount = {};
- const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureKHR;
+ const void * pNext = {};
+ uint32_t accelerationStructureCount = {};
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR ) ==
- sizeof( VkWriteDescriptorSetAccelerationStructureKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR>::value,
- "WriteDescriptorSetAccelerationStructureKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eWriteDescriptorSetAccelerationStructureKHR>
@@ -101672,49 +103812,47 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkWriteDescriptorSetAccelerationStructureNV;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eWriteDescriptorSetAccelerationStructureNV;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetAccelerationStructureNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV(
- uint32_t accelerationStructureCount_ = {},
- const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures_ = {} ) VULKAN_HPP_NOEXCEPT
- : accelerationStructureCount( accelerationStructureCount_ )
+ VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV( uint32_t accelerationStructureCount_ = {},
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , accelerationStructureCount( accelerationStructureCount_ )
, pAccelerationStructures( pAccelerationStructures_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV(
- WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV( WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- WriteDescriptorSetAccelerationStructureNV( VkWriteDescriptorSetAccelerationStructureNV const & rhs )
- VULKAN_HPP_NOEXCEPT
- : WriteDescriptorSetAccelerationStructureNV(
- *reinterpret_cast<WriteDescriptorSetAccelerationStructureNV const *>( &rhs ) )
- {}
+ WriteDescriptorSetAccelerationStructureNV( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : WriteDescriptorSetAccelerationStructureNV( *reinterpret_cast<WriteDescriptorSetAccelerationStructureNV const *>( &rhs ) )
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
WriteDescriptorSetAccelerationStructureNV(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const &
- accelerationStructures_ )
- : accelerationStructureCount( static_cast<uint32_t>( accelerationStructures_.size() ) )
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures_,
+ const void * pNext_ = nullptr )
+ : pNext( pNext_ )
+ , accelerationStructureCount( static_cast<uint32_t>( accelerationStructures_.size() ) )
, pAccelerationStructures( accelerationStructures_.data() )
- {}
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- WriteDescriptorSetAccelerationStructureNV &
- operator=( WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ WriteDescriptorSetAccelerationStructureNV & operator=( WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- WriteDescriptorSetAccelerationStructureNV &
- operator=( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSetAccelerationStructureNV & operator=( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV &
- setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
@@ -101727,8 +103865,8 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV & setPAccelerationStructures(
- const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV &
+ setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT
{
pAccelerationStructures = pAccelerationStructures_;
return *this;
@@ -101736,8 +103874,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
WriteDescriptorSetAccelerationStructureNV & setAccelerationStructures(
- VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const &
- accelerationStructures_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructureCount = static_cast<uint32_t>( accelerationStructures_.size() );
pAccelerationStructures = accelerationStructures_.data();
@@ -101746,24 +103883,22 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkWriteDescriptorSetAccelerationStructureNV const &() const VULKAN_HPP_NOEXCEPT
+ operator VkWriteDescriptorSetAccelerationStructureNV const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureNV *>( this );
}
- explicit operator VkWriteDescriptorSetAccelerationStructureNV &() VULKAN_HPP_NOEXCEPT
+ operator VkWriteDescriptorSetAccelerationStructureNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkWriteDescriptorSetAccelerationStructureNV *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
- const void * const &,
- uint32_t const &,
- const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * const &>
+ std::
+ tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -101776,11 +103911,10 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( WriteDescriptorSetAccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
- ( accelerationStructureCount == rhs.accelerationStructureCount ) &&
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructureCount == rhs.accelerationStructureCount ) &&
( pAccelerationStructures == rhs.pAccelerationStructures );
# endif
}
@@ -101792,20 +103926,11 @@ namespace VULKAN_HPP_NAMESPACE
#endif
public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureNV;
- const void * pNext = {};
- uint32_t accelerationStructureCount = {};
- const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures = {};
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureNV;
+ const void * pNext = {};
+ uint32_t accelerationStructureCount = {};
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV ) ==
- sizeof( VkWriteDescriptorSetAccelerationStructureNV ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_standard_layout<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV>::value,
- "WriteDescriptorSetAccelerationStructureNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eWriteDescriptorSetAccelerationStructureNV>
@@ -101818,36 +103943,36 @@ namespace VULKAN_HPP_NAMESPACE
using NativeType = VkWriteDescriptorSetInlineUniformBlock;
static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
- StructureType::eWriteDescriptorSetInlineUniformBlock;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetInlineUniformBlock;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlock( uint32_t dataSize_ = {},
- const void * pData_ = {} ) VULKAN_HPP_NOEXCEPT
- : dataSize( dataSize_ )
+ VULKAN_HPP_CONSTEXPR
+ WriteDescriptorSetInlineUniformBlock( uint32_t dataSize_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , dataSize( dataSize_ )
, pData( pData_ )
- {}
+ {
+ }
- VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlock( WriteDescriptorSetInlineUniformBlock const & rhs )
- VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlock( WriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT = default;
WriteDescriptorSetInlineUniformBlock( VkWriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT
: WriteDescriptorSetInlineUniformBlock( *reinterpret_cast<WriteDescriptorSetInlineUniformBlock const *>( &rhs ) )
- {}
+ {
+ }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
- WriteDescriptorSetInlineUniformBlock( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ )
- : dataSize( static_cast<uint32_t>( data_.size() * sizeof( T ) ) ), pData( data_.data() )
- {}
+ WriteDescriptorSetInlineUniformBlock( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_, const void * pNext_ = nullptr )
+ : pNext( pNext_ ), dataSize( static_cast<uint32_t>( data_.size() * sizeof( T ) ) ), pData( data_.data() )
+ {
+ }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- WriteDescriptorSetInlineUniformBlock &
- operator=( WriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ WriteDescriptorSetInlineUniformBlock & operator=( WriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- WriteDescriptorSetInlineUniformBlock &
- operator=( VkWriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSetInlineUniformBlock & operator=( VkWriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock const *>( &rhs );
return *this;
@@ -101874,8 +103999,7 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
- WriteDescriptorSetInlineUniformBlock &
- setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSetInlineUniformBlock & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
{
dataSize = static_cast<uint32_t>( data_.size() * sizeof( T ) );
pData = data_.data();
@@ -101884,22 +104008,21 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkWriteDescriptorSetInlineUniformBlock const &() const VULKAN_HPP_NOEXCEPT
+ operator VkWriteDescriptorSetInlineUniformBlock const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkWriteDescriptorSetInlineUniformBlock *>( this );
}
- explicit operator VkWriteDescriptorSetInlineUniformBlock &() VULKAN_HPP_NOEXCEPT
+ operator VkWriteDescriptorSetInlineUniformBlock &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkWriteDescriptorSetInlineUniformBlock *>( this );
}
-#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
- std::
- tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const void * const &>
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const void * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
@@ -101912,7 +104035,7 @@ namespace VULKAN_HPP_NAMESPACE
#else
bool operator==( WriteDescriptorSetInlineUniformBlock const & rhs ) const VULKAN_HPP_NOEXCEPT
{
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData );
@@ -101931,14 +104054,6 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t dataSize = {};
const void * pData = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock ) ==
- sizeof( VkWriteDescriptorSetInlineUniformBlock ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT(
- std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock>::value,
- "WriteDescriptorSetInlineUniformBlock is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eWriteDescriptorSetInlineUniformBlock>
@@ -101958,17 +104073,21 @@ namespace VULKAN_HPP_NAMESPACE
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ = {},
xcb_connection_t * connection_ = {},
- xcb_window_t window_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ xcb_window_t window_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, connection( connection_ )
, window( window_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
XcbSurfaceCreateInfoKHR( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: XcbSurfaceCreateInfoKHR( *reinterpret_cast<XcbSurfaceCreateInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
XcbSurfaceCreateInfoKHR & operator=( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -101986,15 +104105,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR &
- setFlags( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR &
- setConnection( xcb_connection_t * connection_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & setConnection( xcb_connection_t * connection_ ) VULKAN_HPP_NOEXCEPT
{
connection = connection_;
return *this;
@@ -102007,17 +104124,17 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkXcbSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkXcbSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( this );
}
- explicit operator VkXcbSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkXcbSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkXcbSurfaceCreateInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -102053,8 +104170,8 @@ namespace VULKAN_HPP_NAMESPACE
bool operator==( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
- ( connection == rhs.connection ) && ( memcmp( &window, &rhs.window, sizeof( xcb_window_t ) ) == 0 );
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( connection == rhs.connection ) &&
+ ( memcmp( &window, &rhs.window, sizeof( xcb_window_t ) ) == 0 );
}
bool operator!=( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
@@ -102069,13 +104186,6 @@ namespace VULKAN_HPP_NAMESPACE
xcb_connection_t * connection = {};
xcb_window_t window = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR ) ==
- sizeof( VkXcbSurfaceCreateInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR>::value,
- "XcbSurfaceCreateInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eXcbSurfaceCreateInfoKHR>
@@ -102093,19 +104203,23 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXlibSurfaceCreateInfoKHR;
# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ = {},
- Display * dpy_ = {},
- Window window_ = {} ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
+ VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ = {},
+ Display * dpy_ = {},
+ Window window_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext( pNext_ )
+ , flags( flags_ )
, dpy( dpy_ )
, window( window_ )
- {}
+ {
+ }
VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
XlibSurfaceCreateInfoKHR( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: XlibSurfaceCreateInfoKHR( *reinterpret_cast<XlibSurfaceCreateInfoKHR const *>( &rhs ) )
- {}
+ {
+ }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
XlibSurfaceCreateInfoKHR & operator=( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
@@ -102123,8 +104237,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR &
- setFlags( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -102143,17 +104256,17 @@ namespace VULKAN_HPP_NAMESPACE
}
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
- explicit operator VkXlibSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+ operator VkXlibSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( this );
}
- explicit operator VkXlibSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkXlibSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkXlibSurfaceCreateInfoKHR *>( this );
}
-# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
@@ -102205,13 +104318,6 @@ namespace VULKAN_HPP_NAMESPACE
Display * dpy = {};
Window window = {};
};
- VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR ) ==
- sizeof( VkXlibSurfaceCreateInfoKHR ),
- "struct and wrapper have different size!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR>::value,
- "struct wrapper is not a standard layout!" );
- VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR>::value,
- "XlibSurfaceCreateInfoKHR is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eXlibSurfaceCreateInfoKHR>
diff --git a/thirdparty/vulkan/include/vulkan/vulkan_to_string.hpp b/thirdparty/vulkan/include/vulkan/vulkan_to_string.hpp
new file mode 100644
index 0000000000..2bcc89d12a
--- /dev/null
+++ b/thirdparty/vulkan/include/vulkan/vulkan_to_string.hpp
@@ -0,0 +1,8177 @@
+// Copyright 2015-2022 The Khronos Group Inc.
+//
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_TO_STRING_HPP
+#define VULKAN_TO_STRING_HPP
+
+#include <vulkan/vulkan_enums.hpp>
+
+#if __cpp_lib_format
+# include <format> // std::format
+#else
+# include <sstream> // std::stringstream
+#endif
+
+namespace VULKAN_HPP_NAMESPACE
+{
+ //==========================
+ //=== BITMASKs to_string ===
+ //==========================
+
+ //=== VK_VERSION_1_0 ===
+
+ VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & FormatFeatureFlagBits::eSampledImage )
+ result += "SampledImage | ";
+ if ( value & FormatFeatureFlagBits::eStorageImage )
+ result += "StorageImage | ";
+ if ( value & FormatFeatureFlagBits::eStorageImageAtomic )
+ result += "StorageImageAtomic | ";
+ if ( value & FormatFeatureFlagBits::eUniformTexelBuffer )
+ result += "UniformTexelBuffer | ";
+ if ( value & FormatFeatureFlagBits::eStorageTexelBuffer )
+ result += "StorageTexelBuffer | ";
+ if ( value & FormatFeatureFlagBits::eStorageTexelBufferAtomic )
+ result += "StorageTexelBufferAtomic | ";
+ if ( value & FormatFeatureFlagBits::eVertexBuffer )
+ result += "VertexBuffer | ";
+ if ( value & FormatFeatureFlagBits::eColorAttachment )
+ result += "ColorAttachment | ";
+ if ( value & FormatFeatureFlagBits::eColorAttachmentBlend )
+ result += "ColorAttachmentBlend | ";
+ if ( value & FormatFeatureFlagBits::eDepthStencilAttachment )
+ result += "DepthStencilAttachment | ";
+ if ( value & FormatFeatureFlagBits::eBlitSrc )
+ result += "BlitSrc | ";
+ if ( value & FormatFeatureFlagBits::eBlitDst )
+ result += "BlitDst | ";
+ if ( value & FormatFeatureFlagBits::eSampledImageFilterLinear )
+ result += "SampledImageFilterLinear | ";
+ if ( value & FormatFeatureFlagBits::eTransferSrc )
+ result += "TransferSrc | ";
+ if ( value & FormatFeatureFlagBits::eTransferDst )
+ result += "TransferDst | ";
+ if ( value & FormatFeatureFlagBits::eMidpointChromaSamples )
+ result += "MidpointChromaSamples | ";
+ if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter )
+ result += "SampledImageYcbcrConversionLinearFilter | ";
+ if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter )
+ result += "SampledImageYcbcrConversionSeparateReconstructionFilter | ";
+ if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit )
+ result += "SampledImageYcbcrConversionChromaReconstructionExplicit | ";
+ if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable )
+ result += "SampledImageYcbcrConversionChromaReconstructionExplicitForceable | ";
+ if ( value & FormatFeatureFlagBits::eDisjoint )
+ result += "Disjoint | ";
+ if ( value & FormatFeatureFlagBits::eCositedChromaSamples )
+ result += "CositedChromaSamples | ";
+ if ( value & FormatFeatureFlagBits::eSampledImageFilterMinmax )
+ result += "SampledImageFilterMinmax | ";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ if ( value & FormatFeatureFlagBits::eVideoDecodeOutputKHR )
+ result += "VideoDecodeOutputKHR | ";
+ if ( value & FormatFeatureFlagBits::eVideoDecodeDpbKHR )
+ result += "VideoDecodeDpbKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ if ( value & FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR )
+ result += "AccelerationStructureVertexBufferKHR | ";
+ if ( value & FormatFeatureFlagBits::eSampledImageFilterCubicEXT )
+ result += "SampledImageFilterCubicEXT | ";
+ if ( value & FormatFeatureFlagBits::eFragmentDensityMapEXT )
+ result += "FragmentDensityMapEXT | ";
+ if ( value & FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR )
+ result += "FragmentShadingRateAttachmentKHR | ";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ if ( value & FormatFeatureFlagBits::eVideoEncodeInputKHR )
+ result += "VideoEncodeInputKHR | ";
+ if ( value & FormatFeatureFlagBits::eVideoEncodeDpbKHR )
+ result += "VideoEncodeDpbKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ImageCreateFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & ImageCreateFlagBits::eSparseBinding )
+ result += "SparseBinding | ";
+ if ( value & ImageCreateFlagBits::eSparseResidency )
+ result += "SparseResidency | ";
+ if ( value & ImageCreateFlagBits::eSparseAliased )
+ result += "SparseAliased | ";
+ if ( value & ImageCreateFlagBits::eMutableFormat )
+ result += "MutableFormat | ";
+ if ( value & ImageCreateFlagBits::eCubeCompatible )
+ result += "CubeCompatible | ";
+ if ( value & ImageCreateFlagBits::eAlias )
+ result += "Alias | ";
+ if ( value & ImageCreateFlagBits::eSplitInstanceBindRegions )
+ result += "SplitInstanceBindRegions | ";
+ if ( value & ImageCreateFlagBits::e2DArrayCompatible )
+ result += "2DArrayCompatible | ";
+ if ( value & ImageCreateFlagBits::eBlockTexelViewCompatible )
+ result += "BlockTexelViewCompatible | ";
+ if ( value & ImageCreateFlagBits::eExtendedUsage )
+ result += "ExtendedUsage | ";
+ if ( value & ImageCreateFlagBits::eProtected )
+ result += "Protected | ";
+ if ( value & ImageCreateFlagBits::eDisjoint )
+ result += "Disjoint | ";
+ if ( value & ImageCreateFlagBits::eCornerSampledNV )
+ result += "CornerSampledNV | ";
+ if ( value & ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT )
+ result += "SampleLocationsCompatibleDepthEXT | ";
+ if ( value & ImageCreateFlagBits::eSubsampledEXT )
+ result += "SubsampledEXT | ";
+ if ( value & ImageCreateFlagBits::eMultisampledRenderToSingleSampledEXT )
+ result += "MultisampledRenderToSingleSampledEXT | ";
+ if ( value & ImageCreateFlagBits::e2DViewCompatibleEXT )
+ result += "2DViewCompatibleEXT | ";
+ if ( value & ImageCreateFlagBits::eFragmentDensityMapOffsetQCOM )
+ result += "FragmentDensityMapOffsetQCOM | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ImageUsageFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & ImageUsageFlagBits::eTransferSrc )
+ result += "TransferSrc | ";
+ if ( value & ImageUsageFlagBits::eTransferDst )
+ result += "TransferDst | ";
+ if ( value & ImageUsageFlagBits::eSampled )
+ result += "Sampled | ";
+ if ( value & ImageUsageFlagBits::eStorage )
+ result += "Storage | ";
+ if ( value & ImageUsageFlagBits::eColorAttachment )
+ result += "ColorAttachment | ";
+ if ( value & ImageUsageFlagBits::eDepthStencilAttachment )
+ result += "DepthStencilAttachment | ";
+ if ( value & ImageUsageFlagBits::eTransientAttachment )
+ result += "TransientAttachment | ";
+ if ( value & ImageUsageFlagBits::eInputAttachment )
+ result += "InputAttachment | ";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ if ( value & ImageUsageFlagBits::eVideoDecodeDstKHR )
+ result += "VideoDecodeDstKHR | ";
+ if ( value & ImageUsageFlagBits::eVideoDecodeSrcKHR )
+ result += "VideoDecodeSrcKHR | ";
+ if ( value & ImageUsageFlagBits::eVideoDecodeDpbKHR )
+ result += "VideoDecodeDpbKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ if ( value & ImageUsageFlagBits::eFragmentDensityMapEXT )
+ result += "FragmentDensityMapEXT | ";
+ if ( value & ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR )
+ result += "FragmentShadingRateAttachmentKHR | ";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ if ( value & ImageUsageFlagBits::eVideoEncodeDstKHR )
+ result += "VideoEncodeDstKHR | ";
+ if ( value & ImageUsageFlagBits::eVideoEncodeSrcKHR )
+ result += "VideoEncodeSrcKHR | ";
+ if ( value & ImageUsageFlagBits::eVideoEncodeDpbKHR )
+ result += "VideoEncodeDpbKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ if ( value & ImageUsageFlagBits::eAttachmentFeedbackLoopEXT )
+ result += "AttachmentFeedbackLoopEXT | ";
+ if ( value & ImageUsageFlagBits::eInvocationMaskHUAWEI )
+ result += "InvocationMaskHUAWEI | ";
+ if ( value & ImageUsageFlagBits::eSampleWeightQCOM )
+ result += "SampleWeightQCOM | ";
+ if ( value & ImageUsageFlagBits::eSampleBlockMatchQCOM )
+ result += "SampleBlockMatchQCOM | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & InstanceCreateFlagBits::eEnumeratePortabilityKHR )
+ result += "EnumeratePortabilityKHR | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & MemoryHeapFlagBits::eDeviceLocal )
+ result += "DeviceLocal | ";
+ if ( value & MemoryHeapFlagBits::eMultiInstance )
+ result += "MultiInstance | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & MemoryPropertyFlagBits::eDeviceLocal )
+ result += "DeviceLocal | ";
+ if ( value & MemoryPropertyFlagBits::eHostVisible )
+ result += "HostVisible | ";
+ if ( value & MemoryPropertyFlagBits::eHostCoherent )
+ result += "HostCoherent | ";
+ if ( value & MemoryPropertyFlagBits::eHostCached )
+ result += "HostCached | ";
+ if ( value & MemoryPropertyFlagBits::eLazilyAllocated )
+ result += "LazilyAllocated | ";
+ if ( value & MemoryPropertyFlagBits::eProtected )
+ result += "Protected | ";
+ if ( value & MemoryPropertyFlagBits::eDeviceCoherentAMD )
+ result += "DeviceCoherentAMD | ";
+ if ( value & MemoryPropertyFlagBits::eDeviceUncachedAMD )
+ result += "DeviceUncachedAMD | ";
+ if ( value & MemoryPropertyFlagBits::eRdmaCapableNV )
+ result += "RdmaCapableNV | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( QueueFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & QueueFlagBits::eGraphics )
+ result += "Graphics | ";
+ if ( value & QueueFlagBits::eCompute )
+ result += "Compute | ";
+ if ( value & QueueFlagBits::eTransfer )
+ result += "Transfer | ";
+ if ( value & QueueFlagBits::eSparseBinding )
+ result += "SparseBinding | ";
+ if ( value & QueueFlagBits::eProtected )
+ result += "Protected | ";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ if ( value & QueueFlagBits::eVideoDecodeKHR )
+ result += "VideoDecodeKHR | ";
+ if ( value & QueueFlagBits::eVideoEncodeKHR )
+ result += "VideoEncodeKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ if ( value & QueueFlagBits::eOpticalFlowNV )
+ result += "OpticalFlowNV | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( SampleCountFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & SampleCountFlagBits::e1 )
+ result += "1 | ";
+ if ( value & SampleCountFlagBits::e2 )
+ result += "2 | ";
+ if ( value & SampleCountFlagBits::e4 )
+ result += "4 | ";
+ if ( value & SampleCountFlagBits::e8 )
+ result += "8 | ";
+ if ( value & SampleCountFlagBits::e16 )
+ result += "16 | ";
+ if ( value & SampleCountFlagBits::e32 )
+ result += "32 | ";
+ if ( value & SampleCountFlagBits::e64 )
+ result += "64 | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlags )
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & DeviceQueueCreateFlagBits::eProtected )
+ result += "Protected | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & PipelineStageFlagBits::eTopOfPipe )
+ result += "TopOfPipe | ";
+ if ( value & PipelineStageFlagBits::eDrawIndirect )
+ result += "DrawIndirect | ";
+ if ( value & PipelineStageFlagBits::eVertexInput )
+ result += "VertexInput | ";
+ if ( value & PipelineStageFlagBits::eVertexShader )
+ result += "VertexShader | ";
+ if ( value & PipelineStageFlagBits::eTessellationControlShader )
+ result += "TessellationControlShader | ";
+ if ( value & PipelineStageFlagBits::eTessellationEvaluationShader )
+ result += "TessellationEvaluationShader | ";
+ if ( value & PipelineStageFlagBits::eGeometryShader )
+ result += "GeometryShader | ";
+ if ( value & PipelineStageFlagBits::eFragmentShader )
+ result += "FragmentShader | ";
+ if ( value & PipelineStageFlagBits::eEarlyFragmentTests )
+ result += "EarlyFragmentTests | ";
+ if ( value & PipelineStageFlagBits::eLateFragmentTests )
+ result += "LateFragmentTests | ";
+ if ( value & PipelineStageFlagBits::eColorAttachmentOutput )
+ result += "ColorAttachmentOutput | ";
+ if ( value & PipelineStageFlagBits::eComputeShader )
+ result += "ComputeShader | ";
+ if ( value & PipelineStageFlagBits::eTransfer )
+ result += "Transfer | ";
+ if ( value & PipelineStageFlagBits::eBottomOfPipe )
+ result += "BottomOfPipe | ";
+ if ( value & PipelineStageFlagBits::eHost )
+ result += "Host | ";
+ if ( value & PipelineStageFlagBits::eAllGraphics )
+ result += "AllGraphics | ";
+ if ( value & PipelineStageFlagBits::eAllCommands )
+ result += "AllCommands | ";
+ if ( value & PipelineStageFlagBits::eTransformFeedbackEXT )
+ result += "TransformFeedbackEXT | ";
+ if ( value & PipelineStageFlagBits::eConditionalRenderingEXT )
+ result += "ConditionalRenderingEXT | ";
+ if ( value & PipelineStageFlagBits::eAccelerationStructureBuildKHR )
+ result += "AccelerationStructureBuildKHR | ";
+ if ( value & PipelineStageFlagBits::eRayTracingShaderKHR )
+ result += "RayTracingShaderKHR | ";
+ if ( value & PipelineStageFlagBits::eFragmentDensityProcessEXT )
+ result += "FragmentDensityProcessEXT | ";
+ if ( value & PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR )
+ result += "FragmentShadingRateAttachmentKHR | ";
+ if ( value & PipelineStageFlagBits::eCommandPreprocessNV )
+ result += "CommandPreprocessNV | ";
+ if ( value & PipelineStageFlagBits::eTaskShaderEXT )
+ result += "TaskShaderEXT | ";
+ if ( value & PipelineStageFlagBits::eMeshShaderEXT )
+ result += "MeshShaderEXT | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( MemoryMapFlags )
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ImageAspectFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & ImageAspectFlagBits::eColor )
+ result += "Color | ";
+ if ( value & ImageAspectFlagBits::eDepth )
+ result += "Depth | ";
+ if ( value & ImageAspectFlagBits::eStencil )
+ result += "Stencil | ";
+ if ( value & ImageAspectFlagBits::eMetadata )
+ result += "Metadata | ";
+ if ( value & ImageAspectFlagBits::ePlane0 )
+ result += "Plane0 | ";
+ if ( value & ImageAspectFlagBits::ePlane1 )
+ result += "Plane1 | ";
+ if ( value & ImageAspectFlagBits::ePlane2 )
+ result += "Plane2 | ";
+ if ( value & ImageAspectFlagBits::eMemoryPlane0EXT )
+ result += "MemoryPlane0EXT | ";
+ if ( value & ImageAspectFlagBits::eMemoryPlane1EXT )
+ result += "MemoryPlane1EXT | ";
+ if ( value & ImageAspectFlagBits::eMemoryPlane2EXT )
+ result += "MemoryPlane2EXT | ";
+ if ( value & ImageAspectFlagBits::eMemoryPlane3EXT )
+ result += "MemoryPlane3EXT | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & SparseImageFormatFlagBits::eSingleMiptail )
+ result += "SingleMiptail | ";
+ if ( value & SparseImageFormatFlagBits::eAlignedMipSize )
+ result += "AlignedMipSize | ";
+ if ( value & SparseImageFormatFlagBits::eNonstandardBlockSize )
+ result += "NonstandardBlockSize | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & SparseMemoryBindFlagBits::eMetadata )
+ result += "Metadata | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( FenceCreateFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & FenceCreateFlagBits::eSignaled )
+ result += "Signaled | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlags )
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( EventCreateFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & EventCreateFlagBits::eDeviceOnly )
+ result += "DeviceOnly | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyVertices )
+ result += "InputAssemblyVertices | ";
+ if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives )
+ result += "InputAssemblyPrimitives | ";
+ if ( value & QueryPipelineStatisticFlagBits::eVertexShaderInvocations )
+ result += "VertexShaderInvocations | ";
+ if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderInvocations )
+ result += "GeometryShaderInvocations | ";
+ if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives )
+ result += "GeometryShaderPrimitives | ";
+ if ( value & QueryPipelineStatisticFlagBits::eClippingInvocations )
+ result += "ClippingInvocations | ";
+ if ( value & QueryPipelineStatisticFlagBits::eClippingPrimitives )
+ result += "ClippingPrimitives | ";
+ if ( value & QueryPipelineStatisticFlagBits::eFragmentShaderInvocations )
+ result += "FragmentShaderInvocations | ";
+ if ( value & QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches )
+ result += "TessellationControlShaderPatches | ";
+ if ( value & QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations )
+ result += "TessellationEvaluationShaderInvocations | ";
+ if ( value & QueryPipelineStatisticFlagBits::eComputeShaderInvocations )
+ result += "ComputeShaderInvocations | ";
+ if ( value & QueryPipelineStatisticFlagBits::eTaskShaderInvocationsEXT )
+ result += "TaskShaderInvocationsEXT | ";
+ if ( value & QueryPipelineStatisticFlagBits::eMeshShaderInvocationsEXT )
+ result += "MeshShaderInvocationsEXT | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlags )
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( QueryResultFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & QueryResultFlagBits::e64 )
+ result += "64 | ";
+ if ( value & QueryResultFlagBits::eWait )
+ result += "Wait | ";
+ if ( value & QueryResultFlagBits::eWithAvailability )
+ result += "WithAvailability | ";
+ if ( value & QueryResultFlagBits::ePartial )
+ result += "Partial | ";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ if ( value & QueryResultFlagBits::eWithStatusKHR )
+ result += "WithStatusKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( BufferCreateFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & BufferCreateFlagBits::eSparseBinding )
+ result += "SparseBinding | ";
+ if ( value & BufferCreateFlagBits::eSparseResidency )
+ result += "SparseResidency | ";
+ if ( value & BufferCreateFlagBits::eSparseAliased )
+ result += "SparseAliased | ";
+ if ( value & BufferCreateFlagBits::eProtected )
+ result += "Protected | ";
+ if ( value & BufferCreateFlagBits::eDeviceAddressCaptureReplay )
+ result += "DeviceAddressCaptureReplay | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( BufferUsageFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & BufferUsageFlagBits::eTransferSrc )
+ result += "TransferSrc | ";
+ if ( value & BufferUsageFlagBits::eTransferDst )
+ result += "TransferDst | ";
+ if ( value & BufferUsageFlagBits::eUniformTexelBuffer )
+ result += "UniformTexelBuffer | ";
+ if ( value & BufferUsageFlagBits::eStorageTexelBuffer )
+ result += "StorageTexelBuffer | ";
+ if ( value & BufferUsageFlagBits::eUniformBuffer )
+ result += "UniformBuffer | ";
+ if ( value & BufferUsageFlagBits::eStorageBuffer )
+ result += "StorageBuffer | ";
+ if ( value & BufferUsageFlagBits::eIndexBuffer )
+ result += "IndexBuffer | ";
+ if ( value & BufferUsageFlagBits::eVertexBuffer )
+ result += "VertexBuffer | ";
+ if ( value & BufferUsageFlagBits::eIndirectBuffer )
+ result += "IndirectBuffer | ";
+ if ( value & BufferUsageFlagBits::eShaderDeviceAddress )
+ result += "ShaderDeviceAddress | ";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ if ( value & BufferUsageFlagBits::eVideoDecodeSrcKHR )
+ result += "VideoDecodeSrcKHR | ";
+ if ( value & BufferUsageFlagBits::eVideoDecodeDstKHR )
+ result += "VideoDecodeDstKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ if ( value & BufferUsageFlagBits::eTransformFeedbackBufferEXT )
+ result += "TransformFeedbackBufferEXT | ";
+ if ( value & BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT )
+ result += "TransformFeedbackCounterBufferEXT | ";
+ if ( value & BufferUsageFlagBits::eConditionalRenderingEXT )
+ result += "ConditionalRenderingEXT | ";
+ if ( value & BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR )
+ result += "AccelerationStructureBuildInputReadOnlyKHR | ";
+ if ( value & BufferUsageFlagBits::eAccelerationStructureStorageKHR )
+ result += "AccelerationStructureStorageKHR | ";
+ if ( value & BufferUsageFlagBits::eShaderBindingTableKHR )
+ result += "ShaderBindingTableKHR | ";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ if ( value & BufferUsageFlagBits::eVideoEncodeDstKHR )
+ result += "VideoEncodeDstKHR | ";
+ if ( value & BufferUsageFlagBits::eVideoEncodeSrcKHR )
+ result += "VideoEncodeSrcKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ if ( value & BufferUsageFlagBits::eMicromapBuildInputReadOnlyEXT )
+ result += "MicromapBuildInputReadOnlyEXT | ";
+ if ( value & BufferUsageFlagBits::eMicromapStorageEXT )
+ result += "MicromapStorageEXT | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlags )
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT )
+ result += "FragmentDensityMapDynamicEXT | ";
+ if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT )
+ result += "FragmentDensityMapDeferredEXT | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlags )
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & PipelineCacheCreateFlagBits::eExternallySynchronized )
+ result += "ExternallySynchronized | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ColorComponentFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & ColorComponentFlagBits::eR )
+ result += "R | ";
+ if ( value & ColorComponentFlagBits::eG )
+ result += "G | ";
+ if ( value & ColorComponentFlagBits::eB )
+ result += "B | ";
+ if ( value & ColorComponentFlagBits::eA )
+ result += "A | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( CullModeFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & CullModeFlagBits::eFront )
+ result += "Front | ";
+ if ( value & CullModeFlagBits::eBack )
+ result += "Back | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & PipelineColorBlendStateCreateFlagBits::eRasterizationOrderAttachmentAccessEXT )
+ result += "RasterizationOrderAttachmentAccessEXT | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & PipelineCreateFlagBits::eDisableOptimization )
+ result += "DisableOptimization | ";
+ if ( value & PipelineCreateFlagBits::eAllowDerivatives )
+ result += "AllowDerivatives | ";
+ if ( value & PipelineCreateFlagBits::eDerivative )
+ result += "Derivative | ";
+ if ( value & PipelineCreateFlagBits::eViewIndexFromDeviceIndex )
+ result += "ViewIndexFromDeviceIndex | ";
+ if ( value & PipelineCreateFlagBits::eDispatchBase )
+ result += "DispatchBase | ";
+ if ( value & PipelineCreateFlagBits::eFailOnPipelineCompileRequired )
+ result += "FailOnPipelineCompileRequired | ";
+ if ( value & PipelineCreateFlagBits::eEarlyReturnOnFailure )
+ result += "EarlyReturnOnFailure | ";
+ if ( value & PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR )
+ result += "RenderingFragmentShadingRateAttachmentKHR | ";
+ if ( value & PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT )
+ result += "RenderingFragmentDensityMapAttachmentEXT | ";
+ if ( value & PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR )
+ result += "RayTracingNoNullAnyHitShadersKHR | ";
+ if ( value & PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR )
+ result += "RayTracingNoNullClosestHitShadersKHR | ";
+ if ( value & PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR )
+ result += "RayTracingNoNullMissShadersKHR | ";
+ if ( value & PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR )
+ result += "RayTracingNoNullIntersectionShadersKHR | ";
+ if ( value & PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR )
+ result += "RayTracingSkipTrianglesKHR | ";
+ if ( value & PipelineCreateFlagBits::eRayTracingSkipAabbsKHR )
+ result += "RayTracingSkipAabbsKHR | ";
+ if ( value & PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR )
+ result += "RayTracingShaderGroupHandleCaptureReplayKHR | ";
+ if ( value & PipelineCreateFlagBits::eDeferCompileNV )
+ result += "DeferCompileNV | ";
+ if ( value & PipelineCreateFlagBits::eCaptureStatisticsKHR )
+ result += "CaptureStatisticsKHR | ";
+ if ( value & PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR )
+ result += "CaptureInternalRepresentationsKHR | ";
+ if ( value & PipelineCreateFlagBits::eIndirectBindableNV )
+ result += "IndirectBindableNV | ";
+ if ( value & PipelineCreateFlagBits::eLibraryKHR )
+ result += "LibraryKHR | ";
+ if ( value & PipelineCreateFlagBits::eRetainLinkTimeOptimizationInfoEXT )
+ result += "RetainLinkTimeOptimizationInfoEXT | ";
+ if ( value & PipelineCreateFlagBits::eLinkTimeOptimizationEXT )
+ result += "LinkTimeOptimizationEXT | ";
+ if ( value & PipelineCreateFlagBits::eRayTracingAllowMotionNV )
+ result += "RayTracingAllowMotionNV | ";
+ if ( value & PipelineCreateFlagBits::eColorAttachmentFeedbackLoopEXT )
+ result += "ColorAttachmentFeedbackLoopEXT | ";
+ if ( value & PipelineCreateFlagBits::eDepthStencilAttachmentFeedbackLoopEXT )
+ result += "DepthStencilAttachmentFeedbackLoopEXT | ";
+ if ( value & PipelineCreateFlagBits::eRayTracingOpacityMicromapEXT )
+ result += "RayTracingOpacityMicromapEXT | ";
+ if ( value & PipelineCreateFlagBits::eNoProtectedAccessEXT )
+ result += "NoProtectedAccessEXT | ";
+ if ( value & PipelineCreateFlagBits::eProtectedAccessOnlyEXT )
+ result += "ProtectedAccessOnlyEXT | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentDepthAccessEXT )
+ result += "RasterizationOrderAttachmentDepthAccessEXT | ";
+ if ( value & PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentStencilAccessEXT )
+ result += "RasterizationOrderAttachmentStencilAccessEXT | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlags )
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlags )
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & PipelineLayoutCreateFlagBits::eIndependentSetsEXT )
+ result += "IndependentSetsEXT | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlags )
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlags )
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSize )
+ result += "AllowVaryingSubgroupSize | ";
+ if ( value & PipelineShaderStageCreateFlagBits::eRequireFullSubgroups )
+ result += "RequireFullSubgroups | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlags )
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlags )
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlags )
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ShaderStageFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & ShaderStageFlagBits::eVertex )
+ result += "Vertex | ";
+ if ( value & ShaderStageFlagBits::eTessellationControl )
+ result += "TessellationControl | ";
+ if ( value & ShaderStageFlagBits::eTessellationEvaluation )
+ result += "TessellationEvaluation | ";
+ if ( value & ShaderStageFlagBits::eGeometry )
+ result += "Geometry | ";
+ if ( value & ShaderStageFlagBits::eFragment )
+ result += "Fragment | ";
+ if ( value & ShaderStageFlagBits::eCompute )
+ result += "Compute | ";
+ if ( value & ShaderStageFlagBits::eRaygenKHR )
+ result += "RaygenKHR | ";
+ if ( value & ShaderStageFlagBits::eAnyHitKHR )
+ result += "AnyHitKHR | ";
+ if ( value & ShaderStageFlagBits::eClosestHitKHR )
+ result += "ClosestHitKHR | ";
+ if ( value & ShaderStageFlagBits::eMissKHR )
+ result += "MissKHR | ";
+ if ( value & ShaderStageFlagBits::eIntersectionKHR )
+ result += "IntersectionKHR | ";
+ if ( value & ShaderStageFlagBits::eCallableKHR )
+ result += "CallableKHR | ";
+ if ( value & ShaderStageFlagBits::eTaskEXT )
+ result += "TaskEXT | ";
+ if ( value & ShaderStageFlagBits::eMeshEXT )
+ result += "MeshEXT | ";
+ if ( value & ShaderStageFlagBits::eSubpassShadingHUAWEI )
+ result += "SubpassShadingHUAWEI | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & SamplerCreateFlagBits::eSubsampledEXT )
+ result += "SubsampledEXT | ";
+ if ( value & SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT )
+ result += "SubsampledCoarseReconstructionEXT | ";
+ if ( value & SamplerCreateFlagBits::eNonSeamlessCubeMapEXT )
+ result += "NonSeamlessCubeMapEXT | ";
+ if ( value & SamplerCreateFlagBits::eImageProcessingQCOM )
+ result += "ImageProcessingQCOM | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & DescriptorPoolCreateFlagBits::eFreeDescriptorSet )
+ result += "FreeDescriptorSet | ";
+ if ( value & DescriptorPoolCreateFlagBits::eUpdateAfterBind )
+ result += "UpdateAfterBind | ";
+ if ( value & DescriptorPoolCreateFlagBits::eHostOnlyEXT )
+ result += "HostOnlyEXT | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlags )
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool )
+ result += "UpdateAfterBindPool | ";
+ if ( value & DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR )
+ result += "PushDescriptorKHR | ";
+ if ( value & DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT )
+ result += "HostOnlyPoolEXT | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( AccessFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & AccessFlagBits::eIndirectCommandRead )
+ result += "IndirectCommandRead | ";
+ if ( value & AccessFlagBits::eIndexRead )
+ result += "IndexRead | ";
+ if ( value & AccessFlagBits::eVertexAttributeRead )
+ result += "VertexAttributeRead | ";
+ if ( value & AccessFlagBits::eUniformRead )
+ result += "UniformRead | ";
+ if ( value & AccessFlagBits::eInputAttachmentRead )
+ result += "InputAttachmentRead | ";
+ if ( value & AccessFlagBits::eShaderRead )
+ result += "ShaderRead | ";
+ if ( value & AccessFlagBits::eShaderWrite )
+ result += "ShaderWrite | ";
+ if ( value & AccessFlagBits::eColorAttachmentRead )
+ result += "ColorAttachmentRead | ";
+ if ( value & AccessFlagBits::eColorAttachmentWrite )
+ result += "ColorAttachmentWrite | ";
+ if ( value & AccessFlagBits::eDepthStencilAttachmentRead )
+ result += "DepthStencilAttachmentRead | ";
+ if ( value & AccessFlagBits::eDepthStencilAttachmentWrite )
+ result += "DepthStencilAttachmentWrite | ";
+ if ( value & AccessFlagBits::eTransferRead )
+ result += "TransferRead | ";
+ if ( value & AccessFlagBits::eTransferWrite )
+ result += "TransferWrite | ";
+ if ( value & AccessFlagBits::eHostRead )
+ result += "HostRead | ";
+ if ( value & AccessFlagBits::eHostWrite )
+ result += "HostWrite | ";
+ if ( value & AccessFlagBits::eMemoryRead )
+ result += "MemoryRead | ";
+ if ( value & AccessFlagBits::eMemoryWrite )
+ result += "MemoryWrite | ";
+ if ( value & AccessFlagBits::eTransformFeedbackWriteEXT )
+ result += "TransformFeedbackWriteEXT | ";
+ if ( value & AccessFlagBits::eTransformFeedbackCounterReadEXT )
+ result += "TransformFeedbackCounterReadEXT | ";
+ if ( value & AccessFlagBits::eTransformFeedbackCounterWriteEXT )
+ result += "TransformFeedbackCounterWriteEXT | ";
+ if ( value & AccessFlagBits::eConditionalRenderingReadEXT )
+ result += "ConditionalRenderingReadEXT | ";
+ if ( value & AccessFlagBits::eColorAttachmentReadNoncoherentEXT )
+ result += "ColorAttachmentReadNoncoherentEXT | ";
+ if ( value & AccessFlagBits::eAccelerationStructureReadKHR )
+ result += "AccelerationStructureReadKHR | ";
+ if ( value & AccessFlagBits::eAccelerationStructureWriteKHR )
+ result += "AccelerationStructureWriteKHR | ";
+ if ( value & AccessFlagBits::eFragmentDensityMapReadEXT )
+ result += "FragmentDensityMapReadEXT | ";
+ if ( value & AccessFlagBits::eFragmentShadingRateAttachmentReadKHR )
+ result += "FragmentShadingRateAttachmentReadKHR | ";
+ if ( value & AccessFlagBits::eCommandPreprocessReadNV )
+ result += "CommandPreprocessReadNV | ";
+ if ( value & AccessFlagBits::eCommandPreprocessWriteNV )
+ result += "CommandPreprocessWriteNV | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & AttachmentDescriptionFlagBits::eMayAlias )
+ result += "MayAlias | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DependencyFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & DependencyFlagBits::eByRegion )
+ result += "ByRegion | ";
+ if ( value & DependencyFlagBits::eDeviceGroup )
+ result += "DeviceGroup | ";
+ if ( value & DependencyFlagBits::eViewLocal )
+ result += "ViewLocal | ";
+ if ( value & DependencyFlagBits::eFeedbackLoopEXT )
+ result += "FeedbackLoopEXT | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & FramebufferCreateFlagBits::eImageless )
+ result += "Imageless | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & RenderPassCreateFlagBits::eTransformQCOM )
+ result += "TransformQCOM | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & SubpassDescriptionFlagBits::ePerViewAttributesNVX )
+ result += "PerViewAttributesNVX | ";
+ if ( value & SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX )
+ result += "PerViewPositionXOnlyNVX | ";
+ if ( value & SubpassDescriptionFlagBits::eFragmentRegionQCOM )
+ result += "FragmentRegionQCOM | ";
+ if ( value & SubpassDescriptionFlagBits::eShaderResolveQCOM )
+ result += "ShaderResolveQCOM | ";
+ if ( value & SubpassDescriptionFlagBits::eRasterizationOrderAttachmentColorAccessEXT )
+ result += "RasterizationOrderAttachmentColorAccessEXT | ";
+ if ( value & SubpassDescriptionFlagBits::eRasterizationOrderAttachmentDepthAccessEXT )
+ result += "RasterizationOrderAttachmentDepthAccessEXT | ";
+ if ( value & SubpassDescriptionFlagBits::eRasterizationOrderAttachmentStencilAccessEXT )
+ result += "RasterizationOrderAttachmentStencilAccessEXT | ";
+ if ( value & SubpassDescriptionFlagBits::eEnableLegacyDitheringEXT )
+ result += "EnableLegacyDitheringEXT | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & CommandPoolCreateFlagBits::eTransient )
+ result += "Transient | ";
+ if ( value & CommandPoolCreateFlagBits::eResetCommandBuffer )
+ result += "ResetCommandBuffer | ";
+ if ( value & CommandPoolCreateFlagBits::eProtected )
+ result += "Protected | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & CommandPoolResetFlagBits::eReleaseResources )
+ result += "ReleaseResources | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & CommandBufferResetFlagBits::eReleaseResources )
+ result += "ReleaseResources | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & CommandBufferUsageFlagBits::eOneTimeSubmit )
+ result += "OneTimeSubmit | ";
+ if ( value & CommandBufferUsageFlagBits::eRenderPassContinue )
+ result += "RenderPassContinue | ";
+ if ( value & CommandBufferUsageFlagBits::eSimultaneousUse )
+ result += "SimultaneousUse | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( QueryControlFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & QueryControlFlagBits::ePrecise )
+ result += "Precise | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( StencilFaceFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & StencilFaceFlagBits::eFront )
+ result += "Front | ";
+ if ( value & StencilFaceFlagBits::eBack )
+ result += "Back | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ //=== VK_VERSION_1_1 ===
+
+ VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & SubgroupFeatureFlagBits::eBasic )
+ result += "Basic | ";
+ if ( value & SubgroupFeatureFlagBits::eVote )
+ result += "Vote | ";
+ if ( value & SubgroupFeatureFlagBits::eArithmetic )
+ result += "Arithmetic | ";
+ if ( value & SubgroupFeatureFlagBits::eBallot )
+ result += "Ballot | ";
+ if ( value & SubgroupFeatureFlagBits::eShuffle )
+ result += "Shuffle | ";
+ if ( value & SubgroupFeatureFlagBits::eShuffleRelative )
+ result += "ShuffleRelative | ";
+ if ( value & SubgroupFeatureFlagBits::eClustered )
+ result += "Clustered | ";
+ if ( value & SubgroupFeatureFlagBits::eQuad )
+ result += "Quad | ";
+ if ( value & SubgroupFeatureFlagBits::ePartitionedNV )
+ result += "PartitionedNV | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & PeerMemoryFeatureFlagBits::eCopySrc )
+ result += "CopySrc | ";
+ if ( value & PeerMemoryFeatureFlagBits::eCopyDst )
+ result += "CopyDst | ";
+ if ( value & PeerMemoryFeatureFlagBits::eGenericSrc )
+ result += "GenericSrc | ";
+ if ( value & PeerMemoryFeatureFlagBits::eGenericDst )
+ result += "GenericDst | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & MemoryAllocateFlagBits::eDeviceMask )
+ result += "DeviceMask | ";
+ if ( value & MemoryAllocateFlagBits::eDeviceAddress )
+ result += "DeviceAddress | ";
+ if ( value & MemoryAllocateFlagBits::eDeviceAddressCaptureReplay )
+ result += "DeviceAddressCaptureReplay | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlags )
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlags )
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueFd )
+ result += "OpaqueFd | ";
+ if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 )
+ result += "OpaqueWin32 | ";
+ if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt )
+ result += "OpaqueWin32Kmt | ";
+ if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11Texture )
+ result += "D3D11Texture | ";
+ if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt )
+ result += "D3D11TextureKmt | ";
+ if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Heap )
+ result += "D3D12Heap | ";
+ if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Resource )
+ result += "D3D12Resource | ";
+ if ( value & ExternalMemoryHandleTypeFlagBits::eDmaBufEXT )
+ result += "DmaBufEXT | ";
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+ if ( value & ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID )
+ result += "AndroidHardwareBufferANDROID | ";
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+ if ( value & ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT )
+ result += "HostAllocationEXT | ";
+ if ( value & ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT )
+ result += "HostMappedForeignMemoryEXT | ";
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+ if ( value & ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA )
+ result += "ZirconVmoFUCHSIA | ";
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+ if ( value & ExternalMemoryHandleTypeFlagBits::eRdmaAddressNV )
+ result += "RdmaAddressNV | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & ExternalMemoryFeatureFlagBits::eDedicatedOnly )
+ result += "DedicatedOnly | ";
+ if ( value & ExternalMemoryFeatureFlagBits::eExportable )
+ result += "Exportable | ";
+ if ( value & ExternalMemoryFeatureFlagBits::eImportable )
+ result += "Importable | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueFd )
+ result += "OpaqueFd | ";
+ if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32 )
+ result += "OpaqueWin32 | ";
+ if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt )
+ result += "OpaqueWin32Kmt | ";
+ if ( value & ExternalFenceHandleTypeFlagBits::eSyncFd )
+ result += "SyncFd | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & ExternalFenceFeatureFlagBits::eExportable )
+ result += "Exportable | ";
+ if ( value & ExternalFenceFeatureFlagBits::eImportable )
+ result += "Importable | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( FenceImportFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & FenceImportFlagBits::eTemporary )
+ result += "Temporary | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & SemaphoreImportFlagBits::eTemporary )
+ result += "Temporary | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd )
+ result += "OpaqueFd | ";
+ if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 )
+ result += "OpaqueWin32 | ";
+ if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt )
+ result += "OpaqueWin32Kmt | ";
+ if ( value & ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence )
+ result += "D3D12Fence | ";
+ if ( value & ExternalSemaphoreHandleTypeFlagBits::eSyncFd )
+ result += "SyncFd | ";
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+ if ( value & ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA )
+ result += "ZirconEventFUCHSIA | ";
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & ExternalSemaphoreFeatureFlagBits::eExportable )
+ result += "Exportable | ";
+ if ( value & ExternalSemaphoreFeatureFlagBits::eImportable )
+ result += "Importable | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ //=== VK_VERSION_1_2 ===
+
+ VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & DescriptorBindingFlagBits::eUpdateAfterBind )
+ result += "UpdateAfterBind | ";
+ if ( value & DescriptorBindingFlagBits::eUpdateUnusedWhilePending )
+ result += "UpdateUnusedWhilePending | ";
+ if ( value & DescriptorBindingFlagBits::ePartiallyBound )
+ result += "PartiallyBound | ";
+ if ( value & DescriptorBindingFlagBits::eVariableDescriptorCount )
+ result += "VariableDescriptorCount | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ResolveModeFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & ResolveModeFlagBits::eSampleZero )
+ result += "SampleZero | ";
+ if ( value & ResolveModeFlagBits::eAverage )
+ result += "Average | ";
+ if ( value & ResolveModeFlagBits::eMin )
+ result += "Min | ";
+ if ( value & ResolveModeFlagBits::eMax )
+ result += "Max | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & SemaphoreWaitFlagBits::eAny )
+ result += "Any | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ //=== VK_VERSION_1_3 ===
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & PipelineCreationFeedbackFlagBits::eValid )
+ result += "Valid | ";
+ if ( value & PipelineCreationFeedbackFlagBits::eApplicationPipelineCacheHit )
+ result += "ApplicationPipelineCacheHit | ";
+ if ( value & PipelineCreationFeedbackFlagBits::eBasePipelineAcceleration )
+ result += "BasePipelineAcceleration | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & ToolPurposeFlagBits::eValidation )
+ result += "Validation | ";
+ if ( value & ToolPurposeFlagBits::eProfiling )
+ result += "Profiling | ";
+ if ( value & ToolPurposeFlagBits::eTracing )
+ result += "Tracing | ";
+ if ( value & ToolPurposeFlagBits::eAdditionalFeatures )
+ result += "AdditionalFeatures | ";
+ if ( value & ToolPurposeFlagBits::eModifyingFeatures )
+ result += "ModifyingFeatures | ";
+ if ( value & ToolPurposeFlagBits::eDebugReportingEXT )
+ result += "DebugReportingEXT | ";
+ if ( value & ToolPurposeFlagBits::eDebugMarkersEXT )
+ result += "DebugMarkersEXT | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PrivateDataSlotCreateFlags )
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags2 value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & PipelineStageFlagBits2::eTopOfPipe )
+ result += "TopOfPipe | ";
+ if ( value & PipelineStageFlagBits2::eDrawIndirect )
+ result += "DrawIndirect | ";
+ if ( value & PipelineStageFlagBits2::eVertexInput )
+ result += "VertexInput | ";
+ if ( value & PipelineStageFlagBits2::eVertexShader )
+ result += "VertexShader | ";
+ if ( value & PipelineStageFlagBits2::eTessellationControlShader )
+ result += "TessellationControlShader | ";
+ if ( value & PipelineStageFlagBits2::eTessellationEvaluationShader )
+ result += "TessellationEvaluationShader | ";
+ if ( value & PipelineStageFlagBits2::eGeometryShader )
+ result += "GeometryShader | ";
+ if ( value & PipelineStageFlagBits2::eFragmentShader )
+ result += "FragmentShader | ";
+ if ( value & PipelineStageFlagBits2::eEarlyFragmentTests )
+ result += "EarlyFragmentTests | ";
+ if ( value & PipelineStageFlagBits2::eLateFragmentTests )
+ result += "LateFragmentTests | ";
+ if ( value & PipelineStageFlagBits2::eColorAttachmentOutput )
+ result += "ColorAttachmentOutput | ";
+ if ( value & PipelineStageFlagBits2::eComputeShader )
+ result += "ComputeShader | ";
+ if ( value & PipelineStageFlagBits2::eAllTransfer )
+ result += "AllTransfer | ";
+ if ( value & PipelineStageFlagBits2::eBottomOfPipe )
+ result += "BottomOfPipe | ";
+ if ( value & PipelineStageFlagBits2::eHost )
+ result += "Host | ";
+ if ( value & PipelineStageFlagBits2::eAllGraphics )
+ result += "AllGraphics | ";
+ if ( value & PipelineStageFlagBits2::eAllCommands )
+ result += "AllCommands | ";
+ if ( value & PipelineStageFlagBits2::eCopy )
+ result += "Copy | ";
+ if ( value & PipelineStageFlagBits2::eResolve )
+ result += "Resolve | ";
+ if ( value & PipelineStageFlagBits2::eBlit )
+ result += "Blit | ";
+ if ( value & PipelineStageFlagBits2::eClear )
+ result += "Clear | ";
+ if ( value & PipelineStageFlagBits2::eIndexInput )
+ result += "IndexInput | ";
+ if ( value & PipelineStageFlagBits2::eVertexAttributeInput )
+ result += "VertexAttributeInput | ";
+ if ( value & PipelineStageFlagBits2::ePreRasterizationShaders )
+ result += "PreRasterizationShaders | ";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ if ( value & PipelineStageFlagBits2::eVideoDecodeKHR )
+ result += "VideoDecodeKHR | ";
+ if ( value & PipelineStageFlagBits2::eVideoEncodeKHR )
+ result += "VideoEncodeKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ if ( value & PipelineStageFlagBits2::eTransformFeedbackEXT )
+ result += "TransformFeedbackEXT | ";
+ if ( value & PipelineStageFlagBits2::eConditionalRenderingEXT )
+ result += "ConditionalRenderingEXT | ";
+ if ( value & PipelineStageFlagBits2::eCommandPreprocessNV )
+ result += "CommandPreprocessNV | ";
+ if ( value & PipelineStageFlagBits2::eFragmentShadingRateAttachmentKHR )
+ result += "FragmentShadingRateAttachmentKHR | ";
+ if ( value & PipelineStageFlagBits2::eAccelerationStructureBuildKHR )
+ result += "AccelerationStructureBuildKHR | ";
+ if ( value & PipelineStageFlagBits2::eRayTracingShaderKHR )
+ result += "RayTracingShaderKHR | ";
+ if ( value & PipelineStageFlagBits2::eFragmentDensityProcessEXT )
+ result += "FragmentDensityProcessEXT | ";
+ if ( value & PipelineStageFlagBits2::eTaskShaderEXT )
+ result += "TaskShaderEXT | ";
+ if ( value & PipelineStageFlagBits2::eMeshShaderEXT )
+ result += "MeshShaderEXT | ";
+ if ( value & PipelineStageFlagBits2::eSubpassShadingHUAWEI )
+ result += "SubpassShadingHUAWEI | ";
+ if ( value & PipelineStageFlagBits2::eInvocationMaskHUAWEI )
+ result += "InvocationMaskHUAWEI | ";
+ if ( value & PipelineStageFlagBits2::eAccelerationStructureCopyKHR )
+ result += "AccelerationStructureCopyKHR | ";
+ if ( value & PipelineStageFlagBits2::eMicromapBuildEXT )
+ result += "MicromapBuildEXT | ";
+ if ( value & PipelineStageFlagBits2::eOpticalFlowNV )
+ result += "OpticalFlowNV | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( AccessFlags2 value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & AccessFlagBits2::eIndirectCommandRead )
+ result += "IndirectCommandRead | ";
+ if ( value & AccessFlagBits2::eIndexRead )
+ result += "IndexRead | ";
+ if ( value & AccessFlagBits2::eVertexAttributeRead )
+ result += "VertexAttributeRead | ";
+ if ( value & AccessFlagBits2::eUniformRead )
+ result += "UniformRead | ";
+ if ( value & AccessFlagBits2::eInputAttachmentRead )
+ result += "InputAttachmentRead | ";
+ if ( value & AccessFlagBits2::eShaderRead )
+ result += "ShaderRead | ";
+ if ( value & AccessFlagBits2::eShaderWrite )
+ result += "ShaderWrite | ";
+ if ( value & AccessFlagBits2::eColorAttachmentRead )
+ result += "ColorAttachmentRead | ";
+ if ( value & AccessFlagBits2::eColorAttachmentWrite )
+ result += "ColorAttachmentWrite | ";
+ if ( value & AccessFlagBits2::eDepthStencilAttachmentRead )
+ result += "DepthStencilAttachmentRead | ";
+ if ( value & AccessFlagBits2::eDepthStencilAttachmentWrite )
+ result += "DepthStencilAttachmentWrite | ";
+ if ( value & AccessFlagBits2::eTransferRead )
+ result += "TransferRead | ";
+ if ( value & AccessFlagBits2::eTransferWrite )
+ result += "TransferWrite | ";
+ if ( value & AccessFlagBits2::eHostRead )
+ result += "HostRead | ";
+ if ( value & AccessFlagBits2::eHostWrite )
+ result += "HostWrite | ";
+ if ( value & AccessFlagBits2::eMemoryRead )
+ result += "MemoryRead | ";
+ if ( value & AccessFlagBits2::eMemoryWrite )
+ result += "MemoryWrite | ";
+ if ( value & AccessFlagBits2::eShaderSampledRead )
+ result += "ShaderSampledRead | ";
+ if ( value & AccessFlagBits2::eShaderStorageRead )
+ result += "ShaderStorageRead | ";
+ if ( value & AccessFlagBits2::eShaderStorageWrite )
+ result += "ShaderStorageWrite | ";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ if ( value & AccessFlagBits2::eVideoDecodeReadKHR )
+ result += "VideoDecodeReadKHR | ";
+ if ( value & AccessFlagBits2::eVideoDecodeWriteKHR )
+ result += "VideoDecodeWriteKHR | ";
+ if ( value & AccessFlagBits2::eVideoEncodeReadKHR )
+ result += "VideoEncodeReadKHR | ";
+ if ( value & AccessFlagBits2::eVideoEncodeWriteKHR )
+ result += "VideoEncodeWriteKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ if ( value & AccessFlagBits2::eTransformFeedbackWriteEXT )
+ result += "TransformFeedbackWriteEXT | ";
+ if ( value & AccessFlagBits2::eTransformFeedbackCounterReadEXT )
+ result += "TransformFeedbackCounterReadEXT | ";
+ if ( value & AccessFlagBits2::eTransformFeedbackCounterWriteEXT )
+ result += "TransformFeedbackCounterWriteEXT | ";
+ if ( value & AccessFlagBits2::eConditionalRenderingReadEXT )
+ result += "ConditionalRenderingReadEXT | ";
+ if ( value & AccessFlagBits2::eCommandPreprocessReadNV )
+ result += "CommandPreprocessReadNV | ";
+ if ( value & AccessFlagBits2::eCommandPreprocessWriteNV )
+ result += "CommandPreprocessWriteNV | ";
+ if ( value & AccessFlagBits2::eFragmentShadingRateAttachmentReadKHR )
+ result += "FragmentShadingRateAttachmentReadKHR | ";
+ if ( value & AccessFlagBits2::eAccelerationStructureReadKHR )
+ result += "AccelerationStructureReadKHR | ";
+ if ( value & AccessFlagBits2::eAccelerationStructureWriteKHR )
+ result += "AccelerationStructureWriteKHR | ";
+ if ( value & AccessFlagBits2::eFragmentDensityMapReadEXT )
+ result += "FragmentDensityMapReadEXT | ";
+ if ( value & AccessFlagBits2::eColorAttachmentReadNoncoherentEXT )
+ result += "ColorAttachmentReadNoncoherentEXT | ";
+ if ( value & AccessFlagBits2::eInvocationMaskReadHUAWEI )
+ result += "InvocationMaskReadHUAWEI | ";
+ if ( value & AccessFlagBits2::eShaderBindingTableReadKHR )
+ result += "ShaderBindingTableReadKHR | ";
+ if ( value & AccessFlagBits2::eMicromapReadEXT )
+ result += "MicromapReadEXT | ";
+ if ( value & AccessFlagBits2::eMicromapWriteEXT )
+ result += "MicromapWriteEXT | ";
+ if ( value & AccessFlagBits2::eOpticalFlowReadNV )
+ result += "OpticalFlowReadNV | ";
+ if ( value & AccessFlagBits2::eOpticalFlowWriteNV )
+ result += "OpticalFlowWriteNV | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( SubmitFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & SubmitFlagBits::eProtected )
+ result += "Protected | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( RenderingFlags value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & RenderingFlagBits::eContentsSecondaryCommandBuffers )
+ result += "ContentsSecondaryCommandBuffers | ";
+ if ( value & RenderingFlagBits::eSuspending )
+ result += "Suspending | ";
+ if ( value & RenderingFlagBits::eResuming )
+ result += "Resuming | ";
+ if ( value & RenderingFlagBits::eEnableLegacyDitheringEXT )
+ result += "EnableLegacyDitheringEXT | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlags2 value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & FormatFeatureFlagBits2::eSampledImage )
+ result += "SampledImage | ";
+ if ( value & FormatFeatureFlagBits2::eStorageImage )
+ result += "StorageImage | ";
+ if ( value & FormatFeatureFlagBits2::eStorageImageAtomic )
+ result += "StorageImageAtomic | ";
+ if ( value & FormatFeatureFlagBits2::eUniformTexelBuffer )
+ result += "UniformTexelBuffer | ";
+ if ( value & FormatFeatureFlagBits2::eStorageTexelBuffer )
+ result += "StorageTexelBuffer | ";
+ if ( value & FormatFeatureFlagBits2::eStorageTexelBufferAtomic )
+ result += "StorageTexelBufferAtomic | ";
+ if ( value & FormatFeatureFlagBits2::eVertexBuffer )
+ result += "VertexBuffer | ";
+ if ( value & FormatFeatureFlagBits2::eColorAttachment )
+ result += "ColorAttachment | ";
+ if ( value & FormatFeatureFlagBits2::eColorAttachmentBlend )
+ result += "ColorAttachmentBlend | ";
+ if ( value & FormatFeatureFlagBits2::eDepthStencilAttachment )
+ result += "DepthStencilAttachment | ";
+ if ( value & FormatFeatureFlagBits2::eBlitSrc )
+ result += "BlitSrc | ";
+ if ( value & FormatFeatureFlagBits2::eBlitDst )
+ result += "BlitDst | ";
+ if ( value & FormatFeatureFlagBits2::eSampledImageFilterLinear )
+ result += "SampledImageFilterLinear | ";
+ if ( value & FormatFeatureFlagBits2::eSampledImageFilterCubic )
+ result += "SampledImageFilterCubic | ";
+ if ( value & FormatFeatureFlagBits2::eTransferSrc )
+ result += "TransferSrc | ";
+ if ( value & FormatFeatureFlagBits2::eTransferDst )
+ result += "TransferDst | ";
+ if ( value & FormatFeatureFlagBits2::eSampledImageFilterMinmax )
+ result += "SampledImageFilterMinmax | ";
+ if ( value & FormatFeatureFlagBits2::eMidpointChromaSamples )
+ result += "MidpointChromaSamples | ";
+ if ( value & FormatFeatureFlagBits2::eSampledImageYcbcrConversionLinearFilter )
+ result += "SampledImageYcbcrConversionLinearFilter | ";
+ if ( value & FormatFeatureFlagBits2::eSampledImageYcbcrConversionSeparateReconstructionFilter )
+ result += "SampledImageYcbcrConversionSeparateReconstructionFilter | ";
+ if ( value & FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicit )
+ result += "SampledImageYcbcrConversionChromaReconstructionExplicit | ";
+ if ( value & FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable )
+ result += "SampledImageYcbcrConversionChromaReconstructionExplicitForceable | ";
+ if ( value & FormatFeatureFlagBits2::eDisjoint )
+ result += "Disjoint | ";
+ if ( value & FormatFeatureFlagBits2::eCositedChromaSamples )
+ result += "CositedChromaSamples | ";
+ if ( value & FormatFeatureFlagBits2::eStorageReadWithoutFormat )
+ result += "StorageReadWithoutFormat | ";
+ if ( value & FormatFeatureFlagBits2::eStorageWriteWithoutFormat )
+ result += "StorageWriteWithoutFormat | ";
+ if ( value & FormatFeatureFlagBits2::eSampledImageDepthComparison )
+ result += "SampledImageDepthComparison | ";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ if ( value & FormatFeatureFlagBits2::eVideoDecodeOutputKHR )
+ result += "VideoDecodeOutputKHR | ";
+ if ( value & FormatFeatureFlagBits2::eVideoDecodeDpbKHR )
+ result += "VideoDecodeDpbKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ if ( value & FormatFeatureFlagBits2::eAccelerationStructureVertexBufferKHR )
+ result += "AccelerationStructureVertexBufferKHR | ";
+ if ( value & FormatFeatureFlagBits2::eFragmentDensityMapEXT )
+ result += "FragmentDensityMapEXT | ";
+ if ( value & FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR )
+ result += "FragmentShadingRateAttachmentKHR | ";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ if ( value & FormatFeatureFlagBits2::eVideoEncodeInputKHR )
+ result += "VideoEncodeInputKHR | ";
+ if ( value & FormatFeatureFlagBits2::eVideoEncodeDpbKHR )
+ result += "VideoEncodeDpbKHR | ";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ if ( value & FormatFeatureFlagBits2::eLinearColorAttachmentNV )
+ result += "LinearColorAttachmentNV | ";
+ if ( value & FormatFeatureFlagBits2::eWeightImageQCOM )
+ result += "WeightImageQCOM | ";
+ if ( value & FormatFeatureFlagBits2::eWeightSampledImageQCOM )
+ result += "WeightSampledImageQCOM | ";
+ if ( value & FormatFeatureFlagBits2::eBlockMatchingQCOM )
+ result += "BlockMatchingQCOM | ";
+ if ( value & FormatFeatureFlagBits2::eBoxFilterSampledQCOM )
+ result += "BoxFilterSampledQCOM | ";
+ if ( value & FormatFeatureFlagBits2::eOpticalFlowImageNV )
+ result += "OpticalFlowImageNV | ";
+ if ( value & FormatFeatureFlagBits2::eOpticalFlowVectorNV )
+ result += "OpticalFlowVectorNV | ";
+ if ( value & FormatFeatureFlagBits2::eOpticalFlowCostNV )
+ result += "OpticalFlowCostNV | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ //=== VK_KHR_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagsKHR value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & CompositeAlphaFlagBitsKHR::eOpaque )
+ result += "Opaque | ";
+ if ( value & CompositeAlphaFlagBitsKHR::ePreMultiplied )
+ result += "PreMultiplied | ";
+ if ( value & CompositeAlphaFlagBitsKHR::ePostMultiplied )
+ result += "PostMultiplied | ";
+ if ( value & CompositeAlphaFlagBitsKHR::eInherit )
+ result += "Inherit | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ //=== VK_KHR_swapchain ===
+
+ VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagsKHR value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions )
+ result += "SplitInstanceBindRegions | ";
+ if ( value & SwapchainCreateFlagBitsKHR::eProtected )
+ result += "Protected | ";
+ if ( value & SwapchainCreateFlagBitsKHR::eMutableFormat )
+ result += "MutableFormat | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagsKHR value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocal )
+ result += "Local | ";
+ if ( value & DeviceGroupPresentModeFlagBitsKHR::eRemote )
+ result += "Remote | ";
+ if ( value & DeviceGroupPresentModeFlagBitsKHR::eSum )
+ result += "Sum | ";
+ if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice )
+ result += "LocalMultiDevice | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ //=== VK_KHR_display ===
+
+ VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagsKHR )
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagsKHR value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & DisplayPlaneAlphaFlagBitsKHR::eOpaque )
+ result += "Opaque | ";
+ if ( value & DisplayPlaneAlphaFlagBitsKHR::eGlobal )
+ result += "Global | ";
+ if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixel )
+ result += "PerPixel | ";
+ if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied )
+ result += "PerPixelPremultiplied | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagsKHR )
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagsKHR value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & SurfaceTransformFlagBitsKHR::eIdentity )
+ result += "Identity | ";
+ if ( value & SurfaceTransformFlagBitsKHR::eRotate90 )
+ result += "Rotate90 | ";
+ if ( value & SurfaceTransformFlagBitsKHR::eRotate180 )
+ result += "Rotate180 | ";
+ if ( value & SurfaceTransformFlagBitsKHR::eRotate270 )
+ result += "Rotate270 | ";
+ if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirror )
+ result += "HorizontalMirror | ";
+ if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 )
+ result += "HorizontalMirrorRotate90 | ";
+ if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 )
+ result += "HorizontalMirrorRotate180 | ";
+ if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 )
+ result += "HorizontalMirrorRotate270 | ";
+ if ( value & SurfaceTransformFlagBitsKHR::eInherit )
+ result += "Inherit | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+ //=== VK_KHR_xlib_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagsKHR )
+ {
+ return "{}";
+ }
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+ //=== VK_KHR_xcb_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagsKHR )
+ {
+ return "{}";
+ }
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+ //=== VK_KHR_wayland_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagsKHR )
+ {
+ return "{}";
+ }
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+ //=== VK_KHR_android_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagsKHR )
+ {
+ return "{}";
+ }
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+ //=== VK_KHR_win32_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagsKHR )
+ {
+ return "{}";
+ }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ //=== VK_EXT_debug_report ===
+
+ VULKAN_HPP_INLINE std::string to_string( DebugReportFlagsEXT value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & DebugReportFlagBitsEXT::eInformation )
+ result += "Information | ";
+ if ( value & DebugReportFlagBitsEXT::eWarning )
+ result += "Warning | ";
+ if ( value & DebugReportFlagBitsEXT::ePerformanceWarning )
+ result += "PerformanceWarning | ";
+ if ( value & DebugReportFlagBitsEXT::eError )
+ result += "Error | ";
+ if ( value & DebugReportFlagBitsEXT::eDebug )
+ result += "Debug | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_KHR_video_queue ===
+
+ VULKAN_HPP_INLINE std::string to_string( VideoCodecOperationFlagsKHR value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ if ( value & VideoCodecOperationFlagBitsKHR::eEncodeH264EXT )
+ result += "EncodeH264EXT | ";
+ if ( value & VideoCodecOperationFlagBitsKHR::eEncodeH265EXT )
+ result += "EncodeH265EXT | ";
+ if ( value & VideoCodecOperationFlagBitsKHR::eDecodeH264EXT )
+ result += "DecodeH264EXT | ";
+ if ( value & VideoCodecOperationFlagBitsKHR::eDecodeH265EXT )
+ result += "DecodeH265EXT | ";
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoChromaSubsamplingFlagsKHR value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & VideoChromaSubsamplingFlagBitsKHR::eMonochrome )
+ result += "Monochrome | ";
+ if ( value & VideoChromaSubsamplingFlagBitsKHR::e420 )
+ result += "420 | ";
+ if ( value & VideoChromaSubsamplingFlagBitsKHR::e422 )
+ result += "422 | ";
+ if ( value & VideoChromaSubsamplingFlagBitsKHR::e444 )
+ result += "444 | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoComponentBitDepthFlagsKHR value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & VideoComponentBitDepthFlagBitsKHR::e8 )
+ result += "8 | ";
+ if ( value & VideoComponentBitDepthFlagBitsKHR::e10 )
+ result += "10 | ";
+ if ( value & VideoComponentBitDepthFlagBitsKHR::e12 )
+ result += "12 | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoCapabilityFlagsKHR value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & VideoCapabilityFlagBitsKHR::eProtectedContent )
+ result += "ProtectedContent | ";
+ if ( value & VideoCapabilityFlagBitsKHR::eSeparateReferenceImages )
+ result += "SeparateReferenceImages | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoSessionCreateFlagsKHR value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & VideoSessionCreateFlagBitsKHR::eProtectedContent )
+ result += "ProtectedContent | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoSessionParametersCreateFlagsKHR )
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoBeginCodingFlagsKHR )
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEndCodingFlagsKHR )
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoCodingControlFlagsKHR value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & VideoCodingControlFlagBitsKHR::eReset )
+ result += "Reset | ";
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ if ( value & VideoCodingControlFlagBitsKHR::eEncodeRateControl )
+ result += "EncodeRateControl | ";
+ if ( value & VideoCodingControlFlagBitsKHR::eEncodeRateControlLayer )
+ result += "EncodeRateControlLayer | ";
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_KHR_video_decode_queue ===
+
+ VULKAN_HPP_INLINE std::string to_string( VideoDecodeCapabilityFlagsKHR value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputCoincide )
+ result += "DpbAndOutputCoincide | ";
+ if ( value & VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputDistinct )
+ result += "DpbAndOutputDistinct | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoDecodeUsageFlagsKHR value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & VideoDecodeUsageFlagBitsKHR::eTranscoding )
+ result += "Transcoding | ";
+ if ( value & VideoDecodeUsageFlagBitsKHR::eOffline )
+ result += "Offline | ";
+ if ( value & VideoDecodeUsageFlagBitsKHR::eStreaming )
+ result += "Streaming | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoDecodeFlagsKHR )
+ {
+ return "{}";
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ //=== VK_EXT_transform_feedback ===
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagsEXT )
+ {
+ return "{}";
+ }
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_EXT_video_encode_h264 ===
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CapabilityFlagsEXT value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eDirect8X8InferenceEnabled )
+ result += "Direct8X8InferenceEnabled | ";
+ if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eDirect8X8InferenceDisabled )
+ result += "Direct8X8InferenceDisabled | ";
+ if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eSeparateColourPlane )
+ result += "SeparateColourPlane | ";
+ if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eQpprimeYZeroTransformBypass )
+ result += "QpprimeYZeroTransformBypass | ";
+ if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eScalingLists )
+ result += "ScalingLists | ";
+ if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eHrdCompliance )
+ result += "HrdCompliance | ";
+ if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eChromaQpOffset )
+ result += "ChromaQpOffset | ";
+ if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eSecondChromaQpOffset )
+ result += "SecondChromaQpOffset | ";
+ if ( value & VideoEncodeH264CapabilityFlagBitsEXT::ePicInitQpMinus26 )
+ result += "PicInitQpMinus26 | ";
+ if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eWeightedPred )
+ result += "WeightedPred | ";
+ if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eWeightedBipredExplicit )
+ result += "WeightedBipredExplicit | ";
+ if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eWeightedBipredImplicit )
+ result += "WeightedBipredImplicit | ";
+ if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eWeightedPredNoTable )
+ result += "WeightedPredNoTable | ";
+ if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eTransform8X8 )
+ result += "Transform8X8 | ";
+ if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eCabac )
+ result += "Cabac | ";
+ if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eCavlc )
+ result += "Cavlc | ";
+ if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterDisabled )
+ result += "DeblockingFilterDisabled | ";
+ if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterEnabled )
+ result += "DeblockingFilterEnabled | ";
+ if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterPartial )
+ result += "DeblockingFilterPartial | ";
+ if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eDisableDirectSpatialMvPred )
+ result += "DisableDirectSpatialMvPred | ";
+ if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eMultipleSlicePerFrame )
+ result += "MultipleSlicePerFrame | ";
+ if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eSliceMbCount )
+ result += "SliceMbCount | ";
+ if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eRowUnalignedSlice )
+ result += "RowUnalignedSlice | ";
+ if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eDifferentSliceType )
+ result += "DifferentSliceType | ";
+ if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eBFrameInL1List )
+ result += "BFrameInL1List | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264InputModeFlagsEXT value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & VideoEncodeH264InputModeFlagBitsEXT::eFrame )
+ result += "Frame | ";
+ if ( value & VideoEncodeH264InputModeFlagBitsEXT::eSlice )
+ result += "Slice | ";
+ if ( value & VideoEncodeH264InputModeFlagBitsEXT::eNonVcl )
+ result += "NonVcl | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264OutputModeFlagsEXT value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eFrame )
+ result += "Frame | ";
+ if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eSlice )
+ result += "Slice | ";
+ if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eNonVcl )
+ result += "NonVcl | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_EXT_video_encode_h265 ===
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CapabilityFlagsEXT value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eSeparateColourPlane )
+ result += "SeparateColourPlane | ";
+ if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eScalingLists )
+ result += "ScalingLists | ";
+ if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eSampleAdaptiveOffsetEnabled )
+ result += "SampleAdaptiveOffsetEnabled | ";
+ if ( value & VideoEncodeH265CapabilityFlagBitsEXT::ePcmEnable )
+ result += "PcmEnable | ";
+ if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eSpsTemporalMvpEnabled )
+ result += "SpsTemporalMvpEnabled | ";
+ if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eHrdCompliance )
+ result += "HrdCompliance | ";
+ if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eInitQpMinus26 )
+ result += "InitQpMinus26 | ";
+ if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eLog2ParallelMergeLevelMinus2 )
+ result += "Log2ParallelMergeLevelMinus2 | ";
+ if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eSignDataHidingEnabled )
+ result += "SignDataHidingEnabled | ";
+ if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eTransformSkipEnabled )
+ result += "TransformSkipEnabled | ";
+ if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eTransformSkipDisabled )
+ result += "TransformSkipDisabled | ";
+ if ( value & VideoEncodeH265CapabilityFlagBitsEXT::ePpsSliceChromaQpOffsetsPresent )
+ result += "PpsSliceChromaQpOffsetsPresent | ";
+ if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eWeightedPred )
+ result += "WeightedPred | ";
+ if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eWeightedBipred )
+ result += "WeightedBipred | ";
+ if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eWeightedPredNoTable )
+ result += "WeightedPredNoTable | ";
+ if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eTransquantBypassEnabled )
+ result += "TransquantBypassEnabled | ";
+ if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eEntropyCodingSyncEnabled )
+ result += "EntropyCodingSyncEnabled | ";
+ if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eDeblockingFilterOverrideEnabled )
+ result += "DeblockingFilterOverrideEnabled | ";
+ if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eMultipleTilePerFrame )
+ result += "MultipleTilePerFrame | ";
+ if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eMultipleSlicePerTile )
+ result += "MultipleSlicePerTile | ";
+ if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eMultipleTilePerSlice )
+ result += "MultipleTilePerSlice | ";
+ if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eSliceSegmentCtbCount )
+ result += "SliceSegmentCtbCount | ";
+ if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eRowUnalignedSliceSegment )
+ result += "RowUnalignedSliceSegment | ";
+ if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eDependentSliceSegment )
+ result += "DependentSliceSegment | ";
+ if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eDifferentSliceType )
+ result += "DifferentSliceType | ";
+ if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eBFrameInL1List )
+ result += "BFrameInL1List | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265InputModeFlagsEXT value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & VideoEncodeH265InputModeFlagBitsEXT::eFrame )
+ result += "Frame | ";
+ if ( value & VideoEncodeH265InputModeFlagBitsEXT::eSliceSegment )
+ result += "SliceSegment | ";
+ if ( value & VideoEncodeH265InputModeFlagBitsEXT::eNonVcl )
+ result += "NonVcl | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265OutputModeFlagsEXT value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & VideoEncodeH265OutputModeFlagBitsEXT::eFrame )
+ result += "Frame | ";
+ if ( value & VideoEncodeH265OutputModeFlagBitsEXT::eSliceSegment )
+ result += "SliceSegment | ";
+ if ( value & VideoEncodeH265OutputModeFlagBitsEXT::eNonVcl )
+ result += "NonVcl | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CtbSizeFlagsEXT value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & VideoEncodeH265CtbSizeFlagBitsEXT::e16 )
+ result += "16 | ";
+ if ( value & VideoEncodeH265CtbSizeFlagBitsEXT::e32 )
+ result += "32 | ";
+ if ( value & VideoEncodeH265CtbSizeFlagBitsEXT::e64 )
+ result += "64 | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265TransformBlockSizeFlagsEXT value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & VideoEncodeH265TransformBlockSizeFlagBitsEXT::e4 )
+ result += "4 | ";
+ if ( value & VideoEncodeH265TransformBlockSizeFlagBitsEXT::e8 )
+ result += "8 | ";
+ if ( value & VideoEncodeH265TransformBlockSizeFlagBitsEXT::e16 )
+ result += "16 | ";
+ if ( value & VideoEncodeH265TransformBlockSizeFlagBitsEXT::e32 )
+ result += "32 | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_EXT_video_decode_h264 ===
+
+ VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264PictureLayoutFlagsEXT value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & VideoDecodeH264PictureLayoutFlagBitsEXT::eInterlacedInterleavedLines )
+ result += "InterlacedInterleavedLines | ";
+ if ( value & VideoDecodeH264PictureLayoutFlagBitsEXT::eInterlacedSeparatePlanes )
+ result += "InterlacedSeparatePlanes | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_USE_PLATFORM_GGP )
+ //=== VK_GGP_stream_descriptor_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagsGGP )
+ {
+ return "{}";
+ }
+#endif /*VK_USE_PLATFORM_GGP*/
+
+ //=== VK_NV_external_memory_capabilities ===
+
+ VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagsNV value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 )
+ result += "OpaqueWin32 | ";
+ if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt )
+ result += "OpaqueWin32Kmt | ";
+ if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image )
+ result += "D3D11Image | ";
+ if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt )
+ result += "D3D11ImageKmt | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagsNV value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly )
+ result += "DedicatedOnly | ";
+ if ( value & ExternalMemoryFeatureFlagBitsNV::eExportable )
+ result += "Exportable | ";
+ if ( value & ExternalMemoryFeatureFlagBitsNV::eImportable )
+ result += "Importable | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+ //=== VK_NN_vi_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagsNN )
+ {
+ return "{}";
+ }
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+ //=== VK_EXT_conditional_rendering ===
+
+ VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagsEXT value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & ConditionalRenderingFlagBitsEXT::eInverted )
+ result += "Inverted | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ //=== VK_EXT_display_surface_counter ===
+
+ VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagsEXT value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & SurfaceCounterFlagBitsEXT::eVblank )
+ result += "Vblank | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ //=== VK_NV_viewport_swizzle ===
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagsNV )
+ {
+ return "{}";
+ }
+
+ //=== VK_EXT_discard_rectangles ===
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagsEXT )
+ {
+ return "{}";
+ }
+
+ //=== VK_EXT_conservative_rasterization ===
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagsEXT )
+ {
+ return "{}";
+ }
+
+ //=== VK_EXT_depth_clip_enable ===
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagsEXT )
+ {
+ return "{}";
+ }
+
+ //=== VK_KHR_performance_query ===
+
+ VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagsKHR value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting )
+ result += "PerformanceImpacting | ";
+ if ( value & PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted )
+ result += "ConcurrentlyImpacted | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagsKHR )
+ {
+ return "{}";
+ }
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+ //=== VK_MVK_ios_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagsMVK )
+ {
+ return "{}";
+ }
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+ //=== VK_MVK_macos_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagsMVK )
+ {
+ return "{}";
+ }
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+ //=== VK_EXT_debug_utils ===
+
+ VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagsEXT value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eVerbose )
+ result += "Verbose | ";
+ if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eInfo )
+ result += "Info | ";
+ if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eWarning )
+ result += "Warning | ";
+ if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eError )
+ result += "Error | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagsEXT value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & DebugUtilsMessageTypeFlagBitsEXT::eGeneral )
+ result += "General | ";
+ if ( value & DebugUtilsMessageTypeFlagBitsEXT::eValidation )
+ result += "Validation | ";
+ if ( value & DebugUtilsMessageTypeFlagBitsEXT::ePerformance )
+ result += "Performance | ";
+ if ( value & DebugUtilsMessageTypeFlagBitsEXT::eDeviceAddressBinding )
+ result += "DeviceAddressBinding | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagsEXT )
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagsEXT )
+ {
+ return "{}";
+ }
+
+ //=== VK_NV_fragment_coverage_to_color ===
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagsNV )
+ {
+ return "{}";
+ }
+
+ //=== VK_KHR_acceleration_structure ===
+
+ VULKAN_HPP_INLINE std::string to_string( GeometryFlagsKHR value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & GeometryFlagBitsKHR::eOpaque )
+ result += "Opaque | ";
+ if ( value & GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation )
+ result += "NoDuplicateAnyHitInvocation | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagsKHR value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable )
+ result += "TriangleFacingCullDisable | ";
+ if ( value & GeometryInstanceFlagBitsKHR::eTriangleFlipFacing )
+ result += "TriangleFlipFacing | ";
+ if ( value & GeometryInstanceFlagBitsKHR::eForceOpaque )
+ result += "ForceOpaque | ";
+ if ( value & GeometryInstanceFlagBitsKHR::eForceNoOpaque )
+ result += "ForceNoOpaque | ";
+ if ( value & GeometryInstanceFlagBitsKHR::eForceOpacityMicromap2StateEXT )
+ result += "ForceOpacityMicromap2StateEXT | ";
+ if ( value & GeometryInstanceFlagBitsKHR::eDisableOpacityMicromapsEXT )
+ result += "DisableOpacityMicromapsEXT | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagsKHR value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowUpdate )
+ result += "AllowUpdate | ";
+ if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowCompaction )
+ result += "AllowCompaction | ";
+ if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace )
+ result += "PreferFastTrace | ";
+ if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild )
+ result += "PreferFastBuild | ";
+ if ( value & BuildAccelerationStructureFlagBitsKHR::eLowMemory )
+ result += "LowMemory | ";
+ if ( value & BuildAccelerationStructureFlagBitsKHR::eMotionNV )
+ result += "MotionNV | ";
+ if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapUpdateEXT )
+ result += "AllowOpacityMicromapUpdateEXT | ";
+ if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowDisableOpacityMicromapsEXT )
+ result += "AllowDisableOpacityMicromapsEXT | ";
+ if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapDataUpdateEXT )
+ result += "AllowOpacityMicromapDataUpdateEXT | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( AccelerationStructureCreateFlagsKHR value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay )
+ result += "DeviceAddressCaptureReplay | ";
+ if ( value & AccelerationStructureCreateFlagBitsKHR::eMotionNV )
+ result += "MotionNV | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ //=== VK_NV_framebuffer_mixed_samples ===
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagsNV )
+ {
+ return "{}";
+ }
+
+ //=== VK_EXT_validation_cache ===
+
+ VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagsEXT )
+ {
+ return "{}";
+ }
+
+ //=== VK_AMD_pipeline_compiler_control ===
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagsAMD )
+ {
+ return "{}";
+ }
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+ //=== VK_FUCHSIA_imagepipe_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagsFUCHSIA )
+ {
+ return "{}";
+ }
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ //=== VK_EXT_metal_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagsEXT )
+ {
+ return "{}";
+ }
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+ //=== VK_AMD_shader_core_properties2 ===
+
+ VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagsAMD )
+ {
+ return "{}";
+ }
+
+ //=== VK_NV_coverage_reduction_mode ===
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagsNV )
+ {
+ return "{}";
+ }
+
+ //=== VK_EXT_headless_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagsEXT )
+ {
+ return "{}";
+ }
+
+ //=== VK_NV_device_generated_commands ===
+
+ VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagsNV value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & IndirectStateFlagBitsNV::eFlagFrontface )
+ result += "FlagFrontface | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagsNV value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess )
+ result += "ExplicitPreprocess | ";
+ if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences )
+ result += "IndexedSequences | ";
+ if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences )
+ result += "UnorderedSequences | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ //=== VK_EXT_device_memory_report ===
+
+ VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportFlagsEXT )
+ {
+ return "{}";
+ }
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_KHR_video_encode_queue ===
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeFlagsKHR )
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeCapabilityFlagsKHR value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & VideoEncodeCapabilityFlagBitsKHR::ePrecedingExternallyEncodedBytes )
+ result += "PrecedingExternallyEncodedBytes | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeUsageFlagsKHR value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & VideoEncodeUsageFlagBitsKHR::eTranscoding )
+ result += "Transcoding | ";
+ if ( value & VideoEncodeUsageFlagBitsKHR::eStreaming )
+ result += "Streaming | ";
+ if ( value & VideoEncodeUsageFlagBitsKHR::eRecording )
+ result += "Recording | ";
+ if ( value & VideoEncodeUsageFlagBitsKHR::eConferencing )
+ result += "Conferencing | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeContentFlagsKHR value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & VideoEncodeContentFlagBitsKHR::eCamera )
+ result += "Camera | ";
+ if ( value & VideoEncodeContentFlagBitsKHR::eDesktop )
+ result += "Desktop | ";
+ if ( value & VideoEncodeContentFlagBitsKHR::eRendered )
+ result += "Rendered | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlFlagsKHR )
+ {
+ return "{}";
+ }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlModeFlagsKHR value )
+  {
+    if ( !value )
+      return "{}";
+
+    // No individual rate control mode bits are stringified for this type; avoid trimming an
+    // empty string, which would otherwise produce the malformed "{  }".
+    std::string result;
+    if ( result.empty() )
+      return "{}";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ //=== VK_NV_device_diagnostics_config ===
+
+ VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagsNV value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo )
+ result += "EnableShaderDebugInfo | ";
+ if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking )
+ result += "EnableResourceTracking | ";
+ if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints )
+ result += "EnableAutomaticCheckpoints | ";
+ if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderErrorReporting )
+ result += "EnableShaderErrorReporting | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ //=== VK_EXT_metal_objects ===
+
+ VULKAN_HPP_INLINE std::string to_string( ExportMetalObjectTypeFlagsEXT value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & ExportMetalObjectTypeFlagBitsEXT::eMetalDevice )
+ result += "MetalDevice | ";
+ if ( value & ExportMetalObjectTypeFlagBitsEXT::eMetalCommandQueue )
+ result += "MetalCommandQueue | ";
+ if ( value & ExportMetalObjectTypeFlagBitsEXT::eMetalBuffer )
+ result += "MetalBuffer | ";
+ if ( value & ExportMetalObjectTypeFlagBitsEXT::eMetalTexture )
+ result += "MetalTexture | ";
+ if ( value & ExportMetalObjectTypeFlagBitsEXT::eMetalIosurface )
+ result += "MetalIosurface | ";
+ if ( value & ExportMetalObjectTypeFlagBitsEXT::eMetalSharedEvent )
+ result += "MetalSharedEvent | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+ //=== VK_EXT_graphics_pipeline_library ===
+
+ VULKAN_HPP_INLINE std::string to_string( GraphicsPipelineLibraryFlagsEXT value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & GraphicsPipelineLibraryFlagBitsEXT::eVertexInputInterface )
+ result += "VertexInputInterface | ";
+ if ( value & GraphicsPipelineLibraryFlagBitsEXT::ePreRasterizationShaders )
+ result += "PreRasterizationShaders | ";
+ if ( value & GraphicsPipelineLibraryFlagBitsEXT::eFragmentShader )
+ result += "FragmentShader | ";
+ if ( value & GraphicsPipelineLibraryFlagBitsEXT::eFragmentOutputInterface )
+ result += "FragmentOutputInterface | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ //=== VK_NV_ray_tracing_motion_blur ===
+
+ VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMotionInfoFlagsNV )
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMotionInstanceFlagsNV )
+ {
+ return "{}";
+ }
+
+ //=== VK_EXT_image_compression_control ===
+
+ VULKAN_HPP_INLINE std::string to_string( ImageCompressionFlagsEXT value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & ImageCompressionFlagBitsEXT::eFixedRateDefault )
+ result += "FixedRateDefault | ";
+ if ( value & ImageCompressionFlagBitsEXT::eFixedRateExplicit )
+ result += "FixedRateExplicit | ";
+ if ( value & ImageCompressionFlagBitsEXT::eDisabled )
+ result += "Disabled | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ImageCompressionFixedRateFlagsEXT value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & ImageCompressionFixedRateFlagBitsEXT::e1Bpc )
+ result += "1Bpc | ";
+ if ( value & ImageCompressionFixedRateFlagBitsEXT::e2Bpc )
+ result += "2Bpc | ";
+ if ( value & ImageCompressionFixedRateFlagBitsEXT::e3Bpc )
+ result += "3Bpc | ";
+ if ( value & ImageCompressionFixedRateFlagBitsEXT::e4Bpc )
+ result += "4Bpc | ";
+ if ( value & ImageCompressionFixedRateFlagBitsEXT::e5Bpc )
+ result += "5Bpc | ";
+ if ( value & ImageCompressionFixedRateFlagBitsEXT::e6Bpc )
+ result += "6Bpc | ";
+ if ( value & ImageCompressionFixedRateFlagBitsEXT::e7Bpc )
+ result += "7Bpc | ";
+ if ( value & ImageCompressionFixedRateFlagBitsEXT::e8Bpc )
+ result += "8Bpc | ";
+ if ( value & ImageCompressionFixedRateFlagBitsEXT::e9Bpc )
+ result += "9Bpc | ";
+ if ( value & ImageCompressionFixedRateFlagBitsEXT::e10Bpc )
+ result += "10Bpc | ";
+ if ( value & ImageCompressionFixedRateFlagBitsEXT::e11Bpc )
+ result += "11Bpc | ";
+ if ( value & ImageCompressionFixedRateFlagBitsEXT::e12Bpc )
+ result += "12Bpc | ";
+ if ( value & ImageCompressionFixedRateFlagBitsEXT::e13Bpc )
+ result += "13Bpc | ";
+ if ( value & ImageCompressionFixedRateFlagBitsEXT::e14Bpc )
+ result += "14Bpc | ";
+ if ( value & ImageCompressionFixedRateFlagBitsEXT::e15Bpc )
+ result += "15Bpc | ";
+ if ( value & ImageCompressionFixedRateFlagBitsEXT::e16Bpc )
+ result += "16Bpc | ";
+ if ( value & ImageCompressionFixedRateFlagBitsEXT::e17Bpc )
+ result += "17Bpc | ";
+ if ( value & ImageCompressionFixedRateFlagBitsEXT::e18Bpc )
+ result += "18Bpc | ";
+ if ( value & ImageCompressionFixedRateFlagBitsEXT::e19Bpc )
+ result += "19Bpc | ";
+ if ( value & ImageCompressionFixedRateFlagBitsEXT::e20Bpc )
+ result += "20Bpc | ";
+ if ( value & ImageCompressionFixedRateFlagBitsEXT::e21Bpc )
+ result += "21Bpc | ";
+ if ( value & ImageCompressionFixedRateFlagBitsEXT::e22Bpc )
+ result += "22Bpc | ";
+ if ( value & ImageCompressionFixedRateFlagBitsEXT::e23Bpc )
+ result += "23Bpc | ";
+ if ( value & ImageCompressionFixedRateFlagBitsEXT::e24Bpc )
+ result += "24Bpc | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+ //=== VK_EXT_directfb_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( DirectFBSurfaceCreateFlagsEXT )
+ {
+ return "{}";
+ }
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+ //=== VK_EXT_device_address_binding_report ===
+
+ VULKAN_HPP_INLINE std::string to_string( DeviceAddressBindingFlagsEXT value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & DeviceAddressBindingFlagBitsEXT::eInternalObject )
+ result += "InternalObject | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+ //=== VK_FUCHSIA_buffer_collection ===
+
+ VULKAN_HPP_INLINE std::string to_string( ImageFormatConstraintsFlagsFUCHSIA )
+ {
+ return "{}";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ImageConstraintsInfoFlagsFUCHSIA value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadRarely )
+ result += "CpuReadRarely | ";
+ if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadOften )
+ result += "CpuReadOften | ";
+ if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteRarely )
+ result += "CpuWriteRarely | ";
+ if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteOften )
+ result += "CpuWriteOften | ";
+ if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eProtectedOptional )
+ result += "ProtectedOptional | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+ //=== VK_QNX_screen_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( ScreenSurfaceCreateFlagsQNX )
+ {
+ return "{}";
+ }
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+ //=== VK_EXT_opacity_micromap ===
+
+ VULKAN_HPP_INLINE std::string to_string( BuildMicromapFlagsEXT value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & BuildMicromapFlagBitsEXT::ePreferFastTrace )
+ result += "PreferFastTrace | ";
+ if ( value & BuildMicromapFlagBitsEXT::ePreferFastBuild )
+ result += "PreferFastBuild | ";
+ if ( value & BuildMicromapFlagBitsEXT::eAllowCompaction )
+ result += "AllowCompaction | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( MicromapCreateFlagsEXT value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & MicromapCreateFlagBitsEXT::eDeviceAddressCaptureReplay )
+ result += "DeviceAddressCaptureReplay | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ //=== VK_NV_optical_flow ===
+
+ VULKAN_HPP_INLINE std::string to_string( OpticalFlowUsageFlagsNV value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & OpticalFlowUsageFlagBitsNV::eInput )
+ result += "Input | ";
+ if ( value & OpticalFlowUsageFlagBitsNV::eOutput )
+ result += "Output | ";
+ if ( value & OpticalFlowUsageFlagBitsNV::eHint )
+ result += "Hint | ";
+ if ( value & OpticalFlowUsageFlagBitsNV::eCost )
+ result += "Cost | ";
+ if ( value & OpticalFlowUsageFlagBitsNV::eGlobalFlow )
+ result += "GlobalFlow | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( OpticalFlowGridSizeFlagsNV value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & OpticalFlowGridSizeFlagBitsNV::e1X1 )
+ result += "1X1 | ";
+ if ( value & OpticalFlowGridSizeFlagBitsNV::e2X2 )
+ result += "2X2 | ";
+ if ( value & OpticalFlowGridSizeFlagBitsNV::e4X4 )
+ result += "4X4 | ";
+ if ( value & OpticalFlowGridSizeFlagBitsNV::e8X8 )
+ result += "8X8 | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( OpticalFlowSessionCreateFlagsNV value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & OpticalFlowSessionCreateFlagBitsNV::eEnableHint )
+ result += "EnableHint | ";
+ if ( value & OpticalFlowSessionCreateFlagBitsNV::eEnableCost )
+ result += "EnableCost | ";
+ if ( value & OpticalFlowSessionCreateFlagBitsNV::eEnableGlobalFlow )
+ result += "EnableGlobalFlow | ";
+ if ( value & OpticalFlowSessionCreateFlagBitsNV::eAllowRegions )
+ result += "AllowRegions | ";
+ if ( value & OpticalFlowSessionCreateFlagBitsNV::eBothDirections )
+ result += "BothDirections | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( OpticalFlowExecuteFlagsNV value )
+ {
+ if ( !value )
+ return "{}";
+
+ std::string result;
+ if ( value & OpticalFlowExecuteFlagBitsNV::eDisableTemporalHints )
+ result += "DisableTemporalHints | ";
+
+ return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+ }
+
+ //=======================
+ //=== ENUMs to_string ===
+ //=======================
+
+ VULKAN_HPP_INLINE std::string toHexString( uint32_t value )
+ {
+#if __cpp_lib_format
+ return std::format( "{:x}", value );
+#else
+ std::stringstream stream;
+ stream << std::hex << value;
+ return stream.str();
+#endif
+ }
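+
+  // Illustrative sketch (assuming the default vk namespace alias): the enum overloads below
+  // return the bare enumerator name and fall back to toHexString for unknown values, so
+  //   vk::to_string( vk::Result::eSuccess )                       -> "Success"
+  //   vk::to_string( static_cast<vk::Result>( 0x7fffffff ) )      -> "invalid ( 7fffffff )"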
+
+ //=== VK_VERSION_1_0 ===
+
+ VULKAN_HPP_INLINE std::string to_string( Result value )
+ {
+ switch ( value )
+ {
+ case Result::eSuccess: return "Success";
+ case Result::eNotReady: return "NotReady";
+ case Result::eTimeout: return "Timeout";
+ case Result::eEventSet: return "EventSet";
+ case Result::eEventReset: return "EventReset";
+ case Result::eIncomplete: return "Incomplete";
+ case Result::eErrorOutOfHostMemory: return "ErrorOutOfHostMemory";
+ case Result::eErrorOutOfDeviceMemory: return "ErrorOutOfDeviceMemory";
+ case Result::eErrorInitializationFailed: return "ErrorInitializationFailed";
+ case Result::eErrorDeviceLost: return "ErrorDeviceLost";
+ case Result::eErrorMemoryMapFailed: return "ErrorMemoryMapFailed";
+ case Result::eErrorLayerNotPresent: return "ErrorLayerNotPresent";
+ case Result::eErrorExtensionNotPresent: return "ErrorExtensionNotPresent";
+ case Result::eErrorFeatureNotPresent: return "ErrorFeatureNotPresent";
+ case Result::eErrorIncompatibleDriver: return "ErrorIncompatibleDriver";
+ case Result::eErrorTooManyObjects: return "ErrorTooManyObjects";
+ case Result::eErrorFormatNotSupported: return "ErrorFormatNotSupported";
+ case Result::eErrorFragmentedPool: return "ErrorFragmentedPool";
+ case Result::eErrorUnknown: return "ErrorUnknown";
+ case Result::eErrorOutOfPoolMemory: return "ErrorOutOfPoolMemory";
+ case Result::eErrorInvalidExternalHandle: return "ErrorInvalidExternalHandle";
+ case Result::eErrorFragmentation: return "ErrorFragmentation";
+ case Result::eErrorInvalidOpaqueCaptureAddress: return "ErrorInvalidOpaqueCaptureAddress";
+ case Result::ePipelineCompileRequired: return "PipelineCompileRequired";
+ case Result::eErrorSurfaceLostKHR: return "ErrorSurfaceLostKHR";
+ case Result::eErrorNativeWindowInUseKHR: return "ErrorNativeWindowInUseKHR";
+ case Result::eSuboptimalKHR: return "SuboptimalKHR";
+ case Result::eErrorOutOfDateKHR: return "ErrorOutOfDateKHR";
+ case Result::eErrorIncompatibleDisplayKHR: return "ErrorIncompatibleDisplayKHR";
+ case Result::eErrorValidationFailedEXT: return "ErrorValidationFailedEXT";
+ case Result::eErrorInvalidShaderNV: return "ErrorInvalidShaderNV";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case Result::eErrorImageUsageNotSupportedKHR: return "ErrorImageUsageNotSupportedKHR";
+ case Result::eErrorVideoPictureLayoutNotSupportedKHR: return "ErrorVideoPictureLayoutNotSupportedKHR";
+ case Result::eErrorVideoProfileOperationNotSupportedKHR: return "ErrorVideoProfileOperationNotSupportedKHR";
+ case Result::eErrorVideoProfileFormatNotSupportedKHR: return "ErrorVideoProfileFormatNotSupportedKHR";
+ case Result::eErrorVideoProfileCodecNotSupportedKHR: return "ErrorVideoProfileCodecNotSupportedKHR";
+ case Result::eErrorVideoStdVersionNotSupportedKHR: return "ErrorVideoStdVersionNotSupportedKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: return "ErrorInvalidDrmFormatModifierPlaneLayoutEXT";
+ case Result::eErrorNotPermittedKHR: return "ErrorNotPermittedKHR";
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+ case Result::eErrorFullScreenExclusiveModeLostEXT: return "ErrorFullScreenExclusiveModeLostEXT";
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ case Result::eThreadIdleKHR: return "ThreadIdleKHR";
+ case Result::eThreadDoneKHR: return "ThreadDoneKHR";
+ case Result::eOperationDeferredKHR: return "OperationDeferredKHR";
+ case Result::eOperationNotDeferredKHR: return "OperationNotDeferredKHR";
+ case Result::eErrorCompressionExhaustedEXT: return "ErrorCompressionExhaustedEXT";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( StructureType value )
+ {
+ switch ( value )
+ {
+ case StructureType::eApplicationInfo: return "ApplicationInfo";
+ case StructureType::eInstanceCreateInfo: return "InstanceCreateInfo";
+ case StructureType::eDeviceQueueCreateInfo: return "DeviceQueueCreateInfo";
+ case StructureType::eDeviceCreateInfo: return "DeviceCreateInfo";
+ case StructureType::eSubmitInfo: return "SubmitInfo";
+ case StructureType::eMemoryAllocateInfo: return "MemoryAllocateInfo";
+ case StructureType::eMappedMemoryRange: return "MappedMemoryRange";
+ case StructureType::eBindSparseInfo: return "BindSparseInfo";
+ case StructureType::eFenceCreateInfo: return "FenceCreateInfo";
+ case StructureType::eSemaphoreCreateInfo: return "SemaphoreCreateInfo";
+ case StructureType::eEventCreateInfo: return "EventCreateInfo";
+ case StructureType::eQueryPoolCreateInfo: return "QueryPoolCreateInfo";
+ case StructureType::eBufferCreateInfo: return "BufferCreateInfo";
+ case StructureType::eBufferViewCreateInfo: return "BufferViewCreateInfo";
+ case StructureType::eImageCreateInfo: return "ImageCreateInfo";
+ case StructureType::eImageViewCreateInfo: return "ImageViewCreateInfo";
+ case StructureType::eShaderModuleCreateInfo: return "ShaderModuleCreateInfo";
+ case StructureType::ePipelineCacheCreateInfo: return "PipelineCacheCreateInfo";
+ case StructureType::ePipelineShaderStageCreateInfo: return "PipelineShaderStageCreateInfo";
+ case StructureType::ePipelineVertexInputStateCreateInfo: return "PipelineVertexInputStateCreateInfo";
+ case StructureType::ePipelineInputAssemblyStateCreateInfo: return "PipelineInputAssemblyStateCreateInfo";
+ case StructureType::ePipelineTessellationStateCreateInfo: return "PipelineTessellationStateCreateInfo";
+ case StructureType::ePipelineViewportStateCreateInfo: return "PipelineViewportStateCreateInfo";
+ case StructureType::ePipelineRasterizationStateCreateInfo: return "PipelineRasterizationStateCreateInfo";
+ case StructureType::ePipelineMultisampleStateCreateInfo: return "PipelineMultisampleStateCreateInfo";
+ case StructureType::ePipelineDepthStencilStateCreateInfo: return "PipelineDepthStencilStateCreateInfo";
+ case StructureType::ePipelineColorBlendStateCreateInfo: return "PipelineColorBlendStateCreateInfo";
+ case StructureType::ePipelineDynamicStateCreateInfo: return "PipelineDynamicStateCreateInfo";
+ case StructureType::eGraphicsPipelineCreateInfo: return "GraphicsPipelineCreateInfo";
+ case StructureType::eComputePipelineCreateInfo: return "ComputePipelineCreateInfo";
+ case StructureType::ePipelineLayoutCreateInfo: return "PipelineLayoutCreateInfo";
+ case StructureType::eSamplerCreateInfo: return "SamplerCreateInfo";
+ case StructureType::eDescriptorSetLayoutCreateInfo: return "DescriptorSetLayoutCreateInfo";
+ case StructureType::eDescriptorPoolCreateInfo: return "DescriptorPoolCreateInfo";
+ case StructureType::eDescriptorSetAllocateInfo: return "DescriptorSetAllocateInfo";
+ case StructureType::eWriteDescriptorSet: return "WriteDescriptorSet";
+ case StructureType::eCopyDescriptorSet: return "CopyDescriptorSet";
+ case StructureType::eFramebufferCreateInfo: return "FramebufferCreateInfo";
+ case StructureType::eRenderPassCreateInfo: return "RenderPassCreateInfo";
+ case StructureType::eCommandPoolCreateInfo: return "CommandPoolCreateInfo";
+ case StructureType::eCommandBufferAllocateInfo: return "CommandBufferAllocateInfo";
+ case StructureType::eCommandBufferInheritanceInfo: return "CommandBufferInheritanceInfo";
+ case StructureType::eCommandBufferBeginInfo: return "CommandBufferBeginInfo";
+ case StructureType::eRenderPassBeginInfo: return "RenderPassBeginInfo";
+ case StructureType::eBufferMemoryBarrier: return "BufferMemoryBarrier";
+ case StructureType::eImageMemoryBarrier: return "ImageMemoryBarrier";
+ case StructureType::eMemoryBarrier: return "MemoryBarrier";
+ case StructureType::eLoaderInstanceCreateInfo: return "LoaderInstanceCreateInfo";
+ case StructureType::eLoaderDeviceCreateInfo: return "LoaderDeviceCreateInfo";
+ case StructureType::ePhysicalDeviceSubgroupProperties: return "PhysicalDeviceSubgroupProperties";
+ case StructureType::eBindBufferMemoryInfo: return "BindBufferMemoryInfo";
+ case StructureType::eBindImageMemoryInfo: return "BindImageMemoryInfo";
+ case StructureType::ePhysicalDevice16BitStorageFeatures: return "PhysicalDevice16BitStorageFeatures";
+ case StructureType::eMemoryDedicatedRequirements: return "MemoryDedicatedRequirements";
+ case StructureType::eMemoryDedicatedAllocateInfo: return "MemoryDedicatedAllocateInfo";
+ case StructureType::eMemoryAllocateFlagsInfo: return "MemoryAllocateFlagsInfo";
+ case StructureType::eDeviceGroupRenderPassBeginInfo: return "DeviceGroupRenderPassBeginInfo";
+ case StructureType::eDeviceGroupCommandBufferBeginInfo: return "DeviceGroupCommandBufferBeginInfo";
+ case StructureType::eDeviceGroupSubmitInfo: return "DeviceGroupSubmitInfo";
+ case StructureType::eDeviceGroupBindSparseInfo: return "DeviceGroupBindSparseInfo";
+ case StructureType::eBindBufferMemoryDeviceGroupInfo: return "BindBufferMemoryDeviceGroupInfo";
+ case StructureType::eBindImageMemoryDeviceGroupInfo: return "BindImageMemoryDeviceGroupInfo";
+ case StructureType::ePhysicalDeviceGroupProperties: return "PhysicalDeviceGroupProperties";
+ case StructureType::eDeviceGroupDeviceCreateInfo: return "DeviceGroupDeviceCreateInfo";
+ case StructureType::eBufferMemoryRequirementsInfo2: return "BufferMemoryRequirementsInfo2";
+ case StructureType::eImageMemoryRequirementsInfo2: return "ImageMemoryRequirementsInfo2";
+ case StructureType::eImageSparseMemoryRequirementsInfo2: return "ImageSparseMemoryRequirementsInfo2";
+ case StructureType::eMemoryRequirements2: return "MemoryRequirements2";
+ case StructureType::eSparseImageMemoryRequirements2: return "SparseImageMemoryRequirements2";
+ case StructureType::ePhysicalDeviceFeatures2: return "PhysicalDeviceFeatures2";
+ case StructureType::ePhysicalDeviceProperties2: return "PhysicalDeviceProperties2";
+ case StructureType::eFormatProperties2: return "FormatProperties2";
+ case StructureType::eImageFormatProperties2: return "ImageFormatProperties2";
+ case StructureType::ePhysicalDeviceImageFormatInfo2: return "PhysicalDeviceImageFormatInfo2";
+ case StructureType::eQueueFamilyProperties2: return "QueueFamilyProperties2";
+ case StructureType::ePhysicalDeviceMemoryProperties2: return "PhysicalDeviceMemoryProperties2";
+ case StructureType::eSparseImageFormatProperties2: return "SparseImageFormatProperties2";
+ case StructureType::ePhysicalDeviceSparseImageFormatInfo2: return "PhysicalDeviceSparseImageFormatInfo2";
+ case StructureType::ePhysicalDevicePointClippingProperties: return "PhysicalDevicePointClippingProperties";
+ case StructureType::eRenderPassInputAttachmentAspectCreateInfo: return "RenderPassInputAttachmentAspectCreateInfo";
+ case StructureType::eImageViewUsageCreateInfo: return "ImageViewUsageCreateInfo";
+ case StructureType::ePipelineTessellationDomainOriginStateCreateInfo: return "PipelineTessellationDomainOriginStateCreateInfo";
+ case StructureType::eRenderPassMultiviewCreateInfo: return "RenderPassMultiviewCreateInfo";
+ case StructureType::ePhysicalDeviceMultiviewFeatures: return "PhysicalDeviceMultiviewFeatures";
+ case StructureType::ePhysicalDeviceMultiviewProperties: return "PhysicalDeviceMultiviewProperties";
+ case StructureType::ePhysicalDeviceVariablePointersFeatures: return "PhysicalDeviceVariablePointersFeatures";
+ case StructureType::eProtectedSubmitInfo: return "ProtectedSubmitInfo";
+ case StructureType::ePhysicalDeviceProtectedMemoryFeatures: return "PhysicalDeviceProtectedMemoryFeatures";
+ case StructureType::ePhysicalDeviceProtectedMemoryProperties: return "PhysicalDeviceProtectedMemoryProperties";
+ case StructureType::eDeviceQueueInfo2: return "DeviceQueueInfo2";
+ case StructureType::eSamplerYcbcrConversionCreateInfo: return "SamplerYcbcrConversionCreateInfo";
+ case StructureType::eSamplerYcbcrConversionInfo: return "SamplerYcbcrConversionInfo";
+ case StructureType::eBindImagePlaneMemoryInfo: return "BindImagePlaneMemoryInfo";
+ case StructureType::eImagePlaneMemoryRequirementsInfo: return "ImagePlaneMemoryRequirementsInfo";
+ case StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures: return "PhysicalDeviceSamplerYcbcrConversionFeatures";
+ case StructureType::eSamplerYcbcrConversionImageFormatProperties: return "SamplerYcbcrConversionImageFormatProperties";
+ case StructureType::eDescriptorUpdateTemplateCreateInfo: return "DescriptorUpdateTemplateCreateInfo";
+ case StructureType::ePhysicalDeviceExternalImageFormatInfo: return "PhysicalDeviceExternalImageFormatInfo";
+ case StructureType::eExternalImageFormatProperties: return "ExternalImageFormatProperties";
+ case StructureType::ePhysicalDeviceExternalBufferInfo: return "PhysicalDeviceExternalBufferInfo";
+ case StructureType::eExternalBufferProperties: return "ExternalBufferProperties";
+ case StructureType::ePhysicalDeviceIdProperties: return "PhysicalDeviceIdProperties";
+ case StructureType::eExternalMemoryBufferCreateInfo: return "ExternalMemoryBufferCreateInfo";
+ case StructureType::eExternalMemoryImageCreateInfo: return "ExternalMemoryImageCreateInfo";
+ case StructureType::eExportMemoryAllocateInfo: return "ExportMemoryAllocateInfo";
+ case StructureType::ePhysicalDeviceExternalFenceInfo: return "PhysicalDeviceExternalFenceInfo";
+ case StructureType::eExternalFenceProperties: return "ExternalFenceProperties";
+ case StructureType::eExportFenceCreateInfo: return "ExportFenceCreateInfo";
+ case StructureType::eExportSemaphoreCreateInfo: return "ExportSemaphoreCreateInfo";
+ case StructureType::ePhysicalDeviceExternalSemaphoreInfo: return "PhysicalDeviceExternalSemaphoreInfo";
+ case StructureType::eExternalSemaphoreProperties: return "ExternalSemaphoreProperties";
+ case StructureType::ePhysicalDeviceMaintenance3Properties: return "PhysicalDeviceMaintenance3Properties";
+ case StructureType::eDescriptorSetLayoutSupport: return "DescriptorSetLayoutSupport";
+ case StructureType::ePhysicalDeviceShaderDrawParametersFeatures: return "PhysicalDeviceShaderDrawParametersFeatures";
+ case StructureType::ePhysicalDeviceVulkan11Features: return "PhysicalDeviceVulkan11Features";
+ case StructureType::ePhysicalDeviceVulkan11Properties: return "PhysicalDeviceVulkan11Properties";
+ case StructureType::ePhysicalDeviceVulkan12Features: return "PhysicalDeviceVulkan12Features";
+ case StructureType::ePhysicalDeviceVulkan12Properties: return "PhysicalDeviceVulkan12Properties";
+ case StructureType::eImageFormatListCreateInfo: return "ImageFormatListCreateInfo";
+ case StructureType::eAttachmentDescription2: return "AttachmentDescription2";
+ case StructureType::eAttachmentReference2: return "AttachmentReference2";
+ case StructureType::eSubpassDescription2: return "SubpassDescription2";
+ case StructureType::eSubpassDependency2: return "SubpassDependency2";
+ case StructureType::eRenderPassCreateInfo2: return "RenderPassCreateInfo2";
+ case StructureType::eSubpassBeginInfo: return "SubpassBeginInfo";
+ case StructureType::eSubpassEndInfo: return "SubpassEndInfo";
+ case StructureType::ePhysicalDevice8BitStorageFeatures: return "PhysicalDevice8BitStorageFeatures";
+ case StructureType::ePhysicalDeviceDriverProperties: return "PhysicalDeviceDriverProperties";
+ case StructureType::ePhysicalDeviceShaderAtomicInt64Features: return "PhysicalDeviceShaderAtomicInt64Features";
+ case StructureType::ePhysicalDeviceShaderFloat16Int8Features: return "PhysicalDeviceShaderFloat16Int8Features";
+ case StructureType::ePhysicalDeviceFloatControlsProperties: return "PhysicalDeviceFloatControlsProperties";
+ case StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo: return "DescriptorSetLayoutBindingFlagsCreateInfo";
+ case StructureType::ePhysicalDeviceDescriptorIndexingFeatures: return "PhysicalDeviceDescriptorIndexingFeatures";
+ case StructureType::ePhysicalDeviceDescriptorIndexingProperties: return "PhysicalDeviceDescriptorIndexingProperties";
+ case StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo: return "DescriptorSetVariableDescriptorCountAllocateInfo";
+ case StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport: return "DescriptorSetVariableDescriptorCountLayoutSupport";
+ case StructureType::ePhysicalDeviceDepthStencilResolveProperties: return "PhysicalDeviceDepthStencilResolveProperties";
+ case StructureType::eSubpassDescriptionDepthStencilResolve: return "SubpassDescriptionDepthStencilResolve";
+ case StructureType::ePhysicalDeviceScalarBlockLayoutFeatures: return "PhysicalDeviceScalarBlockLayoutFeatures";
+ case StructureType::eImageStencilUsageCreateInfo: return "ImageStencilUsageCreateInfo";
+ case StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties: return "PhysicalDeviceSamplerFilterMinmaxProperties";
+ case StructureType::eSamplerReductionModeCreateInfo: return "SamplerReductionModeCreateInfo";
+ case StructureType::ePhysicalDeviceVulkanMemoryModelFeatures: return "PhysicalDeviceVulkanMemoryModelFeatures";
+ case StructureType::ePhysicalDeviceImagelessFramebufferFeatures: return "PhysicalDeviceImagelessFramebufferFeatures";
+ case StructureType::eFramebufferAttachmentsCreateInfo: return "FramebufferAttachmentsCreateInfo";
+ case StructureType::eFramebufferAttachmentImageInfo: return "FramebufferAttachmentImageInfo";
+ case StructureType::eRenderPassAttachmentBeginInfo: return "RenderPassAttachmentBeginInfo";
+ case StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures: return "PhysicalDeviceUniformBufferStandardLayoutFeatures";
+ case StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures: return "PhysicalDeviceShaderSubgroupExtendedTypesFeatures";
+ case StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures: return "PhysicalDeviceSeparateDepthStencilLayoutsFeatures";
+ case StructureType::eAttachmentReferenceStencilLayout: return "AttachmentReferenceStencilLayout";
+ case StructureType::eAttachmentDescriptionStencilLayout: return "AttachmentDescriptionStencilLayout";
+ case StructureType::ePhysicalDeviceHostQueryResetFeatures: return "PhysicalDeviceHostQueryResetFeatures";
+ case StructureType::ePhysicalDeviceTimelineSemaphoreFeatures: return "PhysicalDeviceTimelineSemaphoreFeatures";
+ case StructureType::ePhysicalDeviceTimelineSemaphoreProperties: return "PhysicalDeviceTimelineSemaphoreProperties";
+ case StructureType::eSemaphoreTypeCreateInfo: return "SemaphoreTypeCreateInfo";
+ case StructureType::eTimelineSemaphoreSubmitInfo: return "TimelineSemaphoreSubmitInfo";
+ case StructureType::eSemaphoreWaitInfo: return "SemaphoreWaitInfo";
+ case StructureType::eSemaphoreSignalInfo: return "SemaphoreSignalInfo";
+ case StructureType::ePhysicalDeviceBufferDeviceAddressFeatures: return "PhysicalDeviceBufferDeviceAddressFeatures";
+ case StructureType::eBufferDeviceAddressInfo: return "BufferDeviceAddressInfo";
+ case StructureType::eBufferOpaqueCaptureAddressCreateInfo: return "BufferOpaqueCaptureAddressCreateInfo";
+ case StructureType::eMemoryOpaqueCaptureAddressAllocateInfo: return "MemoryOpaqueCaptureAddressAllocateInfo";
+ case StructureType::eDeviceMemoryOpaqueCaptureAddressInfo: return "DeviceMemoryOpaqueCaptureAddressInfo";
+ case StructureType::ePhysicalDeviceVulkan13Features: return "PhysicalDeviceVulkan13Features";
+ case StructureType::ePhysicalDeviceVulkan13Properties: return "PhysicalDeviceVulkan13Properties";
+ case StructureType::ePipelineCreationFeedbackCreateInfo: return "PipelineCreationFeedbackCreateInfo";
+ case StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures: return "PhysicalDeviceShaderTerminateInvocationFeatures";
+ case StructureType::ePhysicalDeviceToolProperties: return "PhysicalDeviceToolProperties";
+ case StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures: return "PhysicalDeviceShaderDemoteToHelperInvocationFeatures";
+ case StructureType::ePhysicalDevicePrivateDataFeatures: return "PhysicalDevicePrivateDataFeatures";
+ case StructureType::eDevicePrivateDataCreateInfo: return "DevicePrivateDataCreateInfo";
+ case StructureType::ePrivateDataSlotCreateInfo: return "PrivateDataSlotCreateInfo";
+ case StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures: return "PhysicalDevicePipelineCreationCacheControlFeatures";
+ case StructureType::eMemoryBarrier2: return "MemoryBarrier2";
+ case StructureType::eBufferMemoryBarrier2: return "BufferMemoryBarrier2";
+ case StructureType::eImageMemoryBarrier2: return "ImageMemoryBarrier2";
+ case StructureType::eDependencyInfo: return "DependencyInfo";
+ case StructureType::eSubmitInfo2: return "SubmitInfo2";
+ case StructureType::eSemaphoreSubmitInfo: return "SemaphoreSubmitInfo";
+ case StructureType::eCommandBufferSubmitInfo: return "CommandBufferSubmitInfo";
+ case StructureType::ePhysicalDeviceSynchronization2Features: return "PhysicalDeviceSynchronization2Features";
+ case StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures: return "PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures";
+ case StructureType::ePhysicalDeviceImageRobustnessFeatures: return "PhysicalDeviceImageRobustnessFeatures";
+ case StructureType::eCopyBufferInfo2: return "CopyBufferInfo2";
+ case StructureType::eCopyImageInfo2: return "CopyImageInfo2";
+ case StructureType::eCopyBufferToImageInfo2: return "CopyBufferToImageInfo2";
+ case StructureType::eCopyImageToBufferInfo2: return "CopyImageToBufferInfo2";
+ case StructureType::eBlitImageInfo2: return "BlitImageInfo2";
+ case StructureType::eResolveImageInfo2: return "ResolveImageInfo2";
+ case StructureType::eBufferCopy2: return "BufferCopy2";
+ case StructureType::eImageCopy2: return "ImageCopy2";
+ case StructureType::eImageBlit2: return "ImageBlit2";
+ case StructureType::eBufferImageCopy2: return "BufferImageCopy2";
+ case StructureType::eImageResolve2: return "ImageResolve2";
+ case StructureType::ePhysicalDeviceSubgroupSizeControlProperties: return "PhysicalDeviceSubgroupSizeControlProperties";
+ case StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo: return "PipelineShaderStageRequiredSubgroupSizeCreateInfo";
+ case StructureType::ePhysicalDeviceSubgroupSizeControlFeatures: return "PhysicalDeviceSubgroupSizeControlFeatures";
+ case StructureType::ePhysicalDeviceInlineUniformBlockFeatures: return "PhysicalDeviceInlineUniformBlockFeatures";
+ case StructureType::ePhysicalDeviceInlineUniformBlockProperties: return "PhysicalDeviceInlineUniformBlockProperties";
+ case StructureType::eWriteDescriptorSetInlineUniformBlock: return "WriteDescriptorSetInlineUniformBlock";
+ case StructureType::eDescriptorPoolInlineUniformBlockCreateInfo: return "DescriptorPoolInlineUniformBlockCreateInfo";
+ case StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures: return "PhysicalDeviceTextureCompressionAstcHdrFeatures";
+ case StructureType::eRenderingInfo: return "RenderingInfo";
+ case StructureType::eRenderingAttachmentInfo: return "RenderingAttachmentInfo";
+ case StructureType::ePipelineRenderingCreateInfo: return "PipelineRenderingCreateInfo";
+ case StructureType::ePhysicalDeviceDynamicRenderingFeatures: return "PhysicalDeviceDynamicRenderingFeatures";
+ case StructureType::eCommandBufferInheritanceRenderingInfo: return "CommandBufferInheritanceRenderingInfo";
+ case StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures: return "PhysicalDeviceShaderIntegerDotProductFeatures";
+ case StructureType::ePhysicalDeviceShaderIntegerDotProductProperties: return "PhysicalDeviceShaderIntegerDotProductProperties";
+ case StructureType::ePhysicalDeviceTexelBufferAlignmentProperties: return "PhysicalDeviceTexelBufferAlignmentProperties";
+ case StructureType::eFormatProperties3: return "FormatProperties3";
+ case StructureType::ePhysicalDeviceMaintenance4Features: return "PhysicalDeviceMaintenance4Features";
+ case StructureType::ePhysicalDeviceMaintenance4Properties: return "PhysicalDeviceMaintenance4Properties";
+ case StructureType::eDeviceBufferMemoryRequirements: return "DeviceBufferMemoryRequirements";
+ case StructureType::eDeviceImageMemoryRequirements: return "DeviceImageMemoryRequirements";
+ case StructureType::eSwapchainCreateInfoKHR: return "SwapchainCreateInfoKHR";
+ case StructureType::ePresentInfoKHR: return "PresentInfoKHR";
+ case StructureType::eDeviceGroupPresentCapabilitiesKHR: return "DeviceGroupPresentCapabilitiesKHR";
+ case StructureType::eImageSwapchainCreateInfoKHR: return "ImageSwapchainCreateInfoKHR";
+ case StructureType::eBindImageMemorySwapchainInfoKHR: return "BindImageMemorySwapchainInfoKHR";
+ case StructureType::eAcquireNextImageInfoKHR: return "AcquireNextImageInfoKHR";
+ case StructureType::eDeviceGroupPresentInfoKHR: return "DeviceGroupPresentInfoKHR";
+ case StructureType::eDeviceGroupSwapchainCreateInfoKHR: return "DeviceGroupSwapchainCreateInfoKHR";
+ case StructureType::eDisplayModeCreateInfoKHR: return "DisplayModeCreateInfoKHR";
+ case StructureType::eDisplaySurfaceCreateInfoKHR: return "DisplaySurfaceCreateInfoKHR";
+ case StructureType::eDisplayPresentInfoKHR: return "DisplayPresentInfoKHR";
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+ case StructureType::eXlibSurfaceCreateInfoKHR: return "XlibSurfaceCreateInfoKHR";
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+ case StructureType::eXcbSurfaceCreateInfoKHR: return "XcbSurfaceCreateInfoKHR";
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+ case StructureType::eWaylandSurfaceCreateInfoKHR: return "WaylandSurfaceCreateInfoKHR";
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+ case StructureType::eAndroidSurfaceCreateInfoKHR: return "AndroidSurfaceCreateInfoKHR";
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+ case StructureType::eWin32SurfaceCreateInfoKHR: return "Win32SurfaceCreateInfoKHR";
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ case StructureType::eDebugReportCallbackCreateInfoEXT: return "DebugReportCallbackCreateInfoEXT";
+ case StructureType::ePipelineRasterizationStateRasterizationOrderAMD: return "PipelineRasterizationStateRasterizationOrderAMD";
+ case StructureType::eDebugMarkerObjectNameInfoEXT: return "DebugMarkerObjectNameInfoEXT";
+ case StructureType::eDebugMarkerObjectTagInfoEXT: return "DebugMarkerObjectTagInfoEXT";
+ case StructureType::eDebugMarkerMarkerInfoEXT: return "DebugMarkerMarkerInfoEXT";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case StructureType::eVideoProfileInfoKHR: return "VideoProfileInfoKHR";
+ case StructureType::eVideoCapabilitiesKHR: return "VideoCapabilitiesKHR";
+ case StructureType::eVideoPictureResourceInfoKHR: return "VideoPictureResourceInfoKHR";
+ case StructureType::eVideoSessionMemoryRequirementsKHR: return "VideoSessionMemoryRequirementsKHR";
+ case StructureType::eBindVideoSessionMemoryInfoKHR: return "BindVideoSessionMemoryInfoKHR";
+ case StructureType::eVideoSessionCreateInfoKHR: return "VideoSessionCreateInfoKHR";
+ case StructureType::eVideoSessionParametersCreateInfoKHR: return "VideoSessionParametersCreateInfoKHR";
+ case StructureType::eVideoSessionParametersUpdateInfoKHR: return "VideoSessionParametersUpdateInfoKHR";
+ case StructureType::eVideoBeginCodingInfoKHR: return "VideoBeginCodingInfoKHR";
+ case StructureType::eVideoEndCodingInfoKHR: return "VideoEndCodingInfoKHR";
+ case StructureType::eVideoCodingControlInfoKHR: return "VideoCodingControlInfoKHR";
+ case StructureType::eVideoReferenceSlotInfoKHR: return "VideoReferenceSlotInfoKHR";
+ case StructureType::eQueueFamilyVideoPropertiesKHR: return "QueueFamilyVideoPropertiesKHR";
+ case StructureType::eVideoProfileListInfoKHR: return "VideoProfileListInfoKHR";
+ case StructureType::ePhysicalDeviceVideoFormatInfoKHR: return "PhysicalDeviceVideoFormatInfoKHR";
+ case StructureType::eVideoFormatPropertiesKHR: return "VideoFormatPropertiesKHR";
+ case StructureType::eQueueFamilyQueryResultStatusPropertiesKHR: return "QueueFamilyQueryResultStatusPropertiesKHR";
+ case StructureType::eVideoDecodeInfoKHR: return "VideoDecodeInfoKHR";
+ case StructureType::eVideoDecodeCapabilitiesKHR: return "VideoDecodeCapabilitiesKHR";
+ case StructureType::eVideoDecodeUsageInfoKHR: return "VideoDecodeUsageInfoKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case StructureType::eDedicatedAllocationImageCreateInfoNV: return "DedicatedAllocationImageCreateInfoNV";
+ case StructureType::eDedicatedAllocationBufferCreateInfoNV: return "DedicatedAllocationBufferCreateInfoNV";
+ case StructureType::eDedicatedAllocationMemoryAllocateInfoNV: return "DedicatedAllocationMemoryAllocateInfoNV";
+ case StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT: return "PhysicalDeviceTransformFeedbackFeaturesEXT";
+ case StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT: return "PhysicalDeviceTransformFeedbackPropertiesEXT";
+ case StructureType::ePipelineRasterizationStateStreamCreateInfoEXT: return "PipelineRasterizationStateStreamCreateInfoEXT";
+ case StructureType::eCuModuleCreateInfoNVX: return "CuModuleCreateInfoNVX";
+ case StructureType::eCuFunctionCreateInfoNVX: return "CuFunctionCreateInfoNVX";
+ case StructureType::eCuLaunchInfoNVX: return "CuLaunchInfoNVX";
+ case StructureType::eImageViewHandleInfoNVX: return "ImageViewHandleInfoNVX";
+ case StructureType::eImageViewAddressPropertiesNVX: return "ImageViewAddressPropertiesNVX";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case StructureType::eVideoEncodeH264CapabilitiesEXT: return "VideoEncodeH264CapabilitiesEXT";
+ case StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT: return "VideoEncodeH264SessionParametersCreateInfoEXT";
+ case StructureType::eVideoEncodeH264SessionParametersAddInfoEXT: return "VideoEncodeH264SessionParametersAddInfoEXT";
+ case StructureType::eVideoEncodeH264VclFrameInfoEXT: return "VideoEncodeH264VclFrameInfoEXT";
+ case StructureType::eVideoEncodeH264DpbSlotInfoEXT: return "VideoEncodeH264DpbSlotInfoEXT";
+ case StructureType::eVideoEncodeH264NaluSliceInfoEXT: return "VideoEncodeH264NaluSliceInfoEXT";
+ case StructureType::eVideoEncodeH264EmitPictureParametersInfoEXT: return "VideoEncodeH264EmitPictureParametersInfoEXT";
+ case StructureType::eVideoEncodeH264ProfileInfoEXT: return "VideoEncodeH264ProfileInfoEXT";
+ case StructureType::eVideoEncodeH264RateControlInfoEXT: return "VideoEncodeH264RateControlInfoEXT";
+ case StructureType::eVideoEncodeH264RateControlLayerInfoEXT: return "VideoEncodeH264RateControlLayerInfoEXT";
+ case StructureType::eVideoEncodeH264ReferenceListsInfoEXT: return "VideoEncodeH264ReferenceListsInfoEXT";
+ case StructureType::eVideoEncodeH265CapabilitiesEXT: return "VideoEncodeH265CapabilitiesEXT";
+ case StructureType::eVideoEncodeH265SessionParametersCreateInfoEXT: return "VideoEncodeH265SessionParametersCreateInfoEXT";
+ case StructureType::eVideoEncodeH265SessionParametersAddInfoEXT: return "VideoEncodeH265SessionParametersAddInfoEXT";
+ case StructureType::eVideoEncodeH265VclFrameInfoEXT: return "VideoEncodeH265VclFrameInfoEXT";
+ case StructureType::eVideoEncodeH265DpbSlotInfoEXT: return "VideoEncodeH265DpbSlotInfoEXT";
+ case StructureType::eVideoEncodeH265NaluSliceSegmentInfoEXT: return "VideoEncodeH265NaluSliceSegmentInfoEXT";
+ case StructureType::eVideoEncodeH265EmitPictureParametersInfoEXT: return "VideoEncodeH265EmitPictureParametersInfoEXT";
+ case StructureType::eVideoEncodeH265ProfileInfoEXT: return "VideoEncodeH265ProfileInfoEXT";
+ case StructureType::eVideoEncodeH265ReferenceListsInfoEXT: return "VideoEncodeH265ReferenceListsInfoEXT";
+ case StructureType::eVideoEncodeH265RateControlInfoEXT: return "VideoEncodeH265RateControlInfoEXT";
+ case StructureType::eVideoEncodeH265RateControlLayerInfoEXT: return "VideoEncodeH265RateControlLayerInfoEXT";
+ case StructureType::eVideoDecodeH264CapabilitiesEXT: return "VideoDecodeH264CapabilitiesEXT";
+ case StructureType::eVideoDecodeH264PictureInfoEXT: return "VideoDecodeH264PictureInfoEXT";
+ case StructureType::eVideoDecodeH264ProfileInfoEXT: return "VideoDecodeH264ProfileInfoEXT";
+ case StructureType::eVideoDecodeH264SessionParametersCreateInfoEXT: return "VideoDecodeH264SessionParametersCreateInfoEXT";
+ case StructureType::eVideoDecodeH264SessionParametersAddInfoEXT: return "VideoDecodeH264SessionParametersAddInfoEXT";
+ case StructureType::eVideoDecodeH264DpbSlotInfoEXT: return "VideoDecodeH264DpbSlotInfoEXT";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case StructureType::eTextureLodGatherFormatPropertiesAMD: return "TextureLodGatherFormatPropertiesAMD";
+ case StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR: return "RenderingFragmentShadingRateAttachmentInfoKHR";
+ case StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT: return "RenderingFragmentDensityMapAttachmentInfoEXT";
+ case StructureType::eAttachmentSampleCountInfoAMD: return "AttachmentSampleCountInfoAMD";
+ case StructureType::eMultiviewPerViewAttributesInfoNVX: return "MultiviewPerViewAttributesInfoNVX";
+#if defined( VK_USE_PLATFORM_GGP )
+ case StructureType::eStreamDescriptorSurfaceCreateInfoGGP: return "StreamDescriptorSurfaceCreateInfoGGP";
+#endif /*VK_USE_PLATFORM_GGP*/
+ case StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV: return "PhysicalDeviceCornerSampledImageFeaturesNV";
+ case StructureType::eExternalMemoryImageCreateInfoNV: return "ExternalMemoryImageCreateInfoNV";
+ case StructureType::eExportMemoryAllocateInfoNV: return "ExportMemoryAllocateInfoNV";
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+ case StructureType::eImportMemoryWin32HandleInfoNV: return "ImportMemoryWin32HandleInfoNV";
+ case StructureType::eExportMemoryWin32HandleInfoNV: return "ExportMemoryWin32HandleInfoNV";
+ case StructureType::eWin32KeyedMutexAcquireReleaseInfoNV: return "Win32KeyedMutexAcquireReleaseInfoNV";
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ case StructureType::eValidationFlagsEXT: return "ValidationFlagsEXT";
+#if defined( VK_USE_PLATFORM_VI_NN )
+ case StructureType::eViSurfaceCreateInfoNN: return "ViSurfaceCreateInfoNN";
+#endif /*VK_USE_PLATFORM_VI_NN*/
+ case StructureType::eImageViewAstcDecodeModeEXT: return "ImageViewAstcDecodeModeEXT";
+ case StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT: return "PhysicalDeviceAstcDecodeFeaturesEXT";
+ case StructureType::ePipelineRobustnessCreateInfoEXT: return "PipelineRobustnessCreateInfoEXT";
+ case StructureType::ePhysicalDevicePipelineRobustnessFeaturesEXT: return "PhysicalDevicePipelineRobustnessFeaturesEXT";
+ case StructureType::ePhysicalDevicePipelineRobustnessPropertiesEXT: return "PhysicalDevicePipelineRobustnessPropertiesEXT";
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+ case StructureType::eImportMemoryWin32HandleInfoKHR: return "ImportMemoryWin32HandleInfoKHR";
+ case StructureType::eExportMemoryWin32HandleInfoKHR: return "ExportMemoryWin32HandleInfoKHR";
+ case StructureType::eMemoryWin32HandlePropertiesKHR: return "MemoryWin32HandlePropertiesKHR";
+ case StructureType::eMemoryGetWin32HandleInfoKHR: return "MemoryGetWin32HandleInfoKHR";
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ case StructureType::eImportMemoryFdInfoKHR: return "ImportMemoryFdInfoKHR";
+ case StructureType::eMemoryFdPropertiesKHR: return "MemoryFdPropertiesKHR";
+ case StructureType::eMemoryGetFdInfoKHR: return "MemoryGetFdInfoKHR";
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+ case StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR: return "Win32KeyedMutexAcquireReleaseInfoKHR";
+ case StructureType::eImportSemaphoreWin32HandleInfoKHR: return "ImportSemaphoreWin32HandleInfoKHR";
+ case StructureType::eExportSemaphoreWin32HandleInfoKHR: return "ExportSemaphoreWin32HandleInfoKHR";
+ case StructureType::eD3D12FenceSubmitInfoKHR: return "D3D12FenceSubmitInfoKHR";
+ case StructureType::eSemaphoreGetWin32HandleInfoKHR: return "SemaphoreGetWin32HandleInfoKHR";
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ case StructureType::eImportSemaphoreFdInfoKHR: return "ImportSemaphoreFdInfoKHR";
+ case StructureType::eSemaphoreGetFdInfoKHR: return "SemaphoreGetFdInfoKHR";
+ case StructureType::ePhysicalDevicePushDescriptorPropertiesKHR: return "PhysicalDevicePushDescriptorPropertiesKHR";
+ case StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT: return "CommandBufferInheritanceConditionalRenderingInfoEXT";
+ case StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT: return "PhysicalDeviceConditionalRenderingFeaturesEXT";
+ case StructureType::eConditionalRenderingBeginInfoEXT: return "ConditionalRenderingBeginInfoEXT";
+ case StructureType::ePresentRegionsKHR: return "PresentRegionsKHR";
+ case StructureType::ePipelineViewportWScalingStateCreateInfoNV: return "PipelineViewportWScalingStateCreateInfoNV";
+ case StructureType::eSurfaceCapabilities2EXT: return "SurfaceCapabilities2EXT";
+ case StructureType::eDisplayPowerInfoEXT: return "DisplayPowerInfoEXT";
+ case StructureType::eDeviceEventInfoEXT: return "DeviceEventInfoEXT";
+ case StructureType::eDisplayEventInfoEXT: return "DisplayEventInfoEXT";
+ case StructureType::eSwapchainCounterCreateInfoEXT: return "SwapchainCounterCreateInfoEXT";
+ case StructureType::ePresentTimesInfoGOOGLE: return "PresentTimesInfoGOOGLE";
+ case StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX: return "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX";
+ case StructureType::ePipelineViewportSwizzleStateCreateInfoNV: return "PipelineViewportSwizzleStateCreateInfoNV";
+ case StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT: return "PhysicalDeviceDiscardRectanglePropertiesEXT";
+ case StructureType::ePipelineDiscardRectangleStateCreateInfoEXT: return "PipelineDiscardRectangleStateCreateInfoEXT";
+ case StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT: return "PhysicalDeviceConservativeRasterizationPropertiesEXT";
+ case StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT: return "PipelineRasterizationConservativeStateCreateInfoEXT";
+ case StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT: return "PhysicalDeviceDepthClipEnableFeaturesEXT";
+ case StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT: return "PipelineRasterizationDepthClipStateCreateInfoEXT";
+ case StructureType::eHdrMetadataEXT: return "HdrMetadataEXT";
+ case StructureType::eSharedPresentSurfaceCapabilitiesKHR: return "SharedPresentSurfaceCapabilitiesKHR";
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+ case StructureType::eImportFenceWin32HandleInfoKHR: return "ImportFenceWin32HandleInfoKHR";
+ case StructureType::eExportFenceWin32HandleInfoKHR: return "ExportFenceWin32HandleInfoKHR";
+ case StructureType::eFenceGetWin32HandleInfoKHR: return "FenceGetWin32HandleInfoKHR";
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ case StructureType::eImportFenceFdInfoKHR: return "ImportFenceFdInfoKHR";
+ case StructureType::eFenceGetFdInfoKHR: return "FenceGetFdInfoKHR";
+ case StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR: return "PhysicalDevicePerformanceQueryFeaturesKHR";
+ case StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR: return "PhysicalDevicePerformanceQueryPropertiesKHR";
+ case StructureType::eQueryPoolPerformanceCreateInfoKHR: return "QueryPoolPerformanceCreateInfoKHR";
+ case StructureType::ePerformanceQuerySubmitInfoKHR: return "PerformanceQuerySubmitInfoKHR";
+ case StructureType::eAcquireProfilingLockInfoKHR: return "AcquireProfilingLockInfoKHR";
+ case StructureType::ePerformanceCounterKHR: return "PerformanceCounterKHR";
+ case StructureType::ePerformanceCounterDescriptionKHR: return "PerformanceCounterDescriptionKHR";
+ case StructureType::ePhysicalDeviceSurfaceInfo2KHR: return "PhysicalDeviceSurfaceInfo2KHR";
+ case StructureType::eSurfaceCapabilities2KHR: return "SurfaceCapabilities2KHR";
+ case StructureType::eSurfaceFormat2KHR: return "SurfaceFormat2KHR";
+ case StructureType::eDisplayProperties2KHR: return "DisplayProperties2KHR";
+ case StructureType::eDisplayPlaneProperties2KHR: return "DisplayPlaneProperties2KHR";
+ case StructureType::eDisplayModeProperties2KHR: return "DisplayModeProperties2KHR";
+ case StructureType::eDisplayPlaneInfo2KHR: return "DisplayPlaneInfo2KHR";
+ case StructureType::eDisplayPlaneCapabilities2KHR: return "DisplayPlaneCapabilities2KHR";
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+ case StructureType::eIosSurfaceCreateInfoMVK: return "IosSurfaceCreateInfoMVK";
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+ case StructureType::eMacosSurfaceCreateInfoMVK: return "MacosSurfaceCreateInfoMVK";
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+ case StructureType::eDebugUtilsObjectNameInfoEXT: return "DebugUtilsObjectNameInfoEXT";
+ case StructureType::eDebugUtilsObjectTagInfoEXT: return "DebugUtilsObjectTagInfoEXT";
+ case StructureType::eDebugUtilsLabelEXT: return "DebugUtilsLabelEXT";
+ case StructureType::eDebugUtilsMessengerCallbackDataEXT: return "DebugUtilsMessengerCallbackDataEXT";
+ case StructureType::eDebugUtilsMessengerCreateInfoEXT: return "DebugUtilsMessengerCreateInfoEXT";
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+ case StructureType::eAndroidHardwareBufferUsageANDROID: return "AndroidHardwareBufferUsageANDROID";
+ case StructureType::eAndroidHardwareBufferPropertiesANDROID: return "AndroidHardwareBufferPropertiesANDROID";
+ case StructureType::eAndroidHardwareBufferFormatPropertiesANDROID: return "AndroidHardwareBufferFormatPropertiesANDROID";
+ case StructureType::eImportAndroidHardwareBufferInfoANDROID: return "ImportAndroidHardwareBufferInfoANDROID";
+ case StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID: return "MemoryGetAndroidHardwareBufferInfoANDROID";
+ case StructureType::eExternalFormatANDROID: return "ExternalFormatANDROID";
+ case StructureType::eAndroidHardwareBufferFormatProperties2ANDROID: return "AndroidHardwareBufferFormatProperties2ANDROID";
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+ case StructureType::eSampleLocationsInfoEXT: return "SampleLocationsInfoEXT";
+ case StructureType::eRenderPassSampleLocationsBeginInfoEXT: return "RenderPassSampleLocationsBeginInfoEXT";
+ case StructureType::ePipelineSampleLocationsStateCreateInfoEXT: return "PipelineSampleLocationsStateCreateInfoEXT";
+ case StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT: return "PhysicalDeviceSampleLocationsPropertiesEXT";
+ case StructureType::eMultisamplePropertiesEXT: return "MultisamplePropertiesEXT";
+ case StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT: return "PhysicalDeviceBlendOperationAdvancedFeaturesEXT";
+ case StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT: return "PhysicalDeviceBlendOperationAdvancedPropertiesEXT";
+ case StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT: return "PipelineColorBlendAdvancedStateCreateInfoEXT";
+ case StructureType::ePipelineCoverageToColorStateCreateInfoNV: return "PipelineCoverageToColorStateCreateInfoNV";
+ case StructureType::eWriteDescriptorSetAccelerationStructureKHR: return "WriteDescriptorSetAccelerationStructureKHR";
+ case StructureType::eAccelerationStructureBuildGeometryInfoKHR: return "AccelerationStructureBuildGeometryInfoKHR";
+ case StructureType::eAccelerationStructureDeviceAddressInfoKHR: return "AccelerationStructureDeviceAddressInfoKHR";
+ case StructureType::eAccelerationStructureGeometryAabbsDataKHR: return "AccelerationStructureGeometryAabbsDataKHR";
+ case StructureType::eAccelerationStructureGeometryInstancesDataKHR: return "AccelerationStructureGeometryInstancesDataKHR";
+ case StructureType::eAccelerationStructureGeometryTrianglesDataKHR: return "AccelerationStructureGeometryTrianglesDataKHR";
+ case StructureType::eAccelerationStructureGeometryKHR: return "AccelerationStructureGeometryKHR";
+ case StructureType::eAccelerationStructureVersionInfoKHR: return "AccelerationStructureVersionInfoKHR";
+ case StructureType::eCopyAccelerationStructureInfoKHR: return "CopyAccelerationStructureInfoKHR";
+ case StructureType::eCopyAccelerationStructureToMemoryInfoKHR: return "CopyAccelerationStructureToMemoryInfoKHR";
+ case StructureType::eCopyMemoryToAccelerationStructureInfoKHR: return "CopyMemoryToAccelerationStructureInfoKHR";
+ case StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR: return "PhysicalDeviceAccelerationStructureFeaturesKHR";
+ case StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR: return "PhysicalDeviceAccelerationStructurePropertiesKHR";
+ case StructureType::eAccelerationStructureCreateInfoKHR: return "AccelerationStructureCreateInfoKHR";
+ case StructureType::eAccelerationStructureBuildSizesInfoKHR: return "AccelerationStructureBuildSizesInfoKHR";
+ case StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR: return "PhysicalDeviceRayTracingPipelineFeaturesKHR";
+ case StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR: return "PhysicalDeviceRayTracingPipelinePropertiesKHR";
+ case StructureType::eRayTracingPipelineCreateInfoKHR: return "RayTracingPipelineCreateInfoKHR";
+ case StructureType::eRayTracingShaderGroupCreateInfoKHR: return "RayTracingShaderGroupCreateInfoKHR";
+ case StructureType::eRayTracingPipelineInterfaceCreateInfoKHR: return "RayTracingPipelineInterfaceCreateInfoKHR";
+ case StructureType::ePhysicalDeviceRayQueryFeaturesKHR: return "PhysicalDeviceRayQueryFeaturesKHR";
+ case StructureType::ePipelineCoverageModulationStateCreateInfoNV: return "PipelineCoverageModulationStateCreateInfoNV";
+ case StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV: return "PhysicalDeviceShaderSmBuiltinsFeaturesNV";
+ case StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV: return "PhysicalDeviceShaderSmBuiltinsPropertiesNV";
+ case StructureType::eDrmFormatModifierPropertiesListEXT: return "DrmFormatModifierPropertiesListEXT";
+ case StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT: return "PhysicalDeviceImageDrmFormatModifierInfoEXT";
+ case StructureType::eImageDrmFormatModifierListCreateInfoEXT: return "ImageDrmFormatModifierListCreateInfoEXT";
+ case StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT: return "ImageDrmFormatModifierExplicitCreateInfoEXT";
+ case StructureType::eImageDrmFormatModifierPropertiesEXT: return "ImageDrmFormatModifierPropertiesEXT";
+ case StructureType::eDrmFormatModifierPropertiesList2EXT: return "DrmFormatModifierPropertiesList2EXT";
+ case StructureType::eValidationCacheCreateInfoEXT: return "ValidationCacheCreateInfoEXT";
+ case StructureType::eShaderModuleValidationCacheCreateInfoEXT: return "ShaderModuleValidationCacheCreateInfoEXT";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR: return "PhysicalDevicePortabilitySubsetFeaturesKHR";
+ case StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR: return "PhysicalDevicePortabilitySubsetPropertiesKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV: return "PipelineViewportShadingRateImageStateCreateInfoNV";
+ case StructureType::ePhysicalDeviceShadingRateImageFeaturesNV: return "PhysicalDeviceShadingRateImageFeaturesNV";
+ case StructureType::ePhysicalDeviceShadingRateImagePropertiesNV: return "PhysicalDeviceShadingRateImagePropertiesNV";
+ case StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV: return "PipelineViewportCoarseSampleOrderStateCreateInfoNV";
+ case StructureType::eRayTracingPipelineCreateInfoNV: return "RayTracingPipelineCreateInfoNV";
+ case StructureType::eAccelerationStructureCreateInfoNV: return "AccelerationStructureCreateInfoNV";
+ case StructureType::eGeometryNV: return "GeometryNV";
+ case StructureType::eGeometryTrianglesNV: return "GeometryTrianglesNV";
+ case StructureType::eGeometryAabbNV: return "GeometryAabbNV";
+ case StructureType::eBindAccelerationStructureMemoryInfoNV: return "BindAccelerationStructureMemoryInfoNV";
+ case StructureType::eWriteDescriptorSetAccelerationStructureNV: return "WriteDescriptorSetAccelerationStructureNV";
+ case StructureType::eAccelerationStructureMemoryRequirementsInfoNV: return "AccelerationStructureMemoryRequirementsInfoNV";
+ case StructureType::ePhysicalDeviceRayTracingPropertiesNV: return "PhysicalDeviceRayTracingPropertiesNV";
+ case StructureType::eRayTracingShaderGroupCreateInfoNV: return "RayTracingShaderGroupCreateInfoNV";
+ case StructureType::eAccelerationStructureInfoNV: return "AccelerationStructureInfoNV";
+ case StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV: return "PhysicalDeviceRepresentativeFragmentTestFeaturesNV";
+ case StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV: return "PipelineRepresentativeFragmentTestStateCreateInfoNV";
+ case StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT: return "PhysicalDeviceImageViewImageFormatInfoEXT";
+ case StructureType::eFilterCubicImageViewImageFormatPropertiesEXT: return "FilterCubicImageViewImageFormatPropertiesEXT";
+ case StructureType::eImportMemoryHostPointerInfoEXT: return "ImportMemoryHostPointerInfoEXT";
+ case StructureType::eMemoryHostPointerPropertiesEXT: return "MemoryHostPointerPropertiesEXT";
+ case StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT: return "PhysicalDeviceExternalMemoryHostPropertiesEXT";
+ case StructureType::ePhysicalDeviceShaderClockFeaturesKHR: return "PhysicalDeviceShaderClockFeaturesKHR";
+ case StructureType::ePipelineCompilerControlCreateInfoAMD: return "PipelineCompilerControlCreateInfoAMD";
+ case StructureType::eCalibratedTimestampInfoEXT: return "CalibratedTimestampInfoEXT";
+ case StructureType::ePhysicalDeviceShaderCorePropertiesAMD: return "PhysicalDeviceShaderCorePropertiesAMD";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case StructureType::eVideoDecodeH265CapabilitiesEXT: return "VideoDecodeH265CapabilitiesEXT";
+ case StructureType::eVideoDecodeH265SessionParametersCreateInfoEXT: return "VideoDecodeH265SessionParametersCreateInfoEXT";
+ case StructureType::eVideoDecodeH265SessionParametersAddInfoEXT: return "VideoDecodeH265SessionParametersAddInfoEXT";
+ case StructureType::eVideoDecodeH265ProfileInfoEXT: return "VideoDecodeH265ProfileInfoEXT";
+ case StructureType::eVideoDecodeH265PictureInfoEXT: return "VideoDecodeH265PictureInfoEXT";
+ case StructureType::eVideoDecodeH265DpbSlotInfoEXT: return "VideoDecodeH265DpbSlotInfoEXT";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR: return "DeviceQueueGlobalPriorityCreateInfoKHR";
+ case StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesKHR: return "PhysicalDeviceGlobalPriorityQueryFeaturesKHR";
+ case StructureType::eQueueFamilyGlobalPriorityPropertiesKHR: return "QueueFamilyGlobalPriorityPropertiesKHR";
+ case StructureType::eDeviceMemoryOverallocationCreateInfoAMD: return "DeviceMemoryOverallocationCreateInfoAMD";
+ case StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT: return "PhysicalDeviceVertexAttributeDivisorPropertiesEXT";
+ case StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT: return "PipelineVertexInputDivisorStateCreateInfoEXT";
+ case StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT: return "PhysicalDeviceVertexAttributeDivisorFeaturesEXT";
+#if defined( VK_USE_PLATFORM_GGP )
+ case StructureType::ePresentFrameTokenGGP: return "PresentFrameTokenGGP";
+#endif /*VK_USE_PLATFORM_GGP*/
+ case StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV: return "PhysicalDeviceComputeShaderDerivativesFeaturesNV";
+ case StructureType::ePhysicalDeviceMeshShaderFeaturesNV: return "PhysicalDeviceMeshShaderFeaturesNV";
+ case StructureType::ePhysicalDeviceMeshShaderPropertiesNV: return "PhysicalDeviceMeshShaderPropertiesNV";
+ case StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV: return "PhysicalDeviceShaderImageFootprintFeaturesNV";
+ case StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV: return "PipelineViewportExclusiveScissorStateCreateInfoNV";
+ case StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV: return "PhysicalDeviceExclusiveScissorFeaturesNV";
+ case StructureType::eCheckpointDataNV: return "CheckpointDataNV";
+ case StructureType::eQueueFamilyCheckpointPropertiesNV: return "QueueFamilyCheckpointPropertiesNV";
+ case StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL: return "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL";
+ case StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL: return "QueryPoolPerformanceQueryCreateInfoINTEL";
+ case StructureType::eInitializePerformanceApiInfoINTEL: return "InitializePerformanceApiInfoINTEL";
+ case StructureType::ePerformanceMarkerInfoINTEL: return "PerformanceMarkerInfoINTEL";
+ case StructureType::ePerformanceStreamMarkerInfoINTEL: return "PerformanceStreamMarkerInfoINTEL";
+ case StructureType::ePerformanceOverrideInfoINTEL: return "PerformanceOverrideInfoINTEL";
+ case StructureType::ePerformanceConfigurationAcquireInfoINTEL: return "PerformanceConfigurationAcquireInfoINTEL";
+ case StructureType::ePhysicalDevicePciBusInfoPropertiesEXT: return "PhysicalDevicePciBusInfoPropertiesEXT";
+ case StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD: return "DisplayNativeHdrSurfaceCapabilitiesAMD";
+ case StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD: return "SwapchainDisplayNativeHdrCreateInfoAMD";
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+ case StructureType::eImagepipeSurfaceCreateInfoFUCHSIA: return "ImagepipeSurfaceCreateInfoFUCHSIA";
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ case StructureType::eMetalSurfaceCreateInfoEXT: return "MetalSurfaceCreateInfoEXT";
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+ case StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT: return "PhysicalDeviceFragmentDensityMapFeaturesEXT";
+ case StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT: return "PhysicalDeviceFragmentDensityMapPropertiesEXT";
+ case StructureType::eRenderPassFragmentDensityMapCreateInfoEXT: return "RenderPassFragmentDensityMapCreateInfoEXT";
+ case StructureType::eFragmentShadingRateAttachmentInfoKHR: return "FragmentShadingRateAttachmentInfoKHR";
+ case StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR: return "PipelineFragmentShadingRateStateCreateInfoKHR";
+ case StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR: return "PhysicalDeviceFragmentShadingRatePropertiesKHR";
+ case StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR: return "PhysicalDeviceFragmentShadingRateFeaturesKHR";
+ case StructureType::ePhysicalDeviceFragmentShadingRateKHR: return "PhysicalDeviceFragmentShadingRateKHR";
+ case StructureType::ePhysicalDeviceShaderCoreProperties2AMD: return "PhysicalDeviceShaderCoreProperties2AMD";
+ case StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD: return "PhysicalDeviceCoherentMemoryFeaturesAMD";
+ case StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT: return "PhysicalDeviceShaderImageAtomicInt64FeaturesEXT";
+ case StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT: return "PhysicalDeviceMemoryBudgetPropertiesEXT";
+ case StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT: return "PhysicalDeviceMemoryPriorityFeaturesEXT";
+ case StructureType::eMemoryPriorityAllocateInfoEXT: return "MemoryPriorityAllocateInfoEXT";
+ case StructureType::eSurfaceProtectedCapabilitiesKHR: return "SurfaceProtectedCapabilitiesKHR";
+ case StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV: return "PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV";
+ case StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT: return "PhysicalDeviceBufferDeviceAddressFeaturesEXT";
+ case StructureType::eBufferDeviceAddressCreateInfoEXT: return "BufferDeviceAddressCreateInfoEXT";
+ case StructureType::eValidationFeaturesEXT: return "ValidationFeaturesEXT";
+ case StructureType::ePhysicalDevicePresentWaitFeaturesKHR: return "PhysicalDevicePresentWaitFeaturesKHR";
+ case StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV: return "PhysicalDeviceCooperativeMatrixFeaturesNV";
+ case StructureType::eCooperativeMatrixPropertiesNV: return "CooperativeMatrixPropertiesNV";
+ case StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV: return "PhysicalDeviceCooperativeMatrixPropertiesNV";
+ case StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV: return "PhysicalDeviceCoverageReductionModeFeaturesNV";
+ case StructureType::ePipelineCoverageReductionStateCreateInfoNV: return "PipelineCoverageReductionStateCreateInfoNV";
+ case StructureType::eFramebufferMixedSamplesCombinationNV: return "FramebufferMixedSamplesCombinationNV";
+ case StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT: return "PhysicalDeviceFragmentShaderInterlockFeaturesEXT";
+ case StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT: return "PhysicalDeviceYcbcrImageArraysFeaturesEXT";
+ case StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT: return "PhysicalDeviceProvokingVertexFeaturesEXT";
+ case StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT: return "PipelineRasterizationProvokingVertexStateCreateInfoEXT";
+ case StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT: return "PhysicalDeviceProvokingVertexPropertiesEXT";
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+ case StructureType::eSurfaceFullScreenExclusiveInfoEXT: return "SurfaceFullScreenExclusiveInfoEXT";
+ case StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT: return "SurfaceCapabilitiesFullScreenExclusiveEXT";
+ case StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT: return "SurfaceFullScreenExclusiveWin32InfoEXT";
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ case StructureType::eHeadlessSurfaceCreateInfoEXT: return "HeadlessSurfaceCreateInfoEXT";
+ case StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT: return "PhysicalDeviceLineRasterizationFeaturesEXT";
+ case StructureType::ePipelineRasterizationLineStateCreateInfoEXT: return "PipelineRasterizationLineStateCreateInfoEXT";
+ case StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT: return "PhysicalDeviceLineRasterizationPropertiesEXT";
+ case StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT: return "PhysicalDeviceShaderAtomicFloatFeaturesEXT";
+ case StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT: return "PhysicalDeviceIndexTypeUint8FeaturesEXT";
+ case StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT: return "PhysicalDeviceExtendedDynamicStateFeaturesEXT";
+ case StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR: return "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR";
+ case StructureType::ePipelineInfoKHR: return "PipelineInfoKHR";
+ case StructureType::ePipelineExecutablePropertiesKHR: return "PipelineExecutablePropertiesKHR";
+ case StructureType::ePipelineExecutableInfoKHR: return "PipelineExecutableInfoKHR";
+ case StructureType::ePipelineExecutableStatisticKHR: return "PipelineExecutableStatisticKHR";
+ case StructureType::ePipelineExecutableInternalRepresentationKHR: return "PipelineExecutableInternalRepresentationKHR";
+ case StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT: return "PhysicalDeviceShaderAtomicFloat2FeaturesEXT";
+ case StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV: return "PhysicalDeviceDeviceGeneratedCommandsPropertiesNV";
+ case StructureType::eGraphicsShaderGroupCreateInfoNV: return "GraphicsShaderGroupCreateInfoNV";
+ case StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV: return "GraphicsPipelineShaderGroupsCreateInfoNV";
+ case StructureType::eIndirectCommandsLayoutTokenNV: return "IndirectCommandsLayoutTokenNV";
+ case StructureType::eIndirectCommandsLayoutCreateInfoNV: return "IndirectCommandsLayoutCreateInfoNV";
+ case StructureType::eGeneratedCommandsInfoNV: return "GeneratedCommandsInfoNV";
+ case StructureType::eGeneratedCommandsMemoryRequirementsInfoNV: return "GeneratedCommandsMemoryRequirementsInfoNV";
+ case StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV: return "PhysicalDeviceDeviceGeneratedCommandsFeaturesNV";
+ case StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV: return "PhysicalDeviceInheritedViewportScissorFeaturesNV";
+ case StructureType::eCommandBufferInheritanceViewportScissorInfoNV: return "CommandBufferInheritanceViewportScissorInfoNV";
+ case StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT: return "PhysicalDeviceTexelBufferAlignmentFeaturesEXT";
+ case StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM: return "CommandBufferInheritanceRenderPassTransformInfoQCOM";
+ case StructureType::eRenderPassTransformBeginInfoQCOM: return "RenderPassTransformBeginInfoQCOM";
+ case StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT: return "PhysicalDeviceDeviceMemoryReportFeaturesEXT";
+ case StructureType::eDeviceDeviceMemoryReportCreateInfoEXT: return "DeviceDeviceMemoryReportCreateInfoEXT";
+ case StructureType::eDeviceMemoryReportCallbackDataEXT: return "DeviceMemoryReportCallbackDataEXT";
+ case StructureType::ePhysicalDeviceRobustness2FeaturesEXT: return "PhysicalDeviceRobustness2FeaturesEXT";
+ case StructureType::ePhysicalDeviceRobustness2PropertiesEXT: return "PhysicalDeviceRobustness2PropertiesEXT";
+ case StructureType::eSamplerCustomBorderColorCreateInfoEXT: return "SamplerCustomBorderColorCreateInfoEXT";
+ case StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT: return "PhysicalDeviceCustomBorderColorPropertiesEXT";
+ case StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT: return "PhysicalDeviceCustomBorderColorFeaturesEXT";
+ case StructureType::ePipelineLibraryCreateInfoKHR: return "PipelineLibraryCreateInfoKHR";
+ case StructureType::ePhysicalDevicePresentBarrierFeaturesNV: return "PhysicalDevicePresentBarrierFeaturesNV";
+ case StructureType::eSurfaceCapabilitiesPresentBarrierNV: return "SurfaceCapabilitiesPresentBarrierNV";
+ case StructureType::eSwapchainPresentBarrierCreateInfoNV: return "SwapchainPresentBarrierCreateInfoNV";
+ case StructureType::ePresentIdKHR: return "PresentIdKHR";
+ case StructureType::ePhysicalDevicePresentIdFeaturesKHR: return "PhysicalDevicePresentIdFeaturesKHR";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case StructureType::eVideoEncodeInfoKHR: return "VideoEncodeInfoKHR";
+ case StructureType::eVideoEncodeRateControlInfoKHR: return "VideoEncodeRateControlInfoKHR";
+ case StructureType::eVideoEncodeRateControlLayerInfoKHR: return "VideoEncodeRateControlLayerInfoKHR";
+ case StructureType::eVideoEncodeCapabilitiesKHR: return "VideoEncodeCapabilitiesKHR";
+ case StructureType::eVideoEncodeUsageInfoKHR: return "VideoEncodeUsageInfoKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV: return "PhysicalDeviceDiagnosticsConfigFeaturesNV";
+ case StructureType::eDeviceDiagnosticsConfigCreateInfoNV: return "DeviceDiagnosticsConfigCreateInfoNV";
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ case StructureType::eExportMetalObjectCreateInfoEXT: return "ExportMetalObjectCreateInfoEXT";
+ case StructureType::eExportMetalObjectsInfoEXT: return "ExportMetalObjectsInfoEXT";
+ case StructureType::eExportMetalDeviceInfoEXT: return "ExportMetalDeviceInfoEXT";
+ case StructureType::eExportMetalCommandQueueInfoEXT: return "ExportMetalCommandQueueInfoEXT";
+ case StructureType::eExportMetalBufferInfoEXT: return "ExportMetalBufferInfoEXT";
+ case StructureType::eImportMetalBufferInfoEXT: return "ImportMetalBufferInfoEXT";
+ case StructureType::eExportMetalTextureInfoEXT: return "ExportMetalTextureInfoEXT";
+ case StructureType::eImportMetalTextureInfoEXT: return "ImportMetalTextureInfoEXT";
+ case StructureType::eExportMetalIoSurfaceInfoEXT: return "ExportMetalIoSurfaceInfoEXT";
+ case StructureType::eImportMetalIoSurfaceInfoEXT: return "ImportMetalIoSurfaceInfoEXT";
+ case StructureType::eExportMetalSharedEventInfoEXT: return "ExportMetalSharedEventInfoEXT";
+ case StructureType::eImportMetalSharedEventInfoEXT: return "ImportMetalSharedEventInfoEXT";
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+ case StructureType::eQueueFamilyCheckpointProperties2NV: return "QueueFamilyCheckpointProperties2NV";
+ case StructureType::eCheckpointData2NV: return "CheckpointData2NV";
+ case StructureType::ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT: return "PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT";
+ case StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT: return "PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT";
+ case StructureType::eGraphicsPipelineLibraryCreateInfoEXT: return "GraphicsPipelineLibraryCreateInfoEXT";
+ case StructureType::ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD: return "PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD";
+ case StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR: return "PhysicalDeviceFragmentShaderBarycentricFeaturesKHR";
+ case StructureType::ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR: return "PhysicalDeviceFragmentShaderBarycentricPropertiesKHR";
+ case StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR: return "PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR";
+ case StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV: return "PhysicalDeviceFragmentShadingRateEnumsPropertiesNV";
+ case StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV: return "PhysicalDeviceFragmentShadingRateEnumsFeaturesNV";
+ case StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV: return "PipelineFragmentShadingRateEnumStateCreateInfoNV";
+ case StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV: return "AccelerationStructureGeometryMotionTrianglesDataNV";
+ case StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV: return "PhysicalDeviceRayTracingMotionBlurFeaturesNV";
+ case StructureType::eAccelerationStructureMotionInfoNV: return "AccelerationStructureMotionInfoNV";
+ case StructureType::ePhysicalDeviceMeshShaderFeaturesEXT: return "PhysicalDeviceMeshShaderFeaturesEXT";
+ case StructureType::ePhysicalDeviceMeshShaderPropertiesEXT: return "PhysicalDeviceMeshShaderPropertiesEXT";
+ case StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT: return "PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT";
+ case StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT: return "PhysicalDeviceFragmentDensityMap2FeaturesEXT";
+ case StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT: return "PhysicalDeviceFragmentDensityMap2PropertiesEXT";
+ case StructureType::eCopyCommandTransformInfoQCOM: return "CopyCommandTransformInfoQCOM";
+ case StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR: return "PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR";
+ case StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT: return "PhysicalDeviceImageCompressionControlFeaturesEXT";
+ case StructureType::eImageCompressionControlEXT: return "ImageCompressionControlEXT";
+ case StructureType::eSubresourceLayout2EXT: return "SubresourceLayout2EXT";
+ case StructureType::eImageSubresource2EXT: return "ImageSubresource2EXT";
+ case StructureType::eImageCompressionPropertiesEXT: return "ImageCompressionPropertiesEXT";
+ case StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT: return "PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT";
+ case StructureType::ePhysicalDevice4444FormatsFeaturesEXT: return "PhysicalDevice4444FormatsFeaturesEXT";
+ case StructureType::ePhysicalDeviceFaultFeaturesEXT: return "PhysicalDeviceFaultFeaturesEXT";
+ case StructureType::eDeviceFaultCountsEXT: return "DeviceFaultCountsEXT";
+ case StructureType::eDeviceFaultInfoEXT: return "DeviceFaultInfoEXT";
+ case StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT: return "PhysicalDeviceRgba10X6FormatsFeaturesEXT";
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+ case StructureType::eDirectfbSurfaceCreateInfoEXT: return "DirectfbSurfaceCreateInfoEXT";
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+ case StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT: return "PhysicalDeviceVertexInputDynamicStateFeaturesEXT";
+ case StructureType::eVertexInputBindingDescription2EXT: return "VertexInputBindingDescription2EXT";
+ case StructureType::eVertexInputAttributeDescription2EXT: return "VertexInputAttributeDescription2EXT";
+ case StructureType::ePhysicalDeviceDrmPropertiesEXT: return "PhysicalDeviceDrmPropertiesEXT";
+ case StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT: return "PhysicalDeviceAddressBindingReportFeaturesEXT";
+ case StructureType::eDeviceAddressBindingCallbackDataEXT: return "DeviceAddressBindingCallbackDataEXT";
+ case StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT: return "PhysicalDeviceDepthClipControlFeaturesEXT";
+ case StructureType::ePipelineViewportDepthClipControlCreateInfoEXT: return "PipelineViewportDepthClipControlCreateInfoEXT";
+ case StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT: return "PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT";
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+ case StructureType::eImportMemoryZirconHandleInfoFUCHSIA: return "ImportMemoryZirconHandleInfoFUCHSIA";
+ case StructureType::eMemoryZirconHandlePropertiesFUCHSIA: return "MemoryZirconHandlePropertiesFUCHSIA";
+ case StructureType::eMemoryGetZirconHandleInfoFUCHSIA: return "MemoryGetZirconHandleInfoFUCHSIA";
+ case StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA: return "ImportSemaphoreZirconHandleInfoFUCHSIA";
+ case StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA: return "SemaphoreGetZirconHandleInfoFUCHSIA";
+ case StructureType::eBufferCollectionCreateInfoFUCHSIA: return "BufferCollectionCreateInfoFUCHSIA";
+ case StructureType::eImportMemoryBufferCollectionFUCHSIA: return "ImportMemoryBufferCollectionFUCHSIA";
+ case StructureType::eBufferCollectionImageCreateInfoFUCHSIA: return "BufferCollectionImageCreateInfoFUCHSIA";
+ case StructureType::eBufferCollectionPropertiesFUCHSIA: return "BufferCollectionPropertiesFUCHSIA";
+ case StructureType::eBufferConstraintsInfoFUCHSIA: return "BufferConstraintsInfoFUCHSIA";
+ case StructureType::eBufferCollectionBufferCreateInfoFUCHSIA: return "BufferCollectionBufferCreateInfoFUCHSIA";
+ case StructureType::eImageConstraintsInfoFUCHSIA: return "ImageConstraintsInfoFUCHSIA";
+ case StructureType::eImageFormatConstraintsInfoFUCHSIA: return "ImageFormatConstraintsInfoFUCHSIA";
+ case StructureType::eSysmemColorSpaceFUCHSIA: return "SysmemColorSpaceFUCHSIA";
+ case StructureType::eBufferCollectionConstraintsInfoFUCHSIA: return "BufferCollectionConstraintsInfoFUCHSIA";
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+ case StructureType::eSubpassShadingPipelineCreateInfoHUAWEI: return "SubpassShadingPipelineCreateInfoHUAWEI";
+ case StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI: return "PhysicalDeviceSubpassShadingFeaturesHUAWEI";
+ case StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI: return "PhysicalDeviceSubpassShadingPropertiesHUAWEI";
+ case StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI: return "PhysicalDeviceInvocationMaskFeaturesHUAWEI";
+ case StructureType::eMemoryGetRemoteAddressInfoNV: return "MemoryGetRemoteAddressInfoNV";
+ case StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV: return "PhysicalDeviceExternalMemoryRdmaFeaturesNV";
+ case StructureType::ePipelinePropertiesIdentifierEXT: return "PipelinePropertiesIdentifierEXT";
+ case StructureType::ePhysicalDevicePipelinePropertiesFeaturesEXT: return "PhysicalDevicePipelinePropertiesFeaturesEXT";
+ case StructureType::ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT: return "PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT";
+ case StructureType::eSubpassResolvePerformanceQueryEXT: return "SubpassResolvePerformanceQueryEXT";
+ case StructureType::eMultisampledRenderToSingleSampledInfoEXT: return "MultisampledRenderToSingleSampledInfoEXT";
+ case StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT: return "PhysicalDeviceExtendedDynamicState2FeaturesEXT";
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+ case StructureType::eScreenSurfaceCreateInfoQNX: return "ScreenSurfaceCreateInfoQNX";
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+ case StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT: return "PhysicalDeviceColorWriteEnableFeaturesEXT";
+ case StructureType::ePipelineColorWriteCreateInfoEXT: return "PipelineColorWriteCreateInfoEXT";
+ case StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT: return "PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT";
+ case StructureType::ePhysicalDeviceRayTracingMaintenance1FeaturesKHR: return "PhysicalDeviceRayTracingMaintenance1FeaturesKHR";
+ case StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT: return "PhysicalDeviceImageViewMinLodFeaturesEXT";
+ case StructureType::eImageViewMinLodCreateInfoEXT: return "ImageViewMinLodCreateInfoEXT";
+ case StructureType::ePhysicalDeviceMultiDrawFeaturesEXT: return "PhysicalDeviceMultiDrawFeaturesEXT";
+ case StructureType::ePhysicalDeviceMultiDrawPropertiesEXT: return "PhysicalDeviceMultiDrawPropertiesEXT";
+ case StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT: return "PhysicalDeviceImage2DViewOf3DFeaturesEXT";
+ case StructureType::eMicromapBuildInfoEXT: return "MicromapBuildInfoEXT";
+ case StructureType::eMicromapVersionInfoEXT: return "MicromapVersionInfoEXT";
+ case StructureType::eCopyMicromapInfoEXT: return "CopyMicromapInfoEXT";
+ case StructureType::eCopyMicromapToMemoryInfoEXT: return "CopyMicromapToMemoryInfoEXT";
+ case StructureType::eCopyMemoryToMicromapInfoEXT: return "CopyMemoryToMicromapInfoEXT";
+ case StructureType::ePhysicalDeviceOpacityMicromapFeaturesEXT: return "PhysicalDeviceOpacityMicromapFeaturesEXT";
+ case StructureType::ePhysicalDeviceOpacityMicromapPropertiesEXT: return "PhysicalDeviceOpacityMicromapPropertiesEXT";
+ case StructureType::eMicromapCreateInfoEXT: return "MicromapCreateInfoEXT";
+ case StructureType::eMicromapBuildSizesInfoEXT: return "MicromapBuildSizesInfoEXT";
+ case StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT: return "AccelerationStructureTrianglesOpacityMicromapEXT";
+ case StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT: return "PhysicalDeviceBorderColorSwizzleFeaturesEXT";
+ case StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT: return "SamplerBorderColorComponentMappingCreateInfoEXT";
+ case StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT: return "PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT";
+ case StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE: return "PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE";
+ case StructureType::eDescriptorSetBindingReferenceVALVE: return "DescriptorSetBindingReferenceVALVE";
+ case StructureType::eDescriptorSetLayoutHostMappingInfoVALVE: return "DescriptorSetLayoutHostMappingInfoVALVE";
+ case StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesEXT: return "PhysicalDeviceDepthClampZeroOneFeaturesEXT";
+ case StructureType::ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT: return "PhysicalDeviceNonSeamlessCubeMapFeaturesEXT";
+ case StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM: return "PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM";
+ case StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM: return "PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM";
+ case StructureType::eSubpassFragmentDensityMapOffsetEndInfoQCOM: return "SubpassFragmentDensityMapOffsetEndInfoQCOM";
+ case StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV: return "PhysicalDeviceLinearColorAttachmentFeaturesNV";
+ case StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT: return "PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT";
+ case StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM: return "PhysicalDeviceImageProcessingFeaturesQCOM";
+ case StructureType::ePhysicalDeviceImageProcessingPropertiesQCOM: return "PhysicalDeviceImageProcessingPropertiesQCOM";
+ case StructureType::eImageViewSampleWeightCreateInfoQCOM: return "ImageViewSampleWeightCreateInfoQCOM";
+ case StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT: return "PhysicalDeviceExtendedDynamicState3FeaturesEXT";
+ case StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT: return "PhysicalDeviceExtendedDynamicState3PropertiesEXT";
+ case StructureType::ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT: return "PhysicalDeviceSubpassMergeFeedbackFeaturesEXT";
+ case StructureType::eRenderPassCreationControlEXT: return "RenderPassCreationControlEXT";
+ case StructureType::eRenderPassCreationFeedbackCreateInfoEXT: return "RenderPassCreationFeedbackCreateInfoEXT";
+ case StructureType::eRenderPassSubpassFeedbackCreateInfoEXT: return "RenderPassSubpassFeedbackCreateInfoEXT";
+ case StructureType::ePhysicalDeviceShaderModuleIdentifierFeaturesEXT: return "PhysicalDeviceShaderModuleIdentifierFeaturesEXT";
+ case StructureType::ePhysicalDeviceShaderModuleIdentifierPropertiesEXT: return "PhysicalDeviceShaderModuleIdentifierPropertiesEXT";
+ case StructureType::ePipelineShaderStageModuleIdentifierCreateInfoEXT: return "PipelineShaderStageModuleIdentifierCreateInfoEXT";
+ case StructureType::eShaderModuleIdentifierEXT: return "ShaderModuleIdentifierEXT";
+ case StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT: return "PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT";
+ case StructureType::ePhysicalDeviceOpticalFlowFeaturesNV: return "PhysicalDeviceOpticalFlowFeaturesNV";
+ case StructureType::ePhysicalDeviceOpticalFlowPropertiesNV: return "PhysicalDeviceOpticalFlowPropertiesNV";
+ case StructureType::eOpticalFlowImageFormatInfoNV: return "OpticalFlowImageFormatInfoNV";
+ case StructureType::eOpticalFlowImageFormatPropertiesNV: return "OpticalFlowImageFormatPropertiesNV";
+ case StructureType::eOpticalFlowSessionCreateInfoNV: return "OpticalFlowSessionCreateInfoNV";
+ case StructureType::eOpticalFlowExecuteInfoNV: return "OpticalFlowExecuteInfoNV";
+ case StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV: return "OpticalFlowSessionCreatePrivateDataInfoNV";
+ case StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT: return "PhysicalDeviceLegacyDitheringFeaturesEXT";
+ case StructureType::ePhysicalDevicePipelineProtectedAccessFeaturesEXT: return "PhysicalDevicePipelineProtectedAccessFeaturesEXT";
+ case StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM: return "PhysicalDeviceTilePropertiesFeaturesQCOM";
+ case StructureType::eTilePropertiesQCOM: return "TilePropertiesQCOM";
+ case StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC: return "PhysicalDeviceAmigoProfilingFeaturesSEC";
+ case StructureType::eAmigoProfilingSubmitInfoSEC: return "AmigoProfilingSubmitInfoSEC";
+ case StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT: return "PhysicalDeviceMutableDescriptorTypeFeaturesEXT";
+ case StructureType::eMutableDescriptorTypeCreateInfoEXT: return "MutableDescriptorTypeCreateInfoEXT";
+ case StructureType::ePhysicalDeviceShaderCoreBuiltinsFeaturesARM: return "PhysicalDeviceShaderCoreBuiltinsFeaturesARM";
+ case StructureType::ePhysicalDeviceShaderCoreBuiltinsPropertiesARM: return "PhysicalDeviceShaderCoreBuiltinsPropertiesARM";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineCacheHeaderVersion value )
+ {
+ switch ( value )
+ {
+ case PipelineCacheHeaderVersion::eOne: return "One";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ObjectType value )
+ {
+ switch ( value )
+ {
+ case ObjectType::eUnknown: return "Unknown";
+ case ObjectType::eInstance: return "Instance";
+ case ObjectType::ePhysicalDevice: return "PhysicalDevice";
+ case ObjectType::eDevice: return "Device";
+ case ObjectType::eQueue: return "Queue";
+ case ObjectType::eSemaphore: return "Semaphore";
+ case ObjectType::eCommandBuffer: return "CommandBuffer";
+ case ObjectType::eFence: return "Fence";
+ case ObjectType::eDeviceMemory: return "DeviceMemory";
+ case ObjectType::eBuffer: return "Buffer";
+ case ObjectType::eImage: return "Image";
+ case ObjectType::eEvent: return "Event";
+ case ObjectType::eQueryPool: return "QueryPool";
+ case ObjectType::eBufferView: return "BufferView";
+ case ObjectType::eImageView: return "ImageView";
+ case ObjectType::eShaderModule: return "ShaderModule";
+ case ObjectType::ePipelineCache: return "PipelineCache";
+ case ObjectType::ePipelineLayout: return "PipelineLayout";
+ case ObjectType::eRenderPass: return "RenderPass";
+ case ObjectType::ePipeline: return "Pipeline";
+ case ObjectType::eDescriptorSetLayout: return "DescriptorSetLayout";
+ case ObjectType::eSampler: return "Sampler";
+ case ObjectType::eDescriptorPool: return "DescriptorPool";
+ case ObjectType::eDescriptorSet: return "DescriptorSet";
+ case ObjectType::eFramebuffer: return "Framebuffer";
+ case ObjectType::eCommandPool: return "CommandPool";
+ case ObjectType::eSamplerYcbcrConversion: return "SamplerYcbcrConversion";
+ case ObjectType::eDescriptorUpdateTemplate: return "DescriptorUpdateTemplate";
+ case ObjectType::ePrivateDataSlot: return "PrivateDataSlot";
+ case ObjectType::eSurfaceKHR: return "SurfaceKHR";
+ case ObjectType::eSwapchainKHR: return "SwapchainKHR";
+ case ObjectType::eDisplayKHR: return "DisplayKHR";
+ case ObjectType::eDisplayModeKHR: return "DisplayModeKHR";
+ case ObjectType::eDebugReportCallbackEXT: return "DebugReportCallbackEXT";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case ObjectType::eVideoSessionKHR: return "VideoSessionKHR";
+ case ObjectType::eVideoSessionParametersKHR: return "VideoSessionParametersKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case ObjectType::eCuModuleNVX: return "CuModuleNVX";
+ case ObjectType::eCuFunctionNVX: return "CuFunctionNVX";
+ case ObjectType::eDebugUtilsMessengerEXT: return "DebugUtilsMessengerEXT";
+ case ObjectType::eAccelerationStructureKHR: return "AccelerationStructureKHR";
+ case ObjectType::eValidationCacheEXT: return "ValidationCacheEXT";
+ case ObjectType::eAccelerationStructureNV: return "AccelerationStructureNV";
+ case ObjectType::ePerformanceConfigurationINTEL: return "PerformanceConfigurationINTEL";
+ case ObjectType::eDeferredOperationKHR: return "DeferredOperationKHR";
+ case ObjectType::eIndirectCommandsLayoutNV: return "IndirectCommandsLayoutNV";
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+ case ObjectType::eBufferCollectionFUCHSIA: return "BufferCollectionFUCHSIA";
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+ case ObjectType::eMicromapEXT: return "MicromapEXT";
+ case ObjectType::eOpticalFlowSessionNV: return "OpticalFlowSessionNV";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VendorId value )
+ {
+ switch ( value )
+ {
+ case VendorId::eVIV: return "VIV";
+ case VendorId::eVSI: return "VSI";
+ case VendorId::eKazan: return "Kazan";
+ case VendorId::eCodeplay: return "Codeplay";
+ case VendorId::eMESA: return "MESA";
+ case VendorId::ePocl: return "Pocl";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( Format value )
+ {
+ switch ( value )
+ {
+ case Format::eUndefined: return "Undefined";
+ case Format::eR4G4UnormPack8: return "R4G4UnormPack8";
+ case Format::eR4G4B4A4UnormPack16: return "R4G4B4A4UnormPack16";
+ case Format::eB4G4R4A4UnormPack16: return "B4G4R4A4UnormPack16";
+ case Format::eR5G6B5UnormPack16: return "R5G6B5UnormPack16";
+ case Format::eB5G6R5UnormPack16: return "B5G6R5UnormPack16";
+ case Format::eR5G5B5A1UnormPack16: return "R5G5B5A1UnormPack16";
+ case Format::eB5G5R5A1UnormPack16: return "B5G5R5A1UnormPack16";
+ case Format::eA1R5G5B5UnormPack16: return "A1R5G5B5UnormPack16";
+ case Format::eR8Unorm: return "R8Unorm";
+ case Format::eR8Snorm: return "R8Snorm";
+ case Format::eR8Uscaled: return "R8Uscaled";
+ case Format::eR8Sscaled: return "R8Sscaled";
+ case Format::eR8Uint: return "R8Uint";
+ case Format::eR8Sint: return "R8Sint";
+ case Format::eR8Srgb: return "R8Srgb";
+ case Format::eR8G8Unorm: return "R8G8Unorm";
+ case Format::eR8G8Snorm: return "R8G8Snorm";
+ case Format::eR8G8Uscaled: return "R8G8Uscaled";
+ case Format::eR8G8Sscaled: return "R8G8Sscaled";
+ case Format::eR8G8Uint: return "R8G8Uint";
+ case Format::eR8G8Sint: return "R8G8Sint";
+ case Format::eR8G8Srgb: return "R8G8Srgb";
+ case Format::eR8G8B8Unorm: return "R8G8B8Unorm";
+ case Format::eR8G8B8Snorm: return "R8G8B8Snorm";
+ case Format::eR8G8B8Uscaled: return "R8G8B8Uscaled";
+ case Format::eR8G8B8Sscaled: return "R8G8B8Sscaled";
+ case Format::eR8G8B8Uint: return "R8G8B8Uint";
+ case Format::eR8G8B8Sint: return "R8G8B8Sint";
+ case Format::eR8G8B8Srgb: return "R8G8B8Srgb";
+ case Format::eB8G8R8Unorm: return "B8G8R8Unorm";
+ case Format::eB8G8R8Snorm: return "B8G8R8Snorm";
+ case Format::eB8G8R8Uscaled: return "B8G8R8Uscaled";
+ case Format::eB8G8R8Sscaled: return "B8G8R8Sscaled";
+ case Format::eB8G8R8Uint: return "B8G8R8Uint";
+ case Format::eB8G8R8Sint: return "B8G8R8Sint";
+ case Format::eB8G8R8Srgb: return "B8G8R8Srgb";
+ case Format::eR8G8B8A8Unorm: return "R8G8B8A8Unorm";
+ case Format::eR8G8B8A8Snorm: return "R8G8B8A8Snorm";
+ case Format::eR8G8B8A8Uscaled: return "R8G8B8A8Uscaled";
+ case Format::eR8G8B8A8Sscaled: return "R8G8B8A8Sscaled";
+ case Format::eR8G8B8A8Uint: return "R8G8B8A8Uint";
+ case Format::eR8G8B8A8Sint: return "R8G8B8A8Sint";
+ case Format::eR8G8B8A8Srgb: return "R8G8B8A8Srgb";
+ case Format::eB8G8R8A8Unorm: return "B8G8R8A8Unorm";
+ case Format::eB8G8R8A8Snorm: return "B8G8R8A8Snorm";
+ case Format::eB8G8R8A8Uscaled: return "B8G8R8A8Uscaled";
+ case Format::eB8G8R8A8Sscaled: return "B8G8R8A8Sscaled";
+ case Format::eB8G8R8A8Uint: return "B8G8R8A8Uint";
+ case Format::eB8G8R8A8Sint: return "B8G8R8A8Sint";
+ case Format::eB8G8R8A8Srgb: return "B8G8R8A8Srgb";
+ case Format::eA8B8G8R8UnormPack32: return "A8B8G8R8UnormPack32";
+ case Format::eA8B8G8R8SnormPack32: return "A8B8G8R8SnormPack32";
+ case Format::eA8B8G8R8UscaledPack32: return "A8B8G8R8UscaledPack32";
+ case Format::eA8B8G8R8SscaledPack32: return "A8B8G8R8SscaledPack32";
+ case Format::eA8B8G8R8UintPack32: return "A8B8G8R8UintPack32";
+ case Format::eA8B8G8R8SintPack32: return "A8B8G8R8SintPack32";
+ case Format::eA8B8G8R8SrgbPack32: return "A8B8G8R8SrgbPack32";
+ case Format::eA2R10G10B10UnormPack32: return "A2R10G10B10UnormPack32";
+ case Format::eA2R10G10B10SnormPack32: return "A2R10G10B10SnormPack32";
+ case Format::eA2R10G10B10UscaledPack32: return "A2R10G10B10UscaledPack32";
+ case Format::eA2R10G10B10SscaledPack32: return "A2R10G10B10SscaledPack32";
+ case Format::eA2R10G10B10UintPack32: return "A2R10G10B10UintPack32";
+ case Format::eA2R10G10B10SintPack32: return "A2R10G10B10SintPack32";
+ case Format::eA2B10G10R10UnormPack32: return "A2B10G10R10UnormPack32";
+ case Format::eA2B10G10R10SnormPack32: return "A2B10G10R10SnormPack32";
+ case Format::eA2B10G10R10UscaledPack32: return "A2B10G10R10UscaledPack32";
+ case Format::eA2B10G10R10SscaledPack32: return "A2B10G10R10SscaledPack32";
+ case Format::eA2B10G10R10UintPack32: return "A2B10G10R10UintPack32";
+ case Format::eA2B10G10R10SintPack32: return "A2B10G10R10SintPack32";
+ case Format::eR16Unorm: return "R16Unorm";
+ case Format::eR16Snorm: return "R16Snorm";
+ case Format::eR16Uscaled: return "R16Uscaled";
+ case Format::eR16Sscaled: return "R16Sscaled";
+ case Format::eR16Uint: return "R16Uint";
+ case Format::eR16Sint: return "R16Sint";
+ case Format::eR16Sfloat: return "R16Sfloat";
+ case Format::eR16G16Unorm: return "R16G16Unorm";
+ case Format::eR16G16Snorm: return "R16G16Snorm";
+ case Format::eR16G16Uscaled: return "R16G16Uscaled";
+ case Format::eR16G16Sscaled: return "R16G16Sscaled";
+ case Format::eR16G16Uint: return "R16G16Uint";
+ case Format::eR16G16Sint: return "R16G16Sint";
+ case Format::eR16G16Sfloat: return "R16G16Sfloat";
+ case Format::eR16G16B16Unorm: return "R16G16B16Unorm";
+ case Format::eR16G16B16Snorm: return "R16G16B16Snorm";
+ case Format::eR16G16B16Uscaled: return "R16G16B16Uscaled";
+ case Format::eR16G16B16Sscaled: return "R16G16B16Sscaled";
+ case Format::eR16G16B16Uint: return "R16G16B16Uint";
+ case Format::eR16G16B16Sint: return "R16G16B16Sint";
+ case Format::eR16G16B16Sfloat: return "R16G16B16Sfloat";
+ case Format::eR16G16B16A16Unorm: return "R16G16B16A16Unorm";
+ case Format::eR16G16B16A16Snorm: return "R16G16B16A16Snorm";
+ case Format::eR16G16B16A16Uscaled: return "R16G16B16A16Uscaled";
+ case Format::eR16G16B16A16Sscaled: return "R16G16B16A16Sscaled";
+ case Format::eR16G16B16A16Uint: return "R16G16B16A16Uint";
+ case Format::eR16G16B16A16Sint: return "R16G16B16A16Sint";
+ case Format::eR16G16B16A16Sfloat: return "R16G16B16A16Sfloat";
+ case Format::eR32Uint: return "R32Uint";
+ case Format::eR32Sint: return "R32Sint";
+ case Format::eR32Sfloat: return "R32Sfloat";
+ case Format::eR32G32Uint: return "R32G32Uint";
+ case Format::eR32G32Sint: return "R32G32Sint";
+ case Format::eR32G32Sfloat: return "R32G32Sfloat";
+ case Format::eR32G32B32Uint: return "R32G32B32Uint";
+ case Format::eR32G32B32Sint: return "R32G32B32Sint";
+ case Format::eR32G32B32Sfloat: return "R32G32B32Sfloat";
+ case Format::eR32G32B32A32Uint: return "R32G32B32A32Uint";
+ case Format::eR32G32B32A32Sint: return "R32G32B32A32Sint";
+ case Format::eR32G32B32A32Sfloat: return "R32G32B32A32Sfloat";
+ case Format::eR64Uint: return "R64Uint";
+ case Format::eR64Sint: return "R64Sint";
+ case Format::eR64Sfloat: return "R64Sfloat";
+ case Format::eR64G64Uint: return "R64G64Uint";
+ case Format::eR64G64Sint: return "R64G64Sint";
+ case Format::eR64G64Sfloat: return "R64G64Sfloat";
+ case Format::eR64G64B64Uint: return "R64G64B64Uint";
+ case Format::eR64G64B64Sint: return "R64G64B64Sint";
+ case Format::eR64G64B64Sfloat: return "R64G64B64Sfloat";
+ case Format::eR64G64B64A64Uint: return "R64G64B64A64Uint";
+ case Format::eR64G64B64A64Sint: return "R64G64B64A64Sint";
+ case Format::eR64G64B64A64Sfloat: return "R64G64B64A64Sfloat";
+ case Format::eB10G11R11UfloatPack32: return "B10G11R11UfloatPack32";
+ case Format::eE5B9G9R9UfloatPack32: return "E5B9G9R9UfloatPack32";
+ case Format::eD16Unorm: return "D16Unorm";
+ case Format::eX8D24UnormPack32: return "X8D24UnormPack32";
+ case Format::eD32Sfloat: return "D32Sfloat";
+ case Format::eS8Uint: return "S8Uint";
+ case Format::eD16UnormS8Uint: return "D16UnormS8Uint";
+ case Format::eD24UnormS8Uint: return "D24UnormS8Uint";
+ case Format::eD32SfloatS8Uint: return "D32SfloatS8Uint";
+ case Format::eBc1RgbUnormBlock: return "Bc1RgbUnormBlock";
+ case Format::eBc1RgbSrgbBlock: return "Bc1RgbSrgbBlock";
+ case Format::eBc1RgbaUnormBlock: return "Bc1RgbaUnormBlock";
+ case Format::eBc1RgbaSrgbBlock: return "Bc1RgbaSrgbBlock";
+ case Format::eBc2UnormBlock: return "Bc2UnormBlock";
+ case Format::eBc2SrgbBlock: return "Bc2SrgbBlock";
+ case Format::eBc3UnormBlock: return "Bc3UnormBlock";
+ case Format::eBc3SrgbBlock: return "Bc3SrgbBlock";
+ case Format::eBc4UnormBlock: return "Bc4UnormBlock";
+ case Format::eBc4SnormBlock: return "Bc4SnormBlock";
+ case Format::eBc5UnormBlock: return "Bc5UnormBlock";
+ case Format::eBc5SnormBlock: return "Bc5SnormBlock";
+ case Format::eBc6HUfloatBlock: return "Bc6HUfloatBlock";
+ case Format::eBc6HSfloatBlock: return "Bc6HSfloatBlock";
+ case Format::eBc7UnormBlock: return "Bc7UnormBlock";
+ case Format::eBc7SrgbBlock: return "Bc7SrgbBlock";
+ case Format::eEtc2R8G8B8UnormBlock: return "Etc2R8G8B8UnormBlock";
+ case Format::eEtc2R8G8B8SrgbBlock: return "Etc2R8G8B8SrgbBlock";
+ case Format::eEtc2R8G8B8A1UnormBlock: return "Etc2R8G8B8A1UnormBlock";
+ case Format::eEtc2R8G8B8A1SrgbBlock: return "Etc2R8G8B8A1SrgbBlock";
+ case Format::eEtc2R8G8B8A8UnormBlock: return "Etc2R8G8B8A8UnormBlock";
+ case Format::eEtc2R8G8B8A8SrgbBlock: return "Etc2R8G8B8A8SrgbBlock";
+ case Format::eEacR11UnormBlock: return "EacR11UnormBlock";
+ case Format::eEacR11SnormBlock: return "EacR11SnormBlock";
+ case Format::eEacR11G11UnormBlock: return "EacR11G11UnormBlock";
+ case Format::eEacR11G11SnormBlock: return "EacR11G11SnormBlock";
+ case Format::eAstc4x4UnormBlock: return "Astc4x4UnormBlock";
+ case Format::eAstc4x4SrgbBlock: return "Astc4x4SrgbBlock";
+ case Format::eAstc5x4UnormBlock: return "Astc5x4UnormBlock";
+ case Format::eAstc5x4SrgbBlock: return "Astc5x4SrgbBlock";
+ case Format::eAstc5x5UnormBlock: return "Astc5x5UnormBlock";
+ case Format::eAstc5x5SrgbBlock: return "Astc5x5SrgbBlock";
+ case Format::eAstc6x5UnormBlock: return "Astc6x5UnormBlock";
+ case Format::eAstc6x5SrgbBlock: return "Astc6x5SrgbBlock";
+ case Format::eAstc6x6UnormBlock: return "Astc6x6UnormBlock";
+ case Format::eAstc6x6SrgbBlock: return "Astc6x6SrgbBlock";
+ case Format::eAstc8x5UnormBlock: return "Astc8x5UnormBlock";
+ case Format::eAstc8x5SrgbBlock: return "Astc8x5SrgbBlock";
+ case Format::eAstc8x6UnormBlock: return "Astc8x6UnormBlock";
+ case Format::eAstc8x6SrgbBlock: return "Astc8x6SrgbBlock";
+ case Format::eAstc8x8UnormBlock: return "Astc8x8UnormBlock";
+ case Format::eAstc8x8SrgbBlock: return "Astc8x8SrgbBlock";
+ case Format::eAstc10x5UnormBlock: return "Astc10x5UnormBlock";
+ case Format::eAstc10x5SrgbBlock: return "Astc10x5SrgbBlock";
+ case Format::eAstc10x6UnormBlock: return "Astc10x6UnormBlock";
+ case Format::eAstc10x6SrgbBlock: return "Astc10x6SrgbBlock";
+ case Format::eAstc10x8UnormBlock: return "Astc10x8UnormBlock";
+ case Format::eAstc10x8SrgbBlock: return "Astc10x8SrgbBlock";
+ case Format::eAstc10x10UnormBlock: return "Astc10x10UnormBlock";
+ case Format::eAstc10x10SrgbBlock: return "Astc10x10SrgbBlock";
+ case Format::eAstc12x10UnormBlock: return "Astc12x10UnormBlock";
+ case Format::eAstc12x10SrgbBlock: return "Astc12x10SrgbBlock";
+ case Format::eAstc12x12UnormBlock: return "Astc12x12UnormBlock";
+ case Format::eAstc12x12SrgbBlock: return "Astc12x12SrgbBlock";
+ case Format::eG8B8G8R8422Unorm: return "G8B8G8R8422Unorm";
+ case Format::eB8G8R8G8422Unorm: return "B8G8R8G8422Unorm";
+ case Format::eG8B8R83Plane420Unorm: return "G8B8R83Plane420Unorm";
+ case Format::eG8B8R82Plane420Unorm: return "G8B8R82Plane420Unorm";
+ case Format::eG8B8R83Plane422Unorm: return "G8B8R83Plane422Unorm";
+ case Format::eG8B8R82Plane422Unorm: return "G8B8R82Plane422Unorm";
+ case Format::eG8B8R83Plane444Unorm: return "G8B8R83Plane444Unorm";
+ case Format::eR10X6UnormPack16: return "R10X6UnormPack16";
+ case Format::eR10X6G10X6Unorm2Pack16: return "R10X6G10X6Unorm2Pack16";
+ case Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return "R10X6G10X6B10X6A10X6Unorm4Pack16";
+ case Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return "G10X6B10X6G10X6R10X6422Unorm4Pack16";
+ case Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return "B10X6G10X6R10X6G10X6422Unorm4Pack16";
+ case Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return "G10X6B10X6R10X63Plane420Unorm3Pack16";
+ case Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return "G10X6B10X6R10X62Plane420Unorm3Pack16";
+ case Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return "G10X6B10X6R10X63Plane422Unorm3Pack16";
+ case Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return "G10X6B10X6R10X62Plane422Unorm3Pack16";
+ case Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return "G10X6B10X6R10X63Plane444Unorm3Pack16";
+ case Format::eR12X4UnormPack16: return "R12X4UnormPack16";
+ case Format::eR12X4G12X4Unorm2Pack16: return "R12X4G12X4Unorm2Pack16";
+ case Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return "R12X4G12X4B12X4A12X4Unorm4Pack16";
+ case Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return "G12X4B12X4G12X4R12X4422Unorm4Pack16";
+ case Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return "B12X4G12X4R12X4G12X4422Unorm4Pack16";
+ case Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return "G12X4B12X4R12X43Plane420Unorm3Pack16";
+ case Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return "G12X4B12X4R12X42Plane420Unorm3Pack16";
+ case Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return "G12X4B12X4R12X43Plane422Unorm3Pack16";
+ case Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return "G12X4B12X4R12X42Plane422Unorm3Pack16";
+ case Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return "G12X4B12X4R12X43Plane444Unorm3Pack16";
+ case Format::eG16B16G16R16422Unorm: return "G16B16G16R16422Unorm";
+ case Format::eB16G16R16G16422Unorm: return "B16G16R16G16422Unorm";
+ case Format::eG16B16R163Plane420Unorm: return "G16B16R163Plane420Unorm";
+ case Format::eG16B16R162Plane420Unorm: return "G16B16R162Plane420Unorm";
+ case Format::eG16B16R163Plane422Unorm: return "G16B16R163Plane422Unorm";
+ case Format::eG16B16R162Plane422Unorm: return "G16B16R162Plane422Unorm";
+ case Format::eG16B16R163Plane444Unorm: return "G16B16R163Plane444Unorm";
+ case Format::eG8B8R82Plane444Unorm: return "G8B8R82Plane444Unorm";
+ case Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return "G10X6B10X6R10X62Plane444Unorm3Pack16";
+ case Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return "G12X4B12X4R12X42Plane444Unorm3Pack16";
+ case Format::eG16B16R162Plane444Unorm: return "G16B16R162Plane444Unorm";
+ case Format::eA4R4G4B4UnormPack16: return "A4R4G4B4UnormPack16";
+ case Format::eA4B4G4R4UnormPack16: return "A4B4G4R4UnormPack16";
+ case Format::eAstc4x4SfloatBlock: return "Astc4x4SfloatBlock";
+ case Format::eAstc5x4SfloatBlock: return "Astc5x4SfloatBlock";
+ case Format::eAstc5x5SfloatBlock: return "Astc5x5SfloatBlock";
+ case Format::eAstc6x5SfloatBlock: return "Astc6x5SfloatBlock";
+ case Format::eAstc6x6SfloatBlock: return "Astc6x6SfloatBlock";
+ case Format::eAstc8x5SfloatBlock: return "Astc8x5SfloatBlock";
+ case Format::eAstc8x6SfloatBlock: return "Astc8x6SfloatBlock";
+ case Format::eAstc8x8SfloatBlock: return "Astc8x8SfloatBlock";
+ case Format::eAstc10x5SfloatBlock: return "Astc10x5SfloatBlock";
+ case Format::eAstc10x6SfloatBlock: return "Astc10x6SfloatBlock";
+ case Format::eAstc10x8SfloatBlock: return "Astc10x8SfloatBlock";
+ case Format::eAstc10x10SfloatBlock: return "Astc10x10SfloatBlock";
+ case Format::eAstc12x10SfloatBlock: return "Astc12x10SfloatBlock";
+ case Format::eAstc12x12SfloatBlock: return "Astc12x12SfloatBlock";
+ case Format::ePvrtc12BppUnormBlockIMG: return "Pvrtc12BppUnormBlockIMG";
+ case Format::ePvrtc14BppUnormBlockIMG: return "Pvrtc14BppUnormBlockIMG";
+ case Format::ePvrtc22BppUnormBlockIMG: return "Pvrtc22BppUnormBlockIMG";
+ case Format::ePvrtc24BppUnormBlockIMG: return "Pvrtc24BppUnormBlockIMG";
+ case Format::ePvrtc12BppSrgbBlockIMG: return "Pvrtc12BppSrgbBlockIMG";
+ case Format::ePvrtc14BppSrgbBlockIMG: return "Pvrtc14BppSrgbBlockIMG";
+ case Format::ePvrtc22BppSrgbBlockIMG: return "Pvrtc22BppSrgbBlockIMG";
+ case Format::ePvrtc24BppSrgbBlockIMG: return "Pvrtc24BppSrgbBlockIMG";
+ case Format::eR16G16S105NV: return "R16G16S105NV";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlagBits value )
+ {
+ switch ( value )
+ {
+ case FormatFeatureFlagBits::eSampledImage: return "SampledImage";
+ case FormatFeatureFlagBits::eStorageImage: return "StorageImage";
+ case FormatFeatureFlagBits::eStorageImageAtomic: return "StorageImageAtomic";
+ case FormatFeatureFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer";
+ case FormatFeatureFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer";
+ case FormatFeatureFlagBits::eStorageTexelBufferAtomic: return "StorageTexelBufferAtomic";
+ case FormatFeatureFlagBits::eVertexBuffer: return "VertexBuffer";
+ case FormatFeatureFlagBits::eColorAttachment: return "ColorAttachment";
+ case FormatFeatureFlagBits::eColorAttachmentBlend: return "ColorAttachmentBlend";
+ case FormatFeatureFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment";
+ case FormatFeatureFlagBits::eBlitSrc: return "BlitSrc";
+ case FormatFeatureFlagBits::eBlitDst: return "BlitDst";
+ case FormatFeatureFlagBits::eSampledImageFilterLinear: return "SampledImageFilterLinear";
+ case FormatFeatureFlagBits::eTransferSrc: return "TransferSrc";
+ case FormatFeatureFlagBits::eTransferDst: return "TransferDst";
+ case FormatFeatureFlagBits::eMidpointChromaSamples: return "MidpointChromaSamples";
+ case FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter: return "SampledImageYcbcrConversionLinearFilter";
+ case FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter: return "SampledImageYcbcrConversionSeparateReconstructionFilter";
+ case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit: return "SampledImageYcbcrConversionChromaReconstructionExplicit";
+ case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable:
+ return "SampledImageYcbcrConversionChromaReconstructionExplicitForceable";
+ case FormatFeatureFlagBits::eDisjoint: return "Disjoint";
+ case FormatFeatureFlagBits::eCositedChromaSamples: return "CositedChromaSamples";
+ case FormatFeatureFlagBits::eSampledImageFilterMinmax: return "SampledImageFilterMinmax";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case FormatFeatureFlagBits::eVideoDecodeOutputKHR: return "VideoDecodeOutputKHR";
+ case FormatFeatureFlagBits::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR: return "AccelerationStructureVertexBufferKHR";
+ case FormatFeatureFlagBits::eSampledImageFilterCubicEXT: return "SampledImageFilterCubicEXT";
+ case FormatFeatureFlagBits::eFragmentDensityMapEXT: return "FragmentDensityMapEXT";
+ case FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case FormatFeatureFlagBits::eVideoEncodeInputKHR: return "VideoEncodeInputKHR";
+ case FormatFeatureFlagBits::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ImageCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case ImageCreateFlagBits::eSparseBinding: return "SparseBinding";
+ case ImageCreateFlagBits::eSparseResidency: return "SparseResidency";
+ case ImageCreateFlagBits::eSparseAliased: return "SparseAliased";
+ case ImageCreateFlagBits::eMutableFormat: return "MutableFormat";
+ case ImageCreateFlagBits::eCubeCompatible: return "CubeCompatible";
+ case ImageCreateFlagBits::eAlias: return "Alias";
+ case ImageCreateFlagBits::eSplitInstanceBindRegions: return "SplitInstanceBindRegions";
+ case ImageCreateFlagBits::e2DArrayCompatible: return "2DArrayCompatible";
+ case ImageCreateFlagBits::eBlockTexelViewCompatible: return "BlockTexelViewCompatible";
+ case ImageCreateFlagBits::eExtendedUsage: return "ExtendedUsage";
+ case ImageCreateFlagBits::eProtected: return "Protected";
+ case ImageCreateFlagBits::eDisjoint: return "Disjoint";
+ case ImageCreateFlagBits::eCornerSampledNV: return "CornerSampledNV";
+ case ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT: return "SampleLocationsCompatibleDepthEXT";
+ case ImageCreateFlagBits::eSubsampledEXT: return "SubsampledEXT";
+ case ImageCreateFlagBits::eMultisampledRenderToSingleSampledEXT: return "MultisampledRenderToSingleSampledEXT";
+ case ImageCreateFlagBits::e2DViewCompatibleEXT: return "2DViewCompatibleEXT";
+ case ImageCreateFlagBits::eFragmentDensityMapOffsetQCOM: return "FragmentDensityMapOffsetQCOM";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ImageTiling value )
+ {
+ switch ( value )
+ {
+ case ImageTiling::eOptimal: return "Optimal";
+ case ImageTiling::eLinear: return "Linear";
+ case ImageTiling::eDrmFormatModifierEXT: return "DrmFormatModifierEXT";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ImageType value )
+ {
+ switch ( value )
+ {
+ case ImageType::e1D: return "1D";
+ case ImageType::e2D: return "2D";
+ case ImageType::e3D: return "3D";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ImageUsageFlagBits value )
+ {
+ switch ( value )
+ {
+ case ImageUsageFlagBits::eTransferSrc: return "TransferSrc";
+ case ImageUsageFlagBits::eTransferDst: return "TransferDst";
+ case ImageUsageFlagBits::eSampled: return "Sampled";
+ case ImageUsageFlagBits::eStorage: return "Storage";
+ case ImageUsageFlagBits::eColorAttachment: return "ColorAttachment";
+ case ImageUsageFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment";
+ case ImageUsageFlagBits::eTransientAttachment: return "TransientAttachment";
+ case ImageUsageFlagBits::eInputAttachment: return "InputAttachment";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case ImageUsageFlagBits::eVideoDecodeDstKHR: return "VideoDecodeDstKHR";
+ case ImageUsageFlagBits::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR";
+ case ImageUsageFlagBits::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case ImageUsageFlagBits::eFragmentDensityMapEXT: return "FragmentDensityMapEXT";
+ case ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case ImageUsageFlagBits::eVideoEncodeDstKHR: return "VideoEncodeDstKHR";
+ case ImageUsageFlagBits::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR";
+ case ImageUsageFlagBits::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case ImageUsageFlagBits::eAttachmentFeedbackLoopEXT: return "AttachmentFeedbackLoopEXT";
+ case ImageUsageFlagBits::eInvocationMaskHUAWEI: return "InvocationMaskHUAWEI";
+ case ImageUsageFlagBits::eSampleWeightQCOM: return "SampleWeightQCOM";
+ case ImageUsageFlagBits::eSampleBlockMatchQCOM: return "SampleBlockMatchQCOM";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case InstanceCreateFlagBits::eEnumeratePortabilityKHR: return "EnumeratePortabilityKHR";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( InternalAllocationType value )
+ {
+ switch ( value )
+ {
+ case InternalAllocationType::eExecutable: return "Executable";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlagBits value )
+ {
+ switch ( value )
+ {
+ case MemoryHeapFlagBits::eDeviceLocal: return "DeviceLocal";
+ case MemoryHeapFlagBits::eMultiInstance: return "MultiInstance";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlagBits value )
+ {
+ switch ( value )
+ {
+ case MemoryPropertyFlagBits::eDeviceLocal: return "DeviceLocal";
+ case MemoryPropertyFlagBits::eHostVisible: return "HostVisible";
+ case MemoryPropertyFlagBits::eHostCoherent: return "HostCoherent";
+ case MemoryPropertyFlagBits::eHostCached: return "HostCached";
+ case MemoryPropertyFlagBits::eLazilyAllocated: return "LazilyAllocated";
+ case MemoryPropertyFlagBits::eProtected: return "Protected";
+ case MemoryPropertyFlagBits::eDeviceCoherentAMD: return "DeviceCoherentAMD";
+ case MemoryPropertyFlagBits::eDeviceUncachedAMD: return "DeviceUncachedAMD";
+ case MemoryPropertyFlagBits::eRdmaCapableNV: return "RdmaCapableNV";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PhysicalDeviceType value )
+ {
+ switch ( value )
+ {
+ case PhysicalDeviceType::eOther: return "Other";
+ case PhysicalDeviceType::eIntegratedGpu: return "IntegratedGpu";
+ case PhysicalDeviceType::eDiscreteGpu: return "DiscreteGpu";
+ case PhysicalDeviceType::eVirtualGpu: return "VirtualGpu";
+ case PhysicalDeviceType::eCpu: return "Cpu";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( QueueFlagBits value )
+ {
+ switch ( value )
+ {
+ case QueueFlagBits::eGraphics: return "Graphics";
+ case QueueFlagBits::eCompute: return "Compute";
+ case QueueFlagBits::eTransfer: return "Transfer";
+ case QueueFlagBits::eSparseBinding: return "SparseBinding";
+ case QueueFlagBits::eProtected: return "Protected";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case QueueFlagBits::eVideoDecodeKHR: return "VideoDecodeKHR";
+ case QueueFlagBits::eVideoEncodeKHR: return "VideoEncodeKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case QueueFlagBits::eOpticalFlowNV: return "OpticalFlowNV";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( SampleCountFlagBits value )
+ {
+ switch ( value )
+ {
+ case SampleCountFlagBits::e1: return "1";
+ case SampleCountFlagBits::e2: return "2";
+ case SampleCountFlagBits::e4: return "4";
+ case SampleCountFlagBits::e8: return "8";
+ case SampleCountFlagBits::e16: return "16";
+ case SampleCountFlagBits::e32: return "32";
+ case SampleCountFlagBits::e64: return "64";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( SystemAllocationScope value )
+ {
+ switch ( value )
+ {
+ case SystemAllocationScope::eCommand: return "Command";
+ case SystemAllocationScope::eObject: return "Object";
+ case SystemAllocationScope::eCache: return "Cache";
+ case SystemAllocationScope::eDevice: return "Device";
+ case SystemAllocationScope::eInstance: return "Instance";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits value )
+ {
+ switch ( value )
+ {
+ case PipelineStageFlagBits::eTopOfPipe: return "TopOfPipe";
+ case PipelineStageFlagBits::eDrawIndirect: return "DrawIndirect";
+ case PipelineStageFlagBits::eVertexInput: return "VertexInput";
+ case PipelineStageFlagBits::eVertexShader: return "VertexShader";
+ case PipelineStageFlagBits::eTessellationControlShader: return "TessellationControlShader";
+ case PipelineStageFlagBits::eTessellationEvaluationShader: return "TessellationEvaluationShader";
+ case PipelineStageFlagBits::eGeometryShader: return "GeometryShader";
+ case PipelineStageFlagBits::eFragmentShader: return "FragmentShader";
+ case PipelineStageFlagBits::eEarlyFragmentTests: return "EarlyFragmentTests";
+ case PipelineStageFlagBits::eLateFragmentTests: return "LateFragmentTests";
+ case PipelineStageFlagBits::eColorAttachmentOutput: return "ColorAttachmentOutput";
+ case PipelineStageFlagBits::eComputeShader: return "ComputeShader";
+ case PipelineStageFlagBits::eTransfer: return "Transfer";
+ case PipelineStageFlagBits::eBottomOfPipe: return "BottomOfPipe";
+ case PipelineStageFlagBits::eHost: return "Host";
+ case PipelineStageFlagBits::eAllGraphics: return "AllGraphics";
+ case PipelineStageFlagBits::eAllCommands: return "AllCommands";
+ case PipelineStageFlagBits::eNone: return "None";
+ case PipelineStageFlagBits::eTransformFeedbackEXT: return "TransformFeedbackEXT";
+ case PipelineStageFlagBits::eConditionalRenderingEXT: return "ConditionalRenderingEXT";
+ case PipelineStageFlagBits::eAccelerationStructureBuildKHR: return "AccelerationStructureBuildKHR";
+ case PipelineStageFlagBits::eRayTracingShaderKHR: return "RayTracingShaderKHR";
+ case PipelineStageFlagBits::eFragmentDensityProcessEXT: return "FragmentDensityProcessEXT";
+ case PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR";
+ case PipelineStageFlagBits::eCommandPreprocessNV: return "CommandPreprocessNV";
+ case PipelineStageFlagBits::eTaskShaderEXT: return "TaskShaderEXT";
+ case PipelineStageFlagBits::eMeshShaderEXT: return "MeshShaderEXT";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( MemoryMapFlagBits )
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ImageAspectFlagBits value )
+ {
+ switch ( value )
+ {
+ case ImageAspectFlagBits::eColor: return "Color";
+ case ImageAspectFlagBits::eDepth: return "Depth";
+ case ImageAspectFlagBits::eStencil: return "Stencil";
+ case ImageAspectFlagBits::eMetadata: return "Metadata";
+ case ImageAspectFlagBits::ePlane0: return "Plane0";
+ case ImageAspectFlagBits::ePlane1: return "Plane1";
+ case ImageAspectFlagBits::ePlane2: return "Plane2";
+ case ImageAspectFlagBits::eNone: return "None";
+ case ImageAspectFlagBits::eMemoryPlane0EXT: return "MemoryPlane0EXT";
+ case ImageAspectFlagBits::eMemoryPlane1EXT: return "MemoryPlane1EXT";
+ case ImageAspectFlagBits::eMemoryPlane2EXT: return "MemoryPlane2EXT";
+ case ImageAspectFlagBits::eMemoryPlane3EXT: return "MemoryPlane3EXT";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlagBits value )
+ {
+ switch ( value )
+ {
+ case SparseImageFormatFlagBits::eSingleMiptail: return "SingleMiptail";
+ case SparseImageFormatFlagBits::eAlignedMipSize: return "AlignedMipSize";
+ case SparseImageFormatFlagBits::eNonstandardBlockSize: return "NonstandardBlockSize";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlagBits value )
+ {
+ switch ( value )
+ {
+ case SparseMemoryBindFlagBits::eMetadata: return "Metadata";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( FenceCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case FenceCreateFlagBits::eSignaled: return "Signaled";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( EventCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case EventCreateFlagBits::eDeviceOnly: return "DeviceOnly";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlagBits value )
+ {
+ switch ( value )
+ {
+ case QueryPipelineStatisticFlagBits::eInputAssemblyVertices: return "InputAssemblyVertices";
+ case QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives: return "InputAssemblyPrimitives";
+ case QueryPipelineStatisticFlagBits::eVertexShaderInvocations: return "VertexShaderInvocations";
+ case QueryPipelineStatisticFlagBits::eGeometryShaderInvocations: return "GeometryShaderInvocations";
+ case QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives: return "GeometryShaderPrimitives";
+ case QueryPipelineStatisticFlagBits::eClippingInvocations: return "ClippingInvocations";
+ case QueryPipelineStatisticFlagBits::eClippingPrimitives: return "ClippingPrimitives";
+ case QueryPipelineStatisticFlagBits::eFragmentShaderInvocations: return "FragmentShaderInvocations";
+ case QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches: return "TessellationControlShaderPatches";
+ case QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations: return "TessellationEvaluationShaderInvocations";
+ case QueryPipelineStatisticFlagBits::eComputeShaderInvocations: return "ComputeShaderInvocations";
+ case QueryPipelineStatisticFlagBits::eTaskShaderInvocationsEXT: return "TaskShaderInvocationsEXT";
+ case QueryPipelineStatisticFlagBits::eMeshShaderInvocationsEXT: return "MeshShaderInvocationsEXT";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( QueryResultFlagBits value )
+ {
+ switch ( value )
+ {
+ case QueryResultFlagBits::e64: return "64";
+ case QueryResultFlagBits::eWait: return "Wait";
+ case QueryResultFlagBits::eWithAvailability: return "WithAvailability";
+ case QueryResultFlagBits::ePartial: return "Partial";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case QueryResultFlagBits::eWithStatusKHR: return "WithStatusKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( QueryType value )
+ {
+ switch ( value )
+ {
+ case QueryType::eOcclusion: return "Occlusion";
+ case QueryType::ePipelineStatistics: return "PipelineStatistics";
+ case QueryType::eTimestamp: return "Timestamp";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case QueryType::eResultStatusOnlyKHR: return "ResultStatusOnlyKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case QueryType::eTransformFeedbackStreamEXT: return "TransformFeedbackStreamEXT";
+ case QueryType::ePerformanceQueryKHR: return "PerformanceQueryKHR";
+ case QueryType::eAccelerationStructureCompactedSizeKHR: return "AccelerationStructureCompactedSizeKHR";
+ case QueryType::eAccelerationStructureSerializationSizeKHR: return "AccelerationStructureSerializationSizeKHR";
+ case QueryType::eAccelerationStructureCompactedSizeNV: return "AccelerationStructureCompactedSizeNV";
+ case QueryType::ePerformanceQueryINTEL: return "PerformanceQueryINTEL";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case QueryType::eVideoEncodeBitstreamBufferRangeKHR: return "VideoEncodeBitstreamBufferRangeKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case QueryType::eMeshPrimitivesGeneratedEXT: return "MeshPrimitivesGeneratedEXT";
+ case QueryType::ePrimitivesGeneratedEXT: return "PrimitivesGeneratedEXT";
+ case QueryType::eAccelerationStructureSerializationBottomLevelPointersKHR: return "AccelerationStructureSerializationBottomLevelPointersKHR";
+ case QueryType::eAccelerationStructureSizeKHR: return "AccelerationStructureSizeKHR";
+ case QueryType::eMicromapSerializationSizeEXT: return "MicromapSerializationSizeEXT";
+ case QueryType::eMicromapCompactedSizeEXT: return "MicromapCompactedSizeEXT";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( BufferCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case BufferCreateFlagBits::eSparseBinding: return "SparseBinding";
+ case BufferCreateFlagBits::eSparseResidency: return "SparseResidency";
+ case BufferCreateFlagBits::eSparseAliased: return "SparseAliased";
+ case BufferCreateFlagBits::eProtected: return "Protected";
+ case BufferCreateFlagBits::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( BufferUsageFlagBits value )
+ {
+ switch ( value )
+ {
+ case BufferUsageFlagBits::eTransferSrc: return "TransferSrc";
+ case BufferUsageFlagBits::eTransferDst: return "TransferDst";
+ case BufferUsageFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer";
+ case BufferUsageFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer";
+ case BufferUsageFlagBits::eUniformBuffer: return "UniformBuffer";
+ case BufferUsageFlagBits::eStorageBuffer: return "StorageBuffer";
+ case BufferUsageFlagBits::eIndexBuffer: return "IndexBuffer";
+ case BufferUsageFlagBits::eVertexBuffer: return "VertexBuffer";
+ case BufferUsageFlagBits::eIndirectBuffer: return "IndirectBuffer";
+ case BufferUsageFlagBits::eShaderDeviceAddress: return "ShaderDeviceAddress";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case BufferUsageFlagBits::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR";
+ case BufferUsageFlagBits::eVideoDecodeDstKHR: return "VideoDecodeDstKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case BufferUsageFlagBits::eTransformFeedbackBufferEXT: return "TransformFeedbackBufferEXT";
+ case BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT: return "TransformFeedbackCounterBufferEXT";
+ case BufferUsageFlagBits::eConditionalRenderingEXT: return "ConditionalRenderingEXT";
+ case BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR: return "AccelerationStructureBuildInputReadOnlyKHR";
+ case BufferUsageFlagBits::eAccelerationStructureStorageKHR: return "AccelerationStructureStorageKHR";
+ case BufferUsageFlagBits::eShaderBindingTableKHR: return "ShaderBindingTableKHR";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case BufferUsageFlagBits::eVideoEncodeDstKHR: return "VideoEncodeDstKHR";
+ case BufferUsageFlagBits::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case BufferUsageFlagBits::eMicromapBuildInputReadOnlyEXT: return "MicromapBuildInputReadOnlyEXT";
+ case BufferUsageFlagBits::eMicromapStorageEXT: return "MicromapStorageEXT";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( SharingMode value )
+ {
+ switch ( value )
+ {
+ case SharingMode::eExclusive: return "Exclusive";
+ case SharingMode::eConcurrent: return "Concurrent";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ImageLayout value )
+ {
+ switch ( value )
+ {
+ case ImageLayout::eUndefined: return "Undefined";
+ case ImageLayout::eGeneral: return "General";
+ case ImageLayout::eColorAttachmentOptimal: return "ColorAttachmentOptimal";
+ case ImageLayout::eDepthStencilAttachmentOptimal: return "DepthStencilAttachmentOptimal";
+ case ImageLayout::eDepthStencilReadOnlyOptimal: return "DepthStencilReadOnlyOptimal";
+ case ImageLayout::eShaderReadOnlyOptimal: return "ShaderReadOnlyOptimal";
+ case ImageLayout::eTransferSrcOptimal: return "TransferSrcOptimal";
+ case ImageLayout::eTransferDstOptimal: return "TransferDstOptimal";
+ case ImageLayout::ePreinitialized: return "Preinitialized";
+ case ImageLayout::eDepthReadOnlyStencilAttachmentOptimal: return "DepthReadOnlyStencilAttachmentOptimal";
+ case ImageLayout::eDepthAttachmentStencilReadOnlyOptimal: return "DepthAttachmentStencilReadOnlyOptimal";
+ case ImageLayout::eDepthAttachmentOptimal: return "DepthAttachmentOptimal";
+ case ImageLayout::eDepthReadOnlyOptimal: return "DepthReadOnlyOptimal";
+ case ImageLayout::eStencilAttachmentOptimal: return "StencilAttachmentOptimal";
+ case ImageLayout::eStencilReadOnlyOptimal: return "StencilReadOnlyOptimal";
+ case ImageLayout::eReadOnlyOptimal: return "ReadOnlyOptimal";
+ case ImageLayout::eAttachmentOptimal: return "AttachmentOptimal";
+ case ImageLayout::ePresentSrcKHR: return "PresentSrcKHR";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case ImageLayout::eVideoDecodeDstKHR: return "VideoDecodeDstKHR";
+ case ImageLayout::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR";
+ case ImageLayout::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case ImageLayout::eSharedPresentKHR: return "SharedPresentKHR";
+ case ImageLayout::eFragmentDensityMapOptimalEXT: return "FragmentDensityMapOptimalEXT";
+ case ImageLayout::eFragmentShadingRateAttachmentOptimalKHR: return "FragmentShadingRateAttachmentOptimalKHR";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case ImageLayout::eVideoEncodeDstKHR: return "VideoEncodeDstKHR";
+ case ImageLayout::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR";
+ case ImageLayout::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case ImageLayout::eAttachmentFeedbackLoopOptimalEXT: return "AttachmentFeedbackLoopOptimalEXT";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ComponentSwizzle value )
+ {
+ switch ( value )
+ {
+ case ComponentSwizzle::eIdentity: return "Identity";
+ case ComponentSwizzle::eZero: return "Zero";
+ case ComponentSwizzle::eOne: return "One";
+ case ComponentSwizzle::eR: return "R";
+ case ComponentSwizzle::eG: return "G";
+ case ComponentSwizzle::eB: return "B";
+ case ComponentSwizzle::eA: return "A";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT: return "FragmentDensityMapDynamicEXT";
+ case ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT: return "FragmentDensityMapDeferredEXT";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ImageViewType value )
+ {
+ switch ( value )
+ {
+ case ImageViewType::e1D: return "1D";
+ case ImageViewType::e2D: return "2D";
+ case ImageViewType::e3D: return "3D";
+ case ImageViewType::eCube: return "Cube";
+ case ImageViewType::e1DArray: return "1DArray";
+ case ImageViewType::e2DArray: return "2DArray";
+ case ImageViewType::eCubeArray: return "CubeArray";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( BlendFactor value )
+ {
+ switch ( value )
+ {
+ case BlendFactor::eZero: return "Zero";
+ case BlendFactor::eOne: return "One";
+ case BlendFactor::eSrcColor: return "SrcColor";
+ case BlendFactor::eOneMinusSrcColor: return "OneMinusSrcColor";
+ case BlendFactor::eDstColor: return "DstColor";
+ case BlendFactor::eOneMinusDstColor: return "OneMinusDstColor";
+ case BlendFactor::eSrcAlpha: return "SrcAlpha";
+ case BlendFactor::eOneMinusSrcAlpha: return "OneMinusSrcAlpha";
+ case BlendFactor::eDstAlpha: return "DstAlpha";
+ case BlendFactor::eOneMinusDstAlpha: return "OneMinusDstAlpha";
+ case BlendFactor::eConstantColor: return "ConstantColor";
+ case BlendFactor::eOneMinusConstantColor: return "OneMinusConstantColor";
+ case BlendFactor::eConstantAlpha: return "ConstantAlpha";
+ case BlendFactor::eOneMinusConstantAlpha: return "OneMinusConstantAlpha";
+ case BlendFactor::eSrcAlphaSaturate: return "SrcAlphaSaturate";
+ case BlendFactor::eSrc1Color: return "Src1Color";
+ case BlendFactor::eOneMinusSrc1Color: return "OneMinusSrc1Color";
+ case BlendFactor::eSrc1Alpha: return "Src1Alpha";
+ case BlendFactor::eOneMinusSrc1Alpha: return "OneMinusSrc1Alpha";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( BlendOp value )
+ {
+ switch ( value )
+ {
+ case BlendOp::eAdd: return "Add";
+ case BlendOp::eSubtract: return "Subtract";
+ case BlendOp::eReverseSubtract: return "ReverseSubtract";
+ case BlendOp::eMin: return "Min";
+ case BlendOp::eMax: return "Max";
+ case BlendOp::eZeroEXT: return "ZeroEXT";
+ case BlendOp::eSrcEXT: return "SrcEXT";
+ case BlendOp::eDstEXT: return "DstEXT";
+ case BlendOp::eSrcOverEXT: return "SrcOverEXT";
+ case BlendOp::eDstOverEXT: return "DstOverEXT";
+ case BlendOp::eSrcInEXT: return "SrcInEXT";
+ case BlendOp::eDstInEXT: return "DstInEXT";
+ case BlendOp::eSrcOutEXT: return "SrcOutEXT";
+ case BlendOp::eDstOutEXT: return "DstOutEXT";
+ case BlendOp::eSrcAtopEXT: return "SrcAtopEXT";
+ case BlendOp::eDstAtopEXT: return "DstAtopEXT";
+ case BlendOp::eXorEXT: return "XorEXT";
+ case BlendOp::eMultiplyEXT: return "MultiplyEXT";
+ case BlendOp::eScreenEXT: return "ScreenEXT";
+ case BlendOp::eOverlayEXT: return "OverlayEXT";
+ case BlendOp::eDarkenEXT: return "DarkenEXT";
+ case BlendOp::eLightenEXT: return "LightenEXT";
+ case BlendOp::eColordodgeEXT: return "ColordodgeEXT";
+ case BlendOp::eColorburnEXT: return "ColorburnEXT";
+ case BlendOp::eHardlightEXT: return "HardlightEXT";
+ case BlendOp::eSoftlightEXT: return "SoftlightEXT";
+ case BlendOp::eDifferenceEXT: return "DifferenceEXT";
+ case BlendOp::eExclusionEXT: return "ExclusionEXT";
+ case BlendOp::eInvertEXT: return "InvertEXT";
+ case BlendOp::eInvertRgbEXT: return "InvertRgbEXT";
+ case BlendOp::eLineardodgeEXT: return "LineardodgeEXT";
+ case BlendOp::eLinearburnEXT: return "LinearburnEXT";
+ case BlendOp::eVividlightEXT: return "VividlightEXT";
+ case BlendOp::eLinearlightEXT: return "LinearlightEXT";
+ case BlendOp::ePinlightEXT: return "PinlightEXT";
+ case BlendOp::eHardmixEXT: return "HardmixEXT";
+ case BlendOp::eHslHueEXT: return "HslHueEXT";
+ case BlendOp::eHslSaturationEXT: return "HslSaturationEXT";
+ case BlendOp::eHslColorEXT: return "HslColorEXT";
+ case BlendOp::eHslLuminosityEXT: return "HslLuminosityEXT";
+ case BlendOp::ePlusEXT: return "PlusEXT";
+ case BlendOp::ePlusClampedEXT: return "PlusClampedEXT";
+ case BlendOp::ePlusClampedAlphaEXT: return "PlusClampedAlphaEXT";
+ case BlendOp::ePlusDarkerEXT: return "PlusDarkerEXT";
+ case BlendOp::eMinusEXT: return "MinusEXT";
+ case BlendOp::eMinusClampedEXT: return "MinusClampedEXT";
+ case BlendOp::eContrastEXT: return "ContrastEXT";
+ case BlendOp::eInvertOvgEXT: return "InvertOvgEXT";
+ case BlendOp::eRedEXT: return "RedEXT";
+ case BlendOp::eGreenEXT: return "GreenEXT";
+ case BlendOp::eBlueEXT: return "BlueEXT";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ColorComponentFlagBits value )
+ {
+ switch ( value )
+ {
+ case ColorComponentFlagBits::eR: return "R";
+ case ColorComponentFlagBits::eG: return "G";
+ case ColorComponentFlagBits::eB: return "B";
+ case ColorComponentFlagBits::eA: return "A";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( CompareOp value )
+ {
+ switch ( value )
+ {
+ case CompareOp::eNever: return "Never";
+ case CompareOp::eLess: return "Less";
+ case CompareOp::eEqual: return "Equal";
+ case CompareOp::eLessOrEqual: return "LessOrEqual";
+ case CompareOp::eGreater: return "Greater";
+ case CompareOp::eNotEqual: return "NotEqual";
+ case CompareOp::eGreaterOrEqual: return "GreaterOrEqual";
+ case CompareOp::eAlways: return "Always";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( CullModeFlagBits value )
+ {
+ switch ( value )
+ {
+ case CullModeFlagBits::eNone: return "None";
+ case CullModeFlagBits::eFront: return "Front";
+ case CullModeFlagBits::eBack: return "Back";
+ case CullModeFlagBits::eFrontAndBack: return "FrontAndBack";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DynamicState value )
+ {
+ switch ( value )
+ {
+ case DynamicState::eViewport: return "Viewport";
+ case DynamicState::eScissor: return "Scissor";
+ case DynamicState::eLineWidth: return "LineWidth";
+ case DynamicState::eDepthBias: return "DepthBias";
+ case DynamicState::eBlendConstants: return "BlendConstants";
+ case DynamicState::eDepthBounds: return "DepthBounds";
+ case DynamicState::eStencilCompareMask: return "StencilCompareMask";
+ case DynamicState::eStencilWriteMask: return "StencilWriteMask";
+ case DynamicState::eStencilReference: return "StencilReference";
+ case DynamicState::eCullMode: return "CullMode";
+ case DynamicState::eFrontFace: return "FrontFace";
+ case DynamicState::ePrimitiveTopology: return "PrimitiveTopology";
+ case DynamicState::eViewportWithCount: return "ViewportWithCount";
+ case DynamicState::eScissorWithCount: return "ScissorWithCount";
+ case DynamicState::eVertexInputBindingStride: return "VertexInputBindingStride";
+ case DynamicState::eDepthTestEnable: return "DepthTestEnable";
+ case DynamicState::eDepthWriteEnable: return "DepthWriteEnable";
+ case DynamicState::eDepthCompareOp: return "DepthCompareOp";
+ case DynamicState::eDepthBoundsTestEnable: return "DepthBoundsTestEnable";
+ case DynamicState::eStencilTestEnable: return "StencilTestEnable";
+ case DynamicState::eStencilOp: return "StencilOp";
+ case DynamicState::eRasterizerDiscardEnable: return "RasterizerDiscardEnable";
+ case DynamicState::eDepthBiasEnable: return "DepthBiasEnable";
+ case DynamicState::ePrimitiveRestartEnable: return "PrimitiveRestartEnable";
+ case DynamicState::eViewportWScalingNV: return "ViewportWScalingNV";
+ case DynamicState::eDiscardRectangleEXT: return "DiscardRectangleEXT";
+ case DynamicState::eSampleLocationsEXT: return "SampleLocationsEXT";
+ case DynamicState::eRayTracingPipelineStackSizeKHR: return "RayTracingPipelineStackSizeKHR";
+ case DynamicState::eViewportShadingRatePaletteNV: return "ViewportShadingRatePaletteNV";
+ case DynamicState::eViewportCoarseSampleOrderNV: return "ViewportCoarseSampleOrderNV";
+ case DynamicState::eExclusiveScissorNV: return "ExclusiveScissorNV";
+ case DynamicState::eFragmentShadingRateKHR: return "FragmentShadingRateKHR";
+ case DynamicState::eLineStippleEXT: return "LineStippleEXT";
+ case DynamicState::eVertexInputEXT: return "VertexInputEXT";
+ case DynamicState::ePatchControlPointsEXT: return "PatchControlPointsEXT";
+ case DynamicState::eLogicOpEXT: return "LogicOpEXT";
+ case DynamicState::eColorWriteEnableEXT: return "ColorWriteEnableEXT";
+ case DynamicState::eTessellationDomainOriginEXT: return "TessellationDomainOriginEXT";
+ case DynamicState::eDepthClampEnableEXT: return "DepthClampEnableEXT";
+ case DynamicState::ePolygonModeEXT: return "PolygonModeEXT";
+ case DynamicState::eRasterizationSamplesEXT: return "RasterizationSamplesEXT";
+ case DynamicState::eSampleMaskEXT: return "SampleMaskEXT";
+ case DynamicState::eAlphaToCoverageEnableEXT: return "AlphaToCoverageEnableEXT";
+ case DynamicState::eAlphaToOneEnableEXT: return "AlphaToOneEnableEXT";
+ case DynamicState::eLogicOpEnableEXT: return "LogicOpEnableEXT";
+ case DynamicState::eColorBlendEnableEXT: return "ColorBlendEnableEXT";
+ case DynamicState::eColorBlendEquationEXT: return "ColorBlendEquationEXT";
+ case DynamicState::eColorWriteMaskEXT: return "ColorWriteMaskEXT";
+ case DynamicState::eRasterizationStreamEXT: return "RasterizationStreamEXT";
+ case DynamicState::eConservativeRasterizationModeEXT: return "ConservativeRasterizationModeEXT";
+ case DynamicState::eExtraPrimitiveOverestimationSizeEXT: return "ExtraPrimitiveOverestimationSizeEXT";
+ case DynamicState::eDepthClipEnableEXT: return "DepthClipEnableEXT";
+ case DynamicState::eSampleLocationsEnableEXT: return "SampleLocationsEnableEXT";
+ case DynamicState::eColorBlendAdvancedEXT: return "ColorBlendAdvancedEXT";
+ case DynamicState::eProvokingVertexModeEXT: return "ProvokingVertexModeEXT";
+ case DynamicState::eLineRasterizationModeEXT: return "LineRasterizationModeEXT";
+ case DynamicState::eLineStippleEnableEXT: return "LineStippleEnableEXT";
+ case DynamicState::eDepthClipNegativeOneToOneEXT: return "DepthClipNegativeOneToOneEXT";
+ case DynamicState::eViewportWScalingEnableNV: return "ViewportWScalingEnableNV";
+ case DynamicState::eViewportSwizzleNV: return "ViewportSwizzleNV";
+ case DynamicState::eCoverageToColorEnableNV: return "CoverageToColorEnableNV";
+ case DynamicState::eCoverageToColorLocationNV: return "CoverageToColorLocationNV";
+ case DynamicState::eCoverageModulationModeNV: return "CoverageModulationModeNV";
+ case DynamicState::eCoverageModulationTableEnableNV: return "CoverageModulationTableEnableNV";
+ case DynamicState::eCoverageModulationTableNV: return "CoverageModulationTableNV";
+ case DynamicState::eShadingRateImageEnableNV: return "ShadingRateImageEnableNV";
+ case DynamicState::eRepresentativeFragmentTestEnableNV: return "RepresentativeFragmentTestEnableNV";
+ case DynamicState::eCoverageReductionModeNV: return "CoverageReductionModeNV";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( FrontFace value )
+ {
+ switch ( value )
+ {
+ case FrontFace::eCounterClockwise: return "CounterClockwise";
+ case FrontFace::eClockwise: return "Clockwise";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( LogicOp value )
+ {
+ switch ( value )
+ {
+ case LogicOp::eClear: return "Clear";
+ case LogicOp::eAnd: return "And";
+ case LogicOp::eAndReverse: return "AndReverse";
+ case LogicOp::eCopy: return "Copy";
+ case LogicOp::eAndInverted: return "AndInverted";
+ case LogicOp::eNoOp: return "NoOp";
+ case LogicOp::eXor: return "Xor";
+ case LogicOp::eOr: return "Or";
+ case LogicOp::eNor: return "Nor";
+ case LogicOp::eEquivalent: return "Equivalent";
+ case LogicOp::eInvert: return "Invert";
+ case LogicOp::eOrReverse: return "OrReverse";
+ case LogicOp::eCopyInverted: return "CopyInverted";
+ case LogicOp::eOrInverted: return "OrInverted";
+ case LogicOp::eNand: return "Nand";
+ case LogicOp::eSet: return "Set";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case PipelineCreateFlagBits::eDisableOptimization: return "DisableOptimization";
+ case PipelineCreateFlagBits::eAllowDerivatives: return "AllowDerivatives";
+ case PipelineCreateFlagBits::eDerivative: return "Derivative";
+ case PipelineCreateFlagBits::eViewIndexFromDeviceIndex: return "ViewIndexFromDeviceIndex";
+ case PipelineCreateFlagBits::eDispatchBase: return "DispatchBase";
+ case PipelineCreateFlagBits::eFailOnPipelineCompileRequired: return "FailOnPipelineCompileRequired";
+ case PipelineCreateFlagBits::eEarlyReturnOnFailure: return "EarlyReturnOnFailure";
+ case PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR: return "RenderingFragmentShadingRateAttachmentKHR";
+ case PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT: return "RenderingFragmentDensityMapAttachmentEXT";
+ case PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR: return "RayTracingNoNullAnyHitShadersKHR";
+ case PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR: return "RayTracingNoNullClosestHitShadersKHR";
+ case PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR: return "RayTracingNoNullMissShadersKHR";
+ case PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR: return "RayTracingNoNullIntersectionShadersKHR";
+ case PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR: return "RayTracingSkipTrianglesKHR";
+ case PipelineCreateFlagBits::eRayTracingSkipAabbsKHR: return "RayTracingSkipAabbsKHR";
+ case PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR: return "RayTracingShaderGroupHandleCaptureReplayKHR";
+ case PipelineCreateFlagBits::eDeferCompileNV: return "DeferCompileNV";
+ case PipelineCreateFlagBits::eCaptureStatisticsKHR: return "CaptureStatisticsKHR";
+ case PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR: return "CaptureInternalRepresentationsKHR";
+ case PipelineCreateFlagBits::eIndirectBindableNV: return "IndirectBindableNV";
+ case PipelineCreateFlagBits::eLibraryKHR: return "LibraryKHR";
+ case PipelineCreateFlagBits::eRetainLinkTimeOptimizationInfoEXT: return "RetainLinkTimeOptimizationInfoEXT";
+ case PipelineCreateFlagBits::eLinkTimeOptimizationEXT: return "LinkTimeOptimizationEXT";
+ case PipelineCreateFlagBits::eRayTracingAllowMotionNV: return "RayTracingAllowMotionNV";
+ case PipelineCreateFlagBits::eColorAttachmentFeedbackLoopEXT: return "ColorAttachmentFeedbackLoopEXT";
+ case PipelineCreateFlagBits::eDepthStencilAttachmentFeedbackLoopEXT: return "DepthStencilAttachmentFeedbackLoopEXT";
+ case PipelineCreateFlagBits::eRayTracingOpacityMicromapEXT: return "RayTracingOpacityMicromapEXT";
+ case PipelineCreateFlagBits::eNoProtectedAccessEXT: return "NoProtectedAccessEXT";
+ case PipelineCreateFlagBits::eProtectedAccessOnlyEXT: return "ProtectedAccessOnlyEXT";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSize: return "AllowVaryingSubgroupSize";
+ case PipelineShaderStageCreateFlagBits::eRequireFullSubgroups: return "RequireFullSubgroups";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PolygonMode value )
+ {
+ switch ( value )
+ {
+ case PolygonMode::eFill: return "Fill";
+ case PolygonMode::eLine: return "Line";
+ case PolygonMode::ePoint: return "Point";
+ case PolygonMode::eFillRectangleNV: return "FillRectangleNV";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PrimitiveTopology value )
+ {
+ switch ( value )
+ {
+ case PrimitiveTopology::ePointList: return "PointList";
+ case PrimitiveTopology::eLineList: return "LineList";
+ case PrimitiveTopology::eLineStrip: return "LineStrip";
+ case PrimitiveTopology::eTriangleList: return "TriangleList";
+ case PrimitiveTopology::eTriangleStrip: return "TriangleStrip";
+ case PrimitiveTopology::eTriangleFan: return "TriangleFan";
+ case PrimitiveTopology::eLineListWithAdjacency: return "LineListWithAdjacency";
+ case PrimitiveTopology::eLineStripWithAdjacency: return "LineStripWithAdjacency";
+ case PrimitiveTopology::eTriangleListWithAdjacency: return "TriangleListWithAdjacency";
+ case PrimitiveTopology::eTriangleStripWithAdjacency: return "TriangleStripWithAdjacency";
+ case PrimitiveTopology::ePatchList: return "PatchList";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ShaderStageFlagBits value )
+ {
+ switch ( value )
+ {
+ case ShaderStageFlagBits::eVertex: return "Vertex";
+ case ShaderStageFlagBits::eTessellationControl: return "TessellationControl";
+ case ShaderStageFlagBits::eTessellationEvaluation: return "TessellationEvaluation";
+ case ShaderStageFlagBits::eGeometry: return "Geometry";
+ case ShaderStageFlagBits::eFragment: return "Fragment";
+ case ShaderStageFlagBits::eCompute: return "Compute";
+ case ShaderStageFlagBits::eAllGraphics: return "AllGraphics";
+ case ShaderStageFlagBits::eAll: return "All";
+ case ShaderStageFlagBits::eRaygenKHR: return "RaygenKHR";
+ case ShaderStageFlagBits::eAnyHitKHR: return "AnyHitKHR";
+ case ShaderStageFlagBits::eClosestHitKHR: return "ClosestHitKHR";
+ case ShaderStageFlagBits::eMissKHR: return "MissKHR";
+ case ShaderStageFlagBits::eIntersectionKHR: return "IntersectionKHR";
+ case ShaderStageFlagBits::eCallableKHR: return "CallableKHR";
+ case ShaderStageFlagBits::eTaskEXT: return "TaskEXT";
+ case ShaderStageFlagBits::eMeshEXT: return "MeshEXT";
+ case ShaderStageFlagBits::eSubpassShadingHUAWEI: return "SubpassShadingHUAWEI";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( StencilOp value )
+ {
+ switch ( value )
+ {
+ case StencilOp::eKeep: return "Keep";
+ case StencilOp::eZero: return "Zero";
+ case StencilOp::eReplace: return "Replace";
+ case StencilOp::eIncrementAndClamp: return "IncrementAndClamp";
+ case StencilOp::eDecrementAndClamp: return "DecrementAndClamp";
+ case StencilOp::eInvert: return "Invert";
+ case StencilOp::eIncrementAndWrap: return "IncrementAndWrap";
+ case StencilOp::eDecrementAndWrap: return "DecrementAndWrap";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VertexInputRate value )
+ {
+ switch ( value )
+ {
+ case VertexInputRate::eVertex: return "Vertex";
+ case VertexInputRate::eInstance: return "Instance";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( BorderColor value )
+ {
+ switch ( value )
+ {
+ case BorderColor::eFloatTransparentBlack: return "FloatTransparentBlack";
+ case BorderColor::eIntTransparentBlack: return "IntTransparentBlack";
+ case BorderColor::eFloatOpaqueBlack: return "FloatOpaqueBlack";
+ case BorderColor::eIntOpaqueBlack: return "IntOpaqueBlack";
+ case BorderColor::eFloatOpaqueWhite: return "FloatOpaqueWhite";
+ case BorderColor::eIntOpaqueWhite: return "IntOpaqueWhite";
+ case BorderColor::eFloatCustomEXT: return "FloatCustomEXT";
+ case BorderColor::eIntCustomEXT: return "IntCustomEXT";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( Filter value )
+ {
+ switch ( value )
+ {
+ case Filter::eNearest: return "Nearest";
+ case Filter::eLinear: return "Linear";
+ case Filter::eCubicEXT: return "CubicEXT";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( SamplerAddressMode value )
+ {
+ switch ( value )
+ {
+ case SamplerAddressMode::eRepeat: return "Repeat";
+ case SamplerAddressMode::eMirroredRepeat: return "MirroredRepeat";
+ case SamplerAddressMode::eClampToEdge: return "ClampToEdge";
+ case SamplerAddressMode::eClampToBorder: return "ClampToBorder";
+ case SamplerAddressMode::eMirrorClampToEdge: return "MirrorClampToEdge";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case SamplerCreateFlagBits::eSubsampledEXT: return "SubsampledEXT";
+ case SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT: return "SubsampledCoarseReconstructionEXT";
+ case SamplerCreateFlagBits::eNonSeamlessCubeMapEXT: return "NonSeamlessCubeMapEXT";
+ case SamplerCreateFlagBits::eImageProcessingQCOM: return "ImageProcessingQCOM";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( SamplerMipmapMode value )
+ {
+ switch ( value )
+ {
+ case SamplerMipmapMode::eNearest: return "Nearest";
+ case SamplerMipmapMode::eLinear: return "Linear";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case DescriptorPoolCreateFlagBits::eFreeDescriptorSet: return "FreeDescriptorSet";
+ case DescriptorPoolCreateFlagBits::eUpdateAfterBind: return "UpdateAfterBind";
+ case DescriptorPoolCreateFlagBits::eHostOnlyEXT: return "HostOnlyEXT";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool: return "UpdateAfterBindPool";
+ case DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR: return "PushDescriptorKHR";
+ case DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT: return "HostOnlyPoolEXT";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DescriptorType value )
+ {
+ switch ( value )
+ {
+ case DescriptorType::eSampler: return "Sampler";
+ case DescriptorType::eCombinedImageSampler: return "CombinedImageSampler";
+ case DescriptorType::eSampledImage: return "SampledImage";
+ case DescriptorType::eStorageImage: return "StorageImage";
+ case DescriptorType::eUniformTexelBuffer: return "UniformTexelBuffer";
+ case DescriptorType::eStorageTexelBuffer: return "StorageTexelBuffer";
+ case DescriptorType::eUniformBuffer: return "UniformBuffer";
+ case DescriptorType::eStorageBuffer: return "StorageBuffer";
+ case DescriptorType::eUniformBufferDynamic: return "UniformBufferDynamic";
+ case DescriptorType::eStorageBufferDynamic: return "StorageBufferDynamic";
+ case DescriptorType::eInputAttachment: return "InputAttachment";
+ case DescriptorType::eInlineUniformBlock: return "InlineUniformBlock";
+ case DescriptorType::eAccelerationStructureKHR: return "AccelerationStructureKHR";
+ case DescriptorType::eAccelerationStructureNV: return "AccelerationStructureNV";
+ case DescriptorType::eSampleWeightImageQCOM: return "SampleWeightImageQCOM";
+ case DescriptorType::eBlockMatchImageQCOM: return "BlockMatchImageQCOM";
+ case DescriptorType::eMutableEXT: return "MutableEXT";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlagBits )
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( AccessFlagBits value )
+ {
+ switch ( value )
+ {
+ case AccessFlagBits::eIndirectCommandRead: return "IndirectCommandRead";
+ case AccessFlagBits::eIndexRead: return "IndexRead";
+ case AccessFlagBits::eVertexAttributeRead: return "VertexAttributeRead";
+ case AccessFlagBits::eUniformRead: return "UniformRead";
+ case AccessFlagBits::eInputAttachmentRead: return "InputAttachmentRead";
+ case AccessFlagBits::eShaderRead: return "ShaderRead";
+ case AccessFlagBits::eShaderWrite: return "ShaderWrite";
+ case AccessFlagBits::eColorAttachmentRead: return "ColorAttachmentRead";
+ case AccessFlagBits::eColorAttachmentWrite: return "ColorAttachmentWrite";
+ case AccessFlagBits::eDepthStencilAttachmentRead: return "DepthStencilAttachmentRead";
+ case AccessFlagBits::eDepthStencilAttachmentWrite: return "DepthStencilAttachmentWrite";
+ case AccessFlagBits::eTransferRead: return "TransferRead";
+ case AccessFlagBits::eTransferWrite: return "TransferWrite";
+ case AccessFlagBits::eHostRead: return "HostRead";
+ case AccessFlagBits::eHostWrite: return "HostWrite";
+ case AccessFlagBits::eMemoryRead: return "MemoryRead";
+ case AccessFlagBits::eMemoryWrite: return "MemoryWrite";
+ case AccessFlagBits::eNone: return "None";
+ case AccessFlagBits::eTransformFeedbackWriteEXT: return "TransformFeedbackWriteEXT";
+ case AccessFlagBits::eTransformFeedbackCounterReadEXT: return "TransformFeedbackCounterReadEXT";
+ case AccessFlagBits::eTransformFeedbackCounterWriteEXT: return "TransformFeedbackCounterWriteEXT";
+ case AccessFlagBits::eConditionalRenderingReadEXT: return "ConditionalRenderingReadEXT";
+ case AccessFlagBits::eColorAttachmentReadNoncoherentEXT: return "ColorAttachmentReadNoncoherentEXT";
+ case AccessFlagBits::eAccelerationStructureReadKHR: return "AccelerationStructureReadKHR";
+ case AccessFlagBits::eAccelerationStructureWriteKHR: return "AccelerationStructureWriteKHR";
+ case AccessFlagBits::eFragmentDensityMapReadEXT: return "FragmentDensityMapReadEXT";
+ case AccessFlagBits::eFragmentShadingRateAttachmentReadKHR: return "FragmentShadingRateAttachmentReadKHR";
+ case AccessFlagBits::eCommandPreprocessReadNV: return "CommandPreprocessReadNV";
+ case AccessFlagBits::eCommandPreprocessWriteNV: return "CommandPreprocessWriteNV";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlagBits value )
+ {
+ switch ( value )
+ {
+ case AttachmentDescriptionFlagBits::eMayAlias: return "MayAlias";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( AttachmentLoadOp value )
+ {
+ switch ( value )
+ {
+ case AttachmentLoadOp::eLoad: return "Load";
+ case AttachmentLoadOp::eClear: return "Clear";
+ case AttachmentLoadOp::eDontCare: return "DontCare";
+ case AttachmentLoadOp::eNoneEXT: return "NoneEXT";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( AttachmentStoreOp value )
+ {
+ switch ( value )
+ {
+ case AttachmentStoreOp::eStore: return "Store";
+ case AttachmentStoreOp::eDontCare: return "DontCare";
+ case AttachmentStoreOp::eNone: return "None";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DependencyFlagBits value )
+ {
+ switch ( value )
+ {
+ case DependencyFlagBits::eByRegion: return "ByRegion";
+ case DependencyFlagBits::eDeviceGroup: return "DeviceGroup";
+ case DependencyFlagBits::eViewLocal: return "ViewLocal";
+ case DependencyFlagBits::eFeedbackLoopEXT: return "FeedbackLoopEXT";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case FramebufferCreateFlagBits::eImageless: return "Imageless";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineBindPoint value )
+ {
+ switch ( value )
+ {
+ case PipelineBindPoint::eGraphics: return "Graphics";
+ case PipelineBindPoint::eCompute: return "Compute";
+ case PipelineBindPoint::eRayTracingKHR: return "RayTracingKHR";
+ case PipelineBindPoint::eSubpassShadingHUAWEI: return "SubpassShadingHUAWEI";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case RenderPassCreateFlagBits::eTransformQCOM: return "TransformQCOM";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlagBits value )
+ {
+ switch ( value )
+ {
+ case SubpassDescriptionFlagBits::ePerViewAttributesNVX: return "PerViewAttributesNVX";
+ case SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX: return "PerViewPositionXOnlyNVX";
+ case SubpassDescriptionFlagBits::eFragmentRegionQCOM: return "FragmentRegionQCOM";
+ case SubpassDescriptionFlagBits::eShaderResolveQCOM: return "ShaderResolveQCOM";
+ case SubpassDescriptionFlagBits::eRasterizationOrderAttachmentColorAccessEXT: return "RasterizationOrderAttachmentColorAccessEXT";
+ case SubpassDescriptionFlagBits::eRasterizationOrderAttachmentDepthAccessEXT: return "RasterizationOrderAttachmentDepthAccessEXT";
+ case SubpassDescriptionFlagBits::eRasterizationOrderAttachmentStencilAccessEXT: return "RasterizationOrderAttachmentStencilAccessEXT";
+ case SubpassDescriptionFlagBits::eEnableLegacyDitheringEXT: return "EnableLegacyDitheringEXT";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case CommandPoolCreateFlagBits::eTransient: return "Transient";
+ case CommandPoolCreateFlagBits::eResetCommandBuffer: return "ResetCommandBuffer";
+ case CommandPoolCreateFlagBits::eProtected: return "Protected";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlagBits value )
+ {
+ switch ( value )
+ {
+ case CommandPoolResetFlagBits::eReleaseResources: return "ReleaseResources";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( CommandBufferLevel value )
+ {
+ switch ( value )
+ {
+ case CommandBufferLevel::ePrimary: return "Primary";
+ case CommandBufferLevel::eSecondary: return "Secondary";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlagBits value )
+ {
+ switch ( value )
+ {
+ case CommandBufferResetFlagBits::eReleaseResources: return "ReleaseResources";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlagBits value )
+ {
+ switch ( value )
+ {
+ case CommandBufferUsageFlagBits::eOneTimeSubmit: return "OneTimeSubmit";
+ case CommandBufferUsageFlagBits::eRenderPassContinue: return "RenderPassContinue";
+ case CommandBufferUsageFlagBits::eSimultaneousUse: return "SimultaneousUse";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( QueryControlFlagBits value )
+ {
+ switch ( value )
+ {
+ case QueryControlFlagBits::ePrecise: return "Precise";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( IndexType value )
+ {
+ switch ( value )
+ {
+ case IndexType::eUint16: return "Uint16";
+ case IndexType::eUint32: return "Uint32";
+ case IndexType::eNoneKHR: return "NoneKHR";
+ case IndexType::eUint8EXT: return "Uint8EXT";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( StencilFaceFlagBits value )
+ {
+ switch ( value )
+ {
+ case StencilFaceFlagBits::eFront: return "Front";
+ case StencilFaceFlagBits::eBack: return "Back";
+ case StencilFaceFlagBits::eFrontAndBack: return "FrontAndBack";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( SubpassContents value )
+ {
+ switch ( value )
+ {
+ case SubpassContents::eInline: return "Inline";
+ case SubpassContents::eSecondaryCommandBuffers: return "SecondaryCommandBuffers";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_VERSION_1_1 ===
+
+ VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlagBits value )
+ {
+ switch ( value )
+ {
+ case SubgroupFeatureFlagBits::eBasic: return "Basic";
+ case SubgroupFeatureFlagBits::eVote: return "Vote";
+ case SubgroupFeatureFlagBits::eArithmetic: return "Arithmetic";
+ case SubgroupFeatureFlagBits::eBallot: return "Ballot";
+ case SubgroupFeatureFlagBits::eShuffle: return "Shuffle";
+ case SubgroupFeatureFlagBits::eShuffleRelative: return "ShuffleRelative";
+ case SubgroupFeatureFlagBits::eClustered: return "Clustered";
+ case SubgroupFeatureFlagBits::eQuad: return "Quad";
+ case SubgroupFeatureFlagBits::ePartitionedNV: return "PartitionedNV";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlagBits value )
+ {
+ switch ( value )
+ {
+ case PeerMemoryFeatureFlagBits::eCopySrc: return "CopySrc";
+ case PeerMemoryFeatureFlagBits::eCopyDst: return "CopyDst";
+ case PeerMemoryFeatureFlagBits::eGenericSrc: return "GenericSrc";
+ case PeerMemoryFeatureFlagBits::eGenericDst: return "GenericDst";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlagBits value )
+ {
+ switch ( value )
+ {
+ case MemoryAllocateFlagBits::eDeviceMask: return "DeviceMask";
+ case MemoryAllocateFlagBits::eDeviceAddress: return "DeviceAddress";
+ case MemoryAllocateFlagBits::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlagBits )
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PointClippingBehavior value )
+ {
+ switch ( value )
+ {
+ case PointClippingBehavior::eAllClipPlanes: return "AllClipPlanes";
+ case PointClippingBehavior::eUserClipPlanesOnly: return "UserClipPlanesOnly";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( TessellationDomainOrigin value )
+ {
+ switch ( value )
+ {
+ case TessellationDomainOrigin::eUpperLeft: return "UpperLeft";
+ case TessellationDomainOrigin::eLowerLeft: return "LowerLeft";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case DeviceQueueCreateFlagBits::eProtected: return "Protected";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrModelConversion value )
+ {
+ switch ( value )
+ {
+ case SamplerYcbcrModelConversion::eRgbIdentity: return "RgbIdentity";
+ case SamplerYcbcrModelConversion::eYcbcrIdentity: return "YcbcrIdentity";
+ case SamplerYcbcrModelConversion::eYcbcr709: return "Ycbcr709";
+ case SamplerYcbcrModelConversion::eYcbcr601: return "Ycbcr601";
+ case SamplerYcbcrModelConversion::eYcbcr2020: return "Ycbcr2020";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrRange value )
+ {
+ switch ( value )
+ {
+ case SamplerYcbcrRange::eItuFull: return "ItuFull";
+ case SamplerYcbcrRange::eItuNarrow: return "ItuNarrow";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ChromaLocation value )
+ {
+ switch ( value )
+ {
+ case ChromaLocation::eCositedEven: return "CositedEven";
+ case ChromaLocation::eMidpoint: return "Midpoint";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateType value )
+ {
+ switch ( value )
+ {
+ case DescriptorUpdateTemplateType::eDescriptorSet: return "DescriptorSet";
+ case DescriptorUpdateTemplateType::ePushDescriptorsKHR: return "PushDescriptorsKHR";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBits value )
+ {
+ switch ( value )
+ {
+ case ExternalMemoryHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd";
+ case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32";
+ case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
+ case ExternalMemoryHandleTypeFlagBits::eD3D11Texture: return "D3D11Texture";
+ case ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt: return "D3D11TextureKmt";
+ case ExternalMemoryHandleTypeFlagBits::eD3D12Heap: return "D3D12Heap";
+ case ExternalMemoryHandleTypeFlagBits::eD3D12Resource: return "D3D12Resource";
+ case ExternalMemoryHandleTypeFlagBits::eDmaBufEXT: return "DmaBufEXT";
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+ case ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID: return "AndroidHardwareBufferANDROID";
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+ case ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT: return "HostAllocationEXT";
+ case ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT: return "HostMappedForeignMemoryEXT";
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+ case ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA: return "ZirconVmoFUCHSIA";
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+ case ExternalMemoryHandleTypeFlagBits::eRdmaAddressNV: return "RdmaAddressNV";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBits value )
+ {
+ switch ( value )
+ {
+ case ExternalMemoryFeatureFlagBits::eDedicatedOnly: return "DedicatedOnly";
+ case ExternalMemoryFeatureFlagBits::eExportable: return "Exportable";
+ case ExternalMemoryFeatureFlagBits::eImportable: return "Importable";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlagBits value )
+ {
+ switch ( value )
+ {
+ case ExternalFenceHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd";
+ case ExternalFenceHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32";
+ case ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
+ case ExternalFenceHandleTypeFlagBits::eSyncFd: return "SyncFd";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlagBits value )
+ {
+ switch ( value )
+ {
+ case ExternalFenceFeatureFlagBits::eExportable: return "Exportable";
+ case ExternalFenceFeatureFlagBits::eImportable: return "Importable";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( FenceImportFlagBits value )
+ {
+ switch ( value )
+ {
+ case FenceImportFlagBits::eTemporary: return "Temporary";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlagBits value )
+ {
+ switch ( value )
+ {
+ case SemaphoreImportFlagBits::eTemporary: return "Temporary";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlagBits value )
+ {
+ switch ( value )
+ {
+ case ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd";
+ case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32";
+ case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
+ case ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence: return "D3D12Fence";
+ case ExternalSemaphoreHandleTypeFlagBits::eSyncFd: return "SyncFd";
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+ case ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA: return "ZirconEventFUCHSIA";
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlagBits value )
+ {
+ switch ( value )
+ {
+ case ExternalSemaphoreFeatureFlagBits::eExportable: return "Exportable";
+ case ExternalSemaphoreFeatureFlagBits::eImportable: return "Importable";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_VERSION_1_2 ===
+
+ VULKAN_HPP_INLINE std::string to_string( DriverId value )
+ {
+ switch ( value )
+ {
+ case DriverId::eAmdProprietary: return "AmdProprietary";
+ case DriverId::eAmdOpenSource: return "AmdOpenSource";
+ case DriverId::eMesaRadv: return "MesaRadv";
+ case DriverId::eNvidiaProprietary: return "NvidiaProprietary";
+ case DriverId::eIntelProprietaryWindows: return "IntelProprietaryWindows";
+ case DriverId::eIntelOpenSourceMESA: return "IntelOpenSourceMESA";
+ case DriverId::eImaginationProprietary: return "ImaginationProprietary";
+ case DriverId::eQualcommProprietary: return "QualcommProprietary";
+ case DriverId::eArmProprietary: return "ArmProprietary";
+ case DriverId::eGoogleSwiftshader: return "GoogleSwiftshader";
+ case DriverId::eGgpProprietary: return "GgpProprietary";
+ case DriverId::eBroadcomProprietary: return "BroadcomProprietary";
+ case DriverId::eMesaLlvmpipe: return "MesaLlvmpipe";
+ case DriverId::eMoltenvk: return "Moltenvk";
+ case DriverId::eCoreaviProprietary: return "CoreaviProprietary";
+ case DriverId::eJuiceProprietary: return "JuiceProprietary";
+ case DriverId::eVerisiliconProprietary: return "VerisiliconProprietary";
+ case DriverId::eMesaTurnip: return "MesaTurnip";
+ case DriverId::eMesaV3Dv: return "MesaV3Dv";
+ case DriverId::eMesaPanvk: return "MesaPanvk";
+ case DriverId::eSamsungProprietary: return "SamsungProprietary";
+ case DriverId::eMesaVenus: return "MesaVenus";
+ case DriverId::eMesaDozen: return "MesaDozen";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ShaderFloatControlsIndependence value )
+ {
+ switch ( value )
+ {
+ case ShaderFloatControlsIndependence::e32BitOnly: return "32BitOnly";
+ case ShaderFloatControlsIndependence::eAll: return "All";
+ case ShaderFloatControlsIndependence::eNone: return "None";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlagBits value )
+ {
+ switch ( value )
+ {
+ case DescriptorBindingFlagBits::eUpdateAfterBind: return "UpdateAfterBind";
+ case DescriptorBindingFlagBits::eUpdateUnusedWhilePending: return "UpdateUnusedWhilePending";
+ case DescriptorBindingFlagBits::ePartiallyBound: return "PartiallyBound";
+ case DescriptorBindingFlagBits::eVariableDescriptorCount: return "VariableDescriptorCount";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ResolveModeFlagBits value )
+ {
+ switch ( value )
+ {
+ case ResolveModeFlagBits::eNone: return "None";
+ case ResolveModeFlagBits::eSampleZero: return "SampleZero";
+ case ResolveModeFlagBits::eAverage: return "Average";
+ case ResolveModeFlagBits::eMin: return "Min";
+ case ResolveModeFlagBits::eMax: return "Max";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( SamplerReductionMode value )
+ {
+ switch ( value )
+ {
+ case SamplerReductionMode::eWeightedAverage: return "WeightedAverage";
+ case SamplerReductionMode::eMin: return "Min";
+ case SamplerReductionMode::eMax: return "Max";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( SemaphoreType value )
+ {
+ switch ( value )
+ {
+ case SemaphoreType::eBinary: return "Binary";
+ case SemaphoreType::eTimeline: return "Timeline";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlagBits value )
+ {
+ switch ( value )
+ {
+ case SemaphoreWaitFlagBits::eAny: return "Any";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_VERSION_1_3 ===
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagBits value )
+ {
+ switch ( value )
+ {
+ case PipelineCreationFeedbackFlagBits::eValid: return "Valid";
+ case PipelineCreationFeedbackFlagBits::eApplicationPipelineCacheHit: return "ApplicationPipelineCacheHit";
+ case PipelineCreationFeedbackFlagBits::eBasePipelineAcceleration: return "BasePipelineAcceleration";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlagBits value )
+ {
+ switch ( value )
+ {
+ case ToolPurposeFlagBits::eValidation: return "Validation";
+ case ToolPurposeFlagBits::eProfiling: return "Profiling";
+ case ToolPurposeFlagBits::eTracing: return "Tracing";
+ case ToolPurposeFlagBits::eAdditionalFeatures: return "AdditionalFeatures";
+ case ToolPurposeFlagBits::eModifyingFeatures: return "ModifyingFeatures";
+ case ToolPurposeFlagBits::eDebugReportingEXT: return "DebugReportingEXT";
+ case ToolPurposeFlagBits::eDebugMarkersEXT: return "DebugMarkersEXT";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PrivateDataSlotCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits2 value )
+ {
+ switch ( value )
+ {
+ case PipelineStageFlagBits2::eNone: return "None";
+ case PipelineStageFlagBits2::eTopOfPipe: return "TopOfPipe";
+ case PipelineStageFlagBits2::eDrawIndirect: return "DrawIndirect";
+ case PipelineStageFlagBits2::eVertexInput: return "VertexInput";
+ case PipelineStageFlagBits2::eVertexShader: return "VertexShader";
+ case PipelineStageFlagBits2::eTessellationControlShader: return "TessellationControlShader";
+ case PipelineStageFlagBits2::eTessellationEvaluationShader: return "TessellationEvaluationShader";
+ case PipelineStageFlagBits2::eGeometryShader: return "GeometryShader";
+ case PipelineStageFlagBits2::eFragmentShader: return "FragmentShader";
+ case PipelineStageFlagBits2::eEarlyFragmentTests: return "EarlyFragmentTests";
+ case PipelineStageFlagBits2::eLateFragmentTests: return "LateFragmentTests";
+ case PipelineStageFlagBits2::eColorAttachmentOutput: return "ColorAttachmentOutput";
+ case PipelineStageFlagBits2::eComputeShader: return "ComputeShader";
+ case PipelineStageFlagBits2::eAllTransfer: return "AllTransfer";
+ case PipelineStageFlagBits2::eBottomOfPipe: return "BottomOfPipe";
+ case PipelineStageFlagBits2::eHost: return "Host";
+ case PipelineStageFlagBits2::eAllGraphics: return "AllGraphics";
+ case PipelineStageFlagBits2::eAllCommands: return "AllCommands";
+ case PipelineStageFlagBits2::eCopy: return "Copy";
+ case PipelineStageFlagBits2::eResolve: return "Resolve";
+ case PipelineStageFlagBits2::eBlit: return "Blit";
+ case PipelineStageFlagBits2::eClear: return "Clear";
+ case PipelineStageFlagBits2::eIndexInput: return "IndexInput";
+ case PipelineStageFlagBits2::eVertexAttributeInput: return "VertexAttributeInput";
+ case PipelineStageFlagBits2::ePreRasterizationShaders: return "PreRasterizationShaders";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case PipelineStageFlagBits2::eVideoDecodeKHR: return "VideoDecodeKHR";
+ case PipelineStageFlagBits2::eVideoEncodeKHR: return "VideoEncodeKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case PipelineStageFlagBits2::eTransformFeedbackEXT: return "TransformFeedbackEXT";
+ case PipelineStageFlagBits2::eConditionalRenderingEXT: return "ConditionalRenderingEXT";
+ case PipelineStageFlagBits2::eCommandPreprocessNV: return "CommandPreprocessNV";
+ case PipelineStageFlagBits2::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR";
+ case PipelineStageFlagBits2::eAccelerationStructureBuildKHR: return "AccelerationStructureBuildKHR";
+ case PipelineStageFlagBits2::eRayTracingShaderKHR: return "RayTracingShaderKHR";
+ case PipelineStageFlagBits2::eFragmentDensityProcessEXT: return "FragmentDensityProcessEXT";
+ case PipelineStageFlagBits2::eTaskShaderEXT: return "TaskShaderEXT";
+ case PipelineStageFlagBits2::eMeshShaderEXT: return "MeshShaderEXT";
+ case PipelineStageFlagBits2::eSubpassShadingHUAWEI: return "SubpassShadingHUAWEI";
+ case PipelineStageFlagBits2::eInvocationMaskHUAWEI: return "InvocationMaskHUAWEI";
+ case PipelineStageFlagBits2::eAccelerationStructureCopyKHR: return "AccelerationStructureCopyKHR";
+ case PipelineStageFlagBits2::eMicromapBuildEXT: return "MicromapBuildEXT";
+ case PipelineStageFlagBits2::eOpticalFlowNV: return "OpticalFlowNV";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( AccessFlagBits2 value )
+ {
+ switch ( value )
+ {
+ case AccessFlagBits2::eNone: return "None";
+ case AccessFlagBits2::eIndirectCommandRead: return "IndirectCommandRead";
+ case AccessFlagBits2::eIndexRead: return "IndexRead";
+ case AccessFlagBits2::eVertexAttributeRead: return "VertexAttributeRead";
+ case AccessFlagBits2::eUniformRead: return "UniformRead";
+ case AccessFlagBits2::eInputAttachmentRead: return "InputAttachmentRead";
+ case AccessFlagBits2::eShaderRead: return "ShaderRead";
+ case AccessFlagBits2::eShaderWrite: return "ShaderWrite";
+ case AccessFlagBits2::eColorAttachmentRead: return "ColorAttachmentRead";
+ case AccessFlagBits2::eColorAttachmentWrite: return "ColorAttachmentWrite";
+ case AccessFlagBits2::eDepthStencilAttachmentRead: return "DepthStencilAttachmentRead";
+ case AccessFlagBits2::eDepthStencilAttachmentWrite: return "DepthStencilAttachmentWrite";
+ case AccessFlagBits2::eTransferRead: return "TransferRead";
+ case AccessFlagBits2::eTransferWrite: return "TransferWrite";
+ case AccessFlagBits2::eHostRead: return "HostRead";
+ case AccessFlagBits2::eHostWrite: return "HostWrite";
+ case AccessFlagBits2::eMemoryRead: return "MemoryRead";
+ case AccessFlagBits2::eMemoryWrite: return "MemoryWrite";
+ case AccessFlagBits2::eShaderSampledRead: return "ShaderSampledRead";
+ case AccessFlagBits2::eShaderStorageRead: return "ShaderStorageRead";
+ case AccessFlagBits2::eShaderStorageWrite: return "ShaderStorageWrite";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case AccessFlagBits2::eVideoDecodeReadKHR: return "VideoDecodeReadKHR";
+ case AccessFlagBits2::eVideoDecodeWriteKHR: return "VideoDecodeWriteKHR";
+ case AccessFlagBits2::eVideoEncodeReadKHR: return "VideoEncodeReadKHR";
+ case AccessFlagBits2::eVideoEncodeWriteKHR: return "VideoEncodeWriteKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case AccessFlagBits2::eTransformFeedbackWriteEXT: return "TransformFeedbackWriteEXT";
+ case AccessFlagBits2::eTransformFeedbackCounterReadEXT: return "TransformFeedbackCounterReadEXT";
+ case AccessFlagBits2::eTransformFeedbackCounterWriteEXT: return "TransformFeedbackCounterWriteEXT";
+ case AccessFlagBits2::eConditionalRenderingReadEXT: return "ConditionalRenderingReadEXT";
+ case AccessFlagBits2::eCommandPreprocessReadNV: return "CommandPreprocessReadNV";
+ case AccessFlagBits2::eCommandPreprocessWriteNV: return "CommandPreprocessWriteNV";
+ case AccessFlagBits2::eFragmentShadingRateAttachmentReadKHR: return "FragmentShadingRateAttachmentReadKHR";
+ case AccessFlagBits2::eAccelerationStructureReadKHR: return "AccelerationStructureReadKHR";
+ case AccessFlagBits2::eAccelerationStructureWriteKHR: return "AccelerationStructureWriteKHR";
+ case AccessFlagBits2::eFragmentDensityMapReadEXT: return "FragmentDensityMapReadEXT";
+ case AccessFlagBits2::eColorAttachmentReadNoncoherentEXT: return "ColorAttachmentReadNoncoherentEXT";
+ case AccessFlagBits2::eInvocationMaskReadHUAWEI: return "InvocationMaskReadHUAWEI";
+ case AccessFlagBits2::eShaderBindingTableReadKHR: return "ShaderBindingTableReadKHR";
+ case AccessFlagBits2::eMicromapReadEXT: return "MicromapReadEXT";
+ case AccessFlagBits2::eMicromapWriteEXT: return "MicromapWriteEXT";
+ case AccessFlagBits2::eOpticalFlowReadNV: return "OpticalFlowReadNV";
+ case AccessFlagBits2::eOpticalFlowWriteNV: return "OpticalFlowWriteNV";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( SubmitFlagBits value )
+ {
+ switch ( value )
+ {
+ case SubmitFlagBits::eProtected: return "Protected";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( RenderingFlagBits value )
+ {
+ switch ( value )
+ {
+ case RenderingFlagBits::eContentsSecondaryCommandBuffers: return "ContentsSecondaryCommandBuffers";
+ case RenderingFlagBits::eSuspending: return "Suspending";
+ case RenderingFlagBits::eResuming: return "Resuming";
+ case RenderingFlagBits::eEnableLegacyDitheringEXT: return "EnableLegacyDitheringEXT";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlagBits2 value )
+ {
+ switch ( value )
+ {
+ case FormatFeatureFlagBits2::eSampledImage: return "SampledImage";
+ case FormatFeatureFlagBits2::eStorageImage: return "StorageImage";
+ case FormatFeatureFlagBits2::eStorageImageAtomic: return "StorageImageAtomic";
+ case FormatFeatureFlagBits2::eUniformTexelBuffer: return "UniformTexelBuffer";
+ case FormatFeatureFlagBits2::eStorageTexelBuffer: return "StorageTexelBuffer";
+ case FormatFeatureFlagBits2::eStorageTexelBufferAtomic: return "StorageTexelBufferAtomic";
+ case FormatFeatureFlagBits2::eVertexBuffer: return "VertexBuffer";
+ case FormatFeatureFlagBits2::eColorAttachment: return "ColorAttachment";
+ case FormatFeatureFlagBits2::eColorAttachmentBlend: return "ColorAttachmentBlend";
+ case FormatFeatureFlagBits2::eDepthStencilAttachment: return "DepthStencilAttachment";
+ case FormatFeatureFlagBits2::eBlitSrc: return "BlitSrc";
+ case FormatFeatureFlagBits2::eBlitDst: return "BlitDst";
+ case FormatFeatureFlagBits2::eSampledImageFilterLinear: return "SampledImageFilterLinear";
+ case FormatFeatureFlagBits2::eSampledImageFilterCubic: return "SampledImageFilterCubic";
+ case FormatFeatureFlagBits2::eTransferSrc: return "TransferSrc";
+ case FormatFeatureFlagBits2::eTransferDst: return "TransferDst";
+ case FormatFeatureFlagBits2::eSampledImageFilterMinmax: return "SampledImageFilterMinmax";
+ case FormatFeatureFlagBits2::eMidpointChromaSamples: return "MidpointChromaSamples";
+ case FormatFeatureFlagBits2::eSampledImageYcbcrConversionLinearFilter: return "SampledImageYcbcrConversionLinearFilter";
+ case FormatFeatureFlagBits2::eSampledImageYcbcrConversionSeparateReconstructionFilter: return "SampledImageYcbcrConversionSeparateReconstructionFilter";
+ case FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicit: return "SampledImageYcbcrConversionChromaReconstructionExplicit";
+ case FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable:
+ return "SampledImageYcbcrConversionChromaReconstructionExplicitForceable";
+ case FormatFeatureFlagBits2::eDisjoint: return "Disjoint";
+ case FormatFeatureFlagBits2::eCositedChromaSamples: return "CositedChromaSamples";
+ case FormatFeatureFlagBits2::eStorageReadWithoutFormat: return "StorageReadWithoutFormat";
+ case FormatFeatureFlagBits2::eStorageWriteWithoutFormat: return "StorageWriteWithoutFormat";
+ case FormatFeatureFlagBits2::eSampledImageDepthComparison: return "SampledImageDepthComparison";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case FormatFeatureFlagBits2::eVideoDecodeOutputKHR: return "VideoDecodeOutputKHR";
+ case FormatFeatureFlagBits2::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case FormatFeatureFlagBits2::eAccelerationStructureVertexBufferKHR: return "AccelerationStructureVertexBufferKHR";
+ case FormatFeatureFlagBits2::eFragmentDensityMapEXT: return "FragmentDensityMapEXT";
+ case FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case FormatFeatureFlagBits2::eVideoEncodeInputKHR: return "VideoEncodeInputKHR";
+ case FormatFeatureFlagBits2::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case FormatFeatureFlagBits2::eLinearColorAttachmentNV: return "LinearColorAttachmentNV";
+ case FormatFeatureFlagBits2::eWeightImageQCOM: return "WeightImageQCOM";
+ case FormatFeatureFlagBits2::eWeightSampledImageQCOM: return "WeightSampledImageQCOM";
+ case FormatFeatureFlagBits2::eBlockMatchingQCOM: return "BlockMatchingQCOM";
+ case FormatFeatureFlagBits2::eBoxFilterSampledQCOM: return "BoxFilterSampledQCOM";
+ case FormatFeatureFlagBits2::eOpticalFlowImageNV: return "OpticalFlowImageNV";
+ case FormatFeatureFlagBits2::eOpticalFlowVectorNV: return "OpticalFlowVectorNV";
+ case FormatFeatureFlagBits2::eOpticalFlowCostNV: return "OpticalFlowCostNV";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_KHR_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagBitsKHR value )
+ {
+ switch ( value )
+ {
+ case SurfaceTransformFlagBitsKHR::eIdentity: return "Identity";
+ case SurfaceTransformFlagBitsKHR::eRotate90: return "Rotate90";
+ case SurfaceTransformFlagBitsKHR::eRotate180: return "Rotate180";
+ case SurfaceTransformFlagBitsKHR::eRotate270: return "Rotate270";
+ case SurfaceTransformFlagBitsKHR::eHorizontalMirror: return "HorizontalMirror";
+ case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90: return "HorizontalMirrorRotate90";
+ case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180: return "HorizontalMirrorRotate180";
+ case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270: return "HorizontalMirrorRotate270";
+ case SurfaceTransformFlagBitsKHR::eInherit: return "Inherit";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PresentModeKHR value )
+ {
+ switch ( value )
+ {
+ case PresentModeKHR::eImmediate: return "Immediate";
+ case PresentModeKHR::eMailbox: return "Mailbox";
+ case PresentModeKHR::eFifo: return "Fifo";
+ case PresentModeKHR::eFifoRelaxed: return "FifoRelaxed";
+ case PresentModeKHR::eSharedDemandRefresh: return "SharedDemandRefresh";
+ case PresentModeKHR::eSharedContinuousRefresh: return "SharedContinuousRefresh";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ColorSpaceKHR value )
+ {
+ switch ( value )
+ {
+ case ColorSpaceKHR::eSrgbNonlinear: return "SrgbNonlinear";
+ case ColorSpaceKHR::eDisplayP3NonlinearEXT: return "DisplayP3NonlinearEXT";
+ case ColorSpaceKHR::eExtendedSrgbLinearEXT: return "ExtendedSrgbLinearEXT";
+ case ColorSpaceKHR::eDisplayP3LinearEXT: return "DisplayP3LinearEXT";
+ case ColorSpaceKHR::eDciP3NonlinearEXT: return "DciP3NonlinearEXT";
+ case ColorSpaceKHR::eBt709LinearEXT: return "Bt709LinearEXT";
+ case ColorSpaceKHR::eBt709NonlinearEXT: return "Bt709NonlinearEXT";
+ case ColorSpaceKHR::eBt2020LinearEXT: return "Bt2020LinearEXT";
+ case ColorSpaceKHR::eHdr10St2084EXT: return "Hdr10St2084EXT";
+ case ColorSpaceKHR::eDolbyvisionEXT: return "DolbyvisionEXT";
+ case ColorSpaceKHR::eHdr10HlgEXT: return "Hdr10HlgEXT";
+ case ColorSpaceKHR::eAdobergbLinearEXT: return "AdobergbLinearEXT";
+ case ColorSpaceKHR::eAdobergbNonlinearEXT: return "AdobergbNonlinearEXT";
+ case ColorSpaceKHR::ePassThroughEXT: return "PassThroughEXT";
+ case ColorSpaceKHR::eExtendedSrgbNonlinearEXT: return "ExtendedSrgbNonlinearEXT";
+ case ColorSpaceKHR::eDisplayNativeAMD: return "DisplayNativeAMD";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagBitsKHR value )
+ {
+ switch ( value )
+ {
+ case CompositeAlphaFlagBitsKHR::eOpaque: return "Opaque";
+ case CompositeAlphaFlagBitsKHR::ePreMultiplied: return "PreMultiplied";
+ case CompositeAlphaFlagBitsKHR::ePostMultiplied: return "PostMultiplied";
+ case CompositeAlphaFlagBitsKHR::eInherit: return "Inherit";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_KHR_swapchain ===
+
+ VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagBitsKHR value )
+ {
+ switch ( value )
+ {
+ case SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions: return "SplitInstanceBindRegions";
+ case SwapchainCreateFlagBitsKHR::eProtected: return "Protected";
+ case SwapchainCreateFlagBitsKHR::eMutableFormat: return "MutableFormat";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagBitsKHR value )
+ {
+ switch ( value )
+ {
+ case DeviceGroupPresentModeFlagBitsKHR::eLocal: return "Local";
+ case DeviceGroupPresentModeFlagBitsKHR::eRemote: return "Remote";
+ case DeviceGroupPresentModeFlagBitsKHR::eSum: return "Sum";
+ case DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice: return "LocalMultiDevice";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_KHR_display ===
+
+ VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagBitsKHR value )
+ {
+ switch ( value )
+ {
+ case DisplayPlaneAlphaFlagBitsKHR::eOpaque: return "Opaque";
+ case DisplayPlaneAlphaFlagBitsKHR::eGlobal: return "Global";
+ case DisplayPlaneAlphaFlagBitsKHR::ePerPixel: return "PerPixel";
+ case DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied: return "PerPixelPremultiplied";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagBitsKHR )
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagBitsKHR )
+ {
+ return "(void)";
+ }
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+ //=== VK_KHR_xlib_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagBitsKHR )
+ {
+ return "(void)";
+ }
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+ //=== VK_KHR_xcb_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagBitsKHR )
+ {
+ return "(void)";
+ }
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+ //=== VK_KHR_wayland_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagBitsKHR )
+ {
+ return "(void)";
+ }
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+ //=== VK_KHR_android_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagBitsKHR )
+ {
+ return "(void)";
+ }
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+ //=== VK_KHR_win32_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagBitsKHR )
+ {
+ return "(void)";
+ }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ //=== VK_EXT_debug_report ===
+
+ VULKAN_HPP_INLINE std::string to_string( DebugReportFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case DebugReportFlagBitsEXT::eInformation: return "Information";
+ case DebugReportFlagBitsEXT::eWarning: return "Warning";
+ case DebugReportFlagBitsEXT::ePerformanceWarning: return "PerformanceWarning";
+ case DebugReportFlagBitsEXT::eError: return "Error";
+ case DebugReportFlagBitsEXT::eDebug: return "Debug";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DebugReportObjectTypeEXT value )
+ {
+ switch ( value )
+ {
+ case DebugReportObjectTypeEXT::eUnknown: return "Unknown";
+ case DebugReportObjectTypeEXT::eInstance: return "Instance";
+ case DebugReportObjectTypeEXT::ePhysicalDevice: return "PhysicalDevice";
+ case DebugReportObjectTypeEXT::eDevice: return "Device";
+ case DebugReportObjectTypeEXT::eQueue: return "Queue";
+ case DebugReportObjectTypeEXT::eSemaphore: return "Semaphore";
+ case DebugReportObjectTypeEXT::eCommandBuffer: return "CommandBuffer";
+ case DebugReportObjectTypeEXT::eFence: return "Fence";
+ case DebugReportObjectTypeEXT::eDeviceMemory: return "DeviceMemory";
+ case DebugReportObjectTypeEXT::eBuffer: return "Buffer";
+ case DebugReportObjectTypeEXT::eImage: return "Image";
+ case DebugReportObjectTypeEXT::eEvent: return "Event";
+ case DebugReportObjectTypeEXT::eQueryPool: return "QueryPool";
+ case DebugReportObjectTypeEXT::eBufferView: return "BufferView";
+ case DebugReportObjectTypeEXT::eImageView: return "ImageView";
+ case DebugReportObjectTypeEXT::eShaderModule: return "ShaderModule";
+ case DebugReportObjectTypeEXT::ePipelineCache: return "PipelineCache";
+ case DebugReportObjectTypeEXT::ePipelineLayout: return "PipelineLayout";
+ case DebugReportObjectTypeEXT::eRenderPass: return "RenderPass";
+ case DebugReportObjectTypeEXT::ePipeline: return "Pipeline";
+ case DebugReportObjectTypeEXT::eDescriptorSetLayout: return "DescriptorSetLayout";
+ case DebugReportObjectTypeEXT::eSampler: return "Sampler";
+ case DebugReportObjectTypeEXT::eDescriptorPool: return "DescriptorPool";
+ case DebugReportObjectTypeEXT::eDescriptorSet: return "DescriptorSet";
+ case DebugReportObjectTypeEXT::eFramebuffer: return "Framebuffer";
+ case DebugReportObjectTypeEXT::eCommandPool: return "CommandPool";
+ case DebugReportObjectTypeEXT::eSurfaceKHR: return "SurfaceKHR";
+ case DebugReportObjectTypeEXT::eSwapchainKHR: return "SwapchainKHR";
+ case DebugReportObjectTypeEXT::eDebugReportCallbackEXT: return "DebugReportCallbackEXT";
+ case DebugReportObjectTypeEXT::eDisplayKHR: return "DisplayKHR";
+ case DebugReportObjectTypeEXT::eDisplayModeKHR: return "DisplayModeKHR";
+ case DebugReportObjectTypeEXT::eValidationCacheEXT: return "ValidationCacheEXT";
+ case DebugReportObjectTypeEXT::eSamplerYcbcrConversion: return "SamplerYcbcrConversion";
+ case DebugReportObjectTypeEXT::eDescriptorUpdateTemplate: return "DescriptorUpdateTemplate";
+ case DebugReportObjectTypeEXT::eCuModuleNVX: return "CuModuleNVX";
+ case DebugReportObjectTypeEXT::eCuFunctionNVX: return "CuFunctionNVX";
+ case DebugReportObjectTypeEXT::eAccelerationStructureKHR: return "AccelerationStructureKHR";
+ case DebugReportObjectTypeEXT::eAccelerationStructureNV: return "AccelerationStructureNV";
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+ case DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA: return "BufferCollectionFUCHSIA";
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_AMD_rasterization_order ===
+
+ VULKAN_HPP_INLINE std::string to_string( RasterizationOrderAMD value )
+ {
+ switch ( value )
+ {
+ case RasterizationOrderAMD::eStrict: return "Strict";
+ case RasterizationOrderAMD::eRelaxed: return "Relaxed";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_KHR_video_queue ===
+
+ VULKAN_HPP_INLINE std::string to_string( VideoCodecOperationFlagBitsKHR value )
+ {
+ switch ( value )
+ {
+ case VideoCodecOperationFlagBitsKHR::eNone: return "None";
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case VideoCodecOperationFlagBitsKHR::eEncodeH264EXT: return "EncodeH264EXT";
+ case VideoCodecOperationFlagBitsKHR::eEncodeH265EXT: return "EncodeH265EXT";
+ case VideoCodecOperationFlagBitsKHR::eDecodeH264EXT: return "DecodeH264EXT";
+ case VideoCodecOperationFlagBitsKHR::eDecodeH265EXT: return "DecodeH265EXT";
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoChromaSubsamplingFlagBitsKHR value )
+ {
+ switch ( value )
+ {
+ case VideoChromaSubsamplingFlagBitsKHR::eInvalid: return "Invalid";
+ case VideoChromaSubsamplingFlagBitsKHR::eMonochrome: return "Monochrome";
+ case VideoChromaSubsamplingFlagBitsKHR::e420: return "420";
+ case VideoChromaSubsamplingFlagBitsKHR::e422: return "422";
+ case VideoChromaSubsamplingFlagBitsKHR::e444: return "444";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoComponentBitDepthFlagBitsKHR value )
+ {
+ switch ( value )
+ {
+ case VideoComponentBitDepthFlagBitsKHR::eInvalid: return "Invalid";
+ case VideoComponentBitDepthFlagBitsKHR::e8: return "8";
+ case VideoComponentBitDepthFlagBitsKHR::e10: return "10";
+ case VideoComponentBitDepthFlagBitsKHR::e12: return "12";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoCapabilityFlagBitsKHR value )
+ {
+ switch ( value )
+ {
+ case VideoCapabilityFlagBitsKHR::eProtectedContent: return "ProtectedContent";
+ case VideoCapabilityFlagBitsKHR::eSeparateReferenceImages: return "SeparateReferenceImages";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoSessionCreateFlagBitsKHR value )
+ {
+ switch ( value )
+ {
+ case VideoSessionCreateFlagBitsKHR::eProtectedContent: return "ProtectedContent";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoCodingControlFlagBitsKHR value )
+ {
+ switch ( value )
+ {
+ case VideoCodingControlFlagBitsKHR::eReset: return "Reset";
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case VideoCodingControlFlagBitsKHR::eEncodeRateControl: return "EncodeRateControl";
+ case VideoCodingControlFlagBitsKHR::eEncodeRateControlLayer: return "EncodeRateControlLayer";
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( QueryResultStatusKHR value )
+ {
+ switch ( value )
+ {
+ case QueryResultStatusKHR::eError: return "Error";
+ case QueryResultStatusKHR::eNotReady: return "NotReady";
+ case QueryResultStatusKHR::eComplete: return "Complete";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoSessionParametersCreateFlagBitsKHR )
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoBeginCodingFlagBitsKHR )
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEndCodingFlagBitsKHR )
+ {
+ return "(void)";
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_KHR_video_decode_queue ===
+
+ VULKAN_HPP_INLINE std::string to_string( VideoDecodeCapabilityFlagBitsKHR value )
+ {
+ switch ( value )
+ {
+ case VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputCoincide: return "DpbAndOutputCoincide";
+ case VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputDistinct: return "DpbAndOutputDistinct";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoDecodeUsageFlagBitsKHR value )
+ {
+ switch ( value )
+ {
+ case VideoDecodeUsageFlagBitsKHR::eDefault: return "Default";
+ case VideoDecodeUsageFlagBitsKHR::eTranscoding: return "Transcoding";
+ case VideoDecodeUsageFlagBitsKHR::eOffline: return "Offline";
+ case VideoDecodeUsageFlagBitsKHR::eStreaming: return "Streaming";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoDecodeFlagBitsKHR )
+ {
+ return "(void)";
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ //=== VK_EXT_transform_feedback ===
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagBitsEXT )
+ {
+ return "(void)";
+ }
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_EXT_video_encode_h264 ===
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CapabilityFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case VideoEncodeH264CapabilityFlagBitsEXT::eDirect8X8InferenceEnabled: return "Direct8X8InferenceEnabled";
+ case VideoEncodeH264CapabilityFlagBitsEXT::eDirect8X8InferenceDisabled: return "Direct8X8InferenceDisabled";
+ case VideoEncodeH264CapabilityFlagBitsEXT::eSeparateColourPlane: return "SeparateColourPlane";
+ case VideoEncodeH264CapabilityFlagBitsEXT::eQpprimeYZeroTransformBypass: return "QpprimeYZeroTransformBypass";
+ case VideoEncodeH264CapabilityFlagBitsEXT::eScalingLists: return "ScalingLists";
+ case VideoEncodeH264CapabilityFlagBitsEXT::eHrdCompliance: return "HrdCompliance";
+ case VideoEncodeH264CapabilityFlagBitsEXT::eChromaQpOffset: return "ChromaQpOffset";
+ case VideoEncodeH264CapabilityFlagBitsEXT::eSecondChromaQpOffset: return "SecondChromaQpOffset";
+ case VideoEncodeH264CapabilityFlagBitsEXT::ePicInitQpMinus26: return "PicInitQpMinus26";
+ case VideoEncodeH264CapabilityFlagBitsEXT::eWeightedPred: return "WeightedPred";
+ case VideoEncodeH264CapabilityFlagBitsEXT::eWeightedBipredExplicit: return "WeightedBipredExplicit";
+ case VideoEncodeH264CapabilityFlagBitsEXT::eWeightedBipredImplicit: return "WeightedBipredImplicit";
+ case VideoEncodeH264CapabilityFlagBitsEXT::eWeightedPredNoTable: return "WeightedPredNoTable";
+ case VideoEncodeH264CapabilityFlagBitsEXT::eTransform8X8: return "Transform8X8";
+ case VideoEncodeH264CapabilityFlagBitsEXT::eCabac: return "Cabac";
+ case VideoEncodeH264CapabilityFlagBitsEXT::eCavlc: return "Cavlc";
+ case VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterDisabled: return "DeblockingFilterDisabled";
+ case VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterEnabled: return "DeblockingFilterEnabled";
+ case VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterPartial: return "DeblockingFilterPartial";
+ case VideoEncodeH264CapabilityFlagBitsEXT::eDisableDirectSpatialMvPred: return "DisableDirectSpatialMvPred";
+ case VideoEncodeH264CapabilityFlagBitsEXT::eMultipleSlicePerFrame: return "MultipleSlicePerFrame";
+ case VideoEncodeH264CapabilityFlagBitsEXT::eSliceMbCount: return "SliceMbCount";
+ case VideoEncodeH264CapabilityFlagBitsEXT::eRowUnalignedSlice: return "RowUnalignedSlice";
+ case VideoEncodeH264CapabilityFlagBitsEXT::eDifferentSliceType: return "DifferentSliceType";
+ case VideoEncodeH264CapabilityFlagBitsEXT::eBFrameInL1List: return "BFrameInL1List";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264InputModeFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case VideoEncodeH264InputModeFlagBitsEXT::eFrame: return "Frame";
+ case VideoEncodeH264InputModeFlagBitsEXT::eSlice: return "Slice";
+ case VideoEncodeH264InputModeFlagBitsEXT::eNonVcl: return "NonVcl";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264OutputModeFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case VideoEncodeH264OutputModeFlagBitsEXT::eFrame: return "Frame";
+ case VideoEncodeH264OutputModeFlagBitsEXT::eSlice: return "Slice";
+ case VideoEncodeH264OutputModeFlagBitsEXT::eNonVcl: return "NonVcl";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264RateControlStructureEXT value )
+ {
+ switch ( value )
+ {
+ case VideoEncodeH264RateControlStructureEXT::eUnknown: return "Unknown";
+ case VideoEncodeH264RateControlStructureEXT::eFlat: return "Flat";
+ case VideoEncodeH264RateControlStructureEXT::eDyadic: return "Dyadic";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_EXT_video_encode_h265 ===
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CapabilityFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case VideoEncodeH265CapabilityFlagBitsEXT::eSeparateColourPlane: return "SeparateColourPlane";
+ case VideoEncodeH265CapabilityFlagBitsEXT::eScalingLists: return "ScalingLists";
+ case VideoEncodeH265CapabilityFlagBitsEXT::eSampleAdaptiveOffsetEnabled: return "SampleAdaptiveOffsetEnabled";
+ case VideoEncodeH265CapabilityFlagBitsEXT::ePcmEnable: return "PcmEnable";
+ case VideoEncodeH265CapabilityFlagBitsEXT::eSpsTemporalMvpEnabled: return "SpsTemporalMvpEnabled";
+ case VideoEncodeH265CapabilityFlagBitsEXT::eHrdCompliance: return "HrdCompliance";
+ case VideoEncodeH265CapabilityFlagBitsEXT::eInitQpMinus26: return "InitQpMinus26";
+ case VideoEncodeH265CapabilityFlagBitsEXT::eLog2ParallelMergeLevelMinus2: return "Log2ParallelMergeLevelMinus2";
+ case VideoEncodeH265CapabilityFlagBitsEXT::eSignDataHidingEnabled: return "SignDataHidingEnabled";
+ case VideoEncodeH265CapabilityFlagBitsEXT::eTransformSkipEnabled: return "TransformSkipEnabled";
+ case VideoEncodeH265CapabilityFlagBitsEXT::eTransformSkipDisabled: return "TransformSkipDisabled";
+ case VideoEncodeH265CapabilityFlagBitsEXT::ePpsSliceChromaQpOffsetsPresent: return "PpsSliceChromaQpOffsetsPresent";
+ case VideoEncodeH265CapabilityFlagBitsEXT::eWeightedPred: return "WeightedPred";
+ case VideoEncodeH265CapabilityFlagBitsEXT::eWeightedBipred: return "WeightedBipred";
+ case VideoEncodeH265CapabilityFlagBitsEXT::eWeightedPredNoTable: return "WeightedPredNoTable";
+ case VideoEncodeH265CapabilityFlagBitsEXT::eTransquantBypassEnabled: return "TransquantBypassEnabled";
+ case VideoEncodeH265CapabilityFlagBitsEXT::eEntropyCodingSyncEnabled: return "EntropyCodingSyncEnabled";
+ case VideoEncodeH265CapabilityFlagBitsEXT::eDeblockingFilterOverrideEnabled: return "DeblockingFilterOverrideEnabled";
+ case VideoEncodeH265CapabilityFlagBitsEXT::eMultipleTilePerFrame: return "MultipleTilePerFrame";
+ case VideoEncodeH265CapabilityFlagBitsEXT::eMultipleSlicePerTile: return "MultipleSlicePerTile";
+ case VideoEncodeH265CapabilityFlagBitsEXT::eMultipleTilePerSlice: return "MultipleTilePerSlice";
+ case VideoEncodeH265CapabilityFlagBitsEXT::eSliceSegmentCtbCount: return "SliceSegmentCtbCount";
+ case VideoEncodeH265CapabilityFlagBitsEXT::eRowUnalignedSliceSegment: return "RowUnalignedSliceSegment";
+ case VideoEncodeH265CapabilityFlagBitsEXT::eDependentSliceSegment: return "DependentSliceSegment";
+ case VideoEncodeH265CapabilityFlagBitsEXT::eDifferentSliceType: return "DifferentSliceType";
+ case VideoEncodeH265CapabilityFlagBitsEXT::eBFrameInL1List: return "BFrameInL1List";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265InputModeFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case VideoEncodeH265InputModeFlagBitsEXT::eFrame: return "Frame";
+ case VideoEncodeH265InputModeFlagBitsEXT::eSliceSegment: return "SliceSegment";
+ case VideoEncodeH265InputModeFlagBitsEXT::eNonVcl: return "NonVcl";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265OutputModeFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case VideoEncodeH265OutputModeFlagBitsEXT::eFrame: return "Frame";
+ case VideoEncodeH265OutputModeFlagBitsEXT::eSliceSegment: return "SliceSegment";
+ case VideoEncodeH265OutputModeFlagBitsEXT::eNonVcl: return "NonVcl";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CtbSizeFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case VideoEncodeH265CtbSizeFlagBitsEXT::e16: return "16";
+ case VideoEncodeH265CtbSizeFlagBitsEXT::e32: return "32";
+ case VideoEncodeH265CtbSizeFlagBitsEXT::e64: return "64";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265TransformBlockSizeFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case VideoEncodeH265TransformBlockSizeFlagBitsEXT::e4: return "4";
+ case VideoEncodeH265TransformBlockSizeFlagBitsEXT::e8: return "8";
+ case VideoEncodeH265TransformBlockSizeFlagBitsEXT::e16: return "16";
+ case VideoEncodeH265TransformBlockSizeFlagBitsEXT::e32: return "32";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265RateControlStructureEXT value )
+ {
+ switch ( value )
+ {
+ case VideoEncodeH265RateControlStructureEXT::eUnknown: return "Unknown";
+ case VideoEncodeH265RateControlStructureEXT::eFlat: return "Flat";
+ case VideoEncodeH265RateControlStructureEXT::eDyadic: return "Dyadic";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_EXT_video_decode_h264 ===
+
+ VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264PictureLayoutFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case VideoDecodeH264PictureLayoutFlagBitsEXT::eProgressive: return "Progressive";
+ case VideoDecodeH264PictureLayoutFlagBitsEXT::eInterlacedInterleavedLines: return "InterlacedInterleavedLines";
+ case VideoDecodeH264PictureLayoutFlagBitsEXT::eInterlacedSeparatePlanes: return "InterlacedSeparatePlanes";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ //=== VK_AMD_shader_info ===
+
+ VULKAN_HPP_INLINE std::string to_string( ShaderInfoTypeAMD value )
+ {
+ switch ( value )
+ {
+ case ShaderInfoTypeAMD::eStatistics: return "Statistics";
+ case ShaderInfoTypeAMD::eBinary: return "Binary";
+ case ShaderInfoTypeAMD::eDisassembly: return "Disassembly";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+#if defined( VK_USE_PLATFORM_GGP )
+ //=== VK_GGP_stream_descriptor_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagBitsGGP )
+ {
+ return "(void)";
+ }
+#endif /*VK_USE_PLATFORM_GGP*/
+
+ //=== VK_NV_external_memory_capabilities ===
+
+ VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBitsNV value )
+ {
+ switch ( value )
+ {
+ case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32: return "OpaqueWin32";
+ case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
+ case ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image: return "D3D11Image";
+ case ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt: return "D3D11ImageKmt";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBitsNV value )
+ {
+ switch ( value )
+ {
+ case ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly: return "DedicatedOnly";
+ case ExternalMemoryFeatureFlagBitsNV::eExportable: return "Exportable";
+ case ExternalMemoryFeatureFlagBitsNV::eImportable: return "Importable";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_EXT_validation_flags ===
+
+ VULKAN_HPP_INLINE std::string to_string( ValidationCheckEXT value )
+ {
+ switch ( value )
+ {
+ case ValidationCheckEXT::eAll: return "All";
+ case ValidationCheckEXT::eShaders: return "Shaders";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+ //=== VK_NN_vi_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagBitsNN )
+ {
+ return "(void)";
+ }
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+ //=== VK_EXT_pipeline_robustness ===
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineRobustnessBufferBehaviorEXT value )
+ {
+ switch ( value )
+ {
+ case PipelineRobustnessBufferBehaviorEXT::eDeviceDefault: return "DeviceDefault";
+ case PipelineRobustnessBufferBehaviorEXT::eDisabled: return "Disabled";
+ case PipelineRobustnessBufferBehaviorEXT::eRobustBufferAccess: return "RobustBufferAccess";
+ case PipelineRobustnessBufferBehaviorEXT::eRobustBufferAccess2: return "RobustBufferAccess2";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineRobustnessImageBehaviorEXT value )
+ {
+ switch ( value )
+ {
+ case PipelineRobustnessImageBehaviorEXT::eDeviceDefault: return "DeviceDefault";
+ case PipelineRobustnessImageBehaviorEXT::eDisabled: return "Disabled";
+ case PipelineRobustnessImageBehaviorEXT::eRobustImageAccess: return "RobustImageAccess";
+ case PipelineRobustnessImageBehaviorEXT::eRobustImageAccess2: return "RobustImageAccess2";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_EXT_conditional_rendering ===
+
+ VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case ConditionalRenderingFlagBitsEXT::eInverted: return "Inverted";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_EXT_display_surface_counter ===
+
+ VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case SurfaceCounterFlagBitsEXT::eVblank: return "Vblank";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_EXT_display_control ===
+
+ VULKAN_HPP_INLINE std::string to_string( DisplayPowerStateEXT value )
+ {
+ switch ( value )
+ {
+ case DisplayPowerStateEXT::eOff: return "Off";
+ case DisplayPowerStateEXT::eSuspend: return "Suspend";
+ case DisplayPowerStateEXT::eOn: return "On";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DeviceEventTypeEXT value )
+ {
+ switch ( value )
+ {
+ case DeviceEventTypeEXT::eDisplayHotplug: return "DisplayHotplug";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DisplayEventTypeEXT value )
+ {
+ switch ( value )
+ {
+ case DisplayEventTypeEXT::eFirstPixelOut: return "FirstPixelOut";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_NV_viewport_swizzle ===
+
+ VULKAN_HPP_INLINE std::string to_string( ViewportCoordinateSwizzleNV value )
+ {
+ switch ( value )
+ {
+ case ViewportCoordinateSwizzleNV::ePositiveX: return "PositiveX";
+ case ViewportCoordinateSwizzleNV::eNegativeX: return "NegativeX";
+ case ViewportCoordinateSwizzleNV::ePositiveY: return "PositiveY";
+ case ViewportCoordinateSwizzleNV::eNegativeY: return "NegativeY";
+ case ViewportCoordinateSwizzleNV::ePositiveZ: return "PositiveZ";
+ case ViewportCoordinateSwizzleNV::eNegativeZ: return "NegativeZ";
+ case ViewportCoordinateSwizzleNV::ePositiveW: return "PositiveW";
+ case ViewportCoordinateSwizzleNV::eNegativeW: return "NegativeW";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagBitsNV )
+ {
+ return "(void)";
+ }
+
+ //=== VK_EXT_discard_rectangles ===
+
+ VULKAN_HPP_INLINE std::string to_string( DiscardRectangleModeEXT value )
+ {
+ switch ( value )
+ {
+ case DiscardRectangleModeEXT::eInclusive: return "Inclusive";
+ case DiscardRectangleModeEXT::eExclusive: return "Exclusive";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagBitsEXT )
+ {
+ return "(void)";
+ }
+
+ //=== VK_EXT_conservative_rasterization ===
+
+ VULKAN_HPP_INLINE std::string to_string( ConservativeRasterizationModeEXT value )
+ {
+ switch ( value )
+ {
+ case ConservativeRasterizationModeEXT::eDisabled: return "Disabled";
+ case ConservativeRasterizationModeEXT::eOverestimate: return "Overestimate";
+ case ConservativeRasterizationModeEXT::eUnderestimate: return "Underestimate";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagBitsEXT )
+ {
+ return "(void)";
+ }
+
+ //=== VK_EXT_depth_clip_enable ===
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagBitsEXT )
+ {
+ return "(void)";
+ }
+
+ //=== VK_KHR_performance_query ===
+
+ VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagBitsKHR value )
+ {
+ switch ( value )
+ {
+ case PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting: return "PerformanceImpacting";
+ case PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted: return "ConcurrentlyImpacted";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PerformanceCounterScopeKHR value )
+ {
+ switch ( value )
+ {
+ case PerformanceCounterScopeKHR::eCommandBuffer: return "CommandBuffer";
+ case PerformanceCounterScopeKHR::eRenderPass: return "RenderPass";
+ case PerformanceCounterScopeKHR::eCommand: return "Command";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PerformanceCounterStorageKHR value )
+ {
+ switch ( value )
+ {
+ case PerformanceCounterStorageKHR::eInt32: return "Int32";
+ case PerformanceCounterStorageKHR::eInt64: return "Int64";
+ case PerformanceCounterStorageKHR::eUint32: return "Uint32";
+ case PerformanceCounterStorageKHR::eUint64: return "Uint64";
+ case PerformanceCounterStorageKHR::eFloat32: return "Float32";
+ case PerformanceCounterStorageKHR::eFloat64: return "Float64";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PerformanceCounterUnitKHR value )
+ {
+ switch ( value )
+ {
+ case PerformanceCounterUnitKHR::eGeneric: return "Generic";
+ case PerformanceCounterUnitKHR::ePercentage: return "Percentage";
+ case PerformanceCounterUnitKHR::eNanoseconds: return "Nanoseconds";
+ case PerformanceCounterUnitKHR::eBytes: return "Bytes";
+ case PerformanceCounterUnitKHR::eBytesPerSecond: return "BytesPerSecond";
+ case PerformanceCounterUnitKHR::eKelvin: return "Kelvin";
+ case PerformanceCounterUnitKHR::eWatts: return "Watts";
+ case PerformanceCounterUnitKHR::eVolts: return "Volts";
+ case PerformanceCounterUnitKHR::eAmps: return "Amps";
+ case PerformanceCounterUnitKHR::eHertz: return "Hertz";
+ case PerformanceCounterUnitKHR::eCycles: return "Cycles";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagBitsKHR )
+ {
+ return "(void)";
+ }
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+ //=== VK_MVK_ios_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagBitsMVK )
+ {
+ return "(void)";
+ }
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+ //=== VK_MVK_macos_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagBitsMVK )
+ {
+ return "(void)";
+ }
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+ //=== VK_EXT_debug_utils ===
+
+ VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case DebugUtilsMessageSeverityFlagBitsEXT::eVerbose: return "Verbose";
+ case DebugUtilsMessageSeverityFlagBitsEXT::eInfo: return "Info";
+ case DebugUtilsMessageSeverityFlagBitsEXT::eWarning: return "Warning";
+ case DebugUtilsMessageSeverityFlagBitsEXT::eError: return "Error";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case DebugUtilsMessageTypeFlagBitsEXT::eGeneral: return "General";
+ case DebugUtilsMessageTypeFlagBitsEXT::eValidation: return "Validation";
+ case DebugUtilsMessageTypeFlagBitsEXT::ePerformance: return "Performance";
+ case DebugUtilsMessageTypeFlagBitsEXT::eDeviceAddressBinding: return "DeviceAddressBinding";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagBitsEXT )
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagBitsEXT )
+ {
+ return "(void)";
+ }
+
+ //=== VK_EXT_blend_operation_advanced ===
+
+ VULKAN_HPP_INLINE std::string to_string( BlendOverlapEXT value )
+ {
+ switch ( value )
+ {
+ case BlendOverlapEXT::eUncorrelated: return "Uncorrelated";
+ case BlendOverlapEXT::eDisjoint: return "Disjoint";
+ case BlendOverlapEXT::eConjoint: return "Conjoint";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_NV_fragment_coverage_to_color ===
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagBitsNV )
+ {
+ return "(void)";
+ }
+
+ //=== VK_KHR_acceleration_structure ===
+
+ VULKAN_HPP_INLINE std::string to_string( AccelerationStructureTypeKHR value )
+ {
+ switch ( value )
+ {
+ case AccelerationStructureTypeKHR::eTopLevel: return "TopLevel";
+ case AccelerationStructureTypeKHR::eBottomLevel: return "BottomLevel";
+ case AccelerationStructureTypeKHR::eGeneric: return "Generic";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( AccelerationStructureBuildTypeKHR value )
+ {
+ switch ( value )
+ {
+ case AccelerationStructureBuildTypeKHR::eHost: return "Host";
+ case AccelerationStructureBuildTypeKHR::eDevice: return "Device";
+ case AccelerationStructureBuildTypeKHR::eHostOrDevice: return "HostOrDevice";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( GeometryFlagBitsKHR value )
+ {
+ switch ( value )
+ {
+ case GeometryFlagBitsKHR::eOpaque: return "Opaque";
+ case GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation: return "NoDuplicateAnyHitInvocation";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagBitsKHR value )
+ {
+ switch ( value )
+ {
+ case GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable: return "TriangleFacingCullDisable";
+ case GeometryInstanceFlagBitsKHR::eTriangleFlipFacing: return "TriangleFlipFacing";
+ case GeometryInstanceFlagBitsKHR::eForceOpaque: return "ForceOpaque";
+ case GeometryInstanceFlagBitsKHR::eForceNoOpaque: return "ForceNoOpaque";
+ case GeometryInstanceFlagBitsKHR::eForceOpacityMicromap2StateEXT: return "ForceOpacityMicromap2StateEXT";
+ case GeometryInstanceFlagBitsKHR::eDisableOpacityMicromapsEXT: return "DisableOpacityMicromapsEXT";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagBitsKHR value )
+ {
+ switch ( value )
+ {
+ case BuildAccelerationStructureFlagBitsKHR::eAllowUpdate: return "AllowUpdate";
+ case BuildAccelerationStructureFlagBitsKHR::eAllowCompaction: return "AllowCompaction";
+ case BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace: return "PreferFastTrace";
+ case BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild: return "PreferFastBuild";
+ case BuildAccelerationStructureFlagBitsKHR::eLowMemory: return "LowMemory";
+ case BuildAccelerationStructureFlagBitsKHR::eMotionNV: return "MotionNV";
+ case BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapUpdateEXT: return "AllowOpacityMicromapUpdateEXT";
+ case BuildAccelerationStructureFlagBitsKHR::eAllowDisableOpacityMicromapsEXT: return "AllowDisableOpacityMicromapsEXT";
+ case BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapDataUpdateEXT: return "AllowOpacityMicromapDataUpdateEXT";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( CopyAccelerationStructureModeKHR value )
+ {
+ switch ( value )
+ {
+ case CopyAccelerationStructureModeKHR::eClone: return "Clone";
+ case CopyAccelerationStructureModeKHR::eCompact: return "Compact";
+ case CopyAccelerationStructureModeKHR::eSerialize: return "Serialize";
+ case CopyAccelerationStructureModeKHR::eDeserialize: return "Deserialize";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( GeometryTypeKHR value )
+ {
+ switch ( value )
+ {
+ case GeometryTypeKHR::eTriangles: return "Triangles";
+ case GeometryTypeKHR::eAabbs: return "Aabbs";
+ case GeometryTypeKHR::eInstances: return "Instances";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( AccelerationStructureCompatibilityKHR value )
+ {
+ switch ( value )
+ {
+ case AccelerationStructureCompatibilityKHR::eCompatible: return "Compatible";
+ case AccelerationStructureCompatibilityKHR::eIncompatible: return "Incompatible";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( AccelerationStructureCreateFlagBitsKHR value )
+ {
+ switch ( value )
+ {
+ case AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay";
+ case AccelerationStructureCreateFlagBitsKHR::eMotionNV: return "MotionNV";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureModeKHR value )
+ {
+ switch ( value )
+ {
+ case BuildAccelerationStructureModeKHR::eBuild: return "Build";
+ case BuildAccelerationStructureModeKHR::eUpdate: return "Update";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_NV_framebuffer_mixed_samples ===
+
+ VULKAN_HPP_INLINE std::string to_string( CoverageModulationModeNV value )
+ {
+ switch ( value )
+ {
+ case CoverageModulationModeNV::eNone: return "None";
+ case CoverageModulationModeNV::eRgb: return "Rgb";
+ case CoverageModulationModeNV::eAlpha: return "Alpha";
+ case CoverageModulationModeNV::eRgba: return "Rgba";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagBitsNV )
+ {
+ return "(void)";
+ }
+
+ //=== VK_EXT_validation_cache ===
+
+ VULKAN_HPP_INLINE std::string to_string( ValidationCacheHeaderVersionEXT value )
+ {
+ switch ( value )
+ {
+ case ValidationCacheHeaderVersionEXT::eOne: return "One";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagBitsEXT )
+ {
+ return "(void)";
+ }
+
+ //=== VK_NV_shading_rate_image ===
+
+ VULKAN_HPP_INLINE std::string to_string( ShadingRatePaletteEntryNV value )
+ {
+ switch ( value )
+ {
+ case ShadingRatePaletteEntryNV::eNoInvocations: return "NoInvocations";
+ case ShadingRatePaletteEntryNV::e16InvocationsPerPixel: return "16InvocationsPerPixel";
+ case ShadingRatePaletteEntryNV::e8InvocationsPerPixel: return "8InvocationsPerPixel";
+ case ShadingRatePaletteEntryNV::e4InvocationsPerPixel: return "4InvocationsPerPixel";
+ case ShadingRatePaletteEntryNV::e2InvocationsPerPixel: return "2InvocationsPerPixel";
+ case ShadingRatePaletteEntryNV::e1InvocationPerPixel: return "1InvocationPerPixel";
+ case ShadingRatePaletteEntryNV::e1InvocationPer2X1Pixels: return "1InvocationPer2X1Pixels";
+ case ShadingRatePaletteEntryNV::e1InvocationPer1X2Pixels: return "1InvocationPer1X2Pixels";
+ case ShadingRatePaletteEntryNV::e1InvocationPer2X2Pixels: return "1InvocationPer2X2Pixels";
+ case ShadingRatePaletteEntryNV::e1InvocationPer4X2Pixels: return "1InvocationPer4X2Pixels";
+ case ShadingRatePaletteEntryNV::e1InvocationPer2X4Pixels: return "1InvocationPer2X4Pixels";
+ case ShadingRatePaletteEntryNV::e1InvocationPer4X4Pixels: return "1InvocationPer4X4Pixels";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( CoarseSampleOrderTypeNV value )
+ {
+ switch ( value )
+ {
+ case CoarseSampleOrderTypeNV::eDefault: return "Default";
+ case CoarseSampleOrderTypeNV::eCustom: return "Custom";
+ case CoarseSampleOrderTypeNV::ePixelMajor: return "PixelMajor";
+ case CoarseSampleOrderTypeNV::eSampleMajor: return "SampleMajor";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_NV_ray_tracing ===
+
+ VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMemoryRequirementsTypeNV value )
+ {
+ switch ( value )
+ {
+ case AccelerationStructureMemoryRequirementsTypeNV::eObject: return "Object";
+ case AccelerationStructureMemoryRequirementsTypeNV::eBuildScratch: return "BuildScratch";
+ case AccelerationStructureMemoryRequirementsTypeNV::eUpdateScratch: return "UpdateScratch";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_AMD_pipeline_compiler_control ===
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagBitsAMD )
+ {
+ return "(void)";
+ }
+
+ //=== VK_EXT_calibrated_timestamps ===
+
+ VULKAN_HPP_INLINE std::string to_string( TimeDomainEXT value )
+ {
+ switch ( value )
+ {
+ case TimeDomainEXT::eDevice: return "Device";
+ case TimeDomainEXT::eClockMonotonic: return "ClockMonotonic";
+ case TimeDomainEXT::eClockMonotonicRaw: return "ClockMonotonicRaw";
+ case TimeDomainEXT::eQueryPerformanceCounter: return "QueryPerformanceCounter";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_KHR_global_priority ===
+
+ VULKAN_HPP_INLINE std::string to_string( QueueGlobalPriorityKHR value )
+ {
+ switch ( value )
+ {
+ case QueueGlobalPriorityKHR::eLow: return "Low";
+ case QueueGlobalPriorityKHR::eMedium: return "Medium";
+ case QueueGlobalPriorityKHR::eHigh: return "High";
+ case QueueGlobalPriorityKHR::eRealtime: return "Realtime";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_AMD_memory_overallocation_behavior ===
+
+ VULKAN_HPP_INLINE std::string to_string( MemoryOverallocationBehaviorAMD value )
+ {
+ switch ( value )
+ {
+ case MemoryOverallocationBehaviorAMD::eDefault: return "Default";
+ case MemoryOverallocationBehaviorAMD::eAllowed: return "Allowed";
+ case MemoryOverallocationBehaviorAMD::eDisallowed: return "Disallowed";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_INTEL_performance_query ===
+
+ VULKAN_HPP_INLINE std::string to_string( PerformanceConfigurationTypeINTEL value )
+ {
+ switch ( value )
+ {
+ case PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated: return "CommandQueueMetricsDiscoveryActivated";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( QueryPoolSamplingModeINTEL value )
+ {
+ switch ( value )
+ {
+ case QueryPoolSamplingModeINTEL::eManual: return "Manual";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PerformanceOverrideTypeINTEL value )
+ {
+ switch ( value )
+ {
+ case PerformanceOverrideTypeINTEL::eNullHardware: return "NullHardware";
+ case PerformanceOverrideTypeINTEL::eFlushGpuCaches: return "FlushGpuCaches";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PerformanceParameterTypeINTEL value )
+ {
+ switch ( value )
+ {
+ case PerformanceParameterTypeINTEL::eHwCountersSupported: return "HwCountersSupported";
+ case PerformanceParameterTypeINTEL::eStreamMarkerValidBits: return "StreamMarkerValidBits";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PerformanceValueTypeINTEL value )
+ {
+ switch ( value )
+ {
+ case PerformanceValueTypeINTEL::eUint32: return "Uint32";
+ case PerformanceValueTypeINTEL::eUint64: return "Uint64";
+ case PerformanceValueTypeINTEL::eFloat: return "Float";
+ case PerformanceValueTypeINTEL::eBool: return "Bool";
+ case PerformanceValueTypeINTEL::eString: return "String";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+ //=== VK_FUCHSIA_imagepipe_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagBitsFUCHSIA )
+ {
+ return "(void)";
+ }
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ //=== VK_EXT_metal_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagBitsEXT )
+ {
+ return "(void)";
+ }
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+ //=== VK_KHR_fragment_shading_rate ===
+
+ VULKAN_HPP_INLINE std::string to_string( FragmentShadingRateCombinerOpKHR value )
+ {
+ switch ( value )
+ {
+ case FragmentShadingRateCombinerOpKHR::eKeep: return "Keep";
+ case FragmentShadingRateCombinerOpKHR::eReplace: return "Replace";
+ case FragmentShadingRateCombinerOpKHR::eMin: return "Min";
+ case FragmentShadingRateCombinerOpKHR::eMax: return "Max";
+ case FragmentShadingRateCombinerOpKHR::eMul: return "Mul";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_AMD_shader_core_properties2 ===
+
+ VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagBitsAMD )
+ {
+ return "(void)";
+ }
+
+ //=== VK_EXT_validation_features ===
+
+ VULKAN_HPP_INLINE std::string to_string( ValidationFeatureEnableEXT value )
+ {
+ switch ( value )
+ {
+ case ValidationFeatureEnableEXT::eGpuAssisted: return "GpuAssisted";
+ case ValidationFeatureEnableEXT::eGpuAssistedReserveBindingSlot: return "GpuAssistedReserveBindingSlot";
+ case ValidationFeatureEnableEXT::eBestPractices: return "BestPractices";
+ case ValidationFeatureEnableEXT::eDebugPrintf: return "DebugPrintf";
+ case ValidationFeatureEnableEXT::eSynchronizationValidation: return "SynchronizationValidation";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ValidationFeatureDisableEXT value )
+ {
+ switch ( value )
+ {
+ case ValidationFeatureDisableEXT::eAll: return "All";
+ case ValidationFeatureDisableEXT::eShaders: return "Shaders";
+ case ValidationFeatureDisableEXT::eThreadSafety: return "ThreadSafety";
+ case ValidationFeatureDisableEXT::eApiParameters: return "ApiParameters";
+ case ValidationFeatureDisableEXT::eObjectLifetimes: return "ObjectLifetimes";
+ case ValidationFeatureDisableEXT::eCoreChecks: return "CoreChecks";
+ case ValidationFeatureDisableEXT::eUniqueHandles: return "UniqueHandles";
+ case ValidationFeatureDisableEXT::eShaderValidationCache: return "ShaderValidationCache";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_NV_cooperative_matrix ===
+
+ VULKAN_HPP_INLINE std::string to_string( ScopeNV value )
+ {
+ switch ( value )
+ {
+ case ScopeNV::eDevice: return "Device";
+ case ScopeNV::eWorkgroup: return "Workgroup";
+ case ScopeNV::eSubgroup: return "Subgroup";
+ case ScopeNV::eQueueFamily: return "QueueFamily";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ComponentTypeNV value )
+ {
+ switch ( value )
+ {
+ case ComponentTypeNV::eFloat16: return "Float16";
+ case ComponentTypeNV::eFloat32: return "Float32";
+ case ComponentTypeNV::eFloat64: return "Float64";
+ case ComponentTypeNV::eSint8: return "Sint8";
+ case ComponentTypeNV::eSint16: return "Sint16";
+ case ComponentTypeNV::eSint32: return "Sint32";
+ case ComponentTypeNV::eSint64: return "Sint64";
+ case ComponentTypeNV::eUint8: return "Uint8";
+ case ComponentTypeNV::eUint16: return "Uint16";
+ case ComponentTypeNV::eUint32: return "Uint32";
+ case ComponentTypeNV::eUint64: return "Uint64";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_NV_coverage_reduction_mode ===
+
+ VULKAN_HPP_INLINE std::string to_string( CoverageReductionModeNV value )
+ {
+ switch ( value )
+ {
+ case CoverageReductionModeNV::eMerge: return "Merge";
+ case CoverageReductionModeNV::eTruncate: return "Truncate";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagBitsNV )
+ {
+ return "(void)";
+ }
+
+ //=== VK_EXT_provoking_vertex ===
+
+ VULKAN_HPP_INLINE std::string to_string( ProvokingVertexModeEXT value )
+ {
+ switch ( value )
+ {
+ case ProvokingVertexModeEXT::eFirstVertex: return "FirstVertex";
+ case ProvokingVertexModeEXT::eLastVertex: return "LastVertex";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+ //=== VK_EXT_full_screen_exclusive ===
+
+ VULKAN_HPP_INLINE std::string to_string( FullScreenExclusiveEXT value )
+ {
+ switch ( value )
+ {
+ case FullScreenExclusiveEXT::eDefault: return "Default";
+ case FullScreenExclusiveEXT::eAllowed: return "Allowed";
+ case FullScreenExclusiveEXT::eDisallowed: return "Disallowed";
+ case FullScreenExclusiveEXT::eApplicationControlled: return "ApplicationControlled";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+ //=== VK_EXT_headless_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagBitsEXT )
+ {
+ return "(void)";
+ }
+
+ //=== VK_EXT_line_rasterization ===
+
+ VULKAN_HPP_INLINE std::string to_string( LineRasterizationModeEXT value )
+ {
+ switch ( value )
+ {
+ case LineRasterizationModeEXT::eDefault: return "Default";
+ case LineRasterizationModeEXT::eRectangular: return "Rectangular";
+ case LineRasterizationModeEXT::eBresenham: return "Bresenham";
+ case LineRasterizationModeEXT::eRectangularSmooth: return "RectangularSmooth";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_KHR_pipeline_executable_properties ===
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineExecutableStatisticFormatKHR value )
+ {
+ switch ( value )
+ {
+ case PipelineExecutableStatisticFormatKHR::eBool32: return "Bool32";
+ case PipelineExecutableStatisticFormatKHR::eInt64: return "Int64";
+ case PipelineExecutableStatisticFormatKHR::eUint64: return "Uint64";
+ case PipelineExecutableStatisticFormatKHR::eFloat64: return "Float64";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_NV_device_generated_commands ===
+
+ VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagBitsNV value )
+ {
+ switch ( value )
+ {
+ case IndirectStateFlagBitsNV::eFlagFrontface: return "FlagFrontface";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( IndirectCommandsTokenTypeNV value )
+ {
+ switch ( value )
+ {
+ case IndirectCommandsTokenTypeNV::eShaderGroup: return "ShaderGroup";
+ case IndirectCommandsTokenTypeNV::eStateFlags: return "StateFlags";
+ case IndirectCommandsTokenTypeNV::eIndexBuffer: return "IndexBuffer";
+ case IndirectCommandsTokenTypeNV::eVertexBuffer: return "VertexBuffer";
+ case IndirectCommandsTokenTypeNV::ePushConstant: return "PushConstant";
+ case IndirectCommandsTokenTypeNV::eDrawIndexed: return "DrawIndexed";
+ case IndirectCommandsTokenTypeNV::eDraw: return "Draw";
+ case IndirectCommandsTokenTypeNV::eDrawTasks: return "DrawTasks";
+ case IndirectCommandsTokenTypeNV::eDrawMeshTasks: return "DrawMeshTasks";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagBitsNV value )
+ {
+ switch ( value )
+ {
+ case IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess: return "ExplicitPreprocess";
+ case IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences: return "IndexedSequences";
+ case IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences: return "UnorderedSequences";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_EXT_device_memory_report ===
+
+ VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportEventTypeEXT value )
+ {
+ switch ( value )
+ {
+ case DeviceMemoryReportEventTypeEXT::eAllocate: return "Allocate";
+ case DeviceMemoryReportEventTypeEXT::eFree: return "Free";
+ case DeviceMemoryReportEventTypeEXT::eImport: return "Import";
+ case DeviceMemoryReportEventTypeEXT::eUnimport: return "Unimport";
+ case DeviceMemoryReportEventTypeEXT::eAllocationFailed: return "AllocationFailed";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportFlagBitsEXT )
+ {
+ return "(void)";
+ }
+
+ //=== VK_EXT_pipeline_creation_cache_control ===
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case PipelineCacheCreateFlagBits::eExternallySynchronized: return "ExternallySynchronized";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_KHR_video_encode_queue ===
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeCapabilityFlagBitsKHR value )
+ {
+ switch ( value )
+ {
+ case VideoEncodeCapabilityFlagBitsKHR::ePrecedingExternallyEncodedBytes: return "PrecedingExternallyEncodedBytes";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeUsageFlagBitsKHR value )
+ {
+ switch ( value )
+ {
+ case VideoEncodeUsageFlagBitsKHR::eDefault: return "Default";
+ case VideoEncodeUsageFlagBitsKHR::eTranscoding: return "Transcoding";
+ case VideoEncodeUsageFlagBitsKHR::eStreaming: return "Streaming";
+ case VideoEncodeUsageFlagBitsKHR::eRecording: return "Recording";
+ case VideoEncodeUsageFlagBitsKHR::eConferencing: return "Conferencing";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeContentFlagBitsKHR value )
+ {
+ switch ( value )
+ {
+ case VideoEncodeContentFlagBitsKHR::eDefault: return "Default";
+ case VideoEncodeContentFlagBitsKHR::eCamera: return "Camera";
+ case VideoEncodeContentFlagBitsKHR::eDesktop: return "Desktop";
+ case VideoEncodeContentFlagBitsKHR::eRendered: return "Rendered";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeTuningModeKHR value )
+ {
+ switch ( value )
+ {
+ case VideoEncodeTuningModeKHR::eDefault: return "Default";
+ case VideoEncodeTuningModeKHR::eHighQuality: return "HighQuality";
+ case VideoEncodeTuningModeKHR::eLowLatency: return "LowLatency";
+ case VideoEncodeTuningModeKHR::eUltraLowLatency: return "UltraLowLatency";
+ case VideoEncodeTuningModeKHR::eLossless: return "Lossless";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlModeFlagBitsKHR value )
+ {
+ switch ( value )
+ {
+ case VideoEncodeRateControlModeFlagBitsKHR::eNone: return "None";
+ case VideoEncodeRateControlModeFlagBitsKHR::eCbr: return "Cbr";
+ case VideoEncodeRateControlModeFlagBitsKHR::eVbr: return "Vbr";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeFlagBitsKHR )
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlFlagBitsKHR )
+ {
+ return "(void)";
+ }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ //=== VK_NV_device_diagnostics_config ===
+
+ VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagBitsNV value )
+ {
+ switch ( value )
+ {
+ case DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo: return "EnableShaderDebugInfo";
+ case DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking: return "EnableResourceTracking";
+ case DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints: return "EnableAutomaticCheckpoints";
+ case DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderErrorReporting: return "EnableShaderErrorReporting";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ //=== VK_EXT_metal_objects ===
+
+ VULKAN_HPP_INLINE std::string to_string( ExportMetalObjectTypeFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case ExportMetalObjectTypeFlagBitsEXT::eMetalDevice: return "MetalDevice";
+ case ExportMetalObjectTypeFlagBitsEXT::eMetalCommandQueue: return "MetalCommandQueue";
+ case ExportMetalObjectTypeFlagBitsEXT::eMetalBuffer: return "MetalBuffer";
+ case ExportMetalObjectTypeFlagBitsEXT::eMetalTexture: return "MetalTexture";
+ case ExportMetalObjectTypeFlagBitsEXT::eMetalIosurface: return "MetalIosurface";
+ case ExportMetalObjectTypeFlagBitsEXT::eMetalSharedEvent: return "MetalSharedEvent";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+ //=== VK_EXT_graphics_pipeline_library ===
+
+ VULKAN_HPP_INLINE std::string to_string( GraphicsPipelineLibraryFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case GraphicsPipelineLibraryFlagBitsEXT::eVertexInputInterface: return "VertexInputInterface";
+ case GraphicsPipelineLibraryFlagBitsEXT::ePreRasterizationShaders: return "PreRasterizationShaders";
+ case GraphicsPipelineLibraryFlagBitsEXT::eFragmentShader: return "FragmentShader";
+ case GraphicsPipelineLibraryFlagBitsEXT::eFragmentOutputInterface: return "FragmentOutputInterface";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case PipelineLayoutCreateFlagBits::eIndependentSetsEXT: return "IndependentSetsEXT";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_NV_fragment_shading_rate_enums ===
+
+ VULKAN_HPP_INLINE std::string to_string( FragmentShadingRateNV value )
+ {
+ switch ( value )
+ {
+ case FragmentShadingRateNV::e1InvocationPerPixel: return "1InvocationPerPixel";
+ case FragmentShadingRateNV::e1InvocationPer1X2Pixels: return "1InvocationPer1X2Pixels";
+ case FragmentShadingRateNV::e1InvocationPer2X1Pixels: return "1InvocationPer2X1Pixels";
+ case FragmentShadingRateNV::e1InvocationPer2X2Pixels: return "1InvocationPer2X2Pixels";
+ case FragmentShadingRateNV::e1InvocationPer2X4Pixels: return "1InvocationPer2X4Pixels";
+ case FragmentShadingRateNV::e1InvocationPer4X2Pixels: return "1InvocationPer4X2Pixels";
+ case FragmentShadingRateNV::e1InvocationPer4X4Pixels: return "1InvocationPer4X4Pixels";
+ case FragmentShadingRateNV::e2InvocationsPerPixel: return "2InvocationsPerPixel";
+ case FragmentShadingRateNV::e4InvocationsPerPixel: return "4InvocationsPerPixel";
+ case FragmentShadingRateNV::e8InvocationsPerPixel: return "8InvocationsPerPixel";
+ case FragmentShadingRateNV::e16InvocationsPerPixel: return "16InvocationsPerPixel";
+ case FragmentShadingRateNV::eNoInvocations: return "NoInvocations";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( FragmentShadingRateTypeNV value )
+ {
+ switch ( value )
+ {
+ case FragmentShadingRateTypeNV::eFragmentSize: return "FragmentSize";
+ case FragmentShadingRateTypeNV::eEnums: return "Enums";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_NV_ray_tracing_motion_blur ===
+
+ VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMotionInstanceTypeNV value )
+ {
+ switch ( value )
+ {
+ case AccelerationStructureMotionInstanceTypeNV::eStatic: return "Static";
+ case AccelerationStructureMotionInstanceTypeNV::eMatrixMotion: return "MatrixMotion";
+ case AccelerationStructureMotionInstanceTypeNV::eSrtMotion: return "SrtMotion";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMotionInfoFlagBitsNV )
+ {
+ return "(void)";
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMotionInstanceFlagBitsNV )
+ {
+ return "(void)";
+ }
+
+ //=== VK_EXT_image_compression_control ===
+
+ VULKAN_HPP_INLINE std::string to_string( ImageCompressionFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case ImageCompressionFlagBitsEXT::eDefault: return "Default";
+ case ImageCompressionFlagBitsEXT::eFixedRateDefault: return "FixedRateDefault";
+ case ImageCompressionFlagBitsEXT::eFixedRateExplicit: return "FixedRateExplicit";
+ case ImageCompressionFlagBitsEXT::eDisabled: return "Disabled";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ImageCompressionFixedRateFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case ImageCompressionFixedRateFlagBitsEXT::eNone: return "None";
+ case ImageCompressionFixedRateFlagBitsEXT::e1Bpc: return "1Bpc";
+ case ImageCompressionFixedRateFlagBitsEXT::e2Bpc: return "2Bpc";
+ case ImageCompressionFixedRateFlagBitsEXT::e3Bpc: return "3Bpc";
+ case ImageCompressionFixedRateFlagBitsEXT::e4Bpc: return "4Bpc";
+ case ImageCompressionFixedRateFlagBitsEXT::e5Bpc: return "5Bpc";
+ case ImageCompressionFixedRateFlagBitsEXT::e6Bpc: return "6Bpc";
+ case ImageCompressionFixedRateFlagBitsEXT::e7Bpc: return "7Bpc";
+ case ImageCompressionFixedRateFlagBitsEXT::e8Bpc: return "8Bpc";
+ case ImageCompressionFixedRateFlagBitsEXT::e9Bpc: return "9Bpc";
+ case ImageCompressionFixedRateFlagBitsEXT::e10Bpc: return "10Bpc";
+ case ImageCompressionFixedRateFlagBitsEXT::e11Bpc: return "11Bpc";
+ case ImageCompressionFixedRateFlagBitsEXT::e12Bpc: return "12Bpc";
+ case ImageCompressionFixedRateFlagBitsEXT::e13Bpc: return "13Bpc";
+ case ImageCompressionFixedRateFlagBitsEXT::e14Bpc: return "14Bpc";
+ case ImageCompressionFixedRateFlagBitsEXT::e15Bpc: return "15Bpc";
+ case ImageCompressionFixedRateFlagBitsEXT::e16Bpc: return "16Bpc";
+ case ImageCompressionFixedRateFlagBitsEXT::e17Bpc: return "17Bpc";
+ case ImageCompressionFixedRateFlagBitsEXT::e18Bpc: return "18Bpc";
+ case ImageCompressionFixedRateFlagBitsEXT::e19Bpc: return "19Bpc";
+ case ImageCompressionFixedRateFlagBitsEXT::e20Bpc: return "20Bpc";
+ case ImageCompressionFixedRateFlagBitsEXT::e21Bpc: return "21Bpc";
+ case ImageCompressionFixedRateFlagBitsEXT::e22Bpc: return "22Bpc";
+ case ImageCompressionFixedRateFlagBitsEXT::e23Bpc: return "23Bpc";
+ case ImageCompressionFixedRateFlagBitsEXT::e24Bpc: return "24Bpc";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_EXT_device_fault ===
+
+ VULKAN_HPP_INLINE std::string to_string( DeviceFaultAddressTypeEXT value )
+ {
+ switch ( value )
+ {
+ case DeviceFaultAddressTypeEXT::eNone: return "None";
+ case DeviceFaultAddressTypeEXT::eReadInvalid: return "ReadInvalid";
+ case DeviceFaultAddressTypeEXT::eWriteInvalid: return "WriteInvalid";
+ case DeviceFaultAddressTypeEXT::eExecuteInvalid: return "ExecuteInvalid";
+ case DeviceFaultAddressTypeEXT::eInstructionPointerUnknown: return "InstructionPointerUnknown";
+ case DeviceFaultAddressTypeEXT::eInstructionPointerInvalid: return "InstructionPointerInvalid";
+ case DeviceFaultAddressTypeEXT::eInstructionPointerFault: return "InstructionPointerFault";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DeviceFaultVendorBinaryHeaderVersionEXT value )
+ {
+ switch ( value )
+ {
+ case DeviceFaultVendorBinaryHeaderVersionEXT::eOne: return "One";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+ //=== VK_EXT_directfb_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( DirectFBSurfaceCreateFlagBitsEXT )
+ {
+ return "(void)";
+ }
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+ //=== VK_KHR_ray_tracing_pipeline ===
+
+ VULKAN_HPP_INLINE std::string to_string( RayTracingShaderGroupTypeKHR value )
+ {
+ switch ( value )
+ {
+ case RayTracingShaderGroupTypeKHR::eGeneral: return "General";
+ case RayTracingShaderGroupTypeKHR::eTrianglesHitGroup: return "TrianglesHitGroup";
+ case RayTracingShaderGroupTypeKHR::eProceduralHitGroup: return "ProceduralHitGroup";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ShaderGroupShaderKHR value )
+ {
+ switch ( value )
+ {
+ case ShaderGroupShaderKHR::eGeneral: return "General";
+ case ShaderGroupShaderKHR::eClosestHit: return "ClosestHit";
+ case ShaderGroupShaderKHR::eAnyHit: return "AnyHit";
+ case ShaderGroupShaderKHR::eIntersection: return "Intersection";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_EXT_device_address_binding_report ===
+
+ VULKAN_HPP_INLINE std::string to_string( DeviceAddressBindingFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case DeviceAddressBindingFlagBitsEXT::eInternalObject: return "InternalObject";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( DeviceAddressBindingTypeEXT value )
+ {
+ switch ( value )
+ {
+ case DeviceAddressBindingTypeEXT::eBind: return "Bind";
+ case DeviceAddressBindingTypeEXT::eUnbind: return "Unbind";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+ //=== VK_FUCHSIA_buffer_collection ===
+
+ VULKAN_HPP_INLINE std::string to_string( ImageConstraintsInfoFlagBitsFUCHSIA value )
+ {
+ switch ( value )
+ {
+ case ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadRarely: return "CpuReadRarely";
+ case ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadOften: return "CpuReadOften";
+ case ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteRarely: return "CpuWriteRarely";
+ case ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteOften: return "CpuWriteOften";
+ case ImageConstraintsInfoFlagBitsFUCHSIA::eProtectedOptional: return "ProtectedOptional";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ImageFormatConstraintsFlagBitsFUCHSIA )
+ {
+ return "(void)";
+ }
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+ //=== VK_QNX_screen_surface ===
+
+ VULKAN_HPP_INLINE std::string to_string( ScreenSurfaceCreateFlagBitsQNX )
+ {
+ return "(void)";
+ }
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+ //=== VK_EXT_opacity_micromap ===
+
+ VULKAN_HPP_INLINE std::string to_string( MicromapTypeEXT value )
+ {
+ switch ( value )
+ {
+ case MicromapTypeEXT::eOpacityMicromap: return "OpacityMicromap";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( BuildMicromapFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case BuildMicromapFlagBitsEXT::ePreferFastTrace: return "PreferFastTrace";
+ case BuildMicromapFlagBitsEXT::ePreferFastBuild: return "PreferFastBuild";
+ case BuildMicromapFlagBitsEXT::eAllowCompaction: return "AllowCompaction";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( CopyMicromapModeEXT value )
+ {
+ switch ( value )
+ {
+ case CopyMicromapModeEXT::eClone: return "Clone";
+ case CopyMicromapModeEXT::eSerialize: return "Serialize";
+ case CopyMicromapModeEXT::eDeserialize: return "Deserialize";
+ case CopyMicromapModeEXT::eCompact: return "Compact";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( MicromapCreateFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case MicromapCreateFlagBitsEXT::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( BuildMicromapModeEXT value )
+ {
+ switch ( value )
+ {
+ case BuildMicromapModeEXT::eBuild: return "Build";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( OpacityMicromapFormatEXT value )
+ {
+ switch ( value )
+ {
+ case OpacityMicromapFormatEXT::e2State: return "2State";
+ case OpacityMicromapFormatEXT::e4State: return "4State";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( OpacityMicromapSpecialIndexEXT value )
+ {
+ switch ( value )
+ {
+ case OpacityMicromapSpecialIndexEXT::eFullyTransparent: return "FullyTransparent";
+ case OpacityMicromapSpecialIndexEXT::eFullyOpaque: return "FullyOpaque";
+ case OpacityMicromapSpecialIndexEXT::eFullyUnknownTransparent: return "FullyUnknownTransparent";
+ case OpacityMicromapSpecialIndexEXT::eFullyUnknownOpaque: return "FullyUnknownOpaque";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_EXT_subpass_merge_feedback ===
+
+ VULKAN_HPP_INLINE std::string to_string( SubpassMergeStatusEXT value )
+ {
+ switch ( value )
+ {
+ case SubpassMergeStatusEXT::eMerged: return "Merged";
+ case SubpassMergeStatusEXT::eDisallowed: return "Disallowed";
+ case SubpassMergeStatusEXT::eNotMergedSideEffects: return "NotMergedSideEffects";
+ case SubpassMergeStatusEXT::eNotMergedSamplesMismatch: return "NotMergedSamplesMismatch";
+ case SubpassMergeStatusEXT::eNotMergedViewsMismatch: return "NotMergedViewsMismatch";
+ case SubpassMergeStatusEXT::eNotMergedAliasing: return "NotMergedAliasing";
+ case SubpassMergeStatusEXT::eNotMergedDependencies: return "NotMergedDependencies";
+ case SubpassMergeStatusEXT::eNotMergedIncompatibleInputAttachment: return "NotMergedIncompatibleInputAttachment";
+ case SubpassMergeStatusEXT::eNotMergedTooManyAttachments: return "NotMergedTooManyAttachments";
+ case SubpassMergeStatusEXT::eNotMergedInsufficientStorage: return "NotMergedInsufficientStorage";
+ case SubpassMergeStatusEXT::eNotMergedDepthStencilCount: return "NotMergedDepthStencilCount";
+ case SubpassMergeStatusEXT::eNotMergedResolveAttachmentReuse: return "NotMergedResolveAttachmentReuse";
+ case SubpassMergeStatusEXT::eNotMergedSingleSubpass: return "NotMergedSingleSubpass";
+ case SubpassMergeStatusEXT::eNotMergedUnspecified: return "NotMergedUnspecified";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_EXT_rasterization_order_attachment_access ===
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case PipelineColorBlendStateCreateFlagBits::eRasterizationOrderAttachmentAccessEXT: return "RasterizationOrderAttachmentAccessEXT";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentDepthAccessEXT: return "RasterizationOrderAttachmentDepthAccessEXT";
+ case PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentStencilAccessEXT: return "RasterizationOrderAttachmentStencilAccessEXT";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ //=== VK_NV_optical_flow ===
+
+ VULKAN_HPP_INLINE std::string to_string( OpticalFlowUsageFlagBitsNV value )
+ {
+ switch ( value )
+ {
+ case OpticalFlowUsageFlagBitsNV::eUnknown: return "Unknown";
+ case OpticalFlowUsageFlagBitsNV::eInput: return "Input";
+ case OpticalFlowUsageFlagBitsNV::eOutput: return "Output";
+ case OpticalFlowUsageFlagBitsNV::eHint: return "Hint";
+ case OpticalFlowUsageFlagBitsNV::eCost: return "Cost";
+ case OpticalFlowUsageFlagBitsNV::eGlobalFlow: return "GlobalFlow";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( OpticalFlowGridSizeFlagBitsNV value )
+ {
+ switch ( value )
+ {
+ case OpticalFlowGridSizeFlagBitsNV::eUnknown: return "Unknown";
+ case OpticalFlowGridSizeFlagBitsNV::e1X1: return "1X1";
+ case OpticalFlowGridSizeFlagBitsNV::e2X2: return "2X2";
+ case OpticalFlowGridSizeFlagBitsNV::e4X4: return "4X4";
+ case OpticalFlowGridSizeFlagBitsNV::e8X8: return "8X8";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( OpticalFlowPerformanceLevelNV value )
+ {
+ switch ( value )
+ {
+ case OpticalFlowPerformanceLevelNV::eUnknown: return "Unknown";
+ case OpticalFlowPerformanceLevelNV::eSlow: return "Slow";
+ case OpticalFlowPerformanceLevelNV::eMedium: return "Medium";
+ case OpticalFlowPerformanceLevelNV::eFast: return "Fast";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( OpticalFlowSessionBindingPointNV value )
+ {
+ switch ( value )
+ {
+ case OpticalFlowSessionBindingPointNV::eUnknown: return "Unknown";
+ case OpticalFlowSessionBindingPointNV::eInput: return "Input";
+ case OpticalFlowSessionBindingPointNV::eReference: return "Reference";
+ case OpticalFlowSessionBindingPointNV::eHint: return "Hint";
+ case OpticalFlowSessionBindingPointNV::eFlowVector: return "FlowVector";
+ case OpticalFlowSessionBindingPointNV::eBackwardFlowVector: return "BackwardFlowVector";
+ case OpticalFlowSessionBindingPointNV::eCost: return "Cost";
+ case OpticalFlowSessionBindingPointNV::eBackwardCost: return "BackwardCost";
+ case OpticalFlowSessionBindingPointNV::eGlobalFlow: return "GlobalFlow";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( OpticalFlowSessionCreateFlagBitsNV value )
+ {
+ switch ( value )
+ {
+ case OpticalFlowSessionCreateFlagBitsNV::eEnableHint: return "EnableHint";
+ case OpticalFlowSessionCreateFlagBitsNV::eEnableCost: return "EnableCost";
+ case OpticalFlowSessionCreateFlagBitsNV::eEnableGlobalFlow: return "EnableGlobalFlow";
+ case OpticalFlowSessionCreateFlagBitsNV::eAllowRegions: return "AllowRegions";
+ case OpticalFlowSessionCreateFlagBitsNV::eBothDirections: return "BothDirections";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( OpticalFlowExecuteFlagBitsNV value )
+ {
+ switch ( value )
+ {
+ case OpticalFlowExecuteFlagBitsNV::eDisableTemporalHints: return "DisableTemporalHints";
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+ }
+ }
+
+} // namespace VULKAN_HPP_NAMESPACE
+#endif
diff --git a/thirdparty/vulkan/patches/VKEnumStringHelper-use-volk.patch b/thirdparty/vulkan/patches/VKEnumStringHelper-use-volk.patch
index 9a2267e2b6..849a2aa040 100644
--- a/thirdparty/vulkan/patches/VKEnumStringHelper-use-volk.patch
+++ b/thirdparty/vulkan/patches/VKEnumStringHelper-use-volk.patch
@@ -1,8 +1,8 @@
diff --git a/thirdparty/vulkan/vk_enum_string_helper.h b/thirdparty/vulkan/vk_enum_string_helper.h
-index 4c36430341..004a861774 100644
+index 65b3322c07..e8c61aaf15 100644
--- a/thirdparty/vulkan/vk_enum_string_helper.h
+++ b/thirdparty/vulkan/vk_enum_string_helper.h
-@@ -36,7 +36,11 @@
+@@ -37,7 +37,11 @@
#endif
#include <string>
diff --git a/thirdparty/vulkan/vk_enum_string_helper.h b/thirdparty/vulkan/vk_enum_string_helper.h
index 15241d7773..1869f132eb 100644
--- a/thirdparty/vulkan/vk_enum_string_helper.h
+++ b/thirdparty/vulkan/vk_enum_string_helper.h
@@ -48,6 +48,8 @@ static inline const char* string_VkResult(VkResult input_value)
{
switch (input_value)
{
+ case VK_ERROR_COMPRESSION_EXHAUSTED_EXT:
+ return "VK_ERROR_COMPRESSION_EXHAUSTED_EXT";
case VK_ERROR_DEVICE_LOST:
return "VK_ERROR_DEVICE_LOST";
case VK_ERROR_EXTENSION_NOT_PRESENT:
@@ -62,6 +64,10 @@ static inline const char* string_VkResult(VkResult input_value)
return "VK_ERROR_FRAGMENTED_POOL";
case VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT:
return "VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT";
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ case VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR:
+ return "VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR";
+#endif // VK_ENABLE_BETA_EXTENSIONS
case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR:
return "VK_ERROR_INCOMPATIBLE_DISPLAY_KHR";
case VK_ERROR_INCOMPATIBLE_DRIVER:
@@ -100,6 +106,26 @@ static inline const char* string_VkResult(VkResult input_value)
return "VK_ERROR_UNKNOWN";
case VK_ERROR_VALIDATION_FAILED_EXT:
return "VK_ERROR_VALIDATION_FAILED_EXT";
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ case VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR:
+ return "VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR";
+#endif // VK_ENABLE_BETA_EXTENSIONS
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ case VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR:
+ return "VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR";
+#endif // VK_ENABLE_BETA_EXTENSIONS
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ case VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR:
+ return "VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR";
+#endif // VK_ENABLE_BETA_EXTENSIONS
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ case VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR:
+ return "VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR";
+#endif // VK_ENABLE_BETA_EXTENSIONS
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ case VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR:
+ return "VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR";
+#endif // VK_ENABLE_BETA_EXTENSIONS
case VK_EVENT_RESET:
return "VK_EVENT_RESET";
case VK_EVENT_SET:
@@ -159,12 +185,16 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV";
case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MOTION_INFO_NV:
return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MOTION_INFO_NV";
+ case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_OPACITY_MICROMAP_EXT:
+ return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_OPACITY_MICROMAP_EXT";
case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_INFO_KHR:
return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_INFO_KHR";
case VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR:
return "VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR";
case VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR:
return "VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR";
+ case VK_STRUCTURE_TYPE_AMIGO_PROFILING_SUBMIT_INFO_SEC:
+ return "VK_STRUCTURE_TYPE_AMIGO_PROFILING_SUBMIT_INFO_SEC";
case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID:
return "VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID";
case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID:
@@ -203,6 +233,10 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO";
case VK_STRUCTURE_TYPE_BIND_SPARSE_INFO:
return "VK_STRUCTURE_TYPE_BIND_SPARSE_INFO";
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ case VK_STRUCTURE_TYPE_BIND_VIDEO_SESSION_MEMORY_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_BIND_VIDEO_SESSION_MEMORY_INFO_KHR";
+#endif // VK_ENABLE_BETA_EXTENSIONS
case VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2:
return "VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2";
case VK_STRUCTURE_TYPE_BUFFER_COLLECTION_BUFFER_CREATE_INFO_FUCHSIA:
@@ -285,6 +319,12 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2";
case VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR:
return "VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR";
+ case VK_STRUCTURE_TYPE_COPY_MEMORY_TO_MICROMAP_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_COPY_MEMORY_TO_MICROMAP_INFO_EXT";
+ case VK_STRUCTURE_TYPE_COPY_MICROMAP_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_COPY_MICROMAP_INFO_EXT";
+ case VK_STRUCTURE_TYPE_COPY_MICROMAP_TO_MEMORY_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_COPY_MICROMAP_TO_MEMORY_INFO_EXT";
case VK_STRUCTURE_TYPE_CU_FUNCTION_CREATE_INFO_NVX:
return "VK_STRUCTURE_TYPE_CU_FUNCTION_CREATE_INFO_NVX";
case VK_STRUCTURE_TYPE_CU_LAUNCH_INFO_NVX:
@@ -325,10 +365,14 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO";
case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO:
return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO";
+ case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_BINDING_REFERENCE_VALVE:
+ return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_BINDING_REFERENCE_VALVE";
case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO:
return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO";
case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO:
return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE:
+ return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE";
case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT:
return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT";
case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO:
@@ -337,6 +381,8 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT";
case VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO:
return "VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT:
+ return "VK_STRUCTURE_TYPE_DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT";
case VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS:
return "VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS";
case VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO:
@@ -347,6 +393,10 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV";
case VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT:
return "VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT";
+ case VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT:
+ return "VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT";
+ case VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT";
case VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO:
return "VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO";
case VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO:
@@ -421,6 +471,22 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR";
case VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV:
return "VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV";
+ case VK_STRUCTURE_TYPE_EXPORT_METAL_BUFFER_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_EXPORT_METAL_BUFFER_INFO_EXT";
+ case VK_STRUCTURE_TYPE_EXPORT_METAL_COMMAND_QUEUE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_EXPORT_METAL_COMMAND_QUEUE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_EXPORT_METAL_DEVICE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_EXPORT_METAL_DEVICE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_EXPORT_METAL_IO_SURFACE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_EXPORT_METAL_IO_SURFACE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECTS_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECTS_INFO_EXT";
+ case VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECT_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECT_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_EXPORT_METAL_SHARED_EVENT_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_EXPORT_METAL_SHARED_EVENT_INFO_EXT";
+ case VK_STRUCTURE_TYPE_EXPORT_METAL_TEXTURE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_EXPORT_METAL_TEXTURE_INFO_EXT";
case VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO:
return "VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO";
case VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR:
@@ -475,6 +541,8 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV";
case VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO:
return "VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT";
case VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV:
return "VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV";
case VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV:
@@ -487,6 +555,10 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA";
case VK_STRUCTURE_TYPE_IMAGE_BLIT_2:
return "VK_STRUCTURE_TYPE_IMAGE_BLIT_2";
+ case VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT:
+ return "VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT";
+ case VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT:
+ return "VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT";
case VK_STRUCTURE_TYPE_IMAGE_CONSTRAINTS_INFO_FUCHSIA:
return "VK_STRUCTURE_TYPE_IMAGE_CONSTRAINTS_INFO_FUCHSIA";
case VK_STRUCTURE_TYPE_IMAGE_COPY_2:
@@ -519,6 +591,8 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2";
case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO:
return "VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_EXT:
+ return "VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_EXT";
case VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR:
return "VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR";
case VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX:
@@ -531,6 +605,8 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX";
case VK_STRUCTURE_TYPE_IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT:
return "VK_STRUCTURE_TYPE_IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM:
+ return "VK_STRUCTURE_TYPE_IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM";
case VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO:
return "VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO";
case VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID:
@@ -551,6 +627,14 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV";
case VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA:
return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA";
+ case VK_STRUCTURE_TYPE_IMPORT_METAL_BUFFER_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_IMPORT_METAL_BUFFER_INFO_EXT";
+ case VK_STRUCTURE_TYPE_IMPORT_METAL_IO_SURFACE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_IMPORT_METAL_IO_SURFACE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_IMPORT_METAL_SHARED_EVENT_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_IMPORT_METAL_SHARED_EVENT_INFO_EXT";
+ case VK_STRUCTURE_TYPE_IMPORT_METAL_TEXTURE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_IMPORT_METAL_TEXTURE_INFO_EXT";
case VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR:
return "VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR";
case VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR:
@@ -613,12 +697,32 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA";
case VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT:
return "VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_MICROMAP_BUILD_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_MICROMAP_BUILD_INFO_EXT";
+ case VK_STRUCTURE_TYPE_MICROMAP_BUILD_SIZES_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_MICROMAP_BUILD_SIZES_INFO_EXT";
+ case VK_STRUCTURE_TYPE_MICROMAP_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_MICROMAP_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_MICROMAP_VERSION_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_MICROMAP_VERSION_INFO_EXT";
+ case VK_STRUCTURE_TYPE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_INFO_EXT";
case VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT:
return "VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT";
case VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX:
return "VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX";
- case VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE:
- return "VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE";
+ case VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_OPTICAL_FLOW_EXECUTE_INFO_NV:
+ return "VK_STRUCTURE_TYPE_OPTICAL_FLOW_EXECUTE_INFO_NV";
+ case VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV:
+ return "VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV";
+ case VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV:
+ return "VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV";
+ case VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_INFO_NV:
+ return "VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_INFO_NV";
+ case VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV:
+ return "VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV";
case VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL:
return "VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL";
case VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR:
@@ -643,8 +747,14 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ADDRESS_BINDING_REPORT_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ADDRESS_BINDING_REPORT_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT:
@@ -679,6 +789,8 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT:
@@ -689,6 +801,8 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV:
@@ -709,6 +823,10 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO:
@@ -723,6 +841,8 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_RDMA_FEATURES_NV";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FAULT_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FAULT_FEATURES_EXT";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES:
@@ -739,8 +859,10 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV:
@@ -755,6 +877,10 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES:
@@ -763,10 +889,20 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT:
@@ -783,6 +919,8 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INVOCATION_MASK_FEATURES_HUAWEI:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INVOCATION_MASK_FEATURES_HUAWEI";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT:
@@ -801,10 +939,16 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_EXT";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_EXT";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX:
@@ -815,8 +959,18 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_PROPERTIES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_PROPERTIES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT:
@@ -829,6 +983,14 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROPERTIES_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROPERTIES_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES";
#ifdef VK_ENABLE_BETA_EXTENSIONS
@@ -839,10 +1001,14 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR";
#endif // VK_ENABLE_BETA_EXTENSIONS
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_BARRIER_FEATURES_NV:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_BARRIER_FEATURES_NV";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES:
@@ -859,10 +1025,12 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MAINTENANCE_1_FEATURES_KHR:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MAINTENANCE_1_FEATURES_KHR";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR:
@@ -897,6 +1065,10 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD:
@@ -905,6 +1077,8 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_FEATURES_AMD:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_FEATURES_AMD";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT:
@@ -917,6 +1091,10 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV:
@@ -939,6 +1117,8 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_FEATURES_HUAWEI:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_FEATURES_HUAWEI";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_PROPERTIES_HUAWEI:
@@ -953,6 +1133,8 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES:
@@ -1045,6 +1227,8 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR";
case VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO:
return "VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_PIPELINE_PROPERTIES_IDENTIFIER_EXT:
+ return "VK_STRUCTURE_TYPE_PIPELINE_PROPERTIES_IDENTIFIER_EXT";
case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT:
return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT";
case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT:
@@ -1063,10 +1247,14 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO";
case VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV:
return "VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV";
+ case VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO_EXT";
case VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT:
return "VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT";
case VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO:
return "VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT";
case VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO:
return "VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO";
case VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO:
@@ -1120,8 +1308,12 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
case VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2:
return "VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2";
#ifdef VK_ENABLE_BETA_EXTENSIONS
- case VK_STRUCTURE_TYPE_QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_2_KHR:
- return "VK_STRUCTURE_TYPE_QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_2_KHR";
+ case VK_STRUCTURE_TYPE_QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR:
+ return "VK_STRUCTURE_TYPE_QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR";
+#endif // VK_ENABLE_BETA_EXTENSIONS
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ case VK_STRUCTURE_TYPE_QUEUE_FAMILY_VIDEO_PROPERTIES_KHR:
+ return "VK_STRUCTURE_TYPE_QUEUE_FAMILY_VIDEO_PROPERTIES_KHR";
#endif // VK_ENABLE_BETA_EXTENSIONS
case VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR:
return "VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR";
@@ -1149,6 +1341,10 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO";
case VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2:
return "VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2";
+ case VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_CONTROL_EXT:
+ return "VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_CONTROL_EXT";
+ case VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT";
case VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT:
return "VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT";
case VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO:
@@ -1157,6 +1353,8 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO";
case VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT:
return "VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT";
+ case VK_STRUCTURE_TYPE_RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT";
case VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM:
return "VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM";
case VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2:
@@ -1197,6 +1395,8 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO";
case VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO:
return "VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_SHADER_MODULE_IDENTIFIER_EXT:
+ return "VK_STRUCTURE_TYPE_SHADER_MODULE_IDENTIFIER_EXT";
case VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT:
return "VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT";
case VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR:
@@ -1223,14 +1423,20 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_SUBPASS_END_INFO";
case VK_STRUCTURE_TYPE_SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM:
return "VK_STRUCTURE_TYPE_SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM";
+ case VK_STRUCTURE_TYPE_SUBPASS_RESOLVE_PERFORMANCE_QUERY_EXT:
+ return "VK_STRUCTURE_TYPE_SUBPASS_RESOLVE_PERFORMANCE_QUERY_EXT";
case VK_STRUCTURE_TYPE_SUBPASS_SHADING_PIPELINE_CREATE_INFO_HUAWEI:
return "VK_STRUCTURE_TYPE_SUBPASS_SHADING_PIPELINE_CREATE_INFO_HUAWEI";
+ case VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_EXT:
+ return "VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_EXT";
case VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT:
return "VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT";
case VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR:
return "VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR";
case VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT:
return "VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT";
+ case VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_PRESENT_BARRIER_NV:
+ return "VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_PRESENT_BARRIER_NV";
case VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR:
return "VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR";
case VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT:
@@ -1245,10 +1451,14 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR";
case VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD:
return "VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD";
+ case VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV:
+ return "VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV";
case VK_STRUCTURE_TYPE_SYSMEM_COLOR_SPACE_FUCHSIA:
return "VK_STRUCTURE_TYPE_SYSMEM_COLOR_SPACE_FUCHSIA";
case VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD:
return "VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD";
+ case VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM:
+ return "VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM";
case VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO:
return "VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO";
case VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT:
@@ -1266,10 +1476,6 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_VIDEO_BEGIN_CODING_INFO_KHR";
#endif // VK_ENABLE_BETA_EXTENSIONS
#ifdef VK_ENABLE_BETA_EXTENSIONS
- case VK_STRUCTURE_TYPE_VIDEO_BIND_MEMORY_KHR:
- return "VK_STRUCTURE_TYPE_VIDEO_BIND_MEMORY_KHR";
-#endif // VK_ENABLE_BETA_EXTENSIONS
-#ifdef VK_ENABLE_BETA_EXTENSIONS
case VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR:
return "VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR";
#endif // VK_ENABLE_BETA_EXTENSIONS
@@ -1278,6 +1484,10 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_VIDEO_CODING_CONTROL_INFO_KHR";
#endif // VK_ENABLE_BETA_EXTENSIONS
#ifdef VK_ENABLE_BETA_EXTENSIONS
+ case VK_STRUCTURE_TYPE_VIDEO_DECODE_CAPABILITIES_KHR:
+ return "VK_STRUCTURE_TYPE_VIDEO_DECODE_CAPABILITIES_KHR";
+#endif // VK_ENABLE_BETA_EXTENSIONS
+#ifdef VK_ENABLE_BETA_EXTENSIONS
case VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_EXT:
return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_EXT";
#endif // VK_ENABLE_BETA_EXTENSIONS
@@ -1286,20 +1496,12 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_EXT";
#endif // VK_ENABLE_BETA_EXTENSIONS
#ifdef VK_ENABLE_BETA_EXTENSIONS
- case VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_MVC_EXT:
- return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_MVC_EXT";
-#endif // VK_ENABLE_BETA_EXTENSIONS
-#ifdef VK_ENABLE_BETA_EXTENSIONS
case VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_EXT:
return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_EXT";
#endif // VK_ENABLE_BETA_EXTENSIONS
#ifdef VK_ENABLE_BETA_EXTENSIONS
- case VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_EXT:
- return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_EXT";
-#endif // VK_ENABLE_BETA_EXTENSIONS
-#ifdef VK_ENABLE_BETA_EXTENSIONS
- case VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_INFO_EXT";
#endif // VK_ENABLE_BETA_EXTENSIONS
#ifdef VK_ENABLE_BETA_EXTENSIONS
case VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT:
@@ -1322,12 +1524,8 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_EXT";
#endif // VK_ENABLE_BETA_EXTENSIONS
#ifdef VK_ENABLE_BETA_EXTENSIONS
- case VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_EXT:
- return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_EXT";
-#endif // VK_ENABLE_BETA_EXTENSIONS
-#ifdef VK_ENABLE_BETA_EXTENSIONS
- case VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_INFO_EXT";
#endif // VK_ENABLE_BETA_EXTENSIONS
#ifdef VK_ENABLE_BETA_EXTENSIONS
case VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT:
@@ -1342,6 +1540,14 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_VIDEO_DECODE_INFO_KHR";
#endif // VK_ENABLE_BETA_EXTENSIONS
#ifdef VK_ENABLE_BETA_EXTENSIONS
+ case VK_STRUCTURE_TYPE_VIDEO_DECODE_USAGE_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_VIDEO_DECODE_USAGE_INFO_KHR";
+#endif // VK_ENABLE_BETA_EXTENSIONS
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ case VK_STRUCTURE_TYPE_VIDEO_ENCODE_CAPABILITIES_KHR:
+ return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_CAPABILITIES_KHR";
+#endif // VK_ENABLE_BETA_EXTENSIONS
+#ifdef VK_ENABLE_BETA_EXTENSIONS
case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_EXT:
return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_EXT";
#endif // VK_ENABLE_BETA_EXTENSIONS
@@ -1350,16 +1556,16 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_EXT";
#endif // VK_ENABLE_BETA_EXTENSIONS
#ifdef VK_ENABLE_BETA_EXTENSIONS
- case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_EMIT_PICTURE_PARAMETERS_EXT:
- return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_EMIT_PICTURE_PARAMETERS_EXT";
+ case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_EMIT_PICTURE_PARAMETERS_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_EMIT_PICTURE_PARAMETERS_INFO_EXT";
#endif // VK_ENABLE_BETA_EXTENSIONS
#ifdef VK_ENABLE_BETA_EXTENSIONS
- case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_EXT:
- return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_EXT";
+ case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_INFO_EXT";
#endif // VK_ENABLE_BETA_EXTENSIONS
#ifdef VK_ENABLE_BETA_EXTENSIONS
- case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_EXT:
- return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_EXT";
+ case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_INFO_EXT";
#endif // VK_ENABLE_BETA_EXTENSIONS
#ifdef VK_ENABLE_BETA_EXTENSIONS
case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_INFO_EXT:
@@ -1370,8 +1576,8 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_EXT";
#endif // VK_ENABLE_BETA_EXTENSIONS
#ifdef VK_ENABLE_BETA_EXTENSIONS
- case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_REFERENCE_LISTS_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_REFERENCE_LISTS_INFO_EXT";
#endif // VK_ENABLE_BETA_EXTENSIONS
#ifdef VK_ENABLE_BETA_EXTENSIONS
case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT:
@@ -1394,16 +1600,16 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_DPB_SLOT_INFO_EXT";
#endif // VK_ENABLE_BETA_EXTENSIONS
#ifdef VK_ENABLE_BETA_EXTENSIONS
- case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_EMIT_PICTURE_PARAMETERS_EXT:
- return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_EMIT_PICTURE_PARAMETERS_EXT";
+ case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_EMIT_PICTURE_PARAMETERS_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_EMIT_PICTURE_PARAMETERS_INFO_EXT";
#endif // VK_ENABLE_BETA_EXTENSIONS
#ifdef VK_ENABLE_BETA_EXTENSIONS
- case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_NALU_SLICE_EXT:
- return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_NALU_SLICE_EXT";
+ case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_EXT";
#endif // VK_ENABLE_BETA_EXTENSIONS
#ifdef VK_ENABLE_BETA_EXTENSIONS
- case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PROFILE_EXT:
- return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PROFILE_EXT";
+ case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PROFILE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PROFILE_INFO_EXT";
#endif // VK_ENABLE_BETA_EXTENSIONS
#ifdef VK_ENABLE_BETA_EXTENSIONS
case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_INFO_EXT:
@@ -1414,12 +1620,8 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_EXT";
#endif // VK_ENABLE_BETA_EXTENSIONS
#ifdef VK_ENABLE_BETA_EXTENSIONS
- case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_REFERENCE_LISTS_EXT:
- return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_REFERENCE_LISTS_EXT";
-#endif // VK_ENABLE_BETA_EXTENSIONS
-#ifdef VK_ENABLE_BETA_EXTENSIONS
- case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_REFERENCE_LISTS_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_REFERENCE_LISTS_INFO_EXT";
#endif // VK_ENABLE_BETA_EXTENSIONS
#ifdef VK_ENABLE_BETA_EXTENSIONS
case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT:
@@ -1446,6 +1648,10 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_LAYER_INFO_KHR";
#endif // VK_ENABLE_BETA_EXTENSIONS
#ifdef VK_ENABLE_BETA_EXTENSIONS
+ case VK_STRUCTURE_TYPE_VIDEO_ENCODE_USAGE_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_USAGE_INFO_KHR";
+#endif // VK_ENABLE_BETA_EXTENSIONS
+#ifdef VK_ENABLE_BETA_EXTENSIONS
case VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR:
return "VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR";
#endif // VK_ENABLE_BETA_EXTENSIONS
@@ -1454,34 +1660,30 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
return "VK_STRUCTURE_TYPE_VIDEO_FORMAT_PROPERTIES_KHR";
#endif // VK_ENABLE_BETA_EXTENSIONS
#ifdef VK_ENABLE_BETA_EXTENSIONS
- case VK_STRUCTURE_TYPE_VIDEO_GET_MEMORY_PROPERTIES_KHR:
- return "VK_STRUCTURE_TYPE_VIDEO_GET_MEMORY_PROPERTIES_KHR";
-#endif // VK_ENABLE_BETA_EXTENSIONS
-#ifdef VK_ENABLE_BETA_EXTENSIONS
- case VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_KHR:
- return "VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_KHR";
+ case VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_INFO_KHR";
#endif // VK_ENABLE_BETA_EXTENSIONS
#ifdef VK_ENABLE_BETA_EXTENSIONS
- case VK_STRUCTURE_TYPE_VIDEO_PROFILES_KHR:
- return "VK_STRUCTURE_TYPE_VIDEO_PROFILES_KHR";
+ case VK_STRUCTURE_TYPE_VIDEO_PROFILE_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_VIDEO_PROFILE_INFO_KHR";
#endif // VK_ENABLE_BETA_EXTENSIONS
#ifdef VK_ENABLE_BETA_EXTENSIONS
- case VK_STRUCTURE_TYPE_VIDEO_PROFILE_KHR:
- return "VK_STRUCTURE_TYPE_VIDEO_PROFILE_KHR";
+ case VK_STRUCTURE_TYPE_VIDEO_PROFILE_LIST_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_VIDEO_PROFILE_LIST_INFO_KHR";
#endif // VK_ENABLE_BETA_EXTENSIONS
#ifdef VK_ENABLE_BETA_EXTENSIONS
- case VK_STRUCTURE_TYPE_VIDEO_QUEUE_FAMILY_PROPERTIES_2_KHR:
- return "VK_STRUCTURE_TYPE_VIDEO_QUEUE_FAMILY_PROPERTIES_2_KHR";
-#endif // VK_ENABLE_BETA_EXTENSIONS
-#ifdef VK_ENABLE_BETA_EXTENSIONS
- case VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_KHR:
- return "VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_KHR";
+ case VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_INFO_KHR";
#endif // VK_ENABLE_BETA_EXTENSIONS
#ifdef VK_ENABLE_BETA_EXTENSIONS
case VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR:
return "VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR";
#endif // VK_ENABLE_BETA_EXTENSIONS
#ifdef VK_ENABLE_BETA_EXTENSIONS
+ case VK_STRUCTURE_TYPE_VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR:
+ return "VK_STRUCTURE_TYPE_VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR";
+#endif // VK_ENABLE_BETA_EXTENSIONS
+#ifdef VK_ENABLE_BETA_EXTENSIONS
case VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR:
return "VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR";
#endif // VK_ENABLE_BETA_EXTENSIONS
@@ -1516,6 +1718,17 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
}
}
+static inline const char* string_VkPipelineCacheHeaderVersion(VkPipelineCacheHeaderVersion input_value)
+{
+ switch (input_value)
+ {
+ case VK_PIPELINE_CACHE_HEADER_VERSION_ONE:
+ return "VK_PIPELINE_CACHE_HEADER_VERSION_ONE";
+ default:
+ return "Unhandled VkPipelineCacheHeaderVersion";
+ }
+}
+
static inline const char* string_VkAccessFlagBits(VkAccessFlagBits input_value)
{
switch (input_value)
@@ -1603,6 +1816,8 @@ static inline const char* string_VkImageLayout(VkImageLayout input_value)
{
switch (input_value)
{
+ case VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT:
+ return "VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT";
case VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL:
return "VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL";
case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
@@ -1692,8 +1907,8 @@ static inline const char* string_VkImageAspectFlagBits(VkImageAspectFlagBits inp
return "VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT";
case VK_IMAGE_ASPECT_METADATA_BIT:
return "VK_IMAGE_ASPECT_METADATA_BIT";
- case VK_IMAGE_ASPECT_NONE_KHR:
- return "VK_IMAGE_ASPECT_NONE_KHR";
+ case VK_IMAGE_ASPECT_NONE:
+ return "VK_IMAGE_ASPECT_NONE";
case VK_IMAGE_ASPECT_PLANE_0_BIT:
return "VK_IMAGE_ASPECT_PLANE_0_BIT";
case VK_IMAGE_ASPECT_PLANE_1_BIT:
@@ -1781,6 +1996,10 @@ static inline const char* string_VkObjectType(VkObjectType input_value)
return "VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV";
case VK_OBJECT_TYPE_INSTANCE:
return "VK_OBJECT_TYPE_INSTANCE";
+ case VK_OBJECT_TYPE_MICROMAP_EXT:
+ return "VK_OBJECT_TYPE_MICROMAP_EXT";
+ case VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV:
+ return "VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV";
case VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL:
return "VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL";
case VK_OBJECT_TYPE_PHYSICAL_DEVICE:
@@ -1828,17 +2047,6 @@ static inline const char* string_VkObjectType(VkObjectType input_value)
}
}
-static inline const char* string_VkPipelineCacheHeaderVersion(VkPipelineCacheHeaderVersion input_value)
-{
- switch (input_value)
- {
- case VK_PIPELINE_CACHE_HEADER_VERSION_ONE:
- return "VK_PIPELINE_CACHE_HEADER_VERSION_ONE";
- default:
- return "Unhandled VkPipelineCacheHeaderVersion";
- }
-}
-
static inline const char* string_VkVendorId(VkVendorId input_value)
{
switch (input_value)
@@ -2242,6 +2450,8 @@ static inline const char* string_VkFormat(VkFormat input_value)
return "VK_FORMAT_R16G16B16_UNORM";
case VK_FORMAT_R16G16B16_USCALED:
return "VK_FORMAT_R16G16B16_USCALED";
+ case VK_FORMAT_R16G16_S10_5_NV:
+ return "VK_FORMAT_R16G16_S10_5_NV";
case VK_FORMAT_R16G16_SFLOAT:
return "VK_FORMAT_R16G16_SFLOAT";
case VK_FORMAT_R16G16_SINT:
@@ -2421,8 +2631,8 @@ static inline const char* string_VkFormatFeatureFlagBits(VkFormatFeatureFlagBits
return "VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT";
case VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT:
return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT";
- case VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG:
- return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG";
+ case VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT:
+ return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT";
case VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT:
return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT";
case VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT:
@@ -2494,6 +2704,8 @@ static inline const char* string_VkImageCreateFlagBits(VkImageCreateFlagBits inp
{
case VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT:
return "VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT";
+ case VK_IMAGE_CREATE_2D_VIEW_COMPATIBLE_BIT_EXT:
+ return "VK_IMAGE_CREATE_2D_VIEW_COMPATIBLE_BIT_EXT";
case VK_IMAGE_CREATE_ALIAS_BIT:
return "VK_IMAGE_CREATE_ALIAS_BIT";
case VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT:
@@ -2508,6 +2720,8 @@ static inline const char* string_VkImageCreateFlagBits(VkImageCreateFlagBits inp
return "VK_IMAGE_CREATE_EXTENDED_USAGE_BIT";
case VK_IMAGE_CREATE_FRAGMENT_DENSITY_MAP_OFFSET_BIT_QCOM:
return "VK_IMAGE_CREATE_FRAGMENT_DENSITY_MAP_OFFSET_BIT_QCOM";
+ case VK_IMAGE_CREATE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_BIT_EXT:
+ return "VK_IMAGE_CREATE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_BIT_EXT";
case VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT:
return "VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT";
case VK_IMAGE_CREATE_PROTECTED_BIT:
@@ -2618,6 +2832,8 @@ static inline const char* string_VkImageUsageFlagBits(VkImageUsageFlagBits input
{
switch (input_value)
{
+ case VK_IMAGE_USAGE_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT:
+ return "VK_IMAGE_USAGE_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT";
case VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT:
return "VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT";
case VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT:
@@ -2632,6 +2848,10 @@ static inline const char* string_VkImageUsageFlagBits(VkImageUsageFlagBits input
return "VK_IMAGE_USAGE_INVOCATION_MASK_BIT_HUAWEI";
case VK_IMAGE_USAGE_SAMPLED_BIT:
return "VK_IMAGE_USAGE_SAMPLED_BIT";
+ case VK_IMAGE_USAGE_SAMPLE_BLOCK_MATCH_BIT_QCOM:
+ return "VK_IMAGE_USAGE_SAMPLE_BLOCK_MATCH_BIT_QCOM";
+ case VK_IMAGE_USAGE_SAMPLE_WEIGHT_BIT_QCOM:
+ return "VK_IMAGE_USAGE_SAMPLE_WEIGHT_BIT_QCOM";
case VK_IMAGE_USAGE_STORAGE_BIT:
return "VK_IMAGE_USAGE_STORAGE_BIT";
case VK_IMAGE_USAGE_TRANSFER_DST_BIT:
@@ -2685,6 +2905,33 @@ static inline std::string string_VkImageUsageFlags(VkImageUsageFlags input_value
return ret;
}
+static inline const char* string_VkInstanceCreateFlagBits(VkInstanceCreateFlagBits input_value)
+{
+ switch (input_value)
+ {
+ case VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR:
+ return "VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR";
+ default:
+ return "Unhandled VkInstanceCreateFlagBits";
+ }
+}
+
+static inline std::string string_VkInstanceCreateFlags(VkInstanceCreateFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkInstanceCreateFlagBits(static_cast<VkInstanceCreateFlagBits>(1U << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkInstanceCreateFlagBits(static_cast<VkInstanceCreateFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkMemoryHeapFlagBits(VkMemoryHeapFlagBits input_value)
{
switch (input_value)
@@ -2784,6 +3031,8 @@ static inline const char* string_VkQueueFlagBits(VkQueueFlagBits input_value)
return "VK_QUEUE_COMPUTE_BIT";
case VK_QUEUE_GRAPHICS_BIT:
return "VK_QUEUE_GRAPHICS_BIT";
+ case VK_QUEUE_OPTICAL_FLOW_BIT_NV:
+ return "VK_QUEUE_OPTICAL_FLOW_BIT_NV";
case VK_QUEUE_PROTECTED_BIT:
return "VK_QUEUE_PROTECTED_BIT";
case VK_QUEUE_SPARSE_BINDING_BIT:
@@ -2882,14 +3131,14 @@ static inline const char* string_VkPipelineStageFlagBits(VkPipelineStageFlagBits
return "VK_PIPELINE_STAGE_HOST_BIT";
case VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT:
return "VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT";
- case VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV:
- return "VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV";
+ case VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT:
+ return "VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT";
case VK_PIPELINE_STAGE_NONE:
return "VK_PIPELINE_STAGE_NONE";
case VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR:
return "VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR";
- case VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV:
- return "VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV";
+ case VK_PIPELINE_STAGE_TASK_SHADER_BIT_EXT:
+ return "VK_PIPELINE_STAGE_TASK_SHADER_BIT_EXT";
case VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT:
return "VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT";
case VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT:
@@ -3057,6 +3306,10 @@ static inline const char* string_VkQueryPipelineStatisticFlagBits(VkQueryPipelin
return "VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT";
case VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT:
return "VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT";
+ case VK_QUERY_PIPELINE_STATISTIC_MESH_SHADER_INVOCATIONS_BIT_EXT:
+ return "VK_QUERY_PIPELINE_STATISTIC_MESH_SHADER_INVOCATIONS_BIT_EXT";
+ case VK_QUERY_PIPELINE_STATISTIC_TASK_SHADER_INVOCATIONS_BIT_EXT:
+ return "VK_QUERY_PIPELINE_STATISTIC_TASK_SHADER_INVOCATIONS_BIT_EXT";
case VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT:
return "VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT";
case VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT:
@@ -3092,8 +3345,18 @@ static inline const char* string_VkQueryType(VkQueryType input_value)
return "VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR";
case VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV:
return "VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV";
+ case VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_BOTTOM_LEVEL_POINTERS_KHR:
+ return "VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_BOTTOM_LEVEL_POINTERS_KHR";
case VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR:
return "VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR";
+ case VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SIZE_KHR:
+ return "VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SIZE_KHR";
+ case VK_QUERY_TYPE_MESH_PRIMITIVES_GENERATED_EXT:
+ return "VK_QUERY_TYPE_MESH_PRIMITIVES_GENERATED_EXT";
+ case VK_QUERY_TYPE_MICROMAP_COMPACTED_SIZE_EXT:
+ return "VK_QUERY_TYPE_MICROMAP_COMPACTED_SIZE_EXT";
+ case VK_QUERY_TYPE_MICROMAP_SERIALIZATION_SIZE_EXT:
+ return "VK_QUERY_TYPE_MICROMAP_SERIALIZATION_SIZE_EXT";
case VK_QUERY_TYPE_OCCLUSION:
return "VK_QUERY_TYPE_OCCLUSION";
case VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL:
@@ -3102,6 +3365,8 @@ static inline const char* string_VkQueryType(VkQueryType input_value)
return "VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR";
case VK_QUERY_TYPE_PIPELINE_STATISTICS:
return "VK_QUERY_TYPE_PIPELINE_STATISTICS";
+ case VK_QUERY_TYPE_PRIMITIVES_GENERATED_EXT:
+ return "VK_QUERY_TYPE_PRIMITIVES_GENERATED_EXT";
#ifdef VK_ENABLE_BETA_EXTENSIONS
case VK_QUERY_TYPE_RESULT_STATUS_ONLY_KHR:
return "VK_QUERY_TYPE_RESULT_STATUS_ONLY_KHR";
@@ -3205,6 +3470,10 @@ static inline const char* string_VkBufferUsageFlagBits(VkBufferUsageFlagBits inp
return "VK_BUFFER_USAGE_INDEX_BUFFER_BIT";
case VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT:
return "VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT";
+ case VK_BUFFER_USAGE_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT:
+ return "VK_BUFFER_USAGE_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT";
+ case VK_BUFFER_USAGE_MICROMAP_STORAGE_BIT_EXT:
+ return "VK_BUFFER_USAGE_MICROMAP_STORAGE_BIT_EXT";
case VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR:
return "VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR";
case VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT:
@@ -3605,8 +3874,12 @@ static inline const char* string_VkPipelineCreateFlagBits(VkPipelineCreateFlagBi
return "VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR";
case VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR:
return "VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR";
+ case VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT:
+ return "VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT";
case VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV:
return "VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV";
+ case VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT:
+ return "VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT";
case VK_PIPELINE_CREATE_DERIVATIVE_BIT:
return "VK_PIPELINE_CREATE_DERIVATIVE_BIT";
case VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT:
@@ -3621,6 +3894,12 @@ static inline const char* string_VkPipelineCreateFlagBits(VkPipelineCreateFlagBi
return "VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV";
case VK_PIPELINE_CREATE_LIBRARY_BIT_KHR:
return "VK_PIPELINE_CREATE_LIBRARY_BIT_KHR";
+ case VK_PIPELINE_CREATE_LINK_TIME_OPTIMIZATION_BIT_EXT:
+ return "VK_PIPELINE_CREATE_LINK_TIME_OPTIMIZATION_BIT_EXT";
+ case VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT_EXT:
+ return "VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT_EXT";
+ case VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT_EXT:
+ return "VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT_EXT";
case VK_PIPELINE_CREATE_RAY_TRACING_ALLOW_MOTION_BIT_NV:
return "VK_PIPELINE_CREATE_RAY_TRACING_ALLOW_MOTION_BIT_NV";
case VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR:
@@ -3631,6 +3910,8 @@ static inline const char* string_VkPipelineCreateFlagBits(VkPipelineCreateFlagBi
return "VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR";
case VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR:
return "VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR";
+ case VK_PIPELINE_CREATE_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT:
+ return "VK_PIPELINE_CREATE_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT";
case VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR:
return "VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR";
case VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR:
@@ -3641,6 +3922,8 @@ static inline const char* string_VkPipelineCreateFlagBits(VkPipelineCreateFlagBi
return "VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT";
case VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR:
return "VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR";
+ case VK_PIPELINE_CREATE_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT:
+ return "VK_PIPELINE_CREATE_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT";
case VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT:
return "VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT";
default:
@@ -3715,16 +3998,16 @@ static inline const char* string_VkShaderStageFlagBits(VkShaderStageFlagBits inp
return "VK_SHADER_STAGE_GEOMETRY_BIT";
case VK_SHADER_STAGE_INTERSECTION_BIT_KHR:
return "VK_SHADER_STAGE_INTERSECTION_BIT_KHR";
- case VK_SHADER_STAGE_MESH_BIT_NV:
- return "VK_SHADER_STAGE_MESH_BIT_NV";
+ case VK_SHADER_STAGE_MESH_BIT_EXT:
+ return "VK_SHADER_STAGE_MESH_BIT_EXT";
case VK_SHADER_STAGE_MISS_BIT_KHR:
return "VK_SHADER_STAGE_MISS_BIT_KHR";
case VK_SHADER_STAGE_RAYGEN_BIT_KHR:
return "VK_SHADER_STAGE_RAYGEN_BIT_KHR";
case VK_SHADER_STAGE_SUBPASS_SHADING_BIT_HUAWEI:
return "VK_SHADER_STAGE_SUBPASS_SHADING_BIT_HUAWEI";
- case VK_SHADER_STAGE_TASK_BIT_NV:
- return "VK_SHADER_STAGE_TASK_BIT_NV";
+ case VK_SHADER_STAGE_TASK_BIT_EXT:
+ return "VK_SHADER_STAGE_TASK_BIT_EXT";
case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
return "VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT";
case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
@@ -3789,10 +4072,36 @@ static inline const char* string_VkDynamicState(VkDynamicState input_value)
{
switch (input_value)
{
+ case VK_DYNAMIC_STATE_ALPHA_TO_COVERAGE_ENABLE_EXT:
+ return "VK_DYNAMIC_STATE_ALPHA_TO_COVERAGE_ENABLE_EXT";
+ case VK_DYNAMIC_STATE_ALPHA_TO_ONE_ENABLE_EXT:
+ return "VK_DYNAMIC_STATE_ALPHA_TO_ONE_ENABLE_EXT";
case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
return "VK_DYNAMIC_STATE_BLEND_CONSTANTS";
+ case VK_DYNAMIC_STATE_COLOR_BLEND_ADVANCED_EXT:
+ return "VK_DYNAMIC_STATE_COLOR_BLEND_ADVANCED_EXT";
+ case VK_DYNAMIC_STATE_COLOR_BLEND_ENABLE_EXT:
+ return "VK_DYNAMIC_STATE_COLOR_BLEND_ENABLE_EXT";
+ case VK_DYNAMIC_STATE_COLOR_BLEND_EQUATION_EXT:
+ return "VK_DYNAMIC_STATE_COLOR_BLEND_EQUATION_EXT";
case VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT:
return "VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT";
+ case VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT:
+ return "VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT";
+ case VK_DYNAMIC_STATE_CONSERVATIVE_RASTERIZATION_MODE_EXT:
+ return "VK_DYNAMIC_STATE_CONSERVATIVE_RASTERIZATION_MODE_EXT";
+ case VK_DYNAMIC_STATE_COVERAGE_MODULATION_MODE_NV:
+ return "VK_DYNAMIC_STATE_COVERAGE_MODULATION_MODE_NV";
+ case VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_ENABLE_NV:
+ return "VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_ENABLE_NV";
+ case VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_NV:
+ return "VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_NV";
+ case VK_DYNAMIC_STATE_COVERAGE_REDUCTION_MODE_NV:
+ return "VK_DYNAMIC_STATE_COVERAGE_REDUCTION_MODE_NV";
+ case VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_ENABLE_NV:
+ return "VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_ENABLE_NV";
+ case VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_LOCATION_NV:
+ return "VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_LOCATION_NV";
case VK_DYNAMIC_STATE_CULL_MODE:
return "VK_DYNAMIC_STATE_CULL_MODE";
case VK_DYNAMIC_STATE_DEPTH_BIAS:
@@ -3803,6 +4112,12 @@ static inline const char* string_VkDynamicState(VkDynamicState input_value)
return "VK_DYNAMIC_STATE_DEPTH_BOUNDS";
case VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE:
return "VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE";
+ case VK_DYNAMIC_STATE_DEPTH_CLAMP_ENABLE_EXT:
+ return "VK_DYNAMIC_STATE_DEPTH_CLAMP_ENABLE_EXT";
+ case VK_DYNAMIC_STATE_DEPTH_CLIP_ENABLE_EXT:
+ return "VK_DYNAMIC_STATE_DEPTH_CLIP_ENABLE_EXT";
+ case VK_DYNAMIC_STATE_DEPTH_CLIP_NEGATIVE_ONE_TO_ONE_EXT:
+ return "VK_DYNAMIC_STATE_DEPTH_CLIP_NEGATIVE_ONE_TO_ONE_EXT";
case VK_DYNAMIC_STATE_DEPTH_COMPARE_OP:
return "VK_DYNAMIC_STATE_DEPTH_COMPARE_OP";
case VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE:
@@ -3813,32 +4128,56 @@ static inline const char* string_VkDynamicState(VkDynamicState input_value)
return "VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT";
case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
return "VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV";
+ case VK_DYNAMIC_STATE_EXTRA_PRIMITIVE_OVERESTIMATION_SIZE_EXT:
+ return "VK_DYNAMIC_STATE_EXTRA_PRIMITIVE_OVERESTIMATION_SIZE_EXT";
case VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR:
return "VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR";
case VK_DYNAMIC_STATE_FRONT_FACE:
return "VK_DYNAMIC_STATE_FRONT_FACE";
+ case VK_DYNAMIC_STATE_LINE_RASTERIZATION_MODE_EXT:
+ return "VK_DYNAMIC_STATE_LINE_RASTERIZATION_MODE_EXT";
+ case VK_DYNAMIC_STATE_LINE_STIPPLE_ENABLE_EXT:
+ return "VK_DYNAMIC_STATE_LINE_STIPPLE_ENABLE_EXT";
case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
return "VK_DYNAMIC_STATE_LINE_STIPPLE_EXT";
case VK_DYNAMIC_STATE_LINE_WIDTH:
return "VK_DYNAMIC_STATE_LINE_WIDTH";
+ case VK_DYNAMIC_STATE_LOGIC_OP_ENABLE_EXT:
+ return "VK_DYNAMIC_STATE_LOGIC_OP_ENABLE_EXT";
case VK_DYNAMIC_STATE_LOGIC_OP_EXT:
return "VK_DYNAMIC_STATE_LOGIC_OP_EXT";
case VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT:
return "VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT";
+ case VK_DYNAMIC_STATE_POLYGON_MODE_EXT:
+ return "VK_DYNAMIC_STATE_POLYGON_MODE_EXT";
case VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE:
return "VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE";
case VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY:
return "VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY";
+ case VK_DYNAMIC_STATE_PROVOKING_VERTEX_MODE_EXT:
+ return "VK_DYNAMIC_STATE_PROVOKING_VERTEX_MODE_EXT";
+ case VK_DYNAMIC_STATE_RASTERIZATION_SAMPLES_EXT:
+ return "VK_DYNAMIC_STATE_RASTERIZATION_SAMPLES_EXT";
+ case VK_DYNAMIC_STATE_RASTERIZATION_STREAM_EXT:
+ return "VK_DYNAMIC_STATE_RASTERIZATION_STREAM_EXT";
case VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE:
return "VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE";
case VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR:
return "VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR";
+ case VK_DYNAMIC_STATE_REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV:
+ return "VK_DYNAMIC_STATE_REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV";
+ case VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_ENABLE_EXT:
+ return "VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_ENABLE_EXT";
case VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT:
return "VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT";
+ case VK_DYNAMIC_STATE_SAMPLE_MASK_EXT:
+ return "VK_DYNAMIC_STATE_SAMPLE_MASK_EXT";
case VK_DYNAMIC_STATE_SCISSOR:
return "VK_DYNAMIC_STATE_SCISSOR";
case VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT:
return "VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT";
+ case VK_DYNAMIC_STATE_SHADING_RATE_IMAGE_ENABLE_NV:
+ return "VK_DYNAMIC_STATE_SHADING_RATE_IMAGE_ENABLE_NV";
case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
return "VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK";
case VK_DYNAMIC_STATE_STENCIL_OP:
@@ -3849,6 +4188,8 @@ static inline const char* string_VkDynamicState(VkDynamicState input_value)
return "VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE";
case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
return "VK_DYNAMIC_STATE_STENCIL_WRITE_MASK";
+ case VK_DYNAMIC_STATE_TESSELLATION_DOMAIN_ORIGIN_EXT:
+ return "VK_DYNAMIC_STATE_TESSELLATION_DOMAIN_ORIGIN_EXT";
case VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE:
return "VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE";
case VK_DYNAMIC_STATE_VERTEX_INPUT_EXT:
@@ -3859,8 +4200,12 @@ static inline const char* string_VkDynamicState(VkDynamicState input_value)
return "VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV";
case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
return "VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV";
+ case VK_DYNAMIC_STATE_VIEWPORT_SWIZZLE_NV:
+ return "VK_DYNAMIC_STATE_VIEWPORT_SWIZZLE_NV";
case VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT:
return "VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT";
+ case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_ENABLE_NV:
+ return "VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_ENABLE_NV";
case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV:
return "VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV";
default:
@@ -3946,10 +4291,10 @@ static inline const char* string_VkPipelineDepthStencilStateCreateFlagBits(VkPip
{
switch (input_value)
{
- case VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM:
- return "VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM";
- case VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM:
- return "VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM";
+ case VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT:
+ return "VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT";
+ case VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT:
+ return "VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT";
default:
return "Unhandled VkPipelineDepthStencilStateCreateFlagBits";
}
@@ -4000,8 +4345,8 @@ static inline const char* string_VkPipelineColorBlendStateCreateFlagBits(VkPipel
{
switch (input_value)
{
- case VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_ARM:
- return "VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_ARM";
+ case VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_EXT:
+ return "VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_EXT";
default:
return "Unhandled VkPipelineColorBlendStateCreateFlagBits";
}
@@ -4064,6 +4409,33 @@ static inline const char* string_VkLogicOp(VkLogicOp input_value)
}
}
+static inline const char* string_VkPipelineLayoutCreateFlagBits(VkPipelineLayoutCreateFlagBits input_value)
+{
+ switch (input_value)
+ {
+ case VK_PIPELINE_LAYOUT_CREATE_INDEPENDENT_SETS_BIT_EXT:
+ return "VK_PIPELINE_LAYOUT_CREATE_INDEPENDENT_SETS_BIT_EXT";
+ default:
+ return "Unhandled VkPipelineLayoutCreateFlagBits";
+ }
+}
+
+static inline std::string string_VkPipelineLayoutCreateFlags(VkPipelineLayoutCreateFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkPipelineLayoutCreateFlagBits(static_cast<VkPipelineLayoutCreateFlagBits>(1U << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkPipelineLayoutCreateFlagBits(static_cast<VkPipelineLayoutCreateFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkBorderColor(VkBorderColor input_value)
{
switch (input_value)
@@ -4093,8 +4465,8 @@ static inline const char* string_VkFilter(VkFilter input_value)
{
switch (input_value)
{
- case VK_FILTER_CUBIC_IMG:
- return "VK_FILTER_CUBIC_IMG";
+ case VK_FILTER_CUBIC_EXT:
+ return "VK_FILTER_CUBIC_EXT";
case VK_FILTER_LINEAR:
return "VK_FILTER_LINEAR";
case VK_FILTER_NEAREST:
@@ -4127,6 +4499,10 @@ static inline const char* string_VkSamplerCreateFlagBits(VkSamplerCreateFlagBits
{
switch (input_value)
{
+ case VK_SAMPLER_CREATE_IMAGE_PROCESSING_BIT_QCOM:
+ return "VK_SAMPLER_CREATE_IMAGE_PROCESSING_BIT_QCOM";
+ case VK_SAMPLER_CREATE_NON_SEAMLESS_CUBE_MAP_BIT_EXT:
+ return "VK_SAMPLER_CREATE_NON_SEAMLESS_CUBE_MAP_BIT_EXT";
case VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT:
return "VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT";
case VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT:
@@ -4171,8 +4547,8 @@ static inline const char* string_VkDescriptorPoolCreateFlagBits(VkDescriptorPool
{
case VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT:
return "VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT";
- case VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE:
- return "VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE";
+ case VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT:
+ return "VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT";
case VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT:
return "VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT";
default:
@@ -4204,18 +4580,22 @@ static inline const char* string_VkDescriptorType(VkDescriptorType input_value)
return "VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR";
case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV:
return "VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV";
+ case VK_DESCRIPTOR_TYPE_BLOCK_MATCH_IMAGE_QCOM:
+ return "VK_DESCRIPTOR_TYPE_BLOCK_MATCH_IMAGE_QCOM";
case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
return "VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER";
case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
return "VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK";
case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
return "VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT";
- case VK_DESCRIPTOR_TYPE_MUTABLE_VALVE:
- return "VK_DESCRIPTOR_TYPE_MUTABLE_VALVE";
+ case VK_DESCRIPTOR_TYPE_MUTABLE_EXT:
+ return "VK_DESCRIPTOR_TYPE_MUTABLE_EXT";
case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
return "VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE";
case VK_DESCRIPTOR_TYPE_SAMPLER:
return "VK_DESCRIPTOR_TYPE_SAMPLER";
+ case VK_DESCRIPTOR_TYPE_SAMPLE_WEIGHT_IMAGE_QCOM:
+ return "VK_DESCRIPTOR_TYPE_SAMPLE_WEIGHT_IMAGE_QCOM";
case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
return "VK_DESCRIPTOR_TYPE_STORAGE_BUFFER";
case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
@@ -4239,8 +4619,8 @@ static inline const char* string_VkDescriptorSetLayoutCreateFlagBits(VkDescripto
{
switch (input_value)
{
- case VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE:
- return "VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE";
+ case VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT:
+ return "VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT";
case VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR:
return "VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR";
case VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT:
@@ -4333,6 +4713,8 @@ static inline const char* string_VkDependencyFlagBits(VkDependencyFlagBits input
return "VK_DEPENDENCY_BY_REGION_BIT";
case VK_DEPENDENCY_DEVICE_GROUP_BIT:
return "VK_DEPENDENCY_DEVICE_GROUP_BIT";
+ case VK_DEPENDENCY_FEEDBACK_LOOP_BIT_EXT:
+ return "VK_DEPENDENCY_FEEDBACK_LOOP_BIT_EXT";
case VK_DEPENDENCY_VIEW_LOCAL_BIT:
return "VK_DEPENDENCY_VIEW_LOCAL_BIT";
default:
@@ -4431,18 +4813,20 @@ static inline const char* string_VkSubpassDescriptionFlagBits(VkSubpassDescripti
{
switch (input_value)
{
+ case VK_SUBPASS_DESCRIPTION_ENABLE_LEGACY_DITHERING_BIT_EXT:
+ return "VK_SUBPASS_DESCRIPTION_ENABLE_LEGACY_DITHERING_BIT_EXT";
case VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM:
return "VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM";
case VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX:
return "VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX";
case VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX:
return "VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX";
- case VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_ARM:
- return "VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_ARM";
- case VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM:
- return "VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM";
- case VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM:
- return "VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM";
+ case VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_EXT:
+ return "VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_EXT";
+ case VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT:
+ return "VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT";
+ case VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT:
+ return "VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT";
case VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM:
return "VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM";
default:
@@ -5164,6 +5548,8 @@ static inline const char* string_VkDriverId(VkDriverId input_value)
return "VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS";
case VK_DRIVER_ID_JUICE_PROPRIETARY:
return "VK_DRIVER_ID_JUICE_PROPRIETARY";
+ case VK_DRIVER_ID_MESA_DOZEN:
+ return "VK_DRIVER_ID_MESA_DOZEN";
case VK_DRIVER_ID_MESA_LLVMPIPE:
return "VK_DRIVER_ID_MESA_LLVMPIPE";
case VK_DRIVER_ID_MESA_PANVK:
@@ -5405,6 +5791,8 @@ static inline const char* string_VkPipelineStageFlagBits2(uint64_t input_value)
{
case VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR:
return "VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR";
+ case VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_COPY_BIT_KHR:
+ return "VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_COPY_BIT_KHR";
case VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT:
return "VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT";
case VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT:
@@ -5447,10 +5835,14 @@ static inline const char* string_VkPipelineStageFlagBits2(uint64_t input_value)
return "VK_PIPELINE_STAGE_2_INVOCATION_MASK_BIT_HUAWEI";
case VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT:
return "VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT";
- case VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV:
- return "VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV";
+ case VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_EXT:
+ return "VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_EXT";
+ case VK_PIPELINE_STAGE_2_MICROMAP_BUILD_BIT_EXT:
+ return "VK_PIPELINE_STAGE_2_MICROMAP_BUILD_BIT_EXT";
case VK_PIPELINE_STAGE_2_NONE:
return "VK_PIPELINE_STAGE_2_NONE";
+ case VK_PIPELINE_STAGE_2_OPTICAL_FLOW_BIT_NV:
+ return "VK_PIPELINE_STAGE_2_OPTICAL_FLOW_BIT_NV";
case VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT:
return "VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT";
case VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR:
@@ -5459,8 +5851,8 @@ static inline const char* string_VkPipelineStageFlagBits2(uint64_t input_value)
return "VK_PIPELINE_STAGE_2_RESOLVE_BIT";
case VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI:
return "VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI";
- case VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV:
- return "VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV";
+ case VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_EXT:
+ return "VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_EXT";
case VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT:
return "VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT";
case VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT:
@@ -5548,8 +5940,18 @@ static inline const char* string_VkAccessFlagBits2(uint64_t input_value)
return "VK_ACCESS_2_MEMORY_READ_BIT";
case VK_ACCESS_2_MEMORY_WRITE_BIT:
return "VK_ACCESS_2_MEMORY_WRITE_BIT";
+ case VK_ACCESS_2_MICROMAP_READ_BIT_EXT:
+ return "VK_ACCESS_2_MICROMAP_READ_BIT_EXT";
+ case VK_ACCESS_2_MICROMAP_WRITE_BIT_EXT:
+ return "VK_ACCESS_2_MICROMAP_WRITE_BIT_EXT";
case VK_ACCESS_2_NONE:
return "VK_ACCESS_2_NONE";
+ case VK_ACCESS_2_OPTICAL_FLOW_READ_BIT_NV:
+ return "VK_ACCESS_2_OPTICAL_FLOW_READ_BIT_NV";
+ case VK_ACCESS_2_OPTICAL_FLOW_WRITE_BIT_NV:
+ return "VK_ACCESS_2_OPTICAL_FLOW_WRITE_BIT_NV";
+ case VK_ACCESS_2_SHADER_BINDING_TABLE_READ_BIT_KHR:
+ return "VK_ACCESS_2_SHADER_BINDING_TABLE_READ_BIT_KHR";
case VK_ACCESS_2_SHADER_READ_BIT:
return "VK_ACCESS_2_SHADER_READ_BIT";
case VK_ACCESS_2_SHADER_SAMPLED_READ_BIT:
@@ -5644,6 +6046,8 @@ static inline const char* string_VkRenderingFlagBits(VkRenderingFlagBits input_v
{
case VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT:
return "VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT";
+ case VK_RENDERING_ENABLE_LEGACY_DITHERING_BIT_EXT:
+ return "VK_RENDERING_ENABLE_LEGACY_DITHERING_BIT_EXT";
case VK_RENDERING_RESUMING_BIT:
return "VK_RENDERING_RESUMING_BIT";
case VK_RENDERING_SUSPENDING_BIT:
@@ -5679,6 +6083,10 @@ static inline const char* string_VkFormatFeatureFlagBits2(uint64_t input_value)
return "VK_FORMAT_FEATURE_2_BLIT_DST_BIT";
case VK_FORMAT_FEATURE_2_BLIT_SRC_BIT:
return "VK_FORMAT_FEATURE_2_BLIT_SRC_BIT";
+ case VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM:
+ return "VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM";
+ case VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM:
+ return "VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM";
case VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT:
return "VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT";
case VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT:
@@ -5697,6 +6105,12 @@ static inline const char* string_VkFormatFeatureFlagBits2(uint64_t input_value)
return "VK_FORMAT_FEATURE_2_LINEAR_COLOR_ATTACHMENT_BIT_NV";
case VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT:
return "VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT";
+ case VK_FORMAT_FEATURE_2_OPTICAL_FLOW_COST_BIT_NV:
+ return "VK_FORMAT_FEATURE_2_OPTICAL_FLOW_COST_BIT_NV";
+ case VK_FORMAT_FEATURE_2_OPTICAL_FLOW_IMAGE_BIT_NV:
+ return "VK_FORMAT_FEATURE_2_OPTICAL_FLOW_IMAGE_BIT_NV";
+ case VK_FORMAT_FEATURE_2_OPTICAL_FLOW_VECTOR_BIT_NV:
+ return "VK_FORMAT_FEATURE_2_OPTICAL_FLOW_VECTOR_BIT_NV";
case VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT:
return "VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT";
case VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT:
@@ -5751,6 +6165,10 @@ static inline const char* string_VkFormatFeatureFlagBits2(uint64_t input_value)
case VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT_KHR:
return "VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT_KHR";
#endif // VK_ENABLE_BETA_EXTENSIONS
+ case VK_FORMAT_FEATURE_2_WEIGHT_IMAGE_BIT_QCOM:
+ return "VK_FORMAT_FEATURE_2_WEIGHT_IMAGE_BIT_QCOM";
+ case VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM:
+ return "VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM";
default:
return "Unhandled VkFormatFeatureFlagBits2";
}
@@ -6030,8 +6448,8 @@ static inline const char* string_VkVideoCodecOperationFlagBitsKHR(VkVideoCodecOp
case VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_EXT:
return "VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_EXT";
#endif // VK_ENABLE_BETA_EXTENSIONS
- case VK_VIDEO_CODEC_OPERATION_INVALID_BIT_KHR:
- return "VK_VIDEO_CODEC_OPERATION_INVALID_BIT_KHR";
+ case VK_VIDEO_CODEC_OPERATION_NONE_KHR:
+ return "VK_VIDEO_CODEC_OPERATION_NONE_KHR";
default:
return "Unhandled VkVideoCodecOperationFlagBitsKHR";
}
@@ -6067,8 +6485,8 @@ static inline const char* string_VkVideoChromaSubsamplingFlagBitsKHR(VkVideoChro
return "VK_VIDEO_CHROMA_SUBSAMPLING_422_BIT_KHR";
case VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR:
return "VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR";
- case VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_BIT_KHR:
- return "VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_BIT_KHR";
+ case VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_KHR:
+ return "VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_KHR";
case VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR:
return "VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR";
default:
@@ -6170,8 +6588,6 @@ static inline const char* string_VkVideoSessionCreateFlagBitsKHR(VkVideoSessionC
{
switch (input_value)
{
- case VK_VIDEO_SESSION_CREATE_DEFAULT_KHR:
- return "VK_VIDEO_SESSION_CREATE_DEFAULT_KHR";
case VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR:
return "VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR";
default:
@@ -6203,8 +6619,14 @@ static inline const char* string_VkVideoCodingControlFlagBitsKHR(VkVideoCodingCo
{
switch (input_value)
{
- case VK_VIDEO_CODING_CONTROL_DEFAULT_KHR:
- return "VK_VIDEO_CODING_CONTROL_DEFAULT_KHR";
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ case VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_BIT_KHR:
+ return "VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_BIT_KHR";
+#endif // VK_ENABLE_BETA_EXTENSIONS
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ case VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_LAYER_BIT_KHR:
+ return "VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_LAYER_BIT_KHR";
+#endif // VK_ENABLE_BETA_EXTENSIONS
case VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR:
return "VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR";
default:
@@ -6232,34 +6654,51 @@ static inline std::string string_VkVideoCodingControlFlagsKHR(VkVideoCodingContr
#ifdef VK_ENABLE_BETA_EXTENSIONS
-static inline const char* string_VkVideoCodingQualityPresetFlagBitsKHR(VkVideoCodingQualityPresetFlagBitsKHR input_value)
+static inline const char* string_VkQueryResultStatusKHR(VkQueryResultStatusKHR input_value)
{
switch (input_value)
{
- case VK_VIDEO_CODING_QUALITY_PRESET_NORMAL_BIT_KHR:
- return "VK_VIDEO_CODING_QUALITY_PRESET_NORMAL_BIT_KHR";
- case VK_VIDEO_CODING_QUALITY_PRESET_POWER_BIT_KHR:
- return "VK_VIDEO_CODING_QUALITY_PRESET_POWER_BIT_KHR";
- case VK_VIDEO_CODING_QUALITY_PRESET_QUALITY_BIT_KHR:
- return "VK_VIDEO_CODING_QUALITY_PRESET_QUALITY_BIT_KHR";
+ case VK_QUERY_RESULT_STATUS_COMPLETE_KHR:
+ return "VK_QUERY_RESULT_STATUS_COMPLETE_KHR";
+ case VK_QUERY_RESULT_STATUS_ERROR_KHR:
+ return "VK_QUERY_RESULT_STATUS_ERROR_KHR";
+ case VK_QUERY_RESULT_STATUS_NOT_READY_KHR:
+ return "VK_QUERY_RESULT_STATUS_NOT_READY_KHR";
+ default:
+ return "Unhandled VkQueryResultStatusKHR";
+ }
+}
+#endif // VK_ENABLE_BETA_EXTENSIONS
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+
+static inline const char* string_VkVideoDecodeCapabilityFlagBitsKHR(VkVideoDecodeCapabilityFlagBitsKHR input_value)
+{
+ switch (input_value)
+ {
+ case VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_COINCIDE_BIT_KHR:
+ return "VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_COINCIDE_BIT_KHR";
+ case VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_DISTINCT_BIT_KHR:
+ return "VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_DISTINCT_BIT_KHR";
default:
- return "Unhandled VkVideoCodingQualityPresetFlagBitsKHR";
+ return "Unhandled VkVideoDecodeCapabilityFlagBitsKHR";
}
}
-static inline std::string string_VkVideoCodingQualityPresetFlagsKHR(VkVideoCodingQualityPresetFlagsKHR input_value)
+static inline std::string string_VkVideoDecodeCapabilityFlagsKHR(VkVideoDecodeCapabilityFlagsKHR input_value)
{
std::string ret;
int index = 0;
while(input_value) {
if (input_value & 1) {
if( !ret.empty()) ret.append("|");
- ret.append(string_VkVideoCodingQualityPresetFlagBitsKHR(static_cast<VkVideoCodingQualityPresetFlagBitsKHR>(1U << index)));
+ ret.append(string_VkVideoDecodeCapabilityFlagBitsKHR(static_cast<VkVideoDecodeCapabilityFlagBitsKHR>(1U << index)));
}
++index;
input_value >>= 1;
}
- if( ret.empty()) ret.append(string_VkVideoCodingQualityPresetFlagBitsKHR(static_cast<VkVideoCodingQualityPresetFlagBitsKHR>(0)));
+ if( ret.empty()) ret.append(string_VkVideoDecodeCapabilityFlagBitsKHR(static_cast<VkVideoDecodeCapabilityFlagBitsKHR>(0)));
return ret;
}
#endif // VK_ENABLE_BETA_EXTENSIONS
@@ -6267,51 +6706,36 @@ static inline std::string string_VkVideoCodingQualityPresetFlagsKHR(VkVideoCodin
#ifdef VK_ENABLE_BETA_EXTENSIONS
-static inline const char* string_VkQueryResultStatusKHR(VkQueryResultStatusKHR input_value)
-{
- switch (input_value)
- {
- case VK_QUERY_RESULT_STATUS_COMPLETE_KHR:
- return "VK_QUERY_RESULT_STATUS_COMPLETE_KHR";
- case VK_QUERY_RESULT_STATUS_ERROR_KHR:
- return "VK_QUERY_RESULT_STATUS_ERROR_KHR";
- case VK_QUERY_RESULT_STATUS_NOT_READY_KHR:
- return "VK_QUERY_RESULT_STATUS_NOT_READY_KHR";
- default:
- return "Unhandled VkQueryResultStatusKHR";
- }
-}
-#endif // VK_ENABLE_BETA_EXTENSIONS
-
-
-#ifdef VK_ENABLE_BETA_EXTENSIONS
-
-static inline const char* string_VkVideoDecodeFlagBitsKHR(VkVideoDecodeFlagBitsKHR input_value)
+static inline const char* string_VkVideoDecodeUsageFlagBitsKHR(VkVideoDecodeUsageFlagBitsKHR input_value)
{
switch (input_value)
{
- case VK_VIDEO_DECODE_DEFAULT_KHR:
- return "VK_VIDEO_DECODE_DEFAULT_KHR";
- case VK_VIDEO_DECODE_RESERVED_0_BIT_KHR:
- return "VK_VIDEO_DECODE_RESERVED_0_BIT_KHR";
+ case VK_VIDEO_DECODE_USAGE_DEFAULT_KHR:
+ return "VK_VIDEO_DECODE_USAGE_DEFAULT_KHR";
+ case VK_VIDEO_DECODE_USAGE_OFFLINE_BIT_KHR:
+ return "VK_VIDEO_DECODE_USAGE_OFFLINE_BIT_KHR";
+ case VK_VIDEO_DECODE_USAGE_STREAMING_BIT_KHR:
+ return "VK_VIDEO_DECODE_USAGE_STREAMING_BIT_KHR";
+ case VK_VIDEO_DECODE_USAGE_TRANSCODING_BIT_KHR:
+ return "VK_VIDEO_DECODE_USAGE_TRANSCODING_BIT_KHR";
default:
- return "Unhandled VkVideoDecodeFlagBitsKHR";
+ return "Unhandled VkVideoDecodeUsageFlagBitsKHR";
}
}
-static inline std::string string_VkVideoDecodeFlagsKHR(VkVideoDecodeFlagsKHR input_value)
+static inline std::string string_VkVideoDecodeUsageFlagsKHR(VkVideoDecodeUsageFlagsKHR input_value)
{
std::string ret;
int index = 0;
while(input_value) {
if (input_value & 1) {
if( !ret.empty()) ret.append("|");
- ret.append(string_VkVideoDecodeFlagBitsKHR(static_cast<VkVideoDecodeFlagBitsKHR>(1U << index)));
+ ret.append(string_VkVideoDecodeUsageFlagBitsKHR(static_cast<VkVideoDecodeUsageFlagBitsKHR>(1U << index)));
}
++index;
input_value >>= 1;
}
- if( ret.empty()) ret.append(string_VkVideoDecodeFlagBitsKHR(static_cast<VkVideoDecodeFlagBitsKHR>(0)));
+ if( ret.empty()) ret.append(string_VkVideoDecodeUsageFlagBitsKHR(static_cast<VkVideoDecodeUsageFlagBitsKHR>(0)));
return ret;
}
#endif // VK_ENABLE_BETA_EXTENSIONS
@@ -6322,6 +6746,8 @@ static inline const char* string_VkRenderingFlagBitsKHR(VkRenderingFlagBitsKHR i
{
case VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT:
return "VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT";
+ case VK_RENDERING_ENABLE_LEGACY_DITHERING_BIT_EXT:
+ return "VK_RENDERING_ENABLE_LEGACY_DITHERING_BIT_EXT";
case VK_RENDERING_RESUMING_BIT:
return "VK_RENDERING_RESUMING_BIT";
case VK_RENDERING_SUSPENDING_BIT:
@@ -6898,6 +7324,8 @@ static inline const char* string_VkDriverIdKHR(VkDriverIdKHR input_value)
return "VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS";
case VK_DRIVER_ID_JUICE_PROPRIETARY:
return "VK_DRIVER_ID_JUICE_PROPRIETARY";
+ case VK_DRIVER_ID_MESA_DOZEN:
+ return "VK_DRIVER_ID_MESA_DOZEN";
case VK_DRIVER_ID_MESA_LLVMPIPE:
return "VK_DRIVER_ID_MESA_LLVMPIPE";
case VK_DRIVER_ID_MESA_PANVK:
@@ -7054,32 +7482,65 @@ static inline const char* string_VkPipelineExecutableStatisticFormatKHR(VkPipeli
#ifdef VK_ENABLE_BETA_EXTENSIONS
-static inline const char* string_VkVideoEncodeFlagBitsKHR(VkVideoEncodeFlagBitsKHR input_value)
+static inline const char* string_VkVideoEncodeCapabilityFlagBitsKHR(VkVideoEncodeCapabilityFlagBitsKHR input_value)
+{
+ switch (input_value)
+ {
+ case VK_VIDEO_ENCODE_CAPABILITY_PRECEDING_EXTERNALLY_ENCODED_BYTES_BIT_KHR:
+ return "VK_VIDEO_ENCODE_CAPABILITY_PRECEDING_EXTERNALLY_ENCODED_BYTES_BIT_KHR";
+ default:
+ return "Unhandled VkVideoEncodeCapabilityFlagBitsKHR";
+ }
+}
+
+static inline std::string string_VkVideoEncodeCapabilityFlagsKHR(VkVideoEncodeCapabilityFlagsKHR input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkVideoEncodeCapabilityFlagBitsKHR(static_cast<VkVideoEncodeCapabilityFlagBitsKHR>(1U << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkVideoEncodeCapabilityFlagBitsKHR(static_cast<VkVideoEncodeCapabilityFlagBitsKHR>(0)));
+ return ret;
+}
+#endif // VK_ENABLE_BETA_EXTENSIONS
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+
+static inline const char* string_VkVideoEncodeRateControlModeFlagBitsKHR(VkVideoEncodeRateControlModeFlagBitsKHR input_value)
{
switch (input_value)
{
- case VK_VIDEO_ENCODE_DEFAULT_KHR:
- return "VK_VIDEO_ENCODE_DEFAULT_KHR";
- case VK_VIDEO_ENCODE_RESERVED_0_BIT_KHR:
- return "VK_VIDEO_ENCODE_RESERVED_0_BIT_KHR";
+ case VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR:
+ return "VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR";
+ case VK_VIDEO_ENCODE_RATE_CONTROL_MODE_NONE_BIT_KHR:
+ return "VK_VIDEO_ENCODE_RATE_CONTROL_MODE_NONE_BIT_KHR";
+ case VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR:
+ return "VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR";
default:
- return "Unhandled VkVideoEncodeFlagBitsKHR";
+ return "Unhandled VkVideoEncodeRateControlModeFlagBitsKHR";
}
}
-static inline std::string string_VkVideoEncodeFlagsKHR(VkVideoEncodeFlagsKHR input_value)
+static inline std::string string_VkVideoEncodeRateControlModeFlagsKHR(VkVideoEncodeRateControlModeFlagsKHR input_value)
{
std::string ret;
int index = 0;
while(input_value) {
if (input_value & 1) {
if( !ret.empty()) ret.append("|");
- ret.append(string_VkVideoEncodeFlagBitsKHR(static_cast<VkVideoEncodeFlagBitsKHR>(1U << index)));
+ ret.append(string_VkVideoEncodeRateControlModeFlagBitsKHR(static_cast<VkVideoEncodeRateControlModeFlagBitsKHR>(1U << index)));
}
++index;
input_value >>= 1;
}
- if( ret.empty()) ret.append(string_VkVideoEncodeFlagBitsKHR(static_cast<VkVideoEncodeFlagBitsKHR>(0)));
+ if( ret.empty()) ret.append(string_VkVideoEncodeRateControlModeFlagBitsKHR(static_cast<VkVideoEncodeRateControlModeFlagBitsKHR>(0)));
return ret;
}
#endif // VK_ENABLE_BETA_EXTENSIONS
@@ -7087,32 +7548,38 @@ static inline std::string string_VkVideoEncodeFlagsKHR(VkVideoEncodeFlagsKHR inp
#ifdef VK_ENABLE_BETA_EXTENSIONS
-static inline const char* string_VkVideoEncodeRateControlFlagBitsKHR(VkVideoEncodeRateControlFlagBitsKHR input_value)
+static inline const char* string_VkVideoEncodeUsageFlagBitsKHR(VkVideoEncodeUsageFlagBitsKHR input_value)
{
switch (input_value)
{
- case VK_VIDEO_ENCODE_RATE_CONTROL_DEFAULT_KHR:
- return "VK_VIDEO_ENCODE_RATE_CONTROL_DEFAULT_KHR";
- case VK_VIDEO_ENCODE_RATE_CONTROL_RESERVED_0_BIT_KHR:
- return "VK_VIDEO_ENCODE_RATE_CONTROL_RESERVED_0_BIT_KHR";
+ case VK_VIDEO_ENCODE_USAGE_CONFERENCING_BIT_KHR:
+ return "VK_VIDEO_ENCODE_USAGE_CONFERENCING_BIT_KHR";
+ case VK_VIDEO_ENCODE_USAGE_DEFAULT_KHR:
+ return "VK_VIDEO_ENCODE_USAGE_DEFAULT_KHR";
+ case VK_VIDEO_ENCODE_USAGE_RECORDING_BIT_KHR:
+ return "VK_VIDEO_ENCODE_USAGE_RECORDING_BIT_KHR";
+ case VK_VIDEO_ENCODE_USAGE_STREAMING_BIT_KHR:
+ return "VK_VIDEO_ENCODE_USAGE_STREAMING_BIT_KHR";
+ case VK_VIDEO_ENCODE_USAGE_TRANSCODING_BIT_KHR:
+ return "VK_VIDEO_ENCODE_USAGE_TRANSCODING_BIT_KHR";
default:
- return "Unhandled VkVideoEncodeRateControlFlagBitsKHR";
+ return "Unhandled VkVideoEncodeUsageFlagBitsKHR";
}
}
-static inline std::string string_VkVideoEncodeRateControlFlagsKHR(VkVideoEncodeRateControlFlagsKHR input_value)
+static inline std::string string_VkVideoEncodeUsageFlagsKHR(VkVideoEncodeUsageFlagsKHR input_value)
{
std::string ret;
int index = 0;
while(input_value) {
if (input_value & 1) {
if( !ret.empty()) ret.append("|");
- ret.append(string_VkVideoEncodeRateControlFlagBitsKHR(static_cast<VkVideoEncodeRateControlFlagBitsKHR>(1U << index)));
+ ret.append(string_VkVideoEncodeUsageFlagBitsKHR(static_cast<VkVideoEncodeUsageFlagBitsKHR>(1U << index)));
}
++index;
input_value >>= 1;
}
- if( ret.empty()) ret.append(string_VkVideoEncodeRateControlFlagBitsKHR(static_cast<VkVideoEncodeRateControlFlagBitsKHR>(0)));
+ if( ret.empty()) ret.append(string_VkVideoEncodeUsageFlagBitsKHR(static_cast<VkVideoEncodeUsageFlagBitsKHR>(0)));
return ret;
}
#endif // VK_ENABLE_BETA_EXTENSIONS
@@ -7120,44 +7587,71 @@ static inline std::string string_VkVideoEncodeRateControlFlagsKHR(VkVideoEncodeR
#ifdef VK_ENABLE_BETA_EXTENSIONS
-static inline const char* string_VkVideoEncodeRateControlModeFlagBitsKHR(VkVideoEncodeRateControlModeFlagBitsKHR input_value)
+static inline const char* string_VkVideoEncodeContentFlagBitsKHR(VkVideoEncodeContentFlagBitsKHR input_value)
{
switch (input_value)
{
- case VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR:
- return "VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR";
- case VK_VIDEO_ENCODE_RATE_CONTROL_MODE_NONE_BIT_KHR:
- return "VK_VIDEO_ENCODE_RATE_CONTROL_MODE_NONE_BIT_KHR";
- case VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR:
- return "VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR";
+ case VK_VIDEO_ENCODE_CONTENT_CAMERA_BIT_KHR:
+ return "VK_VIDEO_ENCODE_CONTENT_CAMERA_BIT_KHR";
+ case VK_VIDEO_ENCODE_CONTENT_DEFAULT_KHR:
+ return "VK_VIDEO_ENCODE_CONTENT_DEFAULT_KHR";
+ case VK_VIDEO_ENCODE_CONTENT_DESKTOP_BIT_KHR:
+ return "VK_VIDEO_ENCODE_CONTENT_DESKTOP_BIT_KHR";
+ case VK_VIDEO_ENCODE_CONTENT_RENDERED_BIT_KHR:
+ return "VK_VIDEO_ENCODE_CONTENT_RENDERED_BIT_KHR";
default:
- return "Unhandled VkVideoEncodeRateControlModeFlagBitsKHR";
+ return "Unhandled VkVideoEncodeContentFlagBitsKHR";
}
}
-static inline std::string string_VkVideoEncodeRateControlModeFlagsKHR(VkVideoEncodeRateControlModeFlagsKHR input_value)
+static inline std::string string_VkVideoEncodeContentFlagsKHR(VkVideoEncodeContentFlagsKHR input_value)
{
std::string ret;
int index = 0;
while(input_value) {
if (input_value & 1) {
if( !ret.empty()) ret.append("|");
- ret.append(string_VkVideoEncodeRateControlModeFlagBitsKHR(static_cast<VkVideoEncodeRateControlModeFlagBitsKHR>(1U << index)));
+ ret.append(string_VkVideoEncodeContentFlagBitsKHR(static_cast<VkVideoEncodeContentFlagBitsKHR>(1U << index)));
}
++index;
input_value >>= 1;
}
- if( ret.empty()) ret.append(string_VkVideoEncodeRateControlModeFlagBitsKHR(static_cast<VkVideoEncodeRateControlModeFlagBitsKHR>(0)));
+ if( ret.empty()) ret.append(string_VkVideoEncodeContentFlagBitsKHR(static_cast<VkVideoEncodeContentFlagBitsKHR>(0)));
return ret;
}
#endif // VK_ENABLE_BETA_EXTENSIONS
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+
+static inline const char* string_VkVideoEncodeTuningModeKHR(VkVideoEncodeTuningModeKHR input_value)
+{
+ switch (input_value)
+ {
+ case VK_VIDEO_ENCODE_TUNING_MODE_DEFAULT_KHR:
+ return "VK_VIDEO_ENCODE_TUNING_MODE_DEFAULT_KHR";
+ case VK_VIDEO_ENCODE_TUNING_MODE_HIGH_QUALITY_KHR:
+ return "VK_VIDEO_ENCODE_TUNING_MODE_HIGH_QUALITY_KHR";
+ case VK_VIDEO_ENCODE_TUNING_MODE_LOSSLESS_KHR:
+ return "VK_VIDEO_ENCODE_TUNING_MODE_LOSSLESS_KHR";
+ case VK_VIDEO_ENCODE_TUNING_MODE_LOW_LATENCY_KHR:
+ return "VK_VIDEO_ENCODE_TUNING_MODE_LOW_LATENCY_KHR";
+ case VK_VIDEO_ENCODE_TUNING_MODE_ULTRA_LOW_LATENCY_KHR:
+ return "VK_VIDEO_ENCODE_TUNING_MODE_ULTRA_LOW_LATENCY_KHR";
+ default:
+ return "Unhandled VkVideoEncodeTuningModeKHR";
+ }
+}
+#endif // VK_ENABLE_BETA_EXTENSIONS
+
static inline const char* string_VkPipelineStageFlagBits2KHR(uint64_t input_value)
{
switch (input_value)
{
case VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR:
return "VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR";
+ case VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_COPY_BIT_KHR:
+ return "VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_COPY_BIT_KHR";
case VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT:
return "VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT";
case VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT:
@@ -7200,10 +7694,14 @@ static inline const char* string_VkPipelineStageFlagBits2KHR(uint64_t input_valu
return "VK_PIPELINE_STAGE_2_INVOCATION_MASK_BIT_HUAWEI";
case VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT:
return "VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT";
- case VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV:
- return "VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV";
+ case VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_EXT:
+ return "VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_EXT";
+ case VK_PIPELINE_STAGE_2_MICROMAP_BUILD_BIT_EXT:
+ return "VK_PIPELINE_STAGE_2_MICROMAP_BUILD_BIT_EXT";
case VK_PIPELINE_STAGE_2_NONE:
return "VK_PIPELINE_STAGE_2_NONE";
+ case VK_PIPELINE_STAGE_2_OPTICAL_FLOW_BIT_NV:
+ return "VK_PIPELINE_STAGE_2_OPTICAL_FLOW_BIT_NV";
case VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT:
return "VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT";
case VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR:
@@ -7212,8 +7710,8 @@ static inline const char* string_VkPipelineStageFlagBits2KHR(uint64_t input_valu
return "VK_PIPELINE_STAGE_2_RESOLVE_BIT";
case VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI:
return "VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI";
- case VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV:
- return "VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV";
+ case VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_EXT:
+ return "VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_EXT";
case VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT:
return "VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT";
case VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT:
@@ -7301,8 +7799,18 @@ static inline const char* string_VkAccessFlagBits2KHR(uint64_t input_value)
return "VK_ACCESS_2_MEMORY_READ_BIT";
case VK_ACCESS_2_MEMORY_WRITE_BIT:
return "VK_ACCESS_2_MEMORY_WRITE_BIT";
+ case VK_ACCESS_2_MICROMAP_READ_BIT_EXT:
+ return "VK_ACCESS_2_MICROMAP_READ_BIT_EXT";
+ case VK_ACCESS_2_MICROMAP_WRITE_BIT_EXT:
+ return "VK_ACCESS_2_MICROMAP_WRITE_BIT_EXT";
case VK_ACCESS_2_NONE:
return "VK_ACCESS_2_NONE";
+ case VK_ACCESS_2_OPTICAL_FLOW_READ_BIT_NV:
+ return "VK_ACCESS_2_OPTICAL_FLOW_READ_BIT_NV";
+ case VK_ACCESS_2_OPTICAL_FLOW_WRITE_BIT_NV:
+ return "VK_ACCESS_2_OPTICAL_FLOW_WRITE_BIT_NV";
+ case VK_ACCESS_2_SHADER_BINDING_TABLE_READ_BIT_KHR:
+ return "VK_ACCESS_2_SHADER_BINDING_TABLE_READ_BIT_KHR";
case VK_ACCESS_2_SHADER_READ_BIT:
return "VK_ACCESS_2_SHADER_READ_BIT";
case VK_ACCESS_2_SHADER_SAMPLED_READ_BIT:
@@ -7401,6 +7909,10 @@ static inline const char* string_VkFormatFeatureFlagBits2KHR(uint64_t input_valu
return "VK_FORMAT_FEATURE_2_BLIT_DST_BIT";
case VK_FORMAT_FEATURE_2_BLIT_SRC_BIT:
return "VK_FORMAT_FEATURE_2_BLIT_SRC_BIT";
+ case VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM:
+ return "VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM";
+ case VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM:
+ return "VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM";
case VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT:
return "VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT";
case VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT:
@@ -7419,6 +7931,12 @@ static inline const char* string_VkFormatFeatureFlagBits2KHR(uint64_t input_valu
return "VK_FORMAT_FEATURE_2_LINEAR_COLOR_ATTACHMENT_BIT_NV";
case VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT:
return "VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT";
+ case VK_FORMAT_FEATURE_2_OPTICAL_FLOW_COST_BIT_NV:
+ return "VK_FORMAT_FEATURE_2_OPTICAL_FLOW_COST_BIT_NV";
+ case VK_FORMAT_FEATURE_2_OPTICAL_FLOW_IMAGE_BIT_NV:
+ return "VK_FORMAT_FEATURE_2_OPTICAL_FLOW_IMAGE_BIT_NV";
+ case VK_FORMAT_FEATURE_2_OPTICAL_FLOW_VECTOR_BIT_NV:
+ return "VK_FORMAT_FEATURE_2_OPTICAL_FLOW_VECTOR_BIT_NV";
case VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT:
return "VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT";
case VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT:
@@ -7473,6 +7991,10 @@ static inline const char* string_VkFormatFeatureFlagBits2KHR(uint64_t input_valu
case VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT_KHR:
return "VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT_KHR";
#endif // VK_ENABLE_BETA_EXTENSIONS
+ case VK_FORMAT_FEATURE_2_WEIGHT_IMAGE_BIT_QCOM:
+ return "VK_FORMAT_FEATURE_2_WEIGHT_IMAGE_BIT_QCOM";
+ case VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM:
+ return "VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM";
default:
return "Unhandled VkFormatFeatureFlagBits2KHR";
}
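
Note that the *FlagBits2KHR helpers in this hunk take a raw uint64_t rather than an enum type: the synchronization2 and format-feature-2 flag types are 64 bits wide, so a mask-to-string walk over them needs a 64-bit shift. A sketch of such a walk, not taken from the generated header, reusing only the per-bit function shown above:

    #include <cstdint>
    #include <string>
    // Hypothetical helper (not part of vk_enum_string_helper.h): decompose a 64-bit
    // mask such as VkPipelineStageFlags2 into "A|B|C" using a generated per-bit
    // function like string_VkPipelineStageFlagBits2KHR. The 1ULL shift (not 1U)
    // is the point - a 32-bit shift would drop every bit above bit 31.
    static std::string string_flags64(uint64_t mask, const char* (*bit_name)(uint64_t))
    {
        std::string ret;
        for (int index = 0; mask != 0; ++index, mask >>= 1) {
            if (mask & 1) {
                if (!ret.empty()) ret.append("|");
                ret.append(bit_name(1ULL << index));
            }
        }
        return ret.empty() ? bit_name(0) : ret;
    }
    // Usage: string_flags64(stage_mask, string_VkPipelineStageFlagBits2KHR)
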
@@ -7636,6 +8158,8 @@ static inline const char* string_VkVideoEncodeH264CapabilityFlagBitsEXT(VkVideoE
{
switch (input_value)
{
+ case VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_EXT";
case VK_VIDEO_ENCODE_H264_CAPABILITY_CABAC_BIT_EXT:
return "VK_VIDEO_ENCODE_H264_CAPABILITY_CABAC_BIT_EXT";
case VK_VIDEO_ENCODE_H264_CAPABILITY_CAVLC_BIT_EXT:
@@ -7648,16 +8172,42 @@ static inline const char* string_VkVideoEncodeH264CapabilityFlagBitsEXT(VkVideoE
return "VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_ENABLED_BIT_EXT";
case VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_PARTIAL_BIT_EXT:
return "VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_PARTIAL_BIT_EXT";
- case VK_VIDEO_ENCODE_H264_CAPABILITY_EVENLY_DISTRIBUTED_SLICE_SIZE_BIT_EXT:
- return "VK_VIDEO_ENCODE_H264_CAPABILITY_EVENLY_DISTRIBUTED_SLICE_SIZE_BIT_EXT";
+ case VK_VIDEO_ENCODE_H264_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H264_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_EXT";
+ case VK_VIDEO_ENCODE_H264_CAPABILITY_DIRECT_8X8_INFERENCE_DISABLED_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H264_CAPABILITY_DIRECT_8X8_INFERENCE_DISABLED_BIT_EXT";
+ case VK_VIDEO_ENCODE_H264_CAPABILITY_DIRECT_8X8_INFERENCE_ENABLED_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H264_CAPABILITY_DIRECT_8X8_INFERENCE_ENABLED_BIT_EXT";
+ case VK_VIDEO_ENCODE_H264_CAPABILITY_DISABLE_DIRECT_SPATIAL_MV_PRED_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H264_CAPABILITY_DISABLE_DIRECT_SPATIAL_MV_PRED_BIT_EXT";
+ case VK_VIDEO_ENCODE_H264_CAPABILITY_HRD_COMPLIANCE_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H264_CAPABILITY_HRD_COMPLIANCE_BIT_EXT";
case VK_VIDEO_ENCODE_H264_CAPABILITY_MULTIPLE_SLICE_PER_FRAME_BIT_EXT:
return "VK_VIDEO_ENCODE_H264_CAPABILITY_MULTIPLE_SLICE_PER_FRAME_BIT_EXT";
+ case VK_VIDEO_ENCODE_H264_CAPABILITY_PIC_INIT_QP_MINUS26_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H264_CAPABILITY_PIC_INIT_QP_MINUS26_BIT_EXT";
+ case VK_VIDEO_ENCODE_H264_CAPABILITY_QPPRIME_Y_ZERO_TRANSFORM_BYPASS_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H264_CAPABILITY_QPPRIME_Y_ZERO_TRANSFORM_BYPASS_BIT_EXT";
+ case VK_VIDEO_ENCODE_H264_CAPABILITY_ROW_UNALIGNED_SLICE_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H264_CAPABILITY_ROW_UNALIGNED_SLICE_BIT_EXT";
+ case VK_VIDEO_ENCODE_H264_CAPABILITY_SCALING_LISTS_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H264_CAPABILITY_SCALING_LISTS_BIT_EXT";
case VK_VIDEO_ENCODE_H264_CAPABILITY_SECOND_CHROMA_QP_OFFSET_BIT_EXT:
return "VK_VIDEO_ENCODE_H264_CAPABILITY_SECOND_CHROMA_QP_OFFSET_BIT_EXT";
+ case VK_VIDEO_ENCODE_H264_CAPABILITY_SEPARATE_COLOUR_PLANE_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H264_CAPABILITY_SEPARATE_COLOUR_PLANE_BIT_EXT";
+ case VK_VIDEO_ENCODE_H264_CAPABILITY_SLICE_MB_COUNT_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H264_CAPABILITY_SLICE_MB_COUNT_BIT_EXT";
case VK_VIDEO_ENCODE_H264_CAPABILITY_TRANSFORM_8X8_BIT_EXT:
return "VK_VIDEO_ENCODE_H264_CAPABILITY_TRANSFORM_8X8_BIT_EXT";
- case VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BI_PRED_IMPLICIT_BIT_EXT:
- return "VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BI_PRED_IMPLICIT_BIT_EXT";
+ case VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BIPRED_EXPLICIT_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BIPRED_EXPLICIT_BIT_EXT";
+ case VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BIPRED_IMPLICIT_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BIPRED_IMPLICIT_BIT_EXT";
+ case VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_PRED_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_PRED_BIT_EXT";
+ case VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_PRED_NO_TABLE_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_PRED_NO_TABLE_BIT_EXT";
default:
return "Unhandled VkVideoEncodeH264CapabilityFlagBitsEXT";
}
@@ -7753,67 +8303,99 @@ static inline std::string string_VkVideoEncodeH264OutputModeFlagsEXT(VkVideoEnco
#ifdef VK_ENABLE_BETA_EXTENSIONS
-static inline const char* string_VkVideoEncodeH264CreateFlagBitsEXT(VkVideoEncodeH264CreateFlagBitsEXT input_value)
+static inline const char* string_VkVideoEncodeH264RateControlStructureEXT(VkVideoEncodeH264RateControlStructureEXT input_value)
{
switch (input_value)
{
- case VK_VIDEO_ENCODE_H264_CREATE_DEFAULT_EXT:
- return "VK_VIDEO_ENCODE_H264_CREATE_DEFAULT_EXT";
- case VK_VIDEO_ENCODE_H264_CREATE_RESERVED_0_BIT_EXT:
- return "VK_VIDEO_ENCODE_H264_CREATE_RESERVED_0_BIT_EXT";
+ case VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_DYADIC_EXT:
+ return "VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_DYADIC_EXT";
+ case VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_FLAT_EXT:
+ return "VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_FLAT_EXT";
+ case VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT:
+ return "VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT";
default:
- return "Unhandled VkVideoEncodeH264CreateFlagBitsEXT";
- }
-}
-
-static inline std::string string_VkVideoEncodeH264CreateFlagsEXT(VkVideoEncodeH264CreateFlagsEXT input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkVideoEncodeH264CreateFlagBitsEXT(static_cast<VkVideoEncodeH264CreateFlagBitsEXT>(1U << index)));
- }
- ++index;
- input_value >>= 1;
+ return "Unhandled VkVideoEncodeH264RateControlStructureEXT";
}
- if( ret.empty()) ret.append(string_VkVideoEncodeH264CreateFlagBitsEXT(static_cast<VkVideoEncodeH264CreateFlagBitsEXT>(0)));
- return ret;
}
#endif // VK_ENABLE_BETA_EXTENSIONS
#ifdef VK_ENABLE_BETA_EXTENSIONS
-static inline const char* string_VkVideoEncodeH264RateControlStructureFlagBitsEXT(VkVideoEncodeH264RateControlStructureFlagBitsEXT input_value)
+static inline const char* string_VkVideoEncodeH265CapabilityFlagBitsEXT(VkVideoEncodeH265CapabilityFlagBitsEXT input_value)
{
switch (input_value)
{
- case VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_DYADIC_BIT_EXT:
- return "VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_DYADIC_BIT_EXT";
- case VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_FLAT_BIT_EXT:
- return "VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_FLAT_BIT_EXT";
- case VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT:
- return "VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT";
+ case VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_CAPABILITY_DEBLOCKING_FILTER_OVERRIDE_ENABLED_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_CAPABILITY_DEBLOCKING_FILTER_OVERRIDE_ENABLED_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_CAPABILITY_DEPENDENT_SLICE_SEGMENT_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_CAPABILITY_DEPENDENT_SLICE_SEGMENT_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_CAPABILITY_ENTROPY_CODING_SYNC_ENABLED_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_CAPABILITY_ENTROPY_CODING_SYNC_ENABLED_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_CAPABILITY_HRD_COMPLIANCE_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_CAPABILITY_HRD_COMPLIANCE_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_CAPABILITY_INIT_QP_MINUS26_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_CAPABILITY_INIT_QP_MINUS26_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_CAPABILITY_LOG2_PARALLEL_MERGE_LEVEL_MINUS2_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_CAPABILITY_LOG2_PARALLEL_MERGE_LEVEL_MINUS2_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_SLICE_PER_TILE_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_SLICE_PER_TILE_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILE_PER_FRAME_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILE_PER_FRAME_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILE_PER_SLICE_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILE_PER_SLICE_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_CAPABILITY_PCM_ENABLE_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_CAPABILITY_PCM_ENABLE_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_CAPABILITY_PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_CAPABILITY_PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_CAPABILITY_ROW_UNALIGNED_SLICE_SEGMENT_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_CAPABILITY_ROW_UNALIGNED_SLICE_SEGMENT_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_CAPABILITY_SAMPLE_ADAPTIVE_OFFSET_ENABLED_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_CAPABILITY_SAMPLE_ADAPTIVE_OFFSET_ENABLED_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_CAPABILITY_SCALING_LISTS_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_CAPABILITY_SCALING_LISTS_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_CAPABILITY_SEPARATE_COLOUR_PLANE_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_CAPABILITY_SEPARATE_COLOUR_PLANE_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_CAPABILITY_SIGN_DATA_HIDING_ENABLED_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_CAPABILITY_SIGN_DATA_HIDING_ENABLED_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_CAPABILITY_SLICE_SEGMENT_CTB_COUNT_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_CAPABILITY_SLICE_SEGMENT_CTB_COUNT_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_CAPABILITY_SPS_TEMPORAL_MVP_ENABLED_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_CAPABILITY_SPS_TEMPORAL_MVP_ENABLED_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSFORM_SKIP_DISABLED_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSFORM_SKIP_DISABLED_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSFORM_SKIP_ENABLED_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSFORM_SKIP_ENABLED_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSQUANT_BYPASS_ENABLED_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSQUANT_BYPASS_ENABLED_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_BIPRED_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_BIPRED_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_PRED_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_PRED_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_PRED_NO_TABLE_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_PRED_NO_TABLE_BIT_EXT";
default:
- return "Unhandled VkVideoEncodeH264RateControlStructureFlagBitsEXT";
+ return "Unhandled VkVideoEncodeH265CapabilityFlagBitsEXT";
}
}
-static inline std::string string_VkVideoEncodeH264RateControlStructureFlagsEXT(VkVideoEncodeH264RateControlStructureFlagsEXT input_value)
+static inline std::string string_VkVideoEncodeH265CapabilityFlagsEXT(VkVideoEncodeH265CapabilityFlagsEXT input_value)
{
std::string ret;
int index = 0;
while(input_value) {
if (input_value & 1) {
if( !ret.empty()) ret.append("|");
- ret.append(string_VkVideoEncodeH264RateControlStructureFlagBitsEXT(static_cast<VkVideoEncodeH264RateControlStructureFlagBitsEXT>(1U << index)));
+ ret.append(string_VkVideoEncodeH265CapabilityFlagBitsEXT(static_cast<VkVideoEncodeH265CapabilityFlagBitsEXT>(1U << index)));
}
++index;
input_value >>= 1;
}
- if( ret.empty()) ret.append(string_VkVideoEncodeH264RateControlStructureFlagBitsEXT(static_cast<VkVideoEncodeH264RateControlStructureFlagBitsEXT>(0)));
+ if( ret.empty()) ret.append(string_VkVideoEncodeH265CapabilityFlagBitsEXT(static_cast<VkVideoEncodeH265CapabilityFlagBitsEXT>(0)));
return ret;
}
#endif // VK_ENABLE_BETA_EXTENSIONS
@@ -7829,8 +8411,8 @@ static inline const char* string_VkVideoEncodeH265InputModeFlagBitsEXT(VkVideoEn
return "VK_VIDEO_ENCODE_H265_INPUT_MODE_FRAME_BIT_EXT";
case VK_VIDEO_ENCODE_H265_INPUT_MODE_NON_VCL_BIT_EXT:
return "VK_VIDEO_ENCODE_H265_INPUT_MODE_NON_VCL_BIT_EXT";
- case VK_VIDEO_ENCODE_H265_INPUT_MODE_SLICE_BIT_EXT:
- return "VK_VIDEO_ENCODE_H265_INPUT_MODE_SLICE_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_INPUT_MODE_SLICE_SEGMENT_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_INPUT_MODE_SLICE_SEGMENT_BIT_EXT";
default:
return "Unhandled VkVideoEncodeH265InputModeFlagBitsEXT";
}
@@ -7864,8 +8446,8 @@ static inline const char* string_VkVideoEncodeH265OutputModeFlagBitsEXT(VkVideoE
return "VK_VIDEO_ENCODE_H265_OUTPUT_MODE_FRAME_BIT_EXT";
case VK_VIDEO_ENCODE_H265_OUTPUT_MODE_NON_VCL_BIT_EXT:
return "VK_VIDEO_ENCODE_H265_OUTPUT_MODE_NON_VCL_BIT_EXT";
- case VK_VIDEO_ENCODE_H265_OUTPUT_MODE_SLICE_BIT_EXT:
- return "VK_VIDEO_ENCODE_H265_OUTPUT_MODE_SLICE_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_OUTPUT_MODE_SLICE_SEGMENT_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_OUTPUT_MODE_SLICE_SEGMENT_BIT_EXT";
default:
return "Unhandled VkVideoEncodeH265OutputModeFlagBitsEXT";
}
@@ -7901,8 +8483,6 @@ static inline const char* string_VkVideoEncodeH265CtbSizeFlagBitsEXT(VkVideoEnco
return "VK_VIDEO_ENCODE_H265_CTB_SIZE_32_BIT_EXT";
case VK_VIDEO_ENCODE_H265_CTB_SIZE_64_BIT_EXT:
return "VK_VIDEO_ENCODE_H265_CTB_SIZE_64_BIT_EXT";
- case VK_VIDEO_ENCODE_H265_CTB_SIZE_8_BIT_EXT:
- return "VK_VIDEO_ENCODE_H265_CTB_SIZE_8_BIT_EXT";
default:
return "Unhandled VkVideoEncodeH265CtbSizeFlagBitsEXT";
}
@@ -7928,34 +8508,36 @@ static inline std::string string_VkVideoEncodeH265CtbSizeFlagsEXT(VkVideoEncodeH
#ifdef VK_ENABLE_BETA_EXTENSIONS
-static inline const char* string_VkVideoEncodeH265RateControlStructureFlagBitsEXT(VkVideoEncodeH265RateControlStructureFlagBitsEXT input_value)
+static inline const char* string_VkVideoEncodeH265TransformBlockSizeFlagBitsEXT(VkVideoEncodeH265TransformBlockSizeFlagBitsEXT input_value)
{
switch (input_value)
{
- case VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_DYADIC_BIT_EXT:
- return "VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_DYADIC_BIT_EXT";
- case VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_FLAT_BIT_EXT:
- return "VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_FLAT_BIT_EXT";
- case VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT:
- return "VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT";
+ case VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_16_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_16_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_32_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_32_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_4_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_4_BIT_EXT";
+ case VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_8_BIT_EXT:
+ return "VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_8_BIT_EXT";
default:
- return "Unhandled VkVideoEncodeH265RateControlStructureFlagBitsEXT";
+ return "Unhandled VkVideoEncodeH265TransformBlockSizeFlagBitsEXT";
}
}
-static inline std::string string_VkVideoEncodeH265RateControlStructureFlagsEXT(VkVideoEncodeH265RateControlStructureFlagsEXT input_value)
+static inline std::string string_VkVideoEncodeH265TransformBlockSizeFlagsEXT(VkVideoEncodeH265TransformBlockSizeFlagsEXT input_value)
{
std::string ret;
int index = 0;
while(input_value) {
if (input_value & 1) {
if( !ret.empty()) ret.append("|");
- ret.append(string_VkVideoEncodeH265RateControlStructureFlagBitsEXT(static_cast<VkVideoEncodeH265RateControlStructureFlagBitsEXT>(1U << index)));
+ ret.append(string_VkVideoEncodeH265TransformBlockSizeFlagBitsEXT(static_cast<VkVideoEncodeH265TransformBlockSizeFlagBitsEXT>(1U << index)));
}
++index;
input_value >>= 1;
}
- if( ret.empty()) ret.append(string_VkVideoEncodeH265RateControlStructureFlagBitsEXT(static_cast<VkVideoEncodeH265RateControlStructureFlagBitsEXT>(0)));
+ if( ret.empty()) ret.append(string_VkVideoEncodeH265TransformBlockSizeFlagBitsEXT(static_cast<VkVideoEncodeH265TransformBlockSizeFlagBitsEXT>(0)));
return ret;
}
#endif // VK_ENABLE_BETA_EXTENSIONS
@@ -7963,6 +8545,25 @@ static inline std::string string_VkVideoEncodeH265RateControlStructureFlagsEXT(V
#ifdef VK_ENABLE_BETA_EXTENSIONS
+static inline const char* string_VkVideoEncodeH265RateControlStructureEXT(VkVideoEncodeH265RateControlStructureEXT input_value)
+{
+ switch (input_value)
+ {
+ case VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_DYADIC_EXT:
+ return "VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_DYADIC_EXT";
+ case VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_FLAT_EXT:
+ return "VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_FLAT_EXT";
+ case VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT:
+ return "VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT";
+ default:
+ return "Unhandled VkVideoEncodeH265RateControlStructureEXT";
+ }
+}
+#endif // VK_ENABLE_BETA_EXTENSIONS
+
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+
static inline const char* string_VkVideoDecodeH264PictureLayoutFlagBitsEXT(VkVideoDecodeH264PictureLayoutFlagBitsEXT input_value)
{
switch (input_value)
@@ -8087,6 +8688,40 @@ static inline const char* string_VkValidationCheckEXT(VkValidationCheckEXT input
}
}
+static inline const char* string_VkPipelineRobustnessBufferBehaviorEXT(VkPipelineRobustnessBufferBehaviorEXT input_value)
+{
+ switch (input_value)
+ {
+ case VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT_EXT:
+ return "VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT_EXT";
+ case VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED_EXT:
+ return "VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED_EXT";
+ case VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2_EXT:
+ return "VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2_EXT";
+ case VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT:
+ return "VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT";
+ default:
+ return "Unhandled VkPipelineRobustnessBufferBehaviorEXT";
+ }
+}
+
+static inline const char* string_VkPipelineRobustnessImageBehaviorEXT(VkPipelineRobustnessImageBehaviorEXT input_value)
+{
+ switch (input_value)
+ {
+ case VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT_EXT:
+ return "VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT_EXT";
+ case VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED_EXT:
+ return "VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED_EXT";
+ case VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2_EXT:
+ return "VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2_EXT";
+ case VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_EXT:
+ return "VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_EXT";
+ default:
+ return "Unhandled VkPipelineRobustnessImageBehaviorEXT";
+ }
+}
+
static inline const char* string_VkConditionalRenderingFlagBitsEXT(VkConditionalRenderingFlagBitsEXT input_value)
{
switch (input_value)
@@ -8268,6 +8903,8 @@ static inline const char* string_VkDebugUtilsMessageTypeFlagBitsEXT(VkDebugUtils
{
switch (input_value)
{
+ case VK_DEBUG_UTILS_MESSAGE_TYPE_DEVICE_ADDRESS_BINDING_BIT_EXT:
+ return "VK_DEBUG_UTILS_MESSAGE_TYPE_DEVICE_ADDRESS_BINDING_BIT_EXT";
case VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT:
return "VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT";
case VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT:
@@ -8588,8 +9225,12 @@ static inline const char* string_VkGeometryInstanceFlagBitsKHR(VkGeometryInstanc
{
switch (input_value)
{
+ case VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_EXT:
+ return "VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_EXT";
case VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR:
return "VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR";
+ case VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_EXT:
+ return "VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_EXT";
case VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR:
return "VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR";
case VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR:
@@ -8621,8 +9262,12 @@ static inline const char* string_VkGeometryInstanceFlagBitsNV(VkGeometryInstance
{
switch (input_value)
{
+ case VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_EXT:
+ return "VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_EXT";
case VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR:
return "VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR";
+ case VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_EXT:
+ return "VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_EXT";
case VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR:
return "VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR";
case VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR:
@@ -8656,6 +9301,12 @@ static inline const char* string_VkBuildAccelerationStructureFlagBitsKHR(VkBuild
{
case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR:
return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR";
+ case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_EXT:
+ return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_EXT";
+ case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT:
+ return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT";
+ case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_EXT:
+ return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_EXT";
case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR:
return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR";
case VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR:
@@ -8693,6 +9344,12 @@ static inline const char* string_VkBuildAccelerationStructureFlagBitsNV(VkBuildA
{
case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR:
return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR";
+ case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_EXT:
+ return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_EXT";
+ case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT:
+ return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT";
+ case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_EXT:
+ return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_EXT";
case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR:
return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR";
case VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR:
@@ -9148,6 +9805,8 @@ static inline const char* string_VkIndirectCommandsTokenTypeNV(VkIndirectCommand
{
case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV:
return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV";
+ case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV:
+ return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV";
case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV:
return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV";
case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV:
@@ -9227,6 +9886,8 @@ static inline const char* string_VkDeviceDiagnosticsConfigFlagBitsNV(VkDeviceDia
return "VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV";
case VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV:
return "VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV";
+ case VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_ERROR_REPORTING_BIT_NV:
+ return "VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_ERROR_REPORTING_BIT_NV";
default:
return "Unhandled VkDeviceDiagnosticsConfigFlagBitsNV";
}
@@ -9248,6 +9909,80 @@ static inline std::string string_VkDeviceDiagnosticsConfigFlagsNV(VkDeviceDiagno
return ret;
}
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+
+static inline const char* string_VkExportMetalObjectTypeFlagBitsEXT(VkExportMetalObjectTypeFlagBitsEXT input_value)
+{
+ switch (input_value)
+ {
+ case VK_EXPORT_METAL_OBJECT_TYPE_METAL_BUFFER_BIT_EXT:
+ return "VK_EXPORT_METAL_OBJECT_TYPE_METAL_BUFFER_BIT_EXT";
+ case VK_EXPORT_METAL_OBJECT_TYPE_METAL_COMMAND_QUEUE_BIT_EXT:
+ return "VK_EXPORT_METAL_OBJECT_TYPE_METAL_COMMAND_QUEUE_BIT_EXT";
+ case VK_EXPORT_METAL_OBJECT_TYPE_METAL_DEVICE_BIT_EXT:
+ return "VK_EXPORT_METAL_OBJECT_TYPE_METAL_DEVICE_BIT_EXT";
+ case VK_EXPORT_METAL_OBJECT_TYPE_METAL_IOSURFACE_BIT_EXT:
+ return "VK_EXPORT_METAL_OBJECT_TYPE_METAL_IOSURFACE_BIT_EXT";
+ case VK_EXPORT_METAL_OBJECT_TYPE_METAL_SHARED_EVENT_BIT_EXT:
+ return "VK_EXPORT_METAL_OBJECT_TYPE_METAL_SHARED_EVENT_BIT_EXT";
+ case VK_EXPORT_METAL_OBJECT_TYPE_METAL_TEXTURE_BIT_EXT:
+ return "VK_EXPORT_METAL_OBJECT_TYPE_METAL_TEXTURE_BIT_EXT";
+ default:
+ return "Unhandled VkExportMetalObjectTypeFlagBitsEXT";
+ }
+}
+
+static inline std::string string_VkExportMetalObjectTypeFlagsEXT(VkExportMetalObjectTypeFlagsEXT input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkExportMetalObjectTypeFlagBitsEXT(static_cast<VkExportMetalObjectTypeFlagBitsEXT>(1U << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkExportMetalObjectTypeFlagBitsEXT(static_cast<VkExportMetalObjectTypeFlagBitsEXT>(0)));
+ return ret;
+}
+#endif // VK_USE_PLATFORM_METAL_EXT
+
+static inline const char* string_VkGraphicsPipelineLibraryFlagBitsEXT(VkGraphicsPipelineLibraryFlagBitsEXT input_value)
+{
+ switch (input_value)
+ {
+ case VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT:
+ return "VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT";
+ case VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT:
+ return "VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT";
+ case VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT:
+ return "VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT";
+ case VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT:
+ return "VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT";
+ default:
+ return "Unhandled VkGraphicsPipelineLibraryFlagBitsEXT";
+ }
+}
+
+static inline std::string string_VkGraphicsPipelineLibraryFlagsEXT(VkGraphicsPipelineLibraryFlagsEXT input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkGraphicsPipelineLibraryFlagBitsEXT(static_cast<VkGraphicsPipelineLibraryFlagBitsEXT>(1U << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkGraphicsPipelineLibraryFlagBitsEXT(static_cast<VkGraphicsPipelineLibraryFlagBitsEXT>(0)));
+ return ret;
+}
+
static inline const char* string_VkFragmentShadingRateTypeNV(VkFragmentShadingRateTypeNV input_value)
{
switch (input_value)
@@ -9309,6 +10044,188 @@ static inline const char* string_VkAccelerationStructureMotionInstanceTypeNV(VkA
}
}
+static inline const char* string_VkImageCompressionFlagBitsEXT(VkImageCompressionFlagBitsEXT input_value)
+{
+ switch (input_value)
+ {
+ case VK_IMAGE_COMPRESSION_DEFAULT_EXT:
+ return "VK_IMAGE_COMPRESSION_DEFAULT_EXT";
+ case VK_IMAGE_COMPRESSION_DISABLED_EXT:
+ return "VK_IMAGE_COMPRESSION_DISABLED_EXT";
+ case VK_IMAGE_COMPRESSION_FIXED_RATE_DEFAULT_EXT:
+ return "VK_IMAGE_COMPRESSION_FIXED_RATE_DEFAULT_EXT";
+ case VK_IMAGE_COMPRESSION_FIXED_RATE_EXPLICIT_EXT:
+ return "VK_IMAGE_COMPRESSION_FIXED_RATE_EXPLICIT_EXT";
+ default:
+ return "Unhandled VkImageCompressionFlagBitsEXT";
+ }
+}
+
+static inline std::string string_VkImageCompressionFlagsEXT(VkImageCompressionFlagsEXT input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkImageCompressionFlagBitsEXT(static_cast<VkImageCompressionFlagBitsEXT>(1U << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkImageCompressionFlagBitsEXT(static_cast<VkImageCompressionFlagBitsEXT>(0)));
+ return ret;
+}
+
+static inline const char* string_VkImageCompressionFixedRateFlagBitsEXT(VkImageCompressionFixedRateFlagBitsEXT input_value)
+{
+ switch (input_value)
+ {
+ case VK_IMAGE_COMPRESSION_FIXED_RATE_10BPC_BIT_EXT:
+ return "VK_IMAGE_COMPRESSION_FIXED_RATE_10BPC_BIT_EXT";
+ case VK_IMAGE_COMPRESSION_FIXED_RATE_11BPC_BIT_EXT:
+ return "VK_IMAGE_COMPRESSION_FIXED_RATE_11BPC_BIT_EXT";
+ case VK_IMAGE_COMPRESSION_FIXED_RATE_12BPC_BIT_EXT:
+ return "VK_IMAGE_COMPRESSION_FIXED_RATE_12BPC_BIT_EXT";
+ case VK_IMAGE_COMPRESSION_FIXED_RATE_13BPC_BIT_EXT:
+ return "VK_IMAGE_COMPRESSION_FIXED_RATE_13BPC_BIT_EXT";
+ case VK_IMAGE_COMPRESSION_FIXED_RATE_14BPC_BIT_EXT:
+ return "VK_IMAGE_COMPRESSION_FIXED_RATE_14BPC_BIT_EXT";
+ case VK_IMAGE_COMPRESSION_FIXED_RATE_15BPC_BIT_EXT:
+ return "VK_IMAGE_COMPRESSION_FIXED_RATE_15BPC_BIT_EXT";
+ case VK_IMAGE_COMPRESSION_FIXED_RATE_16BPC_BIT_EXT:
+ return "VK_IMAGE_COMPRESSION_FIXED_RATE_16BPC_BIT_EXT";
+ case VK_IMAGE_COMPRESSION_FIXED_RATE_17BPC_BIT_EXT:
+ return "VK_IMAGE_COMPRESSION_FIXED_RATE_17BPC_BIT_EXT";
+ case VK_IMAGE_COMPRESSION_FIXED_RATE_18BPC_BIT_EXT:
+ return "VK_IMAGE_COMPRESSION_FIXED_RATE_18BPC_BIT_EXT";
+ case VK_IMAGE_COMPRESSION_FIXED_RATE_19BPC_BIT_EXT:
+ return "VK_IMAGE_COMPRESSION_FIXED_RATE_19BPC_BIT_EXT";
+ case VK_IMAGE_COMPRESSION_FIXED_RATE_1BPC_BIT_EXT:
+ return "VK_IMAGE_COMPRESSION_FIXED_RATE_1BPC_BIT_EXT";
+ case VK_IMAGE_COMPRESSION_FIXED_RATE_20BPC_BIT_EXT:
+ return "VK_IMAGE_COMPRESSION_FIXED_RATE_20BPC_BIT_EXT";
+ case VK_IMAGE_COMPRESSION_FIXED_RATE_21BPC_BIT_EXT:
+ return "VK_IMAGE_COMPRESSION_FIXED_RATE_21BPC_BIT_EXT";
+ case VK_IMAGE_COMPRESSION_FIXED_RATE_22BPC_BIT_EXT:
+ return "VK_IMAGE_COMPRESSION_FIXED_RATE_22BPC_BIT_EXT";
+ case VK_IMAGE_COMPRESSION_FIXED_RATE_23BPC_BIT_EXT:
+ return "VK_IMAGE_COMPRESSION_FIXED_RATE_23BPC_BIT_EXT";
+ case VK_IMAGE_COMPRESSION_FIXED_RATE_24BPC_BIT_EXT:
+ return "VK_IMAGE_COMPRESSION_FIXED_RATE_24BPC_BIT_EXT";
+ case VK_IMAGE_COMPRESSION_FIXED_RATE_2BPC_BIT_EXT:
+ return "VK_IMAGE_COMPRESSION_FIXED_RATE_2BPC_BIT_EXT";
+ case VK_IMAGE_COMPRESSION_FIXED_RATE_3BPC_BIT_EXT:
+ return "VK_IMAGE_COMPRESSION_FIXED_RATE_3BPC_BIT_EXT";
+ case VK_IMAGE_COMPRESSION_FIXED_RATE_4BPC_BIT_EXT:
+ return "VK_IMAGE_COMPRESSION_FIXED_RATE_4BPC_BIT_EXT";
+ case VK_IMAGE_COMPRESSION_FIXED_RATE_5BPC_BIT_EXT:
+ return "VK_IMAGE_COMPRESSION_FIXED_RATE_5BPC_BIT_EXT";
+ case VK_IMAGE_COMPRESSION_FIXED_RATE_6BPC_BIT_EXT:
+ return "VK_IMAGE_COMPRESSION_FIXED_RATE_6BPC_BIT_EXT";
+ case VK_IMAGE_COMPRESSION_FIXED_RATE_7BPC_BIT_EXT:
+ return "VK_IMAGE_COMPRESSION_FIXED_RATE_7BPC_BIT_EXT";
+ case VK_IMAGE_COMPRESSION_FIXED_RATE_8BPC_BIT_EXT:
+ return "VK_IMAGE_COMPRESSION_FIXED_RATE_8BPC_BIT_EXT";
+ case VK_IMAGE_COMPRESSION_FIXED_RATE_9BPC_BIT_EXT:
+ return "VK_IMAGE_COMPRESSION_FIXED_RATE_9BPC_BIT_EXT";
+ case VK_IMAGE_COMPRESSION_FIXED_RATE_NONE_EXT:
+ return "VK_IMAGE_COMPRESSION_FIXED_RATE_NONE_EXT";
+ default:
+ return "Unhandled VkImageCompressionFixedRateFlagBitsEXT";
+ }
+}
+
+static inline std::string string_VkImageCompressionFixedRateFlagsEXT(VkImageCompressionFixedRateFlagsEXT input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkImageCompressionFixedRateFlagBitsEXT(static_cast<VkImageCompressionFixedRateFlagBitsEXT>(1U << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkImageCompressionFixedRateFlagBitsEXT(static_cast<VkImageCompressionFixedRateFlagBitsEXT>(0)));
+ return ret;
+}
+
+static inline const char* string_VkDeviceFaultAddressTypeEXT(VkDeviceFaultAddressTypeEXT input_value)
+{
+ switch (input_value)
+ {
+ case VK_DEVICE_FAULT_ADDRESS_TYPE_EXECUTE_INVALID_EXT:
+ return "VK_DEVICE_FAULT_ADDRESS_TYPE_EXECUTE_INVALID_EXT";
+ case VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_FAULT_EXT:
+ return "VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_FAULT_EXT";
+ case VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_INVALID_EXT:
+ return "VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_INVALID_EXT";
+ case VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_UNKNOWN_EXT:
+ return "VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_UNKNOWN_EXT";
+ case VK_DEVICE_FAULT_ADDRESS_TYPE_NONE_EXT:
+ return "VK_DEVICE_FAULT_ADDRESS_TYPE_NONE_EXT";
+ case VK_DEVICE_FAULT_ADDRESS_TYPE_READ_INVALID_EXT:
+ return "VK_DEVICE_FAULT_ADDRESS_TYPE_READ_INVALID_EXT";
+ case VK_DEVICE_FAULT_ADDRESS_TYPE_WRITE_INVALID_EXT:
+ return "VK_DEVICE_FAULT_ADDRESS_TYPE_WRITE_INVALID_EXT";
+ default:
+ return "Unhandled VkDeviceFaultAddressTypeEXT";
+ }
+}
+
+static inline const char* string_VkDeviceFaultVendorBinaryHeaderVersionEXT(VkDeviceFaultVendorBinaryHeaderVersionEXT input_value)
+{
+ switch (input_value)
+ {
+ case VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_ONE_EXT:
+ return "VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_ONE_EXT";
+ default:
+ return "Unhandled VkDeviceFaultVendorBinaryHeaderVersionEXT";
+ }
+}
+
+static inline const char* string_VkDeviceAddressBindingFlagBitsEXT(VkDeviceAddressBindingFlagBitsEXT input_value)
+{
+ switch (input_value)
+ {
+ case VK_DEVICE_ADDRESS_BINDING_INTERNAL_OBJECT_BIT_EXT:
+ return "VK_DEVICE_ADDRESS_BINDING_INTERNAL_OBJECT_BIT_EXT";
+ default:
+ return "Unhandled VkDeviceAddressBindingFlagBitsEXT";
+ }
+}
+
+static inline std::string string_VkDeviceAddressBindingFlagsEXT(VkDeviceAddressBindingFlagsEXT input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkDeviceAddressBindingFlagBitsEXT(static_cast<VkDeviceAddressBindingFlagBitsEXT>(1U << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkDeviceAddressBindingFlagBitsEXT(static_cast<VkDeviceAddressBindingFlagBitsEXT>(0)));
+ return ret;
+}
+
+static inline const char* string_VkDeviceAddressBindingTypeEXT(VkDeviceAddressBindingTypeEXT input_value)
+{
+ switch (input_value)
+ {
+ case VK_DEVICE_ADDRESS_BINDING_TYPE_BIND_EXT:
+ return "VK_DEVICE_ADDRESS_BINDING_TYPE_BIND_EXT";
+ case VK_DEVICE_ADDRESS_BINDING_TYPE_UNBIND_EXT:
+ return "VK_DEVICE_ADDRESS_BINDING_TYPE_UNBIND_EXT";
+ default:
+ return "Unhandled VkDeviceAddressBindingTypeEXT";
+ }
+}
+
#ifdef VK_USE_PLATFORM_FUCHSIA
@@ -9348,16 +10265,143 @@ static inline std::string string_VkImageConstraintsInfoFlagsFUCHSIA(VkImageConst
}
#endif // VK_USE_PLATFORM_FUCHSIA
-static inline const char* string_VkBuildAccelerationStructureModeKHR(VkBuildAccelerationStructureModeKHR input_value)
+static inline const char* string_VkMicromapTypeEXT(VkMicromapTypeEXT input_value)
{
switch (input_value)
{
- case VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR:
- return "VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR";
- case VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR:
- return "VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR";
+ case VK_MICROMAP_TYPE_OPACITY_MICROMAP_EXT:
+ return "VK_MICROMAP_TYPE_OPACITY_MICROMAP_EXT";
default:
- return "Unhandled VkBuildAccelerationStructureModeKHR";
+ return "Unhandled VkMicromapTypeEXT";
+ }
+}
+
+static inline const char* string_VkBuildMicromapFlagBitsEXT(VkBuildMicromapFlagBitsEXT input_value)
+{
+ switch (input_value)
+ {
+ case VK_BUILD_MICROMAP_ALLOW_COMPACTION_BIT_EXT:
+ return "VK_BUILD_MICROMAP_ALLOW_COMPACTION_BIT_EXT";
+ case VK_BUILD_MICROMAP_PREFER_FAST_BUILD_BIT_EXT:
+ return "VK_BUILD_MICROMAP_PREFER_FAST_BUILD_BIT_EXT";
+ case VK_BUILD_MICROMAP_PREFER_FAST_TRACE_BIT_EXT:
+ return "VK_BUILD_MICROMAP_PREFER_FAST_TRACE_BIT_EXT";
+ default:
+ return "Unhandled VkBuildMicromapFlagBitsEXT";
+ }
+}
+
+static inline std::string string_VkBuildMicromapFlagsEXT(VkBuildMicromapFlagsEXT input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkBuildMicromapFlagBitsEXT(static_cast<VkBuildMicromapFlagBitsEXT>(1U << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkBuildMicromapFlagBitsEXT(static_cast<VkBuildMicromapFlagBitsEXT>(0)));
+ return ret;
+}
+
+static inline const char* string_VkBuildMicromapModeEXT(VkBuildMicromapModeEXT input_value)
+{
+ switch (input_value)
+ {
+ case VK_BUILD_MICROMAP_MODE_BUILD_EXT:
+ return "VK_BUILD_MICROMAP_MODE_BUILD_EXT";
+ default:
+ return "Unhandled VkBuildMicromapModeEXT";
+ }
+}
+
+static inline const char* string_VkMicromapCreateFlagBitsEXT(VkMicromapCreateFlagBitsEXT input_value)
+{
+ switch (input_value)
+ {
+ case VK_MICROMAP_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT:
+ return "VK_MICROMAP_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT";
+ default:
+ return "Unhandled VkMicromapCreateFlagBitsEXT";
+ }
+}
+
+static inline std::string string_VkMicromapCreateFlagsEXT(VkMicromapCreateFlagsEXT input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkMicromapCreateFlagBitsEXT(static_cast<VkMicromapCreateFlagBitsEXT>(1U << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkMicromapCreateFlagBitsEXT(static_cast<VkMicromapCreateFlagBitsEXT>(0)));
+ return ret;
+}
+
+static inline const char* string_VkCopyMicromapModeEXT(VkCopyMicromapModeEXT input_value)
+{
+ switch (input_value)
+ {
+ case VK_COPY_MICROMAP_MODE_CLONE_EXT:
+ return "VK_COPY_MICROMAP_MODE_CLONE_EXT";
+ case VK_COPY_MICROMAP_MODE_COMPACT_EXT:
+ return "VK_COPY_MICROMAP_MODE_COMPACT_EXT";
+ case VK_COPY_MICROMAP_MODE_DESERIALIZE_EXT:
+ return "VK_COPY_MICROMAP_MODE_DESERIALIZE_EXT";
+ case VK_COPY_MICROMAP_MODE_SERIALIZE_EXT:
+ return "VK_COPY_MICROMAP_MODE_SERIALIZE_EXT";
+ default:
+ return "Unhandled VkCopyMicromapModeEXT";
+ }
+}
+
+static inline const char* string_VkOpacityMicromapFormatEXT(VkOpacityMicromapFormatEXT input_value)
+{
+ switch (input_value)
+ {
+ case VK_OPACITY_MICROMAP_FORMAT_2_STATE_EXT:
+ return "VK_OPACITY_MICROMAP_FORMAT_2_STATE_EXT";
+ case VK_OPACITY_MICROMAP_FORMAT_4_STATE_EXT:
+ return "VK_OPACITY_MICROMAP_FORMAT_4_STATE_EXT";
+ default:
+ return "Unhandled VkOpacityMicromapFormatEXT";
+ }
+}
+
+static inline const char* string_VkOpacityMicromapSpecialIndexEXT(VkOpacityMicromapSpecialIndexEXT input_value)
+{
+ switch (input_value)
+ {
+ case VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_OPAQUE_EXT:
+ return "VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_OPAQUE_EXT";
+ case VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_TRANSPARENT_EXT:
+ return "VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_TRANSPARENT_EXT";
+ case VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_OPAQUE_EXT:
+ return "VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_OPAQUE_EXT";
+ case VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_TRANSPARENT_EXT:
+ return "VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_TRANSPARENT_EXT";
+ default:
+ return "Unhandled VkOpacityMicromapSpecialIndexEXT";
+ }
+}
+
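
The micromap helpers added above are plain value-to-name functions plus one Flags wrapper. A hedged logging sketch; the VkMicromapBuildInfoEXT member names (type, mode, flags) come from the VK_EXT_opacity_micromap extension, not from this diff:

    #include <cstdio>
    // Sketch only: print the enum fields of a micromap build description with the
    // generated helpers. Member names are an assumption based on VK_EXT_opacity_micromap.
    static void log_micromap_build(const VkMicromapBuildInfoEXT& info)
    {
        printf("micromap build: type=%s mode=%s flags=%s\n",
               string_VkMicromapTypeEXT(info.type),
               string_VkBuildMicromapModeEXT(info.mode),
               string_VkBuildMicromapFlagsEXT(info.flags).c_str());
    }
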
+static inline const char* string_VkAccelerationStructureCompatibilityKHR(VkAccelerationStructureCompatibilityKHR input_value)
+{
+ switch (input_value)
+ {
+ case VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR:
+ return "VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR";
+ case VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR:
+ return "VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR";
+ default:
+ return "Unhandled VkAccelerationStructureCompatibilityKHR";
}
}
@@ -9376,6 +10420,234 @@ static inline const char* string_VkAccelerationStructureBuildTypeKHR(VkAccelerat
}
}
+static inline const char* string_VkSubpassMergeStatusEXT(VkSubpassMergeStatusEXT input_value)
+{
+ switch (input_value)
+ {
+ case VK_SUBPASS_MERGE_STATUS_DISALLOWED_EXT:
+ return "VK_SUBPASS_MERGE_STATUS_DISALLOWED_EXT";
+ case VK_SUBPASS_MERGE_STATUS_MERGED_EXT:
+ return "VK_SUBPASS_MERGE_STATUS_MERGED_EXT";
+ case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_ALIASING_EXT:
+ return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_ALIASING_EXT";
+ case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPENDENCIES_EXT:
+ return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPENDENCIES_EXT";
+ case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPTH_STENCIL_COUNT_EXT:
+ return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPTH_STENCIL_COUNT_EXT";
+ case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INCOMPATIBLE_INPUT_ATTACHMENT_EXT:
+ return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INCOMPATIBLE_INPUT_ATTACHMENT_EXT";
+ case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INSUFFICIENT_STORAGE_EXT:
+ return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INSUFFICIENT_STORAGE_EXT";
+ case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_RESOLVE_ATTACHMENT_REUSE_EXT:
+ return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_RESOLVE_ATTACHMENT_REUSE_EXT";
+ case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SAMPLES_MISMATCH_EXT:
+ return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SAMPLES_MISMATCH_EXT";
+ case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SIDE_EFFECTS_EXT:
+ return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SIDE_EFFECTS_EXT";
+ case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SINGLE_SUBPASS_EXT:
+ return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SINGLE_SUBPASS_EXT";
+ case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_TOO_MANY_ATTACHMENTS_EXT:
+ return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_TOO_MANY_ATTACHMENTS_EXT";
+ case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_UNSPECIFIED_EXT:
+ return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_UNSPECIFIED_EXT";
+ case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_VIEWS_MISMATCH_EXT:
+ return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_VIEWS_MISMATCH_EXT";
+ default:
+ return "Unhandled VkSubpassMergeStatusEXT";
+ }
+}
+
+static inline const char* string_VkOpticalFlowGridSizeFlagBitsNV(VkOpticalFlowGridSizeFlagBitsNV input_value)
+{
+ switch (input_value)
+ {
+ case VK_OPTICAL_FLOW_GRID_SIZE_1X1_BIT_NV:
+ return "VK_OPTICAL_FLOW_GRID_SIZE_1X1_BIT_NV";
+ case VK_OPTICAL_FLOW_GRID_SIZE_2X2_BIT_NV:
+ return "VK_OPTICAL_FLOW_GRID_SIZE_2X2_BIT_NV";
+ case VK_OPTICAL_FLOW_GRID_SIZE_4X4_BIT_NV:
+ return "VK_OPTICAL_FLOW_GRID_SIZE_4X4_BIT_NV";
+ case VK_OPTICAL_FLOW_GRID_SIZE_8X8_BIT_NV:
+ return "VK_OPTICAL_FLOW_GRID_SIZE_8X8_BIT_NV";
+ case VK_OPTICAL_FLOW_GRID_SIZE_UNKNOWN_NV:
+ return "VK_OPTICAL_FLOW_GRID_SIZE_UNKNOWN_NV";
+ default:
+ return "Unhandled VkOpticalFlowGridSizeFlagBitsNV";
+ }
+}
+
+static inline std::string string_VkOpticalFlowGridSizeFlagsNV(VkOpticalFlowGridSizeFlagsNV input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkOpticalFlowGridSizeFlagBitsNV(static_cast<VkOpticalFlowGridSizeFlagBitsNV>(1U << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkOpticalFlowGridSizeFlagBitsNV(static_cast<VkOpticalFlowGridSizeFlagBitsNV>(0)));
+ return ret;
+}
+
+static inline const char* string_VkOpticalFlowUsageFlagBitsNV(VkOpticalFlowUsageFlagBitsNV input_value)
+{
+ switch (input_value)
+ {
+ case VK_OPTICAL_FLOW_USAGE_COST_BIT_NV:
+ return "VK_OPTICAL_FLOW_USAGE_COST_BIT_NV";
+ case VK_OPTICAL_FLOW_USAGE_GLOBAL_FLOW_BIT_NV:
+ return "VK_OPTICAL_FLOW_USAGE_GLOBAL_FLOW_BIT_NV";
+ case VK_OPTICAL_FLOW_USAGE_HINT_BIT_NV:
+ return "VK_OPTICAL_FLOW_USAGE_HINT_BIT_NV";
+ case VK_OPTICAL_FLOW_USAGE_INPUT_BIT_NV:
+ return "VK_OPTICAL_FLOW_USAGE_INPUT_BIT_NV";
+ case VK_OPTICAL_FLOW_USAGE_OUTPUT_BIT_NV:
+ return "VK_OPTICAL_FLOW_USAGE_OUTPUT_BIT_NV";
+ case VK_OPTICAL_FLOW_USAGE_UNKNOWN_NV:
+ return "VK_OPTICAL_FLOW_USAGE_UNKNOWN_NV";
+ default:
+ return "Unhandled VkOpticalFlowUsageFlagBitsNV";
+ }
+}
+
+static inline std::string string_VkOpticalFlowUsageFlagsNV(VkOpticalFlowUsageFlagsNV input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkOpticalFlowUsageFlagBitsNV(static_cast<VkOpticalFlowUsageFlagBitsNV>(1U << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkOpticalFlowUsageFlagBitsNV(static_cast<VkOpticalFlowUsageFlagBitsNV>(0)));
+ return ret;
+}
+
+static inline const char* string_VkOpticalFlowPerformanceLevelNV(VkOpticalFlowPerformanceLevelNV input_value)
+{
+ switch (input_value)
+ {
+ case VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_FAST_NV:
+ return "VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_FAST_NV";
+ case VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_MEDIUM_NV:
+ return "VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_MEDIUM_NV";
+ case VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_SLOW_NV:
+ return "VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_SLOW_NV";
+ case VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_UNKNOWN_NV:
+ return "VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_UNKNOWN_NV";
+ default:
+ return "Unhandled VkOpticalFlowPerformanceLevelNV";
+ }
+}
+
+static inline const char* string_VkOpticalFlowSessionBindingPointNV(VkOpticalFlowSessionBindingPointNV input_value)
+{
+ switch (input_value)
+ {
+ case VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_COST_NV:
+ return "VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_COST_NV";
+ case VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_FLOW_VECTOR_NV:
+ return "VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_FLOW_VECTOR_NV";
+ case VK_OPTICAL_FLOW_SESSION_BINDING_POINT_COST_NV:
+ return "VK_OPTICAL_FLOW_SESSION_BINDING_POINT_COST_NV";
+ case VK_OPTICAL_FLOW_SESSION_BINDING_POINT_FLOW_VECTOR_NV:
+ return "VK_OPTICAL_FLOW_SESSION_BINDING_POINT_FLOW_VECTOR_NV";
+ case VK_OPTICAL_FLOW_SESSION_BINDING_POINT_GLOBAL_FLOW_NV:
+ return "VK_OPTICAL_FLOW_SESSION_BINDING_POINT_GLOBAL_FLOW_NV";
+ case VK_OPTICAL_FLOW_SESSION_BINDING_POINT_HINT_NV:
+ return "VK_OPTICAL_FLOW_SESSION_BINDING_POINT_HINT_NV";
+ case VK_OPTICAL_FLOW_SESSION_BINDING_POINT_INPUT_NV:
+ return "VK_OPTICAL_FLOW_SESSION_BINDING_POINT_INPUT_NV";
+ case VK_OPTICAL_FLOW_SESSION_BINDING_POINT_REFERENCE_NV:
+ return "VK_OPTICAL_FLOW_SESSION_BINDING_POINT_REFERENCE_NV";
+ case VK_OPTICAL_FLOW_SESSION_BINDING_POINT_UNKNOWN_NV:
+ return "VK_OPTICAL_FLOW_SESSION_BINDING_POINT_UNKNOWN_NV";
+ default:
+ return "Unhandled VkOpticalFlowSessionBindingPointNV";
+ }
+}
+
+static inline const char* string_VkOpticalFlowSessionCreateFlagBitsNV(VkOpticalFlowSessionCreateFlagBitsNV input_value)
+{
+ switch (input_value)
+ {
+ case VK_OPTICAL_FLOW_SESSION_CREATE_ALLOW_REGIONS_BIT_NV:
+ return "VK_OPTICAL_FLOW_SESSION_CREATE_ALLOW_REGIONS_BIT_NV";
+ case VK_OPTICAL_FLOW_SESSION_CREATE_BOTH_DIRECTIONS_BIT_NV:
+ return "VK_OPTICAL_FLOW_SESSION_CREATE_BOTH_DIRECTIONS_BIT_NV";
+ case VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_COST_BIT_NV:
+ return "VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_COST_BIT_NV";
+ case VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_GLOBAL_FLOW_BIT_NV:
+ return "VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_GLOBAL_FLOW_BIT_NV";
+ case VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_HINT_BIT_NV:
+ return "VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_HINT_BIT_NV";
+ default:
+ return "Unhandled VkOpticalFlowSessionCreateFlagBitsNV";
+ }
+}
+
+static inline std::string string_VkOpticalFlowSessionCreateFlagsNV(VkOpticalFlowSessionCreateFlagsNV input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkOpticalFlowSessionCreateFlagBitsNV(static_cast<VkOpticalFlowSessionCreateFlagBitsNV>(1U << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkOpticalFlowSessionCreateFlagBitsNV(static_cast<VkOpticalFlowSessionCreateFlagBitsNV>(0)));
+ return ret;
+}
+
+static inline const char* string_VkOpticalFlowExecuteFlagBitsNV(VkOpticalFlowExecuteFlagBitsNV input_value)
+{
+ switch (input_value)
+ {
+ case VK_OPTICAL_FLOW_EXECUTE_DISABLE_TEMPORAL_HINTS_BIT_NV:
+ return "VK_OPTICAL_FLOW_EXECUTE_DISABLE_TEMPORAL_HINTS_BIT_NV";
+ default:
+ return "Unhandled VkOpticalFlowExecuteFlagBitsNV";
+ }
+}
+
+static inline std::string string_VkOpticalFlowExecuteFlagsNV(VkOpticalFlowExecuteFlagsNV input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkOpticalFlowExecuteFlagBitsNV(static_cast<VkOpticalFlowExecuteFlagBitsNV>(1U << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkOpticalFlowExecuteFlagBitsNV(static_cast<VkOpticalFlowExecuteFlagBitsNV>(0)));
+ return ret;
+}
+
+static inline const char* string_VkBuildAccelerationStructureModeKHR(VkBuildAccelerationStructureModeKHR input_value)
+{
+ switch (input_value)
+ {
+ case VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR:
+ return "VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR";
+ case VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR:
+ return "VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR";
+ default:
+ return "Unhandled VkBuildAccelerationStructureModeKHR";
+ }
+}
+
static inline const char* string_VkAccelerationStructureCreateFlagBitsKHR(VkAccelerationStructureCreateFlagBitsKHR input_value)
{
switch (input_value)
@@ -9405,19 +10677,6 @@ static inline std::string string_VkAccelerationStructureCreateFlagsKHR(VkAcceler
return ret;
}
-static inline const char* string_VkAccelerationStructureCompatibilityKHR(VkAccelerationStructureCompatibilityKHR input_value)
-{
- switch (input_value)
- {
- case VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR:
- return "VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR";
- case VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR:
- return "VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR";
- default:
- return "Unhandled VkAccelerationStructureCompatibilityKHR";
- }
-}
-
static inline const char* string_VkShaderGroupShaderKHR(VkShaderGroupShaderKHR input_value)
{
switch (input_value)
@@ -9504,6 +10763,13 @@ static inline bool IsDuplicatePnext(VkStructureType input_value)
case VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT:
case VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT:
case VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO:
+ case VK_STRUCTURE_TYPE_EXPORT_METAL_BUFFER_INFO_EXT:
+ case VK_STRUCTURE_TYPE_EXPORT_METAL_COMMAND_QUEUE_INFO_EXT:
+ case VK_STRUCTURE_TYPE_EXPORT_METAL_IO_SURFACE_INFO_EXT:
+ case VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECT_CREATE_INFO_EXT:
+ case VK_STRUCTURE_TYPE_EXPORT_METAL_SHARED_EVENT_INFO_EXT:
+ case VK_STRUCTURE_TYPE_EXPORT_METAL_TEXTURE_INFO_EXT:
+ case VK_STRUCTURE_TYPE_IMPORT_METAL_TEXTURE_INFO_EXT:
return true;
default:
return false;
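
The IsDuplicatePnext() additions whitelist the VK_EXT_metal_objects export/import structures, which may legitimately appear more than once in a single pNext chain (one entry per exported Metal object type). A sketch of the situation this covers; the exportObjectType member and the idea of exporting both a MTLTexture and an IOSurface from one image are taken from that extension, not from this diff:

    // Two VkExportMetalObjectCreateInfoEXT entries chained into one image create
    // info - the kind of legal duplicate the whitelist above accounts for.
    VkExportMetalObjectCreateInfoEXT export_texture = {
        VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECT_CREATE_INFO_EXT, nullptr,
        VK_EXPORT_METAL_OBJECT_TYPE_METAL_TEXTURE_BIT_EXT};
    VkExportMetalObjectCreateInfoEXT export_iosurface = {
        VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECT_CREATE_INFO_EXT, &export_texture,
        VK_EXPORT_METAL_OBJECT_TYPE_METAL_IOSURFACE_BIT_EXT};
    VkImageCreateInfo image_ci = {};
    image_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
    image_ci.pNext = &export_iosurface;  // chain carries both export requests
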
diff --git a/thirdparty/zlib/LICENSE b/thirdparty/zlib/LICENSE
new file mode 100644
index 0000000000..ab8ee6f714
--- /dev/null
+++ b/thirdparty/zlib/LICENSE
@@ -0,0 +1,22 @@
+Copyright notice:
+
+ (C) 1995-2022 Jean-loup Gailly and Mark Adler
+
+ This software is provided 'as-is', without any express or implied
+ warranty. In no event will the authors be held liable for any damages
+ arising from the use of this software.
+
+ Permission is granted to anyone to use this software for any purpose,
+ including commercial applications, and to alter it and redistribute it
+ freely, subject to the following restrictions:
+
+ 1. The origin of this software must not be misrepresented; you must not
+ claim that you wrote the original software. If you use this software
+ in a product, an acknowledgment in the product documentation would be
+ appreciated but is not required.
+ 2. Altered source versions must be plainly marked as such, and must not be
+ misrepresented as being the original software.
+ 3. This notice may not be removed or altered from any source distribution.
+
+ Jean-loup Gailly Mark Adler
+ jloup@gzip.org madler@alumni.caltech.edu
diff --git a/thirdparty/zlib/compress.c b/thirdparty/zlib/compress.c
index e2db404abf..2ad5326c14 100644
--- a/thirdparty/zlib/compress.c
+++ b/thirdparty/zlib/compress.c
@@ -19,7 +19,7 @@
memory, Z_BUF_ERROR if there was not enough room in the output buffer,
Z_STREAM_ERROR if the level parameter is invalid.
*/
-int ZEXPORT compress2 (dest, destLen, source, sourceLen, level)
+int ZEXPORT compress2(dest, destLen, source, sourceLen, level)
Bytef *dest;
uLongf *destLen;
const Bytef *source;
@@ -65,7 +65,7 @@ int ZEXPORT compress2 (dest, destLen, source, sourceLen, level)
/* ===========================================================================
*/
-int ZEXPORT compress (dest, destLen, source, sourceLen)
+int ZEXPORT compress(dest, destLen, source, sourceLen)
Bytef *dest;
uLongf *destLen;
const Bytef *source;
@@ -78,7 +78,7 @@ int ZEXPORT compress (dest, destLen, source, sourceLen)
If the default memLevel or windowBits for deflateInit() is changed, then
this function needs to be updated.
*/
-uLong ZEXPORT compressBound (sourceLen)
+uLong ZEXPORT compressBound(sourceLen)
uLong sourceLen;
{
return sourceLen + (sourceLen >> 12) + (sourceLen >> 14) +
diff --git a/thirdparty/zlib/crc32.c b/thirdparty/zlib/crc32.c
index a1bdce5c23..f8357b083f 100644
--- a/thirdparty/zlib/crc32.c
+++ b/thirdparty/zlib/crc32.c
@@ -98,13 +98,22 @@
# endif
#endif
+/* If available, use the ARM processor CRC32 instruction. */
+#if defined(__aarch64__) && defined(__ARM_FEATURE_CRC32) && W == 8
+# define ARMCRC32
+#endif
+
/* Local functions. */
local z_crc_t multmodp OF((z_crc_t a, z_crc_t b));
local z_crc_t x2nmodp OF((z_off64_t n, unsigned k));
-/* If available, use the ARM processor CRC32 instruction. */
-#if defined(__aarch64__) && defined(__ARM_FEATURE_CRC32) && W == 8
-# define ARMCRC32
+#if defined(W) && (!defined(ARMCRC32) || defined(DYNAMIC_CRC_TABLE))
+ local z_word_t byte_swap OF((z_word_t word));
+#endif
+
+#if defined(W) && !defined(ARMCRC32)
+ local z_crc_t crc_word OF((z_word_t data));
+ local z_word_t crc_word_big OF((z_word_t data));
#endif
#if defined(W) && (!defined(ARMCRC32) || defined(DYNAMIC_CRC_TABLE))
@@ -630,7 +639,7 @@ unsigned long ZEXPORT crc32_z(crc, buf, len)
#endif /* DYNAMIC_CRC_TABLE */
/* Pre-condition the CRC */
- crc ^= 0xffffffff;
+ crc = (~crc) & 0xffffffff;
/* Compute the CRC up to a word boundary. */
while (len && ((z_size_t)buf & 7) != 0) {
@@ -645,8 +654,8 @@ unsigned long ZEXPORT crc32_z(crc, buf, len)
len &= 7;
/* Do three interleaved CRCs to realize the throughput of one crc32x
- instruction per cycle. Each CRC is calcuated on Z_BATCH words. The three
- CRCs are combined into a single CRC after each set of batches. */
+ instruction per cycle. Each CRC is calculated on Z_BATCH words. The
+ three CRCs are combined into a single CRC after each set of batches. */
while (num >= 3 * Z_BATCH) {
crc1 = 0;
crc2 = 0;
@@ -749,7 +758,7 @@ unsigned long ZEXPORT crc32_z(crc, buf, len)
#endif /* DYNAMIC_CRC_TABLE */
/* Pre-condition the CRC */
- crc ^= 0xffffffff;
+ crc = (~crc) & 0xffffffff;
#ifdef W
@@ -1077,7 +1086,7 @@ uLong ZEXPORT crc32_combine64(crc1, crc2, len2)
#ifdef DYNAMIC_CRC_TABLE
once(&made, make_crc_table);
#endif /* DYNAMIC_CRC_TABLE */
- return multmodp(x2nmodp(len2, 3), crc1) ^ crc2;
+ return multmodp(x2nmodp(len2, 3), crc1) ^ (crc2 & 0xffffffff);
}
/* ========================================================================= */
@@ -1086,7 +1095,7 @@ uLong ZEXPORT crc32_combine(crc1, crc2, len2)
uLong crc2;
z_off_t len2;
{
- return crc32_combine64(crc1, crc2, len2);
+ return crc32_combine64(crc1, crc2, (z_off64_t)len2);
}
/* ========================================================================= */
@@ -1103,14 +1112,14 @@ uLong ZEXPORT crc32_combine_gen64(len2)
uLong ZEXPORT crc32_combine_gen(len2)
z_off_t len2;
{
- return crc32_combine_gen64(len2);
+ return crc32_combine_gen64((z_off64_t)len2);
}
/* ========================================================================= */
-uLong crc32_combine_op(crc1, crc2, op)
+uLong ZEXPORT crc32_combine_op(crc1, crc2, op)
uLong crc1;
uLong crc2;
uLong op;
{
- return multmodp(op, crc1) ^ crc2;
+ return multmodp(op, crc1) ^ (crc2 & 0xffffffff);
}
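
The masking with 0xffffffff in the combine functions above matters on LP64 platforms, where uLong is 64 bits wide and a caller-supplied crc2 could carry set bits above bit 31. A minimal usage sketch, assuming zlib is installed and linked with -lz, showing the property these functions provide: the CRC of a concatenation equals the combination of the two partial CRCs.

#include <assert.h>
#include <string.h>
#include <zlib.h>

int main(void) {
    const unsigned char part1[] = "hello, ";
    const unsigned char part2[] = "world";
    const unsigned char whole[] = "hello, world";

    /* CRC-32 of each piece on its own, and of the concatenation. */
    uLong crc1 = crc32(0L, part1, (uInt)strlen((const char *)part1));
    uLong crc2 = crc32(0L, part2, (uInt)strlen((const char *)part2));
    uLong crc_all = crc32(0L, whole, (uInt)strlen((const char *)whole));

    /* Combining needs only the two CRCs and the length of the second piece. */
    assert(crc32_combine(crc1, crc2, (z_off_t)strlen((const char *)part2)) == crc_all);
    return 0;
}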
diff --git a/thirdparty/zlib/deflate.c b/thirdparty/zlib/deflate.c
index 799fb93cc0..4a689db359 100644
--- a/thirdparty/zlib/deflate.c
+++ b/thirdparty/zlib/deflate.c
@@ -52,7 +52,7 @@
#include "deflate.h"
const char deflate_copyright[] =
- " deflate 1.2.12 Copyright 1995-2022 Jean-loup Gailly and Mark Adler ";
+ " deflate 1.2.13 Copyright 1995-2022 Jean-loup Gailly and Mark Adler ";
/*
If you use the zlib library in a product, an acknowledgment is welcome
in the documentation of your product. If for some reason you cannot
@@ -87,13 +87,7 @@ local void lm_init OF((deflate_state *s));
local void putShortMSB OF((deflate_state *s, uInt b));
local void flush_pending OF((z_streamp strm));
local unsigned read_buf OF((z_streamp strm, Bytef *buf, unsigned size));
-#ifdef ASMV
-# pragma message("Assembler code may have bugs -- use at your own risk")
- void match_init OF((void)); /* asm code initialization */
- uInt longest_match OF((deflate_state *s, IPos cur_match));
-#else
local uInt longest_match OF((deflate_state *s, IPos cur_match));
-#endif
#ifdef ZLIB_DEBUG
local void check_match OF((deflate_state *s, IPos start, IPos match,
@@ -160,7 +154,7 @@ local const config configuration_table[10] = {
* characters, so that a running hash key can be computed from the previous
* key instead of complete recalculation each time.
*/
-#define UPDATE_HASH(s,h,c) (h = (((h)<<s->hash_shift) ^ (c)) & s->hash_mask)
+#define UPDATE_HASH(s,h,c) (h = (((h) << s->hash_shift) ^ (c)) & s->hash_mask)
/* ===========================================================================
@@ -191,9 +185,9 @@ local const config configuration_table[10] = {
*/
#define CLEAR_HASH(s) \
do { \
- s->head[s->hash_size-1] = NIL; \
+ s->head[s->hash_size - 1] = NIL; \
zmemzero((Bytef *)s->head, \
- (unsigned)(s->hash_size-1)*sizeof(*s->head)); \
+ (unsigned)(s->hash_size - 1)*sizeof(*s->head)); \
} while (0)
/* ===========================================================================
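
As an aside on the hash macros being reformatted here: UPDATE_HASH is a rolling hash over MIN_MATCH (3) consecutive bytes, and hash_shift is sized so that after three updates the oldest byte has been shifted completely out of hash_mask. A standalone sketch of that behaviour, using made-up constants that mirror the memLevel == 8 defaults rather than the real deflate_state fields:

#include <stdio.h>

#define HASH_BITS  15                      /* memLevel 8 + 7 */
#define HASH_MASK  ((1u << HASH_BITS) - 1)
#define MIN_MATCH  3
#define HASH_SHIFT ((HASH_BITS + MIN_MATCH - 1) / MIN_MATCH)

/* Fold one more byte into the hash of the last MIN_MATCH bytes. */
static unsigned update_hash(unsigned h, unsigned char c) {
    return ((h << HASH_SHIFT) ^ c) & HASH_MASK;
}

int main(void) {
    const unsigned char data[] = "abcabc";
    unsigned h = update_hash(update_hash(0, data[0]), data[1]);

    for (int i = MIN_MATCH - 1; i < 6; i++) {
        h = update_hash(h, data[i]);
        printf("hash of \"%.3s\" = %u\n", (const char *)&data[i - 2], h);
    }
    /* The two "abc" windows (offsets 0 and 3) print the same hash, because
     * three shifts of HASH_SHIFT bits push earlier input past HASH_MASK. */
    return 0;
}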
@@ -285,6 +279,8 @@ int ZEXPORT deflateInit2_(strm, level, method, windowBits, memLevel, strategy,
if (windowBits < 0) { /* suppress zlib wrapper */
wrap = 0;
+ if (windowBits < -15)
+ return Z_STREAM_ERROR;
windowBits = -windowBits;
}
#ifdef GZIP
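
The new guard rejects windowBits below -15 up front instead of negating them. For context, negative windowBits is the documented way to request a raw deflate stream; a short usage sketch, with error handling trimmed and constants chosen for illustration only:

#include <stdio.h>
#include <zlib.h>

int main(void) {
    z_stream strm = {0};   /* zalloc/zfree/opaque left as defaults */
    int ret;

    /* -15: raw deflate (no zlib or gzip wrapper), full 32 KiB window. */
    ret = deflateInit2(&strm, Z_DEFAULT_COMPRESSION, Z_DEFLATED,
                       -15, 8, Z_DEFAULT_STRATEGY);
    printf("windowBits -15: %s\n", ret == Z_OK ? "Z_OK" : "error");
    if (ret == Z_OK)
        deflateEnd(&strm);

    /* Below -15 is out of range; the check above returns Z_STREAM_ERROR. */
    ret = deflateInit2(&strm, Z_DEFAULT_COMPRESSION, Z_DEFLATED,
                       -16, 8, Z_DEFAULT_STRATEGY);
    printf("windowBits -16: %s\n",
           ret == Z_STREAM_ERROR ? "Z_STREAM_ERROR" : "unexpected");
    return 0;
}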
@@ -314,7 +310,7 @@ int ZEXPORT deflateInit2_(strm, level, method, windowBits, memLevel, strategy,
s->hash_bits = (uInt)memLevel + 7;
s->hash_size = 1 << s->hash_bits;
s->hash_mask = s->hash_size - 1;
- s->hash_shift = ((s->hash_bits+MIN_MATCH-1)/MIN_MATCH);
+ s->hash_shift = ((s->hash_bits + MIN_MATCH-1) / MIN_MATCH);
s->window = (Bytef *) ZALLOC(strm, s->w_size, 2*sizeof(Byte));
s->prev = (Posf *) ZALLOC(strm, s->w_size, sizeof(Pos));
@@ -340,11 +336,11 @@ int ZEXPORT deflateInit2_(strm, level, method, windowBits, memLevel, strategy,
* sym_buf value to read moves forward three bytes. From that symbol, up to
* 31 bits are written to pending_buf. The closest the written pending_buf
* bits gets to the next sym_buf symbol to read is just before the last
- * code is written. At that time, 31*(n-2) bits have been written, just
- * after 24*(n-2) bits have been consumed from sym_buf. sym_buf starts at
- * 8*n bits into pending_buf. (Note that the symbol buffer fills when n-1
+ * code is written. At that time, 31*(n - 2) bits have been written, just
+ * after 24*(n - 2) bits have been consumed from sym_buf. sym_buf starts at
+ * 8*n bits into pending_buf. (Note that the symbol buffer fills when n - 1
* symbols are written.) The closest the writing gets to what is unread is
- * then n+14 bits. Here n is lit_bufsize, which is 16384 by default, and
+ * then n + 14 bits. Here n is lit_bufsize, which is 16384 by default, and
* can range from 128 to 32768.
*
* Therefore, at a minimum, there are 142 bits of space between what is
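
Spelling out the arithmetic the rewritten comment leads up to: with n = lit_bufsize, the reader's position in pending_buf is 8*n + 24*(n - 2) bits when the writer has emitted at most 31*(n - 2) bits, so the remaining headroom is

    8*n + 24*(n - 2) - 31*(n - 2) = n + 14 bits.

At the smallest permitted lit_bufsize, n = 128, that gives the 142-bit minimum quoted at the end of this comment block.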
@@ -390,7 +386,7 @@ int ZEXPORT deflateInit2_(strm, level, method, windowBits, memLevel, strategy,
/* =========================================================================
* Check for a valid deflate stream state. Return 0 if ok, 1 if not.
*/
-local int deflateStateCheck (strm)
+local int deflateStateCheck(strm)
z_streamp strm;
{
deflate_state *s;
@@ -413,7 +409,7 @@ local int deflateStateCheck (strm)
}
/* ========================================================================= */
-int ZEXPORT deflateSetDictionary (strm, dictionary, dictLength)
+int ZEXPORT deflateSetDictionary(strm, dictionary, dictLength)
z_streamp strm;
const Bytef *dictionary;
uInt dictLength;
@@ -482,7 +478,7 @@ int ZEXPORT deflateSetDictionary (strm, dictionary, dictLength)
}
/* ========================================================================= */
-int ZEXPORT deflateGetDictionary (strm, dictionary, dictLength)
+int ZEXPORT deflateGetDictionary(strm, dictionary, dictLength)
z_streamp strm;
Bytef *dictionary;
uInt *dictLength;
@@ -504,7 +500,7 @@ int ZEXPORT deflateGetDictionary (strm, dictionary, dictLength)
}
/* ========================================================================= */
-int ZEXPORT deflateResetKeep (strm)
+int ZEXPORT deflateResetKeep(strm)
z_streamp strm;
{
deflate_state *s;
@@ -542,7 +538,7 @@ int ZEXPORT deflateResetKeep (strm)
}
/* ========================================================================= */
-int ZEXPORT deflateReset (strm)
+int ZEXPORT deflateReset(strm)
z_streamp strm;
{
int ret;
@@ -554,7 +550,7 @@ int ZEXPORT deflateReset (strm)
}
/* ========================================================================= */
-int ZEXPORT deflateSetHeader (strm, head)
+int ZEXPORT deflateSetHeader(strm, head)
z_streamp strm;
gz_headerp head;
{
@@ -565,7 +561,7 @@ int ZEXPORT deflateSetHeader (strm, head)
}
/* ========================================================================= */
-int ZEXPORT deflatePending (strm, pending, bits)
+int ZEXPORT deflatePending(strm, pending, bits)
unsigned *pending;
int *bits;
z_streamp strm;
@@ -579,7 +575,7 @@ int ZEXPORT deflatePending (strm, pending, bits)
}
/* ========================================================================= */
-int ZEXPORT deflatePrime (strm, bits, value)
+int ZEXPORT deflatePrime(strm, bits, value)
z_streamp strm;
int bits;
int value;
@@ -674,36 +670,50 @@ int ZEXPORT deflateTune(strm, good_length, max_lazy, nice_length, max_chain)
}
/* =========================================================================
- * For the default windowBits of 15 and memLevel of 8, this function returns
- * a close to exact, as well as small, upper bound on the compressed size.
- * They are coded as constants here for a reason--if the #define's are
- * changed, then this function needs to be changed as well. The return
- * value for 15 and 8 only works for those exact settings.
+ * For the default windowBits of 15 and memLevel of 8, this function returns a
+ * close to exact, as well as small, upper bound on the compressed size. This
+ * is an expansion of ~0.03%, plus a small constant.
+ *
+ * For any setting other than those defaults for windowBits and memLevel, one
+ * of two worst case bounds is returned. This is at most an expansion of ~4% or
+ * ~13%, plus a small constant.
*
- * For any setting other than those defaults for windowBits and memLevel,
- * the value returned is a conservative worst case for the maximum expansion
- * resulting from using fixed blocks instead of stored blocks, which deflate
- * can emit on compressed data for some combinations of the parameters.
+ * Both the 0.03% and 4% derive from the overhead of stored blocks. The first
+ * one is for stored blocks of 16383 bytes (memLevel == 8), whereas the second
+ * is for stored blocks of 127 bytes (the worst case memLevel == 1). The
+ * expansion results from five bytes of header for each stored block.
*
- * This function could be more sophisticated to provide closer upper bounds for
- * every combination of windowBits and memLevel. But even the conservative
- * upper bound of about 14% expansion does not seem onerous for output buffer
- * allocation.
+ * The larger expansion of 13% results from a window size less than or equal to
+ * the symbols buffer size (windowBits <= memLevel + 7). In that case some of
+ * the data being compressed may have slid out of the sliding window, impeding
+ * a stored block from being emitted. Then the only choice is a fixed or
+ * dynamic block, where a fixed block limits the maximum expansion to 9 bits
+ * per 8-bit byte, plus 10 bits for every block. The smallest block size for
+ * which this can occur is 255 (memLevel == 2).
+ *
+ * Shifts are used to approximate divisions, for speed.
*/
uLong ZEXPORT deflateBound(strm, sourceLen)
z_streamp strm;
uLong sourceLen;
{
deflate_state *s;
- uLong complen, wraplen;
+ uLong fixedlen, storelen, wraplen;
+
+ /* upper bound for fixed blocks with 9-bit literals and length 255
+ (memLevel == 2, which is the lowest that may not use stored blocks) --
+ ~13% overhead plus a small constant */
+ fixedlen = sourceLen + (sourceLen >> 3) + (sourceLen >> 8) +
+ (sourceLen >> 9) + 4;
- /* conservative upper bound for compressed data */
- complen = sourceLen +
- ((sourceLen + 7) >> 3) + ((sourceLen + 63) >> 6) + 5;
+ /* upper bound for stored blocks with length 127 (memLevel == 1) --
+ ~4% overhead plus a small constant */
+ storelen = sourceLen + (sourceLen >> 5) + (sourceLen >> 7) +
+ (sourceLen >> 11) + 7;
- /* if can't get parameters, return conservative bound plus zlib wrapper */
+ /* if can't get parameters, return larger bound plus a zlib wrapper */
if (deflateStateCheck(strm))
- return complen + 6;
+ return (fixedlen > storelen ? fixedlen : storelen) + 6;
/* compute wrapper length */
s = strm->state;
@@ -740,11 +750,12 @@ uLong ZEXPORT deflateBound(strm, sourceLen)
wraplen = 6;
}
- /* if not default parameters, return conservative bound */
+ /* if not default parameters, return one of the conservative bounds */
if (s->w_bits != 15 || s->hash_bits != 8 + 7)
- return complen + wraplen;
+ return (s->w_bits <= s->hash_bits ? fixedlen : storelen) + wraplen;
- /* default settings: return tight bound for that case */
+ /* default settings: return tight bound for that case -- ~0.03% overhead
+ plus a small constant */
return sourceLen + (sourceLen >> 12) + (sourceLen >> 14) +
(sourceLen >> 25) + 13 - 6 + wraplen;
}
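
The shift expressions above are integer stand-ins for the percentages named in the comment. A small self-contained check, with the constants copied from the code above and an arbitrary sample length, showing how close each bound is to ~13%, ~4% and ~0.03% overhead (wrapper bytes ignored):

#include <stdio.h>

int main(void) {
    unsigned long sourceLen = 1000000UL;

    /* fixed blocks, 9-bit literals, 255-byte blocks (memLevel == 2) */
    unsigned long fixedlen = sourceLen + (sourceLen >> 3) + (sourceLen >> 8) +
                             (sourceLen >> 9) + 4;
    /* stored blocks of 127 bytes (memLevel == 1) */
    unsigned long storelen = sourceLen + (sourceLen >> 5) + (sourceLen >> 7) +
                             (sourceLen >> 11) + 7;
    /* tight bound for the default settings */
    unsigned long tight = sourceLen + (sourceLen >> 12) + (sourceLen >> 14) +
                          (sourceLen >> 25) + 13 - 6;

    printf("fixedlen %lu  (+%.2f%%)\n", fixedlen,
           100.0 * (fixedlen - sourceLen) / sourceLen);
    printf("storelen %lu  (+%.2f%%)\n", storelen,
           100.0 * (storelen - sourceLen) / sourceLen);
    printf("tight    %lu  (+%.3f%%)\n", tight,
           100.0 * (tight - sourceLen) / sourceLen);
    return 0;
}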
@@ -754,7 +765,7 @@ uLong ZEXPORT deflateBound(strm, sourceLen)
* IN assertion: the stream state is correct and there is enough room in
* pending_buf.
*/
-local void putShortMSB (s, b)
+local void putShortMSB(s, b)
deflate_state *s;
uInt b;
{
@@ -801,7 +812,7 @@ local void flush_pending(strm)
} while (0)
/* ========================================================================= */
-int ZEXPORT deflate (strm, flush)
+int ZEXPORT deflate(strm, flush)
z_streamp strm;
int flush;
{
@@ -856,7 +867,7 @@ int ZEXPORT deflate (strm, flush)
s->status = BUSY_STATE;
if (s->status == INIT_STATE) {
/* zlib header */
- uInt header = (Z_DEFLATED + ((s->w_bits-8)<<4)) << 8;
+ uInt header = (Z_DEFLATED + ((s->w_bits - 8) << 4)) << 8;
uInt level_flags;
if (s->strategy >= Z_HUFFMAN_ONLY || s->level < 2)
@@ -1116,7 +1127,7 @@ int ZEXPORT deflate (strm, flush)
}
/* ========================================================================= */
-int ZEXPORT deflateEnd (strm)
+int ZEXPORT deflateEnd(strm)
z_streamp strm;
{
int status;
@@ -1142,7 +1153,7 @@ int ZEXPORT deflateEnd (strm)
* To simplify the source, this is not supported for 16-bit MSDOS (which
* doesn't have enough memory anyway to duplicate compression states).
*/
-int ZEXPORT deflateCopy (dest, source)
+int ZEXPORT deflateCopy(dest, source)
z_streamp dest;
z_streamp source;
{
@@ -1231,7 +1242,7 @@ local unsigned read_buf(strm, buf, size)
/* ===========================================================================
* Initialize the "longest match" routines for a new zlib stream
*/
-local void lm_init (s)
+local void lm_init(s)
deflate_state *s;
{
s->window_size = (ulg)2L*s->w_size;
@@ -1252,11 +1263,6 @@ local void lm_init (s)
s->match_length = s->prev_length = MIN_MATCH-1;
s->match_available = 0;
s->ins_h = 0;
-#ifndef FASTEST
-#ifdef ASMV
- match_init(); /* initialize the asm code */
-#endif
-#endif
}
#ifndef FASTEST
@@ -1269,10 +1275,6 @@ local void lm_init (s)
* string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
* OUT assertion: the match length is not greater than s->lookahead.
*/
-#ifndef ASMV
-/* For 80x86 and 680x0, an optimized version will be provided in match.asm or
- * match.S. The code will be functionally equivalent.
- */
local uInt longest_match(s, cur_match)
deflate_state *s;
IPos cur_match; /* current match */
@@ -1297,10 +1299,10 @@ local uInt longest_match(s, cur_match)
*/
register Bytef *strend = s->window + s->strstart + MAX_MATCH - 1;
register ush scan_start = *(ushf*)scan;
- register ush scan_end = *(ushf*)(scan+best_len-1);
+ register ush scan_end = *(ushf*)(scan + best_len - 1);
#else
register Bytef *strend = s->window + s->strstart + MAX_MATCH;
- register Byte scan_end1 = scan[best_len-1];
+ register Byte scan_end1 = scan[best_len - 1];
register Byte scan_end = scan[best_len];
#endif
@@ -1318,7 +1320,8 @@ local uInt longest_match(s, cur_match)
*/
if ((uInt)nice_match > s->lookahead) nice_match = (int)s->lookahead;
- Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");
+ Assert((ulg)s->strstart <= s->window_size - MIN_LOOKAHEAD,
+ "need lookahead");
do {
Assert(cur_match < s->strstart, "no future");
@@ -1336,43 +1339,44 @@ local uInt longest_match(s, cur_match)
/* This code assumes sizeof(unsigned short) == 2. Do not use
* UNALIGNED_OK if your compiler uses a different size.
*/
- if (*(ushf*)(match+best_len-1) != scan_end ||
+ if (*(ushf*)(match + best_len - 1) != scan_end ||
*(ushf*)match != scan_start) continue;
/* It is not necessary to compare scan[2] and match[2] since they are
* always equal when the other bytes match, given that the hash keys
* are equal and that HASH_BITS >= 8. Compare 2 bytes at a time at
- * strstart+3, +5, ... up to strstart+257. We check for insufficient
+ * strstart + 3, + 5, up to strstart + 257. We check for insufficient
* lookahead only every 4th comparison; the 128th check will be made
- * at strstart+257. If MAX_MATCH-2 is not a multiple of 8, it is
+ * at strstart + 257. If MAX_MATCH-2 is not a multiple of 8, it is
* necessary to put more guard bytes at the end of the window, or
* to check more often for insufficient lookahead.
*/
Assert(scan[2] == match[2], "scan[2]?");
scan++, match++;
do {
- } while (*(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
- *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
- *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
- *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
+ } while (*(ushf*)(scan += 2) == *(ushf*)(match += 2) &&
+ *(ushf*)(scan += 2) == *(ushf*)(match += 2) &&
+ *(ushf*)(scan += 2) == *(ushf*)(match += 2) &&
+ *(ushf*)(scan += 2) == *(ushf*)(match += 2) &&
scan < strend);
/* The funny "do {}" generates better code on most compilers */
- /* Here, scan <= window+strstart+257 */
- Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
+ /* Here, scan <= window + strstart + 257 */
+ Assert(scan <= s->window + (unsigned)(s->window_size - 1),
+ "wild scan");
if (*scan == *match) scan++;
- len = (MAX_MATCH - 1) - (int)(strend-scan);
+ len = (MAX_MATCH - 1) - (int)(strend - scan);
scan = strend - (MAX_MATCH-1);
#else /* UNALIGNED_OK */
- if (match[best_len] != scan_end ||
- match[best_len-1] != scan_end1 ||
- *match != *scan ||
- *++match != scan[1]) continue;
+ if (match[best_len] != scan_end ||
+ match[best_len - 1] != scan_end1 ||
+ *match != *scan ||
+ *++match != scan[1]) continue;
- /* The check at best_len-1 can be removed because it will be made
+ /* The check at best_len - 1 can be removed because it will be made
* again later. (This heuristic is not always a win.)
* It is not necessary to compare scan[2] and match[2] since they
* are always equal when the other bytes match, given that
@@ -1382,7 +1386,7 @@ local uInt longest_match(s, cur_match)
Assert(*scan == *match, "match[2]?");
/* We check for insufficient lookahead only every 8th comparison;
- * the 256th check will be made at strstart+258.
+ * the 256th check will be made at strstart + 258.
*/
do {
} while (*++scan == *++match && *++scan == *++match &&
@@ -1391,7 +1395,8 @@ local uInt longest_match(s, cur_match)
*++scan == *++match && *++scan == *++match &&
scan < strend);
- Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
+ Assert(scan <= s->window + (unsigned)(s->window_size - 1),
+ "wild scan");
len = MAX_MATCH - (int)(strend - scan);
scan = strend - MAX_MATCH;
@@ -1403,9 +1408,9 @@ local uInt longest_match(s, cur_match)
best_len = len;
if (len >= nice_match) break;
#ifdef UNALIGNED_OK
- scan_end = *(ushf*)(scan+best_len-1);
+ scan_end = *(ushf*)(scan + best_len - 1);
#else
- scan_end1 = scan[best_len-1];
+ scan_end1 = scan[best_len - 1];
scan_end = scan[best_len];
#endif
}
@@ -1415,7 +1420,6 @@ local uInt longest_match(s, cur_match)
if ((uInt)best_len <= s->lookahead) return (uInt)best_len;
return s->lookahead;
}
-#endif /* ASMV */
#else /* FASTEST */
@@ -1436,7 +1440,8 @@ local uInt longest_match(s, cur_match)
*/
Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");
- Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");
+ Assert((ulg)s->strstart <= s->window_size - MIN_LOOKAHEAD,
+ "need lookahead");
Assert(cur_match < s->strstart, "no future");
@@ -1446,7 +1451,7 @@ local uInt longest_match(s, cur_match)
*/
if (match[0] != scan[0] || match[1] != scan[1]) return MIN_MATCH-1;
- /* The check at best_len-1 can be removed because it will be made
+ /* The check at best_len - 1 can be removed because it will be made
* again later. (This heuristic is not always a win.)
* It is not necessary to compare scan[2] and match[2] since they
* are always equal when the other bytes match, given that
@@ -1456,7 +1461,7 @@ local uInt longest_match(s, cur_match)
Assert(*scan == *match, "match[2]?");
/* We check for insufficient lookahead only every 8th comparison;
- * the 256th check will be made at strstart+258.
+ * the 256th check will be made at strstart + 258.
*/
do {
} while (*++scan == *++match && *++scan == *++match &&
@@ -1465,7 +1470,7 @@ local uInt longest_match(s, cur_match)
*++scan == *++match && *++scan == *++match &&
scan < strend);
- Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
+ Assert(scan <= s->window + (unsigned)(s->window_size - 1), "wild scan");
len = MAX_MATCH - (int)(strend - scan);
@@ -1501,7 +1506,7 @@ local void check_match(s, start, match, length)
z_error("invalid match");
}
if (z_verbose > 1) {
- fprintf(stderr,"\\[%d,%d]", start-match, length);
+ fprintf(stderr,"\\[%d,%d]", start - match, length);
do { putc(s->window[start++], stderr); } while (--length != 0);
}
}
@@ -1547,9 +1552,9 @@ local void fill_window(s)
/* If the window is almost full and there is insufficient lookahead,
* move the upper half to the lower one to make room in the upper half.
*/
- if (s->strstart >= wsize+MAX_DIST(s)) {
+ if (s->strstart >= wsize + MAX_DIST(s)) {
- zmemcpy(s->window, s->window+wsize, (unsigned)wsize - more);
+ zmemcpy(s->window, s->window + wsize, (unsigned)wsize - more);
s->match_start -= wsize;
s->strstart -= wsize; /* we now have strstart >= MAX_DIST */
s->block_start -= (long) wsize;
@@ -1680,7 +1685,7 @@ local void fill_window(s)
*
* deflate_stored() is written to minimize the number of times an input byte is
* copied. It is most efficient with large input and output buffers, which
- * maximizes the opportunites to have a single copy from next_in to next_out.
+ * maximizes the opportunities to have a single copy from next_in to next_out.
*/
local block_state deflate_stored(s, flush)
deflate_state *s;
@@ -1890,7 +1895,7 @@ local block_state deflate_fast(s, flush)
if (s->lookahead == 0) break; /* flush the current block */
}
- /* Insert the string window[strstart .. strstart+2] in the
+ /* Insert the string window[strstart .. strstart + 2] in the
* dictionary, and set hash_head to the head of the hash chain:
*/
hash_head = NIL;
@@ -1938,7 +1943,7 @@ local block_state deflate_fast(s, flush)
s->strstart += s->match_length;
s->match_length = 0;
s->ins_h = s->window[s->strstart];
- UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]);
+ UPDATE_HASH(s, s->ins_h, s->window[s->strstart + 1]);
#if MIN_MATCH != 3
Call UPDATE_HASH() MIN_MATCH-3 more times
#endif
@@ -1949,7 +1954,7 @@ local block_state deflate_fast(s, flush)
} else {
/* No match, output a literal byte */
Tracevv((stderr,"%c", s->window[s->strstart]));
- _tr_tally_lit (s, s->window[s->strstart], bflush);
+ _tr_tally_lit(s, s->window[s->strstart], bflush);
s->lookahead--;
s->strstart++;
}
@@ -1993,7 +1998,7 @@ local block_state deflate_slow(s, flush)
if (s->lookahead == 0) break; /* flush the current block */
}
- /* Insert the string window[strstart .. strstart+2] in the
+ /* Insert the string window[strstart .. strstart + 2] in the
* dictionary, and set hash_head to the head of the hash chain:
*/
hash_head = NIL;
@@ -2035,17 +2040,17 @@ local block_state deflate_slow(s, flush)
uInt max_insert = s->strstart + s->lookahead - MIN_MATCH;
/* Do not insert strings in hash table beyond this. */
- check_match(s, s->strstart-1, s->prev_match, s->prev_length);
+ check_match(s, s->strstart - 1, s->prev_match, s->prev_length);
- _tr_tally_dist(s, s->strstart -1 - s->prev_match,
+ _tr_tally_dist(s, s->strstart - 1 - s->prev_match,
s->prev_length - MIN_MATCH, bflush);
/* Insert in hash table all strings up to the end of the match.
- * strstart-1 and strstart are already inserted. If there is not
+ * strstart - 1 and strstart are already inserted. If there is not
* enough lookahead, the last two strings are not inserted in
* the hash table.
*/
- s->lookahead -= s->prev_length-1;
+ s->lookahead -= s->prev_length - 1;
s->prev_length -= 2;
do {
if (++s->strstart <= max_insert) {
@@ -2063,8 +2068,8 @@ local block_state deflate_slow(s, flush)
* single literal. If there was a match but the current match
* is longer, truncate the previous match to a single literal.
*/
- Tracevv((stderr,"%c", s->window[s->strstart-1]));
- _tr_tally_lit(s, s->window[s->strstart-1], bflush);
+ Tracevv((stderr,"%c", s->window[s->strstart - 1]));
+ _tr_tally_lit(s, s->window[s->strstart - 1], bflush);
if (bflush) {
FLUSH_BLOCK_ONLY(s, 0);
}
@@ -2082,8 +2087,8 @@ local block_state deflate_slow(s, flush)
}
Assert (flush != Z_NO_FLUSH, "no flush?");
if (s->match_available) {
- Tracevv((stderr,"%c", s->window[s->strstart-1]));
- _tr_tally_lit(s, s->window[s->strstart-1], bflush);
+ Tracevv((stderr,"%c", s->window[s->strstart - 1]));
+ _tr_tally_lit(s, s->window[s->strstart - 1], bflush);
s->match_available = 0;
}
s->insert = s->strstart < MIN_MATCH-1 ? s->strstart : MIN_MATCH-1;
@@ -2140,7 +2145,8 @@ local block_state deflate_rle(s, flush)
if (s->match_length > s->lookahead)
s->match_length = s->lookahead;
}
- Assert(scan <= s->window+(uInt)(s->window_size-1), "wild scan");
+ Assert(scan <= s->window + (uInt)(s->window_size - 1),
+ "wild scan");
}
/* Emit match if have run of MIN_MATCH or longer, else emit literal */
@@ -2155,7 +2161,7 @@ local block_state deflate_rle(s, flush)
} else {
/* No match, output a literal byte */
Tracevv((stderr,"%c", s->window[s->strstart]));
- _tr_tally_lit (s, s->window[s->strstart], bflush);
+ _tr_tally_lit(s, s->window[s->strstart], bflush);
s->lookahead--;
s->strstart++;
}
@@ -2195,7 +2201,7 @@ local block_state deflate_huff(s, flush)
/* Output a literal byte */
s->match_length = 0;
Tracevv((stderr,"%c", s->window[s->strstart]));
- _tr_tally_lit (s, s->window[s->strstart], bflush);
+ _tr_tally_lit(s, s->window[s->strstart], bflush);
s->lookahead--;
s->strstart++;
if (bflush) FLUSH_BLOCK(s, 0);
diff --git a/thirdparty/zlib/deflate.h b/thirdparty/zlib/deflate.h
index 17c226113b..1a06cd5f25 100644
--- a/thirdparty/zlib/deflate.h
+++ b/thirdparty/zlib/deflate.h
@@ -329,8 +329,8 @@ void ZLIB_INTERNAL _tr_stored_block OF((deflate_state *s, charf *buf,
# define _tr_tally_dist(s, distance, length, flush) \
{ uch len = (uch)(length); \
ush dist = (ush)(distance); \
- s->sym_buf[s->sym_next++] = dist; \
- s->sym_buf[s->sym_next++] = dist >> 8; \
+ s->sym_buf[s->sym_next++] = (uch)dist; \
+ s->sym_buf[s->sym_next++] = (uch)(dist >> 8); \
s->sym_buf[s->sym_next++] = len; \
dist--; \
s->dyn_ltree[_length_code[len]+LITERALS+1].Freq++; \
diff --git a/thirdparty/zlib/gzlib.c b/thirdparty/zlib/gzlib.c
index dddaf26873..55da46a453 100644
--- a/thirdparty/zlib/gzlib.c
+++ b/thirdparty/zlib/gzlib.c
@@ -30,7 +30,7 @@ local gzFile gz_open OF((const void *, int, const char *));
The gz_strwinerror function does not change the current setting of
GetLastError. */
-char ZLIB_INTERNAL *gz_strwinerror (error)
+char ZLIB_INTERNAL *gz_strwinerror(error)
DWORD error;
{
static char buf[1024];
diff --git a/thirdparty/zlib/gzread.c b/thirdparty/zlib/gzread.c
index 884c9bfe4c..dd77381596 100644
--- a/thirdparty/zlib/gzread.c
+++ b/thirdparty/zlib/gzread.c
@@ -157,11 +157,9 @@ local int gz_look(state)
the output buffer is larger than the input buffer, which also assures
space for gzungetc() */
state->x.next = state->out;
- if (strm->avail_in) {
- memcpy(state->x.next, strm->next_in, strm->avail_in);
- state->x.have = strm->avail_in;
- strm->avail_in = 0;
- }
+ memcpy(state->x.next, strm->next_in, strm->avail_in);
+ state->x.have = strm->avail_in;
+ strm->avail_in = 0;
state->how = COPY;
state->direct = 1;
return 0;
diff --git a/thirdparty/zlib/gzwrite.c b/thirdparty/zlib/gzwrite.c
index a8ffc8f53d..eb8a0e5893 100644
--- a/thirdparty/zlib/gzwrite.c
+++ b/thirdparty/zlib/gzwrite.c
@@ -474,7 +474,7 @@ int ZEXPORTVA gzprintf(gzFile file, const char *format, ...)
#else /* !STDC && !Z_HAVE_STDARG_H */
/* -- see zlib.h -- */
-int ZEXPORTVA gzprintf (file, format, a1, a2, a3, a4, a5, a6, a7, a8, a9, a10,
+int ZEXPORTVA gzprintf(file, format, a1, a2, a3, a4, a5, a6, a7, a8, a9, a10,
a11, a12, a13, a14, a15, a16, a17, a18, a19, a20)
gzFile file;
const char *format;
diff --git a/thirdparty/zlib/infback.c b/thirdparty/zlib/infback.c
index a390c58e81..babeaf1806 100644
--- a/thirdparty/zlib/infback.c
+++ b/thirdparty/zlib/infback.c
@@ -66,6 +66,7 @@ int stream_size;
state->window = window;
state->wnext = 0;
state->whave = 0;
+ state->sane = 1;
return Z_OK;
}
@@ -605,25 +606,27 @@ void FAR *out_desc;
break;
case DONE:
- /* inflate stream terminated properly -- write leftover output */
+ /* inflate stream terminated properly */
ret = Z_STREAM_END;
- if (left < state->wsize) {
- if (out(out_desc, state->window, state->wsize - left))
- ret = Z_BUF_ERROR;
- }
goto inf_leave;
case BAD:
ret = Z_DATA_ERROR;
goto inf_leave;
- default: /* can't happen, but makes compilers happy */
+ default:
+ /* can't happen, but makes compilers happy */
ret = Z_STREAM_ERROR;
goto inf_leave;
}
- /* Return unused input */
+ /* Write leftover output and return unused input */
inf_leave:
+ if (left < state->wsize) {
+ if (out(out_desc, state->window, state->wsize - left) &&
+ ret == Z_STREAM_END)
+ ret = Z_BUF_ERROR;
+ }
strm->next_in = next;
strm->avail_in = have;
return ret;
diff --git a/thirdparty/zlib/inflate.c b/thirdparty/zlib/inflate.c
index 7be8c63662..8acbef44e9 100644
--- a/thirdparty/zlib/inflate.c
+++ b/thirdparty/zlib/inflate.c
@@ -168,6 +168,8 @@ int windowBits;
/* extract wrap request from windowBits parameter */
if (windowBits < 0) {
+ if (windowBits < -15)
+ return Z_STREAM_ERROR;
wrap = 0;
windowBits = -windowBits;
}
@@ -764,8 +766,9 @@ int flush;
if (copy > have) copy = have;
if (copy) {
if (state->head != Z_NULL &&
- state->head->extra != Z_NULL) {
- len = state->head->extra_len - state->length;
+ state->head->extra != Z_NULL &&
+ (len = state->head->extra_len - state->length) <
+ state->head->extra_max) {
zmemcpy(state->head->extra + len, next,
len + copy > state->head->extra_max ?
state->head->extra_max - len : copy);
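
The extra condition keeps the copy into head->extra inside extra_max even when the gzip extra field arrives split across several inflate() calls. A stripped-down sketch of the same clamping pattern, with hypothetical names rather than the real inflate state machine:

#include <stdio.h>
#include <string.h>

/* Store up to 'copy' bytes of an extra field into a caller buffer of size
 * 'extra_max', starting at offset 'len'; bytes that do not fit are consumed
 * from the stream but dropped, which is the point of the added bound check. */
static void append_extra(unsigned char *extra, unsigned extra_max,
                         unsigned len, const unsigned char *next,
                         unsigned copy) {
    if (extra != NULL && len < extra_max)
        memcpy(extra + len,
               next,
               len + copy > extra_max ? extra_max - len : copy);
}

int main(void) {
    unsigned char extra[4];
    const unsigned char chunk[] = {1, 2, 3};

    append_extra(extra, sizeof(extra), 0, chunk, 3);  /* stores 1 2 3            */
    append_extra(extra, sizeof(extra), 3, chunk, 3);  /* stores only first byte  */
    append_extra(extra, sizeof(extra), 6, chunk, 3);  /* offset past buffer: no-op */

    for (unsigned i = 0; i < sizeof(extra); i++)
        printf("%u ", extra[i]);
    printf("\n");                                     /* prints: 1 2 3 1 */
    return 0;
}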
diff --git a/thirdparty/zlib/inftrees.c b/thirdparty/zlib/inftrees.c
index 09462a740b..57d2793bec 100644
--- a/thirdparty/zlib/inftrees.c
+++ b/thirdparty/zlib/inftrees.c
@@ -9,7 +9,7 @@
#define MAXBITS 15
const char inflate_copyright[] =
- " inflate 1.2.12 Copyright 1995-2022 Mark Adler ";
+ " inflate 1.2.13 Copyright 1995-2022 Mark Adler ";
/*
If you use the zlib library in a product, an acknowledgment is welcome
in the documentation of your product. If for some reason you cannot
@@ -62,7 +62,7 @@ unsigned short FAR *work;
35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0};
static const unsigned short lext[31] = { /* Length codes 257..285 extra */
16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 18,
- 19, 19, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 16, 199, 202};
+ 19, 19, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 16, 194, 65};
static const unsigned short dbase[32] = { /* Distance codes 0..29 base */
1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,
257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,
diff --git a/thirdparty/zlib/inftrees.h b/thirdparty/zlib/inftrees.h
index baa53a0b1a..f53665311c 100644
--- a/thirdparty/zlib/inftrees.h
+++ b/thirdparty/zlib/inftrees.h
@@ -38,7 +38,7 @@ typedef struct {
/* Maximum size of the dynamic table. The maximum number of code structures is
1444, which is the sum of 852 for literal/length codes and 592 for distance
codes. These values were found by exhaustive searches using the program
- examples/enough.c found in the zlib distribtution. The arguments to that
+ examples/enough.c found in the zlib distribution. The arguments to that
program are the number of symbols, the initial root table size, and the
maximum bit length of a code. "enough 286 9 15" for literal/length codes
returns returns 852, and "enough 30 6 15" for distance codes returns 592.
diff --git a/thirdparty/zlib/trees.c b/thirdparty/zlib/trees.c
index f73fd99c37..5f305c4722 100644
--- a/thirdparty/zlib/trees.c
+++ b/thirdparty/zlib/trees.c
@@ -193,7 +193,7 @@ local void send_bits(s, value, length)
s->bits_sent += (ulg)length;
/* If not enough room in bi_buf, use (valid) bits from bi_buf and
- * (16 - bi_valid) bits from value, leaving (width - (16-bi_valid))
+ * (16 - bi_valid) bits from value, leaving (width - (16 - bi_valid))
* unused bits in value.
*/
if (s->bi_valid > (int)Buf_size - length) {
@@ -256,7 +256,7 @@ local void tr_static_init()
length = 0;
for (code = 0; code < LENGTH_CODES-1; code++) {
base_length[code] = length;
- for (n = 0; n < (1<<extra_lbits[code]); n++) {
+ for (n = 0; n < (1 << extra_lbits[code]); n++) {
_length_code[length++] = (uch)code;
}
}
@@ -265,13 +265,13 @@ local void tr_static_init()
* in two different ways: code 284 + 5 bits or code 285, so we
* overwrite length_code[255] to use the best encoding:
*/
- _length_code[length-1] = (uch)code;
+ _length_code[length - 1] = (uch)code;
/* Initialize the mapping dist (0..32K) -> dist code (0..29) */
dist = 0;
for (code = 0 ; code < 16; code++) {
base_dist[code] = dist;
- for (n = 0; n < (1<<extra_dbits[code]); n++) {
+ for (n = 0; n < (1 << extra_dbits[code]); n++) {
_dist_code[dist++] = (uch)code;
}
}
@@ -279,11 +279,11 @@ local void tr_static_init()
dist >>= 7; /* from now on, all distances are divided by 128 */
for ( ; code < D_CODES; code++) {
base_dist[code] = dist << 7;
- for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) {
+ for (n = 0; n < (1 << (extra_dbits[code] - 7)); n++) {
_dist_code[256 + dist++] = (uch)code;
}
}
- Assert (dist == 256, "tr_static_init: 256+dist != 512");
+ Assert (dist == 256, "tr_static_init: 256 + dist != 512");
/* Construct the codes of the static literal tree */
for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0;
@@ -312,7 +312,7 @@ local void tr_static_init()
}
/* ===========================================================================
- * Genererate the file trees.h describing the static trees.
+ * Generate the file trees.h describing the static trees.
*/
#ifdef GEN_TREES_H
# ifndef ZLIB_DEBUG
@@ -321,7 +321,7 @@ local void tr_static_init()
# define SEPARATOR(i, last, width) \
((i) == (last)? "\n};\n\n" : \
- ((i) % (width) == (width)-1 ? ",\n" : ", "))
+ ((i) % (width) == (width) - 1 ? ",\n" : ", "))
void gen_trees_header()
{
@@ -458,7 +458,7 @@ local void pqdownheap(s, tree, k)
while (j <= s->heap_len) {
/* Set j to the smallest of the two sons: */
if (j < s->heap_len &&
- smaller(tree, s->heap[j+1], s->heap[j], s->depth)) {
+ smaller(tree, s->heap[j + 1], s->heap[j], s->depth)) {
j++;
}
/* Exit if v is smaller than both sons */
@@ -507,7 +507,7 @@ local void gen_bitlen(s, desc)
*/
tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */
- for (h = s->heap_max+1; h < HEAP_SIZE; h++) {
+ for (h = s->heap_max + 1; h < HEAP_SIZE; h++) {
n = s->heap[h];
bits = tree[tree[n].Dad].Len + 1;
if (bits > max_length) bits = max_length, overflow++;
@@ -518,7 +518,7 @@ local void gen_bitlen(s, desc)
s->bl_count[bits]++;
xbits = 0;
- if (n >= base) xbits = extra[n-base];
+ if (n >= base) xbits = extra[n - base];
f = tree[n].Freq;
s->opt_len += (ulg)f * (unsigned)(bits + xbits);
if (stree) s->static_len += (ulg)f * (unsigned)(stree[n].Len + xbits);
@@ -530,10 +530,10 @@ local void gen_bitlen(s, desc)
/* Find the first bit length which could increase: */
do {
- bits = max_length-1;
+ bits = max_length - 1;
while (s->bl_count[bits] == 0) bits--;
- s->bl_count[bits]--; /* move one leaf down the tree */
- s->bl_count[bits+1] += 2; /* move one overflow item as its brother */
+ s->bl_count[bits]--; /* move one leaf down the tree */
+ s->bl_count[bits + 1] += 2; /* move one overflow item as its brother */
s->bl_count[max_length]--;
/* The brother of the overflow item also moves one step up,
* but this does not affect bl_count[max_length]
@@ -569,7 +569,7 @@ local void gen_bitlen(s, desc)
* OUT assertion: the field code is set for all tree elements of non
* zero code length.
*/
-local void gen_codes (tree, max_code, bl_count)
+local void gen_codes(tree, max_code, bl_count)
ct_data *tree; /* the tree to decorate */
int max_code; /* largest code with non zero frequency */
ushf *bl_count; /* number of codes at each bit length */
@@ -583,13 +583,13 @@ local void gen_codes (tree, max_code, bl_count)
* without bit reversal.
*/
for (bits = 1; bits <= MAX_BITS; bits++) {
- code = (code + bl_count[bits-1]) << 1;
+ code = (code + bl_count[bits - 1]) << 1;
next_code[bits] = (ush)code;
}
/* Check that the bit counts in bl_count are consistent. The last code
* must be all ones.
*/
- Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
+ Assert (code + bl_count[MAX_BITS] - 1 == (1 << MAX_BITS) - 1,
"inconsistent bit counts");
Tracev((stderr,"\ngen_codes: max_code %d ", max_code));
@@ -600,7 +600,7 @@ local void gen_codes (tree, max_code, bl_count)
tree[n].Code = (ush)bi_reverse(next_code[len]++, len);
Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
- n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len]-1));
+ n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len] - 1));
}
}
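
gen_codes() assigns canonical Huffman codes purely from the per-length counts: each bit length's first code is the previous length's first code plus its count, shifted left by one. A hedged standalone sketch with a four-symbol example, separate from the ct_data structures used above:

#include <stdio.h>

#define DEMO_MAX_BITS 15

int main(void) {
    /* Code lengths for symbols A..D; a complete set (Kraft sum == 1). */
    const int len[4] = {2, 1, 3, 3};
    unsigned bl_count[DEMO_MAX_BITS + 1] = {0};
    unsigned next_code[DEMO_MAX_BITS + 1];
    unsigned code = 0;

    for (int n = 0; n < 4; n++)
        bl_count[len[n]]++;

    /* Same recurrence as gen_codes(): first code for each bit length. */
    for (int bits = 1; bits <= DEMO_MAX_BITS; bits++) {
        code = (code + bl_count[bits - 1]) << 1;
        next_code[bits] = code;
    }

    /* Hand out consecutive codes within each length. */
    for (int n = 0; n < 4; n++) {
        unsigned c = next_code[len[n]]++;
        printf("symbol %c  len %d  code ", 'A' + n, len[n]);
        for (int b = len[n] - 1; b >= 0; b--)
            putchar(((c >> b) & 1) ? '1' : '0');
        putchar('\n');
    }
    /* Prints A=10, B=0, C=110, D=111; zlib additionally bit-reverses each
     * code with bi_reverse() so it can be emitted LSB first. */
    return 0;
}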
@@ -624,7 +624,7 @@ local void build_tree(s, desc)
int node; /* new node being created */
/* Construct the initial heap, with least frequent element in
- * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
+ * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n + 1].
* heap[0] is not used.
*/
s->heap_len = 0, s->heap_max = HEAP_SIZE;
@@ -652,7 +652,7 @@ local void build_tree(s, desc)
}
desc->max_code = max_code;
- /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
+ /* The elements heap[heap_len/2 + 1 .. heap_len] are leaves of the tree,
* establish sub-heaps of increasing lengths:
*/
for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n);
@@ -700,7 +700,7 @@ local void build_tree(s, desc)
* Scan a literal or distance tree to determine the frequencies of the codes
* in the bit length tree.
*/
-local void scan_tree (s, tree, max_code)
+local void scan_tree(s, tree, max_code)
deflate_state *s;
ct_data *tree; /* the tree to be scanned */
int max_code; /* and its largest code of non zero frequency */
@@ -714,10 +714,10 @@ local void scan_tree (s, tree, max_code)
int min_count = 4; /* min repeat count */
if (nextlen == 0) max_count = 138, min_count = 3;
- tree[max_code+1].Len = (ush)0xffff; /* guard */
+ tree[max_code + 1].Len = (ush)0xffff; /* guard */
for (n = 0; n <= max_code; n++) {
- curlen = nextlen; nextlen = tree[n+1].Len;
+ curlen = nextlen; nextlen = tree[n + 1].Len;
if (++count < max_count && curlen == nextlen) {
continue;
} else if (count < min_count) {
@@ -745,7 +745,7 @@ local void scan_tree (s, tree, max_code)
* Send a literal or distance tree in compressed form, using the codes in
* bl_tree.
*/
-local void send_tree (s, tree, max_code)
+local void send_tree(s, tree, max_code)
deflate_state *s;
ct_data *tree; /* the tree to be scanned */
int max_code; /* and its largest code of non zero frequency */
@@ -758,11 +758,11 @@ local void send_tree (s, tree, max_code)
int max_count = 7; /* max repeat count */
int min_count = 4; /* min repeat count */
- /* tree[max_code+1].Len = -1; */ /* guard already set */
+ /* tree[max_code + 1].Len = -1; */ /* guard already set */
if (nextlen == 0) max_count = 138, min_count = 3;
for (n = 0; n <= max_code; n++) {
- curlen = nextlen; nextlen = tree[n+1].Len;
+ curlen = nextlen; nextlen = tree[n + 1].Len;
if (++count < max_count && curlen == nextlen) {
continue;
} else if (count < min_count) {
@@ -773,13 +773,13 @@ local void send_tree (s, tree, max_code)
send_code(s, curlen, s->bl_tree); count--;
}
Assert(count >= 3 && count <= 6, " 3_6?");
- send_code(s, REP_3_6, s->bl_tree); send_bits(s, count-3, 2);
+ send_code(s, REP_3_6, s->bl_tree); send_bits(s, count - 3, 2);
} else if (count <= 10) {
- send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count-3, 3);
+ send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count - 3, 3);
} else {
- send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count-11, 7);
+ send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count - 11, 7);
}
count = 0; prevlen = curlen;
if (nextlen == 0) {
@@ -807,8 +807,8 @@ local int build_bl_tree(s)
/* Build the bit length tree: */
build_tree(s, (tree_desc *)(&(s->bl_desc)));
- /* opt_len now includes the length of the tree representations, except
- * the lengths of the bit lengths codes and the 5+5+4 bits for the counts.
+ /* opt_len now includes the length of the tree representations, except the
+ * lengths of the bit lengths codes and the 5 + 5 + 4 bits for the counts.
*/
/* Determine the number of bit length codes to send. The pkzip format
@@ -819,7 +819,7 @@ local int build_bl_tree(s)
if (s->bl_tree[bl_order[max_blindex]].Len != 0) break;
}
/* Update opt_len to include the bit length tree and counts */
- s->opt_len += 3*((ulg)max_blindex+1) + 5+5+4;
+ s->opt_len += 3*((ulg)max_blindex + 1) + 5 + 5 + 4;
Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld",
s->opt_len, s->static_len));
@@ -841,19 +841,19 @@ local void send_all_trees(s, lcodes, dcodes, blcodes)
Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
"too many codes");
Tracev((stderr, "\nbl counts: "));
- send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */
- send_bits(s, dcodes-1, 5);
- send_bits(s, blcodes-4, 4); /* not -3 as stated in appnote.txt */
+ send_bits(s, lcodes - 257, 5); /* not +255 as stated in appnote.txt */
+ send_bits(s, dcodes - 1, 5);
+ send_bits(s, blcodes - 4, 4); /* not -3 as stated in appnote.txt */
for (rank = 0; rank < blcodes; rank++) {
Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
send_bits(s, s->bl_tree[bl_order[rank]].Len, 3);
}
Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent));
- send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); /* literal tree */
+ send_tree(s, (ct_data *)s->dyn_ltree, lcodes - 1); /* literal tree */
Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent));
- send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); /* distance tree */
+ send_tree(s, (ct_data *)s->dyn_dtree, dcodes - 1); /* distance tree */
Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent));
}
@@ -866,7 +866,7 @@ void ZLIB_INTERNAL _tr_stored_block(s, buf, stored_len, last)
ulg stored_len; /* length of input block */
int last; /* one if this is the last block for a file */
{
- send_bits(s, (STORED_BLOCK<<1)+last, 3); /* send block type */
+ send_bits(s, (STORED_BLOCK<<1) + last, 3); /* send block type */
bi_windup(s); /* align on byte boundary */
put_short(s, (ush)stored_len);
put_short(s, (ush)~stored_len);
@@ -877,7 +877,7 @@ void ZLIB_INTERNAL _tr_stored_block(s, buf, stored_len, last)
s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L;
s->compressed_len += (stored_len + 4) << 3;
s->bits_sent += 2*16;
- s->bits_sent += stored_len<<3;
+ s->bits_sent += stored_len << 3;
#endif
}
@@ -943,14 +943,17 @@ void ZLIB_INTERNAL _tr_flush_block(s, buf, stored_len, last)
max_blindex = build_bl_tree(s);
/* Determine the best encoding. Compute the block lengths in bytes. */
- opt_lenb = (s->opt_len+3+7)>>3;
- static_lenb = (s->static_len+3+7)>>3;
+ opt_lenb = (s->opt_len + 3 + 7) >> 3;
+ static_lenb = (s->static_len + 3 + 7) >> 3;
Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ",
opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len,
s->sym_next / 3));
- if (static_lenb <= opt_lenb) opt_lenb = static_lenb;
+#ifndef FORCE_STATIC
+ if (static_lenb <= opt_lenb || s->strategy == Z_FIXED)
+#endif
+ opt_lenb = static_lenb;
} else {
Assert(buf != (char*)0, "lost buf");
@@ -960,7 +963,7 @@ void ZLIB_INTERNAL _tr_flush_block(s, buf, stored_len, last)
#ifdef FORCE_STORED
if (buf != (char*)0) { /* force stored block */
#else
- if (stored_len+4 <= opt_lenb && buf != (char*)0) {
+ if (stored_len + 4 <= opt_lenb && buf != (char*)0) {
/* 4: two words for the lengths */
#endif
/* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
@@ -971,21 +974,17 @@ void ZLIB_INTERNAL _tr_flush_block(s, buf, stored_len, last)
*/
_tr_stored_block(s, buf, stored_len, last);
-#ifdef FORCE_STATIC
- } else if (static_lenb >= 0) { /* force static trees */
-#else
- } else if (s->strategy == Z_FIXED || static_lenb == opt_lenb) {
-#endif
- send_bits(s, (STATIC_TREES<<1)+last, 3);
+ } else if (static_lenb == opt_lenb) {
+ send_bits(s, (STATIC_TREES<<1) + last, 3);
compress_block(s, (const ct_data *)static_ltree,
(const ct_data *)static_dtree);
#ifdef ZLIB_DEBUG
s->compressed_len += 3 + s->static_len;
#endif
} else {
- send_bits(s, (DYN_TREES<<1)+last, 3);
- send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1,
- max_blindex+1);
+ send_bits(s, (DYN_TREES<<1) + last, 3);
+ send_all_trees(s, s->l_desc.max_code + 1, s->d_desc.max_code + 1,
+ max_blindex + 1);
compress_block(s, (const ct_data *)s->dyn_ltree,
(const ct_data *)s->dyn_dtree);
#ifdef ZLIB_DEBUG
@@ -1004,22 +1003,22 @@ void ZLIB_INTERNAL _tr_flush_block(s, buf, stored_len, last)
s->compressed_len += 7; /* align on byte boundary */
#endif
}
- Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3,
- s->compressed_len-7*last));
+ Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len >> 3,
+ s->compressed_len - 7*last));
}
/* ===========================================================================
* Save the match info and tally the frequency counts. Return true if
* the current block must be flushed.
*/
-int ZLIB_INTERNAL _tr_tally (s, dist, lc)
+int ZLIB_INTERNAL _tr_tally(s, dist, lc)
deflate_state *s;
unsigned dist; /* distance of matched string */
- unsigned lc; /* match length-MIN_MATCH or unmatched char (if dist==0) */
+ unsigned lc; /* match length - MIN_MATCH or unmatched char (dist==0) */
{
- s->sym_buf[s->sym_next++] = dist;
- s->sym_buf[s->sym_next++] = dist >> 8;
- s->sym_buf[s->sym_next++] = lc;
+ s->sym_buf[s->sym_next++] = (uch)dist;
+ s->sym_buf[s->sym_next++] = (uch)(dist >> 8);
+ s->sym_buf[s->sym_next++] = (uch)lc;
if (dist == 0) {
/* lc is the unmatched char */
s->dyn_ltree[lc].Freq++;
@@ -1031,7 +1030,7 @@ int ZLIB_INTERNAL _tr_tally (s, dist, lc)
(ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) &&
(ush)d_code(dist) < (ush)D_CODES, "_tr_tally: bad match");
- s->dyn_ltree[_length_code[lc]+LITERALS+1].Freq++;
+ s->dyn_ltree[_length_code[lc] + LITERALS + 1].Freq++;
s->dyn_dtree[d_code(dist)].Freq++;
}
return (s->sym_next == s->sym_end);
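
The casts make the narrowing explicit: every tallied symbol occupies exactly three bytes of sym_buf, the low and high bytes of the distance followed by the length minus MIN_MATCH (or the literal byte when the distance is zero). A small sketch of the packing together with the matching unpack that compress_block() performs later, using a local buffer rather than the deflate_state:

#include <stdio.h>

int main(void) {
    unsigned char sym_buf[6];
    unsigned sym_next = 0;

    /* Tally a literal: distance 0, the byte itself in the third slot. */
    unsigned dist = 0, lc = 'x';
    sym_buf[sym_next++] = (unsigned char)dist;
    sym_buf[sym_next++] = (unsigned char)(dist >> 8);
    sym_buf[sym_next++] = (unsigned char)lc;

    /* Tally a match: distance 4097, length 258 stored as 258 - 3 = 255. */
    dist = 4097; lc = 255;
    sym_buf[sym_next++] = (unsigned char)dist;
    sym_buf[sym_next++] = (unsigned char)(dist >> 8);
    sym_buf[sym_next++] = (unsigned char)lc;

    /* Read the records back the way compress_block() does. */
    for (unsigned sx = 0; sx < sym_next; ) {
        unsigned d = sym_buf[sx++] & 0xff;
        d += (unsigned)(sym_buf[sx++] & 0xff) << 8;
        unsigned l = sym_buf[sx++];
        if (d == 0)
            printf("literal '%c'\n", (int)l);
        else
            printf("match  dist %u  len %u\n", d, l + 3 /* + MIN_MATCH */);
    }
    return 0;
}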
@@ -1061,7 +1060,7 @@ local void compress_block(s, ltree, dtree)
} else {
/* Here, lc is the match length - MIN_MATCH */
code = _length_code[lc];
- send_code(s, code+LITERALS+1, ltree); /* send the length code */
+ send_code(s, code + LITERALS + 1, ltree); /* send length code */
extra = extra_lbits[code];
if (extra != 0) {
lc -= base_length[code];
@@ -1177,6 +1176,6 @@ local void bi_windup(s)
s->bi_buf = 0;
s->bi_valid = 0;
#ifdef ZLIB_DEBUG
- s->bits_sent = (s->bits_sent+7) & ~7;
+ s->bits_sent = (s->bits_sent + 7) & ~7;
#endif
}
diff --git a/thirdparty/zlib/uncompr.c b/thirdparty/zlib/uncompr.c
index f03a1a865e..f9532f46c1 100644
--- a/thirdparty/zlib/uncompr.c
+++ b/thirdparty/zlib/uncompr.c
@@ -24,7 +24,7 @@
Z_DATA_ERROR if the input data was corrupted, including if the input data is
an incomplete zlib stream.
*/
-int ZEXPORT uncompress2 (dest, destLen, source, sourceLen)
+int ZEXPORT uncompress2(dest, destLen, source, sourceLen)
Bytef *dest;
uLongf *destLen;
const Bytef *source;
@@ -83,7 +83,7 @@ int ZEXPORT uncompress2 (dest, destLen, source, sourceLen)
err;
}
-int ZEXPORT uncompress (dest, destLen, source, sourceLen)
+int ZEXPORT uncompress(dest, destLen, source, sourceLen)
Bytef *dest;
uLongf *destLen;
const Bytef *source;
diff --git a/thirdparty/zlib/zconf.h b/thirdparty/zlib/zconf.h
index 5e1d68a004..bf977d3e70 100644
--- a/thirdparty/zlib/zconf.h
+++ b/thirdparty/zlib/zconf.h
@@ -38,6 +38,9 @@
# define crc32 z_crc32
# define crc32_combine z_crc32_combine
# define crc32_combine64 z_crc32_combine64
+# define crc32_combine_gen z_crc32_combine_gen
+# define crc32_combine_gen64 z_crc32_combine_gen64
+# define crc32_combine_op z_crc32_combine_op
# define crc32_z z_crc32_z
# define deflate z_deflate
# define deflateBound z_deflateBound
@@ -349,6 +352,9 @@
# ifdef FAR
# undef FAR
# endif
+# ifndef WIN32_LEAN_AND_MEAN
+# define WIN32_LEAN_AND_MEAN
+# endif
# include <windows.h>
/* No need for _export, use ZLIB.DEF instead. */
/* For complete Windows compatibility, use WINAPI, not __stdcall. */
@@ -467,11 +473,18 @@ typedef uLong FAR uLongf;
# undef _LARGEFILE64_SOURCE
#endif
-#if defined(__WATCOMC__) && !defined(Z_HAVE_UNISTD_H)
-# define Z_HAVE_UNISTD_H
+#ifndef Z_HAVE_UNISTD_H
+# ifdef __WATCOMC__
+# define Z_HAVE_UNISTD_H
+# endif
+#endif
+#ifndef Z_HAVE_UNISTD_H
+# if defined(_LARGEFILE64_SOURCE) && !defined(_WIN32)
+# define Z_HAVE_UNISTD_H
+# endif
#endif
#ifndef Z_SOLO
-# if defined(Z_HAVE_UNISTD_H) || defined(_LARGEFILE64_SOURCE)
+# if defined(Z_HAVE_UNISTD_H)
# include <unistd.h> /* for SEEK_*, off_t, and _LFS64_LARGEFILE */
# ifdef VMS
# include <unixio.h> /* for off_t */
diff --git a/thirdparty/zlib/zlib.h b/thirdparty/zlib/zlib.h
index 4a98e38bf3..953cb5012d 100644
--- a/thirdparty/zlib/zlib.h
+++ b/thirdparty/zlib/zlib.h
@@ -1,5 +1,5 @@
/* zlib.h -- interface of the 'zlib' general purpose compression library
- version 1.2.12, March 11th, 2022
+ version 1.2.13, October 13th, 2022
Copyright (C) 1995-2022 Jean-loup Gailly and Mark Adler
@@ -37,11 +37,11 @@
extern "C" {
#endif
-#define ZLIB_VERSION "1.2.12"
-#define ZLIB_VERNUM 0x12c0
+#define ZLIB_VERSION "1.2.13"
+#define ZLIB_VERNUM 0x12d0
#define ZLIB_VER_MAJOR 1
#define ZLIB_VER_MINOR 2
-#define ZLIB_VER_REVISION 12
+#define ZLIB_VER_REVISION 13
#define ZLIB_VER_SUBREVISION 0
/*
@@ -276,7 +276,7 @@ ZEXTERN int ZEXPORT deflate OF((z_streamp strm, int flush));
== 0), or after each call of deflate(). If deflate returns Z_OK and with
zero avail_out, it must be called again after making room in the output
buffer because there might be more output pending. See deflatePending(),
- which can be used if desired to determine whether or not there is more ouput
+ which can be used if desired to determine whether or not there is more output
in that case.
Normally the parameter flush is set to Z_NO_FLUSH, which allows deflate to
@@ -660,7 +660,7 @@ ZEXTERN int ZEXPORT deflateGetDictionary OF((z_streamp strm,
to dictionary. dictionary must have enough space, where 32768 bytes is
always enough. If deflateGetDictionary() is called with dictionary equal to
Z_NULL, then only the dictionary length is returned, and nothing is copied.
- Similary, if dictLength is Z_NULL, then it is not set.
+ Similarly, if dictLength is Z_NULL, then it is not set.
deflateGetDictionary() may return a length less than the window size, even
when more than the window size in input has been provided. It may return up
@@ -915,7 +915,7 @@ ZEXTERN int ZEXPORT inflateGetDictionary OF((z_streamp strm,
to dictionary. dictionary must have enough space, where 32768 bytes is
always enough. If inflateGetDictionary() is called with dictionary equal to
Z_NULL, then only the dictionary length is returned, and nothing is copied.
- Similary, if dictLength is Z_NULL, then it is not set.
+ Similarly, if dictLength is Z_NULL, then it is not set.
inflateGetDictionary returns Z_OK on success, or Z_STREAM_ERROR if the
stream state is inconsistent.
@@ -1437,12 +1437,12 @@ ZEXTERN z_size_t ZEXPORT gzfread OF((voidp buf, z_size_t size, z_size_t nitems,
In the event that the end of file is reached and only a partial item is
available at the end, i.e. the remaining uncompressed data length is not a
- multiple of size, then the final partial item is nevetheless read into buf
+ multiple of size, then the final partial item is nevertheless read into buf
and the end-of-file flag is set. The length of the partial item read is not
provided, but could be inferred from the result of gztell(). This behavior
is the same as the behavior of fread() implementations in common libraries,
but it prevents the direct use of gzfread() to read a concurrently written
- file, reseting and retrying on end-of-file, when size is not 1.
+ file, resetting and retrying on end-of-file, when size is not 1.
*/
ZEXTERN int ZEXPORT gzwrite OF((gzFile file, voidpc buf, unsigned len));
@@ -1913,7 +1913,7 @@ ZEXTERN int ZEXPORT inflateSyncPoint OF((z_streamp));
ZEXTERN const z_crc_t FAR * ZEXPORT get_crc_table OF((void));
ZEXTERN int ZEXPORT inflateUndermine OF((z_streamp, int));
ZEXTERN int ZEXPORT inflateValidate OF((z_streamp, int));
-ZEXTERN unsigned long ZEXPORT inflateCodesUsed OF ((z_streamp));
+ZEXTERN unsigned long ZEXPORT inflateCodesUsed OF((z_streamp));
ZEXTERN int ZEXPORT inflateResetKeep OF((z_streamp));
ZEXTERN int ZEXPORT deflateResetKeep OF((z_streamp));
#if defined(_WIN32) && !defined(Z_SOLO)
diff --git a/thirdparty/zlib/zutil.c b/thirdparty/zlib/zutil.c
index dcab28a0d5..9543ae825e 100644
--- a/thirdparty/zlib/zutil.c
+++ b/thirdparty/zlib/zutil.c
@@ -61,9 +61,11 @@ uLong ZEXPORT zlibCompileFlags()
#ifdef ZLIB_DEBUG
flags += 1 << 8;
#endif
+ /*
#if defined(ASMV) || defined(ASMINF)
flags += 1 << 9;
#endif
+ */
#ifdef ZLIB_WINAPI
flags += 1 << 10;
#endif
@@ -119,7 +121,7 @@ uLong ZEXPORT zlibCompileFlags()
# endif
int ZLIB_INTERNAL z_verbose = verbose;
-void ZLIB_INTERNAL z_error (m)
+void ZLIB_INTERNAL z_error(m)
char *m;
{
fprintf(stderr, "%s\n", m);
@@ -214,7 +216,7 @@ local ptr_table table[MAX_PTR];
* a protected system like OS/2. Use Microsoft C instead.
*/
-voidpf ZLIB_INTERNAL zcalloc (voidpf opaque, unsigned items, unsigned size)
+voidpf ZLIB_INTERNAL zcalloc(voidpf opaque, unsigned items, unsigned size)
{
voidpf buf;
ulg bsize = (ulg)items*size;
@@ -240,7 +242,7 @@ voidpf ZLIB_INTERNAL zcalloc (voidpf opaque, unsigned items, unsigned size)
return buf;
}
-void ZLIB_INTERNAL zcfree (voidpf opaque, voidpf ptr)
+void ZLIB_INTERNAL zcfree(voidpf opaque, voidpf ptr)
{
int n;
@@ -277,13 +279,13 @@ void ZLIB_INTERNAL zcfree (voidpf opaque, voidpf ptr)
# define _hfree hfree
#endif
-voidpf ZLIB_INTERNAL zcalloc (voidpf opaque, uInt items, uInt size)
+voidpf ZLIB_INTERNAL zcalloc(voidpf opaque, uInt items, uInt size)
{
(void)opaque;
return _halloc((long)items, size);
}
-void ZLIB_INTERNAL zcfree (voidpf opaque, voidpf ptr)
+void ZLIB_INTERNAL zcfree(voidpf opaque, voidpf ptr)
{
(void)opaque;
_hfree(ptr);
@@ -302,7 +304,7 @@ extern voidp calloc OF((uInt items, uInt size));
extern void free OF((voidpf ptr));
#endif
-voidpf ZLIB_INTERNAL zcalloc (opaque, items, size)
+voidpf ZLIB_INTERNAL zcalloc(opaque, items, size)
voidpf opaque;
unsigned items;
unsigned size;
@@ -312,7 +314,7 @@ voidpf ZLIB_INTERNAL zcalloc (opaque, items, size)
(voidpf)calloc(items, size);
}
-void ZLIB_INTERNAL zcfree (opaque, ptr)
+void ZLIB_INTERNAL zcfree(opaque, ptr)
voidpf opaque;
voidpf ptr;
{
diff --git a/thirdparty/zlib/zutil.h b/thirdparty/zlib/zutil.h
index d9a20ae1bf..0bc7f4ecd1 100644
--- a/thirdparty/zlib/zutil.h
+++ b/thirdparty/zlib/zutil.h
@@ -193,6 +193,7 @@ extern z_const char * const z_errmsg[10]; /* indexed by 2-zlib_error */
(!defined(_LARGEFILE64_SOURCE) || _LFS64_LARGEFILE-0 == 0)
ZEXTERN uLong ZEXPORT adler32_combine64 OF((uLong, uLong, z_off_t));
ZEXTERN uLong ZEXPORT crc32_combine64 OF((uLong, uLong, z_off_t));
+ ZEXTERN uLong ZEXPORT crc32_combine_gen64 OF((z_off_t));
#endif
/* common defaults */